[automerger skipped] SIGKILL crosvm since `crosvm stop` may be slow am: 004598c980 am: 7df0c22db2 -s ours

am skip reason: Merged-In Iec376c1bb02c98dd32ff869c708cace40ad86ff9 with SHA-1 91041ad910 is already in history

Original change: https://googleplex-android-review.googlesource.com/c/device/google/cuttlefish/+/23746824

Change-Id: Id7eba4043a8e3dc965e05913a02aa7eb0c3e7c65
Signed-off-by: Automerger Merge Worker <android-build-automerger-merge-worker@system.gserviceaccount.com>
diff --git a/Android.bp b/Android.bp
index 7ad70ee..3109bf3 100644
--- a/Android.bp
+++ b/Android.bp
@@ -51,6 +51,11 @@
         "//apex_available:platform",
         "com.android.virt",
     ],
+    target: {
+        windows: {
+            enabled: true,
+        },
+    },
 }
 
 cc_defaults {
@@ -61,10 +66,12 @@
     ],
     target: {
         host: {
-            host_ldlibs: ["-lrt"],
             cflags: ["-DCUTTLEFISH_HOST"],
             compile_multilib: "64",
         },
+        linux: {
+            host_ldlibs: ["-lrt"],
+        },
         // We don't need Darwin host-side builds
         darwin: {
             enabled: false,
@@ -77,6 +84,27 @@
     ],
 }
 
+soong_config_module_type {
+    name: "cf_cc_defaults",
+    module_type: "cc_defaults",
+    config_namespace: "cvdhost",
+    bool_variables: ["enforce_mac80211_hwsim"],
+    properties: ["cflags"],
+}
+
+// This is the customization layer driven by soong config variables.
+cf_cc_defaults {
+    name: "cvd_cc_defaults",
+    soong_config_variables: {
+        // PRODUCT_ENFORCE_MAC80211_HWSIM sets this
+        enforce_mac80211_hwsim: {
+            cflags: ["-DENFORCE_MAC80211_HWSIM=true"],
+            conditions_default: {
+                cflags: [],
+            }
+        },
+    }
+}
 // Defaults for cuttlefish modules that are available only in the guest OS (i.e. cuttlefish running
 // in a guest VM).
 cc_defaults {
@@ -102,7 +130,7 @@
     host_supported: true, // this "host" means (1)
     device_supported: true, // this is for (2)
     vendor_available: true,
-    defaults: ["cuttlefish_base"],
+    defaults: ["cuttlefish_base", "cvd_cc_defaults"],
 }
 
 // Same as "cuttlefish_host", but only for non-Android OS.
diff --git a/Android.mk b/Android.mk
index 78b607f..eca8725 100644
--- a/Android.mk
+++ b/Android.mk
@@ -27,8 +27,10 @@
 $(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,wpa_supplicant.rc,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
 $(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,init.cutf_cvm.rc,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
 $(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,bt_vhci_forwarder.rc,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
-$(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,fstab.f2fs,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
-$(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,fstab.ext4,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,fstab.cf.f2fs.hctr2,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,fstab.cf.f2fs.cts,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,fstab.cf.ext4.hctr2,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,fstab.cf.ext4.cts,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
 $(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,init.rc,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
 $(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,audio_policy.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
 
@@ -45,7 +47,7 @@
 $(eval $(call declare-1p-copy-files,device/google/cuttlefish,preinstalled-packages-product-car-cuttlefish.xml))
 $(eval $(call declare-1p-copy-files,hardware/google/camera/devices,.json))
 
-ifneq ($(filter vsoc_arm vsoc_arm64 vsoc_x86 vsoc_x86_64, $(TARGET_BOARD_PLATFORM)),)
+ifneq ($(filter vsoc_arm vsoc_arm64 vsoc_riscv64 vsoc_x86 vsoc_x86_64, $(TARGET_BOARD_PLATFORM)),)
 LOCAL_PATH:= $(call my-dir)
 
 include $(CLEAR_VARS)
diff --git a/AndroidProducts.mk b/AndroidProducts.mk
index d345316..a2a7e6c 100644
--- a/AndroidProducts.mk
+++ b/AndroidProducts.mk
@@ -15,21 +15,34 @@
 #
 
 PRODUCT_MAKEFILES := \
-	aosp_cf_arm_only_phone:$(LOCAL_DIR)/vsoc_arm_only/phone/aosp_cf.mk \
+	aosp_cf_arm_minidroid:$(LOCAL_DIR)/vsoc_arm_minidroid/aosp_cf.mk \
 	aosp_cf_arm64_auto:$(LOCAL_DIR)/vsoc_arm64_only/auto/aosp_cf.mk \
 	aosp_cf_arm64_phone:$(LOCAL_DIR)/vsoc_arm64/phone/aosp_cf.mk \
+	aosp_cf_arm64_phone_fullmte:$(LOCAL_DIR)/vsoc_arm64_only/phone/aosp_cf_fullmte.mk \
 	aosp_cf_arm64_phone_hwasan:$(LOCAL_DIR)/vsoc_arm64/phone/aosp_cf_hwasan.mk \
 	aosp_cf_arm64_only_phone:$(LOCAL_DIR)/vsoc_arm64_only/phone/aosp_cf.mk \
 	aosp_cf_arm64_only_phone_hwasan:$(LOCAL_DIR)/vsoc_arm64_only/phone/aosp_cf_hwasan.mk \
+	aosp_cf_arm64_minidroid:$(LOCAL_DIR)/vsoc_arm64_minidroid/aosp_cf.mk \
 	aosp_cf_arm64_slim:$(LOCAL_DIR)/vsoc_arm64_only/slim/aosp_cf.mk \
-	aosp_cf_x86_64_auto:$(LOCAL_DIR)/vsoc_x86_64/auto/aosp_cf.mk \
-	aosp_cf_x86_64_pc:$(LOCAL_DIR)/vsoc_x86_64/pc/aosp_cf.mk \
+	aosp_cf_riscv64_minidroid:$(LOCAL_DIR)/vsoc_riscv64_minidroid/aosp_cf.mk \
+	aosp_cf_riscv64_slim:$(LOCAL_DIR)/vsoc_riscv64/slim/aosp_cf.mk \
+	aosp_cf_riscv64_wear:$(LOCAL_DIR)/vsoc_riscv64/wear/aosp_cf.mk \
+	aosp_cf_riscv64_phone:$(LOCAL_DIR)/vsoc_riscv64/phone/aosp_cf.mk \
+	aosp_cf_x86_64_auto:$(LOCAL_DIR)/vsoc_x86_64_only/auto/aosp_cf.mk \
+	aosp_cf_x86_64_auto_md:$(LOCAL_DIR)/vsoc_x86_64_only/auto_md/aosp_cf.mk \
+	aosp_cf_x86_64_auto_mdnd:$(LOCAL_DIR)/vsoc_x86_64_only/auto_mdnd/aosp_cf.mk \
+	aosp_cf_x86_64_auto_portrait:$(LOCAL_DIR)/vsoc_x86_64_only/auto_portrait/aosp_cf.mk \
+	aosp_cf_x86_64_pc:$(LOCAL_DIR)/vsoc_x86_64_only/pc/aosp_cf.mk \
 	aosp_cf_x86_64_phone:$(LOCAL_DIR)/vsoc_x86_64/phone/aosp_cf.mk \
+	aosp_cf_x86_64_phone_vendor:$(LOCAL_DIR)/vsoc_x86_64/phone/aosp_cf_vendor.mk \
+	aosp_cf_x86_64_ssi:$(LOCAL_DIR)/vsoc_x86_64/phone/aosp_cf_ssi.mk \
 	aosp_cf_x86_64_tv:$(LOCAL_DIR)/vsoc_x86_64/tv/aosp_cf.mk \
 	aosp_cf_x86_64_foldable:$(LOCAL_DIR)/vsoc_x86_64/phone/aosp_cf_foldable.mk \
+	aosp_cf_x86_64_minidroid:$(LOCAL_DIR)/vsoc_x86_64_minidroid/aosp_cf.mk \
 	aosp_cf_x86_64_only_phone:$(LOCAL_DIR)/vsoc_x86_64_only/phone/aosp_cf.mk \
+	aosp_cf_x86_64_only_phone_hsum:$(LOCAL_DIR)/vsoc_x86_64_only/phone/aosp_cf_hsum.mk \
 	aosp_cf_x86_64_slim:$(LOCAL_DIR)/vsoc_x86_64_only/slim/aosp_cf.mk \
-	aosp_cf_x86_auto:$(LOCAL_DIR)/vsoc_x86/auto/aosp_cf.mk \
+	aosp_cf_x86_64_wear:$(LOCAL_DIR)/vsoc_x86_64_only/wear/aosp_cf.mk \
 	aosp_cf_x86_pasan:$(LOCAL_DIR)/vsoc_x86/pasan/aosp_cf.mk \
 	aosp_cf_x86_phone:$(LOCAL_DIR)/vsoc_x86/phone/aosp_cf.mk \
 	aosp_cf_x86_only_phone:$(LOCAL_DIR)/vsoc_x86_only/phone/aosp_cf.mk \
@@ -40,10 +53,12 @@
 COMMON_LUNCH_CHOICES := \
 	aosp_cf_arm64_auto-userdebug \
 	aosp_cf_arm64_phone-userdebug \
+	aosp_cf_x86_64_only_phone_hsum-userdebug \
 	aosp_cf_x86_64_pc-userdebug \
 	aosp_cf_x86_64_phone-userdebug \
 	aosp_cf_x86_64_foldable-userdebug \
-	aosp_cf_x86_auto-userdebug \
+	aosp_cf_x86_64_auto-userdebug \
+	aosp_cf_x86_64_auto_mdnd-userdebug \
 	aosp_cf_x86_phone-userdebug \
 	aosp_cf_x86_tv-userdebug \
 	aosp_cf_x86_64_tv-userdebug
diff --git a/OWNERS b/OWNERS
index a670158..28c10b0 100644
--- a/OWNERS
+++ b/OWNERS
@@ -1,12 +1,17 @@
-# Current team members
 adelva@google.com
+chadreynolds@google.com
+ellisr@google.com
+jaeman@google.com
+jaewan@google.com
 jemoreira@google.com
+jeongik@google.com
 kwstephenkim@google.com
 malchev@google.com
+moelsherif@google.com
 muntsinger@google.com
 natsu@google.com
 rammuthiah@google.com
 schuffelen@google.com
-
-# Former team members
-ghartman@google.com
+seungjaeyoo@google.com
+sorama@google.com
+weihsu@google.com
diff --git a/README.md b/README.md
index 61c9987..23abbbf 100644
--- a/README.md
+++ b/README.md
@@ -19,18 +19,21 @@
    - When running on an ARM machine, the most direct way is to check
    for the existence of `/dev/kvm`. Note that this method can also be used to
    confirm support of KVM on any environment.
-   - Before proceeding to the next step, please first follow
-   [the guide](multiarch-howto.md) to adjust APT sources.
 ***
 
-2. Download, build, and install the host debian package:
+2. Download, build, and install the host debian packages:
 
    ```bash
-   sudo apt install -y git devscripts config-package-dev debhelper-compat golang
+   sudo apt install -y git devscripts config-package-dev debhelper-compat golang curl
    git clone https://github.com/google/android-cuttlefish
    cd android-cuttlefish
-   debuild -i -us -uc -b -d
-   sudo dpkg -i ../cuttlefish-common_*_*64.deb || sudo apt-get install -f
+   for dir in base frontend; do
+     cd $dir
+     debuild -i -us -uc -b -d
+     cd ..
+   done
+   sudo dpkg -i ./cuttlefish-base_*_*64.deb || sudo apt-get install -f
+   sudo dpkg -i ./cuttlefish-user_*_*64.deb || sudo apt-get install -f
    sudo usermod -aG kvm,cvdnetwork,render $USER
    sudo reboot
    ```
@@ -84,5 +87,5 @@
 You will need to stop the virtual device within the same directory as you used
 to launch the device.
 
-    `$ HOME=$PWD ./bin/stop_cvd`
+   `$ HOME=$PWD ./bin/stop_cvd`
 
diff --git a/TEST_MAPPING b/TEST_MAPPING
index 92e1712..f711b63 100644
--- a/TEST_MAPPING
+++ b/TEST_MAPPING
@@ -29,5 +29,10 @@
     {
       "name": "CtsScopedStorageDeviceOnlyTest"
     }
+  ],
+  "auto-presubmit": [
+    {
+      "name": "hal_implementation_test"
+    }
   ]
 }
diff --git a/apex/com.google.cf.bt/Android.bp b/apex/com.google.cf.bt/Android.bp
index 438db02..d8b2337 100644
--- a/apex/com.google.cf.bt/Android.bp
+++ b/apex/com.google.cf.bt/Android.bp
@@ -32,7 +32,7 @@
     updatable: false,
     soc_specific: true,
     binaries: [
-        "android.hardware.bluetooth@1.1-service.btlinux",
+        "android.hardware.bluetooth-service.default",
         "bt_vhci_forwarder",
     ],
     prebuilts: [
@@ -40,6 +40,5 @@
         "android.hardware.bluetooth.prebuilt.xml",
         "com.google.cf.bt.rc",
     ],
-    init_rc: ["com.google.cf.bt.trig.rc"],
-    vintf_fragments: [":manifest_android.hardware.bluetooth@1.1-service.xml"],
+    vintf_fragments: [":manifest_android.hardware.bluetooth-service.default.xml"],
 }
diff --git a/apex/com.google.cf.bt/com.google.cf.bt.rc b/apex/com.google.cf.bt/com.google.cf.bt.rc
index a5f2ae7..8dca890 100644
--- a/apex/com.google.cf.bt/com.google.cf.bt.rc
+++ b/apex/com.google.cf.bt/com.google.cf.bt.rc
@@ -1,8 +1,12 @@
+# start bt_vhci_forwarder when apex is ready
+on property:apex.all.ready=true
+    start bt_vhci_forwarder
+
 service bt_vhci_forwarder /apex/com.google.cf.bt/bin/bt_vhci_forwarder -virtio_console_dev=${vendor.ser.bt-uart}
     user bluetooth
     group bluetooth
 
-service btlinux-1.1 /apex/com.google.cf.bt/bin/hw/android.hardware.bluetooth@1.1-service.btlinux
+service btlinux /apex/com.google.cf.bt/bin/hw/android.hardware.bluetooth-service.default
     class hal
     user bluetooth
     group bluetooth net_admin net_bt_admin
diff --git a/apex/com.google.cf.bt/com.google.cf.bt.trig.rc b/apex/com.google.cf.bt/com.google.cf.bt.trig.rc
deleted file mode 100644
index a082c18..0000000
--- a/apex/com.google.cf.bt/com.google.cf.bt.trig.rc
+++ /dev/null
@@ -1,6 +0,0 @@
-## Init files within the APEX do not support triggers (b/202731768)
-## By adding this as an init_rc parameter of the APEX the file will be installed
-## outside of the APEX and instead be installed under /vendor/etc/init.
-on post-fs-data
-    start bt_vhci_forwarder
-
diff --git a/apex/com.google.cf.bt/file_contexts b/apex/com.google.cf.bt/file_contexts
index b148753..101c5b3 100644
--- a/apex/com.google.cf.bt/file_contexts
+++ b/apex/com.google.cf.bt/file_contexts
@@ -1,4 +1,4 @@
 (/.*)?                                                 u:object_r:vendor_file:s0
-/bin/hw/android.hardware.bluetooth@1.1-service.btlinux u:object_r:hal_bluetooth_btlinux_exec:s0
+/bin/hw/android.hardware.bluetooth-service.default     u:object_r:hal_bluetooth_btlinux_exec:s0
 /bin/bt_vhci_forwarder                                 u:object_r:bt_vhci_forwarder_exec:s0
-/etc/permissions(/.*)?                                 u:object_r:vendor_configs_file:s0
\ No newline at end of file
+/etc/permissions(/.*)?                                 u:object_r:vendor_configs_file:s0
diff --git a/apex/com.google.cf.input.config/Android.bp b/apex/com.google.cf.input.config/Android.bp
index 00edee9..13e7415 100644
--- a/apex/com.google.cf.input.config/Android.bp
+++ b/apex/com.google.cf.input.config/Android.bp
@@ -18,8 +18,6 @@
 
 apex {
     name: "com.google.cf.input.config",
-    // InputDevice expects input config files in /apex/com.android.input.config/etc
-    apex_name: "com.android.input.config",
     manifest: "apex_manifest.json",
     key: "com.google.cf.apex.key",
     certificate: ":com.google.cf.apex.certificate",
diff --git a/apex/com.google.cf.rild/Android.bp b/apex/com.google.cf.rild/Android.bp
index 9e237b5..aee3fac 100644
--- a/apex/com.google.cf.rild/Android.bp
+++ b/apex/com.google.cf.rild/Android.bp
@@ -47,6 +47,7 @@
     prebuilts: [
         "android.hardware.telephony.gsm.prebuilt.xml",
         "android.hardware.telephony.ims.prebuilt.xml",
+        "android.hardware.telephony.satellite.prebuilt.xml",
         "com.google.cf.rild.rc",
         "ld.config.txt",
     ],
diff --git a/apex/com.google.cf.wifi/Android.bp b/apex/com.google.cf.wifi/Android.bp
index 3a57dd6..6d63886 100644
--- a/apex/com.google.cf.wifi/Android.bp
+++ b/apex/com.google.cf.wifi/Android.bp
@@ -30,10 +30,8 @@
     installable: false,
 }
 
-apex_defaults {
-    name: "com.google.cf.wifi.defaults",
-    // Name expected by wpa_supplicant when it looks for config files.
-    apex_name: "com.android.wifi.hal",
+apex {
+    name: "com.google.cf.wifi",
     manifest: "apex_manifest.json",
     key: "com.google.cf.apex.key",
     certificate: ":com.google.cf.apex.certificate",
@@ -44,30 +42,23 @@
     soc_specific: true,
     binaries: [
         "rename_netiface",
-        "setup_wifi",
         "wpa_supplicant_cf",
+        "setup_wifi",
+        "//device/generic/goldfish:mac80211_create_radios",
+        "hostapd_cf",
     ],
+    sh_binaries: ["init.wifi_apex"],
     prebuilts: [
+        "android.hardware.wifi.direct.prebuilt.xml",
+        "android.hardware.wifi.passpoint.prebuilt.xml",
         "android.hardware.wifi.prebuilt.xml",
         "com.google.cf.wifi.rc",
         "wpa_supplicant.conf.cf",
         "wpa_supplicant_overlay.conf.cf",
+        "p2p_supplicant.conf.cf",
     ],
-    // TODO(b/202992812): Use the vintf_fragment from the wpa_supplicant project.
-    vintf_fragments: ["com.google.cf.wifi.xml"],
-}
-
-apex {
-    name: "com.google.cf.wifi",
-    defaults: ["com.google.cf.wifi.defaults"],
-    prebuilts: [
-        "android.hardware.wifi.passpoint.prebuilt.xml",
+    vintf_fragments: [
+        ":android.hardware.wifi.supplicant.xml",
+        ":android.hardware.wifi.hostapd.xml",
     ],
-    multi_install_skip_symbol_files: true,
-}
-
-apex {
-    name: "com.google.cf.wifi.no-passpoint",
-    defaults: ["com.google.cf.wifi.defaults"],
-    multi_install_skip_symbol_files: true,
-}
+}
\ No newline at end of file
diff --git a/apex/com.google.cf.wifi/com.google.cf.wifi.rc b/apex/com.google.cf.wifi/com.google.cf.wifi.rc
index 8551506..1465d13 100644
--- a/apex/com.google.cf.wifi/com.google.cf.wifi.rc
+++ b/apex/com.google.cf.wifi/com.google.cf.wifi.rc
@@ -1,12 +1,36 @@
 service rename_eth0 /apex/com.android.wifi.hal/bin/rename_netiface eth0 buried_eth0
+    user root
     oneshot
 
+# For legacy wifi without openwrt
 service setup_wifi /apex/com.android.wifi.hal/bin/setup_wifi
+    user root
     oneshot
 
-service wpa_supplicant /apex/com.android.wifi.hal/bin/hw/wpa_supplicant_cf -g@android:wpa_wlan0
+# For wifi with openwrt
+service init_wifi_sh /apex/com.android.wifi.hal/bin/init.wifi
+    class late_start
+    user root
+    group root wakelock wifi
+    oneshot
+    disabled    # Started on post-fs-data
+
+service wpa_supplicant /apex/com.android.wifi.hal/bin/hw/wpa_supplicant_cf \
+        -O/data/vendor/wifi/wpa/sockets -puse_p2p_group_interface=1p2p_device=1 \
+        -m/apex/com.android.wifi.hal/etc/wifi/p2p_supplicant.conf \
+        -g@android:wpa_wlan0 -dd
     interface aidl android.hardware.wifi.supplicant.ISupplicant/default
     socket wpa_wlan0 dgram 660 wifi wifi
     group system wifi inet
+    user root
+    disabled
+    oneshot
+
+service hostapd /apex/com.android.wifi.hal/bin/hw/hostapd_cf
+    interface aidl android.hardware.wifi.hostapd.IHostapd/default
+    class main
+    capabilities NET_ADMIN NET_RAW
+    user wifi
+    group wifi net_raw net_admin
     disabled
     oneshot
diff --git a/apex/com.google.cf.wifi/com.google.cf.wifi.xml b/apex/com.google.cf.wifi/com.google.cf.wifi.xml
deleted file mode 100644
index 772096c..0000000
--- a/apex/com.google.cf.wifi/com.google.cf.wifi.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-<manifest version="1.0" type="device">
-    <hal format="hidl">
-        <name>android.hardware.wifi.supplicant</name>
-        <transport>hwbinder</transport>
-        <version>1.4</version>
-        <interface>
-            <name>ISupplicant</name>
-            <instance>default</instance>
-        </interface>
-    </hal>
-</manifest>
diff --git a/apex/com.google.cf.wifi/file_contexts b/apex/com.google.cf.wifi/file_contexts
index b11e272..8c9bf89 100644
--- a/apex/com.google.cf.wifi/file_contexts
+++ b/apex/com.google.cf.wifi/file_contexts
@@ -1,5 +1,8 @@
 (/.*)?                       u:object_r:vendor_file:s0
 /bin/rename_netiface         u:object_r:rename_netiface_exec:s0
 /bin/setup_wifi              u:object_r:setup_wifi_exec:s0
+/bin/init\.wifi            u:object_r:init_wifi_sh_exec:s0
 /bin/hw/wpa_supplicant_cf    u:object_r:hal_wifi_supplicant_default_exec:s0
+/bin/hw/hostapd_cf           u:object_r:hal_wifi_hostapd_default_exec:s0
+/bin/mac80211_create_radios  u:object_r:mac80211_create_radios_exec:s0
 /etc/permissions(/.*)?       u:object_r:vendor_configs_file:s0
diff --git a/apex/com.google.cf.wifi_hwsim/Android.bp b/apex/com.google.cf.wifi_hwsim/Android.bp
deleted file mode 100644
index 6b8b9ce..0000000
--- a/apex/com.google.cf.wifi_hwsim/Android.bp
+++ /dev/null
@@ -1,81 +0,0 @@
-// Copyright (C) 2021 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-soong_namespace {
-    imports: [
-        "device/generic/goldfish",
-    ],
-}
-
-package {
-    default_applicable_licenses: ["Android-Apache-2.0"],
-}
-
-prebuilt_etc {
-    name: "com.google.cf.wifi_hwsim.rc",
-    src: "com.google.cf.wifi_hwsim.rc",
-    installable: false,
-}
-
-cc_binary {
-    name: "android.hardware.wifi@1.0-service_cf",
-    defaults: ["android.hardware.wifi@1.0-service_default"],
-    shared_libs: ["libwifi-hal_cf"],
-    static_libs: ["android.hardware.wifi@1.0-service-lib_cf"],
-}
-
-cc_library_static {
-    name: "android.hardware.wifi@1.0-service-lib_cf",
-    defaults: ["android.hardware.wifi@1.0-service-lib_defaults"],
-    shared_libs: ["libwifi-hal_cf"],
-}
-
-cc_library_shared {
-    name: "libwifi-hal_cf",
-    defaults: ["libwifi-hal_defaults"],
-    whole_static_libs: ["libwifi-hal-emu"],
-}
-
-apex {
-    name: "com.google.cf.wifi_hwsim",
-    // Name expected by wpa_supplicant when it looks for config files.
-    apex_name: "com.android.wifi.hal",
-    manifest: "apex_manifest.json",
-    key: "com.google.cf.apex.key",
-    certificate: ":com.google.cf.apex.certificate",
-    file_contexts: "file_contexts",
-    use_vndk_as_stable: true,
-    updatable: false,
-    // Install the apex in /vendor/apex
-    soc_specific: true,
-    binaries: [
-        "mac80211_create_radios",
-        "rename_netiface",
-        "wpa_supplicant_cf",
-        "hostapd_cf",
-        "android.hardware.wifi@1.0-service_cf",
-    ],
-    sh_binaries: ["init.wifi.sh_apex"],
-    prebuilts: [
-        "android.hardware.wifi.direct.prebuilt.xml",
-        "android.hardware.wifi.passpoint.prebuilt.xml",
-        "android.hardware.wifi.prebuilt.xml",
-        "com.google.cf.wifi_hwsim.rc",
-        "p2p_supplicant.conf.cf",
-        "wpa_supplicant.conf.cf",
-        "wpa_supplicant_overlay.conf.cf",
-    ],
-    // TODO(b/202992812): Use the vintf_fragment from the wpa_supplicant project.
-    vintf_fragments: ["com.google.cf.wifi_hwsim.xml"],
-}
diff --git a/apex/com.google.cf.wifi_hwsim/apex_manifest.json b/apex/com.google.cf.wifi_hwsim/apex_manifest.json
deleted file mode 100644
index ffd1a2c..0000000
--- a/apex/com.google.cf.wifi_hwsim/apex_manifest.json
+++ /dev/null
@@ -1,4 +0,0 @@
-{
-  "name": "com.android.wifi.hal",
-  "version": 1
-}
diff --git a/apex/com.google.cf.wifi_hwsim/com.google.cf.wifi_hwsim.rc b/apex/com.google.cf.wifi_hwsim/com.google.cf.wifi_hwsim.rc
deleted file mode 100644
index 3dd0440..0000000
--- a/apex/com.google.cf.wifi_hwsim/com.google.cf.wifi_hwsim.rc
+++ /dev/null
@@ -1,41 +0,0 @@
-
-service rename_eth0 /apex/com.android.wifi.hal/bin/rename_netiface eth0 buried_eth0
-    oneshot
-
-service init_wifi_sh /apex/com.android.wifi.hal/bin/init.wifi.sh
-    class late_start
-    user root
-    group root wakelock wifi
-    oneshot
-    disabled    # Started on post-fs-data
-
-service wpa_supplicant /apex/com.android.wifi.hal/bin/hw/wpa_supplicant_cf \
-        -O/data/vendor/wifi/wpa/sockets -puse_p2p_group_interface=1p2p_device=1 \
-        -m/apex/com.android.wifi.hal/etc/wifi/p2p_supplicant.conf \
-        -g@android:wpa_wlan0 -dd
-    interface aidl android.hardware.wifi.supplicant.ISupplicant/default
-    socket wpa_wlan0 dgram 660 wifi wifi
-    group system wifi inet
-    disabled
-    oneshot
-
-service hostapd /apex/com.android.wifi.hal/bin/hw/hostapd_cf
-    interface aidl android.hardware.wifi.hostapd.IHostapd/default
-    class main
-    capabilities NET_ADMIN NET_RAW
-    user wifi
-    group wifi net_raw net_admin
-    disabled
-    oneshot
-
-service vendor.wifi_hal_legacy /apex/com.android.wifi.hal/bin/hw/android.hardware.wifi@1.0-service_cf
-    interface android.hardware.wifi@1.0::IWifi default
-    interface android.hardware.wifi@1.1::IWifi default
-    interface android.hardware.wifi@1.2::IWifi default
-    interface android.hardware.wifi@1.3::IWifi default
-    interface android.hardware.wifi@1.4::IWifi default
-    interface android.hardware.wifi@1.5::IWifi default
-    class hal
-    capabilities NET_ADMIN NET_RAW SYS_MODULE
-    user wifi
-    group wifi gps
diff --git a/apex/com.google.cf.wifi_hwsim/com.google.cf.wifi_hwsim.xml b/apex/com.google.cf.wifi_hwsim/com.google.cf.wifi_hwsim.xml
deleted file mode 100644
index 05eb2c0..0000000
--- a/apex/com.google.cf.wifi_hwsim/com.google.cf.wifi_hwsim.xml
+++ /dev/null
@@ -1,19 +0,0 @@
-<manifest version="1.0" type="device">
-    <hal format="aidl">
-        <name>android.hardware.wifi.supplicant</name>
-        <fqname>ISupplicant/default</fqname>
-    </hal>
-    <hal format="aidl">
-        <name>android.hardware.wifi.hostapd</name>
-        <fqname>IHostapd/default</fqname>
-    </hal>
-    <hal format="hidl">
-        <name>android.hardware.wifi</name>
-        <transport>hwbinder</transport>
-        <version>1.6</version>
-        <interface>
-            <name>IWifi</name>
-            <instance>default</instance>
-        </interface>
-    </hal>
-</manifest>
diff --git a/apex/com.google.cf.wifi_hwsim/file_contexts b/apex/com.google.cf.wifi_hwsim/file_contexts
deleted file mode 100644
index 083cfed..0000000
--- a/apex/com.google.cf.wifi_hwsim/file_contexts
+++ /dev/null
@@ -1,8 +0,0 @@
-(/.*)?                       u:object_r:vendor_file:s0
-/bin/rename_netiface         u:object_r:rename_netiface_exec:s0
-/bin/init\.wifi\.sh            u:object_r:init_wifi_sh_exec:s0
-/bin/hw/wpa_supplicant_cf    u:object_r:hal_wifi_supplicant_default_exec:s0
-/bin/hw/hostapd_cf           u:object_r:hal_wifi_hostapd_default_exec:s0
-/bin/mac80211_create_radios  u:object_r:mac80211_create_radios_exec:s0
-/etc/permissions(/.*)?       u:object_r:vendor_configs_file:s0
-/bin/hw/android\.hardware\.wifi@1\.0-service_cf      u:object_r:hal_wifi_default_exec:s0
diff --git a/build/Android.bp b/build/Android.bp
index dbae9bf..800fb37 100644
--- a/build/Android.bp
+++ b/build/Android.bp
@@ -1,3 +1,18 @@
+//
+// Copyright (C) 2020 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
 package {
     default_applicable_licenses: ["Android-Apache-2.0"],
 }
@@ -31,19 +46,21 @@
 }
 
 cvd_host_tools = [
-    "android.hardware.automotive.vehicle@2.0-virtualization-grpc-server",
     "adb",
     "adb_connector",
     "allocd",
     "allocd_client",
     "assemble_cvd",
     "avbtool",
-    "bt_connector",
+    "tcp_connector",
     "common_crosvm",
     "config_server",
     "console_forwarder",
     "crosvm",
     "cvd",
+    "cvd_internal_display",
+    "cvd_internal_env",
+    "echo_server",
     "cvd_internal_host_bugreport",
     "cvd_internal_start",
     "cvd_internal_status",
@@ -51,8 +68,12 @@
     "cvd_host_bugreport",
     "cvd_status",
     "cvd_test_gce_driver",
+    "cvdremote",
+    "e2fsdroid",
     "extract-ikconfig",
     "extract-vmlinux",
+    "fastboot",
+    "fec",
     "fsck.f2fs",
     "gnss_grpc_proxy",
     "health",
@@ -62,6 +83,7 @@
     "libgrpc++_unsecure",
     "log_tee",
     "logcat_receiver",
+    "lpadd",
     "lpmake",
     "lpunpack",
     "lz4",
@@ -70,20 +92,32 @@
     "mkbootfs",
     "mkbootimg",
     "mkenvimage_slim",
+    "mke2fs",
+    "mkuserimg_mke2fs",
     "modem_simulator",
     "ms-tpm-20-ref",
     "mcopy",
     "mmd",
     "mtools",
+    "netsim",
+    "netsimd",
     "newfs_msdos",
+    "openwrt_control_server",
+    "pica",
     "powerwash_cvd",
+    "process_restarter",
     "restart_cvd",
     "root-canal",
     "run_cvd",
     "secure_env",
+    "sefcontext_compile",
     "cvd_send_sms",
+    "cvd_update_location",
+    "cvd_import_locations",
+    "simg2img",
     "socket_vsock_proxy",
     "stop_cvd",
+    "test_cvd_load_parser",
     "tombstone_receiver",
     "toybox",
     "unpack_bootimg",
@@ -96,8 +130,10 @@
 ]
 
 cvd_openwrt_images = [
-    "kernel_for_openwrt",
-    "openwrt_rootfs",
+    "openwrt_kernel_x86_64",
+    "openwrt_rootfs_x86_64",
+    "openwrt_kernel_aarch64",
+    "openwrt_rootfs_aarch64",
 ]
 
 cvd_bluetooth_config_files = [
@@ -120,6 +156,7 @@
     "webrtc_index.html",
     "webrtc_client.html",
     "webrtc_rootcanal.js",
+    "webrtc_location.js",
     "webrtc_server.crt",
     "webrtc_server.key",
     "webrtc_server.p12",
@@ -135,13 +172,19 @@
     "numeric_operator.xml_host",
 ]
 
+cvd_host_acloud_data = [
+    "default.config_host",
+]
+
 cvd_host_seccomp_policy_x86_64 = [
     "9p_device.policy_x86_64",
     "balloon_device.policy_x86_64",
     "battery.policy_x86_64",
     "block_device.policy_x86_64",
+    "block_device_vhost_user.policy_x86_64",
+    "block_device_vvu.policy_x86_64",
+    "coiommu_device.policy_x86_64",
     "cras_audio_device.policy_x86_64",
-    "cras_snd_device.policy_x86_64",
     "fs_device.policy_x86_64",
     "gpu_device.policy_x86_64",
     "gpu_render_server.policy_x86_64",
@@ -151,15 +194,21 @@
     "null_audio_device.policy_x86_64",
     "pmem_device.policy_x86_64",
     "rng_device.policy_x86_64",
-    "serial.policy_x86_64",
+    "serial_device.policy_x86_64",
+    "serial_device_vhost_user.policy_x86_64",
+    "serial_device_vvu.policy_x86_64",
+    "snd_cras_device.policy_x86_64",
+    "snd_null_device.policy_x86_64",
     "tpm_device.policy_x86_64",
     "vfio_device.policy_x86_64",
     "vhost_net_device.policy_x86_64",
     "vhost_vsock_device.policy_x86_64",
     "video_device.policy_x86_64",
     "vios_audio_device.policy_x86_64",
+    "vtpm_proxy_device.policy_x86_64",
+    "vvu_proxy_device.policy_x86_64",
     "wl_device.policy_x86_64",
-    "xhci.policy_x86_64",
+    "xhci_device.policy_x86_64",
 ]
 
 cvd_host_seccomp_policy_aarch64 = [
@@ -167,8 +216,8 @@
     "balloon_device.policy_aarch64",
     "battery.policy_aarch64",
     "block_device.policy_aarch64",
+    "coiommu_device.policy_aarch64",
     "cras_audio_device.policy_aarch64",
-    "cras_snd_device.policy_aarch64",
     "fs_device.policy_aarch64",
     "gpu_device.policy_aarch64",
     "gpu_render_server.policy_aarch64",
@@ -177,19 +226,25 @@
     "null_audio_device.policy_aarch64",
     "pmem_device.policy_aarch64",
     "rng_device.policy_aarch64",
-    "serial.policy_aarch64",
+    "serial_device.policy_aarch64",
+    "snd_cras_device.policy_aarch64",
+    "snd_null_device.policy_aarch64",
     "tpm_device.policy_aarch64",
     "vhost_net_device.policy_aarch64",
     "vhost_vsock_device.policy_aarch64",
+    "video_device.policy_aarch64",
     "vios_audio_device.policy_aarch64",
     "wl_device.policy_aarch64",
-    "xhci.policy_aarch64",
+    "xhci_device.policy_aarch64",
 ]
 
-cvd_host_qemu_bootloader = [
-    "bootloader_qemu_x86_64",
+cvd_host_bootloader = [
+    "bootloader_crosvm_x86_64",
+    "bootloader_crosvm_aarch64",
     "bootloader_qemu_aarch64",
     "bootloader_qemu_arm",
+    "bootloader_qemu_riscv64",
+    "bootloader_qemu_x86_64",
 ]
 
 prebuilt_etc_host {
@@ -203,6 +258,30 @@
     "cvd_avb_testkey",
 ]
 
+cvd_host_netsim_gui_assets = [
+    "netsim_ui_index.html",
+    "netsim_ui_js_cube-sprite.js",
+    "netsim_ui_js_customize-map-button.js",
+    "netsim_ui_js_device-dragzone.js",
+    "netsim_ui_js_device-dropzone.js",
+    "netsim_ui_js_device-info.js",
+    "netsim_ui_js_device-list.js",
+    "netsim_ui_js_device-map.js",
+    "netsim_ui_js_device-observer.js",
+    "netsim_ui_js_license-info.js",
+    "netsim_ui_js_navigation-bar.js",
+    "netsim_ui_js_netsim-app.js",
+    "netsim_ui_js_packet-info.js",
+    "netsim_ui_js_pyramid-sprite.js",
+    "netsim_ui_js_model.js",
+    "netsim_ui_tslib",
+    "netsim_ui_assets_grid-background.svg",
+    "netsim_ui_assets_hexagonal-background.png",
+    "netsim_ui_assets_netsim-logo-b.svg",
+    "netsim_ui_assets_netsim-logo.svg",
+    "netsim_ui_assets_polar-background.svg",
+]
+
 cvd_host_package_customization {
     name: "cvd-host_package",
     deps: cvd_host_tools +
@@ -212,9 +291,11 @@
             deps: cvd_host_webrtc_assets +
                 cvd_host_avb_testkey +
                 cvd_host_model_simulator_files +
-                cvd_host_qemu_bootloader +
+                cvd_host_acloud_data +
+                cvd_host_bootloader +
                 cvd_bluetooth_config_files +
-                cvd_openwrt_images,
+                cvd_openwrt_images +
+                cvd_host_netsim_gui_assets,
         },
     },
 
diff --git a/build/cvd-host-package.go b/build/cvd-host-package.go
index 5ff885e..57e4a8c 100644
--- a/build/cvd-host-package.go
+++ b/build/cvd-host-package.go
@@ -86,27 +86,26 @@
 var pctx = android.NewPackageContext("android/soong/cuttlefish")
 
 func (c *cvdHostPackage) GenerateAndroidBuildActions(ctx android.ModuleContext) {
-	zipFile := android.PathForModuleOut(ctx, "package.zip")
-	c.CopyDepsToZip(ctx, c.GatherPackagingSpecs(ctx), zipFile)
+	packageDir := android.PathForModuleInstall(ctx, c.BaseModuleName())
 
-	// Dir where to extract the zip file and construct the final tar.gz from
-	packageDir := android.PathForModuleOut(ctx, ".temp")
-	builder := android.NewRuleBuilder(pctx, ctx)
-	builder.Command().
-		BuiltTool("zipsync").
-		FlagWithArg("-d ", packageDir.String()).
-		Input(zipFile)
+	stamp := android.PathForModuleOut(ctx, "package.stamp")
+	dirBuilder := android.NewRuleBuilder(pctx, ctx)
+	dirBuilder.Command().Text("rm").Flag("-rf").Text(packageDir.String())
+	dirBuilder.Command().Text("mkdir").Flag("-p").Text(packageDir.String())
+	c.CopySpecsToDir(ctx, dirBuilder, c.GatherPackagingSpecs(ctx), packageDir)
+	dirBuilder.Command().Text("touch").Output(stamp)
+	dirBuilder.Build("cvd_host_package", fmt.Sprintf("Packaging %s", c.BaseModuleName()))
+	ctx.InstallFile(android.PathForModuleInstall(ctx), c.BaseModuleName()+".stamp", stamp)
 
-	output := android.PathForModuleOut(ctx, "package.tar.gz")
-	builder.Command().Text("tar Scfz").
-		Output(output).
-		FlagWithArg("-C ", packageDir.String()).
+	tarball := android.PathForModuleOut(ctx, "package.tar.gz")
+	tarballBuilder := android.NewRuleBuilder(pctx, ctx)
+	tarballBuilder.Command().Text("tar Scfz").
+		Output(tarball).
+		Flag("-C").
+		Text(packageDir.String()).
+		Implicit(stamp).
 		Flag("--mtime='2020-01-01'"). // to have reproducible builds
 		Text(".")
-
-	builder.Command().Text("rm").Flag("-rf").Text(packageDir.String())
-
-	builder.Build("cvd_host_package", fmt.Sprintf("Packaging %s", c.BaseModuleName()))
-
-	ctx.InstallFile(android.PathForModuleInstall(ctx), c.BaseModuleName()+".tar.gz", output)
+	tarballBuilder.Build("cvd_host_tarball", fmt.Sprintf("Creating tarball for %s", c.BaseModuleName()))
+	ctx.InstallFile(android.PathForModuleInstall(ctx), c.BaseModuleName()+".tar.gz", tarball)
 }
diff --git a/common/frontend/socket_vsock_proxy/Android.bp b/common/frontend/socket_vsock_proxy/Android.bp
index 427d9c2..02fb2f1 100644
--- a/common/frontend/socket_vsock_proxy/Android.bp
+++ b/common/frontend/socket_vsock_proxy/Android.bp
@@ -20,7 +20,9 @@
 cc_binary {
     name: "socket_vsock_proxy",
     srcs: [
-        "main.cpp",
+        "client.cpp",
+        "server.cpp",
+        "socket_vsock_proxy.cpp",
     ],
     shared_libs: [
         "libext2_blkid",
diff --git a/common/frontend/socket_vsock_proxy/client.cpp b/common/frontend/socket_vsock_proxy/client.cpp
new file mode 100644
index 0000000..90aee5a
--- /dev/null
+++ b/common/frontend/socket_vsock_proxy/client.cpp
@@ -0,0 +1,108 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include <android-base/strings.h>
+#include <string>
+
+#include "common/frontend/socket_vsock_proxy/client.h"
+
+namespace cuttlefish {
+namespace socket_proxy {
+namespace {
+
+bool IsIpv6(const std::string& address) {
+  return address.find(':') != std::string::npos;
+}
+
+SharedFD StartIpv4(const std::string& host, int port) {
+  return SharedFD::SocketClient(host, port, SOCK_STREAM);
+}
+
+SharedFD StartIpv6(const std::string& host, int port) {
+  const auto host_parsed = android::base::Tokenize(host, "%");
+  const auto host_interface_tokens_count = host_parsed.size();
+
+  CHECK(host_interface_tokens_count == 1 || host_interface_tokens_count == 2)
+      << "Cannot parse passed host " << host << " to extract the network interface separated by %";
+
+  std::string host_name;
+  std::string interface_name;
+  if (host_parsed.size() == 2) {
+    host_name = host_parsed[0];
+    interface_name = host_parsed[1];
+  } else {
+    host_name = host;
+  }
+
+  return SharedFD::Socket6Client(host_name, interface_name, port, SOCK_STREAM);
+}
+
+}
+
+TcpClient::TcpClient(std::string host, int port) : host_(std::move(host)), port_(port) {}
+
+SharedFD TcpClient::Start() {
+  SharedFD client;
+
+  if (IsIpv6(host_)) {
+    client = StartIpv6(host_, port_);
+  } else {
+    client = StartIpv4(host_, port_);
+  }
+
+  if (client->IsOpen()) {
+    last_failure_reason_ = 0;
+    LOG(DEBUG) << "Connected to socket:" << host_ << ":" << port_;
+    return client;
+  } else {
+    // Don't log if the previous connection failed with the same error
+    if (last_failure_reason_ != client->GetErrno()) {
+      last_failure_reason_ = client->GetErrno();
+      LOG(ERROR) << "Unable to connect to tcp server: " << client->StrError();
+    }
+  }
+
+  return client;
+}
+
+std::string TcpClient::Describe() const {
+  return fmt::format("tcp: {}:{}", host_, port_);
+}
+
+VsockClient::VsockClient(int id, int port) : id_(id), port_(port) {}
+
+SharedFD VsockClient::Start() {
+  auto vsock_socket = SharedFD::VsockClient(id_, port_, SOCK_STREAM);
+
+  if (vsock_socket->IsOpen()) {
+    last_failure_reason_ = 0;
+    LOG(DEBUG) << "Connected to vsock:" << id_ << ":" << port_;
+  } else {
+    // Don't log if the previous connection failed with the same error
+    if (last_failure_reason_ != vsock_socket->GetErrno()) {
+      last_failure_reason_ = vsock_socket->GetErrno();
+      LOG(ERROR) << "Unable to connect to vsock server: "
+                 << vsock_socket->StrError();
+    }
+  }
+  return vsock_socket;
+}
+
+std::string VsockClient::Describe() const {
+  return fmt::format("vsock: {}:{}", id_, port_);
+}
+
+}
+}
\ No newline at end of file
diff --git a/common/frontend/socket_vsock_proxy/client.h b/common/frontend/socket_vsock_proxy/client.h
new file mode 100644
index 0000000..ca21005
--- /dev/null
+++ b/common/frontend/socket_vsock_proxy/client.h
@@ -0,0 +1,55 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#pragma once
+
+#include "common/libs/fs/shared_fd.h"
+
+namespace cuttlefish {
+namespace socket_proxy {
+
+class Client {
+ public:
+  virtual SharedFD Start() = 0;
+  virtual std::string Describe() const = 0;
+  virtual ~Client() = default;
+};
+
+class TcpClient : public Client {
+ public:
+  TcpClient(std::string host, int port);
+  SharedFD Start() override;
+  std::string Describe() const override;
+
+ private:
+  std::string host_;
+  int port_;
+  int last_failure_reason_ = 0;
+};
+
+class VsockClient : public Client {
+ public:
+  VsockClient(int id, int port);
+  SharedFD Start() override;
+  std::string Describe() const override;
+
+ private:
+  int id_;
+  int port_;
+  int last_failure_reason_ = 0;
+};
+
+}
+}
\ No newline at end of file
diff --git a/common/frontend/socket_vsock_proxy/main.cpp b/common/frontend/socket_vsock_proxy/main.cpp
deleted file mode 100644
index 5d08790..0000000
--- a/common/frontend/socket_vsock_proxy/main.cpp
+++ /dev/null
@@ -1,178 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <set>
-#include <android-base/logging.h>
-#include <gflags/gflags.h>
-
-#include "common/libs/fs/shared_fd.h"
-#include "common/libs/utils/socket2socket_proxy.h"
-#include "host/commands/kernel_log_monitor/utils.h"
-
-#ifdef CUTTLEFISH_HOST
-#include "host/libs/config/logging.h"
-#endif // CUTTLEFISH_HOST
-
-DEFINE_string(server, "",
-              "The type of server to host, `vsock` or `tcp`. When hosting a server "
-              "of one type, the proxy will take inbound connections of this type and "
-              "make outbound connections of the other type.");
-DEFINE_uint32(tcp_port, 0, "TCP port");
-DEFINE_uint32(vsock_port, 0, "vsock port");
-DEFINE_uint32(vsock_cid, 0, "Vsock cid to initiate connections to");
-DEFINE_int32(adbd_events_fd, -1, "A file descriptor. If set it will wait for "
-                                 "AdbdStarted boot event from the kernel log "
-                                 "monitor before creating a tcp-vsock tunnel."
-                                 "This option is used by --server=tcp only "
-                                 "when socket_vsock_proxy runs as a host service");
-DEFINE_int32(
-    server_fd, -1,
-    "A file descriptor. If set the passed file descriptor will be used as the "
-    "server and the corresponding port flag will be ignored");
-
-namespace {
-void WaitForAdbdToBeStarted(int events_fd) {
-  auto evt_shared_fd = cuttlefish::SharedFD::Dup(events_fd);
-  close(events_fd);
-  while (evt_shared_fd->IsOpen()) {
-    std::optional<monitor::ReadEventResult> read_result =
-        monitor::ReadEvent(evt_shared_fd);
-    if (!read_result) {
-      LOG(ERROR) << "Failed to read a complete kernel log adb event.";
-      // The file descriptor can't be trusted anymore, stop waiting and try to
-      // connect
-      return;
-    }
-
-    if (read_result->event == monitor::Event::AdbdStarted) {
-      LOG(DEBUG) << "Adbd has started in the guest, connecting adb";
-      return;
-    }
-  }
-}
-
-// intented to run as cuttlefish host service
-void TcpServer() {
-  LOG(DEBUG) << "starting TCP server on " << FLAGS_tcp_port
-             << " for vsock port " << FLAGS_vsock_port;
-  cuttlefish::SharedFD server;
-  if (FLAGS_server_fd < 0) {
-    server =
-        cuttlefish::SharedFD::SocketLocalServer(FLAGS_tcp_port, SOCK_STREAM);
-  } else {
-    server = cuttlefish::SharedFD::Dup(FLAGS_server_fd);
-    close(FLAGS_server_fd);
-  }
-  CHECK(server->IsOpen()) << "Could not start server on " << FLAGS_tcp_port;
-  LOG(DEBUG) << "Accepting client connections";
-  int last_failure_reason = 0;
-  cuttlefish::Proxy(server, [&last_failure_reason]() {
-    auto vsock_socket = cuttlefish::SharedFD::VsockClient(
-        FLAGS_vsock_cid, FLAGS_vsock_port, SOCK_STREAM);
-    if (vsock_socket->IsOpen()) {
-      last_failure_reason = 0;
-      LOG(DEBUG) << "Connected to vsock:" << FLAGS_vsock_cid << ":"
-                 << FLAGS_vsock_port;
-    } else {
-      // Don't log if the previous connection failed with the same error
-      if (last_failure_reason != vsock_socket->GetErrno()) {
-        last_failure_reason = vsock_socket->GetErrno();
-        LOG(ERROR) << "Unable to connect to vsock server: "
-                   << vsock_socket->StrError();
-      }
-    }
-    return vsock_socket;
-  });
-}
-
-cuttlefish::SharedFD OpenSocketConnection() {
-  while (true) {
-    auto sock = cuttlefish::SharedFD::SocketLocalClient(FLAGS_tcp_port, SOCK_STREAM);
-    if (sock->IsOpen()) {
-      return sock;
-    }
-    LOG(WARNING) << "could not connect on port " << FLAGS_tcp_port
-                 << ". sleeping for 1 second";
-    sleep(1);
-  }
-}
-
-bool socketErrorIsRecoverable(int error) {
-  std::set<int> unrecoverable{EACCES, EAFNOSUPPORT, EINVAL, EPROTONOSUPPORT};
-  return unrecoverable.find(error) == unrecoverable.end();
-}
-
-[[noreturn]] static void SleepForever() {
-  while (true) {
-    sleep(std::numeric_limits<unsigned int>::max());
-  }
-}
-
-// intended to run inside Android guest
-void VsockServer() {
-  LOG(DEBUG) << "Starting vsock server on " << FLAGS_vsock_port;
-  cuttlefish::SharedFD vsock;
-  if (FLAGS_server_fd < 0) {
-    do {
-      vsock = cuttlefish::SharedFD::VsockServer(FLAGS_vsock_port, SOCK_STREAM);
-      if (!vsock->IsOpen() && !socketErrorIsRecoverable(vsock->GetErrno())) {
-        LOG(ERROR) << "Could not open vsock socket: " << vsock->StrError();
-        SleepForever();
-      }
-    } while (!vsock->IsOpen());
-  } else {
-    vsock = cuttlefish::SharedFD::Dup(FLAGS_server_fd);
-    close(FLAGS_server_fd);
-  }
-  CHECK(vsock->IsOpen()) << "Could not start server on " << FLAGS_vsock_port;
-  cuttlefish::Proxy(vsock, []() {
-    LOG(DEBUG) << "vsock socket accepted";
-    auto client = OpenSocketConnection();
-    CHECK(client->IsOpen()) << "error connecting to guest client";
-    return client;
-  });
-}
-
-}  // namespace
-
-int main(int argc, char* argv[]) {
-#ifdef CUTTLEFISH_HOST
-  cuttlefish::DefaultSubprocessLogging(argv);
-#else
-  ::android::base::InitLogging(argv, android::base::LogdLogger());
-#endif
-  google::ParseCommandLineFlags(&argc, &argv, true);
-
-  CHECK((FLAGS_server == "tcp" && FLAGS_server_fd >= 0) || FLAGS_tcp_port != 0)
-      << "Must specify -tcp_port or -server_fd (with -server=tcp) flag";
-  CHECK((FLAGS_server == "vsock" && FLAGS_server_fd >= 0) ||
-        FLAGS_vsock_port != 0)
-      << "Must specify -vsock_port or -server_fd (with -server=vsock) flag";
-
-  if (FLAGS_adbd_events_fd >= 0) {
-    LOG(DEBUG) << "Wating AdbdStarted boot event from the kernel log";
-    WaitForAdbdToBeStarted(FLAGS_adbd_events_fd);
-  }
-
-  if (FLAGS_server == "tcp") {
-    CHECK(FLAGS_vsock_cid != 0) << "Must specify -vsock_cid flag";
-    TcpServer();
-  } else if (FLAGS_server == "vsock") {
-    VsockServer();
-  } else {
-    LOG(FATAL) << "Unknown server type: " << FLAGS_server;
-  }
-}
diff --git a/common/frontend/socket_vsock_proxy/server.cpp b/common/frontend/socket_vsock_proxy/server.cpp
new file mode 100644
index 0000000..2264442
--- /dev/null
+++ b/common/frontend/socket_vsock_proxy/server.cpp
@@ -0,0 +1,91 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include <set>
+
+#include "common/frontend/socket_vsock_proxy/server.h"
+#include "common/libs/utils/contains.h"
+
+namespace cuttlefish {
+namespace socket_proxy {
+namespace {
+
+bool socketErrorIsRecoverable(int error) {
+  std::set<int> unrecoverable{EACCES, EAFNOSUPPORT, EINVAL, EPROTONOSUPPORT};
+  return !Contains(unrecoverable, error);
+}
+
+[[noreturn]] static void SleepForever() {
+  while (true) {
+    sleep(std::numeric_limits<unsigned int>::max());
+  }
+}
+
+}
+
+TcpServer::TcpServer(int port) : port_(port) {}
+
+SharedFD TcpServer::Start() {
+  SharedFD server;
+
+  server = SharedFD::SocketLocalServer(port_, SOCK_STREAM);
+  CHECK(server->IsOpen()) << "Could not start server on " << port_;
+
+  return server;
+}
+
+std::string TcpServer::Describe() const {
+  return fmt::format("tcp: {}", port_);
+}
+
+VsockServer::VsockServer(int port) : port_(port) {}
+
+// Intended to run in the guest
+SharedFD VsockServer::Start() {
+  SharedFD server;
+
+  do {
+    server = SharedFD::VsockServer(port_, SOCK_STREAM);
+    if (!server->IsOpen() && !socketErrorIsRecoverable(server->GetErrno())) {
+      LOG(ERROR) << "Could not open vsock socket: " << server->StrError();
+      // socket_vsock_proxy will now wait forever in the guest on encountering an
+      // "unrecoverable" errno. This avoids the churn of being repeatedly
+      // restarted by init.vsoc.rc.
+      SleepForever();
+    }
+  } while (!server->IsOpen());
+
+  return server;
+}
+
+std::string VsockServer::Describe() const {
+  return fmt::format("vsock: {}", port_);
+}
+
+DupServer::DupServer(int fd) : fd_(fd), sfd_(SharedFD::Dup(fd_)) {
+  close(fd);
+}
+
+SharedFD DupServer::Start() {
+  CHECK(sfd_->IsOpen()) << "Could not start duplicate server for passed fd";
+  return sfd_;
+}
+
+std::string DupServer::Describe() const {
+  return fmt::format("fd: {}", fd_);
+}
+
+}
+}
\ No newline at end of file
diff --git a/common/frontend/socket_vsock_proxy/server.h b/common/frontend/socket_vsock_proxy/server.h
new file mode 100644
index 0000000..66aaeef
--- /dev/null
+++ b/common/frontend/socket_vsock_proxy/server.h
@@ -0,0 +1,62 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#pragma once
+
+#include "common/libs/fs/shared_fd.h"
+
+namespace cuttlefish {
+namespace socket_proxy {
+
+class Server {
+ public:
+  virtual SharedFD Start() = 0;
+  virtual std::string Describe() const = 0;
+  virtual ~Server() = default;
+};
+
+class TcpServer : public Server {
+ public:
+  TcpServer(int port);
+  SharedFD Start() override;
+  std::string Describe() const override;
+
+ private:
+  int port_;
+};
+
+class VsockServer : public Server {
+ public:
+  VsockServer(int port);
+  SharedFD Start() override;
+  std::string Describe() const override;
+
+ private:
+  int port_;
+};
+
+class DupServer : public Server {
+ public:
+  DupServer(int fd);
+  SharedFD Start() override;
+  std::string Describe() const override;
+
+ private:
+  int fd_;
+  SharedFD sfd_;
+};
+
+}
+}
\ No newline at end of file
diff --git a/common/frontend/socket_vsock_proxy/socket_vsock_proxy.cpp b/common/frontend/socket_vsock_proxy/socket_vsock_proxy.cpp
new file mode 100644
index 0000000..1851368
--- /dev/null
+++ b/common/frontend/socket_vsock_proxy/socket_vsock_proxy.cpp
@@ -0,0 +1,190 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <signal.h>
+#include <android-base/logging.h>
+#include <gflags/gflags.h>
+
+#include <memory>
+#include <sstream>
+
+#include "common/frontend/socket_vsock_proxy/client.h"
+#include "common/frontend/socket_vsock_proxy/server.h"
+#include "common/libs/fs/shared_fd.h"
+#include "common/libs/utils/socket2socket_proxy.h"
+#include "common/libs/utils/tee_logging.h"
+#include "host/commands/kernel_log_monitor/utils.h"
+
+#ifdef CUTTLEFISH_HOST
+#include "host/libs/config/logging.h"
+#endif // CUTTLEFISH_HOST
+
+constexpr const char TRANSPORT_TCP[] = "tcp";
+constexpr const char TRANSPORT_VSOCK[] = "vsock";
+
+DEFINE_string(label, "socket_vsock_proxy", "Label which is used only for logging. "
+                                           "Log messages will look like [label] message");
+DEFINE_string(server_type, "", "The type of server to host, `vsock` or `tcp`.");
+DEFINE_string(client_type, "", "The type of client to connect with, `vsock` or `tcp`.");
+DEFINE_uint32(server_tcp_port, 0, "Server TCP port");
+DEFINE_string(client_tcp_host, "localhost", "Client TCP host (default localhost)");
+DEFINE_uint32(client_tcp_port, 0, "Client TCP port");
+DEFINE_uint32(server_vsock_port, 0, "vsock port");
+DEFINE_uint32(client_vsock_id, 0, "Vsock cid to initiate connections to");
+DEFINE_uint32(client_vsock_port, 0, "Vsock port to initiate connections to");
+DEFINE_int32(server_fd, -1, "A file descriptor. If set the passed file descriptor will be used as "
+                            "the server and the corresponding port flag will be ignored");
+
+DEFINE_int32(events_fd, -1, "A file descriptor. If set it will listen for the events "
+                             "to start / stop proxying. This option can be used only "
+                             "if start_event_id is provided (stop_event_id is optional)");
+DEFINE_int32(start_event_id, -1, "Kernel event id (cuttlefish::monitor::Event from "
+                                  "kernel_log_server.h) that we will listen to start proxy");
+DEFINE_int32(stop_event_id, -1, "Kernel event id (cuttlefish::monitor::Event from "
+                                  "kernel_log_server.h) that we will listen to stop proxy");
+
+namespace cuttlefish {
+namespace socket_proxy {
+namespace {
+
+std::unique_ptr<Server> BuildServer() {
+  if (FLAGS_server_fd >= 0) {
+    return std::make_unique<DupServer>(FLAGS_server_fd);
+  }
+
+  CHECK(FLAGS_server_type == TRANSPORT_TCP || FLAGS_server_type == TRANSPORT_VSOCK)
+      << "Must specify -server_type with tcp or vsock values";
+
+  if (FLAGS_server_type == TRANSPORT_TCP) {
+    CHECK(FLAGS_server_tcp_port != 0)
+        << "Must specify -server_tcp_port or -server_fd with -server_type=tcp flag";
+  }
+  if (FLAGS_server_type == TRANSPORT_VSOCK) {
+    CHECK(FLAGS_server_vsock_port != 0)
+        << "Must specify -server_vsock_port or -server_fd with -server_type=vsock flag";
+  }
+
+  std::unique_ptr<Server> server = nullptr;
+
+  if (FLAGS_server_type == TRANSPORT_TCP) {
+    server = std::make_unique<TcpServer>(FLAGS_server_tcp_port);
+  } else if (FLAGS_server_type == TRANSPORT_VSOCK) {
+    server = std::make_unique<VsockServer>(FLAGS_server_vsock_port);
+  } else {
+    LOG(FATAL) << "Unknown server type: " << FLAGS_server_type;
+  }
+
+  return server;
+}
+
+std::unique_ptr<Client> BuildClient() {
+  CHECK(FLAGS_client_type == TRANSPORT_TCP || FLAGS_client_type == TRANSPORT_VSOCK)
+      << "Must specify -client_type with tcp or vsock values";
+
+  if (FLAGS_client_type == TRANSPORT_TCP) {
+    CHECK(FLAGS_client_tcp_port != 0)
+        << "For -client_type=tcp you must specify -client_tcp_port flag";
+  }
+  if (FLAGS_client_type == TRANSPORT_VSOCK) {
+    CHECK(FLAGS_client_vsock_id != 0 && FLAGS_client_vsock_port != 0)
+        << "For -client_type=vsock you must specify -client_vsock_id and -client_vsock_port flags";
+  }
+
+  std::unique_ptr<Client> client = nullptr;
+
+  if (FLAGS_client_type == TRANSPORT_TCP) {
+    client = std::make_unique<TcpClient>(FLAGS_client_tcp_host, FLAGS_client_tcp_port);
+  } else if (FLAGS_client_type == TRANSPORT_VSOCK) {
+    client = std::make_unique<VsockClient>(FLAGS_client_vsock_id, FLAGS_client_vsock_port);
+  } else {
+    LOG(FATAL) << "Unknown client type: " << FLAGS_client_type;
+  }
+
+  return client;
+}
+
+void ListenEventsAndProxy(int events_fd, const monitor::Event start, const monitor::Event stop,
+                          Server& server, Client& client) {
+  auto events = SharedFD::Dup(events_fd);
+  close(events_fd);
+
+  std::unique_ptr<cuttlefish::ProxyServer> proxy;
+
+  LOG(DEBUG) << "Start reading ";
+  while (events->IsOpen()) {
+    std::optional<monitor::ReadEventResult> received_event = monitor::ReadEvent(events);
+
+    if (!received_event) {
+      LOG(ERROR) << "Failed to read a complete kernel log event";
+      continue;
+    }
+
+    if (start != -1 && received_event->event == start) {
+      if (!proxy) {
+        LOG(INFO) << "Start event (" << start << ") received. Starting proxy";
+        LOG(INFO) << "From: " << server.Describe();
+        LOG(INFO) << "To: " << client.Describe();
+        auto started_proxy = cuttlefish::ProxyAsync(server.Start(), [&client] {
+          return client.Start();
+        });
+        proxy = std::move(started_proxy);
+      }
+      continue;
+    }
+
+    if (stop != -1 && received_event->event == stop) {
+      LOG(INFO) << "Stop event (" << stop << ") received. Stopping proxy";
+      proxy.reset();
+      continue;
+    }
+  }
+}
+
+}
+}
+}
+
+int main(int argc, char* argv[]) {
+  signal(SIGPIPE, SIG_IGN);
+
+#ifdef CUTTLEFISH_HOST
+  cuttlefish::DefaultSubprocessLogging(argv, cuttlefish::MetadataLevel::TAG_AND_MESSAGE);
+#else
+  ::android::base::InitLogging(argv, android::base::LogdLogger(android::base::SYSTEM));
+#endif
+  google::ParseCommandLineFlags(&argc, &argv, true);
+
+  if (!FLAGS_label.empty()) {
+    android::base::SetDefaultTag("proxy_" + FLAGS_label);
+  }
+
+  auto server = cuttlefish::socket_proxy::BuildServer();
+  auto client = cuttlefish::socket_proxy::BuildClient();
+
+  if (FLAGS_events_fd != -1) {
+    CHECK(FLAGS_start_event_id != -1)
+        << "start_event_id is required if events_fd is provided";
+
+    const monitor::Event start_event = static_cast<monitor::Event>(FLAGS_start_event_id);
+    const monitor::Event stop_event = static_cast<monitor::Event>(FLAGS_stop_event_id);
+
+    cuttlefish::socket_proxy::ListenEventsAndProxy(FLAGS_events_fd, start_event, stop_event,
+                                                   *server, *client);
+  } else {
+    LOG(DEBUG) << "Starting proxy";
+    cuttlefish::Proxy(server->Start(), [&client] { return client->Start(); });
+  }
+}
diff --git a/common/libs/concurrency/thread_annotations.h b/common/libs/concurrency/thread_annotations.h
deleted file mode 100644
index cd568b3..0000000
--- a/common/libs/concurrency/thread_annotations.h
+++ /dev/null
@@ -1,72 +0,0 @@
-#pragma once
-/*
- * Copyright (C) 2016 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#if defined(__SUPPORT_TS_ANNOTATION__) || defined(__clang__)
-#define THREAD_ANNOTATION_ATTRIBUTE__(x) __attribute__((x))
-#else
-#define THREAD_ANNOTATION_ATTRIBUTE__(x)  // no-op
-#endif
-
-#define CAPABILITY(x) THREAD_ANNOTATION_ATTRIBUTE__(capability(x))
-
-#define SCOPED_CAPABILITY THREAD_ANNOTATION_ATTRIBUTE__(scoped_lockable)
-
-#define GUARDED_BY(x) THREAD_ANNOTATION_ATTRIBUTE__(guarded_by(x))
-
-#define PT_GUARDED_BY(x) THREAD_ANNOTATION_ATTRIBUTE__(pt_guarded_by(x))
-
-#define ACQUIRED_BEFORE(...) \
-  THREAD_ANNOTATION_ATTRIBUTE__(acquired_before(__VA_ARGS__))
-
-#define ACQUIRED_AFTER(...) \
-  THREAD_ANNOTATION_ATTRIBUTE__(acquired_after(__VA_ARGS__))
-
-#define REQUIRES(...) \
-  THREAD_ANNOTATION_ATTRIBUTE__(requires_capability(__VA_ARGS__))
-
-#define REQUIRES_SHARED(...) \
-  THREAD_ANNOTATION_ATTRIBUTE__(requires_shared_capability(__VA_ARGS__))
-
-#define ACQUIRE(...) \
-  THREAD_ANNOTATION_ATTRIBUTE__(acquire_capability(__VA_ARGS__))
-
-#define ACQUIRE_SHARED(...) \
-  THREAD_ANNOTATION_ATTRIBUTE__(acquire_shared_capability(__VA_ARGS__))
-
-#define RELEASE(...) \
-  THREAD_ANNOTATION_ATTRIBUTE__(release_capability(__VA_ARGS__))
-
-#define RELEASE_SHARED(...) \
-  THREAD_ANNOTATION_ATTRIBUTE__(release_shared_capability(__VA_ARGS__))
-
-#define TRY_ACQUIRE(...) \
-  THREAD_ANNOTATION_ATTRIBUTE__(try_acquire_capability(__VA_ARGS__))
-
-#define TRY_ACQUIRE_SHARED(...) \
-  THREAD_ANNOTATION_ATTRIBUTE__(try_acquire_shared_capability(__VA_ARGS__))
-
-#define EXCLUDES(...) THREAD_ANNOTATION_ATTRIBUTE__(locks_excluded(__VA_ARGS__))
-
-#define ASSERT_CAPABILITY(x) THREAD_ANNOTATION_ATTRIBUTE__(assert_capability(x))
-
-#define ASSERT_SHARED_CAPABILITY(x) \
-  THREAD_ANNOTATION_ATTRIBUTE__(assert_shared_capability(x))
-
-#define RETURN_CAPABILITY(x) THREAD_ANNOTATION_ATTRIBUTE__(lock_returned(x))
-
-#define NO_THREAD_SAFETY_ANALYSIS \
-  THREAD_ANNOTATION_ATTRIBUTE__(no_thread_safety_analysis)
diff --git a/common/libs/confui/packet.cpp b/common/libs/confui/packet.cpp
index debaeb8..fc2eedf 100644
--- a/common/libs/confui/packet.cpp
+++ b/common/libs/confui/packet.cpp
@@ -38,8 +38,8 @@
   }
 
   if (p.payload_length_ >= packet::kMaxPayloadLength) {
-    ConfUiLog(ERROR) << "Payload length must be less than "
-                     << packet::kMaxPayloadLength;
+    ConfUiLog(ERROR) << "Payload length " << p.payload_length_
+                     << " must be less than " << packet::kMaxPayloadLength;
     return std::nullopt;
   }
 
@@ -98,6 +98,7 @@
   }
   ConfUiPacketInfo data_to_return;
   std::vector<int> lengths;
+  lengths.reserve(n);
   for (int i = 1; i <= n; i++) {
     if (!IsOnlyDigits(tokens[2 + i])) {
       ConfUiLog(ERROR) << tokens[2 + i] << " should be a number but is not.";
diff --git a/common/libs/confui/protocol_types.cpp b/common/libs/confui/protocol_types.cpp
index 8c293ea..e5b68e2 100644
--- a/common/libs/confui/protocol_types.cpp
+++ b/common/libs/confui/protocol_types.cpp
@@ -21,6 +21,7 @@
 
 #include "common/libs/confui/packet.h"
 #include "common/libs/confui/utils.h"
+#include "common/libs/utils/contains.h"
 
 namespace cuttlefish {
 namespace confui {
@@ -40,7 +41,7 @@
       {ConfUiCmd::kAbort, "kAbort"},
       {ConfUiCmd::kUserInputEvent, "kUserInputEvent"},
       {ConfUiCmd::kUserInputEvent, "kUserTouchEvent"}};
-  if (look_up_tab.find(cmd) != look_up_tab.end()) {
+  if (Contains(look_up_tab, cmd)) {
     return look_up_tab[cmd] + suffix;
   }
   return "kUnknown" + suffix;
@@ -73,7 +74,7 @@
       {"kUserInputEvent", ConfUiCmd::kUserInputEvent},
       {"kUserTouchEvent", ConfUiCmd::kUserTouchEvent},
   };
-  if (cmds.find(cmd_str) != cmds.end()) {
+  if (Contains(cmds, cmd_str)) {
     return cmds[cmd_str];
   }
   return ConfUiCmd::kUnknown;
diff --git a/common/libs/confui/protocol_types.h b/common/libs/confui/protocol_types.h
index 98f581f..1a61b4c 100644
--- a/common/libs/confui/protocol_types.h
+++ b/common/libs/confui/protocol_types.h
@@ -217,7 +217,7 @@
   ConfUiCmd GetType() const override { return ConfUiCmd::kUserTouchEvent; }
   auto GetResponse() const { return response_; }
   bool SendOver(SharedFD fd) override;
-  std::pair<int, int> GetLocation() { return {x_, y_}; }
+  std::pair<int, int> GetLocation() const { return {x_, y_}; }
 
  private:
   int x_;
diff --git a/common/libs/device_config/host_device_config.cpp b/common/libs/device_config/host_device_config.cpp
index eb28c63..3e977c1 100644
--- a/common/libs/device_config/host_device_config.cpp
+++ b/common/libs/device_config/host_device_config.cpp
@@ -137,9 +137,9 @@
   // newer version of cuttlefish-common, and we can use the tap device
   // directly instead.
   if (!netconfig.ObtainConfig(instance.mobile_bridge_name(),
-                              cuttlefish_config.ril_dns())) {
+                              instance.ril_dns())) {
     if (!netconfig.ObtainConfig(instance.mobile_tap_name(),
-                                cuttlefish_config.ril_dns())) {
+                                instance.ril_dns())) {
       LOG(ERROR) << "Unable to obtain the network configuration";
       return false;
     }
@@ -157,7 +157,8 @@
 
 void InitializeScreenConfiguration(const CuttlefishConfig& cuttlefish_config,
                                    DeviceConfig* device_config) {
-  for (const auto& cuttlefish_display_config : cuttlefish_config.display_configs()) {
+  auto instance = cuttlefish_config.ForDefaultInstance();
+  for (const auto& cuttlefish_display_config : instance.display_configs()) {
     DeviceConfig::DisplayConfig* device_display_config =
       device_config->add_display_config();
 
diff --git a/common/libs/fs/shared_fd.cpp b/common/libs/fs/shared_fd.cpp
index b89db85..8173e12 100644
--- a/common/libs/fs/shared_fd.cpp
+++ b/common/libs/fs/shared_fd.cpp
@@ -15,8 +15,10 @@
  */
 #include "common/libs/fs/shared_fd.h"
 
+#include <arpa/inet.h>
 #include <errno.h>
 #include <fcntl.h>
+#include <net/if.h>
 #include <netinet/in.h>
 #include <poll.h>
 #include <sys/file.h>
@@ -28,12 +30,15 @@
 #include <cstddef>
 
 #include <algorithm>
+#include <sstream>
 #include <vector>
 
+#include <android-base/file.h>
 #include <android-base/logging.h>
 
 #include "common/libs/fs/shared_buf.h"
 #include "common/libs/fs/shared_select.h"
+#include "common/libs/utils/result.h"
 
 // #define ENABLE_GCE_SHARED_FD_LOGGING 1
 
@@ -103,6 +108,27 @@
 bool FileInstance::CopyFrom(FileInstance& in, size_t length) {
   std::vector<char> buffer(kPreferredBufferSize);
   while (length > 0) {
+    // Wait until either in becomes readable or our fd closes.
+    constexpr ssize_t IN = 0;
+    constexpr ssize_t OUT = 1;
+    struct pollfd pollfds[2];
+    pollfds[IN].fd = in.fd_;
+    pollfds[IN].events = POLLIN;
+    pollfds[IN].revents = 0;
+    pollfds[OUT].fd = fd_;
+    pollfds[OUT].events = 0;
+    pollfds[OUT].revents = 0;
+    int res = poll(pollfds, 2, -1 /* indefinitely */);
+    if (res < 0) {
+      errno_ = errno;
+      return false;
+    }
+    if (pollfds[OUT].revents != 0) {
+      // destination was either closed, invalid or errored, either way there is no
+      // point in continuing.
+      return false;
+    }
+
     ssize_t num_read = in.Read(buffer.data(), std::min(buffer.size(), length));
     if (num_read <= 0) {
       return false;
@@ -111,12 +137,14 @@
 
     ssize_t written = 0;
     do {
+      // No need to use poll for writes: even if the source closes, the data
+      // needs to be delivered to the other side.
       auto res = Write(buffer.data(), num_read);
-     if (res <= 0) {
-      // The caller will have to log an appropriate message.
-       return false;
-     }
-     written += res;
+      if (res <= 0) {
+        // The caller will have to log an appropriate message.
+        return false;
+      }
+      written += res;
     } while(written < num_read);
   }
   return true;
@@ -420,11 +448,11 @@
     }
   }
 
-  int fd = TEMP_FAILURE_RETRY(mkfifo(path.c_str(), mode));
-  if (fd == -1) {
+  int rval = TEMP_FAILURE_RETRY(mkfifo(path.c_str(), mode));
+  if (rval == -1) {
     return ErrorFD(errno);
   }
-  return Open(path, mode);
+  return Open(path, O_RDWR);
 }
 
 SharedFD SharedFD::Socket(int domain, int socket_type, int protocol) {
@@ -476,12 +504,52 @@
   addr.sin_family = AF_INET;
   addr.sin_port = htons(port);
   addr.sin_addr.s_addr = htonl(INADDR_ANY);
-  SharedFD rval = SharedFD::Socket(AF_INET, type, 0);
+  auto rval = SharedFD::Socket(AF_INET, type, 0);
   if (!rval->IsOpen()) {
     return rval;
   }
-  if (rval->Connect(reinterpret_cast<const sockaddr*>(&addr),
-                    sizeof addr) < 0) {
+  if (rval->Connect(reinterpret_cast<const sockaddr*>(&addr), sizeof addr) < 0) {
+    return SharedFD::ErrorFD(rval->GetErrno());
+  }
+  return rval;
+}
+
+SharedFD SharedFD::SocketClient(const std::string& host, int port, int type) {
+  sockaddr_in addr{};
+  addr.sin_family = AF_INET;
+  addr.sin_port = htons(port);
+  addr.sin_addr.s_addr = inet_addr(host.c_str());
+  auto rval = SharedFD::Socket(AF_INET, type, 0);
+  if (!rval->IsOpen()) {
+    return rval;
+  }
+  if (rval->Connect(reinterpret_cast<const sockaddr*>(&addr), sizeof addr) < 0) {
+    return SharedFD::ErrorFD(rval->GetErrno());
+  }
+  return rval;
+}
+
+SharedFD SharedFD::Socket6Client(const std::string& host, const std::string& interface,
+                                 int port, int type) {
+  sockaddr_in6 addr{};
+  addr.sin6_family = AF_INET6;
+  addr.sin6_port = htons(port);
+  inet_pton(AF_INET6, host.c_str(), &addr.sin6_addr);
+  auto rval = SharedFD::Socket(AF_INET6, type, 0);
+  if (!rval->IsOpen()) {
+    return rval;
+  }
+
+  if (!interface.empty()) {
+    ifreq ifr{};
+    snprintf(ifr.ifr_name, sizeof(ifr.ifr_name), "%s", interface.c_str());
+
+    if (rval->SetSockOpt(SOL_SOCKET, SO_BINDTODEVICE, &ifr, sizeof(ifr)) == -1) {
+      return SharedFD::ErrorFD(rval->GetErrno());
+    }
+  }
+
+  if (rval->Connect(reinterpret_cast<const sockaddr*>(&addr), sizeof addr) < 0) {
     return SharedFD::ErrorFD(rval->GetErrno());
   }
   return rval;
@@ -669,11 +737,12 @@
   return rval;
 }
 
-int FileInstance::Flock(int operation) {
+Result<void> FileInstance::Flock(int operation) {
   errno = 0;
   int rval = TEMP_FAILURE_RETRY(flock(fd_, operation));
   errno_ = errno;
-  return rval;
+  CF_EXPECT(rval == 0, StrError());
+  return {};
 }
 
 int FileInstance::GetSockName(struct sockaddr* addr, socklen_t* addrlen) {
@@ -846,6 +915,17 @@
   return rval;
 }
 
+Result<std::string> FileInstance::ProcFdLinkTarget() const {
+  std::stringstream output_composer;
+  output_composer << "/proc/" << getpid() << "/fd/" << fd_;
+  const std::string mem_fd_link = output_composer.str();
+  std::string mem_fd_target;
+  CF_EXPECT(
+      android::base::Readlink(mem_fd_link, &mem_fd_target),
+      "Getting link for the memory file \"" << mem_fd_link << "\" failed");
+  return mem_fd_target;
+}
+
 FileInstance::FileInstance(int fd, int in_errno)
     : fd_(fd), errno_(in_errno), is_regular_file_(IsRegularFile(fd_)) {
   // Ensure every file descriptor managed by a FileInstance has the CLOEXEC
diff --git a/common/libs/fs/shared_fd.h b/common/libs/fs/shared_fd.h
index d24d987..1c654cd 100644
--- a/common/libs/fs/shared_fd.h
+++ b/common/libs/fs/shared_fd.h
@@ -46,6 +46,8 @@
 
 #include "vm_sockets.h"
 
+#include "common/libs/utils/result.h"
+
 /**
  * Classes to to enable safe access to files.
  * POSIX kernels have an unfortunate habit of recycling file descriptors.
@@ -146,6 +148,9 @@
   static SharedFD SocketLocalClient(const std::string& name, bool is_abstract,
                                     int in_type, int timeout_seconds);
   static SharedFD SocketLocalClient(int port, int type);
+  static SharedFD SocketClient(const std::string& host, int port, int type);
+  static SharedFD Socket6Client(const std::string& host, const std::string& interface,
+                                int port, int type);
   static SharedFD SocketLocalServer(const std::string& name, bool is_abstract,
                                     int in_type, mode_t mode);
   static SharedFD SocketLocalServer(int port, int type);
@@ -214,6 +219,13 @@
 
   operator bool() const { return ptr_ != MAP_FAILED; }
 
+  // Checks whether the interval [offset, offset + length) is contained within
+  // [0, len_)
+  bool WithinBounds(size_t offset, size_t length) const {
+    // Don't add offset + len to avoid overflow
+    return offset < len_ && len_ - offset >= length;
+  }
+
  private:
   void* ptr_ = MAP_FAILED;
   size_t len_;
@@ -264,7 +276,7 @@
   int Fchdir();
   int Fcntl(int command, int value);
 
-  int Flock(int operation);
+  Result<void> Flock(int operation);
 
   int GetErrno() const { return errno_; }
   int GetSockName(struct sockaddr* addr, socklen_t* addrlen);
@@ -330,6 +342,10 @@
   int EventfdWrite(eventfd_t value);
   bool IsATTY();
 
+  // Returns the target of "/proc/getpid()/fd/" + std::to_string(fd_)
+  // if appropriate
+  Result<std::string> ProcFdLinkTarget() const;
+
  private:
   FileInstance(int fd, int in_errno);
   FileInstance* Accept(struct sockaddr* addr, socklen_t* addrlen) const;
@@ -349,7 +365,7 @@
 /* Methods that need both a fully defined SharedFD and a fully defined
    FileInstance. */
 
-inline SharedFD::SharedFD() : value_(FileInstance::ClosedInstance()) {}
+SharedFD::SharedFD() : value_(FileInstance::ClosedInstance()) {}
 
 }  // namespace cuttlefish
 
diff --git a/common/libs/net/netlink_client.cpp b/common/libs/net/netlink_client.cpp
index b245f7e..620bbc7 100644
--- a/common/libs/net/netlink_client.cpp
+++ b/common/libs/net/netlink_client.cpp
@@ -16,13 +16,20 @@
 #include "common/libs/net/netlink_client.h"
 
 #include <errno.h>
-#include <linux/rtnetlink.h>
-#include <linux/sockios.h>
-#include <net/if.h>
+#include <linux/netlink.h>
 #include <sys/socket.h>
+#include <sys/uio.h>
+
+#include <cstdint>
+#include <cstring>
+#include <memory>
+#include <string>
+#include "ostream"  // for operator<<, basic_ostream
+
+#include <android-base/logging.h>
 
 #include "common/libs/fs/shared_fd.h"
-#include "android-base/logging.h"
+#include "common/libs/net/netlink_request.h"
 
 namespace cuttlefish {
 namespace {
diff --git a/common/libs/net/netlink_client.h b/common/libs/net/netlink_client.h
index 25ff7f4..c60e0d0 100644
--- a/common/libs/net/netlink_client.h
+++ b/common/libs/net/netlink_client.h
@@ -16,9 +16,8 @@
 #ifndef COMMON_LIBS_NET_NETLINK_CLIENT_H_
 #define COMMON_LIBS_NET_NETLINK_CLIENT_H_
 
-#include <stddef.h>
 #include <memory>
-#include <string>
+
 #include "common/libs/net/netlink_request.h"
 
 namespace cuttlefish {
diff --git a/common/libs/net/netlink_request.cpp b/common/libs/net/netlink_request.cpp
index 3900e8a..ad1b960 100644
--- a/common/libs/net/netlink_request.cpp
+++ b/common/libs/net/netlink_request.cpp
@@ -15,13 +15,21 @@
  */
 #include "common/libs/net/netlink_request.h"
 
+#include <linux/if_addr.h>
+#include <linux/if_link.h>
 #include <linux/netlink.h>
 #include <linux/rtnetlink.h>
 #include <net/if.h>
-#include <string.h>
+#include <sys/socket.h>
+#include <unistd.h>
 
 #include <algorithm>
+#include <array>
+#include <cstdint>  // for int32_t
+#include <ostream>  // for operator<<, basic_ostream
 #include <string>
+#include <type_traits>  // for swap
+#include <utility>
 #include <vector>
 
 #include "android-base/logging.h"
diff --git a/common/libs/net/netlink_request.h b/common/libs/net/netlink_request.h
index eee231c..9f0f9c0 100644
--- a/common/libs/net/netlink_request.h
+++ b/common/libs/net/netlink_request.h
@@ -16,12 +16,15 @@
 #ifndef COMMON_LIBS_NET_NETLINK_REQUEST_H_
 #define COMMON_LIBS_NET_NETLINK_REQUEST_H_
 
-#include <linux/netlink.h>
 #include <stddef.h>
 
+#include <linux/netlink.h>
+
 #include <array>
-#include <memory>
+#include <cstdint>
 #include <string>
+#include <type_traits>
+#include <utility>
 #include <vector>
 
 namespace cuttlefish {
@@ -62,7 +65,7 @@
   void AddMacAddress(const std::array<unsigned char, 6>& address);
 
   // Creates new list.
-  // List mimmic recursive structures in a flat, contiuous representation.
+  // List mimmic recursive structures in a flat, continuous representation.
   // Each call to PushList() should have a corresponding call to PopList
   // indicating end of sub-attribute list.
   void PushList(uint16_t type);
diff --git a/common/libs/net/netlink_request_test.cpp b/common/libs/net/netlink_request_test.cpp
index 3e23358..753a5c5 100644
--- a/common/libs/net/netlink_request_test.cpp
+++ b/common/libs/net/netlink_request_test.cpp
@@ -13,16 +13,18 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-#include "common/libs/net/netlink_client.h"
 
+#include "common/libs/net/netlink_request.h"
+
+#include <linux/netlink.h>
 #include <linux/rtnetlink.h>
 
+#include <cstring>
+#include <ios>
+#include <ostream>
+
 #include <gmock/gmock.h>
 #include <gtest/gtest.h>
-#include <android-base/logging.h>
-
-#include <iostream>
-#include <memory>
 
 using ::testing::ElementsAreArray;
 using ::testing::MatchResultListener;
diff --git a/common/libs/net/network_interface_manager.cpp b/common/libs/net/network_interface_manager.cpp
index 19371e6..c4e695c 100644
--- a/common/libs/net/network_interface_manager.cpp
+++ b/common/libs/net/network_interface_manager.cpp
@@ -15,16 +15,25 @@
  */
 #include "common/libs/net/network_interface_manager.h"
 
+#include <stddef.h>
+#include <stdint.h>
+
 #include <arpa/inet.h>
 #include <linux/if_addr.h>
 #include <linux/if_link.h>
 #include <linux/netlink.h>
 #include <linux/rtnetlink.h>
 #include <net/if.h>
+#include <netinet/in.h>
 
 #include <memory>
+#include <ostream>
+#include <string>
+#include <utility>
 
 #include "android-base/logging.h"
+#include "common/libs/net/netlink_client.h"
+#include "common/libs/net/netlink_request.h"
 #include "common/libs/net/network_interface.h"
 
 namespace cuttlefish {
diff --git a/common/libs/net/network_interface_manager.h b/common/libs/net/network_interface_manager.h
index 4ade909..bec4fca 100644
--- a/common/libs/net/network_interface_manager.h
+++ b/common/libs/net/network_interface_manager.h
@@ -20,6 +20,7 @@
 #include <string>
 
 #include "common/libs/net/netlink_client.h"
+#include "common/libs/net/netlink_request.h"
 #include "common/libs/net/network_interface.h"
 
 namespace cuttlefish {
@@ -46,11 +47,6 @@
   // This method cannot be used to instantiate new network interfaces.
   bool ApplyChanges(const NetworkInterface& interface);
 
-  // Create new connected pair of virtual (veth) interfaces.
-  // Supplied pair of interfaces describe both endpoints' properties.
-  bool CreateVethPair(const NetworkInterface& first,
-                      const NetworkInterface& second);
-
   // Creates new NetworkInterfaceManager.
   static std::unique_ptr<NetworkInterfaceManager> New(
       NetlinkClientFactory* factory);
@@ -58,9 +54,6 @@
  private:
   NetworkInterfaceManager(std::unique_ptr<NetlinkClient> nl_client);
 
-  // Build (partial) netlink request.
-  bool BuildRequest(NetlinkRequest* request, const NetworkInterface& interface);
-
   std::unique_ptr<NetlinkClient> nl_client_;
 
   NetworkInterfaceManager(const NetworkInterfaceManager&);
diff --git a/common/libs/security/Android.bp b/common/libs/security/Android.bp
index fe18a0f..758b8bf 100644
--- a/common/libs/security/Android.bp
+++ b/common/libs/security/Android.bp
@@ -21,7 +21,6 @@
     name: "libcuttlefish_security",
     defaults: ["hidl_defaults", "cuttlefish_host"],
     srcs: [
-        "confui_sign.cpp",
         "gatekeeper_channel.cpp",
         "keymaster_channel.cpp",
     ],
@@ -30,11 +29,29 @@
     ],
     shared_libs: [
         "libbase",
-        "libcuttlefish_fs",
         "libgatekeeper",
         "libkeymaster_messages",
         "liblog",
     ],
+    target: {
+        linux: {
+            shared_libs: [
+                "libcuttlefish_fs",
+            ],
+            srcs: [
+                "confui_sign.cpp",
+                "gatekeeper_channel_sharedfd.cpp",
+                "keymaster_channel_sharedfd.cpp",
+            ],
+        },
+        windows: {
+            enabled: true,
+            srcs: [
+                "gatekeeper_channel_windows.cpp",
+                "keymaster_channel_windows.cpp",
+            ],
+        },
+    },
 }
 
 cc_test {
diff --git a/common/libs/security/gatekeeper_channel.cpp b/common/libs/security/gatekeeper_channel.cpp
index 7038d67..08f43b4 100644
--- a/common/libs/security/gatekeeper_channel.cpp
+++ b/common/libs/security/gatekeeper_channel.cpp
@@ -16,25 +16,10 @@
 
 #include "common/libs/security/gatekeeper_channel.h"
 
-#include <cstdlib>
-
-#include <android-base/logging.h>
-#include "keymaster/android_keymaster_utils.h"
-
-#include "common/libs/fs/shared_buf.h"
+#include <keymaster/android_keymaster_utils.h>
 
 namespace cuttlefish {
 
-ManagedGatekeeperMessage CreateGatekeeperMessage(
-    uint32_t command, bool is_response, size_t payload_size) {
-  auto memory = std::malloc(payload_size + sizeof(GatekeeperRawMessage));
-  auto message = reinterpret_cast<GatekeeperRawMessage*>(memory);
-  message->cmd = command;
-  message->is_response = is_response;
-  message->payload_size = payload_size;
-  return ManagedGatekeeperMessage(message);
-}
-
 void GatekeeperCommandDestroyer::operator()(GatekeeperRawMessage* ptr) {
   {
     keymaster::Eraser(ptr, sizeof(GatekeeperRawMessage) + ptr->payload_size);
@@ -42,57 +27,15 @@
   std::free(ptr);
 }
 
-GatekeeperChannel::GatekeeperChannel(SharedFD input, SharedFD output)
-    : input_(input), output_(output) {
+ManagedGatekeeperMessage CreateGatekeeperMessage(uint32_t command,
+                                                 bool is_response,
+                                                 size_t payload_size) {
+  auto memory = std::malloc(payload_size + sizeof(GatekeeperRawMessage));
+  auto message = reinterpret_cast<GatekeeperRawMessage*>(memory);
+  message->cmd = command;
+  message->is_response = is_response;
+  message->payload_size = payload_size;
+  return ManagedGatekeeperMessage(message);
 }
 
-bool GatekeeperChannel::SendRequest(
-    uint32_t command, const gatekeeper::GateKeeperMessage& message) {
-  return SendMessage(command, false, message);
-}
-
-bool GatekeeperChannel::SendResponse(
-    uint32_t command, const gatekeeper::GateKeeperMessage& message) {
-  return SendMessage(command, true, message);
-}
-
-bool GatekeeperChannel::SendMessage(
-    uint32_t command,
-    bool is_response,
-    const gatekeeper::GateKeeperMessage& message) {
-  LOG(DEBUG) << "Sending message with id: " << command;
-  auto payload_size = message.GetSerializedSize();
-  auto to_send = CreateGatekeeperMessage(command, is_response, payload_size);
-  message.Serialize(to_send->payload, to_send->payload + payload_size);
-  auto write_size = payload_size + sizeof(GatekeeperRawMessage);
-  auto to_send_bytes = reinterpret_cast<const char*>(to_send.get());
-  auto written = WriteAll(output_, to_send_bytes, write_size);
-  if (written == -1) {
-    LOG(ERROR) << "Could not write Gatekeeper Message: " << output_->StrError();
-  }
-  return written == write_size;
-}
-
-ManagedGatekeeperMessage GatekeeperChannel::ReceiveMessage() {
-  struct GatekeeperRawMessage message_header;
-  auto read = ReadExactBinary(input_, &message_header);
-  if (read != sizeof(GatekeeperRawMessage)) {
-    LOG(ERROR) << "Expected " << sizeof(GatekeeperRawMessage) << ", received "
-               << read;
-    LOG(ERROR) << "Could not read Gatekeeper Message: " << input_->StrError();
-    return {};
-  }
-  LOG(DEBUG) << "Received message with id: " << message_header.cmd;
-  auto message = CreateGatekeeperMessage(message_header.cmd,
-                                         message_header.is_response,
-                                         message_header.payload_size);
-  auto message_bytes = reinterpret_cast<char*>(message->payload);
-  read = ReadExact(input_, message_bytes, message->payload_size);
-  if (read != message->payload_size) {
-    LOG(ERROR) << "Could not read Gatekeeper Message: " << input_->StrError();
-    return {};
-  }
-  return message;
-}
-
-} // namespace cuttlefish
+}  // namespace cuttlefish
\ No newline at end of file
diff --git a/common/libs/security/gatekeeper_channel.h b/common/libs/security/gatekeeper_channel.h
index b4222e7..b1415bd 100644
--- a/common/libs/security/gatekeeper_channel.h
+++ b/common/libs/security/gatekeeper_channel.h
@@ -18,8 +18,6 @@
 
 #include "gatekeeper/gatekeeper_messages.h"
 
-#include "common/libs/fs/shared_fd.h"
-
 #include <memory>
 
 namespace gatekeeper {
@@ -32,13 +30,13 @@
  * @payload: start of the serialized command specific payload
  */
 struct GatekeeperRawMessage {
-    uint32_t cmd : 31;
-    bool is_response : 1;
-    uint32_t payload_size;
-    uint8_t payload[0];
+  uint32_t cmd : 31;
+  bool is_response : 1;
+  uint32_t payload_size;
+  uint8_t payload[0];
 };
 
-} // namespace gatekeeper
+}  // namespace gatekeeper
 
 namespace cuttlefish {
 
@@ -46,10 +44,11 @@
 
 /**
  * A destroyer for GatekeeperRawMessage instances created with
- * CreateGatekeeperMessage. Wipes memory from the GatekeeperRawMessage instances.
+ * CreateGatekeeperMessage. Wipes memory from the GatekeeperRawMessage
+ * instances.
  */
 class GatekeeperCommandDestroyer {
-public:
+ public:
   void operator()(GatekeeperRawMessage* ptr);
 };
 
@@ -61,27 +60,22 @@
  * Allocates memory for a GatekeeperRawMessage carrying a message of size
  * `payload_size`.
  */
-ManagedGatekeeperMessage CreateGatekeeperMessage(
-    uint32_t command, bool is_response, size_t payload_size);
+ManagedGatekeeperMessage CreateGatekeeperMessage(uint32_t command,
+                                                 bool is_response,
+                                                 size_t payload_size);
 
 /*
- * Interface for communication channels that synchronously communicate Gatekeeper
- * IPC/RPC calls. Sends messages over a file descriptor.
+ * Interface for communication channels that synchronously communicate
+ * Gatekeeper IPC/RPC calls.
  */
 class GatekeeperChannel {
-public:
-  GatekeeperChannel(SharedFD input, SharedFD output);
-
-  bool SendRequest(uint32_t command,
-                   const gatekeeper::GateKeeperMessage& message);
-  bool SendResponse(uint32_t command,
-                    const gatekeeper::GateKeeperMessage& message);
-  ManagedGatekeeperMessage ReceiveMessage();
-private:
-  SharedFD input_;
-  SharedFD output_;
-  bool SendMessage(uint32_t command, bool response,
-                   const gatekeeper::GateKeeperMessage& message);
+ public:
+  virtual bool SendRequest(uint32_t command,
+                           const gatekeeper::GateKeeperMessage& message) = 0;
+  virtual bool SendResponse(uint32_t command,
+                            const gatekeeper::GateKeeperMessage& message) = 0;
+  virtual ManagedGatekeeperMessage ReceiveMessage() = 0;
+  virtual ~GatekeeperChannel() {}
 };
 
-} // namespace cuttlefish
+}  // namespace cuttlefish
\ No newline at end of file
diff --git a/common/libs/security/gatekeeper_channel_sharedfd.cpp b/common/libs/security/gatekeeper_channel_sharedfd.cpp
new file mode 100644
index 0000000..647706b
--- /dev/null
+++ b/common/libs/security/gatekeeper_channel_sharedfd.cpp
@@ -0,0 +1,81 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "common/libs/security/gatekeeper_channel_sharedfd.h"
+
+#include <cstdlib>
+
+#include <android-base/logging.h>
+#include "keymaster/android_keymaster_utils.h"
+
+#include "common/libs/fs/shared_buf.h"
+
+namespace cuttlefish {
+using gatekeeper::GatekeeperRawMessage;
+
+SharedFdGatekeeperChannel::SharedFdGatekeeperChannel(SharedFD input,
+                                                     SharedFD output)
+    : input_(input), output_(output) {}
+
+bool SharedFdGatekeeperChannel::SendRequest(
+    uint32_t command, const gatekeeper::GateKeeperMessage& message) {
+  return SendMessage(command, false, message);
+}
+
+bool SharedFdGatekeeperChannel::SendResponse(
+    uint32_t command, const gatekeeper::GateKeeperMessage& message) {
+  return SendMessage(command, true, message);
+}
+
+bool SharedFdGatekeeperChannel::SendMessage(
+    uint32_t command, bool is_response,
+    const gatekeeper::GateKeeperMessage& message) {
+  LOG(DEBUG) << "Sending message with id: " << command;
+  auto payload_size = message.GetSerializedSize();
+  auto to_send = CreateGatekeeperMessage(command, is_response, payload_size);
+  message.Serialize(to_send->payload, to_send->payload + payload_size);
+  auto write_size = payload_size + sizeof(GatekeeperRawMessage);
+  auto to_send_bytes = reinterpret_cast<const char*>(to_send.get());
+  auto written = WriteAll(output_, to_send_bytes, write_size);
+  if (written == -1) {
+    LOG(ERROR) << "Could not write Gatekeeper Message: " << output_->StrError();
+  }
+  return written == write_size;
+}
+
+ManagedGatekeeperMessage SharedFdGatekeeperChannel::ReceiveMessage() {
+  struct GatekeeperRawMessage message_header;
+  auto read = ReadExactBinary(input_, &message_header);
+  if (read != sizeof(GatekeeperRawMessage)) {
+    LOG(ERROR) << "Expected " << sizeof(GatekeeperRawMessage) << ", received "
+               << read;
+    LOG(ERROR) << "Could not read Gatekeeper Message: " << input_->StrError();
+    return {};
+  }
+  LOG(DEBUG) << "Received message with id: " << message_header.cmd;
+  auto message =
+      CreateGatekeeperMessage(message_header.cmd, message_header.is_response,
+                              message_header.payload_size);
+  auto message_bytes = reinterpret_cast<char*>(message->payload);
+  read = ReadExact(input_, message_bytes, message->payload_size);
+  if (read != message->payload_size) {
+    LOG(ERROR) << "Could not read Gatekeeper Message: " << input_->StrError();
+    return {};
+  }
+  return message;
+}
+
+}  // namespace cuttlefish
\ No newline at end of file
diff --git a/common/libs/security/gatekeeper_channel_sharedfd.h b/common/libs/security/gatekeeper_channel_sharedfd.h
new file mode 100644
index 0000000..f4c63e5
--- /dev/null
+++ b/common/libs/security/gatekeeper_channel_sharedfd.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include "gatekeeper/gatekeeper_messages.h"
+
+#include "common/libs/fs/shared_fd.h"
+#include "common/libs/security/gatekeeper_channel.h"
+
+namespace cuttlefish {
+/*
+ * Interface for communication channels that synchronously communicate
+ * Gatekeeper IPC/RPC calls. Sends messages over a file descriptor.
+ */
+class SharedFdGatekeeperChannel : public GatekeeperChannel {
+ public:
+  SharedFdGatekeeperChannel(SharedFD input, SharedFD output);
+
+  bool SendRequest(uint32_t command,
+                   const gatekeeper::GateKeeperMessage& message) override;
+  bool SendResponse(uint32_t command,
+                    const gatekeeper::GateKeeperMessage& message) override;
+  ManagedGatekeeperMessage ReceiveMessage() override;
+
+ private:
+  SharedFD input_;
+  SharedFD output_;
+  bool SendMessage(uint32_t command, bool response,
+                   const gatekeeper::GateKeeperMessage& message);
+};
+
+}  // namespace cuttlefish
\ No newline at end of file
diff --git a/common/libs/security/gatekeeper_channel_windows.cpp b/common/libs/security/gatekeeper_channel_windows.cpp
new file mode 100644
index 0000000..b4974e6
--- /dev/null
+++ b/common/libs/security/gatekeeper_channel_windows.cpp
@@ -0,0 +1,218 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "common/libs/security/gatekeeper_channel_windows.h"
+
+#include <windows.h>
+
+#include <errhandlingapi.h>
+#include <fileapi.h>
+#include <handleapi.h>
+#include <namedpipeapi.h>
+#include <chrono>
+#include <cstdlib>
+#include <thread>
+
+#include <android-base/logging.h>
+
+namespace cuttlefish {
+using gatekeeper::GatekeeperRawMessage;
+
+GatekeeperWindowsChannel::~GatekeeperWindowsChannel() {
+  if (pipe_handle_) {
+    CloseHandle(pipe_handle_);
+  }
+
+  if (pipe_overlapped_.hEvent) {
+    CloseHandle(pipe_overlapped_.hEvent);
+  }
+}
+
+std::unique_ptr<GatekeeperWindowsChannel> GatekeeperWindowsChannel::Create(
+    HANDLE pipe_handle) {
+  auto gatekeeper_channel =
+      std::unique_ptr<GatekeeperWindowsChannel>(new GatekeeperWindowsChannel());
+  if (!gatekeeper_channel->WaitForConnection(pipe_handle)) {
+    return nullptr;
+  }
+
+  return gatekeeper_channel;
+}
+
+bool GatekeeperWindowsChannel::WaitForConnection(HANDLE pipe_handle) {
+  assert(pipe_handle_ == NULL);
+  pipe_handle_ = pipe_handle;
+
+  DWORD flags;
+  if (GetNamedPipeInfo(pipe_handle_,
+                       /*lpFlags= */ &flags,
+                       /*lpOutBufferSize= */ NULL,
+                       /* lpInBufferSize= */ NULL,
+                       /* lpMaxInstances= */ NULL) == 0) {
+    LOG(ERROR) << "Could not query Gatekeeper named pipe handle info. "
+                  "Got error code "
+               << GetLastError();
+    return false;
+  }
+
+  if ((flags & PIPE_SERVER_END) == 0) {
+    LOG(ERROR) << "Gatekeeper handle is not the server end of a named pipe!";
+    return false;
+  }
+
+  // Create the event object
+  HANDLE event_handle =
+      CreateEventA(/* lpEventAttributes= */ NULL, /* bManualReset= */ true,
+                   /* bInitialState= */ 0, /* lpName= */ NULL);
+  if (event_handle == NULL) {
+    LOG(ERROR)
+        << "Error: Could not create Gatekeeper event object. Got error code "
+        << GetLastError();
+    return false;
+  }
+  pipe_overlapped_.hEvent = event_handle;
+
+  // Wait for client to connect to the pipe
+  ConnectNamedPipe(pipe_handle_, &pipe_overlapped_);
+
+  LOG(INFO) << "Listening to existing Gatekeeper pipe.";
+  if (WaitForSingleObject(pipe_overlapped_.hEvent, INFINITE) != WAIT_OBJECT_0) {
+    LOG(ERROR) << "Could not wait for Gatekeeper pipe's overlapped to be "
+                  "signalled. Got Windows error code "
+               << GetLastError();
+    return false;
+  }
+  if (!ResetEvent(pipe_overlapped_.hEvent)) {
+    LOG(ERROR) << "Could not reset Gatekeeper pipe's overlapped. Got Windows "
+                  "error code "
+               << GetLastError();
+    return false;
+  }
+  return true;
+}
+
+bool GatekeeperWindowsChannel::SendRequest(
+    uint32_t command, const gatekeeper::GateKeeperMessage& message) {
+  return SendMessage(command, false, message);
+}
+
+bool GatekeeperWindowsChannel::SendResponse(
+    uint32_t command, const gatekeeper::GateKeeperMessage& message) {
+  return SendMessage(command, true, message);
+}
+
+// TODO(b/203538883): Remove non-vsock logic and enable vsock by default
+bool GatekeeperWindowsChannel::SendMessage(
+    uint32_t command, bool is_response,
+    const gatekeeper::GateKeeperMessage& message) {
+  auto payload_size = message.GetSerializedSize();
+
+  if (payload_size > 1024 * 1024) {
+    LOG(WARNING) << "Sending large message with id: " << command
+                 << " and size: " << payload_size;
+  }
+
+  auto to_send = CreateGatekeeperMessage(command, is_response, payload_size);
+  message.Serialize(to_send->payload, to_send->payload + payload_size);
+  auto write_size = payload_size + sizeof(GatekeeperRawMessage);
+  auto to_send_bytes = reinterpret_cast<const char*>(to_send.get());
+  if (!WriteFile(pipe_handle_, to_send_bytes, write_size, NULL,
+                 &pipe_overlapped_) &&
+      GetLastError() != ERROR_IO_PENDING) {
+    LOG(ERROR) << "Could not write Gatekeeper Message. Got Windows error code "
+               << GetLastError();
+    return false;
+  }
+
+  // Vsock pipes are overlapped (asynchronous) and we need to wait for the
+  // overlapped event to be signaled.
+  // https://docs.microsoft.com/en-us/windows/win32/api/synchapi/nf-synchapi-waitforsingleobject#return-value
+  if (WaitForSingleObject(pipe_overlapped_.hEvent, INFINITE) != WAIT_OBJECT_0) {
+    LOG(ERROR) << "Could not wait for Gatekeeper pipe's overlapped to be "
+                  "signalled. Got Windows error code "
+               << GetLastError();
+    return false;
+  }
+  if (!ResetEvent(pipe_overlapped_.hEvent)) {
+    LOG(ERROR) << "Could not reset Gatekeeper pipe's overlapped. Got Windows "
+                  "error code "
+               << GetLastError();
+    return false;
+  }
+  return true;
+}
+
+bool GatekeeperWindowsChannel::ReadFromPipe(LPVOID buffer, DWORD size) {
+  if (ReadFile(pipe_handle_, buffer, size, NULL, &pipe_overlapped_) == FALSE) {
+    if (GetLastError() == ERROR_BROKEN_PIPE) {
+      LOG(INFO) << "Gatekeeper pipe was closed.";
+      return false;
+    } else if (GetLastError() != ERROR_IO_PENDING) {
+      LOG(ERROR) << "Could not read Gatekeeper message. Got Windows error code "
+                 << GetLastError();
+      return false;
+    }
+
+    // Wait for the asynchronous read to finish.
+    DWORD unused_bytes_read;
+    if (GetOverlappedResult(pipe_handle_, &pipe_overlapped_, &unused_bytes_read,
+                            /*bWait=*/TRUE) == FALSE) {
+      if (GetLastError() == ERROR_BROKEN_PIPE) {
+        LOG(INFO) << "Gatekeeper pipe was closed.";
+        return false;
+      }
+
+      LOG(ERROR) << "Error receiving Gatekeeper data. Got Windows error code "
+                 << GetLastError();
+      return false;
+    }
+  }
+
+  if (ResetEvent(pipe_overlapped_.hEvent) == 0) {
+    LOG(ERROR) << "Error calling ResetEvent for Gatekeeper data. Got "
+                  "Windows error code "
+               << GetLastError();
+
+    return false;
+  }
+
+  return true;
+}
+
+ManagedGatekeeperMessage GatekeeperWindowsChannel::ReceiveMessage() {
+  struct GatekeeperRawMessage message_header;
+
+  if (!ReadFromPipe(&message_header, sizeof(message_header))) {
+    return {};
+  }
+
+  if (message_header.payload_size > 1024 * 1024) {
+    LOG(WARNING) << "Received large message with id: " << message_header.cmd
+                 << " and size " << message_header.payload_size;
+  }
+
+  auto message =
+      CreateGatekeeperMessage(message_header.cmd, message_header.is_response,
+                              message_header.payload_size);
+  auto message_bytes = reinterpret_cast<char*>(message->payload);
+  if (!ReadFromPipe(message_bytes, message->payload_size)) {
+    return {};
+  }
+
+  return message;
+}
+
+}  // namespace cuttlefish
diff --git a/common/libs/security/gatekeeper_channel_windows.h b/common/libs/security/gatekeeper_channel_windows.h
new file mode 100644
index 0000000..0bd5314
--- /dev/null
+++ b/common/libs/security/gatekeeper_channel_windows.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <windows.h>
+#undef ERROR_RETRY
+#include <gatekeeper/gatekeeper_messages.h>
+
+#include <string>
+
+#include "common/libs/security/gatekeeper_channel.h"
+
+namespace cuttlefish {
+
+/*
+ * GatekeeperChannel implementation that synchronously sends Gatekeeper
+ * IPC/RPC messages over a Windows named pipe.
+ */
+class GatekeeperWindowsChannel : public GatekeeperChannel {
+ public:
+  ~GatekeeperWindowsChannel();
+
+  static std::unique_ptr<GatekeeperWindowsChannel> Create(HANDLE pipe_handle);
+  bool SendRequest(uint32_t command,
+                   const gatekeeper::GateKeeperMessage& message) override;
+  bool SendResponse(uint32_t command,
+                    const gatekeeper::GateKeeperMessage& message) override;
+  ManagedGatekeeperMessage ReceiveMessage() override;
+
+ protected:
+  GatekeeperWindowsChannel() = default;
+
+ private:
+  bool WaitForConnection(HANDLE pipe_handle);
+  bool SendMessage(uint32_t command, bool response,
+                   const gatekeeper::GateKeeperMessage& message);
+  bool ReadFromPipe(LPVOID buffer, DWORD size);
+
+  // Handle to the (asynchronous) named pipe.
+  HANDLE pipe_handle_ = NULL;
+  // OVERLAPPED struct for the named pipe. It contains an event object and is
+  // used to wait for asynchronous pipe operations.
+  OVERLAPPED pipe_overlapped_ = {};
+};
+
+}  // namespace cuttlefish
diff --git a/common/libs/security/keymaster_channel.cpp b/common/libs/security/keymaster_channel.cpp
index fde5aa5..2a347bc 100644
--- a/common/libs/security/keymaster_channel.cpp
+++ b/common/libs/security/keymaster_channel.cpp
@@ -16,22 +16,17 @@
 
 #include "common/libs/security/keymaster_channel.h"
 
 #include <cstdlib>
-#include <memory>
-#include <ostream>
-#include <string>
-
-#include <android-base/logging.h>
-#include <keymaster/android_keymaster_messages.h>
-#include <keymaster/mem.h>
-#include <keymaster/serializable.h>
-
-#include "common/libs/fs/shared_buf.h"
-
 namespace cuttlefish {
 
-ManagedKeymasterMessage CreateKeymasterMessage(
-    AndroidKeymasterCommand command, bool is_response, size_t payload_size) {
+void KeymasterCommandDestroyer::operator()(keymaster_message* ptr) {
+  { keymaster::Eraser(ptr, sizeof(keymaster_message) + ptr->payload_size); }
+  std::free(ptr);
+}
+
+ManagedKeymasterMessage CreateKeymasterMessage(AndroidKeymasterCommand command,
+                                               bool is_response,
+                                               size_t payload_size) {
   auto memory = std::malloc(payload_size + sizeof(keymaster_message));
   auto message = reinterpret_cast<keymaster_message*>(memory);
   message->cmd = command;
@@ -40,66 +34,4 @@
   return ManagedKeymasterMessage(message);
 }
 
-void KeymasterCommandDestroyer::operator()(keymaster_message* ptr) {
-  {
-    keymaster::Eraser(ptr, sizeof(keymaster_message) + ptr->payload_size);
-  }
-  std::free(ptr);
-}
-
-KeymasterChannel::KeymasterChannel(SharedFD input, SharedFD output)
-    : input_(input), output_(output) {
-}
-
-bool KeymasterChannel::SendRequest(
-    AndroidKeymasterCommand command, const keymaster::Serializable& message) {
-  return SendMessage(command, false, message);
-}
-
-bool KeymasterChannel::SendResponse(
-    AndroidKeymasterCommand command, const keymaster::Serializable& message) {
-  return SendMessage(command, true, message);
-}
-
-bool KeymasterChannel::SendMessage(
-    AndroidKeymasterCommand command,
-    bool is_response,
-    const keymaster::Serializable& message) {
-  auto payload_size = message.SerializedSize();
-  LOG(VERBOSE) << "Sending message with id: " << command << " and size "
-               << payload_size;
-  auto to_send = CreateKeymasterMessage(command, is_response, payload_size);
-  message.Serialize(to_send->payload, to_send->payload + payload_size);
-  auto write_size = payload_size + sizeof(keymaster_message);
-  auto to_send_bytes = reinterpret_cast<const char*>(to_send.get());
-  auto written = WriteAll(output_, to_send_bytes, write_size);
-  if (written != write_size) {
-    LOG(ERROR) << "Could not write Keymaster Message: " << output_->StrError();
-  }
-  return written == write_size;
-}
-
-ManagedKeymasterMessage KeymasterChannel::ReceiveMessage() {
-  struct keymaster_message message_header;
-  auto read = ReadExactBinary(input_, &message_header);
-  if (read != sizeof(keymaster_message)) {
-    LOG(ERROR) << "Expected " << sizeof(keymaster_message) << ", received "
-               << read;
-    LOG(ERROR) << "Could not read Keymaster Message: " << input_->StrError();
-    return {};
-  }
-  LOG(VERBOSE) << "Received message with id: " << message_header.cmd
-               << " and size " << message_header.payload_size;
-  auto message = CreateKeymasterMessage(message_header.cmd,
-                                        message_header.is_response,
-                                        message_header.payload_size);
-  auto message_bytes = reinterpret_cast<char*>(message->payload);
-  read = ReadExact(input_, message_bytes, message->payload_size);
-  if (read != message->payload_size) {
-    LOG(ERROR) << "Could not read Keymaster Message: " << input_->StrError();
-    return {};
-  }
-  return message;
-}
-
-}
+}  // namespace cuttlefish
diff --git a/common/libs/security/keymaster_channel.h b/common/libs/security/keymaster_channel.h
index eec5a7f..bb3fac1 100644
--- a/common/libs/security/keymaster_channel.h
+++ b/common/libs/security/keymaster_channel.h
@@ -16,15 +16,11 @@
 
 #pragma once
 
-#include <cstddef>
-#include <cstdint>
 #include <memory>
 
 #include <keymaster/android_keymaster_messages.h>
 #include <keymaster/serializable.h>
 
-#include "common/libs/fs/shared_fd.h"
-
 namespace keymaster {
 
 /**
@@ -33,13 +29,13 @@
  * @payload: start of the serialized command specific payload
  */
 struct keymaster_message {
-    AndroidKeymasterCommand cmd : 31;
-    bool is_response : 1;
-    std::uint32_t payload_size;
-    std::uint8_t payload[0];
+  AndroidKeymasterCommand cmd : 31;
+  bool is_response : 1;
+  std::uint32_t payload_size;
+  std::uint8_t payload[0];
 };
 
-} // namespace keymaster
+}  // namespace keymaster
 
 namespace cuttlefish {
 
@@ -51,7 +47,7 @@
  * CreateKeymasterMessage. Wipes memory from the keymaster_message instances.
  */
 class KeymasterCommandDestroyer {
-public:
+ public:
   void operator()(keymaster_message* ptr);
 };
 
@@ -72,19 +68,13 @@
  * IPC/RPC calls. Sends messages over a file descriptor.
  */
 class KeymasterChannel {
-public:
-  KeymasterChannel(SharedFD input, SharedFD output);
-
-  bool SendRequest(AndroidKeymasterCommand command,
-                   const keymaster::Serializable& message);
-  bool SendResponse(AndroidKeymasterCommand command,
-                    const keymaster::Serializable& message);
-  ManagedKeymasterMessage ReceiveMessage();
-private:
-  SharedFD input_;
-  SharedFD output_;
-  bool SendMessage(AndroidKeymasterCommand command, bool response,
-                   const keymaster::Serializable& message);
+ public:
+  virtual bool SendRequest(AndroidKeymasterCommand command,
+                           const keymaster::Serializable& message) = 0;
+  virtual bool SendResponse(AndroidKeymasterCommand command,
+                            const keymaster::Serializable& message) = 0;
+  virtual ManagedKeymasterMessage ReceiveMessage() = 0;
+  virtual ~KeymasterChannel() {}
 };
 
-} // namespace cuttlefish
+}  // namespace cuttlefish
diff --git a/common/libs/security/keymaster_channel_sharedfd.cpp b/common/libs/security/keymaster_channel_sharedfd.cpp
new file mode 100644
index 0000000..29bbea8
--- /dev/null
+++ b/common/libs/security/keymaster_channel_sharedfd.cpp
@@ -0,0 +1,87 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "common/libs/security/keymaster_channel_sharedfd.h"
+
+#include <cstdlib>
+#include <memory>
+#include <ostream>
+#include <string>
+
+#include <android-base/logging.h>
+#include <keymaster/android_keymaster_messages.h>
+#include <keymaster/mem.h>
+#include <keymaster/serializable.h>
+
+#include "common/libs/fs/shared_buf.h"
+
+namespace cuttlefish {
+
+SharedFdKeymasterChannel::SharedFdKeymasterChannel(SharedFD input,
+                                                   SharedFD output)
+    : input_(input), output_(output) {}
+
+bool SharedFdKeymasterChannel::SendRequest(
+    AndroidKeymasterCommand command, const keymaster::Serializable& message) {
+  return SendMessage(command, false, message);
+}
+
+bool SharedFdKeymasterChannel::SendResponse(
+    AndroidKeymasterCommand command, const keymaster::Serializable& message) {
+  return SendMessage(command, true, message);
+}
+
+bool SharedFdKeymasterChannel::SendMessage(
+    AndroidKeymasterCommand command, bool is_response,
+    const keymaster::Serializable& message) {
+  auto payload_size = message.SerializedSize();
+  LOG(VERBOSE) << "Sending message with id: " << command << " and size "
+               << payload_size;
+  auto to_send = CreateKeymasterMessage(command, is_response, payload_size);
+  message.Serialize(to_send->payload, to_send->payload + payload_size);
+  auto write_size = payload_size + sizeof(keymaster_message);
+  auto to_send_bytes = reinterpret_cast<const char*>(to_send.get());
+  auto written = WriteAll(output_, to_send_bytes, write_size);
+  if (written != write_size) {
+    LOG(ERROR) << "Could not write Keymaster Message: " << output_->StrError();
+  }
+  return written == write_size;
+}
+
+ManagedKeymasterMessage SharedFdKeymasterChannel::ReceiveMessage() {
+  struct keymaster_message message_header;
+  auto read = ReadExactBinary(input_, &message_header);
+  if (read != sizeof(keymaster_message)) {
+    LOG(ERROR) << "Expected " << sizeof(keymaster_message) << ", received "
+               << read;
+    LOG(ERROR) << "Could not read Keymaster Message: " << input_->StrError();
+    return {};
+  }
+  LOG(VERBOSE) << "Received message with id: " << message_header.cmd
+               << " and size " << message_header.payload_size;
+  auto message =
+      CreateKeymasterMessage(message_header.cmd, message_header.is_response,
+                             message_header.payload_size);
+  auto message_bytes = reinterpret_cast<char*>(message->payload);
+  read = ReadExact(input_, message_bytes, message->payload_size);
+  if (read != message->payload_size) {
+    LOG(ERROR) << "Could not read Keymaster Message: " << input_->StrError();
+    return {};
+  }
+  return message;
+}
+
+}  // namespace cuttlefish
diff --git a/common/libs/security/keymaster_channel_sharedfd.h b/common/libs/security/keymaster_channel_sharedfd.h
new file mode 100644
index 0000000..f52f309
--- /dev/null
+++ b/common/libs/security/keymaster_channel_sharedfd.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <keymaster/android_keymaster_messages.h>
+#include <keymaster/serializable.h>
+
+#include "common/libs/fs/shared_fd.h"
+#include "common/libs/security/keymaster_channel.h"
+
+namespace cuttlefish {
+
+/*
+ * KeymasterChannel implementation that synchronously sends Keymaster
+ * IPC/RPC messages over a pair of SharedFD file descriptors.
+ */
+class SharedFdKeymasterChannel : public KeymasterChannel {
+ public:
+  SharedFdKeymasterChannel(SharedFD input, SharedFD output);
+
+  bool SendRequest(AndroidKeymasterCommand command,
+                   const keymaster::Serializable& message) override;
+  bool SendResponse(AndroidKeymasterCommand command,
+                    const keymaster::Serializable& message) override;
+  ManagedKeymasterMessage ReceiveMessage() override;
+
+ private:
+  SharedFD input_;
+  SharedFD output_;
+  bool SendMessage(keymaster::AndroidKeymasterCommand command, bool response,
+                   const keymaster::Serializable& message);
+};
+
+}  // namespace cuttlefish
diff --git a/common/libs/security/keymaster_channel_test.cpp b/common/libs/security/keymaster_channel_test.cpp
index aed5bed..234fde0 100644
--- a/common/libs/security/keymaster_channel_test.cpp
+++ b/common/libs/security/keymaster_channel_test.cpp
@@ -20,7 +20,7 @@
 #include <string>
 
 #include "common/libs/fs/shared_fd.h"
-#include "common/libs/security/keymaster_channel.h"
+#include "common/libs/security/keymaster_channel_sharedfd.h"
 #include "gtest/gtest.h"
 
 namespace cuttlefish {
@@ -30,7 +30,7 @@
   SharedFD write_fd;
   ASSERT_TRUE(SharedFD::Pipe(&read_fd, &write_fd)) << "Failed to create pipe";
 
-  KeymasterChannel channel{read_fd, write_fd};
+  SharedFdKeymasterChannel channel{read_fd, write_fd};
 
   char buffer[] = {1, 2, 3, 4, 5, 6};
   keymaster::Buffer request(buffer, sizeof(buffer));
@@ -55,7 +55,7 @@
   SharedFD write_fd;
   ASSERT_TRUE(SharedFD::Pipe(&read_fd, &write_fd)) << "Failed to create pipe";
 
-  KeymasterChannel channel{read_fd, write_fd};
+  SharedFdKeymasterChannel channel{read_fd, write_fd};
 
   char buffer[] = {1, 2, 3, 4, 5, 6};
   keymaster::Buffer request(buffer, sizeof(buffer));
diff --git a/common/libs/security/keymaster_channel_windows.cpp b/common/libs/security/keymaster_channel_windows.cpp
new file mode 100644
index 0000000..45ad0b0
--- /dev/null
+++ b/common/libs/security/keymaster_channel_windows.cpp
@@ -0,0 +1,217 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "common/libs/security/keymaster_channel_windows.h"
+
+#include <windows.h>
+
+#include <errhandlingapi.h>
+#include <fileapi.h>
+#include <handleapi.h>
+#include <namedpipeapi.h>
+#include <chrono>
+#include <cstdlib>
+#include <thread>
+
+#include <android-base/logging.h>
+#include <keymaster/android_keymaster_utils.h>
+
+namespace cuttlefish {
+using keymaster::keymaster_message;
+
+std::unique_ptr<KeymasterWindowsChannel> KeymasterWindowsChannel::Create(
+    HANDLE pipe_handle) {
+  auto keymaster_channel =
+      std::unique_ptr<KeymasterWindowsChannel>(new KeymasterWindowsChannel());
+  if (!keymaster_channel->WaitForConnection(pipe_handle)) {
+    return nullptr;
+  }
+
+  return keymaster_channel;
+}
+
+bool KeymasterWindowsChannel::WaitForConnection(HANDLE pipe_handle) {
+  assert(pipe_handle_ == NULL);
+  pipe_handle_ = pipe_handle;
+
+  DWORD flags;
+  if (GetNamedPipeInfo(pipe_handle_,
+                       /*lpFlags= */ &flags,
+                       /*lpOutBufferSize= */ NULL,
+                       /* lpInBufferSize= */ NULL,
+                       /* lpMaxInstances= */ NULL) == 0) {
+    LOG(ERROR)
+        << "Could not query Keymaster named pipe handle info. Got error code "
+        << GetLastError();
+    return false;
+  }
+
+  if ((flags & PIPE_SERVER_END) == 0) {
+    LOG(ERROR) << "Keymaster handle is not the server end of a named pipe!";
+    return false;
+  }
+
+  // Create the event object
+  HANDLE event_handle =
+      CreateEventA(/* lpEventAttributes= */ NULL, /* bManualReset= */ true,
+                   /* bInitialState= */ 0, /* lpName= */ NULL);
+  if (event_handle == NULL) {
+    LOG(ERROR)
+        << "Error: Could not create keymaster event object. Got error code "
+        << GetLastError();
+    return false;
+  }
+  pipe_overlapped_.hEvent = event_handle;
+
+  // Wait for client to connect to the pipe
+  ConnectNamedPipe(pipe_handle_, &pipe_overlapped_);
+  LOG(INFO) << "Listening to existing keymaster pipe handle.";
+
+  if (WaitForSingleObject(pipe_overlapped_.hEvent, INFINITE) != WAIT_OBJECT_0) {
+    LOG(ERROR) << "Could not wait for Keymaster pipe's overlapped to be "
+                  "signalled. Got Windows error code "
+               << GetLastError();
+    return false;
+  }
+  if (!ResetEvent(pipe_overlapped_.hEvent)) {
+    LOG(ERROR) << "Could not reset Keymaster pipe's overlapped. Got Windows "
+                  "error code "
+               << GetLastError();
+    return false;
+  }
+  return true;
+}
+
+KeymasterWindowsChannel::~KeymasterWindowsChannel() {
+  if (pipe_handle_) {
+    CloseHandle(pipe_handle_);
+  }
+
+  if (pipe_overlapped_.hEvent) {
+    CloseHandle(pipe_overlapped_.hEvent);
+  }
+}
+
+bool KeymasterWindowsChannel::SendRequest(
+    AndroidKeymasterCommand command, const keymaster::Serializable& message) {
+  return SendMessage(command, false, message);
+}
+
+bool KeymasterWindowsChannel::SendResponse(
+    AndroidKeymasterCommand command, const keymaster::Serializable& message) {
+  return SendMessage(command, true, message);
+}
+
+bool KeymasterWindowsChannel::SendMessage(
+    AndroidKeymasterCommand command, bool is_response,
+    const keymaster::Serializable& message) {
+  auto payload_size = message.SerializedSize();
+  if (payload_size > 1024 * 1024) {
+    LOG(WARNING) << "Sending large message with id: " << command << " and size "
+                 << payload_size;
+  }
+
+  auto to_send = CreateKeymasterMessage(command, is_response, payload_size);
+  message.Serialize(to_send->payload, to_send->payload + payload_size);
+  auto write_size = payload_size + sizeof(keymaster_message);
+  auto to_send_bytes = reinterpret_cast<const char*>(to_send.get());
+  if (!WriteFile(pipe_handle_, to_send_bytes, write_size, NULL,
+                 &pipe_overlapped_) &&
+      GetLastError() != ERROR_IO_PENDING) {
+    LOG(ERROR) << "Could not write Keymaster Message. Got Windows error code "
+               << GetLastError();
+    return false;
+  }
+
+  // Vsock pipes are overlapped (asynchronous) and we need to wait for the
+  // overlapped event to be signaled.
+  // https://docs.microsoft.com/en-us/windows/win32/api/synchapi/nf-synchapi-waitforsingleobject#return-value
+  if (WaitForSingleObject(pipe_overlapped_.hEvent, INFINITE) != WAIT_OBJECT_0) {
+    LOG(ERROR) << "Could not wait for Keymaster pipe's overlapped to be "
+                  "signalled. Got Windows error code "
+               << GetLastError();
+    return false;
+  }
+  if (!ResetEvent(pipe_overlapped_.hEvent)) {
+    LOG(ERROR) << "Could not reset Keymaster pipe's overlapped. Got Windows "
+                  "error code "
+               << GetLastError();
+    return false;
+  }
+  return true;
+}
+
+bool KeymasterWindowsChannel::ReadFromPipe(LPVOID buffer, DWORD size) {
+  if (ReadFile(pipe_handle_, buffer, size, NULL, &pipe_overlapped_) == FALSE) {
+    if (GetLastError() == ERROR_BROKEN_PIPE) {
+      LOG(INFO) << "Keymaster pipe was closed.";
+      return false;
+    } else if (GetLastError() != ERROR_IO_PENDING) {
+      LOG(ERROR) << "Could not read Keymaster message. Got Windows error code "
+                 << GetLastError();
+      return false;
+    }
+
+    // Wait for the asynchronous read to finish.
+    DWORD unused_bytes_read;
+    if (GetOverlappedResult(pipe_handle_, &pipe_overlapped_, &unused_bytes_read,
+                            /*bWait=*/TRUE) == FALSE) {
+      if (GetLastError() == ERROR_BROKEN_PIPE) {
+        LOG(INFO) << "Keymaster pipe was closed.";
+        return false;
+      }
+
+      LOG(ERROR) << "Error receiving Keymaster data. Got Windows error code "
+                 << GetLastError();
+      return false;
+    }
+  }
+
+  if (ResetEvent(pipe_overlapped_.hEvent) == 0) {
+    LOG(ERROR) << "Error calling ResetEvent for Keymaster data. Got "
+                  "Windows error code "
+               << GetLastError();
+
+    return false;
+  }
+
+  return true;
+}
+
+ManagedKeymasterMessage KeymasterWindowsChannel::ReceiveMessage() {
+  struct keymaster_message message_header;
+  if (!ReadFromPipe(&message_header, sizeof(message_header))) {
+    return {};
+  }
+
+  if (message_header.payload_size > 1024 * 1024) {
+    LOG(WARNING) << "Received large message with id: " << message_header.cmd
+                 << " and size " << message_header.payload_size;
+  }
+
+  auto message =
+      CreateKeymasterMessage(message_header.cmd, message_header.is_response,
+                             message_header.payload_size);
+  auto message_bytes = reinterpret_cast<char*>(message->payload);
+
+  if (!ReadFromPipe(message_bytes, message->payload_size)) {
+    return {};
+  }
+
+  return message;
+}
+
+}  // namespace cuttlefish
diff --git a/common/libs/security/keymaster_channel_windows.h b/common/libs/security/keymaster_channel_windows.h
new file mode 100644
index 0000000..164a1d9
--- /dev/null
+++ b/common/libs/security/keymaster_channel_windows.h
@@ -0,0 +1,65 @@
+/*
+ * Copyright 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <windows.h>
+#include <string>
+
+#include <keymaster/android_keymaster_messages.h>
+#include <keymaster/serializable.h>
+
+#include "common/libs/security/keymaster_channel.h"
+
+namespace cuttlefish {
+
+using keymaster::AndroidKeymasterCommand;
+
+/*
+ * KeymasterChannel implementation that synchronously sends Keymaster
+ * IPC/RPC messages over a Windows named pipe.
+ */
+class KeymasterWindowsChannel : public KeymasterChannel {
+ public:
+  ~KeymasterWindowsChannel();
+
+  static std::unique_ptr<KeymasterWindowsChannel> Create(HANDLE pipe_handle);
+
+  bool SendRequest(AndroidKeymasterCommand command,
+                   const keymaster::Serializable& message) override;
+  bool SendResponse(AndroidKeymasterCommand command,
+                    const keymaster::Serializable& message) override;
+  ManagedKeymasterMessage ReceiveMessage() override;
+
+ protected:
+  KeymasterWindowsChannel() = default;
+
+ private:
+  bool WaitForConnection(HANDLE pipe_handle);
+
+  bool SendMessage(AndroidKeymasterCommand command, bool response,
+                   const keymaster::Serializable& message);
+
+  bool ReadFromPipe(LPVOID buffer, DWORD size);
+
+  // Handle to the (asynchronous) named pipe.
+  HANDLE pipe_handle_ = NULL;
+  // OVERLAPPED struct for the named pipe. It contains an event object and is
+  // used to wait for asynchronous pipe operations.
+  OVERLAPPED pipe_overlapped_ = {};
+};
+
+}  // namespace cuttlefish
diff --git a/common/libs/utils/Android.bp b/common/libs/utils/Android.bp
index 412bbef..09ee08b 100644
--- a/common/libs/utils/Android.bp
+++ b/common/libs/utils/Android.bp
@@ -21,19 +21,24 @@
     name: "libcuttlefish_utils",
     srcs: [
         "archive.cpp",
-        "subprocess.cpp",
-        "environment.cpp",
-        "flag_parser.cpp",
-        "shared_fd_flag.cpp",
-        "files.cpp",
-        "users.cpp",
-        "network.cpp",
         "base64.cpp",
+        "environment.cpp",
+        "files.cpp",
+        "flag_parser.cpp",
+        "flags_validator.cpp",
+        "inotify.cpp",
+        "json.cpp",
+        "network.cpp",
+        "proc_file_utils.cpp",
+        "scope_guard.cpp",
+        "shared_fd_flag.cpp",
+        "socket2socket_proxy.cpp",
+        "subprocess.cpp",
         "tcp_socket.cpp",
         "tee_logging.cpp",
         "unix_sockets.cpp",
+        "users.cpp",
         "vsock_connection.cpp",
-        "socket2socket_proxy.cpp",
     ],
     shared: {
         shared_libs: [
@@ -56,16 +61,33 @@
     defaults: ["cuttlefish_host"],
 }
 
+cc_library_static {
+    name: "libcuttlefish_utils_product",
+    srcs: [
+        "inotify.cpp",
+    ],
+    shared_libs: [
+        "libbase",
+        "liblog",
+    ],
+    stl: "libc++_static",
+    defaults: ["cuttlefish_guest_product_only"],
+}
+
 cc_test_host {
     name: "libcuttlefish_utils_test",
     srcs: [
         "flag_parser_test.cpp",
+        "proc_file_utils_test.cpp",
+        "result_test.cpp",
+        "unique_resource_allocator_test.cpp",
         "unix_sockets_test.cpp",
     ],
     static_libs: [
         "libbase",
         "libcuttlefish_fs",
         "libcuttlefish_utils",
+        "libgmock",
     ],
     shared_libs: [
         "libcrypto",
diff --git a/common/libs/utils/archive.cpp b/common/libs/utils/archive.cpp
index 0610327..f04a942 100644
--- a/common/libs/utils/archive.cpp
+++ b/common/libs/utils/archive.cpp
@@ -16,33 +16,58 @@
 
 #include "common/libs/utils/archive.h"
 
-#include <ostream>
+#include <unistd.h>
+
 #include <string>
 #include <utility>
 #include <vector>
 
-#include <android-base/strings.h>
 #include <android-base/logging.h>
+#include <android-base/strings.h>
 
 #include "common/libs/utils/subprocess.h"
 
 namespace cuttlefish {
+namespace {
 
-Archive::Archive(const std::string& file) : file(file) {
+Result<std::vector<std::string>> ExtractHelper(
+    std::vector<std::string>& files, const std::string& archive_filepath,
+    const std::string& target_directory, const bool keep_archive) {
+  CF_EXPECT(!files.empty(), "No files extracted from " << archive_filepath);
+
+  auto it = files.begin();
+  while (it != files.end()) {
+    if (*it == "" || android::base::EndsWith(*it, "/")) {
+      it = files.erase(it);
+    } else {
+      *it = target_directory + "/" + *it;
+      it++;
+    }
+  }
+
+  if (!keep_archive && unlink(archive_filepath.data()) != 0) {
+    LOG(ERROR) << "Could not delete " << archive_filepath;
+    files.push_back(archive_filepath);
+  }
+
+  return {files};
 }
 
-Archive::~Archive() {
-}
+}  // namespace
+
+Archive::Archive(const std::string& file) : file_(file) {}
+
+Archive::~Archive() {}
 
 std::vector<std::string> Archive::Contents() {
   Command bsdtar_cmd("/usr/bin/bsdtar");
   bsdtar_cmd.AddParameter("-tf");
-  bsdtar_cmd.AddParameter(file);
+  bsdtar_cmd.AddParameter(file_);
   std::string bsdtar_input, bsdtar_output;
   auto bsdtar_ret = RunWithManagedStdio(std::move(bsdtar_cmd), &bsdtar_input,
                                              &bsdtar_output, nullptr);
   if (bsdtar_ret != 0) {
-    LOG(ERROR) << "`bsdtar -tf \"" << file << "\"` returned " << bsdtar_ret;
+    LOG(ERROR) << "`bsdtar -tf \"" << file_ << "\"` returned " << bsdtar_ret;
   }
   return bsdtar_ret == 0
       ? android::base::Split(bsdtar_output, "\n")
@@ -61,7 +86,7 @@
   bsdtar_cmd.AddParameter("-C");
   bsdtar_cmd.AddParameter(target_directory);
   bsdtar_cmd.AddParameter("-f");
-  bsdtar_cmd.AddParameter(file);
+  bsdtar_cmd.AddParameter(file_);
   bsdtar_cmd.AddParameter("-S");
   for (const auto& extract : to_extract) {
     bsdtar_cmd.AddParameter(extract);
@@ -70,7 +95,8 @@
                            Subprocess::StdIOChannel::kStdErr);
   auto bsdtar_ret = bsdtar_cmd.Start().Wait();
   if (bsdtar_ret != 0) {
-    LOG(ERROR) << "bsdtar extraction on \"" << file << "\" returned " << bsdtar_ret;
+    LOG(ERROR) << "bsdtar extraction on \"" << file_ << "\" returned "
+               << bsdtar_ret;
   }
   return bsdtar_ret == 0;
 }
@@ -78,18 +104,51 @@
 std::string Archive::ExtractToMemory(const std::string& path) {
   Command bsdtar_cmd("/usr/bin/bsdtar");
   bsdtar_cmd.AddParameter("-xf");
-  bsdtar_cmd.AddParameter(file);
+  bsdtar_cmd.AddParameter(file_);
   bsdtar_cmd.AddParameter("-O");
   bsdtar_cmd.AddParameter(path);
   std::string stdout_str;
   auto ret =
       RunWithManagedStdio(std::move(bsdtar_cmd), nullptr, &stdout_str, nullptr);
   if (ret != 0) {
-    LOG(ERROR) << "Could not extract \"" << path << "\" from \"" << file
+    LOG(ERROR) << "Could not extract \"" << path << "\" from \"" << file_
                << "\" to memory.";
     return "";
   }
   return stdout_str;
 }
 
+Result<std::vector<std::string>> ExtractImages(
+    const std::string& archive_filepath, const std::string& target_directory,
+    const std::vector<std::string>& images, const bool keep_archive) {
+  Archive archive(archive_filepath);
+  CF_EXPECT(archive.ExtractFiles(images, target_directory),
+            "Could not extract images from \"" << archive_filepath << "\" to \""
+                                               << target_directory << "\"");
+
+  std::vector<std::string> files = images;
+  return ExtractHelper(files, archive_filepath, target_directory, keep_archive);
+}
+
+Result<std::string> ExtractImage(const std::string& archive_filepath,
+                                 const std::string& target_directory,
+                                 const std::string& image,
+                                 const bool keep_archive) {
+  std::vector<std::string> result = CF_EXPECT(
+      ExtractImages(archive_filepath, target_directory, {image}, keep_archive));
+  return {result.front()};
+}
+
+Result<std::vector<std::string>> ExtractArchiveContents(
+    const std::string& archive_filepath, const std::string& target_directory,
+    const bool keep_archive) {
+  Archive archive(archive_filepath);
+  CF_EXPECT(archive.ExtractAll(target_directory),
+            "Could not extract \"" << archive_filepath << "\" to \""
+                                   << target_directory << "\"");
+
+  std::vector<std::string> files = archive.Contents();
+  return ExtractHelper(files, archive_filepath, target_directory, keep_archive);
+}
+
 } // namespace cuttlefish
diff --git a/common/libs/utils/archive.h b/common/libs/utils/archive.h
index ea548f2..b0def68 100644
--- a/common/libs/utils/archive.h
+++ b/common/libs/utils/archive.h
@@ -18,12 +18,15 @@
 #include <string>
 #include <vector>
 
+#include "common/libs/utils/result.h"
+
 namespace cuttlefish {
 
 // Operations on archive files
 class Archive {
-  std::string file;
-public:
+  std::string file_;
+
+ public:
   Archive(const std::string& file);
   ~Archive();
 
@@ -34,4 +37,17 @@
   std::string ExtractToMemory(const std::string& path);
 };
 
+Result<std::vector<std::string>> ExtractImages(
+    const std::string& archive_filepath, const std::string& target_directory,
+    const std::vector<std::string>& images, const bool keep_archive);
+
+Result<std::string> ExtractImage(const std::string& archive_filepath,
+                                 const std::string& target_directory,
+                                 const std::string& image,
+                                 const bool keep_archive);
+
+Result<std::vector<std::string>> ExtractArchiveContents(
+    const std::string& archive_filepath, const std::string& target_directory,
+    const bool keep_archive);
+
 } // namespace cuttlefish
diff --git a/common/libs/utils/base64.cpp b/common/libs/utils/base64.cpp
index 837c486..2aad234 100644
--- a/common/libs/utils/base64.cpp
+++ b/common/libs/utils/base64.cpp
@@ -49,9 +49,11 @@
     return false;
   }
   buffer->resize(out_len);
-  return EVP_DecodeBase64(buffer->data(), &out_len, out_len,
+  auto result = EVP_DecodeBase64(buffer->data(), &out_len, out_len,
                           reinterpret_cast<const std::uint8_t *>(data.data()),
                           data.size());
+  buffer->resize(out_len); // remove padding '=' characters
+  return result;
 }
 
 }  // namespace cuttlefish
diff --git a/common/libs/utils/collect.h b/common/libs/utils/collect.h
new file mode 100644
index 0000000..08d5287
--- /dev/null
+++ b/common/libs/utils/collect.h
@@ -0,0 +1,61 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <algorithm>
+
+#include "common/libs/utils/result.h"
+
+namespace cuttlefish {
+
+/**
+ * Return all the elements in container that satisfy predicate.
+ *
+ * Container could be mostly any type, and Set should be any sort of set.
+ */
+template <typename T, typename Set, typename Container>
+Set Collect(const Container& container,
+            std::function<bool(const T&)> predicate) {
+  Set output;
+  std::copy_if(container.cbegin(), container.cend(),
+               std::inserter(output, output.end()), predicate);
+  return output;
+}
+
+/**
+ * Collect all Ts from each container inside the "Containers"
+ *
+ * Containers are a set/list of Container. Container can be viewed as a set/list
+ * of Ts.
+ *
+ */
+template <typename T, typename Set, typename Containers>
+Set Flatten(const Containers& containers) {
+  Set output;
+  for (const auto& container : containers) {
+    output.insert(container.cbegin(), container.cend());
+  }
+  return output;
+}
+
+template <typename S>
+Result<typename std::remove_reference<S>::type> AtMostN(S&& s, const size_t n) {
+  CF_EXPECT(s.size() <= n);
+  return {std::forward<S>(s)};
+}
+
+}  // namespace cuttlefish
diff --git a/common/libs/utils/contains.h b/common/libs/utils/contains.h
new file mode 100644
index 0000000..97361c1
--- /dev/null
+++ b/common/libs/utils/contains.h
@@ -0,0 +1,125 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <algorithm>
+#include <iterator>
+#include <string>
+#include <string_view>
+#include <type_traits>
+
+/**
+ * @file: Implement Contains(container, key)
+ *
+ * The function returns true if container has the key, or false.
+ *
+ * If the container has a find(key) method (e.g. set, unordered_set, std::map,
+ * etc), the find method is used. Otherwise, the std::find function from
+ * algorithm is used, which may result in a linear search.
+ *
+ * See go/cf-utils-contains for more details.
+ */
+namespace cuttlefish {
+namespace contains_internal_impl {
+
+/*
+ * If Container does not have a find(Key) method, this is a compile error
+ * used by SFINAE. If it does have one, this is equivalent to the "void" type.
+ */
+template <typename Container, typename Key>
+using VoidTypeIfHasFind =
+    decltype(void(std::declval<Container&>().find(std::declval<Key&>())));
+
+/*
+ * Here is how this works:
+ *
+ * Given that
+ *   HasFindImpl<Container, T> is used in the code
+ *
+ *   1. The input is effectively regarded as HasFindImpl<Container, T, void>.
+ *   The specialized version below isn't looked up yet; whether the specialized
+ *   version below is used or not, the compiler front-end needs all three
+ *   template parameters to match against either special or generic version.
+ *   When obtaining "all three," the front-end only looks up the base template
+ *   definition. The default type of the third template parameter is void, so
+ *   the given type is expanded/deduced to HasFindImpl<Container, T, void>.
+ *
+ *   2. Now, given HasFindImpl<Container, T, void>, the compiler front-end
+ *   tries matching against the specialized and generic/original versions. If
+ *   the input could match both a generic and a specialized one, the compiler
+ *   chooses the specialized one. Thus, particularly, HasFindImpl
+ *   implementation's third parameter in the specialized version must be the
+ *   same as the default type of the third template parameter to the original/
+ *   generic version, which is "void."
+ */
+template <typename Container, typename T, typename = void>
+struct HasFindImpl : std::false_type {};
+
+template <typename Container, typename T>
+struct HasFindImpl<Container, T, VoidTypeIfHasFind<Container, T>>
+    : std::true_type {};
+
+template <typename T>
+using RemoveCvref =
+    typename std::remove_cv_t<typename std::remove_reference_t<T>>;
+
+template <typename T, typename U>
+using IsSame = typename std::is_same<RemoveCvref<T>, RemoveCvref<U>>;
+
+template <typename T>
+struct IsString : IsSame<std::string, T> {};
+
+template <typename T>
+struct IsStringView : IsSame<std::string_view, T> {};
+
+}  // namespace contains_internal_impl
+
+// TODO(kwstephenkim): Replace these when C++20 starts to be used.
+template <typename Container, typename U,
+          typename = std::enable_if_t<
+              contains_internal_impl::HasFindImpl<Container, U>::value &&
+                  (!contains_internal_impl::IsString<Container>::value &&
+                   !contains_internal_impl::IsStringView<Container>::value),
+              void>>
+constexpr bool Contains(Container&& container, U&& u) {
+  // using O(1) or O(lgN) find()
+  return container.find(std::forward<U>(u)) != container.end();
+}
+
+template <
+    typename Container, typename U,
+    std::enable_if_t<!contains_internal_impl::HasFindImpl<Container, U>::value,
+                     int> = 0>
+constexpr bool Contains(Container&& container, U&& u) {
+  // falls back to a generic, likely linear search
+  const auto itr =
+      std::find(std::begin(container), std::end(container), std::forward<U>(u));
+  return itr != std::end(container);
+}
+
+// std::string:: or std::string_view::find() returns index, not iterator
+template <typename T>
+constexpr bool Contains(const std::string& s, T&& t) {
+  return s.find(std::forward<T>(t)) != std::string::npos;
+}
+
+template <typename T>
+constexpr bool Contains(const std::string_view& s, T&& t) {
+  return s.find(std::forward<T>(t)) != std::string_view::npos;
+}
+
+}  // namespace cuttlefish
diff --git a/common/libs/utils/environment.cpp b/common/libs/utils/environment.cpp
index b2643fa..c9f3847 100644
--- a/common/libs/utils/environment.cpp
+++ b/common/libs/utils/environment.cpp
@@ -23,6 +23,7 @@
 #include <string>
 
 #include <android-base/logging.h>
+#include <android-base/strings.h>
 
 #include "common/libs/utils/files.h"
 
@@ -46,12 +47,9 @@
  */
 std::string HostArchStr() {
   static std::string arch;
-  static bool cached = false;
-
-  if (cached) {
+  if (!arch.empty()) {
     return arch;
   }
-  cached = true;
 
   // good to check if uname exists and is executable
   // or, guarantee uname is available by dependency list
@@ -70,7 +68,9 @@
         decltype(len) upper = len + 1;
         std::string format("%");
         format.append(std::to_string(upper)).append("s");
-        std::shared_ptr<char> buf(new char[upper],
+        // 1 extra character needed for the terminating null
+        // character added by fscanf.
+        std::shared_ptr<char> buf(new char[upper + 1],
                                   std::default_delete<char[]>());
         if (fscanf(fp, format.c_str(), buf.get()) == EOF) {
           return std::string{};
@@ -78,13 +78,8 @@
         std::string result(buf.get());
         return (result.length() < upper) ? result : std::string{};
       };
-  arch = read_from_file(pip, 20);
+  arch = android::base::Trim(std::string_view{read_from_file(pip, 20)});
   pclose(pip);
-
-  // l and r trim on arch
-  static const char* whitespace = "\t\n\r\f\v ";
-  arch.erase(arch.find_last_not_of(whitespace) + 1); // r trim
-  arch.erase(0, arch.find_first_not_of(whitespace)); // l trim
   return arch;
 }
 
@@ -94,6 +89,8 @@
     return Arch::Arm64;
   } else if (arch_str == "arm") {
     return Arch::Arm;
+  } else if (arch_str == "riscv64") {
+    return Arch::RiscV64;
   } else if (arch_str == "x86_64") {
     return Arch::X86_64;
   } else if (arch_str.size() == 4 && arch_str[0] == 'i' && arch_str[2] == '8' &&
diff --git a/common/libs/utils/environment.h b/common/libs/utils/environment.h
index 004a849..b84c036 100644
--- a/common/libs/utils/environment.h
+++ b/common/libs/utils/environment.h
@@ -22,6 +22,7 @@
 enum class Arch {
   Arm,
   Arm64,
+  RiscV64,
   X86,
   X86_64,
 };
diff --git a/common/libs/utils/files.cpp b/common/libs/utils/files.cpp
index c1b1778..6b650c7 100644
--- a/common/libs/utils/files.cpp
+++ b/common/libs/utils/files.cpp
@@ -15,13 +15,20 @@
  */
 
 #include "common/libs/utils/files.h"
-
-#include <android-base/logging.h>
+#include "common/libs/utils/contains.h"
+#include "common/libs/utils/inotify.h"
 
 #include <dirent.h>
 #include <fcntl.h>
 #include <ftw.h>
 #include <libgen.h>
+#include <linux/fiemap.h>
+#include <linux/fs.h>
+#include <sched.h>
+#include <sys/inotify.h>
+#include <sys/ioctl.h>
+#include <sys/select.h>
+#include <sys/sendfile.h>
 #include <sys/stat.h>
 #include <sys/types.h>
 #include <unistd.h>
@@ -43,14 +50,20 @@
 #include <string>
 #include <vector>
 
+#include <android-base/logging.h>
 #include <android-base/macros.h>
 
+#include "android-base/strings.h"
 #include "common/libs/fs/shared_fd.h"
+#include "common/libs/utils/result.h"
+#include "common/libs/utils/scope_guard.h"
+#include "common/libs/utils/subprocess.h"
+#include "common/libs/utils/users.h"
 
 namespace cuttlefish {
 
 bool FileExists(const std::string& path, bool follow_symlinks) {
-  struct stat st;
+  struct stat st {};
   return (follow_symlinks ? stat : lstat)(path.c_str(), &st) == 0;
 }
 
@@ -58,21 +71,19 @@
   return FileSize(path) > 0;
 }
 
-std::vector<std::string> DirectoryContents(const std::string& path) {
+Result<std::vector<std::string>> DirectoryContents(const std::string& path) {
   std::vector<std::string> ret;
   std::unique_ptr<DIR, int(*)(DIR*)> dir(opendir(path.c_str()), closedir);
-  CHECK(dir != nullptr) << "Could not read from dir \"" << path << "\"";
-  if (dir) {
-    struct dirent *ent;
-    while ((ent = readdir(dir.get()))) {
-      ret.push_back(ent->d_name);
-    }
+  CF_EXPECT(dir != nullptr, "Could not read from dir \"" << path << "\"");
+  struct dirent* ent{};
+  while ((ent = readdir(dir.get()))) {
+    ret.emplace_back(ent->d_name);
   }
   return ret;
 }
 
 bool DirectoryExists(const std::string& path, bool follow_symlinks) {
-  struct stat st;
+  struct stat st {};
   if ((follow_symlinks ? stat : lstat)(path.c_str(), &st) == -1) {
     return false;
   }
@@ -82,18 +93,48 @@
   return true;
 }
 
-Result<void> EnsureDirectoryExists(const std::string& directory_path) {
-  if (!DirectoryExists(directory_path)) {
-    LOG(DEBUG) << "Setting up " << directory_path;
-    if (mkdir(directory_path.c_str(), S_IRWXU | S_IRWXG | S_IROTH | S_IXOTH) <
-            0 &&
-        errno != EEXIST) {
-      return CF_ERRNO("Failed to create dir: \"" << directory_path);
-    }
+Result<void> EnsureDirectoryExists(const std::string& directory_path,
+                                   const mode_t mode,
+                                   const std::string& group_name) {
+  if (DirectoryExists(directory_path)) {
+    return {};
   }
+  const auto parent_dir = cpp_dirname(directory_path);
+  if (parent_dir.size() > 1) {
+    EnsureDirectoryExists(parent_dir, mode, group_name);
+  }
+  LOG(DEBUG) << "Setting up " << directory_path;
+  if (mkdir(directory_path.c_str(), mode) < 0 && errno != EEXIST) {
+    return CF_ERRNO("Failed to create directory: \"" << directory_path << "\"");
+  }
+
+  if (group_name != "") {
+    ChangeGroup(directory_path, group_name);
+  }
+
   return {};
 }
 
+Result<void> ChangeGroup(const std::string& path,
+                         const std::string& group_name) {
+  auto groupId = GroupIdFromName(group_name);
+
+  if (groupId == -1) {
+    return CF_ERR("Failed to get group id: ") << group_name;
+  }
+
+  if (chown(path.c_str(), -1, groupId) != 0) {
+    return CF_ERRNO("Failed to set group for path: "
+                    << path << ", " << group_name << ", " << strerror(errno));
+  }
+
+  return {};
+}
+
+bool CanAccess(const std::string& path, const int mode) {
+  return access(path.c_str(), mode) == 0;
+}
+
 bool IsDirectoryEmpty(const std::string& path) {
   auto direc = ::opendir(path.c_str());
   if (!direc) {
@@ -149,6 +190,81 @@
          0;
 }
 
+namespace {
+
+bool SendFile(int out_fd, int in_fd, off64_t* offset, size_t count) {
+  while (count > 0) {
+    const auto bytes_written =
+        TEMP_FAILURE_RETRY(sendfile(out_fd, in_fd, offset, count));
+    if (bytes_written <= 0) {
+      return false;
+    }
+
+    count -= bytes_written;
+  }
+  return true;
+}
+
+}  // namespace
+
+bool Copy(const std::string& from, const std::string& to) {
+  android::base::unique_fd fd_from(
+      open(from.c_str(), O_RDONLY | O_CLOEXEC));
+  android::base::unique_fd fd_to(
+      open(to.c_str(), O_WRONLY | O_CREAT | O_TRUNC | O_CLOEXEC, 0644));
+
+  if (fd_from.get() < 0 || fd_to.get() < 0) {
+    return false;
+  }
+
+  off_t farthest_seek = lseek(fd_from.get(), 0, SEEK_END);
+  if (farthest_seek == -1) {
+    PLOG(ERROR) << "Could not lseek in \"" << from << "\"";
+    return false;
+  }
+  if (ftruncate64(fd_to.get(), farthest_seek) < 0) {
+    PLOG(ERROR) << "Failed to ftruncate " << to;
+  }
+  off_t offset = 0;
+  while (offset < farthest_seek) {
+    off_t new_offset = lseek(fd_from.get(), offset, SEEK_HOLE);
+    if (new_offset == -1) {
+      // ENXIO is returned when there are no more blocks of this type
+      // coming.
+      if (errno == ENXIO) {
+        return true;
+      }
+      PLOG(ERROR) << "Could not lseek in \"" << from << "\"";
+      return false;
+    }
+    auto data_bytes = new_offset - offset;
+    if (lseek(fd_to.get(), offset, SEEK_SET) < 0) {
+      PLOG(ERROR) << "lseek() on " << to << " failed";
+      return false;
+    }
+    if (!SendFile(fd_to.get(), fd_from.get(), &offset, data_bytes)) {
+      PLOG(ERROR) << "sendfile() failed";
+      return false;
+    }
+    CHECK_EQ(offset, new_offset);
+    if (offset >= farthest_seek) {
+      return true;
+    }
+    new_offset = lseek(fd_from.get(), offset, SEEK_DATA);
+    if (new_offset == -1) {
+      // ENXIO is returned when there are no more blocks of this type
+      // coming.
+      if (errno == ENXIO) {
+        return true;
+      }
+      PLOG(ERROR) << "Could not lseek in \"" << from << "\"";
+      return false;
+    }
+    offset = new_offset;
+  }
+  return true;
+}
+
 std::string AbsolutePath(const std::string& path) {
   if (path.empty()) {
     return {};
@@ -171,7 +287,7 @@
 }
 
 off_t FileSize(const std::string& path) {
-  struct stat st;
+  struct stat st {};
   if (stat(path.c_str(), &st) == -1) {
     return 0;
   }
@@ -185,7 +301,7 @@
 
 // TODO(schuffelen): Use std::filesystem::last_write_time when on C++17
 std::chrono::system_clock::time_point FileModificationTime(const std::string& path) {
-  struct stat st;
+  struct stat st {};
   if (stat(path.c_str(), &st) == -1) {
     return std::chrono::system_clock::time_point();
   }
@@ -193,14 +309,14 @@
   return std::chrono::system_clock::time_point(seconds);
 }
 
-bool RenameFile(const std::string& old_name, const std::string& new_name) {
-  LOG(DEBUG) << "Renaming " << old_name << " to " << new_name;
-  if(rename(old_name.c_str(), new_name.c_str())) {
-    LOG(ERROR) << "File rename failed due to " << strerror(errno);
-    return false;
+Result<std::string> RenameFile(const std::string& current_filepath,
+                               const std::string& target_filepath) {
+  if (current_filepath != target_filepath) {
+    CF_EXPECT(rename(current_filepath.c_str(), target_filepath.c_str()) == 0,
+              "rename " << current_filepath << " to " << target_filepath
+                        << " failed: " << strerror(errno));
   }
-
-  return true;
+  return target_filepath;
 }
 
 bool RemoveFile(const std::string& file) {
@@ -216,6 +332,10 @@
     // TODO(schuffelen): Return a failing Result instead
     return "";
   }
+  if (in.tellg() == std::ifstream::pos_type(-1)) {
+    PLOG(ERROR) << "Failed to seek on " << file;
+    return "";
+  }
   contents.resize(in.tellg());
   in.seekg(0, std::ios::beg);
   in.read(&contents[0], contents.size());
@@ -296,9 +416,180 @@
 }
 
 bool FileIsSocket(const std::string& path) {
-  struct stat st;
+  struct stat st {};
   return stat(path.c_str(), &st) == 0 && S_ISSOCK(st.st_mode);
 }
 
+int GetDiskUsage(const std::string& path) {
+  Command du_cmd("du");
+  du_cmd.AddParameter("-b");
+  du_cmd.AddParameter("-k");
+  du_cmd.AddParameter("-s");
+  du_cmd.AddParameter(path);
+  SharedFD read_fd;
+  SharedFD write_fd;
+  SharedFD::Pipe(&read_fd, &write_fd);
+  du_cmd.RedirectStdIO(Subprocess::StdIOChannel::kStdOut, write_fd);
+  auto subprocess = du_cmd.Start();
+  std::array<char, 1024> text_output{};
+  const auto bytes_read = read_fd->Read(text_output.data(), text_output.size());
+  CHECK_GT(bytes_read, 0) << "Failed to read from pipe " << strerror(errno);
+  std::move(subprocess).Wait();
+  return atoi(text_output.data()) * 1024;
+}
+
+std::string FindFile(const std::string& path, const std::string& target_name) {
+  std::string ret;
+  WalkDirectory(path,
+                [&ret, &target_name](const std::string& filename) mutable {
+                  if (cpp_basename(filename) == target_name) {
+                    ret = filename;
+                  }
+                  return true;
+                });
+  return ret;
+}
+
+// Recursively enumerate files in |dir|, and invoke the callback function with
+// path to each file/directory.
+Result<void> WalkDirectory(
+    const std::string& dir,
+    const std::function<bool(const std::string&)>& callback) {
+  const auto files = CF_EXPECT(DirectoryContents(dir));
+  for (const auto& filename : files) {
+    if (filename == "." || filename == "..") {
+      continue;
+    }
+    auto file_path = dir + "/";
+    file_path.append(filename);
+    callback(file_path);
+    if (DirectoryExists(file_path)) {
+      WalkDirectory(file_path, callback);
+    }
+  }
+  return {};
+}
+
+class InotifyWatcher {
+ public:
+  InotifyWatcher(int inotify, const std::string& path, int watch_mode)
+      : inotify_(inotify) {
+    watch_ = inotify_add_watch(inotify_, path.c_str(), watch_mode);
+  }
+  virtual ~InotifyWatcher() { inotify_rm_watch(inotify_, watch_); }
+
+ private:
+  int inotify_;
+  int watch_;
+};
+
+static Result<void> WaitForFileInternal(const std::string& path, int timeoutSec,
+                                        int inotify) {
+  CF_EXPECT_NE(path, "", "Path is empty");
+
+  if (FileExists(path, true)) {
+    return {};
+  }
+
+  const auto targetTime =
+      std::chrono::system_clock::now() + std::chrono::seconds(timeoutSec);
+
+  const auto parentPath = cpp_dirname(path);
+  const auto filename = cpp_basename(path);
+
+  CF_EXPECT(WaitForFile(parentPath, timeoutSec),
+            "Error while waiting for parent directory creation");
+
+  auto watcher = InotifyWatcher(inotify, parentPath.c_str(), IN_CREATE);
+
+  if (FileExists(path, true)) {
+    return {};
+  }
+
+  while (true) {
+    const auto currentTime = std::chrono::system_clock::now();
+
+    if (currentTime >= targetTime) {
+      return CF_ERR("Timed out");
+    }
+
+    const auto timeRemain =
+        std::chrono::duration_cast<std::chrono::microseconds>(targetTime -
+                                                              currentTime)
+            .count();
+    const auto secondInUsec =
+        std::chrono::microseconds(std::chrono::seconds(1)).count();
+    struct timeval timeout;
+
+    timeout.tv_sec = timeRemain / secondInUsec;
+    timeout.tv_usec = timeRemain % secondInUsec;
+
+    fd_set readfds;
+
+    FD_ZERO(&readfds);
+    FD_SET(inotify, &readfds);
+
+    auto ret = select(inotify + 1, &readfds, NULL, NULL, &timeout);
+
+    if (ret == 0) {
+      return CF_ERR("select() timed out");
+    } else if (ret < 0) {
+      return CF_ERRNO("select() failed");
+    }
+
+    auto names = GetCreatedFileListFromInotifyFd(inotify);
+
+    CF_EXPECT(names.size() > 0,
+              "Failed to get names from inotify " << strerror(errno));
+
+    if (Contains(names, filename)) {
+      return {};
+    }
+  }
+
+  return CF_ERR("This shouldn't be executed");
+}
+
+auto WaitForFile(const std::string& path, int timeoutSec)
+    -> decltype(WaitForFileInternal(path, timeoutSec, 0)) {
+  auto inotify = inotify_init1(IN_CLOEXEC);
+
+  ScopeGuard close_inotify([inotify]() { close(inotify); });
+
+  CF_EXPECT(WaitForFileInternal(path, timeoutSec, inotify));
+
+  return {};
+}
+
+Result<void> WaitForUnixSocket(const std::string& path, int timeoutSec) {
+  const auto targetTime =
+      std::chrono::system_clock::now() + std::chrono::seconds(timeoutSec);
+
+  CF_EXPECT(WaitForFile(path, timeoutSec),
+            "Waiting for socket path creation failed");
+  CF_EXPECT(FileIsSocket(path), "Specified path is not a socket");
+
+  while (true) {
+    const auto currentTime = std::chrono::system_clock::now();
+
+    if (currentTime >= targetTime) {
+      return CF_ERR("Timed out");
+    }
+
+    const auto timeRemain = std::chrono::duration_cast<std::chrono::seconds>(
+                                targetTime - currentTime)
+                                .count();
+    auto testConnect =
+        SharedFD::SocketLocalClient(path, false, SOCK_STREAM, timeRemain);
+
+    if (testConnect->IsOpen()) {
+      return {};
+    }
+
+    sched_yield();
+  }
+
+  return CF_ERR("This shouldn't be executed");
+}
 
 }  // namespace cuttlefish
diff --git a/common/libs/utils/files.h b/common/libs/utils/files.h
index fbe6b70..b9b0eca 100644
--- a/common/libs/utils/files.h
+++ b/common/libs/utils/files.h
@@ -15,6 +15,7 @@
  */
 #pragma once
 
+#include <sys/stat.h>
 #include <sys/types.h>
 
 #include <chrono>
@@ -26,14 +27,22 @@
 namespace cuttlefish {
 bool FileExists(const std::string& path, bool follow_symlinks = true);
 bool FileHasContent(const std::string& path);
-std::vector<std::string> DirectoryContents(const std::string& path);
+Result<std::vector<std::string>> DirectoryContents(const std::string& path);
 bool DirectoryExists(const std::string& path, bool follow_symlinks = true);
-Result<void> EnsureDirectoryExists(const std::string& directory_path);
+Result<void> EnsureDirectoryExists(const std::string& directory_path,
+                                   const mode_t mode = S_IRWXU | S_IRWXG |
+                                                       S_IROTH | S_IXOTH,
+                                   const std::string& group_name = "");
+Result<void> ChangeGroup(const std::string& path,
+                         const std::string& group_name);
+bool CanAccess(const std::string& path, const int mode);
 bool IsDirectoryEmpty(const std::string& path);
 bool RecursivelyRemoveDirectory(const std::string& path);
+bool Copy(const std::string& from, const std::string& to);
 off_t FileSize(const std::string& path);
 bool RemoveFile(const std::string& file);
-bool RenameFile(const std::string& old_name, const std::string& new_name);
+Result<std::string> RenameFile(const std::string& current_filepath,
+                               const std::string& target_filepath);
 std::string ReadFile(const std::string& file);
 bool MakeFileExecutable(const std::string& path);
 std::chrono::system_clock::time_point FileModificationTime(const std::string& path);
@@ -41,6 +50,9 @@
 std::string cpp_basename(const std::string& str);
 // Whether a file exists and is a unix socket
 bool FileIsSocket(const std::string& path);
+// Get disk usage of a path. If this path is a directory, disk usage will
+// account for all files under this folder (recursively).
+int GetDiskUsage(const std::string& path);
 
 // The returned value may contain .. or . if these are present in the path
 // argument.
@@ -54,4 +66,16 @@
   off_t disk_size;
 };
 FileSizes SparseFileSizes(const std::string& path);
+
+// Find file with name |target_name| under directory |path|, return path to
+// the found file (if any)
+std::string FindFile(const std::string& path, const std::string& target_name);
+
+Result<void> WalkDirectory(
+    const std::string& dir,
+    const std::function<bool(const std::string&)>& callback);
+
+Result<void> WaitForFile(const std::string& path, int timeoutSec);
+Result<void> WaitForUnixSocket(const std::string& path, int timeoutSec);
+
 }  // namespace cuttlefish
diff --git a/common/libs/utils/flag_parser.cpp b/common/libs/utils/flag_parser.cpp
index 76aeced..d0b9bdd 100644
--- a/common/libs/utils/flag_parser.cpp
+++ b/common/libs/utils/flag_parser.cpp
@@ -16,20 +16,16 @@
 
 #include "common/libs/utils/flag_parser.h"
 
+#include <algorithm>
 #include <cerrno>
-#include <cstdint>
 #include <cstdlib>
 #include <cstring>
-#include <functional>
-#include <optional>
-#include <ostream>
-#include <string>
+#include <iostream>
 #include <string_view>
 #include <type_traits>
 #include <unordered_map>
 #include <unordered_set>
 #include <utility>
-#include <vector>
 
 #include <android-base/logging.h>
 #include <android-base/strings.h>
@@ -129,6 +125,10 @@
   return android::base::StartsWith(next_arg, "-");
 }
 
+std::string BoolToString(bool val) {
+  return val ? "true" : "false";
+}
+
 Flag::FlagProcessResult Flag::Process(
     const std::string& arg, const std::optional<std::string>& next_arg) const {
   if (!setter_ && aliases_.size() > 0) {
@@ -272,14 +272,24 @@
         HasAlias({FlagAliasMode::kFlagPrefix, "--" + name + "="}) &&
         HasAlias({FlagAliasMode::kFlagConsumesFollowing, "-" + name}) &&
         HasAlias({FlagAliasMode::kFlagConsumesFollowing, "--" + name});
-    if (has_bool_aliases && has_other_aliases) {
-      LOG(ERROR) << "Expected exactly one of has_bool_aliases and "
-                 << "has_other_aliases, got both for \"" << name << "\".";
+    bool has_help_aliases = HasAlias({FlagAliasMode::kFlagExact, "-help"}) &&
+                            HasAlias({FlagAliasMode::kFlagExact, "--help"});
+    std::vector<bool> has_aliases = {has_bool_aliases, has_other_aliases,
+                                     has_help_aliases};
+    const auto true_count =
+        std::count(has_aliases.cbegin(), has_aliases.cend(), true);
+    if (true_count > 1) {
+      LOG(ERROR) << "Expected exactly one of has_bool_aliases, "
+                 << "has_other_aliases, and has_help_aliases, got "
+                 << true_count << " for \"" << name << "\".";
       return false;
-    } else if (!has_bool_aliases && !has_other_aliases) {
+    }
+    if (true_count == 0) {
       continue;
     }
     found_alias = true;
+    std::string type_str =
+        (has_bool_aliases || has_help_aliases) ? "bool" : "string";
     // Lifted from external/gflags/src/gflags_reporting.cc:DescribeOneFlagInXML
     out << "<flag>\n";
     out << "  <file>file.cc</file>\n";
@@ -289,7 +299,7 @@
     auto value = getter_ ? XmlEscape((*getter_)()) : std::string{""};
     out << "  <default>" << value << "</default>\n";
     out << "  <current>" << value << "</current>\n";
-    out << "  <type>" << (has_bool_aliases ? "bool" : "string") << "</type>\n";
+    out << "  <type>" << type_str << "</type>\n";
     out << "</flag>\n";
   }
   return found_alias;
@@ -366,6 +376,67 @@
       .Setter(setter);
 }
 
+static bool GflagsCompatBoolFlagSetter(const std::string& name, bool& value,
+                                       const FlagMatch& match) {
+  const auto& key = match.key;
+  if (key == "-" + name || key == "--" + name) {
+    value = true;
+    return true;
+  } else if (key == "-no" + name || key == "--no" + name) {
+    value = false;
+    return true;
+  } else if (key == "-" + name + "=" || key == "--" + name + "=") {
+    if (match.value == "true") {
+      value = true;
+      return true;
+    } else if (match.value == "false") {
+      value = false;
+      return true;
+    } else {
+      LOG(ERROR) << "Unexpected boolean value \"" << match.value << "\""
+                 << " for \"" << name << "\"";
+      return false;
+    }
+  }
+  LOG(ERROR) << "Unexpected key \"" << match.key << "\""
+             << " for \"" << name << "\"";
+  return false;
+}
+
+static Flag GflagsCompatBoolFlagBase(const std::string& name) {
+  return Flag()
+      .Alias({FlagAliasMode::kFlagPrefix, "-" + name + "="})
+      .Alias({FlagAliasMode::kFlagPrefix, "--" + name + "="})
+      .Alias({FlagAliasMode::kFlagExact, "-" + name})
+      .Alias({FlagAliasMode::kFlagExact, "--" + name})
+      .Alias({FlagAliasMode::kFlagExact, "-no" + name})
+      .Alias({FlagAliasMode::kFlagExact, "--no" + name});
+}
+
+Flag HelpXmlFlag(const std::vector<Flag>& flags, std::ostream& out, bool& value,
+                 const std::string& text) {
+  const std::string name = "helpxml";
+  auto setter = [name, &out, &value, &text, &flags](const FlagMatch& match) {
+    bool print_xml = false;
+    auto parse_success = GflagsCompatBoolFlagSetter(name, print_xml, match);
+    if (!parse_success) {
+      return false;
+    }
+    if (!print_xml) {
+      return true;
+    }
+    if (!text.empty()) {
+      out << text << std::endl;
+    }
+    value = print_xml;
+    out << "<?xml version=\"1.0\"?>" << std::endl << "<AllFlags>" << std::endl;
+    WriteGflagsCompatXml(flags, out);
+    out << "</AllFlags>" << std::flush;
+    return false;
+  };
+  return GflagsCompatBoolFlagBase(name).Setter(setter);
+}
+
 Flag InvalidFlagGuard() {
   return Flag()
       .UnvalidatedAlias({FlagAliasMode::kFlagPrefix, "-"})
@@ -447,38 +518,10 @@
 }
 
 Flag GflagsCompatFlag(const std::string& name, bool& value) {
-  return Flag()
-      .Alias({FlagAliasMode::kFlagPrefix, "-" + name + "="})
-      .Alias({FlagAliasMode::kFlagPrefix, "--" + name + "="})
-      .Alias({FlagAliasMode::kFlagExact, "-" + name})
-      .Alias({FlagAliasMode::kFlagExact, "--" + name})
-      .Alias({FlagAliasMode::kFlagExact, "-no" + name})
-      .Alias({FlagAliasMode::kFlagExact, "--no" + name})
+  return GflagsCompatBoolFlagBase(name)
       .Getter([&value]() { return value ? "true" : "false"; })
       .Setter([name, &value](const FlagMatch& match) {
-        const auto& key = match.key;
-        if (key == "-" + name || key == "--" + name) {
-          value = true;
-          return true;
-        } else if (key == "-no" + name || key == "--no" + name) {
-          value = false;
-          return true;
-        } else if (key == "-" + name + "=" || key == "--" + name + "=") {
-          if (match.value == "true") {
-            value = true;
-            return true;
-          } else if (match.value == "false") {
-            value = false;
-            return true;
-          } else {
-            LOG(ERROR) << "Unexpected boolean value \"" << match.value << "\""
-                       << " for \"" << name << "\"";
-            return false;
-          }
-        }
-        LOG(ERROR) << "Unexpected key \"" << match.key << "\""
-                   << " for \"" << name << "\"";
-        return false;
+        return GflagsCompatBoolFlagSetter(name, value, match);
       });
 };
 
diff --git a/common/libs/utils/flag_parser.h b/common/libs/utils/flag_parser.h
index b45c544..4c46587 100644
--- a/common/libs/utils/flag_parser.h
+++ b/common/libs/utils/flag_parser.h
@@ -127,6 +127,8 @@
 
 std::vector<std::string> ArgsToVec(int argc, char** argv);
 
+std::string BoolToString(bool val);
+
 /* Handles a list of flags. Flags are matched in the order given in case two
  * flags match the same argument. Matched flags are removed, leaving only
  * unmatched arguments. */
@@ -141,6 +143,11 @@
 
 /* If a "-help" or "--help" flag is present, prints all the flags and fails. */
 Flag HelpFlag(const std::vector<Flag>& flags, const std::string& text = "");
+
+/* If a "-helpxml" is present, prints all the flags in XML and fails. */
+Flag HelpXmlFlag(const std::vector<Flag>& flags, std::ostream&, bool& value,
+                 const std::string& text = "");
+
 /* Catches unrecognized arguments that begin with `-`, and errors out. This
  * effectively denies unknown flags. */
 Flag InvalidFlagGuard();
diff --git a/common/libs/utils/flags_validator.cpp b/common/libs/utils/flags_validator.cpp
new file mode 100644
index 0000000..d7c5d01
--- /dev/null
+++ b/common/libs/utils/flags_validator.cpp
@@ -0,0 +1,30 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "common/libs/utils/flags_validator.h"
+
+namespace cuttlefish {
+Result<void> ValidateStupWizardMode(const std::string& setupwizard_mode) {
+  // One of DISABLED,OPTIONAL,REQUIRED
+  bool result = setupwizard_mode == "DISABLED" ||
+                setupwizard_mode == "OPTIONAL" ||
+                setupwizard_mode == "REQUIRED";
+
+  CF_EXPECT(result == true, "Invalid value for setupwizard_mode config");
+  return {};
+}
+
+}  // namespace cuttlefish
diff --git a/common/libs/utils/flags_validator.h b/common/libs/utils/flags_validator.h
new file mode 100644
index 0000000..47de231
--- /dev/null
+++ b/common/libs/utils/flags_validator.h
@@ -0,0 +1,25 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#pragma once
+
+#include <string>
+
+#include "common/libs/utils/result.h"
+
+namespace cuttlefish {
+
+Result<void> ValidateStupWizardMode(const std::string &setupwizard_mode);
+}  // namespace cuttlefish
diff --git a/common/libs/utils/inotify.cpp b/common/libs/utils/inotify.cpp
new file mode 100644
index 0000000..ee2426f
--- /dev/null
+++ b/common/libs/utils/inotify.cpp
@@ -0,0 +1,64 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <sys/inotify.h>
+#include <unistd.h>
+#include <string>
+#include <vector>
+
+#include <android-base/logging.h>
+
+namespace cuttlefish {
+
+#define INOTIFY_MAX_EVENT_SIZE (sizeof(struct inotify_event) + NAME_MAX + 1)
+std::vector<std::string> GetCreatedFileListFromInotifyFd(int fd) {
+  char event_readout[INOTIFY_MAX_EVENT_SIZE];
+  int bytes_parsed = 0;
+  std::vector<std::string> result;
+  // Each successful read can contain one or more of inotify_event events
+  // Note: read() on inotify returns 'whole' events, will never partially
+  // populate the buffer.
+  int event_read_out_length = read(fd, event_readout, INOTIFY_MAX_EVENT_SIZE);
+
+  if (event_read_out_length == -1) {
+    LOG(ERROR) << __FUNCTION__
+               << ": Couldn't read out inotify event due to error: '"
+               << strerror(errno) << "' (" << errno << ")";
+    return std::vector<std::string>();
+  }
+
+  while (bytes_parsed < event_read_out_length) {
+    struct inotify_event* event =
+        reinterpret_cast<inotify_event*>(event_readout + bytes_parsed);
+    bytes_parsed += sizeof(struct inotify_event) + event->len;
+
+    // No file name was present
+    if (event->len == 0) {
+      LOG(ERROR) << __FUNCTION__ << ": inotify event didn't contain filename";
+      continue;
+    }
+    if (!(event->mask & IN_CREATE)) {
+      LOG(ERROR) << __FUNCTION__
+                 << ": inotify event didn't pertain to file creation";
+      continue;
+    }
+    result.push_back(std::string(event->name));
+  }
+
+  return result;
+}
+
+}  // namespace cuttlefish
diff --git a/common/libs/utils/inotify.h b/common/libs/utils/inotify.h
new file mode 100644
index 0000000..2808537
--- /dev/null
+++ b/common/libs/utils/inotify.h
@@ -0,0 +1,22 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#pragma once
+
+namespace cuttlefish {
+
+std::vector<std::string> GetCreatedFileListFromInotifyFd(int fd);
+
+}  // namespace cuttlefish
diff --git a/common/libs/utils/json.cpp b/common/libs/utils/json.cpp
new file mode 100644
index 0000000..13aabf4
--- /dev/null
+++ b/common/libs/utils/json.cpp
@@ -0,0 +1,31 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "common/libs/utils/json.h"
+
+namespace cuttlefish {
+
+Result<Json::Value> ParseJson(std::string_view input) {
+  Json::Value root;
+  JSONCPP_STRING err;
+  Json::CharReaderBuilder builder;
+  const std::unique_ptr<Json::CharReader> reader(builder.newCharReader());
+  auto begin = input.data();
+  auto end = begin + input.length();
+  CF_EXPECT(reader->parse(begin, end, &root, &err), err);
+  return root;
+}
+
+}  // namespace cuttlefish
diff --git a/common/libs/utils/json.h b/common/libs/utils/json.h
new file mode 100644
index 0000000..db9581c
--- /dev/null
+++ b/common/libs/utils/json.h
@@ -0,0 +1,28 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#pragma once
+
+#include <string_view>
+
+#include <json/json.h>
+
+#include "common/libs/utils/result.h"
+
+namespace cuttlefish {
+
+Result<Json::Value> ParseJson(std::string_view input);
+
+}  // namespace cuttlefish
diff --git a/common/libs/utils/network.cpp b/common/libs/utils/network.cpp
index 9727584..7d45a49 100644
--- a/common/libs/utils/network.cpp
+++ b/common/libs/utils/network.cpp
@@ -25,6 +25,7 @@
 
 #include <endian.h>
 #include <fcntl.h>
+#include <ifaddrs.h>
 #include <linux/if_ether.h>
 #include <linux/types.h>
 #include <net/ethernet.h>
@@ -38,6 +39,7 @@
 #include <ios>
 #include <memory>
 #include <ostream>
+#include <iomanip>
 #include <set>
 #include <string>
 #include <utility>
@@ -95,8 +97,39 @@
   return ParseAddress(address, ".", 4, 10, ip);
 }
 
+/**
+ * Generates a MAC address of the form:
+ *   00:1a:11:<type>:cf:<index>
+ * where:
+ *   <type>  distinguishes the interface kind (e0, e1, etc)
+ *   <index> is the per-instance index
+*/
+void GenerateMacForInstance(int index, uint8_t type, std::uint8_t out[6]) {
+  // the first octet must be even
+  out[0] = 0x00;
+  out[1] = 0x1a;
+  out[2] = 0x11;
+  out[3] = type;
+  out[4] = 0xcf;
+  out[5] = static_cast<std::uint8_t>(index);
+}
+
 }  // namespace
 
+bool NetworkInterfaceExists(const std::string& interface_name) {
+  struct ifaddrs *ifa_list{}, *ifa{};
+  bool ret = false;
+  getifaddrs(&ifa_list);
+  for (ifa = ifa_list; ifa; ifa = ifa->ifa_next) {
+    if (strcmp(ifa->ifa_name, interface_name.c_str()) == 0) {
+      ret = true;
+      break;
+    }
+  }
+  freeifaddrs(ifa_list);
+  return ret;
+}
+
 SharedFD OpenTapInterface(const std::string& interface_name) {
   constexpr auto TUNTAP_DEV = "/dev/net/tun";
 
@@ -327,4 +360,79 @@
   return success;
 }
 
+std::string MacAddressToString(const std::uint8_t mac[6]) {
+  std::stringstream result;
+
+  result << std::hex;
+  for (int i = 0; i < 6; i++) {
+    result << std::setfill('0') << std::setw(2)
+           << static_cast<int>(mac[i]);
+
+    if (i < 5) {
+      result << ':';
+    }
+  }
+
+  return result.str();
+}
+
+std::string Ipv6ToString(const std::uint8_t ip[16]) {
+  std::stringstream result;
+
+  result << std::hex;
+  for (int i = 0; i < 16; i = i + 2) {
+    result << std::setfill('0') << std::setw(2)
+           << static_cast<int>(ip[i])
+           << std::setfill('0') << std::setw(2)
+           << static_cast<int>(ip[i + 1]);
+
+    if (i < 14) {
+      result << ':';
+    }
+  }
+
+  return result.str();
+}
+
+void GenerateMobileMacForInstance(int index, std::uint8_t out[6]) {
+  GenerateMacForInstance(index, 0xe0, out);
+}
+
+void GenerateEthMacForInstance(int index, std::uint8_t out[6]) {
+  GenerateMacForInstance(index, 0xe1, out);
+}
+
+void GenerateWifiMacForInstance(int index, std::uint8_t out[6]) {
+  GenerateMacForInstance(index, 0xe2, out);
+}
+
+/**
+ * Linux uses the MAC to generate a link-local IPv6 address as follows:
+ *
+ * 1. Get the MAC address (for example 00:1a:11:ee:cf:01)
+ * 2. Insert ff:fe as the 4th and 5th octets (00:1a:11 :ff:fe: ee:cf:01)
+ * 3. Flip the 2nd bit of the first octet (02: 1a:11:ff:fe:ee:cf:01)
+ * 4. Use IPv6 format (021a:11ff:feee:cf01)
+ * 5. Add the fe80:: prefix (fe80::021a:11ff:feee:cf01 or fe80:0000:0000:0000:021a:11ff:feee:cf01)
+*/
+void GenerateCorrespondingIpv6ForMac(const std::uint8_t mac[6], std::uint8_t out[16]) {
+  out[0] = 0xfe;
+  out[1] = 0x80;
+
+  // 2 - 7 octets are zero
+
+  // need to invert 2th bit of the first octet
+  out[8] = mac[0] ^ (1 << 1);
+  out[9] = mac[1];
+
+  out[10] = mac[2];
+  out[11] = 0xff;
+
+  out[12] = 0xfe;
+  out[13] = mac[3];
+
+  out[14] = mac[4];
+  out[15] = mac[5];
+}
+
 }  // namespace cuttlefish
diff --git a/common/libs/utils/network.h b/common/libs/utils/network.h
index 985476e..72905e6 100644
--- a/common/libs/utils/network.h
+++ b/common/libs/utils/network.h
@@ -24,6 +24,9 @@
 #include "common/libs/fs/shared_fd.h"
 
 namespace cuttlefish {
+// Check network interface with given name exists, such as cvd-ebr.
+bool NetworkInterfaceExists(const std::string& interface_name);
+
 // Creates, or connects to if it already exists, a tap network interface. The
 // user needs CAP_NET_ADMIN to create such interfaces or be the owner to connect
 // to one.
@@ -52,4 +55,12 @@
 
 bool ReleaseDhcpLeases(const std::string& lease_path, SharedFD tap_fd,
                        const std::uint8_t dhcp_server_ip[4]);
+
+void GenerateCorrespondingIpv6ForMac(const std::uint8_t mac[6], std::uint8_t out[16]);
+void GenerateMobileMacForInstance(int index, std::uint8_t out[6]);
+void GenerateEthMacForInstance(int index, std::uint8_t out[6]);
+void GenerateWifiMacForInstance(int index, std::uint8_t out[6]);
+
+std::string MacAddressToString(const std::uint8_t mac[6]);
+std::string Ipv6ToString(const std::uint8_t ip[16]);
 }
diff --git a/common/libs/utils/proc_file_utils.cpp b/common/libs/utils/proc_file_utils.cpp
new file mode 100644
index 0000000..83171af
--- /dev/null
+++ b/common/libs/utils/proc_file_utils.cpp
@@ -0,0 +1,248 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "common/libs/utils/proc_file_utils.h"
+
+#include <sys/stat.h>
+
+#include <regex>
+#include <sstream>
+
+#include <android-base/file.h>
+#include <android-base/parseint.h>
+#include <android-base/strings.h>
+
+#include "common/libs/fs/shared_buf.h"
+#include "common/libs/fs/shared_fd.h"
+#include "common/libs/utils/files.h"
+
+namespace cuttlefish {
+
+// TODO(kwstephenkim): This logic is used broadly, so consider
+// creating a new library.
+template <typename... Args>
+static std::string ConcatToString(Args&&... args) {
+  std::stringstream concatenator;
+  (concatenator << ... << std::forward<Args>(args));
+  return concatenator.str();
+}
+
+static std::string PidDirPath(const pid_t pid) {
+  return ConcatToString(kProcDir, "/", pid);
+}
+
+/* ReadFile does not work for /proc/<pid>/<some files>
+ * ReadFile requires the file size to be known in advance,
+ * which is not the case here.
+ */
+static Result<std::string> ReadAll(const std::string& file_path) {
+  SharedFD fd = SharedFD::Open(file_path, O_RDONLY);
+  CF_EXPECT(fd->IsOpen());
+  // should be good size to read all Envs or Args,
+  // whichever bigger
+  const int buf_size = 1024;
+  std::string output;
+  ssize_t nread = 0;
+  do {
+    std::vector<char> buf(buf_size);
+    nread = ReadExact(fd, buf.data(), buf_size);
+    CF_EXPECT(nread >= 0, "ReadExact returns " << nread);
+    output.append(buf.begin(), buf.end());
+  } while (nread > 0);
+  return output;
+}
+
+/**
+ * Tokenizes the given string, using '\0' as a delimiter
+ *
+ * android::base::Tokenize would mostly work, except its delimiter can't be '\0'.
+ * The /proc/<pid>/environ file has the list of environment variables, delimited
+ * by '\0'. Needs a dedicated tokenizer.
+ *
+ */
+static std::vector<std::string> TokenizeByNullChar(const std::string& input) {
+  if (input.empty()) {
+    return {};
+  }
+  std::vector<std::string> tokens;
+  std::string token;
+  for (int i = 0; i < input.size(); i++) {
+    if (input.at(i) != '\0') {
+      token.append(1, input.at(i));
+    } else {
+      if (token.empty()) {
+        break;
+      }
+      tokens.push_back(token);
+      token.clear();
+    }
+  }
+  if (!token.empty()) {
+    tokens.push_back(token);
+  }
+  return tokens;
+}
+
+Result<std::vector<pid_t>> CollectPids(const uid_t uid) {
+  CF_EXPECT(DirectoryExists(kProcDir));
+  auto subdirs = CF_EXPECT(DirectoryContents(kProcDir));
+  std::regex pid_dir_pattern("[0-9]+");
+  std::vector<pid_t> pids;
+  for (const auto& subdir : subdirs) {
+    if (!std::regex_match(subdir, pid_dir_pattern)) {
+      continue;
+    }
+    int pid;
+    // Shouldn't fail here. If it fails, either the regex or
+    // android::base::ParseInt needs serious fixes
+    CF_EXPECT(android::base::ParseInt(subdir, &pid));
+    struct stat dir_stat_buf;
+    if (::stat(PidDirPath(pid).data(), &dir_stat_buf) != 0) {
+      continue;
+    }
+    if (dir_stat_buf.st_uid != uid) {
+      continue;
+    }
+    // as we collect cuttlefish-related stuff, we want exe to be
+    // shared by the same owner
+    struct stat exe_stat_buf;
+    std::string exe_path = PidDirPath(pid) + "/exe";
+    if (::stat(exe_path.data(), &exe_stat_buf) != 0) {
+      continue;
+    }
+    if (exe_stat_buf.st_uid != uid) {
+      continue;
+    }
+    pids.push_back(pid);
+  }
+  return pids;
+}
+
+Result<std::vector<std::string>> GetCmdArgs(const pid_t pid) {
+  std::string cmdline_file_path = PidDirPath(pid) + "/cmdline";
+  auto owner = CF_EXPECT(OwnerUid(cmdline_file_path));
+  CF_EXPECT(getuid() == owner);
+  std::string contents = CF_EXPECT(ReadAll(cmdline_file_path));
+  return TokenizeByNullChar(contents);
+}
+
+Result<std::string> GetExecutablePath(const pid_t pid) {
+  std::string exec_target_path;
+  std::string proc_exe_path = ConcatToString("/proc/", pid, "/exe");
+  CF_EXPECT(
+      android::base::Readlink(proc_exe_path, std::addressof(exec_target_path)),
+      proc_exe_path << " Should be a symbolic link but it is not.");
+  std::string suffix(" (deleted)");
+  if (android::base::EndsWith(exec_target_path, suffix)) {
+    return exec_target_path.substr(0, exec_target_path.size() - suffix.size());
+  }
+  return exec_target_path;
+}
+
+Result<std::vector<pid_t>> CollectPidsByExecName(const std::string& exec_name,
+                                                 const uid_t uid) {
+  CF_EXPECT(cpp_basename(exec_name) == exec_name);
+  auto input_pids = CF_EXPECT(CollectPids(uid));
+  std::vector<pid_t> output_pids;
+  for (const auto pid : input_pids) {
+    auto pid_exec_path = GetExecutablePath(pid);
+    if (!pid_exec_path.ok()) {
+      LOG(ERROR) << pid_exec_path.error().Trace();
+      continue;
+    }
+    if (cpp_basename(*pid_exec_path) == exec_name) {
+      output_pids.push_back(pid);
+    }
+  }
+  return output_pids;
+}
+
+Result<std::vector<pid_t>> CollectPidsByExecPath(const std::string& exec_path,
+                                                 const uid_t uid) {
+  auto input_pids = CF_EXPECT(CollectPids(uid));
+  std::vector<pid_t> output_pids;
+  for (const auto pid : input_pids) {
+    auto pid_exec_path = GetExecutablePath(pid);
+    if (!pid_exec_path.ok()) {
+      continue;
+    }
+    if (*pid_exec_path == exec_path) {
+      output_pids.push_back(pid);
+    }
+  }
+  return output_pids;
+}
+
+Result<std::vector<pid_t>> CollectPidsByArgv0(const std::string& expected_argv0,
+                                              const uid_t uid) {
+  auto input_pids = CF_EXPECT(CollectPids(uid));
+  std::vector<pid_t> output_pids;
+  for (const auto pid : input_pids) {
+    auto argv_result = GetCmdArgs(pid);
+    if (!argv_result.ok()) {
+      continue;
+    }
+    if (argv_result->empty()) {
+      continue;
+    }
+    if (argv_result->front() == expected_argv0) {
+      output_pids.push_back(pid);
+    }
+  }
+  return output_pids;
+}
+
+Result<uid_t> OwnerUid(const pid_t pid) {
+  auto proc_pid_path = PidDirPath(pid);
+  auto uid = CF_EXPECT(OwnerUid(proc_pid_path));
+  return uid;
+}
+
+Result<uid_t> OwnerUid(const std::string& path) {
+  struct stat buf;
+  CF_EXPECT_EQ(::stat(path.data(), &buf), 0);
+  return buf.st_uid;
+}
+
+Result<std::unordered_map<std::string, std::string>> GetEnvs(const pid_t pid) {
+  std::string environ_file_path = PidDirPath(pid) + "/environ";
+  auto owner = CF_EXPECT(OwnerUid(environ_file_path));
+  CF_EXPECT(getuid() == owner, "Owned by another user of uid" << owner);
+  std::string environ = CF_EXPECT(ReadAll(environ_file_path));
+  std::vector<std::string> lines = TokenizeByNullChar(environ);
+  // now, each line looks like:  HOME=/home/user
+  std::unordered_map<std::string, std::string> envs;
+  for (const auto& line : lines) {
+    auto pos = line.find_first_of('=');
+    if (pos == std::string::npos) {
+      LOG(ERROR) << "Found an invalid env: " << line << " and ignored.";
+      continue;
+    }
+    std::string key = line.substr(0, pos);
+    std::string value = line.substr(pos + 1);
+    envs[key] = value;
+  }
+  return envs;
+}
+
+Result<ProcInfo> ExtractProcInfo(const pid_t pid) {
+  return ProcInfo{.pid_ = pid,
+                  .actual_exec_path_ = CF_EXPECT(GetExecutablePath(pid)),
+                  .envs_ = CF_EXPECT(GetEnvs(pid)),
+                  .args_ = CF_EXPECT(GetCmdArgs(pid))};
+}
+
+}  // namespace cuttlefish
diff --git a/common/libs/utils/proc_file_utils.h b/common/libs/utils/proc_file_utils.h
new file mode 100644
index 0000000..3fd5c08
--- /dev/null
+++ b/common/libs/utils/proc_file_utils.h
@@ -0,0 +1,87 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <sys/types.h>
+#include <unistd.h>
+
+#include <string>
+#include <unordered_map>
+#include <vector>
+
+#include "common/libs/utils/result.h"
+
+/**
+ * @file Utility functions to retrieve information from proc filesystem
+ *
+ * As of now, the major consumer is cvd.
+ */
+namespace cuttlefish {
+
+static constexpr char kProcDir[] = "/proc";
+
+struct ProcInfo {
+  pid_t pid_;
+  std::string actual_exec_path_;
+  std::unordered_map<std::string, std::string> envs_;
+  std::vector<std::string> args_;
+};
+Result<ProcInfo> ExtractProcInfo(const pid_t pid);
+
+// collects all pids whose owner is uid
+Result<std::vector<pid_t>> CollectPids(const uid_t uid = getuid());
+
+/* collects all pids that meet the following:
+ *
+ * 1. Belongs to the uid
+ * 2. cpp_basename(readlink(/proc/<pid>/exe)) == exec_name
+ *
+ */
+Result<std::vector<pid_t>> CollectPidsByExecName(const std::string& exec_name,
+                                                 const uid_t uid = getuid());
+
+/* collects all pids that meet the following:
+ *
+ * 1. Belongs to the uid
+ * 2. readlink(/proc/<pid>/exe) == exec_name
+ *
+ */
+Result<std::vector<pid_t>> CollectPidsByExecPath(const std::string& exec_path,
+                                                 const uid_t uid = getuid());
+
+/**
+ * When argv[0] != exec_path, collects PIDs based on argv[0]
+ *
+ */
+Result<std::vector<pid_t>> CollectPidsByArgv0(const std::string& expected_argv0,
+                                              const uid_t uid = getuid());
+
+Result<uid_t> OwnerUid(const pid_t pid);
+// Sometimes, files under /proc/<pid> are owned by a different user,
+// e.g. /proc/<pid>/exe
+Result<uid_t> OwnerUid(const std::string& file_path);
+
+// retrieves command line args for the pid
+Result<std::vector<std::string>> GetCmdArgs(const pid_t pid);
+
+// retrieves the path to the executable file used for the pid
+Result<std::string> GetExecutablePath(const pid_t pid);
+
+// retrieves the environment variables of the process, pid
+Result<std::unordered_map<std::string, std::string>> GetEnvs(const pid_t pid);
+
+}  // namespace cuttlefish
diff --git a/common/libs/utils/proc_file_utils_test.cpp b/common/libs/utils/proc_file_utils_test.cpp
new file mode 100644
index 0000000..94765ba
--- /dev/null
+++ b/common/libs/utils/proc_file_utils_test.cpp
@@ -0,0 +1,46 @@
+//
+// Copyright (C) 2023 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include <sys/stat.h>
+#include <unistd.h>
+
+#include <string>
+#include <vector>
+
+#include <gtest/gtest.h>
+
+#include "common/libs/utils/contains.h"
+#include "common/libs/utils/proc_file_utils.h"
+
+namespace cuttlefish {
+
+TEST(ProcFileUid, SelfUidTest) {
+  auto my_pid = getpid();
+  auto login_uid_of_my_pid = OwnerUid(my_pid);
+
+  ASSERT_TRUE(login_uid_of_my_pid.ok()) << login_uid_of_my_pid.error().Trace();
+  ASSERT_EQ(getuid(), *login_uid_of_my_pid);
+}
+
+TEST(ProcFilePid, CurrentPidCollected) {
+  auto pids_result = CollectPids(getuid());
+  auto this_pid = getpid();
+
+  // verify that the pids returned are really owned by getuid()
+  ASSERT_TRUE(pids_result.ok());
+  ASSERT_TRUE(Contains(*pids_result, this_pid));
+}
+
+}  // namespace cuttlefish
diff --git a/common/libs/utils/result.h b/common/libs/utils/result.h
index 2b732e4..531589a 100644
--- a/common/libs/utils/result.h
+++ b/common/libs/utils/result.h
@@ -23,11 +23,132 @@
 
 namespace cuttlefish {
 
-using android::base::Result;
+class StackTraceError;
 
-#define CF_ERR_MSG()                             \
-  "  at " << __FILE__ << ":" << __LINE__ << "\n" \
-          << "  in " << __PRETTY_FUNCTION__
+class StackTraceEntry {
+ public:
+  StackTraceEntry(std::string file, size_t line, std::string pretty_function)
+      : file_(std::move(file)),
+        line_(line),
+        pretty_function_(std::move(pretty_function)) {}
+
+  StackTraceEntry(std::string file, size_t line, std::string pretty_function,
+                  std::string expression)
+      : file_(std::move(file)),
+        line_(line),
+        pretty_function_(std::move(pretty_function)),
+        expression_(std::move(expression)) {}
+
+  StackTraceEntry(const StackTraceEntry& other)
+      : file_(other.file_),
+        line_(other.line_),
+        pretty_function_(other.pretty_function_),
+        expression_(other.expression_),
+        message_(other.message_.str()) {}
+
+  StackTraceEntry(StackTraceEntry&&) = default;
+  StackTraceEntry& operator=(const StackTraceEntry& other) {
+    file_ = other.file_;
+    line_ = other.line_;
+    pretty_function_ = other.pretty_function_;
+    expression_ = other.expression_;
+    message_.str(other.message_.str());
+    return *this;
+  }
+  StackTraceEntry& operator=(StackTraceEntry&&) = default;
+
+  template <typename T>
+  StackTraceEntry& operator<<(T&& message_ext) & {
+    message_ << std::forward<T>(message_ext);
+    return *this;
+  }
+  template <typename T>
+  StackTraceEntry operator<<(T&& message_ext) && {
+    message_ << std::forward<T>(message_ext);
+    return std::move(*this);
+  }
+
+  operator StackTraceError() &&;
+  template <typename T>
+  operator android::base::expected<T, StackTraceError>() &&;
+
+  bool HasMessage() const { return !message_.str().empty(); }
+
+  void Write(std::ostream& stream) const { stream << message_.str(); }
+  void WriteVerbose(std::ostream& stream) const {
+    auto str = message_.str();
+    if (str.empty()) {
+      stream << "Failure\n";
+    } else {
+      stream << message_.str() << "\n";
+    }
+    stream << " at " << file_ << ":" << line_ << "\n";
+    stream << " in " << pretty_function_;
+    if (!expression_.empty()) {
+      stream << " for CF_EXPECT(" << expression_ << ")\n";
+    }
+  }
+
+ private:
+  std::string file_;
+  size_t line_;
+  std::string pretty_function_;
+  std::string expression_;
+  std::stringstream message_;
+};
+
+#define CF_STACK_TRACE_ENTRY(expression) \
+  StackTraceEntry(__FILE__, __LINE__, __PRETTY_FUNCTION__, expression)
+
+class StackTraceError {
+ public:
+  StackTraceError& PushEntry(StackTraceEntry entry) & {
+    stack_.emplace_back(std::move(entry));
+    return *this;
+  }
+  StackTraceError PushEntry(StackTraceEntry entry) && {
+    stack_.emplace_back(std::move(entry));
+    return std::move(*this);
+  }
+  const std::vector<StackTraceEntry>& Stack() const { return stack_; }
+
+  std::string Message() const {
+    std::stringstream writer;
+    for (const auto& entry : stack_) {
+      entry.Write(writer);
+    }
+    return writer.str();
+  }
+
+  std::string Trace() const {
+    std::stringstream writer;
+    for (const auto& entry : stack_) {
+      entry.WriteVerbose(writer);
+    }
+    return writer.str();
+  }
+
+  template <typename T>
+  operator android::base::expected<T, StackTraceError>() && {
+    return android::base::unexpected(std::move(*this));
+  }
+
+ private:
+  std::vector<StackTraceEntry> stack_;
+};
+
+inline StackTraceEntry::operator StackTraceError() && {
+  return StackTraceError().PushEntry(std::move(*this));
+}
+
+template <typename T>
+inline StackTraceEntry::operator android::base::expected<T,
+                                                         StackTraceError>() && {
+  return android::base::unexpected(std::move(*this));
+}
+
+template <typename T>
+using Result = android::base::expected<T, StackTraceError>;
 
 /**
  * Error return macro that includes the location in the file in the error
@@ -47,10 +168,8 @@
  *       at path/to/file.cpp:50
  *       in Result<std::string> MyFunction()
  */
-#define CF_ERR(MSG) android::base::Error() << MSG << "\n" << CF_ERR_MSG()
-#define CF_ERRNO(MSG)                        \
-  android::base::ErrnoError() << MSG << "\n" \
-                              << CF_ERR_MSG() << "\n  with errno " << errno
+#define CF_ERR(MSG) (CF_STACK_TRACE_ENTRY("") << MSG)
+#define CF_ERRNO(MSG) (CF_STACK_TRACE_ENTRY("") << MSG)
 
 template <typename T>
 T OutcomeDereference(std::optional<T>&& value) {
@@ -90,13 +209,11 @@
   return value.ok();
 }
 
-inline auto ErrorFromType(bool) {
-  return (android::base::Error() << "Received `false`").str();
-}
+inline auto ErrorFromType(bool) { return StackTraceError(); }
 
 template <typename T>
 inline auto ErrorFromType(std::optional<T>) {
-  return (android::base::Error() << "Received empty optional").str();
+  return StackTraceError();
 }
 
 template <typename T>
@@ -111,20 +228,20 @@
 
 #define CF_EXPECT_OVERLOAD(_1, _2, NAME, ...) NAME
 
-#define CF_EXPECT2(RESULT, MSG)                                  \
-  ({                                                             \
-    decltype(RESULT)&& macro_intermediate_result = RESULT;       \
-    if (!TypeIsSuccess(macro_intermediate_result)) {             \
-      return android::base::Error()                              \
-             << ErrorFromType(macro_intermediate_result) << "\n" \
-             << MSG << "\n"                                      \
-             << CF_ERR_MSG() << "\n"                             \
-             << "  for CF_EXPECT(" << #RESULT << ")";            \
-    };                                                           \
-    OutcomeDereference(std::move(macro_intermediate_result));    \
+#define CF_EXPECT2(RESULT, MSG)                               \
+  ({                                                          \
+    decltype(RESULT)&& macro_intermediate_result = RESULT;    \
+    if (!TypeIsSuccess(macro_intermediate_result)) {          \
+      auto current_entry = CF_STACK_TRACE_ENTRY(#RESULT);     \
+      current_entry << MSG;                                   \
+      auto error = ErrorFromType(macro_intermediate_result);  \
+      error.PushEntry(std::move(current_entry));              \
+      return error;                                           \
+    };                                                        \
+    OutcomeDereference(std::move(macro_intermediate_result)); \
   })
 
-#define CF_EXPECT1(RESULT) CF_EXPECT2(RESULT, "Received error")
+#define CF_EXPECT1(RESULT) CF_EXPECT2(RESULT, "")
 
 /**
  * Error propagation macro that can be used as an expression.
@@ -165,4 +282,47 @@
 #define CF_EXPECT(...) \
   CF_EXPECT_OVERLOAD(__VA_ARGS__, CF_EXPECT2, CF_EXPECT1)(__VA_ARGS__)
 
+#define CF_COMPARE_EXPECT4(COMPARE_OP, LHS_RESULT, RHS_RESULT, MSG)         \
+  ({                                                                        \
+    auto&& lhs_macro_intermediate_result = LHS_RESULT;                      \
+    auto&& rhs_macro_intermediate_result = RHS_RESULT;                      \
+    bool comparison_result = lhs_macro_intermediate_result COMPARE_OP       \
+        rhs_macro_intermediate_result;                                      \
+    if (!comparison_result) {                                               \
+      auto current_entry = CF_STACK_TRACE_ENTRY("");                        \
+      current_entry << "Expected \"" << #LHS_RESULT << "\" " << #COMPARE_OP \
+                    << " \"" << #RHS_RESULT << "\" but was "                \
+                    << lhs_macro_intermediate_result << " vs "              \
+                    << rhs_macro_intermediate_result << ".";                \
+      current_entry << MSG;                                                 \
+      auto error = ErrorFromType(false);                                    \
+      error.PushEntry(std::move(current_entry));                            \
+      return error;                                                         \
+    };                                                                      \
+    comparison_result;                                                      \
+  })
+
+#define CF_COMPARE_EXPECT3(COMPARE_OP, LHS_RESULT, RHS_RESULT) \
+  CF_COMPARE_EXPECT4(COMPARE_OP, LHS_RESULT, RHS_RESULT, "")
+
+#define CF_COMPARE_EXPECT_OVERLOAD(_1, _2, _3, _4, NAME, ...) NAME
+
+#define CF_COMPARE_EXPECT(...)                                \
+  CF_COMPARE_EXPECT_OVERLOAD(__VA_ARGS__, CF_COMPARE_EXPECT4, \
+                             CF_COMPARE_EXPECT3)              \
+  (__VA_ARGS__)
+
+#define CF_EXPECT_EQ(LHS_RESULT, RHS_RESULT, ...) \
+  CF_COMPARE_EXPECT(==, LHS_RESULT, RHS_RESULT, ##__VA_ARGS__)
+#define CF_EXPECT_NE(LHS_RESULT, RHS_RESULT, ...) \
+  CF_COMPARE_EXPECT(!=, LHS_RESULT, RHS_RESULT, ##__VA_ARGS__)
+#define CF_EXPECT_LE(LHS_RESULT, RHS_RESULT, ...) \
+  CF_COMPARE_EXPECT(<=, LHS_RESULT, RHS_RESULT, ##__VA_ARGS__)
+#define CF_EXPECT_LT(LHS_RESULT, RHS_RESULT, ...) \
+  CF_COMPARE_EXPECT(<, LHS_RESULT, RHS_RESULT, ##__VA_ARGS__)
+#define CF_EXPECT_GE(LHS_RESULT, RHS_RESULT, ...) \
+  CF_COMPARE_EXPECT(>=, LHS_RESULT, RHS_RESULT, ##__VA_ARGS__)
+#define CF_EXPECT_GT(LHS_RESULT, RHS_RESULT, ...) \
+  CF_COMPARE_EXPECT(>, LHS_RESULT, RHS_RESULT, ##__VA_ARGS__)
+
 }  // namespace cuttlefish
diff --git a/common/libs/utils/result_matchers.h b/common/libs/utils/result_matchers.h
new file mode 100644
index 0000000..8089d23
--- /dev/null
+++ b/common/libs/utils/result_matchers.h
@@ -0,0 +1,67 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#pragma once
+
+#include <type_traits>
+
+#include <android-base/expected.h>
+#include <gmock/gmock.h>
+
+#include "common/libs/utils/result.h"
+
+namespace cuttlefish {
+
+MATCHER(IsOk, "an ok result") {
+  auto& result = arg;
+  if (!result.ok()) {
+    *result_listener << "which is an error result with trace: "
+                     << result.error().Message();
+    return false;
+  }
+  return true;
+}
+
+MATCHER(IsError, "an error result") {
+  auto& result = arg;
+  if (result.ok()) {
+    *result_listener << "which is an ok result";
+    return false;
+  }
+  return true;
+}
+
+MATCHER_P(IsOkAndValue, result_value_matcher, "") {
+  auto& result = arg;
+  using ResultType = std::decay_t<decltype(result)>;
+  return ExplainMatchResult(
+      ::testing::AllOf(IsOk(), ::testing::Property("value", &ResultType::value,
+                                                   result_value_matcher)),
+      result, result_listener);
+}
+
+MATCHER_P(IsErrorAndMessage, message_matcher, "") {
+  auto& result = arg;
+  using ResultType = std::decay_t<decltype(result)>;
+  return ExplainMatchResult(
+      ::testing::AllOf(
+          IsError(),
+          ::testing::Property(
+              "error", &ResultType::error,
+              ::testing::Property(&StackTraceError::Message, message_matcher))),
+      result, result_listener);
+}
+
+}  // namespace cuttlefish
diff --git a/common/libs/utils/result_test.cpp b/common/libs/utils/result_test.cpp
new file mode 100644
index 0000000..7212e9c
--- /dev/null
+++ b/common/libs/utils/result_test.cpp
@@ -0,0 +1,150 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "common/libs/utils/result.h"
+
+#include <type_traits>
+
+#include <android-base/expected.h>
+#include <gmock/gmock.h>
+#include <gtest/gtest.h>
+
+#include "common/libs/utils/result_matchers.h"
+
+namespace cuttlefish {
+namespace {
+
+using ::testing::HasSubstr;
+using ::testing::StrEq;
+
+}  // namespace
+
+TEST(ResultTest, ExpectBoolGoodNoMessage) {
+  const auto result = []() -> Result<std::string> {
+    CF_EXPECT(true);
+    return "okay";
+  }();
+  EXPECT_THAT(result, IsOkAndValue(StrEq("okay")));
+}
+
+TEST(ResultTest, ExpectBoolGoodWithMessage) {
+  const auto result = []() -> Result<std::string> {
+    CF_EXPECT(true, "Failed");
+    return "okay";
+  }();
+  EXPECT_THAT(result, IsOkAndValue(StrEq("okay")));
+}
+
+TEST(ResultTest, ExpectBoolBadNoMessage) {
+  const auto result = []() -> Result<std::string> {
+    CF_EXPECT(false);
+    return "okay";
+  }();
+  EXPECT_THAT(result, IsError());
+}
+
+TEST(ResultTest, ExpectBoolBadWithMessage) {
+  const auto result = []() -> Result<std::string> {
+    CF_EXPECT(false, "ExpectBoolBadWithMessage message");
+    return "okay";
+  }();
+  EXPECT_THAT(result,
+              IsErrorAndMessage(HasSubstr("ExpectBoolBadWithMessage message")));
+}
+
+TEST(ResultTest, ExpectWithResultGoodNoMessage) {
+  const auto result = []() -> Result<std::string> {
+    const auto inner_result = []() -> Result<std::string> {
+      CF_EXPECT(true);
+      return "inner okay";
+    };
+    CF_EXPECT(inner_result());
+    return "outer okay";
+  }();
+  EXPECT_THAT(result, IsOkAndValue(StrEq("outer okay")));
+}
+
+TEST(ResultTest, ExpectWithResultGoodWithMessage) {
+  const auto result = []() -> Result<std::string> {
+    const auto inner_result = []() -> Result<std::string> {
+      CF_EXPECT(true);
+      return "inner okay";
+    };
+    CF_EXPECT(inner_result(), "Failed inner result.");
+    return "outer okay";
+  }();
+  EXPECT_THAT(result, IsOkAndValue(StrEq("outer okay")));
+}
+
+TEST(ResultTest, ExpectWithResultBadNoMessage) {
+  const auto result = []() -> Result<std::string> {
+    const auto inner_result = []() -> Result<std::string> {
+      CF_EXPECT(false, "inner bad");
+      return "inner okay";
+    };
+    CF_EXPECT(inner_result());
+    return "okay";
+  }();
+  EXPECT_THAT(result, IsError());
+}
+
+TEST(ResultTest, ExpectWithResultBadWithMessage) {
+  const auto result = []() -> Result<std::string> {
+    const auto inner_result = []() -> Result<std::string> {
+      CF_EXPECT(false, "inner bad");
+      return "inner okay";
+    };
+    CF_EXPECT(inner_result(), "ExpectWithResultBadWithMessage message");
+    return "okay";
+  }();
+  EXPECT_THAT(result, IsErrorAndMessage(
+                          HasSubstr("ExpectWithResultBadWithMessage message")));
+}
+
+TEST(ResultTest, ExpectEqGoodNoMessage) {
+  const auto result = []() -> Result<std::string> {
+    CF_EXPECT_EQ(1, 1);
+    return "okay";
+  }();
+  EXPECT_THAT(result, IsOkAndValue(StrEq("okay")));
+}
+
+TEST(ResultTest, ExpectEqGoodWithMessage) {
+  const auto result = []() -> Result<std::string> {
+    CF_EXPECT_EQ(1, 1, "Failed comparison");
+    return "okay";
+  }();
+  EXPECT_THAT(result, IsOkAndValue(StrEq("okay")));
+}
+
+TEST(ResultTest, ExpectEqBadNoMessage) {
+  const auto result = []() -> Result<std::string> {
+    CF_EXPECT_EQ(1, 2);
+    return "okay";
+  }();
+  EXPECT_THAT(result, IsError());
+}
+
+TEST(ResultTest, ExpectEqBadWithMessage) {
+  const auto result = []() -> Result<std::string> {
+    CF_EXPECT_EQ(1, 2, "ExpectEqBadWithMessage message");
+    return "okay";
+  }();
+  EXPECT_THAT(result,
+              IsErrorAndMessage(HasSubstr("ExpectEqBadWithMessage message")));
+}
+
+}  // namespace cuttlefish
diff --git a/common/libs/utils/scope_guard.cpp b/common/libs/utils/scope_guard.cpp
new file mode 100644
index 0000000..8d9f06e
--- /dev/null
+++ b/common/libs/utils/scope_guard.cpp
@@ -0,0 +1,38 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "common/libs/utils/scope_guard.h"
+
+#include <functional>
+
+namespace cuttlefish {
+
+ScopeGuard::ScopeGuard() = default;
+
+ScopeGuard::ScopeGuard(std::function<void()> fn) : fn_(std::move(fn)) {}
+
+ScopeGuard::ScopeGuard(ScopeGuard&&) noexcept = default;
+
+ScopeGuard& ScopeGuard::operator=(ScopeGuard&&) noexcept = default;
+
+ScopeGuard::~ScopeGuard() {
+  if (fn_) {
+    fn_();
+  }
+}
+
+void ScopeGuard::Cancel() { fn_ = nullptr; }
+
+}  // namespace cuttlefish
diff --git a/common/libs/utils/scope_guard.h b/common/libs/utils/scope_guard.h
new file mode 100644
index 0000000..91e6066
--- /dev/null
+++ b/common/libs/utils/scope_guard.h
@@ -0,0 +1,36 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#pragma once
+
+#include <functional>
+
+namespace cuttlefish {
+
+class ScopeGuard {
+ public:
+  ScopeGuard();
+  explicit ScopeGuard(std::function<void()> fn);
+  ScopeGuard(ScopeGuard&&) noexcept;
+  ~ScopeGuard();
+  ScopeGuard& operator=(ScopeGuard&&) noexcept;
+
+  void Cancel();
+
+ private:
+  std::function<void()> fn_;
+};
+
+}  // namespace cuttlefish
diff --git a/common/libs/utils/socket2socket_proxy.cpp b/common/libs/utils/socket2socket_proxy.cpp
index e3d7343..b55c215 100644
--- a/common/libs/utils/socket2socket_proxy.cpp
+++ b/common/libs/utils/socket2socket_proxy.cpp
@@ -16,6 +16,7 @@
 
 #include "common/libs/utils/socket2socket_proxy.h"
 
+#include <poll.h>
 #include <sys/socket.h>
 
 #include <cstring>
@@ -31,6 +32,7 @@
 namespace {
 
 void Forward(const std::string& label, SharedFD from, SharedFD to) {
+  LOG(DEBUG) << label << ": Proxy thread started. Starting copying data";
   auto success = to->CopyAllFrom(*from);
   if (!success) {
     if (from->GetErrno()) {
@@ -41,37 +43,94 @@
     }
   }
   to->Shutdown(SHUT_WR);
-  LOG(DEBUG) << label << " completed";
+  LOG(DEBUG) << label << ": Proxy thread completed";
 }
 
 void SetupProxying(SharedFD client, SharedFD target) {
-  std::thread([client, target]() {
-    std::thread client2target(Forward, "client2target", client, target);
-    Forward("target2client", target, client);
-    client2target.join();
-    // The actual proxying is handled in a detached thread so that this function
-    // returns immediately
-  }).detach();
+  LOG(DEBUG) << "Launching proxy threads";
+  std::thread client2target(Forward, "c2t", client, target);
+  std::thread target2client(Forward, "t2c", target, client);
+  client2target.detach();
+  target2client.detach();
 }
 
 }  // namespace
 
-void Proxy(SharedFD server, std::function<SharedFD()> conn_factory) {
-  while (server->IsOpen()) {
-    auto client = SharedFD::Accept(*server);
-    if (!client->IsOpen()) {
-      LOG(ERROR) << "Failed to accept connection in server: "
-                 << client->StrError();
-      continue;
-    }
-    auto target = conn_factory();
-    if (target->IsOpen()) {
-      SetupProxying(client, target);
-    }
-    // The client will close when it goes out of scope here if the target didn't
-    // open.
+ProxyServer::ProxyServer(SharedFD server, std::function<SharedFD()> clients_factory)
+    : stop_fd_(SharedFD::Event()) {
+
+  if (!stop_fd_->IsOpen()) {
+    LOG(FATAL) << "Failed to open eventfd: " << stop_fd_->StrError();
+    return;
   }
-  LOG(INFO) << "Server closed: " << server->StrError();
+  server_ = std::thread([&, server_fd = std::move(server),
+                            clients_factory = std::move(clients_factory)]() {
+    constexpr ssize_t SERVER = 0;
+    constexpr ssize_t STOP = 1;
+
+    std::vector<PollSharedFd> server_poll = {
+      {.fd = server_fd, .events = POLLIN},
+      {.fd = stop_fd_, .events = POLLIN}
+    };
+
+    while (server_fd->IsOpen()) {
+      server_poll[SERVER].revents = 0;
+      server_poll[STOP].revents = 0;
+
+      const int poll_result = SharedFD::Poll(server_poll, -1);
+      if (poll_result < 0) {
+        LOG(ERROR) << "Failed to poll to wait for incoming connection";
+        continue;
+      }
+      if (server_poll[STOP].revents & POLLIN) {
+        // Stop fd is available to read, so we received a stop event
+        // and must stop the thread
+        break;
+      }
+      if (!(server_poll[SERVER].revents & POLLIN)) {
+        continue;
+      }
+
+      // Server fd is available to read, so we can accept the
+      // connection without blocking on that
+      auto client = SharedFD::Accept(*server_fd);
+      if (!client->IsOpen()) {
+        LOG(ERROR) << "Failed to accept incoming connection: " << client->StrError();
+        continue;
+      }
+      auto target = clients_factory();
+      if (target->IsOpen()) {
+        SetupProxying(client, target);
+      } else {
+        LOG(ERROR) << "Cannot connect to the target to setup proxying: " << target->StrError();
+      }
+      // The client will close when it goes out of scope here if the target
+      // didn't open.
+    }
+  });
+}
+
+void ProxyServer::Join() {
+  if (server_.joinable()) {
+    server_.join();
+  }
+}
+
+ProxyServer::~ProxyServer() {
+  if (stop_fd_->EventfdWrite(1) != 0) {
+    LOG(ERROR) << "Failed to stop proxy thread: " << stop_fd_->StrError();
+  }
+  Join();
+}
+
+void Proxy(SharedFD server, std::function<SharedFD()> conn_factory) {
+  ProxyServer proxy(std::move(server), std::move(conn_factory));
+  proxy.Join();
+}
+
+std::unique_ptr<ProxyServer> ProxyAsync(SharedFD server, std::function<SharedFD()> conn_factory) {
+  return std::unique_ptr<ProxyServer>(
+      new ProxyServer(std::move(server), std::move(conn_factory)));
 }
 
 }  // namespace cuttlefish
diff --git a/common/libs/utils/socket2socket_proxy.h b/common/libs/utils/socket2socket_proxy.h
index 28e17b1..f73bbb3 100644
--- a/common/libs/utils/socket2socket_proxy.h
+++ b/common/libs/utils/socket2socket_proxy.h
@@ -17,10 +17,24 @@
 #pragma once
 
 #include <functional>
+#include <memory>
+#include <thread>
 
 #include "common/libs/fs/shared_fd.h"
 
 namespace cuttlefish {
+
+class ProxyServer {
+ public:
+  ProxyServer(SharedFD server, std::function<SharedFD()> clients_factory);
+  void Join();
+  ~ProxyServer();
+
+ private:
+  SharedFD stop_fd_;
+  std::thread server_;
+};
+
 // Executes a TCP proxy
 // Accept() is called on the server in a loop, for every client connection a
 // target connection is created through the conn_factory callback and data is
@@ -30,4 +44,6 @@
 // behavior for SIGPIPE before calling this function, otherwise it runs the risk
 // or crashing the process when a connection breaks.
 void Proxy(SharedFD server, std::function<SharedFD()> conn_factory);
+std::unique_ptr<ProxyServer> ProxyAsync(SharedFD server, std::function<SharedFD()> conn_factory);
+
 }  // namespace cuttlefish
diff --git a/common/libs/utils/subprocess.cpp b/common/libs/utils/subprocess.cpp
index 16a7989..5fcb034 100644
--- a/common/libs/utils/subprocess.cpp
+++ b/common/libs/utils/subprocess.cpp
@@ -28,11 +28,13 @@
 #include <cstring>
 #include <map>
 #include <memory>
+#include <optional>
 #include <ostream>
 #include <set>
 #include <sstream>
 #include <string>
 #include <thread>
+#include <type_traits>
 #include <utility>
 #include <vector>
 
@@ -205,12 +207,12 @@
   return StopperResult::kStopSuccess;
 }
 
-Command::Command(const std::string& executable, SubprocessStopper stopper)
+Command::Command(std::string executable, SubprocessStopper stopper)
     : subprocess_stopper_(stopper) {
   for (char** env = environ; *env; env++) {
     env_.emplace_back(*env);
   }
-  command_.push_back(executable);
+  command_.emplace_back(std::move(executable));
 }
 
 Command::~Command() {
@@ -264,27 +266,22 @@
   return std::move(*this);
 }
 
-Command& Command::SetWorkingDirectory(std::string path) & {
+Command& Command::SetWorkingDirectory(const std::string& path) & {
   auto fd = SharedFD::Open(path, O_RDONLY | O_PATH | O_DIRECTORY);
   CHECK(fd->IsOpen()) << "Could not open \"" << path
                       << "\" dir fd: " << fd->StrError();
   return SetWorkingDirectory(fd);
 }
-Command Command::SetWorkingDirectory(std::string path) && {
-  auto fd = SharedFD::Open(path, O_RDONLY | O_PATH | O_DIRECTORY);
-  CHECK(fd->IsOpen()) << "Could not open \"" << path
-                      << "\" dir fd: " << fd->StrError();
-  return std::move(SetWorkingDirectory(fd));
+Command Command::SetWorkingDirectory(const std::string& path) && {
+  return std::move(SetWorkingDirectory(path));
 }
 Command& Command::SetWorkingDirectory(SharedFD dirfd) & {
   CHECK(dirfd->IsOpen()) << "Dir fd invalid: " << dirfd->StrError();
-  working_directory_ = dirfd;
+  working_directory_ = std::move(dirfd);
   return *this;
 }
 Command Command::SetWorkingDirectory(SharedFD dirfd) && {
-  CHECK(dirfd->IsOpen()) << "Dir fd invalid: " << dirfd->StrError();
-  working_directory_ = dirfd;
-  return std::move(*this);
+  return std::move(SetWorkingDirectory(std::move(dirfd)));
 }
 
 Subprocess Command::Start(SubprocessOptions options) const {
@@ -321,11 +318,12 @@
     }
     int rval;
     auto envp = ToCharPointers(env_);
-    rval = execvpe(cmd[0], const_cast<char* const*>(cmd.data()),
+    const char* executable = executable_ ? executable_->c_str() : cmd[0];
+    rval = execvpe(executable, const_cast<char* const*>(cmd.data()),
                    const_cast<char* const*>(envp.data()));
     // No need for an if: if exec worked it wouldn't have returned
-    LOG(ERROR) << "exec of " << cmd[0] << " failed (" << strerror(errno)
-               << ")";
+    LOG(ERROR) << "exec of " << cmd[0] << " with path \"" << executable
+               << "\" failed (" << strerror(errno) << ")";
     exit(rval);
   }
   if (pid == -1) {
diff --git a/common/libs/utils/subprocess.h b/common/libs/utils/subprocess.h
index ff1af5b..5c072e5 100644
--- a/common/libs/utils/subprocess.h
+++ b/common/libs/utils/subprocess.h
@@ -25,9 +25,11 @@
 #include <cstring>
 #include <functional>
 #include <map>
+#include <optional>
 #include <ostream>
 #include <sstream>
 #include <string>
+#include <type_traits>
 #include <utility>
 #include <vector>
 
@@ -133,8 +135,7 @@
   // Constructs a command object from the path to an executable binary and an
   // optional subprocess stopper. When not provided, stopper defaults to sending
   // SIGKILL to the subprocess.
-  Command(const std::string& executable,
-          SubprocessStopper stopper = KillSubprocess);
+  Command(std::string executable, SubprocessStopper stopper = KillSubprocess);
   Command(Command&&) = default;
   // The default copy constructor is unsafe because it would mean multiple
   // closing of the inherited file descriptors. If needed it can be implemented
@@ -143,35 +144,50 @@
   Command& operator=(const Command&) = delete;
   ~Command();
 
-  const std::string& Executable() const { return command_[0]; }
+  const std::string& Executable() const {
+    return executable_ ? *executable_ : command_[0];
+  }
 
-  Command& SetExecutable(const std::string& executable) & {
-    command_[0] = executable;
+  Command& SetExecutable(std::string executable) & {
+    executable_ = std::move(executable);
     return *this;
   }
-  Command SetExecutable(const std::string& executable) && {
-    SetExecutable(executable);
-    return std::move(*this);
+  Command SetExecutable(std::string executable) && {
+    return std::move(SetExecutable(executable));
+  }
+
+  Command& SetName(std::string name) & {
+    command_[0] = std::move(name);
+    return *this;
+  }
+  Command SetName(std::string name) && {
+    return std::move(SetName(std::move(name)));
+  }
+
+  Command& SetExecutableAndName(std::string name) & {
+    return SetExecutable(name).SetName(std::move(name));
+  }
+
+  Command SetExecutableAndName(std::string name) && {
+    return std::move(SetExecutableAndName(std::move(name)));
   }
 
   Command& SetStopper(SubprocessStopper stopper) & {
-    subprocess_stopper_ = stopper;
+    subprocess_stopper_ = std::move(stopper);
     return *this;
   }
   Command SetStopper(SubprocessStopper stopper) && {
-    SetStopper(stopper);
-    return std::move(*this);
+    return std::move(SetStopper(std::move(stopper)));
   }
 
   // Specify the environment for the subprocesses to be started. By default
   // subprocesses inherit the parent's environment.
-  Command& SetEnvironment(const std::vector<std::string>& env) & {
-    env_ = env;
+  Command& SetEnvironment(std::vector<std::string> env) & {
+    env_ = std::move(env);
     return *this;
   }
-  Command SetEnvironment(const std::vector<std::string>& env) && {
-    SetEnvironment(env);
-    return std::move(*this);
+  Command SetEnvironment(std::vector<std::string> env) && {
+    return std::move(SetEnvironment(std::move(env)));
   }
 
   Command& AddEnvironmentVariable(const std::string& env_var,
@@ -184,13 +200,12 @@
     return std::move(*this);
   }
 
-  Command& AddEnvironmentVariable(const std::string& env_var) & {
-    env_.push_back(env_var);
+  Command& AddEnvironmentVariable(std::string env_var) & {
+    env_.emplace_back(std::move(env_var));
     return *this;
   }
-  Command AddEnvironmentVariable(const std::string& env_var) && {
-    AddEnvironmentVariable(env_var);
-    return std::move(*this);
+  Command AddEnvironmentVariable(std::string env_var) && {
+    return std::move(AddEnvironmentVariable(std::move(env_var)));
   }
 
   // Specify an environment variable to be unset from the parent's
@@ -207,8 +222,7 @@
     return *this;
   }
   Command UnsetFromEnvironment(const std::string& env_var) && {
-    UnsetFromEnvironment(env_var);
-    return std::move(*this);
+    return std::move(UnsetFromEnvironment(env_var));
   }
 
   // Adds a single parameter to the command. All arguments are concatenated into
@@ -225,8 +239,7 @@
   }
   template <typename... Args>
   Command AddParameter(Args... args) && {
-    AddParameter(std::forward<Args>(args)...);
-    return std::move(*this);
+    return std::move(AddParameter(std::forward<Args>(args)...));
   }
   // Similar to AddParameter, except the args are appended to the last (most
   // recently-added) parameter in the command.
@@ -240,8 +253,7 @@
   }
   template <typename... Args>
   Command AppendToLastParameter(Args... args) && {
-    AppendToLastParameter(std::forward<Args>(args)...);
-    return std::move(*this);
+    return std::move(AppendToLastParameter(std::forward<Args>(args)...));
   }
 
   // Redirects the standard IO of the command.
@@ -254,8 +266,8 @@
   Command RedirectStdIO(Subprocess::StdIOChannel subprocess_channel,
                         Subprocess::StdIOChannel parent_channel) &&;
 
-  Command& SetWorkingDirectory(std::string path) &;
-  Command SetWorkingDirectory(std::string path) &&;
+  Command& SetWorkingDirectory(const std::string& path) &;
+  Command SetWorkingDirectory(const std::string& path) &&;
   Command& SetWorkingDirectory(SharedFD dirfd) &;
   Command SetWorkingDirectory(SharedFD dirfd) &&;
 
@@ -276,6 +288,7 @@
   std::string AsBashScript(const std::string& redirected_stdio_path = "") const;
 
  private:
+  std::optional<std::string> executable_;  // When unset, use command_[0]
   std::vector<std::string> command_;
   std::map<SharedFD, int> inherited_fds_{};
   std::map<Subprocess::StdIOChannel, int> redirects_{};
diff --git a/common/libs/utils/tee_logging.cpp b/common/libs/utils/tee_logging.cpp
index 8129b40..fb6e5a3 100644
--- a/common/libs/utils/tee_logging.cpp
+++ b/common/libs/utils/tee_logging.cpp
@@ -126,9 +126,10 @@
 // Copied from system/libbase/logging_splitters.h
 // This adds the log header to each line of message and returns it as a string intended to be
 // written to stderr.
-static std::string StderrOutputGenerator(const struct tm& now, int pid, uint64_t tid,
-                                         LogSeverity severity, const char* tag, const char* file,
-                                         unsigned int line, const char* message) {
+std::string StderrOutputGenerator(const struct tm& now, int pid, uint64_t tid,
+                                  LogSeverity severity, const char* tag,
+                                  const char* file, unsigned int line,
+                                  const char* message) {
   char timestamp[32];
   strftime(timestamp, sizeof(timestamp), "%m-%d %H:%M:%S", &now);
 
@@ -190,15 +191,21 @@
   for (const auto& destination : destinations_) {
     std::string msg_with_prefix = prefix_ + message;
     std::string output_string;
-    if (destination.metadata_level == MetadataLevel::ONLY_MESSAGE) {
-      output_string = msg_with_prefix + std::string("\n");
-    } else {
-      struct tm now;
-      time_t t = time(nullptr);
-      localtime_r(&t, &now);
-      output_string =
-          StderrOutputGenerator(now, getpid(), GetThreadId(), severity, tag,
-                                file, line, msg_with_prefix.c_str());
+    switch (destination.metadata_level) {
+      case MetadataLevel::ONLY_MESSAGE:
+        output_string = msg_with_prefix + std::string("\n");
+        break;
+      case MetadataLevel::TAG_AND_MESSAGE:
+        output_string = fmt::format("{}] {}{}", tag, msg_with_prefix, "\n");
+        break;
+      default:
+        struct tm now;
+        time_t t = time(nullptr);
+        localtime_r(&t, &now);
+        output_string =
+            StderrOutputGenerator(now, getpid(), GetThreadId(), severity, tag,
+                                  file, line, msg_with_prefix.c_str());
+        break;
     }
     if (severity >= destination.severity) {
       if (destination.target->IsATTY()) {
@@ -234,11 +241,12 @@
 }
 
 TeeLogger LogToStderrAndFiles(const std::vector<std::string>& files,
-                              const std::string& prefix) {
+                              const std::string& prefix,
+                              MetadataLevel stderr_level) {
   std::vector<SeverityTarget> log_severities = SeverityTargetsForFiles(files);
   log_severities.push_back(SeverityTarget{ConsoleSeverity(),
                                           SharedFD::Dup(/* stderr */ 2),
-                                          MetadataLevel::ONLY_MESSAGE});
+                                          stderr_level});
   return TeeLogger(log_severities, prefix);
 }
 
diff --git a/common/libs/utils/tee_logging.h b/common/libs/utils/tee_logging.h
index ac3e70c..03a3f45 100644
--- a/common/libs/utils/tee_logging.h
+++ b/common/libs/utils/tee_logging.h
@@ -24,12 +24,18 @@
 
 namespace cuttlefish {
 
+std::string StderrOutputGenerator(const struct tm& now, int pid, uint64_t tid,
+                                  android::base::LogSeverity severity,
+                                  const char* tag, const char* file,
+                                  unsigned int line, const char* message);
+
 android::base::LogSeverity ConsoleSeverity();
 android::base::LogSeverity LogFileSeverity();
 
 enum class MetadataLevel {
   FULL,
   ONLY_MESSAGE,
+  TAG_AND_MESSAGE
 };
 
 struct SeverityTarget {
@@ -57,6 +63,7 @@
 TeeLogger LogToFiles(const std::vector<std::string>& files,
                      const std::string& log_prefix = "");
 TeeLogger LogToStderrAndFiles(const std::vector<std::string>& files,
-                              const std::string& log_prefix = "");
+                              const std::string& log_prefix = "",
+                              MetadataLevel stderr_level = MetadataLevel::ONLY_MESSAGE);
 
 } // namespace cuttlefish
diff --git a/common/libs/utils/unique_resource_allocator.h b/common/libs/utils/unique_resource_allocator.h
new file mode 100644
index 0000000..a73a970
--- /dev/null
+++ b/common/libs/utils/unique_resource_allocator.h
@@ -0,0 +1,295 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <functional>
+#include <memory>
+#include <mutex>
+#include <optional>
+#include <type_traits>
+#include <unordered_set>
+#include <utility>
+#include <vector>
+
+#include <android-base/logging.h>
+
+#include "common/libs/utils/contains.h"
+
+namespace cuttlefish {
+
+/**
+ * Generic allocator that can provide RAII-aware resource reservations.
+ *
+ * See go/cf-resource-allocator-utils for more details.
+ */
+template <typename T>
+class UniqueResourceAllocator {
+  template <typename U>
+  using RemoveCvref =
+      typename std::remove_cv_t<typename std::remove_reference_t<U>>;
+
+ public:
+  /*
+   * Returning the inner resource to the pool at destruction time
+   *
+   * The pool must live longer than the resources. Use this like you use
+   * std::unique_ptr.
+   */
+  class Reservation {
+    friend class UniqueResourceAllocator;
+    friend class ReservationSet;
+
+   public:
+    Reservation(const Reservation&) = delete;
+    Reservation(Reservation&& src)
+        : resource_pool_(src.resource_pool_), resource_(src.resource_) {
+      src.resource_pool_ = nullptr;
+    }
+    Reservation& operator=(const Reservation&) = delete;
+    Reservation& operator=(Reservation&& src) = delete;
+
+    bool operator==(const Reservation& src) const {
+      return (resource_ == src.resource_ &&
+              resource_pool_ == src.resource_pool_);
+    }
+
+    ~Reservation() {
+      if (resource_pool_) {
+        resource_pool_->Reclaim(*resource_);
+      }
+    }
+    const T& Get() const { return *resource_; }
+
+   private:
+    Reservation(UniqueResourceAllocator& resource_pool, const T& resource)
+        : resource_pool_(std::addressof(resource_pool)),
+          resource_(std::addressof(resource)) {}
+    /*
+     * Once this Reservation is std::move-ed out to another object,
+     * resource_pool_ is invalidated, and resource_ must not be
+     * returned to the invalidated resource_pool_
+     */
+    UniqueResourceAllocator* resource_pool_;
+    const T* resource_;
+  };
+
+  struct ReservationHash {
+    std::size_t operator()(const Reservation& resource_wrapper) const {
+      return std::hash<const T*>()(std::addressof(resource_wrapper.Get()));
+    }
+  };
+  using ReservationSet = std::unordered_set<Reservation, ReservationHash>;
+  /*
+   * Creates the singleton object.
+   *
+   * Call this function once during the entire program's life
+   */
+  static UniqueResourceAllocator& Create(const std::vector<T>& pool) {
+    static UniqueResourceAllocator singleton_allocator(pool);
+    return singleton_allocator;
+  }
+
+  static std::unique_ptr<UniqueResourceAllocator> New(
+      const std::vector<T>& pool) {
+    UniqueResourceAllocator* new_allocator = new UniqueResourceAllocator(pool);
+    return std::unique_ptr<UniqueResourceAllocator>(new_allocator);
+  }
+
+  // Adds the elements from the new pool that do not belong, and have never
+  // belonged, to the current pool of the allocator. Returns the leftover.
+  std::vector<T> ExpandPool(std::vector<T> another_pool) {
+    std::lock_guard lock(mutex_);
+    std::vector<T> not_selected;
+    for (auto& new_item : another_pool) {
+      if (Contains(available_resources_, new_item) ||
+          Contains(allocated_resources_, new_item)) {
+        not_selected.emplace_back(std::move(new_item));
+        continue;
+      }
+      available_resources_.insert(std::move(new_item));
+    }
+    return not_selected;
+  }
+
+  std::vector<T> ExpandPool(T&& t) {
+    std::vector<T> pool_to_add;
+    pool_to_add.emplace_back(std::move(t));
+    return ExpandPool(std::move(pool_to_add));
+  }
+
+  std::vector<T> ExpandPool(const T& t) {
+    std::vector<T> pool_to_add;
+    pool_to_add.emplace_back(t);
+    return ExpandPool(std::move(pool_to_add));
+  }
+
+  std::optional<Reservation> UniqueItem() {
+    std::lock_guard<std::mutex> lock(mutex_);
+    auto itr = available_resources_.begin();
+    if (itr == available_resources_.end()) {
+      return std::nullopt;
+    }
+    Reservation r(*this, *(RemoveFromPool(itr)));
+    return {std::move(r)};
+  }
+
+  // gives n unique items from the pool, and then removes them from the pool
+  std::optional<ReservationSet> UniqueItems(const int n) {
+    std::lock_guard<std::mutex> lock(mutex_);
+    if (n <= 0 || available_resources_.size() < n) {
+      return std::nullopt;
+    }
+    ReservationSet result;
+    for (int i = 0; i < n; i++) {
+      auto itr = available_resources_.begin();
+      result.insert(Reservation{*this, *(RemoveFromPool(itr))});
+    }
+    return {std::move(result)};
+  }
+
+  template <typename V = T>
+  std::enable_if_t<std::is_integral<V>::value, std::optional<ReservationSet>>
+  UniqueConsecutiveItems(const int n) {
+    static_assert(std::is_same<T, V>::value);
+    std::lock_guard<std::mutex> lock(mutex_);
+    if (n <= 0 || available_resources_.size() < n) {
+      return std::nullopt;
+    }
+
+    for (const auto& available_resource : available_resources_) {
+      auto start_inclusive = available_resource;
+      auto resources_opt =
+          TakeRangeInternal(start_inclusive, start_inclusive + n);
+      if (!resources_opt) {
+        continue;
+      }
+      return resources_opt;
+    }
+    return std::nullopt;
+  }
+
+  // takes t if available
+  // returns std::nullopt if not available or not in the pool at all
+  std::optional<Reservation> Take(const T& t) {
+    std::lock_guard<std::mutex> lock(mutex_);
+    auto itr = available_resources_.find(t);
+    if (itr == available_resources_.end()) {
+      return std::nullopt;
+    }
+    Reservation resource{*this, *(RemoveFromPool(itr))};
+    return resource;
+  }
+
+  template <typename Container>
+  std::optional<ReservationSet> TakeAll(const Container& ts) {
+    std::lock_guard<std::mutex> lock(mutex_);
+    for (const auto& t : ts) {
+      if (!Contains(available_resources_, t)) {
+        return std::nullopt;
+      }
+    }
+    ReservationSet resources;
+    for (const auto& t : ts) {
+      auto itr = available_resources_.find(t);
+      resources.insert(Reservation{*this, *(RemoveFromPool(itr))});
+    }
+    return resources;
+  }
+
+  /*
+   * If the range is available, returns the resources from the pool
+   *
+   * Otherwise, makes no change in the internal data structure but
+   * returns std::nullopt.
+   */
+  template <typename V = T>
+  std::enable_if_t<std::is_integral<V>::value, std::optional<ReservationSet>>
+  TakeRange(const T& start_inclusive, const T& end_exclusive) {
+    static_assert(std::is_same<T, V>::value);
+    std::lock_guard<std::mutex> lock(mutex_);
+    return TakeRangeInternal(start_inclusive, end_exclusive);
+  }
+
+ private:
+  template <typename Container>
+  UniqueResourceAllocator(const Container& items)
+      : available_resources_{items.cbegin(), items.cend()} {}
+
+  bool operator==(const UniqueResourceAllocator& other) const {
+    return std::addressof(*this) == std::addressof(other);
+  }
+
+  // only called by the destructor of Reservation
+  // hard to return a Result here, as this is called from destructors only
+  void Reclaim(const T& t) {
+    std::lock_guard<std::mutex> lock(mutex_);
+    auto itr = allocated_resources_.find(t);
+    if (itr == allocated_resources_.end()) {
+      if (!Contains(available_resources_, t)) {
+        LOG(ERROR) << "The resource " << t << " does not belong to this pool";
+        return;
+      }
+      // already reclaimed.
+      return;
+    }
+    T tmp = std::move(*itr);
+    allocated_resources_.erase(itr);
+    available_resources_.insert(std::move(tmp));
+  }
+
+  /*
+   * If the range is available, returns the resources from the pool
+   *
+   * Otherwise, makes no change in the internal data structure but
+   * returns std::nullopt.
+   */
+  template <typename V = T>
+  std::enable_if_t<std::is_integral<V>::value, std::optional<ReservationSet>>
+  TakeRangeInternal(const T& start_inclusive, const T& end_exclusive) {
+    static_assert(std::is_same<T, V>::value);
+    for (auto cursor = start_inclusive; cursor < end_exclusive; cursor++) {
+      if (!Contains(available_resources_, cursor)) {
+        return std::nullopt;
+      }
+    }
+    ReservationSet resources;
+    for (auto cursor = start_inclusive; cursor < end_exclusive; cursor++) {
+      auto itr = available_resources_.find(cursor);
+      resources.insert(Reservation{*this, *(RemoveFromPool(itr))});
+    }
+    return resources;
+  }
+
+  /*
+   * Moves *itr from available_resources_ to allocated_resources_, and returns
+   * the pointer of the object in the allocated_resources_. The pointer is never
+   * nullptr as it is std::addressof(an object in the unordered_set buffer).
+   *
+   * The itr must belong to available_resources_.
+   */
+  const T* RemoveFromPool(const typename std::unordered_set<T>::iterator itr) {
+    T tmp = std::move(*itr);
+    available_resources_.erase(itr);
+    const auto [new_itr, _] = allocated_resources_.insert(std::move(tmp));
+    return std::addressof(*new_itr);
+  }
+  std::unordered_set<T> available_resources_;
+  std::unordered_set<T> allocated_resources_;
+  std::mutex mutex_;
+};
+
+}  // namespace cuttlefish
diff --git a/common/libs/utils/unique_resource_allocator_test.cpp b/common/libs/utils/unique_resource_allocator_test.cpp
new file mode 100644
index 0000000..352c5ab
--- /dev/null
+++ b/common/libs/utils/unique_resource_allocator_test.cpp
@@ -0,0 +1,201 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include <unordered_set>
+
+#include "common/libs/utils/contains.h"
+#include "common/libs/utils/unique_resource_allocator.h"
+#include "common/libs/utils/unique_resource_allocator_test.h"
+
+namespace cuttlefish {
+
+TEST_P(OneEachTest, GetAnyAvailableOne) {
+  const auto resources = GetParam();
+  auto allocator = UniqueResourceAllocator<unsigned>::New(resources);
+  if (!allocator) {
+    GTEST_SKIP() << "Memory allocation failed but we aren't testing it.";
+  }
+  std::unordered_set<unsigned> expected_ids{resources.cbegin(),
+                                            resources.cend()};
+  using Reservation = UniqueResourceAllocator<unsigned>::Reservation;
+
+  std::vector<Reservation> allocated;
+  for (int i = 0; i < resources.size(); i++) {
+    auto id_opt = allocator->UniqueItem();
+    ASSERT_TRUE(id_opt);
+    ASSERT_TRUE(Contains(expected_ids, id_opt->Get()));
+    allocated.emplace_back(std::move(*id_opt));
+  }
+  ASSERT_FALSE(allocator->UniqueItem());
+}
+
+INSTANTIATE_TEST_SUITE_P(
+    CvdIdAllocator, OneEachTest,
+    testing::Values(std::vector<unsigned>{}, std::vector<unsigned>{1},
+                    std::vector<unsigned>{1, 22, 3, 43, 5}));
+
+TEST_F(CvdIdAllocatorTest, ClaimAll) {
+  std::vector<unsigned> inputs{1, 2, 3, 4, 5, 6, 7, 8, 9, 10};
+  auto allocator = UniqueResourceAllocator<unsigned>::New(inputs);
+  if (!allocator) {
+    GTEST_SKIP() << "Memory allocation failed but we aren't testing it.";
+  }
+
+  // request inputs.size() items
+  auto allocated_items_opt = allocator->UniqueItems(inputs.size());
+  ASSERT_TRUE(allocated_items_opt);
+  ASSERT_EQ(allocated_items_opt->size(), inputs.size());
+  // did it claim all?
+  ASSERT_FALSE(allocator->UniqueItem());
+}
+
+TEST_F(CvdIdAllocatorTest, StrideBeyond) {
+  std::vector<unsigned> inputs{1, 2, 3, 4, 5, 6, 7, 8, 9, 10};
+  auto allocator = UniqueResourceAllocator<unsigned>::New(inputs);
+  if (!allocator) {
+    GTEST_SKIP() << "Memory allocation failed but we aren't testing it.";
+  }
+
+  auto three_opt = allocator->UniqueItems(3);
+  auto four_opt = allocator->UniqueItems(4);
+  auto five_opt = allocator->UniqueItems(5);
+  auto two_opt = allocator->UniqueItems(2);
+  auto another_two_opt = allocator->UniqueItems(2);
+
+  ASSERT_TRUE(three_opt);
+  ASSERT_TRUE(four_opt);
+  ASSERT_FALSE(five_opt);
+  ASSERT_TRUE(two_opt);
+  ASSERT_FALSE(another_two_opt);
+}
+
+TEST_F(CvdIdAllocatorTest, Consecutive) {
+  std::vector<unsigned> inputs{1, 2, 4, 5, 6, 7, 9, 10, 11};
+  auto allocator = UniqueResourceAllocator<unsigned>::New(inputs);
+  if (!allocator) {
+    GTEST_SKIP() << "Memory allocation failed but we aren't testing it.";
+  }
+
+  auto four_consecutive = allocator->UniqueConsecutiveItems(4);
+  auto three_consecutive = allocator->UniqueConsecutiveItems(3);
+  auto another_three_consecutive = allocator->UniqueConsecutiveItems(3);
+  auto two_consecutive = allocator->UniqueConsecutiveItems(2);
+
+  ASSERT_TRUE(four_consecutive);
+  ASSERT_TRUE(three_consecutive);
+  ASSERT_FALSE(another_three_consecutive);
+  ASSERT_TRUE(two_consecutive);
+  // it's empty
+  ASSERT_FALSE(allocator->UniqueItem()) << "one or more left";
+}
+
+TEST_F(CvdIdAllocatorTest, Take) {
+  std::vector<unsigned> inputs{4, 5, 9};
+  auto allocator = UniqueResourceAllocator<unsigned>::New(inputs);
+  if (!allocator) {
+    GTEST_SKIP() << "Memory allocation failed but we aren't testing it.";
+  }
+
+  auto four = allocator->Take(4);
+  auto nine = allocator->Take(9);
+  // wrong
+  auto twenty = allocator->Take(20);
+
+  ASSERT_TRUE(four);
+  ASSERT_TRUE(nine);
+  ASSERT_FALSE(twenty);
+}
+
+TEST_F(CvdIdAllocatorTest, TakeAll) {
+  std::vector<unsigned> inputs{4, 5, 9, 10};
+  auto allocator = UniqueResourceAllocator<unsigned>::New(inputs);
+  if (!allocator) {
+    GTEST_SKIP() << "Memory allocation failed but we aren't testing it.";
+  }
+
+  auto take_4_5_11 = allocator->TakeAll<std::vector<unsigned>>({4, 5, 11});
+  auto take_4_5_10 = allocator->TakeAll<std::vector<unsigned>>({4, 5, 10});
+  auto take_9_10 = allocator->TakeAll<std::vector<unsigned>>({9, 10});
+  auto take_9 = allocator->TakeAll<std::vector<unsigned>>({9});
+
+  ASSERT_FALSE(take_4_5_11);
+  ASSERT_TRUE(take_4_5_10);
+  ASSERT_FALSE(take_9_10);
+  ASSERT_TRUE(take_9);
+}
+
+TEST_F(CvdIdAllocatorTest, TakeRange) {
+  std::vector<unsigned> inputs{1, 2, 4, 5, 6, 7, 8, 9, 10, 11};
+  auto allocator = UniqueResourceAllocator<unsigned>::New(inputs);
+  if (!allocator) {
+    GTEST_SKIP() << "Memory allocation failed but we aren't testing it.";
+  }
+
+  auto take_range_5_12 = allocator->TakeRange(5, 12);
+  // shall fail as 3 is missing
+  auto take_range_2_4 = allocator->TakeRange(2, 4);
+
+  ASSERT_TRUE(take_range_5_12);
+  ASSERT_FALSE(take_range_2_4);
+}
+
+TEST_F(CvdIdAllocatorTest, Reclaim) {
+  std::vector<unsigned> inputs{1, 2, 4, 5, 6, 7, 8, 9, 10, 11};
+  auto allocator = UniqueResourceAllocator<unsigned>::New(inputs);
+  if (!allocator) {
+    GTEST_SKIP() << "Memory allocation failed but we aren't testing it.";
+  }
+  unsigned one_resource = 0;
+  {
+    auto take_range_5_12 = allocator->TakeRange(5, 12);
+    auto any_single_item = allocator->UniqueItem();
+
+    ASSERT_TRUE(take_range_5_12);
+    ASSERT_TRUE(any_single_item);
+    one_resource = any_single_item->Get();
+
+    ASSERT_FALSE(allocator->TakeRange(5, 12));
+    ASSERT_FALSE(allocator->Take(one_resource));
+  }
+  // take_range_5_12 went out of scope, so resources were reclaimed
+  ASSERT_TRUE(allocator->TakeRange(5, 12));
+  ASSERT_TRUE(allocator->Take(one_resource));
+}
+
+TEST(CvdIdAllocatorExpandTest, Expand) {
+  std::vector<unsigned> inputs{1, 2, 3, 4, 5, 6, 7, 8, 9, 10};
+  auto allocator = UniqueResourceAllocator<unsigned>::New(inputs);
+  if (!allocator) {
+    GTEST_SKIP() << "Memory allocation failed but we aren't testing it.";
+  }
+  auto hold_6_to_10 = allocator->TakeRange(6, 11);
+  if (!hold_6_to_10) {
+    GTEST_SKIP() << "TakeRange(6, 11) failed but it's not what is tested here";
+  }
+
+  auto expand =
+      allocator->ExpandPool(std::vector<unsigned>{2, 4, 6, 8, 12, 14});
+  auto take_12 = allocator->Take(12);
+  auto take_14 = allocator->Take(14);
+  auto take_6 = allocator->Take(6);
+
+  std::vector<unsigned> expected_return_from_expand{2, 4, 6, 8};
+  ASSERT_EQ(expand, expected_return_from_expand);
+  ASSERT_TRUE(take_12);
+  ASSERT_TRUE(take_14);
+  ASSERT_FALSE(take_6);
+}
+
+}  // namespace cuttlefish
diff --git a/common/libs/utils/unique_resource_allocator_test.h b/common/libs/utils/unique_resource_allocator_test.h
new file mode 100644
index 0000000..82cd1c0
--- /dev/null
+++ b/common/libs/utils/unique_resource_allocator_test.h
@@ -0,0 +1,34 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#pragma once
+
+#include <vector>
+
+#include <gtest/gtest.h>
+
+namespace cuttlefish {
+
+// Get one unique item at a time
+class OneEachTest : public testing::TestWithParam<std::vector<unsigned>> {};
+
+/*
+ * ClaimAll, StrideBeyond, Consecutive, Take, TakeAll, TakeRange,
+ * Reclaim
+ *
+ */
+class CvdIdAllocatorTest : public testing::Test {};
+
+}  // namespace cuttlefish
diff --git a/common/libs/utils/unix_sockets_test.cpp b/common/libs/utils/unix_sockets_test.cpp
index 4475064..245747f 100644
--- a/common/libs/utils/unix_sockets_test.cpp
+++ b/common/libs/utils/unix_sockets_test.cpp
@@ -44,10 +44,10 @@
 
   UnixSocketMessage message;
   auto control1 = ControlMessage::FromFileDescriptors({memfd1});
-  ASSERT_TRUE(control1.ok()) << control1.error();
+  ASSERT_TRUE(control1.ok()) << control1.error().Trace();
   message.control.emplace_back(std::move(*control1));
   auto control2 = ControlMessage::FromFileDescriptors({memfd2});
-  ASSERT_TRUE(control2.ok()) << control2.error();
+  ASSERT_TRUE(control2.ok()) << control2.error().Trace();
   message.control.emplace_back(std::move(*control2));
 
   ASSERT_TRUE(message.HasFileDescriptors());
@@ -67,10 +67,10 @@
   auto [writer, reader] = UnixMessageSocketPair();
   UnixSocketMessage message_in = {{1, 2, 3}, {}};
   auto write_result = writer.WriteMessage(message_in);
-  ASSERT_TRUE(write_result.ok()) << write_result.error();
+  ASSERT_TRUE(write_result.ok()) << write_result.error().Trace();
 
   auto message_out = reader.ReadMessage();
-  ASSERT_TRUE(message_out.ok()) << message_out.error();
+  ASSERT_TRUE(message_out.ok()) << message_out.error().Trace();
   ASSERT_EQ(message_in.data, message_out->data);
   ASSERT_EQ(0, message_out->control.size());
 }
@@ -81,18 +81,18 @@
   UnixSocketMessage message_in = {{4, 5, 6}, {}};
   auto control_in =
       ControlMessage::FromFileDescriptors({CreateMemFDWithData("abc")});
-  ASSERT_TRUE(control_in.ok()) << control_in.error();
+  ASSERT_TRUE(control_in.ok()) << control_in.error().Trace();
   message_in.control.emplace_back(std::move(*control_in));
   auto write_result = writer.WriteMessage(message_in);
-  ASSERT_TRUE(write_result.ok()) << write_result.error();
+  ASSERT_TRUE(write_result.ok()) << write_result.error().Trace();
 
   auto message_out = reader.ReadMessage();
-  ASSERT_TRUE(message_out.ok()) << message_out.error();
+  ASSERT_TRUE(message_out.ok()) << message_out.error().Trace();
   ASSERT_EQ(message_in.data, message_out->data);
 
   ASSERT_EQ(1, message_out->control.size());
   auto fds_out = message_out->control[0].AsSharedFDs();
-  ASSERT_TRUE(fds_out.ok()) << fds_out.error();
+  ASSERT_TRUE(fds_out.ok()) << fds_out.error().Trace();
   ASSERT_EQ(1, fds_out->size());
   ASSERT_EQ("abc", ReadAllFDData((*fds_out)[0]));
 }
@@ -104,18 +104,18 @@
   auto [writer, reader] = UnixMessageSocketPair();
   UnixSocketMessage message_in = {{7, 8, 9}, {}};
   auto control_in = ControlMessage::FromFileDescriptors({memfd1, memfd2});
-  ASSERT_TRUE(control_in.ok()) << control_in.error();
+  ASSERT_TRUE(control_in.ok()) << control_in.error().Trace();
   message_in.control.emplace_back(std::move(*control_in));
   auto write_result = writer.WriteMessage(message_in);
-  ASSERT_TRUE(write_result.ok()) << write_result.error();
+  ASSERT_TRUE(write_result.ok()) << write_result.error().Trace();
 
   auto message_out = reader.ReadMessage();
-  ASSERT_TRUE(message_out.ok()) << message_out.error();
+  ASSERT_TRUE(message_out.ok()) << message_out.error().Trace();
   ASSERT_EQ(message_in.data, message_out->data);
 
   ASSERT_EQ(1, message_out->control.size());
   auto fds_out = message_out->control[0].AsSharedFDs();
-  ASSERT_TRUE(fds_out.ok()) << fds_out.error();
+  ASSERT_TRUE(fds_out.ok()) << fds_out.error().Trace();
   ASSERT_EQ(2, fds_out->size());
 
   ASSERT_EQ("abc", ReadAllFDData((*fds_out)[0]));
@@ -125,9 +125,9 @@
 TEST(UnixMessageSocket, SendCredentials) {
   auto [writer, reader] = UnixMessageSocketPair();
   auto writer_creds_status = writer.EnableCredentials(true);
-  ASSERT_TRUE(writer_creds_status.ok()) << writer_creds_status.error();
+  ASSERT_TRUE(writer_creds_status.ok()) << writer_creds_status.error().Trace();
   auto reader_creds_status = reader.EnableCredentials(true);
-  ASSERT_TRUE(reader_creds_status.ok()) << reader_creds_status.error();
+  ASSERT_TRUE(reader_creds_status.ok()) << reader_creds_status.error().Trace();
 
   ucred credentials_in;
   credentials_in.pid = getpid();
@@ -137,15 +137,15 @@
   auto control_in = ControlMessage::FromCredentials(credentials_in);
   message_in.control.emplace_back(std::move(control_in));
   auto write_result = writer.WriteMessage(message_in);
-  ASSERT_TRUE(write_result.ok()) << write_result.error();
+  ASSERT_TRUE(write_result.ok()) << write_result.error().Trace();
 
   auto message_out = reader.ReadMessage();
-  ASSERT_TRUE(message_out.ok()) << message_out.error();
+  ASSERT_TRUE(message_out.ok()) << message_out.error().Trace();
   ASSERT_EQ(message_in.data, message_out->data);
 
   ASSERT_EQ(1, message_out->control.size());
   auto credentials_out = message_out->control[0].AsCredentials();
-  ASSERT_TRUE(credentials_out.ok()) << credentials_out.error();
+  ASSERT_TRUE(credentials_out.ok()) << credentials_out.error().Trace();
   ASSERT_EQ(credentials_in.pid, credentials_out->pid);
   ASSERT_EQ(credentials_in.uid, credentials_out->uid);
   ASSERT_EQ(credentials_in.gid, credentials_out->gid);
@@ -154,9 +154,9 @@
 TEST(UnixMessageSocket, BadCredentialsBlocked) {
   auto [writer, reader] = UnixMessageSocketPair();
   auto writer_creds_status = writer.EnableCredentials(true);
-  ASSERT_TRUE(writer_creds_status.ok()) << writer_creds_status.error();
+  ASSERT_TRUE(writer_creds_status.ok()) << writer_creds_status.error().Trace();
   auto reader_creds_status = reader.EnableCredentials(true);
-  ASSERT_TRUE(reader_creds_status.ok()) << reader_creds_status.error();
+  ASSERT_TRUE(reader_creds_status.ok()) << reader_creds_status.error().Trace();
 
   ucred credentials_in;
   // This assumes the test is running without root privileges
@@ -168,27 +168,27 @@
   auto control_in = ControlMessage::FromCredentials(credentials_in);
   message_in.control.emplace_back(std::move(control_in));
   auto write_result = writer.WriteMessage(message_in);
-  ASSERT_FALSE(write_result.ok()) << write_result.error();
+  ASSERT_FALSE(write_result.ok()) << write_result.error().Trace();
 }
 
 TEST(UnixMessageSocket, AutoCredentials) {
   auto [writer, reader] = UnixMessageSocketPair();
   auto writer_creds_status = writer.EnableCredentials(true);
-  ASSERT_TRUE(writer_creds_status.ok()) << writer_creds_status.error();
+  ASSERT_TRUE(writer_creds_status.ok()) << writer_creds_status.error().Trace();
   auto reader_creds_status = reader.EnableCredentials(true);
-  ASSERT_TRUE(reader_creds_status.ok()) << reader_creds_status.error();
+  ASSERT_TRUE(reader_creds_status.ok()) << reader_creds_status.error().Trace();
 
   UnixSocketMessage message_in = {{3, 6, 9}, {}};
   auto write_result = writer.WriteMessage(message_in);
-  ASSERT_TRUE(write_result.ok()) << write_result.error();
+  ASSERT_TRUE(write_result.ok()) << write_result.error().Trace();
 
   auto message_out = reader.ReadMessage();
-  ASSERT_TRUE(message_out.ok()) << message_out.error();
+  ASSERT_TRUE(message_out.ok()) << message_out.error().Trace();
   ASSERT_EQ(message_in.data, message_out->data);
 
   ASSERT_EQ(1, message_out->control.size());
   auto credentials_out = message_out->control[0].AsCredentials();
-  ASSERT_TRUE(credentials_out.ok()) << credentials_out.error();
+  ASSERT_TRUE(credentials_out.ok()) << credentials_out.error().Trace();
   ASSERT_EQ(getpid(), credentials_out->pid);
   ASSERT_EQ(getuid(), credentials_out->uid);
   ASSERT_EQ(getgid(), credentials_out->gid);
diff --git a/common/libs/utils/users.cpp b/common/libs/utils/users.cpp
index a3699de..e8ff32e 100644
--- a/common/libs/utils/users.cpp
+++ b/common/libs/utils/users.cpp
@@ -17,19 +17,44 @@
 #include "common/libs/utils/users.h"
 
 #include <grp.h>
+#include <pwd.h>
+#include <sys/types.h>
 #include <unistd.h>
 
 #include <algorithm>
 #include <cerrno>
+#include <cstdlib>
 #include <cstring>
+#include <mutex>
 #include <ostream>
 #include <string>
 #include <vector>
 
+#include <android-base/file.h>
 #include <android-base/logging.h>
 
+#include "common/libs/utils/contains.h"
+
 namespace cuttlefish {
 namespace {
+std::vector<gid_t> GetSuplementaryGroups() {
+  int num_groups = getgroups(0, nullptr);
+  if (num_groups < 0) {
+    LOG(ERROR) << "Unable to get number of suplementary groups: "
+               << std::strerror(errno);
+    return {};
+  }
+  std::vector<gid_t> groups(num_groups + 1);
+  int retval = getgroups(groups.size(), groups.data());
+  if (retval < 0) {
+    LOG(ERROR) << "Error obtaining list of suplementary groups (list size: "
+               << groups.size() << "): " << std::strerror(errno);
+    return {};
+  }
+  return groups;
+}
+}  // namespace
+
 gid_t GroupIdFromName(const std::string& group_name) {
   struct group grp{};
   struct group* grp_p{};
@@ -57,24 +82,6 @@
   }
 }
 
-std::vector<gid_t> GetSuplementaryGroups() {
-  int num_groups = getgroups(0, nullptr);
-  if (num_groups < 0) {
-    LOG(ERROR) << "Unable to get number of suplementary groups: "
-               << std::strerror(errno);
-    return {};
-  }
-  std::vector<gid_t> groups(num_groups + 1);
-  int retval = getgroups(groups.size(), groups.data());
-  if (retval < 0) {
-    LOG(ERROR) << "Error obtaining list of suplementary groups (list size: "
-               << groups.size() << "): " << std::strerror(errno);
-    return {};
-  }
-  return groups;
-}
-}  // namespace
-
 bool InGroup(const std::string& group) {
   auto gid = GroupIdFromName(group);
   if (gid == static_cast<gid_t>(-1)) {
@@ -86,11 +93,33 @@
   }
 
   auto groups = GetSuplementaryGroups();
+  return Contains(groups, gid);
+}
 
-  if (std::find(groups.cbegin(), groups.cend(), gid) != groups.cend()) {
-    return true;
+Result<std::string> SystemWideUserHome(const uid_t uid) {
+  // getpwuid() is not thread-safe, so we need a lock across all calls
+  static std::mutex getpwuid_mutex;
+  std::string home_dir;
+  {
+    std::lock_guard<std::mutex> lock(getpwuid_mutex);
+    const auto entry = getpwuid(uid);
+    if (entry) {
+      home_dir = entry->pw_dir;
+    }
+    endpwent();
+    if (home_dir.empty()) {
+      return CF_ERRNO("Failed to find the home directory using " << uid);
+    }
   }
-  return false;
+  std::string home_realpath;
+  if (!android::base::Realpath(home_dir, &home_realpath)) {
+    return CF_ERRNO("Failed to convert " << home_dir << " to its Realpath");
+  }
+  return home_realpath;
+}
+
+Result<std::string> SystemWideUserHome() {
+  return SystemWideUserHome(getuid());
 }
 
 } // namespace cuttlefish
diff --git a/common/libs/utils/users.h b/common/libs/utils/users.h
index 16fcbeb..6b439d5 100644
--- a/common/libs/utils/users.h
+++ b/common/libs/utils/users.h
@@ -15,10 +15,27 @@
  */
 #pragma once
 
+#include <grp.h>
+
 #include <string>
 
+#include "common/libs/utils/result.h"
+
 namespace cuttlefish {
 
+gid_t GroupIdFromName(const std::string& group_name);
 bool InGroup(const std::string& group);
 
+/**
+ * returns the user's home defined by the system
+ *
+ * This is done not by using HOME but by calling getpwuid()
+ */
+Result<std::string> SystemWideUserHome(const uid_t uid);
+
+/**
+ * returns SystemWideUserHome(getuid())
+ */
+Result<std::string> SystemWideUserHome();
+
 }  // namespace cuttlefish
diff --git a/common/libs/utils/vsock_connection.cpp b/common/libs/utils/vsock_connection.cpp
index fbdb02b..9151cf1 100644
--- a/common/libs/utils/vsock_connection.cpp
+++ b/common/libs/utils/vsock_connection.cpp
@@ -129,7 +129,7 @@
 Json::Value VsockConnection::ReadJsonMessage() {
   auto msg = ReadMessage();
   Json::CharReaderBuilder builder;
-  Json::CharReader* reader = builder.newCharReader();
+  std::unique_ptr<Json::CharReader> reader(builder.newCharReader());
   Json::Value json_msg;
   std::string errors;
   if (!reader->parse(msg.data(), msg.data() + msg.size(), &json_msg, &errors)) {
diff --git a/default-permissions.xml b/default-permissions.xml
index 48b0f17..6a7c91b 100644
--- a/default-permissions.xml
+++ b/default-permissions.xml
@@ -54,8 +54,6 @@
         <permission name="android.permission.READ_CALL_LOG" fixed="false"/>
         <permission name="android.permission.WRITE_CALL_LOG" fixed="false"/>
         <!-- Used to set up a Wi-Fi P2P network -->
-        <!-- TODO(b/231966826): Remove the location permission after Restore targets to T. -->
-        <permission name="android.permission.ACCESS_FINE_LOCATION" fixed="false"/>
         <permission name="android.permission.NEARBY_WIFI_DEVICES" fixed="false"/>
         <!-- Notifications -->
         <permission name="android.permission.POST_NOTIFICATIONS" fixed="false"/>
@@ -133,4 +131,9 @@
         <!-- Notifications -->
         <permission name="android.permission.POST_NOTIFICATIONS" fixed="false"/>
     </exception>
+    <exception
+        package="com.google.android.euicc">
+        <!-- Notifications -->
+        <permission name="android.permission.POST_NOTIFICATIONS" fixed="false"/>
+    </exception>
 </exceptions>
diff --git a/fetcher.mk b/fetcher.mk
index e00b1df..724b2b6 100644
--- a/fetcher.mk
+++ b/fetcher.mk
@@ -1,12 +1,21 @@
-bin_path := $(notdir $(HOST_OUT_EXECUTABLES))
+bin_path := $(HOST_OUT_EXECUTABLES)
+ifeq ($(HOST_CROSS_OS)_$(HOST_CROSS_ARCH),linux_bionic_arm64)
+  bin_path := $(OUT_DIR)/host/$(HOST_CROSS_OS)-$(HOST_CROSS_ARCH)/bin
+endif
 
-fetcher_bin := $(HOST_OUT)/$(bin_path)/fetch_cvd
+cvd_bin := $(bin_path)/cvd
+fetcher_bin := $(bin_path)/fetch_cvd
 
 .PHONY: host_fetcher
 host_fetcher: $(fetcher_bin)
 
 # Build this by default when a developer types make
-droidcore: $(fetcher_bin)
+droidcore: $(cvd_bin) $(fetcher_bin)
 
 # Build and store them on the build server.
+$(call dist-for-goals, dist_files, $(cvd_bin))
 $(call dist-for-goals, dist_files, $(fetcher_bin))
+
+bin_path :=
+cvd_bin :=
+fetcher_bin :=
diff --git a/guest/commands/bt_vhci_forwarder/Android.bp b/guest/commands/bt_vhci_forwarder/Android.bp
index a9f63b1..03b7a05 100644
--- a/guest/commands/bt_vhci_forwarder/Android.bp
+++ b/guest/commands/bt_vhci_forwarder/Android.bp
@@ -1,3 +1,18 @@
+//
+// Copyright (C) 2021 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
 package {
     default_applicable_licenses: ["Android-Apache-2.0"],
 }
diff --git a/guest/commands/bt_vhci_forwarder/bt_vhci_forwarder.rc b/guest/commands/bt_vhci_forwarder/bt_vhci_forwarder.rc
index 8b7fb36..56fc58d 100644
--- a/guest/commands/bt_vhci_forwarder/bt_vhci_forwarder.rc
+++ b/guest/commands/bt_vhci_forwarder/bt_vhci_forwarder.rc
@@ -1,4 +1,4 @@
-on post-fs
+on property:vendor.dlkm.modules.ready=true
     start bt_vhci_forwarder
 
 service bt_vhci_forwarder /vendor/bin/bt_vhci_forwarder -virtio_console_dev=${vendor.ser.bt-uart}
diff --git a/guest/commands/bt_vhci_forwarder/main.cpp b/guest/commands/bt_vhci_forwarder/main.cpp
index 2f1aab2..faf6b55 100644
--- a/guest/commands/bt_vhci_forwarder/main.cpp
+++ b/guest/commands/bt_vhci_forwarder/main.cpp
@@ -98,8 +98,17 @@
   gflags::ParseCommandLineFlags(&argc, &argv, true);
 
   int vhci_fd = open(kVhciDev, O_RDWR);
+  if (vhci_fd < 0) {
+    PLOG(ERROR) << "Unable to open " << kVhciDev;
+  }
   int virtio_fd = open(FLAGS_virtio_console_dev.c_str(), O_RDWR);
-  setTerminalRaw(virtio_fd);
+  if (virtio_fd < 0) {
+    PLOG(ERROR) << "Unable to open " << FLAGS_virtio_console_dev;
+  }
+  int set_result = setTerminalRaw(virtio_fd);
+  if (set_result < 0) {
+    PLOG(ERROR) << "setTerminalRaw failed " << FLAGS_virtio_console_dev;
+  }
 
   struct pollfd fds[2];
 
@@ -128,6 +137,9 @@
         send(vhci_fd, HCI_ISODATA_PKT, raw_iso.data(), raw_iso.size());
       },
       []() { LOG(INFO) << "HCI socket device disconnected"; });
+
+  bool before_first_command = true;
+
   while (true) {
     int ret = TEMP_FAILURE_RETRY(poll(fds, 2, -1));
     if (ret < 0) {
@@ -141,6 +153,7 @@
       if (c < 0) {
         PLOG(ERROR) << "vhci to virtio-console failed";
       }
+      before_first_command = false;
     }
     if (fds[1].revents & POLLHUP) {
       LOG(ERROR) << "PollHUP";
@@ -148,6 +161,16 @@
       continue;
     }
     if (fds[1].revents & (POLLIN | POLLERR)) {
+      if (before_first_command) {
+        // Drop any data left in the virtio-console from a previous reset.
+        ssize_t bytes = TEMP_FAILURE_RETRY(read(virtio_fd, buf, kBufferSize));
+        if (bytes < 0) {
+          LOG(ERROR) << "virtio_fd ready, but read failed " << strerror(errno);
+        } else {
+          LOG(INFO) << "Discarding " << bytes << " bytes from virtio_fd.";
+        }
+        continue;
+      }
       // 'virtio-console to vhci' depends on H4Packetizer because vhci expects
       // full packet, but the data from virtio-console could be partial.
       h4.OnDataReady(virtio_fd);
diff --git a/guest/commands/dlkm_loader/dlkm_loader.cpp b/guest/commands/dlkm_loader/dlkm_loader.cpp
index 0d22225..95e58a3 100644
--- a/guest/commands/dlkm_loader/dlkm_loader.cpp
+++ b/guest/commands/dlkm_loader/dlkm_loader.cpp
@@ -16,11 +16,22 @@
 
 #include <android-base/logging.h>
 #include <modprobe/modprobe.h>
+#include "android-base/properties.h"
 
-int main(void) {
+int main(int, char **argv) {
+  android::base::InitLogging(argv, android::base::KernelLogger);
   LOG(INFO) << "dlkm loader successfully initialized";
   Modprobe m({"/vendor/lib/modules"}, "modules.load");
-  CHECK(m.LoadListedModules(true)) << "modules from vendor dlkm weren't loaded correctly";
+  // We should continue loading kernel modules even if some modules fail to
+  // load. If we abort loading early, the unloaded modules can cause more
+  // problems, making debugging hard.
+  // e.g. , bluetooth module break, but we
+  // might also see graphics problems, because graphics module gets loaded
+  // after bluetooth, and we aborted loading early.
+  CHECK(m.LoadListedModules(false))
+      << "modules from vendor dlkm weren't loaded correctly";
   LOG(INFO) << "module load count is " << m.GetModuleCount();
+
+  android::base::SetProperty("vendor.dlkm.modules.ready", "true");
   return 0;
 }
diff --git a/guest/commands/rename_netiface/rename_netiface.rc b/guest/commands/rename_netiface/rename_netiface.rc
index 5117320..167308a 100644
--- a/guest/commands/rename_netiface/rename_netiface.rc
+++ b/guest/commands/rename_netiface/rename_netiface.rc
@@ -1,2 +1,3 @@
 service rename_eth0 /vendor/bin/rename_netiface eth0 buried_eth0
+    user root
     oneshot
diff --git a/guest/commands/sensor_injection/Android.bp b/guest/commands/sensor_injection/Android.bp
index 665b116..7444137 100644
--- a/guest/commands/sensor_injection/Android.bp
+++ b/guest/commands/sensor_injection/Android.bp
@@ -1,3 +1,18 @@
+//
+// Copyright (C) 2020 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
 package {
     default_applicable_licenses: ["Android-Apache-2.0"],
 }
diff --git a/guest/commands/sensor_injection/main.cpp b/guest/commands/sensor_injection/main.cpp
index a78de33..1cf1c1a 100644
--- a/guest/commands/sensor_injection/main.cpp
+++ b/guest/commands/sensor_injection/main.cpp
@@ -14,13 +14,15 @@
  * limitations under the License.
  */
 
+#include <cmath>
+#include <thread>
+
 #include <android-base/chrono_utils.h>
 #include <android-base/logging.h>
 #include <android/binder_manager.h>
+#include <android-base/parseint.h>
 #include <utils/SystemClock.h>
 
-#include <thread>
-
 #include <aidl/android/hardware/sensors/BnSensors.h>
 
 using aidl::android::hardware::sensors::Event;
@@ -32,18 +34,15 @@
 std::shared_ptr<ISensors> startSensorInjection() {
   auto sensors = ISensors::fromBinder(ndk::SpAIBinder(
       AServiceManager_getService("android.hardware.sensors.ISensors/default")));
-  if (sensors == nullptr) {
-    LOG(FATAL) << "Unable to get ISensors.";
-  }
+  CHECK(sensors != nullptr) << "Unable to get ISensors.";
 
   // Place the ISensors HAL into DATA_INJECTION mode so that we can
   // inject events.
   auto result =
       sensors->setOperationMode(ISensors::OperationMode::DATA_INJECTION);
-  if (!result.isOk()) {
-    LOG(FATAL) << "Unable to set ISensors operation mode to DATA_INJECTION: "
-               << result.getDescription();
-  }
+  CHECK(result.isOk())
+      << "Unable to set ISensors operation mode to DATA_INJECTION: "
+      << result.getDescription();
 
   return sensors;
 }
@@ -53,34 +52,29 @@
   int handle = -1;
   std::vector<SensorInfo> sensors_list;
   auto result = sensors->getSensorsList(&sensors_list);
-  if (!result.isOk()) {
-    LOG(FATAL) << "Unable to get ISensors sensors list: "
-               << result.getDescription();
-  }
+  CHECK(result.isOk()) << "Unable to get ISensors sensors list: "
+                        << result.getDescription();
   for (const SensorInfo& sensor : sensors_list) {
     if (sensor.type == type) {
       handle = sensor.sensorHandle;
       break;
     }
   }
-  if (handle == -1) {
-    LOG(FATAL) << "Unable to find sensor.";
-  }
+  CHECK(handle != -1) << "Unable to find sensor.";
   return handle;
 }
 
 void endSensorInjection(const std::shared_ptr<ISensors> sensors) {
   // Return the ISensors HAL back to NORMAL mode.
   auto result = sensors->setOperationMode(ISensors::OperationMode::NORMAL);
-  if (!result.isOk()) {
-    LOG(FATAL) << "Unable to set sensors operation mode to NORMAL: "
-               << result.getDescription();
-  }
+  CHECK(result.isOk()) << "Unable to set sensors operation mode to NORMAL: "
+                       << result.getDescription();
 }
 
 // Inject ACCELEROMETER events to corresponding to a given physical
-// device orientation: portrait or landscape.
-void InjectOrientation(bool portrait) {
+// device position.
+void InjectOrientation(int rotationDeg) {
+  auto rad = M_PI * rotationDeg / 180.0;
   auto sensors = startSensorInjection();
   int handle = getSensorHandle(SensorType::ACCELEROMETER, sensors);
 
@@ -89,13 +83,11 @@
   event.sensorHandle = handle;
   event.sensorType = SensorType::ACCELEROMETER;
   Event::EventPayload::Vec3 vec3;
-  if (portrait) {
-    vec3.x = 0;
-    vec3.y = 9.2;
-  } else {
-    vec3.x = 9.2;
-    vec3.y = 0;
-  }
+  // (x^2 + y^2 + z^2)^1/2 = ~9.8 = 1G
+  vec3.x = 9.2 * std::sin(rad);
+  vec3.y = 9.2 * std::cos(rad);
+  // z is fixed at 3.5, meaning the device is positioned vertically with a
+  // slight inclination backwards.
   vec3.z = 3.5;
   vec3.status = SensorStatus::ACCURACY_HIGH;
   event.payload.set<Event::EventPayload::Tag::vec3>(vec3);
@@ -106,10 +98,8 @@
   while (timer.duration() < 1s) {
     event.timestamp = android::elapsedRealtimeNano();
     auto result = sensors->injectSensorData(event);
-    if (!result.isOk()) {
-      LOG(FATAL) << "Unable to inject ISensors accelerometer event: "
-                 << result.getDescription();
-    }
+    CHECK(result.isOk()) << "Unable to inject ISensors accelerometer event: "
+                         << result.getDescription();
     std::this_thread::sleep_for(10ms);
   }
 
@@ -129,35 +119,26 @@
   event.timestamp = android::elapsedRealtimeNano();
 
   auto result = sensors->injectSensorData(event);
-  if (!result.isOk()) {
-    LOG(FATAL) << "Unable to inject HINGE_ANGLE data"
-               << result.getDescription();
-  }
+  CHECK(result.isOk()) << "Unable to inject HINGE_ANGLE data"
+                       << result.getDescription();
 
   endSensorInjection(sensors);
 }
 
 int main(int argc, char** argv) {
-  if (argc == 2) {
-    LOG(FATAL) << "Expected command line args 'rotate <portrait|landscape>' or "
-                  "'hinge_angle <value>'";
-  }
+  CHECK(argc == 3)
+      << "Expected command line args 'rotate <angle>' or 'hinge_angle <value>'";
 
   if (!strcmp(argv[1], "rotate")) {
-    bool portrait = true;
-    if (!strcmp(argv[2], "portrait")) {
-      portrait = true;
-    } else if (!strcmp(argv[2], "landscape")) {
-      portrait = false;
-    } else {
-      LOG(FATAL) << "Expected command line arg 'portrait' or 'landscape'";
-    }
-    InjectOrientation(portrait);
+    int rotationDeg;
+    CHECK(android::base::ParseInt(argv[2], &rotationDeg))
+        << "Rotation angle must be an integer";
+    InjectOrientation(rotationDeg);
   } else if (!strcmp(argv[1], "hinge_angle")) {
-    int angle = std::stoi(argv[2]);
-    if (angle < 0 || angle > 360) {
-      LOG(FATAL) << "Bad hinge_angle value: " << argv[2];
-    }
+    int angle;
+    CHECK(android::base::ParseInt(argv[2], &angle))
+        << "Hinge angle must be an integer";
+    CHECK(angle >= 0 && angle <= 360) << "Bad hinge_angle value: " << argv[2];
     InjectHingeAngle(angle);
   } else {
     LOG(FATAL) << "Unknown arg: " << argv[1];
diff --git a/guest/commands/setup_wifi/main.cpp b/guest/commands/setup_wifi/main.cpp
index 4a91442..2727fb4 100644
--- a/guest/commands/setup_wifi/main.cpp
+++ b/guest/commands/setup_wifi/main.cpp
@@ -33,6 +33,7 @@
 #include "common/libs/net/network_interface_manager.h"
 
 DEFINE_string(mac_prefix, "", "mac prefix to use for wlan0");
+DEFINE_string(interface, "eth2", "interface to create wlan wrapper on");
 
 static std::array<unsigned char, 6> prefix_to_mac(
     const std::string& mac_prefix) {
@@ -138,9 +139,9 @@
 
   gflags::ParseCommandLineFlags(&argc, &argv, true);
 
-  int renamed_eth2 = RenameNetwork("eth2", "buried_eth2");
-  if (renamed_eth2 != 0) {
-    return renamed_eth2;
+  int renamed_if = RenameNetwork(FLAGS_interface, "buried_" + FLAGS_interface);
+  if (renamed_if != 0) {
+    return renamed_if;
   }
-  return CreateWifiWrapper("buried_eth2", "wlan0");
+  return CreateWifiWrapper("buried_" + FLAGS_interface, "wlan0");
 }
diff --git a/guest/commands/setup_wifi/setup_wifi.rc b/guest/commands/setup_wifi/setup_wifi.rc
index a2f1eb5..c6e2785 100644
--- a/guest/commands/setup_wifi/setup_wifi.rc
+++ b/guest/commands/setup_wifi/setup_wifi.rc
@@ -1,2 +1,3 @@
-service setup_wifi /vendor/bin/setup_wifi
+service setup_wifi /vendor/bin/setup_wifi --interface=${ro.vendor.virtwifi.port}
     oneshot
+    user root
diff --git a/guest/hals/audio/effects/manifest.xml b/guest/hals/audio/effects/manifest.xml
new file mode 100644
index 0000000..6b4e832
--- /dev/null
+++ b/guest/hals/audio/effects/manifest.xml
@@ -0,0 +1,22 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+/*
+** Copyright 2022, The Android Open Source Project.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** limitations under the License.
+*/
+-->
+
+<manifest version="1.0" type="device">
+    <hal format="hidl">
+        <name>android.hardware.audio.effect</name>
+        <transport>hwbinder</transport>
+        <version>7.0</version>
+        <interface>
+            <name>IEffectsFactory</name>
+            <instance>default</instance>
+        </interface>
+    </hal>
+</manifest>
diff --git a/guest/hals/bt/data/Android.bp b/guest/hals/bt/data/Android.bp
index 845223f..2ad6085 100644
--- a/guest/hals/bt/data/Android.bp
+++ b/guest/hals/bt/data/Android.bp
@@ -1,3 +1,18 @@
+//
+// Copyright (C) 2021 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
 package {
     default_applicable_licenses: ["Android-Apache-2.0"],
 }
diff --git a/guest/hals/bt/remote/Android.bp b/guest/hals/bt/remote/Android.bp
deleted file mode 100644
index 512bee5..0000000
--- a/guest/hals/bt/remote/Android.bp
+++ /dev/null
@@ -1,31 +0,0 @@
-package {
-    default_applicable_licenses: ["Android-Apache-2.0"],
-}
-
-cc_binary {
-    name: "android.hardware.bluetooth@1.1-service.remote",
-    defaults: ["cuttlefish_guest_only", "hidl_defaults"],
-    vendor: true,
-    relative_install_path: "hw",
-    srcs: [
-        "remote_bluetooth.cpp",
-        "service.cpp",
-    ],
-
-    shared_libs: [
-        "android.hardware.bluetooth@1.0",
-        "android.hardware.bluetooth@1.1",
-        "libcuttlefish_fs",
-        "libbase",
-        "libhidlbase",
-        "libutils",
-        "liblog",
-        "libcutils",
-        "libprotobuf-cpp-lite",
-    ],
-    static_libs: [
-        "libbt-rootcanal",
-        "async_fd_watcher",
-    ],
-    init_rc: ["android.hardware.bluetooth@1.1-service.remote.rc"],
-}
diff --git a/guest/hals/bt/remote/android.hardware.bluetooth@1.1-service.remote.rc b/guest/hals/bt/remote/android.hardware.bluetooth@1.1-service.remote.rc
deleted file mode 100644
index f4ed9e24..0000000
--- a/guest/hals/bt/remote/android.hardware.bluetooth@1.1-service.remote.rc
+++ /dev/null
@@ -1,4 +0,0 @@
-service vendor.bluetooth-1-1 /vendor/bin/hw/android.hardware.bluetooth@1.1-service.remote
-    class hal
-    user bluetooth
-    group bluetooth
diff --git a/guest/hals/bt/remote/remote_bluetooth.cpp b/guest/hals/bt/remote/remote_bluetooth.cpp
deleted file mode 100644
index d041af7..0000000
--- a/guest/hals/bt/remote/remote_bluetooth.cpp
+++ /dev/null
@@ -1,195 +0,0 @@
-//
-// Copyright 2021 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-#define LOG_TAG "android.hardware.bluetooth@1.1.remote"
-
-#include "remote_bluetooth.h"
-
-#include <cutils/properties.h>
-#include <fcntl.h>
-#include <netdb.h>
-#include <netinet/in.h>
-#include <poll.h>
-#include <string.h>
-#include <sys/uio.h>
-#include <termios.h>
-#include <utils/Log.h>
-#include "log/log.h"
-
-namespace {
-int SetTerminalRaw(int fd) {
-  termios terminal_settings;
-  int rval = tcgetattr(fd, &terminal_settings);
-  if (rval < 0) {
-    return rval;
-  }
-  cfmakeraw(&terminal_settings);
-  rval = tcsetattr(fd, TCSANOW, &terminal_settings);
-  return rval;
-}
-}  // namespace
-
-namespace android {
-namespace hardware {
-namespace bluetooth {
-namespace V1_1 {
-namespace remote {
-
-using ::android::hardware::hidl_vec;
-
-class BluetoothDeathRecipient : public hidl_death_recipient {
- public:
-  BluetoothDeathRecipient(const sp<IBluetoothHci> hci) : mHci(hci) {}
-
-  void serviceDied(
-      uint64_t /* cookie */,
-      const wp<::android::hidl::base::V1_0::IBase>& /* who */) override {
-    LOG(ERROR)
-        << "BluetoothDeathRecipient::serviceDied - Bluetooth service died";
-    has_died_ = true;
-    mHci->close();
-  }
-  sp<IBluetoothHci> mHci;
-  bool getHasDied() const { return has_died_; }
-  void setHasDied(bool has_died) { has_died_ = has_died; }
-
- private:
-  bool has_died_;
-};
-
-BluetoothHci::BluetoothHci(const std::string& dev_path)
-    : death_recipient_(new BluetoothDeathRecipient(this)),
-      dev_path_(dev_path) {}
-
-Return<void> BluetoothHci::initialize(
-    const sp<V1_0::IBluetoothHciCallbacks>& cb) {
-  return initialize_impl(cb, nullptr);
-}
-
-Return<void> BluetoothHci::initialize_1_1(
-    const sp<V1_1::IBluetoothHciCallbacks>& cb) {
-  return initialize_impl(cb, cb);
-}
-
-Return<void> BluetoothHci::initialize_impl(
-    const sp<V1_0::IBluetoothHciCallbacks>& cb,
-    const sp<V1_1::IBluetoothHciCallbacks>& cb_1_1) {
-  LOG(INFO) << __func__;
-
-  cb_ = cb;
-  cb_1_1_ = cb_1_1;
-  fd_ = open(dev_path_.c_str(), O_RDWR);
-  if (fd_ < 0) {
-    LOG(FATAL) << "Could not connect to bt: " << fd_;
-  }
-  if (int ret = SetTerminalRaw(fd_) < 0) {
-    LOG(FATAL) << "Could not make " << fd_ << " a raw terminal: " << ret;
-  }
-
-  if (cb == nullptr) {
-    LOG(ERROR)
-        << "cb == nullptr! -> Unable to call initializationComplete(ERR)";
-    return Void();
-  }
-
-  death_recipient_->setHasDied(false);
-  auto link_ret = cb->linkToDeath(death_recipient_, 0);
-
-  unlink_cb_ = [this, cb](sp<BluetoothDeathRecipient>& death_recipient) {
-    if (death_recipient->getHasDied())
-      LOG(INFO) << "Skipping unlink call, service died.";
-    else {
-      auto ret = cb->unlinkToDeath(death_recipient);
-      if (!ret.isOk()) {
-        CHECK(death_recipient_->getHasDied())
-            << "Error calling unlink, but no death notification.";
-      }
-    }
-  };
-
-  auto init_ret = cb->initializationComplete(V1_0::Status::SUCCESS);
-  if (!init_ret.isOk()) {
-    CHECK(death_recipient_->getHasDied())
-        << "Error sending init callback, but no death notification.";
-  }
-  h4_ = rootcanal::H4Packetizer(
-      fd_,
-      [](const std::vector<uint8_t>& /* raw_command */) {
-        LOG_ALWAYS_FATAL("Unexpected command!");
-      },
-      [this](const std::vector<uint8_t>& raw_event) {
-        cb_->hciEventReceived(hidl_vec<uint8_t>(raw_event));
-      },
-      [this](const std::vector<uint8_t>& raw_acl) {
-        cb_->hciEventReceived(hidl_vec<uint8_t>(raw_acl));
-      },
-      [this](const std::vector<uint8_t>& raw_sco) {
-        cb_->hciEventReceived(hidl_vec<uint8_t>(raw_sco));
-      },
-      [this](const std::vector<uint8_t>& raw_iso) {
-        if (cb_1_1_) {
-          cb_1_1_->hciEventReceived(hidl_vec<uint8_t>(raw_iso));
-        }
-      },
-      []() { LOG(INFO) << "HCI socket device disconnected"; });
-  fd_watcher_.WatchFdForNonBlockingReads(
-      fd_, [this](int fd) { h4_.OnDataReady(fd); });
-  return Void();
-}
-
-Return<void> BluetoothHci::close() {
-  LOG(INFO) << __func__;
-  fd_watcher_.StopWatchingFileDescriptors();
-  ::close(fd_);
-
-  return Void();
-}
-
-Return<void> BluetoothHci::sendHciCommand(const hidl_vec<uint8_t>& packet) {
-  send(rootcanal::PacketType::COMMAND, packet);
-  return Void();
-}
-
-Return<void> BluetoothHci::sendAclData(const hidl_vec<uint8_t>& packet) {
-  send(rootcanal::PacketType::ACL, packet);
-  return Void();
-}
-
-Return<void> BluetoothHci::sendScoData(const hidl_vec<uint8_t>& packet) {
-  send(rootcanal::PacketType::SCO, packet);
-  return Void();
-}
-
-Return<void> BluetoothHci::sendIsoData(const hidl_vec<uint8_t>& packet) {
-  send(rootcanal::PacketType::ISO, packet);
-  return Void();
-}
-
-void BluetoothHci::send(rootcanal::PacketType type,
-                        const ::android::hardware::hidl_vec<uint8_t>& v) {
-  h4_.Send(static_cast<uint8_t>(type), v.data(), v.size());
-}
-
-/* Fallback to shared library if there is no service. */
-IBluetoothHci* HIDL_FETCH_IBluetoothHci(const char* /* name */) {
-  return new BluetoothHci();
-}
-
-}  // namespace remote
-}  // namespace V1_1
-}  // namespace bluetooth
-}  // namespace hardware
-}  // namespace android
diff --git a/guest/hals/bt/remote/remote_bluetooth.h b/guest/hals/bt/remote/remote_bluetooth.h
deleted file mode 100644
index ade39d0..0000000
--- a/guest/hals/bt/remote/remote_bluetooth.h
+++ /dev/null
@@ -1,101 +0,0 @@
-//
-// Copyright 2021 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-#pragma once
-
-#include <android/hardware/bluetooth/1.1/IBluetoothHci.h>
-
-#include <android-base/logging.h>
-#include <hidl/MQDescriptor.h>
-#include <string>
-#include "async_fd_watcher.h"
-#include "model/hci/h4_packetizer.h"
-
-namespace android {
-namespace hardware {
-namespace bluetooth {
-namespace V1_1 {
-namespace remote {
-
-class BluetoothDeathRecipient;
-
-// This Bluetooth HAL implementation is connected with the root-canal process in
-// the host side via virtio-console device(refer to dev_path_). It receives and
-// deliver responses and requests from/to Bluetooth HAL.
-class BluetoothHci : public IBluetoothHci {
- public:
-  // virtio-console device connected with root-canal in the host side.
-  BluetoothHci(const std::string& dev_path = "/dev/hvc5");
-
-  ::android::hardware::Return<void> initialize(
-      const sp<V1_0::IBluetoothHciCallbacks>& cb) override;
-  ::android::hardware::Return<void> initialize_1_1(
-      const sp<V1_1::IBluetoothHciCallbacks>& cb) override;
-
-  ::android::hardware::Return<void> sendHciCommand(
-      const ::android::hardware::hidl_vec<uint8_t>& packet) override;
-
-  ::android::hardware::Return<void> sendAclData(
-      const ::android::hardware::hidl_vec<uint8_t>& packet) override;
-
-  ::android::hardware::Return<void> sendScoData(
-      const ::android::hardware::hidl_vec<uint8_t>& packet) override;
-
-  ::android::hardware::Return<void> sendIsoData(
-      const ::android::hardware::hidl_vec<uint8_t>& packet) override;
-
-  ::android::hardware::Return<void> close() override;
-
-  static void OnPacketReady();
-
-  static BluetoothHci* get();
-
- private:
-  int fd_{-1};
-  ::android::sp<V1_0::IBluetoothHciCallbacks> cb_ = nullptr;
-  ::android::sp<V1_1::IBluetoothHciCallbacks> cb_1_1_ = nullptr;
-
-  rootcanal::H4Packetizer h4_{fd_,
-                              [](const std::vector<uint8_t>&) {},
-                              [](const std::vector<uint8_t>&) {},
-                              [](const std::vector<uint8_t>&) {},
-                              [](const std::vector<uint8_t>&) {},
-                              [](const std::vector<uint8_t>&) {},
-                              [] {}};
-
-  ::android::hardware::Return<void> initialize_impl(
-      const sp<V1_0::IBluetoothHciCallbacks>& cb,
-      const sp<V1_1::IBluetoothHciCallbacks>& cb_1_1);
-
-  sp<BluetoothDeathRecipient> death_recipient_;
-
-  const std::string dev_path_;
-
-  std::function<void(sp<BluetoothDeathRecipient>&)> unlink_cb_;
-
-  ::android::hardware::bluetooth::async::AsyncFdWatcher fd_watcher_;
-
-  void send(rootcanal::PacketType type,
-            const ::android::hardware::hidl_vec<uint8_t>& packet);
-};
-
-extern "C" IBluetoothHci* HIDL_FETCH_IBluetoothHci(const char* name);
-
-}  // namespace remote
-}  // namespace V1_1
-}  // namespace bluetooth
-}  // namespace hardware
-}  // namespace android
diff --git a/guest/hals/bt/remote/service.cpp b/guest/hals/bt/remote/service.cpp
deleted file mode 100644
index 10ed3bc..0000000
--- a/guest/hals/bt/remote/service.cpp
+++ /dev/null
@@ -1,39 +0,0 @@
-//
-// Copyright 2021 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-#define LOG_TAG "android.hardware.bluetooth@1.1-service.remote"
-
-#include <android/hardware/bluetooth/1.1/IBluetoothHci.h>
-#include <hidl/HidlSupport.h>
-#include <hidl/HidlTransportSupport.h>
-
-#include "remote_bluetooth.h"
-
-using ::android::sp;
-using ::android::hardware::configureRpcThreadpool;
-using ::android::hardware::joinRpcThreadpool;
-using ::android::hardware::bluetooth::V1_1::IBluetoothHci;
-using ::android::hardware::bluetooth::V1_1::remote::BluetoothHci;
-
-int main(int /* argc */, char** /* argv */) {
-  sp<IBluetoothHci> bluetooth = new BluetoothHci();
-  configureRpcThreadpool(1, true);
-  android::status_t status = bluetooth->registerAsService();
-  if (status == android::OK)
-    joinRpcThreadpool();
-  else
-    LOG(ERROR) << "Could not register as a service!";
-}
diff --git a/guest/hals/camera/Android.bp b/guest/hals/camera/Android.bp
index 7af1542..ce4c46c 100644
--- a/guest/hals/camera/Android.bp
+++ b/guest/hals/camera/Android.bp
@@ -1,3 +1,18 @@
+//
+// Copyright (C) 2021 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
 package {
     default_applicable_licenses: ["Android-Apache-2.0"],
 }
diff --git a/guest/hals/camera/vsock_camera_device_session_3_4.cpp b/guest/hals/camera/vsock_camera_device_session_3_4.cpp
index 9c0b597..2fabc70 100644
--- a/guest/hals/camera/vsock_camera_device_session_3_4.cpp
+++ b/guest/hals/camera/vsock_camera_device_session_3_4.cpp
@@ -341,6 +341,7 @@
   }
 
   std::vector<uint64_t> buffer_ids;
+  buffer_ids.reserve(request.outputBuffers.size());
   for (size_t i = 0; i < request.outputBuffers.size(); i++) {
     buffer_cache_.update(request.outputBuffers[i]);
     buffer_ids.emplace_back(request.outputBuffers[i].bufferId);
diff --git a/guest/hals/confirmationui/Android.bp b/guest/hals/confirmationui/Android.bp
index cb9318a..62e0422 100644
--- a/guest/hals/confirmationui/Android.bp
+++ b/guest/hals/confirmationui/Android.bp
@@ -24,15 +24,16 @@
 }
 
 cc_binary {
-    name: "android.hardware.confirmationui@1.0-service.cuttlefish",
-    defaults: ["hidl_defaults", "cuttlefish_guest_only"],
+    name: "android.hardware.confirmationui-service.cuttlefish",
+    defaults: ["cuttlefish_guest_only"],
     relative_install_path: "hw",
     vendor: true,
     shared_libs: [
-        "android.hardware.confirmationui@1.0",
-        "android.hardware.confirmationui@1.0-lib.cuttlefish",
+        "android.hardware.confirmationui-V1-ndk",
+        "android.hardware.confirmationui-lib.cuttlefish",
         "libbase",
-        "libhidlbase",
+        "libbinder_ndk",
+        "libteeui_hal_support",
         "libutils",
     ],
     static_libs: [
@@ -55,16 +56,18 @@
 }
 
 cc_library {
-    name: "android.hardware.confirmationui@1.0-lib.cuttlefish",
-    defaults: ["hidl_defaults", "cuttlefish_guest_only"],
+    name: "android.hardware.confirmationui-lib.cuttlefish",
+    defaults: [
+        "cuttlefish_guest_only",
+        "keymint_use_latest_hal_aidl_ndk_shared",
+    ],
     vendor: true,
     shared_libs: [
-        "android.hardware.confirmationui@1.0",
-        "android.hardware.keymaster@4.0",
+        "android.hardware.confirmationui-V1-ndk",
         "libbase",
+        "libbinder_ndk",
         "libcutils",
         "libdmabufheap",
-        "libhidlbase",
         "libteeui_hal_support",
         "libtrusty",
         "libutils",
@@ -86,4 +89,3 @@
         "-DTEEUI_USE_STD_VECTOR",
     ],
 }
-
diff --git a/guest/hals/confirmationui/TrustyConfirmationUI.cpp b/guest/hals/confirmationui/TrustyConfirmationUI.cpp
index 7854332..827bbd6 100644
--- a/guest/hals/confirmationui/TrustyConfirmationUI.cpp
+++ b/guest/hals/confirmationui/TrustyConfirmationUI.cpp
@@ -17,38 +17,33 @@
 
 #include "TrustyConfirmationUI.h"
 
+#include <android/binder_manager.h>
 #include <cutils/properties.h>
 
-namespace android {
-namespace hardware {
-namespace confirmationui {
-namespace V1_0 {
-namespace implementation {
-
+namespace aidl::android::hardware::confirmationui {
 using ::teeui::MsgString;
 using ::teeui::MsgVector;
-using ::android::hardware::keymaster::V4_0::HardwareAuthToken;
 using TeeuiRc = ::teeui::ResponseCode;
 
 namespace {
 teeui::UIOption convertUIOption(UIOption uio) {
-    static_assert(uint32_t(UIOption::AccessibilityInverted) ==
+    static_assert(uint32_t(UIOption::ACCESSIBILITY_INVERTED) ==
                           uint32_t(teeui::UIOption::AccessibilityInverted) &&
-                      uint32_t(UIOption::AccessibilityMagnified) ==
+                      uint32_t(UIOption::ACCESSIBILITY_MAGNIFIED) ==
                           uint32_t(teeui::UIOption::AccessibilityMagnified),
                   "teeui::UIOPtion and ::android::hardware::confirmationui::V1_0::UIOption "
                   "are out of sync");
     return teeui::UIOption(uio);
 }
 
-inline MsgString hidl2MsgString(const hidl_string& s) {
+inline MsgString str2MsgString(const string& s) {
     return {s.c_str(), s.c_str() + s.size()};
 }
-template <typename T> inline MsgVector<T> hidl2MsgVector(const hidl_vec<T>& v) {
+template <typename T> inline MsgVector<T> vec2MsgVector(const vector<T>& v) {
     return {v};
 }
 
-inline MsgVector<teeui::UIOption> hidl2MsgVector(const hidl_vec<UIOption>& v) {
+inline MsgVector<teeui::UIOption> vec2MsgVector(const vector<UIOption>& v) {
     MsgVector<teeui::UIOption> result(v.size());
     for (unsigned int i = 0; i < v.size(); ++i) {
         result[i] = convertUIOption(v[i]);
@@ -57,28 +52,25 @@
 }
 }  // namespace
 
-cuttlefish::SharedFD TrustyConfirmationUI::ConnectToHost() {
-    using namespace std::chrono_literals;
-    while (true) {
-        auto host_fd = cuttlefish::SharedFD::VsockClient(2, host_vsock_port_, SOCK_STREAM);
-        if (host_fd->IsOpen()) {
-            ConfUiLog(INFO) << "Client connection is established";
-            return host_fd;
-        }
-        ConfUiLog(INFO) << "host service is not on. Sleep for 500 ms";
-        std::this_thread::sleep_for(500ms);
-    }
+const char* TrustyConfirmationUI::GetVirtioConsoleDevicePath() {
+    static char device_path[] = "/dev/hvc8";
+    return device_path;
 }
 
 TrustyConfirmationUI::TrustyConfirmationUI()
     : listener_state_(ListenerState::None),
-      prompt_result_(ResponseCode::Ignored), host_vsock_port_{static_cast<int>(property_get_int64(
-                                                 "ro.boot.vsock_confirmationui_port", 7700))},
-      current_session_id_{10} {
-    ConfUiLog(INFO) << "Connecting to Confirmation UI host listening on port " << host_vsock_port_;
-    host_fd_ = ConnectToHost();
-    auto fetching_cmd = [this]() { HostMessageFetcherLoop(); };
+      prompt_result_(IConfirmationUI::IGNORED), current_session_id_{10} {
+    host_fd_ = cuttlefish::SharedFD::Open(GetVirtioConsoleDevicePath(), O_RDWR);
+    CHECK(host_fd_->IsOpen()) << "ConfUI: " << GetVirtioConsoleDevicePath() << " is not open.";
+    CHECK(host_fd_->SetTerminalRaw() >= 0)
+        << "ConfUI: " << GetVirtioConsoleDevicePath() << " fail in SetTerminalRaw()";
+
+    constexpr static const auto enable_confirmationui_property = "ro.boot.enable_confirmationui";
+    const auto arg = property_get_int32(enable_confirmationui_property, -1);
+    is_supported_vm_ = (arg == 1);
+
     if (host_fd_->IsOpen()) {
+        auto fetching_cmd = [this]() { HostMessageFetcherLoop(); };
         host_cmd_fetcher_thread_ = std::thread(fetching_cmd);
     }
 }
@@ -103,9 +95,12 @@
             ConfUiLog(ERROR) << "host_fd_ is not open";
             return;
         }
+        ConfUiLog(INFO) << "Trying to fetch command";
         auto msg = cuttlefish::confui::RecvConfUiMsg(host_fd_);
+        ConfUiLog(INFO) << "RecvConfUiMsg() returned";
         if (!msg) {
-            // socket is broken for now
+            // virtio-console is broken for now
+            ConfUiLog(ERROR) << "received message was null";
             return;
         }
         {
@@ -126,17 +121,17 @@
     }
 }
 
-void TrustyConfirmationUI::RunSession(sp<IConfirmationResultCallback> resultCB,
-                                      hidl_string promptText, hidl_vec<uint8_t> extraData,
-                                      hidl_string locale, hidl_vec<UIOption> uiOptions) {
+void TrustyConfirmationUI::RunSession(shared_ptr<IConfirmationResultCallback> resultCB,
+                                      string promptText, vector<uint8_t> extraData, string locale,
+                                      vector<UIOption> uiOptions) {
     cuttlefish::SharedFD fd = host_fd_;
     // ownership of the fd is passed to GuestSession
     {
         std::unique_lock<std::mutex> lk(current_session_lock_);
         current_session_ = std::make_unique<GuestSession>(
             current_session_id_, listener_state_, listener_state_lock_, listener_state_condv_, fd,
-            hidl2MsgString(promptText), hidl2MsgVector(extraData), hidl2MsgString(locale),
-            hidl2MsgVector(uiOptions));
+            str2MsgString(promptText), vec2MsgVector(extraData), str2MsgString(locale),
+            vec2MsgVector(uiOptions));
     }
 
     auto [rc, msg, token] = current_session_->PromptUserConfirmation();
@@ -151,7 +146,12 @@
     if (do_callback) {
         auto error = resultCB->result(prompt_result_, msg, token);
         if (!error.isOk()) {
-            ConfUiLog(ERROR) << "Result callback failed " << error.description();
+            if (error.getExceptionCode() == EX_SERVICE_SPECIFIC) {
+                ConfUiLog(ERROR) << "Result callback failed error: "
+                                 << error.getServiceSpecificError();
+            } else {
+                ConfUiLog(ERROR) << "Result callback failed error: " << error.getStatus();
+            }
         }
         ConfUiLog(INFO) << "Result callback returned.";
     } else {
@@ -161,15 +161,18 @@
 
 // Methods from ::android::hardware::confirmationui::V1_0::IConfirmationUI
 // follow.
-Return<ResponseCode> TrustyConfirmationUI::promptUserConfirmation(
-    const sp<IConfirmationResultCallback>& resultCB, const hidl_string& promptText,
-    const hidl_vec<uint8_t>& extraData, const hidl_string& locale,
-    const hidl_vec<UIOption>& uiOptions) {
+::ndk::ScopedAStatus TrustyConfirmationUI::promptUserConfirmation(
+    const shared_ptr<IConfirmationResultCallback>& resultCB, const vector<uint8_t>& promptTextBytes,
+    const vector<uint8_t>& extraData, const string& locale, const vector<UIOption>& uiOptions) {
     std::unique_lock<std::mutex> stateLock(listener_state_lock_, std::defer_lock);
     ConfUiLog(INFO) << "promptUserConfirmation is called";
-
+    string promptText(promptTextBytes.begin(), promptTextBytes.end());
+    if (!is_supported_vm_) {
+        return ndk::ScopedAStatus(AStatus_fromServiceSpecificError(IConfirmationUI::UNIMPLEMENTED));
+    }
     if (!stateLock.try_lock()) {
-        return ResponseCode::OperationPending;
+        return ndk::ScopedAStatus(
+            AStatus_fromServiceSpecificError(IConfirmationUI::OPERATION_PENDING));
     }
     switch (listener_state_) {
     case ListenerState::None:
@@ -177,22 +180,22 @@
     case ListenerState::Starting:
     case ListenerState::SetupDone:
     case ListenerState::Interactive:
-        return ResponseCode::OperationPending;
+        return ndk::ScopedAStatus(
+            AStatus_fromServiceSpecificError(IConfirmationUI::OPERATION_PENDING));
     case ListenerState::Terminating:
         callback_thread_.join();
         listener_state_ = ListenerState::None;
         break;
     default:
-        return ResponseCode::Unexpected;
+        return ndk::ScopedAStatus(AStatus_fromServiceSpecificError(IConfirmationUI::UNEXPECTED));
     }
     assert(listener_state_ == ListenerState::None);
     listener_state_ = ListenerState::Starting;
-    ConfUiLog(INFO) << "Per promptUserConfirmation, "
-                    << "an active TEE UI session starts";
+
     current_session_id_++;
-    auto worker = [this](const sp<IConfirmationResultCallback>& resultCB,
-                         const hidl_string& promptText, const hidl_vec<uint8_t>& extraData,
-                         const hidl_string& locale, const hidl_vec<UIOption>& uiOptions) {
+    auto worker = [this](const shared_ptr<IConfirmationResultCallback>& resultCB,
+                         const string& promptText, const vector<uint8_t>& extraData,
+                         const string& locale, const vector<UIOption>& uiOptions) {
         RunSession(resultCB, promptText, extraData, locale, uiOptions);
     };
     callback_thread_ = std::thread(worker, resultCB, promptText, extraData, locale, uiOptions);
@@ -205,44 +208,45 @@
     if (listener_state_ == ListenerState::Terminating) {
         callback_thread_.join();
         listener_state_ = ListenerState::None;
-        if (prompt_result_ == ResponseCode::Canceled) {
+        if (prompt_result_ == IConfirmationUI::CANCELED) {
             // VTS expects this
-            return ResponseCode::OK;
+            return ndk::ScopedAStatus::ok();
         }
-        return prompt_result_;
+        return ndk::ScopedAStatus(AStatus_fromServiceSpecificError(prompt_result_));
     }
-    return ResponseCode::OK;
+    return ndk::ScopedAStatus::ok();
 }
 
-Return<ResponseCode>
+::ndk::ScopedAStatus
 TrustyConfirmationUI::deliverSecureInputEvent(const HardwareAuthToken& auth_token) {
     ConfUiLog(INFO) << "deliverSecureInputEvent is called";
-    ResponseCode rc = ResponseCode::Ignored;
+    int rc = IConfirmationUI::IGNORED;
+    if (!is_supported_vm_) {
+        return ndk::ScopedAStatus(AStatus_fromServiceSpecificError(IConfirmationUI::UNIMPLEMENTED));
+    }
     {
         std::unique_lock<std::mutex> lock(current_session_lock_);
         if (!current_session_) {
-            return rc;
+            return ndk::ScopedAStatus(AStatus_fromServiceSpecificError(rc));
         }
-        return current_session_->DeliverSecureInputEvent(auth_token);
-    }
-}
-
-Return<void> TrustyConfirmationUI::abort() {
-    {
-        std::unique_lock<std::mutex> lock(current_session_lock_);
-        if (!current_session_) {
-            return Void();
+        rc = current_session_->DeliverSecureInputEvent(auth_token);
+        if (rc != IConfirmationUI::OK) {
+            return ndk::ScopedAStatus(AStatus_fromServiceSpecificError(rc));
         }
-        return current_session_->Abort();
     }
+    return ndk::ScopedAStatus::ok();
 }
 
-android::sp<IConfirmationUI> createTrustyConfirmationUI() {
-    return new TrustyConfirmationUI();
+::ndk::ScopedAStatus TrustyConfirmationUI::abort() {
+    if (!is_supported_vm_) {
+        return ndk::ScopedAStatus(AStatus_fromServiceSpecificError(IConfirmationUI::UNIMPLEMENTED));
+    }
+    std::unique_lock<std::mutex> lock(current_session_lock_);
+    if (!current_session_) {
+        return ndk::ScopedAStatus(AStatus_fromServiceSpecificError(IConfirmationUI::IGNORED));
+    }
+    current_session_->Abort();
+    return ndk::ScopedAStatus::ok();
 }
 
-}  // namespace implementation
-}  // namespace V1_0
-}  // namespace confirmationui
-}  // namespace hardware
-}  // namespace android
+}  // namespace aidl::android::hardware::confirmationui
diff --git a/guest/hals/confirmationui/TrustyConfirmationUI.h b/guest/hals/confirmationui/TrustyConfirmationUI.h
index 1742d88..06e54f1 100644
--- a/guest/hals/confirmationui/TrustyConfirmationUI.h
+++ b/guest/hals/confirmationui/TrustyConfirmationUI.h
@@ -14,8 +14,7 @@
  * limitations under the License.
  */
 
-#ifndef ANDROID_HARDWARE_CONFIRMATIONUI_V1_0_TRUSTY_CONFIRMATIONUI_H
-#define ANDROID_HARDWARE_CONFIRMATIONUI_V1_0_TRUSTY_CONFIRMATIONUI_H
+#pragma once
 
 #include <atomic>
 #include <condition_variable>
@@ -24,9 +23,10 @@
 #include <mutex>
 #include <thread>
 
-#include <android/hardware/confirmationui/1.0/IConfirmationUI.h>
-#include <android/hardware/keymaster/4.0/types.h>
-#include <hidl/Status.h>
+#include <aidl/android/hardware/confirmationui/BnConfirmationUI.h>
+#include <aidl/android/hardware/confirmationui/IConfirmationResultCallback.h>
+#include <aidl/android/hardware/confirmationui/UIOption.h>
+#include <aidl/android/hardware/security/keymint/HardwareAuthToken.h>
 #include <teeui/generic_messages.h>
 
 #include "common/libs/concurrency/thread_safe_queue.h"
@@ -34,20 +34,14 @@
 #include "common/libs/fs/shared_fd.h"
 #include "guest_session.h"
 
-namespace android {
-namespace hardware {
-namespace confirmationui {
-namespace V1_0 {
-namespace implementation {
+namespace aidl::android::hardware::confirmationui {
 
-using ::android::sp;
-using ::android::hardware::hidl_array;
-using ::android::hardware::hidl_string;
-using ::android::hardware::hidl_vec;
-using ::android::hardware::Return;
-using ::android::hardware::Void;
+using ::aidl::android::hardware::security::keymint::HardwareAuthToken;
+using std::shared_ptr;
+using std::string;
+using std::vector;
 
-class TrustyConfirmationUI : public IConfirmationUI {
+class TrustyConfirmationUI : public BnConfirmationUI {
   public:
     using ConfUiMessage = cuttlefish::confui::ConfUiMessage;
     using ConfUiAckMessage = cuttlefish::confui::ConfUiAckMessage;
@@ -57,15 +51,14 @@
     virtual ~TrustyConfirmationUI();
     // Methods from ::android::hardware::confirmationui::V1_0::IConfirmationUI
     // follow.
-    Return<ResponseCode> promptUserConfirmation(const sp<IConfirmationResultCallback>& resultCB,
-                                                const hidl_string& promptText,
-                                                const hidl_vec<uint8_t>& extraData,
-                                                const hidl_string& locale,
-                                                const hidl_vec<UIOption>& uiOptions) override;
-    Return<ResponseCode> deliverSecureInputEvent(
-        const ::android::hardware::keymaster::V4_0::HardwareAuthToken& secureInputToken) override;
+    ::ndk::ScopedAStatus
+    promptUserConfirmation(const shared_ptr<IConfirmationResultCallback>& resultCB,
+                           const vector<uint8_t>& promptText, const vector<uint8_t>& extraData,
+                           const string& locale, const vector<UIOption>& uiOptions) override;
+    ::ndk::ScopedAStatus
+    deliverSecureInputEvent(const HardwareAuthToken& secureInputToken) override;
 
-    Return<void> abort() override;
+    ::ndk::ScopedAStatus abort() override;
 
   private:
     /*
@@ -102,10 +95,9 @@
 
     std::mutex listener_state_lock_;
     std::condition_variable listener_state_condv_;
-    ResponseCode prompt_result_;
+    int prompt_result_;
 
-    // client socket to the host
-    int host_vsock_port_;
+    // client virtio-console fd to the host
     cuttlefish::SharedFD host_fd_;
 
     // ack, response, command from the host, and the abort command from the guest
@@ -113,17 +105,13 @@
     std::mutex current_session_lock_;
     std::unique_ptr<GuestSession> current_session_;
     std::thread host_cmd_fetcher_thread_;
+    bool is_supported_vm_;
 
     cuttlefish::SharedFD ConnectToHost();
     void HostMessageFetcherLoop();
-    void RunSession(sp<IConfirmationResultCallback> resultCB, hidl_string promptText,
-                    hidl_vec<uint8_t> extraData, hidl_string locale, hidl_vec<UIOption> uiOptions);
+    void RunSession(shared_ptr<IConfirmationResultCallback> resultCB, string promptText,
+                    vector<uint8_t> extraData, string locale, vector<UIOption> uiOptions);
+    static const char* GetVirtioConsoleDevicePath();
 };
 
-}  // namespace implementation
-}  // namespace V1_0
-}  // namespace confirmationui
-}  // namespace hardware
-}  // namespace android
-
-#endif  // ANDROID_HARDWARE_CONFIRMATIONUI_V1_0_TRUSTY_CONFIRMATIONUI_H
+}  // namespace aidl::android::hardware::confirmationui
diff --git a/guest/hals/confirmationui/android.hardware.confirmationui@1.0-service.cuttlefish.rc b/guest/hals/confirmationui/android.hardware.confirmationui@1.0-service.cuttlefish.rc
index 81dfd49..618f211 100644
--- a/guest/hals/confirmationui/android.hardware.confirmationui@1.0-service.cuttlefish.rc
+++ b/guest/hals/confirmationui/android.hardware.confirmationui@1.0-service.cuttlefish.rc
@@ -1,5 +1,6 @@
-service confirmationui-1-0 /vendor/bin/hw/android.hardware.confirmationui@1.0-service.cuttlefish
-    interface android.hardware.confirmationui@1.0::IConfirmationUI default
+service vendor.confirmationui_default /vendor/bin/hw/android.hardware.confirmationui-service.cuttlefish
+    interface aidl android.hardware.confirmationui.IConfirmationUI/default
     class hal
     user system
     group drmrpc input system
+    disabled
\ No newline at end of file
diff --git a/guest/hals/confirmationui/android.hardware.confirmationui@1.0-service.cuttlefish.xml b/guest/hals/confirmationui/android.hardware.confirmationui@1.0-service.cuttlefish.xml
index 9008b87..afa2e8e 100644
--- a/guest/hals/confirmationui/android.hardware.confirmationui@1.0-service.cuttlefish.xml
+++ b/guest/hals/confirmationui/android.hardware.confirmationui@1.0-service.cuttlefish.xml
@@ -1,8 +1,7 @@
 <manifest version="1.0" type="device">
-    <hal format="hidl">
+    <hal format="aidl">
         <name>android.hardware.confirmationui</name>
-        <transport>hwbinder</transport>
-        <version>1.0</version>
+        <version>1</version>
         <interface>
         <name>IConfirmationUI</name>
             <instance>default</instance>
diff --git a/guest/hals/confirmationui/guest_session.cpp b/guest/hals/confirmationui/guest_session.cpp
index aa2ab12..3112a06 100644
--- a/guest/hals/confirmationui/guest_session.cpp
+++ b/guest/hals/confirmationui/guest_session.cpp
@@ -17,13 +17,12 @@
 
 #include "guest_session.h"
 
+#include <aidl/android/hardware/confirmationui/BnConfirmationUI.h>
+#include <aidl/android/hardware/confirmationui/TestModeCommands.h>
+
 #include <future>
 
-namespace android {
-namespace hardware {
-namespace confirmationui {
-namespace V1_0 {
-namespace implementation {
+namespace aidl::android::hardware::confirmationui {
 using TeeuiRc = teeui::ResponseCode;
 
 GuestSession::ResultTriple GuestSession::PromptUserConfirmation() {
@@ -58,8 +57,8 @@
      */
 
     GuestSession::ResultTriple error;
-    auto& error_rc = std::get<ResponseCode>(error);
-    error_rc = ResponseCode::SystemError;
+    auto& error_rc = std::get<int>(error);
+    error_rc = IConfirmationUI::SYSTEM_ERROR;
 
     CHECK(listener_state_ == ListenerState::Starting) << "ListenerState should be Starting";
 
@@ -72,60 +71,38 @@
     if (payload_lower_bound > upper_bound) {
         ConfUiLog(INFO) << "UI message too long to send to the host";
         // message is too long anyway, and don't send it to the host
-        error_rc = ResponseCode::UIErrorMessageTooLong;
+        error_rc = IConfirmationUI::UI_ERROR_MESSAGE_TOO_LONG;
         return error;
     }
     SerializedSend(cuttlefish::confui::SendStartCmd, host_fd_, session_name_, prompt_text_,
                    extra_data_, locale_, ui_options_);
     ConfUiLog(INFO) << "Session " << GetSessionId() << " started on both the guest and the host";
 
-    auto clean_up_and_get_first = [&]() -> std::unique_ptr<ConfUiMessage> {
-        // blocking wait to get the first msg that belongs to this session
-        while (true) {
-            auto first_curr_session_msg = incoming_msg_queue_.Pop();
-            if (!first_curr_session_msg ||
-                first_curr_session_msg->GetSessionId() != GetSessionId()) {
-                continue;
-            }
-            return std::move(first_curr_session_msg);
-        }
-    };
+    auto first_msg = incoming_msg_queue_.Pop();
 
-    /*
-     * Unconditionally wait ack, or host abort
-     *
-     * First couple of messages could be from the previous session.
-     * We should clear them up.
-     *
-     * Even though the guest HAL sends kAbort to the host, the kAbort
-     * does not happen immediately. Between the incoming_msg_queue_.FlushAll()
-     * and the actual abort on the host, there could still be messages
-     * sent from the host to the guest. As these lines are the first read
-     * for the current session, we clear up the preceding messages
-     * from the previous session until we see the message for the current
-     * session.
-     *
-     * Note that abort() call puts the Abort command in the queue. So,
-     * it will also show up in incoming_msg_queue_
-     *
-     */
-    auto first_msg = std::move(clean_up_and_get_first());
+    // the logic must guarantee first_msg is kCliAck
+    CHECK(first_msg->GetType() == cuttlefish::confui::ConfUiCmd::kCliAck)
+        << "first message from the host in a new session must be kCliAck "
+        << "but is " << cuttlefish::confui::ToString(first_msg->GetType());
 
     cuttlefish::confui::ConfUiAckMessage& start_ack_msg =
         static_cast<cuttlefish::confui::ConfUiAckMessage&>(*first_msg);
+    // ack to kStart has been received
+
     if (!start_ack_msg.IsSuccess()) {
         // handle errors: MALFORMED_UTF8 or Message too long
         const std::string error_msg = start_ack_msg.GetStatusMessage();
         if (error_msg == cuttlefish::confui::HostError::kMessageTooLongError) {
             ConfUiLog(ERROR) << "Message + Extra data + Meta info were too long";
-            error_rc = ResponseCode::UIErrorMessageTooLong;
+            error_rc = IConfirmationUI::UI_ERROR_MESSAGE_TOO_LONG;
         }
         if (error_msg == cuttlefish::confui::HostError::kIncorrectUTF8) {
             ConfUiLog(ERROR) << "Message is incorrectly UTF-encoded";
-            error_rc = ResponseCode::UIErrorMalformedUTF8Encoding;
+            error_rc = IConfirmationUI::UI_ERROR_MALFORMED_UTF8ENCODING;
         }
         return error;
     }
+    // the ack to kStart was success.
 
     //  ############################## Start 2nd Phase #############################################
     listener_state_ = ListenerState::SetupDone;
@@ -170,7 +147,7 @@
     if (user_or_abort->GetType() == cuttlefish::confui::ConfUiCmd::kAbort) {
         ConfUiLog(ERROR) << "Abort called or the user/host aborted"
                          << " while waiting user response";
-        return {ResponseCode::Aborted, {}, {}};
+        return {IConfirmationUI::ABORTED, {}, {}};
     }
     if (user_or_abort->GetType() == cuttlefish::confui::ConfUiCmd::kCliAck) {
         auto& ack_msg = static_cast<cuttlefish::confui::ConfUiAckMessage&>(*user_or_abort);
@@ -178,7 +155,7 @@
             ConfUiLog(ERROR) << "When host failed, it is supposed to send "
                              << "kCliAck with fail, but this is kCliAck with success";
         }
-        error_rc = ResponseCode::SystemError;
+        error_rc = IConfirmationUI::SYSTEM_ERROR;
         return error;
     }
     cuttlefish::confui::ConfUiCliResponseMessage& user_response =
@@ -191,7 +168,7 @@
     // make up the result triple
     if (user_response.GetResponse() == cuttlefish::confui::UserResponse::kCancel) {
         SerializedSend(cuttlefish::confui::SendStopCmd, host_fd_, GetSessionId());
-        return {ResponseCode::Canceled, {}, {}};
+        return {IConfirmationUI::CANCELED, {}, {}};
     }
 
     if (user_response.GetResponse() != cuttlefish::confui::UserResponse::kConfirm) {
@@ -200,12 +177,11 @@
     }
     SerializedSend(cuttlefish::confui::SendStopCmd, host_fd_, GetSessionId());
     //  ############################## Start 4th Phase - cleanup ##################################
-    return {ResponseCode::OK, user_response.GetMessage(), user_response.GetSign()};
+    return {IConfirmationUI::OK, user_response.GetMessage(), user_response.GetSign()};
 }
 
-Return<ResponseCode> GuestSession::DeliverSecureInputEvent(
-    const android::hardware::keymaster::V4_0::HardwareAuthToken& auth_token) {
-    ResponseCode rc = ResponseCode::Ignored;
+int GuestSession::DeliverSecureInputEvent(const HardwareAuthToken& auth_token) {
+    int rc = IConfirmationUI::IGNORED;
     {
         /*
          * deliverSecureInputEvent is only used by the VTS test to mock human input. A correct
@@ -222,7 +198,7 @@
         std::unique_lock<std::mutex> stateLock(listener_state_lock_);
         listener_state_condv_.wait(stateLock,
                                    [this] { return listener_state_ != ListenerState::SetupDone; });
-        if (listener_state_ != ListenerState::Interactive) return ResponseCode::Ignored;
+        if (listener_state_ != ListenerState::Interactive) return IConfirmationUI::IGNORED;
         if (static_cast<TestModeCommands>(auth_token.challenge) == TestModeCommands::OK_EVENT) {
             SerializedSend(cuttlefish::confui::SendUserSelection, host_fd_, GetSessionId(),
                            cuttlefish::confui::UserResponse::kConfirm);
@@ -230,7 +206,7 @@
             SerializedSend(cuttlefish::confui::SendUserSelection, host_fd_, GetSessionId(),
                            cuttlefish::confui::UserResponse::kCancel);
         }
-        rc = ResponseCode::OK;
+        rc = IConfirmationUI::OK;
     }
     listener_state_condv_.notify_all();
     // VTS test expect an OK response if the event was successfully delivered.
@@ -238,11 +214,11 @@
     // Canceled into OK. Canceled is only returned if the delivered event canceled
     // the operation, which means that the event was successfully delivered. Thus
     // we return OK.
-    if (rc == ResponseCode::Canceled) return ResponseCode::OK;
+    if (rc == IConfirmationUI::CANCELED) return IConfirmationUI::OK;
     return rc;
 }
 
-Return<void> GuestSession::Abort() {
+void GuestSession::Abort() {
     {
         std::unique_lock<std::mutex> stateLock(listener_state_lock_);
         if (listener_state_ == ListenerState::SetupDone ||
@@ -256,10 +232,5 @@
         }
     }
     listener_state_condv_.notify_all();
-    return Void();
 }
-}  // namespace implementation
-}  // namespace V1_0
-}  // namespace confirmationui
-}  // namespace hardware
-}  // namespace android
+}  // namespace aidl::android::hardware::confirmationui
diff --git a/guest/hals/confirmationui/guest_session.h b/guest/hals/confirmationui/guest_session.h
index 0dceffe..ebd8475 100644
--- a/guest/hals/confirmationui/guest_session.h
+++ b/guest/hals/confirmationui/guest_session.h
@@ -17,9 +17,8 @@
 
 #pragma once
 
+#include <aidl/android/hardware/security/keymint/HardwareAuthToken.h>
 #include <android-base/logging.h>
-#include <android/hardware/confirmationui/1.0/types.h>
-#include <android/hardware/keymaster/4.0/types.h>
 
 #include <condition_variable>
 #include <cstdint>
@@ -33,11 +32,8 @@
 #include "common/libs/confui/confui.h"
 #include "common/libs/fs/shared_fd.h"
 
-namespace android {
-namespace hardware {
-namespace confirmationui {
-namespace V1_0 {
-namespace implementation {
+namespace aidl::android::hardware::confirmationui {
+using ::aidl::android::hardware::security::keymint::HardwareAuthToken;
 class GuestSession {
   public:
     using ConfUiMessage = cuttlefish::confui::ConfUiMessage;
@@ -78,14 +74,12 @@
         // join host_cmd_fetcher_thread_ once Session takes the ownership of fd
     }
 
-    using ResultTriple =
-        std::tuple<ResponseCode, teeui::MsgVector<uint8_t>, teeui::MsgVector<uint8_t>>;
+    using ResultTriple = std::tuple<int, teeui::MsgVector<uint8_t>, teeui::MsgVector<uint8_t>>;
     ResultTriple PromptUserConfirmation();
 
-    Return<ResponseCode> DeliverSecureInputEvent(
-        const ::android::hardware::keymaster::V4_0::HardwareAuthToken& secureInputToken);
+    int DeliverSecureInputEvent(const HardwareAuthToken& secureInputToken);
 
-    Return<void> Abort();
+    void Abort();
     std::string GetSessionId() const { return session_name_; }
 
     void Push(std::unique_ptr<ConfUiMessage>&& msg) { incoming_msg_queue_.Push(std::move(msg)); }
@@ -139,8 +133,4 @@
      */
     std::mutex send_serializer_mtx_;
 };
-}  // namespace implementation
-}  // namespace V1_0
-}  // namespace confirmationui
-}  // namespace hardware
-}  // namespace android
+}  // namespace aidl::android::hardware::confirmationui
diff --git a/guest/hals/confirmationui/include/TrustyConfirmationuiHal.h b/guest/hals/confirmationui/include/TrustyConfirmationuiHal.h
deleted file mode 100644
index 2ab9389..0000000
--- a/guest/hals/confirmationui/include/TrustyConfirmationuiHal.h
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Copyright 2020, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#pragma once
-
-#include <android/hardware/confirmationui/1.0/IConfirmationUI.h>
-
-namespace android {
-namespace hardware {
-namespace confirmationui {
-namespace V1_0 {
-namespace implementation {
-
-android::sp<IConfirmationUI> createTrustyConfirmationUI();
-
-}  // namespace implementation
-}  // namespace V1_0
-}  // namespace confirmationui
-}  // namespace hardware
-}  // namespace android
diff --git a/guest/hals/confirmationui/service.cpp b/guest/hals/confirmationui/service.cpp
index dd7e84b..cec2563 100644
--- a/guest/hals/confirmationui/service.cpp
+++ b/guest/hals/confirmationui/service.cpp
@@ -15,21 +15,29 @@
  */
 
 #include <android-base/logging.h>
-#include <hidl/HidlTransportSupport.h>
+#include <android/binder_manager.h>
+#include <android/binder_process.h>
 
-#include <TrustyConfirmationuiHal.h>
+#include "TrustyConfirmationUI.h"
 
-using android::sp;
-using android::hardware::confirmationui::V1_0::implementation::createTrustyConfirmationUI;
+using ::aidl::android::hardware::confirmationui::TrustyConfirmationUI;
 
 int main() {
-    ::android::hardware::configureRpcThreadpool(1, true /*willJoinThreadpool*/);
-    auto service = createTrustyConfirmationUI();
-    auto status = service->registerAsService();
-    if (status != android::OK) {
+    ABinderProcess_setThreadPoolMaxThreadCount(0);
+
+    std::shared_ptr<TrustyConfirmationUI> confirmationui =
+        ndk::SharedRefBase::make<TrustyConfirmationUI>();
+
+    const std::string instance = std::string() + TrustyConfirmationUI::descriptor + "/default";
+    binder_status_t status =
+        AServiceManager_addService(confirmationui->asBinder().get(), instance.c_str());
+    CHECK_EQ(status, STATUS_OK);
+
+    if (status != STATUS_OK) {
         LOG(FATAL) << "Could not register service for ConfirmationUI 1.0 (" << status << ")";
         return -1;
     }
-    ::android::hardware::joinRpcThreadpool();
-    return -1;
+
+    ABinderProcess_joinThreadPool();
+    return -1;  // Should never get here.
 }
diff --git a/guest/hals/gatekeeper/remote/Android.bp b/guest/hals/gatekeeper/remote/Android.bp
index d20ad9f..a9025ad 100644
--- a/guest/hals/gatekeeper/remote/Android.bp
+++ b/guest/hals/gatekeeper/remote/Android.bp
@@ -17,11 +17,11 @@
 }
 
 cc_binary {
-    name: "android.hardware.gatekeeper@1.0-service.remote",
-    defaults: ["cuttlefish_guest_only", "hidl_defaults"],
+    name: "android.hardware.gatekeeper-service.remote",
+    defaults: ["cuttlefish_guest_only"],
     vendor: true,
     relative_install_path: "hw",
-    init_rc: ["android.hardware.gatekeeper@1.0-service.remote.rc"],
+    init_rc: ["android.hardware.gatekeeper-service.remote.rc"],
 
     srcs: [
         "remote_gatekeeper.cpp",
@@ -39,7 +39,9 @@
     ],
 
     shared_libs: [
-        "android.hardware.gatekeeper@1.0",
+        "android.hardware.gatekeeper-V1-ndk",
+        "libbinder_ndk",
+        "libhardware",
         "libbase",
         "libcuttlefish_fs",
         "libcuttlefish_security",
@@ -51,5 +53,5 @@
         "libtrusty",
     ],
 
-    vintf_fragments: ["android.hardware.gatekeeper@1.0-service.remote.xml"],
+    vintf_fragments: ["android.hardware.gatekeeper-service.remote.xml"],
 }
diff --git a/guest/hals/gatekeeper/remote/android.hardware.gatekeeper-service.remote.rc b/guest/hals/gatekeeper/remote/android.hardware.gatekeeper-service.remote.rc
new file mode 100644
index 0000000..1bac0b9
--- /dev/null
+++ b/guest/hals/gatekeeper/remote/android.hardware.gatekeeper-service.remote.rc
@@ -0,0 +1,4 @@
+service vendor.gatekeeper_default /vendor/bin/hw/android.hardware.gatekeeper-service.remote
+    class hal
+    user system
+    group system
diff --git a/guest/hals/gatekeeper/remote/android.hardware.gatekeeper-service.remote.xml b/guest/hals/gatekeeper/remote/android.hardware.gatekeeper-service.remote.xml
new file mode 100644
index 0000000..9b99f65
--- /dev/null
+++ b/guest/hals/gatekeeper/remote/android.hardware.gatekeeper-service.remote.xml
@@ -0,0 +1,10 @@
+<manifest version="1.0" type="device">
+    <hal format="aidl">
+        <name>android.hardware.gatekeeper</name>
+        <version>1</version>
+        <interface>
+            <name>IGatekeeper</name>
+            <instance>default</instance>
+        </interface>
+    </hal>
+</manifest>
\ No newline at end of file
diff --git a/guest/hals/gatekeeper/remote/android.hardware.gatekeeper@1.0-service.remote.rc b/guest/hals/gatekeeper/remote/android.hardware.gatekeeper@1.0-service.remote.rc
deleted file mode 100644
index 45bf268..0000000
--- a/guest/hals/gatekeeper/remote/android.hardware.gatekeeper@1.0-service.remote.rc
+++ /dev/null
@@ -1,4 +0,0 @@
-service vendor.gatekeeper-1-0 /vendor/bin/hw/android.hardware.gatekeeper@1.0-service.remote
-    class hal
-    user system
-    group system
diff --git a/guest/hals/gatekeeper/remote/android.hardware.gatekeeper@1.0-service.remote.xml b/guest/hals/gatekeeper/remote/android.hardware.gatekeeper@1.0-service.remote.xml
deleted file mode 100644
index 19714a8..0000000
--- a/guest/hals/gatekeeper/remote/android.hardware.gatekeeper@1.0-service.remote.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-<manifest version="1.0" type="device">
-    <hal format="hidl">
-        <name>android.hardware.gatekeeper</name>
-        <transport>hwbinder</transport>
-        <version>1.0</version>
-        <interface>
-        <name>IGatekeeper</name>
-            <instance>default</instance>
-        </interface>
-    </hal>
-</manifest>
diff --git a/guest/hals/gatekeeper/remote/remote_gatekeeper.cpp b/guest/hals/gatekeeper/remote/remote_gatekeeper.cpp
index 5cca879..ad84663 100644
--- a/guest/hals/gatekeeper/remote/remote_gatekeeper.cpp
+++ b/guest/hals/gatekeeper/remote/remote_gatekeeper.cpp
@@ -18,143 +18,165 @@
 
 #include "remote_gatekeeper.h"
 
+#include <endian.h>
 #include <limits>
 
 #include <android-base/logging.h>
+#include <gatekeeper/password_handle.h>
+#include <hardware/hw_auth_token.h>
 
-using ::android::hardware::hidl_vec;
-using ::android::hardware::Return;
-using ::android::hardware::gatekeeper::V1_0::GatekeeperStatusCode;
-using ::gatekeeper::EnrollRequest;
-using ::gatekeeper::EnrollResponse;
+namespace aidl::android::hardware::gatekeeper {
+
 using ::gatekeeper::ERROR_INVALID;
 using ::gatekeeper::ERROR_MEMORY_ALLOCATION_FAILED;
 using ::gatekeeper::ERROR_NONE;
 using ::gatekeeper::ERROR_RETRY;
+using ::gatekeeper::ERROR_UNKNOWN;
 using ::gatekeeper::SizedBuffer;
-using ::gatekeeper::VerifyRequest;
-using ::gatekeeper::VerifyResponse;
 
-namespace gatekeeper {
+RemoteGateKeeperDevice::RemoteGateKeeperDevice(
+    cuttlefish::SharedFdGatekeeperChannel* channel)
+    : gatekeeper_channel_(channel), error_(0) {}
 
-RemoteGateKeeperDevice::RemoteGateKeeperDevice(cuttlefish::GatekeeperChannel* channel)
-    : gatekeeper_channel_(channel), error_(0) {
-}
+RemoteGateKeeperDevice::~RemoteGateKeeperDevice() {}
 
-RemoteGateKeeperDevice::~RemoteGateKeeperDevice() {
-}
-
-SizedBuffer hidl_vec2sized_buffer(const hidl_vec<uint8_t>& vec) {
+SizedBuffer vec2sized_buffer(const std::vector<uint8_t>& vec) {
     if (vec.size() == 0 || vec.size() > std::numeric_limits<uint32_t>::max()) return {};
     auto unused = new uint8_t[vec.size()];
     std::copy(vec.begin(), vec.end(), unused);
     return {unused, static_cast<uint32_t>(vec.size())};
 }
 
-Return<void> RemoteGateKeeperDevice::enroll(uint32_t uid,
-                                            const hidl_vec<uint8_t>& currentPasswordHandle,
-                                            const hidl_vec<uint8_t>& currentPassword,
-                                            const hidl_vec<uint8_t>& desiredPassword,
-                                            enroll_cb _hidl_cb) {
+void sizedBuffer2AidlHWToken(SizedBuffer& buffer,
+                             android::hardware::security::keymint::HardwareAuthToken* aidlToken) {
+    const hw_auth_token_t* authToken =
+        reinterpret_cast<const hw_auth_token_t*>(buffer.Data<uint8_t>());
+    aidlToken->challenge = authToken->challenge;
+    aidlToken->userId = authToken->user_id;
+    aidlToken->authenticatorId = authToken->authenticator_id;
+    // these are in network order: translate to host
+    aidlToken->authenticatorType =
+        static_cast<android::hardware::security::keymint::HardwareAuthenticatorType>(
+            be32toh(authToken->authenticator_type));
+    aidlToken->timestamp.milliSeconds = be64toh(authToken->timestamp);
+    aidlToken->mac.insert(aidlToken->mac.begin(), std::begin(authToken->hmac),
+                          std::end(authToken->hmac));
+}
+
+::ndk::ScopedAStatus
+RemoteGateKeeperDevice::enroll(int32_t uid, const std::vector<uint8_t>& currentPasswordHandle,
+                               const std::vector<uint8_t>& currentPassword,
+                               const std::vector<uint8_t>& desiredPassword,
+                               GatekeeperEnrollResponse* rsp) {
     if (error_ != 0) {
         LOG(ERROR) << "Gatekeeper in invalid state";
-        _hidl_cb({GatekeeperStatusCode::ERROR_GENERAL_FAILURE, 0, {}});
-        return {};
+        return ndk::ScopedAStatus(AStatus_fromServiceSpecificError(ERROR_GENERAL_FAILURE));
     }
 
     if (desiredPassword.size() == 0) {
         LOG(ERROR) << "Desired password size is 0";
-        _hidl_cb({GatekeeperStatusCode::ERROR_GENERAL_FAILURE, 0, {}});
-        return {};
+        return ndk::ScopedAStatus(AStatus_fromServiceSpecificError(ERROR_GENERAL_FAILURE));
     }
 
-    EnrollRequest request(uid, hidl_vec2sized_buffer(currentPasswordHandle),
-                          hidl_vec2sized_buffer(desiredPassword),
-                          hidl_vec2sized_buffer(currentPassword));
+    if (currentPasswordHandle.size() > 0) {
+        if (currentPasswordHandle.size() != sizeof(::gatekeeper::password_handle_t)) {
+            LOG(ERROR) << "Password handle has wrong length";
+            return ndk::ScopedAStatus(AStatus_fromServiceSpecificError(ERROR_GENERAL_FAILURE));
+        }
+    }
+
+    EnrollRequest request(uid, vec2sized_buffer(currentPasswordHandle),
+                          vec2sized_buffer(desiredPassword), vec2sized_buffer(currentPassword));
     EnrollResponse response;
     auto error = Send(request, &response);
     if (error != ERROR_NONE) {
         LOG(ERROR) << "Enroll request gave error: " << error;
-        _hidl_cb({GatekeeperStatusCode::ERROR_GENERAL_FAILURE, 0, {}});
+        return ndk::ScopedAStatus(AStatus_fromServiceSpecificError(ERROR_GENERAL_FAILURE));
     } else if (response.error == ERROR_RETRY) {
         LOG(ERROR) << "Enroll response has a retry error";
-        _hidl_cb({GatekeeperStatusCode::ERROR_RETRY_TIMEOUT, response.retry_timeout, {}});
+        *rsp = {ERROR_RETRY_TIMEOUT, static_cast<int32_t>(response.retry_timeout), 0, {}};
+        return ndk::ScopedAStatus::ok();
     } else if (response.error != ERROR_NONE) {
         LOG(ERROR) << "Enroll response has an error: " << response.error;
-        _hidl_cb({GatekeeperStatusCode::ERROR_GENERAL_FAILURE, 0, {}});
+        return ndk::ScopedAStatus(AStatus_fromServiceSpecificError(ERROR_GENERAL_FAILURE));
     } else {
-        hidl_vec<uint8_t> new_handle(response.enrolled_password_handle.Data<uint8_t>(),
-                                     response.enrolled_password_handle.Data<uint8_t>() +
-                                             response.enrolled_password_handle.size());
-        _hidl_cb({GatekeeperStatusCode::STATUS_OK, response.retry_timeout, new_handle});
+        const ::gatekeeper::password_handle_t* password_handle =
+            response.enrolled_password_handle.Data<::gatekeeper::password_handle_t>();
+        *rsp = {STATUS_OK,
+                0,
+                static_cast<int64_t>(password_handle->user_id),
+                {response.enrolled_password_handle.Data<uint8_t>(),
+                 (response.enrolled_password_handle.Data<uint8_t>() +
+                  response.enrolled_password_handle.size())}};
     }
-    return {};
+    return ndk::ScopedAStatus::ok();
 }
 
-Return<void> RemoteGateKeeperDevice::verify(
-        uint32_t uid, uint64_t challenge,
-        const ::android::hardware::hidl_vec<uint8_t>& enrolledPasswordHandle,
-        const ::android::hardware::hidl_vec<uint8_t>& providedPassword, verify_cb _hidl_cb) {
+::ndk::ScopedAStatus RemoteGateKeeperDevice::verify(
+    int32_t uid, int64_t challenge, const std::vector<uint8_t>& enrolledPasswordHandle,
+    const std::vector<uint8_t>& providedPassword, GatekeeperVerifyResponse* rsp) {
     if (error_ != 0) {
         LOG(ERROR) << "Gatekeeper in invalid state";
-        _hidl_cb({GatekeeperStatusCode::ERROR_GENERAL_FAILURE, 0, {}});
-        return {};
+        return ndk::ScopedAStatus(AStatus_fromServiceSpecificError(ERROR_GENERAL_FAILURE));
     }
 
     if (enrolledPasswordHandle.size() == 0) {
         LOG(ERROR) << "Enrolled password size is 0";
-        _hidl_cb({GatekeeperStatusCode::ERROR_GENERAL_FAILURE, 0, {}});
-        return {};
+        return ndk::ScopedAStatus(AStatus_fromServiceSpecificError(ERROR_GENERAL_FAILURE));
     }
 
-    VerifyRequest request(uid, challenge, hidl_vec2sized_buffer(enrolledPasswordHandle),
-                          hidl_vec2sized_buffer(providedPassword));
+    if (enrolledPasswordHandle.size() > 0) {
+        if (enrolledPasswordHandle.size() != sizeof(::gatekeeper::password_handle_t)) {
+            LOG(ERROR) << "Password handle has wrong length";
+            return ndk::ScopedAStatus(AStatus_fromServiceSpecificError(ERROR_GENERAL_FAILURE));
+        }
+    }
+
+    VerifyRequest request(uid, challenge, vec2sized_buffer(enrolledPasswordHandle),
+                          vec2sized_buffer(providedPassword));
     VerifyResponse response;
 
     auto error = Send(request, &response);
     if (error != ERROR_NONE) {
         LOG(ERROR) << "Verify request gave error: " << error;
-        _hidl_cb({GatekeeperStatusCode::ERROR_GENERAL_FAILURE, 0, {}});
+        return ndk::ScopedAStatus(AStatus_fromServiceSpecificError(ERROR_GENERAL_FAILURE));
     } else if (response.error == ERROR_RETRY) {
         LOG(ERROR) << "Verify request response gave retry error";
-        _hidl_cb({GatekeeperStatusCode::ERROR_RETRY_TIMEOUT, response.retry_timeout, {}});
+        *rsp = {ERROR_RETRY_TIMEOUT, static_cast<int32_t>(response.retry_timeout), {}};
+        return ndk::ScopedAStatus::ok();
     } else if (response.error != ERROR_NONE) {
         LOG(ERROR) << "Verify request response gave error: " << response.error;
-        _hidl_cb({GatekeeperStatusCode::ERROR_GENERAL_FAILURE, 0, {}});
+        return ndk::ScopedAStatus(AStatus_fromServiceSpecificError(ERROR_GENERAL_FAILURE));
     } else {
-        hidl_vec<uint8_t> auth_token(
-                response.auth_token.Data<uint8_t>(),
-                response.auth_token.Data<uint8_t>() + response.auth_token.size());
-
-        _hidl_cb({response.request_reenroll ? GatekeeperStatusCode::STATUS_REENROLL
-                                            : GatekeeperStatusCode::STATUS_OK,
-                  response.retry_timeout, auth_token});
+        // On Success, return GatekeeperVerifyResponse with Success Status, timeout{0} and
+        // valid HardwareAuthToken.
+        *rsp = {response.request_reenroll ? STATUS_REENROLL : STATUS_OK, 0, {}};
+        // Convert the hw_auth_token_t to HardwareAuthToken in the response.
+        sizedBuffer2AidlHWToken(response.auth_token, &rsp->hardwareAuthToken);
     }
-    return {};
+    return ndk::ScopedAStatus::ok();
 }
 
-Return<void> RemoteGateKeeperDevice::deleteUser(uint32_t /*uid*/, deleteUser_cb _hidl_cb) {
+::ndk::ScopedAStatus RemoteGateKeeperDevice::deleteUser(int32_t /*uid*/) {
     LOG(ERROR) << "deleteUser is unimplemented";
-    _hidl_cb({GatekeeperStatusCode::ERROR_NOT_IMPLEMENTED, 0, {}});
-    return {};
+    return ndk::ScopedAStatus(AStatus_fromServiceSpecificError(ERROR_NOT_IMPLEMENTED));
 }
 
-Return<void> RemoteGateKeeperDevice::deleteAllUsers(deleteAllUsers_cb _hidl_cb) {
+::ndk::ScopedAStatus RemoteGateKeeperDevice::deleteAllUsers() {
     LOG(ERROR) << "deleteAllUsers is unimplemented";
-    _hidl_cb({GatekeeperStatusCode::ERROR_NOT_IMPLEMENTED, 0, {}});
-    return {};
+    return ndk::ScopedAStatus(AStatus_fromServiceSpecificError(ERROR_NOT_IMPLEMENTED));
 }
 
 gatekeeper_error_t RemoteGateKeeperDevice::Send(uint32_t command, const GateKeeperMessage& request,
-        GateKeeperMessage *response) {
+                                                GateKeeperMessage* response) {
     if (!gatekeeper_channel_->SendRequest(command, request)) {
-      LOG(ERROR) << "Failed to send request";
-      return ERROR_UNKNOWN;
+        LOG(ERROR) << "Failed to send request";
+        return ERROR_UNKNOWN;
     }
     auto remote_response = gatekeeper_channel_->ReceiveMessage();
     if (!remote_response) {
-      LOG(ERROR) << "Failed to receive response";
-      return ERROR_UNKNOWN;
+        LOG(ERROR) << "Failed to receive response";
+        return ERROR_UNKNOWN;
     }
     const uint8_t* buffer = remote_response->payload;
     const uint8_t* buffer_end = remote_response->payload + remote_response->payload_size;
@@ -166,4 +188,4 @@
     return rc;
 }
 
-};
+};  // namespace aidl::android::hardware::gatekeeper
diff --git a/guest/hals/gatekeeper/remote/remote_gatekeeper.h b/guest/hals/gatekeeper/remote/remote_gatekeeper.h
index 5575052..c72da90 100644
--- a/guest/hals/gatekeeper/remote/remote_gatekeeper.h
+++ b/guest/hals/gatekeeper/remote/remote_gatekeeper.h
@@ -13,78 +13,86 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
-#ifndef TRUSTY_GATEKEEPER_H
-#define TRUSTY_GATEKEEPER_H
+#pragma once
 
 #include <memory>
 
-#include <android/hardware/gatekeeper/1.0/IGatekeeper.h>
-#include <hidl/Status.h>
+#include <aidl/android/hardware/gatekeeper/BnGatekeeper.h>
 #include <gatekeeper/gatekeeper_messages.h>
 
-#include "common/libs/security/gatekeeper_channel.h"
+#include "common/libs/security/gatekeeper_channel_sharedfd.h"
 
-namespace gatekeeper {
+namespace aidl::android::hardware::gatekeeper {
 
-class RemoteGateKeeperDevice : public ::android::hardware::gatekeeper::V1_0::IGatekeeper {
+using aidl::android::hardware::gatekeeper::GatekeeperEnrollResponse;
+using aidl::android::hardware::gatekeeper::GatekeeperVerifyResponse;
+using aidl::android::hardware::gatekeeper::IGatekeeper;
+using ::gatekeeper::ENROLL;
+using ::gatekeeper::EnrollRequest;
+using ::gatekeeper::EnrollResponse;
+using ::gatekeeper::gatekeeper_error_t;
+using ::gatekeeper::GateKeeperMessage;
+using ::gatekeeper::VERIFY;
+using ::gatekeeper::VerifyRequest;
+using ::gatekeeper::VerifyResponse;
+
+class RemoteGateKeeperDevice : public BnGatekeeper {
   public:
-    explicit RemoteGateKeeperDevice(cuttlefish::GatekeeperChannel* gatekeeper_channel);
-    ~RemoteGateKeeperDevice();
-    /**
-     * Enrolls password_payload, which should be derived from a user selected pin or password,
-     * with the authentication factor private key used only for enrolling authentication
-     * factor data.
-     *
-     * Returns: 0 on success or an error code less than 0 on error.
-     * On error, enrolled_password_handle will not be allocated.
-     */
-    ::android::hardware::Return<void> enroll(
-            uint32_t uid, const ::android::hardware::hidl_vec<uint8_t>& currentPasswordHandle,
-            const ::android::hardware::hidl_vec<uint8_t>& currentPassword,
-            const ::android::hardware::hidl_vec<uint8_t>& desiredPassword,
-            enroll_cb _hidl_cb) override;
+   explicit RemoteGateKeeperDevice(
+       cuttlefish::SharedFdGatekeeperChannel* gatekeeper_channel);
+   ~RemoteGateKeeperDevice();
+   /**
+    * Enrolls password_payload, which should be derived from a user selected pin
+    * or password, with the authentication factor private key used only for
+    * enrolling authentication factor data.
+    *
+    * Returns: 0 on success or an error code less than 0 on error.
+    * On error, enrolled_password_handle will not be allocated.
+    */
+   ::ndk::ScopedAStatus enroll(
+       int32_t uid, const std::vector<uint8_t>& currentPasswordHandle,
+       const std::vector<uint8_t>& currentPassword,
+       const std::vector<uint8_t>& desiredPassword,
+       GatekeeperEnrollResponse* _aidl_return) override;
+   /**
+    * Verifies provided_password matches enrolled_password_handle.
+    *
+    * Implementations of this module may retain the result of this call
+    * to attest to the recency of authentication.
+    *
+    * On success, writes the address of a verification token to auth_token,
+    * usable to attest password verification to other trusted services. Clients
+    * may pass NULL for this value.
+    *
+    * Returns: 0 on success or an error code less than 0 on error
+    * On error, verification token will not be allocated
+    */
+   ::ndk::ScopedAStatus verify(
+       int32_t uid, int64_t challenge,
+       const std::vector<uint8_t>& enrolledPasswordHandle,
+       const std::vector<uint8_t>& providedPassword,
+       GatekeeperVerifyResponse* _aidl_return) override;
 
-    /**
-     * Verifies provided_password matches enrolled_password_handle.
-     *
-     * Implementations of this module may retain the result of this call
-     * to attest to the recency of authentication.
-     *
-     * On success, writes the address of a verification token to auth_token,
-     * usable to attest password verification to other trusted services. Clients
-     * may pass NULL for this value.
-     *
-     * Returns: 0 on success or an error code less than 0 on error
-     * On error, verification token will not be allocated
-     */
-    ::android::hardware::Return<void> verify(
-            uint32_t uid, uint64_t challenge,
-            const ::android::hardware::hidl_vec<uint8_t>& enrolledPasswordHandle,
-            const ::android::hardware::hidl_vec<uint8_t>& providedPassword,
-            verify_cb _hidl_cb) override;
+   ::ndk::ScopedAStatus deleteAllUsers() override;
 
-    ::android::hardware::Return<void> deleteUser(uint32_t uid, deleteUser_cb _hidl_cb) override;
-
-    ::android::hardware::Return<void> deleteAllUsers(deleteAllUsers_cb _hidl_cb) override;
+   ::ndk::ScopedAStatus deleteUser(int32_t uid) override;
 
   private:
-    cuttlefish::GatekeeperChannel* gatekeeper_channel_;
+   cuttlefish::SharedFdGatekeeperChannel* gatekeeper_channel_;
 
-    gatekeeper_error_t Send(uint32_t command, const GateKeeperMessage& request,
+   gatekeeper_error_t Send(uint32_t command, const GateKeeperMessage& request,
                            GateKeeperMessage* response);
 
-    gatekeeper_error_t Send(const EnrollRequest& request, EnrollResponse *response) {
-        return Send(ENROLL, request, response);
-    }
+   gatekeeper_error_t Send(const EnrollRequest& request,
+                           EnrollResponse* response) {
+     return Send(ENROLL, request, response);
+   }
 
-    gatekeeper_error_t Send(const VerifyRequest& request, VerifyResponse *response) {
+    gatekeeper_error_t Send(const VerifyRequest& request, VerifyResponse* response) {
         return Send(VERIFY, request, response);
     }
 
     int error_;
 };
 
-}  // namespace gatekeeper
-
-#endif
+}  // namespace aidl::android::hardware::gatekeeper
diff --git a/guest/hals/gatekeeper/remote/service.cpp b/guest/hals/gatekeeper/remote/service.cpp
index 9a230f5..85e3b50 100644
--- a/guest/hals/gatekeeper/remote/service.cpp
+++ b/guest/hals/gatekeeper/remote/service.cpp
@@ -13,49 +13,47 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-#define LOG_TAG "android.hardware.gatekeeper@1.0-service.remote"
+#define LOG_TAG "android.hardware.gatekeeper-service.remote"
 
 #include <android-base/logging.h>
-#include <android/hardware/gatekeeper/1.0/IGatekeeper.h>
+#include <android/binder_manager.h>
+#include <android/binder_process.h>
 #include <cutils/properties.h>
 #include <gflags/gflags.h>
 
-#include <hidl/LegacySupport.h>
-
 #include "common/libs/fs/shared_fd.h"
 #include "common/libs/security/gatekeeper_channel.h"
 #include "guest/hals/gatekeeper/remote/remote_gatekeeper.h"
+#include "remote_gatekeeper.h"
 
-// Generated HIDL files
-using android::hardware::gatekeeper::V1_0::IGatekeeper;
-using gatekeeper::RemoteGateKeeperDevice;
+using aidl::android::hardware::gatekeeper::RemoteGateKeeperDevice;
 
 const char device[] = "/dev/hvc4";
 
 int main(int argc, char** argv) {
-  ::android::base::InitLogging(argv, ::android::base::KernelLogger);
-  gflags::ParseCommandLineFlags(&argc, &argv, true);
-  ::android::hardware::configureRpcThreadpool(1, true /* willJoinThreadpool */);
+    ::android::base::InitLogging(argv, ::android::base::KernelLogger);
+    gflags::ParseCommandLineFlags(&argc, &argv, true);
+    ABinderProcess_setThreadPoolMaxThreadCount(0);
 
-  auto fd = cuttlefish::SharedFD::Open(device, O_RDWR);
-  if (!fd->IsOpen()) {
-    LOG(FATAL) << "Could not connect to gatekeeper: " << fd->StrError();
-  }
+    auto fd = cuttlefish::SharedFD::Open(device, O_RDWR);
+    if (!fd->IsOpen()) {
+        LOG(FATAL) << "Could not connect to gatekeeper: " << fd->StrError();
+    }
 
-  if (fd->SetTerminalRaw() < 0) {
-    LOG(FATAL) << "Could not make " << device << " a raw terminal: "
-                << fd->StrError();
-  }
+    if (fd->SetTerminalRaw() < 0) {
+        LOG(FATAL) << "Could not make " << device << " a raw terminal: " << fd->StrError();
+    }
 
-  cuttlefish::GatekeeperChannel gatekeeperChannel(fd, fd);
+    cuttlefish::SharedFdGatekeeperChannel gatekeeperChannel(fd, fd);
 
-  android::sp<RemoteGateKeeperDevice> gatekeeper(
-    new RemoteGateKeeperDevice(&gatekeeperChannel));
-  auto status = gatekeeper->registerAsService();
-  if (status != android::OK) {
-    LOG(FATAL) << "Could not register service for Gatekeeper 1.0 (remote) (" << status << ")";
-  }
+    std::shared_ptr<RemoteGateKeeperDevice> gatekeeper =
+        ndk::SharedRefBase::make<RemoteGateKeeperDevice>(&gatekeeperChannel);
 
-  android::hardware::joinRpcThreadpool();
-  return -1;  // Should never get here.
+    const std::string instance = std::string() + RemoteGateKeeperDevice::descriptor + "/default";
+    binder_status_t status =
+        AServiceManager_addService(gatekeeper->asBinder().get(), instance.c_str());
+    CHECK_EQ(status, STATUS_OK);
+
+    ABinderProcess_joinThreadPool();
+    return -1;  // Should never get here.
 }
diff --git a/guest/hals/health/Android.bp b/guest/hals/health/Android.bp
index cb9d866..a452c1e 100644
--- a/guest/hals/health/Android.bp
+++ b/guest/hals/health/Android.bp
@@ -45,7 +45,7 @@
         "libhidlbase",
         "liblog",
         "libutils",
-        "android.hardware.health-V1-ndk",
+        "android.hardware.health-V2-ndk",
     ],
 
     defaults: ["enabled_on_p_and_later"],
diff --git a/guest/hals/health/android.hardware.health-service.cuttlefish.xml b/guest/hals/health/android.hardware.health-service.cuttlefish.xml
index 98026cb..1fe9b8d 100644
--- a/guest/hals/health/android.hardware.health-service.cuttlefish.xml
+++ b/guest/hals/health/android.hardware.health-service.cuttlefish.xml
@@ -1,7 +1,7 @@
 <manifest version="1.0" type="device">
     <hal format="aidl">
         <name>android.hardware.health</name>
-        <version>1</version>
+        <version>2</version>
         <fqname>IHealth/default</fqname>
     </hal>
 </manifest>
diff --git a/guest/hals/identity/Android.bp b/guest/hals/identity/Android.bp
index c0142ee..335ee21 100644
--- a/guest/hals/identity/Android.bp
+++ b/guest/hals/identity/Android.bp
@@ -1,3 +1,18 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
 package {
     default_applicable_licenses: ["Android-Apache-2.0"],
 }
diff --git a/guest/hals/identity/libeic/EicOpsImpl.cc b/guest/hals/identity/libeic/EicOpsImpl.cc
index 0921c72..1bd54ce 100644
--- a/guest/hals/identity/libeic/EicOpsImpl.cc
+++ b/guest/hals/identity/libeic/EicOpsImpl.cc
@@ -470,7 +470,7 @@
 }
 
 bool eicOpsEcdh(const uint8_t publicKey[EIC_P256_PUB_KEY_SIZE],
-                const uint8_t privateKey[EIC_P256_PUB_KEY_SIZE],
+                const uint8_t privateKey[EIC_P256_PRIV_KEY_SIZE],
                 uint8_t sharedSecret[EIC_P256_COORDINATE_SIZE]) {
   vector<uint8_t> pubKeyVec(EIC_P256_PUB_KEY_SIZE + 1);
   pubKeyVec[0] = 0x04;
diff --git a/guest/hals/keymaster/remote/remote_keymaster.cpp b/guest/hals/keymaster/remote/remote_keymaster.cpp
index daf5956..c31793f 100644
--- a/guest/hals/keymaster/remote/remote_keymaster.cpp
+++ b/guest/hals/keymaster/remote/remote_keymaster.cpp
@@ -22,7 +22,7 @@
 
 namespace keymaster {
 
-RemoteKeymaster::RemoteKeymaster(cuttlefish::KeymasterChannel* channel)
+RemoteKeymaster::RemoteKeymaster(cuttlefish::SharedFdKeymasterChannel* channel)
     : channel_(channel) {}
 
 RemoteKeymaster::~RemoteKeymaster() {
diff --git a/guest/hals/keymaster/remote/remote_keymaster.h b/guest/hals/keymaster/remote/remote_keymaster.h
index 13c428f..d0dc8a1 100644
--- a/guest/hals/keymaster/remote/remote_keymaster.h
+++ b/guest/hals/keymaster/remote/remote_keymaster.h
@@ -19,59 +19,71 @@
 
 #include <keymaster/android_keymaster_messages.h>
 
-#include "common/libs/security/keymaster_channel.h"
+#include "common/libs/security/keymaster_channel_sharedfd.h"
 
 namespace keymaster {
 
 class RemoteKeymaster {
   private:
-    cuttlefish::KeymasterChannel* channel_;
+   cuttlefish::SharedFdKeymasterChannel* channel_;
 
-    void ForwardCommand(
-        AndroidKeymasterCommand command, const Serializable& req, KeymasterResponse* rsp);
+   void ForwardCommand(AndroidKeymasterCommand command, const Serializable& req,
+                       KeymasterResponse* rsp);
+
   public:
-    RemoteKeymaster(cuttlefish::KeymasterChannel*);
-    ~RemoteKeymaster();
-    bool Initialize();
-    void GetVersion(const GetVersionRequest& request, GetVersionResponse* response);
-    void SupportedAlgorithms(const SupportedAlgorithmsRequest& request,
-                             SupportedAlgorithmsResponse* response);
-    void SupportedBlockModes(const SupportedBlockModesRequest& request,
-                             SupportedBlockModesResponse* response);
-    void SupportedPaddingModes(const SupportedPaddingModesRequest& request,
-                               SupportedPaddingModesResponse* response);
-    void SupportedDigests(const SupportedDigestsRequest& request,
-                          SupportedDigestsResponse* response);
-    void SupportedImportFormats(const SupportedImportFormatsRequest& request,
-                                SupportedImportFormatsResponse* response);
-    void SupportedExportFormats(const SupportedExportFormatsRequest& request,
-                                SupportedExportFormatsResponse* response);
-    void AddRngEntropy(const AddEntropyRequest& request, AddEntropyResponse* response);
-    void Configure(const ConfigureRequest& request, ConfigureResponse* response);
-    void GenerateKey(const GenerateKeyRequest& request, GenerateKeyResponse* response);
-    void GetKeyCharacteristics(const GetKeyCharacteristicsRequest& request,
-                               GetKeyCharacteristicsResponse* response);
-    void ImportKey(const ImportKeyRequest& request, ImportKeyResponse* response);
-    void ImportWrappedKey(const ImportWrappedKeyRequest& request,
-                          ImportWrappedKeyResponse* response);
-    void ExportKey(const ExportKeyRequest& request, ExportKeyResponse* response);
-    void AttestKey(const AttestKeyRequest& request, AttestKeyResponse* response);
-    void UpgradeKey(const UpgradeKeyRequest& request, UpgradeKeyResponse* response);
-    void DeleteKey(const DeleteKeyRequest& request, DeleteKeyResponse* response);
-    void DeleteAllKeys(const DeleteAllKeysRequest& request, DeleteAllKeysResponse* response);
-    void BeginOperation(const BeginOperationRequest& request, BeginOperationResponse* response);
-    void UpdateOperation(const UpdateOperationRequest& request, UpdateOperationResponse* response);
-    void FinishOperation(const FinishOperationRequest& request, FinishOperationResponse* response);
-    void AbortOperation(const AbortOperationRequest& request, AbortOperationResponse* response);
-    GetHmacSharingParametersResponse GetHmacSharingParameters();
-    ComputeSharedHmacResponse ComputeSharedHmac(const ComputeSharedHmacRequest& request);
-    VerifyAuthorizationResponse VerifyAuthorization(const VerifyAuthorizationRequest& request);
-    DeviceLockedResponse DeviceLocked(const DeviceLockedRequest& request);
-    EarlyBootEndedResponse EarlyBootEnded();
+   RemoteKeymaster(cuttlefish::SharedFdKeymasterChannel*);
+   ~RemoteKeymaster();
+   bool Initialize();
+   void GetVersion(const GetVersionRequest& request,
+                   GetVersionResponse* response);
+   void SupportedAlgorithms(const SupportedAlgorithmsRequest& request,
+                            SupportedAlgorithmsResponse* response);
+   void SupportedBlockModes(const SupportedBlockModesRequest& request,
+                            SupportedBlockModesResponse* response);
+   void SupportedPaddingModes(const SupportedPaddingModesRequest& request,
+                              SupportedPaddingModesResponse* response);
+   void SupportedDigests(const SupportedDigestsRequest& request,
+                         SupportedDigestsResponse* response);
+   void SupportedImportFormats(const SupportedImportFormatsRequest& request,
+                               SupportedImportFormatsResponse* response);
+   void SupportedExportFormats(const SupportedExportFormatsRequest& request,
+                               SupportedExportFormatsResponse* response);
+   void AddRngEntropy(const AddEntropyRequest& request,
+                      AddEntropyResponse* response);
+   void Configure(const ConfigureRequest& request, ConfigureResponse* response);
+   void GenerateKey(const GenerateKeyRequest& request,
+                    GenerateKeyResponse* response);
+   void GetKeyCharacteristics(const GetKeyCharacteristicsRequest& request,
+                              GetKeyCharacteristicsResponse* response);
+   void ImportKey(const ImportKeyRequest& request, ImportKeyResponse* response);
+   void ImportWrappedKey(const ImportWrappedKeyRequest& request,
+                         ImportWrappedKeyResponse* response);
+   void ExportKey(const ExportKeyRequest& request, ExportKeyResponse* response);
+   void AttestKey(const AttestKeyRequest& request, AttestKeyResponse* response);
+   void UpgradeKey(const UpgradeKeyRequest& request,
+                   UpgradeKeyResponse* response);
+   void DeleteKey(const DeleteKeyRequest& request, DeleteKeyResponse* response);
+   void DeleteAllKeys(const DeleteAllKeysRequest& request,
+                      DeleteAllKeysResponse* response);
+   void BeginOperation(const BeginOperationRequest& request,
+                       BeginOperationResponse* response);
+   void UpdateOperation(const UpdateOperationRequest& request,
+                        UpdateOperationResponse* response);
+   void FinishOperation(const FinishOperationRequest& request,
+                        FinishOperationResponse* response);
+   void AbortOperation(const AbortOperationRequest& request,
+                       AbortOperationResponse* response);
+   GetHmacSharingParametersResponse GetHmacSharingParameters();
+   ComputeSharedHmacResponse ComputeSharedHmac(
+       const ComputeSharedHmacRequest& request);
+   VerifyAuthorizationResponse VerifyAuthorization(
+       const VerifyAuthorizationRequest& request);
+   DeviceLockedResponse DeviceLocked(const DeviceLockedRequest& request);
+   EarlyBootEndedResponse EarlyBootEnded();
 
-    // CF HAL and remote sides are always compiled together, so will never disagree about message
-    // versions.
-    uint32_t message_version() { return kDefaultMessageVersion; }
+   // CF HAL and remote sides are always compiled together, so will never
+   // disagree about message versions.
+   uint32_t message_version() { return kDefaultMessageVersion; }
 };
 
 }  // namespace keymaster
diff --git a/guest/hals/keymaster/remote/service4.cpp b/guest/hals/keymaster/remote/service4.cpp
index 47f6261..24727dc 100644
--- a/guest/hals/keymaster/remote/service4.cpp
+++ b/guest/hals/keymaster/remote/service4.cpp
@@ -45,7 +45,7 @@
                 << fd->StrError();
   }
 
-  cuttlefish::KeymasterChannel keymasterChannel(fd, fd);
+  cuttlefish::SharedFdKeymasterChannel keymasterChannel(fd, fd);
 
   auto remoteKeymaster = new keymaster::RemoteKeymaster(&keymasterChannel);
 
diff --git a/guest/hals/keymint/remote/Android.bp b/guest/hals/keymint/remote/Android.bp
index 4944d37..748a0ec 100644
--- a/guest/hals/keymint/remote/Android.bp
+++ b/guest/hals/keymint/remote/Android.bp
@@ -32,6 +32,7 @@
         "-Wextra",
     ],
     shared_libs: [
+        "android.hardware.security.rkp-V3-ndk",
         "android.hardware.security.secureclock-V1-ndk",
         "android.hardware.security.sharedsecret-V1-ndk",
         "lib_android_keymaster_keymint_utils",
@@ -61,7 +62,6 @@
         "keymint_use_latest_hal_aidl_ndk_shared",
     ],
     required: [
-        "RemoteProvisioner",
         "android.hardware.hardware_keystore.remote-keymint.xml",
     ],
 }
diff --git a/guest/hals/keymint/remote/android.hardware.hardware_keystore.remote-keymint.xml b/guest/hals/keymint/remote/android.hardware.hardware_keystore.remote-keymint.xml
index 2ebf1fe..4c75596 100644
--- a/guest/hals/keymint/remote/android.hardware.hardware_keystore.remote-keymint.xml
+++ b/guest/hals/keymint/remote/android.hardware.hardware_keystore.remote-keymint.xml
@@ -14,5 +14,5 @@
      limitations under the License.
 -->
 <permissions>
-  <feature name="android.hardware.hardware_keystore" version="200" />
+  <feature name="android.hardware.hardware_keystore" version="300" />
 </permissions>
diff --git a/guest/hals/keymint/remote/android.hardware.security.keymint-service.remote.xml b/guest/hals/keymint/remote/android.hardware.security.keymint-service.remote.xml
index a4d0302..0568ae6 100644
--- a/guest/hals/keymint/remote/android.hardware.security.keymint-service.remote.xml
+++ b/guest/hals/keymint/remote/android.hardware.security.keymint-service.remote.xml
@@ -1,12 +1,12 @@
 <manifest version="1.0" type="device">
     <hal format="aidl">
         <name>android.hardware.security.keymint</name>
-        <version>2</version>
+        <version>3</version>
         <fqname>IKeyMintDevice/default</fqname>
     </hal>
     <hal format="aidl">
         <name>android.hardware.security.keymint</name>
-        <version>2</version>
+        <version>3</version>
         <fqname>IRemotelyProvisionedComponent/default</fqname>
     </hal>
 </manifest>
diff --git a/guest/hals/keymint/remote/remote_keymaster.cpp b/guest/hals/keymint/remote/remote_keymaster.cpp
index 763c139..b07cfa5 100644
--- a/guest/hals/keymint/remote/remote_keymaster.cpp
+++ b/guest/hals/keymint/remote/remote_keymaster.cpp
@@ -24,7 +24,7 @@
 
 namespace keymaster {
 
-RemoteKeymaster::RemoteKeymaster(cuttlefish::KeymasterChannel* channel,
+RemoteKeymaster::RemoteKeymaster(cuttlefish::SharedFdKeymasterChannel* channel,
                                  int32_t message_version)
     : channel_(channel), message_version_(message_version) {}
 
@@ -111,6 +111,64 @@
     }
   }
 
+  // Pass attestation IDs to the remote KM implementation.
+  // Skip MEID as it is not present on emulators.
+  SetAttestationIdsKM3Request requestKM3(message_version());
+
+  static constexpr char brand_prop_name[] = "ro.product.brand";
+  static constexpr char device_prop_name[] = "ro.product.device";
+  static constexpr char product_prop_name[] = "ro.product.name";
+  static constexpr char serial_prop_name[] = "ro.serialno";
+  static constexpr char manufacturer_prop_name[] = "ro.product.manufacturer";
+  static constexpr char model_prop_name[] = "ro.product.model";
+
+  std::string brand_prop_value =
+      android::base::GetProperty(brand_prop_name, "");
+  std::string device_prop_value =
+      android::base::GetProperty(device_prop_name, "");
+  std::string product_prop_value =
+      android::base::GetProperty(product_prop_name, "");
+  std::string serial_prop_value =
+      android::base::GetProperty(serial_prop_name, "");
+  std::string manufacturer_prop_value =
+      android::base::GetProperty(manufacturer_prop_name, "");
+  std::string model_prop_value =
+      android::base::GetProperty(model_prop_name, "");
+
+  // Currently modem_simulator always returns one fixed value. See
+  // `handleGetIMEI` in
+  // device/google/cuttlefish/host/commands/modem_simulator/misc_service.cpp for
+  // more details.
+  // This should be updated once the below bug is fixed -
+  // b/263188546 - Use device-specific IMEI values rather than one hardcoded
+  // value.
+  std::string imei_value = "867400022047199";
+  requestKM3.base.imei.Reinitialize(imei_value.data(), imei_value.size());
+
+  requestKM3.base.brand.Reinitialize(brand_prop_value.data(),
+                                     brand_prop_value.size());
+  requestKM3.base.device.Reinitialize(device_prop_value.data(),
+                                      device_prop_value.size());
+  requestKM3.base.product.Reinitialize(product_prop_value.data(),
+                                       product_prop_value.size());
+  requestKM3.base.serial.Reinitialize(serial_prop_value.data(),
+                                      serial_prop_value.size());
+  requestKM3.base.manufacturer.Reinitialize(manufacturer_prop_value.data(),
+                                            manufacturer_prop_value.size());
+  requestKM3.base.model.Reinitialize(model_prop_value.data(),
+                                     model_prop_value.size());
+
+  std::string second_imei_value = "867400022047199";
+  requestKM3.second_imei.Reinitialize(second_imei_value.data(),
+                                      second_imei_value.size());
+
+  SetAttestationIdsKM3Response responseKM3 = SetAttestationIdsKM3(requestKM3);
+  if (responseKM3.error != KM_ERROR_OK) {
+    LOG(ERROR) << "Failed to configure keymaster attestation IDs: "
+               << responseKM3.error;
+    return false;
+  }
+
   return true;
 }
 
@@ -187,6 +245,11 @@
   ForwardCommand(GENERATE_CSR, request, response);
 }
 
+void RemoteKeymaster::GenerateCsrV2(const GenerateCsrV2Request& request,
+                                    GenerateCsrV2Response* response) {
+  ForwardCommand(GENERATE_CSR_V2, request, response);
+}
+
 void RemoteKeymaster::GetKeyCharacteristics(
     const GetKeyCharacteristicsRequest& request,
     GetKeyCharacteristicsResponse* response) {
@@ -319,4 +382,26 @@
   return response;
 }
 
+GetHwInfoResponse RemoteKeymaster::GetHwInfo() {
+  // Unused empty buffer to allow ForwardCommand to have something to serialize
+  Buffer request;
+  GetHwInfoResponse response(message_version());
+  ForwardCommand(GET_HW_INFO, request, &response);
+  return response;
+}
+
+SetAttestationIdsResponse RemoteKeymaster::SetAttestationIds(
+    const SetAttestationIdsRequest& request) {
+  SetAttestationIdsResponse response(message_version());
+  ForwardCommand(SET_ATTESTATION_IDS, request, &response);
+  return response;
+}
+
+SetAttestationIdsKM3Response RemoteKeymaster::SetAttestationIdsKM3(
+    const SetAttestationIdsKM3Request& request) {
+  SetAttestationIdsKM3Response response(message_version());
+  ForwardCommand(SET_ATTESTATION_IDS_KM3, request, &response);
+  return response;
+}
+
 }  // namespace keymaster
diff --git a/guest/hals/keymint/remote/remote_keymaster.h b/guest/hals/keymint/remote/remote_keymaster.h
index 2e0668f..e920334 100644
--- a/guest/hals/keymint/remote/remote_keymaster.h
+++ b/guest/hals/keymint/remote/remote_keymaster.h
@@ -19,20 +19,20 @@
 
 #include <keymaster/android_keymaster_messages.h>
 
-#include "common/libs/security/keymaster_channel.h"
+#include "common/libs/security/keymaster_channel_sharedfd.h"
 
 namespace keymaster {
 
 class RemoteKeymaster {
  private:
-  cuttlefish::KeymasterChannel* channel_;
+  cuttlefish::SharedFdKeymasterChannel* channel_;
   const int32_t message_version_;
 
   void ForwardCommand(AndroidKeymasterCommand command, const Serializable& req,
                       KeymasterResponse* rsp);
 
  public:
-  RemoteKeymaster(cuttlefish::KeymasterChannel*,
+  RemoteKeymaster(cuttlefish::SharedFdKeymasterChannel*,
                   int32_t message_version = kDefaultMessageVersion);
   ~RemoteKeymaster();
   bool Initialize();
@@ -59,6 +59,8 @@
                       GenerateRkpKeyResponse* response);
   void GenerateCsr(const GenerateCsrRequest& request,
                    GenerateCsrResponse* response);
+  void GenerateCsrV2(const GenerateCsrV2Request& request,
+                     GenerateCsrV2Response* response);
   void GetKeyCharacteristics(const GetKeyCharacteristicsRequest& request,
                              GetKeyCharacteristicsResponse* response);
   void ImportKey(const ImportKeyRequest& request, ImportKeyResponse* response);
@@ -95,6 +97,11 @@
   void GenerateTimestampToken(GenerateTimestampTokenRequest& request,
                               GenerateTimestampTokenResponse* response);
   GetRootOfTrustResponse GetRootOfTrust(const GetRootOfTrustRequest& request);
+  GetHwInfoResponse GetHwInfo();
+  SetAttestationIdsResponse SetAttestationIds(
+      const SetAttestationIdsRequest& request);
+  SetAttestationIdsKM3Response SetAttestationIdsKM3(
+      const SetAttestationIdsKM3Request& request);
 
   // CF HAL and remote sides are always compiled together, so will never
   // disagree about message versions.
diff --git a/guest/hals/keymint/remote/remote_keymint_device.cpp b/guest/hals/keymint/remote/remote_keymint_device.cpp
index c6db283..0fc9c5d 100644
--- a/guest/hals/keymint/remote/remote_keymint_device.cpp
+++ b/guest/hals/keymint/remote/remote_keymint_device.cpp
@@ -97,6 +97,7 @@
       case KM_TAG_ATTESTATION_ID_BRAND:
       case KM_TAG_ATTESTATION_ID_DEVICE:
       case KM_TAG_ATTESTATION_ID_IMEI:
+      case KM_TAG_ATTESTATION_ID_SECOND_IMEI:
       case KM_TAG_ATTESTATION_ID_MANUFACTURER:
       case KM_TAG_ATTESTATION_ID_MEID:
       case KM_TAG_ATTESTATION_ID_MODEL:
diff --git a/guest/hals/keymint/remote/remote_keymint_operation.cpp b/guest/hals/keymint/remote/remote_keymint_operation.cpp
index b88715a..59624a5 100644
--- a/guest/hals/keymint/remote/remote_keymint_operation.cpp
+++ b/guest/hals/keymint/remote/remote_keymint_operation.cpp
@@ -50,13 +50,17 @@
 }
 
 ScopedAStatus RemoteKeyMintOperation::updateAad(
-    const vector<uint8_t>& input,
-    const optional<HardwareAuthToken>& /* authToken */,
+    const vector<uint8_t>& input, const optional<HardwareAuthToken>& authToken,
     const optional<TimeStampToken>& /* timestampToken */) {
   UpdateOperationRequest request(impl_.message_version());
   request.op_handle = opHandle_;
   request.additional_params.push_back(TAG_ASSOCIATED_DATA, input.data(),
                                       input.size());
+  if (authToken) {
+    auto tokenAsVec(authToken2AidlVec(*authToken));
+    request.additional_params.push_back(keymaster::TAG_AUTH_TOKEN,
+                                        tokenAsVec.data(), tokenAsVec.size());
+  }
 
   UpdateOperationResponse response(impl_.message_version());
   impl_.UpdateOperation(request, &response);
@@ -65,8 +69,7 @@
 }
 
 ScopedAStatus RemoteKeyMintOperation::update(
-    const vector<uint8_t>& input,
-    const optional<HardwareAuthToken>& /* authToken */,
+    const vector<uint8_t>& input, const optional<HardwareAuthToken>& authToken,
     const optional<TimeStampToken>&
     /* timestampToken */,
     vector<uint8_t>* output) {
@@ -75,6 +78,11 @@
   UpdateOperationRequest request(impl_.message_version());
   request.op_handle = opHandle_;
   request.input.Reinitialize(input.data(), input.size());
+  if (authToken) {
+    auto tokenAsVec(authToken2AidlVec(*authToken));
+    request.additional_params.push_back(keymaster::TAG_AUTH_TOKEN,
+                                        tokenAsVec.data(), tokenAsVec.size());
+  }
 
   UpdateOperationResponse response(impl_.message_version());
   impl_.UpdateOperation(request, &response);
@@ -92,9 +100,9 @@
 ScopedAStatus RemoteKeyMintOperation::finish(
     const optional<vector<uint8_t>>& input,      //
     const optional<vector<uint8_t>>& signature,  //
-    const optional<HardwareAuthToken>& /* authToken */,
+    const optional<HardwareAuthToken>& authToken,
     const optional<TimeStampToken>& /* timestampToken */,
-    const optional<vector<uint8_t>>& /* confirmationToken */,
+    const optional<vector<uint8_t>>& confirmationToken,
     vector<uint8_t>* output) {
   if (!output) {
     return ScopedAStatus(AStatus_fromServiceSpecificError(
@@ -104,8 +112,19 @@
   FinishOperationRequest request(impl_.message_version());
   request.op_handle = opHandle_;
   if (input) request.input.Reinitialize(input->data(), input->size());
-  if (signature)
+  if (signature) {
     request.signature.Reinitialize(signature->data(), signature->size());
+  }
+  if (authToken) {
+    auto tokenAsVec(authToken2AidlVec(*authToken));
+    request.additional_params.push_back(keymaster::TAG_AUTH_TOKEN,
+                                        tokenAsVec.data(), tokenAsVec.size());
+  }
+  if (confirmationToken) {
+    request.additional_params.push_back(keymaster::TAG_CONFIRMATION_TOKEN,
+                                        confirmationToken->data(),
+                                        confirmationToken->size());
+  }
 
   FinishOperationResponse response(impl_.message_version());
   impl_.FinishOperation(request, &response);
diff --git a/guest/hals/keymint/remote/remote_remotely_provisioned_component.cpp b/guest/hals/keymint/remote/remote_remotely_provisioned_component.cpp
index 4098362..f5815ad 100644
--- a/guest/hals/keymint/remote/remote_remotely_provisioned_component.cpp
+++ b/guest/hals/keymint/remote/remote_remotely_provisioned_component.cpp
@@ -36,14 +36,15 @@
 using namespace keymaster;
 
 using ::aidl::android::hardware::security::keymint::km_utils::kmBlob2vector;
+using ::aidl::android::hardware::security::keymint::km_utils::
+    kmError2ScopedAStatus;
 using ::ndk::ScopedAStatus;
 
 // Error codes from the provisioning stack are negated.
 ndk::ScopedAStatus toKeymasterError(const KeymasterResponse& response) {
   auto error =
       static_cast<keymaster_error_t>(-static_cast<int32_t>(response.error));
-  return ::aidl::android::hardware::security::keymint::km_utils::
-      kmError2ScopedAStatus(error);
+  return kmError2ScopedAStatus(error);
 }
 
 }  // namespace
@@ -54,10 +55,16 @@
 
 ScopedAStatus RemoteRemotelyProvisionedComponent::getHardwareInfo(
     RpcHardwareInfo* info) {
-  info->versionNumber = 2;
-  info->rpcAuthorName = "Google";
-  info->supportedEekCurve = RpcHardwareInfo::CURVE_25519;
-  info->uniqueId = "remote keymint";
+  GetHwInfoResponse response = impl_.GetHwInfo();
+  if (response.error != KM_ERROR_OK) {
+    return toKeymasterError(response);
+  }
+
+  info->versionNumber = response.version;
+  info->rpcAuthorName = response.rpcAuthorName;
+  info->supportedEekCurve = response.supportedEekCurve;
+  info->uniqueId = response.uniqueId;
+  info->supportedNumKeysInCsr = response.supportedNumKeysInCsr;
   return ScopedAStatus::ok();
 }
 
@@ -106,4 +113,28 @@
   return ScopedAStatus::ok();
 }
 
+ScopedAStatus RemoteRemotelyProvisionedComponent::generateCertificateRequestV2(
+    const std::vector<MacedPublicKey>& keysToSign,
+    const std::vector<uint8_t>& challenge, std::vector<uint8_t>* csr) {
+  GenerateCsrV2Request request(impl_.message_version());
+  if (!request.InitKeysToSign(keysToSign.size())) {
+    return kmError2ScopedAStatus(static_cast<keymaster_error_t>(
+        BnRemotelyProvisionedComponent::STATUS_FAILED));
+  }
+
+  for (size_t i = 0; i < keysToSign.size(); i++) {
+    request.SetKeyToSign(i, keysToSign[i].macedKey.data(),
+                         keysToSign[i].macedKey.size());
+  }
+  request.SetChallenge(challenge.data(), challenge.size());
+  GenerateCsrV2Response response(impl_.message_version());
+  impl_.GenerateCsrV2(request, &response);
+
+  if (response.error != KM_ERROR_OK) {
+    return toKeymasterError(response);
+  }
+  *csr = km_utils::kmBlob2vector(response.csr);
+  return ScopedAStatus::ok();
+}
+
 }  // namespace aidl::android::hardware::security::keymint
diff --git a/guest/hals/keymint/remote/remote_remotely_provisioned_component.h b/guest/hals/keymint/remote/remote_remotely_provisioned_component.h
index 35e182a..30be9e8 100644
--- a/guest/hals/keymint/remote/remote_remotely_provisioned_component.h
+++ b/guest/hals/keymint/remote/remote_remotely_provisioned_component.h
@@ -47,6 +47,11 @@
       ProtectedData* protectedData,
       std::vector<uint8_t>* keysToSignMac) override;
 
+  ndk::ScopedAStatus generateCertificateRequestV2(
+      const std::vector<MacedPublicKey>& keysToSign,
+      const std::vector<uint8_t>& challenge,
+      std::vector<uint8_t>* csr) override;
+
  private:
   keymaster::RemoteKeymaster& impl_;
 };
diff --git a/guest/hals/keymint/remote/service.cpp b/guest/hals/keymint/remote/service.cpp
index 1fb0143..7ecbf74 100644
--- a/guest/hals/keymint/remote/service.cpp
+++ b/guest/hals/keymint/remote/service.cpp
@@ -89,11 +89,11 @@
                << " a raw terminal: " << fd->StrError();
   }
 
-  cuttlefish::KeymasterChannel keymasterChannel(fd, fd);
+  cuttlefish::SharedFdKeymasterChannel keymasterChannel(fd, fd);
 
   keymaster::RemoteKeymaster remote_keymaster(
       &keymasterChannel, keymaster::MessageVersion(
-                             keymaster::KmVersion::KEYMINT_2, 0 /* km_date */));
+                             keymaster::KmVersion::KEYMINT_3, 0 /* km_date */));
 
   if (!remote_keymaster.Initialize()) {
     LOG(FATAL) << "Could not initialize keymaster";
diff --git a/guest/hals/keymint/rust/Android.bp b/guest/hals/keymint/rust/Android.bp
new file mode 100644
index 0000000..c073d0a
--- /dev/null
+++ b/guest/hals/keymint/rust/Android.bp
@@ -0,0 +1,50 @@
+//
+// Copyright (C) 2022 The Android Open-Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+rust_binary {
+    name: "android.hardware.security.keymint-service.rust",
+    relative_install_path: "hw",
+    init_rc: ["android.hardware.security.keymint-service.rust.rc"],
+    vintf_fragments: [
+        "android.hardware.security.keymint-service.rust.xml",
+        "android.hardware.security.secureclock-service.rust.xml",
+        "android.hardware.security.sharedsecret-service.rust.xml",
+    ],
+    vendor: true,
+    srcs: ["src/keymint_hal_main.rs"],
+    rustlibs: [
+        "libandroid_logger",
+        "libbinder_rs",
+        "libhex",
+        "libkmr_wire",
+        "libkmr_hal",
+        "liblibc",
+        "liblog_rust",
+    ],
+    required: [
+        "android.hardware.hardware_keystore.rust-keymint.xml",
+    ],
+}
+
+prebuilt_etc {
+    name: "android.hardware.hardware_keystore.rust-keymint.xml",
+    sub_dir: "permissions",
+    vendor: true,
+    src: "android.hardware.hardware_keystore.rust-keymint.xml",
+}
diff --git a/guest/hals/keymint/rust/android.hardware.hardware_keystore.rust-keymint.xml b/guest/hals/keymint/rust/android.hardware.hardware_keystore.rust-keymint.xml
new file mode 100644
index 0000000..4c75596
--- /dev/null
+++ b/guest/hals/keymint/rust/android.hardware.hardware_keystore.rust-keymint.xml
@@ -0,0 +1,18 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright 2021 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<permissions>
+  <feature name="android.hardware.hardware_keystore" version="300" />
+</permissions>
diff --git a/guest/hals/keymint/rust/android.hardware.security.keymint-service.rust.rc b/guest/hals/keymint/rust/android.hardware.security.keymint-service.rust.rc
new file mode 100644
index 0000000..5c6d1f8
--- /dev/null
+++ b/guest/hals/keymint/rust/android.hardware.security.keymint-service.rust.rc
@@ -0,0 +1,6 @@
+service vendor.keymint-rust /vendor/bin/hw/android.hardware.security.keymint-service.rust
+    class early_hal
+    user nobody
+    # The keymint service is not allowed to restart.
+    # If it crashes, a device restart is required.
+    oneshot
diff --git a/guest/hals/keymint/rust/android.hardware.security.keymint-service.rust.xml b/guest/hals/keymint/rust/android.hardware.security.keymint-service.rust.xml
new file mode 100644
index 0000000..0568ae6
--- /dev/null
+++ b/guest/hals/keymint/rust/android.hardware.security.keymint-service.rust.xml
@@ -0,0 +1,12 @@
+<manifest version="1.0" type="device">
+    <hal format="aidl">
+        <name>android.hardware.security.keymint</name>
+        <version>3</version>
+        <fqname>IKeyMintDevice/default</fqname>
+    </hal>
+    <hal format="aidl">
+        <name>android.hardware.security.keymint</name>
+        <version>3</version>
+        <fqname>IRemotelyProvisionedComponent/default</fqname>
+    </hal>
+</manifest>
diff --git a/guest/hals/keymint/rust/android.hardware.security.secureclock-service.rust.xml b/guest/hals/keymint/rust/android.hardware.security.secureclock-service.rust.xml
new file mode 100644
index 0000000..c0ff775
--- /dev/null
+++ b/guest/hals/keymint/rust/android.hardware.security.secureclock-service.rust.xml
@@ -0,0 +1,6 @@
+<manifest version="1.0" type="device">
+    <hal format="aidl">
+        <name>android.hardware.security.secureclock</name>
+        <fqname>ISecureClock/default</fqname>
+    </hal>
+</manifest>
diff --git a/guest/hals/keymint/rust/android.hardware.security.sharedsecret-service.rust.xml b/guest/hals/keymint/rust/android.hardware.security.sharedsecret-service.rust.xml
new file mode 100644
index 0000000..d37981f
--- /dev/null
+++ b/guest/hals/keymint/rust/android.hardware.security.sharedsecret-service.rust.xml
@@ -0,0 +1,6 @@
+<manifest version="1.0" type="device">
+    <hal format="aidl">
+        <name>android.hardware.security.sharedsecret</name>
+        <fqname>ISharedSecret/default</fqname>
+    </hal>
+</manifest>
diff --git a/guest/hals/keymint/rust/src/keymint_hal_main.rs b/guest/hals/keymint/rust/src/keymint_hal_main.rs
new file mode 100644
index 0000000..08709b7
--- /dev/null
+++ b/guest/hals/keymint/rust/src/keymint_hal_main.rs
@@ -0,0 +1,242 @@
+// Copyright 2021, The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+//! This crate implements the KeyMint HAL service in Rust, communicating with a Rust
+//! trusted application (TA) running on the Cuttlefish host.
+
+use kmr_hal::env::get_property;
+use log::{debug, error, info};
+use std::ops::DerefMut;
+use std::os::unix::io::FromRawFd;
+use std::panic;
+use std::sync::{Arc, Mutex};
+
+/// Device file used to communicate with the KeyMint TA.
+static DEVICE_FILE_NAME: &str = "/dev/hvc3";
+
+/// Name of KeyMint binder device instance.
+static SERVICE_INSTANCE: &str = "default";
+
+static KM_SERVICE_NAME: &str = "android.hardware.security.keymint.IKeyMintDevice";
+static RPC_SERVICE_NAME: &str = "android.hardware.security.keymint.IRemotelyProvisionedComponent";
+static CLOCK_SERVICE_NAME: &str = "android.hardware.security.secureclock.ISecureClock";
+static SECRET_SERVICE_NAME: &str = "android.hardware.security.sharedsecret.ISharedSecret";
+
+/// Local error type for failures in the HAL service.
+#[derive(Debug, Clone)]
+struct HalServiceError(String);
+
+/// Read-write file used for communication with host TA.
+#[derive(Debug)]
+struct FileChannel(std::fs::File);
+
+impl kmr_hal::SerializedChannel for FileChannel {
+    const MAX_SIZE: usize = kmr_wire::DEFAULT_MAX_SIZE;
+
+    fn execute(&mut self, serialized_req: &[u8]) -> binder::Result<Vec<u8>> {
+        kmr_hal::write_msg(&mut self.0, serialized_req)?;
+        kmr_hal::read_msg(&mut self.0)
+    }
+}
+
+/// Set 'raw' mode for the given file descriptor.
+fn set_terminal_raw(fd: libc::c_int) -> Result<(), HalServiceError> {
+    let mut settings: libc::termios = unsafe { std::mem::zeroed() };
+    let result = unsafe { libc::tcgetattr(fd, &mut settings) };
+    if result < 0 {
+        return Err(HalServiceError(format!(
+            "Failed to get terminal attributes for {}: {:?}",
+            fd,
+            std::io::Error::last_os_error()
+        )));
+    }
+
+    let result = unsafe {
+        libc::cfmakeraw(&mut settings);
+        libc::tcsetattr(fd, libc::TCSANOW, &settings)
+    };
+    if result < 0 {
+        return Err(HalServiceError(format!(
+            "Failed to set terminal attributes for {}: {:?}",
+            fd,
+            std::io::Error::last_os_error()
+        )));
+    }
+    Ok(())
+}
+
+fn main() {
+    if let Err(e) = inner_main() {
+        panic!("HAL service failed: {:?}", e);
+    }
+}
+
+fn inner_main() -> Result<(), HalServiceError> {
+    // Initialize android logging.
+    android_logger::init_once(
+        android_logger::Config::default()
+            .with_tag("keymint-hal")
+            .with_min_level(log::Level::Info)
+            .with_log_id(android_logger::LogId::System),
+    );
+    // Redirect panic messages to logcat.
+    panic::set_hook(Box::new(|panic_info| {
+        error!("{}", panic_info);
+    }));
+
+    info!("KeyMint HAL service is starting.");
+
+    info!("Starting thread pool now.");
+    binder::ProcessState::start_thread_pool();
+
+    // Create a connection to the TA.
+    let path = std::ffi::CString::new(DEVICE_FILE_NAME).unwrap();
+    let fd = unsafe { libc::open(path.as_ptr(), libc::O_RDWR) };
+    if fd < 0 {
+        return Err(HalServiceError(format!(
+            "Failed to open device file '{}': {:?}",
+            DEVICE_FILE_NAME,
+            std::io::Error::last_os_error()
+        )));
+    }
+    set_terminal_raw(fd)?;
+    let channel = Arc::new(Mutex::new(FileChannel(unsafe { std::fs::File::from_raw_fd(fd) })));
+
+    let km_service = kmr_hal::keymint::Device::new_as_binder(channel.clone());
+    let service_name = format!("{}/{}", KM_SERVICE_NAME, SERVICE_INSTANCE);
+    binder::add_service(&service_name, km_service.as_binder()).map_err(|e| {
+        HalServiceError(format!("Failed to register service {} because of {:?}.", service_name, e))
+    })?;
+
+    let rpc_service = kmr_hal::rpc::Device::new_as_binder(channel.clone());
+    let service_name = format!("{}/{}", RPC_SERVICE_NAME, SERVICE_INSTANCE);
+    binder::add_service(&service_name, rpc_service.as_binder()).map_err(|e| {
+        HalServiceError(format!("Failed to register service {} because of {:?}.", service_name, e))
+    })?;
+
+    let clock_service = kmr_hal::secureclock::Device::new_as_binder(channel.clone());
+    let service_name = format!("{}/{}", CLOCK_SERVICE_NAME, SERVICE_INSTANCE);
+    binder::add_service(&service_name, clock_service.as_binder()).map_err(|e| {
+        HalServiceError(format!("Failed to register service {} because of {:?}.", service_name, e))
+    })?;
+
+    let secret_service = kmr_hal::sharedsecret::Device::new_as_binder(channel.clone());
+    let service_name = format!("{}/{}", SECRET_SERVICE_NAME, SERVICE_INSTANCE);
+    binder::add_service(&service_name, secret_service.as_binder()).map_err(|e| {
+        HalServiceError(format!("Failed to register service {} because of {:?}.", service_name, e))
+    })?;
+
+    info!("Successfully registered KeyMint HAL services.");
+
+    // Let the TA know information about the boot environment. In a real device this
+    // is communicated directly from the bootloader to the TA, but here we retrieve
+    // the information from system properties and send from the HAL service.
+    // TODO: investigate Cuttlefish bootloader info propagation
+    // https://android.googlesource.com/platform/external/u-boot/+/2114f87e56d262220c4dc5e00c3321e99e12204b/boot/android_bootloader_keymint.c
+    let boot_req = get_boot_info();
+    debug!("boot/HAL->TA: boot info is {:?}", boot_req);
+    kmr_hal::send_boot_info(channel.lock().unwrap().deref_mut(), boot_req)
+        .map_err(|e| HalServiceError(format!("Failed to send boot info: {:?}", e)))?;
+
+    // Let the TA know information about the userspace environment.
+    if let Err(e) = kmr_hal::send_hal_info(channel.lock().unwrap().deref_mut()) {
+        error!("Failed to send HAL info: {:?}", e);
+    }
+
+    // Let the TA know about attestation IDs. (In a real device these would be pre-provisioned into
+    // the TA.)
+    let attest_ids = attestation_id_info();
+    if let Err(e) = kmr_hal::send_attest_ids(channel.lock().unwrap().deref_mut(), attest_ids) {
+        error!("Failed to send attestation ID info: {:?}", e);
+    }
+
+    info!("Joining thread pool now.");
+    binder::ProcessState::join_thread_pool();
+    info!("KeyMint HAL service is terminating.");
+    Ok(())
+}
+
+/// Populate attestation ID information based on properties (where available).
+fn attestation_id_info() -> kmr_wire::AttestationIdInfo {
+    let prop = |name| {
+        get_property(name).unwrap_or_else(|_| format!("{} unavailable", name)).as_bytes().to_vec()
+    };
+    kmr_wire::AttestationIdInfo {
+        brand: prop("ro.product.brand"),
+        device: prop("ro.product.device"),
+        product: prop("ro.product.name"),
+        serial: prop("ro.serialno"),
+        manufacturer: prop("ro.product.manufacturer"),
+        model: prop("ro.product.model"),
+        // Currently modem_simulator always returns one fixed value. See `handleGetIMEI` in
+        // device/google/cuttlefish/host/commands/modem_simulator/misc_service.cpp for more details.
+        // TODO(b/263188546): Use device-specific IMEI values when available.
+        imei: b"867400022047199".to_vec(),
+        imei2: b"867400022047199".to_vec(),
+        meid: vec![],
+    }
+}
+
+/// Get boot information based on system properties.
+fn get_boot_info() -> kmr_wire::SetBootInfoRequest {
+    // No access to a verified boot key.
+    let verified_boot_key = vec![0; 32];
+    let vbmeta_digest = get_property("ro.boot.vbmeta.digest").unwrap_or_else(|_| "00".repeat(32));
+    let verified_boot_hash = hex::decode(&vbmeta_digest).unwrap_or_else(|_e| {
+        error!("failed to parse hex data in '{}'", vbmeta_digest);
+        vec![0; 32]
+    });
+    let device_boot_locked = match get_property("ro.boot.vbmeta.device_state")
+        .unwrap_or_else(|_| "no-prop".to_string())
+        .as_str()
+    {
+        "locked" => true,
+        "unlocked" => false,
+        v => {
+            error!("Unknown device_state '{}', treating as unlocked", v);
+            false
+        }
+    };
+    let verified_boot_state = match get_property("ro.boot.verifiedbootstate")
+        .unwrap_or_else(|_| "no-prop".to_string())
+        .as_str()
+    {
+        "green" => 0,  // Verified
+        "yellow" => 1, // SelfSigned
+        "orange" => 2, // Unverified,
+        "red" => 3,    // Failed,
+        v => {
+            error!("Unknown boot state '{}', treating as Unverified", v);
+            2
+        }
+    };
+
+    // Attempt to get the boot patchlevel from a system property.  This requires an SELinux
+    // permission, so fall back to re-using the OS patchlevel if this can't be done.
+    let boot_patchlevel_prop = get_property("ro.vendor.boot_security_patch").unwrap_or_else(|e| {
+        error!("Failed to retrieve boot patchlevel: {:?}", e);
+        get_property(kmr_hal::env::OS_PATCHLEVEL_PROPERTY)
+            .unwrap_or_else(|_| "1970-09-19".to_string())
+    });
+    let boot_patchlevel =
+        kmr_hal::env::extract_patchlevel(&boot_patchlevel_prop).unwrap_or(19700919);
+
+    kmr_wire::SetBootInfoRequest {
+        verified_boot_key,
+        device_boot_locked,
+        verified_boot_state,
+        verified_boot_hash,
+        boot_patchlevel,
+    }
+}
diff --git a/guest/hals/nfc/Android.bp b/guest/hals/nfc/Android.bp
index bf71656..3203e81 100644
--- a/guest/hals/nfc/Android.bp
+++ b/guest/hals/nfc/Android.bp
@@ -1,3 +1,18 @@
+//
+// Copyright (C) 2021 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
 package {
     default_applicable_licenses: ["Android-Apache-2.0"],
 }
diff --git a/guest/hals/nfc/conf/Android.bp b/guest/hals/nfc/conf/Android.bp
index cc0a2b4..c0ab476 100644
--- a/guest/hals/nfc/conf/Android.bp
+++ b/guest/hals/nfc/conf/Android.bp
@@ -1,3 +1,18 @@
+//
+// Copyright (C) 2021 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
 package {
     default_applicable_licenses: ["Android-Apache-2.0"],
 }
diff --git a/guest/hals/ril/reference-libril/Android.bp b/guest/hals/ril/reference-libril/Android.bp
index ab5ac03..de07522 100644
--- a/guest/hals/ril/reference-libril/Android.bp
+++ b/guest/hals/ril/reference-libril/Android.bp
@@ -24,6 +24,11 @@
         "-Wno-unused-parameter",
     ],
     srcs: [
+        "RefRadioSim.cpp",
+        "RefRadioModem.cpp",
+        "RefRadioIms.cpp",
+        "RefImsMedia.cpp",
+        "RefImsMediaSession.cpp",
         "RefRadioNetwork.cpp",
         "ril.cpp",
         "RilSapSocket.cpp",
@@ -38,13 +43,16 @@
     ],
     shared_libs: [
         "android.hardware.radio-library.compat",
-        "android.hardware.radio.config-V1-ndk",
-        "android.hardware.radio.data-V1-ndk",
-        "android.hardware.radio.messaging-V1-ndk",
-        "android.hardware.radio.modem-V1-ndk",
-        "android.hardware.radio.network-V1-ndk",
-        "android.hardware.radio.sim-V1-ndk",
-        "android.hardware.radio.voice-V1-ndk",
+        "android.hardware.radio.config-V2-ndk",
+        "android.hardware.radio.data-V2-ndk",
+        "android.hardware.radio.ims-V1-ndk",
+        "android.hardware.radio.ims.media-V1-ndk",
+        "android.hardware.radio.messaging-V2-ndk",
+        "android.hardware.radio.modem-V2-ndk",
+        "android.hardware.radio.network-V2-ndk",
+        "android.hardware.radio.sap-V1-ndk",
+        "android.hardware.radio.sim-V2-ndk",
+        "android.hardware.radio.voice-V2-ndk",
         "android.hardware.radio@1.0",
         "android.hardware.radio@1.1",
         "android.hardware.radio@1.2",
@@ -74,6 +82,6 @@
 filegroup {
     name: "libril-modem-lib-manifests",
     srcs: [
-        "android.hardware.radio@2.0.xml",
+        "android.hardware.radio@2.1.xml",
     ],
 }
diff --git a/guest/hals/ril/reference-libril/RefImsMedia.cpp b/guest/hals/ril/reference-libril/RefImsMedia.cpp
new file mode 100644
index 0000000..41b0360
--- /dev/null
+++ b/guest/hals/ril/reference-libril/RefImsMedia.cpp
@@ -0,0 +1,47 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "RefImsMedia.h"
+
+namespace cf::ril {
+
+using ::ndk::ScopedAStatus;
+using namespace ::aidl::android::hardware::radio::ims::media;
+constexpr auto ok = &ScopedAStatus::ok;
+std::shared_ptr<IImsMediaListener> mediaListener;
+
+ScopedAStatus RefImsMedia::setListener(
+        const std::shared_ptr<::aidl::android::hardware::radio::ims::media::IImsMediaListener>&
+                in_mediaListener) {
+    mediaListener = in_mediaListener;
+    return ok();
+}
+ScopedAStatus RefImsMedia::openSession(
+        int32_t in_sessionId,
+        const ::aidl::android::hardware::radio::ims::media::LocalEndPoint& in_localEndPoint,
+        const ::aidl::android::hardware::radio::ims::media::RtpConfig& in_config) {
+    std::shared_ptr<IImsMediaSession> session =
+            ndk::SharedRefBase::make<RefImsMediaSession>(mContext, mHal1_5, mCallbackManager);
+
+    mediaListener->onOpenSessionSuccess(in_sessionId, session);
+    return ok();
+}
+ScopedAStatus RefImsMedia::closeSession(int32_t in_sessionId) {
+    mediaListener->onSessionClosed(in_sessionId);
+    return ok();
+}
+
+}  // namespace cf::ril
diff --git a/guest/hals/ril/reference-libril/RefImsMedia.h b/guest/hals/ril/reference-libril/RefImsMedia.h
new file mode 100644
index 0000000..619b321
--- /dev/null
+++ b/guest/hals/ril/reference-libril/RefImsMedia.h
@@ -0,0 +1,58 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#pragma once
+
+#include <libradiocompat/RadioImsMedia.h>
+#include <libradiocompat/RadioImsMediaSession.h>
+
+namespace cf::ril {
+
+class RefImsMedia : public android::hardware::radio::compat::RadioImsMedia {
+  public:
+    using android::hardware::radio::compat::RadioImsMedia::RadioImsMedia;
+
+    ::ndk::ScopedAStatus setListener(
+            const std::shared_ptr<::aidl::android::hardware::radio::ims::media::IImsMediaListener>&
+                    in_mediaListener) override;
+    ::ndk::ScopedAStatus openSession(
+            int32_t in_sessionId,
+            const ::aidl::android::hardware::radio::ims::media::LocalEndPoint& in_localEndPoint,
+            const ::aidl::android::hardware::radio::ims::media::RtpConfig& in_config) override;
+    ::ndk::ScopedAStatus closeSession(int32_t in_sessionId) override;
+};
+
+class RefImsMediaSession : public android::hardware::radio::compat::RadioImsMediaSession {
+  public:
+    using android::hardware::radio::compat::RadioImsMediaSession::RadioImsMediaSession;
+
+    ::ndk::ScopedAStatus setListener(
+            const std::shared_ptr<
+                    ::aidl::android::hardware::radio::ims::media::IImsMediaSessionListener>&
+                    in_sessionListener) override;
+    ::ndk::ScopedAStatus modifySession(
+            const ::aidl::android::hardware::radio::ims::media::RtpConfig& in_config) override;
+    ::ndk::ScopedAStatus sendDtmf(char16_t in_dtmfDigit, int32_t in_duration) override;
+    ::ndk::ScopedAStatus startDtmf(char16_t in_dtmfDigit) override;
+    ::ndk::ScopedAStatus stopDtmf() override;
+    ::ndk::ScopedAStatus sendHeaderExtension(
+            const std::vector<::aidl::android::hardware::radio::ims::media::RtpHeaderExtension>&
+                    in_extensions) override;
+    ::ndk::ScopedAStatus setMediaQualityThreshold(
+            const ::aidl::android::hardware::radio::ims::media::MediaQualityThreshold& in_threshold)
+            override;
+};
+
+}  // namespace cf::ril
diff --git a/guest/hals/ril/reference-libril/RefImsMediaSession.cpp b/guest/hals/ril/reference-libril/RefImsMediaSession.cpp
new file mode 100644
index 0000000..aeb7d9d
--- /dev/null
+++ b/guest/hals/ril/reference-libril/RefImsMediaSession.cpp
@@ -0,0 +1,60 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "RefImsMedia.h"
+
+namespace cf::ril {
+
+using ::ndk::ScopedAStatus;
+using namespace ::aidl::android::hardware::radio::ims::media;
+constexpr auto ok = &ScopedAStatus::ok;
+std::shared_ptr<IImsMediaSessionListener> mediaSessionListener;
+
+ScopedAStatus RefImsMediaSession::setListener(
+        const std::shared_ptr<
+                ::aidl::android::hardware::radio::ims::media::IImsMediaSessionListener>&
+                in_sessionListener) {
+    mediaSessionListener = in_sessionListener;
+    return ok();
+}
+
+ScopedAStatus RefImsMediaSession::modifySession(
+        const ::aidl::android::hardware::radio::ims::media::RtpConfig& in_config) {
+    mediaSessionListener->onModifySessionResponse(
+            in_config, ::aidl::android::hardware::radio::ims::media::RtpError::NONE);
+    return ok();
+}
+
+ScopedAStatus RefImsMediaSession::sendDtmf(char16_t in_dtmfDigit, int32_t in_duration) {
+    return ok();
+}
+ScopedAStatus RefImsMediaSession::startDtmf(char16_t in_dtmfDigit) {
+    return ok();
+}
+ScopedAStatus RefImsMediaSession::stopDtmf() {
+    return ok();
+}
+ScopedAStatus RefImsMediaSession::sendHeaderExtension(
+        const std::vector<::aidl::android::hardware::radio::ims::media::RtpHeaderExtension>&
+                in_extensions) {
+    return ok();
+}
+ScopedAStatus RefImsMediaSession::setMediaQualityThreshold(
+        const ::aidl::android::hardware::radio::ims::media::MediaQualityThreshold& in_threshold) {
+    return ok();
+}
+
+}  // namespace cf::ril
diff --git a/guest/hals/ril/reference-libril/RefRadioIms.cpp b/guest/hals/ril/reference-libril/RefRadioIms.cpp
new file mode 100644
index 0000000..d9b09fe
--- /dev/null
+++ b/guest/hals/ril/reference-libril/RefRadioIms.cpp
@@ -0,0 +1,75 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "RefRadioIms.h"
+
+namespace cf::ril {
+
+using ::ndk::ScopedAStatus;
+using namespace ::aidl::android::hardware::radio;
+constexpr auto ok = &ScopedAStatus::ok;
+
+static RadioResponseInfo responseInfo(int32_t serial) {
+    return {
+            .type = RadioResponseType::SOLICITED,
+            .serial = serial,
+            .error = RadioError::NONE,
+    };
+}
+
+ScopedAStatus RefRadioIms::setSrvccCallInfo(
+        int32_t serial,
+        const std::vector<::aidl::android::hardware::radio::ims::SrvccCall>& srvccCalls) {
+    respond()->setSrvccCallInfoResponse(responseInfo(serial));
+    return ok();
+}
+ScopedAStatus RefRadioIms::updateImsRegistrationInfo(
+        int32_t serial,
+        const ::aidl::android::hardware::radio::ims::ImsRegistration& imsRegistration) {
+    respond()->updateImsRegistrationInfoResponse(responseInfo(serial));
+    return ok();
+}
+ScopedAStatus RefRadioIms::startImsTraffic(
+        int32_t serial, int32_t token,
+        ::aidl::android::hardware::radio::ims::ImsTrafficType imsTrafficType,
+        ::aidl::android::hardware::radio::AccessNetwork accessNetworkType,
+        ::aidl::android::hardware::radio::ims::ImsCall::Direction trafficDirection) {
+    respond()->startImsTrafficResponse(responseInfo(serial), {});
+    return ok();
+}
+ScopedAStatus RefRadioIms::stopImsTraffic(int32_t serial, int32_t token) {
+    respond()->stopImsTrafficResponse(responseInfo(serial));
+    return ok();
+}
+ScopedAStatus RefRadioIms::triggerEpsFallback(
+        int32_t serial, ::aidl::android::hardware::radio::ims::EpsFallbackReason reason) {
+    respond()->triggerEpsFallbackResponse(responseInfo(serial));
+    return ok();
+}
+ScopedAStatus RefRadioIms::sendAnbrQuery(
+        int32_t serial, ::aidl::android::hardware::radio::ims::ImsStreamType mediaType,
+        ::aidl::android::hardware::radio::ims::ImsStreamDirection direction,
+        int32_t bitsPerSecond) {
+    respond()->sendAnbrQueryResponse(responseInfo(serial));
+    return ok();
+}
+ScopedAStatus RefRadioIms::updateImsCallStatus(
+        int32_t serial,
+        const std::vector<::aidl::android::hardware::radio::ims::ImsCall>& imsCalls) {
+    respond()->updateImsCallStatusResponse(responseInfo(serial));
+    return ok();
+}
+}  // namespace cf::ril
diff --git a/guest/hals/ril/reference-libril/RefRadioIms.h b/guest/hals/ril/reference-libril/RefRadioIms.h
new file mode 100644
index 0000000..3202971
--- /dev/null
+++ b/guest/hals/ril/reference-libril/RefRadioIms.h
@@ -0,0 +1,51 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#pragma once
+
+#include <libradiocompat/RadioIms.h>
+
+namespace cf::ril {
+
+class RefRadioIms : public android::hardware::radio::compat::RadioIms {
+  public:
+    using android::hardware::radio::compat::RadioIms::RadioIms;
+
+    ::ndk::ScopedAStatus setSrvccCallInfo(
+            int32_t serial,
+            const std::vector<::aidl::android::hardware::radio::ims::SrvccCall>& srvccCalls)
+            override;
+    ::ndk::ScopedAStatus updateImsRegistrationInfo(
+            int32_t serial,
+            const ::aidl::android::hardware::radio::ims::ImsRegistration& imsRegistration) override;
+    ::ndk::ScopedAStatus startImsTraffic(
+            int32_t serial, int32_t token,
+            ::aidl::android::hardware::radio::ims::ImsTrafficType imsTrafficType,
+            ::aidl::android::hardware::radio::AccessNetwork accessNetworkType,
+            ::aidl::android::hardware::radio::ims::ImsCall::Direction trafficDirection) override;
+    ::ndk::ScopedAStatus stopImsTraffic(int32_t serial, int32_t token) override;
+    ::ndk::ScopedAStatus triggerEpsFallback(
+            int32_t serial,
+            ::aidl::android::hardware::radio::ims::EpsFallbackReason reason) override;
+    ::ndk::ScopedAStatus sendAnbrQuery(
+            int32_t serial, ::aidl::android::hardware::radio::ims::ImsStreamType mediaType,
+            ::aidl::android::hardware::radio::ims::ImsStreamDirection direction,
+            int32_t bitsPerSecond) override;
+    ::ndk::ScopedAStatus updateImsCallStatus(
+            int32_t serial,
+            const std::vector<::aidl::android::hardware::radio::ims::ImsCall>& imsCalls) override;
+};
+
+}  // namespace cf::ril
diff --git a/guest/hals/ril/reference-libril/RefRadioModem.cpp b/guest/hals/ril/reference-libril/RefRadioModem.cpp
new file mode 100644
index 0000000..867c8f0
--- /dev/null
+++ b/guest/hals/ril/reference-libril/RefRadioModem.cpp
@@ -0,0 +1,52 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "RefRadioModem.h"
+#include "ril_service.h"
+
+using ::android::hardware::hidl_string;
+
+namespace cf::ril {
+
+    using ::ndk::ScopedAStatus;
+    using namespace ::aidl::android::hardware::radio;
+    constexpr auto ok = &ScopedAStatus::ok;
+
+    static RadioResponseInfo responseInfo(int32_t serial, RadioError error = RadioError::NONE) {
+        return {
+                .type = RadioResponseType::SOLICITED,
+                .serial = serial,
+                .error = error,
+        };
+    }
+
+    hidl_string convertCharPtrToHidlString(const char *ptr) {
+        hidl_string ret;
+        if (ptr != NULL) {
+            ret.setToExternal(ptr, strlen(ptr));
+        }
+        return ret;
+    }
+
+    ScopedAStatus RefRadioModem::getImei(int32_t serial) {
+        ::aidl::android::hardware::radio::modem::ImeiInfo imeiInfo = {};
+        imeiInfo.type = (::aidl::android::hardware::radio::modem::ImeiInfo::ImeiType) 1;
+        imeiInfo.imei = convertCharPtrToHidlString("867400022047199");
+        imeiInfo.svn = convertCharPtrToHidlString("01");
+        respond()->getImeiResponse(responseInfo(serial), imeiInfo);
+        return ok();
+    }
+}
diff --git a/guest/hals/ril/reference-libril/RefRadioModem.h b/guest/hals/ril/reference-libril/RefRadioModem.h
new file mode 100644
index 0000000..a0a20b1
--- /dev/null
+++ b/guest/hals/ril/reference-libril/RefRadioModem.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <libradiocompat/RadioModem.h>
+
+namespace cf::ril {
+
+class RefRadioModem : public android::hardware::radio::compat::RadioModem {
+    public:
+        using android::hardware::radio::compat::RadioModem::RadioModem;
+
+         ::ndk::ScopedAStatus getImei(int32_t serial) override;
+    };
+}
diff --git a/guest/hals/ril/reference-libril/RefRadioNetwork.cpp b/guest/hals/ril/reference-libril/RefRadioNetwork.cpp
index 4f39944..407efaa 100644
--- a/guest/hals/ril/reference-libril/RefRadioNetwork.cpp
+++ b/guest/hals/ril/reference-libril/RefRadioNetwork.cpp
@@ -47,4 +47,46 @@
     return ok();
 }
 
+ScopedAStatus RefRadioNetwork::setEmergencyMode(int32_t serial,
+                                                network::EmergencyMode emergencyMode) {
+    network::EmergencyRegResult regState;
+    respond()->setEmergencyModeResponse(responseInfo(serial), regState);
+    return ok();
+}
+
+ScopedAStatus RefRadioNetwork::triggerEmergencyNetworkScan(
+        int32_t serial, const network::EmergencyNetworkScanTrigger& request) {
+    respond()->triggerEmergencyNetworkScanResponse(responseInfo(serial));
+    return ok();
+}
+
+ScopedAStatus RefRadioNetwork::exitEmergencyMode(int32_t serial) {
+    respond()->exitEmergencyModeResponse(responseInfo(serial));
+    return ok();
+}
+
+ScopedAStatus RefRadioNetwork::cancelEmergencyNetworkScan(int32_t serial, bool resetScan) {
+    respond()->cancelEmergencyNetworkScanResponse(responseInfo(serial));
+    return ok();
+}
+
+ScopedAStatus RefRadioNetwork::isN1ModeEnabled(int32_t serial) {
+    respond()->isN1ModeEnabledResponse(responseInfo(serial), false);
+    return ok();
+}
+
+ScopedAStatus RefRadioNetwork::setN1ModeEnabled(int32_t serial, bool enable) {
+    respond()->setN1ModeEnabledResponse(responseInfo(serial));
+    return ok();
+}
+
+ScopedAStatus RefRadioNetwork::setNullCipherAndIntegrityEnabled(int32_t serial, bool enabled) {
+    respond()->setNullCipherAndIntegrityEnabledResponse(responseInfo(serial));
+    return ok();
+}
+
+ScopedAStatus RefRadioNetwork::isNullCipherAndIntegrityEnabled(int32_t serial) {
+    respond()->isNullCipherAndIntegrityEnabledResponse(responseInfo(serial), true);
+    return ok();
+}
 }  // namespace cf::ril
diff --git a/guest/hals/ril/reference-libril/RefRadioNetwork.h b/guest/hals/ril/reference-libril/RefRadioNetwork.h
index 5f16b14..c99bf18 100644
--- a/guest/hals/ril/reference-libril/RefRadioNetwork.h
+++ b/guest/hals/ril/reference-libril/RefRadioNetwork.h
@@ -30,6 +30,26 @@
             int32_t serial,
             ::aidl::android::hardware::radio::network::UsageSetting usageSetting) override;
     ::ndk::ScopedAStatus getUsageSetting(int32_t serial) override;
+
+    ::ndk::ScopedAStatus setEmergencyMode(
+            int32_t serial,
+            ::aidl::android::hardware::radio::network::EmergencyMode emergencyMode) override;
+
+    ::ndk::ScopedAStatus triggerEmergencyNetworkScan(
+            int32_t serial,
+            const ::aidl::android::hardware::radio::network::EmergencyNetworkScanTrigger& request)
+            override;
+
+    ::ndk::ScopedAStatus exitEmergencyMode(int32_t serial) override;
+
+    ::ndk::ScopedAStatus cancelEmergencyNetworkScan(int32_t serial, bool resetScan) override;
+
+    ::ndk::ScopedAStatus isN1ModeEnabled(int32_t serial) override;
+
+    ::ndk::ScopedAStatus setN1ModeEnabled(int32_t serial, bool enable) override;
+
+    ::ndk::ScopedAStatus setNullCipherAndIntegrityEnabled(int32_t serial, bool enabled) override;
+    ::ndk::ScopedAStatus isNullCipherAndIntegrityEnabled(int32_t serial) override;
 };
 
 }  // namespace cf::ril
diff --git a/guest/hals/ril/reference-libril/RefRadioSim.cpp b/guest/hals/ril/reference-libril/RefRadioSim.cpp
new file mode 100644
index 0000000..56ce6a1
--- /dev/null
+++ b/guest/hals/ril/reference-libril/RefRadioSim.cpp
@@ -0,0 +1,45 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "RefRadioSim.h"
+#include "ril_service.h"
+
+namespace cf::ril {
+
+using ::ndk::ScopedAStatus;
+using namespace ::aidl::android::hardware::radio;
+constexpr auto ok = &ScopedAStatus::ok;
+
+static RadioResponseInfo responseInfo(int32_t serial, RadioError error = RadioError::NONE) {
+    return {
+            .type = RadioResponseType::SOLICITED,
+            .serial = serial,
+            .error = error,
+    };
+}
+
+ScopedAStatus RefRadioSim::iccCloseLogicalChannelWithSessionInfo(
+        int32_t serial, const ::aidl::android::hardware::radio::sim::SessionInfo& sessionInfo) {
+    if (sessionInfo.sessionId == 0) {
+        respond()->iccCloseLogicalChannelWithSessionInfoResponse(
+                responseInfo(serial, RadioError::INVALID_ARGUMENTS));
+        return ok();
+    }
+    // fallback on the deprecated iccCloseLogicalChannel function for
+    // actual channel close functionality
+    return iccCloseLogicalChannel(serial, sessionInfo.sessionId);
+}
+}  // namespace cf::ril
diff --git a/guest/hals/ril/reference-libril/RefRadioSim.h b/guest/hals/ril/reference-libril/RefRadioSim.h
new file mode 100644
index 0000000..5608639
--- /dev/null
+++ b/guest/hals/ril/reference-libril/RefRadioSim.h
@@ -0,0 +1,31 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <libradiocompat/RadioSim.h>
+
+namespace cf::ril {
+
+class RefRadioSim : public android::hardware::radio::compat::RadioSim {
+  public:
+    using android::hardware::radio::compat::RadioSim::RadioSim;
+
+    ::ndk::ScopedAStatus iccCloseLogicalChannelWithSessionInfo(
+            int32_t serial,
+            const ::aidl::android::hardware::radio::sim::SessionInfo& sessionInfo) override;
+};
+}  // namespace cf::ril
diff --git a/guest/hals/ril/reference-libril/android.hardware.radio@2.1.xml b/guest/hals/ril/reference-libril/android.hardware.radio@2.1.xml
new file mode 100644
index 0000000..0ffa0d7
--- /dev/null
+++ b/guest/hals/ril/reference-libril/android.hardware.radio@2.1.xml
@@ -0,0 +1,49 @@
+<manifest version="1.0" type="device">
+    <hal format="aidl">
+        <name>android.hardware.radio.config</name>
+        <version>2</version>
+        <fqname>IRadioConfig/default</fqname>
+    </hal>
+    <hal format="aidl">
+        <name>android.hardware.radio.data</name>
+        <version>2</version>
+        <fqname>IRadioData/slot1</fqname>
+    </hal>
+    <hal format="aidl">
+        <name>android.hardware.radio.ims</name>
+        <fqname>IRadioIms/slot1</fqname>
+    </hal>
+    <hal format="aidl">
+        <name>android.hardware.radio.ims.media</name>
+        <fqname>IImsMedia/default</fqname>
+    </hal>
+    <hal format="aidl">
+        <name>android.hardware.radio.messaging</name>
+        <version>2</version>
+        <fqname>IRadioMessaging/slot1</fqname>
+    </hal>
+    <hal format="aidl">
+        <name>android.hardware.radio.modem</name>
+        <version>2</version>
+        <fqname>IRadioModem/slot1</fqname>
+    </hal>
+    <hal format="aidl">
+        <name>android.hardware.radio.network</name>
+        <version>2</version>
+        <fqname>IRadioNetwork/slot1</fqname>
+    </hal>
+    <hal format="aidl">
+        <name>android.hardware.radio.sim</name>
+        <version>2</version>
+        <fqname>IRadioSim/slot1</fqname>
+    </hal>
+    <hal format="aidl">
+        <name>android.hardware.radio.sap</name>
+        <fqname>ISap/slot1</fqname>
+    </hal>
+    <hal format="aidl">
+        <name>android.hardware.radio.voice</name>
+        <version>2</version>
+        <fqname>IRadioVoice/slot1</fqname>
+    </hal>
+</manifest>
diff --git a/guest/hals/ril/reference-libril/ril.cpp b/guest/hals/ril/reference-libril/ril.cpp
index 9196ef4..f397bbb 100644
--- a/guest/hals/ril/reference-libril/ril.cpp
+++ b/guest/hals/ril/reference-libril/ril.cpp
@@ -489,7 +489,7 @@
     radio_1_6::registerConfigService(&s_callbacks, s_configCommands);
     /* }@ */
 
-
+    sap::registerService(&s_callbacks);
 }
 
 extern "C" void
@@ -618,7 +618,7 @@
 }
 extern "C" void
 RIL_onRequestComplete(RIL_Token t, RIL_Errno e, void *response, size_t responselen) {
-    RequestInfo *pRI;
+    RequestInfo* pRI;
     int ret;
     RIL_SOCKET_ID socket_id = RIL_SOCKET_1;
 
diff --git a/guest/hals/ril/reference-libril/ril.h b/guest/hals/ril/reference-libril/ril.h
index f7bc2c5..6f33ae7 100644
--- a/guest/hals/ril/reference-libril/ril.h
+++ b/guest/hals/ril/reference-libril/ril.h
@@ -8544,7 +8544,7 @@
 } RIL_KeepaliveStatusCode;
 
 typedef struct {
-    uint32_t sessionHandle;
+    int32_t sessionHandle;
     RIL_KeepaliveStatusCode code;
 } RIL_KeepaliveStatus;
 
diff --git a/guest/hals/ril/reference-libril/ril_config.cpp b/guest/hals/ril/reference-libril/ril_config.cpp
index 80c8aa0..706026d 100644
--- a/guest/hals/ril/reference-libril/ril_config.cpp
+++ b/guest/hals/ril/reference-libril/ril_config.cpp
@@ -285,7 +285,7 @@
     radioConfigService->mRadioConfigIndicationV1_2 = NULL;
 
     // use a compat shim to convert HIDL interface to AIDL and publish it
-    // PLEASE NOTE this is a temporary solution
+    // TODO(bug 220004469): replace with a full AIDL implementation
     static auto aidlHal = ndk::SharedRefBase::make<compat::RadioConfig>(radioConfigService);
     const auto instance = compat::RadioConfig::descriptor + "/"s + std::string(serviceNames);
     const auto status = AServiceManager_addService(aidlHal->asBinder().get(), instance.c_str());
diff --git a/guest/hals/ril/reference-libril/ril_service.cpp b/guest/hals/ril/reference-libril/ril_service.cpp
index 5fdc3dc..93232cf 100644
--- a/guest/hals/ril/reference-libril/ril_service.cpp
+++ b/guest/hals/ril/reference-libril/ril_service.cpp
@@ -16,6 +16,10 @@
 
 #define LOG_TAG "RILC"
 
+#include "RefRadioSim.h"
+#include "RefImsMedia.h"
+#include "RefRadioIms.h"
+#include "RefRadioModem.h"
 #include "RefRadioNetwork.h"
 
 #include <android-base/logging.h>
@@ -27,6 +31,8 @@
 #include <android/hardware/radio/1.6/types.h>
 #include <libradiocompat/CallbackManager.h>
 #include <libradiocompat/RadioData.h>
+#include <libradiocompat/RadioIms.h>
+#include <libradiocompat/RadioImsMedia.h>
 #include <libradiocompat/RadioMessaging.h>
 #include <libradiocompat/RadioModem.h>
 #include <libradiocompat/RadioSim.h>
@@ -106,6 +112,7 @@
 // counter used for synchronization. It is incremented every time response callbacks are updated.
 volatile int32_t mCounterRadio[1];
 volatile int32_t mCounterOemHook[1];
+hidl_vec<uint8_t> osAppIdVec;
 #endif
 
 static pthread_rwlock_t radioServiceRwlock = PTHREAD_RWLOCK_INITIALIZER;
@@ -3378,12 +3385,7 @@
 #if VDBG
     RLOGD("%s(): %d", __FUNCTION__, serial);
 #endif
-    RequestInfo *pRI = android::addRequestToList(serial, mSlotId, RIL_REQUEST_STOP_KEEPALIVE);
-    if (pRI == NULL) {
-        return Void();
-    }
-
-    CALL_ONREQUEST(pRI->pCI->requestNumber, &sessionHandle, sizeof(uint32_t), pRI, mSlotId);
+    dispatchInts(serial, mSlotId, RIL_REQUEST_STOP_KEEPALIVE, 1, sessionHandle);
     return Void();
 }
 
@@ -4310,41 +4312,47 @@
         ras_to.channels_length = ras_from.channels.size();
 
         std::copy(ras_from.channels.begin(), ras_from.channels.end(), ras_to.channels);
-        const std::vector<uint32_t> * bands = nullptr;
         switch (request.specifiers[i].radioAccessNetwork) {
-            case V1_5::RadioAccessNetworks::GERAN:
-                ras_to.bands_length = ras_from.bands.geranBands().size();
-
-                bands = (std::vector<uint32_t> *) &ras_from.bands.geranBands();
+            case V1_5::RadioAccessNetworks::GERAN: {
+                hidl_vec<V1_1::GeranBands> geranBands = ras_from.bands.geranBands();
+                ras_to.bands_length = MIN(geranBands.size(), MAX_BANDS);
                 // safe to copy to geran_bands because it's a union member
                 for (size_t idx = 0; idx < ras_to.bands_length; ++idx) {
-                    ras_to.bands.geran_bands[idx] = (RIL_GeranBands) (*bands)[idx];
+                    ras_to.bands.geran_bands[idx] =
+                            static_cast<RIL_GeranBands>(geranBands[idx]);
                 }
                 break;
-            case V1_5::RadioAccessNetworks::UTRAN:
-                ras_to.bands_length = ras_from.bands.utranBands().size();
-                bands = (std::vector<uint32_t> *) &ras_from.bands;
-                // safe to copy to geran_bands because it's a union member
+            }
+            case V1_5::RadioAccessNetworks::UTRAN: {
+                hidl_vec<V1_5::UtranBands> utranBands = ras_from.bands.utranBands();
+                ras_to.bands_length = MIN(utranBands.size(), MAX_BANDS);
+                // safe to copy to utran_bands because it's a union member
                 for (size_t idx = 0; idx < ras_to.bands_length; ++idx) {
-                    ras_to.bands.utran_bands[idx] = (RIL_UtranBands) (*bands)[idx];
+                    ras_to.bands.utran_bands[idx] =
+                            static_cast<RIL_UtranBands>(utranBands[idx]);
                 }
                 break;
-            case V1_5::RadioAccessNetworks::EUTRAN:
-                ras_to.bands_length = ras_from.bands.eutranBands().size();
-                bands = (std::vector<uint32_t> *) &ras_from.bands;
-                // safe to copy to geran_bands because it's a union member
+            }
+            case V1_5::RadioAccessNetworks::EUTRAN: {
+                hidl_vec<V1_5::EutranBands> eutranBands = ras_from.bands.eutranBands();
+                ras_to.bands_length = MIN(eutranBands.size(), MAX_BANDS);
+                // safe to copy to eutran_bands because it's a union member
                 for (size_t idx = 0; idx < ras_to.bands_length; ++idx) {
-                    ras_to.bands.eutran_bands[idx] = (RIL_EutranBands) (*bands)[idx];
+                    ras_to.bands.eutran_bands[idx] =
+                            static_cast<RIL_EutranBands>(eutranBands[idx]);
                 }
                 break;
-            case V1_5::RadioAccessNetworks::NGRAN:
-                ras_to.bands_length = ras_from.bands.ngranBands().size();
-                bands = (std::vector<uint32_t> *) &ras_from.bands;
-                // safe to copy to geran_bands because it's a union member
+            }
+            case V1_5::RadioAccessNetworks::NGRAN: {
+                hidl_vec<V1_5::NgranBands> ngranBands = ras_from.bands.ngranBands();
+                ras_to.bands_length = MIN(ngranBands.size(), MAX_BANDS);
+                // safe to copy to ngran_bands because it's a union member
                 for (size_t idx = 0; idx < ras_to.bands_length; ++idx) {
-                    ras_to.bands.ngran_bands[idx] = (RIL_NgranBands) (*bands)[idx];
+                    ras_to.bands.ngran_bands[idx] =
+                            static_cast<RIL_NgranBands>(ngranBands[idx]);
                 }
                 break;
+            }
             default:
                 sendErrorResponse(pRI, RIL_E_INVALID_ARGUMENTS);
                 return -1;
@@ -4435,17 +4443,15 @@
     return Void();
 }
 
-Return<void> RadioImpl_1_6::setupDataCall_1_6(int32_t serial ,
-        ::android::hardware::radio::V1_5::AccessNetwork /* accessNetwork */,
+Return<void> RadioImpl_1_6::setupDataCall_1_6(
+        int32_t serial, ::android::hardware::radio::V1_5::AccessNetwork /* accessNetwork */,
         const ::android::hardware::radio::V1_5::DataProfileInfo& dataProfileInfo,
         bool roamingAllowed, ::android::hardware::radio::V1_2::DataRequestReason /* reason */,
         const hidl_vec<::android::hardware::radio::V1_5::LinkAddress>& /* addresses */,
-        const hidl_vec<hidl_string>& /* dnses */,
-        int32_t /* pduSessionId */,
+        const hidl_vec<hidl_string>& /* dnses */, int32_t /* pduSessionId */,
         const ::android::hardware::radio::V1_6::OptionalSliceInfo& /* sliceInfo */,
-        const ::android::hardware::radio::V1_6::OptionalTrafficDescriptor& /*trafficDescriptor*/,
+        const ::android::hardware::radio::V1_6::OptionalTrafficDescriptor& trafficDescriptor,
         bool matchAllRuleAllowed) {
-
 #if VDBG
     RLOGD("setupDataCall_1_6: serial %d", serial);
 #endif
@@ -4459,6 +4465,16 @@
         }
         return Void();
     }
+
+    if (trafficDescriptor.getDiscriminator() ==
+                V1_6::OptionalTrafficDescriptor::hidl_discriminator::value &&
+        trafficDescriptor.value().osAppId.getDiscriminator() ==
+                V1_6::OptionalOsAppId::hidl_discriminator::value) {
+        osAppIdVec = trafficDescriptor.value().osAppId.value().osAppId;
+    } else {
+        osAppIdVec = {};
+    }
+
     dispatchStrings(serial, mSlotId, RIL_REQUEST_SETUP_DATA_CALL, true, 16,
         std::to_string((int) RadioTechnology::UNKNOWN + 2).c_str(),
         std::to_string((int) dataProfileInfo.profileId).c_str(),
@@ -4941,7 +4957,7 @@
                 appStatus[i].pin2 = (PinState) rilAppStatus[i].pin2;
             }
         }
-        if (radioService[slotId]->mRadioResponseV1_5 != NULL) {
+        if (p_cur && radioService[slotId]->mRadioResponseV1_5 != NULL) {
             ::android::hardware::radio::V1_2::CardStatus cardStatusV1_2;
             ::android::hardware::radio::V1_4::CardStatus cardStatusV1_4;
             ::android::hardware::radio::V1_5::CardStatus cardStatusV1_5;
@@ -4972,7 +4988,7 @@
             Return<void> retStatus = radioService[slotId]->mRadioResponseV1_5->
                     getIccCardStatusResponse_1_5(responseInfo, cardStatusV1_5);
             radioService[slotId]->checkReturnStatus(retStatus);
-        } else if (radioService[slotId]->mRadioResponseV1_4 != NULL) {
+        } else if (p_cur && radioService[slotId]->mRadioResponseV1_4 != NULL) {
             ::android::hardware::radio::V1_2::CardStatus cardStatusV1_2;
             ::android::hardware::radio::V1_4::CardStatus cardStatusV1_4;
             cardStatusV1_2.base = cardStatus;
@@ -4982,7 +4998,7 @@
             Return<void> retStatus = radioService[slotId]->mRadioResponseV1_4->
                     getIccCardStatusResponse_1_4(responseInfo, cardStatusV1_4);
             radioService[slotId]->checkReturnStatus(retStatus);
-        } else if (radioService[slotId]->mRadioResponseV1_3 != NULL) {
+        } else if (p_cur && radioService[slotId]->mRadioResponseV1_3 != NULL) {
             ::android::hardware::radio::V1_2::CardStatus cardStatusV1_2;
             cardStatusV1_2.base = cardStatus;
             cardStatusV1_2.physicalSlotId = -1;
@@ -4990,7 +5006,7 @@
             Return<void> retStatus = radioService[slotId]->mRadioResponseV1_3->
                     getIccCardStatusResponse_1_2(responseInfo, cardStatusV1_2);
             radioService[slotId]->checkReturnStatus(retStatus);
-        } else if (radioService[slotId]->mRadioResponseV1_2 != NULL) {
+        } else if (p_cur && radioService[slotId]->mRadioResponseV1_2 != NULL) {
             ::android::hardware::radio::V1_2::CardStatus cardStatusV1_2;
             cardStatusV1_2.base = cardStatus;
             cardStatusV1_2.physicalSlotId = -1;
@@ -11057,8 +11073,8 @@
         ::android::hardware::radio::V1_5::LinkAddress la;
         la.address = hidl_string(tok);
         la.properties = 0;
-        la.deprecationTime = 0;
-        la.expirationTime = 0;
+        la.deprecationTime = INT64_MAX;  // LinkAddress.java LIFETIME_PERMANENT = Long.MAX_VALUE
+        la.expirationTime = INT64_MAX;  // LinkAddress.java LIFETIME_PERMANENT = Long.MAX_VALUE
         linkAddresses.push_back(la);
     }
 
@@ -11086,8 +11102,8 @@
         ::android::hardware::radio::V1_5::LinkAddress la;
         la.address = hidl_string(tok);
         la.properties = 0;
-        la.deprecationTime = 0;
-        la.expirationTime = 0;
+        la.deprecationTime = INT64_MAX;  // LinkAddress.java LIFETIME_PERMANENT = Long.MAX_VALUE
+        la.expirationTime = INT64_MAX;  // LinkAddress.java LIFETIME_PERMANENT = Long.MAX_VALUE
         linkAddresses.push_back(la);
     }
 
@@ -11101,16 +11117,6 @@
     std::vector<::android::hardware::radio::V1_6::TrafficDescriptor> trafficDescriptors;
     ::android::hardware::radio::V1_6::TrafficDescriptor trafficDescriptor;
     ::android::hardware::radio::V1_6::OsAppId osAppId;
-
-    std::vector<uint8_t> osAppIdVec;
-    osAppIdVec.push_back('o');
-    osAppIdVec.push_back('s');
-    osAppIdVec.push_back('A');
-    osAppIdVec.push_back('p');
-    osAppIdVec.push_back('p');
-    osAppIdVec.push_back('I');
-    osAppIdVec.push_back('d');
-
     osAppId.osAppId = osAppIdVec;
     trafficDescriptor.osAppId.value(osAppId);
     trafficDescriptors.push_back(trafficDescriptor);
@@ -13227,7 +13233,8 @@
                     radioService[slotId]->mRadioIndicationV1_6->currentPhysicalChannelConfigs_1_6(
                             RadioIndicationType::UNSOLICITED, physChanConfig);
             radioService[slotId]->checkReturnStatus(retStatus);
-            {
+            // checkReturnStatus() call might set mRadioIndicationV1_6 to NULL
+            if (radioService[slotId]->mRadioIndicationV1_6 != NULL) {
                 // Just send the link estimate along with physical channel config, as it has
                 // at least the downlink bandwidth.
                 // Note: the bandwidth is just some hardcoded value, as there is not way to get
@@ -13257,7 +13264,8 @@
                     radioService[slotId]->mRadioIndicationV1_4->currentPhysicalChannelConfigs_1_4(
                             RadioIndicationType::UNSOLICITED, physChanConfig);
             radioService[slotId]->checkReturnStatus(retStatus);
-            {
+            // checkReturnStatus() call might set mRadioIndicationV1_4 to NULL
+            if (radioService[slotId]->mRadioIndicationV1_4 != NULL) {
                 // Just send the link estimate along with physical channel config, as it has
                 // at least the downlink bandwidth.
                 // Note: the bandwidth is just some hardcoded value, as there is not way to get
@@ -13272,7 +13280,7 @@
                                 RadioIndicationType::UNSOLICITED, lce);
                 radioService[slotId]->checkReturnStatus(retStatus);
             }
-        } else {
+        } else if (radioService[slotId]->mRadioIndicationV1_2 != NULL) {
             hidl_vec<V1_2::PhysicalChannelConfig> physChanConfig;
             physChanConfig.resize(1);
             physChanConfig[0].status = (V1_2::CellConnectionStatus)configs[0];
@@ -13282,7 +13290,8 @@
                     radioService[slotId]->mRadioIndicationV1_2->currentPhysicalChannelConfigs(
                             RadioIndicationType::UNSOLICITED, physChanConfig);
             radioService[slotId]->checkReturnStatus(retStatus);
-            {
+            // checkReturnStatus() call might set mRadioIndicationV1_2 to NULL
+            if (radioService[slotId]->mRadioIndicationV1_2 != NULL) {
                 // Just send the link estimate along with physical channel config, as it has
                 // at least the downlink bandwidth.
                 // Note: the bandwidth is just some hardcoded value, as there is not way to get
@@ -13416,7 +13425,7 @@
                 serviceNames[i], i);
 
         // use a compat shim to convert HIDL interface to AIDL and publish it
-        // PLEASE NOTE this is a temporary solution
+        // TODO(bug 220004469): replace with a full AIDL implementation
         auto radioHidl = radioService[i];
         const auto slot = serviceNames[i];
         auto context = std::make_shared<compat::DriverContext>();
@@ -13427,14 +13436,17 @@
         publishRadioHal<cf::ril::RefRadioNetwork>(context, radioHidl, callbackMgr, slot);
         publishRadioHal<compat::RadioSim>(context, radioHidl, callbackMgr, slot);
         publishRadioHal<compat::RadioVoice>(context, radioHidl, callbackMgr, slot);
-
+        publishRadioHal<cf::ril::RefRadioIms>(context, radioHidl, callbackMgr, slot);
+        publishRadioHal<cf::ril::RefImsMedia>(context, radioHidl, callbackMgr,
+                                              std::string("default"));
+        publishRadioHal<cf::ril::RefRadioModem>(context, radioHidl, callbackMgr, slot);
+        publishRadioHal<cf::ril::RefRadioSim>(context, radioHidl, callbackMgr, slot);
         RLOGD("registerService: OemHook is enabled = %s", kOemHookEnabled ? "true" : "false");
         if (kOemHookEnabled) {
             oemHookService[i] = new OemHookImpl;
             oemHookService[i]->mSlotId = i;
             // status = oemHookService[i]->registerAsService(serviceNames[i]);
         }
-
         ret = pthread_rwlock_unlock(radioServiceRwlockPtr);
         CHECK_EQ(ret, 0);
     }
diff --git a/guest/hals/ril/reference-libril/sap_service.cpp b/guest/hals/ril/reference-libril/sap_service.cpp
index cd5b137..4689a1c 100644
--- a/guest/hals/ril/reference-libril/sap_service.cpp
+++ b/guest/hals/ril/reference-libril/sap_service.cpp
@@ -16,10 +16,11 @@
 
 #define LOG_TAG "RIL_SAP"
 
+#include <android/binder_manager.h>
+#include <android/binder_process.h>
 #include <android/hardware/radio/1.1/ISap.h>
+#include <libradiocompat/Sap.h>
 
-#include <hwbinder/IPCThreadState.h>
-#include <hwbinder/ProcessState.h>
 #include <sap_service.h>
 #include "pb_decode.h"
 #include "pb_encode.h"
@@ -139,6 +140,12 @@
     }
     va_end(ap);
     Return<void> retStatus;
+
+    if (sapCallback == NULL) {
+        RLOGE("sendFailedResponse: sapCallback == NULL; msgId = %d; token = %d", msgId, token);
+        return;
+    }
+
     switch(msgId) {
         case MsgId_RIL_SIM_SAP_CONNECT:
             retStatus = sapCallback->connectResponse(token, SapConnectRsp::CONNECT_FAILURE, 0);
@@ -925,6 +932,8 @@
 
 void sap::registerService(const RIL_RadioFunctions *callbacks) {
     using namespace android::hardware;
+    namespace compat = android::hardware::radio::compat;
+
     int simCount = 1;
     const char *serviceNames[] = {
         android::RIL_getServiceName()
@@ -960,7 +969,17 @@
         sapService[i]->slotId = i;
         sapService[i]->rilSocketId = socketIds[i];
         RLOGD("registerService: starting ISap %s for slotId %d", serviceNames[i], i);
-        android::status_t status = sapService[i]->registerAsService(serviceNames[i]);
-        RLOGD("registerService: started ISap %s status %d", serviceNames[i], status);
+
+        // use a compat shim to convert HIDL interface to AIDL and publish it
+        // TODO(bug 220004469): replace with a full AIDL implementation
+        static auto aidlHal = ndk::SharedRefBase::make<compat::Sap>(sapService[i]);
+        const auto instance = compat::Sap::descriptor + "/"s + std::string(serviceNames[i]);
+        const auto status = AServiceManager_addService(aidlHal->asBinder().get(), instance.c_str());
+        if (status == STATUS_OK) {
+            RLOGD("registerService addService: instance %s, status %d", instance.c_str(), status);
+        } else {
+            RLOGE("failed to register sapService for instance %s, status %d", instance.c_str(),
+                  status);
+        }
     }
 }
diff --git a/guest/hals/ril/reference-ril/atchannel.c b/guest/hals/ril/reference-ril/atchannel.c
index 2e2d8b5..06d5c53 100644
--- a/guest/hals/ril/reference-ril/atchannel.c
+++ b/guest/hals/ril/reference-ril/atchannel.c
@@ -29,7 +29,7 @@
 #include <time.h>
 #include <unistd.h>
 
-#define LOG_NDEBUG 0
+// #define LOG_NDEBUG 0
 #define LOG_TAG "AT"
 #include <utils/Log.h>
 
diff --git a/guest/hals/ril/reference-ril/reference-ril.c b/guest/hals/ril/reference-ril/reference-ril.c
index f3d01e4..9fdf80d 100644
--- a/guest/hals/ril/reference-ril/reference-ril.c
+++ b/guest/hals/ril/reference-ril/reference-ril.c
@@ -379,7 +379,7 @@
 static char *s_stkUnsolResponse = NULL;
 
 // Next available handle for keep alive session
-static uint32_t s_session_handle = 1;
+static int32_t s_session_handle = 1;
 
 typedef enum {
     STK_UNSOL_EVENT_UNKNOWN,
@@ -549,7 +549,8 @@
         radioInterfaceName);
   struct ifreq request;
   int status = 0;
-  int sock = socket(AF_INET, SOCK_DGRAM, IPPROTO_IP);
+  int family = strchr(addr, ':') ? AF_INET6 : AF_INET;
+  int sock = socket(family, SOCK_DGRAM, 0);
   if (sock == -1) {
     RLOGE("Failed to open interface socket: %s (%d)", strerror(errno), errno);
     return;
@@ -566,11 +567,30 @@
     *pch = '\0';
   }
 
-  struct sockaddr_in *sin = (struct sockaddr_in *)&request.ifr_addr;
-  sin->sin_family = AF_INET;
-  sin->sin_addr.s_addr = inet_addr(myaddr);
-  if (ioctl(sock, SIOCSIFADDR, &request) < 0) {
-    RLOGE("%s: failed.", __func__);
+  if (family == AF_INET) {
+    struct sockaddr_in *sin = (struct sockaddr_in *)&request.ifr_addr;
+    sin->sin_family = AF_INET;
+    sin->sin_addr.s_addr = inet_addr(myaddr);
+    if (ioctl(sock, SIOCSIFADDR, &request) < 0) {
+      RLOGE("%s: SIOCSIFADDR IPv4 failed.", __func__);
+    }
+  } else {
+    if (ioctl(sock, SIOCGIFINDEX, &request) < 0) {
+      RLOGE("%s: SIOCGIFINDEX failed.", __func__);
+    }
+
+    struct in6_ifreq req6 = {
+       // struct in6_addr ifr6_addr;
+       .ifr6_prefixlen = 64,  // __u32
+       .ifr6_ifindex = request.ifr_ifindex,  // int
+    };
+    if (inet_pton(AF_INET6, myaddr, &req6.ifr6_addr) != 1) {
+      RLOGE("%s: inet_pton(AF_INET6, '%s') failed.", __func__, myaddr);
+    }
+
+    if (ioctl(sock, SIOCSIFADDR, &req6) < 0) {
+      RLOGE("%s: SIOCSIFADDR IPv6 failed.", __func__);
+    }
   }
 
   close(sock);
@@ -4322,9 +4342,15 @@
     RIL_onRequestComplete(t, RIL_E_GENERIC_FAILURE, NULL, 0);
 }
 
-static void requestStartKeepalive(RIL_Token t) {
+static void requestStartKeepalive(void* data, size_t datalen __unused, RIL_Token t) {
+    RIL_KeepaliveRequest* kaRequest = (RIL_KeepaliveRequest*)data;
+    if (kaRequest->cid > MAX_PDP) {
+        RLOGE("Invalid cid for keepalive!");
+        RIL_onRequestComplete(t, RIL_E_INVALID_ARGUMENTS, NULL, 0);
+        return;
+    }
     RIL_KeepaliveStatus resp;
-    resp.sessionHandle = s_session_handle++;
+    resp.sessionHandle = __sync_fetch_and_add(&s_session_handle, 1);
     resp.code = KEEPALIVE_ACTIVE;
     RIL_onRequestComplete(t, RIL_E_SUCCESS, &resp, sizeof(resp));
 }
@@ -5116,10 +5142,22 @@
             RIL_onRequestComplete(t, RIL_E_SUCCESS, NULL, 0);
             break;
         case RIL_REQUEST_START_KEEPALIVE:
-            requestStartKeepalive(t);
+            requestStartKeepalive(data, datalen, t);
             break;
         case RIL_REQUEST_STOP_KEEPALIVE:
-            RIL_onRequestComplete(t, RIL_E_SUCCESS, NULL, 0);
+            if (data == NULL || datalen != sizeof(int)) {
+                RIL_onRequestComplete(t, RIL_E_INTERNAL_ERR, NULL, 0);
+                break;
+            }
+            int sessionHandle = *(int*)(data);
+            if ((int32_t)sessionHandle < s_session_handle) {
+                // weak validity check: accepts any handle below the next one to be
+                // assigned (including <= 0 and duplicate stops) -- known limitation
+                RIL_onRequestComplete(t, RIL_E_SUCCESS, NULL, 0);
+            } else {
+                RLOGE("Invalid session handle for keepalive!");
+                RIL_onRequestComplete(t, RIL_E_INVALID_ARGUMENTS, NULL, 0);
+            }
             break;
         case RIL_REQUEST_SET_UNSOLICITED_RESPONSE_FILTER:
             RIL_onRequestComplete(t, RIL_E_SUCCESS, NULL, 0);
diff --git a/guest/hals/uwb/uwb-service.rc b/guest/hals/uwb/uwb-service.rc
new file mode 100644
index 0000000..eb9d205
--- /dev/null
+++ b/guest/hals/uwb/uwb-service.rc
@@ -0,0 +1,3 @@
+service vendor.uwb_hal /vendor/bin/hw/android.hardware.uwb-service /dev/hvc9
+    class hal
+    user uwb
diff --git a/guest/libs/wpa_supplicant_8_lib/Android.bp b/guest/libs/wpa_supplicant_8_lib/Android.bp
index d09457e..2187c6e 100644
--- a/guest/libs/wpa_supplicant_8_lib/Android.bp
+++ b/guest/libs/wpa_supplicant_8_lib/Android.bp
@@ -1,3 +1,18 @@
+//
+// Copyright (C) 2021 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
 package {
     default_applicable_licenses: ["Android-Apache-2.0"],
 }
diff --git a/guest/libs/wpa_supplicant_8_lib/driver_cmd_nl80211.c b/guest/libs/wpa_supplicant_8_lib/driver_cmd_nl80211.c
index 7181498..509479b 100644
--- a/guest/libs/wpa_supplicant_8_lib/driver_cmd_nl80211.c
+++ b/guest/libs/wpa_supplicant_8_lib/driver_cmd_nl80211.c
@@ -37,8 +37,6 @@
                                   size_t buf_len) {
   struct i802_bss* bss = priv;
   struct wpa_driver_nl80211_data* drv = bss->drv;
-  struct ifreq ifr;
-  android_wifi_priv_cmd priv_cmd;
   int ret = 0;
 
   D("%s: called", __FUNCTION__);
diff --git a/guest/monitoring/cuttlefish_service/java/com/android/google/gce/gceservice/GceService.java b/guest/monitoring/cuttlefish_service/java/com/android/google/gce/gceservice/GceService.java
index ef0aa03..e6cfa8d 100644
--- a/guest/monitoring/cuttlefish_service/java/com/android/google/gce/gceservice/GceService.java
+++ b/guest/monitoring/cuttlefish_service/java/com/android/google/gce/gceservice/GceService.java
@@ -29,6 +29,7 @@
 import android.util.Log;
 import android.os.IBinder;
 import android.view.Display;
+import android.view.Surface;
 import android.view.WindowManager;
 import java.io.FileDescriptor;
 import java.io.PrintWriter;
@@ -129,7 +130,18 @@
     }
 
     private int getRotation() {
-      return mWindowManager.getDefaultDisplay().getRotation();
+      int rot = mWindowManager.getDefaultDisplay().getRotation();
+      switch (rot) {
+        case Surface.ROTATION_0:
+          return 0;
+        case Surface.ROTATION_90:
+          return 90;
+        case Surface.ROTATION_180:
+          return 180;
+        case Surface.ROTATION_270:
+          return 270;
+      }
+      throw new IllegalStateException("Rotation should be one of 0,90,180,270");
     }
 
     @Override
diff --git a/guest/monitoring/tombstone_transmit/Android.bp b/guest/monitoring/tombstone_transmit/Android.bp
index 149274c..86daf8b 100644
--- a/guest/monitoring/tombstone_transmit/Android.bp
+++ b/guest/monitoring/tombstone_transmit/Android.bp
@@ -37,6 +37,7 @@
     ],
     static_libs: [
         "libcuttlefish_fs_product",
+        "libcuttlefish_utils_product",
     ],
     defaults: [
         "tombstone_transmit_defaults",
@@ -53,11 +54,15 @@
     ],
     static_libs: [
         "libcuttlefish_fs",
+        "libcuttlefish_utils",
     ],
     defaults: [
         "tombstone_transmit_defaults",
         "cuttlefish_base",
     ],
+    cflags: [
+        "-DMICRODROID",
+    ],
 }
 
 cc_binary {
diff --git a/guest/monitoring/tombstone_transmit/tombstone_transmit.cpp b/guest/monitoring/tombstone_transmit/tombstone_transmit.cpp
index 0853b14..cea7aa6 100644
--- a/guest/monitoring/tombstone_transmit/tombstone_transmit.cpp
+++ b/guest/monitoring/tombstone_transmit/tombstone_transmit.cpp
@@ -28,6 +28,7 @@
 #include <gflags/gflags.h>
 
 #include "common/libs/fs/shared_fd.h"
+#include "common/libs/utils/inotify.h"
 #include "common/libs/utils/subprocess.h"
 
 static const char TOMBSTONE_DIR[] = "/data/tombstones/";
@@ -54,50 +55,46 @@
   return file_create_notification_handle;
 }
 
-#define INOTIFY_MAX_EVENT_SIZE (sizeof(struct inotify_event) + NAME_MAX + 1)
-static std::vector<std::string> get_next_tombstones_path_blocking(int fd) {
-  char event_readout[INOTIFY_MAX_EVENT_SIZE];
-  int bytes_parsed = 0;
-  std::vector<std::string> tombstone_paths;
-  // Each successful read can contain one or more of inotify_event events
-  // Note: read() on inotify returns 'whole' events, will never partially
-  // populate the buffer.
-  int event_read_out_length = read(fd, event_readout, INOTIFY_MAX_EVENT_SIZE);
-
-  if(event_read_out_length == -1) {
-    ALOGE("%s: Couldn't read out inotify event due to error: '%s' (%d)",
-      __FUNCTION__, strerror(errno), errno);
-    return std::vector<std::string>();
-  }
-
-  while (bytes_parsed < event_read_out_length) {
-    struct inotify_event* event =
-        reinterpret_cast<inotify_event*>(event_readout + bytes_parsed);
-    bytes_parsed += sizeof(struct inotify_event) + event->len;
-
-    // No file name was present
-    if (event->len == 0) {
-      ALOGE("%s: inotify event didn't contain filename", __FUNCTION__);
-      continue;
-    }
-    if (!(event->mask & IN_CREATE)) {
-      ALOGE("%s: inotify event didn't pertain to file creation", __FUNCTION__);
-      continue;
-    }
-    tombstone_paths.push_back(std::string(TOMBSTONE_DIR) +
-                              std::string(event->name));
-  }
-
-  return tombstone_paths;
-}
-
 DEFINE_uint32(port,
               static_cast<uint32_t>(
                   property_get_int64("ro.boot.vsock_tombstone_port", 0)),
               "VSOCK port to send tombstones to");
 DEFINE_uint32(cid, 2, "VSOCK CID to send logcat output to");
+DEFINE_bool(remove_tombstones_after_transmitting, false,
+            "Whether to remove the tombstone from VM after transmitting it");
 #define TOMBSTONE_BUFFER_SIZE (1024)
 
+static void tombstone_send_to_host(const std::string& ts_path) {
+  auto log_fd =
+      cuttlefish::SharedFD::VsockClient(FLAGS_cid, FLAGS_port, SOCK_STREAM);
+  std::ifstream ifs(ts_path);
+  char buffer[TOMBSTONE_BUFFER_SIZE];
+  size_t num_transfers = 0;
+  size_t num_bytes_read = 0;
+  while (log_fd->IsOpen() && ifs.is_open() && !ifs.eof()) {
+    ifs.read(buffer, sizeof(buffer));
+    num_bytes_read += ifs.gcount();
+    log_fd->Write(buffer, ifs.gcount());
+    num_transfers++;
+  }
+
+  if (!log_fd->IsOpen()) {
+    auto error = log_fd->StrError();
+    ALOGE("Unable to connect to vsock:%u:%u: %s", FLAGS_cid, FLAGS_port,
+          error.c_str());
+  } else if (!ifs.is_open()) {
+    ALOGE("%s closed in the middle of readout.", ts_path.c_str());
+  } else {
+    LOG(INFO) << num_bytes_read << " bytes transferred from "
+              << ts_path.c_str() << " over " << num_transfers << " "
+              << TOMBSTONE_BUFFER_SIZE << " byte sized transfers";
+  }
+
+  if (FLAGS_remove_tombstones_after_transmitting) {
+    remove(ts_path.c_str());
+  }
+}
+
 int main(int argc, char** argv) {
   gflags::ParseCommandLineFlags(&argc, &argv, true);
 
@@ -111,34 +108,16 @@
 
   LOG(INFO) << "tombstone watcher successfully initialized";
 
-  while (true) {
-    std::vector<std::string> ts_paths =
-        get_next_tombstones_path_blocking(file_create_notification_handle);
-    for (auto& ts_path : ts_paths) {
-      auto log_fd =
-          cuttlefish::SharedFD::VsockClient(FLAGS_cid, FLAGS_port, SOCK_STREAM);
-      std::ifstream ifs(ts_path);
-      char buffer[TOMBSTONE_BUFFER_SIZE];
-      uint num_transfers = 0;
-      int num_bytes_read = 0;
-      while (log_fd->IsOpen() && ifs.is_open() && !ifs.eof()) {
-        ifs.read(buffer, sizeof(buffer));
-        num_bytes_read += ifs.gcount();
-        log_fd->Write(buffer, ifs.gcount());
-        num_transfers++;
-      }
+#ifdef MICRODROID
+  property_set("tombstone_transmit.init_done", "true");
+#endif
 
-      if (!log_fd->IsOpen()) {
-        auto error = log_fd->StrError();
-        ALOGE("Unable to connect to vsock:%u:%u: %s", FLAGS_cid, FLAGS_port,
-              error.c_str());
-      } else if (!ifs.is_open()) {
-        ALOGE("%s closed in the middle of readout.", ts_path.c_str());
-      } else {
-        LOG(INFO) << num_bytes_read << " chars transferred from "
-                  << ts_path.c_str() << " over " << num_transfers << " "
-                  << TOMBSTONE_BUFFER_SIZE << " byte sized transfers";
-      }
+  while (true) {
+    std::vector<std::string> ts_names =
+        cuttlefish::GetCreatedFileListFromInotifyFd(
+            file_create_notification_handle);
+    for (auto& ts_name : ts_names) {
+      tombstone_send_to_host(std::string(TOMBSTONE_DIR) + ts_name);
     }
   }
 
diff --git a/guest/services/cf_satellite_service/Android.bp b/guest/services/cf_satellite_service/Android.bp
new file mode 100644
index 0000000..770c68b
--- /dev/null
+++ b/guest/services/cf_satellite_service/Android.bp
@@ -0,0 +1,43 @@
+// Copyright 2023 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//    http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+android_library {
+    name: "CFSatelliteService-core",
+    srcs: [
+        "src/**/*.java",
+    ],
+    static_libs: [
+        "android-support-annotations",
+    ],
+    libs: [
+        "telephony-common",
+    ],
+}
+
+android_app {
+    name: "CFSatelliteService",
+    system_ext_specific: true,
+    platform_apis: true,
+    manifest: "AndroidManifest.xml",
+    static_libs: [
+        "CFSatelliteService-core",
+    ],
+    owner: "google",
+    privileged: true,
+    certificate: "platform",
+}
diff --git a/guest/services/cf_satellite_service/AndroidManifest.xml b/guest/services/cf_satellite_service/AndroidManifest.xml
new file mode 100644
index 0000000..17ff714
--- /dev/null
+++ b/guest/services/cf_satellite_service/AndroidManifest.xml
@@ -0,0 +1,16 @@
+<?xml version="1.0" encoding="utf-8"?>
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+          xmlns:tools="http://schemas.android.com/tools"
+          package="com.google.android.telephony.satellite">
+    <application>
+        <service android:name=".CFSatelliteService"
+                 android:exported="true"
+                 android:directBootAware="true"
+                 android:persistent="true"
+                 android:permission="android.permission.BIND_SATELLITE_SERVICE">
+            <intent-filter>
+                <action android:name="android.telephony.satellite.SatelliteService" />
+            </intent-filter>
+        </service>
+    </application>
+</manifest>
diff --git a/guest/services/cf_satellite_service/src/com/google/android/telephony/satellite/CFSatelliteService.java b/guest/services/cf_satellite_service/src/com/google/android/telephony/satellite/CFSatelliteService.java
new file mode 100644
index 0000000..db9cce1
--- /dev/null
+++ b/guest/services/cf_satellite_service/src/com/google/android/telephony/satellite/CFSatelliteService.java
@@ -0,0 +1,328 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.android.telephony.satellite;
+
+import android.annotation.NonNull;
+import android.annotation.Nullable;
+import android.content.Intent;
+import android.os.Binder;
+import android.os.IBinder;
+import android.telephony.satellite.stub.ISatelliteCapabilitiesConsumer;
+import android.telephony.satellite.stub.ISatelliteListener;
+import android.telephony.satellite.stub.NTRadioTechnology;
+import android.telephony.satellite.stub.PointingInfo;
+import android.telephony.satellite.stub.SatelliteCapabilities;
+import android.telephony.satellite.stub.SatelliteDatagram;
+import android.telephony.satellite.stub.SatelliteError;
+import android.telephony.satellite.stub.SatelliteImplBase;
+import android.telephony.satellite.stub.SatelliteModemState;
+import android.telephony.satellite.stub.SatelliteService;
+
+import com.android.internal.telephony.IBooleanConsumer;
+import com.android.internal.telephony.IIntegerConsumer;
+import com.android.internal.util.FunctionalUtils;
+import com.android.telephony.Rlog;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.Executor;
+
+public class CFSatelliteService extends SatelliteImplBase {
+    private static final String TAG = "CFSatelliteService";
+
+    // Hardcoded values below
+    private static final int SATELLITE_ALWAYS_VISIBLE = 0;
+    /** SatelliteCapabilities constant indicating that the radio technology is proprietary. */
+    private static final int[] SUPPORTED_RADIO_TECHNOLOGIES =
+            new int[] {NTRadioTechnology.PROPRIETARY};
+    /** SatelliteCapabilities constant indicating that pointing to satellite is required. */
+    private static final boolean POINTING_TO_SATELLITE_REQUIRED = true;
+    /** SatelliteCapabilities constant indicating the maximum number of characters per datagram. */
+    private static final int MAX_BYTES_PER_DATAGRAM = 339;
+
+    @NonNull private final Map<IBinder, ISatelliteListener> mListeners = new HashMap<>();
+
+    private boolean mIsCommunicationAllowedInLocation;
+    private boolean mIsEnabled;
+    private boolean mIsProvisioned;
+    private boolean mIsSupported;
+    private int mModemState;
+
+    /**
+     * Create CFSatelliteService using the Executor specified for methods being called from
+     * the framework.
+     *
+     * @param executor The executor for the framework to use when executing satellite methods.
+     */
+    public CFSatelliteService(@NonNull Executor executor) {
+        super(executor);
+        mIsCommunicationAllowedInLocation = true;
+        mIsEnabled = false;
+        mIsProvisioned = false;
+        mIsSupported = true;
+        mModemState = SatelliteModemState.SATELLITE_MODEM_STATE_OFF;
+    }
+
+    /**
+     * Zero-argument constructor to prevent service binding exception.
+     */
+    public CFSatelliteService() {
+        this(Runnable::run);
+    }
+
+    @Override
+    public IBinder onBind(Intent intent) {
+        if (SatelliteService.SERVICE_INTERFACE.equals(intent.getAction())) {
+            logd("CFSatelliteService bound");
+            return new CFSatelliteService().getBinder();
+        }
+        return null;
+    }
+
+    @Override
+    public void onCreate() {
+        super.onCreate();
+        logd("onCreate");
+    }
+
+    @Override
+    public void onDestroy() {
+        super.onDestroy();
+        logd("onDestroy");
+    }
+
+    @Override
+    public void setSatelliteListener(@NonNull ISatelliteListener listener) {
+        logd("setSatelliteListener");
+        mListeners.put(listener.asBinder(), listener);
+    }
+
+    @Override
+    public void requestSatelliteListeningEnabled(boolean enable, int timeout,
+            @NonNull IIntegerConsumer errorCallback) {
+        logd("requestSatelliteListeningEnabled");
+        if (!verifySatelliteModemState(errorCallback)) {
+            return;
+        }
+        if (enable) {
+            updateSatelliteModemState(SatelliteModemState.SATELLITE_MODEM_STATE_LISTENING);
+        } else {
+            updateSatelliteModemState(SatelliteModemState.SATELLITE_MODEM_STATE_IDLE);
+        }
+        runWithExecutor(() -> errorCallback.accept(SatelliteError.ERROR_NONE));
+    }
+
+    @Override
+    public void requestSatelliteEnabled(boolean enableSatellite, boolean enableDemoMode,
+            @NonNull IIntegerConsumer errorCallback) {
+        logd("requestSatelliteEnabled");
+        if (enableSatellite) {
+            enableSatellite(errorCallback);
+        } else {
+            disableSatellite(errorCallback);
+        }
+    }
+
+    private void enableSatellite(@NonNull IIntegerConsumer errorCallback) {
+        mIsEnabled = true;
+        updateSatelliteModemState(SatelliteModemState.SATELLITE_MODEM_STATE_IDLE);
+        runWithExecutor(() -> errorCallback.accept(SatelliteError.ERROR_NONE));
+    }
+
+    private void disableSatellite(@NonNull IIntegerConsumer errorCallback) {
+        mIsEnabled = false;
+        updateSatelliteModemState(SatelliteModemState.SATELLITE_MODEM_STATE_OFF);
+        runWithExecutor(() -> errorCallback.accept(SatelliteError.ERROR_NONE));
+    }
+
+    @Override
+    public void requestIsSatelliteEnabled(@NonNull IIntegerConsumer errorCallback,
+            @NonNull IBooleanConsumer callback) {
+        logd("requestIsSatelliteEnabled");
+        runWithExecutor(() -> callback.accept(mIsEnabled));
+    }
+
+    @Override
+    public void requestIsSatelliteSupported(@NonNull IIntegerConsumer errorCallback,
+            @NonNull IBooleanConsumer callback) {
+        logd("requestIsSatelliteSupported");
+        runWithExecutor(() -> callback.accept(mIsSupported));
+    }
+
+    @Override
+    public void requestSatelliteCapabilities(@NonNull IIntegerConsumer errorCallback,
+            @NonNull ISatelliteCapabilitiesConsumer callback) {
+        logd("requestSatelliteCapabilities");
+        SatelliteCapabilities capabilities = new SatelliteCapabilities();
+        capabilities.supportedRadioTechnologies = SUPPORTED_RADIO_TECHNOLOGIES;
+        capabilities.isPointingRequired = POINTING_TO_SATELLITE_REQUIRED;
+        capabilities.maxBytesPerOutgoingDatagram = MAX_BYTES_PER_DATAGRAM;
+        runWithExecutor(() -> callback.accept(capabilities));
+    }
+
+    @Override
+    public void startSendingSatellitePointingInfo(@NonNull IIntegerConsumer errorCallback) {
+        logd("startSendingSatellitePointingInfo");
+        if (!verifySatelliteModemState(errorCallback)) {
+            return;
+        }
+        runWithExecutor(() -> errorCallback.accept(SatelliteError.ERROR_NONE));
+    }
+
+    @Override
+    public void stopSendingSatellitePointingInfo(@NonNull IIntegerConsumer errorCallback) {
+        logd("stopSendingSatellitePointingInfo");
+        runWithExecutor(() -> errorCallback.accept(SatelliteError.ERROR_NONE));
+    }
+
+    @Override
+    public void provisionSatelliteService(@NonNull String token, @NonNull byte[] provisionData,
+            @NonNull IIntegerConsumer errorCallback) {
+        logd("provisionSatelliteService");
+        runWithExecutor(() -> errorCallback.accept(SatelliteError.ERROR_NONE));
+        updateSatelliteProvisionState(true);
+    }
+
+    @Override
+    public void deprovisionSatelliteService(@NonNull String token,
+            @NonNull IIntegerConsumer errorCallback) {
+        logd("deprovisionSatelliteService");
+        runWithExecutor(() -> errorCallback.accept(SatelliteError.ERROR_NONE));
+        updateSatelliteProvisionState(false);
+    }
+
+    @Override
+    public void requestIsSatelliteProvisioned(@NonNull IIntegerConsumer errorCallback,
+            @NonNull IBooleanConsumer callback) {
+        logd("requestIsSatelliteProvisioned");
+        runWithExecutor(() -> callback.accept(mIsProvisioned));
+    }
+
+    @Override
+    public void pollPendingSatelliteDatagrams(@NonNull IIntegerConsumer errorCallback) {
+        logd("pollPendingSatelliteDatagrams");
+        runWithExecutor(() -> errorCallback.accept(SatelliteError.ERROR_NONE));
+    }
+
+    @Override
+    public void sendSatelliteDatagram(@NonNull SatelliteDatagram datagram, boolean isEmergency,
+            @NonNull IIntegerConsumer errorCallback) {
+        logd("sendSatelliteDatagram");
+        runWithExecutor(() -> errorCallback.accept(SatelliteError.ERROR_NONE));
+    }
+
+    @Override
+    public void requestSatelliteModemState(@NonNull IIntegerConsumer errorCallback,
+            @NonNull IIntegerConsumer callback) {
+        logd("requestSatelliteModemState");
+        runWithExecutor(() -> callback.accept(mModemState));
+    }
+
+    @Override
+    public void requestIsSatelliteCommunicationAllowedForCurrentLocation(
+            @NonNull IIntegerConsumer errorCallback, @NonNull IBooleanConsumer callback) {
+        logd("requestIsSatelliteCommunicationAllowedForCurrentLocation");
+        if (mIsCommunicationAllowedInLocation) {
+            runWithExecutor(() -> callback.accept(true));
+        } else {
+            runWithExecutor(() -> callback.accept(false));
+        }
+    }
+
+    @Override
+    public void requestTimeForNextSatelliteVisibility(@NonNull IIntegerConsumer errorCallback,
+            @NonNull IIntegerConsumer callback) {
+        logd("requestTimeForNextSatelliteVisibility");
+        runWithExecutor(() -> callback.accept(SATELLITE_ALWAYS_VISIBLE));
+    }
+
+    /**
+     * Helper method to verify that the satellite modem is properly configured to receive requests.
+     *
+     * @param errorCallback The callback to notify of any errors preventing satellite requests.
+     * @return {@code true} if the satellite modem is configured to receive requests and
+     *         {@code false} if it is not.
+     */
+    private boolean verifySatelliteModemState(@NonNull IIntegerConsumer errorCallback) {
+        if (!mIsSupported) {
+            runWithExecutor(() -> errorCallback.accept(SatelliteError.REQUEST_NOT_SUPPORTED));
+            return false;
+        }
+        if (!mIsProvisioned) {
+            runWithExecutor(() -> errorCallback.accept(SatelliteError.SERVICE_NOT_PROVISIONED));
+            return false;
+        }
+        if (!mIsEnabled) {
+            runWithExecutor(() -> errorCallback.accept(SatelliteError.INVALID_MODEM_STATE));
+            return false;
+        }
+        return true;
+    }
+
+    /**
+     * Update the satellite modem state and notify listeners if it changed.
+     *
+     * @param modemState The {@link SatelliteModemState} to update.
+     */
+    private void updateSatelliteModemState(int modemState) {
+        if (modemState == mModemState) {
+            return;
+        }
+        logd("updateSatelliteModemState: mListeners.size=" + mListeners.size());
+        mListeners.values().forEach(listener -> runWithExecutor(() ->
+                listener.onSatelliteModemStateChanged(modemState)));
+        mModemState = modemState;
+    }
+
+    /**
+     * Update the satellite provision state and notify listeners if it changed.
+     *
+     * @param isProvisioned {@code true} if the satellite is currently provisioned and
+     *                      {@code false} if it is not.
+     */
+    private void updateSatelliteProvisionState(boolean isProvisioned) {
+        if (isProvisioned == mIsProvisioned) {
+            return;
+        }
+        logd("updateSatelliteProvisionState: mListeners.size=" + mListeners.size());
+        mIsProvisioned = isProvisioned;
+        mListeners.values().forEach(listener -> runWithExecutor(() ->
+                listener.onSatelliteProvisionStateChanged(mIsProvisioned)));
+    }
+
+    /**
+     * Execute the given runnable using the executor that this service was created with.
+     *
+     * @param r A runnable that can throw an exception.
+     */
+    private void runWithExecutor(@NonNull FunctionalUtils.ThrowingRunnable r) {
+        mExecutor.execute(() -> Binder.withCleanCallingIdentity(r));
+    }
+
+    /**
+     * Log the message to the radio buffer with {@code DEBUG} priority.
+     *
+     * @param log The message to log.
+     */
+    private static void logd(@NonNull String log) {
+        Rlog.d(TAG, log);
+    }
+}
diff --git a/guest/services/wifi/Android.bp b/guest/services/wifi/Android.bp
index 4a5fd53..21c51eb 100644
--- a/guest/services/wifi/Android.bp
+++ b/guest/services/wifi/Android.bp
@@ -17,16 +17,16 @@
 }
 
 genrule {
-    name: "init.wifi.sh_apex_srcs",
+    name: "init.wifi_apex_srcs",
     srcs: ["init.wifi.sh"],
-    out: ["init.wifi.sh_apex"],
+    out: ["init.wifi_apex.sh"],
     cmd: "sed -E 's/\\/vendor\\/bin\\/mac802/\\/apex\\/com.android.wifi.hal\\/bin\\/mac802/' $(in) > $(out)",
 }
 
 sh_binary {
-    name: "init.wifi.sh_apex",
-    src: ":init.wifi.sh_apex_srcs",
-    filename: "init.wifi.sh",
+    name: "init.wifi_apex",
+    src: ":init.wifi_apex_srcs",
+    filename: "init.wifi",
     vendor: true,
     installable: false,
     init_rc: [
@@ -35,7 +35,7 @@
 }
 
 sh_binary {
-    name: "init.wifi.sh",
+    name: "init.wifi",
     src: "init.wifi.sh",
     vendor: true,
     init_rc: [
diff --git a/guest/services/wifi/init.wifi.sh b/guest/services/wifi/init.wifi.sh
index a23f174..a360a28 100755
--- a/guest/services/wifi/init.wifi.sh
+++ b/guest/services/wifi/init.wifi.sh
@@ -1,7 +1,21 @@
 #!/vendor/bin/sh
 
+# Copyright 2021 Google Inc. All rights reserved.
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 wifi_mac_prefix=`getprop ro.boot.wifi_mac_prefix`
 if [ -n "$wifi_mac_prefix" ]; then
-    /vendor/bin/mac80211_create_radios 2 $wifi_mac_prefix || exit 1
+    /vendor/bin/mac80211_create_radios --enable-pmsr 2 $wifi_mac_prefix || exit 1
 fi
 
diff --git a/guest/services/wifi/init.wifi.sh.rc b/guest/services/wifi/init.wifi.sh.rc
index 9a0daee..e187f33 100644
--- a/guest/services/wifi/init.wifi.sh.rc
+++ b/guest/services/wifi/init.wifi.sh.rc
@@ -1,5 +1,5 @@
 
-service init_wifi_sh /vendor/bin/init.wifi.sh
+service init_wifi_sh /vendor/bin/init.wifi
     class late_start
     user root
     group root wakelock wifi
diff --git a/host/commands/append_squashfs_overlay/Android.bp b/host/commands/append_squashfs_overlay/Android.bp
index 882bcb9..a5b6833 100644
--- a/host/commands/append_squashfs_overlay/Android.bp
+++ b/host/commands/append_squashfs_overlay/Android.bp
@@ -1,3 +1,18 @@
+//
+// Copyright (C) 2021 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
 package {
     default_applicable_licenses: ["Android-Apache-2.0"],
 }
@@ -9,4 +24,14 @@
     rustlibs: [
         "libclap",
     ],
-}
\ No newline at end of file
+}
+
+rust_test_host {
+    name: "append_squashfs_overlay.test",
+    crate_name: "append_squashfs_overlay",
+    srcs: ["src/main.rs"],
+    rustlibs: [
+        "libclap",
+    ],
+    test_suites: ["general-tests"],
+}
diff --git a/host/commands/append_squashfs_overlay/src/main.rs b/host/commands/append_squashfs_overlay/src/main.rs
index 281aef8..68664f3 100644
--- a/host/commands/append_squashfs_overlay/src/main.rs
+++ b/host/commands/append_squashfs_overlay/src/main.rs
@@ -18,9 +18,9 @@
 //! The tool ignores the existing overlay image in src, that is, the overlay image could be replaced with a new overlay image.
 use std::fs::File;
 use std::io::{copy, Error, ErrorKind, Read, Result, Seek, SeekFrom};
-use std::path::Path;
+use std::path::{Path, PathBuf};
 
-use clap::{App, Arg};
+use clap::{builder::ValueParser, Arg, ArgAction, Command};
 
 // https://dr-emann.github.io/squashfs/squashfs.html
 const BYTES_USED_FIELD_POS: u64 = (32 * 5 + 16 * 6 + 64) / 8;
@@ -64,32 +64,45 @@
     let mut dest = File::create(dest)?;
     let mut overlay = File::open(overlay)?;
 
-    src.seek(SeekFrom::Start(0))?;
+    src.rewind()?;
     let mut src_handle = src.take(align_size(bytes_used, ROOTDEV_OVERLAY_ALIGN));
     copy(&mut src_handle, &mut dest)?;
     copy(&mut overlay, &mut dest)?;
     Ok(())
 }
 
-fn main() -> Result<()> {
-    let matches = App::new("append_squashfs_overlay")
-        .arg(Arg::with_name("src").required(true))
-        .arg(Arg::with_name("overlay").required(true))
-        .arg(Arg::with_name("dest").required(true))
+fn clap_command() -> Command {
+    Command::new("append_squashfs_overlay")
+        .arg(Arg::new("src").value_parser(ValueParser::path_buf()).required(true))
+        .arg(Arg::new("overlay").value_parser(ValueParser::path_buf()).required(true))
+        .arg(Arg::new("dest").value_parser(ValueParser::path_buf()).required(true))
         .arg(
-            Arg::with_name("overwrite")
-                .short("w")
+            Arg::new("overwrite")
+                .short('w')
                 .required(false)
-                .takes_value(false)
+                .action(ArgAction::SetTrue)
                 .help("whether the tool overwrite dest or not"),
         )
-        .get_matches();
+}
 
-    let src = matches.value_of("src").unwrap().as_ref();
-    let overlay = matches.value_of("overlay").unwrap().as_ref();
-    let dest = matches.value_of("dest").unwrap().as_ref();
-    let overwrite = matches.is_present("overwrite");
+fn main() -> Result<()> {
+    let matches = clap_command().get_matches();
+
+    let src = matches.get_one::<PathBuf>("src").unwrap().as_ref();
+    let overlay = matches.get_one::<PathBuf>("overlay").unwrap().as_ref();
+    let dest = matches.get_one::<PathBuf>("dest").unwrap().as_ref();
+    let overwrite = matches.get_flag("overwrite");
 
     merge_fs(src, overlay, dest, overwrite)?;
     Ok(())
 }
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn verify_args() {
+        clap_command().debug_assert();
+    }
+}
diff --git a/host/commands/assemble_cvd/Android.bp b/host/commands/assemble_cvd/Android.bp
index a2d4ac3..046a2cc 100644
--- a/host/commands/assemble_cvd/Android.bp
+++ b/host/commands/assemble_cvd/Android.bp
@@ -31,6 +31,7 @@
         "flag_feature.cpp",
         "misc_info.cc",
         "super_image_mixer.cc",
+        "vendor_dlkm_utils.cc",
     ],
     header_libs: [
         "bootimg_headers",
@@ -53,15 +54,36 @@
         "libext2_uuid",
         "libimage_aggregator",
         "libsparse",
+        "libcuttlefish_display_flags",
+        "libcuttlefish_graphics_configuration",
         "libcuttlefish_graphics_detector",
         "libcuttlefish_host_config",
         "libcuttlefish_host_config_adb",
+        "libcuttlefish_host_config_fastboot",
+        "libcuttlefish_launch_cvd_proto",
         "libcuttlefish_vm_manager",
         "libgflags",
     ],
     required: [
         "mkenvimage_slim",
         "lz4",
+        "avbtool",
+        "mkuserimg_mke2fs",
     ],
     defaults: ["cuttlefish_host", "cuttlefish_libicuuc"],
 }
+
+cc_library {
+    name: "libcuttlefish_display_flags",
+    srcs: [
+        "display_flags.cpp",
+    ],
+    shared_libs: [
+        "libbase",
+        "libjsoncpp",
+    ],
+    static_libs: [
+        "libcuttlefish_host_config",
+    ],
+    defaults: ["cuttlefish_host"],
+}
diff --git a/host/commands/assemble_cvd/README.md b/host/commands/assemble_cvd/README.md
new file mode 100644
index 0000000..f424ced
--- /dev/null
+++ b/host/commands/assemble_cvd/README.md
@@ -0,0 +1,13 @@
+Host filesystem setup to prepare for running a Cuttlefish device.
+
+Supports rewriting `super.img`, `boot.img`, and `initramfs.img` based on
+`launch_cvd` arguments. In a multi-device configuration, there is only one
+`assemble_cvd` invocation, but multiple `run_cvd` invocations, one per device.
+
+The disk files produced by the Android build system are arranged into a
+"[composite disk]" and protected with a read-on-write [qcow2] overlay.
+
+[![Disk diagram](./doc/disk.png)](https://cs.android.com/android/platform/superproject/+/master:device/google/cuttlefish/host/commands/assemble_cvd/doc/disk.svg)
+
+[composite disk]: https://chromium.googlesource.com/chromiumos/platform/crosvm/+/refs/heads/main/disk/src/composite.rs
+[qcow2]: https://en.wikipedia.org/wiki/Qcow
diff --git a/host/commands/assemble_cvd/alloc.cc b/host/commands/assemble_cvd/alloc.cc
index 62b1b78..c7896ae 100644
--- a/host/commands/assemble_cvd/alloc.cc
+++ b/host/commands/assemble_cvd/alloc.cc
@@ -37,9 +37,13 @@
   config.mobile_tap.resource_id = 0;
   config.mobile_tap.session_id = 0;
 
-  config.wireless_tap.name = StrForInstance("cvd-wtap-", num);
-  config.wireless_tap.resource_id = 0;
-  config.wireless_tap.session_id = 0;
+  config.bridged_wireless_tap.name = StrForInstance("cvd-wtap-", num);
+  config.bridged_wireless_tap.resource_id = 0;
+  config.bridged_wireless_tap.session_id = 0;
+
+  config.non_bridged_wireless_tap.name = StrForInstance("cvd-wifiap-", num);
+  config.non_bridged_wireless_tap.resource_id = 0;
+  config.non_bridged_wireless_tap.session_id = 0;
 
   config.ethernet_tap.name = StrForInstance("cvd-etap-", num);
   config.ethernet_tap.resource_id = 0;
@@ -66,6 +70,8 @@
   request_list.append(req);
   req["iface_type"] = "wtap";
   request_list.append(req);
+  req["iface_type"] = "wifiap";
+  request_list.append(req);
   req["iface_type"] = "etap";
   request_list.append(req);
 
@@ -96,6 +102,7 @@
   Json::Value resp_list = resp["response_list"];
   Json::Value mtap_resp;
   Json::Value wtap_resp;
+  Json::Value wifiap_resp;
   Json::Value etap_resp;
   for (Json::Value::ArrayIndex i = 0; i != resp_list.size(); ++i) {
     auto ty = StrToIfaceTy(resp_list[i]["iface_type"].asString());
@@ -109,6 +116,10 @@
         wtap_resp = resp_list[i];
         break;
       }
+      case IfaceType::wifiap: {
+        wifiap_resp = resp_list[i];
+        break;
+      }
       case IfaceType::etap: {
         etap_resp = resp_list[i];
         break;
@@ -127,6 +138,10 @@
     LOG(ERROR) << "Missing wtap response from allocd";
     return std::nullopt;
   }
+  if (!wifiap_resp.isMember("iface_type")) {
+    LOG(ERROR) << "Missing wifiap response from allocd";
+    return std::nullopt;
+  }
   if (!etap_resp.isMember("iface_type")) {
     LOG(ERROR) << "Missing etap response from allocd";
     return std::nullopt;
@@ -136,9 +151,14 @@
   config.mobile_tap.resource_id = mtap_resp["resource_id"].asUInt();
   config.mobile_tap.session_id = session_id;
 
-  config.wireless_tap.name = wtap_resp["iface_name"].asString();
-  config.wireless_tap.resource_id = wtap_resp["resource_id"].asUInt();
-  config.wireless_tap.session_id = session_id;
+  config.bridged_wireless_tap.name = wtap_resp["iface_name"].asString();
+  config.bridged_wireless_tap.resource_id = wtap_resp["resource_id"].asUInt();
+  config.bridged_wireless_tap.session_id = session_id;
+
+  config.non_bridged_wireless_tap.name = wifiap_resp["iface_name"].asString();
+  config.non_bridged_wireless_tap.resource_id =
+      wifiap_resp["resource_id"].asUInt();
+  config.non_bridged_wireless_tap.session_id = session_id;
 
   config.ethernet_tap.name = etap_resp["iface_name"].asString();
   config.ethernet_tap.resource_id = etap_resp["resource_id"].asUInt();
diff --git a/host/commands/assemble_cvd/alloc.h b/host/commands/assemble_cvd/alloc.h
index d74c2b2..12844e0 100644
--- a/host/commands/assemble_cvd/alloc.h
+++ b/host/commands/assemble_cvd/alloc.h
@@ -29,7 +29,8 @@
 
 struct IfaceConfig {
   IfaceData mobile_tap;
-  IfaceData wireless_tap;
+  IfaceData bridged_wireless_tap;
+  IfaceData non_bridged_wireless_tap;
   IfaceData ethernet_tap;
 };
 
diff --git a/host/commands/assemble_cvd/assemble_cvd.cc b/host/commands/assemble_cvd/assemble_cvd.cc
index 70607fa..9d285ec 100644
--- a/host/commands/assemble_cvd/assemble_cvd.cc
+++ b/host/commands/assemble_cvd/assemble_cvd.cc
@@ -15,8 +15,10 @@
 
 #include <iostream>
 
-#include <android-base/strings.h>
 #include <android-base/logging.h>
+#include <android-base/parsebool.h>
+#include <android-base/parseint.h>
+#include <android-base/strings.h>
 #include <gflags/gflags.h>
 
 #include "common/libs/fs/shared_buf.h"
@@ -29,42 +31,62 @@
 #include "host/commands/assemble_cvd/disk_flags.h"
 #include "host/commands/assemble_cvd/flag_feature.h"
 #include "host/commands/assemble_cvd/flags.h"
+#include "host/commands/assemble_cvd/flags_defaults.h"
 #include "host/libs/config/adb/adb.h"
 #include "host/libs/config/config_flag.h"
 #include "host/libs/config/custom_actions.h"
+#include "host/libs/config/fastboot/fastboot.h"
 #include "host/libs/config/fetcher_config.h"
+#include "host/libs/config/inject.h"
 
 using cuttlefish::StringFromEnv;
 
-DEFINE_string(assembly_dir, StringFromEnv("HOME", ".") + "/cuttlefish_assembly",
+DEFINE_string(assembly_dir, CF_DEFAULTS_ASSEMBLY_DIR,
               "A directory to put generated files common between instances");
-DEFINE_string(instance_dir, StringFromEnv("HOME", ".") + "/cuttlefish",
+DEFINE_string(instance_dir, CF_DEFAULTS_INSTANCE_DIR,
               "This is a directory that will hold the cuttlefish generated"
               "files, including both instance-specific and common files");
-DEFINE_bool(resume, true, "Resume using the disk from the last session, if "
-                          "possible. i.e., if --noresume is passed, the disk "
-                          "will be reset to the state it was initially launched "
-                          "in. This flag is ignored if the underlying partition "
-                          "images have been updated since the first launch.");
-DEFINE_int32(modem_simulator_count, 1,
-             "Modem simulator count corresponding to maximum sim number");
+DEFINE_bool(resume, CF_DEFAULTS_RESUME,
+            "Resume using the disk from the last session, if "
+            "possible. i.e., if --noresume is passed, the disk "
+            "will be reset to the state it was initially launched "
+            "in. This flag is ignored if the underlying partition "
+            "images have been updated since the first launch.");
+
+DECLARE_bool(use_overlay);
 
 namespace cuttlefish {
 namespace {
 
 std::string kFetcherConfigFile = "fetcher_config.json";
 
-FetcherConfig FindFetcherConfig(const std::vector<std::string>& files) {
+struct LocatedFetcherConfig {
   FetcherConfig fetcher_config;
+  std::optional<std::string> working_dir;
+};
+
+LocatedFetcherConfig FindFetcherConfig(const std::vector<std::string>& files) {
+  LocatedFetcherConfig located_fetcher_config;
   for (const auto& file : files) {
     if (android::base::EndsWith(file, kFetcherConfigFile)) {
-      if (fetcher_config.LoadFromFile(file)) {
-        return fetcher_config;
+      std::string home_directory = StringFromEnv("HOME", CurrentDirectory());
+      std::string fetcher_file = file;
+      if (!FileExists(file) &&
+          FileExists(home_directory + "/" + fetcher_file)) {
+        LOG(INFO) << "Found " << fetcher_file << " in HOME directory ('"
+                  << home_directory << "') and not current working directory";
+
+        located_fetcher_config.working_dir = home_directory;
+        fetcher_file = home_directory + "/" + fetcher_file;
+      }
+
+      if (located_fetcher_config.fetcher_config.LoadFromFile(fetcher_file)) {
+        return located_fetcher_config;
       }
       LOG(ERROR) << "Could not load fetcher config file.";
     }
   }
-  return fetcher_config;
+  return located_fetcher_config;
 }
 
 std::string GetLegacyConfigFilePath(const CuttlefishConfig& config) {
@@ -96,10 +118,13 @@
 
 Result<void> CreateLegacySymlinks(
     const CuttlefishConfig::InstanceSpecific& instance) {
-  std::string log_files[] = {
-      "kernel.log",  "launcher.log",        "logcat",
-      "metrics.log", "modem_simulator.log", "crosvm_openwrt.log",
-  };
+  std::string log_files[] = {"kernel.log",
+                             "launcher.log",
+                             "logcat",
+                             "metrics.log",
+                             "modem_simulator.log",
+                             "crosvm_openwrt.log",
+                             "crosvm_openwrt_boot.log"};
   for (const auto& log_file : log_files) {
     auto symlink_location = instance.PerInstancePath(log_file.c_str());
     auto log_target = "logs/" + log_file;  // Relative path
@@ -128,11 +153,24 @@
     return CF_ERRNO("symlink(\"" << instance.instance_dir() << "\", \""
                                  << legacy_instance_path << "\") failed");
   }
+
+  const auto mac80211_uds_name = "vhost_user_mac80211";
+
+  const auto mac80211_uds_path =
+      instance.PerInstanceInternalUdsPath(mac80211_uds_name);
+  const auto legacy_mac80211_uds_path =
+      instance.PerInstanceInternalPath(mac80211_uds_name);
+
+  if (symlink(mac80211_uds_path.c_str(), legacy_mac80211_uds_path.c_str())) {
+    return CF_ERRNO("symlink(\"" << mac80211_uds_path << "\", \""
+                                 << legacy_mac80211_uds_path << "\") failed");
+  }
+
   return {};
 }
 
 Result<const CuttlefishConfig*> InitFilesystemAndCreateConfig(
-    FetcherConfig fetcher_config, KernelConfig kernel_config,
+    FetcherConfig fetcher_config, const std::vector<GuestConfig>& guest_configs,
     fruit::Injector<>& injector) {
   std::string runtime_dir_parent = AbsolutePath(FLAGS_instance_dir);
   while (runtime_dir_parent[runtime_dir_parent.size() - 1] == '/') {
@@ -158,12 +196,36 @@
     // SaveConfig line below. Don't launch cuttlefish subprocesses between these
     // two operations, as those will assume they can read the config object from
     // disk.
-    auto config = InitializeCuttlefishConfiguration(FLAGS_instance_dir,
-                                                    FLAGS_modem_simulator_count,
-                                                    kernel_config, injector);
+    auto config = CF_EXPECT(
+        InitializeCuttlefishConfiguration(FLAGS_instance_dir, guest_configs,
+                                          injector, fetcher_config),
+        "cuttlefish configuration initialization failed");
+
+    // take the max value of modem_simulator_instance_number in each instance
+    // which is used for preserving/deleting iccprofile_for_simX.xml files
+    int modem_simulator_count = 0;
+
     std::set<std::string> preserving;
-    auto os_builder = OsCompositeDiskBuilder(config);
-    bool creating_os_disk = CF_EXPECT(os_builder.WillRebuildCompositeDisk());
+    bool creating_os_disk = false;
+    // if any device needs to rebuild its composite disk,
+    // then don't preserve any files and delete everything.
+    for (const auto& instance : config.Instances()) {
+      auto os_builder = OsCompositeDiskBuilder(config, instance);
+      creating_os_disk |= CF_EXPECT(os_builder.WillRebuildCompositeDisk());
+      if (instance.ap_boot_flow() != CuttlefishConfig::InstanceSpecific::APBootFlow::None) {
+        auto ap_builder = ApCompositeDiskBuilder(config, instance);
+        creating_os_disk |= CF_EXPECT(ap_builder.WillRebuildCompositeDisk());
+      }
+      if (instance.modem_simulator_instance_number() > modem_simulator_count) {
+        modem_simulator_count = instance.modem_simulator_instance_number();
+      }
+    }
+    // TODO(schuffelen): Add smarter decision for when to delete runtime files.
+    // Files like NVChip are tightly bound to Android keymint and should be
+    // deleted when userdata is reset. However if the user has ever run without
+    // the overlay, then we want to keep this until userdata.img was externally
+    // replaced.
+    creating_os_disk &= FLAGS_use_overlay;
     if (FLAGS_resume && creating_os_disk) {
       LOG(INFO) << "Requested resuming a previous session (the default behavior) "
                 << "but the base images have changed under the overlay, making the "
@@ -177,6 +239,7 @@
       preserving.insert("os_composite.img");
       preserving.insert("sdcard.img");
       preserving.insert("boot_repacked.img");
+      preserving.insert("vendor_dlkm_repacked.img");
       preserving.insert("vendor_boot_repacked.img");
       preserving.insert("access-kregistry");
       preserving.insert("hwcomposer-pmem");
@@ -192,7 +255,7 @@
       preserving.insert("uboot_env.img");
       preserving.insert("factory_reset_protected.img");
       std::stringstream ss;
-      for (int i = 0; i < FLAGS_modem_simulator_count; i++) {
+      for (int i = 0; i < modem_simulator_count; i++) {
         ss.clear();
         ss << "iccprofile_for_sim" << i << ".xml";
         preserving.insert(ss.str());
@@ -203,26 +266,22 @@
                               config.instance_dirs()),
               "Failed to clean prior files");
 
+    auto defaultGroup = "cvdnetwork";
+    const mode_t defaultMode = S_IRWXU | S_IRWXG | S_IROTH | S_IXOTH;
+
     CF_EXPECT(EnsureDirectoryExists(config.root_dir()));
     CF_EXPECT(EnsureDirectoryExists(config.assembly_dir()));
     CF_EXPECT(EnsureDirectoryExists(config.instances_dir()));
+    CF_EXPECT(EnsureDirectoryExists(config.instances_uds_dir(), defaultMode,
+                                    defaultGroup));
+
+    LOG(INFO) << "Path for instance UDS: " << config.instances_uds_dir();
+
     if (log->LinkAtCwd(config.AssemblyPath("assemble_cvd.log"))) {
       LOG(ERROR) << "Unable to persist assemble_cvd log at "
                   << config.AssemblyPath("assemble_cvd.log")
                   << ": " << log->StrError();
     }
-
-    auto disk_config = GetOsCompositeDiskConfig();
-    if (auto it = std::find_if(disk_config.begin(), disk_config.end(),
-                               [](const auto& partition) {
-                                 return partition.label == "ap_rootfs";
-                               });
-        it != disk_config.end()) {
-      auto ap_image_idx = std::distance(disk_config.begin(), it) + 1;
-      std::stringstream ss;
-      ss << "/dev/vda" << ap_image_idx;
-      config.set_ap_image_dev_path(ss.str());
-    }
     for (const auto& instance : config.Instances()) {
       // Create instance directory if it doesn't exist.
       CF_EXPECT(EnsureDirectoryExists(instance.instance_dir()));
@@ -233,13 +292,22 @@
       auto recording_dir = instance.instance_dir() + "/recording";
       CF_EXPECT(EnsureDirectoryExists(recording_dir));
       CF_EXPECT(EnsureDirectoryExists(instance.PerInstanceLogPath("")));
+
+      CF_EXPECT(EnsureDirectoryExists(instance.instance_uds_dir(), defaultMode,
+                                      defaultGroup));
+      CF_EXPECT(EnsureDirectoryExists(instance.instance_internal_uds_dir(),
+                                      defaultMode, defaultGroup));
+      CF_EXPECT(EnsureDirectoryExists(instance.PerInstanceGrpcSocketPath(""),
+                                      defaultMode, defaultGroup));
+
       // TODO(schuffelen): Move this code somewhere better
       CF_EXPECT(CreateLegacySymlinks(instance));
     }
     CF_EXPECT(SaveConfig(config), "Failed to initialize configuration");
   }
 
-  // Do this early so that the config object is ready for anything that needs it
+  // Do this early so that the config object is ready for anything that needs
+  // it
   auto config = CuttlefishConfig::Get();
   CF_EXPECT(config != nullptr, "Failed to obtain config singleton");
 
@@ -250,7 +318,8 @@
     CF_EXPECT(RemoveFile(FLAGS_assembly_dir),
               "Failed to remove file" << FLAGS_assembly_dir);
   }
-  if (symlink(config->assembly_dir().c_str(), FLAGS_assembly_dir.c_str())) {
+  if (symlink(config->assembly_dir().c_str(),
+              FLAGS_assembly_dir.c_str())) {
     return CF_ERRNO("symlink(\"" << config->assembly_dir() << "\", \""
                                  << FLAGS_assembly_dir << "\") failed");
   }
@@ -263,7 +332,8 @@
   }
   if (symlink(first_instance.c_str(), double_legacy_instance_dir.c_str())) {
     return CF_ERRNO("symlink(\"" << first_instance << "\", \""
-                                 << double_legacy_instance_dir << "\") failed");
+                                 << double_legacy_instance_dir
+                                 << "\") failed");
   }
 
   CF_EXPECT(CreateDynamicDiskFiles(fetcher_config, *config));
@@ -292,6 +362,9 @@
       .install(AdbConfigComponent)
       .install(AdbConfigFlagComponent)
       .install(AdbConfigFragmentComponent)
+      .install(FastbootConfigComponent)
+      .install(FastbootConfigFlagComponent)
+      .install(FastbootConfigFragmentComponent)
       .install(GflagsComponent)
       .install(ConfigFlagComponent)
       .install(CustomActionsComponent);
@@ -321,9 +394,18 @@
   }
   std::vector<std::string> input_files = android::base::Split(input_files_str, "\n");
 
-  FetcherConfig fetcher_config = FindFetcherConfig(input_files);
+  LocatedFetcherConfig located_fetcher_config = FindFetcherConfig(input_files);
+  if (located_fetcher_config.working_dir) {
+    LOG(INFO) << "Changing current working dircetory to '"
+              << *located_fetcher_config.working_dir << "'";
+    CF_EXPECT(chdir((*located_fetcher_config.working_dir).c_str()) == 0,
+              "Unable to change working dir to '"
+                  << *located_fetcher_config.working_dir
+                  << "': " << strerror(errno));
+  }
+
   // set gflags defaults to point to kernel/RD from fetcher config
-  ExtractKernelParamsFromFetcherConfig(fetcher_config);
+  ExtractKernelParamsFromFetcherConfig(located_fetcher_config.fetcher_config);
 
   auto args = ArgsToVec(argc - 1, argv + 1);
 
@@ -348,6 +430,11 @@
   }
 
   fruit::Injector<> injector(FlagsComponent);
+
+  for (auto& late_injected : injector.getMultibindings<LateInjected>()) {
+    CF_EXPECT(late_injected->LateInject(injector));
+  }
+
   auto flag_features = injector.getMultibindings<FlagFeature>();
   CF_EXPECT(FlagFeature::ProcessFlags(flag_features, args),
             "Failed to parse flags.");
@@ -366,13 +453,13 @@
   // gflags either consumes all arguments that start with - or leaves all of
   // them in place, and either errors out on unknown flags or accepts any flags.
 
-  auto kernel_config =
-      CF_EXPECT(GetKernelConfigAndSetDefaults(), "Failed to parse arguments");
+  auto guest_configs =
+      CF_EXPECT(GetGuestConfigAndSetDefaults(), "Failed to parse arguments");
 
-  auto config =
-      CF_EXPECT(InitFilesystemAndCreateConfig(std::move(fetcher_config),
-                                              kernel_config, injector),
-                "Failed to create config");
+  auto config = CF_EXPECT(InitFilesystemAndCreateConfig(
+                              std::move(located_fetcher_config.fetcher_config),
+                              guest_configs, injector),
+                          "Failed to create config");
 
   std::cout << GetConfigFilePath(*config) << "\n";
   std::cout << std::flush;
@@ -384,6 +471,10 @@
 
 int main(int argc, char** argv) {
   auto res = cuttlefish::AssembleCvdMain(argc, argv);
-  CHECK(res.ok()) << "assemble_cvd failed: \n" << res.error();
-  return *res;
+  if (res.ok()) {
+    return *res;
+  }
+  LOG(ERROR) << "assemble_cvd failed: \n" << res.error().Message();
+  LOG(DEBUG) << "assemble_cvd failed: \n" << res.error().Trace();
+  abort();
 }
diff --git a/host/commands/assemble_cvd/boot_config.cc b/host/commands/assemble_cvd/boot_config.cc
index 7c9f8b1..ce22b5e 100644
--- a/host/commands/assemble_cvd/boot_config.cc
+++ b/host/commands/assemble_cvd/boot_config.cc
@@ -38,7 +38,6 @@
 
 using cuttlefish::vm_manager::CrosvmManager;
 
-DECLARE_bool(pause_in_bootloader);
 DECLARE_string(vm_manager);
 
 // Taken from external/avb/avbtool.py; this define is not in the headers
@@ -47,26 +46,66 @@
 namespace cuttlefish {
 namespace {
 
-size_t WriteEnvironment(const CuttlefishConfig& config,
+// The ordering of tap devices we're passing to crosvm / qemu is important
+// Ethernet tap device is the second one (eth1) we're passing ATM
+static constexpr char kUbootPrimaryEth[] = "eth1";
+
+void WritePausedEntrypoint(std::ostream& env, const char* entrypoint,
+                           const CuttlefishConfig::InstanceSpecific& instance) {
+  if (instance.pause_in_bootloader()) {
+    env << "if test $paused -ne 1; then paused=1; else " << entrypoint << "; fi";
+  } else {
+    env << entrypoint;
+  }
+
+  env << '\0';
+}
+
+void WriteAndroidEnvironment(
+    std::ostream& env,
+    const CuttlefishConfig::InstanceSpecific& instance) {
+  WritePausedEntrypoint(env, "run bootcmd_android", instance);
+
+  if (!instance.boot_slot().empty()) {
+    env << "android_slot_suffix=_" << instance.boot_slot() << '\0';
+  }
+  env << '\0';
+}
+
+void WriteEFIEnvironment(
+    std::ostream& env, const CuttlefishConfig::InstanceSpecific& instance) {
+  // TODO(b/256602611): get rid of loadddr hardcode. make sure loadddr
+  // env setup in the bootloader.
+  WritePausedEntrypoint(env,
+    "load virtio 0:${devplist} 0x80200000 efi/boot/bootaa64.efi "
+    "&& bootefi 0x80200000 ${fdtcontroladdr}; "
+    "load virtio 0:${devplist} 0x02400000 efi/boot/bootia32.efi && "
+    "bootefi 0x02400000 ${fdtcontroladdr}", instance
+  );
+}
+
+size_t WriteEnvironment(const CuttlefishConfig::InstanceSpecific& instance,
+                        const CuttlefishConfig::InstanceSpecific::BootFlow& flow,
                         const std::string& kernel_args,
                         const std::string& env_path) {
   std::ostringstream env;
 
+  env << "ethprime=" << kUbootPrimaryEth << '\0';
   if (!kernel_args.empty()) {
     env << "uenvcmd=setenv bootargs \"$cbootargs " << kernel_args << "\" && ";
   } else {
     env << "uenvcmd=setenv bootargs \"$cbootargs\" && ";
   }
-  if (FLAGS_pause_in_bootloader) {
-    env << "if test $paused -ne 1; then paused=1; else run bootcmd_android; fi";
-  } else {
-    env << "run bootcmd_android";
+
+  switch (flow) {
+    case CuttlefishConfig::InstanceSpecific::BootFlow::Android:
+      WriteAndroidEnvironment(env, instance);
+      break;
+    case CuttlefishConfig::InstanceSpecific::BootFlow::Linux:
+    case CuttlefishConfig::InstanceSpecific::BootFlow::Fuchsia:
+      WriteEFIEnvironment(env, instance);
+      break;
   }
-  env << '\0';
-  if (!config.boot_slot().empty()) {
-    env << "android_slot_suffix=_" << config.boot_slot() << '\0';
-  }
-  env << '\0';
 
   std::string env_str = env.str();
   std::ofstream file_out(env_path.c_str(), std::ios::binary);
@@ -90,37 +129,86 @@
 
   // SetupFeature
   std::string Name() const override { return "InitBootloaderEnvPartitionImpl"; }
-  bool Enabled() const override { return !config_.protected_vm(); }
+  bool Enabled() const override { return !instance_.protected_vm(); }
 
  private:
   std::unordered_set<SetupFeature*> Dependencies() const override { return {}; }
   bool Setup() override {
-    auto boot_env_image_path = instance_.uboot_env_image_path();
-    auto tmp_boot_env_image_path = boot_env_image_path + ".tmp";
+    if (instance_.ap_boot_flow() == CuttlefishConfig::InstanceSpecific::APBootFlow::Grub) {
+      if (!PrepareBootEnvImage(instance_.ap_uboot_env_image_path(),
+          CuttlefishConfig::InstanceSpecific::BootFlow::Linux)) {
+        return false;
+      }
+    }
+    if (!PrepareBootEnvImage(instance_.uboot_env_image_path(), instance_.boot_flow())) {
+      return false;
+    }
+
+    return true;
+  }
+
+  std::unordered_map<std::string, std::string> ReplaceKernelBootArgs(
+      const std::unordered_map<std::string, std::string>& args) {
+    std::unordered_map<std::string, std::string> ret;
+    std::transform(std::begin(args), std::end(args),
+                   std::inserter(ret, ret.end()), [](const auto& kv) {
+                     const auto& k = kv.first;
+                     const auto& v = kv.second;
+                     return std::make_pair(
+                         android::base::StringReplace(k, " kernel.", " ", true),
+                         v);
+                   });
+    return ret;
+  }
+
+  bool PrepareBootEnvImage(const std::string& image_path,
+                           const CuttlefishConfig::InstanceSpecific::BootFlow& flow) {
+    auto tmp_boot_env_image_path = image_path + ".tmp";
     auto uboot_env_path = instance_.PerInstancePath("mkenvimg_input");
-    auto kernel_cmdline =
-        android::base::Join(KernelCommandLineFromConfig(config_), " ");
+    auto kernel_cmdline = android::base::Join(
+        KernelCommandLineFromConfig(config_, instance_), " ");
     // If the bootconfig isn't supported in the guest kernel, the bootconfig
     // args need to be passed in via the uboot env. This won't be an issue for
     // protect kvm which is running a kernel with bootconfig support.
-    if (!config_.bootconfig_supported()) {
-      auto bootconfig_args = android::base::Join(
-          BootconfigArgsFromConfig(config_, instance_), " ");
+    if (!instance_.bootconfig_supported()) {
+      auto bootconfig_args_result =
+          BootconfigArgsFromConfig(config_, instance_);
+      if (!bootconfig_args_result.ok()) {
+        LOG(ERROR) << "Unable to get bootconfig args from config: "
+                   << bootconfig_args_result.error().Message();
+        return false;
+      }
+      auto bootconfig_args = std::move(bootconfig_args_result.value());
+
       // "androidboot.hardware" kernel parameter has changed to "hardware" in
       // bootconfig and needs to be replaced before being used in the kernel
       // cmdline.
-      bootconfig_args = android::base::StringReplace(
-          bootconfig_args, " hardware=", " androidboot.hardware=", true);
+      auto bootconfig_hardware_it = bootconfig_args.find("hardware");
+      if (bootconfig_hardware_it != bootconfig_args.end()) {
+        bootconfig_args["androidboot.hardware"] =
+            bootconfig_hardware_it->second;
+        bootconfig_args.erase(bootconfig_hardware_it);
+      }
+
       // TODO(b/182417593): Until we pass the module parameters through
       // modules.options, we pass them through bootconfig using
       // 'kernel.<key>=<value>' But if we don't support bootconfig, we need to
       // rename them back to the old cmdline version
-      bootconfig_args =
-          android::base::StringReplace(bootconfig_args, " kernel.", " ", true);
+      bootconfig_args = ReplaceKernelBootArgs(bootconfig_args);
+
+      auto bootconfig_result =
+          BootconfigArgsString(bootconfig_args, " ");
+      if (!bootconfig_result.ok()) {
+        LOG(ERROR) << "Unable to get bootconfig args string from config: "
+                   << bootconfig_result.error().Message();
+        return false;
+      }
+
       kernel_cmdline += " ";
-      kernel_cmdline += bootconfig_args;
+      kernel_cmdline += bootconfig_result.value();
     }
-    if (!WriteEnvironment(config_, kernel_cmdline, uboot_env_path)) {
+
+    if (!WriteEnvironment(instance_, flow, kernel_cmdline, uboot_env_path)) {
       LOG(ERROR) << "Unable to write out plaintext env '" << uboot_env_path
                  << ".'";
       return false;
@@ -163,9 +251,9 @@
       return false;
     }
 
-    if (!FileExists(boot_env_image_path) ||
-        ReadFile(boot_env_image_path) != ReadFile(tmp_boot_env_image_path)) {
-      if (!RenameFile(tmp_boot_env_image_path, boot_env_image_path)) {
+    if (!FileExists(image_path) ||
+        ReadFile(image_path) != ReadFile(tmp_boot_env_image_path)) {
+      if (!RenameFile(tmp_boot_env_image_path, image_path).ok()) {
         LOG(ERROR) << "Unable to delete the old env image.";
         return false;
       }
diff --git a/host/commands/assemble_cvd/boot_image_utils.cc b/host/commands/assemble_cvd/boot_image_utils.cc
index ec9e93f..ff749a3 100644
--- a/host/commands/assemble_cvd/boot_image_utils.cc
+++ b/host/commands/assemble_cvd/boot_image_utils.cc
@@ -21,12 +21,14 @@
 #include <unistd.h>
 
 #include <fstream>
+#include <regex>
 #include <sstream>
 
 #include <android-base/logging.h>
 #include <android-base/strings.h>
 
 #include "common/libs/utils/files.h"
+#include "common/libs/utils/result.h"
 #include "common/libs/utils/subprocess.h"
 
 const char TMP_EXTENSION[] = ".tmp";
@@ -58,7 +60,7 @@
 bool DeleteTmpFileIfNotChanged(const std::string& tmp_file, const std::string& current_file) {
   if (!FileExists(current_file) ||
       ReadFile(current_file) != ReadFile(tmp_file)) {
-    if (!RenameFile(tmp_file, current_file)) {
+    if (!RenameFile(tmp_file, current_file).ok()) {
       LOG(ERROR) << "Unable to delete " << current_file;
       return false;
     }
@@ -75,22 +77,9 @@
                          const std::string& original_ramdisk_path,
                          const std::string& new_ramdisk_path,
                          const std::string& build_dir) {
-  int success = execute({"/bin/bash", "-c", HostBinaryPath("lz4") + " -c -d -l " +
-                        original_ramdisk_path + " > " + original_ramdisk_path + CPIO_EXT});
-  CHECK(success == 0) << "Unable to run lz4. Exited with status " << success;
-
+  int success = 0;
   const std::string ramdisk_stage_dir = build_dir + "/" + TMP_RD_DIR;
-  success =
-      mkdir(ramdisk_stage_dir.c_str(), S_IRWXU | S_IRWXG | S_IROTH | S_IXOTH);
-  CHECK(success == 0) << "Could not mkdir \"" << ramdisk_stage_dir
-                      << "\", error was " << strerror(errno);
-
-  success = execute(
-      {"/bin/bash", "-c",
-       "(cd " + ramdisk_stage_dir + " && while " + HostBinaryPath("toybox") +
-           " cpio -idu; do :; done) < " + original_ramdisk_path + CPIO_EXT});
-  CHECK(success == 0) << "Unable to run cd or cpio. Exited with status "
-                      << success;
+  UnpackRamdisk(original_ramdisk_path, ramdisk_stage_dir);
 
   success = execute({"rm", "-rf", ramdisk_stage_dir + "/lib/modules"});
   CHECK(success == 0) << "Could not rmdir \"lib/modules\" in TMP_RD_DIR. "
@@ -117,6 +106,39 @@
 
 }  // namespace
 
+void PackRamdisk(const std::string& ramdisk_stage_dir,
+                 const std::string& output_ramdisk) {
+  int success = execute({"/bin/bash", "-c",
+                         HostBinaryPath("mkbootfs") + " " + ramdisk_stage_dir +
+                             " > " + output_ramdisk + CPIO_EXT});
+  CHECK(success == 0) << "Unable to run cd or cpio. Exited with status "
+                      << success;
+
+  success = execute({"/bin/bash", "-c",
+                     HostBinaryPath("lz4") + " -c -l -12 --favor-decSpeed " +
+                         output_ramdisk + CPIO_EXT + " > " + output_ramdisk});
+  CHECK(success == 0) << "Unable to run lz4. Exited with status " << success;
+}
+
+void UnpackRamdisk(const std::string& original_ramdisk_path,
+                   const std::string& ramdisk_stage_dir) {
+  int success =
+      execute({"/bin/bash", "-c",
+               HostBinaryPath("lz4") + " -c -d -l " + original_ramdisk_path +
+                   " > " + original_ramdisk_path + CPIO_EXT});
+  CHECK(success == 0) << "Unable to run lz4. Exited with status " << success;
+  const auto ret = EnsureDirectoryExists(ramdisk_stage_dir);
+  CHECK(ret.ok()) << ret.error().Message();
+
+  success = execute(
+      {"/bin/bash", "-c",
+       "(cd " + ramdisk_stage_dir + " && while " + HostBinaryPath("toybox") +
+           " cpio -idu; do :; done) < " + original_ramdisk_path + CPIO_EXT});
+  CHECK(success == 0) << "Unable to run cd or cpio. Exited with status "
+                      << success;
+}
+
+
 bool UnpackBootImage(const std::string& boot_image_path,
                      const std::string& unpack_dir) {
   auto unpack_path = HostBinaryPath("unpack_bootimg");
@@ -336,7 +358,8 @@
 
 void RepackGem5BootImage(const std::string& initrd_path,
                          const std::string& bootconfig_path,
-                         const std::string& unpack_dir) {
+                         const std::string& unpack_dir,
+                         const std::string& input_ramdisk_path) {
   // Simulate per-instance what the bootloader would usually do
   // Since on other devices this runs every time, just do it here every time
   std::ofstream final_rd(initrd_path,
@@ -344,7 +367,14 @@
 
   std::ifstream boot_ramdisk(unpack_dir + "/ramdisk",
                              std::ios_base::binary);
-  std::ifstream vendor_boot_ramdisk(unpack_dir +
+  std::string new_ramdisk_path = unpack_dir + "/vendor_ramdisk_repacked";
+  // Test to make sure new ramdisk hasn't already been repacked if input ramdisk is provided
+  if (FileExists(input_ramdisk_path) && !FileExists(new_ramdisk_path)) {
+    RepackVendorRamdisk(input_ramdisk_path,
+                        unpack_dir + "/" + CONCATENATED_VENDOR_RAMDISK,
+                        new_ramdisk_path, unpack_dir);
+  }
+  std::ifstream vendor_boot_ramdisk(FileExists(new_ramdisk_path) ? new_ramdisk_path : unpack_dir +
                                     "/concatenated_vendor_ramdisk",
                                     std::ios_base::binary);
 
@@ -397,4 +427,33 @@
   final_rd << "#BOOTCONFIG\n";
   final_rd.close();
 }
+
+Result<std::string> ReadAndroidVersionFromBootImage(
+    const std::string& boot_image_path) {
+  // temp dir path length is chosen to be larger than sun_path_length (108)
+  char tmp_dir[200];
+  snprintf(tmp_dir, sizeof(tmp_dir), "%s/XXXXXX", StringFromEnv("TEMP", "/tmp").c_str());
+  char* unpack_dir = mkdtemp(tmp_dir);
+  if (!unpack_dir) {
+    return CF_ERR("boot image unpack dir could not be created");
+  }
+  bool unpack_status = UnpackBootImage(boot_image_path, unpack_dir);
+  if (!unpack_status) {
+    RecursivelyRemoveDirectory(unpack_dir);
+    return CF_ERR("\"" + boot_image_path + "\" boot image unpack into \"" +
+                  unpack_dir + "\" failed");
+  }
+
+  // Read the boot params without mutating the mkdtemp buffer (replaces the
+  // previous strcat-then-restore hack on unpack_dir).
+  std::string boot_params =
+      ReadFile(std::string(unpack_dir) + "/boot_params");
+  RecursivelyRemoveDirectory(unpack_dir);
+  std::string os_version = ExtractValue(boot_params, "os version: ");
+  CF_EXPECT(os_version != "", "Could not extract os version from \"" + boot_image_path + "\"");
+  // Dots must be escaped: an unescaped "." matches any character.
+  std::regex re("[1-9][0-9]*\\.[0-9]+\\.[0-9]+");
+  CF_EXPECT(std::regex_match(os_version, re), "Version string is not a valid version \"" + os_version + "\"");
+  return os_version;
+}
 } // namespace cuttlefish
diff --git a/host/commands/assemble_cvd/boot_image_utils.h b/host/commands/assemble_cvd/boot_image_utils.h
index c5a2d1b..6890810 100644
--- a/host/commands/assemble_cvd/boot_image_utils.h
+++ b/host/commands/assemble_cvd/boot_image_utils.h
@@ -18,7 +18,14 @@
 #include <string>
 #include <vector>
 
+#include "common/libs/utils/result.h"
+
 namespace cuttlefish {
+
+// Taken from external/avb/libavb/avb_slot_verify.c; this define is not in the
+// headers
+static constexpr size_t VBMETA_MAX_SIZE = 65536ul;
+
 bool RepackBootImage(const std::string& new_kernel_path,
                      const std::string& boot_image_path,
                      const std::string& new_boot_image_path,
@@ -38,5 +45,13 @@
     const std::string& vendor_boot_image_path, const std::string& unpack_dir);
 void RepackGem5BootImage(const std::string& initrd_path,
                          const std::string& bootconfig_path,
-                         const std::string& unpack_dir);
+                         const std::string& unpack_dir,
+                         const std::string& input_ramdisk_path);
+Result<std::string> ReadAndroidVersionFromBootImage(
+    const std::string& boot_image_path);
+
+void UnpackRamdisk(const std::string& original_ramdisk_path,
+                   const std::string& ramdisk_stage_dir);
+void PackRamdisk(const std::string& ramdisk_stage_dir,
+                 const std::string& output_ramdisk);
 }
diff --git a/host/commands/assemble_cvd/disk_flags.cc b/host/commands/assemble_cvd/disk_flags.cc
index c06d7d7..dc6e4f2 100644
--- a/host/commands/assemble_cvd/disk_flags.cc
+++ b/host/commands/assemble_cvd/disk_flags.cc
@@ -17,6 +17,8 @@
 #include "host/commands/assemble_cvd/disk_flags.h"
 
 #include <android-base/logging.h>
+#include <android-base/parsebool.h>
+#include <android-base/parseint.h>
 #include <android-base/strings.h>
 #include <fruit/fruit.h>
 #include <gflags/gflags.h>
@@ -25,233 +27,385 @@
 #include <fstream>
 
 #include "common/libs/fs/shared_buf.h"
-#include "common/libs/utils/environment.h"
 #include "common/libs/utils/files.h"
 #include "common/libs/utils/size_utils.h"
 #include "common/libs/utils/subprocess.h"
 #include "host/commands/assemble_cvd/boot_config.h"
 #include "host/commands/assemble_cvd/boot_image_utils.h"
 #include "host/commands/assemble_cvd/disk_builder.h"
+#include "host/commands/assemble_cvd/flags_defaults.h"
 #include "host/commands/assemble_cvd/super_image_mixer.h"
+#include "host/commands/assemble_cvd/vendor_dlkm_utils.h"
 #include "host/libs/config/bootconfig_args.h"
 #include "host/libs/config/cuttlefish_config.h"
 #include "host/libs/config/data_image.h"
-#include "host/libs/vm_manager/crosvm_manager.h"
+#include "host/libs/config/inject.h"
+#include "host/libs/config/instance_nums.h"
 #include "host/libs/vm_manager/gem5_manager.h"
 
-// Taken from external/avb/libavb/avb_slot_verify.c; this define is not in the headers
-#define VBMETA_MAX_SIZE 65536ul
+
 // Taken from external/avb/avbtool.py; this define is not in the headers
 #define MAX_AVB_METADATA_SIZE 69632ul
 
 DECLARE_string(system_image_dir);
 
-DEFINE_string(boot_image, "",
+DEFINE_string(boot_image, CF_DEFAULTS_BOOT_IMAGE,
               "Location of cuttlefish boot image. If empty it is assumed to be "
               "boot.img in the directory specified by -system_image_dir.");
 DEFINE_string(
-    init_boot_image, "",
+    init_boot_image, CF_DEFAULTS_INIT_BOOT_IMAGE,
     "Location of cuttlefish init boot image. If empty it is assumed to "
     "be init_boot.img in the directory specified by -system_image_dir.");
-DEFINE_string(data_image, "", "Location of the data partition image.");
-DEFINE_string(super_image, "", "Location of the super partition image.");
-DEFINE_string(misc_image, "",
+DEFINE_string(data_image, CF_DEFAULTS_DATA_IMAGE,
+              "Location of the data partition image.");
+DEFINE_string(super_image, CF_DEFAULTS_SUPER_IMAGE,
+              "Location of the super partition image.");
+DEFINE_string(misc_image, CF_DEFAULTS_MISC_IMAGE,
               "Location of the misc partition image. If the image does not "
               "exist, a blank new misc partition image is created.");
-DEFINE_string(metadata_image, "", "Location of the metadata partition image "
+DEFINE_string(misc_info_txt, "", "Location of the misc_info.txt file.");
+DEFINE_string(metadata_image, CF_DEFAULTS_METADATA_IMAGE,
+              "Location of the metadata partition image "
               "to be generated.");
-DEFINE_string(vendor_boot_image, "",
-              "Location of cuttlefish vendor boot image. If empty it is assumed to "
-              "be vendor_boot.img in the directory specified by -system_image_dir.");
-DEFINE_string(vbmeta_image, "",
+DEFINE_string(
+    vendor_boot_image, CF_DEFAULTS_VENDOR_BOOT_IMAGE,
+    "Location of cuttlefish vendor boot image. If empty it is assumed to "
+    "be vendor_boot.img in the directory specified by -system_image_dir.");
+DEFINE_string(vbmeta_image, CF_DEFAULTS_VBMETA_IMAGE,
               "Location of cuttlefish vbmeta image. If empty it is assumed to "
               "be vbmeta.img in the directory specified by -system_image_dir.");
-DEFINE_string(vbmeta_system_image, "",
-              "Location of cuttlefish vbmeta_system image. If empty it is assumed to "
-              "be vbmeta_system.img in the directory specified by -system_image_dir.");
-DEFINE_string(otheros_esp_image, "",
-              "Location of cuttlefish esp image. If the image does not exist, "
-              "and --otheros_root_image is specified, an esp partition image "
-              "is created with default bootloaders.");
-DEFINE_string(otheros_kernel_path, "",
-              "Location of cuttlefish otheros kernel.");
-DEFINE_string(otheros_initramfs_path, "",
-              "Location of cuttlefish otheros initramfs.img.");
-DEFINE_string(otheros_root_image, "",
-              "Location of cuttlefish otheros root filesystem image.");
+DEFINE_string(
+    vbmeta_system_image, CF_DEFAULTS_VBMETA_SYSTEM_IMAGE,
+    "Location of cuttlefish vbmeta_system image. If empty it is assumed to "
+    "be vbmeta_system.img in the directory specified by -system_image_dir.");
+DEFINE_string(
+    vbmeta_vendor_dlkm_image, CF_DEFAULTS_VBMETA_VENDOR_DLKM_IMAGE,
+    "Location of cuttlefish vbmeta_vendor_dlkm image. If empty it is assumed "
+    "to "
+    "be vbmeta_vendor_dlkm.img in the directory specified by "
+    "-system_image_dir.");
 
-DEFINE_int32(blank_metadata_image_mb, 16,
-             "The size of the blank metadata image to generate, MB.");
-DEFINE_int32(blank_sdcard_image_mb, 2048,
-             "If enabled, the size of the blank sdcard image to generate, MB.");
+DEFINE_string(linux_kernel_path, CF_DEFAULTS_LINUX_KERNEL_PATH,
+              "Location of linux kernel for cuttlefish otheros flow.");
+DEFINE_string(linux_initramfs_path, CF_DEFAULTS_LINUX_INITRAMFS_PATH,
+              "Location of linux initramfs.img for cuttlefish otheros flow.");
+DEFINE_string(linux_root_image, CF_DEFAULTS_LINUX_ROOT_IMAGE,
+              "Location of linux root filesystem image for cuttlefish otheros flow.");
+
+DEFINE_string(fuchsia_zedboot_path, CF_DEFAULTS_FUCHSIA_ZEDBOOT_PATH,
+              "Location of fuchsia zedboot path for cuttlefish otheros flow.");
+DEFINE_string(fuchsia_multiboot_bin_path, CF_DEFAULTS_FUCHSIA_MULTIBOOT_BIN_PATH,
+              "Location of fuchsia multiboot bin path for cuttlefish otheros flow.");
+DEFINE_string(fuchsia_root_image, CF_DEFAULTS_FUCHSIA_ROOT_IMAGE,
+              "Location of fuchsia root filesystem image for cuttlefish otheros flow.");
+
+DEFINE_string(custom_partition_path, CF_DEFAULTS_CUSTOM_PARTITION_PATH,
+              "Location of custom image that will be passed as a \"custom\" partition"
+              "to rootfs and can be used by /dev/block/by-name/custom");
+
+DEFINE_string(blank_metadata_image_mb, CF_DEFAULTS_BLANK_METADATA_IMAGE_MB,
+              "The size of the blank metadata image to generate, MB.");
+DEFINE_string(
+    blank_sdcard_image_mb, CF_DEFAULTS_BLANK_SDCARD_IMAGE_MB,
+    "If enabled, the size of the blank sdcard image to generate, MB.");
 
 DECLARE_string(ap_rootfs_image);
 DECLARE_string(bootloader);
-DECLARE_bool(use_sdcard);
 DECLARE_string(initramfs_path);
 DECLARE_string(kernel_path);
 DECLARE_bool(resume);
-DECLARE_bool(protected_vm);
+DECLARE_bool(use_overlay);
 
 namespace cuttlefish {
 
+using APBootFlow = CuttlefishConfig::InstanceSpecific::APBootFlow;
 using vm_manager::Gem5Manager;
 
 Result<void> ResolveInstanceFiles() {
   CF_EXPECT(!FLAGS_system_image_dir.empty(),
             "--system_image_dir must be specified.");
 
-  // If user did not specify location of either of these files, expect them to
-  // be placed in --system_image_dir location.
-  std::string default_boot_image = FLAGS_system_image_dir + "/boot.img";
+  std::vector<std::string> system_image_dir =
+      android::base::Split(FLAGS_system_image_dir, ",");
+  std::string default_boot_image = "";
+  std::string default_init_boot_image = "";
+  std::string default_data_image = "";
+  std::string default_metadata_image = "";
+  std::string default_super_image = "";
+  std::string default_misc_image = "";
+  std::string default_misc_info_txt = "";
+  std::string default_vendor_boot_image = "";
+  std::string default_vbmeta_image = "";
+  std::string default_vbmeta_system_image = "";
+  std::string default_vbmeta_vendor_dlkm_image = "";
+
+  std::string cur_system_image_dir;
+  std::string comma_str = "";
+  auto instance_nums =
+      CF_EXPECT(InstanceNumsCalculator().FromGlobalGflags().Calculate());
+  for (size_t instance_index = 0; instance_index < instance_nums.size(); instance_index++) {
+    if (instance_index < system_image_dir.size()) {
+      cur_system_image_dir = system_image_dir[instance_index];
+    } else {
+      // legacy variable or out of boundary. Vectorize by copy [0] to all instances
+      cur_system_image_dir = system_image_dir[0];
+    }
+    if (instance_index > 0) {
+      comma_str = ",";
+    }
+
+    // If user did not specify location of either of these files, expect them to
+    // be placed in --system_image_dir location.
+    default_boot_image += comma_str + cur_system_image_dir + "/boot.img";
+    default_init_boot_image += comma_str + cur_system_image_dir + "/init_boot.img";
+    default_data_image += comma_str + cur_system_image_dir + "/userdata.img";
+    default_metadata_image += comma_str + cur_system_image_dir + "/metadata.img";
+    default_super_image += comma_str + cur_system_image_dir + "/super.img";
+    default_misc_image += comma_str + cur_system_image_dir + "/misc.img";
+    default_misc_info_txt +=
+        comma_str + cur_system_image_dir + "/misc_info.txt";
+    default_vendor_boot_image += comma_str + cur_system_image_dir + "/vendor_boot.img";
+    default_vbmeta_image += comma_str + cur_system_image_dir + "/vbmeta.img";
+    default_vbmeta_system_image += comma_str + cur_system_image_dir + "/vbmeta_system.img";
+    default_vbmeta_vendor_dlkm_image +=
+        comma_str + cur_system_image_dir + "/vbmeta_vendor_dlkm.img";
+  }
   SetCommandLineOptionWithMode("boot_image", default_boot_image.c_str(),
                                google::FlagSettingMode::SET_FLAGS_DEFAULT);
-  std::string default_init_boot_image =
-      FLAGS_system_image_dir + "/init_boot.img";
   SetCommandLineOptionWithMode("init_boot_image",
                                default_init_boot_image.c_str(),
                                google::FlagSettingMode::SET_FLAGS_DEFAULT);
-  std::string default_data_image = FLAGS_system_image_dir + "/userdata.img";
   SetCommandLineOptionWithMode("data_image", default_data_image.c_str(),
                                google::FlagSettingMode::SET_FLAGS_DEFAULT);
-  std::string default_metadata_image = FLAGS_system_image_dir + "/metadata.img";
   SetCommandLineOptionWithMode("metadata_image", default_metadata_image.c_str(),
                                google::FlagSettingMode::SET_FLAGS_DEFAULT);
-  std::string default_super_image = FLAGS_system_image_dir + "/super.img";
   SetCommandLineOptionWithMode("super_image", default_super_image.c_str(),
                                google::FlagSettingMode::SET_FLAGS_DEFAULT);
-  std::string default_misc_image = FLAGS_system_image_dir + "/misc.img";
   SetCommandLineOptionWithMode("misc_image", default_misc_image.c_str(),
                                google::FlagSettingMode::SET_FLAGS_DEFAULT);
-  std::string default_esp_image = FLAGS_system_image_dir + "/esp.img";
-  SetCommandLineOptionWithMode("otheros_esp_image", default_esp_image.c_str(),
+  SetCommandLineOptionWithMode("misc_info_txt", default_misc_info_txt.c_str(),
                                google::FlagSettingMode::SET_FLAGS_DEFAULT);
-  std::string default_vendor_boot_image = FLAGS_system_image_dir
-                                        + "/vendor_boot.img";
   SetCommandLineOptionWithMode("vendor_boot_image",
                                default_vendor_boot_image.c_str(),
                                google::FlagSettingMode::SET_FLAGS_DEFAULT);
-  std::string default_vbmeta_image = FLAGS_system_image_dir + "/vbmeta.img";
   SetCommandLineOptionWithMode("vbmeta_image", default_vbmeta_image.c_str(),
                                google::FlagSettingMode::SET_FLAGS_DEFAULT);
-  std::string default_vbmeta_system_image = FLAGS_system_image_dir
-                                          + "/vbmeta_system.img";
   SetCommandLineOptionWithMode("vbmeta_system_image",
                                default_vbmeta_system_image.c_str(),
                                google::FlagSettingMode::SET_FLAGS_DEFAULT);
+  SetCommandLineOptionWithMode("vbmeta_vendor_dlkm_image",
+                               default_vbmeta_vendor_dlkm_image.c_str(),
+                               google::FlagSettingMode::SET_FLAGS_DEFAULT);
 
   return {};
 }
 
-std::vector<ImagePartition> GetOsCompositeDiskConfig() {
+std::vector<ImagePartition> linux_composite_disk_config(
+    const CuttlefishConfig::InstanceSpecific& instance) {
   std::vector<ImagePartition> partitions;
+
   partitions.push_back(ImagePartition{
-      .label = "misc",
-      .image_file_path = AbsolutePath(FLAGS_misc_image),
-      .read_only = true,
+      .label = "linux_esp",
+      .image_file_path = AbsolutePath(instance.otheros_esp_image_path()),
+      .type = kEfiSystemPartition,
+      .read_only = FLAGS_use_overlay,
   });
   partitions.push_back(ImagePartition{
-      .label = "boot_a",
-      .image_file_path = AbsolutePath(FLAGS_boot_image),
-      .read_only = true,
+      .label = "linux_root",
+      .image_file_path = AbsolutePath(instance.linux_root_image()),
+      .read_only = FLAGS_use_overlay,
   });
-  partitions.push_back(ImagePartition{
-      .label = "boot_b",
-      .image_file_path = AbsolutePath(FLAGS_boot_image),
-      .read_only = true,
-  });
-  partitions.push_back(ImagePartition{
-      .label = "init_boot_a",
-      .image_file_path = AbsolutePath(FLAGS_init_boot_image),
-      .read_only = true,
-  });
-  partitions.push_back(ImagePartition{
-      .label = "init_boot_b",
-      .image_file_path = AbsolutePath(FLAGS_init_boot_image),
-      .read_only = true,
-  });
-  partitions.push_back(ImagePartition{
-      .label = "vendor_boot_a",
-      .image_file_path = AbsolutePath(FLAGS_vendor_boot_image),
-      .read_only = true,
-  });
-  partitions.push_back(ImagePartition{
-      .label = "vendor_boot_b",
-      .image_file_path = AbsolutePath(FLAGS_vendor_boot_image),
-      .read_only = true,
-  });
-  partitions.push_back(ImagePartition{
-      .label = "vbmeta_a",
-      .image_file_path = AbsolutePath(FLAGS_vbmeta_image),
-      .read_only = true,
-  });
-  partitions.push_back(ImagePartition{
-      .label = "vbmeta_b",
-      .image_file_path = AbsolutePath(FLAGS_vbmeta_image),
-      .read_only = true,
-  });
-  partitions.push_back(ImagePartition{
-      .label = "vbmeta_system_a",
-      .image_file_path = AbsolutePath(FLAGS_vbmeta_system_image),
-      .read_only = true,
-  });
-  partitions.push_back(ImagePartition{
-      .label = "vbmeta_system_b",
-      .image_file_path = AbsolutePath(FLAGS_vbmeta_system_image),
-      .read_only = true,
-  });
-  partitions.push_back(ImagePartition{
-      .label = "super",
-      .image_file_path = AbsolutePath(FLAGS_super_image),
-      .read_only = true,
-  });
-  partitions.push_back(ImagePartition{
-      .label = "userdata",
-      .image_file_path = AbsolutePath(FLAGS_data_image),
-      .read_only = true,
-  });
-  partitions.push_back(ImagePartition{
-      .label = "metadata",
-      .image_file_path = AbsolutePath(FLAGS_metadata_image),
-      .read_only = true,
-  });
-  if (!FLAGS_otheros_root_image.empty()) {
-    partitions.push_back(ImagePartition{
-        .label = "otheros_esp",
-        .image_file_path = AbsolutePath(FLAGS_otheros_esp_image),
-        .type = kEfiSystemPartition,
-        .read_only = true,
-    });
-    partitions.push_back(ImagePartition{
-        .label = "otheros_root",
-        .image_file_path = AbsolutePath(FLAGS_otheros_root_image),
-        .read_only = true,
-    });
-  }
-  if (!FLAGS_ap_rootfs_image.empty()) {
-    partitions.push_back(ImagePartition{
-        .label = "ap_rootfs",
-        .image_file_path = AbsolutePath(FLAGS_ap_rootfs_image),
-        .read_only = true,
-    });
-  }
+
   return partitions;
 }
 
-DiskBuilder OsCompositeDiskBuilder(const CuttlefishConfig& config) {
+std::vector<ImagePartition> fuchsia_composite_disk_config(
+    const CuttlefishConfig::InstanceSpecific& instance) {
+  std::vector<ImagePartition> partitions;
+  // The Fuchsia OS composite disk carries only an EFI system partition;
+  // it is marked read-only whenever writes go through an overlay.
+  partitions.push_back(ImagePartition{
+      .label = "fuchsia_esp",
+      .image_file_path = AbsolutePath(instance.otheros_esp_image_path()),
+      .type = kEfiSystemPartition,
+      .read_only = FLAGS_use_overlay,
+  });
+  return partitions;
+}
+
+std::vector<ImagePartition> android_composite_disk_config(
+    const CuttlefishConfig::InstanceSpecific& instance) {
+  std::vector<ImagePartition> partitions;
+
+  partitions.push_back(ImagePartition{
+      .label = "misc",
+      .image_file_path = AbsolutePath(instance.new_misc_image()),
+      .read_only = FLAGS_use_overlay,
+  });
+  partitions.push_back(ImagePartition{
+      .label = "boot_a",
+      .image_file_path = AbsolutePath(instance.new_boot_image()),
+      .read_only = FLAGS_use_overlay,
+  });
+  partitions.push_back(ImagePartition{
+      .label = "boot_b",
+      .image_file_path = AbsolutePath(instance.new_boot_image()),
+      .read_only = FLAGS_use_overlay,
+  });
+  const auto init_boot_path = instance.init_boot_image();
+  if (FileExists(init_boot_path)) {
+    partitions.push_back(ImagePartition{
+        .label = "init_boot_a",
+        .image_file_path = AbsolutePath(init_boot_path),
+        .read_only = FLAGS_use_overlay,
+    });
+    partitions.push_back(ImagePartition{
+        .label = "init_boot_b",
+        .image_file_path = AbsolutePath(init_boot_path),
+        .read_only = FLAGS_use_overlay,
+    });
+  }
+  partitions.push_back(ImagePartition{
+      .label = "vendor_boot_a",
+      .image_file_path = AbsolutePath(instance.new_vendor_boot_image()),
+      .read_only = FLAGS_use_overlay,
+  });
+  partitions.push_back(ImagePartition{
+      .label = "vendor_boot_b",
+      .image_file_path = AbsolutePath(instance.new_vendor_boot_image()),
+      .read_only = FLAGS_use_overlay,
+  });
+  partitions.push_back(ImagePartition{
+      .label = "vbmeta_a",
+      .image_file_path = AbsolutePath(instance.vbmeta_image()),
+      .read_only = FLAGS_use_overlay,
+  });
+  partitions.push_back(ImagePartition{
+      .label = "vbmeta_b",
+      .image_file_path = AbsolutePath(instance.vbmeta_image()),
+      .read_only = FLAGS_use_overlay,
+  });
+  partitions.push_back(ImagePartition{
+      .label = "vbmeta_system_a",
+      .image_file_path = AbsolutePath(instance.vbmeta_system_image()),
+      .read_only = FLAGS_use_overlay,
+  });
+  partitions.push_back(ImagePartition{
+      .label = "vbmeta_system_b",
+      .image_file_path = AbsolutePath(instance.vbmeta_system_image()),
+      .read_only = FLAGS_use_overlay,
+  });
+  auto vbmeta_vendor_dlkm_img = instance.new_vbmeta_vendor_dlkm_image();
+  if (!FileExists(vbmeta_vendor_dlkm_img)) {
+    vbmeta_vendor_dlkm_img = instance.vbmeta_vendor_dlkm_image();
+  }
+  if (FileExists(vbmeta_vendor_dlkm_img)) {
+    partitions.push_back(ImagePartition{
+        .label = "vbmeta_vendor_dlkm_a",
+        .image_file_path = AbsolutePath(vbmeta_vendor_dlkm_img),
+        .read_only = FLAGS_use_overlay,
+    });
+    partitions.push_back(ImagePartition{
+        .label = "vbmeta_vendor_dlkm_b",
+        .image_file_path = AbsolutePath(vbmeta_vendor_dlkm_img),
+        .read_only = FLAGS_use_overlay,
+    });
+  }
+  auto super_image = instance.new_super_image();
+  if (!FileExists(super_image)) {
+    super_image = instance.super_image();
+  }
+  partitions.push_back(ImagePartition{
+      .label = "super",
+      .image_file_path = AbsolutePath(super_image),
+      .read_only = FLAGS_use_overlay,
+  });
+  partitions.push_back(ImagePartition{
+      .label = "userdata",
+      .image_file_path = AbsolutePath(instance.data_image()),
+      .read_only = FLAGS_use_overlay,
+  });
+  partitions.push_back(ImagePartition{
+      .label = "metadata",
+      .image_file_path = AbsolutePath(instance.new_metadata_image()),
+      .read_only = FLAGS_use_overlay,
+  });
+  const auto custom_partition_path = instance.custom_partition_path();
+  if (!custom_partition_path.empty()) {
+    partitions.push_back(ImagePartition{
+        .label = "custom",
+        .image_file_path = AbsolutePath(custom_partition_path),
+        .read_only = FLAGS_use_overlay,
+    });
+  }
+
+  return partitions;
+}
+
+std::vector<ImagePartition> GetApCompositeDiskConfig(const CuttlefishConfig& config,
+    const CuttlefishConfig::InstanceSpecific& instance) {
+  std::vector<ImagePartition> partitions;
+  // Grub-based AP boot flows boot via an additional ESP partition.
+  if (instance.ap_boot_flow() == APBootFlow::Grub) {
+    partitions.push_back(ImagePartition{
+        .label = "ap_esp",
+        .image_file_path = AbsolutePath(instance.ap_esp_image_path()),
+        .read_only = FLAGS_use_overlay,
+    });
+  }
+  // The rootfs comes from the config-wide --ap_rootfs_image flag.
+  partitions.push_back(ImagePartition{
+      .label = "ap_rootfs",
+      .image_file_path = AbsolutePath(config.ap_rootfs_image()),
+      .read_only = FLAGS_use_overlay,
+  });
+
+  return partitions;
+}
+
+std::vector<ImagePartition> GetOsCompositeDiskConfig(
+    const CuttlefishConfig::InstanceSpecific& instance) {
+  // Select the partition layout matching the instance's boot flow.
+  switch (instance.boot_flow()) {
+    case CuttlefishConfig::InstanceSpecific::BootFlow::Android:
+      return android_composite_disk_config(instance);
+    case CuttlefishConfig::InstanceSpecific::BootFlow::Linux:
+      return linux_composite_disk_config(instance);
+    case CuttlefishConfig::InstanceSpecific::BootFlow::Fuchsia:
+      return fuchsia_composite_disk_config(instance);
+  }
+  // Unreachable while every BootFlow value is handled above; keeps the
+  // function well-formed (-Wreturn-type) if new flows are ever added.
+  return {};
+}
+
+DiskBuilder OsCompositeDiskBuilder(const CuttlefishConfig& config,
+    const CuttlefishConfig::InstanceSpecific& instance) {
   return DiskBuilder()
-      .Partitions(GetOsCompositeDiskConfig())
+      .Partitions(GetOsCompositeDiskConfig(instance))
       .VmManager(config.vm_manager())
-      .CrosvmPath(config.crosvm_binary())
-      .ConfigPath(config.AssemblyPath("os_composite_disk_config.txt"))
-      .HeaderPath(config.AssemblyPath("os_composite_gpt_header.img"))
-      .FooterPath(config.AssemblyPath("os_composite_gpt_footer.img"))
-      .CompositeDiskPath(config.os_composite_disk_path())
+      .CrosvmPath(instance.crosvm_binary())
+      .ConfigPath(instance.PerInstancePath("os_composite_disk_config.txt"))
+      .HeaderPath(instance.PerInstancePath("os_composite_gpt_header.img"))
+      .FooterPath(instance.PerInstancePath("os_composite_gpt_footer.img"))
+      .CompositeDiskPath(instance.os_composite_disk_path())
+      .ResumeIfPossible(FLAGS_resume);
+}
+
+DiskBuilder ApCompositeDiskBuilder(const CuttlefishConfig& config,
+    const CuttlefishConfig::InstanceSpecific& instance) {
+  return DiskBuilder()
+      .Partitions(GetApCompositeDiskConfig(config, instance))
+      .VmManager(config.vm_manager())
+      .CrosvmPath(instance.crosvm_binary())
+      .ConfigPath(instance.PerInstancePath("ap_composite_disk_config.txt"))
+      .HeaderPath(instance.PerInstancePath("ap_composite_gpt_header.img"))
+      .FooterPath(instance.PerInstancePath("ap_composite_gpt_footer.img"))
+      .CompositeDiskPath(instance.ap_composite_disk_path())
       .ResumeIfPossible(FLAGS_resume);
 }
 
 std::vector<ImagePartition> persistent_composite_disk_config(
-    const CuttlefishConfig& config,
     const CuttlefishConfig::InstanceSpecific& instance) {
   std::vector<ImagePartition> partitions;
 
@@ -266,14 +420,14 @@
       .label = "vbmeta",
       .image_file_path = AbsolutePath(instance.vbmeta_path()),
   });
-  if (!FLAGS_protected_vm) {
+  if (!instance.protected_vm()) {
     partitions.push_back(ImagePartition{
         .label = "frp",
         .image_file_path =
             AbsolutePath(instance.factory_reset_protected_path()),
     });
   }
-  if (config.bootconfig_supported()) {
+  if (instance.bootconfig_supported()) {
     partitions.push_back(ImagePartition{
         .label = "bootconfig",
         .image_file_path = AbsolutePath(instance.persistent_bootconfig_path()),
@@ -282,8 +436,27 @@
   return partitions;
 }
 
+std::vector<ImagePartition> persistent_ap_composite_disk_config(
+    const CuttlefishConfig::InstanceSpecific& instance) {
+  std::vector<ImagePartition> partitions;
+
+  // Note that if the position of uboot_env changes, the environment for
+  // u-boot must be updated as well (see boot_config.cc and
+  // cuttlefish.fragment in external/u-boot).
+  partitions.push_back(ImagePartition{
+      .label = "uboot_env",
+      .image_file_path = AbsolutePath(instance.ap_uboot_env_image_path()),
+  });
+  partitions.push_back(ImagePartition{
+      .label = "vbmeta",
+      .image_file_path = AbsolutePath(instance.ap_vbmeta_path()),
+  });
+
+  return partitions;
+}
+
 static uint64_t AvailableSpaceAtPath(const std::string& path) {
-  struct statvfs vfs;
+  struct statvfs vfs {};
   if (statvfs(path.c_str(), &vfs) != 0) {
     int error_num = errno;
     LOG(ERROR) << "Could not find space available at " << path << ", error was "
@@ -294,34 +467,90 @@
   return static_cast<uint64_t>(vfs.f_frsize) * vfs.f_bavail;
 }
 
-class BootImageRepacker : public SetupFeature {
+class KernelRamdiskRepacker : public SetupFeature {
  public:
-  INJECT(BootImageRepacker(const CuttlefishConfig& config)) : config_(config) {}
+  INJECT(
+      KernelRamdiskRepacker(const CuttlefishConfig& config,
+                            const CuttlefishConfig::InstanceSpecific& instance))
+      : config_(config), instance_(instance) {}
 
   // SetupFeature
-  std::string Name() const override { return "BootImageRepacker"; }
+  std::string Name() const override { return "KernelRamdiskRepacker"; }
   std::unordered_set<SetupFeature*> Dependencies() const override { return {}; }
   bool Enabled() const override {
     // If we are booting a protected VM, for now, assume that image repacking
     // isn't trusted. Repacking requires resigning the image and keys from an
     // android host aren't trusted.
-    return !config_.protected_vm();
+    return !instance_.protected_vm();
   }
 
  protected:
+  bool RepackVendorDLKM(const std::string& superimg_build_dir,
+                        const std::string& vendor_dlkm_build_dir,
+                        const std::string& ramdisk_path) {
+    const auto new_vendor_dlkm_img =
+        superimg_build_dir + "/vendor_dlkm_repacked.img";
+    const auto tmp_vendor_dlkm_img = new_vendor_dlkm_img + ".tmp";
+    if (!EnsureDirectoryExists(vendor_dlkm_build_dir).ok()) {
+      LOG(ERROR) << "Failed to create directory " << vendor_dlkm_build_dir;
+      return false;
+    }
+    const auto ramdisk_stage_dir = instance_.instance_dir() + "/ramdisk_staged";
+    if (!SplitRamdiskModules(ramdisk_path, ramdisk_stage_dir,
+                             vendor_dlkm_build_dir)) {
+      LOG(ERROR) << "Failed to move ramdisk modules to vendor_dlkm";
+      return false;
+    }
+    // TODO(b/149866755) For now, we assume that vendor_dlkm is ext4. Add
+    // logic to handle EROFS once the feature stablizes.
+    if (!BuildVendorDLKM(vendor_dlkm_build_dir, false, tmp_vendor_dlkm_img)) {
+      LOG(ERROR) << "Failed to build vendor_dlkm image from "
+                 << vendor_dlkm_build_dir;
+      return false;
+    }
+    if (ReadFile(tmp_vendor_dlkm_img) == ReadFile(new_vendor_dlkm_img)) {
+      LOG(INFO) << "vendor_dlkm unchanged, skip super image rebuilding.";
+      return true;
+    }
+    if (!RenameFile(tmp_vendor_dlkm_img, new_vendor_dlkm_img).ok()) {
+      return false;
+    }
+    const auto new_super_img = instance_.new_super_image();
+    if (!Copy(instance_.super_image(), new_super_img)) {
+      PLOG(ERROR) << "Failed to copy super image " << instance_.super_image()
+                  << " to " << new_super_img;
+      return false;
+    }
+    if (!RepackSuperWithVendorDLKM(new_super_img, new_vendor_dlkm_img)) {
+      LOG(ERROR) << "Failed to repack super image with new vendor dlkm image.";
+      return false;
+    }
+    if (!RebuildVbmetaVendor(new_vendor_dlkm_img,
+                             instance_.new_vbmeta_vendor_dlkm_image())) {
+      LOG(ERROR) << "Failed to rebuild vbmeta vendor.";
+      return false;
+    }
+    SetCommandLineOptionWithMode("super_image", new_super_img.c_str(),
+                                 google::FlagSettingMode::SET_FLAGS_DEFAULT);
+    SetCommandLineOptionWithMode(
+        "vbmeta_vendor_dlkm_image",
+        instance_.new_vbmeta_vendor_dlkm_image().c_str(),
+        google::FlagSettingMode::SET_FLAGS_DEFAULT);
+    return true;
+  }
   bool Setup() override {
-    if (!FileHasContent(FLAGS_boot_image)) {
-      LOG(ERROR) << "File not found: " << FLAGS_boot_image;
+    if (!FileHasContent(instance_.boot_image())) {
+      LOG(ERROR) << "File not found: " << instance_.boot_image();
       return false;
     }
     // The init_boot partition is be optional for testing boot.img
     // with the ramdisk inside.
-    if (!FileHasContent(FLAGS_init_boot_image)) {
-      LOG(WARNING) << "File not found: " << FLAGS_init_boot_image;
+    if (!FileHasContent(instance_.init_boot_image())) {
+      LOG(WARNING) << "File not found: " << instance_.init_boot_image();
     }
 
-    if (!FileHasContent(FLAGS_vendor_boot_image)) {
-      LOG(ERROR) << "File not found: " << FLAGS_vendor_boot_image;
+    if (!FileHasContent(instance_.vendor_boot_image())) {
+      LOG(ERROR) << "File not found: " << instance_.vendor_boot_image();
       return false;
     }
 
@@ -330,13 +559,12 @@
     // large to be repacked. Skip repack of boot.img on Gem5, as we need to be
     // able to extract the ramdisk.img in a later stage and so this step must
     // not fail (..and the repacked kernel wouldn't be used anyway).
-    if (FLAGS_kernel_path.size() &&
+    if (instance_.kernel_path().size() &&
         config_.vm_manager() != Gem5Manager::name()) {
-      const std::string new_boot_image_path =
-          config_.AssemblyPath("boot_repacked.img");
+      const std::string new_boot_image_path = instance_.new_boot_image();
       bool success =
-          RepackBootImage(FLAGS_kernel_path, FLAGS_boot_image,
-                          new_boot_image_path, config_.assembly_dir());
+          RepackBootImage(instance_.kernel_path(), instance_.boot_image(),
+                          new_boot_image_path, instance_.instance_dir());
       if (!success) {
         LOG(ERROR) << "Failed to regenerate the boot image with the new kernel";
         return false;
@@ -345,15 +573,28 @@
                                    google::FlagSettingMode::SET_FLAGS_DEFAULT);
     }
 
-    if (FLAGS_kernel_path.size() || FLAGS_initramfs_path.size()) {
+    if (instance_.kernel_path().size() || instance_.initramfs_path().size()) {
       const std::string new_vendor_boot_image_path =
-          config_.AssemblyPath("vendor_boot_repacked.img");
+          instance_.new_vendor_boot_image();
       // Repack the vendor boot images if kernels and/or ramdisks are passed in.
-      if (FLAGS_initramfs_path.size()) {
+      if (instance_.initramfs_path().size()) {
+        const auto superimg_build_dir = instance_.instance_dir() + "/superimg";
+        const auto ramdisk_repacked =
+            instance_.instance_dir() + "/ramdisk_repacked";
+        if (!Copy(instance_.initramfs_path(), ramdisk_repacked)) {
+          LOG(ERROR) << "Failed to copy " << instance_.initramfs_path()
+                     << " to " << ramdisk_repacked;
+          return false;
+        }
+        const auto vendor_dlkm_build_dir = superimg_build_dir + "/vendor_dlkm";
+        if (!RepackVendorDLKM(superimg_build_dir, vendor_dlkm_build_dir,
+                              ramdisk_repacked)) {
+          return false;
+        }
         bool success = RepackVendorBootImage(
-            FLAGS_initramfs_path, FLAGS_vendor_boot_image,
+            ramdisk_repacked, instance_.vendor_boot_image(),
             new_vendor_boot_image_path, config_.assembly_dir(),
-            config_.bootconfig_supported());
+            instance_.bootconfig_supported());
         if (!success) {
           LOG(ERROR) << "Failed to regenerate the vendor boot image with the "
                         "new ramdisk";
@@ -362,8 +603,8 @@
           // If it's just the kernel, repack the vendor boot image without a
           // ramdisk.
           bool success = RepackVendorBootImageWithEmptyRamdisk(
-              FLAGS_vendor_boot_image, new_vendor_boot_image_path,
-              config_.assembly_dir(), config_.bootconfig_supported());
+              instance_.vendor_boot_image(), new_vendor_boot_image_path,
+              config_.assembly_dir(), instance_.bootconfig_supported());
           if (!success) {
             LOG(ERROR) << "Failed to regenerate the vendor boot image without "
                           "a ramdisk";
@@ -380,15 +621,14 @@
 
  private:
   const CuttlefishConfig& config_;
+  const CuttlefishConfig::InstanceSpecific& instance_;
 };
 
 class Gem5ImageUnpacker : public SetupFeature {
  public:
-  INJECT(Gem5ImageUnpacker(
-      const CuttlefishConfig& config,
-      BootImageRepacker& bir))
-      : config_(config),
-        bir_(bir) {}
+  INJECT(Gem5ImageUnpacker(const CuttlefishConfig& config,
+                           KernelRamdiskRepacker& bir))
+      : config_(config), bir_(bir) {}
 
   // SetupFeature
   std::string Name() const override { return "Gem5ImageUnpacker"; }
@@ -405,7 +645,10 @@
   }
 
  protected:
-  bool Setup() override {
+  Result<void> ResultSetup() override {
+    const CuttlefishConfig::InstanceSpecific& instance_ =
+        config_.ForDefaultInstance();
+
     /* Unpack the original or repacked boot and vendor boot ramdisks, so that
      * we have access to the baked bootconfig and raw compressed ramdisks.
      * This allows us to emulate what a bootloader would normally do, which
@@ -415,74 +658,51 @@
      * does the parts which are instance agnostic.
      */
 
-    if (!FileHasContent(FLAGS_boot_image)) {
-      LOG(ERROR) << "File not found: " << FLAGS_boot_image;
-      return false;
-    }
-    // The init_boot partition is be optional for testing boot.img
-    // with the ramdisk inside.
-    if (!FileHasContent(FLAGS_init_boot_image)) {
-      LOG(WARNING) << "File not found: " << FLAGS_init_boot_image;
-    }
-
-    if (!FileHasContent(FLAGS_vendor_boot_image)) {
-      LOG(ERROR) << "File not found: " << FLAGS_vendor_boot_image;
-      return false;
-    }
+    CF_EXPECT(FileHasContent(instance_.boot_image()), instance_.boot_image());
 
     const std::string unpack_dir = config_.assembly_dir();
-
-    bool success = UnpackBootImage(FLAGS_init_boot_image, unpack_dir);
-    if (!success) {
-      LOG(ERROR) << "Failed to extract the init boot image";
-      return false;
+    // The init_boot partition is optional for testing boot.img
+    // with the ramdisk inside.
+    if (!FileHasContent(instance_.init_boot_image())) {
+      LOG(WARNING) << "File not found: " << instance_.init_boot_image();
+    } else {
+      CF_EXPECT(UnpackBootImage(instance_.init_boot_image(), unpack_dir),
+                "Failed to extract the init boot image");
     }
 
-    success = UnpackVendorBootImageIfNotUnpacked(FLAGS_vendor_boot_image,
-                                                 unpack_dir);
-    if (!success) {
-      LOG(ERROR) << "Failed to extract the vendor boot image";
-      return false;
-    }
+    CF_EXPECT(FileHasContent(instance_.vendor_boot_image()),
+              instance_.vendor_boot_image());
+
+    CF_EXPECT(UnpackVendorBootImageIfNotUnpacked(instance_.vendor_boot_image(),
+                                                 unpack_dir),
+              "Failed to extract the vendor boot image");
 
     // Assume the user specified a kernel manually which is a vmlinux
-    std::ofstream kernel(unpack_dir + "/kernel", std::ios_base::binary |
-                                                 std::ios_base::trunc);
-    std::ifstream vmlinux(FLAGS_kernel_path, std::ios_base::binary);
-    kernel << vmlinux.rdbuf();
-    kernel.close();
+    CF_EXPECT(cuttlefish::Copy(instance_.kernel_path(), unpack_dir + "/kernel"));
 
     // Gem5 needs the bootloader binary to be a specific directory structure
     // to find it. Create a 'binaries' directory and copy it into there
     const std::string binaries_dir = unpack_dir + "/binaries";
-    if (mkdir(binaries_dir.c_str(), S_IRWXU | S_IRWXG | S_IROTH | S_IXOTH) < 0
-        && errno != EEXIST) {
-      PLOG(ERROR) << "Failed to create dir: \"" << binaries_dir << "\" ";
-      return false;
-    }
-    std::ofstream bootloader(binaries_dir + "/" +
-                             cpp_basename(FLAGS_bootloader),
-                             std::ios_base::binary | std::ios_base::trunc);
-    std::ifstream src_bootloader(FLAGS_bootloader, std::ios_base::binary);
-    bootloader << src_bootloader.rdbuf();
-    bootloader.close();
+    CF_EXPECT(mkdir(binaries_dir.c_str(),
+                    S_IRWXU | S_IRWXG | S_IROTH | S_IXOTH) == 0 ||
+                  errno == EEXIST,
+              "\"" << binaries_dir << "\": " << strerror(errno));
+    CF_EXPECT(cuttlefish::Copy(instance_.bootloader(),
+        binaries_dir + "/" + cpp_basename(instance_.bootloader())));
 
     // Gem5 also needs the ARM version of the bootloader, even though it
     // doesn't use it. It'll even open it to check it's a valid ELF file.
     // Work around this by copying such a named file from the same directory
-    std::ofstream boot_arm(binaries_dir + "/boot.arm",
-                           std::ios_base::binary | std::ios_base::trunc);
-    std::ifstream src_boot_arm(cpp_dirname(FLAGS_bootloader) + "/boot.arm",
-                               std::ios_base::binary);
-    boot_arm << src_boot_arm.rdbuf();
-    boot_arm.close();
+    CF_EXPECT(cuttlefish::Copy(
+        cpp_dirname(instance_.bootloader()) + "/boot.arm",
+        binaries_dir + "/boot.arm"));
 
-    return true;
+    return {};
   }
 
  private:
   const CuttlefishConfig& config_;
-  BootImageRepacker& bir_;
+  KernelRamdiskRepacker& bir_;
 };
 
 class GeneratePersistentBootconfig : public SetupFeature {
@@ -497,60 +717,56 @@
     return "GeneratePersistentBootconfig";
   }
   bool Enabled() const override {
-    return (!config_.protected_vm());
+    return (!instance_.protected_vm());
   }
 
  private:
   std::unordered_set<SetupFeature*> Dependencies() const override { return {}; }
-  bool Setup() override {
+  Result<void> ResultSetup() override {
     //  Cuttlefish for the time being won't be able to support OTA from a
     //  non-bootconfig kernel to a bootconfig-kernel (or vice versa) IF the
     //  device is stopped (via stop_cvd). This is rarely an issue since OTA
     //  testing run on cuttlefish is done within one launch cycle of the device.
     //  If this ever becomes an issue, this code will have to be rewritten.
-    if(!config_.bootconfig_supported()) {
-      return true;
+    if(!instance_.bootconfig_supported()) {
+      return {};
     }
-
     const auto bootconfig_path = instance_.persistent_bootconfig_path();
     if (!FileExists(bootconfig_path)) {
-      if (!CreateBlankImage(bootconfig_path, 1 /* mb */, "none")) {
-        LOG(ERROR) << "Failed to create image at " << bootconfig_path;
-        return false;
-      }
+      CF_EXPECT(CreateBlankImage(bootconfig_path, 1 /* mb */, "none"),
+                "Failed to create image at " << bootconfig_path);
     }
 
     auto bootconfig_fd = SharedFD::Open(bootconfig_path, O_RDWR);
-    if (!bootconfig_fd->IsOpen()) {
-      LOG(ERROR) << "Unable to open bootconfig file: "
-                 << bootconfig_fd->StrError();
-      return false;
-    }
+    CF_EXPECT(bootconfig_fd->IsOpen(),
+              "Unable to open bootconfig file: " << bootconfig_fd->StrError());
 
-    const std::string bootconfig =
-        android::base::Join(BootconfigArgsFromConfig(config_, instance_),
-                            "\n") +
-        "\n";
+    const auto bootconfig_args =
+        CF_EXPECT(BootconfigArgsFromConfig(config_, instance_));
+    const auto bootconfig =
+        CF_EXPECT(BootconfigArgsString(bootconfig_args, "\n")) + "\n";
+
+    LOG(DEBUG) << "bootconfig size is " << bootconfig.size();
     ssize_t bytesWritten = WriteAll(bootconfig_fd, bootconfig);
-    LOG(DEBUG) << "bootconfig size is " << bytesWritten;
-    if (bytesWritten != bootconfig.size()) {
-      LOG(ERROR) << "Failed to write contents of bootconfig to \""
-                 << bootconfig_path << "\"";
-      return false;
-    }
+    CF_EXPECT(bytesWritten == bootconfig.size(),
+              "Failed to write bootconfig to \"" << bootconfig_path << "\"");
     LOG(DEBUG) << "Bootconfig parameters from vendor boot image and config are "
                << ReadFile(bootconfig_path);
 
-    if (bootconfig_fd->Truncate(bytesWritten) != 0) {
-      LOG(ERROR) << "`truncate --size=" << bytesWritten << " bytes "
-                 << bootconfig_path << "` failed:" << bootconfig_fd->StrError();
-      return false;
-    }
+    CF_EXPECT(bootconfig_fd->Truncate(bootconfig.size()) == 0,
+              "`truncate --size=" << bootconfig.size() << " bytes "
+                                  << bootconfig_path
+                                  << "` failed:" << bootconfig_fd->StrError());
 
-    if (config_.vm_manager() != Gem5Manager::name()) {
+    if (config_.vm_manager() == Gem5Manager::name()) {
+      const off_t bootconfig_size_bytes_gem5 =
+          AlignToPowerOf2(bytesWritten, PARTITION_SIZE_SHIFT);
+      CF_EXPECT(bootconfig_fd->Truncate(bootconfig_size_bytes_gem5) == 0);
+      bootconfig_fd->Close();
+    } else {
       bootconfig_fd->Close();
       const off_t bootconfig_size_bytes = AlignToPowerOf2(
-          MAX_AVB_METADATA_SIZE + bytesWritten, PARTITION_SIZE_SHIFT);
+          MAX_AVB_METADATA_SIZE + bootconfig.size(), PARTITION_SIZE_SHIFT);
 
       auto avbtool_path = HostBinaryPath("avbtool");
       Command bootconfig_hash_footer_cmd(avbtool_path);
@@ -567,18 +783,11 @@
       bootconfig_hash_footer_cmd.AddParameter("--algorithm");
       bootconfig_hash_footer_cmd.AddParameter("SHA256_RSA4096");
       int success = bootconfig_hash_footer_cmd.Start().Wait();
-      if (success != 0) {
-        LOG(ERROR) << "Unable to run append hash footer. Exited with status "
-                   << success;
-        return false;
-      }
-    } else {
-      const off_t bootconfig_size_bytes_gem5 = AlignToPowerOf2(
-          bytesWritten, PARTITION_SIZE_SHIFT);
-      bootconfig_fd->Truncate(bootconfig_size_bytes_gem5);
-      bootconfig_fd->Close();
+      CF_EXPECT(
+          success == 0,
+          "Unable to run append hash footer. Exited with status " << success);
     }
-    return true;
+    return {};
   }
 
   const CuttlefishConfig& config_;
@@ -588,12 +797,10 @@
 class GeneratePersistentVbmeta : public SetupFeature {
  public:
   INJECT(GeneratePersistentVbmeta(
-      const CuttlefishConfig& config,
       const CuttlefishConfig::InstanceSpecific& instance,
       InitBootloaderEnvPartition& bootloader_env,
       GeneratePersistentBootconfig& bootconfig))
-      : config_(config),
-        instance_(instance),
+      : instance_(instance),
         bootloader_env_(bootloader_env),
         bootconfig_(bootconfig) {}
 
@@ -602,7 +809,7 @@
     return "GeneratePersistentVbmeta";
   }
   bool Enabled() const override {
-    return (!config_.protected_vm());
+    return true;
   }
 
  private:
@@ -614,11 +821,27 @@
   }
 
   bool Setup() override {
+    if (!instance_.protected_vm()) {
+      if (!PrepareVBMetaImage(instance_.vbmeta_path(), instance_.bootconfig_supported())) {
+        return false;
+      }
+    }
+
+    if (instance_.ap_boot_flow() == APBootFlow::Grub) {
+      if (!PrepareVBMetaImage(instance_.ap_vbmeta_path(), false)) {
+        return false;
+      }
+    }
+
+    return true;
+  }
+
+  bool PrepareVBMetaImage(const std::string& path, bool has_boot_config) {
     auto avbtool_path = HostBinaryPath("avbtool");
     Command vbmeta_cmd(avbtool_path);
     vbmeta_cmd.AddParameter("make_vbmeta_image");
     vbmeta_cmd.AddParameter("--output");
-    vbmeta_cmd.AddParameter(instance_.vbmeta_path());
+    vbmeta_cmd.AddParameter(path);
     vbmeta_cmd.AddParameter("--algorithm");
     vbmeta_cmd.AddParameter("SHA256_RSA4096");
     vbmeta_cmd.AddParameter("--key");
@@ -629,7 +852,7 @@
     vbmeta_cmd.AddParameter("uboot_env:1:" +
                             DefaultHostArtifactsPath("etc/cvd.avbpubkey"));
 
-    if (config_.bootconfig_supported()) {
+    if (has_boot_config) {
         vbmeta_cmd.AddParameter("--chain_partition");
         vbmeta_cmd.AddParameter("bootconfig:2:" +
                                 DefaultHostArtifactsPath("etc/cvd.avbpubkey"));
@@ -642,25 +865,24 @@
       return false;
     }
 
-    if (FileSize(instance_.vbmeta_path()) > VBMETA_MAX_SIZE) {
-      LOG(ERROR) << "Generated vbmeta - " << instance_.vbmeta_path()
+    const auto vbmeta_size = FileSize(path);
+    if (vbmeta_size > VBMETA_MAX_SIZE) {
+      LOG(ERROR) << "Generated vbmeta - " << path
                  << " is larger than the expected " << VBMETA_MAX_SIZE
                  << ". Stopping.";
       return false;
     }
-    if (FileSize(instance_.vbmeta_path()) != VBMETA_MAX_SIZE) {
-      auto fd = SharedFD::Open(instance_.vbmeta_path(), O_RDWR);
+    if (vbmeta_size != VBMETA_MAX_SIZE) {
+      auto fd = SharedFD::Open(path, O_RDWR);
       if (!fd->IsOpen() || fd->Truncate(VBMETA_MAX_SIZE) != 0) {
         LOG(ERROR) << "`truncate --size=" << VBMETA_MAX_SIZE << " "
-                   << instance_.vbmeta_path() << "` "
-                   << "failed: " << fd->StrError();
+                   << path << "` failed: " << fd->StrError();
         return false;
       }
     }
     return true;
   }
 
-  const CuttlefishConfig& config_;
   const CuttlefishConfig::InstanceSpecific& instance_;
   InitBootloaderEnvPartition& bootloader_env_;
   GeneratePersistentBootconfig& bootconfig_;
@@ -668,7 +890,9 @@
 
 class InitializeMetadataImage : public SetupFeature {
  public:
-  INJECT(InitializeMetadataImage()) {}
+  INJECT(InitializeMetadataImage(
+      const CuttlefishConfig::InstanceSpecific& instance))
+      : instance_(instance) {}
 
   // SetupFeature
   std::string Name() const override { return "InitializeMetadataImage"; }
@@ -677,28 +901,30 @@
  private:
   std::unordered_set<SetupFeature*> Dependencies() const override { return {}; }
   Result<void> ResultSetup() override {
-    if (FileExists(FLAGS_metadata_image)) {
+    if (FileExists(instance_.metadata_image()) &&
+        FileSize(instance_.metadata_image()) == instance_.blank_metadata_image_mb() << 20) {
       return {};
     }
 
-    CF_EXPECT(CreateBlankImage(FLAGS_metadata_image,
-                               FLAGS_blank_metadata_image_mb, "none"),
-              "Failed to create \"" << FLAGS_metadata_image << "\" with size "
-                                    << FLAGS_blank_metadata_image_mb);
+    CF_EXPECT(CreateBlankImage(instance_.new_metadata_image(),
+                               instance_.blank_metadata_image_mb(), "none"),
+              "Failed to create \"" << instance_.new_metadata_image()
+                                    << "\" with size "
+                                    << instance_.blank_metadata_image_mb());
     return {};
   }
+  const CuttlefishConfig::InstanceSpecific& instance_;
 };
 
 class InitializeAccessKregistryImage : public SetupFeature {
  public:
   INJECT(InitializeAccessKregistryImage(
-      const CuttlefishConfig& config,
       const CuttlefishConfig::InstanceSpecific& instance))
-      : config_(config), instance_(instance) {}
+      : instance_(instance) {}
 
   // SetupFeature
   std::string Name() const override { return "InitializeAccessKregistryImage"; }
-  bool Enabled() const override { return !config_.protected_vm(); }
+  bool Enabled() const override { return !instance_.protected_vm(); }
 
  private:
   std::unordered_set<SetupFeature*> Dependencies() const override { return {}; }
@@ -712,20 +938,21 @@
     return {};
   }
 
-  const CuttlefishConfig& config_;
   const CuttlefishConfig::InstanceSpecific& instance_;
 };
 
 class InitializeHwcomposerPmemImage : public SetupFeature {
  public:
   INJECT(InitializeHwcomposerPmemImage(
-      const CuttlefishConfig& config,
       const CuttlefishConfig::InstanceSpecific& instance))
-      : config_(config), instance_(instance) {}
+      : instance_(instance) {}
 
   // SetupFeature
   std::string Name() const override { return "InitializeHwcomposerPmemImage"; }
-  bool Enabled() const override { return !config_.protected_vm(); }
+  bool Enabled() const override {
+    return instance_.hwcomposer() != kHwComposerNone &&
+           !instance_.protected_vm();
+  }
 
  private:
   std::unordered_set<SetupFeature*> Dependencies() const override { return {}; }
@@ -739,19 +966,17 @@
     return {};
   }
 
-  const CuttlefishConfig& config_;
   const CuttlefishConfig::InstanceSpecific& instance_;
 };
 
 class InitializePstore : public SetupFeature {
  public:
-  INJECT(InitializePstore(const CuttlefishConfig& config,
-                          const CuttlefishConfig::InstanceSpecific& instance))
-      : config_(config), instance_(instance) {}
+  INJECT(InitializePstore(const CuttlefishConfig::InstanceSpecific& instance))
+      : instance_(instance) {}
 
   // SetupFeature
   std::string Name() const override { return "InitializePstore"; }
-  bool Enabled() const override { return !config_.protected_vm(); }
+  bool Enabled() const override { return !instance_.protected_vm(); }
 
  private:
   std::unordered_set<SetupFeature*> Dependencies() const override { return {}; }
@@ -765,20 +990,18 @@
     return {};
   }
 
-  const CuttlefishConfig& config_;
   const CuttlefishConfig::InstanceSpecific& instance_;
 };
 
 class InitializeSdCard : public SetupFeature {
  public:
-  INJECT(InitializeSdCard(const CuttlefishConfig& config,
-                          const CuttlefishConfig::InstanceSpecific& instance))
-      : config_(config), instance_(instance) {}
+  INJECT(InitializeSdCard(const CuttlefishConfig::InstanceSpecific& instance))
+      : instance_(instance) {}
 
   // SetupFeature
   std::string Name() const override { return "InitializeSdCard"; }
   bool Enabled() const override {
-    return FLAGS_use_sdcard && !config_.protected_vm();
+    return instance_.use_sdcard() && !instance_.protected_vm();
   }
 
  private:
@@ -788,25 +1011,23 @@
       return {};
     }
     CF_EXPECT(CreateBlankImage(instance_.sdcard_path(),
-                               FLAGS_blank_sdcard_image_mb, "sdcard"),
+                               instance_.blank_sdcard_image_mb(), "sdcard"),
               "Failed to create \"" << instance_.sdcard_path() << "\"");
     return {};
   }
 
-  const CuttlefishConfig& config_;
   const CuttlefishConfig::InstanceSpecific& instance_;
 };
 
 class InitializeFactoryResetProtected : public SetupFeature {
  public:
   INJECT(InitializeFactoryResetProtected(
-      const CuttlefishConfig& config,
       const CuttlefishConfig::InstanceSpecific& instance))
-      : config_(config), instance_(instance) {}
+      : instance_(instance) {}
 
   // SetupFeature
   std::string Name() const override { return "InitializeSdCard"; }
-  bool Enabled() const override { return !config_.protected_vm(); }
+  bool Enabled() const override { return !instance_.protected_vm(); }
 
  private:
   std::unordered_set<SetupFeature*> Dependencies() const override { return {}; }
@@ -820,7 +1041,6 @@
     return {};
   }
 
-  const CuttlefishConfig& config_;
   const CuttlefishConfig::InstanceSpecific& instance_;
 };
 
@@ -849,21 +1069,35 @@
     };
   }
   Result<void> ResultSetup() override {
-    auto ipath = [this](const std::string& path) -> std::string {
+    const auto ipath = [this](const std::string& path) -> std::string {
       return instance_.PerInstancePath(path.c_str());
     };
     auto persistent_disk_builder =
         DiskBuilder()
-            .Partitions(persistent_composite_disk_config(config_, instance_))
+            .Partitions(persistent_composite_disk_config(instance_))
             .VmManager(config_.vm_manager())
-            .CrosvmPath(config_.crosvm_binary())
+            .CrosvmPath(instance_.crosvm_binary())
             .ConfigPath(ipath("persistent_composite_disk_config.txt"))
             .HeaderPath(ipath("persistent_composite_gpt_header.img"))
             .FooterPath(ipath("persistent_composite_gpt_footer.img"))
             .CompositeDiskPath(instance_.persistent_composite_disk_path())
             .ResumeIfPossible(FLAGS_resume);
-
     CF_EXPECT(persistent_disk_builder.BuildCompositeDiskIfNecessary());
+
+    if (instance_.ap_boot_flow() == APBootFlow::Grub) {
+      auto persistent_ap_disk_builder =
+        DiskBuilder()
+            .Partitions(persistent_ap_composite_disk_config(instance_))
+            .VmManager(config_.vm_manager())
+            .CrosvmPath(instance_.crosvm_binary())
+            .ConfigPath(ipath("ap_persistent_composite_disk_config.txt"))
+            .HeaderPath(ipath("ap_persistent_composite_gpt_header.img"))
+            .FooterPath(ipath("ap_persistent_composite_gpt_footer.img"))
+            .CompositeDiskPath(instance_.persistent_ap_composite_disk_path())
+            .ResumeIfPossible(FLAGS_resume);
+      CF_EXPECT(persistent_ap_disk_builder.BuildCompositeDiskIfNecessary());
+    }
+
     return {};
   }
 
@@ -875,7 +1109,9 @@
 
 class VbmetaEnforceMinimumSize : public SetupFeature {
  public:
-  INJECT(VbmetaEnforceMinimumSize()) {}
+  INJECT(VbmetaEnforceMinimumSize(
+      const CuttlefishConfig::InstanceSpecific& instance))
+      : instance_(instance) {}
 
   std::string Name() const override { return "VbmetaEnforceMinimumSize"; }
   bool Enabled() const override { return true; }
@@ -886,8 +1122,11 @@
     // libavb expects to be able to read the maximum vbmeta size, so we must
     // provide a partition which matches this or the read will fail
     for (const auto& vbmeta_image :
-         {FLAGS_vbmeta_image, FLAGS_vbmeta_system_image}) {
-      if (FileSize(vbmeta_image) != VBMETA_MAX_SIZE) {
+         {instance_.vbmeta_image(), instance_.vbmeta_system_image(),
+          instance_.vbmeta_vendor_dlkm_image()}) {
+      // In some configurations of cuttlefish, the vendor dlkm vbmeta image does
+      // not exist
+      if (FileExists(vbmeta_image) && FileSize(vbmeta_image) != VBMETA_MAX_SIZE) {
         auto fd = SharedFD::Open(vbmeta_image, O_RDWR);
         CF_EXPECT(fd->IsOpen(), "Could not open \"" << vbmeta_image << "\": "
                                                     << fd->StrError());
@@ -898,11 +1137,15 @@
     }
     return {};
   }
+
+  const CuttlefishConfig::InstanceSpecific& instance_;
 };
 
 class BootloaderPresentCheck : public SetupFeature {
  public:
-  INJECT(BootloaderPresentCheck()) {}
+  INJECT(BootloaderPresentCheck(
+      const CuttlefishConfig::InstanceSpecific& instance))
+      : instance_(instance) {}
 
   std::string Name() const override { return "BootloaderPresentCheck"; }
   bool Enabled() const override { return true; }
@@ -910,31 +1153,30 @@
  private:
   std::unordered_set<SetupFeature*> Dependencies() const override { return {}; }
   Result<void> ResultSetup() override {
-    CF_EXPECT(FileHasContent(FLAGS_bootloader),
-              "File not found: " << FLAGS_bootloader);
+    CF_EXPECT(FileHasContent(instance_.bootloader()),
+              "File not found: " << instance_.bootloader());
     return {};
   }
+
+  const CuttlefishConfig::InstanceSpecific& instance_;
 };
 
-static fruit::Component<> DiskChangesComponent(const FetcherConfig* fetcher,
-                                               const CuttlefishConfig* config) {
+static fruit::Component<> DiskChangesComponent(
+    const FetcherConfig* fetcher, const CuttlefishConfig* config,
+    const CuttlefishConfig::InstanceSpecific* instance) {
   return fruit::createComponent()
       .bindInstance(*fetcher)
       .bindInstance(*config)
+      .bindInstance(*instance)
       .addMultibinding<SetupFeature, InitializeMetadataImage>()
-      .addMultibinding<SetupFeature, BootImageRepacker>()
+      .addMultibinding<SetupFeature, KernelRamdiskRepacker>()
       .addMultibinding<SetupFeature, VbmetaEnforceMinimumSize>()
       .addMultibinding<SetupFeature, BootloaderPresentCheck>()
       .addMultibinding<SetupFeature, Gem5ImageUnpacker>()
-      .install(FixedMiscImagePathComponent, &FLAGS_misc_image)
       .install(InitializeMiscImageComponent)
-      .install(FixedDataImagePathComponent, &FLAGS_data_image)
-      .install(InitializeDataImageComponent)
       // Create esp if necessary
-      .install(InitializeEspImageComponent, &FLAGS_otheros_esp_image,
-               &FLAGS_otheros_kernel_path, &FLAGS_otheros_initramfs_path,
-               &FLAGS_otheros_root_image, config)
-      .install(SuperImageRebuilderComponent, &FLAGS_super_image);
+      .install(InitializeEspImageComponent)
+      .install(SuperImageRebuilderComponent);
 }
 
 static fruit::Component<> DiskChangesPerInstanceComponent(
@@ -952,53 +1194,332 @@
       .addMultibinding<SetupFeature, GeneratePersistentBootconfig>()
       .addMultibinding<SetupFeature, GeneratePersistentVbmeta>()
       .addMultibinding<SetupFeature, InitializeInstanceCompositeDisk>()
+      .install(InitializeDataImageComponent)
       .install(InitBootloaderEnvPartitionComponent);
 }
 
+Result<void> DiskImageFlagsVectorization(CuttlefishConfig& config, const FetcherConfig& fetcher_config) {
+  std::vector<std::string> boot_image =
+      android::base::Split(FLAGS_boot_image, ",");
+  std::vector<std::string> init_boot_image =
+      android::base::Split(FLAGS_init_boot_image, ",");
+  std::vector<std::string> data_image =
+      android::base::Split(FLAGS_data_image, ",");
+  std::vector<std::string> super_image =
+      android::base::Split(FLAGS_super_image, ",");
+  std::vector<std::string> misc_image =
+      android::base::Split(FLAGS_misc_image, ",");
+  std::vector<std::string> misc_info =
+      android::base::Split(FLAGS_misc_info_txt, ",");
+  std::vector<std::string> metadata_image =
+      android::base::Split(FLAGS_metadata_image, ",");
+  std::vector<std::string> vendor_boot_image =
+      android::base::Split(FLAGS_vendor_boot_image, ",");
+  std::vector<std::string> vbmeta_image =
+      android::base::Split(FLAGS_vbmeta_image, ",");
+  std::vector<std::string> vbmeta_system_image =
+      android::base::Split(FLAGS_vbmeta_system_image, ",");
+  auto vbmeta_vendor_dlkm_image =
+      android::base::Split(FLAGS_vbmeta_vendor_dlkm_image, ",");
+
+  std::vector<std::string> linux_kernel_path =
+      android::base::Split(FLAGS_linux_kernel_path, ",");
+  std::vector<std::string> linux_initramfs_path =
+      android::base::Split(FLAGS_linux_initramfs_path, ",");
+  std::vector<std::string> linux_root_image =
+      android::base::Split(FLAGS_linux_root_image, ",");
+
+  std::vector<std::string> fuchsia_zedboot_path =
+      android::base::Split(FLAGS_fuchsia_zedboot_path, ",");
+  std::vector<std::string> fuchsia_multiboot_bin_path =
+      android::base::Split(FLAGS_fuchsia_multiboot_bin_path, ",");
+  std::vector<std::string> fuchsia_root_image =
+      android::base::Split(FLAGS_fuchsia_root_image, ",");
+
+  std::vector<std::string> custom_partition_path =
+      android::base::Split(FLAGS_custom_partition_path, ",");
+
+  std::vector<std::string> bootloader =
+      android::base::Split(FLAGS_bootloader, ",");
+  std::vector<std::string> initramfs_path =
+      android::base::Split(FLAGS_initramfs_path, ",");
+  std::vector<std::string> kernel_path =
+      android::base::Split(FLAGS_kernel_path, ",");
+
+  std::vector<std::string> blank_metadata_image_mb =
+      android::base::Split(FLAGS_blank_metadata_image_mb, ",");
+  std::vector<std::string> blank_sdcard_image_mb =
+      android::base::Split(FLAGS_blank_sdcard_image_mb, ",");
+
+  std::string cur_kernel_path;
+  std::string cur_initramfs_path;
+  std::string cur_boot_image;
+  std::string cur_vendor_boot_image;
+  std::string cur_super_image;
+  std::string cur_metadata_image;
+  std::string cur_misc_image;
+  int cur_blank_metadata_image_mb{};
+  int value{};
+  int instance_index = 0;
+  auto instance_nums =
+      CF_EXPECT(InstanceNumsCalculator().FromGlobalGflags().Calculate());
+  for (const auto& num : instance_nums) {
+    auto instance = config.ForInstance(num);
+    if (instance_index >= misc_image.size()) {
+      // legacy variable. Vectorize by copy [0] to all instances
+      cur_misc_image = misc_image[0];
+    } else {
+      cur_misc_image = misc_image[instance_index];
+    }
+    instance.set_misc_image(cur_misc_image);
+    if (instance_index >= misc_info.size()) {
+      instance.set_misc_info_txt(misc_info[0]);
+    } else {
+      instance.set_misc_info_txt(misc_info[instance_index]);
+    }
+    if (instance_index >= boot_image.size()) {
+      cur_boot_image = boot_image[0];
+    } else {
+      cur_boot_image = boot_image[instance_index];
+    }
+    instance.set_boot_image(cur_boot_image);
+    instance.set_new_boot_image(cur_boot_image);
+
+    if (instance_index >= init_boot_image.size()) {
+      instance.set_init_boot_image(init_boot_image[0]);
+    } else {
+      instance.set_init_boot_image(init_boot_image[instance_index]);
+    }
+    if (instance_index >= vendor_boot_image.size()) {
+      cur_vendor_boot_image = vendor_boot_image[0];
+    } else {
+      cur_vendor_boot_image = vendor_boot_image[instance_index];
+    }
+    instance.set_vendor_boot_image(cur_vendor_boot_image);
+    instance.set_new_vendor_boot_image(cur_vendor_boot_image);
+
+    if (instance_index >= vbmeta_image.size()) {
+      instance.set_vbmeta_image(vbmeta_image[0]);
+    } else {
+      instance.set_vbmeta_image(vbmeta_image[instance_index]);
+    }
+    if (instance_index >= vbmeta_system_image.size()) {
+      instance.set_vbmeta_system_image(vbmeta_system_image[0]);
+    } else {
+      instance.set_vbmeta_system_image(vbmeta_system_image[instance_index]);
+    }
+    if (instance_index >= vbmeta_system_image.size()) {
+      instance.set_vbmeta_vendor_dlkm_image(vbmeta_vendor_dlkm_image[0]);
+    } else {
+      instance.set_vbmeta_vendor_dlkm_image(
+          vbmeta_vendor_dlkm_image[instance_index]);
+    }
+    if (instance_index >= super_image.size()) {
+      cur_super_image = super_image[0];
+    } else {
+      cur_super_image = super_image[instance_index];
+    }
+    instance.set_super_image(cur_super_image);
+    if (instance_index >= data_image.size()) {
+      instance.set_data_image(data_image[0]);
+    } else {
+      instance.set_data_image(data_image[instance_index]);
+    }
+    if (instance_index >= metadata_image.size()) {
+      cur_metadata_image = metadata_image[0];
+    } else {
+      cur_metadata_image = metadata_image[instance_index];
+    }
+    instance.set_metadata_image(cur_metadata_image);
+    if (instance_index >= linux_kernel_path.size()) {
+      instance.set_linux_kernel_path(linux_kernel_path[0]);
+    } else {
+      instance.set_linux_kernel_path(linux_kernel_path[instance_index]);
+    }
+    if (instance_index >= linux_initramfs_path.size()) {
+      instance.set_linux_initramfs_path(linux_initramfs_path[0]);
+    } else {
+      instance.set_linux_initramfs_path(linux_initramfs_path[instance_index]);
+    }
+    if (instance_index >= linux_root_image.size()) {
+      instance.set_linux_root_image(linux_root_image[0]);
+    } else {
+      instance.set_linux_root_image(linux_root_image[instance_index]);
+    }
+    if (instance_index >= fuchsia_zedboot_path.size()) {
+      instance.set_fuchsia_zedboot_path(fuchsia_zedboot_path[0]);
+    } else {
+      instance.set_fuchsia_zedboot_path(fuchsia_zedboot_path[instance_index]);
+    }
+    if (instance_index >= fuchsia_multiboot_bin_path.size()) {
+      instance.set_fuchsia_multiboot_bin_path(fuchsia_multiboot_bin_path[0]);
+    } else {
+      instance.set_fuchsia_multiboot_bin_path(fuchsia_multiboot_bin_path[instance_index]);
+    }
+    if (instance_index >= fuchsia_root_image.size()) {
+      instance.set_fuchsia_root_image(fuchsia_root_image[0]);
+    } else {
+      instance.set_fuchsia_root_image(fuchsia_root_image[instance_index]);
+    }
+    if (instance_index >= custom_partition_path.size()) {
+      instance.set_custom_partition_path(custom_partition_path[0]);
+    } else {
+      instance.set_custom_partition_path(custom_partition_path[instance_index]);
+    }
+    if (instance_index >= bootloader.size()) {
+      instance.set_bootloader(bootloader[0]);
+    } else {
+      instance.set_bootloader(bootloader[instance_index]);
+    }
+    if (instance_index >= kernel_path.size()) {
+      cur_kernel_path = kernel_path[0];
+    } else {
+      cur_kernel_path = kernel_path[instance_index];
+    }
+    instance.set_kernel_path(cur_kernel_path);
+    if (instance_index >= initramfs_path.size()) {
+      cur_initramfs_path = initramfs_path[0];
+    } else {
+      cur_initramfs_path = initramfs_path[instance_index];
+    }
+    instance.set_initramfs_path(cur_initramfs_path);
+
+    if (instance_index >= blank_metadata_image_mb.size()) {
+      CHECK(android::base::ParseInt(blank_metadata_image_mb[0],
+                                    &value))
+          << "Invalid 'blank_metadata_image_mb' "
+          << blank_metadata_image_mb[0];
+    } else {
+      CHECK(android::base::ParseInt(blank_metadata_image_mb[instance_index],
+                                    &value))
+          << "Invalid 'blank_metadata_image_mb' "
+          << blank_metadata_image_mb[instance_index];
+    }
+    instance.set_blank_metadata_image_mb(value);
+    cur_blank_metadata_image_mb = value;
+
+    if (instance_index >= blank_sdcard_image_mb.size()) {
+      CHECK(android::base::ParseInt(blank_sdcard_image_mb[0],
+                                    &value))
+          << "Invalid 'blank_sdcard_image_mb' "
+          << blank_sdcard_image_mb[0];
+    } else {
+      CHECK(android::base::ParseInt(blank_sdcard_image_mb[instance_index],
+                                    &value))
+          << "Invalid 'blank_sdcard_image_mb' "
+          << blank_sdcard_image_mb[instance_index];
+    }
+    instance.set_blank_sdcard_image_mb(value);
+
+    // Repacking a boot.img changes boot_image and vendor_boot_image paths
+    const CuttlefishConfig& const_config = const_cast<const CuttlefishConfig&>(config);
+    const CuttlefishConfig::InstanceSpecific const_instance = const_config.ForInstance(num);
+    if (cur_kernel_path.size() &&
+        config.vm_manager() != Gem5Manager::name()) {
+      const std::string new_boot_image_path =
+          const_instance.PerInstancePath("boot_repacked.img");
+      // change the new flag value to corresponding instance
+      instance.set_new_boot_image(new_boot_image_path.c_str());
+    }
+
+    if (cur_kernel_path.size() || cur_initramfs_path.size()) {
+      const std::string new_vendor_boot_image_path =
+          const_instance.PerInstancePath("vendor_boot_repacked.img");
+      // Repack the vendor boot images if kernels and/or ramdisks are passed in.
+      if (cur_initramfs_path.size()) {
+        // change the new flag value to corresponding instance
+        instance.set_new_vendor_boot_image(new_vendor_boot_image_path.c_str());
+      }
+    }
+
+    // We will need to rebuild vendor_dlkm if custom ramdisk is specified, as a
+    // result super image would need to be rebuilt as well.
+    if (SuperImageNeedsRebuilding(fetcher_config) ||
+        cur_initramfs_path.size()) {
+      const std::string new_super_image_path =
+          const_instance.PerInstancePath("super.img");
+      instance.set_new_super_image(new_super_image_path);
+    }
+
+    if (FileExists(cur_metadata_image) &&
+        FileSize(cur_metadata_image) == cur_blank_metadata_image_mb << 20) {
+      instance.set_new_metadata_image(cur_metadata_image);
+    } else {
+      const std::string new_metadata_image_path =
+          const_instance.PerInstancePath("metadata.img");
+      instance.set_new_metadata_image(new_metadata_image_path);
+    }
+    instance.set_new_vbmeta_vendor_dlkm_image(
+        const_instance.PerInstancePath("vbmeta_vendor_dlkm_repacked.img"));
+
+    if (FileHasContent(cur_misc_image)) {
+      instance.set_new_misc_image(cur_misc_image);
+    } else {
+      const std::string new_misc_image_path =
+          const_instance.PerInstancePath("misc.img");
+      instance.set_new_misc_image(new_misc_image_path);
+    }
+    instance_index++;
+  }
+  return {};
+}
+
 Result<void> CreateDynamicDiskFiles(const FetcherConfig& fetcher_config,
                                     const CuttlefishConfig& config) {
-  // TODO(schuffelen): Unify this with the other injector created in
-  // assemble_cvd.cpp
-  fruit::Injector<> injector(DiskChangesComponent, &fetcher_config, &config);
-
-  const auto& features = injector.getMultibindings<SetupFeature>();
-  CF_EXPECT(SetupFeature::RunSetup(features));
-
   for (const auto& instance : config.Instances()) {
+    // TODO(schuffelen): Unify this with the other injector created in
+    // assemble_cvd.cpp
+    fruit::Injector<> injector(DiskChangesComponent, &fetcher_config, &config,
+                               &instance);
+    for (auto& late_injected : injector.getMultibindings<LateInjected>()) {
+      CF_EXPECT(late_injected->LateInject(injector));
+    }
+
+    const auto& features = injector.getMultibindings<SetupFeature>();
+    CF_EXPECT(SetupFeature::RunSetup(features));
     fruit::Injector<> instance_injector(DiskChangesPerInstanceComponent,
                                         &fetcher_config, &config, &instance);
+    for (auto& late_injected :
+         instance_injector.getMultibindings<LateInjected>()) {
+      CF_EXPECT(late_injected->LateInject(instance_injector));
+    }
+
     const auto& instance_features =
         instance_injector.getMultibindings<SetupFeature>();
     CF_EXPECT(SetupFeature::RunSetup(instance_features),
               "instance = \"" << instance.instance_name() << "\"");
-  }
 
-  // Check if filling in the sparse image would run out of disk space.
-  auto existing_sizes = SparseFileSizes(FLAGS_data_image);
-  CF_EXPECT(existing_sizes.sparse_size > 0 || existing_sizes.disk_size > 0,
-            "Unable to determine size of \"" << FLAGS_data_image
-                                             << "\". Does this file exist?");
-  auto available_space = AvailableSpaceAtPath(FLAGS_data_image);
-  if (available_space < existing_sizes.sparse_size - existing_sizes.disk_size) {
-    // TODO(schuffelen): Duplicate this check in run_cvd when it can run on a
-    // separate machine
-    return CF_ERR("Not enough space remaining in fs containing \""
-                  << FLAGS_data_image << "\", wanted "
-                  << (existing_sizes.sparse_size - existing_sizes.disk_size)
-                  << ", got " << available_space);
-  } else {
-    LOG(DEBUG) << "Available space: " << available_space;
-    LOG(DEBUG) << "Sparse size of \"" << FLAGS_data_image
-               << "\": " << existing_sizes.sparse_size;
-    LOG(DEBUG) << "Disk size of \"" << FLAGS_data_image
-               << "\": " << existing_sizes.disk_size;
-  }
+    // Check if filling in the sparse image would run out of disk space.
+    auto existing_sizes = SparseFileSizes(instance.data_image());
+    CF_EXPECT(existing_sizes.sparse_size > 0 || existing_sizes.disk_size > 0,
+              "Unable to determine size of \"" << instance.data_image()
+                                               << "\". Does this file exist?");
+    auto available_space = AvailableSpaceAtPath(instance.data_image());
+    if (available_space <
+        existing_sizes.sparse_size - existing_sizes.disk_size) {
+      // TODO(schuffelen): Duplicate this check in run_cvd when it can run on a
+      // separate machine
+      return CF_ERR("Not enough space remaining in fs containing \""
+                    << instance.data_image() << "\", wanted "
+                    << (existing_sizes.sparse_size - existing_sizes.disk_size)
+                    << ", got " << available_space);
+    } else {
+      LOG(DEBUG) << "Available space: " << available_space;
+      LOG(DEBUG) << "Sparse size of \"" << instance.data_image()
+                 << "\": " << existing_sizes.sparse_size;
+      LOG(DEBUG) << "Disk size of \"" << instance.data_image()
+                 << "\": " << existing_sizes.disk_size;
+    }
 
-  auto os_disk_builder = OsCompositeDiskBuilder(config);
-  auto built_composite =
-      CF_EXPECT(os_disk_builder.BuildCompositeDiskIfNecessary());
-  if (built_composite) {
-    for (auto instance : config.Instances()) {
+    auto os_disk_builder = OsCompositeDiskBuilder(config, instance);
+    const auto os_built_composite = CF_EXPECT(os_disk_builder.BuildCompositeDiskIfNecessary());
+
+    auto ap_disk_builder = ApCompositeDiskBuilder(config, instance);
+    if (instance.ap_boot_flow() != APBootFlow::None) {
+      CF_EXPECT(ap_disk_builder.BuildCompositeDiskIfNecessary());
+    }
+
+    if (os_built_composite) {
       if (FileExists(instance.access_kregistry_path())) {
         CF_EXPECT(CreateBlankImage(instance.access_kregistry_path(), 2 /* mb */,
                                    "none"),
@@ -1014,15 +1535,13 @@
                   "Failed for\"" << instance.pstore_path() << "\"");
       }
     }
-  }
 
-  if (!FLAGS_protected_vm) {
-    for (auto instance : config.Instances()) {
+    if (!instance.protected_vm()) {
       os_disk_builder.OverlayPath(instance.PerInstancePath("overlay.img"));
       CF_EXPECT(os_disk_builder.BuildOverlayIfNecessary());
-      if (instance.start_ap()) {
-        os_disk_builder.OverlayPath(instance.PerInstancePath("ap_overlay.img"));
-        CF_EXPECT(os_disk_builder.BuildOverlayIfNecessary());
+      if (instance.ap_boot_flow() != APBootFlow::None) {
+        ap_disk_builder.OverlayPath(instance.PerInstancePath("ap_overlay.img"));
+        CF_EXPECT(ap_disk_builder.BuildOverlayIfNecessary());
       }
     }
   }
@@ -1037,10 +1556,9 @@
     // Gem5 Simulate per-instance what the bootloader would usually do
     // Since on other devices this runs every time, just do it here every time
     if (config.vm_manager() == Gem5Manager::name()) {
-      RepackGem5BootImage(
-          instance.PerInstancePath("initrd.img"),
-          instance.persistent_bootconfig_path(),
-          config.assembly_dir());
+      RepackGem5BootImage(instance.PerInstancePath("initrd.img"),
+                          instance.persistent_bootconfig_path(),
+                          config.assembly_dir(), instance.initramfs_path());
     }
   }
 
diff --git a/host/commands/assemble_cvd/disk_flags.h b/host/commands/assemble_cvd/disk_flags.h
index e75aa4f..1b532f6 100644
--- a/host/commands/assemble_cvd/disk_flags.h
+++ b/host/commands/assemble_cvd/disk_flags.h
@@ -32,7 +32,12 @@
 
 Result<void> CreateDynamicDiskFiles(const FetcherConfig& fetcher_config,
                                     const CuttlefishConfig& config);
-std::vector<ImagePartition> GetOsCompositeDiskConfig();
-DiskBuilder OsCompositeDiskBuilder(const CuttlefishConfig& config);
+Result<void> DiskImageFlagsVectorization(CuttlefishConfig& config, const FetcherConfig& fetcher_config);
+std::vector<ImagePartition> GetOsCompositeDiskConfig(
+    const CuttlefishConfig::InstanceSpecific& instance);
+DiskBuilder OsCompositeDiskBuilder(const CuttlefishConfig& config,
+                                   const CuttlefishConfig::InstanceSpecific& instance);
+DiskBuilder ApCompositeDiskBuilder(const CuttlefishConfig& config,
+                                   const CuttlefishConfig::InstanceSpecific& instance);
 
 } // namespace cuttlefish
diff --git a/host/commands/assemble_cvd/display_flags.cpp b/host/commands/assemble_cvd/display_flags.cpp
new file mode 100644
index 0000000..4664311
--- /dev/null
+++ b/host/commands/assemble_cvd/display_flags.cpp
@@ -0,0 +1,86 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/commands/assemble_cvd/display_flags.h"
+
+#include <unordered_map>
+#include <vector>
+
+#include <android-base/logging.h>
+#include <android-base/parseint.h>
+#include <android-base/strings.h>
+
+#include "common/libs/utils/contains.h"
+#include "host/commands/assemble_cvd/flags_defaults.h"
+
+namespace cuttlefish {
+
+Result<std::optional<CuttlefishConfig::DisplayConfig>> ParseDisplayConfig(
+    const std::string& flag) {
+  if (flag.empty()) {
+    return std::nullopt;
+  }
+
+  std::unordered_map<std::string, std::string> props;
+
+  const std::vector<std::string> pairs = android::base::Split(flag, ",");
+  for (const std::string& pair : pairs) {
+    const std::vector<std::string> keyvalue = android::base::Split(pair, "=");
+    CF_EXPECT_EQ(keyvalue.size(), 2,
+                 "Invalid display flag key-value: \"" << flag << "\"");
+    const std::string& prop_key = keyvalue[0];
+    const std::string& prop_val = keyvalue[1];
+    props[prop_key] = prop_val;
+  }
+
+  CF_EXPECT(Contains(props, "width"),
+            "Display configuration missing 'width' in \"" << flag << "\"");
+  CF_EXPECT(Contains(props, "height"),
+            "Display configuration missing 'height' in \"" << flag << "\"");
+
+  int display_width;
+  CF_EXPECT(android::base::ParseInt(props["width"], &display_width),
+            "Display configuration invalid 'width' in \"" << flag << "\"");
+
+  int display_height;
+  CF_EXPECT(android::base::ParseInt(props["height"], &display_height),
+            "Display configuration invalid 'height' in \"" << flag << "\"");
+
+  int display_dpi = CF_DEFAULTS_DISPLAY_DPI;
+  auto display_dpi_it = props.find("dpi");
+  if (display_dpi_it != props.end()) {
+    CF_EXPECT(android::base::ParseInt(display_dpi_it->second, &display_dpi),
+              "Display configuration invalid 'dpi' in \"" << flag << "\"");
+  }
+
+  int display_refresh_rate_hz = CF_DEFAULTS_DISPLAY_REFRESH_RATE;
+  auto display_refresh_rate_hz_it = props.find("refresh_rate_hz");
+  if (display_refresh_rate_hz_it != props.end()) {
+    CF_EXPECT(android::base::ParseInt(display_refresh_rate_hz_it->second,
+                                      &display_refresh_rate_hz),
+              "Display configuration invalid 'refresh_rate_hz' in \"" << flag
+                                                                      << "\"");
+  }
+
+  return CuttlefishConfig::DisplayConfig{
+      .width = display_width,
+      .height = display_height,
+      .dpi = display_dpi,
+      .refresh_rate_hz = display_refresh_rate_hz,
+  };
+}
+
+}  // namespace cuttlefish
\ No newline at end of file
diff --git a/host/commands/assemble_cvd/display_flags.h b/host/commands/assemble_cvd/display_flags.h
new file mode 100644
index 0000000..92be23b
--- /dev/null
+++ b/host/commands/assemble_cvd/display_flags.h
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <optional>
+#include <string>
+
+#include "common/libs/utils/result.h"
+#include "host/libs/config/cuttlefish_config.h"
+
+namespace cuttlefish {
+
+constexpr const char kDisplayHelp[] =
+    "Comma separated key=value pairs of display properties. Supported "
+    "properties:\n"
+    " 'width': required, width of the display in pixels\n"
+    " 'height': required, height of the display in pixels\n"
+    " 'dpi': optional, default 320, density of the display\n"
+    " 'refresh_rate_hz': optional, default 60, display refresh rate in Hertz\n"
+    ". Example usage: \n"
+    "--display0=width=1280,height=720\n"
+    "--display1=width=1440,height=900,dpi=480,refresh_rate_hz=30\n";
+
+Result<std::optional<CuttlefishConfig::DisplayConfig>> ParseDisplayConfig(
+    const std::string& flag);
+
+}  // namespace cuttlefish
\ No newline at end of file
diff --git a/host/commands/assemble_cvd/doc/disk.dot b/host/commands/assemble_cvd/doc/disk.dot
new file mode 100644
index 0000000..dd40d4c
--- /dev/null
+++ b/host/commands/assemble_cvd/doc/disk.dot
@@ -0,0 +1,64 @@
+digraph {
+  rankdir = "LR";
+
+  overlay [label = "Operating System Overlay (qcow2)", URL = "https://en.wikipedia.org/wiki/Qcow"]
+  operating_system [label = "Operating System (composite)", URL = "https://source.android.com/docs/core/architecture/bootloader/partitions"]
+  persistent [label = "Persistent Disk (composite)", URL = "https://chromium.googlesource.com/chromiumos/platform/crosvm/+/refs/heads/main/disk/src/composite.rs"]
+
+  uboot_env [URL = "https://cs.android.com/android/platform/superproject/+/master:device/google/cuttlefish/host/commands/assemble_cvd/boot_config.cc"]
+  persistent_vbmeta [label = "vbmeta", URL = "https://android.googlesource.com/platform/external/avb/+/master/README.md"]
+  frp
+  bootconfig [URL = "https://source.android.com/docs/core/architecture/bootloader/implementing-bootconfig"]
+
+  gpt_header [label = "GPT Header", URL = "https://en.wikipedia.org/wiki/GUID_Partition_Table"]
+  misc
+  boot [label = "boot_[ab]", URL = "https://source.android.com/docs/core/architecture/bootloader/boot-image-header"]
+  init_boot [label = "init_boot_[ab]", URL = "https://source.android.com/docs/core/architecture/bootloader/partitions/generic-boot"]
+  vendor_boot [label = "vendor_boot_[ab]", URL = "https://source.android.com/docs/core/architecture/bootloader/partitions/vendor-boot-partitions"]
+  vbmeta [label = "vbmeta_[ab]", URL = "https://android.googlesource.com/platform/external/avb/+/master/README.md"]
+  vbmeta_system [label = "vbmeta_system_[ab]", URL = "https://android.googlesource.com/platform/external/avb/+/master/README.md"]
+  super [URL = "https://source.android.com/docs/core/ota/dynamic_partitions/implement"]
+  userdata
+  metadata [URL = "https://source.android.com/docs/security/features/encryption/metadata"]
+  ap_rootfs [style = "dashed"]
+  gpt_footer [label = "GPT Footer", URL = "https://en.wikipedia.org/wiki/GUID_Partition_Table"]
+
+  product [color = "green", URL = "https://source.android.com/docs/core/architecture/bootloader/partitions/product-partitions"]
+  system [color = "green", URL = "https://source.android.com/docs/core/architecture"]
+  system_ext [color = "green", URL = "https://source.android.com/docs/core/architecture/bootloader/partitions/shared-system-image"]
+  system_dlkm [color = "green", URL = "https://source.android.com/docs/core/architecture/bootloader/partitions/gki-partitions"]
+
+  odm [color = "blue", URL = "https://source.android.com/docs/core/architecture/bootloader/partitions/odm-partitions"]
+  vendor [color = "blue", URL = "https://source.android.com/docs/core/architecture"]
+  vendor_dlkm [color = "blue", URL = "https://source.android.com/docs/core/architecture/bootloader/partitions/vendor-odm-dlkm-partition"]
+  odm_dlkm [color = "blue", URL = "https://source.android.com/docs/core/architecture/bootloader/partitions/vendor-odm-dlkm-partition"]
+
+  persistent -> uboot_env
+  persistent -> persistent_vbmeta
+  persistent -> frp
+  persistent -> bootconfig
+
+  overlay -> operating_system
+  operating_system -> gpt_header
+  operating_system -> misc
+  operating_system -> boot
+  operating_system -> init_boot
+  operating_system -> vendor_boot
+  operating_system -> vbmeta
+  operating_system -> vbmeta_system
+  operating_system -> super
+  operating_system -> userdata
+  operating_system -> metadata
+  operating_system -> ap_rootfs [style = "dashed"]
+  operating_system -> gpt_footer
+
+  super -> product [color = "green"]
+  super -> system [color = "green"]
+  super -> system_ext [color = "green"]
+  super -> system_dlkm [color = "green"]
+
+  super -> odm [color = "blue"]
+  super -> vendor [color = "blue"]
+  super -> vendor_dlkm [color = "blue"]
+  super -> odm_dlkm [color = "blue"]
+}
diff --git a/host/commands/assemble_cvd/doc/disk.png b/host/commands/assemble_cvd/doc/disk.png
new file mode 100644
index 0000000..1604b06
--- /dev/null
+++ b/host/commands/assemble_cvd/doc/disk.png
Binary files differ
diff --git a/host/commands/assemble_cvd/doc/disk.svg b/host/commands/assemble_cvd/doc/disk.svg
new file mode 100644
index 0000000..977c347
--- /dev/null
+++ b/host/commands/assemble_cvd/doc/disk.svg
@@ -0,0 +1,394 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN"
+ "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
+<!-- Generated by graphviz version 2.43.0 (0)
+ -->
+<!-- Title: %3 Pages: 1 -->
+<svg width="917pt" height="638pt"
+ viewBox="0.00 0.00 916.73 638.00" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
+<g id="graph0" class="graph" transform="scale(1 1) rotate(0) translate(4 634)">
+<title>%3</title>
+<polygon fill="white" stroke="transparent" points="-4,4 -4,-634 912.73,-634 912.73,4 -4,4"/>
+<!-- overlay -->
+<g id="node1" class="node">
+<title>overlay</title>
+<g id="a_node1"><a xlink:href="https://en.wikipedia.org/wiki/Qcow" xlink:title="Operating System Overlay (qcow2)">
+<ellipse fill="none" stroke="black" cx="137.14" cy="-315" rx="137.28" ry="18"/>
+<text text-anchor="middle" x="137.14" y="-311.3" font-family="Times,serif" font-size="14.00">Operating System Overlay (qcow2)</text>
+</a>
+</g>
+</g>
+<!-- operating_system -->
+<g id="node2" class="node">
+<title>operating_system</title>
+<g id="a_node2"><a xlink:href="https://source.android.com/docs/core/architecture/bootloader/partitions" xlink:title="Operating System (composite)">
+<ellipse fill="none" stroke="black" cx="429.87" cy="-315" rx="119.68" ry="18"/>
+<text text-anchor="middle" x="429.87" y="-311.3" font-family="Times,serif" font-size="14.00">Operating System (composite)</text>
+</a>
+</g>
+</g>
+<!-- overlay&#45;&gt;operating_system -->
+<g id="edge5" class="edge">
+<title>overlay&#45;&gt;operating_system</title>
+<path fill="none" stroke="black" d="M274.56,-315C282.95,-315 291.38,-315 299.74,-315"/>
+<polygon fill="black" stroke="black" points="299.89,-318.5 309.89,-315 299.89,-311.5 299.89,-318.5"/>
+</g>
+<!-- gpt_header -->
+<g id="node8" class="node">
+<title>gpt_header</title>
+<g id="a_node8"><a xlink:href="https://en.wikipedia.org/wiki/GUID_Partition_Table" xlink:title="GPT Header">
+<ellipse fill="none" stroke="black" cx="670.6" cy="-612" rx="54.69" ry="18"/>
+<text text-anchor="middle" x="670.6" y="-608.3" font-family="Times,serif" font-size="14.00">GPT Header</text>
+</a>
+</g>
+</g>
+<!-- operating_system&#45;&gt;gpt_header -->
+<g id="edge6" class="edge">
+<title>operating_system&#45;&gt;gpt_header</title>
+<path fill="none" stroke="black" d="M526.88,-325.72C535.32,-329.79 543.07,-335.11 549.45,-342 623.64,-422.11 514.81,-501.76 585.45,-585 591.93,-592.63 600.57,-598.13 609.86,-602.08"/>
+<polygon fill="black" stroke="black" points="608.7,-605.39 619.29,-605.54 611.11,-598.81 608.7,-605.39"/>
+</g>
+<!-- misc -->
+<g id="node9" class="node">
+<title>misc</title>
+<ellipse fill="none" stroke="black" cx="670.6" cy="-558" rx="27.9" ry="18"/>
+<text text-anchor="middle" x="670.6" y="-554.3" font-family="Times,serif" font-size="14.00">misc</text>
+</g>
+<!-- operating_system&#45;&gt;misc -->
+<g id="edge7" class="edge">
+<title>operating_system&#45;&gt;misc</title>
+<path fill="none" stroke="black" d="M526.02,-325.72C534.74,-329.78 542.78,-335.09 549.45,-342 608.83,-403.53 528.8,-466.95 585.45,-531 597.32,-544.42 615.87,-551.21 632.53,-554.63"/>
+<polygon fill="black" stroke="black" points="632.14,-558.12 642.59,-556.36 633.33,-551.22 632.14,-558.12"/>
+</g>
+<!-- boot -->
+<g id="node10" class="node">
+<title>boot</title>
+<g id="a_node10"><a xlink:href="https://source.android.com/docs/core/architecture/bootloader/boot-image-header" xlink:title="boot_[ab]">
+<ellipse fill="none" stroke="black" cx="670.6" cy="-504" rx="44.69" ry="18"/>
+<text text-anchor="middle" x="670.6" y="-500.3" font-family="Times,serif" font-size="14.00">boot_[ab]</text>
+</a>
+</g>
+</g>
+<!-- operating_system&#45;&gt;boot -->
+<g id="edge8" class="edge">
+<title>operating_system&#45;&gt;boot</title>
+<path fill="none" stroke="black" d="M524.93,-325.96C533.97,-329.96 542.39,-335.19 549.45,-342 594.16,-385.09 542.65,-432.01 585.45,-477 594.05,-486.03 605.66,-492.05 617.51,-496.06"/>
+<polygon fill="black" stroke="black" points="616.88,-499.52 627.46,-498.97 618.85,-492.8 616.88,-499.52"/>
+</g>
+<!-- init_boot -->
+<g id="node11" class="node">
+<title>init_boot</title>
+<g id="a_node11"><a xlink:href="https://source.android.com/docs/core/architecture/bootloader/partitions/generic-boot" xlink:title="init_boot_[ab]">
+<ellipse fill="none" stroke="black" cx="670.6" cy="-450" rx="61.19" ry="18"/>
+<text text-anchor="middle" x="670.6" y="-446.3" font-family="Times,serif" font-size="14.00">init_boot_[ab]</text>
+</a>
+</g>
+</g>
+<!-- operating_system&#45;&gt;init_boot -->
+<g id="edge9" class="edge">
+<title>operating_system&#45;&gt;init_boot</title>
+<path fill="none" stroke="black" d="M522.41,-326.52C532.19,-330.38 541.46,-335.43 549.45,-342 579.88,-367.02 556.14,-396.68 585.45,-423 592.01,-428.88 599.85,-433.48 608.08,-437.08"/>
+<polygon fill="black" stroke="black" points="606.89,-440.38 617.48,-440.74 609.43,-433.85 606.89,-440.38"/>
+</g>
+<!-- vendor_boot -->
+<g id="node12" class="node">
+<title>vendor_boot</title>
+<g id="a_node12"><a xlink:href="https://source.android.com/docs/core/architecture/bootloader/partitions/vendor-boot-partitions" xlink:title="vendor_boot_[ab]">
+<ellipse fill="none" stroke="black" cx="670.6" cy="-396" rx="73.39" ry="18"/>
+<text text-anchor="middle" x="670.6" y="-392.3" font-family="Times,serif" font-size="14.00">vendor_boot_[ab]</text>
+</a>
+</g>
+</g>
+<!-- operating_system&#45;&gt;vendor_boot -->
+<g id="edge10" class="edge">
+<title>operating_system&#45;&gt;vendor_boot</title>
+<path fill="none" stroke="black" d="M510.86,-328.37C524.07,-331.94 537.38,-336.41 549.45,-342 567.6,-350.4 567.69,-359.82 585.45,-369 593.18,-372.99 601.61,-376.54 610.05,-379.64"/>
+<polygon fill="black" stroke="black" points="608.95,-382.97 619.55,-382.96 611.26,-376.36 608.95,-382.97"/>
+</g>
+<!-- vbmeta -->
+<g id="node13" class="node">
+<title>vbmeta</title>
+<g id="a_node13"><a xlink:href="https://android.googlesource.com/platform/external/avb/+/master/README.md" xlink:title="vbmeta_[ab]">
+<ellipse fill="none" stroke="black" cx="670.6" cy="-342" rx="55.79" ry="18"/>
+<text text-anchor="middle" x="670.6" y="-338.3" font-family="Times,serif" font-size="14.00">vbmeta_[ab]</text>
+</a>
+</g>
+</g>
+<!-- operating_system&#45;&gt;vbmeta -->
+<g id="edge11" class="edge">
+<title>operating_system&#45;&gt;vbmeta</title>
+<path fill="none" stroke="black" d="M525.9,-325.75C553.33,-328.85 582.55,-332.16 607.43,-334.97"/>
+<polygon fill="black" stroke="black" points="607.12,-338.46 617.45,-336.1 607.9,-331.5 607.12,-338.46"/>
+</g>
+<!-- vbmeta_system -->
+<g id="node14" class="node">
+<title>vbmeta_system</title>
+<g id="a_node14"><a xlink:href="https://android.googlesource.com/platform/external/avb/+/master/README.md" xlink:title="vbmeta_system_[ab]">
+<ellipse fill="none" stroke="black" cx="670.6" cy="-288" rx="85.29" ry="18"/>
+<text text-anchor="middle" x="670.6" y="-284.3" font-family="Times,serif" font-size="14.00">vbmeta_system_[ab]</text>
+</a>
+</g>
+</g>
+<!-- operating_system&#45;&gt;vbmeta_system -->
+<g id="edge12" class="edge">
+<title>operating_system&#45;&gt;vbmeta_system</title>
+<path fill="none" stroke="black" d="M525.9,-304.25C545.41,-302.04 565.83,-299.74 584.94,-297.57"/>
+<polygon fill="black" stroke="black" points="585.56,-301.03 595.11,-296.42 584.78,-294.07 585.56,-301.03"/>
+</g>
+<!-- super -->
+<g id="node15" class="node">
+<title>super</title>
+<g id="a_node15"><a xlink:href="https://source.android.com/docs/core/ota/dynamic_partitions/implement" xlink:title="super">
+<ellipse fill="none" stroke="black" cx="670.6" cy="-234" rx="29.8" ry="18"/>
+<text text-anchor="middle" x="670.6" y="-230.3" font-family="Times,serif" font-size="14.00">super</text>
+</a>
+</g>
+</g>
+<!-- operating_system&#45;&gt;super -->
+<g id="edge13" class="edge">
+<title>operating_system&#45;&gt;super</title>
+<path fill="none" stroke="black" d="M477.44,-298.38C508.06,-287.54 549.06,-273.19 585.45,-261 601.29,-255.69 618.88,-250.02 633.86,-245.25"/>
+<polygon fill="black" stroke="black" points="635.29,-248.46 643.76,-242.1 633.17,-241.79 635.29,-248.46"/>
+</g>
+<!-- userdata -->
+<g id="node16" class="node">
+<title>userdata</title>
+<ellipse fill="none" stroke="black" cx="670.6" cy="-180" rx="40.09" ry="18"/>
+<text text-anchor="middle" x="670.6" y="-176.3" font-family="Times,serif" font-size="14.00">userdata</text>
+</g>
+<!-- operating_system&#45;&gt;userdata -->
+<g id="edge14" class="edge">
+<title>operating_system&#45;&gt;userdata</title>
+<path fill="none" stroke="black" d="M452.08,-297.05C480.73,-273.57 534.12,-232.44 585.45,-207 597.94,-200.81 612.16,-195.61 625.31,-191.49"/>
+<polygon fill="black" stroke="black" points="626.55,-194.77 635.12,-188.54 624.53,-188.07 626.55,-194.77"/>
+</g>
+<!-- metadata -->
+<g id="node17" class="node">
+<title>metadata</title>
+<g id="a_node17"><a xlink:href="https://source.android.com/docs/security/features/encryption/metadata" xlink:title="metadata">
+<ellipse fill="none" stroke="black" cx="670.6" cy="-126" rx="42.79" ry="18"/>
+<text text-anchor="middle" x="670.6" y="-122.3" font-family="Times,serif" font-size="14.00">metadata</text>
+</a>
+</g>
+</g>
+<!-- operating_system&#45;&gt;metadata -->
+<g id="edge15" class="edge">
+<title>operating_system&#45;&gt;metadata</title>
+<path fill="none" stroke="black" d="M443.35,-296.86C467.16,-263.57 522.37,-192.52 585.45,-153 596.53,-146.06 609.56,-140.75 622.01,-136.75"/>
+<polygon fill="black" stroke="black" points="623.24,-140.03 631.82,-133.82 621.24,-133.33 623.24,-140.03"/>
+</g>
+<!-- ap_rootfs -->
+<g id="node18" class="node">
+<title>ap_rootfs</title>
+<ellipse fill="none" stroke="black" stroke-dasharray="5,2" cx="670.6" cy="-72" rx="44.39" ry="18"/>
+<text text-anchor="middle" x="670.6" y="-68.3" font-family="Times,serif" font-size="14.00">ap_rootfs</text>
+</g>
+<!-- operating_system&#45;&gt;ap_rootfs -->
+<g id="edge16" class="edge">
+<title>operating_system&#45;&gt;ap_rootfs</title>
+<path fill="none" stroke="black" stroke-dasharray="5,2" d="M438.83,-296.94C457.97,-255.62 510.56,-153.61 585.45,-99 595.72,-91.51 608.16,-86.04 620.28,-82.07"/>
+<polygon fill="black" stroke="black" points="621.32,-85.41 629.89,-79.19 619.31,-78.71 621.32,-85.41"/>
+</g>
+<!-- gpt_footer -->
+<g id="node19" class="node">
+<title>gpt_footer</title>
+<g id="a_node19"><a xlink:href="https://en.wikipedia.org/wiki/GUID_Partition_Table" xlink:title="GPT Footer">
+<ellipse fill="none" stroke="black" cx="670.6" cy="-18" rx="51.99" ry="18"/>
+<text text-anchor="middle" x="670.6" y="-14.3" font-family="Times,serif" font-size="14.00">GPT Footer</text>
+</a>
+</g>
+</g>
+<!-- operating_system&#45;&gt;gpt_footer -->
+<g id="edge17" class="edge">
+<title>operating_system&#45;&gt;gpt_footer</title>
+<path fill="none" stroke="black" d="M436.14,-296.84C451.12,-248.47 498.56,-115.36 585.45,-45 593.77,-38.27 603.79,-33.2 613.98,-29.39"/>
+<polygon fill="black" stroke="black" points="615.39,-32.61 623.75,-26.11 613.16,-25.98 615.39,-32.61"/>
+</g>
+<!-- persistent -->
+<g id="node3" class="node">
+<title>persistent</title>
+<g id="a_node3"><a xlink:href="https://chromium.googlesource.com/chromiumos/platform/crosvm/+/refs/heads/main/disk/src/composite.rs" xlink:title="Persistent Disk (composite)">
+<ellipse fill="none" stroke="black" cx="137.14" cy="-450" rx="108.58" ry="18"/>
+<text text-anchor="middle" x="137.14" y="-446.3" font-family="Times,serif" font-size="14.00">Persistent Disk (composite)</text>
+</a>
+</g>
+</g>
+<!-- uboot_env -->
+<g id="node4" class="node">
+<title>uboot_env</title>
+<g id="a_node4"><a xlink:href="https://cs.android.com/android/platform/superproject/+/master:device/google/cuttlefish/host/commands/assemble_cvd/boot_config.cc" xlink:title="uboot_env">
+<ellipse fill="none" stroke="black" cx="429.87" cy="-531" rx="48.19" ry="18"/>
+<text text-anchor="middle" x="429.87" y="-527.3" font-family="Times,serif" font-size="14.00">uboot_env</text>
+</a>
+</g>
+</g>
+<!-- persistent&#45;&gt;uboot_env -->
+<g id="edge1" class="edge">
+<title>persistent&#45;&gt;uboot_env</title>
+<path fill="none" stroke="black" d="M186.06,-466.17C220.43,-477.48 267.94,-492.56 310.28,-504 332.17,-509.92 356.57,-515.62 377.52,-520.25"/>
+<polygon fill="black" stroke="black" points="376.99,-523.72 387.5,-522.43 378.48,-516.88 376.99,-523.72"/>
+</g>
+<!-- persistent_vbmeta -->
+<g id="node5" class="node">
+<title>persistent_vbmeta</title>
+<g id="a_node5"><a xlink:href="https://android.googlesource.com/platform/external/avb/+/master/README.md" xlink:title="vbmeta">
+<ellipse fill="none" stroke="black" cx="429.87" cy="-477" rx="37.09" ry="18"/>
+<text text-anchor="middle" x="429.87" y="-473.3" font-family="Times,serif" font-size="14.00">vbmeta</text>
+</a>
+</g>
+</g>
+<!-- persistent&#45;&gt;persistent_vbmeta -->
+<g id="edge2" class="edge">
+<title>persistent&#45;&gt;persistent_vbmeta</title>
+<path fill="none" stroke="black" d="M232.13,-458.73C282.79,-463.43 343.02,-469.03 383.37,-472.77"/>
+<polygon fill="black" stroke="black" points="383.16,-476.27 393.44,-473.71 383.8,-469.3 383.16,-476.27"/>
+</g>
+<!-- frp -->
+<g id="node6" class="node">
+<title>frp</title>
+<ellipse fill="none" stroke="black" cx="429.87" cy="-423" rx="27" ry="18"/>
+<text text-anchor="middle" x="429.87" y="-419.3" font-family="Times,serif" font-size="14.00">frp</text>
+</g>
+<!-- persistent&#45;&gt;frp -->
+<g id="edge3" class="edge">
+<title>persistent&#45;&gt;frp</title>
+<path fill="none" stroke="black" d="M232.13,-441.27C286.95,-436.18 352.96,-430.05 392.88,-426.34"/>
+<polygon fill="black" stroke="black" points="393.3,-429.82 402.93,-425.41 392.65,-422.85 393.3,-429.82"/>
+</g>
+<!-- bootconfig -->
+<g id="node7" class="node">
+<title>bootconfig</title>
+<g id="a_node7"><a xlink:href="https://source.android.com/docs/core/architecture/bootloader/implementing-bootconfig" xlink:title="bootconfig">
+<ellipse fill="none" stroke="black" cx="429.87" cy="-369" rx="48.99" ry="18"/>
+<text text-anchor="middle" x="429.87" y="-365.3" font-family="Times,serif" font-size="14.00">bootconfig</text>
+</a>
+</g>
+</g>
+<!-- persistent&#45;&gt;bootconfig -->
+<g id="edge4" class="edge">
+<title>persistent&#45;&gt;bootconfig</title>
+<path fill="none" stroke="black" d="M186.06,-433.83C220.43,-422.52 267.94,-407.44 310.28,-396 332,-390.13 356.18,-384.47 377.02,-379.86"/>
+<polygon fill="black" stroke="black" points="377.93,-383.24 386.95,-377.68 376.44,-376.4 377.93,-383.24"/>
+</g>
+<!-- product -->
+<g id="node20" class="node">
+<title>product</title>
+<g id="a_node20"><a xlink:href="https://source.android.com/docs/core/architecture/bootloader/partitions/product-partitions" xlink:title="product">
+<ellipse fill="none" stroke="green" cx="850.23" cy="-423" rx="37.89" ry="18"/>
+<text text-anchor="middle" x="850.23" y="-419.3" font-family="Times,serif" font-size="14.00">product</text>
+</a>
+</g>
+</g>
+<!-- super&#45;&gt;product -->
+<g id="edge18" class="edge">
+<title>super&#45;&gt;product</title>
+<path fill="none" stroke="green" d="M700.65,-236.34C718.85,-239.31 741.46,-245.99 755.74,-261 798.54,-305.99 751.92,-348.35 791.74,-396 796.25,-401.4 802.09,-405.79 808.31,-409.33"/>
+<polygon fill="green" stroke="green" points="807.07,-412.62 817.59,-413.93 810.19,-406.35 807.07,-412.62"/>
+</g>
+<!-- system -->
+<g id="node21" class="node">
+<title>system</title>
+<g id="a_node21"><a xlink:href="https://source.android.com/docs/core/architecture" xlink:title="system">
+<ellipse fill="none" stroke="green" cx="850.23" cy="-369" rx="36" ry="18"/>
+<text text-anchor="middle" x="850.23" y="-365.3" font-family="Times,serif" font-size="14.00">system</text>
+</a>
+</g>
+</g>
+<!-- super&#45;&gt;system -->
+<g id="edge19" class="edge">
+<title>super&#45;&gt;system</title>
+<path fill="none" stroke="green" d="M700.29,-237.19C718.1,-240.51 740.44,-247.27 755.74,-261 785.06,-287.32 764.17,-313.85 791.74,-342 796.99,-347.36 803.51,-351.78 810.26,-355.38"/>
+<polygon fill="green" stroke="green" points="808.97,-358.64 819.5,-359.79 811.99,-352.33 808.97,-358.64"/>
+</g>
+<!-- system_ext -->
+<g id="node22" class="node">
+<title>system_ext</title>
+<g id="a_node22"><a xlink:href="https://source.android.com/docs/core/architecture/bootloader/partitions/shared-system-image" xlink:title="system_ext">
+<ellipse fill="none" stroke="green" cx="850.23" cy="-315" rx="50.89" ry="18"/>
+<text text-anchor="middle" x="850.23" y="-311.3" font-family="Times,serif" font-size="14.00">system_ext</text>
+</a>
+</g>
+</g>
+<!-- super&#45;&gt;system_ext -->
+<g id="edge20" class="edge">
+<title>super&#45;&gt;system_ext</title>
+<path fill="none" stroke="green" d="M699.03,-240.32C715.94,-244.81 737.75,-251.7 755.74,-261 773.51,-270.18 774.6,-277.7 791.74,-288 796.83,-291.06 802.31,-294.02 807.81,-296.8"/>
+<polygon fill="green" stroke="green" points="806.48,-300.04 817,-301.28 809.54,-293.75 806.48,-300.04"/>
+</g>
+<!-- system_dlkm -->
+<g id="node23" class="node">
+<title>system_dlkm</title>
+<g id="a_node23"><a xlink:href="https://source.android.com/docs/core/architecture/bootloader/partitions/gki-partitions" xlink:title="system_dlkm">
+<ellipse fill="none" stroke="green" cx="850.23" cy="-261" rx="58.49" ry="18"/>
+<text text-anchor="middle" x="850.23" y="-257.3" font-family="Times,serif" font-size="14.00">system_dlkm</text>
+</a>
+</g>
+</g>
+<!-- super&#45;&gt;system_dlkm -->
+<g id="edge21" class="edge">
+<title>super&#45;&gt;system_dlkm</title>
+<path fill="none" stroke="green" d="M699.74,-238.28C723.21,-241.85 757.5,-247.06 787.33,-251.59"/>
+<polygon fill="green" stroke="green" points="786.83,-255.06 797.25,-253.1 787.89,-248.14 786.83,-255.06"/>
+</g>
+<!-- odm -->
+<g id="node24" class="node">
+<title>odm</title>
+<g id="a_node24"><a xlink:href="https://source.android.com/docs/core/architecture/bootloader/partitions/odm-partitions" xlink:title="odm">
+<ellipse fill="none" stroke="blue" cx="850.23" cy="-207" rx="27" ry="18"/>
+<text text-anchor="middle" x="850.23" y="-203.3" font-family="Times,serif" font-size="14.00">odm</text>
+</a>
+</g>
+</g>
+<!-- super&#45;&gt;odm -->
+<g id="edge22" class="edge">
+<title>super&#45;&gt;odm</title>
+<path fill="none" stroke="blue" d="M699.74,-229.72C730.55,-225.04 780.02,-217.52 813.53,-212.43"/>
+<polygon fill="blue" stroke="blue" points="814.45,-215.83 823.81,-210.86 813.4,-208.91 814.45,-215.83"/>
+</g>
+<!-- vendor -->
+<g id="node25" class="node">
+<title>vendor</title>
+<g id="a_node25"><a xlink:href="https://source.android.com/docs/core/architecture" xlink:title="vendor">
+<ellipse fill="none" stroke="blue" cx="850.23" cy="-153" rx="35.19" ry="18"/>
+<text text-anchor="middle" x="850.23" y="-149.3" font-family="Times,serif" font-size="14.00">vendor</text>
+</a>
+</g>
+</g>
+<!-- super&#45;&gt;vendor -->
+<g id="edge23" class="edge">
+<title>super&#45;&gt;vendor</title>
+<path fill="none" stroke="blue" d="M699.03,-227.68C715.94,-223.19 737.75,-216.3 755.74,-207 773.51,-197.82 774.6,-190.3 791.74,-180 798.58,-175.89 806.16,-171.94 813.5,-168.39"/>
+<polygon fill="blue" stroke="blue" points="815.03,-171.54 822.6,-164.13 812.06,-165.2 815.03,-171.54"/>
+</g>
+<!-- vendor_dlkm -->
+<g id="node26" class="node">
+<title>vendor_dlkm</title>
+<g id="a_node26"><a xlink:href="https://source.android.com/docs/core/architecture/bootloader/partitions/vendor-odm-dlkm-partition" xlink:title="vendor_dlkm">
+<ellipse fill="none" stroke="blue" cx="850.23" cy="-99" rx="57.69" ry="18"/>
+<text text-anchor="middle" x="850.23" y="-95.3" font-family="Times,serif" font-size="14.00">vendor_dlkm</text>
+</a>
+</g>
+</g>
+<!-- super&#45;&gt;vendor_dlkm -->
+<g id="edge24" class="edge">
+<title>super&#45;&gt;vendor_dlkm</title>
+<path fill="none" stroke="blue" d="M700.29,-230.81C718.1,-227.49 740.44,-220.73 755.74,-207 785.06,-180.68 764.17,-154.15 791.74,-126 794.78,-122.9 798.24,-120.11 801.93,-117.62"/>
+<polygon fill="blue" stroke="blue" points="804.03,-120.44 810.84,-112.31 800.46,-114.42 804.03,-120.44"/>
+</g>
+<!-- odm_dlkm -->
+<g id="node27" class="node">
+<title>odm_dlkm</title>
+<g id="a_node27"><a xlink:href="https://source.android.com/docs/core/architecture/bootloader/partitions/vendor-odm-dlkm-partition" xlink:title="odm_dlkm">
+<ellipse fill="none" stroke="blue" cx="850.23" cy="-45" rx="50.09" ry="18"/>
+<text text-anchor="middle" x="850.23" y="-41.3" font-family="Times,serif" font-size="14.00">odm_dlkm</text>
+</a>
+</g>
+</g>
+<!-- super&#45;&gt;odm_dlkm -->
+<g id="edge25" class="edge">
+<title>super&#45;&gt;odm_dlkm</title>
+<path fill="none" stroke="blue" d="M700.65,-231.66C718.85,-228.69 741.46,-222.01 755.74,-207 798.54,-162.01 751.92,-119.65 791.74,-72 794.93,-68.18 798.78,-64.87 802.96,-62.01"/>
+<polygon fill="blue" stroke="blue" points="805.07,-64.83 811.9,-56.74 801.51,-58.8 805.07,-64.83"/>
+</g>
+</g>
+</svg>
diff --git a/host/commands/assemble_cvd/flags.cc b/host/commands/assemble_cvd/flags.cc
index 609ea4b..c5ef034 100644
--- a/host/commands/assemble_cvd/flags.cc
+++ b/host/commands/assemble_cvd/flags.cc
@@ -1,6 +1,22 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 #include "host/commands/assemble_cvd/flags.h"
 
 #include <android-base/logging.h>
+#include <android-base/parsebool.h>
 #include <android-base/parseint.h>
 #include <android-base/strings.h>
 #include <gflags/gflags.h>
@@ -20,15 +36,27 @@
 #include <unordered_map>
 
 #include <fruit/fruit.h>
+#include <google/protobuf/text_format.h>
 
-#include "common/libs/utils/environment.h"
+#include "launch_cvd.pb.h"
+
+#include "common/libs/utils/base64.h"
+#include "common/libs/utils/contains.h"
 #include "common/libs/utils/files.h"
 #include "common/libs/utils/flag_parser.h"
+#include "common/libs/utils/network.h"
+#include "flags.h"
+#include "flags_defaults.h"
 #include "host/commands/assemble_cvd/alloc.h"
 #include "host/commands/assemble_cvd/boot_config.h"
+#include "host/commands/assemble_cvd/boot_image_utils.h"
 #include "host/commands/assemble_cvd/disk_flags.h"
+#include "host/commands/assemble_cvd/display_flags.h"
 #include "host/libs/config/config_flag.h"
+#include "host/libs/config/esp.h"
 #include "host/libs/config/host_tools_version.h"
+#include "host/libs/config/instance_nums.h"
+#include "host/libs/graphics_detector/graphics_configuration.h"
 #include "host/libs/graphics_detector/graphics_detector.h"
 #include "host/libs/vm_manager/crosvm_manager.h"
 #include "host/libs/vm_manager/gem5_manager.h"
@@ -42,294 +70,346 @@
 using google::FlagSettingMode::SET_FLAGS_DEFAULT;
 using google::FlagSettingMode::SET_FLAGS_VALUE;
 
-DEFINE_int32(cpus, 2, "Virtual CPU count.");
-DEFINE_string(data_policy, "use_existing", "How to handle userdata partition."
-            " Either 'use_existing', 'create_if_missing', 'resize_up_to', or "
-            "'always_create'.");
-DEFINE_int32(blank_data_image_mb, 0,
+#define DEFINE_vec DEFINE_string
+#define DEFINE_proto DEFINE_string
+#define GET_FLAG_STR_VALUE(name) GetFlagStrValueForInstances(FLAGS_ ##name, instances_size, #name, name_to_default_value)
+#define GET_FLAG_INT_VALUE(name) GetFlagIntValueForInstances(FLAGS_ ##name, instances_size, #name, name_to_default_value)
+#define GET_FLAG_BOOL_VALUE(name) GetFlagBoolValueForInstances(FLAGS_ ##name, instances_size, #name, name_to_default_value)
+
+DEFINE_proto(displays_textproto, CF_DEFAULTS_DISPLAYS_TEXTPROTO,
+              "Text Proto input for multi-vd multi-displays");
+DEFINE_proto(displays_binproto, CF_DEFAULTS_DISPLAYS_TEXTPROTO,
+              "Binary Proto input for multi-vd multi-displays");
+
+DEFINE_vec(cpus, std::to_string(CF_DEFAULTS_CPUS),
+              "Virtual CPU count.");
+DEFINE_vec(data_policy, CF_DEFAULTS_DATA_POLICY,
+              "How to handle userdata partition."
+              " Either 'use_existing', 'create_if_missing', 'resize_up_to', or "
+              "'always_create'.");
+DEFINE_vec(blank_data_image_mb,
+              std::to_string(CF_DEFAULTS_BLANK_DATA_IMAGE_MB),
              "The size of the blank data image to generate, MB.");
-DEFINE_int32(gdb_port, 0,
+DEFINE_vec(gdb_port, std::to_string(CF_DEFAULTS_GDB_PORT),
              "Port number to spawn kernel gdb on e.g. -gdb_port=1234. The"
              "kernel must have been built with CONFIG_RANDOMIZE_BASE "
              "disabled.");
 
-constexpr const char kDisplayHelp[] =
-    "Comma separated key=value pairs of display properties. Supported "
-    "properties:\n"
-    " 'width': required, width of the display in pixels\n"
-    " 'height': required, height of the display in pixels\n"
-    " 'dpi': optional, default 320, density of the display\n"
-    " 'refresh_rate_hz': optional, default 60, display refresh rate in Hertz\n"
-    ". Example usage: \n"
-    "--display0=width=1280,height=720\n"
-    "--display1=width=1440,height=900,dpi=480,refresh_rate_hz=30\n";
-
 // TODO(b/192495477): combine these into a single repeatable '--display' flag
 // when assemble_cvd switches to using the new flag parsing library.
-DEFINE_string(display0, "", kDisplayHelp);
-DEFINE_string(display1, "", kDisplayHelp);
-DEFINE_string(display2, "", kDisplayHelp);
-DEFINE_string(display3, "", kDisplayHelp);
+DEFINE_string(display0, CF_DEFAULTS_DISPLAY0, cuttlefish::kDisplayHelp);
+DEFINE_string(display1, CF_DEFAULTS_DISPLAY1, cuttlefish::kDisplayHelp);
+DEFINE_string(display2, CF_DEFAULTS_DISPLAY2, cuttlefish::kDisplayHelp);
+DEFINE_string(display3, CF_DEFAULTS_DISPLAY3, cuttlefish::kDisplayHelp);
 
 // TODO(b/171305898): mark these as deprecated after multi-display is fully
 // enabled.
-DEFINE_int32(x_res, 0, "Width of the screen in pixels");
-DEFINE_int32(y_res, 0, "Height of the screen in pixels");
-DEFINE_int32(dpi, 0, "Pixels per inch for the screen");
-DEFINE_int32(refresh_rate_hz, 60, "Screen refresh rate in Hertz");
-DEFINE_string(kernel_path, "",
+DEFINE_string(x_res, "0", "Width of the screen in pixels");
+DEFINE_string(y_res, "0", "Height of the screen in pixels");
+DEFINE_string(dpi, "0", "Pixels per inch for the screen");
+DEFINE_string(refresh_rate_hz, "60", "Screen refresh rate in Hertz");
+DEFINE_vec(kernel_path, CF_DEFAULTS_KERNEL_PATH,
               "Path to the kernel. Overrides the one from the boot image");
-DEFINE_string(initramfs_path, "", "Path to the initramfs");
-DEFINE_string(extra_kernel_cmdline, "",
+DEFINE_vec(initramfs_path, CF_DEFAULTS_INITRAMFS_PATH,
+              "Path to the initramfs");
+DEFINE_string(extra_kernel_cmdline, CF_DEFAULTS_EXTRA_KERNEL_CMDLINE,
               "Additional flags to put on the kernel command line");
-DEFINE_string(extra_bootconfig_args, "",
+DEFINE_string(extra_bootconfig_args, CF_DEFAULTS_EXTRA_BOOTCONFIG_ARGS,
               "Space-separated list of extra bootconfig args. "
               "Note: overwriting an existing bootconfig argument "
               "requires ':=' instead of '='.");
-DEFINE_bool(guest_enforce_security, true,
+DEFINE_vec(guest_enforce_security,
+              cuttlefish::BoolToString(CF_DEFAULTS_GUEST_ENFORCE_SECURITY),
             "Whether to run in enforcing mode (non permissive).");
-DEFINE_int32(memory_mb, 0, "Total amount of memory available for guest, MB.");
-DEFINE_string(serial_number, cuttlefish::ForCurrentInstance("CUTTLEFISHCVD"),
+DEFINE_vec(memory_mb, std::to_string(CF_DEFAULTS_MEMORY_MB),
+             "Total amount of memory available for guest, MB.");
+DEFINE_vec(serial_number, CF_DEFAULTS_SERIAL_NUMBER,
               "Serial number to use for the device");
-DEFINE_bool(use_random_serial, false,
+DEFINE_vec(use_random_serial, cuttlefish::BoolToString(CF_DEFAULTS_USE_RANDOM_SERIAL),
             "Whether to use random serial for the device.");
-DEFINE_string(vm_manager, "",
+DEFINE_vec(vm_manager, CF_DEFAULTS_VM_MANAGER,
               "What virtual machine manager to use, one of {qemu_cli, crosvm}");
-DEFINE_string(gpu_mode, cuttlefish::kGpuModeAuto,
+DEFINE_vec(gpu_mode, CF_DEFAULTS_GPU_MODE,
               "What gpu configuration to use, one of {auto, drm_virgl, "
               "gfxstream, guest_swiftshader}");
-DEFINE_string(hwcomposer, cuttlefish::kHwComposerAuto,
+DEFINE_vec(hwcomposer, CF_DEFAULTS_HWCOMPOSER,
               "What hardware composer to use, one of {auto, drm, ranchu} ");
-DEFINE_string(gpu_capture_binary, "",
+DEFINE_vec(gpu_capture_binary, CF_DEFAULTS_GPU_CAPTURE_BINARY,
               "Path to the GPU capture binary to use when capturing GPU traces"
               "(ngfx, renderdoc, etc)");
-DEFINE_bool(enable_gpu_udmabuf,
-            false,
-            "Use the udmabuf driver for zero-copy virtio-gpu");
+DEFINE_vec(enable_gpu_udmabuf,
+           cuttlefish::BoolToString(CF_DEFAULTS_ENABLE_GPU_UDMABUF),
+           "Use the udmabuf driver for zero-copy virtio-gpu");
 
-DEFINE_bool(enable_gpu_angle,
-            false,
-            "Use ANGLE to provide GLES implementation (always true for"
-            " guest_swiftshader");
-DEFINE_bool(deprecated_boot_completed, false, "Log boot completed message to"
-            " host kernel. This is only used during transition of our clients."
-            " Will be deprecated soon.");
-
-DEFINE_bool(use_allocd, false,
+DEFINE_vec(use_allocd, CF_DEFAULTS_USE_ALLOCD?"true":"false",
             "Acquire static resources from the resource allocator daemon.");
-DEFINE_bool(enable_minimal_mode, false,
-            "Only enable the minimum features to boot a cuttlefish device and "
-            "support minimal UI interactions.\nNote: Currently only supports "
-            "handheld/phone targets");
-DEFINE_bool(pause_in_bootloader, false,
-            "Stop the bootflow in u-boot. You can continue the boot by connecting "
-            "to the device console and typing in \"boot\".");
-DEFINE_bool(enable_host_bluetooth, true,
+DEFINE_vec(
+    enable_minimal_mode, CF_DEFAULTS_ENABLE_MINIMAL_MODE ? "true" : "false",
+    "Only enable the minimum features to boot a cuttlefish device and "
+    "support minimal UI interactions.\nNote: Currently only supports "
+    "handheld/phone targets");
+DEFINE_vec(
+    pause_in_bootloader, CF_DEFAULTS_PAUSE_IN_BOOTLOADER?"true":"false",
+    "Stop the bootflow in u-boot. You can continue the boot by connecting "
+    "to the device console and typing in \"boot\".");
+DEFINE_bool(enable_host_bluetooth, CF_DEFAULTS_ENABLE_HOST_BLUETOOTH,
             "Enable the root-canal which is Bluetooth emulator in the host.");
+DEFINE_int32(
+    rootcanal_instance_num, CF_DEFAULTS_ENABLE_ROOTCANAL_INSTANCE_NUM,
+    "If it is greater than 0, use an existing rootcanal instance which is "
+    "launched from cuttlefish instance "
+    "with rootcanal_instance_num. Else, launch a new rootcanal instance");
+DEFINE_string(rootcanal_args, CF_DEFAULTS_ROOTCANAL_ARGS,
+              "Space-separated list of rootcanal args. ");
+DEFINE_bool(enable_host_uwb, CF_DEFAULTS_ENABLE_HOST_UWB,
+            "Enable Pica in the host.");
+DEFINE_int32(
+    pica_instance_num, CF_DEFAULTS_ENABLE_PICA_INSTANCE_NUM,
+    "If it is greater than 0, use an existing pica instance which is "
+    "launched from cuttlefish instance "
+    "with pica_instance_num. Else, launch a new pica instance");
+DEFINE_bool(netsim, CF_DEFAULTS_NETSIM,
+            "[Experimental] Connect all radios to netsim.");
+
+DEFINE_bool(netsim_bt, CF_DEFAULTS_NETSIM_BT,
+            "[Experimental] Connect Bluetooth radio to netsim.");
 
 DEFINE_string(bluetooth_controller_properties_file,
-              "etc/rootcanal/data/controller_properties.json",
+              CF_DEFAULTS_BLUETOOTH_CONTROLLER_PROPERTIES_FILE,
               "The configuartion file path for root-canal which is a Bluetooth "
               "emulator.");
 DEFINE_string(
-    bluetooth_default_commands_file, "etc/rootcanal/data/default_commands",
+    bluetooth_default_commands_file,
+    CF_DEFAULTS_BLUETOOTH_DEFAULT_COMMANDS_FILE,
     "The default commands which root-canal executes when it launches.");
 
 /**
- *
  * crosvm sandbox feature requires /var/empty and seccomp directory
  *
- * --enable-sandbox: will enforce the sandbox feature
- *                   failing to meet the requirements result in assembly_cvd termination
- *
- * --enable-sandbox=no, etc: will disable sandbox
- *
- * no option given: it is enabled if /var/empty exists and an empty directory
- *                             or if it does not exist and can be created
- *
- * if seccomp dir doesn't exist, assembly_cvd will terminate
- *
- * See SetDefaultFlagsForCrosvm()
- *
+ * Also see SetDefaultFlagsForCrosvm()
  */
-DEFINE_bool(enable_sandbox,
-            false,
-            "Enable crosvm sandbox. Use this when you are sure about what you are doing.");
-
-static const std::string kSeccompDir =
-    std::string("usr/share/crosvm/") + cuttlefish::HostArchStr() + "-linux-gnu/seccomp";
-DEFINE_string(seccomp_policy_dir, DefaultHostArtifactsPath(kSeccompDir),
-              "With sandbox'ed crosvm, overrieds the security comp policy directory");
-
-DEFINE_bool(start_webrtc, false, "Whether to start the webrtc process.");
+DEFINE_vec(
+    enable_sandbox, cuttlefish::BoolToString(CF_DEFAULTS_ENABLE_SANDBOX),
+    "Enable crosvm sandbox assuming /var/empty and seccomp directories exist. "
+    "--noenable-sandbox will disable crosvm sandbox. "
+    "When no option is given, sandbox is disabled if Cuttlefish is running "
+    "inside a container, or if GPU is enabled (b/152323505), "
+    "or if the empty /var/empty directory does not exist and "
+    "cannot be created. Otherwise, sandbox is enabled on the supported "
+    "architecture when no option is given.");
 
 DEFINE_string(
-        webrtc_assets_dir, DefaultHostArtifactsPath("usr/share/webrtc/assets"),
-        "[Experimental] Path to WebRTC webpage assets.");
+    seccomp_policy_dir, CF_DEFAULTS_SECCOMP_POLICY_DIR,
+    "With sandbox'ed crosvm, overrides the seccomp policy directory");
 
-DEFINE_string(
-        webrtc_certs_dir, DefaultHostArtifactsPath("usr/share/webrtc/certs"),
-        "[Experimental] Path to WebRTC certificates directory.");
+DEFINE_vec(start_webrtc, cuttlefish::BoolToString(CF_DEFAULTS_START_WEBRTC),
+            "Whether to start the webrtc process.");
 
-DEFINE_string(
-        webrtc_public_ip,
-        "0.0.0.0",
-        "[Deprecated] Ignored, webrtc can figure out its IP address");
+DEFINE_vec(webrtc_assets_dir, CF_DEFAULTS_WEBRTC_ASSETS_DIR,
+              "[Experimental] Path to WebRTC webpage assets.");
 
-DEFINE_bool(
-        webrtc_enable_adb_websocket,
-        false,
-        "[Experimental] If enabled, exposes local adb service through a websocket.");
+DEFINE_string(webrtc_certs_dir, CF_DEFAULTS_WEBRTC_CERTS_DIR,
+              "[Experimental] Path to WebRTC certificates directory.");
 
 static constexpr auto HOST_OPERATOR_SOCKET_PATH = "/run/cuttlefish/operator";
 
 DEFINE_bool(
     // The actual default for this flag is set with SetCommandLineOption() in
-    // GetKernelConfigsAndSetDefaults() at the end of this file.
-    start_webrtc_sig_server, true,
+    // GetGuestConfigsAndSetDefaults() at the end of this file.
+    start_webrtc_sig_server, CF_DEFAULTS_START_WEBRTC_SIG_SERVER,
     "Whether to start the webrtc signaling server. This option only applies to "
     "the first instance, if multiple instances are launched they'll share the "
     "same signaling server, which is owned by the first one.");
 
-DEFINE_string(webrtc_sig_server_addr, "",
+DEFINE_string(webrtc_sig_server_addr, CF_DEFAULTS_WEBRTC_SIG_SERVER_ADDR,
               "The address of the webrtc signaling server.");
 
 DEFINE_int32(
-    webrtc_sig_server_port, 443,
+    webrtc_sig_server_port, CF_DEFAULTS_WEBRTC_SIG_SERVER_PORT,
     "The port of the signaling server if started outside of this launch. If "
     "-start_webrtc_sig_server is given it will choose 8443+instance_num1-1 and "
     "this parameter is ignored.");
 
 // TODO (jemoreira): We need a much bigger range to reliably support several
 // simultaneous connections.
-DEFINE_string(tcp_port_range, "15550:15558",
+DEFINE_vec(tcp_port_range, CF_DEFAULTS_TCP_PORT_RANGE,
               "The minimum and maximum TCP port numbers to allocate for ICE "
               "candidates as 'min:max'. To use any port just specify '0:0'");
 
-DEFINE_string(udp_port_range, "15550:15558",
+DEFINE_vec(udp_port_range, CF_DEFAULTS_UDP_PORT_RANGE,
               "The minimum and maximum UDP port numbers to allocate for ICE "
               "candidates as 'min:max'. To use any port just specify '0:0'");
 
-DEFINE_string(webrtc_sig_server_path, "/register_device",
+DEFINE_string(webrtc_sig_server_path, CF_DEFAULTS_WEBRTC_SIG_SERVER_PATH,
               "The path section of the URL where the device should be "
               "registered with the signaling server.");
 
-DEFINE_bool(webrtc_sig_server_secure, true,
-            "Whether the WebRTC signaling server uses secure protocols (WSS vs WS).");
+DEFINE_bool(
+    webrtc_sig_server_secure, CF_DEFAULTS_WEBRTC_SIG_SERVER_SECURE,
+    "Whether the WebRTC signaling server uses secure protocols (WSS vs WS).");
 
-DEFINE_bool(verify_sig_server_certificate, false,
+DEFINE_bool(verify_sig_server_certificate,
+            CF_DEFAULTS_VERIFY_SIG_SERVER_CERTIFICATE,
             "Whether to verify the signaling server's certificate with a "
             "trusted signing authority (Disallow self signed certificates). "
             "This is ignored if an insecure server is configured.");
 
-DEFINE_string(sig_server_headers_file, "",
-              "Path to a file containing HTTP headers to be included in the "
-              "connection to the signaling server. Each header should be on a "
-              "line by itself in the form <name>: <value>");
-
-DEFINE_string(
-    webrtc_device_id, "cvd-{num}",
+DEFINE_vec(
+    webrtc_device_id, CF_DEFAULTS_WEBRTC_DEVICE_ID,
     "The for the device to register with the signaling server. Every "
     "appearance of the substring '{num}' in the device id will be substituted "
     "with the instance number to support multiple instances");
 
-DEFINE_string(uuid, cuttlefish::ForCurrentInstance(cuttlefish::kDefaultUuidPrefix),
+DEFINE_vec(uuid, CF_DEFAULTS_UUID,
               "UUID to use for the device. Random if not specified");
-DEFINE_bool(daemon, false,
+DEFINE_vec(daemon, CF_DEFAULTS_DAEMON?"true":"false",
             "Run cuttlefish in background, the launcher exits on boot "
             "completed/failed");
 
-DEFINE_string(setupwizard_mode, "DISABLED",
-            "One of DISABLED,OPTIONAL,REQUIRED");
+DEFINE_vec(setupwizard_mode, CF_DEFAULTS_SETUPWIZARD_MODE,
+              "One of DISABLED,OPTIONAL,REQUIRED");
+DEFINE_vec(enable_bootanimation,
+           cuttlefish::BoolToString(CF_DEFAULTS_ENABLE_BOOTANIMATION),
+           "Whether to enable the boot animation.");
 
-DEFINE_string(qemu_binary_dir, "/usr/bin",
+DEFINE_string(qemu_binary_dir, CF_DEFAULTS_QEMU_BINARY_DIR,
               "Path to the directory containing the qemu binary to use");
-DEFINE_string(crosvm_binary, HostBinaryPath("crosvm"),
+DEFINE_string(crosvm_binary, CF_DEFAULTS_CROSVM_BINARY,
               "The Crosvm binary to use");
-DEFINE_string(gem5_binary_dir, HostBinaryPath("gem5"),
+DEFINE_vec(gem5_binary_dir, CF_DEFAULTS_GEM5_BINARY_DIR,
               "Path to the gem5 build tree root");
-DEFINE_bool(restart_subprocesses, true, "Restart any crashed host process");
-DEFINE_bool(enable_vehicle_hal_grpc_server, true, "Enables the vehicle HAL "
-            "emulation gRPC server on the host");
-DEFINE_string(bootloader, "", "Bootloader binary path");
-DEFINE_string(boot_slot, "", "Force booting into the given slot. If empty, "
-             "the slot will be chosen based on the misc partition if using a "
-             "bootloader. It will default to 'a' if empty and not using a "
-             "bootloader.");
-DEFINE_int32(num_instances, 1, "Number of Android guests to launch");
-DEFINE_string(report_anonymous_usage_stats, "", "Report anonymous usage "
-            "statistics for metrics collection and analysis.");
-DEFINE_string(ril_dns, "8.8.8.8", "DNS address of mobile network (RIL)");
-DEFINE_bool(kgdb, false, "Configure the virtual device for debugging the kernel "
-                         "with kgdb/kdb. The kernel must have been built with "
-                         "kgdb support, and serial console must be enabled.");
+DEFINE_vec(gem5_checkpoint_dir, CF_DEFAULTS_GEM5_CHECKPOINT_DIR,
+              "Path to the gem5 restore checkpoint directory");
+DEFINE_vec(gem5_debug_file, CF_DEFAULTS_GEM5_DEBUG_FILE,
+              "The file name where gem5 saves debug prints and logs");
+DEFINE_string(gem5_debug_flags, CF_DEFAULTS_GEM5_DEBUG_FLAGS,
+              "The debug flags gem5 uses to print debugs to file");
 
-DEFINE_bool(start_gnss_proxy, false, "Whether to start the gnss proxy.");
+DEFINE_vec(restart_subprocesses,
+              cuttlefish::BoolToString(CF_DEFAULTS_RESTART_SUBPROCESSES),
+              "Restart any crashed host process");
+DEFINE_vec(bootloader, CF_DEFAULTS_BOOTLOADER, "Bootloader binary path");
+DEFINE_vec(boot_slot, CF_DEFAULTS_BOOT_SLOT,
+              "Force booting into the given slot. If empty, "
+              "the slot will be chosen based on the misc partition if using a "
+              "bootloader. It will default to 'a' if empty and not using a "
+              "bootloader.");
+DEFINE_int32(num_instances, CF_DEFAULTS_NUM_INSTANCES,
+             "Number of Android guests to launch");
+DEFINE_string(instance_nums, CF_DEFAULTS_INSTANCE_NUMS,
+              "A comma-separated list of instance numbers "
+              "to use. Mutually exclusive with base_instance_num.");
+DEFINE_string(report_anonymous_usage_stats,
+              CF_DEFAULTS_REPORT_ANONYMOUS_USAGE_STATS,
+              "Report anonymous usage "
+              "statistics for metrics collection and analysis.");
+DEFINE_vec(ril_dns, CF_DEFAULTS_RIL_DNS,
+              "DNS address of mobile network (RIL)");
+DEFINE_vec(kgdb, cuttlefish::BoolToString(CF_DEFAULTS_KGDB),
+            "Configure the virtual device for debugging the kernel "
+            "with kgdb/kdb. The kernel must have been built with "
+            "kgdb support, and serial console must be enabled.");
 
-DEFINE_string(gnss_file_path, "",
-              "Local gnss file path for the gnss proxy");
+DEFINE_vec(start_gnss_proxy, cuttlefish::BoolToString(CF_DEFAULTS_START_GNSS_PROXY),
+            "Whether to start the gnss proxy.");
+
+DEFINE_vec(gnss_file_path, CF_DEFAULTS_GNSS_FILE_PATH,
+              "Local gnss raw measurement file path for the gnss proxy");
+
+DEFINE_vec(fixed_location_file_path, CF_DEFAULTS_FIXED_LOCATION_FILE_PATH,
+              "Local fixed location file path for the gnss proxy");
 
 // by default, this modem-simulator is disabled
-DEFINE_bool(enable_modem_simulator, true,
-            "Enable the modem simulator to process RILD AT commands");
+DEFINE_vec(enable_modem_simulator,
+              CF_DEFAULTS_ENABLE_MODEM_SIMULATOR ? "true" : "false",
+              "Enable the modem simulator to process RILD AT commands");
 // modem_simulator_sim_type=2 for test CtsCarrierApiTestCases
-DEFINE_int32(modem_simulator_sim_type, 1,
-             "Sim type: 1 for normal, 2 for CtsCarrierApiTestCases");
+DEFINE_vec(modem_simulator_sim_type,
+              std::to_string(CF_DEFAULTS_MODEM_SIMULATOR_SIM_TYPE),
+              "Sim type: 1 for normal, 2 for CtsCarrierApiTestCases");
 
-DEFINE_bool(console, false, "Enable the serial console");
+DEFINE_vec(console, cuttlefish::BoolToString(CF_DEFAULTS_CONSOLE),
+              "Enable the serial console");
 
-DEFINE_bool(vhost_net, false, "Enable vhost acceleration of networking");
+DEFINE_vec(enable_kernel_log,
+           cuttlefish::BoolToString(CF_DEFAULTS_ENABLE_KERNEL_LOG),
+            "Enable kernel console/dmesg logging");
+
+DEFINE_vec(vhost_net, cuttlefish::BoolToString(CF_DEFAULTS_VHOST_NET),
+            "Enable vhost acceleration of networking");
 
 DEFINE_string(
-    vhost_user_mac80211_hwsim, "",
+    vhost_user_mac80211_hwsim, CF_DEFAULTS_VHOST_USER_MAC80211_HWSIM,
     "Unix socket path for vhost-user of mac80211_hwsim, typically served by "
     "wmediumd. You can set this when using an external wmediumd instance.");
-DEFINE_string(wmediumd_config, "",
+DEFINE_string(wmediumd_config, CF_DEFAULTS_WMEDIUMD_CONFIG,
               "Path to the wmediumd config file. When missing, the default "
               "configuration is used which adds MAC addresses for up to 16 "
               "cuttlefish instances including AP.");
-DEFINE_string(ap_rootfs_image,
-              DefaultHostArtifactsPath("etc/openwrt/images/openwrt_rootfs"),
+
+DEFINE_string(ap_rootfs_image, CF_DEFAULTS_AP_ROOTFS_IMAGE,
               "rootfs image for AP instance");
-DEFINE_string(ap_kernel_image,
-              DefaultHostArtifactsPath("etc/openwrt/images/kernel_for_openwrt"),
+DEFINE_string(ap_kernel_image, CF_DEFAULTS_AP_KERNEL_IMAGE,
               "kernel image for AP instance");
 
-DEFINE_bool(record_screen, false, "Enable screen recording. "
-                                  "Requires --start_webrtc");
+DEFINE_vec(record_screen, cuttlefish::BoolToString(CF_DEFAULTS_RECORD_SCREEN),
+           "Enable screen recording. "
+           "Requires --start_webrtc");
 
-DEFINE_bool(smt, false, "Enable simultaneous multithreading (SMT/HT)");
+DEFINE_vec(smt, cuttlefish::BoolToString(CF_DEFAULTS_SMT),
+           "Enable simultaneous multithreading (SMT/HT)");
 
-DEFINE_int32(vsock_guest_cid,
-             cuttlefish::GetDefaultVsockCid(),
-             "vsock_guest_cid is used to determine the guest vsock cid as well as all the ports"
-             "of all vsock servers such as tombstone or modem simulator(s)."
-             "The vsock ports and guest vsock cid are a function of vsock_guest_cid and instance number."
-             "An instance number of i th instance is determined by --num_instances=N and --base_instance_num=B"
-             "The instance number of i th instance is B + i where i in [0, N-1] and B >= 1."
-             "See --num_instances, and --base_instance_num for more information"
-             "If --vsock_guest_cid=C is given and C >= 3, the guest vsock cid is C + i. Otherwise,"
-             "the guest vsock cid is 2 + instance number, which is 2 + (B + i)."
-             "If --vsock_guest_cid is not given, each vsock server port number for i th instance is"
-             "base + instance number - 1. vsock_guest_cid is by default B + i + 2."
-             "Thus, by default, each port is base + vsock_guest_cid - 3."
-             "The same formula holds when --vsock_guest_cid=C is given, for algorithm's sake."
-             "Each vsock server port number is base + C - 3.");
+DEFINE_vec(
+    vsock_guest_cid, std::to_string(CF_DEFAULTS_VSOCK_GUEST_CID),
+    "vsock_guest_cid is used to determine the guest vsock cid as well as all "
+    "the ports "
+    "of all vsock servers such as tombstone or modem simulator(s). "
+    "The vsock ports and guest vsock cid are a function of vsock_guest_cid and "
+    "instance number. "
+    "An instance number of i th instance is determined by --num_instances=N "
+    "and --base_instance_num=B. "
+    "The instance number of i th instance is B + i where i in [0, N-1] and B "
+    ">= 1. "
+    "See --num_instances, and --base_instance_num for more information. "
+    "If --vsock_guest_cid=C is given and C >= 3, the guest vsock cid is C + i. "
+    "Otherwise, "
+    "the guest vsock cid is 2 + instance number, which is 2 + (B + i). "
+    "If --vsock_guest_cid is not given, each vsock server port number for i th "
+    "instance is "
+    "base + instance number - 1. vsock_guest_cid is by default B + i + 2. "
+    "Thus, by default, each port is base + vsock_guest_cid - 3. "
+    "The same formula holds when --vsock_guest_cid=C is given, for algorithm's "
+    "sake. "
+    "Each vsock server port number is base + C - 3.");
 
-DEFINE_string(secure_hals, "keymint,gatekeeper",
+DEFINE_string(secure_hals, CF_DEFAULTS_SECURE_HALS,
               "Which HALs to use enable host security features for. Supports "
               "keymint and gatekeeper at the moment.");
 
-DEFINE_bool(use_sdcard, true, "Create blank SD-Card image and expose to guest");
+DEFINE_vec(use_sdcard, CF_DEFAULTS_USE_SDCARD?"true":"false",
+            "Create blank SD-Card image and expose to guest");
 
-DEFINE_bool(protected_vm, false, "Boot in Protected VM mode");
+DEFINE_vec(protected_vm, cuttlefish::BoolToString(CF_DEFAULTS_PROTECTED_VM),
+            "Boot in Protected VM mode");
 
-DEFINE_bool(enable_audio, cuttlefish::HostArch() != cuttlefish::Arch::Arm64,
+DEFINE_vec(mte, cuttlefish::BoolToString(CF_DEFAULTS_MTE), "Enable MTE");
+
+DEFINE_vec(enable_audio, cuttlefish::BoolToString(CF_DEFAULTS_ENABLE_AUDIO),
             "Whether to play or capture audio");
 
-DEFINE_uint32(camera_server_port, 0, "camera vsock port");
+DEFINE_vec(camera_server_port, std::to_string(CF_DEFAULTS_CAMERA_SERVER_PORT),
+              "camera vsock port");
 
-DEFINE_string(userdata_format, "f2fs", "The userdata filesystem format");
+DEFINE_vec(userdata_format, CF_DEFAULTS_USERDATA_FORMAT,
+              "The userdata filesystem format");
+
+DEFINE_bool(use_overlay, CF_DEFAULTS_USE_OVERLAY,
+            "Capture disk writes in an overlay. This is a "
+            "prerequisite for powerwash_cvd or multiple instances.");
+
+DEFINE_vec(modem_simulator_count,
+              std::to_string(CF_DEFAULTS_MODEM_SIMULATOR_COUNT),
+              "Modem simulator count corresponding to maximum sim number");
 
 DECLARE_string(assembly_dir);
 DECLARE_string(boot_image);
@@ -361,122 +441,338 @@
   return stream.str();
 }
 
-std::optional<CuttlefishConfig::DisplayConfig> ParseDisplayConfig(
-    const std::string& flag) {
-  if (flag.empty()) {
-    return std::nullopt;
-  }
-
-  std::unordered_map<std::string, std::string> props;
-
-  const std::vector<std::string> pairs = android::base::Split(flag, ",");
-  for (const std::string& pair : pairs) {
-    const std::vector<std::string> keyvalue = android::base::Split(pair, "=");
-    CHECK_EQ(2, keyvalue.size()) << "Invalid display: " << flag;
-
-    const std::string& prop_key = keyvalue[0];
-    const std::string& prop_val = keyvalue[1];
-    props[prop_key] = prop_val;
-  }
-
-  CHECK(props.find("width") != props.end())
-      << "Display configuration missing 'width' in " << flag;
-  CHECK(props.find("height") != props.end())
-      << "Display configuration missing 'height' in " << flag;
-
-  int display_width;
-  CHECK(android::base::ParseInt(props["width"], &display_width))
-      << "Display configuration invalid 'width' in " << flag;
-
-  int display_height;
-  CHECK(android::base::ParseInt(props["height"], &display_height))
-      << "Display configuration invalid 'height' in " << flag;
-
-  int display_dpi = 320;
-  auto display_dpi_it = props.find("dpi");
-  if (display_dpi_it != props.end()) {
-    CHECK(android::base::ParseInt(display_dpi_it->second, &display_dpi))
-        << "Display configuration invalid 'dpi' in " << flag;
-  }
-
-  int display_refresh_rate_hz = 60;
-  auto display_refresh_rate_hz_it = props.find("refresh_rate_hz");
-  if (display_refresh_rate_hz_it != props.end()) {
-    CHECK(android::base::ParseInt(display_refresh_rate_hz_it->second,
-                                  &display_refresh_rate_hz))
-        << "Display configuration invalid 'refresh_rate_hz' in " << flag;
-  }
-
-  return CuttlefishConfig::DisplayConfig{
-      .width = display_width,
-      .height = display_height,
-      .dpi = display_dpi,
-      .refresh_rate_hz = display_refresh_rate_hz,
-  };
-}
-
 #ifdef __ANDROID__
-Result<KernelConfig> ReadKernelConfig() {
-  // QEMU isn't on Android, so always follow host arch
-  KernelConfig ret{};
-  ret.target_arch = HostArch();
-  ret.bootconfig_supported = true;
-  return ret;
+Result<std::vector<GuestConfig>> ReadGuestConfig() {
+  std::vector<GuestConfig> rets;
+  auto instance_nums =
+      CF_EXPECT(InstanceNumsCalculator().FromGlobalGflags().Calculate());
+  for (int instance_index = 0; instance_index < instance_nums.size(); instance_index++) {
+    // QEMU isn't on Android, so always follow host arch
+    GuestConfig ret{};
+    ret.target_arch = HostArch();
+    ret.bootconfig_supported = true;
+    ret.android_version_number = "0.0.0";
+    rets.push_back(ret);
+  }
+  return rets;
 }
 #else
-Result<KernelConfig> ReadKernelConfig() {
-  // extract-ikconfig can be called directly on the boot image since it looks
-  // for the ikconfig header in the image before extracting the config list.
-  // This code is liable to break if the boot image ever includes the
-  // ikconfig header outside the kernel.
-  const std::string kernel_image_path =
-      FLAGS_kernel_path.size() ? FLAGS_kernel_path : FLAGS_boot_image;
-
-  Command ikconfig_cmd(HostBinaryPath("extract-ikconfig"));
-  ikconfig_cmd.AddParameter(kernel_image_path);
+Result<std::vector<GuestConfig>> ReadGuestConfig() {
+  std::vector<GuestConfig> guest_configs;
+  std::vector<std::string> boot_image =
+      android::base::Split(FLAGS_boot_image, ",");
+  std::vector<std::string> kernel_path =
+      android::base::Split(FLAGS_kernel_path, ",");
+  std::string kernel_image_path = "";
+  std::string cur_boot_image;
+  std::string cur_kernel_path;
 
   std::string current_path = StringFromEnv("PATH", "");
   std::string bin_folder = DefaultHostArtifactsPath("bin");
-  ikconfig_cmd.SetEnvironment({"PATH=" + current_path + ":" + bin_folder});
+  std::string new_path = "PATH=";
+  new_path += current_path;
+  new_path += ":";
+  new_path += bin_folder;
+  auto instance_nums =
+      CF_EXPECT(InstanceNumsCalculator().FromGlobalGflags().Calculate());
+  for (int instance_index = 0; instance_index < instance_nums.size(); instance_index++) {
+    // extract-ikconfig can be called directly on the boot image since it looks
+    // for the ikconfig header in the image before extracting the config list.
+    // This code is liable to break if the boot image ever includes the
+    // ikconfig header outside the kernel.
+    cur_kernel_path = "";
+    if (instance_index < kernel_path.size()) {
+      cur_kernel_path = kernel_path[instance_index];
+    }
 
-  std::string ikconfig_path =
-      StringFromEnv("TEMP", "/tmp") + "/ikconfig.XXXXXX";
-  auto ikconfig_fd = SharedFD::Mkstemp(&ikconfig_path);
-  CF_EXPECT(ikconfig_fd->IsOpen(),
-            "Unable to create ikconfig file: " << ikconfig_fd->StrError());
-  ikconfig_cmd.RedirectStdIO(Subprocess::StdIOChannel::kStdOut, ikconfig_fd);
+    cur_boot_image = "";
+    if (instance_index < boot_image.size()) {
+      cur_boot_image = boot_image[instance_index];
+    }
 
-  auto ikconfig_proc = ikconfig_cmd.Start();
-  CF_EXPECT(ikconfig_proc.Started() && ikconfig_proc.Wait() == 0,
-            "Failed to extract ikconfig from " << kernel_image_path);
+    if (cur_kernel_path.size() > 0) {
+      kernel_image_path = cur_kernel_path;
+    } else if (cur_boot_image.size() > 0) {
+      kernel_image_path = cur_boot_image;
+    }
 
-  std::string config = ReadFile(ikconfig_path);
+    Command ikconfig_cmd(HostBinaryPath("extract-ikconfig"));
+    ikconfig_cmd.AddParameter(kernel_image_path);
+    ikconfig_cmd.SetEnvironment({new_path});
 
-  KernelConfig kernel_config;
-  if (config.find("\nCONFIG_ARM=y") != std::string::npos) {
-    kernel_config.target_arch = Arch::Arm;
-  } else if (config.find("\nCONFIG_ARM64=y") != std::string::npos) {
-    kernel_config.target_arch = Arch::Arm64;
-  } else if (config.find("\nCONFIG_X86_64=y") != std::string::npos) {
-    kernel_config.target_arch = Arch::X86_64;
-  } else if (config.find("\nCONFIG_X86=y") != std::string::npos) {
-    kernel_config.target_arch = Arch::X86;
-  } else {
-    return CF_ERR("Unknown target architecture");
+    std::string ikconfig_path =
+        StringFromEnv("TEMP", "/tmp") + "/ikconfig.XXXXXX";
+    auto ikconfig_fd = SharedFD::Mkstemp(&ikconfig_path);
+    CF_EXPECT(ikconfig_fd->IsOpen(),
+              "Unable to create ikconfig file: " << ikconfig_fd->StrError());
+    ikconfig_cmd.RedirectStdIO(Subprocess::StdIOChannel::kStdOut, ikconfig_fd);
+
+    auto ikconfig_proc = ikconfig_cmd.Start();
+    CF_EXPECT(ikconfig_proc.Started() && ikconfig_proc.Wait() == 0,
+              "Failed to extract ikconfig from " << kernel_image_path);
+
+    std::string config = ReadFile(ikconfig_path);
+
+    GuestConfig guest_config;
+    if (config.find("\nCONFIG_ARM=y") != std::string::npos) {
+      guest_config.target_arch = Arch::Arm;
+    } else if (config.find("\nCONFIG_ARM64=y") != std::string::npos) {
+      guest_config.target_arch = Arch::Arm64;
+    } else if (config.find("\nCONFIG_ARCH_RV64I=y") != std::string::npos) {
+      guest_config.target_arch = Arch::RiscV64;
+    } else if (config.find("\nCONFIG_X86_64=y") != std::string::npos) {
+      guest_config.target_arch = Arch::X86_64;
+    } else if (config.find("\nCONFIG_X86=y") != std::string::npos) {
+      guest_config.target_arch = Arch::X86;
+    } else {
+      return CF_ERR("Unknown target architecture");
+    }
+    guest_config.bootconfig_supported =
+        config.find("\nCONFIG_BOOT_CONFIG=y") != std::string::npos;
+    // Once all Cuttlefish kernel versions are at least 5.15, this code can be
+    // removed. CONFIG_CRYPTO_HCTR2=y will always be set.
+    guest_config.hctr2_supported =
+        config.find("\nCONFIG_CRYPTO_HCTR2=y") != std::string::npos;
+
+    unlink(ikconfig_path.c_str());
+    guest_config.android_version_number =
+        CF_EXPECT(ReadAndroidVersionFromBootImage(cur_boot_image),
+                  "Failed to read guest's android version");
+    ;
+    guest_configs.push_back(guest_config);
   }
-  kernel_config.bootconfig_supported =
-      config.find("\nCONFIG_BOOT_CONFIG=y") != std::string::npos;
-
-  unlink(ikconfig_path.c_str());
-  return kernel_config;
+  return guest_configs;
 }
+
 #endif  // #ifdef __ANDROID__
 
+template <typename ProtoType>
+Result<ProtoType> ParseTextProtoFlagHelper(const std::string& flag_value,
+                                       const std::string& flag_name) {
+  ProtoType proto_result;
+  google::protobuf::TextFormat::Parser p;
+  CF_EXPECT(p.ParseFromString(flag_value, &proto_result),
+            "Failed to parse: " << flag_name << ", value: " << flag_value);
+  return proto_result;
+}
+
+template <typename ProtoType>
+Result<ProtoType> ParseBinProtoFlagHelper(const std::string& flag_value,
+                                       const std::string& flag_name) {
+  ProtoType proto_result;
+  std::vector<uint8_t> output;
+  CF_EXPECT(DecodeBase64(flag_value, &output));
+  std::string serialized = std::string(output.begin(), output.end());
+
+  CF_EXPECT(proto_result.ParseFromString(serialized),
+            "Failed to parse binary proto, flag: "<< flag_name << ", value: " << flag_value);
+  return proto_result;
+}
+
+Result<std::vector<std::vector<CuttlefishConfig::DisplayConfig>>>
+    ParseDisplaysProto() {
+  auto proto_result = FLAGS_displays_textproto.empty() ? \
+  ParseBinProtoFlagHelper<InstancesDisplays>(FLAGS_displays_binproto, "displays_binproto") : \
+  ParseTextProtoFlagHelper<InstancesDisplays>(FLAGS_displays_textproto, "displays_textproto");
+
+  std::vector<std::vector<CuttlefishConfig::DisplayConfig>> result;
+  for (int i=0; i<proto_result->instances_size(); i++) {
+    std::vector<CuttlefishConfig::DisplayConfig> display_configs;
+    const InstanceDisplays& launch_cvd_instance = proto_result->instances(i);
+    for (int display_num=0; display_num<launch_cvd_instance.displays_size(); display_num++) {
+      const InstanceDisplay& display = launch_cvd_instance.displays(display_num);
+
+      // use same code logic from ParseDisplayConfig
+      int display_dpi = CF_DEFAULTS_DISPLAY_DPI;
+      if (display.dpi() != 0) {
+        display_dpi = display.dpi();
+      }
+
+      int display_refresh_rate_hz = CF_DEFAULTS_DISPLAY_REFRESH_RATE;
+      if (display.refresh_rate_hertz() != 0) {
+        display_refresh_rate_hz = display.refresh_rate_hertz();
+      }
+
+      display_configs.push_back(CuttlefishConfig::DisplayConfig{
+        .width = display.width(),
+        .height = display.height(),
+        .dpi = display_dpi,
+        .refresh_rate_hz = display_refresh_rate_hz,
+        });
+    }
+    result.push_back(display_configs);
+  }
+  return result;
+}
+
+Result<bool> ParseBool(const std::string& flag_str,
+                        const std::string& flag_name) {
+  auto result = android::base::ParseBool(flag_str);
+  CF_EXPECT(result != android::base::ParseBoolResult::kError,
+            "Failed to parse value \"" << flag_str
+            << "\" for " << flag_name);
+  if (result == android::base::ParseBoolResult::kTrue) {
+    return true;
+  }
+  return false;
+}
+
+Result<std::unordered_map<int, std::string>> CreateNumToWebrtcDeviceIdMap(
+    const CuttlefishConfig& tmp_config_obj,
+    const std::vector<std::int32_t>& instance_nums,
+    const std::string& webrtc_device_id_flag) {
+  std::unordered_map<int, std::string> output_map;
+  if (webrtc_device_id_flag.empty()) {
+    for (const auto num : instance_nums) {
+      const auto const_instance = tmp_config_obj.ForInstance(num);
+      output_map[num] = const_instance.instance_name();
+    }
+    return output_map;
+  }
+  auto tokens = android::base::Tokenize(webrtc_device_id_flag, ",");
+  CF_EXPECT(tokens.size() == 1 || tokens.size() == instance_nums.size(),
+            "--webrtc_device_ids provided " << tokens.size()
+                                            << " tokens"
+                                               " while 1 or "
+                                            << instance_nums.size()
+                                            << " is expected.");
+  CF_EXPECT(!tokens.empty(), "--webrtc_device_ids is ill-formatted");
+
+  std::vector<std::string> device_ids;
+  if (tokens.size() != instance_nums.size()) {
+    /* this is only possible when tokens.size() == 1
+     * and instance_nums.size() > 1. The token must include {num}
+     * so that the token pattern can be expanded to multiple instances.
+     */
+    auto device_id = tokens.front();
+    CF_EXPECT(device_id.find("{num}") != std::string::npos,
+              "If one webrtc_device_ids is given for multiple instances, "
+                  << " {num} should be included in webrtc_device_id.");
+    device_ids = std::move(
+        std::vector<std::string>(instance_nums.size(), tokens.front()));
+  }
+
+  if (tokens.size() == instance_nums.size()) {
+    // doesn't have to include {num}
+    device_ids = std::move(tokens);
+  }
+
+  auto itr = device_ids.begin();
+  for (const auto num : instance_nums) {
+    std::string_view device_id_view(itr->data(), itr->size());
+    output_map[num] = android::base::StringReplace(device_id_view, "{num}",
+                                                   std::to_string(num), true);
+    ++itr;
+  }
+  return output_map;
+}
+
+/**
+ * Returns a mapping between flag name and "gflags default_value" as strings for flags
+ * defined in the binary.
+ */
+std::map<std::string, std::string> CurrentFlagsToDefaultValue() {
+  std::map<std::string, std::string> name_to_default_value;
+  std::vector<gflags::CommandLineFlagInfo> self_flags;
+  gflags::GetAllFlags(&self_flags);
+  for (auto& flag : self_flags) {
+    name_to_default_value[flag.name] = flag.default_value;
+  }
+  return name_to_default_value;
+}
+
+Result<std::vector<bool>> GetFlagBoolValueForInstances(
+    const std::string& flag_values, int32_t instances_size, const std::string& flag_name,
+    std::map<std::string, std::string>& name_to_default_value) {
+  std::vector<std::string> flag_vec = android::base::Split(flag_values, ",");
+  std::vector<bool> value_vec(instances_size);
+
+  CF_EXPECT(name_to_default_value.find(flag_name) != name_to_default_value.end());
+  std::vector<std::string> default_value_vec =  android::base::Split(name_to_default_value[flag_name], ",");
+
+  for (int instance_index=0; instance_index<instances_size; instance_index++) {
+    if (instance_index >= flag_vec.size()) {
+      value_vec[instance_index] = CF_EXPECT(ParseBool(flag_vec[0], flag_name));
+    } else {
+      if (flag_vec[instance_index] == "unset" || flag_vec[instance_index] == "\"unset\"") {
+        std::string default_value = default_value_vec[0];
+        if (instance_index < default_value_vec.size()) {
+          default_value = default_value_vec[instance_index];
+        }
+        value_vec[instance_index] = CF_EXPECT(ParseBool(default_value, flag_name));
+      } else {
+        value_vec[instance_index] = CF_EXPECT(ParseBool(flag_vec[instance_index], flag_name));
+      }
+    }
+  }
+  return value_vec;
+}
+
+Result<std::vector<int>> GetFlagIntValueForInstances(
+    const std::string& flag_values, int32_t instances_size, const std::string& flag_name,
+    std::map<std::string, std::string>& name_to_default_value) {
+  std::vector<std::string> flag_vec = android::base::Split(flag_values, ",");
+  std::vector<int> value_vec(instances_size);
+
+  CF_EXPECT(name_to_default_value.find(flag_name) != name_to_default_value.end());
+  std::vector<std::string> default_value_vec =  android::base::Split(name_to_default_value[flag_name], ",");
+
+  for (int instance_index=0; instance_index<instances_size; instance_index++) {
+    if (instance_index >= flag_vec.size()) {
+      CF_EXPECT(android::base::ParseInt(flag_vec[0].c_str(), &value_vec[instance_index]),
+      "Failed to parse value \"" << flag_vec[0] << "\" for " << flag_name);
+    } else {
+      if (flag_vec[instance_index] == "unset" || flag_vec[instance_index] == "\"unset\"") {
+        std::string default_value = default_value_vec[0];
+        if (instance_index < default_value_vec.size()) {
+          default_value = default_value_vec[instance_index];
+        }
+        CF_EXPECT(android::base::ParseInt(default_value,
+        &value_vec[instance_index]),
+        "Failed to parse value \"" << default_value << "\" for " << flag_name);
+      } else {
+        CF_EXPECT(android::base::ParseInt(flag_vec[instance_index].c_str(),
+        &value_vec[instance_index]),
+        "Failed to parse value \"" << flag_vec[instance_index] << "\" for " << flag_name);
+      }
+    }
+  }
+  return value_vec;
+}
+
+Result<std::vector<std::string>> GetFlagStrValueForInstances(
+    const std::string& flag_values, int32_t instances_size,
+    const std::string& flag_name, std::map<std::string, std::string>& name_to_default_value) {
+  std::vector<std::string> flag_vec = android::base::Split(flag_values, ",");
+  std::vector<std::string> value_vec(instances_size);
+
+  CF_EXPECT(name_to_default_value.find(flag_name) != name_to_default_value.end());
+  std::vector<std::string> default_value_vec =  android::base::Split(name_to_default_value[flag_name], ",");
+
+  for (int instance_index=0; instance_index<instances_size; instance_index++) {
+    if (instance_index >= flag_vec.size()) {
+      value_vec[instance_index] = flag_vec[0];
+    } else {
+      if (flag_vec[instance_index] == "unset" || flag_vec[instance_index] == "\"unset\"") {
+        std::string default_value = default_value_vec[0];
+        if (instance_index < default_value_vec.size()) {
+          default_value = default_value_vec[instance_index];
+        }
+        value_vec[instance_index] = default_value;
+      } else {
+        value_vec[instance_index] = flag_vec[instance_index];
+      }
+    }
+  }
+  return value_vec;
+}
+
 } // namespace
 
-CuttlefishConfig InitializeCuttlefishConfiguration(
-    const std::string& root_dir, int modem_simulator_count,
-    KernelConfig kernel_config, fruit::Injector<>& injector) {
+Result<CuttlefishConfig> InitializeCuttlefishConfiguration(
+    const std::string& root_dir,
+    const std::vector<GuestConfig>& guest_configs,
+    fruit::Injector<>& injector, const FetcherConfig& fetcher_config) {
   CuttlefishConfig tmp_config_obj;
 
   for (const auto& fragment : injector.getMultibindings<ConfigFragment>()) {
@@ -486,179 +782,43 @@
 
   tmp_config_obj.set_root_dir(root_dir);
 
-  tmp_config_obj.set_target_arch(kernel_config.target_arch);
-  tmp_config_obj.set_bootconfig_supported(kernel_config.bootconfig_supported);
-  auto vmm = GetVmManager(FLAGS_vm_manager, kernel_config.target_arch);
+  // TODO(weihsu), b/250988697:
+  // FLAGS_vm_manager used too early, have to handle this vectorized string early
+  // Currently, all instances should use same vmm, added checking here
+  std::vector<std::string> vm_manager_vec =
+      android::base::Split(FLAGS_vm_manager, ",");
+  for (int i=1; i<vm_manager_vec.size(); i++) {
+    CF_EXPECT(
+        vm_manager_vec[0] == vm_manager_vec[i],
+        "All instances should have same vm_manager, " << FLAGS_vm_manager);
+  }
+
+  // TODO(weihsu), b/250988697: moved bootconfig_supported and hctr2_supported
+  // into each instance, but target_arch is still in todo
+  // target_arch should be in instance later
+  auto vmm = GetVmManager(vm_manager_vec[0], guest_configs[0].target_arch);
   if (!vmm) {
-    LOG(FATAL) << "Invalid vm_manager: " << FLAGS_vm_manager;
+    LOG(FATAL) << "Invalid vm_manager: " << vm_manager_vec[0];
   }
-  tmp_config_obj.set_vm_manager(FLAGS_vm_manager);
-
-  std::vector<CuttlefishConfig::DisplayConfig> display_configs;
-
-  auto display0 = ParseDisplayConfig(FLAGS_display0);
-  if (display0) {
-    display_configs.push_back(*display0);
-  }
-  auto display1 = ParseDisplayConfig(FLAGS_display1);
-  if (display1) {
-    display_configs.push_back(*display1);
-  }
-  auto display2 = ParseDisplayConfig(FLAGS_display2);
-  if (display2) {
-    display_configs.push_back(*display2);
-  }
-  auto display3 = ParseDisplayConfig(FLAGS_display3);
-  if (display3) {
-    display_configs.push_back(*display3);
-  }
-
-  if (FLAGS_x_res > 0 && FLAGS_y_res > 0) {
-    if (display_configs.empty()) {
-      display_configs.push_back({
-          .width = FLAGS_x_res,
-          .height = FLAGS_y_res,
-          .dpi = FLAGS_dpi,
-          .refresh_rate_hz = FLAGS_refresh_rate_hz,
-      });
-    } else {
-      LOG(WARNING) << "Ignoring --x_res and --y_res when --displayN specified.";
-    }
-  }
-
-  tmp_config_obj.set_display_configs(display_configs);
+  tmp_config_obj.set_vm_manager(vm_manager_vec[0]);
 
   const GraphicsAvailability graphics_availability =
     GetGraphicsAvailabilityWithSubprocessCheck();
 
   LOG(DEBUG) << graphics_availability;
 
-  tmp_config_obj.set_gpu_mode(FLAGS_gpu_mode);
-  if (tmp_config_obj.gpu_mode() != kGpuModeAuto &&
-      tmp_config_obj.gpu_mode() != kGpuModeDrmVirgl &&
-      tmp_config_obj.gpu_mode() != kGpuModeGfxStream &&
-      tmp_config_obj.gpu_mode() != kGpuModeGuestSwiftshader) {
-    LOG(FATAL) << "Invalid gpu_mode: " << FLAGS_gpu_mode;
-  }
-  if (tmp_config_obj.gpu_mode() == kGpuModeAuto) {
-    if (ShouldEnableAcceleratedRendering(graphics_availability)) {
-      LOG(INFO) << "GPU auto mode: detected prerequisites for accelerated "
-                   "rendering support.";
-      if (FLAGS_vm_manager == QemuManager::name()) {
-        LOG(INFO) << "Enabling --gpu_mode=drm_virgl.";
-        tmp_config_obj.set_gpu_mode(kGpuModeDrmVirgl);
-      } else {
-        LOG(INFO) << "Enabling --gpu_mode=gfxstream.";
-        tmp_config_obj.set_gpu_mode(kGpuModeGfxStream);
-      }
-    } else {
-      LOG(INFO) << "GPU auto mode: did not detect prerequisites for "
-                   "accelerated rendering support, enabling "
-                   "--gpu_mode=guest_swiftshader.";
-      tmp_config_obj.set_gpu_mode(kGpuModeGuestSwiftshader);
-    }
-  } else if (tmp_config_obj.gpu_mode() == kGpuModeGfxStream ||
-             tmp_config_obj.gpu_mode() == kGpuModeDrmVirgl) {
-    if (!ShouldEnableAcceleratedRendering(graphics_availability)) {
-      LOG(ERROR) << "--gpu_mode="
-                 << tmp_config_obj.gpu_mode()
-                 << " was requested but the prerequisites for accelerated "
-                    "rendering were not detected so the device may not "
-                    "function correctly. Please consider switching to "
-                    "--gpu_mode=auto or --gpu_mode=guest_swiftshader.";
-    }
-  }
-
-  tmp_config_obj.set_restart_subprocesses(FLAGS_restart_subprocesses);
-  tmp_config_obj.set_gpu_capture_binary(FLAGS_gpu_capture_binary);
-  if (!tmp_config_obj.gpu_capture_binary().empty()) {
-    CHECK(tmp_config_obj.gpu_mode() == kGpuModeGfxStream)
-        << "GPU capture only supported with --gpu_mode=gfxstream";
-
-    // GPU capture runs in a detached mode where the "launcher" process
-    // intentionally exits immediately.
-    CHECK(!tmp_config_obj.restart_subprocesses())
-        << "GPU capture only supported with --norestart_subprocesses";
-  }
-
-  tmp_config_obj.set_hwcomposer(FLAGS_hwcomposer);
-  if (!tmp_config_obj.hwcomposer().empty()) {
-    if (tmp_config_obj.hwcomposer() == kHwComposerRanchu) {
-      CHECK(tmp_config_obj.gpu_mode() != kGpuModeDrmVirgl)
-        << "ranchu hwcomposer not supported with --gpu_mode=drm_virgl";
-    }
-  }
-
-  if (tmp_config_obj.hwcomposer() == kHwComposerAuto) {
-      if (tmp_config_obj.gpu_mode() == kGpuModeDrmVirgl) {
-        tmp_config_obj.set_hwcomposer(kHwComposerDrm);
-      } else {
-        tmp_config_obj.set_hwcomposer(kHwComposerRanchu);
-      }
-  }
-
-  // The device needs to avoid having both hwcomposer2.4 and hwcomposer3
-  // services running at the same time so warn the user to manually build
-  // in drm_hwcomposer when needed.
-  if (tmp_config_obj.hwcomposer() == kHwComposerAuto) {
-    LOG(WARNING) << "In order to run with --hwcomposer=drm. Please make sure "
-                    "Cuttlefish was built with "
-                    "TARGET_ENABLE_DRMHWCOMPOSER=true.";
-  }
-
-  tmp_config_obj.set_enable_gpu_udmabuf(FLAGS_enable_gpu_udmabuf);
-  tmp_config_obj.set_enable_gpu_angle(FLAGS_enable_gpu_angle);
-
-  // Sepolicy rules need to be updated to support gpu mode. Temporarily disable
-  // auto-enabling sandbox when gpu is enabled (b/152323505).
-  if (tmp_config_obj.gpu_mode() != kGpuModeGuestSwiftshader) {
-    SetCommandLineOptionWithMode("enable_sandbox", "false", SET_FLAGS_DEFAULT);
-  }
-
-  if (vmm->ConfigureGraphics(tmp_config_obj).empty()) {
-    LOG(FATAL) << "Invalid (gpu_mode=," << FLAGS_gpu_mode <<
-               " hwcomposer= " << FLAGS_hwcomposer <<
-               ") does not work with vm_manager=" << FLAGS_vm_manager;
-  }
-
-  CHECK(!FLAGS_smt || FLAGS_cpus % 2 == 0)
-      << "CPUs must be a multiple of 2 in SMT mode";
-  tmp_config_obj.set_cpus(FLAGS_cpus);
-  tmp_config_obj.set_smt(FLAGS_smt);
-
-  tmp_config_obj.set_memory_mb(FLAGS_memory_mb);
-
-  tmp_config_obj.set_setupwizard_mode(FLAGS_setupwizard_mode);
-
   auto secure_hals = android::base::Split(FLAGS_secure_hals, ",");
   tmp_config_obj.set_secure_hals(
       std::set<std::string>(secure_hals.begin(), secure_hals.end()));
 
-  tmp_config_obj.set_gdb_port(FLAGS_gdb_port);
-
-  tmp_config_obj.set_guest_enforce_security(FLAGS_guest_enforce_security);
   tmp_config_obj.set_extra_kernel_cmdline(FLAGS_extra_kernel_cmdline);
   tmp_config_obj.set_extra_bootconfig_args(FLAGS_extra_bootconfig_args);
 
-  if (FLAGS_console) {
-    SetCommandLineOptionWithMode("enable_sandbox", "false", SET_FLAGS_DEFAULT);
-  }
-
-  tmp_config_obj.set_console(FLAGS_console);
-  tmp_config_obj.set_kgdb(FLAGS_console && FLAGS_kgdb);
-
   tmp_config_obj.set_host_tools_version(HostToolsCrc());
 
-  tmp_config_obj.set_deprecated_boot_completed(FLAGS_deprecated_boot_completed);
+  tmp_config_obj.set_gem5_debug_flags(FLAGS_gem5_debug_flags);
 
-  tmp_config_obj.set_qemu_binary_dir(FLAGS_qemu_binary_dir);
-  tmp_config_obj.set_crosvm_binary(FLAGS_crosvm_binary);
-  tmp_config_obj.set_gem5_binary_dir(FLAGS_gem5_binary_dir);
-
-  tmp_config_obj.set_seccomp_policy_dir(FLAGS_seccomp_policy_dir);
-
-  tmp_config_obj.set_enable_webrtc(FLAGS_start_webrtc);
-  tmp_config_obj.set_webrtc_assets_dir(FLAGS_webrtc_assets_dir);
+  // streaming, webrtc setup
   tmp_config_obj.set_webrtc_certs_dir(FLAGS_webrtc_certs_dir);
   tmp_config_obj.set_sig_server_secure(FLAGS_webrtc_sig_server_secure);
   // Note: This will be overridden if the sig server is started by us
@@ -666,85 +826,204 @@
   tmp_config_obj.set_sig_server_address(FLAGS_webrtc_sig_server_addr);
   tmp_config_obj.set_sig_server_path(FLAGS_webrtc_sig_server_path);
   tmp_config_obj.set_sig_server_strict(FLAGS_verify_sig_server_certificate);
-  tmp_config_obj.set_sig_server_headers_path(FLAGS_sig_server_headers_file);
-
-  auto tcp_range  = ParsePortRange(FLAGS_tcp_port_range);
-  tmp_config_obj.set_webrtc_tcp_port_range(tcp_range);
-  auto udp_range  = ParsePortRange(FLAGS_udp_port_range);
-  tmp_config_obj.set_webrtc_udp_port_range(udp_range);
-
-  tmp_config_obj.set_enable_modem_simulator(FLAGS_enable_modem_simulator &&
-                                            !FLAGS_enable_minimal_mode);
-  tmp_config_obj.set_modem_simulator_instance_number(modem_simulator_count);
-  tmp_config_obj.set_modem_simulator_sim_type(FLAGS_modem_simulator_sim_type);
-
-  tmp_config_obj.set_webrtc_enable_adb_websocket(
-          FLAGS_webrtc_enable_adb_websocket);
-
-  tmp_config_obj.set_run_as_daemon(FLAGS_daemon);
-
-  tmp_config_obj.set_data_policy(FLAGS_data_policy);
-  tmp_config_obj.set_blank_data_image_mb(FLAGS_blank_data_image_mb);
-
-  tmp_config_obj.set_enable_gnss_grpc_proxy(FLAGS_start_gnss_proxy);
-
-  tmp_config_obj.set_enable_vehicle_hal_grpc_server(
-      FLAGS_enable_vehicle_hal_grpc_server);
-
-  tmp_config_obj.set_bootloader(FLAGS_bootloader);
 
   tmp_config_obj.set_enable_metrics(FLAGS_report_anonymous_usage_stats);
 
-  if (!FLAGS_boot_slot.empty()) {
-      tmp_config_obj.set_boot_slot(FLAGS_boot_slot);
-  }
-
-  tmp_config_obj.set_cuttlefish_env_path(GetCuttlefishEnvPath());
-
-  tmp_config_obj.set_ril_dns(FLAGS_ril_dns);
-
-  tmp_config_obj.set_enable_minimal_mode(FLAGS_enable_minimal_mode);
-
-  tmp_config_obj.set_vhost_net(FLAGS_vhost_net);
+#ifdef ENFORCE_MAC80211_HWSIM
+  tmp_config_obj.set_virtio_mac80211_hwsim(true);
+#else
+  tmp_config_obj.set_virtio_mac80211_hwsim(false);
+#endif
 
   tmp_config_obj.set_vhost_user_mac80211_hwsim(FLAGS_vhost_user_mac80211_hwsim);
 
   if ((FLAGS_ap_rootfs_image.empty()) != (FLAGS_ap_kernel_image.empty())) {
     LOG(FATAL) << "Either both ap_rootfs_image and ap_kernel_image should be "
-                  "set or neither should be set.";
+        "set or neither should be set.";
+  }
+  // If user input multiple values, we only take the 1st value and shared with
+  // all instances
+  std::string ap_rootfs_image = "";
+  if (!FLAGS_ap_rootfs_image.empty()) {
+    ap_rootfs_image = android::base::Split(FLAGS_ap_rootfs_image, ",")[0];
   }
 
-  tmp_config_obj.set_ap_rootfs_image(FLAGS_ap_rootfs_image);
+  tmp_config_obj.set_ap_rootfs_image(ap_rootfs_image);
   tmp_config_obj.set_ap_kernel_image(FLAGS_ap_kernel_image);
 
   tmp_config_obj.set_wmediumd_config(FLAGS_wmediumd_config);
 
-  tmp_config_obj.set_rootcanal_hci_port(7300);
-  tmp_config_obj.set_rootcanal_link_port(7400);
-  tmp_config_obj.set_rootcanal_test_port(7500);
-  tmp_config_obj.set_rootcanal_config_file(
-      FLAGS_bluetooth_controller_properties_file);
+  // netsim flags allow all radios or selecting a specific radio
   tmp_config_obj.set_rootcanal_default_commands_file(
       FLAGS_bluetooth_default_commands_file);
+  tmp_config_obj.set_rootcanal_config_file(
+      FLAGS_bluetooth_controller_properties_file);
 
-  tmp_config_obj.set_record_screen(FLAGS_record_screen);
+  bool is_any_netsim = FLAGS_netsim || FLAGS_netsim_bt;
+  bool is_bt_netsim = FLAGS_netsim || FLAGS_netsim_bt;
 
-  tmp_config_obj.set_enable_host_bluetooth(FLAGS_enable_host_bluetooth);
+  // crosvm should create fifos for Bluetooth
+  tmp_config_obj.set_enable_host_bluetooth(FLAGS_enable_host_bluetooth || is_bt_netsim);
 
-  tmp_config_obj.set_protected_vm(FLAGS_protected_vm);
+  // rootcanal and bt_connector should handle Bluetooth (instead of netsim)
+  tmp_config_obj.set_enable_host_bluetooth_connector(FLAGS_enable_host_bluetooth && !is_bt_netsim);
 
-  tmp_config_obj.set_userdata_format(FLAGS_userdata_format);
-
-  std::vector<int> num_instances;
-  for (int i = 0; i < FLAGS_num_instances; i++) {
-    num_instances.push_back(GetInstance() + i);
+  // These flags inform NetsimServer::ResultSetup which radios it owns.
+  if (is_bt_netsim) {
+    tmp_config_obj.netsim_radio_enable(CuttlefishConfig::NetsimRadio::Bluetooth);
   }
-  std::vector<std::string> gnss_file_paths = android::base::Split(FLAGS_gnss_file_path, ",");
+  // end of vectorize ap_rootfs_image, ap_kernel_image, wmediumd_config
+
+  auto instance_nums =
+      CF_EXPECT(InstanceNumsCalculator().FromGlobalGflags().Calculate());
+
+  // get flag default values and store into map
+  auto name_to_default_value = CurrentFlagsToDefaultValue();
+  // old flags but vectorized for multi-device instances
+  int32_t instances_size = instance_nums.size();
+  std::vector<std::string> gnss_file_paths =
+      CF_EXPECT(GET_FLAG_STR_VALUE(gnss_file_path));
+  std::vector<std::string> fixed_location_file_paths =
+      CF_EXPECT(GET_FLAG_STR_VALUE(fixed_location_file_path));
+  std::vector<int> x_res_vec = CF_EXPECT(GET_FLAG_INT_VALUE(x_res));
+  std::vector<int> y_res_vec = CF_EXPECT(GET_FLAG_INT_VALUE(y_res));
+  std::vector<int> dpi_vec = CF_EXPECT(GET_FLAG_INT_VALUE(dpi));
+  std::vector<int> refresh_rate_hz_vec = CF_EXPECT(GET_FLAG_INT_VALUE(
+      refresh_rate_hz));
+  std::vector<int> memory_mb_vec = CF_EXPECT(GET_FLAG_INT_VALUE(memory_mb));
+  std::vector<int> camera_server_port_vec = CF_EXPECT(GET_FLAG_INT_VALUE(
+      camera_server_port));
+  std::vector<int> vsock_guest_cid_vec = CF_EXPECT(GET_FLAG_INT_VALUE(
+      vsock_guest_cid));
+  std::vector<int> cpus_vec = CF_EXPECT(GET_FLAG_INT_VALUE(cpus));
+  std::vector<int> blank_data_image_mb_vec = CF_EXPECT(GET_FLAG_INT_VALUE(
+      blank_data_image_mb));
+  std::vector<int> gdb_port_vec = CF_EXPECT(GET_FLAG_INT_VALUE(gdb_port));
+  std::vector<std::string> setupwizard_mode_vec =
+      CF_EXPECT(GET_FLAG_STR_VALUE(setupwizard_mode));
+  std::vector<std::string> userdata_format_vec =
+      CF_EXPECT(GET_FLAG_STR_VALUE(userdata_format));
+  std::vector<bool> guest_enforce_security_vec = CF_EXPECT(GET_FLAG_BOOL_VALUE(
+      guest_enforce_security));
+  std::vector<bool> use_random_serial_vec = CF_EXPECT(GET_FLAG_BOOL_VALUE(
+      use_random_serial));
+  std::vector<bool> use_allocd_vec = CF_EXPECT(GET_FLAG_BOOL_VALUE(use_allocd));
+  std::vector<bool> use_sdcard_vec = CF_EXPECT(GET_FLAG_BOOL_VALUE(use_sdcard));
+  std::vector<bool> pause_in_bootloader_vec = CF_EXPECT(GET_FLAG_BOOL_VALUE(
+      pause_in_bootloader));
+  std::vector<bool> daemon_vec = CF_EXPECT(GET_FLAG_BOOL_VALUE(daemon));
+  std::vector<bool> enable_minimal_mode_vec = CF_EXPECT(GET_FLAG_BOOL_VALUE(
+      enable_minimal_mode));
+  std::vector<bool> enable_modem_simulator_vec = CF_EXPECT(GET_FLAG_BOOL_VALUE(
+      enable_modem_simulator));
+  std::vector<int> modem_simulator_count_vec = CF_EXPECT(GET_FLAG_INT_VALUE(
+      modem_simulator_count));
+  std::vector<int> modem_simulator_sim_type_vec = CF_EXPECT(GET_FLAG_INT_VALUE(
+      modem_simulator_sim_type));
+  std::vector<bool> console_vec = CF_EXPECT(GET_FLAG_BOOL_VALUE(console));
+  std::vector<bool> enable_audio_vec = CF_EXPECT(GET_FLAG_BOOL_VALUE(enable_audio));
+  std::vector<bool> start_gnss_proxy_vec = CF_EXPECT(GET_FLAG_BOOL_VALUE(
+      start_gnss_proxy));
+  std::vector<bool> enable_bootanimation_vec =
+      CF_EXPECT(GET_FLAG_BOOL_VALUE(enable_bootanimation));
+  std::vector<bool> record_screen_vec = CF_EXPECT(GET_FLAG_BOOL_VALUE(
+      record_screen));
+  std::vector<std::string> gem5_debug_file_vec =
+      CF_EXPECT(GET_FLAG_STR_VALUE(gem5_debug_file));
+  std::vector<bool> protected_vm_vec = CF_EXPECT(GET_FLAG_BOOL_VALUE(
+      protected_vm));
+  std::vector<bool> mte_vec = CF_EXPECT(GET_FLAG_BOOL_VALUE(mte));
+  std::vector<bool> enable_kernel_log_vec = CF_EXPECT(GET_FLAG_BOOL_VALUE(
+      enable_kernel_log));
+  std::vector<bool> kgdb_vec = CF_EXPECT(GET_FLAG_BOOL_VALUE(kgdb));
+  std::vector<std::string> boot_slot_vec =
+      CF_EXPECT(GET_FLAG_STR_VALUE(boot_slot));
+  std::vector<bool> start_webrtc_vec = CF_EXPECT(GET_FLAG_BOOL_VALUE(
+      start_webrtc));
+  std::vector<std::string> webrtc_assets_dir_vec =
+      CF_EXPECT(GET_FLAG_STR_VALUE(webrtc_assets_dir));
+  std::vector<std::string> tcp_port_range_vec =
+      CF_EXPECT(GET_FLAG_STR_VALUE(tcp_port_range));
+  std::vector<std::string> udp_port_range_vec =
+      CF_EXPECT(GET_FLAG_STR_VALUE(udp_port_range));
+  std::vector<bool> vhost_net_vec = CF_EXPECT(GET_FLAG_BOOL_VALUE(
+      vhost_net));
+  std::vector<std::string> ril_dns_vec =
+      CF_EXPECT(GET_FLAG_STR_VALUE(ril_dns));
+
+  // At this time, FLAGS_enable_sandbox comes from SetDefaultFlagsForCrosvm
+  std::vector<bool> enable_sandbox_vec = CF_EXPECT(GET_FLAG_BOOL_VALUE(
+      enable_sandbox));
+
+  std::vector<std::string> gpu_mode_vec =
+      CF_EXPECT(GET_FLAG_STR_VALUE(gpu_mode));
+  std::vector<std::string> gpu_capture_binary_vec =
+      CF_EXPECT(GET_FLAG_STR_VALUE(gpu_capture_binary));
+  std::vector<bool> restart_subprocesses_vec = CF_EXPECT(GET_FLAG_BOOL_VALUE(
+      restart_subprocesses));
+  std::vector<std::string> hwcomposer_vec =
+      CF_EXPECT(GET_FLAG_STR_VALUE(hwcomposer));
+  std::vector<bool> enable_gpu_udmabuf_vec =
+      CF_EXPECT(GET_FLAG_BOOL_VALUE(enable_gpu_udmabuf));
+  std::vector<bool> smt_vec = CF_EXPECT(GET_FLAG_BOOL_VALUE(smt));
+  std::vector<std::string> crosvm_binary_vec =
+      CF_EXPECT(GET_FLAG_STR_VALUE(crosvm_binary));
+  std::vector<std::string> seccomp_policy_dir_vec =
+      CF_EXPECT(GET_FLAG_STR_VALUE(seccomp_policy_dir));
+  std::vector<std::string> qemu_binary_dir_vec =
+      CF_EXPECT(GET_FLAG_STR_VALUE(qemu_binary_dir));
+
+  // new instance specific flags (moved from common flags)
+  std::vector<std::string> gem5_binary_dir_vec =
+      CF_EXPECT(GET_FLAG_STR_VALUE(gem5_binary_dir));
+  std::vector<std::string> gem5_checkpoint_dir_vec =
+      CF_EXPECT(GET_FLAG_STR_VALUE(gem5_checkpoint_dir));
+  std::vector<std::string> data_policy_vec =
+      CF_EXPECT(GET_FLAG_STR_VALUE(data_policy));
+
+  // multi-dv multi-display proto input
+  std::vector<std::vector<CuttlefishConfig::DisplayConfig>> instances_display_configs;
+  if (!FLAGS_displays_textproto.empty() || !FLAGS_displays_binproto.empty()) {
+    instances_display_configs = CF_EXPECT(ParseDisplaysProto());
+  }
+
+  std::string default_enable_sandbox = "";
+  std::string comma_str = "";
+
+  CHECK(FLAGS_use_overlay || instance_nums.size() == 1)
+      << "`--use_overlay=false` is incompatible with multiple instances";
+  CHECK(instance_nums.size() > 0) << "Require at least one instance.";
+  auto rootcanal_instance_num = *instance_nums.begin() - 1;
+  if (FLAGS_rootcanal_instance_num > 0) {
+    rootcanal_instance_num = FLAGS_rootcanal_instance_num - 1;
+  }
+  tmp_config_obj.set_rootcanal_args(FLAGS_rootcanal_args);
+  tmp_config_obj.set_rootcanal_hci_port(7300 + rootcanal_instance_num);
+  tmp_config_obj.set_rootcanal_link_port(7400 + rootcanal_instance_num);
+  tmp_config_obj.set_rootcanal_test_port(7500 + rootcanal_instance_num);
+  tmp_config_obj.set_rootcanal_link_ble_port(7600 + rootcanal_instance_num);
+  LOG(DEBUG) << "rootcanal_instance_num: " << rootcanal_instance_num;
+  LOG(DEBUG) << "launch rootcanal: " << (FLAGS_rootcanal_instance_num <= 0);
+
+  // crosvm should create fifos for UWB
+  auto pica_instance_num = *instance_nums.begin() - 1;
+  if (FLAGS_pica_instance_num > 0) {
+    pica_instance_num = FLAGS_pica_instance_num - 1;
+  }
+  tmp_config_obj.set_enable_host_uwb(FLAGS_enable_host_uwb);
+  tmp_config_obj.set_enable_host_uwb_connector(FLAGS_enable_host_uwb);
+  tmp_config_obj.set_pica_uci_port(7000 + pica_instance_num);
+  LOG(DEBUG) << "pica_instance_num: " << pica_instance_num;
+  LOG(DEBUG) << "launch pica: " << (FLAGS_pica_instance_num <= 0);
 
   bool is_first_instance = true;
-  for (const auto& num : num_instances) {
+  int instance_index = 0;
+  auto num_to_webrtc_device_id_flag_map =
+      CF_EXPECT(CreateNumToWebrtcDeviceIdMap(tmp_config_obj, instance_nums,
+                                             FLAGS_webrtc_device_id));
+  for (const auto& num : instance_nums) {
     IfaceConfig iface_config;
-    if (FLAGS_use_allocd) {
+    if (use_allocd_vec[instance_index]) {
       auto iface_opt = AllocateNetworkInterfaces();
       if (!iface_opt.has_value()) {
         LOG(FATAL) << "Failed to acquire network interfaces";
@@ -754,18 +1033,48 @@
       iface_config = DefaultNetworkInterfaces(num);
     }
 
+
     auto instance = tmp_config_obj.ForInstance(num);
     auto const_instance =
         const_cast<const CuttlefishConfig&>(tmp_config_obj).ForInstance(num);
-    instance.set_use_allocd(FLAGS_use_allocd);
-    if (FLAGS_use_random_serial) {
+
+    instance.set_bootconfig_supported(guest_configs[instance_index].bootconfig_supported);
+    instance.set_filename_encryption_mode(
+      guest_configs[instance_index].hctr2_supported ? "hctr2" : "cts");
+    instance.set_use_allocd(use_allocd_vec[instance_index]);
+    instance.set_enable_audio(enable_audio_vec[instance_index]);
+    instance.set_enable_gnss_grpc_proxy(start_gnss_proxy_vec[instance_index]);
+    instance.set_enable_bootanimation(enable_bootanimation_vec[instance_index]);
+    instance.set_record_screen(record_screen_vec[instance_index]);
+    instance.set_gem5_debug_file(gem5_debug_file_vec[instance_index]);
+    instance.set_protected_vm(protected_vm_vec[instance_index]);
+    instance.set_mte(mte_vec[instance_index]);
+    instance.set_enable_kernel_log(enable_kernel_log_vec[instance_index]);
+    if (!boot_slot_vec[instance_index].empty()) {
+      instance.set_boot_slot(boot_slot_vec[instance_index]);
+    }
+
+    instance.set_crosvm_binary(crosvm_binary_vec[instance_index]);
+    instance.set_seccomp_policy_dir(seccomp_policy_dir_vec[instance_index]);
+    instance.set_qemu_binary_dir(qemu_binary_dir_vec[instance_index]);
+
+    // wifi, bluetooth, connectivity setup
+    instance.set_ril_dns(ril_dns_vec[instance_index]);
+
+    instance.set_vhost_net(vhost_net_vec[instance_index]);
+    // end of wifi, bluetooth, connectivity setup
+
+    if (use_random_serial_vec[instance_index]) {
       instance.set_serial_number(
           RandomSerialNumber("CFCVD" + std::to_string(num)));
     } else {
       instance.set_serial_number(FLAGS_serial_number + std::to_string(num));
     }
+
+    instance.set_grpc_socket_path(const_instance.PerInstanceGrpcSocketPath(""));
+
     // call this before all stuff that has vsock server: e.g. touchpad, keyboard, etc
-    const auto vsock_guest_cid = FLAGS_vsock_guest_cid + num - GetInstance();
+    const auto vsock_guest_cid = vsock_guest_cid_vec[instance_index] + num - GetInstance();
     instance.set_vsock_guest_cid(vsock_guest_cid);
     auto calc_vsock_port = [vsock_guest_cid](const int base_port) {
       // a base (vsock) port is like 9600 for modem_simulator, etc
@@ -773,9 +1082,96 @@
     };
     instance.set_session_id(iface_config.mobile_tap.session_id);
 
+    instance.set_cpus(cpus_vec[instance_index]);
+    // make sure all instances have multiple of 2 then SMT mode
+    // if any of instance doesn't have multiple of 2 then NOT SMT
+    CF_EXPECT(!smt_vec[instance_index] || cpus_vec[instance_index] % 2 == 0,
+              "CPUs must be a multiple of 2 in SMT mode");
+    instance.set_smt(smt_vec[instance_index]);
+
+    // new instance specific flags (moved from common flags)
+    CF_EXPECT(instance_index < guest_configs.size(),
+              "instance_index " << instance_index << " out of boundary "
+                                << guest_configs.size());
+    instance.set_target_arch(guest_configs[instance_index].target_arch);
+    instance.set_guest_android_version(
+        guest_configs[instance_index].android_version_number);
+    instance.set_console(console_vec[instance_index]);
+    instance.set_kgdb(console_vec[instance_index] && kgdb_vec[instance_index]);
+    instance.set_blank_data_image_mb(blank_data_image_mb_vec[instance_index]);
+    instance.set_gdb_port(gdb_port_vec[instance_index]);
+
+    std::vector<CuttlefishConfig::DisplayConfig> display_configs;
+    // assume displays proto input has higher priority than original display inputs
+    if (!FLAGS_displays_textproto.empty() || !FLAGS_displays_binproto.empty()) {
+      if (instance_index < instances_display_configs.size()) {
+        display_configs = instances_display_configs[instance_index];
+      } // else display_configs is an empty vector
+    } else {
+      auto display0 = CF_EXPECT(ParseDisplayConfig(FLAGS_display0));
+      if (display0) {
+        display_configs.push_back(*display0);
+      }
+      auto display1 = CF_EXPECT(ParseDisplayConfig(FLAGS_display1));
+      if (display1) {
+        display_configs.push_back(*display1);
+      }
+      auto display2 = CF_EXPECT(ParseDisplayConfig(FLAGS_display2));
+      if (display2) {
+        display_configs.push_back(*display2);
+      }
+      auto display3 = CF_EXPECT(ParseDisplayConfig(FLAGS_display3));
+      if (display3) {
+        display_configs.push_back(*display3);
+      }
+    }
+
+    if (x_res_vec[instance_index] > 0 && y_res_vec[instance_index] > 0) {
+      if (display_configs.empty()) {
+        display_configs.push_back({
+            .width = x_res_vec[instance_index],
+            .height = y_res_vec[instance_index],
+            .dpi = dpi_vec[instance_index],
+            .refresh_rate_hz = refresh_rate_hz_vec[instance_index],
+          });
+      } else {
+        LOG(WARNING) << "Ignoring --x_res and --y_res when --displayN specified.";
+      }
+    }
+    instance.set_display_configs(display_configs);
+
+    instance.set_memory_mb(memory_mb_vec[instance_index]);
+    instance.set_ddr_mem_mb(memory_mb_vec[instance_index] * 1.2);
+    instance.set_setupwizard_mode(setupwizard_mode_vec[instance_index]);
+    instance.set_userdata_format(userdata_format_vec[instance_index]);
+    instance.set_guest_enforce_security(guest_enforce_security_vec[instance_index]);
+    instance.set_pause_in_bootloader(pause_in_bootloader_vec[instance_index]);
+    instance.set_run_as_daemon(daemon_vec[instance_index]);
+    instance.set_enable_modem_simulator(enable_modem_simulator_vec[instance_index] &&
+                                        !enable_minimal_mode_vec[instance_index]);
+    instance.set_modem_simulator_instance_number(modem_simulator_count_vec[instance_index]);
+    instance.set_modem_simulator_sim_type(modem_simulator_sim_type_vec[instance_index]);
+
+    instance.set_enable_minimal_mode(enable_minimal_mode_vec[instance_index]);
+    instance.set_camera_server_port(camera_server_port_vec[instance_index]);
+    instance.set_gem5_binary_dir(gem5_binary_dir_vec[instance_index]);
+    instance.set_gem5_checkpoint_dir(gem5_checkpoint_dir_vec[instance_index]);
+    instance.set_data_policy(data_policy_vec[instance_index]);
+
     instance.set_mobile_bridge_name(StrForInstance("cvd-mbr-", num));
+    instance.set_wifi_bridge_name("cvd-wbr");
+    instance.set_ethernet_bridge_name("cvd-ebr");
     instance.set_mobile_tap_name(iface_config.mobile_tap.name);
-    instance.set_wifi_tap_name(iface_config.wireless_tap.name);
+
+    if (NetworkInterfaceExists(iface_config.non_bridged_wireless_tap.name) &&
+        tmp_config_obj.virtio_mac80211_hwsim()) {
+      instance.set_use_bridged_wifi_tap(false);
+      instance.set_wifi_tap_name(iface_config.non_bridged_wireless_tap.name);
+    } else {
+      instance.set_use_bridged_wifi_tap(true);
+      instance.set_wifi_tap_name(iface_config.bridged_wireless_tap.name);
+    }
+
     instance.set_ethernet_tap_name(iface_config.ethernet_tap.name);
 
     instance.set_uuid(FLAGS_uuid);
@@ -785,68 +1181,195 @@
     instance.set_qemu_vnc_server_port(544 + num - 1);
     instance.set_adb_host_port(6520 + num - 1);
     instance.set_adb_ip_and_port("0.0.0.0:" + std::to_string(6520 + num - 1));
-    instance.set_confui_host_vsock_port(7700 + num - 1);
+
+    instance.set_fastboot_host_port(7520 + num - 1);
+
+    std::uint8_t ethernet_mac[6] = {};
+    std::uint8_t mobile_mac[6] = {};
+    std::uint8_t wifi_mac[6] = {};
+    std::uint8_t ethernet_ipv6[16] = {};
+    GenerateEthMacForInstance(num - 1, ethernet_mac);
+    GenerateMobileMacForInstance(num - 1, mobile_mac);
+    GenerateWifiMacForInstance(num - 1, wifi_mac);
+    GenerateCorrespondingIpv6ForMac(ethernet_mac, ethernet_ipv6);
+
+    instance.set_ethernet_mac(MacAddressToString(ethernet_mac));
+    instance.set_mobile_mac(MacAddressToString(mobile_mac));
+    instance.set_wifi_mac(MacAddressToString(wifi_mac));
+    instance.set_ethernet_ipv6(Ipv6ToString(ethernet_ipv6));
+
     instance.set_tombstone_receiver_port(calc_vsock_port(6600));
-    instance.set_vehicle_hal_server_port(9300 + num - 1);
     instance.set_audiocontrol_server_port(9410);  /* OK to use the same port number across instances */
     instance.set_config_server_port(calc_vsock_port(6800));
 
-    if (tmp_config_obj.gpu_mode() != kGpuModeDrmVirgl &&
-        tmp_config_obj.gpu_mode() != kGpuModeGfxStream) {
-      if (FLAGS_vm_manager == QemuManager::name()) {
+    // gpu related settings
+    auto gpu_mode = gpu_mode_vec[instance_index];
+    if (gpu_mode != kGpuModeAuto && gpu_mode != kGpuModeDrmVirgl &&
+        gpu_mode != kGpuModeGfxstream &&
+        gpu_mode != kGpuModeGfxstreamGuestAngle &&
+        gpu_mode != kGpuModeGuestSwiftshader && gpu_mode != kGpuModeNone) {
+      LOG(FATAL) << "Invalid gpu_mode: " << gpu_mode;
+    }
+    if (gpu_mode == kGpuModeAuto) {
+      if (ShouldEnableAcceleratedRendering(graphics_availability)) {
+        LOG(INFO) << "GPU auto mode: detected prerequisites for accelerated "
+            "rendering support.";
+        if (vm_manager_vec[0] == QemuManager::name()) {
+          LOG(INFO) << "Enabling --gpu_mode=drm_virgl.";
+          gpu_mode = kGpuModeDrmVirgl;
+        } else {
+          LOG(INFO) << "Enabling --gpu_mode=gfxstream.";
+          gpu_mode = kGpuModeGfxstream;
+        }
+      } else {
+        LOG(INFO) << "GPU auto mode: did not detect prerequisites for "
+            "accelerated rendering support, enabling "
+            "--gpu_mode=guest_swiftshader.";
+        gpu_mode = kGpuModeGuestSwiftshader;
+      }
+    } else if (gpu_mode == kGpuModeGfxstream ||
+               gpu_mode == kGpuModeGfxstreamGuestAngle ||
+               gpu_mode == kGpuModeDrmVirgl) {
+      if (!ShouldEnableAcceleratedRendering(graphics_availability)) {
+        LOG(ERROR) << "--gpu_mode=" << gpu_mode
+                   << " was requested but the prerequisites for accelerated "
+                      "rendering were not detected so the device may not "
+                      "function correctly. Please consider switching to "
+                      "--gpu_mode=auto or --gpu_mode=guest_swiftshader.";
+      }
+    }
+    instance.set_gpu_mode(gpu_mode);
+
+    const auto angle_features = CF_EXPECT(GetNeededAngleFeatures(
+        CF_EXPECT(GetRenderingMode(gpu_mode)), graphics_availability));
+    instance.set_gpu_angle_feature_overrides_enabled(
+        angle_features.angle_feature_overrides_enabled);
+    instance.set_gpu_angle_feature_overrides_disabled(
+        angle_features.angle_feature_overrides_disabled);
+
+    instance.set_restart_subprocesses(restart_subprocesses_vec[instance_index]);
+    instance.set_gpu_capture_binary(gpu_capture_binary_vec[instance_index]);
+    if (!gpu_capture_binary_vec[instance_index].empty()) {
+      CF_EXPECT(gpu_mode == kGpuModeGfxstream ||
+                    gpu_mode == kGpuModeGfxstreamGuestAngle,
+                "GPU capture only supported with --gpu_mode=gfxstream");
+
+      // GPU capture runs in a detached mode where the "launcher" process
+      // intentionally exits immediately.
+      CF_EXPECT(!restart_subprocesses_vec[instance_index],
+          "GPU capture only supported with --norestart_subprocesses");
+    }
+
+    instance.set_hwcomposer(hwcomposer_vec[instance_index]);
+    if (!hwcomposer_vec[instance_index].empty()) {
+      if (hwcomposer_vec[instance_index] == kHwComposerRanchu) {
+        CF_EXPECT(gpu_mode != kGpuModeDrmVirgl,
+                  "ranchu hwcomposer not supported with --gpu_mode=drm_virgl");
+      }
+    }
+
+    if (hwcomposer_vec[instance_index] == kHwComposerAuto) {
+      if (gpu_mode == kGpuModeDrmVirgl) {
+        instance.set_hwcomposer(kHwComposerDrm);
+      } else if (gpu_mode == kGpuModeNone) {
+        instance.set_hwcomposer(kHwComposerNone);
+      } else {
+        instance.set_hwcomposer(kHwComposerRanchu);
+      }
+    }
+
+    instance.set_enable_gpu_udmabuf(enable_gpu_udmabuf_vec[instance_index]);
+
+    // 1. Keep original code order SetCommandLineOptionWithMode("enable_sandbox")
+    // then set_enable_sandbox later.
+    // 2. SetCommandLineOptionWithMode condition: if gpu_mode or console,
+    // then SetCommandLineOptionWithMode false as original code did,
+    // otherwise keep default enable_sandbox value.
+    // 3. Sepolicy rules need to be updated to support gpu mode. Temporarily disable
+    // auto-enabling sandbox when gpu is enabled (b/152323505).
+    default_enable_sandbox += comma_str;
+    if ((gpu_mode != kGpuModeGuestSwiftshader) || console_vec[instance_index]) {
+      // original code, just moved to each instance setting block
+      default_enable_sandbox += "false";
+    } else {
+      default_enable_sandbox += BoolToString(enable_sandbox_vec[instance_index]);
+    }
+    comma_str = ",";
+
+    auto graphics_check = vmm->ConfigureGraphics(const_instance);
+    if (!graphics_check.ok()) {
+      LOG(FATAL) << graphics_check.error().Message();
+    }
+
+    if (gpu_mode != kGpuModeDrmVirgl && gpu_mode != kGpuModeGfxstream) {
+      if (vm_manager_vec[0] == QemuManager::name()) {
         instance.set_keyboard_server_port(calc_vsock_port(7000));
         instance.set_touch_server_port(calc_vsock_port(7100));
       }
     }
+    // end of gpu related settings
 
     instance.set_gnss_grpc_proxy_server_port(7200 + num -1);
+    instance.set_gnss_file_path(gnss_file_paths[instance_index]);
+    instance.set_fixed_location_file_path(fixed_location_file_paths[instance_index]);
 
-    if (num <= gnss_file_paths.size()) {
-      instance.set_gnss_file_path(gnss_file_paths[num-1]);
-    }
+    std::vector<std::string> virtual_disk_paths;
 
-    instance.set_camera_server_port(FLAGS_camera_server_port);
-
-    if (FLAGS_protected_vm) {
-      instance.set_virtual_disk_paths(
-          {const_instance.PerInstancePath("os_composite.img")});
+    bool os_overlay = true;
+    os_overlay &= !protected_vm_vec[instance_index];
+    // Gem5 already uses CoW wrappers around disk images
+    os_overlay &= vm_manager_vec[0] != Gem5Manager::name();
+    os_overlay &= FLAGS_use_overlay;
+    if (os_overlay) {
+      auto path = const_instance.PerInstancePath("overlay.img");
+      virtual_disk_paths.push_back(path);
     } else {
-      std::vector<std::string> virtual_disk_paths = {
-          const_instance.PerInstancePath("persistent_composite.img"),
-      };
-      if (FLAGS_vm_manager != Gem5Manager::name()) {
-        virtual_disk_paths.insert(virtual_disk_paths.begin(),
-            const_instance.PerInstancePath("overlay.img"));
-      } else {
-        // Gem5 already uses CoW wrappers around disk images
-        virtual_disk_paths.insert(virtual_disk_paths.begin(),
-            tmp_config_obj.os_composite_disk_path());
-      }
-      if (FLAGS_use_sdcard) {
-        virtual_disk_paths.push_back(const_instance.sdcard_path());
-      }
-      instance.set_virtual_disk_paths(virtual_disk_paths);
+      virtual_disk_paths.push_back(const_instance.os_composite_disk_path());
     }
 
+    bool persistent_disk = true;
+    persistent_disk &= !protected_vm_vec[instance_index];
+    persistent_disk &= vm_manager_vec[0] != Gem5Manager::name();
+    if (persistent_disk) {
+      auto path = const_instance.PerInstancePath("persistent_composite.img");
+      virtual_disk_paths.push_back(path);
+    }
+
+    instance.set_use_sdcard(use_sdcard_vec[instance_index]);
+
+    bool sdcard = true;
+    sdcard &= use_sdcard_vec[instance_index];
+    sdcard &= !protected_vm_vec[instance_index];
+    if (sdcard) {
+      virtual_disk_paths.push_back(const_instance.sdcard_path());
+    }
+
+    instance.set_virtual_disk_paths(virtual_disk_paths);
+
     // We'd like to set mac prefix to be 5554, 5555, 5556, ... in normal cases.
     // When --base_instance_num=3, this might be 5556, 5557, 5558, ... (skipping
     // first two)
     instance.set_wifi_mac_prefix(5554 + (num - 1));
 
+    // streaming, webrtc setup
+    instance.set_enable_webrtc(start_webrtc_vec[instance_index]);
+    instance.set_webrtc_assets_dir(webrtc_assets_dir_vec[instance_index]);
+
+    auto tcp_range  = ParsePortRange(tcp_port_range_vec[instance_index]);
+    instance.set_webrtc_tcp_port_range(tcp_range);
+
+    auto udp_range  = ParsePortRange(udp_port_range_vec[instance_index]);
+    instance.set_webrtc_udp_port_range(udp_range);
+
+    // end of streaming, webrtc setup
+
     instance.set_start_webrtc_signaling_server(false);
 
-    if (FLAGS_webrtc_device_id.empty()) {
-      // Use the instance's name as a default
-      instance.set_webrtc_device_id(const_instance.instance_name());
-    } else {
-      std::string device_id = FLAGS_webrtc_device_id;
-      size_t pos;
-      while ((pos = device_id.find("{num}")) != std::string::npos) {
-        device_id.replace(pos, strlen("{num}"), std::to_string(num));
-      }
-      instance.set_webrtc_device_id(device_id);
-    }
-    if (!is_first_instance || !FLAGS_start_webrtc) {
+    CF_EXPECT(Contains(num_to_webrtc_device_id_flag_map, num),
+              "Error in looking up num to webrtc_device_id_flag_map");
+    instance.set_webrtc_device_id(num_to_webrtc_device_id_flag_map[num]);
+
+    if (!is_first_instance || !start_webrtc_vec[instance_index]) {
       // Only the first instance starts the signaling server or proxy
       instance.set_start_webrtc_signaling_server(false);
       instance.set_start_webrtc_sig_server_proxy(false);
@@ -864,14 +1387,15 @@
 
     // Start wmediumd process for the first instance if
     // vhost_user_mac80211_hwsim is not specified.
-    const bool start_wmediumd =
-        FLAGS_vhost_user_mac80211_hwsim.empty() && is_first_instance;
+    const bool start_wmediumd = tmp_config_obj.virtio_mac80211_hwsim() &&
+                                FLAGS_vhost_user_mac80211_hwsim.empty() &&
+                                is_first_instance;
     if (start_wmediumd) {
       // TODO(b/199020470) move this to the directory for shared resources
       auto vhost_user_socket_path =
-          const_instance.PerInstanceInternalPath("vhost_user_mac80211");
+          const_instance.PerInstanceInternalUdsPath("vhost_user_mac80211");
       auto wmediumd_api_socket_path =
-          const_instance.PerInstanceInternalPath("wmediumd_api_server");
+          const_instance.PerInstanceInternalUdsPath("wmediumd_api_server");
 
       tmp_config_obj.set_vhost_user_mac80211_hwsim(vhost_user_socket_path);
       tmp_config_obj.set_wmediumd_api_server_socket(wmediumd_api_socket_path);
@@ -880,55 +1404,107 @@
       instance.set_start_wmediumd(false);
     }
 
-    instance.set_start_rootcanal(is_first_instance);
+    instance.set_start_netsim(is_first_instance && is_any_netsim);
 
-    instance.set_start_ap(!FLAGS_ap_rootfs_image.empty() &&
-                          !FLAGS_ap_kernel_image.empty() && is_first_instance);
+    instance.set_start_rootcanal(is_first_instance && !is_bt_netsim &&
+                                 (FLAGS_rootcanal_instance_num <= 0));
+
+    instance.set_start_pica(is_first_instance);
+
+    if (!FLAGS_ap_rootfs_image.empty() && !FLAGS_ap_kernel_image.empty() && start_wmediumd) {
+      // TODO(264537774): Ubuntu grub modules / grub monoliths cannot be used to boot
+      // 64 bit kernel using 32 bit u-boot / grub.
+      // Enable this code back after making sure it works across all popular environments
+      // if (CanGenerateEsp(guest_configs[0].target_arch)) {
+      //   instance.set_ap_boot_flow(CuttlefishConfig::InstanceSpecific::APBootFlow::Grub);
+      // } else {
+      //   instance.set_ap_boot_flow(CuttlefishConfig::InstanceSpecific::APBootFlow::LegacyDirect);
+      // }
+      instance.set_ap_boot_flow(CuttlefishConfig::InstanceSpecific::APBootFlow::LegacyDirect);
+    } else {
+      instance.set_ap_boot_flow(CuttlefishConfig::InstanceSpecific::APBootFlow::None);
+    }
 
     is_first_instance = false;
 
     // instance.modem_simulator_ports := "" or "[port,]*port"
-    if (modem_simulator_count > 0) {
+    if (modem_simulator_count_vec[instance_index] > 0) {
       std::stringstream modem_ports;
-      for (auto index {0}; index < modem_simulator_count - 1; index++) {
-        auto port = 9600 + (modem_simulator_count * (num - 1)) + index;
+      for (auto index {0}; index < modem_simulator_count_vec[instance_index] - 1; index++) {
+        auto port = 9600 + (modem_simulator_count_vec[instance_index] * (num - 1)) + index;
         modem_ports << calc_vsock_port(port) << ",";
       }
-      auto port = 9600 + (modem_simulator_count * (num - 1)) +
-                  modem_simulator_count - 1;
+      auto port = 9600 + (modem_simulator_count_vec[instance_index] * (num - 1)) +
+                  modem_simulator_count_vec[instance_index] - 1;
       modem_ports << calc_vsock_port(port);
       instance.set_modem_simulator_ports(modem_ports.str());
     } else {
       instance.set_modem_simulator_ports("");
     }
-  } // end of num_instances loop
+    instance_index++;
+  }  // end of num_instances loop
 
   std::vector<std::string> names;
+  names.reserve(tmp_config_obj.Instances().size());
   for (const auto& instance : tmp_config_obj.Instances()) {
     names.emplace_back(instance.instance_name());
   }
   tmp_config_obj.set_instance_names(names);
 
-  tmp_config_obj.set_enable_sandbox(FLAGS_enable_sandbox);
+  // keep legacy values for acloud or other related tools (b/262284453)
+  tmp_config_obj.set_crosvm_binary(crosvm_binary_vec[0]);
 
-  // Audio is not available for Arm64
-  SetCommandLineOptionWithMode(
-      "enable_audio",
-      (cuttlefish::HostArch() == cuttlefish::Arch::Arm64) ? "false" : "true",
-      SET_FLAGS_DEFAULT);
-  tmp_config_obj.set_enable_audio(FLAGS_enable_audio);
+  // Keep the original code here to set enable_sandbox commandline flag value
+  SetCommandLineOptionWithMode("enable_sandbox", default_enable_sandbox.c_str(),
+                               google::FlagSettingMode::SET_FLAGS_DEFAULT);
+
+  // After SetCommandLineOptionWithMode,
+  // default flag values changed, need recalculate name_to_default_value
+  name_to_default_value = CurrentFlagsToDefaultValue();
+  // After last SetCommandLineOptionWithMode, we could set these special flags
+  enable_sandbox_vec = CF_EXPECT(GET_FLAG_BOOL_VALUE(
+      enable_sandbox));
+
+  instance_index = 0;
+  for (const auto& num : instance_nums) {
+    auto instance = tmp_config_obj.ForInstance(num);
+    instance.set_enable_sandbox(enable_sandbox_vec[instance_index]);
+    instance_index++;
+  }
+
+  DiskImageFlagsVectorization(tmp_config_obj, fetcher_config);
 
   return tmp_config_obj;
 }
 
-void SetDefaultFlagsForQemu(Arch target_arch) {
-  // for now, we don't set non-default options for QEMU
-  if (FLAGS_gpu_mode == kGpuModeGuestSwiftshader && !FLAGS_start_webrtc) {
-    // This makes WebRTC the default streamer unless the user requests
-    // another via a --star_<streamer> flag, while at the same time it's
-    // possible to run without any streamer by setting --start_webrtc=false.
-    SetCommandLineOptionWithMode("start_webrtc", "true", SET_FLAGS_DEFAULT);
+Result<void> SetDefaultFlagsForQemu(Arch target_arch, std::map<std::string, std::string>& name_to_default_value) {
+  auto instance_nums =
+      CF_EXPECT(InstanceNumsCalculator().FromGlobalGflags().Calculate());
+  int32_t instances_size = instance_nums.size();
+  std::vector<std::string> gpu_mode_vec =
+      CF_EXPECT(GET_FLAG_STR_VALUE(gpu_mode));
+  std::vector<bool> start_webrtc_vec = CF_EXPECT(GET_FLAG_BOOL_VALUE(
+      start_webrtc));
+  std::string default_start_webrtc = "";
+
+  for (int instance_index = 0; instance_index < instance_nums.size(); instance_index++) {
+    if (instance_index > 0) {
+      default_start_webrtc += ",";
+    }
+    if (gpu_mode_vec[instance_index] == kGpuModeGuestSwiftshader && !start_webrtc_vec[instance_index]) {
+      // This makes WebRTC the default streamer unless the user requests
+      // another via a --start_<streamer> flag, while at the same time it's
+      // possible to run without any streamer by setting --start_webrtc=false.
+      default_start_webrtc += "true";
+    } else {
+      default_start_webrtc += BoolToString(start_webrtc_vec[instance_index]);
+    }
   }
+  // This is the 1st place to set "start_webrtc" flag value
+  // for now, we don't set non-default options for QEMU
+  SetCommandLineOptionWithMode("start_webrtc", default_start_webrtc.c_str(),
+                               SET_FLAGS_DEFAULT);
+
   std::string default_bootloader =
       DefaultHostArtifactsPath("etc/bootloader_");
   if(target_arch == Arch::Arm) {
@@ -937,34 +1513,81 @@
       default_bootloader += "arm";
   } else if (target_arch == Arch::Arm64) {
       default_bootloader += "aarch64";
+  } else if (target_arch == Arch::RiscV64) {
+      default_bootloader += "riscv64";
   } else {
       default_bootloader += "x86_64";
   }
   default_bootloader += "/bootloader.qemu";
   SetCommandLineOptionWithMode("bootloader", default_bootloader.c_str(),
                                SET_FLAGS_DEFAULT);
+  return {};
 }
 
-void SetDefaultFlagsForCrosvm() {
-  if (!FLAGS_start_webrtc) {
-    // This makes WebRTC the default streamer unless the user requests
-    // another via a --star_<streamer> flag, while at the same time it's
-    // possible to run without any streamer by setting --start_webrtc=false.
-    SetCommandLineOptionWithMode("start_webrtc", "true", SET_FLAGS_DEFAULT);
-  }
+
+Result<void> SetDefaultFlagsForCrosvm(
+    const std::vector<GuestConfig>& guest_configs,
+    std::map<std::string, std::string>& name_to_default_value) {
+  auto instance_nums =
+      CF_EXPECT(InstanceNumsCalculator().FromGlobalGflags().Calculate());
+  int32_t instances_size = instance_nums.size();
+  std::vector<bool> start_webrtc_vec = CF_EXPECT(GET_FLAG_BOOL_VALUE(
+      start_webrtc));
+  std::string default_start_webrtc = "";
 
   std::set<Arch> supported_archs{Arch::X86_64};
   bool default_enable_sandbox =
       supported_archs.find(HostArch()) != supported_archs.end() &&
       EnsureDirectoryExists(kCrosvmVarEmptyDir).ok() &&
       IsDirectoryEmpty(kCrosvmVarEmptyDir) && !IsRunningInContainer();
-  SetCommandLineOptionWithMode("enable_sandbox",
-                               (default_enable_sandbox ? "true" : "false"),
-                               SET_FLAGS_DEFAULT);
 
-  std::string default_bootloader = FLAGS_system_image_dir + "/bootloader";
+  std::vector<std::string> system_image_dir =
+      android::base::Split(FLAGS_system_image_dir, ",");
+  std::string cur_bootloader = "";
+  std::string default_bootloader = "";
+  std::string default_enable_sandbox_str = "";
+  for (int instance_index = 0; instance_index < instance_nums.size(); instance_index++) {
+    if (guest_configs[instance_index].android_version_number == "11.0.0") {
+      cur_bootloader = DefaultHostArtifactsPath("etc/bootloader_");
+      if (guest_configs[instance_index].target_arch == Arch::Arm64) {
+        cur_bootloader += "aarch64";
+      } else {
+        cur_bootloader += "x86_64";
+      }
+      cur_bootloader += "/bootloader.crosvm";
+    } else {
+      if (instance_index >= system_image_dir.size()) {
+        cur_bootloader = system_image_dir[0];
+      } else {
+        cur_bootloader = system_image_dir[instance_index];
+      }
+      cur_bootloader += "/bootloader";
+    }
+    if (instance_index > 0) {
+      default_bootloader += ",";
+      default_enable_sandbox_str += ",";
+      default_start_webrtc += ",";
+    }
+    default_bootloader += cur_bootloader;
+    default_enable_sandbox_str += BoolToString(default_enable_sandbox);
+    if (!start_webrtc_vec[instance_index]) {
+      // This makes WebRTC the default streamer unless the user requests
+      // another via a --start_<streamer> flag, while at the same time it's
+      // possible to run without any streamer by setting --start_webrtc=false.
+      default_start_webrtc += "true";
+    } else {
+      default_start_webrtc += BoolToString(start_webrtc_vec[instance_index]);
+    }
+  }
   SetCommandLineOptionWithMode("bootloader", default_bootloader.c_str(),
                                SET_FLAGS_DEFAULT);
+  // This is the 1st place to set "start_webrtc" flag value
+  SetCommandLineOptionWithMode("start_webrtc", default_start_webrtc.c_str(),
+                               SET_FLAGS_DEFAULT);
+  // This is the 1st place to set "enable_sandbox" flag value
+  SetCommandLineOptionWithMode("enable_sandbox",
+                               default_enable_sandbox_str.c_str(), SET_FLAGS_DEFAULT);
+  return {};
 }
 
 void SetDefaultFlagsForGem5() {
@@ -975,50 +1598,109 @@
   SetCommandLineOptionWithMode("cpus", "1", SET_FLAGS_DEFAULT);
 }
 
-Result<KernelConfig> GetKernelConfigAndSetDefaults() {
+void SetDefaultFlagsForOpenwrt(Arch target_arch) {
+  if (target_arch == Arch::X86_64) {
+    SetCommandLineOptionWithMode(
+        "ap_kernel_image",
+        DefaultHostArtifactsPath("etc/openwrt/images/openwrt_kernel_x86_64")
+            .c_str(),
+        SET_FLAGS_DEFAULT);
+    SetCommandLineOptionWithMode(
+        "ap_rootfs_image",
+        DefaultHostArtifactsPath("etc/openwrt/images/openwrt_rootfs_x86_64")
+            .c_str(),
+        SET_FLAGS_DEFAULT);
+  } else if (target_arch == Arch::Arm64) {
+    SetCommandLineOptionWithMode(
+        "ap_kernel_image",
+        DefaultHostArtifactsPath("etc/openwrt/images/openwrt_kernel_aarch64")
+            .c_str(),
+        SET_FLAGS_DEFAULT);
+    SetCommandLineOptionWithMode(
+        "ap_rootfs_image",
+        DefaultHostArtifactsPath("etc/openwrt/images/openwrt_rootfs_aarch64")
+            .c_str(),
+        SET_FLAGS_DEFAULT);
+  }
+}
+
+Result<std::vector<GuestConfig>> GetGuestConfigAndSetDefaults() {
+  auto instance_nums =
+      CF_EXPECT(InstanceNumsCalculator().FromGlobalGflags().Calculate());
+  int32_t instances_size = instance_nums.size();
   CF_EXPECT(ResolveInstanceFiles(), "Failed to resolve instance files");
 
-  KernelConfig kernel_config = CF_EXPECT(ReadKernelConfig());
+  std::vector<GuestConfig> guest_configs = CF_EXPECT(ReadGuestConfig());
 
+  // TODO(weihsu), b/250988697:
+  // assume all instances are using same VM manager/app/arch,
+  // later that multiple instances may use different VM manager/app/arch
+
+  // Temporarily add this check to make sure all instances have the same target_arch.
+  // This check should be removed later.
+  for (int instance_index = 1; instance_index < guest_configs.size(); instance_index++) {
+    CF_EXPECT(guest_configs[0].target_arch == guest_configs[instance_index].target_arch,
+              "all instance target_arch should be same");
+  }
   if (FLAGS_vm_manager == "") {
-    if (IsHostCompatible(kernel_config.target_arch)) {
+    if (IsHostCompatible(guest_configs[0].target_arch)) {
       FLAGS_vm_manager = CrosvmManager::name();
     } else {
       FLAGS_vm_manager = QemuManager::name();
     }
   }
+  // TODO(weihsu), b/250988697:
+  // Currently, all instances should use same vmm
+  std::vector<std::string> vm_manager_vec =
+      android::base::Split(FLAGS_vm_manager, ",");
+  // get flag default values and store into map
+  auto name_to_default_value = CurrentFlagsToDefaultValue();
 
-  if (FLAGS_vm_manager == QemuManager::name()) {
-    SetDefaultFlagsForQemu(kernel_config.target_arch);
-  } else if (FLAGS_vm_manager == CrosvmManager::name()) {
-    SetDefaultFlagsForCrosvm();
-  } else if (FLAGS_vm_manager == Gem5Manager::name()) {
+  if (vm_manager_vec[0] == QemuManager::name()) {
+
+    CF_EXPECT(SetDefaultFlagsForQemu(guest_configs[0].target_arch, name_to_default_value));
+  } else if (vm_manager_vec[0] == CrosvmManager::name()) {
+    CF_EXPECT(SetDefaultFlagsForCrosvm(guest_configs, name_to_default_value));
+  } else if (vm_manager_vec[0] == Gem5Manager::name()) {
     // TODO: Get the other architectures working
-    if (kernel_config.target_arch != Arch::Arm64) {
+    if (guest_configs[0].target_arch != Arch::Arm64) {
       return CF_ERR("Gem5 only supports ARM64");
     }
     SetDefaultFlagsForGem5();
   } else {
     return CF_ERR("Unknown Virtual Machine Manager: " << FLAGS_vm_manager);
   }
-  if (FLAGS_vm_manager != Gem5Manager::name()) {
+  if (vm_manager_vec[0] != Gem5Manager::name()) {
+    // After SetCommandLineOptionWithMode in SetDefaultFlagsForCrosvm/Qemu,
+    // default flag values changed, need recalculate name_to_default_value
+    name_to_default_value = CurrentFlagsToDefaultValue();
+    std::vector<bool> start_webrtc_vec = CF_EXPECT(GET_FLAG_BOOL_VALUE(
+        start_webrtc));
+    bool start_webrtc = false;
+    for(bool value : start_webrtc_vec) {
+      start_webrtc |= value;
+    }
+
     auto host_operator_present =
         cuttlefish::FileIsSocket(HOST_OPERATOR_SOCKET_PATH);
     // The default for starting signaling server depends on whether or not webrtc
     // is to be started and the presence of the host orchestrator.
     SetCommandLineOptionWithMode(
         "start_webrtc_sig_server",
-        FLAGS_start_webrtc && !host_operator_present ? "true" : "false",
+        start_webrtc && !host_operator_present ? "true" : "false",
         SET_FLAGS_DEFAULT);
     SetCommandLineOptionWithMode(
         "webrtc_sig_server_addr",
         host_operator_present ? HOST_OPERATOR_SOCKET_PATH : "0.0.0.0",
         SET_FLAGS_DEFAULT);
   }
+
+  SetDefaultFlagsForOpenwrt(guest_configs[0].target_arch);
+
   // Set the env variable to empty (in case the caller passed a value for it).
   unsetenv(kCuttlefishConfigEnvVarName);
 
-  return kernel_config;
+  return guest_configs;
 }
 
 std::string GetConfigFilePath(const CuttlefishConfig& config) {
@@ -1029,4 +1711,11 @@
   return StringFromEnv("HOME", ".") + "/.cuttlefish.sh";
 }
 
+std::string GetSeccompPolicyDir() {
+  static const std::string kSeccompDir = std::string("usr/share/crosvm/") +
+                                         cuttlefish::HostArchStr() +
+                                         "-linux-gnu/seccomp";
+  return DefaultHostArtifactsPath(kSeccompDir);
+}
+
 } // namespace cuttlefish
diff --git a/host/commands/assemble_cvd/flags.h b/host/commands/assemble_cvd/flags.h
index bd067ae..43eada0 100644
--- a/host/commands/assemble_cvd/flags.h
+++ b/host/commands/assemble_cvd/flags.h
@@ -1,3 +1,18 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 #pragma once
 
 #include <fruit/fruit.h>
@@ -13,19 +28,22 @@
 
 namespace cuttlefish {
 
-struct KernelConfig {
+struct GuestConfig {
   Arch target_arch;
   bool bootconfig_supported;
+  bool hctr2_supported;
+  std::string android_version_number;
 };
 
-Result<KernelConfig> GetKernelConfigAndSetDefaults();
+Result<std::vector<GuestConfig>> GetGuestConfigAndSetDefaults();
 // Must be called after ParseCommandLineFlags.
-CuttlefishConfig InitializeCuttlefishConfiguration(const std::string& root_dir,
-                                                   int modem_simulator_count,
-                                                   KernelConfig kernel_config,
-                                                   fruit::Injector<>& injector);
+Result<CuttlefishConfig> InitializeCuttlefishConfiguration(
+    const std::string& root_dir,
+    const std::vector<GuestConfig>& guest_configs,
+    fruit::Injector<>& injector, const FetcherConfig& fetcher_config);
 
 std::string GetConfigFilePath(const CuttlefishConfig& config);
 std::string GetCuttlefishEnvPath();
+std::string GetSeccompPolicyDir();
 
 } // namespace cuttlefish
diff --git a/host/commands/assemble_cvd/flags_defaults.h b/host/commands/assemble_cvd/flags_defaults.h
new file mode 100644
index 0000000..0109bac
--- /dev/null
+++ b/host/commands/assemble_cvd/flags_defaults.h
@@ -0,0 +1,200 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#pragma once
+#define CF_DEFAULTS_DYNAMIC_STRING ""
+#define CF_DEFAULTS_DYNAMIC_INT 0
+
+// Common configs paramneters
+#define CF_DEFAULTS_NUM_INSTANCES 1
+#define CF_DEFAULTS_INSTANCE_NUMS CF_DEFAULTS_DYNAMIC_STRING
+#define CF_DEFAULTS_BASE_INSTANCE_NUM cuttlefish::GetInstance()
+#define CF_DEFAULTS_ASSEMBLY_DIR \
+  (StringFromEnv("HOME", ".") + "/cuttlefish_assembly")
+#define CF_DEFAULTS_INSTANCE_DIR (StringFromEnv("HOME", ".") + "/cuttlefish")
+
+#define CF_DEFAULTS_SYSTEM_IMAGE_DIR CF_DEFAULTS_DYNAMIC_STRING
+
+// Instance specific parameters
+//  VM default parameters
+#define CF_DEFAULTS_DISPLAY_DPI 320
+#define CF_DEFAULTS_DISPLAY_REFRESH_RATE 60
+#define CF_DEFAULTS_DISPLAY_WIDTH 720
+#define CF_DEFAULTS_DISPLAY_HEIGHT 1280
+#define CF_DEFAULTS_DISPLAYS_TEXTPROTO ""
+#define CF_DEFAULTS_CPUS 2
+#define CF_DEFAULTS_RESUME true
+#define CF_DEFAULTS_DAEMON false
+#define CF_DEFAULTS_VM_MANAGER CF_DEFAULTS_DYNAMIC_STRING
+#define CF_DEFAULTS_VSOCK_GUEST_CID cuttlefish::GetDefaultVsockCid()
+#define CF_DEFAULTS_ENABLE_MINIMAL_MODE false
+#define CF_DEFAULTS_RESTART_SUBPROCESSES false
+#define CF_DEFAULTS_SETUPWIZARD_MODE "DISABLED"
+#define CF_DEFAULTS_SMT false
+#define CF_DEFAULTS_USE_ALLOCD false
+#define CF_DEFAULTS_USE_SDCARD true
+#define CF_DEFAULTS_UUID \
+  cuttlefish::ForCurrentInstance(cuttlefish::kDefaultUuidPrefix)
+#define CF_DEFAULTS_FILE_VERBOSITY "DEBUG"
+#define CF_DEFAULTS_VERBOSITY "INFO"
+#define CF_DEFAULTS_RUN_FILE_DISCOVERY true
+#define CF_DEFAULTS_MEMORY_MB CF_DEFAULTS_DYNAMIC_INT
+#define CF_DEFAULTS_SHARE_SCHED_CORE false
+// TODO: defined twice, please remove redundant definitions
+#define CF_DEFAULTS_USE_OVERLAY true
+
+// crosvm default parameters
+#define CF_DEFAULTS_CROSVM_BINARY HostBinaryPath("crosvm")
+#define CF_DEFAULTS_SECCOMP_POLICY_DIR cuttlefish::GetSeccompPolicyDir()
+#define CF_DEFAULTS_ENABLE_SANDBOX false
+
+// Qemu default parameters
+#define CF_DEFAULTS_QEMU_BINARY_DIR "/usr/bin"
+
+// Gem5 default parameters
+#define CF_DEFAULTS_GEM5_BINARY_DIR HostBinaryPath("gem5")
+#define CF_DEFAULTS_GEM5_CHECKPOINT_DIR CF_DEFAULTS_DYNAMIC_STRING
+#define CF_DEFAULTS_GEM5_DEBUG_FILE CF_DEFAULTS_DYNAMIC_STRING
+#define CF_DEFAULTS_GEM5_DEBUG_FLAGS CF_DEFAULTS_DYNAMIC_STRING
+
+// Boot default parameters
+#define CF_DEFAULTS_BOOT_SLOT CF_DEFAULTS_DYNAMIC_STRING
+#define CF_DEFAULTS_BOOTLOADER CF_DEFAULTS_DYNAMIC_STRING
+#define CF_DEFAULTS_ENABLE_BOOTANIMATION true
+#define CF_DEFAULTS_EXTRA_BOOTCONFIG_ARGS CF_DEFAULTS_DYNAMIC_STRING
+#define CF_DEFAULTS_PAUSE_IN_BOOTLOADER false
+#define CF_DEFAULTS_REBOOT_NOTIFICATION_FD (-1)
+
+// Security default parameters
+#define CF_DEFAULTS_GUEST_ENFORCE_SECURITY true
+#define CF_DEFAULTS_USE_RANDOM_SERIAL false
+#define CF_DEFAULTS_SERIAL_NUMBER \
+  cuttlefish::ForCurrentInstance("CUTTLEFISHCVD")
+#define CF_DEFAULTS_SECURE_HALS "keymint,gatekeeper"
+#define CF_DEFAULTS_PROTECTED_VM false
+#define CF_DEFAULTS_MTE false
+
+// Kernel default parameters
+#define CF_DEFAULTS_ENABLE_KERNEL_LOG true
+#define CF_DEFAULTS_KGDB false
+#define CF_DEFAULTS_GDB_PORT CF_DEFAULTS_DYNAMIC_INT
+#define CF_DEFAULTS_CONSOLE false
+#define CF_DEFAULTS_EXTRA_KERNEL_CMDLINE CF_DEFAULTS_DYNAMIC_STRING
+#define CF_DEFAULTS_INITRAMFS_PATH CF_DEFAULTS_DYNAMIC_STRING
+#define CF_DEFAULTS_KERNEL_PATH CF_DEFAULTS_DYNAMIC_STRING
+
+// Disk default parameters
+#define CF_DEFAULTS_BLANK_METADATA_IMAGE_MB "64"
+#define CF_DEFAULTS_BLANK_SDCARD_IMAGE_MB "2048"
+#define CF_DEFAULTS_BOOT_IMAGE CF_DEFAULTS_DYNAMIC_STRING
+#define CF_DEFAULTS_DATA_IMAGE CF_DEFAULTS_DYNAMIC_STRING
+#define CF_DEFAULTS_INIT_BOOT_IMAGE CF_DEFAULTS_DYNAMIC_STRING
+#define CF_DEFAULTS_METADATA_IMAGE CF_DEFAULTS_DYNAMIC_STRING
+#define CF_DEFAULTS_MISC_IMAGE CF_DEFAULTS_DYNAMIC_STRING
+#define CF_DEFAULTS_LINUX_INITRAMFS_PATH CF_DEFAULTS_DYNAMIC_STRING
+#define CF_DEFAULTS_LINUX_KERNEL_PATH CF_DEFAULTS_DYNAMIC_STRING
+#define CF_DEFAULTS_LINUX_ROOT_IMAGE CF_DEFAULTS_DYNAMIC_STRING
+#define CF_DEFAULTS_FUCHSIA_ZEDBOOT_PATH CF_DEFAULTS_DYNAMIC_STRING
+#define CF_DEFAULTS_FUCHSIA_MULTIBOOT_BIN_PATH CF_DEFAULTS_DYNAMIC_STRING
+#define CF_DEFAULTS_FUCHSIA_ROOT_IMAGE CF_DEFAULTS_DYNAMIC_STRING
+#define CF_DEFAULTS_CUSTOM_PARTITION_PATH CF_DEFAULTS_DYNAMIC_STRING
+#define CF_DEFAULTS_SUPER_IMAGE CF_DEFAULTS_DYNAMIC_STRING
+#define CF_DEFAULTS_VBMETA_IMAGE CF_DEFAULTS_DYNAMIC_STRING
+#define CF_DEFAULTS_VBMETA_SYSTEM_IMAGE CF_DEFAULTS_DYNAMIC_STRING
+#define CF_DEFAULTS_VBMETA_VENDOR_DLKM_IMAGE CF_DEFAULTS_DYNAMIC_STRING
+#define CF_DEFAULTS_VENDOR_BOOT_IMAGE CF_DEFAULTS_DYNAMIC_STRING
+
+// Policy default parameters
+#define CF_DEFAULTS_DATA_POLICY "use_existing"
+#define CF_DEFAULTS_USERDATA_FORMAT "f2fs"
+#define CF_DEFAULTS_BLANK_DATA_IMAGE_MB CF_DEFAULTS_DYNAMIC_INT
+
+// Graphics default parameters
+#define CF_DEFAULTS_HWCOMPOSER cuttlefish::kHwComposerAuto
+#define CF_DEFAULTS_GPU_MODE cuttlefish::kGpuModeAuto
+#define CF_DEFAULTS_RECORD_SCREEN false
+#define CF_DEFAULTS_GPU_CAPTURE_BINARY CF_DEFAULTS_DYNAMIC_STRING
+#define CF_DEFAULTS_ENABLE_GPU_UDMABUF false
+#define CF_DEFAULTS_DISPLAY0 CF_DEFAULTS_DYNAMIC_STRING
+#define CF_DEFAULTS_DISPLAY1 CF_DEFAULTS_DYNAMIC_STRING
+#define CF_DEFAULTS_DISPLAY2 CF_DEFAULTS_DYNAMIC_STRING
+#define CF_DEFAULTS_DISPLAY3 CF_DEFAULTS_DYNAMIC_STRING
+
+// Camera default parameters
+#define CF_DEFAULTS_CAMERA_SERVER_PORT CF_DEFAULTS_DYNAMIC_INT
+
+// Connectivity default parameters
+#define CF_DEFAULTS_RIL_DNS "8.8.8.8"
+#define CF_DEFAULTS_NETSIM false
+#define CF_DEFAULTS_NETSIM_BT false
+
+// Wifi default parameters
+#define CF_DEFAULTS_AP_KERNEL_IMAGE CF_DEFAULTS_DYNAMIC_STRING
+#define CF_DEFAULTS_AP_ROOTFS_IMAGE CF_DEFAULTS_DYNAMIC_STRING
+#define CF_DEFAULTS_VHOST_NET false
+#define CF_DEFAULTS_VHOST_USER_MAC80211_HWSIM CF_DEFAULTS_DYNAMIC_STRING
+#define CF_DEFAULTS_WMEDIUMD_CONFIG CF_DEFAULTS_DYNAMIC_STRING
+
+// UWB default parameters
+#define CF_DEFAULTS_ENABLE_HOST_UWB false
+#define CF_DEFAULTS_ENABLE_PICA_INSTANCE_NUM 0
+
+// Bluetooth default parameters
+#define CF_DEFAULTS_BLUETOOTH_CONTROLLER_PROPERTIES_FILE \
+  "etc/rootcanal/data/controller_properties.json"
+#define CF_DEFAULTS_BLUETOOTH_DEFAULT_COMMANDS_FILE \
+  "etc/rootcanal/data/default_commands"
+#define CF_DEFAULTS_ENABLE_HOST_BLUETOOTH true
+#define CF_DEFAULTS_ENABLE_ROOTCANAL_INSTANCE_NUM 0
+#define CF_DEFAULTS_ROOTCANAL_ARGS CF_DEFAULTS_DYNAMIC_STRING
+
+// Modem Simulator default parameters
+#define CF_DEFAULTS_ENABLE_MODEM_SIMULATOR true
+#define CF_DEFAULTS_MODEM_SIMULATOR_SIM_TYPE 1
+#define CF_DEFAULTS_MODEM_SIMULATOR_COUNT 1
+
+// Audio default parameters
+#define CF_DEFAULTS_ENABLE_AUDIO true
+
+// Streaming default parameters
+#define CF_DEFAULTS_START_WEBRTC false
+#define CF_DEFAULTS_START_WEBRTC_SIG_SERVER true
+#define CF_DEFAULTS_WEBRTC_DEVICE_ID "cvd-{num}"
+#define CF_DEFAULTS_VERIFY_SIG_SERVER_CERTIFICATE false
+#define CF_DEFAULTS_WEBRTC_ASSETS_DIR \
+  DefaultHostArtifactsPath("usr/share/webrtc/assets")
+#define CF_DEFAULTS_WEBRTC_CERTS_DIR \
+  DefaultHostArtifactsPath("usr/share/webrtc/certs")
+#define CF_DEFAULTS_WEBRTC_SIG_SERVER_ADDR CF_DEFAULTS_DYNAMIC_STRING
+#define CF_DEFAULTS_WEBRTC_SIG_SERVER_PATH "/register_device"
+#define CF_DEFAULTS_WEBRTC_SIG_SERVER_PORT 443
+#define CF_DEFAULTS_WEBRTC_SIG_SERVER_SECURE true
+#define CF_DEFAULTS_TCP_PORT_RANGE "15550:15599"
+#define CF_DEFAULTS_UDP_PORT_RANGE "15550:15599"
+
+// Adb default parameters
+// TODO : Replaceconstants with these flags, they're currently defined throug
+// GflagsCompatFlag
+#define CF_DEFAULTS_RUN_ADB_CONNECTOR true
+#define CF_DEFAULTS_ADB_MODE "vsock_half_tunnel"
+
+// Location default parameters
+#define CF_DEFAULTS_START_GNSS_PROXY true
+#define CF_DEFAULTS_FIXED_LOCATION_FILE_PATH CF_DEFAULTS_DYNAMIC_STRING
+#define CF_DEFAULTS_GNSS_FILE_PATH CF_DEFAULTS_DYNAMIC_STRING
+
+// Metrics default parameters
+// TODO: Defined twice , please remove redundant definitions
+#define CF_DEFAULTS_REPORT_ANONYMOUS_USAGE_STATS CF_DEFAULTS_DYNAMIC_STRING
diff --git a/host/commands/assemble_cvd/proto/Android.bp b/host/commands/assemble_cvd/proto/Android.bp
new file mode 100644
index 0000000..ec0d810
--- /dev/null
+++ b/host/commands/assemble_cvd/proto/Android.bp
@@ -0,0 +1,30 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+cc_library_static {
+    name: "libcuttlefish_launch_cvd_proto",
+    host_supported: true,
+    proto: {
+        export_proto_headers: true,
+        type: "full",
+    },
+    srcs: ["launch_cvd.proto"],
+    defaults: ["cuttlefish_host", "cuttlefish_libicuuc"],
+}
diff --git a/host/commands/assemble_cvd/proto/launch_cvd.proto b/host/commands/assemble_cvd/proto/launch_cvd.proto
new file mode 100644
index 0000000..87855e8
--- /dev/null
+++ b/host/commands/assemble_cvd/proto/launch_cvd.proto
@@ -0,0 +1,32 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+syntax = "proto3";
+
+package cuttlefish;
+
+message InstanceDisplay {
+  int32 width = 1;
+  int32 height = 2;
+  int32 dpi = 3;
+  int32 refresh_rate_hertz = 4;
+}
+message InstanceDisplays {
+  repeated InstanceDisplay displays = 1;
+}
+message InstancesDisplays {
+  repeated InstanceDisplays instances = 1;
+}
\ No newline at end of file
diff --git a/host/commands/assemble_cvd/ramdisk_modules.h b/host/commands/assemble_cvd/ramdisk_modules.h
new file mode 100644
index 0000000..7f23e4a
--- /dev/null
+++ b/host/commands/assemble_cvd/ramdisk_modules.h
@@ -0,0 +1,25 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+#include <set>
+
+static constexpr auto RAMDISK_MODULES = {
+    "failover.ko",   "nd_virtio.ko",      "net_failover.ko",
+    "virtio_blk.ko", "virtio_console.ko", "virtio_dma_buf.ko",
+    "virtio-gpu.ko", "virtio_input.ko",   "virtio_net.ko",
+    "virtio_pci.ko", "virtio-rng.ko",     "vmw_vsock_virtio_transport.ko",
+};
\ No newline at end of file
diff --git a/host/commands/assemble_cvd/super_image_mixer.cc b/host/commands/assemble_cvd/super_image_mixer.cc
index f048a81..a22e2e1 100644
--- a/host/commands/assemble_cvd/super_image_mixer.cc
+++ b/host/commands/assemble_cvd/super_image_mixer.cc
@@ -35,6 +35,20 @@
 #include "host/libs/config/fetcher_config.h"
 
 namespace cuttlefish {
+
+bool SuperImageNeedsRebuilding(const FetcherConfig& fetcher_config) {
+  bool has_default_build = false;
+  bool has_system_build = false;
+  for (const auto& file_iter : fetcher_config.get_cvd_files()) {
+    if (file_iter.second.source == FileSource::DEFAULT_BUILD) {
+      has_default_build = true;
+    } else if (file_iter.second.source == FileSource::SYSTEM_BUILD) {
+      has_system_build = true;
+    }
+  }
+  return has_default_build && has_system_build;
+}
+
 namespace {
 
 std::string TargetFilesZip(const FetcherConfig& fetcher_config,
@@ -55,9 +69,11 @@
 
 const std::string kMiscInfoPath = "META/misc_info.txt";
 const std::set<std::string> kDefaultTargetImages = {
-    "IMAGES/boot.img",        "IMAGES/init_boot.img", "IMAGES/odm.img",
-    "IMAGES/odm_dlkm.img",    "IMAGES/recovery.img",  "IMAGES/userdata.img",
-    "IMAGES/vbmeta.img",      "IMAGES/vendor.img",    "IMAGES/vendor_dlkm.img",
+    "IMAGES/boot.img",        "IMAGES/init_boot.img",
+    "IMAGES/odm.img",         "IMAGES/odm_dlkm.img",
+    "IMAGES/recovery.img",    "IMAGES/userdata.img",
+    "IMAGES/vbmeta.img",      "IMAGES/vendor.img",
+    "IMAGES/vendor_dlkm.img", "IMAGES/vbmeta_vendor_dlkm.img",
     "IMAGES/system_dlkm.img",
 };
 const std::set<std::string> kDefaultTargetBuildProp = {
@@ -78,54 +94,48 @@
   }
 }
 
-bool CombineTargetZipFiles(const std::string& default_target_zip,
-                           const std::string& system_target_zip,
-                           const std::string& output_path) {
+Result<void> CombineTargetZipFiles(const std::string& default_target_zip,
+                                   const std::string& system_target_zip,
+                                   const std::string& output_path) {
   Archive default_target_archive(default_target_zip);
-  Archive system_target_archive(system_target_zip);
-
   auto default_target_contents = default_target_archive.Contents();
-  if (default_target_contents.size() == 0) {
-    LOG(ERROR) << "Could not open " << default_target_zip;
-    return false;
-  }
-  auto system_target_contents = system_target_archive.Contents();
-  if (system_target_contents.size() == 0) {
-    LOG(ERROR) << "Could not open " << system_target_zip;
-    return false;
-  }
-  if (mkdir(output_path.c_str(), S_IRWXU | S_IRWXG | S_IROTH | S_IXOTH) < 0) {
-    LOG(ERROR) << "Could not create directory " << output_path;
-    return false;
-  }
-  std::string output_meta = output_path + "/META";
-  if (mkdir(output_meta.c_str(), S_IRWXU | S_IRWXG | S_IROTH | S_IXOTH) < 0) {
-    LOG(ERROR) << "Could not create directory " << output_meta;
-    return false;
-  }
+  CF_EXPECT(default_target_contents.size() != 0,
+            "Could not open " << default_target_zip);
 
-  if (std::find(default_target_contents.begin(), default_target_contents.end(), kMiscInfoPath)
-      == default_target_contents.end()) {
-    LOG(ERROR) << "Default target files zip does not have " << kMiscInfoPath;
-    return false;
-  }
-  if (std::find(system_target_contents.begin(), system_target_contents.end(), kMiscInfoPath)
-      == system_target_contents.end()) {
-    LOG(ERROR) << "System target files zip does not have " << kMiscInfoPath;
-    return false;
-  }
+  Archive system_target_archive(system_target_zip);
+  auto system_target_contents = system_target_archive.Contents();
+  CF_EXPECT(system_target_contents.size() != 0,
+            "Could not open " << system_target_zip);
+
+  CF_EXPECT(
+      mkdir(output_path.c_str(), S_IRWXU | S_IRWXG | S_IROTH | S_IXOTH) >= 0,
+      "Could not create directory " << output_path);
+
+  std::string output_meta = output_path + "/META";
+  CF_EXPECT(
+      mkdir(output_meta.c_str(), S_IRWXU | S_IRWXG | S_IROTH | S_IXOTH) >= 0,
+      "Could not create directory " << output_meta);
+
+  CF_EXPECT(
+      std::find(default_target_contents.begin(), default_target_contents.end(),
+                kMiscInfoPath) != default_target_contents.end(),
+      "Default target files zip does not have " << kMiscInfoPath);
+
+  CF_EXPECT(
+      std::find(system_target_contents.begin(), system_target_contents.end(),
+                kMiscInfoPath) != system_target_contents.end(),
+      "System target files zip does not have " << kMiscInfoPath);
+
   const auto default_misc =
       ParseMiscInfo(default_target_archive.ExtractToMemory(kMiscInfoPath));
-  if (default_misc.size() == 0) {
-    LOG(ERROR) << "Could not read the default misc_info.txt file.";
-    return false;
-  }
+  CF_EXPECT(default_misc.size() != 0,
+            "Could not read the default misc_info.txt file.");
+
   const auto system_misc =
       ParseMiscInfo(system_target_archive.ExtractToMemory(kMiscInfoPath));
-  if (system_misc.size() == 0) {
-    LOG(ERROR) << "Could not read the system misc_info.txt file.";
-    return false;
-  }
+  CF_EXPECT(system_misc.size() != 0,
+            "Could not read the system misc_info.txt file.");
+
   auto output_misc = default_misc;
   auto system_super_partitions = SuperPartitionComponents(system_misc);
   // Ensure specific skipped partitions end up in the misc_info.txt
@@ -136,24 +146,18 @@
       system_super_partitions.push_back(partition);
     }
   }
-  if (!SetSuperPartitionComponents(system_super_partitions, &output_misc)) {
-    LOG(ERROR) << "Failed to update super partitions components for misc_info";
-    return false;
-  }
+  CF_EXPECT(SetSuperPartitionComponents(system_super_partitions, &output_misc),
+            "Failed to update super partitions components for misc_info");
 
   auto misc_output_path = output_path + "/" + kMiscInfoPath;
   SharedFD misc_output_file =
       SharedFD::Creat(misc_output_path.c_str(), 0644);
-  if (!misc_output_file->IsOpen()) {
-    LOG(ERROR) << "Failed to open output misc file: "
-               << misc_output_file->StrError();
-    return false;
-  }
-  if (WriteAll(misc_output_file, WriteMiscInfo(output_misc)) < 0) {
-    LOG(ERROR) << "Failed to write output misc file contents: "
-               << misc_output_file->StrError();
-    return false;
-  }
+  CF_EXPECT(misc_output_file->IsOpen(), "Failed to open output misc file: "
+                                            << misc_output_file->StrError());
+
+  CF_EXPECT(WriteAll(misc_output_file, WriteMiscInfo(output_misc)) >= 0,
+            "Failed to write output misc file contents: "
+                << misc_output_file->StrError());
 
   for (const auto& name : default_target_contents) {
     if (!android::base::StartsWith(name, "IMAGES/")) {
@@ -164,10 +168,8 @@
       continue;
     }
     LOG(INFO) << "Writing " << name;
-    if (!default_target_archive.ExtractFiles({name}, output_path)) {
-      LOG(ERROR) << "Failed to extract " << name << " from the default target zip";
-      return false;
-    }
+    CF_EXPECT(default_target_archive.ExtractFiles({name}, output_path),
+              "Failed to extract " << name << " from the default target zip");
   }
   for (const auto& name : default_target_contents) {
     if (!android::base::EndsWith(name, "build.prop")) {
@@ -177,10 +179,8 @@
     }
     FindImports(&default_target_archive, name);
     LOG(INFO) << "Writing " << name;
-    if (!default_target_archive.ExtractFiles({name}, output_path)) {
-      LOG(ERROR) << "Failed to extract " << name << " from the default target zip";
-      return false;
-    }
+    CF_EXPECT(default_target_archive.ExtractFiles({name}, output_path),
+              "Failed to extract " << name << " from the default target zip");
   }
 
   for (const auto& name : system_target_contents) {
@@ -192,10 +192,8 @@
       continue;
     }
     LOG(INFO) << "Writing " << name;
-    if (!system_target_archive.ExtractFiles({name}, output_path)) {
-      LOG(ERROR) << "Failed to extract " << name << " from the system target zip";
-      return false;
-    }
+    CF_EXPECT(system_target_archive.ExtractFiles({name}, output_path),
+              "Failed to extract " << name << " from the system target zip");
   }
   for (const auto& name : system_target_contents) {
     if (!android::base::EndsWith(name, "build.prop")) {
@@ -205,13 +203,11 @@
     }
     FindImports(&system_target_archive, name);
     LOG(INFO) << "Writing " << name;
-    if (!system_target_archive.ExtractFiles({name}, output_path)) {
-      LOG(ERROR) << "Failed to extract " << name << " from the default target zip";
-      return false;
-    }
+    CF_EXPECT(system_target_archive.ExtractFiles({name}, output_path),
+              "Failed to extract " << name << " from the default target zip");
   }
 
-  return true;
+  return {};
 }
 
 bool BuildSuperImage(const std::string& combined_target_zip,
@@ -238,92 +234,63 @@
   }) == 0;
 }
 
-bool SuperImageNeedsRebuilding(const FetcherConfig& fetcher_config) {
-  bool has_default_build = false;
-  bool has_system_build = false;
-  for (const auto& file_iter : fetcher_config.get_cvd_files()) {
-    if (file_iter.second.source == FileSource::DEFAULT_BUILD) {
-      has_default_build = true;
-    } else if (file_iter.second.source == FileSource::SYSTEM_BUILD) {
-      has_system_build = true;
-    }
-  }
-  return has_default_build && has_system_build;
-}
-
-bool RebuildSuperImage(const FetcherConfig& fetcher_config,
-                       const CuttlefishConfig& config,
-                       const std::string& output_path) {
+Result<void> RebuildSuperImage(const FetcherConfig& fetcher_config,
+                               const CuttlefishConfig& config,
+                               const std::string& output_path) {
   std::string default_target_zip =
       TargetFilesZip(fetcher_config, FileSource::DEFAULT_BUILD);
-  if (default_target_zip == "") {
-    LOG(ERROR) << "Unable to find default target zip file.";
-    return false;
-  }
+  CF_EXPECT(default_target_zip != "",
+            "Unable to find default target zip file.");
+
   std::string system_target_zip =
       TargetFilesZip(fetcher_config, FileSource::SYSTEM_BUILD);
-  if (system_target_zip == "") {
-    LOG(ERROR) << "Unable to find system target zip file.";
-    return false;
-  }
+  CF_EXPECT(system_target_zip != "", "Unable to find system target zip file.");
+
   auto instance = config.ForDefaultInstance();
   // TODO(schuffelen): Use cuttlefish_assembly
   std::string combined_target_path = instance.PerInstanceInternalPath("target_combined");
   // TODO(schuffelen): Use otatools/bin/merge_target_files
-  if (!CombineTargetZipFiles(default_target_zip, system_target_zip,
-                             combined_target_path)) {
-    LOG(ERROR) << "Could not combine target zip files.";
-    return false;
-  }
-  bool success = BuildSuperImage(combined_target_path, output_path);
-  if (!success) {
-    LOG(ERROR) << "Could not write the final output super image.";
-  }
-  return success;
-}
+  CF_EXPECT(CombineTargetZipFiles(default_target_zip, system_target_zip,
+                                  combined_target_path),
+            "Could not combine target zip files.");
 
-class SuperImageOutputPathTag {};
+  CF_EXPECT(BuildSuperImage(combined_target_path, output_path),
+            "Could not write the final output super image.");
+  return {};
+}
 
 class SuperImageRebuilderImpl : public SuperImageRebuilder {
  public:
-  INJECT(SuperImageRebuilderImpl(const FetcherConfig& fetcher_config,
-                                 const CuttlefishConfig& config,
-                                 ANNOTATED(SuperImageOutputPathTag, std::string)
-                                     output_path))
-      : fetcher_config_(fetcher_config),
-        config_(config),
-        output_path_(output_path) {}
+  INJECT(SuperImageRebuilderImpl(
+      const FetcherConfig& fetcher_config, const CuttlefishConfig& config,
+      const CuttlefishConfig::InstanceSpecific& instance))
+      : fetcher_config_(fetcher_config), config_(config), instance_(instance) {}
 
   std::string Name() const override { return "SuperImageRebuilderImpl"; }
   bool Enabled() const override { return true; }
 
  private:
   std::unordered_set<SetupFeature*> Dependencies() const override { return {}; }
-  bool Setup() override {
+  Result<void> ResultSetup() override {
     if (SuperImageNeedsRebuilding(fetcher_config_)) {
-      bool success = RebuildSuperImage(fetcher_config_, config_, output_path_);
-      if (!success) {
-        LOG(ERROR)
-            << "Super image rebuilding requested but could not be completed.";
-        return false;
-      }
+      CF_EXPECT(RebuildSuperImage(fetcher_config_, config_,
+                                  instance_.new_super_image()));
     }
-    return true;
+    return {};
   }
 
   const FetcherConfig& fetcher_config_;
   const CuttlefishConfig& config_;
-  std::string output_path_;
+  const CuttlefishConfig::InstanceSpecific& instance_;
 };
 
 }  // namespace
 
-fruit::Component<fruit::Required<const FetcherConfig, const CuttlefishConfig>,
+fruit::Component<fruit::Required<const FetcherConfig, const CuttlefishConfig,
+                                 const CuttlefishConfig::InstanceSpecific>,
                  SuperImageRebuilder>
-SuperImageRebuilderComponent(const std::string* output_path) {
+SuperImageRebuilderComponent() {
   return fruit::createComponent()
-      .bindInstance<fruit::Annotated<SuperImageOutputPathTag, std::string>>(
-          *output_path)
       .bind<SuperImageRebuilder, SuperImageRebuilderImpl>()
       .addMultibinding<SetupFeature, SuperImageRebuilder>();
 }
diff --git a/host/commands/assemble_cvd/super_image_mixer.h b/host/commands/assemble_cvd/super_image_mixer.h
index fda7f4d..0455fa2 100644
--- a/host/commands/assemble_cvd/super_image_mixer.h
+++ b/host/commands/assemble_cvd/super_image_mixer.h
@@ -12,6 +12,7 @@
 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 // See the License for the specific language governing permissions and
 // limitations under the License.
+#pragma once
 
 #include <fruit/fruit.h>
 
@@ -23,8 +24,10 @@
 
 class SuperImageRebuilder : public SetupFeature {};
 
-fruit::Component<fruit::Required<const FetcherConfig, const CuttlefishConfig>,
+fruit::Component<fruit::Required<const FetcherConfig, const CuttlefishConfig,
+                                 const CuttlefishConfig::InstanceSpecific>,
                  SuperImageRebuilder>
-SuperImageRebuilderComponent(const std::string* output_path);
+SuperImageRebuilderComponent();
+bool SuperImageNeedsRebuilding(const FetcherConfig& fetcher_config);
 
 } // namespace cuttlefish
diff --git a/host/commands/assemble_cvd/vendor_dlkm_utils.cc b/host/commands/assemble_cvd/vendor_dlkm_utils.cc
new file mode 100644
index 0000000..54c66d5
--- /dev/null
+++ b/host/commands/assemble_cvd/vendor_dlkm_utils.cc
@@ -0,0 +1,428 @@
+//
+// Copyright (C) 2023 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include <android-base/file.h>
+#include <android-base/logging.h>
+#include <android-base/strings.h>
+
+#include <fcntl.h>
+
+#include <fcntl.h>
+#include <map>
+#include <queue>
+#include <set>
+#include <sstream>
+#include <string>
+#include <vector>
+
+#include "common/libs/utils/files.h"
+#include "common/libs/utils/subprocess.h"
+#include "host/commands/assemble_cvd/boot_image_utils.h"
+#include "host/commands/assemble_cvd/ramdisk_modules.h"
+#include "host/libs/config/cuttlefish_config.h"
+
+namespace cuttlefish {
+
+namespace {
+
+constexpr size_t RoundDown(size_t a, size_t divisor) {
+  return a / divisor * divisor;
+}
+
+constexpr size_t RoundUp(size_t a, size_t divisor) {
+  return RoundDown(a + divisor, divisor);
+}
+
+template <typename Container>
+bool WriteLinesToFile(const Container& lines, const char* path) {
+  android::base::unique_fd fd(
+      open(path, O_WRONLY | O_CREAT | O_TRUNC | O_CLOEXEC, 0640));
+  if (!fd.ok()) {
+    PLOG(ERROR) << "Failed to open " << path;
+    return false;
+  }
+  for (const auto& line : lines) {
+    if (!android::base::WriteFully(fd, line.data(), line.size())) {
+      PLOG(ERROR) << "Failed to write to " << path;
+      return false;
+    }
+    const char c = '\n';
+    if (write(fd.get(), &c, 1) != 1) {
+      PLOG(ERROR) << "Failed to write to " << path;
+      return false;
+    }
+  }
+  return true;
+}
+
+
+// Generate a filesystem_config.txt for all files in |fs_root|
+bool WriteFsConfig(const char* output_path, const std::string& fs_root,
+                   const std::string& mount_point) {
+  android::base::unique_fd fd(
+      open(output_path, O_WRONLY | O_CREAT | O_TRUNC | O_CLOEXEC, 0644));
+  if (!fd.ok()) {
+    PLOG(ERROR) << "Failed to open " << output_path;
+    return false;
+  }
+  if (!android::base::WriteStringToFd(
+          " 0 0 755 selabel=u:object_r:rootfs:s0 capabilities=0x0\n", fd)) {
+    PLOG(ERROR) << "Failed to write to " << output_path;
+    return false;
+  }
+  WalkDirectory(fs_root, [&fd, &output_path, &mount_point,
+                          &fs_root](const std::string& file_path) {
+    const auto filename = file_path.substr(
+        fs_root.back() == '/' ? fs_root.size() : fs_root.size() + 1);
+    std::string fs_context = " 0 0 644 capabilities=0x0\n";
+    if (DirectoryExists(file_path)) {
+      fs_context = " 0 0 755 capabilities=0x0\n";
+    }
+    if (!android::base::WriteStringToFd(
+            mount_point + "/" + filename + fs_context, fd)) {
+      PLOG(ERROR) << "Failed to write to " << output_path;
+      return false;
+    }
+    return true;
+  });
+  return true;
+}
+
+std::vector<std::string> GetRamdiskModules(
+    const std::vector<std::string>& all_modules) {
+  static const auto ramdisk_modules_allow_list =
+      std::set<std::string>(RAMDISK_MODULES.begin(), RAMDISK_MODULES.end());
+  std::vector<std::string> ramdisk_modules;
+  for (const auto& mod_path : all_modules) {
+    if (mod_path.empty()) {
+      continue;
+    }
+    const auto mod_name = cpp_basename(mod_path);
+    if (ramdisk_modules_allow_list.count(mod_name) != 0) {
+      ramdisk_modules.emplace_back(mod_path);
+    }
+  }
+  return ramdisk_modules;
+}
+
+// Filter the dependency map |deps| to only contain nodes in |allow_list|
+std::map<std::string, std::vector<std::string>> FilterDependencies(
+    const std::map<std::string, std::vector<std::string>>& deps,
+    const std::set<std::string>& allow_list) {
+  std::map<std::string, std::vector<std::string>> new_deps;
+  for (const auto& mod_name : allow_list) {
+    new_deps[mod_name].clear();
+  }
+  for (const auto& [mod_name, children] : deps) {
+    if (!allow_list.count(mod_name)) {
+      continue;
+    }
+    for (const auto& child : children) {
+      if (!allow_list.count(child)) {
+        continue;
+      }
+      new_deps[mod_name].emplace_back(child);
+    }
+  }
+  return new_deps;
+}
+
+// Write dependency map to modules.dep file
+bool WriteDepsToFile(
+    const std::map<std::string, std::vector<std::string>>& deps,
+    const std::string& output_path) {
+  std::stringstream ss;
+  for (const auto& [key, val] : deps) {
+    ss << key << ":";
+    for (const auto& dep : val) {
+      ss << " " << dep;
+    }
+    ss << "\n";
+  }
+  if (!android::base::WriteStringToFile(ss.str(), output_path)) {
+    PLOG(ERROR) << "Failed to write modules.dep to " << output_path;
+    return false;
+  }
+  return true;
+}
+
+// Parse modules.dep into an in-memory data structure, key is path to a kernel
+// module, value is all dependency modules
+std::map<std::string, std::vector<std::string>> LoadModuleDeps(
+    const std::string& filename) {
+  std::map<std::string, std::vector<std::string>> dependency_map;
+  const auto dep_str = android::base::Trim(ReadFile(filename));
+  const auto dep_lines = android::base::Split(dep_str, "\n");
+  for (const auto& line : dep_lines) {
+    const auto mod_name = line.substr(0, line.find(":"));
+    const auto deps =
+        android::base::Tokenize(line.substr(mod_name.size() + 1), " ");
+    if (!deps.empty()) {
+      dependency_map[mod_name] = deps;
+    }
+  }
+
+  return dependency_map;
+}
+
+// Recursively compute all modules which |start_nodes| depend on
+std::set<std::string> ComputeTransitiveClosure(
+    const std::vector<std::string>& start_nodes,
+    const std::map<std::string, std::vector<std::string>>& dependencies) {
+  std::deque<std::string> queue(start_nodes.begin(), start_nodes.end());
+  std::set<std::string> visited;
+  while (!queue.empty()) {
+    const auto cur = queue.front();
+    queue.pop_front();
+    if (visited.find(cur) != visited.end()) {
+      continue;
+    }
+    visited.insert(cur);
+    const auto it = dependencies.find(cur);
+    if (it == dependencies.end()) {
+      continue;
+    }
+    for (const auto& dep : it->second) {
+      queue.emplace_back(dep);
+    }
+  }
+  return visited;
+}
+
+bool GenerateFileContexts(const char* output_path,
+                          const std::string& mount_point) {
+  const auto file_contexts_txt = std::string(output_path) + ".txt";
+  android::base::unique_fd fd(open(file_contexts_txt.c_str(),
+                                   O_WRONLY | O_CREAT | O_TRUNC | O_CLOEXEC,
+                                   0644));
+  if (!fd.ok()) {
+    PLOG(ERROR) << "Failed to open " << file_contexts_txt;
+    return false;
+  }
+  if (!android::base::WriteStringToFd(mount_point +
+                                          "(/.*)?       "
+                                          "  u:object_r:vendor_file:s0\n",
+                                      fd)) {
+    return false;
+  }
+  if (!android::base::WriteStringToFd(
+          mount_point + "/etc(/.*)?       "
+                        "  u:object_r:vendor_configs_file:s0\n",
+          fd)) {
+    return false;
+  }
+  Command cmd(HostBinaryPath("sefcontext_compile"));
+  cmd.AddParameter("-o");
+  cmd.AddParameter(output_path);
+  cmd.AddParameter(file_contexts_txt);
+  const auto exit_code = cmd.Start().Wait();
+  return exit_code == 0;
+}
+
+bool AddVbmetaFooter(const std::string& output_image,
+                     const std::string& partition_name) {
+  auto avbtool_path = HostBinaryPath("avbtool");
+  Command avb_cmd(avbtool_path);
+  // Add host binary path to PATH, so that avbtool can locate host util
+  // binaries such as 'fec'
+  auto PATH =
+      StringFromEnv("PATH", "") + ":" + cpp_dirname(avb_cmd.Executable());
+  // Must unset an existing environment variable in order to modify it
+  avb_cmd.UnsetFromEnvironment("PATH");
+  avb_cmd.AddEnvironmentVariable("PATH", PATH);
+
+  avb_cmd.AddParameter("add_hashtree_footer");
+  // Arbitrary salt to keep output consistent
+  avb_cmd.AddParameter("--salt");
+  avb_cmd.AddParameter("62BBAAA0", "E4BD99E783AC");
+  avb_cmd.AddParameter("--image");
+  avb_cmd.AddParameter(output_image);
+  avb_cmd.AddParameter("--partition_name");
+  avb_cmd.AddParameter(partition_name);
+
+  auto exit_code = avb_cmd.Start().Wait();
+  if (exit_code != 0) {
+    LOG(ERROR) << "Failed to add avb footer to image " << output_image;
+    return false;
+  }
+
+  return true;
+}
+
+}  // namespace
+
+// Steps for building a vendor_dlkm.img:
+// 1. Generate filesystem_config.txt , which contains standard linux file
+// permissions, we use 0755 for directories, and 0644 for all files
+// 2. Write file_contexts, which contains all selinux labels
+// 3. Call  sefcontext_compile to compile file_contexts
+// 4. call mkuserimg_mke2fs to build an image, using filesystem_config and
+// file_contexts previously generated
+// 5. call avbtool to add hashtree footer, so that init/bootloader can verify
+// AVB chain
+bool BuildVendorDLKM(const std::string& src_dir, const bool is_erofs,
+                     const std::string& output_image) {
+  if (is_erofs) {
+    LOG(ERROR)
+        << "Building vendor_dlkm in EROFS format is currently not supported!";
+    return false;
+  }
+  const auto fs_config = output_image + ".fs_config";
+  if (!WriteFsConfig(fs_config.c_str(), src_dir, "/vendor_dlkm")) {
+    return false;
+  }
+  const auto file_contexts_bin = output_image + ".file_contexts";
+  if (!GenerateFileContexts(file_contexts_bin.c_str(), "/vendor_dlkm")) {
+    return false;
+  }
+
+  // We are using directory size as an estimate of final image size. To avoid
+  // any rounding errors, add 16M of head room.
+  const auto fs_size = RoundUp(GetDiskUsage(src_dir) + 16 * 1024 * 1024, 4096);
+  LOG(INFO) << "vendor_dlkm src dir " << src_dir << " has size "
+            << fs_size / 1024 << " KB";
+  const auto mkfs = HostBinaryPath("mkuserimg_mke2fs");
+  Command mkfs_cmd(mkfs);
+  // Arbitrary UUID/seed, just to keep output consistent between runs
+  mkfs_cmd.AddParameter("--mke2fs_uuid");
+  mkfs_cmd.AddParameter("cb09b942-ed4e-46a1-81dd-7d535bf6c4b1");
+  mkfs_cmd.AddParameter("--mke2fs_hash_seed");
+  mkfs_cmd.AddParameter("765d8aba-d93f-465a-9fcf-14bb794eb7f4");
+  // Arbitrary date, just to keep output consistent
+  mkfs_cmd.AddParameter("-T");
+  mkfs_cmd.AddParameter("900979200000");
+
+  // selinux permission to keep selinux happy
+  mkfs_cmd.AddParameter("--fs_config");
+  mkfs_cmd.AddParameter(fs_config);
+
+  mkfs_cmd.AddParameter(src_dir);
+  mkfs_cmd.AddParameter(output_image);
+  mkfs_cmd.AddParameter("ext4");
+  mkfs_cmd.AddParameter("/vendor_dlkm");
+  mkfs_cmd.AddParameter(std::to_string(fs_size));
+  mkfs_cmd.AddParameter(file_contexts_bin);
+
+  int exit_code = mkfs_cmd.Start().Wait();
+  if (exit_code != 0) {
+    LOG(ERROR) << "Failed to build vendor_dlkm ext4 image";
+    return false;
+  }
+  return AddVbmetaFooter(output_image, "vendor_dlkm");
+}
+
+bool RepackSuperWithVendorDLKM(const std::string& superimg_path,
+                               const std::string& vendor_dlkm_path) {
+  Command lpadd(HostBinaryPath("lpadd"));
+  lpadd.AddParameter("--replace");
+  lpadd.AddParameter(superimg_path);
+  lpadd.AddParameter("vendor_dlkm_a");
+  lpadd.AddParameter("google_vendor_dynamic_partitions_a");
+  lpadd.AddParameter(vendor_dlkm_path);
+  const auto exit_code = lpadd.Start().Wait();
+  return exit_code == 0;
+}
+
+bool RebuildVbmetaVendor(const std::string& vendor_dlkm_img,
+                         const std::string& vbmeta_path) {
+  auto avbtool_path = HostBinaryPath("avbtool");
+  Command vbmeta_cmd(avbtool_path);
+  vbmeta_cmd.AddParameter("make_vbmeta_image");
+  vbmeta_cmd.AddParameter("--output");
+  vbmeta_cmd.AddParameter(vbmeta_path);
+  vbmeta_cmd.AddParameter("--algorithm");
+  vbmeta_cmd.AddParameter("SHA256_RSA4096");
+  vbmeta_cmd.AddParameter("--key");
+  vbmeta_cmd.AddParameter(DefaultHostArtifactsPath("etc/cvd_avb_testkey.pem"));
+
+  vbmeta_cmd.AddParameter("--include_descriptors_from_image");
+  vbmeta_cmd.AddParameter(vendor_dlkm_img);
+  vbmeta_cmd.AddParameter("--padding_size");
+  vbmeta_cmd.AddParameter("4096");
+
+  const int exit_code = vbmeta_cmd.Start().Wait();
+  if (exit_code != 0) {
+    LOG(ERROR) << "Unable to create vbmeta. Exited with status " << exit_code;
+    return false;
+  }
+
+  const auto vbmeta_size = FileSize(vbmeta_path);
+  if (vbmeta_size > VBMETA_MAX_SIZE) {
+    LOG(ERROR) << "Generated vbmeta - " << vbmeta_path
+               << " is larger than the expected " << VBMETA_MAX_SIZE
+               << ". Stopping.";
+    return false;
+  }
+  if (vbmeta_size != VBMETA_MAX_SIZE) {
+    auto fd = SharedFD::Open(vbmeta_path, O_RDWR | O_CLOEXEC);
+    if (!fd->IsOpen() || fd->Truncate(VBMETA_MAX_SIZE) != 0) {
+      LOG(ERROR) << "`truncate --size=" << VBMETA_MAX_SIZE << " " << vbmeta_path
+                 << "` failed: " << fd->StrError();
+      return false;
+    }
+  }
+  return true;
+}
+
+bool SplitRamdiskModules(const std::string& ramdisk_path,
+                         const std::string& ramdisk_stage_dir,
+                         const std::string& vendor_dlkm_build_dir) {
+  const auto target_modules_dir = vendor_dlkm_build_dir + "/lib/modules";
+  const auto ret = EnsureDirectoryExists(target_modules_dir);
+  CHECK(ret.ok()) << ret.error().Message();
+  UnpackRamdisk(ramdisk_path, ramdisk_stage_dir);
+  const auto module_load_file =
+      android::base::Trim(FindFile(ramdisk_stage_dir.c_str(), "modules.load"));
+  if (module_load_file.empty()) {
+    LOG(ERROR) << "Failed to find modules.load file in input ramdisk "
+               << ramdisk_path;
+    return false;
+  }
+  LOG(INFO) << "modules.load location " << module_load_file;
+  const auto module_list =
+      android::base::Tokenize(ReadFile(module_load_file), "\n");
+  const auto module_base_dir = cpp_dirname(module_load_file);
+  const auto deps = LoadModuleDeps(module_base_dir + "/modules.dep");
+  const auto ramdisk_modules =
+      ComputeTransitiveClosure(GetRamdiskModules(module_list), deps);
+  std::set<std::string> vendor_dlkm_modules;
+
+  // Move non-ramdisk modules to vendor_dlkm
+  for (const auto& module_path : module_list) {
+    if (!ramdisk_modules.count(module_path)) {
+      const auto vendor_dlkm_module_location =
+          target_modules_dir + "/" + module_path;
+      EnsureDirectoryExists(cpp_dirname(vendor_dlkm_module_location));
+      RenameFile(module_base_dir + "/" + module_path,
+                 vendor_dlkm_module_location);
+      vendor_dlkm_modules.emplace(module_path);
+    }
+  }
+  LOG(INFO) << "There are " << ramdisk_modules.size() << " ramdisk modules and "
+            << vendor_dlkm_modules.size() << " vendor_dlkm modules";
+
+  // Write updated modules.dep and modules.load files
+  CHECK(WriteDepsToFile(FilterDependencies(deps, ramdisk_modules),
+                        module_base_dir + "/modules.dep"));
+  CHECK(WriteDepsToFile(FilterDependencies(deps, vendor_dlkm_modules),
+                        target_modules_dir + "/modules.dep"));
+  CHECK(WriteLinesToFile(ramdisk_modules, module_load_file.c_str()));
+  CHECK(WriteLinesToFile(vendor_dlkm_modules,
+                         (target_modules_dir + "/modules.load").c_str()));
+  PackRamdisk(ramdisk_stage_dir, ramdisk_path);
+  return true;
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/assemble_cvd/vendor_dlkm_utils.h b/host/commands/assemble_cvd/vendor_dlkm_utils.h
new file mode 100644
index 0000000..e3cbbdd
--- /dev/null
+++ b/host/commands/assemble_cvd/vendor_dlkm_utils.h
@@ -0,0 +1,41 @@
+//
+// Copyright (C) 2023 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#pragma once
+
+#include <string>
+
+namespace cuttlefish {
+
+bool SplitRamdiskModules(const std::string& ramdisk_path,
+                         const std::string& ramdisk_stage_dir,
+                         const std::string& vendor_dlkm_build_dir);
+
+bool WriteFsConfig(const char* output_path, const std::string& fs_root,
+                   const std::string& mount_point);
+
+bool GenerateFileContexts(const char* output_path,
+                          const std::string& mount_point);
+
+bool RepackSuperWithVendorDLKM(const std::string& superimg_path,
+                               const std::string& vendor_dlkm_path);
+
+bool BuildVendorDLKM(const std::string& src_dir, const bool is_erofs,
+                     const std::string& output_image);
+
+bool RebuildVbmetaVendor(const std::string& vendor_dlkm_img,
+                         const std::string& vbmeta_path);
+
+}  // namespace cuttlefish
diff --git a/host/commands/bt_connector/Android.bp b/host/commands/bt_connector/Android.bp
deleted file mode 100644
index 8703a76..0000000
--- a/host/commands/bt_connector/Android.bp
+++ /dev/null
@@ -1,39 +0,0 @@
-//
-// Copyright (C) 2021 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-
-package {
-    default_applicable_licenses: ["Android-Apache-2.0"],
-}
-
-cc_binary {
-    name: "bt_connector",
-    srcs: [
-        "main.cpp",
-    ],
-    shared_libs: [
-        "libext2_blkid",
-        "libbase",
-        "libcuttlefish_fs",
-        "libjsoncpp",
-        "liblog",
-        "libcuttlefish_utils",
-    ],
-    static_libs: [
-        "libcuttlefish_host_config",
-        "libgflags",
-    ],
-    defaults: ["cuttlefish_buildhost_only"]
-}
diff --git a/host/commands/bt_connector/OWNERS b/host/commands/bt_connector/OWNERS
deleted file mode 100644
index e791d83..0000000
--- a/host/commands/bt_connector/OWNERS
+++ /dev/null
@@ -1,3 +0,0 @@
-include device/google/cuttlefish:/OWNERS
-include platform/system/bt:/OWNERS
-jeongik@google.com
\ No newline at end of file
diff --git a/host/commands/bt_connector/main.cpp b/host/commands/bt_connector/main.cpp
deleted file mode 100644
index a625768..0000000
--- a/host/commands/bt_connector/main.cpp
+++ /dev/null
@@ -1,101 +0,0 @@
-/*
- * Copyright (C) 2021 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <fcntl.h>
-#include <poll.h>
-#include <unistd.h>
-#include <ios>
-#include <mutex>
-
-#include <android-base/logging.h>
-#include <gflags/gflags.h>
-#include <thread>
-
-#include "common/libs/fs/shared_buf.h"
-#include "common/libs/fs/shared_fd.h"
-#include "host/libs/config/cuttlefish_config.h"
-#include "host/libs/config/logging.h"
-
-// Copied from net/bluetooth/hci.h
-#define HCI_MAX_ACL_SIZE 1024
-#define HCI_MAX_FRAME_SIZE (HCI_MAX_ACL_SIZE + 4)
-
-// Include H4 header byte, and reserve more buffer size in the case of excess
-// packet.
-constexpr const size_t kBufferSize = (HCI_MAX_FRAME_SIZE + 1) * 2;
-
-DEFINE_int32(bt_in, -1, "A pipe for bt communication");
-DEFINE_int32(bt_out, -1, "A pipe for bt communication");
-DEFINE_int32(hci_port, -1, "A port for bt hci command");
-DEFINE_int32(link_port, -1, "A pipe for bt link layer command");
-DEFINE_int32(test_port, -1, "A pipe for rootcanal test channel");
-
-void openSocket(cuttlefish::SharedFD* fd, int port) {
-  static std::mutex mutex;
-  std::unique_lock<std::mutex> lock(mutex);
-  *fd = cuttlefish::SharedFD::SocketLocalClient(port, SOCK_STREAM);
-}
-
-int main(int argc, char** argv) {
-  cuttlefish::DefaultSubprocessLogging(argv);
-  gflags::ParseCommandLineFlags(&argc, &argv, true);
-  auto bt_in = cuttlefish::SharedFD::Dup(FLAGS_bt_in);
-  if (!bt_in->IsOpen()) {
-    LOG(ERROR) << "Error dupping fd " << FLAGS_bt_in << ": "
-               << bt_in->StrError();
-    return 1;
-  }
-  close(FLAGS_bt_in);
-
-  auto bt_out = cuttlefish::SharedFD::Dup(FLAGS_bt_out);
-  if (!bt_out->IsOpen()) {
-    LOG(ERROR) << "Error dupping fd " << FLAGS_bt_out << ": "
-               << bt_out->StrError();
-    return 1;
-  }
-  close(FLAGS_bt_out);
-  cuttlefish::SharedFD sock;
-  openSocket(&sock, FLAGS_hci_port);
-
-  auto guest_to_host = std::thread([&]() {
-    while (true) {
-      char buf[kBufferSize];
-      auto read = bt_in->Read(buf, sizeof(buf));
-      while (cuttlefish::WriteAll(sock, buf, read) == -1) {
-        LOG(ERROR) << "failed to write to socket, retry.";
-        // Wait for the host process to be ready
-        sleep(1);
-        openSocket(&sock, FLAGS_hci_port);
-      }
-    }
-  });
-
-  auto host_to_guest = std::thread([&]() {
-    while (true) {
-      char buf[kBufferSize];
-      auto read = sock->Read(buf, sizeof(buf));
-      if (read == -1) {
-        LOG(ERROR) << "failed to read from socket, retry.";
-        // Wait for the host process to be ready
-        sleep(1);
-        openSocket(&sock, FLAGS_hci_port);
-        continue;
-      }
-      cuttlefish::WriteAll(bt_out, buf, read);
-    }
-  });
-  guest_to_host.join();
-  host_to_guest.join();
-}
diff --git a/host/commands/console_forwarder/README.md b/host/commands/console_forwarder/README.md
new file mode 100644
index 0000000..45b662f
--- /dev/null
+++ b/host/commands/console_forwarder/README.md
@@ -0,0 +1,7 @@
+Console pass-through to the serial console on a device to access a root shell.
+
+If a user invokes `launch_cvd --console` or `cvd start --console`, this
+executable runs to forward data from a serial console to a virtual terminal
+that can be accessed with `screen` from the host.
+
+[![linkage](./doc/linkage.png)](https://cs.android.com/android/platform/superproject/+/master:device/google/cuttlefish/host/commands/console_forwarder/doc/linkage.svg)
diff --git a/host/commands/console_forwarder/doc/linkage.dot b/host/commands/console_forwarder/doc/linkage.dot
new file mode 100644
index 0000000..f1be5b7
--- /dev/null
+++ b/host/commands/console_forwarder/doc/linkage.dot
@@ -0,0 +1,24 @@
+digraph {
+  console_forwarder [label = < <B>console_forwarder</B> >, penwidth = 2]
+  host_console_in [label = "internal/console.in", shape = "rectangle"]
+  host_console_out [label = "internal/console.out", shape = "rectangle"]
+  host_pty [label = "/dev/pty/###", shape = "rectangle"]
+  run_cvd
+  screen
+  user [label = "User CLI"]
+  vmm [label = "crosvm / qemu"]
+  subgraph cluster_android {
+    label = "Android VM"
+    vm_console [label = "/dev/ttyS0", shape = "rectangle"]
+    shell [label = "toybox / sh"]
+  }
+
+  run_cvd -> console_forwarder
+  user -> screen
+  screen -> host_pty -> console_forwarder [dir = "both"]
+
+  console_forwarder -> host_console_out -> vmm [dir = "back"]
+  console_forwarder -> host_console_in -> vmm
+
+  vmm -> vm_console -> shell [dir = "both"]
+}
diff --git a/host/commands/console_forwarder/doc/linkage.png b/host/commands/console_forwarder/doc/linkage.png
new file mode 100644
index 0000000..7303c2c
--- /dev/null
+++ b/host/commands/console_forwarder/doc/linkage.png
Binary files differ
diff --git a/host/commands/console_forwarder/doc/linkage.svg b/host/commands/console_forwarder/doc/linkage.svg
new file mode 100644
index 0000000..e912327
--- /dev/null
+++ b/host/commands/console_forwarder/doc/linkage.svg
@@ -0,0 +1,144 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN"
+ "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
+<!-- Generated by graphviz version 2.43.0 (0)
+ -->
+<!-- Title: %3 Pages: 1 -->
+<svg width="268pt" height="567pt"
+ viewBox="0.00 0.00 267.50 567.00" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
+<g id="graph0" class="graph" transform="scale(1 1) rotate(0) translate(4 563)">
+<title>%3</title>
+<polygon fill="white" stroke="transparent" points="-4,4 -4,-563 263.5,-563 263.5,4 -4,4"/>
+<g id="clust1" class="cluster">
+<title>cluster_android</title>
+<polygon fill="none" stroke="black" points="69.5,-8 69.5,-155 185.5,-155 185.5,-8 69.5,-8"/>
+<text text-anchor="middle" x="127.5" y="-139.8" font-family="Times,serif" font-size="14.00">Android VM</text>
+</g>
+<!-- console_forwarder -->
+<g id="node1" class="node">
+<title>console_forwarder</title>
+<ellipse fill="none" stroke="black" stroke-width="2" cx="127.5" cy="-325" rx="86.38" ry="18"/>
+<text text-anchor="start" x="69" y="-322.3" font-family="Times,serif" font-size="14.00"> </text>
+<text text-anchor="start" x="73" y="-322.3" font-family="Times,serif" font-weight="bold" font-size="14.00">console_forwarder</text>
+<text text-anchor="start" x="182" y="-322.3" font-family="Times,serif" font-size="14.00"> </text>
+</g>
+<!-- host_console_in -->
+<g id="node2" class="node">
+<title>host_console_in</title>
+<polygon fill="none" stroke="black" points="117,-271 0,-271 0,-235 117,-235 117,-271"/>
+<text text-anchor="middle" x="58.5" y="-249.3" font-family="Times,serif" font-size="14.00">internal/console.in</text>
+</g>
+<!-- console_forwarder&#45;&gt;host_console_in -->
+<g id="edge7" class="edge">
+<title>console_forwarder&#45;&gt;host_console_in</title>
+<path fill="none" stroke="black" d="M110.8,-307.05C102.36,-298.5 91.97,-287.96 82.68,-278.54"/>
+<polygon fill="black" stroke="black" points="85.04,-275.94 75.53,-271.28 80.06,-280.85 85.04,-275.94"/>
+</g>
+<!-- host_console_out -->
+<g id="node3" class="node">
+<title>host_console_out</title>
+<polygon fill="none" stroke="black" points="259.5,-271 135.5,-271 135.5,-235 259.5,-235 259.5,-271"/>
+<text text-anchor="middle" x="197.5" y="-249.3" font-family="Times,serif" font-size="14.00">internal/console.out</text>
+</g>
+<!-- console_forwarder&#45;&gt;host_console_out -->
+<g id="edge5" class="edge">
+<title>console_forwarder&#45;&gt;host_console_out</title>
+<path fill="none" stroke="black" d="M151.67,-299.83C161.07,-290.43 171.62,-279.88 180.22,-271.28"/>
+<polygon fill="black" stroke="black" points="149.04,-297.51 144.45,-307.05 153.99,-302.46 149.04,-297.51"/>
+</g>
+<!-- vmm -->
+<g id="node8" class="node">
+<title>vmm</title>
+<ellipse fill="none" stroke="black" cx="127.5" cy="-181" rx="64.19" ry="18"/>
+<text text-anchor="middle" x="127.5" y="-177.3" font-family="Times,serif" font-size="14.00">crosvm / qemu</text>
+</g>
+<!-- host_console_in&#45;&gt;vmm -->
+<g id="edge8" class="edge">
+<title>host_console_in&#45;&gt;vmm</title>
+<path fill="none" stroke="black" d="M75.56,-234.7C84.12,-226.01 94.63,-215.34 103.95,-205.89"/>
+<polygon fill="black" stroke="black" points="106.59,-208.2 111.12,-198.62 101.6,-203.29 106.59,-208.2"/>
+</g>
+<!-- host_console_out&#45;&gt;vmm -->
+<g id="edge6" class="edge">
+<title>host_console_out&#45;&gt;vmm</title>
+<path fill="none" stroke="black" d="M172.87,-227.37C163.35,-217.85 152.71,-207.21 144.12,-198.62"/>
+<polygon fill="black" stroke="black" points="170.65,-230.1 180.2,-234.7 175.6,-225.15 170.65,-230.1"/>
+</g>
+<!-- host_pty -->
+<g id="node4" class="node">
+<title>host_pty</title>
+<polygon fill="none" stroke="black" points="120,-415 35,-415 35,-379 120,-379 120,-415"/>
+<text text-anchor="middle" x="77.5" y="-393.3" font-family="Times,serif" font-size="14.00">/dev/pty/###</text>
+</g>
+<!-- host_pty&#45;&gt;console_forwarder -->
+<g id="edge4" class="edge">
+<title>host_pty&#45;&gt;console_forwarder</title>
+<path fill="none" stroke="black" d="M95.81,-370.36C100.17,-364.26 104.82,-357.75 109.18,-351.65"/>
+<polygon fill="black" stroke="black" points="92.82,-368.52 89.86,-378.7 98.52,-372.59 92.82,-368.52"/>
+<polygon fill="black" stroke="black" points="112.32,-353.28 115.28,-343.1 106.62,-349.21 112.32,-353.28"/>
+</g>
+<!-- run_cvd -->
+<g id="node5" class="node">
+<title>run_cvd</title>
+<ellipse fill="none" stroke="black" cx="177.5" cy="-397" rx="39.79" ry="18"/>
+<text text-anchor="middle" x="177.5" y="-393.3" font-family="Times,serif" font-size="14.00">run_cvd</text>
+</g>
+<!-- run_cvd&#45;&gt;console_forwarder -->
+<g id="edge1" class="edge">
+<title>run_cvd&#45;&gt;console_forwarder</title>
+<path fill="none" stroke="black" d="M165.9,-379.76C159.9,-371.35 152.42,-360.89 145.67,-351.44"/>
+<polygon fill="black" stroke="black" points="148.39,-349.23 139.73,-343.12 142.7,-353.3 148.39,-349.23"/>
+</g>
+<!-- screen -->
+<g id="node6" class="node">
+<title>screen</title>
+<ellipse fill="none" stroke="black" cx="77.5" cy="-469" rx="33.29" ry="18"/>
+<text text-anchor="middle" x="77.5" y="-465.3" font-family="Times,serif" font-size="14.00">screen</text>
+</g>
+<!-- screen&#45;&gt;host_pty -->
+<g id="edge3" class="edge">
+<title>screen&#45;&gt;host_pty</title>
+<path fill="none" stroke="black" d="M77.5,-440.67C77.5,-435.69 77.5,-430.49 77.5,-425.51"/>
+<polygon fill="black" stroke="black" points="74,-440.7 77.5,-450.7 81,-440.7 74,-440.7"/>
+<polygon fill="black" stroke="black" points="81,-425.1 77.5,-415.1 74,-425.1 81,-425.1"/>
+</g>
+<!-- user -->
+<g id="node7" class="node">
+<title>user</title>
+<ellipse fill="none" stroke="black" cx="77.5" cy="-541" rx="44.39" ry="18"/>
+<text text-anchor="middle" x="77.5" y="-537.3" font-family="Times,serif" font-size="14.00">User CLI</text>
+</g>
+<!-- user&#45;&gt;screen -->
+<g id="edge2" class="edge">
+<title>user&#45;&gt;screen</title>
+<path fill="none" stroke="black" d="M77.5,-522.7C77.5,-514.98 77.5,-505.71 77.5,-497.11"/>
+<polygon fill="black" stroke="black" points="81,-497.1 77.5,-487.1 74,-497.1 81,-497.1"/>
+</g>
+<!-- vm_console -->
+<g id="node9" class="node">
+<title>vm_console</title>
+<polygon fill="none" stroke="black" points="163.5,-124 91.5,-124 91.5,-88 163.5,-88 163.5,-124"/>
+<text text-anchor="middle" x="127.5" y="-102.3" font-family="Times,serif" font-size="14.00">/dev/ttyS0</text>
+</g>
+<!-- vmm&#45;&gt;vm_console -->
+<g id="edge9" class="edge">
+<title>vmm&#45;&gt;vm_console</title>
+<path fill="none" stroke="black" d="M127.5,-152.49C127.5,-146.55 127.5,-140.27 127.5,-134.33"/>
+<polygon fill="black" stroke="black" points="124,-152.7 127.5,-162.7 131,-152.7 124,-152.7"/>
+<polygon fill="black" stroke="black" points="131,-134.18 127.5,-124.18 124,-134.18 131,-134.18"/>
+</g>
+<!-- shell -->
+<g id="node10" class="node">
+<title>shell</title>
+<ellipse fill="none" stroke="black" cx="127.5" cy="-34" rx="50.09" ry="18"/>
+<text text-anchor="middle" x="127.5" y="-30.3" font-family="Times,serif" font-size="14.00">toybox / sh</text>
+</g>
+<!-- vm_console&#45;&gt;shell -->
+<g id="edge10" class="edge">
+<title>vm_console&#45;&gt;shell</title>
+<path fill="none" stroke="black" d="M127.5,-77.67C127.5,-72.69 127.5,-67.49 127.5,-62.51"/>
+<polygon fill="black" stroke="black" points="124,-77.7 127.5,-87.7 131,-77.7 124,-77.7"/>
+<polygon fill="black" stroke="black" points="131,-62.1 127.5,-52.1 124,-62.1 131,-62.1"/>
+</g>
+</g>
+</svg>
diff --git a/host/commands/cvd/Android.bp b/host/commands/cvd/Android.bp
index c79447b..6ec0535 100644
--- a/host/commands/cvd/Android.bp
+++ b/host/commands/cvd/Android.bp
@@ -17,48 +17,40 @@
     default_applicable_licenses: ["Android-Apache-2.0"],
 }
 
-cc_binary_host {
-    name: "cvd",
-    symlinks: ["acloud"],
-    srcs: [
-        "acloud_command.cpp",
-        "command_sequence.cpp",
-        "epoll_loop.cpp",
-        "instance_lock.cpp",
-        "instance_manager.cpp",
-        "main.cc",
-        "scope_guard.cpp",
-        "server.cc",
-        "server_client.cpp",
-        "server_command.cpp",
-        "server_shutdown.cpp",
-        "server_version.cpp",
-    ],
+cc_defaults {
+    name: "cvd_lib_defaults",
     target: {
         host: {
             stl: "libc++_static",
             static_libs: [
                 "libbase",
+                "libcurl",
+                "libcrypto",
                 "libcuttlefish_fs",
                 "libcuttlefish_utils",
                 "libext2_blkid",
                 "libfruit",
+                "libgflags",
                 "libjsoncpp",
                 "liblog",
-                "libprotobuf-cpp-lite",
+                "libprotobuf-cpp-full",
+                "libssl",
                 "libz",
             ],
         },
         android: {
             shared_libs: [
                 "libbase",
+                "libcurl",
+                "libcrypto",
                 "libcuttlefish_fs",
                 "libcuttlefish_utils",
                 "libext2_blkid",
                 "libfruit",
                 "libjsoncpp",
                 "liblog",
-                "libprotobuf-cpp-lite",
+                "libprotobuf-cpp-full",
+                "libssl",
                 "libz",
             ],
         },
@@ -66,9 +58,24 @@
     static_libs: [
         "libbuildversion",
         "libcuttlefish_cvd_proto",
+        "libcuttlefish_acloud_proto",
         "libcuttlefish_host_config",
+        "libcuttlefish_web",
+        "libgflags",
+        "libicui18n",
+        "libicuuc",
+        "libicuuc_stubdata",
+        "libxml2",
+    ],
+    header_libs: [
+        "libgtest_prod_headers",
+    ],
+    export_header_lib_headers: [
+        "libgtest_prod_headers",
     ],
     required: [
+        "cvd_internal_display",
+        "cvd_internal_env",
         "cvd_internal_host_bugreport",
         "cvd_internal_start",
         "cvd_internal_status",
@@ -79,3 +86,79 @@
     ],
     use_version_lib: true,
 }
+
+cc_library_host_static {
+    name: "libcvd_server_client",
+    srcs: [
+        "build_api.cpp",
+        "client.cpp",
+        "command_sequence.cpp",
+        "common_utils.cpp",
+        "demo_multi_vd.cpp",
+        "driver_flags.cpp",
+        "flag.cpp",
+        "frontline_parser.cpp",
+        "epoll_loop.cpp",
+        "handle_reset.cpp",
+        "instance_lock.cpp",
+        "instance_manager.cpp",
+        "lock_file.cpp",
+        "logger.cpp",
+        "reset_client_utils.cpp",
+        "server.cc",
+        "server_client.cpp",
+        "types.cpp",
+    ],
+    static_libs: [
+        "libcvd_selector",
+    ],
+    defaults: ["cvd_lib_defaults"],
+}
+
+cc_defaults {
+    name: "cvd_and_fetch_cvd_defaults",
+    static_libs: [
+        "libcuttlefish_launch_cvd_proto",
+        "libcvd_acloud",
+        "libcvd_parser",
+        "libcvd_selector",
+        "libcvd_server_client",
+        "libcvd_sub_commands",
+        "libcvd_fetch",
+    ],
+    target: {
+        linux_bionic: {
+            static_executable: true,
+        },
+    },
+    defaults: ["cvd_lib_defaults"],
+}
+
+cc_library_host_static {
+    name: "cvd_and_fetch_cvd",
+    srcs: [
+        "main.cc",
+    ],
+    defaults: ["cvd_and_fetch_cvd_defaults"],
+}
+
+// These are set up as two separate executables rather than a symlink so that
+// either can be downloaded as a standalone statically linked executable from
+// the build system.
+
+cc_binary_host {
+    name: "cvd",
+    symlinks: ["acloud"],
+    static_libs: [
+        "cvd_and_fetch_cvd",
+    ],
+    defaults: ["cvd_and_fetch_cvd_defaults"],
+}
+
+cc_binary_host {
+    name: "fetch_cvd",
+    static_libs: [
+        "cvd_and_fetch_cvd",
+    ],
+    defaults: ["cvd_and_fetch_cvd_defaults"],
+}
diff --git a/host/commands/cvd/acloud/Android.bp b/host/commands/cvd/acloud/Android.bp
new file mode 100644
index 0000000..89e19a5
--- /dev/null
+++ b/host/commands/cvd/acloud/Android.bp
@@ -0,0 +1,39 @@
+//
+// Copyright (C) 2023 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+// Translation layer from acloud CLI invocations and acloud config files
+// into cvd requests (converter.cpp) plus config parsing (config.cpp).
+cc_library_host_static {
+    name: "libcvd_acloud",
+    srcs: [
+        "converter.cpp",
+        "config.cpp",
+    ],
+    static_libs: [
+        "libcuttlefish_launch_cvd_proto",
+        "libcvd_selector",
+        "libprotobuf-cpp-full",
+    ],
+    defaults: ["cvd_lib_defaults"],
+}
+
+// Installs the bundled acloud default config under the host etc dir as
+// acloud/data/default.config.
+prebuilt_etc_host {
+    name: "default.config_host",
+    src: "data/default.config",
+    filename: "default.config",
+    sub_dir: "acloud/data",
+}
diff --git a/host/commands/cvd/acloud/config.cpp b/host/commands/cvd/acloud/config.cpp
new file mode 100644
index 0000000..281dedf
--- /dev/null
+++ b/host/commands/cvd/acloud/config.cpp
@@ -0,0 +1,75 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/commands/cvd/acloud/config.h"
+
+#include <fstream>
+
+#include <google/protobuf/text_format.h>
+
+#include "common/libs/utils/files.h"
+#include "common/libs/utils/users.h"
+
+namespace cuttlefish {
+
+// Builds an AcloudConfig from a parsed user_config.proto message.
+// Only launch_args is carried over for now; see the TODOs below.
+AcloudConfig::AcloudConfig(const acloud::UserConfig& usr_cfg)
+    : launch_args(usr_cfg.launch_args()) {
+  // TODO(weihsu): Add back fields/variables (except of cheeps and emulator
+  // fields) in config files. Remove cheeps (Android on ChromeOS) and emulator
+  // fields.
+
+  // TODO(weihsu): Verify validity of configurations.
+}
+
+// Reads the file at `config_path` and parses it as a text-format proto of
+// type ProtoType. Returns an error only when parsing fails.
+// NOTE(review): a missing or unreadable file produces an empty buffer,
+// which parses successfully into a default-constructed proto rather than
+// an error — callers are expected to check FileExists() first.
+template <typename ProtoType>
+Result<ProtoType> ParseTextProtoConfigHelper(const std::string& config_path) {
+  std::ifstream t(config_path);
+  std::stringstream buffer;
+  buffer << t.rdbuf();
+
+  ProtoType proto_result;
+  google::protobuf::TextFormat::Parser p;
+  CF_EXPECT(p.ParseFromString(buffer.str(), &proto_result),
+            "Failed to parse config: " << config_path);
+  return proto_result;
+}
+
+/**
+ * Returns the path of the default acloud user config file for `uid`,
+ * i.e. <user home>/.config/acloud/acloud.config. Fails if the user's
+ * home directory cannot be resolved.
+ */
+Result<const std::string> GetDefaultConfigFile(const uid_t uid) {
+  const std::string home = CF_EXPECT(SystemWideUserHome(uid));
+  return (std::string(home) + "/.config/acloud/acloud.config");
+}
+
+// Loads the acloud user config from `user_config_path`. If the file does
+// not exist, the path must be the default config path for `uid` — in that
+// case an empty config is used (matching python acloud's behavior);
+// otherwise an error is returned because an explicitly specified config
+// file is missing.
+Result<AcloudConfig> LoadAcloudConfig(const std::string& user_config_path,
+                                      const uid_t uid) {
+  acloud::UserConfig proto_result_user;
+  if (FileExists(user_config_path)) {
+    proto_result_user = CF_EXPECT(
+        ParseTextProtoConfigHelper<acloud::UserConfig>(user_config_path));
+  } else {
+    const std::string conf_path = CF_EXPECT(GetDefaultConfigFile(uid));
+    CF_EXPECT(user_config_path == conf_path,
+              "The specified config file does not exist.");
+
+    // If the default config does not exist, acloud creates an empty object.
+    proto_result_user = acloud::UserConfig();
+  }
+  return AcloudConfig(proto_result_user);
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/acloud/config.h b/host/commands/cvd/acloud/config.h
new file mode 100644
index 0000000..992a132
--- /dev/null
+++ b/host/commands/cvd/acloud/config.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <optional>
+#include <vector>
+
+#include "internal_config.pb.h"
+#include "user_config.pb.h"
+
+#include "common/libs/utils/result.h"
+#include "host/libs/config/inject.h"
+
+namespace cuttlefish {
+
+// In-memory representation of acloud's configuration, built from the
+// text-proto user/internal config files.
+class AcloudConfig {
+ public:
+  INJECT(AcloudConfig(const acloud::UserConfig&));
+  ~AcloudConfig() = default;
+
+ public:
+  // UserConfig/user_config.proto members
+  // Extra launcher arguments copied from user_config.launch_args.
+  std::string launch_args;
+
+  // InternalConfig/internal_config.proto members
+
+  // In both config
+};
+
+// Path of the default acloud config file for `uid`
+// (<home>/.config/acloud/acloud.config).
+Result<const std::string> GetDefaultConfigFile(const uid_t uid);
+// Loads the config at `user_config_path`, falling back to an empty config
+// when the (default) file does not exist.
+Result<AcloudConfig> LoadAcloudConfig(const std::string& user_config_path,
+                                      const uid_t uid);
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/acloud/converter.cpp b/host/commands/cvd/acloud/converter.cpp
new file mode 100644
index 0000000..7ca692a
--- /dev/null
+++ b/host/commands/cvd/acloud/converter.cpp
@@ -0,0 +1,730 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/commands/cvd/acloud/converter.h"
+
+#include <sys/stat.h>
+
+#include <cstdio>
+#include <fstream>
+#include <optional>
+#include <vector>
+
+#include <android-base/file.h>
+#include <android-base/parseint.h>
+#include <android-base/strings.h>
+#include <google/protobuf/text_format.h>
+
+#include "common/libs/fs/shared_fd.h"
+#include "common/libs/utils/files.h"
+#include "common/libs/utils/flag_parser.h"
+#include "common/libs/utils/result.h"
+#include "common/libs/utils/subprocess.h"
+#include "common/libs/utils/users.h"
+#include "cvd_server.pb.h"
+#include "host/commands/cvd/acloud/config.h"
+#include "host/commands/cvd/command_sequence.h"
+#include "host/commands/cvd/common_utils.h"
+#include "host/commands/cvd/instance_lock.h"  // TempDir()
+#include "host/commands/cvd/selector/instance_database_utils.h"
+#include "host/commands/cvd/selector/selector_constants.h"
+#include "host/commands/cvd/server_client.h"
+#include "host/commands/cvd/server_command/utils.h"
+#include "host/commands/cvd/types.h"
+#include "host/libs/config/cuttlefish_config.h"
+
+namespace cuttlefish {
+namespace {
+
+// Candidate file names probed, in order, inside a --local-kernel-image
+// directory (see FindImage below).
+const std::vector<std::string> _KERNEL_IMAGE_NAMES = {"kernel", "bzImage",
+                                                      "Image"};
+const std::vector<std::string> _INITRAMFS_IMAGE_NAME = {"initramfs.img"};
+const std::vector<std::string> _BOOT_IMAGE_NAME = {"boot.img"};
+const std::vector<std::string> _VENDOR_BOOT_IMAGE_NAME = {"vendor_boot.img"};
+
+/**
+ * Finds an image file under `search_path`, trying each file name in
+ * `pattern` in order.
+ *
+ * Returns the full path of the first candidate that exists, or an empty
+ * string when none of them do.
+ */
+std::string FindImage(const std::string& search_path,
+                      const std::vector<std::string>& pattern) {
+  const std::string& search_path_extend = search_path + "/";
+  for (const auto& name : pattern) {
+    const std::string image = search_path_extend + name;
+    if (FileExists(image)) {
+      return image;
+    }
+  }
+  return "";
+}
+
+/**
+ * Split a string into arguments based on shell tokenization rules.
+ *
+ * This behaves like `shlex.split` from python where arguments are separated
+ * based on whitespace, but quoting and quote escaping is respected. This
+ * function effectively removes one level of quoting from its inputs while
+ * making the split.
+ */
+Result<std::vector<std::string>> BashTokenize(const std::string& str) {
+  Command command("bash");
+  command.AddParameter("-c");
+  // NOTE(review): assumes Command::AddParameter concatenates its arguments
+  // into a single parameter, i.e. this runs `bash -c "printf '%s\n' <str>"`
+  // so bash performs the tokenization/unquoting — confirm in subprocess.h.
+  command.AddParameter("printf '%s\n' ", str);
+  std::string stdout_str;
+  std::string stderr_str;
+  auto ret = RunWithManagedStdio(std::move(command), nullptr, &stdout_str,
+                                 &stderr_str);
+  CF_EXPECT(ret == 0,
+            "printf fail \"" << stdout_str << "\", \"" << stderr_str << "\"");
+  // NOTE(review): printf's trailing newline likely makes Split() return a
+  // trailing empty token — verify callers tolerate it.
+  return android::base::Split(stdout_str, "\n");
+}
+
+}  // namespace
+
+// body of pure virtual destructor required by C++
+// (even a pure virtual destructor needs a definition, since derived-class
+// destructors invoke the base destructor).
+ConvertAcloudCreateCommand::~ConvertAcloudCreateCommand() {}
+
+class ConvertAcloudCreateCommandImpl : public ConvertAcloudCreateCommand {
+ public:
+  INJECT(ConvertAcloudCreateCommandImpl()) {}
+  ~ConvertAcloudCreateCommandImpl() override = default;
+
+  Result<ConvertedAcloudCreateCommand> Convert(
+      const RequestWithStdio& request) {
+    auto arguments = ParseInvocation(request.Message()).arguments;
+    CF_EXPECT(arguments.size() > 0);
+    CF_EXPECT(arguments[0] == "create");
+    arguments.erase(arguments.begin());
+
+    const auto& request_command = request.Message().command_request();
+
+    std::vector<Flag> flags;
+    bool local_instance_set;
+    std::optional<int> local_instance;
+    auto local_instance_flag = Flag();
+    local_instance_flag.Alias(
+        {FlagAliasMode::kFlagConsumesArbitrary, "--local-instance"});
+    local_instance_flag.Setter([&local_instance_set,
+                                &local_instance](const FlagMatch& m) {
+      local_instance_set = true;
+      if (m.value != "" && local_instance) {
+        LOG(ERROR) << "Instance number already set, was \"" << *local_instance
+                   << "\", now set to \"" << m.value << "\"";
+        return false;
+      } else if (m.value != "" && !local_instance) {
+        local_instance = std::stoi(m.value);
+      }
+      return true;
+    });
+    flags.emplace_back(local_instance_flag);
+
+    std::optional<std::string> flavor;
+    flags.emplace_back(
+        Flag()
+            .Alias({FlagAliasMode::kFlagConsumesFollowing, "--config"})
+            .Alias({FlagAliasMode::kFlagConsumesFollowing, "--flavor"})
+            .Setter([&flavor](const FlagMatch& m) {
+              flavor = m.value;
+              return true;
+            }));
+
+    std::optional<std::string> local_kernel_image;
+    flags.emplace_back(Flag()
+                           .Alias({FlagAliasMode::kFlagConsumesFollowing,
+                                   "--local-kernel-image"})
+                           .Alias({FlagAliasMode::kFlagConsumesFollowing,
+                                   "--local-boot-image"})
+                           .Setter([&local_kernel_image](const FlagMatch& m) {
+                             local_kernel_image = m.value;
+                             return true;
+                           }));
+
+    std::optional<std::string> image_download_dir;
+    flags.emplace_back(Flag()
+                           .Alias({FlagAliasMode::kFlagConsumesFollowing,
+                                   "--image-download-dir"})
+                           .Setter([&image_download_dir](const FlagMatch& m) {
+                             image_download_dir = m.value;
+                             return true;
+                           }));
+
+    verbose_ = false;
+    flags.emplace_back(Flag()
+                           .Alias({FlagAliasMode::kFlagExact, "-v"})
+                           .Alias({FlagAliasMode::kFlagExact, "-vv"})
+                           .Alias({FlagAliasMode::kFlagExact, "--verbose"})
+                           .Setter([this](const FlagMatch&) {
+                             verbose_ = true;
+                             return true;
+                           }));
+
+    std::optional<std::string> branch;
+    flags.emplace_back(
+        Flag()
+            .Alias({FlagAliasMode::kFlagConsumesFollowing, "--branch"})
+            .Setter([&branch](const FlagMatch& m) {
+              branch = m.value;
+              return true;
+            }));
+
+    bool local_image;
+    std::optional<std::string> local_image_path;
+    flags.emplace_back(
+        Flag()
+            .Alias({FlagAliasMode::kFlagConsumesArbitrary, "--local-image"})
+            .Setter([&local_image,
+                     &local_image_path](const FlagMatch& m) {
+              local_image = true;
+              if (m.value != "") {
+                local_image_path = m.value;
+              }
+              return true;
+            }));
+
+    std::optional<std::string> build_id;
+    flags.emplace_back(
+        Flag()
+            .Alias({FlagAliasMode::kFlagConsumesFollowing, "--build-id"})
+            .Alias({FlagAliasMode::kFlagConsumesFollowing, "--build_id"})
+            .Setter([&build_id](const FlagMatch& m) {
+              build_id = m.value;
+              return true;
+            }));
+
+    std::optional<std::string> build_target;
+    flags.emplace_back(
+        Flag()
+            .Alias({FlagAliasMode::kFlagConsumesFollowing, "--build-target"})
+            .Alias({FlagAliasMode::kFlagConsumesFollowing, "--build_target"})
+            .Setter([&build_target](const FlagMatch& m) {
+              build_target = m.value;
+              return true;
+            }));
+
+    std::optional<std::string> config_file;
+    flags.emplace_back(
+        Flag()
+            .Alias({FlagAliasMode::kFlagConsumesFollowing, "--config-file"})
+            .Alias({FlagAliasMode::kFlagConsumesFollowing, "--config_file"})
+            .Setter([&config_file](const FlagMatch& m) {
+              config_file = m.value;
+              return true;
+            }));
+
+    std::optional<std::string> bootloader_build_id;
+    flags.emplace_back(Flag()
+                           .Alias({FlagAliasMode::kFlagConsumesFollowing,
+                                   "--bootloader-build-id"})
+                           .Alias({FlagAliasMode::kFlagConsumesFollowing,
+                                   "--bootloader_build_id"})
+                           .Setter([&bootloader_build_id](const FlagMatch& m) {
+                             bootloader_build_id = m.value;
+                             return true;
+                           }));
+    std::optional<std::string> bootloader_build_target;
+    flags.emplace_back(
+        Flag()
+            .Alias({FlagAliasMode::kFlagConsumesFollowing,
+                    "--bootloader-build-target"})
+            .Alias({FlagAliasMode::kFlagConsumesFollowing,
+                    "--bootloader_build_target"})
+            .Setter([&bootloader_build_target](const FlagMatch& m) {
+              bootloader_build_target = m.value;
+              return true;
+            }));
+    std::optional<std::string> bootloader_branch;
+    flags.emplace_back(Flag()
+                           .Alias({FlagAliasMode::kFlagConsumesFollowing,
+                                   "--bootloader-branch"})
+                           .Alias({FlagAliasMode::kFlagConsumesFollowing,
+                                   "--bootloader_branch"})
+                           .Setter([&bootloader_branch](const FlagMatch& m) {
+                             bootloader_branch = m.value;
+                             return true;
+                           }));
+
+    std::optional<std::string> boot_build_id;
+    flags.emplace_back(
+        Flag()
+            .Alias({FlagAliasMode::kFlagConsumesFollowing, "--boot-build-id"})
+            .Alias({FlagAliasMode::kFlagConsumesFollowing, "--boot_build_id"})
+            .Setter([&boot_build_id](const FlagMatch& m) {
+              boot_build_id = m.value;
+              return true;
+            }));
+    std::optional<std::string> boot_build_target;
+    flags.emplace_back(Flag()
+                           .Alias({FlagAliasMode::kFlagConsumesFollowing,
+                                   "--boot-build-target"})
+                           .Alias({FlagAliasMode::kFlagConsumesFollowing,
+                                   "--boot_build_target"})
+                           .Setter([&boot_build_target](const FlagMatch& m) {
+                             boot_build_target = m.value;
+                             return true;
+                           }));
+    std::optional<std::string> boot_branch;
+    flags.emplace_back(
+        Flag()
+            .Alias({FlagAliasMode::kFlagConsumesFollowing, "--boot-branch"})
+            .Alias({FlagAliasMode::kFlagConsumesFollowing, "--boot_branch"})
+            .Setter([&boot_branch](const FlagMatch& m) {
+              boot_branch = m.value;
+              return true;
+            }));
+    std::optional<std::string> boot_artifact;
+    flags.emplace_back(
+        Flag()
+            .Alias({FlagAliasMode::kFlagConsumesFollowing, "--boot-artifact"})
+            .Alias({FlagAliasMode::kFlagConsumesFollowing, "--boot_artifact"})
+            .Setter([&boot_artifact](const FlagMatch& m) {
+              boot_artifact = m.value;
+              return true;
+            }));
+
+    std::optional<std::string> ota_build_id;
+    flags.emplace_back(
+        Flag()
+            .Alias({FlagAliasMode::kFlagConsumesFollowing, "--ota-build-id"})
+            .Alias({FlagAliasMode::kFlagConsumesFollowing, "--ota_build_id"})
+            .Setter([&ota_build_id](const FlagMatch& m) {
+              ota_build_id = m.value;
+              return true;
+            }));
+    std::optional<std::string> ota_build_target;
+    flags.emplace_back(Flag()
+                           .Alias({FlagAliasMode::kFlagConsumesFollowing,
+                                   "--ota-build-target"})
+                           .Alias({FlagAliasMode::kFlagConsumesFollowing,
+                                   "--ota_build_target"})
+                           .Setter([&ota_build_target](const FlagMatch& m) {
+                             ota_build_target = m.value;
+                             return true;
+                           }));
+    std::optional<std::string> ota_branch;
+    flags.emplace_back(
+        Flag()
+            .Alias({FlagAliasMode::kFlagConsumesFollowing, "--ota-branch"})
+            .Alias({FlagAliasMode::kFlagConsumesFollowing, "--ota_branch"})
+            .Setter([&ota_branch](const FlagMatch& m) {
+              ota_branch = m.value;
+              return true;
+            }));
+
+    std::optional<std::string> launch_args;
+    flags.emplace_back(
+        Flag()
+            .Alias({FlagAliasMode::kFlagConsumesFollowing, "--launch-args"})
+            .Setter([&launch_args](const FlagMatch& m) {
+              launch_args = m.value;
+              return true;
+            }));
+
+    std::optional<std::string> system_branch;
+    flags.emplace_back(
+        Flag()
+            .Alias({FlagAliasMode::kFlagConsumesFollowing, "--system-branch"})
+            .Setter([&system_branch](const FlagMatch& m) {
+              system_branch = m.value;
+              return true;
+            }));
+
+    std::optional<std::string> system_build_target;
+    flags.emplace_back(Flag()
+                           .Alias({FlagAliasMode::kFlagConsumesFollowing,
+                                   "--system-build-target"})
+                           .Setter([&system_build_target](const FlagMatch& m) {
+                             system_build_target = m.value;
+                             return true;
+                           }));
+
+    std::optional<std::string> system_build_id;
+    flags.emplace_back(
+        Flag()
+            .Alias({FlagAliasMode::kFlagConsumesFollowing, "--system-build-id"})
+            .Setter([&system_build_id](const FlagMatch& m) {
+              system_build_id = m.value;
+              return true;
+            }));
+
+    std::optional<std::string> kernel_branch;
+    flags.emplace_back(
+        Flag()
+            .Alias({FlagAliasMode::kFlagConsumesFollowing, "--kernel-branch"})
+            .Setter([&kernel_branch](const FlagMatch& m) {
+              kernel_branch = m.value;
+              return true;
+            }));
+
+    std::optional<std::string> kernel_build_target;
+    flags.emplace_back(Flag()
+                           .Alias({FlagAliasMode::kFlagConsumesFollowing,
+                                   "--kernel-build-target"})
+                           .Setter([&kernel_build_target](const FlagMatch& m) {
+                             kernel_build_target = m.value;
+                             return true;
+                           }));
+
+    std::optional<std::string> kernel_build_id;
+    flags.emplace_back(
+        Flag()
+            .Alias({FlagAliasMode::kFlagConsumesFollowing, "--kernel-build-id"})
+            .Setter([&kernel_build_id](const FlagMatch& m) {
+              kernel_build_id = m.value;
+              return true;
+            }));
+
+    std::optional<std::string> pet_name;
+    Flag pet_name_gflag = GflagsCompatFlag("pet-name");
+    flags.emplace_back(
+        GflagsCompatFlag("pet-name")
+            .Getter([&pet_name]() { return (pet_name ? *pet_name : ""); })
+            .Setter([&pet_name](const FlagMatch& match) {
+              pet_name = match.value;
+              return true;
+            }));
+
+    CF_EXPECT(ParseFlags(flags, arguments));
+    CF_EXPECT(arguments.size() == 0,
+              "Unrecognized arguments:'"
+                  << android::base::Join(arguments, "', '") << "'");
+
+    CF_EXPECT(local_instance_set == true,
+              "Only '--local-instance' is supported");
+    auto host_dir = TempDir() + "/acloud_image_artifacts/";
+    if (image_download_dir) {
+      host_dir = image_download_dir.value() + "/acloud_image_artifacts/";
+    }
+
+    auto host_artifacts_path = request_command.env().find(kAndroidHostOut);
+    CF_EXPECT(host_artifacts_path != request_command.env().end(),
+              "Missing " << kAndroidHostOut);
+
+    std::vector<cvd::Request> request_protos;
+    const uid_t uid = request.Credentials()->uid;
+    // default user config path
+    std::string user_config_path = CF_EXPECT(GetDefaultConfigFile(uid));
+
+    if (config_file) {
+      user_config_path = config_file.value();
+    }
+    AcloudConfig acloud_config =
+        CF_EXPECT(LoadAcloudConfig(user_config_path, uid));
+
+    if (local_image) {
+      CF_EXPECT(!(system_branch || system_build_target || system_build_id),
+                "--local-image incompatible with --system-* flags");
+      CF_EXPECT(!(bootloader_branch || bootloader_build_target ||
+                  bootloader_build_id),
+                "--local-image incompatible with --bootloader-* flags");
+      CF_EXPECT(
+          !(boot_branch || boot_build_target || boot_build_id || boot_artifact),
+          "--local-image incompatible with --boot-* flags");
+      CF_EXPECT(!(ota_branch || ota_build_target || ota_build_id),
+                "--local-image incompatible with --ota-* flags");
+    } else {
+      if (!DirectoryExists(host_dir)) {
+        // fetch/download directory doesn't exist, create directory
+        cvd::Request& mkdir_request = request_protos.emplace_back();
+        auto& mkdir_command = *mkdir_request.mutable_command_request();
+        mkdir_command.add_args("cvd");
+        mkdir_command.add_args("mkdir");
+        mkdir_command.add_args("-p");
+        mkdir_command.add_args(host_dir);
+        auto& mkdir_env = *mkdir_command.mutable_env();
+        mkdir_env[kAndroidHostOut] = host_artifacts_path->second;
+      }
+      if (branch || build_id || build_target) {
+        auto target = build_target ? *build_target : "";
+        auto build = build_id.value_or(branch.value_or("aosp-master"));
+        host_dir += (build + target);
+      } else {
+        host_dir += "aosp-master";
+      }
+      // TODO(weihsu): if we fetch default ID such as aosp-master,
+      // cvd fetch will fetch the latest release. There is a potential
+      // issue that two different fetch with same default ID may
+      // download different releases.
+      // Eventually, we should match python acloud behavior to translate
+      // default ID (aosp-master) to real ID to solve this issue.
+
+      cvd::Request& fetch_request = request_protos.emplace_back();
+      auto& fetch_command = *fetch_request.mutable_command_request();
+      fetch_command.add_args("cvd");
+      fetch_command.add_args("fetch");
+      fetch_command.add_args("--directory");
+      fetch_command.add_args(host_dir);
+      fetch_command_str_ = "";
+      if (branch || build_id || build_target) {
+        fetch_command.add_args("--default_build");
+        fetch_command_str_ += "--default_build=";
+        auto target = build_target ? "/" + *build_target : "";
+        auto build = build_id.value_or(branch.value_or("aosp-master"));
+        fetch_command.add_args(build + target);
+        fetch_command_str_ += (build + target);
+      }
+      if (system_branch || system_build_id || system_build_target) {
+        fetch_command.add_args("--system_build");
+        fetch_command_str_ += " --system_build=";
+        auto target = system_build_target.value_or(build_target.value_or(""));
+        if (target != "") {
+          target = "/" + target;
+        }
+        auto build =
+            system_build_id.value_or(system_branch.value_or("aosp-master"));
+        fetch_command.add_args(build + target);
+        fetch_command_str_ += (build + target);
+      }
+      if (bootloader_branch || bootloader_build_id || bootloader_build_target) {
+        fetch_command.add_args("--bootloader_build");
+        fetch_command_str_ += " --bootloader_build=";
+        auto target = bootloader_build_target.value_or("");
+        if (target != "") {
+          target = "/" + target;
+        }
+        auto build = bootloader_build_id.value_or(
+            bootloader_branch.value_or("aosp_u-boot-mainline"));
+        fetch_command.add_args(build + target);
+        fetch_command_str_ += (build + target);
+      }
+      if (boot_branch || boot_build_id || boot_build_target) {
+        fetch_command.add_args("--boot_build");
+        fetch_command_str_ += " --boot_build=";
+        auto target = boot_build_target.value_or("");
+        if (target != "") {
+          target = "/" + target;
+        }
+        auto build =
+            boot_build_id.value_or(boot_branch.value_or("aosp-master"));
+        fetch_command.add_args(build + target);
+        fetch_command_str_ += (build + target);
+      }
+      if (boot_artifact) {
+        CF_EXPECT(boot_branch || boot_build_target || boot_build_id,
+                  "--boot-artifact must combine with other --boot-* flags");
+        fetch_command.add_args("--boot_artifact");
+        fetch_command_str_ += " --boot_artifact=";
+        auto target = boot_artifact.value_or("");
+        fetch_command.add_args(target);
+        fetch_command_str_ += (target);
+      }
+      if (ota_branch || ota_build_id || ota_build_target) {
+        fetch_command.add_args("--otatools_build");
+        fetch_command_str_ += " --otatools_build=";
+        auto target = ota_build_target.value_or("");
+        if (target != "") {
+          target = "/" + target;
+        }
+        auto build = ota_build_id.value_or(ota_branch.value_or(""));
+        fetch_command.add_args(build + target);
+        fetch_command_str_ += (build + target);
+      }
+      if (kernel_branch || kernel_build_id || kernel_build_target) {
+        fetch_command.add_args("--kernel_build");
+        fetch_command_str_ += " --kernel_build=";
+        auto target = kernel_build_target.value_or("kernel_virt_x86_64");
+        auto build = kernel_build_id.value_or(
+            branch.value_or("aosp_kernel-common-android-mainline"));
+        fetch_command.add_args(build + "/" + target);
+        fetch_command_str_ += (build + "/" + target);
+      }
+      auto& fetch_env = *fetch_command.mutable_env();
+      fetch_env[kAndroidHostOut] = host_artifacts_path->second;
+
+      fetch_cvd_args_file_ = host_dir + "/fetch-cvd-args.txt";
+      if (FileExists(fetch_cvd_args_file_)) {
+        // file exists
+        std::string read_str;
+        using android::base::ReadFileToString;
+        CF_EXPECT(ReadFileToString(fetch_cvd_args_file_.c_str(), &read_str,
+                                   /* follow_symlinks */ true));
+        if (read_str == fetch_command_str_) {
+          // same fetch cvd command, reuse original dir
+          fetch_command_str_ = "";
+          request_protos.pop_back();
+        }
+      }
+    }
+
+    cvd::Request start_request;
+    auto& start_command = *start_request.mutable_command_request();
+    start_command.add_args("cvd");
+    start_command.add_args("start");
+    start_command.add_args("--daemon");
+    start_command.add_args("--undefok");
+    start_command.add_args("report_anonymous_usage_stats");
+    start_command.add_args("--report_anonymous_usage_stats");
+    start_command.add_args("y");
+    if (flavor) {
+      start_command.add_args("-config");
+      start_command.add_args(flavor.value());
+    }
+
+    if (local_kernel_image) {
+      // kernel image has 1st priority than boot image
+      struct stat statbuf;
+      std::string local_boot_image = "";
+      std::string vendor_boot_image = "";
+      std::string kernel_image = "";
+      std::string initramfs_image = "";
+      if (stat(local_kernel_image.value().c_str(), &statbuf) == 0) {
+        if (statbuf.st_mode & S_IFDIR) {
+          // it's a directory, deal with kernel image case first
+          kernel_image =
+              FindImage(local_kernel_image.value(), _KERNEL_IMAGE_NAMES);
+          initramfs_image =
+              FindImage(local_kernel_image.value(), _INITRAMFS_IMAGE_NAME);
+          // This is the original python acloud behavior, it
+          // expects both kernel and initramfs files, however,
+          // there are some very old kernels that are built without
+          // an initramfs.img file,
+          // e.g. aosp_kernel-common-android-4.14-stable
+          if (kernel_image != "" && initramfs_image != "") {
+            start_command.add_args("-kernel_path");
+            start_command.add_args(kernel_image);
+            start_command.add_args("-initramfs_path");
+            start_command.add_args(initramfs_image);
+          } else {
+            // boot.img case
+            // adding boot.img and vendor_boot.img to the path
+            local_boot_image =
+                FindImage(local_kernel_image.value(), _BOOT_IMAGE_NAME);
+            vendor_boot_image =
+                FindImage(local_kernel_image.value(), _VENDOR_BOOT_IMAGE_NAME);
+            start_command.add_args("-boot_image");
+            start_command.add_args(local_boot_image);
+            // vendor boot image may not exist
+            if (vendor_boot_image != "") {
+              start_command.add_args("-vendor_boot_image");
+              start_command.add_args(vendor_boot_image);
+            }
+          }
+        } else if (statbuf.st_mode & S_IFREG) {
+          // it's a file which directly points to boot.img
+          local_boot_image = local_kernel_image.value();
+          start_command.add_args("-boot_image");
+          start_command.add_args(local_boot_image);
+        }
+      }
+    }
+
+    if (launch_args) {
+      for (const auto& arg : CF_EXPECT(BashTokenize(*launch_args))) {
+        start_command.add_args(arg);
+      }
+    }
+    if (acloud_config.launch_args != "") {
+      for (const auto& arg :
+           CF_EXPECT(BashTokenize(acloud_config.launch_args))) {
+        start_command.add_args(arg);
+      }
+    }
+    start_command.mutable_selector_opts()->add_args(
+        std::string("--") + selector::SelectorFlags::kDisableDefaultGroup +
+        "=true");
+    if (pet_name) {
+      const auto [group_name, instance_name] =
+          CF_EXPECT(selector::BreakDeviceName(*pet_name),
+                    *pet_name << " must be a group name followed by - "
+                              << "followed by an instance name.");
+      std::string group_name_arg = "--";
+      group_name_arg.append(selector::SelectorFlags::kGroupName)
+          .append("=")
+          .append(group_name);
+      std::string instance_name_arg = "--";
+      instance_name_arg.append(selector::SelectorFlags::kInstanceName)
+          .append("=")
+          .append(instance_name);
+      start_command.mutable_selector_opts()->add_args(group_name_arg);
+      start_command.mutable_selector_opts()->add_args(instance_name_arg);
+    }
+
+    auto& start_env = *start_command.mutable_env();
+    if (local_image) {
+      if (local_image_path) {
+        std::string local_image_path_str = local_image_path.value();
+        // Python acloud source: local_image_local_instance.py;l=81
+        // this acloud flag is equal to launch_cvd flag system_image_dir
+        start_command.add_args("-system_image_dir");
+        start_command.add_args(local_image_path_str);
+      }
+
+      start_env[kAndroidHostOut] = host_artifacts_path->second;
+
+      auto product_out = request_command.env().find(kAndroidProductOut);
+      CF_EXPECT(product_out != request_command.env().end(),
+                "Missing " << kAndroidProductOut);
+      start_env[kAndroidProductOut] = product_out->second;
+    } else {
+      start_env[kAndroidHostOut] = host_dir;
+      start_env[kAndroidProductOut] = host_dir;
+    }
+    if (Contains(start_env, kCuttlefishInstanceEnvVarName)) {
+      // Python acloud does not use this variable.
+      // this variable will confuse cvd start, though
+      start_env.erase(kCuttlefishInstanceEnvVarName);
+    }
+    if (local_instance) {
+      start_env[kCuttlefishInstanceEnvVarName] =
+          std::to_string(*local_instance);
+    }
+    // we don't know which HOME is assigned by cvd start.
+    // cvd server does not rely on the working directory for cvd start
+    *start_command.mutable_working_directory() =
+        request_command.working_directory();
+    std::vector<SharedFD> fds;
+    if (verbose_) {
+      fds = request.FileDescriptors();
+    } else {
+      auto dev_null = SharedFD::Open("/dev/null", O_RDWR);
+      CF_EXPECT(dev_null->IsOpen(), dev_null->StrError());
+      fds = {dev_null, dev_null, dev_null};
+    }
+
+    ConvertedAcloudCreateCommand ret{
+        .start_request = RequestWithStdio(request.Client(), start_request, fds,
+                                          request.Credentials())};
+    for (auto& request_proto : request_protos) {
+      ret.prep_requests.emplace_back(request.Client(), request_proto, fds,
+                                     request.Credentials());
+    }
+    return ret;
+  }
+
+  const std::string& FetchCvdArgsFile() const override {
+    return fetch_cvd_args_file_;
+  }
+
+  const std::string& FetchCommandString() const override {
+    return fetch_command_str_;
+  }
+  bool Verbose() const { return verbose_; }
+
+ private:
+  std::string fetch_cvd_args_file_;
+  std::string fetch_command_str_;
+  bool verbose_;
+};
+
+fruit::Component<ConvertAcloudCreateCommand>
+AcloudCreateConvertCommandComponent() {
+  return fruit::createComponent()
+      .bind<ConvertAcloudCreateCommand, ConvertAcloudCreateCommandImpl>();
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/acloud/converter.h b/host/commands/cvd/acloud/converter.h
new file mode 100644
index 0000000..2b209c6
--- /dev/null
+++ b/host/commands/cvd/acloud/converter.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <atomic>
+
+#include <fruit/fruit.h>
+
+#include "host/commands/cvd/server_client.h"
+
+namespace cuttlefish {
+
+struct ConvertedAcloudCreateCommand {
+  std::vector<RequestWithStdio> prep_requests;
+  RequestWithStdio start_request;
+};
+
+class ConvertAcloudCreateCommand {
+ public:
+  virtual Result<ConvertedAcloudCreateCommand> Convert(
+      const RequestWithStdio& request) = 0;
+  virtual const std::string& FetchCvdArgsFile() const = 0;
+  virtual const std::string& FetchCommandString() const = 0;
+  virtual bool Verbose() const = 0;
+  /*
+   * The Android production build system appears to mandate a
+   * virtual destructor.
+   */
+  virtual ~ConvertAcloudCreateCommand() = 0;
+};
+
+fruit::Component<ConvertAcloudCreateCommand>
+AcloudCreateConvertCommandComponent();
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/acloud/data/default.config b/host/commands/cvd/acloud/data/default.config
new file mode 100644
index 0000000..7fa77aa
--- /dev/null
+++ b/host/commands/cvd/acloud/data/default.config
@@ -0,0 +1,180 @@
+min_machine_size: "n1-standard-4"
+disk_image_name: "avd-system.tar.gz"
+disk_image_mime_type: "application/x-tar"
+disk_image_extension: ".tar.gz"
+disk_raw_image_name: "disk.raw"
+disk_raw_image_extension: ".img"
+default_extra_data_disk_device: "/dev/block/sdb"
+creds_cache_file: ".acloud_oauth2.dat"
+user_agent: "acloud"
+
+# [GOLDFISH only] The emulator build target: "emulator-linux_x64_internal".
+# We use it to get build id if build id is not provided and It's very unlikely
+# that this will ever change.
+emulator_build_target: "emulator-linux_x64_internal"
+
+default_usr_cfg {
+  machine_type: "n1-standard-4"
+  network: "default"
+  extra_data_disk_size_gb: 0
+  instance_name_pattern: "ins-{uuid}-{build_id}-{build_target}"
+  fetch_cvd_version: "9123511"
+
+  metadata_variable {
+    key: "camera_front"
+    value: "1,32,24,checker-sliding"
+  }
+
+  metadata_variable {
+    key: "camera_back"
+    value: "1,640,480,checker-fixed"
+  }
+
+  metadata_variable {
+    key: "cfg_sta_ephemeral_cache_size_mb"
+    value: "512"
+  }
+
+  metadata_variable {
+    key: "cfg_sta_ephemeral_data_size_mb"
+    value: "2048"
+  }
+
+  metadata_variable {
+    key: "cfg_sta_persistent_data_device"
+    value: "default"
+  }
+
+  metadata_variable {
+    key: "gps_coordinates"
+    value: "37.422,122.084,100,0,1,1"
+  }
+}
+
+common_hw_property_map {
+  key: "local-auto"
+  value: "cpu:4,resolution:1280x800,dpi:160,memory:4g"
+}
+
+common_hw_property_map {
+  key: "local-wear"
+  value: "cpu:4,resolution:320x320,dpi:240,memory:2g"
+}
+
+common_hw_property_map {
+  key: "local-tablet"
+  value: "cpu:4,resolution:2560x1800,dpi:320,memory:4g"
+}
+
+common_hw_property_map {
+  key: "local-foldable"
+  value: "cpu:4,resolution:1768x2208,dpi:386,memory:4g"
+}
+
+common_hw_property_map {
+  key: "phone"
+  value: "cpu:4,resolution:720x1280,dpi:320,memory:2g"
+}
+
+common_hw_property_map {
+  key: "auto"
+  value: "cpu:4,resolution:1280x800,dpi:160,memory:4g"
+}
+
+common_hw_property_map {
+  key: "wear"
+  value: "cpu:4,resolution:320x320,dpi:240,memory:2g"
+}
+
+common_hw_property_map {
+  key: "tablet"
+  value: "cpu:4,resolution:2560x1800,dpi:320,memory:4g"
+}
+
+common_hw_property_map {
+  key: "tv"
+  value: "cpu:4,resolution:1920x1080,dpi:213,memory:2g"
+}
+
+common_hw_property_map {
+  key: "foldable"
+  value: "cpu:4,resolution:1768x2208,dpi:386,memory:4g"
+}
+
+# Device resolution
+device_resolution_map {
+  key: "nexus5"
+  value: "1080x1920x32x480"
+}
+
+device_resolution_map {
+  key: "nexus6"
+  value: "1440x2560x32x560"
+}
+
+# nexus7 (2012)
+device_resolution_map {
+  key: "nexus7_2012"
+  value: "800x1280x32x213"
+}
+
+device_resolution_map {
+  key: "nexus7_2013"
+  value: "1200x1920x32x320"
+}
+
+device_resolution_map {
+  key: "nexus9"
+  value: "1536x2048x32x320"
+}
+
+device_resolution_map {
+  key: "nexus10"
+  value: "1600x2560x32x320"
+}
+
+# Default orientation
+
+device_default_orientation_map {
+  key: "nexus5"
+  value: "portrait"
+}
+
+device_default_orientation_map {
+  key: "nexus6"
+  value: "landscape"
+}
+
+device_default_orientation_map {
+  key: "nexus7_2012"
+  value: "landscape"
+}
+
+device_default_orientation_map {
+  key: "nexus7_2013"
+  value: "landscape"
+}
+
+device_default_orientation_map {
+  key: "nexus9"
+  value: "landscape"
+}
+
+device_default_orientation_map {
+  key: "nexus10"
+  value: "landscape"
+}
+
+# Precreated data images.
+precreated_data_image {
+  key: 4
+  value: "extradisk-image-4gb"
+}
+precreated_data_image {
+  key: 10
+  value: "extradisk-image-10gb"
+}
+precreated_data_image {
+  key: 100
+  value: "extradisk-image-100gb"
+}
diff --git a/host/commands/cvd/acloud_command.cpp b/host/commands/cvd/acloud_command.cpp
deleted file mode 100644
index cc53d54..0000000
--- a/host/commands/cvd/acloud_command.cpp
+++ /dev/null
@@ -1,335 +0,0 @@
-/*
- * Copyright (C) 2022 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "host/commands/cvd/server.h"
-
-#include <optional>
-#include <vector>
-
-#include <android-base/strings.h>
-
-#include "cvd_server.pb.h"
-
-#include "common/libs/fs/shared_buf.h"
-#include "common/libs/fs/shared_fd.h"
-#include "common/libs/utils/flag_parser.h"
-#include "common/libs/utils/result.h"
-#include "common/libs/utils/subprocess.h"
-#include "host/commands/cvd/command_sequence.h"
-#include "host/commands/cvd/instance_lock.h"
-#include "host/commands/cvd/server_client.h"
-
-namespace cuttlefish {
-
-namespace {
-
-struct ConvertedAcloudCreateCommand {
-  InstanceLockFile lock;
-  std::vector<RequestWithStdio> requests;
-};
-
-/**
- * Split a string into arguments based on shell tokenization rules.
- *
- * This behaves like `shlex.split` from python where arguments are separated
- * based on whitespace, but quoting and quote escaping is respected. This
- * function effectively removes one level of quoting from its inputs while
- * making the split.
- */
-Result<std::vector<std::string>> BashTokenize(const std::string& str) {
-  Command command("bash");
-  command.AddParameter("-c");
-  command.AddParameter("printf '%s\n' ", str);
-  std::string stdout;
-  std::string stderr;
-  auto ret = RunWithManagedStdio(std::move(command), nullptr, &stdout, &stderr);
-  CF_EXPECT(ret == 0, "printf fail \"" << stdout << "\", \"" << stderr << "\"");
-  return android::base::Split(stdout, "\n");
-}
-
-class ConvertAcloudCreateCommand {
- public:
-  INJECT(ConvertAcloudCreateCommand(InstanceLockFileManager& lock_file_manager))
-      : lock_file_manager_(lock_file_manager) {}
-
-  Result<ConvertedAcloudCreateCommand> Convert(
-      const RequestWithStdio& request) {
-    auto arguments = ParseInvocation(request.Message()).arguments;
-    CF_EXPECT(arguments.size() > 0);
-    CF_EXPECT(arguments[0] == "create");
-    arguments.erase(arguments.begin());
-
-    const auto& request_command = request.Message().command_request();
-
-    std::vector<Flag> flags;
-    bool local_instance_set;
-    std::optional<int> local_instance;
-    auto local_instance_flag = Flag();
-    local_instance_flag.Alias(
-        {FlagAliasMode::kFlagConsumesArbitrary, "--local-instance"});
-    local_instance_flag.Setter([&local_instance_set,
-                                &local_instance](const FlagMatch& m) {
-      local_instance_set = true;
-      if (m.value != "" && local_instance) {
-        LOG(ERROR) << "Instance number already set, was \"" << *local_instance
-                   << "\", now set to \"" << m.value << "\"";
-        return false;
-      } else if (m.value != "" && !local_instance) {
-        local_instance = std::stoi(m.value);
-      }
-      return true;
-    });
-    flags.emplace_back(local_instance_flag);
-
-    bool verbose = false;
-    flags.emplace_back(Flag()
-                           .Alias({FlagAliasMode::kFlagExact, "-v"})
-                           .Alias({FlagAliasMode::kFlagExact, "-vv"})
-                           .Alias({FlagAliasMode::kFlagExact, "--verbose"})
-                           .Setter([&verbose](const FlagMatch&) {
-                             verbose = true;
-                             return true;
-                           }));
-
-    std::optional<std::string> branch;
-    flags.emplace_back(
-        Flag()
-            .Alias({FlagAliasMode::kFlagConsumesFollowing, "--branch"})
-            .Setter([&branch](const FlagMatch& m) {
-              branch = m.value;
-              return true;
-            }));
-
-    bool local_image;
-    flags.emplace_back(
-        Flag()
-            .Alias({FlagAliasMode::kFlagConsumesArbitrary, "--local-image"})
-            .Setter([&local_image](const FlagMatch& m) {
-              local_image = true;
-              return m.value == "";
-            }));
-
-    std::optional<std::string> build_id;
-    flags.emplace_back(
-        Flag()
-            .Alias({FlagAliasMode::kFlagConsumesFollowing, "--build-id"})
-            .Alias({FlagAliasMode::kFlagConsumesFollowing, "--build_id"})
-            .Setter([&build_id](const FlagMatch& m) {
-              build_id = m.value;
-              return true;
-            }));
-
-    std::optional<std::string> build_target;
-    flags.emplace_back(
-        Flag()
-            .Alias({FlagAliasMode::kFlagConsumesFollowing, "--build-target"})
-            .Alias({FlagAliasMode::kFlagConsumesFollowing, "--build_target"})
-            .Setter([&build_target](const FlagMatch& m) {
-              build_target = m.value;
-              return true;
-            }));
-
-    std::optional<std::string> launch_args;
-    flags.emplace_back(
-        Flag()
-            .Alias({FlagAliasMode::kFlagConsumesFollowing, "--launch-args"})
-            .Setter([&launch_args](const FlagMatch& m) {
-              launch_args = m.value;
-              return true;
-            }));
-
-    CF_EXPECT(ParseFlags(flags, arguments));
-    CF_EXPECT(arguments.size() == 0,
-              "Unrecognized arguments:'"
-                  << android::base::Join(arguments, "', '") << "'");
-
-    CF_EXPECT(local_instance_set == true,
-              "Only '--local-instance' is supported");
-    std::optional<InstanceLockFile> lock;
-    if (local_instance.has_value()) {
-      // TODO(schuffelen): Block here if it can be interruptible
-      lock = CF_EXPECT(lock_file_manager_.TryAcquireLock(*local_instance));
-    } else {
-      lock = CF_EXPECT(lock_file_manager_.TryAcquireUnusedLock());
-    }
-    CF_EXPECT(lock.has_value(), "Could not acquire instance lock");
-    CF_EXPECT(CF_EXPECT(lock->Status()) == InUseState::kNotInUse);
-
-    auto dir = TempDir() + "/acloud_cvd_temp/local-instance-" +
-               std::to_string(lock->Instance());
-
-    static constexpr char kAndroidHostOut[] = "ANDROID_HOST_OUT";
-
-    auto host_artifacts_path = request_command.env().find(kAndroidHostOut);
-    CF_EXPECT(host_artifacts_path != request_command.env().end(),
-              "Missing " << kAndroidHostOut);
-
-    std::vector<cvd::Request> request_protos;
-    if (local_image) {
-      cvd::Request& mkdir_request = request_protos.emplace_back();
-      auto& mkdir_command = *mkdir_request.mutable_command_request();
-      mkdir_command.add_args("cvd");
-      mkdir_command.add_args("mkdir");
-      mkdir_command.add_args("-p");
-      mkdir_command.add_args(dir);
-      auto& mkdir_env = *mkdir_command.mutable_env();
-      mkdir_env[kAndroidHostOut] = host_artifacts_path->second;
-      *mkdir_command.mutable_working_directory() = dir;
-    } else {
-      cvd::Request& fetch_request = request_protos.emplace_back();
-      auto& fetch_command = *fetch_request.mutable_command_request();
-      fetch_command.add_args("cvd");
-      fetch_command.add_args("fetch");
-      fetch_command.add_args("--directory");
-      fetch_command.add_args(dir);
-      if (branch || build_id || build_target) {
-        fetch_command.add_args("--default_build");
-        auto target = build_target ? "/" + *build_target : "";
-        auto build = build_id.value_or(branch.value_or("aosp-master"));
-        fetch_command.add_args(build + target);
-      }
-      *fetch_command.mutable_working_directory() = dir;
-      auto& fetch_env = *fetch_command.mutable_env();
-      fetch_env[kAndroidHostOut] = host_artifacts_path->second;
-    }
-
-    cvd::Request& start_request = request_protos.emplace_back();
-    auto& start_command = *start_request.mutable_command_request();
-    start_command.add_args("cvd");
-    start_command.add_args("start");
-    start_command.add_args("--daemon");
-    start_command.add_args("--undefok");
-    start_command.add_args("report_anonymous_usage_stats");
-    start_command.add_args("--report_anonymous_usage_stats");
-    start_command.add_args("y");
-    if (launch_args) {
-      for (const auto& arg : CF_EXPECT(BashTokenize(*launch_args))) {
-        start_command.add_args(arg);
-      }
-    }
-    static constexpr char kAndroidProductOut[] = "ANDROID_PRODUCT_OUT";
-    auto& start_env = *start_command.mutable_env();
-    if (local_image) {
-      start_env[kAndroidHostOut] = host_artifacts_path->second;
-
-      auto product_out = request_command.env().find(kAndroidProductOut);
-      CF_EXPECT(product_out != request_command.env().end(),
-                "Missing " << kAndroidProductOut);
-      start_env[kAndroidProductOut] = product_out->second;
-    } else {
-      start_env[kAndroidHostOut] = dir;
-      start_env[kAndroidProductOut] = dir;
-    }
-    start_env["CUTTLEFISH_INSTANCE"] = std::to_string(lock->Instance());
-    start_env["HOME"] = dir;
-    *start_command.mutable_working_directory() = dir;
-
-    std::vector<SharedFD> fds;
-    if (verbose) {
-      fds = request.FileDescriptors();
-    } else {
-      auto dev_null = SharedFD::Open("/dev/null", O_RDWR);
-      CF_EXPECT(dev_null->IsOpen(), dev_null->StrError());
-      fds = {dev_null, dev_null, dev_null};
-    }
-
-    ConvertedAcloudCreateCommand ret = {
-        .lock = {std::move(*lock)},
-    };
-    for (auto& request_proto : request_protos) {
-      ret.requests.emplace_back(request_proto, fds, request.Credentials());
-    }
-    return ret;
-  }
-
- private:
-  InstanceLockFileManager& lock_file_manager_;
-};
-
-class TryAcloudCreateCommand : public CvdServerHandler {
- public:
-  INJECT(TryAcloudCreateCommand(ConvertAcloudCreateCommand& converter))
-      : converter_(converter) {}
-  ~TryAcloudCreateCommand() = default;
-
-  Result<bool> CanHandle(const RequestWithStdio& request) const override {
-    auto invocation = ParseInvocation(request.Message());
-    return invocation.command == "try-acloud" &&
-           invocation.arguments.size() >= 1 &&
-           invocation.arguments[0] == "create";
-  }
-  Result<cvd::Response> Handle(const RequestWithStdio& request) override {
-    CF_EXPECT(converter_.Convert(request));
-    return CF_ERR("Unreleased");
-  }
-  Result<void> Interrupt() override { return CF_ERR("Can't be interrupted."); }
-
- private:
-  ConvertAcloudCreateCommand& converter_;
-};
-
-class AcloudCreateCommand : public CvdServerHandler {
- public:
-  INJECT(AcloudCreateCommand(CommandSequenceExecutor& executor,
-                             ConvertAcloudCreateCommand& converter))
-      : executor_(executor), converter_(converter) {}
-  ~AcloudCreateCommand() = default;
-
-  Result<bool> CanHandle(const RequestWithStdio& request) const override {
-    auto invocation = ParseInvocation(request.Message());
-    return invocation.command == "acloud" && invocation.arguments.size() >= 1 &&
-           invocation.arguments[0] == "create";
-  }
-  Result<cvd::Response> Handle(const RequestWithStdio& request) override {
-    std::unique_lock interrupt_lock(interrupt_mutex_);
-    if (interrupted_) {
-      return CF_ERR("Interrupted");
-    }
-    CF_EXPECT(CanHandle(request));
-
-    auto converted = CF_EXPECT(converter_.Convert(request));
-    interrupt_lock.unlock();
-    CF_EXPECT(executor_.Execute(converted.requests, request.Err()));
-
-    CF_EXPECT(converted.lock.Status(InUseState::kInUse));
-
-    cvd::Response response;
-    response.mutable_command_response();
-    return response;
-  }
-  Result<void> Interrupt() override {
-    std::scoped_lock interrupt_lock(interrupt_mutex_);
-    interrupted_ = true;
-    CF_EXPECT(executor_.Interrupt());
-    return {};
-  }
-
- private:
-  CommandSequenceExecutor& executor_;
-  ConvertAcloudCreateCommand& converter_;
-
-  std::mutex interrupt_mutex_;
-  bool interrupted_ = false;
-};
-
-}  // namespace
-
-fruit::Component<fruit::Required<CvdCommandHandler>> AcloudCommandComponent() {
-  return fruit::createComponent()
-      .addMultibinding<CvdServerHandler, AcloudCreateCommand>()
-      .addMultibinding<CvdServerHandler, TryAcloudCreateCommand>();
-}
-
-}  // namespace cuttlefish
diff --git a/host/commands/cvd/build_api.cpp b/host/commands/cvd/build_api.cpp
new file mode 100644
index 0000000..479294c
--- /dev/null
+++ b/host/commands/cvd/build_api.cpp
@@ -0,0 +1,28 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/commands/cvd/build_api.h"
+
+#include "host/libs/web/http_client/http_client.h"
+
+namespace cuttlefish {
+
+fruit::Component<BuildApi> BuildApiModule() {
+  return fruit::createComponent().registerProvider(
+      []() { return new BuildApi(); });
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/build_api.h b/host/commands/cvd/build_api.h
new file mode 100644
index 0000000..eb34427
--- /dev/null
+++ b/host/commands/cvd/build_api.h
@@ -0,0 +1,27 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <fruit/fruit.h>
+
+#include "host/libs/web/build_api.h"
+
+namespace cuttlefish {
+
+fruit::Component<BuildApi> BuildApiModule();
+
+}
diff --git a/host/commands/cvd/client.cpp b/host/commands/cvd/client.cpp
new file mode 100644
index 0000000..5333746
--- /dev/null
+++ b/host/commands/cvd/client.cpp
@@ -0,0 +1,373 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "client.h"
+
+#include <stdlib.h>
+
+#include <iostream>
+#include <sstream>
+
+#include <android-base/file.h>
+#include <google/protobuf/text_format.h>
+
+#include "common/libs/fs/shared_buf.h"
+#include "common/libs/fs/shared_fd.h"
+#include "common/libs/utils/environment.h"
+#include "common/libs/utils/subprocess.h"
+#include "host/commands/cvd/common_utils.h"
+#include "host/commands/cvd/server_constants.h"
+#include "host/libs/config/host_tools_version.h"
+
+namespace cuttlefish {
+namespace {
+
+Result<SharedFD> ConnectToServer() {
+  auto connection =
+      SharedFD::SocketLocalClient(cvd::kServerSocketPath,
+                                  /*is_abstract=*/true, SOCK_SEQPACKET);
+  if (!connection->IsOpen()) {  // fall back to SOCK_STREAM for older servers
+    connection =
+        SharedFD::SocketLocalClient(cvd::kServerSocketPath,
+                                    /*is_abstract=*/true, SOCK_STREAM);
+  }
+  if (!connection->IsOpen()) {
+    return CF_ERR("Failed to connect to server: " << connection->StrError());
+  }
+  return connection;
+}
+
+[[noreturn]] void CallPythonAcloud(std::vector<std::string>& args) {
+  auto android_top = StringFromEnv("ANDROID_BUILD_TOP", "");
+  if (android_top == "") {
+    LOG(FATAL) << "Could not find android environment. Please run "
+               << "\"source build/envsetup.sh\".";
+    abort();
+  }
+  // TODO(b/206893146): Detect what the platform actually is.
+  auto py_acloud_path =
+      android_top + "/prebuilts/asuite/acloud/linux-x86/acloud";
+  std::unique_ptr<char*[]> new_argv(new char*[args.size() + 1]);
+  for (size_t i = 0; i < args.size(); i++) {
+    new_argv[i] = args[i].data();
+  }
+  new_argv[args.size()] = nullptr;
+  execv(py_acloud_path.data(), new_argv.get());
+  PLOG(FATAL) << "execv(" << py_acloud_path << ", ...) failed";
+  abort();
+}
+
+}  // end of namespace
+
+cvd::Version CvdClient::GetClientVersion() {
+  cvd::Version client_version;
+  client_version.set_major(cvd::kVersionMajor);
+  client_version.set_minor(cvd::kVersionMinor);
+  client_version.set_build(android::build::GetBuildNumber());
+  client_version.set_crc32(FileCrc(kServerExecPath));
+  return client_version;
+}
+
+Result<cvd::Version> CvdClient::GetServerVersion() {
+  cvd::Request request;
+  request.mutable_version_request();
+  auto response = SendRequest(request);
+
+  // If cvd_server is not running, start and wait before checking its version.
+  if (!response.ok()) {
+    CF_EXPECT(StartCvdServer());
+    response = CF_EXPECT(SendRequest(request));
+  }
+  CF_EXPECT(CheckStatus(response->status(), "GetVersion"));
+  CF_EXPECT(response->has_version_response(),
+            "GetVersion call missing VersionResponse.");
+
+  return response->version_response().version();
+}
+
+Result<void> CvdClient::ValidateServerVersion(const int num_retries) {
+  auto server_version = CF_EXPECT(GetServerVersion());
+  if (server_version.major() != cvd::kVersionMajor) {
+    return CF_ERR("Major version difference: cvd("
+                  << cvd::kVersionMajor << "." << cvd::kVersionMinor
+                  << ") != cvd_server(" << server_version.major() << "."
+                  << server_version.minor()
+                  << "). Try `cvd kill-server` or `pkill cvd_server`.");
+  }
+  if (server_version.minor() < cvd::kVersionMinor) {
+    std::cerr << "Minor version of cvd_server is older than latest. "
+              << "Attempting to restart..." << std::endl;
+    CF_EXPECT(StopCvdServer(/*clear=*/false));
+    CF_EXPECT(StartCvdServer());
+    if (num_retries > 0) {
+      CF_EXPECT(ValidateServerVersion(num_retries - 1));
+      return {};
+    } else {
+      return CF_ERR("Unable to start the cvd_server with version "
+                    << cvd::kVersionMajor << "." << cvd::kVersionMinor);
+    }
+  }
+  if (server_version.build() != android::build::GetBuildNumber()) {
+    LOG(VERBOSE) << "cvd_server client version ("
+                 << android::build::GetBuildNumber()
+                 << ") does not match server version ("
+                 << server_version.build() << ")" << std::endl;
+  }
+  auto self_crc32 = FileCrc(kServerExecPath);
+  if (server_version.crc32() != self_crc32) {
+    LOG(VERBOSE) << "cvd_server client checksum (" << self_crc32
+                 << ") doesn't match server checksum ("
+                 << server_version.crc32() << ")" << std::endl;
+  }
+  return {};
+}
+
+Result<void> CvdClient::StopCvdServer(bool clear) {
+  if (!server_) {
+    // server_ may not represent a valid connection even while the server is
+    // running, if we haven't tried to connect. This establishes first whether
+    // the server is running.
+    auto connection_attempt = ConnectToServer();
+    if (!connection_attempt.ok()) {
+      return {};
+    }
+  }
+
+  cvd::Request request;
+  auto shutdown_request = request.mutable_shutdown_request();
+  if (clear) {
+    shutdown_request->set_clear(true);
+  }
+
+  // Send the server a pipe with the Shutdown request that it
+  // will close when it fully exits.
+  SharedFD read_pipe, write_pipe;
+  CF_EXPECT(cuttlefish::SharedFD::Pipe(&read_pipe, &write_pipe),
+            "Unable to create shutdown pipe: " << strerror(errno));
+
+  auto response =
+      SendRequest(request, OverrideFd{/* override none of 0, 1, 2 */},
+                  /*extra_fd=*/write_pipe);
+
+  // If the server is already not running then SendRequest will fail.
+  // We treat this as success.
+  if (!response.ok()) {
+    server_.reset();
+    return {};
+  }
+
+  CF_EXPECT(CheckStatus(response->status(), "Shutdown"));
+  CF_EXPECT(response->has_shutdown_response(),
+            "Shutdown call missing ShutdownResponse.");
+
+  // Clear out the server_ socket.
+  server_.reset();
+
+  // Close the write end of the pipe in this process. Now the only
+  // process that may have the write end still open is the cvd_server.
+  write_pipe->Close();
+
+  // Wait for the pipe to close by attempting to read from the pipe.
+  char buf[1];  // Any size >0 should work for read attempt.
+  CF_EXPECT(read_pipe->Read(buf, sizeof(buf)) <= 0,
+            "Unexpected read value from cvd_server shutdown pipe.");
+  return {};
+}
+
+Result<cvd::Response> CvdClient::HandleCommand(
+    const std::vector<std::string>& cvd_process_args,
+    const std::unordered_map<std::string, std::string>& env,
+    const std::vector<std::string>& selector_args,
+    const OverrideFd& new_control_fd) {
+  std::optional<SharedFD> exe_fd;
+  // actual commandline arguments are packed in selector_args
+  if (selector_args.size() > 2 &&
+      android::base::Basename(selector_args[0]) == "cvd" &&
+      selector_args[1] == "restart-server" &&
+      selector_args[2] == "match-client") {
+    exe_fd = SharedFD::Open(kServerExecPath, O_RDONLY);
+    CF_EXPECT((*exe_fd)->IsOpen(), "Failed to open \""
+                                       << kServerExecPath << "\": \""
+                                       << (*exe_fd)->StrError() << "\"");
+  }
+  cvd::Request request = MakeRequest({.cmd_args = cvd_process_args,
+                                      .env = env,
+                                      .selector_args = selector_args},
+                                     cvd::WAIT_BEHAVIOR_COMPLETE);
+  auto response = CF_EXPECT(SendRequest(request, new_control_fd, exe_fd));
+  CF_EXPECT(CheckStatus(response.status(), "HandleCommand"));
+  CF_EXPECT(response.has_command_response(),
+            "HandleCommand call missing CommandResponse.");
+  return {response};
+}
+
+Result<void> CvdClient::SetServer(const SharedFD& server) {
+  CF_EXPECT(!server_, "Already have a server");
+  CF_EXPECT(server->IsOpen(), server->StrError());
+  server_ = UnixMessageSocket(server);
+  CF_EXPECT(server_->EnableCredentials(true).ok(),
+            "Unable to enable UnixMessageSocket credentials.");
+  return {};
+}
+
+Result<cvd::Response> CvdClient::SendRequest(const cvd::Request& request,
+                                             const OverrideFd& new_control_fds,
+                                             std::optional<SharedFD> extra_fd) {
+  if (!server_) {
+    CF_EXPECT(SetServer(CF_EXPECT(ConnectToServer())));
+  }
+  // Serialize and send the request.
+  std::string serialized;
+  CF_EXPECT(request.SerializeToString(&serialized),
+            "Unable to serialize request proto.");
+  UnixSocketMessage request_message;
+
+  std::vector<SharedFD> control_fds = {
+      (new_control_fds.stdin_override_fd ? *new_control_fds.stdin_override_fd
+                                         : SharedFD::Dup(0)),
+      (new_control_fds.stdout_override_fd ? *new_control_fds.stdout_override_fd
+                                          : SharedFD::Dup(1)),
+      (new_control_fds.stderr_override_fd ? *new_control_fds.stderr_override_fd
+                                          : SharedFD::Dup(2))};
+  if (extra_fd) {
+    control_fds.push_back(*extra_fd);
+  }
+  auto control = CF_EXPECT(ControlMessage::FromFileDescriptors(control_fds));
+  request_message.control.emplace_back(std::move(control));
+
+  request_message.data =
+      std::vector<char>(serialized.begin(), serialized.end());
+  CF_EXPECT(server_->WriteMessage(request_message));
+
+  // Read and parse the response.
+  auto read_result = CF_EXPECT(server_->ReadMessage());
+  serialized = std::string(read_result.data.begin(), read_result.data.end());
+  cvd::Response response;
+  CF_EXPECT(response.ParseFromString(serialized),
+            "Unable to parse serialized response proto.");
+  return response;
+}
+
+Result<void> CvdClient::StartCvdServer() {
+  SharedFD server_fd =
+      SharedFD::SocketLocalServer(cvd::kServerSocketPath,
+                                  /*is_abstract=*/true, SOCK_SEQPACKET, 0666);
+  CF_EXPECT(server_fd->IsOpen(), server_fd->StrError());
+
+  Command command(kServerExecPath);
+  command.AddParameter("-INTERNAL_server_fd=", server_fd);
+  SubprocessOptions options;
+  options.ExitWithParent(false);
+  command.Start(options);
+
+  // Connect to the server_fd, which waits for startup.
+  CF_EXPECT(SetServer(SharedFD::SocketLocalClient(cvd::kServerSocketPath,
+                                                  /*is_abstract=*/true,
+                                                  SOCK_SEQPACKET)));
+  return {};
+}
+
+Result<void> CvdClient::CheckStatus(const cvd::Status& status,
+                                    const std::string& rpc) {
+  if (status.code() == cvd::Status::OK) {
+    return {};
+  }
+  return CF_ERR("Received error response for \"" << rpc << "\":\n"
+                                                 << status.message()
+                                                 << "\nIn client");
+}
+
+Result<void> CvdClient::HandleAcloud(
+    const std::vector<std::string>& args,
+    const std::unordered_map<std::string, std::string>& env) {
+  auto server_running = ValidateServerVersion();
+
+  std::vector<std::string> args_copy{args};
+
+  // TODO(b/206893146): Make this decision inside the server.
+  if (!server_running.ok()) {
+    CallPythonAcloud(args_copy);
+    // no return
+  }
+
+  args_copy[0] = "try-acloud";
+  auto attempt = HandleCommand(args_copy, env, {});
+  if (!attempt.ok()) {
+    CallPythonAcloud(args_copy);
+    // no return
+  }
+
+  args_copy[0] = "acloud";
+  CF_EXPECT(HandleCommand(args_copy, env, {}));
+  return {};
+}
+
+Result<std::string> CvdClient::HandleVersion() {
+  using google::protobuf::TextFormat;
+  std::stringstream result;
+  std::string output;
+  auto server_version = CF_EXPECT(GetServerVersion());
+  CF_EXPECT(TextFormat::PrintToString(server_version, &output),
+            "converting server_version to string failed");
+  result << "Server version:" << std::endl << std::endl << output << std::endl;
+
+  CF_EXPECT(TextFormat::PrintToString(CvdClient::GetClientVersion(), &output),
+            "converting client version to string failed");
+  result << "Client version:" << std::endl << std::endl << output << std::endl;
+  return {result.str()};
+}
+
+Result<Json::Value> CvdClient::ListSubcommands(const cvd_common::Envs& envs) {
+  cvd_common::Args args{"cvd", "cmd-list"};
+  SharedFD read_pipe, write_pipe;
+  CF_EXPECT(cuttlefish::SharedFD::Pipe(&read_pipe, &write_pipe),
+            "Unable to create stdout-capture pipe: " << strerror(errno));
+  OverrideFd new_control_fd{.stdout_override_fd = write_pipe};
+  CF_EXPECT(
+      HandleCommand(args, envs, std::vector<std::string>{}, new_control_fd));
+
+  write_pipe->Close();
+  const int kChunkSize = 512;
+  char buf[kChunkSize + 1] = {0};
+  std::stringstream ss;
+  do {
+    auto n_read = ReadExact(read_pipe, buf, kChunkSize);
+    CF_EXPECT(n_read >= 0 && (n_read <= kChunkSize));
+    if (n_read == 0) {
+      break;
+    }
+    buf[n_read] = 0;  // null-terminate the C-style string
+    ss << buf;
+    if (n_read < kChunkSize) {  // short read: writer closed the pipe
+      break;
+    }
+  } while (true);
+  auto json_output = CF_EXPECT(ParseJson(ss.str()));
+  return json_output;
+}
+
+Result<cvd_common::Args> CvdClient::ValidSubcmdsList(
+    const cvd_common::Envs& envs) {
+  auto valid_subcmd_json = CF_EXPECT(ListSubcommands(envs));
+  CF_EXPECT(valid_subcmd_json.isMember("subcmd"),
+            "Server returned the list of subcommands in Json but it is missing "
+                << " \"subcmd\" field");
+  std::string valid_subcmd_string = valid_subcmd_json["subcmd"].asString();
+  auto valid_subcmds = android::base::Tokenize(valid_subcmd_string, ",");
+  return valid_subcmds;
+}
+
+}  // end of namespace cuttlefish
diff --git a/host/commands/cvd/client.h b/host/commands/cvd/client.h
new file mode 100644
index 0000000..7105ed5
--- /dev/null
+++ b/host/commands/cvd/client.h
@@ -0,0 +1,82 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <optional>
+#include <string>
+#include <unordered_map>
+#include <vector>
+
+#include <android-base/logging.h>
+#include <android-base/result.h>
+#include <build/version.h>
+
+#include "common/libs/fs/shared_fd.h"
+#include "common/libs/utils/json.h"
+#include "common/libs/utils/result.h"
+#include "common/libs/utils/unix_sockets.h"
+#include "cvd_server.pb.h"
+#include "host/commands/cvd/types.h"
+
+namespace cuttlefish {
+
+struct OverrideFd {
+  std::optional<SharedFD> stdin_override_fd;
+  std::optional<SharedFD> stdout_override_fd;
+  std::optional<SharedFD> stderr_override_fd;
+};
+
+class CvdClient {
+ public:
+  Result<void> ValidateServerVersion(const int num_retries = 1);
+  Result<void> StopCvdServer(bool clear);
+  Result<void> HandleAcloud(
+      const std::vector<std::string>& args,
+      const std::unordered_map<std::string, std::string>& env);
+  Result<cvd::Response> HandleCommand(
+      const std::vector<std::string>& args,
+      const std::unordered_map<std::string, std::string>& env,
+      const std::vector<std::string>& selector_args,
+      const OverrideFd& control_fds);
+  Result<cvd::Response> HandleCommand(
+      const std::vector<std::string>& args,
+      const std::unordered_map<std::string, std::string>& env,
+      const std::vector<std::string>& selector_args) {
+    auto response = CF_EXPECT(
+        HandleCommand(args, env, selector_args,
+                      OverrideFd{std::nullopt, std::nullopt, std::nullopt}));
+    return response;
+  }
+  Result<std::string> HandleVersion();
+  Result<cvd_common::Args> ValidSubcmdsList(const cvd_common::Envs& envs);
+
+ private:
+  std::optional<UnixMessageSocket> server_;
+
+  Result<void> SetServer(const SharedFD& server);
+  Result<cvd::Response> SendRequest(const cvd::Request& request,
+                                    const OverrideFd& new_control_fds = {},
+                                    std::optional<SharedFD> extra_fd = {});
+  Result<void> StartCvdServer();
+  Result<void> CheckStatus(const cvd::Status& status, const std::string& rpc);
+  Result<cvd::Version> GetServerVersion();
+
+  Result<Json::Value> ListSubcommands(const cvd_common::Envs& envs);
+  static cvd::Version GetClientVersion();
+};
+
+}  // end of namespace cuttlefish
diff --git a/host/commands/cvd/command_sequence.cpp b/host/commands/cvd/command_sequence.cpp
index 21b146c..a7a6cf2 100644
--- a/host/commands/cvd/command_sequence.cpp
+++ b/host/commands/cvd/command_sequence.cpp
@@ -20,6 +20,7 @@
 #include "common/libs/fs/shared_buf.h"
 #include "host/commands/cvd/server.h"
 #include "host/commands/cvd/server_client.h"
+#include "host/commands/cvd/types.h"
 
 namespace cuttlefish {
 namespace {
@@ -51,8 +52,20 @@
   for (const auto& [name, val] : command.env()) {
     effective_command << BashEscape(name) << "=" << BashEscape(val) << " ";
   }
-  for (const auto& argument : command.args()) {
-    effective_command << BashEscape(argument) << " ";
+  auto args = cvd_common::ConvertToArgs(command.args());
+  auto selector_args =
+      cvd_common::ConvertToArgs(command.selector_opts().args());
+  if (args.empty()) {
+    return effective_command.str();
+  }
+  const auto& cmd = args.front();
+  cvd_common::Args cmd_args{args.begin() + 1, args.end()};
+  effective_command << BashEscape(cmd) << " ";
+  for (const auto& selector_arg : selector_args) {
+    effective_command << BashEscape(selector_arg) << " ";
+  }
+  for (const auto& cmd_arg : cmd_args) {
+    effective_command << BashEscape(cmd_arg) << " ";
   }
   effective_command.seekp(-1, effective_command.cur);
   effective_command << "`\n";  // Overwrite last space
@@ -61,42 +74,75 @@
 
 }  // namespace
 
-CommandSequenceExecutor::CommandSequenceExecutor(
-    CvdCommandHandler& inner_handler)
-    : inner_handler_(inner_handler) {}
+CommandSequenceExecutor::CommandSequenceExecutor() {}
 
-Result<void> CommandSequenceExecutor::Interrupt() {
-  CF_EXPECT(inner_handler_.Interrupt());
+Result<void> CommandSequenceExecutor::LateInject(fruit::Injector<>& injector) {
+  server_handlers_ = injector.getMultibindings<CvdServerHandler>();
   return {};
 }
 
-Result<void> CommandSequenceExecutor::Execute(
+Result<void> CommandSequenceExecutor::Interrupt() {
+  std::unique_lock interrupt_lock(interrupt_mutex_);
+  interrupted_ = true;
+  if (handler_stack_.empty()) {
+    return {};
+  }
+  CF_EXPECT(handler_stack_.back()->Interrupt());
+  return {};
+}
+
+Result<std::vector<cvd::Response>> CommandSequenceExecutor::Execute(
     const std::vector<RequestWithStdio>& requests, SharedFD report) {
   std::unique_lock interrupt_lock(interrupt_mutex_);
-  if (interrupted_) {
-    return CF_ERR("Interrupted");
-  }
+  CF_EXPECT(!interrupted_, "Interrupted");
+
+  std::vector<cvd::Response> responses;
   for (const auto& request : requests) {
     auto& inner_proto = request.Message();
-    CF_EXPECT(inner_proto.has_command_request());
-    auto& command = inner_proto.command_request();
-    std::string str = FormattedCommand(command);
-    CF_EXPECT(WriteAll(report, str) == str.size(), report->StrError());
-
-    interrupt_lock.unlock();
-    auto response = CF_EXPECT(inner_handler_.Handle(request));
-    interrupt_lock.lock();
-    if (interrupted_) {
-      return CF_ERR("Interrupted");
+    if (inner_proto.has_command_request()) {
+      auto& command = inner_proto.command_request();
+      std::string str = FormattedCommand(command);
+      CF_EXPECT(WriteAll(report, str) == str.size(), report->StrError());
     }
+
+    auto handler = CF_EXPECT(RequestHandler(request, server_handlers_));
+    handler_stack_.push_back(handler);
+    interrupt_lock.unlock();
+    auto response = CF_EXPECT(handler->Handle(request));
+    interrupt_lock.lock();
+    handler_stack_.pop_back();
+
+    CF_EXPECT(interrupted_ == false, "Interrupted");
     CF_EXPECT(response.status().code() == cvd::Status::OK,
               "Reason: \"" << response.status().message() << "\"");
 
-    static const char kDoneMsg[] = "Done\n";
-    CF_EXPECT(WriteAll(request.Err(), kDoneMsg) == sizeof(kDoneMsg) - 1,
-              request.Err()->StrError());
+    responses.emplace_back(std::move(response));
   }
-  return {};
+  return {responses};
+}
+
+Result<cvd::Response> CommandSequenceExecutor::ExecuteOne(
+    const RequestWithStdio& request, SharedFD report) {
+  auto response_in_vector = CF_EXPECT(Execute({request}, report));
+  CF_EXPECT_EQ(response_in_vector.size(), 1);
+  return response_in_vector.front();
+}
+
+std::vector<std::string> CommandSequenceExecutor::CmdList() const {
+  std::unordered_set<std::string> subcmds;
+  for (const auto& handler : server_handlers_) {
+    auto&& cmds_list = handler->CmdList();
+    for (const auto& cmd : cmds_list) {
+      subcmds.insert(cmd);
+    }
+  }
+  // duplicates were already removed by the unordered_set
+  return std::vector<std::string>{subcmds.begin(), subcmds.end()};
+}
+
+fruit::Component<CommandSequenceExecutor> CommandSequenceExecutorComponent() {
+  return fruit::createComponent()
+      .addMultibinding<LateInjected, CommandSequenceExecutor>();
 }
 
 }  // namespace cuttlefish
diff --git a/host/commands/cvd/command_sequence.h b/host/commands/cvd/command_sequence.h
index bea305b..e9e18aa 100644
--- a/host/commands/cvd/command_sequence.h
+++ b/host/commands/cvd/command_sequence.h
@@ -20,22 +20,33 @@
 #include <fruit/fruit.h>
 
 #include "common/libs/fs/shared_fd.h"
-#include "host/commands/cvd/server.h"
+#include "cvd_server.pb.h"
 #include "host/commands/cvd/server_client.h"
+#include "host/commands/cvd/server_command/server_handler.h"
+#include "host/libs/config/inject.h"
 
 namespace cuttlefish {
 
-class CommandSequenceExecutor {
+class CommandSequenceExecutor : public LateInjected {
  public:
-  INJECT(CommandSequenceExecutor(CvdCommandHandler& inner_handler));
+  INJECT(CommandSequenceExecutor());
+
+  Result<void> LateInject(fruit::Injector<>&) override;
 
   Result<void> Interrupt();
-  Result<void> Execute(const std::vector<RequestWithStdio>&, SharedFD report);
+  Result<std::vector<cvd::Response>> Execute(
+      const std::vector<RequestWithStdio>&, SharedFD report);
+  Result<cvd::Response> ExecuteOne(const RequestWithStdio&, SharedFD report);
+
+  std::vector<std::string> CmdList() const;
 
  private:
+  std::vector<CvdServerHandler*> server_handlers_;
+  std::vector<CvdServerHandler*> handler_stack_;
   std::mutex interrupt_mutex_;
   bool interrupted_ = false;
-  CvdCommandHandler& inner_handler_;
 };
 
+fruit::Component<CommandSequenceExecutor> CommandSequenceExecutorComponent();
+
 }  // namespace cuttlefish
diff --git a/host/commands/cvd/common_utils.cpp b/host/commands/cvd/common_utils.cpp
new file mode 100644
index 0000000..4d92fac
--- /dev/null
+++ b/host/commands/cvd/common_utils.cpp
@@ -0,0 +1,217 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/commands/cvd/common_utils.h"
+
+#include <unistd.h>
+
+#include <algorithm>
+#include <memory>
+#include <sstream>
+#include <stack>
+
+#include <android-base/file.h>
+#include <android-base/logging.h>
+#include <android-base/strings.h>
+
+#include "common/libs/utils/contains.h"
+#include "common/libs/utils/files.h"
+#include "common/libs/utils/users.h"
+
+namespace cuttlefish {
+
+cvd::Request MakeRequest(const MakeRequestForm& request_form) {
+  return MakeRequest(request_form, cvd::WAIT_BEHAVIOR_COMPLETE);
+}
+
+cvd::Request MakeRequest(const MakeRequestForm& request_form,
+                         cvd::WaitBehavior wait_behavior) {
+  const auto& args = request_form.cmd_args;
+  const auto& env = request_form.env;
+  const auto& selector_args = request_form.selector_args;
+  cvd::Request request;
+  auto command_request = request.mutable_command_request();
+  for (const std::string& arg : args) {
+    command_request->add_args(arg);
+  }
+  auto selector_opts = command_request->mutable_selector_opts();
+  for (const std::string& selector_arg : selector_args) {
+    selector_opts->add_args(selector_arg);
+  }
+
+  for (const auto& [key, value] : env) {
+    (*command_request->mutable_env())[key] = value;
+  }
+
+  /*
+   * the client must set the kAndroidHostOut environment variable. There were,
+   * however, a few branches where kAndroidSoongHostOut replaced
+   * kAndroidHostOut. Cvd server eventually read kAndroidHostOut only and set
+   * both for the subtools.
+   *
+   * If none of the two are set, cvd server tries to use the parent directory of
+   * the client cvd executable as env[kAndroidHostOut].
+   *
+   */
+  if (!Contains(command_request->env(), kAndroidHostOut)) {
+    const std::string new_android_host_out =
+        Contains(command_request->env(), kAndroidSoongHostOut)
+            ? (*command_request->mutable_env())[kAndroidSoongHostOut]
+            : android::base::Dirname(android::base::GetExecutableDirectory());
+    (*command_request->mutable_env())[kAndroidHostOut] = new_android_host_out;
+  }
+
+  if (!request_form.working_dir) {
+    std::unique_ptr<char, void (*)(void*)> cwd(getcwd(nullptr, 0), &free);
+    command_request->set_working_directory(cwd.get());
+  } else {
+    command_request->set_working_directory(request_form.working_dir.value());
+  }
+  command_request->set_wait_behavior(wait_behavior);
+
+  return request;
+}
+
+// given /a/b/c/d/e, ensures
+// all directories from /a through /a/b/c/d/e exist
+Result<void> EnsureDirectoryExistsAllTheWay(const std::string& dir) {
+  CF_EXPECT(!dir.empty() && dir.at(0) == '/',
+            "EnsureDirectoryExistsAllTheWay() handles absolute paths only.");
+  if (dir == "/") {
+    return {};
+  }
+  std::string path_exclude_root = dir.substr(1);
+  std::vector<std::string> tokens =
+      android::base::Tokenize(path_exclude_root, "/");
+  std::string current_dir = "/";
+  for (size_t i = 0; i < tokens.size(); i++) {
+    current_dir.append(tokens[i]);
+    CF_EXPECT(EnsureDirectoryExists(current_dir),
+              current_dir << " does not exist and cannot be created.");
+    current_dir.append("/");
+  }
+  return {};
+}
+
+static std::vector<std::string> Reverse(std::stack<std::string>& s) {
+  std::vector<std::string> reversed;
+  while (!s.empty()) {
+    reversed.push_back(s.top());
+    s.pop();
+  }
+  std::reverse(reversed.begin(), reversed.end());
+  return reversed;
+}
+
+static std::vector<std::string> EmulateAbsolutePathImpl(
+    std::stack<std::string>& so_far, const std::vector<std::string>& tokens,
+    const size_t idx = 0) {
+  if (idx == tokens.size()) {
+    return Reverse(so_far);
+  }
+  const std::string& token = tokens.at(idx);
+  if (token == "." || token.empty()) {
+    // If token is empty, it might be //, so should be simply ignored
+    return EmulateAbsolutePathImpl(so_far, tokens, idx + 1);
+  }
+  if (token == "..") {
+    if (!so_far.empty()) {
+      // at /, ls ../../.. shows just the root. So, if too many ..s are here,
+      // we silently ignore them
+      so_far.pop();
+    }
+    return EmulateAbsolutePathImpl(so_far, tokens, idx + 1);
+  }
+  so_far.push(token);
+  return EmulateAbsolutePathImpl(so_far, tokens, idx + 1);
+}
+
+template <typename T>
+std::ostream& operator<<(std::ostream& out, const std::vector<T>& v) {
+  if (v.empty()) {
+    out << "{}";
+    return out;
+  }
+  if (v.size() == 1) {
+    out << "{" << v.front() << "}";
+    return out;
+  }
+  out << "{";
+  for (size_t i = 0; i != v.size() - 1; i++) {
+    out << v.at(i) << ", ";
+  }
+  out << v.back() << "}";
+  return out;
+}
+
+Result<std::string> EmulateAbsolutePath(const InputPathForm& path_info) {
+  const auto& path = path_info.path_to_convert;
+  std::string working_dir;
+  if (path_info.current_working_dir) {
+    working_dir = *path_info.current_working_dir;
+  } else {
+    std::unique_ptr<char, void (*)(void*)> cwd(getcwd(nullptr, 0), &free);
+    std::string process_cwd(cwd.get());
+    working_dir = std::move(process_cwd);
+  }
+  CF_EXPECT(android::base::StartsWith(working_dir, '/'),
+            "Current working directory should be given in an absolute path.");
+
+  const std::string home_dir = path_info.home_dir
+                                   ? *path_info.home_dir
+                                   : CF_EXPECT(SystemWideUserHome());
+  CF_EXPECT(android::base::StartsWith(home_dir, '/'),
+            "Home directory should be given in an absolute path.");
+
+  if (path.empty()) {
+    LOG(ERROR) << "The requested path to convert an absolute path is empty.";
+    return "";
+  }
+  if (path == "/") {
+    return path;
+  }
+  std::vector<std::string> tokens = android::base::Tokenize(path, "/");
+  std::stack<std::string> prefix_dir_stack;
+  if (path == "~" || android::base::StartsWith(path, "~/")) {
+    // tokens == {"~", "some", "dir", "file"}
+    std::vector<std::string> home_dir_tokens =
+        android::base::Tokenize(home_dir, "/");
+    tokens.erase(tokens.begin());
+    for (const auto& home_dir_token : home_dir_tokens) {
+      prefix_dir_stack.push(home_dir_token);
+    }
+  } else if (!android::base::StartsWith(path, "/")) {
+    // path was like "a/b/c", which should be expanded to $PWD/a/b/c
+    std::vector<std::string> working_dir_tokens =
+        android::base::Tokenize(working_dir, "/");
+    for (const auto& working_dir_token : working_dir_tokens) {
+      prefix_dir_stack.push(working_dir_token);
+    }
+  }
+
+  auto result = EmulateAbsolutePathImpl(prefix_dir_stack, tokens, 0);
+  std::stringstream assemble_output;
+  assemble_output << "/";
+  if (!result.empty()) {
+    assemble_output << android::base::Join(result, "/");
+  }
+  if (path_info.follow_symlink) {
+    return AbsolutePath(assemble_output.str());
+  }
+  return assemble_output.str();
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/common_utils.h b/host/commands/cvd/common_utils.h
new file mode 100644
index 0000000..623b0fc
--- /dev/null
+++ b/host/commands/cvd/common_utils.h
@@ -0,0 +1,108 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <sys/types.h>
+
+#include <optional>
+#include <sstream>
+#include <unordered_map>
+#include <vector>
+
+#include "common/libs/utils/result.h"
+#include "cvd_server.pb.h"
+#include "host/commands/cvd/types.h"
+
+namespace cuttlefish {
+
+// utility struct for std::variant uses
+template <typename... Ts>
+struct Overload : Ts... {
+  using Ts::operator()...;
+};
+
+template <typename... Ts>
+Overload(Ts...) -> Overload<Ts...>;
+
+struct MakeRequestForm {
+  cvd_common::Args cmd_args;
+  cvd_common::Envs env;
+  cvd_common::Args selector_args;
+  std::optional<std::string> working_dir;
+};
+
+cvd::Request MakeRequest(const MakeRequestForm& request_form,
+                         const cvd::WaitBehavior wait_behavior);
+
+cvd::Request MakeRequest(const MakeRequestForm& request_form);
+
+// name of environment variable to mark the launch_cvd initiated by the cvd
+// server
+static constexpr char kCvdMarkEnv[] = "_STARTED_BY_CVD_SERVER_";
+
+constexpr char kServerExecPath[] = "/proc/self/exe";
+
+// The name of environment variable that points to the host out directory
+constexpr char kAndroidHostOut[] = "ANDROID_HOST_OUT";
+// kAndroidHostOut for old branches
+constexpr char kAndroidSoongHostOut[] = "ANDROID_SOONG_HOST_OUT";
+constexpr char kAndroidProductOut[] = "ANDROID_PRODUCT_OUT";
+constexpr char kLaunchedByAcloud[] = "LAUNCHED_BY_ACLOUD";
+
+template <typename Ostream, typename... Args>
+Ostream& ConcatToStream(Ostream& out, Args&&... args) {
+  (out << ... << std::forward<Args>(args));
+  return out;
+}
+
+template <typename... Args>
+std::string ConcatToString(Args&&... args) {
+  std::stringstream concatenator;
+  return ConcatToStream(concatenator, std::forward<Args>(args)...).str();
+}
+
+// given /a/b/c/d/e, ensures
+// all directories from /a through /a/b/c/d/e exist
+Result<void> EnsureDirectoryExistsAllTheWay(const std::string& dir);
+
+struct InputPathForm {
+  /** If nullopt, uses the process' current working dir
+   *  But if there is no preceding .. or ., this field is not used.
+   */
+  std::optional<std::string> current_working_dir;
+  /** If nullopt, use SystemWideUserHome()
+   *  But, if there's no preceding ~, this field is not used.
+   */
+  std::optional<std::string> home_dir;
+  std::string path_to_convert;
+  bool follow_symlink;
+};
+
+/**
+ * Returns emulated absolute path with a different process'/thread's
+ * context.
+ *
+ * This is useful when daemon(0, 0)-started server process wants to
+ * figure out a relative path that came from its client.
+ *
+ * The call mostly succeeds. It fails only if:
+ *  home_dir isn't given, so the call falls back on the local
+ *  SystemWideUserHome(), and that SystemWideUserHome() call fails.
+ */
+Result<std::string> EmulateAbsolutePath(const InputPathForm& path_info);
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/demo_multi_vd.cpp b/host/commands/cvd/demo_multi_vd.cpp
new file mode 100644
index 0000000..f16bc73
--- /dev/null
+++ b/host/commands/cvd/demo_multi_vd.cpp
@@ -0,0 +1,30 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "host/commands/cvd/demo_multi_vd.h"
+
+#include "host/commands/cvd/server_command/serial_launch.h"
+#include "host/commands/cvd/server_command/serial_preset.h"
+
+namespace cuttlefish {
+
+fruit::Component<fruit::Required<CommandSequenceExecutor>>
+DemoMultiVdComponent() {
+  return fruit::createComponent()
+      .install(cvdSerialLaunchComponent)
+      .install(cvdSerialPresetComponent);
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/demo_multi_vd.h b/host/commands/cvd/demo_multi_vd.h
new file mode 100644
index 0000000..f0202ee
--- /dev/null
+++ b/host/commands/cvd/demo_multi_vd.h
@@ -0,0 +1,27 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#pragma once
+
+#include <fruit/fruit.h>
+
+#include "host/commands/cvd/command_sequence.h"
+
+namespace cuttlefish {
+
+fruit::Component<fruit::Required<CommandSequenceExecutor>>
+DemoMultiVdComponent();
+
+}
diff --git a/host/commands/cvd/driver_flags.cpp b/host/commands/cvd/driver_flags.cpp
new file mode 100644
index 0000000..3b13f64
--- /dev/null
+++ b/host/commands/cvd/driver_flags.cpp
@@ -0,0 +1,32 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/commands/cvd/driver_flags.h"
+
+#include <sstream>
+
+namespace cuttlefish {
+
+CvdFlag<bool> DriverFlags::HelpFlag() {
+  const bool default_val = false;
+  CvdFlag<bool> help_flag(kHelp, default_val);
+  std::stringstream help;
+  help << "--" << kHelp << " to print this message.";
+  help_flag.SetHelpMessage(help.str());
+  return help_flag;
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/driver_flags.h b/host/commands/cvd/driver_flags.h
new file mode 100644
index 0000000..a93191d
--- /dev/null
+++ b/host/commands/cvd/driver_flags.h
@@ -0,0 +1,52 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <string>
+#include <vector>
+
+#include "common/libs/utils/result.h"
+#include "host/commands/cvd/flag.h"
+
+namespace cuttlefish {
+
+/**
+ * The authentic collection of cvd driver flags
+ *
+ */
+// names of the flags, which are also used for search
+
+class DriverFlags {
+ public:
+  static constexpr char kHelp[] = "help";
+  static const DriverFlags& Get();
+
+  Result<CvdFlagProxy> GetFlag(const std::string& search_key) const {
+    auto flag = CF_EXPECT(flags_.GetFlag(search_key));
+    return flag;
+  }
+
+  std::vector<CvdFlagProxy> Flags() const { return flags_.Flags(); }
+  CvdFlag<bool> HelpFlag();
+
+ private:
+  DriverFlags() { flags_.EnrollFlag(HelpFlag()); }
+
+  FlagCollection flags_;
+};
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/epoll_loop.cpp b/host/commands/cvd/epoll_loop.cpp
index 51a6f6a..a0543bf 100644
--- a/host/commands/cvd/epoll_loop.cpp
+++ b/host/commands/cvd/epoll_loop.cpp
@@ -20,42 +20,25 @@
 
 #include "common/libs/fs/epoll.h"
 #include "common/libs/fs/shared_fd.h"
+#include "common/libs/utils/contains.h"
 #include "common/libs/utils/result.h"
 
 namespace cuttlefish {
 
-EpollPool::EpollPool(Epoll epoll) : epoll_(std::move(epoll)) {}
-
-EpollPool::EpollPool(EpollPool&& other) {
-  std::unique_lock own_lock(instance_mutex_, std::defer_lock);
-  std::unique_lock other_lock(other.instance_mutex_, std::defer_lock);
-  std::unique_lock own_cb_lock(callbacks_mutex_, std::defer_lock);
-  std::unique_lock other_cb_lock(other.callbacks_mutex_, std::defer_lock);
-  std::lock(own_lock, other_lock, own_cb_lock, other_cb_lock);
-  epoll_ = std::move(other.epoll_);
-  callbacks_ = std::move(other.callbacks_);
-}
-
-EpollPool& EpollPool::operator=(EpollPool&& other) {
-  std::unique_lock own_lock(instance_mutex_, std::defer_lock);
-  std::unique_lock other_lock(other.instance_mutex_, std::defer_lock);
-  std::unique_lock own_cb_lock(callbacks_mutex_, std::defer_lock);
-  std::unique_lock other_cb_lock(other.callbacks_mutex_, std::defer_lock);
-  std::lock(own_lock, other_lock, own_cb_lock, other_cb_lock);
-  epoll_ = std::move(other.epoll_);
-  callbacks_ = std::move(other.callbacks_);
-
-  return *this;
+EpollPool::EpollPool() {
+  auto epoll = Epoll::Create();
+  if (!epoll.ok()) {
+    LOG(ERROR) << epoll.error().Message();
+    LOG(DEBUG) << epoll.error().Trace();
+    abort();
+  }
+  epoll_ = std::move(*epoll);
 }
 
 Result<void> EpollPool::Register(SharedFD fd, uint32_t events,
                                  EpollCallback callback) {
-  std::shared_lock instance_lock(instance_mutex_, std::defer_lock);
-  std::unique_lock callbacks_lock(callbacks_mutex_, std::defer_lock);
-  std::lock(instance_lock, callbacks_lock);
-  if (callbacks_.find(fd) != callbacks_.end()) {
-    return CF_ERR("Already have a callback created");
-  }
+  std::lock_guard callbacks_lock(callbacks_mutex_);
+  CF_EXPECT(!Contains(callbacks_, fd), "Already have a callback created");
   CF_EXPECT(epoll_.AddOrModify(fd, events | EPOLLONESHOT));
   callbacks_[fd] = std::move(callback);
   return {};
@@ -68,7 +51,7 @@
   }
   EpollCallback callback;
   {
-    std::lock_guard lock(callbacks_mutex_);
+    std::lock_guard callbacks_lock(callbacks_mutex_);
     auto it = callbacks_.find(event->fd);
     CF_EXPECT(it != callbacks_.end(), "Could not find event callback");
     callback = std::move(it->second);
@@ -79,19 +62,14 @@
 }
 
 Result<void> EpollPool::Remove(SharedFD fd) {
-  std::shared_lock instance_lock(instance_mutex_, std::defer_lock);
-  std::unique_lock callbacks_lock(callbacks_mutex_, std::defer_lock);
-  std::lock(instance_lock, callbacks_lock);
+  std::lock_guard callbacks_lock(callbacks_mutex_);
   CF_EXPECT(epoll_.Delete(fd), "No callback registered with epoll");
   callbacks_.erase(fd);
   return {};
 }
 
 fruit::Component<EpollPool> EpollLoopComponent() {
-  return fruit::createComponent()
-      .registerProvider([]() -> EpollPool {
-        return EpollPool(OR_FATAL(Epoll::Create()));
-      });
+  return fruit::createComponent();
 }
 
 }  // namespace cuttlefish
diff --git a/host/commands/cvd/epoll_loop.h b/host/commands/cvd/epoll_loop.h
index 8ae48d4..54b79cb 100644
--- a/host/commands/cvd/epoll_loop.h
+++ b/host/commands/cvd/epoll_loop.h
@@ -32,9 +32,7 @@
 
 class EpollPool {
  public:
-  EpollPool(Epoll);
-  EpollPool(EpollPool&&);
-  EpollPool& operator=(EpollPool&&);
+  INJECT(EpollPool());
 
   /**
    * The `callback` function will be invoked with an EpollEvent containing `fd`
@@ -51,7 +49,6 @@
   Result<void> Remove(SharedFD fd);
 
  private:
-  std::shared_mutex instance_mutex_;
   Epoll epoll_;
   std::mutex callbacks_mutex_;
   std::map<SharedFD, EpollCallback> callbacks_;
diff --git a/host/commands/cvd/fetch/Android.bp b/host/commands/cvd/fetch/Android.bp
new file mode 100644
index 0000000..c8fe48b
--- /dev/null
+++ b/host/commands/cvd/fetch/Android.bp
@@ -0,0 +1,26 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+cc_library_host_static {
+    name: "libcvd_fetch",
+    srcs: [
+        "fetch_cvd.cc"
+    ],
+    defaults: ["cvd_lib_defaults"],
+}
diff --git a/host/commands/cvd/fetch/fetch_cvd.cc b/host/commands/cvd/fetch/fetch_cvd.cc
new file mode 100644
index 0000000..9418071
--- /dev/null
+++ b/host/commands/cvd/fetch/fetch_cvd.cc
@@ -0,0 +1,647 @@
+//
+// Copyright (C) 2019 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "host/commands/cvd/fetch/fetch_cvd.h"
+
+#include <sys/stat.h>
+
+#include <chrono>
+#include <fstream>
+#include <future>
+#include <iostream>
+#include <iterator>
+#include <memory>
+#include <optional>
+#include <string>
+#include <thread>
+#include <utility>
+#include <vector>
+
+#include <android-base/logging.h>
+#include <android-base/strings.h>
+#include <curl/curl.h>
+#include <gflags/gflags.h>
+
+#include "common/libs/fs/shared_fd.h"
+#include "common/libs/utils/archive.h"
+#include "common/libs/utils/environment.h"
+#include "common/libs/utils/files.h"
+#include "common/libs/utils/flag_parser.h"
+#include "common/libs/utils/result.h"
+#include "common/libs/utils/subprocess.h"
+#include "host/libs/config/fetcher_config.h"
+#include "host/libs/web/build_api.h"
+#include "host/libs/web/credential_source.h"
+
+namespace cuttlefish {
+namespace {
+
+const std::string DEFAULT_BRANCH = "aosp-master";
+const std::string DEFAULT_BUILD_TARGET = "aosp_cf_x86_64_phone-userdebug";
+const std::string HOST_TOOLS = "cvd-host_package.tar.gz";
+const std::string KERNEL = "kernel";
+const std::string OTA_TOOLS = "otatools.zip";
+const std::string OTA_TOOLS_DIR = "/otatools/";
+const int DEFAULT_RETRY_PERIOD = 20;
+const std::string USAGE_MESSAGE =
+    "<flags>\n"
+    "\n"
+    "\"*_build\" flags accept values in the following format:\n"
+    "\"branch/build_target\" - latest build of \"branch\" for "
+    "\"build_target\"\n"
+    "\"build_id/build_target\" - build \"build_id\" for \"build_target\"\n"
+    "\"branch\" - latest build of \"branch\" for "
+    "\"aosp_cf_x86_phone-userdebug\"\n"
+    "\"build_id\" - build \"build_id\" for \"aosp_cf_x86_phone-userdebug\"\n";
+const mode_t RWX_ALL_MODE = S_IRWXU | S_IRWXG | S_IRWXO;
+
+struct BuildApiFlags {
+  std::string api_key = "";
+  std::string credential_source = "";
+  std::chrono::seconds wait_retry_period =
+      std::chrono::seconds(DEFAULT_RETRY_PERIOD);
+  bool external_dns_resolver =
+#ifdef __BIONIC__
+      true;
+#else
+      false;
+#endif
+};
+
+struct BuildSourceFlags {
+  std::string default_build = DEFAULT_BRANCH + "/" + DEFAULT_BUILD_TARGET;
+  std::string system_build = "";
+  std::string kernel_build = "";
+  std::string boot_build = "";
+  std::string bootloader_build = "";
+  std::string otatools_build = "";
+  std::string host_package_build = "";
+};
+
+struct DownloadFlags {
+  std::string boot_artifact = "";
+  bool download_img_zip = true;
+  bool download_target_files_zip = false;
+};
+
+struct FetchFlags {
+  std::string target_directory = "";
+  bool keep_downloaded_archives = false;
+  bool helpxml = false;
+  BuildApiFlags build_api_flags;
+  BuildSourceFlags build_source_flags;
+  DownloadFlags download_flags;
+};
+
+struct Builds {
+  Build default_build;
+  std::optional<Build> system;
+  std::optional<Build> kernel;
+  std::optional<Build> boot;
+  std::optional<Build> bootloader;
+  std::optional<Build> otatools;
+  std::optional<Build> host_package;
+};
+
+std::vector<Flag> GetFlagsVector(FetchFlags& fetch_flags,
+                                 BuildApiFlags& build_api_flags,
+                                 BuildSourceFlags& build_source_flags,
+                                 DownloadFlags& download_flags,
+                                 int& retry_period, std::string& directory) {
+  std::vector<Flag> flags;
+  flags.emplace_back(
+      GflagsCompatFlag("directory", directory)
+          .Help("Target directory to fetch files into. (deprecated)"));
+  flags.emplace_back(
+      GflagsCompatFlag("target_directory", fetch_flags.target_directory)
+          .Help("Target directory to fetch files into."));
+  flags.emplace_back(GflagsCompatFlag("keep_downloaded_archives",
+                                      fetch_flags.keep_downloaded_archives)
+                         .Help("Keep downloaded zip/tar."));
+
+  flags.emplace_back(GflagsCompatFlag("api_key", build_api_flags.api_key)
+                         .Help("API key ofr the Android Build API"));
+  flags.emplace_back(
+      GflagsCompatFlag("credential_source", build_api_flags.credential_source)
+          .Help("Build API credential source"));
+  flags.emplace_back(GflagsCompatFlag("wait_retry_period", retry_period)
+                         .Help("Retry period for pending builds given in "
+                               "seconds. Set to 0 to not wait."));
+  flags.emplace_back(
+      GflagsCompatFlag("external_dns_resolver",
+                       build_api_flags.external_dns_resolver)
+          .Help("Use an out-of-process mechanism to resolve DNS queries"));
+
+  flags.emplace_back(
+      GflagsCompatFlag("default_build", build_source_flags.default_build)
+          .Help("source for the cuttlefish build to use (vendor.img + host)"));
+  flags.emplace_back(
+      GflagsCompatFlag("system_build", build_source_flags.system_build)
+          .Help("source for system.img and product.img"));
+  flags.emplace_back(
+      GflagsCompatFlag("kernel_build", build_source_flags.kernel_build)
+          .Help("source for the kernel or gki target"));
+  flags.emplace_back(
+      GflagsCompatFlag("boot_build", build_source_flags.boot_build)
+          .Help("source for the boot or gki target"));
+  flags.emplace_back(
+      GflagsCompatFlag("bootloader_build", build_source_flags.bootloader_build)
+          .Help("source for the bootloader target"));
+  flags.emplace_back(
+      GflagsCompatFlag("otatools_build", build_source_flags.otatools_build)
+          .Help("source for the host ota tools"));
+  flags.emplace_back(GflagsCompatFlag("host_package_build",
+                                      build_source_flags.host_package_build)
+                         .Help("source for the host cvd tools"));
+
+  flags.emplace_back(
+      GflagsCompatFlag("boot_artifact", download_flags.boot_artifact)
+          .Help("name of the boot image in boot_build"));
+  flags.emplace_back(
+      GflagsCompatFlag("download_img_zip", download_flags.download_img_zip)
+          .Help("Whether to fetch the -img-*.zip file."));
+  flags.emplace_back(
+      GflagsCompatFlag("download_target_files_zip",
+                       download_flags.download_target_files_zip)
+          .Help("Whether to fetch the -target_files-*.zip file."));
+
+  flags.emplace_back(UnexpectedArgumentGuard());
+  flags.emplace_back(HelpFlag(flags, USAGE_MESSAGE));
+  flags.emplace_back(
+      HelpXmlFlag(flags, std::cout, fetch_flags.helpxml, USAGE_MESSAGE));
+  return flags;
+}
+
+Result<FetchFlags> GetFlagValues(int argc, char** argv) {
+  FetchFlags fetch_flags;
+  BuildApiFlags build_api_flags;
+  BuildSourceFlags build_source_flags;
+  DownloadFlags download_flags;
+  int retry_period = DEFAULT_RETRY_PERIOD;
+  std::string directory = "";
+
+  std::vector<Flag> flags =
+      GetFlagsVector(fetch_flags, build_api_flags, build_source_flags,
+                     download_flags, retry_period, directory);
+  std::vector<std::string> args = ArgsToVec(argc - 1, argv + 1);
+  CF_EXPECT(ParseFlags(flags, args), "Could not process command line flags.");
+
+  build_api_flags.wait_retry_period = std::chrono::seconds(retry_period);
+  if (directory != "") {
+    LOG(ERROR) << "Please use --target_directory instead of --directory";
+    if (fetch_flags.target_directory == "") {
+      fetch_flags.target_directory = directory;
+    }
+  } else {
+    if (fetch_flags.target_directory == "") {
+      fetch_flags.target_directory = CurrentDirectory();
+    }
+  }
+
+  fetch_flags.build_api_flags = build_api_flags;
+  fetch_flags.build_source_flags = build_source_flags;
+  fetch_flags.download_flags = download_flags;
+  return {fetch_flags};
+}
+
+Result<std::string> DownloadImageZip(BuildApi& build_api, const Build& build,
+                                     const std::string& target_directory) {
+  std::string img_zip_name = GetBuildZipName(build, "img");
+  return build_api.DownloadFile(build, target_directory, img_zip_name);
+}
+
+Result<std::vector<std::string>> DownloadImages(
+    BuildApi& build_api, const Build& build,
+    const std::string& target_directory, const std::vector<std::string>& images,
+    const bool keep_archives) {
+  std::string local_path =
+      CF_EXPECT(DownloadImageZip(build_api, build, target_directory));
+  std::vector<std::string> files = CF_EXPECT(
+      ExtractImages(local_path, target_directory, images, keep_archives));
+  return files;
+}
+
+Result<std::string> DownloadTargetFiles(BuildApi& build_api, const Build& build,
+                                        const std::string& target_directory) {
+  std::string target_files_name = GetBuildZipName(build, "target_files");
+  return build_api.DownloadFile(build, target_directory, target_files_name);
+}
+
+Result<std::vector<std::string>> DownloadHostPackage(
+    BuildApi& build_api, const Build& build,
+    const std::string& target_directory, const bool keep_archives) {
+  std::string local_path =
+      CF_EXPECT(build_api.DownloadFile(build, target_directory, HOST_TOOLS));
+  return ExtractArchiveContents(local_path, target_directory, keep_archives);
+}
+
+Result<std::vector<std::string>> DownloadOtaTools(
+    BuildApi& build_api, const Build& build,
+    const std::string& target_directory, const bool keep_archives) {
+  std::string local_path =
+      CF_EXPECT(build_api.DownloadFile(build, target_directory, OTA_TOOLS));
+  std::string otatools_dir = target_directory + OTA_TOOLS_DIR;
+  CF_EXPECT(EnsureDirectoryExists(otatools_dir, RWX_ALL_MODE));
+  return ExtractArchiveContents(local_path, otatools_dir, keep_archives);
+}
+
+Result<std::string> DownloadMiscInfo(BuildApi& build_api, const Build& build,
+                                     const std::string& target_dir) {
+  return build_api.DownloadFile(build, target_dir, "misc_info.txt");
+}
+
+Result<std::vector<std::string>> DownloadBoot(
+    BuildApi& build_api, const Build& build,
+    const std::string& specified_artifact, const std::string& target_dir,
+    const bool keep_archives) {
+  std::string target_boot = target_dir + "/boot.img";
+  const std::string& boot_artifact =
+      specified_artifact != "" ? specified_artifact : "boot.img";
+  if (specified_artifact != "") {
+    Result<std::string> artifact_result =
+        build_api.DownloadFile(build, target_dir, specified_artifact);
+    if (artifact_result.ok()) {
+      RenameFile(artifact_result.value(), target_boot);
+      return {{target_boot}};
+    }
+    LOG(INFO) << "Find " << boot_artifact << " in the img zip";
+  }
+
+  std::vector<std::string> files{target_boot};
+  std::string img_zip =
+      CF_EXPECT(DownloadImageZip(build_api, build, target_dir));
+  const bool keep_img_zip_archive_for_vendor_boot = true;
+  std::string extracted_boot =
+      CF_EXPECT(ExtractImage(img_zip, target_dir, boot_artifact,
+                             keep_img_zip_archive_for_vendor_boot));
+  if (extracted_boot != target_boot) {
+    CF_EXPECT(RenameFile(extracted_boot, target_boot));
+  }
+  Result<std::string> extracted_vendor_boot_result =
+      ExtractImage(img_zip, target_dir, "vendor_boot.img", keep_archives);
+  if (extracted_vendor_boot_result.ok()) {
+    files.push_back(extracted_vendor_boot_result.value());
+  }
+  return files;
+}
+
+Result<void> AddFilesToConfig(FileSource purpose, const Build& build,
+                              const std::vector<std::string>& paths,
+                              FetcherConfig* config,
+                              const std::string& directory_prefix,
+                              bool override_entry = false) {
+  for (const std::string& path : paths) {
+    std::string_view local_path(path);
+    if (!android::base::ConsumePrefix(&local_path, directory_prefix)) {
+      LOG(ERROR) << "Failed to remove prefix " << directory_prefix << " from "
+                 << local_path;
+    }
+    while (android::base::StartsWith(local_path, "/")) {
+      android::base::ConsumePrefix(&local_path, "/");
+    }
+    // TODO(schuffelen): Do better for local builds here.
+    auto id = std::visit([](auto&& arg) { return arg.id; }, build);
+    auto target = std::visit([](auto&& arg) { return arg.target; }, build);
+    CvdFile file(purpose, id, target, std::string(local_path));
+    CF_EXPECT(config->add_cvd_file(file, override_entry),
+              "Duplicate file \"" << file << "\", Existing file: \""
+                                  << config->get_cvd_files()[path]
+                                  << "\". Failed to add path \"" << path
+                                  << "\"");
+  }
+  return {};
+}
+
+std::unique_ptr<CredentialSource> TryOpenServiceAccountFile(
+    HttpClient& http_client, const std::string& path) {
+  LOG(VERBOSE) << "Attempting to open service account file \"" << path << "\"";
+  Json::CharReaderBuilder builder;
+  std::ifstream ifs(path);
+  Json::Value content;
+  std::string errorMessage;
+  if (!Json::parseFromStream(builder, ifs, &content, &errorMessage)) {
+    LOG(VERBOSE) << "Could not read config file \"" << path
+                 << "\": " << errorMessage;
+    return {};
+  }
+  static constexpr char BUILD_SCOPE[] =
+      "https://www.googleapis.com/auth/androidbuild.internal";
+  auto result = ServiceAccountOauthCredentialSource::FromJson(
+      http_client, content, BUILD_SCOPE);
+  if (!result.ok()) {
+    LOG(VERBOSE) << "Failed to load service account json file: \n"
+                 << result.error().Trace();
+    return {};
+  }
+  return std::unique_ptr<CredentialSource>(
+      new ServiceAccountOauthCredentialSource(std::move(*result)));
+}
+
+Result<void> ProcessHostPackage(BuildApi& build_api, const Build& build,
+                                const std::string& target_dir,
+                                FetcherConfig* config,
+                                const std::string& host_package_build,
+                                const bool keep_archives) {
+  std::vector<std::string> host_package_files = CF_EXPECT(
+      DownloadHostPackage(build_api, build, target_dir, keep_archives));
+  CF_EXPECT(AddFilesToConfig(host_package_build != ""
+                                 ? FileSource::HOST_PACKAGE_BUILD
+                                 : FileSource::DEFAULT_BUILD,
+                             build, host_package_files, config, target_dir));
+  return {};
+}
+
+BuildApi GetBuildApi(const BuildApiFlags& flags) {
+  auto resolver =
+      flags.external_dns_resolver ? GetEntDnsResolve : NameResolver();
+  std::unique_ptr<HttpClient> curl = HttpClient::CurlClient(resolver);
+  std::unique_ptr<HttpClient> retrying_http_client =
+      HttpClient::ServerErrorRetryClient(*curl, 10,
+                                         std::chrono::milliseconds(5000));
+  std::unique_ptr<CredentialSource> credential_source;
+  if (auto crds = TryOpenServiceAccountFile(*curl, flags.credential_source)) {
+    credential_source = std::move(crds);
+  } else if (flags.credential_source == "gce") {
+    credential_source =
+        GceMetadataCredentialSource::make(*retrying_http_client);
+  } else if (flags.credential_source == "") {
+    std::string file = StringFromEnv("HOME", ".") + "/.acloud_oauth2.dat";
+    LOG(VERBOSE) << "Probing acloud credentials at " << file;
+    if (FileExists(file)) {
+      std::ifstream stream(file);
+      auto attempt_load =
+          RefreshCredentialSource::FromOauth2ClientFile(*curl, stream);
+      if (attempt_load.ok()) {
+        credential_source.reset(
+            new RefreshCredentialSource(std::move(*attempt_load)));
+      } else {
+        LOG(VERBOSE) << "Failed to load acloud credentials: "
+                     << attempt_load.error().Trace();
+      }
+    } else {
+      LOG(INFO) << "\"" << file << "\" missing, running without credentials";
+    }
+  } else {
+    credential_source = FixedCredentialSource::make(flags.credential_source);
+  }
+  return BuildApi(std::move(retrying_http_client), std::move(curl),
+                  std::move(credential_source), flags.api_key,
+                  flags.wait_retry_period);
+}
+
+Result<std::optional<Build>> GetBuildHelper(BuildApi& build_api,
+                                            const std::string& build_source,
+                                            const std::string& build_target) {
+  if (build_source == "") {
+    return std::nullopt;
+  }
+  return CF_EXPECT(build_api.ArgumentToBuild(build_source, build_target),
+                   "Unable to create build from source ("
+                       << build_source << ") and target (" << build_target
+                       << ")");
+}
+
+Result<Builds> GetBuildsFromSources(BuildApi& build_api,
+                                    const BuildSourceFlags& build_sources) {
+  std::optional<Build> default_build = CF_EXPECT(GetBuildHelper(
+      build_api, build_sources.default_build, DEFAULT_BUILD_TARGET));
+  CF_EXPECT(default_build.has_value());
+  Builds result = Builds{
+      .default_build = default_build.value(),
+      .system = CF_EXPECT(GetBuildHelper(build_api, build_sources.system_build,
+                                         DEFAULT_BUILD_TARGET)),
+      .kernel = CF_EXPECT(
+          GetBuildHelper(build_api, build_sources.kernel_build, KERNEL)),
+      .boot = CF_EXPECT(GetBuildHelper(build_api, build_sources.boot_build,
+                                       "gki_x86_64-user")),
+      .bootloader = CF_EXPECT(GetBuildHelper(
+          build_api, build_sources.bootloader_build, "u-boot_crosvm_x86_64")),
+      .otatools = CF_EXPECT(GetBuildHelper(
+          build_api, build_sources.otatools_build, DEFAULT_BUILD_TARGET)),
+      .host_package = CF_EXPECT(GetBuildHelper(
+          build_api, build_sources.host_package_build, DEFAULT_BUILD_TARGET)),
+  };
+  if (!result.otatools.has_value()) {
+    if (result.system.has_value()) {
+      result.otatools = result.system.value();
+    } else if (result.kernel.has_value()) {
+      result.otatools = result.default_build;
+    }
+  }
+  if (!result.host_package.has_value()) {
+    result.host_package = result.default_build;
+  }
+  return {result};
+}
+
+}  // namespace
+
+Result<void> FetchCvdMain(int argc, char** argv) {
+  ::android::base::InitLogging(argv, android::base::StderrLogger);
+  const FetchFlags flags = CF_EXPECT(GetFlagValues(argc, argv));
+
+#ifdef __BIONIC__
+  // TODO(schuffelen): Find a better way to deal with tzdata
+  setenv("ANDROID_TZDATA_ROOT", "/", /* overwrite */ 0);
+  setenv("ANDROID_ROOT", "/", /* overwrite */ 0);
+#endif
+
+  std::string target_dir = AbsolutePath(flags.target_directory);
+  CF_EXPECT(EnsureDirectoryExists(target_dir, RWX_ALL_MODE));
+  FetcherConfig config;
+  curl_global_init(CURL_GLOBAL_DEFAULT);
+  {
+    BuildApi build_api = GetBuildApi(flags.build_api_flags);
+    const Builds builds =
+        CF_EXPECT(GetBuildsFromSources(build_api, flags.build_source_flags));
+
+    auto process_pkg_ret = std::async(
+        std::launch::async, ProcessHostPackage, std::ref(build_api),
+        std::cref(builds.host_package.value()), std::cref(target_dir), &config,
+        std::cref(flags.build_source_flags.host_package_build),
+        std::cref(flags.keep_downloaded_archives));
+
+    if (builds.otatools.has_value()) {
+      std::vector<std::string> ota_tools_files = CF_EXPECT(
+          DownloadOtaTools(build_api, builds.otatools.value(), target_dir,
+                           flags.keep_downloaded_archives));
+      CF_EXPECT(AddFilesToConfig(FileSource::DEFAULT_BUILD,
+                                 builds.default_build, ota_tools_files, &config,
+                                 target_dir));
+    }
+    if (flags.download_flags.download_img_zip) {
+      std::string local_path = CF_EXPECT(
+          DownloadImageZip(build_api, builds.default_build, target_dir));
+      std::vector<std::string> image_files = CF_EXPECT(ExtractArchiveContents(
+          local_path, target_dir, flags.keep_downloaded_archives));
+      LOG(INFO) << "Adding img-zip files for default build";
+      for (auto& file : image_files) {
+        LOG(INFO) << file;
+      }
+      CF_EXPECT(AddFilesToConfig(FileSource::DEFAULT_BUILD,
+                                 builds.default_build, image_files, &config,
+                                 target_dir));
+    }
+    if (builds.system.has_value() ||
+        flags.download_flags.download_target_files_zip) {
+      std::string default_target_dir = target_dir + "/default";
+      CF_EXPECT(EnsureDirectoryExists(default_target_dir), RWX_ALL_MODE);
+      std::string target_files = CF_EXPECT(DownloadTargetFiles(
+          build_api, builds.default_build, default_target_dir));
+      LOG(INFO) << "Adding target files for default build";
+      CF_EXPECT(AddFilesToConfig(FileSource::DEFAULT_BUILD,
+                                 builds.default_build, {target_files}, &config,
+                                 target_dir));
+    }
+
+    if (builds.system.has_value()) {
+      bool system_in_img_zip = true;
+      if (flags.download_flags.download_img_zip) {
+        auto image_files = DownloadImages(
+            build_api, builds.system.value(), target_dir,
+            {"system.img", "product.img"}, flags.keep_downloaded_archives);
+        if (!image_files.ok() || image_files->empty()) {
+          LOG(INFO)
+              << "Could not find system image for " << builds.system.value()
+              << "in the img zip. Assuming a super image build, which will "
+              << "get the system image from the target zip.";
+          system_in_img_zip = false;
+        } else {
+          LOG(INFO) << "Adding img-zip files for system build";
+          CF_EXPECT(AddFilesToConfig(FileSource::SYSTEM_BUILD,
+                                     builds.system.value(), *image_files,
+                                     &config, target_dir, true));
+        }
+      }
+      std::string system_target_dir = target_dir + "/system";
+      CF_EXPECT(EnsureDirectoryExists(system_target_dir, RWX_ALL_MODE));
+      std::string target_files = CF_EXPECT(DownloadTargetFiles(
+          build_api, builds.system.value(), system_target_dir));
+      CF_EXPECT(AddFilesToConfig(FileSource::SYSTEM_BUILD,
+                                 builds.system.value(), {target_files}, &config,
+                                 target_dir));
+      if (!system_in_img_zip) {
+        std::string extracted_system = CF_EXPECT(
+            ExtractImage(target_files, target_dir, "IMAGES/system.img",
+                         flags.keep_downloaded_archives));
+        CF_EXPECT(RenameFile(extracted_system, target_dir + "/system.img"));
+
+        Result<std::string> extracted_product_result =
+            ExtractImage(target_files, target_dir, "IMAGES/product.img",
+                         flags.keep_downloaded_archives);
+        if (extracted_product_result.ok()) {
+          CF_EXPECT(RenameFile(extracted_product_result.value(),
+                               target_dir + "/product.img"));
+        }
+
+        Result<std::string> extracted_system_ext_result =
+            ExtractImage(target_files, target_dir, "IMAGES/system_ext.img",
+                         flags.keep_downloaded_archives);
+        if (extracted_system_ext_result.ok()) {
+          CF_EXPECT(RenameFile(extracted_system_ext_result.value(),
+                               target_dir + "/system_ext.img"));
+        }
+
+        Result<std::string> extracted_vbmeta_system =
+            ExtractImage(target_files, target_dir, "IMAGES/vbmeta_system.img",
+                         flags.keep_downloaded_archives);
+        if (extracted_vbmeta_system.ok()) {
+          CF_EXPECT(RenameFile(extracted_vbmeta_system.value(),
+                               target_dir + "/vbmeta_system.img"));
+        }
+        // This should technically call AddFilesToConfig with the produced
+        // files, but it will conflict with the ones produced from the default
+        // system image and pie doesn't care about the produced file list
+        // anyway.
+      }
+    }
+
+    if (builds.kernel.has_value()) {
+      std::string local_path = target_dir + "/kernel";
+      // If the kernel is from an arm/aarch64 build, the artifact will be called
+      // Image.
+      std::string kernel_filepath = CF_EXPECT(build_api.DownloadFileWithBackup(
+          builds.kernel.value(), target_dir, "bzImage", "Image"));
+      RenameFile(kernel_filepath, local_path);
+      CF_EXPECT(AddFilesToConfig(FileSource::KERNEL_BUILD,
+                                 builds.kernel.value(), {local_path}, &config,
+                                 target_dir));
+
+      // Certain kernel builds do not have corresponding ramdisks.
+      Result<std::string> initramfs_img_result = build_api.DownloadFile(
+          builds.kernel.value(), target_dir, "initramfs.img");
+      if (initramfs_img_result.ok()) {
+        CF_EXPECT(AddFilesToConfig(
+            FileSource::KERNEL_BUILD, builds.kernel.value(),
+            {initramfs_img_result.value()}, &config, target_dir));
+      }
+    }
+
+    if (builds.boot.has_value()) {
+      std::vector<std::string> boot_files = CF_EXPECT(DownloadBoot(
+          build_api, builds.boot.value(), flags.download_flags.boot_artifact,
+          target_dir, flags.keep_downloaded_archives));
+      CF_EXPECT(AddFilesToConfig(FileSource::BOOT_BUILD, builds.boot.value(),
+                                 boot_files, &config, target_dir, true));
+    }
+
+    // Some older builds might not have misc_info.txt, so permit errors on
+    // fetching misc_info.txt
+    auto misc_info =
+        DownloadMiscInfo(build_api, builds.default_build, target_dir);
+    if (misc_info.ok()) {
+      CF_EXPECT(AddFilesToConfig(FileSource::DEFAULT_BUILD,
+                                 builds.default_build, {misc_info.value()},
+                                 &config, target_dir, true));
+    }
+
+    if (builds.bootloader.has_value()) {
+      std::string local_path = target_dir + "/bootloader";
+      // If the bootloader is from an arm/aarch64 build, the artifact will be of
+      // filetype bin.
+      std::string bootloader_filepath =
+          CF_EXPECT(build_api.DownloadFileWithBackup(builds.bootloader.value(),
+                                                     target_dir, "u-boot.rom",
+                                                     "u-boot.bin"));
+      RenameFile(bootloader_filepath, local_path);
+      CF_EXPECT(AddFilesToConfig(FileSource::BOOTLOADER_BUILD,
+                                 builds.bootloader.value(), {local_path},
+                                 &config, target_dir, true));
+    }
+
+    // Wait for ProcessHostPackage to return.
+    CF_EXPECT(process_pkg_ret.get(),
+              "Could not download host package for " << builds.default_build);
+  }
+  curl_global_cleanup();
+
+  // Due to constraints of the build system, artifacts intentionally cannot
+  // determine their own build id. So it's unclear which build number fetch_cvd
+  // itself was built at.
+  // https://android.googlesource.com/platform/build/+/979c9f3/Changes.md#build_number
+  std::string fetcher_path = target_dir + "/fetcher_config.json";
+  CF_EXPECT(AddFilesToConfig(GENERATED, DeviceBuild("", ""), {fetcher_path},
+                             &config, target_dir));
+  config.SaveToFile(fetcher_path);
+
+  for (const auto& file : config.get_cvd_files()) {
+    std::cout << target_dir << "/" << file.second.file_path << "\n";
+  }
+  std::cout << std::flush;
+
+  return {};
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/fetch/fetch_cvd.h b/host/commands/cvd/fetch/fetch_cvd.h
new file mode 100644
index 0000000..e99395f
--- /dev/null
+++ b/host/commands/cvd/fetch/fetch_cvd.h
@@ -0,0 +1,21 @@
+//
+// Copyright (C) 2019 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "common/libs/utils/result.h"
+
+namespace cuttlefish {
+
+Result<void> FetchCvdMain(int argc, char** argv);
+}
diff --git a/host/commands/cvd/flag.cpp b/host/commands/cvd/flag.cpp
new file mode 100644
index 0000000..cbd32f5
--- /dev/null
+++ b/host/commands/cvd/flag.cpp
@@ -0,0 +1,146 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/commands/cvd/flag.h"
+
+#include "host/commands/cvd/common_utils.h"
+
+namespace cuttlefish {
+
+Result<std::string> CvdFlagProxy::Name() const {
+  CF_EXPECT(GetType() != FlagType::kUnknown, "Unsupported flag type");
+  auto decode_name = Overload{
+      [](auto&& param) -> std::string { return param.Name(); },
+  };
+  return std::visit(decode_name, flag_);
+}
+
+CvdFlagProxy::FlagType CvdFlagProxy::GetType() const {
+  auto decode_type = Overload{
+      [](const CvdFlag<bool>&) -> FlagType { return FlagType::kBool; },
+      [](const CvdFlag<std::int32_t>&) -> FlagType { return FlagType::kInt32; },
+      [](const CvdFlag<std::string>&) -> FlagType { return FlagType::kString; },
+      [](auto) -> FlagType { return FlagType::kUnknown; },
+  };
+  return std::visit(decode_type, flag_);
+}
+
+Result<bool> CvdFlagProxy::HasDefaultValue() const {
+  CF_EXPECT(GetType() != FlagType::kUnknown, "Unsupported flag type of typeid");
+  auto decode_default_value = Overload{
+      [](auto&& flag) -> bool { return flag.HasDefaultValue(); },
+  };
+  return std::visit(decode_default_value, flag_);
+}
+
+std::vector<CvdFlagProxy> FlagCollection::Flags() const {
+  std::vector<CvdFlagProxy> flags;
+  flags.reserve(name_flag_map_.size());
+  for (const auto& [name, flag] : name_flag_map_) {
+    flags.push_back(flag);
+  }
+  return flags;
+}
+
+template <typename T>
+static Result<std::optional<CvdFlagProxy::ValueVariant>> FilterKnownTypeFlag(
+    const CvdFlag<T>& flag, cvd_common::Args& args) {
+  std::optional<T> opt = CF_EXPECT(flag.FilterFlag(args));
+  if (!opt) {
+    return std::nullopt;
+  }
+  CvdFlagProxy::ValueVariant value_variant = *opt;
+  return value_variant;
+}
+
+Result<std::optional<CvdFlagProxy::ValueVariant>> CvdFlagProxy::FilterFlag(
+    cvd_common::Args& args) const {
+  CF_EXPECT(GetType() != FlagType::kUnknown, "Unsupported flag type of typeid");
+  std::optional<CvdFlagProxy::ValueVariant> output;
+  auto filter_flag = Overload{
+      [&args](const CvdFlag<std::int32_t>& int32_t_flag)
+          -> Result<std::optional<ValueVariant>> {
+        return FilterKnownTypeFlag(int32_t_flag, args);
+      },
+      [&args](const CvdFlag<bool>& bool_flag)
+          -> Result<std::optional<ValueVariant>> {
+        return FilterKnownTypeFlag(bool_flag, args);
+      },
+      [&args](const CvdFlag<std::string>& string_flag)
+          -> Result<std::optional<ValueVariant>> {
+        return FilterKnownTypeFlag(string_flag, args);
+      },
+      [](auto) -> Result<std::optional<ValueVariant>> {
+        return CF_ERR("Invalid type is passed to FlagCollection::FilterFlags");
+      },
+  };
+  output = CF_EXPECT(std::visit(filter_flag, flag_));
+  return output;
+}
+
+Result<std::unordered_map<std::string, FlagCollection::FlagValuePair>>
+FlagCollection::FilterFlags(cvd_common::Args& args) const {
+  std::unordered_map<std::string, FlagCollection::FlagValuePair> output;
+  for (const auto& [name, flag_proxy] : name_flag_map_) {
+    auto value_opt = CF_EXPECT(flag_proxy.FilterFlag(args));
+    if (!value_opt) {
+      continue;
+    }
+    output.emplace(name,
+                   FlagValuePair{.flag = flag_proxy, .value = *value_opt});
+  }
+  return output;
+}
+
+Result<std::unordered_map<std::string, FlagCollection::FlagValuePair>>
+FlagCollection::CalculateFlags(cvd_common::Args& args) const {
+  auto output = CF_EXPECT(FilterFlags(args));
+  for (const auto& [name, flag_proxy] : name_flag_map_) {
+    if (Contains(output, name)) {
+      // the flag was given with a value, there is no need to update it
+      continue;
+    }
+    if (!CF_EXPECT(flag_proxy.HasDefaultValue())) {
+      continue;
+    }
+    switch (flag_proxy.GetType()) {
+      case CvdFlagProxy::FlagType::kBool:
+        output.emplace(
+            name,
+            FlagValuePair{.flag = flag_proxy,
+                          .value = CF_EXPECT(flag_proxy.DefaultValue<bool>())});
+        break;
+      case CvdFlagProxy::FlagType::kInt32:
+        output.emplace(
+            name, FlagValuePair{.flag = flag_proxy,
+                                .value = CF_EXPECT(
+                                    flag_proxy.DefaultValue<std::int32_t>())});
+        break;
+      case CvdFlagProxy::FlagType::kString:
+        output.emplace(
+            name, FlagValuePair{.flag = flag_proxy,
+                                .value = CF_EXPECT(
+                                    flag_proxy.DefaultValue<std::string>())});
+        break;
+      default:
+        return CF_ERR("Unsupported FlagType in "
+                      << "--" << name);
+    }
+  }
+  return output;
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/flag.h b/host/commands/cvd/flag.h
new file mode 100644
index 0000000..4d1d7ad
--- /dev/null
+++ b/host/commands/cvd/flag.h
@@ -0,0 +1,286 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <cstdint>
+#include <functional>
+#include <memory>
+#include <optional>
+#include <string>
+#include <unordered_map>
+#include <variant>
+#include <vector>
+
+#include "common/libs/utils/contains.h"
+#include "common/libs/utils/flag_parser.h"
+#include "common/libs/utils/result.h"
+#include "host/commands/cvd/types.h"
+
+namespace cuttlefish {
+
+/**
+ * Data structure to represent cvd user-facing flags
+ *
+ * Flag in flag_parser.h is more on parsing. gflags library would be
+ * slowly deprecated. The cvd driver and selector flags are a specification for
+ * a user-facing flag.
+ */
+template <typename T>
+class CvdFlag {
+ public:
+  using GflagFactoryCallback =
+      std::function<Flag(const std::string& name, T& value_out)>;
+  CvdFlag(const std::string& name)
+      : name_(name),
+        gflag_factory_cb([](const std::string& name, T& value_out) {
+          return GflagsCompatFlag(name, value_out);
+        }) {}
+
+  CvdFlag(const std::string& name, const T& default_value)
+      : name_(name),
+        default_value_(default_value),
+        gflag_factory_cb([](const std::string& name, T& value_out) {
+          return GflagsCompatFlag(name, value_out);
+        }) {}
+
+  std::string Name() const { return name_; }
+  std::string HelpMessage() const { return help_msg_; }
+  CvdFlag& SetHelpMessage(const std::string& help_msg) & {
+    help_msg_ = help_msg;
+    return *this;
+  }
+  CvdFlag SetHelpMessage(const std::string& help_msg) && {
+    help_msg_ = help_msg;
+    return *this;
+  }
+  bool HasDefaultValue() const { return default_value_ != std::nullopt; }
+  Result<T> DefaultValue() const {
+    CF_EXPECT(HasDefaultValue());
+    return *default_value_;
+  }
+
+  CvdFlag& SetGflagFactory(GflagFactoryCallback factory) & {
+    gflag_factory_cb = std::move(factory);
+    return *this;
+  }
+  CvdFlag SetGflagFactory(GflagFactoryCallback factory) && {
+    gflag_factory_cb = std::move(factory);
+    return *this;
+  }
+
+  // returns CF_ERR if parsing error,
+  // returns std::nullopt if parsing was okay but the flag wasn't given
+  Result<std::optional<T>> FilterFlag(cvd_common::Args& args) const {
+    const int args_initial_size = args.size();
+    if (args_initial_size == 0) {
+      return std::nullopt;
+    }
+    T value;
+    CF_EXPECT(ParseFlags({gflag_factory_cb(name_, value)}, args),
+              "Failed to parse --" << name_);
+    if (args.size() == args_initial_size) {
+      // not consumed
+      return std::nullopt;
+    }
+    return value;
+  }
+
+  // Parses the arguments. If flag is given, returns the parsed value. If not,
+  // returns the default value if any. If no default value, it returns CF_ERR.
+  Result<T> CalculateFlag(cvd_common::Args& args) const {
+    auto value_opt = CF_EXPECT(FilterFlag(args));
+    if (!value_opt) {
+      CF_EXPECT(default_value_ != std::nullopt);
+      value_opt = default_value_;
+    }
+    return *value_opt;
+  }
+
+ private:
+  const std::string name_;
+  std::string help_msg_;
+  std::optional<T> default_value_;
+  /**
+   * A callback function to generate Flag defined in
+   * common/libs/utils/flag_parser.h. The name is this CvdFlag's name.
+   * The value is a buffer that is kept in this object
+   */
+  GflagFactoryCallback gflag_factory_cb;
+};
+
+class CvdFlagProxy {
+  friend class FlagCollection;
+
+ public:
+  enum class FlagType : std::uint32_t {
+    kUnknown = 0,
+    kBool,
+    kInt32,
+    kString,
+  };
+
+  static std::string ToString(const FlagType flag_type) {
+    switch (flag_type) {
+      case FlagType::kUnknown:
+        return "kUnknown";
+      case FlagType::kBool:
+        return "bool";
+      case FlagType::kInt32:
+        return "std::int32_t";
+      case FlagType::kString:
+        return "std::string";
+    }
+  }
+
+  template <typename T>
+  CvdFlagProxy(CvdFlag<T>&& flag) : flag_{std::move(flag)} {}
+
+  template <typename T>
+  const CvdFlag<T>* GetFlag() const {
+    return std::get_if<CvdFlag<T>>(&flag_);
+  }
+
+  template <typename T>
+  CvdFlag<T>* GetFlag() {
+    return std::get_if<CvdFlag<T>>(&flag_);
+  }
+
+  /*
+   * If the actual type of flag_ is not handled by CvdFlagProxy, it is a
+   * developer error, and Name() and HasDefaultValue() will return
+   * CF_ERR
+   */
+  Result<std::string> Name() const;
+  Result<bool> HasDefaultValue() const;
+
+  FlagType GetType() const;
+
+  template <typename T>
+  Result<T> DefaultValue() const {
+    const bool has_default_value = CF_EXPECT(HasDefaultValue());
+    CF_EXPECT(has_default_value == true);
+    const auto* ptr = CF_EXPECT(std::get_if<CvdFlag<T>>(&flag_));
+    CF_EXPECT(ptr != nullptr);
+    return ptr->DefaultValue();
+  }
+
+  // returns CF_ERR if parsing error,
+  // returns std::nullopt if parsing was okay but the flag wasn't given
+  template <typename T>
+  Result<std::optional<T>> FilterFlag(cvd_common::Args& args) const {
+    std::optional<T> output;
+    const auto* ptr = CF_EXPECT(std::get_if<CvdFlag<T>>(&flag_));
+    CF_EXPECT(ptr != nullptr);
+    output = CF_EXPECT(ptr->FilterFlag(args));
+    return output;
+  }
+
+  // Parses the arguments. If flag is given, returns the parsed value. If not,
+  // returns the default value if any. If no default value, it returns CF_ERR.
+  template <typename T>
+  Result<T> CalculateFlag(cvd_common::Args& args) const {
+    bool has_default_value = CF_EXPECT(HasDefaultValue());
+    CF_EXPECT(has_default_value == true);
+    const auto* ptr = CF_EXPECT(std::get_if<CvdFlag<T>>(&flag_));
+    CF_EXPECT(ptr != nullptr);
+    T output = CF_EXPECT(ptr->CalculateFlag(args));
+    return output;
+  }
+
+  using ValueVariant = std::variant<std::int32_t, bool, std::string>;
+
+  // Returns std::nullopt when the parsing goes okay but the flag wasn't given
+  // Returns ValueVariant when the flag was given in args
+  // Returns CF_ERR when the parsing failed or the type is not supported
+  Result<std::optional<ValueVariant>> FilterFlag(cvd_common::Args& args) const;
+
+ private:
+  std::variant<CvdFlag<std::int32_t>, CvdFlag<bool>, CvdFlag<std::string>>
+      flag_;
+};
+
+class FlagCollection {
+ public:
+  using ValueVariant = CvdFlagProxy::ValueVariant;
+
+  Result<void> EnrollFlag(CvdFlagProxy&& flag) {
+    auto name = CF_EXPECT(flag.Name());
+    CF_EXPECT(!Contains(name_flag_map_, name),
+              name << " is already registered.");
+    name_flag_map_.emplace(name, std::move(flag));
+    return {};
+  }
+
+  template <typename T>
+  Result<void> EnrollFlag(CvdFlag<T>&& flag) {
+    CF_EXPECT(EnrollFlag(CvdFlagProxy(std::move(flag))));
+    return {};
+  }
+
+  Result<CvdFlagProxy> GetFlag(const std::string& name) const {
+    const auto itr = name_flag_map_.find(name);
+    CF_EXPECT(itr != name_flag_map_.end(),
+              "Flag \"" << name << "\" is not found.");
+    const CvdFlagProxy& flag_proxy = itr->second;
+    return flag_proxy;
+  }
+
+  std::vector<CvdFlagProxy> Flags() const;
+
+  struct FlagValuePair {
+    ValueVariant value;
+    CvdFlagProxy flag;
+  };
+
+  /* does not consider default values
+   * so, if no default value and the flag wasn't given, it won't be found
+   * in the returned map
+   */
+  Result<std::unordered_map<std::string, FlagValuePair>> FilterFlags(
+      cvd_common::Args& args) const;
+
+  /* considers default values
+   * so, if the flag wasn't given, the default value will be used to fill
+   * out the returned map. If a default value isn't available and the flag
+   * isn't given either, the entry won't be in the returned map
+   */
+  Result<std::unordered_map<std::string, FlagValuePair>> CalculateFlags(
+      cvd_common::Args& args) const;
+
+  template <typename T>
+  static Result<T> GetValue(const ValueVariant& value_variant) {
+    auto* value_ptr = std::get_if<T>(std::addressof(value_variant));
+    CF_EXPECT(value_ptr != nullptr,
+              "GetValue template function was instantiated with a wrong type.");
+    return *value_ptr;
+  }
+
+  template <typename T>
+  static Result<T> GetValue(const FlagValuePair& flag_and_value) {
+    std::string flag_type_string =
+        CvdFlagProxy::ToString(flag_and_value.flag.GetType());
+    auto* value_ptr = std::get_if<T>(std::addressof(flag_and_value.value));
+    CF_EXPECT(value_ptr != nullptr,
+              "The actual flag type is " << flag_type_string);
+    return *value_ptr;
+  }
+
+ private:
+  std::unordered_map<std::string, CvdFlagProxy> name_flag_map_;
+};
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/frontline_parser.cpp b/host/commands/cvd/frontline_parser.cpp
new file mode 100644
index 0000000..6c33309
--- /dev/null
+++ b/host/commands/cvd/frontline_parser.cpp
@@ -0,0 +1,118 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/commands/cvd/frontline_parser.h"
+
+#include <sstream>
+#include <type_traits>
+#include <vector>
+
+#include <android-base/file.h>
+#include <android-base/strings.h>
+
+#include "common/libs/utils/flag_parser.h"
+#include "host/commands/cvd/selector/selector_constants.h"
+
+namespace cuttlefish {
+
+Result<std::unique_ptr<FrontlineParser>> FrontlineParser::Parse(
+    ParserParam param) {
+  CF_EXPECT(!param.all_args.empty());
+  FrontlineParser* frontline_parser = new FrontlineParser(param);
+  CF_EXPECT(frontline_parser != nullptr,
+            "Memory allocation for FrontlineParser failed.");
+  CF_EXPECT(frontline_parser->Separate());
+  return std::unique_ptr<FrontlineParser>(frontline_parser);
+}
+
+FrontlineParser::FrontlineParser(const ParserParam& param)
+    : server_supported_subcmds_{param.server_supported_subcmds},
+      all_args_(param.all_args),
+      internal_cmds_(param.internal_cmds),
+      cvd_flags_(param.cvd_flags) {}
+
+Result<void> FrontlineParser::Separate() {
+  arguments_separator_ = CF_EXPECT(CallSeparator());
+  return {};
+}
+
+Result<cvd_common::Args> FrontlineParser::ValidSubcmdsList() {
+  cvd_common::Args valid_subcmds(server_supported_subcmds_);
+  std::copy(internal_cmds_.cbegin(), internal_cmds_.cend(),
+            std::back_inserter(valid_subcmds));
+  return valid_subcmds;
+}
+
+static Result<std::unordered_set<std::string>> BoolFlagNames(
+    const std::vector<CvdFlagProxy>& flags) {
+  std::unordered_set<std::string> output;
+  for (const auto& flag : flags) {
+    if (flag.GetType() == CvdFlagProxy::FlagType::kBool) {
+      output.insert(CF_EXPECT(flag.Name()));
+    }
+  }
+  return output;
+}
+
+static Result<std::unordered_set<std::string>> ValueFlagNames(
+    const std::vector<CvdFlagProxy>& flags) {
+  std::unordered_set<std::string> output;
+  for (const auto& flag : flags) {
+    if (flag.GetType() == CvdFlagProxy::FlagType::kInt32 ||
+        flag.GetType() == CvdFlagProxy::FlagType::kString) {
+      output.insert(CF_EXPECT(flag.Name()));
+    }
+  }
+  return output;
+}
+
+Result<std::unique_ptr<selector::ArgumentsSeparator>>
+FrontlineParser::CallSeparator() {
+  auto valid_subcmds_vector = CF_EXPECT(ValidSubcmdsList());
+  std::unordered_set<std::string> valid_subcmds{valid_subcmds_vector.begin(),
+                                                valid_subcmds_vector.end()};
+  auto cvd_flags = cvd_flags_.Flags();
+
+  auto known_bool_flags = CF_EXPECT(BoolFlagNames(cvd_flags));
+  auto known_value_flags = CF_EXPECT(ValueFlagNames(cvd_flags));
+
+  ArgumentsSeparator::FlagsRegistration flag_registration{
+      .known_boolean_flags = known_bool_flags,
+      .known_value_flags = known_value_flags,
+      .valid_subcommands = valid_subcmds};
+  auto arguments_separator =
+      CF_EXPECT(ArgumentsSeparator::Parse(flag_registration, all_args_));
+  CF_EXPECT(arguments_separator != nullptr);
+  return arguments_separator;
+}
+
+const std::string& FrontlineParser::ProgPath() const {
+  return arguments_separator_->ProgPath();
+}
+
+std::optional<std::string> FrontlineParser::SubCmd() const {
+  return arguments_separator_->SubCmd();
+}
+
+const cvd_common::Args& FrontlineParser::SubCmdArgs() const {
+  return arguments_separator_->SubCmdArgs();
+}
+
+const cvd_common::Args& FrontlineParser::CvdArgs() const {
+  return arguments_separator_->CvdArgs();
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/frontline_parser.h b/host/commands/cvd/frontline_parser.h
new file mode 100644
index 0000000..5becead
--- /dev/null
+++ b/host/commands/cvd/frontline_parser.h
@@ -0,0 +1,90 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <memory>
+#include <optional>
+#include <string>
+#include <unordered_map>
+#include <unordered_set>
+
+#include "common/libs/utils/result.h"
+#include "host/commands/cvd/client.h"
+#include "host/commands/cvd/flag.h"
+#include "host/commands/cvd/selector/arguments_separator.h"
+#include "host/commands/cvd/types.h"
+
+namespace cuttlefish {
+
+/* the very first command line parser
+ *
+ * Being aware of valid subcommands and cvd-specific commands, it will
+ * separate the command line arguments into:
+ *
+ *  1. program path/name
+ *  2. cvd-specific arguments
+ *     a) selector flags
+ *     b) non-selector flags
+ *  3. subcommand
+ *  4. subcommand arguments
+ *
+ * This is currently on the client side but will be moved to the server
+ * side.
+ */
+class FrontlineParser {
+  using ArgumentsSeparator = selector::ArgumentsSeparator;
+
+ public:
+  struct ParserParam {
+    // commands supported by the server
+    std::vector<std::string> server_supported_subcmds;
+    // commands supported by the client itself
+    std::vector<std::string> internal_cmds;
+    cvd_common::Args all_args;
+    FlagCollection cvd_flags;
+  };
+
+  // This call must guarantee all public methods will be valid
+  static Result<std::unique_ptr<FrontlineParser>> Parse(ParserParam param);
+
+  const std::string& ProgPath() const;
+  std::optional<std::string> SubCmd() const;
+  const cvd_common::Args& SubCmdArgs() const;
+  const cvd_common::Args& CvdArgs() const;
+
+ private:
+  FrontlineParser(const ParserParam& parser);
+
+  // internal workers in order
+  Result<void> Separate();
+  Result<cvd_common::Args> ValidSubcmdsList();
+  Result<std::unique_ptr<ArgumentsSeparator>> CallSeparator();
+  struct FilterOutput {
+    bool clean;
+    bool help;
+    cvd_common::Args selector_args;
+  };
+  Result<FilterOutput> FilterNonSelectorArgs();
+
+  cvd_common::Args server_supported_subcmds_;
+  const cvd_common::Args all_args_;
+  const std::vector<std::string> internal_cmds_;
+  FlagCollection cvd_flags_;
+  std::unique_ptr<ArgumentsSeparator> arguments_separator_;
+};
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/handle_reset.cpp b/host/commands/cvd/handle_reset.cpp
new file mode 100644
index 0000000..df9f4de
--- /dev/null
+++ b/host/commands/cvd/handle_reset.cpp
@@ -0,0 +1,187 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/commands/cvd/handle_reset.h"
+
+#include <errno.h>
+#include <semaphore.h>
+#include <sys/mman.h>
+#include <sys/time.h>
+#include <sys/wait.h>
+#include <unistd.h>
+
+#include <chrono>
+#include <iostream>
+#include <string>
+#include <thread>
+
+#include "common/libs/utils/flag_parser.h"
+#include "common/libs/utils/subprocess.h"
+#include "host/commands/cvd/reset_client_utils.h"
+
+namespace cuttlefish {
+
+struct ParsedFlags {
+  bool is_help;
+  bool clean_runtime_dir;
+  bool device_by_cvd_only;
+  bool is_confirmed_by_flag;
+};
+
+static Result<ParsedFlags> ParseResetFlags(cvd_common::Args subcmd_args) {
+  if (subcmd_args.size() > 2 && subcmd_args.at(2) == "help") {
+    // unfortunately, {FlagAliasMode::kFlagExact, "help"} is not allowed
+    subcmd_args[2] = "--help";
+  }
+
+  bool is_help = false;
+  bool clean_runtime_dir = true;
+  bool device_by_cvd_only = false;
+  bool is_confirmed_by_flag = false;
+  Flag y_flag = Flag()
+                    .Alias({FlagAliasMode::kFlagExact, "-y"})
+                    .Alias({FlagAliasMode::kFlagExact, "--yes"})
+                    .Setter([&is_confirmed_by_flag](const FlagMatch&) {
+                      is_confirmed_by_flag = true;
+                      return true;
+                    });
+  Flag help_flag = Flag()
+                       .Alias({FlagAliasMode::kFlagExact, "-h"})
+                       .Alias({FlagAliasMode::kFlagExact, "--help"})
+                       .Setter([&is_help](const FlagMatch&) {
+                         is_help = true;
+                         return true;
+                       });
+  std::vector<Flag> flags{
+      GflagsCompatFlag("device-by-cvd-only", device_by_cvd_only), y_flag,
+      GflagsCompatFlag("clean-runtime-dir", clean_runtime_dir), help_flag,
+      UnexpectedArgumentGuard()};
+  CF_EXPECT(ParseFlags(flags, subcmd_args));
+
+  return ParsedFlags{.is_help = is_help,
+                     .clean_runtime_dir = clean_runtime_dir,
+                     .device_by_cvd_only = device_by_cvd_only,
+                     .is_confirmed_by_flag = is_confirmed_by_flag};
+}
+
+static bool GetUserConfirm() {
+  std::cout << "Are you sure to reset all the devices, runtime files, "
+            << "and the cvd server if any [y/n]? ";
+  std::string user_confirm;
+  std::getline(std::cin, user_confirm);
+  std::transform(user_confirm.begin(), user_confirm.end(), user_confirm.begin(),
+                 ::tolower);
+  return (user_confirm == "y" || user_confirm == "yes");
+}
+
+/*
+ * Try client.StopCvdServer(), and wait for a while.
+ *
+ * There should be two threads or processes. One is to call
+ * "StopCvdServer()," which could hang forever. The other is waiting
+ * for the thread/process, and should kill it after timeout.
+ *
+ * In that sense, a process is easy to kill in the middle (kill -9).
+ *
+ */
+static Result<void> TimedKillCvdServer(CvdClient& client, const int timeout) {
+  sem_t* binary_sem = (sem_t*)mmap(NULL, sizeof(sem_t), PROT_READ | PROT_WRITE,
+                                   MAP_ANONYMOUS | MAP_SHARED, -1, 0);
+  CF_EXPECT(binary_sem != (sem_t*)MAP_FAILED,
+            "Failed to allocate shm for inter-process semaphore."
+                << "(errno: " << errno << ")");
+  CF_EXPECT_EQ(sem_init(binary_sem, 1, 0), 0,
+               "Failed to initialize inter-process semaphore"
+                   << "(errno: " << errno << ")");
+  pid_t pid = fork();
+  CF_EXPECT(pid >= 0, "fork() failed in TimedKillCvdServer");
+  if (pid == 0) {
+    LOG(ERROR) << "Stopping the cvd server...";
+    constexpr bool clear_running_devices_first = true;
+    auto stop_server_result = client.StopCvdServer(clear_running_devices_first);
+    if (!stop_server_result.ok()) {
+      LOG(ERROR) << "cvd kill-server returned error"
+                 << stop_server_result.error().Trace();
+      LOG(ERROR) << "However, cvd reset will continue cleaning up.";
+    }
+    sem_post(binary_sem);
+    // exit 0. This is a short-living worker process
+    exit(0);
+  }
+
+  Subprocess worker_process(pid);
+  struct timespec waiting_time;
+  if (clock_gettime(CLOCK_REALTIME, &waiting_time) == -1) {
+    // cannot set up an alarm clock. Not sure how long it should wait
+    // for the worker process. Thus, we wait for a certain amount of time,
+    // and send SIGKILL to the cvd server process and the worker process.
+    LOG(ERROR) << "Could not get the CLOCK_REALTIME.";
+    LOG(ERROR) << "Sleeping " << timeout << " seconds, and "
+               << "will send sigkill to the server.";
+    using namespace std::chrono_literals;
+    std::this_thread::sleep_for(operator""s((unsigned long long)timeout));
+    auto result_kill = KillCvdServerProcess();
+    worker_process.Stop();
+    // TODO(kwstephenkim): Compose error messages, and propagate
+    CF_EXPECT(result_kill.ok(), "KillCvdServerProcess() failed.");
+    return {};
+  }
+
+  // timed wait; sem_timedwait takes an absolute CLOCK_REALTIME deadline
+  waiting_time.tv_sec += timeout;
+  auto ret_code = sem_timedwait(binary_sem, &waiting_time);
+
+  // ret_code == 0 means sem_wait succeeded before timeout.
+  if (ret_code == 0) {
+    worker_process.Wait();
+    CF_EXPECT(KillCvdServerProcess());
+    return {};
+  }
+
+  // worker process is still running.
+  worker_process.Stop();
+  CF_EXPECT(KillCvdServerProcess());
+  return {};
+}
+
+Result<void> HandleReset(CvdClient& client,
+                         const cvd_common::Args& subcmd_args) {
+  auto options = CF_EXPECT(ParseResetFlags(subcmd_args));
+  if (options.is_help) {
+    std::cout << kHelpMessage << std::endl;
+    return {};
+  }
+
+  // cvd reset. Give one more opportunity
+  if (!options.is_confirmed_by_flag && !GetUserConfirm()) {
+    std::cout << "For more details: "
+              << "  cvd reset --help" << std::endl;
+    return {};
+  }
+
+  auto result = TimedKillCvdServer(client, 50);
+  if (!result.ok()) {
+    LOG(ERROR) << result.error().Trace();
+    LOG(ERROR) << "Cvd reset continues cleaning up devices.";
+  }
+  // cvd reset handler placeholder. identical to cvd kill-server for now.
+  CF_EXPECT(KillAllCuttlefishInstances(
+      {.cvd_server_children_only = options.device_by_cvd_only,
+       .clear_instance_dirs = options.clean_runtime_dir}));
+  return {};
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/handle_reset.h b/host/commands/cvd/handle_reset.h
new file mode 100644
index 0000000..56428f1
--- /dev/null
+++ b/host/commands/cvd/handle_reset.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include "common/libs/utils/result.h"
+#include "host/commands/cvd/client.h"
+#include "host/commands/cvd/types.h"
+
+namespace cuttlefish {
+
+Result<void> HandleReset(CvdClient& client,
+                         const cvd_common::Args& subcmd_args);
+
+static constexpr char kHelpMessage[] = R"(usage: cvd reset <args>
+
+* Warning: Cvd reset is an experimental implementation. When you are in panic,
+cvd reset is the last resort.
+
+args:
+  --help                 Prints this message.
+    help
+
+  --device-by-cvd-only   Terminates devices that a cvd server started
+                         This excludes the devices launched by "launch_cvd"
+                         or "cvd_internal_start" directly (default: false)
+
+  --clean-runtime-dir    Cleans up the runtime directory for the devices
+                         Yet to be implemented. For now, if true, only if
+                         stop_cvd supports --clear_instance_dirs and the
+                         device could be stopped by stop_cvd, the flag takes
+                         effects. (default: true)
+
+  --yes                  Resets without asking the user confirmation.
+   -y
+
+description:
+
+  1. Gracefully stops all devices that the cvd client can reach.
+  2. Forcefully stops all run_cvd processes and their subprocesses.
+  3. Kill the cvd server itself if unresponsive.
+  4. Reset the states of the involved instance lock files
+     -- If cvd reset stops a device, it resets the corresponding lock file.
+  5. Optionally, cleans up the runtime files of the stopped devices.)";
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/instance_lock.cpp b/host/commands/cvd/instance_lock.cpp
index de8b7b5..93546f6 100644
--- a/host/commands/cvd/instance_lock.cpp
+++ b/host/commands/cvd/instance_lock.cpp
@@ -19,43 +19,36 @@
 #include <sys/file.h>
 
 #include <algorithm>
+#include <regex>
 #include <sstream>
 #include <string>
+#include <unordered_map>
 
 #include <android-base/file.h>
+#include <android-base/parseint.h>
 #include <android-base/strings.h>
 #include <fruit/fruit.h>
 
 #include "common/libs/fs/shared_fd.h"
+#include "common/libs/utils/contains.h"
 #include "common/libs/utils/environment.h"
 #include "common/libs/utils/files.h"
 #include "common/libs/utils/result.h"
 
 namespace cuttlefish {
 
-InstanceLockFile::InstanceLockFile(SharedFD fd, int instance_num)
-    : fd_(fd), instance_num_(instance_num) {}
+InstanceLockFile::InstanceLockFile(LockFile&& lock_file, const int instance_num)
+    : lock_file_(std::move(lock_file)), instance_num_(instance_num) {}
 
 int InstanceLockFile::Instance() const { return instance_num_; }
 
 Result<InUseState> InstanceLockFile::Status() const {
-  CF_EXPECT(fd_->LSeek(0, SEEK_SET) == 0, fd_->StrError());
-  char state_char = static_cast<char>(InUseState::kNotInUse);
-  CF_EXPECT(fd_->Read(&state_char, 1) >= 0, fd_->StrError());
-  switch (state_char) {
-    case static_cast<char>(InUseState::kInUse):
-      return InUseState::kInUse;
-    case static_cast<char>(InUseState::kNotInUse):
-      return InUseState::kNotInUse;
-    default:
-      return CF_ERR("Unexpected state value \"" << state_char << "\"");
-  }
+  auto in_use_state = CF_EXPECT(lock_file_.Status());
+  return in_use_state;
 }
 
 Result<void> InstanceLockFile::Status(InUseState state) {
-  CF_EXPECT(fd_->LSeek(0, SEEK_SET) == 0, fd_->StrError());
-  char state_char = static_cast<char>(state);
-  CF_EXPECT(fd_->Write(&state_char, 1) == 1, fd_->StrError());
+  CF_EXPECT(lock_file_.Status(state));
   return {};
 }
 
@@ -63,44 +56,25 @@
   if (instance_num_ != other.instance_num_) {
     return instance_num_ < other.instance_num_;
   }
-  return fd_ < other.fd_;
+  return lock_file_ < other.lock_file_;
 }
 
-InstanceLockFileManager::InstanceLockFileManager() = default;
+InstanceLockFileManager::InstanceLockFileManager() {}
 
-// Replicates tempfile.gettempdir() in Python
-std::string TempDir() {
-  std::vector<std::string> try_dirs = {
-      StringFromEnv("TMPDIR", ""),
-      StringFromEnv("TEMP", ""),
-      StringFromEnv("TMP", ""),
-      "/tmp",
-      "/var/tmp",
-      "/usr/tmp",
-  };
-  for (const auto& try_dir : try_dirs) {
-    if (DirectoryExists(try_dir)) {
-      return try_dir;
-    }
-  }
-  return CurrentDirectory();
-}
-
-static Result<SharedFD> OpenLockFile(int instance_num) {
+Result<std::string> InstanceLockFileManager::LockFilePath(int instance_num) {
   std::stringstream path;
   path << TempDir() << "/acloud_cvd_temp/";
   CF_EXPECT(EnsureDirectoryExists(path.str()));
   path << "local-instance-" << instance_num << ".lock";
-  auto fd = SharedFD::Open(path.str(), O_CREAT | O_RDWR, 0666);
-  CF_EXPECT(fd->IsOpen(), "open(\"" << path.str() << "\"): " << fd->StrError());
-  return fd;
+  return path.str();
 }
 
 Result<InstanceLockFile> InstanceLockFileManager::AcquireLock(
     int instance_num) {
-  auto fd = CF_EXPECT(OpenLockFile(instance_num));
-  CF_EXPECT(fd->Flock(LOCK_EX), fd->StrError());
-  return InstanceLockFile(fd, instance_num);
+  const auto lock_file_path = CF_EXPECT(LockFilePath(instance_num));
+  LockFile lock_file =
+      CF_EXPECT(lock_file_manager_.AcquireLock(lock_file_path));
+  return InstanceLockFile(std::move(lock_file), instance_num);
 }
 
 Result<std::set<InstanceLockFile>> InstanceLockFileManager::AcquireLocks(
@@ -114,14 +88,13 @@
 
 Result<std::optional<InstanceLockFile>> InstanceLockFileManager::TryAcquireLock(
     int instance_num) {
-  auto fd = CF_EXPECT(OpenLockFile(instance_num));
-  int flock_result = fd->Flock(LOCK_EX | LOCK_NB);
-  if (flock_result == 0) {
-    return InstanceLockFile(fd, instance_num);
-  } else if (flock_result == -1 && fd->GetErrno() == EWOULDBLOCK) {
-    return {};
+  const auto lock_file_path = CF_EXPECT(LockFilePath(instance_num));
+  std::optional<LockFile> lock_file_opt =
+      CF_EXPECT(lock_file_manager_.TryAcquireLock(lock_file_path));
+  if (!lock_file_opt) {
+    return std::nullopt;
   }
-  return CF_ERR("flock " << instance_num << " failed: " << fd->StrError());
+  return InstanceLockFile(std::move(*lock_file_opt), instance_num);
 }
 
 Result<std::set<InstanceLockFile>> InstanceLockFileManager::TryAcquireLocks(
@@ -136,7 +109,62 @@
   return locks;
 }
 
-static Result<std::set<int>> AllInstanceNums() {
+Result<std::vector<InstanceLockFile>>
+InstanceLockFileManager::LockAllAvailable() {
+  if (!all_instance_nums_) {
+    all_instance_nums_ = CF_EXPECT(FindPotentialInstanceNumsFromNetDevices());
+  }
+
+  std::vector<InstanceLockFile> acquired_lock_files;
+  for (const auto num : *all_instance_nums_) {
+    auto lock = CF_EXPECT(TryAcquireLock(num));
+    if (!lock) {
+      continue;
+    }
+    auto status = CF_EXPECT(lock->Status());
+    if (status != InUseState::kNotInUse) {
+      continue;
+    }
+    acquired_lock_files.emplace_back(std::move(*lock));
+  }
+  return acquired_lock_files;
+}
+
+static std::string DevicePatternString(
+    const std::unordered_map<std::string, std::set<int>>& device_to_ids_map) {
+  std::string device_pattern_str("^[[:space:]]*cvd-(");
+  for (const auto& [key, _] : device_to_ids_map) {
+    device_pattern_str.append(key).append("|");
+  }
+  if (!device_to_ids_map.empty()) {
+    *device_pattern_str.rbegin() = ')';
+  }
+  device_pattern_str.append("-[0-9]+");
+  return device_pattern_str;
+}
+
+struct TypeAndId {
+  std::string device_type;
+  int id;
+};
+// call this if the line is a network device line
+static Result<TypeAndId> ParseMatchedLine(
+    const std::smatch& device_string_match) {
+  std::string device_string = *device_string_match.begin();
+  auto tokens = android::base::Tokenize(device_string, "-");
+  CF_EXPECT_GE(tokens.size(), 3);
+  const auto cvd = tokens.front();
+  int id = 0;
+  CF_EXPECT(android::base::ParseInt(tokens.back(), &id));
+  // '-'.join(tokens[1:-1])
+  tokens.pop_back();
+  tokens.erase(tokens.begin());
+  const auto device_type = android::base::Join(tokens, "-");
+  return TypeAndId{.device_type = device_type, .id = id};
+}
+
+Result<std::set<int>>
+InstanceLockFileManager::FindPotentialInstanceNumsFromNetDevices() {
   // Estimate this by looking at available tap devices
   // clang-format off
   /** Sample format:
@@ -149,34 +177,55 @@
   std::string proc_net_dev;
   using android::base::ReadFileToString;
   CF_EXPECT(ReadFileToString(kPath, &proc_net_dev, /* follow_symlinks */ true));
+
   auto lines = android::base::Split(proc_net_dev, "\n");
-  std::set<int> etaps, mtaps, wtaps;
+  std::unordered_map<std::string, std::set<int>> device_to_ids_map{
+      {"etap", std::set<int>{}},
+      {"mtap", std::set<int>{}},
+      {"wtap", std::set<int>{}},
+      {"wifiap", std::set<int>{}},
+  };
+  // "^[[:space:]]*cvd-(etap|mtap|wtap|wifiap)-[0-9]+"
+  std::string device_pattern_str = DevicePatternString(device_to_ids_map);
+
+  std::regex device_pattern(device_pattern_str);
   for (const auto& line : lines) {
-    std::set<int>* tap_set = nullptr;
-    if (android::base::StartsWith(line, "cvd-etap-")) {
-      tap_set = &etaps;
-    } else if (android::base::StartsWith(line, "cvd-mtap-")) {
-      tap_set = &mtaps;
-    } else if (android::base::StartsWith(line, "cvd-wtap-")) {
-      tap_set = &wtaps;
-    } else {
+    std::smatch device_string_match;
+    if (!std::regex_search(line, device_string_match, device_pattern)) {
       continue;
     }
-    tap_set->insert(std::stoi(line.substr(std::string{"cvd-etap-"}.size())));
+    const auto [device_type, id] =
+        CF_EXPECT(ParseMatchedLine(device_string_match));
+    CF_EXPECT(Contains(device_to_ids_map, device_type));
+    device_to_ids_map[device_type].insert(id);
   }
-  std::set<int> emtaps;
-  std::set_intersection(etaps.begin(), etaps.end(), mtaps.begin(), mtaps.end(),
-                        std::inserter(emtaps, emtaps.begin()));
-  std::set<int> emwtaps;
-  std::set_intersection(emtaps.begin(), emtaps.end(), wtaps.begin(),
-                        wtaps.end(), std::inserter(emwtaps, emwtaps.begin()));
-  return emwtaps;
+
+  std::set<int> result{device_to_ids_map["etap"]};  // any set except "wifiap"
+  for (const auto& [device_type, id_set] : device_to_ids_map) {
+    /*
+     * b/2457509
+     *
+     * Until the debian host packages are sufficiently up-to-date, the wifiap
+     * devices wouldn't show up in /proc/net/dev.
+     */
+    if (device_type == "wifiap" && id_set.empty()) {
+      continue;
+    }
+    std::set<int> tmp;
+    std::set_intersection(result.begin(), result.end(), id_set.begin(),
+                          id_set.end(), std::inserter(tmp, tmp.begin()));
+    result = std::move(tmp);
+  }
+  return result;
 }
 
 Result<std::optional<InstanceLockFile>>
 InstanceLockFileManager::TryAcquireUnusedLock() {
-  auto nums = CF_EXPECT(AllInstanceNums());
-  for (const auto& num : nums) {
+  if (!all_instance_nums_) {
+    all_instance_nums_ = CF_EXPECT(FindPotentialInstanceNumsFromNetDevices());
+  }
+
+  for (const auto num : *all_instance_nums_) {
     auto lock = CF_EXPECT(TryAcquireLock(num));
     if (lock && CF_EXPECT(lock->Status()) == InUseState::kNotInUse) {
       return std::move(*lock);
diff --git a/host/commands/cvd/instance_lock.h b/host/commands/cvd/instance_lock.h
index 5d3deea..0b510ac 100644
--- a/host/commands/cvd/instance_lock.h
+++ b/host/commands/cvd/instance_lock.h
@@ -16,26 +16,19 @@
 #pragma once
 
 #include <set>
+#include <string>
 
 #include <fruit/fruit.h>
 
-#include "common/libs/fs/shared_fd.h"
-#include "common/libs/utils/result.h"
+#include "host/commands/cvd/lock_file.h"
 
 namespace cuttlefish {
 
-class InstanceLockFileManager;
-
-enum class InUseState : char {
-  kInUse = 'I',
-  kNotInUse = 'N',
-};
-
-// Replicates tempfile.gettempdir() in Python
-std::string TempDir();
-
 // This class is not thread safe.
 class InstanceLockFile {
+  friend class InstanceLockFileManager;
+  using LockFile = cvd_impl::LockFile;
+
  public:
   int Instance() const;
   Result<InUseState> Status() const;
@@ -44,15 +37,15 @@
   bool operator<(const InstanceLockFile&) const;
 
  private:
-  friend class InstanceLockFileManager;
-
-  InstanceLockFile(SharedFD fd, int instance_num);
-
-  SharedFD fd_;
-  int instance_num_;
+  InstanceLockFile(LockFile&& lock_file, const int instance_num);
+  LockFile lock_file_;
+  const int instance_num_;
 };
 
 class InstanceLockFileManager {
+  using LockFile = cvd_impl::LockFile;
+  using LockFileManager = cvd_impl::LockFileManager;
+
  public:
   INJECT(InstanceLockFileManager());
 
@@ -64,6 +57,17 @@
 
   // Best-effort attempt to find a free instance id.
   Result<std::optional<InstanceLockFile>> TryAcquireUnusedLock();
+
+  Result<std::vector<InstanceLockFile>> LockAllAvailable();
+
+ private:
+  /*
+   * Generates the set of instance numbers used to initialize all_instance_nums_
+   */
+  Result<std::set<int>> FindPotentialInstanceNumsFromNetDevices();
+  static Result<std::string> LockFilePath(int instance_num);
+  std::optional<std::set<int>> all_instance_nums_;
+  LockFileManager lock_file_manager_;
 };
 
 }  // namespace cuttlefish
diff --git a/host/commands/cvd/instance_manager.cpp b/host/commands/cvd/instance_manager.cpp
index 7ea1b74..a87e87e 100644
--- a/host/commands/cvd/instance_manager.cpp
+++ b/host/commands/cvd/instance_manager.cpp
@@ -16,106 +16,288 @@
 
 #include "host/commands/cvd/instance_manager.h"
 
+#include <signal.h>
+
 #include <map>
 #include <mutex>
-#include <optional>
-#include <thread>
+#include <sstream>
 
 #include <android-base/file.h>
-#include <android-base/logging.h>
 #include <fruit/fruit.h>
 
-#include "cvd_server.pb.h"
-
 #include "common/libs/fs/shared_buf.h"
 #include "common/libs/fs/shared_fd.h"
+#include "common/libs/utils/contains.h"
 #include "common/libs/utils/files.h"
 #include "common/libs/utils/flag_parser.h"
 #include "common/libs/utils/result.h"
 #include "common/libs/utils/subprocess.h"
+#include "cvd_server.pb.h"
+#include "host/commands/cvd/common_utils.h"
+#include "host/commands/cvd/selector/instance_database_utils.h"
+#include "host/commands/cvd/selector/selector_constants.h"
 #include "host/commands/cvd/server_constants.h"
 #include "host/libs/config/cuttlefish_config.h"
 #include "host/libs/config/known_paths.h"
 
 namespace cuttlefish {
+namespace {
 
-std::optional<std::string> GetCuttlefishConfigPath(const std::string& home) {
-  std::string home_realpath;
-  if (DirectoryExists(home)) {
-    CHECK(android::base::Realpath(home, &home_realpath));
-    static const char kSuffix[] = "/cuttlefish_assembly/cuttlefish_config.json";
-    std::string config_path = AbsolutePath(home_realpath + kSuffix);
-    if (FileExists(config_path)) {
-      return config_path;
-    }
+// Succeeds only if the command terminated normally with exit status 0
+Result<void> RunCommand(Command&& command) {
+  auto subprocess = std::move(command.Start());
+  siginfo_t infop{};
+  // This blocks until the process exits, but doesn't reap it.
+  auto result = subprocess.Wait(&infop, WEXITED);
+  CF_EXPECT(result != -1, "Lost track of subprocess pid");
+  CF_EXPECT(infop.si_code == CLD_EXITED && infop.si_status == 0);
+  return {};
+}
+
+}  // namespace
+
+Result<std::string> InstanceManager::GetCuttlefishConfigPath(
+    const std::string& home) {
+  return selector::GetCuttlefishConfigPath(home);
+}
+
+InstanceManager::InstanceManager(
+    InstanceLockFileManager& lock_manager,
+    HostToolTargetManager& host_tool_target_manager)
+    : lock_manager_(lock_manager),
+      host_tool_target_manager_(host_tool_target_manager) {}
+
+selector::InstanceDatabase& InstanceManager::GetInstanceDB(const uid_t uid) {
+  if (!Contains(instance_dbs_, uid)) {
+    instance_dbs_.try_emplace(uid);
+  }
+  return instance_dbs_[uid];
+}
+
+Result<Json::Value> InstanceManager::Serialize(const uid_t uid) {
+  std::lock_guard lock(instance_db_mutex_);
+  const auto& db = GetInstanceDB(uid);
+  return db.Serialize();
+}
+
+Result<void> InstanceManager::LoadFromJson(const uid_t uid,
+                                           const Json::Value& db_json) {
+  std::lock_guard lock(instance_db_mutex_);
+  CF_EXPECT(!Contains(instance_dbs_, uid));
+  auto& db = GetInstanceDB(uid);
+  CF_EXPECT(db.LoadFromJson(db_json));
+  return {};
+}
+
+Result<InstanceManager::GroupCreationInfo> InstanceManager::Analyze(
+    const std::string& sub_cmd, const CreationAnalyzerParam& param,
+    const ucred& credential) {
+  const uid_t uid = credential.uid;
+  std::unique_lock lock(instance_db_mutex_);
+  auto& instance_db = GetInstanceDB(uid);
+  lock.unlock();
+
+  auto group_creation_info = CF_EXPECT(CreationAnalyzer::Analyze(
+      sub_cmd, param, credential, instance_db, lock_manager_));
+  return {group_creation_info};
+}
+
+Result<InstanceManager::LocalInstanceGroup> InstanceManager::SelectGroup(
+    const cvd_common::Args& selector_args, const cvd_common::Envs& envs,
+    const uid_t uid) {
+  return SelectGroup(selector_args, {}, envs, uid);
+}
+
+Result<InstanceManager::LocalInstanceGroup> InstanceManager::SelectGroup(
+    const cvd_common::Args& selector_args, const Queries& extra_queries,
+    const cvd_common::Envs& envs, const uid_t uid) {
+  std::unique_lock lock(instance_db_mutex_);
+  auto& instance_db = GetInstanceDB(uid);
+  auto group_selector = CF_EXPECT(
+      GroupSelector::GetSelector(selector_args, extra_queries, envs, uid));
+  auto group = CF_EXPECT(group_selector.FindGroup(instance_db));
+  return group;
+}
+
+Result<InstanceManager::LocalInstance::Copy> InstanceManager::SelectInstance(
+    const cvd_common::Args& selector_args, const cvd_common::Envs& envs,
+    const uid_t uid) {
+  return SelectInstance(selector_args, {}, envs, uid);
+}
+
+Result<InstanceManager::LocalInstance::Copy> InstanceManager::SelectInstance(
+    const cvd_common::Args& selector_args, const Queries& extra_queries,
+    const cvd_common::Envs& envs, const uid_t uid) {
+  std::unique_lock lock(instance_db_mutex_);
+  auto& instance_db = GetInstanceDB(uid);
+  auto instance_selector = CF_EXPECT(
+      InstanceSelector::GetSelector(selector_args, extra_queries, envs, uid));
+  auto instance_copy = CF_EXPECT(instance_selector.FindInstance(instance_db));
+  return instance_copy;
+}
+
+bool InstanceManager::HasInstanceGroups(const uid_t uid) {
+  std::lock_guard lock(instance_db_mutex_);
+  auto& instance_db = GetInstanceDB(uid);
+  return !instance_db.IsEmpty();
+}
+
+Result<void> InstanceManager::SetInstanceGroup(
+    const uid_t uid, const selector::GroupCreationInfo& group_info) {
+  std::lock_guard assemblies_lock(instance_db_mutex_);
+  auto& instance_db = GetInstanceDB(uid);
+
+  const auto group_name = group_info.group_name;
+  const auto home_dir = group_info.home;
+  const auto host_artifacts_path = group_info.host_artifacts_path;
+  const auto product_out_path = group_info.product_out_path;
+  const auto& per_instance_info = group_info.instances;
+
+  auto new_group = CF_EXPECT(
+      instance_db.AddInstanceGroup({.group_name = group_name,
+                                    .home_dir = home_dir,
+                                    .host_artifacts_path = host_artifacts_path,
+                                    .product_out_path = product_out_path}));
+
+  using InstanceInfo = selector::InstanceDatabase::InstanceInfo;
+  std::vector<InstanceInfo> instances_info;
+  for (const auto& instance : per_instance_info) {
+    InstanceInfo info{.name = instance.per_instance_name_,
+                      .id = instance.instance_id_};
+    instances_info.push_back(info);
+  }
+  auto result = instance_db.AddInstances(group_name, instances_info);
+  if (!result.ok()) {
+    /*
+     * The way InstanceManager uses the database is that it adds an empty
+     * group, gets a handle, and adds instances to it. Thus, failing to add
+     * an instance to the group does not always mean that the instance group
+     * addition fails. It is up to the caller. In this case, however, failing
+     * to add an instance to a new group means failing to create an instance
+     * group itself. Thus, we should remove the new instance group from the
+     * database.
+     *
+     */
+    instance_db.RemoveInstanceGroup(new_group.Get());
+    return CF_ERR(result.error().Trace());
   }
   return {};
 }
 
-InstanceManager::InstanceManager(InstanceLockFileManager& lock_manager)
-    : lock_manager_(lock_manager) {}
-
-bool InstanceManager::HasInstanceGroups() const {
-  std::lock_guard lock(instance_groups_mutex_);
-  return !instance_groups_.empty();
+Result<void> InstanceManager::SetBuildId(const uid_t uid,
+                                         const std::string& group_name,
+                                         const std::string& build_id) {
+  std::lock_guard assemblies_lock(instance_db_mutex_);
+  auto& instance_db = GetInstanceDB(uid);
+  CF_EXPECT(instance_db.SetBuildId(group_name, build_id));
+  return {};
 }
 
-void InstanceManager::SetInstanceGroup(
-    const InstanceManager::InstanceGroupDir& dir,
-    const InstanceManager::InstanceGroupInfo& info) {
-  std::lock_guard assemblies_lock(instance_groups_mutex_);
-  instance_groups_[dir] = info;
+void InstanceManager::RemoveInstanceGroup(const uid_t uid,
+                                          const std::string& dir) {
+  std::lock_guard assemblies_lock(instance_db_mutex_);
+  auto& instance_db = GetInstanceDB(uid);
+  auto result = instance_db.FindGroup({selector::kHomeField, dir});
+  if (!result.ok()) return;
+  auto group = *result;
+  instance_db.RemoveInstanceGroup(group);
 }
 
-void InstanceManager::RemoveInstanceGroup(
-    const InstanceManager::InstanceGroupDir& dir) {
-  std::lock_guard assemblies_lock(instance_groups_mutex_);
-  instance_groups_.erase(dir);
+template <typename... Args>
+static Command GetCommand(const std::string& prog_path, Args&&... args) {
+  Command command(prog_path);
+  (command.AddParameter(args), ...);
+  return command;
 }
 
-Result<InstanceManager::InstanceGroupInfo> InstanceManager::GetInstanceGroup(
-    const InstanceManager::InstanceGroupDir& dir) const {
-  std::lock_guard assemblies_lock(instance_groups_mutex_);
-  auto info_it = instance_groups_.find(dir);
-  if (info_it == instance_groups_.end()) {
-    return CF_ERR("No group dir \"" << dir << "\"");
-  } else {
-    return info_it->second;
+struct ExecCommandResult {
+  std::string stdout_buf;
+  std::string stderr_buf;
+};
+
+static Result<ExecCommandResult> ExecCommand(Command&& command) {
+  ExecCommandResult command_result;
+  CF_EXPECT_EQ(RunWithManagedStdio(std::move(command), /* stdin */ nullptr,
+                                   std::addressof(command_result.stdout_buf),
+                                   std::addressof(command_result.stderr_buf)),
+               0);
+  return command_result;
+}
+
+Result<InstanceManager::StatusCommandOutput>
+InstanceManager::IssueStatusCommand(const selector::LocalInstanceGroup& group,
+                                    const SharedFD& err) {
+  std::string not_supported_version_msg = " does not comply with cvd fleet.\n";
+  const auto host_android_out = group.HostArtifactsPath();
+  auto status_bin = CF_EXPECT(host_tool_target_manager_.ExecBaseName({
+      .artifacts_path = host_android_out,
+      .op = "status",
+  }));
+  const auto prog_path = host_android_out + "/bin/" + status_bin;
+  Command with_args = GetCommand(prog_path, "--all_instances", "--print");
+  with_args.SetEnvironment({ConcatToString("HOME=", group.HomeDir())});
+  auto command_result = ExecCommand(std::move(with_args));
+  if (command_result.ok()) {
+    StatusCommandOutput output;
+    if (command_result->stdout_buf.empty()) {
+      WriteAll(err, ConcatToString(group.GroupName(), "-*",
+                                   not_supported_version_msg));
+      Json::Reader().parse("{}", output.stdout_json);
+      return output;
+    }
+    output.stdout_json = CF_EXPECT(ParseJson(command_result->stdout_buf));
+    return output;
   }
+  StatusCommandOutput output;
+  int index = 0;
+  for (const auto& instance_ref : CF_EXPECT(group.FindAllInstances())) {
+    const auto id = instance_ref.Get().InstanceId();
+    Command without_args = GetCommand(prog_path);
+    std::vector<std::string> new_envs{
+        ConcatToString("HOME=", group.HomeDir()),
+        ConcatToString(kCuttlefishInstanceEnvVarName, "=", std::to_string(id))};
+    without_args.SetEnvironment(new_envs);
+    auto second_command_result =
+        CF_EXPECT(ExecCommand(std::move(without_args)));
+    if (second_command_result.stdout_buf.empty()) {
+      WriteAll(err,
+               instance_ref.Get().DeviceName() + not_supported_version_msg);
+      second_command_result.stdout_buf.append("{}");
+    }
+    output.stdout_json[index] =
+        CF_EXPECT(ParseJson(second_command_result.stdout_buf));
+  }
+  return output;
 }
 
-cvd::Status InstanceManager::CvdFleet(const SharedFD& out,
-                                      const std::string& env_config) const {
-  std::lock_guard assemblies_lock(instance_groups_mutex_);
+Result<cvd::Status> InstanceManager::CvdFleetImpl(const uid_t uid,
+                                                  const SharedFD& out,
+                                                  const SharedFD& err) {
+  std::lock_guard assemblies_lock(instance_db_mutex_);
+  auto& instance_db = GetInstanceDB(uid);
   const char _GroupDeviceInfoStart[] = "[\n";
   const char _GroupDeviceInfoSeparate[] = ",\n";
   const char _GroupDeviceInfoEnd[] = "]\n";
   WriteAll(out, _GroupDeviceInfoStart);
-  for (const auto& [group_dir, group_info] : instance_groups_) {
-    auto config_path = GetCuttlefishConfigPath(group_dir);
-    if (FileExists(env_config)) {
-      config_path = env_config;
+  auto&& instance_groups = instance_db.InstanceGroups();
+
+  for (const auto& group : instance_groups) {
+    CF_EXPECT(group != nullptr);
+    auto result = IssueStatusCommand(*group, err);
+    if (!result.ok()) {
+      WriteAll(err, "      (unknown instance status error)");
+    } else {
+      const auto [stderr_msg, stdout_json] = *result;
+      WriteAll(err, stderr_msg);
+      // TODO(kwstephenkim): build a data structure that also includes
+      // selector-related information, etc.
+      WriteAll(out, stdout_json.toStyledString());
     }
-    if (config_path) {
-      // Reads CuttlefishConfig::instance_names(), which must remain stable
-      // across changes to config file format (within server_constants.h major
-      // version).
-      auto config = CuttlefishConfig::GetFromFile(*config_path);
-      if (config) {
-        Command command(group_info.host_binaries_dir + kStatusBin);
-        command.AddParameter("--print");
-        command.AddParameter("--all_instances");
-        command.RedirectStdIO(Subprocess::StdIOChannel::kStdOut, out);
-        command.AddEnvironmentVariable(kCuttlefishConfigEnvVarName,
-                                       *config_path);
-        if (int wait_result = command.Start().Wait(); wait_result != 0) {
-          WriteAll(out, "      (unknown instance status error)");
-        }
-      }
+    // move on
+    if (group == *instance_groups.crbegin()) {
+      continue;
     }
-    if (group_dir != instance_groups_.rbegin()->first) {
-      WriteAll(out, _GroupDeviceInfoSeparate);
-    }
+    WriteAll(out, _GroupDeviceInfoSeparate);
   }
   WriteAll(out, _GroupDeviceInfoEnd);
   cvd::Status status;
@@ -123,41 +305,169 @@
   return status;
 }
 
-cvd::Status InstanceManager::CvdClear(const SharedFD& out,
-                                      const SharedFD& err) {
-  std::lock_guard lock(instance_groups_mutex_);
-  cvd::Status status;
-  for (const auto& [group_dir, group_info] : instance_groups_) {
-    auto config_path = GetCuttlefishConfigPath(group_dir);
-    if (config_path) {
-      // Stop all instances that are using this group dir.
-      Command command(group_info.host_binaries_dir + kStopBin);
-      // Delete the instance dirs.
-      command.AddParameter("--clear_instance_dirs");
-      command.RedirectStdIO(Subprocess::StdIOChannel::kStdOut, out);
-      command.RedirectStdIO(Subprocess::StdIOChannel::kStdErr, err);
-      command.AddEnvironmentVariable(kCuttlefishConfigEnvVarName, *config_path);
-      if (int wait_result = command.Start().Wait(); wait_result != 0) {
-        WriteAll(
-            out,
-            "Warning: error stopping instances for dir \"" + group_dir +
-                "\".\nThis can happen if instances are already stopped.\n");
-      }
-      for (const auto& instance : group_info.instances) {
-        auto lock = lock_manager_.TryAcquireLock(instance);
-        if (lock.ok() && (*lock)) {
-          (*lock)->Status(InUseState::kNotInUse);
-        }
-      }
+Result<cvd::Status> InstanceManager::CvdFleet(
+    const uid_t uid, const SharedFD& out, const SharedFD& err,
+    const std::vector<std::string>& fleet_cmd_args) {
+  bool is_help = false;
+  for (const auto& arg : fleet_cmd_args) {
+    if (arg == "--help" || arg == "-help") {
+      is_help = true;
+      break;
     }
   }
-  RemoveFile(StringFromEnv("HOME", ".") + "/cuttlefish_runtime");
-  RemoveFile(GetGlobalConfigFileLink());
-  WriteAll(out, "Stopped all known instances\n");
+  CF_EXPECT(!is_help,
+            "cvd fleet --help should be handled by fleet handler itself.");
+  const auto status = CF_EXPECT(CvdFleetImpl(uid, out, err));
+  return status;
+}
 
-  instance_groups_.clear();
+Result<std::string> InstanceManager::StopBin(
+    const std::string& host_android_out) {
+  const auto stop_bin = CF_EXPECT(host_tool_target_manager_.ExecBaseName({
+      .artifacts_path = host_android_out,
+      .op = "stop",
+  }));
+  return stop_bin;
+}
+
+Result<void> InstanceManager::IssueStopCommand(
+    const SharedFD& out, const SharedFD& err,
+    const std::string& config_file_path,
+    const selector::LocalInstanceGroup& group) {
+  const auto stop_bin = CF_EXPECT(StopBin(group.HostArtifactsPath()));
+  Command command(group.HostArtifactsPath() + "/bin/" + stop_bin);
+  command.AddParameter("--clear_instance_dirs");
+  command.RedirectStdIO(Subprocess::StdIOChannel::kStdOut, out);
+  command.RedirectStdIO(Subprocess::StdIOChannel::kStdErr, err);
+  command.AddEnvironmentVariable(kCuttlefishConfigEnvVarName, config_file_path);
+  auto wait_result = RunCommand(std::move(command));
+  /**
+   * --clear_instance_dirs may not be available for old branches. This causes
+   * the stop_cvd to terminate with a non-zero exit code due to the parsing
+   * error. Then, we will try to re-run it without the flag.
+   */
+  if (!wait_result.ok()) {
+    std::stringstream error_msg;
+    error_msg << stop_bin << " was executed internally, and failed. It might "
+              << "be failing to parse the new --clear_instance_dirs. Will try "
+              << "without the flag.\n";
+    WriteAll(err, error_msg.str());
+    Command no_clear_instance_dir_command(group.HostArtifactsPath() + "/bin/" +
+                                          stop_bin);
+    no_clear_instance_dir_command.RedirectStdIO(
+        Subprocess::StdIOChannel::kStdOut, out);
+    no_clear_instance_dir_command.RedirectStdIO(
+        Subprocess::StdIOChannel::kStdErr, err);
+    no_clear_instance_dir_command.AddEnvironmentVariable(
+        kCuttlefishConfigEnvVarName, config_file_path);
+    wait_result = RunCommand(std::move(no_clear_instance_dir_command));
+  }
+
+  if (!wait_result.ok()) {
+    WriteAll(err,
+             "Warning: error stopping instances for dir \"" + group.HomeDir() +
+                 "\".\nThis can happen if instances are already stopped.\n");
+  }
+  for (const auto& instance : group.Instances()) {
+    auto lock = lock_manager_.TryAcquireLock(instance->InstanceId());
+    if (lock.ok() && (*lock)) {
+      (*lock)->Status(InUseState::kNotInUse);
+      continue;
+    }
+    WriteAll(err, "InstanceLockFileManager failed to acquire lock");
+  }
+  return {};
+}
+
+cvd::Status InstanceManager::CvdClear(const SharedFD& out,
+                                      const SharedFD& err) {
+  std::lock_guard lock(instance_db_mutex_);
+  cvd::Status status;
+  const std::string config_json_name = cpp_basename(GetGlobalConfigFileLink());
+  for (auto& [uid, instance_db] : instance_dbs_) {
+    auto&& instance_groups = instance_db.InstanceGroups();
+    for (const auto& group : instance_groups) {
+      auto config_path = group->GetCuttlefishConfigPath();
+      if (config_path.ok()) {
+        auto stop_result = IssueStopCommand(out, err, *config_path, *group);
+        if (!stop_result.ok()) {
+          LOG(ERROR) << stop_result.error().Message();
+        }
+      }
+      RemoveFile(group->HomeDir() + "/cuttlefish_runtime");
+      RemoveFile(group->HomeDir() + config_json_name);
+    }
+    instance_db.Clear();
+  }
+  // TODO(kwstephenkim): we need a better mechanism to make sure that
+  // we clear all run_cvd processes.
+  instance_dbs_.clear();
+  WriteAll(err, "Stopped all known instances\n");
   status.set_code(cvd::Status::OK);
   return status;
 }
 
+Result<std::optional<InstanceLockFile>> InstanceManager::TryAcquireLock(
+    int instance_num) {
+  std::lock_guard lock(instance_db_mutex_);
+  return CF_EXPECT(lock_manager_.TryAcquireLock(instance_num));
+}
+
+Result<std::vector<InstanceManager::LocalInstanceGroup>>
+InstanceManager::FindGroups(const uid_t uid, const Query& query) const {
+  return CF_EXPECT(FindGroups(uid, Queries{query}));
+}
+
+Result<std::vector<InstanceManager::LocalInstanceGroup>>
+InstanceManager::FindGroups(const uid_t uid, const Queries& queries) const {
+  std::lock_guard lock(instance_db_mutex_);
+  if (!Contains(instance_dbs_, uid)) {
+    return {};
+  }
+  const auto& db = instance_dbs_.at(uid);
+  auto groups = CF_EXPECT(db.FindGroups(queries));
+  // create a copy as we are escaping the critical section
+  std::vector<LocalInstanceGroup> output;
+  for (const auto& group_ref : groups) {
+    output.push_back(group_ref.Get());
+  }
+  return output;
+}
+
+Result<std::vector<InstanceManager::LocalInstance::Copy>>
+InstanceManager::FindInstances(const uid_t uid, const Query& query) const {
+  return CF_EXPECT(FindInstances(uid, Queries{query}));
+}
+
+Result<std::vector<InstanceManager::LocalInstance::Copy>>
+InstanceManager::FindInstances(const uid_t uid, const Queries& queries) const {
+  std::lock_guard lock(instance_db_mutex_);
+  if (!Contains(instance_dbs_, uid)) {
+    return {};
+  }
+  const auto& db = instance_dbs_.at(uid);
+  auto instances = CF_EXPECT(db.FindInstances(queries));
+  // create a copy as we are escaping the critical section
+  std::vector<LocalInstance::Copy> output;
+  for (const auto& instance : instances) {
+    output.push_back(instance.Get().GetCopy());
+  }
+  return output;
+}
+
+Result<InstanceManager::LocalInstanceGroup> InstanceManager::FindGroup(
+    const uid_t uid, const Query& query) const {
+  return CF_EXPECT(FindGroup(uid, Queries{query}));
+}
+
+Result<InstanceManager::LocalInstanceGroup> InstanceManager::FindGroup(
+    const uid_t uid, const Queries& queries) const {
+  std::lock_guard lock(instance_db_mutex_);
+  CF_EXPECT(Contains(instance_dbs_, uid));
+  const auto& db = instance_dbs_.at(uid);
+  auto output = CF_EXPECT(db.FindGroups(queries));
+  CF_EXPECT_EQ(output.size(), 1);
+  return *(output.begin());
+}
+
 }  // namespace cuttlefish
diff --git a/host/commands/cvd/instance_manager.h b/host/commands/cvd/instance_manager.h
index 6aebbdb..a7bc7e9 100644
--- a/host/commands/cvd/instance_manager.h
+++ b/host/commands/cvd/instance_manager.h
@@ -16,50 +16,120 @@
 
 #pragma once
 
-#include <map>
+#include <sys/types.h>
+
 #include <mutex>
 #include <optional>
+#include <set>
 #include <string>
+#include <unordered_map>
+#include <vector>
 
 #include <fruit/fruit.h>
 
-#include "cvd_server.pb.h"
-
 #include "common/libs/fs/shared_fd.h"
+#include "common/libs/utils/json.h"
 #include "common/libs/utils/result.h"
+#include "cvd_server.pb.h"
+#include "host/commands/cvd/common_utils.h"
 #include "host/commands/cvd/instance_lock.h"
+#include "host/commands/cvd/selector/creation_analyzer.h"
+#include "host/commands/cvd/selector/group_selector.h"
+#include "host/commands/cvd/selector/instance_database.h"
+#include "host/commands/cvd/selector/instance_database_types.h"
+#include "host/commands/cvd/selector/instance_selector.h"
+#include "host/commands/cvd/server_command/host_tool_target_manager.h"
 
 namespace cuttlefish {
 
-constexpr char kStatusBin[] = "cvd_internal_status";
-constexpr char kStopBin[] = "cvd_internal_stop";
-
 class InstanceManager {
  public:
-  using InstanceGroupDir = std::string;
-  struct InstanceGroupInfo {
-    std::string host_binaries_dir;
-    std::set<int> instances;
-  };
+  using CreationAnalyzer = selector::CreationAnalyzer;
+  using CreationAnalyzerParam = CreationAnalyzer::CreationAnalyzerParam;
+  using GroupCreationInfo = selector::GroupCreationInfo;
+  using LocalInstanceGroup = selector::LocalInstanceGroup;
+  using LocalInstance = selector::LocalInstance;
+  using GroupSelector = selector::GroupSelector;
+  using InstanceSelector = selector::InstanceSelector;
+  using Queries = selector::Queries;
+  using Query = selector::Query;
+  template <typename T>
+  using Set = selector::Set<T>;
 
-  INJECT(InstanceManager(InstanceLockFileManager&));
+  INJECT(InstanceManager(InstanceLockFileManager&, HostToolTargetManager&));
 
-  bool HasInstanceGroups() const;
-  void SetInstanceGroup(const InstanceGroupDir&, const InstanceGroupInfo&);
-  void RemoveInstanceGroup(const InstanceGroupDir&);
-  Result<InstanceGroupInfo> GetInstanceGroup(const InstanceGroupDir&) const;
+  // For cvd start
+  Result<GroupCreationInfo> Analyze(const std::string& sub_cmd,
+                                    const CreationAnalyzerParam& param,
+                                    const ucred& credential);
+
+  Result<LocalInstanceGroup> SelectGroup(const cvd_common::Args& selector_args,
+                                         const cvd_common::Envs& envs,
+                                         const uid_t uid);
+
+  Result<LocalInstanceGroup> SelectGroup(const cvd_common::Args& selector_args,
+                                         const Queries& extra_queries,
+                                         const cvd_common::Envs& envs,
+                                         const uid_t uid);
+
+  Result<LocalInstance::Copy> SelectInstance(
+      const cvd_common::Args& selector_args, const Queries& extra_queries,
+      const cvd_common::Envs& envs, const uid_t uid);
+
+  Result<LocalInstance::Copy> SelectInstance(
+      const cvd_common::Args& selector_args, const cvd_common::Envs& envs,
+      const uid_t uid);
+
+  bool HasInstanceGroups(const uid_t uid);
+  Result<void> SetInstanceGroup(const uid_t uid,
+                                const selector::GroupCreationInfo& group_info);
+  Result<void> SetBuildId(const uid_t uid, const std::string& group_name,
+                          const std::string& build_id);
+  void RemoveInstanceGroup(const uid_t uid, const std::string&);
 
   cvd::Status CvdClear(const SharedFD& out, const SharedFD& err);
-  cvd::Status CvdFleet(const SharedFD& out, const std::string& envconfig) const;
+  Result<cvd::Status> CvdFleet(const uid_t uid, const SharedFD& out,
+                               const SharedFD& err,
+                               const std::vector<std::string>& fleet_cmd_args);
+  static Result<std::string> GetCuttlefishConfigPath(const std::string& home);
+
+  Result<std::optional<InstanceLockFile>> TryAcquireLock(int instance_num);
+
+  Result<std::vector<LocalInstanceGroup>> FindGroups(const uid_t uid,
+                                                     const Query& query) const;
+  Result<std::vector<LocalInstanceGroup>> FindGroups(
+      const uid_t uid, const Queries& queries) const;
+  Result<std::vector<LocalInstance::Copy>> FindInstances(
+      const uid_t uid, const Query& query) const;
+  Result<std::vector<LocalInstance::Copy>> FindInstances(
+      const uid_t uid, const Queries& queries) const;
+
+  Result<LocalInstanceGroup> FindGroup(const uid_t uid,
+                                       const Query& query) const;
+  Result<LocalInstanceGroup> FindGroup(const uid_t uid,
+                                       const Queries& queries) const;
+  Result<Json::Value> Serialize(const uid_t uid);
+  Result<void> LoadFromJson(const uid_t uid, const Json::Value&);
 
  private:
+  Result<cvd::Status> CvdFleetImpl(const uid_t uid, const SharedFD& out,
+                                   const SharedFD& err);
+  struct StatusCommandOutput {
+    std::string stderr_msg;
+    Json::Value stdout_json;
+  };
+  Result<StatusCommandOutput> IssueStatusCommand(
+      const selector::LocalInstanceGroup& group, const SharedFD& err);
+  Result<void> IssueStopCommand(const SharedFD& out, const SharedFD& err,
+                                const std::string& config_file_path,
+                                const selector::LocalInstanceGroup& group);
+  Result<std::string> StopBin(const std::string& host_android_out);
+
+  selector::InstanceDatabase& GetInstanceDB(const uid_t uid);
   InstanceLockFileManager& lock_manager_;
-
-  mutable std::mutex instance_groups_mutex_;
-  std::map<InstanceGroupDir, InstanceGroupInfo> instance_groups_;
+  HostToolTargetManager& host_tool_target_manager_;
+  mutable std::mutex instance_db_mutex_;
+  std::unordered_map<uid_t, selector::InstanceDatabase> instance_dbs_;
 };
 
-std::optional<std::string> GetCuttlefishConfigPath(
-    const std::string& assembly_dir);
-
 }  // namespace cuttlefish
diff --git a/host/commands/cvd/lock_file.cpp b/host/commands/cvd/lock_file.cpp
new file mode 100644
index 0000000..cf3bc08
--- /dev/null
+++ b/host/commands/cvd/lock_file.cpp
@@ -0,0 +1,144 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/commands/cvd/lock_file.h"
+
+#include <sys/file.h>
+
+#include <algorithm>
+#include <cstring>
+#include <sstream>
+#include <vector>
+
+#include <android-base/file.h>
+#include <android-base/strings.h>
+
+#include "common/libs/fs/shared_fd.h"
+#include "common/libs/utils/environment.h"
+#include "common/libs/utils/files.h"
+#include "common/libs/utils/result.h"
+
+namespace cuttlefish {
+namespace cvd_impl {
+
+LockFile::LockFile(SharedFD fd, const std::string& lock_file_path)
+    : fd_(std::move(fd)), lock_file_path_(lock_file_path) {}
+
+Result<InUseState> LockFile::Status() const {
+  CF_EXPECT(fd_->LSeek(0, SEEK_SET) == 0, fd_->StrError());
+  char state_char = static_cast<char>(InUseState::kNotInUse);
+  CF_EXPECT(fd_->Read(&state_char, 1) >= 0, fd_->StrError());
+  switch (state_char) {
+    case static_cast<char>(InUseState::kInUse):
+      return InUseState::kInUse;
+    case static_cast<char>(InUseState::kNotInUse):
+      return InUseState::kNotInUse;
+    default:
+      return CF_ERR("Unexpected state value \"" << state_char << "\"");
+  }
+}
+
+Result<void> LockFile::Status(InUseState state) {
+  CF_EXPECT(fd_->LSeek(0, SEEK_SET) == 0, fd_->StrError());
+  char state_char = static_cast<char>(state);
+  CF_EXPECT(fd_->Write(&state_char, 1) == 1, fd_->StrError());
+  return {};
+}
+
+bool LockFile::operator<(const LockFile& other) const {
+  if (this == std::addressof(other)) {
+    return false;
+  }
+  if (LockFilePath() == other.LockFilePath()) {
+    return fd_ < other.fd_;
+  }
+  // operator< for std::string will be gone as of C++20
+  return (strncmp(lock_file_path_.data(), other.LockFilePath().data(),
+                  std::max(lock_file_path_.size(),
+                           other.LockFilePath().size())) < 0);
+}
+
+Result<SharedFD> LockFileManager::OpenLockFile(const std::string& file_path) {
+  auto parent_dir = android::base::Dirname(file_path);
+  CF_EXPECT(EnsureDirectoryExists(parent_dir));
+  auto fd = SharedFD::Open(file_path.data(), O_CREAT | O_RDWR, 0666);
+  CF_EXPECT(fd->IsOpen(), "open(\"" << file_path << "\"): " << fd->StrError());
+  return fd;
+}
+
+Result<LockFile> LockFileManager::AcquireLock(
+    const std::string& lock_file_path) {
+  auto fd = CF_EXPECT(OpenLockFile(lock_file_path));
+  CF_EXPECT(fd->Flock(LOCK_EX));
+  return LockFile(fd, lock_file_path);
+}
+
+Result<std::set<LockFile>> LockFileManager::AcquireLocks(
+    const std::set<std::string>& lock_file_paths) {
+  std::set<LockFile> locks;
+  for (const auto& lock_file_path : lock_file_paths) {
+    locks.emplace(CF_EXPECT(AcquireLock(lock_file_path)));
+  }
+  return locks;
+}
+
+Result<std::optional<LockFile>> LockFileManager::TryAcquireLock(
+    const std::string& lock_file_path) {
+  auto fd = CF_EXPECT(OpenLockFile(lock_file_path));
+  auto flock_result = fd->Flock(LOCK_EX | LOCK_NB);
+  if (flock_result.ok()) {
+    return std::optional<LockFile>(LockFile(fd, lock_file_path));
+    // TODO(schuffelen): Include the error code in the Result
+  } else if (!flock_result.ok() && fd->GetErrno() == EWOULDBLOCK) {
+    return {};
+  }
+  CF_EXPECT(std::move(flock_result));
+  return {};
+}
+
+Result<std::set<LockFile>> LockFileManager::TryAcquireLocks(
+    const std::set<std::string>& lock_file_paths) {
+  std::set<LockFile> locks;
+  for (const auto& lock_file_path : lock_file_paths) {
+    auto lock = CF_EXPECT(TryAcquireLock(lock_file_path));
+    if (lock) {
+      locks.emplace(std::move(*lock));
+    }
+  }
+  return locks;
+}
+
+}  // namespace cvd_impl
+
+// Replicates tempfile.gettempdir() in Python
+std::string TempDir() {
+  std::vector<std::string> try_dirs = {
+      StringFromEnv("TMPDIR", ""),
+      StringFromEnv("TEMP", ""),
+      StringFromEnv("TMP", ""),
+      "/tmp",
+      "/var/tmp",
+      "/usr/tmp",
+  };
+  for (const auto& try_dir : try_dirs) {
+    if (DirectoryExists(try_dir)) {
+      return try_dir;
+    }
+  }
+  return CurrentDirectory();
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/lock_file.h b/host/commands/cvd/lock_file.h
new file mode 100644
index 0000000..361a19c
--- /dev/null
+++ b/host/commands/cvd/lock_file.h
@@ -0,0 +1,76 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#pragma once
+
+#include <optional>
+#include <set>
+#include <string>
+
+#include "common/libs/fs/shared_fd.h"
+#include "common/libs/utils/result.h"
+
+namespace cuttlefish {
+
+enum class InUseState : char {
+  kInUse = 'I',
+  kNotInUse = 'N',
+};
+
+// Replicates tempfile.gettempdir() in Python
+std::string TempDir();
+
+namespace cvd_impl {
+
+// This class is not thread safe.
+class LockFile {
+  friend class LockFileManager;
+
+ public:
+  const auto& LockFilePath() const { return lock_file_path_; }
+  Result<InUseState> Status() const;
+  Result<void> Status(InUseState);
+
+  // to put this into a set
+  bool operator<(const LockFile& other) const;
+
+ private:
+  LockFile(SharedFD fd, const std::string& lock_file_path);
+
+  SharedFD fd_;
+  const std::string lock_file_path_;
+};
+
+class LockFileManager {
+ public:
+  LockFileManager() = default;
+
+  Result<LockFile> AcquireLock(const std::string& lock_file_path);
+  Result<std::set<LockFile>> AcquireLocks(
+      const std::set<std::string>& lock_file_paths);
+
+  Result<std::optional<LockFile>> TryAcquireLock(
+      const std::string& lock_file_path);
+  Result<std::set<LockFile>> TryAcquireLocks(
+      const std::set<std::string>& lock_file_paths);
+
+  // Best-effort attempt to find a free instance id.
+  Result<std::optional<LockFile>> TryAcquireUnusedLock();
+
+  static Result<SharedFD> OpenLockFile(const std::string& file_path);
+};
+
+}  // namespace cvd_impl
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/logger.cpp b/host/commands/cvd/logger.cpp
new file mode 100644
index 0000000..565242d
--- /dev/null
+++ b/host/commands/cvd/logger.cpp
@@ -0,0 +1,97 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "host/commands/cvd/logger.h"
+
+#include <shared_mutex>
+#include <thread>
+#include <unordered_map>
+
+#include <android-base/logging.h>
+#include <android-base/threads.h>
+
+#include "common/libs/fs/shared_buf.h"
+#include "common/libs/utils/tee_logging.h"
+#include "host/commands/cvd/server_client.h"
+
+namespace cuttlefish {
+
+ServerLogger::ServerLogger() {
+  auto log_callback = [this](android::base::LogId log_buffer_id,
+                             android::base::LogSeverity severity,
+                             const char* tag, const char* file,
+                             unsigned int line, const char* message) {
+    auto thread_id = std::this_thread::get_id();
+    std::shared_lock lock(thread_loggers_lock_);
+    auto logger_it = thread_loggers_.find(thread_id);
+    if (logger_it == thread_loggers_.end()) {
+      return;
+    }
+    logger_it->second->LogMessage(log_buffer_id, severity, tag, file, line,
+                                  message);
+  };
+  android::base::SetLogger(log_callback);
+}
+
+ServerLogger::~ServerLogger() {
+  android::base::SetLogger(android::base::StderrLogger);
+}
+
+ServerLogger::ScopedLogger ServerLogger::LogThreadToFd(SharedFD target) {
+  return ScopedLogger(*this, std::move(target));
+}
+
+ServerLogger::ScopedLogger::ScopedLogger(ServerLogger& server_logger,
+                                         SharedFD target)
+    : server_logger_(server_logger), target_(std::move(target)) {
+  auto thread_id = std::this_thread::get_id();
+  std::unique_lock lock(server_logger_.thread_loggers_lock_);
+  server_logger_.thread_loggers_[thread_id] = this;
+}
+
+ServerLogger::ScopedLogger::ScopedLogger(
+    ServerLogger::ScopedLogger&& other) noexcept
+    : server_logger_(other.server_logger_), target_(std::move(other.target_)) {
+  auto thread_id = std::this_thread::get_id();
+  std::unique_lock lock(server_logger_.thread_loggers_lock_);
+  server_logger_.thread_loggers_[thread_id] = this;
+}
+
+ServerLogger::ScopedLogger::~ScopedLogger() {
+  auto thread_id = std::this_thread::get_id();
+  std::unique_lock lock(server_logger_.thread_loggers_lock_);
+  auto logger_it = server_logger_.thread_loggers_.find(thread_id);
+  if (logger_it == server_logger_.thread_loggers_.end()) {
+    return;
+  }
+  if (logger_it->second == this) {
+    server_logger_.thread_loggers_.erase(logger_it);
+  }
+}
+
+void ServerLogger::ScopedLogger::LogMessage(
+    android::base::LogId /* log_buffer_id */,
+    android::base::LogSeverity severity, const char* tag, const char* file,
+    unsigned int line, const char* message) {
+  time_t t = time(nullptr);
+  struct tm now;
+  localtime_r(&t, &now);
+  auto output_string =
+      StderrOutputGenerator(now, getpid(), android::base::GetThreadId(),
+                            severity, tag, file, line, message);
+  WriteAll(target_, output_string);
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/logger.h b/host/commands/cvd/logger.h
new file mode 100644
index 0000000..3e8009d
--- /dev/null
+++ b/host/commands/cvd/logger.h
@@ -0,0 +1,71 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#pragma once
+
+#include <shared_mutex>
+#include <thread>
+#include <unordered_map>
+
+#include <android-base/logging.h>
+#include <fruit/fruit.h>
+
+#include "common/libs/fs/shared_fd.h"
+
+namespace cuttlefish {
+
+/** Per-thread logging state manager class. */
+class ServerLogger {
+ public:
+  /**
+   * Thread-specific logger instance.
+   *
+   * When a `LOG(severity)` message is written on the same thread where this
+   * object was created, the message will be sent to the file descriptor stored
+   * in this object.
+   */
+  class ScopedLogger {
+   public:
+    friend ServerLogger;
+
+    ScopedLogger(ScopedLogger&&) noexcept;
+    ~ScopedLogger();
+
+   private:
+    ScopedLogger(ServerLogger&, SharedFD target);
+
+    /** Callback for `LOG(severity)` messages */
+    void LogMessage(android::base::LogId log_buffer_id,
+                    android::base::LogSeverity severity, const char* tag,
+                    const char* file, unsigned int line, const char* message);
+
+    ServerLogger& server_logger_;
+    SharedFD target_;
+  };
+  INJECT(ServerLogger());
+  ~ServerLogger();
+
+  /**
+   * Configure `LOG(severity)` messages to write to the given file descriptor
+   * for the lifetime of the returned object.
+   */
+  ScopedLogger LogThreadToFd(SharedFD);
+
+ private:
+  std::shared_mutex thread_loggers_lock_;
+  std::unordered_map<std::thread::id, ScopedLogger*> thread_loggers_;
+};
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/main.cc b/host/commands/cvd/main.cc
index 4e77852..532541f 100644
--- a/host/commands/cvd/main.cc
+++ b/host/commands/cvd/main.cc
@@ -14,356 +14,289 @@
  * limitations under the License.
  */
 
-#include <stdlib.h>
-#include <chrono>
+#include <algorithm>
 #include <iostream>
-#include <map>
+#include <iterator>
 #include <optional>
 #include <string>
-#include <thread>
 #include <vector>
 
 #include <android-base/file.h>
 #include <android-base/logging.h>
-#include <android-base/result.h>
-#include <build/version.h>
 
-#include "cvd_server.pb.h"
-
+#include "common/libs/fs/shared_buf.h"
 #include "common/libs/fs/shared_fd.h"
-#include "common/libs/utils/environment.h"
-#include "common/libs/utils/files.h"
+#include "common/libs/utils/contains.h"
 #include "common/libs/utils/flag_parser.h"
+#include "common/libs/utils/json.h"
 #include "common/libs/utils/result.h"
 #include "common/libs/utils/shared_fd_flag.h"
-#include "common/libs/utils/subprocess.h"
-#include "common/libs/utils/unix_sockets.h"
+#include "common/libs/utils/tee_logging.h"
+#include "host/commands/cvd/client.h"
+#include "host/commands/cvd/common_utils.h"
+#include "host/commands/cvd/fetch/fetch_cvd.h"
+#include "host/commands/cvd/frontline_parser.h"
+#include "host/commands/cvd/handle_reset.h"
+#include "host/commands/cvd/logger.h"
+#include "host/commands/cvd/reset_client_utils.h"
 #include "host/commands/cvd/server.h"
 #include "host/commands/cvd/server_constants.h"
-#include "host/libs/config/cuttlefish_config.h"
+#include "host/commands/cvd/types.h"
 #include "host/libs/config/host_tools_version.h"
 
 namespace cuttlefish {
 namespace {
 
-Result<SharedFD> ConnectToServer() {
-  auto connection =
-      SharedFD::SocketLocalClient(cvd::kServerSocketPath,
-                                  /*is_abstract=*/true, SOCK_SEQPACKET);
-  if (!connection->IsOpen()) {
-    auto connection =
-        SharedFD::SocketLocalClient(cvd::kServerSocketPath,
-                                    /*is_abstract=*/true, SOCK_STREAM);
+std::unordered_map<std::string, std::string> EnvVectorToMap(char** envp) {
+  std::unordered_map<std::string, std::string> env_map;
+  if (!envp) {
+    return env_map;
   }
-  if (!connection->IsOpen()) {
-    return CF_ERR("Failed to connect to server" << connection->StrError());
+  for (char** e = envp; *e != nullptr; e++) {
+    std::string env_var_val(*e);
+    auto tokens = android::base::Split(env_var_val, "=");
+    if (tokens.size() <= 1) {
+      LOG(WARNING) << "Environment var in unknown format: " << env_var_val;
+      continue;
+    }
+    const auto var = tokens.at(0);
+    tokens.erase(tokens.begin());
+    env_map[var] = android::base::Join(tokens, "=");
   }
-  return connection;
+  return env_map;
 }
 
-class CvdClient {
- public:
-  Result<void> EnsureCvdServerRunning(const std::string& host_tool_directory,
-                                      int num_retries = 1) {
-    cvd::Request request;
-    request.mutable_version_request();
-    auto response = SendRequest(request);
-
-    // If cvd_server is not running, start and wait before checking its version.
-    if (!response.ok()) {
-      CF_EXPECT(StartCvdServer(host_tool_directory));
-      response = CF_EXPECT(SendRequest(request));
-    }
-    CF_EXPECT(CheckStatus(response->status(), "GetVersion"));
-    CF_EXPECT(response->has_version_response(),
-              "GetVersion call missing VersionResponse.");
-
-    auto server_version = response->version_response().version();
-    if (server_version.major() != cvd::kVersionMajor) {
-      return CF_ERR("Major version difference: cvd("
-                    << cvd::kVersionMajor << "." << cvd::kVersionMinor
-                    << ") != cvd_server(" << server_version.major() << "."
-                    << server_version.minor()
-                    << "). Try `cvd kill-server` or `pkill cvd_server`.");
-    }
-    if (server_version.minor() < cvd::kVersionMinor) {
-      std::cerr << "Minor version of cvd_server is older than latest. "
-                << "Attempting to restart..." << std::endl;
-      CF_EXPECT(StopCvdServer(/*clear=*/false));
-      CF_EXPECT(StartCvdServer(host_tool_directory));
-      if (num_retries > 0) {
-        CF_EXPECT(EnsureCvdServerRunning(host_tool_directory, num_retries - 1));
-        return {};
-      } else {
-        return CF_ERR("Unable to start the cvd_server with version "
-                      << cvd::kVersionMajor << "." << cvd::kVersionMinor);
-      }
-    }
-    if (server_version.build() != android::build::GetBuildNumber()) {
-      LOG(VERBOSE) << "cvd_server client version ("
-                   << android::build::GetBuildNumber()
-                   << ") does not match server version ("
-                   << server_version.build() << std::endl;
-    }
-    auto self_crc32 = FileCrc("/proc/self/exe");
-    if (server_version.crc32() != self_crc32) {
-      LOG(VERBOSE) << "cvd_server client checksum (" << self_crc32
-                   << ") doesn't match server checksum ("
-                   << server_version.crc32() << std::endl;
-    }
-    return {};
-  }
-
-  Result<void> StopCvdServer(bool clear) {
-    if (!server_) {
-      // server_ may not represent a valid connection even while the server is
-      // running, if we haven't tried to connect. This establishes first whether
-      // the server is running.
-      auto connection_attempt = ConnectToServer();
-      if (!connection_attempt.ok()) {
-        return {};
-      }
-    }
-
-    cvd::Request request;
-    auto shutdown_request = request.mutable_shutdown_request();
-    if (clear) {
-      shutdown_request->set_clear(true);
-    }
-
-    // Send the server a pipe with the Shutdown request that it
-    // will close when it fully exits.
-    SharedFD read_pipe, write_pipe;
-    CF_EXPECT(cuttlefish::SharedFD::Pipe(&read_pipe, &write_pipe),
-              "Unable to create shutdown pipe: " << strerror(errno));
-
-    auto response = SendRequest(request, /*extra_fd=*/write_pipe);
-
-    // If the server is already not running then SendRequest will fail.
-    // We treat this as success.
-    if (!response.ok()) {
-      server_.reset();
-      return {};
-    }
-
-    CF_EXPECT(CheckStatus(response->status(), "Shutdown"));
-    CF_EXPECT(response->has_shutdown_response(),
-              "Shutdown call missing ShutdownResponse.");
-
-    // Clear out the server_ socket.
-    server_.reset();
-
-    // Close the write end of the pipe in this process. Now the only
-    // process that may have the write end still open is the cvd_server.
-    write_pipe->Close();
-
-    // Wait for the pipe to close by attempting to read from the pipe.
-    char buf[1];  // Any size >0 should work for read attempt.
-    CF_EXPECT(read_pipe->Read(buf, sizeof(buf)) <= 0,
-              "Unexpected read value from cvd_server shutdown pipe.");
-    return {};
-  }
-
-  Result<void> HandleCommand(std::vector<std::string> args,
-                             std::vector<std::string> env) {
-    cvd::Request request;
-    auto command_request = request.mutable_command_request();
-    for (const std::string& arg : args) {
-      command_request->add_args(arg);
-    }
-    for (const std::string& e : env) {
-      auto eq_pos = e.find('=');
-      if (eq_pos == std::string::npos) {
-        LOG(WARNING) << "Environment var in unknown format: " << e;
-        continue;
-      }
-      (*command_request->mutable_env())[e.substr(0, eq_pos)] =
-          e.substr(eq_pos + 1);
-    }
-    std::unique_ptr<char, void(*)(void*)> cwd(getcwd(nullptr, 0), &free);
-    command_request->set_working_directory(cwd.get());
-    command_request->set_wait_behavior(cvd::WAIT_BEHAVIOR_COMPLETE);
-
-    auto response = CF_EXPECT(SendRequest(request));
-    CF_EXPECT(CheckStatus(response.status(), "HandleCommand"));
-    CF_EXPECT(response.has_command_response(),
-              "HandleCommand call missing CommandResponse.");
-    return {};
-  }
-
- private:
-  std::optional<UnixMessageSocket> server_;
-
-  Result<void> SetServer(const SharedFD& server) {
-    CF_EXPECT(!server_, "Already have a server");
-    CF_EXPECT(server->IsOpen(), server->StrError());
-    server_ = UnixMessageSocket(server);
-    CF_EXPECT(server_->EnableCredentials(true).ok(),
-              "Unable to enable UnixMessageSocket credentials.");
-    return {};
-  }
-
-  Result<cvd::Response> SendRequest(const cvd::Request& request,
-                                    std::optional<SharedFD> extra_fd = {}) {
-    if (!server_) {
-      CF_EXPECT(SetServer(CF_EXPECT(ConnectToServer())));
-    }
-    // Serialize and send the request.
-    std::string serialized;
-    CF_EXPECT(request.SerializeToString(&serialized),
-              "Unable to serialize request proto.");
-    UnixSocketMessage request_message;
-
-    std::vector<SharedFD> control_fds = {
-        SharedFD::Dup(0),
-        SharedFD::Dup(1),
-        SharedFD::Dup(2),
-    };
-    if (extra_fd) {
-      control_fds.push_back(*extra_fd);
-    }
-    auto control = CF_EXPECT(ControlMessage::FromFileDescriptors(control_fds));
-    request_message.control.emplace_back(std::move(control));
-
-    request_message.data =
-        std::vector<char>(serialized.begin(), serialized.end());
-    CF_EXPECT(server_->WriteMessage(request_message));
-
-    // Read and parse the response.
-    auto read_result = CF_EXPECT(server_->ReadMessage());
-    serialized = std::string(read_result.data.begin(), read_result.data.end());
-    cvd::Response response;
-    CF_EXPECT(response.ParseFromString(serialized),
-              "Unable to parse serialized response proto.");
-    return response;
-  }
-
-  Result<void> StartCvdServer(const std::string& host_tool_directory) {
-    SharedFD server_fd =
-        SharedFD::SocketLocalServer(cvd::kServerSocketPath,
-                                    /*is_abstract=*/true, SOCK_SEQPACKET, 0666);
-    CF_EXPECT(server_fd->IsOpen(), server_fd->StrError());
-
-    // TODO(b/196114111): Investigate fully "daemonizing" the cvd_server.
-    CF_EXPECT(setenv("ANDROID_HOST_OUT", host_tool_directory.c_str(),
-                     /*overwrite=*/true) == 0);
-    Command command("/proc/self/exe");
-    command.AddParameter("-INTERNAL_server_fd=", server_fd);
-    SubprocessOptions options;
-    options.ExitWithParent(false);
-    command.Start(options);
-
-    // Connect to the server_fd, which waits for startup.
-    CF_EXPECT(SetServer(SharedFD::SocketLocalClient(cvd::kServerSocketPath,
-                                                    /*is_abstract=*/true,
-                                                    SOCK_SEQPACKET)));
-    return {};
-  }
-
-  Result<void> CheckStatus(const cvd::Status& status, const std::string& rpc) {
-    if (status.code() == cvd::Status::OK) {
-      return {};
-    }
-    return CF_ERR("Received error response for \"" << rpc << "\":\n"
-                                                   << status.message()
-                                                   << "\nIn client");
-  }
-};
-
-[[noreturn]] void CallPythonAcloud(std::vector<std::string>& args) {
-  auto android_top = StringFromEnv("ANDROID_BUILD_TOP", "");
-  if (android_top == "") {
-    LOG(FATAL) << "Could not find android environment. Please run "
-               << "\"source build/envsetup.sh\".";
-    abort();
-  }
-  // TODO(b/206893146): Detect what the platform actually is.
-  auto py_acloud_path =
-      android_top + "/prebuilts/asuite/acloud/linux-x86/acloud";
-  char** new_argv = new char*[args.size() + 1];
-  for (size_t i = 0; i < args.size(); i++) {
-    new_argv[i] = args[i].data();
-  }
-  new_argv[args.size()] = nullptr;
-  execv(py_acloud_path.data(), new_argv);
-  PLOG(FATAL) << "execv(" << py_acloud_path << ", ...) failed";
-  abort();
+bool IsServerModeExpected(const std::string& exec_file) {
+  return exec_file == kServerExecPath;
 }
 
-Result<int> CvdMain(int argc, char** argv, char** envp) {
-  android::base::InitLogging(argv, android::base::StderrLogger);
-
-  std::vector<std::string> args = ArgsToVec(argc, argv);
-  std::vector<Flag> flags;
-
-  CvdClient client;
-
-  // TODO(b/206893146): Make this decision inside the server.
-  if (args[0] == "acloud") {
-    auto server_running = client.EnsureCvdServerRunning(
-        android::base::Dirname(android::base::GetExecutableDirectory()));
-    if (server_running.ok()) {
-      // TODO(schuffelen): Deduplicate when calls to setenv are removed.
-      std::vector<std::string> env;
-      for (char** e = envp; *e != 0; e++) {
-        env.emplace_back(*e);
-      }
-      args[0] = "try-acloud";
-      auto attempt = client.HandleCommand(args, env);
-      if (attempt.ok()) {
-        args[0] = "acloud";
-        CF_EXPECT(client.HandleCommand(args, env));
-        return 0;
-      } else {
-        CallPythonAcloud(args);
-      }
-    } else {
-      // Something is wrong with the server, fall back to python acloud
-      CallPythonAcloud(args);
-    }
-  }
-  bool clean = false;
-  flags.emplace_back(GflagsCompatFlag("clean", clean));
+struct RunServerParam {
   SharedFD internal_server_fd;
-  flags.emplace_back(SharedFDFlag("INTERNAL_server_fd", internal_server_fd));
-
-  CF_EXPECT(ParseFlags(flags, args));
-
-  if (internal_server_fd->IsOpen()) {
-    return CF_EXPECT(CvdServerMain(internal_server_fd));
-  } else if (argv[0] == std::string("/proc/self/exe")) {
+  SharedFD carryover_client_fd;
+  std::optional<SharedFD> memory_carryover_fd;
+  /**
+   * Cvd server usually prints out in the client's stream. However,
+   * after Exec(), the client stdout and stderr becomes unreachable by
+   * LOG(ERROR), etc.
+   *
+   * Thus, in that case, the client fd is passed to print Exec() log
+   * on it.
+   *
+   */
+  SharedFD carryover_stderr_fd;
+};
+Result<void> RunServer(const RunServerParam& fds) {
+  if (!fds.internal_server_fd->IsOpen()) {
     return CF_ERR(
         "Expected to be in server mode, but didn't get a server "
         "fd: "
-        << internal_server_fd->StrError());
+        << fds.internal_server_fd->StrError());
+  }
+  std::unique_ptr<ServerLogger> server_logger =
+      std::make_unique<ServerLogger>();
+  CF_EXPECT(server_logger != nullptr, "ServerLogger memory allocation failed.");
+
+  std::unique_ptr<ServerLogger::ScopedLogger> scoped_logger;
+  if (fds.carryover_stderr_fd->IsOpen()) {
+    scoped_logger = std::make_unique<ServerLogger::ScopedLogger>(
+        std::move(server_logger->LogThreadToFd(fds.carryover_stderr_fd)));
+  }
+  if (fds.memory_carryover_fd && !(*fds.memory_carryover_fd)->IsOpen()) {
+    LOG(ERROR) << "Memory carryover file is supposed to be open but is not.";
+  }
+  CF_EXPECT(CvdServerMain({.internal_server_fd = fds.internal_server_fd,
+                           .carryover_client_fd = fds.carryover_client_fd,
+                           .memory_carryover_fd = fds.memory_carryover_fd,
+                           .server_logger = std::move(server_logger),
+                           .scoped_logger = std::move(scoped_logger)}));
+  return {};
+}
+
+struct ParseResult {
+  SharedFD internal_server_fd;
+  SharedFD carryover_client_fd;
+  std::optional<SharedFD> memory_carryover_fd;
+  SharedFD carryover_stderr_fd;
+};
+
+Result<ParseResult> ParseIfServer(std::vector<std::string>& all_args) {
+  std::vector<Flag> flags;
+  SharedFD internal_server_fd;
+  flags.emplace_back(SharedFDFlag("INTERNAL_server_fd", internal_server_fd));
+  SharedFD carryover_client_fd;
+  flags.emplace_back(
+      SharedFDFlag("INTERNAL_carryover_client_fd", carryover_client_fd));
+  SharedFD carryover_stderr_fd;
+  flags.emplace_back(
+      SharedFDFlag("INTERNAL_carryover_stderr_fd", carryover_stderr_fd));
+  SharedFD memory_carryover_fd;
+  flags.emplace_back(
+      SharedFDFlag("INTERNAL_memory_carryover_fd", memory_carryover_fd));
+  CF_EXPECT(ParseFlags(flags, all_args));
+  std::optional<SharedFD> memory_carryover_fd_opt;
+  if (memory_carryover_fd->IsOpen()) {
+    memory_carryover_fd_opt = std::move(memory_carryover_fd);
+  }
+  ParseResult result = {
+      .internal_server_fd = internal_server_fd,
+      .carryover_client_fd = carryover_client_fd,
+      .memory_carryover_fd = memory_carryover_fd_opt,
+      .carryover_stderr_fd = carryover_stderr_fd,
+  };
+  return {result};
+}
+
+Result<FlagCollection> CvdFlags() {
+  FlagCollection cvd_flags;
+  cvd_flags.EnrollFlag(CvdFlag<bool>("clean", false));
+  cvd_flags.EnrollFlag(CvdFlag<bool>("help", false));
+  return cvd_flags;
+}
+
+Result<bool> FilterDriverHelpOptions(const FlagCollection& cvd_flags,
+                                     cvd_common::Args& cvd_args) {
+  auto help_flag = CF_EXPECT(cvd_flags.GetFlag("help"));
+  bool is_help = CF_EXPECT(help_flag.CalculateFlag<bool>(cvd_args));
+  return is_help;
+}
+
+cvd_common::Args AllArgs(const std::string& prog_path,
+                         const cvd_common::Args& cvd_args,
+                         const std::optional<std::string>& subcmd,
+                         const cvd_common::Args& subcmd_args) {
+  std::vector<std::string> all_args;
+  all_args.push_back(prog_path);
+  all_args.insert(all_args.end(), cvd_args.begin(), cvd_args.end());
+  if (subcmd) {
+    all_args.push_back(*subcmd);
+  }
+  all_args.insert(all_args.end(), subcmd_args.begin(), subcmd_args.end());
+  return all_args;
+}
+
+struct ClientCommandCheckResult {
+  bool was_client_command_;
+  cvd_common::Args new_all_args;
+};
+Result<ClientCommandCheckResult> HandleClientCommands(
+    CvdClient& client, const cvd_common::Args& all_args) {
+  ClientCommandCheckResult output;
+  std::vector<std::string> client_internal_commands{"kill-server",
+                                                    "server-kill", "reset"};
+  FlagCollection cvd_flags = CF_EXPECT(CvdFlags());
+  FrontlineParser::ParserParam client_param{
+      .server_supported_subcmds = std::vector<std::string>{},
+      .internal_cmds = client_internal_commands,
+      .all_args = all_args,
+      .cvd_flags = cvd_flags};
+  auto client_parser_result = FrontlineParser::Parse(client_param);
+  if (!client_parser_result.ok()) {
+    return ClientCommandCheckResult{.was_client_command_ = false,
+                                    .new_all_args = all_args};
+  }
+
+  auto client_parser = std::move(*client_parser_result);
+  CF_EXPECT(client_parser != nullptr);
+  auto cvd_args = client_parser->CvdArgs();
+  auto is_help = CF_EXPECT(FilterDriverHelpOptions(cvd_flags, cvd_args));
+  output.new_all_args =
+      AllArgs(client_parser->ProgPath(), cvd_args, client_parser->SubCmd(),
+              client_parser->SubCmdArgs());
+  output.was_client_command_ = (!is_help && client_parser->SubCmd());
+  if (!output.was_client_command_) {
+    // could be simply "cvd"
+    output.new_all_args = cvd_common::Args{"cvd", "help"};
+    return output;
   }
 
   // Special case for `cvd kill-server`, handled by directly
   // stopping the cvd_server.
-  if (argc > 1 && strcmp("kill-server", argv[1]) == 0) {
+  std::vector<std::string> kill_server_cmds{"kill-server", "server-kill"};
+  std::string subcmd = client_parser->SubCmd().value_or("");
+  if (Contains(kill_server_cmds, subcmd)) {
     CF_EXPECT(client.StopCvdServer(/*clear=*/true));
-    return 0;
+    return output;
+  }
+  CF_EXPECT_EQ(subcmd, "reset", "unsupported subcmd: " << subcmd);
+  CF_EXPECT(HandleReset(client, client_parser->SubCmdArgs()));
+  return output;
+}
+
+Result<void> CvdMain(int argc, char** argv, char** envp) {
+  android::base::InitLogging(argv, android::base::StderrLogger);
+
+  cvd_common::Args all_args = ArgsToVec(argc, argv);
+  CF_EXPECT(!all_args.empty());
+
+  auto env = EnvVectorToMap(envp);
+
+  if (android::base::Basename(all_args[0]) == "fetch_cvd") {
+    CF_EXPECT(FetchCvdMain(argc, argv));
+    return {};
   }
 
-  // Special case for --clean flag, used to clear any existing state.
-  if (clean) {
-    LOG(INFO) << "cvd invoked with --clean; "
-              << "stopping the cvd_server before continuing.";
-    CF_EXPECT(client.StopCvdServer(/*clear=*/true));
+  CvdClient client;
+  // TODO(b/206893146): Make this decision inside the server.
+  if (android::base::Basename(all_args[0]) == "acloud") {
+    return client.HandleAcloud(all_args, env);
   }
 
-  // Handle all remaining commands by forwarding them to the cvd_server.
-  CF_EXPECT(client.EnsureCvdServerRunning(android::base::Dirname(
-                android::base::GetExecutableDirectory())),
+  if (IsServerModeExpected(all_args[0])) {
+    auto parsed_fds = CF_EXPECT(ParseIfServer(all_args));
+
+    return RunServer({.internal_server_fd = parsed_fds.internal_server_fd,
+                      .carryover_client_fd = parsed_fds.carryover_client_fd,
+                      .memory_carryover_fd = parsed_fds.memory_carryover_fd,
+                      .carryover_stderr_fd = parsed_fds.carryover_stderr_fd});
+  }
+
+  CF_EXPECT_EQ(android::base::Basename(all_args[0]), "cvd");
+
+  // TODO(kwstephenkim): --help should be handled here.
+  // And, the FrontlineParser takes any positional argument as
+  // a valid subcommand.
+
+  auto [was_client_command, new_all_args] =
+      CF_EXPECT(HandleClientCommands(client, all_args));
+  if (was_client_command) {
+    return {};
+  }
+  /*
+   * For now, the parser needs a running server. The parser will
+   * be moved to the server side, and then it won't.
+   *
+   */
+  CF_EXPECT(client.ValidateServerVersion(),
             "Unable to ensure cvd_server is running.");
 
-  // TODO(schuffelen): Deduplicate when calls to setenv are removed.
-  std::vector<std::string> env;
-  for (char** e = envp; *e != 0; e++) {
-    env.emplace_back(*e);
+  std::vector<std::string> version_command{"version"};
+  FlagCollection cvd_flags = CF_EXPECT(CvdFlags());
+  FrontlineParser::ParserParam version_param{
+      .server_supported_subcmds = std::vector<std::string>{},
+      .internal_cmds = version_command,
+      .all_args = new_all_args,
+      .cvd_flags = cvd_flags};
+  auto version_parser_result = FrontlineParser::Parse(version_param);
+  if (version_parser_result.ok()) {
+    auto version_parser = std::move(*version_parser_result);
+    CF_EXPECT(version_parser != nullptr);
+    const auto subcmd = version_parser->SubCmd().value_or("");
+    if (subcmd == "version") {
+      auto version_msg = CF_EXPECT(client.HandleVersion());
+      std::cout << version_msg;
+      return {};
+    }
+    CF_EXPECT(subcmd.empty(),
+              "subcmd is expected to be \"\" but is " << subcmd);
   }
-  CF_EXPECT(client.HandleCommand(args, env));
-  return 0;
+
+  const cvd_common::Args new_cmd_args{"cvd", "process"};
+  CF_EXPECT(!new_all_args.empty());
+  const cvd_common::Args new_selector_args{new_all_args.begin(),
+                                           new_all_args.end()};
+  // TODO(schuffelen): Deduplicate when calls to setenv are removed.
+  CF_EXPECT(client.HandleCommand(new_cmd_args, env, new_selector_args));
+  return {};
 }
 
 }  // namespace
@@ -372,9 +305,9 @@
 int main(int argc, char** argv, char** envp) {
   auto result = cuttlefish::CvdMain(argc, argv, envp);
   if (result.ok()) {
-    return *result;
+    return 0;
   } else {
-    std::cerr << result.error() << std::endl;
+    std::cerr << result.error().Trace() << std::endl;
     return -1;
   }
 }
diff --git a/host/commands/cvd/parser/Android.bp b/host/commands/cvd/parser/Android.bp
new file mode 100644
index 0000000..fde5bfa
--- /dev/null
+++ b/host/commands/cvd/parser/Android.bp
@@ -0,0 +1,42 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+cc_library_host_static {
+    name: "libcvd_parser",
+    srcs: [
+        "instance/cf_vm_configs.cpp",
+        "instance/cf_boot_configs.cpp",
+        "instance/cf_security_configs.cpp",
+        "instance/cf_graphics_configs.cpp",
+        "instance/cf_metrics_configs.cpp",
+        "cf_configs_common.cpp",
+        "cf_configs_instances.cpp",
+        "launch_cvd_templates.cpp",
+        "launch_cvd_parser.cpp",
+        "cf_flags_validator.cpp",
+        "fetch_cvd_parser.cpp",
+        "load_configs_parser.cpp",
+    ],
+    static_libs: [
+        "libprotobuf-cpp-full",
+        "libcuttlefish_launch_cvd_proto",
+    ],
+    defaults: ["cvd_lib_defaults"],
+}
+
diff --git a/host/commands/cvd/parser/README.md b/host/commands/cvd/parser/README.md
new file mode 100644
index 0000000..c4b305f
--- /dev/null
+++ b/host/commands/cvd/parser/README.md
@@ -0,0 +1,33 @@
+Canonical configs user interface groups and classes
+
+# UI structure and categories
+[![UI structure diagram](./doc/linkage.png)](https://cs.android.com/android/platform/superproject/+/master:device/google/cuttlefish/host/commands/cvd/parser/doc/linkage.svg)
+
+## vm category
+[![vm category diagram](./doc/vm.png)](https://cs.android.com/android/platform/superproject/+/master:device/google/cuttlefish/host/commands/cvd/parser/doc/vm.svg)
+
+## graphics category
+[![graphics category diagram](./doc/graphics.png)](https://cs.android.com/android/platform/superproject/+/master:device/google/cuttlefish/host/commands/cvd/parser/doc/graphics.svg)
+
+## adb category
+[![adb category diagram](./doc/adb.png)](https://cs.android.com/android/platform/superproject/+/master:device/google/cuttlefish/host/commands/cvd/parser/doc/adb.svg)
+
+## streaming category
+[![streaming category diagram](./doc/streaming.png)](https://cs.android.com/android/platform/superproject/+/master:device/google/cuttlefish/host/commands/cvd/parser/doc/streaming.svg)
+
+## disk category
+[![disk category diagram](./doc/disk.png)](https://cs.android.com/android/platform/superproject/+/master:device/google/cuttlefish/host/commands/cvd/parser/doc/disk.svg)
+
+## connectivity category
+[![connectivity category diagram](./doc/connectivity.png)](https://cs.android.com/android/platform/superproject/+/master:device/google/cuttlefish/host/commands/cvd/parser/doc/connectivity.svg)
+
+## camera category
+[![camera category diagram](./doc/camera.png)](https://cs.android.com/android/platform/superproject/+/master:device/google/cuttlefish/host/commands/cvd/parser/doc/camera.svg)
+## audio category
+[![audio category diagram](./doc/audio.png)](https://cs.android.com/android/platform/superproject/+/master:device/google/cuttlefish/host/commands/cvd/parser/doc/audio.svg)
+
+## location category
+[![location category diagram](./doc/location.png)](https://cs.android.com/android/platform/superproject/+/master:device/google/cuttlefish/host/commands/cvd/parser/doc/location.svg)
+
+## vehicle category
+[![vehicle category diagram](./doc/vehicle.png)](https://cs.android.com/android/platform/superproject/+/master:device/google/cuttlefish/host/commands/cvd/parser/doc/vehicle.svg)
diff --git a/host/commands/cvd/parser/cf_configs_common.cpp b/host/commands/cvd/parser/cf_configs_common.cpp
new file mode 100644
index 0000000..6b71df5
--- /dev/null
+++ b/host/commands/cvd/parser/cf_configs_common.cpp
@@ -0,0 +1,272 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "host/commands/cvd/parser/cf_configs_common.h"
+
+#include <android-base/logging.h>
+namespace cuttlefish {
+
+/**
+ * Validates Json flag names and value types against the expected |map|.
+ */
+Result<void> ValidateTypo(const Json::Value& root,
+                          const std::map<std::string, Json::ValueType>& map) {
+  for (const std::string& flag : root.getMemberNames()) {
+    CF_EXPECT(map.count(flag) != 0, "Invalid flag name (typo), param \"" << flag << "\" not recognized");
+    CF_EXPECT(root[flag].isConvertibleTo(map.at(flag)), "Invalid flag type for \"" << flag << "\"");
+  }
+  return {};
+}
+
+Result<void> ValidateIntConfig(
+    const Json::Value& instances, const std::string& group,
+    const std::string& json_flag,
+    std::function<Result<void>(int)> validate_config) {
+  // Allocate and initialize with default values
+  int size = instances.size();
+  for (int i = 0; i < size; i++) {
+    if (instances[i].isMember(group) &&
+        (instances[i][group].isMember(json_flag))) {
+      int flag = instances[i][group][json_flag].asInt();
+      CF_EXPECT(validate_config(flag), "Invalid flag value \"" << flag << "\"");
+    }
+  }
+  return {};
+}
+
+Result<void> ValidateIntConfigSubGroup(
+    const Json::Value& instances, const std::string& group,
+    const std::string& subgroup, const std::string& json_flag,
+    std::function<Result<void>(int)> validate_config) {
+  // Allocate and initialize with default values
+  int size = instances.size();
+  for (int i = 0; i < size; i++) {
+    if (instances[i].isMember(group) &&
+        (instances[i][group].isMember(subgroup)) &&
+        (instances[i][group][subgroup].isMember(json_flag))) {
+      int flag = instances[i][group][subgroup][json_flag].asInt();
+      CF_EXPECT(validate_config(flag), "Invalid flag value \"" << flag << "\"");
+    }
+  }
+  return {};
+}
+
+Result<void> ValidateStringConfig(
+    const Json::Value& instances, const std::string& group,
+    const std::string& json_flag,
+    std::function<Result<void>(const std::string&)> validate_config) {
+  // Allocate and initialize with default values
+  int size = instances.size();
+  for (int i = 0; i < size; i++) {
+    if (instances[i].isMember(group) &&
+        (instances[i][group].isMember(json_flag))) {
+      // Validate input parameter
+      std::string flag = instances[i][group][json_flag].asString();
+      CF_EXPECT(validate_config(flag), "Invalid flag value \"" << flag << "\"");
+    }
+  }
+  return {};
+}
+
+Result<void> ValidateStringConfigSubGroup(
+    const Json::Value& instances, const std::string& group,
+    const std::string& subgroup, const std::string& json_flag,
+    std::function<Result<void>(const std::string&)> validate_config) {
+  // Validate only instances that actually set the flag (condition was inverted).
+  int size = instances.size();
+  for (int i = 0; i < size; i++) {
+    if (instances[i].isMember(group) &&
+        (instances[i][group].isMember(subgroup)) &&
+        (instances[i][group][subgroup].isMember(json_flag))) {
+      std::string flag = instances[i][group][subgroup][json_flag].asString();
+      CF_EXPECT(validate_config(flag), "Invalid flag value \"" << flag << "\"");
+    }
+  }
+  return {};
+}
+
+void InitIntConfig(Json::Value& instances, const std::string& group,
+                   const std::string& json_flag, int default_value) {
+  // Allocate and initialize with default values
+  int size = instances.size();
+  for (int i = 0; i < size; i++) {
+    if (!instances[i].isMember(group) ||
+        (!instances[i][group].isMember(json_flag))) {
+      instances[i][group][json_flag] = default_value;
+    }
+  }
+}
+
+void InitIntConfigSubGroup(Json::Value& instances, const std::string& group,
+                           const std::string& subgroup,
+                           const std::string& json_flag, int default_value) {
+  // Allocate and initialize with default values
+  int size = instances.size();
+  for (int i = 0; i < size; i++) {
+    if (!instances[i].isMember(group) ||
+        (!instances[i][group].isMember(subgroup)) ||
+        (!instances[i][group][subgroup].isMember(json_flag))) {
+      instances[i][group][subgroup][json_flag] = default_value;
+    }
+  }
+}
+
+void InitIntConfigSubGroupVector(Json::Value& instances,
+                                 const std::string& group,
+                                 const std::string& subgroup,
+                                 const std::string& json_flag,
+                                 int default_value) {
+  // Allocate and initialize with default values
+  for (int i = 0; i < instances.size(); i++) {
+    if (!instances[i].isMember(group) ||
+        (!instances[i][group].isMember(subgroup)) ||
+        (instances[i][group][subgroup].size() == 0)) {
+      instances[i][group][subgroup][0][json_flag] = default_value;
+
+    } else {
+      // Check the whole array
+      int vector_size = instances[i][group][subgroup].size();
+      for (int j = 0; j < vector_size; j++) {
+        if (!instances[i][group][subgroup][j].isMember(json_flag)) {
+          instances[i][group][subgroup][j][json_flag] = default_value;
+        }
+      }
+    }
+  }
+}
+
+void InitStringConfig(Json::Value& instances, const std::string& group,
+                      const std::string& json_flag, const std::string& default_value) {
+  // Allocate and initialize with default values
+  int size = instances.size();
+  for (int i = 0; i < size; i++) {
+    if (!instances[i].isMember(group) ||
+        (!instances[i][group].isMember(json_flag))) {
+      instances[i][group][json_flag] = default_value;
+    }
+  }
+}
+
+void InitStringConfigSubGroup(Json::Value& instances, const std::string& group,
+                              const std::string& subgroup,
+                              const std::string& json_flag,
+                              const std::string& default_value) {
+  // Allocate and initialize with default values
+  int size = instances.size();
+  for (int i = 0; i < size; i++) {
+    if (!instances[i].isMember(group) ||
+        (!instances[i][group].isMember(subgroup)) ||
+        (!instances[i][group][subgroup].isMember(json_flag))) {
+      instances[i][group][subgroup][json_flag] = default_value;
+    }
+  }
+}
+
+void InitBoolConfig(Json::Value& instances, const std::string& group,
+                    const std::string& json_flag, const bool default_value) {
+  // Allocate and initialize with default values
+  int size = instances.size();
+  for (int i = 0; i < size; i++) {
+    if (!instances[i].isMember(group) ||
+        (!instances[i][group].isMember(json_flag))) {
+      instances[i][group][json_flag] = default_value;
+    }
+  }
+}
+
+void InitBoolConfigSubGroup(Json::Value& instances, const std::string& group,
+                            const std::string& subgroup,
+                            const std::string& json_flag,
+                            const bool default_value) {
+  // Allocate and initialize with default values
+  int size = instances.size();
+  for (int i = 0; i < size; i++) {
+    if (!instances[i].isMember(group) ||
+        (!instances[i][group].isMember(subgroup)) ||
+        (!instances[i][group][subgroup].isMember(json_flag))) {
+      instances[i][group][subgroup][json_flag] = default_value;
+    }
+  }
+}
+
+// TODO(b/255384531) for using variadic functions
+
+std::string GenerateGflag(const Json::Value& instances,
+                          const std::string& gflag_name,
+                          const std::string& group,
+                          const std::string& json_flag) {
+  int size = instances.size();
+  std::stringstream buff;
+  // Append Header
+  buff << "--" << gflag_name << "=";
+  // Append values
+  for (int i = 0; i < size; i++) {
+    buff << instances[i][group][json_flag].asString();
+    if (i != size - 1) {
+      buff << ",";
+    }
+  }
+  return buff.str();
+}
+
+std::string GenerateGflagSubGroup(const Json::Value& instances,
+                                  const std::string& gflag_name,
+                                  const std::string& group,
+                                  const std::string& subgroup,
+                                  const std::string& json_flag) {
+  int size = instances.size();
+  std::stringstream buff;
+  // Append Header
+  buff << "--" << gflag_name << "=";
+  // Append values
+  for (int i = 0; i < size; i++) {
+    buff << instances[i][group][subgroup][json_flag].asString();
+    if (i != size - 1){ buff << ",";}
+  }
+  return buff.str();
+}
+
+std::vector<std::string> MergeResults(std::vector<std::string> first_list,
+                                      std::vector<std::string> second_list) {
+  std::vector<std::string> result;
+  result.reserve(first_list.size() + second_list.size());
+  result.insert(result.end(), first_list.begin(), first_list.end());
+  result.insert(result.end(), second_list.begin(), second_list.end());
+  return result;
+}
+
+/**
+ * @brief This function merges two json objects and override json tree in dst
+ * with src json keys
+ *
+ * @param dst : destination json object tree(modified in place)
+ * @param src : input json object tree to be merged
+ */
+void MergeTwoJsonObjs(Json::Value& dst, const Json::Value& src) {
+  // Merge all members of src into dst
+  for (const auto& key : src.getMemberNames()) {
+    if (src[key].type() == Json::arrayValue) {
+      for (int i = 0; i < src[key].size(); i++) {
+        MergeTwoJsonObjs(dst[key][i], src[key][i]);
+      }
+    } else if (src[key].type() == Json::objectValue) {
+      MergeTwoJsonObjs(dst[key], src[key]);
+    } else {
+      dst[key] = src[key];
+    }
+  }
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/parser/cf_configs_common.h b/host/commands/cvd/parser/cf_configs_common.h
new file mode 100644
index 0000000..9791711
--- /dev/null
+++ b/host/commands/cvd/parser/cf_configs_common.h
@@ -0,0 +1,94 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+#include <json/json.h>
+#include <iostream>
+
+#include "common/libs/utils/result.h"
+
+#define GENERATE_MVP_FLAGS_ONLY true
+
+namespace cuttlefish {
+
+Result<void> ValidateTypo(const Json::Value& root,
+                          const std::map<std::string, Json::ValueType>& map);
+
+Result<void> ValidateIntConfig(
+    const Json::Value& instances, const std::string& group,
+    const std::string& json_flag,
+    std::function<Result<void>(int)> validate_config);
+
+Result<void> ValidateIntConfigSubGroup(
+    const Json::Value& instances, const std::string& group,
+    const std::string& subgroup, const std::string& json_flag,
+    std::function<Result<void>(int)> validate_config);
+
+Result<void> ValidateStringConfig(
+    const Json::Value& instances, const std::string& group,
+    const std::string& json_flag,
+    std::function<Result<void>(const std::string&)> validate_config);
+
+Result<void> ValidateStringConfigSubGroup(
+    const Json::Value& instances, const std::string& group,
+    const std::string& subgroup, const std::string& json_flag,
+    std::function<Result<void>(const std::string&)> validate_config);
+
+void InitIntConfig(Json::Value& instances, const std::string& group,
+                   const std::string& json_flag, int default_value);
+
+void InitIntConfigSubGroup(Json::Value& instances, const std::string& group,
+                           const std::string& subgroup,
+                           const std::string& json_flag, int default_value);
+
+void InitIntConfigSubGroupVector(Json::Value& instances,
+                                 const std::string& group,
+                                 const std::string& subgroup,
+                                 const std::string& json_flag,
+                                 int default_value);
+
+void InitStringConfig(Json::Value& instances, const std::string& group,
+                      const std::string& json_flag, const std::string& default_value);
+
+void InitStringConfigSubGroup(Json::Value& instances, const std::string& group,
+                              const std::string& subgroup, const std::string& json_flag,
+                              const std::string& default_value);
+
+void InitBoolConfig(Json::Value& instances, const std::string& group,
+                    const std::string& json_flag, const bool default_value);
+
+void InitBoolConfigSubGroup(Json::Value& instances, const std::string& group,
+                            const std::string& subgroup,
+                            const std::string& json_flag,
+                            const bool default_value);
+
+std::string GenerateGflag(const Json::Value& instances,
+                          const std::string& gflag_name,
+                          const std::string& group,
+                          const std::string& json_flag);
+
+std::string GenerateGflagSubGroup(const Json::Value& instances,
+                                  const std::string& gflag_name,
+                                  const std::string& group,
+                                  const std::string& subgroup,
+                                  const std::string& json_flag);
+
+std::vector<std::string> MergeResults(std::vector<std::string> first_list,
+                                      std::vector<std::string> second_list);
+
+void MergeTwoJsonObjs(Json::Value& dst, const Json::Value& src);
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/parser/cf_configs_instances.cpp b/host/commands/cvd/parser/cf_configs_instances.cpp
new file mode 100644
index 0000000..bf3d9f9
--- /dev/null
+++ b/host/commands/cvd/parser/cf_configs_instances.cpp
@@ -0,0 +1,50 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/commands/cvd/parser/cf_configs_instances.h"
+
+#include <android-base/logging.h>
+#include <iostream>
+
+#include "host/commands/cvd/parser/cf_configs_common.h"
+#include "host/commands/cvd/parser/instance/cf_boot_configs.h"
+#include "host/commands/cvd/parser/instance/cf_graphics_configs.h"
+#include "host/commands/cvd/parser/instance/cf_metrics_configs.h"
+#include "host/commands/cvd/parser/instance/cf_security_configs.h"
+#include "host/commands/cvd/parser/instance/cf_vm_configs.h"
+
+namespace cuttlefish {
+
+void InitInstancesConfigs(Json::Value& root) {
+  InitVmConfigs(root);
+  InitBootConfigs(root);
+  InitSecurityConfigs(root);
+  InitGraphicsConfigs(root);
+}
+
+std::vector<std::string> GenerateInstancesFlags(const Json::Value& root) {
+  std::vector<std::string> result = GenerateVmFlags(root);
+  if (!GENERATE_MVP_FLAGS_ONLY) {
+    result = MergeResults(result, GenerateBootFlags(root));
+  }
+  result = MergeResults(result, GenerateSecurityFlags(root));
+  result = MergeResults(result, GenerateGraphicsFlags(root));
+  result = MergeResults(result, GenerateMetricsFlags(root));
+
+  return result;
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/parser/cf_configs_instances.h b/host/commands/cvd/parser/cf_configs_instances.h
new file mode 100644
index 0000000..1a756eb
--- /dev/null
+++ b/host/commands/cvd/parser/cf_configs_instances.h
@@ -0,0 +1,25 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+#include <json/json.h>
+
+namespace cuttlefish {
+
+void InitInstancesConfigs(Json::Value& root);
+std::vector<std::string> GenerateInstancesFlags(const Json::Value& root);
+
+};  // namespace cuttlefish
diff --git a/host/commands/cvd/parser/cf_flags_validator.cpp b/host/commands/cvd/parser/cf_flags_validator.cpp
new file mode 100644
index 0000000..bbf6881
--- /dev/null
+++ b/host/commands/cvd/parser/cf_flags_validator.cpp
@@ -0,0 +1,224 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <android-base/file.h>
+#include <gflags/gflags.h>
+
+#include <stdio.h>
+#include <fstream>
+#include <string>
+#include <unordered_set>
+
+#include "common/libs/utils/files.h"
+#include "common/libs/utils/flags_validator.h"
+#include "common/libs/utils/json.h"
+#include "host/commands/cvd/parser/cf_configs_common.h"
+
+namespace cuttlefish {
+
+// json main parameters definitions
+static std::map<std::string, Json::ValueType> kConfigsKeyMap = {
+    {"credential", Json::ValueType::stringValue},
+    {"netsim_bt", Json::ValueType::booleanValue},
+    {"instances", Json::ValueType::arrayValue}};
+
+// instance object parameters definitions
+static std::map<std::string, Json::ValueType> kInstanceKeyMap = {
+    {"@import", Json::ValueType::stringValue},
+    {"vm", Json::ValueType::objectValue},
+    {"boot", Json::ValueType::objectValue},
+    {"security", Json::ValueType::objectValue},
+    {"disk", Json::ValueType::objectValue},
+    {"graphics", Json::ValueType::objectValue},
+    {"camera", Json::ValueType::objectValue},
+    {"connectivity", Json::ValueType::objectValue},
+    {"audio", Json::ValueType::objectValue},
+    {"streaming", Json::ValueType::objectValue},
+    {"adb", Json::ValueType::objectValue},
+    {"vehicle", Json::ValueType::objectValue},
+    {"location", Json::ValueType::objectValue}};
+
+// supported import values for @import key
+static std::unordered_set<std::string> kSupportedImportValues = {
+    "phone", "tablet", "tv", "wearable", "auto", "slim", "go", "foldable"};
+
+// supported import values for vm category and crosvm subcategory
+static std::map<std::string, Json::ValueType> kVmKeyMap = {
+    {"cpus", Json::ValueType::uintValue},
+    {"memory_mb", Json::ValueType::uintValue},
+    {"use_sdcard", Json::ValueType::booleanValue},
+    {"setupwizard_mode", Json::ValueType::stringValue},
+    {"uuid", Json::ValueType::stringValue},
+    {"crosvm", Json::ValueType::objectValue},
+    {"qemu", Json::ValueType::objectValue},
+    {"gem5", Json::ValueType::objectValue},
+    {"custom_actions", Json::ValueType::arrayValue},
+};
+static std::map<std::string, Json::ValueType> kCrosvmKeyMap = {
+    {"enable_sandbox", Json::ValueType::booleanValue},
+};
+
+// supported import values for boot category and kernel subcategory
+static std::map<std::string, Json::ValueType> kBootKeyMap = {
+    {"extra_bootconfig_args", Json::ValueType::stringValue},
+    {"kernel", Json::ValueType::objectValue},
+    {"enable_bootanimation", Json::ValueType::booleanValue},
+};
+static std::map<std::string, Json::ValueType> kernelkeyMap = {
+    {"extra_kernel_cmdline", Json::ValueType::stringValue},
+};
+
+// supported import values for graphics category and displays subcategory
+static std::map<std::string, Json::ValueType> kGraphicsKeyMap = {
+    {"displays", Json::ValueType::arrayValue},
+};
+static std::map<std::string, Json::ValueType> kDisplayKeyMap = {
+    {"width", Json::ValueType::uintValue},
+    {"height", Json::ValueType::uintValue},
+    {"dpi", Json::ValueType::uintValue},
+    {"refresh_rate_hertz", Json::ValueType::uintValue},
+};
+
+// supported import values for security category
+static std::map<std::string, Json::ValueType> kSecurityKeyMap = {
+    {"serial_number", Json::ValueType::stringValue},
+    {"guest_enforce_security", Json::ValueType::booleanValue},
+};
+
+// supported import values for disk category
+static std::map<std::string, Json::ValueType> kDiskKeyMap = {
+    {"default_build", Json::ValueType::stringValue},
+    {"system_build", Json::ValueType::stringValue},
+    {"kernel_build", Json::ValueType::stringValue},
+};
+
+// Validate the security json parameters
+Result<void> ValidateSecurityConfigs(const Json::Value& root) {
+  CF_EXPECT(ValidateTypo(root, kSecurityKeyMap),
+            "ValidateSecurityConfigs ValidateTypo fail");
+  return {};
+}
+Result<void> ValidateDiskConfigs(const Json::Value& root) {
+  CF_EXPECT(ValidateTypo(root, kDiskKeyMap),
+            "ValidateDiskConfigs ValidateTypo fail");
+  return {};
+}
+
+// Validate the displays json parameters
+Result<void> ValidateDisplaysConfigs(const Json::Value& root) {
+  CF_EXPECT(ValidateTypo(root, kDisplayKeyMap),
+            "ValidateDisplaysConfigs ValidateTypo fail");
+  return {};
+}
+
+// Validate the graphics json parameters
+Result<void> ValidateGraphicsConfigs(const Json::Value& root) {
+  CF_EXPECT(ValidateTypo(root, kGraphicsKeyMap),
+            "ValidateGraphicsConfigs ValidateTypo fail");
+
+  if (root.isMember("displays") && root["displays"].size() != 0) {
+    int num_displays = root["displays"].size();
+    for (int i = 0; i < num_displays; i++) {
+      CF_EXPECT(ValidateDisplaysConfigs(root["displays"][i]),
+                "ValidateDisplaysConfigs fail");
+    }
+  }
+
+  return {};
+}
+
+// Validate the vm json parameters
+Result<void> ValidateVmConfigs(const Json::Value& root) {
+  CF_EXPECT(ValidateTypo(root, kVmKeyMap),
+            "ValidateVmConfigs ValidateTypo fail");
+  if (root.isMember("crosvm")) {
+    CF_EXPECT(ValidateTypo(root["crosvm"], kCrosvmKeyMap),
+              "ValidateVmConfigs ValidateTypo crosvm fail");
+  }
+  return {};
+}
+
+// Validate the kernel json parameters
+Result<void> ValidateKernelConfigs(const Json::Value& root) {
+  CF_EXPECT(ValidateTypo(root, kernelkeyMap),
+            "ValidateKernelConfigs ValidateTypo fail");
+  return {};
+}
+
+// Validate the boot json parameters
+Result<void> ValidateBootConfigs(const Json::Value& root) {
+  CF_EXPECT(ValidateTypo(root, kBootKeyMap),
+            "ValidateBootConfigs ValidateTypo fail");
+
+  if (root.isMember("kernel")) {
+    CF_EXPECT(ValidateKernelConfigs(root["kernel"]),
+              "ValidateKernelConfigs fail");
+  }
+
+  return {};
+}
+
+// Validate the instances json parameters
+Result<void> ValidateInstancesConfigs(const Json::Value& root) {
+  int num_instances = root.size();
+  for (unsigned int i = 0; i < num_instances; i++) {
+    CF_EXPECT(ValidateTypo(root[i], kInstanceKeyMap), "vm ValidateTypo fail");
+
+    if (root[i].isMember("vm")) {
+      CF_EXPECT(ValidateVmConfigs(root[i]["vm"]), "ValidateVmConfigs fail");
+    }
+
+    // Validate @import flag values are supported or not
+    if (root[i].isMember("@import")) {
+      CF_EXPECT(kSupportedImportValues.count(root[i]["@import"].asString()) > 0,
+                "@Import flag values are not supported");
+    }
+
+    if (root[i].isMember("boot")) {
+      CF_EXPECT(ValidateBootConfigs(root[i]["boot"]),
+                "ValidateBootConfigs fail");
+    }
+    if (root[i].isMember("security")) {
+      CF_EXPECT(ValidateSecurityConfigs(root[i]["security"]),
+                "ValidateSecurityConfigs fail");
+    }
+    if (root[i].isMember("disk")) {
+      CF_EXPECT(ValidateDiskConfigs(root[i]["disk"]),
+                "ValidateDiskConfigs fail");
+    }
+    if (root[i].isMember("graphics")) {
+      CF_EXPECT(ValidateGraphicsConfigs(root[i]["graphics"]),
+                "ValidateGraphicsConfigs fail");
+    }
+  }
+  CF_EXPECT(ValidateStringConfig(root, "vm", "setupwizard_mode",
+                                 ValidateStupWizardMode),
+            "Invalid value for setupwizard_mode flag");
+
+  return {};
+}
+
+// Validate cuttlefish config json parameters
+Result<void> ValidateCfConfigs(const Json::Value& root) {
+  CF_EXPECT(ValidateTypo(root, kConfigsKeyMap),
+            "Typo in config main parameters");
+  CF_EXPECT(root.isMember("instances"), "instances object is missing");
+  CF_EXPECT(ValidateInstancesConfigs(root["instances"]),
+            "ValidateInstancesConfigs failed");
+
+  return {};
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/parser/cf_flags_validator.h b/host/commands/cvd/parser/cf_flags_validator.h
new file mode 100644
index 0000000..86b8ec7
--- /dev/null
+++ b/host/commands/cvd/parser/cf_flags_validator.h
@@ -0,0 +1,24 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+#include <json/json.h>
+#include "common/libs/utils/result.h"
+
+namespace cuttlefish {
+Result<void> ValidateCfConfigs(const Json::Value& root);
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/parser/doc/adb.dot b/host/commands/cvd/parser/doc/adb.dot
new file mode 100644
index 0000000..06deeba
--- /dev/null
+++ b/host/commands/cvd/parser/doc/adb.dot
@@ -0,0 +1,10 @@
+graph {
+
+adb
+run_adb_connector
+mode
+
+adb--run_adb_connector
+adb--mode
+
+}
\ No newline at end of file
diff --git a/host/commands/cvd/parser/doc/adb.png b/host/commands/cvd/parser/doc/adb.png
new file mode 100644
index 0000000..4a0d68e
--- /dev/null
+++ b/host/commands/cvd/parser/doc/adb.png
Binary files differ
diff --git a/host/commands/cvd/parser/doc/adb.svg b/host/commands/cvd/parser/doc/adb.svg
new file mode 100644
index 0000000..9e32683
--- /dev/null
+++ b/host/commands/cvd/parser/doc/adb.svg
@@ -0,0 +1,41 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN"
+ "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
+<!-- Generated by graphviz version 2.43.0 (0)
+ -->
+<!-- Title: %3 Pages: 1 -->
+<svg width="244pt" height="116pt"
+ viewBox="0.00 0.00 244.19 116.00" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
+<g id="graph0" class="graph" transform="scale(1 1) rotate(0) translate(4 112)">
+<title>%3</title>
+<polygon fill="white" stroke="transparent" points="-4,4 -4,-112 240.19,-112 240.19,4 -4,4"/>
+<!-- adb -->
+<g id="node1" class="node">
+<title>adb</title>
+<ellipse fill="none" stroke="black" cx="141.64" cy="-90" rx="27" ry="18"/>
+<text text-anchor="middle" x="141.64" y="-86.3" font-family="Times,serif" font-size="14.00">adb</text>
+</g>
+<!-- run_adb_connector -->
+<g id="node2" class="node">
+<title>run_adb_connector</title>
+<ellipse fill="none" stroke="black" cx="78.64" cy="-18" rx="78.79" ry="18"/>
+<text text-anchor="middle" x="78.64" y="-14.3" font-family="Times,serif" font-size="14.00">run_adb_connector</text>
+</g>
+<!-- adb&#45;&#45;run_adb_connector -->
+<g id="edge1" class="edge">
+<title>adb&#45;&#45;run_adb_connector</title>
+<path fill="none" stroke="black" d="M128.28,-74.15C118.17,-62.92 104.27,-47.48 93.75,-35.79"/>
+</g>
+<!-- mode -->
+<g id="node3" class="node">
+<title>mode</title>
+<ellipse fill="none" stroke="black" cx="205.64" cy="-18" rx="30.59" ry="18"/>
+<text text-anchor="middle" x="205.64" y="-14.3" font-family="Times,serif" font-size="14.00">mode</text>
+</g>
+<!-- adb&#45;&#45;mode -->
+<g id="edge2" class="edge">
+<title>adb&#45;&#45;mode</title>
+<path fill="none" stroke="black" d="M155.22,-74.15C165.93,-62.44 180.83,-46.14 191.65,-34.31"/>
+</g>
+</g>
+</svg>
diff --git a/host/commands/cvd/parser/doc/audio.dot b/host/commands/cvd/parser/doc/audio.dot
new file mode 100644
index 0000000..eeafd8e
--- /dev/null
+++ b/host/commands/cvd/parser/doc/audio.dot
@@ -0,0 +1,6 @@
+graph {
+  rankdir=LR
+
+audio--enable
+
+}
\ No newline at end of file
diff --git a/host/commands/cvd/parser/doc/audio.png b/host/commands/cvd/parser/doc/audio.png
new file mode 100644
index 0000000..7a7f549
--- /dev/null
+++ b/host/commands/cvd/parser/doc/audio.png
Binary files differ
diff --git a/host/commands/cvd/parser/doc/audio.svg b/host/commands/cvd/parser/doc/audio.svg
new file mode 100644
index 0000000..b265f3f
--- /dev/null
+++ b/host/commands/cvd/parser/doc/audio.svg
@@ -0,0 +1,30 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN"
+ "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
+<!-- Generated by graphviz version 2.43.0 (0)
+ -->
+<!-- Title: %3 Pages: 1 -->
+<svg width="173pt" height="44pt"
+ viewBox="0.00 0.00 172.69 44.00" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
+<g id="graph0" class="graph" transform="scale(1 1) rotate(0) translate(4 40)">
+<title>%3</title>
+<polygon fill="white" stroke="transparent" points="-4,4 -4,-40 168.69,-40 168.69,4 -4,4"/>
+<!-- audio -->
+<g id="node1" class="node">
+<title>audio</title>
+<ellipse fill="none" stroke="black" cx="30.55" cy="-18" rx="30.59" ry="18"/>
+<text text-anchor="middle" x="30.55" y="-14.3" font-family="Times,serif" font-size="14.00">audio</text>
+</g>
+<!-- enable -->
+<g id="node2" class="node">
+<title>enable</title>
+<ellipse fill="none" stroke="black" cx="130.89" cy="-18" rx="33.6" ry="18"/>
+<text text-anchor="middle" x="130.89" y="-14.3" font-family="Times,serif" font-size="14.00">enable</text>
+</g>
+<!-- audio&#45;&#45;enable -->
+<g id="edge1" class="edge">
+<title>audio&#45;&#45;enable</title>
+<path fill="none" stroke="black" d="M61.32,-18C72.51,-18 85.31,-18 96.78,-18"/>
+</g>
+</g>
+</svg>
diff --git a/host/commands/cvd/parser/doc/camera.dot b/host/commands/cvd/parser/doc/camera.dot
new file mode 100644
index 0000000..78bb75f
--- /dev/null
+++ b/host/commands/cvd/parser/doc/camera.dot
@@ -0,0 +1,4 @@
+graph {
+  rankdir=LR
+camera--camera_server_port
+}
\ No newline at end of file
diff --git a/host/commands/cvd/parser/doc/camera.png b/host/commands/cvd/parser/doc/camera.png
new file mode 100644
index 0000000..83c0a63
--- /dev/null
+++ b/host/commands/cvd/parser/doc/camera.png
Binary files differ
diff --git a/host/commands/cvd/parser/doc/camera.svg b/host/commands/cvd/parser/doc/camera.svg
new file mode 100644
index 0000000..1a9e45e
--- /dev/null
+++ b/host/commands/cvd/parser/doc/camera.svg
@@ -0,0 +1,30 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN"
+ "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
+<!-- Generated by graphviz version 2.43.0 (0)
+ -->
+<!-- Title: %3 Pages: 1 -->
+<svg width="279pt" height="44pt"
+ viewBox="0.00 0.00 279.28 44.00" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
+<g id="graph0" class="graph" transform="scale(1 1) rotate(0) translate(4 40)">
+<title>%3</title>
+<polygon fill="white" stroke="transparent" points="-4,4 -4,-40 275.28,-40 275.28,4 -4,4"/>
+<!-- camera -->
+<g id="node1" class="node">
+<title>camera</title>
+<ellipse fill="none" stroke="black" cx="36.4" cy="-18" rx="36.29" ry="18"/>
+<text text-anchor="middle" x="36.4" y="-14.3" font-family="Times,serif" font-size="14.00">camera</text>
+</g>
+<!-- camera_server_port -->
+<g id="node2" class="node">
+<title>camera_server_port</title>
+<ellipse fill="none" stroke="black" cx="190.04" cy="-18" rx="81.49" ry="18"/>
+<text text-anchor="middle" x="190.04" y="-14.3" font-family="Times,serif" font-size="14.00">camera_server_port</text>
+</g>
+<!-- camera&#45;&#45;camera_server_port -->
+<g id="edge1" class="edge">
+<title>camera&#45;&#45;camera_server_port</title>
+<path fill="none" stroke="black" d="M72.95,-18C83.8,-18 96.21,-18 108.78,-18"/>
+</g>
+</g>
+</svg>
diff --git a/host/commands/cvd/parser/doc/connectivity.dot b/host/commands/cvd/parser/doc/connectivity.dot
new file mode 100644
index 0000000..723eb14
--- /dev/null
+++ b/host/commands/cvd/parser/doc/connectivity.dot
@@ -0,0 +1,14 @@
+graph {
+  rankdir=LR
+
+connectivity--ril_dns
+connectivity--wifi
+    wifi--ap_kernel_image
+    wifi--ap_rootfs_image
+    wifi--vhost_net
+connectivity--bluetooth
+connectivity--modem_simulator
+    modem_simulator--enable
+    modem_simulator--sim_type
+    modem_simulator--count
+}
\ No newline at end of file
diff --git a/host/commands/cvd/parser/doc/connectivity.png b/host/commands/cvd/parser/doc/connectivity.png
new file mode 100644
index 0000000..3c73606
--- /dev/null
+++ b/host/commands/cvd/parser/doc/connectivity.png
Binary files differ
diff --git a/host/commands/cvd/parser/doc/connectivity.svg b/host/commands/cvd/parser/doc/connectivity.svg
new file mode 100644
index 0000000..921565d
--- /dev/null
+++ b/host/commands/cvd/parser/doc/connectivity.svg
@@ -0,0 +1,129 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN"
+ "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
+<!-- Generated by graphviz version 2.43.0 (0)
+ -->
+<!-- Title: %3 Pages: 1 -->
+<svg width="483pt" height="314pt"
+ viewBox="0.00 0.00 482.96 314.00" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
+<g id="graph0" class="graph" transform="scale(1 1) rotate(0) translate(4 310)">
+<title>%3</title>
+<polygon fill="white" stroke="transparent" points="-4,4 -4,-310 478.96,-310 478.96,4 -4,4"/>
+<!-- connectivity -->
+<g id="node1" class="node">
+<title>connectivity</title>
+<ellipse fill="none" stroke="black" cx="53.95" cy="-193" rx="53.89" ry="18"/>
+<text text-anchor="middle" x="53.95" y="-189.3" font-family="Times,serif" font-size="14.00">connectivity</text>
+</g>
+<!-- ril_dns -->
+<g id="node2" class="node">
+<title>ril_dns</title>
+<ellipse fill="none" stroke="black" cx="219.93" cy="-274" rx="35.19" ry="18"/>
+<text text-anchor="middle" x="219.93" y="-270.3" font-family="Times,serif" font-size="14.00">ril_dns</text>
+</g>
+<!-- connectivity&#45;&#45;ril_dns -->
+<g id="edge1" class="edge">
+<title>connectivity&#45;&#45;ril_dns</title>
+<path fill="none" stroke="black" d="M78.75,-209.26C96.26,-220.76 120.91,-236.02 143.89,-247 158.51,-253.98 175.44,-260.22 189.52,-264.94"/>
+</g>
+<!-- wifi -->
+<g id="node3" class="node">
+<title>wifi</title>
+<ellipse fill="none" stroke="black" cx="219.93" cy="-220" rx="27" ry="18"/>
+<text text-anchor="middle" x="219.93" y="-216.3" font-family="Times,serif" font-size="14.00">wifi</text>
+</g>
+<!-- connectivity&#45;&#45;wifi -->
+<g id="edge2" class="edge">
+<title>connectivity&#45;&#45;wifi</title>
+<path fill="none" stroke="black" d="M102.82,-200.88C132.5,-205.77 169.45,-211.85 193.59,-215.83"/>
+</g>
+<!-- bluetooth -->
+<g id="node7" class="node">
+<title>bluetooth</title>
+<ellipse fill="none" stroke="black" cx="219.93" cy="-166" rx="44.39" ry="18"/>
+<text text-anchor="middle" x="219.93" y="-162.3" font-family="Times,serif" font-size="14.00">bluetooth</text>
+</g>
+<!-- connectivity&#45;&#45;bluetooth -->
+<g id="edge6" class="edge">
+<title>connectivity&#45;&#45;bluetooth</title>
+<path fill="none" stroke="black" d="M102.82,-185.12C126.93,-181.15 155.82,-176.39 178.76,-172.61"/>
+</g>
+<!-- modem_simulator -->
+<g id="node8" class="node">
+<title>modem_simulator</title>
+<ellipse fill="none" stroke="black" cx="219.93" cy="-99" rx="76.09" ry="18"/>
+<text text-anchor="middle" x="219.93" y="-95.3" font-family="Times,serif" font-size="14.00">modem_simulator</text>
+</g>
+<!-- connectivity&#45;&#45;modem_simulator -->
+<g id="edge7" class="edge">
+<title>connectivity&#45;&#45;modem_simulator</title>
+<path fill="none" stroke="black" d="M80.41,-177.02C98.07,-166.06 122.23,-151.34 143.89,-139 158,-130.96 173.84,-122.45 187.32,-115.35"/>
+</g>
+<!-- ap_kernel_image -->
+<g id="node4" class="node">
+<title>ap_kernel_image</title>
+<ellipse fill="none" stroke="black" cx="403.47" cy="-288" rx="71.49" ry="18"/>
+<text text-anchor="middle" x="403.47" y="-284.3" font-family="Times,serif" font-size="14.00">ap_kernel_image</text>
+</g>
+<!-- wifi&#45;&#45;ap_kernel_image -->
+<g id="edge3" class="edge">
+<title>wifi&#45;&#45;ap_kernel_image</title>
+<path fill="none" stroke="black" d="M244.31,-228.24C259.15,-233.51 278.73,-240.55 295.98,-247 312.06,-253.02 315.96,-254.82 331.98,-261 342.12,-264.92 353.1,-269.13 363.31,-273.04"/>
+</g>
+<!-- ap_rootfs_image -->
+<g id="node5" class="node">
+<title>ap_rootfs_image</title>
+<ellipse fill="none" stroke="black" cx="403.47" cy="-234" rx="70.39" ry="18"/>
+<text text-anchor="middle" x="403.47" y="-230.3" font-family="Times,serif" font-size="14.00">ap_rootfs_image</text>
+</g>
+<!-- wifi&#45;&#45;ap_rootfs_image -->
+<g id="edge4" class="edge">
+<title>wifi&#45;&#45;ap_rootfs_image</title>
+<path fill="none" stroke="black" d="M246.94,-222.01C270.08,-223.79 304.93,-226.48 335.78,-228.86"/>
+</g>
+<!-- vhost_net -->
+<g id="node6" class="node">
+<title>vhost_net</title>
+<ellipse fill="none" stroke="black" cx="403.47" cy="-180" rx="44.69" ry="18"/>
+<text text-anchor="middle" x="403.47" y="-176.3" font-family="Times,serif" font-size="14.00">vhost_net</text>
+</g>
+<!-- wifi&#45;&#45;vhost_net -->
+<g id="edge5" class="edge">
+<title>wifi&#45;&#45;vhost_net</title>
+<path fill="none" stroke="black" d="M245.79,-214.52C276.19,-207.82 327.97,-196.41 363.64,-188.56"/>
+</g>
+<!-- enable -->
+<g id="node9" class="node">
+<title>enable</title>
+<ellipse fill="none" stroke="black" cx="403.47" cy="-126" rx="33.6" ry="18"/>
+<text text-anchor="middle" x="403.47" y="-122.3" font-family="Times,serif" font-size="14.00">enable</text>
+</g>
+<!-- modem_simulator&#45;&#45;enable -->
+<g id="edge8" class="edge">
+<title>modem_simulator&#45;&#45;enable</title>
+<path fill="none" stroke="black" d="M284.68,-108.48C313.7,-112.8 346.76,-117.71 370.46,-121.24"/>
+</g>
+<!-- sim_type -->
+<g id="node10" class="node">
+<title>sim_type</title>
+<ellipse fill="none" stroke="black" cx="403.47" cy="-72" rx="43.59" ry="18"/>
+<text text-anchor="middle" x="403.47" y="-68.3" font-family="Times,serif" font-size="14.00">sim_type</text>
+</g>
+<!-- modem_simulator&#45;&#45;sim_type -->
+<g id="edge9" class="edge">
+<title>modem_simulator&#45;&#45;sim_type</title>
+<path fill="none" stroke="black" d="M284.68,-89.52C310.43,-85.69 339.36,-81.39 362.11,-78"/>
+</g>
+<!-- count -->
+<g id="node11" class="node">
+<title>count</title>
+<ellipse fill="none" stroke="black" cx="403.47" cy="-18" rx="30.59" ry="18"/>
+<text text-anchor="middle" x="403.47" y="-14.3" font-family="Times,serif" font-size="14.00">count</text>
+</g>
+<!-- modem_simulator&#45;&#45;count -->
+<g id="edge10" class="edge">
+<title>modem_simulator&#45;&#45;count</title>
+<path fill="none" stroke="black" d="M252.66,-82.66C274.69,-71.54 304.84,-56.77 331.98,-45 346.65,-38.64 363.34,-32.21 376.8,-27.22"/>
+</g>
+</g>
+</svg>
diff --git a/host/commands/cvd/parser/doc/disk.dot b/host/commands/cvd/parser/doc/disk.dot
new file mode 100644
index 0000000..3682004
--- /dev/null
+++ b/host/commands/cvd/parser/doc/disk.dot
@@ -0,0 +1,33 @@
+graph {
+  rankdir=LR
+
+disk--bootloader
+    bootloader--boot_slot
+    bootloader--build
+    bootloader--pause
+disk--boot
+    boot_build [label = "build"]
+    boot--boot_build
+    boot--kernel
+    boot--initramfs
+    boot--extra_cmdline
+    boot--extra_bootconfig
+disk--data
+    data--format
+    data--size
+disk--metadata
+    metadata_size [label = "size"]
+    metadata--metadata_size
+disk--misc
+    misc_size [label = "size"]
+    misc--misc_size
+disk--otheros
+    otheros--esp_image
+    otheros--initramfs_path
+    otheros--kernel_path
+    otheros--root_image
+disk--super
+    super_build [label = "build"]
+    super--system
+    super--super_build
+}
\ No newline at end of file
diff --git a/host/commands/cvd/parser/doc/disk.png b/host/commands/cvd/parser/doc/disk.png
new file mode 100644
index 0000000..9979454
--- /dev/null
+++ b/host/commands/cvd/parser/doc/disk.png
Binary files differ
diff --git a/host/commands/cvd/parser/doc/disk.svg b/host/commands/cvd/parser/doc/disk.svg
new file mode 100644
index 0000000..016744d
--- /dev/null
+++ b/host/commands/cvd/parser/doc/disk.svg
@@ -0,0 +1,294 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN"
+ "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
+<!-- Generated by graphviz version 2.43.0 (0)
+ -->
+<!-- Title: %3 Pages: 1 -->
+<svg width="372pt" height="962pt"
+ viewBox="0.00 0.00 371.88 962.00" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
+<g id="graph0" class="graph" transform="scale(1 1) rotate(0) translate(4 958)">
+<title>%3</title>
+<polygon fill="white" stroke="transparent" points="-4,4 -4,-958 367.88,-958 367.88,4 -4,4"/>
+<!-- disk -->
+<g id="node1" class="node">
+<title>disk</title>
+<ellipse fill="none" stroke="black" cx="27" cy="-396" rx="27" ry="18"/>
+<text text-anchor="middle" x="27" y="-392.3" font-family="Times,serif" font-size="14.00">disk</text>
+</g>
+<!-- bootloader -->
+<g id="node2" class="node">
+<title>bootloader</title>
+<ellipse fill="none" stroke="black" cx="138.1" cy="-855" rx="48.19" ry="18"/>
+<text text-anchor="middle" x="138.1" y="-851.3" font-family="Times,serif" font-size="14.00">bootloader</text>
+</g>
+<!-- disk&#45;&#45;bootloader -->
+<g id="edge1" class="edge">
+<title>disk&#45;&#45;bootloader</title>
+<path fill="none" stroke="black" d="M31.95,-413.72C41.6,-456.91 67.27,-571.05 90,-666 105.08,-728.98 123.95,-803.49 132.5,-837.05"/>
+</g>
+<!-- boot -->
+<g id="node6" class="node">
+<title>boot</title>
+<ellipse fill="none" stroke="black" cx="138.1" cy="-639" rx="27" ry="18"/>
+<text text-anchor="middle" x="138.1" y="-635.3" font-family="Times,serif" font-size="14.00">boot</text>
+</g>
+<!-- disk&#45;&#45;boot -->
+<g id="edge5" class="edge">
+<title>disk&#45;&#45;boot</title>
+<path fill="none" stroke="black" d="M35.67,-413.09C55.7,-457.7 109.41,-577.34 129.43,-621.93"/>
+</g>
+<!-- data -->
+<g id="node12" class="node">
+<title>data</title>
+<ellipse fill="none" stroke="black" cx="138.1" cy="-450" rx="27" ry="18"/>
+<text text-anchor="middle" x="138.1" y="-446.3" font-family="Times,serif" font-size="14.00">data</text>
+</g>
+<!-- disk&#45;&#45;data -->
+<g id="edge11" class="edge">
+<title>disk&#45;&#45;data</title>
+<path fill="none" stroke="black" d="M49.03,-406.41C68.23,-415.91 96.46,-429.89 115.75,-439.44"/>
+</g>
+<!-- metadata -->
+<g id="node15" class="node">
+<title>metadata</title>
+<ellipse fill="none" stroke="black" cx="138.1" cy="-396" rx="42.79" ry="18"/>
+<text text-anchor="middle" x="138.1" y="-392.3" font-family="Times,serif" font-size="14.00">metadata</text>
+</g>
+<!-- disk&#45;&#45;metadata -->
+<g id="edge14" class="edge">
+<title>disk&#45;&#45;metadata</title>
+<path fill="none" stroke="black" d="M54.13,-396C66.36,-396 81.23,-396 94.99,-396"/>
+</g>
+<!-- misc -->
+<g id="node17" class="node">
+<title>misc</title>
+<ellipse fill="none" stroke="black" cx="138.1" cy="-342" rx="27.9" ry="18"/>
+<text text-anchor="middle" x="138.1" y="-338.3" font-family="Times,serif" font-size="14.00">misc</text>
+</g>
+<!-- disk&#45;&#45;misc -->
+<g id="edge16" class="edge">
+<title>disk&#45;&#45;misc</title>
+<path fill="none" stroke="black" d="M49.03,-385.59C68.11,-376.15 96.1,-362.29 115.38,-352.75"/>
+</g>
+<!-- otheros -->
+<g id="node19" class="node">
+<title>otheros</title>
+<ellipse fill="none" stroke="black" cx="138.1" cy="-234" rx="36.29" ry="18"/>
+<text text-anchor="middle" x="138.1" y="-230.3" font-family="Times,serif" font-size="14.00">otheros</text>
+</g>
+<!-- disk&#45;&#45;otheros -->
+<g id="edge18" class="edge">
+<title>disk&#45;&#45;otheros</title>
+<path fill="none" stroke="black" d="M39.06,-379.58C59.86,-348.69 104.61,-282.24 125.68,-250.95"/>
+</g>
+<!-- super -->
+<g id="node24" class="node">
+<title>super</title>
+<ellipse fill="none" stroke="black" cx="138.1" cy="-72" rx="29.8" ry="18"/>
+<text text-anchor="middle" x="138.1" y="-68.3" font-family="Times,serif" font-size="14.00">super</text>
+</g>
+<!-- disk&#45;&#45;super -->
+<g id="edge23" class="edge">
+<title>disk&#45;&#45;super</title>
+<path fill="none" stroke="black" d="M33.62,-378.51C44.37,-345.1 68.69,-269.97 90,-207 104.24,-164.91 121.59,-115.68 130.74,-89.87"/>
+</g>
+<!-- boot_slot -->
+<g id="node3" class="node">
+<title>boot_slot</title>
+<ellipse fill="none" stroke="black" cx="293.04" cy="-936" rx="43.59" ry="18"/>
+<text text-anchor="middle" x="293.04" y="-932.3" font-family="Times,serif" font-size="14.00">boot_slot</text>
+</g>
+<!-- bootloader&#45;&#45;boot_slot -->
+<g id="edge2" class="edge">
+<title>bootloader&#45;&#45;boot_slot</title>
+<path fill="none" stroke="black" d="M160.91,-870.97C177.25,-882.48 200.44,-897.88 222.19,-909 233.77,-914.92 246.96,-920.33 258.73,-924.73"/>
+</g>
+<!-- build -->
+<g id="node4" class="node">
+<title>build</title>
+<ellipse fill="none" stroke="black" cx="293.04" cy="-882" rx="28.7" ry="18"/>
+<text text-anchor="middle" x="293.04" y="-878.3" font-family="Times,serif" font-size="14.00">build</text>
+</g>
+<!-- bootloader&#45;&#45;build -->
+<g id="edge3" class="edge">
+<title>bootloader&#45;&#45;build</title>
+<path fill="none" stroke="black" d="M182.1,-862.59C208.79,-867.3 242.24,-873.21 265.19,-877.26"/>
+</g>
+<!-- pause -->
+<g id="node5" class="node">
+<title>pause</title>
+<ellipse fill="none" stroke="black" cx="293.04" cy="-828" rx="30.59" ry="18"/>
+<text text-anchor="middle" x="293.04" y="-824.3" font-family="Times,serif" font-size="14.00">pause</text>
+</g>
+<!-- bootloader&#45;&#45;pause -->
+<g id="edge4" class="edge">
+<title>bootloader&#45;&#45;pause</title>
+<path fill="none" stroke="black" d="M182.1,-847.41C208.08,-842.82 240.47,-837.1 263.35,-833.06"/>
+</g>
+<!-- boot_build -->
+<g id="node7" class="node">
+<title>boot_build</title>
+<ellipse fill="none" stroke="black" cx="293.04" cy="-774" rx="28.7" ry="18"/>
+<text text-anchor="middle" x="293.04" y="-770.3" font-family="Times,serif" font-size="14.00">build</text>
+</g>
+<!-- boot&#45;&#45;boot_build -->
+<g id="edge6" class="edge">
+<title>boot&#45;&#45;boot_build</title>
+<path fill="none" stroke="black" d="M148.14,-655.95C161.47,-679.42 188.46,-721.79 222.19,-747 235.21,-756.73 252.16,-763.4 266.11,-767.67"/>
+</g>
+<!-- kernel -->
+<g id="node8" class="node">
+<title>kernel</title>
+<ellipse fill="none" stroke="black" cx="293.04" cy="-720" rx="32.49" ry="18"/>
+<text text-anchor="middle" x="293.04" y="-716.3" font-family="Times,serif" font-size="14.00">kernel</text>
+</g>
+<!-- boot&#45;&#45;kernel -->
+<g id="edge7" class="edge">
+<title>boot&#45;&#45;kernel</title>
+<path fill="none" stroke="black" d="M156.92,-652.14C173.32,-663.85 198.6,-680.94 222.19,-693 235.73,-699.92 251.49,-706.16 264.6,-710.89"/>
+</g>
+<!-- initramfs -->
+<g id="node9" class="node">
+<title>initramfs</title>
+<ellipse fill="none" stroke="black" cx="293.04" cy="-666" rx="42.79" ry="18"/>
+<text text-anchor="middle" x="293.04" y="-662.3" font-family="Times,serif" font-size="14.00">initramfs</text>
+</g>
+<!-- boot&#45;&#45;initramfs -->
+<g id="edge8" class="edge">
+<title>boot&#45;&#45;initramfs</title>
+<path fill="none" stroke="black" d="M164.35,-643.46C188.53,-647.73 225.35,-654.23 253.25,-659.15"/>
+</g>
+<!-- extra_cmdline -->
+<g id="node10" class="node">
+<title>extra_cmdline</title>
+<ellipse fill="none" stroke="black" cx="293.04" cy="-612" rx="61.99" ry="18"/>
+<text text-anchor="middle" x="293.04" y="-608.3" font-family="Times,serif" font-size="14.00">extra_cmdline</text>
+</g>
+<!-- boot&#45;&#45;extra_cmdline -->
+<g id="edge9" class="edge">
+<title>boot&#45;&#45;extra_cmdline</title>
+<path fill="none" stroke="black" d="M164.35,-634.54C184.8,-630.93 214.29,-625.73 239.77,-621.23"/>
+</g>
+<!-- extra_bootconfig -->
+<g id="node11" class="node">
+<title>extra_bootconfig</title>
+<ellipse fill="none" stroke="black" cx="293.04" cy="-558" rx="70.69" ry="18"/>
+<text text-anchor="middle" x="293.04" y="-554.3" font-family="Times,serif" font-size="14.00">extra_bootconfig</text>
+</g>
+<!-- boot&#45;&#45;extra_bootconfig -->
+<g id="edge10" class="edge">
+<title>boot&#45;&#45;extra_bootconfig</title>
+<path fill="none" stroke="black" d="M156.92,-625.86C173.32,-614.15 198.6,-597.06 222.19,-585 231.1,-580.44 240.97,-576.19 250.42,-572.46"/>
+</g>
+<!-- format -->
+<g id="node13" class="node">
+<title>format</title>
+<ellipse fill="none" stroke="black" cx="293.04" cy="-504" rx="34.39" ry="18"/>
+<text text-anchor="middle" x="293.04" y="-500.3" font-family="Times,serif" font-size="14.00">format</text>
+</g>
+<!-- data&#45;&#45;format -->
+<g id="edge12" class="edge">
+<title>data&#45;&#45;format</title>
+<path fill="none" stroke="black" d="M162.32,-458.2C189.65,-467.85 234.98,-483.85 264.17,-494.16"/>
+</g>
+<!-- size -->
+<g id="node14" class="node">
+<title>size</title>
+<ellipse fill="none" stroke="black" cx="293.04" cy="-450" rx="27" ry="18"/>
+<text text-anchor="middle" x="293.04" y="-446.3" font-family="Times,serif" font-size="14.00">size</text>
+</g>
+<!-- data&#45;&#45;size -->
+<g id="edge13" class="edge">
+<title>data&#45;&#45;size</title>
+<path fill="none" stroke="black" d="M165.39,-450C193.49,-450 237.77,-450 265.83,-450"/>
+</g>
+<!-- metadata_size -->
+<g id="node16" class="node">
+<title>metadata_size</title>
+<ellipse fill="none" stroke="black" cx="293.04" cy="-396" rx="27" ry="18"/>
+<text text-anchor="middle" x="293.04" y="-392.3" font-family="Times,serif" font-size="14.00">size</text>
+</g>
+<!-- metadata&#45;&#45;metadata_size -->
+<g id="edge15" class="edge">
+<title>metadata&#45;&#45;metadata_size</title>
+<path fill="none" stroke="black" d="M181.29,-396C208.35,-396 242.62,-396 265.82,-396"/>
+</g>
+<!-- misc_size -->
+<g id="node18" class="node">
+<title>misc_size</title>
+<ellipse fill="none" stroke="black" cx="293.04" cy="-342" rx="27" ry="18"/>
+<text text-anchor="middle" x="293.04" y="-338.3" font-family="Times,serif" font-size="14.00">size</text>
+</g>
+<!-- misc&#45;&#45;misc_size -->
+<g id="edge17" class="edge">
+<title>misc&#45;&#45;misc_size</title>
+<path fill="none" stroke="black" d="M166.09,-342C194.21,-342 237.99,-342 265.83,-342"/>
+</g>
+<!-- esp_image -->
+<g id="node20" class="node">
+<title>esp_image</title>
+<ellipse fill="none" stroke="black" cx="293.04" cy="-288" rx="48.99" ry="18"/>
+<text text-anchor="middle" x="293.04" y="-284.3" font-family="Times,serif" font-size="14.00">esp_image</text>
+</g>
+<!-- otheros&#45;&#45;esp_image -->
+<g id="edge19" class="edge">
+<title>otheros&#45;&#45;esp_image</title>
+<path fill="none" stroke="black" d="M168.22,-244.28C193.53,-253.22 230.24,-266.18 257.12,-275.67"/>
+</g>
+<!-- initramfs_path -->
+<g id="node21" class="node">
+<title>initramfs_path</title>
+<ellipse fill="none" stroke="black" cx="293.04" cy="-234" rx="62.29" ry="18"/>
+<text text-anchor="middle" x="293.04" y="-230.3" font-family="Times,serif" font-size="14.00">initramfs_path</text>
+</g>
+<!-- otheros&#45;&#45;initramfs_path -->
+<g id="edge20" class="edge">
+<title>otheros&#45;&#45;initramfs_path</title>
+<path fill="none" stroke="black" d="M174.57,-234C191.16,-234 211.4,-234 230.38,-234"/>
+</g>
+<!-- kernel_path -->
+<g id="node22" class="node">
+<title>kernel_path</title>
+<ellipse fill="none" stroke="black" cx="293.04" cy="-180" rx="51.99" ry="18"/>
+<text text-anchor="middle" x="293.04" y="-176.3" font-family="Times,serif" font-size="14.00">kernel_path</text>
+</g>
+<!-- otheros&#45;&#45;kernel_path -->
+<g id="edge21" class="edge">
+<title>otheros&#45;&#45;kernel_path</title>
+<path fill="none" stroke="black" d="M168.22,-223.72C193.18,-214.9 229.24,-202.17 256.01,-192.72"/>
+</g>
+<!-- root_image -->
+<g id="node23" class="node">
+<title>root_image</title>
+<ellipse fill="none" stroke="black" cx="293.04" cy="-126" rx="51.19" ry="18"/>
+<text text-anchor="middle" x="293.04" y="-122.3" font-family="Times,serif" font-size="14.00">root_image</text>
+</g>
+<!-- otheros&#45;&#45;root_image -->
+<g id="edge22" class="edge">
+<title>otheros&#45;&#45;root_image</title>
+<path fill="none" stroke="black" d="M152.53,-217.38C167.79,-199.42 194.22,-170.96 222.19,-153 231.82,-146.82 243.08,-141.7 253.71,-137.64"/>
+</g>
+<!-- super_build -->
+<g id="node25" class="node">
+<title>super_build</title>
+<ellipse fill="none" stroke="black" cx="293.04" cy="-72" rx="28.7" ry="18"/>
+<text text-anchor="middle" x="293.04" y="-68.3" font-family="Times,serif" font-size="14.00">build</text>
+</g>
+<!-- super&#45;&#45;super_build -->
+<g id="edge25" class="edge">
+<title>super&#45;&#45;super_build</title>
+<path fill="none" stroke="black" d="M168.22,-72C195.87,-72 237.13,-72 264.31,-72"/>
+</g>
+<!-- system -->
+<g id="node26" class="node">
+<title>system</title>
+<ellipse fill="none" stroke="black" cx="293.04" cy="-18" rx="36" ry="18"/>
+<text text-anchor="middle" x="293.04" y="-14.3" font-family="Times,serif" font-size="14.00">system</text>
+</g>
+<!-- super&#45;&#45;system -->
+<g id="edge24" class="edge">
+<title>super&#45;&#45;system</title>
+<path fill="none" stroke="black" d="M164.35,-63.08C191.58,-53.47 234.86,-38.19 263.36,-28.13"/>
+</g>
+</g>
+</svg>
diff --git a/host/commands/cvd/parser/doc/graphics.dot b/host/commands/cvd/parser/doc/graphics.dot
new file mode 100644
index 0000000..abc9dbe
--- /dev/null
+++ b/host/commands/cvd/parser/doc/graphics.dot
@@ -0,0 +1,14 @@
+graph {
+  rankdir=LR
+    graphics--hwcomposer
+    graphics--gpu_mode
+    graphics--record_screen
+    graphics--gpu_capture_binary
+    graphics--enable_gpu_udmabuf
+    graphics--enable_gpu_angle
+    graphics--displays
+        displays--dpi
+        displays--refresh_rate_hz
+        displays--x_res
+        displays--y_res
+}
\ No newline at end of file
diff --git a/host/commands/cvd/parser/doc/graphics.png b/host/commands/cvd/parser/doc/graphics.png
new file mode 100644
index 0000000..bd8bd87
--- /dev/null
+++ b/host/commands/cvd/parser/doc/graphics.png
Binary files differ
diff --git a/host/commands/cvd/parser/doc/graphics.svg b/host/commands/cvd/parser/doc/graphics.svg
new file mode 100644
index 0000000..21a5032
--- /dev/null
+++ b/host/commands/cvd/parser/doc/graphics.svg
@@ -0,0 +1,140 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN"
+ "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
+<!-- Generated by graphviz version 2.43.0 (0)
+ -->
+<!-- Title: %3 Pages: 1 -->
+<svg width="466pt" height="449pt"
+ viewBox="0.00 0.00 466.07 449.00" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
+<g id="graph0" class="graph" transform="scale(1 1) rotate(0) translate(4 445)">
+<title>%3</title>
+<polygon fill="white" stroke="transparent" points="-4,4 -4,-445 462.07,-445 462.07,4 -4,4"/>
+<!-- graphics -->
+<g id="node1" class="node">
+<title>graphics</title>
+<ellipse fill="none" stroke="black" cx="40.3" cy="-261" rx="40.09" ry="18"/>
+<text text-anchor="middle" x="40.3" y="-257.3" font-family="Times,serif" font-size="14.00">graphics</text>
+</g>
+<!-- hwcomposer -->
+<g id="node2" class="node">
+<title>hwcomposer</title>
+<ellipse fill="none" stroke="black" cx="203.69" cy="-423" rx="55.79" ry="18"/>
+<text text-anchor="middle" x="203.69" y="-419.3" font-family="Times,serif" font-size="14.00">hwcomposer</text>
+</g>
+<!-- graphics&#45;&#45;hwcomposer -->
+<g id="edge1" class="edge">
+<title>graphics&#45;&#45;hwcomposer</title>
+<path fill="none" stroke="black" d="M46.57,-279.16C55.91,-308.12 78.08,-364.67 116.59,-396 127.29,-404.7 140.8,-410.61 153.93,-414.62"/>
+</g>
+<!-- gpu_mode -->
+<g id="node3" class="node">
+<title>gpu_mode</title>
+<ellipse fill="none" stroke="black" cx="203.69" cy="-369" rx="48.19" ry="18"/>
+<text text-anchor="middle" x="203.69" y="-365.3" font-family="Times,serif" font-size="14.00">gpu_mode</text>
+</g>
+<!-- graphics&#45;&#45;gpu_mode -->
+<g id="edge2" class="edge">
+<title>graphics&#45;&#45;gpu_mode</title>
+<path fill="none" stroke="black" d="M53.16,-278.35C66.55,-296.68 89.96,-325.2 116.59,-342 129.97,-350.44 146.2,-356.43 160.94,-360.58"/>
+</g>
+<!-- record_screen -->
+<g id="node4" class="node">
+<title>record_screen</title>
+<ellipse fill="none" stroke="black" cx="203.69" cy="-315" rx="59.59" ry="18"/>
+<text text-anchor="middle" x="203.69" y="-311.3" font-family="Times,serif" font-size="14.00">record_screen</text>
+</g>
+<!-- graphics&#45;&#45;record_screen -->
+<g id="edge3" class="edge">
+<title>graphics&#45;&#45;record_screen</title>
+<path fill="none" stroke="black" d="M71.97,-272.32C85.59,-277.26 101.85,-283.05 116.59,-288 131,-292.84 146.84,-297.89 160.98,-302.29"/>
+</g>
+<!-- gpu_capture_binary -->
+<g id="node5" class="node">
+<title>gpu_capture_binary</title>
+<ellipse fill="none" stroke="black" cx="203.69" cy="-261" rx="81.49" ry="18"/>
+<text text-anchor="middle" x="203.69" y="-257.3" font-family="Times,serif" font-size="14.00">gpu_capture_binary</text>
+</g>
+<!-- graphics&#45;&#45;gpu_capture_binary -->
+<g id="edge4" class="edge">
+<title>graphics&#45;&#45;gpu_capture_binary</title>
+<path fill="none" stroke="black" d="M80.78,-261C93.37,-261 107.8,-261 122.22,-261"/>
+</g>
+<!-- enable_gpu_udmabuf -->
+<g id="node6" class="node">
+<title>enable_gpu_udmabuf</title>
+<ellipse fill="none" stroke="black" cx="203.69" cy="-207" rx="87.18" ry="18"/>
+<text text-anchor="middle" x="203.69" y="-203.3" font-family="Times,serif" font-size="14.00">enable_gpu_udmabuf</text>
+</g>
+<!-- graphics&#45;&#45;enable_gpu_udmabuf -->
+<g id="edge5" class="edge">
+<title>graphics&#45;&#45;enable_gpu_udmabuf</title>
+<path fill="none" stroke="black" d="M71.97,-249.68C85.59,-244.74 101.85,-238.95 116.59,-234 128.62,-229.96 141.66,-225.77 153.86,-221.94"/>
+</g>
+<!-- enable_gpu_angle -->
+<g id="node7" class="node">
+<title>enable_gpu_angle</title>
+<ellipse fill="none" stroke="black" cx="203.69" cy="-153" rx="74.99" ry="18"/>
+<text text-anchor="middle" x="203.69" y="-149.3" font-family="Times,serif" font-size="14.00">enable_gpu_angle</text>
+</g>
+<!-- graphics&#45;&#45;enable_gpu_angle -->
+<g id="edge6" class="edge">
+<title>graphics&#45;&#45;enable_gpu_angle</title>
+<path fill="none" stroke="black" d="M53.16,-243.65C66.55,-225.32 89.96,-196.8 116.59,-180 126.26,-173.9 137.42,-169.08 148.43,-165.31"/>
+</g>
+<!-- displays -->
+<g id="node8" class="node">
+<title>displays</title>
+<ellipse fill="none" stroke="black" cx="203.69" cy="-99" rx="39.79" ry="18"/>
+<text text-anchor="middle" x="203.69" y="-95.3" font-family="Times,serif" font-size="14.00">displays</text>
+</g>
+<!-- graphics&#45;&#45;displays -->
+<g id="edge7" class="edge">
+<title>graphics&#45;&#45;displays</title>
+<path fill="none" stroke="black" d="M46.57,-242.84C55.91,-213.88 78.08,-157.33 116.59,-126 130.44,-114.73 149.03,-108.14 165.39,-104.3"/>
+</g>
+<!-- dpi -->
+<g id="node9" class="node">
+<title>dpi</title>
+<ellipse fill="none" stroke="black" cx="392.42" cy="-180" rx="27" ry="18"/>
+<text text-anchor="middle" x="392.42" y="-176.3" font-family="Times,serif" font-size="14.00">dpi</text>
+</g>
+<!-- displays&#45;&#45;dpi -->
+<g id="edge8" class="edge">
+<title>displays&#45;&#45;dpi</title>
+<path fill="none" stroke="black" d="M239.26,-106.96C255.41,-111.39 274.61,-117.71 290.78,-126 308.58,-135.12 309.42,-143.07 326.78,-153 339.87,-160.49 355.44,-167.06 368.03,-171.84"/>
+</g>
+<!-- refresh_rate_hz -->
+<g id="node10" class="node">
+<title>refresh_rate_hz</title>
+<ellipse fill="none" stroke="black" cx="392.42" cy="-126" rx="65.79" ry="18"/>
+<text text-anchor="middle" x="392.42" y="-122.3" font-family="Times,serif" font-size="14.00">refresh_rate_hz</text>
+</g>
+<!-- displays&#45;&#45;refresh_rate_hz -->
+<g id="edge9" class="edge">
+<title>displays&#45;&#45;refresh_rate_hz</title>
+<path fill="none" stroke="black" d="M241.58,-104.33C267.94,-108.15 303.92,-113.35 334.01,-117.7"/>
+</g>
+<!-- x_res -->
+<g id="node11" class="node">
+<title>x_res</title>
+<ellipse fill="none" stroke="black" cx="392.42" cy="-72" rx="29.8" ry="18"/>
+<text text-anchor="middle" x="392.42" y="-68.3" font-family="Times,serif" font-size="14.00">x_res</text>
+</g>
+<!-- displays&#45;&#45;x_res -->
+<g id="edge10" class="edge">
+<title>displays&#45;&#45;x_res</title>
+<path fill="none" stroke="black" d="M241.58,-93.67C277.29,-88.5 330.66,-80.78 363.14,-76.09"/>
+</g>
+<!-- y_res -->
+<g id="node12" class="node">
+<title>y_res</title>
+<ellipse fill="none" stroke="black" cx="392.42" cy="-18" rx="29.8" ry="18"/>
+<text text-anchor="middle" x="392.42" y="-14.3" font-family="Times,serif" font-size="14.00">y_res</text>
+</g>
+<!-- displays&#45;&#45;y_res -->
+<g id="edge11" class="edge">
+<title>displays&#45;&#45;y_res</title>
+<path fill="none" stroke="black" d="M232.74,-86.39C257.44,-75.34 294.41,-58.91 326.78,-45 340.19,-39.24 355.23,-32.94 367.43,-27.88"/>
+</g>
+</g>
+</svg>
diff --git a/host/commands/cvd/parser/doc/linkage.dot b/host/commands/cvd/parser/doc/linkage.dot
new file mode 100644
index 0000000..39d58ee
--- /dev/null
+++ b/host/commands/cvd/parser/doc/linkage.dot
@@ -0,0 +1,23 @@
+graph {
+  rankdir=LR
+node [shape=record,width=.1,height=.1];
+root--instances
+root--common
+common--vhost_user_mac80211_hwsim
+common--wmediumd_config
+common--bluetooth_controller_properties_file
+common--bluetooth_default_commands_file
+common--enable_host_bluetooth
+common--netsim
+common--netsim_bt
+instances--vm
+instances--disk
+instances--graphics
+instances--camera
+instances--connectivity
+instances--audio
+instances--streaming
+instances--adb
+instances--vehicle
+instances--location
+}
\ No newline at end of file
diff --git a/host/commands/cvd/parser/doc/linkage.png b/host/commands/cvd/parser/doc/linkage.png
new file mode 100644
index 0000000..4ef1c9d
--- /dev/null
+++ b/host/commands/cvd/parser/doc/linkage.png
Binary files differ
diff --git a/host/commands/cvd/parser/doc/linkage.svg b/host/commands/cvd/parser/doc/linkage.svg
new file mode 100644
index 0000000..127d70a
--- /dev/null
+++ b/host/commands/cvd/parser/doc/linkage.svg
@@ -0,0 +1,228 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN"
+ "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
+<!-- Generated by graphviz version 2.43.0 (0)
+ -->
+<!-- Title: %3 Pages: 1 -->
+<svg width="396pt" height="704pt"
+ viewBox="0.00 0.00 396.00 704.00" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
+<g id="graph0" class="graph" transform="scale(1 1) rotate(0) translate(4 700)">
+<title>%3</title>
+<polygon fill="white" stroke="transparent" points="-4,4 -4,-700 392,-700 392,4 -4,4"/>
+<!-- root -->
+<g id="node1" class="node">
+<title>root</title>
+<polygon fill="none" stroke="black" points="0,-294.5 0,-317.5 38,-317.5 38,-294.5 0,-294.5"/>
+<text text-anchor="middle" x="19" y="-302.3" font-family="Times,serif" font-size="14.00">root</text>
+</g>
+<!-- instances -->
+<g id="node2" class="node">
+<title>instances</title>
+<polygon fill="none" stroke="black" points="74,-462.5 74,-485.5 140,-485.5 140,-462.5 74,-462.5"/>
+<text text-anchor="middle" x="107" y="-470.3" font-family="Times,serif" font-size="14.00">instances</text>
+</g>
+<!-- root&#45;&#45;instances -->
+<g id="edge1" class="edge">
+<title>root&#45;&#45;instances</title>
+<path fill="none" stroke="black" d="M25.91,-317.55C41.62,-348.24 84.29,-431.59 100.05,-462.37"/>
+</g>
+<!-- common -->
+<g id="node3" class="node">
+<title>common</title>
+<polygon fill="none" stroke="black" points="74.5,-147.5 74.5,-170.5 139.5,-170.5 139.5,-147.5 74.5,-147.5"/>
+<text text-anchor="middle" x="107" y="-155.3" font-family="Times,serif" font-size="14.00">common</text>
+</g>
+<!-- root&#45;&#45;common -->
+<g id="edge2" class="edge">
+<title>root&#45;&#45;common</title>
+<path fill="none" stroke="black" d="M26.75,-294.47C42.75,-267.12 82.84,-198.59 99.04,-170.89"/>
+</g>
+<!-- vm -->
+<g id="node11" class="node">
+<title>vm</title>
+<polygon fill="none" stroke="black" points="264.5,-672.5 264.5,-695.5 299.5,-695.5 299.5,-672.5 264.5,-672.5"/>
+<text text-anchor="middle" x="282" y="-680.3" font-family="Times,serif" font-size="14.00">vm</text>
+</g>
+<!-- instances&#45;&#45;vm -->
+<g id="edge10" class="edge">
+<title>instances&#45;&#45;vm</title>
+<path fill="none" stroke="black" d="M108.45,-485.96C110.41,-519.56 120.8,-615.42 176,-663 200.92,-684.48 241.58,-686.3 264.29,-685.37"/>
+</g>
+<!-- disk -->
+<g id="node12" class="node">
+<title>disk</title>
+<polygon fill="none" stroke="black" points="262.5,-630.5 262.5,-653.5 301.5,-653.5 301.5,-630.5 262.5,-630.5"/>
+<text text-anchor="middle" x="282" y="-638.3" font-family="Times,serif" font-size="14.00">disk</text>
+</g>
+<!-- instances&#45;&#45;disk -->
+<g id="edge11" class="edge">
+<title>instances&#45;&#45;disk</title>
+<path fill="none" stroke="black" d="M109.77,-485.75C114.68,-513.94 131.17,-585.67 176,-621 200.97,-640.68 239.43,-643.44 262.22,-643.1"/>
+</g>
+<!-- graphics -->
+<g id="node13" class="node">
+<title>graphics</title>
+<polygon fill="none" stroke="black" points="251,-588.5 251,-611.5 313,-611.5 313,-588.5 251,-588.5"/>
+<text text-anchor="middle" x="282" y="-596.3" font-family="Times,serif" font-size="14.00">graphics</text>
+</g>
+<!-- instances&#45;&#45;graphics -->
+<g id="edge12" class="edge">
+<title>instances&#45;&#45;graphics</title>
+<path fill="none" stroke="black" d="M112.03,-485.67C120.29,-507.73 141.33,-555.45 176,-579 197.99,-593.94 228.18,-598.74 250.54,-600.07"/>
+</g>
+<!-- camera -->
+<g id="node14" class="node">
+<title>camera</title>
+<polygon fill="none" stroke="black" points="254,-546.5 254,-569.5 310,-569.5 310,-546.5 254,-546.5"/>
+<text text-anchor="middle" x="282" y="-554.3" font-family="Times,serif" font-size="14.00">camera</text>
+</g>
+<!-- instances&#45;&#45;camera -->
+<g id="edge13" class="edge">
+<title>instances&#45;&#45;camera</title>
+<path fill="none" stroke="black" d="M116.99,-485.58C128.87,-500.02 151.21,-524.33 176,-537 200.5,-549.52 231.77,-554.58 253.79,-556.62"/>
+</g>
+<!-- connectivity -->
+<g id="node15" class="node">
+<title>connectivity</title>
+<polygon fill="none" stroke="black" points="240.5,-504.5 240.5,-527.5 323.5,-527.5 323.5,-504.5 240.5,-504.5"/>
+<text text-anchor="middle" x="282" y="-512.3" font-family="Times,serif" font-size="14.00">connectivity</text>
+</g>
+<!-- instances&#45;&#45;connectivity -->
+<g id="edge14" class="edge">
+<title>instances&#45;&#45;connectivity</title>
+<path fill="none" stroke="black" d="M140.32,-484.72C151.55,-488.25 164.26,-492.02 176,-495 197.08,-500.35 220.83,-505.19 240.4,-508.88"/>
+</g>
+<!-- audio -->
+<g id="node16" class="node">
+<title>audio</title>
+<polygon fill="none" stroke="black" points="258.5,-462.5 258.5,-485.5 305.5,-485.5 305.5,-462.5 258.5,-462.5"/>
+<text text-anchor="middle" x="282" y="-470.3" font-family="Times,serif" font-size="14.00">audio</text>
+</g>
+<!-- instances&#45;&#45;audio -->
+<g id="edge15" class="edge">
+<title>instances&#45;&#45;audio</title>
+<path fill="none" stroke="black" d="M140.13,-474C174.53,-474 228.21,-474 258.41,-474"/>
+</g>
+<!-- streaming -->
+<g id="node17" class="node">
+<title>streaming</title>
+<polygon fill="none" stroke="black" points="246.5,-420.5 246.5,-443.5 317.5,-443.5 317.5,-420.5 246.5,-420.5"/>
+<text text-anchor="middle" x="282" y="-428.3" font-family="Times,serif" font-size="14.00">streaming</text>
+</g>
+<!-- instances&#45;&#45;streaming -->
+<g id="edge16" class="edge">
+<title>instances&#45;&#45;streaming</title>
+<path fill="none" stroke="black" d="M140.32,-463.28C151.55,-459.75 164.26,-455.98 176,-453 199.24,-447.1 225.73,-441.82 246.28,-438.02"/>
+</g>
+<!-- adb -->
+<g id="node18" class="node">
+<title>adb</title>
+<polygon fill="none" stroke="black" points="264,-378.5 264,-401.5 300,-401.5 300,-378.5 264,-378.5"/>
+<text text-anchor="middle" x="282" y="-386.3" font-family="Times,serif" font-size="14.00">adb</text>
+</g>
+<!-- instances&#45;&#45;adb -->
+<g id="edge17" class="edge">
+<title>instances&#45;&#45;adb</title>
+<path fill="none" stroke="black" d="M116.99,-462.42C128.87,-447.98 151.21,-423.67 176,-411 204.47,-396.45 242.08,-391.97 263.7,-390.6"/>
+</g>
+<!-- vehicle -->
+<g id="node19" class="node">
+<title>vehicle</title>
+<polygon fill="none" stroke="black" points="254,-336.5 254,-359.5 310,-359.5 310,-336.5 254,-336.5"/>
+<text text-anchor="middle" x="282" y="-344.3" font-family="Times,serif" font-size="14.00">vehicle</text>
+</g>
+<!-- instances&#45;&#45;vehicle -->
+<g id="edge18" class="edge">
+<title>instances&#45;&#45;vehicle</title>
+<path fill="none" stroke="black" d="M112.03,-462.33C120.29,-440.27 141.33,-392.55 176,-369 199.07,-353.33 231.15,-348.81 253.77,-347.76"/>
+</g>
+<!-- location -->
+<g id="node20" class="node">
+<title>location</title>
+<polygon fill="none" stroke="black" points="252,-294.5 252,-317.5 312,-317.5 312,-294.5 252,-294.5"/>
+<text text-anchor="middle" x="282" y="-302.3" font-family="Times,serif" font-size="14.00">location</text>
+</g>
+<!-- instances&#45;&#45;location -->
+<g id="edge19" class="edge">
+<title>instances&#45;&#45;location</title>
+<path fill="none" stroke="black" d="M109.77,-462.25C114.68,-434.06 131.17,-362.33 176,-327 197.46,-310.08 228.9,-305.67 251.78,-304.98"/>
+</g>
+<!-- vhost_user_mac80211_hwsim -->
+<g id="node4" class="node">
+<title>vhost_user_mac80211_hwsim</title>
+<polygon fill="none" stroke="black" points="191,-252.5 191,-275.5 373,-275.5 373,-252.5 191,-252.5"/>
+<text text-anchor="middle" x="282" y="-260.3" font-family="Times,serif" font-size="14.00">vhost_user_mac80211_hwsim</text>
+</g>
+<!-- common&#45;&#45;vhost_user_mac80211_hwsim -->
+<g id="edge3" class="edge">
+<title>common&#45;&#45;vhost_user_mac80211_hwsim</title>
+<path fill="none" stroke="black" d="M113.94,-170.66C124.01,-189.17 146.3,-225.04 176,-243 182.28,-246.8 189.13,-249.93 196.21,-252.5"/>
+</g>
+<!-- wmediumd_config -->
+<g id="node5" class="node">
+<title>wmediumd_config</title>
+<polygon fill="none" stroke="black" points="222,-210.5 222,-233.5 342,-233.5 342,-210.5 222,-210.5"/>
+<text text-anchor="middle" x="282" y="-218.3" font-family="Times,serif" font-size="14.00">wmediumd_config</text>
+</g>
+<!-- common&#45;&#45;wmediumd_config -->
+<g id="edge4" class="edge">
+<title>common&#45;&#45;wmediumd_config</title>
+<path fill="none" stroke="black" d="M123.1,-170.52C136.35,-180.09 156.51,-193.32 176,-201 190.44,-206.69 206.63,-210.9 221.9,-214"/>
+</g>
+<!-- bluetooth_controller_properties_file -->
+<g id="node6" class="node">
+<title>bluetooth_controller_properties_file</title>
+<polygon fill="none" stroke="black" points="176,-168.5 176,-191.5 388,-191.5 388,-168.5 176,-168.5"/>
+<text text-anchor="middle" x="282" y="-176.3" font-family="Times,serif" font-size="14.00">bluetooth_controller_properties_file</text>
+</g>
+<!-- common&#45;&#45;bluetooth_controller_properties_file -->
+<g id="edge5" class="edge">
+<title>common&#45;&#45;bluetooth_controller_properties_file</title>
+<path fill="none" stroke="black" d="M139.73,-162.85C153.09,-164.47 169.42,-166.46 186.17,-168.49"/>
+</g>
+<!-- bluetooth_default_commands_file -->
+<g id="node7" class="node">
+<title>bluetooth_default_commands_file</title>
+<polygon fill="none" stroke="black" points="180.5,-126.5 180.5,-149.5 383.5,-149.5 383.5,-126.5 180.5,-126.5"/>
+<text text-anchor="middle" x="282" y="-134.3" font-family="Times,serif" font-size="14.00">bluetooth_default_commands_file</text>
+</g>
+<!-- common&#45;&#45;bluetooth_default_commands_file -->
+<g id="edge6" class="edge">
+<title>common&#45;&#45;bluetooth_default_commands_file</title>
+<path fill="none" stroke="black" d="M139.73,-155.15C153.09,-153.53 169.42,-151.54 186.17,-149.51"/>
+</g>
+<!-- enable_host_bluetooth -->
+<g id="node8" class="node">
+<title>enable_host_bluetooth</title>
+<polygon fill="none" stroke="black" points="212.5,-84.5 212.5,-107.5 351.5,-107.5 351.5,-84.5 212.5,-84.5"/>
+<text text-anchor="middle" x="282" y="-92.3" font-family="Times,serif" font-size="14.00">enable_host_bluetooth</text>
+</g>
+<!-- common&#45;&#45;enable_host_bluetooth -->
+<g id="edge7" class="edge">
+<title>common&#45;&#45;enable_host_bluetooth</title>
+<path fill="none" stroke="black" d="M123.1,-147.48C136.35,-137.91 156.51,-124.68 176,-117 187.5,-112.47 200.11,-108.87 212.48,-106.04"/>
+</g>
+<!-- netsim -->
+<g id="node9" class="node">
+<title>netsim</title>
+<polygon fill="none" stroke="black" points="255.5,-42.5 255.5,-65.5 308.5,-65.5 308.5,-42.5 255.5,-42.5"/>
+<text text-anchor="middle" x="282" y="-50.3" font-family="Times,serif" font-size="14.00">netsim</text>
+</g>
+<!-- common&#45;&#45;netsim -->
+<g id="edge8" class="edge">
+<title>common&#45;&#45;netsim</title>
+<path fill="none" stroke="black" d="M113.94,-147.34C124.01,-128.83 146.3,-92.96 176,-75 200.18,-60.38 232.66,-55.72 255.06,-54.34"/>
+</g>
+<!-- netsim_bt -->
+<g id="node10" class="node">
+<title>netsim_bt</title>
+<polygon fill="none" stroke="black" points="246.5,-0.5 246.5,-23.5 317.5,-23.5 317.5,-0.5 246.5,-0.5"/>
+<text text-anchor="middle" x="282" y="-8.3" font-family="Times,serif" font-size="14.00">netsim_bt</text>
+</g>
+<!-- common&#45;&#45;netsim_bt -->
+<g id="edge9" class="edge">
+<title>common&#45;&#45;netsim_bt</title>
+<path fill="none" stroke="black" d="M110.7,-147.42C117.2,-122.23 136.22,-62.41 176,-33 196.18,-18.08 224.28,-13.06 246.33,-11.64"/>
+</g>
+</g>
+</svg>
diff --git a/host/commands/cvd/parser/doc/location.dot b/host/commands/cvd/parser/doc/location.dot
new file mode 100644
index 0000000..ed8a3de
--- /dev/null
+++ b/host/commands/cvd/parser/doc/location.dot
@@ -0,0 +1,6 @@
+graph {
+  rankdir=LR
+    location--start_gnss_proxy
+    location--fixed_location_file_path
+    location--gnss_file_path
+}
\ No newline at end of file
diff --git a/host/commands/cvd/parser/doc/location.png b/host/commands/cvd/parser/doc/location.png
new file mode 100644
index 0000000..f90e1ce
--- /dev/null
+++ b/host/commands/cvd/parser/doc/location.png
Binary files differ
diff --git a/host/commands/cvd/parser/doc/location.svg b/host/commands/cvd/parser/doc/location.svg
new file mode 100644
index 0000000..cad95d8
--- /dev/null
+++ b/host/commands/cvd/parser/doc/location.svg
@@ -0,0 +1,52 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN"
+ "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
+<!-- Generated by graphviz version 2.43.0 (0)
+ -->
+<!-- Title: %3 Pages: 1 -->
+<svg width="316pt" height="152pt"
+ viewBox="0.00 0.00 315.68 152.00" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
+<g id="graph0" class="graph" transform="scale(1 1) rotate(0) translate(4 148)">
+<title>%3</title>
+<polygon fill="white" stroke="transparent" points="-4,4 -4,-148 311.68,-148 311.68,4 -4,4"/>
+<!-- location -->
+<g id="node1" class="node">
+<title>location</title>
+<ellipse fill="none" stroke="black" cx="39" cy="-72" rx="38.99" ry="18"/>
+<text text-anchor="middle" x="39" y="-68.3" font-family="Times,serif" font-size="14.00">location</text>
+</g>
+<!-- start_gnss_proxy -->
+<g id="node2" class="node">
+<title>start_gnss_proxy</title>
+<ellipse fill="none" stroke="black" cx="210.83" cy="-126" rx="70.69" ry="18"/>
+<text text-anchor="middle" x="210.83" y="-122.3" font-family="Times,serif" font-size="14.00">start_gnss_proxy</text>
+</g>
+<!-- location&#45;&#45;start_gnss_proxy -->
+<g id="edge1" class="edge">
+<title>location&#45;&#45;start_gnss_proxy</title>
+<path fill="none" stroke="black" d="M69.7,-83.42C83.15,-88.44 99.29,-94.25 113.99,-99 129.29,-103.94 146.12,-108.85 161.36,-113.11"/>
+</g>
+<!-- fixed_location_file_path -->
+<g id="node3" class="node">
+<title>fixed_location_file_path</title>
+<ellipse fill="none" stroke="black" cx="210.83" cy="-72" rx="96.68" ry="18"/>
+<text text-anchor="middle" x="210.83" y="-68.3" font-family="Times,serif" font-size="14.00">fixed_location_file_path</text>
+</g>
+<!-- location&#45;&#45;fixed_location_file_path -->
+<g id="edge2" class="edge">
+<title>location&#45;&#45;fixed_location_file_path</title>
+<path fill="none" stroke="black" d="M78.11,-72C88.97,-72 101.31,-72 113.98,-72"/>
+</g>
+<!-- gnss_file_path -->
+<g id="node4" class="node">
+<title>gnss_file_path</title>
+<ellipse fill="none" stroke="black" cx="210.83" cy="-18" rx="61.99" ry="18"/>
+<text text-anchor="middle" x="210.83" y="-14.3" font-family="Times,serif" font-size="14.00">gnss_file_path</text>
+</g>
+<!-- location&#45;&#45;gnss_file_path -->
+<g id="edge3" class="edge">
+<title>location&#45;&#45;gnss_file_path</title>
+<path fill="none" stroke="black" d="M69.7,-60.58C83.15,-55.56 99.29,-49.75 113.99,-45 130.28,-39.74 148.3,-34.51 164.28,-30.08"/>
+</g>
+</g>
+</svg>
diff --git a/host/commands/cvd/parser/doc/streaming.dot b/host/commands/cvd/parser/doc/streaming.dot
new file mode 100644
index 0000000..42f521d
--- /dev/null
+++ b/host/commands/cvd/parser/doc/streaming.dot
@@ -0,0 +1,14 @@
+graph {
+  rankdir=LR
+streaming--start
+streaming--start_sig_server
+streaming--verify_sig_server_certificate
+streaming--assets_dir
+streaming--certs_dir
+streaming--sig_server_addr
+streaming--sig_server_path
+streaming--sig_server_port
+streaming--sig_server_secure
+streaming--tcp_port_range
+streaming--udp_port_range
+}
\ No newline at end of file
diff --git a/host/commands/cvd/parser/doc/streaming.png b/host/commands/cvd/parser/doc/streaming.png
new file mode 100644
index 0000000..798484e
--- /dev/null
+++ b/host/commands/cvd/parser/doc/streaming.png
Binary files differ
diff --git a/host/commands/cvd/parser/doc/streaming.svg b/host/commands/cvd/parser/doc/streaming.svg
new file mode 100644
index 0000000..efbd42f
--- /dev/null
+++ b/host/commands/cvd/parser/doc/streaming.svg
@@ -0,0 +1,140 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN"
+ "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
+<!-- Generated by graphviz version 2.43.0 (0)
+ -->
+<!-- Title: %3 Pages: 1 -->
+<svg width="360pt" height="584pt"
+ viewBox="0.00 0.00 359.87 584.00" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
+<g id="graph0" class="graph" transform="scale(1 1) rotate(0) translate(4 580)">
+<title>%3</title>
+<polygon fill="white" stroke="transparent" points="-4,4 -4,-580 355.87,-580 355.87,4 -4,4"/>
+<!-- streaming -->
+<g id="node1" class="node">
+<title>streaming</title>
+<ellipse fill="none" stroke="black" cx="46.15" cy="-288" rx="46.29" ry="18"/>
+<text text-anchor="middle" x="46.15" y="-284.3" font-family="Times,serif" font-size="14.00">streaming</text>
+</g>
+<!-- start -->
+<g id="node2" class="node">
+<title>start</title>
+<ellipse fill="none" stroke="black" cx="240.08" cy="-558" rx="27" ry="18"/>
+<text text-anchor="middle" x="240.08" y="-554.3" font-family="Times,serif" font-size="14.00">start</text>
+</g>
+<!-- streaming&#45;&#45;start -->
+<g id="edge1" class="edge">
+<title>streaming&#45;&#45;start</title>
+<path fill="none" stroke="black" d="M47.55,-306.07C49.58,-351.16 61.36,-469.42 128.29,-531 151.31,-552.18 188.12,-557.5 212.93,-558.47"/>
+</g>
+<!-- start_sig_server -->
+<g id="node3" class="node">
+<title>start_sig_server</title>
+<ellipse fill="none" stroke="black" cx="240.08" cy="-504" rx="66.09" ry="18"/>
+<text text-anchor="middle" x="240.08" y="-500.3" font-family="Times,serif" font-size="14.00">start_sig_server</text>
+</g>
+<!-- streaming&#45;&#45;start_sig_server -->
+<g id="edge2" class="edge">
+<title>streaming&#45;&#45;start_sig_server</title>
+<path fill="none" stroke="black" d="M49.49,-305.96C55.39,-343.69 74.45,-431.45 128.29,-477 141.74,-488.38 159.23,-495.11 176.33,-499.05"/>
+</g>
+<!-- verify_sig_server_certificate -->
+<g id="node4" class="node">
+<title>verify_sig_server_certificate</title>
+<ellipse fill="none" stroke="black" cx="240.08" cy="-450" rx="111.58" ry="18"/>
+<text text-anchor="middle" x="240.08" y="-446.3" font-family="Times,serif" font-size="14.00">verify_sig_server_certificate</text>
+</g>
+<!-- streaming&#45;&#45;verify_sig_server_certificate -->
+<g id="edge3" class="edge">
+<title>streaming&#45;&#45;verify_sig_server_certificate</title>
+<path fill="none" stroke="black" d="M52.84,-306C63.05,-335.25 87.32,-392.86 128.29,-423 136.9,-429.34 146.81,-434.2 157.09,-437.93"/>
+</g>
+<!-- assets_dir -->
+<g id="node5" class="node">
+<title>assets_dir</title>
+<ellipse fill="none" stroke="black" cx="240.08" cy="-396" rx="45.49" ry="18"/>
+<text text-anchor="middle" x="240.08" y="-392.3" font-family="Times,serif" font-size="14.00">assets_dir</text>
+</g>
+<!-- streaming&#45;&#45;assets_dir -->
+<g id="edge4" class="edge">
+<title>streaming&#45;&#45;assets_dir</title>
+<path fill="none" stroke="black" d="M59.76,-305.33C74.22,-323.92 99.66,-352.92 128.29,-369 149,-380.63 174.64,-387.29 196.04,-391.09"/>
+</g>
+<!-- certs_dir -->
+<g id="node6" class="node">
+<title>certs_dir</title>
+<ellipse fill="none" stroke="black" cx="240.08" cy="-342" rx="41.69" ry="18"/>
+<text text-anchor="middle" x="240.08" y="-338.3" font-family="Times,serif" font-size="14.00">certs_dir</text>
+</g>
+<!-- streaming&#45;&#45;certs_dir -->
+<g id="edge5" class="edge">
+<title>streaming&#45;&#45;certs_dir</title>
+<path fill="none" stroke="black" d="M80.91,-299.92C95.41,-304.86 112.6,-310.48 128.29,-315 152.93,-322.09 180.99,-328.91 202.71,-333.93"/>
+</g>
+<!-- sig_server_addr -->
+<g id="node7" class="node">
+<title>sig_server_addr</title>
+<ellipse fill="none" stroke="black" cx="240.08" cy="-288" rx="66.89" ry="18"/>
+<text text-anchor="middle" x="240.08" y="-284.3" font-family="Times,serif" font-size="14.00">sig_server_addr</text>
+</g>
+<!-- streaming&#45;&#45;sig_server_addr -->
+<g id="edge6" class="edge">
+<title>streaming&#45;&#45;sig_server_addr</title>
+<path fill="none" stroke="black" d="M92.63,-288C116.58,-288 146.38,-288 172.84,-288"/>
+</g>
+<!-- sig_server_path -->
+<g id="node8" class="node">
+<title>sig_server_path</title>
+<ellipse fill="none" stroke="black" cx="240.08" cy="-234" rx="66.09" ry="18"/>
+<text text-anchor="middle" x="240.08" y="-230.3" font-family="Times,serif" font-size="14.00">sig_server_path</text>
+</g>
+<!-- streaming&#45;&#45;sig_server_path -->
+<g id="edge7" class="edge">
+<title>streaming&#45;&#45;sig_server_path</title>
+<path fill="none" stroke="black" d="M80.91,-276.08C95.41,-271.14 112.6,-265.52 128.29,-261 147.89,-255.36 169.65,-249.89 188.66,-245.36"/>
+</g>
+<!-- sig_server_port -->
+<g id="node9" class="node">
+<title>sig_server_port</title>
+<ellipse fill="none" stroke="black" cx="240.08" cy="-180" rx="65.79" ry="18"/>
+<text text-anchor="middle" x="240.08" y="-176.3" font-family="Times,serif" font-size="14.00">sig_server_port</text>
+</g>
+<!-- streaming&#45;&#45;sig_server_port -->
+<g id="edge8" class="edge">
+<title>streaming&#45;&#45;sig_server_port</title>
+<path fill="none" stroke="black" d="M59.76,-270.67C74.22,-252.08 99.66,-223.08 128.29,-207 144.34,-197.98 163.36,-191.95 181.06,-187.93"/>
+</g>
+<!-- sig_server_secure -->
+<g id="node10" class="node">
+<title>sig_server_secure</title>
+<ellipse fill="none" stroke="black" cx="240.08" cy="-126" rx="73.39" ry="18"/>
+<text text-anchor="middle" x="240.08" y="-122.3" font-family="Times,serif" font-size="14.00">sig_server_secure</text>
+</g>
+<!-- streaming&#45;&#45;sig_server_secure -->
+<g id="edge9" class="edge">
+<title>streaming&#45;&#45;sig_server_secure</title>
+<path fill="none" stroke="black" d="M52.84,-270C63.05,-240.75 87.32,-183.14 128.29,-153 141.21,-143.5 157.04,-137.3 172.66,-133.28"/>
+</g>
+<!-- tcp_port_range -->
+<g id="node11" class="node">
+<title>tcp_port_range</title>
+<ellipse fill="none" stroke="black" cx="240.08" cy="-72" rx="63.89" ry="18"/>
+<text text-anchor="middle" x="240.08" y="-68.3" font-family="Times,serif" font-size="14.00">tcp_port_range</text>
+</g>
+<!-- streaming&#45;&#45;tcp_port_range -->
+<g id="edge10" class="edge">
+<title>streaming&#45;&#45;tcp_port_range</title>
+<path fill="none" stroke="black" d="M49.49,-270.04C55.39,-232.31 74.45,-144.55 128.29,-99 142.27,-87.17 160.61,-80.37 178.34,-76.5"/>
+</g>
+<!-- udp_port_range -->
+<g id="node12" class="node">
+<title>udp_port_range</title>
+<ellipse fill="none" stroke="black" cx="240.08" cy="-18" rx="66.09" ry="18"/>
+<text text-anchor="middle" x="240.08" y="-14.3" font-family="Times,serif" font-size="14.00">udp_port_range</text>
+</g>
+<!-- streaming&#45;&#45;udp_port_range -->
+<g id="edge11" class="edge">
+<title>streaming&#45;&#45;udp_port_range</title>
+<path fill="none" stroke="black" d="M47.55,-269.93C49.58,-224.84 61.36,-106.58 128.29,-45 141.19,-33.14 158.41,-26.25 175.42,-22.32"/>
+</g>
+</g>
+</svg>
diff --git a/host/commands/cvd/parser/doc/vm.dot b/host/commands/cvd/parser/doc/vm.dot
new file mode 100644
index 0000000..b0d40b9
--- /dev/null
+++ b/host/commands/cvd/parser/doc/vm.dot
@@ -0,0 +1,47 @@
+graph {
+  rankdir=LR
+    vm--cpus
+    vm--vm_manager
+    vm--vsock_guest_cid
+    vm--enable_minimal_mode
+    vm--restart_subprocesses
+    vm--setupwizard_mode
+    vm--smt
+    vm--use_allocd
+    vm--use_sdcard
+    vm--uuid
+    vm--file_verbosity
+    vm--verbosity
+    vm--Run_file_discovery
+    vm--config
+    vm--memory_mb
+    vm--custom_actions
+    vm--vm_manager
+        crosvm_binary_dir [label = "binary_dir"]
+        qemu_binary_dir [label = "binary_dir"]
+        gem5_binary_dir [label = "binary_dir"]
+
+        vm_manager--crosvm
+            crosvm--crosvm_binary_dir
+            crosvm--seccomp_policy_dir
+            crosvm--enable_sandbox
+        vm_manager--qemu
+            qemu--qemu_binary_dir
+        vm_manager--gem5
+            gem5--gem5_binary_dir
+            gem5--checkpoint_dir
+            gem5--debug_file
+            gem5--debug_flags
+    vm--security
+        security--guest_enforce_security
+        security--serial_number
+        security--secure_hals
+    vm--kernel
+        kernel--enable_kernel_log
+        kernel--kgdb
+        kernel--gdb_port
+        kernel--console
+        kernel--extra_kernel_cmdline
+        kernel--initramfs_path
+        kernel--path
+}
\ No newline at end of file
diff --git a/host/commands/cvd/parser/doc/vm.png b/host/commands/cvd/parser/doc/vm.png
new file mode 100644
index 0000000..3b3ff8d
--- /dev/null
+++ b/host/commands/cvd/parser/doc/vm.png
Binary files differ
diff --git a/host/commands/cvd/parser/doc/vm.svg b/host/commands/cvd/parser/doc/vm.svg
new file mode 100644
index 0000000..a86496e
--- /dev/null
+++ b/host/commands/cvd/parser/doc/vm.svg
@@ -0,0 +1,453 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN"
+ "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
+<!-- Generated by graphviz version 2.43.0 (0)
+ -->
+<!-- Title: %3 Pages: 1 -->
+<svg width="706pt" height="1421pt"
+ viewBox="0.00 0.00 705.55 1421.00" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
+<g id="graph0" class="graph" transform="scale(1 1) rotate(0) translate(4 1417)">
+<title>%3</title>
+<polygon fill="white" stroke="transparent" points="-4,4 -4,-1417 701.55,-1417 701.55,4 -4,4"/>
+<!-- vm -->
+<g id="node1" class="node">
+<title>vm</title>
+<ellipse fill="none" stroke="black" cx="27" cy="-855" rx="27" ry="18"/>
+<text text-anchor="middle" x="27" y="-851.3" font-family="Times,serif" font-size="14.00">vm</text>
+</g>
+<!-- cpus -->
+<g id="node2" class="node">
+<title>cpus</title>
+<ellipse fill="none" stroke="black" cx="182.94" cy="-1287" rx="27" ry="18"/>
+<text text-anchor="middle" x="182.94" y="-1283.3" font-family="Times,serif" font-size="14.00">cpus</text>
+</g>
+<!-- vm&#45;&#45;cpus -->
+<g id="edge1" class="edge">
+<title>vm&#45;&#45;cpus</title>
+<path fill="none" stroke="black" d="M28.28,-873.28C29.73,-944.33 38.49,-1201.1 90,-1260 106.32,-1278.66 134.79,-1284.74 155.73,-1286.57"/>
+</g>
+<!-- vm_manager -->
+<g id="node3" class="node">
+<title>vm_manager</title>
+<ellipse fill="none" stroke="black" cx="182.94" cy="-1233" rx="57.69" ry="18"/>
+<text text-anchor="middle" x="182.94" y="-1229.3" font-family="Times,serif" font-size="14.00">vm_manager</text>
+</g>
+<!-- vm&#45;&#45;vm_manager -->
+<g id="edge2" class="edge">
+<title>vm&#45;&#45;vm_manager</title>
+<path fill="none" stroke="black" d="M28.92,-872.96C32.25,-936.52 45.95,-1147.78 90,-1197 102.09,-1210.51 120.56,-1217.41 137.77,-1221.58"/>
+</g>
+<!-- vm&#45;&#45;vm_manager -->
+<g id="edge17" class="edge">
+<title>vm&#45;&#45;vm_manager</title>
+<path fill="none" stroke="black" d="M28.6,-873.24C31.33,-939.48 44.4,-1164.05 90,-1215 99.21,-1225.29 112.11,-1231.74 125.34,-1235.46"/>
+</g>
+<!-- vsock_guest_cid -->
+<g id="node4" class="node">
+<title>vsock_guest_cid</title>
+<ellipse fill="none" stroke="black" cx="182.94" cy="-1179" rx="69.59" ry="18"/>
+<text text-anchor="middle" x="182.94" y="-1175.3" font-family="Times,serif" font-size="14.00">vsock_guest_cid</text>
+</g>
+<!-- vm&#45;&#45;vsock_guest_cid -->
+<g id="edge3" class="edge">
+<title>vm&#45;&#45;vsock_guest_cid</title>
+<path fill="none" stroke="black" d="M29.37,-873.02C34.13,-930.87 51.77,-1110.59 90,-1152 98.12,-1160.8 108.95,-1166.79 120.32,-1170.85"/>
+</g>
+<!-- enable_minimal_mode -->
+<g id="node5" class="node">
+<title>enable_minimal_mode</title>
+<ellipse fill="none" stroke="black" cx="182.94" cy="-1125" rx="92.88" ry="18"/>
+<text text-anchor="middle" x="182.94" y="-1121.3" font-family="Times,serif" font-size="14.00">enable_minimal_mode</text>
+</g>
+<!-- vm&#45;&#45;enable_minimal_mode -->
+<g id="edge4" class="edge">
+<title>vm&#45;&#45;enable_minimal_mode</title>
+<path fill="none" stroke="black" d="M30.26,-872.89C36.97,-923.34 58.33,-1065.18 90,-1098 96.24,-1104.47 103.93,-1109.4 112.2,-1113.17"/>
+</g>
+<!-- restart_subprocesses -->
+<g id="node6" class="node">
+<title>restart_subprocesses</title>
+<ellipse fill="none" stroke="black" cx="182.94" cy="-1071" rx="83.39" ry="18"/>
+<text text-anchor="middle" x="182.94" y="-1067.3" font-family="Times,serif" font-size="14.00">restart_subprocesses</text>
+</g>
+<!-- vm&#45;&#45;restart_subprocesses -->
+<g id="edge5" class="edge">
+<title>vm&#45;&#45;restart_subprocesses</title>
+<path fill="none" stroke="black" d="M28.59,-873.23C30.82,-910.69 41.27,-996.78 90,-1044 97.29,-1051.06 106.32,-1056.29 115.86,-1060.16"/>
+</g>
+<!-- setupwizard_mode -->
+<g id="node7" class="node">
+<title>setupwizard_mode</title>
+<ellipse fill="none" stroke="black" cx="182.94" cy="-1017" rx="77.19" ry="18"/>
+<text text-anchor="middle" x="182.94" y="-1013.3" font-family="Times,serif" font-size="14.00">setupwizard_mode</text>
+</g>
+<!-- vm&#45;&#45;setupwizard_mode -->
+<g id="edge6" class="edge">
+<title>vm&#45;&#45;setupwizard_mode</title>
+<path fill="none" stroke="black" d="M31.33,-873.19C37.6,-902.19 53.91,-958.79 90,-990 98.43,-997.29 108.75,-1002.61 119.38,-1006.5"/>
+</g>
+<!-- smt -->
+<g id="node8" class="node">
+<title>smt</title>
+<ellipse fill="none" stroke="black" cx="182.94" cy="-963" rx="27" ry="18"/>
+<text text-anchor="middle" x="182.94" y="-959.3" font-family="Times,serif" font-size="14.00">smt</text>
+</g>
+<!-- vm&#45;&#45;smt -->
+<g id="edge7" class="edge">
+<title>vm&#45;&#45;smt</title>
+<path fill="none" stroke="black" d="M36.61,-871.83C46.89,-890.24 65.76,-919.36 90,-936 109.89,-949.65 136.74,-956.5 156.29,-959.87"/>
+</g>
+<!-- use_allocd -->
+<g id="node9" class="node">
+<title>use_allocd</title>
+<ellipse fill="none" stroke="black" cx="182.94" cy="-909" rx="48.19" ry="18"/>
+<text text-anchor="middle" x="182.94" y="-905.3" font-family="Times,serif" font-size="14.00">use_allocd</text>
+</g>
+<!-- vm&#45;&#45;use_allocd -->
+<g id="edge8" class="edge">
+<title>vm&#45;&#45;use_allocd</title>
+<path fill="none" stroke="black" d="M49.54,-865.17C61.34,-870.54 76.32,-877.03 90,-882 107.2,-888.24 126.6,-894.11 143.12,-898.76"/>
+</g>
+<!-- use_sdcard -->
+<g id="node10" class="node">
+<title>use_sdcard</title>
+<ellipse fill="none" stroke="black" cx="182.94" cy="-855" rx="50.09" ry="18"/>
+<text text-anchor="middle" x="182.94" y="-851.3" font-family="Times,serif" font-size="14.00">use_sdcard</text>
+</g>
+<!-- vm&#45;&#45;use_sdcard -->
+<g id="edge9" class="edge">
+<title>vm&#45;&#45;use_sdcard</title>
+<path fill="none" stroke="black" d="M54.11,-855C75.56,-855 106.54,-855 132.63,-855"/>
+</g>
+<!-- uuid -->
+<g id="node11" class="node">
+<title>uuid</title>
+<ellipse fill="none" stroke="black" cx="182.94" cy="-801" rx="27" ry="18"/>
+<text text-anchor="middle" x="182.94" y="-797.3" font-family="Times,serif" font-size="14.00">uuid</text>
+</g>
+<!-- vm&#45;&#45;uuid -->
+<g id="edge10" class="edge">
+<title>vm&#45;&#45;uuid</title>
+<path fill="none" stroke="black" d="M49.54,-844.83C61.34,-839.46 76.32,-832.97 90,-828 112.52,-819.83 138.79,-812.31 157.48,-807.29"/>
+</g>
+<!-- file_verbosity -->
+<g id="node12" class="node">
+<title>file_verbosity</title>
+<ellipse fill="none" stroke="black" cx="182.94" cy="-747" rx="59.59" ry="18"/>
+<text text-anchor="middle" x="182.94" y="-743.3" font-family="Times,serif" font-size="14.00">file_verbosity</text>
+</g>
+<!-- vm&#45;&#45;file_verbosity -->
+<g id="edge11" class="edge">
+<title>vm&#45;&#45;file_verbosity</title>
+<path fill="none" stroke="black" d="M36.61,-838.17C46.89,-819.76 65.76,-790.64 90,-774 102.07,-765.71 116.71,-759.94 130.72,-755.92"/>
+</g>
+<!-- verbosity -->
+<g id="node13" class="node">
+<title>verbosity</title>
+<ellipse fill="none" stroke="black" cx="182.94" cy="-693" rx="43.59" ry="18"/>
+<text text-anchor="middle" x="182.94" y="-689.3" font-family="Times,serif" font-size="14.00">verbosity</text>
+</g>
+<!-- vm&#45;&#45;verbosity -->
+<g id="edge12" class="edge">
+<title>vm&#45;&#45;verbosity</title>
+<path fill="none" stroke="black" d="M31.33,-836.81C37.6,-807.81 53.91,-751.21 90,-720 103.97,-707.92 123.11,-701.24 140.31,-697.55"/>
+</g>
+<!-- Run_file_discovery -->
+<g id="node14" class="node">
+<title>Run_file_discovery</title>
+<ellipse fill="none" stroke="black" cx="182.94" cy="-639" rx="79.89" ry="18"/>
+<text text-anchor="middle" x="182.94" y="-635.3" font-family="Times,serif" font-size="14.00">Run_file_discovery</text>
+</g>
+<!-- vm&#45;&#45;Run_file_discovery -->
+<g id="edge13" class="edge">
+<title>vm&#45;&#45;Run_file_discovery</title>
+<path fill="none" stroke="black" d="M28.59,-836.77C30.82,-799.31 41.27,-713.22 90,-666 97.65,-658.59 107.21,-653.2 117.27,-649.28"/>
+</g>
+<!-- config -->
+<g id="node15" class="node">
+<title>config</title>
+<ellipse fill="none" stroke="black" cx="182.94" cy="-585" rx="33.29" ry="18"/>
+<text text-anchor="middle" x="182.94" y="-581.3" font-family="Times,serif" font-size="14.00">config</text>
+</g>
+<!-- vm&#45;&#45;config -->
+<g id="edge14" class="edge">
+<title>vm&#45;&#45;config</title>
+<path fill="none" stroke="black" d="M30.26,-837.11C36.97,-786.66 58.33,-644.82 90,-612 105.48,-595.96 129.94,-589.31 149.75,-586.62"/>
+</g>
+<!-- memory_mb -->
+<g id="node16" class="node">
+<title>memory_mb</title>
+<ellipse fill="none" stroke="black" cx="182.94" cy="-531" rx="57.39" ry="18"/>
+<text text-anchor="middle" x="182.94" y="-527.3" font-family="Times,serif" font-size="14.00">memory_mb</text>
+</g>
+<!-- vm&#45;&#45;memory_mb -->
+<g id="edge15" class="edge">
+<title>vm&#45;&#45;memory_mb</title>
+<path fill="none" stroke="black" d="M29.37,-836.98C34.13,-779.13 51.77,-599.41 90,-558 100.04,-547.12 114.22,-540.54 128.43,-536.59"/>
+</g>
+<!-- custom_actions -->
+<g id="node17" class="node">
+<title>custom_actions</title>
+<ellipse fill="none" stroke="black" cx="182.94" cy="-477" rx="65.79" ry="18"/>
+<text text-anchor="middle" x="182.94" y="-473.3" font-family="Times,serif" font-size="14.00">custom_actions</text>
+</g>
+<!-- vm&#45;&#45;custom_actions -->
+<g id="edge16" class="edge">
+<title>vm&#45;&#45;custom_actions</title>
+<path fill="none" stroke="black" d="M28.74,-836.97C31.75,-772.45 45.12,-554.15 90,-504 98.54,-494.45 110.26,-488.21 122.48,-484.14"/>
+</g>
+<!-- security -->
+<g id="node29" class="node">
+<title>security</title>
+<ellipse fill="none" stroke="black" cx="182.94" cy="-423" rx="38.19" ry="18"/>
+<text text-anchor="middle" x="182.94" y="-419.3" font-family="Times,serif" font-size="14.00">security</text>
+</g>
+<!-- vm&#45;&#45;security -->
+<g id="edge29" class="edge">
+<title>vm&#45;&#45;security</title>
+<path fill="none" stroke="black" d="M28.28,-836.72C29.73,-765.67 38.49,-508.9 90,-450 103.58,-434.47 125.58,-427.65 144.65,-424.75"/>
+</g>
+<!-- kernel -->
+<g id="node33" class="node">
+<title>kernel</title>
+<ellipse fill="none" stroke="black" cx="182.94" cy="-207" rx="32.49" ry="18"/>
+<text text-anchor="middle" x="182.94" y="-203.3" font-family="Times,serif" font-size="14.00">kernel</text>
+</g>
+<!-- vm&#45;&#45;kernel -->
+<g id="edge33" class="edge">
+<title>vm&#45;&#45;kernel</title>
+<path fill="none" stroke="black" d="M28.4,-836.63C30.2,-773.74 39.87,-561.88 90,-396 110.27,-328.94 151.85,-256.16 171.38,-223.96"/>
+</g>
+<!-- crosvm -->
+<g id="node21" class="node">
+<title>crosvm</title>
+<ellipse fill="none" stroke="black" cx="404.18" cy="-1314" rx="37.09" ry="18"/>
+<text text-anchor="middle" x="404.18" y="-1310.3" font-family="Times,serif" font-size="14.00">crosvm</text>
+</g>
+<!-- vm_manager&#45;&#45;crosvm -->
+<g id="edge18" class="edge">
+<title>vm_manager&#45;&#45;crosvm</title>
+<path fill="none" stroke="black" d="M227.31,-1244.62C242.76,-1249.05 260.22,-1254.4 275.88,-1260 311.37,-1272.69 351.21,-1290.13 376.71,-1301.71"/>
+</g>
+<!-- qemu -->
+<g id="node24" class="node">
+<title>qemu</title>
+<ellipse fill="none" stroke="black" cx="404.18" cy="-1233" rx="30.59" ry="18"/>
+<text text-anchor="middle" x="404.18" y="-1229.3" font-family="Times,serif" font-size="14.00">qemu</text>
+</g>
+<!-- vm_manager&#45;&#45;qemu -->
+<g id="edge22" class="edge">
+<title>vm_manager&#45;&#45;qemu</title>
+<path fill="none" stroke="black" d="M240.97,-1233C283.35,-1233 339.46,-1233 373.36,-1233"/>
+</g>
+<!-- gem5 -->
+<g id="node25" class="node">
+<title>gem5</title>
+<ellipse fill="none" stroke="black" cx="404.18" cy="-1125" rx="30.59" ry="18"/>
+<text text-anchor="middle" x="404.18" y="-1121.3" font-family="Times,serif" font-size="14.00">gem5</text>
+</g>
+<!-- vm_manager&#45;&#45;gem5 -->
+<g id="edge24" class="edge">
+<title>vm_manager&#45;&#45;gem5</title>
+<path fill="none" stroke="black" d="M229.75,-1222.28C244.78,-1218.05 261.34,-1212.6 275.88,-1206 317.15,-1187.27 360.77,-1156.86 384.72,-1139.11"/>
+</g>
+<!-- crosvm_binary_dir -->
+<g id="node18" class="node">
+<title>crosvm_binary_dir</title>
+<ellipse fill="none" stroke="black" cx="615.01" cy="-1395" rx="47.39" ry="18"/>
+<text text-anchor="middle" x="615.01" y="-1391.3" font-family="Times,serif" font-size="14.00">binary_dir</text>
+</g>
+<!-- qemu_binary_dir -->
+<g id="node19" class="node">
+<title>qemu_binary_dir</title>
+<ellipse fill="none" stroke="black" cx="615.01" cy="-1233" rx="47.39" ry="18"/>
+<text text-anchor="middle" x="615.01" y="-1229.3" font-family="Times,serif" font-size="14.00">binary_dir</text>
+</g>
+<!-- gem5_binary_dir -->
+<g id="node20" class="node">
+<title>gem5_binary_dir</title>
+<ellipse fill="none" stroke="black" cx="615.01" cy="-1179" rx="47.39" ry="18"/>
+<text text-anchor="middle" x="615.01" y="-1175.3" font-family="Times,serif" font-size="14.00">binary_dir</text>
+</g>
+<!-- crosvm&#45;&#45;crosvm_binary_dir -->
+<g id="edge19" class="edge">
+<title>crosvm&#45;&#45;crosvm_binary_dir</title>
+<path fill="none" stroke="black" d="M432.23,-1326.24C457.82,-1337.63 497.43,-1354.79 532.47,-1368 547.12,-1373.52 563.47,-1379.06 577.66,-1383.67"/>
+</g>
+<!-- seccomp_policy_dir -->
+<g id="node22" class="node">
+<title>seccomp_policy_dir</title>
+<ellipse fill="none" stroke="black" cx="615.01" cy="-1341" rx="82.59" ry="18"/>
+<text text-anchor="middle" x="615.01" y="-1337.3" font-family="Times,serif" font-size="14.00">seccomp_policy_dir</text>
+</g>
+<!-- crosvm&#45;&#45;seccomp_policy_dir -->
+<g id="edge20" class="edge">
+<title>crosvm&#45;&#45;seccomp_policy_dir</title>
+<path fill="none" stroke="black" d="M440.13,-1318.52C468.23,-1322.15 508.61,-1327.37 543.39,-1331.87"/>
+</g>
+<!-- enable_sandbox -->
+<g id="node23" class="node">
+<title>enable_sandbox</title>
+<ellipse fill="none" stroke="black" cx="615.01" cy="-1287" rx="67.69" ry="18"/>
+<text text-anchor="middle" x="615.01" y="-1283.3" font-family="Times,serif" font-size="14.00">enable_sandbox</text>
+</g>
+<!-- crosvm&#45;&#45;enable_sandbox -->
+<g id="edge21" class="edge">
+<title>crosvm&#45;&#45;enable_sandbox</title>
+<path fill="none" stroke="black" d="M440.13,-1309.48C471.13,-1305.47 517.09,-1299.53 553.97,-1294.76"/>
+</g>
+<!-- qemu&#45;&#45;qemu_binary_dir -->
+<g id="edge23" class="edge">
+<title>qemu&#45;&#45;qemu_binary_dir</title>
+<path fill="none" stroke="black" d="M435.1,-1233C469.61,-1233 526.77,-1233 567.19,-1233"/>
+</g>
+<!-- gem5&#45;&#45;gem5_binary_dir -->
+<g id="edge25" class="edge">
+<title>gem5&#45;&#45;gem5_binary_dir</title>
+<path fill="none" stroke="black" d="M432.48,-1132.06C468.96,-1141.49 533.83,-1158.27 575.33,-1169"/>
+</g>
+<!-- checkpoint_dir -->
+<g id="node26" class="node">
+<title>checkpoint_dir</title>
+<ellipse fill="none" stroke="black" cx="615.01" cy="-1125" rx="63.89" ry="18"/>
+<text text-anchor="middle" x="615.01" y="-1121.3" font-family="Times,serif" font-size="14.00">checkpoint_dir</text>
+</g>
+<!-- gem5&#45;&#45;checkpoint_dir -->
+<g id="edge26" class="edge">
+<title>gem5&#45;&#45;checkpoint_dir</title>
+<path fill="none" stroke="black" d="M435.1,-1125C465.21,-1125 512.57,-1125 551,-1125"/>
+</g>
+<!-- debug_file -->
+<g id="node27" class="node">
+<title>debug_file</title>
+<ellipse fill="none" stroke="black" cx="615.01" cy="-1071" rx="48.19" ry="18"/>
+<text text-anchor="middle" x="615.01" y="-1067.3" font-family="Times,serif" font-size="14.00">debug_file</text>
+</g>
+<!-- gem5&#45;&#45;debug_file -->
+<g id="edge27" class="edge">
+<title>gem5&#45;&#45;debug_file</title>
+<path fill="none" stroke="black" d="M432.48,-1117.94C468.85,-1108.53 533.44,-1091.83 574.96,-1081.1"/>
+</g>
+<!-- debug_flags -->
+<g id="node28" class="node">
+<title>debug_flags</title>
+<ellipse fill="none" stroke="black" cx="615.01" cy="-1017" rx="53.89" ry="18"/>
+<text text-anchor="middle" x="615.01" y="-1013.3" font-family="Times,serif" font-size="14.00">debug_flags</text>
+</g>
+<!-- gem5&#45;&#45;debug_flags -->
+<g id="edge28" class="edge">
+<title>gem5&#45;&#45;debug_flags</title>
+<path fill="none" stroke="black" d="M423.73,-1111.11C447.79,-1093.59 491.53,-1063.43 532.47,-1044 545.27,-1037.92 559.79,-1032.64 573.01,-1028.4"/>
+</g>
+<!-- guest_enforce_security -->
+<g id="node30" class="node">
+<title>guest_enforce_security</title>
+<ellipse fill="none" stroke="black" cx="404.18" cy="-504" rx="92.08" ry="18"/>
+<text text-anchor="middle" x="404.18" y="-500.3" font-family="Times,serif" font-size="14.00">guest_enforce_security</text>
+</g>
+<!-- security&#45;&#45;guest_enforce_security -->
+<g id="edge30" class="edge">
+<title>security&#45;&#45;guest_enforce_security</title>
+<path fill="none" stroke="black" d="M218.67,-430.08C236.36,-434.46 257.89,-440.98 275.88,-450 293.76,-458.96 294.01,-468.02 311.88,-477 322.22,-482.19 333.73,-486.57 345,-490.18"/>
+</g>
+<!-- serial_number -->
+<g id="node31" class="node">
+<title>serial_number</title>
+<ellipse fill="none" stroke="black" cx="404.18" cy="-450" rx="61.99" ry="18"/>
+<text text-anchor="middle" x="404.18" y="-446.3" font-family="Times,serif" font-size="14.00">serial_number</text>
+</g>
+<!-- security&#45;&#45;serial_number -->
+<g id="edge31" class="edge">
+<title>security&#45;&#45;serial_number</title>
+<path fill="none" stroke="black" d="M220.14,-427.46C254.56,-431.7 306.85,-438.14 346.74,-443.05"/>
+</g>
+<!-- secure_hals -->
+<g id="node32" class="node">
+<title>secure_hals</title>
+<ellipse fill="none" stroke="black" cx="404.18" cy="-396" rx="51.99" ry="18"/>
+<text text-anchor="middle" x="404.18" y="-392.3" font-family="Times,serif" font-size="14.00">secure_hals</text>
+</g>
+<!-- security&#45;&#45;secure_hals -->
+<g id="edge32" class="edge">
+<title>security&#45;&#45;secure_hals</title>
+<path fill="none" stroke="black" d="M220.14,-418.54C256.99,-414 314.33,-406.94 355,-401.93"/>
+</g>
+<!-- enable_kernel_log -->
+<g id="node34" class="node">
+<title>enable_kernel_log</title>
+<ellipse fill="none" stroke="black" cx="404.18" cy="-342" rx="75.29" ry="18"/>
+<text text-anchor="middle" x="404.18" y="-338.3" font-family="Times,serif" font-size="14.00">enable_kernel_log</text>
+</g>
+<!-- kernel&#45;&#45;enable_kernel_log -->
+<g id="edge34" class="edge">
+<title>kernel&#45;&#45;enable_kernel_log</title>
+<path fill="none" stroke="black" d="M198.53,-222.95C220.79,-246.39 265.57,-290.04 311.88,-315 323.09,-321.04 335.81,-325.89 348.12,-329.71"/>
+</g>
+<!-- kgdb -->
+<g id="node35" class="node">
+<title>kgdb</title>
+<ellipse fill="none" stroke="black" cx="404.18" cy="-288" rx="28.7" ry="18"/>
+<text text-anchor="middle" x="404.18" y="-284.3" font-family="Times,serif" font-size="14.00">kgdb</text>
+</g>
+<!-- kernel&#45;&#45;kgdb -->
+<g id="edge35" class="edge">
+<title>kernel&#45;&#45;kgdb</title>
+<path fill="none" stroke="black" d="M208.62,-218.39C234.15,-229.94 275.33,-247.93 311.88,-261 333.67,-268.79 358.93,-276.14 377.35,-281.19"/>
+</g>
+<!-- gdb_port -->
+<g id="node36" class="node">
+<title>gdb_port</title>
+<ellipse fill="none" stroke="black" cx="404.18" cy="-234" rx="42.49" ry="18"/>
+<text text-anchor="middle" x="404.18" y="-230.3" font-family="Times,serif" font-size="14.00">gdb_port</text>
+</g>
+<!-- kernel&#45;&#45;gdb_port -->
+<g id="edge36" class="edge">
+<title>kernel&#45;&#45;gdb_port</title>
+<path fill="none" stroke="black" d="M214.89,-210.81C253.84,-215.61 320.8,-223.85 363.48,-229.11"/>
+</g>
+<!-- console -->
+<g id="node37" class="node">
+<title>console</title>
+<ellipse fill="none" stroke="black" cx="404.18" cy="-180" rx="37.89" ry="18"/>
+<text text-anchor="middle" x="404.18" y="-176.3" font-family="Times,serif" font-size="14.00">console</text>
+</g>
+<!-- kernel&#45;&#45;console -->
+<g id="edge37" class="edge">
+<title>kernel&#45;&#45;console</title>
+<path fill="none" stroke="black" d="M214.89,-203.19C255.02,-198.25 324.88,-189.64 367.29,-184.42"/>
+</g>
+<!-- extra_kernel_cmdline -->
+<g id="node38" class="node">
+<title>extra_kernel_cmdline</title>
+<ellipse fill="none" stroke="black" cx="404.18" cy="-126" rx="87.99" ry="18"/>
+<text text-anchor="middle" x="404.18" y="-122.3" font-family="Times,serif" font-size="14.00">extra_kernel_cmdline</text>
+</g>
+<!-- kernel&#45;&#45;extra_kernel_cmdline -->
+<g id="edge38" class="edge">
+<title>kernel&#45;&#45;extra_kernel_cmdline</title>
+<path fill="none" stroke="black" d="M208.62,-195.61C234.15,-184.06 275.33,-166.07 311.88,-153 324.33,-148.55 337.92,-144.24 350.71,-140.42"/>
+</g>
+<!-- initramfs_path -->
+<g id="node39" class="node">
+<title>initramfs_path</title>
+<ellipse fill="none" stroke="black" cx="404.18" cy="-72" rx="62.29" ry="18"/>
+<text text-anchor="middle" x="404.18" y="-68.3" font-family="Times,serif" font-size="14.00">initramfs_path</text>
+</g>
+<!-- kernel&#45;&#45;initramfs_path -->
+<g id="edge39" class="edge">
+<title>kernel&#45;&#45;initramfs_path</title>
+<path fill="none" stroke="black" d="M198.53,-191.05C220.79,-167.61 265.57,-123.96 311.88,-99 324.69,-92.1 339.48,-86.75 353.37,-82.71"/>
+</g>
+<!-- path -->
+<g id="node40" class="node">
+<title>path</title>
+<ellipse fill="none" stroke="black" cx="404.18" cy="-18" rx="27" ry="18"/>
+<text text-anchor="middle" x="404.18" y="-14.3" font-family="Times,serif" font-size="14.00">path</text>
+</g>
+<!-- kernel&#45;&#45;path -->
+<g id="edge40" class="edge">
+<title>kernel&#45;&#45;path</title>
+<path fill="none" stroke="black" d="M192.83,-189.7C210.74,-156.53 254.45,-83.87 311.88,-45 331.59,-31.66 358.02,-24.79 377.36,-21.32"/>
+</g>
+</g>
+</svg>
diff --git a/host/commands/cvd/parser/fetch_cvd_parser.cpp b/host/commands/cvd/parser/fetch_cvd_parser.cpp
new file mode 100644
index 0000000..c6d7c84
--- /dev/null
+++ b/host/commands/cvd/parser/fetch_cvd_parser.cpp
@@ -0,0 +1,85 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/commands/cvd/parser/fetch_cvd_parser.h"
+
+#include <android-base/file.h>
+#include <gflags/gflags.h>
+
+#include <stdio.h>
+#include <fstream>
+#include <string>
+
+#include "common/libs/utils/files.h"
+#include "common/libs/utils/json.h"
+#include "host/commands/assemble_cvd/flags_defaults.h"
+#include "host/commands/cvd/parser/cf_configs_common.h"
+
+namespace cuttlefish {
+
+#define EMPTY_CREDENTIAL ""
+#define EMPTY_DEFAULT_BUILD ""
+#define EMPTY_SYSTEM_BUILD ""
+#define EMPTY_KERNEL_BUILD ""
+
+void InitFetchInstanceConfigs(Json::Value& instances) {
+  // Handle common flags
+  InitStringConfig(instances, "disk", "default_build", EMPTY_DEFAULT_BUILD);
+  InitStringConfig(instances, "disk", "system_build", EMPTY_SYSTEM_BUILD);
+  InitStringConfig(instances, "disk", "kernel_build", EMPTY_KERNEL_BUILD);
+}
+
+void InitFetchCvdConfigs(Json::Value& root) {
+  if (!root.isMember("credential")) {
+    root["credential"] = EMPTY_CREDENTIAL;
+  }
+  InitFetchInstanceConfigs(root["instances"]);
+}
+
+FetchCvdDeviceConfigs ParseFetchInstanceConfigs(const Json::Value& instance) {
+  FetchCvdDeviceConfigs result;
+  result.default_build = instance["disk"]["default_build"].asString();
+  result.system_build = instance["disk"]["system_build"].asString();
+  result.kernel_build = instance["disk"]["kernel_build"].asString();
+  if (result.default_build != EMPTY_DEFAULT_BUILD ||
+      result.system_build != EMPTY_SYSTEM_BUILD ||
+      result.kernel_build != EMPTY_KERNEL_BUILD) {
+    result.use_fetch_artifact = true;
+  } else {
+    result.use_fetch_artifact = false;
+  }
+
+  return result;
+}
+
+FetchCvdConfigs GenerateFetchCvdFlags(const Json::Value& root) {
+  FetchCvdConfigs result;
+  result.credential = root["credential"].asString();
+  int num_instances = root["instances"].size();
+  for (unsigned int i = 0; i < num_instances; i++) {
+    auto instance_config = ParseFetchInstanceConfigs(root["instances"][i]);
+    result.instances.emplace_back(instance_config);
+  }
+
+  return result;
+}
+
+FetchCvdConfigs ParseFetchCvdConfigs(Json::Value& root) {
+  InitFetchCvdConfigs(root);
+  return GenerateFetchCvdFlags(root);
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/parser/fetch_cvd_parser.h b/host/commands/cvd/parser/fetch_cvd_parser.h
new file mode 100644
index 0000000..659675c
--- /dev/null
+++ b/host/commands/cvd/parser/fetch_cvd_parser.h
@@ -0,0 +1,36 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+#include <json/json.h>
+
+namespace cuttlefish {
+
+struct FetchCvdDeviceConfigs {
+  bool use_fetch_artifact;
+  std::string default_build;
+  std::string system_build;
+  std::string kernel_build;
+};
+
+struct FetchCvdConfigs {
+  std::string credential;
+  std::vector<FetchCvdDeviceConfigs> instances;
+};
+
+FetchCvdConfigs ParseFetchCvdConfigs(Json::Value& root);
+
+};  // namespace cuttlefish
diff --git a/host/commands/cvd/parser/instance/cf_boot_configs.cpp b/host/commands/cvd/parser/instance/cf_boot_configs.cpp
new file mode 100644
index 0000000..e096b6c
--- /dev/null
+++ b/host/commands/cvd/parser/instance/cf_boot_configs.cpp
@@ -0,0 +1,47 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "host/commands/cvd/parser/instance/cf_boot_configs.h"
+
+#include <android-base/logging.h>
+
+#include "host/commands/assemble_cvd/flags_defaults.h"
+#include "host/commands/cvd/parser/cf_configs_common.h"
+#include "host/libs/config/cuttlefish_config.h"
+
+namespace cuttlefish {
+
+void InitBootConfigs(Json::Value& instances) {
+  InitStringConfig(instances, "boot", "extra_bootconfig_args",
+                   CF_DEFAULTS_EXTRA_BOOTCONFIG_ARGS);
+  InitBoolConfig(instances, "boot", "enable_bootanimation",
+                 CF_DEFAULTS_ENABLE_BOOTANIMATION);
+  InitStringConfigSubGroup(instances, "boot", "kernel", "extra_kernel_cmdline",
+                           CF_DEFAULTS_EXTRA_KERNEL_CMDLINE);
+}
+
+std::vector<std::string> GenerateBootFlags(const Json::Value& instances) {
+  std::vector<std::string> result;
+  result.emplace_back(GenerateGflag(instances, "extra_bootconfig_args", "boot",
+                                    "extra_bootconfig_args"));
+  result.emplace_back(GenerateGflag(instances, "enable_bootanimation", "boot",
+                                    "enable_bootanimation"));
+  result.emplace_back(GenerateGflagSubGroup(instances, "extra_kernel_cmdline",
+                                            "boot", "kernel",
+                                            "extra_kernel_cmdline"));
+  return result;
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/parser/instance/cf_boot_configs.h b/host/commands/cvd/parser/instance/cf_boot_configs.h
new file mode 100644
index 0000000..79efad3
--- /dev/null
+++ b/host/commands/cvd/parser/instance/cf_boot_configs.h
@@ -0,0 +1,25 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+#include <json/json.h>
+#include <string>
+#include <vector>
+
+namespace cuttlefish {
+void InitBootConfigs(Json::Value& root);
+std::vector<std::string> GenerateBootFlags(const Json::Value& root);
+};  // namespace cuttlefish
diff --git a/host/commands/cvd/parser/instance/cf_graphics_configs.cpp b/host/commands/cvd/parser/instance/cf_graphics_configs.cpp
new file mode 100644
index 0000000..1e17684
--- /dev/null
+++ b/host/commands/cvd/parser/instance/cf_graphics_configs.cpp
@@ -0,0 +1,83 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "host/commands/cvd/parser/instance/cf_boot_configs.h"
+
+#include <android-base/logging.h>
+#include <android-base/strings.h>
+#include <google/protobuf/text_format.h>
+
+#include "launch_cvd.pb.h"
+
+#include "common/libs/utils/base64.h"
+#include "host/commands/assemble_cvd/flags_defaults.h"
+#include "host/commands/cvd/parser/cf_configs_common.h"
+#include "host/libs/config/cuttlefish_config.h"
+
+namespace cuttlefish {
+
+void InitGraphicsConfigs(Json::Value& instances) {
+  InitIntConfigSubGroupVector(instances, "graphics", "displays", "width",
+                              CF_DEFAULTS_DISPLAY_WIDTH);
+  InitIntConfigSubGroupVector(instances, "graphics", "displays", "height",
+                              CF_DEFAULTS_DISPLAY_HEIGHT);
+  InitIntConfigSubGroupVector(instances, "graphics", "displays", "dpi",
+                              CF_DEFAULTS_DISPLAY_DPI);
+  InitIntConfigSubGroupVector(instances, "graphics", "displays",
+                              "refresh_rate_hertz",
+                              CF_DEFAULTS_DISPLAY_REFRESH_RATE);
+}
+
+std::string GenerateDisplayFlag(const Json::Value& instances_json) {
+  using google::protobuf::TextFormat;
+  cuttlefish::InstancesDisplays all_instances_displays;
+
+  int num_instances = instances_json.size();
+  for (int i = 0; i < num_instances; i++) {
+    auto* instance = all_instances_displays.add_instances();
+    int num_displays = instances_json[i]["graphics"]["displays"].size();
+    for (int j = 0; j < num_displays; j++) {
+      Json::Value display_json = instances_json[i]["graphics"]["displays"][j];
+      auto* display = instance->add_displays();
+      display->set_width(display_json["width"].asInt());
+      display->set_height(display_json["height"].asInt());
+      display->set_dpi(display_json["dpi"].asInt());
+      display->set_refresh_rate_hertz(
+          display_json["refresh_rate_hertz"].asInt());
+    }
+  }
+
+  std::string bin_output;
+  if (!all_instances_displays.SerializeToString(&bin_output)) {
+    LOG(ERROR) << "Failed to convert display proto to binary string ";
+    return std::string();
+  }
+
+  std::string base64_output;
+  if (!cuttlefish::EncodeBase64((void*)bin_output.c_str(), bin_output.size(),
+                                &base64_output)) {
+    LOG(ERROR) << "Failed to apply EncodeBase64 to binary string ";
+    return std::string();
+  }
+  return "--displays_binproto=" + base64_output;
+}
+
+std::vector<std::string> GenerateGraphicsFlags(const Json::Value& instances) {
+  std::vector<std::string> result;
+  result.emplace_back(GenerateDisplayFlag(instances));
+  return result;
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/parser/instance/cf_graphics_configs.h b/host/commands/cvd/parser/instance/cf_graphics_configs.h
new file mode 100644
index 0000000..c0bdccd
--- /dev/null
+++ b/host/commands/cvd/parser/instance/cf_graphics_configs.h
@@ -0,0 +1,25 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+#include <json/json.h>
+#include <string>
+#include <vector>
+
+namespace cuttlefish {
+void InitGraphicsConfigs(Json::Value& root);
+std::vector<std::string> GenerateGraphicsFlags(const Json::Value& root);
+};  // namespace cuttlefish
diff --git a/host/commands/cvd/parser/instance/cf_metrics_configs.cpp b/host/commands/cvd/parser/instance/cf_metrics_configs.cpp
new file mode 100644
index 0000000..d708343
--- /dev/null
+++ b/host/commands/cvd/parser/instance/cf_metrics_configs.cpp
@@ -0,0 +1,41 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "host/commands/cvd/parser/instance/cf_metrics_configs.h"
+
+#include <android-base/logging.h>
+
+#include "host/commands/assemble_cvd/flags_defaults.h"
+#include "host/commands/cvd/parser/cf_configs_common.h"
+#include "host/libs/config/cuttlefish_config.h"
+
+// Metrics collection will be disabled by default for canonical configs MVP
+#define DEFAULT_ENABLE_REPORTING "n"
+
+namespace cuttlefish {
+
+static std::string GenerateReportFlag() {
+  std::stringstream result_flag;
+  result_flag << "--report_anonymous_usage_stats=" << DEFAULT_ENABLE_REPORTING;
+  return result_flag.str();
+}
+
+std::vector<std::string> GenerateMetricsFlags(const Json::Value&) {
+  std::vector<std::string> result;
+  result.emplace_back(GenerateReportFlag());
+  return result;
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/parser/instance/cf_metrics_configs.h b/host/commands/cvd/parser/instance/cf_metrics_configs.h
new file mode 100644
index 0000000..d4d0ae2
--- /dev/null
+++ b/host/commands/cvd/parser/instance/cf_metrics_configs.h
@@ -0,0 +1,24 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+#include <json/json.h>
+#include <string>
+#include <vector>
+
+namespace cuttlefish {
+std::vector<std::string> GenerateMetricsFlags(const Json::Value& root);
+};  // namespace cuttlefish
diff --git a/host/commands/cvd/parser/instance/cf_security_configs.cpp b/host/commands/cvd/parser/instance/cf_security_configs.cpp
new file mode 100644
index 0000000..6e847bd
--- /dev/null
+++ b/host/commands/cvd/parser/instance/cf_security_configs.cpp
@@ -0,0 +1,65 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "host/commands/cvd/parser/instance/cf_security_configs.h"
+
+#include <android-base/logging.h>
+
+#include "host/commands/assemble_cvd/flags_defaults.h"
+#include "host/commands/cvd/parser/cf_configs_common.h"
+#include "host/libs/config/cuttlefish_config.h"
+
+namespace cuttlefish {
+
+/*This function is created to cover the initiation use_random_serial flag
+when the json value of serial_number equal "@random"
+*/
+void InitRandomSerialNumber(Json::Value& instances) {
+  int size = instances.size();
+  for (int i = 0; i < size; i++) {
+    std::string serial_number_str =
+        instances[i]["security"]["serial_number"].asString();
+    if (serial_number_str == "@random") {
+      instances[i]["security"]["use_random_serial"] = true;
+    } else {
+      instances[i]["security"]["use_random_serial"] = false;
+    }
+  }
+}
+
+void InitSecurityConfigs(Json::Value& instances) {
+  InitStringConfig(instances, "security", "serial_number",
+                   CF_DEFAULTS_SERIAL_NUMBER);
+  // This init should be called after the InitSecurityConfigs call, since it
+  // depends on the serial_number flag
+  InitRandomSerialNumber(instances);
+  InitBoolConfig(instances, "security", "guest_enforce_security",
+                 CF_DEFAULTS_GUEST_ENFORCE_SECURITY);
+}
+
+std::vector<std::string> GenerateSecurityFlags(const Json::Value& instances) {
+  std::vector<std::string> result;
+  if (!GENERATE_MVP_FLAGS_ONLY) {
+    result.emplace_back(
+        GenerateGflag(instances, "serial_number", "security", "serial_number"));
+    result.emplace_back(GenerateGflag(instances, "use_random_serial",
+                                      "security", "use_random_serial"));
+  }
+  result.emplace_back(GenerateGflag(instances, "guest_enforce_security",
+                                    "security", "guest_enforce_security"));
+  return result;
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/parser/instance/cf_security_configs.h b/host/commands/cvd/parser/instance/cf_security_configs.h
new file mode 100644
index 0000000..d814644
--- /dev/null
+++ b/host/commands/cvd/parser/instance/cf_security_configs.h
@@ -0,0 +1,26 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+#include <string>
+#include <vector>
+
+#include <json/json.h>
+
+namespace cuttlefish {
+void InitSecurityConfigs(Json::Value& root);
+std::vector<std::string> GenerateSecurityFlags(const Json::Value& root);
+};  // namespace cuttlefish
diff --git a/host/commands/cvd/parser/instance/cf_vm_configs.cpp b/host/commands/cvd/parser/instance/cf_vm_configs.cpp
new file mode 100644
index 0000000..424cdf8
--- /dev/null
+++ b/host/commands/cvd/parser/instance/cf_vm_configs.cpp
@@ -0,0 +1,109 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <android-base/logging.h>
+#include <android-base/strings.h>
+
+#include "host/commands/assemble_cvd/flags_defaults.h"
+#include "host/commands/cvd/parser/cf_configs_common.h"
+#include "host/commands/cvd/parser/instance/cf_vm_configs.h"
+#include "host/libs/config/cuttlefish_config.h"
+
+#define UI_DEFAULTS_MEMORY_MB 2048
+
+namespace cuttlefish {
+
+void InitVmManagerConfig(Json::Value& instances) {
+  // Allocate and initialize with default values
+  int size = instances.size();
+  for (int i = 0; i < size; i++) {
+    if (instances[i].isMember("vm")) {
+      if (instances[i]["vm"].isMember("crosvm")) {
+        instances[i]["vm"]["vm_manager"] = "crosvm";
+      } else if (instances[i]["vm"].isMember("qemu")) {
+        instances[i]["vm"]["vm_manager"] = "qemu_cli";
+      } else if (instances[i]["vm"].isMember("gem5")) {
+        instances[i]["vm"]["vm_manager"] = "gem5";
+      } else {
+        // Set vm manager to default value (crosvm)
+        instances[i]["vm"]["vm_manager"] = "crosvm";
+      }
+    } else {
+      // vm object doesn't exist , set the default vm manager to crosvm
+      instances[i]["vm"]["vm_manager"] = "crosvm";
+    }
+  }
+}
+
+void InitVmConfigs(Json::Value& instances) {
+  InitIntConfig(instances, "vm", "cpus", CF_DEFAULTS_CPUS);
+  InitIntConfig(instances, "vm", "memory_mb", UI_DEFAULTS_MEMORY_MB);
+  InitBoolConfig(instances, "vm", "use_sdcard", CF_DEFAULTS_USE_SDCARD);
+  InitStringConfig(instances, "vm", "setupwizard_mode",
+                   CF_DEFAULTS_SETUPWIZARD_MODE);
+  InitStringConfig(instances, "vm", "uuid", CF_DEFAULTS_UUID);
+  InitVmManagerConfig(instances);
+  InitBoolConfigSubGroup(instances, "vm", "crosvm", "enable_sandbox",
+                         CF_DEFAULTS_ENABLE_SANDBOX);
+}
+
+std::vector<std::string> GenerateCustomConfigsFlags(
+    const Json::Value& instances) {
+  std::vector<std::string> result;
+  int size = instances.size();
+  for (int i = 0; i < size; i++) {
+    if (instances[i].isMember("vm") &&
+        instances[i]["vm"].isMember("custom_actions")) {
+      Json::StreamWriterBuilder factory;
+      std::string mapped_text =
+          Json::writeString(factory, instances[i]["vm"]["custom_actions"]);
+      // format json string string to match aosp/2374890 input format
+      mapped_text = android::base::StringReplace(mapped_text, "\n", "", true);
+      mapped_text = android::base::StringReplace(mapped_text, "\r", "", true);
+      mapped_text =
+          android::base::StringReplace(mapped_text, "\"", "\\\"", true);
+      std::stringstream buff;
+      buff << "--custom_actions=" << mapped_text;
+      result.emplace_back(buff.str());
+    } else {
+      // custom_actions parameter doesn't exist in the configuration file
+      result.emplace_back("--custom_actions=unset");
+    }
+  }
+  return result;
+}
+
+std::vector<std::string> GenerateVmFlags(const Json::Value& instances) {
+  std::vector<std::string> result;
+  result.emplace_back(GenerateGflag(instances, "cpus", "vm", "cpus"));
+  result.emplace_back(GenerateGflag(instances, "memory_mb", "vm", "memory_mb"));
+  result.emplace_back(
+      GenerateGflag(instances, "use_sdcard", "vm", "use_sdcard"));
+  result.emplace_back(
+      GenerateGflag(instances, "vm_manager", "vm", "vm_manager"));
+  result.emplace_back(
+      GenerateGflag(instances, "setupwizard_mode", "vm", "setupwizard_mode"));
+  if (!GENERATE_MVP_FLAGS_ONLY) {
+    result.emplace_back(GenerateGflag(instances, "uuid", "vm", "uuid"));
+  }
+  result.emplace_back(GenerateGflagSubGroup(instances, "enable_sandbox", "vm",
+                                            "crosvm", "enable_sandbox"));
+
+  result = MergeResults(result, GenerateCustomConfigsFlags(instances));
+
+  return result;
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/parser/instance/cf_vm_configs.h b/host/commands/cvd/parser/instance/cf_vm_configs.h
new file mode 100644
index 0000000..3fb4904
--- /dev/null
+++ b/host/commands/cvd/parser/instance/cf_vm_configs.h
@@ -0,0 +1,26 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+#include <json/json.h>
+#include <string>
+#include <vector>
+
+namespace cuttlefish {
+void InitVmConfigs(Json::Value& root);
+std::vector<std::string> GenerateVmFlags(const Json::Value& root);
+
+};  // namespace cuttlefish
diff --git a/host/commands/cvd/parser/launch_cvd_parser.cpp b/host/commands/cvd/parser/launch_cvd_parser.cpp
new file mode 100644
index 0000000..2a28647
--- /dev/null
+++ b/host/commands/cvd/parser/launch_cvd_parser.cpp
@@ -0,0 +1,74 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <android-base/file.h>
+#include <gflags/gflags.h>
+
+#include <stdio.h>
+#include <fstream>
+#include <string>
+
+#include "common/libs/utils/files.h"
+#include "common/libs/utils/json.h"
+#include "host/commands/assemble_cvd/flags_defaults.h"
+#include "host/commands/cvd/parser/cf_configs_common.h"
+#include "host/commands/cvd/parser/cf_configs_instances.h"
+#include "host/commands/cvd/parser/launch_cvd_parser.h"
+#include "host/commands/cvd/parser/launch_cvd_templates.h"
+
+namespace cuttlefish {
+
+std::string GenerateNumInstancesFlag(const Json::Value& root) {
+  int num_instances = root["instances"].size();
+  LOG(DEBUG) << "num_instances = " << num_instances;
+  std::string result = "--num_instances=" + std::to_string(num_instances);
+  return result;
+}
+
+std::string GenerateCommonGflag(const Json::Value& root,
+                                const std::string& gflag_name,
+                                const std::string& json_flag) {
+  std::stringstream buff;
+  // Append Header
+  buff << "--" << gflag_name << "=" << root[json_flag].asString();
+  return buff.str();
+}
+
+std::vector<std::string> GenerateCfFlags(const Json::Value& root) {
+  std::vector<std::string> result;
+  result.emplace_back(GenerateNumInstancesFlag(root));
+  result.emplace_back(GenerateCommonGflag(root, "netsim_bt", "netsim_bt"));
+
+  result = MergeResults(result, GenerateInstancesFlags(root["instances"]));
+  return result;
+}
+
+void InitCvdConfigs(Json::Value& root) {
+  // Handle common flags
+  if (!root.isMember("netsim_bt")) {
+    root["netsim_bt"] = CF_DEFAULTS_NETSIM_BT;
+  }
+  // Handle instances flags
+  InitInstancesConfigs(root["instances"]);
+}
+
+std::vector<std::string> ParseLaunchCvdConfigs(Json::Value& root) {
+  ExtractLaunchTemplates(root["instances"]);
+  InitCvdConfigs(root);
+  return GenerateCfFlags(root);
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/parser/launch_cvd_parser.h b/host/commands/cvd/parser/launch_cvd_parser.h
new file mode 100644
index 0000000..3ca36b2
--- /dev/null
+++ b/host/commands/cvd/parser/launch_cvd_parser.h
@@ -0,0 +1,24 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+#include <json/json.h>
+
+namespace cuttlefish {
+
+std::vector<std::string> ParseLaunchCvdConfigs(Json::Value& root);
+
+};  // namespace cuttlefish
diff --git a/host/commands/cvd/parser/launch_cvd_templates.cpp b/host/commands/cvd/parser/launch_cvd_templates.cpp
new file mode 100644
index 0000000..7aa6add
--- /dev/null
+++ b/host/commands/cvd/parser/launch_cvd_templates.cpp
@@ -0,0 +1,313 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <android-base/file.h>
+
+#include <stdio.h>
+#include <string>
+
+#include "common/libs/utils/json.h"
+#include "host/commands/cvd/parser/cf_configs_common.h"
+
+namespace cuttlefish {
+
+enum class ConfigTemplate {
+  PHONE,
+  TABLET,
+  TV,
+  WEARABLE,
+  AUTO,
+  SLIM,
+  GO,
+  FOLDABLE,
+  UNKNOWN,
+};
+
+static std::map<std::string, ConfigTemplate> kSupportedTemplatesKeyMap = {
+    {"phone", ConfigTemplate::PHONE}, {"tablet", ConfigTemplate::TABLET},
+    {"tv", ConfigTemplate::TV},       {"wearable", ConfigTemplate::WEARABLE},
+    {"auto", ConfigTemplate::AUTO},   {"slim", ConfigTemplate::SLIM},
+    {"go", ConfigTemplate::GO},       {"foldable", ConfigTemplate::FOLDABLE}};
+
+// Definition of phone instance template in Json format
+static const char* kPhoneInstanceTemplate = R""""(
+{
+    "vm": {
+        "memory_mb": 4096
+    },
+    "graphics":{
+        "displays":[
+            {
+                "width": 720,
+                "height": 1280,
+                "dpi": 320
+            }
+        ]
+    }
+}
+  )"""";
+
+// Definition of tablet instance template in Json format
+static const char* kTabletInstanceTemplate = R""""(
+{
+    "vm": {
+        "memory_mb": 4096
+    },
+    "graphics":{
+        "displays":[
+            {
+                "width": 2560,
+                "height": 1800,
+                "dpi": 320
+            }
+        ]
+    }
+}
+  )"""";
+
+// Definition of tv instance template in Json format
+static const char* kTvInstanceTemplate = R""""(
+{
+    "vm": {
+        "memory_mb": 2048
+    },
+    "graphics":{
+        "displays":[
+            {
+                "width": 1920,
+                "height": 1080,
+                "dpi": 213
+            }
+        ]
+    }
+}
+  )"""";
+
+// Definition of wearable instance template in Json format
+static const char* kWearableInstanceTemplate = R""""(
+{
+    "vm": {
+        "memory_mb": 1536,
+        "use_sdcard" : false
+    },
+    "graphics":{
+        "displays":[
+            {
+                "width": 450,
+                "height": 450,
+                "dpi": 320
+            }
+        ]
+    }
+}
+  )"""";
+
+// Definition of auto instance template in Json format
+static const char* kAutoInstanceTemplate = R""""(
+{
+    "vm": {
+        "memory_mb": 4096
+    },
+    "graphics":{
+        "displays":[
+            {
+                "width": 1080,
+                "height": 600,
+                "dpi": 120
+            },
+            {
+                "width": 400,
+                "height": 600,
+                "dpi": 120
+            }
+        ]
+    }
+}
+  )"""";
+
+// Definition of slim instance template in Json format
+static const char* kSlimInstanceTemplate = R""""(
+{
+    "vm": {
+        "memory_mb": 2048,
+        "use_sdcard" : false
+    },
+    "graphics":{
+        "displays":[
+            {
+                "width": 720,
+                "height": 1280,
+                "dpi": 320
+            }
+        ]
+    }
+}
+  )"""";
+
+// Definition of go instance template in Json format
+static const char* kGoInstanceTemplate = R""""(
+{
+    "vm": {
+        "memory_mb": 2048
+    },
+    "graphics":{
+        "displays":[
+            {
+                "width": 720,
+                "height": 1280,
+                "dpi": 320
+            }
+        ]
+    }
+}
+  )"""";
+
+static const char* kFoldableInstanceTemplate = R""""(
+{
+    "vm": {
+            "memory_mb": 4096,
+            "custom_actions" : [
+                    {
+                            "device_states": [
+                                    {
+                                            "lid_switch_open": false,
+                                            "hinge_angle_value": 0
+                                    }
+                            ],
+                            "button":{
+                                    "command":"device_state_closed",
+                                    "title":"Device State Closed",
+                                    "icon_name":"smartphone"
+                            }
+                    },
+                    {
+                            "device_states": [
+                                    {
+                                            "lid_switch_open": true,
+                                            "hinge_angle_value": 90
+                                    }
+                            ],
+                            "button":{
+                                    "command":"device_state_half_opened",
+                                    "title":"Device State Half-Opened",
+                                    "icon_name":"laptop"
+                            }
+                    },
+                    {
+                            "device_states": [
+                                    {
+                                            "lid_switch_open": true,
+                                            "hinge_angle_value": 180
+                                    }
+                            ],
+                            "button":{
+                                    "command":"device_state_opened",
+                                    "title":"Device State Opened",
+                                    "icon_name":"tablet"
+                            }
+                    }
+            ]
+    },
+    "graphics":{
+            "displays":[
+                {
+                    "width": 1768,
+                    "height": 2208,
+                    "dpi": 374
+                },
+                {
+                    "width": 832,
+                    "height": 2268,
+                    "dpi": 387
+                }
+            ]
+    }
+}
+  )"""";
+
+Json::Value ExtractJsonTemplate(const Json::Value& instance,
+                                const char* template_string) {
+  std::string json_text(template_string);
+  Json::Value result;
+
+  Json::Reader reader;
+  reader.parse(json_text, result);
+  MergeTwoJsonObjs(result, instance);
+  return result;
+}
+
+Json::Value ExtractInstaneTemplate(const Json::Value& instance) {
+  std::string instance_template = instance["@import"].asString();
+  ConfigTemplate selected_template =
+      kSupportedTemplatesKeyMap.at(instance_template);
+
+  Json::Value result;
+
+  switch (selected_template) {
+    case ConfigTemplate::PHONE:
+      // Extract phone instance configs from input template
+      result = ExtractJsonTemplate(instance, kPhoneInstanceTemplate);
+      break;
+    case ConfigTemplate::TABLET:
+      // Extract tablet instance configs from input template
+      result = ExtractJsonTemplate(instance, kTabletInstanceTemplate);
+      break;
+    case ConfigTemplate::TV:
+      // Extract tv instance configs from input template
+      result = ExtractJsonTemplate(instance, kTvInstanceTemplate);
+      break;
+    case ConfigTemplate::WEARABLE:
+      // Extract wearable instance configs from input template
+      result = ExtractJsonTemplate(instance, kWearableInstanceTemplate);
+      break;
+    case ConfigTemplate::AUTO:
+      // Extract auto instance configs from input template
+      result = ExtractJsonTemplate(instance, kAutoInstanceTemplate);
+      break;
+    case ConfigTemplate::SLIM:
+      // Extract slim instance configs from input template
+      result = ExtractJsonTemplate(instance, kSlimInstanceTemplate);
+      break;
+    case ConfigTemplate::GO:
+      // Extract go instance configs from input template
+      result = ExtractJsonTemplate(instance, kGoInstanceTemplate);
+      break;
+    case ConfigTemplate::FOLDABLE:
+      // Extract foldable instance configs from input template
+      result = ExtractJsonTemplate(instance, kFoldableInstanceTemplate);
+      break;
+
+    default:
+      // handle unsupported @import flag values
+      result = instance;
+      break;
+  }
+
+  return result;
+}
+
+void ExtractLaunchTemplates(Json::Value& root) {
+  int num_instances = root.size();
+  for (unsigned int i = 0; i < num_instances; i++) {
+    // Check whether this instance specifies a supported @import template
+    if (root[i].isMember("@import")) {
+      // Extract instance configs from input template and override current
+      // instance
+      root[i] = ExtractInstaneTemplate(root[i]);
+    }
+  }
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/parser/launch_cvd_templates.h b/host/commands/cvd/parser/launch_cvd_templates.h
new file mode 100644
index 0000000..718667b
--- /dev/null
+++ b/host/commands/cvd/parser/launch_cvd_templates.h
@@ -0,0 +1,24 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+#include <json/json.h>
+
+namespace cuttlefish {
+
+void ExtractLaunchTemplates(Json::Value& root);
+
+};  // namespace cuttlefish
diff --git a/host/commands/cvd/parser/load_configs_parser.cpp b/host/commands/cvd/parser/load_configs_parser.cpp
new file mode 100644
index 0000000..c8d9ce3
--- /dev/null
+++ b/host/commands/cvd/parser/load_configs_parser.cpp
@@ -0,0 +1,58 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/commands/cvd/parser/load_configs_parser.h"
+
+#include <android-base/file.h>
+#include <gflags/gflags.h>
+
+#include <stdio.h>
+#include <fstream>
+#include <string>
+
+#include "common/libs/utils/files.h"
+#include "common/libs/utils/json.h"
+#include "host/commands/assemble_cvd/flags_defaults.h"
+#include "host/commands/cvd/parser/cf_configs_common.h"
+#include "host/commands/cvd/parser/cf_configs_instances.h"
+#include "host/commands/cvd/parser/cf_flags_validator.h"
+#include "host/commands/cvd/parser/fetch_cvd_parser.h"
+#include "host/commands/cvd/parser/launch_cvd_parser.h"
+
+namespace cuttlefish {
+
+Result<Json::Value> ParseJsonFile(const std::string& file_path) {
+  std::string file_content;
+  using android::base::ReadFileToString;
+  CF_EXPECT(ReadFileToString(file_path.c_str(), &file_content,
+                             /* follow_symlinks */ true));
+  auto root = CF_EXPECT(ParseJson(file_content), "Failed parsing JSON file");
+  return root;
+}
+
+Result<CvdFlags> ParseCvdConfigs(Json::Value& root) {
+  CvdFlags results;
+
+  CF_EXPECT(ValidateCfConfigs(root), "Loaded Json validation failed");
+
+  results.launch_cvd_flags = ParseLaunchCvdConfigs(root);
+
+  results.fetch_cvd_flags = ParseFetchCvdConfigs(root);
+
+  return results;
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/parser/load_configs_parser.h b/host/commands/cvd/parser/load_configs_parser.h
new file mode 100644
index 0000000..280f7c5
--- /dev/null
+++ b/host/commands/cvd/parser/load_configs_parser.h
@@ -0,0 +1,33 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+#include <json/json.h>
+#include "common/libs/utils/result.h"
+#include "host/commands/cvd/parser/fetch_cvd_parser.h"
+
+namespace cuttlefish {
+
+typedef struct _CvdFlags {
+  std::vector<std::string> launch_cvd_flags;
+  FetchCvdConfigs fetch_cvd_flags;
+} CvdFlags;
+
+Result<Json::Value> ParseJsonFile(const std::string& file_path);
+
+Result<CvdFlags> ParseCvdConfigs(Json::Value& root);
+
+};  // namespace cuttlefish
diff --git a/host/commands/cvd/proto/Android.bp b/host/commands/cvd/proto/Android.bp
index 75de449..f8ba480 100644
--- a/host/commands/cvd/proto/Android.bp
+++ b/host/commands/cvd/proto/Android.bp
@@ -23,8 +23,22 @@
     host_supported: true,
     proto: {
         export_proto_headers: true,
-        type: "lite",
-        //include_dirs: ["external/protobuf/src"],
+        type: "full",
     },
-    srcs: ["cvd_server.proto"],
+    srcs: [
+        "cvd_server.proto",
+    ],
+}
+
+cc_library_static {
+    name: "libcuttlefish_acloud_proto",
+    host_supported: true,
+    proto: {
+        export_proto_headers: true,
+        type: "full",
+    },
+    srcs: [
+        "user_config.proto",
+        "internal_config.proto",
+    ],
 }
diff --git a/host/commands/cvd/proto/cvd_server.proto b/host/commands/cvd/proto/cvd_server.proto
index 903f03e..eddc2cf 100644
--- a/host/commands/cvd/proto/cvd_server.proto
+++ b/host/commands/cvd/proto/cvd_server.proto
@@ -74,6 +74,11 @@
   WAIT_BEHAVIOR_COMPLETE = 2;
 }
 
+// The arguments that are used by the selector inside the server.
+message SelectorOption {
+  repeated string args = 1;
+}
+
 message CommandRequest {
   // The args that should be executed, including the subcommand.
   repeated string args = 1;
@@ -81,5 +86,24 @@
   map<string, string> env = 2;
   string working_directory = 3;
   WaitBehavior wait_behavior = 4;
+  SelectorOption selector_opts = 5;
 }
-message CommandResponse {}
+
+/*
+ * The fields are required to be filled only for a successful "cvd start" cmd
+ */
+message InstanceGroupInfo {
+  string group_name = 1;
+  message PerInstanceInfo {
+    string name = 1;
+    uint32 instance_id = 2;
+  }
+  repeated PerInstanceInfo instances = 2;
+  repeated string home_directories = 3;
+}
+
+message CommandResponse {
+  oneof response_report {
+    InstanceGroupInfo instance_group_info = 1;
+  }
+}
diff --git a/host/commands/cvd/proto/internal_config.proto b/host/commands/cvd/proto/internal_config.proto
new file mode 100644
index 0000000..782e2ef
--- /dev/null
+++ b/host/commands/cvd/proto/internal_config.proto
@@ -0,0 +1,112 @@
+// Copyright 2016 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto2";
+
+package cuttlefish.acloud;
+
+// Default values to use when user did not provide a value.
+// Each field should match a field in user_config.UserConfig.
+message DefaultUserConfig {
+  optional string machine_type = 1;
+  optional string network = 2;
+  // Default extra data disk size.
+  optional int32 extra_data_disk_size_gb = 3;
+  // Metadata for creating Compute Engine instance
+  // The map will be updated with values from user config.
+  map<string, string> metadata_variable = 4;
+  // [CVD only] The name of the stable host image
+  optional string stable_host_image_name = 5;
+  // [CVD only] The project where stable host image is
+  optional string stable_host_image_project = 6;
+  // [GOLDFISH only] The name of the stable host image
+  optional string stable_goldfish_host_image_name = 7;
+  // [GOLDFISH only] The project where stable host image is
+  optional string stable_goldfish_host_image_project = 8;
+  // [CHEEPS only] The name of the stable host image
+  optional string stable_cheeps_host_image_name = 9;
+  // [CHEEPS only] The project where stable host image is
+  optional string stable_cheeps_host_image_project = 10;
+  // The pattern of the instance name, e.g. ins-{uuid}-{build_id}-{build_target}
+  // the parts in {} will be automatically replaced with the actual value if
+  // you specify them in the pattern, uuid will be automatically generated.
+  optional string instance_name_pattern = 11;
+  // [CVD only] Version of fetch_cvd to use.
+  optional string fetch_cvd_version = 12;
+}
+
+// Internal configuration
+// TODO: Currently we specify resolutions and orientations
+// for all device types in the same config. And all branches are
+// using the same config at data/default.config. However,
+// each branch only supports a subset of devices. So ideally,
+// we should have one config per branch, and only specify supported
+// devices for that branch in the config.
+message InternalConfig {
+  optional DefaultUserConfig default_usr_cfg = 1;
+  // Device resolution
+  map<string, string> device_resolution_map = 2;
+  // Device default orientation
+  map<string, string> device_default_orientation_map = 3;
+  // Minimum gce instance size, e.g. n1-standard-1
+  optional string min_machine_size = 4;
+  // The name of the default disk image, e.g. avd-system.tar.gz
+  optional string disk_image_name = 5;
+  // The mime type of the disk image, e.g. 'application/x-tar'
+  optional string disk_image_mime_type = 6;
+  // The file extension of disk image, e.g. ".tar.gz"
+  optional string disk_image_extension = 7;
+  // The name of the raw image that should appear in the tar gz file,
+  // e.g. "disk.raw"
+  optional string disk_raw_image_name = 8;
+  // The file extension of a raw linux image file, e.g. "img"
+  // If file is of this type, it will be compressed to avd-system.tar.gz
+  optional string disk_raw_image_extension = 9;
+  // Default data disk device to use when extra_data_disk_size_gb
+  // is greater than 0
+  optional string default_extra_data_disk_device = 10;
+  // A map from size_gb to the name of a precreated_data_image
+  map<int32, string> precreated_data_image = 11;
+  // Branches and corresponding minimum build_ids for which
+  // this config is valid for.
+  map<string, int32> valid_branch_and_min_build_id = 12;
+
+  // Path of a file where Oauth2 credential data will be cached.
+  // For example, ".acloud_oauth2.dat". This file will be created under
+  // the home directory if the user is authenticated via Oauth2 method.
+  // The file name by convention usually starts with a dot noting it is
+  // a hidden file.
+  optional string creds_cache_file = 13;
+  // user_agent is a string noting which software it is.
+  // It is used during the Oauth2 authentication flow. It is okay to
+  // make up a value, e.g. "acloud".
+  optional string user_agent = 14;
+
+  // Error messages to be displayed to user when the user
+  // does not have access to the cloud project.
+  // Key is the name of the project.
+  // Value is the error message to show.
+  map<string, string> no_project_access_msg_map = 15;
+
+  // [CVD only] The kernel build target: "kernel". This is unlikely to change.
+  optional string kernel_build_target = 16;
+
+  // [GOLDFISH only] The emulator build target:
+  // "emulator-linux_x64_nolocationui". It's very unlikely that this will ever
+  // change.
+  optional string emulator_build_target = 17;
+
+  // Common hw property
+  map<string, string> common_hw_property_map = 18;
+}
diff --git a/host/commands/cvd/proto/user_config.proto b/host/commands/cvd/proto/user_config.proto
new file mode 100644
index 0000000..1fb815d
--- /dev/null
+++ b/host/commands/cvd/proto/user_config.proto
@@ -0,0 +1,133 @@
+// Copyright 2016 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto2";
+
+package cuttlefish.acloud;
+
+// Hold configurations from user.
+message UserConfig {
+  // Account information for accessing Cloud API
+  optional string service_account_name = 1;
+  optional string service_account_private_key_path = 2;
+
+  // Compute Engine project name
+  optional string project = 3;
+  // Compute Engine zone name, e.g. "us-central1-f"
+  optional string zone = 4;
+  optional string machine_type = 5;
+  // Compute Engine network name, e.g. "default"
+  optional string network = 6;
+
+  // SSH key configuration
+  optional string ssh_private_key_path = 7;
+  optional string ssh_public_key_path = 8;
+
+  // Storage configuration
+  optional string storage_bucket_name = 9;
+
+  // Desired orientation, e.g. 'portrait' or 'landscape'
+  optional string orientation = 10;
+  // Desired resolution
+  optional string resolution = 11;
+  // Size of extra data disk.
+  optional int32 extra_data_disk_size_gb = 12;
+  // Metadata for creating Compute Engine instance
+  map<string, string> metadata_variable = 13;
+
+  // client_id and client secret are required when user authenticates via
+  // Oauth2 flow with their user account (not service account).
+  //   * They are created in the cloud project console -> API manager.
+  //   * They are used to authorize the app to talk to the cloud project
+  //     on behalf of the user.
+  //   * They by themselves do not authenticate the user.
+  //   * They are stored as plain text in the configuration file so they are
+  //     not that secret. Generally, we should not share it with people we
+  //     don't trust.
+  //   * All users talking to the same cloud project can share the same
+  //     client_id and client_secret.
+  optional string client_id = 14;
+  optional string client_secret = 15;
+
+  // [CVD only] The name of the stable host image released by Cloud Android team
+  optional string stable_host_image_name = 16;
+  // [CVD only] The name of the host image family released by Cloud Android team
+  optional string stable_host_image_family = 17;
+  // [CVD only] The project that the stable host image is released to
+  optional string stable_host_image_project = 18;
+
+  // [GOLDFISH only] The name of the stable host image released by Android
+  // Emulator (emu-dev) team
+  optional string stable_goldfish_host_image_name = 19;
+  // [GOLDFISH only] The project that the stable goldfish host image is
+  // released to (emu-dev-cts)
+
+  optional string stable_goldfish_host_image_project = 20;
+
+  // Account information for accessing Cloud API
+  // This is the new way to provide service account auth.
+  optional string service_account_json_private_key_path = 21;
+
+  // Desired hw_property
+  optional string hw_property = 22;
+
+  // [CHEEPS only] The name of the stable host image released by the ARC
+  // (arc-eng) team
+  optional string stable_cheeps_host_image_name = 23;
+  // [CHEEPS only] The project that the stable host image is released to
+  optional string stable_cheeps_host_image_project = 24;
+
+  // [CVD only] It will get passed into the launch_cvd command if not empty.
+  // In version 0.7.2 and later.
+  optional string launch_args = 25;
+
+  // The pattern of the instance name, e.g. ins-{uuid}-{build_id}-{build_target}
+  // the parts in {} will be automatically replaced with the actual value if
+  // you specify them in the pattern, uuid will be automatically generated.
+  optional string instance_name_pattern = 26;
+
+  // List of scopes that will be given to the instance
+  // https://cloud.google.com/compute/docs/access/create-enable-service-accounts-for-instances#changeserviceaccountandscopes
+  repeated string extra_scopes = 27;
+
+  // Provide some additional parameters to build the ssh tunnel.
+  optional string extra_args_ssh_tunnel = 28;
+
+  // [CVD only] Version of fetch_cvd to use.
+  optional string fetch_cvd_version = 29;
+
+  // [CVD only] Enable multi stage function.
+  optional bool enable_multi_stage = 30;
+
+  // [CHEEPS only] The name of the L1 betty image (used with Cheeps controller)
+  optional string betty_image = 31;
+
+  // [Oxygen only] The OAuth Credentials of API key.
+  optional string api_key = 32;
+
+  // [Oxygen only] The API service url.
+  optional string api_url = 33;
+
+  // [Oxygen only] The client to call oxygen api.
+  optional string oxygen_client = 34;
+
+  // [Oxygen only] The args append to lease command.
+  optional string oxygen_lease_args = 35;
+
+  // Storage options of created GCP instance, e.g. pd-standard, pd-ssd.
+  optional string disk_type = 36;
+
+  // [CVD only] Ssh connect with hostname.
+  optional bool connect_hostname = 37;
+}
diff --git a/host/commands/cvd/reset_client_utils.cpp b/host/commands/cvd/reset_client_utils.cpp
new file mode 100644
index 0000000..5d82942
--- /dev/null
+++ b/host/commands/cvd/reset_client_utils.cpp
@@ -0,0 +1,415 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/commands/cvd/reset_client_utils.h"
+
+#include <signal.h>
+
+#include <algorithm>
+#include <cctype>
+#include <iomanip>   // std::setw
+#include <iostream>  // std::endl
+#include <sstream>
+#include <unordered_set>
+
+#include <android-base/file.h>
+#include <android-base/parseint.h>
+
+#include "common/libs/utils/contains.h"
+#include "common/libs/utils/files.h"
+#include "common/libs/utils/proc_file_utils.h"
+#include "common/libs/utils/subprocess.h"
+#include "host/commands/cvd/common_utils.h"
+#include "host/commands/cvd/reset_client_utils.h"
+#include "host/libs/config/cuttlefish_config.h"
+
+namespace cuttlefish {
+
+static bool IsTrue(const std::string& value) {
+  std::unordered_set<std::string> true_strings = {"y", "yes", "true"};
+  std::string value_in_lower_case = value;
+  /*
+   * https://en.cppreference.com/w/cpp/string/byte/tolower
+   *
+   * char should be converted to unsigned char first.
+   */
+  std::transform(value_in_lower_case.begin(), value_in_lower_case.end(),
+                 value_in_lower_case.begin(),
+                 [](unsigned char c) { return std::tolower(c); });
+  return Contains(true_strings, value_in_lower_case);
+}
+
+Result<RunCvdProcessManager::RunCvdProcInfo>
+RunCvdProcessManager::AnalyzeRunCvdProcess(const pid_t pid) {
+  auto proc_info = CF_EXPECT(ExtractProcInfo(pid));
+  RunCvdProcInfo info;
+  info.pid_ = proc_info.pid_;
+  info.exec_path_ = proc_info.actual_exec_path_;
+  info.cmd_args_ = std::move(proc_info.args_);
+  info.envs_ = std::move(proc_info.envs_);
+  CF_EXPECT(Contains(info.envs_, "HOME"));
+  info.home_ = info.envs_.at("HOME");
+  if (Contains(info.envs_, kAndroidHostOut)) {
+    info.android_host_out_ = info.envs_[kAndroidHostOut];
+  } else {
+    if (Contains(info.envs_, kAndroidSoongHostOut)) {
+      info.android_host_out_ = info.envs_[kAndroidSoongHostOut];
+    }
+  }
+  CF_EXPECT(Contains(info.envs_, kCuttlefishInstanceEnvVarName));
+  int id;
+  CF_EXPECT(android::base::ParseInt(
+      info.envs_.at(kCuttlefishInstanceEnvVarName), &id));
+  info.id_ = static_cast<unsigned>(id);
+  if (!Contains(info.envs_, kAndroidHostOut) &&
+      !Contains(info.envs_, kAndroidSoongHostOut)) {
+    const std::string server_host_out =
+        android::base::Dirname(android::base::GetExecutableDirectory());
+    info.envs_[kAndroidHostOut] = server_host_out;
+    info.envs_[kAndroidSoongHostOut] = server_host_out;
+  }
+
+  if (Contains(info.envs_, kCvdMarkEnv) && IsTrue(info.envs_.at(kCvdMarkEnv))) {
+    info.is_cvd_server_started_ = true;
+  }
+
+  std::vector<std::string> stop_bins{"cvd_internal_stop", "stop_cvd"};
+
+  if (Contains(info.envs_, kAndroidHostOut)) {
+    for (const auto& bin : stop_bins) {
+      std::string internal_stop_path =
+          ConcatToString(info.envs_.at(kAndroidHostOut), "/bin/", bin);
+      if (!FileExists(internal_stop_path)) {
+        continue;
+      }
+      info.stop_cvd_path_ = std::move(internal_stop_path);
+      return info;
+    }
+  }
+
+  for (const auto& bin : stop_bins) {
+    std::string stop_cvd_path =
+        ConcatToString(info.envs_.at(kAndroidSoongHostOut), "/bin/", bin);
+    if (!FileExists(stop_cvd_path)) {
+      continue;
+    }
+    info.stop_cvd_path_ = std::move(stop_cvd_path);
+    return info;
+  }
+
+  return CF_ERR("cvd_internal_stop or stop_cvd cannot be found for "
+                << " pid #" << pid);
+}
+
+static Command CreateStopCvdCommand(const std::string& stopper_path,
+                                    const cvd_common::Envs& envs,
+                                    const cvd_common::Args& args) {
+  Command command(cpp_basename(stopper_path));
+  command.SetExecutable(stopper_path);
+  for (const auto& arg : args) {
+    command.AddParameter(arg);
+  }
+  for (const auto& [key, value] : envs) {
+    command.AddEnvironmentVariable(key, value);
+  }
+  return command;
+}
+
+Result<RunCvdProcessManager> RunCvdProcessManager::Get() {
+  RunCvdProcessManager run_cvd_processes_manager;
+  run_cvd_processes_manager.cf_groups_ =
+      CF_EXPECT(run_cvd_processes_manager.CollectInfo());
+  return run_cvd_processes_manager;
+}
+
+Result<std::vector<RunCvdProcessManager::GroupProcInfo>>
+RunCvdProcessManager::CollectInfo() {
+  auto run_cvd_pids = CF_EXPECT(CollectPidsByExecName("run_cvd"));
+  std::vector<RunCvdProcInfo> run_cvd_infos;
+  run_cvd_infos.reserve(run_cvd_pids.size());
+  for (const auto run_cvd_pid : run_cvd_pids) {
+    auto run_cvd_info_result = AnalyzeRunCvdProcess(run_cvd_pid);
+    if (!run_cvd_info_result.ok()) {
+      LOG(ERROR) << "Failed to collect information for run_cvd at #"
+                 << run_cvd_pid << std::endl
+                 << run_cvd_info_result.error().Trace();
+      continue;
+    }
+    run_cvd_infos.push_back(*run_cvd_info_result);
+  }
+
+  // home --> group map
+  std::unordered_map<std::string, GroupProcInfo> groups;
+  for (auto& run_cvd_info : run_cvd_infos) {
+    const auto home = run_cvd_info.home_;
+    if (!Contains(groups, home)) {
+      // create using a default constructor
+      groups[home] = GroupProcInfo();
+      groups[home].home_ = home;
+      groups[home].exec_path_ = run_cvd_info.exec_path_;
+      groups[home].stop_cvd_path_ = run_cvd_info.stop_cvd_path_;
+      groups[home].is_cvd_server_started_ = run_cvd_info.is_cvd_server_started_;
+      groups[home].android_host_out_ = run_cvd_info.android_host_out_;
+    }
+    auto& id_instance_map = groups[home].instances_;
+    if (!Contains(id_instance_map, run_cvd_info.id_)) {
+      id_instance_map[run_cvd_info.id_] = GroupProcInfo::InstanceInfo{
+          .pids_ = std::set<pid_t>{run_cvd_info.pid_},
+          .envs_ = std::move(run_cvd_info.envs_),
+          .cmd_args_ = std::move(run_cvd_info.cmd_args_),
+          .id_ = run_cvd_info.id_};
+      continue;
+    }
+    // this is the other run_cvd process under the same instance i
+    id_instance_map[run_cvd_info.id_].pids_.insert(run_cvd_info.pid_);
+  }
+  std::vector<GroupProcInfo> output;
+  output.reserve(groups.size());
+  for (auto& [_, group] : groups) {
+    output.push_back(std::move(group));
+  }
+  return output;
+}
+
+Result<void> RunCvdProcessManager::RunStopCvd(const GroupProcInfo& group_info,
+                                              const bool clear_runtime_dirs) {
+  const auto& stopper_path = group_info.stop_cvd_path_;
+  int ret_code = 0;
+  cvd_common::Envs stop_cvd_envs;
+  stop_cvd_envs["HOME"] = group_info.home_;
+  if (group_info.android_host_out_) {
+    stop_cvd_envs[kAndroidHostOut] = group_info.android_host_out_.value();
+    stop_cvd_envs[kAndroidSoongHostOut] = group_info.android_host_out_.value();
+  } else {
+    auto android_host_out = StringFromEnv(
+        kAndroidHostOut,
+        android::base::Dirname(android::base::GetExecutableDirectory()));
+    stop_cvd_envs[kAndroidHostOut] = android_host_out;
+    stop_cvd_envs[kAndroidSoongHostOut] = android_host_out;
+  }
+
+  if (clear_runtime_dirs) {
+    Command first_stop_cvd = CreateStopCvdCommand(
+        stopper_path, stop_cvd_envs, {"--clear_instance_dirs=true"});
+    LOG(ERROR) << "Running HOME=" << stop_cvd_envs.at("HOME") << " "
+               << stopper_path << " --clear_instance_dirs";
+    std::string stdout_str;
+    std::string stderr_str;
+    ret_code = RunWithManagedStdio(std::move(first_stop_cvd), nullptr,
+                                   std::addressof(stdout_str),
+                                   std::addressof(stderr_str));
+    // TODO(kwstephenkim): deletes manually if `stop_cvd --clear_instance_dirs`
+    // failed.
+  }
+  if (!clear_runtime_dirs || ret_code != 0) {
+    if (clear_runtime_dirs) {
+      LOG(ERROR) << "Failed to run " << stopper_path
+                 << " --clear_runtime_dirs=true";
+      LOG(ERROR) << "Perhaps --clear_instance_dirs is not taken.";
+      LOG(ERROR) << "Trying again without it";
+    }
+    Command second_stop_cvd =
+        CreateStopCvdCommand(stopper_path, stop_cvd_envs, {});
+    LOG(ERROR) << "Running HOME=" << stop_cvd_envs.at("HOME") << " "
+               << stopper_path;
+    std::string stdout_str;
+    std::string stderr_str;
+    ret_code = RunWithManagedStdio(std::move(second_stop_cvd), nullptr,
+                                   std::addressof(stdout_str),
+                                   std::addressof(stderr_str));
+  }
+  if (ret_code != 0) {
+    std::stringstream error;
+    error << "HOME=" << group_info.home_
+          << group_info.stop_cvd_path_ + " Failed.";
+    return CF_ERR(error.str());
+  }
+  LOG(ERROR) << "\"" << stopper_path << " successfully "
+             << "\" stopped instances at HOME=" << group_info.home_;
+  return {};
+}
+
+Result<void> RunCvdProcessManager::RunStopCvdAll(
+    const bool cvd_server_children_only, const bool clear_instance_dirs) {
+  for (const auto& group_info : cf_groups_) {
+    if (cvd_server_children_only && !group_info.is_cvd_server_started_) {
+      continue;
+    }
+    auto stop_cvd_result = RunStopCvd(group_info, clear_instance_dirs);
+    if (!stop_cvd_result.ok()) {
+      LOG(ERROR) << stop_cvd_result.error().Trace();
+      continue;
+    }
+  }
+  return {};
+}
+
+static bool IsStillRunCvd(const pid_t pid) {
+  std::string pid_dir = ConcatToString("/proc/", pid);
+  if (!FileExists(pid_dir)) {
+    return false;
+  }
+  auto owner_result = OwnerUid(pid_dir);
+  if (!owner_result.ok() || (getuid() != *owner_result)) {
+    return false;
+  }
+  auto extract_proc_info_result = ExtractProcInfo(pid);
+  if (!extract_proc_info_result.ok()) {
+    return false;
+  }
+  return (cpp_basename(extract_proc_info_result->actual_exec_path_) ==
+          "run_cvd");
+}
+
+Result<void> RunCvdProcessManager::SendSignals(
+    const bool cvd_server_children_only) {
+  auto recollected_run_cvd_pids = CF_EXPECT(CollectPidsByExecName("run_cvd"));
+  std::unordered_set<pid_t> failed_pids;
+  for (const auto& group_info : cf_groups_) {
+    if (cvd_server_children_only && !group_info.is_cvd_server_started_) {
+      continue;
+    }
+    for (const auto& [_, instance] : group_info.instances_) {
+      const auto& pids = instance.pids_;
+      for (const auto pid : pids) {
+        if (!Contains(recollected_run_cvd_pids, pid)) {
+          // pid is alive but reassigned to non-run_cvd process
+          continue;
+        }
+        if (!IsStillRunCvd(pid)) {
+          // pid is now assigned to a different process
+          continue;
+        }
+        auto ret_sigkill = kill(pid, SIGKILL);
+        if (ret_sigkill == 0) {
+          LOG(ERROR) << "SIGKILL was delivered to pid #" << pid;
+        } else {
+          LOG(ERROR) << "SIGKILL was not delivered to pid #" << pid;
+        }
+        if (!IsStillRunCvd(pid)) {
+          continue;
+        }
+        LOG(ERROR) << "Will still send SIGHUP as run_cvd #" << pid
+                   << " has not been terminated by SIGKILL.";
+        auto ret_sighup = kill(pid, SIGHUP);
+        if (ret_sighup != 0) {
+          LOG(ERROR) << "SIGHUP sent to process #" << pid << " but all failed.";
+        }
+        if (ret_sigkill != 0 && ret_sighup != 0) {
+          failed_pids.insert(pid);
+        }
+      }
+    }
+  }
+  std::stringstream error_msg_stream;
+  error_msg_stream << "Some run_cvd processes were not killed: {";
+  for (const auto& pid : failed_pids) {
+    error_msg_stream << pid << ",";
+  }
+  auto error_msg = error_msg_stream.str();
+  if (!failed_pids.empty()) {
+    error_msg.pop_back();
+  }
+  error_msg.append("}");
+  CF_EXPECT(failed_pids.empty(), error_msg);
+  return {};
+}
+
+void RunCvdProcessManager::DeleteLockFiles(
+    const bool cvd_server_children_only) {
+  for (const auto& group_info : cf_groups_) {
+    if (cvd_server_children_only && !group_info.is_cvd_server_started_) {
+      continue;
+    }
+    const auto& instances = group_info.instances_;
+    std::string lock_file_prefix = "/tmp/acloud_cvd_temp/local-instance-";
+    for (const auto& [id, _] : instances) {
+      std::stringstream lock_file_path_stream;
+      lock_file_path_stream << lock_file_prefix << id << ".lock";
+      auto lock_file_path = lock_file_path_stream.str();
+      if (FileExists(lock_file_path) && !DirectoryExists(lock_file_path)) {
+        if (RemoveFile(lock_file_path)) {
+          LOG(ERROR) << "Reset the lock file: " << lock_file_path;
+        } else {
+          LOG(ERROR) << "Failed to reset lock file: " << lock_file_path;
+        }
+      }
+    }
+  }
+}
+
+Result<void> KillAllCuttlefishInstances(const DeviceClearOptions& options) {
+  RunCvdProcessManager manager = CF_EXPECT(RunCvdProcessManager::Get());
+  CF_EXPECT(manager.KillAllCuttlefishInstances(options.cvd_server_children_only,
+                                               options.clear_instance_dirs));
+  return {};
+}
+
+Result<void> KillCvdServerProcess() {
+  std::vector<pid_t> self_exe_pids =
+      CF_EXPECT(CollectPidsByArgv0(kServerExecPath));
+  if (self_exe_pids.empty()) {
+    LOG(ERROR) << "cvd server is not running.";
+    return {};
+  }
+  std::vector<pid_t> cvd_server_pids;
+  /**
+   * Finds processes whose executable path is kServerExecPath, and
+   * that is owned by getuid(), and that has the "INTERNAL_server_fd"
+   * in the arguments list.
+   */
+  for (const auto pid : self_exe_pids) {
+    auto proc_info_result = ExtractProcInfo(pid);
+    if (!proc_info_result.ok()) {
+      LOG(ERROR) << "Failed to extract process info for pid " << pid;
+      continue;
+    }
+    auto owner_uid_result = OwnerUid(pid);
+    if (!owner_uid_result.ok()) {
+      LOG(ERROR) << "Failed to find the uid for pid " << pid;
+      continue;
+    }
+    if (getuid() != *owner_uid_result) {
+      continue;
+    }
+    for (const auto& arg : proc_info_result->args_) {
+      if (Contains(arg, "INTERNAL_server_fd")) {
+        cvd_server_pids.push_back(pid);
+        break;
+      }
+    }
+  }
+  if (cvd_server_pids.empty()) {
+    LOG(ERROR)
+        << "Cvd server process is not found. Perhaps, it is not running.";
+    return {};
+  }
+  if (cvd_server_pids.size() > 1) {
+    LOG(ERROR) << "There are " << cvd_server_pids.size() << " server processes "
+               << "running while it should be up to 1.";
+  }
+  for (const auto pid : cvd_server_pids) {
+    auto kill_ret = kill(pid, SIGKILL);
+    if (kill_ret != 0) {
+      LOG(ERROR) << "kill(" << pid << ", SIGKILL) failed.";
+    } else {
+      LOG(ERROR) << "Cvd server process #" << pid << " is killed.";
+    }
+  }
+  return {};
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/reset_client_utils.h b/host/commands/cvd/reset_client_utils.h
new file mode 100644
index 0000000..fe8c724
--- /dev/null
+++ b/host/commands/cvd/reset_client_utils.h
@@ -0,0 +1,115 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <sys/types.h>
+
+#include <memory>
+#include <mutex>
+#include <set>
+#include <string>
+#include <vector>
+
+#include "common/libs/utils/result.h"
+#include "host/commands/cvd/types.h"
+
+namespace cuttlefish {
+
+class RunCvdProcessManager {
+  struct RunCvdProcInfo {
+    pid_t pid_;
+    std::string home_;
+    std::string exec_path_;
+    cvd_common::Envs envs_;
+    cvd_common::Args cmd_args_;
+    std::string stop_cvd_path_;
+    bool is_cvd_server_started_;
+    std::optional<std::string> android_host_out_;
+    unsigned id_;
+  };
+
+  struct GroupProcInfo {
+    std::string home_;
+    std::string exec_path_;
+    std::string stop_cvd_path_;
+    bool is_cvd_server_started_;
+    std::optional<std::string> android_host_out_;
+    struct InstanceInfo {
+      std::set<pid_t> pids_;
+      cvd_common::Envs envs_;
+      cvd_common::Args cmd_args_;
+      unsigned id_;
+    };
+    // instance id to instance info mapping
+    std::unordered_map<unsigned, InstanceInfo> instances_;
+  };
+
+ public:
+  static Result<RunCvdProcessManager> Get();
+  RunCvdProcessManager(const RunCvdProcessManager&) = delete;
+  RunCvdProcessManager(RunCvdProcessManager&&) = default;
+  Result<void> KillAllCuttlefishInstances(const bool cvd_server_children_only,
+                                          const bool clear_runtime_dirs) {
+    auto stop_cvd_result =
+        RunStopCvdAll(cvd_server_children_only, clear_runtime_dirs);
+    if (!stop_cvd_result.ok()) {
+      LOG(ERROR) << stop_cvd_result.error().Message();
+    }
+    auto send_signals_result = SendSignals(cvd_server_children_only);
+    if (!send_signals_result.ok()) {
+      LOG(ERROR) << send_signals_result.error().Message();
+    }
+    DeleteLockFiles(cvd_server_children_only);
+    cf_groups_.clear();
+    auto recollect_info_result = CollectInfo();
+    if (!recollect_info_result.ok()) {
+      LOG(ERROR) << "Recollecting run_cvd processes information failed.";
+      LOG(ERROR) << recollect_info_result.error().Trace();
+    }
+    return {};
+  }
+
+ private:
+  RunCvdProcessManager() = default;
+  static Result<void> RunStopCvd(const GroupProcInfo& run_cvd_info,
+                                 const bool clear_runtime_dirs);
+  Result<void> RunStopCvdAll(const bool cvd_server_children_only,
+                             const bool clear_runtime_dirs);
+  Result<void> SendSignals(const bool cvd_server_children_only);
+  Result<RunCvdProcInfo> AnalyzeRunCvdProcess(const pid_t pid);
+  void DeleteLockFiles(const bool cvd_server_children_only);
+  Result<std::vector<GroupProcInfo>> CollectInfo();
+  std::vector<GroupProcInfo> cf_groups_;
+};
+
+struct DeviceClearOptions {
+  bool cvd_server_children_only;
+  bool clear_instance_dirs;
+};
+
+/*
+ * Runs stop_cvd for all cuttlefish instances found based on run_cvd processes,
+ * and send SIGKILL to the run_cvd processes.
+ *
+ * If cvd_server_children_only is set, it kills the run_cvd processes that were
+ * started by a cvd server process.
+ */
+Result<void> KillAllCuttlefishInstances(const DeviceClearOptions& options);
+
+Result<void> KillCvdServerProcess();
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/scope_guard.cpp b/host/commands/cvd/scope_guard.cpp
deleted file mode 100644
index a0e8bbb..0000000
--- a/host/commands/cvd/scope_guard.cpp
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Copyright (C) 2022 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "host/commands/cvd/scope_guard.h"
-
-#include <functional>
-
-namespace cuttlefish {
-
-ScopeGuard::ScopeGuard() = default;
-
-ScopeGuard::ScopeGuard(std::function<void()> fn) : fn_(fn) {}
-
-ScopeGuard::ScopeGuard(ScopeGuard&&) = default;
-
-ScopeGuard& ScopeGuard::operator=(ScopeGuard&&) = default;
-
-ScopeGuard::~ScopeGuard() {
-  if (fn_) {
-    fn_();
-  }
-}
-
-void ScopeGuard::Cancel() { fn_ = nullptr; }
-
-}  // namespace cuttlefish
diff --git a/host/commands/cvd/scope_guard.h b/host/commands/cvd/scope_guard.h
deleted file mode 100644
index 596bc66..0000000
--- a/host/commands/cvd/scope_guard.h
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * Copyright (C) 2022 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#pragma once
-
-#include <functional>
-
-namespace cuttlefish {
-
-class ScopeGuard {
- public:
-  ScopeGuard();
-  explicit ScopeGuard(std::function<void()> fn);
-  ScopeGuard(ScopeGuard&&);
-  ~ScopeGuard();
-  ScopeGuard& operator=(ScopeGuard&&);
-
-  void Cancel();
-
- private:
-  std::function<void()> fn_;
-};
-
-}  // namespace cuttlefish
diff --git a/host/commands/cvd/selector/Android.bp b/host/commands/cvd/selector/Android.bp
new file mode 100644
index 0000000..dae6cf7
--- /dev/null
+++ b/host/commands/cvd/selector/Android.bp
@@ -0,0 +1,41 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+cc_library_host_static {
+    name: "libcvd_selector",
+    srcs: [
+        "arguments_lexer.cpp",
+        "arguments_separator.cpp",
+        "creation_analyzer.cpp",
+        "device_selector_utils.cpp",
+        "group_selector.cpp",
+        "instance_database.cpp",
+        "instance_database_impl.cpp",
+        "instance_database_types.cpp",
+        "instance_database_utils.cpp",
+        "instance_group_record.cpp",
+        "instance_record.cpp",
+        "instance_selector.cpp",
+        "selector_common_parser.cpp",
+        "selector_constants.cpp",
+        "selector_option_parser_utils.cpp",
+        "start_selector_parser.cpp",
+    ],
+    defaults: ["cvd_lib_defaults"],
+}
diff --git a/host/commands/cvd/selector/arguments_lexer.cpp b/host/commands/cvd/selector/arguments_lexer.cpp
new file mode 100644
index 0000000..e047e48
--- /dev/null
+++ b/host/commands/cvd/selector/arguments_lexer.cpp
@@ -0,0 +1,228 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/commands/cvd/selector/arguments_lexer.h"
+
+#include <algorithm>
+#include <regex>
+#include <vector>
+
+#include <android-base/strings.h>
+
+#include "host/commands/cvd/selector/instance_database_utils.h"
+
+namespace cuttlefish {
+namespace selector {
+namespace {
+
+template <typename... Sets>
+bool Included(const std::string& item, Sets&&... containers) {
+  return ((Contains(std::forward<Sets>(containers), item)) || ... || false);
+}
+
+}  // namespace
+
+/*
+ * Eventually, we get two sets, each include strings start with "-" or "--".
+ *
+ * Say, the two sets are BaseSet and NoPrependedSet.
+ *
+ * Given a boolean flag --foo, these will happen:
+ *   BaseSet = BaseSet U {"--foo", "-foo"}
+ *   NoPrependedSet = NoPrependedSet U  {"--nofoo", "-nofoo"}
+ * Given a non boolean flag --bar, these will happen:
+ *   BaseSet = BaseSet U {"--bar", "-bar"}
+ *
+ * Later on, when the parser reads a token, the parser will look up the
+ * two sets to see if the token that is supposedly a flag is a known
+ * flag.
+ */
+Result<ArgumentsLexerBuilder::FlagPatterns>
+ArgumentsLexerBuilder::GenerateFlagPatterns(
+    const LexerFlagsSpecification& known_flags) {
+  FlagPatterns flag_patterns;
+  for (const auto& non_bool_flag : known_flags.known_value_flags) {
+    const auto one_dash = "-" + non_bool_flag;
+    const auto two_dashes = "--" + non_bool_flag;
+    CF_EXPECT(!ArgumentsLexer::Registered(one_dash, flag_patterns));
+    CF_EXPECT(!ArgumentsLexer::Registered(two_dashes, flag_patterns));
+    flag_patterns.value_patterns.insert(one_dash);
+    flag_patterns.value_patterns.insert(two_dashes);
+  }
+  for (const auto& bool_flag : known_flags.known_boolean_flags) {
+    const auto one_dash = "-" + bool_flag;
+    const auto two_dashes = "--" + bool_flag;
+    const auto one_dash_with_no = "-no" + bool_flag;
+    const auto two_dashes_with_no = "--no" + bool_flag;
+    CF_EXPECT(!ArgumentsLexer::Registered(one_dash, flag_patterns));
+    CF_EXPECT(!ArgumentsLexer::Registered(two_dashes, flag_patterns));
+    CF_EXPECT(!ArgumentsLexer::Registered(one_dash_with_no, flag_patterns));
+    CF_EXPECT(!ArgumentsLexer::Registered(two_dashes_with_no, flag_patterns));
+    flag_patterns.bool_patterns.insert(one_dash);
+    flag_patterns.bool_patterns.insert(two_dashes);
+    flag_patterns.bool_no_patterns.insert(one_dash_with_no);
+    flag_patterns.bool_no_patterns.insert(two_dashes_with_no);
+  }
+  return flag_patterns;
+}
+
+Result<std::unique_ptr<ArgumentsLexer>> ArgumentsLexerBuilder::Build(
+    const LexerFlagsSpecification& known_flags) {
+  auto flag_patterns = CF_EXPECT(GenerateFlagPatterns(known_flags));
+  ArgumentsLexer* new_lexer = new ArgumentsLexer(std::move(flag_patterns));
+  CF_EXPECT(new_lexer != nullptr,
+            "Memory allocation for ArgumentsLexer failed.");
+  return std::unique_ptr<ArgumentsLexer>{new_lexer};
+}
+
+ArgumentsLexer::ArgumentsLexer(FlagPatterns&& flag_patterns)
+    : flag_patterns_{std::move(flag_patterns)} {
+  valid_bool_values_in_lower_cases_ = std::move(
+      std::unordered_set<std::string>{"true", "false", "yes", "no", "y", "n"});
+}
+
+bool ArgumentsLexer::Registered(const std::string& flag_string,
+                                const FlagPatterns& flag_patterns) {
+  return Included(flag_string, flag_patterns.value_patterns,
+                  flag_patterns.bool_patterns, flag_patterns.bool_no_patterns);
+}
+
+Result<ArgToken> ArgumentsLexer::Process(const std::string& token) const {
+  if (token == "--") {
+    return ArgToken{ArgType::kDoubleDash, token};
+  }
+  std::regex flag_and_value_pattern("[\\-][\\-]?[^\\-]+.*=.*");
+  std::regex flag_pattern("[\\-][\\-]?[^\\-]+.*");
+  std::regex base_pattern("[^\\-]+.*");
+  if (std::regex_match(token, base_pattern)) {
+    return ArgToken{ArgType::kPositional, token};
+  }
+  if (!std::regex_match(token, flag_pattern)) {
+    return ArgToken{ArgType::kError, token};
+  }
+  // --flag=value
+  if (std::regex_match(token, flag_and_value_pattern)) {
+    auto [flag_string, value] = CF_EXPECT(Separate(token));
+    // is --flag registered?
+    if (Contains(flag_patterns_.value_patterns, flag_string)) {
+      return ArgToken{ArgType::kKnownFlagAndValue, token};
+    }
+    return ArgToken{ArgType::kUnknownFlag, token};
+  }
+  if (Contains(flag_patterns_.value_patterns, token)) {
+    return ArgToken{ArgType::kKnownValueFlag, token};
+  }
+  if (Contains(flag_patterns_.bool_patterns, token)) {
+    return ArgToken{ArgType::kKnownBoolFlag, token};
+  }
+  if (Contains(flag_patterns_.bool_no_patterns, token)) {
+    return ArgToken{ArgType::kKnownBoolNoFlag, token};
+  }
+  return ArgToken{ArgType::kUnknownFlag, token};
+}
+
+Result<std::vector<ArgToken>> ArgumentsLexer::Tokenize(
+    const CvdProtobufArg& args) {
+  std::vector<std::string> args_vec;
+  args_vec.reserve(args.size());
+  for (const auto& arg : args) {
+    args_vec.emplace_back(arg);
+  }
+  auto arg_tokens = CF_EXPECT(Tokenize(args_vec));
+  return arg_tokens;
+}
+
+Result<std::vector<ArgToken>> ArgumentsLexer::Tokenize(
+    const std::string& args, const std::string delim) {
+  auto args_vec = android::base::Tokenize(args, delim);
+  auto arg_tokens = CF_EXPECT(Tokenize(args_vec));
+  return arg_tokens;
+}
+
+Result<std::vector<ArgToken>> ArgumentsLexer::Tokenize(
+    const std::vector<std::string>& args) {
+  std::vector<ArgToken> tokenized;
+  auto intersection =
+      Intersection(flag_patterns_.value_patterns, flag_patterns_.bool_patterns);
+  CF_EXPECT(intersection.empty());
+  auto preprocessed_args = CF_EXPECT(Preprocess(args));
+  for (const auto& arg : preprocessed_args) {
+    auto arg_token = CF_EXPECT(Process(arg));
+    tokenized.emplace_back(arg_token);
+  }
+  return tokenized;
+}
+
+static std::string ToLower(const std::string& src) {
+  std::string lower_cased_value;
+  lower_cased_value.resize(src.size());
+  std::transform(src.begin(), src.end(), lower_cased_value.begin(), ::tolower);
+  return lower_cased_value;
+}
+
+Result<ArgumentsLexer::FlagValuePair> ArgumentsLexer::Separate(
+    const std::string& equal_included_string) const {
+  CF_EXPECT(Contains(equal_included_string, "="));
+  auto equal_sign_pos = equal_included_string.find_first_of('=');
+  auto first_token = equal_included_string.substr(0, equal_sign_pos);
+  auto second_token = equal_included_string.substr(equal_sign_pos + 1);
+  return FlagValuePair{.flag_string = first_token, .value = second_token};
+}
+
+Result<std::vector<std::string>> ArgumentsLexer::Preprocess(
+    const std::vector<std::string>& args) {
+  std::vector<std::string> new_args;
+  std::regex pattern("[\\-][\\-]?[^\\-]+.*=.*");
+  for (const auto& arg : args) {
+    if (!std::regex_match(arg, pattern)) {
+      new_args.emplace_back(arg);
+      continue;
+    }
+    // needs to split based on the first '='
+    // --something=another_thing or
+    //  -something=another_thing
+    const auto [flag_string, value] = CF_EXPECT(Separate(arg));
+
+    if (Contains(flag_patterns_.bool_patterns, flag_string)) {
+      const auto low_cased_value = ToLower(value);
+      CF_EXPECT(Contains(valid_bool_values_in_lower_cases_, low_cased_value),
+                "The value for the boolean flag " << flag_string << ", "
+                                                  << value << " is not valid");
+      if (low_cased_value == "true" || low_cased_value == "yes") {
+        new_args.emplace_back(flag_string);
+        continue;
+      }
+      auto base_pos = flag_string.find_first_not_of('-');
+      auto base = flag_string.substr(base_pos);
+      new_args.emplace_back("--no" + base);
+      continue;
+    }
+
+    if (Contains(flag_patterns_.bool_no_patterns, flag_string)) {
+      CF_EXPECT(android::base::StartsWith(flag_string, "-no") ||
+                android::base::StartsWith(flag_string, "--no"));
+      // if --nohelp=XYZ, the "=XYZ" is ignored.
+      new_args.emplace_back(flag_string);
+      continue;
+    }
+
+    new_args.emplace_back(arg);
+  }
+  return new_args;
+}
+
+}  // namespace selector
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/selector/arguments_lexer.h b/host/commands/cvd/selector/arguments_lexer.h
new file mode 100644
index 0000000..ecf40c3
--- /dev/null
+++ b/host/commands/cvd/selector/arguments_lexer.h
@@ -0,0 +1,199 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <memory>
+#include <string>
+#include <unordered_set>
+
+#include <cvd_server.pb.h>
+
+#include "common/libs/utils/result.h"
+
+namespace cuttlefish {
+namespace selector {
+
+/**
+ * A "token" is each piece of command line argument that is mostly
+ * separated by " ".
+ *
+ * Each token has a type. The type is a useful information for the
+ * grammar parser, which will use this lexer.
+ *
+ * Before going into the details, we assume that a set of flags are
+ * pre-registered, and the user may still give unregisterred flags.
+ *
+ * Note that the purpose of this lexer/parser is to separate cvd
+ * client specific arguments and the "subcmd" from the rest. So,
+ * "registered" arguments would be the cvd client specific arguments.
+ * The unregisterred arguments would be for the sub tool.
+ *
+ * Also, in terms of lexing, boolean flags are different from other
+ * value-taking flags. A boolean flag --foo could be --nofoo.
+ *
+ * 1. kKnownValueFlag
+ *    --foo, -foo that may take a non-boolean value
+ * 2. kKnownFlagAndValue
+ *    --foo=value, -foo=value, which does not take more values
+ * 3. kKnownBoolFlag
+ *    --daemon, -daemon, etc, which may take a boolean arg
+ * 4. kKnownBoolNoFlag
+ *    --nodaemon, -nodaemon, etc, which does not take another argument.
+ * 5. kUnknownFlag
+ *    -anything_else or --anything_else
+ *    --anything_else=any_value, etc
+ *    Note that if we don't know the type of the flag, we will have to forward
+ *    the entire thing to the subcmd as is.
+ * 6. kPositional
+ *    mostly without leading "-" or "--"
+ * 7. kDoubleDash
+ *    A literally "--"
+ *    cvd and its subtools as of not are not really using that.
+ *    However, it might be useful in the future for any subtool of cvd, so
+ *    we allow "--" in the subcmd arguments only in the parser level.
+ *    In the lexer level, we simply returns kDoubleDash token.
+ * 8. kError
+ *    The rest.
+ *
+ */
+enum class ArgType : int {
+  kKnownValueFlag,
+  kKnownFlagAndValue,
+  kKnownBoolFlag,
+  kKnownBoolNoFlag,
+  kUnknownFlag,
+  kPositional,
+  kDoubleDash,
+  kError
+};
+
+class ArgToken {
+ public:
+  ArgToken() = delete;
+  ArgToken(const ArgType arg_type, const std::string& token)
+      : type_(arg_type), token_(token) {}
+  ArgToken(const ArgToken& src) = default;
+  ArgToken(ArgToken&& src) = default;
+  ArgToken& operator=(const ArgToken& src) {
+    type_ = src.type_;
+    token_ = src.token_;
+    return *this;
+  }
+  ArgToken& operator=(ArgToken&& src) {
+    type_ = std::move(src.type_);
+    token_ = std::move(src.token_);
+    return *this;
+  }
+
+  auto Type() const { return type_; }
+  const auto& Token() const { return token_; }
+  auto& Token() { return token_; }
+  bool operator==(const ArgToken& dst) const {
+    return Type() == dst.Type() && Token() == dst.Token();
+  }
+
+ private:
+  ArgType type_;
+  std::string token_;
+};
+
+class ArgumentsLexer {
+  friend class ArgumentsLexerBuilder;
+  using CvdProtobufArg = google::protobuf::RepeatedPtrField<std::string>;
+
+ public:
+  Result<std::vector<ArgToken>> Tokenize(const std::vector<std::string>& args);
+  Result<std::vector<ArgToken>> Tokenize(const CvdProtobufArg& args);
+  Result<std::vector<ArgToken>> Tokenize(const std::string& args,
+                                         const std::string delim = " ");
+
+ private:
+  // Lexer factory function will internally generate this,
+  // and give it to ArgumentsLexer.
+  struct FlagPatterns {
+    /* represents flags that takes values
+     * e.g. -group_name, --group_name (which may take an additional
+     * positional arg, or use its default value.)
+     *
+     * With the given example, this set shall be:
+     *  {"-group_name", "--group_name"}
+     */
+    std::unordered_set<std::string> value_patterns;
+    /* boolean flags
+     * e.g. --daemon, --nodaemon
+     *
+     * With the given example, this set shall be:
+     *  {"-daemon", "--daemon"}
+     */
+    std::unordered_set<std::string> bool_patterns;
+    // e.g. {"-nodaemon", "--nodaemon"}
+    std::unordered_set<std::string> bool_no_patterns;
+  };
+  ArgumentsLexer(FlagPatterns&& flag_patterns);
+
+  // preprocess boolean flags:
+  //  e.g. --help=yes --> --help
+  //       --help=faLSe --> --nohelp
+  Result<std::vector<std::string>> Preprocess(
+      const std::vector<std::string>& args);
+  Result<ArgToken> Process(const std::string& token) const;
+
+  struct FlagValuePair {
+    std::string flag_string;
+    std::string value;
+  };
+  Result<FlagValuePair> Separate(
+      const std::string& equal_included_string) const;
+  // flag_string starts with "-" or "--"
+  static bool Registered(const std::string& flag_string,
+                         const FlagPatterns& flag_patterns);
+  bool Registered(const std::string& flag_string) const {
+    return Registered(flag_string, flag_patterns_);
+  }
+  std::unordered_set<std::string> valid_bool_values_in_lower_cases_;
+  FlagPatterns flag_patterns_;
+};
+
+// input to the lexer factory function
+struct LexerFlagsSpecification {
+  std::unordered_set<std::string> known_boolean_flags;
+  std::unordered_set<std::string> known_value_flags;
+};
+
+/*
+ * At the top level, there are only two tokens: flag and positional tokens.
+ *
+ * A flag token starts with "-" or "--" followed by one or more non "-" letters.
+ * A positional token starts with any character other than "-".
+ *
+ * Between flag tokens, there are "known" and "unknown" flag tokens.
+ *
+ */
+class ArgumentsLexerBuilder {
+  using FlagPatterns = ArgumentsLexer::FlagPatterns;
+
+ public:
+  static Result<std::unique_ptr<ArgumentsLexer>> Build(
+      const LexerFlagsSpecification& known_flags);
+
+ private:
+  static Result<FlagPatterns> GenerateFlagPatterns(
+      const LexerFlagsSpecification& known_flags);
+};
+
+}  // namespace selector
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/selector/arguments_separator.cpp b/host/commands/cvd/selector/arguments_separator.cpp
new file mode 100644
index 0000000..b1f72a1
--- /dev/null
+++ b/host/commands/cvd/selector/arguments_separator.cpp
@@ -0,0 +1,167 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/commands/cvd/selector/arguments_separator.h"
+
+#include <deque>
+
+#include <android-base/strings.h>
+
+#include "common/libs/utils/contains.h"
+
+namespace cuttlefish {
+namespace selector {
+
+// Builds a lexer limited to the pre-registered cvd flags, constructs a
+// separator, and eagerly parses, so a successfully returned separator is
+// always fully populated and its accessors are immediately usable.
+Result<std::unique_ptr<ArgumentsSeparator>> ArgumentsSeparator::Parse(
+    const FlagsRegistration& flag_registration,
+    const std::vector<std::string>& input_args) {
+  LexerFlagsSpecification lexer_flag_spec{
+      .known_boolean_flags = flag_registration.known_boolean_flags,
+      .known_value_flags = flag_registration.known_value_flags,
+  };
+  auto lexer = CF_EXPECT(ArgumentsLexerBuilder::Build(lexer_flag_spec));
+  CF_EXPECT(lexer != nullptr);
+  // NOTE(review): plain operator new throws std::bad_alloc rather than
+  // returning nullptr, so the null check below can never fire.
+  ArgumentsSeparator* new_arg_separator =
+      new ArgumentsSeparator(std::move(lexer), input_args, flag_registration);
+  CF_EXPECT(new_arg_separator != nullptr,
+            "Memory allocation failed for ArgumentSeparator");
+  std::unique_ptr<ArgumentsSeparator> arg_separator{new_arg_separator};
+  CF_EXPECT(arg_separator->Parse());
+  return std::move(arg_separator);
+}
+
+// Overload for protobuf input: copies the RepeatedPtrField into a plain
+// vector and delegates to the vector-based Parse() overload.
+Result<std::unique_ptr<ArgumentsSeparator>> ArgumentsSeparator::Parse(
+    const FlagsRegistration& flag_registration,
+    const CvdProtobufArg& input_args) {
+  std::vector<std::string> input_args_vec;
+  input_args_vec.reserve(input_args.size());
+  for (const auto& input_arg : input_args) {
+    input_args_vec.emplace_back(input_arg);
+  }
+  auto arg_separator = CF_EXPECT(Parse(flag_registration, input_args_vec));
+  return std::move(arg_separator);
+}
+
+// Overload for a single string: splits on `delim` (default " ") and
+// delegates to the vector-based Parse() overload.
+// NOTE(review): `delim` is passed by value; `const std::string&` would
+// avoid a copy, though changing it now would alter the declared signature.
+Result<std::unique_ptr<ArgumentsSeparator>> ArgumentsSeparator::Parse(
+    const FlagsRegistration& flag_registration, const std::string& input_args,
+    const std::string delim) {
+  std::vector<std::string> input_args_vec =
+      android::base::Tokenize(input_args, delim);
+  auto arg_separator = CF_EXPECT(Parse(flag_registration, input_args_vec));
+  return std::move(arg_separator);
+}
+
+// Stores the lexer and copies the registered flag/subcommand sets.
+// Parsing itself is deferred to Parse().
+ArgumentsSeparator::ArgumentsSeparator(
+    std::unique_ptr<ArgumentsLexer>&& lexer,
+    const std::vector<std::string>& input_args,
+    const FlagsRegistration& flag_registration)
+    : lexer_(std::move(lexer)),
+      input_args_(input_args),
+      known_boolean_flags_(flag_registration.known_boolean_flags),
+      known_value_flags_(flag_registration.known_value_flags),
+      valid_subcmds_(flag_registration.valid_subcommands) {}
+
+// Runs the separation and caches the four outputs in member fields so the
+// const accessors (ProgPath(), CvdArgs(), SubCmd(), SubCmdArgs()) can serve
+// them afterwards.
+Result<void> ArgumentsSeparator::Parse() {
+  auto output = CF_EXPECT(ParseInternal());
+  prog_path_ = std::move(output.prog_path);
+  cvd_args_ = std::move(output.cvd_args);
+  sub_cmd_ = std::move(output.sub_cmd);
+  sub_cmd_args_ = std::move(output.sub_cmd_args);
+  return {};
+}
+
+/*
+ * prog_name, <optional cvd flags>, sub_cmd, <optional sub_cmd flags>
+ *
+ * -- could be included, which makes things complicated. However, if -- is
+ * part of cvd flags, it's ill-formatted. If -- is among sub_cmd flags,
+ * we will just forward it.
+ *
+ * If something like this is really needed, use the suggested alternative:
+ *    original: cvd --some_flag -- --this-is-value start --subcmd_args
+ * alternative: cvd --some_flag="--this-is-value" start --subcmd_args
+ *
+ */
+// Consumes tokens front-to-back: first the program path, then registered
+// cvd flags until the first positional token (the subcommand), after which
+// everything remaining is forwarded verbatim as subcommand arguments.
+Result<ArgumentsSeparator::Output> ArgumentsSeparator::ParseInternal() {
+  CF_EXPECT(lexer_ != nullptr);
+  CF_EXPECT(!input_args_.empty());
+  Output output;
+
+  auto tokenized = CF_EXPECT(lexer_->Tokenize(input_args_));
+  std::deque<ArgToken> tokens_queue{tokenized.begin(), tokenized.end()};
+
+  // take program path/name; it must be the leading positional token
+  CF_EXPECT(!tokens_queue.empty() &&
+            tokens_queue.front().Type() == ArgType::kPositional);
+  output.prog_path = std::move(tokens_queue.front().Token());
+  tokens_queue.pop_front();
+
+  // break loop either if there is no token or
+  // the subcommand token is consumed
+  bool cvd_flags_mode = true;
+  while (!tokens_queue.empty() && cvd_flags_mode) {
+    const auto current = std::move(tokens_queue.front());
+    const auto current_type = current.Type();
+    const auto& current_token = current.Token();
+    tokens_queue.pop_front();
+
+    // look up next if any (one-token lookahead for value flags)
+    std::optional<ArgToken> next;
+    if (!tokens_queue.empty()) {
+      next = tokens_queue.front();
+    }
+
+    switch (current_type) {
+      case ArgType::kKnownValueFlag: {
+        // "--flag value" form: the following positional, if any, is the value.
+        output.cvd_args.emplace_back(current_token);
+        if (next && next->Type() == ArgType::kPositional) {
+          output.cvd_args.emplace_back(next->Token());
+          tokens_queue.pop_front();
+        }
+      } break;
+      case ArgType::kKnownFlagAndValue:
+      case ArgType::kKnownBoolFlag:
+      case ArgType::kKnownBoolNoFlag: {
+        // Self-contained tokens: "--flag=value", "--bool", "--nobool".
+        output.cvd_args.emplace_back(current_token);
+      } break;
+      case ArgType::kPositional: {
+        // First positional after the program name is the subcommand.
+        output.sub_cmd = current.Token();
+        // NOTE(review): this check is always true; sub_cmd was assigned on
+        // the line above.
+        CF_EXPECT(output.sub_cmd != std::nullopt);
+        // NOTE(review): Contains() is handed the optional itself, not
+        // *output.sub_cmd -- verify Contains compares the wrapped string.
+        CF_EXPECT(Contains(valid_subcmds_, output.sub_cmd),
+                  "Subcommand " << *(output.sub_cmd) << " is not valid");
+        cvd_flags_mode = false;
+      } break;
+      case ArgType::kDoubleDash: {
+        // "--" before the subcommand is ill-formed (see file-level comment).
+        return CF_ERR("--"
+                      << " is not allowed within cvd specific flags.");
+      }
+      case ArgType::kUnknownFlag:
+      case ArgType::kError: {
+        return CF_ERR(current.Token()
+                      << " in cvd-specific flags is disallowed.");
+      }
+    }
+  }
+  // Everything after the subcommand (including any "--") is forwarded as-is.
+  while (!tokens_queue.empty()) {
+    auto token = std::move(tokens_queue.front().Token());
+    output.sub_cmd_args.emplace_back(std::move(token));
+    tokens_queue.pop_front();
+  }
+  return output;
+}
+
+}  // namespace selector
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/selector/arguments_separator.h b/host/commands/cvd/selector/arguments_separator.h
new file mode 100644
index 0000000..a7f7899
--- /dev/null
+++ b/host/commands/cvd/selector/arguments_separator.h
@@ -0,0 +1,124 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <memory>
+#include <optional>
+#include <string>
+#include <unordered_set>
+#include <vector>
+
+#include <cvd_server.pb.h>
+
+#include "common/libs/utils/result.h"
+#include "host/commands/cvd/selector/arguments_lexer.h"
+
+namespace cuttlefish {
+namespace selector {
+
+/**
+ * The very first parser for cmdline that separates:
+ *
+ *  1. program name/path
+ *  2. cvd specific options such as --clean, selector options, etc
+ *  3. subcmd
+ *  4. subcmd arguments
+ *
+ * Note that the user's command line arguments are in this order:
+ *  $ program_path/name <optional cvd-specific flags> \
+ *                      subcmd <optional subcmd arguments>
+ *
+ * For the parser's sake, there are a few more rules.
+ *
+ * 1. All the optional cvd-specific flags should be pre-registered. Usually,
+ * the subcmd arguments do not have to be registered. However, cvd-specific
+ * flags must be.
+ *
+ *  E.g. "--clean" is the only registered cvd-specific flag, which happened
+ *      to be bool.
+ *       These are okay:
+ *         cvd --clean start --never-exist-flag
+ *         cvd --noclean stop
+ *         cvd start
+ *
+ *       However, this is not okay:
+ *        cvd --daemon start
+ *
+ *  2. --
+ *  E.g. cvd --clean start --have --some --args -- a b c d e
+ *  -- is basically for subcommands. cvd itself does not use it.
+ *  If -- is within cvd arguments, it is ill-formatted. If it is within
+ *  subcommands arguments, we simply forward it to the subtool as is.
+ *
+ */
+class ArgumentsSeparator {
+  using CvdProtobufArg = google::protobuf::RepeatedPtrField<std::string>;
+
+ public:
+  // Pre-registered cvd-specific flags plus the set of valid subcommands.
+  struct FlagsRegistration {
+    std::unordered_set<std::string> known_boolean_flags;
+    std::unordered_set<std::string> known_value_flags;
+    std::unordered_set<std::string> valid_subcommands;
+  };
+  // Factory + parse in one step: on success the returned separator is fully
+  // parsed and its accessors are ready. Overloads accept the command line as
+  // a vector, a protobuf repeated field, or a single delimited string.
+  static Result<std::unique_ptr<ArgumentsSeparator>> Parse(
+      const FlagsRegistration& flag_registration,
+      const std::vector<std::string>& input_args);
+  static Result<std::unique_ptr<ArgumentsSeparator>> Parse(
+      const FlagsRegistration& flag_registration,
+      const CvdProtobufArg& input_args);
+  static Result<std::unique_ptr<ArgumentsSeparator>> Parse(
+      const FlagsRegistration& flag_registration, const std::string& input_args,
+      const std::string delim = " ");
+
+  // Accessors for the four separated pieces; meaningful only after the
+  // factory Parse() has succeeded.
+  const std::string& ProgPath() const { return prog_path_; }
+  const std::vector<std::string>& CvdArgs() const { return cvd_args_; }
+  std::optional<std::string> SubCmd() const { return sub_cmd_; }
+  const std::vector<std::string>& SubCmdArgs() const { return sub_cmd_args_; }
+
+ private:
+  ArgumentsSeparator(std::unique_ptr<ArgumentsLexer>&& lexer,
+                     const std::vector<std::string>& input_args,
+                     const FlagsRegistration& flag_registration);
+
+  // NOTE(review): declared but no definition appears in the accompanying
+  // .cpp in this change -- confirm IsFlag() is defined or drop it.
+  bool IsFlag(const ArgType arg_type) const;
+  struct Output {
+    std::string prog_path;
+    std::vector<std::string> cvd_args;
+    std::optional<std::string> sub_cmd;
+    std::vector<std::string> sub_cmd_args;
+  };
+  Result<void> Parse();
+  Result<Output> ParseInternal();
+
+  // internals
+  std::unique_ptr<ArgumentsLexer> lexer_;
+
+  // inputs
+  std::vector<std::string> input_args_;
+  std::unordered_set<std::string> known_boolean_flags_;
+  std::unordered_set<std::string> known_value_flags_;
+  std::unordered_set<std::string> valid_subcmds_;
+
+  // outputs
+  std::string prog_path_;
+  std::vector<std::string> cvd_args_;
+  std::optional<std::string> sub_cmd_;
+  std::vector<std::string> sub_cmd_args_;
+};
+
+}  // namespace selector
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/selector/constant_reference.h b/host/commands/cvd/selector/constant_reference.h
new file mode 100644
index 0000000..199cad8
--- /dev/null
+++ b/host/commands/cvd/selector/constant_reference.h
@@ -0,0 +1,85 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <functional>
+
+namespace cuttlefish {
+namespace selector {
+
+class LocalInstanceGroup;
+class LocalInstance;
+
+// Copyable, rebindable const reference to a LocalInstance(Group), with
+// identity-based equality so it can live in unordered containers.
+template <typename T>
+class ConstRef {
+  // Deliberately restricted to the two instance-database record types.
+  static_assert(std::is_same<T, LocalInstanceGroup>::value ||
+                std::is_same<T, LocalInstance>::value);
+
+ public:
+  ConstRef(ConstRef& ref) = default;
+  ConstRef(const ConstRef& ref) = default;
+  ConstRef(ConstRef&& ref) = default;
+
+  // Implicit conversion from const T& is intended, mirroring std::cref.
+  ConstRef(const T& t) : inner_wrapper_(t) {}
+  // Binding to a temporary would dangle immediately; forbid it.
+  ConstRef(T&&) = delete;
+
+  ConstRef& operator=(const ConstRef& other) {
+    inner_wrapper_ = other.inner_wrapper_;
+    return *this;
+  }
+
+  operator const T&() const noexcept { return inner_wrapper_.get(); }
+
+  const T& Get() const noexcept { return inner_wrapper_.get(); }
+
+  /**
+   * Equality is identity: two ConstRefs compare equal iff they wrap the
+   * very same underlying object (address comparison).
+   *
+   * Only one LocalInstance(Group) object is created per instance (group)
+   * during the program's lifetime, and those types do not offer
+   * operator==; address identity is therefore sufficient, and it is what
+   * lets ConstRef<LocalInstance(Group)> be stored in a set.
+   */
+  bool operator==(const ConstRef& rhs) const noexcept {
+    return std::addressof(Get()) == std::addressof(rhs.Get());
+  }
+
+ private:
+  std::reference_wrapper<const T> inner_wrapper_;
+};
+
+// Factory helper analogous to std::cref: deduces T from the argument.
+template <class T>
+ConstRef<T> Cref(const T& t) noexcept {
+  return ConstRef<T>(t);
+}
+
+}  // namespace selector
+}  // namespace cuttlefish
+
+/**
+ * The assumption is: if std::addressof(lhs) != std::addressof(rhs),
+ * the two LocalInstance objects are actually different. There is only
+ * one LocalInstance(Group) object per a given cuttlefish instance (group).
+ */
+// Address-based hash, consistent with ConstRef::operator== above.
+template <typename T>
+struct std::hash<cuttlefish::selector::ConstRef<T>> {
+  std::size_t operator()(
+      const cuttlefish::selector::ConstRef<T>& ref) const noexcept {
+    const auto ptr = std::addressof(ref.Get());
+    return std::hash<const T*>()(ptr);
+  }
+};
diff --git a/host/commands/cvd/selector/creation_analyzer.cpp b/host/commands/cvd/selector/creation_analyzer.cpp
new file mode 100644
index 0000000..19b407e
--- /dev/null
+++ b/host/commands/cvd/selector/creation_analyzer.cpp
@@ -0,0 +1,393 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/commands/cvd/selector/creation_analyzer.h"
+
+#include <sys/types.h>
+
+#include <algorithm>
+#include <map>
+#include <regex>
+#include <set>
+#include <string>
+
+#include <android-base/parseint.h>
+#include <android-base/strings.h>
+
+#include "common/libs/utils/contains.h"
+#include "common/libs/utils/flag_parser.h"
+#include "common/libs/utils/users.h"
+#include "host/commands/cvd/common_utils.h"
+#include "host/commands/cvd/selector/instance_database_utils.h"
+#include "host/commands/cvd/selector/selector_constants.h"
+#include "host/libs/config/cuttlefish_config.h"
+
+namespace cuttlefish {
+namespace selector {
+
+// Returns true iff `cmd` is exactly "start".
+// NOTE(review): the empty() pre-check is redundant (an empty string can
+// never equal "start") but harmless.
+static bool IsCvdStart(const std::string& cmd) {
+  if (cmd.empty()) {
+    return false;
+  }
+  return cmd == "start";
+}
+
+// Entry point, valid for "cvd start" only: parses the selector flags for
+// the calling user, then constructs an analyzer and delegates to the
+// member Analyze() to produce the group creation plan.
+Result<GroupCreationInfo> CreationAnalyzer::Analyze(
+    const std::string& cmd, const CreationAnalyzerParam& param,
+    const ucred& credential, const InstanceDatabase& instance_database,
+    InstanceLockFileManager& instance_lock_file_manager) {
+  CF_EXPECT(IsCvdStart(cmd),
+            "CreationAnalyzer::Analyze() is for cvd start only.");
+  const auto client_uid = credential.uid;
+  auto selector_options_parser =
+      CF_EXPECT(StartSelectorParser::ConductSelectFlagsParser(
+          client_uid, param.selector_args, param.cmd_args, param.envs));
+  CreationAnalyzer analyzer(param, credential,
+                            std::move(selector_options_parser),
+                            instance_database, instance_lock_file_manager);
+  auto result = CF_EXPECT(analyzer.Analyze());
+  return result;
+}
+
+// Copies the request parameters and caller credential; holds references to
+// the shared instance database and lock-file manager (their lifetime must
+// outlive the analyzer).
+CreationAnalyzer::CreationAnalyzer(
+    const CreationAnalyzerParam& param, const ucred& credential,
+    StartSelectorParser&& selector_options_parser,
+    const InstanceDatabase& instance_database,
+    InstanceLockFileManager& instance_file_lock_manager)
+    : cmd_args_(param.cmd_args),
+      envs_(param.envs),
+      selector_args_(param.selector_args),
+      credential_(credential),
+      selector_options_parser_{std::move(selector_options_parser)},
+      instance_database_{instance_database},
+      instance_file_lock_manager_{instance_file_lock_manager} {}
+
+// Moves each acquired lock file into a map keyed by its instance ID.
+static std::unordered_map<unsigned, InstanceLockFile> ConstructIdLockFileMap(
+    std::vector<InstanceLockFile>&& lock_files) {
+  std::unordered_map<unsigned, InstanceLockFile> mapping;
+  for (auto& lock_file : lock_files) {
+    const unsigned id = static_cast<unsigned>(lock_file.Instance());
+    mapping.insert({id, std::move(lock_file)});
+  }
+  // Explicitly drop the moved-from shells so the caller's vector is empty.
+  lock_files.clear();
+  return mapping;
+}
+
+// Succeeds iff no instance in the database is already using `id`.
+static Result<void> IsIdAvailable(const InstanceDatabase& instance_database,
+                                  const unsigned id) {
+  auto subset =
+      CF_EXPECT(instance_database.FindInstances(Query{kInstanceIdField, id}));
+  CF_EXPECT(subset.empty());
+  return {};
+}
+
+// Handles the case where the user explicitly requested instance IDs:
+// verifies each ID is free in the instance database, pairs IDs with
+// per-instance names (user-supplied, or the stringified ID), and -- unless
+// file locking is waived -- attaches the matching instance lock files.
+Result<std::vector<PerInstanceInfo>>
+CreationAnalyzer::AnalyzeInstanceIdsInternal(
+    const std::vector<unsigned>& requested_instance_ids) {
+  CF_EXPECT(!requested_instance_ids.empty(),
+            "Instance IDs were specified, so should be one or more.");
+  for (const auto id : requested_instance_ids) {
+    // fix: typo "requeested" -> "requested" in the user-visible error
+    CF_EXPECT(IsIdAvailable(instance_database_, id),
+              "instance ID #" << id << " is requested but not available.");
+  }
+
+  // Names come from the selector options (count must match) or default to
+  // the stringified IDs.
+  std::vector<std::string> per_instance_names;
+  if (selector_options_parser_.PerInstanceNames()) {
+    per_instance_names = *selector_options_parser_.PerInstanceNames();
+    CF_EXPECT_EQ(per_instance_names.size(), requested_instance_ids.size());
+  } else {
+    for (const auto id : requested_instance_ids) {
+      per_instance_names.push_back(std::to_string(id));
+    }
+  }
+
+  std::map<unsigned, std::string> id_name_pairs;
+  for (size_t i = 0; i != requested_instance_ids.size(); i++) {
+    id_name_pairs[requested_instance_ids.at(i)] = per_instance_names.at(i);
+  }
+
+  std::vector<PerInstanceInfo> instance_info;
+  bool must_acquire_file_locks = selector_options_parser_.MustAcquireFileLock();
+  if (!must_acquire_file_locks) {
+    for (const auto& [id, name] : id_name_pairs) {
+      instance_info.emplace_back(id, name);
+    }
+    return instance_info;
+  }
+  // Lock everything available, then keep only the locks for requested IDs.
+  // NOTE(review): locks for non-requested IDs are released when
+  // id_to_lockfile_map goes out of scope -- confirm that is intended.
+  auto acquired_all_file_locks =
+      CF_EXPECT(instance_file_lock_manager_.LockAllAvailable());
+  auto id_to_lockfile_map =
+      ConstructIdLockFileMap(std::move(acquired_all_file_locks));
+  for (const auto& [id, instance_name] : id_name_pairs) {
+    CF_EXPECT(Contains(id_to_lockfile_map, id),
+              "Instance ID " << id << " lock file can't be locked.");
+    auto& lock_file = id_to_lockfile_map.at(id);
+    instance_info.emplace_back(id, instance_name, std::move(lock_file));
+  }
+  return instance_info;
+}
+
+/*
+ * Filters out the ids in id_pool that already exist in instance_database
+ */
+// Keeps only the ids from id_pool that are still free in instance_database.
+// NOTE(review): id_pool is taken by rvalue reference but only read; taking
+// it by value or const reference would express the intent more clearly.
+static Result<std::vector<unsigned>> CollectUnusedIds(
+    const InstanceDatabase& instance_database,
+    std::vector<unsigned>&& id_pool) {
+  std::vector<unsigned> collected_ids;
+  for (const auto id : id_pool) {
+    if (IsIdAvailable(instance_database, id).ok()) {
+      collected_ids.push_back(id);
+    }
+  }
+  return collected_ids;
+}
+
+// NOTE(review): this struct appears unused in the visible portion of this
+// change; consider removing it if nothing else in the file picks it up.
+struct NameLockFilePair {
+  std::string name;
+  InstanceLockFile lock_file;
+};
+// Handles the case where no instance IDs were requested: automatically
+// allocates n consecutive (preferred) unused IDs from the lockable pool,
+// favoring 1..n for a possibly-default group, and pairs each allocated ID
+// with a name and its lock file.
+Result<std::vector<PerInstanceInfo>>
+CreationAnalyzer::AnalyzeInstanceIdsInternal() {
+  CF_EXPECT(selector_options_parser_.MustAcquireFileLock(),
+            "For now, cvd server always acquire the file locks "
+                << "when IDs are automatically allocated.");
+
+  // As this test was done earlier, this line must not fail
+  const auto n_instances = selector_options_parser_.RequestedNumInstances();
+  auto acquired_all_file_locks =
+      CF_EXPECT(instance_file_lock_manager_.LockAllAvailable());
+  auto id_to_lockfile_map =
+      ConstructIdLockFileMap(std::move(acquired_all_file_locks));
+
+  /* generate n_instances consecutive ids. For backward compatibility,
+   * we prefer n consecutive ids for now.
+   */
+  std::vector<unsigned> id_pool;
+  id_pool.reserve(id_to_lockfile_map.size());
+  for (const auto& [id, _] : id_to_lockfile_map) {
+    id_pool.push_back(id);
+  }
+  auto unused_id_pool =
+      CF_EXPECT(CollectUnusedIds(instance_database_, std::move(id_pool)));
+  // NOTE(review): std::move around a prvalue return is redundant here.
+  auto unique_id_allocator = std::move(IdAllocator::New(unused_id_pool));
+  CF_EXPECT(unique_id_allocator != nullptr,
+            "Memory allocation for UniqueResourceAllocator failed.");
+
+  // auto-generation means the user did not specify much: e.g. "cvd start"
+  // In this case, the user may expect the instance id to be 1+
+  using ReservationSet = UniqueResourceAllocator<unsigned>::ReservationSet;
+  std::optional<ReservationSet> allocated_ids_opt;
+  if (selector_options_parser_.IsMaybeDefaultGroup()) {
+    allocated_ids_opt = unique_id_allocator->TakeRange(1, 1 + n_instances);
+  }
+  // Fall back to any consecutive run if 1..n was unavailable or not tried.
+  if (!allocated_ids_opt) {
+    allocated_ids_opt =
+        unique_id_allocator->UniqueConsecutiveItems(n_instances);
+  }
+  CF_EXPECT(allocated_ids_opt != std::nullopt, "Unique ID allocation failed.");
+
+  std::vector<unsigned> allocated_ids;
+  allocated_ids.reserve(allocated_ids_opt->size());
+  for (const auto& reservation : *allocated_ids_opt) {
+    allocated_ids.push_back(reservation.Get());
+  }
+  std::sort(allocated_ids.begin(), allocated_ids.end());
+
+  // If per-instance names were given, their count must match the IDs.
+  const auto per_instance_names_opt =
+      selector_options_parser_.PerInstanceNames();
+  if (per_instance_names_opt) {
+    CF_EXPECT(per_instance_names_opt->size() == allocated_ids.size());
+  }
+  std::vector<PerInstanceInfo> instance_info;
+  for (size_t i = 0; i != allocated_ids.size(); i++) {
+    const auto id = allocated_ids.at(i);
+    std::string name = (per_instance_names_opt ? per_instance_names_opt->at(i)
+                                               : std::to_string(id));
+    instance_info.emplace_back(id, name, std::move(id_to_lockfile_map.at(id)));
+  }
+  return instance_info;
+}
+
+// Dispatches to the explicit-ID or auto-allocation path depending on
+// whether the user specified instance IDs.
+Result<std::vector<PerInstanceInfo>> CreationAnalyzer::AnalyzeInstanceIds() {
+  auto requested_instance_ids = selector_options_parser_.InstanceIds();
+  return requested_instance_ids
+             ? CF_EXPECT(AnalyzeInstanceIdsInternal(*requested_instance_ids))
+             : CF_EXPECT(AnalyzeInstanceIdsInternal());
+}
+
+/*
+ * 1. Remove --num_instances, --instance_nums, --base_instance_num if any.
+ * 2. If the ids are consecutive and ordered, add:
+ *   --base_instance_num=min --num_instances=ids.size()
+ * 3. If not, --instance_nums=<ids>
+ *
+ */
+static Result<std::vector<std::string>> UpdateInstanceArgs(
+    std::vector<std::string>&& args, const std::vector<unsigned>& ids) {
+  CF_EXPECT(ids.empty() == false);
+
+  std::vector<std::string> new_args{std::move(args)};
+  std::string old_instance_nums;
+  std::string old_num_instances;
+  std::string old_base_instance_num;
+
+  std::vector<Flag> instance_id_flags{
+      GflagsCompatFlag("instance_nums", old_instance_nums),
+      GflagsCompatFlag("num_instances", old_num_instances),
+      GflagsCompatFlag("base_instance_num", old_base_instance_num)};
+  // discard old ones; ParseFlags strips them from new_args in place
+  ParseFlags(instance_id_flags, new_args);
+
+  auto max = *(std::max_element(ids.cbegin(), ids.cend()));
+  auto min = *(std::min_element(ids.cbegin(), ids.cend()));
+
+  // NOTE(review): this consecutiveness test assumes `ids` has no
+  // duplicates -- presumably guaranteed by the unique ID allocation;
+  // confirm for the explicit-ID path.
+  const bool is_consecutive = ((max - min) == (ids.size() - 1));
+  const bool is_sorted = std::is_sorted(ids.begin(), ids.end());
+
+  if (!is_consecutive || !is_sorted) {
+    std::string flag_value = android::base::Join(ids, ",");
+    new_args.push_back("--instance_nums=" + flag_value);
+    return new_args;
+  }
+
+  // sorted and consecutive, so let's use old flags
+  // like --num_instances and --base_instance_num
+  new_args.push_back("--num_instances=" + std::to_string(ids.size()));
+  new_args.push_back("--base_instance_num=" + std::to_string(min));
+  return new_args;
+}
+
+// Ensures --webrtc_device_id is set: if the user already provided one, the
+// args are returned untouched; otherwise one "<group>-<instance_name>"
+// entry per instance is generated and appended.
+Result<std::vector<std::string>> CreationAnalyzer::UpdateWebrtcDeviceId(
+    std::vector<std::string>&& args,
+    const std::vector<PerInstanceInfo>& per_instance_info) {
+  std::vector<std::string> new_args{std::move(args)};
+  std::string flag_value;
+  std::vector<Flag> webrtc_device_id_flag{
+      GflagsCompatFlag("webrtc_device_id", flag_value)};
+  // Parse on a copy so new_args keeps the flag if the user supplied it.
+  std::vector<std::string> copied_args{new_args};
+  CF_EXPECT(ParseFlags(webrtc_device_id_flag, copied_args));
+
+  if (!flag_value.empty()) {
+    return new_args;
+  }
+
+  // Requires AnalyzeGroupName() to have run first (sets group_name_).
+  CF_EXPECT(!group_name_.empty());
+  std::vector<std::string> device_name_list;
+  for (const auto& instance : per_instance_info) {
+    const auto& per_instance_name = instance.per_instance_name_;
+    std::string device_name = group_name_ + "-" + per_instance_name;
+    device_name_list.push_back(device_name);
+  }
+  // take --webrtc_device_id flag away
+  new_args = std::move(copied_args);
+  new_args.push_back("--webrtc_device_id=" +
+                     android::base::Join(device_name_list, ","));
+  return new_args;
+}
+
+// Orchestrates the full analysis: allocate IDs, rewrite the instance-number
+// flags, pick a group name, set the webrtc device ids, resolve HOME, and
+// assemble the final GroupCreationInfo. Order matters: group_name_ must be
+// set before UpdateWebrtcDeviceId() and AnalyzeHome().
+Result<GroupCreationInfo> CreationAnalyzer::Analyze() {
+  auto instance_info = CF_EXPECT(AnalyzeInstanceIds());
+  std::vector<unsigned> ids;
+  ids.reserve(instance_info.size());
+  for (const auto& instance : instance_info) {
+    ids.push_back(instance.instance_id_);
+  }
+  cmd_args_ = CF_EXPECT(UpdateInstanceArgs(std::move(cmd_args_), ids));
+
+  group_name_ = CF_EXPECT(AnalyzeGroupName(instance_info));
+  cmd_args_ =
+      CF_EXPECT(UpdateWebrtcDeviceId(std::move(cmd_args_), instance_info));
+
+  home_ = CF_EXPECT(AnalyzeHome());
+  envs_["HOME"] = home_;
+
+  // ANDROID_PRODUCT_OUT falls back to ANDROID_HOST_OUT when unset.
+  CF_EXPECT(Contains(envs_, kAndroidHostOut));
+  std::string android_product_out_path = Contains(envs_, kAndroidProductOut)
+                                             ? envs_.at(kAndroidProductOut)
+                                             : envs_.at(kAndroidHostOut);
+  GroupCreationInfo report = {.home = home_,
+                              .host_artifacts_path = envs_.at(kAndroidHostOut),
+                              .product_out_path = android_product_out_path,
+                              .group_name = group_name_,
+                              .instances = std::move(instance_info),
+                              .args = cmd_args_,
+                              .envs = envs_};
+  return report;
+}
+
+// Returns the user-given group name if any; otherwise auto-generates one:
+// the bare default name for a (still unclaimed) default group, or the
+// default name suffixed with the minimum instance ID for uniqueness.
+Result<std::string> CreationAnalyzer::AnalyzeGroupName(
+    const std::vector<PerInstanceInfo>& per_instance_infos) const {
+  if (selector_options_parser_.GroupName()) {
+    return selector_options_parser_.GroupName().value();
+  }
+  // auto-generate group name
+  std::vector<unsigned> ids;
+  ids.reserve(per_instance_infos.size());
+  for (const auto& per_instance_info : per_instance_infos) {
+    ids.push_back(per_instance_info.instance_id_);
+  }
+  std::string base_name = GenDefaultGroupName();
+  if (selector_options_parser_.IsMaybeDefaultGroup()) {
+    /*
+     * this base_name might be already taken. In that case, the user's
+     * request should fail in the InstanceDatabase
+     */
+    auto groups =
+        CF_EXPECT(instance_database_.FindGroups({kGroupNameField, base_name}));
+    CF_EXPECT(groups.empty(), "The default instance group name, \""
+                                  << base_name << "\" has been already taken.");
+    return base_name;
+  }
+
+  /* We cannot return simply "cvd" as we do not want duplication in the group
+   * name across the instance groups owned by the user. Note that the set of ids
+   * are expected to be unique to the user, so we use the ids. If ever the end
+   * user happened to have already used the generated name, we did our best, and
+   * cvd start will fail with a proper error message.
+   */
+  auto unique_suffix =
+      std::to_string(*std::min_element(ids.begin(), ids.end()));
+  return base_name + "_" + unique_suffix;
+}
+
+// Resolves the HOME directory for the new group, in priority order:
+// 1. a caller-overridden $HOME (different from the system-wide home),
+// 2. the system-wide home, for a default group not yet in the database,
+// 3. an auto-generated per-uid, per-group directory (created on demand).
+Result<std::string> CreationAnalyzer::AnalyzeHome() const {
+  auto system_wide_home = CF_EXPECT(SystemWideUserHome(credential_.uid));
+  if (Contains(envs_, "HOME") && envs_.at("HOME") != system_wide_home) {
+    return envs_.at("HOME");
+  }
+
+  if (selector_options_parser_.IsMaybeDefaultGroup()) {
+    auto groups = CF_EXPECT(
+        instance_database_.FindGroups({kHomeField, system_wide_home}));
+    if (groups.empty()) {
+      return system_wide_home;
+    }
+  }
+
+  // group_name_ must have been set by AnalyzeGroupName() before this runs.
+  CF_EXPECT(!group_name_.empty(),
+            "To auto-generate HOME, the group name is a must.");
+  const auto client_uid = credential_.uid;
+  const auto client_gid = credential_.gid;
+  std::string auto_generated_home =
+      CF_EXPECT(ParentOfAutogeneratedHomes(client_uid, client_gid));
+  auto_generated_home.append("/" + std::to_string(client_uid));
+  auto_generated_home.append("/" + group_name_);
+  CF_EXPECT(EnsureDirectoryExistsAllTheWay(auto_generated_home));
+  return auto_generated_home;
+}
+
+}  // namespace selector
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/selector/creation_analyzer.h b/host/commands/cvd/selector/creation_analyzer.h
new file mode 100644
index 0000000..3d34e21
--- /dev/null
+++ b/host/commands/cvd/selector/creation_analyzer.h
@@ -0,0 +1,175 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <sys/socket.h>  // for ucred
+
+#include <memory>
+#include <optional>
+#include <string>
+#include <unordered_map>
+#include <vector>
+
+#include "common/libs/utils/result.h"
+#include "common/libs/utils/unique_resource_allocator.h"
+#include "host/commands/cvd/instance_lock.h"
+#include "host/commands/cvd/selector/instance_database.h"
+#include "host/commands/cvd/selector/start_selector_parser.h"
+
+namespace cuttlefish {
+namespace selector {
+
+struct PerInstanceInfo {
+  // for the sake of std::vector::emplace_back
+  PerInstanceInfo(const unsigned id, const std::string& per_instance_name,
+                  InstanceLockFile&& instance_file_lock)
+      : instance_id_(id),
+        per_instance_name_(per_instance_name),
+        instance_file_lock_(std::move(instance_file_lock)) {}
+
+  PerInstanceInfo(const unsigned id, const std::string& per_instance_name)
+      : instance_id_(id), per_instance_name_(per_instance_name) {}
+
+  const unsigned instance_id_;
+  const std::string per_instance_name_;
+  std::optional<InstanceLockFile> instance_file_lock_;
+};
+
+/**
+ * Creation is currently group by group
+ *
+ * If you want one instance, you should create a group with one instance.
+ */
+struct GroupCreationInfo {
+  std::string home;
+  std::string host_artifacts_path;  ///< e.g. out/host/linux-x86
+  // set to host_artifacts_path if no ANDROID_PRODUCT_OUT
+  std::string product_out_path;
+  std::string group_name;
+  std::vector<PerInstanceInfo> instances;
+  std::vector<std::string> args;
+  std::unordered_map<std::string, std::string> envs;
+};
+
+/**
+ * Instance IDs:
+ *  Use the InstanceNumCalculator's logic
+ *
+ * HOME directory:
+ *  If given in envs and is different from the system-wide home, use it
+ *  If not, try ParentOfAutogeneratedHomes()/uid/${group_name}
+ *
+ * host_artifacts_path:
+ *  ANDROID_HOST_OUT must be given.
+ *
+ * Group name:
+ *  if a group name is not given, automatically generate:
+ *   default_prefix + "_" + one_of_ids
+ *
+ * Per-instance name:
+ *  When not given, use std::to_string(id) as the per-instance name of each instance
+ *
+ * Number of instances:
+ *  Controlled by --instance_nums, --num_instances, etc.
+ *  Also controlled by --instance_name
+ *
+ * p.s.
+ *  dependency: (a-->b means b depends on a)
+ *    group_name --> HOME
+ *    instance ids --> per_instance_name
+ *
+ */
+class CreationAnalyzer {
+ public:
+  struct CreationAnalyzerParam {
+    const std::vector<std::string>& cmd_args;
+    const std::unordered_map<std::string, std::string>& envs;
+    const std::vector<std::string>& selector_args;
+  };
+
+  static Result<GroupCreationInfo> Analyze(
+      const std::string& cmd, const CreationAnalyzerParam& param,
+      const ucred& credential, const InstanceDatabase& instance_database,
+      InstanceLockFileManager& instance_lock_file_manager);
+
+ private:
+  using IdAllocator = UniqueResourceAllocator<unsigned>;
+
+  CreationAnalyzer(const CreationAnalyzerParam& param, const ucred& credential,
+                   StartSelectorParser&& selector_options_parser,
+                   const InstanceDatabase& instance_database,
+                   InstanceLockFileManager& instance_lock_file_manager);
+
+  Result<GroupCreationInfo> Analyze();
+
+  /**
+   * calculate n_instances_ and instance_ids_
+   */
+  Result<std::vector<PerInstanceInfo>> AnalyzeInstanceIds();
+
+  /*
+   * When group name is nil, it is auto-generated using instance ids
+   *
+   * If the instance group is the default one, the group name is cvd. Otherwise,
+   * for given instance ids, {i}, the group name will be cvd_i.
+   */
+  Result<std::string> AnalyzeGroupName(
+      const std::vector<PerInstanceInfo>&) const;
+
+  /**
+   * Figures out the HOME directory
+   *
+   * The issue is that many times, HOME is anyway implicitly given. Thus, only
+   * if the HOME value is not equal to the HOME directory recognized by the
+   * system, it can be safely regarded as overridden by the user.
+   *
+   * If that is not the case, we use an automatically generated value as HOME.
+   * If the instance group is the default one, we still use the user's system-
+   * wide home directory. If not, we generate one under /tmp/.cf/<uid>/
+   *
+   */
+  Result<std::string> AnalyzeHome() const;
+
+  Result<std::vector<PerInstanceInfo>> AnalyzeInstanceIdsInternal();
+  Result<std::vector<PerInstanceInfo>> AnalyzeInstanceIdsInternal(
+      const std::vector<unsigned>& requested_instance_ids);
+
+  /*
+   * Adds --webrtc_device_id when necessary to cmd_args_
+   */
+  Result<std::vector<std::string>> UpdateWebrtcDeviceId(
+      std::vector<std::string>&& args,
+      const std::vector<PerInstanceInfo>& per_instance_info);
+
+  // inputs
+  std::vector<std::string> cmd_args_;
+  std::unordered_map<std::string, std::string> envs_;
+  std::vector<std::string> selector_args_;
+  const ucred credential_;
+
+  // information to return later
+  std::string home_;
+  std::string group_name_;
+
+  // internal, temporary
+  StartSelectorParser selector_options_parser_;
+  const InstanceDatabase& instance_database_;
+  InstanceLockFileManager& instance_file_lock_manager_;
+};
+
+}  // namespace selector
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/selector/device_selector_utils.cpp b/host/commands/cvd/selector/device_selector_utils.cpp
new file mode 100644
index 0000000..e2f36f1
--- /dev/null
+++ b/host/commands/cvd/selector/device_selector_utils.cpp
@@ -0,0 +1,38 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/commands/cvd/selector/device_selector_utils.h"
+
+#include "common/libs/utils/users.h"
+#include "host/commands/cvd/selector/selector_constants.h"
+
+namespace cuttlefish {
+namespace selector {
+
+Result<LocalInstanceGroup> GetDefaultGroup(
+    const InstanceDatabase& instance_database, const uid_t client_uid) {
+  const auto& all_groups = instance_database.InstanceGroups();
+  if (all_groups.size() == 1) {
+    return *(all_groups.front());
+  }
+  std::string system_wide_home = CF_EXPECT(SystemWideUserHome(client_uid));
+  auto group =
+      CF_EXPECT(instance_database.FindGroup({kHomeField, system_wide_home}));
+  return group;
+}
+
+}  // namespace selector
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/selector/device_selector_utils.h b/host/commands/cvd/selector/device_selector_utils.h
new file mode 100644
index 0000000..3d944c9
--- /dev/null
+++ b/host/commands/cvd/selector/device_selector_utils.h
@@ -0,0 +1,36 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+/**
+ * @file Utils shared by device selectors for non-start operations
+ *
+ */
+
+#include <sys/types.h>
+
+#include "common/libs/utils/result.h"
+#include "host/commands/cvd/selector/instance_database.h"
+
+namespace cuttlefish {
+namespace selector {
+
+Result<LocalInstanceGroup> GetDefaultGroup(
+    const InstanceDatabase& instance_database, const uid_t client_uid);
+
+}  // namespace selector
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/selector/group_selector.cpp b/host/commands/cvd/selector/group_selector.cpp
new file mode 100644
index 0000000..7100216
--- /dev/null
+++ b/host/commands/cvd/selector/group_selector.cpp
@@ -0,0 +1,98 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/commands/cvd/selector/group_selector.h"
+#include "host/commands/cvd/selector/device_selector_utils.h"
+#include "host/commands/cvd/selector/selector_constants.h"
+
+namespace cuttlefish {
+namespace selector {
+
+Result<GroupSelector> GroupSelector::GetSelector(
+    const cvd_common::Args& selector_args, const Queries& extra_queries,
+    const cvd_common::Envs& envs, const uid_t uid) {
+  cvd_common::Args selector_args_copied{selector_args};
+  SelectorCommonParser common_parser =
+      CF_EXPECT(SelectorCommonParser::Parse(uid, selector_args_copied, envs));
+  std::stringstream unused_args;
+  unused_args << "{";
+  for (const auto& arg : selector_args_copied) {
+    unused_args << arg << ", ";
+  }
+  std::string unused_arg_list = unused_args.str();
+  if (!selector_args_copied.empty()) {
+    unused_arg_list.pop_back();
+    unused_arg_list.pop_back();
+  }
+  unused_arg_list.append("}");
+  if (!selector_args_copied.empty()) {
+    LOG(ERROR) << "Warning: there are unused selector options. "
+               << unused_arg_list;
+  }
+
+  // search by group and instances
+  // search by HOME if overridden
+  Queries queries;
+  if (IsHomeOverridden(common_parser)) {
+    CF_EXPECT(common_parser.Home());
+    queries.emplace_back(kHomeField, common_parser.Home().value());
+  }
+  if (common_parser.GroupName()) {
+    queries.emplace_back(kGroupNameField, common_parser.GroupName().value());
+  }
+  if (common_parser.PerInstanceNames()) {
+    const auto per_instance_names = common_parser.PerInstanceNames().value();
+    for (const auto& per_instance_name : per_instance_names) {
+      queries.emplace_back(kInstanceNameField, per_instance_name);
+    }
+  }
+
+  for (const auto& extra_query : extra_queries) {
+    queries.push_back(extra_query);
+  }
+
+  GroupSelector group_selector(uid, queries);
+  return group_selector;
+}
+
+bool GroupSelector::IsHomeOverridden(
+    const SelectorCommonParser& common_parser) {
+  auto home_overridden_result = common_parser.HomeOverridden();
+  if (!home_overridden_result.ok()) {
+    return false;
+  }
+  return *home_overridden_result;
+}
+
+Result<LocalInstanceGroup> GroupSelector::FindGroup(
+    const InstanceDatabase& instance_database) {
+  if (queries_.empty()) {
+    auto default_group = CF_EXPECT(FindDefaultGroup(instance_database));
+    return default_group;
+  }
+  auto groups = CF_EXPECT(instance_database.FindGroups(queries_));
+  CF_EXPECT(groups.size() == 1, "groups.size() = " << groups.size());
+  return *(groups.cbegin());
+}
+
+Result<LocalInstanceGroup> GroupSelector::FindDefaultGroup(
+    const InstanceDatabase& instance_database) {
+  auto group = CF_EXPECT(GetDefaultGroup(instance_database, client_uid_));
+  return group;
+}
+
+}  // namespace selector
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/selector/group_selector.h b/host/commands/cvd/selector/group_selector.h
new file mode 100644
index 0000000..8fa2979
--- /dev/null
+++ b/host/commands/cvd/selector/group_selector.h
@@ -0,0 +1,62 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <sys/types.h>
+
+#include "common/libs/utils/result.h"
+#include "common/libs/utils/users.h"
+#include "host/commands/cvd/selector/instance_database.h"
+#include "host/commands/cvd/selector/instance_database_types.h"
+#include "host/commands/cvd/selector/selector_common_parser.h"
+#include "host/commands/cvd/types.h"
+
+namespace cuttlefish {
+namespace selector {
+
+class GroupSelector {
+ public:
+  static Result<GroupSelector> GetSelector(
+      const cvd_common::Args& selector_args, const Queries& extra_queries,
+      const cvd_common::Envs& envs, const uid_t uid);
+  /*
+   * If default, try running single instance group. If multiple, try to find
+   * HOME == SystemWideUserHome. If no such group exists, give up.
+   *
+   * If group given, find group, and check if all instance names are included
+   *
+   * If group not given, not yet supported. Will be in next CLs
+   */
+  Result<LocalInstanceGroup> FindGroup(
+      const InstanceDatabase& instance_database);
+
+ private:
+  GroupSelector(const uid_t uid, const Queries& queries)
+      : client_uid_{uid}, queries_{queries} {}
+
+  // used by Select()
+  static bool IsHomeOverridden(const SelectorCommonParser& common_parser);
+
+  Result<LocalInstanceGroup> FindDefaultGroup(
+      const InstanceDatabase& instance_database);
+
+  const uid_t client_uid_;
+  const Queries queries_;
+};
+
+}  // namespace selector
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/selector/instance_database.cpp b/host/commands/cvd/selector/instance_database.cpp
new file mode 100644
index 0000000..81b8d21
--- /dev/null
+++ b/host/commands/cvd/selector/instance_database.cpp
@@ -0,0 +1,142 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/commands/cvd/selector/instance_database.h"
+
+#include <numeric>  // std::iota
+
+#include "host/commands/cvd/selector/instance_database_utils.h"
+#include "host/commands/cvd/selector/selector_constants.h"
+
+namespace cuttlefish {
+namespace selector {
+
+InstanceDatabase::InstanceDatabase() {
+  group_handlers_[kHomeField] = [this](const Value& field_value) {
+    return FindGroupsByHome(field_value);
+  };
+  group_handlers_[kGroupNameField] = [this](const Value& field_value) {
+    return FindGroupsByGroupName(field_value);
+  };
+  group_handlers_[kInstanceNameField] = [this](const Value& field_value) {
+    return FindGroupsByInstanceName(field_value);
+  };
+  instance_handlers_[kInstanceIdField] = [this](const Value& field_value) {
+    return FindInstancesById(field_value);
+  };
+  instance_handlers_[kGroupNameField] = [this](const Value& field_value) {
+    return FindInstancesByGroupName(field_value);
+  };
+  instance_handlers_[kInstanceNameField] = [this](const Value& field_value) {
+    return FindInstancesByInstanceName(field_value);
+  };
+}
+
+bool InstanceDatabase::IsEmpty() const {
+  return local_instance_groups_.empty();
+}
+
+template <typename T>
+Result<Set<ConstRef<T>>> InstanceDatabase::Find(
+    const Query& query,
+    const Map<FieldName, ConstHandler<T>>& handler_map) const {
+  static_assert(std::is_same<T, LocalInstance>::value ||
+                std::is_same<T, LocalInstanceGroup>::value);
+  const auto& [key, value] = query;
+  auto itr = handler_map.find(key);
+  if (itr == handler_map.end()) {
+    return CF_ERR("Handler does not exist for query " << key);
+  }
+  return (itr->second)(value);
+}
+
+template <typename T>
+Result<Set<ConstRef<T>>> InstanceDatabase::Find(
+    const Queries& queries,
+    const Map<FieldName, ConstHandler<T>>& handler_map) const {
+  static_assert(std::is_same<T, LocalInstance>::value ||
+                std::is_same<T, LocalInstanceGroup>::value);
+  if (queries.empty()) {
+    return CF_ERR("Queries must not be empty");
+  }
+  auto first_set = CF_EXPECT(Find<T>(queries[0], handler_map));
+  for (int i = 1; i < queries.size(); i++) {
+    auto subset = CF_EXPECT(Find<T>(queries[i], handler_map));
+    first_set = Intersection(first_set, subset);
+  }
+  return {first_set};
+}
+
+template <typename T>
+Result<ConstRef<T>> InstanceDatabase::FindOne(
+    const Query& query,
+    const Map<FieldName, ConstHandler<T>>& handler_map) const {
+  auto set = CF_EXPECT(Find<T>(query, handler_map));
+  CF_EXPECT_EQ(set.size(), 1, "Only one Instance (Group) is allowed.");
+  return {*set.cbegin()};
+}
+
+template <typename T>
+Result<ConstRef<T>> InstanceDatabase::FindOne(
+    const Queries& queries,
+    const Map<FieldName, ConstHandler<T>>& handler_map) const {
+  auto set = CF_EXPECT(Find<T>(queries, handler_map));
+  CF_EXPECT_EQ(set.size(), 1, "Only one Instance (Group) is allowed.");
+  return {*set.cbegin()};
+}
+
+Result<Set<ConstRef<LocalInstanceGroup>>> InstanceDatabase::FindGroups(
+    const Query& query) const {
+  return Find<LocalInstanceGroup>(query, group_handlers_);
+}
+
+Result<Set<ConstRef<LocalInstanceGroup>>> InstanceDatabase::FindGroups(
+    const Queries& queries) const {
+  return Find<LocalInstanceGroup>(queries, group_handlers_);
+}
+
+Result<Set<ConstRef<LocalInstance>>> InstanceDatabase::FindInstances(
+    const Query& query) const {
+  return Find<LocalInstance>(query, instance_handlers_);
+}
+
+Result<Set<ConstRef<LocalInstance>>> InstanceDatabase::FindInstances(
+    const Queries& queries) const {
+  return Find<LocalInstance>(queries, instance_handlers_);
+}
+
+Result<ConstRef<LocalInstanceGroup>> InstanceDatabase::FindGroup(
+    const Query& query) const {
+  return FindOne<LocalInstanceGroup>(query, group_handlers_);
+}
+
+Result<ConstRef<LocalInstanceGroup>> InstanceDatabase::FindGroup(
+    const Queries& queries) const {
+  return FindOne<LocalInstanceGroup>(queries, group_handlers_);
+}
+
+Result<ConstRef<LocalInstance>> InstanceDatabase::FindInstance(
+    const Query& query) const {
+  return FindOne<LocalInstance>(query, instance_handlers_);
+}
+
+Result<ConstRef<LocalInstance>> InstanceDatabase::FindInstance(
+    const Queries& queries) const {
+  return FindOne<LocalInstance>(queries, instance_handlers_);
+}
+
+}  // namespace selector
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/selector/instance_database.h b/host/commands/cvd/selector/instance_database.h
new file mode 100644
index 0000000..74ad25a
--- /dev/null
+++ b/host/commands/cvd/selector/instance_database.h
@@ -0,0 +1,155 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <functional>
+#include <memory>
+#include <vector>
+
+#include "common/libs/utils/result.h"
+#include "host/commands/cvd/selector/constant_reference.h"
+#include "host/commands/cvd/selector/instance_database_types.h"
+#include "host/commands/cvd/selector/instance_group_record.h"
+#include "host/commands/cvd/selector/instance_record.h"
+
+namespace cuttlefish {
+namespace selector {
+
+// TODO(kwstephenkim): make this per-user instance database
+class InstanceDatabase {
+  template <typename T>
+  using ConstHandler = std::function<Result<Set<ConstRef<T>>>(const Value&)>;
+
+  using ConstGroupHandler = ConstHandler<LocalInstanceGroup>;
+  using ConstInstanceHandler = ConstHandler<LocalInstance>;
+
+ public:
+  InstanceDatabase();
+  bool IsEmpty() const;
+
+  struct AddInstanceGroupParam {
+    std::string group_name;
+    std::string home_dir;
+    std::string host_artifacts_path;
+    std::string product_out_path;
+  };
+  /** Adds instance group.
+   *
+   * If group_name or home_dir is already taken or host_artifacts_path is
+   * not likely an artifacts path, CF_ERR is returned.
+   */
+  Result<ConstRef<LocalInstanceGroup>> AddInstanceGroup(
+      const AddInstanceGroupParam& param);
+
+  Json::Value Serialize() const;
+  Result<void> LoadFromJson(const Json::Value&);
+
+  /**
+   * Adds instance to the group.
+   *
+   * If id is duplicated in the scope of the InstanceDatabase or instance_name
+   * is not unique within the group, CF_ERR is returned.
+   */
+  Result<void> AddInstance(const std::string& group_name, const unsigned id,
+                           const std::string& instance_name);
+
+  struct InstanceInfo {
+    const unsigned id;
+    const std::string name;
+  };
+  Result<void> AddInstances(const std::string& group_name,
+                            const std::vector<InstanceInfo>& instances);
+
+  Result<void> SetBuildId(const std::string& group_name,
+                          const std::string& build_id);
+
+  /*
+   *  auto group = CF_EXPECT(FindGroups(...));
+   *  RemoveInstanceGroup(group)
+   */
+  bool RemoveInstanceGroup(const LocalInstanceGroup& group);
+  bool RemoveInstanceGroup(const std::string& group_name);
+  void Clear();
+
+  Result<Set<ConstRef<LocalInstanceGroup>>> FindGroups(
+      const Query& query) const;
+  Result<Set<ConstRef<LocalInstanceGroup>>> FindGroups(
+      const Queries& queries) const;
+  Result<Set<ConstRef<LocalInstance>>> FindInstances(const Query& query) const;
+  Result<Set<ConstRef<LocalInstance>>> FindInstances(
+      const Queries& queries) const;
+  const auto& InstanceGroups() const { return local_instance_groups_; }
+
+  /*
+   * FindGroup/Instance method must be used when exactly one instance/group
+   * is expected to match the query
+   */
+  Result<ConstRef<LocalInstanceGroup>> FindGroup(const Query& query) const;
+  Result<ConstRef<LocalInstanceGroup>> FindGroup(const Queries& queries) const;
+  Result<ConstRef<LocalInstance>> FindInstance(const Query& query) const;
+  Result<ConstRef<LocalInstance>> FindInstance(const Queries& queries) const;
+
+ private:
+  template <typename T>
+  Result<Set<ConstRef<T>>> Find(
+      const Query& query,
+      const Map<FieldName, ConstHandler<T>>& handler_map) const;
+
+  template <typename T>
+  Result<Set<ConstRef<T>>> Find(
+      const Queries& queries,
+      const Map<FieldName, ConstHandler<T>>& handler_map) const;
+
+  template <typename T>
+  Result<ConstRef<T>> FindOne(
+      const Query& query,
+      const Map<FieldName, ConstHandler<T>>& handler_map) const;
+
+  template <typename T>
+  Result<ConstRef<T>> FindOne(
+      const Queries& queries,
+      const Map<FieldName, ConstHandler<T>>& handler_map) const;
+
+  std::vector<std::unique_ptr<LocalInstanceGroup>>::iterator FindIterator(
+      const LocalInstanceGroup& group);
+
+  // actual Find implementations
+  Result<Set<ConstRef<LocalInstanceGroup>>> FindGroupsByHome(
+      const Value& home) const;
+  Result<Set<ConstRef<LocalInstanceGroup>>> FindGroupsByGroupName(
+      const Value& group_name) const;
+  Result<Set<ConstRef<LocalInstanceGroup>>> FindGroupsByInstanceName(
+      const Value& instance_name) const;
+  Result<Set<ConstRef<LocalInstance>>> FindInstancesById(const Value& id) const;
+  Result<Set<ConstRef<LocalInstance>>> FindInstancesByGroupName(
+      const Value& group_name) const;
+  Result<Set<ConstRef<LocalInstance>>> FindInstancesByInstanceName(
+      const Value& instance_specific_name) const;
+
+  Result<LocalInstanceGroup*> FindMutableGroup(const std::string& group_name);
+
+  Result<void> LoadGroupFromJson(const Json::Value& group_json);
+
+  std::vector<std::unique_ptr<LocalInstanceGroup>> local_instance_groups_;
+  Map<FieldName, ConstGroupHandler> group_handlers_;
+  Map<FieldName, ConstInstanceHandler> instance_handlers_;
+
+  static constexpr const char kJsonGroups[] = "Groups";
+};
+
+}  // namespace selector
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/selector/instance_database_impl.cpp b/host/commands/cvd/selector/instance_database_impl.cpp
new file mode 100644
index 0000000..af19585
--- /dev/null
+++ b/host/commands/cvd/selector/instance_database_impl.cpp
@@ -0,0 +1,330 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/commands/cvd/selector/instance_database.h"
+
+#include <algorithm>
+#include <regex>
+#include <sstream>
+
+#include <android-base/file.h>
+#include <android-base/parseint.h>
+
+#include "common/libs/utils/contains.h"
+#include "common/libs/utils/files.h"
+#include "host/commands/cvd/common_utils.h"
+#include "host/commands/cvd/selector/instance_database_utils.h"
+#include "host/commands/cvd/selector/selector_constants.h"
+
+namespace cuttlefish {
+namespace selector {
+
+std::vector<std::unique_ptr<LocalInstanceGroup>>::iterator
+InstanceDatabase::FindIterator(const LocalInstanceGroup& group) {
+  for (auto itr = local_instance_groups_.begin();
+       itr != local_instance_groups_.end(); itr++) {
+    if (itr->get() == std::addressof(group)) {
+      return itr;
+    }
+  }
+  // must not reach here
+  return local_instance_groups_.end();
+}
+
+void InstanceDatabase::Clear() { local_instance_groups_.clear(); }
+
+Result<ConstRef<LocalInstanceGroup>> InstanceDatabase::AddInstanceGroup(
+    const AddInstanceGroupParam& param) {
+  CF_EXPECT(IsValidGroupName(param.group_name),
+            "GroupName " << param.group_name << " is ill-formed.");
+  CF_EXPECT(EnsureDirectoryExistsAllTheWay(param.home_dir),
+            "HOME dir, " << param.home_dir << " does not exist");
+  CF_EXPECT(PotentiallyHostArtifactsPath(param.host_artifacts_path),
+            "ANDROID_HOST_OUT, " << param.host_artifacts_path
+                                 << " is not a tool dir");
+  std::vector<Query> queries = {{kHomeField, param.home_dir},
+                                {kGroupNameField, param.group_name}};
+  for (const auto& query : queries) {
+    auto instance_groups =
+        CF_EXPECT(Find<LocalInstanceGroup>(query, group_handlers_));
+    std::stringstream err_msg;
+    err_msg << query.field_name_ << " : " << query.field_value_
+            << " is already taken.";
+    CF_EXPECT(instance_groups.empty(), err_msg.str());
+  }
+  auto new_group =
+      new LocalInstanceGroup({.group_name = param.group_name,
+                              .home_dir = param.home_dir,
+                              .host_artifacts_path = param.host_artifacts_path,
+                              .product_out_path = param.product_out_path});
+  CF_EXPECT(new_group != nullptr);
+  local_instance_groups_.emplace_back(new_group);
+  const auto raw_ptr = local_instance_groups_.back().get();
+  ConstRef<LocalInstanceGroup> const_ref = *raw_ptr;
+  return {const_ref};
+}
+
+Result<void> InstanceDatabase::AddInstance(const std::string& group_name,
+                                           const unsigned id,
+                                           const std::string& instance_name) {
+  LocalInstanceGroup* group_ptr = CF_EXPECT(FindMutableGroup(group_name));
+  LocalInstanceGroup& group = *group_ptr;
+
+  CF_EXPECT(IsValidInstanceName(instance_name),
+            "instance_name " << instance_name << " is invalid.");
+  auto itr = FindIterator(group);
+  CF_EXPECT(
+      itr != local_instance_groups_.end() && *itr != nullptr,
+      "Adding instances to non-existing group " + group.InternalGroupName());
+
+  auto instances =
+      CF_EXPECT(FindInstances({kInstanceIdField, std::to_string(id)}));
+  if (instances.size() != 0) {
+    return CF_ERR("instance id " << id << " is taken");
+  }
+
+  auto instances_by_name = CF_EXPECT((*itr)->FindByInstanceName(instance_name));
+  if (!instances_by_name.empty()) {
+    return CF_ERR("instance name " << instance_name << " is taken");
+  }
+  return (*itr)->AddInstance(id, instance_name);
+}
+
+Result<void> InstanceDatabase::AddInstances(
+    const std::string& group_name, const std::vector<InstanceInfo>& instances) {
+  for (const auto& instance_info : instances) {
+    CF_EXPECT(AddInstance(group_name, instance_info.id, instance_info.name));
+  }
+  return {};
+}
+
+Result<void> InstanceDatabase::SetBuildId(const std::string& group_name,
+                                          const std::string& build_id) {
+  auto* group_ptr = CF_EXPECT(FindMutableGroup(group_name));
+  auto& group = *group_ptr;
+  group.SetBuildId(build_id);
+  return {};
+}
+
+Result<LocalInstanceGroup*> InstanceDatabase::FindMutableGroup(
+    const std::string& group_name) {
+  LocalInstanceGroup* group_ptr = nullptr;
+  for (auto& group_uniq_ptr : local_instance_groups_) {
+    if (group_uniq_ptr && group_uniq_ptr->GroupName() == group_name) {
+      group_ptr = group_uniq_ptr.get();
+      break;
+    }
+  }
+  CF_EXPECT(group_ptr != nullptr,
+            "Instance Group named as " << group_name << " is not found.");
+  return group_ptr;
+}
+
+bool InstanceDatabase::RemoveInstanceGroup(const std::string& group_name) {
+  auto group_result = FindGroup({kGroupNameField, group_name});
+  if (!group_result.ok()) {
+    return false;
+  }
+  const LocalInstanceGroup& group = group_result->Get();
+  return RemoveInstanceGroup(group);
+}
+
+// Erases the unique_ptr slot that owns `group`. Returns false when `group`
+// is not owned by this database, or when the slot is already null.
+bool InstanceDatabase::RemoveInstanceGroup(const LocalInstanceGroup& group) {
+  auto itr = FindIterator(group);
+  // *itr is the reference to the unique pointer object
+  if (itr == local_instance_groups_.end() || !(*itr)) {
+    return false;
+  }
+  local_instance_groups_.erase(itr);
+  return true;
+}
+
+// Finds the group whose HOME directory matches `home`, first by exact
+// string comparison and then by resolving symlinks on both sides, so two
+// spellings of the same directory compare equal. HOME uniquely identifies
+// a group, hence AtMostOne at the end.
+Result<Set<ConstRef<LocalInstanceGroup>>> InstanceDatabase::FindGroupsByHome(
+    const std::string& home) const {
+  auto subset = CollectToSet<LocalInstanceGroup>(
+      local_instance_groups_,
+      [&home](const std::unique_ptr<LocalInstanceGroup>& group) {
+        if (!group) {
+          return false;
+        }
+        // Fast path: literal string equality.
+        if (group->HomeDir() == home) {
+          return true;
+        }
+        if (group->HomeDir().empty() || home.empty()) {
+          return false;
+        }
+        // The two paths must be an absolute path.
+        // this is guaranteed by the CreationAnalyzer
+        std::string home_realpath;
+        std::string group_home_realpath;
+        // Realpath failure (e.g. the directory no longer exists) is
+        // treated as "no match", not as an error.
+        if (!android::base::Realpath(home, std::addressof(home_realpath))) {
+          return false;
+        }
+        if (!android::base::Realpath(group->HomeDir(),
+                                     std::addressof(group_home_realpath))) {
+          return false;
+        }
+        return home_realpath == group_home_realpath;
+      });
+  return AtMostOne(subset, GenerateTooManyInstancesErrorMsg(1, kHomeField));
+}
+
+Result<Set<ConstRef<LocalInstanceGroup>>>
+InstanceDatabase::FindGroupsByGroupName(const std::string& group_name) const {
+  auto subset = CollectToSet<LocalInstanceGroup>(
+      local_instance_groups_,
+      [&group_name](const std::unique_ptr<LocalInstanceGroup>& group) {
+        return (group && group->GroupName() == group_name);
+      });
+  return AtMostOne(subset,
+                   GenerateTooManyInstancesErrorMsg(1, kGroupNameField));
+}
+
+// Finds every group that contains an instance with the given per-instance
+// name. Unlike group names, per-instance names are only unique *within* a
+// group, so multiple groups may legitimately match; no AtMostOne here.
+Result<Set<ConstRef<LocalInstanceGroup>>>
+InstanceDatabase::FindGroupsByInstanceName(
+    const std::string& instance_name) const {
+  auto subset = CollectToSet<LocalInstanceGroup>(
+      local_instance_groups_,
+      [&instance_name](const std::unique_ptr<LocalInstanceGroup>& group) {
+        if (!group) {
+          return false;
+        }
+        // A group matches iff the per-group lookup succeeds with exactly
+        // one hit.
+        auto instance_set_result = group->FindByInstanceName(instance_name);
+        return instance_set_result.ok() && (instance_set_result->size() == 1);
+      });
+  return subset;
+}
+
+// Finds the instance with the given (stringified) numeric id. Instance ids
+// are unique across the whole database, so at most one hit is allowed.
+Result<Set<ConstRef<LocalInstance>>> InstanceDatabase::FindInstancesById(
+    const std::string& id) const {
+  int parsed_int = 0;
+  if (!android::base::ParseInt(id, &parsed_int)) {
+    return CF_ERR(id << " cannot be converted to an integer");
+  }
+  // Per-group lookup; CF_EXPECT rejects null group slots as corruption.
+  auto collector =
+      [parsed_int](const std::unique_ptr<LocalInstanceGroup>& group)
+      -> Result<Set<ConstRef<LocalInstance>>> {
+    CF_EXPECT(group != nullptr);
+    return group->FindById(parsed_int);
+  };
+  auto subset = CollectAllElements<LocalInstance, LocalInstanceGroup>(
+      collector, local_instance_groups_);
+  CF_EXPECT(subset.ok());
+  return AtMostOne(*subset,
+                   GenerateTooManyInstancesErrorMsg(1, kInstanceIdField));
+}
+
+// Finds all instances carrying the given per-instance name; names may
+// repeat across groups, so the result can hold multiple instances.
+Result<Set<ConstRef<LocalInstance>>>
+InstanceDatabase::FindInstancesByInstanceName(
+    const Value& instance_specific_name) const {
+  auto collector = [&instance_specific_name](
+                       const std::unique_ptr<LocalInstanceGroup>& group)
+      -> Result<Set<ConstRef<LocalInstance>>> {
+    CF_EXPECT(group != nullptr);
+    return (group->FindByInstanceName(instance_specific_name));
+  };
+  return CollectAllElements<LocalInstance, LocalInstanceGroup>(
+      collector, local_instance_groups_);
+}
+
+// Returns every instance belonging to the named group (empty set for all
+// non-matching groups, so the union is just that group's instances).
+Result<Set<ConstRef<LocalInstance>>> InstanceDatabase::FindInstancesByGroupName(
+    const Value& group_name) const {
+  auto collector =
+      [&group_name](const std::unique_ptr<LocalInstanceGroup>& group)
+      -> Result<Set<ConstRef<LocalInstance>>> {
+    CF_EXPECT(group != nullptr);
+    if (group->GroupName() != group_name) {
+      Set<ConstRef<LocalInstance>> empty_set;
+      return empty_set;
+    }
+    return (group->FindAllInstances());
+  };
+  return CollectAllElements<LocalInstance, LocalInstanceGroup>(
+      collector, local_instance_groups_);
+}
+
+Json::Value InstanceDatabase::Serialize() const {
+  Json::Value instance_db_json;
+  int i = 0;
+  Json::Value group_array;
+  for (const auto& local_instance_group : local_instance_groups_) {
+    group_array[i] = local_instance_group->Serialize();
+    ++i;
+  }
+  instance_db_json[kJsonGroups] = group_array;
+  return instance_db_json;
+}
+
+Result<void> InstanceDatabase::LoadGroupFromJson(
+    const Json::Value& group_json) {
+  const std::string group_name =
+      group_json[LocalInstanceGroup::kJsonGroupName].asString();
+  const std::string home_dir =
+      group_json[LocalInstanceGroup::kJsonHomeDir].asString();
+  const std::string host_artifacts_path =
+      group_json[LocalInstanceGroup::kJsonHostArtifactPath].asString();
+  const std::string product_out_path =
+      group_json[LocalInstanceGroup::kJsonProductOutPath].asString();
+  const std::string build_id_value =
+      group_json[LocalInstanceGroup::kJsonBuildId].asString();
+  std::optional<std::string> build_id;
+  if (build_id_value != LocalInstanceGroup::kJsonUnknownBuildId) {
+    build_id = build_id_value;
+  }
+  const auto new_group_ref =
+      CF_EXPECT(AddInstanceGroup({.group_name = group_name,
+                                  .home_dir = home_dir,
+                                  .host_artifacts_path = host_artifacts_path,
+                                  .product_out_path = product_out_path}));
+  if (build_id) {
+    CF_EXPECT(SetBuildId(group_name, *build_id));
+  }
+  const Json::Value& instances_json_array =
+      group_json[LocalInstanceGroup::kJsonInstances];
+  for (int i = 0; i < instances_json_array.size(); i++) {
+    const Json::Value& instance_json = instances_json_array[i];
+    const std::string instance_name =
+        instance_json[LocalInstance::kJsonInstanceName].asString();
+    const std::string instance_id =
+        instance_json[LocalInstance::kJsonInstanceId].asString();
+    int id;
+    auto parse_result =
+        android::base::ParseInt(instance_id, std::addressof(id));
+    if (!parse_result) {
+      CF_EXPECT(parse_result == true,
+                "Invalid instance ID in instance json: " << instance_id);
+      RemoveInstanceGroup(new_group_ref.Get());
+    }
+    auto add_instance_result = AddInstance(group_name, id, instance_name);
+    if (!add_instance_result.ok()) {
+      RemoveInstanceGroup(new_group_ref.Get());
+      CF_EXPECT(add_instance_result.ok(), add_instance_result.error().Trace());
+    }
+  }
+  return {};
+}
+
+Result<void> InstanceDatabase::LoadFromJson(const Json::Value& db_json) {
+  const Json::Value& group_array = db_json[kJsonGroups];
+  int n_groups = group_array.size();
+  for (int i = 0; i < n_groups; i++) {
+    CF_EXPECT(LoadGroupFromJson(group_array[i]));
+  }
+  return {};
+}
+
+}  // namespace selector
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/selector/instance_database_types.cpp b/host/commands/cvd/selector/instance_database_types.cpp
new file mode 100644
index 0000000..994917c
--- /dev/null
+++ b/host/commands/cvd/selector/instance_database_types.cpp
@@ -0,0 +1,26 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/commands/cvd/selector/instance_database_types.h"
+
+namespace cuttlefish {
+namespace selector {
+
+// Stores the field/value pair verbatim. The templated overload declared in
+// the header handles non-string values convertible via std::to_string.
+Query::Query(const std::string& field_name, const std::string& field_value)
+    : field_name_(field_name), field_value_(field_value) {}
+
+}  // namespace selector
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/selector/instance_database_types.h b/host/commands/cvd/selector/instance_database_types.h
new file mode 100644
index 0000000..7710483
--- /dev/null
+++ b/host/commands/cvd/selector/instance_database_types.h
@@ -0,0 +1,66 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <string>
+#include <type_traits>
+#include <unordered_map>
+#include <unordered_set>
+#include <vector>
+
+namespace cuttlefish {
+namespace selector {
+namespace selector_impl {
+
+// Well-formed iff std::to_string(ValueType) compiles; always names `void`.
+// Used below as the SFINAE probe for the detection idiom.
+template <typename ValueType>
+using ToStringTypeReturnType =
+    decltype(void(std::to_string(std::declval<ValueType&>())));
+
+// Primary template: ValueType has no usable std::to_string overload.
+template <typename T, typename = void>
+struct IsToStringOk : std::false_type {};
+
+// Specialization chosen when std::to_string(T) is well-formed.
+template <typename T>
+struct IsToStringOk<T, ToStringTypeReturnType<T>> : std::true_type {};
+
+}  // namespace selector_impl
+
+using FieldName = std::string;
+using Value = std::string;
+// e.g. if intended to search by --home=/home/vsoc-01,
+// field_name_ is "home" and the field_value_ is "/home/vsoc-01"
+struct Query {
+  // Convenience constructor for numeric (and other std::to_string-able)
+  // values; participates in overload resolution only when
+  // std::to_string(ValueType) is well-formed (see IsToStringOk).
+  template <typename ValueType,
+            typename = std::enable_if_t<
+                selector_impl::IsToStringOk<ValueType>::value, void>>
+  Query(const std::string& field_name, ValueType&& field_value)
+      : field_name_(field_name),
+        field_value_(std::to_string(std::forward<ValueType>(field_value))) {}
+  // Plain string-valued query; defined in instance_database_types.cpp.
+  Query(const std::string& field_name, const std::string& field_value);
+
+  FieldName field_name_;
+  Value field_value_;
+};
+using Queries = std::vector<Query>;
+
+template <typename T>
+using Set = std::unordered_set<T>;
+
+template <typename K, typename V>
+using Map = std::unordered_map<K, V>;
+
+}  // namespace selector
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/selector/instance_database_utils.cpp b/host/commands/cvd/selector/instance_database_utils.cpp
new file mode 100644
index 0000000..0aa8368
--- /dev/null
+++ b/host/commands/cvd/selector/instance_database_utils.cpp
@@ -0,0 +1,131 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/commands/cvd/selector/instance_database_utils.h"
+
+#include <regex>
+#include <set>
+#include <sstream>
+#include <string_view>
+#include <vector>
+
+#include <android-base/file.h>
+#include <android-base/strings.h>
+
+#include "common/libs/utils/files.h"
+#include "host/libs/config/cuttlefish_config.h"
+
+namespace cuttlefish {
+namespace selector {
+
+// Resolves <home>/cuttlefish_assembly/cuttlefish_config.json. `home` is
+// canonicalized via Realpath first so equivalent spellings of the same
+// directory map to the same config file. Errors if the directory or the
+// config file does not exist.
+Result<std::string> GetCuttlefishConfigPath(const std::string& home) {
+  std::string home_realpath;
+  CF_EXPECT(DirectoryExists(home), "Invalid Home Directory");
+  CF_EXPECT(android::base::Realpath(home, &home_realpath));
+  static const char kSuffix[] = "/cuttlefish_assembly/cuttlefish_config.json";
+  std::string config_path = AbsolutePath(home_realpath + kSuffix);
+  CF_EXPECT(FileExists(config_path), "No config file exists");
+  return {config_path};
+}
+
+std::string GenInternalGroupName() {
+  std::string_view internal_name{kCvdNamePrefix};  // "cvd-"
+  internal_name.remove_suffix(1);                  // "cvd"
+  return std::string(internal_name);
+}
+
+std::string GenDefaultGroupName() { return GenInternalGroupName(); }
+
std::string LocalDeviceNameRule(const std::string& group_name,
                                const std::string& instance_name) {
  // Canonical local device name: "<group>-<instance>".
  std::string device_name(group_name);
  device_name.append("-").append(instance_name);
  return device_name;
}
+
// Group names follow C-identifier rules: [A-Za-z_][A-Za-z_0-9]*.
// (In particular, they may not start with a digit or contain '-'.)
bool IsValidGroupName(const std::string& token) {
  // Compiled once: std::regex construction is comparatively expensive and
  // the pattern never changes, so avoid rebuilding it on every call.
  static const std::regex kGroupNameRegex("[A-Za-z_][A-Za-z_0-9]*");
  return std::regex_match(token, kGroupNameRegex);
}
+
+bool IsValidInstanceName(const std::string& token) {
+  if (token.empty()) {
+    return false;
+  }
+  std::regex base_regular_expr("[A-Za-z_0-9]+");
+  auto pieces = android::base::Split(token, "-");
+  for (const auto& piece : pieces) {
+    if (!std::regex_match(piece, base_regular_expr)) {
+      return false;
+    }
+  }
+  return true;
+}
+
+// Splits "<group>-<instance...>" at the *first* dash; the instance part may
+// itself contain dashes (e.g. "cvd-foo-bar" -> {"cvd", "foo-bar"}). Errors
+// on an empty input, a missing dash, or a dash at either end.
+Result<DeviceName> BreakDeviceName(const std::string& device_name) {
+  CF_EXPECT(!device_name.empty());
+  CF_EXPECT(Contains(device_name, '-'));
+  auto dash_pos = device_name.find_first_of('-');
+  // - must be neither the first nor the last character
+  CF_EXPECT(dash_pos != 0 && dash_pos != (device_name.size() - 1));
+  const auto group_name = device_name.substr(0, dash_pos);
+  const auto instance_name = device_name.substr(dash_pos + 1);
+  return DeviceName{.group_name = group_name,
+                    .per_instance_name = instance_name};
+}
+
+bool IsValidDeviceName(const std::string& token) {
+  if (token.empty()) {
+    return false;
+  }
+  auto result = BreakDeviceName(token);
+  if (!result.ok()) {
+    return false;
+  }
+  const auto [group_name, instance_name] = *result;
+  return IsValidGroupName(group_name) && IsValidInstanceName(instance_name);
+}
+
+// Heuristic check that `host_artifacts_path` looks like a host-tools tree:
+// it must be an existing, listable directory whose bin/ subdirectory
+// contains at least one known launcher ("cvd" or "launch_cvd").
+bool PotentiallyHostArtifactsPath(const std::string& host_artifacts_path) {
+  if (host_artifacts_path.empty() || !DirectoryExists(host_artifacts_path)) {
+    return false;
+  }
+  const auto host_bin_path = host_artifacts_path + "/bin";
+  auto contents_result = DirectoryContents(host_bin_path);
+  if (!contents_result.ok()) {
+    return false;
+  }
+  // set_intersection requires sorted ranges, hence std::set on both sides.
+  std::vector<std::string> contents = std::move(*contents_result);
+  std::set<std::string> contents_set{std::move_iterator(contents.begin()),
+                                     std::move_iterator(contents.end())};
+  std::set<std::string> launchers = {"cvd", "launch_cvd"};
+  std::vector<std::string> result;
+  std::set_intersection(launchers.cbegin(), launchers.cend(),
+                        contents_set.cbegin(), contents_set.cend(),
+                        std::back_inserter(result));
+  return !result.empty();
+}
+
std::string GenerateTooManyInstancesErrorMsg(const int n,
                                             const std::string& field_name) {
  // Builds "Only up to <n> must match", optionally qualified by the field
  // that was being matched on.
  std::string message = "Only up to " + std::to_string(n) + " must match";
  if (!field_name.empty()) {
    message += " by the field " + field_name;
  }
  return message;
}
+
+}  // namespace selector
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/selector/instance_database_utils.h b/host/commands/cvd/selector/instance_database_utils.h
new file mode 100644
index 0000000..198064a
--- /dev/null
+++ b/host/commands/cvd/selector/instance_database_utils.h
@@ -0,0 +1,179 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <algorithm>
+#include <memory>
+#include <string>
+
+#include "common/libs/utils/collect.h"
+#include "common/libs/utils/contains.h"
+#include "common/libs/utils/result.h"
+#include "host/commands/cvd/selector/constant_reference.h"
+#include "host/commands/cvd/selector/instance_database_types.h"
+
+namespace cuttlefish {
+namespace selector {
+
+Result<std::string> GetCuttlefishConfigPath(const std::string& home);
+
+std::string GenInternalGroupName();
+std::string GenDefaultGroupName();
+std::string LocalDeviceNameRule(const std::string& group_name,
+                                const std::string& instance_name);
+
+// [A-Za-z0-9_]+, e.g. 0, tv, my_phone07, etc
+// Or, it can include "-" in the middle
+// ([A-Za-z0-9_]+[-])*[A-Za-z0-9_]
+bool IsValidInstanceName(const std::string& token);
+
+// [A-Za-z_][A-Za-z0-9_]*, e.g. cool_group, cv0_d, cf, etc
+// but can't start with [0-9]
+bool IsValidGroupName(const std::string& token);
+
+// <valid group name>-<valid instance name>
+bool IsValidDeviceName(const std::string& token);
+
+struct DeviceName {
+  std::string group_name;
+  std::string per_instance_name;
+};
+Result<DeviceName> BreakDeviceName(const std::string& device_name);
+
+/**
+ * Runs simple tests to see if it could potentially be a host artifacts dir
+ *
+ */
+bool PotentiallyHostArtifactsPath(const std::string& host_binaries_dir);
+
+/**
+ * simply returns:
+ *
+ * "Only up to n must match" or
+ * "Only up to n must match by field " + FieldName
+ *
+ */
+std::string GenerateTooManyInstancesErrorMsg(const int n,
+                                             const std::string& field_name);
+
+/**
+ * return all the elements in container that satisfies predicate.
+ *
+ * Container has Wrappers, where each Wrapper is typically,
+ * std::unique/shared_ptr of T, or some wrapper of T, etc. Set is a set of T.
+ *
+ * This method returns the Set of T, as long as its corresponding Wrapper in
+ * Container meets the predicate.
+ */
+template <typename T, typename Wrapper, typename Set, typename Container>
+Set Collect(const Container& container,
+            std::function<bool(const Wrapper&)> predicate,
+            std::function<T(const Wrapper&)> convert) {
+  // Generic filter-then-map: keep elements passing `predicate`, insert the
+  // `convert`-ed value into a Set (duplicates collapse silently).
+  Set output;
+  for (const auto& t : container) {
+    if (!predicate(t)) {
+      continue;
+    }
+    output.insert(convert(t));
+  }
+  return output;
+}
+
+/*
+ * Returns a Set of ConstRef<T>, which essentially satisfies "predicate"
+ *
+ * Container has a list/set of std::unique_ptr<T>. We collect all the
+ * const references of each object owned by Container, which meets the
+ * condition defined by predicate.
+ *
+ */
+template <typename T, typename Container>
+Set<ConstRef<T>> CollectToSet(
+    Container&& container,
+    std::function<bool(const std::unique_ptr<T>&)> predicate) {
+  // NOTE: `convert` dereferences the unique_ptr unconditionally, so the
+  // predicate MUST return false for null entries; all callers in this
+  // library do guard against null before matching.
+  auto convert = [](const std::unique_ptr<T>& uniq_ptr) {
+    return Cref(*uniq_ptr);
+  };
+  return Collect<ConstRef<T>, std::unique_ptr<T>, Set<ConstRef<T>>>(
+      std::forward<Container>(container), std::move(predicate),
+      std::move(convert));
+}
+
+/**
+ * Given:
+ *  Containers have a list of n `Container`s. Each Container may have
+ *  m Element. Each is stored as a unique_ptr.
+ *
+ * Goal:
+ *  To collect Elements from each Container with Container's APIs. The
+ *  collected Elements meet the condition implicitly defined in collector.
+ *
+ * E.g. InstanceDatabase has InstanceGroups, each has Instances. We want
+ * all the Instances its build-target was TV. Then, collector will look
+ * like this:
+ * [&build_target](const std::unique_ptr<InstanceGroup>& group) {
+ *   return group->FindByBuildTarget(build_target);
+ * }
+ *
+ * We take the union of all the returned subsets from each collector call.
+ */
+template <typename Element, typename Container, typename Containers>
+Result<Set<ConstRef<Element>>> CollectAllElements(
+    std::function<
+        Result<Set<ConstRef<Element>>>(const std::unique_ptr<Container>&)>
+        collector,
+    const Containers& outermost_container) {
+  // Union of all per-container subsets. A failing collector aborts the
+  // entire collection via CF_EXPECT rather than being skipped.
+  Set<ConstRef<Element>> output;
+  for (const auto& container_ptr : outermost_container) {
+    auto subset = CF_EXPECT(collector(container_ptr));
+    output.insert(subset.cbegin(), subset.cend());
+  }
+  return {output};
+}
+
+template <typename S>
+Result<typename std::remove_reference<S>::type> AtMostOne(
+    S&& s, const std::string& err_msg) {
+  CF_EXPECT(AtMostN(std::forward<S>(s), 1), err_msg);
+  return {std::forward<S>(s)};
+}
+
+// Returns the elements of `v` that are also present in `u`, as a RetSet.
+// Either side being empty short-circuits to an empty result.
+template <typename RetSet, typename AnyContainer>
+RetSet Intersection(const RetSet& u, AnyContainer&& v) {
+  RetSet result;
+  if (u.empty() || v.empty()) {
+    return result;
+  }
+  for (auto const& e : v) {
+    if (Contains(u, e)) {
+      result.insert(e);
+    }
+  }
+  return result;
+}
+
+// Variadic overload: folds Intersection over all containers left-to-right,
+// stopping early as soon as the running intersection becomes empty.
+template <typename RetSet, typename AnyContainer, typename... Containers>
+RetSet Intersection(const RetSet& u, AnyContainer&& v, Containers&&... s) {
+  RetSet first = Intersection(u, std::forward<AnyContainer>(v));
+  if (first.empty()) {
+    return first;
+  }
+  return Intersection(first, std::forward<Containers>(s)...);
+}
+
+}  // namespace selector
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/selector/instance_group_record.cpp b/host/commands/cvd/selector/instance_group_record.cpp
new file mode 100644
index 0000000..5045944
--- /dev/null
+++ b/host/commands/cvd/selector/instance_group_record.cpp
@@ -0,0 +1,169 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/commands/cvd/selector/instance_group_record.h"
+
+#include "host/commands/cvd/selector/instance_database_utils.h"
+#include "host/commands/cvd/selector/selector_constants.h"
+
+namespace cuttlefish {
+namespace selector {
+
+// Primary constructor; private in the header, reachable only by
+// InstanceDatabase and the befriended test fixtures.
+LocalInstanceGroup::LocalInstanceGroup(const InstanceGroupParam& param)
+    : home_dir_{param.home_dir},
+      host_artifacts_path_{param.host_artifacts_path},
+      product_out_path_{param.product_out_path},
+      internal_group_name_(GenInternalGroupName()),
+      group_name_(param.group_name) {}
+
+// Deep copy: each LocalInstance is re-created so that it references *this*
+// group as its parent (see CopyInstances).
+LocalInstanceGroup::LocalInstanceGroup(const LocalInstanceGroup& src)
+    : home_dir_{src.home_dir_},
+      host_artifacts_path_{src.host_artifacts_path_},
+      product_out_path_{src.product_out_path_},
+      internal_group_name_{src.internal_group_name_},
+      group_name_{src.group_name_},
+      build_id_{src.build_id_},
+      instances_{CopyInstances(src.instances_)} {}
+
+// Copy assignment mirrors the copy constructor, with a self-assignment
+// guard since CopyInstances rebuilds the instance set.
+LocalInstanceGroup& LocalInstanceGroup::operator=(
+    const LocalInstanceGroup& src) {
+  if (this == std::addressof(src)) {
+    return *this;
+  }
+  home_dir_ = src.home_dir_;
+  host_artifacts_path_ = src.host_artifacts_path_;
+  product_out_path_ = src.product_out_path_;
+  internal_group_name_ = src.internal_group_name_;
+  group_name_ = src.group_name_;
+  build_id_ = src.build_id_;
+  instances_ = CopyInstances(src.instances_);
+  return *this;
+}
+
+Set<std::unique_ptr<LocalInstance>> LocalInstanceGroup::CopyInstances(
+    const Set<std::unique_ptr<LocalInstance>>& src_instances) {
+  Set<std::unique_ptr<LocalInstance>> copied;
+  // Due to the const reference to the parent, LocalInstanceGroup,
+  // the LocalInstance class does not have a copy constructor
+  for (const auto& src_instance : src_instances) {
+    LocalInstance* new_instance = new LocalInstance(
+        *this, src_instance->InstanceId(), src_instance->PerInstanceName());
+    copied.emplace(new_instance);
+  }
+  return copied;
+}
+
+Result<std::string> LocalInstanceGroup::GetCuttlefishConfigPath() const {
+  return ::cuttlefish::selector::GetCuttlefishConfigPath(HomeDir());
+}
+
+Result<void> LocalInstanceGroup::AddInstance(const unsigned instance_id,
+                                             const std::string& instance_name) {
+  if (HasInstance(instance_id)) {
+    return CF_ERR("Instance Id " << instance_id << " is taken");
+  }
+  LocalInstance* instance =
+      new LocalInstance(*this, instance_id, instance_name);
+  instances_.emplace(std::unique_ptr<LocalInstance>(instance));
+  return {};
+}
+
+// Looks up the instance with the given numeric id; ids are unique within a
+// group, so more than one hit is an internal consistency error.
+Result<Set<ConstRef<LocalInstance>>> LocalInstanceGroup::FindById(
+    const unsigned id) const {
+  auto subset = CollectToSet<LocalInstance>(
+      instances_, [&id](const std::unique_ptr<LocalInstance>& instance) {
+        return instance && (instance->InstanceId() == id);
+      });
+  return AtMostOne(subset,
+                   GenerateTooManyInstancesErrorMsg(1, kInstanceIdField));
+}
+
+// Looks up the instance with the given per-instance name within this group.
+Result<Set<ConstRef<LocalInstance>>> LocalInstanceGroup::FindByInstanceName(
+    const std::string& instance_name) const {
+  auto subset = CollectToSet<LocalInstance>(
+      instances_,
+      [&instance_name](const std::unique_ptr<LocalInstance>& instance) {
+        return instance && (instance->PerInstanceName() == instance_name);
+      });
+
+  // note that inside a group, the instance name is unique. However,
+  // across groups, they can be multiple
+  return AtMostOne(subset,
+                   GenerateTooManyInstancesErrorMsg(1, kInstanceNameField));
+}
+
+Result<Set<ConstRef<LocalInstance>>> LocalInstanceGroup::FindAllInstances()
+    const {
+  auto subset = CollectToSet<LocalInstance>(
+      instances_, [](const std::unique_ptr<LocalInstance>& instance) {
+        if (instance) {
+          return true;
+        }
+        return false;
+      });
+  return subset;
+}
+
+bool LocalInstanceGroup::HasInstance(const unsigned instance_id) const {
+  for (const auto& instance : instances_) {
+    if (!instance) {
+      continue;
+    }
+    if (instance_id == instance->InstanceId()) {
+      return true;
+    }
+  }
+  return false;
+}
+
+// Records the build id; set after the device finishes booting (see the
+// build_id_ field comment in the header).
+void LocalInstanceGroup::SetBuildId(const std::string& build_id) {
+  build_id_ = build_id;
+}
+
+// Serializes this group (and its instances) using the kJson* keys that
+// InstanceDatabase::LoadGroupFromJson reads back.
+Json::Value LocalInstanceGroup::Serialize() const {
+  Json::Value group_json;
+  group_json[kJsonGroupName] = group_name_;
+  group_json[kJsonHomeDir] = home_dir_;
+  group_json[kJsonHostArtifactPath] = host_artifacts_path_;
+  group_json[kJsonProductOutPath] = product_out_path_;
+  // An unset build id is serialized as the kJsonUnknownBuildId sentinel.
+  auto build_id_opt = BuildId();
+  group_json[kJsonBuildId] = build_id_opt ? *build_id_opt : kJsonUnknownBuildId;
+  int i = 0;
+  Json::Value instances_array_json;
+  for (const auto& instance : instances_) {
+    Json::Value instance_json = Serialize(instance);
+    instance_json[kJsonParent] = group_name_;
+    instances_array_json[i] = instance_json;
+    i++;
+  }
+  group_json[kJsonInstances] = instances_array_json;
+  return group_json;
+}
+
+// Serializes one instance; a null entry yields an empty JSON object.
+Json::Value LocalInstanceGroup::Serialize(
+    const std::unique_ptr<LocalInstance>& instance) const {
+  Json::Value instance_json;
+  if (!instance) {
+    return instance_json;
+  }
+  instance_json[LocalInstance::kJsonInstanceName] = instance->PerInstanceName();
+  // The id is stored as a string; LoadGroupFromJson parses it back.
+  instance_json[LocalInstance::kJsonInstanceId] =
+      std::to_string(instance->InstanceId());
+  return instance_json;
+}
+
+}  // namespace selector
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/selector/instance_group_record.h b/host/commands/cvd/selector/instance_group_record.h
new file mode 100644
index 0000000..a302092
--- /dev/null
+++ b/host/commands/cvd/selector/instance_group_record.h
@@ -0,0 +1,128 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <memory>
+#include <string>
+
+#include <gtest/gtest.h>
+
+#include "common/libs/utils/json.h"
+#include "common/libs/utils/result.h"
+#include "host/commands/cvd/selector/constant_reference.h"
+#include "host/commands/cvd/selector/instance_database_types.h"
+#include "host/commands/cvd/selector/instance_record.h"
+
+namespace cuttlefish {
+namespace selector {
+
+class InstanceDatabase;
+
+/**
+ * TODO(kwstephenkim): add more methods, fields, and abstract out Instance
+ *
+ * Needs design changes to support both Remote Instances
+ */
+class LocalInstanceGroup {
+  // InstanceDatabase owns and constructs groups via the private ctor.
+  friend InstanceDatabase;
+
+ public:
+  // Copy operations deep-copy the instance set, re-parenting each
+  // LocalInstance to the destination group (see CopyInstances).
+  LocalInstanceGroup(const LocalInstanceGroup& src);
+  LocalInstanceGroup& operator=(const LocalInstanceGroup& src);
+
+  const std::string& InternalGroupName() const { return internal_group_name_; }
+  const std::string& GroupName() const { return group_name_; }
+  const std::string& HomeDir() const { return home_dir_; }
+  const std::string& HostArtifactsPath() const { return host_artifacts_path_; }
+  const std::string& ProductOutPath() const { return product_out_path_; }
+  const std::optional<std::string>& BuildId() const { return build_id_; }
+  Result<std::string> GetCuttlefishConfigPath() const;
+  const Set<std::unique_ptr<LocalInstance>>& Instances() const {
+    return instances_;
+  }
+  // JSON snapshot readable by InstanceDatabase::LoadGroupFromJson.
+  Json::Value Serialize() const;
+
+  /**
+   * return error if instance id of instance is taken AND that taken id
+   * belongs to this group
+   */
+  Result<void> AddInstance(const unsigned instance_id,
+                           const std::string& instance_name);
+  bool HasInstance(const unsigned instance_id) const;
+  void SetBuildId(const std::string& build_id);
+  Result<Set<ConstRef<LocalInstance>>> FindById(const unsigned id) const;
+  /**
+   * Find by per-instance name.
+   *
+   * If the device name is cvd-foo or cvd-4, "cvd" is the group name,
+   * "foo" or "4" is the per-instance names, and "cvd-foo" or "cvd-4" is
+   * the device name.
+   */
+  Result<Set<ConstRef<LocalInstance>>> FindByInstanceName(
+      const std::string& instance_name) const;
+
+  // returns all instances in the dedicated data type
+  Result<Set<ConstRef<LocalInstance>>> FindAllInstances() const;
+
+ private:
+  struct InstanceGroupParam {
+    std::string group_name;
+    std::string home_dir;
+    std::string host_artifacts_path;
+    std::string product_out_path;
+  };
+  LocalInstanceGroup(const InstanceGroupParam& param);
+  // Eventually copies the instances of a src to *this
+  Set<std::unique_ptr<LocalInstance>> CopyInstances(
+      const Set<std::unique_ptr<LocalInstance>>& src_instances);
+  Json::Value Serialize(const std::unique_ptr<LocalInstance>& instance) const;
+
+  std::string home_dir_;
+  std::string host_artifacts_path_;
+  std::string product_out_path_;
+
+  // for now, "cvd", which is "cvd-".remove_suffix(1)
+  std::string internal_group_name_;
+  std::string group_name_;
+  // This will be initialized after the LocalInstanceGroup is created,
+  // which is also after the device completes the boot.
+  std::optional<std::string> build_id_;
+  Set<std::unique_ptr<LocalInstance>> instances_;
+
+  // JSON keys shared between Serialize() and LoadGroupFromJson().
+  static constexpr const char kJsonGroupName[] = "Group Name";
+  static constexpr const char kJsonHomeDir[] = "Runtime/Home Dir";
+  static constexpr const char kJsonHostArtifactPath[] = "Host Tools Dir";
+  static constexpr const char kJsonProductOutPath[] = "Product Out Dir";
+  static constexpr const char kJsonInstances[] = "Instances";
+  static constexpr const char kJsonParent[] = "Parent Group";
+  static constexpr const char kJsonBuildId[] = "Build Id";
+  static constexpr const char kJsonUnknownBuildId[] = "Unknown Build";
+
+  /*
+   * Expose constructor to the tests in InstanceRecord unit test suite.
+   *
+   * To create InstanceRecords, we should create InstanceGroup first.
+   */
+  FRIEND_TEST(CvdInstanceRecordUnitTest, Fields);
+  FRIEND_TEST(CvdInstanceRecordUnitTest, Copy);
+
+  friend class CvdInstanceGroupUnitTest;
+  friend class CvdInstanceGroupSearchUnitTest;
+};
+
+}  // namespace selector
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/selector/instance_record.cpp b/host/commands/cvd/selector/instance_record.cpp
new file mode 100644
index 0000000..afe6ac4
--- /dev/null
+++ b/host/commands/cvd/selector/instance_record.cpp
@@ -0,0 +1,81 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/commands/cvd/selector/instance_record.h"
+
+#include "host/commands/cvd/selector/instance_database_utils.h"
+#include "host/commands/cvd/selector/instance_group_record.h"
+
+namespace cuttlefish {
+namespace selector {
+
+LocalInstance::LocalInstance(const LocalInstanceGroup& parent_group,
+                             const unsigned instance_id,
+                             const std::string& instance_name)
+    : parent_group_(parent_group),
+      instance_id_(instance_id),
+      internal_name_(std::to_string(instance_id_)),
+      per_instance_name_(instance_name) {}
+
+unsigned LocalInstance::InstanceId() const { return instance_id_; }
+
+std::string LocalInstance::InternalDeviceName() const {
+  return LocalDeviceNameRule(parent_group_.InternalGroupName(), internal_name_);
+}
+
+const std::string& LocalInstance::InternalName() const {
+  return internal_name_;
+}
+
+std::string LocalInstance::DeviceName() const {
+  return LocalDeviceNameRule(parent_group_.GroupName(), per_instance_name_);
+}
+
+const std::string& LocalInstance::PerInstanceName() const {
+  return per_instance_name_;
+}
+
+const LocalInstanceGroup& LocalInstance::ParentGroup() const {
+  return parent_group_;
+}
+
+LocalInstance::Copy LocalInstance::GetCopy() const {
+  Copy copy(*this);
+  return copy;
+}
+
+LocalInstance::Copy::Copy(const LocalInstance& src)
+    : internal_name_{src.InternalName()},
+      internal_device_name_{src.InternalDeviceName()},
+      instance_id_{src.InstanceId()},
+      per_instance_name_{src.PerInstanceName()},
+      device_name_{src.DeviceName()},
+      mock_group_{MockParentParam{
+          .home_dir = src.ParentGroup().HomeDir(),
+          .host_artifacts_path = src.ParentGroup().HostArtifactsPath(),
+          .internal_group_name = src.ParentGroup().InternalGroupName(),
+          .group_name = src.ParentGroup().GroupName(),
+          .build_id = src.ParentGroup().BuildId()}} {}
+
+LocalInstance::Copy::MockParent::MockParent(const MockParentParam& params)
+    : home_dir_{params.home_dir},
+      host_artifacts_path_{params.host_artifacts_path},
+      internal_group_name_{params.internal_group_name},
+      group_name_{params.group_name},
+      build_id_{params.build_id} {}
+
+}  // namespace selector
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/selector/instance_record.h b/host/commands/cvd/selector/instance_record.h
new file mode 100644
index 0000000..f6b86b5
--- /dev/null
+++ b/host/commands/cvd/selector/instance_record.h
@@ -0,0 +1,132 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <optional>
+#include <string>
+
+#include "common/libs/utils/result.h"
+
+namespace cuttlefish {
+namespace selector {
+
+class LocalInstanceGroup;
+
+/**
+ * TODO(kwstephenkim): add more methods, fields, and abstract out Instance
+ *
+ * Needs design changes to support both Remote and Local Instances
+ */
+class LocalInstance {
+  friend class LocalInstanceGroup;
+  friend class InstanceDatabase;
+
+ public:
+  /* names:
+   *
+   * Many components in Cuttlefish traditionally expect the name to be "cvd-N,"
+   * and rely on "N" to avoid conflicts in the global resource uses.
+   *
+   * Thus, we will eventually maintain the internal device name for those
+   * existing cuttlefish implementation, and the user-given name.
+   *
+   */
+  const std::string& InternalName() const;
+  std::string InternalDeviceName() const;
+
+  unsigned InstanceId() const;
+  const std::string& PerInstanceName() const;
+  std::string DeviceName() const;
+
+  const LocalInstanceGroup& ParentGroup() const;
+
+  class Copy {
+    friend class LocalInstance;
+    struct MockParentParam {
+      std::string home_dir;
+      std::string host_artifacts_path;
+      std::string internal_group_name;
+      std::string group_name;
+      std::optional<std::string> build_id;
+    };
+
+   public:
+    /* when Copy is used, it is already disconnected from the original parent
+     * group. Thus, it should carry the snapshot of needed information about
+     * the parent group
+     */
+    class MockParent {
+     public:
+      MockParent(const MockParentParam&);
+      const std::string& InternalGroupName() const {
+        return internal_group_name_;
+      }
+      const std::string& GroupName() const { return group_name_; }
+      const std::string& HomeDir() const { return home_dir_; }
+      const std::string& HostArtifactsPath() const {
+        return host_artifacts_path_;
+      }
+      const std::optional<std::string>& BuildId() const { return build_id_; }
+
+     private:
+      std::string home_dir_;
+      std::string host_artifacts_path_;
+      std::string internal_group_name_;
+      std::string group_name_;
+      std::optional<std::string> build_id_;
+    };
+    Copy(const LocalInstance& src);
+    const std::string& InternalName() const { return internal_name_; }
+    const std::string& InternalDeviceName() const {
+      return internal_device_name_;
+    }
+    unsigned InstanceId() const { return instance_id_; }
+    const std::string& PerInstanceName() const { return per_instance_name_; }
+    const std::string& DeviceName() const { return device_name_; }
+    const MockParent& ParentGroup() const { return mock_group_; }
+
+   private:
+    std::string internal_name_;
+    std::string internal_device_name_;
+    unsigned instance_id_;
+    std::string per_instance_name_;
+    std::string device_name_;
+    MockParent mock_group_;
+  };
+  Copy GetCopy() const;
+
+ private:
+  LocalInstance(const LocalInstanceGroup& parent_group,
+                const unsigned instance_id, const std::string& instance_name);
+
+  static constexpr const char kJsonInstanceId[] = "Instance Id";
+  static constexpr const char kJsonInstanceName[] = "Per-Instance Name";
+
+  const LocalInstanceGroup& parent_group_;
+  unsigned instance_id_;
+  std::string internal_name_;  ///< for now, it is to_string(instance_id_)
+  /** the instance specific name to be appended to the group name
+   *
+   * by default, to_string(instance_id_). The default value is decided by
+   * InstanceGroupRecord, as that's the only class that will create this
+   * instance
+   */
+  std::string per_instance_name_;
+};
+
+}  // namespace selector
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/selector/instance_selector.cpp b/host/commands/cvd/selector/instance_selector.cpp
new file mode 100644
index 0000000..38281d1
--- /dev/null
+++ b/host/commands/cvd/selector/instance_selector.cpp
@@ -0,0 +1,121 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/commands/cvd/selector/instance_selector.h"
+
+#include <android-base/parseint.h>
+
+#include "host/commands/cvd/selector/device_selector_utils.h"
+#include "host/commands/cvd/selector/instance_database_types.h"
+#include "host/commands/cvd/selector/selector_constants.h"
+#include "host/libs/config/cuttlefish_config.h"
+
+namespace cuttlefish {
+namespace selector {
+
+Result<InstanceSelector> InstanceSelector::GetSelector(
+    const cvd_common::Args& selector_args, const Queries& extra_queries,
+    const cvd_common::Envs& envs, const uid_t uid) {
+  cvd_common::Args selector_args_copied{selector_args};
+  SelectorCommonParser common_parser =
+      CF_EXPECT(SelectorCommonParser::Parse(uid, selector_args_copied, envs));
+  std::stringstream unused_args;
+  unused_args << "{";
+  for (const auto& arg : selector_args_copied) {
+    unused_args << arg << ", ";
+  }
+  std::string unused_arg_list = unused_args.str();
+  if (!selector_args_copied.empty()) {
+    unused_arg_list.pop_back();
+    unused_arg_list.pop_back();
+  }
+  unused_arg_list.append("}");
+  if (!selector_args_copied.empty()) {
+    LOG(WARNING) << "Warning: there are unused selector options. "
+               << unused_arg_list;
+  }
+
+  // search by instance and instances
+  // search by HOME if overridden
+  Queries queries;
+  if (IsHomeOverridden(common_parser)) {
+    CF_EXPECT(common_parser.Home());
+    queries.emplace_back(kHomeField, common_parser.Home().value());
+  }
+  if (common_parser.GroupName()) {
+    queries.emplace_back(kGroupNameField, common_parser.GroupName().value());
+  }
+  if (common_parser.PerInstanceNames()) {
+    const auto per_instance_names = common_parser.PerInstanceNames().value();
+    CF_EXPECT_LE(per_instance_names.size(), 1,
+                 "Instance Selector only picks up to 1 instance and thus "
+                 "only take up to 1 instance_name");
+    if (!per_instance_names.empty()) {
+      queries.emplace_back(kInstanceNameField, per_instance_names.front());
+    }
+  }
+  if (Contains(envs, kCuttlefishInstanceEnvVarName)) {
+    int id;
+    const std::string instance_id_str = envs.at(kCuttlefishInstanceEnvVarName);
+    if (android::base::ParseInt(instance_id_str, std::addressof(id))) {
+      queries.emplace_back(kInstanceIdField, std::to_string(id));
+    } else {
+      LOG(ERROR) << kCuttlefishInstanceEnvVarName << "=" << instance_id_str
+                 << " was given but it must have one valid instance ID.";
+    }
+  }
+
+  for (const auto& extra_query : extra_queries) {
+    queries.push_back(extra_query);
+  }
+
+  InstanceSelector instance_selector(uid, queries);
+  return instance_selector;
+}
+
+bool InstanceSelector::IsHomeOverridden(
+    const SelectorCommonParser& common_parser) {
+  auto home_overridden_result = common_parser.HomeOverridden();
+  if (!home_overridden_result.ok()) {
+    return false;
+  }
+  return *home_overridden_result;
+}
+
+Result<LocalInstance::Copy> InstanceSelector::FindInstance(
+    const InstanceDatabase& instance_database) {
+  if (queries_.empty()) {
+    auto default_instance = CF_EXPECT(FindDefaultInstance(instance_database));
+    return default_instance;
+  }
+
+  auto instances = CF_EXPECT(instance_database.FindInstances(queries_));
+  CF_EXPECT(instances.size() == 1, "instances.size() = " << instances.size());
+  auto& instance = *(instances.cbegin());
+  return instance.Get().GetCopy();
+}
+
+Result<LocalInstance::Copy> InstanceSelector::FindDefaultInstance(
+    const InstanceDatabase& instance_database) {
+  auto group = CF_EXPECT(GetDefaultGroup(instance_database, client_uid_));
+  const auto instances = CF_EXPECT(group.FindAllInstances());
+  CF_EXPECT_EQ(instances.size(), 1,
+               "Default instance is the single instance in the default group.");
+  return instances.cbegin()->Get().GetCopy();
+}
+
+}  // namespace selector
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/selector/instance_selector.h b/host/commands/cvd/selector/instance_selector.h
new file mode 100644
index 0000000..d55c947
--- /dev/null
+++ b/host/commands/cvd/selector/instance_selector.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <sys/types.h>
+
+#include "common/libs/utils/result.h"
+#include "common/libs/utils/users.h"
+#include "host/commands/cvd/selector/instance_database.h"
+#include "host/commands/cvd/selector/selector_common_parser.h"
+#include "host/commands/cvd/types.h"
+
+namespace cuttlefish {
+namespace selector {
+
+class InstanceSelector {
+ public:
+  static Result<InstanceSelector> GetSelector(
+      const cvd_common::Args& selector_args, const Queries& extra_queries,
+      const cvd_common::Envs& envs, const uid_t uid);
+  /*
+   * If default, try running single instance group. If multiple, try to find
+   * HOME == SystemWideUserHome. If not exists, give up.
+   *
+   * If group given, find group, and check if all instance names are included
+   *
+   * If group not given, not yet supported. Will be in next CLs
+   */
+  Result<LocalInstance::Copy> FindInstance(
+      const InstanceDatabase& instance_database);
+
+ private:
+  InstanceSelector(const uid_t uid, const Queries& queries)
+      : client_uid_{uid}, queries_(queries) {}
+  static bool IsHomeOverridden(const SelectorCommonParser& common_parser);
+
+  Result<LocalInstance::Copy> FindDefaultInstance(
+      const InstanceDatabase& instance_database);
+  bool HasCuttlefishInstance() const;
+
+  const uid_t client_uid_;
+  const Queries queries_;
+};
+
+}  // namespace selector
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/selector/selector_common_parser.cpp b/host/commands/cvd/selector/selector_common_parser.cpp
new file mode 100644
index 0000000..53030fa
--- /dev/null
+++ b/host/commands/cvd/selector/selector_common_parser.cpp
@@ -0,0 +1,115 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/commands/cvd/selector/selector_common_parser.h"
+
+#include <unistd.h>
+
+#include "common/libs/utils/contains.h"
+#include "common/libs/utils/flag_parser.h"
+#include "common/libs/utils/users.h"
+#include "host/commands/cvd/selector/instance_database_utils.h"
+#include "host/commands/cvd/selector/selector_constants.h"
+#include "host/commands/cvd/selector/selector_option_parser_utils.h"
+
+namespace cuttlefish {
+namespace selector {
+
+Result<SelectorCommonParser> SelectorCommonParser::Parse(
+    const uid_t client_uid, cvd_common::Args& selector_args,
+    const cvd_common::Envs& envs) {
+  std::string system_wide_home = CF_EXPECT(SystemWideUserHome(client_uid));
+  SelectorCommonParser parser(system_wide_home, envs);
+  CF_EXPECT(parser.ParseOptions(selector_args));
+  return std::move(parser);
+}
+
+SelectorCommonParser::SelectorCommonParser(const std::string& client_user_home,
+                                           const cvd_common::Envs& envs)
+    : client_user_home_(client_user_home), envs_{envs} {}
+
+Result<bool> SelectorCommonParser::HomeOverridden() const {
+  return Contains(envs_, "HOME") && (client_user_home_ != envs_.at("HOME"));
+}
+
+std::optional<std::string> SelectorCommonParser::Home() const {
+  if (Contains(envs_, "HOME")) {
+    return envs_.at("HOME");
+  }
+  return std::nullopt;
+}
+
+Result<void> SelectorCommonParser::ParseOptions(
+    cvd_common::Args& selector_args) {
+  // Handling name-related options
+  auto group_name_flag =
+      CF_EXPECT(SelectorFlags::Get().GetFlag(SelectorFlags::kGroupName));
+  auto instance_name_flag =
+      CF_EXPECT(SelectorFlags::Get().GetFlag(SelectorFlags::kInstanceName));
+  std::optional<std::string> group_name_opt =
+      CF_EXPECT(group_name_flag.FilterFlag<std::string>(selector_args));
+  std::optional<std::string> instance_name_opt =
+      CF_EXPECT(instance_name_flag.FilterFlag<std::string>(selector_args));
+
+  NameFlagsParam name_flags_param{.group_name = group_name_opt,
+                                  .instance_names = instance_name_opt};
+  auto parsed_name_flags = CF_EXPECT(HandleNameOpts(name_flags_param));
+  group_name_ = parsed_name_flags.group_name;
+  instance_names_ = parsed_name_flags.instance_names;
+  return {};
+}
+
+Result<SelectorCommonParser::ParsedNameFlags>
+SelectorCommonParser::HandleNameOpts(const NameFlagsParam& name_flags) const {
+  std::optional<std::string> group_name_output;
+  std::optional<std::vector<std::string>> instance_names_output;
+  if (name_flags.group_name) {
+    group_name_output = CF_EXPECT(HandleGroupName(name_flags.group_name));
+  }
+
+  if (name_flags.instance_names) {
+    instance_names_output =
+        std::move(CF_EXPECT(HandleInstanceNames(name_flags.instance_names)));
+  }
+  return {ParsedNameFlags{.group_name = std::move(group_name_output),
+                          .instance_names = std::move(instance_names_output)}};
+}
+
+Result<std::vector<std::string>> SelectorCommonParser::HandleInstanceNames(
+    const std::optional<std::string>& per_instance_names) const {
+  CF_EXPECT(per_instance_names && !per_instance_names.value().empty());
+
+  auto instance_names =
+      CF_EXPECT(SeparateButWithNoEmptyToken(per_instance_names.value(), ","));
+  for (const auto& instance_name : instance_names) {
+    CF_EXPECT(IsValidInstanceName(instance_name));
+  }
+  std::unordered_set<std::string> duplication_check{instance_names.cbegin(),
+                                                    instance_names.cend()};
+  CF_EXPECT(duplication_check.size() == instance_names.size());
+  return instance_names;
+}
+
+Result<std::string> SelectorCommonParser::HandleGroupName(
+    const std::optional<std::string>& group_name) const {
+  CF_EXPECT(group_name && !group_name.value().empty());
+  CF_EXPECT(IsValidGroupName(group_name.value()), group_name.value()
+                                                      << " failed");
+  return {group_name.value()};
+}
+
+}  // namespace selector
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/selector/selector_common_parser.h b/host/commands/cvd/selector/selector_common_parser.h
new file mode 100644
index 0000000..14d2555
--- /dev/null
+++ b/host/commands/cvd/selector/selector_common_parser.h
@@ -0,0 +1,88 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <sys/types.h>
+
+#include <optional>
+#include <string>
+#include <unordered_map>
+#include <unordered_set>
+#include <vector>
+
+#include "common/libs/utils/result.h"
+#include "host/commands/cvd/types.h"
+
+namespace cuttlefish {
+namespace selector {
+
+class SelectorCommonParser {
+ public:
+  // Parses common selector options and drops the consumed selector_args.
+  static Result<SelectorCommonParser> Parse(const uid_t client_uid,
+                                            cvd_common::Args& selector_args,
+                                            const cvd_common::Envs& envs);
+
+  std::optional<std::string> GroupName() const { return group_name_; }
+
+  std::optional<std::vector<std::string>> PerInstanceNames() const {
+    return instance_names_;
+  }
+
+  // CF_ERR --> unknown, true --> overridden, false --> not overridden.
+  Result<bool> HomeOverridden() const;
+  std::optional<std::string> Home() const;
+
+  /*
+   * returns if selector flags has device select options: e.g. --group_name
+   *
+   * this is mainly to see if cvd start is about the default instance.
+   */
+  bool HasDeviceSelectOption() const { return group_name_ || instance_names_; }
+
+ private:
+  SelectorCommonParser(const std::string& client_user_home,
+                       const cvd_common::Envs& envs);
+
+  Result<void> ParseOptions(cvd_common::Args& selector_args);
+  struct ParsedNameFlags {
+    std::optional<std::string> group_name;
+    std::optional<std::vector<std::string>> instance_names;
+  };
+  struct NameFlagsParam {
+    std::optional<std::string> group_name;
+    std::optional<std::string> instance_names;
+  };
+  Result<ParsedNameFlags> HandleNameOpts(
+      const NameFlagsParam& name_flags) const;
+  Result<std::string> HandleGroupName(
+      const std::optional<std::string>& group_name) const;
+  Result<std::vector<std::string>> HandleInstanceNames(
+      const std::optional<std::string>& per_instance_names) const;
+
+  // Temporarily keeps the leftover of the input cmd_args;
+  // never used after parsing is done.
+  std::string client_user_home_;
+  const cvd_common::Envs& envs_;
+
+  // processed result
+  std::optional<std::string> group_name_;
+  std::optional<std::vector<std::string>> instance_names_;
+};
+
+}  // namespace selector
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/selector/selector_constants.cpp b/host/commands/cvd/selector/selector_constants.cpp
new file mode 100644
index 0000000..4ca4a96
--- /dev/null
+++ b/host/commands/cvd/selector/selector_constants.cpp
@@ -0,0 +1,177 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/commands/cvd/selector/selector_constants.h"
+
+#include <sys/stat.h>
+#include <unistd.h>
+
+#include <deque>
+#include <sstream>
+
+#include "common/libs/fs/shared_buf.h"
+#include "common/libs/fs/shared_fd.h"
+#include "common/libs/utils/environment.h"
+#include "common/libs/utils/files.h"
+#include "common/libs/utils/users.h"
+
+namespace cuttlefish {
+namespace selector {
+
+enum class OwnershipType { kUser, kGroup, kOthers };
+
+static OwnershipType GetOwnershipType(const struct stat& file_stat,
+                                      const uid_t uid, const gid_t gid) {
+  if (file_stat.st_uid == uid) {
+    return OwnershipType::kUser;
+  }
+  if (file_stat.st_gid == gid) {
+    return OwnershipType::kGroup;
+  }
+  return OwnershipType::kOthers;
+}
+
+struct RequirePermission {
+  const bool needs_read_permission;
+  const bool needs_write_permission;
+  const bool needs_exec_permission;
+};
+
+static Result<void> CheckPermission(const OwnershipType ownership_type,
+                                    const struct stat& file_stat,
+                                    const RequirePermission& perm) {
+  const auto perm_bits = file_stat.st_mode;
+
+  switch (ownership_type) {
+    case OwnershipType::kUser: {
+      CF_EXPECT(!perm.needs_read_permission || (perm_bits & S_IRUSR));
+      CF_EXPECT(!perm.needs_write_permission || (perm_bits & S_IWUSR));
+      CF_EXPECT(!perm.needs_exec_permission || (perm_bits & S_IXUSR));
+      return {};
+    }
+    case OwnershipType::kGroup: {
+      CF_EXPECT(!perm.needs_read_permission || (perm_bits & S_IRGRP));
+      CF_EXPECT(!perm.needs_write_permission || (perm_bits & S_IWGRP));
+      CF_EXPECT(!perm.needs_exec_permission || (perm_bits & S_IXGRP));
+      return {};
+    }
+    case OwnershipType::kOthers:
+      break;
+  }
+  CF_EXPECT(!perm.needs_read_permission || (perm_bits & S_IROTH));
+  CF_EXPECT(!perm.needs_write_permission || (perm_bits & S_IWOTH));
+  CF_EXPECT(!perm.needs_exec_permission || (perm_bits & S_IXOTH));
+  return {};
+}
+
+static Result<void> CheckPermission(const std::string& dir,
+                                    const uid_t client_uid,
+                                    const gid_t client_gid) {
+  CF_EXPECT(!dir.empty() && DirectoryExists(dir));
+  struct stat dir_stat;
+  CF_EXPECT_EQ(stat(dir.c_str(), std::addressof(dir_stat)), 0);
+
+  const auto server_ownership = GetOwnershipType(dir_stat, getuid(), getgid());
+  CF_EXPECT(CheckPermission(server_ownership, dir_stat,
+                            RequirePermission{.needs_read_permission = true,
+                                              .needs_write_permission = true,
+                                              .needs_exec_permission = true}));
+  const auto client_ownership =
+      GetOwnershipType(dir_stat, client_uid, client_gid);
+  CF_EXPECT(CheckPermission(client_ownership, dir_stat,
+                            RequirePermission{.needs_read_permission = true,
+                                              .needs_write_permission = true,
+                                              .needs_exec_permission = true}));
+  return {};
+}
+
+Result<std::string> ParentOfAutogeneratedHomes(const uid_t client_uid,
+                                               const gid_t client_gid) {
+  std::deque<std::string> try_dirs = {
+      StringFromEnv("TMPDIR", ""),
+      StringFromEnv("TEMP", ""),
+      StringFromEnv("TMP", ""),
+      "/tmp",
+      "/var/tmp",
+      "/usr/tmp",
+  };
+
+  auto system_wide_home = SystemWideUserHome(client_uid);
+  if (system_wide_home.ok()) {
+    try_dirs.emplace_back(*system_wide_home);
+  }
+  try_dirs.emplace_back(AbsolutePath("."));
+  while (!try_dirs.empty()) {
+    const auto candidate = std::move(try_dirs.front());
+    try_dirs.pop_front();
+    if (candidate.empty() || !EnsureDirectoryExists(candidate).ok()) {
+      continue;
+    }
+    CF_EXPECT(CheckPermission(candidate, client_uid, client_gid));
+    return AbsolutePath(candidate);
+  }
+  return CF_ERR("Tried all candidate directories but none was read-writable.");
+}
+
+CvdFlag<std::string> SelectorFlags::GroupNameFlag(const std::string& name) {
+  CvdFlag<std::string> group_name{name};
+  std::stringstream group_name_help;
+  group_name_help << "--" << name << "=<"
+                  << "name of the instance group>";
+  group_name.SetHelpMessage(group_name_help.str());
+  return group_name;
+}
+
+CvdFlag<std::string> SelectorFlags::InstanceNameFlag(const std::string& name) {
+  CvdFlag<std::string> instance_name{name};
+  std::stringstream instance_name_help;
+  instance_name_help << "--" << name << "=<"
+                     << "comma-separated names of the instances>";
+  instance_name.SetHelpMessage(instance_name_help.str());
+  return instance_name;
+}
+
+CvdFlag<bool> SelectorFlags::DisableDefaultGroupFlag(const std::string& name,
+                                                     const bool default_val) {
+  CvdFlag<bool> disable_default_group(name, default_val);
+  std::stringstream help;
+  help << "--" << name << "=true not to create the default instance group.";
+  disable_default_group.SetHelpMessage(help.str());
+  return disable_default_group;
+}
+
+CvdFlag<bool> SelectorFlags::AcquireFileLockFlag(const std::string& name,
+                                                 const bool default_val) {
+  CvdFlag<bool> acquire_file_lock(name, default_val);
+  std::stringstream help;
+  help << "--" << name
+       << "=false for cvd server not to acquire lock file locks.";
+  acquire_file_lock.SetHelpMessage(help.str());
+  return acquire_file_lock;
+}
+
+const SelectorFlags& SelectorFlags::Get() {
+  static SelectorFlags singleton_selector_flags;
+  return singleton_selector_flags;
+}
+
+const SelectorFlags SelectorFlags::New() {
+  SelectorFlags selector_flags;
+  return selector_flags;
+}
+
+}  // namespace selector
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/selector/selector_constants.h b/host/commands/cvd/selector/selector_constants.h
new file mode 100644
index 0000000..9fee988
--- /dev/null
+++ b/host/commands/cvd/selector/selector_constants.h
@@ -0,0 +1,102 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <sys/types.h>
+
+#include <string>
+
+#include "common/libs/utils/result.h"
+#include "host/commands/cvd/flag.h"
+
+namespace cuttlefish {
+namespace selector {
+
+/** The direct parent of auto-generated runtime directories, which
+ * is recommended to be short.
+ *
+ * Try these one by one in order, and append /.cf
+ *
+ * 1. $TMPDIR
+ * 2. $TEMP
+ * 3. $TMP
+ * 4. /tmp
+ * 5. /var/tmp
+ * 6. /usr/tmp
+ * 7. HOME of uid
+ *
+ */
+Result<std::string> ParentOfAutogeneratedHomes(const uid_t client_uid,
+                                               const gid_t client_gid);
+
+/*
+ * These are fields in instance database
+ *
+ */
+constexpr char kGroupNameField[] = "group_name";
+constexpr char kHomeField[] = "home";
+constexpr char kInstanceIdField[] = "instance_id";
+/* per_instance_name
+ *
+ * by default, to_string(instance_id), and users can override it
+ */
+constexpr char kInstanceNameField[] = "instance_name";
+
+/**
+ * The authentic collection of selector flags
+ *
+ */
+// names of the flags, which are also used for search
+
+class SelectorFlags {
+ public:
+  static constexpr char kGroupName[] = "group_name";
+  static constexpr char kInstanceName[] = "instance_name";
+  static constexpr char kAcquireFileLock[] = "acquire_file_lock";
+  static constexpr char kAcquireFileLockEnv[] = "CVD_ACQUIRE_FILE_LOCK";
+  static constexpr char kDisableDefaultGroup[] = "disable_default_group";
+  static const SelectorFlags& Get();
+  static const SelectorFlags New();
+
+  Result<CvdFlagProxy> GetFlag(const std::string& search_key) const {
+    auto flag = CF_EXPECT(flags_.GetFlag(search_key));
+    return flag;
+  }
+
+  std::vector<CvdFlagProxy> Flags() const { return flags_.Flags(); }
+  const auto& FlagsAsCollection() const { return flags_; }
+
+ private:
+  SelectorFlags() {
+    flags_.EnrollFlag(GroupNameFlag(kGroupName));
+    flags_.EnrollFlag(InstanceNameFlag(kInstanceName));
+    flags_.EnrollFlag(DisableDefaultGroupFlag(kDisableDefaultGroup, false));
+    flags_.EnrollFlag(AcquireFileLockFlag(kAcquireFileLock, true));
+  }
+
+  CvdFlag<std::string> GroupNameFlag(const std::string& name);
+  CvdFlag<std::string> InstanceNameFlag(const std::string& name);
+  CvdFlag<bool> DisableDefaultGroupFlag(const std::string& name,
+                                        const bool default_val);
+  CvdFlag<bool> AcquireFileLockFlag(const std::string& name,
+                                    const bool default_val);
+
+  FlagCollection flags_;
+};
+
+}  // namespace selector
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/selector/selector_option_parser_utils.cpp b/host/commands/cvd/selector/selector_option_parser_utils.cpp
new file mode 100644
index 0000000..7eda290
--- /dev/null
+++ b/host/commands/cvd/selector/selector_option_parser_utils.cpp
@@ -0,0 +1,34 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/commands/cvd/selector/selector_option_parser_utils.h"
+
+#include <android-base/strings.h>
+
+namespace cuttlefish {
+namespace selector {
+
+Result<std::vector<std::string>> SeparateButWithNoEmptyToken(
+    const std::string& input, const std::string& delimiter) {
+  auto tokens = android::base::Split(input, delimiter);
+  for (const auto& t : tokens) {
+    CF_EXPECT(!t.empty());
+  }
+  return tokens;
+}
+
+}  // namespace selector
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/selector/selector_option_parser_utils.h b/host/commands/cvd/selector/selector_option_parser_utils.h
new file mode 100644
index 0000000..32eb9be
--- /dev/null
+++ b/host/commands/cvd/selector/selector_option_parser_utils.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <optional>
+#include <string>
+#include <vector>
+
+#include "common/libs/utils/result.h"
+
+namespace cuttlefish {
+namespace selector {
+
+/*
+ * @return any parsing successfully and actually happened
+ */
+template <typename T>
+Result<void> FilterSelectorFlag(std::vector<std::string>& args,
+                                const std::string& flag_name,
+                                std::optional<T>& value_opt) {
+  value_opt = std::nullopt;
+  const int args_initial_size = args.size();
+  if (args_initial_size == 0) {
+    return {};
+  }
+
+  T value;
+  CF_EXPECT(ParseFlags({GflagsCompatFlag(flag_name, value)}, args),
+            "Failed to parse --" << flag_name);
+  if (args.size() == args_initial_size) {
+    // not consumed
+    return {};
+  }
+  value_opt = value;
+  return {};
+}
+
+/*
+ * android::base::Split by delimiter but returns CF_ERR if any split token is
+ * empty
+ */
+Result<std::vector<std::string>> SeparateButWithNoEmptyToken(
+    const std::string& input, const std::string& delimiter);
+
+}  // namespace selector
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/selector/start_selector_parser.cpp b/host/commands/cvd/selector/start_selector_parser.cpp
new file mode 100644
index 0000000..4a6e6ad
--- /dev/null
+++ b/host/commands/cvd/selector/start_selector_parser.cpp
@@ -0,0 +1,454 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/commands/cvd/selector/start_selector_parser.h"
+
+#include <unistd.h>
+
+#include <iostream>
+#include <sstream>
+#include <string_view>
+
+#include <android-base/parseint.h>
+#include <android-base/strings.h>
+
+#include "common/libs/utils/contains.h"
+#include "common/libs/utils/flag_parser.h"
+#include "common/libs/utils/users.h"
+#include "host/commands/cvd/selector/instance_database_utils.h"
+#include "host/commands/cvd/selector/selector_constants.h"
+#include "host/commands/cvd/selector/selector_option_parser_utils.h"
+#include "host/commands/cvd/types.h"
+#include "host/libs/config/cuttlefish_config.h"
+#include "host/libs/config/instance_nums.h"
+
+namespace std {
+
+/* For a needed CF_EXPECT_EQ(vector, vector, msg) below
+ *
+ * the result.h included above requires this operator. The declaration must come
+ * before the header file if the operator<< is in cuttlefish namespace.
+ * Otherwise, operator<< should be in std.
+ *
+ * The namespace resolution rule is to search cuttlefish first, and then std
+ * if that fails.
+ */
+static inline std::ostream& operator<<(std::ostream& out,
+                                       const std::vector<std::string>& v) {
+  if (v.empty()) {
+    out << "{}";
+    return out;
+  }
+  out << "{";
+  if (v.size() > 1) {
+    for (auto itr = v.cbegin(); itr != v.cend() - 1; itr++) {
+      out << *itr << ", ";
+    }
+  }
+  out << v.back() << "}";
+  return out;
+}
+
+}  // namespace std
+
+namespace cuttlefish {
+namespace selector {
+
+static bool Unique(const std::vector<unsigned>& v) {
+  std::unordered_set<unsigned> hash_set(v.begin(), v.end());
+  return v.size() == hash_set.size();
+}
+
+static Result<unsigned> ParseNaturalNumber(const std::string& token) {
+  std::int32_t value;
+  CF_EXPECT(android::base::ParseInt(token, &value));
+  CF_EXPECT(value > 0);
+  return static_cast<unsigned>(value);
+}
+
+Result<StartSelectorParser> StartSelectorParser::ConductSelectFlagsParser(
+    const uid_t uid, const cvd_common::Args& selector_args,
+    const cvd_common::Args& cmd_args, const cvd_common::Envs& envs) {
+  const std::string system_wide_home = CF_EXPECT(SystemWideUserHome(uid));
+  cvd_common::Args selector_args_copied{selector_args};
+  StartSelectorParser parser(
+      system_wide_home, selector_args_copied, cmd_args, envs,
+      CF_EXPECT(SelectorCommonParser::Parse(uid, selector_args_copied, envs)));
+  CF_EXPECT(parser.ParseOptions(), "selector option flag parsing failed.");
+  return {std::move(parser)};
+}
+
+StartSelectorParser::StartSelectorParser(
+    const std::string& system_wide_user_home,
+    const cvd_common::Args& selector_args, const cvd_common::Args& cmd_args,
+    const cvd_common::Envs& envs, SelectorCommonParser&& common_parser)
+    : client_user_home_{system_wide_user_home},
+      selector_args_(selector_args),
+      cmd_args_(cmd_args),
+      envs_(envs),
+      common_parser_(std::move(common_parser)) {}
+
+std::optional<std::string> StartSelectorParser::GroupName() const {
+  return group_name_;
+}
+
+std::optional<std::vector<std::string>> StartSelectorParser::PerInstanceNames()
+    const {
+  return per_instance_names_;
+}
+
+namespace {
+
+std::optional<unsigned> TryFromCuttlefishInstance(
+    const cvd_common::Envs& envs) {
+  if (!Contains(envs, kCuttlefishInstanceEnvVarName)) {
+    return std::nullopt;
+  }
+  const auto cuttlefish_instance = envs.at(kCuttlefishInstanceEnvVarName);
+  if (cuttlefish_instance.empty()) {
+    return std::nullopt;
+  }
+  auto parsed = ParseNaturalNumber(cuttlefish_instance);
+  return parsed.ok() ? std::optional(*parsed) : std::nullopt;
+}
+
+std::optional<unsigned> TryFromUser(const cvd_common::Envs& envs) {
+  if (!Contains(envs, "USER")) {
+    return std::nullopt;
+  }
+  std::string_view user{envs.at("USER")};
+  if (user.empty() || !android::base::ConsumePrefix(&user, kVsocUserPrefix)) {
+    return std::nullopt;
+  }
+  const auto& vsoc_num = user;
+  auto vsoc_id = ParseNaturalNumber(vsoc_num.data());
+  return vsoc_id.ok() ? std::optional(*vsoc_id) : std::nullopt;
+}
+
+}  // namespace
+
+std::optional<std::vector<unsigned>>
+StartSelectorParser::InstanceFromEnvironment(
+    const InstanceFromEnvParam& params) {
+  const auto& cuttlefish_instance_env = params.cuttlefish_instance_env;
+  const auto& vsoc_suffix = params.vsoc_suffix;
+  const auto& num_instances = params.num_instances;
+
+  // see the logic in cuttlefish::InstanceFromEnvironment()
+  // defined in host/libs/config/cuttlefish_config.cpp
+  std::vector<unsigned> nums;
+  std::optional<unsigned> base;
+  if (cuttlefish_instance_env) {
+    base = *cuttlefish_instance_env;
+  }
+  if (!base && vsoc_suffix) {
+    base = *vsoc_suffix;
+  }
+  if (!base) {
+    return std::nullopt;
+  }
+  // this is guaranteed by the caller
+  // assert(num_instances != std::nullopt);
+  for (unsigned i = 0; i != *num_instances; i++) {
+    nums.emplace_back(base.value() + i);
+  }
+  return nums;
+}
+
+Result<unsigned> StartSelectorParser::VerifyNumOfInstances(
+    const VerifyNumOfInstancesParam& params,
+    const unsigned default_n_instances) const {
+  const auto& num_instances_flag = params.num_instances_flag;
+  const auto& instance_names = params.instance_names;
+  const auto& instance_nums_flag = params.instance_nums_flag;
+
+  std::optional<unsigned> num_instances;
+  if (num_instances_flag) {
+    num_instances = CF_EXPECT(ParseNaturalNumber(*num_instances_flag));
+  }
+  if (instance_names && !instance_names->empty()) {
+    auto implied_n_instances = instance_names->size();
+    if (num_instances) {
+      CF_EXPECT_EQ(*num_instances, static_cast<unsigned>(implied_n_instances),
+                   "The number of instances requested by --num_instances "
+                       << " are not the same as what is implied by "
+                       << " --instance_name.");
+    }
+    num_instances = implied_n_instances;
+  }
+  if (instance_nums_flag) {
+    std::vector<std::string> tokens =
+        android::base::Split(*instance_nums_flag, ",");
+    for (const auto& t : tokens) {
+      CF_EXPECT(ParseNaturalNumber(t), t << " must be a natural number");
+    }
+    if (!num_instances) {
+      num_instances = tokens.size();
+    }
+    CF_EXPECT_EQ(*num_instances, tokens.size(),
+                 "All information for the number of instances must match.");
+  }
+  return num_instances.value_or(default_n_instances);
+}
+
+static Result<std::vector<unsigned>> ParseInstanceNums(
+    const std::string& instance_nums_flag) {
+  std::vector<unsigned> nums;
+  std::vector<std::string> tokens =
+      android::base::Split(instance_nums_flag, ",");
+  for (const auto& t : tokens) {
+    unsigned num =
+        CF_EXPECT(ParseNaturalNumber(t), t << " must be a natural number");
+    nums.emplace_back(num);
+  }
+  CF_EXPECT(Unique(nums), "--instance_nums include duplicated numbers");
+  return nums;
+}
+
+Result<StartSelectorParser::ParsedInstanceIdsOpt>
+StartSelectorParser::HandleInstanceIds(
+    const InstanceIdsParams& instance_id_params) {
+  const auto& instance_nums = instance_id_params.instance_nums;
+  const auto& base_instance_num = instance_id_params.base_instance_num;
+  const auto& cuttlefish_instance_env =
+      instance_id_params.cuttlefish_instance_env;
+  const auto& vsoc_suffix = instance_id_params.vsoc_suffix;
+
+  // calculate and/or verify the number of instances
+  unsigned num_instances =
+      CF_EXPECT(VerifyNumOfInstances(VerifyNumOfInstancesParam{
+          .num_instances_flag = instance_id_params.num_instances,
+          .instance_names = PerInstanceNames(),
+          .instance_nums_flag = instance_nums}));
+
+  if (!instance_nums && !base_instance_num) {
+    // num_instances is given. if non-std::nullopt is returned,
+    // the base is also figured out. If base can't be figured out,
+    // std::nullopt is returned.
+    auto instance_ids = InstanceFromEnvironment(
+        {.cuttlefish_instance_env = cuttlefish_instance_env,
+         .vsoc_suffix = vsoc_suffix,
+         .num_instances = num_instances});
+    if (instance_ids) {
+      return ParsedInstanceIdsOpt(*instance_ids);
+    }
+    // the return value, n_instances is the "desired/requested" instances
+    // When instance_ids set isn't figured out, n_instances is not meant to
+    // be always zero; it could be any natural number.
+    return ParsedInstanceIdsOpt(num_instances);
+  }
+
+  InstanceNumsCalculator calculator;
+  calculator.NumInstances(static_cast<std::int32_t>(num_instances));
+  if (instance_nums) {
+    CF_EXPECT(base_instance_num == std::nullopt,
+              "-base_instance_num and -instance_nums are mutually exclusive.");
+    std::vector<unsigned> parsed_nums =
+        CF_EXPECT(ParseInstanceNums(*instance_nums));
+    return ParsedInstanceIdsOpt(parsed_nums);
+  }
+  if (base_instance_num) {
+    unsigned base = CF_EXPECT(ParseNaturalNumber(*base_instance_num));
+    calculator.BaseInstanceNum(static_cast<std::int32_t>(base));
+  }
+  auto instance_ids = std::move(CF_EXPECT(calculator.CalculateFromFlags()));
+  CF_EXPECT(!instance_ids.empty(),
+            "CalculateFromFlags() must be called when --num_instances or "
+                << "--base_instance_num is given, and must not return an "
+                << "empty set");
+  auto instance_ids_vector =
+      std::vector<unsigned>{instance_ids.begin(), instance_ids.end()};
+  return ParsedInstanceIdsOpt{instance_ids_vector};
+}
+
+Result<bool> StartSelectorParser::CalcMayBeDefaultGroup() {
+  auto disable_default_group_flag = CF_EXPECT(
+      SelectorFlags::Get().GetFlag(SelectorFlags::kDisableDefaultGroup));
+  if (CF_EXPECT(
+          disable_default_group_flag.CalculateFlag<bool>(selector_args_))) {
+    return false;
+  }
+  /*
+   * --disable_default_group instructs that the default group
+   * should be disabled anyway. If not given, the logic to determine
+   * whether this group is the default one or not is:
+   *  If HOME is not overridden and no selector options, then
+   *   the default group
+   *  Or, not a default group
+   *
+   */
+  if (CF_EXPECT(common_parser_.HomeOverridden())) {
+    return false;
+  }
+  return !common_parser_.HasDeviceSelectOption();
+}
+
+static bool IsTrue(const std::string& value) {
+  std::unordered_set<std::string> true_strings = {"y", "yes", "true"};
+  std::string value_in_lower_case = value;
+  /*
+   * https://en.cppreference.com/w/cpp/string/byte/tolower
+   *
+   * char should be converted to unsigned char first.
+   */
+  std::transform(value_in_lower_case.begin(), value_in_lower_case.end(),
+                 value_in_lower_case.begin(),
+                 [](unsigned char c) { return std::tolower(c); });
+  return Contains(true_strings, value_in_lower_case);
+}
+
+static bool IsFalse(const std::string& value) {
+  std::unordered_set<std::string> false_strings = {"n", "no", "false"};
+  std::string value_in_lower_case = value;
+  /*
+   * https://en.cppreference.com/w/cpp/string/byte/tolower
+   *
+   * char should be converted to unsigned char first.
+   */
+  std::transform(value_in_lower_case.begin(), value_in_lower_case.end(),
+                 value_in_lower_case.begin(),
+                 [](unsigned char c) { return std::tolower(c); });
+  return Contains(false_strings, value_in_lower_case);
+}
+
+static std::optional<std::string> GetAcquireFileLockEnvValue(
+    const cvd_common::Envs& envs) {
+  if (!Contains(envs, SelectorFlags::kAcquireFileLockEnv)) {
+    return std::nullopt;
+  }
+  auto env_value = envs.at(SelectorFlags::kAcquireFileLockEnv);
+  if (env_value.empty()) {
+    return std::nullopt;
+  }
+  return env_value;
+}
+
+Result<bool> StartSelectorParser::CalcAcquireFileLock() {
+  // if the flag is set, flag has the highest priority
+  auto must_acquire_file_lock_flag =
+      CF_EXPECT(SelectorFlags::Get().GetFlag(SelectorFlags::kAcquireFileLock));
+  std::optional<bool> value_opt =
+      CF_EXPECT(must_acquire_file_lock_flag.FilterFlag<bool>(selector_args_));
+  if (value_opt) {
+    return *value_opt;
+  }
+  // flag is not set. see if there is the environment variable set
+  auto env_value_opt = GetAcquireFileLockEnvValue(envs_);
+  if (env_value_opt) {
+    auto value_string = *env_value_opt;
+    if (IsTrue(value_string)) {
+      return true;
+    }
+    if (IsFalse(value_string)) {
+      return false;
+    }
+    return CF_ERR("In \"" << SelectorFlags::kAcquireFileLockEnv << "="
+                          << value_string << ",\" \"" << value_string
+                          << "\" is an invalid value. Try true or false.");
+  }
+  // nothing set, falls back to the default value of the flag
+  auto default_value =
+      CF_EXPECT(must_acquire_file_lock_flag.DefaultValue<bool>());
+  return default_value;
+}
+
+Result<StartSelectorParser::WebrtcCalculatedNames>
+StartSelectorParser::CalcNamesUsingWebrtcDeviceId() {
+  std::optional<std::string> webrtc_device_ids_opt;
+  FilterSelectorFlag(cmd_args_, "webrtc_device_id", webrtc_device_ids_opt);
+  if (!webrtc_device_ids_opt) {
+    return WebrtcCalculatedNames{
+        .group_name = common_parser_.GroupName(),
+        .per_instance_names = common_parser_.PerInstanceNames()};
+  }
+  const std::string webrtc_device_ids =
+      std::move(webrtc_device_ids_opt.value());
+  std::vector<std::string> webrtc_device_names =
+      android::base::Tokenize(webrtc_device_ids, ",");
+
+  std::unordered_set<std::string> group_names;
+  std::vector<std::string> instance_names;
+  instance_names.reserve(webrtc_device_names.size());
+
+  // check if the supposedly group names exist and common across each
+  // webrtc_device_id
+  for (const auto& webrtc_device_name : webrtc_device_names) {
+    std::vector<std::string> tokens =
+        android::base::Tokenize(webrtc_device_name, "-");
+    CF_EXPECT_GE(tokens.size(), 2,
+                 webrtc_device_name
+                     << " cannot be split into group name and instance name");
+    group_names.insert(tokens.front());
+    CF_EXPECT_EQ(group_names.size(), 1,
+                 "group names in --webrtc_device_id must be the same but are "
+                 "different.");
+    tokens.erase(tokens.begin());
+    instance_names.push_back(android::base::Join(tokens, "-"));
+  }
+
+  std::string group_name = *(group_names.begin());
+  CF_EXPECT(IsValidGroupName(group_name),
+            group_name << " is not a valid group name");
+
+  for (const auto& instance_name : instance_names) {
+    CF_EXPECT(IsValidInstanceName(instance_name),
+              instance_name << " is not a valid instance name.");
+  }
+
+  if (auto flag_group_name_opt = common_parser_.GroupName()) {
+    CF_EXPECT_EQ(flag_group_name_opt.value(), group_name);
+  }
+  if (auto flag_per_instance_names_opt = common_parser_.PerInstanceNames()) {
+    CF_EXPECT_EQ(flag_per_instance_names_opt.value(), instance_names);
+  }
+  return WebrtcCalculatedNames{.group_name = group_name,
+                               .per_instance_names = instance_names};
+}
+
+Result<void> StartSelectorParser::ParseOptions() {
+  may_be_default_group_ = CF_EXPECT(CalcMayBeDefaultGroup());
+  must_acquire_file_lock_ = CF_EXPECT(CalcAcquireFileLock());
+
+  // compare webrtc_device_id against instance names
+  auto verified_names =
+      CF_EXPECT(CalcNamesUsingWebrtcDeviceId(),
+                "--webrtc_device_id must match the list of device names");
+  group_name_ = verified_names.group_name;
+  per_instance_names_ = verified_names.per_instance_names;
+
+  std::optional<std::string> num_instances;
+  std::optional<std::string> instance_nums;
+  std::optional<std::string> base_instance_num;
+  // set num_instances as std::nullptr or the value of --num_instances
+  FilterSelectorFlag(cmd_args_, "num_instances", num_instances);
+  FilterSelectorFlag(cmd_args_, "instance_nums", instance_nums);
+  FilterSelectorFlag(cmd_args_, "base_instance_num", base_instance_num);
+
+  InstanceIdsParams instance_nums_param{
+      .num_instances = std::move(num_instances),
+      .instance_nums = std::move(instance_nums),
+      .base_instance_num = std::move(base_instance_num),
+      .cuttlefish_instance_env = TryFromCuttlefishInstance(envs_),
+      .vsoc_suffix = TryFromUser(envs_)};
+  auto parsed_ids = CF_EXPECT(HandleInstanceIds(instance_nums_param));
+  requested_num_instances_ = parsed_ids.GetNumOfInstances();
+  instance_ids_ = std::move(parsed_ids.GetInstanceIds());
+
+  return {};
+}
+
+}  // namespace selector
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/selector/start_selector_parser.h b/host/commands/cvd/selector/start_selector_parser.h
new file mode 100644
index 0000000..2f9f93a
--- /dev/null
+++ b/host/commands/cvd/selector/start_selector_parser.h
@@ -0,0 +1,167 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <sys/types.h>
+
+#include <optional>
+#include <string>
+#include <unordered_map>
+#include <unordered_set>
+#include <vector>
+
+#include "common/libs/utils/result.h"
+#include "host/commands/cvd/selector/selector_common_parser.h"
+#include "host/commands/cvd/types.h"
+
+namespace cuttlefish {
+namespace selector {
+
+/**
+ * This class parses the separated SelectorOptions defined in
+ * cvd_server.proto.
+ *
+ * Note that the parsing is from the perspective of syntax.
+ *
+ * In other words, this does not check the following, for example:
+ *  1. If the numeric instance id is duplicated
+ *  2. If the group name is already taken
+ *
+ * How it works is, it parses the selector options that are common
+ * across operations with SelectorCommonParser first. Following that,
+ * StartSelectorParser parses start-specific selector options.
+ */
+class StartSelectorParser {
+ public:
+  static Result<StartSelectorParser> ConductSelectFlagsParser(
+      const uid_t uid, const cvd_common::Args& selector_args,
+      const cvd_common::Args& cmd_args, const cvd_common::Envs& envs);
+  std::optional<std::string> GroupName() const;
+  std::optional<std::vector<std::string>> PerInstanceNames() const;
+  const std::optional<std::vector<unsigned>>& InstanceIds() const {
+    return instance_ids_;
+  }
+  unsigned RequestedNumInstances() const { return requested_num_instances_; }
+  bool IsMaybeDefaultGroup() const { return may_be_default_group_; }
+  bool MustAcquireFileLock() const { return must_acquire_file_lock_; }
+
+ private:
+  StartSelectorParser(const std::string& system_wide_user_home,
+                      const cvd_common::Args& selector_args,
+                      const cvd_common::Args& cmd_args,
+                      const cvd_common::Envs& envs,
+                      SelectorCommonParser&& common_parser);
+
+  Result<void> ParseOptions();
+
+  struct InstanceIdsParams {
+    std::optional<std::string> num_instances;
+    std::optional<std::string> instance_nums;
+    std::optional<std::string> base_instance_num;
+    std::optional<unsigned> cuttlefish_instance_env;
+    std::optional<unsigned> vsoc_suffix;
+  };
+
+  class ParsedInstanceIdsOpt {
+    friend class StartSelectorParser;
+
+   private:
+    ParsedInstanceIdsOpt(const std::vector<unsigned>& instance_ids)
+        : instance_ids_{instance_ids},
+          n_instances_{static_cast<unsigned>(instance_ids.size())} {}
+    ParsedInstanceIdsOpt(const unsigned n_instances)
+        : instance_ids_{std::nullopt}, n_instances_{n_instances} {}
+    auto GetInstanceIds() { return std::move(instance_ids_); }
+    unsigned GetNumOfInstances() const { return n_instances_; }
+    std::optional<std::vector<unsigned>> instance_ids_;
+    const unsigned n_instances_;
+  };
+
+  /*
+   * CF_ERR is meant to be an error:
+   *  For example, --num_instances != |--instance_nums|.
+   *
+   * On the contrary, std::nullopt inside Result is not necessary one.
+   * std::nullopt inside Result means that with the given information,
+   * the instance_ids_ cannot be yet figured out, so the task is deferred
+   * to CreationAnalyzer or similar, which has more context. For example,
+   * if no option at all is given, it is not an error; however, the
+   * StartSelectorParser alone cannot figure out the list of instance ids. The
+   * InstanceDatabase, UniqueResourceAllocator, InstanceLockFileManager will be
+   * involved to automatically generate the valid, numeric instance ids.
+   * If that's the case, Result{std::nullopt} could be returned.
+   *
+   */
+  Result<ParsedInstanceIdsOpt> HandleInstanceIds(
+      const InstanceIdsParams& instance_id_params);
+
+  struct InstanceFromEnvParam {
+    std::optional<unsigned> cuttlefish_instance_env;
+    std::optional<unsigned> vsoc_suffix;
+    std::optional<unsigned> num_instances;
+  };
+  std::optional<std::vector<unsigned>> InstanceFromEnvironment(
+      const InstanceFromEnvParam& params);
+
+  struct VerifyNumOfInstancesParam {
+    std::optional<std::string> num_instances_flag;
+    std::optional<std::vector<std::string>> instance_names;
+    std::optional<std::string> instance_nums_flag;
+  };
+
+  Result<unsigned> VerifyNumOfInstances(
+      const VerifyNumOfInstancesParam& params,
+      const unsigned default_n_instances = 1) const;
+  Result<bool> CalcMayBeDefaultGroup();
+  Result<bool> CalcAcquireFileLock();
+
+  struct WebrtcCalculatedNames {
+    std::optional<std::string> group_name;
+    std::optional<std::vector<std::string>> per_instance_names;
+  };
+  Result<WebrtcCalculatedNames> CalcNamesUsingWebrtcDeviceId();
+
+  /**
+   * The following are considered, and left empty if can't be figured out.
+   *
+   * --base_instance_num, --instance_nums, --num_instances,
+   * instance_names_.size(), CUTTLEFISH_INSTANCE, and vsoc-suffix if
+   * it is the user name.
+   *
+   * instance_names_.size() is effectively another --num_instances.
+   * CUTTLEFISH_INSTANCE and the suffix in order are considered as
+   * --base_instance_num if --base_instance_num is not given and
+   * --instance_nums is not given.
+   *
+   */
+  std::optional<std::vector<unsigned>> instance_ids_;
+  unsigned requested_num_instances_;
+  bool may_be_default_group_;
+  bool must_acquire_file_lock_;
+  std::optional<std::string> group_name_;
+  std::optional<std::vector<std::string>> per_instance_names_;
+
+  // temporarily keeps the leftover of the input cmd_args
+  const std::string client_user_home_;
+  cvd_common::Args selector_args_;
+  cvd_common::Args cmd_args_;
+  cvd_common::Envs envs_;
+  SelectorCommonParser common_parser_;
+};
+
+}  // namespace selector
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/server.cc b/host/commands/cvd/server.cc
index 6e74ddf..d2cea1a 100644
--- a/host/commands/cvd/server.cc
+++ b/host/commands/cvd/server.cc
@@ -17,6 +17,7 @@
 #include "host/commands/cvd/server.h"
 
 #include <signal.h>
+#include <unistd.h>
 
 #include <atomic>
 #include <future>
@@ -27,6 +28,7 @@
 
 #include <android-base/file.h>
 #include <android-base/logging.h>
+#include <android-base/strings.h>
 #include <fruit/fruit.h>
 
 #include "cvd_server.pb.h"
@@ -37,44 +39,59 @@
 #include "common/libs/utils/files.h"
 #include "common/libs/utils/flag_parser.h"
 #include "common/libs/utils/result.h"
+#include "common/libs/utils/scope_guard.h"
 #include "common/libs/utils/shared_fd_flag.h"
 #include "common/libs/utils/subprocess.h"
+#include "host/commands/cvd/build_api.h"
+#include "host/commands/cvd/command_sequence.h"
+#include "host/commands/cvd/demo_multi_vd.h"
 #include "host/commands/cvd/epoll_loop.h"
-#include "host/commands/cvd/scope_guard.h"
+#include "host/commands/cvd/logger.h"
+#include "host/commands/cvd/server_command/acloud.h"
+#include "host/commands/cvd/server_command/cmd_list.h"
+#include "host/commands/cvd/server_command/crosvm.h"
+#include "host/commands/cvd/server_command/display.h"
+#include "host/commands/cvd/server_command/env.h"
+#include "host/commands/cvd/server_command/generic.h"
+#include "host/commands/cvd/server_command/handler_proxy.h"
+#include "host/commands/cvd/server_command/load_configs.h"
+#include "host/commands/cvd/server_command/operation_to_bins_map.h"
+#include "host/commands/cvd/server_command/power.h"
+#include "host/commands/cvd/server_command/reset.h"
+#include "host/commands/cvd/server_command/start.h"
+#include "host/commands/cvd/server_command/subcmd.h"
 #include "host/commands/cvd/server_constants.h"
 #include "host/libs/config/cuttlefish_config.h"
+#include "host/libs/config/inject.h"
 #include "host/libs/config/known_paths.h"
 
 namespace cuttlefish {
 
-static fruit::Component<> RequestComponent(CvdServer* server,
-                                           InstanceManager* instance_manager) {
-  return fruit::createComponent()
-      .bindInstance(*server)
-      .bindInstance(*instance_manager)
-      .install(AcloudCommandComponent)
-      .install(cvdCommandComponent)
-      .install(cvdShutdownComponent)
-      .install(cvdVersionComponent);
-}
-
 static constexpr int kNumThreads = 10;
 
-CvdServer::CvdServer(EpollPool& epoll_pool, InstanceManager& instance_manager)
-    : epoll_pool_(epoll_pool),
+CvdServer::CvdServer(BuildApi& build_api, EpollPool& epoll_pool,
+                     InstanceManager& instance_manager,
+                     HostToolTargetManager& host_tool_target_manager,
+                     ServerLogger& server_logger)
+    : build_api_(build_api),
+      epoll_pool_(epoll_pool),
       instance_manager_(instance_manager),
-      running_(true) {
+      host_tool_target_manager_(host_tool_target_manager),
+      server_logger_(server_logger),
+      running_(true),
+      optout_(false) {
   std::scoped_lock lock(threads_mutex_);
   for (auto i = 0; i < kNumThreads; i++) {
     threads_.emplace_back([this]() {
       while (running_) {
         auto result = epoll_pool_.HandleEvent();
         if (!result.ok()) {
-          LOG(ERROR) << "Epoll worker error:\n" << result.error();
+          LOG(ERROR) << "Epoll worker error:\n" << result.error().Message();
+          LOG(DEBUG) << "Epoll worker error:\n" << result.error().Trace();
         }
       }
       auto wakeup = BestEffortWakeup();
-      CHECK(wakeup.ok()) << wakeup.error().message();
+      CHECK(wakeup.ok()) << wakeup.error().Trace();
     });
   }
 }
@@ -82,10 +99,39 @@
 CvdServer::~CvdServer() {
   running_ = false;
   auto wakeup = BestEffortWakeup();
-  CHECK(wakeup.ok()) << wakeup.error().message();
+  CHECK(wakeup.ok()) << wakeup.error().Trace();
   Join();
 }
 
+fruit::Component<> CvdServer::RequestComponent(CvdServer* server) {
+  return fruit::createComponent()
+      .bindInstance(*server)
+      .bindInstance(server->instance_manager_)
+      .bindInstance(server->build_api_)
+      .bindInstance(server->host_tool_target_manager_)
+      .bindInstance<
+          fruit::Annotated<AcloudTranslatorOptOut, std::atomic<bool>>>(
+          server->optout_)
+      .install(CvdAcloudComponent)
+      .install(CvdCmdlistComponent)
+      .install(CommandSequenceExecutorComponent)
+      .install(CvdCrosVmComponent)
+      .install(cvdCommandComponent)
+      .install(CvdDevicePowerComponent)
+      .install(CvdDisplayComponent)
+      .install(CvdEnvComponent)
+      .install(cvdGenericCommandComponent)
+      .install(CvdHandlerProxyComponent)
+      .install(CvdHelpComponent)
+      .install(CvdResetComponent)
+      .install(CvdRestartComponent)
+      .install(cvdShutdownComponent)
+      .install(CvdStartCommandComponent)
+      .install(cvdVersionComponent)
+      .install(DemoMultiVdComponent)
+      .install(LoadConfigsComponent);
+}
+
 Result<void> CvdServer::BestEffortWakeup() {
   // This attempts to cascade through the responder threads, forcing them
   // to wake up and see that running_ is false, then exit and wake up
@@ -122,6 +168,8 @@
       }
       request->handler->Interrupt();
     }
+    auto wakeup = BestEffortWakeup();
+    CHECK(wakeup.ok()) << wakeup.error().Trace();
     std::scoped_lock lock(threads_mutex_);
     for (auto& thread : threads_) {
       auto current_thread = thread.get_id() == std::this_thread::get_id();
@@ -141,7 +189,64 @@
   }
 }
 
-static Result<CvdServerHandler*> RequestHandler(
+Result<void> CvdServer::Exec(const ExecParam& exec_param) {
+  CF_EXPECT(server_fd_->IsOpen(), "Server not running");
+  Stop();
+  android::base::unique_fd server_dup{server_fd_->UNMANAGED_Dup()};
+  CF_EXPECT(server_dup.get() >= 0, "dup: \"" << server_fd_->StrError() << "\"");
+  android::base::unique_fd client_dup{
+      exec_param.carryover_client_fd->UNMANAGED_Dup()};
+  CF_EXPECT(client_dup.get() >= 0, "dup: \"" << server_fd_->StrError() << "\"");
+  android::base::unique_fd client_stderr_dup{
+      exec_param.client_stderr_fd->UNMANAGED_Dup()};
+  CF_EXPECT(client_stderr_dup.get() >= 0,
+            "dup: \"" << exec_param.client_stderr_fd->StrError() << "\"");
+  cvd_common::Args argv_str = {
+      kServerExecPath,
+      "-INTERNAL_server_fd=" + std::to_string(server_dup.get()),
+      "-INTERNAL_carryover_client_fd=" + std::to_string(client_dup.get()),
+      "-INTERNAL_carryover_stderr_fd=" +
+          std::to_string(client_stderr_dup.get()),
+  };
+
+  int in_memory_dup = -1;
+  ScopeGuard exit_action([&in_memory_dup]() {
+    if (in_memory_dup >= 0) {
+      if (close(in_memory_dup) != 0) {
+        LOG(ERROR) << "Failed to close file " << in_memory_dup;
+      }
+    }
+  });
+  if (exec_param.in_memory_data_fd) {
+    in_memory_dup = exec_param.in_memory_data_fd.value()->UNMANAGED_Dup();
+    CF_EXPECT(
+        in_memory_dup >= 0,
+        "dup: \"" << exec_param.in_memory_data_fd.value()->StrError() << "\"");
+    argv_str.push_back("-INTERNAL_memory_carryover_fd=" +
+                       std::to_string(in_memory_dup));
+  }
+
+  std::vector<char*> argv_cstr;
+  for (const auto& argv : argv_str) {
+    argv_cstr.emplace_back(strdup(argv.c_str()));
+  }
+  argv_cstr.emplace_back(nullptr);
+  android::base::unique_fd new_exe_dup{exec_param.new_exe->UNMANAGED_Dup()};
+  CF_EXPECT(new_exe_dup.get() >= 0,
+            "dup: \"" << exec_param.new_exe->StrError() << "\"");
+
+  if (exec_param.verbose) {
+    LOG(ERROR) << "Server Exec'ing: " << android::base::Join(argv_str, " ");
+  }
+
+  fexecve(new_exe_dup.get(), argv_cstr.data(), environ);
+  for (const auto& argv : argv_cstr) {
+    free(argv);
+  }
+  return CF_ERR("fexecve failed: \"" << strerror(errno) << "\"");
+}
+
+Result<CvdServerHandler*> RequestHandler(
     const RequestWithStdio& request,
     const std::vector<CvdServerHandler*>& handlers) {
   Result<cvd::Response> response;
@@ -158,6 +263,7 @@
 }
 
 Result<void> CvdServer::StartServer(SharedFD server_fd) {
+  server_fd_ = server_fd;
   auto cb = [this](EpollEvent ev) -> Result<void> {
     CF_EXPECT(AcceptClient(ev));
     return {};
@@ -166,6 +272,23 @@
   return {};
 }
 
+Result<void> CvdServer::AcceptCarryoverClient(
+    SharedFD client,
+    // the passed ScopedLogger should be destroyed on return of this function.
+    std::unique_ptr<ServerLogger::ScopedLogger>) {
+  auto self_cb = [this](EpollEvent ev) -> Result<void> {
+    CF_EXPECT(HandleMessage(ev));
+    return {};
+  };
+  CF_EXPECT(epoll_pool_.Register(client, EPOLLIN, self_cb));
+
+  cvd::Response success_message;
+  success_message.mutable_status()->set_code(cvd::Status::OK);
+  success_message.mutable_command_response();
+  CF_EXPECT(SendResponse(client, success_message));
+  return {};
+}
+
 Result<void> CvdServer::AcceptClient(EpollEvent event) {
   ScopeGuard stop_on_failure([this] { Stop(); });
 
@@ -203,17 +326,18 @@
     return {};
   }
 
+  auto logger = server_logger_.LogThreadToFd(request->Err());
   auto response = HandleRequest(*request, event.fd);
   if (!response.ok()) {
     cvd::Response failure_message;
     failure_message.mutable_status()->set_code(cvd::Status::INTERNAL);
-    failure_message.mutable_status()->set_message(response.error().message());
+    failure_message.mutable_status()->set_message(response.error().Trace());
     CF_EXPECT(SendResponse(event.fd, failure_message));
     return {};  // Error already sent to the client, don't repeat on the server
   }
   CF_EXPECT(SendResponse(event.fd, *response));
 
-  auto self_cb = [this](EpollEvent ev) -> Result<void> {
+  auto self_cb = [this, err = request->Err()](EpollEvent ev) -> Result<void> {
     CF_EXPECT(HandleMessage(ev));
     return {};
   };
@@ -223,9 +347,79 @@
   return {};
 }
 
-Result<cvd::Response> CvdServer::HandleRequest(RequestWithStdio request,
+// convert HOME, ANDROID_HOST_OUT, ANDROID_SOONG_HOST_OUT
+// and ANDROID_PRODUCT_OUT into absolute paths if any.
+static Result<RequestWithStdio> ConvertDirPathToAbsolute(
+    const RequestWithStdio& request) {
+  if (request.Message().contents_case() !=
+      cvd::Request::ContentsCase::kCommandRequest) {
+    return request;
+  }
+  if (request.Message().command_request().env().empty()) {
+    return request;
+  }
+  auto envs =
+      cvd_common::ConvertToEnvs(request.Message().command_request().env());
+  std::unordered_set<std::string> interested_envs{
+      kAndroidHostOut, kAndroidSoongHostOut, "HOME", kAndroidProductOut};
+  const auto& current_dir =
+      request.Message().command_request().working_directory();
+
+  // make sure that "~" is not included
+  for (const auto& key : interested_envs) {
+    if (!Contains(envs, key)) {
+      continue;
+    }
+    const auto& dir = envs.at(key);
+    CF_EXPECT(dir != "~" && !android::base::StartsWith(dir, "~/"),
+              "The " << key << " directory should not start with ~");
+  }
+
+  for (const auto& key : interested_envs) {
+    if (!Contains(envs, key)) {
+      continue;
+    }
+    const auto dir = envs.at(key);
+    envs[key] =
+        CF_EXPECT(EmulateAbsolutePath({.current_working_dir = current_dir,
+                                       .home_dir = std::nullopt,  // unused
+                                       .path_to_convert = dir,
+                                       .follow_symlink = false}));
+  }
+
+  auto cmd_args =
+      cvd_common::ConvertToArgs(request.Message().command_request().args());
+  auto selector_args = cvd_common::ConvertToArgs(
+      request.Message().command_request().selector_opts().args());
+  RequestWithStdio new_request(
+      request.Client(),
+      MakeRequest({.cmd_args = std::move(cmd_args),
+                   .selector_args = std::move(selector_args),
+                   .env = std::move(envs),
+                   .working_dir = current_dir},
+                  request.Message().command_request().wait_behavior()),
+      request.FileDescriptors(), request.Credentials());
+  return new_request;
+}
+
+static Result<void> VerifyUser(const RequestWithStdio& request) {
+  CF_EXPECT(request.Credentials(),
+            "ucred is not available while it is necessary.");
+  const uid_t client_uid = request.Credentials()->uid;
+  CF_EXPECT_EQ(client_uid, getuid(), "Cvd server process is one per user.");
+  return {};
+}
+
+Result<cvd::Response> CvdServer::HandleRequest(RequestWithStdio orig_request,
                                                SharedFD client) {
-  fruit::Injector<> injector(RequestComponent, this, &instance_manager_);
+  CF_EXPECT(VerifyUser(orig_request));
+  auto request = CF_EXPECT(ConvertDirPathToAbsolute(orig_request));
+  fruit::Injector<> injector(RequestComponent, this);
+
+  for (auto& late_injected : injector.getMultibindings<LateInjected>()) {
+    CF_EXPECT(late_injected->LateInject(injector));
+  }
+
   auto possible_handlers = injector.getMultibindings<CvdServerHandler>();
 
   // Even if the interrupt callback outlives the request handler, it'll only
@@ -249,7 +443,9 @@
     ongoing_requests_.erase(shared);
   });
 
-  auto interrupt_cb = [shared](EpollEvent) -> Result<void> {
+  auto interrupt_cb = [this, shared,
+                       err = request.Err()](EpollEvent) -> Result<void> {
+    auto logger = server_logger_.LogThreadToFd(err);
     std::lock_guard lock(shared->mutex);
     CF_EXPECT(shared->handler != nullptr);
     CF_EXPECT(shared->handler->Interrupt());
@@ -267,24 +463,82 @@
   return response;
 }
 
-static fruit::Component<CvdServer> ServerComponent() {
-  return fruit::createComponent()
-      .install(EpollLoopComponent);
+Result<void> CvdServer::InstanceDbFromJson(const std::string& json_string) {
+  const uid_t uid = getuid();
+  auto json = CF_EXPECT(ParseJson(json_string));
+  CF_EXPECT(instance_manager_.LoadFromJson(uid, json));
+  return {};
 }
 
-Result<int> CvdServerMain(SharedFD server_fd) {
+static fruit::Component<> ServerComponent(ServerLogger* server_logger) {
+  return fruit::createComponent()
+      .addMultibinding<CvdServer, CvdServer>()
+      .bindInstance(*server_logger)
+      .install(BuildApiModule)
+      .install(EpollLoopComponent)
+      .install(HostToolTargetManagerComponent)
+      .install(OperationToBinsMapComponent);
+}
+
+Result<int> CvdServerMain(ServerMainParam&& fds) {
   LOG(INFO) << "Starting server";
 
+  CF_EXPECT(daemon(0, 0) != -1, strerror(errno));
+
   signal(SIGPIPE, SIG_IGN);
 
+  SharedFD server_fd = std::move(fds.internal_server_fd);
   CF_EXPECT(server_fd->IsOpen(), "Did not receive a valid cvd_server fd");
 
-  fruit::Injector<CvdServer> injector(ServerComponent);
-  CvdServer& server = injector.get<CvdServer&>();
+  std::unique_ptr<ServerLogger> server_logger = std::move(fds.server_logger);
+  fruit::Injector<> injector(ServerComponent, server_logger.get());
+
+  for (auto& late_injected : injector.getMultibindings<LateInjected>()) {
+    CF_EXPECT(late_injected->LateInject(injector));
+  }
+
+  auto server_bindings = injector.getMultibindings<CvdServer>();
+  CF_EXPECT(server_bindings.size() == 1,
+            "Expected 1 server binding, got " << server_bindings.size());
+  auto& server = *(server_bindings[0]);
+
+  std::optional<SharedFD> memory_carryover_fd =
+      std::move(fds.memory_carryover_fd);
+  if (memory_carryover_fd) {
+    const std::string json_string =
+        CF_EXPECT(ReadAllFromMemFd(*memory_carryover_fd));
+    CF_EXPECT(server.InstanceDbFromJson(json_string),
+              "Failed to load from: " << json_string);
+  }
+
   server.StartServer(server_fd);
+
+  SharedFD carryover_client = std::move(fds.carryover_client_fd);
+  // The carryover_client wouldn't be available after AcceptCarryoverClient()
+  if (carryover_client->IsOpen()) {
+    // release scoped_logger for this thread inside AcceptCarryoverClient()
+    CF_EXPECT(server.AcceptCarryoverClient(carryover_client,
+                                           std::move(fds.scoped_logger)));
+  } else {
+    // release scoped_logger now and delete the object
+    fds.scoped_logger.reset();
+  }
   server.Join();
 
   return 0;
 }
 
+Result<std::string> ReadAllFromMemFd(const SharedFD& mem_fd) {
+  const auto n_message_size = mem_fd->LSeek(0, SEEK_END);
+  CF_EXPECT_NE(n_message_size, -1, "LSeek on the memory file failed.");
+  std::vector<char> buffer(n_message_size);
+  CF_EXPECT_EQ(mem_fd->LSeek(0, SEEK_SET), 0, mem_fd->StrError());
+  auto n_read = ReadExact(mem_fd, buffer.data(), n_message_size);
+  CF_EXPECT(n_read == n_message_size,
+            "Expected to read " << n_message_size << " bytes but actually read "
+                                << n_read << " bytes.");
+  std::string message(buffer.begin(), buffer.end());
+  return message;
+}
+
 }  // namespace cuttlefish
diff --git a/host/commands/cvd/server.h b/host/commands/cvd/server.h
index 8a77081..fa375c2 100644
--- a/host/commands/cvd/server.h
+++ b/host/commands/cvd/server.h
@@ -29,32 +29,59 @@
 
 #include "common/libs/fs/epoll.h"
 #include "common/libs/fs/shared_fd.h"
-#include "common/libs/utils/result.h"
 #include "common/libs/utils/subprocess.h"
 #include "common/libs/utils/unix_sockets.h"
 #include "host/commands/cvd/epoll_loop.h"
 #include "host/commands/cvd/instance_manager.h"
-#include "host/commands/cvd/server_client.h"
+#include "host/commands/cvd/logger.h"
+// including "server_command/subcmd.h" causes cyclic dependency
+#include "host/commands/cvd/server_command/host_tool_target_manager.h"
+#include "host/commands/cvd/server_command/server_handler.h"
+#include "host/libs/config/inject.h"
+#include "host/libs/web/build_api.h"
 
 namespace cuttlefish {
 
-class CvdServerHandler {
- public:
-  virtual ~CvdServerHandler() = default;
-
-  virtual Result<bool> CanHandle(const RequestWithStdio&) const = 0;
-  virtual Result<cvd::Response> Handle(const RequestWithStdio&) = 0;
-  virtual Result<void> Interrupt() = 0;
+struct ServerMainParam {
+  SharedFD internal_server_fd;
+  SharedFD carryover_client_fd;
+  std::optional<SharedFD> memory_carryover_fd;
+  std::unique_ptr<ServerLogger> server_logger;
+  /* scoped logger that carries the stderr of the carried-over
+   * client. The client may have called "cvd restart-server."
+   *
+   * The scoped_logger should expire just after AcceptCarryoverClient()
+   */
+  std::unique_ptr<ServerLogger::ScopedLogger> scoped_logger;
 };
+Result<int> CvdServerMain(ServerMainParam&& fds);
 
 class CvdServer {
+  // for server_logger_.
+  // server_logger_ shouldn't be exposed to anything but CvdServerMain()
+  friend Result<int> CvdServerMain(ServerMainParam&& fds);
+
  public:
-  INJECT(CvdServer(EpollPool&, InstanceManager&));
+  INJECT(CvdServer(BuildApi&, EpollPool&, InstanceManager&,
+                   HostToolTargetManager&, ServerLogger&));
   ~CvdServer();
 
   Result<void> StartServer(SharedFD server);
+  struct ExecParam {
+    SharedFD new_exe;
+    SharedFD carryover_client_fd;  // the client that called cvd restart-server
+    std::optional<SharedFD>
+        in_memory_data_fd;  // fd to carry over in-memory data
+    SharedFD client_stderr_fd;
+    bool verbose;
+  };
+  Result<void> Exec(const ExecParam&);
+  Result<void> AcceptCarryoverClient(
+      SharedFD client,
+      std::unique_ptr<ServerLogger::ScopedLogger> scoped_logger);
   void Stop();
   void Join();
+  Result<void> InstanceDbFromJson(const std::string& json_string);
 
  private:
   struct OngoingRequest {
@@ -63,13 +90,20 @@
     std::thread::id thread_id;
   };
 
+  /* this has to be static due to the way fruit includes components */
+  static fruit::Component<> RequestComponent(CvdServer*);
+
   Result<void> AcceptClient(EpollEvent);
   Result<void> HandleMessage(EpollEvent);
   Result<cvd::Response> HandleRequest(RequestWithStdio, SharedFD client);
   Result<void> BestEffortWakeup();
 
+  SharedFD server_fd_;
+  BuildApi& build_api_;
   EpollPool& epoll_pool_;
   InstanceManager& instance_manager_;
+  HostToolTargetManager& host_tool_target_manager_;
+  ServerLogger& server_logger_;
   std::atomic_bool running_ = true;
 
   std::mutex ongoing_requests_mutex_;
@@ -77,36 +111,16 @@
   // TODO(schuffelen): Move this thread pool to another class.
   std::mutex threads_mutex_;
   std::vector<std::thread> threads_;
+
+  // translator optout
+  std::atomic<bool> optout_;
 };
 
-class CvdCommandHandler : public CvdServerHandler {
- public:
-  INJECT(CvdCommandHandler(InstanceManager& instance_manager));
+Result<CvdServerHandler*> RequestHandler(
+    const RequestWithStdio& request,
+    const std::vector<CvdServerHandler*>& handlers);
 
-  Result<bool> CanHandle(const RequestWithStdio&) const override;
-  Result<cvd::Response> Handle(const RequestWithStdio&) override;
-  Result<void> Interrupt() override;
-
- private:
-  InstanceManager& instance_manager_;
-  std::optional<Subprocess> subprocess_;
-  std::mutex interruptible_;
-  bool interrupted_ = false;
-};
-
-fruit::Component<fruit::Required<InstanceManager>> cvdCommandComponent();
-fruit::Component<fruit::Required<CvdServer, InstanceManager>>
-cvdShutdownComponent();
-fruit::Component<> cvdVersionComponent();
-fruit::Component<fruit::Required<CvdCommandHandler>> AcloudCommandComponent();
-
-struct CommandInvocation {
-  std::string command;
-  std::vector<std::string> arguments;
-};
-
-CommandInvocation ParseInvocation(const cvd::Request& request);
-
-Result<int> CvdServerMain(SharedFD server_fd);
+// Read all contents from the file
+Result<std::string> ReadAllFromMemFd(const SharedFD& mem_fd);
 
 }  // namespace cuttlefish
diff --git a/host/commands/cvd/server_client.cpp b/host/commands/cvd/server_client.cpp
index 017b2f5..7020d68 100644
--- a/host/commands/cvd/server_client.cpp
+++ b/host/commands/cvd/server_client.cpp
@@ -70,7 +70,8 @@
     LOG(DEBUG) << "Has credentials, uid=" << creds->uid;
   }
 
-  return RequestWithStdio(std::move(request), std::move(fds), std::move(creds));
+  return RequestWithStdio(client, std::move(request), std::move(fds),
+                          std::move(creds));
 }
 
 Result<void> SendResponse(const SharedFD& client,
@@ -87,10 +88,15 @@
   return {};
 }
 
-RequestWithStdio::RequestWithStdio(cvd::Request message,
+RequestWithStdio::RequestWithStdio(SharedFD client_fd, cvd::Request message,
                                    std::vector<SharedFD> fds,
                                    std::optional<ucred> creds)
-    : message_(message), fds_(std::move(fds)), creds_(creds) {}
+    : client_fd_(client_fd),
+      message_(message),
+      fds_(std::move(fds)),
+      creds_(creds) {}
+
+SharedFD RequestWithStdio::Client() const { return client_fd_; }
 
 const cvd::Request& RequestWithStdio::Message() const { return message_; }
 
diff --git a/host/commands/cvd/server_client.h b/host/commands/cvd/server_client.h
index 751a890..8442c60 100644
--- a/host/commands/cvd/server_client.h
+++ b/host/commands/cvd/server_client.h
@@ -32,8 +32,10 @@
 
 class RequestWithStdio {
  public:
-  RequestWithStdio(cvd::Request, std::vector<SharedFD>, std::optional<ucred>);
+  RequestWithStdio(SharedFD, cvd::Request, std::vector<SharedFD>,
+                   std::optional<ucred>);
 
+  SharedFD Client() const;
   const cvd::Request& Message() const;
   const std::vector<SharedFD>& FileDescriptors() const;
   SharedFD In() const;
@@ -43,6 +45,7 @@
   std::optional<ucred> Credentials() const;
 
  private:
+  SharedFD client_fd_;
   cvd::Request message_;
   std::vector<SharedFD> fds_;
   std::optional<ucred> creds_;
diff --git a/host/commands/cvd/server_command.cpp b/host/commands/cvd/server_command.cpp
deleted file mode 100644
index e7e9b4a..0000000
--- a/host/commands/cvd/server_command.cpp
+++ /dev/null
@@ -1,338 +0,0 @@
-/*
- * Copyright (C) 2022 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "host/commands/cvd/server.h"
-
-#include <set>
-#include <string>
-#include <vector>
-
-#include <android-base/file.h>
-#include <android-base/logging.h>
-#include <fruit/fruit.h>
-
-#include "cvd_server.pb.h"
-
-#include "common/libs/fs/shared_buf.h"
-#include "common/libs/fs/shared_fd.h"
-#include "common/libs/utils/environment.h"
-#include "common/libs/utils/files.h"
-#include "common/libs/utils/flag_parser.h"
-#include "common/libs/utils/result.h"
-#include "common/libs/utils/subprocess.h"
-#include "host/commands/cvd/instance_manager.h"
-#include "host/libs/config/cuttlefish_config.h"
-
-namespace cuttlefish {
-namespace {
-
-constexpr char kHostBugreportBin[] = "cvd_internal_host_bugreport";
-constexpr char kStartBin[] = "cvd_internal_start";
-constexpr char kFetchBin[] = "fetch_cvd";
-constexpr char kMkdirBin[] = "/bin/mkdir";
-
-constexpr char kClearBin[] = "clear_placeholder";  // Unused, runs CvdClear()
-constexpr char kFleetBin[] = "fleet_placeholder";  // Unused, runs CvdFleet()
-constexpr char kHelpBin[] = "help_placeholder";  // Unused, prints kHelpMessage.
-constexpr char kHelpMessage[] = R"(Cuttlefish Virtual Device (CVD) CLI.
-
-usage: cvd <command> <args>
-
-Commands:
-  help                Print this message.
-  help <command>      Print help for a command.
-  start               Start a device.
-  stop                Stop a running device.
-  clear               Stop all running devices and delete all instance and assembly directories.
-  fleet               View the current fleet status.
-  kill-server         Kill the cvd_server background process.
-  status              Check and print the state of a running instance.
-  host_bugreport      Capture a host bugreport, including configs, logs, and tombstones.
-
-Args:
-  <command args>      Each command has its own set of args. See cvd help <command>.
-  --clean             If provided, runs cvd kill-server before the requested command.
-)";
-
-const std::map<std::string, std::string> CommandToBinaryMap = {
-    {"help", kHelpBin},
-    {"host_bugreport", kHostBugreportBin},
-    {"cvd_host_bugreport", kHostBugreportBin},
-    {"start", kStartBin},
-    {"launch_cvd", kStartBin},
-    {"status", kStatusBin},
-    {"cvd_status", kStatusBin},
-    {"stop", kStopBin},
-    {"stop_cvd", kStopBin},
-    {"clear", kClearBin},
-    {"fetch", kFetchBin},
-    {"fetch_cvd", kFetchBin},
-    {"mkdir", kMkdirBin},
-    {"fleet", kFleetBin}};
-
-}  // namespace
-
-CvdCommandHandler::CvdCommandHandler(InstanceManager& instance_manager)
-    : instance_manager_(instance_manager) {}
-
-Result<bool> CvdCommandHandler::CanHandle(
-    const RequestWithStdio& request) const {
-  auto invocation = ParseInvocation(request.Message());
-  return CommandToBinaryMap.find(invocation.command) !=
-         CommandToBinaryMap.end();
-}
-
-Result<cvd::Response> CvdCommandHandler::Handle(
-    const RequestWithStdio& request) {
-  std::unique_lock interrupt_lock(interruptible_);
-  if (interrupted_) {
-    return CF_ERR("Interrupted");
-  }
-  CF_EXPECT(CanHandle(request));
-  cvd::Response response;
-  response.mutable_command_response();
-
-  auto invocation = ParseInvocation(request.Message());
-
-  auto subcommand_bin = CommandToBinaryMap.find(invocation.command);
-  CF_EXPECT(subcommand_bin != CommandToBinaryMap.end());
-  auto bin = subcommand_bin->second;
-
-  // HOME is used to possibly set CuttlefishConfig path env variable later. This
-  // env variable is used by subcommands when locating the config.
-  auto request_home = request.Message().command_request().env().find("HOME");
-  std::string home =
-      request_home != request.Message().command_request().env().end()
-          ? request_home->second
-          : StringFromEnv("HOME", ".");
-
-  // Create a copy of args before parsing, to be passed to subcommands.
-  auto args = invocation.arguments;
-  auto args_copy = invocation.arguments;
-
-  auto host_artifacts_path =
-      request.Message().command_request().env().find("ANDROID_HOST_OUT");
-  if (host_artifacts_path == request.Message().command_request().env().end()) {
-    response.mutable_status()->set_code(cvd::Status::FAILED_PRECONDITION);
-    response.mutable_status()->set_message(
-        "Missing ANDROID_HOST_OUT in client environment.");
-    return response;
-  }
-
-  if (bin == kHelpBin) {
-    // Handle `cvd help`
-    if (args.empty()) {
-      WriteAll(request.Out(), kHelpMessage);
-      response.mutable_status()->set_code(cvd::Status::OK);
-      return response;
-    }
-
-    // Certain commands have no detailed help text.
-    std::set<std::string> builtins = {"help", "clear", "kill-server"};
-    auto it = CommandToBinaryMap.find(args[0]);
-    if (it == CommandToBinaryMap.end() ||
-        builtins.find(args[0]) != builtins.end()) {
-      WriteAll(request.Out(), kHelpMessage);
-      response.mutable_status()->set_code(cvd::Status::OK);
-      return response;
-    }
-
-    // Handle `cvd help <subcommand>` by calling the subcommand with --help.
-    bin = it->second;
-    args_copy.push_back("--help");
-  } else if (bin == kClearBin) {
-    *response.mutable_status() =
-        instance_manager_.CvdClear(request.Out(), request.Err());
-    return response;
-  } else if (bin == kFleetBin) {
-    auto env_config = request.Message().command_request().env().find(
-        kCuttlefishConfigEnvVarName);
-    std::string config_path;
-    if (env_config != request.Message().command_request().env().end()) {
-      config_path = env_config->second;
-    }
-    *response.mutable_status() =
-        instance_manager_.CvdFleet(request.Out(), config_path);
-    return response;
-  } else if (bin == kStartBin) {
-    auto first_instance = 1;
-    auto instance_env =
-        request.Message().command_request().env().find("CUTTLEFISH_INSTANCE");
-    if (instance_env != request.Message().command_request().env().end()) {
-      first_instance = std::stoi(instance_env->second);
-    }
-    auto ins_flag = GflagsCompatFlag("base_instance_num", first_instance);
-    auto num_instances = 1;
-    auto num_instances_flag = GflagsCompatFlag("num_instances", num_instances);
-    CF_EXPECT(ParseFlags({ins_flag, num_instances_flag}, args));
-
-    // Track this assembly_dir in the fleet.
-    InstanceManager::InstanceGroupInfo info;
-    info.host_binaries_dir = host_artifacts_path->second + "/bin/";
-    for (int i = first_instance; i < first_instance + num_instances; i++) {
-      info.instances.insert(i);
-    }
-    instance_manager_.SetInstanceGroup(home, info);
-  }
-
-  Command command("(replaced)");
-  if (bin == kFetchBin) {
-    command.SetExecutable(HostBinaryPath("fetch_cvd"));
-  } else if (bin == kMkdirBin) {
-    command.SetExecutable(kMkdirBin);
-  } else {
-    auto assembly_info = CF_EXPECT(instance_manager_.GetInstanceGroup(home));
-    command.SetExecutable(assembly_info.host_binaries_dir + bin);
-  }
-  for (const std::string& arg : args_copy) {
-    command.AddParameter(arg);
-  }
-
-  // Set CuttlefishConfig path based on assembly dir,
-  // used by subcommands when locating the CuttlefishConfig.
-  if (request.Message().command_request().env().count(
-          kCuttlefishConfigEnvVarName) == 0) {
-    auto config_path = GetCuttlefishConfigPath(home);
-    if (config_path) {
-      command.AddEnvironmentVariable(kCuttlefishConfigEnvVarName, *config_path);
-    }
-  }
-  for (auto& it : request.Message().command_request().env()) {
-    command.UnsetFromEnvironment(it.first);
-    command.AddEnvironmentVariable(it.first, it.second);
-  }
-
-  // Redirect stdin, stdout, stderr back to the cvd client
-  command.RedirectStdIO(Subprocess::StdIOChannel::kStdIn, request.In());
-  command.RedirectStdIO(Subprocess::StdIOChannel::kStdOut, request.Out());
-  command.RedirectStdIO(Subprocess::StdIOChannel::kStdErr, request.Err());
-  SubprocessOptions options;
-
-  if (request.Message().command_request().wait_behavior() ==
-      cvd::WAIT_BEHAVIOR_START) {
-    options.ExitWithParent(false);
-  }
-
-  const auto& working_dir =
-      request.Message().command_request().working_directory();
-  if (!working_dir.empty()) {
-    auto fd = SharedFD::Open(working_dir, O_RDONLY | O_PATH | O_DIRECTORY);
-    CF_EXPECT(fd->IsOpen(),
-              "Couldn't open \"" << working_dir << "\": " << fd->StrError());
-    command.SetWorkingDirectory(fd);
-  }
-
-  subprocess_ = command.Start(options);
-
-  if (request.Message().command_request().wait_behavior() ==
-      cvd::WAIT_BEHAVIOR_START) {
-    response.mutable_status()->set_code(cvd::Status::OK);
-    return response;
-  }
-  interrupt_lock.unlock();
-
-  siginfo_t infop{};
-
-  // This blocks until the process exits, but doesn't reap it.
-  auto result = subprocess_->Wait(&infop, WEXITED | WNOWAIT);
-  CF_EXPECT(result != -1, "Lost track of subprocess pid");
-  interrupt_lock.lock();
-  // Perform a reaping wait on the process (which should already have exited).
-  result = subprocess_->Wait(&infop, WEXITED);
-  CF_EXPECT(result != -1, "Lost track of subprocess pid");
-  // The double wait avoids a race around the kernel reusing pids. Waiting
-  // with WNOWAIT won't cause the child process to be reaped, so the kernel
-  // won't reuse the pid until the Wait call below, and any kill signals won't
-  // reach unexpected processes.
-
-  subprocess_ = {};
-
-  if (infop.si_code == CLD_EXITED && bin == kStopBin) {
-    instance_manager_.RemoveInstanceGroup(home);
-  }
-
-  if (infop.si_code == CLD_EXITED && infop.si_status == 0) {
-    response.mutable_status()->set_code(cvd::Status::OK);
-    return response;
-  }
-
-  response.mutable_status()->set_code(cvd::Status::INTERNAL);
-  if (infop.si_code == CLD_EXITED) {
-    response.mutable_status()->set_message("Exited with code " +
-                                           std::to_string(infop.si_status));
-  } else if (infop.si_code == CLD_KILLED) {
-    response.mutable_status()->set_message("Exited with signal " +
-                                           std::to_string(infop.si_status));
-  } else {
-    response.mutable_status()->set_message("Quit with code " +
-                                           std::to_string(infop.si_status));
-  }
-  return response;
-}
-
-Result<void> CvdCommandHandler::Interrupt() {
-  std::scoped_lock interrupt_lock(interruptible_);
-  if (subprocess_) {
-    auto stop_result = subprocess_->Stop();
-    switch (stop_result) {
-      case StopperResult::kStopFailure:
-        return CF_ERR("Failed to stop subprocess");
-      case StopperResult::kStopCrash:
-        return CF_ERR("Stopper caused process to crash");
-      case StopperResult::kStopSuccess:
-        return {};
-      default:
-        return CF_ERR("Unknown stop result: " << (uint64_t)stop_result);
-    }
-  }
-  return {};
-}
-
-CommandInvocation ParseInvocation(const cvd::Request& request) {
-  CommandInvocation invocation;
-  if (request.contents_case() != cvd::Request::ContentsCase::kCommandRequest) {
-    return invocation;
-  }
-  if (request.command_request().args_size() == 0) {
-    return invocation;
-  }
-  for (const std::string& arg : request.command_request().args()) {
-    invocation.arguments.push_back(arg);
-  }
-  invocation.arguments[0] = cpp_basename(invocation.arguments[0]);
-  if (invocation.arguments[0] == "cvd") {
-    if (invocation.arguments.size() == 1) {
-      // Show help if user invokes `cvd` alone.
-      invocation.command = "help";
-      invocation.arguments = {};
-    } else {  // More arguments
-      invocation.command = invocation.arguments[1];
-      invocation.arguments.erase(invocation.arguments.begin());
-      invocation.arguments.erase(invocation.arguments.begin());
-    }
-  } else {
-    invocation.command = invocation.arguments[0];
-    invocation.arguments.erase(invocation.arguments.begin());
-  }
-  return invocation;
-}
-
-fruit::Component<fruit::Required<InstanceManager>> cvdCommandComponent() {
-  return fruit::createComponent()
-      .addMultibinding<CvdServerHandler, CvdCommandHandler>();
-}
-
-}  // namespace cuttlefish
diff --git a/host/commands/cvd/server_command/Android.bp b/host/commands/cvd/server_command/Android.bp
new file mode 100644
index 0000000..a637dc0
--- /dev/null
+++ b/host/commands/cvd/server_command/Android.bp
@@ -0,0 +1,60 @@
+//
+// Copyright (C) 2023 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+cc_library_host_static {
+    name: "libcvd_sub_commands",
+    srcs: [
+        "utils.cpp",
+        "acloud.cpp",
+        "acloud_command.cpp",
+        "acloud_common.cpp",
+        "acloud_translator.cpp",
+        "cmd_list.cpp",
+        "components.cpp",
+        "crosvm.cpp",
+        "display.cpp",
+        "env.cpp",
+        "fetch.cpp",
+        "flags_collector.cpp",
+        "fleet.cpp",
+        "host_tool_target.cpp",
+        "host_tool_target_manager.cpp",
+        "generic.cpp",
+        "handler_proxy.cpp",
+        "help.cpp",
+        "load_configs.cpp",
+        "operation_to_bins_map.cpp",
+        "power.cpp",
+        "reset.cpp",
+        "restart.cpp",
+        "serial_launch.cpp",
+        "serial_preset.cpp",
+        "shutdown.cpp",
+        "start.cpp",
+        "start_impl.cpp",
+        "subprocess_waiter.cpp",
+        "try_acloud.cpp",
+        "version.cpp",
+    ],
+    static_libs: [
+        "libcvd_acloud",
+        "libcvd_selector",
+    ],
+    defaults: ["cvd_lib_defaults"],
+}
diff --git a/host/commands/cvd/server_command/acloud.cpp b/host/commands/cvd/server_command/acloud.cpp
new file mode 100644
index 0000000..df1d73d
--- /dev/null
+++ b/host/commands/cvd/server_command/acloud.cpp
@@ -0,0 +1,42 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/commands/cvd/server_command/acloud.h"
+
+#include <atomic>
+
+#include <fruit/fruit.h>
+
+#include "host/commands/cvd/acloud/converter.h"
+#include "host/commands/cvd/server_command/acloud.h"
+#include "host/commands/cvd/server_command/acloud_command.h"
+#include "host/commands/cvd/server_command/acloud_translator.h"
+#include "host/commands/cvd/server_command/try_acloud.h"
+
+namespace cuttlefish {
+
+fruit::Component<fruit::Required<
+    CommandSequenceExecutor,
+    fruit::Annotated<AcloudTranslatorOptOut, std::atomic<bool>>>>
+CvdAcloudComponent() {
+  return fruit::createComponent()
+      .install(AcloudCreateConvertCommandComponent)
+      .install(AcloudCommandComponent)
+      .install(TryAcloudCommandComponent)
+      .install(AcloudTranslatorCommandComponent);
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/server_command/acloud.h b/host/commands/cvd/server_command/acloud.h
new file mode 100644
index 0000000..c476165
--- /dev/null
+++ b/host/commands/cvd/server_command/acloud.h
@@ -0,0 +1,33 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <atomic>
+
+#include <fruit/fruit.h>
+
+#include "host/commands/cvd/command_sequence.h"
+#include "host/commands/cvd/server_command/acloud_common.h"
+
+namespace cuttlefish {
+
+fruit::Component<fruit::Required<
+    CommandSequenceExecutor,
+    fruit::Annotated<AcloudTranslatorOptOut, std::atomic<bool>>>>
+CvdAcloudComponent();
+
+}
diff --git a/host/commands/cvd/server_command/acloud_command.cpp b/host/commands/cvd/server_command/acloud_command.cpp
new file mode 100644
index 0000000..d42066c
--- /dev/null
+++ b/host/commands/cvd/server_command/acloud_command.cpp
@@ -0,0 +1,168 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/commands/cvd/server_command/acloud_command.h"
+
+#include <atomic>
+#include <mutex>
+
+#include <android-base/file.h>
+#include <android-base/strings.h>
+#include <fruit/fruit.h>
+
+#include "common/libs/fs/shared_buf.h"
+#include "common/libs/fs/shared_fd.h"
+#include "common/libs/utils/result.h"
+#include "cvd_server.pb.h"
+#include "host/commands/cvd/instance_lock.h"
+#include "host/commands/cvd/server_command/acloud_common.h"
+#include "host/commands/cvd/server_command/server_handler.h"
+#include "host/commands/cvd/server_command/utils.h"
+#include "host/commands/cvd/types.h"
+
+namespace cuttlefish {
+
+class AcloudCommand : public CvdServerHandler {
+ public:
+  INJECT(AcloudCommand(CommandSequenceExecutor& executor,
+                       ConvertAcloudCreateCommand& converter))
+      : executor_(executor), converter_(converter) {}
+  ~AcloudCommand() = default;
+
+  Result<bool> CanHandle(const RequestWithStdio& request) const override {
+    auto invocation = ParseInvocation(request.Message());
+    if (invocation.arguments.size() >= 2) {
+      if (invocation.command == "acloud" &&
+          invocation.arguments[0] == "translator") {
+        return false;
+      }
+    }
+    return invocation.command == "acloud";
+  }
+
+  cvd_common::Args CmdList() const override { return {"acloud"}; }
+
+  Result<cvd::Response> Handle(const RequestWithStdio& request) override {
+    std::unique_lock interrupt_lock(interrupt_mutex_);
+    if (interrupted_) {
+      return CF_ERR("Interrupted");
+    }
+    CF_EXPECT(CanHandle(request));
+    CF_EXPECT(IsSubOperationSupported(request));
+    auto converted = CF_EXPECT(converter_.Convert(request));
+    interrupt_lock.unlock();
+    CF_EXPECT(executor_.Execute(converted.prep_requests, request.Err()));
+    auto start_response =
+        CF_EXPECT(executor_.ExecuteOne(converted.start_request, request.Err()));
+
+    if (converter_.FetchCommandString() != "") {
+      // has cvd fetch command, update the fetch cvd command file
+      using android::base::WriteStringToFile;
+      CF_EXPECT(WriteStringToFile(converter_.FetchCommandString(),
+                                  converter_.FetchCvdArgsFile()),
+                true);
+    }
+
+    auto handle_response_result = HandleStartResponse(start_response);
+    if (handle_response_result.ok()) {
+      // print
+      std::optional<SharedFD> fd_opt;
+      if (converter_.Verbose()) {
+        fd_opt = request.Err();
+      }
+      auto write_result = PrintBriefSummary(*handle_response_result, fd_opt);
+      if (!write_result.ok()) {
+        LOG(ERROR) << "Failed to write the start response report.";
+      }
+    } else {
+      LOG(ERROR) << "Failed to analyze the cvd start response.";
+    }
+    cvd::Response response;
+    response.mutable_command_response();
+    return response;
+  }
+  Result<void> Interrupt() override {
+    std::scoped_lock interrupt_lock(interrupt_mutex_);
+    interrupted_ = true;
+    CF_EXPECT(executor_.Interrupt());
+    return {};
+  }
+
+ private:
+  Result<cvd::InstanceGroupInfo> HandleStartResponse(
+      const cvd::Response& start_response);
+  Result<void> PrintBriefSummary(const cvd::InstanceGroupInfo& group_info,
+                                 std::optional<SharedFD> stream_fd) const;
+
+  CommandSequenceExecutor& executor_;
+  ConvertAcloudCreateCommand& converter_;
+
+  std::mutex interrupt_mutex_;
+  bool interrupted_ = false;
+};
+
+Result<cvd::InstanceGroupInfo> AcloudCommand::HandleStartResponse(
+    const cvd::Response& start_response) {
+  CF_EXPECT(start_response.has_command_response(),
+            "cvd start did ont return a command response.");
+  const auto& start_command_response = start_response.command_response();
+  CF_EXPECT(start_command_response.has_instance_group_info(),
+            "cvd start command response did not return instance_group_info.");
+  cvd::InstanceGroupInfo group_info =
+      start_command_response.instance_group_info();
+  return group_info;
+}
+
+Result<void> AcloudCommand::PrintBriefSummary(
+    const cvd::InstanceGroupInfo& group_info,
+    std::optional<SharedFD> stream_fd) const {
+  if (!stream_fd) {
+    return {};
+  }
+  SharedFD fd = *stream_fd;
+  std::stringstream ss;
+  const std::string& group_name = group_info.group_name();
+  CF_EXPECT_EQ(group_info.home_directories().size(), 1);
+  const std::string home_dir = (group_info.home_directories())[0];
+  std::vector<std::string> instance_names;
+  std::vector<unsigned> instance_ids;
+  instance_names.reserve(group_info.instances().size());
+  instance_ids.reserve(group_info.instances().size());
+  for (const auto& instance : group_info.instances()) {
+    instance_names.push_back(instance.name());
+    instance_ids.push_back(instance.instance_id());
+  }
+  ss << std::endl << "Created instance group: " << group_name << std::endl;
+  for (size_t i = 0; i != instance_ids.size(); i++) {
+    std::string device_name = group_name + "-" + instance_names[i];
+    ss << "  " << device_name << " (local-instance-" << instance_ids[i] << ")"
+       << std::endl;
+  }
+  ss << std::endl
+     << "acloud list or cvd fleet for more information." << std::endl;
+  auto n_write = WriteAll(*stream_fd, ss.str());
+  CF_EXPECT_EQ(n_write, ss.str().size());
+  return {};
+}
+
+fruit::Component<
+    fruit::Required<CommandSequenceExecutor, ConvertAcloudCreateCommand>>
+AcloudCommandComponent() {
+  return fruit::createComponent()
+      .addMultibinding<CvdServerHandler, AcloudCommand>();
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/server_command/acloud_command.h b/host/commands/cvd/server_command/acloud_command.h
new file mode 100644
index 0000000..a0a87e1
--- /dev/null
+++ b/host/commands/cvd/server_command/acloud_command.h
@@ -0,0 +1,32 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <atomic>
+
+#include <fruit/fruit.h>
+
+#include "host/commands/cvd/acloud/converter.h"
+#include "host/commands/cvd/command_sequence.h"
+
+namespace cuttlefish {
+
+fruit::Component<
+    fruit::Required<CommandSequenceExecutor, ConvertAcloudCreateCommand>>
+AcloudCommandComponent();
+
+}
diff --git a/host/commands/cvd/server_command/acloud_common.cpp b/host/commands/cvd/server_command/acloud_common.cpp
new file mode 100644
index 0000000..4c343bb
--- /dev/null
+++ b/host/commands/cvd/server_command/acloud_common.cpp
@@ -0,0 +1,31 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/commands/cvd/server_command/acloud_common.h"
+
+#include "host/commands/cvd/server_command/utils.h"
+
+namespace cuttlefish {
+
+bool IsSubOperationSupported(const RequestWithStdio& request) {
+  auto invocation = ParseInvocation(request.Message());
+  if (invocation.arguments.empty()) {
+    return false;
+  }
+  return invocation.arguments[0] == "create";
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/server_command/acloud_common.h b/host/commands/cvd/server_command/acloud_common.h
new file mode 100644
index 0000000..820973f
--- /dev/null
+++ b/host/commands/cvd/server_command/acloud_common.h
@@ -0,0 +1,27 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include "host/commands/cvd/server_client.h"
+
+namespace cuttlefish {
+
+struct AcloudTranslatorOptOut {};
+
+bool IsSubOperationSupported(const RequestWithStdio& request);
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/server_command/acloud_translator.cpp b/host/commands/cvd/server_command/acloud_translator.cpp
new file mode 100644
index 0000000..7368540
--- /dev/null
+++ b/host/commands/cvd/server_command/acloud_translator.cpp
@@ -0,0 +1,108 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/commands/cvd/server_command/acloud_translator.h"
+
+#include <mutex>
+
+#include <fruit/fruit.h>
+
+#include "common/libs/fs/shared_buf.h"
+#include "common/libs/utils/flag_parser.h"
+#include "common/libs/utils/result.h"
+#include "cvd_server.pb.h"
+#include "host/commands/cvd/server_client.h"
+#include "host/commands/cvd/server_command/server_handler.h"
+#include "host/commands/cvd/server_command/utils.h"
+#include "host/commands/cvd/types.h"
+
+namespace cuttlefish {
+
+static constexpr char kTranslatorHelpMessage[] =
+    R"(Cuttlefish Virtual Device (CVD) CLI.
+
+usage: cvd acloud translator <args>
+
+Args:
+  --opt-out              Opt-out CVD Acloud and choose to run original Python Acloud.
+  --opt-in               Opt-in and run CVD Acloud as default.
+Both -opt-out and --opt-in are mutually exclusive.
+)";
+
+class AcloudTranslatorCommand : public CvdServerHandler {
+ public:
+  INJECT(AcloudTranslatorCommand(ANNOTATED(AcloudTranslatorOptOut,
+                                           std::atomic<bool>&) optout))
+      : optout_(optout) {}
+  ~AcloudTranslatorCommand() = default;
+
+  Result<bool> CanHandle(const RequestWithStdio& request) const override {
+    auto invocation = ParseInvocation(request.Message());
+    if (invocation.arguments.size() >= 2) {
+      if (invocation.command == "acloud" &&
+          invocation.arguments[0] == "translator") {
+        return true;
+      }
+    }
+    return false;
+  }
+
+  cvd_common::Args CmdList() const override { return {}; }
+
+  Result<cvd::Response> Handle(const RequestWithStdio& request) override {
+    CF_EXPECT(CanHandle(request));
+    auto invocation = ParseInvocation(request.Message());
+    if (invocation.arguments.empty() || invocation.arguments.size() < 2) {
+      return CF_ERR("Translator command not support");
+    }
+
+    // cvd acloud translator --opt-out
+    // cvd acloud translator --opt-in
+    cvd::Response response;
+    response.mutable_command_response();
+    bool help = false;
+    bool flag_optout = false;
+    bool flag_optin = false;
+    std::vector<Flag> translator_flags = {
+        GflagsCompatFlag("help", help),
+        GflagsCompatFlag("opt-out", flag_optout),
+        GflagsCompatFlag("opt-in", flag_optin),
+    };
+    CF_EXPECT(ParseFlags(translator_flags, invocation.arguments),
+              "Failed to process translator flag.");
+    if (help) {
+      WriteAll(request.Out(), kTranslatorHelpMessage);
+      return response;
+    }
+    CF_EXPECT(flag_optout != flag_optin,
+              "Only one of --opt-out or --opt-in should be given.");
+    optout_ = flag_optout;
+    return response;
+  }
+  Result<void> Interrupt() override { return CF_ERR("Can't be interrupted."); }
+
+ private:
+  std::atomic<bool>& optout_;
+};
+
+fruit::Component<fruit::Required<
+    fruit::Annotated<AcloudTranslatorOptOut, std::atomic<bool>>>>
+AcloudTranslatorCommandComponent() {
+  return fruit::createComponent()
+      .addMultibinding<CvdServerHandler, AcloudTranslatorCommand>();
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/server_command/acloud_translator.h b/host/commands/cvd/server_command/acloud_translator.h
new file mode 100644
index 0000000..45232a5
--- /dev/null
+++ b/host/commands/cvd/server_command/acloud_translator.h
@@ -0,0 +1,31 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <atomic>
+
+#include <fruit/fruit.h>
+
+#include "host/commands/cvd/server_command/acloud_common.h"
+
+namespace cuttlefish {
+
+fruit::Component<fruit::Required<
+    fruit::Annotated<AcloudTranslatorOptOut, std::atomic<bool>>>>
+AcloudTranslatorCommandComponent();
+
+}
diff --git a/host/commands/cvd/server_command/cmd_list.cpp b/host/commands/cvd/server_command/cmd_list.cpp
new file mode 100644
index 0000000..3031f0c
--- /dev/null
+++ b/host/commands/cvd/server_command/cmd_list.cpp
@@ -0,0 +1,85 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/commands/cvd/server_command/cmd_list.h"
+
+#include <mutex>
+#include <vector>
+
+#include <android-base/strings.h>
+
+#include "common/libs/fs/shared_buf.h"
+#include "common/libs/utils/json.h"
+#include "host/commands/cvd/server_command/server_handler.h"
+#include "host/commands/cvd/server_command/utils.h"
+#include "host/commands/cvd/types.h"
+
+namespace cuttlefish {
+
+class CvdCmdlistHandler : public CvdServerHandler {
+ public:
+  INJECT(CvdCmdlistHandler(CommandSequenceExecutor& executor))
+      : executor_(executor) {}
+
+  Result<bool> CanHandle(const RequestWithStdio& request) const override {
+    auto invocation = ParseInvocation(request.Message());
+    return (invocation.command == "cmd-list");
+  }
+
+  Result<cvd::Response> Handle(const RequestWithStdio& request) override {
+    std::lock_guard interrupt_lock(interruptible_);
+    CF_EXPECT(!interrupted_, "Interrupted");
+
+    cvd::Response response;
+    response.mutable_command_response();  // Sets oneof member
+    response.mutable_status()->set_code(cvd::Status::OK);
+
+    CF_EXPECT(CanHandle(request));
+
+    auto [subcmd, subcmd_args] = ParseInvocation(request.Message());
+    const auto subcmds = executor_.CmdList();
+
+    std::vector<std::string> subcmds_vec{subcmds.begin(), subcmds.end()};
+    const auto subcmds_str = android::base::Join(subcmds_vec, ",");
+    Json::Value subcmd_info;
+    subcmd_info["subcmd"] = subcmds_str;
+    WriteAll(request.Out(), subcmd_info.toStyledString());
+    return response;
+  }
+
+  Result<void> Interrupt() override {
+    std::scoped_lock interrupt_lock(interruptible_);
+    interrupted_ = true;
+    CF_EXPECT(executor_.Interrupt());
+    return {};
+  }
+
+  // not intended to be used by the user
+  cvd_common::Args CmdList() const override { return {}; }
+
+ private:
+  std::mutex interruptible_;
+  bool interrupted_ = false;
+  CommandSequenceExecutor& executor_;
+};
+
+fruit::Component<fruit::Required<CommandSequenceExecutor>>
+CvdCmdlistComponent() {
+  return fruit::createComponent()
+      .addMultibinding<CvdServerHandler, CvdCmdlistHandler>();
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/server_command/cmd_list.h b/host/commands/cvd/server_command/cmd_list.h
new file mode 100644
index 0000000..f212821
--- /dev/null
+++ b/host/commands/cvd/server_command/cmd_list.h
@@ -0,0 +1,28 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <fruit/fruit.h>
+
+#include "host/commands/cvd/command_sequence.h"
+
+namespace cuttlefish {
+
+fruit::Component<fruit::Required<CommandSequenceExecutor>>
+CvdCmdlistComponent();
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/server_command/components.cpp b/host/commands/cvd/server_command/components.cpp
new file mode 100644
index 0000000..10a90d5
--- /dev/null
+++ b/host/commands/cvd/server_command/components.cpp
@@ -0,0 +1,31 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/commands/cvd/server_command/components.h"
+
+#include "host/commands/cvd/server_command/fetch.h"
+#include "host/commands/cvd/server_command/fleet.h"
+
+namespace cuttlefish {
+
+fruit::Component<fruit::Required<InstanceManager, SubprocessWaiter>>
+cvdCommandComponent() {
+  return fruit::createComponent()
+      .install(cvdFleetCommandComponent)
+      .install(cvdFetchCommandComponent);
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/server_command/components.h b/host/commands/cvd/server_command/components.h
new file mode 100644
index 0000000..5d91d67
--- /dev/null
+++ b/host/commands/cvd/server_command/components.h
@@ -0,0 +1,43 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <fruit/fruit.h>
+
+#include "host/commands/cvd/command_sequence.h"
+#include "host/commands/cvd/instance_manager.h"
+#include "host/commands/cvd/server.h"
+#include "host/commands/cvd/server_command/generic.h"
+#include "host/commands/cvd/server_command/subprocess_waiter.h"
+#include "host/libs/web/build_api.h"
+
+namespace cuttlefish {
+
+fruit::Component<fruit::Required<CommandSequenceExecutor>> CvdHelpComponent();
+
+fruit::Component<fruit::Required<InstanceManager, SubprocessWaiter>>
+cvdCommandComponent();
+
+fruit::Component<fruit::Required<BuildApi, CvdServer, InstanceManager>>
+CvdRestartComponent();
+
+fruit::Component<fruit::Required<CvdServer, InstanceManager>>
+cvdShutdownComponent();
+
+fruit::Component<> cvdVersionComponent();
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/server_command/crosvm.cpp b/host/commands/cvd/server_command/crosvm.cpp
new file mode 100644
index 0000000..6e72f95
--- /dev/null
+++ b/host/commands/cvd/server_command/crosvm.cpp
@@ -0,0 +1,158 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/commands/cvd/server_command/crosvm.h"
+
+#include <android-base/strings.h>
+
+#include <iostream>
+#include <mutex>
+#include <optional>
+#include <sstream>
+#include <string>
+#include <vector>
+
+#include "common/libs/fs/shared_buf.h"
+#include "common/libs/utils/contains.h"
+#include "common/libs/utils/subprocess.h"
+#include "host/commands/cvd/flag.h"
+#include "host/commands/cvd/selector/instance_group_record.h"
+#include "host/commands/cvd/selector/instance_record.h"
+#include "host/commands/cvd/selector/selector_constants.h"
+#include "host/commands/cvd/server_command/server_handler.h"
+#include "host/commands/cvd/server_command/utils.h"
+#include "host/commands/cvd/types.h"
+
+namespace cuttlefish {
+
+class CvdCrosVmCommandHandler : public CvdServerHandler {
+ public:
+  INJECT(CvdCrosVmCommandHandler(InstanceManager& instance_manager,
+                                 SubprocessWaiter& subprocess_waiter))
+      : instance_manager_{instance_manager},
+        subprocess_waiter_(subprocess_waiter),
+        crosvm_operations_{"suspend", "resume", "snapshot"} {}
+
+  Result<bool> CanHandle(const RequestWithStdio& request) const {
+    auto invocation = ParseInvocation(request.Message());
+    return Contains(crosvm_operations_, invocation.command);
+  }
+
+  Result<cvd::Response> Handle(const RequestWithStdio& request) override {
+    std::unique_lock interrupt_lock(interruptible_);
+    CF_EXPECT(!interrupted_, "Interrupted");
+    CF_EXPECT(CanHandle(request));
+    CF_EXPECT(VerifyPrecondition(request));
+    const uid_t uid = request.Credentials()->uid;
+    cvd_common::Envs envs =
+        cvd_common::ConvertToEnvs(request.Message().command_request().env());
+
+    auto [crosvm_op, subcmd_args] = ParseInvocation(request.Message());
+    /*
+     * crosvm suspend/resume/snapshot support --help only. Not --helpxml, etc.
+     *
+     * Otherwise, IsHelpSubcmd() should be used here instead.
+     */
+    auto help_flag = CvdFlag("help", false);
+    cvd_common::Args subcmd_args_copy{subcmd_args};
+    auto help_parse_result = help_flag.CalculateFlag(subcmd_args_copy);
+    bool is_help = help_parse_result.ok() && (*help_parse_result);
+
+    Command command =
+        is_help ? CF_EXPECT(HelpCommand(request, crosvm_op, subcmd_args, envs))
+                : CF_EXPECT(NonHelpCommand(request, uid, crosvm_op, subcmd_args,
+                                           envs));
+    SubprocessOptions options;
+    CF_EXPECT(subprocess_waiter_.Setup(command.Start(options)));
+    interrupt_lock.unlock();
+
+    auto infop = CF_EXPECT(subprocess_waiter_.Wait());
+    return ResponseFromSiginfo(infop);
+  }
+
+  Result<void> Interrupt() override {
+    std::scoped_lock interrupt_lock(interruptible_);
+    interrupted_ = true;
+    CF_EXPECT(subprocess_waiter_.Interrupt());
+    return {};
+  }
+  cvd_common::Args CmdList() const override {
+    return cvd_common::Args(crosvm_operations_.begin(),
+                            crosvm_operations_.end());
+  }
+
+ private:
+  Result<Command> HelpCommand(const RequestWithStdio& request,
+                              const std::string& crosvm_op,
+                              const cvd_common::Args& subcmd_args,
+                              cvd_common::Envs envs) {
+    CF_EXPECT(Contains(envs, kAndroidHostOut));
+    cvd_common::Args crosvm_args{crosvm_op};
+    crosvm_args.insert(crosvm_args.end(), subcmd_args.begin(),
+                       subcmd_args.end());
+    return CF_EXPECT(
+        ConstructCvdHelpCommand("crosvm", envs, crosvm_args, request));
+  }
+
+  Result<Command> NonHelpCommand(const RequestWithStdio& request,
+                                 const uid_t uid, const std::string& crosvm_op,
+                                 const cvd_common::Args& subcmd_args,
+                                 const cvd_common::Envs& envs) {
+    const auto& selector_opts =
+        request.Message().command_request().selector_opts();
+    const auto selector_args = cvd_common::ConvertToArgs(selector_opts.args());
+
+    auto instance =
+        CF_EXPECT(instance_manager_.SelectInstance(selector_args, envs, uid));
+    const auto& instance_group = instance.ParentGroup();
+    const auto instance_id = instance.InstanceId();
+    auto home = instance_group.HomeDir();
+    const auto socket_file_path =
+        ConcatToString(home, "/cuttlefish_runtime.", instance_id,
+                       "/internal/"
+                       "crosvm_control.sock");
+
+    auto android_host_out = instance_group.HostArtifactsPath();
+    auto crosvm_bin_path = ConcatToString(android_host_out, "/bin/crosvm");
+
+    cvd_common::Args crosvm_args{crosvm_op};
+    crosvm_args.insert(crosvm_args.end(), subcmd_args.begin(),
+                       subcmd_args.end());
+    crosvm_args.push_back(socket_file_path);
+    return CF_EXPECT(
+        ConstructCvdGenericNonHelpCommand({.bin_file = "crosvm",
+                                           .envs = envs,
+                                           .cmd_args = std::move(crosvm_args),
+                                           .android_host_out = android_host_out,
+                                           .home = home,
+                                           .verbose = true},
+                                          request));
+  }
+
+  InstanceManager& instance_manager_;
+  SubprocessWaiter& subprocess_waiter_;
+  std::mutex interruptible_;
+  bool interrupted_ = false;
+  std::vector<std::string> crosvm_operations_;
+};
+
+fruit::Component<fruit::Required<InstanceManager, SubprocessWaiter>>
+CvdCrosVmComponent() {
+  return fruit::createComponent()
+      .addMultibinding<CvdServerHandler, CvdCrosVmCommandHandler>();
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/server_command/crosvm.h b/host/commands/cvd/server_command/crosvm.h
new file mode 100644
index 0000000..2d12b91
--- /dev/null
+++ b/host/commands/cvd/server_command/crosvm.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <fruit/fruit.h>
+
+#include "host/commands/cvd/instance_manager.h"
+#include "host/commands/cvd/server_command/subprocess_waiter.h"
+
+namespace cuttlefish {
+
+fruit::Component<fruit::Required<InstanceManager, SubprocessWaiter>>
+CvdCrosVmComponent();
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/server_command/display.cpp b/host/commands/cvd/server_command/display.cpp
new file mode 100644
index 0000000..30a3844
--- /dev/null
+++ b/host/commands/cvd/server_command/display.cpp
@@ -0,0 +1,195 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/commands/cvd/server_command/display.h"
+
+#include <android-base/strings.h>
+
+#include <iostream>
+#include <mutex>
+#include <optional>
+#include <sstream>
+#include <string>
+#include <vector>
+
+#include "common/libs/fs/shared_buf.h"
+#include "common/libs/utils/contains.h"
+#include "common/libs/utils/subprocess.h"
+#include "host/commands/cvd/flag.h"
+#include "host/commands/cvd/selector/instance_database_types.h"
+#include "host/commands/cvd/selector/instance_group_record.h"
+#include "host/commands/cvd/selector/instance_record.h"
+#include "host/commands/cvd/selector/selector_constants.h"
+#include "host/commands/cvd/server_command/server_handler.h"
+#include "host/commands/cvd/server_command/utils.h"
+#include "host/commands/cvd/types.h"
+
+namespace cuttlefish {
+
+class CvdDisplayCommandHandler : public CvdServerHandler {
+ public:
+  INJECT(CvdDisplayCommandHandler(InstanceManager& instance_manager,
+                                  SubprocessWaiter& subprocess_waiter))
+      : instance_manager_{instance_manager},
+        subprocess_waiter_(subprocess_waiter),
+        cvd_display_operations_{"display"} {}
+
+  Result<bool> CanHandle(const RequestWithStdio& request) const {
+    auto invocation = ParseInvocation(request.Message());
+    return Contains(cvd_display_operations_, invocation.command);
+  }
+
+  Result<cvd::Response> Handle(const RequestWithStdio& request) override {
+    std::unique_lock interrupt_lock(interruptible_);
+    CF_EXPECT(!interrupted_, "Interrupted");
+    CF_EXPECT(CanHandle(request));
+    CF_EXPECT(VerifyPrecondition(request));
+    const uid_t uid = request.Credentials()->uid;
+    cvd_common::Envs envs =
+        cvd_common::ConvertToEnvs(request.Message().command_request().env());
+
+    auto [_, subcmd_args] = ParseInvocation(request.Message());
+
+    bool is_help = IsHelp(subcmd_args);
+    // may modify subcmd_args by consuming in parsing
+    Command command =
+        is_help ? CF_EXPECT(HelpCommand(request, uid, subcmd_args, envs))
+                : CF_EXPECT(NonHelpCommand(request, uid, subcmd_args, envs));
+    SubprocessOptions options;
+    CF_EXPECT(subprocess_waiter_.Setup(command.Start(options)));
+    interrupt_lock.unlock();
+
+    auto infop = CF_EXPECT(subprocess_waiter_.Wait());
+    return ResponseFromSiginfo(infop);
+  }
+
+  Result<void> Interrupt() override {
+    std::scoped_lock interrupt_lock(interruptible_);
+    interrupted_ = true;
+    CF_EXPECT(subprocess_waiter_.Interrupt());
+    return {};
+  }
+
+  cvd_common::Args CmdList() const override {
+    return cvd_common::Args(cvd_display_operations_.begin(),
+                            cvd_display_operations_.end());
+  }
+
+ private:
+  Result<Command> HelpCommand(const RequestWithStdio& request, const uid_t uid,
+                              const cvd_common::Args& subcmd_args,
+                              cvd_common::Envs envs) {
+    CF_EXPECT(Contains(envs, kAndroidHostOut));
+    auto cvd_display_bin_path =
+        ConcatToString(envs.at(kAndroidHostOut), "/bin/", kDisplayBin);
+    std::string home = Contains(envs, "HOME")
+                           ? envs.at("HOME")
+                           : CF_EXPECT(SystemWideUserHome(uid));
+    envs["HOME"] = home;
+    envs[kAndroidSoongHostOut] = envs.at(kAndroidHostOut);
+    ConstructCommandParam construct_cmd_param{
+        .bin_path = cvd_display_bin_path,
+        .home = home,
+        .args = subcmd_args,
+        .envs = envs,
+        .working_dir = request.Message().command_request().working_directory(),
+        .command_name = kDisplayBin,
+        .in = request.In(),
+        .out = request.Out(),
+        .err = request.Err()};
+    Command command = CF_EXPECT(ConstructCommand(construct_cmd_param));
+    return command;
+  }
+
+  Result<Command> NonHelpCommand(const RequestWithStdio& request,
+                                 const uid_t uid, cvd_common::Args& subcmd_args,
+                                 cvd_common::Envs envs) {
+    // test if there is --instance_num flag
+    CvdFlag<std::int32_t> instance_num_flag("instance_num");
+    auto instance_num_opt =
+        CF_EXPECT(instance_num_flag.FilterFlag(subcmd_args));
+    selector::Queries extra_queries;
+    if (instance_num_opt) {
+      extra_queries.emplace_back(selector::kInstanceIdField, *instance_num_opt);
+    }
+
+    const auto& selector_opts =
+        request.Message().command_request().selector_opts();
+    const auto selector_args = cvd_common::ConvertToArgs(selector_opts.args());
+
+    auto instance = CF_EXPECT(instance_manager_.SelectInstance(
+        selector_args, extra_queries, envs, uid));
+    const auto& instance_group = instance.ParentGroup();
+    const auto& home = instance_group.HomeDir();
+
+    const auto& android_host_out = instance_group.HostArtifactsPath();
+    auto cvd_display_bin_path =
+        ConcatToString(android_host_out, "/bin/", kDisplayBin);
+
+    cvd_common::Args cvd_env_args{subcmd_args};
+    cvd_env_args.push_back(
+        ConcatToString("--instance_num=", instance.InstanceId()));
+    envs["HOME"] = home;
+    envs[kAndroidHostOut] = android_host_out;
+    envs[kAndroidSoongHostOut] = android_host_out;
+
+    std::stringstream command_to_issue;
+    command_to_issue << "HOME=" << home << " " << kAndroidHostOut << "="
+                     << android_host_out << " " << kAndroidSoongHostOut << "="
+                     << android_host_out << " " << cvd_display_bin_path << " ";
+    for (const auto& arg : cvd_env_args) {
+      command_to_issue << arg << " ";
+    }
+    WriteAll(request.Err(), command_to_issue.str());
+
+    ConstructCommandParam construct_cmd_param{
+        .bin_path = cvd_display_bin_path,
+        .home = home,
+        .args = cvd_env_args,
+        .envs = envs,
+        .working_dir = request.Message().command_request().working_directory(),
+        .command_name = kDisplayBin,
+        .in = request.In(),
+        .out = request.Out(),
+        .err = request.Err()};
+    Command command = CF_EXPECT(ConstructCommand(construct_cmd_param));
+    return command;
+  }
+
+  bool IsHelp(const cvd_common::Args& cmd_args) const {
+    // cvd display --help, --helpxml, etc or simply cvd display
+    if (cmd_args.empty() || IsHelpSubcmd(cmd_args)) {
+      return true;
+    }
+    // cvd display help <subcommand> format
+    return (cmd_args.front() == "help");
+  }
+
+  InstanceManager& instance_manager_;
+  SubprocessWaiter& subprocess_waiter_;
+  std::mutex interruptible_;
+  bool interrupted_ = false;
+  std::vector<std::string> cvd_display_operations_;
+  static constexpr char kDisplayBin[] = "cvd_internal_display";
+};
+
+fruit::Component<fruit::Required<InstanceManager, SubprocessWaiter>>
+CvdDisplayComponent() {
+  return fruit::createComponent()
+      .addMultibinding<CvdServerHandler, CvdDisplayCommandHandler>();
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/server_command/display.h b/host/commands/cvd/server_command/display.h
new file mode 100644
index 0000000..e43ee74
--- /dev/null
+++ b/host/commands/cvd/server_command/display.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <fruit/fruit.h>
+
+#include "host/commands/cvd/instance_manager.h"
+#include "host/commands/cvd/server_command/subprocess_waiter.h"
+
+namespace cuttlefish {
+
+fruit::Component<fruit::Required<InstanceManager, SubprocessWaiter>>
+CvdDisplayComponent();
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/server_command/env.cpp b/host/commands/cvd/server_command/env.cpp
new file mode 100644
index 0000000..d66319c
--- /dev/null
+++ b/host/commands/cvd/server_command/env.cpp
@@ -0,0 +1,154 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/commands/cvd/server_command/env.h"
+
+#include <android-base/strings.h>
+
+#include <iostream>
+#include <mutex>
+#include <optional>
+#include <sstream>
+#include <string>
+#include <vector>
+
+#include "common/libs/fs/shared_buf.h"
+#include "common/libs/utils/contains.h"
+#include "common/libs/utils/subprocess.h"
+#include "host/commands/cvd/flag.h"
+#include "host/commands/cvd/selector/instance_group_record.h"
+#include "host/commands/cvd/selector/instance_record.h"
+#include "host/commands/cvd/selector/selector_constants.h"
+#include "host/commands/cvd/server_command/server_handler.h"
+#include "host/commands/cvd/server_command/utils.h"
+#include "host/commands/cvd/types.h"
+
+namespace cuttlefish {
+
+class CvdEnvCommandHandler : public CvdServerHandler {
+ public:
+  INJECT(CvdEnvCommandHandler(InstanceManager& instance_manager,
+                              SubprocessWaiter& subprocess_waiter))
+      : instance_manager_{instance_manager},
+        subprocess_waiter_(subprocess_waiter),
+        cvd_env_operations_{"env"} {}
+
+  Result<bool> CanHandle(const RequestWithStdio& request) const {
+    auto invocation = ParseInvocation(request.Message());
+    return Contains(cvd_env_operations_, invocation.command);
+  }
+
+  Result<cvd::Response> Handle(const RequestWithStdio& request) override {
+    std::unique_lock interrupt_lock(interruptible_);
+    CF_EXPECT(!interrupted_, "Interrupted");
+    CF_EXPECT(CanHandle(request));
+    CF_EXPECT(VerifyPrecondition(request));
+    const uid_t uid = request.Credentials()->uid;
+    cvd_common::Envs envs =
+        cvd_common::ConvertToEnvs(request.Message().command_request().env());
+
+    auto [_, subcmd_args] = ParseInvocation(request.Message());
+
+    /*
+     * cvd_env --help only. Not --helpxml, etc.
+     *
+     * Otherwise, IsHelpSubcmd() should be used here instead.
+     */
+    auto help_flag = CvdFlag("help", false);
+    cvd_common::Args subcmd_args_copy{subcmd_args};
+    auto help_parse_result = help_flag.CalculateFlag(subcmd_args_copy);
+    bool is_help = help_parse_result.ok() && (*help_parse_result);
+
+    Command command =
+        is_help ? CF_EXPECT(HelpCommand(request, subcmd_args, envs))
+                : CF_EXPECT(NonHelpCommand(request, uid, subcmd_args, envs));
+    SubprocessOptions options;
+    CF_EXPECT(subprocess_waiter_.Setup(command.Start(options)));
+    interrupt_lock.unlock();
+
+    auto infop = CF_EXPECT(subprocess_waiter_.Wait());
+    return ResponseFromSiginfo(infop);
+  }
+
+  Result<void> Interrupt() override {
+    std::scoped_lock interrupt_lock(interruptible_);
+    interrupted_ = true;
+    CF_EXPECT(subprocess_waiter_.Interrupt());
+    return {};
+  }
+
+  cvd_common::Args CmdList() const override {
+    return cvd_common::Args(cvd_env_operations_.begin(),
+                            cvd_env_operations_.end());
+  }
+
+ private:
+  Result<Command> HelpCommand(const RequestWithStdio& request,
+                              const cvd_common::Args& subcmd_args,
+                              const cvd_common::Envs& envs) {
+    CF_EXPECT(Contains(envs, kAndroidHostOut));
+    return CF_EXPECT(
+        ConstructCvdHelpCommand(kCvdEnvBin, envs, subcmd_args, request));
+  }
+
+  Result<Command> NonHelpCommand(const RequestWithStdio& request,
+                                 const uid_t uid,
+                                 const cvd_common::Args& subcmd_args,
+                                 const cvd_common::Envs& envs) {
+    const auto& selector_opts =
+        request.Message().command_request().selector_opts();
+    const auto selector_args = cvd_common::ConvertToArgs(selector_opts.args());
+
+    auto instance =
+        CF_EXPECT(instance_manager_.SelectInstance(selector_args, envs, uid));
+    const auto& instance_group = instance.ParentGroup();
+    const auto& home = instance_group.HomeDir();
+
+    const auto& android_host_out = instance_group.HostArtifactsPath();
+    auto cvd_env_bin_path =
+        ConcatToString(android_host_out, "/bin/", kCvdEnvBin);
+    const auto& internal_device_name = instance.InternalDeviceName();
+
+    cvd_common::Args cvd_env_args{internal_device_name};
+    cvd_env_args.insert(cvd_env_args.end(), subcmd_args.begin(),
+                        subcmd_args.end());
+
+    return CF_EXPECT(
+        ConstructCvdGenericNonHelpCommand({.bin_file = kCvdEnvBin,
+                                           .envs = envs,
+                                           .cmd_args = cvd_env_args,
+                                           .android_host_out = android_host_out,
+                                           .home = home,
+                                           .verbose = true},
+                                          request));
+  }
+
+  InstanceManager& instance_manager_;
+  SubprocessWaiter& subprocess_waiter_;
+  std::mutex interruptible_;
+  bool interrupted_ = false;
+  std::vector<std::string> cvd_env_operations_;
+
+  static constexpr char kCvdEnvBin[] = "cvd_internal_env";
+};
+
+fruit::Component<fruit::Required<InstanceManager, SubprocessWaiter>>
+CvdEnvComponent() {
+  return fruit::createComponent()
+      .addMultibinding<CvdServerHandler, CvdEnvCommandHandler>();
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/server_command/env.h b/host/commands/cvd/server_command/env.h
new file mode 100644
index 0000000..2d95147
--- /dev/null
+++ b/host/commands/cvd/server_command/env.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <fruit/fruit.h>
+
+#include "host/commands/cvd/instance_manager.h"
+#include "host/commands/cvd/server_command/subprocess_waiter.h"
+
+namespace cuttlefish {
+
+fruit::Component<fruit::Required<InstanceManager, SubprocessWaiter>>
+CvdEnvComponent();
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/server_command/fetch.cpp b/host/commands/cvd/server_command/fetch.cpp
new file mode 100644
index 0000000..0888107
--- /dev/null
+++ b/host/commands/cvd/server_command/fetch.cpp
@@ -0,0 +1,116 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/commands/cvd/server_command/fetch.h"
+
+#include "common/libs/fs/shared_buf.h"
+#include "common/libs/fs/shared_fd.h"
+#include "common/libs/utils/contains.h"
+#include "common/libs/utils/result.h"
+#include "host/commands/cvd/server_command/server_handler.h"
+#include "host/commands/cvd/server_command/utils.h"
+#include "host/commands/cvd/types.h"
+
+namespace cuttlefish {
+
+class CvdFetchCommandHandler : public CvdServerHandler {
+ public:
+  INJECT(CvdFetchCommandHandler(SubprocessWaiter& subprocess_waiter))
+      : subprocess_waiter_(subprocess_waiter),
+        fetch_cmd_list_{std::vector<std::string>{"fetch", "fetch_cvd"}} {}
+
+  Result<bool> CanHandle(const RequestWithStdio& request) const override;
+  Result<cvd::Response> Handle(const RequestWithStdio& request) override;
+  Result<void> Interrupt() override;
+  cvd_common::Args CmdList() const override { return fetch_cmd_list_; }
+
+ private:
+  SubprocessWaiter& subprocess_waiter_;
+  std::mutex interruptible_;
+  bool interrupted_ = false;
+  std::vector<std::string> fetch_cmd_list_;
+};
+
+Result<bool> CvdFetchCommandHandler::CanHandle(
+    const RequestWithStdio& request) const {
+  auto invocation = ParseInvocation(request.Message());
+  return Contains(fetch_cmd_list_, invocation.command);
+}
+
+Result<cvd::Response> CvdFetchCommandHandler::Handle(
+    const RequestWithStdio& request) {
+  std::unique_lock interrupt_lock(interruptible_);
+  if (interrupted_) {
+    return CF_ERR("Interrupted");
+  }
+  CF_EXPECT(CanHandle(request));
+
+  Command command("/proc/self/exe");
+  command.SetName("fetch_cvd");
+  command.SetExecutable("/proc/self/exe");
+
+  for (const auto& argument : ParseInvocation(request.Message()).arguments) {
+    command.AddParameter(argument);
+  }
+
+  command.RedirectStdIO(Subprocess::StdIOChannel::kStdIn, request.In());
+  command.RedirectStdIO(Subprocess::StdIOChannel::kStdOut, request.Out());
+  command.RedirectStdIO(Subprocess::StdIOChannel::kStdErr, request.Err());
+  SubprocessOptions options;
+
+  const auto& command_request = request.Message().command_request();
+  if (command_request.wait_behavior() == cvd::WAIT_BEHAVIOR_START) {
+    options.ExitWithParent(false);
+  }
+
+  const auto& working_dir = command_request.working_directory();
+  if (!working_dir.empty()) {
+    auto fd = SharedFD::Open(working_dir, O_RDONLY | O_PATH | O_DIRECTORY);
+    if (fd->IsOpen()) {
+      command.SetWorkingDirectory(fd);
+    }
+  }
+
+  CF_EXPECT(subprocess_waiter_.Setup(command.Start(options)));
+
+  if (command_request.wait_behavior() == cvd::WAIT_BEHAVIOR_START) {
+    cvd::Response response;
+    response.mutable_command_response();
+    response.mutable_status()->set_code(cvd::Status::OK);
+    return response;
+  }
+
+  interrupt_lock.unlock();
+
+  auto infop = CF_EXPECT(subprocess_waiter_.Wait());
+
+  return ResponseFromSiginfo(infop);
+}
+
+Result<void> CvdFetchCommandHandler::Interrupt() {
+  std::scoped_lock interrupt_lock(interruptible_);
+  interrupted_ = true;
+  CF_EXPECT(subprocess_waiter_.Interrupt());
+  return {};
+}
+
+fruit::Component<fruit::Required<InstanceManager, SubprocessWaiter>>
+cvdFetchCommandComponent() {
+  return fruit::createComponent()
+      .addMultibinding<CvdServerHandler, CvdFetchCommandHandler>();
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/server_command/fetch.h b/host/commands/cvd/server_command/fetch.h
new file mode 100644
index 0000000..2f91d6d
--- /dev/null
+++ b/host/commands/cvd/server_command/fetch.h
@@ -0,0 +1,31 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <mutex>
+
+#include <fruit/fruit.h>
+
+#include "host/commands/cvd/instance_manager.h"
+#include "host/commands/cvd/server_command/subprocess_waiter.h"
+
+namespace cuttlefish {
+
+fruit::Component<fruit::Required<InstanceManager, SubprocessWaiter>>
+cvdFetchCommandComponent();
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/server_command/flags_collector.cpp b/host/commands/cvd/server_command/flags_collector.cpp
new file mode 100644
index 0000000..245b89c
--- /dev/null
+++ b/host/commands/cvd/server_command/flags_collector.cpp
@@ -0,0 +1,163 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/commands/cvd/server_command/flags_collector.h"
+
+#include <android-base/logging.h>
+#include <libxml/parser.h>
+
+#include "common/libs/utils/contains.h"
+#include "common/libs/utils/scope_guard.h"
+
+namespace cuttlefish {
+namespace {
+
+struct XmlDocDeleter {
+  void operator()(struct _xmlDoc* doc);
+};
+
+using XmlDocPtr = std::unique_ptr<struct _xmlDoc, XmlDocDeleter>;
+
+void XmlDocDeleter::operator()(struct _xmlDoc* doc) {
+  if (!doc) {
+    return;
+  }
+  xmlFree(doc);
+}
+
+/*
+ * Each "flag" xmlNode has child nodes such as file, name, meaning,
+ * type, default, current, etc. Each child xmlNode is a leaf XML node,
+ * which means that each child xmlNode has a child, and that child
+ * keeps the value: e.g. the value of "name" xmlNode is the name
+ * of the flag such as "daemon", "restart_subprocesses", etc.
+ *
+ * For the grandchild xmlNode of a flag xmlNode, we could do this
+ * to retrieve the string value:
+ *  xmlNodeListGetString(doc, grandchild, 1);
+ */
+FlagInfoPtr ParseFlagNode(struct _xmlDoc* doc, xmlNode& flag) {
+  std::unordered_map<std::string, std::string> field_value_map;
+  for (xmlNode* child = flag.xmlChildrenNode; child != nullptr;
+       child = child->next) {
+    if (!child->name) {
+      continue;
+    }
+    std::string field_name = reinterpret_cast<const char*>(child->name);
+    if (!child->xmlChildrenNode) {
+      field_value_map[field_name] = "";
+      continue;
+    }
+    auto* xml_node_text = xmlNodeListGetString(doc, child->xmlChildrenNode, 1);
+    if (!xml_node_text) {
+      field_value_map[field_name] = "";
+      continue;
+    }
+    field_value_map[field_name] = reinterpret_cast<const char*>(xml_node_text);
+  }
+  if (field_value_map.empty()) {
+    return nullptr;
+  }
+  return FlagInfo::Create(field_value_map);
+}
+
+std::vector<FlagInfoPtr> ParseXml(struct _xmlDoc* doc, xmlNode* node) {
+  if (!node) {
+    return {};
+  }
+
+  std::vector<FlagInfoPtr> flags;
+  // if it is <flag> node
+  if (node->name &&
+      xmlStrcmp(node->name, reinterpret_cast<const xmlChar*>("flag")) == 0) {
+    auto flag_info = ParseFlagNode(doc, *node);
+    // we don't assume that a flag node is nested.
+    if (flag_info) {
+      flags.push_back(std::move(flag_info));
+      return flags;
+    }
+    return {};
+  }
+
+  if (!node->xmlChildrenNode) {
+    return {};
+  }
+
+  for (xmlNode* child_node = node->xmlChildrenNode; child_node != nullptr;
+       child_node = child_node->next) {
+    auto child_flags = ParseXml(doc, child_node);
+    if (child_flags.empty()) {
+      continue;
+    }
+    for (auto& child_flag : child_flags) {
+      flags.push_back(std::move(child_flag));
+    }
+  }
+  return flags;
+}
+
+XmlDocPtr BuildXmlDocFromString(const std::string& xml_str) {
+  struct _xmlDoc* doc =
+      xmlReadMemory(xml_str.data(), xml_str.size(), NULL, NULL, 0);
+  XmlDocPtr doc_uniq_ptr = XmlDocPtr(doc, XmlDocDeleter());
+  if (!doc) {
+    LOG(ERROR) << "helpxml parsing failed: " << xml_str;
+    return nullptr;
+  }
+  return doc_uniq_ptr;
+}
+
+std::optional<std::vector<FlagInfoPtr>> LoadFromXml(XmlDocPtr&& doc) {
+  std::vector<FlagInfoPtr> flags;
+  ScopeGuard exit_action([]() { xmlCleanupParser(); });
+  {
+    XmlDocPtr moved_doc = std::move(doc);
+    xmlNode* root = xmlDocGetRootElement(moved_doc.get());
+    if (!root) {
+      LOG(ERROR) << "Failed to get the root element from XML doc.";
+      return std::nullopt;
+    }
+    flags = ParseXml(moved_doc.get(), root);
+  }
+  return flags;
+}
+
+}  // namespace
+
+std::unique_ptr<FlagInfo> FlagInfo::Create(
+    const FlagInfoFieldMap& field_value_map) {
+  if (!Contains(field_value_map, "name") ||
+      field_value_map.at("name").empty()) {
+    return nullptr;
+  }
+  if (!Contains(field_value_map, "type") ||
+      field_value_map.at("type").empty()) {
+    return nullptr;
+  }
+  FlagInfo* new_flag_info = new FlagInfo(field_value_map);
+  return std::unique_ptr<FlagInfo>(new_flag_info);
+}
+
+std::optional<std::vector<FlagInfoPtr>> CollectFlagsFromHelpxml(
+    const std::string& xml_str) {
+  auto helpxml_doc = BuildXmlDocFromString(xml_str);
+  if (!helpxml_doc) {
+    return std::nullopt;
+  }
+  return LoadFromXml(std::move(helpxml_doc));
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/server_command/flags_collector.h b/host/commands/cvd/server_command/flags_collector.h
new file mode 100644
index 0000000..3423d3e
--- /dev/null
+++ b/host/commands/cvd/server_command/flags_collector.h
@@ -0,0 +1,51 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <memory>
+#include <optional>
+#include <string>
+#include <unordered_map>
+#include <vector>
+
+namespace cuttlefish {
+
+class FlagInfo {
+ public:
+  using FlagInfoFieldMap = std::unordered_map<std::string, std::string>;
+  static std::unique_ptr<FlagInfo> Create(
+      const FlagInfoFieldMap& field_value_map);
+  const std::string& Name() const { return name_; }
+  const std::string& Type() const { return type_; }
+
+ private:
+  // field_value_map must have needed fields; guaranteed by the factory
+  // function, static Create().
+  FlagInfo(const FlagInfoFieldMap& field_value_map)
+      : name_(field_value_map.at("name")), type_(field_value_map.at("type")) {}
+
+  // TODO(kwstephenkim): add more fields
+  std::string name_;
+  std::string type_;
+};
+
+using FlagInfoPtr = std::unique_ptr<FlagInfo>;
+
+std::optional<std::vector<FlagInfoPtr>> CollectFlagsFromHelpxml(
+    const std::string& xml_str);
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/server_command/fleet.cpp b/host/commands/cvd/server_command/fleet.cpp
new file mode 100644
index 0000000..4337fd6
--- /dev/null
+++ b/host/commands/cvd/server_command/fleet.cpp
@@ -0,0 +1,144 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/commands/cvd/server_command/fleet.h"
+
+#include <sys/types.h>
+
+#include <mutex>
+
+#include <android-base/file.h>
+
+#include "common/libs/fs/shared_buf.h"
+#include "common/libs/fs/shared_fd.h"
+#include "common/libs/utils/contains.h"
+#include "common/libs/utils/files.h"
+#include "common/libs/utils/result.h"
+#include "host/commands/cvd/server_command/server_handler.h"
+#include "host/commands/cvd/server_command/utils.h"
+#include "host/commands/cvd/types.h"
+#include "host/libs/config/cuttlefish_config.h"
+
+namespace cuttlefish {
+
+class CvdFleetCommandHandler : public CvdServerHandler {
+ public:
+  INJECT(CvdFleetCommandHandler(InstanceManager& instance_manager,
+                                SubprocessWaiter& subprocess_waiter))
+      : instance_manager_(instance_manager),
+        subprocess_waiter_(subprocess_waiter) {}
+
+  Result<bool> CanHandle(const RequestWithStdio& request) const;
+  Result<cvd::Response> Handle(const RequestWithStdio& request) override;
+  Result<void> Interrupt() override;
+  cvd_common::Args CmdList() const override { return {kFleetSubcmd}; }
+
+ private:
+  InstanceManager& instance_manager_;
+  SubprocessWaiter& subprocess_waiter_;
+  std::mutex interruptible_;
+  bool interrupted_ = false;
+
+  static constexpr char kFleetSubcmd[] = "fleet";
+  Result<cvd::Status> HandleCvdFleet(const uid_t uid, const SharedFD& out,
+                                     const SharedFD& err,
+                                     const cvd_common::Args& cmd_args) const;
+  Result<cvd::Status> CvdFleetHelp(const SharedFD& out) const;
+  bool IsHelp(const cvd_common::Args& cmd_args) const;
+};
+
+Result<bool> CvdFleetCommandHandler::CanHandle(
+    const RequestWithStdio& request) const {
+  auto invocation = ParseInvocation(request.Message());
+  return invocation.command == kFleetSubcmd;
+}
+
+Result<void> CvdFleetCommandHandler::Interrupt() {
+  std::scoped_lock interrupt_lock(interruptible_);
+  interrupted_ = true;
+  CF_EXPECT(subprocess_waiter_.Interrupt());
+  return {};
+}
+
+Result<cvd::Response> CvdFleetCommandHandler::Handle(
+    const RequestWithStdio& request) {
+  std::unique_lock interrupt_lock(interruptible_);
+  if (interrupted_) {
+    return CF_ERR("Interrupted");
+  }
+  CF_EXPECT(CanHandle(request));
+  CF_EXPECT(request.Credentials() != std::nullopt);
+  const uid_t uid = request.Credentials()->uid;
+
+  cvd::Response response;
+  response.mutable_command_response();
+
+  auto [sub_cmd, args] = ParseInvocation(request.Message());
+  auto envs =
+      cvd_common::ConvertToEnvs(request.Message().command_request().env());
+  if (!IsHelp(args)) {
+    CF_EXPECT(Contains(envs, "ANDROID_HOST_OUT") &&
+              DirectoryExists(envs.at("ANDROID_HOST_OUT")));
+  }
+
+  *response.mutable_status() =
+      CF_EXPECT(HandleCvdFleet(uid, request.Out(), request.Err(), args));
+
+  return response;
+}
+
+Result<cvd::Status> CvdFleetCommandHandler::HandleCvdFleet(
+    const uid_t uid, const SharedFD& out, const SharedFD& err,
+    const cvd_common::Args& cmd_args) const {
+  if (IsHelp(cmd_args)) {
+    auto status = CF_EXPECT(CvdFleetHelp(out));
+    return status;
+  }
+  auto status = CF_EXPECT(instance_manager_.CvdFleet(uid, out, err, cmd_args));
+  return status;
+}
+
+bool CvdFleetCommandHandler::IsHelp(const cvd_common::Args& args) const {
+  for (const auto& arg : args) {
+    if (arg == "--help" || arg == "-help") {
+      return true;
+    }
+  }
+  return false;
+}
+
+Result<cvd::Status> CvdFleetCommandHandler::CvdFleetHelp(
+    const SharedFD& out) const {
+  WriteAll(out, "Simply run \"cvd fleet\" as it has no other flags.\n");
+  WriteAll(out, "\n");
+  WriteAll(out, "\"cvd fleet\" will:\n");
+  WriteAll(out,
+           "      1. tell whether the devices (i.e. \"run_cvd\" processes) are "
+           "active.\n");
+  WriteAll(out,
+           "      2. optionally list the active devices with information.\n");
+  cvd::Status status;
+  status.set_code(cvd::Status::OK);
+  return status;
+}
+
+fruit::Component<fruit::Required<InstanceManager, SubprocessWaiter>>
+cvdFleetCommandComponent() {
+  return fruit::createComponent()
+      .addMultibinding<CvdServerHandler, CvdFleetCommandHandler>();
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/server_command/fleet.h b/host/commands/cvd/server_command/fleet.h
new file mode 100644
index 0000000..f566369
--- /dev/null
+++ b/host/commands/cvd/server_command/fleet.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <fruit/fruit.h>
+
+#include "host/commands/cvd/instance_manager.h"
+#include "host/commands/cvd/server_command/subprocess_waiter.h"
+
+namespace cuttlefish {
+
+fruit::Component<fruit::Required<InstanceManager, SubprocessWaiter>>
+cvdFleetCommandComponent();
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/server_command/generic.cpp b/host/commands/cvd/server_command/generic.cpp
new file mode 100644
index 0000000..dff3cd1
--- /dev/null
+++ b/host/commands/cvd/server_command/generic.cpp
@@ -0,0 +1,433 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/commands/cvd/server_command/generic.h"
+
+#include <sys/types.h>
+
+#include <functional>
+#include <mutex>
+#include <variant>
+
+#include <android-base/file.h>
+
+#include "common/libs/fs/shared_fd.h"
+#include "common/libs/utils/contains.h"
+#include "common/libs/utils/environment.h"
+#include "common/libs/utils/files.h"
+#include "common/libs/utils/result.h"
+#include "common/libs/utils/scope_guard.h"
+#include "common/libs/utils/subprocess.h"
+#include "cvd_server.pb.h"
+#include "host/commands/cvd/command_sequence.h"
+#include "host/commands/cvd/common_utils.h"
+#include "host/commands/cvd/instance_manager.h"
+#include "host/commands/cvd/selector/selector_constants.h"
+#include "host/commands/cvd/server_command/host_tool_target_manager.h"
+#include "host/commands/cvd/server_command/server_handler.h"
+#include "host/commands/cvd/server_command/subprocess_waiter.h"
+#include "host/commands/cvd/server_command/utils.h"
+#include "host/commands/cvd/types.h"
+#include "host/libs/config/cuttlefish_config.h"
+#include "host/libs/config/instance_nums.h"
+
+namespace cuttlefish {
+
+class CvdGenericCommandHandler : public CvdServerHandler {
+ public:
+  INJECT(CvdGenericCommandHandler(
+      InstanceManager& instance_manager, SubprocessWaiter& subprocess_waiter,
+      HostToolTargetManager& host_tool_target_manager));
+
+  Result<bool> CanHandle(const RequestWithStdio& request) const;
+  Result<cvd::Response> Handle(const RequestWithStdio& request) override;
+  Result<void> Interrupt() override;
+  cvd_common::Args CmdList() const override;
+
+ private:
+  struct CommandInvocationInfo {
+    std::string command;
+    std::string bin;
+    std::string bin_path;
+    std::string home;
+    std::string host_artifacts_path;
+    uid_t uid;
+    std::vector<std::string> args;
+    cvd_common::Envs envs;
+  };
+  struct ExtractedInfo {
+    CommandInvocationInfo invocation_info;
+    std::optional<selector::LocalInstanceGroup> group;
+  };
+  Result<ExtractedInfo> ExtractInfo(const RequestWithStdio& request) const;
+  Result<std::string> GetBin(const std::string& subcmd) const;
+  Result<std::string> GetBin(const std::string& subcmd,
+                             const std::string& host_artifacts_path) const;
+  bool IsStopCommand(const std::string& subcmd) const {
+    return subcmd == "stop" || subcmd == "stop_cvd";
+  }
+  // whether the "bin" is cvd bins like stop_cvd or not (e.g. ln, ls, mkdir)
+  // The information to fire the command might be different. This information
+  // is about what the executable binary is and how to find it.
+  struct BinPathInfo {
+    std::string bin_;
+    std::string bin_path_;
+    std::string host_artifacts_path_;
+  };
+  Result<BinPathInfo> NonCvdBinPath(const std::string& subcmd,
+                                    const cvd_common::Envs& envs) const;
+  Result<BinPathInfo> CvdHelpBinPath(const std::string& subcmd,
+                                     const cvd_common::Envs& envs) const;
+  Result<BinPathInfo> CvdBinPath(const std::string& subcmd,
+                                 const cvd_common::Envs& envs,
+                                 const std::string& home,
+                                 const uid_t uid) const;
+
+  InstanceManager& instance_manager_;
+  SubprocessWaiter& subprocess_waiter_;
+  HostToolTargetManager& host_tool_target_manager_;
+  std::mutex interruptible_;
+  bool interrupted_ = false;
+  using BinGeneratorType = std::function<Result<std::string>(
+      const std::string& host_artifacts_path)>;
+  using BinType = std::variant<std::string, BinGeneratorType>;
+  std::map<std::string, BinType> command_to_binary_map_;
+
+  static constexpr char kHostBugreportBin[] = "cvd_internal_host_bugreport";
+  static constexpr char kLnBin[] = "ln";
+  static constexpr char kMkdirBin[] = "mkdir";
+  static constexpr char kClearBin[] =
+      "clear_placeholder";  // Unused, runs CvdClear()
+};
+
+CvdGenericCommandHandler::CvdGenericCommandHandler(
+    InstanceManager& instance_manager, SubprocessWaiter& subprocess_waiter,
+    HostToolTargetManager& host_tool_target_manager)
+    : instance_manager_(instance_manager),
+      subprocess_waiter_(subprocess_waiter),
+      host_tool_target_manager_(host_tool_target_manager),
+      command_to_binary_map_{
+          {"host_bugreport", kHostBugreportBin},
+          {"cvd_host_bugreport", kHostBugreportBin},
+          {"status",
+           [this](
+               const std::string& host_artifacts_path) -> Result<std::string> {
+             auto stat_bin = CF_EXPECT(host_tool_target_manager_.ExecBaseName({
+                 .artifacts_path = host_artifacts_path,
+                 .op = "status",
+             }));
+             return stat_bin;
+           }},
+          {"cvd_status",
+           [this](
+               const std::string& host_artifacts_path) -> Result<std::string> {
+             auto stat_bin = CF_EXPECT(host_tool_target_manager_.ExecBaseName({
+                 .artifacts_path = host_artifacts_path,
+                 .op = "status",
+             }));
+             return stat_bin;
+           }},
+          {"stop",
+           [this](
+               const std::string& host_artifacts_path) -> Result<std::string> {
+             auto stop_bin = CF_EXPECT(host_tool_target_manager_.ExecBaseName({
+                 .artifacts_path = host_artifacts_path,
+                 .op = "stop",
+             }));
+             return stop_bin;
+           }},
+          {"stop_cvd",
+           [this](
+               const std::string& host_artifacts_path) -> Result<std::string> {
+             auto stop_bin = CF_EXPECT(host_tool_target_manager_.ExecBaseName({
+                 .artifacts_path = host_artifacts_path,
+                 .op = "stop",
+             }));
+             return stop_bin;
+           }},
+          {"clear", kClearBin},
+          {"mkdir", kMkdirBin},
+          {"ln", kLnBin},
+      } {}
+
+Result<bool> CvdGenericCommandHandler::CanHandle(
+    const RequestWithStdio& request) const {
+  auto invocation = ParseInvocation(request.Message());
+  return Contains(command_to_binary_map_, invocation.command);
+}
+
+Result<void> CvdGenericCommandHandler::Interrupt() {
+  std::scoped_lock interrupt_lock(interruptible_);
+  interrupted_ = true;
+  CF_EXPECT(subprocess_waiter_.Interrupt());
+  return {};
+}
+
+Result<cvd::Response> CvdGenericCommandHandler::Handle(
+    const RequestWithStdio& request) {
+  std::unique_lock interrupt_lock(interruptible_);
+  if (interrupted_) {
+    return CF_ERR("Interrupted");
+  }
+  CF_EXPECT(CanHandle(request));
+  CF_EXPECT(request.Credentials() != std::nullopt);
+  const uid_t uid = request.Credentials()->uid;
+
+  cvd::Response response;
+  response.mutable_command_response();
+
+  auto precondition_verified = VerifyPrecondition(request);
+  if (!precondition_verified.ok()) {
+    response.mutable_status()->set_code(cvd::Status::FAILED_PRECONDITION);
+    response.mutable_status()->set_message(
+        precondition_verified.error().Message());
+    return response;
+  }
+  auto [invocation_info, group_opt] = CF_EXPECT(ExtractInfo(request));
+  if (invocation_info.bin == kClearBin) {
+    *response.mutable_status() =
+        instance_manager_.CvdClear(request.Out(), request.Err());
+    return response;
+  }
+
+  ConstructCommandParam construct_cmd_param{
+      .bin_path = invocation_info.bin_path,
+      .home = invocation_info.home,
+      .args = invocation_info.args,
+      .envs = invocation_info.envs,
+      .working_dir = request.Message().command_request().working_directory(),
+      .command_name = invocation_info.bin,
+      .in = request.In(),
+      .out = request.Out(),
+      .err = request.Err()};
+  Command command = CF_EXPECT(ConstructCommand(construct_cmd_param));
+
+  SubprocessOptions options;
+  if (request.Message().command_request().wait_behavior() ==
+      cvd::WAIT_BEHAVIOR_START) {
+    options.ExitWithParent(false);
+  }
+  CF_EXPECT(subprocess_waiter_.Setup(command.Start(options)));
+
+  bool is_stop = IsStopCommand(invocation_info.command);
+
+  // captured structured bindings are a C++20 extension
+  // so we need [group_ptr] instead of [&group_opt]
+  auto* group_ptr = (group_opt ? std::addressof(*group_opt) : nullptr);
+  ScopeGuard exit_action([this, is_stop, group_ptr]() {
+    if (!is_stop) {
+      return;
+    }
+    if (!group_ptr) {
+      return;
+    }
+    for (const auto& instance : group_ptr->Instances()) {
+      auto lock = instance_manager_.TryAcquireLock(instance->InstanceId());
+      if (lock.ok() && (*lock)) {
+        (*lock)->Status(InUseState::kNotInUse);
+        continue;
+      }
+      LOG(ERROR) << "InstanceLockFileManager failed to acquire lock for #"
+                 << instance->InstanceId();
+    }
+  });
+
+  if (request.Message().command_request().wait_behavior() ==
+      cvd::WAIT_BEHAVIOR_START) {
+    response.mutable_status()->set_code(cvd::Status::OK);
+    return response;
+  }
+
+  interrupt_lock.unlock();
+
+  auto infop = CF_EXPECT(subprocess_waiter_.Wait());
+
+  if (infop.si_code == CLD_EXITED && IsStopCommand(invocation_info.command)) {
+    instance_manager_.RemoveInstanceGroup(uid, invocation_info.home);
+  }
+
+  return ResponseFromSiginfo(infop);
+}
+
+std::vector<std::string> CvdGenericCommandHandler::CmdList() const {
+  std::vector<std::string> subcmd_list;
+  subcmd_list.reserve(command_to_binary_map_.size());
+  for (const auto& [cmd, _] : command_to_binary_map_) {
+    subcmd_list.emplace_back(cmd);
+  }
+  return subcmd_list;
+}
+
+Result<CvdGenericCommandHandler::BinPathInfo>
+CvdGenericCommandHandler::NonCvdBinPath(const std::string& subcmd,
+                                        const cvd_common::Envs& envs) const {
+  auto bin_path_base = CF_EXPECT(GetBin(subcmd));
+  // no need of executable directory. Will look up by PATH
+  // bin_path_base is like ln, mkdir, etc.
+  return BinPathInfo{.bin_ = bin_path_base,
+                     .bin_path_ = bin_path_base,
+                     .host_artifacts_path_ = envs.at(kAndroidHostOut)};
+}
+
+Result<CvdGenericCommandHandler::BinPathInfo>
+CvdGenericCommandHandler::CvdHelpBinPath(const std::string& subcmd,
+                                         const cvd_common::Envs& envs) const {
+  auto tool_dir_path = envs.at(kAndroidHostOut);
+  if (!DirectoryExists(tool_dir_path + "/bin")) {
+    tool_dir_path =
+        android::base::Dirname(android::base::GetExecutableDirectory());
+  }
+  auto bin_path_base = CF_EXPECT(GetBin(subcmd, tool_dir_path));
+  // no need of executable directory. Will look up by PATH
+  // bin_path_base is like ln, mkdir, etc.
+  return BinPathInfo{
+      .bin_ = bin_path_base,
+      .bin_path_ = tool_dir_path.append("/bin/").append(bin_path_base),
+      .host_artifacts_path_ = envs.at(kAndroidHostOut)};
+}
+
+Result<CvdGenericCommandHandler::BinPathInfo>
+CvdGenericCommandHandler::CvdBinPath(const std::string& subcmd,
+                                     const cvd_common::Envs& envs,
+                                     const std::string& home,
+                                     const uid_t uid) const {
+  std::string host_artifacts_path;
+  auto instance_group_result = instance_manager_.FindGroup(
+      uid, InstanceManager::Query{selector::kHomeField, home});
+
+  // the dir that "bin/<this subcmd bin file>" belongs to
+  std::string tool_dir_path;
+  if (instance_group_result.ok()) {
+    host_artifacts_path = instance_group_result->HostArtifactsPath();
+    tool_dir_path = host_artifacts_path;
+  } else {
+    // if the group does not exist (e.g. cvd status --help)
+    // falls back here
+    host_artifacts_path = envs.at(kAndroidHostOut);
+    tool_dir_path = host_artifacts_path;
+    if (!DirectoryExists(tool_dir_path + "/bin")) {
+      tool_dir_path =
+          android::base::Dirname(android::base::GetExecutableDirectory());
+    }
+  }
+  const std::string bin = CF_EXPECT(GetBin(subcmd, tool_dir_path));
+  const std::string bin_path = tool_dir_path.append("/bin/").append(bin);
+  CF_EXPECT(FileExists(bin_path));
+  return BinPathInfo{.bin_ = bin,
+                     .bin_path_ = bin_path,
+                     .host_artifacts_path_ = host_artifacts_path};
+}
+
+/*
+ * commands like ln, mkdir, clear
+ *  -> bin, bin, system_wide_home, N/A, cmd_args, envs
+ *
+ * help command
+ *  -> android_out/bin, bin, system_wide_home, android_out, cmd_args, envs
+ *
+ * non-help command
+ *  -> group->a/o/bin, bin, group->home, group->android_out, cmd_args, envs
+ *
+ */
+Result<CvdGenericCommandHandler::ExtractedInfo>
+CvdGenericCommandHandler::ExtractInfo(const RequestWithStdio& request) const {
+  auto result_opt = request.Credentials();
+  CF_EXPECT(result_opt != std::nullopt);
+  const uid_t uid = result_opt->uid;
+
+  auto [subcmd, cmd_args] = ParseInvocation(request.Message());
+  CF_EXPECT(Contains(command_to_binary_map_, subcmd));
+
+  cvd_common::Envs envs =
+      cvd_common::ConvertToEnvs(request.Message().command_request().env());
+  const auto& selector_opts =
+      request.Message().command_request().selector_opts();
+  const auto selector_args = cvd_common::ConvertToArgs(selector_opts.args());
+  CF_EXPECT(Contains(envs, kAndroidHostOut) &&
+            DirectoryExists(envs.at(kAndroidHostOut)));
+
+  std::unordered_set<std::string> non_cvd_op{"clear", "mkdir", "ln"};
+  if (Contains(non_cvd_op, subcmd) || IsHelpSubcmd(cmd_args)) {
+    const auto [bin, bin_path, host_artifacts_path] =
+        Contains(non_cvd_op, subcmd) ? CF_EXPECT(NonCvdBinPath(subcmd, envs))
+                                     : CF_EXPECT(CvdHelpBinPath(subcmd, envs));
+    return ExtractedInfo{
+        .invocation_info =
+            CommandInvocationInfo{
+                .command = subcmd,
+                .bin = bin,
+                .bin_path = bin_path,
+                .home = CF_EXPECT(SystemWideUserHome(uid)),
+                .host_artifacts_path = envs.at(kAndroidHostOut),
+                .uid = uid,
+                .args = cmd_args,
+                .envs = envs},
+        .group = std::nullopt};
+  }
+
+  auto instance_group =
+      CF_EXPECT(instance_manager_.SelectGroup(selector_args, envs, uid));
+  auto android_host_out = instance_group.HostArtifactsPath();
+  auto home = instance_group.HomeDir();
+  auto bin = CF_EXPECT(GetBin(subcmd, android_host_out));
+  auto bin_path = ConcatToString(android_host_out, "/bin/", bin);
+  CommandInvocationInfo result = {.command = subcmd,
+                                  .bin = bin,
+                                  .bin_path = bin_path,
+                                  .home = home,
+                                  .host_artifacts_path = android_host_out,
+                                  .uid = uid,
+                                  .args = cmd_args,
+                                  .envs = envs};
+  result.envs["HOME"] = home;
+  return ExtractedInfo{.invocation_info = result, .group = instance_group};
+}
+
+Result<std::string> CvdGenericCommandHandler::GetBin(
+    const std::string& subcmd) const {
+  const auto& bin_type_entry = command_to_binary_map_.at(subcmd);
+  const std::string* ptr_if_string =
+      std::get_if<std::string>(std::addressof(bin_type_entry));
+  CF_EXPECT(ptr_if_string != nullptr,
+            "To figure out bin for " << subcmd << ", we need ANDROID_HOST_OUT");
+  return *ptr_if_string;
+}
+
+Result<std::string> CvdGenericCommandHandler::GetBin(
+    const std::string& subcmd, const std::string& host_artifacts_path) const {
+  auto bin_getter = Overload{
+      [](const std::string& str) -> Result<std::string> { return str; },
+      [&host_artifacts_path](
+          const BinGeneratorType& bin_generator) -> Result<std::string> {
+        const auto bin = CF_EXPECT(bin_generator(host_artifacts_path));
+        return bin;
+      },
+      [](auto) -> Result<std::string> {
+        return CF_ERR("Unsupported parameter type for GetBin()");
+      }};
+  auto bin =
+      CF_EXPECT(std::visit(bin_getter, command_to_binary_map_.at(subcmd)));
+  return bin;
+}
+
+fruit::Component<
+    fruit::Required<InstanceManager, SubprocessWaiter, HostToolTargetManager>>
+cvdGenericCommandComponent() {
+  return fruit::createComponent()
+      .addMultibinding<CvdServerHandler, CvdGenericCommandHandler>();
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/server_command/generic.h b/host/commands/cvd/server_command/generic.h
new file mode 100644
index 0000000..6b3e17a
--- /dev/null
+++ b/host/commands/cvd/server_command/generic.h
@@ -0,0 +1,30 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <fruit/fruit.h>
+
+#include "host/commands/cvd/instance_manager.h"
+#include "host/commands/cvd/server_command/subprocess_waiter.h"
+
+namespace cuttlefish {
+
+fruit::Component<
+    fruit::Required<InstanceManager, SubprocessWaiter, HostToolTargetManager>>
+cvdGenericCommandComponent();
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/server_command/handler_proxy.cpp b/host/commands/cvd/server_command/handler_proxy.cpp
new file mode 100644
index 0000000..5614372
--- /dev/null
+++ b/host/commands/cvd/server_command/handler_proxy.cpp
@@ -0,0 +1,128 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/commands/cvd/server_command/handler_proxy.h"
+
+#include <mutex>
+#include <vector>
+
+#include <android-base/strings.h>
+
+#include "host/commands/cvd/common_utils.h"
+#include "host/commands/cvd/frontline_parser.h"
+#include "host/commands/cvd/selector/selector_constants.h"
+#include "host/commands/cvd/server_command/server_handler.h"
+#include "host/commands/cvd/server_command/utils.h"
+#include "host/commands/cvd/types.h"
+
+namespace cuttlefish {
+
+class CvdServerHandlerProxy : public CvdServerHandler {
+ public:
+  INJECT(CvdServerHandlerProxy(CommandSequenceExecutor& executor))
+      : executor_(executor) {}
+
+  Result<bool> CanHandle(const RequestWithStdio& request) const override {
+    auto invocation = ParseInvocation(request.Message());
+    return (invocation.command == "process");
+  }
+
+  // the input format is:
+  //   cmd_args:      cvd cmdline-parser
+  //   selector_args: [command args to parse]
+  Result<cvd::Response> Handle(const RequestWithStdio& request) override {
+    std::unique_lock interrupt_lock(interruptible_);
+    CF_EXPECT(!interrupted_, "Interrupted");
+    CF_EXPECT(CanHandle(request));
+
+    const auto& selector_opts =
+        request.Message().command_request().selector_opts();
+    auto all_args = cvd_common::ConvertToArgs(selector_opts.args());
+    CF_EXPECT_GE(all_args.size(), 1);
+    if (all_args.size() == 1) {
+      CF_EXPECT_EQ(all_args.front(), "cvd");
+      all_args = cvd_common::Args{"cvd", "help"};
+    }
+
+    cvd_common::Envs envs =
+        cvd_common::ConvertToEnvs(request.Message().command_request().env());
+
+    auto subcmds = executor_.CmdList();
+    auto selector_flag_collection =
+        selector::SelectorFlags::New().FlagsAsCollection();
+
+    FrontlineParser::ParserParam server_param{
+        .server_supported_subcmds = subcmds,
+        .internal_cmds = std::vector<std::string>{},
+        .all_args = all_args,
+        .cvd_flags = std::move(selector_flag_collection)};
+    auto frontline_parser = CF_EXPECT(FrontlineParser::Parse(server_param));
+    CF_EXPECT(frontline_parser != nullptr);
+
+    const auto prog_path = frontline_parser->ProgPath();
+    const auto new_sub_cmd = frontline_parser->SubCmd();
+    cvd_common::Args cmd_args{frontline_parser->SubCmdArgs()};
+    cvd_common::Args selector_args{frontline_parser->CvdArgs()};
+
+    cvd_common::Args new_exec_args{prog_path};
+    if (new_sub_cmd) {
+      new_exec_args.push_back(*new_sub_cmd);
+    }
+    new_exec_args.insert(new_exec_args.end(), cmd_args.begin(), cmd_args.end());
+
+    cvd::Request exec_request = MakeRequest(
+        {.cmd_args = new_exec_args,
+         .env = envs,
+         .selector_args = selector_args,
+         .working_dir =
+             request.Message().command_request().working_directory()},
+        request.Message().command_request().wait_behavior());
+
+    RequestWithStdio forwarded_request(
+        request.Client(), std::move(exec_request), request.FileDescriptors(),
+        request.Credentials());
+    interrupt_lock.unlock();
+    SharedFD dev_null = SharedFD::Open("/dev/null", O_RDWR);
+    CF_EXPECT(dev_null->IsOpen(), "Failed to open /dev/null");
+    const auto responses =
+        CF_EXPECT(executor_.Execute({std::move(forwarded_request)}, dev_null));
+    CF_EXPECT_EQ(responses.size(), 1);
+    return responses.front();
+  }
+
+  Result<void> Interrupt() override {
+    std::scoped_lock interrupt_lock(interruptible_);
+    interrupted_ = true;
+    CF_EXPECT(executor_.Interrupt());
+    return {};
+  }
+
+  // not intended to be used by the user
+  cvd_common::Args CmdList() const override { return {}; }
+
+ private:
+  std::mutex interruptible_;
+  bool interrupted_ = false;
+  CommandSequenceExecutor& executor_;
+};
+
+fruit::Component<fruit::Required<CommandSequenceExecutor>>
+CvdHandlerProxyComponent() {
+  return fruit::createComponent()
+      .addMultibinding<CvdServerHandler, CvdServerHandlerProxy>();
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/server_command/handler_proxy.h b/host/commands/cvd/server_command/handler_proxy.h
new file mode 100644
index 0000000..a97fdd3
--- /dev/null
+++ b/host/commands/cvd/server_command/handler_proxy.h
@@ -0,0 +1,28 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <fruit/fruit.h>
+
+#include "host/commands/cvd/command_sequence.h"
+
+namespace cuttlefish {
+
+fruit::Component<fruit::Required<CommandSequenceExecutor>>
+CvdHandlerProxyComponent();
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/server_command/help.cpp b/host/commands/cvd/server_command/help.cpp
new file mode 100644
index 0000000..942a8a2
--- /dev/null
+++ b/host/commands/cvd/server_command/help.cpp
@@ -0,0 +1,166 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/commands/cvd/server.h"
+
+#include <mutex>
+
+#include "common/libs/fs/shared_buf.h"
+#include "host/commands/cvd/command_sequence.h"
+#include "host/commands/cvd/server_command/utils.h"
+#include "host/commands/cvd/types.h"
+#include "host/libs/config/inject.h"
+
+namespace cuttlefish {
+
+static constexpr char kHelpMessage[] = R"(Cuttlefish Virtual Device (CVD) CLI.
+
+usage: cvd <selector/driver options> <command> <args>
+
+Selector Options:
+  -group_name <name>     Specify the name of the instance group created
+                         or selected.
+  -instance_name <name>  Selects the device of the given name to perform the
+                         commands for.
+  -instance_name <names> Takes the names of the devices to create within an
+                         instance group. The 'names' is comma-separated.
+
+Driver Options:
+  --help                 Print this message
+  -disable_default_group If the flag is true, the group's runtime files are
+                         not populated under the user's HOME. Instead the
+                         files are created under an automatically-generated
+                         directory. (default: false)
+  -acquire_file_lock     If the flag is given, the cvd server attempts to
+                         acquire the instance lock file lock. (default: true)
+
+Commands:
+  help                   Print this message.
+  help <command>         Print help for a command.
+  start                  Start a device.
+  stop                   Stop a running device.
+  clear                  Stop all running devices and delete all instance and
+                         assembly directories.
+  fleet                  View the current fleet status.
+  kill-server            Kill the cvd_server background process.
+  server-kill            Same as kill-server
+  powerwash              Delivers powerwash command to the selected device
+  restart                Restart the device without reinitializing the disks
+  restart-server         Restart the cvd_server background process.
+  status                 Check and print the state of a running instance.
+  host_bugreport         Capture a host bugreport, including configs, logs, and
+                         tombstones.
+
+Args:
+  <command args>         Each command has its own set of args.
+                         See cvd help <command>.
+
+Experimental:
+  reset                  See cvd reset --help. Requires cvd >= v1.2
+)";
+
+class CvdHelpHandler : public CvdServerHandler {
+ public:
+  INJECT(CvdHelpHandler(CommandSequenceExecutor& executor))
+      : executor_(executor) {}
+
+  Result<bool> CanHandle(const RequestWithStdio& request) const override {
+    auto invocation = ParseInvocation(request.Message());
+    return (invocation.command == "help");
+  }
+
+  Result<cvd::Response> Handle(const RequestWithStdio& request) override {
+    std::unique_lock interrupt_lock(interruptible_);
+    if (interrupted_) {
+      return CF_ERR("Interrupted");
+    }
+
+    cvd::Response response;
+    response.mutable_command_response();  // Sets oneof member
+    response.mutable_status()->set_code(cvd::Status::OK);
+
+    CF_EXPECT(CanHandle(request));
+
+    auto [subcmd, subcmd_args] = ParseInvocation(request.Message());
+    const auto supported_subcmd_list = executor_.CmdList();
+
+    /*
+     * cvd help, cvd help invalid_token, cvd help help
+     */
+    if (subcmd_args.empty() ||
+        !Contains(supported_subcmd_list, subcmd_args.front()) ||
+        subcmd_args.front() == "help") {
+      WriteAll(request.Out(), kHelpMessage);
+      return response;
+    }
+
+    cvd::Request modified_proto = HelpSubcommandToFlag(request);
+
+    RequestWithStdio inner_cmd(request.Client(), modified_proto,
+                               request.FileDescriptors(),
+                               request.Credentials());
+
+    interrupt_lock.unlock();
+    executor_.Execute({inner_cmd}, SharedFD::Open("/dev/null", O_RDWR));
+
+    return response;
+  }
+
+  Result<void> Interrupt() override {
+    std::scoped_lock interrupt_lock(interruptible_);
+    interrupted_ = true;
+    CF_EXPECT(executor_.Interrupt());
+    return {};
+  }
+
+  cvd_common::Args CmdList() const override { return {"help"}; }
+
+ private:
+  cvd::Request HelpSubcommandToFlag(const RequestWithStdio& request);
+
+  std::mutex interruptible_;
+  bool interrupted_ = false;
+  CommandSequenceExecutor& executor_;
+};
+
+cvd::Request CvdHelpHandler::HelpSubcommandToFlag(
+    const RequestWithStdio& request) {
+  cvd::Request modified_proto = request.Message();
+  auto all_args =
+      cvd_common::ConvertToArgs(modified_proto.command_request().args());
+  auto& args = *modified_proto.mutable_command_request()->mutable_args();
+  args.Clear();
+  // there must be one or more "help" in all_args
+  // delete the first "help"
+  bool found_help = false;
+  for (const auto& cmd_arg : all_args) {
+    if (cmd_arg != "help" || found_help) {
+      args.Add(cmd_arg.c_str());
+      continue;
+    }
+    // skip first help
+    found_help = true;
+  }
+  args.Add("--help");
+  return modified_proto;
+}
+
+fruit::Component<fruit::Required<CommandSequenceExecutor>> CvdHelpComponent() {
+  return fruit::createComponent()
+      .addMultibinding<CvdServerHandler, CvdHelpHandler>();
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/server_command/host_tool_target.cpp b/host/commands/cvd/server_command/host_tool_target.cpp
new file mode 100644
index 0000000..c0f29f3
--- /dev/null
+++ b/host/commands/cvd/server_command/host_tool_target.cpp
@@ -0,0 +1,125 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/commands/cvd/server_command/host_tool_target.h"
+
+#include <sys/stat.h>
+
+#include <fruit/fruit.h>
+
+#include "common/libs/utils/contains.h"
+#include "common/libs/utils/files.h"
+#include "common/libs/utils/result.h"
+#include "common/libs/utils/subprocess.h"
+#include "host/commands/cvd/common_utils.h"
+#include "host/commands/cvd/server_command/flags_collector.h"
+
+namespace cuttlefish {
+
+Result<HostToolTarget> HostToolTarget::Create(
+    const std::string& artifacts_path,
+    const OperationToBinsMap& supported_operations) {
+  std::string bin_dir_path = ConcatToString(artifacts_path, "/bin");
+  std::unordered_map<std::string, OperationImplementation> op_to_impl_map;
+  for (const auto& [op, candidates] : supported_operations) {
+    for (const auto& bin_name : candidates) {
+      const auto bin_path = ConcatToString(bin_dir_path, "/", bin_name);
+      if (!FileExists(bin_path)) {
+        continue;
+      }
+      op_to_impl_map[op] = OperationImplementation{.bin_name_ = bin_name};
+      break;
+    }
+  }
+
+  for (auto& [op, op_impl] : op_to_impl_map) {
+    const std::string bin_path =
+        ConcatToString(artifacts_path, "/bin/", op_impl.bin_name_);
+    Command command(bin_path);
+    command.AddParameter("--helpxml");
+    // b/276497044
+    command.UnsetFromEnvironment(kAndroidHostOut);
+    command.AddEnvironmentVariable(kAndroidHostOut, artifacts_path);
+    command.UnsetFromEnvironment(kAndroidSoongHostOut);
+    command.AddEnvironmentVariable(kAndroidSoongHostOut, artifacts_path);
+
+    std::string xml_str;
+    RunWithManagedStdio(std::move(command), nullptr, std::addressof(xml_str),
+                        nullptr);
+    auto flags_opt = CollectFlagsFromHelpxml(xml_str);
+    if (!flags_opt) {
+      LOG(ERROR) << bin_path << " --helpxml failed.";
+      continue;
+    }
+    auto flags = std::move(*flags_opt);
+    for (auto& flag : flags) {
+      op_impl.supported_flags_[flag->Name()] = std::move(flag);
+    }
+  }
+
+  struct stat for_dir_time_stamp;
+  time_t dir_time_stamp = 0;
+  // Capture the bin directory's time stamp, as the runtime libraries might be updated.
+  if (::stat(bin_dir_path.data(), &for_dir_time_stamp) == 0) {
+    // On success, record the directory mtime. If stat failed, dir_time_stamp
+    // stays 0, so the HostTool entry will always be updated on read request.
+    dir_time_stamp = for_dir_time_stamp.st_mtime;
+  }
+  return HostToolTarget(artifacts_path, dir_time_stamp,
+                        std::move(op_to_impl_map));
+}
+
+HostToolTarget::HostToolTarget(
+    const std::string& artifacts_path, const time_t dir_time_stamp,
+    std::unordered_map<std::string, OperationImplementation>&& op_to_impl_map)
+    : artifacts_path_(artifacts_path),
+      dir_time_stamp_(dir_time_stamp),
+      op_to_impl_map_(std::move(op_to_impl_map)) {}
+
+bool HostToolTarget::IsDirty() const {
+  std::string bin_path = ConcatToString(artifacts_path_, "/bin");
+  if (!DirectoryExists(bin_path)) {
+    return true;
+  }
+  struct stat for_dir_time_stamp;
+  if (::stat(bin_path.data(), &for_dir_time_stamp) != 0) {
+    return true;
+  }
+  return dir_time_stamp_ != for_dir_time_stamp.st_mtime;
+}
+
+Result<FlagInfo> HostToolTarget::GetFlagInfo(
+    const FlagInfoRequest& request) const {
+  CF_EXPECT(Contains(op_to_impl_map_, request.operation_),
+            "Operation \"" << request.operation_ << "\" is not supported.");
+  auto& supported_flags =
+      op_to_impl_map_.at(request.operation_).supported_flags_;
+  CF_EXPECT(Contains(supported_flags, request.flag_name_));
+  const auto& flag_uniq_ptr = supported_flags.at(request.flag_name_);
+  FlagInfo copied(*flag_uniq_ptr);
+  return copied;
+}
+
+Result<std::string> HostToolTarget::GetBinName(
+    const std::string& operation) const {
+  CF_EXPECT(Contains(op_to_impl_map_, operation),
+            "Operation \"" << operation << "\" is not supported by "
+                           << "the host tool target object at "
+                           << artifacts_path_);
+  return op_to_impl_map_.at(operation).bin_name_;
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/server_command/host_tool_target.h b/host/commands/cvd/server_command/host_tool_target.h
new file mode 100644
index 0000000..57d6609
--- /dev/null
+++ b/host/commands/cvd/server_command/host_tool_target.h
@@ -0,0 +1,68 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <sys/types.h>
+
+#include <string>
+#include <unordered_map>
+#include <vector>
+
+#include "common/libs/utils/result.h"
+#include "host/commands/cvd/server_command/flags_collector.h"
+
+namespace cuttlefish {
+
+class HostToolTarget {
+ public:
+  using OperationToBinsMap =
+      std::unordered_map<std::string, std::vector<std::string>>;
+  struct FlagInfoRequest {
+    std::string operation_;
+    std::string flag_name_;
+  };
+  // artifacts_path: e.g. ANDROID_HOST_OUT
+  static Result<HostToolTarget> Create(
+      const std::string& artifacts_path,
+      const OperationToBinsMap& supported_operations);
+
+  bool IsDirty() const;
+  Result<FlagInfo> GetFlagInfo(const FlagInfoRequest& request) const;
+  bool HasField(const FlagInfoRequest& request) const {
+    return GetFlagInfo(request).ok();
+  }
+  Result<std::string> GetBinName(const std::string& operation) const;
+
+ private:
+  using SupportedFlagMap = std::unordered_map<std::string, FlagInfoPtr>;
+  struct OperationImplementation {
+    std::string bin_name_;
+    SupportedFlagMap supported_flags_;
+  };
+  HostToolTarget(const std::string& artifacts_path, const time_t dir_time_stamp,
+                 std::unordered_map<std::string, OperationImplementation>&&
+                     op_to_impl_map);
+
+  // artifacts path and the bin directory time stamp captured on creation
+  const std::string artifacts_path_;
+  const time_t dir_time_stamp_;
+  // map from "start", "stop", etc, to "cvd_internal_start", "stop_cvd", etc
+  // with the supported flags if those implementation offers --helpxml.
+  std::unordered_map<std::string, OperationImplementation> op_to_impl_map_;
+};
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/server_command/host_tool_target_manager.cpp b/host/commands/cvd/server_command/host_tool_target_manager.cpp
new file mode 100644
index 0000000..c625020
--- /dev/null
+++ b/host/commands/cvd/server_command/host_tool_target_manager.cpp
@@ -0,0 +1,109 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/commands/cvd/server_command/host_tool_target_manager.h"
+
+#include "common/libs/utils/contains.h"
+#include "common/libs/utils/files.h"
+#include "host/commands/cvd/common_utils.h"
+
+namespace cuttlefish {
+
+class HostToolTargetManagerImpl : public HostToolTargetManager {
+ public:
+  INJECT(HostToolTargetManagerImpl(const OperationToBinsMap&));
+
+  Result<FlagInfo> ReadFlag(const HostToolFlagRequestForm& request) override;
+  Result<std::string> ExecBaseName(
+      const HostToolExecNameRequestForm& request) override;
+
+ private:
+  Result<void> EnsureExistence(const std::string& artifacts_path);
+  Result<void> UpdateOutdated(const std::string& artifacts_path);
+
+  using HostToolTargetMap = std::unordered_map<std::string, HostToolTarget>;
+
+  // map from artifact dir to host tool target information object
+  HostToolTargetMap host_target_table_;
+  // predefined mapping from an operation to potential executable binary names
+  // e.g. "start" -> {"cvd_internal_start", "launch_cvd"}
+  const OperationToBinsMap& op_to_possible_bins_map_;
+  std::mutex table_mutex_;
+};
+
+HostToolTargetManagerImpl::HostToolTargetManagerImpl(
+    const OperationToBinsMap& op_to_bins_map)
+    : op_to_possible_bins_map_{op_to_bins_map} {}
+
+// use this only after acquiring the table_mutex_
+Result<void> HostToolTargetManagerImpl::EnsureExistence(
+    const std::string& artifacts_path) {
+  if (!Contains(host_target_table_, artifacts_path)) {
+    HostToolTarget new_host_tool_target = CF_EXPECT(
+        HostToolTarget::Create(artifacts_path, op_to_possible_bins_map_));
+    host_target_table_.emplace(artifacts_path, std::move(new_host_tool_target));
+  }
+  return {};
+}
+
+Result<void> HostToolTargetManagerImpl::UpdateOutdated(
+    const std::string& artifacts_path) {
+  CF_EXPECT(Contains(host_target_table_, artifacts_path));
+  auto& host_target = host_target_table_.at(artifacts_path);
+  if (!host_target.IsDirty()) {
+    return {};
+  }
+  LOG(ERROR) << artifacts_path << " is new, so updating HostToolTarget";
+  HostToolTarget new_host_tool_target = CF_EXPECT(
+      HostToolTarget::Create(artifacts_path, op_to_possible_bins_map_));
+  host_target_table_.emplace(artifacts_path, std::move(new_host_tool_target));
+  return {};
+}
+
+Result<FlagInfo> HostToolTargetManagerImpl::ReadFlag(
+    const HostToolFlagRequestForm& request) {
+  std::lock_guard<std::mutex> lock(table_mutex_);
+  CF_EXPECT(
+      EnsureExistence(request.artifacts_path),
+      "Could not create HostToolTarget object for " << request.artifacts_path);
+  CF_EXPECT(UpdateOutdated(request.artifacts_path));
+  auto& host_target = host_target_table_.at(request.artifacts_path);
+  auto flag_info =
+      CF_EXPECT(host_target.GetFlagInfo(HostToolTarget::FlagInfoRequest{
+          .operation_ = request.op,
+          .flag_name_ = request.flag_name,
+      }));
+  return flag_info;
+}
+
+Result<std::string> HostToolTargetManagerImpl::ExecBaseName(
+    const HostToolExecNameRequestForm& request) {
+  std::lock_guard<std::mutex> lock(table_mutex_);
+  CF_EXPECT(
+      EnsureExistence(request.artifacts_path),
+      "Could not create HostToolTarget object for " << request.artifacts_path);
+  auto& host_target = host_target_table_.at(request.artifacts_path);
+  auto base_name = CF_EXPECT(host_target.GetBinName(request.op));
+  return base_name;
+}
+
+fruit::Component<fruit::Required<OperationToBinsMap>, HostToolTargetManager>
+HostToolTargetManagerComponent() {
+  return fruit::createComponent()
+      .bind<HostToolTargetManager, HostToolTargetManagerImpl>();
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/server_command/host_tool_target_manager.h b/host/commands/cvd/server_command/host_tool_target_manager.h
new file mode 100644
index 0000000..d0b8ebb
--- /dev/null
+++ b/host/commands/cvd/server_command/host_tool_target_manager.h
@@ -0,0 +1,58 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <functional>
+#include <mutex>
+#include <string>
+#include <unordered_map>
+#include <vector>
+
+#include <fruit/fruit.h>
+
+#include "common/libs/utils/result.h"
+#include "host/commands/cvd/server_command/flags_collector.h"
+#include "host/commands/cvd/server_command/host_tool_target.h"
+#include "host/commands/cvd/server_command/operation_to_bins_map.h"
+
+namespace cuttlefish {
+
+struct HostToolFlagRequestForm {
+  std::string artifacts_path;
+  // operations like stop, start, status, etc
+  std::string op;
+  std::string flag_name;
+};
+
+struct HostToolExecNameRequestForm {
+  std::string artifacts_path;
+  // operations like stop, start, status, etc
+  std::string op;
+};
+
+class HostToolTargetManager {
+ public:
+  virtual ~HostToolTargetManager() = default;
+  virtual Result<FlagInfo> ReadFlag(const HostToolFlagRequestForm& request) = 0;
+  virtual Result<std::string> ExecBaseName(
+      const HostToolExecNameRequestForm& request) = 0;
+};
+
+fruit::Component<fruit::Required<OperationToBinsMap>, HostToolTargetManager>
+HostToolTargetManagerComponent();
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/server_command/load_configs.cpp b/host/commands/cvd/server_command/load_configs.cpp
new file mode 100644
index 0000000..eb49807
--- /dev/null
+++ b/host/commands/cvd/server_command/load_configs.cpp
@@ -0,0 +1,292 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "host/commands/cvd/server_command/load_configs.h"
+
+#include <chrono>
+#include <mutex>
+#include <sstream>
+#include <string>
+
+#include <fruit/fruit.h>
+#include <android-base/parseint.h>
+
+#include "common/libs/fs/shared_buf.h"
+#include "common/libs/utils/files.h"
+#include "common/libs/utils/flag_parser.h"
+#include "common/libs/utils/result.h"
+#include "host/commands/cvd/command_sequence.h"
+#include "host/commands/cvd/parser/cf_configs_common.h"
+#include "host/commands/cvd/parser/load_configs_parser.h"
+#include "host/commands/cvd/selector/selector_constants.h"
+#include "host/commands/cvd/server.h"
+#include "host/commands/cvd/server_client.h"
+#include "host/commands/cvd/server_command/utils.h"
+#include "host/commands/cvd/types.h"
+
+namespace cuttlefish {
+
+namespace {
+
+using DemoCommandSequence = std::vector<RequestWithStdio>;
+
+}  // namespace
+
+class LoadConfigsCommand : public CvdServerHandler {
+ public:
+  INJECT(LoadConfigsCommand(CommandSequenceExecutor& executor))
+      : executor_(executor) {}
+  ~LoadConfigsCommand() = default;
+
+  Result<bool> CanHandle(const RequestWithStdio& request) const override {
+    auto invocation = ParseInvocation(request.Message());
+    return invocation.command == kLoadSubCmd;
+  }
+
+  Result<cvd::Response> Handle(const RequestWithStdio& request) override {
+    std::unique_lock interrupt_lock(interrupt_mutex_);
+    CF_EXPECT(!interrupted_, "Interrupted");
+    CF_EXPECT(CF_EXPECT(CanHandle(request)));
+
+    auto commands = CF_EXPECT(CreateCommandSequence(request));
+    interrupt_lock.unlock();
+    CF_EXPECT(executor_.Execute(commands, request.Err()));
+
+    cvd::Response response;
+    response.mutable_command_response();
+    return response;
+  }
+  Result<void> Interrupt() override {
+    std::scoped_lock interrupt_lock(interrupt_mutex_);
+    interrupted_ = true;
+    CF_EXPECT(executor_.Interrupt());
+    return {};
+  }
+
+  cvd_common::Args CmdList() const override { return {kLoadSubCmd}; }
+
+  // TODO: expand this enum in the future to support more types (double, float,
+  // etc.) if needed
+  enum ArgValueType { UINTEGER, BOOLEAN, TEXT };
+
+  bool IsUnsignedInteger(const std::string& str) {
+    return !str.empty() && std::all_of(str.begin(), str.end(),
+                                       [](char c) { return std::isdigit(c); });
+  }
+
+  ArgValueType GetArgValueType(const std::string& str) {
+    if (IsUnsignedInteger(str)) {
+      return UINTEGER;
+    }
+
+    if (str == "true" || str == "false") {
+      return BOOLEAN;
+    }
+
+    // Otherwise, treat the string as text
+    return TEXT;
+  }
+
+  Json::Value ConvertArgToJson(const std::string& key,
+                               const std::string& leafValue) {
+    std::stack<std::string> levels;
+    std::stringstream ks(key);
+    std::string token;
+    while (std::getline(ks, token, '.')) {
+      levels.push(token);
+    }
+
+    // assign the leaf value based on the type of input value
+    Json::Value leaf;
+    if (GetArgValueType(leafValue) == UINTEGER) {
+      std::uint32_t leaf_val;
+      if (!android::base::ParseUint(leafValue ,&leaf_val)){
+        LOG(ERROR) << "Failed to parse unsigned integer " << leafValue;
+        return Json::Value::null;
+      };
+      leaf = leaf_val;
+    } else if (GetArgValueType(leafValue) == BOOLEAN) {
+      leaf = (leafValue == "true");
+    } else {
+      leaf = leafValue;
+    }
+
+    while (!levels.empty()) {
+      Json::Value curr;
+      std::string index = levels.top();
+
+      if (GetArgValueType(index) == UINTEGER) {
+        std::uint32_t index_val;
+        if (!android::base::ParseUint(index, &index_val)){
+          LOG(ERROR) << "Failed to parse unsigned integer " << index;
+          return Json::Value::null;
+        }
+        curr[index_val] = leaf;
+      } else {
+        curr[index] = leaf;
+      }
+
+      leaf = curr;
+      levels.pop();
+    }
+
+    return leaf;
+  }
+
+  Json::Value ParseArgsToJson(const std::vector<std::string>& strings) {
+    Json::Value jsonValue;
+    for (const auto& str : strings) {
+      std::string key;
+      std::string value;
+      size_t equals_pos = str.find('=');
+      if (equals_pos != std::string::npos) {
+        key = str.substr(0, equals_pos);
+        value = str.substr(equals_pos + 1);
+      } else {
+        key = str;
+        value.clear();
+        LOG(WARNING) << "No value provided for key " << key;
+        return Json::Value::null;
+      }
+      MergeTwoJsonObjs(jsonValue, ConvertArgToJson(key, value));
+    }
+
+    return jsonValue;
+  }
+
+  bool HasValidDotSeparatedPrefix(const std::string& str) {
+    auto equalsPos = str.find('=');
+    if (equalsPos == std::string::npos) {
+      return false;
+    }
+    std::string prefix = str.substr(0, equalsPos);
+    // return false if the prefix is empty, has no dots, starts with a dot,
+    // ends with a dot, or has consecutive dots
+    if (prefix.empty() || prefix.find('.') == std::string::npos ||
+        prefix.find('.') == 0 || prefix.find("..") != std::string::npos ||
+        prefix.back() == '.') {
+      return false;
+    }
+    return true;
+  }
+
+  bool hasEqualsWithValidDotSeparatedPrefix(const std::string& str) {
+    auto equalsPos = str.find('=');
+    return equalsPos != std::string::npos && equalsPos < str.length() - 1 &&
+           HasValidDotSeparatedPrefix(str);
+  }
+
+  bool ValidateArgsFormat(const std::vector<std::string>& strings) {
+    for (const auto& str : strings) {
+      if (!hasEqualsWithValidDotSeparatedPrefix(str)) {
+        LOG(ERROR) << "Invalid argument format. " << str
+                   << " Please use arg=value";
+        return false;
+      }
+    }
+    return true;
+  }
+
+  Result<DemoCommandSequence> CreateCommandSequence(
+      const RequestWithStdio& request) {
+    bool help = false;
+
+    std::vector<Flag> flags;
+    flags.emplace_back(GflagsCompatFlag("help", help));
+    std::string config_path;
+    flags.emplace_back(GflagsCompatFlag("config_path", config_path));
+
+    auto args = ParseInvocation(request.Message()).arguments;
+    CF_EXPECT(ParseFlags(flags, args));
+
+    Json::Value json_configs;
+    if (help) {
+      std::stringstream help_msg_stream;
+      help_msg_stream << "Usage: cvd " << kLoadSubCmd << std::endl;
+      const auto help_msg = help_msg_stream.str();
+      CF_EXPECT(WriteAll(request.Out(), help_msg) == help_msg.size());
+      return {};
+    } else {
+      json_configs =
+          CF_EXPECT(ParseJsonFile(config_path), "parsing input file failed");
+
+      if (args.size() > 0) {
+        for (auto& single_arg : args) {
+          LOG(INFO) << "Filtered args " << single_arg;
+        }
+        // Validate all arguments follow specific pattern
+        if (!ValidateArgsFormat(args)) {
+          return {};
+        }
+        // Convert all arguments to json tree
+        auto args_tree = ParseArgsToJson(args);
+        MergeTwoJsonObjs(json_configs, args_tree);
+      }
+    }
+
+    auto cvd_flags =
+        CF_EXPECT(ParseCvdConfigs(json_configs), "parsing json configs failed");
+
+    std::vector<cvd::Request> req_protos;
+
+    auto& launch_cmd = *req_protos.emplace_back().mutable_command_request();
+    launch_cmd.set_working_directory(
+        request.Message().command_request().working_directory());
+    *launch_cmd.mutable_env() = request.Message().command_request().env();
+
+    /* cvd load will always create instances in daemon mode (to be independent
+     of the terminal) and will enable reporting automatically (to run
+     automatically without question during launch)
+     */
+    launch_cmd.add_args("cvd");
+    launch_cmd.add_args("start");
+    launch_cmd.add_args("--daemon");
+    for (auto& parsed_flag : cvd_flags.launch_cvd_flags) {
+      launch_cmd.add_args(parsed_flag);
+    }
+
+    launch_cmd.mutable_selector_opts()->add_args(
+        std::string("--") + selector::SelectorFlags::kDisableDefaultGroup);
+
+    /*Verbose is disabled by default*/
+    auto dev_null = SharedFD::Open("/dev/null", O_RDWR);
+    CF_EXPECT(dev_null->IsOpen(), dev_null->StrError());
+    std::vector<SharedFD> fds = {dev_null, dev_null, dev_null};
+    DemoCommandSequence ret;
+
+    for (auto& request_proto : req_protos) {
+      ret.emplace_back(RequestWithStdio(request.Client(), request_proto, fds,
+                                        request.Credentials()));
+    }
+
+    return ret;
+  }
+
+ private:
+  static constexpr char kLoadSubCmd[] = "load";
+
+  CommandSequenceExecutor& executor_;
+
+  std::mutex interrupt_mutex_;
+  bool interrupted_ = false;
+};
+
+fruit::Component<fruit::Required<CommandSequenceExecutor>>
+LoadConfigsComponent() {
+  return fruit::createComponent()
+      .addMultibinding<CvdServerHandler, LoadConfigsCommand>();
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/server_command/load_configs.h b/host/commands/cvd/server_command/load_configs.h
new file mode 100644
index 0000000..81fd8a3
--- /dev/null
+++ b/host/commands/cvd/server_command/load_configs.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#pragma once
+
+#include <fruit/fruit.h>
+
+#include "host/commands/cvd/command_sequence.h"
+
+namespace cuttlefish {
+/*
+The cvd load component is responsible for the loading, translation, and creation of
+cuttlefish instances based on input json configuration file
+*/
+fruit::Component<fruit::Required<CommandSequenceExecutor>>
+LoadConfigsComponent();
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/server_command/operation_to_bins_map.cpp b/host/commands/cvd/server_command/operation_to_bins_map.cpp
new file mode 100644
index 0000000..6721f18
--- /dev/null
+++ b/host/commands/cvd/server_command/operation_to_bins_map.cpp
@@ -0,0 +1,43 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/commands/cvd/server_command/host_tool_target_manager.h"
+
+#include "common/libs/utils/contains.h"
+#include "common/libs/utils/files.h"
+#include "host/commands/cvd/common_utils.h"
+
+namespace cuttlefish {
+
+fruit::Component<OperationToBinsMap> OperationToBinsMapComponent() {
+  return fruit::createComponent().registerProvider(
+      [](void) -> OperationToBinsMap {
+        OperationToBinsMap op_to_possible_bins_map;
+        op_to_possible_bins_map["stop"] =
+            std::vector<std::string>{"cvd_internal_stop", "stop_cvd"};
+        op_to_possible_bins_map["start"] =
+            std::vector<std::string>{"cvd_internal_start", "launch_cvd"};
+        op_to_possible_bins_map["status"] =
+            std::vector<std::string>{"cvd_internal_status", "cvd_status"};
+        op_to_possible_bins_map["restart"] =
+            std::vector<std::string>{"restart_cvd"};
+        op_to_possible_bins_map["powerwash"] =
+            std::vector<std::string>{"powerwash_cvd"};
+        return op_to_possible_bins_map;
+      });
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/server_command/operation_to_bins_map.h b/host/commands/cvd/server_command/operation_to_bins_map.h
new file mode 100644
index 0000000..5b5fb9d
--- /dev/null
+++ b/host/commands/cvd/server_command/operation_to_bins_map.h
@@ -0,0 +1,32 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <string>
+#include <unordered_map>
+#include <vector>
+
+#include <fruit/fruit.h>
+
+namespace cuttlefish {
+
+using OperationToBinsMap =
+    std::unordered_map<std::string, std::vector<std::string>>;
+
+fruit::Component<OperationToBinsMap> OperationToBinsMapComponent();
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/server_command/power.cpp b/host/commands/cvd/server_command/power.cpp
new file mode 100644
index 0000000..ef589d9
--- /dev/null
+++ b/host/commands/cvd/server_command/power.cpp
@@ -0,0 +1,243 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/commands/cvd/server_command/power.h"
+
+#include <android-base/strings.h>
+
+#include <functional>
+#include <iostream>
+#include <mutex>
+#include <optional>
+#include <sstream>
+#include <string>
+#include <vector>
+
+#include "common/libs/fs/shared_buf.h"
+#include "common/libs/utils/contains.h"
+#include "common/libs/utils/subprocess.h"
+#include "host/commands/cvd/flag.h"
+#include "host/commands/cvd/selector/instance_database_types.h"
+#include "host/commands/cvd/selector/instance_group_record.h"
+#include "host/commands/cvd/selector/instance_record.h"
+#include "host/commands/cvd/selector/selector_constants.h"
+#include "host/commands/cvd/server_command/server_handler.h"
+#include "host/commands/cvd/server_command/utils.h"
+#include "host/commands/cvd/types.h"
+
+namespace cuttlefish {
+
+class CvdDevicePowerCommandHandler : public CvdServerHandler {
+ public:
+  INJECT(CvdDevicePowerCommandHandler(
+      HostToolTargetManager& host_tool_target_manager,
+      InstanceManager& instance_manager, SubprocessWaiter& subprocess_waiter))
+      : host_tool_target_manager_(host_tool_target_manager),
+        instance_manager_{instance_manager},
+        subprocess_waiter_(subprocess_waiter) {
+    cvd_power_operations_["restart"] =
+        [this](const std::string& android_host_out) -> Result<std::string> {
+      return CF_EXPECT(RestartDeviceBin(android_host_out));
+    };
+    cvd_power_operations_["powerwash"] =
+        [this](const std::string& android_host_out) -> Result<std::string> {
+      return CF_EXPECT(PowerwashBin(android_host_out));
+    };
+  }
+
+  Result<bool> CanHandle(const RequestWithStdio& request) const {
+    auto invocation = ParseInvocation(request.Message());
+    return Contains(cvd_power_operations_, invocation.command);
+  }
+
+  Result<cvd::Response> Handle(const RequestWithStdio& request) override {
+    std::unique_lock interrupt_lock(interruptible_);
+    CF_EXPECT(!interrupted_, "Interrupted");
+    CF_EXPECT(CanHandle(request));
+    CF_EXPECT(VerifyPrecondition(request));
+    const uid_t uid = request.Credentials()->uid;
+    cvd_common::Envs envs =
+        cvd_common::ConvertToEnvs(request.Message().command_request().env());
+
+    auto [op, subcmd_args] = ParseInvocation(request.Message());
+    bool is_help = IsHelp(subcmd_args);
+
+    // may modify subcmd_args by consuming in parsing
+    Command command =
+        is_help
+            ? CF_EXPECT(HelpCommand(request, uid, op, subcmd_args, envs))
+            : CF_EXPECT(NonHelpCommand(request, uid, op, subcmd_args, envs));
+    SubprocessOptions options;
+    CF_EXPECT(subprocess_waiter_.Setup(command.Start(options)));
+    interrupt_lock.unlock();
+
+    auto infop = CF_EXPECT(subprocess_waiter_.Wait());
+    return ResponseFromSiginfo(infop);
+  }
+
+  Result<void> Interrupt() override {
+    std::scoped_lock interrupt_lock(interruptible_);
+    interrupted_ = true;
+    CF_EXPECT(subprocess_waiter_.Interrupt());
+    return {};
+  }
+
+  cvd_common::Args CmdList() const override {
+    cvd_common::Args valid_ops;
+    valid_ops.reserve(cvd_power_operations_.size());
+    for (const auto& [op, _] : cvd_power_operations_) {
+      valid_ops.push_back(op);
+    }
+    return valid_ops;
+  }
+
+ private:
+  Result<std::string> RestartDeviceBin(
+      const std::string& android_host_out) const {
+    auto restart_bin = CF_EXPECT(host_tool_target_manager_.ExecBaseName({
+        .artifacts_path = android_host_out,
+        .op = "restart",
+    }));
+    return restart_bin;
+  }
+
+  Result<std::string> PowerwashBin(const std::string& android_host_out) const {
+    auto powerwash_bin = CF_EXPECT(host_tool_target_manager_.ExecBaseName({
+        .artifacts_path = android_host_out,
+        .op = "powerwash",
+    }));
+    return powerwash_bin;
+  }
+
+  Result<Command> HelpCommand(const RequestWithStdio& request, const uid_t uid,
+                              const std::string& op,
+                              const cvd_common::Args& subcmd_args,
+                              cvd_common::Envs envs) {
+    CF_EXPECT(Contains(envs, kAndroidHostOut));
+    const auto bin_base = CF_EXPECT(GetBin(op, envs.at(kAndroidHostOut)));
+    auto cvd_power_bin_path =
+        ConcatToString(envs.at(kAndroidHostOut), "/bin/", bin_base);
+    std::string home = Contains(envs, "HOME")
+                           ? envs.at("HOME")
+                           : CF_EXPECT(SystemWideUserHome(uid));
+    envs["HOME"] = home;
+    envs[kAndroidSoongHostOut] = envs.at(kAndroidHostOut);
+    ConstructCommandParam construct_cmd_param{
+        .bin_path = cvd_power_bin_path,
+        .home = home,
+        .args = subcmd_args,
+        .envs = envs,
+        .working_dir = request.Message().command_request().working_directory(),
+        .command_name = bin_base,
+        .in = request.In(),
+        .out = request.Out(),
+        .err = request.Err()};
+    Command command = CF_EXPECT(ConstructCommand(construct_cmd_param));
+    return command;
+  }
+
+  Result<Command> NonHelpCommand(const RequestWithStdio& request,
+                                 const uid_t uid, const std::string& op,
+                                 cvd_common::Args& subcmd_args,
+                                 cvd_common::Envs envs) {
+    // test if there is --instance_num flag
+    CvdFlag<std::int32_t> instance_num_flag("instance_num");
+    auto instance_num_opt =
+        CF_EXPECT(instance_num_flag.FilterFlag(subcmd_args));
+    selector::Queries extra_queries;
+    if (instance_num_opt) {
+      extra_queries.emplace_back(selector::kInstanceIdField, *instance_num_opt);
+    }
+    const auto& selector_opts =
+        request.Message().command_request().selector_opts();
+    const auto selector_args = cvd_common::ConvertToArgs(selector_opts.args());
+
+    auto instance = CF_EXPECT(instance_manager_.SelectInstance(
+        selector_args, extra_queries, envs, uid));
+    const auto& instance_group = instance.ParentGroup();
+    const auto& home = instance_group.HomeDir();
+
+    const auto& android_host_out = instance_group.HostArtifactsPath();
+    const auto bin_base = CF_EXPECT(GetBin(op, android_host_out));
+    auto cvd_power_bin_path =
+        ConcatToString(android_host_out, "/bin/", bin_base);
+
+    cvd_common::Args cvd_env_args{subcmd_args};
+    cvd_env_args.push_back(
+        ConcatToString("--instance_num=", instance.InstanceId()));
+    envs["HOME"] = home;
+    envs[kAndroidHostOut] = android_host_out;
+    envs[kAndroidSoongHostOut] = android_host_out;
+
+    std::stringstream command_to_issue;
+    command_to_issue << "HOME=" << home << " " << kAndroidHostOut << "="
+                     << android_host_out << " " << kAndroidSoongHostOut << "="
+                     << android_host_out << " " << cvd_power_bin_path << " ";
+    for (const auto& arg : cvd_env_args) {
+      command_to_issue << arg << " ";
+    }
+    WriteAll(request.Err(), command_to_issue.str());
+
+    ConstructCommandParam construct_cmd_param{
+        .bin_path = cvd_power_bin_path,
+        .home = home,
+        .args = cvd_env_args,
+        .envs = envs,
+        .working_dir = request.Message().command_request().working_directory(),
+        .command_name = bin_base,
+        .in = request.In(),
+        .out = request.Out(),
+        .err = request.Err()};
+    Command command = CF_EXPECT(ConstructCommand(construct_cmd_param));
+    return command;
+  }
+
+  bool IsHelp(const cvd_common::Args& cmd_args) const {
+    if (cmd_args.empty()) {
+      return false;
+    }
+    // cvd restart/powerwash --help, --helpxml, etc or simply cvd restart
+    if (IsHelpSubcmd(cmd_args)) {
+      return true;
+    }
+    // cvd restart/powerwash help <subcommand> format
+    return (cmd_args.front() == "help");
+  }
+
+  Result<std::string> GetBin(const std::string& subcmd,
+                             const std::string& android_host_out) const {
+    CF_EXPECT(Contains(cvd_power_operations_, subcmd),
+              subcmd << " is not supported.");
+    return CF_EXPECT((cvd_power_operations_.at(subcmd))(android_host_out));
+  }
+
+  HostToolTargetManager& host_tool_target_manager_;
+  InstanceManager& instance_manager_;
+  SubprocessWaiter& subprocess_waiter_;
+  std::mutex interruptible_;
+  bool interrupted_ = false;
+  using BinGetter = std::function<Result<std::string>(const std::string&)>;
+  std::unordered_map<std::string, BinGetter> cvd_power_operations_;
+};
+
+fruit::Component<
+    fruit::Required<HostToolTargetManager, InstanceManager, SubprocessWaiter>>
+CvdDevicePowerComponent() {
+  return fruit::createComponent()
+      .addMultibinding<CvdServerHandler, CvdDevicePowerCommandHandler>();
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/server_command/power.h b/host/commands/cvd/server_command/power.h
new file mode 100644
index 0000000..9ba37dc
--- /dev/null
+++ b/host/commands/cvd/server_command/power.h
@@ -0,0 +1,32 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <fruit/fruit.h>
+
+#include "host/commands/cvd/instance_manager.h"
+#include "host/commands/cvd/server_command/host_tool_target_manager.h"
+#include "host/commands/cvd/server_command/subprocess_waiter.h"
+
+namespace cuttlefish {
+
+// restart, powerwash
+fruit::Component<
+    fruit::Required<HostToolTargetManager, InstanceManager, SubprocessWaiter>>
+CvdDevicePowerComponent();
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/server_command/reset.cpp b/host/commands/cvd/server_command/reset.cpp
new file mode 100644
index 0000000..a427065
--- /dev/null
+++ b/host/commands/cvd/server_command/reset.cpp
@@ -0,0 +1,75 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/commands/cvd/server_command/fleet.h"
+
+#include <iostream>
+#include <sstream>
+
+#include "common/libs/fs/shared_buf.h"
+#include "host/commands/cvd/server_command/server_handler.h"
+#include "host/commands/cvd/server_command/utils.h"
+
+namespace cuttlefish {
+
+/*
+ * Prints out help message for cvd reset
+ *
+ * cvd reset is a feature implemented by the client. However, the user may run
+ * cvd help reset. The cvd help parsing will be done on the server side, and
+ * forwarded to the cvd help handler. The cvd help handler again will forward
+ * it to supposedly cvd reset handler. The cvd reset handler will only receive
+ * "cvd reset --help."
+ *
+ * For, say, "cvd reset" or even "cvd reset --help"," the parsing will be done
+ * on the client side, and handled by the client.
+ *
+ */
+class CvdResetCommandHandler : public CvdServerHandler {
+ public:
+  INJECT(CvdResetCommandHandler()) {}
+
+  Result<bool> CanHandle(const RequestWithStdio& request) const {
+    auto invocation = ParseInvocation(request.Message());
+    return invocation.command == kResetSubcmd;
+  }
+  Result<cvd::Response> Handle(const RequestWithStdio& request) override {
+    CF_EXPECT(CanHandle(request));
+    cvd::Response response;
+    response.mutable_command_response();
+    response.mutable_status()->set_code(cvd::Status::OK);
+    std::stringstream guide_message;
+    guide_message << "\"cvd reset\" is implemented on the client side."
+                  << " Try:" << std::endl;
+    guide_message << "  cvd reset --help" << std::endl;
+    const auto guide_message_str = guide_message.str();
+    CF_EXPECT_EQ(WriteAll(request.Err(), guide_message_str),
+                 guide_message_str.size());
+    return response;
+  }
+  Result<void> Interrupt() override { return CF_ERR("Can't interrupt"); }
+  cvd_common::Args CmdList() const override { return {kResetSubcmd}; }
+
+ private:
+  static constexpr char kResetSubcmd[] = "reset";
+};
+
+fruit::Component<> CvdResetComponent() {
+  return fruit::createComponent()
+      .addMultibinding<CvdServerHandler, CvdResetCommandHandler>();
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/server_command/reset.h b/host/commands/cvd/server_command/reset.h
new file mode 100644
index 0000000..4ec389b
--- /dev/null
+++ b/host/commands/cvd/server_command/reset.h
@@ -0,0 +1,25 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <fruit/fruit.h>
+
+namespace cuttlefish {
+
+fruit::Component<> CvdResetComponent();
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/server_command/restart.cpp b/host/commands/cvd/server_command/restart.cpp
new file mode 100644
index 0000000..3116be3
--- /dev/null
+++ b/host/commands/cvd/server_command/restart.cpp
@@ -0,0 +1,285 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/commands/cvd/server.h"
+
+#include <sys/types.h>
+
+#include <cstdio>
+#include <iostream>
+
+#include <android-base/file.h>
+#include <fruit/fruit.h>
+
+#include "common/libs/fs/shared_buf.h"
+#include "common/libs/fs/shared_fd.h"
+#include "common/libs/utils/contains.h"
+#include "common/libs/utils/result.h"
+#include "cvd_server.pb.h"
+#include "host/commands/cvd/common_utils.h"
+#include "host/commands/cvd/epoll_loop.h"
+#include "host/commands/cvd/frontline_parser.h"
+#include "host/commands/cvd/instance_manager.h"
+#include "host/commands/cvd/server_command/components.h"
+#include "host/commands/cvd/server_command/utils.h"
+#include "host/commands/cvd/types.h"
+#include "host/libs/web/build_api.h"
+
+namespace cuttlefish {
+namespace {
+
+constexpr char kRestartServerHelpMessage[] =
+    R"(Cuttlefish Virtual Device (CVD) CLI.
+
+usage: cvd restart-server <common args> <mode> <mode args>
+
+Common Args:
+  --help                 Print out this message
+  --verbose              Control verbose mode
+
+Modes:
+  match-client           Use the client executable.
+  latest                 Download the latest executable
+  reuse-server           Use the server executable.
+)";
+
+Result<SharedFD> LatestCvdAsFd(BuildApi& build_api) {
+  static constexpr char kBuild[] = "aosp-master";
+  static constexpr char kTarget[] = "aosp_cf_x86_64_phone-userdebug";
+  auto latest = CF_EXPECT(build_api.LatestBuildId(kBuild, kTarget));
+  DeviceBuild build{latest, kTarget};
+
+  auto fd = SharedFD::MemfdCreate("cvd");
+  CF_EXPECT(fd->IsOpen(), "MemfdCreate failed: " << fd->StrError());
+
+  auto write = [fd](char* data, size_t size) -> bool {
+    if (size == 0) {
+      return true;
+    }
+    auto written = WriteAll(fd, data, size);
+    if (written != size) {
+      LOG(ERROR) << "Failed to persist data: " << fd->StrError();
+      return false;
+    }
+    return true;
+  };
+  CF_EXPECT(build_api.ArtifactToCallback(build, "cvd", write));
+
+  return fd;
+}
+
+class CvdRestartHandler : public CvdServerHandler {
+ public:
+  INJECT(CvdRestartHandler(BuildApi& build_api, CvdServer& server,
+                           InstanceManager& instance_manager))
+      : build_api_(build_api),
+        supported_modes_({"match-client", "latest", "reuse-server"}),
+        server_(server),
+        instance_manager_(instance_manager) {
+    flags_.EnrollFlag(CvdFlag<bool>("help", false));
+    flags_.EnrollFlag(CvdFlag<bool>("verbose", false));
+    // If the fla is false, the request will fail if there's on-going requests
+    // If true, calls Stop()
+    flags_.EnrollFlag(CvdFlag<bool>("force", true));
+  }
+
+  Result<bool> CanHandle(const RequestWithStdio& request) const override {
+    auto invocation = ParseInvocation(request.Message());
+    return android::base::Basename(invocation.command) == kRestartServer;
+  }
+
+  Result<cvd::Response> Handle(const RequestWithStdio& request) override {
+    CF_EXPECT(CanHandle(request));
+    cvd::Response response;
+    if (request.Message().has_shutdown_request()) {
+      response.mutable_shutdown_response();
+    } else {
+      CF_EXPECT(
+          request.Message().has_command_request(),
+          "cvd restart request must be either command or shutdown request.");
+      response.mutable_command_response();
+    }
+    // all_args[0] = "cvd", all_args[1] = "restart_server"
+    cvd_common::Args all_args =
+        cvd_common::ConvertToArgs(request.Message().command_request().args());
+    CF_EXPECT_GE(all_args.size(), 2);
+    CF_EXPECT_EQ(all_args.at(0), "cvd");
+    CF_EXPECT_EQ(all_args.at(1), kRestartServer);
+    // erase the first item, "cvd"
+    all_args.erase(all_args.begin());
+
+    auto parsed = CF_EXPECT(Parse(all_args));
+    if (parsed.help) {
+      const std::string help_message(kRestartServerHelpMessage);
+      WriteAll(request.Out(), help_message);
+      return response;
+    }
+
+    // On CF_ERR, the locks will be released automatically
+    WriteAll(request.Out(), "Stopping the cvd_server.\n");
+    server_.Stop();
+
+    CF_EXPECT(request.Credentials() != std::nullopt);
+    const uid_t client_uid = request.Credentials()->uid;
+    auto json_string =
+        CF_EXPECT(SerializedInstanceDatabaseToString(client_uid));
+    std::optional<SharedFD> mem_fd;
+    if (instance_manager_.HasInstanceGroups(client_uid)) {
+      mem_fd = CF_EXPECT(CreateMemFileWithSerializedDb(json_string));
+      CF_EXPECT(mem_fd != std::nullopt && (*mem_fd)->IsOpen(),
+                "mem file not open?");
+    }
+
+    if (parsed.verbose && mem_fd) {
+      PrintFileLink(request.Err(), *mem_fd);
+    }
+
+    const std::string subcmd = parsed.subcmd.value_or("reuse-server");
+    SharedFD new_exe;
+    CF_EXPECT(Contains(supported_modes_, subcmd),
+              "unsupported subcommand :" << subcmd);
+    if (subcmd == "match-client") {
+      CF_EXPECT(request.Extra(), "match-client requires the file descriptor.");
+      new_exe = *request.Extra();
+    } else if (subcmd == "latest") {
+      new_exe = CF_EXPECT(LatestCvdAsFd(build_api_));
+    } else if (subcmd == "reuse-server") {
+      new_exe = CF_EXPECT(NewExecFromPath(request, kServerExecPath));
+    } else {
+      return CF_ERR("unsupported subcommand");
+    }
+
+    CF_EXPECT(server_.Exec({.new_exe = new_exe,
+                            .carryover_client_fd = request.Client(),
+                            .client_stderr_fd = request.Err(),
+                            .in_memory_data_fd = mem_fd,
+                            .verbose = parsed.verbose}));
+    return CF_ERR("Should be unreachable");
+  }
+
+  Result<void> Interrupt() override { return CF_ERR("Can't interrupt"); }
+  cvd_common::Args CmdList() const override { return {kRestartServer}; }
+  constexpr static char kRestartServer[] = "restart-server";
+
+ private:
+  struct Parsed {
+    bool help;
+    bool verbose;
+    std::optional<std::string> subcmd;
+    std::optional<std::string> exec_path;
+  };
+  Result<Parsed> Parse(const cvd_common::Args& args) {
+    // it's ugly but let's reuse the frontline parser
+    auto parser_with_result =
+        CF_EXPECT(FrontlineParser::Parse({.internal_cmds = supported_modes_,
+                                          .all_args = args,
+                                          .cvd_flags = flags_}));
+    CF_EXPECT(parser_with_result != nullptr,
+              "FrontlineParser::Parse() returned nullptr");
+    // If there was a subcmd, the flags for the subcmd is in SubCmdArgs().
+    // If not, the flags for restart-server would be in CvdArgs()
+    std::optional<std::string> subcmd_opt = parser_with_result->SubCmd();
+    cvd_common::Args subcmd_args =
+        (subcmd_opt ? parser_with_result->SubCmdArgs()
+                    : parser_with_result->CvdArgs());
+    auto name_flag_map = CF_EXPECT(flags_.CalculateFlags(subcmd_args));
+    CF_EXPECT(Contains(name_flag_map, "help"));
+    CF_EXPECT(Contains(name_flag_map, "verbose"));
+
+    bool help =
+        CF_EXPECT(FlagCollection::GetValue<bool>(name_flag_map.at("help")));
+    bool verbose =
+        CF_EXPECT(FlagCollection::GetValue<bool>(name_flag_map.at("verbose")));
+    std::optional<std::string> exec_path;
+    if (Contains(name_flag_map, "exec-path")) {
+      exec_path = CF_EXPECT(
+          FlagCollection::GetValue<std::string>(name_flag_map.at("exec-path")));
+    }
+    return Parsed{.help = help,
+                  .verbose = verbose,
+                  .subcmd = subcmd_opt,
+                  .exec_path = exec_path};
+  }
+
+  Result<SharedFD> NewExecFromPath(const RequestWithStdio& request,
+                                   const std::string& exec_path) {
+    std::string emulated_absolute_path;
+    const std::string client_pwd =
+        request.Message().command_request().working_directory();
+    // ~ that means $HOME is not supported
+    CF_EXPECT(!android::base::StartsWith(exec_path, "~/"),
+              "Path starting with ~/ is not supported.");
+    CF_EXPECT_NE(
+        exec_path, "~",
+        "~ is not supported as a executable path, and likely is not a file.");
+    emulated_absolute_path =
+        CF_EXPECT(EmulateAbsolutePath({.current_working_dir = client_pwd,
+                                       .path_to_convert = exec_path,
+                                       .follow_symlink = false}),
+                  "Failed to change exec_path to an absolute path.");
+    auto new_exe = SharedFD::Open(emulated_absolute_path, O_RDONLY);
+    CF_EXPECT(new_exe->IsOpen(), "Failed to open \""
+                                     << exec_path << " that is "
+                                     << emulated_absolute_path
+                                     << "\": " << new_exe->StrError());
+    return new_exe;
+  }
+
+  Result<std::string> SerializedInstanceDatabaseToString(
+      const uid_t client_uid) {
+    auto db_json = CF_EXPECT(instance_manager_.Serialize(client_uid),
+                             "Failed to serialized instance database");
+    return db_json.toStyledString();
+  }
+
+  Result<SharedFD> CreateMemFileWithSerializedDb(
+      const std::string& json_string) {
+    const std::string mem_file_name = "cvd_server_" + std::to_string(getpid());
+    auto mem_fd = SharedFD::MemfdCreateWithData(mem_file_name, json_string);
+    CF_EXPECT(mem_fd->IsOpen(),
+              "MemfdCreateWithData failed: " << mem_fd->StrError());
+    return mem_fd;
+  }
+
+  void PrintFileLink(const SharedFD& fd_stream, const SharedFD& mem_fd) const {
+    auto link_target_result = mem_fd->ProcFdLinkTarget();
+    if (!link_target_result.ok()) {
+      WriteAll(fd_stream,
+               "Failed to resolve the link target for the memory file.\n");
+      return;
+    }
+    std::string message("The link target for the memory file is ");
+    message.append(*link_target_result).append("\n");
+    WriteAll(fd_stream, message);
+    return;
+  }
+
+  BuildApi& build_api_;
+  std::vector<std::string> supported_modes_;
+  FlagCollection flags_;
+  CvdServer& server_;
+  InstanceManager& instance_manager_;
+};
+
+}  // namespace
+
+fruit::Component<fruit::Required<BuildApi, CvdServer, InstanceManager>>
+CvdRestartComponent() {
+  return fruit::createComponent()
+      .addMultibinding<CvdServerHandler, CvdRestartHandler>();
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/server_command/serial_launch.cpp b/host/commands/cvd/server_command/serial_launch.cpp
new file mode 100644
index 0000000..65cd527
--- /dev/null
+++ b/host/commands/cvd/server_command/serial_launch.cpp
@@ -0,0 +1,419 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/commands/cvd/server_command/serial_launch.h"
+
+#include <sys/types.h>
+
+#include <chrono>
+#include <mutex>
+#include <sstream>
+#include <string>
+#include <vector>
+
+#include "android-base/parseint.h"
+#include "android-base/strings.h"
+
+#include "common/libs/fs/shared_buf.h"
+#include "common/libs/utils/files.h"
+#include "common/libs/utils/flag_parser.h"
+#include "common/libs/utils/result.h"
+#include "cvd_server.pb.h"
+#include "host/commands/cvd/instance_lock.h"
+#include "host/commands/cvd/selector/selector_constants.h"
+#include "host/commands/cvd/server_client.h"
+#include "host/commands/cvd/server_command/utils.h"
+#include "host/commands/cvd/types.h"
+
+// copied from "../demo_multi_vd.cpp"
+namespace cuttlefish {
+namespace {
+
+template <typename... Args>
+cvd::Request CreateCommandRequest(
+    const google::protobuf::Map<std::string, std::string>& envs,
+    Args&&... args) {
+  cvd::Request request;
+  auto& cmd_request = *request.mutable_command_request();
+  (cmd_request.add_args(std::forward<Args>(args)), ...);
+  *cmd_request.mutable_env() = envs;
+  return request;
+}
+
+std::vector<cvd::Request> AppendRequestVectors(
+    std::vector<cvd::Request>&& dest, std::vector<cvd::Request>&& src) {
+  auto merged = std::move(dest);
+  for (auto& request : src) {
+    merged.emplace_back(std::move(request));
+  }
+  return merged;
+}
+
+struct DemoCommandSequence {
+  std::vector<InstanceLockFile> instance_locks;
+  std::vector<RequestWithStdio> requests;
+};
+
+/** Returns a `Flag` object that accepts comma-separated unsigned integers. */
+template <typename T>
+Flag DeviceSpecificUintFlag(const std::string& name, std::vector<T>& values,
+                            const RequestWithStdio& request) {
+  return GflagsCompatFlag(name).Setter(
+      [&request, &values](const FlagMatch& match) {
+        auto parsed_values = android::base::Tokenize(match.value, ", ");
+        for (auto& parsed_value : parsed_values) {
+          std::uint32_t num = 0;
+          if (!android::base::ParseUint(parsed_value, &num)) {
+            constexpr char kError[] = "Failed to parse integer";
+            WriteAll(request.Out(), kError, sizeof(kError));
+            return false;
+          }
+          values.push_back(num);
+        }
+        return true;
+      });
+}
+
+/** Returns a `Flag` object that accepts comma-separated strings. */
+Flag DeviceSpecificStringFlag(const std::string& name,
+                              std::vector<std::string>& values) {
+  return GflagsCompatFlag(name).Setter([&values](const FlagMatch& match) {
+    auto parsed_values = android::base::Tokenize(match.value, ", ");
+    for (auto& parsed_value : parsed_values) {
+      values.push_back(parsed_value);
+    }
+    return true;
+  });
+}
+
+std::string ParentDir(const uid_t uid) {
+  constexpr char kParentDirPrefix[] = "/tmp/cvd/";
+  std::stringstream ss;
+  ss << kParentDirPrefix << uid << "/";
+  return ss.str();
+}
+
+}  // namespace
+
+class SerialLaunchCommand : public CvdServerHandler {
+ public:
+  INJECT(SerialLaunchCommand(CommandSequenceExecutor& executor,
+                             InstanceLockFileManager& lock_file_manager))
+      : executor_(executor), lock_file_manager_(lock_file_manager) {}
+  ~SerialLaunchCommand() = default;
+
+  Result<bool> CanHandle(const RequestWithStdio& request) const override {
+    auto invocation = ParseInvocation(request.Message());
+    return invocation.command == "experimental" &&
+           invocation.arguments.size() >= 1 &&
+           invocation.arguments[0] == "serial_launch";
+  }
+  Result<cvd::Response> Handle(const RequestWithStdio& request) override {
+    std::unique_lock interrupt_lock(interrupt_mutex_);
+    if (interrupted_) {
+      return CF_ERR("Interrupted");
+    }
+    CF_EXPECT(CF_EXPECT(CanHandle(request)));
+
+    auto commands = CF_EXPECT(CreateCommandSequence(request));
+    interrupt_lock.unlock();
+    CF_EXPECT(executor_.Execute(commands.requests, request.Err()));
+
+    for (auto& lock : commands.instance_locks) {
+      CF_EXPECT(lock.Status(InUseState::kInUse));
+    }
+
+    cvd::Response response;
+    response.mutable_command_response();
+    return response;
+  }
+
+  Result<void> Interrupt() override {
+    std::scoped_lock interrupt_lock(interrupt_mutex_);
+    interrupted_ = true;
+    CF_EXPECT(executor_.Interrupt());
+    return {};
+  }
+
+  cvd_common::Args CmdList() const override { return {"experimental"}; }
+
+  Result<DemoCommandSequence> CreateCommandSequence(
+      const RequestWithStdio& request) {
+    const auto& client_env = request.Message().command_request().env();
+    const auto client_uid = CF_EXPECT(request.Credentials()).uid;
+
+    std::vector<Flag> flags;
+
+    bool help = false;
+    flags.emplace_back(GflagsCompatFlag("help", help));
+
+    std::string credentials;
+    flags.emplace_back(GflagsCompatFlag("credentials", credentials));
+
+    bool verbose = false;
+    flags.emplace_back(GflagsCompatFlag("verbose", verbose));
+
+    std::vector<std::uint32_t> x_res;
+    flags.emplace_back(DeviceSpecificUintFlag("x_res", x_res, request));
+
+    std::vector<std::uint32_t> y_res;
+    flags.emplace_back(DeviceSpecificUintFlag("y_res", y_res, request));
+
+    std::vector<std::uint32_t> dpi;
+    flags.emplace_back(DeviceSpecificUintFlag("dpi", dpi, request));
+
+    std::vector<std::uint32_t> cpus;
+    flags.emplace_back(DeviceSpecificUintFlag("cpus", cpus, request));
+
+    std::vector<std::uint32_t> memory_mb;
+    flags.emplace_back(DeviceSpecificUintFlag("memory_mb", memory_mb, request));
+
+    std::vector<std::string> setupwizard_mode;
+    flags.emplace_back(
+        DeviceSpecificStringFlag("setupwizard_mode", setupwizard_mode));
+
+    std::vector<std::string> report_anonymous_usage_stats;
+    flags.emplace_back(DeviceSpecificStringFlag("report_anonymous_usage_stats",
+                                                report_anonymous_usage_stats));
+
+    std::vector<std::string> webrtc_device_id;
+    flags.emplace_back(
+        DeviceSpecificStringFlag("webrtc_device_id", webrtc_device_id));
+
+    bool daemon = true;
+    flags.emplace_back(GflagsCompatFlag("daemon", daemon));
+
+    struct Device {
+      std::string build;
+      std::string home_dir;
+      InstanceLockFile ins_lock;
+    };
+
+    auto time = std::chrono::system_clock::now().time_since_epoch().count();
+    std::vector<Device> devices;
+    auto& device_flag = flags.emplace_back();
+    device_flag.Alias({FlagAliasMode::kFlagPrefix, "--device="});
+    device_flag.Alias({FlagAliasMode::kFlagConsumesFollowing, "--device"});
+    device_flag.Setter(
+        [this, time, client_uid, &devices, &request](const FlagMatch& mat) {
+          auto lock = lock_file_manager_.TryAcquireUnusedLock();
+          if (!lock.ok()) {
+            WriteAll(request.Err(), lock.error().Message());
+            return false;
+          } else if (!lock->has_value()) {
+            constexpr char kError[] = "could not acquire instance lock";
+            WriteAll(request.Err(), kError, sizeof(kError));
+            return false;
+          }
+          int num = (*lock)->Instance();
+          std::string home_dir = ParentDir(client_uid) + std::to_string(time) +
+                                 "_" + std::to_string(num) + "/";
+          devices.emplace_back(Device{
+              .build = mat.value,
+              .home_dir = std::move(home_dir),
+              .ins_lock = std::move(**lock),
+          });
+          return true;
+        });
+
+    auto args = ParseInvocation(request.Message()).arguments;
+    for (const auto& arg : args) {
+      std::string message = "argument: \"" + arg + "\"\n";
+      CF_EXPECT(WriteAll(request.Err(), message) == message.size());
+    }
+
+    CF_EXPECT(ParseFlags(flags, args));
+
+    if (help) {
+      static constexpr char kHelp[] =
+          "Usage: cvd experimental serial_launch [--verbose] --credentials=XYZ "
+          "--device=build/target --device=build/target";
+      CF_EXPECT(WriteAll(request.Out(), kHelp, sizeof(kHelp)) == sizeof(kHelp));
+      return {};
+    }
+
+    CF_EXPECT(devices.size() < 2 || daemon,
+              "--daemon=true required for more than 1 device");
+
+    std::vector<std::vector<std::uint32_t>*> int_device_args = {
+        &x_res, &y_res, &dpi, &cpus, &memory_mb,
+    };
+    for (const auto& int_device_arg : int_device_args) {
+      CF_EXPECT(int_device_arg->size() == 0 ||
+                    int_device_arg->size() == devices.size(),
+                "If given, device-specific flags should have as many values as "
+                "there are `--device` arguments");
+    }
+    std::vector<std::vector<std::string>*> string_device_args = {
+        &setupwizard_mode,
+        &report_anonymous_usage_stats,
+        &webrtc_device_id,
+    };
+    for (const auto& string_device_arg : string_device_args) {
+      CF_EXPECT(string_device_arg->size() == 0 ||
+                    string_device_arg->size() == devices.size(),
+                "If given, device-specific flags should have as many values as "
+                "there are `--device` arguments");
+    }
+
+    std::vector<cvd::Request> req_protos;
+
+    auto mkdir_ancestors_requests =
+        CF_EXPECT(CreateMkdirCommandRequestRecursively(client_env,
+                                                       ParentDir(client_uid)));
+    req_protos = AppendRequestVectors(std::move(req_protos),
+                                      std::move(mkdir_ancestors_requests));
+
+    bool is_first = true;
+
+    int index = 0;
+    for (const auto& device : devices) {
+      auto& mkdir_cmd = *req_protos.emplace_back().mutable_command_request();
+      *mkdir_cmd.mutable_env() = client_env;
+      mkdir_cmd.add_args("cvd");
+      mkdir_cmd.add_args("mkdir");
+      mkdir_cmd.add_args(device.home_dir);
+
+      auto& fetch_cmd = *req_protos.emplace_back().mutable_command_request();
+      *fetch_cmd.mutable_env() = client_env;
+      fetch_cmd.set_working_directory(device.home_dir);
+      fetch_cmd.add_args("cvd");
+      fetch_cmd.add_args("fetch");
+      fetch_cmd.add_args("--directory=" + device.home_dir);
+      fetch_cmd.add_args("-default_build=" + device.build);
+      fetch_cmd.add_args("-credential_source=" + credentials);
+
+      auto& launch_cmd = *req_protos.emplace_back().mutable_command_request();
+      *launch_cmd.mutable_env() = client_env;
+      launch_cmd.set_working_directory(device.home_dir);
+      (*launch_cmd.mutable_env())["HOME"] = device.home_dir;
+      (*launch_cmd.mutable_env())["ANDROID_HOST_OUT"] = device.home_dir;
+      (*launch_cmd.mutable_env())["ANDROID_PRODUCT_OUT"] = device.home_dir;
+      launch_cmd.add_args("cvd");
+      /* TODO(kwstephenkim): remove kAcquireFileLockOpt flag when
+       * SerialLaunchCommand is re-implemented so that it does not have to
+       * acquire a file lock.
+       */
+      launch_cmd.mutable_selector_opts()->add_args(
+          std::string("--") + selector::SelectorFlags::kAcquireFileLock +
+          "=false");
+      launch_cmd.add_args("start");
+      launch_cmd.add_args(
+          "--undefok=daemon,base_instance_num,x_res,y_res,dpi,cpus,memory_mb,"
+          "setupwizard_mode,report_anonymous_usage_stats,webrtc_device_id");
+      launch_cmd.add_args("--daemon");
+      launch_cmd.add_args("--base_instance_num=" +
+                          std::to_string(device.ins_lock.Instance()));
+      if (index < x_res.size()) {
+        launch_cmd.add_args("--x_res=" + std::to_string(x_res[index]));
+      }
+      if (index < y_res.size()) {
+        launch_cmd.add_args("--y_res=" + std::to_string(y_res[index]));
+      }
+      if (index < dpi.size()) {
+        launch_cmd.add_args("--dpi=" + std::to_string(dpi[index]));
+      }
+      if (index < cpus.size()) {
+        launch_cmd.add_args("--cpus=" + std::to_string(cpus[index]));
+      }
+      if (index < memory_mb.size()) {
+        launch_cmd.add_args("--memory_mb=" + std::to_string(memory_mb[index]));
+      }
+      if (index < setupwizard_mode.size()) {
+        launch_cmd.add_args("--setupwizard_mode=" + setupwizard_mode[index]);
+      }
+      if (index < report_anonymous_usage_stats.size()) {
+        launch_cmd.add_args("--report_anonymous_usage_stats=" +
+                            report_anonymous_usage_stats[index]);
+      }
+      if (index < webrtc_device_id.size()) {
+        launch_cmd.add_args("--webrtc_device_id=" + webrtc_device_id[index]);
+      }
+
+      index++;
+      if (is_first) {
+        is_first = false;
+        continue;
+      }
+      const auto& first = devices[0];
+      const auto& first_instance_num =
+          std::to_string(first.ins_lock.Instance());
+      auto hwsim_path = first.home_dir + "cuttlefish_runtime." +
+                        first_instance_num + "/internal/vhost_user_mac80211";
+      launch_cmd.add_args("--vhost_user_mac80211_hwsim=" + hwsim_path);
+      launch_cmd.add_args("--rootcanal_instance_num=" + first_instance_num);
+    }
+
+    std::vector<SharedFD> fds;
+    if (verbose) {
+      fds = request.FileDescriptors();
+    } else {
+      auto dev_null = SharedFD::Open("/dev/null", O_RDWR);
+      CF_EXPECT(dev_null->IsOpen(), dev_null->StrError());
+      fds = {dev_null, dev_null, dev_null};
+    }
+
+    DemoCommandSequence ret;
+    for (auto& device : devices) {
+      ret.instance_locks.emplace_back(std::move(device.ins_lock));
+    }
+    for (auto& request_proto : req_protos) {
+      ret.requests.emplace_back(request.Client(), request_proto, fds,
+                                request.Credentials());
+    }
+
+    return ret;
+  }
+
+ private:
+  Result<std::vector<cvd::Request>> CreateMkdirCommandRequestRecursively(
+      const google::protobuf::Map<std::string, std::string>& client_env,
+      const std::string& path) {
+    std::vector<cvd::Request> output;
+    CF_EXPECT(!path.empty() && path.at(0) == '/',
+              "Only absolute path is supported.");
+    if (path == "/") {
+      return output;
+    }
+    std::string path_exclude_root = path.substr(1);
+    std::vector<std::string> tokens =
+        android::base::Tokenize(path_exclude_root, "/");
+    std::string current_dir = "/";
+    for (int i = 0; i < tokens.size(); i++) {
+      current_dir.append(tokens[i]);
+      if (!DirectoryExists(current_dir)) {
+        output.emplace_back(
+            CreateCommandRequest(client_env, "cvd", "mkdir", current_dir));
+      }
+      current_dir.append("/");
+    }
+    return output;
+  }
+
+  CommandSequenceExecutor& executor_;
+  InstanceLockFileManager& lock_file_manager_;
+
+  std::mutex interrupt_mutex_;
+  bool interrupted_ = false;
+};
+
+fruit::Component<fruit::Required<CommandSequenceExecutor>>
+cvdSerialLaunchComponent() {
+  return fruit::createComponent()
+      .addMultibinding<CvdServerHandler, SerialLaunchCommand>();
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/server_command/serial_launch.h b/host/commands/cvd/server_command/serial_launch.h
new file mode 100644
index 0000000..a452878
--- /dev/null
+++ b/host/commands/cvd/server_command/serial_launch.h
@@ -0,0 +1,27 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#pragma once
+
+#include <fruit/fruit.h>
+
+#include "host/commands/cvd/command_sequence.h"
+
+namespace cuttlefish {
+
+fruit::Component<fruit::Required<CommandSequenceExecutor>>
+cvdSerialLaunchComponent();
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/server_command/serial_preset.cpp b/host/commands/cvd/server_command/serial_preset.cpp
new file mode 100644
index 0000000..53d6d0e
--- /dev/null
+++ b/host/commands/cvd/server_command/serial_preset.cpp
@@ -0,0 +1,116 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "host/commands/cvd/server_command/serial_preset.h"
+
+#include <mutex>
+#include <string>
+#include <unordered_map>
+#include <vector>
+
+#include "common/libs/utils/result.h"
+#include "cvd_server.pb.h"
+#include "host/commands/cvd/server_client.h"
+#include "host/commands/cvd/server_command/server_handler.h"
+#include "host/commands/cvd/server_command/utils.h"
+#include "host/commands/cvd/types.h"
+
+// file copied from "../demo_multi_vd.cpp"
+namespace cuttlefish {
+
+class SerialPreset : public CvdServerHandler {
+ public:
+  INJECT(SerialPreset(CommandSequenceExecutor& executor))
+      : executor_(executor) {}
+  ~SerialPreset() = default;
+
+  Result<bool> CanHandle(const RequestWithStdio& request) const override {
+    auto invocation = ParseInvocation(request.Message());
+    return invocation.command == "experimental" &&
+           invocation.arguments.size() >= 1 &&
+           Presets().count(invocation.arguments[0]) > 0;
+  }
+
+  Result<cvd::Response> Handle(const RequestWithStdio& request) override {
+    std::unique_lock interrupt_lock(interrupt_mutex_);
+    if (interrupted_) {
+      return CF_ERR("Interrupted");
+    }
+    CF_EXPECT(CF_EXPECT(CanHandle(request)));
+
+    auto invocation = ParseInvocation(request.Message());
+    CF_EXPECT(invocation.arguments.size() >= 1);
+    const auto& presets = Presets();
+    auto devices = presets.find(invocation.arguments[0]);
+    CF_EXPECT(devices != presets.end(), "could not find preset");
+
+    cvd::Request inner_req_proto = request.Message();
+    auto& cmd = *inner_req_proto.mutable_command_request();
+    cmd.clear_args();
+    cmd.add_args("cvd");
+    cmd.add_args("experimental");
+    cmd.add_args("serial_launch");
+    for (const auto& device : devices->second) {
+      cmd.add_args("--device=" + device);
+    }
+    for (int i = 1; i < invocation.arguments.size(); i++) {
+      cmd.add_args(invocation.arguments[i]);
+    }
+
+    RequestWithStdio inner_request(request.Client(), std::move(inner_req_proto),
+                                   request.FileDescriptors(),
+                                   request.Credentials());
+
+    CF_EXPECT(executor_.Execute({std::move(inner_request)}, request.Err()));
+    interrupt_lock.unlock();
+
+    cvd::Response response;
+    response.mutable_command_response();
+    return response;
+  }
+
+  Result<void> Interrupt() override {
+    std::scoped_lock interrupt_lock(interrupt_mutex_);
+    interrupted_ = true;
+    CF_EXPECT(executor_.Interrupt());
+    return {};
+  }
+
+  cvd_common::Args CmdList() const override { return {"experimental"}; }
+
+ private:
+  CommandSequenceExecutor& executor_;
+
+  static std::unordered_map<std::string, std::vector<std::string>> Presets() {
+    return {
+        {"create_phone_tablet",
+         {"git_master/cf_x86_64_phone-userdebug",
+          "git_master/cf_x86_64_tablet-userdebug"}},
+        {"create_phone_wear",
+         {"git_master/cf_x86_64_phone-userdebug", "git_master/cf_gwear_x86"}},
+    };
+  }
+
+  std::mutex interrupt_mutex_;
+  bool interrupted_ = false;
+};
+
+fruit::Component<fruit::Required<CommandSequenceExecutor>>
+cvdSerialPresetComponent() {
+  return fruit::createComponent()
+      .addMultibinding<CvdServerHandler, SerialPreset>();
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/server_command/serial_preset.h b/host/commands/cvd/server_command/serial_preset.h
new file mode 100644
index 0000000..06b16fd
--- /dev/null
+++ b/host/commands/cvd/server_command/serial_preset.h
@@ -0,0 +1,27 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#pragma once
+
+#include <fruit/fruit.h>
+
+#include "host/commands/cvd/command_sequence.h"
+
+namespace cuttlefish {
+
+fruit::Component<fruit::Required<CommandSequenceExecutor>>
+cvdSerialPresetComponent();
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/server_command/server_handler.h b/host/commands/cvd/server_command/server_handler.h
new file mode 100644
index 0000000..4d327f5
--- /dev/null
+++ b/host/commands/cvd/server_command/server_handler.h
@@ -0,0 +1,36 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include "common/libs/utils/result.h"
+#include "host/commands/cvd/server_client.h"
+#include "host/commands/cvd/types.h"
+
+namespace cuttlefish {
+
+class CvdServerHandler {
+ public:
+  virtual ~CvdServerHandler() = default;
+
+  virtual Result<bool> CanHandle(const RequestWithStdio&) const = 0;
+  virtual Result<cvd::Response> Handle(const RequestWithStdio&) = 0;
+  virtual Result<void> Interrupt() = 0;
+  // returns the list of subcommands it can handle
+  virtual cvd_common::Args CmdList() const = 0;
+};
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/server_command/shutdown.cpp b/host/commands/cvd/server_command/shutdown.cpp
new file mode 100644
index 0000000..d5d1223
--- /dev/null
+++ b/host/commands/cvd/server_command/shutdown.cpp
@@ -0,0 +1,106 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/commands/cvd/server.h"
+
+#include <sys/types.h>
+
+#include <fruit/fruit.h>
+
+#include "cvd_server.pb.h"
+
+#include "common/libs/fs/shared_buf.h"
+#include "common/libs/fs/shared_fd.h"
+#include "common/libs/utils/result.h"
+#include "host/commands/cvd/instance_manager.h"
+#include "host/commands/cvd/server_command/components.h"
+#include "host/commands/cvd/types.h"
+
+namespace cuttlefish {
+namespace {
+
+class CvdShutdownHandler : public CvdServerHandler {
+ public:
+  INJECT(CvdShutdownHandler(CvdServer& server,
+                            InstanceManager& instance_manager))
+      : server_(server), instance_manager_(instance_manager) {}
+
+  Result<bool> CanHandle(const RequestWithStdio& request) const override {
+    return request.Message().contents_case() ==
+           cvd::Request::ContentsCase::kShutdownRequest;
+  }
+
+  Result<cvd::Response> Handle(const RequestWithStdio& request) override {
+    CF_EXPECT(CanHandle(request));
+    CF_EXPECT(request.Credentials() != std::nullopt);
+    const uid_t uid = request.Credentials()->uid;
+
+    cvd::Response response;
+    response.mutable_shutdown_response();
+
+    if (!request.Extra()) {
+      response.mutable_status()->set_code(cvd::Status::FAILED_PRECONDITION);
+      response.mutable_status()->set_message(
+          "Missing extra SharedFD for shutdown");
+      return response;
+    }
+
+    if (request.Message().shutdown_request().clear()) {
+      *response.mutable_status() =
+          instance_manager_.CvdClear(request.Out(), request.Err());
+      if (response.status().code() != cvd::Status::OK) {
+        return response;
+      }
+    }
+
+    if (instance_manager_.HasInstanceGroups(uid)) {
+      response.mutable_status()->set_code(cvd::Status::FAILED_PRECONDITION);
+      response.mutable_status()->set_message(
+          "Cannot shut down cvd_server while devices are being tracked. "
+          "Try `cvd kill-server`.");
+      return response;
+    }
+
+    // Intentionally leak the write_pipe fd so that it only closes
+    // when this process fully exits.
+    (*request.Extra())->UNMANAGED_Dup();
+
+    WriteAll(request.Out(), "Stopping the cvd_server.\n");
+    server_.Stop();
+
+    response.mutable_status()->set_code(cvd::Status::OK);
+    return response;
+  }
+
+  Result<void> Interrupt() override { return CF_ERR("Can't interrupt"); }
+
+  // For now, shutdown isn't done by cvd shutdown.
+  cvd_common::Args CmdList() const override { return {}; }
+
+ private:
+  CvdServer& server_;
+  InstanceManager& instance_manager_;
+};
+
+}  // namespace
+
+fruit::Component<fruit::Required<CvdServer, InstanceManager>>
+cvdShutdownComponent() {
+  return fruit::createComponent()
+      .addMultibinding<CvdServerHandler, CvdShutdownHandler>();
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/server_command/start.cpp b/host/commands/cvd/server_command/start.cpp
new file mode 100644
index 0000000..c4b9f43
--- /dev/null
+++ b/host/commands/cvd/server_command/start.cpp
@@ -0,0 +1,886 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/commands/cvd/server_command/start.h"
+
+#include <sys/types.h>
+
+#include <array>
+#include <atomic>
+#include <cstdint>
+#include <cstdlib>
+#include <fstream>
+#include <iostream>
+#include <map>
+#include <mutex>
+#include <regex>
+#include <sstream>
+#include <string>
+
+#include <android-base/parseint.h>
+#include <android-base/strings.h>
+
+#include "common/libs/fs/shared_buf.h"
+#include "common/libs/fs/shared_fd.h"
+#include "common/libs/utils/contains.h"
+#include "common/libs/utils/files.h"
+#include "common/libs/utils/flag_parser.h"
+#include "common/libs/utils/result.h"
+#include "cvd_server.pb.h"
+#include "host/commands/cvd/command_sequence.h"
+#include "host/commands/cvd/common_utils.h"
+#include "host/commands/cvd/selector/selector_constants.h"
+#include "host/commands/cvd/server_command/generic.h"
+#include "host/commands/cvd/server_command/server_handler.h"
+#include "host/commands/cvd/server_command/start_impl.h"
+#include "host/commands/cvd/server_command/subprocess_waiter.h"
+#include "host/commands/cvd/server_command/utils.h"
+#include "host/commands/cvd/types.h"
+#include "host/libs/config/cuttlefish_config.h"
+
+namespace cuttlefish {
+
+class CvdStartCommandHandler : public CvdServerHandler {
+ public:
+  INJECT(
+      CvdStartCommandHandler(InstanceManager& instance_manager,
+                             HostToolTargetManager& host_tool_target_manager))
+      : instance_manager_(instance_manager),
+        host_tool_target_manager_(host_tool_target_manager),
+        acloud_action_ended_(false) {}
+
+  Result<bool> CanHandle(const RequestWithStdio& request) const;
+  Result<cvd::Response> Handle(const RequestWithStdio& request) override;
+  Result<void> Interrupt() override;
+  std::vector<std::string> CmdList() const override;
+
+ private:
+  Result<void> UpdateInstanceDatabase(
+      const uid_t uid, const selector::GroupCreationInfo& group_creation_info);
+  Result<void> FireCommand(Command&& command, const bool wait);
+  bool HasHelpOpts(const cvd_common::Args& args) const;
+
+  Result<Command> ConstructCvdNonHelpCommand(
+      const std::string& bin_file,
+      const selector::GroupCreationInfo& group_info,
+      const RequestWithStdio& request);
+
+  // call this only if !is_help
+  Result<selector::GroupCreationInfo> GetGroupCreationInfo(
+      const std::string& start_bin, const std::string& subcmd,
+      const cvd_common::Args& subcmd_args, const cvd_common::Envs& envs,
+      const RequestWithStdio& request);
+
+  Result<cvd::Response> FillOutNewInstanceInfo(
+      cvd::Response&& response,
+      const selector::GroupCreationInfo& group_creation_info);
+
+  struct UpdatedArgsAndEnvs {
+    cvd_common::Args args;
+    cvd_common::Envs envs;
+  };
+  Result<UpdatedArgsAndEnvs> UpdateInstanceArgsAndEnvs(
+      cvd_common::Args&& args, cvd_common::Envs&& envs,
+      const std::vector<selector::PerInstanceInfo>& instances,
+      const std::string& artifacts_path, const std::string& start_bin);
+
+  static Result<std::vector<std::string>> UpdateWebrtcDeviceId(
+      std::vector<std::string>&& args, const std::string& group_name,
+      const std::vector<selector::PerInstanceInfo>& per_instance_info);
+
+  Result<selector::GroupCreationInfo> UpdateArgsAndEnvs(
+      selector::GroupCreationInfo&& old_group_info,
+      const std::string& start_bin);
+
+  Result<std::string> FindStartBin(const std::string& android_host_out);
+
+  Result<void> SetBuildId(const uid_t uid, const std::string& group_name,
+                          const std::string& home);
+
+  static void MarkLockfiles(selector::GroupCreationInfo& group_info,
+                            const InUseState state);
+  static void MarkLockfilesInUse(selector::GroupCreationInfo& group_info) {
+    MarkLockfiles(group_info, InUseState::kInUse);
+  }
+
+  Result<void> HandleNoDaemonWorker(
+      const selector::GroupCreationInfo& group_creation_info,
+      std::atomic<bool>* interrupted, const uid_t uid);
+
+  Result<cvd::Response> HandleNoDaemon(
+      const std::optional<selector::GroupCreationInfo>& group_creation_info,
+      const uid_t uid);
+  Result<cvd::Response> HandleDaemon(
+      std::optional<selector::GroupCreationInfo>& group_creation_info,
+      const uid_t uid);
+  Result<void> AcloudCompatActions(
+      const selector::GroupCreationInfo& group_creation_info,
+      const RequestWithStdio& request);
+
+  InstanceManager& instance_manager_;
+  SubprocessWaiter subprocess_waiter_;
+  HostToolTargetManager& host_tool_target_manager_;
+  CommandSequenceExecutor command_executor_;
+  std::mutex interruptible_;
+  bool interrupted_ = false;
+  /*
+   * Used by Interrupt() not to call command_executor_.Interrupt()
+   *
+   * If true, it is guaranteed that the command_executor_ ended the execution.
+   * If false, it may or may not be after the command_executor_.Execute()
+   */
+  std::atomic<bool> acloud_action_ended_;
+  static const std::array<std::string, 2> supported_commands_;
+};
+
+fruit::Component<> GenericNestedHandlerComponent(
+    InstanceManager* instance_manager,
+    HostToolTargetManager* host_tool_target_manager,
+    SubprocessWaiter* subprocess_waiter_for_nested_handler) {
+  return fruit::createComponent()
+      .bindInstance(*instance_manager)
+      .bindInstance(*host_tool_target_manager)
+      .bindInstance(*subprocess_waiter_for_nested_handler)
+      .install(cvdGenericCommandComponent);
+}
+
+Result<void> CvdStartCommandHandler::AcloudCompatActions(
+    const selector::GroupCreationInfo& group_creation_info,
+    const RequestWithStdio& request) {
+  std::unique_lock interrupt_lock(interruptible_);
+  CF_EXPECT(!interrupted_, "Interrupted");
+  // rm -fr "TempDir()/acloud_cvd_temp/local-instance-<i>"
+  std::string acloud_compat_home_prefix =
+      TempDir() + "/acloud_cvd_temp/local-instance-";
+  std::vector<std::string> acloud_compat_homes;
+  acloud_compat_homes.reserve(group_creation_info.instances.size());
+  for (const auto instance : group_creation_info.instances) {
+    acloud_compat_homes.push_back(
+        ConcatToString(acloud_compat_home_prefix, instance.instance_id_));
+  }
+  for (const auto acloud_compat_home : acloud_compat_homes) {
+    bool result_deleted = true;
+    std::stringstream acloud_compat_home_stream;
+    if (!FileExists(acloud_compat_home)) {
+      continue;
+    }
+    if (!Contains(group_creation_info.envs, kLaunchedByAcloud) ||
+        group_creation_info.envs.at(kLaunchedByAcloud) != "true") {
+      if (!DirectoryExists(acloud_compat_home, /*follow_symlinks=*/false)) {
+        // cvd created a symbolic link
+        result_deleted = RemoveFile(acloud_compat_home);
+      } else {
+        // acloud created a directory
+        // rm -fr isn't supported by TreeHugger, so if we fork-and-exec to
+        // literally run "rm -fr", the presubmit testing may fail if ever this
+        // code is tested in the future.
+        result_deleted = RecursivelyRemoveDirectory(acloud_compat_home);
+      }
+    }
+    if (!result_deleted) {
+      LOG(ERROR) << "Removing " << acloud_compat_home << " failed.";
+      continue;
+    }
+  }
+
+  // ln -f -s  [target] [symlink]
+  // 1. mkdir -p home
+  // 2. ln -f -s android_host_out home/host_bins
+  // 3. for each i in ids,
+  //     ln -f -s home /tmp/acloud_cvd_temp/local-instance-<i>
+  std::vector<MakeRequestForm> request_forms;
+  const cvd_common::Envs& common_envs = group_creation_info.envs;
+
+  const std::string& home_dir = group_creation_info.home;
+  const std::string client_pwd =
+      request.Message().command_request().working_directory();
+  request_forms.push_back(
+      {.working_dir = client_pwd,
+       .cmd_args = cvd_common::Args{"mkdir", "-p", home_dir},
+       .env = common_envs,
+       .selector_args = cvd_common::Args{}});
+  const std::string& android_host_out = group_creation_info.host_artifacts_path;
+  request_forms.push_back(
+      {.working_dir = client_pwd,
+       .cmd_args = cvd_common::Args{"ln", "-T", "-f", "-s", android_host_out,
+                                    home_dir + "/host_bins"},
+       .env = common_envs,
+       .selector_args = cvd_common::Args{}});
+  /* TODO(weihsu@): cvd acloud delete/list must handle multi-tenancy gracefully
+   *
+   * acloud delete just calls, for all instances in a group,
+   *  /tmp/acloud_cvd_temp/local-instance-<i>/host_bins/stop_cvd
+   *
+   * That isn't necessary. Not desirable. Cvd acloud should read the instance
+   * manager's in-memory data structure, and call stop_cvd once for the entire
+   * group.
+   *
+   * Likewise, acloud list simply shows all instances in a flattened way. The
+   * user has no clue about an instance group. Cvd acloud should show the
+   * hierarchy.
+   *
+   * For now, we create the symbolic links so that it is compatible with acloud
+   * in Python.
+   */
+  for (const auto& acloud_compat_home : acloud_compat_homes) {
+    if (acloud_compat_home == home_dir) {
+      LOG(ERROR) << "The \"HOME\" directory is acloud workspace, which will "
+                 << "be deleted by next cvd start or acloud command with the"
+                 << " same directory being \"HOME\"";
+      continue;
+    }
+    request_forms.push_back({
+        .working_dir = client_pwd,
+        .cmd_args = cvd_common::Args{"ln", "-T", "-f", "-s", home_dir,
+                                     acloud_compat_home},
+        .env = common_envs,
+        .selector_args = cvd_common::Args{},
+    });
+  }
+  std::vector<cvd::Request> request_protos;
+  for (const auto& request_form : request_forms) {
+    request_protos.emplace_back(MakeRequest(request_form));
+  }
+  std::vector<RequestWithStdio> new_requests;
+  auto dev_null = SharedFD::Open("/dev/null", O_RDWR);
+  CF_EXPECT(dev_null->IsOpen(), dev_null->StrError());
+  std::vector<SharedFD> dev_null_fds = {dev_null, dev_null, dev_null};
+  for (auto& request_proto : request_protos) {
+    new_requests.emplace_back(request.Client(), request_proto, dev_null_fds,
+                              request.Credentials());
+  }
+  SubprocessWaiter subprocess_waiter;
+  // injector only with the GenericCommandHandler for ln and mkdir
+  fruit::Injector<> injector(GenericNestedHandlerComponent,
+                             std::addressof(this->instance_manager_),
+                             std::addressof(this->host_tool_target_manager_),
+                             std::addressof(subprocess_waiter));
+  CF_EXPECT(command_executor_.LateInject(injector),
+            "Creating local CommandSequenceExecutor in cvd start failed.");
+  interrupt_lock.unlock();
+  CF_EXPECT(command_executor_.Execute(new_requests, dev_null));
+  return {};
+}
+
+void CvdStartCommandHandler::MarkLockfiles(
+    selector::GroupCreationInfo& group_info, const InUseState state) {
+  auto& instances = group_info.instances;
+  for (auto& instance : instances) {
+    if (!instance.instance_file_lock_) {
+      continue;
+    }
+    auto result = instance.instance_file_lock_->Status(state);
+    if (!result.ok()) {
+      LOG(ERROR) << result.error().Message();
+    }
+  }
+}
+
+Result<bool> CvdStartCommandHandler::CanHandle(
+    const RequestWithStdio& request) const {
+  auto invocation = ParseInvocation(request.Message());
+  return Contains(supported_commands_, invocation.command);
+}
+
+Result<CvdStartCommandHandler::UpdatedArgsAndEnvs>
+CvdStartCommandHandler::UpdateInstanceArgsAndEnvs(
+    cvd_common::Args&& args, cvd_common::Envs&& envs,
+    const std::vector<selector::PerInstanceInfo>& instances,
+    const std::string& artifacts_path, const std::string& start_bin) {
+  std::vector<unsigned> ids;
+  ids.reserve(instances.size());
+  for (const auto& instance : instances) {
+    ids.emplace_back(instance.instance_id_);
+  }
+
+  cvd_common::Args new_args{std::move(args)};
+  std::string old_instance_nums;
+  std::string old_num_instances;
+  std::string old_base_instance_num;
+
+  std::vector<Flag> instance_id_flags{
+      GflagsCompatFlag("instance_nums", old_instance_nums),
+      GflagsCompatFlag("num_instances", old_num_instances),
+      GflagsCompatFlag("base_instance_num", old_base_instance_num)};
+  // discard old ones
+  ParseFlags(instance_id_flags, new_args);
+
+  auto check_flag = [artifacts_path, start_bin,
+                     this](const std::string& flag_name) -> Result<void> {
+    CF_EXPECT(
+        host_tool_target_manager_.ReadFlag({.artifacts_path = artifacts_path,
+                                            .op = "start",
+                                            .flag_name = flag_name}));
+    return {};
+  };
+  auto max = *(std::max_element(ids.cbegin(), ids.cend()));
+  auto min = *(std::min_element(ids.cbegin(), ids.cend()));
+
+  const bool is_consecutive = ((max - min) == (ids.size() - 1));
+  const bool is_sorted = std::is_sorted(ids.begin(), ids.end());
+
+  if (!is_consecutive || !is_sorted) {
+    std::string flag_value = android::base::Join(ids, ",");
+    CF_EXPECT(check_flag("instance_nums"));
+    new_args.emplace_back("--instance_nums=" + flag_value);
+    return UpdatedArgsAndEnvs{.args = std::move(new_args),
+                              .envs = std::move(envs)};
+  }
+
+  // sorted and consecutive, so let's use old flags
+  // like --num_instances and --base_instance_num
+  if (ids.size() > 1) {
+    CF_EXPECT(check_flag("num_instances"),
+              "--num_instances is not supported but multi-tenancy requested.");
+    new_args.emplace_back("--num_instances=" + std::to_string(ids.size()));
+  }
+  cvd_common::Envs new_envs{std::move(envs)};
+  if (check_flag("base_instance_num").ok()) {
+    new_args.emplace_back("--base_instance_num=" + std::to_string(min));
+  }
+  new_envs[kCuttlefishInstanceEnvVarName] = std::to_string(min);
+  return UpdatedArgsAndEnvs{.args = std::move(new_args),
+                            .envs = std::move(new_envs)};
+}
+
+/*
+ * Adds --webrtc_device_id when necessary to cmd_args_
+ */
+Result<std::vector<std::string>> CvdStartCommandHandler::UpdateWebrtcDeviceId(
+    std::vector<std::string>&& args, const std::string& group_name,
+    const std::vector<selector::PerInstanceInfo>& per_instance_info) {
+  std::vector<std::string> new_args{std::move(args)};
+  // consume webrtc_device_id
+  // it was verified by start_selector_parser
+  std::string flag_value;
+  std::vector<Flag> webrtc_device_id_flag{
+      GflagsCompatFlag("webrtc_device_id", flag_value)};
+  CF_EXPECT(ParseFlags(webrtc_device_id_flag, new_args));
+
+  CF_EXPECT(!group_name.empty());
+  std::vector<std::string> device_name_list;
+  device_name_list.reserve(per_instance_info.size());
+  for (const auto& instance : per_instance_info) {
+    const auto& per_instance_name = instance.per_instance_name_;
+    std::string device_name{group_name};
+    device_name.append("-").append(per_instance_name);
+    device_name_list.emplace_back(device_name);
+  }
+  // append the rebuilt --webrtc_device_id flag (the old one was consumed above)
+  new_args.emplace_back("--webrtc_device_id=" +
+                        android::base::Join(device_name_list, ","));
+  return new_args;
+}
+
+Result<Command> CvdStartCommandHandler::ConstructCvdNonHelpCommand(
+    const std::string& bin_file, const selector::GroupCreationInfo& group_info,
+    const RequestWithStdio& request) {
+  auto bin_path = group_info.host_artifacts_path;
+  bin_path.append("/bin/").append(bin_file);
+  CF_EXPECT(!group_info.home.empty());
+  ConstructCommandParam construct_cmd_param{
+      .bin_path = bin_path,
+      .home = group_info.home,
+      .args = group_info.args,
+      .envs = group_info.envs,
+      .working_dir = request.Message().command_request().working_directory(),
+      .command_name = bin_file,
+      .in = request.In(),
+      .out = request.Out(),
+      .err = request.Err()};
+  Command non_help_command = CF_EXPECT(ConstructCommand(construct_cmd_param));
+  return non_help_command;
+}
+
+// call this only if !is_help
+Result<selector::GroupCreationInfo>
+CvdStartCommandHandler::GetGroupCreationInfo(
+    const std::string& start_bin, const std::string& subcmd,
+    const std::vector<std::string>& subcmd_args, const cvd_common::Envs& envs,
+    const RequestWithStdio& request) {
+  using CreationAnalyzerParam =
+      selector::CreationAnalyzer::CreationAnalyzerParam;
+  const auto& selector_opts =
+      request.Message().command_request().selector_opts();
+  const auto selector_args = cvd_common::ConvertToArgs(selector_opts.args());
+  CreationAnalyzerParam analyzer_param{
+      .cmd_args = subcmd_args, .envs = envs, .selector_args = selector_args};
+  auto cred = CF_EXPECT(request.Credentials());
+  auto group_creation_info =
+      CF_EXPECT(instance_manager_.Analyze(subcmd, analyzer_param, cred));
+  auto final_group_creation_info =
+      CF_EXPECT(UpdateArgsAndEnvs(std::move(group_creation_info), start_bin));
+  return final_group_creation_info;
+}
+
+Result<selector::GroupCreationInfo> CvdStartCommandHandler::UpdateArgsAndEnvs(
+    selector::GroupCreationInfo&& old_group_info,
+    const std::string& start_bin) {
+  selector::GroupCreationInfo group_creation_info = std::move(old_group_info);
+  // update instance related-flags, envs
+  const auto& instances = group_creation_info.instances;
+  const auto& host_artifacts_path = group_creation_info.host_artifacts_path;
+  auto [new_args, new_envs] = CF_EXPECT(UpdateInstanceArgsAndEnvs(
+      std::move(group_creation_info.args), std::move(group_creation_info.envs),
+      instances, host_artifacts_path, start_bin));
+  group_creation_info.args = std::move(new_args);
+  group_creation_info.envs = std::move(new_envs);
+
+  auto webrtc_device_id_flag = host_tool_target_manager_.ReadFlag(
+      {.artifacts_path = group_creation_info.host_artifacts_path,
+       .op = "start",
+       .flag_name = "webrtc_device_id"});
+  if (webrtc_device_id_flag.ok()) {
+    group_creation_info.args = CF_EXPECT(UpdateWebrtcDeviceId(
+        std::move(group_creation_info.args), group_creation_info.group_name,
+        group_creation_info.instances));
+  }
+
+  group_creation_info.envs["HOME"] = group_creation_info.home;
+  group_creation_info.envs[kAndroidHostOut] =
+      group_creation_info.host_artifacts_path;
+  group_creation_info.envs[kAndroidProductOut] =
+      group_creation_info.product_out_path;
+  /* b/253644566
+   *
+   * Old branches used kAndroidSoongHostOut instead of kAndroidHostOut
+   */
+  group_creation_info.envs[kAndroidSoongHostOut] =
+      group_creation_info.host_artifacts_path;
+  group_creation_info.envs[kCvdMarkEnv] = "true";
+  return group_creation_info;
+}
+
+static std::ostream& operator<<(std::ostream& out, const cvd_common::Args& v) {
+  if (v.empty()) {
+    return out;
+  }
+  for (int i = 0; i < v.size() - 1; i++) {
+    out << v.at(i) << " ";
+  }
+  out << v.back();
+  return out;
+}
+
+static void ShowLaunchCommand(const std::string& bin,
+                              const cvd_common::Args& args,
+                              const cvd_common::Envs& envs) {
+  std::stringstream ss;
+  std::vector<std::string> interesting_env_names{"HOME",
+                                                 kAndroidHostOut,
+                                                 kAndroidSoongHostOut,
+                                                 "ANDROID_PRODUCT_OUT",
+                                                 kCuttlefishInstanceEnvVarName,
+                                                 kCuttlefishConfigEnvVarName};
+  for (const auto& interesting_env_name : interesting_env_names) {
+    if (Contains(envs, interesting_env_name)) {
+      ss << interesting_env_name << "=\"" << envs.at(interesting_env_name)
+         << "\" ";
+    }
+  }
+  ss << " " << bin << " " << args;
+  LOG(ERROR) << "launcher command: " << ss.str();
+}
+
+static void ShowLaunchCommand(const std::string& bin,
+                              selector::GroupCreationInfo& group_info) {
+  ShowLaunchCommand(bin, group_info.args, group_info.envs);
+}
+
+Result<std::string> CvdStartCommandHandler::FindStartBin(
+    const std::string& android_host_out) {
+  auto start_bin = CF_EXPECT(host_tool_target_manager_.ExecBaseName({
+      .artifacts_path = android_host_out,
+      .op = "start",
+  }));
+  return start_bin;
+}
+
+// std::string -> bool
+enum class BoolValueType : std::uint8_t { kTrue = 0, kFalse, kUnknown };
+static Result<bool> IsDaemonModeFlag(const cvd_common::Args& args) {
+  /*
+   * --daemon could be either bool or string flags.
+   */
+  bool is_daemon = false;
+  auto initial_size = args.size();
+  Flag daemon_bool = GflagsCompatFlag("daemon", is_daemon);
+  std::vector<Flag> as_bool_flags{daemon_bool};
+  cvd_common::Args copied_args{args};
+  if (ParseFlags(as_bool_flags, copied_args)) {
+    if (initial_size != copied_args.size()) {
+      return is_daemon;
+    }
+  }
+  std::string daemon_values;
+  Flag daemon_string = GflagsCompatFlag("daemon", daemon_values);
+  cvd_common::Args copied_args2{args};
+  std::vector<Flag> as_string_flags{daemon_string};
+  if (!ParseFlags(as_string_flags, copied_args2)) {
+    return false;
+  }
+  if (initial_size == copied_args2.size()) {
+    return false;  // not consumed
+  }
+  // --daemon should have been handled above
+  CF_EXPECT(!daemon_values.empty());
+  std::unordered_set<std::string> true_strings = {"y", "yes", "true"};
+  std::unordered_set<std::string> false_strings = {"n", "no", "false"};
+  auto tokens = android::base::Tokenize(daemon_values, ",");
+  std::unordered_set<BoolValueType> value_set;
+  for (const auto& token : tokens) {
+    std::string daemon_value(token);
+    /*
+     * https://en.cppreference.com/w/cpp/string/byte/tolower
+     *
+     * char should be converted to unsigned char first.
+     */
+    std::transform(daemon_value.begin(), daemon_value.end(),
+                   daemon_value.begin(),
+                   [](unsigned char c) { return std::tolower(c); });
+    if (Contains(true_strings, daemon_value)) {
+      value_set.insert(BoolValueType::kTrue);
+      continue;
+    }
+    if (Contains(false_strings, daemon_value)) {
+      value_set.insert(BoolValueType::kFalse);
+    } else {
+      value_set.insert(BoolValueType::kUnknown);
+    }
+  }
+  CF_EXPECT_LE(value_set.size(), 1,
+               "Vectorized flags for --daemon is not supported by cvd");
+  const auto only_element = *(value_set.begin());
+  // We want to, basically, launch with daemon mode, and want to know
+  // when we must not do so
+  if (only_element == BoolValueType::kFalse) {
+    return false;
+  }
+  // if kUnknown, the launcher will fail. Which mode doesn't matter
+  // for the launcher. But it matters for cvd in how cvd handles the
+  // failure.
+  return true;
+}
+
+Result<cvd::Response> CvdStartCommandHandler::Handle(
+    const RequestWithStdio& request) {
+  std::unique_lock interrupt_lock(interruptible_);
+  if (interrupted_) {
+    return CF_ERR("Interrupted");
+  }
+  CF_EXPECT(CanHandle(request));
+
+  cvd::Response response;
+  response.mutable_command_response();
+
+  auto precondition_verified = VerifyPrecondition(request);
+  if (!precondition_verified.ok()) {
+    response.mutable_status()->set_code(cvd::Status::FAILED_PRECONDITION);
+    response.mutable_status()->set_message(
+        precondition_verified.error().Message());
+    return response;
+  }
+
+  const uid_t uid = request.Credentials()->uid;
+  cvd_common::Envs envs =
+      cvd_common::ConvertToEnvs(request.Message().command_request().env());
+  if (Contains(envs, "HOME")) {
+    if (envs.at("HOME").empty()) {
+      envs.erase("HOME");
+    } else {
+      // As the end-user may override HOME, this could be a relative path
+      // to client's pwd, or may include "~" which is the client's actual
+      // home directory.
+      auto client_pwd = request.Message().command_request().working_directory();
+      const auto given_home_dir = envs.at("HOME");
+      /*
+       * Imagine this scenario:
+       *   client$ export HOME=/tmp/new/dir
+       *   client$ HOME="~/subdir" cvd start
+       *
+       * The value of ~ isn't sent to the server. The server can't figure that
+       * out as it might be overridden before the cvd start command.
+       */
+      CF_EXPECT(!android::base::StartsWith(given_home_dir, "~") &&
+                    !android::base::StartsWith(given_home_dir, "~/"),
+                "The HOME directory should not start with ~");
+      envs["HOME"] = CF_EXPECT(
+          EmulateAbsolutePath({.current_working_dir = client_pwd,
+                               .home_dir = CF_EXPECT(SystemWideUserHome(uid)),
+                               .path_to_convert = given_home_dir,
+                               .follow_symlink = false}));
+    }
+  }
+  CF_EXPECT(Contains(envs, kAndroidHostOut));
+  const auto bin = CF_EXPECT(FindStartBin(envs.at(kAndroidHostOut)));
+
+  // update DB if not help
+  // collect group creation infos
+  auto [subcmd, subcmd_args] = ParseInvocation(request.Message());
+  CF_EXPECT(Contains(supported_commands_, subcmd),
+            "subcmd should be start but is " << subcmd);
+  const bool is_help = HasHelpOpts(subcmd_args);
+  const bool is_daemon = CF_EXPECT(IsDaemonModeFlag(subcmd_args));
+
+  std::optional<selector::GroupCreationInfo> group_creation_info;
+  if (!is_help) {
+    group_creation_info = CF_EXPECT(
+        GetGroupCreationInfo(bin, subcmd, subcmd_args, envs, request));
+    CF_EXPECT(UpdateInstanceDatabase(uid, *group_creation_info));
+    response = CF_EXPECT(
+        FillOutNewInstanceInfo(std::move(response), *group_creation_info));
+  }
+
+  Command command =
+      is_help
+          ? CF_EXPECT(ConstructCvdHelpCommand(bin, envs, subcmd_args, request))
+          : CF_EXPECT(
+                ConstructCvdNonHelpCommand(bin, *group_creation_info, request));
+
+  if (!is_help) {
+    CF_EXPECT(
+        group_creation_info != std::nullopt,
+        "group_creation_info should be nullopt only when --help is given.");
+  }
+
+  if (is_help) {
+    ShowLaunchCommand(command.Executable(), subcmd_args, envs);
+  } else {
+    ShowLaunchCommand(command.Executable(), *group_creation_info);
+    CF_EXPECT(request.Message().command_request().wait_behavior() !=
+              cvd::WAIT_BEHAVIOR_START);
+  }
+
+  FireCommand(std::move(command), /*should_wait*/ true);
+  interrupt_lock.unlock();
+
+  if (is_help) {
+    auto infop = CF_EXPECT(subprocess_waiter_.Wait());
+    return ResponseFromSiginfo(infop);
+  }
+
+  // AcloudCompatActions() acquires interrupt_lock internally.
+  auto acloud_compat_action_result =
+      AcloudCompatActions(*group_creation_info, request);
+  acloud_action_ended_ = true;
+  if (!acloud_compat_action_result.ok()) {
+    LOG(ERROR) << acloud_compat_action_result.error().Trace();
+    LOG(ERROR) << "AcloudCompatActions() failed"
+               << " but continue as they are minor errors.";
+  }
+  return is_daemon ? HandleDaemon(group_creation_info, uid)
+                   : HandleNoDaemon(group_creation_info, uid);
+}
+
+Result<void> CvdStartCommandHandler::HandleNoDaemonWorker(
+    const selector::GroupCreationInfo& group_creation_info,
+    std::atomic<bool>* interrupted, const uid_t uid) {
+  const std::string home_dir = group_creation_info.home;
+  const std::string group_name = group_creation_info.group_name;
+  std::string kernel_log_path =
+      ConcatToString(home_dir, "/cuttlefish_runtime/kernel.log");
+  std::regex finger_pattern(
+      "\\[\\s*[0-9]*\\.[0-9]+\\]\\s*GUEST_BUILD_FINGERPRINT:");
+  std::regex boot_pattern("VIRTUAL_DEVICE_BOOT_COMPLETED");
+  std::streampos last_pos;
+  bool first_iteration = true;
+  while (*interrupted == false) {
+    if (!FileExists(kernel_log_path)) {
+      LOG(ERROR) << kernel_log_path << " does not yet exist, so wait for 5s";
+      using namespace std::chrono_literals;
+      std::this_thread::sleep_for(5s);
+      continue;
+    }
+    std::ifstream kernel_log_file(kernel_log_path);
+    CF_EXPECT(kernel_log_file.is_open(),
+              "The kernel log file exists but it cannot be open.");
+    if (!first_iteration) {
+      kernel_log_file.seekg(last_pos);
+    } else {
+      first_iteration = false;
+      last_pos = kernel_log_file.tellg();
+    }
+    for (std::string line; std::getline(kernel_log_file, line);) {
+      last_pos = kernel_log_file.tellg();
+      // if the line broke before a newline, this will end up reading the
+      // previous line one more time but only with '\n'. That's okay
+      last_pos -= line.size();
+      if (last_pos != std::ios_base::beg) {
+        last_pos -= std::string("\n").size();
+      }
+      std::smatch matched;
+      if (std::regex_search(line, matched, finger_pattern)) {
+        std::string build_id = matched.suffix().str();
+        CF_EXPECT(instance_manager_.SetBuildId(uid, group_name, build_id));
+        continue;
+      }
+      if (std::regex_search(line, matched, boot_pattern)) {
+        return {};
+      }
+    }
+    using namespace std::chrono_literals;
+    std::this_thread::sleep_for(2s);
+  }
+  return CF_ERR("Cvd start kernel monitor interrupted.");
+}
+
+Result<cvd::Response> CvdStartCommandHandler::HandleNoDaemon(
+    const std::optional<selector::GroupCreationInfo>& group_creation_info,
+    const uid_t uid) {
+  std::atomic<bool> interrupted;
+  std::atomic<bool> worker_success;
+  interrupted = false;
+  worker_success = false;
+  const auto* group_info = std::addressof(*group_creation_info);
+  auto* interrupted_ptr = std::addressof(interrupted);
+  auto* worker_success_ptr = std::addressof(worker_success);
+  std::thread worker = std::thread(
+      [this, group_info, interrupted_ptr, worker_success_ptr, uid]() {
+        LOG(ERROR) << "worker thread started.";
+        auto result = HandleNoDaemonWorker(*group_info, interrupted_ptr, uid);
+        *worker_success_ptr = result.ok();
+        if (*worker_success_ptr == false) {
+          LOG(ERROR) << result.error().Trace();
+        }
+      });
+  auto infop = CF_EXPECT(subprocess_waiter_.Wait());
+  if (infop.si_code != CLD_EXITED || infop.si_status != EXIT_SUCCESS) {
+    // perhaps failed in launch
+    instance_manager_.RemoveInstanceGroup(uid, group_creation_info->home);
+    interrupted = true;
+  }
+  worker.join();
+  auto final_response = ResponseFromSiginfo(infop);
+  if (!final_response.has_status() ||
+      final_response.status().code() != cvd::Status::OK) {
+    return final_response;
+  }
+  // group_creation_info is nullopt only if is_help is true, and is_help is false here
+  return FillOutNewInstanceInfo(std::move(final_response),
+                                *group_creation_info);
+}
+
+Result<cvd::Response> CvdStartCommandHandler::HandleDaemon(
+    std::optional<selector::GroupCreationInfo>& group_creation_info,
+    const uid_t uid) {
+  auto infop = CF_EXPECT(subprocess_waiter_.Wait());
+  if (infop.si_code != CLD_EXITED || infop.si_status != EXIT_SUCCESS) {
+    instance_manager_.RemoveInstanceGroup(uid, group_creation_info->home);
+  }
+
+  auto final_response = ResponseFromSiginfo(infop);
+  if (!final_response.has_status() ||
+      final_response.status().code() != cvd::Status::OK) {
+    return final_response;
+  }
+  MarkLockfilesInUse(*group_creation_info);
+
+  auto set_build_id_result = SetBuildId(uid, group_creation_info->group_name,
+                                        group_creation_info->home);
+  if (!set_build_id_result.ok()) {
+    LOG(ERROR) << "Failed to set a build Id for "
+               << group_creation_info->group_name << " but will continue.";
+    LOG(ERROR) << "The error message was : "
+               << set_build_id_result.error().Trace();
+  }
+
+  // group_creation_info is nullopt only if is_help is true, and is_help is false here
+  return FillOutNewInstanceInfo(std::move(final_response),
+                                *group_creation_info);
+}
+
+Result<void> CvdStartCommandHandler::SetBuildId(const uid_t uid,
+                                                const std::string& group_name,
+                                                const std::string& home) {
+  // build id can't be found before this point
+  const auto build_id = CF_EXPECT(cvd_start_impl::ExtractBuildId(home));
+  CF_EXPECT(instance_manager_.SetBuildId(uid, group_name, build_id));
+  return {};
+}
+
+Result<void> CvdStartCommandHandler::Interrupt() {
+  std::scoped_lock interrupt_lock(interruptible_);
+  interrupted_ = true;
+  if (!acloud_action_ended_) {
+    auto result = command_executor_.Interrupt();
+    if (!result.ok()) {
+      LOG(ERROR) << "Failed to interrupt CommandExecutor"
+                 << result.error().Message();
+    }
+  }
+  CF_EXPECT(subprocess_waiter_.Interrupt());
+  return {};
+}
+
+Result<cvd::Response> CvdStartCommandHandler::FillOutNewInstanceInfo(
+    cvd::Response&& response,
+    const selector::GroupCreationInfo& group_creation_info) {
+  auto new_response = std::move(response);
+  auto& command_response = *(new_response.mutable_command_response());
+  auto& instance_group_info =
+      *(CF_EXPECT(command_response.mutable_instance_group_info()));
+  instance_group_info.set_group_name(group_creation_info.group_name);
+  instance_group_info.add_home_directories(group_creation_info.home);
+  for (const auto& per_instance_info : group_creation_info.instances) {
+    auto* new_entry = CF_EXPECT(instance_group_info.add_instances());
+    new_entry->set_name(per_instance_info.per_instance_name_);
+    new_entry->set_instance_id(per_instance_info.instance_id_);
+  }
+  return new_response;
+}
+
+Result<void> CvdStartCommandHandler::UpdateInstanceDatabase(
+    const uid_t uid, const selector::GroupCreationInfo& group_creation_info) {
+  CF_EXPECT(instance_manager_.SetInstanceGroup(uid, group_creation_info),
+            group_creation_info.home
+                << " is already taken so can't create new instance.");
+  return {};
+}
+
+Result<void> CvdStartCommandHandler::FireCommand(Command&& command,
+                                                 const bool wait) {
+  SubprocessOptions options;
+  if (!wait) {
+    options.ExitWithParent(false);
+  }
+  CF_EXPECT(subprocess_waiter_.Setup(command.Start(options)));
+  return {};
+}
+
+bool CvdStartCommandHandler::HasHelpOpts(
+    const std::vector<std::string>& args) const {
+  return IsHelpSubcmd(args);
+}
+
+std::vector<std::string> CvdStartCommandHandler::CmdList() const {
+  std::vector<std::string> subcmd_list;
+  subcmd_list.reserve(supported_commands_.size());
+  for (const auto& cmd : supported_commands_) {
+    subcmd_list.emplace_back(cmd);
+  }
+  return subcmd_list;
+}
+
+const std::array<std::string, 2> CvdStartCommandHandler::supported_commands_{
+    "start", "launch_cvd"};
+
+fruit::Component<fruit::Required<InstanceManager, HostToolTargetManager>>
+CvdStartCommandComponent() {
+  return fruit::createComponent()
+      .addMultibinding<CvdServerHandler, CvdStartCommandHandler>();
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/server_command/start.h b/host/commands/cvd/server_command/start.h
new file mode 100644
index 0000000..b02a9d3
--- /dev/null
+++ b/host/commands/cvd/server_command/start.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <fruit/fruit.h>
+
+#include "host/commands/cvd/instance_manager.h"
+#include "host/commands/cvd/server_command/host_tool_target_manager.h"
+
+namespace cuttlefish {
+
+fruit::Component<fruit::Required<InstanceManager, HostToolTargetManager>>
+CvdStartCommandComponent();
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/server_command/start_impl.cpp b/host/commands/cvd/server_command/start_impl.cpp
new file mode 100644
index 0000000..a9ff456
--- /dev/null
+++ b/host/commands/cvd/server_command/start_impl.cpp
@@ -0,0 +1,73 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/commands/cvd/server_command/start_impl.h"
+
+#include <fstream>
+#include <regex>
+
+#include <android-base/strings.h>
+
+#include "common/libs/utils/files.h"
+#include "host/commands/cvd/common_utils.h"
+
+namespace cuttlefish {
+namespace cvd_start_impl {
+
+/* Picks up the line starting with [*] GUEST_BUILD_FINGERPRINT:,
+ * and removes the "[*] GUEST_BUILD_FINGERPRINT:" part.
+ *
+ */
+static Result<std::string> ExtractBuildIdLineValue(
+    const std::string& home_dir) {
+  std::string kernel_log_path =
+      ConcatToString(home_dir, "/cuttlefish_runtime/kernel.log");
+  if (!FileExists(kernel_log_path)) {
+    kernel_log_path =
+        ConcatToString(home_dir, "/cuttlefish_runtime_runtime/kernel.log");
+  }
+  std::ifstream kernel_log_file(kernel_log_path);
+  CF_EXPECT(kernel_log_file.is_open(),
+            "The " << kernel_log_path << " is not open.");
+  std::regex pattern("\\[\\s*[0-9]*\\.[0-9]+\\]\\s*GUEST_BUILD_FINGERPRINT:");
+  for (std::string line; std::getline(kernel_log_file, line);) {
+    std::smatch matched;
+    if (!std::regex_search(line, matched, pattern)) {
+      continue;
+    }
+    return matched.suffix().str();
+  }
+  auto err_message =
+      ConcatToString("The GUEST_BUILD_FINGERPRINT line is not found in the ",
+                     kernel_log_path, " file");
+  return CF_ERR(err_message);
+}
+
+Result<std::string> ExtractBuildId(const std::string& home_dir) {
+  auto fingerprint_line_value = CF_EXPECT(ExtractBuildIdLineValue(home_dir));
+  /* format:
+   *  <not sure>/target/build year/branch.id/who built it/when:target/??
+   *
+   * We need the branch followed by . followed by sort of Id part
+   */
+  std::vector<std::string> tokens =
+      android::base::Tokenize(fingerprint_line_value, "/");
+  CF_EXPECT(tokens.size() > 3);
+  return tokens.at(3);
+}
+
+}  // namespace cvd_start_impl
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/server_command/start_impl.h b/host/commands/cvd/server_command/start_impl.h
new file mode 100644
index 0000000..ce41f31
--- /dev/null
+++ b/host/commands/cvd/server_command/start_impl.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <string>
+
+#include "common/libs/utils/result.h"
+
+namespace cuttlefish {
+namespace cvd_start_impl {
+
+Result<std::string> ExtractBuildId(const std::string& home_dir);
+
+}  // namespace cvd_start_impl
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/server_command/subcmd.h b/host/commands/cvd/server_command/subcmd.h
new file mode 100644
index 0000000..3c0b8a4
--- /dev/null
+++ b/host/commands/cvd/server_command/subcmd.h
@@ -0,0 +1,21 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include "host/commands/cvd/server_command/components.h"
+#include "host/commands/cvd/server_command/server_handler.h"
+#include "host/commands/cvd/server_command/utils.h"
diff --git a/host/commands/cvd/server_command/subprocess_waiter.cpp b/host/commands/cvd/server_command/subprocess_waiter.cpp
new file mode 100644
index 0000000..7a276fd
--- /dev/null
+++ b/host/commands/cvd/server_command/subprocess_waiter.cpp
@@ -0,0 +1,74 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/commands/cvd/server_command/subprocess_waiter.h"
+
+namespace cuttlefish {
+
+Result<void> SubprocessWaiter::Setup(Subprocess subprocess) {
+  std::unique_lock interrupt_lock(interruptible_);
+  CF_EXPECT(!interrupted_, "Interrupted");
+  CF_EXPECT(!subprocess_, "Already running");
+
+  subprocess_ = std::move(subprocess);
+  return {};
+}
+
+Result<siginfo_t> SubprocessWaiter::Wait() {
+  std::unique_lock interrupt_lock(interruptible_);
+  CF_EXPECT(!interrupted_, "Interrupted");
+  CF_EXPECT(subprocess_.has_value());
+
+  siginfo_t infop{};
+
+  interrupt_lock.unlock();
+
+  // This blocks until the process exits, but doesn't reap it.
+  auto result = subprocess_->Wait(&infop, WEXITED | WNOWAIT);
+  CF_EXPECT(result != -1, "Lost track of subprocess pid");
+  interrupt_lock.lock();
+  // Perform a reaping wait on the process (which should already have exited).
+  result = subprocess_->Wait(&infop, WEXITED);
+  CF_EXPECT(result != -1, "Lost track of subprocess pid");
+  // The double wait avoids a race around the kernel reusing pids. Waiting
+  // with WNOWAIT won't cause the child process to be reaped, so the kernel
+  // won't reuse the pid until the Wait call below, and any kill signals won't
+  // reach unexpected processes.
+
+  subprocess_ = {};
+
+  return infop;
+}
+
+Result<void> SubprocessWaiter::Interrupt() {
+  std::scoped_lock interrupt_lock(interruptible_);
+  if (subprocess_) {
+    auto stop_result = subprocess_->Stop();
+    switch (stop_result) {
+      case StopperResult::kStopFailure:
+        return CF_ERR("Failed to stop subprocess");
+      case StopperResult::kStopCrash:
+        return CF_ERR("Stopper caused process to crash");
+      case StopperResult::kStopSuccess:
+        return {};
+      default:
+        return CF_ERR("Unknown stop result: " << (uint64_t)stop_result);
+    }
+  }
+  return {};
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/server_command/subprocess_waiter.h b/host/commands/cvd/server_command/subprocess_waiter.h
new file mode 100644
index 0000000..492eee3
--- /dev/null
+++ b/host/commands/cvd/server_command/subprocess_waiter.h
@@ -0,0 +1,43 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <mutex>
+#include <optional>
+
+#include <fruit/fruit.h>
+
+#include "common/libs/utils/result.h"
+#include "common/libs/utils/subprocess.h"
+
+namespace cuttlefish {
+
+class SubprocessWaiter {
+ public:
+  INJECT(SubprocessWaiter()) {}
+
+  Result<void> Setup(Subprocess subprocess);
+  Result<siginfo_t> Wait();
+  Result<void> Interrupt();
+
+ private:
+  std::optional<Subprocess> subprocess_;
+  std::mutex interruptible_;
+  bool interrupted_ = false;
+};
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/server_command/try_acloud.cpp b/host/commands/cvd/server_command/try_acloud.cpp
new file mode 100644
index 0000000..6b72033
--- /dev/null
+++ b/host/commands/cvd/server_command/try_acloud.cpp
@@ -0,0 +1,72 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/commands/cvd/server_command/try_acloud.h"
+
+#include <mutex>
+
+#include <fruit/fruit.h>
+
+#include "common/libs/utils/result.h"
+#include "cvd_server.pb.h"
+#include "host/commands/cvd/server_command/server_handler.h"
+#include "host/commands/cvd/server_command/utils.h"
+#include "host/commands/cvd/types.h"
+
+namespace cuttlefish {
+
+class TryAcloudCommand : public CvdServerHandler {
+ public:
+  INJECT(TryAcloudCommand(ConvertAcloudCreateCommand& converter,
+                          ANNOTATED(AcloudTranslatorOptOut,
+                                    const std::atomic<bool>&) optout))
+      : converter_(converter), optout_(optout) {}
+  ~TryAcloudCommand() = default;
+
+  Result<bool> CanHandle(const RequestWithStdio& request) const override {
+    auto invocation = ParseInvocation(request.Message());
+    return invocation.command == "try-acloud";
+  }
+
+  cvd_common::Args CmdList() const override { return {"try-acloud"}; }
+
+  Result<cvd::Response> Handle(const RequestWithStdio& request) override {
+    CF_EXPECT(CanHandle(request));
+    CF_EXPECT(IsSubOperationSupported(request));
+    CF_EXPECT(converter_.Convert(request));
+    // currently, optout/optin feature only works in local instance
+    // remote instance still uses legacy python acloud
+    CF_EXPECT(!optout_);
+    cvd::Response response;
+    response.mutable_command_response();
+    return response;
+  }
+  Result<void> Interrupt() override { return CF_ERR("Can't be interrupted."); }
+
+ private:
+  ConvertAcloudCreateCommand& converter_;
+  const std::atomic<bool>& optout_;
+};
+
+fruit::Component<fruit::Required<
+    ConvertAcloudCreateCommand,
+    fruit::Annotated<AcloudTranslatorOptOut, std::atomic<bool>>>>
+TryAcloudCommandComponent() {
+  return fruit::createComponent()
+      .addMultibinding<CvdServerHandler, TryAcloudCommand>();
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/server_command/try_acloud.h b/host/commands/cvd/server_command/try_acloud.h
new file mode 100644
index 0000000..68212c2
--- /dev/null
+++ b/host/commands/cvd/server_command/try_acloud.h
@@ -0,0 +1,31 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <fruit/fruit.h>
+
+#include "host/commands/cvd/acloud/converter.h"
+#include "host/commands/cvd/server_command/acloud_common.h"
+
+namespace cuttlefish {
+
+fruit::Component<fruit::Required<
+    ConvertAcloudCreateCommand,
+    fruit::Annotated<AcloudTranslatorOptOut, std::atomic<bool>>>>
+TryAcloudCommandComponent();
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/server_command/utils.cpp b/host/commands/cvd/server_command/utils.cpp
new file mode 100644
index 0000000..5e44c29
--- /dev/null
+++ b/host/commands/cvd/server_command/utils.cpp
@@ -0,0 +1,207 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/commands/cvd/server_command/utils.h"
+
+#include "common/libs/fs/shared_buf.h"
+#include "common/libs/utils/contains.h"
+#include "common/libs/utils/files.h"
+#include "common/libs/utils/flag_parser.h"
+#include "common/libs/utils/users.h"
+#include "host/commands/cvd/instance_manager.h"
+#include "host/commands/cvd/server.h"
+#include "host/libs/config/cuttlefish_config.h"
+
+namespace cuttlefish {
+
+CommandInvocation ParseInvocation(const cvd::Request& request) {
+  CommandInvocation invocation;
+  if (request.contents_case() != cvd::Request::ContentsCase::kCommandRequest) {
+    return invocation;
+  }
+  if (request.command_request().args_size() == 0) {
+    return invocation;
+  }
+  for (const std::string& arg : request.command_request().args()) {
+    invocation.arguments.push_back(arg);
+  }
+  invocation.arguments[0] = cpp_basename(invocation.arguments[0]);
+  if (invocation.arguments[0] == "cvd") {
+    invocation.command = invocation.arguments[1];
+    invocation.arguments.erase(invocation.arguments.begin());
+    invocation.arguments.erase(invocation.arguments.begin());
+  } else {
+    invocation.command = invocation.arguments[0];
+    invocation.arguments.erase(invocation.arguments.begin());
+  }
+  return invocation;
+}
+
+Result<void> VerifyPrecondition(const RequestWithStdio& request) {
+  CF_EXPECT(
+      Contains(request.Message().command_request().env(), kAndroidHostOut),
+      "ANDROID_HOST_OUT in client environment is invalid.");
+  return {};
+}
+
+cuttlefish::cvd::Response ResponseFromSiginfo(siginfo_t infop) {
+  cvd::Response response;
+  response.mutable_command_response();  // set oneof field
+  auto& status = *response.mutable_status();
+  if (infop.si_code == CLD_EXITED && infop.si_status == 0) {
+    status.set_code(cvd::Status::OK);
+    return response;
+  }
+
+  status.set_code(cvd::Status::INTERNAL);
+  std::string status_code_str = std::to_string(infop.si_status);
+  if (infop.si_code == CLD_EXITED) {
+    status.set_message("Exited with code " + status_code_str);
+  } else if (infop.si_code == CLD_KILLED) {
+    status.set_message("Exited with signal " + status_code_str);
+  } else {
+    status.set_message("Quit with code " + status_code_str);
+  }
+  return response;
+}
+
+Result<Command> ConstructCommand(const ConstructCommandParam& param) {
+  Command command(param.command_name);
+  command.SetExecutable(param.bin_path);
+  for (const std::string& arg : param.args) {
+    command.AddParameter(arg);
+  }
+  // Set CuttlefishConfig path based on assembly dir,
+  // used by subcommands when locating the CuttlefishConfig.
+  if (param.envs.count(cuttlefish::kCuttlefishConfigEnvVarName) == 0) {
+    auto config_path = InstanceManager::GetCuttlefishConfigPath(param.home);
+    if (config_path.ok()) {
+      command.AddEnvironmentVariable(cuttlefish::kCuttlefishConfigEnvVarName,
+                                     *config_path);
+    }
+  }
+  for (auto& it : param.envs) {
+    command.UnsetFromEnvironment(it.first);
+    command.AddEnvironmentVariable(it.first, it.second);
+  }
+  // Redirect stdin, stdout, stderr back to the cvd client
+  command.RedirectStdIO(Subprocess::StdIOChannel::kStdIn, param.in);
+  command.RedirectStdIO(Subprocess::StdIOChannel::kStdOut, param.out);
+  command.RedirectStdIO(Subprocess::StdIOChannel::kStdErr, param.err);
+
+  if (!param.working_dir.empty()) {
+    auto fd =
+        SharedFD::Open(param.working_dir, O_RDONLY | O_PATH | O_DIRECTORY);
+    CF_EXPECT(fd->IsOpen(), "Couldn't open \"" << param.working_dir
+                                               << "\": " << fd->StrError());
+    command.SetWorkingDirectory(fd);
+  }
+  return {std::move(command)};
+}
+
+Result<Command> ConstructCvdHelpCommand(
+    const std::string& bin_file, cvd_common::Envs envs,
+    const std::vector<std::string>& subcmd_args,
+    const RequestWithStdio& request) {
+  const auto host_artifacts_path = envs.at("ANDROID_HOST_OUT");
+  const auto bin_path = host_artifacts_path + "/bin/" + bin_file;
+  auto client_pwd = request.Message().command_request().working_directory();
+  const auto home = (Contains(envs, "HOME") ? envs.at("HOME") : client_pwd);
+  cvd_common::Envs envs_copy{envs};
+  envs_copy["HOME"] = AbsolutePath(home);
+  envs[kAndroidSoongHostOut] = envs.at(kAndroidHostOut);
+  ConstructCommandParam construct_cmd_param{.bin_path = bin_path,
+                                            .home = home,
+                                            .args = subcmd_args,
+                                            .envs = std::move(envs_copy),
+                                            .working_dir = client_pwd,
+                                            .command_name = bin_file,
+                                            .in = request.In(),
+                                            .out = request.Out(),
+                                            .err = request.Err()};
+  Command help_command = CF_EXPECT(ConstructCommand(construct_cmd_param));
+  return help_command;
+}
+
+Result<Command> ConstructCvdGenericNonHelpCommand(
+    const ConstructNonHelpForm& request_form, const RequestWithStdio& request) {
+  cvd_common::Envs envs{request_form.envs};
+  envs["HOME"] = request_form.home;
+  envs[kAndroidHostOut] = request_form.android_host_out;
+  envs[kAndroidSoongHostOut] = request_form.android_host_out;
+  const auto bin_path = ConcatToString(request_form.android_host_out, "/bin/",
+                                       request_form.bin_file);
+
+  if (request_form.verbose) {
+    std::stringstream verbose_stream;
+    verbose_stream << "HOME=" << request_form.home << " ";
+    verbose_stream << kAndroidHostOut << "=" << envs.at(kAndroidHostOut) << " "
+                   << kAndroidSoongHostOut << "="
+                   << envs.at(kAndroidSoongHostOut) << " ";
+    verbose_stream << bin_path << "\\" << std::endl;
+    for (const auto& cmd_arg : request_form.cmd_args) {
+      verbose_stream << cmd_arg << " ";
+    }
+    if (!request_form.cmd_args.empty()) {
+      // remove trailing " ", and add a new line
+      verbose_stream.seekp(-1, std::ios_base::end);
+      verbose_stream << std::endl;
+    }
+    WriteAll(request.Err(), verbose_stream.str());
+  }
+  ConstructCommandParam construct_cmd_param{
+      .bin_path = bin_path,
+      .home = request_form.home,
+      .args = request_form.cmd_args,
+      .envs = envs,
+      .working_dir = request.Message().command_request().working_directory(),
+      .command_name = request_form.bin_file,
+      .in = request.In(),
+      .out = request.Out(),
+      .err = request.Err()};
+  return CF_EXPECT(ConstructCommand(construct_cmd_param));
+}
+
+/*
+ * From external/gflags/src, commit:
+ *  061f68cd158fa658ec0b9b2b989ed55764870047
+ *
+ */
+constexpr static std::array help_bool_opts{
+    "help", "helpfull", "helpshort", "helppackage", "helpxml", "version"};
+constexpr static std::array help_str_opts{
+    "helpon",
+    "helpmatch",
+};
+
+bool IsHelpSubcmd(const std::vector<std::string>& args) {
+  std::vector<std::string> copied_args(args);
+  std::vector<Flag> flags;
+  flags.reserve(help_bool_opts.size() + help_str_opts.size());
+  bool bool_value_placeholder = false;
+  std::string str_value_placeholder;
+  for (const auto bool_opt : help_bool_opts) {
+    flags.emplace_back(GflagsCompatFlag(bool_opt, bool_value_placeholder));
+  }
+  for (const auto str_opt : help_str_opts) {
+    flags.emplace_back(GflagsCompatFlag(str_opt, str_value_placeholder));
+  }
+  ParseFlags(flags, copied_args);
+  // if there was any match, some in copied_args were consumed.
+  return (args.size() != copied_args.size());
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/server_command/utils.h b/host/commands/cvd/server_command/utils.h
new file mode 100644
index 0000000..08a0cd5
--- /dev/null
+++ b/host/commands/cvd/server_command/utils.h
@@ -0,0 +1,80 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <sys/types.h>
+
+#include <optional>
+#include <string>
+#include <unordered_map>
+#include <vector>
+
+#include "cvd_server.pb.h"
+
+#include "common/libs/utils/result.h"
+#include "common/libs/utils/subprocess.h"
+#include "host/commands/cvd/server_client.h"
+#include "host/commands/cvd/types.h"
+
+namespace cuttlefish {
+
+struct CommandInvocation {
+  std::string command;
+  std::vector<std::string> arguments;
+};
+
+CommandInvocation ParseInvocation(const cvd::Request& request);
+
+cuttlefish::cvd::Response ResponseFromSiginfo(siginfo_t infop);
+
+Result<void> VerifyPrecondition(const RequestWithStdio& request);
+
+struct ConstructCommandParam {
+  const std::string& bin_path;
+  const std::string& home;
+  const std::vector<std::string>& args;
+  const cvd_common::Envs& envs;
+  const std::string& working_dir;
+  const std::string& command_name;
+  SharedFD in;
+  SharedFD out;
+  SharedFD err;
+};
+Result<Command> ConstructCommand(const ConstructCommandParam& cmd_param);
+
+// Constructs a command for cvd whatever --help or --help-related-option
+Result<Command> ConstructCvdHelpCommand(const std::string& bin_file,
+                                        cvd_common::Envs envs,
+                                        const cvd_common::Args& _args,
+                                        const RequestWithStdio& request);
+
+// Constructs a command for cvd non-start-op
+struct ConstructNonHelpForm {
+  std::string bin_file;
+  cvd_common::Envs envs;
+  cvd_common::Args cmd_args;
+  std::string android_host_out;
+  std::string home;
+  bool verbose;
+};
+Result<Command> ConstructCvdGenericNonHelpCommand(
+    const ConstructNonHelpForm& request_form, const RequestWithStdio& request);
+
+// e.g. cvd start --help, cvd stop --help
+bool IsHelpSubcmd(const std::vector<std::string>& args);
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/server_command/version.cpp b/host/commands/cvd/server_command/version.cpp
new file mode 100644
index 0000000..bf16183
--- /dev/null
+++ b/host/commands/cvd/server_command/version.cpp
@@ -0,0 +1,66 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/commands/cvd/server.h"
+
+#include <build/version.h>
+#include <cvd_server.pb.h>
+#include <fruit/fruit.h>
+
+#include "common/libs/utils/result.h"
+#include "host/commands/cvd/common_utils.h"
+#include "host/commands/cvd/server_command/components.h"
+#include "host/commands/cvd/server_constants.h"
+#include "host/commands/cvd/types.h"
+#include "host/libs/config/host_tools_version.h"
+
+namespace cuttlefish {
+namespace {
+
+class CvdVersionHandler : public CvdServerHandler {
+ public:
+  INJECT(CvdVersionHandler()) = default;
+
+  Result<bool> CanHandle(const RequestWithStdio& request) const override {
+    return request.Message().contents_case() ==
+           cvd::Request::ContentsCase::kVersionRequest;
+  }
+
+  Result<cvd::Response> Handle(const RequestWithStdio& request) override {
+    CF_EXPECT(CanHandle(request));
+    cvd::Response response;
+    auto& version = *response.mutable_version_response()->mutable_version();
+    version.set_major(cvd::kVersionMajor);
+    version.set_minor(cvd::kVersionMinor);
+    version.set_build(android::build::GetBuildNumber());
+    version.set_crc32(FileCrc(kServerExecPath));
+    response.mutable_status()->set_code(cvd::Status::OK);
+    return response;
+  }
+
+  Result<void> Interrupt() override { return CF_ERR("Can't interrupt"); }
+
+  cvd_common::Args CmdList() const override { return {"version"}; }
+};
+
+}  // namespace
+
+fruit::Component<> cvdVersionComponent() {
+  return fruit::createComponent()
+      .addMultibinding<CvdServerHandler, CvdVersionHandler>();
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/server_constants.h b/host/commands/cvd/server_constants.h
index 300c8c4..b3c00d5 100644
--- a/host/commands/cvd/server_constants.h
+++ b/host/commands/cvd/server_constants.h
@@ -20,7 +20,7 @@
 // Major version uprevs are backwards incompatible.
 // Minor version uprevs are backwards compatible within major version.
 constexpr int kVersionMajor = 1;
-constexpr int kVersionMinor = 1;
+constexpr int kVersionMinor = 2;
 
 // Pathname of the abstract cvd_server socket.
 constexpr char kServerSocketPath[] = "cvd_server";
diff --git a/host/commands/cvd/server_shutdown.cpp b/host/commands/cvd/server_shutdown.cpp
deleted file mode 100644
index ec7af0b..0000000
--- a/host/commands/cvd/server_shutdown.cpp
+++ /dev/null
@@ -1,96 +0,0 @@
-/*
- * Copyright (C) 2022 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "host/commands/cvd/server.h"
-
-#include <fruit/fruit.h>
-
-#include "cvd_server.pb.h"
-
-#include "common/libs/fs/shared_buf.h"
-#include "common/libs/fs/shared_fd.h"
-#include "common/libs/utils/result.h"
-#include "host/commands/cvd/instance_manager.h"
-
-namespace cuttlefish {
-namespace {
-
-class CvdShutdownHandler : public CvdServerHandler {
- public:
-  INJECT(CvdShutdownHandler(CvdServer& server,
-                            InstanceManager& instance_manager))
-      : server_(server), instance_manager_(instance_manager) {}
-
-  Result<bool> CanHandle(const RequestWithStdio& request) const override {
-    return request.Message().contents_case() ==
-           cvd::Request::ContentsCase::kShutdownRequest;
-  }
-
-  Result<cvd::Response> Handle(const RequestWithStdio& request) override {
-    CF_EXPECT(CanHandle(request));
-    cvd::Response response;
-    response.mutable_shutdown_response();
-
-    if (!request.Extra()) {
-      response.mutable_status()->set_code(cvd::Status::FAILED_PRECONDITION);
-      response.mutable_status()->set_message(
-          "Missing extra SharedFD for shutdown");
-      return response;
-    }
-
-    if (request.Message().shutdown_request().clear()) {
-      *response.mutable_status() =
-          instance_manager_.CvdClear(request.Out(), request.Err());
-      if (response.status().code() != cvd::Status::OK) {
-        return response;
-      }
-    }
-
-    if (instance_manager_.HasInstanceGroups()) {
-      response.mutable_status()->set_code(cvd::Status::FAILED_PRECONDITION);
-      response.mutable_status()->set_message(
-          "Cannot shut down cvd_server while devices are being tracked. "
-          "Try `cvd kill-server`.");
-      return response;
-    }
-
-    // Intentionally leak the write_pipe fd so that it only closes
-    // when this process fully exits.
-    (*request.Extra())->UNMANAGED_Dup();
-
-    WriteAll(request.Out(), "Stopping the cvd_server.\n");
-    server_.Stop();
-
-    response.mutable_status()->set_code(cvd::Status::OK);
-    return response;
-  }
-
-  Result<void> Interrupt() override { return CF_ERR("Can't interrupt"); }
-
- private:
-  CvdServer& server_;
-  InstanceManager& instance_manager_;
-};
-
-}  // namespace
-
-fruit::Component<fruit::Required<CvdServer, InstanceManager>>
-cvdShutdownComponent() {
-  return fruit::createComponent()
-      .addMultibinding<CvdServerHandler, CvdShutdownHandler>();
-}
-
-}  // namespace cuttlefish
diff --git a/host/commands/cvd/server_version.cpp b/host/commands/cvd/server_version.cpp
deleted file mode 100644
index 2168c0f..0000000
--- a/host/commands/cvd/server_version.cpp
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Copyright (C) 2022 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "host/commands/cvd/server.h"
-
-#include <build/version.h>
-#include <fruit/fruit.h>
-
-#include "cvd_server.pb.h"
-
-#include "common/libs/utils/result.h"
-#include "host/commands/cvd/server_constants.h"
-#include "host/libs/config/host_tools_version.h"
-
-namespace cuttlefish {
-namespace {
-
-class CvdVersionHandler : public CvdServerHandler {
- public:
-  INJECT(CvdVersionHandler()) = default;
-
-  Result<bool> CanHandle(const RequestWithStdio& request) const override {
-    return request.Message().contents_case() ==
-           cvd::Request::ContentsCase::kVersionRequest;
-  }
-
-  Result<cvd::Response> Handle(const RequestWithStdio& request) override {
-    CF_EXPECT(CanHandle(request));
-    cvd::Response response;
-    auto& version = *response.mutable_version_response()->mutable_version();
-    version.set_major(cvd::kVersionMajor);
-    version.set_minor(cvd::kVersionMinor);
-    version.set_build(android::build::GetBuildNumber());
-    version.set_crc32(FileCrc("/proc/self/exe"));
-    response.mutable_status()->set_code(cvd::Status::OK);
-    return response;
-  }
-
-  Result<void> Interrupt() override { return CF_ERR("Can't interrupt"); }
-};
-
-}  // namespace
-
-fruit::Component<> cvdVersionComponent() {
-  return fruit::createComponent()
-      .addMultibinding<CvdServerHandler, CvdVersionHandler>();
-}
-
-}  // namespace cuttlefish
diff --git a/host/commands/cvd/types.cpp b/host/commands/cvd/types.cpp
new file mode 100644
index 0000000..651cb14
--- /dev/null
+++ b/host/commands/cvd/types.cpp
@@ -0,0 +1,42 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/commands/cvd/types.h"
+
+namespace cuttlefish {
+namespace cvd_common {
+
+Args ConvertToArgs(
+    const google::protobuf::RepeatedPtrField<std::string>& proto_args) {
+  Args args;
+  args.reserve(proto_args.size());
+  for (const auto& proto_arg : proto_args) {
+    args.emplace_back(proto_arg);
+  }
+  return args;
+}
+
+Envs ConvertToEnvs(
+    const google::protobuf::Map<std::string, std::string>& proto_map) {
+  cvd_common::Envs envs;
+  for (const auto& entry : proto_map) {
+    envs[entry.first] = entry.second;
+  }
+  return envs;
+}
+
+}  // namespace cvd_common
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/types.h b/host/commands/cvd/types.h
new file mode 100644
index 0000000..8c5c112
--- /dev/null
+++ b/host/commands/cvd/types.h
@@ -0,0 +1,37 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#pragma once
+
+#include <string>
+#include <unordered_set>
+#include <vector>
+
+#include <cvd_server.pb.h>
+
+namespace cuttlefish {
+namespace cvd_common {
+
+using Args = std::vector<std::string>;
+using Envs = std::unordered_map<std::string, std::string>;
+
+Envs ConvertToEnvs(
+    const google::protobuf::Map<std::string, std::string>& proto_map);
+
+Args ConvertToArgs(
+    const google::protobuf::RepeatedPtrField<std::string>& proto_args);
+
+}  // namespace cvd_common
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/unittests/Android.bp b/host/commands/cvd/unittests/Android.bp
new file mode 100644
index 0000000..fe60b9c
--- /dev/null
+++ b/host/commands/cvd/unittests/Android.bp
@@ -0,0 +1,18 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
diff --git a/host/commands/cvd/unittests/parser/Android.bp b/host/commands/cvd/unittests/parser/Android.bp
new file mode 100644
index 0000000..f5f33fa
--- /dev/null
+++ b/host/commands/cvd/unittests/parser/Android.bp
@@ -0,0 +1,61 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+cc_defaults {
+    name: "cvd_load_defaults",
+    shared_libs: [
+        "libext2_blkid",
+        "libbase",
+        "libcuttlefish_fs",
+        "liblog",
+        "libicuuc",
+    ],
+    defaults: ["cuttlefish_buildhost_only"],
+}
+
+cc_test_host {
+    name: "cvd_load_test",
+    shared_libs: [
+        "libext2_blkid",
+        "libbase",
+        "libcuttlefish_fs",
+        "libcuttlefish_utils",
+        "libjsoncpp",
+    ],
+    srcs: [
+        "test_common.cc",
+        "flags_parser_test.cc",
+        "configs_inheritance_test.cc",
+        "instance/vm_configs_test.cc",
+        "instance/boot_configs_test.cc",
+        "instance/metrics_configs_test.cc",
+        "instance/graphics_configs_test.cc",
+    ],
+    static_libs: [
+        "libprotobuf-cpp-full",
+        "libcuttlefish_launch_cvd_proto",
+        "libcvd_parser",
+        "libcuttlefish_host_config",
+        "libgmock",
+    ],
+    test_options: {
+        unit_test: true,
+    },
+    defaults: ["cvd_load_defaults"],
+}
\ No newline at end of file
diff --git a/host/commands/cvd/unittests/parser/configs_inheritance_test.cc b/host/commands/cvd/unittests/parser/configs_inheritance_test.cc
new file mode 100644
index 0000000..979929f
--- /dev/null
+++ b/host/commands/cvd/unittests/parser/configs_inheritance_test.cc
@@ -0,0 +1,147 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <algorithm>
+#include <fstream>
+#include <iostream>
+
+#include <android-base/file.h>
+
+#include <gtest/gtest.h>
+
+#include "host/commands/cvd/parser/cf_configs_common.h"
+#include "host/commands/cvd/unittests/parser/test_common.h"
+
+namespace cuttlefish {
+
+TEST(FlagsInheritanceTest, MergeTwoIndependentJson) {
+  const char* dst_string = R""""(
+{
+    "instances" :
+    [
+        {
+            "vm": {
+                "memory_mb": 2048
+            }
+        }
+    ]
+}
+  )"""";
+
+  const char* src_string = R""""(
+{
+    "instances" :
+    [
+        {
+            "graphics":{
+                "displays":[
+                    {
+                        "width": 720,
+                        "height": 1280,
+                        "dpi": 320
+                    }
+                ]
+			}
+        }
+    ]
+}
+  )"""";
+
+  Json::Value src_object, dst_object;
+  std::string src_text(src_string);
+  std::string dst_text(dst_string);
+  EXPECT_TRUE(ParseJsonString(dst_text, dst_object)) << "Invalid Json string";
+  EXPECT_TRUE(ParseJsonString(src_text, src_object)) << "Invalid Json string";
+
+  cuttlefish::MergeTwoJsonObjs(dst_object, src_object);
+  EXPECT_TRUE(dst_object["instances"][0].isMember("graphics"));
+  EXPECT_TRUE(dst_object["instances"][0]["graphics"].isMember("displays"));
+  EXPECT_TRUE(
+      dst_object["instances"][0]["graphics"]["displays"][0].isMember("width"));
+  EXPECT_TRUE(
+      dst_object["instances"][0]["graphics"]["displays"][0].isMember("height"));
+  EXPECT_TRUE(
+      dst_object["instances"][0]["graphics"]["displays"][0].isMember("dpi"));
+
+  EXPECT_EQ(dst_object["instances"][0]["graphics"]["displays"][0]["width"],
+            720);
+  EXPECT_EQ(dst_object["instances"][0]["graphics"]["displays"][0]["height"],
+            1280);
+  EXPECT_EQ(dst_object["instances"][0]["graphics"]["displays"][0]["dpi"], 320);
+}
+
+TEST(FlagsInheritanceTest, MergeTwoOverlappedJson) {
+  const char* dst_string = R""""(
+{
+    "instances" :
+    [
+        {
+            "vm": {
+                "memory_mb": 1024
+            }
+        }
+    ]
+}
+  )"""";
+
+  const char* src_string = R""""(
+{
+    "instances" :
+    [
+        {
+            "vm": {
+                "memory_mb": 2048
+            },
+            "graphics":{
+                "displays":[
+                    {
+                        "width": 720,
+                        "height": 1280,
+                        "dpi": 320
+                    }
+                ]
+			}
+        }
+    ]
+}
+  )"""";
+
+  Json::Value src_object, dst_object;
+  std::string src_text(src_string);
+  std::string dst_text(dst_string);
+  EXPECT_TRUE(ParseJsonString(dst_text, dst_object)) << "Invalid Json string";
+  EXPECT_TRUE(ParseJsonString(src_text, src_object)) << "Invalid Json string";
+
+  cuttlefish::MergeTwoJsonObjs(dst_object, src_object);
+  EXPECT_TRUE(dst_object["instances"][0].isMember("graphics"));
+  EXPECT_TRUE(dst_object["instances"][0]["graphics"].isMember("displays"));
+  EXPECT_TRUE(
+      dst_object["instances"][0]["graphics"]["displays"][0].isMember("width"));
+  EXPECT_TRUE(
+      dst_object["instances"][0]["graphics"]["displays"][0].isMember("height"));
+  EXPECT_TRUE(
+      dst_object["instances"][0]["graphics"]["displays"][0].isMember("dpi"));
+
+  EXPECT_EQ(dst_object["instances"][0]["graphics"]["displays"][0]["width"],
+            720);
+  EXPECT_EQ(dst_object["instances"][0]["graphics"]["displays"][0]["height"],
+            1280);
+  EXPECT_EQ(dst_object["instances"][0]["graphics"]["displays"][0]["dpi"], 320);
+  // Check for overlapped values
+  EXPECT_EQ(dst_object["instances"][0]["vm"]["memory_mb"], 2048);
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/unittests/parser/flags_parser_test.cc b/host/commands/cvd/unittests/parser/flags_parser_test.cc
new file mode 100644
index 0000000..4542468
--- /dev/null
+++ b/host/commands/cvd/unittests/parser/flags_parser_test.cc
@@ -0,0 +1,169 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <algorithm>
+#include <fstream>
+#include <iostream>
+
+#include <android-base/file.h>
+#include <gtest/gtest.h>
+
+#include "host/commands/cvd/parser/launch_cvd_parser.h"
+#include "host/commands/cvd/unittests/parser/test_common.h"
+namespace cuttlefish {
+TEST(FlagsParserTest, ParseInvalidJson) {
+  const char* test_string = R""""(
+    instances=50;
+  )"""";
+
+  Json::Value json_configs;
+  std::string json_text(test_string);
+
+  EXPECT_FALSE(ParseJsonString(json_text, json_configs));
+}
+
+TEST(FlagsParserTest, ParseJsonWithSpellingError) {
+  const char* test_string = R""""(
+{
+    "Insta" :
+    [
+        {
+        }
+    ]
+}
+  )"""";
+
+  Json::Value json_configs;
+  std::string json_text(test_string);
+
+  EXPECT_TRUE(ParseJsonString(json_text, json_configs))
+      << "Invalid Json string";
+  auto serialized_data = LaunchCvdParserTester(json_configs);
+  EXPECT_FALSE(serialized_data.ok());
+}
+
+TEST(FlagsParserTest, ParseBasicJsonSingleInstances) {
+  const char* test_string = R""""(
+{
+    "instances" :
+    [
+        {
+          "vm": {
+            "crosvm":{
+            }
+          }
+        }
+    ]
+}
+  )"""";
+
+  Json::Value json_configs;
+  std::string json_text(test_string);
+
+  EXPECT_TRUE(ParseJsonString(json_text, json_configs))
+      << "Invalid Json string";
+  auto serialized_data = LaunchCvdParserTester(json_configs);
+  EXPECT_TRUE(serialized_data.ok()) << serialized_data.error().Trace();
+  EXPECT_TRUE(FindConfig(*serialized_data, "--num_instances=1"))
+      << "num_instances flag is missing or wrongly formatted";
+}
+
+TEST(FlagsParserTest, ParseBasicJsonTwoInstances) {
+  const char* test_string = R""""(
+{
+    "instances" :
+    [
+        {
+          "vm": {
+            "crosvm":{
+            }
+          }
+        },
+        {
+          "vm": {
+            "crosvm":{
+            }
+          }
+        }
+    ]
+}
+  )"""";
+
+  Json::Value json_configs;
+  std::string json_text(test_string);
+
+  EXPECT_TRUE(ParseJsonString(json_text, json_configs))
+      << "Invalid Json string";
+  auto serialized_data = LaunchCvdParserTester(json_configs);
+  EXPECT_TRUE(serialized_data.ok()) << serialized_data.error().Trace();
+  EXPECT_TRUE(FindConfig(*serialized_data, "--num_instances=2"))
+      << "num_instances flag is missing or wrongly formatted";
+}
+
+TEST(BootFlagsParserTest, ParseNetSimFlagEmptyJson) {
+  const char* test_string = R""""(
+{
+  "instances" :
+  [
+        {
+          "vm": {
+            "crosvm":{
+            }
+          }
+        }
+  ]
+}
+  )"""";
+
+  Json::Value json_configs;
+  std::string json_text(test_string);
+
+  EXPECT_TRUE(ParseJsonString(json_text, json_configs))
+      << "Invalid Json string";
+  auto serialized_data = LaunchCvdParserTester(json_configs);
+  EXPECT_TRUE(serialized_data.ok()) << serialized_data.error().Trace();
+  EXPECT_TRUE(FindConfig(*serialized_data, R"(--netsim_bt=false)"))
+      << "netsim_bt flag is missing or wrongly formatted";
+}
+
+TEST(BootFlagsParserTest, ParseNetSimFlagEnabled) {
+  const char* test_string = R""""(
+{
+   "netsim_bt": true,
+     "instances" :
+     [
+        {
+          "vm": {
+            "crosvm":{
+            }
+          }
+        }
+      ]
+}
+  )"""";
+
+  Json::Value json_configs;
+  std::string json_text(test_string);
+
+  EXPECT_TRUE(ParseJsonString(json_text, json_configs))
+      << "Invalid Json string";
+  auto serialized_data = LaunchCvdParserTester(json_configs);
+  EXPECT_TRUE(serialized_data.ok()) << serialized_data.error().Trace();
+  EXPECT_TRUE(FindConfig(*serialized_data, R"(--netsim_bt=true)"))
+      << "netsim_bt flag is missing or wrongly formatted";
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/unittests/parser/instance/boot_configs_test.cc b/host/commands/cvd/unittests/parser/instance/boot_configs_test.cc
new file mode 100644
index 0000000..13daad9
--- /dev/null
+++ b/host/commands/cvd/unittests/parser/instance/boot_configs_test.cc
@@ -0,0 +1,694 @@
+/*
+ * Copyright (C) 2015-2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <gtest/gtest.h>
+#include "host/commands/cvd/parser/launch_cvd_parser.h"
+#include "host/commands/cvd/unittests/parser/test_common.h"
+
+namespace cuttlefish {
+#ifndef GENERATE_MVP_FLAGS_ONLY
+TEST(BootFlagsParserTest, ParseTwoInstancesExtraBootConfigFlagEmptyJson) {
+  const char* test_string = R""""(
+{
+    "instances" :
+    [
+        {
+            "vm": {
+                "crosvm":{
+                }
+            }
+        },
+        {
+            "vm": {
+                "crosvm":{
+                }
+            }
+        }
+    ]
+}
+  )"""";
+
+  Json::Value json_configs;
+  std::string json_text(test_string);
+
+  EXPECT_TRUE(ParseJsonString(json_text, json_configs))
+      << "Invalid Json string";
+  auto serialized_data = LaunchCvdParserTester(json_configs);
+  EXPECT_TRUE(serialized_data.ok()) << serialized_data.error().Trace();
+  EXPECT_TRUE(FindConfig(*serialized_data, R"(--extra_bootconfig_args=,)"))
+      << "extra_bootconfig_args flag is missing or wrongly formatted";
+}
+
+TEST(BootFlagsParserTest, ParseTwoInstancesExtraBootConfigFlagPartialJson) {
+  const char* test_string = R""""(
+{
+    "instances" :
+    [
+        {
+            "vm": {
+                "crosvm":{
+                }
+            },
+            "boot": {
+            }
+        },
+        {
+            "vm": {
+                "crosvm":{
+                }
+            },
+            "boot": {
+                "extra_bootconfig_args": "androidboot.X=Y"
+            }
+        }
+    ]
+}
+  )"""";
+
+  Json::Value json_configs;
+  std::string json_text(test_string);
+
+  EXPECT_TRUE(ParseJsonString(json_text, json_configs))
+      << "Invalid Json string";
+  auto serialized_data = LaunchCvdParserTester(json_configs);
+  EXPECT_TRUE(serialized_data.ok()) << serialized_data.error().Trace();
+  EXPECT_TRUE(FindConfig(*serialized_data,
+                         R"(--extra_bootconfig_args=,androidboot.X=Y)"))
+      << "extra_bootconfig_args flag is missing or wrongly formatted";
+}
+
+TEST(BootFlagsParserTest, ParseTwoInstancesExtraBootConfigFlagFullJson) {
+  const char* test_string = R""""(
+{
+    "instances" :
+    [
+        {
+            "vm": {
+                "crosvm":{
+                }
+            },
+            "boot": {
+                "extra_bootconfig_args": "androidboot.X=Y"
+            }
+        },
+        {
+            "vm": {
+                "crosvm":{
+                }
+            },
+            "boot": {
+                "extra_bootconfig_args": "androidboot.X=Z"
+            }
+        }
+    ]
+}
+  )"""";
+
+  Json::Value json_configs;
+  std::string json_text(test_string);
+
+  EXPECT_TRUE(ParseJsonString(json_text, json_configs))
+      << "Invalid Json string";
+  auto serialized_data = LaunchCvdParserTester(json_configs);
+  EXPECT_TRUE(serialized_data.ok()) << serialized_data.error().Trace();
+  EXPECT_TRUE(
+      FindConfig(*serialized_data,
+                 R"(--extra_bootconfig_args=androidboot.X=Y,androidboot.X=Z)"))
+      << "extra_bootconfig_args flag is missing or wrongly formatted";
+}
+
+TEST(BootFlagsParserTest, ParseTwoInstancesBootAnimationFlagEmptyJson) {
+  const char* test_string = R""""(
+{
+    "instances" :
+    [
+        {
+            "vm": {
+                "crosvm":{
+                }
+            }
+        },
+        {
+            "vm": {
+                "crosvm":{
+                }
+            }
+        }
+    ]
+}
+  )"""";
+
+  Json::Value json_configs;
+  std::string json_text(test_string);
+
+  EXPECT_TRUE(ParseJsonString(json_text, json_configs))
+      << "Invalid Json string";
+  auto serialized_data = LaunchCvdParserTester(json_configs);
+  EXPECT_TRUE(serialized_data.ok()) << serialized_data.error().Trace();
+  EXPECT_TRUE(
+      FindConfig(*serialized_data, R"(--enable_bootanimation=true,true)"))
+      << "enable_bootanimation flag is missing or wrongly formatted";
+}
+
+TEST(BootFlagsParserTest, ParseTwoInstancesBootAnimationFlagPartialJson) {
+  const char* test_string = R""""(
+{
+    "instances" :
+    [
+        {
+            "vm": {
+                "crosvm":{
+                }
+            },
+            "boot": {
+            }
+        },
+        {
+            "vm": {
+                "crosvm":{
+                }
+            },
+            "boot": {
+                "enable_bootanimation": false
+            }
+        }
+    ]
+}
+  )"""";
+
+  Json::Value json_configs;
+  std::string json_text(test_string);
+
+  EXPECT_TRUE(ParseJsonString(json_text, json_configs))
+      << "Invalid Json string";
+  auto serialized_data = LaunchCvdParserTester(json_configs);
+  EXPECT_TRUE(serialized_data.ok()) << serialized_data.error().Trace();
+  EXPECT_TRUE(
+      FindConfig(*serialized_data, R"(--enable_bootanimation=true,false)"))
+      << "enable_bootanimation flag is missing or wrongly formatted";
+}
+
+TEST(BootFlagsParserTest, ParseTwoInstancesBootAnimationFlagFullJson) {
+  const char* test_string = R""""(
+{
+    "instances" :
+    [
+        {
+            "vm": {
+                "crosvm":{
+                }
+            },
+            "boot": {
+                "enable_bootanimation": false
+            }
+        },
+        {
+            "vm": {
+                "crosvm":{
+                }
+            },
+            "boot": {
+                "enable_bootanimation": false
+            }
+        }
+    ]
+}
+  )"""";
+
+  Json::Value json_configs;
+  std::string json_text(test_string);
+
+  EXPECT_TRUE(ParseJsonString(json_text, json_configs))
+      << "Invalid Json string";
+  auto serialized_data = LaunchCvdParserTester(json_configs);
+  EXPECT_TRUE(serialized_data.ok()) << serialized_data.error().Trace();
+  EXPECT_TRUE(
+      FindConfig(*serialized_data, R"(--enable_bootanimation=false,false)"))
+      << "enable_bootanimation flag is missing or wrongly formatted";
+}
+
+TEST(BootFlagsParserTest, ParseTwoInstancesSerialNumberFlagEmptyJson) {
+  const char* test_string = R""""(
+{
+    "instances" :
+    [
+        {
+            "vm": {
+                "crosvm":{
+                }
+            }
+        },
+        {
+            "vm": {
+                "crosvm":{
+                }
+            }
+        }
+    ]
+}
+  )"""";
+
+  Json::Value json_configs;
+  std::string json_text(test_string);
+
+  EXPECT_TRUE(ParseJsonString(json_text, json_configs))
+      << "Invalid Json string";
+  auto serialized_data = LaunchCvdParserTester(json_configs);
+  EXPECT_TRUE(serialized_data.ok()) << serialized_data.error().Trace();
+  EXPECT_TRUE(FindConfig(*serialized_data,
+                         R"(--serial_number=CUTTLEFISHCVD01,CUTTLEFISHCVD01)"))
+      << "serial_number flag is missing or wrongly formatted";
+}
+
+TEST(BootFlagsParserTest, ParseTwoInstancesSerialNumberFlagPartialJson) {
+  const char* test_string = R""""(
+{
+    "instances" :
+    [
+        {
+            "vm": {
+                "crosvm":{
+                }
+            },
+            "security": {
+            }
+        },
+        {
+            "vm": {
+                "crosvm":{
+                }
+            },
+            "security": {
+                "serial_number": "CUTTLEFISHCVD101"
+            }
+        }
+    ]
+}
+  )"""";
+
+  Json::Value json_configs;
+  std::string json_text(test_string);
+
+  EXPECT_TRUE(ParseJsonString(json_text, json_configs))
+      << "Invalid Json string";
+  auto serialized_data = LaunchCvdParserTester(json_configs);
+  EXPECT_TRUE(serialized_data.ok()) << serialized_data.error().Trace();
+  EXPECT_TRUE(FindConfig(*serialized_data,
+                         R"(--serial_number=CUTTLEFISHCVD01,CUTTLEFISHCVD101)"))
+      << "serial_number flag is missing or wrongly formatted";
+}
+
+TEST(BootFlagsParserTest, ParseTwoInstancesSerialNumberFlagFullJson) {
+  const char* test_string = R""""(
+{
+    "instances" :
+    [
+        {
+            "vm": {
+                "crosvm":{
+                }
+            },
+            "security": {
+                "serial_number": "CUTTLEFISHCVD101"
+            }
+        },
+        {
+            "vm": {
+                "crosvm":{
+                }
+            },
+            "security": {
+                "serial_number": "CUTTLEFISHCVD102"
+            }
+        }
+    ]
+}
+  )"""";
+
+  Json::Value json_configs;
+  std::string json_text(test_string);
+
+  EXPECT_TRUE(ParseJsonString(json_text, json_configs))
+      << "Invalid Json string";
+  auto serialized_data = LaunchCvdParserTester(json_configs);
+  EXPECT_TRUE(serialized_data.ok()) << serialized_data.error().Trace();
+  EXPECT_TRUE(FindConfig(
+      *serialized_data, R"(--serial_number=CUTTLEFISHCVD101,CUTTLEFISHCVD102)"))
+      << "serial_number flag is missing or wrongly formatted";
+}
+
+TEST(BootFlagsParserTest, ParseTwoInstancesRandomSerialFlagEmptyJson) {
+  const char* test_string = R""""(
+{
+    "instances" :
+    [
+        {
+            "vm": {
+                "crosvm":{
+                }
+            }
+        },
+        {
+            "vm": {
+                "crosvm":{
+                }
+            }
+        }
+    ]
+}
+  )"""";
+
+  Json::Value json_configs;
+  std::string json_text(test_string);
+
+  EXPECT_TRUE(ParseJsonString(json_text, json_configs))
+      << "Invalid Json string";
+  auto serialized_data = LaunchCvdParserTester(json_configs);
+  EXPECT_TRUE(serialized_data.ok()) << serialized_data.error().Trace();
+  EXPECT_TRUE(
+      FindConfig(*serialized_data, R"(--use_random_serial=false,false)"))
+      << "use_random_serial flag is missing or wrongly formatted";
+}
+
+TEST(BootFlagsParserTest, ParseTwoInstancesRandomSerialFlagPartialJson) {
+  const char* test_string = R""""(
+{
+    "instances" :
+    [
+        {
+            "vm": {
+                "crosvm":{
+                }
+            },
+            "security": {
+                "serial_number": "CUTTLEFISHCVD101"
+            }
+        },
+        {
+            "vm": {
+                "crosvm":{
+                }
+            },
+            "security": {
+                "serial_number": "@random"
+            }
+        }
+    ]
+}
+  )"""";
+
+  Json::Value json_configs;
+  std::string json_text(test_string);
+
+  EXPECT_TRUE(ParseJsonString(json_text, json_configs))
+      << "Invalid Json string";
+  auto serialized_data = LaunchCvdParserTester(json_configs);
+  EXPECT_TRUE(serialized_data.ok()) << serialized_data.error().Trace();
+  EXPECT_TRUE(FindConfig(*serialized_data, R"(--use_random_serial=false,true)"))
+      << "use_random_serial flag is missing or wrongly formatted";
+}
+
+TEST(BootFlagsParserTest, ParseTwoInstancesRandomSerialFlagFullJson) {
+  const char* test_string = R""""(
+{
+    "instances" :
+    [
+        {
+            "vm": {
+                "crosvm":{
+                }
+            },
+            "security": {
+                "serial_number": "@random"
+            }
+        },
+        {
+            "vm": {
+                "crosvm":{
+                }
+            },
+            "security": {
+                "serial_number": "@random"
+            }
+        }
+    ]
+}
+  )"""";
+
+  Json::Value json_configs;
+  std::string json_text(test_string);
+
+  EXPECT_TRUE(ParseJsonString(json_text, json_configs))
+      << "Invalid Json string";
+  auto serialized_data = LaunchCvdParserTester(json_configs);
+  EXPECT_TRUE(serialized_data.ok()) << serialized_data.error().Trace();
+  EXPECT_TRUE(FindConfig(*serialized_data, R"(--use_random_serial=true,true)"))
+      << "use_random_serial flag is missing or wrongly formatted";
+}
+#endif
+
+TEST(BootFlagsParserTest, ParseTwoInstancesEnforceSecurityFlagEmptyJson) {
+  const char* test_string = R""""(
+{
+    "instances" :
+    [
+        {
+            "vm": {
+                "crosvm":{
+                }
+            }
+        },
+        {
+            "vm": {
+                "crosvm":{
+                }
+            }
+        }
+    ]
+}
+  )"""";
+
+  Json::Value json_configs;
+  std::string json_text(test_string);
+
+  EXPECT_TRUE(ParseJsonString(json_text, json_configs))
+      << "Invalid Json string";
+  auto serialized_data = LaunchCvdParserTester(json_configs);
+  EXPECT_TRUE(serialized_data.ok()) << serialized_data.error().Trace();
+  EXPECT_TRUE(
+      FindConfig(*serialized_data, R"(--guest_enforce_security=true,true)"))
+      << "guest_enforce_security flag is missing or wrongly formatted";
+}
+
+TEST(BootFlagsParserTest, ParseTwoInstancesEnforceSecurityFlagPartialJson) {
+  const char* test_string = R""""(
+{
+    "instances" :
+    [
+        {
+            "vm": {
+                "crosvm":{
+                }
+            },
+            "security": {
+            }
+        },
+        {
+            "vm": {
+                "crosvm":{
+                }
+            },
+            "security": {
+                "guest_enforce_security": false
+            }
+        }
+    ]
+}
+  )"""";
+
+  Json::Value json_configs;
+  std::string json_text(test_string);
+
+  EXPECT_TRUE(ParseJsonString(json_text, json_configs))
+      << "Invalid Json string";
+  auto serialized_data = LaunchCvdParserTester(json_configs);
+  EXPECT_TRUE(serialized_data.ok()) << serialized_data.error().Trace();
+  EXPECT_TRUE(
+      FindConfig(*serialized_data, R"(--guest_enforce_security=true,false)"))
+      << "guest_enforce_security flag is missing or wrongly formatted";
+}
+
+TEST(BootFlagsParserTest, ParseTwoInstancesEnforceSecurityFlagFullJson) {
+  const char* test_string = R""""(
+{
+    "instances" :
+    [
+        {
+            "vm": {
+                "crosvm":{
+                }
+            },
+            "security": {
+                "guest_enforce_security": false
+            }
+        },
+        {
+            "vm": {
+                "crosvm":{
+                }
+            },
+            "security": {
+                "guest_enforce_security": false
+            }
+        }
+    ]
+}
+  )"""";
+
+  Json::Value json_configs;
+  std::string json_text(test_string);
+
+  EXPECT_TRUE(ParseJsonString(json_text, json_configs))
+      << "Invalid Json string";
+  auto serialized_data = LaunchCvdParserTester(json_configs);
+  EXPECT_TRUE(serialized_data.ok()) << serialized_data.error().Trace();
+  EXPECT_TRUE(
+      FindConfig(*serialized_data, R"(--guest_enforce_security=false,false)"))
+      << "guest_enforce_security flag is missing or wrongly formatted";
+}
+
+#ifndef GENERATE_MVP_FLAGS_ONLY
+TEST(BootFlagsParserTest, ParseTwoInstancesKernelCmdFlagEmptyJson) {
+  const char* test_string = R""""(
+{
+    "instances" :
+    [
+        {
+            "vm": {
+                "crosvm":{
+                }
+            }
+        },
+        {
+            "vm": {
+                "crosvm":{
+                }
+            }
+        }
+    ]
+}
+  )"""";
+
+  Json::Value json_configs;
+  std::string json_text(test_string);
+
+  EXPECT_TRUE(ParseJsonString(json_text, json_configs))
+      << "Invalid Json string";
+  auto serialized_data = LaunchCvdParserTester(json_configs);
+  EXPECT_TRUE(serialized_data.ok()) << serialized_data.error().Trace();
+  EXPECT_TRUE(FindConfig(*serialized_data, R"(--extra_kernel_cmdline=,)"))
+      << "extra_kernel_cmdline flag is missing or wrongly formatted";
+}
+
+TEST(BootFlagsParserTest, ParseTwoInstancesKernelCmdFlagPartialJson) {
+  const char* test_string = R""""(
+{
+    "instances" :
+    [
+        {
+            "vm": {
+                "crosvm":{
+                }
+            },
+            "boot": {
+                "kernel": {
+                }
+            }
+        },
+        {
+            "vm": {
+                "crosvm":{
+                }
+            },
+            "boot": {
+                "kernel": {
+                    "extra_kernel_cmdline": "androidboot.selinux=permissive"
+                }
+            }
+        }
+    ]
+}
+  )"""";
+
+  Json::Value json_configs;
+  std::string json_text(test_string);
+
+  EXPECT_TRUE(ParseJsonString(json_text, json_configs))
+      << "Invalid Json string";
+  auto serialized_data = LaunchCvdParserTester(json_configs);
+  EXPECT_TRUE(serialized_data.ok()) << serialized_data.error().Trace();
+  EXPECT_TRUE(
+      FindConfig(*serialized_data,
+                 R"(--extra_kernel_cmdline=,androidboot.selinux=permissive)"))
+      << "extra_kernel_cmdline flag is missing or wrongly formatted";
+}
+
+TEST(BootFlagsParserTest, ParseTwoInstancesKernelCmdFlagFullJson) {
+  const char* test_string = R""""(
+{
+    "instances" :
+    [
+        {
+            "vm": {
+                "crosvm":{
+                }
+            },
+            "boot": {
+                "kernel": {
+                    "extra_kernel_cmdline": "androidboot.selinux=permissive"
+                }
+            }
+        },
+        {
+            "vm": {
+                "crosvm":{
+                }
+            },
+            "boot": {
+                "kernel": {
+                    "extra_kernel_cmdline": "lpm_levels.sleep_disabled=1"
+                }
+            }
+        }
+    ]
+}
+  )"""";
+
+  Json::Value json_configs;
+  std::string json_text(test_string);
+
+  EXPECT_TRUE(ParseJsonString(json_text, json_configs))
+      << "Invalid Json string";
+  auto serialized_data = LaunchCvdParserTester(json_configs);
+  EXPECT_TRUE(serialized_data.ok()) << serialized_data.error().Trace();
+  EXPECT_TRUE(FindConfig(
+      *serialized_data,
+      R"(--extra_kernel_cmdline=androidboot.selinux=permissive,lpm_levels.sleep_disabled=1)"))
+      << "extra_kernel_cmdline flag is missing or wrongly formatted";
+}
+#endif
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/unittests/parser/instance/graphics_configs_test.cc b/host/commands/cvd/unittests/parser/instance/graphics_configs_test.cc
new file mode 100644
index 0000000..57956fe
--- /dev/null
+++ b/host/commands/cvd/unittests/parser/instance/graphics_configs_test.cc
@@ -0,0 +1,175 @@
+/*
+ * Copyright (C) 2015-2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <gtest/gtest.h>
+#include "host/commands/cvd/parser/launch_cvd_parser.h"
+#include "host/commands/cvd/unittests/parser/test_common.h"
+
+namespace cuttlefish {
+
+TEST(BootFlagsParserTest, ParseTwoInstancesDisplaysFlagEmptyJson) {
+  const char* test_string = R""""(
+{
+    "instances" :
+    [
+        {
+        },
+        {
+        }
+    ]
+}
+)"""";
+
+  const char* expected_string =
+      R""""(--displays_binproto=Cg0KCwjQBRCAChjAAiA8Cg0KCwjQBRCAChjAAiA8)"""";
+
+  Json::Value json_configs;
+  std::string json_text(test_string);
+
+  EXPECT_TRUE(ParseJsonString(json_text, json_configs))
+      << "Invalid Json string";
+  auto serialized_data = LaunchCvdParserTester(json_configs);
+  EXPECT_TRUE(serialized_data.ok()) << serialized_data.error().Trace();
+  EXPECT_TRUE(FindConfig(*serialized_data, expected_string))
+      << "extra_bootconfig_args flag is missing or wrongly formatted";
+}
+
+TEST(BootFlagsParserTest, ParseTwoInstancesDisplaysFlagEmptyGraphics) {
+  const char* test_string = R""""(
+{
+    "instances" :
+    [
+        {
+            "graphics": {
+            }
+        },
+        {
+            "graphics": {
+            }
+        }
+    ]
+}
+  )"""";
+
+  const char* expected_string =
+      R""""(--displays_binproto=Cg0KCwjQBRCAChjAAiA8Cg0KCwjQBRCAChjAAiA8)"""";
+
+  Json::Value json_configs;
+  std::string json_text(test_string);
+
+  EXPECT_TRUE(ParseJsonString(json_text, json_configs))
+      << "Invalid Json string";
+  auto serialized_data = LaunchCvdParserTester(json_configs);
+  EXPECT_TRUE(serialized_data.ok()) << serialized_data.error().Trace();
+  EXPECT_TRUE(FindConfig(*serialized_data, expected_string))
+      << "extra_bootconfig_args flag is missing or wrongly formatted";
+}
+
+TEST(BootFlagsParserTest, ParseTwoInstancesDisplaysFlagEmptyDisplays) {
+  const char* test_string = R""""(
+{
+    "instances" :
+    [
+        {
+            "graphics":{
+                "displays":[
+                    {
+                    }
+                ]
+                }
+        },
+        {
+            "graphics":{
+                "displays":[
+                    {
+                    },
+                    {
+                    }
+                ]
+                }
+        }
+    ]
+}
+)"""";
+
+  const char* expected_string =
+      R""""(--displays_binproto=Cg0KCwjQBRCAChjAAiA8ChoKCwjQBRCAChjAAiA8CgsI0AUQgAoYwAIgPA==)"""";
+
+  Json::Value json_configs;
+  std::string json_text(test_string);
+
+  EXPECT_TRUE(ParseJsonString(json_text, json_configs))
+      << "Invalid Json string";
+  auto serialized_data = LaunchCvdParserTester(json_configs);
+  EXPECT_TRUE(serialized_data.ok()) << serialized_data.error().Trace();
+  EXPECT_TRUE(FindConfig(*serialized_data, expected_string))
+      << "extra_bootconfig_args flag is missing or wrongly formatted";
+}
+
+TEST(BootFlagsParserTest, ParseTwoInstancesAutoTabletDisplaysFlag) {
+  const char* test_string = R""""(
+{
+    "instances" :
+    [
+        {
+            "graphics":{
+                "displays":[
+                    {
+                        "width": 1080,
+                        "height": 600,
+                        "dpi": 120,
+                        "refresh_rate_hertz": 60
+                    },
+                    {
+                        "width": 400,
+                        "height": 600,
+                        "dpi": 120,
+                        "refresh_rate_hertz": 60
+                    }
+                ]
+                }
+        },
+        {
+            "graphics":{
+                "displays":[
+                    {
+                        "width": 2560,
+                        "height": 1800,
+                        "dpi": 320,
+                        "refresh_rate_hertz": 60
+                    }
+                ]
+                }
+        }
+    ]
+}
+  )"""";
+
+  const char* expected_string =
+      R""""(--displays_binproto=ChgKCgi4CBDYBBh4IDwKCgiQAxDYBBh4IDwKDQoLCIAUEIgOGMACIDw=)"""";
+
+  Json::Value json_configs;
+  std::string json_text(test_string);
+
+  EXPECT_TRUE(ParseJsonString(json_text, json_configs))
+      << "Invalid Json string";
+  auto serialized_data = LaunchCvdParserTester(json_configs);
+  EXPECT_TRUE(serialized_data.ok()) << serialized_data.error().Trace();
+  EXPECT_TRUE(FindConfig(*serialized_data, expected_string))
+      << "extra_bootconfig_args flag is missing or wrongly formatted";
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/unittests/parser/instance/metrics_configs_test.cc b/host/commands/cvd/unittests/parser/instance/metrics_configs_test.cc
new file mode 100644
index 0000000..44c7bf7
--- /dev/null
+++ b/host/commands/cvd/unittests/parser/instance/metrics_configs_test.cc
@@ -0,0 +1,92 @@
+/*
+ * Copyright (C) 2015-2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <gtest/gtest.h>
+#include "host/commands/cvd/parser/launch_cvd_parser.h"
+#include "host/commands/cvd/unittests/parser/test_common.h"
+
+namespace cuttlefish {
// An empty "metrics" object is expected to be rejected by config validation:
// the JSON itself parses, but LaunchCvdParserTester must return an error.
TEST(MetricsFlagsParserTest, ParseOneInstanceMetricsReportInvalidValue) {
  const char* test_string = R""""(
{
    "instances" :
    [
        {
            "metrics": {
            }
        }
    ]
}
  )"""";

  Json::Value json_configs;
  std::string json_text(test_string);

  EXPECT_TRUE(ParseJsonString(json_text, json_configs))
      << "Invalid Json string";
  auto serialized_data = LaunchCvdParserTester(json_configs);
  // NOTE(review): the streamed message only evaluates when this EXPECT fails,
  // i.e. when serialized_data.ok() is true — in that case error() is called
  // on an ok Result, which may be invalid. Confirm Result::error() semantics.
  EXPECT_FALSE(serialized_data.ok()) << serialized_data.error().Trace();
}
+
// A single instance with no "metrics" section: the parser must still emit
// --report_anonymous_usage_stats with the default value "n".
TEST(MetricsFlagsParserTest, ParseOneInstancesMetricsReportFlagEmptyJson) {
  const char* test_string = R""""(
{
    "instances" :
    [
        {
        }
    ]
}
  )"""";

  Json::Value json_configs;
  std::string json_text(test_string);

  EXPECT_TRUE(ParseJsonString(json_text, json_configs))
      << "Invalid Json string";
  auto serialized_data = LaunchCvdParserTester(json_configs);
  EXPECT_TRUE(serialized_data.ok()) << serialized_data.error().Trace();
  EXPECT_TRUE(
      FindConfig(*serialized_data, R"(--report_anonymous_usage_stats=n)"))
      << "report_anonymous_usage_stats flag is missing or wrongly formatted";
}
+
// Two instances with no "metrics" section: the flag is emitted once with the
// default value "n" (not duplicated per instance).
TEST(MetricsFlagsParserTest, ParseTwoInstancesMetricsReportFlagEmptyJson) {
  const char* test_string = R""""(
{
    "instances" :
    [
        {
        },
        {
        }
    ]
}
  )"""";

  Json::Value json_configs;
  std::string json_text(test_string);

  EXPECT_TRUE(ParseJsonString(json_text, json_configs))
      << "Invalid Json string";
  auto serialized_data = LaunchCvdParserTester(json_configs);
  EXPECT_TRUE(serialized_data.ok()) << serialized_data.error().Trace();
  EXPECT_TRUE(
      FindConfig(*serialized_data, R"(--report_anonymous_usage_stats=n)"))
      << "report_anonymous_usage_stats flag is missing or wrongly formatted";
}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/unittests/parser/instance/vm_configs_test.cc b/host/commands/cvd/unittests/parser/instance/vm_configs_test.cc
new file mode 100644
index 0000000..f72e551
--- /dev/null
+++ b/host/commands/cvd/unittests/parser/instance/vm_configs_test.cc
@@ -0,0 +1,827 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <algorithm>
+#include <fstream>
+#include <iostream>
+
+#include <android-base/file.h>
+
+#include <gtest/gtest.h>
+
+#include "host/commands/cvd/parser/launch_cvd_parser.h"
+#include "host/commands/cvd/unittests/parser/test_common.h"
+
+namespace cuttlefish {
+
// --cpus: neither instance sets "cpus", so both fall back to the default (2),
// joined into one comma-separated flag value in instance order.
TEST(VmFlagsParserTest, ParseTwoInstancesCpuFlagEmptyJson) {
  const char* test_string = R""""(
{
    "instances" :
    [
        {
            "vm": {
                "crosvm":{
                }
            }
        },
        {
            "vm": {
                "crosvm":{
                }
            }
        }
    ]
}
  )"""";

  Json::Value json_configs;
  std::string json_text(test_string);

  EXPECT_TRUE(ParseJsonString(json_text, json_configs))
      << "Invalid Json string";
  auto serialized_data = LaunchCvdParserTester(json_configs);
  EXPECT_TRUE(serialized_data.ok()) << serialized_data.error().Trace();
  EXPECT_TRUE(FindConfig(*serialized_data, "--cpus=2,2"))
      << "cpus flag is missing or wrongly formatted";
}

// --cpus: only the second instance overrides "cpus"; the first keeps the
// default, producing a mixed default/explicit value list.
TEST(VmFlagsParserTest, ParseTwoInstancesCpuFlagPartialJson) {
  const char* test_string = R""""(
{
    "instances" :
    [
        {
            "vm": {
                "crosvm":{
                }
            }
        },
        {
            "vm": {
                "crosvm":{
                },
                "cpus": 4
            }
        }
    ]
}
  )"""";

  Json::Value json_configs;
  std::string json_text(test_string);

  EXPECT_TRUE(ParseJsonString(json_text, json_configs))
      << "Invalid Json string";
  auto serialized_data = LaunchCvdParserTester(json_configs);
  EXPECT_TRUE(serialized_data.ok()) << serialized_data.error().Trace();
  EXPECT_TRUE(FindConfig(*serialized_data, "--cpus=2,4"))
      << "cpus flag is missing or wrongly formatted";
}

// --cpus: both instances set explicit values; both must appear in order.
TEST(VmFlagsParserTest, ParseTwoInstancesCpuFlagFullJson) {
  const char* test_string = R""""(
{
    "instances" :
    [
        {
            "vm": {
                "crosvm":{
                },
                "cpus": 4
            }
        },
        {
            "vm": {
                "crosvm":{
                },
                "cpus": 6
            }
        }
    ]
}
  )"""";

  Json::Value json_configs;
  std::string json_text(test_string);

  EXPECT_TRUE(ParseJsonString(json_text, json_configs))
      << "Invalid Json string";
  auto serialized_data = LaunchCvdParserTester(json_configs);
  EXPECT_TRUE(serialized_data.ok()) << serialized_data.error().Trace();
  EXPECT_TRUE(FindConfig(*serialized_data, "--cpus=4,6"))
      << "cpus flag is missing or wrongly formatted";
}
+
// --memory_mb: neither instance sets "memory_mb"; both fall back to the
// default (2048), joined into one comma-separated flag value.
TEST(VmFlagsParserTest, ParseTwoInstancesMemoryFlagEmptyJson) {
  const char* test_string = R""""(
{
    "instances" :
    [
        {
            "vm": {
                "crosvm":{
                }
            }
        },
        {
            "vm": {
                "crosvm":{
                }
            }
        }
    ]
}
  )"""";

  Json::Value json_configs;
  std::string json_text(test_string);

  EXPECT_TRUE(ParseJsonString(json_text, json_configs))
      << "Invalid Json string";
  auto serialized_data = LaunchCvdParserTester(json_configs);
  EXPECT_TRUE(serialized_data.ok()) << serialized_data.error().Trace();
  EXPECT_TRUE(FindConfig(*serialized_data, "--memory_mb=2048,2048"))
      << "memory_mb flag is missing or wrongly formatted";
}

// --memory_mb: only the second instance overrides; default + explicit value.
TEST(VmFlagsParserTest, ParseTwoInstancesMemoryFlagPartialJson) {
  const char* test_string = R""""(
{
    "instances" :
    [
        {
            "vm": {
                "crosvm":{
                }
            }
        },
        {
            "vm": {
                "crosvm":{
                },
                "memory_mb": 4096
            }
        }
    ]
}
  )"""";

  Json::Value json_configs;
  std::string json_text(test_string);

  EXPECT_TRUE(ParseJsonString(json_text, json_configs))
      << "Invalid Json string";
  auto serialized_data = LaunchCvdParserTester(json_configs);
  EXPECT_TRUE(serialized_data.ok()) << serialized_data.error().Trace();
  EXPECT_TRUE(FindConfig(*serialized_data, "--memory_mb=2048,4096"))
      << "memory_mb flag is missing or wrongly formatted";
}

// --memory_mb: both instances set explicit values; both appear in order.
TEST(VmFlagsParserTest, ParseTwoInstancesMemoryFlagFullJson) {
  const char* test_string = R""""(
{
    "instances" :
    [
        {
            "vm": {
                "crosvm":{
                },
                "memory_mb": 4096
            }
        },
        {
            "vm": {
                "crosvm":{
                },
                "memory_mb": 8192
            }
        }
    ]
}
  )"""";

  Json::Value json_configs;
  std::string json_text(test_string);

  EXPECT_TRUE(ParseJsonString(json_text, json_configs))
      << "Invalid Json string";
  auto serialized_data = LaunchCvdParserTester(json_configs);
  EXPECT_TRUE(serialized_data.ok()) << serialized_data.error().Trace();
  EXPECT_TRUE(FindConfig(*serialized_data, "--memory_mb=4096,8192"))
      << "memory_mb flag is missing or wrongly formatted";
}
+
// --use_sdcard: fully empty instances default to true for each instance.
TEST(VmFlagsParserTest, ParseTwoInstancesSdCardFlagEmptyJson) {
  const char* test_string = R""""(
{
    "instances" :
    [
        {
        },
        {
        }
    ]
}
  )"""";

  Json::Value json_configs;
  std::string json_text(test_string);

  EXPECT_TRUE(ParseJsonString(json_text, json_configs))
      << "Invalid Json string";
  auto serialized_data = LaunchCvdParserTester(json_configs);
  EXPECT_TRUE(serialized_data.ok()) << serialized_data.error().Trace();
  EXPECT_TRUE(FindConfig(*serialized_data, "--use_sdcard=true,true"))
      << "use_sdcard flag is missing or wrongly formatted";
}

// --use_sdcard: only the second instance disables it; default + explicit.
TEST(VmFlagsParserTest, ParseTwoInstancesSdCardFlagPartialJson) {
  const char* test_string = R""""(
{
    "instances" :
    [
        {
        },
        {
            "vm": {
                "use_sdcard": false
            }
        }
    ]
}
  )"""";

  Json::Value json_configs;
  std::string json_text(test_string);

  EXPECT_TRUE(ParseJsonString(json_text, json_configs))
      << "Invalid Json string";
  auto serialized_data = LaunchCvdParserTester(json_configs);
  EXPECT_TRUE(serialized_data.ok()) << serialized_data.error().Trace();
  EXPECT_TRUE(FindConfig(*serialized_data, "--use_sdcard=true,false"))
      << "use_sdcard flag is missing or wrongly formatted";
}

// --use_sdcard: both instances explicitly disable it.
TEST(VmFlagsParserTest, ParseTwoInstancesSdCardFlagFullJson) {
  const char* test_string = R""""(
{
    "instances" :
    [
        {
            "vm": {
                "use_sdcard": false
            }
        },
        {
            "vm": {
                "use_sdcard": false
            }
        }
    ]
}
  )"""";

  Json::Value json_configs;
  std::string json_text(test_string);

  EXPECT_TRUE(ParseJsonString(json_text, json_configs))
      << "Invalid Json string";
  auto serialized_data = LaunchCvdParserTester(json_configs);
  EXPECT_TRUE(serialized_data.ok()) << serialized_data.error().Trace();
  EXPECT_TRUE(FindConfig(*serialized_data, "--use_sdcard=false,false"))
      << "use_sdcard flag is missing or wrongly formatted";
}
+
// --vm_manager: the VMM is selected by which key ("crosvm"/"qemu"/"gem5")
// appears under "vm". Both instances use crosvm here.
TEST(VmFlagsParserTest, ParseTwoInstancesVmManagerFlagEmptyJson) {
  const char* test_string = R""""(
{
    "instances" :
    [
        {
            "vm": {
                "crosvm":{
                }
            }
        },
        {
            "vm": {
                "crosvm":{
                }
            }
        }
    ]
}
  )"""";

  Json::Value json_configs;
  std::string json_text(test_string);

  EXPECT_TRUE(ParseJsonString(json_text, json_configs))
      << "Invalid Json string";
  auto serialized_data = LaunchCvdParserTester(json_configs);
  EXPECT_TRUE(serialized_data.ok()) << serialized_data.error().Trace();
  EXPECT_TRUE(FindConfig(*serialized_data, R"(--vm_manager=crosvm,crosvm)"))
      << "vm_manager flag is missing or wrongly formatted";
}

// --vm_manager: mixed VMMs across instances (crosvm and gem5).
TEST(VmFlagsParserTest, ParseTwoInstancesVmManagerFlagPartialJson) {
  const char* test_string = R""""(
{
    "instances" :
    [
        {
            "vm": {
                "crosvm":{
                }
            }
        },
        {
            "vm": {
                "gem5":{
                }
            }
        }
    ]
}
  )"""";

  Json::Value json_configs;
  std::string json_text(test_string);

  EXPECT_TRUE(ParseJsonString(json_text, json_configs))
      << "Invalid Json string";
  auto serialized_data = LaunchCvdParserTester(json_configs);
  EXPECT_TRUE(serialized_data.ok()) << serialized_data.error().Trace();
  EXPECT_TRUE(FindConfig(*serialized_data, R"(--vm_manager=crosvm,gem5)"))
      << "vm_manager flag is missing or wrongly formatted";
}

// --vm_manager: the "qemu" JSON key maps to the "qemu_cli" manager name.
TEST(VmFlagsParserTest, ParseTwoInstancesVmManagerFlagFullJson) {
  const char* test_string = R""""(
{
    "instances" :
    [
        {
            "vm": {
                "qemu":{
                }
            }
        },
        {
            "vm": {
                "crosvm":{
                }
            }
        }
    ]
}
  )"""";

  Json::Value json_configs;
  std::string json_text(test_string);

  EXPECT_TRUE(ParseJsonString(json_text, json_configs))
      << "Invalid Json string";
  auto serialized_data = LaunchCvdParserTester(json_configs);
  EXPECT_TRUE(serialized_data.ok()) << serialized_data.error().Trace();
  EXPECT_TRUE(FindConfig(*serialized_data, R"(--vm_manager=qemu_cli,crosvm)"))
      << "vm_manager flag is missing or wrongly formatted";
}

// --vm_manager: with an empty "vm" object or no "vm" at all, crosvm is the
// default manager for every instance.
TEST(VmFlagsParserTest, ParseTwoInstancesVmManagerFlagDefault) {
  const char* test_string = R""""(
{
    "instances" :
    [
        {
            "vm": {
            }
        },
        {
        }
    ]
}
  )"""";

  Json::Value json_configs;
  std::string json_text(test_string);

  EXPECT_TRUE(ParseJsonString(json_text, json_configs))
      << "Invalid Json string";
  auto serialized_data = LaunchCvdParserTester(json_configs);
  EXPECT_TRUE(serialized_data.ok()) << serialized_data.error().Trace();
  EXPECT_TRUE(FindConfig(*serialized_data, R"(--vm_manager=crosvm,crosvm)"))
      << "vm_manager flag is missing or wrongly formatted";
}
+
// setupwizard_mode: "ENABLED" is not an accepted value; JSON parsing succeeds
// but the config validator/parser must reject it.
TEST(VmFlagsParserTest, ParseOneInstanceSetupWizardInvalidValue) {
  const char* test_string = R""""(
{
    "instances" :
    [
        {
            "vm": {
                "crosvm":{
                },
                "setupwizard_mode": "ENABLED"
            }
        }
    ]
}
  )"""";

  Json::Value json_configs;
  std::string json_text(test_string);

  EXPECT_TRUE(ParseJsonString(json_text, json_configs));
  auto serialized_data = LaunchCvdParserTester(json_configs);
  EXPECT_FALSE(serialized_data.ok());
}

// setupwizard_mode: omitted in both instances, defaulting to DISABLED each.
TEST(VmFlagsParserTest, ParseTwoInstancesSetupWizardFlagEmptyJson) {
  const char* test_string = R""""(
{
    "instances" :
    [
        {
            "vm": {
                "crosvm":{
                }
            }
        },
        {
            "vm": {
                "crosvm":{
                }
            }
        }
    ]
}
  )"""";

  Json::Value json_configs;
  std::string json_text(test_string);

  EXPECT_TRUE(ParseJsonString(json_text, json_configs))
      << "Invalid Json string";
  auto serialized_data = LaunchCvdParserTester(json_configs);
  EXPECT_TRUE(serialized_data.ok()) << serialized_data.error().Trace();
  EXPECT_TRUE(
      FindConfig(*serialized_data, R"(--setupwizard_mode=DISABLED,DISABLED)"))
      << "setupwizard_mode flag is missing or wrongly formatted";
}

// setupwizard_mode: only the second instance overrides (REQUIRED); the first
// keeps the DISABLED default.
TEST(VmFlagsParserTest, ParseTwoInstancesSetupWizardFlagPartialJson) {
  const char* test_string = R""""(
{
    "instances" :
    [
        {
            "vm": {
                "crosvm":{
                }
            }
        },
        {
            "vm": {
                "crosvm":{
                },
                "setupwizard_mode": "REQUIRED"
            }
        }
    ]
}
  )"""";

  Json::Value json_configs;
  std::string json_text(test_string);

  EXPECT_TRUE(ParseJsonString(json_text, json_configs))
      << "Invalid Json string";
  auto serialized_data = LaunchCvdParserTester(json_configs);
  EXPECT_TRUE(serialized_data.ok()) << serialized_data.error().Trace();
  EXPECT_TRUE(
      FindConfig(*serialized_data, R"(--setupwizard_mode=DISABLED,REQUIRED)"))
      << "setupwizard_mode flag is missing or wrongly formatted";
}

// setupwizard_mode: explicit values on both instances (OPTIONAL, REQUIRED).
TEST(VmFlagsParserTest, ParseTwoInstancesSetupWizardFlagFullJson) {
  const char* test_string = R""""(
{
    "instances" :
    [
        {
            "vm": {
                "crosvm":{
                },
                "setupwizard_mode": "OPTIONAL"
            }
        },
        {
            "vm": {
                "crosvm":{
                },
                "setupwizard_mode": "REQUIRED"
            }
        }
    ]
}
  )"""";

  Json::Value json_configs;
  std::string json_text(test_string);

  EXPECT_TRUE(ParseJsonString(json_text, json_configs))
      << "Invalid Json string";
  auto serialized_data = LaunchCvdParserTester(json_configs);
  EXPECT_TRUE(serialized_data.ok()) << serialized_data.error().Trace();
  EXPECT_TRUE(
      FindConfig(*serialized_data, R"(--setupwizard_mode=OPTIONAL,REQUIRED)"))
      << "setupwizard_mode flag is missing or wrongly formatted";
}
+
// NOTE(review): test_common.h defines GENERATE_MVP_FLAGS_ONLY, so everything
// in this #ifndef region is compiled out — confirm these uuid tests are
// intentionally disabled.
#ifndef GENERATE_MVP_FLAGS_ONLY
// --uuid: omitted in both instances; each falls back to the same default
// uuid (699acfc4-c8c4-11e7-882b-5065f31dc101).
TEST(VmFlagsParserTest, ParseTwoInstancesUuidFlagEmptyJson) {
  const char* test_string = R""""(
{
    "instances" :
    [
        {
            "vm": {
                "crosvm":{
                }
            }
        },
        {
            "vm": {
                "crosvm":{
                }
            }
        }
    ]
}
  )"""";

  Json::Value json_configs;
  std::string json_text(test_string);

  EXPECT_TRUE(ParseJsonString(json_text, json_configs))
      << "Invalid Json string";
  auto serialized_data = LaunchCvdParserTester(json_configs);
  EXPECT_TRUE(serialized_data.ok()) << serialized_data.error().Trace();
  EXPECT_TRUE(FindConfig(
      *serialized_data,
      R"(--uuid=699acfc4-c8c4-11e7-882b-5065f31dc101,699acfc4-c8c4-11e7-882b-5065f31dc101)"))
      << "uuid flag is missing or wrongly formatted";
}

// --uuid: only the second instance overrides; default + explicit value.
TEST(VmFlagsParserTest, ParseTwoInstancesUuidFlagPartialJson) {
  const char* test_string = R""""(
{
    "instances" :
    [
        {
            "vm": {
                "crosvm":{
                }
            }
        },
        {
            "vm": {
                "crosvm":{
                },
                "uuid": "870acfc4-c8c4-11e7-99ac-5065f31dc250"
            }
        }
    ]
}
  )"""";

  Json::Value json_configs;
  std::string json_text(test_string);

  EXPECT_TRUE(ParseJsonString(json_text, json_configs))
      << "Invalid Json string";
  auto serialized_data = LaunchCvdParserTester(json_configs);
  EXPECT_TRUE(serialized_data.ok()) << serialized_data.error().Trace();
  EXPECT_TRUE(FindConfig(
      *serialized_data,
      R"(--uuid=699acfc4-c8c4-11e7-882b-5065f31dc101,870acfc4-c8c4-11e7-99ac-5065f31dc250)"))
      << "uuid flag is missing or wrongly formatted";
}

// --uuid: explicit distinct uuids on both instances; both must appear.
TEST(VmFlagsParserTest, ParseTwoInstancesUuidFlagFullJson) {
  const char* test_string = R""""(
{
    "instances" :
    [
        {
            "vm": {
                "crosvm":{
                },
                "uuid": "870acfc4-c8c4-11e7-99ac-5065f31dc250"
            }
        },
        {
            "vm": {
                "crosvm":{
                },
                "uuid": "870acfc4-c8c4-11e7-99ac-5065f31dc251"
            }
        }
    ]
}
  )"""";

  Json::Value json_configs;
  std::string json_text(test_string);

  EXPECT_TRUE(ParseJsonString(json_text, json_configs))
      << "Invalid Json string";
  auto serialized_data = LaunchCvdParserTester(json_configs);
  EXPECT_TRUE(serialized_data.ok()) << serialized_data.error().Trace();
  EXPECT_TRUE(FindConfig(
      *serialized_data,
      R"(--uuid=870acfc4-c8c4-11e7-99ac-5065f31dc250,870acfc4-c8c4-11e7-99ac-5065f31dc251)"))
      << "uuid flag is missing or wrongly formatted";
}
#endif
+
// --enable_sandbox: omitted under "crosvm" in both instances; defaults to
// false for each, joined into one comma-separated value.
TEST(VmFlagsParserTest, ParseTwoInstancesSandboxFlagEmptyJson) {
  const char* test_string = R""""(
{
    "instances" :
    [
        {
            "vm": {
                "crosvm":{
                }
            }
        },
        {
            "vm": {
                "crosvm":{
                }
            }
        }
    ]
}
  )"""";

  Json::Value json_configs;
  std::string json_text(test_string);

  EXPECT_TRUE(ParseJsonString(json_text, json_configs))
      << "Invalid Json string";
  auto serialized_data = LaunchCvdParserTester(json_configs);
  EXPECT_TRUE(serialized_data.ok()) << serialized_data.error().Trace();
  EXPECT_TRUE(FindConfig(*serialized_data, R"(--enable_sandbox=false,false)"))
      << "enable_sandbox flag is missing or wrongly formatted";
}

// --enable_sandbox: only the second instance enables it (note the key lives
// inside the "crosvm" object, unlike cpus/memory_mb).
TEST(VmFlagsParserTest, ParseTwoInstancesSandboxFlagPartialJson) {
  const char* test_string = R""""(
{
    "instances" :
    [
        {
            "vm": {
                "crosvm":{
                }
            }
        },
        {
            "vm": {
                "crosvm":{
                    "enable_sandbox": true
                }
            }
        }
    ]
}
  )"""";

  Json::Value json_configs;
  std::string json_text(test_string);

  EXPECT_TRUE(ParseJsonString(json_text, json_configs))
      << "Invalid Json string";
  auto serialized_data = LaunchCvdParserTester(json_configs);
  EXPECT_TRUE(serialized_data.ok()) << serialized_data.error().Trace();
  EXPECT_TRUE(FindConfig(*serialized_data, R"(--enable_sandbox=false,true)"))
      << "enable_sandbox flag is missing or wrongly formatted";
}

// --enable_sandbox: both instances enable it explicitly.
TEST(VmFlagsParserTest, ParseTwoInstancesSandboxFlagFullJson) {
  const char* test_string = R""""(
{
    "instances" :
    [
        {
            "vm": {
                "crosvm":{
                    "enable_sandbox": true
                }
            }
        },
        {
            "vm": {
                "crosvm":{
                    "enable_sandbox": true
                }
            }
        }
    ]
}
  )"""";

  Json::Value json_configs;
  std::string json_text(test_string);

  EXPECT_TRUE(ParseJsonString(json_text, json_configs))
      << "Invalid Json string";
  auto serialized_data = LaunchCvdParserTester(json_configs);
  EXPECT_TRUE(serialized_data.ok()) << serialized_data.error().Trace();
  EXPECT_TRUE(FindConfig(*serialized_data, R"(--enable_sandbox=true,true)"))
      << "enable_sandbox flag is missing or wrongly formatted";
}
+
// --custom_actions: with no "custom_actions" key the parser emits the literal
// sentinel value "unset". NOTE(review): the test name says "TwoInstances"
// but the JSON declares only one — consider renaming.
TEST(VmFlagsParserTest, ParseTwoInstancesCustomActionsFlagEmptyJson) {
  const char* test_string = R""""(
{
    "instances" :
    [
        {
        }
    ]
}
  )"""";

  Json::Value json_configs;
  std::string json_text(test_string);

  EXPECT_TRUE(ParseJsonString(json_text, json_configs))
      << "Invalid Json string";
  auto serialized_data = LaunchCvdParserTester(json_configs);
  EXPECT_TRUE(serialized_data.ok()) << serialized_data.error().Trace();
  EXPECT_TRUE(FindConfig(*serialized_data, R"(--custom_actions=unset)"))
      << "custom_actions flag is missing or wrongly formatted";
}

// --custom_actions: per-instance flag — the first (empty) instance yields
// "unset" while the second serializes its actions to escaped JSON. Spaces
// are ignored in the comparison because the serializer's whitespace is not
// part of the contract.
TEST(VmFlagsParserTest, ParseTwoInstancesCustomActionsFlagPartialJson) {
  const char* test_string = R""""(
{
        "instances" :
        [
            {
            },
            {
                "vm": {
                        "custom_actions" : [
                                {
                                        "device_states": [
                                                {
                                                        "lid_switch_open": false,
                                                        "hinge_angle_value": 0
                                                }
                                        ]
                                }
                        ]
                }
            }
        ]
}
  )"""";

  Json::Value json_configs;
  std::string json_text(test_string);
  std::string expected_custom_actions =
      R"""(--custom_actions=[{\"device_states\":[{\"hinge_angle_value\":0,\"lid_switch_open\":false}]}])""";

  EXPECT_TRUE(ParseJsonString(json_text, json_configs))
      << "Invalid Json string";
  auto serialized_data = LaunchCvdParserTester(json_configs);
  EXPECT_TRUE(serialized_data.ok()) << serialized_data.error().Trace();
  EXPECT_TRUE(
      FindConfigIgnoreSpaces(*serialized_data, R"(--custom_actions=unset)"))
      << "custom_actions flag is missing or wrongly formatted";

  EXPECT_TRUE(FindConfigIgnoreSpaces(*serialized_data, expected_custom_actions))
      << "custom_actions flag is missing or wrongly formatted";
}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/unittests/parser/test_common.cc b/host/commands/cvd/unittests/parser/test_common.cc
new file mode 100644
index 0000000..2885cca
--- /dev/null
+++ b/host/commands/cvd/unittests/parser/test_common.cc
@@ -0,0 +1,62 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
#include <algorithm>
#include <fstream>
#include <iostream>
#include <memory>
#include <string>
#include <vector>

#include "host/commands/cvd/parser/cf_flags_validator.h"
#include "host/commands/cvd/parser/launch_cvd_parser.h"
#include "host/commands/cvd/unittests/parser/test_common.h"
+
+namespace cuttlefish {
+
+bool ParseJsonString(std::string& json_text, Json::Value& root) {
+  Json::Reader reader;  //  Reader
+  return reader.parse(json_text, root);
+}
+
// Returns true iff `element` appears verbatim in `vec`.
bool FindConfig(const std::vector<std::string>& vec,
                const std::string& element) {
  return std::find(vec.begin(), vec.end(), element) != vec.end();
}
// Returns true iff `str` matches some element of `vec` once all spaces and
// tabs are stripped from both sides of the comparison.
//
// Bug fix: the original erased tabs from `current` using `target.end()` as
// the erase bound — an iterator into a *different* string, which is
// undefined behavior. Both strings are now cleaned by the same helper.
bool FindConfigIgnoreSpaces(const std::vector<std::string>& vec,
                            const std::string& str) {
  // Removes every space and tab from a copy of the input.
  auto strip = [](std::string s) {
    s.erase(std::remove_if(s.begin(), s.end(),
                           [](char c) { return c == ' ' || c == '\t'; }),
            s.end());
    return s;
  };

  const std::string target = strip(str);
  for (const auto& entry : vec) {
    if (strip(entry) == target) {
      return true;
    }
  }
  return false;
}
+
// Test entry point: validates `root` against the cuttlefish config schema
// and, on success, converts it into the launch_cvd flag list. Returns an
// error Result if validation fails.
Result<std::vector<std::string>> LaunchCvdParserTester(Json::Value& root) {
  CF_EXPECT(ValidateCfConfigs(root), "Loaded Json validation failed");
  return ParseLaunchCvdConfigs(root);
}
+
+}  // namespace cuttlefish
\ No newline at end of file
diff --git a/host/commands/cvd/unittests/parser/test_common.h b/host/commands/cvd/unittests/parser/test_common.h
new file mode 100644
index 0000000..41aec34
--- /dev/null
+++ b/host/commands/cvd/unittests/parser/test_common.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <algorithm>
+#include <fstream>
+#include <iostream>
+#include <string>
+#include <vector>
+
+#include <json/json.h>
+
+#include "common/libs/utils/result.h"
+
+#define GENERATE_MVP_FLAGS_ONLY true
+namespace cuttlefish {
+
+bool ParseJsonString(std::string& json_text, Json::Value& root);
+
+bool FindConfig(const std::vector<std::string>& vec,
+                const std::string& element);
+
+bool FindConfigIgnoreSpaces(const std::vector<std::string>& vec,
+                            const std::string& str);
+Result<std::vector<std::string>> LaunchCvdParserTester(Json::Value& root);
+
+}  // namespace cuttlefish
\ No newline at end of file
diff --git a/host/commands/cvd/unittests/selector/Android.bp b/host/commands/cvd/unittests/selector/Android.bp
new file mode 100644
index 0000000..e81019a
--- /dev/null
+++ b/host/commands/cvd/unittests/selector/Android.bp
@@ -0,0 +1,113 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+cc_test_host {
+    name: "cvd_db_instance_test",
+    srcs: [
+        "instance_record_test.cpp",
+    ],
+    test_options: {
+        unit_test: true,
+    },
+    defaults: ["cvd_and_fetch_cvd_defaults"],
+}
+
+cc_test_host {
+    name: "cvd_db_group_test",
+    srcs: [
+        "group_record_test.cpp",
+    ],
+    test_options: {
+        unit_test: true,
+    },
+    defaults: ["cvd_and_fetch_cvd_defaults"],
+}
+
+cc_test_host {
+    name: "cvd_db_test",
+    srcs: [
+        "instance_database_helper.cpp",
+        "instance_database_test.cpp",
+    ],
+    test_options: {
+        unit_test: true,
+    },
+    defaults: ["cvd_and_fetch_cvd_defaults"],
+}
+
+cc_test_host {
+    name: "cvd_selector_parser_test",
+    srcs: [
+        "parser_ids_helper.cpp",
+        "parser_ids_test.cpp",
+        "parser_names_helper.cpp",
+        "parser_names_test.cpp",
+    ],
+    test_options: {
+        unit_test: true,
+    },
+    defaults: ["cvd_and_fetch_cvd_defaults"],
+}
+
+cc_test_host {
+    name: "cvd_creation_analyzer_test",
+    srcs: [
+        "creation_analyzer_helper.cpp",
+        "creation_analyzer_test.cpp",
+    ],
+    test_options: {
+        unit_test: false,
+    },
+    defaults: ["cvd_and_fetch_cvd_defaults"],
+}
+
+cc_test_host {
+    name: "cvd_client_lexer_test",
+    srcs: [
+        "client_lexer_helper.cpp",
+        "client_lexer_test.cpp",
+    ],
+    test_options: {
+        unit_test: false,
+    },
+    defaults: ["cvd_and_fetch_cvd_defaults"],
+}
+
+cc_test_host {
+    name: "cvd_host_tool_target_test",
+    srcs: [
+        "host_tool_target_test.cpp",
+    ],
+    test_options: {
+        unit_test: false,
+    },
+    defaults: ["cvd_and_fetch_cvd_defaults"],
+}
+
+cc_test_host {
+    name: "cvd_flags_test",
+    srcs: [
+        "cvd_flags_helper.cpp",
+        "cvd_flags_test.cpp",
+    ],
+    test_options: {
+        unit_test: true,
+    },
+    defaults: ["cvd_and_fetch_cvd_defaults"],
+}
diff --git a/host/commands/cvd/unittests/selector/client_lexer_helper.cpp b/host/commands/cvd/unittests/selector/client_lexer_helper.cpp
new file mode 100644
index 0000000..2fbce42
--- /dev/null
+++ b/host/commands/cvd/unittests/selector/client_lexer_helper.cpp
@@ -0,0 +1,31 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "host/commands/cvd/unittests/selector/client_lexer_helper.h"
+
+namespace cuttlefish {
+namespace selector {
+
+LexerTestBase::LexerTestBase() { Init(); }
+
+void LexerTestBase::Init() {
+  auto param = GetParam();
+  known_flags_ = param.known_flags_;
+  lex_input_ = param.lex_input_;
+  expected_tokens_ = param.expected_tokens_;
+}
+
+}  // namespace selector
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/unittests/selector/client_lexer_helper.h b/host/commands/cvd/unittests/selector/client_lexer_helper.h
new file mode 100644
index 0000000..de1800d
--- /dev/null
+++ b/host/commands/cvd/unittests/selector/client_lexer_helper.h
@@ -0,0 +1,55 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#pragma once
+
+#include <optional>
+#include <string>
+#include <vector>
+
+#include <gtest/gtest.h>
+
+#include "host/commands/cvd/selector/arguments_lexer.h"
+
+namespace cuttlefish {
+namespace selector {
+
+using Tokens = std::vector<ArgToken>;
+
+struct LexerInputOutput {
+  LexerFlagsSpecification known_flags_;
+  std::string lex_input_;
+  std::optional<Tokens> expected_tokens_;
+};
+
+class LexerTestBase : public testing::TestWithParam<LexerInputOutput> {
+ protected:
+  LexerTestBase();
+  void Init();
+
+  LexerFlagsSpecification known_flags_;
+  std::string lex_input_;
+  std::optional<Tokens> expected_tokens_;
+};
+
+class EmptyArgsLexTest : public LexerTestBase {};
+class NonBooleanArgsTest : public LexerTestBase {};
+class BooleanArgsTest : public LexerTestBase {};
+class BothArgsTest : public LexerTestBase {};
+
+class BooleanBadArgsTest : public LexerTestBase {};
+
+}  // namespace selector
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/unittests/selector/client_lexer_test.cpp b/host/commands/cvd/unittests/selector/client_lexer_test.cpp
new file mode 100644
index 0000000..5723c63
--- /dev/null
+++ b/host/commands/cvd/unittests/selector/client_lexer_test.cpp
@@ -0,0 +1,221 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include <iostream>
+
+#include <gtest/gtest.h>
+
+#include "host/commands/cvd/selector/arguments_lexer.h"
+#include "host/commands/cvd/unittests/selector/client_lexer_helper.h"
+
+namespace cuttlefish {
+namespace selector {
+namespace {
+
+const LexerFlagsSpecification empty_known_flags;
+const LexerFlagsSpecification boolean_known_flags{
+    .known_boolean_flags = {"clean"}};
+const LexerFlagsSpecification non_boolean_known_flags{
+    .known_value_flags = {"group_name"}};
+const LexerFlagsSpecification both_known_flags{
+    .known_boolean_flags = {"clean"}, .known_value_flags = {"group_name"}};
+
+}  // namespace
+
+TEST_P(EmptyArgsLexTest, SuccessExpectedTest) {
+  auto lexer_gen_result = ArgumentsLexerBuilder::Build(known_flags_);
+  std::unique_ptr<ArgumentsLexer> lexer =
+      lexer_gen_result.ok() ? std::move(*lexer_gen_result) : nullptr;
+  if (!lexer) {
+    GTEST_SKIP() << "Memory allocation failed but it is not in the test scope.";
+  }
+  auto tokenized_result = lexer->Tokenize(lex_input_);
+
+  ASSERT_TRUE(tokenized_result.ok()) << tokenized_result.error().Trace();
+  ASSERT_EQ(*tokenized_result, *expected_tokens_);
+}
+
+INSTANTIATE_TEST_SUITE_P(
+    ClientSpecificOptionParser, EmptyArgsLexTest,
+    testing::Values(LexerInputOutput{.known_flags_ = empty_known_flags,
+                                     .lex_input_ = "",
+                                     .expected_tokens_ = Tokens{}},
+                    LexerInputOutput{.known_flags_ = boolean_known_flags,
+                                     .lex_input_ = "",
+                                     .expected_tokens_ = Tokens{}},
+                    LexerInputOutput{.known_flags_ = non_boolean_known_flags,
+                                     .lex_input_ = "",
+                                     .expected_tokens_ = Tokens{}},
+                    LexerInputOutput{.known_flags_ = both_known_flags,
+                                     .lex_input_ = "",
+                                     .expected_tokens_ = Tokens{}}));
+
+TEST_P(NonBooleanArgsTest, SuccessExpectedTest) {
+  auto lexer_gen_result = ArgumentsLexerBuilder::Build(known_flags_);
+  std::unique_ptr<ArgumentsLexer> lexer =
+      lexer_gen_result.ok() ? std::move(*lexer_gen_result) : nullptr;
+  if (!lexer) {
+    GTEST_SKIP() << "Memory allocation failed but it is not in the test scope.";
+  }
+  auto tokenized_result = lexer->Tokenize(lex_input_);
+
+  ASSERT_TRUE(tokenized_result.ok()) << tokenized_result.error().Trace();
+  ASSERT_EQ(*tokenized_result, *expected_tokens_);
+}
+
+INSTANTIATE_TEST_SUITE_P(
+    ClientSpecificOptionParser, NonBooleanArgsTest,
+    testing::Values(
+        LexerInputOutput{
+            .known_flags_ = non_boolean_known_flags,
+            .lex_input_ = "cvd --group_name=yumi",
+            .expected_tokens_ = Tokens{ArgToken{ArgType::kPositional, "cvd"},
+                                       ArgToken{ArgType::kKnownFlagAndValue,
+                                                "--group_name=yumi"}}},
+        LexerInputOutput{
+            .known_flags_ = non_boolean_known_flags,
+            .lex_input_ = "cvd --group_name yumi",
+            .expected_tokens_ = Tokens{ArgToken{ArgType::kPositional, "cvd"},
+                                       ArgToken{ArgType::kKnownValueFlag,
+                                                "--group_name"},
+                                       ArgToken{ArgType::kPositional, "yumi"}}},
+        LexerInputOutput{.known_flags_ = non_boolean_known_flags,
+                         .lex_input_ = "cvd --group_name yumi start --daemon",
+                         .expected_tokens_ = Tokens{
+                             ArgToken{ArgType::kPositional, "cvd"},
+                             ArgToken{ArgType::kKnownValueFlag, "--group_name"},
+                             ArgToken{ArgType::kPositional, "yumi"},
+                             ArgToken{ArgType::kPositional, "start"},
+                             ArgToken{ArgType::kUnknownFlag, "--daemon"}}}));
+
+TEST_P(BooleanArgsTest, SuccessExpectedTest) {
+  auto lexer_gen_result = ArgumentsLexerBuilder::Build(known_flags_);
+  std::unique_ptr<ArgumentsLexer> lexer =
+      lexer_gen_result.ok() ? std::move(*lexer_gen_result) : nullptr;
+  if (!lexer) {
+    GTEST_SKIP() << "Memory allocation failed but it is not in the test scope.";
+  }
+  auto tokenized_result = lexer->Tokenize(lex_input_);
+
+  ASSERT_TRUE(tokenized_result.ok()) << tokenized_result.error().Trace();
+  ASSERT_EQ(*tokenized_result, *expected_tokens_);
+}
+
+INSTANTIATE_TEST_SUITE_P(
+    ClientSpecificOptionParser, BooleanArgsTest,
+    testing::Values(
+        LexerInputOutput{
+            .known_flags_ = boolean_known_flags,
+            .lex_input_ = "cvd --clean",
+            .expected_tokens_ = Tokens{ArgToken{ArgType::kPositional, "cvd"},
+                                       ArgToken{ArgType::kKnownBoolFlag,
+                                                "--clean"}}},
+        LexerInputOutput{
+            .known_flags_ = boolean_known_flags,
+            .lex_input_ = "cvd --clean=TrUe",
+            .expected_tokens_ = Tokens{ArgToken{ArgType::kPositional, "cvd"},
+                                       ArgToken{ArgType::kKnownBoolFlag,
+                                                "--clean"}}},
+        LexerInputOutput{
+            .known_flags_ = boolean_known_flags,
+            .lex_input_ = "cvd --noclean",
+            .expected_tokens_ = Tokens{ArgToken{ArgType::kPositional, "cvd"},
+                                       ArgToken{ArgType::kKnownBoolNoFlag,
+                                                "--noclean"}}},
+        LexerInputOutput{
+            .known_flags_ = boolean_known_flags,
+            .lex_input_ = "cvd --noclean=redundant",
+            .expected_tokens_ = Tokens{ArgToken{ArgType::kPositional, "cvd"},
+                                       ArgToken{ArgType::kKnownBoolNoFlag,
+                                                "--noclean"}}},
+        LexerInputOutput{
+            .known_flags_ = boolean_known_flags,
+            .lex_input_ = "cvd --clean=no --norandom=y",
+            .expected_tokens_ = Tokens{
+                ArgToken{ArgType::kPositional, "cvd"},
+                ArgToken{ArgType::kKnownBoolNoFlag, "--noclean"},
+                ArgToken{ArgType::kUnknownFlag, "--norandom=y"}}}));
+
+TEST_P(BothArgsTest, SuccessExpectedTest) {
+  auto lexer_gen_result = ArgumentsLexerBuilder::Build(known_flags_);
+  std::unique_ptr<ArgumentsLexer> lexer =
+      lexer_gen_result.ok() ? std::move(*lexer_gen_result) : nullptr;
+  if (!lexer) {
+    GTEST_SKIP() << "Memory allocation failed but it is not in the test scope.";
+  }
+  auto tokenized_result = lexer->Tokenize(lex_input_);
+
+  ASSERT_TRUE(tokenized_result.ok()) << tokenized_result.error().Trace();
+  ASSERT_EQ(*tokenized_result, *expected_tokens_);
+}
+
+INSTANTIATE_TEST_SUITE_P(
+    ClientSpecificOptionParser, BothArgsTest,
+    testing::Values(
+        LexerInputOutput{
+            .known_flags_ = both_known_flags,
+            .lex_input_ = "cvd --clean -group_name=yumi",
+            .expected_tokens_ = Tokens{ArgToken{ArgType::kPositional, "cvd"},
+                                       ArgToken{ArgType::kKnownBoolFlag,
+                                                "--clean"},
+                                       ArgToken{ArgType::kKnownFlagAndValue,
+                                                "-group_name=yumi"}}},
+        LexerInputOutput{
+            .known_flags_ = both_known_flags,
+            .lex_input_ = "cvd --group_name -noclean",
+            .expected_tokens_ = Tokens{
+                ArgToken{ArgType::kPositional, "cvd"},
+                ArgToken{ArgType::kKnownValueFlag, "--group_name"},
+                ArgToken{ArgType::kKnownBoolNoFlag, "-noclean"}}}));
+
+TEST_P(BooleanBadArgsTest, FailureExpectedTest) {
+  auto lexer_gen_result = ArgumentsLexerBuilder::Build(known_flags_);
+  std::unique_ptr<ArgumentsLexer> lexer =
+      lexer_gen_result.ok() ? std::move(*lexer_gen_result) : nullptr;
+  if (!lexer) {
+    GTEST_SKIP() << "Memory allocation failed but it is not in the test scope.";
+  }
+  auto tokenized_result = lexer->Tokenize(lex_input_);
+
+  if (!expected_tokens_) {
+    ASSERT_FALSE(tokenized_result.ok())
+        << "Lexing " << lex_input_ << " should have failed.";
+    return;
+  }
+  ASSERT_TRUE(tokenized_result.ok()) << tokenized_result.error().Trace();
+  ASSERT_EQ(*tokenized_result, *expected_tokens_);
+}
+
+INSTANTIATE_TEST_SUITE_P(
+    ClientSpecificOptionParser, BooleanBadArgsTest,
+    testing::Values(
+        LexerInputOutput{
+            .known_flags_ = boolean_known_flags,
+            .lex_input_ = "cvd --yesclean",
+            .expected_tokens_ = Tokens{ArgToken{ArgType::kPositional, "cvd"},
+                                       ArgToken{ArgType::kUnknownFlag,
+                                                "--yesclean"}}},
+        LexerInputOutput{.known_flags_ = boolean_known_flags,
+                         .lex_input_ = "cvd --clean=Hello",
+                         .expected_tokens_ = std::nullopt},
+        LexerInputOutput{.known_flags_ = boolean_known_flags,
+                         .lex_input_ = "cvd --clean false",
+                         .expected_tokens_ = Tokens{
+                             ArgToken{ArgType::kPositional, "cvd"},
+                             ArgToken{ArgType::kKnownBoolFlag, "--clean"},
+                             ArgToken{ArgType::kPositional, "false"}}}));
+
+}  // namespace selector
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/unittests/selector/creation_analyzer_helper.cpp b/host/commands/cvd/unittests/selector/creation_analyzer_helper.cpp
new file mode 100644
index 0000000..1a1d206
--- /dev/null
+++ b/host/commands/cvd/unittests/selector/creation_analyzer_helper.cpp
@@ -0,0 +1,70 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "host/commands/cvd/unittests/selector/creation_analyzer_helper.h"
+
+#include <android-base/strings.h>
+
+#include "host/commands/cvd/types.h"
+
+namespace cuttlefish {
+namespace selector {
+namespace {
+
+// copied from server.h
+struct CommandInvocation {
+  std::string command;
+  std::vector<std::string> arguments;
+};
+
+CommandInvocation MockParseInvocation(const std::vector<std::string>& args) {
+  if (args.empty()) {
+    return CommandInvocation{};
+  }
+  if (args[0] != "cvd") {
+    return CommandInvocation{.command = args[0],
+                             .arguments = cvd_common::Args{}};
+  }
+  if (args.size() == 1) {
+    return CommandInvocation{.command = "help",
+                             .arguments = cvd_common::Args{}};
+  }
+  cvd_common::Args program_args{args.begin() + 2, args.end()};
+  return CommandInvocation{.command = args[1], .arguments = program_args};
+}
+
+}  // namespace
+
+CreationInfoGenTest::CreationInfoGenTest() { Init(); }
+void CreationInfoGenTest::Init() {
+  const auto& input_param = GetParam();
+  selector_args_ = android::base::Tokenize(input_param.selector_args, " ");
+  auto cmd_invocation =
+      MockParseInvocation(android::base::Tokenize(input_param.cmd_args, " "));
+  sub_cmd_ = cmd_invocation.command;
+  cmd_args_ = std::move(cmd_invocation.arguments);
+  if (!input_param.home.empty()) {
+    envs_["HOME"] = input_param.home;
+  }
+  if (!input_param.android_host_out.empty()) {
+    envs_["ANDROID_HOST_OUT"] = input_param.android_host_out;
+  }
+  expected_output_ = input_param.expected_output.output;
+  expected_success_ = input_param.expected_output.is_success;
+  credential_ = ucred{.pid = getpid(), .uid = getuid(), .gid = getgid()};
+}
+
+}  // namespace selector
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/unittests/selector/creation_analyzer_helper.h b/host/commands/cvd/unittests/selector/creation_analyzer_helper.h
new file mode 100644
index 0000000..cc48625
--- /dev/null
+++ b/host/commands/cvd/unittests/selector/creation_analyzer_helper.h
@@ -0,0 +1,80 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#pragma once
+
+#include <sys/types.h>
+#include <unistd.h>
+
+#include <optional>
+#include <string>
+#include <unordered_map>
+#include <vector>
+
+#include <gtest/gtest.h>
+
+#include "host/commands/cvd/selector/creation_analyzer.h"
+
+namespace cuttlefish {
+namespace selector {
+
+struct OutputInfo {
+  std::string home;
+  std::string host_artifacts_path;  ///< e.g. out/host/linux-x86
+  std::string group_name;
+  std::vector<unsigned> instances;
+  std::vector<std::string> args;
+  std::unordered_map<std::string, std::string> envs;
+};
+
+struct Expected {
+  OutputInfo output;
+  bool is_success;
+};
+
+struct InputOutput {
+  // inputs
+  std::string selector_args;
+  std::string cmd_args;
+  std::string home;
+  std::string android_host_out;
+
+  // output
+  Expected expected_output;
+};
+
+class CreationInfoGenTest : public testing::TestWithParam<InputOutput> {
+ protected:
+  CreationInfoGenTest();
+  void Init();
+
+  std::vector<std::string> selector_args_;
+  std::string sub_cmd_;
+  std::vector<std::string> cmd_args_;
+  std::unordered_map<std::string, std::string> envs_;
+  ucred credential_;
+  OutputInfo expected_output_;
+  bool expected_success_;
+  InstanceDatabase instance_db_;
+  InstanceLockFileManager instance_lock_file_manager_;
+};
+
+class HomeTest : public CreationInfoGenTest {};
+class HostArtifactsTest : public CreationInfoGenTest {};
+class InvalidSubCmdTest : public CreationInfoGenTest {};
+class ValidSubCmdTest : public CreationInfoGenTest {};
+
+}  // namespace selector
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/unittests/selector/creation_analyzer_test.cpp b/host/commands/cvd/unittests/selector/creation_analyzer_test.cpp
new file mode 100644
index 0000000..a3b5fe6
--- /dev/null
+++ b/host/commands/cvd/unittests/selector/creation_analyzer_test.cpp
@@ -0,0 +1,289 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include <sys/types.h>
+#include <unistd.h>
+
+#include "host/commands/cvd/unittests/selector/creation_analyzer_helper.h"
+
+#include "common/libs/utils/environment.h"
+#include "common/libs/utils/users.h"
+#include "host/commands/cvd/selector/instance_database_utils.h"
+#include "host/commands/cvd/selector/selector_constants.h"
+#include "host/commands/cvd/types.h"
+
+namespace cuttlefish {
+namespace selector {
+namespace {
+
+std::string TestUserHome() {
+  static const std::string home = StringFromEnv("HOME", "");
+  if (!home.empty()) {
+    return home;
+  }
+  auto result = SystemWideUserHome(getuid());
+  return (result.ok() ? *result : "");
+}
+
+std::string AutoGeneratedHome(const std::string& subdir) {
+  auto parent_result = ParentOfAutogeneratedHomes(getuid(), getgid());
+  if (!parent_result.ok()) {
+    return "";
+  }
+  std::string parent(*parent_result);
+  return parent + "/" + std::to_string(getuid()) + "/" + subdir;
+}
+
+}  // namespace
+
+static auto home_test_inputs = testing::Values(
+    InputOutput{
+        .cmd_args = "cvd start --daemon",
+        .selector_args = "--group_name=cf --instance_name=1",
+        .android_host_out = "/home/user/download",
+        .home = "/usr/local/home/_fake_user",
+        .expected_output =
+            Expected{.output = OutputInfo{.home = "/usr/local/home/_fake_user",
+                                          .host_artifacts_path =
+                                              "/home/user/download"},
+                     .is_success = true}},
+    InputOutput{.cmd_args = "cvd start --daemon",
+                /* no selector_args */
+                .android_host_out = "/home/user/download",
+                .home = TestUserHome(),
+                .expected_output =
+                    Expected{.output = OutputInfo{.home = TestUserHome(),
+                                                  .host_artifacts_path =
+                                                      "/home/user/download"},
+                             .is_success = true}},
+    InputOutput{
+        .cmd_args = "cvd start --daemon",
+        /* no selector_args */
+        .android_host_out = "/home/user/download",
+        /* undefined HOME */
+        .expected_output = Expected{
+            .output = OutputInfo{.home = TestUserHome(),
+                                 .host_artifacts_path = "/home/user/download"},
+            .is_success = true}});
+
+TEST_P(HomeTest, HomeTest) {
+  if (TestUserHome().empty()) {
+    /*
+     * If $HOME is the same as the real home directory (i.e. HOME is not
+     * overridden), cvd uses an automatically generated path in place of
+     * HOME when the operation is "start".
+     *
+     * Otherwise, for backward compatibility, cvd respects the overridden
+     * HOME.
+     *
+     * In testing that feature, if we cannot get the real home directory,
+     * the testing is not possible.
+     */
+    GTEST_SKIP() << "$HOME should be available for this set of tests.";
+  }
+  auto param = CreationAnalyzer::CreationAnalyzerParam{
+      .cmd_args = cmd_args_, .envs = envs_, .selector_args = selector_args_};
+
+  auto result = CreationAnalyzer::Analyze(
+      sub_cmd_, param, credential_, instance_db_, instance_lock_file_manager_);
+
+  ASSERT_EQ(result.ok(), expected_success_) << result.error().Trace();
+  if (!expected_success_) {
+    return;
+  }
+  ASSERT_EQ(result->home, expected_output_.home);
+}
+
+INSTANTIATE_TEST_SUITE_P(CvdCreationInfo, HomeTest, home_test_inputs);
+
+static auto host_out_test_inputs = testing::Values(
+    InputOutput{.cmd_args = "cvd start --daemon",
+                .selector_args = "--group_name=cf --instance_name=1",
+                .android_host_out = "/home/user/download",
+                .home = "/home/fake_user",
+                .expected_output =
+                    Expected{.output = OutputInfo{.home = "/home/fake_user",
+                                                  .host_artifacts_path =
+                                                      "/home/user/download"},
+                             .is_success = true}},
+    InputOutput{.cmd_args = "cvd start --daemon",
+                .selector_args = "--group_name=cf --instance_name=1",
+                /* missing ANDROID_HOST_OUT */
+                .home = "/home/fake_user",
+                .expected_output =
+                    Expected{.output = OutputInfo{.home = "/home/fake_user"},
+                             .is_success = false}});
+
+TEST_P(HostArtifactsTest, HostArtifactsTest) {
+  auto param = CreationAnalyzer::CreationAnalyzerParam{
+      .cmd_args = cmd_args_, .envs = envs_, .selector_args = selector_args_};
+
+  auto result = CreationAnalyzer::Analyze(
+      sub_cmd_, param, credential_, instance_db_, instance_lock_file_manager_);
+
+  ASSERT_EQ(result.ok(), expected_success_) << result.error().Trace();
+  if (!expected_success_) {
+    return;
+  }
+  ASSERT_EQ(result->host_artifacts_path, expected_output_.host_artifacts_path);
+}
+
+INSTANTIATE_TEST_SUITE_P(CvdCreationInfo, HostArtifactsTest,
+                         host_out_test_inputs);
+
+static auto invalid_sub_cmd_test_inputs =
+    testing::Values(InputOutput{.cmd_args = "cvd stop --daemon",
+                                .android_host_out = "/home/user/download",
+                                .home = "/home/fake_user"},
+                    InputOutput{.cmd_args = "cvd",
+                                .android_host_out = "/home/user/download",
+                                .home = "/home/fake_user"},
+                    InputOutput{.cmd_args = "cvd help --daemon",
+                                .android_host_out = "/home/user/download",
+                                .home = "/home/fake_user"});
+
+TEST_P(InvalidSubCmdTest, InvalidSubCmdTest) {
+  auto param = CreationAnalyzer::CreationAnalyzerParam{
+      .cmd_args = cmd_args_, .envs = envs_, .selector_args = selector_args_};
+
+  auto result = CreationAnalyzer::Analyze(
+      sub_cmd_, param, credential_, instance_db_, instance_lock_file_manager_);
+
+  ASSERT_FALSE(result.ok())
+      << "Analyze() had to fail with the subcmd in " << GetParam().cmd_args;
+}
+
+INSTANTIATE_TEST_SUITE_P(CvdCreationInfo, InvalidSubCmdTest,
+                         invalid_sub_cmd_test_inputs);
+
+static auto& valid_sub_cmd_test_inputs = home_test_inputs;
+
+TEST_P(ValidSubCmdTest, ValidSubCmdTest) {
+  auto param = CreationAnalyzer::CreationAnalyzerParam{
+      .cmd_args = cmd_args_, .envs = envs_, .selector_args = selector_args_};
+
+  auto result = CreationAnalyzer::Analyze(
+      sub_cmd_, param, credential_, instance_db_, instance_lock_file_manager_);
+
+  ASSERT_TRUE(result.ok()) << result.error().Trace();
+}
+
+INSTANTIATE_TEST_SUITE_P(CvdCreationInfo, ValidSubCmdTest,
+                         valid_sub_cmd_test_inputs);
+
+/*
+ * Tries to run Cuttlefish with the default group twice; the second
+ * run should fail because the default group_name is already registered
+ * in the InstanceDatabase.
+ */
+TEST(AutoHomeTest, DefaultFailAtSecondTrialTest) {
+  auto android_host_out = StringFromEnv("ANDROID_HOST_OUT", ".");
+  if (android_host_out.empty()) {
+    GTEST_SKIP() << "This test requires ANDROID_HOST_OUT to be set";
+  }
+  auto credential = ucred{.pid = getpid(), .uid = getuid(), .gid = getgid()};
+  InstanceLockFileManager lock_manager;
+  InstanceDatabase instance_db;
+  cvd_common::Envs envs = {{"ANDROID_HOST_OUT", android_host_out}};
+  cvd_common::Args empty_args;
+  std::vector<cvd_common::Args> cmd_args_list{
+      cvd_common::Args{"--daemon", "--instance_nums=7"},
+      cvd_common::Args{"--daemon", "--instance_nums=3"}};
+  auto param0 = CreationAnalyzer::CreationAnalyzerParam{
+      .cmd_args = cmd_args_list[0], .envs = envs, .selector_args = empty_args};
+  auto param1 = CreationAnalyzer::CreationAnalyzerParam{
+      .cmd_args = cmd_args_list[1], .envs = envs, .selector_args = empty_args};
+
+  auto result_1st_exec = CreationAnalyzer::Analyze("start", param0, credential,
+                                                   instance_db, lock_manager);
+  auto result_db_addition =
+      instance_db.AddInstanceGroup({.group_name = "cvd",
+                                    .home_dir = TestUserHome(),
+                                    .host_artifacts_path = android_host_out,
+                                    .product_out_path = android_host_out});
+  if (!result_db_addition.ok()) {
+    GTEST_SKIP() << "This test requires mock group addition to work.";
+  }
+  auto result_2nd_exec = CreationAnalyzer::Analyze("start", param1, credential,
+                                                   instance_db, lock_manager);
+
+  ASSERT_TRUE(result_1st_exec.ok()) << result_1st_exec.error().Trace();
+  ASSERT_EQ(result_1st_exec->home, TestUserHome());
+  ASSERT_FALSE(result_2nd_exec.ok())
+      << "Meant to be fail but returned home : " << result_2nd_exec->home;
+}
+
+TEST(AutoHomeTest, DefaultFollowedByNonDefaultTest) {
+  auto android_host_out = StringFromEnv("ANDROID_HOST_OUT", ".");
+  if (android_host_out.empty()) {
+    GTEST_SKIP() << "This test requires ANDROID_HOST_OUT to be set";
+  }
+  if (AutoGeneratedHome("goog").empty()) {
+    GTEST_SKIP() << "This test requires read-writable temp directory";
+  }
+  auto credential = ucred{.pid = getpid(), .uid = getuid(), .gid = getgid()};
+  InstanceLockFileManager lock_manager;
+  InstanceDatabase instance_db;
+  cvd_common::Envs envs = {{"ANDROID_HOST_OUT", android_host_out}};
+  // needs this as the CreationAnalyzerParam takes references, not copies.
+  // i.e. .cmd_args = cvd_common::Args{} won't work.
+  cvd_common::Args empty_args;
+  cvd_common::Args cmd_arg_for_default{"--daemon", "--instance_nums=7"};
+  cvd_common::Args cmd_args_1st_non_default{"--daemon", "--instance_nums=3"};
+  cvd_common::Args cmd_args_2nd_non_default{"--daemon", "--instance_nums=5"};
+  cvd_common::Args selector_device_name_args{"--group_name=goog",
+                                             "--instance_name=tv"};
+  auto param_default =
+      CreationAnalyzer::CreationAnalyzerParam{.cmd_args = cmd_arg_for_default,
+                                              .envs = envs,
+                                              .selector_args = empty_args};
+  auto param_1st_non_default = CreationAnalyzer::CreationAnalyzerParam{
+      .cmd_args = cmd_args_1st_non_default,
+      .envs = envs,
+      .selector_args = selector_device_name_args};
+  // To make second non-default run non-default.
+  cvd_common::Envs envs_with_home{envs};
+  envs_with_home["HOME"] = "/home/mocktester";
+  auto param_2nd_non_default = CreationAnalyzer::CreationAnalyzerParam{
+      .cmd_args = cmd_args_2nd_non_default,
+      .selector_args = empty_args,
+      .envs = envs_with_home};
+
+  auto result_default = CreationAnalyzer::Analyze(
+      "start", param_default, credential, instance_db, lock_manager);
+  auto result_db_addition =
+      instance_db.AddInstanceGroup({.group_name = "cvd",
+                                    .home_dir = TestUserHome(),
+                                    .host_artifacts_path = android_host_out,
+                                    .product_out_path = android_host_out});
+  if (!result_db_addition.ok()) {
+    GTEST_SKIP() << "This test requires mock group addition to work.";
+  }
+  auto result_1st_non_default = CreationAnalyzer::Analyze(
+      "start", param_1st_non_default, credential, instance_db, lock_manager);
+  auto result_2nd_non_default = CreationAnalyzer::Analyze(
+      "start", param_2nd_non_default, credential, instance_db, lock_manager);
+
+  ASSERT_TRUE(result_default.ok()) << result_default.error().Trace();
+  ASSERT_EQ(result_default->home, TestUserHome());
+  ASSERT_TRUE(result_1st_non_default.ok())
+      << result_1st_non_default.error().Trace();
+  ASSERT_EQ(result_1st_non_default->home, AutoGeneratedHome("goog"));
+  ASSERT_TRUE(result_2nd_non_default.ok())
+      << result_2nd_non_default.error().Trace();
+}
+
+}  // namespace selector
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/unittests/selector/cvd_flags_helper.cpp b/host/commands/cvd/unittests/selector/cvd_flags_helper.cpp
new file mode 100644
index 0000000..e5f249e
--- /dev/null
+++ b/host/commands/cvd/unittests/selector/cvd_flags_helper.cpp
@@ -0,0 +1,41 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/commands/cvd/unittests/selector/cvd_flags_helper.h"
+
+#include <cstdint>
+
+#include <android-base/strings.h>
+
+namespace cuttlefish {
+
+CvdFlagCollectionTest::CvdFlagCollectionTest() {
+  std::string in_str = "--name=foo --not_consumed --enable_vnc --id 9";
+  input_ = android::base::Tokenize(in_str, " ");
+  CvdFlag<bool> Help("help", false);
+  CvdFlag<std::string> Name("name");
+  CvdFlag<bool> EnableVnc("enable_vnc", true);
+  CvdFlag<std::int32_t> Id("id");
+  CvdFlag<bool> NotGiven("not-given");
+
+  flag_collection_.EnrollFlag(std::move(Help));
+  flag_collection_.EnrollFlag(std::move(Name));
+  flag_collection_.EnrollFlag(std::move(EnableVnc));
+  flag_collection_.EnrollFlag(std::move(Id));
+  flag_collection_.EnrollFlag(std::move(NotGiven));
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/unittests/selector/cvd_flags_helper.h b/host/commands/cvd/unittests/selector/cvd_flags_helper.h
new file mode 100644
index 0000000..1bb5fcb
--- /dev/null
+++ b/host/commands/cvd/unittests/selector/cvd_flags_helper.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <optional>
+#include <string>
+#include <variant>
+
+#include <gtest/gtest.h>
+
+#include "common/libs/utils/result.h"
+#include "host/commands/cvd/flag.h"
+#include "host/commands/cvd/types.h"
+
+namespace cuttlefish {
+
+// use this only when std::optional is not nullopt
+template <typename T>
+static Result<T> Get(
+    const std::optional<FlagCollection::ValueVariant>& opt_var) {
+  CF_EXPECT(opt_var != std::nullopt);
+  std::variant<std::int32_t, bool, std::string> var = *opt_var;
+  auto* ptr = std::get_if<T>(&var);
+  CF_EXPECT(ptr != nullptr);
+  return *ptr;
+}
+
+class CvdFlagCollectionTest : public testing::Test {
+ protected:
+  CvdFlagCollectionTest();
+
+  cvd_common::Args input_;
+  FlagCollection flag_collection_;
+};
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/unittests/selector/cvd_flags_test.cpp b/host/commands/cvd/unittests/selector/cvd_flags_test.cpp
new file mode 100644
index 0000000..a04866d
--- /dev/null
+++ b/host/commands/cvd/unittests/selector/cvd_flags_test.cpp
@@ -0,0 +1,119 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/commands/cvd/unittests/selector/cvd_flags_helper.h"
+
+namespace cuttlefish {
+
+TEST_F(CvdFlagCollectionTest, Init) {
+  auto output_result = flag_collection_.FilterFlags(input_);
+  ASSERT_TRUE(output_result.ok()) << output_result.error().Trace();
+}
+
+TEST_F(CvdFlagCollectionTest, Leftover) {
+  auto output_result = flag_collection_.FilterFlags(input_);
+  ASSERT_TRUE(output_result.ok()) << output_result.error().Trace();
+  ASSERT_EQ(input_, cvd_common::Args{"--not_consumed"});
+}
+
+TEST_F(CvdFlagCollectionTest, AllGivenFlagsListed) {
+  auto output_result = flag_collection_.FilterFlags(input_);
+  ASSERT_TRUE(output_result.ok()) << output_result.error().Trace();
+  ASSERT_EQ(input_, cvd_common::Args{"--not_consumed"});
+  auto output = std::move(*output_result);
+
+  ASSERT_FALSE(Contains(output, "help"));
+  ASSERT_TRUE(Contains(output, "name"));
+  ASSERT_TRUE(Contains(output, "enable_vnc"));
+  ASSERT_TRUE(Contains(output, "id"));
+  ASSERT_FALSE(Contains(output, "not-given"));
+  ASSERT_FALSE(Contains(output, "not-consumed"));
+}
+
+TEST_F(CvdFlagCollectionTest, DefaultValueFlagsAlsoListed) {
+  auto output_result = flag_collection_.CalculateFlags(input_);
+  ASSERT_TRUE(output_result.ok()) << output_result.error().Trace();
+  ASSERT_EQ(input_, cvd_common::Args{"--not_consumed"});
+  auto output = std::move(*output_result);
+
+  ASSERT_TRUE(Contains(output, "help"));
+  ASSERT_TRUE(Contains(output, "name"));
+  ASSERT_TRUE(Contains(output, "enable_vnc"));
+  ASSERT_TRUE(Contains(output, "id"));
+  ASSERT_FALSE(Contains(output, "not-given"));
+  ASSERT_FALSE(Contains(output, "not-consumed"));
+}
+
+TEST_F(CvdFlagCollectionTest, ValueTest) {
+  auto output_result = flag_collection_.CalculateFlags(input_);
+  ASSERT_TRUE(output_result.ok()) << output_result.error().Trace();
+  auto output = std::move(*output_result);
+  // without these verified, the code below will crash
+  ASSERT_TRUE(Contains(output, "help"));
+  ASSERT_TRUE(Contains(output, "name"));
+  ASSERT_TRUE(Contains(output, "enable_vnc"));
+  ASSERT_TRUE(Contains(output, "id"));
+  const auto help_output = output.at("help");
+  const auto name_output = output.at("name");
+  const auto enable_vnc_output = output.at("enable_vnc");
+  const auto id_output = output.at("id");
+
+  auto help_value_result = FlagCollection::GetValue<bool>(help_output.value);
+  auto name_value_result =
+      FlagCollection::GetValue<std::string>(name_output.value);
+  auto enable_vnc_value_result =
+      FlagCollection::GetValue<bool>(enable_vnc_output.value);
+  auto id_value_result =
+      FlagCollection::GetValue<std::int32_t>(id_output.value);
+
+  ASSERT_TRUE(help_value_result.ok());
+  ASSERT_TRUE(name_value_result.ok());
+  ASSERT_TRUE(enable_vnc_value_result.ok());
+  ASSERT_TRUE(id_value_result.ok());
+  ASSERT_EQ(*help_value_result, false);
+  ASSERT_EQ(*name_value_result, "foo");
+  ASSERT_EQ(*enable_vnc_value_result, true);
+  ASSERT_EQ(*id_value_result, 9);
+}
+
+TEST(CvdFlagTest, FlagProxyFilter) {
+  CvdFlag<std::string> no_default("no_default");
+  cvd_common::Args has_flag_args{"--no_default=Hello"};
+  cvd_common::Args not_has_flag_args{"--bar --foo=name --enable_vnc"};
+  cvd_common::Args empty_args{""};
+
+  CvdFlagProxy no_default_proxy(std::move(no_default));
+  auto expected_hello_opt_result = no_default_proxy.FilterFlag(has_flag_args);
+  auto expected_null_result = no_default_proxy.FilterFlag(not_has_flag_args);
+  auto expected_null_result2 = no_default_proxy.FilterFlag(empty_args);
+
+  ASSERT_TRUE(expected_hello_opt_result.ok());
+  ASSERT_TRUE(expected_null_result.ok());
+  ASSERT_TRUE(expected_null_result2.ok());
+
+  ASSERT_TRUE(*expected_hello_opt_result);
+  auto value_result = Get<std::string>(**expected_hello_opt_result);
+  ASSERT_TRUE(value_result.ok());
+  ASSERT_EQ(*value_result, "Hello");
+  ASSERT_FALSE(*expected_null_result);
+  ASSERT_FALSE(*expected_null_result2);
+
+  ASSERT_TRUE(has_flag_args.empty());
+  ASSERT_EQ(not_has_flag_args,
+            cvd_common::Args{"--bar --foo=name --enable_vnc"});
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/unittests/selector/group_record_test.cpp b/host/commands/cvd/unittests/selector/group_record_test.cpp
new file mode 100644
index 0000000..30168e3
--- /dev/null
+++ b/host/commands/cvd/unittests/selector/group_record_test.cpp
@@ -0,0 +1,134 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include <string>
+#include <vector>
+
+#include <gtest/gtest.h>
+
+#include "host/commands/cvd/selector/instance_group_record.h"
+#include "host/commands/cvd/selector/instance_record.h"
+
+namespace cuttlefish {
+namespace selector {
+
+static std::string GroupName() { return "yah_ong"; }
+static std::string HomeDir() { return "/home/user"; }
+static std::string TestBinDir() { return "/opt/android11"; }
+
+class CvdInstanceGroupUnitTest : public testing::Test {
+ protected:
+  CvdInstanceGroupUnitTest()
+      : group_({.group_name = GroupName(),
+                .home_dir = HomeDir(),
+                .host_artifacts_path = TestBinDir(),
+                .product_out_path = TestBinDir()}) {}
+  LocalInstanceGroup& Get() { return group_; }
+  LocalInstanceGroup group_;
+};
+
+// CvdInstanceGroupUnitTest + add 4 instances
+class CvdInstanceGroupSearchUnitTest : public testing::Test {
+ protected:
+  CvdInstanceGroupSearchUnitTest()
+      : group_({.group_name = GroupName(),
+                .home_dir = HomeDir(),
+                .host_artifacts_path = TestBinDir(),
+                .product_out_path = TestBinDir()}) {
+    is_setup_ =
+        (Get().AddInstance(1, "tv_instance").ok() &&
+         Get().AddInstance(2, "2").ok() && Get().AddInstance(3, "phone").ok() &&
+         Get().AddInstance(7, "tv_instance").ok());
+    is_setup_ = is_setup_ && (Get().Instances().size() == 4);
+  }
+  LocalInstanceGroup& Get() { return group_; }
+  bool IsSetup() const { return is_setup_; }
+
+ private:
+  bool is_setup_;
+  LocalInstanceGroup group_;
+};
+
+TEST_F(CvdInstanceGroupUnitTest, Fields) {
+  auto& group = Get();
+
+  ASSERT_EQ(group.InternalGroupName(), "cvd");
+  ASSERT_EQ(group.GroupName(), "yah_ong");
+  ASSERT_EQ(group.HomeDir(), HomeDir());
+  ASSERT_EQ(group.HostArtifactsPath(), TestBinDir());
+}
+
+TEST_F(CvdInstanceGroupUnitTest, AddInstances) {
+  auto& group = Get();
+
+  ASSERT_TRUE(group.AddInstance(1, "tv_instance").ok());
+  ASSERT_TRUE(group.AddInstance(2, "2").ok());
+  ASSERT_TRUE(group.AddInstance(3, "phone").ok());
+  ASSERT_EQ(group.Instances().size(), 3);
+}
+
+TEST_F(CvdInstanceGroupUnitTest, AddInstancesAndListAll) {
+  auto& group = Get();
+  group.AddInstance(1, "tv_instance");
+  group.AddInstance(2, "2");
+  group.AddInstance(3, "phone");
+  if (group.Instances().size() != 3) {
+    GTEST_SKIP() << "AddInstance failed but is verified in other testing.";
+  }
+
+  auto set_result = group.FindAllInstances();
+
+  ASSERT_TRUE(set_result.ok()) << set_result.error().Trace();
+  ASSERT_EQ(set_result->size(), 3);
+}
+
+TEST_F(CvdInstanceGroupSearchUnitTest, SearchById) {
+  auto& group = Get();
+  if (!IsSetup()) {
+    /*
+     * Here's why we skip the test rather than see it as a failure.
+     *
+     * 1. The test is specifically designed for search-by-id operations.
+     * 2. Adding instance to a group is tested in AddInstances test designed
+     *    specifically for it. It's a failure there but not here.
+     */
+    GTEST_SKIP() << "Failed to add instances to the group.";
+  }
+  // valid_ids were added in the CvdInstanceGroupSearchUnitTest
+  // constructor.
+  std::vector<unsigned> valid_ids{1, 2, 7};
+  std::vector<unsigned> invalid_ids{20, 0, 5};
+
+  // valid search
+  for (auto const& valid_id : valid_ids) {
+    auto result = group.FindById(valid_id);
+    ASSERT_TRUE(result.ok());
+    auto set = *result;
+    ASSERT_EQ(set.size(), 1);
+    const LocalInstance& instance = *set.cbegin();
+    ASSERT_EQ(instance.InstanceId(), valid_id);
+  }
+
+  // invalid search
+  for (auto const& invalid_id : invalid_ids) {
+    auto result = group.FindById(invalid_id);
+    // it's okay not to be found
+    ASSERT_TRUE(result.ok());
+    ASSERT_TRUE(result->empty());
+  }
+}
+
+}  // namespace selector
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/unittests/selector/host_tool_target_test.cpp b/host/commands/cvd/unittests/selector/host_tool_target_test.cpp
new file mode 100644
index 0000000..0f8391b
--- /dev/null
+++ b/host/commands/cvd/unittests/selector/host_tool_target_test.cpp
@@ -0,0 +1,113 @@
+//
+// Copyright (C) 2023 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include <string>
+#include <unordered_map>
+#include <vector>
+
+#include <gtest/gtest.h>
+
+#include "common/libs/utils/environment.h"
+#include "common/libs/utils/files.h"
+#include "common/libs/utils/result.h"
+#include "host/commands/cvd/server_command/host_tool_target_manager.h"
+
+namespace cuttlefish {
+
+TEST(HostToolTarget, KnownFlags) {
+  std::string android_host_out = StringFromEnv("ANDROID_HOST_OUT", "");
+  if (android_host_out.empty()) {
+    GTEST_SKIP() << "Set ANDROID_HOST_OUT";
+  }
+  std::unordered_map<std::string, std::vector<std::string>> ops_to_op_impl_map{
+      {"start", std::vector<std::string>{"cvd_internal_start", "launch_cvd"}}};
+
+  auto host_tool_target =
+      HostToolTarget::Create(android_host_out, ops_to_op_impl_map);
+  ASSERT_TRUE(host_tool_target.ok()) << host_tool_target.error().Trace();
+
+  auto daemon_flag =
+      host_tool_target->GetFlagInfo(HostToolTarget::FlagInfoRequest{
+          .operation_ = "start",
+          .flag_name_ = "daemon",
+      });
+
+  auto bad_flag = host_tool_target->GetFlagInfo(HostToolTarget::FlagInfoRequest{
+      .operation_ = "start",
+      .flag_name_ = "@never_exist@",
+  });
+
+  ASSERT_TRUE(daemon_flag.ok()) << daemon_flag.error().Trace();
+  ASSERT_EQ(daemon_flag->Name(), "daemon");
+  ASSERT_TRUE(daemon_flag->Type() == "string" || daemon_flag->Type() == "bool");
+  ASSERT_FALSE(bad_flag.ok());
+}
+
+fruit::Component<HostToolTargetManager> CreateManagerComponent() {
+  return fruit::createComponent()
+      .install(HostToolTargetManagerComponent)
+      .install(OperationToBinsMapComponent);
+}
+
+TEST(HostToolManager, KnownFlags) {
+  std::string android_host_out = StringFromEnv("ANDROID_HOST_OUT", "");
+  if (android_host_out.empty()) {
+    GTEST_SKIP() << "Set ANDROID_HOST_OUT";
+  }
+  fruit::Injector<HostToolTargetManager> injector(CreateManagerComponent);
+  HostToolTargetManager& host_tool_manager =
+      injector.get<HostToolTargetManager&>();
+
+  auto daemon_flag =
+      host_tool_manager.ReadFlag({.artifacts_path = android_host_out,
+                                  .op = "start",
+                                  .flag_name = "daemon"});
+  auto bad_flag =
+      host_tool_manager.ReadFlag({.artifacts_path = android_host_out,
+                                  .op = "start",
+                                  .flag_name = "@never_exist@"});
+
+  ASSERT_TRUE(daemon_flag.ok()) << daemon_flag.error().Trace();
+  ASSERT_EQ(daemon_flag->Name(), "daemon");
+  ASSERT_TRUE(daemon_flag->Type() == "string" || daemon_flag->Type() == "bool");
+  ASSERT_FALSE(bad_flag.ok());
+}
+
+TEST(HostToolManager, KnownBins) {
+  std::string android_host_out = StringFromEnv("ANDROID_HOST_OUT", "");
+  if (android_host_out.empty()) {
+    GTEST_SKIP() << "Set ANDROID_HOST_OUT";
+  }
+  fruit::Injector<HostToolTargetManager> injector(CreateManagerComponent);
+  HostToolTargetManager& host_tool_manager =
+      injector.get<HostToolTargetManager&>();
+
+  auto start_bin = host_tool_manager.ExecBaseName(
+      {.artifacts_path = android_host_out, .op = "start"});
+  auto stop_bin = host_tool_manager.ExecBaseName(
+      {.artifacts_path = android_host_out, .op = "stop"});
+  auto bad_bin = host_tool_manager.ExecBaseName(
+      {.artifacts_path = android_host_out, .op = "bad"});
+
+  ASSERT_TRUE(start_bin.ok()) << start_bin.error().Trace();
+  ASSERT_TRUE(stop_bin.ok()) << stop_bin.error().Trace();
+  ASSERT_FALSE(bad_bin.ok()) << "bad_bin should be CF_ERR but is " << *bad_bin;
+  ASSERT_TRUE(*start_bin == "cvd_internal_start" || *start_bin == "launch_cvd")
+      << "start_bin was " << *start_bin;
+  ASSERT_TRUE(*stop_bin == "cvd_internal_stop" || *stop_bin == "stop_cvd")
+      << "stop_bin was " << *stop_bin;
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/unittests/selector/instance_database_helper.cpp b/host/commands/cvd/unittests/selector/instance_database_helper.cpp
new file mode 100644
index 0000000..2c6ac68
--- /dev/null
+++ b/host/commands/cvd/unittests/selector/instance_database_helper.cpp
@@ -0,0 +1,149 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "host/commands/cvd/unittests/selector/instance_database_helper.h"
+
+#include <algorithm>
+#include <cstdio>
+#include <cstdlib>
+
+#include <android-base/file.h>
+
+#include "common/libs/fs/shared_buf.h"
+#include "common/libs/fs/shared_fd.h"
+#include "common/libs/utils/environment.h"
+#include "common/libs/utils/files.h"
+#include "host/commands/cvd/selector/selector_constants.h"
+
+namespace cuttlefish {
+namespace selector {
+namespace {
+
+// mktemp with /tmp/<subdir>.XXXXXX, and if failed,
+// mkdir -p /tmp/<subdir>.<default_suffix>
+std::optional<std::string> CreateTempDirectory(
+    const std::string& subdir, const std::string& default_suffix) {
+  std::string path_pattern = "/tmp/" + subdir + ".XXXXXX";
+  auto ptr = mkdtemp(path_pattern.data());
+  if (ptr) {
+    return {std::string(ptr)};
+  }
+  std::string default_path = "/tmp/" + subdir + "." + default_suffix;
+  return (EnsureDirectoryExists(default_path).ok() ? std::optional(default_path)
+                                                   : std::nullopt);
+}
+
+// Linux "touch" a(n empty) file
+bool Touch(const std::string& full_path) {
+  // this file is required only to make FileExists() true.
+  SharedFD new_file = SharedFD::Creat(full_path, S_IRUSR | S_IWUSR);
+  return new_file->IsOpen();
+}
+
+}  // namespace
+
+CvdInstanceDatabaseTest::CvdInstanceDatabaseTest()
+    : error_{.error_code = ErrorCode::kOk, .msg = ""} {
+  InitWorkspace() && InitMockAndroidHostOut();
+}
+
+CvdInstanceDatabaseTest::~CvdInstanceDatabaseTest() { ClearWorkspace(); }
+
+void CvdInstanceDatabaseTest::ClearWorkspace() {
+  if (!workspace_dir_.empty()) {
+    RecursivelyRemoveDirectory(workspace_dir_);
+  }
+}
+
+void CvdInstanceDatabaseTest::SetErrorCode(const ErrorCode error_code,
+                                           const std::string& msg) {
+  error_.error_code = error_code;
+  error_.msg = msg;
+}
+
+bool CvdInstanceDatabaseTest::InitWorkspace() {
+  // creating a parent dir of the mock home directories for each fake group
+  auto result_opt = CreateTempDirectory("cf_unittest", "default_location");
+  if (!result_opt) {
+    SetErrorCode(ErrorCode::kFileError, "Failed to create workspace");
+    return false;
+  }
+  workspace_dir_ = std::move(result_opt.value());
+  return true;
+}
+
+bool CvdInstanceDatabaseTest::InitMockAndroidHostOut() {
+  /* creating a fake host out directory
+   *
+   * As the automated testing system does not guarantee that there is either
+   * ANDROID_HOST_OUT or ".", where we can find host tools, we create a fake
+   * host tool directory just enough to deceive InstanceDatabase APIs.
+   *
+   */
+  std::string android_host_out = workspace_dir_ + "/android_host_out";
+  if (!EnsureDirectoryExists(android_host_out).ok()) {
+    SetErrorCode(ErrorCode::kFileError, "Failed to create " + android_host_out);
+    return false;
+  }
+  android_artifacts_path_ = android_host_out;
+  if (!EnsureDirectoryExists(android_artifacts_path_ + "/bin").ok()) {
+    SetErrorCode(ErrorCode::kFileError,
+                 "Failed to create " + android_artifacts_path_ + "/bin");
+    return false;
+  }
+  if (!Touch(android_artifacts_path_ + "/bin" + "/launch_cvd")) {
+    SetErrorCode(ErrorCode::kFileError, "Failed to create mock launch_cvd");
+    return false;
+  }
+  return true;
+}
+
+// Add an InstanceGroups with each home directory and android_host_out_
+bool CvdInstanceDatabaseTest::AddGroups(
+    const std::unordered_set<std::string>& base_names) {
+  for (const auto& base_name : base_names) {
+    const std::string home(Workspace() + "/" + base_name);
+    if (!EnsureDirectoryExists(home).ok()) {
+      SetErrorCode(ErrorCode::kFileError, home + " directory is not found.");
+      return false;
+    }
+    InstanceDatabase::AddInstanceGroupParam param{
+        .group_name = base_name,
+        .home_dir = home,
+        .host_artifacts_path = android_artifacts_path_,
+        .product_out_path = android_artifacts_path_};
+    if (!db_.AddInstanceGroup(param).ok()) {
+      SetErrorCode(ErrorCode::kInstanceDabaseError, "Failed to add group");
+      return false;
+    }
+  }
+  return true;
+}
+
+bool CvdInstanceDatabaseTest::AddInstances(
+    const std::string& group_name,
+    const std::vector<InstanceInfo>& instances_info) {
+  for (const auto& [id, per_instance_name] : instances_info) {
+    if (!db_.AddInstance(group_name, id, per_instance_name).ok()) {
+      SetErrorCode(ErrorCode::kInstanceDabaseError,
+                   "Failed to add instance " + per_instance_name);
+      return false;
+    }
+  }
+  return true;
+}
+
+}  // namespace selector
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/unittests/selector/instance_database_helper.h b/host/commands/cvd/unittests/selector/instance_database_helper.h
new file mode 100644
index 0000000..6e7b862
--- /dev/null
+++ b/host/commands/cvd/unittests/selector/instance_database_helper.h
@@ -0,0 +1,105 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#pragma once
+
+#include <optional>
+#include <set>
+#include <string>
+#include <unordered_map>
+#include <unordered_set>
+#include <vector>
+
+#include <gtest/gtest.h>
+
+#include "host/commands/cvd/selector/constant_reference.h"
+#include "host/commands/cvd/selector/instance_database.h"
+
+namespace cuttlefish {
+namespace selector {
+
+/**
+ * Creates n mock HOME directories, one per group. Also, creates
+ * 1 mock ANDROID_HOST_OUT with a mock launcher file.
+ *
+ * The test suite is to assess InstanceDatabase APIs such as
+ * adding groups, adding instances to the groups, etc. The thing
+ * is that the InstanceDatabase APIs will check if HOME and/or
+ * ANDROID_HOST_OUT are directories. Also, for ANDROID_HOST_OUT,
+ * as a bare minimum validity check, it will see if there is a launcher
+ * file under the bin directory of it.
+ *
+ * Thus, the mock environment should prepare actual directories with
+ * a mock launcher file(s). In case the test runs/tests in the suite run
+ * in parallel, we give each test run a unique directory, and that's why
+ * all mock homes are under a temp directory created by mkdtemp()
+ *
+ */
+class CvdInstanceDatabaseTest : public ::testing::Test {
+ protected:
+  enum class ErrorCode : std::int32_t {
+    kOk,
+    kFileError,
+    kInstanceDabaseError,
+  };
+
+  struct SetupError {
+    ErrorCode error_code;
+    std::string msg;
+  };
+
+  CvdInstanceDatabaseTest();
+  ~CvdInstanceDatabaseTest();
+
+  bool SetUpOk() const { return error_.error_code == ErrorCode::kOk; }
+  const std::string& Workspace() const { return workspace_dir_; }
+  /*
+   * Returns a valid host artifacts dir, which is a prerequisite for
+   * InstanceDatabase APIs.
+   */
+  const std::string& HostArtifactsPath() const {
+    return android_artifacts_path_;
+  }
+
+  // Adds InstanceGroups, each by:
+  //    "mkdir" : Workspace() + "/" + base_name, HostArtifactsPath()
+  //    db_.AddInstanceGroup()
+  bool AddGroups(const std::unordered_set<std::string>& base_names);
+  struct InstanceInfo {
+    unsigned id;
+    std::string per_instance_name;
+  };
+  bool AddInstances(const std::string& group_name,
+                    const std::vector<InstanceInfo>& instances_info);
+  InstanceDatabase& GetDb() { return db_; }
+  const SetupError& Error() const { return error_; }
+
+ private:
+  void ClearWorkspace();
+  bool InitWorkspace();
+  bool InitMockAndroidHostOut();
+  // set error_ when there is an error
+  void SetErrorCode(const ErrorCode error_code, const std::string& msg);
+
+  std::string android_artifacts_path_;
+  std::string workspace_dir_;
+  SetupError error_;
+  InstanceDatabase db_;
+};
+
+using CvdInstanceDatabaseJsonTest = CvdInstanceDatabaseTest;
+
+}  // namespace selector
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/unittests/selector/instance_database_test.cpp b/host/commands/cvd/unittests/selector/instance_database_test.cpp
new file mode 100644
index 0000000..1d128c9
--- /dev/null
+++ b/host/commands/cvd/unittests/selector/instance_database_test.cpp
@@ -0,0 +1,475 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include <algorithm>
+#include <iostream>
+#include <unordered_set>
+
+#include <gtest/gtest.h>
+
+#include "common/libs/utils/files.h"
+#include "common/libs/utils/json.h"
+#include "host/commands/cvd/selector/instance_database.h"
+#include "host/commands/cvd/selector/selector_constants.h"
+#include "host/commands/cvd/unittests/selector/instance_database_helper.h"
+
+/*
+ * SetUp creates a mock ANDROID_HOST_OUT directory where there is
+ * bin/launch_cvd, and a "Workspace" directory where supposedly HOME
+ * directories for each LocalInstanceGroup will be populated.
+ *
+ * InstanceDatabase APIs conduct validity checks: e.g. if the host tool
+ * directory actually has host tools such as launch_cvd, if the "HOME"
+ * directory for the LocalInstanceGroup is actually an existing directory,
+ * and so on.
+ *
+ * With TEST_F(Suite, Test), the following is the class declaration:
+ *  class Suite : public testing::Test;
+ *  class Suite_Test : public Suite;
+ *
+ * Thus, the set up is done inside the constructor of the Suite base class.
+ * Also, cleaning up the directories and files is done inside the destructor.
+ * If creating files/directories fails, the "Test" is skipped.
+ *
+ */
+
+namespace cuttlefish {
+namespace selector {
+
+TEST_F(CvdInstanceDatabaseTest, Empty) {
+  if (!SetUpOk()) {
+    GTEST_SKIP() << Error().msg;
+  }
+  auto& db = GetDb();
+  ASSERT_TRUE(db.IsEmpty());
+  ASSERT_TRUE(db.InstanceGroups().empty());
+}
+
+TEST_F(CvdInstanceDatabaseTest, AddWithInvalidGroupInfo) {
+  if (!SetUpOk()) {
+    GTEST_SKIP() << Error().msg;
+  }
+  auto& db = GetDb();
+  // populate home directories under Workspace()
+  const std::string home{Workspace() + "/" + "meow"};
+  if (!EnsureDirectoryExists(home).ok()) {
+    // if ever failed, skip
+    GTEST_SKIP() << "Failed to find/create " << home;
+  }
+  const std::string invalid_host_artifacts_path{Workspace() + "/" + "host_out"};
+  if (!EnsureDirectoryExists(invalid_host_artifacts_path).ok() ||
+      !EnsureDirectoryExists(invalid_host_artifacts_path + "/bin").ok()) {
+    GTEST_SKIP() << "Failed to find/create "
+                 << invalid_host_artifacts_path + "/bin";
+  }
+
+  // group_name : "meow"
+  auto result_bad_home =
+      db.AddInstanceGroup({.group_name = "meow",
+                           .home_dir = "/path/to/never/exists",
+                           .host_artifacts_path = HostArtifactsPath(),
+                           .product_out_path = HostArtifactsPath()});
+  auto result_bad_host_bin_dir =
+      db.AddInstanceGroup({.group_name = "meow",
+                           .home_dir = home,
+                           .host_artifacts_path = "/path/to/never/exists",
+                           .product_out_path = "/path/to/never/exists"});
+  auto result_both_bad =
+      db.AddInstanceGroup({.group_name = "meow",
+                           .home_dir = "/path/to/never/exists",
+                           .host_artifacts_path = "/path/to/never/exists",
+                           .product_out_path = "/path/to/never/exists"});
+  auto result_bad_group_name =
+      db.AddInstanceGroup({.group_name = "0invalid_group_name",
+                           .home_dir = home,
+                           .host_artifacts_path = HostArtifactsPath(),
+                           .product_out_path = HostArtifactsPath()});
+  // Everything is correct but one thing: the host artifacts directory does not
+  // have host tool files such as launch_cvd
+  auto result_non_qualifying_host_tool_dir =
+      db.AddInstanceGroup({.group_name = "meow",
+                           .home_dir = home,
+                           .host_artifacts_path = invalid_host_artifacts_path,
+                           .product_out_path = invalid_host_artifacts_path});
+
+  ASSERT_FALSE(result_bad_home.ok());
+  ASSERT_FALSE(result_bad_host_bin_dir.ok());
+  ASSERT_FALSE(result_both_bad.ok());
+  ASSERT_FALSE(result_bad_group_name.ok());
+  ASSERT_FALSE(result_non_qualifying_host_tool_dir.ok());
+}
+
+TEST_F(CvdInstanceDatabaseTest, AddWithValidGroupInfo) {
+  if (!SetUpOk()) {
+    GTEST_SKIP() << Error().msg;
+  }
+  auto& db = GetDb();
+  const std::string home0{Workspace() + "/" + "home0"};
+  if (!EnsureDirectoryExists(home0).ok()) {
+    GTEST_SKIP() << "Failed to find/create " << home0;
+  }
+  const std::string home1{Workspace() + "/" + "home1"};
+  if (!EnsureDirectoryExists(home1).ok()) {
+    GTEST_SKIP() << "Failed to find/create " << home1;
+  }
+
+  ASSERT_TRUE(db.AddInstanceGroup({.group_name = "meow",
+                                   .home_dir = home0,
+                                   .host_artifacts_path = HostArtifactsPath(),
+                                   .product_out_path = HostArtifactsPath()})
+                  .ok());
+  ASSERT_TRUE(db.AddInstanceGroup({.group_name = "miaou",
+                                   .home_dir = home1,
+                                   .host_artifacts_path = HostArtifactsPath(),
+                                   .product_out_path = HostArtifactsPath()})
+                  .ok());
+}
+
+TEST_F(CvdInstanceDatabaseTest, AddToTakenHome) {
+  if (!SetUpOk()) {
+    GTEST_SKIP() << Error().msg;
+  }
+  auto& db = GetDb();
+  const std::string home{Workspace() + "/" + "my_home"};
+  if (!EnsureDirectoryExists(home).ok()) {
+    GTEST_SKIP() << "Failed to find/create " << home;
+  }
+
+  ASSERT_TRUE(db.AddInstanceGroup({.group_name = "meow",
+                                   .home_dir = home,
+                                   .host_artifacts_path = HostArtifactsPath(),
+                                   .product_out_path = HostArtifactsPath()})
+                  .ok());
+  ASSERT_FALSE(db.AddInstanceGroup({.group_name = "meow",
+                                    .home_dir = home,
+                                    .host_artifacts_path = HostArtifactsPath(),
+                                    .product_out_path = HostArtifactsPath()})
+                   .ok());
+}
+
+TEST_F(CvdInstanceDatabaseTest, Clear) {
+  /* AddGroups(name):
+   *   HOME: Workspace() + "/" + name
+   *   HostArtifactsPath: Workspace() + "/" + "android_host_out"
+   *   group_ := LocalInstanceGroup(name, HOME, HostArtifactsPath)
+   */
+  if (!SetUpOk() || !AddGroups({"nyah", "yah_ong"})) {
+    GTEST_SKIP() << Error().msg;
+  }
+  auto& db = GetDb();
+
+  // test Clear()
+  ASSERT_FALSE(db.IsEmpty());
+  db.Clear();
+  ASSERT_TRUE(db.IsEmpty());
+}
+
+TEST_F(CvdInstanceDatabaseTest, SearchGroups) {
+  if (!SetUpOk() || !AddGroups({"myau", "miau"})) {
+    GTEST_SKIP() << Error().msg;
+  }
+  auto& db = GetDb();
+  const std::string valid_home_search_key{Workspace() + "/" + "myau"};
+  const std::string invalid_home_search_key{"/no/such/path"};
+
+  auto valid_groups = db.FindGroups({kHomeField, valid_home_search_key});
+  auto valid_group = db.FindGroup({kHomeField, valid_home_search_key});
+  auto invalid_groups = db.FindGroups({kHomeField, invalid_home_search_key});
+  auto invalid_group = db.FindGroup({kHomeField, invalid_home_search_key});
+
+  ASSERT_TRUE(valid_groups.ok());
+  ASSERT_EQ(valid_groups->size(), 1);
+  ASSERT_TRUE(valid_group.ok());
+
+  ASSERT_TRUE(invalid_groups.ok());
+  ASSERT_EQ(invalid_groups->size(), 0);
+  ASSERT_FALSE(invalid_group.ok());
+}
+
+TEST_F(CvdInstanceDatabaseTest, RemoveGroup) {
+  if (!SetUpOk()) {
+    GTEST_SKIP() << Error().msg;
+  }
+  auto& db = GetDb();
+  if (!AddGroups({"miaaaw", "meow", "mjau"})) {
+    GTEST_SKIP() << Error().msg;
+  }
+  auto eng_group = db.FindGroup({kHomeField, Workspace() + "/" + "meow"});
+  if (!eng_group.ok()) {
+    GTEST_SKIP() << "meow"
+                 << " group was not found.";
+  }
+
+  ASSERT_TRUE(db.RemoveInstanceGroup(*eng_group));
+  ASSERT_FALSE(db.RemoveInstanceGroup(*eng_group));
+}
+
+TEST_F(CvdInstanceDatabaseTest, AddInstances) {
+  if (!SetUpOk() || !AddGroups({"yah_ong"})) {
+    GTEST_SKIP() << Error().msg;
+  }
+  auto& db = GetDb();
+  auto kitty_group = db.FindGroup({kHomeField, Workspace() + "/" + "yah_ong"});
+  if (!kitty_group.ok()) {
+    GTEST_SKIP() << "yah_ong"
+                 << " group was not found";
+  }
+  const auto& instances = kitty_group->Get().Instances();
+
+  ASSERT_TRUE(db.AddInstance("yah_ong", 1, "yumi").ok());
+  ASSERT_FALSE(db.AddInstance("yah_ong", 3, "yumi").ok());
+  ASSERT_FALSE(db.AddInstance("yah_ong", 1, "tiger").ok());
+  ASSERT_TRUE(db.AddInstance("yah_ong", 3, "tiger").ok());
+  for (auto const& instance_unique_ptr : instances) {
+    ASSERT_TRUE(instance_unique_ptr->PerInstanceName() == "yumi" ||
+                instance_unique_ptr->PerInstanceName() == "tiger");
+  }
+}
+
+TEST_F(CvdInstanceDatabaseTest, AddInstancesInvalid) {
+  if (!SetUpOk() || !AddGroups({"yah_ong"})) {
+    GTEST_SKIP() << Error().msg;
+  }
+  auto& db = GetDb();
+  auto kitty_group = db.FindGroup({kHomeField, Workspace() + "/" + "yah_ong"});
+  if (!kitty_group.ok()) {
+    GTEST_SKIP() << "yah_ong"
+                 << " group was not found";
+  }
+
+  ASSERT_FALSE(db.AddInstance("yah_ong", 1, "!yumi").ok());
+  ASSERT_FALSE(db.AddInstance("yah_ong", 7, "ti ger").ok());
+}
+
+TEST_F(CvdInstanceDatabaseTest, FindByInstanceId) {
+  // The start of set up
+  if (!SetUpOk()) {
+    GTEST_SKIP() << Error().msg;
+  }
+  if (!AddGroups({"miau", "nyah"})) {
+    GTEST_SKIP() << Error().msg;
+  }
+  auto& db = GetDb();
+  // per_instance_name could be the same if the parent groups are different.
+  std::vector<InstanceInfo> miau_group_instance_id_name_pairs{
+      {1, "8"}, {10, "tv-instance"}};
+  std::vector<InstanceInfo> nyah_group_instance_id_name_pairs{
+      {7, "my_favorite_phone"}, {11, "tv-instance"}, {3, "3_"}};
+  auto miau_group = db.FindGroup({kHomeField, Workspace() + "/" + "miau"});
+  auto nyah_group = db.FindGroup({kHomeField, Workspace() + "/" + "nyah"});
+  if (!miau_group.ok() || !nyah_group.ok()) {
+    GTEST_SKIP() << "miau or nyah group"
+                 << " group was not found";
+  }
+  if (!AddInstances("miau", miau_group_instance_id_name_pairs) ||
+      !AddInstances("nyah", nyah_group_instance_id_name_pairs)) {
+    GTEST_SKIP() << Error().msg;
+  }
+  // The end of set up
+
+  auto result1 = db.FindInstance({kInstanceIdField, std::to_string(1)});
+  auto result10 = db.FindInstance({kInstanceIdField, std::to_string(10)});
+  auto result7 = db.FindInstance({kInstanceIdField, std::to_string(7)});
+  auto result11 = db.FindInstance({kInstanceIdField, std::to_string(11)});
+  auto result3 = db.FindInstance({kInstanceIdField, std::to_string(3)});
+  auto result_invalid = db.FindInstance({kInstanceIdField, std::to_string(20)});
+
+  ASSERT_TRUE(result1.ok());
+  ASSERT_TRUE(result10.ok());
+  ASSERT_TRUE(result7.ok());
+  ASSERT_TRUE(result11.ok());
+  ASSERT_TRUE(result3.ok());
+  ASSERT_EQ(result1->Get().PerInstanceName(), "8");
+  ASSERT_EQ(result10->Get().PerInstanceName(), "tv-instance");
+  ASSERT_EQ(result7->Get().PerInstanceName(), "my_favorite_phone");
+  ASSERT_EQ(result11->Get().PerInstanceName(), "tv-instance");
+  ASSERT_EQ(result3->Get().PerInstanceName(), "3_");
+  ASSERT_FALSE(result_invalid.ok());
+}
+
+TEST_F(CvdInstanceDatabaseTest, FindByPerInstanceName) {
+  // starting set up
+  if (!SetUpOk() || !AddGroups({"miau", "nyah"})) {
+    GTEST_SKIP() << Error().msg;
+  }
+  auto& db = GetDb();
+  std::vector<InstanceInfo> miau_group_instance_id_name_pairs{
+      {1, "8"}, {10, "tv_instance"}};
+  std::vector<InstanceInfo> nyah_group_instance_id_name_pairs{
+      {7, "my_favorite_phone"}, {11, "tv_instance"}};
+  auto miau_group = db.FindGroup({kHomeField, Workspace() + "/" + "miau"});
+  auto nyah_group = db.FindGroup({kHomeField, Workspace() + "/" + "nyah"});
+  if (!miau_group.ok() || !nyah_group.ok()) {
+    GTEST_SKIP() << "miau or nyah "
+                 << " group was not found";
+  }
+  if (!AddInstances("miau", miau_group_instance_id_name_pairs) ||
+      !AddInstances("nyah", nyah_group_instance_id_name_pairs)) {
+    GTEST_SKIP() << Error().msg;
+  }
+  // end of set up
+
+  auto result1 = db.FindInstance({kInstanceNameField, "8"});
+  auto result10_and_11 = db.FindInstances({kInstanceNameField, "tv_instance"});
+  auto result7 = db.FindInstance({kInstanceNameField, "my_favorite_phone"});
+  auto result_invalid =
+      db.FindInstance({kInstanceNameField, "name_never_seen"});
+
+  ASSERT_TRUE(result1.ok());
+  ASSERT_TRUE(result10_and_11.ok());
+  ASSERT_TRUE(result7.ok());
+  ASSERT_EQ(result10_and_11->size(), 2);
+  ASSERT_EQ(result1->Get().InstanceId(), 1);
+  ASSERT_EQ(result7->Get().InstanceId(), 7);
+  ASSERT_FALSE(result_invalid.ok());
+}
+
+TEST_F(CvdInstanceDatabaseTest, FindInstancesByGroupName) {
+  // starting set up
+  if (!SetUpOk() || !AddGroups({"miau", "nyah"})) {
+    GTEST_SKIP() << Error().msg;
+  }
+  auto& db = GetDb();
+  std::vector<InstanceInfo> nyah_group_instance_id_name_pairs{
+      {7, "my_favorite_phone"}, {11, "tv_instance"}};
+  auto nyah_group = db.FindGroup({kHomeField, Workspace() + "/" + "nyah"});
+  if (!nyah_group.ok()) {
+    GTEST_SKIP() << "nyah group was not found";
+  }
+  if (!AddInstances("nyah", nyah_group_instance_id_name_pairs)) {
+    GTEST_SKIP() << Error().msg;
+  }
+  // end of set up
+
+  auto result_nyah = db.FindInstances({kGroupNameField, "nyah"});
+  auto result_invalid = db.FindInstance({kGroupNameField, "name_never_seen"});
+
+  ASSERT_TRUE(result_nyah.ok());
+  std::set<std::string> nyah_instance_names;
+  for (const auto& instance : *result_nyah) {
+    nyah_instance_names.insert(instance.Get().PerInstanceName());
+  }
+  std::set<std::string> expected{"my_favorite_phone", "tv_instance"};
+  ASSERT_EQ(nyah_instance_names, expected);
+  ASSERT_FALSE(result_invalid.ok());
+}
+
+TEST_F(CvdInstanceDatabaseTest, FindGroupByPerInstanceName) {
+  // starting set up
+  if (!SetUpOk() || !AddGroups({"miau", "nyah"})) {
+    GTEST_SKIP() << Error().msg;
+  }
+  auto& db = GetDb();
+  std::vector<InstanceInfo> miau_group_instance_id_name_pairs{
+      {1, "8"}, {10, "tv_instance"}};
+  std::vector<InstanceInfo> nyah_group_instance_id_name_pairs{
+      {7, "my_favorite_phone"}, {11, "tv_instance"}};
+  auto miau_group = db.FindGroup({kHomeField, Workspace() + "/" + "miau"});
+  auto nyah_group = db.FindGroup({kHomeField, Workspace() + "/" + "nyah"});
+  if (!miau_group.ok() || !nyah_group.ok()) {
+    GTEST_SKIP() << "miau or nyah "
+                 << " group was not found";
+  }
+  if (!AddInstances("miau", miau_group_instance_id_name_pairs) ||
+      !AddInstances("nyah", nyah_group_instance_id_name_pairs)) {
+    GTEST_SKIP() << Error().msg;
+  }
+  // end of set up
+
+  auto result_miau = db.FindGroups({kInstanceNameField, "8"});
+  auto result_both = db.FindGroups({kInstanceNameField, "tv_instance"});
+  auto result_nyah = db.FindGroups({kInstanceNameField, "my_favorite_phone"});
+  auto result_invalid = db.FindGroups({kInstanceNameField, "name_never_seen"});
+
+  ASSERT_TRUE(result_miau.ok());
+  ASSERT_TRUE(result_both.ok());
+  ASSERT_TRUE(result_nyah.ok());
+  ASSERT_TRUE(result_invalid.ok());
+  ASSERT_EQ(result_miau->size(), 1);
+  ASSERT_EQ(result_both->size(), 2);
+  ASSERT_EQ(result_nyah->size(), 1);
+  ASSERT_TRUE(result_invalid->empty())
+      << "result_invalid should be empty but with size: "
+      << result_invalid->size();
+}
+
+TEST_F(CvdInstanceDatabaseTest, AddInstancesTogether) {
+  // starting set up
+  if (!SetUpOk() || !AddGroups({"miau"})) {
+    GTEST_SKIP() << Error().msg;
+  }
+  auto& db = GetDb();
+  std::vector<InstanceDatabase::InstanceInfo> miau_group_instance_id_name_pairs{
+      {1, "8"}, {10, "tv_instance"}};
+  auto miau_group = db.FindGroup({kHomeField, Workspace() + "/" + "miau"});
+  if (!miau_group.ok()) {
+    GTEST_SKIP() << "miau group was not found";
+  }
+
+  auto add_result = db.AddInstances("miau", miau_group_instance_id_name_pairs);
+  ASSERT_TRUE(add_result.ok()) << add_result.error().Trace();
+
+  auto result_8 = db.FindInstance({kInstanceNameField, "8"});
+  auto result_tv = db.FindInstance({kInstanceNameField, "tv_instance"});
+
+  ASSERT_TRUE(result_8.ok()) << result_8.error().Trace();
+  ASSERT_TRUE(result_tv.ok()) << result_tv.error().Trace();
+}
+
+TEST_F(CvdInstanceDatabaseJsonTest, DumpLoadDumpCompare) {
+  // starting set up
+  if (!SetUpOk() || !AddGroups({"miau"})) {
+    GTEST_SKIP() << Error().msg;
+  }
+  auto& db = GetDb();
+  std::vector<InstanceDatabase::InstanceInfo> miau_group_instance_id_name_pairs{
+      {1, "8"}, {10, "tv_instance"}};
+  auto miau_group = db.FindGroup({kHomeField, Workspace() + "/" + "miau"});
+  if (!miau_group.ok()) {
+    GTEST_SKIP() << "miau group was not found";
+  }
+  auto add_result = db.AddInstances("miau", miau_group_instance_id_name_pairs);
+  if (!add_result.ok()) {
+    GTEST_SKIP() << "Adding instances are not being tested in this test case.";
+  }
+
+  /*
+   * Dumping to json, clearing up the DB, loading from the json,
+   * and then verifying that the DB contents were restored.
+   */
+  auto serialized_db = db.Serialize();
+  if (!db.RemoveInstanceGroup("miau")) {
+    // not testing RemoveInstanceGroup
+    GTEST_SKIP() << "miau had to be added.";
+  }
+  auto json_parsing = ParseJson(serialized_db.toStyledString());
+  ASSERT_TRUE(json_parsing.ok()) << serialized_db << std::endl
+                                 << " is not a valid json.";
+  auto load_result = db.LoadFromJson(serialized_db);
+  ASSERT_TRUE(load_result.ok()) << load_result.error().Trace();
+  {
+    // re-look up the group and the instances
+    auto miau_group = db.FindGroup({kHomeField, Workspace() + "/" + "miau"});
+    ASSERT_TRUE(miau_group.ok()) << miau_group.error().Trace();
+    auto result_8 = db.FindInstance({kInstanceNameField, "8"});
+    auto result_tv = db.FindInstance({kInstanceNameField, "tv_instance"});
+
+    ASSERT_TRUE(result_8.ok()) << result_8.error().Trace();
+    ASSERT_TRUE(result_tv.ok()) << result_tv.error().Trace();
+  }
+}
+
+}  // namespace selector
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/unittests/selector/instance_record_test.cpp b/host/commands/cvd/unittests/selector/instance_record_test.cpp
new file mode 100644
index 0000000..d9485a8
--- /dev/null
+++ b/host/commands/cvd/unittests/selector/instance_record_test.cpp
@@ -0,0 +1,90 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include <gtest/gtest.h>
+
+#include "host/commands/cvd/selector/instance_group_record.h"
+#include "host/commands/cvd/selector/instance_record.h"
+
+namespace cuttlefish {
+namespace selector {
+
+/**
+ * Note that invalid inputs must be tested at the InstanceDatabase level
+ */
+TEST(CvdInstanceRecordUnitTest, Fields) {
+  LocalInstanceGroup parent_group(
+      {.group_name = "super",
+       .home_dir = "/home/user",
+       .host_artifacts_path = "/home/user/download/bin",
+       .product_out_path = "/home/user/download/bin"});
+  if (!parent_group.AddInstance(3, "phone").ok()) {
+    /*
+     * Here's why we skip the test rather than see it as a failure.
+     *
+     * 1. The test is specifically designed for operations in
+     *    LocalInstanceRecord.
+     * 2. Adding an instance to a group is tested in other test suites designed
+     *    for LocalInstanceGroup. It's a failure there but not here.
+     *
+     */
+    GTEST_SKIP() << "Failed to add instance group. Set up failed.";
+  }
+  auto& instances = parent_group.Instances();
+  auto& instance = *instances.cbegin();
+
+  ASSERT_EQ(instance->InstanceId(), 3);
+  ASSERT_EQ(instance->InternalName(), "3");
+  ASSERT_EQ(instance->PerInstanceName(), "phone");
+  ASSERT_EQ(instance->InternalDeviceName(), "cvd-3");
+  ASSERT_EQ(instance->DeviceName(), "super-phone");
+  ASSERT_EQ(std::addressof(instance->ParentGroup()),
+            std::addressof(parent_group));
+}
+
+/**
+ * Note that invalid inputs must be tested at the InstanceDatabase level
+ */
+TEST(CvdInstanceRecordUnitTest, Copy) {
+  LocalInstanceGroup parent_group(
+      {.group_name = "super",
+       .home_dir = "/home/user",
+       .host_artifacts_path = "/home/user/download/bin",
+       .product_out_path = "/home/user/download/bin"});
+  if (!parent_group.AddInstance(3, "phone").ok()) {
+    /*
+     * Here's why we skip the test rather than see it as a failure.
+     *
+     * 1. The test is specifically designed for operations in
+     *    LocalInstanceRecord.
+     * 2. Adding an instance to a group is tested in other test suites designed
+     *    for LocalInstanceGroup. It's a failure there but not here.
+     *
+     */
+    GTEST_SKIP() << "Failed to add instance group. Set up failed.";
+  }
+  auto& instances = parent_group.Instances();
+  auto& instance = *instances.cbegin();
+  auto copy = instance->GetCopy();
+
+  ASSERT_EQ(instance->InstanceId(), copy.InstanceId());
+  ASSERT_EQ(instance->InternalName(), copy.InternalName());
+  ASSERT_EQ(instance->PerInstanceName(), copy.PerInstanceName());
+  ASSERT_EQ(instance->InternalDeviceName(), copy.InternalDeviceName());
+  ASSERT_EQ(instance->DeviceName(), copy.DeviceName());
+}
+
+}  // namespace selector
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/unittests/selector/parser_ids_helper.cpp b/host/commands/cvd/unittests/selector/parser_ids_helper.cpp
new file mode 100644
index 0000000..4c14eef
--- /dev/null
+++ b/host/commands/cvd/unittests/selector/parser_ids_helper.cpp
@@ -0,0 +1,42 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "host/commands/cvd/unittests/selector/parser_ids_helper.h"
+
+#include <sys/types.h>
+#include <unistd.h>
+
+#include <android-base/strings.h>
+
+#include "host/libs/config/cuttlefish_config.h"
+
+namespace cuttlefish {
+namespace selector {
+
+InstanceIdTest::InstanceIdTest() {
+  auto cuttlefish_instance = GetParam().cuttlefish_instance;
+  if (cuttlefish_instance) {
+    envs_[kCuttlefishInstanceEnvVarName] = cuttlefish_instance.value();
+  }
+  uid_ = getuid();
+  cmd_args_ = android::base::Tokenize(GetParam().cmd_args, " ");
+  selector_args_ = android::base::Tokenize(GetParam().selector_args, " ");
+  expected_ids_ = GetParam().expected_ids;
+  expected_result_ = GetParam().expected_result;
+  requested_num_instances_ = GetParam().requested_num_instances;
+}
+
+}  // namespace selector
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/unittests/selector/parser_ids_helper.h b/host/commands/cvd/unittests/selector/parser_ids_helper.h
new file mode 100644
index 0000000..193bff7
--- /dev/null
+++ b/host/commands/cvd/unittests/selector/parser_ids_helper.h
@@ -0,0 +1,53 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#pragma once
+
+#include <optional>
+#include <string>
+#include <unordered_map>
+#include <vector>
+
+#include <gtest/gtest.h>
+
+#include "host/commands/cvd/types.h"
+
+namespace cuttlefish {
+namespace selector {
+
+struct InstanceIdTestInput {
+  std::string cmd_args;
+  std::string selector_args;
+  std::optional<std::string> cuttlefish_instance;
+  std::optional<std::vector<unsigned>> expected_ids;
+  unsigned requested_num_instances;
+  bool expected_result;
+};
+
+class InstanceIdTest : public testing::TestWithParam<InstanceIdTestInput> {
+ protected:
+  InstanceIdTest();
+
+  bool expected_result_;
+  unsigned requested_num_instances_;
+  std::optional<std::vector<unsigned>> expected_ids_;
+  uid_t uid_;
+  cvd_common::Args cmd_args_;
+  cvd_common::Args selector_args_;
+  cvd_common::Envs envs_;
+};
+
+}  // namespace selector
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/unittests/selector/parser_ids_test.cpp b/host/commands/cvd/unittests/selector/parser_ids_test.cpp
new file mode 100644
index 0000000..90db87c
--- /dev/null
+++ b/host/commands/cvd/unittests/selector/parser_ids_test.cpp
@@ -0,0 +1,122 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include <android-base/strings.h>
+#include <gtest/gtest.h>
+
+#include "host/commands/cvd/selector/start_selector_parser.h"
+#include "host/commands/cvd/unittests/selector/parser_ids_helper.h"
+
+namespace cuttlefish {
+namespace selector {
+
+TEST_P(InstanceIdTest, InstanceIdCalculation) {
+  auto parser = StartSelectorParser::ConductSelectFlagsParser(
+      uid_, selector_args_, cmd_args_, envs_);
+
+  ASSERT_EQ(parser.ok(), expected_result_);
+  if (!expected_result_) {
+    return;
+  }
+  ASSERT_EQ(parser->InstanceIds(), expected_ids_);
+  ASSERT_EQ(parser->RequestedNumInstances(), requested_num_instances_);
+}
+
+INSTANTIATE_TEST_SUITE_P(
+    CvdParser, InstanceIdTest,
+    testing::Values(
+        InstanceIdTestInput{.cuttlefish_instance = std::nullopt,
+                            .expected_ids = std::nullopt,
+                            .requested_num_instances = 1,
+                            .expected_result = true},
+        InstanceIdTestInput{.cuttlefish_instance = "8",
+                            .expected_ids = std::vector<unsigned>{8},
+                            .requested_num_instances = 1,
+                            .expected_result = true},
+        InstanceIdTestInput{.cmd_args = "--num_instances=2",
+                            .expected_ids = std::nullopt,
+                            .requested_num_instances = 2,
+                            .expected_result = true},
+        InstanceIdTestInput{.cmd_args = "--num_instances=2",
+                            .cuttlefish_instance = "8",
+                            .expected_ids = std::vector<unsigned>{8, 9},
+                            .requested_num_instances = 2,
+                            .expected_result = true},
+        InstanceIdTestInput{
+            .cmd_args = "--base_instance_num=10 --num_instances=2",
+            .cuttlefish_instance = "8",
+            .expected_ids = std::vector<unsigned>{10, 11},
+            .requested_num_instances = 2,
+            .expected_result = true},
+        InstanceIdTestInput{.cmd_args = "--instance_nums 2",
+                            .cuttlefish_instance = std::nullopt,
+                            .expected_ids = std::vector<unsigned>{2},
+                            .requested_num_instances = 1,
+                            .expected_result = true},
+        InstanceIdTestInput{.cmd_args = "--instance_nums 2,5,6",
+                            .cuttlefish_instance = std::nullopt,
+                            .expected_ids = std::vector<unsigned>{2, 5, 6},
+                            .requested_num_instances = 3,
+                            .expected_result = true},
+        InstanceIdTestInput{
+            .cmd_args = "--instance_nums 2,5,6 --num_instances=3",
+            .cuttlefish_instance = std::nullopt,
+            .expected_ids = std::vector<unsigned>{2, 5, 6},
+            .requested_num_instances = 3,
+            .expected_result = true},
+        InstanceIdTestInput{
+            .cmd_args = "--instance_nums 2,5,6 --num_instances=3",
+            .selector_args = "--instance_name=c-1,c-3,c-5",
+            .cuttlefish_instance = std::nullopt,
+            .expected_ids = std::vector<unsigned>{2, 5, 6},
+            .requested_num_instances = 3,
+            .expected_result = true},
+        InstanceIdTestInput{.selector_args = "--instance_name=c-1,c-3,c-5",
+                            .cuttlefish_instance = std::nullopt,
+                            .expected_ids = std::nullopt,
+                            .requested_num_instances = 3,
+                            .expected_result = true},
+        // CUTTLEFISH_INSTANCE should be ignored
+        InstanceIdTestInput{
+            .cmd_args = "--instance_nums 2,5,6 --num_instances=3",
+            .cuttlefish_instance = "8",
+            .expected_ids = std::vector<unsigned>{2, 5, 6},
+            .requested_num_instances = 3,
+            .expected_result = true},
+        // instance_nums and num_instances mismatch
+        InstanceIdTestInput{
+            .cmd_args = "--instance_nums 2,5,6 --num_instances=7",
+            .cuttlefish_instance = std::nullopt,
+            .expected_ids = std::vector<unsigned>{2, 5, 6},
+            .requested_num_instances = 3,
+            .expected_result = false},
+        // --instance_name requested 2 instances while instance_nums 3.
+        InstanceIdTestInput{
+            .cmd_args = "--num_instances=3 --instance_nums 2,5,6",
+            .selector_args = "--instance_name=c-1,c-3",
+            .cuttlefish_instance = std::nullopt,
+            .expected_ids = std::vector<unsigned>{2, 5, 6},
+            .requested_num_instances = 3,
+            .expected_result = false},
+        // --base_instance_num is not allowed with --instance_nums
+        InstanceIdTestInput{
+            .cmd_args = "--instance_nums 2,5,6 --base_instance_num=7",
+            .cuttlefish_instance = std::nullopt,
+            .expected_ids = std::vector<unsigned>{2, 5, 6},
+            .requested_num_instances = 3,
+            .expected_result = false}));
+
+}  // namespace selector
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/unittests/selector/parser_names_helper.cpp b/host/commands/cvd/unittests/selector/parser_names_helper.cpp
new file mode 100644
index 0000000..812ef25
--- /dev/null
+++ b/host/commands/cvd/unittests/selector/parser_names_helper.cpp
@@ -0,0 +1,39 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "host/commands/cvd/unittests/selector/parser_names_helper.h"
+
+#include <android-base/strings.h>
+#include <gtest/gtest.h>
+
+#include "host/commands/cvd/selector/selector_option_parser_utils.h"
+
+namespace cuttlefish {
+namespace selector {
+
+ValidNamesTest::ValidNamesTest() { Init(); }
+
+void ValidNamesTest::Init() {
+  auto [input, expected_output] = GetParam();
+  selector_args_ = android::base::Tokenize(input, " ");
+  expected_output_ = std::move(expected_output);
+}
+
+InvalidNamesTest::InvalidNamesTest() {
+  selector_args_ = android::base::Tokenize(GetParam(), " ");
+}
+
+}  // namespace selector
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/unittests/selector/parser_names_helper.h b/host/commands/cvd/unittests/selector/parser_names_helper.h
new file mode 100644
index 0000000..ee06838
--- /dev/null
+++ b/host/commands/cvd/unittests/selector/parser_names_helper.h
@@ -0,0 +1,60 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#pragma once
+
+#include <optional>
+#include <string>
+#include <unordered_map>
+#include <unordered_set>
+#include <vector>
+
+#include <gtest/gtest.h>
+
+#include "host/commands/cvd/selector/start_selector_parser.h"
+#include "host/commands/cvd/types.h"
+
+namespace cuttlefish {
+namespace selector {
+
+struct ExpectedOutput {
+  std::optional<std::vector<std::string>> names;
+  std::optional<std::string> group_name;
+  std::optional<std::vector<std::string>> per_instance_names;
+};
+
+struct InputOutput {
+  std::string input;
+  ExpectedOutput expected;
+};
+
+class ValidNamesTest : public testing::TestWithParam<InputOutput> {
+ protected:
+  ValidNamesTest();
+  void Init();
+
+  cvd_common::Args selector_args_;
+  ExpectedOutput expected_output_;
+};
+
+class InvalidNamesTest : public testing::TestWithParam<std::string> {
+ protected:
+  InvalidNamesTest();
+
+  cvd_common::Args selector_args_;
+};
+
+}  // namespace selector
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/unittests/selector/parser_names_test.cpp b/host/commands/cvd/unittests/selector/parser_names_test.cpp
new file mode 100644
index 0000000..b06c125
--- /dev/null
+++ b/host/commands/cvd/unittests/selector/parser_names_test.cpp
@@ -0,0 +1,91 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include <sys/types.h>
+#include <unistd.h>
+
+#include "host/commands/cvd/unittests/selector/parser_names_helper.h"
+
+namespace cuttlefish {
+namespace selector {
+
+TEST_P(ValidNamesTest, ValidInputs) {
+  const uid_t uid = getuid();
+  auto parser = StartSelectorParser::ConductSelectFlagsParser(
+      uid, selector_args_, cvd_common::Args{}, cvd_common::Envs{});
+
+  ASSERT_TRUE(parser.ok());
+}
+
+/**
+ * Note that invalid inputs must be tested at the InstanceDatabase level
+ */
+TEST_P(ValidNamesTest, FieldsNoSubstring) {
+  const uid_t uid = getuid();
+
+  auto parser = StartSelectorParser::ConductSelectFlagsParser(
+      uid, selector_args_, cvd_common::Args{}, cvd_common::Envs{});
+
+  ASSERT_TRUE(parser.ok());
+  ASSERT_EQ(parser->GroupName(), expected_output_.group_name);
+  ASSERT_EQ(parser->PerInstanceNames(), expected_output_.per_instance_names);
+}
+
+INSTANTIATE_TEST_SUITE_P(
+    CvdParser, ValidNamesTest,
+    testing::Values(
+        InputOutput{.input = "--group_name=cf",
+                    .expected = ExpectedOutput{.group_name = "cf"}},
+        InputOutput{.input = "--instance_name=cvd,cf",
+                    .expected = ExpectedOutput{.per_instance_names =
+                                                   std::vector<std::string>{
+                                                       "cvd", "cf"}}},
+        InputOutput{.input = "--instance_name=09-1,tv-2 --group_name cf",
+                    .expected = ExpectedOutput{.group_name = "cf",
+                                               .per_instance_names =
+                                                   std::vector<std::string>{
+                                                       "09-1", "tv-2"}}},
+        InputOutput{
+            .input = "--group_name=cf --instance_name 09",
+            .expected = ExpectedOutput{.group_name = "cf",
+                                       .per_instance_names =
+                                           std::vector<std::string>{"09"}}},
+        InputOutput{.input = "--group_name=my_cool --instance_name=phone-1,tv",
+                    .expected = ExpectedOutput{.group_name = "my_cool",
+                                               .per_instance_names =
+                                                   std::vector<std::string>{
+                                                       "phone-1", "tv"}}},
+        InputOutput{
+            .input = "--instance_name=my-cool",
+            .expected = ExpectedOutput{
+                .per_instance_names = std::vector<std::string>{"my-cool"}}}));
+
+TEST_P(InvalidNamesTest, InvalidInputs) {
+  const uid_t uid = getuid();
+
+  auto parser = StartSelectorParser::ConductSelectFlagsParser(
+      uid, selector_args_, cvd_common::Args{}, cvd_common::Envs{});
+
+  ASSERT_FALSE(parser.ok());
+}
+
+INSTANTIATE_TEST_SUITE_P(CvdParser, InvalidNamesTest,
+                         testing::Values("--group_name", "--group_name=?34",
+                                         "--group_name=ab-cd",
+                                         "--group_name=3a", "--instance_name",
+                                         "--instance_name=*7a"));
+
+}  // namespace selector
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/unittests/server/Android.bp b/host/commands/cvd/unittests/server/Android.bp
new file mode 100644
index 0000000..7ecc113
--- /dev/null
+++ b/host/commands/cvd/unittests/server/Android.bp
@@ -0,0 +1,97 @@
+//
+// Copyright (C) 2023 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+cc_library_host_static {
+    name: "cvd_test_cmd_utils",
+    srcs: [
+        "cmd_runner.cpp",
+        "utils.cpp",
+    ],
+    defaults: ["cvd_lib_defaults"],
+}
+
+cc_test_host {
+    name: "cvd_server_test",
+    srcs: [
+        "autogen_ids_test.cpp",
+        "basic_test.cpp",
+        "clear_test.cpp",
+        "help_test.cpp",
+        "instance_ids_test.cpp",
+    ],
+    static_libs: [
+        "cvd_test_cmd_utils",
+    ],
+    test_options: {
+        unit_test: false,
+    },
+    defaults: ["cvd_and_fetch_cvd_defaults"],
+}
+
+cc_test_host {
+    name: "cvd_acloud_local_test",
+    srcs: [
+        "local_instance_helper.cpp",
+        "local_instance_test.cpp",
+    ],
+    static_libs: [
+        "cvd_test_cmd_utils",
+    ],
+    test_options: {
+        unit_test: false,
+    },
+    defaults: ["cvd_and_fetch_cvd_defaults"],
+}
+
+cc_test_host {
+    name: "cvd_flag_collector_test",
+    srcs: [
+        "collect_flags_test.cpp",
+    ],
+    static_libs: [
+        "cvd_test_cmd_utils",
+    ],
+    test_options: {
+        unit_test: false,
+    },
+    defaults: ["cvd_and_fetch_cvd_defaults"],
+}
+
+cc_test_host {
+    name: "cvd_frontline_parser_test",
+    srcs: [
+        "frontline_parser_test.cpp",
+    ],
+    test_options: {
+        unit_test: true,
+    },
+    defaults: ["cvd_and_fetch_cvd_defaults"],
+}
+
+cc_test_host {
+    name: "cvd_utils_test",
+    srcs: [
+        "common_utils_helper.cpp",
+        "common_utils_test.cpp",
+    ],
+    test_options: {
+        unit_test: false,
+    },
+    defaults: ["cvd_and_fetch_cvd_defaults"],
+}
diff --git a/host/commands/cvd/unittests/server/autogen_ids_test.cpp b/host/commands/cvd/unittests/server/autogen_ids_test.cpp
new file mode 100644
index 0000000..18e20fb
--- /dev/null
+++ b/host/commands/cvd/unittests/server/autogen_ids_test.cpp
@@ -0,0 +1,74 @@
+//
+// Copyright (C) 2023 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include <string>
+#include <vector>
+
+#include <gtest/gtest.h>
+
+#include "common/libs/utils/contains.h"
+#include "host/commands/cvd/types.h"
+#include "host/commands/cvd/unittests/server/cmd_runner.h"
+#include "host/commands/cvd/unittests/server/utils.h"
+
+namespace cuttlefish {
+
+TEST(CvdAutoGenId, CvdTwoFollowedByFive) {
+  cvd_common::Envs envs;
+  envs["HOME"] = StringFromEnv("HOME", "");
+  CmdRunner::Run("cvd reset -y", envs);
+
+  cvd_common::Args start_two_instances_args{
+      "cvd",
+      "--disable_default_group",
+      "start",
+      "--report_anonymous_usage_stats=yes",
+      "--daemon",
+      "--norestart_subprocesses",
+      "--num_instances=2"};
+  cvd_common::Args start_three_instances_args{
+      "cvd",
+      "--disable_default_group",
+      "start",
+      "--report_anonymous_usage_stats=yes",
+      "--daemon",
+      "--norestart_subprocesses",
+      "--num_instances=3"};
+
+  auto cmd_start_two = CmdRunner::Run(start_two_instances_args, envs);
+  ASSERT_TRUE(cmd_start_two.Success()) << cmd_start_two.Stderr();
+  auto cmd_fleet = CmdRunner::Run("cvd fleet", envs);
+  ASSERT_TRUE(cmd_fleet.Success()) << cmd_fleet.Stderr();
+  ASSERT_EQ(NumberOfOccurrences(cmd_fleet.Stdout(), "instance_name"), 2)
+      << cmd_fleet.Stdout();
+
+  auto cmd_start_three = CmdRunner::Run(start_three_instances_args, envs);
+  ASSERT_TRUE(cmd_start_three.Success()) << cmd_start_three.Stderr();
+  cmd_fleet = CmdRunner::Run("cvd fleet", envs);
+  ASSERT_TRUE(cmd_fleet.Success()) << cmd_fleet.Stderr();
+  ASSERT_EQ(NumberOfOccurrences(cmd_fleet.Stdout(), "instance_name"), 5)
+      << cmd_fleet.Stdout();
+
+  auto cmd_stop = CmdRunner::Run("cvd reset -y", envs);
+  ASSERT_TRUE(cmd_stop.Success()) << cmd_stop.Stderr();
+
+  cmd_fleet = CmdRunner::Run("cvd fleet", envs);
+  ASSERT_FALSE(Contains(cmd_fleet.Stdout(), "instance_name"));
+
+  // clean up for the next test
+  CmdRunner::Run("cvd reset -y", envs);
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/unittests/server/basic_test.cpp b/host/commands/cvd/unittests/server/basic_test.cpp
new file mode 100644
index 0000000..ad83ce4
--- /dev/null
+++ b/host/commands/cvd/unittests/server/basic_test.cpp
@@ -0,0 +1,53 @@
+//
+// Copyright (C) 2023 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include <string>
+#include <vector>
+
+#include <gtest/gtest.h>
+
+#include "common/libs/utils/contains.h"
+#include "host/commands/cvd/types.h"
+#include "host/commands/cvd/unittests/server/cmd_runner.h"
+
+namespace cuttlefish {
+
+TEST(CvdBasic, CvdDefaultStart) {
+  cvd_common::Envs envs;
+  const auto home_dir = StringFromEnv("HOME", "");
+  envs["HOME"] = home_dir;
+  CmdRunner::Run("cvd reset -y", envs);
+
+  cvd_common::Args start_args{"cvd", "start",
+                              "--report_anonymous_usage_stats=yes", "--daemon"};
+
+  auto cmd_start = CmdRunner::Run(start_args, envs);
+  ASSERT_TRUE(cmd_start.Success()) << cmd_start.Stderr();
+
+  auto cmd_fleet = CmdRunner::Run("cvd fleet", envs);
+  ASSERT_TRUE(cmd_fleet.Success()) << cmd_fleet.Stderr();
+  ASSERT_TRUE(Contains(cmd_fleet.Stdout(), home_dir));
+
+  auto cmd_stop = CmdRunner::Run("cvd stop", envs);
+  ASSERT_TRUE(cmd_stop.Success()) << cmd_stop.Stderr();
+
+  cmd_fleet = CmdRunner::Run("cvd fleet", envs);
+  ASSERT_FALSE(Contains(cmd_fleet.Stdout(), home_dir));
+
+  // clean up for the next test
+  CmdRunner::Run("cvd reset -y", envs);
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/unittests/server/clear_test.cpp b/host/commands/cvd/unittests/server/clear_test.cpp
new file mode 100644
index 0000000..fe7142e
--- /dev/null
+++ b/host/commands/cvd/unittests/server/clear_test.cpp
@@ -0,0 +1,80 @@
+//
+// Copyright (C) 2023 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include <string>
+#include <vector>
+
+#include <gtest/gtest.h>
+
+#include "common/libs/utils/contains.h"
+#include "host/commands/cvd/types.h"
+#include "host/commands/cvd/unittests/server/cmd_runner.h"
+#include "host/commands/cvd/unittests/server/utils.h"
+
+namespace cuttlefish {
+
+TEST(CvdClear, ClearAfterThreeStarts) {
+  cvd_common::Envs envs;
+  envs["HOME"] = StringFromEnv("HOME", "");
+  CmdRunner::Run("cvd reset -y", envs);
+
+  cvd_common::Args start_two_instances_args{
+      "cvd",
+      "--disable_default_group",
+      "start",
+      "--report_anonymous_usage_stats=yes",
+      "--daemon",
+      "--norestart_subprocesses",
+      "--num_instances=2"};
+  cvd_common::Args start_three_instances_args{
+      "cvd",
+      "--disable_default_group",
+      "start",
+      "--report_anonymous_usage_stats=yes",
+      "--daemon",
+      "--norestart_subprocesses",
+      "--num_instances=3"};
+  cvd_common::Args start_one_instances_args{
+      "cvd",
+      "--disable_default_group",
+      "start",
+      "--report_anonymous_usage_stats=yes",
+      "--daemon",
+      "--norestart_subprocesses",
+      "--num_instances=1"};
+
+  auto cmd_start_two = CmdRunner::Run(start_two_instances_args, envs);
+  ASSERT_TRUE(cmd_start_two.Success()) << cmd_start_two.Stderr();
+  auto cmd_start_three = CmdRunner::Run(start_three_instances_args, envs);
+  ASSERT_TRUE(cmd_start_three.Success()) << cmd_start_three.Stderr();
+  auto cmd_start_one = CmdRunner::Run(start_one_instances_args, envs);
+  ASSERT_TRUE(cmd_start_one.Success()) << cmd_start_one.Stderr();
+
+  auto cmd_fleet = CmdRunner::Run("cvd fleet", envs);
+  ASSERT_TRUE(cmd_fleet.Success()) << cmd_fleet.Stderr();
+  ASSERT_EQ(NumberOfOccurrences(cmd_fleet.Stdout(), "instance_name"), 2 + 3 + 1)
+      << cmd_fleet.Stdout();
+
+  auto cmd_stop = CmdRunner::Run("cvd clear", envs);
+  ASSERT_TRUE(cmd_stop.Success()) << cmd_stop.Stderr();
+
+  cmd_fleet = CmdRunner::Run("cvd fleet", envs);
+  ASSERT_FALSE(Contains(cmd_fleet.Stdout(), "instance_name"));
+
+  // clean up for the next test
+  CmdRunner::Run("cvd reset -y", envs);
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/unittests/server/cmd_runner.cpp b/host/commands/cvd/unittests/server/cmd_runner.cpp
new file mode 100644
index 0000000..07e4e87
--- /dev/null
+++ b/host/commands/cvd/unittests/server/cmd_runner.cpp
@@ -0,0 +1,62 @@
+//
+// Copyright (C) 2023 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "host/commands/cvd/unittests/server/cmd_runner.h"
+
+#include <android-base/strings.h>
+
+namespace cuttlefish {
+
+CmdResult::CmdResult(const std::string& stdout_str,
+                     const std::string& stderr_str, const int ret_code)
+    : stdout_{stdout_str}, stderr_{stderr_str}, code_{ret_code} {}
+
+CmdResult CmdRunner::Run(const cvd_common::Args& args,
+                         const cvd_common::Envs& envs) {
+  if (args.empty() || args.front().empty()) {
+    return CmdResult("", "Empty or invalid command", -1);
+  }
+  const auto& cmd = args.front();
+  cvd_common::Args cmd_args{args.begin() + 1, args.end()};
+  CmdRunner cmd_runner(Command(cmd), cmd_args, envs);
+  return cmd_runner.Run();
+}
+
+CmdResult CmdRunner::Run(const std::string& args,
+                         const cvd_common::Envs& envs) {
+  return CmdRunner::Run(android::base::Tokenize(args, " "), envs);
+}
+
+CmdRunner::CmdRunner(Command&& cmd, const cvd_common::Args& args,
+                     const cvd_common::Envs& envs)
+    : cmd_(std::move(cmd)) {
+  for (const auto& arg : args) {
+    cmd_.AddParameter(arg);
+  }
+  for (const auto& [key, value] : envs) {
+    cmd_.AddEnvironmentVariable(key, value);
+  }
+}
+
+CmdResult CmdRunner::Run() {
+  std::string stdout_str;
+  std::string stderr_str;
+  auto ret_code =
+      RunWithManagedStdio(std::move(cmd_), nullptr, std::addressof(stdout_str),
+                          std::addressof(stderr_str));
+  return CmdResult(stdout_str, stderr_str, ret_code);
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/unittests/server/cmd_runner.h b/host/commands/cvd/unittests/server/cmd_runner.h
new file mode 100644
index 0000000..13eb943
--- /dev/null
+++ b/host/commands/cvd/unittests/server/cmd_runner.h
@@ -0,0 +1,72 @@
+//
+// Copyright (C) 2023 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#pragma once
+
+#include <memory>
+#include <string>
+#include <type_traits>
+#include <vector>
+
+#include "common/libs/utils/contains.h"
+#include "common/libs/utils/environment.h"
+#include "common/libs/utils/files.h"
+#include "common/libs/utils/subprocess.h"
+#include "host/commands/cvd/types.h"
+
+namespace cuttlefish {
+
+class CmdResult {
+ public:
+  CmdResult(const std::string& stdout, const std::string& stderr,
+            const int ret_code);
+  const std::string& Stdout() const { return stdout_; }
+  const std::string& Stderr() const { return stderr_; }
+  int Code() const { return code_; }
+  bool Success() const { return code_ == 0; }
+
+ private:
+  std::string stdout_;
+  std::string stderr_;
+  int code_;
+};
+
+class CmdRunner {
+ public:
+  template <
+      typename... CmdArgs,
+      typename std::enable_if<(sizeof...(CmdArgs) >= 1), bool>::type = true>
+  static CmdResult Run(const std::string& exec, const cvd_common::Envs& envs,
+                       CmdArgs&&... cmd_args) {
+    cvd_common::Args args;
+    args.reserve(sizeof...(CmdArgs));
+    (args.emplace_back(std::forward<CmdArgs>(cmd_args)), ...);
+    CmdRunner cmd_runner(Command(exec), args, envs);
+    return cmd_runner.Run();
+  }
+  static CmdResult Run(const cvd_common::Args& args,
+                       const cvd_common::Envs& envs);
+  static CmdResult Run(const std::string& args, const cvd_common::Envs& envs);
+
+ private:
+  CmdRunner(Command&& cmd, const cvd_common::Args& args,
+            const cvd_common::Envs& envs);
+
+  CmdResult Run();
+
+  Command cmd_;
+};
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/unittests/server/collect_flags_test.cpp b/host/commands/cvd/unittests/server/collect_flags_test.cpp
new file mode 100644
index 0000000..7d283a1
--- /dev/null
+++ b/host/commands/cvd/unittests/server/collect_flags_test.cpp
@@ -0,0 +1,62 @@
+//
+// Copyright (C) 2023 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include <algorithm>
+#include <string>
+#include <vector>
+
+#include <gtest/gtest.h>
+
+#include <android-base/file.h>
+#include <android-base/logging.h>
+
+#include "host/commands/cvd/server_command/flags_collector.h"
+#include "host/commands/cvd/types.h"
+#include "host/commands/cvd/unittests/server/cmd_runner.h"
+
+namespace cuttlefish {
+
+TEST(CvdHelpFlagCollect, LauncCvd) {
+  cvd_common::Envs envs;
+  const auto home_dir = StringFromEnv("HOME", "");
+  envs["HOME"] = home_dir;
+  const auto android_host_out = StringFromEnv(
+      "ANDROID_HOST_OUT",
+      android::base::Dirname(android::base::GetExecutableDirectory()));
+  envs["ANDROID_HOST_OUT"] = android_host_out;
+  const auto launch_cvd_path = android_host_out + "/bin/launch_cvd";
+  if (!FileExists(launch_cvd_path)) {
+    GTEST_SKIP() << "Can't find " << launch_cvd_path << " for testing.";
+  }
+  CmdRunner::Run("cvd kill-server", envs);
+  cvd_common::Args helpxml_args{launch_cvd_path, "--helpxml"};
+
+  auto cmd_help_xml = CmdRunner::Run(helpxml_args, envs);
+  auto flags_opt = CollectFlagsFromHelpxml(cmd_help_xml.Stdout());
+
+  ASSERT_FALSE(cmd_help_xml.Stdout().empty()) << "output shouldn't be empty.";
+  ASSERT_TRUE(flags_opt);
+  auto& flags = *flags_opt;
+  auto daemon_flag_itr = std::find_if(
+      flags.cbegin(), flags.cend(),
+      [](const FlagInfoPtr& ptr) { return (ptr && ptr->Name() == "daemon"); });
+  auto bad_flag_itr = std::find_if(
+      flags.cbegin(), flags.cend(),
+      [](const FlagInfoPtr& ptr) { return (ptr && ptr->Name() == "@bad@"); });
+  ASSERT_NE(daemon_flag_itr, flags.cend());
+  ASSERT_EQ(bad_flag_itr, flags.cend());
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/unittests/server/common_utils_helper.cpp b/host/commands/cvd/unittests/server/common_utils_helper.cpp
new file mode 100644
index 0000000..0b29134
--- /dev/null
+++ b/host/commands/cvd/unittests/server/common_utils_helper.cpp
@@ -0,0 +1,37 @@
+//
+// Copyright (C) 2023 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "host/commands/cvd/unittests/server/common_utils_helper.h"
+
+namespace cuttlefish {
+
+EmulateAbsolutePathBase::EmulateAbsolutePathBase() {
+  input_path_ = GetParam().path_to_convert_;
+  expected_path_ = GetParam().expected_;
+}
+
+EmulateAbsolutePathWithPwd::EmulateAbsolutePathWithPwd() {
+  input_path_ = GetParam().path_to_convert_;
+  expected_path_ = GetParam().expected_;
+  current_dir_ = GetParam().working_dir_;
+}
+
+EmulateAbsolutePathWithHome::EmulateAbsolutePathWithHome() {
+  input_path_ = GetParam().path_to_convert_;
+  expected_path_ = GetParam().expected_;
+  home_dir_ = GetParam().home_dir_;
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/unittests/server/common_utils_helper.h b/host/commands/cvd/unittests/server/common_utils_helper.h
new file mode 100644
index 0000000..54ee479
--- /dev/null
+++ b/host/commands/cvd/unittests/server/common_utils_helper.h
@@ -0,0 +1,60 @@
+//
+// Copyright (C) 2023 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#pragma once
+
+#include <optional>
+#include <string>
+
+#include <gtest/gtest.h>
+
+#include "host/commands/cvd/common_utils.h"
+
+namespace cuttlefish {
+
+struct InputOutput {
+  std::string path_to_convert_;
+  std::string working_dir_;
+  std::string home_dir_;
+  std::string expected_;
+};
+
+class EmulateAbsolutePathBase : public testing::TestWithParam<InputOutput> {
+ protected:
+  EmulateAbsolutePathBase();
+
+  std::string input_path_;
+  std::string expected_path_;
+};
+
+class EmulateAbsolutePathWithPwd : public testing::TestWithParam<InputOutput> {
+ protected:
+  EmulateAbsolutePathWithPwd();
+
+  std::string input_path_;
+  std::string current_dir_;
+  std::string expected_path_;
+};
+
+class EmulateAbsolutePathWithHome : public EmulateAbsolutePathBase {
+ protected:
+  EmulateAbsolutePathWithHome();
+
+  std::string input_path_;
+  std::string home_dir_;
+  std::string expected_path_;
+};
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/unittests/server/common_utils_test.cpp b/host/commands/cvd/unittests/server/common_utils_test.cpp
new file mode 100644
index 0000000..3b03dab
--- /dev/null
+++ b/host/commands/cvd/unittests/server/common_utils_test.cpp
@@ -0,0 +1,103 @@
+//
+// Copyright (C) 2023 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "host/commands/cvd/unittests/server/common_utils_helper.h"
+
+namespace cuttlefish {
+
+TEST_P(EmulateAbsolutePathBase, NoHomeNoPwd) {
+  const bool follow_symlink = false;
+  auto emulated_absolute_path =
+      EmulateAbsolutePath({.current_working_dir = std::nullopt,
+                           .home_dir = std::nullopt,
+                           .path_to_convert = input_path_,
+                           .follow_symlink = follow_symlink});
+
+  ASSERT_TRUE(emulated_absolute_path.ok())
+      << emulated_absolute_path.error().Trace();
+  ASSERT_EQ(*emulated_absolute_path, expected_path_);
+}
+
+INSTANTIATE_TEST_SUITE_P(
+    CommonUtilsTest, EmulateAbsolutePathBase,
+    testing::Values(InputOutput{.path_to_convert_ = "/", .expected_ = "/"},
+                    InputOutput{.path_to_convert_ = "", .expected_ = ""},
+                    InputOutput{.path_to_convert_ = "/a/b/c/",
+                                .expected_ = "/a/b/c"},
+                    InputOutput{.path_to_convert_ = "/a", .expected_ = "/a"}));
+
+TEST_P(EmulateAbsolutePathWithPwd, NoHomeYesPwd) {
+  const bool follow_symlink = false;
+  auto emulated_absolute_path =
+      EmulateAbsolutePath({.current_working_dir = current_dir_,
+                           .home_dir = "/a/b/c",
+                           .path_to_convert = input_path_,
+                           .follow_symlink = follow_symlink});
+
+  ASSERT_TRUE(emulated_absolute_path.ok())
+      << emulated_absolute_path.error().Trace();
+  ASSERT_EQ(*emulated_absolute_path, expected_path_);
+}
+
+INSTANTIATE_TEST_SUITE_P(
+    CommonUtilsTest, EmulateAbsolutePathWithPwd,
+    testing::Values(InputOutput{.working_dir_ = "/x/y/z",
+                                .path_to_convert_ = "",
+                                .expected_ = ""},
+                    InputOutput{.working_dir_ = "/x/y/z",
+                                .path_to_convert_ = "a",
+                                .expected_ = "/x/y/z/a"},
+                    InputOutput{.working_dir_ = "/x/y/z",
+                                .path_to_convert_ = ".",
+                                .expected_ = "/x/y/z"},
+                    InputOutput{.working_dir_ = "/x/y/z",
+                                .path_to_convert_ = "..",
+                                .expected_ = "/x/y"},
+                    InputOutput{.working_dir_ = "/x/y/z",
+                                .path_to_convert_ = "./k/../../t/./q",
+                                .expected_ = "/x/y/t/q"}));
+
+TEST_P(EmulateAbsolutePathWithHome, YesHomeNoPwd) {
+  const bool follow_symlink = false;
+  auto emulated_absolute_path =
+      EmulateAbsolutePath({.current_working_dir = std::nullopt,
+                           .home_dir = home_dir_,
+                           .path_to_convert = input_path_,
+                           .follow_symlink = follow_symlink});
+
+  ASSERT_TRUE(emulated_absolute_path.ok())
+      << emulated_absolute_path.error().Trace();
+  ASSERT_EQ(*emulated_absolute_path, expected_path_);
+}
+
+INSTANTIATE_TEST_SUITE_P(
+    CommonUtilsTest, EmulateAbsolutePathWithHome,
+    testing::Values(InputOutput{.home_dir_ = "/x/y/z",
+                                .path_to_convert_ = "~",
+                                .expected_ = "/x/y/z"},
+                    InputOutput{.home_dir_ = "/x/y/z",
+                                .path_to_convert_ = "~/a",
+                                .expected_ = "/x/y/z/a"},
+                    InputOutput{.home_dir_ = "/x/y/z",
+                                .path_to_convert_ = "~/.",
+                                .expected_ = "/x/y/z"},
+                    InputOutput{.home_dir_ = "/x/y/z",
+                                .path_to_convert_ = "~/..",
+                                .expected_ = "/x/y"},
+                    InputOutput{.home_dir_ = "/x/y/z",
+                                .path_to_convert_ = "~/k/../../t/./q",
+                                .expected_ = "/x/y/t/q"}));
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/unittests/server/frontline_parser_test.cpp b/host/commands/cvd/unittests/server/frontline_parser_test.cpp
new file mode 100644
index 0000000..4509e17
--- /dev/null
+++ b/host/commands/cvd/unittests/server/frontline_parser_test.cpp
@@ -0,0 +1,69 @@
+//
+// Copyright (C) 2023 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include <iostream>
+#include <optional>
+#include <string>
+
+#include <gtest/gtest.h>
+
+#include "host/commands/cvd/frontline_parser.h"
+
+namespace std {  // NOTE(review): adding overloads to namespace std is UB ([namespace.std]); gtest's PrintTo in the project namespace is the sanctioned alternative -- TODO confirm/relocate
+
+template <typename T>
+static std::ostream& operator<<(std::ostream& out, const std::vector<T>& v) {  // pretty-print a vector as {a, b, c} for gtest failure messages
+  if (v.empty()) {
+    out << "{}";  // empty vector prints as {}
+    return out;
+  }
+  if (v.size() == 1) {
+    out << "{" << v.front() << "}";  // single element: no separator needed
+    return out;
+  }
+  out << "{";
+  for (size_t i = 0; i != v.size() - 1; i++) {
+    out << v.at(i) << ", ";  // all but the last element, comma-separated
+  }
+  out << v.back() << "}";  // last element closes the list
+  return out;
+}
+
+}  // namespace std
+
+namespace cuttlefish {
+
+TEST(FrontlineParserTest, CvdOnly) {
+  cvd_common::Args input{"cvd"};
+  FlagCollection empty_flags;
+  FrontlineParser::ParserParam parser_param{.server_supported_subcmds = {},
+                                            .internal_cmds = {},
+                                            .all_args = {"cvd"},
+                                            .cvd_flags = empty_flags};
+
+  auto result = FrontlineParser::Parse(parser_param);
+
+  ASSERT_TRUE(result.ok()) << result.error().Trace();
+  auto& parser_ptr = *result;
+  ASSERT_TRUE(parser_ptr);
+  ASSERT_EQ("cvd", parser_ptr->ProgPath());
+  ASSERT_EQ(std::nullopt, parser_ptr->SubCmd())  // fixed inverted ternary: old code dereferenced the empty optional (UB)
+      << (parser_ptr->SubCmd() ? *parser_ptr->SubCmd()
+                               : std::string("nullopt"));
+  ASSERT_EQ(cvd_common::Args{}, parser_ptr->SubCmdArgs());
+  ASSERT_EQ(cvd_common::Args{}, parser_ptr->CvdArgs());
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/unittests/server/help_test.cpp b/host/commands/cvd/unittests/server/help_test.cpp
new file mode 100644
index 0000000..a5de705
--- /dev/null
+++ b/host/commands/cvd/unittests/server/help_test.cpp
@@ -0,0 +1,140 @@
+//
+// Copyright (C) 2023 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include <string>
+#include <vector>
+
+#include <gtest/gtest.h>
+
+#include "common/libs/utils/contains.h"
+#include "host/commands/cvd/types.h"
+#include "host/commands/cvd/unittests/server/cmd_runner.h"
+
+namespace cuttlefish {
+namespace {
+
+bool ContainsAll(const std::string& stream,
+                 const std::vector<std::string>& tokens) {
+  for (const auto& token : tokens) {
+    if (!Contains(stream, token)) {
+      return false;
+    }
+  }
+  return true;
+}
+
+/*
+ * Sees if this might be cvd --help output.
+ *
+ * Not very accurate.
+ */
+bool MaybeCvdHelp(const CmdResult& result) {
+  const auto& stdout = result.Stdout();
+  return ContainsAll(stdout, {"help", "start", "stop", "fleet"});
+}
+
+bool MaybeCvdStop(const CmdResult& result) {
+  const auto& stderr = result.Stderr();
+  const auto& stdout = result.Stdout();
+  return Contains(stderr, "cvd_internal_stop") ||
+         Contains(stdout, "cvd_internal_stop") ||
+         Contains(stderr, "stop_cvd") || Contains(stdout, "stop_cvd");
+}
+
+bool MaybeCvdStart(const CmdResult& result) {
+  const auto& stdout = result.Stdout();
+  return ContainsAll(stdout, {"vhost", "modem", "daemon", "adb"});
+}
+
+}  // namespace
+
+TEST(CvdDriver, CvdHelp) {
+  cvd_common::Envs envs;
+  CmdRunner::Run("cvd reset -y", envs);
+
+  auto cmd_help = CmdRunner::Run("cvd help", envs);
+  auto cmd_dash_help = CmdRunner::Run("cvd --help", envs);
+
+  ASSERT_TRUE(cmd_help.Success()) << cmd_help.Stderr();
+  ASSERT_TRUE(MaybeCvdHelp(cmd_help));
+  ASSERT_TRUE(cmd_dash_help.Success()) << cmd_dash_help.Stderr();
+  ASSERT_TRUE(MaybeCvdHelp(cmd_dash_help));
+
+  // clean up for the next test
+  CmdRunner::Run("cvd reset -y", envs);
+}
+
+TEST(CvdDriver, CvdOnly) {
+  cvd_common::Envs envs;
+  CmdRunner::Run("cvd reset -y", envs);
+
+  auto cmd_help = CmdRunner::Run("cvd help", envs);
+  auto cmd_only = CmdRunner::Run("cvd", envs);
+
+  ASSERT_TRUE(cmd_help.Success()) << cmd_help.Stderr();
+  ASSERT_TRUE(cmd_only.Success()) << cmd_only.Stderr();
+  ASSERT_EQ(cmd_help.Stdout(), cmd_only.Stdout());
+
+  // clean up for the next test
+  CmdRunner::Run("cvd reset -y", envs);
+}
+
+// this test is expected to fail. included proactively.
+TEST(CvdDriver, CvdHelpWrong) {
+  cvd_common::Envs envs;
+  CmdRunner::Run("cvd reset -y", envs);
+
+  auto cmd_help_ref = CmdRunner::Run("cvd help", envs);
+  auto cmd_help_wrong = CmdRunner::Run("cvd help not_exist", envs);
+
+  EXPECT_TRUE(cmd_help_ref.Success()) << cmd_help_ref.Stderr();
+  EXPECT_TRUE(cmd_help_wrong.Success()) << cmd_help_wrong.Stderr();
+  EXPECT_EQ(cmd_help_ref.Stdout(), cmd_help_wrong.Stdout());
+
+  // clean up for the next test
+  CmdRunner::Run("cvd reset -y", envs);
+}
+
+TEST(CvdSubtool, CvdStopHelp) {
+  cvd_common::Envs envs;
+  CmdRunner::Run("cvd reset -y", envs);
+
+  auto cmd_stop_help = CmdRunner::Run("cvd help stop", envs);
+
+  ASSERT_TRUE(cmd_stop_help.Success()) << cmd_stop_help.Stderr();
+  ASSERT_TRUE(MaybeCvdStop(cmd_stop_help))
+      << "stderr: " << cmd_stop_help.Stderr()
+      << "stdout: " << cmd_stop_help.Stdout();
+
+  // clean up for the next test
+  CmdRunner::Run("cvd reset -y", envs);
+}
+
+TEST(CvdSubtool, CvdStartHelp) {
+  cvd_common::Envs envs;
+  CmdRunner::Run("cvd reset -y", envs);
+
+  auto cmd_start_help = CmdRunner::Run("cvd help start", envs);
+
+  ASSERT_TRUE(cmd_start_help.Success()) << cmd_start_help.Stderr();
+  ASSERT_TRUE(MaybeCvdStart(cmd_start_help))
+      << "stderr: " << cmd_start_help.Stderr()
+      << "stdout: " << cmd_start_help.Stdout();
+
+  // clean up for the next test
+  CmdRunner::Run("cvd reset -y", envs);
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/unittests/server/instance_ids_test.cpp b/host/commands/cvd/unittests/server/instance_ids_test.cpp
new file mode 100644
index 0000000..345d9fe
--- /dev/null
+++ b/host/commands/cvd/unittests/server/instance_ids_test.cpp
@@ -0,0 +1,83 @@
+//
+// Copyright (C) 2023 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include <string>
+#include <vector>
+
+#include <gtest/gtest.h>
+
+#include "common/libs/utils/contains.h"
+#include "host/commands/cvd/types.h"
+#include "host/commands/cvd/unittests/server/cmd_runner.h"
+#include "host/commands/cvd/unittests/server/utils.h"
+
+namespace cuttlefish {
+
+TEST(CvdInstanceIds, CvdTakenInstanceIds) {
+  cvd_common::Envs envs;
+  envs["HOME"] = StringFromEnv("HOME", "");
+  CmdRunner::Run("cvd reset -y", envs);
+
+  cvd_common::Args start_1_2_args{"cvd",
+                                  "--disable_default_group",
+                                  "start",
+                                  "--report_anonymous_usage_stats=yes",
+                                  "--daemon",
+                                  "--norestart_subprocesses",
+                                  "--instance_nums=1,2"};
+  cvd_common::Args start_3_args{"cvd",
+                                "--disable_default_group",
+                                "start",
+                                "--report_anonymous_usage_stats=yes",
+                                "--daemon",
+                                "--norestart_subprocesses",
+                                "--instance_nums=3"};
+  cvd_common::Args start_4_5_6_args{"cvd",
+                                    "--disable_default_group",
+                                    "start",
+                                    "--report_anonymous_usage_stats=yes",
+                                    "--daemon",
+                                    "--norestart_subprocesses",
+                                    "--instance_nums=4,5,6"};
+  cvd_common::Args start_5_7_args{"cvd",
+                                  "--disable_default_group",
+                                  "start",
+                                  "--report_anonymous_usage_stats=yes",
+                                  "--daemon",
+                                  "--norestart_subprocesses",
+                                  "--instance_nums=5,7"};  // was copy-pasted "4,5,6"; 5 is taken and 7 is free
+
+  auto cmd_start_1_2 = CmdRunner::Run(start_1_2_args, envs);
+  auto cmd_start_3 = CmdRunner::Run(start_3_args, envs);
+  auto cmd_start_4_5_6 = CmdRunner::Run(start_4_5_6_args, envs);
+  ASSERT_TRUE(cmd_start_1_2.Success()) << cmd_start_1_2.Stderr();
+  ASSERT_TRUE(cmd_start_3.Success()) << cmd_start_3.Stderr();
+  ASSERT_TRUE(cmd_start_4_5_6.Success()) << cmd_start_4_5_6.Stderr();
+
+  auto cmd_fleet = CmdRunner::Run("cvd fleet", envs);
+  ASSERT_TRUE(cmd_fleet.Success()) << cmd_fleet.Stderr();
+  ASSERT_EQ(NumberOfOccurrences(cmd_fleet.Stdout(), "instance_name"), 6)
+      << cmd_fleet.Stdout();
+
+  auto cmd_3_to_fail = CmdRunner::Run(start_3_args, envs);
+  auto cmd_5_7_to_fail = CmdRunner::Run(start_5_7_args, envs);
+  ASSERT_FALSE(cmd_3_to_fail.Success()) << cmd_3_to_fail.Stderr();  // id 3 already taken; was re-asserting cmd_start_3
+  ASSERT_FALSE(cmd_5_7_to_fail.Success()) << cmd_5_7_to_fail.Stderr();  // id 5 already taken; was re-asserting cmd_start_4_5_6
+
+  // clean up for the next test
+  CmdRunner::Run("cvd reset -y", envs);
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/unittests/server/local_instance_helper.cpp b/host/commands/cvd/unittests/server/local_instance_helper.cpp
new file mode 100644
index 0000000..4ef5bad
--- /dev/null
+++ b/host/commands/cvd/unittests/server/local_instance_helper.cpp
@@ -0,0 +1,42 @@
+//
+// Copyright (C) 2023 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "host/commands/cvd/unittests/server/local_instance_helper.h"
+
+#include "host/commands/cvd/types.h"
+
+namespace cuttlefish {
+namespace acloud {
+
+CvdInstanceLocalTest::CvdInstanceLocalTest() { InitCmd(); }
+
+CmdResult CvdInstanceLocalTest::Execute(const std::string& cmd_) {
+  cvd_common::Envs envs;
+  CmdResult result = CmdRunner::Run(cmd_, envs);
+
+  CmdRunner::Run("cvd stop", envs);  // best-effort stop; result ignored (was an unused local)
+  // clean up for the next test
+  CmdRunner::Run("cvd reset -y", envs);
+
+  return result;
+}
+
+void CvdInstanceLocalTest::InitCmd() {
+  cvd_common::Envs envs;
+  CmdRunner::Run("cvd reset -y", envs);
+}
+
+}  // namespace acloud
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/unittests/server/local_instance_helper.h b/host/commands/cvd/unittests/server/local_instance_helper.h
new file mode 100644
index 0000000..0ec9694
--- /dev/null
+++ b/host/commands/cvd/unittests/server/local_instance_helper.h
@@ -0,0 +1,41 @@
+//
+// Copyright (C) 2023 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#pragma once
+
+#include <string>
+
+#include <gtest/gtest.h>
+
+#include "host/commands/cvd/unittests/server/cmd_runner.h"
+
+namespace cuttlefish {
+namespace acloud {
+
+/**
+ * Creates a mock cmd_ command line, and execute the test flow
+ *
+ */
+class CvdInstanceLocalTest : public ::testing::Test {
+ protected:
+  CvdInstanceLocalTest();
+  CmdResult Execute(const std::string& cmd_);
+
+ private:
+  void InitCmd();
+};
+
+}  // namespace acloud
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/unittests/server/local_instance_test.cpp b/host/commands/cvd/unittests/server/local_instance_test.cpp
new file mode 100644
index 0000000..af8137c
--- /dev/null
+++ b/host/commands/cvd/unittests/server/local_instance_test.cpp
@@ -0,0 +1,142 @@
+//
+// Copyright (C) 2023 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include <random>
+#include <string>
+#include <vector>
+
+#include <gtest/gtest.h>
+
+#include "common/libs/utils/contains.h"
+#include "host/commands/cvd/types.h"
+#include "host/commands/cvd/unittests/server/local_instance_helper.h"
+
+namespace cuttlefish {
+namespace acloud {
+
+TEST(CvdDriver, CvdLocalInstance) {
+  cvd_common::Envs envs;
+  CmdRunner::Run("cvd reset -y", envs);
+
+  // 1st test normal case
+  auto cmd_local_instance_local_image =
+      CmdRunner::Run("cvd acloud create --local-instance --local-image", envs);
+  ASSERT_TRUE(cmd_local_instance_local_image.Success())
+      << cmd_local_instance_local_image.Stderr();
+  auto cmd_stop = CmdRunner::Run("cvd stop", envs);
+  ASSERT_TRUE(cmd_stop.Success()) << cmd_stop.Stderr();
+
+  // 2nd test random id input
+  std::random_device rd;
+  std::default_random_engine mt(rd());
+  std::uniform_int_distribution<int> dist(1, 10);
+
+  // randomly generate instance id within 1-10, id 0 has been used
+  std::string id = std::to_string(dist(mt));
+  std::string cmd_str = "cvd acloud create --local-instance " + id;
+  cmd_str += " --local-image";
+  auto cmd_id = CmdRunner::Run(cmd_str, envs);
+  ASSERT_TRUE(cmd_id.Success()) << cmd_id.Stderr();
+
+  auto cmd_fleet = CmdRunner::Run("cvd fleet", envs);
+  ASSERT_TRUE(cmd_fleet.Success()) << cmd_fleet.Stderr();
+  ASSERT_TRUE(Contains(cmd_fleet.Stdout(), "cvd-" + id));
+
+  cmd_stop = CmdRunner::Run("cvd stop", envs);
+  ASSERT_TRUE(cmd_stop.Success()) << cmd_stop.Stderr();
+
+  cmd_fleet = CmdRunner::Run("cvd fleet", envs);
+  ASSERT_TRUE(cmd_fleet.Success()) << cmd_fleet.Stderr();
+  ASSERT_FALSE(Contains(cmd_fleet.Stdout(), "cvd-" + id));
+
+  // 3rd test local instance --local-boot-image
+  const auto product_out_dir = StringFromEnv("ANDROID_PRODUCT_OUT", "");
+  cmd_str =
+      "cvd acloud create --local-instance --local-image --local-boot-image " +
+      product_out_dir;
+  cmd_str += "/boot.img";
+  auto cmd_local_boot_image = CmdRunner::Run(cmd_str, envs);
+  ASSERT_TRUE(cmd_local_boot_image.Success()) << cmd_local_boot_image.Stderr();
+  cmd_stop = CmdRunner::Run("cvd stop", envs);
+  ASSERT_TRUE(cmd_stop.Success()) << cmd_stop.Stderr();
+
+  // clean up for the next test
+  CmdRunner::Run("cvd reset -y", envs);
+}
+
+TEST_F(CvdInstanceLocalTest, CvdLocalInstanceRemoteImage) {
+  // 4th test local instance, remote image, --branch, --build-id flags
+  auto cmd_result = Execute("cvd acloud create --local-instance --build-id "
+      "9759836 --branch git_master --build-target cf_x86_64_phone-userdebug "
+      "--bootloader-branch aosp_u-boot-mainline --bootloader-build-id "
+      "9602025 --bootloader-build-target u-boot_crosvm_x86_64");
+  ASSERT_TRUE(cmd_result.Success()) << cmd_result.Stderr();
+}
+
+TEST(CvdDriver, CvdLocalInstanceRemoteImageKernelImage) {
+  cvd_common::Envs envs;
+  CmdRunner::Run("cvd reset -y", envs);
+
+  // 5th test local instance, remote image, --kernel-branch, --kernel-build-id,
+  // --kernel-build-target, --image-download-dir --build-target flags
+  auto cmd_kernel_build = CmdRunner::Run(
+      "cvd acloud create --local-instance --branch "
+      "git_master --build-target cf_x86_64_phone-userdebug --kernel-branch "
+      "aosp_kernel-common-android13-5.10 --kernel-build-id 9600402 "
+      "--kernel-build-target kernel_virt_x86_64 --image-download-dir "
+      "/tmp/acloud_cvd_temp/test123",
+      envs);
+  ASSERT_TRUE(cmd_kernel_build.Success()) << cmd_kernel_build.Stderr();
+  auto cmd_stop = CmdRunner::Run("cvd stop", envs);
+  // after this command, the 5.10 kernel image should be downloaded at
+  // /tmp/acloud_cvd_temp/test123/acloud_image_artifacts/9594220cf_x86_64_phone-userdebug
+  // I will re-use this pre-built kernel image for later testing
+
+  // 6th test local instance, local-kernel-image, --branch
+  auto cmd_local_kernel_image = CmdRunner::Run(
+      "cvd acloud create --local-instance --branch git_master  --build-target "
+      "cf_x86_64_phone-userdebug --local-kernel-image "
+      "/tmp/acloud_cvd_temp/test123/acloud_image_artifacts/"
+      "9695745cf_x86_64_phone-userdebug",
+      envs);
+  ASSERT_TRUE(cmd_local_kernel_image.Success())
+      << cmd_local_kernel_image.Stderr();
+  cmd_stop = CmdRunner::Run("cvd stop", envs);
+
+  // clean up for the next test
+  CmdRunner::Run("cvd reset -y", envs);
+}
+
+// CvdInstanceLocalTest is testing different flags with "cvd acloud create --local-instance"
+TEST_F(CvdInstanceLocalTest, CvdLocalInstanceRemoteImageBootloader) {
+  // 7th test --bootloader-branch --bootloader-build-id
+  // --bootloader-build-target
+  auto cmd_result = Execute("cvd acloud create --local-instance "
+      "--branch git_master --build-target cf_x86_64_phone-userdebug "
+      "--bootloader-branch aosp_u-boot-mainline --bootloader-build-id 9602025 "
+      "--bootloader-build-target u-boot_crosvm_x86_64");
+  ASSERT_TRUE(cmd_result.Success()) << cmd_result.Stderr();
+}
+
+TEST_F(CvdInstanceLocalTest, CvdLocalInstanceRemoteImageSystem) {
+  // 8th --system-branch, --system-build-id, --system-build-target
+  auto cmd_result = Execute("cvd acloud create --local-instance --branch git_master "
+      "--build-target cf_x86_64_phone-userdebug --system-branch git_master "
+      "--system-build-id 9684420 --system-build-target aosp_x86_64-userdebug");
+  ASSERT_TRUE(cmd_result.Success()) << cmd_result.Stderr();
+}
+
+}  // namespace acloud
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/unittests/server/utils.cpp b/host/commands/cvd/unittests/server/utils.cpp
new file mode 100644
index 0000000..2f211c1
--- /dev/null
+++ b/host/commands/cvd/unittests/server/utils.cpp
@@ -0,0 +1,30 @@
+//
+// Copyright (C) 2023 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "host/commands/cvd/unittests/server/utils.h"
+
+namespace cuttlefish {
+
+// Counts (possibly overlapping) occurrences of substr in str.
+int NumberOfOccurrences(const std::string& str, const std::string& substr) {
+  int cnt = 0;
+  auto pos = str.find(substr, 0);  // was int: narrowed size_type and only matched npos by accidental -1 conversion
+  while (pos != std::string::npos) {
+    ++cnt;
+    pos = str.find(substr, pos + 1);  // pos + 1 (not pos + substr.size()): overlapping matches counted
+  }
+  return cnt;
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd/unittests/server/utils.h b/host/commands/cvd/unittests/server/utils.h
new file mode 100644
index 0000000..4bddab0
--- /dev/null
+++ b/host/commands/cvd/unittests/server/utils.h
@@ -0,0 +1,24 @@
+//
+// Copyright (C) 2023 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#pragma once
+
+#include <string>
+
+namespace cuttlefish {
+
+int NumberOfOccurrences(const std::string& str, const std::string& substr);
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd_env/Android.bp b/host/commands/cvd_env/Android.bp
new file mode 100644
index 0000000..6a96d51
--- /dev/null
+++ b/host/commands/cvd_env/Android.bp
@@ -0,0 +1,46 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+cc_binary_host {
+    name: "cvd_internal_env",
+    srcs: [
+        "main.cc",
+    ],
+    shared_libs: [
+        "libbase",
+        "libcuttlefish_fs",
+        "libcuttlefish_utils",
+        "libfruit",
+        "libjsoncpp",
+        "libgrpc++",
+        "libprotobuf-cpp-full",
+    ],
+    static_libs: [
+        "libc++fs",
+        "libcuttlefish_host_config",
+        "libcuttlefish_vm_manager",
+        "libgflags",
+        "grpc_cli_libs",
+    ],
+    cflags: [
+        "-Wno-unused-parameter",
+    ],
+    cpp_std: "c++17",
+    defaults: ["cuttlefish_host", "cuttlefish_libicuuc"],
+}
diff --git a/host/commands/cvd_env/main.cc b/host/commands/cvd_env/main.cc
new file mode 100644
index 0000000..ac14580
--- /dev/null
+++ b/host/commands/cvd_env/main.cc
@@ -0,0 +1,356 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <filesystem>
+#include <unordered_map>
+
+#include <android-base/logging.h>
+#include <android-base/strings.h>
+#include <gflags/gflags.h>
+#include <grpcpp/ext/proto_server_reflection_plugin.h>
+#include <grpcpp/grpcpp.h>
+#include <test/cpp/util/grpc_tool.h>
+#include <test/cpp/util/test_config.h>
+
+#include "common/libs/utils/contains.h"
+#include "common/libs/utils/result.h"
+#include "host/libs/config/cuttlefish_config.h"
+
+using grpc::InsecureChannelCredentials;
+
+namespace cuttlefish {
+namespace {
+
+constexpr char kCvdEnvHelpMessage[] =
+    "cvd env: cuttlefish environment controller\n"
+    "Basic usage: cvd [selector options] env [sub_command] [args] [options]\n"
+    "Sub commands:\n"
+    "  ls: list services and methods for given arguments\n"
+    "    Usage: cvd [selector options] env ls [service] [method] [-l]\n"
+    "      service(optional) : gRPC service name\n"
+    "      method(optional)  : method name for given service\n"
+    "      -l(optional)      : Use a long listing format\n"
+    "  type: get detailed information for given request/reply type\n"
+    "    Usage: cvd [selector options] env type [service] [method] [type]\n"
+    "      service           : gRPC service name\n"
+    "      method            : method name in given service\n"
+    "      type              : Protocol buffer type name in given method\n"
+    "  call: request a rpc with given method\n"
+    "    Usage: cvd [selector options] env call [service] [method] [request]\n"
+    "      service           : gRPC service name\n"
+    "      method            : method name in given service\n"
+    "      request           : Protobuffer with text format\n\n"
+    "* \"cvd [selector_options] env\" can be replaced with:\n"
+    "    \"cvd_internal_env [internal device name]\"\n";
+
+bool PrintStream(std::stringstream* ss, const grpc::string& output) {
+  (*ss) << output;
+  return true;
+}
+
+class InsecureCliCredentials final : public grpc::testing::CliCredentials {
+ public:
+  std::shared_ptr<grpc::ChannelCredentials> GetChannelCredentials()
+      const override {
+    return InsecureChannelCredentials();
+  }
+  const grpc::string GetCredentialUsage() const override { return ""; }
+};
+
+std::vector<char*> ConvertToCharVec(const std::vector<std::string>& str_vec) {
+  std::vector<char*> char_vec;
+  char_vec.reserve(str_vec.size());
+  for (const auto& str : str_vec) {
+    char_vec.push_back(const_cast<char*>(str.c_str()));
+  }
+  return char_vec;
+}
+
+void RunGrpcCommand(const std::vector<std::string>& arguments,
+                    std::stringstream& output_stream) {
+  int grpc_cli_argc = arguments.size();
+  auto new_arguments = ConvertToCharVec(arguments);
+  char** grpc_cli_argv = new_arguments.data();
+
+  grpc::testing::InitTest(&grpc_cli_argc, &grpc_cli_argv, true);
+  grpc::testing::GrpcToolMainLib(
+      grpc_cli_argc, (const char**)grpc_cli_argv, InsecureCliCredentials(),
+      std::bind(PrintStream, &output_stream, std::placeholders::_1));
+}
+
+std::string RunGrpcCommand(const std::vector<std::string>& arguments) {
+  std::stringstream output_stream;
+  RunGrpcCommand(arguments, output_stream);
+  return output_stream.str();
+}
+
+std::vector<std::string> GetServiceList(const std::string& server_address) {
+  std::vector<std::string> service_list;
+  std::stringstream output_stream;
+
+  std::vector<std::string> arguments{"grpc_cli", "ls", server_address};
+  RunGrpcCommand(arguments, output_stream);
+
+  std::string service_name;
+  while (std::getline(output_stream, service_name)) {
+    if (service_name.compare("grpc.reflection.v1alpha.ServerReflection") == 0) {
+      continue;
+    }
+    service_list.emplace_back(service_name);
+  }
+
+  return service_list;
+}
+
+Result<std::string> GetServerAddress(
+    const std::vector<std::string>& server_address_list,
+    const std::string& service_name) {
+  std::vector<std::string> candidates;
+  for (const auto& server_address : server_address_list) {
+    for (auto& full_service_name : GetServiceList(server_address)) {
+      if (android::base::EndsWith(full_service_name, service_name)) {
+        candidates.emplace_back(server_address);
+        break;
+      }
+    }
+  }
+
+  CF_EXPECT(candidates.size() > 0, service_name + " is not found.");
+  CF_EXPECT(candidates.size() < 2, service_name + " is ambiguous.");
+
+  return candidates[0];
+}
+
+Result<std::string> GetFullServiceName(const std::string& server_address,
+                                       const std::string& service_name) {
+  std::vector<std::string> candidates;
+  for (auto& full_service_name : GetServiceList(server_address)) {
+    if (android::base::EndsWith(full_service_name, service_name)) {
+      candidates.emplace_back(full_service_name);
+    }
+  }
+
+  CF_EXPECT(candidates.size() > 0, service_name + " is not found.");
+  CF_EXPECT(candidates.size() < 2, service_name + " is ambiguous.");
+
+  return candidates[0];
+}
+
+Result<std::string> GetFullMethodName(const std::string& server_address,
+                                      const std::string& service_name,
+                                      const std::string& method_name) {
+  const auto& full_service_name =
+      CF_EXPECT(GetFullServiceName(server_address, service_name));
+  return full_service_name + "/" + method_name;
+}
+
+Result<std::string> GetFullTypeName(const std::string& server_address,
+                                    const std::string& service_name,
+                                    const std::string& method_name,
+                                    const std::string& type_name) {
+  // Run grpc_cli ls -l for given method to extract full type name.
+  // Example output:
+  //   rpc OpenwrtIpaddr(google.protobuf.Empty) returns
+  //   (openwrtcontrolserver.OpenwrtIpaddrReply) {}
+  const auto& full_method_name =
+      CF_EXPECT(GetFullMethodName(server_address, service_name, method_name));
+  std::vector<std::string> grpc_arguments{"grpc_cli", "ls", "-l",
+                                          server_address, full_method_name};
+  auto grpc_result = RunGrpcCommand(grpc_arguments);
+
+  std::vector<std::string> candidates;
+  for (auto& full_type_name : android::base::Split(grpc_result, "()")) {
+    if (android::base::EndsWith(full_type_name, type_name)) {
+      candidates.emplace_back(full_type_name);
+    }
+  }
+
+  CF_EXPECT(candidates.size() > 0, type_name + " is not found.");  // was service_name: wrong identifier in error
+  CF_EXPECT(candidates.size() < 2, type_name + " is ambiguous.");  // was service_name: wrong identifier in error
+
+  return candidates[0];
+}
+
+Result<void> HandleLsCmd(const std::vector<std::string>& server_address_list,
+                         const std::vector<std::string>& args,
+                         const std::vector<std::string>& options) {
+  // Implements "ls [service [method]]":
+  //   0 args - list the services exposed on every known server socket,
+  //   1 arg  - list the named service (resolved to its fully-qualified name),
+  //   2 args - list the named method of the named service.
+  // |options| are passed through to grpc_cli unchanged.
+  CF_EXPECT(args.size() < 3, "too many arguments");
+
+  if (args.size() > 0) {
+    std::vector<std::string> grpc_arguments{"grpc_cli", "ls"};
+
+    // Resolve which server socket actually hosts the requested service.
+    const auto& service_name = args[0];
+    const auto& server_address =
+        CF_EXPECT(GetServerAddress(server_address_list, service_name));
+    grpc_arguments.push_back(server_address);
+    if (args.size() > 1) {
+      // ls subcommand with 2 arguments; service_name and method_name
+      const auto& method_name = args[1];
+      const auto& full_method_name = CF_EXPECT(
+          GetFullMethodName(server_address, service_name, method_name));
+      grpc_arguments.push_back(full_method_name);
+    } else {
+      // ls subcommand with 1 argument; service_name
+      const auto& full_service_name =
+          CF_EXPECT(GetFullServiceName(server_address, service_name));
+      grpc_arguments.push_back(full_service_name);
+    }
+    grpc_arguments.insert(grpc_arguments.end(), options.begin(), options.end());
+
+    std::cout << RunGrpcCommand(grpc_arguments);
+  } else {
+    // ls subcommand with no arguments
+    // Enumerate every server socket; errors from one server do not stop
+    // listing the others only because RunGrpcCommand reports per-invocation.
+    for (const auto& server_address : server_address_list) {
+      std::vector<std::string> grpc_arguments{"grpc_cli", "ls", server_address};
+      grpc_arguments.insert(grpc_arguments.end(), options.begin(),
+                            options.end());
+
+      std::cout << RunGrpcCommand(grpc_arguments);
+    }
+  }
+
+  return {};
+}
+
+Result<void> HandleTypeCmd(const std::vector<std::string>& server_address_list,
+                           const std::vector<std::string>& args,
+                           const std::vector<std::string>& options) {
+  // Implements "type <service> <method> <type>": prints the protobuf
+  // definition of a (possibly abbreviated) type used by the given method.
+  CF_EXPECT(args.size() > 2,
+            "need to specify a service name, a method name, and type_name");
+  CF_EXPECT(args.size() < 4, "too many arguments");
+
+  std::vector<std::string> grpc_arguments{"grpc_cli", "type"};
+  const auto& service_name = args[0];
+  const auto& method_name = args[1];
+  const auto& type_name = args[2];
+
+  // Find the hosting server, then expand the short type name to its
+  // fully-qualified form before handing it to grpc_cli.
+  const auto& server_address =
+      CF_EXPECT(GetServerAddress(server_address_list, service_name));
+  grpc_arguments.push_back(server_address);
+  const auto& full_type_name = CF_EXPECT(
+      GetFullTypeName(server_address, service_name, method_name, type_name));
+  grpc_arguments.push_back(full_type_name);
+
+  // User-supplied flags are forwarded to grpc_cli verbatim.
+  grpc_arguments.insert(grpc_arguments.end(), options.begin(), options.end());
+
+  std::cout << RunGrpcCommand(grpc_arguments);
+
+  return {};
+}
+
+Result<void> HandleCallCmd(const std::vector<std::string>& server_address_list,
+                           const std::vector<std::string>& args,
+                           const std::vector<std::string>& options) {
+  // Implements "call <service> <method> <request-proto-text>": invokes the
+  // RPC via grpc_cli and prints its output.
+  CF_EXPECT(args.size() > 2,
+            "need to specify a service name, a method name, and text-formatted "
+            "proto");
+  CF_EXPECT(args.size() < 4, "too many arguments");
+
+  std::vector<std::string> grpc_arguments{"grpc_cli", "call"};
+  // TODO(b/265384449): support the case without text-formatted proto.
+  const auto& service_name = args[0];
+  const auto& method_name = args[1];
+  const auto& proto_text_format = args[2];
+
+  // Find the hosting server and expand the short method name to its
+  // fully-qualified form before handing it to grpc_cli.
+  const auto& server_address =
+      CF_EXPECT(GetServerAddress(server_address_list, service_name));
+  grpc_arguments.push_back(server_address);
+  const auto& full_method_name =
+      CF_EXPECT(GetFullMethodName(server_address, service_name, method_name));
+  grpc_arguments.push_back(full_method_name);
+  grpc_arguments.push_back(proto_text_format);
+
+  // User-supplied flags are forwarded to grpc_cli verbatim.
+  grpc_arguments.insert(grpc_arguments.end(), options.begin(), options.end());
+
+  std::cout << RunGrpcCommand(grpc_arguments);
+
+  return {};
+}
+
+// Returns true if any element of argv is "--help" or "-help". Scanned
+// before any other processing so help can be printed without needing a
+// valid cuttlefish config.
+bool ContainHelpOption(int argc, char** argv) {
+  for (int i = 0; i < argc; i++) {
+    if (strcmp(argv[i], "--help") == 0 || strcmp(argv[i], "-help") == 0) {
+      return true;
+    }
+  }
+  return false;
+}
+
+// Entry point: cvd_env <receiver> <cmd> [args...] [options...].
+// Resolves the receiver instance, discovers its gRPC sockets, and dispatches
+// to the matching subcommand handler.
+Result<void> CvdEnvMain(int argc, char** argv) {
+  ::android::base::InitLogging(argv, android::base::StderrLogger);
+  if (ContainHelpOption(argc, argv)) {
+    std::cout << kCvdEnvHelpMessage;
+    return {};
+  }
+
+  // argv[1] selects the target instance; argv[2] names the subcommand.
+  CF_EXPECT(argc >= 3, " need to specify a receiver and a command");
+  const auto& receiver = argv[1];
+  const auto& cmd = argv[2];
+
+  // Split the remaining argv into grpc_cli pass-through options (leading
+  // '-') and positional subcommand arguments.
+  std::vector<std::string> options;
+  std::vector<std::string> args;
+  for (int i = 3; i < argc; i++) {
+    if (android::base::StartsWith(argv[i], '-')) {
+      options.push_back(argv[i]);
+    } else {
+      args.push_back(argv[i]);
+    }
+  }
+
+  const auto* config = CuttlefishConfig::Get();
+  CF_EXPECT(config != nullptr, "Unable to find the config");
+  std::vector<std::string> server_address_list;
+  const auto& instances = config->Instances();
+  // Locate the instance whose name matches the requested receiver.
+  auto receiver_instance = std::find_if(
+      begin(instances), end(instances), [&receiver](const auto& instance) {
+        return receiver == instance.instance_name();
+      });
+
+  CF_EXPECT(receiver_instance != std::end(instances),
+            "there is no instance of which name is "
+                << receiver << ". please check instance name by cvd fleet");
+
+  // Each entry in the instance's grpc socket directory is treated as a
+  // unix-domain socket exposing one gRPC server.
+  for (const auto& entry : std::filesystem::directory_iterator(
+           receiver_instance->grpc_socket_path())) {
+    LOG(DEBUG) << "loading " << entry.path();
+    server_address_list.emplace_back("unix:" + entry.path().string());
+  }
+
+  // Dispatch table from subcommand name to handler.
+  auto command_map =
+      std::unordered_map<std::string, std::function<Result<void>(
+                                          const std::vector<std::string>&,
+                                          const std::vector<std::string>&,
+                                          const std::vector<std::string>&)>>{{
+          {"call", HandleCallCmd},
+          {"ls", HandleLsCmd},
+          {"type", HandleTypeCmd},
+      }};
+
+  CF_EXPECT(Contains(command_map, cmd), cmd << " isn't supported");
+
+  CF_EXPECT(command_map[cmd](server_address_list, args, options));
+
+  return {};
+}
+
+}  // namespace
+}  // namespace cuttlefish
+
+int main(int argc, char** argv) {
+  // Any failure from CvdEnvMain aborts via CHECK with the error message;
+  // a successful run exits 0.
+  const auto& ret = cuttlefish::CvdEnvMain(argc, argv);
+  CHECK(ret.ok()) << ret.error().Message();
+  return 0;
+}
diff --git a/host/commands/cvd_import_locations/Android.bp b/host/commands/cvd_import_locations/Android.bp
new file mode 100644
index 0000000..e472e89
--- /dev/null
+++ b/host/commands/cvd_import_locations/Android.bp
@@ -0,0 +1,85 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+// Dependencies shared by the cvd_import_locations binary and its unit test;
+// host-only (see cuttlefish_buildhost_only).
+cc_defaults {
+    name: "cvd_import_locations_defaults",
+    shared_libs: [
+        "libext2_blkid",
+        "libbase",
+        "libcuttlefish_fs",
+        "liblog",
+        "libicuuc",
+        "libprotobuf-cpp-full",
+        "libgrpc++_unsecure",
+    ],
+    defaults: ["cuttlefish_buildhost_only"],
+}
+
+// Host CLI that parses KML/GPX files and streams the fixes to a device's
+// GNSS gRPC proxy.
+cc_binary {
+    name: "cvd_import_locations",
+    shared_libs: [
+        "libext2_blkid",
+        "libbase",
+        "libcuttlefish_fs",
+        "libcuttlefish_utils",
+        "libjsoncpp",
+        "libprotobuf-cpp-full",
+        "libgrpc++_unsecure",
+        "libxml2",
+    ],
+    static_libs: [
+        "libcuttlefish_host_config",
+        "libgflags",
+        "libcvd_gnss_grpc_proxy",
+        "liblocation",
+    ],
+    srcs: [
+        "main.cc",
+    ],
+    cflags: [
+        "-Wno-unused-parameter",
+        "-D_XOPEN_SOURCE",
+    ],
+    defaults: ["cvd_import_locations_defaults"],
+}
+
+// Host-side unit tests for the KML/GPX parsers used by cvd_import_locations.
+cc_test_host {
+    name: "cvd_import_locations_test",
+    shared_libs: [
+        "libext2_blkid",
+        "libbase",
+        "libcuttlefish_fs",
+        "libcuttlefish_utils",
+        "libxml2",
+    ],
+    static_libs: [
+        "liblocation",
+        "libgmock",
+    ],
+    srcs: [
+        "unittest/main_test.cc",
+        "unittest/kml_parser_test.cc",
+        "unittest/gpx_parser_test.cc",
+    ],
+    cflags: [
+        "-Wno-unused-parameter",
+        "-D_XOPEN_SOURCE",
+    ],
+    defaults: ["cvd_import_locations_defaults"],
+}
diff --git a/host/commands/cvd_import_locations/main.cc b/host/commands/cvd_import_locations/main.cc
new file mode 100644
index 0000000..393f73f
--- /dev/null
+++ b/host/commands/cvd_import_locations/main.cc
@@ -0,0 +1,128 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <android-base/logging.h>
+#include <common/libs/fs/shared_buf.h>
+#include <gflags/gflags.h>
+#include <host/libs/config/logging.h>
+#include "host/libs/config/cuttlefish_config.h"
+
+#include "host/libs/location/GnssClient.h"
+#include "host/libs/location/GpxParser.h"
+#include "host/libs/location/KmlParser.h"
+
+DEFINE_int32(instance_num, 1, "Which instance to read the configs from");
+DEFINE_double(delay, 1.0, "delay interval between different coordinates");
+
+DEFINE_string(format, "", "supported file format, either kml or gpx");
+DEFINE_string(file_path, "", "path to input file location {Kml or gpx} format");
+
+const char* kUsageMessage = R""""(gps locations import commandline utility
+
+Usage: cvd_import_locations [option] command [args...]
+
+arguments:
+
+  --format=[format_string]
+    input file format for cvd_import_locations
+        "gpx" for gpx input data file
+        "kml" for kml input data file
+
+  --file_path=[path]
+    gps locations input file path
+    if path is not specified, error will be reported
+
+  --delay=[delay_value]
+    delay between different gps locations ( double , default value is 1.0 second)
+
+  --instance_num=[integer_value]
+    running instance number , starts from 1 ( integer , default value is 1)
+
+examples:
+
+    cvd_import_locations --format="gpx" --file_path="input.gpx"
+    cvd_import_locations --format="kml" --file_path="input.kml"
+
+    cvd_import_locations --format="gpx" --file_path="input.gpx" --delay=.5
+    cvd_import_locations --format="kml" --file_path="input.kml" --delay=.5
+
+    cvd_import_locations --format="gpx" --file_path="input.gpx" --delay=.5 --instance_num=2
+
+)"""";
+namespace cuttlefish {
+namespace {
+
+// Parses the --file_path input in the --format given, then streams the
+// resulting fixes to the GNSS gRPC proxy of instance --instance_num with
+// --delay seconds between fixes. Returns 0 on success, 1 on any failure.
+int ImportLocationsCvdMain(int argc, char** argv) {
+  ::android::base::InitLogging(argv, android::base::StderrLogger);
+  google::ParseCommandLineFlags(&argc, &argv, true);
+
+  // The cuttlefish config supplies the GNSS proxy port of the target
+  // instance; nothing can be done without it.
+  auto config = CuttlefishConfig::Get();
+  if (!config) {
+    LOG(ERROR) << "Failed to obtain config object";
+    return 1;
+  }
+  // Accepted --format spellings (lower- and upper-case variants).
+  std::set<std::string> supportedFormat = {"gpx", "GPX", "kml", "KML"};
+
+  if (supportedFormat.count(FLAGS_format) == 0) {
+    LOG(ERROR) << "Unsupported parsing format" << std::endl;
+    return 1;
+  }
+  LOG(INFO) << FLAGS_format << " Supported format" << std::endl;
+  // NOTE(review): assumes ForInstance() takes the same 1-based numbering the
+  // --instance_num flag documents — confirm against CuttlefishConfig.
+  auto instance = config->ForInstance(FLAGS_instance_num);
+  auto server_port = instance.gnss_grpc_proxy_server_port();
+  std::string socket_name =
+      std::string("localhost:") + std::to_string(server_port);
+  GnssClient gpsclient(
+      grpc::CreateChannel(socket_name, grpc::InsecureChannelCredentials()));
+
+  GpsFixArray coordinates;
+  std::string error;
+  bool isOk = false;
+
+  LOG(INFO) << "Server port: " << server_port << " socket: " << socket_name
+            << std::endl;
+  // Dispatch to the parser matching the requested input format.
+  if (FLAGS_format == "gpx" || FLAGS_format == "GPX") {
+    isOk =
+        GpxParser::parseFile(FLAGS_file_path.c_str(), &coordinates, &error);
+  } else if (FLAGS_format == "kml" || FLAGS_format == "KML") {
+    isOk =
+        KmlParser::parseFile(FLAGS_file_path.c_str(), &coordinates, &error);
+  }
+
+  LOG(INFO) << "Number of parsed points: " << coordinates.size() << std::endl;
+
+  if (!isOk) {
+    LOG(ERROR) << " Parsing Error: " << error << std::endl;
+    return 1;
+  }
+
+  // Convert the per-fix delay from seconds to milliseconds.
+  int delay_ms = static_cast<int>(1000 * FLAGS_delay);
+  auto status = gpsclient.SendGpsLocations(delay_ms, coordinates);
+  // Fail gracefully with a non-zero exit code instead of CHECK-aborting:
+  // the original CHECK raised SIGABRT on failure, which also made the
+  // subsequent "if (!status.ok()) return 1;" branch unreachable dead code.
+  if (!status.ok()) {
+    LOG(ERROR) << "Failed to send gps location data";
+    return 1;
+  }
+  // Give the final fix time to be consumed before tearing the channel down.
+  std::this_thread::sleep_for(std::chrono::milliseconds(delay_ms));
+  return 0;
+}
+
+}  // namespace
+}  // namespace cuttlefish
+
+int main(int argc, char** argv) {
+  // Register the --help usage text before delegating to the implementation.
+  gflags::SetUsageMessage(kUsageMessage);
+  return cuttlefish::ImportLocationsCvdMain(argc, argv);
+}
diff --git a/host/commands/cvd_import_locations/unittest/gpx_parser_test.cc b/host/commands/cvd_import_locations/unittest/gpx_parser_test.cc
new file mode 100644
index 0000000..3baacb0
--- /dev/null
+++ b/host/commands/cvd_import_locations/unittest/gpx_parser_test.cc
@@ -0,0 +1,381 @@
+/*
+ * Copyright (C) 2015-2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <android-base/file.h>
+#include <gtest/gtest.h>
+#include <fstream>
+#include "host/libs/location/GpsFix.h"
+#include "host/libs/location/GpxParser.h"
+#include "host/libs/location/StringParse.h"
+
+namespace cuttlefish {
+
+namespace {
+// Writes |text| to a temporary .gpx file and parses it through the
+// file-based GpxParser entry point. Returns the parser's success flag;
+// |error| receives any diagnostic.
+bool ParseGpxFile(GpsFixArray* locations, char* text, std::string* error) {
+  bool result;
+  TemporaryDir myDir;
+  std::string path = std::string(myDir.path) + "/" + "test.gpx";
+
+  std::ofstream myfile;
+  myfile.open(path.c_str());
+  myfile << text;
+  myfile.close();
+  result = GpxParser::parseFile(path.c_str(), locations, error);
+  return result;
+}
+
+// Parses |text| in-memory through the string-based GpxParser entry point,
+// mirroring ParseGpxFile so each fixture exercises both code paths.
+bool ParseGpxString(GpsFixArray* locations, char* text, std::string* error) {
+  bool result;
+  result = GpxParser::parseString(text, strlen(text), locations, error);
+  return result;
+}
+
+}  // namespace
+
+TEST(GpxParser, ParseFileNotFound) {
+  GpsFixArray locations;
+  std::string error;
+  bool isOk = GpxParser::parseFile("i_dont_exist.gpx", &locations, &error);
+  EXPECT_FALSE(isOk);
+}
+
+char kEmptyText[] =
+    "<?xml version=\"1.0\"?>"
+    "<gpx>"
+    "</gpx>";
+TEST(GpxParser, ParseEmptyFile) {
+  std::string error;
+  bool isOk;
+  GpsFixArray locations;
+  isOk = ParseGpxFile(&locations, kEmptyText, &error);
+  EXPECT_TRUE(isOk);
+  EXPECT_EQ(0U, locations.size());
+}
+
+TEST(GpxParser, ParseEmptyString) {
+  std::string error;
+  bool isOk;
+  GpsFixArray locations;
+  isOk = ParseGpxString(&locations, kEmptyText, &error);
+  EXPECT_TRUE(isOk);
+  EXPECT_EQ(0U, locations.size());
+}
+
+char kEmptyRteTrkText[] =
+    "<?xml version=\"1.0\"?>"
+    "<gpx>"
+    "<rte>"
+    "</rte>"
+    "<trk>"
+    "<trkseg>"
+    "</trkseg>"
+    "</trk>"
+    "</gpx>";
+TEST(GpxParser, ParseEmptyRteTrkFile) {
+  std::string error;
+  bool isOk;
+  GpsFixArray locations;
+  isOk = ParseGpxFile(&locations, kEmptyRteTrkText, &error);
+  EXPECT_TRUE(isOk);
+  EXPECT_EQ(0U, locations.size());
+}
+
+TEST(GpxParser, ParseEmptyRteTrkString) {
+  std::string error;
+  bool isOk;
+  GpsFixArray locations;
+  isOk = ParseGpxString(&locations, kEmptyRteTrkText, &error);
+  EXPECT_TRUE(isOk);
+  EXPECT_EQ(0U, locations.size());
+}
+
+char kValidText[] =
+    "<?xml version=\"1.0\"?>"
+    "<gpx>"
+    "<wpt lon=\"0\" lat=\"0\">"
+    "<name>Wpt 1</name>"
+    "</wpt>"
+    "<wpt lon=\"0\" lat=\"0\">"
+    "<name>Wpt 2</name>"
+    "</wpt>"
+    "<rte>"
+    "<rtept lon=\"0\" lat=\"0\">"
+    "<name>Rtept 1</name>"
+    "</rtept>"
+    "<rtept lon=\"0\" lat=\"0\">"
+    "<name>Rtept 2</name>"
+    "</rtept>"
+    "</rte>"
+    "<trk>"
+    "<trkseg>"
+    "<trkpt lon=\"0\" lat=\"0\">"
+    "<name>Trkpt 1-1</name>"
+    "</trkpt>"
+    "<trkpt lon=\"0\" lat=\"0\">"
+    "<name>Trkpt 1-2</name>"
+    "</trkpt>"
+    "</trkseg>"
+    "<trkseg>"
+    "<trkpt lon=\"0\" lat=\"0\">"
+    "<name>Trkpt 2-1</name>"
+    "</trkpt>"
+    "<trkpt lon=\"0\" lat=\"0\">"
+    "<name>Trkpt 2-2</name>"
+    "</trkpt>"
+    "</trkseg>"
+    "</trk>"
+    "</gpx>";
+TEST(GpxParser, ParseValidFile) {
+  std::string error;
+  bool isOk;
+
+  GpsFixArray locations;
+  isOk = ParseGpxFile(&locations, kValidText, &error);
+  EXPECT_TRUE(isOk);
+  ASSERT_EQ(8U, locations.size());
+  EXPECT_EQ("Wpt 1", locations[0].name);
+  EXPECT_EQ("Wpt 2", locations[1].name);
+  EXPECT_EQ("Rtept 1", locations[2].name);
+  EXPECT_EQ("Rtept 2", locations[3].name);
+  EXPECT_EQ("Trkpt 1-1", locations[4].name);
+  EXPECT_EQ("Trkpt 1-2", locations[5].name);
+  EXPECT_EQ("Trkpt 2-1", locations[6].name);
+  EXPECT_EQ("Trkpt 2-2", locations[7].name);
+}
+
+TEST(GpxParser, ParseValidString) {
+  std::string error;
+  bool isOk;
+
+  GpsFixArray locations;
+  isOk = ParseGpxString(&locations, kValidText, &error);
+  EXPECT_TRUE(isOk);
+  ASSERT_EQ(8U, locations.size());
+  EXPECT_EQ("Wpt 1", locations[0].name);
+  EXPECT_EQ("Wpt 2", locations[1].name);
+  EXPECT_EQ("Rtept 1", locations[2].name);
+  EXPECT_EQ("Rtept 2", locations[3].name);
+  EXPECT_EQ("Trkpt 1-1", locations[4].name);
+  EXPECT_EQ("Trkpt 1-2", locations[5].name);
+  EXPECT_EQ("Trkpt 2-1", locations[6].name);
+  EXPECT_EQ("Trkpt 2-2", locations[7].name);
+}
+
+char kNullAttributeText[] =
+    "<?xml version=\"1.0\"?>"
+    "<gpx>"
+    "<wpt lon=\"0\" lat=\"0\">"
+    "<name/>"
+    "</wpt>"
+    "</gpx>";
+TEST(GpxParser, ParseFileNullAttributeFile) {
+  std::string error;
+  bool isOk;
+
+  GpsFixArray locations;
+  isOk = ParseGpxFile(&locations, kNullAttributeText, &error);
+  // This test only checks if GpxParser doesn't crash on null attributes
+  // So if we're here it's already Ok - these tests aren't that relevant.
+  EXPECT_TRUE(isOk);
+  EXPECT_EQ(1U, locations.size());
+  EXPECT_STREQ("", locations[0].name.c_str());
+  EXPECT_TRUE(error.empty());
+}
+
+TEST(GpxParser, ParseFileNullAttributeString) {
+  std::string error;
+  bool isOk;
+
+  GpsFixArray locations;
+  isOk = ParseGpxString(&locations, kNullAttributeText, &error);
+  // This test only checks if GpxParser doesn't crash on null attributes
+  // So if we're here it's already Ok - these tests aren't that relevant.
+  EXPECT_TRUE(isOk);
+  EXPECT_EQ(1U, locations.size());
+  EXPECT_STREQ("", locations[0].name.c_str());
+  EXPECT_TRUE(error.empty());
+}
+
+char kLocationMissingLongitudeLatitudeText[] =
+    "<?xml version=\"1.0\"?>"
+    "<gpx>"
+    "<wpt lon=\"9.81\">"
+    "<ele>6.02</ele>"
+    "<name>Name</name>"
+    "<desc>Desc</desc>"
+    "</wpt>"
+    "</gpx>";
+TEST(GpxParser, ParseLocationMissingLatitudeFile) {
+  std::string error;
+  bool isOk;
+
+  GpsFixArray locations;
+  isOk =
+      ParseGpxFile(&locations, kLocationMissingLongitudeLatitudeText, &error);
+  EXPECT_FALSE(isOk);
+}
+
+TEST(GpxParser, ParseLocationMissingLatitudeString) {
+  std::string error;
+  bool isOk;
+
+  GpsFixArray locations;
+  isOk =
+      ParseGpxString(&locations, kLocationMissingLongitudeLatitudeText, &error);
+  EXPECT_FALSE(isOk);
+}
+
+char kLocationMissingLongitudeText[] =
+    "<?xml version=\"1.0\"?>"
+    "<gpx>"
+    "<wpt lat=\"3.1415\">"
+    "<ele>6.02</ele>"
+    "<name>Name</name>"
+    "<desc>Desc</desc>"
+    "</wpt>"
+    "</gpx>";
+TEST(GpxParser, ParseLocationMissingLongitudeFile) {
+  std::string error;
+  bool isOk;
+
+  GpsFixArray locations;
+  isOk = ParseGpxFile(&locations, kLocationMissingLongitudeText, &error);
+  EXPECT_FALSE(isOk);
+}
+
+TEST(GpxParser, ParseLocationMissingLongitudeString) {
+  std::string error;
+  bool isOk;
+
+  GpsFixArray locations;
+  isOk = ParseGpxString(&locations, kLocationMissingLongitudeText, &error);
+  EXPECT_FALSE(isOk);
+}
+
+char kValidLocationText[] =
+    "<?xml version=\"1.0\"?>"
+    "<gpx>"
+    "<wpt lon=\"9.81\" lat=\"3.1415\">"
+    "<ele>6.02</ele>"
+    "<name>Name</name>"
+    "<desc>Desc</desc>"
+    "</wpt>"
+    "</gpx>";
+TEST(GpxParser, ParseValidLocationFile) {
+  std::string error;
+  bool isOk;
+
+  GpsFixArray locations;
+  isOk = ParseGpxFile(&locations, kValidLocationText, &error);
+  EXPECT_TRUE(isOk);
+  EXPECT_EQ(1U, locations.size());
+  const GpsFix& wpt = locations[0];
+  EXPECT_EQ("Desc", wpt.description);
+  EXPECT_FLOAT_EQ(6.02, wpt.elevation);
+  EXPECT_FLOAT_EQ(3.1415, wpt.latitude);
+  EXPECT_FLOAT_EQ(9.81, wpt.longitude);
+  EXPECT_EQ("Name", wpt.name);
+}
+
+TEST(GpxParser, ParseValidLocationString) {
+  std::string error;
+  bool isOk;
+
+  GpsFixArray locations;
+  isOk = ParseGpxString(&locations, kValidLocationText, &error);
+  EXPECT_TRUE(isOk);
+  EXPECT_EQ(1U, locations.size());
+  const GpsFix& wpt = locations[0];
+  EXPECT_EQ("Desc", wpt.description);
+  EXPECT_FLOAT_EQ(6.02, wpt.elevation);
+  EXPECT_FLOAT_EQ(3.1415, wpt.latitude);
+  EXPECT_FLOAT_EQ(9.81, wpt.longitude);
+  EXPECT_EQ("Name", wpt.name);
+}
+
+char kValidDocumentText[] =
+    "<?xml version=\"1.0\"?>"
+    "<gpx>"
+    "<wpt lon=\"0\" lat=\"0\">"
+    "<name>Wpt 1</name>"
+    "</wpt>"
+    "<wpt lon=\"0\" lat=\"0\">"
+    "<name>Wpt 2</name>"
+    "</wpt>"
+    "<rte>"
+    "<rtept lon=\"0\" lat=\"0\">"
+    "<name>Rtept 1</name>"
+    "</rtept>"
+    "<rtept lon=\"0\" lat=\"0\">"
+    "<name>Rtept 2</name>"
+    "</rtept>"
+    "</rte>"
+    "<trk>"
+    "<trkseg>"
+    "<trkpt lon=\"0\" lat=\"0\">"
+    "<name>Trkpt 1-1</name>"
+    "</trkpt>"
+    "<trkpt lon=\"0\" lat=\"0\">"
+    "<name>Trkpt 1-2</name>"
+    "</trkpt>"
+    "</trkseg>"
+    "<trkseg>"
+    "<trkpt lon=\"0\" lat=\"0\">"
+    "<name>Trkpt 2-1</name>"
+    "</trkpt>"
+    "<trkpt lon=\"0\" lat=\"0\">"
+    "<name>Trkpt 2-2</name>"
+    "</trkpt>"
+    "</trkseg>"
+    "</trk>"
+    "</gpx>";
+TEST(GpxParser, ParseValidDocumentFile) {
+  std::string error;
+  bool isOk;
+
+  GpsFixArray locations;
+  isOk = ParseGpxFile(&locations, kValidDocumentText, &error);
+  EXPECT_TRUE(isOk);
+  EXPECT_EQ(8U, locations.size());
+  EXPECT_EQ("Wpt 1", locations[0].name);
+  EXPECT_EQ("Wpt 2", locations[1].name);
+  EXPECT_EQ("Rtept 1", locations[2].name);
+  EXPECT_EQ("Rtept 2", locations[3].name);
+  EXPECT_EQ("Trkpt 1-1", locations[4].name);
+  EXPECT_EQ("Trkpt 1-2", locations[5].name);
+  EXPECT_EQ("Trkpt 2-1", locations[6].name);
+  EXPECT_EQ("Trkpt 2-2", locations[7].name);
+}
+
+TEST(GpxParser, ParseValidDocumentString) {
+  std::string error;
+  bool isOk;
+
+  GpsFixArray locations;
+  isOk = ParseGpxString(&locations, kValidDocumentText, &error);
+  EXPECT_TRUE(isOk);
+  EXPECT_EQ(8U, locations.size());
+  EXPECT_EQ("Wpt 1", locations[0].name);
+  EXPECT_EQ("Wpt 2", locations[1].name);
+  EXPECT_EQ("Rtept 1", locations[2].name);
+  EXPECT_EQ("Rtept 2", locations[3].name);
+  EXPECT_EQ("Trkpt 1-1", locations[4].name);
+  EXPECT_EQ("Trkpt 1-2", locations[5].name);
+  EXPECT_EQ("Trkpt 2-1", locations[6].name);
+  EXPECT_EQ("Trkpt 2-2", locations[7].name);
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/cvd_import_locations/unittest/kml_parser_test.cc b/host/commands/cvd_import_locations/unittest/kml_parser_test.cc
new file mode 100644
index 0000000..7328f31
--- /dev/null
+++ b/host/commands/cvd_import_locations/unittest/kml_parser_test.cc
@@ -0,0 +1,661 @@
+/*
+ * Copyright (C) 2015-2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <android-base/file.h>
+#include <gtest/gtest.h>
+#include <fstream>
+#include "host/libs/location/GpsFix.h"
+#include "host/libs/location/KmlParser.h"
+#include "host/libs/location/StringParse.h"
+
+namespace cuttlefish {
+namespace {
+// Writes |text| to a temporary .kml file and parses it through the
+// file-based KmlParser entry point. Returns the parser's success flag;
+// |error| receives any diagnostic.
+bool ParseKmlFile(GpsFixArray* locations, char* text, std::string* error) {
+  bool result;
+  TemporaryDir myDir;
+  std::string path = std::string(myDir.path) + "/" + "test.kml";
+
+  std::ofstream myfile;
+  myfile.open(path.c_str());
+  myfile << text;
+  myfile.close();
+  result = KmlParser::parseFile(path.c_str(), locations, error);
+  return result;
+}
+
+// Parses |text| in-memory through the string-based KmlParser entry point,
+// mirroring ParseKmlFile so each fixture exercises both code paths.
+bool ParseKmlString(GpsFixArray* locations, char* text, std::string* error) {
+  bool result;
+  result = KmlParser::parseString(text, strlen(text), locations, error);
+  return result;
+}
+}  // namespace
+
+TEST(KmlParser, ParseNonexistentFile) {
+  GpsFixArray locations;
+  std::string error;
+  ASSERT_FALSE(KmlParser::parseFile("", &locations, &error));
+  ASSERT_EQ(0U, locations.size());
+  EXPECT_EQ(std::string("KML document not parsed successfully."), error);
+}
+
+char kEmptyKmlText[] =
+    "<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
+    "<kml xmlns=\"http://earth.google.com/kml/2.x\">"
+    "</kml>";
+TEST(KmlParser, ParseEmptyKmlFile) {
+  GpsFixArray locations;
+  std::string error;
+  ASSERT_TRUE(ParseKmlFile(&locations, kEmptyKmlText, &error));
+  EXPECT_EQ(0U, locations.size());
+  EXPECT_EQ("", error);
+}
+
+TEST(KmlParser, ParseEmptyKmlString) {
+  GpsFixArray locations;
+  std::string error;
+  ASSERT_TRUE(ParseKmlString(&locations, kEmptyKmlText, &error));
+  EXPECT_EQ(0U, locations.size());
+  EXPECT_EQ("", error);
+}
+
+char kValidkmlText[] =
+    "<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
+    "<kml xmlns=\"http://earth.google.com/kml/2.x\">"
+    "<Placemark>"
+    "<name>Simple placemark</name>"
+    "<description>Attached to the ground.</description>"
+    "<Point>"
+    "<coordinates>-122.0822035425683,37.42228990140251,0</coordinates>"
+    "</Point>"
+    "</Placemark>"
+    "</kml>";
+TEST(KmlParser, ParseValidKmlFile) {
+  GpsFixArray locations;
+  std::string error;
+
+  ASSERT_TRUE(ParseKmlFile(&locations, kValidkmlText, &error));
+  EXPECT_EQ(1U, locations.size());
+  EXPECT_EQ("", error);
+}
+
+TEST(KmlParser, ParseValidKmlString) {
+  GpsFixArray locations;
+  std::string error;
+
+  ASSERT_TRUE(ParseKmlString(&locations, kValidkmlText, &error));
+  EXPECT_EQ(1U, locations.size());
+  EXPECT_EQ("", error);
+}
+
+char kValidComplexText[] =
+    "<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
+    "<kml xmlns=\"http://earth.google.com/kml/2.x\">"
+    "<Document>"
+    "<name>KML Samples</name>"
+    "<Style id=\"globeIcon\">"
+    "<IconStyle></IconStyle><LineStyle><width>2</width></LineStyle>"
+    "</Style>"
+    "<Folder>"
+    "<name>Placemarks</name>"
+    "<description>These are just some</description>"
+    "<LookAt>"
+    "<tilt>40.5575073395506</tilt><range>500.6566641072245</range>"
+    "</LookAt>"
+    "<Placemark>"
+    "<name>Tessellated</name>"
+    "<visibility>0</visibility>"
+    "<description>Black line (10 pixels wide), height tracks "
+    "terrain</description>"
+    "<LookAt><longitude>-122.0839597145766</longitude></LookAt>"
+    "<styleUrl>#downArrowIcon</styleUrl>"
+    "<Point>"
+    "<altitudeMode>relativeToGround</altitudeMode>"
+    "<coordinates>-122.084075,37.4220033612141,50</coordinates>"
+    "</Point>"
+    "</Placemark>"
+    "<Placemark>"
+    "<name>Transparent</name>"
+    "<visibility>0</visibility>"
+    "<styleUrl>#transRedPoly</styleUrl>"
+    "<Polygon>"
+    "<extrude>1</extrude>"
+    "<altitudeMode>relativeToGround</altitudeMode>"
+    "<outerBoundaryIs>"
+    "<LinearRing>"
+    "<coordinates> -122.084075,37.4220033612141,50</coordinates>"
+    "</LinearRing>"
+    "</outerBoundaryIs>"
+    "</Polygon>"
+    "</Placemark>"
+    "</Folder>"
+    "<Placemark>"
+    "<name>Fruity</name>"
+    "<visibility>0</visibility>"
+    "<description><![CDATA[If the <tessellate> tag has a value of "
+    "n]]></description>"
+    "<LookAt><longitude>-112.0822680013139</longitude></LookAt>"
+    "<LineString>"
+    "<tessellate>1</tessellate>"
+    "<coordinates> -122.084075,37.4220033612141,50 </coordinates>"
+    "</LineString>"
+    "</Placemark>"
+    "</Document>"
+    "</kml>";
+TEST(KmlParser, ParseValidComplexFile) {
+  GpsFixArray locations;
+  std::string error;
+  ASSERT_TRUE(ParseKmlFile(&locations, kValidComplexText, &error));
+
+  EXPECT_EQ("", error);
+  EXPECT_EQ(3U, locations.size());
+
+  EXPECT_EQ("Tessellated", locations[0].name);
+  EXPECT_EQ("Black line (10 pixels wide), height tracks terrain",
+            locations[0].description);
+  EXPECT_EQ("Transparent", locations[1].name);
+  EXPECT_EQ("", locations[1].description);
+  EXPECT_EQ("Fruity", locations[2].name);
+  EXPECT_EQ("If the <tessellate> tag has a value of n",
+            locations[2].description);
+
+  for (unsigned i = 0; i < locations.size(); ++i) {
+    EXPECT_FLOAT_EQ(-122.084075, locations[i].longitude);
+    EXPECT_FLOAT_EQ(37.4220033612141, locations[i].latitude);
+    EXPECT_FLOAT_EQ(50, locations[i].elevation);
+  }
+}
+
+TEST(KmlParser, ParseValidComplexString) {
+  GpsFixArray locations;
+  std::string error;
+  ASSERT_TRUE(ParseKmlString(&locations, kValidComplexText, &error));
+
+  EXPECT_EQ("", error);
+  EXPECT_EQ(3U, locations.size());
+
+  EXPECT_EQ("Tessellated", locations[0].name);
+  EXPECT_EQ("Black line (10 pixels wide), height tracks terrain",
+            locations[0].description);
+  EXPECT_EQ("Transparent", locations[1].name);
+  EXPECT_EQ("", locations[1].description);
+  EXPECT_EQ("Fruity", locations[2].name);
+  EXPECT_EQ("If the <tessellate> tag has a value of n",
+            locations[2].description);
+
+  for (unsigned i = 0; i < locations.size(); ++i) {
+    EXPECT_FLOAT_EQ(-122.084075, locations[i].longitude);
+    EXPECT_FLOAT_EQ(37.4220033612141, locations[i].latitude);
+    EXPECT_FLOAT_EQ(50, locations[i].elevation);
+  }
+}
+
+char kOneCoordinateText[] =
+    "<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
+    "<kml xmlns=\"http://earth.google.com/kml/2.x\">"
+    "<Placemark>"
+    "<Point>"
+    "<coordinates>-122.0822035425683,37.42228990140251,0</coordinates>"
+    "</Point>"
+    "</Placemark>"
+    "</kml>";
+TEST(KmlParser, ParseOneCoordinateFile) {
+  GpsFixArray locations;
+  std::string error;
+  ASSERT_TRUE(ParseKmlFile(&locations, kOneCoordinateText, &error));
+
+  EXPECT_EQ("", error);
+  EXPECT_EQ(1U, locations.size());
+  EXPECT_FLOAT_EQ(-122.0822035425683, locations[0].longitude);
+  EXPECT_FLOAT_EQ(37.42228990140251, locations[0].latitude);
+  EXPECT_FLOAT_EQ(0.0, locations[0].elevation);
+}
+
+TEST(KmlParser, ParseOneCoordinateString) {
+  GpsFixArray locations;
+  std::string error;
+  ASSERT_TRUE(ParseKmlString(&locations, kOneCoordinateText, &error));
+
+  EXPECT_EQ("", error);
+  EXPECT_EQ(1U, locations.size());
+  EXPECT_FLOAT_EQ(-122.0822035425683, locations[0].longitude);
+  EXPECT_FLOAT_EQ(37.42228990140251, locations[0].latitude);
+  EXPECT_FLOAT_EQ(0.0, locations[0].elevation);
+}
+
+char kMultipleCoordinatesText[] =
+    "<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
+    "<kml xmlns=\"http://earth.google.com/kml/2.x\">"
+    "<Placemark>"
+    "<LineString>"
+    "<coordinates>-122.0822035425683,37.42228990140251,0 \
+                  10.4,39.,20\t\t0,21.4,1"
+    "</coordinates>"
+    "</LineString>"
+    "</Placemark>"
+    "</kml>";
+TEST(KmlParser, ParseMultipleCoordinatesFile) {
+  GpsFixArray locations;
+  std::string error;
+  ASSERT_TRUE(ParseKmlFile(&locations, kMultipleCoordinatesText, &error));
+
+  EXPECT_EQ("", error);
+  ASSERT_EQ(3U, locations.size());
+
+  EXPECT_FLOAT_EQ(-122.0822035425683, locations[0].longitude);
+  EXPECT_FLOAT_EQ(37.42228990140251, locations[0].latitude);
+  EXPECT_FLOAT_EQ(0, locations[0].elevation);
+  EXPECT_FLOAT_EQ(10.4, locations[1].longitude);
+  EXPECT_FLOAT_EQ(39., locations[1].latitude);
+  EXPECT_FLOAT_EQ(20, locations[1].elevation);
+  EXPECT_FLOAT_EQ(0, locations[2].longitude);
+  EXPECT_FLOAT_EQ(21.4, locations[2].latitude);
+  EXPECT_FLOAT_EQ(1, locations[2].elevation);
+}
+
+TEST(KmlParser, ParseMultipleCoordinatesString) {
+  GpsFixArray locations;
+  std::string error;
+  ASSERT_TRUE(ParseKmlString(&locations, kMultipleCoordinatesText, &error));
+
+  EXPECT_EQ("", error);
+  ASSERT_EQ(3U, locations.size());
+
+  EXPECT_FLOAT_EQ(-122.0822035425683, locations[0].longitude);
+  EXPECT_FLOAT_EQ(37.42228990140251, locations[0].latitude);
+  EXPECT_FLOAT_EQ(0, locations[0].elevation);
+  EXPECT_FLOAT_EQ(10.4, locations[1].longitude);
+  EXPECT_FLOAT_EQ(39., locations[1].latitude);
+  EXPECT_FLOAT_EQ(20, locations[1].elevation);
+  EXPECT_FLOAT_EQ(0, locations[2].longitude);
+  EXPECT_FLOAT_EQ(21.4, locations[2].latitude);
+  EXPECT_FLOAT_EQ(1, locations[2].elevation);
+}
+
+char kBadCoordinatesText[] =
+    "<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
+    "<kml xmlns=\"http://earth.google.com/kml/2.x\">"
+    "<Placemark>"
+    "<LineString>"
+    "<coordinates>-122.0822035425683, 37.42228990140251, 0 \
+                  10.4,39.20\t021.41"
+    "</coordinates>"
+    "</LineString>"
+    "</Placemark>"
+    "</kml>";
+TEST(KmlParser, ParseBadCoordinatesFile) {
+  GpsFixArray locations;
+  std::string error;
+  ASSERT_FALSE(ParseKmlFile(&locations, kBadCoordinatesText, &error));
+}
+
+TEST(KmlParser, ParseBadCoordinatesString) {
+  GpsFixArray locations;
+  std::string error;
+  ASSERT_FALSE(ParseKmlString(&locations, kBadCoordinatesText, &error));
+}
+
+char kLocationNormalText[] =
+    "<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
+    "<kml xmlns=\"http://earth.google.com/kml/2.x\">"
+    "<Placemark>"
+    "<name>Simple placemark</name>"
+    "<description>Attached to the ground.</description>"
+    "<Point>"
+    "<coordinates>-122.0822035425683,37.42228990140251,0</coordinates>"
+    "</Point>"
+    "</Placemark>"
+    "</kml>";
+TEST(KmlParser, ParseLocationNormalFile) {
+  GpsFixArray locations;
+  std::string error;
+
+  ASSERT_TRUE(ParseKmlFile(&locations, kLocationNormalText, &error));
+
+  for (unsigned i = 0; i < locations.size(); ++i) {
+    EXPECT_EQ("Simple placemark", locations[i].name);
+    EXPECT_EQ("Attached to the ground.", locations[i].description);
+    EXPECT_FLOAT_EQ(-122.0822035425683, locations[i].longitude);
+    EXPECT_FLOAT_EQ(37.42228990140251, locations[i].latitude);
+    EXPECT_FLOAT_EQ(0, locations[i].elevation);
+  }
+}
+
+TEST(KmlParser, ParseLocationNormalString) {
+  GpsFixArray locations;
+  std::string error;
+
+  ASSERT_TRUE(ParseKmlString(&locations, kLocationNormalText, &error));
+
+  for (unsigned i = 0; i < locations.size(); ++i) {
+    EXPECT_EQ("Simple placemark", locations[i].name);
+    EXPECT_EQ("Attached to the ground.", locations[i].description);
+    EXPECT_FLOAT_EQ(-122.0822035425683, locations[i].longitude);
+    EXPECT_FLOAT_EQ(37.42228990140251, locations[i].latitude);
+    EXPECT_FLOAT_EQ(0, locations[i].elevation);
+  }
+}
+
+char kLocationMissingFieldsText[] =
+    "<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
+    "<kml xmlns=\"http://earth.google.com/kml/2.x\">"
+    "<Placemark>"
+    "<Point>"
+    "<coordinates>-122.0822035425683,37.42228990140251,0</coordinates>"
+    "</Point>"
+    "</Placemark>"
+    "</kml>";
+TEST(KmlParser, ParseLocationNormalMissingOptionalFieldsFile) {
+  GpsFixArray locations;
+  std::string error;
+
+  ASSERT_TRUE(ParseKmlFile(&locations, kLocationMissingFieldsText, &error));
+  EXPECT_EQ("", error);
+  ASSERT_EQ(1U, locations.size());
+  for (unsigned i = 0; i < locations.size(); ++i) {
+    EXPECT_EQ("", locations[i].name);
+    EXPECT_EQ("", locations[i].description);
+    EXPECT_FLOAT_EQ(-122.0822035425683, locations[i].longitude);
+    EXPECT_FLOAT_EQ(37.42228990140251, locations[i].latitude);
+    EXPECT_FLOAT_EQ(0, locations[i].elevation);
+  }
+}
+
+TEST(KmlParser, ParseLocationNormalMissingOptionalFieldsString) {
+  GpsFixArray locations;
+  std::string error;
+
+  ASSERT_TRUE(ParseKmlString(&locations, kLocationMissingFieldsText, &error));
+  EXPECT_EQ("", error);
+  ASSERT_EQ(1U, locations.size());
+  for (unsigned i = 0; i < locations.size(); ++i) {
+    EXPECT_EQ("", locations[i].name);
+    EXPECT_EQ("", locations[i].description);
+    EXPECT_FLOAT_EQ(-122.0822035425683, locations[i].longitude);
+    EXPECT_FLOAT_EQ(37.42228990140251, locations[i].latitude);
+    EXPECT_FLOAT_EQ(0, locations[i].elevation);
+  }
+}
+
+char kLocationMissingRequiredFieldsText[] =
+    "<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
+    "<kml xmlns=\"http://earth.google.com/kml/2.x\">"
+    "<Placemark>"
+    "<name>Simple placemark</name>"
+    "<description>Attached to the ground.</description>"
+    "</Placemark>"
+    "</kml>";
+TEST(KmlParser, ParseLocationMissingRequiredFieldsFile) {
+  GpsFixArray locations;
+  std::string error;
+
+  ASSERT_FALSE(
+      ParseKmlFile(&locations, kLocationMissingRequiredFieldsText, &error));
+  EXPECT_EQ("Location found with missing or malformed coordinates", error);
+}
+
+TEST(KmlParser, ParseLocationMissingRequiredFieldsString) {
+  GpsFixArray locations;
+  std::string error;
+
+  ASSERT_FALSE(
+      ParseKmlString(&locations, kLocationMissingRequiredFieldsText, &error));
+  EXPECT_EQ("Location found with missing or malformed coordinates", error);
+}
+
+char kLocationNameOnlyFirstText[] =
+    "<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
+    "<kml xmlns=\"http://earth.google.com/kml/2.x\">"
+    "<Placemark>"
+    "<name>Simple placemark</name>kk0"
+    "<description>Attached to the ground.</description>"
+    "<LineString>"
+    "<coordinates>-122.0822035425683,37.42228990140251,0\
+                  -122.0822035425683,37.42228990140251,0</coordinates>"
+    "</LineString>"
+    "</Placemark>"
+    "</kml>";
+TEST(KmlParser, ParseLocationNameOnlyFirstFile) {
+  GpsFixArray locations;
+  std::string error;
+  EXPECT_TRUE(ParseKmlFile(&locations, kLocationNameOnlyFirstText, &error));
+  EXPECT_EQ("", error);
+  ASSERT_EQ(2U, locations.size());
+
+  EXPECT_EQ("Simple placemark", locations[0].name);
+  EXPECT_EQ("Attached to the ground.", locations[0].description);
+  EXPECT_EQ("", locations[1].name);
+  EXPECT_EQ("", locations[1].description);
+
+  for (unsigned i = 0; i < locations.size(); ++i) {
+    EXPECT_FLOAT_EQ(-122.0822035425683, locations[i].longitude);
+    EXPECT_FLOAT_EQ(37.42228990140251, locations[i].latitude);
+    EXPECT_FLOAT_EQ(0, locations[i].elevation);
+  }
+}
+
+TEST(KmlParser, ParseLocationNameOnlyFirstString) {
+  GpsFixArray locations;
+  std::string error;
+  EXPECT_TRUE(ParseKmlString(&locations, kLocationNameOnlyFirstText, &error));
+  EXPECT_EQ("", error);
+  ASSERT_EQ(2U, locations.size());
+
+  EXPECT_EQ("Simple placemark", locations[0].name);
+  EXPECT_EQ("Attached to the ground.", locations[0].description);
+  EXPECT_EQ("", locations[1].name);
+  EXPECT_EQ("", locations[1].description);
+
+  for (unsigned i = 0; i < locations.size(); ++i) {
+    EXPECT_FLOAT_EQ(-122.0822035425683, locations[i].longitude);
+    EXPECT_FLOAT_EQ(37.42228990140251, locations[i].latitude);
+    EXPECT_FLOAT_EQ(0, locations[i].elevation);
+  }
+}
+
+char kMultipleLocationsText[] =
+    "<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
+    "<kml xmlns=\"http://earth.google.com/kml/2.x\">"
+    "<Placemark>"
+    "<name>Simple placemark</name>"
+    "<description>Attached to the ground.</description>"
+    "<Point>"
+    "<coordinates>-122.0822035425683,37.42228990140251,0</coordinates>"
+    "</Point>"
+    "</Placemark>"
+    "<Placemark>"
+    "<name>Simple placemark</name>"
+    "<description>Attached to the ground.</description>"
+    "<Point>"
+    "<coordinates>-122.0822035425683,37.42228990140251,0\
+                  -122.0822035425683,37.42228990140251,0</coordinates>"
+    "</Point>"
+    "</Placemark>"
+    "<Placemark>"
+    "<name>Simple placemark</name>"
+    "<description>Attached to the ground.</description>"
+    "<Point>"
+    "<coordinates>-122.0822035425683,37.42228990140251,0</coordinates>"
+    "</Point>"
+    "</Placemark>"
+    "</kml>";
+TEST(KmlParser, ParseMultipleLocationsFile) {
+  GpsFixArray locations;
+  std::string error;
+  EXPECT_TRUE(ParseKmlFile(&locations, kMultipleLocationsText, &error));
+  EXPECT_EQ("", error);
+  ASSERT_EQ(4U, locations.size());
+
+  for (unsigned i = 0; i < locations.size(); ++i) {
+    if (i != 2) {
+      EXPECT_EQ("Simple placemark", locations[i].name);
+      EXPECT_EQ("Attached to the ground.", locations[i].description);
+    } else {
+      EXPECT_EQ("", locations[i].name);
+      EXPECT_EQ("", locations[i].description);
+    }
+    EXPECT_FLOAT_EQ(-122.0822035425683, locations[i].longitude);
+    EXPECT_FLOAT_EQ(37.42228990140251, locations[i].latitude);
+    EXPECT_FLOAT_EQ(0, locations[i].elevation);
+  }
+}
+
+TEST(KmlParser, ParseMultipleLocationsString) {
+  GpsFixArray locations;
+  std::string error;
+  EXPECT_TRUE(ParseKmlString(&locations, kMultipleLocationsText, &error));
+  EXPECT_EQ("", error);
+  ASSERT_EQ(4U, locations.size());
+
+  for (unsigned i = 0; i < locations.size(); ++i) {
+    if (i != 2) {
+      EXPECT_EQ("Simple placemark", locations[i].name);
+      EXPECT_EQ("Attached to the ground.", locations[i].description);
+    } else {
+      EXPECT_EQ("", locations[i].name);
+      EXPECT_EQ("", locations[i].description);
+    }
+    EXPECT_FLOAT_EQ(-122.0822035425683, locations[i].longitude);
+    EXPECT_FLOAT_EQ(37.42228990140251, locations[i].latitude);
+    EXPECT_FLOAT_EQ(0, locations[i].elevation);
+  }
+}
+
+char kTraverseEmptyDocText[] =
+    "<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
+    "<kml xmlns=\"http://earth.google.com/kml/2.x\"></kml>";
+TEST(KmlParser, TraverseEmptyDocFile) {
+  GpsFixArray locations;
+  std::string error;
+  EXPECT_TRUE(ParseKmlFile(&locations, kTraverseEmptyDocText, &error));
+  EXPECT_EQ("", error);
+  EXPECT_EQ(0U, locations.size());
+}
+
+TEST(KmlParser, TraverseEmptyDocString) {
+  GpsFixArray locations;
+  std::string error;
+  EXPECT_TRUE(ParseKmlString(&locations, kTraverseEmptyDocText, &error));
+  EXPECT_EQ("", error);
+  EXPECT_EQ(0U, locations.size());
+}
+
+char kNoPlacemarksText[] =
+    "<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
+    "<kml xmlns=\"http://earth.google.com/kml/2.x\">"
+    "<LineString></LineString>"
+    "<name></name>"
+    "</kml>";
+TEST(KmlParser, TraverseDocNoPlacemarksFile) {
+  GpsFixArray locations;
+  std::string error;
+  EXPECT_TRUE(ParseKmlFile(&locations, kNoPlacemarksText, &error));
+  EXPECT_EQ("", error);
+  EXPECT_EQ(0U, locations.size());
+}
+
+TEST(KmlParser, TraverseDocNoPlacemarksString) {
+  GpsFixArray locations;
+  std::string error;
+  EXPECT_TRUE(ParseKmlString(&locations, kNoPlacemarksText, &error));
+  EXPECT_EQ("", error);
+  EXPECT_EQ(0U, locations.size());
+}
+
+char kNestedDocText[] =
+    "<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
+    "<kml xmlns=\"http://earth.google.com/kml/2.x\">"
+    "<Document>"
+    "<Folder>"
+    "<name>Placemarks</name>"
+    "<description>These are just some of the different kinds of placemarks "
+    "with"
+    "which you can mark your favorite places</description>"
+    "<LookAt>"
+    "<longitude>-122.0839597145766</longitude>"
+    "<latitude>37.42222904525232</latitude>"
+    "<altitude>0</altitude>"
+    "<heading>-148.4122922628044</heading>"
+    "<tilt>40.5575073395506</tilt>"
+    "<range>500.6566641072245</range>"
+    "</LookAt>"
+    "<Placemark>"
+    "<name>Simple placemark</name>"
+    "<description>Attached to the ground.</description>"
+    "<Point>"
+    "<coordinates>-122.0822035425683,37.42228990140251,0</coordinates>"
+    "</Point>"
+    "</Placemark>"
+    "</Folder>"
+    "</Document>"
+    "</kml>";
+TEST(KmlParser, TraverseNestedDocFile) {
+  GpsFixArray locations;
+  std::string error;
+  EXPECT_TRUE(ParseKmlFile(&locations, kNestedDocText, &error));
+  EXPECT_EQ("", error);
+  ASSERT_EQ(1U, locations.size());
+
+  EXPECT_EQ("Simple placemark", locations[0].name);
+  EXPECT_EQ("Attached to the ground.", locations[0].description);
+  EXPECT_FLOAT_EQ(-122.0822035425683, locations[0].longitude);
+  EXPECT_FLOAT_EQ(37.42228990140251, locations[0].latitude);
+  EXPECT_FLOAT_EQ(0, locations[0].elevation);
+}
+
+TEST(KmlParser, TraverseNestedDocString) {
+  GpsFixArray locations;
+  std::string error;
+  EXPECT_TRUE(ParseKmlString(&locations, kNestedDocText, &error));
+  EXPECT_EQ("", error);
+  ASSERT_EQ(1U, locations.size());
+
+  EXPECT_EQ("Simple placemark", locations[0].name);
+  EXPECT_EQ("Attached to the ground.", locations[0].description);
+  EXPECT_FLOAT_EQ(-122.0822035425683, locations[0].longitude);
+  EXPECT_FLOAT_EQ(37.42228990140251, locations[0].latitude);
+  EXPECT_FLOAT_EQ(0, locations[0].elevation);
+}
+
+char kNullNameNoCrashText[] =
+    "<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
+    "<kml xmlns=\"http://earth.google.com/kml/2.x\">"
+    "<Placemark>"
+    "<name/>"
+    "<description/>"
+    "<Point>"
+    "<coordinates>-122.0822035425683,37.42228990140251,0</coordinates>"
+    "</Point>"
+    "</Placemark>"
+    "</kml>";
+TEST(KmlParser, ParsePlacemarkNullNameNoCrashFile) {
+  GpsFixArray locations;
+  std::string error;
+  EXPECT_TRUE(ParseKmlFile(&locations, kNullNameNoCrashText, &error));
+  ASSERT_EQ(1U, locations.size());
+  EXPECT_STREQ("", locations.front().name.c_str());
+  EXPECT_STREQ("", locations.front().description.c_str());
+}
+
+TEST(KmlParser, ParsePlacemarkNullNameNoCrashString) {
+  GpsFixArray locations;
+  std::string error;
+  EXPECT_TRUE(ParseKmlString(&locations, kNullNameNoCrashText, &error));
+  ASSERT_EQ(1U, locations.size());
+  EXPECT_STREQ("", locations.front().name.c_str());
+  EXPECT_STREQ("", locations.front().description.c_str());
+}
+
+}  // namespace cuttlefish
\ No newline at end of file
diff --git a/host/commands/cvd_import_locations/unittest/main_test.cc b/host/commands/cvd_import_locations/unittest/main_test.cc
new file mode 100644
index 0000000..77cc9bc
--- /dev/null
+++ b/host/commands/cvd_import_locations/unittest/main_test.cc
@@ -0,0 +1,23 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <android-base/file.h>
+#include <gtest/gtest.h>
+
+int main(int argc, char** argv) {
+  ::testing::InitGoogleTest(&argc, argv);
+  return RUN_ALL_TESTS();
+}
\ No newline at end of file
diff --git a/host/commands/cvd_load_tester/Android.bp b/host/commands/cvd_load_tester/Android.bp
new file mode 100644
index 0000000..d8ed6da
--- /dev/null
+++ b/host/commands/cvd_load_tester/Android.bp
@@ -0,0 +1,42 @@
+//
+// Copyright (C) 2023 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+cc_binary {
+    name: "test_cvd_load_parser",
+    shared_libs: [
+        "libext2_blkid",
+        "libbase",
+        "libcuttlefish_fs",
+        "libcuttlefish_utils",
+        "libjsoncpp",
+        "libgrpc++_unsecure",
+        "libxml2",
+    ],
+    static_libs: [
+        "libprotobuf-cpp-full",
+        "libcuttlefish_launch_cvd_proto",
+        "libcvd_parser",
+        "libcuttlefish_host_config",
+        "libgflags",
+    ],
+    srcs: [
+        "main.cc",
+    ],
+    defaults: ["cvd_load_defaults"],
+}
\ No newline at end of file
diff --git a/host/commands/cvd_load_tester/defaults.json b/host/commands/cvd_load_tester/defaults.json
new file mode 100644
index 0000000..5a77af1
--- /dev/null
+++ b/host/commands/cvd_load_tester/defaults.json
@@ -0,0 +1,43 @@
+{
+        "instances" :
+        [
+            {
+                "vm": {
+                    "memory_mb": 2048
+                }
+            },
+            {
+                "vm": {
+                    "memory_mb": 4096
+                },
+                "graphics":{
+                    "displays":[
+                        {
+                            "width": 1080,
+                            "height": 600,
+                            "dpi": 120
+                        },
+                        {
+                            "width": 400,
+                            "height": 600,
+                            "dpi": 150
+                        }
+                    ]
+                    }
+            },
+            {
+                "vm": {
+                    "memory_mb": 4096
+                },
+                "graphics":{
+                    "displays":[
+                        {
+                            "width": 2560,
+                            "height": 1800,
+                            "dpi": 320
+                        }
+                    ]
+                    }
+            }
+        ]
+    }
\ No newline at end of file
diff --git a/host/commands/cvd_load_tester/end_to_end_test/multi/TC1.json b/host/commands/cvd_load_tester/end_to_end_test/multi/TC1.json
new file mode 100644
index 0000000..a0cb406
--- /dev/null
+++ b/host/commands/cvd_load_tester/end_to_end_test/multi/TC1.json
@@ -0,0 +1,43 @@
+{
+    "instances" :
+    [
+        {
+            "vm": {
+                "memory_mb": 2048
+            }
+        },
+        {
+            "vm": {
+                "memory_mb": 4096
+            },
+            "graphics":{
+                "displays":[
+                    {
+                        "width": 1080,
+                        "height": 600,
+                        "dpi": 120
+                    },
+                    {
+                        "width": 400,
+                        "height": 600,
+                        "dpi": 150
+                    }
+                ]
+                }
+        },
+        {
+            "vm": {
+                "memory_mb": 4096
+            },
+            "graphics":{
+                "displays":[
+                    {
+                        "width": 2560,
+                        "height": 1800,
+                        "dpi": 320
+                    }
+                ]
+                }
+        }
+    ]
+}
\ No newline at end of file
diff --git a/host/commands/cvd_load_tester/end_to_end_test/single/TC_auto.json b/host/commands/cvd_load_tester/end_to_end_test/single/TC_auto.json
new file mode 100644
index 0000000..8f787fb
--- /dev/null
+++ b/host/commands/cvd_load_tester/end_to_end_test/single/TC_auto.json
@@ -0,0 +1,24 @@
+{
+    "instances" :
+    [
+        {
+            "vm": {
+                "memory_mb": 4096
+            },
+            "graphics":{
+                "displays":[
+                    {
+                        "width": 1080,
+                        "height": 600,
+                        "dpi": 120
+                    },
+                    {
+                        "width": 400,
+                        "height": 600,
+                        "dpi": 120
+                    }
+                ]
+			}
+        }
+    ]
+}
diff --git a/host/commands/cvd_load_tester/end_to_end_test/single/TC_foldable.json b/host/commands/cvd_load_tester/end_to_end_test/single/TC_foldable.json
new file mode 100644
index 0000000..5419c22
--- /dev/null
+++ b/host/commands/cvd_load_tester/end_to_end_test/single/TC_foldable.json
@@ -0,0 +1,65 @@
+{
+        "instances" :
+        [
+            {
+                "vm": {
+                        "memory_mb": 4096,
+                        "custom_actions" : [
+                                {
+                                        "device_states": [
+                                                {
+                                                        "lid_switch_open": false,
+                                                        "hinge_angle_value": 0
+                                                }
+                                        ],
+                                        "button":{
+                                                "command":"device_state_closed",
+                                                "title":"Device State Closed",
+                                                "icon_name":"smartphone"
+                                        }
+                                },
+                                {
+                                        "device_states": [
+                                                {
+                                                        "lid_switch_open": true,
+                                                        "hinge_angle_value": 90
+                                                }
+                                        ],
+                                        "button":{
+                                                "command":"device_state_half_opened",
+                                                "title":"Device State Half-Opened",
+                                                "icon_name":"laptop"
+                                        }
+                                },
+                                {
+                                        "device_states": [
+                                                {
+                                                        "lid_switch_open": true,
+                                                        "hinge_angle_value": 180
+                                                }
+                                        ],
+                                        "button":{
+                                                "command":"device_state_opened",
+                                                "title":"Device State Opened",
+                                                "icon_name":"tablet"
+                                        }
+                                }
+                        ]
+                },
+                "graphics":{
+                        "displays":[
+                            {
+                                "width": 1768,
+                                "height": 2208,
+                                "dpi": 374
+                            },
+                            {
+                                "width": 832,
+                                "height": 2268,
+                                "dpi": 387
+                            }
+                        ]
+                }
+            }
+        ]
+}
diff --git a/host/commands/cvd_load_tester/end_to_end_test/single/TC_go.json b/host/commands/cvd_load_tester/end_to_end_test/single/TC_go.json
new file mode 100644
index 0000000..ef24d44
--- /dev/null
+++ b/host/commands/cvd_load_tester/end_to_end_test/single/TC_go.json
@@ -0,0 +1,19 @@
+{
+    "instances" :
+    [
+        {
+            "vm": {
+                "memory_mb": 2048
+            },
+            "graphics":{
+                "displays":[
+                    {
+                        "width": 720,
+                        "height": 1280,
+                        "dpi": 320
+                    }
+                ]
+			}
+        }
+    ]
+}
diff --git a/host/commands/cvd_load_tester/end_to_end_test/single/TC_phone.json b/host/commands/cvd_load_tester/end_to_end_test/single/TC_phone.json
new file mode 100644
index 0000000..ef24d44
--- /dev/null
+++ b/host/commands/cvd_load_tester/end_to_end_test/single/TC_phone.json
@@ -0,0 +1,19 @@
+{
+    "instances" :
+    [
+        {
+            "vm": {
+                "memory_mb": 2048
+            },
+            "graphics":{
+                "displays":[
+                    {
+                        "width": 720,
+                        "height": 1280,
+                        "dpi": 320
+                    }
+                ]
+			}
+        }
+    ]
+}
diff --git a/host/commands/cvd_load_tester/end_to_end_test/single/TC_slim.json b/host/commands/cvd_load_tester/end_to_end_test/single/TC_slim.json
new file mode 100644
index 0000000..19c4760
--- /dev/null
+++ b/host/commands/cvd_load_tester/end_to_end_test/single/TC_slim.json
@@ -0,0 +1,20 @@
+{
+    "instances" :
+    [
+        {
+            "vm": {
+                "memory_mb": 2048,
+				"use_sdcard" : false
+            },
+            "graphics":{
+                "displays":[
+                    {
+                        "width": 720,
+                        "height": 1280,
+                        "dpi": 320
+                    }
+                ]
+			}
+        }
+    ]
+}
diff --git a/host/commands/cvd_load_tester/end_to_end_test/single/TC_tablet.json b/host/commands/cvd_load_tester/end_to_end_test/single/TC_tablet.json
new file mode 100644
index 0000000..fe6217e
--- /dev/null
+++ b/host/commands/cvd_load_tester/end_to_end_test/single/TC_tablet.json
@@ -0,0 +1,19 @@
+{
+    "instances" :
+    [
+        {
+            "vm": {
+                "memory_mb": 4096
+            },
+            "graphics":{
+                "displays":[
+                    {
+                        "width": 2560,
+                        "height": 1800,
+                        "dpi": 320
+                    }
+                ]
+			}
+        }
+    ]
+}
diff --git a/host/commands/cvd_load_tester/end_to_end_test/single/TC_tv.json b/host/commands/cvd_load_tester/end_to_end_test/single/TC_tv.json
new file mode 100644
index 0000000..7f288f4
--- /dev/null
+++ b/host/commands/cvd_load_tester/end_to_end_test/single/TC_tv.json
@@ -0,0 +1,19 @@
+{
+    "instances" :
+    [
+        {
+            "vm": {
+                "memory_mb": 2048
+            },
+            "graphics":{
+                "displays":[
+                    {
+                        "width": 1920,
+                        "height": 1080,
+                        "dpi": 213
+                    }
+                ]
+			}
+        }
+    ]
+}
diff --git a/host/commands/cvd_load_tester/end_to_end_test/single/TC_wear.json b/host/commands/cvd_load_tester/end_to_end_test/single/TC_wear.json
new file mode 100644
index 0000000..311c01f
--- /dev/null
+++ b/host/commands/cvd_load_tester/end_to_end_test/single/TC_wear.json
@@ -0,0 +1,20 @@
+{
+    "instances" :
+    [
+        {
+            "vm": {
+                "memory_mb": 1536,
+				"use_sdcard" : false
+            },
+            "graphics":{
+                "displays":[
+                    {
+                        "width": 450,
+                        "height": 450,
+                        "dpi": 320
+                    }
+                ]
+			}
+        }
+    ]
+}
diff --git a/host/commands/cvd_load_tester/main.cc b/host/commands/cvd_load_tester/main.cc
new file mode 100644
index 0000000..0a9fff2
--- /dev/null
+++ b/host/commands/cvd_load_tester/main.cc
@@ -0,0 +1,66 @@
+
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <fstream>
+#include <iostream>
+#include <string>
+
+#include <android-base/logging.h>
+#include <gflags/gflags.h>
+
+#include "host/commands/cvd/parser/load_configs_parser.h"
+
+DEFINE_string(config_file_path, "", "config file path for default configs");
+
+namespace cuttlefish {
+int CvdLoadParserMain(int argc, char** argv) {
+  ::android::base::InitLogging(argv, android::base::StderrLogger);
+  google::ParseCommandLineFlags(&argc, &argv, true);
+
+  auto json_configs = cuttlefish::ParseJsonFile(FLAGS_config_file_path);
+  if (!json_configs.ok()) {
+    LOG(INFO) << "parsing input file failed";
+    return 1;
+  }
+
+  auto cvd_flags = cuttlefish::ParseCvdConfigs(*json_configs);
+  if (!cvd_flags.ok()) {
+    LOG(INFO) << "parsing json configs failed";
+    return 1;
+  }
+  LOG(INFO) << "Parsing succeeded";
+  for (auto& parsed_launch_flag : cvd_flags->launch_cvd_flags) {
+    LOG(INFO) << parsed_launch_flag;
+  }
+
+  LOG(INFO) << "credential = " << cvd_flags->fetch_cvd_flags.credential;
+
+  int i = 0;
+  for (auto& parsed_fetch_instance_flag :
+       cvd_flags->fetch_cvd_flags.instances) {
+    LOG(INFO) << i << " -- " << parsed_fetch_instance_flag.default_build << ","
+              << parsed_fetch_instance_flag.system_build << ","
+              << parsed_fetch_instance_flag.kernel_build << ","
+              << parsed_fetch_instance_flag.use_fetch_artifact;
+    i++;
+  }
+
+  return 0;
+}
+}  // namespace cuttlefish
+int main(int argc, char** argv) {
+  return cuttlefish::CvdLoadParserMain(argc, argv);
+}
diff --git a/host/commands/cvd_load_tester/mvp_features/graphics/displays/TC1.json b/host/commands/cvd_load_tester/mvp_features/graphics/displays/TC1.json
new file mode 100644
index 0000000..3718bb7
--- /dev/null
+++ b/host/commands/cvd_load_tester/mvp_features/graphics/displays/TC1.json
@@ -0,0 +1,13 @@
+{
+    "instances" :
+    [
+        {
+            "graphics": {
+            }
+        },
+        {
+            "graphics": {
+            }
+        }
+    ]
+}
\ No newline at end of file
diff --git a/host/commands/cvd_load_tester/mvp_features/graphics/displays/TC2.json b/host/commands/cvd_load_tester/mvp_features/graphics/displays/TC2.json
new file mode 100644
index 0000000..c147f4a
--- /dev/null
+++ b/host/commands/cvd_load_tester/mvp_features/graphics/displays/TC2.json
@@ -0,0 +1,23 @@
+{
+    "instances" :
+    [
+        {
+            "graphics":{
+                "displays":[
+                    {
+                    }
+                ]
+                }
+        },
+        {
+            "graphics":{
+                "displays":[
+                    {
+                    },
+                    {
+                    }
+                ]
+                }
+        }
+    ]
+}
\ No newline at end of file
diff --git a/host/commands/cvd_load_tester/mvp_features/graphics/displays/TC3.json b/host/commands/cvd_load_tester/mvp_features/graphics/displays/TC3.json
new file mode 100644
index 0000000..4e9195e
--- /dev/null
+++ b/host/commands/cvd_load_tester/mvp_features/graphics/displays/TC3.json
@@ -0,0 +1,35 @@
+{
+    "instances" :
+    [
+        {
+            "graphics":{
+                "displays":[
+                    {
+                        "width": 1080,
+                        "height": 600,
+                        "dpi": 120,
+                        "refresh_rate_hertz": 60
+                    },
+                    {
+                        "width": 400,
+                        "height": 600,
+                        "dpi": 120,
+                        "refresh_rate_hertz": 60
+                    }
+                ]
+                }
+        },
+        {
+            "graphics":{
+                "displays":[
+                    {
+                        "width": 2560,
+                        "height": 1800,
+                        "dpi": 320,
+                        "refresh_rate_hertz": 60
+                    }
+                ]
+                }
+        }
+    ]
+}
\ No newline at end of file
diff --git a/host/commands/cvd_load_tester/mvp_features/security/guest_enforce_security/TC1.json b/host/commands/cvd_load_tester/mvp_features/security/guest_enforce_security/TC1.json
new file mode 100644
index 0000000..da19f0f
--- /dev/null
+++ b/host/commands/cvd_load_tester/mvp_features/security/guest_enforce_security/TC1.json
@@ -0,0 +1,17 @@
+
+{
+    "instances" :
+    [
+        {
+            "security": {
+            }
+        },
+        {
+            "vm": {
+            },
+            "security": {
+                "guest_enforce_security": false
+            }
+        }
+    ]
+}
\ No newline at end of file
diff --git a/host/commands/cvd_load_tester/mvp_features/security/guest_enforce_security/TC2.json b/host/commands/cvd_load_tester/mvp_features/security/guest_enforce_security/TC2.json
new file mode 100644
index 0000000..a237082
--- /dev/null
+++ b/host/commands/cvd_load_tester/mvp_features/security/guest_enforce_security/TC2.json
@@ -0,0 +1,15 @@
+{
+    "instances" :
+    [
+        {
+            "security": {
+                "guest_enforce_security": false
+            }
+        },
+        {
+            "security": {
+                "guest_enforce_security": false
+            }
+        }
+    ]
+}
\ No newline at end of file
diff --git a/host/commands/cvd_load_tester/mvp_features/vm/cpus/TC1.json b/host/commands/cvd_load_tester/mvp_features/vm/cpus/TC1.json
new file mode 100644
index 0000000..7c8cf89
--- /dev/null
+++ b/host/commands/cvd_load_tester/mvp_features/vm/cpus/TC1.json
@@ -0,0 +1,15 @@
+
+{
+    "instances" :
+    [
+        {
+            "vm": {
+            }
+        },
+        {
+            "vm": {
+                "cpus": 4
+            }
+        }
+    ]
+}
\ No newline at end of file
diff --git a/host/commands/cvd_load_tester/mvp_features/vm/cpus/TC2.json b/host/commands/cvd_load_tester/mvp_features/vm/cpus/TC2.json
new file mode 100644
index 0000000..a975433
--- /dev/null
+++ b/host/commands/cvd_load_tester/mvp_features/vm/cpus/TC2.json
@@ -0,0 +1,15 @@
+{
+    "instances" :
+    [
+        {
+            "vm": {
+                "cpus": 4
+            }
+        },
+        {
+            "vm": {
+                "cpus": 6
+            }
+        }
+    ]
+}
\ No newline at end of file
diff --git a/host/commands/cvd_load_tester/mvp_features/vm/custom_actions/TC1.json b/host/commands/cvd_load_tester/mvp_features/vm/custom_actions/TC1.json
new file mode 100644
index 0000000..239e0d4
--- /dev/null
+++ b/host/commands/cvd_load_tester/mvp_features/vm/custom_actions/TC1.json
@@ -0,0 +1,40 @@
+{
+        "instances" :
+        [
+            {
+                "vm": {
+                    "memory_mb": 2048
+                }
+            },
+            {
+                "vm": {
+                    "memory_mb": 2048,
+                    "custom_actions" : [
+                            {
+                                    "shell_command":"am start -a android.intent.action.VIEW -d https://www.android1.com/",
+                                    "button":{
+                                            "command":"web",
+                                            "title":"Web Page",
+                                            "icon_name":"language"
+                                    }
+                            },
+                            {
+                                    "server":"cuttlefish_example_action_server",
+                                    "buttons":[
+                                            {
+                                                    "command":"settings",
+                                                    "title":"Quick Settings",
+                                                    "icon_name":"settings"
+                                            },
+                                            {
+                                                    "command":"alert",
+                                                    "title":"Do Not Disturb",
+                                                    "icon_name":"notifications_paused"
+                                            }
+                                    ]
+                            }
+                    ]
+                }
+            }
+        ]
+}
\ No newline at end of file
diff --git a/host/commands/cvd_load_tester/mvp_features/vm/enable_sandbox/TC1.json b/host/commands/cvd_load_tester/mvp_features/vm/enable_sandbox/TC1.json
new file mode 100644
index 0000000..090e179
--- /dev/null
+++ b/host/commands/cvd_load_tester/mvp_features/vm/enable_sandbox/TC1.json
@@ -0,0 +1,15 @@
+{
+    "instances" :
+    [
+        {
+            "vm": {
+            }
+        },
+        {
+            "vm": {
+                "crosvm":{
+                }
+            }
+        }
+    ]
+}
diff --git a/host/commands/cvd_load_tester/mvp_features/vm/enable_sandbox/TC2.json b/host/commands/cvd_load_tester/mvp_features/vm/enable_sandbox/TC2.json
new file mode 100644
index 0000000..d88c7b9
--- /dev/null
+++ b/host/commands/cvd_load_tester/mvp_features/vm/enable_sandbox/TC2.json
@@ -0,0 +1,19 @@
+
+{
+    "instances" :
+    [
+        {
+            "vm": {
+                "crosvm":{
+                }
+            }
+        },
+        {
+            "vm": {
+                "crosvm":{
+                    "enable_sandbox": true
+                }
+            }
+        }
+    ]
+}
\ No newline at end of file
diff --git a/host/commands/cvd_load_tester/mvp_features/vm/enable_sandbox/TC3.json b/host/commands/cvd_load_tester/mvp_features/vm/enable_sandbox/TC3.json
new file mode 100644
index 0000000..03055b9
--- /dev/null
+++ b/host/commands/cvd_load_tester/mvp_features/vm/enable_sandbox/TC3.json
@@ -0,0 +1,19 @@
+{
+    "instances" :
+    [
+        {
+            "vm": {
+                "crosvm":{
+                    "enable_sandbox": true
+                }
+            }
+        },
+        {
+            "vm": {
+                "crosvm":{
+                    "enable_sandbox": true
+                }
+            }
+        }
+    ]
+}
diff --git a/host/commands/cvd_load_tester/mvp_features/vm/memory_mb/TC1.json b/host/commands/cvd_load_tester/mvp_features/vm/memory_mb/TC1.json
new file mode 100644
index 0000000..a4156a7
--- /dev/null
+++ b/host/commands/cvd_load_tester/mvp_features/vm/memory_mb/TC1.json
@@ -0,0 +1,15 @@
+
+{
+    "instances" :
+    [
+        {
+            "vm": {
+            }
+        },
+        {
+            "vm": {
+                "memory_mb": 4096
+            }
+        }
+    ]
+}
\ No newline at end of file
diff --git a/host/commands/cvd_load_tester/mvp_features/vm/memory_mb/TC2.json b/host/commands/cvd_load_tester/mvp_features/vm/memory_mb/TC2.json
new file mode 100644
index 0000000..c2a0730
--- /dev/null
+++ b/host/commands/cvd_load_tester/mvp_features/vm/memory_mb/TC2.json
@@ -0,0 +1,15 @@
+{
+    "instances" :
+    [
+        {
+            "vm": {
+                "memory_mb": 4096
+            }
+        },
+        {
+            "vm": {
+                "memory_mb": 8192
+            }
+        }
+    ]
+}
\ No newline at end of file
diff --git a/host/commands/cvd_load_tester/mvp_features/vm/setupwizard_mode/TC1.json b/host/commands/cvd_load_tester/mvp_features/vm/setupwizard_mode/TC1.json
new file mode 100644
index 0000000..aba722d
--- /dev/null
+++ b/host/commands/cvd_load_tester/mvp_features/vm/setupwizard_mode/TC1.json
@@ -0,0 +1,13 @@
+
+  {
+    "instances" :
+    [
+        {
+            "vm": {
+                "crosvm":{
+                },
+                "setupwizard_mode": "ENABLED"
+            }
+        }
+    ]
+}
\ No newline at end of file
diff --git a/host/commands/cvd_load_tester/mvp_features/vm/setupwizard_mode/TC2.json b/host/commands/cvd_load_tester/mvp_features/vm/setupwizard_mode/TC2.json
new file mode 100644
index 0000000..eb8815d
--- /dev/null
+++ b/host/commands/cvd_load_tester/mvp_features/vm/setupwizard_mode/TC2.json
@@ -0,0 +1,18 @@
+
+{
+    "instances" :
+    [
+        {
+            "vm": {
+                "crosvm":{
+                }
+            }
+        },
+        {
+            "vm": {
+                "crosvm":{
+                }
+            }
+        }
+    ]
+}
\ No newline at end of file
diff --git a/host/commands/cvd_load_tester/mvp_features/vm/setupwizard_mode/TC3.json b/host/commands/cvd_load_tester/mvp_features/vm/setupwizard_mode/TC3.json
new file mode 100644
index 0000000..4b19f60
--- /dev/null
+++ b/host/commands/cvd_load_tester/mvp_features/vm/setupwizard_mode/TC3.json
@@ -0,0 +1,19 @@
+{
+    "instances" :
+    [
+        {
+            "vm": {
+                "crosvm":{
+                }
+            }
+        },
+        {
+            "vm": {
+                "crosvm":{
+                },
+                "setupwizard_mode": "REQUIRED"
+            }
+        }
+    ]
+}
+
diff --git a/host/commands/cvd_load_tester/mvp_features/vm/setupwizard_mode/TC4.json b/host/commands/cvd_load_tester/mvp_features/vm/setupwizard_mode/TC4.json
new file mode 100644
index 0000000..d052087
--- /dev/null
+++ b/host/commands/cvd_load_tester/mvp_features/vm/setupwizard_mode/TC4.json
@@ -0,0 +1,19 @@
+{
+    "instances" :
+    [
+        {
+            "vm": {
+                "crosvm":{
+                },
+                "setupwizard_mode": "OPTIONAL"
+            }
+        },
+        {
+            "vm": {
+                "crosvm":{
+                },
+                "setupwizard_mode": "REQUIRED"
+            }
+        }
+    ]
+}
diff --git a/host/commands/cvd_load_tester/mvp_features/vm/vm_manager/TC1.json b/host/commands/cvd_load_tester/mvp_features/vm/vm_manager/TC1.json
new file mode 100644
index 0000000..01e4963
--- /dev/null
+++ b/host/commands/cvd_load_tester/mvp_features/vm/vm_manager/TC1.json
@@ -0,0 +1,11 @@
+{
+    "instances" :
+    [
+        {
+            "vm": {
+            }
+        },
+        {
+        }
+    ]
+}
\ No newline at end of file
diff --git a/host/commands/cvd_load_tester/mvp_features/vm/vm_manager/TC2.json b/host/commands/cvd_load_tester/mvp_features/vm/vm_manager/TC2.json
new file mode 100644
index 0000000..9b25147
--- /dev/null
+++ b/host/commands/cvd_load_tester/mvp_features/vm/vm_manager/TC2.json
@@ -0,0 +1,17 @@
+{
+    "instances" :
+    [
+        {
+            "vm": {
+                "qemu":{
+                }
+            }
+        },
+        {
+            "vm": {
+                "crosvm":{
+                }
+            }
+        }
+    ]
+}
\ No newline at end of file
diff --git a/host/commands/cvd_load_tester/mvp_features/vm/vm_manager/TC3.json b/host/commands/cvd_load_tester/mvp_features/vm/vm_manager/TC3.json
new file mode 100644
index 0000000..fe2d4ec
--- /dev/null
+++ b/host/commands/cvd_load_tester/mvp_features/vm/vm_manager/TC3.json
@@ -0,0 +1,17 @@
+{
+    "instances" :
+    [
+        {
+            "vm": {
+                "crosvm":{
+                }
+            }
+        },
+        {
+            "vm": {
+                "gem5":{
+                }
+            }
+        }
+    ]
+}
\ No newline at end of file
diff --git a/host/commands/cvd_load_tester/templates_inheritance/custom/TC1.json b/host/commands/cvd_load_tester/templates_inheritance/custom/TC1.json
new file mode 100644
index 0000000..60fae2c
--- /dev/null
+++ b/host/commands/cvd_load_tester/templates_inheritance/custom/TC1.json
@@ -0,0 +1,11 @@
+{
+    "instances" :
+    [
+        {
+            "@import" : "phone",
+            "vm": {
+                "memory_mb": 4096
+            }
+        }
+    ]
+}
diff --git a/host/commands/cvd_load_tester/templates_inheritance/custom/TC2.json b/host/commands/cvd_load_tester/templates_inheritance/custom/TC2.json
new file mode 100644
index 0000000..280d7f3
--- /dev/null
+++ b/host/commands/cvd_load_tester/templates_inheritance/custom/TC2.json
@@ -0,0 +1,18 @@
+{
+    "instances" :
+    [
+        {
+            "@import" : "phone",
+            "vm": {
+                "memory_mb": 4096
+            }
+        },
+        {
+            "@import" : "tablet",
+            "vm": {
+                "memory_mb": 8192,
+                "cpus": 4
+            }
+        }
+    ]
+}
diff --git a/host/commands/cvd_load_tester/templates_inheritance/multi/TC1.json b/host/commands/cvd_load_tester/templates_inheritance/multi/TC1.json
new file mode 100644
index 0000000..7c4b158
--- /dev/null
+++ b/host/commands/cvd_load_tester/templates_inheritance/multi/TC1.json
@@ -0,0 +1,26 @@
+{
+    "instances" :
+    [
+        {
+            "@import" : "auto"
+        },
+        {
+            "@import" : "go"
+        },
+        {
+            "@import" : "phone"
+        },
+        {
+            "@import" : "slim"
+        },
+                {
+            "@import" : "tablet"
+        },
+                {
+            "@import" : "tv"
+        },
+        {
+            "@import" : "wearable"
+        }
+    ]
+}
diff --git a/host/commands/cvd_load_tester/templates_inheritance/multi/TC2.json b/host/commands/cvd_load_tester/templates_inheritance/multi/TC2.json
new file mode 100644
index 0000000..bb4c7fb
--- /dev/null
+++ b/host/commands/cvd_load_tester/templates_inheritance/multi/TC2.json
@@ -0,0 +1,29 @@
+{
+    "instances" :
+    [
+        {
+            "@import" : "auto"
+        },
+                {
+            "@import" : "foldable"
+        },
+        {
+            "@import" : "go"
+        },
+        {
+            "@import" : "phone"
+        },
+        {
+            "@import" : "slim"
+        },
+                {
+            "@import" : "tablet"
+        },
+                {
+            "@import" : "tv"
+        },
+        {
+            "@import" : "wearable"
+        }
+    ]
+}
diff --git a/host/commands/cvd_load_tester/templates_inheritance/single/TC_auto.json b/host/commands/cvd_load_tester/templates_inheritance/single/TC_auto.json
new file mode 100644
index 0000000..ded6c15
--- /dev/null
+++ b/host/commands/cvd_load_tester/templates_inheritance/single/TC_auto.json
@@ -0,0 +1,8 @@
+{
+    "instances" :
+    [
+        {
+            "@import" : "auto"
+        }
+    ]
+}
diff --git a/host/commands/cvd_load_tester/templates_inheritance/single/TC_foldable.json b/host/commands/cvd_load_tester/templates_inheritance/single/TC_foldable.json
new file mode 100644
index 0000000..1df8e1a
--- /dev/null
+++ b/host/commands/cvd_load_tester/templates_inheritance/single/TC_foldable.json
@@ -0,0 +1,8 @@
+{
+    "instances" :
+    [
+        {
+            "@import" : "foldable"
+        }
+    ]
+}
diff --git a/host/commands/cvd_load_tester/templates_inheritance/single/TC_go.json b/host/commands/cvd_load_tester/templates_inheritance/single/TC_go.json
new file mode 100644
index 0000000..b554eaa
--- /dev/null
+++ b/host/commands/cvd_load_tester/templates_inheritance/single/TC_go.json
@@ -0,0 +1,8 @@
+{
+    "instances" :
+    [
+        {
+            "@import" : "go"
+        }
+    ]
+}
diff --git a/host/commands/cvd_load_tester/templates_inheritance/single/TC_phone.json b/host/commands/cvd_load_tester/templates_inheritance/single/TC_phone.json
new file mode 100644
index 0000000..13c5f4b
--- /dev/null
+++ b/host/commands/cvd_load_tester/templates_inheritance/single/TC_phone.json
@@ -0,0 +1,8 @@
+{
+    "instances" :
+    [
+        {
+            "@import" : "phone"
+        }
+    ]
+}
diff --git a/host/commands/cvd_load_tester/templates_inheritance/single/TC_slim.json b/host/commands/cvd_load_tester/templates_inheritance/single/TC_slim.json
new file mode 100644
index 0000000..a0ab45f
--- /dev/null
+++ b/host/commands/cvd_load_tester/templates_inheritance/single/TC_slim.json
@@ -0,0 +1,8 @@
+{
+    "instances" :
+    [
+        {
+            "@import" : "slim"
+        }
+    ]
+}
diff --git a/host/commands/cvd_load_tester/templates_inheritance/single/TC_tablet.json b/host/commands/cvd_load_tester/templates_inheritance/single/TC_tablet.json
new file mode 100644
index 0000000..f62fcee
--- /dev/null
+++ b/host/commands/cvd_load_tester/templates_inheritance/single/TC_tablet.json
@@ -0,0 +1,8 @@
+{
+    "instances" :
+    [
+        {
+            "@import" : "tablet"
+        }
+    ]
+}
diff --git a/host/commands/cvd_load_tester/templates_inheritance/single/TC_tv.json b/host/commands/cvd_load_tester/templates_inheritance/single/TC_tv.json
new file mode 100644
index 0000000..bfbae34
--- /dev/null
+++ b/host/commands/cvd_load_tester/templates_inheritance/single/TC_tv.json
@@ -0,0 +1,8 @@
+{
+    "instances" :
+    [
+        {
+            "@import" : "tv"
+        }
+    ]
+}
diff --git a/host/commands/cvd_load_tester/templates_inheritance/single/TC_wear.json b/host/commands/cvd_load_tester/templates_inheritance/single/TC_wear.json
new file mode 100644
index 0000000..06c85ab
--- /dev/null
+++ b/host/commands/cvd_load_tester/templates_inheritance/single/TC_wear.json
@@ -0,0 +1,8 @@
+{
+    "instances" :
+    [
+        {
+            "@import" : "wearable"
+        }
+    ]
+}
diff --git a/host/commands/cvd_send_sms/main.cc b/host/commands/cvd_send_sms/main.cc
index 058b355..ef6d932 100644
--- a/host/commands/cvd_send_sms/main.cc
+++ b/host/commands/cvd_send_sms/main.cc
@@ -1,3 +1,18 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 #include "gflags/gflags.h"
 
 #include "android-base/logging.h"
diff --git a/host/commands/cvd_send_sms/pdu_format_builder.cc b/host/commands/cvd_send_sms/pdu_format_builder.cc
index 943819d..81c1e46 100644
--- a/host/commands/cvd_send_sms/pdu_format_builder.cc
+++ b/host/commands/cvd_send_sms/pdu_format_builder.cc
@@ -1,3 +1,18 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 #include "host/commands/cvd_send_sms/pdu_format_builder.h"
 
 #include <algorithm>
diff --git a/host/commands/cvd_send_sms/sms_sender.cc b/host/commands/cvd_send_sms/sms_sender.cc
index 5c8b8db..8021244 100644
--- a/host/commands/cvd_send_sms/sms_sender.cc
+++ b/host/commands/cvd_send_sms/sms_sender.cc
@@ -1,3 +1,18 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 #include "host/commands/cvd_send_sms/sms_sender.h"
 
 #include <algorithm>
diff --git a/host/commands/cvd_update_location/Android.bp b/host/commands/cvd_update_location/Android.bp
new file mode 100644
index 0000000..13c9d2e
--- /dev/null
+++ b/host/commands/cvd_update_location/Android.bp
@@ -0,0 +1,59 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+cc_defaults {
+    name: "cvd_update_location_defaults",
+    shared_libs: [
+        "libext2_blkid",
+        "libbase",
+        "libcuttlefish_fs",
+        "liblog",
+        "libicuuc",
+        "libprotobuf-cpp-full",
+        "libgrpc++_unsecure",
+    ],
+    defaults: ["cuttlefish_buildhost_only"],
+}
+
+cc_binary {
+    name: "cvd_update_location",
+    shared_libs: [
+        "libext2_blkid",
+        "libbase",
+        "libcuttlefish_fs",
+        "libcuttlefish_utils",
+        "libjsoncpp",
+        "libprotobuf-cpp-full",
+        "libgrpc++_unsecure",
+    ],
+    static_libs: [
+        "libcuttlefish_host_config",
+        "libgflags",
+        "libcvd_gnss_grpc_proxy",
+        "liblocation",
+    ],
+    srcs: [
+        "main.cc",
+    ],
+    cflags: [
+        "-Wno-unused-parameter",
+        "-D_XOPEN_SOURCE",
+    ],
+    defaults: ["cvd_update_location_defaults"],
+}
\ No newline at end of file
diff --git a/host/commands/cvd_update_location/main.cc b/host/commands/cvd_update_location/main.cc
new file mode 100644
index 0000000..66e00c3
--- /dev/null
+++ b/host/commands/cvd_update_location/main.cc
@@ -0,0 +1,69 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <android-base/logging.h>
+#include <gflags/gflags.h>
+#include "host/libs/config/cuttlefish_config.h"
+#include "host/libs/location/GnssClient.h"
+
+DEFINE_int32(instance_num, 1, "Which instance to read the configs from");
+DEFINE_double(latitude, 37.8000064, "location latitude");
+DEFINE_double(longitude, -122.3989209, "location longitude");
+DEFINE_double(elevation, 2.5, "location elevation/altitude");
+
+namespace cuttlefish {
+namespace {
+
+int UpdateLocationCvdMain(int argc, char** argv) {
+  ::android::base::InitLogging(argv, android::base::StderrLogger);
+  google::ParseCommandLineFlags(&argc, &argv, true);
+
+  auto config = CuttlefishConfig::Get();
+  if (!config) {
+    LOG(ERROR) << "Failed to obtain config object";
+    return 1;
+  }
+
+  auto instance = config->ForInstance(FLAGS_instance_num);
+  auto server_port = instance.gnss_grpc_proxy_server_port();
+  std::string socket_name =
+      std::string("localhost:") + std::to_string(server_port);
+  LOG(INFO) << "Server port: " << server_port << " socket: " << socket_name
+            << std::endl;
+
+  GnssClient gpsclient(
+      grpc::CreateChannel(socket_name, grpc::InsecureChannelCredentials()));
+
+  GpsFixArray coordinates;
+  GpsFix location;
+  location.longitude=FLAGS_longitude;
+  location.latitude=FLAGS_latitude;
+  location.elevation=FLAGS_elevation;
+  coordinates.push_back(location);
+  auto status = gpsclient.SendGpsLocations(1000,coordinates);
+  CHECK(status.ok()) << "Failed to send gps location data \n";
+  if (!status.ok()) {
+    return 1;
+  }
+  return 0;
+}
+
+}  // namespace
+}  // namespace cuttlefish
+
+int main(int argc, char** argv) {
+  return cuttlefish::UpdateLocationCvdMain(argc, argv);
+}
diff --git a/host/commands/display/Android.bp b/host/commands/display/Android.bp
new file mode 100644
index 0000000..4d3093d
--- /dev/null
+++ b/host/commands/display/Android.bp
@@ -0,0 +1,37 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+cc_binary_host {
+    name: "cvd_internal_display",
+    srcs: [
+        "main.cpp",
+    ],
+    stl: "libc++_static",
+    static_libs: [
+        "libbase",
+        "libcuttlefish_fs",
+        "libcuttlefish_utils",
+        "libcuttlefish_display_flags",
+        "libcuttlefish_host_config",
+        "libgflags",
+        "libjsoncpp",
+        "liblog",
+    ],
+    defaults: ["cuttlefish_host"],
+}
\ No newline at end of file
diff --git a/host/commands/display/main.cpp b/host/commands/display/main.cpp
new file mode 100644
index 0000000..375c31d
--- /dev/null
+++ b/host/commands/display/main.cpp
@@ -0,0 +1,304 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <iostream>
+#include <ostream>
+#include <string>
+#include <unordered_map>
+#include <vector>
+
+#include <android-base/logging.h>
+#include <android-base/strings.h>
+#include <gflags/gflags.h>
+
+#include "common/libs/utils/subprocess.h"
+#include "host/commands/assemble_cvd/display_flags.h"
+#include "host/commands/assemble_cvd/flags_defaults.h"
+#include "host/libs/config/cuttlefish_config.h"
+
+DEFINE_uint32(instance_num, 1, "Which instance to read the configs from");
+DEFINE_uint32(width, 0,
+              "When adding a display, the width of the display in pixels");
+DEFINE_uint32(height, 0,
+              "When adding a display, the height of the display in pixels");
+DEFINE_uint32(dpi, 0,
+              "When adding a display, the pixels per inch of the display");
+DEFINE_uint32(refresh_rate_hz, 0,
+              "When adding a display, the refresh rate of the display in "
+              "Hertz");
+
+DEFINE_string(display0, "", cuttlefish::kDisplayHelp);
+DEFINE_string(display1, "", cuttlefish::kDisplayHelp);
+DEFINE_string(display2, "", cuttlefish::kDisplayHelp);
+DEFINE_string(display3, "", cuttlefish::kDisplayHelp);
+
+namespace cuttlefish {
+namespace {
+
+static const std::string kUsage =
+    R"(Cuttlefish Virtual Device (CVD) Display CLI.
+
+usage: cvd display <command> <args>
+
+Commands:
+    help                Print this message.
+    help <command>      Print help for a command.
+    add                 Adds a new display to a given device.
+    list                Prints the currently connected displays.
+    remove              Removes a display from a given device.
+)";
+
+static const std::string kAddUsage =
+    R"(Cuttlefish Virtual Device (CVD) Display CLI.
+
+Adds and connects a display to the given virtual device.
+
+usage: cvd display add --width=720 --height=1280
+
+       cvd display add \\
+        --display0=width=1280,height=800
+        --display1=width=1920,height=1080,refresh_rate_hz=60
+)";
+
+static const std::string kListUsage =
+    R"(Cuttlefish Virtual Device (CVD) Display CLI.
+
+Lists all of the displays currently connected to a given virtual device.
+
+usage: cvd display list
+)";
+
+static const std::string kRemoveUsage =
+    R"(Cuttlefish Virtual Device (CVD) Display CLI.
+
+Disconnects and removes a display from the given virtual device.
+
+usage: cvd display remove <display index>
+       cvd display remove <display index> <display index> ...
+)";
+
+static const std::unordered_map<std::string, std::string> kSubCommandUsages = {
+    {"add", kAddUsage},
+    {"list", kListUsage},
+    {"help", kUsage},
+    {"remove", kRemoveUsage},
+};
+
+Result<int> RunCrosvmDisplayCommand(const std::vector<std::string>& args) {
+  auto config = cuttlefish::CuttlefishConfig::Get();
+  if (!config) {
+    return CF_ERR("Failed to get Cuttlefish config.");
+  }
+  // TODO(b/260649774): Consistent executable API for selecting an instance
+  auto instance = config->ForInstance(FLAGS_instance_num);
+
+  const std::string crosvm_binary_path = instance.crosvm_binary();
+  const std::string crosvm_control_path =
+      instance.PerInstanceInternalUdsPath("crosvm_control.sock");
+
+  cuttlefish::Command command(crosvm_binary_path);
+  command.AddParameter("gpu");
+  for (const std::string& arg : args) {
+    command.AddParameter(arg);
+  }
+  command.AddParameter(crosvm_control_path);
+
+  std::string out;
+  std::string err;
+  auto ret = RunWithManagedStdio(std::move(command), NULL, &out, &err);
+  if (ret != 0) {
+    std::cerr << "Failed to run crosvm display command: ret code: " << ret
+              << "\n"
+              << out << "\n"
+              << err;
+    return ret;
+  }
+
+  std::cerr << err << std::endl;
+  std::cout << out << std::endl;
+  return 0;
+}
+
+Result<int> DoHelp(const std::vector<std::string>& args) {
+  if (args.empty()) {
+    std::cout << kUsage << std::endl;
+    return 0;
+  }
+
+  const std::string& subcommand_str = args[0];
+  auto subcommand_usage = kSubCommandUsages.find(subcommand_str);
+  if (subcommand_usage == kSubCommandUsages.end()) {
+    std::cerr << "Unknown subcommand '" << subcommand_str
+              << "'. See `cvd display help`" << std::endl;
+    return 1;
+  }
+
+  std::cout << subcommand_usage->second << std::endl;
+  return 0;
+}
+
+Result<std::optional<CuttlefishConfig::DisplayConfig>>
+ParseLegacyDisplayFlags() {
+  if (FLAGS_width == 0 && FLAGS_height == 0 && FLAGS_dpi == 0 &&
+      FLAGS_refresh_rate_hz == 0) {
+    return std::nullopt;
+  }
+
+  CF_EXPECT_GT(FLAGS_width, 0,
+               "Must specify valid --width flag. Usage:\n"
+                   << kAddUsage);
+  CF_EXPECT_GT(FLAGS_height, 0,
+               "Must specify valid --height flag. Usage:\n"
+                   << kAddUsage);
+  CF_EXPECT_GT(FLAGS_dpi, 0,
+               "Must specify valid --dpi flag. Usage:\n"
+                   << kAddUsage);
+  CF_EXPECT_GT(FLAGS_refresh_rate_hz, 0,
+               "Must specify valid --refresh_rate_hz flag. Usage:\n"
+                   << kAddUsage);
+
+  const int display_width = FLAGS_width;
+  const int display_height = FLAGS_height;
+  const int display_dpi = FLAGS_dpi > 0 ? FLAGS_dpi : CF_DEFAULTS_DISPLAY_DPI;
+  const int display_rr = FLAGS_refresh_rate_hz > 0
+                             ? FLAGS_refresh_rate_hz
+                             : CF_DEFAULTS_DISPLAY_REFRESH_RATE;
+
+  return CuttlefishConfig::DisplayConfig{
+      .width = display_width,
+      .height = display_height,
+      .dpi = display_dpi,
+      .refresh_rate_hz = display_rr,
+  };
+}
+
+Result<int> DoAdd(const std::vector<std::string>&) {
+  std::vector<CuttlefishConfig::DisplayConfig> display_configs;
+
+  auto display = CF_EXPECT(ParseLegacyDisplayFlags());
+  if (display) {
+    display_configs.push_back(*display);
+  }
+  auto display0 = CF_EXPECT(ParseDisplayConfig(FLAGS_display0));
+  if (display0) {
+    display_configs.push_back(*display0);
+  }
+  auto display1 = CF_EXPECT(ParseDisplayConfig(FLAGS_display1));
+  if (display1) {
+    display_configs.push_back(*display1);
+  }
+  auto display2 = CF_EXPECT(ParseDisplayConfig(FLAGS_display2));
+  if (display2) {
+    display_configs.push_back(*display2);
+  }
+  auto display3 = CF_EXPECT(ParseDisplayConfig(FLAGS_display3));
+  if (display3) {
+    display_configs.push_back(*display3);
+  }
+
+  if (display_configs.empty()) {
+    return CF_ERR("No displays params provided. Usage:\n" << kAddUsage);
+  }
+
+  std::vector<std::string> add_displays_command_args;
+  add_displays_command_args.push_back("add-displays");
+
+  for (const auto& display_config : display_configs) {
+    const std::string w = std::to_string(display_config.width);
+    const std::string h = std::to_string(display_config.height);
+    const std::string dpi = std::to_string(display_config.dpi);
+    const std::string rr = std::to_string(display_config.refresh_rate_hz);
+
+    const std::string add_display_flag =
+        "--gpu-display=" + android::base::Join(
+                               std::vector<std::string>{
+                                   "mode=windowed[" + w + "," + h + "]",
+                                   "dpi=[" + dpi + "," + dpi + "]",
+                                   "refresh-rate=" + rr,
+                               },
+                               ",");
+
+    add_displays_command_args.push_back(add_display_flag);
+  }
+
+  return CF_EXPECT(RunCrosvmDisplayCommand(add_displays_command_args));
+}
+
+Result<int> DoList(const std::vector<std::string>&) {
+  return CF_EXPECT(RunCrosvmDisplayCommand({"list-displays"}));
+}
+
+Result<int> DoRemove(const std::vector<std::string>& args) {
+  if (args.empty()) {
+    std::cerr << "Must specify the display id to remove. Usage:" << std::endl;
+    std::cerr << kRemoveUsage << std::endl;
+    return 1;
+  }
+
+  std::vector<std::string> remove_displays_command_args;
+  remove_displays_command_args.push_back("remove-displays");
+  for (const auto& arg : args) {
+    remove_displays_command_args.push_back("--display-id=" + arg);
+  }
+
+  return CF_EXPECT(RunCrosvmDisplayCommand(remove_displays_command_args));
+}
+
+using DisplaySubCommand = Result<int> (*)(const std::vector<std::string>&);
+
+int DisplayMain(int argc, char** argv) {
+  ::android::base::InitLogging(argv, android::base::StderrLogger);
+  ::gflags::SetUsageMessage(kUsage);
+  ::gflags::ParseCommandLineFlags(&argc, &argv, true);
+
+  std::vector<std::string> args;
+  for (int i = 1; i < argc; i++) {
+    args.push_back(argv[i]);
+  }
+
+  if (args.empty()) {
+    args.push_back("help");
+  }
+
+  const std::unordered_map<std::string, DisplaySubCommand> kSubCommands = {
+      {"add", DoAdd},
+      {"list", DoList},
+      {"help", DoHelp},
+      {"remove", DoRemove},
+  };
+
+  const auto command_str = args[0];
+  args.erase(args.begin());
+
+  auto command_func_it = kSubCommands.find(command_str);
+  if (command_func_it == kSubCommands.end()) {
+    std::cerr << "Unknown display command: '" << command_str << "'."
+              << std::endl;
+    return 1;
+  }
+
+  auto result = command_func_it->second(args);
+  if (!result.ok()) {
+    std::cerr << result.error().Message();
+    return 1;
+  }
+  return result.value();
+}
+
+}  // namespace
+}  // namespace cuttlefish
+
+int main(int argc, char** argv) { return cuttlefish::DisplayMain(argc, argv); }
diff --git a/host/commands/echo_server/Android.bp b/host/commands/echo_server/Android.bp
new file mode 100644
index 0000000..a69ba62
--- /dev/null
+++ b/host/commands/echo_server/Android.bp
@@ -0,0 +1,105 @@
+// Copyright (C) 2023 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+cc_library_host_static {
+    name: "libecho_server",
+    shared_libs: [
+        "libprotobuf-cpp-full",
+        "libgrpc++_unsecure",
+    ],
+    static_libs: [
+        "libgflags",
+    ],
+    cflags: [
+        "-Wno-unused-parameter",
+    ],
+    generated_headers: [
+        "EchoServerProto_h",
+    ],
+    generated_sources: [
+        "EchoServerProto_cc",
+    ],
+    export_generated_headers: [
+        "EchoServerProto_h",
+    ],
+    defaults: ["cuttlefish_host"],
+    include_dirs: [
+        "external/grpc-grpc/include",
+        "external/protobuf/src",
+    ],
+}
+
+cc_binary_host {
+    name: "echo_server",
+    shared_libs: [
+        "libprotobuf-cpp-full",
+        "libgrpc++_unsecure",
+    ],
+    static_libs: [
+        "libcuttlefish_host_config",
+        "libgflags",
+        "libecho_server",
+        "libgrpc++_reflection",
+    ],
+    srcs: [
+        "main.cpp",
+    ],
+    cflags: [
+        "-Wno-unused-parameter",
+    ],
+    defaults: ["cuttlefish_host"],
+}
+
+filegroup {
+    name: "EchoServerProto",
+    srcs: [
+        "echo.proto",
+    ],
+}
+
+genrule {
+    name: "EchoServerProto_h",
+    tools: [
+        "aprotoc",
+        "protoc-gen-grpc-cpp-plugin",
+    ],
+    cmd: "$(location aprotoc) -Idevice/google/cuttlefish/host/commands/echo_server -Iexternal/protobuf/src --plugin=protoc-gen-grpc=$(location protoc-gen-grpc-cpp-plugin) $(in) --grpc_out=$(genDir) --cpp_out=$(genDir)",
+    srcs: [
+        ":EchoServerProto",
+    ],
+    out: [
+        "echo.grpc.pb.h",
+        "echo.pb.h",
+    ],
+}
+
+genrule {
+    name: "EchoServerProto_cc",
+    tools: [
+        "aprotoc",
+        "protoc-gen-grpc-cpp-plugin",
+    ],
+    cmd: "$(location aprotoc) -Idevice/google/cuttlefish/host/commands/echo_server -Iexternal/protobuf/src --plugin=protoc-gen-grpc=$(location protoc-gen-grpc-cpp-plugin) $(in) --grpc_out=$(genDir) --cpp_out=$(genDir)",
+    srcs: [
+        ":EchoServerProto",
+    ],
+    out: [
+        "echo.grpc.pb.cc",
+        "echo.pb.cc",
+    ],
+}
diff --git a/host/commands/echo_server/echo.proto b/host/commands/echo_server/echo.proto
new file mode 100644
index 0000000..f3bfa25
--- /dev/null
+++ b/host/commands/echo_server/echo.proto
@@ -0,0 +1,29 @@
+// Copyright (C) 2023 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package echoserver;
+
+service EchoService {
+  rpc Echo (EchoRequest) returns (EchoReply) {}
+}
+
+message EchoRequest {
+  string message = 1;
+}
+
+message EchoReply {
+  string message = 1;
+}
diff --git a/host/commands/echo_server/main.cpp b/host/commands/echo_server/main.cpp
new file mode 100644
index 0000000..4d07723
--- /dev/null
+++ b/host/commands/echo_server/main.cpp
@@ -0,0 +1,75 @@
+/*
+ *
+ * Copyright 2015 gRPC authors.
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+#include <iostream>
+#include <memory>
+#include <string>
+
+#include <gflags/gflags.h>
+#include <grpcpp/ext/proto_server_reflection_plugin.h>
+#include <grpcpp/grpcpp.h>
+#include <grpcpp/health_check_service_interface.h>
+
+#include "echo.grpc.pb.h"
+
+using echoserver::EchoReply;
+using echoserver::EchoRequest;
+using echoserver::EchoService;
+using grpc::Server;
+using grpc::ServerBuilder;
+using grpc::ServerContext;
+using grpc::Status;
+
+DEFINE_string(grpc_uds_path, "", "grpc_uds_path");
+
+class EchoServiceImpl final : public EchoService::Service {
+  Status Echo(ServerContext* context, const EchoRequest* request,
+              EchoReply* reply) override {
+    reply->set_message(request->message());
+    return Status::OK;
+  }
+};
+
+void RunServer() {
+  std::string server_address("unix:" + FLAGS_grpc_uds_path);
+  EchoServiceImpl service;
+
+  grpc::EnableDefaultHealthCheckService(true);
+  grpc::reflection::InitProtoReflectionServerBuilderPlugin();
+  ServerBuilder builder;
+  // Listen on the given address without any authentication mechanism.
+  builder.AddListeningPort(server_address, grpc::InsecureServerCredentials());
+  // Register "service" as the instance through which we'll communicate with
+  // clients. In this case it corresponds to an *synchronous* service.
+  builder.RegisterService(&service);
+  // Finally assemble the server.
+  std::unique_ptr<Server> server(builder.BuildAndStart());
+  std::cout << "Server listening on " << server_address << std::endl;
+
+  // Wait for the server to shutdown. Note that some other thread must be
+  // responsible for shutting down the server for this call to ever return.
+  server->Wait();
+}
+
+int main(int argc, char** argv) {
+  ::gflags::ParseCommandLineFlags(&argc, &argv, true);
+  RunServer();
+
+  return 0;
+}
\ No newline at end of file
diff --git a/host/commands/fetcher/Android.bp b/host/commands/fetcher/Android.bp
deleted file mode 100644
index 6555a74..0000000
--- a/host/commands/fetcher/Android.bp
+++ /dev/null
@@ -1,61 +0,0 @@
-//
-// Copyright (C) 2019 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package {
-    default_applicable_licenses: ["Android-Apache-2.0"],
-}
-
-cc_binary {
-    name: "fetch_cvd",
-    srcs: [
-        "fetch_cvd.cc",
-    ],
-    static_libs: [
-        "libcuttlefish_web",
-        "libcuttlefish_host_config",
-        "libgflags",
-        "libext2_blkid",
-    ],
-    target: {
-        host: {
-            stl: "libc++_static",
-            static_libs: [
-                "libbase",
-                "libcuttlefish_fs",
-                "libcuttlefish_utils",
-                "libcurl",
-                "libcrypto",
-                "liblog",
-                "libssl",
-                "libz",
-                "libjsoncpp",
-            ],
-        },
-        android: {
-            shared_libs: [
-                "libbase",
-                "libcuttlefish_fs",
-                "libcuttlefish_utils",
-                "libcurl",
-                "libcrypto",
-                "liblog",
-                "libssl",
-                "libz",
-                "libjsoncpp",
-            ],
-        },
-    },
-    defaults: ["cuttlefish_host"],
-}
diff --git a/host/commands/fetcher/fetch_cvd.cc b/host/commands/fetcher/fetch_cvd.cc
deleted file mode 100644
index 2154233..0000000
--- a/host/commands/fetcher/fetch_cvd.cc
+++ /dev/null
@@ -1,606 +0,0 @@
-//
-// Copyright (C) 2019 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-#include <fstream>
-#include <iostream>
-#include <iterator>
-#include <string>
-
-#include <curl/curl.h>
-#include <sys/stat.h>
-#include <unistd.h>
-
-#include "android-base/logging.h"
-#include "android-base/strings.h"
-#include "gflags/gflags.h"
-
-#include "common/libs/fs/shared_fd.h"
-#include "common/libs/utils/archive.h"
-#include "common/libs/utils/environment.h"
-#include "common/libs/utils/files.h"
-#include "common/libs/utils/subprocess.h"
-
-#include "host/libs/config/fetcher_config.h"
-
-#include "host/libs/web/build_api.h"
-#include "host/libs/web/credential_source.h"
-#include "host/libs/web/install_zip.h"
-
-namespace {
-
-const std::string DEFAULT_BRANCH = "aosp-master";
-const std::string DEFAULT_BUILD_TARGET = "aosp_cf_x86_64_phone-userdebug";
-}
-
-using cuttlefish::CurrentDirectory;
-
-DEFINE_string(api_key, "", "API key for the Android Build API");
-DEFINE_string(default_build, DEFAULT_BRANCH + "/" + DEFAULT_BUILD_TARGET,
-              "source for the cuttlefish build to use (vendor.img + host)");
-DEFINE_string(system_build, "", "source for system.img and product.img");
-DEFINE_string(kernel_build, "", "source for the kernel or gki target");
-DEFINE_string(bootloader_build, "", "source for the bootloader target");
-DEFINE_string(otatools_build, "", "source for the host ota tools");
-
-DEFINE_bool(download_img_zip, true, "Whether to fetch the -img-*.zip file.");
-DEFINE_bool(download_target_files_zip, false, "Whether to fetch the "
-                                              "-target_files-*.zip file.");
-
-DEFINE_string(credential_source, "", "Build API credential source");
-DEFINE_string(directory, CurrentDirectory(), "Target directory to fetch "
-                                             "files into");
-DEFINE_bool(run_next_stage, false, "Continue running the device through the next stage.");
-DEFINE_string(wait_retry_period, "20", "Retry period for pending builds given "
-                                       "in seconds. Set to 0 to not wait.");
-
-namespace cuttlefish {
-namespace {
-
-const std::string HOST_TOOLS = "cvd-host_package.tar.gz";
-const std::string OTA_TOOLS = "otatools.zip";
-const std::string OTA_TOOLS_DIR = "/otatools/";
-
-/** Returns the name of one of the artifact target zip files.
- *
- * For example, for a target "aosp_cf_x86_phone-userdebug" at a build "5824130",
- * the image zip file would be "aosp_cf_x86_phone-img-5824130.zip"
- */
-std::string TargetBuildZipFromArtifacts(
-    const Build& build, const std::string& name,
-    const std::vector<Artifact>& artifacts) {
-  std::string product = std::visit([](auto&& arg) { return arg.product; }, build);
-  auto id = std::visit([](auto&& arg) { return arg.id; }, build);
-  auto match = product + "-" + name + "-" + id + ".zip";
-  for (const auto& artifact : artifacts) {
-    if (artifact.Name() == match) {
-      return artifact.Name();
-    }
-  }
-  return "";
-}
-
-std::vector<std::string> download_images(BuildApi* build_api,
-                                         const Build& build,
-                                         const std::string& target_directory,
-                                         const std::vector<std::string>& images) {
-  auto artifacts = build_api->Artifacts(build);
-  std::string img_zip_name = TargetBuildZipFromArtifacts(build, "img", artifacts);
-  if (img_zip_name.size() == 0) {
-    LOG(ERROR) << "Target " << build << " did not have an img zip";
-    return {};
-  }
-  std::string local_path = target_directory + "/" + img_zip_name;
-  if (!build_api->ArtifactToFile(build, img_zip_name, local_path)) {
-    LOG(ERROR) << "Unable to download " << build << ":" << img_zip_name << " to "
-               << local_path;
-    return {};
-  }
-
-  std::vector<std::string> files = ExtractImages(local_path, target_directory, images);
-  if (files.empty()) {
-    LOG(ERROR) << "Could not extract " << local_path;
-    return {};
-  }
-  if (unlink(local_path.c_str()) != 0) {
-    LOG(ERROR) << "Could not delete " << local_path;
-    files.push_back(local_path);
-  }
-  return files;
-}
-std::vector<std::string> download_images(BuildApi* build_api,
-                                         const Build& build,
-                                         const std::string& target_directory) {
-  return download_images(build_api, build, target_directory, {});
-}
-
-std::vector<std::string> download_target_files(BuildApi* build_api,
-                                               const Build& build,
-                                               const std::string& target_directory) {
-  auto artifacts = build_api->Artifacts(build);
-  std::string target_zip = TargetBuildZipFromArtifacts(build, "target_files", artifacts);
-  if (target_zip.size() == 0) {
-    LOG(ERROR) << "Target " << build << " did not have a target files zip";
-    return {};
-  }
-  std::string local_path = target_directory + "/" + target_zip;
-  if (!build_api->ArtifactToFile(build, target_zip, local_path)) {
-    LOG(ERROR) << "Unable to download " << build << ":" << target_zip << " to "
-               << local_path;
-    return {};
-  }
-  return {local_path};
-}
-
-std::vector<std::string> download_host_package(BuildApi* build_api,
-                                               const Build& build,
-                                               const std::string& target_directory) {
-  auto artifacts = build_api->Artifacts(build);
-  bool has_host_package = false;
-  for (const auto& artifact : artifacts) {
-    has_host_package |= artifact.Name() == HOST_TOOLS;
-  }
-  if (!has_host_package) {
-    LOG(ERROR) << "Target " << build << " did not have " << HOST_TOOLS;
-    return {};
-  }
-  std::string local_path = target_directory + "/" + HOST_TOOLS;
-
-  if (!build_api->ArtifactToFile(build, HOST_TOOLS, local_path)) {
-    LOG(ERROR) << "Unable to download " << build << ":" << HOST_TOOLS << " to "
-               << local_path;
-    return {};
-  }
-
-  Archive archive(local_path);
-  if (!archive.ExtractAll(target_directory)) {
-    LOG(ERROR) << "Could not extract " << local_path;
-    return {};
-  }
-  std::vector<std::string> files = archive.Contents();
-  for (auto& file : files) {
-    file = target_directory + "/" + file;
-  }
-  if (unlink(local_path.c_str()) != 0) {
-    LOG(ERROR) << "Could not delete " << local_path;
-    files.push_back(local_path);
-  }
-  return files;
-}
-
-std::vector<std::string> download_ota_tools(BuildApi* build_api,
-                                            const Build& build,
-                                            const std::string& target_directory) {
-  auto artifacts = build_api->Artifacts(build);
-  bool has_host_package = false;
-  for (const auto& artifact : artifacts) {
-    has_host_package |= artifact.Name() == OTA_TOOLS;
-  }
-  if (!has_host_package) {
-    LOG(ERROR) << "Target " << build << " did not have " << OTA_TOOLS;
-    return {};
-  }
-  std::string local_path = target_directory + "/" + OTA_TOOLS;
-
-  if (!build_api->ArtifactToFile(build, OTA_TOOLS, local_path)) {
-    LOG(ERROR) << "Unable to download " << build << ":" << OTA_TOOLS << " to "
-        << local_path;
-    return {};
-  }
-
-  std::string otatools_dir = target_directory + OTA_TOOLS_DIR;
-  if (!DirectoryExists(otatools_dir) && mkdir(otatools_dir.c_str(), 0777) != 0) {
-    LOG(ERROR) << "Could not create " << otatools_dir;
-    return {};
-  }
-  Archive archive(local_path);
-  if (!archive.ExtractAll(otatools_dir)) {
-    LOG(ERROR) << "Could not extract " << local_path;
-    return {};
-  }
-  std::vector<std::string> files = archive.Contents();
-  for (auto& file : files) {
-    file = target_directory + OTA_TOOLS_DIR + file;
-  }
-  files.push_back(local_path);
-  return files;
-}
-
-void AddFilesToConfig(FileSource purpose, const Build& build,
-                      const std::vector<std::string>& paths,
-                      FetcherConfig* config,
-                      const std::string& directory_prefix,
-                      bool override_entry = false) {
-  for (const std::string& path : paths) {
-    std::string_view local_path(path);
-    if (!android::base::ConsumePrefix(&local_path, directory_prefix)) {
-      LOG(ERROR) << "Failed to remove prefix " << directory_prefix << " from "
-                 << local_path;
-    }
-    while (android::base::StartsWith(local_path, "/")) {
-      android::base::ConsumePrefix(&local_path, "/");
-    }
-    // TODO(schuffelen): Do better for local builds here.
-    auto id = std::visit([](auto&& arg) { return arg.id; }, build);
-    auto target = std::visit([](auto&& arg) { return arg.target; }, build);
-    CvdFile file(purpose, id, target, std::string(local_path));
-    bool added = config->add_cvd_file(file, override_entry);
-    if (!added) {
-      LOG(ERROR) << "Duplicate file " << file;
-      LOG(ERROR) << "Existing file: " << config->get_cvd_files()[path];
-      LOG(FATAL) << "Failed to add path " << path;
-    }
-  }
-}
-
-std::string USAGE_MESSAGE =
-    "<flags>\n"
-    "\n"
-    "\"*_build\" flags accept values in the following format:\n"
-    "\"branch/build_target\" - latest build of \"branch\" for \"build_target\"\n"
-    "\"build_id/build_target\" - build \"build_id\" for \"build_target\"\n"
-    "\"branch\" - latest build of \"branch\" for \"aosp_cf_x86_phone-userdebug\"\n"
-    "\"build_id\" - build \"build_id\" for \"aosp_cf_x86_phone-userdebug\"\n";
-
-std::unique_ptr<CredentialSource> TryOpenServiceAccountFile(
-    CurlWrapper& curl, const std::string& path) {
-  LOG(VERBOSE) << "Attempting to open service account file \"" << path << "\"";
-  Json::CharReaderBuilder builder;
-  std::ifstream ifs(path);
-  Json::Value content;
-  std::string errorMessage;
-  if (!Json::parseFromStream(builder, ifs, &content, &errorMessage)) {
-    LOG(VERBOSE) << "Could not read config file \"" << path
-                 << "\": " << errorMessage;
-    return {};
-  }
-  static constexpr char BUILD_SCOPE[] =
-      "https://www.googleapis.com/auth/androidbuild.internal";
-  auto result =
-      ServiceAccountOauthCredentialSource::FromJson(curl, content, BUILD_SCOPE);
-  if (!result.ok()) {
-    LOG(VERBOSE) << "Failed to load service account json file: \n"
-                 << result.error();
-    return {};
-  }
-  return std::unique_ptr<CredentialSource>(
-      new ServiceAccountOauthCredentialSource(std::move(*result)));
-}
-
-} // namespace
-
-int FetchCvdMain(int argc, char** argv) {
-  ::android::base::InitLogging(argv, android::base::StderrLogger);
-  gflags::SetUsageMessage(USAGE_MESSAGE);
-  gflags::ParseCommandLineFlags(&argc, &argv, true);
-
-  FetcherConfig config;
-  config.RecordFlags();
-
-  std::string target_dir = AbsolutePath(FLAGS_directory);
-  if (!DirectoryExists(target_dir) && mkdir(target_dir.c_str(), 0777) != 0) {
-    LOG(FATAL) << "Could not create " << target_dir;
-  }
-  std::string target_dir_slash = target_dir;
-  std::chrono::seconds retry_period(std::stoi(FLAGS_wait_retry_period));
-
-  curl_global_init(CURL_GLOBAL_DEFAULT);
-  {
-    auto curl = CurlWrapper::Create();
-    auto retrying_curl = CurlWrapper::WithServerErrorRetry(
-        *curl, 10, std::chrono::milliseconds(5000));
-    std::unique_ptr<CredentialSource> credential_source;
-    if (auto crds = TryOpenServiceAccountFile(*curl, FLAGS_credential_source)) {
-      credential_source = std::move(crds);
-    } else if (FLAGS_credential_source == "gce") {
-      credential_source = GceMetadataCredentialSource::make(*retrying_curl);
-    } else if (FLAGS_credential_source == "") {
-      std::string file = StringFromEnv("HOME", ".") + "/.acloud_oauth2.dat";
-      LOG(VERBOSE) << "Probing acloud credentials at " << file;
-      if (FileExists(file)) {
-        std::ifstream stream(file);
-        auto attempt_load =
-            RefreshCredentialSource::FromOauth2ClientFile(*curl, stream);
-        if (attempt_load.ok()) {
-          credential_source.reset(
-              new RefreshCredentialSource(std::move(*attempt_load)));
-        } else {
-          LOG(VERBOSE) << "Failed to load acloud credentials: "
-                       << attempt_load.error();
-        }
-      } else {
-        LOG(INFO) << "\"" << file << "\" missing, running without credentials";
-      }
-    } else {
-      credential_source = FixedCredentialSource::make(FLAGS_credential_source);
-    }
-    BuildApi build_api(*retrying_curl, credential_source.get(), FLAGS_api_key);
-
-    auto default_build = ArgumentToBuild(&build_api, FLAGS_default_build,
-                                         DEFAULT_BUILD_TARGET,
-                                         retry_period);
-
-    std::vector<std::string> host_package_files =
-        download_host_package(&build_api, default_build, target_dir);
-    if (host_package_files.empty()) {
-      LOG(FATAL) << "Could not download host package for " << default_build;
-    }
-    AddFilesToConfig(FileSource::DEFAULT_BUILD, default_build,
-                     host_package_files, &config, target_dir);
-
-    if (FLAGS_system_build != "" || FLAGS_kernel_build != "" || FLAGS_otatools_build != "") {
-      auto ota_build = default_build;
-      if (FLAGS_otatools_build != "") {
-        ota_build = ArgumentToBuild(&build_api, FLAGS_otatools_build,
-                                    DEFAULT_BUILD_TARGET, retry_period);
-      } else if (FLAGS_system_build != "") {
-        ota_build = ArgumentToBuild(&build_api, FLAGS_system_build,
-                                    DEFAULT_BUILD_TARGET, retry_period);
-      }
-      std::vector<std::string> ota_tools_files =
-          download_ota_tools(&build_api, ota_build, target_dir);
-      if (ota_tools_files.empty()) {
-        LOG(FATAL) << "Could not download ota tools for " << ota_build;
-      }
-      AddFilesToConfig(FileSource::DEFAULT_BUILD, default_build,
-                       ota_tools_files, &config, target_dir);
-    }
-    if (FLAGS_download_img_zip) {
-      std::vector<std::string> image_files =
-          download_images(&build_api, default_build, target_dir);
-      if (image_files.empty()) {
-        LOG(FATAL) << "Could not download images for " << default_build;
-      }
-      LOG(INFO) << "Adding img-zip files for default build";
-      for (auto& file : image_files) {
-        LOG(INFO) << file;
-      }
-      AddFilesToConfig(FileSource::DEFAULT_BUILD, default_build, image_files,
-                       &config, target_dir);
-    }
-    if (FLAGS_system_build != "" || FLAGS_download_target_files_zip) {
-      std::string default_target_dir = target_dir + "/default";
-      if (mkdir(default_target_dir.c_str(), S_IRWXU | S_IRWXG | S_IROTH | S_IXOTH) < 0) {
-        LOG(FATAL) << "Could not create " << default_target_dir;
-      }
-      std::vector<std::string> target_files =
-          download_target_files(&build_api, default_build, default_target_dir);
-      if (target_files.empty()) {
-        LOG(FATAL) << "Could not download target files for " << default_build;
-      }
-      LOG(INFO) << "Adding target files for default build";
-      AddFilesToConfig(FileSource::DEFAULT_BUILD, default_build, target_files,
-                       &config, target_dir);
-    }
-
-    if (FLAGS_system_build != "") {
-      auto system_build = ArgumentToBuild(&build_api, FLAGS_system_build,
-                                          DEFAULT_BUILD_TARGET,
-                                          retry_period);
-      bool system_in_img_zip = true;
-      if (FLAGS_download_img_zip) {
-        std::vector<std::string> image_files =
-            download_images(&build_api, system_build, target_dir,
-                            {"system.img", "product.img"});
-        if (image_files.empty()) {
-          LOG(INFO) << "Could not find system image for " << system_build
-                    << "in the img zip. Assuming a super image build, which will "
-                    << "get the system image from the target zip.";
-          system_in_img_zip = false;
-        } else {
-          LOG(INFO) << "Adding img-zip files for system build";
-          AddFilesToConfig(FileSource::SYSTEM_BUILD, system_build, image_files,
-                           &config, target_dir, true);
-        }
-      }
-      std::string system_target_dir = target_dir + "/system";
-      if (mkdir(system_target_dir.c_str(), S_IRWXU | S_IRWXG | S_IROTH | S_IXOTH) < 0) {
-        LOG(FATAL) << "Could not create " << system_target_dir;
-      }
-      std::vector<std::string> target_files =
-          download_target_files(&build_api, system_build, system_target_dir);
-      if (target_files.empty()) {
-        LOG(FATAL) << "Could not download target files for " << system_build;
-        return -1;
-      }
-      AddFilesToConfig(FileSource::SYSTEM_BUILD, system_build, target_files,
-                       &config, target_dir);
-      if (!system_in_img_zip) {
-        if (ExtractImages(target_files[0], target_dir, {"IMAGES/system.img"})
-            != std::vector<std::string>{}) {
-          std::string extracted_system = target_dir + "/IMAGES/system.img";
-          std::string target_system = target_dir + "/system.img";
-          if (rename(extracted_system.c_str(), target_system.c_str())) {
-            int error_num = errno;
-            LOG(FATAL) << "Could not replace system.img in target directory: "
-                       << strerror(error_num);
-            return -1;
-          }
-	} else {
-          LOG(FATAL) << "Could not get system.img from the target zip";
-          return -1;
-        }
-	if (ExtractImages(target_files[0], target_dir, {"IMAGES/product.img"})
-	  != std::vector<std::string>{}) {
-          std::string extracted_product = target_dir + "/IMAGES/product.img";
-          std::string target_product = target_dir + "/product.img";
-          if (rename(extracted_product.c_str(), target_product.c_str())) {
-            int error_num = errno;
-            LOG(FATAL) << "Could not replace product.img in target directory"
-                       << strerror(error_num);
-            return -1;
-          }
-	}
-        if (ExtractImages(target_files[0], target_dir, {"IMAGES/system_ext.img"})
-            != std::vector<std::string>{}) {
-          std::string extracted_system_ext = target_dir + "/IMAGES/system_ext.img";
-          std::string target_system_ext = target_dir + "/system_ext.img";
-          if (rename(extracted_system_ext.c_str(), target_system_ext.c_str())) {
-            int error_num = errno;
-            LOG(FATAL) << "Could not move system_ext.img in target directory: "
-                       << strerror(error_num);
-            return -1;
-          }
-        }
-        if (ExtractImages(target_files[0], target_dir, {"IMAGES/vbmeta_system.img"})
-            != std::vector<std::string>{}) {
-          std::string extracted_vbmeta_system = target_dir + "/IMAGES/vbmeta_system.img";
-          std::string target_vbmeta_system = target_dir + "/vbmeta_system.img";
-          if (rename(extracted_vbmeta_system.c_str(), target_vbmeta_system.c_str())) {
-            int error_num = errno;
-            LOG(FATAL) << "Could not move vbmeta_system.img in target directory: "
-                       << strerror(error_num);
-            return -1;
-          }
-        }
-        // This should technically call AddFilesToConfig with the produced files,
-        // but it will conflict with the ones produced from the default system image
-        // and pie doesn't care about the produced file list anyway.
-      }
-    }
-
-    if (FLAGS_kernel_build != "") {
-      auto kernel_build = ArgumentToBuild(&build_api, FLAGS_kernel_build,
-                                          "kernel", retry_period);
-
-      std::string local_path = target_dir + "/kernel";
-      if (build_api.ArtifactToFile(kernel_build, "bzImage", local_path)) {
-        AddFilesToConfig(FileSource::KERNEL_BUILD, kernel_build, {local_path},
-                         &config, target_dir);
-      }
-      // If the kernel is from an arm/aarch64 build, the artifact will be called
-      // Image.
-      else if (build_api.ArtifactToFile(kernel_build, "Image", local_path)) {
-        AddFilesToConfig(FileSource::KERNEL_BUILD, kernel_build, {local_path},
-                         &config, target_dir);
-      } else {
-        LOG(FATAL) << "Could not download " << kernel_build << ":bzImage to "
-            << local_path;
-      }
-      std::vector<Artifact> kernel_artifacts = build_api.Artifacts(kernel_build);
-      for (const auto& artifact : kernel_artifacts) {
-        if (artifact.Name() != "initramfs.img") {
-          continue;
-        }
-        bool downloaded = build_api.ArtifactToFile(
-            kernel_build, "initramfs.img", target_dir + "/initramfs.img");
-        if (!downloaded) {
-          LOG(FATAL) << "Could not download " << kernel_build << ":initramfs.img to "
-                     << target_dir + "/initramfs.img";
-        }
-        AddFilesToConfig(FileSource::KERNEL_BUILD, kernel_build,
-                         {target_dir + "/initramfs.img"}, &config, target_dir);
-      }
-    }
-
-    if (FLAGS_bootloader_build != "") {
-      auto bootloader_build = ArgumentToBuild(&build_api,
-                                              FLAGS_bootloader_build,
-                                              "u-boot_crosvm_x86_64",
-					      retry_period);
-
-      std::string local_path = target_dir + "/bootloader";
-      if (build_api.ArtifactToFile(bootloader_build, "u-boot.rom", local_path)) {
-        AddFilesToConfig(FileSource::BOOTLOADER_BUILD, bootloader_build,
-                         {local_path}, &config, target_dir, true);
-      }
-      // If the bootloader is from an arm/aarch64 build, the artifact will be of
-      // filetype bin.
-      else if (build_api.ArtifactToFile(bootloader_build, "u-boot.bin",
-                                        local_path)) {
-        AddFilesToConfig(FileSource::BOOTLOADER_BUILD, bootloader_build,
-                         {local_path}, &config, target_dir, true);
-      } else {
-        LOG(FATAL) << "Could not download " << bootloader_build << ":u-boot.rom to "
-            << local_path;
-      }
-    }
-  }
-  curl_global_cleanup();
-
-  // Due to constraints of the build system, artifacts intentionally cannot determine
-  // their own build id. So it's unclear which build number fetch_cvd itself was built at.
-  // https://android.googlesource.com/platform/build/+/979c9f3/Changes.md#build_number
-  std::string fetcher_path = target_dir + "/fetcher_config.json";
-  AddFilesToConfig(GENERATED, DeviceBuild("", ""), {fetcher_path}, &config,
-                   target_dir);
-  config.SaveToFile(fetcher_path);
-
-  for (const auto& file : config.get_cvd_files()) {
-    std::cout << target_dir << "/" << file.second.file_path << "\n";
-  }
-  std::cout << std::flush;
-
-  if (!FLAGS_run_next_stage) {
-    return 0;
-  }
-
-  // Ignore return code. We want to make sure there is no running instance,
-  // and stop_cvd will exit with an error code if there is already no running instance.
-  Command stop_cmd(target_dir + "/bin/stop_cvd");
-  stop_cmd.RedirectStdIO(Subprocess::StdIOChannel::kStdOut,
-                         Subprocess::StdIOChannel::kStdErr);
-  stop_cmd.Start().Wait();
-
-  // gflags::ParseCommandLineFlags will remove fetch_cvd's flags from this.
-  // This depends the remove_flags argument (3rd) is "true".
-
-  auto filelist_fd = SharedFD::MemfdCreate("files_list");
-  if (!filelist_fd->IsOpen()) {
-    LOG(FATAL) << "Unable to create temp file to write file list. "
-               << filelist_fd->StrError() << " (" << filelist_fd->GetErrno() << ")";
-  }
-
-  for (const auto& file : config.get_cvd_files()) {
-    std::string file_entry = file.second.file_path + "\n";
-    auto chars_written = filelist_fd->Write(file_entry.c_str(), file_entry.size());
-    if (chars_written != file_entry.size()) {
-      LOG(FATAL) << "Unable to write entry to file list. Expected to write "
-                 << file_entry.size() << " but wrote " << chars_written << ". "
-                 << filelist_fd->StrError() << " (" << filelist_fd->GetErrno() << ")";
-    }
-  }
-  auto seek_result = filelist_fd->LSeek(0, SEEK_SET);
-  if (seek_result != 0) {
-    LOG(FATAL) << "Unable to seek on file list file. Expected 0, received " << seek_result
-               << filelist_fd->StrError() << " (" << filelist_fd->GetErrno() << ")";
-  }
-
-  if (filelist_fd->UNMANAGED_Dup2(0) == -1) {
-    LOG(FATAL) << "Unable to set file list to stdin. "
-               << filelist_fd->StrError() << " (" << filelist_fd->GetErrno() << ")";
-  }
-
-  // TODO(b/139199114): Go into assemble_cvd when the interface is stable and implemented.
-
-  std::string next_stage = target_dir + "/bin/launch_cvd";
-  std::vector<const char*> next_stage_argv = {"launch_cvd"};
-  LOG(INFO) << "Running " << next_stage;
-  for (int i = 1; i < argc; i++) {
-    LOG(INFO) << argv[i];
-    next_stage_argv.push_back(argv[i]);
-  }
-  next_stage_argv.push_back(nullptr);
-  execv(next_stage.c_str(), const_cast<char* const*>(next_stage_argv.data()));
-  int error = errno;
-  LOG(FATAL) << "execv returned with errno " << error << ":" << strerror(error);
-
-  return -1;
-}
-
-} // namespace cuttlefish
-
-int main(int argc, char** argv) {
-  return cuttlefish::FetchCvdMain(argc, argv);
-}
diff --git a/host/commands/gnss_grpc_proxy/Android.bp b/host/commands/gnss_grpc_proxy/Android.bp
index f490238..d7bc89f 100644
--- a/host/commands/gnss_grpc_proxy/Android.bp
+++ b/host/commands/gnss_grpc_proxy/Android.bp
@@ -52,7 +52,7 @@
     ],
 }
 
-cc_binary {
+cc_binary_host {
     name: "gnss_grpc_proxy",
     shared_libs: [
         "libext2_blkid",
@@ -67,6 +67,7 @@
         "libcuttlefish_host_config",
         "libgflags",
         "libcvd_gnss_grpc_proxy",
+        "libgrpc++_reflection",
     ],
     srcs: [
         "gnss_grpc_proxy.cpp",
diff --git a/host/commands/gnss_grpc_proxy/README.md b/host/commands/gnss_grpc_proxy/README.md
new file mode 100644
index 0000000..ed026b0
--- /dev/null
+++ b/host/commands/gnss_grpc_proxy/README.md
@@ -0,0 +1,10 @@
+Virtual device location data manager serving a gRPC interface.
+
+The [`cvd_import_locations`] and [`cvd_update_location`] executables are
+clients that use this service to set either a sequence of locations or a new
+fixed location for the device.
+
+[![Linkage diagram](./doc/linkage.png)](https://cs.android.com/android/platform/superproject/+/master:device/google/cuttlefish/host/commands/gnss_grpc_proxy/doc/linkage.svg)
+
+[`cvd_import_locations`]: https://cs.android.com/android/platform/superproject/+/master:device/google/cuttlefish/host/commands/cvd_import_locations/
+[`cvd_update_location`]: https://cs.android.com/android/platform/superproject/+/master:device/google/cuttlefish/host/commands/cvd_update_location/
diff --git a/host/commands/gnss_grpc_proxy/doc/linkage.dot b/host/commands/gnss_grpc_proxy/doc/linkage.dot
new file mode 100644
index 0000000..ec3acbc
--- /dev/null
+++ b/host/commands/gnss_grpc_proxy/doc/linkage.dot
@@ -0,0 +1,58 @@
+digraph {
+  browser [label = "Browser"]
+  cli [label = "User CLI"]
+  cvd_import_locations [URL = "https://cs.android.com/android/platform/superproject/+/master:device/google/cuttlefish/host/commands/cvd_import_locations/"]
+  cvd_update_location [URL = "https://cs.android.com/android/platform/superproject/+/master:device/google/cuttlefish/host/commands/cvd_update_location/"]
+  gnss_grpc_proxy [label = < <B>gnss_grpc_proxy</B> >]
+  gnss_grpc_server [label = "TCP gRPC", shape = "rectangle"]
+  run_cvd [URL = "https://cs.android.com/android/platform/superproject/+/master:device/google/cuttlefish/host/commands/run_cvd/"]
+  vmm [label = "crosvm / qemu"]
+  webrtc
+  subgraph fifos {
+    rank = same;
+    host_gnss_console_in [color = "green", label = "internal/gnsshvc_fifo_vm.in", shape = "rectangle"]
+    host_gnss_console_out [color = "green", label = "internal/gnsshvc_fifo_vm.out", shape = "rectangle"]
+    host_fixed_location_console_in [color = "blue", label = "internal/locationhvc_fifo_vm.in", shape = "rectangle"]
+    host_fixed_location_console_out [color = "blue", label = "internal/locationhvc_fifo_vm.out", shape = "rectangle"]
+  }
+  subgraph cluster_android {
+    label = "Android"
+
+    gnss_hal [label = "vendor.gnss-default", URL = "https://cs.android.com/android/platform/superproject/+/master:hardware/interfaces/gnss/aidl/default/"]
+    subgraph consoles {
+      rank = same;
+      fixed_location_console [color = "blue", label = "/dev/hvc6 | /dev/gnss1", shape = "rectangle"]
+      gnss_console [color = "green", label = "/dev/hvc5 | /dev/gnss0", shape = "rectangle"]
+    }
+  }
+
+  cli -> cvd_import_locations
+  cli -> cvd_update_location
+  browser -> webrtc
+
+  run_cvd -> gnss_grpc_proxy
+
+  fixed_location_console -> gnss_hal [color = "blue", dir = "both"]
+  gnss_console -> gnss_hal [color = "green", dir = "both"]
+
+  cvd_import_locations -> gnss_grpc_server [dir = "both"]
+  cvd_update_location -> gnss_grpc_server [dir = "both"]
+  webrtc -> gnss_grpc_server [dir = "both"]
+
+  gnss_grpc_server -> gnss_grpc_proxy [dir = "both"]
+
+  gnss_grpc_proxy -> host_gnss_console_in [color = "green"]
+  host_gnss_console_out -> gnss_grpc_proxy [color = "green"]
+
+  vmm -> host_gnss_console_out [color = "green"]
+  host_gnss_console_in -> vmm [color = "green"]
+
+  host_fixed_location_console_in -> gnss_grpc_proxy [color = "blue", dir = "back"]
+  host_fixed_location_console_out -> gnss_grpc_proxy [color = "blue"]
+
+  host_fixed_location_console_out -> vmm [color = "blue"]
+  host_fixed_location_console_in -> vmm [color = "blue", dir = "back"]
+
+  vmm -> fixed_location_console [color = "blue", dir = "both"]
+  vmm -> gnss_console [color = "green", dir = "both"]
+}
diff --git a/host/commands/gnss_grpc_proxy/doc/linkage.png b/host/commands/gnss_grpc_proxy/doc/linkage.png
new file mode 100644
index 0000000..11f5541
--- /dev/null
+++ b/host/commands/gnss_grpc_proxy/doc/linkage.png
Binary files differ
diff --git a/host/commands/gnss_grpc_proxy/doc/linkage.svg b/host/commands/gnss_grpc_proxy/doc/linkage.svg
new file mode 100644
index 0000000..3e2466e
--- /dev/null
+++ b/host/commands/gnss_grpc_proxy/doc/linkage.svg
@@ -0,0 +1,256 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN"
+ "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
+<!-- Generated by graphviz version 2.43.0 (0)
+ -->
+<!-- Title: %3 Pages: 1 -->
+<svg width="798pt" height="567pt"
+ viewBox="0.00 0.00 797.50 567.00" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
+<g id="graph0" class="graph" transform="scale(1 1) rotate(0) translate(4 563)">
+<title>%3</title>
+<polygon fill="white" stroke="transparent" points="-4,4 -4,-563 793.5,-563 793.5,4 -4,4"/>
+<g id="clust2" class="cluster">
+<title>cluster_android</title>
+<polygon fill="none" stroke="black" points="223,-8 223,-155 533,-155 533,-8 223,-8"/>
+<text text-anchor="middle" x="378" y="-139.8" font-family="Times,serif" font-size="14.00">Android</text>
+</g>
+<!-- browser -->
+<g id="node1" class="node">
+<title>browser</title>
+<ellipse fill="none" stroke="black" cx="190" cy="-541" rx="40.09" ry="18"/>
+<text text-anchor="middle" x="190" y="-537.3" font-family="Times,serif" font-size="14.00">Browser</text>
+</g>
+<!-- webrtc -->
+<g id="node9" class="node">
+<title>webrtc</title>
+<ellipse fill="none" stroke="black" cx="190" cy="-469" rx="34.39" ry="18"/>
+<text text-anchor="middle" x="190" y="-465.3" font-family="Times,serif" font-size="14.00">webrtc</text>
+</g>
+<!-- browser&#45;&gt;webrtc -->
+<g id="edge3" class="edge">
+<title>browser&#45;&gt;webrtc</title>
+<path fill="none" stroke="black" d="M190,-522.7C190,-514.98 190,-505.71 190,-497.11"/>
+<polygon fill="black" stroke="black" points="193.5,-497.1 190,-487.1 186.5,-497.1 193.5,-497.1"/>
+</g>
+<!-- cli -->
+<g id="node2" class="node">
+<title>cli</title>
+<ellipse fill="none" stroke="black" cx="425" cy="-541" rx="44.39" ry="18"/>
+<text text-anchor="middle" x="425" y="-537.3" font-family="Times,serif" font-size="14.00">User CLI</text>
+</g>
+<!-- cvd_import_locations -->
+<g id="node3" class="node">
+<title>cvd_import_locations</title>
+<g id="a_node3"><a xlink:href="https://cs.android.com/android/platform/superproject/+/master:device/google/cuttlefish/host/commands/cvd_import_locations/" xlink:title="cvd_import_locations">
+<ellipse fill="none" stroke="black" cx="330" cy="-469" rx="87.99" ry="18"/>
+<text text-anchor="middle" x="330" y="-465.3" font-family="Times,serif" font-size="14.00">cvd_import_locations</text>
+</a>
+</g>
+</g>
+<!-- cli&#45;&gt;cvd_import_locations -->
+<g id="edge1" class="edge">
+<title>cli&#45;&gt;cvd_import_locations</title>
+<path fill="none" stroke="black" d="M404.39,-524.81C391.61,-515.4 374.97,-503.14 360.67,-492.6"/>
+<polygon fill="black" stroke="black" points="362.69,-489.74 352.57,-486.63 358.54,-495.38 362.69,-489.74"/>
+</g>
+<!-- cvd_update_location -->
+<g id="node4" class="node">
+<title>cvd_update_location</title>
+<g id="a_node4"><a xlink:href="https://cs.android.com/android/platform/superproject/+/master:device/google/cuttlefish/host/commands/cvd_update_location/" xlink:title="cvd_update_location">
+<ellipse fill="none" stroke="black" cx="520" cy="-469" rx="83.69" ry="18"/>
+<text text-anchor="middle" x="520" y="-465.3" font-family="Times,serif" font-size="14.00">cvd_update_location</text>
+</a>
+</g>
+</g>
+<!-- cli&#45;&gt;cvd_update_location -->
+<g id="edge2" class="edge">
+<title>cli&#45;&gt;cvd_update_location</title>
+<path fill="none" stroke="black" d="M445.61,-524.81C458.48,-515.33 475.27,-502.96 489.64,-492.37"/>
+<polygon fill="black" stroke="black" points="491.8,-495.13 497.77,-486.38 487.64,-489.49 491.8,-495.13"/>
+</g>
+<!-- gnss_grpc_server -->
+<g id="node6" class="node">
+<title>gnss_grpc_server</title>
+<polygon fill="none" stroke="black" points="368.5,-415 291.5,-415 291.5,-379 368.5,-379 368.5,-415"/>
+<text text-anchor="middle" x="330" y="-393.3" font-family="Times,serif" font-size="14.00">TCP gRPC</text>
+</g>
+<!-- cvd_import_locations&#45;&gt;gnss_grpc_server -->
+<g id="edge7" class="edge">
+<title>cvd_import_locations&#45;&gt;gnss_grpc_server</title>
+<path fill="none" stroke="black" d="M330,-440.67C330,-435.69 330,-430.49 330,-425.51"/>
+<polygon fill="black" stroke="black" points="326.5,-440.7 330,-450.7 333.5,-440.7 326.5,-440.7"/>
+<polygon fill="black" stroke="black" points="333.5,-425.1 330,-415.1 326.5,-425.1 333.5,-425.1"/>
+</g>
+<!-- cvd_update_location&#45;&gt;gnss_grpc_server -->
+<g id="edge8" class="edge">
+<title>cvd_update_location&#45;&gt;gnss_grpc_server</title>
+<path fill="none" stroke="black" d="M470.16,-449.64C441.76,-439.18 406.36,-426.13 378.23,-415.77"/>
+<polygon fill="black" stroke="black" points="469.11,-452.98 479.71,-453.15 471.53,-446.41 469.11,-452.98"/>
+<polygon fill="black" stroke="black" points="379.21,-412.4 368.61,-412.23 376.79,-418.97 379.21,-412.4"/>
+</g>
+<!-- gnss_grpc_proxy -->
+<g id="node5" class="node">
+<title>gnss_grpc_proxy</title>
+<ellipse fill="none" stroke="black" cx="378" cy="-325" rx="79.89" ry="18"/>
+<text text-anchor="start" x="324.5" y="-322.3" font-family="Times,serif" font-size="14.00"> </text>
+<text text-anchor="start" x="328.5" y="-322.3" font-family="Times,serif" font-weight="bold" font-size="14.00">gnss_grpc_proxy</text>
+<text text-anchor="start" x="427.5" y="-322.3" font-family="Times,serif" font-size="14.00"> </text>
+</g>
+<!-- host_gnss_console_in -->
+<g id="node10" class="node">
+<title>host_gnss_console_in</title>
+<polygon fill="none" stroke="green" points="170,-271 0,-271 0,-235 170,-235 170,-271"/>
+<text text-anchor="middle" x="85" y="-249.3" font-family="Times,serif" font-size="14.00">internal/gnsshvc_fifo_vm.in</text>
+</g>
+<!-- gnss_grpc_proxy&#45;&gt;host_gnss_console_in -->
+<g id="edge11" class="edge">
+<title>gnss_grpc_proxy&#45;&gt;host_gnss_console_in</title>
+<path fill="none" stroke="green" d="M325.28,-311.4C281.03,-300.83 217.08,-285.55 166.28,-273.42"/>
+<polygon fill="green" stroke="green" points="166.84,-269.95 156.3,-271.03 165.21,-276.76 166.84,-269.95"/>
+</g>
+<!-- gnss_grpc_server&#45;&gt;gnss_grpc_proxy -->
+<g id="edge10" class="edge">
+<title>gnss_grpc_server&#45;&gt;gnss_grpc_proxy</title>
+<path fill="none" stroke="black" d="M347.58,-370.36C351.76,-364.26 356.23,-357.75 360.41,-351.65"/>
+<polygon fill="black" stroke="black" points="344.63,-368.47 341.87,-378.7 350.41,-372.43 344.63,-368.47"/>
+<polygon fill="black" stroke="black" points="363.5,-353.33 366.27,-343.1 357.73,-349.37 363.5,-353.33"/>
+</g>
+<!-- run_cvd -->
+<g id="node7" class="node">
+<title>run_cvd</title>
+<g id="a_node7"><a xlink:href="https://cs.android.com/android/platform/superproject/+/master:device/google/cuttlefish/host/commands/run_cvd/" xlink:title="run_cvd">
+<ellipse fill="none" stroke="black" cx="426" cy="-397" rx="39.79" ry="18"/>
+<text text-anchor="middle" x="426" y="-393.3" font-family="Times,serif" font-size="14.00">run_cvd</text>
+</a>
+</g>
+</g>
+<!-- run_cvd&#45;&gt;gnss_grpc_proxy -->
+<g id="edge4" class="edge">
+<title>run_cvd&#45;&gt;gnss_grpc_proxy</title>
+<path fill="none" stroke="black" d="M414.62,-379.41C408.91,-371.08 401.86,-360.8 395.48,-351.49"/>
+<polygon fill="black" stroke="black" points="398.17,-349.22 389.63,-342.96 392.4,-353.18 398.17,-349.22"/>
+</g>
+<!-- vmm -->
+<g id="node8" class="node">
+<title>vmm</title>
+<ellipse fill="none" stroke="black" cx="378" cy="-181" rx="64.19" ry="18"/>
+<text text-anchor="middle" x="378" y="-177.3" font-family="Times,serif" font-size="14.00">crosvm / qemu</text>
+</g>
+<!-- host_gnss_console_out -->
+<g id="node11" class="node">
+<title>host_gnss_console_out</title>
+<polygon fill="none" stroke="green" points="365.5,-271 188.5,-271 188.5,-235 365.5,-235 365.5,-271"/>
+<text text-anchor="middle" x="277" y="-249.3" font-family="Times,serif" font-size="14.00">internal/gnsshvc_fifo_vm.out</text>
+</g>
+<!-- vmm&#45;&gt;host_gnss_console_out -->
+<g id="edge13" class="edge">
+<title>vmm&#45;&gt;host_gnss_console_out</title>
+<path fill="none" stroke="green" d="M354.93,-197.99C341.65,-207.2 324.73,-218.92 310.05,-229.1"/>
+<polygon fill="green" stroke="green" points="307.93,-226.3 301.71,-234.88 311.92,-232.06 307.93,-226.3"/>
+</g>
+<!-- fixed_location_console -->
+<g id="node15" class="node">
+<title>fixed_location_console</title>
+<polygon fill="none" stroke="blue" points="525,-124 387,-124 387,-88 525,-88 525,-124"/>
+<text text-anchor="middle" x="456" y="-102.3" font-family="Times,serif" font-size="14.00">/dev/hvc6 | /dev/gnss1</text>
+</g>
+<!-- vmm&#45;&gt;fixed_location_console -->
+<g id="edge19" class="edge">
+<title>vmm&#45;&gt;fixed_location_console</title>
+<path fill="none" stroke="blue" d="M403.17,-156.44C411.9,-148.27 421.69,-139.11 430.45,-130.91"/>
+<polygon fill="blue" stroke="blue" points="400.61,-154.05 395.7,-163.44 405.39,-159.16 400.61,-154.05"/>
+<polygon fill="blue" stroke="blue" points="432.92,-133.39 437.83,-124.01 428.14,-128.28 432.92,-133.39"/>
+</g>
+<!-- gnss_console -->
+<g id="node16" class="node">
+<title>gnss_console</title>
+<polygon fill="none" stroke="green" points="369,-124 231,-124 231,-88 369,-88 369,-124"/>
+<text text-anchor="middle" x="300" y="-102.3" font-family="Times,serif" font-size="14.00">/dev/hvc5 | /dev/gnss0</text>
+</g>
+<!-- vmm&#45;&gt;gnss_console -->
+<g id="edge20" class="edge">
+<title>vmm&#45;&gt;gnss_console</title>
+<path fill="none" stroke="green" d="M352.83,-156.44C344.1,-148.27 334.31,-139.11 325.55,-130.91"/>
+<polygon fill="green" stroke="green" points="350.61,-159.16 360.3,-163.44 355.39,-154.05 350.61,-159.16"/>
+<polygon fill="green" stroke="green" points="327.86,-128.28 318.17,-124.01 323.08,-133.39 327.86,-128.28"/>
+</g>
+<!-- webrtc&#45;&gt;gnss_grpc_server -->
+<g id="edge9" class="edge">
+<title>webrtc&#45;&gt;gnss_grpc_server</title>
+<path fill="none" stroke="black" d="M223.3,-451.35C242.27,-441.87 266.25,-429.87 286.67,-419.66"/>
+<polygon fill="black" stroke="black" points="221.45,-448.36 214.07,-455.97 224.58,-454.62 221.45,-448.36"/>
+<polygon fill="black" stroke="black" points="288.4,-422.72 295.77,-415.11 285.26,-416.45 288.4,-422.72"/>
+</g>
+<!-- host_gnss_console_in&#45;&gt;vmm -->
+<g id="edge14" class="edge">
+<title>host_gnss_console_in&#45;&gt;vmm</title>
+<path fill="none" stroke="green" d="M156.3,-234.97C206.8,-222.9 273.66,-206.93 320.7,-195.69"/>
+<polygon fill="green" stroke="green" points="321.81,-199.02 330.72,-193.3 320.18,-192.21 321.81,-199.02"/>
+</g>
+<!-- host_gnss_console_out&#45;&gt;gnss_grpc_proxy -->
+<g id="edge12" class="edge">
+<title>host_gnss_console_out&#45;&gt;gnss_grpc_proxy</title>
+<path fill="none" stroke="green" d="M301.71,-271.12C315.05,-280.37 331.68,-291.9 346,-301.82"/>
+<polygon fill="green" stroke="green" points="344.36,-304.94 354.57,-307.76 348.35,-299.19 344.36,-304.94"/>
+</g>
+<!-- host_fixed_location_console_in -->
+<g id="node12" class="node">
+<title>host_fixed_location_console_in</title>
+<polygon fill="none" stroke="blue" points="574,-271 384,-271 384,-235 574,-235 574,-271"/>
+<text text-anchor="middle" x="479" y="-249.3" font-family="Times,serif" font-size="14.00">internal/locationhvc_fifo_vm.in</text>
+</g>
+<!-- host_fixed_location_console_in&#45;&gt;gnss_grpc_proxy -->
+<g id="edge15" class="edge">
+<title>host_fixed_location_console_in&#45;&gt;gnss_grpc_proxy</title>
+<path fill="none" stroke="blue" d="M446.02,-276.86C431.46,-286.95 414.68,-298.58 401.43,-307.76"/>
+<polygon fill="blue" stroke="blue" points="448.07,-279.7 454.29,-271.12 444.08,-273.94 448.07,-279.7"/>
+</g>
+<!-- host_fixed_location_console_in&#45;&gt;vmm -->
+<g id="edge18" class="edge">
+<title>host_fixed_location_console_in&#45;&gt;vmm</title>
+<path fill="none" stroke="blue" d="M445.95,-229.1C431.27,-218.92 414.35,-207.2 401.07,-197.99"/>
+<polygon fill="blue" stroke="blue" points="444.08,-232.06 454.29,-234.88 448.07,-226.3 444.08,-232.06"/>
+</g>
+<!-- host_fixed_location_console_out -->
+<g id="node13" class="node">
+<title>host_fixed_location_console_out</title>
+<polygon fill="none" stroke="blue" points="789.5,-271 592.5,-271 592.5,-235 789.5,-235 789.5,-271"/>
+<text text-anchor="middle" x="691" y="-249.3" font-family="Times,serif" font-size="14.00">internal/locationhvc_fifo_vm.out</text>
+</g>
+<!-- host_fixed_location_console_out&#45;&gt;gnss_grpc_proxy -->
+<g id="edge16" class="edge">
+<title>host_fixed_location_console_out&#45;&gt;gnss_grpc_proxy</title>
+<path fill="none" stroke="blue" d="M614.83,-271.03C562.08,-282.83 492.61,-298.37 442.57,-309.56"/>
+<polygon fill="blue" stroke="blue" points="441.52,-306.21 432.52,-311.81 443.04,-313.04 441.52,-306.21"/>
+</g>
+<!-- host_fixed_location_console_out&#45;&gt;vmm -->
+<g id="edge17" class="edge">
+<title>host_fixed_location_console_out&#45;&gt;vmm</title>
+<path fill="none" stroke="blue" d="M614.83,-234.97C559.87,-222.67 486.76,-206.32 436.39,-195.06"/>
+<polygon fill="blue" stroke="blue" points="437.15,-191.64 426.63,-192.88 435.62,-198.47 437.15,-191.64"/>
+</g>
+<!-- gnss_hal -->
+<g id="node14" class="node">
+<title>gnss_hal</title>
+<g id="a_node14"><a xlink:href="https://cs.android.com/android/platform/superproject/+/master:hardware/interfaces/gnss/aidl/default/" xlink:title="vendor.gnss&#45;default">
+<ellipse fill="none" stroke="black" cx="378" cy="-34" rx="79.89" ry="18"/>
+<text text-anchor="middle" x="378" y="-30.3" font-family="Times,serif" font-size="14.00">vendor.gnss&#45;default</text>
+</a>
+</g>
+</g>
+<!-- fixed_location_console&#45;&gt;gnss_hal -->
+<g id="edge5" class="edge">
+<title>fixed_location_console&#45;&gt;gnss_hal</title>
+<path fill="none" stroke="blue" d="M429.18,-80.93C421.11,-73.68 412.26,-65.75 404.23,-58.54"/>
+<polygon fill="blue" stroke="blue" points="426.94,-83.62 436.72,-87.7 431.61,-78.41 426.94,-83.62"/>
+<polygon fill="blue" stroke="blue" points="406.3,-55.7 396.52,-51.62 401.63,-60.91 406.3,-55.7"/>
+</g>
+<!-- gnss_console&#45;&gt;gnss_hal -->
+<g id="edge6" class="edge">
+<title>gnss_console&#45;&gt;gnss_hal</title>
+<path fill="none" stroke="green" d="M326.82,-80.93C334.89,-73.68 343.74,-65.75 351.77,-58.54"/>
+<polygon fill="green" stroke="green" points="324.39,-78.41 319.28,-87.7 329.06,-83.62 324.39,-78.41"/>
+<polygon fill="green" stroke="green" points="354.37,-60.91 359.48,-51.62 349.7,-55.7 354.37,-60.91"/>
+</g>
+</g>
+</svg>
diff --git a/host/commands/gnss_grpc_proxy/gnss_grpc_proxy.cpp b/host/commands/gnss_grpc_proxy/gnss_grpc_proxy.cpp
index c5ecc44..10f94f5 100644
--- a/host/commands/gnss_grpc_proxy/gnss_grpc_proxy.cpp
+++ b/host/commands/gnss_grpc_proxy/gnss_grpc_proxy.cpp
@@ -21,7 +21,13 @@
 #include <memory>
 #include <string>
 
-#include <grpcpp.h>
+#include <grpc/grpc.h>
+#include <grpcpp/ext/proto_server_reflection_plugin.h>
+#include <grpcpp/health_check_service_interface.h>
+#include <grpcpp/server.h>
+#include <grpcpp/server_builder.h>
+#include <grpcpp/server_context.h>
+#include <grpcpp/server_posix.h>
 
 #include "gnss_grpc_proxy.grpc.pb.h"
 
@@ -38,18 +44,22 @@
 #include <android-base/strings.h>
 #include <gflags/gflags.h>
 
-#include <common/libs/fs/shared_fd.h>
 #include <common/libs/fs/shared_buf.h>
+#include <common/libs/fs/shared_fd.h>
 #include <common/libs/fs/shared_select.h>
 #include <host/libs/config/cuttlefish_config.h>
+#include <host/libs/config/logging.h>
+#include <queue>
 
+using gnss_grpc_proxy::GnssGrpcProxy;
+using gnss_grpc_proxy::SendGpsReply;
+using gnss_grpc_proxy::SendGpsRequest;
+using gnss_grpc_proxy::SendGpsCoordinatesReply;
+using gnss_grpc_proxy::SendGpsCoordinatesRequest;
 using grpc::Server;
 using grpc::ServerBuilder;
 using grpc::ServerContext;
 using grpc::Status;
-using gnss_grpc_proxy::SendNmeaRequest;
-using gnss_grpc_proxy::SendNmeaReply;
-using gnss_grpc_proxy::GnssGrpcProxy;
 
 DEFINE_int32(gnss_in_fd,
              -1,
@@ -58,41 +68,94 @@
              -1,
              "File descriptor for the gnss's output channel");
 
+DEFINE_int32(fixed_location_in_fd, -1,
+             "File descriptor for the fixed location input channel");
+DEFINE_int32(fixed_location_out_fd, -1,
+             "File descriptor for the fixed location output channel");
+
 DEFINE_int32(gnss_grpc_port,
              -1,
              "Service port for gnss grpc");
+DEFINE_string(gnss_grpc_socket, "", "Service socket path for gnss grpc");
 
-DEFINE_string(gnss_file_path,
-              "",
-              "NMEA file path for gnss grpc");
+DEFINE_string(gnss_file_path, "",
+              "gnss raw measurement file path for gnss grpc");
+DEFINE_string(fixed_location_file_path, "",
+              "fixed location file path for gnss grpc");
 
 constexpr char CMD_GET_LOCATION[] = "CMD_GET_LOCATION";
 constexpr char CMD_GET_RAWMEASUREMENT[] = "CMD_GET_RAWMEASUREMENT";
 constexpr char END_OF_MSG_MARK[] = "\n\n\n\n";
 
 constexpr uint32_t GNSS_SERIAL_BUFFER_SIZE = 4096;
+
+std::string GenerateGpsLine(const std::string& dataPoint) {
+  std::string unix_time_millis =
+      std::to_string(std::chrono::duration_cast<std::chrono::milliseconds>(
+                         std::chrono::system_clock::now().time_since_epoch())
+                         .count());
+  std::string formatted_location =
+      std::string("Fix,GPS,") + dataPoint + "," +
+      std::string("0.000000,3.790092,0.000000,") + unix_time_millis + "," +
+      std::string("0.086023256,0.0,11529389988248");
+
+  return formatted_location;
+}
 // Logic and data behind the server's behavior.
 class GnssGrpcProxyServiceImpl final : public GnssGrpcProxy::Service {
   public:
-    GnssGrpcProxyServiceImpl(cuttlefish::SharedFD gnss_in,
-                     cuttlefish::SharedFD gnss_out) : gnss_in_(gnss_in),
-                                                  gnss_out_(gnss_out) {}
-    Status SendNmea(ServerContext* context, const SendNmeaRequest* request,
-                    SendNmeaReply* reply) override {
-      reply->set_reply("Received nmea record.");
-      auto buffer = request->nmea();
-      std::lock_guard<std::mutex> lock(cached_nmea_mutex);
-      cached_nmea = request->nmea();
-      return Status::OK;
-    }
+   GnssGrpcProxyServiceImpl(cuttlefish::SharedFD gnss_in,
+                            cuttlefish::SharedFD gnss_out,
+                            cuttlefish::SharedFD fixed_location_in,
+                            cuttlefish::SharedFD fixed_location_out)
+       : gnss_in_(gnss_in),
+         gnss_out_(gnss_out),
+         fixed_location_in_(fixed_location_in),
+         fixed_location_out_(fixed_location_out) {
+          //Set the default GPS delay to 1 second
+          fixed_locations_delay_=1000;
+         }
+
+
+   Status SendGps(ServerContext* context, const SendGpsRequest* request,
+                  SendGpsReply* reply) override {
+     reply->set_reply("Received gps record");
+     std::lock_guard<std::mutex> lock(cached_fixed_location_mutex);
+     cached_fixed_location = request->gps();
+     return Status::OK;
+   }
+
+
+  std::string ConvertCoordinate(gnss_grpc_proxy::GpsCoordinates coordinate){
+    std::string latitude = std::to_string(coordinate.latitude());
+    std::string longitude = std::to_string(coordinate.longitude());
+    std::string elevation = std::to_string(coordinate.elevation());
+    std::string result = latitude + "," + longitude + "," + elevation;
+    return result;
+  }
+
+   Status SendGpsVector(ServerContext* context,
+                        const SendGpsCoordinatesRequest* request,
+                        SendGpsCoordinatesReply* reply) override {
+     reply->set_status(SendGpsCoordinatesReply::OK);//update protobuf reply
+     {
+       std::lock_guard<std::mutex> lock(fixed_locations_queue_mutex_);
+       // Reset local buffers
+       fixed_locations_queue_ = {};
+       // Make a local copy of the input buffers
+       for (auto loc : request->coordinates()) {
+         fixed_locations_queue_.push(ConvertCoordinate(loc));
+       }
+       fixed_locations_delay_ = request->delay();
+     }
+
+     return Status::OK;
+   }
 
     void sendToSerial() {
-      std::lock_guard<std::mutex> lock(cached_nmea_mutex);
-      if (!isNMEA(cached_nmea)) {
-        return;
-      }
-      ssize_t bytes_written =
-          cuttlefish::WriteAll(gnss_in_, cached_nmea + END_OF_MSG_MARK);
+      std::lock_guard<std::mutex> lock(cached_fixed_location_mutex);
+      ssize_t bytes_written = cuttlefish::WriteAll(
+          fixed_location_in_, cached_fixed_location + END_OF_MSG_MARK);
       if (bytes_written < 0) {
           LOG(ERROR) << "Error writing to fd: " << gnss_in_->StrError();
       }
@@ -122,13 +185,18 @@
     void StartServer() {
       // Create a new thread to handle writes to the gnss and to the any client
       // connected to the socket.
-      read_thread_ = std::thread([this]() { ReadLoop(); });
+      fixed_location_write_thread_ =
+          std::thread([this]() { WriteFixedLocationFromQueue(); });
+      measurement_read_thread_ =
+          std::thread([this]() { ReadMeasurementLoop(); });
+      fixed_location_read_thread_ =
+          std::thread([this]() { ReadFixedLocLoop(); });
     }
 
-    void StartReadNmeaFileThread() {
-      // Create a new thread to read nmea data.
-      nmea_file_read_thread_ =
-          std::thread([this]() { ReadNmeaFromLocalFile(); });
+    void StartReadFixedLocationFileThread() {
+      // Create a new thread to read fixed_location data.
+      fixed_location_file_read_thread_ =
+          std::thread([this]() { ReadFixedLocationFromLocalFile(); });
     }
 
     void StartReadGnssRawMeasurementFileThread() {
@@ -137,36 +205,29 @@
           std::thread([this]() { ReadGnssRawMeasurement(); });
     }
 
-    void ReadNmeaFromLocalFile() {
-      std::ifstream file(FLAGS_gnss_file_path);
+    void ReadFixedLocationFromLocalFile() {
+      std::ifstream file(FLAGS_fixed_location_file_path);
       if (file.is_open()) {
-          std::string line;
-          std::string lastLine;
-          int count = 0;
-          while (std::getline(file, line)) {
-              count++;
-              /* Only support a lite version of NEMA format to make it simple.
-               * Records will only contains $GPGGA, $GPRMC,
-               * $GPGGA,213204.00,3725.371240,N,12205.589239,W,7,,0.38,-26.75,M,0.0,M,0.0,0000*78
-               * $GPRMC,213204.00,A,3725.371240,N,12205.589239,W,000.0,000.0,290819,,,A*49
-               * $GPGGA,....
-               * $GPRMC,....
-               * Sending at 1Hz, currently user should
-               * provide a NMEA file that has one location per second. need some extra work
-               * to make it more generic, i.e. align with the timestamp in the file.
-               */
-              if (count % 2 == 0) {
-                {
-                  std::lock_guard<std::mutex> lock(cached_nmea_mutex);
-                  cached_nmea = lastLine + '\n' + line;
-                }
-                std::this_thread::sleep_for(std::chrono::milliseconds(1000));
-              }
-              lastLine = line;
+        std::string line;
+        while (std::getline(file, line)) {
+          /* Only support fix location format to make it simple.
+           * Records will only contains 'Fix' prefix.
+           * Sample line:
+           * Fix,GPS,37.7925002,-122.3979132,13.462797,0.000000,48.000000,0.000000,1593029872254,0.581968,0.000000
+           * Sending at 1Hz, currently user should provide a fixed location
+           * file that has one location per second. need some extra work to
+           * make it more generic, i.e. align with the timestamp in the file.
+           */
+          {
+            std::lock_guard<std::mutex> lock(cached_fixed_location_mutex);
+            cached_fixed_location = line;
           }
+          std::this_thread::sleep_for(std::chrono::milliseconds(1000));
+        }
           file.close();
       } else {
-        LOG(ERROR) << "Can not open NMEA file: " << FLAGS_gnss_file_path ;
+        LOG(ERROR) << "Can not open fixed location file: "
+                   << FLAGS_gnss_file_path;
         return;
       }
     }
@@ -223,59 +284,90 @@
     }
 
     ~GnssGrpcProxyServiceImpl() {
-      if (nmea_file_read_thread_.joinable()) {
-        nmea_file_read_thread_.join();
+      if (fixed_location_file_read_thread_.joinable()) {
+        fixed_location_file_read_thread_.join();
+      }
+      if (fixed_location_write_thread_.joinable()) {
+        fixed_location_write_thread_.join();
       }
       if (measurement_file_read_thread_.joinable()) {
         measurement_file_read_thread_.join();
       }
-      if (read_thread_.joinable()) {
-        read_thread_.join();
+      if (measurement_read_thread_.joinable()) {
+        measurement_read_thread_.join();
+      }
+      if (fixed_location_read_thread_.joinable()) {
+        fixed_location_read_thread_.join();
       }
     }
 
   private:
-    [[noreturn]] void ReadLoop() {
-      cuttlefish::SharedFDSet read_set;
-      read_set.Set(gnss_out_);
-      std::vector<char> buffer(GNSS_SERIAL_BUFFER_SIZE);
-      int total_read = 0;
-      std::string gnss_cmd_str;
-      int flags = gnss_out_->Fcntl(F_GETFL, 0);
-      gnss_out_->Fcntl(F_SETFL, flags | O_NONBLOCK);
-      while (true) {
-        auto bytes_read = gnss_out_->Read(buffer.data(), buffer.size());
-        if (bytes_read > 0) {
-          std::string s(buffer.data(), bytes_read);
-          gnss_cmd_str += s;
-          // In case random string sent though /dev/gnss0, gnss_cmd_str will auto resize,
-          // to get rid of first page.
-          if (gnss_cmd_str.size() > GNSS_SERIAL_BUFFER_SIZE * 2) {
-            gnss_cmd_str = gnss_cmd_str.substr(gnss_cmd_str.size() - GNSS_SERIAL_BUFFER_SIZE);
-          }
-          total_read += bytes_read;
-          if (gnss_cmd_str.find(CMD_GET_LOCATION) != std::string::npos) {
-            sendToSerial();
-            gnss_cmd_str = "";
-            total_read = 0;
-          }
+   void SendCommand(std::string command, cuttlefish::SharedFD source_out,
+                    int out_fd) {
+     std::vector<char> buffer(GNSS_SERIAL_BUFFER_SIZE);
+     std::string cmd_str;
+     auto bytes_read = source_out->Read(buffer.data(), buffer.size());
+     if (bytes_read > 0) {
+       std::string s(buffer.data(), bytes_read);
+       cmd_str += s;
+       // In case random string sent though /dev/gnss1, cmd_str will
+       // auto resize, to get rid of first page.
+       if (cmd_str.size() > GNSS_SERIAL_BUFFER_SIZE * 2) {
+         cmd_str = cmd_str.substr(cmd_str.size() - GNSS_SERIAL_BUFFER_SIZE);
+       }
+       if (cmd_str.find(command) != std::string::npos) {
+         if (command == CMD_GET_RAWMEASUREMENT) {
+           sendGnssRawToSerial();
+         } else if (command == CMD_GET_LOCATION) {
+           sendToSerial();
+         }
+         cmd_str = "";
+       }
+     } else {
+       if (source_out->GetErrno() == EAGAIN ||
+           source_out->GetErrno() == EWOULDBLOCK) {
+         std::this_thread::sleep_for(std::chrono::milliseconds(100));
+       } else {
+         LOG(ERROR) << "Error reading fd " << out_fd << ": "
+                    << " Error code: " << source_out->GetErrno()
+                    << " Error sg:" << source_out->StrError();
+       }
+     }
+   }
 
-          if (gnss_cmd_str.find(CMD_GET_RAWMEASUREMENT) != std::string::npos) {
-            sendGnssRawToSerial();
-            gnss_cmd_str = "";
-            total_read = 0;
-          }
-        } else {
-          if (gnss_out_->GetErrno() == EAGAIN|| gnss_out_->GetErrno() == EWOULDBLOCK) {
-            std::this_thread::sleep_for(std::chrono::milliseconds(100));
-          } else {
-            LOG(ERROR) << "Error reading fd " << FLAGS_gnss_out_fd << ": "
-              << " Error code: " << gnss_out_->GetErrno()
-              << " Error sg:" << gnss_out_->StrError();
-          }
-        }
-      }
-    }
+   [[noreturn]] void ReadMeasurementLoop() {
+     int flags = gnss_out_->Fcntl(F_GETFL, 0);
+     gnss_out_->Fcntl(F_SETFL, flags | O_NONBLOCK);
+
+     while (true) {
+       SendCommand(CMD_GET_RAWMEASUREMENT, gnss_out_, FLAGS_gnss_out_fd);
+     }
+   }
+
+   [[noreturn]] void ReadFixedLocLoop() {
+     int flags2 = fixed_location_out_->Fcntl(F_GETFL, 0);
+     fixed_location_out_->Fcntl(F_SETFL, flags2 | O_NONBLOCK);
+     while (true) {
+       SendCommand(CMD_GET_LOCATION, fixed_location_out_,
+                   FLAGS_fixed_location_out_fd);
+     }
+   }
+
+   [[noreturn]] void WriteFixedLocationFromQueue() {
+      while (true) {
+         if (!fixed_locations_queue_.empty()) {
+         std::string dataPoint = fixed_locations_queue_.front();
+         std::string line = GenerateGpsLine(dataPoint);
+         std::lock_guard<std::mutex> lock(cached_fixed_location_mutex);
+         cached_fixed_location = line;
+         {
+           std::lock_guard<std::mutex> lock(fixed_locations_queue_mutex_);
+           fixed_locations_queue_.pop();
+         }
+       }
+       std::this_thread::sleep_for(std::chrono::milliseconds(fixed_locations_delay_));
+     }
+   }
 
     std::string getTimeNanosFromLine(const std::string& line) {
       // TimeNanos is in column #3.
@@ -288,27 +380,32 @@
       return !inputStr.empty() && android::base::StartsWith(inputStr, "# Raw");
     }
 
-    bool isNMEA(const std::string& inputStr) {
-      return !inputStr.empty() &&
-             (android::base::StartsWith(inputStr, "$GPRMC") ||
-              android::base::StartsWith(inputStr, "$GPRMA"));
-    }
-
     cuttlefish::SharedFD gnss_in_;
     cuttlefish::SharedFD gnss_out_;
-    std::thread read_thread_;
-    std::thread nmea_file_read_thread_;
+    cuttlefish::SharedFD fixed_location_in_;
+    cuttlefish::SharedFD fixed_location_out_;
+
+    std::thread measurement_read_thread_;
+    std::thread fixed_location_read_thread_;
+    std::thread fixed_location_file_read_thread_;
+    std::thread fixed_location_write_thread_;
     std::thread measurement_file_read_thread_;
 
-    std::string cached_nmea;
-    std::mutex cached_nmea_mutex;
+    std::string cached_fixed_location;
+    std::mutex cached_fixed_location_mutex;
 
     std::string cached_gnss_raw;
     std::string previous_cached_gnss_raw;
     std::mutex cached_gnss_raw_mutex;
+
+    std::queue<std::string> fixed_locations_queue_;
+    std::mutex fixed_locations_queue_mutex_;
+    int fixed_locations_delay_;
 };
 
 void RunServer() {
+  grpc::EnableDefaultHealthCheckService(true);
+  grpc::reflection::InitProtoReflectionServerBuilderPlugin();
   auto gnss_in = cuttlefish::SharedFD::Dup(FLAGS_gnss_in_fd);
   close(FLAGS_gnss_in_fd);
   if (!gnss_in->IsOpen()) {
@@ -325,12 +422,33 @@
                << gnss_out->StrError();
     return;
   }
+
+  auto fixed_location_in =
+      cuttlefish::SharedFD::Dup(FLAGS_fixed_location_in_fd);
+  close(FLAGS_fixed_location_in_fd);
+  if (!fixed_location_in->IsOpen()) {
+    LOG(ERROR) << "Error dupping fd " << FLAGS_fixed_location_in_fd << ": "
+               << fixed_location_in->StrError();
+    return;
+  }
+  close(FLAGS_fixed_location_in_fd);
+
+  auto fixed_location_out =
+      cuttlefish::SharedFD::Dup(FLAGS_fixed_location_out_fd);
+  close(FLAGS_fixed_location_out_fd);
+  if (!fixed_location_out->IsOpen()) {
+    LOG(ERROR) << "Error dupping fd " << FLAGS_fixed_location_out_fd << ": "
+               << fixed_location_out->StrError();
+    return;
+  }
+
   auto server_address("0.0.0.0:" + std::to_string(FLAGS_gnss_grpc_port));
-  GnssGrpcProxyServiceImpl service(gnss_in, gnss_out);
+  GnssGrpcProxyServiceImpl service(gnss_in, gnss_out, fixed_location_in,
+                                   fixed_location_out);
   service.StartServer();
   if (!FLAGS_gnss_file_path.empty()) {
     // TODO: On-demand start the read file threads according to data type.
-    service.StartReadNmeaFileThread();
+    service.StartReadFixedLocationFileThread();
     service.StartReadGnssRawMeasurementFileThread();
 
     // In the local mode, we are not start a grpc server, use a infinite loop instead
@@ -341,6 +459,10 @@
     ServerBuilder builder;
     // Listen on the given address without any authentication mechanism.
     builder.AddListeningPort(server_address, grpc::InsecureServerCredentials());
+    if (!FLAGS_gnss_grpc_socket.empty()) {
+      builder.AddListeningPort("unix:" + FLAGS_gnss_grpc_socket,
+                               grpc::InsecureServerCredentials());
+    }
     // Register "service" as the instance through which we'll communicate with
     // clients. In this case it corresponds to an *synchronous* service.
     builder.RegisterService(&service);
@@ -357,11 +479,11 @@
 
 
 int main(int argc, char** argv) {
-  ::android::base::InitLogging(argv, android::base::StderrLogger);
+  cuttlefish::DefaultSubprocessLogging(argv);
   ::gflags::ParseCommandLineFlags(&argc, &argv, true);
 
   LOG(DEBUG) << "Starting gnss grpc proxy server...";
   RunServer();
 
   return 0;
-}
\ No newline at end of file
+}
diff --git a/host/commands/gnss_grpc_proxy/gnss_grpc_proxy.proto b/host/commands/gnss_grpc_proxy/gnss_grpc_proxy.proto
index 4bfc854..20567b8 100644
--- a/host/commands/gnss_grpc_proxy/gnss_grpc_proxy.proto
+++ b/host/commands/gnss_grpc_proxy/gnss_grpc_proxy.proto
@@ -1,3 +1,18 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 syntax = "proto3";
 
 package gnss_grpc_proxy;
@@ -9,14 +24,70 @@
 service GnssGrpcProxy {
   // Sends NmeaRequest
   rpc SendNmea (SendNmeaRequest) returns (SendNmeaReply) {}
+
+  // Sends GpsRequest
+  rpc SendGps (SendGpsRequest) returns (SendGpsReply) {}
+
+  //// Sends GPS vector of data
+  rpc SendGpsVector (SendGpsCoordinatesRequest) returns (SendGpsCoordinatesReply) {}
 }
 
+
 // The request message containing nmea
 message SendNmeaRequest {
   string nmea = 1;
 }
 
-// The response message containing the return information
+// The response message containing the return nmea reply message
 message SendNmeaReply {
   string reply = 1;
 }
+
+
+// The request message containing gps location information
+message SendGpsRequest {
+  string gps = 1;
+}
+
+// The response message containing the return information
+message SendGpsReply {
+  string reply = 1;
+}
+
+message GpsCoordinates {
+  float latitude = 1;
+  float longitude = 2;
+  float elevation = 3;
+}
+
+// The request message containing array of gps locations
+message SendGpsCoordinatesRequest {
+  //Delay in millisecond
+  int32 delay =1;
+  repeated GpsCoordinates coordinates = 2;
+}
+
+// The response message containing the return status or error code if exists
+message SendGpsCoordinatesReply {
+  enum StatusCode {
+    OK = 0;
+    CANCELLED = 1;
+    UNKNOWN = 2;
+    INVALID_ARGUMENT = 3;
+    DEADLINE_EXCEEDED = 4;
+    NOT_FOUND = 5;
+    ALREADY_EXISTS = 6;
+    PERMISSION_DENIED = 7;
+    RESOURCE_EXHAUSTED = 8;
+    FAILED_PRECONDITION = 9;
+    ABORTED = 10;
+    OUT_OF_RANGE = 11;
+    UNIMPLEMENTED = 12;
+    INTERNAL = 13;
+    UNAVAILABLE = 14;
+    DATA_LOSS = 15;
+    UNAUTHENTICATED = 16;
+  }
+
+  StatusCode status = 1;
+}
\ No newline at end of file
diff --git a/host/commands/gnss_grpc_proxy/grpcpp.h b/host/commands/gnss_grpc_proxy/grpcpp.h
deleted file mode 100644
index e952806..0000000
--- a/host/commands/gnss_grpc_proxy/grpcpp.h
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- *
- * Copyright 2015 gRPC authors.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-/// \mainpage gRPC C++ API
-///
-/// The gRPC C++ API mainly consists of the following classes:
-/// <br>
-/// - grpc::Channel, which represents the connection to an endpoint. See [the
-/// gRPC Concepts page](https://grpc.io/docs/guides/concepts.html) for more
-/// details. Channels are created by the factory function grpc::CreateChannel.
-///
-/// - grpc::CompletionQueue, the producer-consumer queue used for all
-/// asynchronous communication with the gRPC runtime.
-///
-/// - grpc::ClientContext and grpc::ServerContext, where optional configuration
-/// for an RPC can be set, such as setting custom metadata to be conveyed to the
-/// peer, compression settings, authentication, etc.
-///
-/// - grpc::Server, representing a gRPC server, created by grpc::ServerBuilder.
-///
-/// Streaming calls are handled with the streaming classes in
-/// \ref sync_stream.h and
-/// \ref async_stream.h.
-///
-/// Refer to the
-/// [examples](https://github.com/grpc/)
-/// for code putting these pieces into play.
-
-#ifndef GRPCPP_GRPCPP_H
-#define GRPCPP_GRPCPP_H
-
-// Pragma for http://include-what-you-use.org/ tool, tells that following
-// headers are not private for grpcpp.h and are part of its interface.
-// IWYU pragma: begin_exports
-#include <grpc/grpc.h>
-
-#include <grpcpp/channel.h>
-#include <grpcpp/client_context.h>
-#include <grpcpp/completion_queue.h>
-#include <grpcpp/create_channel.h>
-#include <grpcpp/create_channel_posix.h>
-#include <grpcpp/server.h>
-#include <grpcpp/server_builder.h>
-#include <grpcpp/server_context.h>
-#include <grpcpp/server_posix.h>
-// IWYU pragma: end_exports
-
-namespace grpc {
-/// Return gRPC library version.
-grpc::string Version();
-}  // namespace grpc
-
-#endif  // GRPCPP_GRPCPP_H
\ No newline at end of file
diff --git a/host/commands/health/health.cpp b/host/commands/health/health.cpp
index 37cce86..aad9869 100644
--- a/host/commands/health/health.cpp
+++ b/host/commands/health/health.cpp
@@ -24,7 +24,7 @@
 #include "host/libs/vm_manager/vm_manager.h"
 
 std::string GetControlSocketPath(const cuttlefish::CuttlefishConfig& config) {
-  return config.ForDefaultInstance().PerInstanceInternalPath(
+  return config.ForDefaultInstance().PerInstanceInternalUdsPath(
       "crosvm_control.sock");
 }
 
@@ -87,6 +87,8 @@
     LOG(ERROR) << "Failed to obtain config object";
     return 1;
   }
+  // TODO(b/260649774): Consistent executable API for selecting an instance
+  auto instance = config->ForInstance(cuttlefish::GetInstance());
 
   if (argc != 2 && argc != 3) {
     return usage();
@@ -114,7 +116,7 @@
     }
   }
 
-  cuttlefish::Command command(config->crosvm_binary());
+  cuttlefish::Command command(instance.crosvm_binary());
   command.AddParameter("battery");
   command.AddParameter("goldfish");
   command.AddParameter(key);
diff --git a/host/commands/host_bugreport/main.cc b/host/commands/host_bugreport/main.cc
index c920e7c..8e8ebfe 100644
--- a/host/commands/host_bugreport/main.cc
+++ b/host/commands/host_bugreport/main.cc
@@ -48,7 +48,7 @@
   }
 }
 
-int CvdHostBugreportMain(int argc, char** argv) {
+Result<void> CvdHostBugreportMain(int argc, char** argv) {
   ::android::base::InitLogging(argv, android::base::StderrLogger);
   google::ParseCommandLineFlags(&argc, &argv, true);
 
@@ -56,7 +56,7 @@
   CHECK(config) << "Unable to find the config";
 
   auto out_path = FLAGS_output.c_str();
-  std::unique_ptr<FILE, decltype(&fclose)> out(fopen(out_path, "wb"), &fclose);
+  std::unique_ptr<FILE, decltype(&fclose)> out(fopen(out_path, "wbe"), &fclose);
   ZipWriter writer(out.get());
 
   auto save = [&writer, config](const std::string& path) {
@@ -77,14 +77,18 @@
     save("launcher.log");
     save("logcat");
     save("metrics.log");
-    auto tombstones = DirectoryContents(instance.PerInstancePath("tombstones"));
+    auto tombstones =
+        CF_EXPECT(DirectoryContents(instance.PerInstancePath("tombstones")),
+                  "Cannot read from tombstones directory.");
     for (const auto& tombstone : tombstones) {
       if (tombstone == "." || tombstone == "..") {
         continue;
       }
       save("tombstones/" + tombstone);
     }
-    auto recordings = DirectoryContents(instance.PerInstancePath("recording"));
+    auto recordings =
+        CF_EXPECT(DirectoryContents(instance.PerInstancePath("recording")),
+                  "Cannot read from recording directory.");
     for (const auto& recording : recordings) {
       if (recording == "." || recording == "..") {
         continue;
@@ -97,12 +101,14 @@
 
   LOG(INFO) << "Saved to \"" << FLAGS_output << "\"";
 
-  return 0;
+  return {};
 }
 
 }  // namespace
 }  // namespace cuttlefish
 
 int main(int argc, char** argv) {
-  return cuttlefish::CvdHostBugreportMain(argc, argv);
+  auto result = cuttlefish::CvdHostBugreportMain(argc, argv);
+  CHECK(result.ok()) << result.error().Message();
+  return 0;
 }
diff --git a/host/commands/kernel_log_monitor/kernel_log_server.cc b/host/commands/kernel_log_monitor/kernel_log_server.cc
index fe3a588..a67d0f9 100644
--- a/host/commands/kernel_log_monitor/kernel_log_server.cc
+++ b/host/commands/kernel_log_monitor/kernel_log_server.cc
@@ -53,13 +53,11 @@
     {cuttlefish::kBootStartedMessage, Event::BootStarted, kBare},
     {cuttlefish::kBootCompletedMessage, Event::BootCompleted, kBare},
     {cuttlefish::kBootFailedMessage, Event::BootFailed, kKeyValuePair},
-    {cuttlefish::kMobileNetworkConnectedMessage, Event::MobileNetworkConnected,
-     kBare},
+    {cuttlefish::kMobileNetworkConnectedMessage, Event::MobileNetworkConnected, kBare},
     {cuttlefish::kWifiConnectedMessage, Event::WifiNetworkConnected, kBare},
-    {cuttlefish::kEthernetConnectedMessage, Event::EthernetNetworkConnected,
-     kBare},
-    // TODO(b/131864854): Replace this with a string less likely to change
-    {"init: starting service 'adbd'...", Event::AdbdStarted, kBare},
+    {cuttlefish::kEthernetConnectedMessage, Event::EthernetNetworkConnected, kBare},
+    {cuttlefish::kAdbdStartedMessage, Event::AdbdStarted, kBare},
+    {cuttlefish::kFastbootdStartedMessage, Event::FastbootdStarted, kBare},
     {cuttlefish::kScreenChangedMessage, Event::ScreenChanged, kKeyValuePair},
     {cuttlefish::kBootloaderLoadedMessage, Event::BootloaderLoaded, kBare},
     {cuttlefish::kKernelLoadedMessage, Event::KernelLoaded, kBare},
@@ -91,11 +89,10 @@
 
 namespace monitor {
 KernelLogServer::KernelLogServer(cuttlefish::SharedFD pipe_fd,
-                                 const std::string& log_name,
-                                 bool deprecated_boot_completed)
+                                 const std::string& log_name)
     : pipe_fd_(pipe_fd),
-      log_fd_(cuttlefish::SharedFD::Open(log_name.c_str(), O_CREAT | O_RDWR | O_APPEND, 0666)),
-      deprecated_boot_completed_(deprecated_boot_completed) {}
+      log_fd_(cuttlefish::SharedFD::Open(log_name.c_str(),
+                                         O_CREAT | O_RDWR | O_APPEND, 0666)) {}
 
 void KernelLogServer::BeforeSelect(cuttlefish::SharedFDSet* fd_read) const {
   fd_read->Set(pipe_fd_);
@@ -167,15 +164,6 @@
           }
           message["metadata"] = metadata;
           ProcessSubscriptions(message, &subscribers_);
-
-          //TODO(b/69417553) Remove this when our clients have transitioned to the
-          // new boot completed
-          if (deprecated_boot_completed_) {
-            // Write to host kernel log
-            FILE* log = popen("/usr/bin/sudo /usr/bin/tee /dev/kmsg", "w");
-            fprintf(log, "%s\n", std::string(stage).c_str());
-            fclose(log);
-          }
         }
       }
       line_.clear();
diff --git a/host/commands/kernel_log_monitor/kernel_log_server.h b/host/commands/kernel_log_monitor/kernel_log_server.h
index 754ff24..cdbd9f0 100644
--- a/host/commands/kernel_log_monitor/kernel_log_server.h
+++ b/host/commands/kernel_log_monitor/kernel_log_server.h
@@ -42,6 +42,7 @@
                          * that we're booting a device.
                          */
   DisplayPowerModeChanged = 10,
+  FastbootdStarted = 11
 };
 
 enum class SubscriptionAction {
@@ -55,9 +56,7 @@
 // Only accept one connection.
 class KernelLogServer {
  public:
-  KernelLogServer(cuttlefish::SharedFD pipe_fd,
-                  const std::string& log_name,
-                  bool deprecated_boot_completed);
+  KernelLogServer(cuttlefish::SharedFD pipe_fd, const std::string& log_name);
 
   ~KernelLogServer() = default;
 
@@ -79,7 +78,6 @@
   cuttlefish::SharedFD pipe_fd_;
   cuttlefish::SharedFD log_fd_;
   std::string line_;
-  bool deprecated_boot_completed_;
   std::vector<EventCallback> subscribers_;
 
   KernelLogServer(const KernelLogServer&) = delete;
diff --git a/host/commands/kernel_log_monitor/main.cc b/host/commands/kernel_log_monitor/main.cc
index c7269b0..4856d8a 100644
--- a/host/commands/kernel_log_monitor/main.cc
+++ b/host/commands/kernel_log_monitor/main.cc
@@ -94,8 +94,8 @@
     return 2;
   }
 
-  monitor::KernelLogServer klog{pipe, instance.PerInstanceLogPath("kernel.log"),
-                                config->deprecated_boot_completed()};
+  monitor::KernelLogServer klog{pipe,
+                                instance.PerInstanceLogPath("kernel.log")};
 
   for (auto subscriber_fd: subscriber_fds) {
     if (subscriber_fd->IsOpen()) {
diff --git a/host/commands/log_tee/log_tee.cpp b/host/commands/log_tee/log_tee.cpp
index b63c2ac..5a959a7 100644
--- a/host/commands/log_tee/log_tee.cpp
+++ b/host/commands/log_tee/log_tee.cpp
@@ -13,6 +13,9 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
+#include <signal.h>
+#include <sys/signalfd.h>
+
 #include <android-base/logging.h>
 #include <android-base/strings.h>
 #include <gflags/gflags.h>
@@ -36,7 +39,7 @@
 
   auto instance = config->ForDefaultInstance();
 
-  if (config->run_as_daemon()) {
+  if (instance.run_as_daemon()) {
     android::base::SetLogger(
         cuttlefish::LogToFiles({instance.launcher_log_path()}));
   } else {
@@ -52,33 +55,89 @@
     android::base::SetDefaultTag(FLAGS_process_name);
   }
 
-  char buf[1 << 16];
-  ssize_t chars_read = 0;
+  // mask SIGINT and handle it using signalfd
+  sigset_t mask;
+  sigemptyset(&mask);
+  sigaddset(&mask, SIGINT);
+  CHECK(sigprocmask(SIG_BLOCK, &mask, NULL) == 0)
+      << "sigprocmask failed: " << strerror(errno);
+  int sfd = signalfd(-1, &mask, 0);
+  CHECK(sfd >= 0) << "signalfd failed: " << strerror(errno);
+  auto int_fd = cuttlefish::SharedFD::Dup(sfd);
+  close(sfd);
+
+  auto poll_fds = std::vector<cuttlefish::PollSharedFd>{
+      cuttlefish::PollSharedFd{
+          .fd = log_fd,
+          .events = POLL_IN,
+          .revents = 0,
+      },
+      cuttlefish::PollSharedFd{
+          .fd = int_fd,
+          .events = POLL_IN,
+          .revents = 0,
+      },
+  };
 
   LOG(DEBUG) << "Starting to read from process " << FLAGS_process_name;
 
-  while ((chars_read = log_fd->Read(buf, sizeof(buf))) > 0) {
-    auto trimmed = android::base::Trim(std::string(buf, chars_read));
-    // Newlines inside `trimmed` are handled by the android logging code.
-    // These checks attempt to determine the log severity coming from crosvm.
-    // There is no guarantee of success all the time since log line boundaries
-    // could be out sync with the reads, but that's ok.
-    if (android::base::StartsWith(trimmed, "[INFO")) {
-      LOG(DEBUG) << trimmed;
-    } else if (android::base::StartsWith(trimmed, "[ERROR")) {
-      LOG(ERROR) << trimmed;
-    } else if (android::base::StartsWith(trimmed, "[WARNING")) {
-      LOG(WARNING) << trimmed;
-    } else if (android::base::StartsWith(trimmed, "[VERBOSE")) {
-      LOG(VERBOSE) << trimmed;
-    } else {
-      LOG(DEBUG) << trimmed;
-    }
-  }
+  char buf[1 << 16];
+  ssize_t chars_read = 0;
+  for (;;) {
+    // We can assume all writers to `log_fd` have completed before a SIGINT is
+    // sent, but we need to make sure we've actually read all the data before
+    // exiting. So, keep reading from `log_fd` until both (1) we get SIGINT and
+    // (2) `log_fd` is empty (but not necessarily EOF).
+    //
+    // This could be simpler if all the writers would close their FDs when they
+    // are finished. Then, we could just read until EOF. However that would
+    // require more work elsewhere in cuttlefish.
+    CHECK(cuttlefish::SharedFD::Poll(poll_fds, /*timeout=*/-1) >= 0)
+        << "poll failed: " << strerror(errno);
+    if (poll_fds[0].revents) {
+      chars_read = log_fd->Read(buf, sizeof(buf));
+      if (chars_read < 0) {
+        LOG(DEBUG) << "Failed to read from process " << FLAGS_process_name
+                   << ": " << log_fd->StrError();
+        break;
+      }
+      if (chars_read == 0) {
+        break;
+      }
+      auto trimmed = android::base::Trim(std::string_view(buf, chars_read));
+      // Newlines inside `trimmed` are handled by the android logging code.
+      // These checks attempt to determine the log severity coming from crosvm.
+      // There is no guarantee of success all the time since log line boundaries
+      // could be out sync with the reads, but that's ok.
+      //
+      // TODO(b/270424669): These checks are wrong, the format is
+      // "[<timestamp> ERROR". Maybe just stop bothering and send
+      // everything to LOG(DEBUG).
+      if (android::base::StartsWith(trimmed, "[INFO")) {
+        LOG(DEBUG) << trimmed;
+      } else if (android::base::StartsWith(trimmed, "[ERROR")) {
+        LOG(ERROR) << trimmed;
+      } else if (android::base::StartsWith(trimmed, "[WARNING")) {
+        LOG(WARNING) << trimmed;
+      } else if (android::base::StartsWith(trimmed, "[VERBOSE")) {
+        LOG(VERBOSE) << trimmed;
+      } else {
+        LOG(DEBUG) << trimmed;
+      }
 
-  if (chars_read < 0) {
-    LOG(DEBUG) << "Failed to read from process " << FLAGS_process_name << ": "
-               << log_fd->StrError();
+      // Go back to polling immediately to see if there is more data, don't
+      // handle any signals yet.
+      continue;
+    }
+    if (poll_fds[1].revents) {
+      struct signalfd_siginfo siginfo;
+      int s = int_fd->Read(&siginfo, sizeof(siginfo));
+      CHECK(s == sizeof(siginfo)) << "bad read size on signalfd, expected "
+                                  << sizeof(siginfo) << " got " << s;
+      CHECK(siginfo.ssi_signo == SIGINT)
+          << "unexpected signal: " << siginfo.ssi_signo;
+      break;
+    }
   }
 
   LOG(DEBUG) << "Finished reading from process " << FLAGS_process_name;
diff --git a/host/commands/metrics/Android.bp b/host/commands/metrics/Android.bp
index c454b53..16ec925 100644
--- a/host/commands/metrics/Android.bp
+++ b/host/commands/metrics/Android.bp
@@ -20,18 +20,30 @@
 cc_binary {
     name: "metrics",
     srcs: [
+        "events.cc",
+        "host_receiver.cc",
         "metrics.cc",
+        "utils.cc",
     ],
     shared_libs: [
-        "libext2_blkid",
+        "cf_proto",
+        "libbase",
+        "libcurl",
         "libcuttlefish_fs",
         "libcuttlefish_utils",
-        "libbase",
+        "libext2_blkid",
+        "libfruit",
         "libjsoncpp",
+        "liblog",
+        "libprotobuf-cpp-lite",
+        "libz",
     ],
     static_libs: [
         "libcuttlefish_host_config",
+        "libcuttlefish_msg_queue",
         "libgflags",
     ],
     defaults: ["cuttlefish_host"],
 }
+
+subdirs = ["proto"]
diff --git a/host/commands/metrics/events.cc b/host/commands/metrics/events.cc
new file mode 100644
index 0000000..591bc17
--- /dev/null
+++ b/host/commands/metrics/events.cc
@@ -0,0 +1,151 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include <android-base/logging.h>
+#include <android-base/strings.h>
+#include <fruit/fruit.h>
+#include <gflags/gflags.h>
+#include <string>
+
+#include "common/libs/utils/flag_parser.h"
+#include "common/libs/utils/tee_logging.h"
+#include "host/commands/metrics/events.h"
+#include "host/commands/metrics/metrics_defs.h"
+#include "host/commands/metrics/proto/cf_metrics_proto.h"
+#include "host/commands/metrics/utils.h"
+#include "host/libs/config/config_flag.h"
+#include "host/libs/config/cuttlefish_config.h"
+#include "host/libs/config/feature.h"
+#include "shared/api_level.h"
+
+using cuttlefish::MetricsExitCodes;
+
+const std::string kLogSourceStr = "CUTTLEFISH_METRICS";
+const int kLogSourceId = 1753;
+const int kCppClientType =
+    19;  // C++ native client type (clientanalytics.proto)
+
+namespace cuttlefish {
+
+Clearcut::Clearcut() {}
+
+Clearcut::~Clearcut() {}
+
+std::unique_ptr<cuttlefish::CuttlefishLogEvent> buildCFLogEvent(
+    uint64_t now_ms, cuttlefish::CuttlefishLogEvent::DeviceType device_type) {
+  uint64_t now_s = now_ms / 1000;
+  uint64_t now_ns = (now_ms % 1000) * 1000000;
+
+  // "cfEvent" is the top level CuttlefishLogEvent
+  auto cfEvent = std::make_unique<cuttlefish::CuttlefishLogEvent>();
+  cfEvent->set_device_type(device_type);
+  cfEvent->set_session_id(metrics::sessionId(now_ms));
+  if (metrics::cfVersion() != "") {
+    cfEvent->set_cuttlefish_version(metrics::cfVersion());
+  }
+  Timestamp* timestamp = cfEvent->mutable_timestamp_ms();
+  timestamp->set_seconds(now_s);
+  timestamp->set_nanos(now_ns);
+  return cfEvent;
+}
+
+void buildCFMetricsEvent(uint64_t now_ms,
+                         cuttlefish::CuttlefishLogEvent* cfEvent,
+                         cuttlefish::MetricsEvent::EventType event_type) {
+  uint64_t now_s = now_ms / 1000;
+  uint64_t now_ns = (now_ms % 1000) * 1000000;
+
+  // "metrics_event" is the 2nd level MetricsEvent
+  cuttlefish::MetricsEvent* metrics_event = cfEvent->mutable_metrics_event();
+  metrics_event->set_event_type(event_type);
+  metrics_event->set_os_type(metrics::osType());
+  metrics_event->set_os_version(metrics::osVersion());
+  metrics_event->set_vmm_type(metrics::vmmManager());
+  if (metrics::vmmVersion() != "") {
+    metrics_event->set_vmm_version(metrics::vmmVersion());
+  }
+  metrics_event->set_company(metrics::company());
+  metrics_event->set_api_level(PRODUCT_SHIPPING_API_LEVEL);
+  Timestamp* metrics_timestamp = metrics_event->mutable_event_time_ms();
+  metrics_timestamp->set_seconds(now_s);
+  metrics_timestamp->set_nanos(now_ns);
+}
+
+std::unique_ptr<LogRequest> buildLogRequest(
+    uint64_t now_ms, cuttlefish::CuttlefishLogEvent* cfEvent) {
+  // "log_request" is the top level LogRequest
+  auto log_request = std::make_unique<LogRequest>();
+  log_request->set_request_time_ms(now_ms);
+  log_request->set_log_source(kLogSourceId);
+  log_request->set_log_source_name(kLogSourceStr);
+  ClientInfo* client_info = log_request->mutable_client_info();
+  client_info->set_client_type(kCppClientType);
+
+  // "cfLogStr" is CuttlefishLogEvent serialized
+  std::string cfLogStr;
+  if (!cfEvent->SerializeToString(&cfLogStr)) {
+    LOG(ERROR) << "SerializeToString failed for event";
+    return nullptr;
+  }
+  LogEvent* logEvent = log_request->add_log_event();
+  logEvent->set_event_time_ms(now_ms);
+  logEvent->set_source_extension(cfLogStr);
+  return log_request;
+}
+
+int Clearcut::SendEvent(cuttlefish::CuttlefishLogEvent::DeviceType device_type,
+                        cuttlefish::MetricsEvent::EventType event_type) {
+  uint64_t now_ms = metrics::epochTimeMs();
+
+  auto cfEvent = buildCFLogEvent(now_ms, device_type);
+  buildCFMetricsEvent(now_ms, cfEvent.get(), event_type);
+  auto logRequest = buildLogRequest(now_ms, cfEvent.get());
+  if (!logRequest) {
+    LOG(ERROR) << "failed to build LogRequest";
+    return kMetricsError;
+  }
+
+  std::string logRequestStr;
+  if (!logRequest->SerializeToString(&logRequestStr)) {
+    LOG(ERROR) << "SerializeToString failed for log_request";
+    return kMetricsError;
+  }
+  return metrics::postReq(logRequestStr, metrics::kProd);
+}
+
+int Clearcut::SendVMStart(cuttlefish::CuttlefishLogEvent::DeviceType device) {
+  return SendEvent(
+      device, cuttlefish::MetricsEvent::CUTTLEFISH_EVENT_TYPE_VM_INSTANTIATION);
+}
+
+int Clearcut::SendVMStop(cuttlefish::CuttlefishLogEvent::DeviceType device) {
+  return SendEvent(device,
+                   cuttlefish::MetricsEvent::CUTTLEFISH_EVENT_TYPE_VM_STOP);
+}
+
+int Clearcut::SendDeviceBoot(
+    cuttlefish::CuttlefishLogEvent::DeviceType device) {
+  return SendEvent(device,
+                   cuttlefish::MetricsEvent::CUTTLEFISH_EVENT_TYPE_DEVICE_BOOT);
+}
+
+int Clearcut::SendLockScreen(
+    cuttlefish::CuttlefishLogEvent::DeviceType device) {
+  return SendEvent(
+      device,
+      cuttlefish::MetricsEvent::CUTTLEFISH_EVENT_TYPE_LOCK_SCREEN_AVAILABLE);
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/metrics/events.h b/host/commands/metrics/events.h
new file mode 100644
index 0000000..348fb80
--- /dev/null
+++ b/host/commands/metrics/events.h
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#pragma once
+
+#include "host/commands/metrics/proto/cf_metrics_proto.h"
+
+namespace cuttlefish {
+
+class Clearcut {
+ private:
+  static int SendEvent(cuttlefish::CuttlefishLogEvent::DeviceType device_type,
+                       cuttlefish::MetricsEvent::EventType event_type);
+
+ public:
+  Clearcut();
+  ~Clearcut();
+  static int SendVMStart(
+      cuttlefish::CuttlefishLogEvent::DeviceType device_type);
+  static int SendVMStop(cuttlefish::CuttlefishLogEvent::DeviceType device_type);
+  static int SendDeviceBoot(
+      cuttlefish::CuttlefishLogEvent::DeviceType device_type);
+  static int SendLockScreen(
+      cuttlefish::CuttlefishLogEvent::DeviceType device_type);
+};
+
+}  // namespace cuttlefish
diff --git a/host/commands/metrics/host_receiver.cc b/host/commands/metrics/host_receiver.cc
new file mode 100644
index 0000000..617abd4
--- /dev/null
+++ b/host/commands/metrics/host_receiver.cc
@@ -0,0 +1,94 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include <android-base/logging.h>
+#include <android-base/strings.h>
+#include <curl/curl.h>
+#include <gflags/gflags.h>
+#include <json/json.h>
+#include <net/if.h>
+#include <netinet/in.h>
+#include <string.h>
+#include <sys/ioctl.h>
+#include <sys/time.h>
+#include <sys/utsname.h>
+#include <chrono>
+#include <ctime>
+#include <iostream>
+
+#include "common/libs/utils/tee_logging.h"
+#include "host/commands/metrics/events.h"
+#include "host/commands/metrics/host_receiver.h"
+#include "host/commands/metrics/metrics_defs.h"
+#include "host/commands/metrics/proto/cf_metrics_proto.h"
+#include "host/libs/config/cuttlefish_config.h"
+#include "host/libs/metrics/metrics_receiver.h"
+#include "host/libs/msg_queue/msg_queue.h"
+#include "host/libs/vm_manager/crosvm_manager.h"
+#include "host/libs/vm_manager/qemu_manager.h"
+
+using cuttlefish::MetricsExitCodes;
+
+namespace cuttlefish {
+
+MetricsHostReceiver::MetricsHostReceiver(
+    const cuttlefish::CuttlefishConfig& config)
+    : config_(config) {}
+
+MetricsHostReceiver::~MetricsHostReceiver() {}
+
+void MetricsHostReceiver::ServerLoop() {
+  auto msg_queue = cuttlefish::SysVMessageQueue::Create("cuttlefish_ipc", 'a');
+  if (msg_queue == NULL) {
+    LOG(FATAL) << "create: failed to create cuttlefish_ipc";
+  }
+
+  struct msg_buffer msg = {0, {0}};
+  while (1) {
+    int rc = msg_queue->Receive(&msg, MAX_MSG_SIZE, 1, true);
+    if (rc == -1) {
+      LOG(FATAL) << "receive: failed to receive any messages";
+    }
+    std::string text(msg.mesg_text);
+    LOG(INFO) << "Metrics host received: " << text;
+    auto hostDev = cuttlefish::CuttlefishLogEvent::CUTTLEFISH_DEVICE_TYPE_HOST;
+    if (text == "VMStart") {
+      rc = Clearcut::SendVMStart(hostDev);
+    } else if (text == "VMStop") {
+      rc = Clearcut::SendVMStop(hostDev);
+    } else if (text == "DeviceBoot") {
+      rc = Clearcut::SendDeviceBoot(hostDev);
+    } else if (text == "LockScreen") {
+      rc = Clearcut::SendLockScreen(hostDev);
+    }
+    if (rc != MetricsExitCodes::kSuccess) {
+      LOG(ERROR) << "Message failed to send to ClearCut: " << text;
+    }
+    sleep(1);
+  }
+}
+
+void MetricsHostReceiver::Join() { thread_.join(); }
+
+bool MetricsHostReceiver::Initialize() {
+  if (!config_.enable_metrics()) {
+    LOG(ERROR) << "init: metrics not enabled";
+    return false;
+  }
+  thread_ = std::thread(&MetricsHostReceiver::ServerLoop, this);
+  return true;
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/metrics/host_receiver.h b/host/commands/metrics/host_receiver.h
new file mode 100644
index 0000000..0213f23
--- /dev/null
+++ b/host/commands/metrics/host_receiver.h
@@ -0,0 +1,36 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#pragma once
+
+#include <thread>
+#include "host/libs/config/cuttlefish_config.h"
+
+namespace cuttlefish {
+
+class MetricsHostReceiver {
+ private:
+  const CuttlefishConfig& config_;
+  std::thread thread_;
+  void ServerLoop();
+
+ public:
+  MetricsHostReceiver(const cuttlefish::CuttlefishConfig& config);
+  ~MetricsHostReceiver();
+  bool Initialize();
+  void Join();
+};
+
+}  // namespace cuttlefish
diff --git a/host/commands/metrics/metrics.cc b/host/commands/metrics/metrics.cc
index 5fcf58e..70500d9 100644
--- a/host/commands/metrics/metrics.cc
+++ b/host/commands/metrics/metrics.cc
@@ -13,28 +13,46 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
-#include <android-base/strings.h>
-#include <gflags/gflags.h>
 #include <android-base/logging.h>
+#include <android-base/strings.h>
+#include <curl/curl.h>
+#include <gflags/gflags.h>
+#include <json/json.h>
+#include <net/if.h>
+#include <netinet/in.h>
+#include <string.h>
+#include <sys/ioctl.h>
+#include <sys/time.h>
+#include <sys/utsname.h>
+#include <chrono>
+#include <ctime>
+#include <iostream>
 
 #include "common/libs/utils/tee_logging.h"
+#include "host/commands/metrics/host_receiver.h"
 #include "host/commands/metrics/metrics_defs.h"
+#include "host/commands/metrics/proto/cf_metrics_proto.h"
 #include "host/libs/config/cuttlefish_config.h"
+#include "host/libs/msg_queue/msg_queue.h"
+#include "host/libs/vm_manager/crosvm_manager.h"
+#include "host/libs/vm_manager/qemu_manager.h"
 
 using cuttlefish::MetricsExitCodes;
+using std::cout;
+using std::endl;
+using std::chrono::duration_cast;
+using std::chrono::milliseconds;
+using std::chrono::seconds;
+using std::chrono::system_clock;
 
 int main(int argc, char** argv) {
-  ::android::base::InitLogging(argv, android::base::StderrLogger);
   google::ParseCommandLineFlags(&argc, &argv, true);
-
+  ::android::base::InitLogging(argv, android::base::StderrLogger);
   auto config = cuttlefish::CuttlefishConfig::Get();
-
   CHECK(config) << "Could not open cuttlefish config";
-
   auto instance = config->ForDefaultInstance();
   auto metrics_log_path = instance.PerInstanceLogPath("metrics.log");
-
-  if (config->run_as_daemon()) {
+  if (instance.run_as_daemon()) {
     android::base::SetLogger(
         cuttlefish::LogToFiles({metrics_log_path, instance.launcher_log_path()}));
   } else {
@@ -42,15 +60,17 @@
         cuttlefish::LogToStderrAndFiles(
             {metrics_log_path, instance.launcher_log_path()}));
   }
-
   if (config->enable_metrics() != cuttlefish::CuttlefishConfig::kYes) {
     LOG(ERROR) << "metrics not enabled, but metrics were launched.";
     return cuttlefish::MetricsExitCodes::kInvalidHostConfiguration;
   }
 
-  while (true) {
-    // do nothing
-    sleep(std::numeric_limits<unsigned int>::max());
+  cuttlefish::MetricsHostReceiver host_receiver(*config);
+  if (!host_receiver.Initialize()) {
+    LOG(ERROR) << "metrics host_receiver failed to init";
+    return cuttlefish::MetricsExitCodes::kMetricsError;
   }
+  LOG(INFO) << "Metrics started";
+  host_receiver.Join();
   return cuttlefish::MetricsExitCodes::kMetricsError;
 }
diff --git a/host/commands/metrics/proto/Android.bp b/host/commands/metrics/proto/Android.bp
index 250abcf..ad99ae0 100644
--- a/host/commands/metrics/proto/Android.bp
+++ b/host/commands/metrics/proto/Android.bp
@@ -21,7 +21,10 @@
     vendor_available: true,
 
     srcs: [
-        "*.proto",
+        "cf_log.proto",
+        "cf_metrics_event.proto",
+        "clientanalytics.proto",
+        "common.proto",
     ],
 
     shared_libs: [
diff --git a/host/commands/metrics/proto/cf_log.proto b/host/commands/metrics/proto/cf_log.proto
index 46659fb..318b68d 100644
--- a/host/commands/metrics/proto/cf_log.proto
+++ b/host/commands/metrics/proto/cf_log.proto
@@ -1,3 +1,18 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 syntax = "proto2";
 
 package cuttlefish;
@@ -28,9 +43,6 @@
   // The type of device this log event originated from.
   optional DeviceType device_type = 2;
 
-  // The identifier for this device.
-  optional string device_id = 3;
-
   // The identifier for this session.
   optional string session_id = 6;
 
diff --git a/host/commands/metrics/proto/cf_metrics_event.proto b/host/commands/metrics/proto/cf_metrics_event.proto
index 7129c23..a7d7642 100644
--- a/host/commands/metrics/proto/cf_metrics_event.proto
+++ b/host/commands/metrics/proto/cf_metrics_event.proto
@@ -1,3 +1,18 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 syntax = "proto2";
 
 package cuttlefish;
diff --git a/host/commands/metrics/proto/cf_metrics_proto.h b/host/commands/metrics/proto/cf_metrics_proto.h
new file mode 100644
index 0000000..49b4f85
--- /dev/null
+++ b/host/commands/metrics/proto/cf_metrics_proto.h
@@ -0,0 +1,28 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// pragma is used here to disable the warnings emitted from the protobuf
+// headers. By adding #pragma before including cf_log.pb.h, it suppresses
+// protobuf warnings, but allows the rest of the files to continue using
+// the current flags.
+// This file should be included instead of directly including cf_log.pb.h
+
+#pragma GCC system_header
+
+#include <cf_log.pb.h>
+#include <cf_metrics_event.pb.h>
+#include <clientanalytics.pb.h>
+#include <common.pb.h>
diff --git a/host/commands/metrics/proto/clientanalytics.proto b/host/commands/metrics/proto/clientanalytics.proto
index 955a15c..f0ff898 100644
--- a/host/commands/metrics/proto/clientanalytics.proto
+++ b/host/commands/metrics/proto/clientanalytics.proto
@@ -1,3 +1,18 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 syntax = "proto2";
 
 message LogRequest {
@@ -5,6 +20,7 @@
   optional int32 log_source = 2;
   optional int64 request_time_ms = 4;
   repeated LogEvent log_event = 3;
+  optional string log_source_name = 5;
 }
 
 message ClientInfo {
diff --git a/host/commands/metrics/proto/common.proto b/host/commands/metrics/proto/common.proto
index 6ad0380..11e6a00 100644
--- a/host/commands/metrics/proto/common.proto
+++ b/host/commands/metrics/proto/common.proto
@@ -1,3 +1,18 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 syntax = "proto2";
 
 message Duration {
diff --git a/host/commands/metrics/utils.cc b/host/commands/metrics/utils.cc
new file mode 100644
index 0000000..072ba4d
--- /dev/null
+++ b/host/commands/metrics/utils.cc
@@ -0,0 +1,259 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include <android-base/logging.h>
+#include <android-base/strings.h>
+#include <curl/curl.h>
+#include <gflags/gflags.h>
+#include <json/json.h>
+#include <net/if.h>
+#include <netinet/in.h>
+#include <string.h>
+#include <sys/ioctl.h>
+#include <sys/time.h>
+#include <sys/utsname.h>
+#include <chrono>
+#include <ctime>
+#include <iostream>
+
+#include "common/libs/utils/tee_logging.h"
+#include "host/commands/metrics/metrics_defs.h"
+#include "host/commands/metrics/proto/cf_metrics_proto.h"
+#include "host/commands/metrics/utils.h"
+#include "host/libs/config/cuttlefish_config.h"
+#include "host/libs/vm_manager/crosvm_manager.h"
+#include "host/libs/vm_manager/qemu_manager.h"
+
+using cuttlefish::MetricsExitCodes;
+
+namespace metrics {
+
+static std::string hashing(std::string input) {
+  const std::hash<std::string> hasher;
+  return std::to_string(hasher(input));
+}
+
+cuttlefish::MetricsEvent::OsType osType() {
+  struct utsname buf;
+  if (uname(&buf) != 0) {
+    LOG(ERROR) << "failed to retrieve system information";
+    return cuttlefish::MetricsEvent::CUTTLEFISH_OS_TYPE_UNSPECIFIED;
+  }
+  std::string sysname(buf.sysname);
+  std::string machine(buf.machine);
+
+  if (sysname != "Linux") {
+    return cuttlefish::MetricsEvent::CUTTLEFISH_OS_TYPE_UNSPECIFIED;
+  }
+  if (machine == "x86_64") {
+    return cuttlefish::MetricsEvent::CUTTLEFISH_OS_TYPE_LINUX_X86_64;
+  }
+  if (machine == "x86") {
+    return cuttlefish::MetricsEvent::CUTTLEFISH_OS_TYPE_LINUX_X86;
+  }
+  if (machine == "aarch64" || machine == "arm64") {
+    return cuttlefish::MetricsEvent::CUTTLEFISH_OS_TYPE_LINUX_AARCH64;
+  }
+  if (machine[0] == 'a') {
+    return cuttlefish::MetricsEvent::CUTTLEFISH_OS_TYPE_LINUX_AARCH32;
+  }
+  return cuttlefish::MetricsEvent::CUTTLEFISH_OS_TYPE_UNSPECIFIED;
+}
+
+std::string sessionId(uint64_t now_ms) {
+  uint64_t now_day = now_ms / 1000 / 60 / 60 / 24;
+  return hashing(macAddress() + std::to_string(now_day));
+}
+
+std::string cfVersion() {
+  // TODO: per ellisr@ leave empty for now
+  return "";
+}
+
+std::string osVersion() {
+  struct utsname buf = {};
+  if (uname(&buf) != 0) {
+    LOG(ERROR) << "failed to retrieve system information";
+    return "";
+  }
+  return std::string(buf.release);
+}
+
+std::string macAddress() {
+  int sock = socket(AF_INET, SOCK_DGRAM, IPPROTO_IP);
+  if (sock == -1) {
+    LOG(ERROR) << "couldn't connect to socket";
+    return "";
+  }
+
+  char buf2[1024];
+  struct ifconf ifc;
+  ifc.ifc_len = sizeof(buf2);
+  ifc.ifc_buf = buf2;
+  if (ioctl(sock, SIOCGIFCONF, &ifc) == -1) {
+    LOG(ERROR) << "couldn't connect to socket";
+    return "";
+  }
+
+  struct ifreq* it = ifc.ifc_req;
+  const struct ifreq* const end = it + (ifc.ifc_len / sizeof(struct ifreq));
+
+  unsigned char mac_address[6] = {0};
+  struct ifreq ifr;
+  for (; it != end; ++it) {
+    strcpy(ifr.ifr_name, it->ifr_name);
+    if (ioctl(sock, SIOCGIFFLAGS, &ifr) != 0) {
+      LOG(ERROR) << "couldn't connect to socket";
+      return "";
+    }
+    if (ifr.ifr_flags & IFF_LOOPBACK) {
+      continue;
+    }
+    if (ioctl(sock, SIOCGIFHWADDR, &ifr) == 0) {
+      memcpy(mac_address, ifr.ifr_hwaddr.sa_data, 6);
+      break;
+    }
+  }
+
+  char mac[100];
+  snprintf(mac, sizeof(mac), "%02x:%02x:%02x:%02x:%02x:%02x", mac_address[0],
+           mac_address[1], mac_address[2], mac_address[3], mac_address[4], mac_address[5]);
+  return mac;
+}
+
+std::string company() {
+  // TODO: per ellisr@ leave hard-coded for now
+  return "GOOGLE";
+}
+
+cuttlefish::MetricsEvent::VmmType vmmManager() {
+  auto config = cuttlefish::CuttlefishConfig::Get();
+  CHECK(config) << "Could not open cuttlefish config";
+  auto vmm = config->vm_manager();
+  if (vmm == cuttlefish::vm_manager::CrosvmManager::name()) {
+    return cuttlefish::MetricsEvent::CUTTLEFISH_VMM_TYPE_CROSVM;
+  }
+  if (vmm == cuttlefish::vm_manager::QemuManager::name()) {
+    return cuttlefish::MetricsEvent::CUTTLEFISH_VMM_TYPE_QEMU;
+  }
+  return cuttlefish::MetricsEvent::CUTTLEFISH_VMM_TYPE_UNSPECIFIED;
+}
+
+std::string vmmVersion() {
+  // TODO: per ellisr@ leave empty for now
+  return "";
+}
+
+uint64_t epochTimeMs() {
+  auto now = std::chrono::system_clock::now().time_since_epoch();
+  uint64_t milliseconds_since_epoch =
+      std::chrono::duration_cast<std::chrono::milliseconds>(now).count();
+  return milliseconds_since_epoch;
+}
+
+cuttlefish::CuttlefishLogEvent* sampleEvent() {
+  cuttlefish::CuttlefishLogEvent* event = new cuttlefish::CuttlefishLogEvent();
+  event->set_device_type(
+      cuttlefish::CuttlefishLogEvent::CUTTLEFISH_DEVICE_TYPE_HOST);
+  return event;
+}
+
+std::string protoToStr(LogEvent* event) {
+  std::string output;
+  if (!event->SerializeToString(&output)) {
+    LOG(ERROR) << "failed to serialize proto LogEvent";
+  }
+  return output;
+}
+
+size_t curl_out_writer([[maybe_unused]] char* response, size_t size,
+                       size_t nmemb, [[maybe_unused]] void* userdata) {
+  return size * nmemb;
+}
+
+MetricsExitCodes postReq(std::string output, metrics::ClearcutServer server) {
+  const char *clearcut_scheme, *clearcut_host, *clearcut_path, *clearcut_port;
+  switch (server) {
+    case metrics::kLocal:
+      clearcut_scheme = "http";
+      clearcut_host = "localhost";
+      clearcut_path = "/log";
+      clearcut_port = "27910";
+      break;
+    case metrics::kStaging:
+      clearcut_scheme = "https";
+      clearcut_host = "play.googleapis.com";
+      clearcut_path = "/staging/log";
+      clearcut_port = "443";
+      break;
+    case metrics::kProd:
+      clearcut_scheme = "https";
+      clearcut_host = "play.googleapis.com";
+      clearcut_path = "/log";
+      clearcut_port = "443";
+      break;
+    default:
+      return cuttlefish::kInvalidHostConfiguration;
+  }
+
+  CURLU* url = curl_url();
+  CURLUcode urc = curl_url_set(url, CURLUPART_SCHEME, clearcut_scheme, 0);
+  if (urc != 0) {
+    LOG(ERROR) << "failed to set url CURLUPART_SCHEME";
+    return cuttlefish::kMetricsError;
+  }
+  urc = curl_url_set(url, CURLUPART_HOST, clearcut_host, 0);
+  if (urc != 0) {
+    LOG(ERROR) << "failed to set url CURLUPART_HOST";
+    return cuttlefish::kMetricsError;
+  }
+  urc = curl_url_set(url, CURLUPART_PATH, clearcut_path, 0);
+  if (urc != 0) {
+    LOG(ERROR) << "failed to set url CURLUPART_PATH";
+    return cuttlefish::kMetricsError;
+  }
+  urc = curl_url_set(url, CURLUPART_PORT, clearcut_port, 0);
+  if (urc != 0) {
+    LOG(ERROR) << "failed to set url CURLUPART_PORT";
+    return cuttlefish::kMetricsError;
+  }
+  curl_global_init(CURL_GLOBAL_ALL);
+  CURL* curl = curl_easy_init();
+  if (curl) {
+    curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, &curl_out_writer);
+    curl_easy_setopt(curl, CURLOPT_SSL_VERIFYPEER, 0L);
+    curl_easy_setopt(curl, CURLOPT_CURLU, url);
+    curl_easy_setopt(curl, CURLOPT_POSTFIELDS, output.c_str());
+    CURLcode rc = curl_easy_perform(curl);
+    long http_code = 0;
+    curl_easy_getinfo(curl, CURLINFO_RESPONSE_CODE, &http_code);
+    bool ok = (http_code == 200 && rc != CURLE_ABORTED_BY_CALLBACK);
+    if (ok) {
+      LOG(INFO) << "Metrics posted to ClearCut";
+    } else {
+      LOG(ERROR) << "Metrics message failed: [" << output << "]";
+      LOG(ERROR) << "http error code: " << http_code;
+      if (rc != CURLE_OK) {
+        LOG(ERROR) << "curl error code: " << rc << " | " << curl_easy_strerror(rc);
+      }
+    }
+    curl_easy_cleanup(curl);
+    if (!ok) { curl_url_cleanup(url); curl_global_cleanup(); return cuttlefish::kMetricsError; }
+  }
+  curl_url_cleanup(url);
+  curl_global_cleanup();
+  return cuttlefish::kSuccess;
+}
+}  // namespace metrics
diff --git a/host/commands/metrics/utils.h b/host/commands/metrics/utils.h
new file mode 100644
index 0000000..a9ebe6b
--- /dev/null
+++ b/host/commands/metrics/utils.h
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#pragma once
+
+#include <string.h>
+#include "host/commands/metrics/proto/cf_metrics_proto.h"
+
+namespace metrics {
+enum ClearcutServer : int {
+  kLocal = 0,
+  kStaging = 1,
+  kProd = 2,
+};
+
+cuttlefish::MetricsEvent::OsType osType();
+std::string osVersion();
+std::string sessionId(uint64_t now);
+std::string cfVersion();
+std::string macAddress();
+std::string company();
+cuttlefish::MetricsEvent::VmmType vmmManager();
+std::string vmmVersion();
+uint64_t epochTimeMs();
+std::string protoToStr(LogEvent* event);
+cuttlefish::MetricsExitCodes postReq(std::string output, ClearcutServer server);
+}  // namespace metrics
diff --git a/host/commands/mkenvimage_slim/mkenvimage_slim.cc b/host/commands/mkenvimage_slim/mkenvimage_slim.cc
index ebd1b9d..704d429 100644
--- a/host/commands/mkenvimage_slim/mkenvimage_slim.cc
+++ b/host/commands/mkenvimage_slim/mkenvimage_slim.cc
@@ -87,6 +87,10 @@
 
 int main(int argc, char** argv) {
   auto res = cuttlefish::MkenvimageSlimMain(argc, argv);
-  CHECK(res.ok()) << "mkenvimage_slim failed: \n" << res.error();
-  return *res;
+  if (res.ok()) {
+    return *res;
+  }
+  LOG(ERROR) << "mkenvimage_slim failed: \n" << res.error().Message();
+  LOG(ERROR) << "mkenvimage_slim failed: \n" << res.error().Trace();
+  abort();
 }
diff --git a/host/commands/modem_simulator/channel_monitor.cpp b/host/commands/modem_simulator/channel_monitor.cpp
index 941a8b4..31bc5f3 100644
--- a/host/commands/modem_simulator/channel_monitor.cpp
+++ b/host/commands/modem_simulator/channel_monitor.cpp
@@ -45,7 +45,7 @@
   if (response.back() != '\r') {
     response += '\r';
   }
-  LOG(DEBUG) << " AT< " << response;
+  LOG(VERBOSE) << " AT< " << response;
 
   std::lock_guard<std::mutex> autolock(const_cast<Client*>(this)->write_mutex);
   client_fd->Write(response.data(), response.size());
@@ -153,13 +153,13 @@
     if (r_pos != std::string::npos) {
       auto command = commands.substr(pos, r_pos - pos);
       if (command.size() > 0) {  // "\r\r" ?
-        LOG(DEBUG) << "AT> " << command;
+        LOG(VERBOSE) << "AT> " << command;
         modem_->DispatchCommand(client, command);
       }
       pos = r_pos + 1;  // Skip '\r'
     } else if (pos < commands.length()) {  // Incomplete command
       incomplete_command = commands.substr(pos);
-      LOG(DEBUG) << "incomplete command: " << incomplete_command;
+      LOG(VERBOSE) << "incomplete command: " << incomplete_command;
     }
   }
 }
diff --git a/host/commands/modem_simulator/modem_simulator.cpp b/host/commands/modem_simulator/modem_simulator.cpp
index 71c9ada..ceb94a1 100644
--- a/host/commands/modem_simulator/modem_simulator.cpp
+++ b/host/commands/modem_simulator/modem_simulator.cpp
@@ -158,4 +158,11 @@
   }
 }
 
+bool ModemSimulator::SetPhoneNumber(std::string_view number) {
+  if (sim_service_) {
+    return sim_service_->SetPhoneNumber(number);
+  }
+  return false;
+}
+
 }  // namespace cuttlefish
diff --git a/host/commands/modem_simulator/modem_simulator.h b/host/commands/modem_simulator/modem_simulator.h
index 9340e49..5ab68b9 100644
--- a/host/commands/modem_simulator/modem_simulator.h
+++ b/host/commands/modem_simulator/modem_simulator.h
@@ -47,7 +47,7 @@
   }
 
   void SetTimeZone(std::string timezone);
-
+  bool SetPhoneNumber(std::string_view number);
  private:
   int32_t modem_id_;
   std::unique_ptr<ChannelMonitor> channel_monitor_;
diff --git a/host/commands/modem_simulator/network_service.cpp b/host/commands/modem_simulator/network_service.cpp
index 4d102fa..2334eb3 100644
--- a/host/commands/modem_simulator/network_service.cpp
+++ b/host/commands/modem_simulator/network_service.cpp
@@ -319,13 +319,13 @@
 
 bool NetworkService::WakeupFromSleep() {
   // It has not called once yet
-  if (android_last_signal_time_ == 0) {
+  if (android_last_signal_time_.load() == 0) {
       return false;
   }
   // Heuristics: if guest has not asked for signal strength
   // for 2 minutes, we assume it is caused by host sleep
   time_t now = time(0);
-  const bool wakeup_from_sleep = (now > android_last_signal_time_ + 120);
+  const bool wakeup_from_sleep = (now > android_last_signal_time_.load() + 120);
   return wakeup_from_sleep;
 }
 
@@ -416,11 +416,10 @@
 void NetworkService::HandleSignalStrength(const Client& client) {
   std::vector<std::string> responses;
   std::stringstream ss;
-
+  bool expected = true;
   if (WakeupFromSleep()) {
     misc_service_->TimeUpdate();
-  } else if (first_signal_strength_request_) {
-    first_signal_strength_request_ = false;
+  } else if (first_signal_strength_request_.compare_exchange_strong(expected, false)) {
     misc_service_->TimeUpdate();
   }
 
diff --git a/host/commands/modem_simulator/network_service.h b/host/commands/modem_simulator/network_service.h
index 37808fb..171cc11 100644
--- a/host/commands/modem_simulator/network_service.h
+++ b/host/commands/modem_simulator/network_service.h
@@ -15,6 +15,7 @@
 
 #pragma once
 
+#include <atomic>
 #include <ctime>
 
 #include "host/commands/modem_simulator/data_service.h"
@@ -290,8 +291,8 @@
   int getModemTechFromPrefer(int preferred_mask);
   ModemTechnology getTechFromNetworkType(NetworkRegistrationStatus::AccessTechnoloy act);
 
-  bool first_signal_strength_request_;  // For time update
-  time_t android_last_signal_time_;
+  std::atomic<bool> first_signal_strength_request_;  // For time update
+  std::atomic<time_t> android_last_signal_time_;
 
   class KeepSignalStrengthChangingLoop {
    public:
diff --git a/host/commands/modem_simulator/pdu_parser.cpp b/host/commands/modem_simulator/pdu_parser.cpp
index 5cc425e..f3395de 100644
--- a/host/commands/modem_simulator/pdu_parser.cpp
+++ b/host/commands/modem_simulator/pdu_parser.cpp
@@ -22,7 +22,6 @@
 #include <sstream>
 #include <string>
 #include <thread>
-
 namespace cuttlefish {
 
 static const std::string kWithoutServiceCenterAddress     = "00";
@@ -100,7 +99,7 @@
     int offset = ud_length / 8;
     pos -= offset * 2;
   } else if (data_code_scheme_ == "08") {  // GSM_UCS2
-    pos += ud_length;
+    pos += ud_length * 2 + 2;
   } else {
     pos += ud_length * 2 + 2;
   }
@@ -274,6 +273,24 @@
   return dst;
 }
 
+// This function is a reverse of the function PDUParser::BCDToString
+std::string PDUParser::StringToBCD(std::string_view data) {
+  std::string dst;
+  if (data.empty()) {
+    return "";
+  }
+  int length = data.size();
+  for (int i = 0; i < length; i += 2) {
+    if (i + 1 < length) {
+      dst += data[i + 1];
+    } else {
+      dst += 'F';
+    }
+    dst += data[i];
+  }
+  return dst;
+}
+
 std::string PDUParser::GetCurrentTimeStamp() {
   std::string time_stamp;
   auto now = std::time(0);
diff --git a/host/commands/modem_simulator/pdu_parser.h b/host/commands/modem_simulator/pdu_parser.h
index 5618ba7..b6f48be 100644
--- a/host/commands/modem_simulator/pdu_parser.h
+++ b/host/commands/modem_simulator/pdu_parser.h
@@ -32,6 +32,7 @@
   std::string GetPhoneNumberFromAddress();
 
   static std::string BCDToString(std::string& data);
+  static std::string StringToBCD(std::string_view data);
 
  private:
   bool DecodePDU(std::string& pdu);
diff --git a/host/commands/modem_simulator/sim_service.cpp b/host/commands/modem_simulator/sim_service.cpp
index e6eced5..2b7f3c2 100644
--- a/host/commands/modem_simulator/sim_service.cpp
+++ b/host/commands/modem_simulator/sim_service.cpp
@@ -349,6 +349,10 @@
                     [this](const Client& client, std::string& cmd) {
                     this->HandleSimAuthentication(client, cmd);
                     }),
+      CommandHandler("+REMOTEUPADATEPHONENUMBER",
+                    [this](const Client& client, std::string& cmd) {
+                      this->HandlePhoneNumberUpdate(client,cmd);
+                    }),
   };
   return (command_handlers);
 }
@@ -725,27 +729,8 @@
 }
 
 std::string SimService::GetPhoneNumber() {
-  XMLElement *root = sim_file_system_.GetRootElement();
-  if (!root) return "";
-
-  auto path = SimFileSystem::GetUsimEFPath(SimFileSystem::EFId::EF_MSISDN);
-
-  size_t pos = 0;
-  auto parent = root;
-  while (pos < path.length()) {
-    std::string sub_path(path.substr(pos, 4));
-    auto app = SimFileSystem::FindAttribute(parent, "path", sub_path);
-    if (!app) return "";
-    pos += 4;
-    parent = app;
-  }
-
-  XMLElement* ef = SimFileSystem::FindAttribute(parent, "id", "6F40");
-  if (!ef) return "";
-
-  XMLElement *final = SimFileSystem::FindAttribute(ef, "cmd", "B2");;
+  XMLElement* final = GetPhoneNumberElement();
   if (!final) return "";
-
   std::string record = final->GetText();
   int footerOffset = record.length() - kFooterSizeBytes * 2;
   int numberLength = (record[footerOffset] - '0') * 16 +
@@ -764,6 +749,33 @@
   return PDUParser::BCDToString(bcd_number);
 }
 
+bool SimService::SetPhoneNumber(std::string_view number) {
+  if (number.size() > kMaxNumberSizeBytes) {  // Invalid number length
+    return false;
+  }
+  XMLElement* elem = GetPhoneNumberElement();
+  if (!elem) return false;
+  std::string record = elem->GetText();
+  int footerOffset = record.length() - kFooterSizeBytes * 2;
+  std::string bcd_number = PDUParser::StringToBCD(number);
+  int newLength = 0;
+  // Skip Type(91) and Country Code(68)
+  if (number.size() == 12 && number.compare("68") == 0) {
+    record.replace(footerOffset + 6, bcd_number.size(), bcd_number);
+    newLength = 8;
+  } else { // Skip Type(81)
+    record.replace(footerOffset + 4, bcd_number.size(), bcd_number);
+    newLength = (bcd_number.size() + 2) / 2;
+  }
+
+  record[footerOffset] = '0' + newLength / 16;
+  record[footerOffset + 1] = '0' + (newLength % 16);
+
+  elem->SetText(record.c_str());
+  sim_file_system_.doc.SaveFile(sim_file_system_.file_path.c_str());
+  return true;
+}
+
 SimService::SimStatus SimService::GetSimStatus() const {
   return sim_status_;
 }
@@ -993,6 +1005,28 @@
   }
 }
 
+XMLElement* SimService::GetPhoneNumberElement() {
+  XMLElement* root = sim_file_system_.GetRootElement();
+  if (!root) return nullptr;
+
+  auto path = SimFileSystem::GetUsimEFPath(SimFileSystem::EFId::EF_MSISDN);
+
+  size_t pos = 0;
+  auto parent = root;
+  while (pos < path.length()) {
+    std::string sub_path(path.substr(pos, 4));
+    auto app = SimFileSystem::FindAttribute(parent, "path", sub_path);
+    if (!app) return nullptr;
+    pos += 4;
+    parent = app;
+  }
+
+  XMLElement* ef = SimFileSystem::FindAttribute(parent, "id", "6F40");
+  if (!ef) return nullptr;
+
+  return SimFileSystem::FindAttribute(ef, "cmd", "B2");
+}
+
 bool SimService::checkPin1AndAdjustSimStatus(std::string_view pin) {
   if (pin1_status_.VerifyPIN(pin) == true) {
     sim_status_ = SIM_STATUS_READY;
@@ -1693,5 +1727,12 @@
   client.SendCommandResponse(responses);
 }
 
+void SimService::HandlePhoneNumberUpdate(const Client& client,
+                                         const std::string& command) {
+  (void)client;
+  CommandParser cmd(command);
+  cmd.SkipWhiteSpace();
+  SetPhoneNumber(cmd.GetNextStr(' '));
+}
 
 }  // namespace cuttlefish
diff --git a/host/commands/modem_simulator/sim_service.h b/host/commands/modem_simulator/sim_service.h
index 7cccfcb..889a5fb 100644
--- a/host/commands/modem_simulator/sim_service.h
+++ b/host/commands/modem_simulator/sim_service.h
@@ -57,13 +57,15 @@
                                    const std::string& command);
   void HandleSimAuthentication(const Client& client,
                                    const std::string& command);
-
+  void HandlePhoneNumberUpdate(const Client& client,
+                               const std::string& command);
   void SavePinStateToIccProfile();
   void SaveFacilityLockToIccProfile();
   bool IsFDNEnabled();
   bool IsFixedDialNumber(std::string_view number);
   XMLElement* GetIccProfile();
   std::string GetPhoneNumber();
+  bool SetPhoneNumber(std::string_view number);
 
   enum SimStatus {
     SIM_STATUS_ABSENT = 0,
@@ -82,7 +84,7 @@
   void InitializeSimFileSystemAndSimState();
   void InitializeFacilityLock();
   void OnSimStatusChanged();
-
+  XMLElement* GetPhoneNumberElement();
   NetworkService* network_service_;
 
   /* SimStatus */
diff --git a/host/commands/modem_simulator/unittest/pdu_parser_test.cpp b/host/commands/modem_simulator/unittest/pdu_parser_test.cpp
index 4129aa1..5a93f90 100644
--- a/host/commands/modem_simulator/unittest/pdu_parser_test.cpp
+++ b/host/commands/modem_simulator/unittest/pdu_parser_test.cpp
@@ -21,6 +21,10 @@
   std::string pdu = "0001000D91688118109844F0000017AFD7903AB55A9BBA69D639D4ADCBF99E3DCCAE9701";
   cuttlefish::PDUParser smspdu(pdu);
   EXPECT_TRUE(smspdu.IsValidPDU());
+
+  std::string pdu_unicode = "000100048145540008024F60";
+  cuttlefish::PDUParser pduUnicode(pdu_unicode);
+  EXPECT_TRUE(pduUnicode.IsValidPDU());
 }
 
 TEST(PDUParserTest, IsValidPDU_false) {
diff --git a/host/commands/modem_simulator/unittest/service_test.cpp b/host/commands/modem_simulator/unittest/service_test.cpp
index 4bcbcb0..2d6289f 100644
--- a/host/commands/modem_simulator/unittest/service_test.cpp
+++ b/host/commands/modem_simulator/unittest/service_test.cpp
@@ -22,6 +22,7 @@
 
 #include "common/libs/fs/shared_select.h"
 #include "common/libs/utils/files.h"
+#include "host/commands/assemble_cvd/flags_defaults.h"
 #include "host/commands/modem_simulator/channel_monitor.h"
 #include "host/commands/modem_simulator/device_config.h"
 #include "host/commands/modem_simulator/modem_simulator.h"
@@ -43,13 +44,13 @@
       cuttlefish::CuttlefishConfig tmp_config_obj;
       std::string config_file = tmp_test_dir + "/.cuttlefish_config.json";
       tmp_config_obj.set_root_dir(tmp_test_dir + "/cuttlefish");
-      tmp_config_obj.set_ril_dns("8.8.8.8");
       std::vector<int> instance_nums;
       for (int i = 0; i < 1; i++) {
         instance_nums.push_back(cuttlefish::GetInstance() + i);
       }
       for (const auto &num : instance_nums) {
-        tmp_config_obj.ForInstance(num);  // Trigger creation in map
+        auto instance = tmp_config_obj.ForInstance(num);  // Trigger creation in map
+        instance.set_ril_dns(CF_DEFAULTS_RIL_DNS);
       }
 
       for (auto instance : tmp_config_obj.Instances()) {
diff --git a/host/commands/openwrt_control_server/Android.bp b/host/commands/openwrt_control_server/Android.bp
new file mode 100644
index 0000000..669431d
--- /dev/null
+++ b/host/commands/openwrt_control_server/Android.bp
@@ -0,0 +1,107 @@
+// Copyright (C) 2023 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+cc_library_host_static {
+    name: "libopenwrt_control_server",
+    shared_libs: [
+        "libprotobuf-cpp-full",
+        "libgrpc++_unsecure",
+    ],
+    static_libs: [
+        "libgflags",
+    ],
+    cflags: [
+        "-Wno-unused-parameter",
+    ],
+    generated_headers: [
+        "OpenwrtControlServerProto_h",
+    ],
+    generated_sources: [
+        "OpenwrtControlServerProto_cc",
+    ],
+    export_generated_headers: [
+        "OpenwrtControlServerProto_h",
+    ],
+    defaults: ["cuttlefish_host"],
+    include_dirs: [
+        "external/grpc-grpc/include",
+        "external/protobuf/src",
+    ],
+}
+
+cc_binary_host {
+    name: "openwrt_control_server",
+    shared_libs: [
+        "libbase",
+        "libcuttlefish_utils",
+        "libprotobuf-cpp-full",
+        "libgrpc++_unsecure",
+    ],
+    static_libs: [
+        "libcuttlefish_host_config",
+        "libgflags",
+        "libopenwrt_control_server",
+        "libgrpc++_reflection",
+    ],
+    srcs: [
+        "main.cpp",
+    ],
+    cflags: [
+        "-Wno-unused-parameter",
+    ],
+    defaults: ["cuttlefish_host"],
+}
+
+filegroup {
+    name: "OpenwrtControlServerProto",
+    srcs: [
+        "openwrt_control.proto",
+    ],
+}
+
+genrule {
+    name: "OpenwrtControlServerProto_h",
+    tools: [
+        "aprotoc",
+        "protoc-gen-grpc-cpp-plugin",
+    ],
+    cmd: "$(location aprotoc) -Idevice/google/cuttlefish/host/commands/openwrt_control_server -Iexternal/protobuf/src --plugin=protoc-gen-grpc=$(location protoc-gen-grpc-cpp-plugin) $(in) --grpc_out=$(genDir) --cpp_out=$(genDir)",
+    srcs: [
+        ":OpenwrtControlServerProto",
+    ],
+    out: [
+        "openwrt_control.grpc.pb.h",
+        "openwrt_control.pb.h",
+    ],
+}
+
+genrule {
+    name: "OpenwrtControlServerProto_cc",
+    tools: [
+        "aprotoc",
+        "protoc-gen-grpc-cpp-plugin",
+    ],
+    cmd: "$(location aprotoc) -Idevice/google/cuttlefish/host/commands/openwrt_control_server -Iexternal/protobuf/src --plugin=protoc-gen-grpc=$(location protoc-gen-grpc-cpp-plugin) $(in) --grpc_out=$(genDir) --cpp_out=$(genDir)",
+    srcs: [
+        ":OpenwrtControlServerProto",
+    ],
+    out: [
+        "openwrt_control.grpc.pb.cc",
+        "openwrt_control.pb.cc",
+    ],
+}
diff --git a/host/commands/openwrt_control_server/main.cpp b/host/commands/openwrt_control_server/main.cpp
new file mode 100644
index 0000000..1a32f52
--- /dev/null
+++ b/host/commands/openwrt_control_server/main.cpp
@@ -0,0 +1,116 @@
+/*
+ *
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+#include <fstream>
+#include <iostream>
+#include <memory>
+#include <regex>
+#include <string>
+
+#include <gflags/gflags.h>
+#include <grpcpp/ext/proto_server_reflection_plugin.h>
+#include <grpcpp/grpcpp.h>
+#include <grpcpp/health_check_service_interface.h>
+
+#include "common/libs/utils/files.h"
+#include "common/libs/utils/result.h"
+#include "openwrt_control.grpc.pb.h"
+
+using google::protobuf::Empty;
+using grpc::Server;
+using grpc::ServerBuilder;
+using grpc::ServerContext;
+using grpc::Status;
+using openwrtcontrolserver::OpenwrtControlService;
+using openwrtcontrolserver::OpenwrtIpaddrReply;
+
+DEFINE_string(grpc_uds_path, "", "grpc_uds_path");
+
+DEFINE_bool(bridged_wifi_tap, false,
+            "True for using cvd-wtap-XX, false for using cvd-wifiap-XX");
+DEFINE_string(launcher_log_path, "", "File path for launcher.log");
+DEFINE_string(openwrt_log_path, "", "File path for crosvm_openwrt.log");
+
+namespace cuttlefish {
+
+class OpenwrtControlServiceImpl final : public OpenwrtControlService::Service {
+  Status OpenwrtIpaddr(ServerContext* context, const Empty* request,
+                       OpenwrtIpaddrReply* response) override {
+    // TODO(seungjaeyoo) : Find IP address from crosvm_openwrt.log when using
+    // cvd-wtap-XX after disabling DHCP inside OpenWRT in bridged_wifi_tap mode.
+    auto result = FindIpaddrLauncherLog();
+
+    response->set_is_error(!TypeIsSuccess(result));
+    if (TypeIsSuccess(result)) {
+      response->set_ipaddr(*result);
+    }
+    return Status::OK;
+  }
+
+  Result<std::string> FindIpaddrLauncherLog() {
+    if (!FileExists(FLAGS_launcher_log_path)) {
+      return CF_ERR("launcher.log doesn't exist");
+    }
+
+    std::regex re("wan_ipaddr=[0-9]+\\.[0-9]+\\.[0-9]+\\.[0-9]+");
+    std::smatch matches;
+    std::ifstream ifs(FLAGS_launcher_log_path);
+    std::string line, last_match;
+    while (std::getline(ifs, line)) {
+      if (std::regex_search(line, matches, re)) {
+        last_match = matches[0];
+      }
+    }
+
+    if (last_match.empty()) {
+      return CF_ERR("IP address is not found from launcher.log");
+    } else {
+      return last_match.substr(last_match.find('=') + 1);
+    }
+  }
+};
+
+}  // namespace cuttlefish
+
+void RunServer() {
+  std::string server_address("unix:" + FLAGS_grpc_uds_path);
+  cuttlefish::OpenwrtControlServiceImpl service;
+
+  grpc::EnableDefaultHealthCheckService(true);
+  grpc::reflection::InitProtoReflectionServerBuilderPlugin();
+  ServerBuilder builder;
+  // Listen on the given address without any authentication mechanism.
+  builder.AddListeningPort(server_address, grpc::InsecureServerCredentials());
+  // Register "service" as the instance through which we'll communicate with
+  // clients. In this case it corresponds to an *synchronous* service.
+  builder.RegisterService(&service);
+  // Finally assemble the server.
+  std::unique_ptr<Server> server(builder.BuildAndStart());
+  std::cout << "Server listening on " << server_address << std::endl;
+
+  // Wait for the server to shutdown. Note that some other thread must be
+  // responsible for shutting down the server for this call to ever return.
+  server->Wait();
+}
+
+int main(int argc, char** argv) {
+  ::gflags::ParseCommandLineFlags(&argc, &argv, true);
+  RunServer();
+
+  return 0;
+}
\ No newline at end of file
diff --git a/host/commands/openwrt_control_server/openwrt_control.proto b/host/commands/openwrt_control_server/openwrt_control.proto
new file mode 100644
index 0000000..a5af9e7
--- /dev/null
+++ b/host/commands/openwrt_control_server/openwrt_control.proto
@@ -0,0 +1,28 @@
+// Copyright (C) 2023 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package openwrtcontrolserver;
+
+import "google/protobuf/empty.proto";
+
+service OpenwrtControlService {
+  rpc OpenwrtIpaddr (google.protobuf.Empty) returns (OpenwrtIpaddrReply) {}
+}
+
+message OpenwrtIpaddrReply {
+  bool is_error = 1;
+  string ipaddr = 2;
+}
diff --git a/host/commands/powerwash_cvd/Android.bp b/host/commands/powerwash_cvd/Android.bp
index 5695f8b..e089156 100644
--- a/host/commands/powerwash_cvd/Android.bp
+++ b/host/commands/powerwash_cvd/Android.bp
@@ -23,16 +23,14 @@
         "powerwash_cvd.cc",
     ],
     shared_libs: [
-        "libext2_blkid",
         "libbase",
         "libcuttlefish_fs",
         "libcuttlefish_utils",
-        "libfruit",
         "libjsoncpp",
     ],
     static_libs: [
+        "libcuttlefish_command_util",
         "libcuttlefish_host_config",
-        "libcuttlefish_vm_manager",
         "libgflags",
     ],
     defaults: ["cuttlefish_host", "cuttlefish_libicuuc"],
diff --git a/host/commands/powerwash_cvd/powerwash_cvd.cc b/host/commands/powerwash_cvd/powerwash_cvd.cc
index b2e322b..6e886c0 100644
--- a/host/commands/powerwash_cvd/powerwash_cvd.cc
+++ b/host/commands/powerwash_cvd/powerwash_cvd.cc
@@ -14,150 +14,73 @@
  * limitations under the License.
  */
 
-#include <inttypes.h>
-#include <limits.h>
-#include <stdio.h>
-#include <stdint.h>
-#include <stdlib.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <sys/wait.h>
-#include <fcntl.h>
-#include <unistd.h>
-#include <signal.h>
-
-#include <algorithm>
+#include <cstdint>
 #include <cstdlib>
-#include <fstream>
-#include <iomanip>
-#include <memory>
-#include <sstream>
-#include <string>
-#include <vector>
 
 #include <gflags/gflags.h>
 #include <android-base/logging.h>
 
-#include "common/libs/fs/shared_buf.h"
 #include "common/libs/fs/shared_fd.h"
-#include "common/libs/fs/shared_select.h"
-#include "common/libs/utils/environment.h"
+#include "common/libs/utils/result.h"
 #include "host/commands/run_cvd/runner_defs.h"
+#include "host/libs/command_util/util.h"
 #include "host/libs/config/cuttlefish_config.h"
-#include "host/libs/vm_manager/vm_manager.h"
 
 DEFINE_int32(instance_num, cuttlefish::GetInstance(),
-             "Which instance to powerwash");
+             "Which instance to powerwash.");
 
-DEFINE_int32(wait_for_launcher, 30,
-             "How many seconds to wait for the launcher to respond to the status "
-             "command. A value of zero means wait indefinetly");
+DEFINE_int32(
+    wait_for_launcher, 30,
+    "How many seconds to wait for the launcher to respond to the status "
+    "command. A value of zero means wait indefinitely.");
 
-DEFINE_int32(boot_timeout, 1000, "How many seconds to wait for the device to "
-                                 "reboot.");
+DEFINE_int32(boot_timeout, 500,
+             "How many seconds to wait for the device to "
+             "reboot.");
 
 namespace cuttlefish {
 namespace {
 
-int PowerwashCvdMain(int argc, char** argv) {
-  ::android::base::InitLogging(argv, android::base::StderrLogger);
-  google::ParseCommandLineFlags(&argc, &argv, true);
+Result<void> PowerwashCvdMain() {
+  const CuttlefishConfig* config =
+      CF_EXPECT(CuttlefishConfig::Get(), "Failed to obtain config object");
+  SharedFD monitor_socket = CF_EXPECT(
+      GetLauncherMonitor(*config, FLAGS_instance_num, FLAGS_wait_for_launcher));
 
-  auto config = CuttlefishConfig::Get();
-  if (!config) {
-    LOG(ERROR) << "Failed to obtain config object";
-    return 1;
-  }
-
-  auto instance = config->ForInstance(FLAGS_instance_num);
-  auto monitor_path = instance.launcher_monitor_socket_path();
-  if (monitor_path.empty()) {
-    LOG(ERROR) << "No path to launcher monitor found";
-    return 2;
-  }
-  auto monitor_socket = SharedFD::SocketLocalClient(
-      monitor_path.c_str(), false, SOCK_STREAM, FLAGS_wait_for_launcher);
-  if (!monitor_socket->IsOpen()) {
-    LOG(ERROR) << "Unable to connect to launcher monitor at " << monitor_path
-               << ": " << monitor_socket->StrError();
-    return 3;
-  }
-  auto request = LauncherAction::kPowerwash;
-  auto bytes_sent = monitor_socket->Send(&request, sizeof(request), 0);
-  if (bytes_sent < 0) {
-    LOG(ERROR) << "Error sending launcher monitor the status command: "
-               << monitor_socket->StrError();
-    return 4;
-  }
-  // Perform a select with a timeout to guard against launcher hanging
-  SharedFDSet read_set;
-  read_set.Set(monitor_socket);
-  struct timeval timeout = {FLAGS_wait_for_launcher, 0};
-  int selected = Select(&read_set, nullptr, nullptr,
-                        FLAGS_wait_for_launcher <= 0 ? nullptr : &timeout);
-  if (selected < 0){
-    LOG(ERROR) << "Failed communication with the launcher monitor: "
-               << strerror(errno);
-    return 5;
-  }
-  if (selected == 0) {
-    LOG(ERROR) << "Timeout expired waiting for launcher monitor to respond";
-    return 6;
-  }
-  LauncherResponse response;
-  auto bytes_recv = monitor_socket->Recv(&response, sizeof(response), 0);
-  if (bytes_recv < 0) {
-    LOG(ERROR) << "Error receiving response from launcher monitor: "
-               << monitor_socket->StrError();
-    return 7;
-  }
   LOG(INFO) << "Requesting powerwash";
-  if (response != LauncherResponse::kSuccess) {
-    LOG(ERROR) << "Received '" << static_cast<char>(response)
-               << "' response from launcher monitor for powerwash request";
-    return 8;
-  }
+  CF_EXPECT(WriteLauncherAction(monitor_socket, LauncherAction::kPowerwash));
+  CF_EXPECT(WaitForRead(monitor_socket, FLAGS_wait_for_launcher));
+  LauncherResponse powerwash_response =
+      CF_EXPECT(ReadLauncherResponse(monitor_socket));
+  CF_EXPECT(
+      powerwash_response == LauncherResponse::kSuccess,
+      "Received `" << static_cast<char>(powerwash_response)
+                   << "` response from launcher monitor for powerwash request");
+
   LOG(INFO) << "Waiting for device to boot up again";
+  CF_EXPECT(WaitForRead(monitor_socket, FLAGS_boot_timeout));
+  RunnerExitCodes boot_exit_code = CF_EXPECT(ReadExitCode(monitor_socket));
+  CF_EXPECT(boot_exit_code != RunnerExitCodes::kVirtualDeviceBootFailed,
+            "Boot failed");
+  CF_EXPECT(boot_exit_code == RunnerExitCodes::kSuccess,
+            "Unknown response" << static_cast<int>(boot_exit_code));
 
-  read_set.Set(monitor_socket);
-  timeout = {FLAGS_boot_timeout, 0};
-  selected = Select(&read_set, nullptr, nullptr,
-                    FLAGS_boot_timeout <= 0 ? nullptr : &timeout);
-  if (selected < 0){
-    LOG(ERROR) << "Failed communication with the launcher monitor: "
-               << strerror(errno);
-    return 5;
-  }
-  if (selected == 0) {
-    LOG(ERROR) << "Timeout expired waiting for launcher monitor to respond";
-    return 6;
-  }
-
-  RunnerExitCodes exit_code;
-  bytes_recv = ReadExactBinary(monitor_socket, &exit_code);
-  if (bytes_recv < 0) {
-    LOG(ERROR) << "Error in stream response: " << monitor_socket->StrError();
-    return 9;
-  } else if (bytes_recv == 0) {
-    LOG(ERROR) << "Launcher socket closed unexpectedly";
-    return 10;
-  } else if (bytes_recv != sizeof(exit_code)) {
-    LOG(ERROR) << "Launcher response was too short";
-    return 11;
-  } else if (exit_code == RunnerExitCodes::kVirtualDeviceBootFailed) {
-    LOG(ERROR) << "Boot failed";
-    return 12;
-  } else if (exit_code != RunnerExitCodes::kSuccess) {
-    LOG(ERROR) << "Unknown response: " << (int) exit_code;
-    return 13;
-  }
   LOG(INFO) << "Powerwash successful";
-  return 0;
+  return {};
 }
 
 } // namespace
 } // namespace cuttlefish
 
 int main(int argc, char** argv) {
-  return cuttlefish::PowerwashCvdMain(argc, argv);
+  ::android::base::InitLogging(argv, android::base::StderrLogger);
+  google::ParseCommandLineFlags(&argc, &argv, true);
+
+  cuttlefish::Result<void> result = cuttlefish::PowerwashCvdMain();
+  if (!result.ok()) {
+    LOG(ERROR) << result.error().Message();
+    LOG(DEBUG) << result.error().Trace();
+    return EXIT_FAILURE;
+  }
+  return EXIT_SUCCESS;
 }
diff --git a/host/commands/process_restarter/Android.bp b/host/commands/process_restarter/Android.bp
new file mode 100644
index 0000000..106ef1d
--- /dev/null
+++ b/host/commands/process_restarter/Android.bp
@@ -0,0 +1,33 @@
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+cc_binary_host {
+    name: "process_restarter",
+    defaults: ["cuttlefish_host",],
+    srcs: ["main.cc",],
+    shared_libs: [
+        "libbase",
+        "libcuttlefish_utils",
+        "libgflags",
+        "libjsoncpp",
+    ],
+    static_libs: [
+        "libcuttlefish_host_config",
+    ],
+}
diff --git a/host/commands/process_restarter/main.cc b/host/commands/process_restarter/main.cc
new file mode 100644
index 0000000..6025169
--- /dev/null
+++ b/host/commands/process_restarter/main.cc
@@ -0,0 +1,111 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <signal.h>
+#include <stdlib.h>
+#include <sys/prctl.h>
+#include <sys/types.h>
+#include <sys/wait.h>
+#include <unistd.h>
+#include <cstdlib>
+#include <optional>
+
+#include <android-base/logging.h>
+#include <android-base/parseint.h>
+#include <gflags/gflags.h>
+
+#include "common/libs/utils/result.h"
+#include "host/libs/config/logging.h"
+
+DEFINE_bool(when_dumped, false, "restart when the process crashed");
+DEFINE_bool(when_killed, false, "restart when the process was killed");
+DEFINE_bool(when_exited_with_failure, false,
+            "restart when the process exited with a code !=0");
+DEFINE_int32(when_exited_with_code, -1,
+             "restart when the process exited with a specific code");
+
+namespace cuttlefish {
+namespace {
+
+static bool ShouldRestartProcess(siginfo_t const& info) {
+  if (info.si_code == CLD_DUMPED && FLAGS_when_dumped) {
+    return true;
+  }
+  if (info.si_code == CLD_KILLED && FLAGS_when_killed) {
+    return true;
+  }
+  if (info.si_code == CLD_EXITED && FLAGS_when_exited_with_failure &&
+      info.si_status != 0) {
+    return true;
+  }
+  if (info.si_code == CLD_EXITED &&
+      info.si_status == FLAGS_when_exited_with_code) {
+    return true;
+  }
+  return false;
+}
+
+Result<int> RunProcessRestarter(const char* exec_cmd, char** exec_args) {
+  LOG(VERBOSE) << "process_restarter starting";
+  siginfo_t infop;
+
+  do {
+    LOG(VERBOSE) << "Starting monitored process " << exec_cmd;
+    pid_t pid = fork();
+    CF_EXPECT(pid != -1, "fork failed (" << strerror(errno) << ")");
+    if (pid == 0) {                     // child process
+      prctl(PR_SET_PDEATHSIG, SIGHUP);  // Die when parent dies
+      execvp(exec_cmd, exec_args);
+      // if exec returns, it failed
+      return CF_ERRNO("exec failed (" << strerror(errno) << ")");
+    } else {  // parent process
+      int return_val = TEMP_FAILURE_RETRY(waitid(P_PID, pid, &infop, WEXITED));
+      CF_EXPECT(return_val != -1,
+                "waitid call failed (" << strerror(errno) << ")");
+      LOG(VERBOSE) << exec_cmd << " exited with exit code: " << infop.si_status;
+    }
+  } while (ShouldRestartProcess(infop));
+  return {infop.si_status};
+}
+
+}  // namespace
+}  // namespace cuttlefish
+
+int main(int argc, char** argv) {
+  // these stderr logs are directed to log tee and logged at the proper level
+  ::android::base::InitLogging(argv, android::base::StderrLogger);
+  ::android::base::SetMinimumLogSeverity(android::base::VERBOSE);
+
+  gflags::SetUsageMessage(R"#(
+    This program launches and automatically restarts the input command
+    following the selected restart conditions.
+    Example usage:
+
+      ./process_restarter -when_dumped -- my_program --arg1 --arg2
+  )#");
+
+  // Parse command line flags with remove_flags=true
+  // so that the remainder is the command to execute.
+  gflags::ParseCommandLineFlags(&argc, &argv, true);
+
+  auto result = cuttlefish::RunProcessRestarter(argv[1], argv + 1);
+  if (!result.ok()) {
+    LOG(ERROR) << result.error().Message();
+    LOG(DEBUG) << result.error().Trace();
+    return EXIT_FAILURE;
+  }
+  return result.value();
+}
diff --git a/host/commands/remote/Android.bp b/host/commands/remote/Android.bp
new file mode 100644
index 0000000..8a26e43
--- /dev/null
+++ b/host/commands/remote/Android.bp
@@ -0,0 +1,66 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+cc_defaults {
+    name: "cvdremote_defaults",
+    static_libs: [
+        "libbase",
+        "liblog",
+        "libjsoncpp",
+        "libcuttlefish_utils",
+        "libcuttlefish_fs",
+        "libcuttlefish_web",
+        "libcurl",
+        "libcrypto",
+        "libssl",
+        "libz",
+    ],
+    defaults: ["cuttlefish_buildhost_only"],
+}
+
+cc_library_static {
+    name: "libcvdremote",
+    srcs: [
+        "remote.cc",
+        "actions.cc",
+    ],
+    defaults: ["cvdremote_defaults"],
+}
+
+cc_binary {
+    name: "cvdremote",
+    srcs: [
+        "cvdremote.cc"
+    ],
+    static_libs: [
+        "libgflags",
+        "libcvdremote",
+    ],
+    defaults: ["cvdremote_defaults"],
+}
+
+cc_test_host {
+    name: "libcvdremote_test",
+    srcs: [
+        "unittest/main_test.cc",
+        "unittest/output_test.cc",
+    ],
+    defaults: ["cuttlefish_host", "cvdremote_defaults"],
+}
+
diff --git a/host/commands/remote/actions.cc b/host/commands/remote/actions.cc
new file mode 100644
index 0000000..de746a9
--- /dev/null
+++ b/host/commands/remote/actions.cc
@@ -0,0 +1,136 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "host/commands/remote/actions.h"
+
+#include <future>
+#include <iostream>
+#include <string>
+
+namespace cuttlefish {
+namespace internal {
+
+const char* kFieldName = "name";
+
+// Creates a host instance on which cvds can be run.
+class CreateHostAction : public Action<std::string> {
+ public:
+  CreateHostAction(CloudOrchestratorApi& api,
+                   const CreateHostInstanceRequest& request)
+      : api_(api), request_(request) {}
+
+  ~CreateHostAction() {}
+
+  Result<std::string> Execute() override {
+    auto operation_name =
+        CF_EXPECT(api_.CreateHost(request_), "Create host failed");
+    auto operation = CF_EXPECT(api_.WaitCloudOperation(operation_name),
+                               "Waiting for operation failed");
+    if (!operation.done) {
+      return CF_ERR("Create host operation is not done yet");
+    }
+    OperationResult& result = operation.result;
+    CF_EXPECT(
+        result.response.isMember(kFieldName),
+        "Invalid operation response, missing field: '" << kFieldName << "'");
+    return result.response[kFieldName].asString();
+  }
+
+ private:
+  CloudOrchestratorApi& api_;
+  const CreateHostInstanceRequest& request_;
+};
+
+class DeleteHostsAction : public Action<std::vector<Result<void>>> {
+ public:
+  DeleteHostsAction(CloudOrchestratorApi& api,
+                    const std::vector<std::string>& names)
+      : api_(api), names_(names) {}
+
+  ~DeleteHostsAction() {}
+
+  Result<std::vector<Result<void>>> Execute() override {
+    std::vector<std::future<Result<void>>> del_futures;
+    for (const std::string& name : names_) {
+      del_futures.push_back(std::async([this, name]() -> Result<void> {
+        CF_EXPECT(api_.DeleteHost(name),
+                  "Failed deleting host instance \"" << name << "\".");
+        return {};
+      }));
+    }
+    std::vector<Result<void>> results;
+    for (auto& f : del_futures) {
+      results.push_back(f.get());
+    }
+    return results;
+  }
+
+ private:
+  CloudOrchestratorApi& api_;
+  const std::vector<std::string>& names_;
+};
+
+// Creates a cvd.
+class CreateCVDAction : public Action<std::string> {
+ public:
+  CreateCVDAction(CloudOrchestratorApi& api, const CreateCVDRequest& request,
+                  std::string host)
+      : api_(api), request_(request), host_(host) {}
+
+  ~CreateCVDAction() {}
+
+  Result<std::string> Execute() override {
+    auto operation_name =
+        CF_EXPECT(api_.CreateCVD(host_, request_), "Create cvd failed");
+    auto operation = CF_EXPECT(api_.WaitHostOperation(host_, operation_name),
+                               "Waiting for operation failed");
+    if (!operation.done) {
+      return CF_ERR("Create cvd operation is not done yet");
+    }
+    OperationResult& result = operation.result;
+    CF_EXPECT(
+        result.response.isMember(kFieldName),
+        "Invalid operation response, missing field: '" << kFieldName << "'");
+    return result.response[kFieldName].asString();
+  }
+
+ private:
+  CloudOrchestratorApi& api_;
+  const CreateCVDRequest& request_;
+  std::string host_;
+};
+
+}  // namespace internal
+
+std::unique_ptr<Action<std::string>> CreateHostAction(
+    CloudOrchestratorApi& api, const CreateHostInstanceRequest& request) {
+  return std::unique_ptr<Action<std::string>>(
+      new internal::CreateHostAction(api, request));
+}
+
+std::unique_ptr<Action<std::string>> CreateCVDAction(
+    CloudOrchestratorApi& api, const CreateCVDRequest& request,
+    std::string host) {
+  return std::unique_ptr<Action<std::string>>(
+      new internal::CreateCVDAction(api, request, host));
+}
+
+std::unique_ptr<Action<std::vector<Result<void>>>> DeleteHostsAction(
+    CloudOrchestratorApi& api, const std::vector<std::string>& names) {
+  return std::unique_ptr<Action<std::vector<Result<void>>>>(
+      new internal::DeleteHostsAction(api, names));
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/remote/actions.h b/host/commands/remote/actions.h
new file mode 100644
index 0000000..241c0e6
--- /dev/null
+++ b/host/commands/remote/actions.h
@@ -0,0 +1,45 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#pragma once
+
+#include "host/commands/remote/remote.h"
+
+namespace cuttlefish {
+
+// Represents the action that gets executed for the user commands.
+template <typename T>
+class Action {
+ public:
+  virtual ~Action() {}
+
+  // Executes the action.
+  virtual Result<T> Execute() = 0;
+};
+
+// Action that creates a host instance on which cvds can be run.
+std::unique_ptr<Action<std::string>> CreateHostAction(
+    CloudOrchestratorApi& api, const CreateHostInstanceRequest& request);
+
+// Action that deletes the passed hosts.
+std::unique_ptr<Action<std::vector<Result<void>>>> DeleteHostsAction(
+    CloudOrchestratorApi& api, const std::vector<std::string>& names);
+
+// Action that creates a cvd.
+std::unique_ptr<Action<std::string>> CreateCVDAction(
+    CloudOrchestratorApi& api, const CreateCVDRequest& request,
+    std::string host);
+
+}  // namespace cuttlefish
diff --git a/host/commands/remote/cvdremote.cc b/host/commands/remote/cvdremote.cc
new file mode 100644
index 0000000..e81d107
--- /dev/null
+++ b/host/commands/remote/cvdremote.cc
@@ -0,0 +1,349 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include <iostream>
+
+#include <android-base/logging.h>
+#include <gflags/gflags.h>
+
+#include "host/commands/remote/actions.h"
+#include "host/commands/remote/output.h"
+#include "host/commands/remote/remote.h"
+#include "host/libs/web/http_client/sso_client.h"
+
+const std::string kUsage = R"(
+NAME
+    cvdremote - manage Cuttlefish Virtual Devices (CVDs) in the cloud.
+
+SYNOPSIS
+    cvdremote --service_url=<url> --zone=<zone> [<resource>] <command> [<args>]
+
+RESOURCES
+    cvd (default)
+        Cuttlefish Virtual Devices.
+
+    host
+        Host machines where CVDs live.
+
+COMMANDS
+    create
+        Create a resource.
+
+    list
+        List the resources.
+)";
+
+// General flags.
+DEFINE_string(service_url, "", "Cloud orchestration service url.");
+DEFINE_string(zone, "us-central1-b", "Cloud zone.");
+DEFINE_bool(verbose, false,
+            "Indicates whether to print a verbose output or not.");
+DEFINE_bool(use_sso_client, false,
+            "Communicates with cloud orchestration using sso_client_binary");
+
+// Flags specific to host resource commands.
+DEFINE_string(machine_type, "zones/us-central1-b/machineTypes/n1-standard-4",
+              "Full or partial URL of the machine type resource.");
+DEFINE_string(min_cpu_platform, "Intel Haswell",
+              "Specifies a minimum CPU platform for the VM instance.");
+
+// Flags specific to cvd resource commands.
+DEFINE_string(host, "", "If empty, cvds from all hosts will be printed out.");
+DEFINE_string(build_id, "", "Android build identifier.");
+DEFINE_string(target, "aosp_cf_x86_64_phone-userdebug",
+              "Android build target.");
+
+namespace cuttlefish {
+namespace {
+
+//
+// Create host.
+//
+int CommandCreateHostMain(const std::vector<std::string>&) {
+  auto http_client =
+      FLAGS_use_sso_client
+          ? std::unique_ptr<HttpClient>(new http_client::SsoClient())
+          : HttpClient::CurlClient();
+  CloudOrchestratorApi api(FLAGS_service_url, FLAGS_zone, *http_client);
+  GCPInstance gcp;
+  gcp.machine_type = FLAGS_machine_type.c_str();
+  gcp.min_cpu_platform = FLAGS_min_cpu_platform.c_str();
+  CreateHostInstanceRequest request;
+  request.gcp = &gcp;
+  auto action = CreateHostAction(api, request);
+  auto result = action->Execute();
+  if (!result.ok()) {
+    std::cerr << result.error().Message();
+    return -1;
+  }
+  std::cout << *result << std::endl;
+  return 0;
+}
+
+//
+// List hosts.
+//
+int CommandListHostsMain(const std::vector<std::string>&) {
+  auto http_client =
+      FLAGS_use_sso_client
+          ? std::unique_ptr<HttpClient>(new http_client::SsoClient())
+          : HttpClient::CurlClient();
+  CloudOrchestratorApi api(FLAGS_service_url, FLAGS_zone, *http_client);
+  auto hosts = api.ListHosts();
+  if (!hosts.ok()) {
+    std::cerr << hosts.error().Message();
+    LOG(DEBUG) << hosts.error().Trace();
+    return -1;
+  }
+  if ((*hosts).empty()) {
+    std::cerr << "~ No hosts found ~" << std::endl;
+    return 0;
+  }
+  for (const std::string& host : *hosts) {
+    std::cout << host << std::endl;
+  }
+  return 0;
+}
+
+//
+// Delete host.
+//
+int CommandDeleteHostMain(const std::vector<std::string>& args) {
+  if (args.empty()) {
+    std::cerr << "Missing host name." << std::endl;
+    return -1;
+  }
+  auto http_client =
+      FLAGS_use_sso_client
+          ? std::unique_ptr<HttpClient>(new http_client::SsoClient())
+          : HttpClient::CurlClient();
+  CloudOrchestratorApi api(FLAGS_service_url, FLAGS_zone, *http_client);
+  auto action = DeleteHostsAction(api, args);
+  auto action_result = action->Execute();
+  if (!action_result.ok()) {
+    std::cerr << action_result.error().Message();
+    return -1;
+  }
+  bool any_del_had_error = false;
+  for (auto& del_instance_result : *action_result) {
+    if (!del_instance_result.ok()) {
+      std::cerr << del_instance_result.error().Message() << std::endl
+                << std::endl;
+      any_del_had_error = true;
+    }
+  }
+  if (any_del_had_error) {
+    return -1;
+  }
+  return 0;
+}
+
+void PrintCVDs(const std::string& host, const std::vector<std::string>& cvds) {
+  for (const std::string& cvd : cvds) {
+    CVDOutput o{
+      service_url : FLAGS_service_url,
+      zone : FLAGS_zone,
+      host : host,
+      verbose : FLAGS_verbose,
+      name : cvd
+    };
+    std::cout << o.ToString() << std::endl;
+  }
+}
+
+//
+// Create cvd.
+//
+int CommandCreateCVDMain(const std::vector<std::string>&) {
+  if (FLAGS_build_id == "") {
+    std::cerr << "Missing --build_id flag.";
+    return -1;
+  }
+  auto http_client =
+      FLAGS_use_sso_client
+          ? std::unique_ptr<HttpClient>(new http_client::SsoClient())
+          : HttpClient::CurlClient();
+  auto retrying_http_client = HttpClient::ServerErrorRetryClient(
+      *http_client, 5 /* retry_attempts */,
+      std::chrono::milliseconds(5000) /* retry_delay */);
+  CloudOrchestratorApi api(FLAGS_service_url, FLAGS_zone,
+                           *retrying_http_client);
+  std::string host = FLAGS_host;
+  if (host == "") {
+    GCPInstance gcp;
+    gcp.machine_type = FLAGS_machine_type.c_str();
+    gcp.min_cpu_platform = FLAGS_min_cpu_platform.c_str();
+    CreateHostInstanceRequest request;
+    request.gcp = &gcp;
+    auto action = CreateHostAction(api, request);
+    auto result = action->Execute();
+    if (!result.ok()) {
+      std::cerr << result.error().Message();
+      return -1;
+    }
+    host = *result;
+  }
+  CreateCVDRequest request{
+    build_info : BuildInfo{
+      build_id : FLAGS_build_id,
+      target : FLAGS_target,
+    },
+  };
+  auto action = CreateCVDAction(api, request, host);
+  auto result = action->Execute();
+  if (!result.ok()) {
+    std::cerr << result.error().Message();
+    return -1;
+  }
+  std::cout << *result << std::endl;
+  return 0;
+}
+
+// List cvds.
+//
+// Non-verbose output:
+//   Format: "[INSTANCE_NAME] ([HOST_IDENTIFIER])"
+//   Example:
+//     cvd-1 (cf-ec559de7-6621-4ace-a8be-0f480a6f9498)
+//     cvd-2 (cf-ec559de7-6621-4ace-a8be-0f480a6f9498)
+//     cvd-3 (cf-ec559de7-6621-4ace-a8be-0f480a6f9498)
+//     cvd-1 (cf-e4b0b61d-21c4-497e-8045-bd48c37e487e)
+//     cvd-1 (cf-b3aa26b2-1312-4241-989f-b80f92d6d9ae)
+//
+// Verbose output:
+//   Format:
+//     ```
+//     [INSTANCE_NAME] ([HOST_IDENTIFIER])
+//       [KEY_1]: [VALUE_1]
+//       [KEY_2]: [VALUE_3]
+//       ...
+//       [KEY_N]: [VALUE_N]
+//
+//     ```
+//   Example:
+//     [1] cvd-1 (cf-ec559de7-6621-4ace-a8be-0f480a6f9498)
+//           create time: 2018-10-25T06:32:08.182-07:00
+//           display: 1080x1920 (240)
+//           webrtcstream_url: https://foo.com/.../client.html
+//
+//     [1] cvd-2 (cf-ec559de7-6621-4ace-a8be-0f480a6f9498)
+//           create time: 2018-10-25T06:32:08.182-07:00
+//           display: 1080x1920 (240)
+//           webrtcstream_url: https://foo.com/.../client.html
+
+int CommandListCVDsMain(const std::vector<std::string>&) {
+  auto http_client =
+      FLAGS_use_sso_client
+          ? std::unique_ptr<HttpClient>(new http_client::SsoClient())
+          : HttpClient::CurlClient();
+  CloudOrchestratorApi api(FLAGS_service_url, FLAGS_zone, *http_client);
+  // TODO(b/248087309): Implement listing cvds across multiple hosts asynchronously.
+  if (FLAGS_host == "") {
+    auto hosts = api.ListHosts();
+    if (!hosts.ok()) {
+      std::cerr << hosts.error().Message();
+      LOG(DEBUG) << hosts.error().Trace();
+      return -1;
+    }
+    if ((*hosts).empty()) {
+      std::cerr << "~ No cvds found ~" << std::endl;
+      return 0;
+    }
+    for (const std::string& host : *hosts) {
+      auto cvd_streams = api.ListCVDWebRTCStreams(host);
+      if (!cvd_streams.ok()) {
+        continue;
+      }
+      PrintCVDs(host, *cvd_streams);
+    }
+  } else {
+    auto cvd_streams = api.ListCVDWebRTCStreams(FLAGS_host);
+    if (!cvd_streams.ok()) {
+      std::cerr << cvd_streams.error().Message();
+      LOG(DEBUG) << cvd_streams.error().Trace();
+      return -1;
+    }
+    PrintCVDs(FLAGS_host, *cvd_streams);
+  }
+  return 0;
+}
+
+constexpr char kResourceHost[] = "host";
+constexpr char kResourceCVD[] = "cvd";
+
+constexpr char kCommandList[] = "list";
+constexpr char kCommandCreate[] = "create";
+constexpr char kCommandDelete[] = "delete";
+
+std::map<
+    std::string,
+    std::map<std::string, std::function<int(const std::vector<std::string>&)>>>
+    commands_map = {
+        {kResourceHost,
+         {
+             {kCommandCreate, CommandCreateHostMain},
+             {kCommandList, CommandListHostsMain},
+             {kCommandDelete, CommandDeleteHostMain},
+         }},
+        {kResourceCVD,
+         {
+             {kCommandCreate, CommandCreateCVDMain},
+             {kCommandList, CommandListCVDsMain},
+         }},
+};
+
+int Main(int argc, char** argv) {
+  ::gflags::SetUsageMessage(kUsage);
+  ::gflags::ParseCommandLineFlags(&argc, &argv, true);
+  ::android::base::InitLogging(argv, android::base::StderrLogger);
+  if (FLAGS_service_url == "") {
+    std::cerr << "Missing service_url flag";
+    return -1;
+  }
+  std::vector<std::string> args;
+  for (int i = 1; i < argc; i++) {
+    args.push_back(argv[i]);
+  }
+  if (args.empty()) {
+    std::cerr << "Missing command";
+    return -1;
+  }
+  if (args.size() == 1) {
+    args.insert(args.begin(), kResourceCVD);
+  }
+  std::string resource = args[0];
+  args.erase(args.begin());
+  if (commands_map.find(resource) == commands_map.end()) {
+    std::cerr << "Invalid resource"
+              << " \"" << resource << "\".";
+    return -1;
+  }
+  std::string command = args[0];
+  args.erase(args.begin());
+  const auto& res_commands_map = commands_map[resource];
+  if (res_commands_map.find(command) == res_commands_map.end()) {
+    std::cerr << "Invalid command"
+              << " \"" << command << "\" "
+              << "for"
+              << " \"" << resource << "\" resource.";
+    return -1;
+  }
+  return commands_map[resource][command](args);
+}
+
+}  // namespace
+}  // namespace cuttlefish
+
+int main(int argc, char** argv) { return cuttlefish::Main(argc, argv); }
diff --git a/host/commands/remote/output.h b/host/commands/remote/output.h
new file mode 100644
index 0000000..2a79575
--- /dev/null
+++ b/host/commands/remote/output.h
@@ -0,0 +1,49 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#pragma once
+
+#include <sstream>
+#include <string>
+
+namespace cuttlefish {
+
+struct CVDOutput {
+  bool verbose;
+  const std::string& service_url;
+  const std::string& zone;
+  const std::string& host;
+  const std::string& name;
+
+  std::string ToString() { return verbose ? Verbose() : Name(); }
+
+ private:
+  std::string Name() { return name + " " + "(" + host + ")"; }
+
+  std::string Verbose() {
+    std::stringstream stream;
+    stream << name + " " + "(" + host + ")" << std::endl;
+    stream << "  "
+           << "webrtcstream_url: " << WebRTCStreamURL() << std::endl;
+    return stream.str();
+  }
+
+  std::string WebRTCStreamURL() {
+    return service_url + "/v1/zones/" + zone + "/hosts/" + host + "/devices/" +
+           name + "/files/client.html";
+  }
+};
+
+}  // namespace cuttlefish
diff --git a/host/commands/remote/remote.cc b/host/commands/remote/remote.cc
new file mode 100644
index 0000000..50e5df8
--- /dev/null
+++ b/host/commands/remote/remote.cc
@@ -0,0 +1,197 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "host/commands/remote/remote.h"
+
+#include <iostream>
+
+namespace cuttlefish {
+namespace {
+
+const char* kFieldItems = "items";
+const char* kFieldName = "name";
+const char* kFieldDone = "done";
+const char* kFieldResult = "result";
+const char* kFieldResponse = "response";
+
+static std::string JsonToString(const Json::Value& input) {
+  Json::StreamWriterBuilder wbuilder;
+  wbuilder["indentation"] = "";
+  return Json::writeString(wbuilder, input);
+}
+
+static std::string CreateHostBody(const CreateHostInstanceRequest& request) {
+  Json::Value gcp;
+  gcp["machine_type"] = request.gcp->machine_type;
+  gcp["min_cpu_platform"] = request.gcp->min_cpu_platform;
+  Json::Value request_json;
+  request_json["create_host_instance_request"]["gcp"] = gcp;
+  return JsonToString(request_json);
+}
+
+static std::string ToJson(const CreateCVDRequest& request) {
+  Json::Value build_info;
+  build_info["build_id"] = request.build_info.build_id;
+  build_info["target"] = request.build_info.target;
+  Json::Value root;
+  root["build_info"] = build_info;
+  return JsonToString(root);
+}
+
+}  // namespace
+
+CloudOrchestratorApi::CloudOrchestratorApi(const std::string& service_url,
+                                           const std::string& zone,
+                                           HttpClient& http_client)
+    : service_url_(service_url), zone_(zone), http_client_(http_client) {}
+
+CloudOrchestratorApi::~CloudOrchestratorApi() {}
+
+Result<std::string> CloudOrchestratorApi::CreateHost(
+    const CreateHostInstanceRequest& request) {
+  std::string url = service_url_ + "/v1/zones/" + zone_ + "/hosts";
+  std::string data = CreateHostBody(request);
+  auto resp =
+      CF_EXPECT(http_client_.PostToString(url, data), "Http client failed");
+  CF_EXPECT(resp.HttpSuccess(), "Http request failed with status code: "
+                                    << resp.http_code << ", server response:\n"
+                                    << resp.data);
+  auto resp_json =
+      CF_EXPECT(ParseJson(resp.data), "Failed parsing response body");
+  CF_EXPECT(
+      resp_json.isMember(kFieldName),
+      "Invalid create host response,  missing field: '" << kFieldName << "'");
+  return resp_json[kFieldName].asString();
+}
+
+Result<Operation> CloudOrchestratorApi::WaitCloudOperation(
+    const std::string& name) {
+  std::string url =
+      service_url_ + "/v1/zones/" + zone_ + "/operations/" + name + "/wait";
+  auto resp =
+      CF_EXPECT(http_client_.PostToString(url, ""), "Http client failed");
+  CF_EXPECT(resp.HttpSuccess(), "Http request failed with status code: "
+                                    << resp.http_code << ", server response:\n"
+                                    << resp.data);
+  auto resp_json =
+      CF_EXPECT(ParseJson(resp.data), "Failed parsing response body");
+  CF_EXPECT(resp_json.isMember(kFieldDone),
+            "Invalid response,  missing field: '" << kFieldDone << "'");
+  bool done = resp_json[kFieldDone].asBool();
+  if (!done) {
+    return Operation{done : done};
+  }
+  CF_EXPECT(resp_json.isMember(kFieldResult),
+            "Invalid response,  missing field: '" << kFieldResult << "'");
+  CF_EXPECT(resp_json[kFieldResult].isMember(kFieldResponse),
+            "Invalid response,  missing field: '" << kFieldResponse << "'");
+  return Operation{
+    done : done,
+    result : OperationResult{
+      response : resp_json[kFieldResult][kFieldResponse],
+    }
+  };
+}
+
+Result<std::vector<std::string>> CloudOrchestratorApi::ListHosts() {
+  std::string url = service_url_ + "/v1/zones/" + zone_ + "/hosts";
+  auto resp = CF_EXPECT(http_client_.GetToString(url), "Http client failed");
+  CF_EXPECT(resp.HttpSuccess(), "Http request failed with status code: "
+                                    << resp.http_code << ", server response:\n"
+                                    << resp.data);
+  auto root = CF_EXPECT(ParseJson(resp.data), "Failed parsing response body");
+  CF_EXPECT(
+      root.isMember(kFieldItems),
+      "Invalid list hosts response,  missing field: '" << kFieldItems << "'");
+  std::vector<std::string> result;
+  for (const Json::Value& item : root[kFieldItems]) {
+    result.push_back(item["name"].asString());
+  }
+  return result;
+}
+
+Result<void> CloudOrchestratorApi::DeleteHost(const std::string& host) {
+  std::string url = service_url_ + "/v1/zones/" + zone_ + "/hosts/" + host;
+  auto resp = CF_EXPECT(http_client_.DeleteToString(url), "Http client failed");
+  CF_EXPECT(resp.HttpSuccess(), "Http request failed with status code: "
+                                    << resp.http_code << ", server response:\n"
+                                    << resp.data);
+  return {};
+}
+
+Result<std::string> CloudOrchestratorApi::CreateCVD(
+    const std::string& host, const CreateCVDRequest& request) {
+  std::string url =
+      service_url_ + "/v1/zones/" + zone_ + "/hosts/" + host + "/cvds";
+  std::string data = ToJson(request);
+  auto resp =
+      CF_EXPECT(http_client_.PostToString(url, data), "Http client failed");
+  CF_EXPECT(resp.HttpSuccess(), "Http request failed with status code: "
+                                    << resp.http_code << ", server response:\n"
+                                    << resp.data);
+  auto resp_json =
+      CF_EXPECT(ParseJson(resp.data), "Failed parsing response body");
+  CF_EXPECT(
+      resp_json.isMember(kFieldName),
+      "Invalid create cvd response,  missing field: '" << kFieldName << "'");
+  return resp_json[kFieldName].asString();
+}
+
+Result<Operation> CloudOrchestratorApi::WaitHostOperation(
+    const std::string& host, const std::string& name) {
+  std::string url = service_url_ + "/v1/zones/" + zone_ + "/hosts/" + host +
+                    "/operations/" + name + "/wait";
+  auto resp =
+      CF_EXPECT(http_client_.PostToString(url, ""), "Http client failed");
+  CF_EXPECT(resp.HttpSuccess(), "Http request failed with status code: "
+                                    << resp.http_code << ", server response:\n"
+                                    << resp.data);
+  auto resp_json =
+      CF_EXPECT(ParseJson(resp.data), "Failed parsing response body");
+  CF_EXPECT(resp_json.isMember(kFieldDone),
+            "Invalid response,  missing field: '" << kFieldDone << "'");
+  bool done = resp_json[kFieldDone].asBool();
+  if (!done) {
+    return Operation{done : done};
+  }
+  CF_EXPECT(resp_json.isMember(kFieldResult),
+            "Invalid response,  missing field: '" << kFieldResult << "'");
+  CF_EXPECT(resp_json[kFieldResult].isMember(kFieldResponse),
+            "Invalid response,  missing field: '" << kFieldResponse << "'");
+  return Operation{
+    done : done,
+    result : OperationResult{
+      response : resp_json[kFieldResult][kFieldResponse],
+    }
+  };
+}
+
+Result<std::vector<std::string>> CloudOrchestratorApi::ListCVDWebRTCStreams(
+    const std::string& host) {
+  std::string url =
+      service_url_ + "/v1/zones/" + zone_ + "/hosts/" + host + "/devices";
+  auto resp = CF_EXPECT(http_client_.GetToString(url), "Http client failed");
+  CF_EXPECT(resp.HttpSuccess(), "Http request failed with status code: "
+                                    << resp.http_code << ", server response:\n"
+                                    << resp.data);
+  auto root = CF_EXPECT(ParseJson(resp.data), "Failed parsing response body");
+  std::vector<std::string> result;
+  for (int index = 0; index < root.size(); index++) {
+    result.push_back(root[index].asString());
+  }
+  return result;
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/remote/remote.h b/host/commands/remote/remote.h
new file mode 100644
index 0000000..6bbf5bd
--- /dev/null
+++ b/host/commands/remote/remote.h
@@ -0,0 +1,82 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#pragma once
+
+#include <string>
+
+#include "common/libs/utils/json.h"
+#include "common/libs/utils/result.h"
+#include "host/libs/web/http_client/http_client.h"
+
+namespace cuttlefish {
+
+struct GCPInstance {
+  const char* machine_type;
+  const char* min_cpu_platform;
+};
+
+struct CreateHostInstanceRequest {
+  GCPInstance* gcp;
+};
+
+struct BuildInfo {
+  const std::string& build_id;
+  const std::string& target;
+};
+
+struct CreateCVDRequest {
+  const BuildInfo& build_info;
+};
+
+struct OperationResult {
+  Json::Value response;
+};
+
+struct Operation {
+  bool done;
+  OperationResult result;
+};
+
+class CloudOrchestratorApi {
+ public:
+  CloudOrchestratorApi(const std::string& service_url, const std::string& zone,
+                       HttpClient& http_client);
+  ~CloudOrchestratorApi();
+
+  Result<std::string> CreateHost(const CreateHostInstanceRequest& request);
+
+  Result<Operation> WaitCloudOperation(const std::string& name);
+
+  Result<std::vector<std::string>> ListHosts();
+
+  Result<void> DeleteHost(const std::string& name);
+
+  Result<std::string> CreateCVD(const std::string& host,
+                                const CreateCVDRequest& request);
+
+  Result<Operation> WaitHostOperation(const std::string& host,
+                                      const std::string& name);
+
+  Result<std::vector<std::string>> ListCVDWebRTCStreams(
+      const std::string& host);
+
+ private:
+  const std::string& service_url_;
+  const std::string& zone_;
+  HttpClient& http_client_;
+};
+
+}  // namespace cuttlefish
diff --git a/host/commands/remote/unittest/main_test.cc b/host/commands/remote/unittest/main_test.cc
new file mode 100644
index 0000000..d2ceeb7
--- /dev/null
+++ b/host/commands/remote/unittest/main_test.cc
@@ -0,0 +1,21 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include <gtest/gtest.h>
+
+int main(int argc, char** argv) {
+  ::testing::InitGoogleTest(&argc, argv);
+  return RUN_ALL_TESTS();
+}
diff --git a/host/commands/remote/unittest/output_test.cc b/host/commands/remote/unittest/output_test.cc
new file mode 100644
index 0000000..5aa56f3
--- /dev/null
+++ b/host/commands/remote/unittest/output_test.cc
@@ -0,0 +1,57 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "host/commands/remote/output.h"
+
+#include <iostream>
+
+#include <gtest/gtest.h>
+
+namespace cuttlefish {
+
+TEST(CVDOutputTest, NonVerbose) {
+  CVDOutput o{
+    service_url : "http://xyzzy.com",
+    zone : "central1-b",
+    host : "foo",
+    verbose : false,
+    name : "bar"
+  };
+
+  auto result = o.ToString();
+
+  EXPECT_EQ(result, "bar (foo)");
+}
+
+TEST(CVDOutputTest, InstanceVerbose) {
+  CVDOutput o{
+    service_url : "http://xyzzy.com",
+    zone : "central1-b",
+    host : "foo",
+    verbose : true,
+    name : "bar"
+  };
+
+  auto result = o.ToString();
+
+  // clang-format off
+  const char *expected =
+      "bar (foo)\n"
+      "  webrtcstream_url: http://xyzzy.com/v1/zones/central1-b/hosts/foo/devices/bar/files/client.html\n";
+  // clang-format on
+  EXPECT_EQ(result, expected);
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/restart_cvd/Android.bp b/host/commands/restart_cvd/Android.bp
index 8b1b971..fd52336 100644
--- a/host/commands/restart_cvd/Android.bp
+++ b/host/commands/restart_cvd/Android.bp
@@ -23,16 +23,14 @@
         "restart_cvd.cc",
     ],
     shared_libs: [
-        "libext2_blkid",
         "libbase",
         "libcuttlefish_fs",
         "libcuttlefish_utils",
-        "libfruit",
         "libjsoncpp",
     ],
     static_libs: [
+        "libcuttlefish_command_util",
         "libcuttlefish_host_config",
-        "libcuttlefish_vm_manager",
         "libgflags",
     ],
     defaults: ["cuttlefish_host", "cuttlefish_libicuuc"],
diff --git a/host/commands/restart_cvd/restart_cvd.cc b/host/commands/restart_cvd/restart_cvd.cc
index 6c165f9..66b46d9 100644
--- a/host/commands/restart_cvd/restart_cvd.cc
+++ b/host/commands/restart_cvd/restart_cvd.cc
@@ -14,43 +14,25 @@
  * limitations under the License.
  */
 
-#include <inttypes.h>
-#include <limits.h>
-#include <stdio.h>
-#include <stdint.h>
-#include <stdlib.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <sys/wait.h>
-#include <fcntl.h>
-#include <unistd.h>
-#include <signal.h>
-
-#include <algorithm>
+#include <cstdint>
 #include <cstdlib>
-#include <fstream>
-#include <iomanip>
-#include <memory>
-#include <sstream>
-#include <string>
-#include <vector>
 
-#include <gflags/gflags.h>
 #include <android-base/logging.h>
+#include <gflags/gflags.h>
 
-#include "common/libs/fs/shared_buf.h"
 #include "common/libs/fs/shared_fd.h"
-#include "common/libs/fs/shared_select.h"
-#include "common/libs/utils/environment.h"
+#include "common/libs/utils/result.h"
 #include "host/commands/run_cvd/runner_defs.h"
+#include "host/libs/command_util/util.h"
 #include "host/libs/config/cuttlefish_config.h"
 
 DEFINE_int32(instance_num, cuttlefish::GetInstance(),
-             "Which instance to restart");
+             "Which instance to restart.");
 
-DEFINE_int32(wait_for_launcher, 30,
-             "How many seconds to wait for the launcher to respond to the status "
-             "command. A value of zero means wait indefinetly");
+DEFINE_int32(
+    wait_for_launcher, 30,
+    "How many seconds to wait for the launcher to respond to the status "
+    "command. A value of zero means wait indefinitely.");
 
 DEFINE_int32(boot_timeout, 1000, "How many seconds to wait for the device to "
                                  "reboot.");
@@ -58,106 +40,46 @@
 namespace cuttlefish {
 namespace {
 
-int RestartCvdMain(int argc, char** argv) {
-  ::android::base::InitLogging(argv, android::base::StderrLogger);
-  google::ParseCommandLineFlags(&argc, &argv, true);
+Result<void> RestartCvdMain() {
+  const CuttlefishConfig* config =
+      CF_EXPECT(CuttlefishConfig::Get(), "Failed to obtain config object");
+  SharedFD monitor_socket = CF_EXPECT(
+      GetLauncherMonitor(*config, FLAGS_instance_num, FLAGS_wait_for_launcher));
 
-  auto config = CuttlefishConfig::Get();
-  if (!config) {
-    LOG(ERROR) << "Failed to obtain config object";
-    return 1;
-  }
-
-  auto instance = config->ForInstance(FLAGS_instance_num);
-  auto monitor_path = instance.launcher_monitor_socket_path();
-  if (monitor_path.empty()) {
-    LOG(ERROR) << "No path to launcher monitor found";
-    return 2;
-  }
-  // This may hang if the server never picks up the connection.
-  auto monitor_socket = SharedFD::SocketLocalClient(
-      monitor_path.c_str(), false, SOCK_STREAM, FLAGS_wait_for_launcher);
-  if (!monitor_socket->IsOpen()) {
-    LOG(ERROR) << "Unable to connect to launcher monitor at " << monitor_path
-               << ": " << monitor_socket->StrError();
-    return 3;
-  }
-  auto request = LauncherAction::kRestart;
-  auto bytes_sent = monitor_socket->Send(&request, sizeof(request), 0);
-  if (bytes_sent < 0) {
-    LOG(ERROR) << "Error sending launcher monitor the status command: "
-               << monitor_socket->StrError();
-    return 4;
-  }
-  // Perform a select with a timeout to guard against launcher hanging
-  SharedFDSet read_set;
-  read_set.Set(monitor_socket);
-  struct timeval timeout = {FLAGS_wait_for_launcher, 0};
-  int selected = Select(&read_set, nullptr, nullptr,
-                        FLAGS_wait_for_launcher <= 0 ? nullptr : &timeout);
-  if (selected < 0){
-    LOG(ERROR) << "Failed communication with the launcher monitor: "
-               << strerror(errno);
-    return 5;
-  }
-  if (selected == 0) {
-    LOG(ERROR) << "Timeout expired waiting for launcher monitor to respond";
-    return 6;
-  }
-  LauncherResponse response;
-  auto bytes_recv = monitor_socket->Recv(&response, sizeof(response), 0);
-  if (bytes_recv < 0) {
-    LOG(ERROR) << "Error receiving response from launcher monitor: "
-               << monitor_socket->StrError();
-    return 7;
-  }
   LOG(INFO) << "Requesting restart";
-  if (response != LauncherResponse::kSuccess) {
-    LOG(ERROR) << "Received '" << static_cast<char>(response)
-               << "' response from launcher monitor for restart request";
-    return 8;
-  }
+  CF_EXPECT(WriteLauncherAction(monitor_socket, LauncherAction::kRestart));
+  CF_EXPECT(WaitForRead(monitor_socket, FLAGS_wait_for_launcher));
+  LauncherResponse restart_response =
+      CF_EXPECT(ReadLauncherResponse(monitor_socket));
+  CF_EXPECT(
+      restart_response == LauncherResponse::kSuccess,
+      "Received `" << static_cast<char>(restart_response)
+                   << "` response from launcher monitor for restart request");
+
   LOG(INFO) << "Waiting for device to boot up again";
+  CF_EXPECT(WaitForRead(monitor_socket, FLAGS_boot_timeout));
+  RunnerExitCodes boot_exit_code = CF_EXPECT(ReadExitCode(monitor_socket));
+  CF_EXPECT(boot_exit_code != RunnerExitCodes::kVirtualDeviceBootFailed,
+            "Boot failed");
+  CF_EXPECT(boot_exit_code == RunnerExitCodes::kSuccess,
+            "Unknown response" << static_cast<int>(boot_exit_code));
 
-  read_set.Set(monitor_socket);
-  timeout = {FLAGS_boot_timeout, 0};
-  selected = Select(&read_set, nullptr, nullptr,
-                    FLAGS_boot_timeout <= 0 ? nullptr : &timeout);
-  if (selected < 0){
-    LOG(ERROR) << "Failed communication with the launcher monitor: "
-               << strerror(errno);
-    return 5;
-  }
-  if (selected == 0) {
-    LOG(ERROR) << "Timeout expired waiting for launcher monitor to respond";
-    return 6;
-  }
-
-  RunnerExitCodes exit_code;
-  bytes_recv = ReadExactBinary(monitor_socket, &exit_code);
-  if (bytes_recv < 0) {
-    LOG(ERROR) << "Error in stream response: " << monitor_socket->StrError();
-    return 9;
-  } else if (bytes_recv == 0) {
-    LOG(ERROR) << "Launcher socket closed unexpectedly";
-    return 10;
-  } else if (bytes_recv != sizeof(exit_code)) {
-    LOG(ERROR) << "Launcher response was too short";
-    return 11;
-  } else if (exit_code == RunnerExitCodes::kVirtualDeviceBootFailed) {
-    LOG(ERROR) << "Boot failed";
-    return 12;
-  } else if (exit_code != RunnerExitCodes::kSuccess) {
-    LOG(ERROR) << "Unknown response: " << (int) exit_code;
-    return 13;
-  }
   LOG(INFO) << "Restart successful";
-  return 0;
+  return {};
 }
 
 } // namespace
 } // namespace cuttlefish
 
 int main(int argc, char** argv) {
-  return cuttlefish::RestartCvdMain(argc, argv);
+  ::android::base::InitLogging(argv, android::base::StderrLogger);
+  google::ParseCommandLineFlags(&argc, &argv, true);
+
+  cuttlefish::Result<void> result = cuttlefish::RestartCvdMain();
+  if (!result.ok()) {
+    LOG(ERROR) << result.error().Message();
+    LOG(DEBUG) << result.error().Trace();
+    return EXIT_FAILURE;
+  }
+  return EXIT_SUCCESS;
 }
diff --git a/host/commands/run_cvd/Android.bp b/host/commands/run_cvd/Android.bp
index 2f6eb0b..6f87114 100644
--- a/host/commands/run_cvd/Android.bp
+++ b/host/commands/run_cvd/Android.bp
@@ -17,13 +17,59 @@
     default_applicable_licenses: ["Android-Apache-2.0"],
 }
 
+// Allow the KeyMint reference implementation to be selected at build time.
+soong_config_module_type {
+    name: "keymint_impl_defaults",
+    module_type: "cc_defaults",
+    config_namespace: "secure_env",
+    variables: ["keymint_impl"],
+    properties: ["cflags"],
+}
+
+soong_config_string_variable {
+    name: "keymint_impl",
+    values: ["rust", "cpp"],
+}
+
+keymint_impl_defaults {
+    name: "secure_env_keymint_impl_defaults",
+    soong_config_variables: {
+        keymint_impl: {
+            rust: {
+                cflags: ["-DCUTTLEFISH_KEYMINT_RUST",],
+            },
+            cpp: {},
+            conditions_default: {},
+        },
+    },
+}
+
+
 cc_binary {
     name: "run_cvd",
     srcs: [
         "boot_state_machine.cc",
-        "launch.cc",
-        "launch_modem.cpp",
-        "launch_streamer.cpp",
+        "launch/bluetooth_connector.cpp",
+        "launch/uwb_connector.cpp",
+        "launch/config_server.cpp",
+        "launch/console_forwarder.cpp",
+        "launch/gnss_grpc_proxy.cpp",
+        "launch/kernel_log_monitor.cpp",
+        "launch/logcat_receiver.cpp",
+        "launch/log_tee_creator.cpp",
+        "launch/grpc_socket_creator.cpp",
+        "launch/modem.cpp",
+        "launch/metrics.cpp",
+        "launch/open_wrt.cpp",
+        "launch/openwrt_control_server.cpp",
+        "launch/echo_server.cpp",
+        "launch/root_canal.cpp",
+        "launch/pica.cpp",
+        "launch/secure_env.cpp",
+        "launch/streamer.cpp",
+        "launch/tombstone_receiver.cpp",
+        "launch/wmediumd_server.cpp",
+        "launch/netsim_server.cpp",
         "main.cc",
         "reporting.cpp",
         "process_monitor.cc",
@@ -43,12 +89,17 @@
     static_libs: [
         "libcuttlefish_host_config",
         "libcuttlefish_host_config_adb",
+        "libcuttlefish_host_config_fastboot",
         "libcuttlefish_vm_manager",
+        "libcuttlefish_msg_queue",
+        "libcuttlefish_metrics",
+        "libcuttlefish_utils",
         "libgflags",
     ],
     defaults: [
         "cuttlefish_host",
         "cuttlefish_libicuuc",
         "cvd_cc_defaults",
+        "secure_env_keymint_impl_defaults",
     ],
 }
diff --git a/host/commands/run_cvd/README.md b/host/commands/run_cvd/README.md
new file mode 100644
index 0000000..b2405c7
--- /dev/null
+++ b/host/commands/run_cvd/README.md
@@ -0,0 +1,4 @@
+Init-style manager for processes relating to running a particular Cuttlefish
+Android device.
+
+[![linkage](./doc/linkage.png)](https://cs.android.com/android/platform/superproject/+/master:device/google/cuttlefish/host/commands/run_cvd/doc/linkage.svg)
diff --git a/host/commands/run_cvd/boot_state_machine.cc b/host/commands/run_cvd/boot_state_machine.cc
index 5923631..27a5591 100644
--- a/host/commands/run_cvd/boot_state_machine.cc
+++ b/host/commands/run_cvd/boot_state_machine.cc
@@ -26,12 +26,13 @@
 
 #include "common/libs/fs/shared_fd.h"
 #include "common/libs/utils/tee_logging.h"
+#include "host/commands/assemble_cvd/flags_defaults.h"
 #include "host/commands/kernel_log_monitor/kernel_log_server.h"
 #include "host/commands/kernel_log_monitor/utils.h"
 #include "host/commands/run_cvd/runner_defs.h"
 #include "host/libs/config/feature.h"
 
-DEFINE_int32(reboot_notification_fd, -1,
+DEFINE_int32(reboot_notification_fd, CF_DEFAULTS_REBOOT_NOTIFICATION_FD,
              "A file descriptor to notify when boot completes.");
 
 namespace cuttlefish {
@@ -111,7 +112,9 @@
 
 class ProcessLeader : public SetupFeature {
  public:
-  INJECT(ProcessLeader(const CuttlefishConfig& config)) : config_(config) {}
+  INJECT(ProcessLeader(const CuttlefishConfig& config,
+                       const CuttlefishConfig::InstanceSpecific& instance))
+      : config_(config), instance_(instance) {}
 
   SharedFD ForegroundLauncherPipe() { return foreground_launcher_pipe_; }
 
@@ -125,7 +128,7 @@
     /* These two paths result in pretty different process state, but both
      * achieve the same goal of making the current process the leader of a
      * process group, and are therefore grouped together. */
-    if (config_.run_as_daemon()) {
+    if (instance_.run_as_daemon()) {
       foreground_launcher_pipe_ = DaemonizeLauncher(config_);
       if (!foreground_launcher_pipe_->IsOpen()) {
         return false;
@@ -145,13 +148,14 @@
   }
 
   const CuttlefishConfig& config_;
+  const CuttlefishConfig::InstanceSpecific& instance_;
   SharedFD foreground_launcher_pipe_;
 };
 
 // Maintains the state of the boot process, once a final state is reached
 // (success or failure) it sends the appropriate exit code to the foreground
 // launcher process
-class CvdBootStateMachine : public SetupFeature {
+class CvdBootStateMachine : public SetupFeature, public KernelLogPipeConsumer {
  public:
   INJECT(CvdBootStateMachine(ProcessLeader& process_leader,
                              KernelLogPipeProvider& kernel_log_pipe_provider))
@@ -300,9 +304,11 @@
 
 }  // namespace
 
-fruit::Component<fruit::Required<const CuttlefishConfig, KernelLogPipeProvider>>
+fruit::Component<fruit::Required<const CuttlefishConfig, KernelLogPipeProvider,
+                     const CuttlefishConfig::InstanceSpecific>>
 bootStateMachineComponent() {
   return fruit::createComponent()
+      .addMultibinding<KernelLogPipeConsumer, CvdBootStateMachine>()
       .addMultibinding<SetupFeature, ProcessLeader>()
       .addMultibinding<SetupFeature, CvdBootStateMachine>();
 }
diff --git a/host/commands/run_cvd/boot_state_machine.h b/host/commands/run_cvd/boot_state_machine.h
index 796bb6f..1a2ebfd 100644
--- a/host/commands/run_cvd/boot_state_machine.h
+++ b/host/commands/run_cvd/boot_state_machine.h
@@ -15,13 +15,14 @@
  */
 #pragma once
 
-#include "host/commands/run_cvd/launch.h"
+#include "host/commands/run_cvd/launch/launch.h"
 #include "host/libs/config/cuttlefish_config.h"
 #include "host/libs/config/feature.h"
 
 namespace cuttlefish {
 
-fruit::Component<fruit::Required<const CuttlefishConfig, KernelLogPipeProvider>>
+fruit::Component<fruit::Required<const CuttlefishConfig, KernelLogPipeProvider,
+                     const CuttlefishConfig::InstanceSpecific>>
 bootStateMachineComponent();
 
 }  // namespace cuttlefish
diff --git a/host/commands/run_cvd/doc/linkage.dot b/host/commands/run_cvd/doc/linkage.dot
new file mode 100644
index 0000000..d556e10
--- /dev/null
+++ b/host/commands/run_cvd/doc/linkage.dot
@@ -0,0 +1,115 @@
+digraph {
+  rankdir = "LR";
+
+  config_server
+  console_forwarder
+  cvd
+  cvd_status
+  gnss_grpc_proxy
+  kernel_log_monitor
+  launch_cvd
+  launcher_monitor_socket [label = "launcher_monitor.sock", shape = "rectangle"]
+  logcat_receiver
+  metrics
+  modem_simulator
+  powerwash_cvd
+  restart_cvd
+  run_cvd [label = < <b>run_cvd</b> >, penwidth = "2"]
+  run_cvd_2 [label = "run_cvd", style = "dashed"]
+  run_cvd_etc [label = "...", style = "dashed"]
+  secure_env
+  stop_cvd
+  tombstone_receiver
+
+  subgraph cluster_adb_group {
+    label = "ADB"
+
+    adb_connector
+    socket_vsock_proxy
+  }
+
+  subgraph cluster_bluetooth_group {
+    label = "Bluetooth"
+
+    bt_connector
+    netsim
+    root_canal [label = "root-canal"]
+    root_canal_log_tee [label = "log_tee"]
+  }
+
+  subgraph cluster_vmm_group {
+    label = "VMM"
+
+    crosvm_android [label = "Android crosvm"]
+    crosvm_android_log_tee [label = "log_tee"]
+    crosvm_android_restarter [label = "process_restarter"]
+    gem5
+    qemu [label = "QEMU"]
+    vmm [label = "", shape = "diamond"]
+  }
+
+  subgraph cluster_webrtc_group {
+    label = "WebRTC"
+
+    operator_proxy
+    webrtc
+  }
+
+  subgraph cluster_wifi_group {
+    label = "Wifi"
+
+    crosvm_openwrt [label = "OpenWRT crosvm"]
+    crosvm_openwrt_log_tee [label = "log_tee"]
+    wmediumd
+    wmediumd_log_tee [label = "log_tee"]
+  }
+
+  cvd -> cvd_status
+  cvd -> launch_cvd
+  cvd -> powerwash_cvd
+  cvd -> restart_cvd
+  cvd -> stop_cvd
+
+  launch_cvd -> run_cvd
+  launch_cvd -> run_cvd_2 [style = "dashed"]
+  launch_cvd -> run_cvd_etc [style = "dashed"]
+
+  cvd_status -> launcher_monitor_socket [dir = "both"]
+  powerwash_cvd -> launcher_monitor_socket [dir = "both"]
+  restart_cvd -> launcher_monitor_socket [dir = "both"]
+  stop_cvd -> launcher_monitor_socket [dir = "both"]
+  launcher_monitor_socket -> run_cvd [dir = "both"]
+
+  run_cvd -> adb_connector [style = "dashed"]
+  run_cvd -> bt_connector [style = "dashed"]
+  run_cvd -> config_server
+  run_cvd -> console_forwarder [style = "dashed"]
+  run_cvd -> crosvm_openwrt
+  run_cvd -> crosvm_openwrt_log_tee
+  crosvm_openwrt_log_tee -> crosvm_openwrt [dir = "back"]
+  run_cvd -> gnss_grpc_proxy [style = "dashed"]
+  run_cvd -> kernel_log_monitor
+  run_cvd -> logcat_receiver
+  run_cvd -> metrics
+  run_cvd -> modem_simulator
+  run_cvd -> netsim [style = "dashed"]
+  run_cvd -> operator_proxy [style = "dashed"]
+  run_cvd -> root_canal [style = "dashed"]
+  run_cvd -> root_canal_log_tee [style = "dashed"]
+  root_canal_log_tee -> root_canal [dir = "back"]
+  run_cvd -> secure_env
+  run_cvd -> socket_vsock_proxy [style = "dashed"]
+  run_cvd -> tombstone_receiver
+  run_cvd -> vmm
+  run_cvd -> webrtc [style = "dashed"]
+  run_cvd -> wmediumd
+  run_cvd -> wmediumd_log_tee
+  wmediumd_log_tee -> wmediumd [dir = "back"]
+
+  crosvm_android_log_tee -> crosvm_android [dir = "back", style = "dashed"]
+  vmm -> crosvm_android_restarter [style = "dashed"]
+  crosvm_android_restarter -> crosvm_android
+  vmm -> crosvm_android_log_tee [style = "dashed"]
+  vmm -> gem5 [style = "dashed"]
+  vmm -> qemu [style = "dashed"]
+}
diff --git a/host/commands/run_cvd/doc/linkage.png b/host/commands/run_cvd/doc/linkage.png
new file mode 100644
index 0000000..b9d20f5
--- /dev/null
+++ b/host/commands/run_cvd/doc/linkage.png
Binary files differ
diff --git a/host/commands/run_cvd/doc/linkage.svg b/host/commands/run_cvd/doc/linkage.svg
new file mode 100644
index 0000000..11c8c58
--- /dev/null
+++ b/host/commands/run_cvd/doc/linkage.svg
@@ -0,0 +1,530 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN"
+ "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
+<!-- Generated by graphviz version 2.43.0 (0)
+ -->
+<!-- Title: %3 Pages: 1 -->
+<svg width="1468pt" height="1493pt"
+ viewBox="0.00 0.00 1468.41 1492.87" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
+<g id="graph0" class="graph" transform="scale(1 1) rotate(0) translate(4 1488.87)">
+<title>%3</title>
+<polygon fill="white" stroke="transparent" points="-4,4 -4,-1488.87 1464.41,-1488.87 1464.41,4 -4,4"/>
+<g id="clust1" class="cluster">
+<title>cluster_adb_group</title>
+<polygon fill="none" stroke="black" points="732.66,-825.87 732.66,-954.87 915.05,-954.87 915.05,-825.87 732.66,-825.87"/>
+<text text-anchor="middle" x="823.86" y="-939.67" font-family="Times,serif" font-size="14.00">ADB</text>
+</g>
+<g id="clust2" class="cluster">
+<title>cluster_bluetooth_group</title>
+<polygon fill="none" stroke="black" points="759.31,-634.87 759.31,-817.87 1246.77,-817.87 1246.77,-634.87 759.31,-634.87"/>
+<text text-anchor="middle" x="1003.04" y="-802.67" font-family="Times,serif" font-size="14.00">Bluetooth</text>
+</g>
+<g id="clust3" class="cluster">
+<title>cluster_vmm_group</title>
+<polygon fill="none" stroke="black" points="788.86,-359.87 788.86,-596.87 1452.41,-596.87 1452.41,-359.87 788.86,-359.87"/>
+<text text-anchor="middle" x="1120.63" y="-581.67" font-family="Times,serif" font-size="14.00">VMM</text>
+</g>
+<g id="clust4" class="cluster">
+<title>cluster_webrtc_group</title>
+<polygon fill="none" stroke="black" points="750.86,-222.87 750.86,-351.87 896.85,-351.87 896.85,-222.87 750.86,-222.87"/>
+<text text-anchor="middle" x="823.86" y="-336.67" font-family="Times,serif" font-size="14.00">WebRTC</text>
+</g>
+<g id="clust5" class="cluster">
+<title>cluster_wifi_group</title>
+<polygon fill="none" stroke="black" points="779.46,-76.87 779.46,-205.87 1278.62,-205.87 1278.62,-76.87 779.46,-76.87"/>
+<text text-anchor="middle" x="1029.04" y="-190.67" font-family="Times,serif" font-size="14.00">Wifi</text>
+</g>
+<!-- config_server -->
+<g id="node1" class="node">
+<title>config_server</title>
+<ellipse fill="none" stroke="black" cx="823.86" cy="-1466.87" rx="59.29" ry="18"/>
+<text text-anchor="middle" x="823.86" y="-1463.17" font-family="Times,serif" font-size="14.00">config_server</text>
+</g>
+<!-- console_forwarder -->
+<g id="node2" class="node">
+<title>console_forwarder</title>
+<ellipse fill="none" stroke="black" cx="823.86" cy="-1412.87" rx="76.89" ry="18"/>
+<text text-anchor="middle" x="823.86" y="-1409.17" font-family="Times,serif" font-size="14.00">console_forwarder</text>
+</g>
+<!-- cvd -->
+<g id="node3" class="node">
+<title>cvd</title>
+<ellipse fill="none" stroke="black" cx="27" cy="-878.87" rx="27" ry="18"/>
+<text text-anchor="middle" x="27" y="-875.17" font-family="Times,serif" font-size="14.00">cvd</text>
+</g>
+<!-- cvd_status -->
+<g id="node4" class="node">
+<title>cvd_status</title>
+<ellipse fill="none" stroke="black" cx="157.59" cy="-986.87" rx="48.19" ry="18"/>
+<text text-anchor="middle" x="157.59" y="-983.17" font-family="Times,serif" font-size="14.00">cvd_status</text>
+</g>
+<!-- cvd&#45;&gt;cvd_status -->
+<g id="edge1" class="edge">
+<title>cvd&#45;&gt;cvd_status</title>
+<path fill="none" stroke="black" d="M37.12,-895.58C47.72,-913.62 66.76,-942.15 90,-959.87 96,-964.44 102.89,-968.36 109.9,-971.68"/>
+<polygon fill="black" stroke="black" points="108.91,-975.07 119.48,-975.84 111.7,-968.65 108.91,-975.07"/>
+</g>
+<!-- launch_cvd -->
+<g id="node7" class="node">
+<title>launch_cvd</title>
+<ellipse fill="none" stroke="black" cx="157.59" cy="-770.87" rx="51.19" ry="18"/>
+<text text-anchor="middle" x="157.59" y="-767.17" font-family="Times,serif" font-size="14.00">launch_cvd</text>
+</g>
+<!-- cvd&#45;&gt;launch_cvd -->
+<g id="edge2" class="edge">
+<title>cvd&#45;&gt;launch_cvd</title>
+<path fill="none" stroke="black" d="M37.12,-862.15C47.72,-844.11 66.76,-815.58 90,-797.87 95.73,-793.5 102.26,-789.73 108.95,-786.51"/>
+<polygon fill="black" stroke="black" points="110.37,-789.7 118.09,-782.45 107.53,-783.31 110.37,-789.7"/>
+</g>
+<!-- powerwash_cvd -->
+<g id="node12" class="node">
+<title>powerwash_cvd</title>
+<ellipse fill="none" stroke="black" cx="157.59" cy="-932.87" rx="67.69" ry="18"/>
+<text text-anchor="middle" x="157.59" y="-929.17" font-family="Times,serif" font-size="14.00">powerwash_cvd</text>
+</g>
+<!-- cvd&#45;&gt;powerwash_cvd -->
+<g id="edge3" class="edge">
+<title>cvd&#45;&gt;powerwash_cvd</title>
+<path fill="none" stroke="black" d="M50.04,-888.63C61.86,-893.82 76.69,-900.27 90,-905.87 96.34,-908.53 103.03,-911.3 109.63,-914"/>
+<polygon fill="black" stroke="black" points="108.61,-917.36 119.19,-917.9 111.25,-910.88 108.61,-917.36"/>
+</g>
+<!-- restart_cvd -->
+<g id="node13" class="node">
+<title>restart_cvd</title>
+<ellipse fill="none" stroke="black" cx="157.59" cy="-878.87" rx="50.09" ry="18"/>
+<text text-anchor="middle" x="157.59" y="-875.17" font-family="Times,serif" font-size="14.00">restart_cvd</text>
+</g>
+<!-- cvd&#45;&gt;restart_cvd -->
+<g id="edge4" class="edge">
+<title>cvd&#45;&gt;restart_cvd</title>
+<path fill="none" stroke="black" d="M54.02,-878.87C66.49,-878.87 81.96,-878.87 97.09,-878.87"/>
+<polygon fill="black" stroke="black" points="97.46,-882.37 107.46,-878.87 97.46,-875.37 97.46,-882.37"/>
+</g>
+<!-- stop_cvd -->
+<g id="node18" class="node">
+<title>stop_cvd</title>
+<ellipse fill="none" stroke="black" cx="157.59" cy="-824.87" rx="42.49" ry="18"/>
+<text text-anchor="middle" x="157.59" y="-821.17" font-family="Times,serif" font-size="14.00">stop_cvd</text>
+</g>
+<!-- cvd&#45;&gt;stop_cvd -->
+<g id="edge5" class="edge">
+<title>cvd&#45;&gt;stop_cvd</title>
+<path fill="none" stroke="black" d="M50.04,-869.11C61.86,-863.92 76.69,-857.46 90,-851.87 98.61,-848.25 107.89,-844.43 116.7,-840.85"/>
+<polygon fill="black" stroke="black" points="118.07,-844.07 126.03,-837.07 115.44,-837.58 118.07,-844.07"/>
+</g>
+<!-- launcher_monitor_socket -->
+<g id="node8" class="node">
+<title>launcher_monitor_socket</title>
+<polygon fill="none" stroke="black" points="402.19,-896.87 261.19,-896.87 261.19,-860.87 402.19,-860.87 402.19,-896.87"/>
+<text text-anchor="middle" x="331.69" y="-875.17" font-family="Times,serif" font-size="14.00">launcher_monitor.sock</text>
+</g>
+<!-- cvd_status&#45;&gt;launcher_monitor_socket -->
+<g id="edge9" class="edge">
+<title>cvd_status&#45;&gt;launcher_monitor_socket</title>
+<path fill="none" stroke="black" d="M203.1,-970.78C210.64,-967.5 218.26,-963.83 225.19,-959.87 253.32,-943.76 282.32,-920.97 302.78,-903.66"/>
+<polygon fill="black" stroke="black" points="201.54,-967.64 193.65,-974.71 204.23,-974.1 201.54,-967.64"/>
+<polygon fill="black" stroke="black" points="305.13,-906.25 310.45,-897.08 300.58,-900.93 305.13,-906.25"/>
+</g>
+<!-- gnss_grpc_proxy -->
+<g id="node5" class="node">
+<title>gnss_grpc_proxy</title>
+<ellipse fill="none" stroke="black" cx="823.86" cy="-1358.87" rx="71.49" ry="18"/>
+<text text-anchor="middle" x="823.86" y="-1355.17" font-family="Times,serif" font-size="14.00">gnss_grpc_proxy</text>
+</g>
+<!-- kernel_log_monitor -->
+<g id="node6" class="node">
+<title>kernel_log_monitor</title>
+<ellipse fill="none" stroke="black" cx="823.86" cy="-1304.87" rx="81.49" ry="18"/>
+<text text-anchor="middle" x="823.86" y="-1301.17" font-family="Times,serif" font-size="14.00">kernel_log_monitor</text>
+</g>
+<!-- run_cvd -->
+<g id="node14" class="node">
+<title>run_cvd</title>
+<ellipse fill="none" stroke="black" stroke-width="2" cx="485.63" cy="-851.87" rx="47.39" ry="18"/>
+<text text-anchor="start" x="457.13" y="-849.17" font-family="Times,serif" font-size="14.00"> </text>
+<text text-anchor="start" x="461.13" y="-849.17" font-family="Times,serif" font-weight="bold" font-size="14.00">run_cvd</text>
+<text text-anchor="start" x="510.13" y="-849.17" font-family="Times,serif" font-size="14.00"> </text>
+</g>
+<!-- launch_cvd&#45;&gt;run_cvd -->
+<g id="edge6" class="edge">
+<title>launch_cvd&#45;&gt;run_cvd</title>
+<path fill="none" stroke="black" d="M195.93,-782.87C215.3,-788.91 239.41,-796.15 261.19,-801.87 319.81,-817.26 387.86,-832.05 433.32,-841.5"/>
+<polygon fill="black" stroke="black" points="432.73,-844.95 443.23,-843.55 434.15,-838.1 432.73,-844.95"/>
+</g>
+<!-- run_cvd_2 -->
+<g id="node15" class="node">
+<title>run_cvd_2</title>
+<ellipse fill="none" stroke="black" stroke-dasharray="5,2" cx="331.69" cy="-774.87" rx="39.79" ry="18"/>
+<text text-anchor="middle" x="331.69" y="-771.17" font-family="Times,serif" font-size="14.00">run_cvd</text>
+</g>
+<!-- launch_cvd&#45;&gt;run_cvd_2 -->
+<g id="edge7" class="edge">
+<title>launch_cvd&#45;&gt;run_cvd_2</title>
+<path fill="none" stroke="black" stroke-dasharray="5,2" d="M208.83,-772.04C231.8,-772.57 258.92,-773.2 281.73,-773.73"/>
+<polygon fill="black" stroke="black" points="281.8,-777.23 291.88,-773.97 281.97,-770.23 281.8,-777.23"/>
+</g>
+<!-- run_cvd_etc -->
+<g id="node16" class="node">
+<title>run_cvd_etc</title>
+<ellipse fill="none" stroke="black" stroke-dasharray="5,2" cx="331.69" cy="-720.87" rx="27" ry="18"/>
+<text text-anchor="middle" x="331.69" y="-717.17" font-family="Times,serif" font-size="14.00">...</text>
+</g>
+<!-- launch_cvd&#45;&gt;run_cvd_etc -->
+<g id="edge8" class="edge">
+<title>launch_cvd&#45;&gt;run_cvd_etc</title>
+<path fill="none" stroke="black" stroke-dasharray="5,2" d="M197.64,-759.52C227.5,-750.85 268.24,-739.01 296.74,-730.73"/>
+<polygon fill="black" stroke="black" points="298.02,-734 306.65,-727.85 296.07,-727.28 298.02,-734"/>
+</g>
+<!-- launcher_monitor_socket&#45;&gt;run_cvd -->
+<g id="edge13" class="edge">
+<title>launcher_monitor_socket&#45;&gt;run_cvd</title>
+<path fill="none" stroke="black" d="M412.32,-864.72C419.08,-863.52 425.81,-862.32 432.28,-861.17"/>
+<polygon fill="black" stroke="black" points="411.67,-861.28 402.43,-866.47 412.89,-868.17 411.67,-861.28"/>
+<polygon fill="black" stroke="black" points="432.98,-864.6 442.21,-859.41 431.75,-857.71 432.98,-864.6"/>
+</g>
+<!-- logcat_receiver -->
+<g id="node9" class="node">
+<title>logcat_receiver</title>
+<ellipse fill="none" stroke="black" cx="823.86" cy="-1250.87" rx="64.99" ry="18"/>
+<text text-anchor="middle" x="823.86" y="-1247.17" font-family="Times,serif" font-size="14.00">logcat_receiver</text>
+</g>
+<!-- metrics -->
+<g id="node10" class="node">
+<title>metrics</title>
+<ellipse fill="none" stroke="black" cx="823.86" cy="-1196.87" rx="37.09" ry="18"/>
+<text text-anchor="middle" x="823.86" y="-1193.17" font-family="Times,serif" font-size="14.00">metrics</text>
+</g>
+<!-- modem_simulator -->
+<g id="node11" class="node">
+<title>modem_simulator</title>
+<ellipse fill="none" stroke="black" cx="823.86" cy="-1142.87" rx="76.09" ry="18"/>
+<text text-anchor="middle" x="823.86" y="-1139.17" font-family="Times,serif" font-size="14.00">modem_simulator</text>
+</g>
+<!-- powerwash_cvd&#45;&gt;launcher_monitor_socket -->
+<g id="edge10" class="edge">
+<title>powerwash_cvd&#45;&gt;launcher_monitor_socket</title>
+<path fill="none" stroke="black" d="M211.67,-916.21C227.99,-911.09 246.18,-905.38 263.33,-900"/>
+<polygon fill="black" stroke="black" points="210.51,-912.91 202.02,-919.24 212.61,-919.59 210.51,-912.91"/>
+<polygon fill="black" stroke="black" points="264.79,-903.21 273.29,-896.88 262.7,-896.53 264.79,-903.21"/>
+</g>
+<!-- restart_cvd&#45;&gt;launcher_monitor_socket -->
+<g id="edge11" class="edge">
+<title>restart_cvd&#45;&gt;launcher_monitor_socket</title>
+<path fill="none" stroke="black" d="M218.27,-878.87C228.86,-878.87 240.05,-878.87 251.09,-878.87"/>
+<polygon fill="black" stroke="black" points="217.91,-875.37 207.91,-878.87 217.91,-882.37 217.91,-875.37"/>
+<polygon fill="black" stroke="black" points="251.17,-882.37 261.17,-878.87 251.17,-875.37 251.17,-882.37"/>
+</g>
+<!-- run_cvd&#45;&gt;config_server -->
+<g id="edge16" class="edge">
+<title>run_cvd&#45;&gt;config_server</title>
+<path fill="none" stroke="black" d="M486.33,-869.96C485.2,-959.99 485.08,-1358.62 569.08,-1439.87 620.45,-1489.56 707.85,-1487.35 765.47,-1478.91"/>
+<polygon fill="black" stroke="black" points="766.18,-1482.34 775.51,-1477.34 765.09,-1475.43 766.18,-1482.34"/>
+</g>
+<!-- run_cvd&#45;&gt;console_forwarder -->
+<g id="edge17" class="edge">
+<title>run_cvd&#45;&gt;console_forwarder</title>
+<path fill="none" stroke="black" stroke-dasharray="5,2" d="M486.61,-869.97C486.89,-954.93 492.63,-1312.94 569.08,-1385.87 617.92,-1432.46 698.51,-1433.16 755.76,-1426.09"/>
+<polygon fill="black" stroke="black" points="756.37,-1429.54 765.81,-1424.74 755.43,-1422.6 756.37,-1429.54"/>
+</g>
+<!-- run_cvd&#45;&gt;gnss_grpc_proxy -->
+<g id="edge21" class="edge">
+<title>run_cvd&#45;&gt;gnss_grpc_proxy</title>
+<path fill="none" stroke="black" stroke-dasharray="5,2" d="M486.95,-869.99C488.74,-949.59 500.16,-1267.23 569.08,-1331.87 618.93,-1378.62 700.83,-1378.83 758.07,-1371.62"/>
+<polygon fill="black" stroke="black" points="758.66,-1375.07 768.1,-1370.25 757.72,-1368.14 758.66,-1375.07"/>
+</g>
+<!-- run_cvd&#45;&gt;kernel_log_monitor -->
+<g id="edge22" class="edge">
+<title>run_cvd&#45;&gt;kernel_log_monitor</title>
+<path fill="none" stroke="black" d="M487.37,-869.92C490.8,-943.74 507.63,-1221.44 569.08,-1277.87 617.8,-1322.61 696,-1324.35 752.73,-1318.03"/>
+<polygon fill="black" stroke="black" points="753.2,-1321.49 762.7,-1316.81 752.35,-1314.55 753.2,-1321.49"/>
+</g>
+<!-- run_cvd&#45;&gt;logcat_receiver -->
+<g id="edge23" class="edge">
+<title>run_cvd&#45;&gt;logcat_receiver</title>
+<path fill="none" stroke="black" d="M487.92,-869.91C493.13,-937.68 515.07,-1175.6 569.08,-1223.87 620.74,-1270.03 703.96,-1269.88 760.93,-1262.76"/>
+<polygon fill="black" stroke="black" points="761.45,-1266.22 770.89,-1261.41 760.51,-1259.29 761.45,-1266.22"/>
+</g>
+<!-- run_cvd&#45;&gt;metrics -->
+<g id="edge24" class="edge">
+<title>run_cvd&#45;&gt;metrics</title>
+<path fill="none" stroke="black" d="M488.68,-870.21C495.93,-931.83 522.54,-1129.74 569.08,-1169.87 627.68,-1220.39 724.42,-1214.55 780,-1205.73"/>
+<polygon fill="black" stroke="black" points="780.6,-1209.18 789.88,-1204.06 779.44,-1202.28 780.6,-1209.18"/>
+</g>
+<!-- run_cvd&#45;&gt;modem_simulator -->
+<g id="edge25" class="edge">
+<title>run_cvd&#45;&gt;modem_simulator</title>
+<path fill="none" stroke="black" d="M486.1,-869.9C485.73,-918.48 492.05,-1052.67 569.08,-1115.87 619.79,-1157.47 696.71,-1159.99 752.58,-1154.71"/>
+<polygon fill="black" stroke="black" points="753.17,-1158.17 762.75,-1153.64 752.44,-1151.2 753.17,-1158.17"/>
+</g>
+<!-- secure_env -->
+<g id="node17" class="node">
+<title>secure_env</title>
+<ellipse fill="none" stroke="black" cx="823.86" cy="-1088.87" rx="50.09" ry="18"/>
+<text text-anchor="middle" x="823.86" y="-1085.17" font-family="Times,serif" font-size="14.00">secure_env</text>
+</g>
+<!-- run_cvd&#45;&gt;secure_env -->
+<g id="edge31" class="edge">
+<title>run_cvd&#45;&gt;secure_env</title>
+<path fill="none" stroke="black" d="M487.72,-869.86C491.28,-911.39 506.31,-1014.02 569.08,-1061.87 626.3,-1105.49 713.04,-1104.63 768.83,-1098.17"/>
+<polygon fill="black" stroke="black" points="769.39,-1101.63 778.87,-1096.91 768.51,-1094.69 769.39,-1101.63"/>
+</g>
+<!-- tombstone_receiver -->
+<g id="node19" class="node">
+<title>tombstone_receiver</title>
+<ellipse fill="none" stroke="black" cx="823.86" cy="-1034.87" rx="80.69" ry="18"/>
+<text text-anchor="middle" x="823.86" y="-1031.17" font-family="Times,serif" font-size="14.00">tombstone_receiver</text>
+</g>
+<!-- run_cvd&#45;&gt;tombstone_receiver -->
+<g id="edge33" class="edge">
+<title>run_cvd&#45;&gt;tombstone_receiver</title>
+<path fill="none" stroke="black" d="M490.32,-869.8C498.24,-903.26 520.25,-974.8 569.08,-1007.87 620.09,-1042.41 690.55,-1047.4 744.28,-1044.74"/>
+<polygon fill="black" stroke="black" points="744.68,-1048.22 754.46,-1044.14 744.27,-1041.24 744.68,-1048.22"/>
+</g>
+<!-- adb_connector -->
+<g id="node21" class="node">
+<title>adb_connector</title>
+<ellipse fill="none" stroke="black" cx="823.86" cy="-905.87" rx="62.29" ry="18"/>
+<text text-anchor="middle" x="823.86" y="-902.17" font-family="Times,serif" font-size="14.00">adb_connector</text>
+</g>
+<!-- run_cvd&#45;&gt;adb_connector -->
+<g id="edge14" class="edge">
+<title>run_cvd&#45;&gt;adb_connector</title>
+<path fill="none" stroke="black" stroke-dasharray="5,2" d="M519.89,-864.53C534.74,-869.72 552.59,-875.33 569.08,-878.87 630.1,-891.95 700.8,-898.81 752.06,-902.34"/>
+<polygon fill="black" stroke="black" points="751.83,-905.83 762.04,-903 752.3,-898.84 751.83,-905.83"/>
+</g>
+<!-- socket_vsock_proxy -->
+<g id="node22" class="node">
+<title>socket_vsock_proxy</title>
+<ellipse fill="none" stroke="black" cx="823.86" cy="-851.87" rx="83.39" ry="18"/>
+<text text-anchor="middle" x="823.86" y="-848.17" font-family="Times,serif" font-size="14.00">socket_vsock_proxy</text>
+</g>
+<!-- run_cvd&#45;&gt;socket_vsock_proxy -->
+<g id="edge32" class="edge">
+<title>run_cvd&#45;&gt;socket_vsock_proxy</title>
+<path fill="none" stroke="black" stroke-dasharray="5,2" d="M533.38,-851.87C584.17,-851.87 666.59,-851.87 730.52,-851.87"/>
+<polygon fill="black" stroke="black" points="730.61,-855.37 740.61,-851.87 730.61,-848.37 730.61,-855.37"/>
+</g>
+<!-- bt_connector -->
+<g id="node23" class="node">
+<title>bt_connector</title>
+<ellipse fill="none" stroke="black" cx="823.86" cy="-768.87" rx="56.59" ry="18"/>
+<text text-anchor="middle" x="823.86" y="-765.17" font-family="Times,serif" font-size="14.00">bt_connector</text>
+</g>
+<!-- run_cvd&#45;&gt;bt_connector -->
+<g id="edge15" class="edge">
+<title>run_cvd&#45;&gt;bt_connector</title>
+<path fill="none" stroke="black" stroke-dasharray="5,2" d="M518.78,-838.96C533.88,-833.21 552.23,-826.66 569.08,-821.87 634.62,-803.2 711.4,-788.08 763.21,-778.83"/>
+<polygon fill="black" stroke="black" points="763.99,-782.25 773.22,-777.06 762.77,-775.36 763.99,-782.25"/>
+</g>
+<!-- netsim -->
+<g id="node24" class="node">
+<title>netsim</title>
+<ellipse fill="none" stroke="black" cx="823.86" cy="-714.87" rx="34.39" ry="18"/>
+<text text-anchor="middle" x="823.86" y="-711.17" font-family="Times,serif" font-size="14.00">netsim</text>
+</g>
+<!-- run_cvd&#45;&gt;netsim -->
+<g id="edge26" class="edge">
+<title>run_cvd&#45;&gt;netsim</title>
+<path fill="none" stroke="black" stroke-dasharray="5,2" d="M494.24,-834.15C506.14,-808.85 531.73,-763.09 569.08,-741.87 635.35,-704.21 727.15,-704.97 780.25,-709.52"/>
+<polygon fill="black" stroke="black" points="780.05,-713.02 790.33,-710.47 780.7,-706.05 780.05,-713.02"/>
+</g>
+<!-- root_canal -->
+<g id="node25" class="node">
+<title>root_canal</title>
+<ellipse fill="none" stroke="black" cx="1192.63" cy="-660.87" rx="46.29" ry="18"/>
+<text text-anchor="middle" x="1192.63" y="-657.17" font-family="Times,serif" font-size="14.00">root&#45;canal</text>
+</g>
+<!-- run_cvd&#45;&gt;root_canal -->
+<g id="edge28" class="edge">
+<title>run_cvd&#45;&gt;root_canal</title>
+<path fill="none" stroke="black" stroke-dasharray="5,2" d="M487.08,-833.71C489.26,-790.28 501.5,-680.56 569.08,-633.87 662.24,-569.5 966.18,-620.62 1078.63,-633.87 1101.1,-636.51 1125.57,-642.02 1145.97,-647.38"/>
+<polygon fill="black" stroke="black" points="1145.07,-650.76 1155.63,-649.98 1146.89,-644 1145.07,-650.76"/>
+</g>
+<!-- root_canal_log_tee -->
+<g id="node26" class="node">
+<title>root_canal_log_tee</title>
+<ellipse fill="none" stroke="black" cx="823.86" cy="-660.87" rx="36.29" ry="18"/>
+<text text-anchor="middle" x="823.86" y="-657.17" font-family="Times,serif" font-size="14.00">log_tee</text>
+</g>
+<!-- run_cvd&#45;&gt;root_canal_log_tee -->
+<g id="edge29" class="edge">
+<title>run_cvd&#45;&gt;root_canal_log_tee</title>
+<path fill="none" stroke="black" stroke-dasharray="5,2" d="M489.85,-833.86C497.11,-799.09 518.25,-723.05 569.08,-687.87 631.88,-644.39 725.18,-647.23 779.43,-653.71"/>
+<polygon fill="black" stroke="black" points="779.04,-657.19 789.41,-655 779.94,-650.25 779.04,-657.19"/>
+</g>
+<!-- vmm -->
+<g id="node32" class="node">
+<title>vmm</title>
+<polygon fill="none" stroke="black" points="823.86,-511.87 796.86,-493.87 823.86,-475.87 850.86,-493.87 823.86,-511.87"/>
+</g>
+<!-- run_cvd&#45;&gt;vmm -->
+<g id="edge35" class="edge">
+<title>run_cvd&#45;&gt;vmm</title>
+<path fill="none" stroke="black" d="M487.58,-833.85C490.89,-788.79 505.52,-669.84 569.08,-600.87 629.83,-534.94 737.73,-508.19 791.12,-498.59"/>
+<polygon fill="black" stroke="black" points="791.78,-502.03 801.04,-496.89 790.59,-495.13 791.78,-502.03"/>
+</g>
+<!-- operator_proxy -->
+<g id="node33" class="node">
+<title>operator_proxy</title>
+<ellipse fill="none" stroke="black" cx="823.86" cy="-302.87" rx="64.99" ry="18"/>
+<text text-anchor="middle" x="823.86" y="-299.17" font-family="Times,serif" font-size="14.00">operator_proxy</text>
+</g>
+<!-- run_cvd&#45;&gt;operator_proxy -->
+<g id="edge27" class="edge">
+<title>run_cvd&#45;&gt;operator_proxy</title>
+<path fill="none" stroke="black" stroke-dasharray="5,2" d="M487,-833.86C489.03,-753.25 501.38,-426.41 569.08,-355.87 615.53,-307.46 693.92,-297.46 751.31,-297.55"/>
+<polygon fill="black" stroke="black" points="751.37,-301.05 761.41,-297.66 751.45,-294.05 751.37,-301.05"/>
+</g>
+<!-- webrtc -->
+<g id="node34" class="node">
+<title>webrtc</title>
+<ellipse fill="none" stroke="black" cx="823.86" cy="-248.87" rx="34.39" ry="18"/>
+<text text-anchor="middle" x="823.86" y="-245.17" font-family="Times,serif" font-size="14.00">webrtc</text>
+</g>
+<!-- run_cvd&#45;&gt;webrtc -->
+<g id="edge36" class="edge">
+<title>run_cvd&#45;&gt;webrtc</title>
+<path fill="none" stroke="black" stroke-dasharray="5,2" d="M486.39,-833.85C485.56,-745.07 486.73,-355.29 569.08,-275.87 625.82,-221.14 726.28,-229.44 782.28,-239.6"/>
+<polygon fill="black" stroke="black" points="781.71,-243.05 792.19,-241.5 783.03,-236.18 781.71,-243.05"/>
+</g>
+<!-- crosvm_openwrt -->
+<g id="node35" class="node">
+<title>crosvm_openwrt</title>
+<ellipse fill="none" stroke="black" cx="1192.63" cy="-156.87" rx="77.99" ry="18"/>
+<text text-anchor="middle" x="1192.63" y="-153.17" font-family="Times,serif" font-size="14.00">OpenWRT crosvm</text>
+</g>
+<!-- run_cvd&#45;&gt;crosvm_openwrt -->
+<g id="edge18" class="edge">
+<title>run_cvd&#45;&gt;crosvm_openwrt</title>
+<path fill="none" stroke="black" d="M487.65,-833.68C493.36,-734.56 522.87,-262.34 569.08,-218.87 610.41,-179.98 1022.53,-192.44 1078.63,-183.87 1096.13,-181.19 1114.92,-177.18 1132.05,-173.06"/>
+<polygon fill="black" stroke="black" points="1133.04,-176.42 1141.92,-170.63 1131.37,-169.62 1133.04,-176.42"/>
+</g>
+<!-- crosvm_openwrt_log_tee -->
+<g id="node36" class="node">
+<title>crosvm_openwrt_log_tee</title>
+<ellipse fill="none" stroke="black" cx="823.86" cy="-156.87" rx="36.29" ry="18"/>
+<text text-anchor="middle" x="823.86" y="-153.17" font-family="Times,serif" font-size="14.00">log_tee</text>
+</g>
+<!-- run_cvd&#45;&gt;crosvm_openwrt_log_tee -->
+<g id="edge19" class="edge">
+<title>run_cvd&#45;&gt;crosvm_openwrt_log_tee</title>
+<path fill="none" stroke="black" d="M487.57,-833.87C493.02,-732.54 522.01,-236.02 569.08,-188.87 624.31,-133.54 723.59,-139.17 780.22,-148.09"/>
+<polygon fill="black" stroke="black" points="779.83,-151.58 790.27,-149.78 780.99,-144.67 779.83,-151.58"/>
+</g>
+<!-- wmediumd -->
+<g id="node37" class="node">
+<title>wmediumd</title>
+<ellipse fill="none" stroke="black" cx="1192.63" cy="-102.87" rx="51.19" ry="18"/>
+<text text-anchor="middle" x="1192.63" y="-99.17" font-family="Times,serif" font-size="14.00">wmediumd</text>
+</g>
+<!-- run_cvd&#45;&gt;wmediumd -->
+<g id="edge37" class="edge">
+<title>run_cvd&#45;&gt;wmediumd</title>
+<path fill="none" stroke="black" d="M487.14,-833.71C490.46,-721.76 510.72,-128.36 569.08,-75.87 736.37,74.59 1042.72,-37.24 1151.78,-84.49"/>
+<polygon fill="black" stroke="black" points="1150.54,-87.77 1161.11,-88.58 1153.35,-81.36 1150.54,-87.77"/>
+</g>
+<!-- wmediumd_log_tee -->
+<g id="node38" class="node">
+<title>wmediumd_log_tee</title>
+<ellipse fill="none" stroke="black" cx="823.86" cy="-102.87" rx="36.29" ry="18"/>
+<text text-anchor="middle" x="823.86" y="-99.17" font-family="Times,serif" font-size="14.00">log_tee</text>
+</g>
+<!-- run_cvd&#45;&gt;wmediumd_log_tee -->
+<g id="edge38" class="edge">
+<title>run_cvd&#45;&gt;wmediumd_log_tee</title>
+<path fill="none" stroke="black" d="M487.37,-833.68C491.9,-726.58 517.42,-181.13 569.08,-129.87 624.59,-74.78 724.2,-82.82 780.71,-93.13"/>
+<polygon fill="black" stroke="black" points="780.26,-96.6 790.74,-95.06 781.58,-89.73 780.26,-96.6"/>
+</g>
+<!-- stop_cvd&#45;&gt;launcher_monitor_socket -->
+<g id="edge12" class="edge">
+<title>stop_cvd&#45;&gt;launcher_monitor_socket</title>
+<path fill="none" stroke="black" d="M201.74,-838.41C220.39,-844.26 242.66,-851.25 263.4,-857.75"/>
+<polygon fill="black" stroke="black" points="202.77,-835.06 192.18,-835.41 200.68,-841.74 202.77,-835.06"/>
+<polygon fill="black" stroke="black" points="262.53,-861.15 273.12,-860.8 264.63,-854.47 262.53,-861.15"/>
+</g>
+<!-- root_canal_log_tee&#45;&gt;root_canal -->
+<g id="edge30" class="edge">
+<title>root_canal_log_tee&#45;&gt;root_canal</title>
+<path fill="none" stroke="black" d="M870.72,-660.87C941.42,-660.87 1076.19,-660.87 1146.46,-660.87"/>
+<polygon fill="black" stroke="black" points="870.36,-657.37 860.36,-660.87 870.36,-664.37 870.36,-657.37"/>
+</g>
+<!-- crosvm_android -->
+<g id="node27" class="node">
+<title>crosvm_android</title>
+<ellipse fill="none" stroke="black" cx="1375.51" cy="-412.87" rx="68.79" ry="18"/>
+<text text-anchor="middle" x="1375.51" y="-409.17" font-family="Times,serif" font-size="14.00">Android crosvm</text>
+</g>
+<!-- crosvm_android_log_tee -->
+<g id="node28" class="node">
+<title>crosvm_android_log_tee</title>
+<ellipse fill="none" stroke="black" cx="1192.63" cy="-385.87" rx="36.29" ry="18"/>
+<text text-anchor="middle" x="1192.63" y="-382.17" font-family="Times,serif" font-size="14.00">log_tee</text>
+</g>
+<!-- crosvm_android_log_tee&#45;&gt;crosvm_android -->
+<g id="edge40" class="edge">
+<title>crosvm_android_log_tee&#45;&gt;crosvm_android</title>
+<path fill="none" stroke="black" stroke-dasharray="5,2" d="M1237.62,-392.43C1260.99,-395.92 1290.05,-400.26 1315.37,-404.04"/>
+<polygon fill="black" stroke="black" points="1238.06,-388.96 1227.65,-390.95 1237.02,-395.88 1238.06,-388.96"/>
+</g>
+<!-- crosvm_android_restarter -->
+<g id="node29" class="node">
+<title>crosvm_android_restarter</title>
+<ellipse fill="none" stroke="black" cx="1192.63" cy="-439.87" rx="70.39" ry="18"/>
+<text text-anchor="middle" x="1192.63" y="-436.17" font-family="Times,serif" font-size="14.00">process_restarter</text>
+</g>
+<!-- crosvm_android_restarter&#45;&gt;crosvm_android -->
+<g id="edge42" class="edge">
+<title>crosvm_android_restarter&#45;&gt;crosvm_android</title>
+<path fill="none" stroke="black" d="M1253.61,-430.91C1270.2,-428.44 1288.34,-425.73 1305.41,-423.18"/>
+<polygon fill="black" stroke="black" points="1305.94,-426.64 1315.31,-421.7 1304.9,-419.72 1305.94,-426.64"/>
+</g>
+<!-- gem5 -->
+<g id="node30" class="node">
+<title>gem5</title>
+<ellipse fill="none" stroke="black" cx="1192.63" cy="-493.87" rx="30.59" ry="18"/>
+<text text-anchor="middle" x="1192.63" y="-490.17" font-family="Times,serif" font-size="14.00">gem5</text>
+</g>
+<!-- qemu -->
+<g id="node31" class="node">
+<title>qemu</title>
+<ellipse fill="none" stroke="black" cx="1192.63" cy="-547.87" rx="37.09" ry="18"/>
+<text text-anchor="middle" x="1192.63" y="-544.17" font-family="Times,serif" font-size="14.00">QEMU</text>
+</g>
+<!-- vmm&#45;&gt;crosvm_android_log_tee -->
+<g id="edge43" class="edge">
+<title>vmm&#45;&gt;crosvm_android_log_tee</title>
+<path fill="none" stroke="black" stroke-dasharray="5,2" d="M843.05,-488.51C899.46,-471.9 1072.43,-420.97 1151.47,-397.69"/>
+<polygon fill="black" stroke="black" points="1152.48,-401.04 1161.08,-394.86 1150.5,-394.33 1152.48,-401.04"/>
+</g>
+<!-- vmm&#45;&gt;crosvm_android_restarter -->
+<g id="edge41" class="edge">
+<title>vmm&#45;&gt;crosvm_android_restarter</title>
+<path fill="none" stroke="black" stroke-dasharray="5,2" d="M846.16,-490.73C898.68,-483 1036.25,-462.74 1121.1,-450.25"/>
+<polygon fill="black" stroke="black" points="1121.79,-453.69 1131.17,-448.77 1120.77,-446.76 1121.79,-453.69"/>
+</g>
+<!-- vmm&#45;&gt;gem5 -->
+<g id="edge44" class="edge">
+<title>vmm&#45;&gt;gem5</title>
+<path fill="none" stroke="black" stroke-dasharray="5,2" d="M850.94,-493.87C914.24,-493.87 1076.35,-493.87 1151.84,-493.87"/>
+<polygon fill="black" stroke="black" points="1151.89,-497.37 1161.89,-493.87 1151.89,-490.37 1151.89,-497.37"/>
+</g>
+<!-- vmm&#45;&gt;qemu -->
+<g id="edge45" class="edge">
+<title>vmm&#45;&gt;qemu</title>
+<path fill="none" stroke="black" stroke-dasharray="5,2" d="M846.16,-497C904.4,-505.58 1067.18,-529.55 1146.73,-541.26"/>
+<polygon fill="black" stroke="black" points="1146.52,-544.76 1156.92,-542.76 1147.54,-537.84 1146.52,-544.76"/>
+</g>
+<!-- crosvm_openwrt_log_tee&#45;&gt;crosvm_openwrt -->
+<g id="edge20" class="edge">
+<title>crosvm_openwrt_log_tee&#45;&gt;crosvm_openwrt</title>
+<path fill="none" stroke="black" d="M870.44,-156.87C931.42,-156.87 1040.43,-156.87 1114.36,-156.87"/>
+<polygon fill="black" stroke="black" points="870.36,-153.37 860.36,-156.87 870.36,-160.37 870.36,-153.37"/>
+</g>
+<!-- wmediumd_log_tee&#45;&gt;wmediumd -->
+<g id="edge39" class="edge">
+<title>wmediumd_log_tee&#45;&gt;wmediumd</title>
+<path fill="none" stroke="black" d="M870.46,-102.87C939.3,-102.87 1069.3,-102.87 1140.99,-102.87"/>
+<polygon fill="black" stroke="black" points="870.36,-99.37 860.36,-102.87 870.36,-106.37 870.36,-99.37"/>
+</g>
+</g>
+</svg>
diff --git a/host/commands/run_cvd/launch.cc b/host/commands/run_cvd/launch.cc
deleted file mode 100644
index 235a3ce..0000000
--- a/host/commands/run_cvd/launch.cc
+++ /dev/null
@@ -1,820 +0,0 @@
-//
-// Copyright (C) 2019 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-#include "host/commands/run_cvd/launch.h"
-
-#include <android-base/logging.h>
-
-#include <unordered_set>
-#include <utility>
-#include <vector>
-
-#include "common/libs/fs/shared_fd.h"
-#include "common/libs/utils/files.h"
-#include "common/libs/utils/network.h"
-#include "common/libs/utils/result.h"
-#include "common/libs/utils/subprocess.h"
-#include "host/commands/run_cvd/process_monitor.h"
-#include "host/commands/run_cvd/reporting.h"
-#include "host/commands/run_cvd/runner_defs.h"
-#include "host/libs/config/cuttlefish_config.h"
-#include "host/libs/config/inject.h"
-#include "host/libs/config/known_paths.h"
-#include "host/libs/vm_manager/crosvm_builder.h"
-#include "host/libs/vm_manager/crosvm_manager.h"
-#include "host/libs/vm_manager/vm_manager.h"
-
-namespace cuttlefish {
-
-using vm_manager::VmManager;
-
-namespace {
-
-template <typename T>
-std::vector<T> single_element_emplace(T&& element) {
-  std::vector<T> vec;
-  vec.emplace_back(std::move(element));
-  return vec;
-}
-
-}  // namespace
-
-class KernelLogMonitor : public CommandSource,
-                         public KernelLogPipeProvider,
-                         public DiagnosticInformation {
- public:
-  INJECT(KernelLogMonitor(const CuttlefishConfig::InstanceSpecific& instance))
-      : instance_(instance) {}
-
-  // DiagnosticInformation
-  std::vector<std::string> Diagnostics() const override {
-    return {"Kernel log: " + instance_.PerInstancePath("kernel.log")};
-  }
-
-  // CommandSource
-  std::vector<Command> Commands() override {
-    Command command(KernelLogMonitorBinary());
-    command.AddParameter("-log_pipe_fd=", fifo_);
-
-    if (!event_pipe_write_ends_.empty()) {
-      command.AddParameter("-subscriber_fds=");
-      for (size_t i = 0; i < event_pipe_write_ends_.size(); i++) {
-        if (i > 0) {
-          command.AppendToLastParameter(",");
-        }
-        command.AppendToLastParameter(event_pipe_write_ends_[i]);
-      }
-    }
-
-    return single_element_emplace(std::move(command));
-  }
-
-  // KernelLogPipeProvider
-  SharedFD KernelLogPipe() override {
-    CHECK(!event_pipe_read_ends_.empty()) << "No more kernel pipes left";
-    SharedFD ret = event_pipe_read_ends_.back();
-    event_pipe_read_ends_.pop_back();
-    return ret;
-  }
-
- private:
-  // SetupFeature
-  bool Enabled() const override { return true; }
-  std::string Name() const override { return "KernelLogMonitor"; }
-
- private:
-  std::unordered_set<SetupFeature*> Dependencies() const override { return {}; }
-  Result<void> ResultSetup() override {
-    auto log_name = instance_.kernel_log_pipe_name();
-    CF_EXPECT(mkfifo(log_name.c_str(), 0600) == 0,
-              "Unable to create named pipe at " << log_name << ": "
-                                                << strerror(errno));
-
-    // Open the pipe here (from the launcher) to ensure the pipe is not deleted
-    // due to the usage counters in the kernel reaching zero. If this is not
-    // done and the kernel_log_monitor crashes for some reason the VMM may get
-    // SIGPIPE.
-    fifo_ = SharedFD::Open(log_name, O_RDWR);
-    CF_EXPECT(fifo_->IsOpen(),
-              "Unable to open \"" << log_name << "\": " << fifo_->StrError());
-
-    // TODO(schuffelen): Find a way to calculate this dynamically.
-    int number_of_event_pipes = 4;
-    if (number_of_event_pipes > 0) {
-      for (unsigned int i = 0; i < number_of_event_pipes; ++i) {
-        SharedFD event_pipe_write_end, event_pipe_read_end;
-        CF_EXPECT(SharedFD::Pipe(&event_pipe_read_end, &event_pipe_write_end),
-                  "Failed creating kernel log pipe: " << strerror(errno));
-        event_pipe_write_ends_.push_back(event_pipe_write_end);
-        event_pipe_read_ends_.push_back(event_pipe_read_end);
-      }
-    }
-    return {};
-  }
-
-  const CuttlefishConfig::InstanceSpecific& instance_;
-  SharedFD fifo_;
-  std::vector<SharedFD> event_pipe_write_ends_;
-  std::vector<SharedFD> event_pipe_read_ends_;
-};
-
-class LogTeeCreator {
- public:
-  INJECT(LogTeeCreator(const CuttlefishConfig::InstanceSpecific& instance))
-      : instance_(instance) {}
-
-  Command CreateLogTee(Command& cmd, const std::string& process_name) {
-    auto name_with_ext = process_name + "_logs.fifo";
-    auto logs_path = instance_.PerInstanceInternalPath(name_with_ext.c_str());
-    auto logs = SharedFD::Fifo(logs_path, 0666);
-    if (!logs->IsOpen()) {
-      LOG(FATAL) << "Failed to create fifo for " << process_name
-                 << " output: " << logs->StrError();
-    }
-
-    cmd.RedirectStdIO(Subprocess::StdIOChannel::kStdOut, logs);
-    cmd.RedirectStdIO(Subprocess::StdIOChannel::kStdErr, logs);
-
-    return Command(HostBinaryPath("log_tee"))
-        .AddParameter("--process_name=", process_name)
-        .AddParameter("--log_fd_in=", logs);
-  }
-
- private:
-  const CuttlefishConfig::InstanceSpecific& instance_;
-};
-
-class RootCanal : public CommandSource {
- public:
-  INJECT(RootCanal(const CuttlefishConfig& config,
-                   const CuttlefishConfig::InstanceSpecific& instance,
-                   LogTeeCreator& log_tee))
-      : config_(config), instance_(instance), log_tee_(log_tee) {}
-
-  // CommandSource
-  std::vector<Command> Commands() override {
-    if (!Enabled()) {
-      return {};
-    }
-    Command command(RootCanalBinary());
-
-    // Test port
-    command.AddParameter(config_.rootcanal_test_port());
-    // HCI server port
-    command.AddParameter(config_.rootcanal_hci_port());
-    // Link server port
-    command.AddParameter(config_.rootcanal_link_port());
-    // Bluetooth controller properties file
-    command.AddParameter("--controller_properties_file=",
-                         config_.rootcanal_config_file());
-    // Default commands file
-    command.AddParameter("--default_commands_file=",
-                         config_.rootcanal_default_commands_file());
-
-    std::vector<Command> commands;
-    commands.emplace_back(log_tee_.CreateLogTee(command, "rootcanal"));
-    commands.emplace_back(std::move(command));
-    return commands;
-  }
-
-  // SetupFeature
-  std::string Name() const override { return "RootCanal"; }
-  bool Enabled() const override {
-    return config_.enable_host_bluetooth() && instance_.start_rootcanal();
-  }
-
- private:
-  std::unordered_set<SetupFeature*> Dependencies() const override { return {}; }
-  bool Setup() override { return true; }
-
-  const CuttlefishConfig& config_;
-  const CuttlefishConfig::InstanceSpecific& instance_;
-  LogTeeCreator& log_tee_;
-};
-
-class LogcatReceiver : public CommandSource, public DiagnosticInformation {
- public:
-  INJECT(LogcatReceiver(const CuttlefishConfig::InstanceSpecific& instance))
-      : instance_(instance) {}
-  // DiagnosticInformation
-  std::vector<std::string> Diagnostics() const override {
-    return {"Logcat output: " + instance_.logcat_path()};
-  }
-
-  // CommandSource
-  std::vector<Command> Commands() override {
-    return single_element_emplace(
-        Command(LogcatReceiverBinary()).AddParameter("-log_pipe_fd=", pipe_));
-  }
-
-  // SetupFeature
-  std::string Name() const override { return "LogcatReceiver"; }
-  bool Enabled() const override { return true; }
-
- private:
-  std::unordered_set<SetupFeature*> Dependencies() const override { return {}; }
-  Result<void> ResultSetup() {
-    auto log_name = instance_.logcat_pipe_name();
-    CF_EXPECT(mkfifo(log_name.c_str(), 0600) == 0,
-              "Unable to create named pipe at " << log_name << ": "
-                                                << strerror(errno));
-    // Open the pipe here (from the launcher) to ensure the pipe is not deleted
-    // due to the usage counters in the kernel reaching zero. If this is not
-    // done and the logcat_receiver crashes for some reason the VMM may get
-    // SIGPIPE.
-    pipe_ = SharedFD::Open(log_name.c_str(), O_RDWR);
-    CF_EXPECT(pipe_->IsOpen(),
-              "Can't open \"" << log_name << "\": " << pipe_->StrError());
-    return {};
-  }
-
-  const CuttlefishConfig::InstanceSpecific& instance_;
-  SharedFD pipe_;
-};
-
-class ConfigServer : public CommandSource {
- public:
-  INJECT(ConfigServer(const CuttlefishConfig::InstanceSpecific& instance))
-      : instance_(instance) {}
-
-  // CommandSource
-  std::vector<Command> Commands() override {
-    return single_element_emplace(
-        Command(ConfigServerBinary()).AddParameter("-server_fd=", socket_));
-  }
-
-  // SetupFeature
-  std::string Name() const override { return "ConfigServer"; }
-  bool Enabled() const override { return true; }
-
- private:
-  std::unordered_set<SetupFeature*> Dependencies() const override { return {}; }
-  Result<void> ResultSetup() override {
-    auto port = instance_.config_server_port();
-    socket_ = SharedFD::VsockServer(port, SOCK_STREAM);
-    CF_EXPECT(socket_->IsOpen(),
-              "Unable to create configuration server socket: "
-                  << socket_->StrError());
-    return {};
-  }
-
- private:
-  const CuttlefishConfig::InstanceSpecific& instance_;
-  SharedFD socket_;
-};
-
-class TombstoneReceiver : public CommandSource {
- public:
-  INJECT(TombstoneReceiver(const CuttlefishConfig::InstanceSpecific& instance))
-      : instance_(instance) {}
-
-  // CommandSource
-  std::vector<Command> Commands() override {
-    return single_element_emplace(
-        Command(TombstoneReceiverBinary())
-            .AddParameter("-server_fd=", socket_)
-            .AddParameter("-tombstone_dir=", tombstone_dir_));
-  }
-
-  // SetupFeature
-  std::string Name() const override { return "TombstoneReceiver"; }
-  bool Enabled() const override { return true; }
-
- private:
-  std::unordered_set<SetupFeature*> Dependencies() const override { return {}; }
-  Result<void> ResultSetup() override {
-    tombstone_dir_ = instance_.PerInstancePath("tombstones");
-    if (!DirectoryExists(tombstone_dir_)) {
-      LOG(DEBUG) << "Setting up " << tombstone_dir_;
-      CF_EXPECT(mkdir(tombstone_dir_.c_str(),
-                      S_IRWXU | S_IRWXG | S_IROTH | S_IXOTH) == 0,
-                "Failed to create tombstone directory: "
-                    << tombstone_dir_ << ". Error: " << strerror(errno));
-    }
-
-    auto port = instance_.tombstone_receiver_port();
-    socket_ = SharedFD::VsockServer(port, SOCK_STREAM);
-    CF_EXPECT(socket_->IsOpen(), "Unable to create tombstone server socket: "
-                                     << socket_->StrError());
-    return {};
-  }
-
-  const CuttlefishConfig::InstanceSpecific& instance_;
-  SharedFD socket_;
-  std::string tombstone_dir_;
-};
-
-class MetricsService : public CommandSource {
- public:
-  INJECT(MetricsService(const CuttlefishConfig& config)) : config_(config) {}
-
-  // CommandSource
-  std::vector<Command> Commands() override {
-    return single_element_emplace(Command(MetricsBinary()));
-  }
-
-  // SetupFeature
-  std::string Name() const override { return "MetricsService"; }
-  bool Enabled() const override {
-    return config_.enable_metrics() == CuttlefishConfig::kYes;
-  }
-
- private:
-  std::unordered_set<SetupFeature*> Dependencies() const override { return {}; }
-  bool Setup() override { return true; }
-
- private:
-  const CuttlefishConfig& config_;
-};
-
-class GnssGrpcProxyServer : public CommandSource {
- public:
-  INJECT(
-      GnssGrpcProxyServer(const CuttlefishConfig& config,
-                          const CuttlefishConfig::InstanceSpecific& instance))
-      : config_(config), instance_(instance) {}
-
-  // CommandSource
-  std::vector<Command> Commands() override {
-    Command gnss_grpc_proxy_cmd(GnssGrpcProxyBinary());
-    const unsigned gnss_grpc_proxy_server_port =
-        instance_.gnss_grpc_proxy_server_port();
-    gnss_grpc_proxy_cmd.AddParameter("--gnss_in_fd=", gnss_grpc_proxy_in_wr_);
-    gnss_grpc_proxy_cmd.AddParameter("--gnss_out_fd=", gnss_grpc_proxy_out_rd_);
-    gnss_grpc_proxy_cmd.AddParameter("--gnss_grpc_port=",
-                                     gnss_grpc_proxy_server_port);
-    if (!instance_.gnss_file_path().empty()) {
-      // If path is provided, proxy will start as local mode.
-      gnss_grpc_proxy_cmd.AddParameter("--gnss_file_path=",
-                                       instance_.gnss_file_path());
-    }
-    return single_element_emplace(std::move(gnss_grpc_proxy_cmd));
-  }
-
-  // SetupFeature
-  std::string Name() const override { return "GnssGrpcProxyServer"; }
-  bool Enabled() const override {
-    return config_.enable_gnss_grpc_proxy() &&
-           FileExists(GnssGrpcProxyBinary());
-  }
-
- private:
-  std::unordered_set<SetupFeature*> Dependencies() const override { return {}; }
-  Result<void> ResultSetup() override {
-    std::vector<SharedFD> fifos;
-    std::vector<std::string> fifo_paths = {
-        instance_.PerInstanceInternalPath("gnsshvc_fifo_vm.in"),
-        instance_.PerInstanceInternalPath("gnsshvc_fifo_vm.out"),
-        instance_.PerInstanceInternalPath("locationhvc_fifo_vm.in"),
-        instance_.PerInstanceInternalPath("locationhvc_fifo_vm.out"),
-    };
-    for (const auto& path : fifo_paths) {
-      unlink(path.c_str());
-      CF_EXPECT(mkfifo(path.c_str(), 0660) == 0, "Could not create " << path);
-      auto fd = SharedFD::Open(path, O_RDWR);
-      CF_EXPECT(fd->IsOpen(),
-                "Could not open " << path << ": " << fd->StrError());
-      fifos.push_back(fd);
-    }
-
-    gnss_grpc_proxy_in_wr_ = fifos[0];
-    gnss_grpc_proxy_out_rd_ = fifos[1];
-    return {};
-  }
-
- private:
-  const CuttlefishConfig& config_;
-  const CuttlefishConfig::InstanceSpecific& instance_;
-  SharedFD gnss_grpc_proxy_in_wr_;
-  SharedFD gnss_grpc_proxy_out_rd_;
-};
-
-class BluetoothConnector : public CommandSource {
- public:
-  INJECT(BluetoothConnector(const CuttlefishConfig& config,
-                            const CuttlefishConfig::InstanceSpecific& instance))
-      : config_(config), instance_(instance) {}
-
-  // CommandSource
-  std::vector<Command> Commands() override {
-    Command command(HostBinaryPath("bt_connector"));
-    command.AddParameter("-bt_out=", fifos_[0]);
-    command.AddParameter("-bt_in=", fifos_[1]);
-    command.AddParameter("-hci_port=", config_.rootcanal_hci_port());
-    command.AddParameter("-link_port=", config_.rootcanal_link_port());
-    command.AddParameter("-test_port=", config_.rootcanal_test_port());
-    return single_element_emplace(std::move(command));
-  }
-
-  // SetupFeature
-  std::string Name() const override { return "BluetoothConnector"; }
-  bool Enabled() const override { return config_.enable_host_bluetooth(); }
-
- private:
-  std::unordered_set<SetupFeature*> Dependencies() const override { return {}; }
-  Result<void> ResultSetup() {
-    std::vector<std::string> fifo_paths = {
-        instance_.PerInstanceInternalPath("bt_fifo_vm.in"),
-        instance_.PerInstanceInternalPath("bt_fifo_vm.out"),
-    };
-    for (const auto& path : fifo_paths) {
-      unlink(path.c_str());
-      CF_EXPECT(mkfifo(path.c_str(), 0660) == 0, "Could not create " << path);
-      auto fd = SharedFD::Open(path, O_RDWR);
-      CF_EXPECT(fd->IsOpen(),
-                "Could not open " << path << ": " << fd->StrError());
-      fifos_.push_back(fd);
-    }
-    return {};
-  }
-
- private:
-  const CuttlefishConfig& config_;
-  const CuttlefishConfig::InstanceSpecific& instance_;
-  std::vector<SharedFD> fifos_;
-};
-
-class SecureEnvironment : public CommandSource {
- public:
-  INJECT(SecureEnvironment(const CuttlefishConfig& config,
-                           const CuttlefishConfig::InstanceSpecific& instance,
-                           KernelLogPipeProvider& kernel_log_pipe_provider))
-      : config_(config),
-        instance_(instance),
-        kernel_log_pipe_provider_(kernel_log_pipe_provider) {}
-
-  // CommandSource
-  std::vector<Command> Commands() override {
-    Command command(HostBinaryPath("secure_env"));
-    command.AddParameter("-confui_server_fd=", confui_server_fd_);
-    command.AddParameter("-keymaster_fd_out=", fifos_[0]);
-    command.AddParameter("-keymaster_fd_in=", fifos_[1]);
-    command.AddParameter("-gatekeeper_fd_out=", fifos_[2]);
-    command.AddParameter("-gatekeeper_fd_in=", fifos_[3]);
-
-    const auto& secure_hals = config_.secure_hals();
-    bool secure_keymint = secure_hals.count(SecureHal::Keymint) > 0;
-    command.AddParameter("-keymint_impl=", secure_keymint ? "tpm" : "software");
-    bool secure_gatekeeper = secure_hals.count(SecureHal::Gatekeeper) > 0;
-    auto gatekeeper_impl = secure_gatekeeper ? "tpm" : "software";
-    command.AddParameter("-gatekeeper_impl=", gatekeeper_impl);
-
-    command.AddParameter("-kernel_events_fd=", kernel_log_pipe_);
-
-    return single_element_emplace(std::move(command));
-  }
-
-  // SetupFeature
-  std::string Name() const override { return "SecureEnvironment"; }
-  bool Enabled() const override { return true; }
-
- private:
-  std::unordered_set<SetupFeature*> Dependencies() const override {
-    return {&kernel_log_pipe_provider_};
-  }
-  Result<void> ResultSetup() override {
-    std::vector<std::string> fifo_paths = {
-        instance_.PerInstanceInternalPath("keymaster_fifo_vm.in"),
-        instance_.PerInstanceInternalPath("keymaster_fifo_vm.out"),
-        instance_.PerInstanceInternalPath("gatekeeper_fifo_vm.in"),
-        instance_.PerInstanceInternalPath("gatekeeper_fifo_vm.out"),
-    };
-    std::vector<SharedFD> fifos;
-    for (const auto& path : fifo_paths) {
-      unlink(path.c_str());
-      CF_EXPECT(mkfifo(path.c_str(), 0660) == 0, "Could not create " << path);
-      auto fd = SharedFD::Open(path, O_RDWR);
-      CF_EXPECT(fd->IsOpen(),
-                "Could not open " << path << ": " << fd->StrError());
-      fifos_.push_back(fd);
-    }
-
-    auto confui_socket_path =
-        instance_.PerInstanceInternalPath("confui_sign.sock");
-    confui_server_fd_ = SharedFD::SocketLocalServer(confui_socket_path, false,
-                                                    SOCK_STREAM, 0600);
-    CF_EXPECT(confui_server_fd_->IsOpen(),
-              "Could not open " << confui_socket_path << ": "
-                                << confui_server_fd_->StrError());
-    kernel_log_pipe_ = kernel_log_pipe_provider_.KernelLogPipe();
-
-    return {};
-  }
-
-  const CuttlefishConfig& config_;
-  const CuttlefishConfig::InstanceSpecific& instance_;
-  SharedFD confui_server_fd_;
-  std::vector<SharedFD> fifos_;
-  KernelLogPipeProvider& kernel_log_pipe_provider_;
-  SharedFD kernel_log_pipe_;
-};
-
-class VehicleHalServer : public CommandSource {
- public:
-  INJECT(VehicleHalServer(const CuttlefishConfig& config,
-                          const CuttlefishConfig::InstanceSpecific& instance))
-      : config_(config), instance_(instance) {}
-
-  // CommandSource
-  std::vector<Command> Commands() override {
-    Command grpc_server(VehicleHalGrpcServerBinary());
-
-    const unsigned vhal_server_cid = 2;
-    const unsigned vhal_server_port = instance_.vehicle_hal_server_port();
-    const std::string vhal_server_power_state_file =
-        AbsolutePath(instance_.PerInstancePath("power_state"));
-    const std::string vhal_server_power_state_socket =
-        AbsolutePath(instance_.PerInstancePath("power_state_socket"));
-
-    grpc_server.AddParameter("--server_cid=", vhal_server_cid);
-    grpc_server.AddParameter("--server_port=", vhal_server_port);
-    grpc_server.AddParameter("--power_state_file=",
-                             vhal_server_power_state_file);
-    grpc_server.AddParameter("--power_state_socket=",
-                             vhal_server_power_state_socket);
-    return single_element_emplace(std::move(grpc_server));
-  }
-
-  // SetupFeature
-  std::string Name() const override { return "VehicleHalServer"; }
-  bool Enabled() const override {
-    return config_.enable_vehicle_hal_grpc_server() &&
-           FileExists(VehicleHalGrpcServerBinary());
-  }
-
- private:
-  std::unordered_set<SetupFeature*> Dependencies() const override { return {}; }
-  bool Setup() override { return true; }
-
- private:
-  const CuttlefishConfig& config_;
-  const CuttlefishConfig::InstanceSpecific& instance_;
-};
-
-class ConsoleForwarder : public CommandSource, public DiagnosticInformation {
- public:
-  INJECT(ConsoleForwarder(const CuttlefishConfig& config,
-                          const CuttlefishConfig::InstanceSpecific& instance))
-      : config_(config), instance_(instance) {}
-  // DiagnosticInformation
-  std::vector<std::string> Diagnostics() const override {
-    if (Enabled()) {
-      return {"To access the console run: screen " + instance_.console_path()};
-    } else {
-      return {"Serial console is disabled; use -console=true to enable it."};
-    }
-  }
-
-  // CommandSource
-  std::vector<Command> Commands() override {
-    Command console_forwarder_cmd(ConsoleForwarderBinary());
-
-    console_forwarder_cmd.AddParameter("--console_in_fd=",
-                                       console_forwarder_in_wr_);
-    console_forwarder_cmd.AddParameter("--console_out_fd=",
-                                       console_forwarder_out_rd_);
-    return single_element_emplace(std::move(console_forwarder_cmd));
-  }
-
-  // SetupFeature
-  std::string Name() const override { return "ConsoleForwarder"; }
-  bool Enabled() const override { return config_.console(); }
-
- private:
-  std::unordered_set<SetupFeature*> Dependencies() const override { return {}; }
-  Result<void> ResultSetup() override {
-    auto console_in_pipe_name = instance_.console_in_pipe_name();
-    CF_EXPECT(
-        mkfifo(console_in_pipe_name.c_str(), 0600) == 0,
-        "Failed to create console input fifo for crosvm: " << strerror(errno));
-
-    auto console_out_pipe_name = instance_.console_out_pipe_name();
-    CF_EXPECT(
-        mkfifo(console_out_pipe_name.c_str(), 0660) == 0,
-        "Failed to create console output fifo for crosvm: " << strerror(errno));
-
-    // These fds will only be read from or written to, but open them with
-    // read and write access to keep them open in case the subprocesses exit
-    console_forwarder_in_wr_ =
-        SharedFD::Open(console_in_pipe_name.c_str(), O_RDWR);
-    CF_EXPECT(console_forwarder_in_wr_->IsOpen(),
-              "Failed to open console_forwarder input fifo for writes: "
-                  << console_forwarder_in_wr_->StrError());
-
-    console_forwarder_out_rd_ =
-        SharedFD::Open(console_out_pipe_name.c_str(), O_RDWR);
-    CF_EXPECT(console_forwarder_out_rd_->IsOpen(),
-              "Failed to open console_forwarder output fifo for reads: "
-                  << console_forwarder_out_rd_->StrError());
-    return {};
-  }
-
-  const CuttlefishConfig& config_;
-  const CuttlefishConfig::InstanceSpecific& instance_;
-  SharedFD console_forwarder_in_wr_;
-  SharedFD console_forwarder_out_rd_;
-};
-
-class WmediumdServer : public CommandSource {
- public:
-  INJECT(WmediumdServer(const CuttlefishConfig& config,
-                        const CuttlefishConfig::InstanceSpecific& instance,
-                        LogTeeCreator& log_tee))
-      : config_(config), instance_(instance), log_tee_(log_tee) {}
-
-  // CommandSource
-  std::vector<Command> Commands() override {
-    Command cmd(WmediumdBinary());
-    cmd.AddParameter("-u", config_.vhost_user_mac80211_hwsim());
-    cmd.AddParameter("-a", config_.wmediumd_api_server_socket());
-    cmd.AddParameter("-c", config_path_);
-
-    std::vector<Command> commands;
-    commands.emplace_back(log_tee_.CreateLogTee(cmd, "wmediumd"));
-    commands.emplace_back(std::move(cmd));
-    return commands;
-  }
-
-  // SetupFeature
-  std::string Name() const override { return "WmediumdServer"; }
-  bool Enabled() const override {
-#ifndef ENFORCE_MAC80211_HWSIM
-    return false;
-#else
-    return instance_.start_wmediumd();
-#endif
-  }
-
- private:
-  std::unordered_set<SetupFeature*> Dependencies() const override { return {}; }
-  Result<void> ResultSetup() override {
-    // If wmediumd configuration is given, use it
-    if (!config_.wmediumd_config().empty()) {
-      config_path_ = config_.wmediumd_config();
-      return {};
-    }
-    // Otherwise, generate wmediumd configuration using the current wifi mac
-    // prefix before start
-    config_path_ = instance_.PerInstanceInternalPath("wmediumd.cfg");
-    Command gen_config_cmd(WmediumdGenConfigBinary());
-    gen_config_cmd.AddParameter("-o", config_path_);
-    gen_config_cmd.AddParameter("-p", instance_.wifi_mac_prefix());
-
-    int success = gen_config_cmd.Start().Wait();
-    CF_EXPECT(success == 0, "Unable to run " << gen_config_cmd.Executable()
-                                             << ". Exited with status "
-                                             << success);
-    return {};
-  }
-
-  const CuttlefishConfig& config_;
-  const CuttlefishConfig::InstanceSpecific& instance_;
-  LogTeeCreator& log_tee_;
-  std::string config_path_;
-};
-
-class VmmCommands : public CommandSource {
- public:
-  INJECT(VmmCommands(const CuttlefishConfig& config, VmManager& vmm))
-      : config_(config), vmm_(vmm) {}
-
-  // CommandSource
-  std::vector<Command> Commands() override {
-    return vmm_.StartCommands(config_);
-  }
-
-  // SetupFeature
-  std::string Name() const override { return "VirtualMachineManager"; }
-  bool Enabled() const override { return true; }
-
- private:
-  std::unordered_set<SetupFeature*> Dependencies() const override { return {}; }
-  bool Setup() override { return true; }
-
-  const CuttlefishConfig& config_;
-  VmManager& vmm_;
-};
-
-class OpenWrt : public CommandSource {
- public:
-  INJECT(OpenWrt(const CuttlefishConfig& config,
-                 const CuttlefishConfig::InstanceSpecific& instance,
-                 LogTeeCreator& log_tee))
-      : config_(config), instance_(instance), log_tee_(log_tee) {}
-
-  // CommandSource
-  std::vector<Command> Commands() override {
-    constexpr auto crosvm_for_ap_socket = "ap_control.sock";
-
-    CrosvmBuilder ap_cmd;
-    ap_cmd.SetBinary(config_.crosvm_binary());
-    ap_cmd.AddControlSocket(
-        instance_.PerInstanceInternalPath(crosvm_for_ap_socket));
-
-    if (!config_.vhost_user_mac80211_hwsim().empty()) {
-      ap_cmd.Cmd().AddParameter("--vhost-user-mac80211-hwsim=",
-                                config_.vhost_user_mac80211_hwsim());
-    }
-    SharedFD wifi_tap = ap_cmd.AddTap(instance_.wifi_tap_name());
-    // Only run the leases workaround if we are not using the new network
-    // bridge architecture - in that case, we have a wider DHCP address
-    // space and stale leases should be much less of an issue
-    if (!FileExists("/var/run/cuttlefish-dnsmasq-cvd-wbr.leases") &&
-        wifi_tap->IsOpen()) {
-      // TODO(schuffelen): QEMU also needs this and this is not the best place
-      // for this code. Find a better place to put it.
-      auto lease_file =
-          ForCurrentInstance("/var/run/cuttlefish-dnsmasq-cvd-wbr-") +
-          ".leases";
-      std::uint8_t dhcp_server_ip[] = {
-          192, 168, 96, (std::uint8_t)(ForCurrentInstance(1) * 4 - 3)};
-      if (!ReleaseDhcpLeases(lease_file, wifi_tap, dhcp_server_ip)) {
-        LOG(ERROR)
-            << "Failed to release wifi DHCP leases. Connecting to the wifi "
-            << "network may not work.";
-      }
-    }
-    if (config_.enable_sandbox()) {
-      ap_cmd.Cmd().AddParameter("--seccomp-policy-dir=",
-                                config_.seccomp_policy_dir());
-    } else {
-      ap_cmd.Cmd().AddParameter("--disable-sandbox");
-    }
-    ap_cmd.Cmd().AddParameter("--rwdisk=",
-                              instance_.PerInstancePath("ap_overlay.img"));
-    ap_cmd.Cmd().AddParameter(
-        "--disk=", instance_.PerInstancePath("persistent_composite.img"));
-    ap_cmd.Cmd().AddParameter("--params=\"root=" + config_.ap_image_dev_path() +
-                              "\"");
-
-    auto kernel_logs_path = instance_.PerInstanceLogPath("crosvm_openwrt.log");
-    ap_cmd.AddSerialConsoleReadOnly(kernel_logs_path);
-
-    ap_cmd.Cmd().AddParameter(config_.ap_kernel_image());
-
-    std::vector<Command> commands;
-    commands.emplace_back(log_tee_.CreateLogTee(ap_cmd.Cmd(), "openwrt"));
-    commands.emplace_back(std::move(ap_cmd.Cmd()));
-    return commands;
-  }
-
-  // SetupFeature
-  std::string Name() const override { return "OpenWrt"; }
-  bool Enabled() const override {
-#ifndef ENFORCE_MAC80211_HWSIM
-    return false;
-#else
-    return instance_.start_ap() &&
-           config_.vm_manager() == vm_manager::CrosvmManager::name();
-#endif
-  }
-
- private:
-  std::unordered_set<SetupFeature*> Dependencies() const override { return {}; }
-  bool Setup() override { return true; }
-
-  const CuttlefishConfig& config_;
-  const CuttlefishConfig::InstanceSpecific& instance_;
-  LogTeeCreator& log_tee_;
-};
-
-using PublicDeps = fruit::Required<const CuttlefishConfig, VmManager,
-                                   const CuttlefishConfig::InstanceSpecific>;
-fruit::Component<PublicDeps, KernelLogPipeProvider> launchComponent() {
-  using InternalDeps = fruit::Required<const CuttlefishConfig, VmManager,
-                                       const CuttlefishConfig::InstanceSpecific,
-                                       KernelLogPipeProvider>;
-  using Multi = Multibindings<InternalDeps>;
-  using Bases =
-      Multi::Bases<CommandSource, DiagnosticInformation, SetupFeature>;
-  return fruit::createComponent()
-      .bind<KernelLogPipeProvider, KernelLogMonitor>()
-      .install(Bases::Impls<BluetoothConnector>)
-      .install(Bases::Impls<ConfigServer>)
-      .install(Bases::Impls<ConsoleForwarder>)
-      .install(Bases::Impls<GnssGrpcProxyServer>)
-      .install(Bases::Impls<KernelLogMonitor>)
-      .install(Bases::Impls<LogcatReceiver>)
-      .install(Bases::Impls<MetricsService>)
-      .install(Bases::Impls<RootCanal>)
-      .install(Bases::Impls<SecureEnvironment>)
-      .install(Bases::Impls<TombstoneReceiver>)
-      .install(Bases::Impls<VehicleHalServer>)
-      .install(Bases::Impls<VmmCommands>)
-      .install(Bases::Impls<WmediumdServer>)
-      .install(Bases::Impls<OpenWrt>);
-}
-
-}  // namespace cuttlefish
diff --git a/host/commands/run_cvd/launch.h b/host/commands/run_cvd/launch.h
deleted file mode 100644
index ead43bc..0000000
--- a/host/commands/run_cvd/launch.h
+++ /dev/null
@@ -1,48 +0,0 @@
-//
-// Copyright (C) 2019 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-#pragma once
-
-#include <fruit/fruit.h>
-
-#include <string>
-#include <vector>
-
-#include "common/libs/fs/shared_fd.h"
-#include "common/libs/utils/subprocess.h"
-#include "host/libs/config/command_source.h"
-#include "host/libs/config/custom_actions.h"
-#include "host/libs/config/cuttlefish_config.h"
-#include "host/libs/config/feature.h"
-#include "host/libs/config/kernel_log_pipe_provider.h"
-#include "host/libs/vm_manager/vm_manager.h"
-
-namespace cuttlefish {
-
-fruit::Component<fruit::Required<const CuttlefishConfig, vm_manager::VmManager,
-                                 const CuttlefishConfig::InstanceSpecific>,
-                 KernelLogPipeProvider>
-launchComponent();
-
-fruit::Component<fruit::Required<const CuttlefishConfig,
-                                 const CuttlefishConfig::InstanceSpecific>>
-launchModemComponent();
-
-fruit::Component<fruit::Required<const CuttlefishConfig, KernelLogPipeProvider,
-                                 const CuttlefishConfig::InstanceSpecific,
-                                 const CustomActionConfigProvider>>
-launchStreamerComponent();
-
-} // namespace cuttlefish
diff --git a/host/commands/run_cvd/launch/bluetooth_connector.cpp b/host/commands/run_cvd/launch/bluetooth_connector.cpp
new file mode 100644
index 0000000..64b2e71
--- /dev/null
+++ b/host/commands/run_cvd/launch/bluetooth_connector.cpp
@@ -0,0 +1,96 @@
+//
+// Copyright (C) 2019 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "host/commands/run_cvd/launch/launch.h"
+
+#include <string>
+#include <unordered_set>
+#include <utility>
+#include <vector>
+
+#include <fruit/fruit.h>
+
+#include "common/libs/utils/result.h"
+#include "host/libs/config/command_source.h"
+#include "host/libs/config/known_paths.h"
+
+// Copied from net/bluetooth/hci.h
+#define HCI_MAX_ACL_SIZE 1024
+#define HCI_MAX_FRAME_SIZE (HCI_MAX_ACL_SIZE + 4)
+
+// Include H4 header byte, and reserve more buffer size in the case of excess
+// packet.
+constexpr const size_t kBufferSize = (HCI_MAX_FRAME_SIZE + 1) * 2;
+
+namespace cuttlefish {
+namespace {
+
+class BluetoothConnector : public CommandSource {
+ public:
+  INJECT(BluetoothConnector(const CuttlefishConfig& config,
+                            const CuttlefishConfig::InstanceSpecific& instance))
+      : config_(config), instance_(instance) {}
+
+  // CommandSource
+  Result<std::vector<MonitorCommand>> Commands() override {
+    Command command(TcpConnectorBinary());
+    command.AddParameter("-fifo_out=", fifos_[0]);
+    command.AddParameter("-fifo_in=", fifos_[1]);
+    command.AddParameter("-data_port=", config_.rootcanal_hci_port());
+    command.AddParameter("-buffer_size=", kBufferSize);
+    std::vector<MonitorCommand> commands;
+    commands.emplace_back(std::move(command));
+    return commands;
+  }
+
+  // SetupFeature
+  std::string Name() const override { return "BluetoothConnector"; }
+  bool Enabled() const override { return config_.enable_host_bluetooth_connector(); }
+
+ private:
+  std::unordered_set<SetupFeature*> Dependencies() const override { return {}; }
+  Result<void> ResultSetup() {
+    std::vector<std::string> fifo_paths = {
+        instance_.PerInstanceInternalPath("bt_fifo_vm.in"),
+        instance_.PerInstanceInternalPath("bt_fifo_vm.out"),
+    };
+    for (const auto& path : fifo_paths) {
+      unlink(path.c_str());
+      CF_EXPECT(mkfifo(path.c_str(), 0660) == 0, "Could not create " << path);
+      auto fd = SharedFD::Open(path, O_RDWR);
+      CF_EXPECT(fd->IsOpen(),
+                "Could not open " << path << ": " << fd->StrError());
+      fifos_.push_back(fd);
+    }
+    return {};
+  }
+
+ private:
+  const CuttlefishConfig& config_;
+  const CuttlefishConfig::InstanceSpecific& instance_;
+  std::vector<SharedFD> fifos_;
+};
+
+}  // namespace
+
+fruit::Component<fruit::Required<const CuttlefishConfig,
+                                 const CuttlefishConfig::InstanceSpecific>>
+BluetoothConnectorComponent() {
+  return fruit::createComponent()
+      .addMultibinding<CommandSource, BluetoothConnector>()
+      .addMultibinding<SetupFeature, BluetoothConnector>();
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/run_cvd/launch/config_server.cpp b/host/commands/run_cvd/launch/config_server.cpp
new file mode 100644
index 0000000..061f2eb
--- /dev/null
+++ b/host/commands/run_cvd/launch/config_server.cpp
@@ -0,0 +1,75 @@
+//
+// Copyright (C) 2019 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "host/commands/run_cvd/launch/launch.h"
+
+#include <string>
+#include <unordered_set>
+#include <utility>
+#include <vector>
+
+#include <fruit/fruit.h>
+
+#include "common/libs/utils/result.h"
+#include "host/libs/config/command_source.h"
+#include "host/libs/config/known_paths.h"
+
+namespace cuttlefish {
+namespace {
+
+class ConfigServer : public CommandSource {
+ public:
+  INJECT(ConfigServer(const CuttlefishConfig::InstanceSpecific& instance))
+      : instance_(instance) {}
+
+  // CommandSource
+  Result<std::vector<MonitorCommand>> Commands() override {
+    Command command(ConfigServerBinary());
+    command.AddParameter("-server_fd=", socket_);
+    std::vector<MonitorCommand> commands;
+    commands.emplace_back(std::move(command));
+    return commands;
+  }
+
+  // SetupFeature
+  std::string Name() const override { return "ConfigServer"; }
+  bool Enabled() const override { return true; }
+
+ private:
+  std::unordered_set<SetupFeature*> Dependencies() const override { return {}; }
+  Result<void> ResultSetup() override {
+    auto port = instance_.config_server_port();
+    socket_ = SharedFD::VsockServer(port, SOCK_STREAM);
+    CF_EXPECT(socket_->IsOpen(),
+              "Unable to create configuration server socket: "
+                  << socket_->StrError());
+    return {};
+  }
+
+ private:
+  const CuttlefishConfig::InstanceSpecific& instance_;
+  SharedFD socket_;
+};
+
+}  // namespace
+
+fruit::Component<fruit::Required<const CuttlefishConfig::InstanceSpecific>>
+ConfigServerComponent() {
+  return fruit::createComponent()
+      .addMultibinding<CommandSource, ConfigServer>()
+      .addMultibinding<SetupFeature, ConfigServer>();
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/run_cvd/launch/console_forwarder.cpp b/host/commands/run_cvd/launch/console_forwarder.cpp
new file mode 100644
index 0000000..3fcd3a8
--- /dev/null
+++ b/host/commands/run_cvd/launch/console_forwarder.cpp
@@ -0,0 +1,105 @@
+//
+// Copyright (C) 2019 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "host/commands/run_cvd/launch/launch.h"
+
+#include <string>
+#include <unordered_set>
+#include <utility>
+#include <vector>
+
+#include <fruit/fruit.h>
+
+#include "common/libs/utils/result.h"
+#include "host/commands/run_cvd/reporting.h"
+#include "host/libs/config/command_source.h"
+#include "host/libs/config/known_paths.h"
+
+namespace cuttlefish {
+namespace {
+
+class ConsoleForwarder : public CommandSource, public DiagnosticInformation {
+ public:
+  INJECT(ConsoleForwarder(const CuttlefishConfig::InstanceSpecific& instance))
+      : instance_(instance) {}
+  // DiagnosticInformation
+  std::vector<std::string> Diagnostics() const override {
+    if (Enabled()) {
+      return {"To access the console run: screen " + instance_.console_path()};
+    } else {
+      return {"Serial console is disabled; use -console=true to enable it."};
+    }
+  }
+
+  // CommandSource
+  Result<std::vector<MonitorCommand>> Commands() override {
+    Command console_forwarder_cmd(ConsoleForwarderBinary());
+    console_forwarder_cmd.AddParameter("--console_in_fd=",
+                                       console_forwarder_in_wr_);
+    console_forwarder_cmd.AddParameter("--console_out_fd=",
+                                       console_forwarder_out_rd_);
+    std::vector<MonitorCommand> commands;
+    commands.emplace_back(std::move(console_forwarder_cmd));
+    return commands;
+  }
+
+  // SetupFeature
+  std::string Name() const override { return "ConsoleForwarder"; }
+  bool Enabled() const override { return instance_.console(); }
+
+ private:
+  std::unordered_set<SetupFeature*> Dependencies() const override { return {}; }
+  Result<void> ResultSetup() override {
+    auto console_in_pipe_name = instance_.console_in_pipe_name();
+    CF_EXPECT(
+        mkfifo(console_in_pipe_name.c_str(), 0600) == 0,
+        "Failed to create console input fifo for crosvm: " << strerror(errno));
+
+    auto console_out_pipe_name = instance_.console_out_pipe_name();
+    CF_EXPECT(
+        mkfifo(console_out_pipe_name.c_str(), 0660) == 0,
+        "Failed to create console output fifo for crosvm: " << strerror(errno));
+
+    // These fds will only be read from or written to, but open them with
+    // read and write access to keep them open in case the subprocesses exit
+    console_forwarder_in_wr_ =
+        SharedFD::Open(console_in_pipe_name.c_str(), O_RDWR);
+    CF_EXPECT(console_forwarder_in_wr_->IsOpen(),
+              "Failed to open console_forwarder input fifo for writes: "
+                  << console_forwarder_in_wr_->StrError());
+
+    console_forwarder_out_rd_ =
+        SharedFD::Open(console_out_pipe_name.c_str(), O_RDWR);
+    CF_EXPECT(console_forwarder_out_rd_->IsOpen(),
+              "Failed to open console_forwarder output fifo for reads: "
+                  << console_forwarder_out_rd_->StrError());
+    return {};
+  }
+
+  const CuttlefishConfig::InstanceSpecific& instance_;
+  SharedFD console_forwarder_in_wr_;
+  SharedFD console_forwarder_out_rd_;
+};
+
+}  // namespace
+
+fruit::Component<fruit::Required<const CuttlefishConfig::InstanceSpecific>>
+ConsoleForwarderComponent() {
+  return fruit::createComponent()
+      .addMultibinding<CommandSource, ConsoleForwarder>()
+      .addMultibinding<SetupFeature, ConsoleForwarder>();
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/run_cvd/launch/echo_server.cpp b/host/commands/run_cvd/launch/echo_server.cpp
new file mode 100644
index 0000000..fa5bbb6
--- /dev/null
+++ b/host/commands/run_cvd/launch/echo_server.cpp
@@ -0,0 +1,67 @@
+//
+// Copyright (C) 2023 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "host/commands/run_cvd/launch/launch.h"
+
+#include <string>
+#include <unordered_set>
+#include <utility>
+#include <vector>
+
+#include <fruit/fruit.h>
+
+#include "common/libs/utils/result.h"
+#include "host/commands/run_cvd/launch/grpc_socket_creator.h"
+#include "host/libs/config/command_source.h"
+#include "host/libs/config/known_paths.h"
+
+namespace cuttlefish {
+namespace {
+
+class EchoServer : public CommandSource {
+ public:
+  INJECT(EchoServer(GrpcSocketCreator& grpc_socket))
+      : grpc_socket_(grpc_socket) {}
+
+  // CommandSource
+  Result<std::vector<MonitorCommand>> Commands() override {
+    Command command(EchoServerBinary());
+    command.AddParameter("--grpc_uds_path=",
+                         grpc_socket_.CreateGrpcSocket(Name()));
+    std::vector<MonitorCommand> commands;
+    commands.emplace_back(std::move(command));
+    return commands;
+  }
+
+  // SetupFeature
+  std::string Name() const override { return "EchoServer"; }
+  bool Enabled() const override { return true; }
+
+ private:
+  std::unordered_set<SetupFeature*> Dependencies() const override { return {}; }
+  bool Setup() override { return true; }
+
+  GrpcSocketCreator& grpc_socket_;
+};
+
+}  // namespace
+
+fruit::Component<fruit::Required<GrpcSocketCreator>> EchoServerComponent() {
+  return fruit::createComponent()
+      .addMultibinding<CommandSource, EchoServer>()
+      .addMultibinding<SetupFeature, EchoServer>();
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/run_cvd/launch/gnss_grpc_proxy.cpp b/host/commands/run_cvd/launch/gnss_grpc_proxy.cpp
new file mode 100644
index 0000000..20403e7
--- /dev/null
+++ b/host/commands/run_cvd/launch/gnss_grpc_proxy.cpp
@@ -0,0 +1,120 @@
+//
+// Copyright (C) 2019 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "host/commands/run_cvd/launch/launch.h"
+
+#include <string>
+#include <unordered_set>
+#include <utility>
+#include <vector>
+
+#include <fruit/fruit.h>
+
+#include "common/libs/utils/files.h"
+#include "common/libs/utils/result.h"
+#include "host/libs/config/command_source.h"
+#include "host/libs/config/known_paths.h"
+
+namespace cuttlefish {
+namespace {
+
+class GnssGrpcProxyServer : public CommandSource {
+ public:
+  INJECT(GnssGrpcProxyServer(const CuttlefishConfig::InstanceSpecific& instance,
+                             GrpcSocketCreator& grpc_socket))
+      : instance_(instance), grpc_socket_(grpc_socket) {}
+
+  // CommandSource
+  Result<std::vector<MonitorCommand>> Commands() override {
+    Command gnss_grpc_proxy_cmd(GnssGrpcProxyBinary());
+    const unsigned gnss_grpc_proxy_server_port =
+        instance_.gnss_grpc_proxy_server_port();
+    gnss_grpc_proxy_cmd.AddParameter("--gnss_in_fd=", gnss_grpc_proxy_in_wr_);
+    gnss_grpc_proxy_cmd.AddParameter("--gnss_out_fd=", gnss_grpc_proxy_out_rd_);
+    gnss_grpc_proxy_cmd.AddParameter("--fixed_location_in_fd=",
+                                     fixed_location_grpc_proxy_in_wr_);
+    gnss_grpc_proxy_cmd.AddParameter("--fixed_location_out_fd=",
+                                     fixed_location_grpc_proxy_out_rd_);
+    gnss_grpc_proxy_cmd.AddParameter("--gnss_grpc_port=",
+                                     gnss_grpc_proxy_server_port);
+    gnss_grpc_proxy_cmd.AddParameter("--gnss_grpc_socket=",
+                                     grpc_socket_.CreateGrpcSocket(Name()));
+    if (!instance_.gnss_file_path().empty()) {
+      // If path is provided, proxy will start as local mode.
+      gnss_grpc_proxy_cmd.AddParameter("--gnss_file_path=",
+                                       instance_.gnss_file_path());
+    }
+    if (!instance_.fixed_location_file_path().empty()) {
+      gnss_grpc_proxy_cmd.AddParameter("--fixed_location_file_path=",
+                                       instance_.fixed_location_file_path());
+    }
+    std::vector<MonitorCommand> commands;
+    commands.emplace_back(std::move(gnss_grpc_proxy_cmd));
+    return commands;
+  }
+
+  // SetupFeature
+  std::string Name() const override { return "GnssGrpcProxyServer"; }
+  bool Enabled() const override {
+    return instance_.enable_gnss_grpc_proxy() &&
+           FileExists(GnssGrpcProxyBinary());
+  }
+
+ private:
+  std::unordered_set<SetupFeature*> Dependencies() const override { return {}; }
+  Result<void> ResultSetup() override {
+    std::vector<SharedFD> fifos;
+    std::vector<std::string> fifo_paths = {
+        instance_.PerInstanceInternalPath("gnsshvc_fifo_vm.in"),
+        instance_.PerInstanceInternalPath("gnsshvc_fifo_vm.out"),
+        instance_.PerInstanceInternalPath("locationhvc_fifo_vm.in"),
+        instance_.PerInstanceInternalPath("locationhvc_fifo_vm.out"),
+    };
+    for (const auto& path : fifo_paths) {
+      unlink(path.c_str());
+      CF_EXPECT(mkfifo(path.c_str(), 0660) == 0, "Could not create " << path);
+      auto fd = SharedFD::Open(path, O_RDWR);
+      CF_EXPECT(fd->IsOpen(),
+                "Could not open " << path << ": " << fd->StrError());
+      fifos.push_back(fd);
+    }
+
+    gnss_grpc_proxy_in_wr_ = fifos[0];
+    gnss_grpc_proxy_out_rd_ = fifos[1];
+    fixed_location_grpc_proxy_in_wr_ = fifos[2];
+    fixed_location_grpc_proxy_out_rd_ = fifos[3];
+    return {};
+  }
+
+ private:
+  const CuttlefishConfig::InstanceSpecific& instance_;
+  SharedFD gnss_grpc_proxy_in_wr_;
+  SharedFD gnss_grpc_proxy_out_rd_;
+  SharedFD fixed_location_grpc_proxy_in_wr_;
+  SharedFD fixed_location_grpc_proxy_out_rd_;
+  GrpcSocketCreator& grpc_socket_;
+};
+
+}  // namespace
+
+fruit::Component<fruit::Required<const CuttlefishConfig::InstanceSpecific,
+                                 GrpcSocketCreator>>
+GnssGrpcProxyServerComponent() {
+  return fruit::createComponent()
+      .addMultibinding<CommandSource, GnssGrpcProxyServer>()
+      .addMultibinding<SetupFeature, GnssGrpcProxyServer>();
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/run_cvd/launch/grpc_socket_creator.cpp b/host/commands/run_cvd/launch/grpc_socket_creator.cpp
new file mode 100644
index 0000000..8e67e9c
--- /dev/null
+++ b/host/commands/run_cvd/launch/grpc_socket_creator.cpp
@@ -0,0 +1,32 @@
+//
+// Copyright (C) 2019 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "host/commands/run_cvd/launch/grpc_socket_creator.h"
+
+namespace cuttlefish {
+
+GrpcSocketCreator::GrpcSocketCreator(
+    const CuttlefishConfig::InstanceSpecific& instance)
+    : instance_(instance) {}
+
+std::string GrpcSocketCreator::CreateGrpcSocket(
+    const std::string& process_name) {
+  auto name_with_ext = process_name + ".sock";
+  auto socket_path = instance_.PerInstanceGrpcSocketPath(name_with_ext);
+
+  return socket_path;
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/run_cvd/launch/grpc_socket_creator.h b/host/commands/run_cvd/launch/grpc_socket_creator.h
new file mode 100644
index 0000000..4a9d262
--- /dev/null
+++ b/host/commands/run_cvd/launch/grpc_socket_creator.h
@@ -0,0 +1,34 @@
+//
+// Copyright (C) 2019 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#pragma once
+
+#include <fruit/fruit.h>
+
+#include "host/libs/config/cuttlefish_config.h"
+
+namespace cuttlefish {
+
+class GrpcSocketCreator {
+ public:
+  INJECT(GrpcSocketCreator(const CuttlefishConfig::InstanceSpecific& instance));
+
+  std::string CreateGrpcSocket(const std::string& process_name);
+
+ private:
+  const CuttlefishConfig::InstanceSpecific& instance_;
+};
+
+}  // namespace cuttlefish
diff --git a/host/commands/run_cvd/launch/kernel_log_monitor.cpp b/host/commands/run_cvd/launch/kernel_log_monitor.cpp
new file mode 100644
index 0000000..69cf3ad
--- /dev/null
+++ b/host/commands/run_cvd/launch/kernel_log_monitor.cpp
@@ -0,0 +1,133 @@
+//
+// Copyright (C) 2019 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "host/commands/run_cvd/launch/launch.h"
+
+#include <string>
+#include <unordered_set>
+#include <utility>
+#include <vector>
+
+#include <fruit/fruit.h>
+
+#include "common/libs/utils/result.h"
+#include "host/commands/run_cvd/reporting.h"
+#include "host/libs/config/command_source.h"
+#include "host/libs/config/inject.h"
+#include "host/libs/config/known_paths.h"
+
+namespace cuttlefish {
+namespace {
+
+class KernelLogMonitor : public CommandSource,
+                         public KernelLogPipeProvider,
+                         public DiagnosticInformation,
+                         public LateInjected {
+ public:
+  INJECT(KernelLogMonitor(const CuttlefishConfig::InstanceSpecific& instance))
+      : instance_(instance) {}
+
+  // DiagnosticInformation
+  std::vector<std::string> Diagnostics() const override {
+    return {"Kernel log: " + instance_.PerInstancePath("kernel.log")};
+  }
+
+  Result<void> LateInject(fruit::Injector<>& injector) override {
+    number_of_event_pipes_ =
+        injector.getMultibindings<KernelLogPipeConsumer>().size();
+    return {};
+  }
+
+  // CommandSource
+  Result<std::vector<MonitorCommand>> Commands() override {
+    Command command(KernelLogMonitorBinary());
+    command.AddParameter("-log_pipe_fd=", fifo_);
+
+    if (!event_pipe_write_ends_.empty()) {
+      command.AddParameter("-subscriber_fds=");
+      for (size_t i = 0; i < event_pipe_write_ends_.size(); i++) {
+        if (i > 0) {
+          command.AppendToLastParameter(",");
+        }
+        command.AppendToLastParameter(event_pipe_write_ends_[i]);
+      }
+    }
+    std::vector<MonitorCommand> commands;
+    commands.emplace_back(std::move(command));
+    return commands;
+  }
+
+  // KernelLogPipeProvider
+  SharedFD KernelLogPipe() override {
+    CHECK(!event_pipe_read_ends_.empty()) << "No more kernel pipes left. Make sure you inherited "
+                                             "KernelLogPipeProvider and provided multibinding "
+                                             "from KernelLogPipeConsumer to your type.";
+    SharedFD ret = event_pipe_read_ends_.back();
+    event_pipe_read_ends_.pop_back();
+    return ret;
+  }
+
+ private:
+  // SetupFeature
+  bool Enabled() const override { return true; }
+  std::string Name() const override { return "KernelLogMonitor"; }
+
+ private:
+  std::unordered_set<SetupFeature*> Dependencies() const override { return {}; }
+  Result<void> ResultSetup() override {
+    auto log_name = instance_.kernel_log_pipe_name();
+    CF_EXPECT(mkfifo(log_name.c_str(), 0600) == 0,
+              "Unable to create named pipe at " << log_name << ": "
+                                                << strerror(errno));
+
+    // Open the pipe here (from the launcher) to ensure the pipe is not deleted
+    // due to the usage counters in the kernel reaching zero. If this is not
+    // done and the kernel_log_monitor crashes for some reason the VMM may get
+    // SIGPIPE.
+    fifo_ = SharedFD::Open(log_name, O_RDWR);
+    CF_EXPECT(fifo_->IsOpen(),
+              "Unable to open \"" << log_name << "\": " << fifo_->StrError());
+
+    for (unsigned int i = 0; i < number_of_event_pipes_; ++i) {
+      SharedFD event_pipe_write_end, event_pipe_read_end;
+      CF_EXPECT(SharedFD::Pipe(&event_pipe_read_end, &event_pipe_write_end),
+                "Failed creating kernel log pipe: " << strerror(errno));
+      event_pipe_write_ends_.push_back(event_pipe_write_end);
+      event_pipe_read_ends_.push_back(event_pipe_read_end);
+    }
+    return {};
+  }
+
+  int number_of_event_pipes_ = 0;
+  const CuttlefishConfig::InstanceSpecific& instance_;
+  SharedFD fifo_;
+  std::vector<SharedFD> event_pipe_write_ends_;
+  std::vector<SharedFD> event_pipe_read_ends_;
+};
+
+}  // namespace
+
+fruit::Component<fruit::Required<const CuttlefishConfig::InstanceSpecific>,
+                 KernelLogPipeProvider>
+KernelLogMonitorComponent() {
+  return fruit::createComponent()
+      .bind<KernelLogPipeProvider, KernelLogMonitor>()
+      .addMultibinding<CommandSource, KernelLogMonitor>()
+      .addMultibinding<SetupFeature, KernelLogMonitor>()
+      .addMultibinding<DiagnosticInformation, KernelLogMonitor>()
+      .addMultibinding<LateInjected, KernelLogMonitor>();
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/run_cvd/launch/launch.h b/host/commands/run_cvd/launch/launch.h
new file mode 100644
index 0000000..39cb9eb
--- /dev/null
+++ b/host/commands/run_cvd/launch/launch.h
@@ -0,0 +1,111 @@
+//
+// Copyright (C) 2019 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#pragma once
+
+#include <string>
+#include <vector>
+
+#include <fruit/fruit.h>
+
+#include "common/libs/fs/shared_fd.h"
+#include "common/libs/utils/subprocess.h"
+#include "host/commands/run_cvd/launch/grpc_socket_creator.h"
+#include "host/commands/run_cvd/launch/log_tee_creator.h"
+#include "host/libs/config/command_source.h"
+#include "host/libs/config/custom_actions.h"
+#include "host/libs/config/cuttlefish_config.h"
+#include "host/libs/config/feature.h"
+#include "host/libs/config/kernel_log_pipe_provider.h"
+#include "host/libs/vm_manager/vm_manager.h"
+
+namespace cuttlefish {
+
+fruit::Component<fruit::Required<const CuttlefishConfig,
+                                 const CuttlefishConfig::InstanceSpecific>>
+UwbConnectorComponent();
+
+fruit::Component<fruit::Required<const CuttlefishConfig,
+                                 const CuttlefishConfig::InstanceSpecific>>
+BluetoothConnectorComponent();
+
+fruit::Component<fruit::Required<const CuttlefishConfig::InstanceSpecific>,
+                 KernelLogPipeProvider>
+KernelLogMonitorComponent();
+
+fruit::Component<fruit::Required<const CuttlefishConfig::InstanceSpecific>>
+LogcatReceiverComponent();
+
+fruit::Component<fruit::Required<const CuttlefishConfig::InstanceSpecific>>
+ConfigServerComponent();
+
+fruit::Component<fruit::Required<const CuttlefishConfig::InstanceSpecific>>
+ConsoleForwarderComponent();
+
+fruit::Component<fruit::Required<const CuttlefishConfig::InstanceSpecific,
+                                 GrpcSocketCreator>>
+GnssGrpcProxyServerComponent();
+
+fruit::Component<fruit::Required<const CuttlefishConfig>>
+MetricsServiceComponent();
+
+fruit::Component<
+    fruit::Required<const CuttlefishConfig,
+                    const CuttlefishConfig::InstanceSpecific, LogTeeCreator>>
+OpenWrtComponent();
+
+fruit::Component<fruit::Required<const CuttlefishConfig::InstanceSpecific,
+                                 GrpcSocketCreator>>
+OpenwrtControlServerComponent();
+
+fruit::Component<
+    fruit::Required<const CuttlefishConfig,
+                    const CuttlefishConfig::InstanceSpecific, LogTeeCreator>>
+RootCanalComponent();
+
+fruit::Component<
+    fruit::Required<const CuttlefishConfig,
+                    const CuttlefishConfig::InstanceSpecific, LogTeeCreator>>
+PicaComponent();
+
+fruit::Component<fruit::Required<GrpcSocketCreator>> EchoServerComponent();
+
+fruit::Component<fruit::Required<const CuttlefishConfig,
+                                 const CuttlefishConfig::InstanceSpecific>>
+NetsimServerComponent();
+
+fruit::Component<fruit::Required<const CuttlefishConfig,
+                                 const CuttlefishConfig::InstanceSpecific,
+                                 KernelLogPipeProvider>>
+SecureEnvComponent();
+
+fruit::Component<fruit::Required<const CuttlefishConfig::InstanceSpecific>>
+TombstoneReceiverComponent();
+
+fruit::Component<fruit::Required<const CuttlefishConfig,
+                                 const CuttlefishConfig::InstanceSpecific,
+                                 LogTeeCreator, GrpcSocketCreator>>
+WmediumdServerComponent();
+
+fruit::Component<fruit::Required<const CuttlefishConfig,
+                                 const CuttlefishConfig::InstanceSpecific>>
+launchModemComponent();
+
+fruit::Component<fruit::Required<const CuttlefishConfig, KernelLogPipeProvider,
+                                 const CuttlefishConfig::InstanceSpecific,
+                                 const CustomActionConfigProvider>>
+launchStreamerComponent();
+
+}  // namespace cuttlefish
diff --git a/host/commands/run_cvd/launch/log_tee_creator.cpp b/host/commands/run_cvd/launch/log_tee_creator.cpp
new file mode 100644
index 0000000..c2ba1b4
--- /dev/null
+++ b/host/commands/run_cvd/launch/log_tee_creator.cpp
@@ -0,0 +1,41 @@
+//
+// Copyright (C) 2019 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "host/commands/run_cvd/launch/log_tee_creator.h"
+
+namespace cuttlefish {
+
+LogTeeCreator::LogTeeCreator(const CuttlefishConfig::InstanceSpecific& instance)
+    : instance_(instance) {}
+
+Command LogTeeCreator::CreateLogTee(Command& cmd,
+                                    const std::string& process_name) {
+  auto name_with_ext = process_name + "_logs.fifo";
+  auto logs_path = instance_.PerInstanceInternalPath(name_with_ext.c_str());
+  auto logs = SharedFD::Fifo(logs_path, 0666);
+  if (!logs->IsOpen()) {
+    LOG(FATAL) << "Failed to create fifo for " << process_name
+               << " output: " << logs->StrError();
+  }
+
+  cmd.RedirectStdIO(Subprocess::StdIOChannel::kStdOut, logs);
+  cmd.RedirectStdIO(Subprocess::StdIOChannel::kStdErr, logs);
+
+  return Command(HostBinaryPath("log_tee"))
+      .AddParameter("--process_name=", process_name)
+      .AddParameter("--log_fd_in=", logs);
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/run_cvd/launch/log_tee_creator.h b/host/commands/run_cvd/launch/log_tee_creator.h
new file mode 100644
index 0000000..3f74291
--- /dev/null
+++ b/host/commands/run_cvd/launch/log_tee_creator.h
@@ -0,0 +1,35 @@
+//
+// Copyright (C) 2019 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#pragma once
+
+#include <fruit/fruit.h>
+
+#include "common/libs/utils/subprocess.h"
+#include "host/libs/config/cuttlefish_config.h"
+
+namespace cuttlefish {
+
+class LogTeeCreator {
+ public:
+  INJECT(LogTeeCreator(const CuttlefishConfig::InstanceSpecific& instance));
+
+  Command CreateLogTee(Command& cmd, const std::string& process_name);
+
+ private:
+  const CuttlefishConfig::InstanceSpecific& instance_;
+};
+
+}  // namespace cuttlefish
diff --git a/host/commands/run_cvd/launch/logcat_receiver.cpp b/host/commands/run_cvd/launch/logcat_receiver.cpp
new file mode 100644
index 0000000..f4a02a3
--- /dev/null
+++ b/host/commands/run_cvd/launch/logcat_receiver.cpp
@@ -0,0 +1,86 @@
+//
+// Copyright (C) 2019 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "host/commands/run_cvd/launch/launch.h"
+
+#include <string>
+#include <unordered_set>
+#include <utility>
+#include <vector>
+
+#include <fruit/fruit.h>
+
+#include "common/libs/utils/result.h"
+#include "host/commands/run_cvd/reporting.h"
+#include "host/libs/config/command_source.h"
+#include "host/libs/config/known_paths.h"
+
+namespace cuttlefish {
+namespace {
+
+class LogcatReceiver : public CommandSource, public DiagnosticInformation {
+ public:
+  INJECT(LogcatReceiver(const CuttlefishConfig::InstanceSpecific& instance))
+      : instance_(instance) {}
+  // DiagnosticInformation
+  std::vector<std::string> Diagnostics() const override {
+    return {"Logcat output: " + instance_.logcat_path()};
+  }
+
+  // CommandSource
+  Result<std::vector<MonitorCommand>> Commands() override {
+    Command command(LogcatReceiverBinary());
+    command.AddParameter("-log_pipe_fd=", pipe_);
+    std::vector<MonitorCommand> commands;
+    commands.emplace_back(std::move(command));
+    return commands;
+  }
+
+  // SetupFeature
+  std::string Name() const override { return "LogcatReceiver"; }
+  bool Enabled() const override { return true; }
+
+ private:
+  std::unordered_set<SetupFeature*> Dependencies() const override { return {}; }
+  Result<void> ResultSetup() override {
+    auto log_name = instance_.logcat_pipe_name();
+    CF_EXPECT(mkfifo(log_name.c_str(), 0600) == 0,
+              "Unable to create named pipe at " << log_name << ": "
+                                                << strerror(errno));
+    // Open the pipe here (from the launcher) to ensure the pipe is not deleted
+    // due to the usage counters in the kernel reaching zero. If this is not
+    // done and the logcat_receiver crashes for some reason the VMM may get
+    // SIGPIPE.
+    pipe_ = SharedFD::Open(log_name.c_str(), O_RDWR);
+    CF_EXPECT(pipe_->IsOpen(),
+              "Can't open \"" << log_name << "\": " << pipe_->StrError());
+    return {};
+  }
+
+  const CuttlefishConfig::InstanceSpecific& instance_;
+  SharedFD pipe_;
+};
+
+}  // namespace
+
+fruit::Component<fruit::Required<const CuttlefishConfig::InstanceSpecific>>
+LogcatReceiverComponent() {
+  return fruit::createComponent()
+      .addMultibinding<CommandSource, LogcatReceiver>()
+      .addMultibinding<SetupFeature, LogcatReceiver>()
+      .addMultibinding<DiagnosticInformation, LogcatReceiver>();
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/run_cvd/launch/metrics.cpp b/host/commands/run_cvd/launch/metrics.cpp
new file mode 100644
index 0000000..397b35b
--- /dev/null
+++ b/host/commands/run_cvd/launch/metrics.cpp
@@ -0,0 +1,67 @@
+//
+// Copyright (C) 2019 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "host/commands/run_cvd/launch/launch.h"
+
+#include <string>
+#include <unordered_set>
+#include <utility>
+#include <vector>
+
+#include <fruit/fruit.h>
+
+#include "common/libs/utils/result.h"
+#include "host/libs/config/command_source.h"
+#include "host/libs/config/known_paths.h"
+
+namespace cuttlefish {
+namespace {
+
+class MetricsService : public CommandSource {
+ public:
+  INJECT(MetricsService(const CuttlefishConfig& config)) : config_(config) {}
+
+  // CommandSource
+  Result<std::vector<MonitorCommand>> Commands() override {
+    Command command(MetricsBinary());
+    std::vector<MonitorCommand> commands;
+    commands.emplace_back(std::move(command));
+    return commands;
+  }
+
+  // SetupFeature
+  std::string Name() const override { return "MetricsService"; }
+  bool Enabled() const override {
+    return config_.enable_metrics() == CuttlefishConfig::kYes;
+  }
+
+ private:
+  std::unordered_set<SetupFeature*> Dependencies() const override { return {}; }
+  bool Setup() override { return true; }
+
+ private:
+  const CuttlefishConfig& config_;
+};
+
+}  // namespace
+
+fruit::Component<fruit::Required<const CuttlefishConfig>>
+MetricsServiceComponent() {
+  return fruit::createComponent()
+      .addMultibinding<CommandSource, MetricsService>()
+      .addMultibinding<SetupFeature, MetricsService>();
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/run_cvd/launch/modem.cpp b/host/commands/run_cvd/launch/modem.cpp
new file mode 100644
index 0000000..39b8b58
--- /dev/null
+++ b/host/commands/run_cvd/launch/modem.cpp
@@ -0,0 +1,144 @@
+//
+// Copyright (C) 2019 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "host/commands/run_cvd/launch/launch.h"
+
+#include <string.h>
+
+#include <sstream>
+#include <string>
+#include <unordered_set>
+#include <utility>
+#include <vector>
+
+#include <android-base/logging.h>
+#include <fruit/fruit.h>
+
+#include "common/libs/fs/shared_fd.h"
+#include "common/libs/utils/result.h"
+#include "common/libs/utils/subprocess.h"
+#include "host/libs/config/command_source.h"
+#include "host/libs/config/cuttlefish_config.h"
+#include "host/libs/config/known_paths.h"
+
+namespace cuttlefish {
+
+static bool StopModemSimulator(int id) {
+  std::string socket_name = "modem_simulator" + std::to_string(id);
+  auto monitor_sock =
+      SharedFD::SocketLocalClient(socket_name, true, SOCK_STREAM);
+  if (!monitor_sock->IsOpen()) {
+    LOG(ERROR) << "The connection to modem simulator is closed";
+    return false;
+  }
+  std::string msg("STOP");
+  if (monitor_sock->Write(msg.data(), msg.size()) < 0) {
+    monitor_sock->Close();
+    LOG(ERROR) << "Failed to send 'STOP' to modem simulator";
+    return false;
+  }
+  char buf[64] = {0};
+  if (monitor_sock->Read(buf, sizeof(buf)) <= 0) {
+    monitor_sock->Close();
+    LOG(ERROR) << "Failed to read message from modem simulator";
+    return false;
+  }
+  if (strcmp(buf, "OK")) {
+    monitor_sock->Close();
+    LOG(ERROR) << "Read '" << buf << "' instead of 'OK' from modem simulator";
+    return false;
+  }
+
+  return true;
+}
+
+class ModemSimulator : public CommandSource {
+ public:
+  INJECT(ModemSimulator(const CuttlefishConfig::InstanceSpecific& instance))
+      : instance_(instance) {}
+
+  // CommandSource
+  Result<std::vector<MonitorCommand>> Commands() override {
+    Command cmd(ModemSimulatorBinary(), [this](Subprocess* proc) {
+      auto stopped = StopModemSimulator(instance_.modem_simulator_host_id());
+      if (stopped) {
+        return StopperResult::kStopSuccess;
+      }
+      LOG(WARNING) << "Failed to stop modem simulator nicely, "
+                   << "attempting to KILL";
+      return KillSubprocess(proc) == StopperResult::kStopSuccess
+                 ? StopperResult::kStopCrash
+                 : StopperResult::kStopFailure;
+    });
+
+    auto sim_type = instance_.modem_simulator_sim_type();
+    cmd.AddParameter(std::string{"-sim_type="} + std::to_string(sim_type));
+    cmd.AddParameter("-server_fds=");
+    bool first_socket = true;
+    for (const auto& socket : sockets_) {
+      if (!first_socket) {
+        cmd.AppendToLastParameter(",");
+      }
+      cmd.AppendToLastParameter(socket);
+      first_socket = false;
+    }
+
+    std::vector<MonitorCommand> commands;
+    commands.emplace_back(std::move(cmd));
+    return commands;
+  }
+
+  // SetupFeature
+  std::string Name() const override { return "ModemSimulator"; }
+  bool Enabled() const override {
+    if (!instance_.enable_modem_simulator()) {
+      LOG(DEBUG) << "Modem simulator not enabled";
+    }
+    return instance_.enable_modem_simulator();
+  }
+
+ private:
+  std::unordered_set<SetupFeature*> Dependencies() const override { return {}; }
+  Result<void> ResultSetup() override {
+    int instance_number = instance_.modem_simulator_instance_number();
+    CF_EXPECT(instance_number >= 0 && instance_number < 4,
+              "Modem simulator instance number should range between 0 and 3");
+    auto ports = instance_.modem_simulator_ports();
+    for (int i = 0; i < instance_number; ++i) {
+      auto pos = ports.find(',');
+      auto temp = (pos != std::string::npos) ? ports.substr(0, pos) : ports;
+      auto port = std::stoi(temp);
+      ports = ports.substr(pos + 1);
+
+      auto modem_sim_socket = SharedFD::VsockServer(port, SOCK_STREAM);
+      CF_EXPECT(modem_sim_socket->IsOpen(), modem_sim_socket->StrError());
+      sockets_.emplace_back(std::move(modem_sim_socket));
+    }
+    return {};
+  }
+
+  const CuttlefishConfig::InstanceSpecific& instance_;
+  std::vector<SharedFD> sockets_;
+};
+
+fruit::Component<fruit::Required<const CuttlefishConfig,
+                                 const CuttlefishConfig::InstanceSpecific>>
+launchModemComponent() {
+  return fruit::createComponent()
+      .addMultibinding<CommandSource, ModemSimulator>()
+      .addMultibinding<SetupFeature, ModemSimulator>();
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/run_cvd/launch/netsim_server.cpp b/host/commands/run_cvd/launch/netsim_server.cpp
new file mode 100644
index 0000000..87a08c4
--- /dev/null
+++ b/host/commands/run_cvd/launch/netsim_server.cpp
@@ -0,0 +1,180 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "host/commands/run_cvd/launch/launch.h"
+
+#include <string>
+#include <unordered_set>
+#include <utility>
+#include <vector>
+
+#include <fruit/fruit.h>
+
+#include "common/libs/fs/shared_fd.h"
+#include "common/libs/utils/files.h"
+#include "common/libs/utils/result.h"
+#include "host/libs/config/command_source.h"
+#include "host/libs/config/known_paths.h"
+
+namespace cuttlefish {
+namespace {
+
+// NetsimServer launches netsim server with fifos for radio HALs.
+//
+// netsimd -s '{devices:[
+//  {"name":"0.0.0.0:5000", "chips":[
+//    {"kind":"BLUETOOTH", "fdIn":10, "fdOut":11}]},
+//  {"name":"0.0.0.0:5010", "chips":[
+//    {"kind":"BLUETOOTH", "fdIn":14, "fdOut":15}]}]}
+
+// Chip and Device classes pass SharedFD fifos between ResultSetup and Commands
+// and format the netsim json command line.
+
+class Chip {
+ public:
+  SharedFD fd_in;
+  SharedFD fd_out;
+
+  Chip(std::string kind) : kind_(kind) {}
+
+  // Append the chip information as Json to the command.
+  void Append(Command& c) const {
+    c.AppendToLastParameter(R"({"kind":")", kind_, R"(","fdIn":)", fd_in,
+                            R"(,"fdOut":)", fd_out, "}");
+  }
+
+ private:
+  std::string kind_;
+};
+
+class Device {
+ public:
+  Device(std::string name) : name_(name) {}
+
+  void Append(Command& c) const {
+    c.AppendToLastParameter(R"({"name":")", name_, R"(","chips":[)");
+    for (int i = 0; i < chips.size(); ++i) {
+      chips[i].Append(c);
+      if (chips.size() - i > 1) {
+        c.AppendToLastParameter(",");
+      }
+    }
+    c.AppendToLastParameter("]}");
+  }
+
+  std::vector<Chip> chips;
+
+ private:
+  std::string name_;
+};
+
+class NetsimServer : public CommandSource {
+ public:
+  INJECT(NetsimServer(const CuttlefishConfig& config,
+                      const CuttlefishConfig::InstanceSpecific& instance))
+      : config_(config), instance_(instance) {}
+
+  // CommandSource
+  Result<std::vector<MonitorCommand>> Commands() override {
+    Command cmd(NetsimdBinary());
+    cmd.AddParameter("-s");
+    AddDevicesParameter(cmd);
+    // Release SharedFDs, they've been duped by Command
+    devices_.clear();
+    // Bluetooth controller properties file
+    cmd.AddParameter("--rootcanal_controller_properties_file=",
+                     config_.rootcanal_config_file());
+    // Default commands file
+    cmd.AddParameter("--rootcanal_default_commands_file=",
+                     config_.rootcanal_default_commands_file());
+    std::vector<MonitorCommand> commands;
+    commands.emplace_back(std::move(cmd));
+    return commands;
+  }
+
+  // Convert devices_ to json for netsimd -s <arg>. The devices_, created and
+  // validated during ResultSetup, contains all the SharedFDs and meta-data.
+
+  void AddDevicesParameter(Command& c) {
+    c.AddParameter(R"({"devices":[)");
+    for (int i = 0; i < devices_.size(); ++i) {
+      devices_[i].Append(c);
+      if (devices_.size() - i > 1) {
+        c.AppendToLastParameter(",");
+      }
+    }
+    c.AppendToLastParameter("]}");
+  }
+
+  // SetupFeature
+  std::string Name() const override { return "Netsim"; }
+  bool Enabled() const override { return instance_.start_netsim(); }
+
+ private:
+  std::unordered_set<SetupFeature*> Dependencies() const override { return {}; }
+
+  Result<void> ResultSetup() {
+    auto netsimd = HostBinaryPath("netsimd");
+    CF_EXPECT(FileExists(netsimd),
+              "Failed to find netsimd binary: " << netsimd);
+
+    for (const auto& instance : config_.Instances()) {
+      Device device(instance.adb_ip_and_port());
+      // Add bluetooth chip if enabled
+      if (config_.netsim_radio_enabled(
+              CuttlefishConfig::NetsimRadio::Bluetooth)) {
+        Chip chip("BLUETOOTH");
+        CF_EXPECT(MakeFifo(instance, "bt_fifo_vm.in", chip.fd_in));
+        CF_EXPECT(MakeFifo(instance, "bt_fifo_vm.out", chip.fd_out));
+        device.chips.emplace_back(chip);
+      }
+      // Add other chips if enabled
+      devices_.emplace_back(device);
+    }
+    return {};
+  }
+
+  Result<void> MakeFifo(const CuttlefishConfig::InstanceSpecific& instance,
+                        const char* relative_path, SharedFD& fd) {
+    auto path = instance.PerInstanceInternalPath(relative_path);
+    unlink(path.c_str());
+    CF_EXPECT(mkfifo(path.c_str(), 0660) == 0,
+              "Failed to create fifo for Netsim: " << strerror(errno));
+
+    fd = SharedFD::Open(path, O_RDWR);
+
+    CF_EXPECT(fd->IsOpen(),
+              "Failed to open fifo for Netsim: " << fd->StrError());
+
+    return {};
+  }
+
+ private:
+  std::vector<Device> devices_;
+  const CuttlefishConfig& config_;
+  const CuttlefishConfig::InstanceSpecific& instance_;
+};
+
+}  // namespace
+
+fruit::Component<fruit::Required<const CuttlefishConfig,
+                                 const CuttlefishConfig::InstanceSpecific>>
+NetsimServerComponent() {
+  return fruit::createComponent()
+      .addMultibinding<CommandSource, NetsimServer>()
+      .addMultibinding<SetupFeature, NetsimServer>();
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/run_cvd/launch/open_wrt.cpp b/host/commands/run_cvd/launch/open_wrt.cpp
new file mode 100644
index 0000000..49f5924
--- /dev/null
+++ b/host/commands/run_cvd/launch/open_wrt.cpp
@@ -0,0 +1,135 @@
+//
+// Copyright (C) 2019 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "host/commands/run_cvd/launch/launch.h"
+
+#include <string>
+#include <unordered_set>
+#include <utility>
+#include <vector>
+
+#include <android-base/logging.h>
+#include <fruit/fruit.h>
+
+#include "common/libs/utils/files.h"
+#include "common/libs/utils/network.h"
+#include "common/libs/utils/result.h"
+#include "host/libs/config/command_source.h"
+#include "host/libs/config/known_paths.h"
+#include "host/libs/config/openwrt_args.h"
+#include "host/libs/vm_manager/crosvm_builder.h"
+#include "host/libs/vm_manager/crosvm_manager.h"
+
+namespace cuttlefish {
+namespace {
+
+using APBootFlow = CuttlefishConfig::InstanceSpecific::APBootFlow;
+
+class OpenWrt : public CommandSource {
+ public:
+  INJECT(OpenWrt(const CuttlefishConfig& config,
+                 const CuttlefishConfig::InstanceSpecific& instance,
+                 LogTeeCreator& log_tee))
+      : config_(config), instance_(instance), log_tee_(log_tee) {}
+
+  // CommandSource
+  Result<std::vector<MonitorCommand>> Commands() override {
+    constexpr auto crosvm_for_ap_socket = "ap_control.sock";
+
+    CrosvmBuilder ap_cmd;
+    ap_cmd.ApplyProcessRestarter(instance_.crosvm_binary(),
+                                 kOpenwrtVmResetExitCode);
+    ap_cmd.Cmd().AddParameter("run");
+    ap_cmd.AddControlSocket(
+        instance_.PerInstanceInternalUdsPath(crosvm_for_ap_socket),
+        instance_.crosvm_binary());
+
+    ap_cmd.Cmd().AddParameter("--core-scheduling=false");
+
+    if (!config_.vhost_user_mac80211_hwsim().empty()) {
+      ap_cmd.Cmd().AddParameter("--vhost-user-mac80211-hwsim=",
+                                config_.vhost_user_mac80211_hwsim());
+    }
+    SharedFD wifi_tap = ap_cmd.AddTap(instance_.wifi_tap_name());
+    if (instance_.enable_sandbox()) {
+      ap_cmd.Cmd().AddParameter("--seccomp-policy-dir=",
+                                instance_.seccomp_policy_dir());
+    } else {
+      ap_cmd.Cmd().AddParameter("--disable-sandbox");
+    }
+    ap_cmd.AddReadWriteDisk(instance_.PerInstancePath("ap_overlay.img"));
+
+    auto boot_logs_path =
+        instance_.PerInstanceLogPath("crosvm_openwrt_boot.log");
+    auto logs_path = instance_.PerInstanceLogPath("crosvm_openwrt.log");
+    ap_cmd.AddSerialConsoleReadOnly(boot_logs_path);
+    ap_cmd.AddHvcReadOnly(logs_path);
+
+    auto openwrt_args = OpenwrtArgsFromConfig(instance_);
+    switch (instance_.ap_boot_flow()) {
+      case APBootFlow::Grub:
+        ap_cmd.AddReadWriteDisk(instance_.persistent_ap_composite_disk_path());
+        ap_cmd.Cmd().AddParameter("--bios=", instance_.bootloader());
+        break;
+      case APBootFlow::LegacyDirect:
+        ap_cmd.Cmd().AddParameter("--params=\"root=/dev/vda1\"");
+        for (auto& openwrt_arg : openwrt_args) {
+          ap_cmd.Cmd().AddParameter("--params=" + openwrt_arg.first + "=" +
+                                    openwrt_arg.second);
+        }
+        ap_cmd.Cmd().AddParameter(config_.ap_kernel_image());
+        break;
+      default:
+        // must not be happened
+        break;
+    }
+
+    std::vector<MonitorCommand> commands;
+    commands.emplace_back(
+        std::move(log_tee_.CreateLogTee(ap_cmd.Cmd(), "openwrt")));
+    commands.emplace_back(std::move(ap_cmd.Cmd()));
+    return commands;
+  }
+
+  // SetupFeature
+  std::string Name() const override { return "OpenWrt"; }
+  bool Enabled() const override {
+    return instance_.ap_boot_flow() != APBootFlow::None &&
+           config_.vm_manager() == vm_manager::CrosvmManager::name();
+  }
+
+ private:
+  std::unordered_set<SetupFeature*> Dependencies() const override { return {}; }
+  bool Setup() override { return true; }
+
+  const CuttlefishConfig& config_;
+  const CuttlefishConfig::InstanceSpecific& instance_;
+  LogTeeCreator& log_tee_;
+
+  static constexpr int kOpenwrtVmResetExitCode = 32;
+};
+
+}  // namespace
+
+fruit::Component<
+    fruit::Required<const CuttlefishConfig,
+                    const CuttlefishConfig::InstanceSpecific, LogTeeCreator>>
+OpenWrtComponent() {
+  return fruit::createComponent()
+      .addMultibinding<CommandSource, OpenWrt>()
+      .addMultibinding<SetupFeature, OpenWrt>();
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/run_cvd/launch/openwrt_control_server.cpp b/host/commands/run_cvd/launch/openwrt_control_server.cpp
new file mode 100644
index 0000000..c875b58
--- /dev/null
+++ b/host/commands/run_cvd/launch/openwrt_control_server.cpp
@@ -0,0 +1,82 @@
+//
+// Copyright (C) 2023 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "host/commands/run_cvd/launch/launch.h"
+
+#include <string>
+#include <unordered_set>
+#include <utility>
+#include <vector>
+
+#include <fruit/fruit.h>
+
+#include "common/libs/utils/files.h"
+#include "common/libs/utils/result.h"
+#include "host/commands/run_cvd/launch/grpc_socket_creator.h"
+#include "host/libs/config/command_source.h"
+#include "host/libs/config/known_paths.h"
+
+namespace cuttlefish {
+namespace {
+
+class OpenwrtControlServer : public CommandSource {
+ public:
+  INJECT(
+      OpenwrtControlServer(const CuttlefishConfig::InstanceSpecific& instance,
+                           GrpcSocketCreator& grpc_socket))
+      : instance_(instance), grpc_socket_(grpc_socket) {}
+
+  // CommandSource
+  Result<std::vector<MonitorCommand>> Commands() override {
+    Command openwrt_control_server_cmd(OpenwrtControlServerBinary());
+    openwrt_control_server_cmd.AddParameter(
+        "--grpc_uds_path=", grpc_socket_.CreateGrpcSocket(Name()));
+    openwrt_control_server_cmd.AddParameter(
+        "--bridged_wifi_tap=",
+        std::to_string(instance_.use_bridged_wifi_tap()));
+    openwrt_control_server_cmd.AddParameter("--launcher_log_path=",
+                                            instance_.launcher_log_path());
+    openwrt_control_server_cmd.AddParameter(
+        "--openwrt_log_path=",
+        AbsolutePath(instance_.PerInstanceLogPath("crosvm_openwrt.log")));
+
+    std::vector<MonitorCommand> commands;
+    commands.emplace_back(std::move(openwrt_control_server_cmd));
+    return commands;
+  }
+
+  // SetupFeature
+  std::string Name() const override { return "OpenwrtControlServer"; }
+  bool Enabled() const override { return true; }
+
+ private:
+  std::unordered_set<SetupFeature*> Dependencies() const override { return {}; }
+  bool Setup() override { return true; }
+
+  const CuttlefishConfig::InstanceSpecific& instance_;
+  GrpcSocketCreator& grpc_socket_;
+};
+
+}  // namespace
+
+fruit::Component<fruit::Required<const CuttlefishConfig::InstanceSpecific,
+                                 GrpcSocketCreator>>
+OpenwrtControlServerComponent() {
+  return fruit::createComponent()
+      .addMultibinding<CommandSource, OpenwrtControlServer>()
+      .addMultibinding<SetupFeature, OpenwrtControlServer>();
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/run_cvd/launch/pica.cpp b/host/commands/run_cvd/launch/pica.cpp
new file mode 100644
index 0000000..7352261
--- /dev/null
+++ b/host/commands/run_cvd/launch/pica.cpp
@@ -0,0 +1,78 @@
+//
+// Copyright (C) 2023 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "host/commands/run_cvd/launch/launch.h"
+
+#include <unordered_set>
+#include <vector>
+
+#include "host/commands/run_cvd/launch/log_tee_creator.h"
+#include "host/libs/config/cuttlefish_config.h"
+#include "host/libs/config/known_paths.h"
+
+namespace cuttlefish {
+namespace {
+
+class Pica : public CommandSource {
+ public:
+  INJECT(Pica(const CuttlefishConfig& config,
+                   const CuttlefishConfig::InstanceSpecific& instance,
+                   LogTeeCreator& log_tee))
+      : config_(config), instance_(instance), log_tee_(log_tee) {}
+
+  // CommandSource
+  Result<std::vector<MonitorCommand>> Commands() override {
+    if (!Enabled()) {
+      return {};
+    }
+    Command command(PicaBinary());
+
+    // UCI server port
+    command.AddParameter("--uci-port=", config_.pica_uci_port());
+
+
+    std::vector<MonitorCommand> commands;
+    commands.emplace_back(std::move(log_tee_.CreateLogTee(command, "pica")));
+    commands.emplace_back(std::move(command));
+    return commands;
+  }
+
+  // SetupFeature
+  std::string Name() const override { return "Pica"; }
+  bool Enabled() const override {
+    return config_.enable_host_uwb_connector() && instance_.start_pica();
+  }
+
+ private:
+  std::unordered_set<SetupFeature*> Dependencies() const override { return {}; }
+  bool Setup() override { return true; }
+
+  const CuttlefishConfig& config_;
+  const CuttlefishConfig::InstanceSpecific& instance_;
+  LogTeeCreator& log_tee_;
+};
+
+}  // namespace
+
+fruit::Component<
+    fruit::Required<const CuttlefishConfig,
+                    const CuttlefishConfig::InstanceSpecific, LogTeeCreator>>
+PicaComponent() {
+  return fruit::createComponent()
+      .addMultibinding<CommandSource, Pica>()
+      .addMultibinding<SetupFeature, Pica>();
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/run_cvd/launch/root_canal.cpp b/host/commands/run_cvd/launch/root_canal.cpp
new file mode 100644
index 0000000..d982c40
--- /dev/null
+++ b/host/commands/run_cvd/launch/root_canal.cpp
@@ -0,0 +1,125 @@
+//
+// Copyright (C) 2019 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "host/commands/run_cvd/launch/launch.h"
+
+#include <string>
+#include <unordered_set>
+#include <utility>
+#include <vector>
+
+#include <fruit/fruit.h>
+
+#include "common/libs/utils/result.h"
+#include "host/commands/run_cvd/launch/log_tee_creator.h"
+#include "host/libs/config/command_source.h"
+#include "host/libs/config/cuttlefish_config.h"
+#include "host/libs/config/known_paths.h"
+
+namespace cuttlefish {
+namespace {
+
+class RootCanal : public CommandSource {
+ public:
+  INJECT(RootCanal(const CuttlefishConfig& config,
+                   const CuttlefishConfig::InstanceSpecific& instance,
+                   LogTeeCreator& log_tee))
+      : config_(config), instance_(instance), log_tee_(log_tee) {}
+
+  // CommandSource
+  Result<std::vector<MonitorCommand>> Commands() override {
+    // Create the root-canal command with the process_restarter
+    // as runner to restart root-canal when it crashes.
+    Command rootcanal(ProcessRestarterBinary());
+    rootcanal.AddParameter("-when_killed");
+    rootcanal.AddParameter("-when_dumped");
+    rootcanal.AddParameter("-when_exited_with_failure");
+    rootcanal.AddParameter("--");
+    rootcanal.AddParameter(RootCanalBinary());
+
+    // Port configuration.
+    rootcanal.AddParameter("--test_port=", config_.rootcanal_test_port());
+    rootcanal.AddParameter("--hci_port=", config_.rootcanal_hci_port());
+    rootcanal.AddParameter("--link_port=", config_.rootcanal_link_port());
+    rootcanal.AddParameter("--link_ble_port=",
+                           config_.rootcanal_link_ble_port());
+
+    // Bluetooth configuration.
+    rootcanal.AddParameter("--controller_properties_file=",
+                           config_.rootcanal_config_file());
+    rootcanal.AddParameter("--default_commands_file=",
+                           config_.rootcanal_default_commands_file());
+
+    // Add parameters from passthrough option --rootcanal-args
+    for (auto const& arg : config_.rootcanal_args()) {
+      rootcanal.AddParameter(arg);
+    }
+
+    // Add command for forwarding the HCI port to a vsock server.
+    Command hci_vsock_proxy(SocketVsockProxyBinary());
+    hci_vsock_proxy.AddParameter("--server_type=vsock");
+    hci_vsock_proxy.AddParameter("--server_vsock_port=",
+                                 config_.rootcanal_hci_port());
+    hci_vsock_proxy.AddParameter("--client_type=tcp");
+    hci_vsock_proxy.AddParameter("--client_tcp_host=127.0.0.1");
+    hci_vsock_proxy.AddParameter("--client_tcp_port=",
+                                 config_.rootcanal_hci_port());
+
+    // Add command for forwarding the test port to a vsock server.
+    Command test_vsock_proxy(SocketVsockProxyBinary());
+    test_vsock_proxy.AddParameter("--server_type=vsock");
+    test_vsock_proxy.AddParameter("--server_vsock_port=",
+                                  config_.rootcanal_test_port());
+    test_vsock_proxy.AddParameter("--client_type=tcp");
+    test_vsock_proxy.AddParameter("--client_tcp_host=127.0.0.1");
+    test_vsock_proxy.AddParameter("--client_tcp_port=",
+                                  config_.rootcanal_test_port());
+
+    std::vector<MonitorCommand> commands;
+    commands.emplace_back(
+        std::move(log_tee_.CreateLogTee(rootcanal, "rootcanal")));
+    commands.emplace_back(std::move(rootcanal));
+    commands.emplace_back(std::move(hci_vsock_proxy));
+    commands.emplace_back(std::move(test_vsock_proxy));
+    return commands;
+  }
+
+  // SetupFeature
+  std::string Name() const override { return "RootCanal"; }
+  bool Enabled() const override {
+    return config_.enable_host_bluetooth_connector() && instance_.start_rootcanal();
+  }
+
+ private:
+  std::unordered_set<SetupFeature*> Dependencies() const override { return {}; }
+  bool Setup() override { return true; }
+
+  const CuttlefishConfig& config_;
+  const CuttlefishConfig::InstanceSpecific& instance_;
+  LogTeeCreator& log_tee_;
+};
+
+}  // namespace
+
+fruit::Component<
+    fruit::Required<const CuttlefishConfig,
+                    const CuttlefishConfig::InstanceSpecific, LogTeeCreator>>
+RootCanalComponent() {
+  return fruit::createComponent()
+      .addMultibinding<CommandSource, RootCanal>()
+      .addMultibinding<SetupFeature, RootCanal>();
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/run_cvd/launch/secure_env.cpp b/host/commands/run_cvd/launch/secure_env.cpp
new file mode 100644
index 0000000..b5e0cf8
--- /dev/null
+++ b/host/commands/run_cvd/launch/secure_env.cpp
@@ -0,0 +1,125 @@
+//
+// Copyright (C) 2019 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "host/commands/run_cvd/launch/launch.h"
+
+#include <string>
+#include <unordered_set>
+#include <utility>
+#include <vector>
+
+#include <fruit/fruit.h>
+
+#include "common/libs/utils/result.h"
+#include "host/libs/config/command_source.h"
+#include "host/libs/config/known_paths.h"
+
+namespace cuttlefish {
+namespace {
+
+class SecureEnvironment : public CommandSource, public KernelLogPipeConsumer {
+ public:
+  INJECT(SecureEnvironment(const CuttlefishConfig& config,
+                           const CuttlefishConfig::InstanceSpecific& instance,
+                           KernelLogPipeProvider& kernel_log_pipe_provider))
+      : config_(config),
+        instance_(instance),
+        kernel_log_pipe_provider_(kernel_log_pipe_provider) {}
+
+  // CommandSource
+  Result<std::vector<MonitorCommand>> Commands() override {
+    Command command(SecureEnvBinary());
+    command.AddParameter("-confui_server_fd=", confui_server_fd_);
+    command.AddParameter("-keymaster_fd_out=", fifos_[0]);
+    command.AddParameter("-keymaster_fd_in=", fifos_[1]);
+    command.AddParameter("-gatekeeper_fd_out=", fifos_[2]);
+    command.AddParameter("-gatekeeper_fd_in=", fifos_[3]);
+
+    const auto& secure_hals = config_.secure_hals();
+    bool secure_keymint = secure_hals.count(SecureHal::Keymint) > 0;
+#ifdef CUTTLEFISH_KEYMINT_RUST
+    command.AddParameter("-keymint_impl=",
+                         secure_keymint ? "rust-tpm" : "rust-software");
+#else
+    command.AddParameter("-keymint_impl=", secure_keymint ? "tpm" : "software");
+#endif
+    bool secure_gatekeeper = secure_hals.count(SecureHal::Gatekeeper) > 0;
+    auto gatekeeper_impl = secure_gatekeeper ? "tpm" : "software";
+    command.AddParameter("-gatekeeper_impl=", gatekeeper_impl);
+    command.AddParameter("-kernel_events_fd=", kernel_log_pipe_);
+
+    std::vector<MonitorCommand> commands;
+    commands.emplace_back(std::move(command));
+    return commands;
+  }
+
+  // SetupFeature
+  std::string Name() const override { return "SecureEnvironment"; }
+  bool Enabled() const override { return true; }
+
+ private:
+  std::unordered_set<SetupFeature*> Dependencies() const override {
+    return {&kernel_log_pipe_provider_};
+  }
+  Result<void> ResultSetup() override {
+    std::vector<std::string> fifo_paths = {
+        instance_.PerInstanceInternalPath("keymaster_fifo_vm.in"),
+        instance_.PerInstanceInternalPath("keymaster_fifo_vm.out"),
+        instance_.PerInstanceInternalPath("gatekeeper_fifo_vm.in"),
+        instance_.PerInstanceInternalPath("gatekeeper_fifo_vm.out"),
+    };
+    std::vector<SharedFD> fifos;
+    for (const auto& path : fifo_paths) {
+      unlink(path.c_str());
+      CF_EXPECT(mkfifo(path.c_str(), 0660) == 0, "Could not create " << path);
+      auto fd = SharedFD::Open(path, O_RDWR);
+      CF_EXPECT(fd->IsOpen(),
+                "Could not open " << path << ": " << fd->StrError());
+      fifos_.push_back(fd);
+    }
+
+    auto confui_socket_path =
+        instance_.PerInstanceInternalUdsPath("confui_sign.sock");
+    confui_server_fd_ = SharedFD::SocketLocalServer(confui_socket_path, false,
+                                                    SOCK_STREAM, 0600);
+    CF_EXPECT(confui_server_fd_->IsOpen(),
+              "Could not open " << confui_socket_path << ": "
+                                << confui_server_fd_->StrError());
+    kernel_log_pipe_ = kernel_log_pipe_provider_.KernelLogPipe();
+
+    return {};
+  }
+
+  const CuttlefishConfig& config_;
+  const CuttlefishConfig::InstanceSpecific& instance_;
+  SharedFD confui_server_fd_;
+  std::vector<SharedFD> fifos_;
+  KernelLogPipeProvider& kernel_log_pipe_provider_;
+  SharedFD kernel_log_pipe_;
+};
+
+}  // namespace
+
+fruit::Component<fruit::Required<const CuttlefishConfig,
+                                 const CuttlefishConfig::InstanceSpecific,
+                                 KernelLogPipeProvider>>
+SecureEnvComponent() {
+  return fruit::createComponent()
+      .addMultibinding<CommandSource, SecureEnvironment>()
+      .addMultibinding<KernelLogPipeConsumer, SecureEnvironment>()
+      .addMultibinding<SetupFeature, SecureEnvironment>();
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/run_cvd/launch/streamer.cpp b/host/commands/run_cvd/launch/streamer.cpp
new file mode 100644
index 0000000..fdc47c8
--- /dev/null
+++ b/host/commands/run_cvd/launch/streamer.cpp
@@ -0,0 +1,336 @@
+//
+// Copyright (C) 2019 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "host/commands/run_cvd/launch/launch.h"
+
+#include <sstream>
+#include <string>
+#include <unordered_set>
+#include <utility>
+#include <vector>
+
+#include <android-base/logging.h>
+#include <fruit/fruit.h>
+
+#include "common/libs/fs/shared_buf.h"
+#include "common/libs/fs/shared_fd.h"
+#include "common/libs/utils/files.h"
+#include "common/libs/utils/result.h"
+#include "host/commands/run_cvd/reporting.h"
+#include "host/libs/config/command_source.h"
+#include "host/libs/config/cuttlefish_config.h"
+#include "host/libs/config/known_paths.h"
+#include "host/libs/vm_manager/crosvm_manager.h"
+#include "host/libs/vm_manager/qemu_manager.h"
+
+namespace cuttlefish {
+
+namespace {
+
+SharedFD CreateUnixInputServer(const std::string& path) {
+  auto server =
+      SharedFD::SocketLocalServer(path.c_str(), false, SOCK_STREAM, 0666);
+  if (!server->IsOpen()) {
+    LOG(ERROR) << "Unable to create unix input server: " << server->StrError();
+    return {};
+  }
+  return server;
+}
+
+std::vector<Command> LaunchCustomActionServers(
+    Command& webrtc_cmd,
+    const std::vector<CustomActionServerConfig>& custom_actions) {
+  bool first = true;
+  std::vector<Command> commands;
+  for (const auto& custom_action : custom_actions) {
+    // Create a socket pair that will be used for communication between
+    // WebRTC and the action server.
+    SharedFD webrtc_socket, action_server_socket;
+    if (!SharedFD::SocketPair(AF_LOCAL, SOCK_STREAM, 0, &webrtc_socket,
+          &action_server_socket)) {
+      LOG(ERROR) << "Unable to create custom action server socket pair: "
+        << strerror(errno);
+      continue;
+    }
+
+    // Launch the action server, providing its socket pair fd as the only
+    // argument.
+    auto binary = HostBinaryPath(custom_action.server);
+    Command command(binary);
+    command.AddParameter(action_server_socket);
+    commands.emplace_back(std::move(command));
+
+    // Pass the WebRTC socket pair fd to WebRTC.
+    if (first) {
+      first = false;
+      webrtc_cmd.AddParameter("-action_servers=", custom_action.server, ":",
+          webrtc_socket);
+    } else {
+      webrtc_cmd.AppendToLastParameter(",", custom_action.server, ":",
+          webrtc_socket);
+    }
+  }
+  return commands;
+}
+
+// Creates the frame and input sockets and adds the relevant arguments to
+// the webrtc commands.
+class StreamerSockets : public virtual SetupFeature {
+ public:
+  INJECT(StreamerSockets(const CuttlefishConfig& config,
+                         const CuttlefishConfig::InstanceSpecific& instance))
+      : config_(config), instance_(instance) {}
+
+  void AppendCommandArguments(Command& cmd) {
+    if (config_.vm_manager() == vm_manager::QemuManager::name()) {
+      cmd.AddParameter("-write_virtio_input");
+    }
+    if (!touch_servers_.empty()) {
+      cmd.AddParameter("-touch_fds=", touch_servers_[0]);
+      for (int i = 1; i < touch_servers_.size(); ++i) {
+        cmd.AppendToLastParameter(",", touch_servers_[i]);
+      }
+    }
+    cmd.AddParameter("-keyboard_fd=", keyboard_server_);
+    cmd.AddParameter("-frame_server_fd=", frames_server_);
+    if (instance_.enable_audio()) {
+      cmd.AddParameter("--audio_server_fd=", audio_server_);
+    }
+    cmd.AddParameter("--confui_in_fd=", confui_in_fd_);
+    cmd.AddParameter("--confui_out_fd=", confui_out_fd_);
+  }
+
+  // SetupFeature
+  std::string Name() const override { return "StreamerSockets"; }
+  bool Enabled() const override {
+    bool is_qemu = config_.vm_manager() == vm_manager::QemuManager::name();
+    bool is_accelerated = instance_.gpu_mode() != kGpuModeGuestSwiftshader;
+    return !(is_qemu && is_accelerated);
+  }
+
+ private:
+  std::unordered_set<SetupFeature*> Dependencies() const override { return {}; }
+
+  Result<void> ResultSetup() override {
+    auto use_vsockets = config_.vm_manager() == vm_manager::QemuManager::name();
+    for (int i = 0; i < instance_.display_configs().size(); ++i) {
+      SharedFD touch_socket =
+          use_vsockets ? SharedFD::VsockServer(instance_.touch_server_port(),
+                                               SOCK_STREAM)
+                       : CreateUnixInputServer(instance_.touch_socket_path(i));
+      CF_EXPECT(touch_socket->IsOpen(), touch_socket->StrError());
+      touch_servers_.emplace_back(std::move(touch_socket));
+    }
+    keyboard_server_ =
+        use_vsockets ? SharedFD::VsockServer(instance_.keyboard_server_port(),
+                                             SOCK_STREAM)
+                     : CreateUnixInputServer(instance_.keyboard_socket_path());
+    CF_EXPECT(keyboard_server_->IsOpen(), keyboard_server_->StrError());
+
+    frames_server_ = CreateUnixInputServer(instance_.frames_socket_path());
+    CF_EXPECT(frames_server_->IsOpen(), frames_server_->StrError());
+    // TODO(schuffelen): Make this a separate optional feature?
+    if (instance_.enable_audio()) {
+      auto path = config_.ForDefaultInstance().audio_server_path();
+      audio_server_ =
+          SharedFD::SocketLocalServer(path, false, SOCK_SEQPACKET, 0666);
+      CF_EXPECT(audio_server_->IsOpen(), audio_server_->StrError());
+    }
+    AddConfUiFifo();
+    return {};
+  }
+
+  Result<void> AddConfUiFifo() {
+    std::vector<std::string> fifo_files = {
+        instance_.PerInstanceInternalPath("confui_fifo_vm.in"),
+        instance_.PerInstanceInternalPath("confui_fifo_vm.out")};
+    for (const auto& path : fifo_files) {
+      unlink(path.c_str());
+    }
+    std::vector<SharedFD> fds;
+    for (const auto& path : fifo_files) {
+      CF_EXPECT(mkfifo(path.c_str(), 0660) == 0, "Could not create " << path);
+      auto fd = SharedFD::Open(path, O_RDWR);
+      CF_EXPECT(fd->IsOpen(),
+                "Could not open " << path << ": " << fd->StrError());
+      fds.emplace_back(fd);
+    }
+    confui_in_fd_ = fds[0];
+    confui_out_fd_ = fds[1];
+    return {};
+  }
+
+  const CuttlefishConfig& config_;
+  const CuttlefishConfig::InstanceSpecific& instance_;
+  std::vector<SharedFD> touch_servers_;
+  SharedFD keyboard_server_;
+  SharedFD frames_server_;
+  SharedFD audio_server_;
+  SharedFD confui_in_fd_;   // host -> guest
+  SharedFD confui_out_fd_;  // guest -> host
+};
+
+class WebRtcServer : public virtual CommandSource,
+                     public DiagnosticInformation,
+                     public KernelLogPipeConsumer {
+ public:
+  INJECT(WebRtcServer(const CuttlefishConfig& config,
+                      const CuttlefishConfig::InstanceSpecific& instance,
+                      StreamerSockets& sockets,
+                      KernelLogPipeProvider& log_pipe_provider,
+                      const CustomActionConfigProvider& custom_action_config))
+      : config_(config),
+        instance_(instance),
+        sockets_(sockets),
+        log_pipe_provider_(log_pipe_provider),
+        custom_action_config_(custom_action_config) {}
+  // DiagnosticInformation
+  std::vector<std::string> Diagnostics() const override {
+    if (!Enabled() ||
+        !(config_.ForDefaultInstance().start_webrtc_sig_server() ||
+          config_.ForDefaultInstance().start_webrtc_sig_server_proxy())) {
+      // When WebRTC is enabled but an operator other than the one launched
+      // by run_cvd is used, there is no way to know the URL to which to
+      // point the browser.
+      return {};
+    }
+    std::ostringstream out;
+    out << "Point your browser to https://localhost:"
+        << config_.sig_server_port() << " to interact with the device.";
+    return {out.str()};
+  }
+
+  // CommandSource
+  Result<std::vector<MonitorCommand>> Commands() override {
+    std::vector<MonitorCommand> commands;
+    if (instance_.start_webrtc_sig_server()) {
+      Command sig_server(WebRtcSigServerBinary());
+      sig_server.AddParameter("-assets_dir=", instance_.webrtc_assets_dir());
+      sig_server.AddParameter("-use_secure_http=",
+                              config_.sig_server_secure() ? "true" : "false");
+      if (!config_.webrtc_certs_dir().empty()) {
+        sig_server.AddParameter("-certs_dir=", config_.webrtc_certs_dir());
+      }
+      sig_server.AddParameter("-http_server_port=", config_.sig_server_port());
+      commands.emplace_back(std::move(sig_server));
+    }
+
+    if (instance_.start_webrtc_sig_server_proxy()) {
+      Command sig_proxy(WebRtcSigServerProxyBinary());
+      sig_proxy.AddParameter("-server_port=", config_.sig_server_port());
+      commands.emplace_back(std::move(sig_proxy));
+    }
+
+    auto stopper = [host_socket = std::move(host_socket_)](Subprocess* proc) {
+      struct timeval timeout;
+      timeout.tv_sec = 3;
+      timeout.tv_usec = 0;
+      CHECK(host_socket->SetSockOpt(SOL_SOCKET, SO_RCVTIMEO, &timeout,
+                                    sizeof(timeout)) == 0)
+          << "Could not set receive timeout";
+
+      WriteAll(host_socket, "C");
+      char response[1];
+      int read_ret = host_socket->Read(response, sizeof(response));
+      if (read_ret != 0) {
+        LOG(ERROR) << "Failed to read response from webrtc";
+        return KillSubprocess(proc);
+      }
+      return KillSubprocess(proc) == StopperResult::kStopSuccess
+                 ? StopperResult::kStopCrash
+                 : StopperResult::kStopFailure;
+    };
+
+    Command webrtc(WebRtcBinary(), stopper);
+    webrtc.UnsetFromEnvironment("http_proxy");
+    sockets_.AppendCommandArguments(webrtc);
+    if (config_.vm_manager() == vm_manager::CrosvmManager::name()) {
+      webrtc.AddParameter("-switches_fd=", switches_server_);
+    }
+    // Currently there is no way to ensure the signaling server will already
+    // have bound the socket to the port by the time the webrtc process runs
+    // (the common technique of doing it from the launcher is not possible here
+    // as the server library being used creates its own sockets). However, this
+    // issue is mitigated slightly by doing some retrying and backoff in the
+    // webrtc process when connecting to the websocket, so it shouldn't be an
+    // issue most of the time.
+    webrtc.AddParameter("--command_fd=", client_socket_);
+    webrtc.AddParameter("-kernel_log_events_fd=", kernel_log_events_pipe_);
+    webrtc.AddParameter("-client_dir=",
+                        DefaultHostArtifactsPath("usr/share/webrtc/assets"));
+
+    // TODO get from launcher params
+    const auto& actions =
+        custom_action_config_.CustomActionServers(instance_.id());
+    for (auto& action : LaunchCustomActionServers(webrtc, actions)) {
+      commands.emplace_back(std::move(action));
+    }
+    commands.emplace_back(std::move(webrtc));
+    return commands;
+  }
+
+  // SetupFeature
+  bool Enabled() const override {
+    return sockets_.Enabled() && instance_.enable_webrtc();
+  }
+
+ private:
+  std::string Name() const override { return "WebRtcServer"; }
+  std::unordered_set<SetupFeature*> Dependencies() const override {
+    return {static_cast<SetupFeature*>(&sockets_),
+            static_cast<SetupFeature*>(&log_pipe_provider_)};
+  }
+
+  Result<void> ResultSetup() override {
+    CF_EXPECT(SharedFD::SocketPair(AF_LOCAL, SOCK_STREAM, 0, &client_socket_,
+                                   &host_socket_),
+              client_socket_->StrError());
+    if (config_.vm_manager() == vm_manager::CrosvmManager::name()) {
+      switches_server_ =
+          CreateUnixInputServer(instance_.switches_socket_path());
+      CF_EXPECT(switches_server_->IsOpen(), switches_server_->StrError());
+    }
+    kernel_log_events_pipe_ = log_pipe_provider_.KernelLogPipe();
+    CF_EXPECT(kernel_log_events_pipe_->IsOpen(),
+              kernel_log_events_pipe_->StrError());
+    return {};
+  }
+
+  const CuttlefishConfig& config_;
+  const CuttlefishConfig::InstanceSpecific& instance_;
+  StreamerSockets& sockets_;
+  KernelLogPipeProvider& log_pipe_provider_;
+  const CustomActionConfigProvider& custom_action_config_;
+  SharedFD kernel_log_events_pipe_;
+  SharedFD client_socket_;
+  SharedFD host_socket_;
+  SharedFD switches_server_;
+};
+
+}  // namespace
+
+fruit::Component<fruit::Required<const CuttlefishConfig, KernelLogPipeProvider,
+                                 const CuttlefishConfig::InstanceSpecific,
+                                 const CustomActionConfigProvider>>
+launchStreamerComponent() {
+  return fruit::createComponent()
+      .addMultibinding<CommandSource, WebRtcServer>()
+      .addMultibinding<DiagnosticInformation, WebRtcServer>()
+      .addMultibinding<KernelLogPipeConsumer, WebRtcServer>()
+      .addMultibinding<SetupFeature, StreamerSockets>()
+      .addMultibinding<SetupFeature, WebRtcServer>();
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/run_cvd/launch/tombstone_receiver.cpp b/host/commands/run_cvd/launch/tombstone_receiver.cpp
new file mode 100644
index 0000000..f8c62a9
--- /dev/null
+++ b/host/commands/run_cvd/launch/tombstone_receiver.cpp
@@ -0,0 +1,82 @@
+//
+// Copyright (C) 2019 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "host/commands/run_cvd/launch/launch.h"
+
+#include <string>
+#include <unordered_set>
+#include <utility>
+#include <vector>
+
+#include <fruit/fruit.h>
+
+#include "common/libs/utils/files.h"
+#include "common/libs/utils/result.h"
+#include "host/libs/config/command_source.h"
+#include "host/libs/config/known_paths.h"
+
+namespace cuttlefish {
+
+class TombstoneReceiver : public CommandSource {
+ public:
+  INJECT(TombstoneReceiver(const CuttlefishConfig::InstanceSpecific& instance))
+      : instance_(instance) {}
+
+  // CommandSource
+  Result<std::vector<MonitorCommand>> Commands() override {
+    Command command(TombstoneReceiverBinary());
+    command.AddParameter("-server_fd=", socket_);
+    command.AddParameter("-tombstone_dir=", tombstone_dir_);
+    std::vector<MonitorCommand> commands;
+    commands.emplace_back(std::move(command));
+    return commands;
+  }
+
+  // SetupFeature
+  std::string Name() const override { return "TombstoneReceiver"; }
+  bool Enabled() const override { return true; }
+
+ private:
+  std::unordered_set<SetupFeature*> Dependencies() const override { return {}; }
+  Result<void> ResultSetup() override {
+    tombstone_dir_ = instance_.PerInstancePath("tombstones");
+    if (!DirectoryExists(tombstone_dir_)) {
+      LOG(DEBUG) << "Setting up " << tombstone_dir_;
+      CF_EXPECT(mkdir(tombstone_dir_.c_str(),
+                      S_IRWXU | S_IRWXG | S_IROTH | S_IXOTH) == 0,
+                "Failed to create tombstone directory: "
+                    << tombstone_dir_ << ". Error: " << strerror(errno));
+    }
+
+    auto port = instance_.tombstone_receiver_port();
+    socket_ = SharedFD::VsockServer(port, SOCK_STREAM);
+    CF_EXPECT(socket_->IsOpen(), "Unable to create tombstone server socket: "
+                                     << socket_->StrError());
+    return {};
+  }
+
+  const CuttlefishConfig::InstanceSpecific& instance_;
+  SharedFD socket_;
+  std::string tombstone_dir_;
+};
+
+fruit::Component<fruit::Required<const CuttlefishConfig::InstanceSpecific>>
+TombstoneReceiverComponent() {
+  return fruit::createComponent()
+      .addMultibinding<CommandSource, TombstoneReceiver>()
+      .addMultibinding<SetupFeature, TombstoneReceiver>();
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/run_cvd/launch/uwb_connector.cpp b/host/commands/run_cvd/launch/uwb_connector.cpp
new file mode 100644
index 0000000..77e3bfa
--- /dev/null
+++ b/host/commands/run_cvd/launch/uwb_connector.cpp
@@ -0,0 +1,83 @@
+//
+// Copyright (C) 2019 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "host/commands/run_cvd/launch/launch.h"
+
+#define UCI_HEADER_SIZE 4
+#define UCI_MAX_PAYLOAD_SIZE 255
+#define UCI_MAX_PACKET_SIZE (UCI_HEADER_SIZE + UCI_MAX_PAYLOAD_SIZE)
+
+constexpr const size_t kBufferSize = UCI_MAX_PACKET_SIZE * 2;
+
+namespace cuttlefish {
+namespace {
+
+class UwbConnector : public CommandSource {
+ public:
+  INJECT(UwbConnector(const CuttlefishConfig& config,
+                            const CuttlefishConfig::InstanceSpecific& instance))
+      : config_(config), instance_(instance) {}
+
+  // CommandSource
+  Result<std::vector<MonitorCommand>> Commands() override {
+    Command command(HostBinaryPath("tcp_connector"));
+    command.AddParameter("-fifo_out=", fifos_[0]);
+    command.AddParameter("-fifo_in=", fifos_[1]);
+    command.AddParameter("-data_port=", config_.pica_uci_port());
+    command.AddParameter("-buffer_size=", kBufferSize);
+    std::vector<MonitorCommand> commands;
+    commands.emplace_back(std::move(command));
+    return commands;
+  }
+
+  // SetupFeature
+  std::string Name() const override { return "UwbConnector"; }
+  bool Enabled() const override { return config_.enable_host_uwb_connector(); }
+
+ private:
+  std::unordered_set<SetupFeature*> Dependencies() const override { return {}; }
+  Result<void> ResultSetup() {
+    std::vector<std::string> fifo_paths = {
+        instance_.PerInstanceInternalPath("uwb_fifo_vm.in"),
+        instance_.PerInstanceInternalPath("uwb_fifo_vm.out"),
+    };
+    for (const auto& path : fifo_paths) {
+      unlink(path.c_str());
+      CF_EXPECT(mkfifo(path.c_str(), 0660) == 0, "Could not create " << path);
+      auto fd = SharedFD::Open(path, O_RDWR);
+      CF_EXPECT(fd->IsOpen(),
+                "Could not open " << path << ": " << fd->StrError());
+      fifos_.push_back(fd);
+    }
+    return {};
+  }
+
+ private:
+  const CuttlefishConfig& config_;
+  const CuttlefishConfig::InstanceSpecific& instance_;
+  std::vector<SharedFD> fifos_;
+};
+
+}  // namespace
+
+fruit::Component<fruit::Required<const CuttlefishConfig,
+                                 const CuttlefishConfig::InstanceSpecific>>
+UwbConnectorComponent() {
+  return fruit::createComponent()
+      .addMultibinding<CommandSource, UwbConnector>()
+      .addMultibinding<SetupFeature, UwbConnector>();
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/run_cvd/launch/wmediumd_server.cpp b/host/commands/run_cvd/launch/wmediumd_server.cpp
new file mode 100644
index 0000000..4c5ec0f
--- /dev/null
+++ b/host/commands/run_cvd/launch/wmediumd_server.cpp
@@ -0,0 +1,136 @@
+//
+// Copyright (C) 2019 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "host/commands/run_cvd/launch/launch.h"
+
+#include <string>
+#include <unordered_set>
+#include <utility>
+#include <vector>
+
+#include <fruit/fruit.h>
+
+#include "common/libs/utils/files.h"
+#include "common/libs/utils/result.h"
+#include "host/commands/run_cvd/launch/grpc_socket_creator.h"
+#include "host/commands/run_cvd/launch/log_tee_creator.h"
+#include "host/libs/config/command_source.h"
+#include "host/libs/config/cuttlefish_config.h"
+#include "host/libs/config/known_paths.h"
+
+namespace cuttlefish {
+namespace {
+
+class WmediumdServer : public CommandSource {
+ public:
+  INJECT(WmediumdServer(const CuttlefishConfig& config,
+                        const CuttlefishConfig::InstanceSpecific& instance,
+                        LogTeeCreator& log_tee, GrpcSocketCreator& grpc_socket))
+      : config_(config),
+        instance_(instance),
+        log_tee_(log_tee),
+        grpc_socket_(grpc_socket) {}
+
+  // CommandSource
+  Result<std::vector<MonitorCommand>> Commands() override {
+    Command cmd(WmediumdBinary());
+    cmd.AddParameter("-u", config_.vhost_user_mac80211_hwsim());
+    cmd.AddParameter("-a", config_.wmediumd_api_server_socket());
+    cmd.AddParameter("-c", config_path_);
+
+    cmd.AddParameter("--grpc_uds_path=", grpc_socket_.CreateGrpcSocket(Name()));
+
+    std::vector<MonitorCommand> commands;
+    commands.emplace_back(std::move(log_tee_.CreateLogTee(cmd, "wmediumd")));
+    commands.emplace_back(std::move(cmd));
+    return commands;
+  }
+
+  // SetupFeature
+  std::string Name() const override { return "WmediumdServer"; }
+  bool Enabled() const override {
+    return instance_.start_wmediumd();
+  }
+
+ private:
+  std::unordered_set<SetupFeature*> Dependencies() const override { return {}; }
+  Result<void> ResultSetup() override {
+    // If wmediumd configuration is given, use it
+    if (!config_.wmediumd_config().empty()) {
+      config_path_ = config_.wmediumd_config();
+      return {};
+    }
+    // Otherwise, generate wmediumd configuration using the current wifi mac
+    // prefix before start
+    config_path_ = instance_.PerInstanceInternalPath("wmediumd.cfg");
+    Command gen_config_cmd(WmediumdGenConfigBinary());
+    gen_config_cmd.AddParameter("-o", config_path_);
+    gen_config_cmd.AddParameter("-p", instance_.wifi_mac_prefix());
+
+    int success = gen_config_cmd.Start().Wait();
+    CF_EXPECT(success == 0, "Unable to run " << gen_config_cmd.Executable()
+                                             << ". Exited with status "
+                                             << success);
+    return {};
+  }
+
+  const CuttlefishConfig& config_;
+  const CuttlefishConfig::InstanceSpecific& instance_;
+  LogTeeCreator& log_tee_;
+  GrpcSocketCreator& grpc_socket_;
+  std::string config_path_;
+};
+
+// SetupFeature class that waits for the wmediumd server to be settled.
+// This class is used by instances that do not launch wmediumd.
+// TODO(b/276832089) remove this when the run_env implementation is completed.
+class ValidateWmediumdService : public SetupFeature {
+ public:
+  INJECT(ValidateWmediumdService(
+      const CuttlefishConfig& config,
+      const CuttlefishConfig::InstanceSpecific& instance))
+      : config_(config), instance_(instance) {}
+  std::string Name() const override { return "ValidateWmediumdService"; }
+  bool Enabled() const override {
+    return config_.virtio_mac80211_hwsim() && !instance_.start_wmediumd();
+  }
+
+ private:
+  std::unordered_set<SetupFeature*> Dependencies() const override { return {}; }
+  Result<void> ResultSetup() override {
+    CF_EXPECT(WaitForUnixSocket(config_.wmediumd_api_server_socket(), 30));
+    CF_EXPECT(WaitForUnixSocket(config_.vhost_user_mac80211_hwsim(), 30));
+
+    return {};
+  }
+
+ private:
+  const CuttlefishConfig& config_;
+  const CuttlefishConfig::InstanceSpecific& instance_;
+};
+
+}  // namespace
+
+fruit::Component<fruit::Required<const CuttlefishConfig,
+                                 const CuttlefishConfig::InstanceSpecific,
+                                 LogTeeCreator, GrpcSocketCreator>>
+WmediumdServerComponent() {
+  return fruit::createComponent()
+      .addMultibinding<CommandSource, WmediumdServer>()
+      .addMultibinding<SetupFeature, WmediumdServer>()
+      .addMultibinding<SetupFeature, ValidateWmediumdService>();
+}
+
+}  // namespace cuttlefish
diff --git a/host/commands/run_cvd/launch_modem.cpp b/host/commands/run_cvd/launch_modem.cpp
deleted file mode 100644
index 8648b89..0000000
--- a/host/commands/run_cvd/launch_modem.cpp
+++ /dev/null
@@ -1,141 +0,0 @@
-//
-// Copyright (C) 2019 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-#include "host/commands/run_cvd/launch.h"
-
-#include <android-base/logging.h>
-#include <string.h>
-#include <sstream>
-#include <string>
-#include <unordered_set>
-#include <utility>
-
-#include "common/libs/fs/shared_fd.h"
-#include "common/libs/utils/result.h"
-#include "common/libs/utils/subprocess.h"
-#include "host/libs/config/cuttlefish_config.h"
-#include "host/libs/config/known_paths.h"
-
-namespace cuttlefish {
-
-static bool StopModemSimulator(int id) {
-  std::string socket_name = "modem_simulator" + std::to_string(id);
-  auto monitor_sock =
-      SharedFD::SocketLocalClient(socket_name, true, SOCK_STREAM);
-  if (!monitor_sock->IsOpen()) {
-    LOG(ERROR) << "The connection to modem simulator is closed";
-    return false;
-  }
-  std::string msg("STOP");
-  if (monitor_sock->Write(msg.data(), msg.size()) < 0) {
-    monitor_sock->Close();
-    LOG(ERROR) << "Failed to send 'STOP' to modem simulator";
-    return false;
-  }
-  char buf[64] = {0};
-  if (monitor_sock->Read(buf, sizeof(buf)) <= 0) {
-    monitor_sock->Close();
-    LOG(ERROR) << "Failed to read message from modem simulator";
-    return false;
-  }
-  if (strcmp(buf, "OK")) {
-    monitor_sock->Close();
-    LOG(ERROR) << "Read '" << buf << "' instead of 'OK' from modem simulator";
-    return false;
-  }
-
-  return true;
-}
-
-class ModemSimulator : public CommandSource {
- public:
-  INJECT(ModemSimulator(const CuttlefishConfig& config,
-                        const CuttlefishConfig::InstanceSpecific& instance))
-      : config_(config), instance_(instance) {}
-
-  // CommandSource
-  std::vector<Command> Commands() override {
-    Command cmd(ModemSimulatorBinary(), [this](Subprocess* proc) {
-      auto stopped = StopModemSimulator(instance_.modem_simulator_host_id());
-      if (stopped) {
-        return StopperResult::kStopSuccess;
-      }
-      LOG(WARNING) << "Failed to stop modem simulator nicely, "
-                   << "attempting to KILL";
-      return KillSubprocess(proc) == StopperResult::kStopSuccess
-                 ? StopperResult::kStopCrash
-                 : StopperResult::kStopFailure;
-    });
-
-    auto sim_type = config_.modem_simulator_sim_type();
-    cmd.AddParameter(std::string{"-sim_type="} + std::to_string(sim_type));
-    cmd.AddParameter("-server_fds=");
-    bool first_socket = true;
-    for (const auto& socket : sockets_) {
-      if (!first_socket) {
-        cmd.AppendToLastParameter(",");
-      }
-      cmd.AppendToLastParameter(socket);
-      first_socket = false;
-    }
-
-    std::vector<Command> commands;
-    commands.emplace_back(std::move(cmd));
-    return commands;
-  }
-
-  // SetupFeature
-  std::string Name() const override { return "ModemSimulator"; }
-  bool Enabled() const override {
-    if (!config_.enable_modem_simulator()) {
-      LOG(DEBUG) << "Modem simulator not enabled";
-    }
-    return config_.enable_modem_simulator();
-  }
-
- private:
-  std::unordered_set<SetupFeature*> Dependencies() const override { return {}; }
-  Result<void> ResultSetup() override {
-    int instance_number = config_.modem_simulator_instance_number();
-    CF_EXPECT(instance_number >= 0 && instance_number < 4,
-              "Modem simulator instance number should range between 0 and 3");
-    auto ports = instance_.modem_simulator_ports();
-    for (int i = 0; i < instance_number; ++i) {
-      auto pos = ports.find(',');
-      auto temp = (pos != std::string::npos) ? ports.substr(0, pos) : ports;
-      auto port = std::stoi(temp);
-      ports = ports.substr(pos + 1);
-
-      auto modem_sim_socket = SharedFD::VsockServer(port, SOCK_STREAM);
-      CF_EXPECT(modem_sim_socket->IsOpen(), modem_sim_socket->StrError());
-      sockets_.emplace_back(std::move(modem_sim_socket));
-    }
-    return {};
-  }
-
-  const CuttlefishConfig& config_;
-  const CuttlefishConfig::InstanceSpecific& instance_;
-  std::vector<SharedFD> sockets_;
-};
-
-fruit::Component<fruit::Required<const CuttlefishConfig,
-                                 const CuttlefishConfig::InstanceSpecific>>
-launchModemComponent() {
-  return fruit::createComponent()
-      .addMultibinding<CommandSource, ModemSimulator>()
-      .addMultibinding<SetupFeature, ModemSimulator>();
-}
-
-}  // namespace cuttlefish
diff --git a/host/commands/run_cvd/launch_streamer.cpp b/host/commands/run_cvd/launch_streamer.cpp
deleted file mode 100644
index ef9b0ca..0000000
--- a/host/commands/run_cvd/launch_streamer.cpp
+++ /dev/null
@@ -1,305 +0,0 @@
-//
-// Copyright (C) 2019 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-#include "host/commands/run_cvd/launch.h"
-
-#include <android-base/logging.h>
-#include <sstream>
-#include <string>
-#include <utility>
-
-#include "common/libs/fs/shared_buf.h"
-#include "common/libs/fs/shared_fd.h"
-#include "common/libs/utils/files.h"
-#include "common/libs/utils/result.h"
-#include "host/commands/run_cvd/reporting.h"
-#include "host/libs/config/cuttlefish_config.h"
-#include "host/libs/config/known_paths.h"
-#include "host/libs/vm_manager/crosvm_manager.h"
-#include "host/libs/vm_manager/qemu_manager.h"
-
-namespace cuttlefish {
-
-namespace {
-
-SharedFD CreateUnixInputServer(const std::string& path) {
-  auto server =
-      SharedFD::SocketLocalServer(path.c_str(), false, SOCK_STREAM, 0666);
-  if (!server->IsOpen()) {
-    LOG(ERROR) << "Unable to create unix input server: " << server->StrError();
-    return {};
-  }
-  return server;
-}
-
-std::vector<Command> LaunchCustomActionServers(
-    Command& webrtc_cmd,
-    const std::vector<CustomActionConfig>& custom_actions) {
-  bool first = true;
-  std::vector<Command> commands;
-  for (const auto& custom_action : custom_actions) {
-    if (custom_action.server) {
-      // Create a socket pair that will be used for communication between
-      // WebRTC and the action server.
-      SharedFD webrtc_socket, action_server_socket;
-      if (!SharedFD::SocketPair(AF_LOCAL, SOCK_STREAM, 0, &webrtc_socket,
-                                &action_server_socket)) {
-        LOG(ERROR) << "Unable to create custom action server socket pair: "
-                   << strerror(errno);
-        continue;
-      }
-
-      // Launch the action server, providing its socket pair fd as the only
-      // argument.
-      auto binary = HostBinaryPath(*(custom_action.server));
-      Command command(binary);
-      command.AddParameter(action_server_socket);
-      commands.emplace_back(std::move(command));
-
-      // Pass the WebRTC socket pair fd to WebRTC.
-      if (first) {
-        first = false;
-        webrtc_cmd.AddParameter("-action_servers=", *custom_action.server, ":",
-                                webrtc_socket);
-      } else {
-        webrtc_cmd.AppendToLastParameter(",", *custom_action.server, ":",
-                                         webrtc_socket);
-      }
-    }
-  }
-  return commands;
-}
-
-// Creates the frame and input sockets and add the relevant arguments to
-// webrtc commands
-class StreamerSockets : public virtual SetupFeature {
- public:
-  INJECT(StreamerSockets(const CuttlefishConfig& config,
-                         const CuttlefishConfig::InstanceSpecific& instance))
-      : config_(config), instance_(instance) {}
-
-  void AppendCommandArguments(Command& cmd) {
-    if (config_.vm_manager() == vm_manager::QemuManager::name()) {
-      cmd.AddParameter("-write_virtio_input");
-    }
-    if (!touch_servers_.empty()) {
-      cmd.AddParameter("-touch_fds=", touch_servers_[0]);
-      for (int i = 1; i < touch_servers_.size(); ++i) {
-        cmd.AppendToLastParameter(",", touch_servers_[i]);
-      }
-    }
-    cmd.AddParameter("-keyboard_fd=", keyboard_server_);
-    cmd.AddParameter("-frame_server_fd=", frames_server_);
-    if (config_.enable_audio()) {
-      cmd.AddParameter("--audio_server_fd=", audio_server_);
-    }
-  }
-
-  // SetupFeature
-  std::string Name() const override { return "StreamerSockets"; }
-  bool Enabled() const override {
-    bool is_qemu = config_.vm_manager() == vm_manager::QemuManager::name();
-    bool is_accelerated = config_.gpu_mode() != kGpuModeGuestSwiftshader;
-    return !(is_qemu && is_accelerated);
-  }
-
- private:
-  std::unordered_set<SetupFeature*> Dependencies() const override { return {}; }
-
-  Result<void> ResultSetup() override {
-    auto use_vsockets = config_.vm_manager() == vm_manager::QemuManager::name();
-    for (int i = 0; i < config_.display_configs().size(); ++i) {
-      SharedFD touch_socket =
-          use_vsockets ? SharedFD::VsockServer(instance_.touch_server_port(),
-                                               SOCK_STREAM)
-                       : CreateUnixInputServer(instance_.touch_socket_path(i));
-      CF_EXPECT(touch_socket->IsOpen(), touch_socket->StrError());
-      touch_servers_.emplace_back(std::move(touch_socket));
-    }
-    keyboard_server_ =
-        use_vsockets ? SharedFD::VsockServer(instance_.keyboard_server_port(),
-                                             SOCK_STREAM)
-                     : CreateUnixInputServer(instance_.keyboard_socket_path());
-    CF_EXPECT(keyboard_server_->IsOpen(), keyboard_server_->StrError());
-
-    frames_server_ = CreateUnixInputServer(instance_.frames_socket_path());
-    CF_EXPECT(frames_server_->IsOpen(), frames_server_->StrError());
-    // TODO(schuffelen): Make this a separate optional feature?
-    if (config_.enable_audio()) {
-      auto path = config_.ForDefaultInstance().audio_server_path();
-      audio_server_ =
-          SharedFD::SocketLocalServer(path, false, SOCK_SEQPACKET, 0666);
-      CF_EXPECT(audio_server_->IsOpen(), audio_server_->StrError());
-    }
-    return {};
-  }
-
-  const CuttlefishConfig& config_;
-  const CuttlefishConfig::InstanceSpecific& instance_;
-  std::vector<SharedFD> touch_servers_;
-  SharedFD keyboard_server_;
-  SharedFD frames_server_;
-  SharedFD audio_server_;
-};
-
-class WebRtcServer : public virtual CommandSource,
-                     public DiagnosticInformation {
- public:
-  INJECT(WebRtcServer(const CuttlefishConfig& config,
-                      const CuttlefishConfig::InstanceSpecific& instance,
-                      StreamerSockets& sockets,
-                      KernelLogPipeProvider& log_pipe_provider,
-                      const CustomActionConfigProvider& custom_action_config))
-      : config_(config),
-        instance_(instance),
-        sockets_(sockets),
-        log_pipe_provider_(log_pipe_provider),
-        custom_action_config_(custom_action_config) {}
-  // DiagnosticInformation
-  std::vector<std::string> Diagnostics() const override {
-    if (!Enabled() || !config_.ForDefaultInstance().start_webrtc_sig_server()) {
-      // When WebRTC is enabled but an operator other than the one launched by
-      // run_cvd is used there is no way to know the url to which to point the
-      // browser to.
-      return {};
-    }
-    std::ostringstream out;
-    out << "Point your browser to https://" << config_.sig_server_address()
-        << ":" << config_.sig_server_port() << " to interact with the device.";
-    return {out.str()};
-  }
-
-  // CommandSource
-  std::vector<Command> Commands() override {
-    std::vector<Command> commands;
-    if (instance_.start_webrtc_sig_server()) {
-      Command sig_server(WebRtcSigServerBinary());
-      sig_server.AddParameter("-assets_dir=", config_.webrtc_assets_dir());
-      sig_server.AddParameter(
-          "-use_secure_http=",
-          config_.sig_server_secure() ? "true" : "false");
-      if (!config_.webrtc_certs_dir().empty()) {
-        sig_server.AddParameter("-certs_dir=", config_.webrtc_certs_dir());
-      }
-      sig_server.AddParameter("-http_server_port=", config_.sig_server_port());
-      commands.emplace_back(std::move(sig_server));
-    }
-
-    if (instance_.start_webrtc_sig_server_proxy()) {
-      Command sig_proxy(WebRtcSigServerProxyBinary());
-      sig_proxy.AddParameter("-server_port=", config_.sig_server_port());
-      commands.emplace_back(std::move(sig_proxy));
-    }
-
-    auto stopper = [host_socket = std::move(host_socket_)](Subprocess* proc) {
-      struct timeval timeout;
-      timeout.tv_sec = 3;
-      timeout.tv_usec = 0;
-      CHECK(host_socket->SetSockOpt(SOL_SOCKET, SO_RCVTIMEO, &timeout,
-                                    sizeof(timeout)) == 0)
-          << "Could not set receive timeout";
-
-      WriteAll(host_socket, "C");
-      char response[1];
-      int read_ret = host_socket->Read(response, sizeof(response));
-      if (read_ret != 0) {
-        LOG(ERROR) << "Failed to read response from webrtc";
-        return KillSubprocess(proc);
-      }
-      return KillSubprocess(proc) == StopperResult::kStopSuccess
-                 ? StopperResult::kStopCrash
-                 : StopperResult::kStopFailure;
-    };
-
-    Command webrtc(WebRtcBinary(), stopper);
-    webrtc.UnsetFromEnvironment("http_proxy");
-    sockets_.AppendCommandArguments(webrtc);
-    if (config_.vm_manager() == vm_manager::CrosvmManager::name()) {
-      webrtc.AddParameter("-switches_fd=", switches_server_);
-    }
-    // Currently there is no way to ensure the signaling server will already
-    // have bound the socket to the port by the time the webrtc process runs
-    // (the common technique of doing it from the launcher is not possible here
-    // as the server library being used creates its own sockets). However, this
-    // issue is mitigated slightly by doing some retrying and backoff in the
-    // webrtc process when connecting to the websocket, so it shouldn't be an
-    // issue most of the time.
-    webrtc.AddParameter("--command_fd=", client_socket_);
-    webrtc.AddParameter("-kernel_log_events_fd=", kernel_log_events_pipe_);
-    webrtc.AddParameter("-client_dir=",
-                        DefaultHostArtifactsPath("usr/share/webrtc/assets"));
-
-    // TODO get from launcher params
-    const auto& actions = custom_action_config_.CustomActions();
-    for (auto& action : LaunchCustomActionServers(webrtc, actions)) {
-      commands.emplace_back(std::move(action));
-    }
-    commands.emplace_back(std::move(webrtc));
-
-    return commands;
-  }
-
-  // SetupFeature
-  bool Enabled() const override {
-    return sockets_.Enabled() && config_.enable_webrtc();
-  }
-
- private:
-  std::string Name() const override { return "WebRtcServer"; }
-  std::unordered_set<SetupFeature*> Dependencies() const override {
-    return {static_cast<SetupFeature*>(&sockets_),
-            static_cast<SetupFeature*>(&log_pipe_provider_)};
-  }
-
-  Result<void> ResultSetup() override {
-    CF_EXPECT(SharedFD::SocketPair(AF_LOCAL, SOCK_STREAM, 0, &client_socket_,
-                                   &host_socket_),
-              client_socket_->StrError());
-    if (config_.vm_manager() == vm_manager::CrosvmManager::name()) {
-      switches_server_ =
-          CreateUnixInputServer(instance_.switches_socket_path());
-      CF_EXPECT(switches_server_->IsOpen(), switches_server_->StrError());
-    }
-    kernel_log_events_pipe_ = log_pipe_provider_.KernelLogPipe();
-    CF_EXPECT(kernel_log_events_pipe_->IsOpen(),
-              kernel_log_events_pipe_->StrError());
-    return {};
-  }
-
-  const CuttlefishConfig& config_;
-  const CuttlefishConfig::InstanceSpecific& instance_;
-  StreamerSockets& sockets_;
-  KernelLogPipeProvider& log_pipe_provider_;
-  const CustomActionConfigProvider& custom_action_config_;
-  SharedFD kernel_log_events_pipe_;
-  SharedFD client_socket_;
-  SharedFD host_socket_;
-  SharedFD switches_server_;
-};
-
-}  // namespace
-
-fruit::Component<fruit::Required<const CuttlefishConfig, KernelLogPipeProvider,
-                                 const CuttlefishConfig::InstanceSpecific,
-                                 const CustomActionConfigProvider>>
-launchStreamerComponent() {
-  return fruit::createComponent()
-      .addMultibinding<CommandSource, WebRtcServer>()
-      .addMultibinding<DiagnosticInformation, WebRtcServer>()
-      .addMultibinding<SetupFeature, StreamerSockets>()
-      .addMultibinding<SetupFeature, WebRtcServer>();
-}
-
-}  // namespace cuttlefish
diff --git a/host/commands/run_cvd/main.cc b/host/commands/run_cvd/main.cc
index 9332ccc..d645e40 100644
--- a/host/commands/run_cvd/main.cc
+++ b/host/commands/run_cvd/main.cc
@@ -34,30 +34,31 @@
 #include "common/libs/utils/subprocess.h"
 #include "common/libs/utils/tee_logging.h"
 #include "host/commands/run_cvd/boot_state_machine.h"
-#include "host/commands/run_cvd/launch.h"
+#include "host/commands/run_cvd/launch/launch.h"
 #include "host/commands/run_cvd/process_monitor.h"
 #include "host/commands/run_cvd/reporting.h"
 #include "host/commands/run_cvd/runner_defs.h"
 #include "host/commands/run_cvd/server_loop.h"
 #include "host/commands/run_cvd/validate.h"
 #include "host/libs/config/adb/adb.h"
+#include "host/libs/config/fastboot/fastboot.h"
 #include "host/libs/config/config_flag.h"
 #include "host/libs/config/config_fragment.h"
 #include "host/libs/config/custom_actions.h"
 #include "host/libs/config/cuttlefish_config.h"
+#include "host/libs/config/inject.h"
+#include "host/libs/metrics/metrics_receiver.h"
 #include "host/libs/vm_manager/vm_manager.h"
 
 namespace cuttlefish {
 
 namespace {
 
-class CuttlefishEnvironment : public SetupFeature,
-                              public DiagnosticInformation {
+class CuttlefishEnvironment : public DiagnosticInformation {
  public:
   INJECT(
-      CuttlefishEnvironment(const CuttlefishConfig& config,
-                            const CuttlefishConfig::InstanceSpecific& instance))
-      : config_(config), instance_(instance) {}
+      CuttlefishEnvironment(const CuttlefishConfig::InstanceSpecific& instance))
+      : instance_(instance) {}
 
   // DiagnosticInformation
   std::vector<std::string> Diagnostics() const override {
@@ -65,56 +66,84 @@
     return {
         "Launcher log: " + instance_.launcher_log_path(),
         "Instance configuration: " + config_path,
-        "Instance environment: " + config_.cuttlefish_env_path(),
     };
   }
 
-  // SetupFeature
-  std::string Name() const override { return "CuttlefishEnvironment"; }
-  bool Enabled() const override { return true; }
-
  private:
-  std::unordered_set<SetupFeature*> Dependencies() const override { return {}; }
-  bool Setup() override {
-    auto env =
-        SharedFD::Open(config_.cuttlefish_env_path(), O_CREAT | O_RDWR, 0755);
-    if (!env->IsOpen()) {
-      LOG(ERROR) << "Unable to create cuttlefish.env file";
-      return false;
+  const CuttlefishConfig::InstanceSpecific& instance_;
+};
+
+class InstanceLifecycle : public LateInjected {
+ public:
+  INJECT(InstanceLifecycle(const CuttlefishConfig& config,
+                           ServerLoop& server_loop))
+      : config_(config), server_loop_(server_loop) {}
+
+  Result<void> LateInject(fruit::Injector<>& injector) override {
+    config_fragments_ = injector.getMultibindings<ConfigFragment>();
+    setup_features_ = injector.getMultibindings<SetupFeature>();
+    diagnostics_ = injector.getMultibindings<DiagnosticInformation>();
+    return {};
+  }
+
+  Result<void> Run() {
+    for (auto& fragment : config_fragments_) {
+      CF_EXPECT(config_.LoadFragment(*fragment));
     }
-    std::string config_env = "export CUTTLEFISH_PER_INSTANCE_PATH=\"" +
-                             instance_.PerInstancePath(".") + "\"\n";
-    config_env += "export ANDROID_SERIAL=" + instance_.adb_ip_and_port() + "\n";
-    auto written = WriteAll(env, config_env);
-    if (written != config_env.size()) {
-      LOG(ERROR) << "Failed to write all of \"" << config_env << "\", "
-                 << "only wrote " << written << " bytes. Error was "
-                 << env->StrError();
-      return false;
-    }
-    return true;
+
+    // One of the setup features can consume most output, so print this early.
+    DiagnosticInformation::PrintAll(diagnostics_);
+
+    CF_EXPECT(SetupFeature::RunSetup(setup_features_));
+
+    CF_EXPECT(server_loop_.Run());
+
+    return {};
   }
 
  private:
   const CuttlefishConfig& config_;
-  const CuttlefishConfig::InstanceSpecific& instance_;
+  ServerLoop& server_loop_;
+  std::vector<ConfigFragment*> config_fragments_;
+  std::vector<SetupFeature*> setup_features_;
+  std::vector<DiagnosticInformation*> diagnostics_;
 };
 
-fruit::Component<ServerLoop> runCvdComponent(
+fruit::Component<> runCvdComponent(
     const CuttlefishConfig* config,
     const CuttlefishConfig::InstanceSpecific* instance) {
   return fruit::createComponent()
       .addMultibinding<DiagnosticInformation, CuttlefishEnvironment>()
-      .addMultibinding<SetupFeature, CuttlefishEnvironment>()
+      .addMultibinding<InstanceLifecycle, InstanceLifecycle>()
+      .addMultibinding<LateInjected, InstanceLifecycle>()
       .bindInstance(*config)
       .bindInstance(*instance)
       .install(AdbConfigComponent)
       .install(AdbConfigFragmentComponent)
+      .install(FastbootConfigComponent)
+      .install(FastbootConfigFragmentComponent)
       .install(bootStateMachineComponent)
       .install(ConfigFlagPlaceholder)
       .install(CustomActionsComponent)
       .install(LaunchAdbComponent)
-      .install(launchComponent)
+      .install(LaunchFastbootComponent)
+      .install(BluetoothConnectorComponent)
+      .install(UwbConnectorComponent)
+      .install(ConfigServerComponent)
+      .install(ConsoleForwarderComponent)
+      .install(EchoServerComponent)
+      .install(GnssGrpcProxyServerComponent)
+      .install(LogcatReceiverComponent)
+      .install(KernelLogMonitorComponent)
+      .install(MetricsServiceComponent)
+      .install(OpenWrtComponent)
+      .install(OpenwrtControlServerComponent)
+      .install(PicaComponent)
+      .install(RootCanalComponent)
+      .install(NetsimServerComponent)
+      .install(SecureEnvComponent)
+      .install(TombstoneReceiverComponent)
+      .install(WmediumdServerComponent)
       .install(launchModemComponent)
       .install(launchStreamerComponent)
       .install(serverLoopComponent)
@@ -154,12 +183,14 @@
                    const CuttlefishConfig::InstanceSpecific& instance) {
   auto log_path = instance.launcher_log_path();
 
-  std::ofstream launcher_log_ofstream(log_path.c_str());
-  auto assembly_path = config.AssemblyPath("assemble_cvd.log");
-  std::ifstream assembly_log_ifstream(assembly_path);
-  if (assembly_log_ifstream) {
-    auto assemble_log = ReadFile(assembly_path);
-    launcher_log_ofstream << assemble_log;
+  if (!FileHasContent(log_path)) {
+    std::ofstream launcher_log_ofstream(log_path.c_str());
+    auto assembly_path = config.AssemblyPath("assemble_cvd.log");
+    std::ifstream assembly_log_ifstream(assembly_path);
+    if (assembly_log_ifstream) {
+      auto assemble_log = ReadFile(assembly_path);
+      launcher_log_ofstream << assemble_log;
+    }
   }
   std::string prefix;
   if (config.Instances().size() > 1) {
@@ -194,35 +225,17 @@
   ConfigureLogs(*config, instance);
   CF_EXPECT(ChdirIntoRuntimeDir(instance));
 
-  fruit::Injector<ServerLoop> injector(runCvdComponent, config, &instance);
+  fruit::Injector<> injector(runCvdComponent, config, &instance);
 
-  for (auto& fragment : injector.getMultibindings<ConfigFragment>()) {
-    CF_EXPECT(config->LoadFragment(*fragment));
+  for (auto& late_injected : injector.getMultibindings<LateInjected>()) {
+    CF_EXPECT(late_injected->LateInject(injector));
   }
 
-  // One of the setup features can consume most output, so print this early.
-  DiagnosticInformation::PrintAll(
-      injector.getMultibindings<DiagnosticInformation>());
+  MetricsReceiver::LogMetricsVMStart();
 
-  const auto& features = injector.getMultibindings<SetupFeature>();
-  CF_EXPECT(SetupFeature::RunSetup(features));
-
-  // Monitor and restart host processes supporting the CVD
-  ProcessMonitor::Properties process_monitor_properties;
-  process_monitor_properties.RestartSubprocesses(
-      config->restart_subprocesses());
-
-  for (auto& command_source : injector.getMultibindings<CommandSource>()) {
-    if (command_source->Enabled()) {
-      process_monitor_properties.AddCommands(command_source->Commands());
-    }
-  }
-
-  ProcessMonitor process_monitor(std::move(process_monitor_properties));
-
-  CF_EXPECT(process_monitor.StartAndMonitorProcesses());
-
-  injector.get<ServerLoop&>().Run(process_monitor);  // Should not return
+  auto instance_bindings = injector.getMultibindings<InstanceLifecycle>();
+  CF_EXPECT(instance_bindings.size() == 1);
+  CF_EXPECT(instance_bindings[0]->Run());  // Should not return
 
   return CF_ERR("The server loop returned, it should never happen!!");
 }
@@ -231,6 +244,10 @@
 
 int main(int argc, char** argv) {
   auto result = cuttlefish::RunCvdMain(argc, argv);
-  CHECK(result.ok()) << result.error();
-  return 0;
+  if (result.ok()) {
+    return 0;
+  }
+  LOG(ERROR) << result.error().Message();
+  LOG(DEBUG) << result.error().Trace();
+  abort();
 }
diff --git a/host/commands/run_cvd/process_monitor.cc b/host/commands/run_cvd/process_monitor.cc
index ae720a8..3e250c6 100644
--- a/host/commands/run_cvd/process_monitor.cc
+++ b/host/commands/run_cvd/process_monitor.cc
@@ -26,20 +26,154 @@
 #include <stdio.h>
 
 #include <algorithm>
+#include <atomic>
 #include <future>
+#include <memory>
 #include <thread>
 
 #include <android-base/logging.h>
 
 #include "common/libs/fs/shared_buf.h"
 #include "common/libs/fs/shared_select.h"
+#include "common/libs/utils/result.h"
+#include "common/libs/utils/subprocess.h"
+#include "host/libs/config/cuttlefish_config.h"
+#include "host/libs/config/known_paths.h"
 
 namespace cuttlefish {
 
+namespace {
+
 struct ParentToChildMessage {
   bool stop;
 };
 
+void LogSubprocessExit(const std::string& name, pid_t pid, int wstatus) {
+  LOG(INFO) << "Detected unexpected exit of monitored subprocess " << name;
+  if (WIFEXITED(wstatus)) {
+    LOG(INFO) << "Subprocess " << name << " (" << pid
+              << ") has exited with exit code " << WEXITSTATUS(wstatus);
+  } else if (WIFSIGNALED(wstatus)) {
+    LOG(ERROR) << "Subprocess " << name << " (" << pid
+               << ") was interrupted by a signal: " << WTERMSIG(wstatus);
+  } else {
+    LOG(INFO) << "subprocess " << name << " (" << pid
+              << ") has exited for unknown reasons";
+  }
+}
+
+void LogSubprocessExit(const std::string& name, const siginfo_t& infop) {
+  LOG(INFO) << "Detected unexpected exit of monitored subprocess " << name;
+  if (infop.si_code == CLD_EXITED) {
+    LOG(INFO) << "Subprocess " << name << " (" << infop.si_pid
+              << ") has exited with exit code " << infop.si_status;
+  } else if (infop.si_code == CLD_KILLED) {
+    LOG(ERROR) << "Subprocess " << name << " (" << infop.si_pid
+               << ") was interrupted by a signal: " << infop.si_status;
+  } else {
+    LOG(INFO) << "subprocess " << name << " (" << infop.si_pid
+              << ") has exited for unknown reasons (code = " << infop.si_code
+              << ", status = " << infop.si_status << ")";
+  }
+}
+
+Result<void> StartSubprocesses(std::vector<MonitorEntry>& entries) {
+  LOG(DEBUG) << "Starting monitored subprocesses";
+  for (auto& monitored : entries) {
+    LOG(INFO) << monitored.cmd->GetShortName();
+    auto options = SubprocessOptions().InGroup(true);
+    monitored.proc.reset(new Subprocess(monitored.cmd->Start(options)));
+    CF_EXPECT(monitored.proc->Started(), "Failed to start subprocess");
+  }
+  return {};
+}
+
+Result<void> ReadMonitorSocketLoopForStop(std::atomic_bool& running,
+                                          SharedFD& monitor_socket) {
+  LOG(DEBUG) << "Waiting for a `stop` message from the parent";
+  while (running.load()) {
+    ParentToChildMessage message;
+    CF_EXPECT(ReadExactBinary(monitor_socket, &message) == sizeof(message),
+              "Could not read message from parent");
+    if (message.stop) {
+      running.store(false);
+      // Wake up the wait() loop by giving it an exited child process
+      if (fork() == 0) {
+        std::exit(0);
+      }
+    }
+  }
+  return {};
+}
+
+Result<void> MonitorLoop(const std::atomic_bool& running,
+                         const bool restart_subprocesses,
+                         std::vector<MonitorEntry>& monitored) {
+  while (running.load()) {
+    int wstatus;
+    pid_t pid = wait(&wstatus);
+    int error_num = errno;
+    CF_EXPECT(pid != -1, "Wait failed: " << strerror(error_num));
+    if (!WIFSIGNALED(wstatus) && !WIFEXITED(wstatus)) {
+      LOG(DEBUG) << "Unexpected status from wait: " << wstatus
+                  << " for pid " << pid;
+      continue;
+    }
+    if (!running.load()) {  // Avoid extra restarts near the end
+      break;
+    }
+    auto matches = [pid](const auto& it) { return it.proc->pid() == pid; };
+    auto it = std::find_if(monitored.begin(), monitored.end(), matches);
+    if (it == monitored.end()) {
+      LogSubprocessExit("(unknown)", pid, wstatus);
+    } else {
+      LogSubprocessExit(it->cmd->GetShortName(), it->proc->pid(), wstatus);
+      if (restart_subprocesses) {
+        auto options = SubprocessOptions().InGroup(true);
+        it->proc.reset(new Subprocess(it->cmd->Start(options)));
+      } else {
+        bool is_critical = it->is_critical;
+        monitored.erase(it);
+        if (running.load() && is_critical) {
+          LOG(ERROR) << "Stopping all monitored processes due to unexpected "
+                        "exit of critical process";
+          Command stop_cmd(StopCvdBinary());
+          stop_cmd.Start();
+        }
+      }
+    }
+  }
+  return {};
+}
+
+Result<void> StopSubprocesses(std::vector<MonitorEntry>& monitored) {
+  LOG(DEBUG) << "Stopping monitored subprocesses";
+  auto stop = [](const auto& it) {
+    auto stop_result = it.proc->Stop();
+    if (stop_result == StopperResult::kStopFailure) {
+      LOG(WARNING) << "Error in stopping \"" << it.cmd->GetShortName() << "\"";
+      return false;
+    }
+    siginfo_t infop;
+    auto success = it.proc->Wait(&infop, WEXITED);
+    if (success < 0) {
+      LOG(WARNING) << "Failed to wait for process " << it.cmd->GetShortName();
+      return false;
+    }
+    if (stop_result == StopperResult::kStopCrash) {
+      LogSubprocessExit(it.cmd->GetShortName(), infop);
+    }
+    return true;
+  };
+  // Processes were started in the order they appear in the vector, stop them in
+  // reverse order for symmetry.
+  size_t stopped = std::count_if(monitored.rbegin(), monitored.rend(), stop);
+  CF_EXPECT(stopped == monitored.size(), "Didn't stop all subprocesses");
+  return {};
+}
+
+}  // namespace
+
 ProcessMonitor::Properties& ProcessMonitor::Properties::RestartSubprocesses(
     bool r) & {
   restart_subprocesses_ = r;
@@ -48,22 +182,18 @@
 
 ProcessMonitor::Properties ProcessMonitor::Properties::RestartSubprocesses(
     bool r) && {
-  restart_subprocesses_ = r;
-  return std::move(*this);
+  return std::move(RestartSubprocesses(r));
 }
 
 ProcessMonitor::Properties& ProcessMonitor::Properties::AddCommand(
-    Command cmd) & {
-  auto& entry = entries_.emplace_back();
-  entry.cmd.reset(new Command(std::move(cmd)));
+    MonitorCommand cmd) & {
+  entries_.emplace_back(std::move(cmd.command), cmd.is_critical);
   return *this;
 }
 
 ProcessMonitor::Properties ProcessMonitor::Properties::AddCommand(
-    Command cmd) && {
-  auto& entry = entries_.emplace_back();
-  entry.cmd.reset(new Command(std::move(cmd)));
-  return std::move(*this);
+    MonitorCommand cmd) && {
+  return std::move(AddCommand(std::move(cmd)));
 }
 
 ProcessMonitor::ProcessMonitor(ProcessMonitor::Properties&& properties)
@@ -102,11 +232,14 @@
   if (monitor_ == 0) {
     monitor_socket_ = client_pipe;
     host_pipe->Close();
-    auto monitor = MonitorRoutine();
-    if (!monitor.ok()) {
-      LOG(ERROR) << "Monitoring processes failed:\n" << monitor.error();
+    auto monitor_result = MonitorRoutine();
+    if (!monitor_result.ok()) {
+      LOG(ERROR) << "Monitoring processes failed:\n"
+                 << monitor_result.error().Message();
+      LOG(DEBUG) << "Monitoring processes failed:\n"
+                 << monitor_result.error().Trace();
     }
-    std::exit(monitor.ok() ? 0 : 1);
+    std::exit(monitor_result.ok() ? 0 : 1);
   } else {
     client_pipe->Close();
     monitor_socket_ = host_pipe;
@@ -114,122 +247,25 @@
   }
 }
 
-static void LogSubprocessExit(const std::string& name, pid_t pid, int wstatus) {
-  LOG(INFO) << "Detected unexpected exit of monitored subprocess " << name;
-  if (WIFEXITED(wstatus)) {
-    LOG(INFO) << "Subprocess " << name << " (" << pid
-              << ") has exited with exit code " << WEXITSTATUS(wstatus);
-  } else if (WIFSIGNALED(wstatus)) {
-    LOG(ERROR) << "Subprocess " << name << " (" << pid
-               << ") was interrupted by a signal: " << WTERMSIG(wstatus);
-  } else {
-    LOG(INFO) << "subprocess " << name << " (" << pid
-              << ") has exited for unknown reasons";
-  }
-}
-
-static void LogSubprocessExit(const std::string& name, const siginfo_t& infop) {
-  LOG(INFO) << "Detected unexpected exit of monitored subprocess " << name;
-  if (infop.si_code == CLD_EXITED) {
-    LOG(INFO) << "Subprocess " << name << " (" << infop.si_pid
-              << ") has exited with exit code " << infop.si_status;
-  } else if (infop.si_code == CLD_KILLED) {
-    LOG(ERROR) << "Subprocess " << name << " (" << infop.si_pid
-               << ") was interrupted by a signal: " << infop.si_status;
-  } else {
-    LOG(INFO) << "subprocess " << name << " (" << infop.si_pid
-              << ") has exited for unknown reasons (code = " << infop.si_code
-              << ", status = " << infop.si_status << ")";
-  }
-}
-
 Result<void> ProcessMonitor::MonitorRoutine() {
   // Make this process a subreaper to reliably catch subprocess exits.
   // See https://man7.org/linux/man-pages/man2/prctl.2.html
   prctl(PR_SET_CHILD_SUBREAPER, 1);
-  prctl(PR_SET_PDEATHSIG, SIGHUP); // Die when parent dies
-
-  LOG(DEBUG) << "Starting monitoring subprocesses";
-  for (auto& monitored : properties_.entries_) {
-    LOG(INFO) << monitored.cmd->GetShortName();
-    auto options = SubprocessOptions().InGroup(true);
-    monitored.proc.reset(new Subprocess(monitored.cmd->Start(options)));
-    CF_EXPECT(monitored.proc->Started(), "Failed to start process");
-  }
-
-  bool running = true;
-  auto policy = std::launch::async;
-  auto parent_comms = std::async(policy, [&running, this]() -> Result<void> {
-    LOG(DEBUG) << "Waiting for a `stop` message from the parent.";
-    while (running) {
-      ParentToChildMessage message;
-      CF_EXPECT(ReadExactBinary(monitor_socket_, &message) == sizeof(message),
-                "Could not read message from parent.");
-      if (message.stop) {
-        running = false;
-        // Wake up the wait() loop by giving it an exited child process
-        if (fork() == 0) {
-          std::exit(0);
-        }
-      }
-    }
-    return {};
-  });
-
-  auto& monitored = properties_.entries_;
+  prctl(PR_SET_PDEATHSIG, SIGHUP);  // Die when parent dies
 
   LOG(DEBUG) << "Monitoring subprocesses";
-  while(running) {
-    int wstatus;
-    pid_t pid = wait(&wstatus);
-    int error_num = errno;
-    CF_EXPECT(pid != -1, "Wait failed: " << strerror(error_num));
-    if (!WIFSIGNALED(wstatus) && !WIFEXITED(wstatus)) {
-      LOG(DEBUG) << "Unexpected status from wait: " << wstatus
-                  << " for pid " << pid;
-      continue;
-    }
-    if (!running) { // Avoid extra restarts near the end
-      break;
-    }
-    auto matches = [pid](const auto& it) { return it.proc->pid() == pid; };
-    auto it = std::find_if(monitored.begin(), monitored.end(), matches);
-    if (it == monitored.end()) {
-      LogSubprocessExit("(unknown)", pid, wstatus);
-    } else {
-      LogSubprocessExit(it->cmd->GetShortName(), it->proc->pid(), wstatus);
-      if (properties_.restart_subprocesses_) {
-        auto options = SubprocessOptions().InGroup(true);
-        it->proc.reset(new Subprocess(it->cmd->Start(options)));
-      } else {
-        properties_.entries_.erase(it);
-      }
-    }
-  }
+  StartSubprocesses(properties_.entries_);
 
-  CF_EXPECT(parent_comms.get());  // Should have exited if `running` is false
-  auto stop = [](const auto& it) {
-    auto stop_result = it.proc->Stop();
-    if (stop_result == StopperResult::kStopFailure) {
-      LOG(WARNING) << "Error in stopping \"" << it.cmd->GetShortName() << "\"";
-      return false;
-    }
-    siginfo_t infop;
-    auto success = it.proc->Wait(&infop, WEXITED);
-    if (success < 0) {
-      LOG(WARNING) << "Failed to wait for process " << it.cmd->GetShortName();
-      return false;
-    }
-    if (stop_result == StopperResult::kStopCrash) {
-      LogSubprocessExit(it.cmd->GetShortName(), infop);
-    }
-    return true;
-  };
-  // Processes were started in the order they appear in the vector, stop them in
-  // reverse order for symmetry.
-  size_t stopped = std::count_if(monitored.rbegin(), monitored.rend(), stop);
+  std::atomic_bool running(true);
+  auto parent_comms =
+      std::async(std::launch::async, ReadMonitorSocketLoopForStop,
+                 std::ref(running), std::ref(monitor_socket_));
+
+  MonitorLoop(running, properties_.restart_subprocesses_, properties_.entries_);
+  CF_EXPECT(parent_comms.get(), "Should have exited if monitoring stopped");
+
+  StopSubprocesses(properties_.entries_);
   LOG(DEBUG) << "Done monitoring subprocesses";
-  CF_EXPECT(stopped == monitored.size(), "Didn't stop all subprocesses");
   return {};
 }
 
diff --git a/host/commands/run_cvd/process_monitor.h b/host/commands/run_cvd/process_monitor.h
index c3f3127..7edb77b 100644
--- a/host/commands/run_cvd/process_monitor.h
+++ b/host/commands/run_cvd/process_monitor.h
@@ -18,19 +18,26 @@
 #include <memory>
 #include <mutex>
 #include <thread>
+#include <utility>
 #include <vector>
 
 #include "common/libs/utils/result.h"
 #include "common/libs/utils/subprocess.h"
+#include "host/libs/config/command_source.h"
 
 namespace cuttlefish {
 
 struct MonitorEntry {
   std::unique_ptr<Command> cmd;
   std::unique_ptr<Subprocess> proc;
+  bool is_critical;
+
+  MonitorEntry(Command command, bool is_critical)
+      : cmd(new Command(std::move(command))), is_critical(is_critical) {}
 };
 
-// Keeps track of launched subprocesses, restarts them if they unexpectedly exit
+// Launches and keeps track of subprocesses, decides response if they
+// unexpectedly exit
 class ProcessMonitor {
  public:
   class Properties {
@@ -38,8 +45,8 @@
     Properties& RestartSubprocesses(bool) &;
     Properties RestartSubprocesses(bool) &&;
 
-    Properties& AddCommand(Command) &;
-    Properties AddCommand(Command) &&;
+    Properties& AddCommand(MonitorCommand) &;
+    Properties AddCommand(MonitorCommand) &&;
 
     template <typename T>
     Properties& AddCommands(T commands) & {
@@ -51,10 +58,7 @@
 
     template <typename T>
     Properties AddCommands(T commands) && {
-      for (auto& command : commands) {
-        AddCommand(std::move(command));
-      }
-      return std::move(*this);
+      return std::move(AddCommands(std::move(commands)));
     }
 
    private:
diff --git a/host/commands/run_cvd/server_loop.cpp b/host/commands/run_cvd/server_loop.cpp
index dfe98ab..6347670 100644
--- a/host/commands/run_cvd/server_loop.cpp
+++ b/host/commands/run_cvd/server_loop.cpp
@@ -16,18 +16,24 @@
 
 #include "host/commands/run_cvd/server_loop.h"
 
+#include <unistd.h>
+
+#include <algorithm>
+#include <memory>
+#include <string>
+
 #include <fruit/fruit.h>
 #include <gflags/gflags.h>
-#include <unistd.h>
-#include <string>
 
 #include "common/libs/fs/shared_buf.h"
 #include "common/libs/utils/files.h"
 #include "common/libs/utils/subprocess.h"
 #include "host/commands/run_cvd/runner_defs.h"
+#include "host/libs/config/command_source.h"
 #include "host/libs/config/cuttlefish_config.h"
 #include "host/libs/config/data_image.h"
 #include "host/libs/config/feature.h"
+#include "host/libs/config/inject.h"
 
 namespace cuttlefish {
 
@@ -38,7 +44,8 @@
                        const std::string& output_overlay_path) {
   Command crosvm_qcow2_cmd(crosvm_path);
   crosvm_qcow2_cmd.AddParameter("create_qcow2");
-  crosvm_qcow2_cmd.AddParameter("--backing_file=", backing_file);
+  crosvm_qcow2_cmd.AddParameter("--backing-file");
+  crosvm_qcow2_cmd.AddParameter(backing_file);
   crosvm_qcow2_cmd.AddParameter(output_overlay_path);
   int success = crosvm_qcow2_cmd.Start().Wait();
   if (success != 0) {
@@ -49,14 +56,37 @@
   return true;
 }
 
-class ServerLoopImpl : public ServerLoop, public SetupFeature {
+class ServerLoopImpl : public ServerLoop,
+                       public SetupFeature,
+                       public LateInjected {
  public:
   INJECT(ServerLoopImpl(const CuttlefishConfig& config,
                         const CuttlefishConfig::InstanceSpecific& instance))
       : config_(config), instance_(instance) {}
 
+  Result<void> LateInject(fruit::Injector<>& injector) override {
+    command_sources_ = injector.getMultibindings<CommandSource>();
+    return {};
+  }
+
   // ServerLoop
-  void Run(ProcessMonitor& process_monitor) override {
+  Result<void> Run() override {
+    // Monitor and restart host processes supporting the CVD
+    ProcessMonitor::Properties process_monitor_properties;
+    process_monitor_properties.RestartSubprocesses(
+        instance_.restart_subprocesses());
+
+    for (auto& command_source : command_sources_) {
+      if (command_source->Enabled()) {
+        auto commands = CF_EXPECT(command_source->Commands());
+        process_monitor_properties.AddCommands(std::move(commands));
+      }
+    }
+
+    ProcessMonitor process_monitor(std::move(process_monitor_properties));
+
+    CF_EXPECT(process_monitor.StartAndMonitorProcesses());
+
     while (true) {
       // TODO: use select to handle simultaneous connections.
       auto client = SharedFD::Accept(*server_);
@@ -70,7 +100,10 @@
               client->Write(&response, sizeof(response));
               std::exit(0);
             } else {
-              LOG(ERROR) << "Failed to stop subprocesses:\n" << stop.error();
+              LOG(ERROR) << "Failed to stop subprocesses:\n"
+                         << stop.error().Message();
+              LOG(DEBUG) << "Failed to stop subprocesses:\n"
+                         << stop.error().Trace();
               auto response = LauncherResponse::kError;
               client->Write(&response, sizeof(response));
             }
@@ -84,9 +117,21 @@
           }
           case LauncherAction::kPowerwash: {
             LOG(INFO) << "Received a Powerwash request from the monitor socket";
+            const auto& disks = instance_.virtual_disk_paths();
+            auto overlay = instance_.PerInstancePath("overlay.img");
+            if (std::find(disks.begin(), disks.end(), overlay) == disks.end()) {
+              LOG(ERROR) << "Powerwash unsupported with --use_overlay=false";
+              auto response = LauncherResponse::kError;
+              client->Write(&response, sizeof(response));
+              break;
+            }
+
             auto stop = process_monitor.StopMonitoredProcesses();
             if (!stop.ok()) {
-              LOG(ERROR) << "Stopping processes failed:\n" << stop.error();
+              LOG(ERROR) << "Stopping processes failed:\n"
+                         << stop.error().Message();
+              LOG(DEBUG) << "Stopping processes failed:\n"
+                         << stop.error().Trace();
               auto response = LauncherResponse::kError;
               client->Write(&response, sizeof(response));
               break;
@@ -110,7 +155,10 @@
           case LauncherAction::kRestart: {
             auto stop = process_monitor.StopMonitoredProcesses();
             if (!stop.ok()) {
-              LOG(ERROR) << "Stopping processes failed:\n" << stop.error();
+              LOG(ERROR) << "Stopping processes failed:\n"
+                         << stop.error().Message();
+              LOG(DEBUG) << "Stopping processes failed:\n"
+                         << stop.error().Trace();
               auto response = LauncherResponse::kError;
               client->Write(&response, sizeof(response));
               break;
@@ -167,10 +215,14 @@
         instance_.PerInstanceInternalPath("gatekeeper_fifo_vm.out"),
         instance_.PerInstanceInternalPath("bt_fifo_vm.in"),
         instance_.PerInstanceInternalPath("bt_fifo_vm.out"),
+        instance_.PerInstanceInternalPath("uwb_fifo_vm.in"),
+        instance_.PerInstanceInternalPath("uwb_fifo_vm.out"),
         instance_.PerInstanceInternalPath("gnsshvc_fifo_vm.in"),
         instance_.PerInstanceInternalPath("gnsshvc_fifo_vm.out"),
         instance_.PerInstanceInternalPath("locationhvc_fifo_vm.in"),
         instance_.PerInstanceInternalPath("locationhvc_fifo_vm.out"),
+        instance_.PerInstanceInternalPath("confui_fifo_vm.in"),
+        instance_.PerInstanceInternalPath("confui_fifo_vm.out"),
     };
     for (const auto& pipe : pipes) {
       unlink(pipe.c_str());
@@ -180,6 +232,9 @@
   bool PowerwashFiles() {
     DeleteFifos();
 
+    // TODO(b/269669405): Figure out why this file is not being deleted
+    unlink(instance_.PerInstanceInternalUdsPath("crosvm_control.sock").c_str());
+
     // TODO(schuffelen): Clean up duplication with assemble_cvd
     unlink(instance_.PerInstancePath("NVChip").c_str());
 
@@ -202,15 +257,27 @@
     auto sdcard_mb_size = (sdcard_size + (1 << 20) - 1) / (1 << 20);
     LOG(DEBUG) << "Size in mb is " << sdcard_mb_size;
     CreateBlankImage(sdcard_path, sdcard_mb_size, "sdcard");
-    std::vector<std::string> overlay_files{"overlay.img"};
-    if (instance_.start_ap()) {
-      overlay_files.emplace_back("ap_overlay.img");
+
+    struct OverlayFile {
+      std::string name;
+      std::string composite_disk_path;
+
+      OverlayFile(std::string name, std::string composite_disk_path)
+          : name(std::move(name)), composite_disk_path(std::move(composite_disk_path)) {}
+    };
+    std::vector<OverlayFile> overlay_files{
+      OverlayFile("overlay.img", instance_.os_composite_disk_path())
+    };
+    if (instance_.ap_boot_flow() != CuttlefishConfig::InstanceSpecific::APBootFlow::None) {
+      overlay_files.emplace_back(
+        OverlayFile("ap_overlay.img", instance_.ap_composite_disk_path()));
     }
-    for (auto overlay_file : {"overlay.img", "ap_overlay.img"}) {
-      auto overlay_path = instance_.PerInstancePath(overlay_file);
+    for (const auto& overlay_file : overlay_files) {
+      auto overlay_path = instance_.PerInstancePath(overlay_file.name.c_str());
+      auto composite_disk_path = overlay_file.composite_disk_path.c_str();
+
       unlink(overlay_path.c_str());
-      if (!CreateQcowOverlay(config_.crosvm_binary(),
-                             config_.os_composite_disk_path(), overlay_path)) {
+      if (!CreateQcowOverlay(instance_.crosvm_binary(), composite_disk_path, overlay_path)) {
         LOG(ERROR) << "CreateQcowOverlay failed";
         return false;
       }
@@ -226,7 +293,7 @@
     followup_stdin->UNMANAGED_Dup2(0);
 
     auto argv_vec = gflags::GetArgvs();
-    char** argv = new char*[argv_vec.size() + 2];
+    std::unique_ptr<char*[]> argv(new char*[argv_vec.size() + 2]);
     for (size_t i = 0; i < argv_vec.size(); i++) {
       argv[i] = argv_vec[i].data();
     }
@@ -236,13 +303,14 @@
     argv[argv_vec.size()] = reboot_notification.data();
     argv[argv_vec.size() + 1] = nullptr;
 
-    execv("/proc/self/exe", argv);
+    execv("/proc/self/exe", argv.get());
     // execve should not return, so something went wrong.
     PLOG(ERROR) << "execv returned: ";
   }
 
   const CuttlefishConfig& config_;
   const CuttlefishConfig::InstanceSpecific& instance_;
+  std::vector<CommandSource*> command_sources_;
   SharedFD server_;
 };
 
@@ -256,6 +324,7 @@
 serverLoopComponent() {
   return fruit::createComponent()
       .bind<ServerLoop, ServerLoopImpl>()
+      .addMultibinding<LateInjected, ServerLoopImpl>()
       .addMultibinding<SetupFeature, ServerLoopImpl>();
 }
 
diff --git a/host/commands/run_cvd/server_loop.h b/host/commands/run_cvd/server_loop.h
index 2364cb9..9a116d2 100644
--- a/host/commands/run_cvd/server_loop.h
+++ b/host/commands/run_cvd/server_loop.h
@@ -27,7 +27,7 @@
 class ServerLoop {
  public:
   virtual ~ServerLoop();
-  virtual void Run(ProcessMonitor& process_monitor) = 0;
+  virtual Result<void> Run() = 0;
 };
 
 fruit::Component<fruit::Required<const CuttlefishConfig,
diff --git a/host/commands/secure_env/Android.bp b/host/commands/secure_env/Android.bp
index cd6e5a0..9472874 100644
--- a/host/commands/secure_env/Android.bp
+++ b/host/commands/secure_env/Android.bp
@@ -24,11 +24,7 @@
         "libbase",
         "libcppbor_external",
         "libcppcose_rkp",
-        "libcuttlefish_fs",
-        "libcuttlefish_kernel_log_monitor_utils",
         "libcuttlefish_security",
-        "libcuttlefish_utils",
-        "libfruit",
         "libgatekeeper",
         "libjsoncpp",
         "libkeymaster_portable",
@@ -38,28 +34,41 @@
         "libcrypto",
         "libcutils",
         "libpuresoftkeymasterdevice_host",
-        "ms-tpm-20-ref-lib",
         "tpm2-tss2-esys",
         "tpm2-tss2-mu",
         "tpm2-tss2-rc",
         "tpm2-tss2-tcti",
     ],
     static_libs: [
-        "libcuttlefish_host_config",
-        "libgflags",
+        "libc++_static",
         "libscrypt_static",
+        "ms-tpm-20-ref-lib",
     ],
     cflags: [
         "-fno-rtti", // Required for libkeymaster_portable
     ],
+    target: {
+        not_windows: {
+            static_libs: [
+                "libcuttlefish_host_config",
+            ],
+            shared_libs: [
+                "libcuttlefish_fs",
+                "libcuttlefish_kernel_log_monitor_utils",
+                "libcuttlefish_utils",
+                "libfruit",
+            ],
+        },
+        windows: {
+            cflags: [
+                "-DNOGDI",
+            ],
+        },
+    },
 }
 
-cc_library_host_static {
-    name: "libsecure_env",
-    srcs: [
+common_libsecure_srcs = [
         "composite_serialization.cpp",
-        "confui_sign_server.cpp",
-        "device_tpm.cpp",
         "encrypted_serializable.cpp",
         "fragile_tpm_storage.cpp",
         "gatekeeper_responder.cpp",
@@ -69,11 +78,11 @@
         "json_serializable.cpp",
         "keymaster_responder.cpp",
         "primary_key_builder.cpp",
-        "secure_env.cpp",
         "tpm_attestation_record.cpp",
         "tpm_auth.cpp",
         "tpm_commands.cpp",
         "tpm_encrypt_decrypt.cpp",
+        "tpm_ffi.cpp",
         "tpm_gatekeeper.cpp",
         "tpm_hmac.cpp",
         "tpm_key_blob_maker.cpp",
@@ -83,21 +92,90 @@
         "tpm_remote_provisioning_context.cpp",
         "tpm_resource_manager.cpp",
         "tpm_serialize.cpp",
+]
+
+cc_library_host_static {
+    name: "libsecure_env_linux",
+    srcs: common_libsecure_srcs + [
+        "confui_sign_server.cpp",
+        "device_tpm.cpp",
     ],
     defaults: ["cuttlefish_buildhost_only", "secure_env_defaults"],
 }
 
+// Rust FFI bindings to access TPM-specific functionality in C/C++.
+rust_bindgen_host {
+    name: "libsecure_env_tpm",
+    source_stem: "libsecure_env_tpm",
+    crate_name: "secure_env_tpm",
+    wrapper_src: "tpm_ffi.h",
+    vendor_available: true,
+    static_libs: [
+        "libsecure_env_linux",
+    ],
+    defaults: [
+        "cuttlefish_buildhost_only",
+        "secure_env_defaults",
+    ],
+}
+
 cc_binary_host {
     name: "secure_env",
-    srcs: [
-        "secure_env.cpp",
-    ],
     static_libs: [
-        "libsecure_env",
+        "libgflags_cuttlefish",
     ],
-    defaults: ["cuttlefish_buildhost_only", "secure_env_defaults"],
+    target: {
+        windows: {
+            enabled: true,
+            srcs: [
+                "secure_env_windows_main.cpp",
+            ],
+            shared_libs: [
+                "libsecure_env_win",
+            ],
+        },
+        linux: {
+            srcs: [
+                "secure_env_linux_main.cpp",
+            ],
+            static_libs: [
+                "libsecure_env_linux",
+            ],
+            shared_libs: [
+                "libkmr_cf_ffi",
+            ],
+        },
+    },
+    defaults: [
+        "cuttlefish_buildhost_only",
+        "secure_env_defaults",
+    ],
 }
 
+cc_library {
+    name: "libsecure_env_win",
+    srcs: common_libsecure_srcs + [
+        "secure_env_windows_lib.cpp",
+    ],
+    header_libs: [
+        "cuttlefish_common_headers",
+    ],
+    target: {
+        windows: {
+            enabled: true,
+        },
+        not_windows: {
+            enabled: false,
+        },
+        host: {
+            compile_multilib: "64",
+        },
+    },
+    device_supported: false,
+    host_supported: true,
+    defaults: ["secure_env_defaults"],
+ }
+
 cc_test_host {
     name: "libsecure_env_test",
     srcs: [
@@ -105,7 +183,7 @@
         "encrypted_serializable_test.cpp",
     ],
     static_libs: [
-        "libsecure_env",
+        "libsecure_env_linux",
     ],
     defaults: ["cuttlefish_buildhost_only", "secure_env_defaults"],
     test_options: {
diff --git a/host/commands/secure_env/README.md b/host/commands/secure_env/README.md
new file mode 100644
index 0000000..a697520
--- /dev/null
+++ b/host/commands/secure_env/README.md
@@ -0,0 +1,14 @@
+Host process substituting for a TEE (Trusted Execution Environment). Used to
+run backing implementations of Android HALs that normally delegate to a TEE
+environment, specifically ConfirmationUI, Gatekeeper, and Keymint.
+
+Gatekeeper and Keymint communicate with `secure_env` through virtio-console
+channels connected to FIFO files on the host. The display part of
+ConfirmationUI is runs in the `webRTC` host process, which delegates signing
+operations only to `secure_env`.
+
+Before entering the kernel, the u-boot bootloader writes some information
+about the device image files into the keymint channel to prepare it with
+the authenticated version number of the operating system.
+
+[![linkage](./doc/linkage.png)](https://cs.android.com/android/platform/superproject/+/master:device/google/cuttlefish/host/commands/secure_env/doc/linkage.svg)
diff --git a/host/commands/secure_env/confui_sign_server.cpp b/host/commands/secure_env/confui_sign_server.cpp
index 618f3e2..f82ee85 100644
--- a/host/commands/secure_env/confui_sign_server.cpp
+++ b/host/commands/secure_env/confui_sign_server.cpp
@@ -20,6 +20,15 @@
 #include "host/commands/secure_env/primary_key_builder.h"
 #include "host/commands/secure_env/tpm_hmac.h"
 #include "host/libs/config/cuttlefish_config.h"
+#include "tpm_keymaster_context.h"
+
+namespace {
+
+// Defined in
+// hardware/interfaces/confirmationui/1.0/IConfirmationResultCallback.hal
+constexpr const char kConfirmationTokenMessageTag[] = "confirmation token";
+
+}  // namespace
 
 namespace cuttlefish {
 ConfUiSignServer::ConfUiSignServer(TpmResourceManager& tpm_resource_manager,
@@ -28,7 +37,7 @@
   auto config = cuttlefish::CuttlefishConfig::Get();
   CHECK(config) << "Config must not be null";
   auto instance = config->ForDefaultInstance();
-  server_socket_path_ = instance.PerInstanceInternalPath("confui_sign.sock");
+  server_socket_path_ = instance.PerInstanceInternalUdsPath("confui_sign.sock");
 }
 
 [[noreturn]] void ConfUiSignServer::MainLoop() {
@@ -53,31 +62,22 @@
     }
     auto request = request_opt.value();
 
-    // get signing key
-    auto signing_key_builder = PrimaryKeyBuilder();
-    signing_key_builder.SigningKey();
-    signing_key_builder.UniqueData("confirmation_token");
-    auto signing_key = signing_key_builder.CreateKey(tpm_resource_manager_);
-    if (!signing_key) {
-      LOG(ERROR) << "Could not generate signing key";
-      sign_sender.Send(confui::SignMessageError::kUnknownError, {});
-      continue;
-    }
+    // hmac over (prefix || data)
+    std::vector<std::uint8_t> data{std::begin(kConfirmationTokenMessageTag),
+                                   std::end(kConfirmationTokenMessageTag) - 1};
 
-    // hmac
-    auto hmac = TpmHmac(tpm_resource_manager_, signing_key->get(),
-                        TpmAuth(ESYS_TR_PASSWORD), request.payload_.data(),
-                        request.payload_.size());
+    data.insert(data.end(), request.payload_.data(),
+                request.payload_.data() + request.payload_.size());
+    auto hmac = TpmHmacWithContext(tpm_resource_manager_, "confirmation_token",
+                                   data.data(), data.size());
     if (!hmac) {
       LOG(ERROR) << "Could not calculate confirmation token hmac";
       sign_sender.Send(confui::SignMessageError::kUnknownError, {});
       continue;
     }
-    if (hmac->size == 0) {
-      LOG(ERROR) << "hmac was too short";
-      sign_sender.Send(confui::SignMessageError::kUnknownError, {});
-      continue;
-    }
+    CHECK(hmac->size == keymaster::kConfirmationTokenSize)
+        << "Hmac size for confirmation UI must be "
+        << keymaster::kConfirmationTokenSize;
 
     // send hmac
     std::vector<std::uint8_t> hmac_buffer(hmac->buffer,
diff --git a/host/commands/secure_env/doc/linkage.dot b/host/commands/secure_env/doc/linkage.dot
new file mode 100644
index 0000000..069ff82
--- /dev/null
+++ b/host/commands/secure_env/doc/linkage.dot
@@ -0,0 +1,52 @@
+digraph {
+  browser [label = "Browser"]
+  confirmationui_sign [color = "red", label = "internal/confui_sign.sock", shape = "rectangle"]
+  run_cvd
+  secure_env [label = < <B>secure_env</B> >, penwidth=2]
+  vmm [label = "crosvm / qemu"]
+  webRTC
+
+  subgraph fifos {
+    rank = same;
+
+    host_keymint_in [color = "blue", label = "internal/keymaster_fifo_vm.in", shape = "rectangle"]
+    host_keymint_out [color = "blue", label = "internal/keymaster_fifo_vm.out", shape = "rectangle"]
+    host_gatekeeper_in [color = "green", label = "internal/gatekeeper_fifo_vm.in", shape = "rectangle"]
+    host_gatekeeper_out [color = "green", label = "internal/gatekeeper_fifo_vm.out", shape = "rectangle"]
+    host_confirmationui_in [color = "red", label = "internal/confui_fifo_vm.in", shape = "rectangle"]
+    host_confirmationui_out [color = "red", label = "internal/confui_fifo_vm.out", shape = "rectangle"]
+  }
+
+  subgraph cluster_android {
+    label = "Android VM"
+    u_boot [label = "u-boot"]
+    confirmationui [color = "red", label = "ConfirmationUI HAL"]
+    gatekeeper [color = "green", label = "Gatekeeper HAL"]
+    keymint [color = "blue", label = "Keymint HAL"]
+
+    subgraph consoles {
+      	rank = same;
+        confirmationui_console [color = "red", label = "/dev/hvc8", shape = "rectangle"]
+        gatekeeper_console [color = "green", label = "/dev/hvc4", shape = "rectangle"]
+        keymint_console [color = "blue", label = "/dev/hvc3", shape = "rectangle"]
+    }
+  }
+
+  browser -> webRTC
+  run_cvd -> secure_env
+
+  webRTC -> host_confirmationui_out -> vmm [dir = "back", color = "red"]
+  webRTC -> host_confirmationui_in -> vmm [color = "red"]
+  vmm -> confirmationui_console -> confirmationui [dir = "both", color = "red"]
+  { rank = same; secure_env -> confirmationui_sign -> webRTC [color = "red", dir = "both"] }
+
+  secure_env -> host_gatekeeper_out -> vmm [color = "green", dir = "back"]
+  secure_env -> host_gatekeeper_in -> vmm [color = "green"]
+  vmm -> gatekeeper_console -> gatekeeper [color = "green", dir = "both"]
+
+  secure_env -> host_keymint_out -> vmm [color = "blue", dir = "back"]
+  secure_env -> host_keymint_in -> vmm [color = "blue"]
+  vmm -> keymint_console -> keymint [color = "blue", dir = "both"]
+  keymint_console -> u_boot [color = "blue", dir = "both"]
+
+}
diff --git a/host/commands/secure_env/doc/linkage.png b/host/commands/secure_env/doc/linkage.png
new file mode 100644
index 0000000..2549569
--- /dev/null
+++ b/host/commands/secure_env/doc/linkage.png
Binary files differ
diff --git a/host/commands/secure_env/doc/linkage.svg b/host/commands/secure_env/doc/linkage.svg
new file mode 100644
index 0000000..6bb643f
--- /dev/null
+++ b/host/commands/secure_env/doc/linkage.svg
@@ -0,0 +1,281 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN"
+ "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
+<!-- Generated by graphviz version 2.43.0 (0)
+ -->
+<!-- Title: %3 Pages: 1 -->
+<svg width="1180pt" height="423pt"
+ viewBox="0.00 0.00 1179.50 423.00" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
+<g id="graph0" class="graph" transform="scale(1 1) rotate(0) translate(4 419)">
+<title>%3</title>
+<polygon fill="white" stroke="transparent" points="-4,4 -4,-419 1175.5,-419 1175.5,4 -4,4"/>
+<g id="clust2" class="cluster">
+<title>cluster_android</title>
+<polygon fill="none" stroke="black" points="297.5,-8 297.5,-155 879.5,-155 879.5,-8 297.5,-8"/>
+<text text-anchor="middle" x="588.5" y="-139.8" font-family="Times,serif" font-size="14.00">Android VM</text>
+</g>
+<!-- browser -->
+<g id="node1" class="node">
+<title>browser</title>
+<ellipse fill="none" stroke="black" cx="904.5" cy="-397" rx="40.09" ry="18"/>
+<text text-anchor="middle" x="904.5" y="-393.3" font-family="Times,serif" font-size="14.00">Browser</text>
+</g>
+<!-- webRTC -->
+<g id="node6" class="node">
+<title>webRTC</title>
+<ellipse fill="none" stroke="black" cx="904.5" cy="-325" rx="42.49" ry="18"/>
+<text text-anchor="middle" x="904.5" y="-321.3" font-family="Times,serif" font-size="14.00">webRTC</text>
+</g>
+<!-- browser&#45;&gt;webRTC -->
+<g id="edge1" class="edge">
+<title>browser&#45;&gt;webRTC</title>
+<path fill="none" stroke="black" d="M904.5,-378.7C904.5,-370.98 904.5,-361.71 904.5,-353.11"/>
+<polygon fill="black" stroke="black" points="908,-353.1 904.5,-343.1 901,-353.1 908,-353.1"/>
+</g>
+<!-- confirmationui_sign -->
+<g id="node2" class="node">
+<title>confirmationui_sign</title>
+<polygon fill="none" stroke="red" points="788.5,-343 634.5,-343 634.5,-307 788.5,-307 788.5,-343"/>
+<text text-anchor="middle" x="711.5" y="-321.3" font-family="Times,serif" font-size="14.00">internal/confui_sign.sock</text>
+</g>
+<!-- confirmationui_sign&#45;&gt;webRTC -->
+<g id="edge10" class="edge">
+<title>confirmationui_sign&#45;&gt;webRTC</title>
+<path fill="none" stroke="red" d="M798.79,-325C816.4,-325 834.02,-325 851.64,-325"/>
+<polygon fill="red" stroke="red" points="798.78,-321.5 788.78,-325 798.78,-328.5 798.78,-321.5"/>
+<polygon fill="red" stroke="red" points="852.01,-328.5 862.01,-325 852.01,-321.5 852.01,-328.5"/>
+</g>
+<!-- run_cvd -->
+<g id="node3" class="node">
+<title>run_cvd</title>
+<ellipse fill="none" stroke="black" cx="502.5" cy="-397" rx="39.79" ry="18"/>
+<text text-anchor="middle" x="502.5" y="-393.3" font-family="Times,serif" font-size="14.00">run_cvd</text>
+</g>
+<!-- secure_env -->
+<g id="node4" class="node">
+<title>secure_env</title>
+<ellipse fill="none" stroke="black" stroke-width="2" cx="502.5" cy="-325" rx="57.39" ry="18"/>
+<text text-anchor="start" x="466.5" y="-322.3" font-family="Times,serif" font-size="14.00"> </text>
+<text text-anchor="start" x="470.5" y="-322.3" font-family="Times,serif" font-weight="bold" font-size="14.00">secure_env</text>
+<text text-anchor="start" x="534.5" y="-322.3" font-family="Times,serif" font-size="14.00"> </text>
+</g>
+<!-- run_cvd&#45;&gt;secure_env -->
+<g id="edge2" class="edge">
+<title>run_cvd&#45;&gt;secure_env</title>
+<path fill="none" stroke="black" d="M502.5,-378.7C502.5,-370.98 502.5,-361.71 502.5,-353.11"/>
+<polygon fill="black" stroke="black" points="506,-353.1 502.5,-343.1 499,-353.1 506,-353.1"/>
+</g>
+<!-- secure_env&#45;&gt;confirmationui_sign -->
+<g id="edge9" class="edge">
+<title>secure_env&#45;&gt;confirmationui_sign</title>
+<path fill="none" stroke="red" d="M570.21,-325C588.23,-325 606.26,-325 624.28,-325"/>
+<polygon fill="red" stroke="red" points="570.06,-321.5 560.06,-325 570.06,-328.5 570.06,-321.5"/>
+<polygon fill="red" stroke="red" points="624.3,-328.5 634.3,-325 624.3,-321.5 624.3,-328.5"/>
+</g>
+<!-- host_keymint_in -->
+<g id="node7" class="node">
+<title>host_keymint_in</title>
+<polygon fill="none" stroke="blue" points="183,-271 0,-271 0,-235 183,-235 183,-271"/>
+<text text-anchor="middle" x="91.5" y="-249.3" font-family="Times,serif" font-size="14.00">internal/keymaster_fifo_vm.in</text>
+</g>
+<!-- secure_env&#45;&gt;host_keymint_in -->
+<g id="edge19" class="edge">
+<title>secure_env&#45;&gt;host_keymint_in</title>
+<path fill="none" stroke="blue" d="M453.33,-315.63C389.35,-304.73 275.95,-285.41 193.11,-271.31"/>
+<polygon fill="blue" stroke="blue" points="193.61,-267.84 183.17,-269.61 192.44,-274.74 193.61,-267.84"/>
+</g>
+<!-- host_keymint_out -->
+<g id="node8" class="node">
+<title>host_keymint_out</title>
+<polygon fill="none" stroke="blue" points="391.5,-271 201.5,-271 201.5,-235 391.5,-235 391.5,-271"/>
+<text text-anchor="middle" x="296.5" y="-249.3" font-family="Times,serif" font-size="14.00">internal/keymaster_fifo_vm.out</text>
+</g>
+<!-- secure_env&#45;&gt;host_keymint_out -->
+<g id="edge17" class="edge">
+<title>secure_env&#45;&gt;host_keymint_out</title>
+<path fill="none" stroke="blue" d="M455.48,-308.02C423,-296.99 379.85,-282.32 346.71,-271.06"/>
+<polygon fill="blue" stroke="blue" points="454.6,-311.42 465.19,-311.32 456.85,-304.79 454.6,-311.42"/>
+</g>
+<!-- host_gatekeeper_in -->
+<g id="node9" class="node">
+<title>host_gatekeeper_in</title>
+<polygon fill="none" stroke="green" points="595,-271 410,-271 410,-235 595,-235 595,-271"/>
+<text text-anchor="middle" x="502.5" y="-249.3" font-family="Times,serif" font-size="14.00">internal/gatekeeper_fifo_vm.in</text>
+</g>
+<!-- secure_env&#45;&gt;host_gatekeeper_in -->
+<g id="edge13" class="edge">
+<title>secure_env&#45;&gt;host_gatekeeper_in</title>
+<path fill="none" stroke="green" d="M502.5,-306.7C502.5,-298.98 502.5,-289.71 502.5,-281.11"/>
+<polygon fill="green" stroke="green" points="506,-281.1 502.5,-271.1 499,-281.1 506,-281.1"/>
+</g>
+<!-- host_gatekeeper_out -->
+<g id="node10" class="node">
+<title>host_gatekeeper_out</title>
+<polygon fill="none" stroke="green" points="805.5,-271 613.5,-271 613.5,-235 805.5,-235 805.5,-271"/>
+<text text-anchor="middle" x="709.5" y="-249.3" font-family="Times,serif" font-size="14.00">internal/gatekeeper_fifo_vm.out</text>
+</g>
+<!-- secure_env&#45;&gt;host_gatekeeper_out -->
+<g id="edge11" class="edge">
+<title>secure_env&#45;&gt;host_gatekeeper_out</title>
+<path fill="none" stroke="green" d="M549.75,-308.02C582.38,-296.99 625.74,-282.32 659.05,-271.06"/>
+<polygon fill="green" stroke="green" points="548.34,-304.8 539.99,-311.32 550.58,-311.44 548.34,-304.8"/>
+</g>
+<!-- vmm -->
+<g id="node5" class="node">
+<title>vmm</title>
+<ellipse fill="none" stroke="black" cx="605.5" cy="-181" rx="64.19" ry="18"/>
+<text text-anchor="middle" x="605.5" y="-177.3" font-family="Times,serif" font-size="14.00">crosvm / qemu</text>
+</g>
+<!-- confirmationui_console -->
+<g id="node17" class="node">
+<title>confirmationui_console</title>
+<polygon fill="none" stroke="red" points="773.5,-124 703.5,-124 703.5,-88 773.5,-88 773.5,-124"/>
+<text text-anchor="middle" x="738.5" y="-102.3" font-family="Times,serif" font-size="14.00">/dev/hvc8</text>
+</g>
+<!-- vmm&#45;&gt;confirmationui_console -->
+<g id="edge7" class="edge">
+<title>vmm&#45;&gt;confirmationui_console</title>
+<path fill="none" stroke="red" d="M642.4,-159.74C659.81,-150.19 680.61,-138.77 698.4,-129.01"/>
+<polygon fill="red" stroke="red" points="640.46,-156.82 633.38,-164.7 643.83,-162.95 640.46,-156.82"/>
+<polygon fill="red" stroke="red" points="700.32,-131.95 707.4,-124.07 696.95,-125.81 700.32,-131.95"/>
+</g>
+<!-- gatekeeper_console -->
+<g id="node18" class="node">
+<title>gatekeeper_console</title>
+<polygon fill="none" stroke="green" points="640.5,-124 570.5,-124 570.5,-88 640.5,-88 640.5,-124"/>
+<text text-anchor="middle" x="605.5" y="-102.3" font-family="Times,serif" font-size="14.00">/dev/hvc4</text>
+</g>
+<!-- vmm&#45;&gt;gatekeeper_console -->
+<g id="edge15" class="edge">
+<title>vmm&#45;&gt;gatekeeper_console</title>
+<path fill="none" stroke="green" d="M605.5,-152.49C605.5,-146.55 605.5,-140.27 605.5,-134.33"/>
+<polygon fill="green" stroke="green" points="602,-152.7 605.5,-162.7 609,-152.7 602,-152.7"/>
+<polygon fill="green" stroke="green" points="609,-134.18 605.5,-124.18 602,-134.18 609,-134.18"/>
+</g>
+<!-- keymint_console -->
+<g id="node19" class="node">
+<title>keymint_console</title>
+<polygon fill="none" stroke="blue" points="488.5,-124 418.5,-124 418.5,-88 488.5,-88 488.5,-124"/>
+<text text-anchor="middle" x="453.5" y="-102.3" font-family="Times,serif" font-size="14.00">/dev/hvc3</text>
+</g>
+<!-- vmm&#45;&gt;keymint_console -->
+<g id="edge21" class="edge">
+<title>vmm&#45;&gt;keymint_console</title>
+<path fill="none" stroke="blue" d="M565.23,-160.66C544.45,-150.68 519.06,-138.49 497.74,-128.25"/>
+<polygon fill="blue" stroke="blue" points="563.84,-163.88 574.37,-165.05 566.87,-157.57 563.84,-163.88"/>
+<polygon fill="blue" stroke="blue" points="499.1,-125.02 488.57,-123.84 496.07,-131.33 499.1,-125.02"/>
+</g>
+<!-- host_confirmationui_in -->
+<g id="node11" class="node">
+<title>host_confirmationui_in</title>
+<polygon fill="none" stroke="red" points="985,-271 824,-271 824,-235 985,-235 985,-271"/>
+<text text-anchor="middle" x="904.5" y="-249.3" font-family="Times,serif" font-size="14.00">internal/confui_fifo_vm.in</text>
+</g>
+<!-- webRTC&#45;&gt;host_confirmationui_in -->
+<g id="edge5" class="edge">
+<title>webRTC&#45;&gt;host_confirmationui_in</title>
+<path fill="none" stroke="red" d="M904.5,-306.7C904.5,-298.98 904.5,-289.71 904.5,-281.11"/>
+<polygon fill="red" stroke="red" points="908,-281.1 904.5,-271.1 901,-281.1 908,-281.1"/>
+</g>
+<!-- host_confirmationui_out -->
+<g id="node12" class="node">
+<title>host_confirmationui_out</title>
+<polygon fill="none" stroke="red" points="1171.5,-271 1003.5,-271 1003.5,-235 1171.5,-235 1171.5,-271"/>
+<text text-anchor="middle" x="1087.5" y="-249.3" font-family="Times,serif" font-size="14.00">internal/confui_fifo_vm.out</text>
+</g>
+<!-- webRTC&#45;&gt;host_confirmationui_out -->
+<g id="edge3" class="edge">
+<title>webRTC&#45;&gt;host_confirmationui_out</title>
+<path fill="none" stroke="red" d="M944.57,-308.67C973.65,-297.55 1013,-282.5 1043.05,-271"/>
+<polygon fill="red" stroke="red" points="943.22,-305.44 935.13,-312.28 945.72,-311.98 943.22,-305.44"/>
+</g>
+<!-- host_keymint_in&#45;&gt;vmm -->
+<g id="edge20" class="edge">
+<title>host_keymint_in&#45;&gt;vmm</title>
+<path fill="none" stroke="blue" d="M183.13,-236.42C186.29,-235.93 189.42,-235.46 192.5,-235 313.66,-217.04 455.35,-199.63 537.32,-189.92"/>
+<polygon fill="blue" stroke="blue" points="537.74,-193.4 547.26,-188.75 536.92,-186.44 537.74,-193.4"/>
+</g>
+<!-- host_keymint_out&#45;&gt;vmm -->
+<g id="edge18" class="edge">
+<title>host_keymint_out&#45;&gt;vmm</title>
+<path fill="none" stroke="blue" d="M381.69,-232.7C438.14,-219.91 510.16,-203.6 557.03,-192.98"/>
+<polygon fill="blue" stroke="blue" points="380.67,-229.34 371.69,-234.97 382.22,-236.17 380.67,-229.34"/>
+</g>
+<!-- host_gatekeeper_in&#45;&gt;vmm -->
+<g id="edge14" class="edge">
+<title>host_gatekeeper_in&#45;&gt;vmm</title>
+<path fill="none" stroke="green" d="M527.7,-234.88C541.54,-225.47 558.84,-213.71 573.62,-203.67"/>
+<polygon fill="green" stroke="green" points="575.67,-206.5 581.98,-197.99 571.74,-200.71 575.67,-206.5"/>
+</g>
+<!-- host_gatekeeper_out&#45;&gt;vmm -->
+<g id="edge12" class="edge">
+<title>host_gatekeeper_out&#45;&gt;vmm</title>
+<path fill="none" stroke="green" d="M675.47,-229.1C660.36,-218.92 642.93,-207.2 629.25,-197.99"/>
+<polygon fill="green" stroke="green" points="673.81,-232.2 684.06,-234.88 677.72,-226.39 673.81,-232.2"/>
+</g>
+<!-- host_confirmationui_in&#45;&gt;vmm -->
+<g id="edge6" class="edge">
+<title>host_confirmationui_in&#45;&gt;vmm</title>
+<path fill="none" stroke="red" d="M831.74,-234.97C779.94,-222.84 711.27,-206.76 663.22,-195.51"/>
+<polygon fill="red" stroke="red" points="663.83,-192.06 653.3,-193.19 662.24,-198.88 663.83,-192.06"/>
+</g>
+<!-- host_confirmationui_out&#45;&gt;vmm -->
+<g id="edge4" class="edge">
+<title>host_confirmationui_out&#45;&gt;vmm</title>
+<path fill="none" stroke="red" d="M993.38,-234.86C869.99,-215.37 737.36,-198.25 663.29,-189.04"/>
+<polygon fill="red" stroke="red" points="992.95,-238.33 1003.38,-236.45 994.05,-231.42 992.95,-238.33"/>
+</g>
+<!-- u_boot -->
+<g id="node13" class="node">
+<title>u_boot</title>
+<ellipse fill="none" stroke="black" cx="339.5" cy="-34" rx="33.6" ry="18"/>
+<text text-anchor="middle" x="339.5" y="-30.3" font-family="Times,serif" font-size="14.00">u&#45;boot</text>
+</g>
+<!-- confirmationui -->
+<g id="node14" class="node">
+<title>confirmationui</title>
+<ellipse fill="none" stroke="red" cx="783.5" cy="-34" rx="88.28" ry="18"/>
+<text text-anchor="middle" x="783.5" y="-30.3" font-family="Times,serif" font-size="14.00">ConfirmationUI HAL</text>
+</g>
+<!-- gatekeeper -->
+<g id="node15" class="node">
+<title>gatekeeper</title>
+<ellipse fill="none" stroke="green" cx="605.5" cy="-34" rx="71.49" ry="18"/>
+<text text-anchor="middle" x="605.5" y="-30.3" font-family="Times,serif" font-size="14.00">Gatekeeper HAL</text>
+</g>
+<!-- keymint -->
+<g id="node16" class="node">
+<title>keymint</title>
+<ellipse fill="none" stroke="blue" cx="453.5" cy="-34" rx="62.29" ry="18"/>
+<text text-anchor="middle" x="453.5" y="-30.3" font-family="Times,serif" font-size="14.00">Keymint HAL</text>
+</g>
+<!-- confirmationui_console&#45;&gt;confirmationui -->
+<g id="edge8" class="edge">
+<title>confirmationui_console&#45;&gt;confirmationui</title>
+<path fill="none" stroke="red" d="M755.16,-79.08C759.05,-73.03 763.19,-66.59 767.07,-60.56"/>
+<polygon fill="red" stroke="red" points="752.09,-77.39 749.62,-87.7 757.98,-81.18 752.09,-77.39"/>
+<polygon fill="red" stroke="red" points="770.04,-62.41 772.5,-52.1 764.15,-58.62 770.04,-62.41"/>
+</g>
+<!-- gatekeeper_console&#45;&gt;gatekeeper -->
+<g id="edge16" class="edge">
+<title>gatekeeper_console&#45;&gt;gatekeeper</title>
+<path fill="none" stroke="green" d="M605.5,-77.67C605.5,-72.69 605.5,-67.49 605.5,-62.51"/>
+<polygon fill="green" stroke="green" points="602,-77.7 605.5,-87.7 609,-77.7 602,-77.7"/>
+<polygon fill="green" stroke="green" points="609,-62.1 605.5,-52.1 602,-62.1 609,-62.1"/>
+</g>
+<!-- keymint_console&#45;&gt;u_boot -->
+<g id="edge23" class="edge">
+<title>keymint_console&#45;&gt;u_boot</title>
+<path fill="none" stroke="blue" d="M416.87,-82.51C401.61,-73.14 384.11,-62.39 369.62,-53.49"/>
+<polygon fill="blue" stroke="blue" points="415.26,-85.63 425.61,-87.88 418.92,-79.66 415.26,-85.63"/>
+<polygon fill="blue" stroke="blue" points="371.38,-50.47 361.03,-48.22 367.72,-56.44 371.38,-50.47"/>
+</g>
+<!-- keymint_console&#45;&gt;keymint -->
+<g id="edge22" class="edge">
+<title>keymint_console&#45;&gt;keymint</title>
+<path fill="none" stroke="blue" d="M453.5,-77.67C453.5,-72.69 453.5,-67.49 453.5,-62.51"/>
+<polygon fill="blue" stroke="blue" points="450,-77.7 453.5,-87.7 457,-77.7 450,-77.7"/>
+<polygon fill="blue" stroke="blue" points="457,-62.1 453.5,-52.1 450,-62.1 457,-62.1"/>
+</g>
+</g>
+</svg>
diff --git a/host/commands/secure_env/gatekeeper_responder.cpp b/host/commands/secure_env/gatekeeper_responder.cpp
index 756bb72..9aa4959 100644
--- a/host/commands/secure_env/gatekeeper_responder.cpp
+++ b/host/commands/secure_env/gatekeeper_responder.cpp
@@ -20,10 +20,9 @@
 
 namespace cuttlefish {
 
-GatekeeperResponder::GatekeeperResponder(
-    cuttlefish::GatekeeperChannel& channel, gatekeeper::GateKeeper& gatekeeper)
-    : channel_(channel), gatekeeper_(gatekeeper) {
-}
+GatekeeperResponder::GatekeeperResponder(cuttlefish::GatekeeperChannel& channel,
+                                         gatekeeper::GateKeeper& gatekeeper)
+    : channel_(channel), gatekeeper_(gatekeeper) {}
 
 bool GatekeeperResponder::ProcessMessage() {
   auto request = channel_.ReceiveMessage();
diff --git a/host/commands/secure_env/gatekeeper_responder.h b/host/commands/secure_env/gatekeeper_responder.h
index fcf7b28..0462a1a 100644
--- a/host/commands/secure_env/gatekeeper_responder.h
+++ b/host/commands/secure_env/gatekeeper_responder.h
@@ -23,13 +23,14 @@
 
 class GatekeeperResponder {
 private:
-  cuttlefish::GatekeeperChannel& channel_;
-  gatekeeper::GateKeeper& gatekeeper_;
-public:
-  GatekeeperResponder(cuttlefish::GatekeeperChannel& channel,
-                      gatekeeper::GateKeeper& gatekeeper);
+ cuttlefish::GatekeeperChannel& channel_;
+ gatekeeper::GateKeeper& gatekeeper_;
 
-  bool ProcessMessage();
+public:
+ GatekeeperResponder(cuttlefish::GatekeeperChannel& channel,
+                     gatekeeper::GateKeeper& gatekeeper);
+
+ bool ProcessMessage();
 };
 
 }  // namespace cuttlefish
diff --git a/host/commands/secure_env/in_process_tpm.cpp b/host/commands/secure_env/in_process_tpm.cpp
index 551283e..8c49357 100644
--- a/host/commands/secure_env/in_process_tpm.cpp
+++ b/host/commands/secure_env/in_process_tpm.cpp
@@ -15,16 +15,19 @@
 
 #include "in_process_tpm.h"
 
-#include <endian.h>
 #include <stddef.h>
 
 #include <tss2/tss2_esys.h>
 #include <tss2/tss2_rc.h>
 
+#include <android-base/endian.h>
+
 #include "host/commands/secure_env/tpm_commands.h"
 
 extern "C" {
+#ifndef _WIN32
 typedef int SOCKET;
+#endif
 #include "TpmBuildSwitches.h"
 #include "TpmTcpProtocol.h"
 #include "Simulator_fp.h"
@@ -83,7 +86,7 @@
     LOG(VERBOSE) << "Sending TPM command "
                 << TpmCommandName(be32toh(header->ordinal));
     _IN_BUFFER input = {
-        .BufferSize = request.size(),
+        .BufferSize = static_cast<unsigned long>(request.size()),
         .Buffer = request.data(),
     };
     _OUT_BUFFER output = {
diff --git a/host/commands/secure_env/keymaster_responder.cpp b/host/commands/secure_env/keymaster_responder.cpp
index 688ddf3..26ab68f 100644
--- a/host/commands/secure_env/keymaster_responder.cpp
+++ b/host/commands/secure_env/keymaster_responder.cpp
@@ -68,6 +68,7 @@
     HANDLE_MESSAGE(IMPORT_WRAPPED_KEY, ImportWrappedKey)
     HANDLE_MESSAGE(GENERATE_RKP_KEY, GenerateRkpKey)
     HANDLE_MESSAGE(GENERATE_CSR, GenerateCsr)
+    HANDLE_MESSAGE(GENERATE_CSR_V2, GenerateCsrV2)
     HANDLE_MESSAGE(GENERATE_TIMESTAMP_TOKEN, GenerateTimestampToken)
 #undef HANDLE_MESSAGE
 #define HANDLE_MESSAGE_W_RETURN(ENUM_NAME, METHOD_NAME)              \
@@ -90,6 +91,8 @@
     HANDLE_MESSAGE_W_RETURN(CONFIGURE_VERIFIED_BOOT_INFO,
                             ConfigureVerifiedBootInfo)
     HANDLE_MESSAGE_W_RETURN(GET_ROOT_OF_TRUST, GetRootOfTrust)
+    HANDLE_MESSAGE_W_RETURN(SET_ATTESTATION_IDS, SetAttestationIds)
+    HANDLE_MESSAGE_W_RETURN(SET_ATTESTATION_IDS_KM3, SetAttestationIdsKM3)
 #undef HANDLE_MESSAGE_W_RETURN
 #define HANDLE_MESSAGE_W_RETURN_NO_ARG(ENUM_NAME, METHOD_NAME) \
   case ENUM_NAME: {                                            \
@@ -99,6 +102,7 @@
     HANDLE_MESSAGE_W_RETURN_NO_ARG(GET_HMAC_SHARING_PARAMETERS,
                                    GetHmacSharingParameters)
     HANDLE_MESSAGE_W_RETURN_NO_ARG(EARLY_BOOT_ENDED, EarlyBootEnded)
+    HANDLE_MESSAGE_W_RETURN_NO_ARG(GET_HW_INFO, GetHwInfo)
 #undef HANDLE_MESSAGE_W_RETURN_NO_ARG
     case ADD_RNG_ENTROPY: {
       AddEntropyRequest request(keymaster_.message_version());
diff --git a/host/commands/secure_env/keymaster_responder.h b/host/commands/secure_env/keymaster_responder.h
index 2bbd893..e656abb 100644
--- a/host/commands/secure_env/keymaster_responder.h
+++ b/host/commands/secure_env/keymaster_responder.h
@@ -22,10 +22,11 @@
 namespace cuttlefish {
 
 class KeymasterResponder {
-private:
+ private:
   cuttlefish::KeymasterChannel& channel_;
   keymaster::AndroidKeymaster& keymaster_;
-public:
+
+ public:
   KeymasterResponder(cuttlefish::KeymasterChannel& channel,
                      keymaster::AndroidKeymaster& keymaster);
 
diff --git a/host/commands/secure_env/primary_key_builder.cpp b/host/commands/secure_env/primary_key_builder.cpp
index b333002..173f31e 100644
--- a/host/commands/secure_env/primary_key_builder.cpp
+++ b/host/commands/secure_env/primary_key_builder.cpp
@@ -136,4 +136,9 @@
   };
 }
 
+TpmObjectSlot PrimaryKeyBuilder::CreateSigningKey(
+    TpmResourceManager& resource_manager, const std::string& unique_data) {
+  return SigningKeyCreator(unique_data)(resource_manager);
+}
+
 }  // namespace cuttlefish
diff --git a/host/commands/secure_env/primary_key_builder.h b/host/commands/secure_env/primary_key_builder.h
index 70170e7..7335956 100644
--- a/host/commands/secure_env/primary_key_builder.h
+++ b/host/commands/secure_env/primary_key_builder.h
@@ -32,8 +32,12 @@
   void ParentKey();
   void UniqueData(const std::string&);
 
-  TpmObjectSlot CreateKey(TpmResourceManager&);
-private:
+  TpmObjectSlot CreateKey(TpmResourceManager& resource_manager);
+
+  static TpmObjectSlot CreateSigningKey(TpmResourceManager& resource_manager,
+                                        const std::string& unique_data);
+
+ private:
   TPMT_PUBLIC public_area_;
 };
 
diff --git a/host/commands/secure_env/proxy_keymaster_context.h b/host/commands/secure_env/proxy_keymaster_context.h
index e3bf426..d40b5b2 100644
--- a/host/commands/secure_env/proxy_keymaster_context.h
+++ b/host/commands/secure_env/proxy_keymaster_context.h
@@ -131,6 +131,14 @@
         wrapped_key_params, wrapped_key_format, wrapped_key_material);
   }
 
+  keymaster_error_t CheckConfirmationToken(
+      const std::uint8_t* input_data, size_t input_data_size,
+      const uint8_t confirmation_token[keymaster::kConfirmationTokenSize])
+      const {
+    return wrapped_.CheckConfirmationToken(input_data, input_data_size,
+                                           confirmation_token);
+  }
+
   keymaster::RemoteProvisioningContext* GetRemoteProvisioningContext()
       const override {
     return wrapped_.GetRemoteProvisioningContext();
@@ -155,6 +163,16 @@
     return wrapped_.GetBootPatchlevel();
   }
 
+  keymaster_error_t SetAttestationIds(
+      const keymaster::SetAttestationIdsRequest& request) override {
+    return wrapped_.SetAttestationIds(request);
+  }
+
+  keymaster_error_t SetAttestationIdsKM3(
+      const keymaster::SetAttestationIdsKM3Request& request) override {
+    return wrapped_.SetAttestationIdsKM3(request);
+  }
+
  private:
   KeymasterContext& wrapped_;
 };
diff --git a/host/commands/secure_env/rust/Android.bp b/host/commands/secure_env/rust/Android.bp
new file mode 100644
index 0000000..c1f69f3
--- /dev/null
+++ b/host/commands/secure_env/rust/Android.bp
@@ -0,0 +1,67 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+rust_library_host {
+    name: "libkmr_cf",
+    srcs: [ "lib.rs" ],
+    crate_name: "kmr_cf",
+    rustlibs: [
+        "libhex",
+        "libkmr_common",
+        "libkmr_crypto_boring",
+        "libkmr_ta",
+        "libkmr_wire",
+        "liblibc",
+        "liblog_rust",
+        "libsecure_env_tpm",
+    ],
+    defaults: ["cuttlefish_buildhost_only"],
+}
+
+rust_ffi_host {
+    name: "libkmr_cf_ffi",
+    compile_multilib: "64",
+    srcs: [ "ffi.rs" ],
+    crate_name: "kmr_cf_ffi",
+    rustlibs: [
+        "libkmr_cf",
+        "libkmr_wire",
+        "liblibc",
+        "liblog_rust",
+    ],
+    defaults: ["cuttlefish_buildhost_only"],
+}
+
+rust_test_host {
+    name: "libkmr_cf_test",
+    srcs: ["lib.rs"],
+    rustlibs: [
+        "libhex",
+        "libkmr_common",
+        "libkmr_crypto_boring",
+        "libkmr_ta",
+        "libkmr_tests",
+        "libkmr_wire",
+        "liblibc",
+        "liblog_rust",
+        "libsecure_env_tpm",
+    ],
+    defaults: ["cuttlefish_buildhost_only"],
+    test_suites: ["general-tests"],
+}
\ No newline at end of file
diff --git a/host/commands/secure_env/rust/attest.rs b/host/commands/secure_env/rust/attest.rs
new file mode 100644
index 0000000..b8410d6
--- /dev/null
+++ b/host/commands/secure_env/rust/attest.rs
@@ -0,0 +1,423 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+//! Attestation keys and certificates.
+//!
+//! Hard-coded keys and certs copied from system/keymaster/context/soft_attestation_cert.cpp
+
+use kmr_common::{
+    crypto::ec, crypto::rsa, crypto::CurveType, crypto::KeyMaterial, wire::keymint,
+    wire::keymint::EcCurve, Error,
+};
+use kmr_ta::device::{RetrieveCertSigningInfo, SigningAlgorithm, SigningKeyType};
+
+/// RSA attestation private key in PKCS#1 format.
+///
+/// Decoded contents (using [der2ascii](https://github.com/google/der-ascii)):
+///
+/// ```
+/// SEQUENCE {
+///   INTEGER { 0 }
+///   INTEGER { `00c08323dc56881bb8302069f5b08561c6eebe7f05e2f5a842048abe8b47be76feaef25cf29b2afa3200141601429989a15fcfc6815eb363583c2fd2f20be4983283dd814b16d7e185417ae54abc296a3a6db5c004083b68c556c1f02339916419864d50b74d40aeca484c77356c895a0c275abfac499d5d7d2362f29c5e02e871` }
+///   INTEGER { 65537 }
+///   INTEGER { `00be860b0b99a802a6fb1a59438a7bb715065b09a36dc6e9cacc6bf3c02c34d7d79e94c6606428d88c7b7f6577c1cdea64074abe8e7286df1f0811dc9728260868de95d32efc96b6d084ff271a5f60defcc703e7a38e6e29ba9a3c5fc2c28076b6a896af1d34d78828ce9bddb1f34f9c9404430781298e201316725bbdbc993a41` }
+///   INTEGER { `00e1c6d927646c0916ec36826d594983740c21f1b074c4a1a59867c669795c85d3dc464c5b929e94bfb34e0dcc5014b10f13341ab7fdd5f60414d2a326cad41cc5` }
+///   INTEGER { `00da485997785cd5630fb0fd8c5254f98e538e18983aae9e6b7e6a5a7b5d343755b9218ebd40320d28387d789f76fa218bcc2d8b68a5f6418fbbeca5179ab3afbd` }
+///   INTEGER { `50fefc32649559616ed6534e154509329d93a3d810dbe5bdb982292cf78bd8badb8020ae8d57f4b71d05386ffe9e9db271ca3477a34999db76f8e5ece9c0d49d` }
+///   INTEGER { `15b74cf27cceff8bb36bf04d9d8346b09a2f70d2f4439b0f26ac7e03f7e9d1f77d4b915fd29b2823f03acb5d5200e0857ff2a803e93eee96d6235ce95442bc21` }
+///   INTEGER { `0090a745da8970b2cd649660324228c5f82856ffd665ba9a85c8d60f1b8bee717ecd2c72eae01dad86ba7654d4cf45adb5f1f2b31d9f8122cfa5f1a5570f9b2d25` }
+/// }
+/// ```
+const RSA_ATTEST_KEY: &str = concat!(
+    "3082025d02010002818100c08323dc56881bb8302069f5b08561c6eebe7f05e2",
+    "f5a842048abe8b47be76feaef25cf29b2afa3200141601429989a15fcfc6815e",
+    "b363583c2fd2f20be4983283dd814b16d7e185417ae54abc296a3a6db5c00408",
+    "3b68c556c1f02339916419864d50b74d40aeca484c77356c895a0c275abfac49",
+    "9d5d7d2362f29c5e02e871020301000102818100be860b0b99a802a6fb1a5943",
+    "8a7bb715065b09a36dc6e9cacc6bf3c02c34d7d79e94c6606428d88c7b7f6577",
+    "c1cdea64074abe8e7286df1f0811dc9728260868de95d32efc96b6d084ff271a",
+    "5f60defcc703e7a38e6e29ba9a3c5fc2c28076b6a896af1d34d78828ce9bddb1",
+    "f34f9c9404430781298e201316725bbdbc993a41024100e1c6d927646c0916ec",
+    "36826d594983740c21f1b074c4a1a59867c669795c85d3dc464c5b929e94bfb3",
+    "4e0dcc5014b10f13341ab7fdd5f60414d2a326cad41cc5024100da485997785c",
+    "d5630fb0fd8c5254f98e538e18983aae9e6b7e6a5a7b5d343755b9218ebd4032",
+    "0d28387d789f76fa218bcc2d8b68a5f6418fbbeca5179ab3afbd024050fefc32",
+    "649559616ed6534e154509329d93a3d810dbe5bdb982292cf78bd8badb8020ae",
+    "8d57f4b71d05386ffe9e9db271ca3477a34999db76f8e5ece9c0d49d024015b7",
+    "4cf27cceff8bb36bf04d9d8346b09a2f70d2f4439b0f26ac7e03f7e9d1f77d4b",
+    "915fd29b2823f03acb5d5200e0857ff2a803e93eee96d6235ce95442bc210241",
+    "0090a745da8970b2cd649660324228c5f82856ffd665ba9a85c8d60f1b8bee71",
+    "7ecd2c72eae01dad86ba7654d4cf45adb5f1f2b31d9f8122cfa5f1a5570f9b2d",
+    "25",
+);
+
+/// Attestation certificate corresponding to [`RSA_ATTEST_KEY`], signed by the key in
+/// [`RSA_ATTEST_ROOT_CERT`].
+///
+/// Decoded contents:
+///
+/// ```
+/// Certificate:
+///     Data:
+///         Version: 3 (0x2)
+///         Serial Number: 4096 (0x1000)
+///     Signature Algorithm: SHA256-RSA
+///         Issuer: C=US, O=Google, Inc., OU=Android, L=Mountain View, ST=California
+///         Validity:
+///             Not Before: 2016-01-04 12:40:53 +0000 UTC
+///             Not After : 2035-12-30 12:40:53 +0000 UTC
+///         Subject: C=US, O=Google, Inc., OU=Android, ST=California, CN=Android Software Attestation Key
+///         Subject Public Key Info:
+///             Public Key Algorithm: rsaEncryption
+///                 Public Key: (1024 bit)
+///                 Modulus:
+///                     c0:83:23:dc:56:88:1b:b8:30:20:69:f5:b0:85:61:
+///                     c6:ee:be:7f:05:e2:f5:a8:42:04:8a:be:8b:47:be:
+///                     76:fe:ae:f2:5c:f2:9b:2a:fa:32:00:14:16:01:42:
+///                     99:89:a1:5f:cf:c6:81:5e:b3:63:58:3c:2f:d2:f2:
+///                     0b:e4:98:32:83:dd:81:4b:16:d7:e1:85:41:7a:e5:
+///                     4a:bc:29:6a:3a:6d:b5:c0:04:08:3b:68:c5:56:c1:
+///                     f0:23:39:91:64:19:86:4d:50:b7:4d:40:ae:ca:48:
+///                     4c:77:35:6c:89:5a:0c:27:5a:bf:ac:49:9d:5d:7d:
+///                     23:62:f2:9c:5e:02:e8:71:
+///                 Exponent: 65537 (0x10001)
+///         X509v3 extensions:
+///             X509v3 Authority Key Identifier:
+///                 keyid:29faf1accc4dd24c96402775b6b0e932e507fe2e
+///             X509v3 Subject Key Identifier:
+///                 keyid:d40c101bf8cd63b9f73952b50e135ca6d7999386
+///             X509v3 Key Usage: critical
+///                 Digital Signature, Certificate Signing
+///             X509v3 Basic Constraints: critical
+///                 CA:true, pathlen:0
+///     Signature Algorithm: SHA256-RSA
+///          9e:2d:48:5f:8c:67:33:dc:1a:85:ad:99:d7:50:23:ea:14:ec:
+///          43:b0:e1:9d:ea:c2:23:46:1e:72:b5:19:dc:60:22:e4:a5:68:
+///          31:6c:0b:55:c4:e6:9c:a2:2d:9f:3a:4f:93:6b:31:8b:16:78:
+///          16:0d:88:cb:d9:8b:cc:80:9d:84:f0:c2:27:e3:6b:38:f1:fd:
+///          d1:e7:17:72:31:59:35:7d:96:f3:c5:7f:ab:9d:8f:96:61:26:
+///          4f:b2:be:81:bb:0d:49:04:22:8a:ce:9f:f7:f5:42:2e:25:44:
+///          fa:21:07:12:5a:83:b5:55:ad:18:82:f8:40:14:9b:9c:20:63:
+///          04:7f:
+/// ```
+const RSA_ATTEST_CERT: &str = concat!(
+    "308202b63082021fa00302010202021000300d06092a864886f70d01010b0500",
+    "3063310b30090603550406130255533113301106035504080c0a43616c69666f",
+    "726e69613116301406035504070c0d4d6f756e7461696e205669657731153013",
+    "060355040a0c0c476f6f676c652c20496e632e3110300e060355040b0c07416e",
+    "64726f6964301e170d3136303130343132343035335a170d3335313233303132",
+    "343035335a3076310b30090603550406130255533113301106035504080c0a43",
+    "616c69666f726e696131153013060355040a0c0c476f6f676c652c20496e632e",
+    "3110300e060355040b0c07416e64726f69643129302706035504030c20416e64",
+    "726f696420536f667477617265204174746573746174696f6e204b657930819f",
+    "300d06092a864886f70d010101050003818d0030818902818100c08323dc5688",
+    "1bb8302069f5b08561c6eebe7f05e2f5a842048abe8b47be76feaef25cf29b2a",
+    "fa3200141601429989a15fcfc6815eb363583c2fd2f20be4983283dd814b16d7",
+    "e185417ae54abc296a3a6db5c004083b68c556c1f02339916419864d50b74d40",
+    "aeca484c77356c895a0c275abfac499d5d7d2362f29c5e02e8710203010001a3",
+    "663064301d0603551d0e04160414d40c101bf8cd63b9f73952b50e135ca6d799",
+    "9386301f0603551d2304183016801429faf1accc4dd24c96402775b6b0e932e5",
+    "07fe2e30120603551d130101ff040830060101ff020100300e0603551d0f0101",
+    "ff040403020284300d06092a864886f70d01010b0500038181009e2d485f8c67",
+    "33dc1a85ad99d75023ea14ec43b0e19deac223461e72b519dc6022e4a568316c",
+    "0b55c4e69ca22d9f3a4f936b318b1678160d88cbd98bcc809d84f0c227e36b38",
+    "f1fdd1e717723159357d96f3c57fab9d8f9661264fb2be81bb0d4904228ace9f",
+    "f7f5422e2544fa2107125a83b555ad1882f840149b9c2063047f",
+);
+
+/// Attestation self-signed root certificate holding the key that signed [`RSA_ATTEST_CERT`].
+///
+/// Decoded contents:
+///
+/// ```
+/// Certificate:
+///     Data:
+///         Version: 3 (0x2)
+///         Serial Number: 18416584322103887884 (0xff94d9dd9f07c80c)
+///     Signature Algorithm: SHA256-RSA
+///         Issuer: C=US, O=Google, Inc., OU=Android, L=Mountain View, ST=California
+///         Validity:
+///             Not Before: 2016-01-04 12:31:08 +0000 UTC
+///             Not After : 2035-12-30 12:31:08 +0000 UTC
+///         Subject: C=US, O=Google, Inc., OU=Android, L=Mountain View, ST=California
+///         Subject Public Key Info:
+///             Public Key Algorithm: rsaEncryption
+///                 Public Key: (1024 bit)
+///                 Modulus:
+///                     a2:6b:ad:eb:6e:2e:44:61:ef:d5:0e:82:e6:b7:94:
+///                     d1:75:23:1f:77:9b:63:91:63:ff:f7:aa:ff:0b:72:
+///                     47:4e:c0:2c:43:ec:33:7c:d7:ac:ed:40:3e:8c:28:
+///                     a0:66:d5:f7:87:0b:33:97:de:0e:b8:4e:13:40:ab:
+///                     af:a5:27:bf:95:69:a0:31:db:06:52:65:f8:44:59:
+///                     57:61:f0:bb:f2:17:4b:b7:41:80:64:c0:28:0e:8f:
+///                     52:77:8e:db:d2:47:b6:45:e9:19:c8:e9:8b:c3:db:
+///                     c2:91:3f:d7:d7:50:c4:1d:35:66:f9:57:e4:97:96:
+///                     0b:09:ac:ce:92:35:85:9b:
+///                 Exponent: 65537 (0x10001)
+///         X509v3 extensions:
+///             X509v3 Authority Key Identifier:
+///                 keyid:29faf1accc4dd24c96402775b6b0e932e507fe2e
+///             X509v3 Subject Key Identifier:
+///                 keyid:29faf1accc4dd24c96402775b6b0e932e507fe2e
+///             X509v3 Key Usage: critical
+///                 Digital Signature, Certificate Signing
+///             X509v3 Basic Constraints: critical
+///                 CA:true
+///     Signature Algorithm: SHA256-RSA
+///          4f:72:f3:36:59:8d:0e:c1:b9:74:5b:31:59:f6:f0:8d:25:49:
+///          30:9e:a3:1c:1c:29:d2:45:2d:20:b9:4d:5f:64:b4:e8:80:c7:
+///          78:7a:9c:39:de:a8:b3:f5:bf:2f:70:5f:47:10:5c:c5:e6:eb:
+///          4d:06:99:61:d2:ae:9a:07:ff:f7:7c:b8:ab:eb:9c:0f:24:07:
+///          5e:b1:7f:ba:79:71:fd:4d:5b:9e:df:14:a9:fe:df:ed:7c:c0:
+///          88:5d:f8:dd:9b:64:32:56:d5:35:9a:e2:13:f9:8f:ce:c1:7c:
+///          dc:ef:a4:aa:b2:55:c3:83:a9:2e:fb:5c:f6:62:f5:27:52:17:
+///          be:63:
+/// ```
+const RSA_ATTEST_ROOT_CERT: &str = concat!(
+    "308202a730820210a003020102020900ff94d9dd9f07c80c300d06092a864886",
+    "f70d01010b05003063310b30090603550406130255533113301106035504080c",
+    "0a43616c69666f726e69613116301406035504070c0d4d6f756e7461696e2056",
+    "69657731153013060355040a0c0c476f6f676c652c20496e632e3110300e0603",
+    "55040b0c07416e64726f6964301e170d3136303130343132333130385a170d33",
+    "35313233303132333130385a3063310b30090603550406130255533113301106",
+    "035504080c0a43616c69666f726e69613116301406035504070c0d4d6f756e74",
+    "61696e205669657731153013060355040a0c0c476f6f676c652c20496e632e31",
+    "10300e060355040b0c07416e64726f696430819f300d06092a864886f70d0101",
+    "01050003818d0030818902818100a26badeb6e2e4461efd50e82e6b794d17523",
+    "1f779b639163fff7aaff0b72474ec02c43ec337cd7aced403e8c28a066d5f787",
+    "0b3397de0eb84e1340abafa527bf9569a031db065265f844595761f0bbf2174b",
+    "b7418064c0280e8f52778edbd247b645e919c8e98bc3dbc2913fd7d750c41d35",
+    "66f957e497960b09acce9235859b0203010001a3633061301d0603551d0e0416",
+    "041429faf1accc4dd24c96402775b6b0e932e507fe2e301f0603551d23041830",
+    "16801429faf1accc4dd24c96402775b6b0e932e507fe2e300f0603551d130101",
+    "ff040530030101ff300e0603551d0f0101ff040403020284300d06092a864886",
+    "f70d01010b0500038181004f72f336598d0ec1b9745b3159f6f08d2549309ea3",
+    "1c1c29d2452d20b94d5f64b4e880c7787a9c39dea8b3f5bf2f705f47105cc5e6",
+    "eb4d069961d2ae9a07fff77cb8abeb9c0f24075eb17fba7971fd4d5b9edf14a9",
+    "fedfed7cc0885df8dd9b643256d5359ae213f98fcec17cdcefa4aab255c383a9",
+    "2efb5cf662f5275217be63",
+);
+
+/// EC attestation private key in `ECPrivateKey` format.
+///
+/// Decoded contents (using [der2ascii](https://github.com/google/der-ascii)):
+///
+/// ```
+/// SEQUENCE {
+///   INTEGER { 1 }
+///   OCTET_STRING { `21e086432a15198459cf363a50fc14c9daadf935f527c2dfd71e4d6dbc42e544` }
+///   [0] {
+///     # secp256r1
+///     OBJECT_IDENTIFIER { 1.2.840.10045.3.1.7 }
+///   }
+///   [1] {
+///     BIT_STRING { `00` `04eb9e79f8426359accb2a914c8986cc70ad90669382a9732613feaccbf821274c2174974a2afea5b94d7f66d4e065106635bc53b7a0a3a671583edb3e11ae1014` }
+///   }
+/// }
+/// ```
+const EC_ATTEST_KEY: &str = concat!(
+    "3077020101042021e086432a15198459cf363a50fc14c9daadf935f527c2dfd7",
+    "1e4d6dbc42e544a00a06082a8648ce3d030107a14403420004eb9e79f8426359",
+    "accb2a914c8986cc70ad90669382a9732613feaccbf821274c2174974a2afea5",
+    "b94d7f66d4e065106635bc53b7a0a3a671583edb3e11ae1014",
+);
+
+/// Attestation certificate corresponding to [`EC_ATTEST_KEY`], signed by the key in
+/// [`EC_ATTEST_ROOT_CERT`].
+///
+/// Decoded contents:
+///
+/// ```
+/// Certificate:
+///     Data:
+///         Version: 3 (0x2)
+///         Serial Number: 4097 (0x1001)
+///     Signature Algorithm: ECDSA-SHA256
+///         Issuer: C=US, O=Google, Inc., OU=Android, L=Mountain View, ST=California, CN=Android Keystore Software Attestation Root
+///         Validity:
+///             Not Before: 2016-01-11 00:46:09 +0000 UTC
+///             Not After : 2026-01-08 00:46:09 +0000 UTC
+///         Subject: C=US, O=Google, Inc., OU=Android, ST=California, CN=Android Keystore Software Attestation Intermediate
+///         Subject Public Key Info:
+///             Public Key Algorithm: id-ecPublicKey
+///                 Public Key: (256 bit)
+///                 pub:
+///                     04:eb:9e:79:f8:42:63:59:ac:cb:2a:91:4c:89:86:
+///                     cc:70:ad:90:66:93:82:a9:73:26:13:fe:ac:cb:f8:
+///                     21:27:4c:21:74:97:4a:2a:fe:a5:b9:4d:7f:66:d4:
+///                     e0:65:10:66:35:bc:53:b7:a0:a3:a6:71:58:3e:db:
+///                     3e:11:ae:10:14:
+///                 ASN1 OID: prime256v1
+///         X509v3 extensions:
+///             X509v3 Authority Key Identifier:
+///                 keyid:c8ade9774c45c3a3cf0d1610e479433a215a30cf
+///             X509v3 Subject Key Identifier:
+///                 keyid:3ffcacd61ab13a9e8120b8d5251cc565bb1e91a9
+///             X509v3 Key Usage: critical
+///                 Digital Signature, Certificate Signing
+///             X509v3 Basic Constraints: critical
+///                 CA:true, pathlen:0
+///     Signature Algorithm: ECDSA-SHA256
+///          30:45:02:20:4b:8a:9b:7b:ee:82:bc:c0:33:87:ae:2f:c0:89:
+///          98:b4:dd:c3:8d:ab:27:2a:45:9f:69:0c:c7:c3:92:d4:0f:8e:
+///          02:21:00:ee:da:01:5d:b6:f4:32:e9:d4:84:3b:62:4c:94:04:
+///          ef:3a:7c:cc:bd:5e:fb:22:bb:e7:fe:b9:77:3f:59:3f:fb:
+/// ```
+const EC_ATTEST_CERT: &str = concat!(
+    "308202783082021ea00302010202021001300a06082a8648ce3d040302308198",
+    "310b30090603550406130255533113301106035504080c0a43616c69666f726e",
+    "69613116301406035504070c0d4d6f756e7461696e2056696577311530130603",
+    "55040a0c0c476f6f676c652c20496e632e3110300e060355040b0c07416e6472",
+    "6f69643133303106035504030c2a416e64726f6964204b657973746f72652053",
+    "6f667477617265204174746573746174696f6e20526f6f74301e170d31363031",
+    "31313030343630395a170d3236303130383030343630395a308188310b300906",
+    "03550406130255533113301106035504080c0a43616c69666f726e6961311530",
+    "13060355040a0c0c476f6f676c652c20496e632e3110300e060355040b0c0741",
+    "6e64726f6964313b303906035504030c32416e64726f6964204b657973746f72",
+    "6520536f667477617265204174746573746174696f6e20496e7465726d656469",
+    "6174653059301306072a8648ce3d020106082a8648ce3d03010703420004eb9e",
+    "79f8426359accb2a914c8986cc70ad90669382a9732613feaccbf821274c2174",
+    "974a2afea5b94d7f66d4e065106635bc53b7a0a3a671583edb3e11ae1014a366",
+    "3064301d0603551d0e041604143ffcacd61ab13a9e8120b8d5251cc565bb1e91",
+    "a9301f0603551d23041830168014c8ade9774c45c3a3cf0d1610e479433a215a",
+    "30cf30120603551d130101ff040830060101ff020100300e0603551d0f0101ff",
+    "040403020284300a06082a8648ce3d040302034800304502204b8a9b7bee82bc",
+    "c03387ae2fc08998b4ddc38dab272a459f690cc7c392d40f8e022100eeda015d",
+    "b6f432e9d4843b624c9404ef3a7cccbd5efb22bbe7feb9773f593ffb",
+);
+
+/// Attestation self-signed root certificate holding the key that signed [`EC_ATTEST_CERT`].
+///
+/// Decoded contents:
+///
+/// ```
+/// Certificate:
+///     Data:
+///         Version: 3 (0x2)
+///         Serial Number: 11674912229752527703 (0xa2059ed10e435b57)
+///     Signature Algorithm: ECDSA-SHA256
+///         Issuer: C=US, O=Google, Inc., OU=Android, L=Mountain View, ST=California, CN=Android Keystore Software Attestation Root
+///         Validity:
+///             Not Before: 2016-01-11 00:43:50 +0000 UTC
+///             Not After : 2036-01-06 00:43:50 +0000 UTC
+///         Subject: C=US, O=Google, Inc., OU=Android, L=Mountain View, ST=California, CN=Android Keystore Software Attestation Root
+///         Subject Public Key Info:
+///             Public Key Algorithm: id-ecPublicKey
+///                 Public Key: (256 bit)
+///                 pub:
+///                     04:ee:5d:5e:c7:e1:c0:db:6d:03:a6:7e:e6:b6:1b:
+///                     ec:4d:6a:5d:6a:68:2e:0f:ff:7f:49:0e:7d:77:1f:
+///                     44:22:6d:bd:b1:af:fa:16:cb:c7:ad:c5:77:d2:56:
+///                     9c:aa:b7:b0:2d:54:01:5d:3e:43:2b:2a:8e:d7:4e:
+///                     ec:48:75:41:a4:
+///                 ASN1 OID: prime256v1
+///         X509v3 extensions:
+///             X509v3 Authority Key Identifier:
+///                 keyid:c8ade9774c45c3a3cf0d1610e479433a215a30cf
+///             X509v3 Subject Key Identifier:
+///                 keyid:c8ade9774c45c3a3cf0d1610e479433a215a30cf
+///             X509v3 Key Usage: critical
+///                 Digital Signature, Certificate Signing
+///             X509v3 Basic Constraints: critical
+///                 CA:true
+///     Signature Algorithm: ECDSA-SHA256
+///          30:44:02:20:35:21:a3:ef:8b:34:46:1e:9c:d5:60:f3:1d:58:
+///          89:20:6a:dc:a3:65:41:f6:0d:9e:ce:8a:19:8c:66:48:60:7b:
+///          02:20:4d:0b:f3:51:d9:30:7c:7d:5b:da:35:34:1d:a8:47:1b:
+///          63:a5:85:65:3c:ad:4f:24:a7:e7:4d:af:41:7d:f1:bf:
+/// ```
+const EC_ATTEST_ROOT_CERT: &str = concat!(
+    "3082028b30820232a003020102020900a2059ed10e435b57300a06082a8648ce",
+    "3d040302308198310b30090603550406130255533113301106035504080c0a43",
+    "616c69666f726e69613116301406035504070c0d4d6f756e7461696e20566965",
+    "7731153013060355040a0c0c476f6f676c652c20496e632e3110300e06035504",
+    "0b0c07416e64726f69643133303106035504030c2a416e64726f6964204b6579",
+    "73746f726520536f667477617265204174746573746174696f6e20526f6f7430",
+    "1e170d3136303131313030343335305a170d3336303130363030343335305a30",
+    "8198310b30090603550406130255533113301106035504080c0a43616c69666f",
+    "726e69613116301406035504070c0d4d6f756e7461696e205669657731153013",
+    "060355040a0c0c476f6f676c652c20496e632e3110300e060355040b0c07416e",
+    "64726f69643133303106035504030c2a416e64726f6964204b657973746f7265",
+    "20536f667477617265204174746573746174696f6e20526f6f74305930130607",
+    "2a8648ce3d020106082a8648ce3d03010703420004ee5d5ec7e1c0db6d03a67e",
+    "e6b61bec4d6a5d6a682e0fff7f490e7d771f44226dbdb1affa16cbc7adc577d2",
+    "569caab7b02d54015d3e432b2a8ed74eec487541a4a3633061301d0603551d0e",
+    "04160414c8ade9774c45c3a3cf0d1610e479433a215a30cf301f0603551d2304",
+    "1830168014c8ade9774c45c3a3cf0d1610e479433a215a30cf300f0603551d13",
+    "0101ff040530030101ff300e0603551d0f0101ff040403020284300a06082a86",
+    "48ce3d040302034700304402203521a3ef8b34461e9cd560f31d5889206adca3",
+    "6541f60d9ece8a198c6648607b02204d0bf351d9307c7d5bda35341da8471b63",
+    "a585653cad4f24a7e74daf417df1bf",
+);
+
+/// Per-algorithm attestation certificate signing information.
+pub(crate) struct CertSignAlgoInfo {
+    key: KeyMaterial,
+    chain: Vec<keymint::Certificate>,
+}
+
+pub(crate) struct CertSignInfo {
+    rsa_info: CertSignAlgoInfo,
+    ec_info: CertSignAlgoInfo,
+}
+
+impl CertSignInfo {
+    pub(crate) fn new() -> Self {
+        CertSignInfo {
+            rsa_info: CertSignAlgoInfo {
+                key: KeyMaterial::Rsa(rsa::Key(hex::decode(RSA_ATTEST_KEY).unwrap()).into()),
+                chain: vec![
+                    keymint::Certificate {
+                        encoded_certificate: hex::decode(RSA_ATTEST_CERT).unwrap(),
+                    },
+                    keymint::Certificate {
+                        encoded_certificate: hex::decode(RSA_ATTEST_ROOT_CERT).unwrap(),
+                    },
+                ],
+            },
+            ec_info: CertSignAlgoInfo {
+                key: KeyMaterial::Ec(
+                    EcCurve::P256,
+                    CurveType::Nist,
+                    ec::Key::P256(ec::NistKey(hex::decode(EC_ATTEST_KEY).unwrap())).into(),
+                ),
+                chain: vec![
+                    keymint::Certificate {
+                        encoded_certificate: hex::decode(EC_ATTEST_CERT).unwrap(),
+                    },
+                    keymint::Certificate {
+                        encoded_certificate: hex::decode(EC_ATTEST_ROOT_CERT).unwrap(),
+                    },
+                ],
+            },
+        }
+    }
+}
+
+impl RetrieveCertSigningInfo for CertSignInfo {
+    fn signing_key(&self, key_type: SigningKeyType) -> Result<KeyMaterial, Error> {
+        Ok(match key_type.algo_hint {
+            SigningAlgorithm::Rsa => self.rsa_info.key.clone(),
+            SigningAlgorithm::Ec => self.ec_info.key.clone(),
+        })
+    }
+
+    fn cert_chain(&self, key_type: SigningKeyType) -> Result<Vec<keymint::Certificate>, Error> {
+        Ok(match key_type.algo_hint {
+            SigningAlgorithm::Rsa => self.rsa_info.chain.clone(),
+            SigningAlgorithm::Ec => self.ec_info.chain.clone(),
+        })
+    }
+}
diff --git a/host/commands/secure_env/rust/clock.rs b/host/commands/secure_env/rust/clock.rs
new file mode 100644
index 0000000..a97a76e
--- /dev/null
+++ b/host/commands/secure_env/rust/clock.rs
@@ -0,0 +1,36 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+//! Monotonic clock implementation.
+
+use kmr_common::crypto;
+
+/// Monotonic clock.
+#[derive(Default)]
+pub struct StdClock;
+
+impl crypto::MonotonicClock for StdClock {
+    fn now(&self) -> crypto::MillisecondsSinceEpoch {
+        let mut time = libc::timespec { tv_sec: 0, tv_nsec: 0 };
+        // Safety: `time` is a valid structure.
+        let rc =
+            unsafe { libc::clock_gettime(libc::CLOCK_BOOTTIME, &mut time as *mut libc::timespec) };
+        if rc < 0 {
+            log::warn!("failed to get time!");
+            return crypto::MillisecondsSinceEpoch(0);
+        }
+        crypto::MillisecondsSinceEpoch((time.tv_sec * 1000) + (time.tv_nsec / 1000 / 1000))
+    }
+}
diff --git a/host/commands/secure_env/rust/ffi.rs b/host/commands/secure_env/rust/ffi.rs
new file mode 100644
index 0000000..0310dd9
--- /dev/null
+++ b/host/commands/secure_env/rust/ffi.rs
@@ -0,0 +1,42 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+//! KeyMint TA core for Cuttlefish.
+
+extern crate alloc;
+
+use kmr_wire::keymint::SecurityLevel;
+use libc::c_int;
+use log::error;
+
+/// FFI wrapper around [`kmr_cf::ta_main`].
+#[no_mangle]
+pub extern "C" fn kmr_ta_main(
+    fd_in: c_int,
+    fd_out: c_int,
+    security_level: c_int,
+    trm: *mut libc::c_void,
+) {
+    let security_level = match security_level {
+        x if x == SecurityLevel::TrustedEnvironment as i32 => SecurityLevel::TrustedEnvironment,
+        x if x == SecurityLevel::Strongbox as i32 => SecurityLevel::Strongbox,
+        x if x == SecurityLevel::Software as i32 => SecurityLevel::Software,
+        _ => {
+            error!("unexpected security level {}, running as SOFTWARE", security_level);
+            SecurityLevel::Software
+        }
+    };
+    kmr_cf::ta_main(fd_in, fd_out, security_level, trm)
+}
diff --git a/host/commands/secure_env/rust/kmr_ta.h b/host/commands/secure_env/rust/kmr_ta.h
new file mode 100644
index 0000000..ce70873
--- /dev/null
+++ b/host/commands/secure_env/rust/kmr_ta.h
@@ -0,0 +1,33 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#pragma once
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+// Main function for Rust implementation of KeyMint.
+// - fd_in: file descriptor for incoming serialized request messages
+// - fd_out: file descriptor for outgoing serialized response messages
+// - security_level: security level to advertize; should be one of the integer
+//   values from SecurityLevel.aidl.
+// - trm: pointer to a valid `TpmResourceManager`, which must remain valid
+//   for the entire duration of the function execution.
+void kmr_ta_main(int fd_in, int fd_out, int security_level, void* trm);
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/host/commands/secure_env/rust/lib.rs b/host/commands/secure_env/rust/lib.rs
new file mode 100644
index 0000000..4d4e2f4
--- /dev/null
+++ b/host/commands/secure_env/rust/lib.rs
@@ -0,0 +1,213 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+//! KeyMint TA core for Cuttlefish.
+
+extern crate alloc;
+
+use kmr_common::crypto;
+use kmr_crypto_boring::{
+    aes::BoringAes, aes_cmac::BoringAesCmac, des::BoringDes, ec::BoringEc, eq::BoringEq,
+    hmac::BoringHmac, rng::BoringRng, rsa::BoringRsa,
+};
+use kmr_ta::device::{BootloaderDone, Implementation, TrustedPresenceUnsupported};
+use kmr_ta::{HardwareInfo, KeyMintTa, RpcInfo, RpcInfoV3};
+use kmr_wire::keymint::SecurityLevel;
+use kmr_wire::rpc::MINIMUM_SUPPORTED_KEYS_IN_CSR;
+use libc::c_int;
+use log::{error, info, trace};
+use std::ffi::CString;
+use std::io::{Read, Write};
+use std::os::unix::{ffi::OsStrExt, io::FromRawFd};
+
+pub mod attest;
+mod clock;
+pub mod rpc;
+mod soft;
+mod tpm;
+
+#[cfg(test)]
+mod tests;
+
+/// Main routine for the KeyMint TA. Only returns if there is a fatal error.
+pub fn ta_main(fd_in: c_int, fd_out: c_int, security_level: SecurityLevel, trm: *mut libc::c_void) {
+    log::set_logger(&AndroidCppLogger).unwrap();
+    log::set_max_level(log::LevelFilter::Debug); // Filtering happens elsewhere
+    info!(
+        "KeyMint TA running with fd_in={}, fd_out={}, security_level={:?}",
+        fd_in, fd_out, security_level,
+    );
+
+    // Safety: the following calls rely on this code being the sole owner of the file descriptors.
+    let mut infile = unsafe { std::fs::File::from_raw_fd(fd_in) };
+    let mut outfile = unsafe { std::fs::File::from_raw_fd(fd_out) };
+
+    let hw_info = HardwareInfo {
+        version_number: 1,
+        security_level,
+        impl_name: "Rust reference implementation for Cuttlefish",
+        author_name: "Google",
+        unique_id: "Cuttlefish KeyMint TA",
+    };
+
+    let rpc_info_v3 = RpcInfoV3 {
+        author_name: "Google",
+        unique_id: "Cuttlefish KeyMint TA",
+        fused: false,
+        supported_num_of_keys_in_csr: MINIMUM_SUPPORTED_KEYS_IN_CSR,
+    };
+
+    let mut rng = BoringRng::default();
+    let clock = clock::StdClock::default();
+    let rsa = BoringRsa::default();
+    let ec = BoringEc::default();
+    let tpm_hkdf = tpm::KeyDerivation::new(trm);
+    let soft_hkdf = BoringHmac;
+    let hkdf: &dyn kmr_common::crypto::Hkdf =
+        if security_level == SecurityLevel::TrustedEnvironment { &tpm_hkdf } else { &soft_hkdf };
+    let imp = crypto::Implementation {
+        rng: &mut rng,
+        clock: Some(&clock),
+        compare: &BoringEq,
+        aes: &BoringAes,
+        des: &BoringDes,
+        hmac: &BoringHmac,
+        rsa: &rsa,
+        ec: &ec,
+        ckdf: &BoringAesCmac,
+        hkdf,
+    };
+    let sign_info = attest::CertSignInfo::new();
+
+    let tpm_keys = tpm::Keys::new(trm);
+    let soft_keys = soft::Keys;
+    let keys: &dyn kmr_ta::device::RetrieveKeyMaterial =
+        if security_level == SecurityLevel::TrustedEnvironment { &tpm_keys } else { &soft_keys };
+    let tpm_rpc = tpm::RpcArtifacts::new(tpm::TpmHmac::new(trm));
+    let soft_rpc = soft::RpcArtifacts::new(soft::Derive::default());
+    let rpc: &dyn kmr_ta::device::RetrieveRpcArtifacts =
+        if security_level == SecurityLevel::TrustedEnvironment { &tpm_rpc } else { &soft_rpc };
+    let dev = Implementation {
+        keys,
+        sign_info: &sign_info,
+        // HAL populates attestation IDs from properties.
+        attest_ids: None,
+        // No secure storage.
+        sdd_mgr: None,
+        // `BOOTLOADER_ONLY` keys not supported.
+        bootloader: &BootloaderDone,
+        // `STORAGE_KEY` keys not supported.
+        sk_wrapper: None,
+        // `TRUSTED_USER_PRESENCE_REQUIRED` keys not supported
+        tup: &TrustedPresenceUnsupported,
+        // No support for converting previous implementation's keyblobs.
+        legacy_key: None,
+        rpc,
+    };
+    let mut ta = KeyMintTa::new(hw_info, RpcInfo::V3(rpc_info_v3), imp, dev);
+
+    let mut buf = [0; kmr_wire::DEFAULT_MAX_SIZE];
+    loop {
+        // Read a request message from the pipe, as a 4-byte BE length followed by the message.
+        let mut req_len_data = [0u8; 4];
+        if let Err(e) = infile.read_exact(&mut req_len_data) {
+            error!("FATAL: Failed to read request length from connection: {:?}", e);
+            return;
+        }
+        let req_len = u32::from_be_bytes(req_len_data) as usize;
+        if req_len > kmr_wire::DEFAULT_MAX_SIZE {
+            error!("FATAL: Request too long ({})", req_len);
+            return;
+        }
+        let req_data = &mut buf[..req_len];
+        if let Err(e) = infile.read_exact(req_data) {
+            error!(
+                "FATAL: Failed to read request data of length {} from connection: {:?}",
+                req_len, e
+            );
+            return;
+        }
+
+        // Pass to the TA to process.
+        trace!("-> TA: received data: (len={})", req_data.len());
+        let rsp = ta.process(req_data);
+        trace!("<- TA: send data: (len={})", rsp.len());
+
+        // Send the response message down the pipe, as a 4-byte BE length followed by the message.
+        let rsp_len: u32 = match rsp.len().try_into() {
+            Ok(l) => l,
+            Err(_e) => {
+                error!("FATAL: Response too long (len={})", rsp.len());
+                return;
+            }
+        };
+        let rsp_len_data = rsp_len.to_be_bytes();
+        if let Err(e) = outfile.write_all(&rsp_len_data[..]) {
+            error!("FATAL: Failed to write response length to connection: {:?}", e);
+            return;
+        }
+        if let Err(e) = outfile.write_all(&rsp) {
+            error!(
+                "FATAL: Failed to write response data of length {} to connection: {:?}",
+                rsp_len, e
+            );
+            return;
+        }
+        let _ = outfile.flush();
+    }
+}
+
+// TODO(schuffelen): Use android_logger when rust works with host glibc, see aosp/1415969
+struct AndroidCppLogger;
+
+impl log::Log for AndroidCppLogger {
+    fn enabled(&self, _metadata: &log::Metadata) -> bool {
+        // Filtering is done in the underlying C++ logger, so indicate to the Rust code that all
+        // logs should be included
+        true
+    }
+
+    fn log(&self, record: &log::Record) {
+        let file = record.file().unwrap_or("(no file)");
+        let file_basename =
+            std::path::Path::new(file).file_name().unwrap_or(std::ffi::OsStr::new("(no file)"));
+        let file = CString::new(file_basename.as_bytes())
+            .unwrap_or_else(|_| CString::new("(invalid file)").unwrap());
+        let line = record.line().unwrap_or(0);
+        let severity = match record.level() {
+            log::Level::Trace => 0,
+            log::Level::Debug => 1,
+            log::Level::Info => 2,
+            log::Level::Warn => 3,
+            log::Level::Error => 4,
+        };
+        let tag = CString::new("secure_env::".to_owned() + record.target())
+            .unwrap_or_else(|_| CString::new("(invalid tag)").unwrap());
+        let msg = CString::new(format!("{}", record.args()))
+            .unwrap_or_else(|_| CString::new("(invalid msg)").unwrap());
+        unsafe {
+            // Safety: All pointer arguments are generated from valid owned CString instances
+            secure_env_tpm::secure_env_log(
+                file.as_ptr(),
+                line,
+                severity,
+                tag.as_ptr(),
+                msg.as_ptr(),
+            );
+        }
+    }
+
+    fn flush(&self) {}
+}
diff --git a/host/commands/secure_env/rust/rpc.rs b/host/commands/secure_env/rust/rpc.rs
new file mode 100644
index 0000000..5826d57
--- /dev/null
+++ b/host/commands/secure_env/rust/rpc.rs
@@ -0,0 +1,169 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+//! Emulated implementation of device traits for `IRemotelyProvisionedComponent`.
+
+use core::cell::RefCell;
+use kmr_common::crypto::{ec, ec::CoseKeyPurpose, Ec, KeyMaterial};
+use kmr_common::{crypto, explicit, rpc_err, vec_try, Error};
+use kmr_crypto_boring::{ec::BoringEc, hmac::BoringHmac};
+use kmr_ta::device::{
+    CsrSigningAlgorithm, DiceInfo, PubDiceArtifacts, RetrieveRpcArtifacts, RpcV2Req,
+};
+use kmr_wire::coset::{iana, CoseSign1Builder, HeaderBuilder};
+use kmr_wire::keymint::Digest;
+use kmr_wire::{cbor::value::Value, coset::AsCborValue, rpc, CborError};
+
+/// Trait to encapsulate deterministic derivation of secret data.
+pub trait DeriveBytes {
+    /// Derive `output_len` bytes of data from `context`, deterministically.
+    fn derive_bytes(&self, context: &[u8], output_len: usize) -> Result<Vec<u8>, Error>;
+}
+
+/// Common emulated implementation of RPC artifact retrieval.
+pub struct Artifacts<T: DeriveBytes> {
+    derive: T,
+    dice_artifacts: RefCell<Option<(DiceInfo, crypto::ec::Key)>>,
+}
+
+impl<T: DeriveBytes> RetrieveRpcArtifacts for Artifacts<T> {
+    fn derive_bytes_from_hbk(
+        &self,
+        _hkdf: &dyn crypto::Hkdf,
+        context: &[u8],
+        output_len: usize,
+    ) -> Result<Vec<u8>, Error> {
+        self.derive.derive_bytes(context, output_len)
+    }
+
+    fn get_dice_info<'a>(&self, _test_mode: rpc::TestMode) -> Result<DiceInfo, Error> {
+        if self.dice_artifacts.borrow().is_none() {
+            self.generate_dice_artifacts(rpc::TestMode(false))?;
+        }
+
+        let (dice_info, _) = self
+            .dice_artifacts
+            .borrow()
+            .as_ref()
+            .ok_or_else(|| rpc_err!(Failed, "DICE artifacts are not initialized."))?
+            .clone();
+        Ok(dice_info)
+    }
+
+    fn sign_data(
+        &self,
+        ec: &dyn crypto::Ec,
+        data: &[u8],
+        _rpc_v2: Option<RpcV2Req>,
+    ) -> Result<Vec<u8>, Error> {
+        // DICE artifacts should have been initialized via `get_dice_info` by the time this
+        // method is called.
+        let (dice_info, private_key) = self
+            .dice_artifacts
+            .borrow()
+            .as_ref()
+            .ok_or_else(|| rpc_err!(Failed, "DICE artifacts are not initialized."))?
+            .clone();
+
+        let mut op = match dice_info.signing_algorithm {
+            CsrSigningAlgorithm::ES256 => ec.begin_sign(private_key.into(), Digest::Sha256)?,
+            CsrSigningAlgorithm::ES384 => ec.begin_sign(private_key.into(), Digest::Sha384)?,
+            CsrSigningAlgorithm::EdDSA => ec.begin_sign(private_key.into(), Digest::None)?,
+        };
+        op.update(data)?;
+        op.finish()
+    }
+}
+
+impl<T: DeriveBytes> Artifacts<T> {
+    /// Constructor.
+    pub fn new(derive: T) -> Self {
+        Self { derive, dice_artifacts: RefCell::new(None) }
+    }
+
+    fn generate_dice_artifacts(&self, _test_mode: rpc::TestMode) -> Result<(), Error> {
+        let ec = BoringEc::default();
+        let secret = self.derive_bytes_from_hbk(&BoringHmac, b"Device Key Seed", 32)?;
+        let (pub_cose_key, private_key) = match ec::import_raw_ed25519_key(&secret)? {
+            KeyMaterial::Ec(curve, curve_type, key) => (
+                key.public_cose_key(
+                    &ec,
+                    curve,
+                    curve_type,
+                    CoseKeyPurpose::Sign,
+                    None, /* no key ID */
+                    rpc::TestMode(false),
+                )?,
+                key,
+            ),
+            _ => {
+                return Err(rpc_err!(
+                    Failed,
+                    "expected the Ec variant of KeyMaterial for the cdi leaf key."
+                ))
+            }
+        };
+
+        let cose_key_cbor = pub_cose_key.to_cbor_value().map_err(CborError::from)?;
+        let cose_key_cbor_data = kmr_ta::rkp::serialize_cbor(&cose_key_cbor)?;
+
+        // Construct `DiceChainEntryPayload`
+        let dice_chain_entry_payload = Value::Map(vec_try![
+            // Issuer
+            (Value::Integer(1.into()), Value::Text(String::from("Issuer"))),
+            // Subject
+            (Value::Integer(2.into()), Value::Text(String::from("Subject"))),
+            // Subject public key
+            (Value::Integer((-4670552).into()), Value::Bytes(cose_key_cbor_data)),
+            // Key Usage field contains a CBOR byte string of the bits which correspond
+            // to `keyCertSign` as per RFC 5280 Section 4.2.1.3 (in little-endian byte order)
+            (Value::Integer((-4670553).into()), Value::Bytes(vec_try![0x20]?)),
+        ]?);
+        let dice_chain_entry_payload_data = kmr_ta::rkp::serialize_cbor(&dice_chain_entry_payload)?;
+
+        // Construct `DiceChainEntry`
+        let protected = HeaderBuilder::new().algorithm(iana::Algorithm::EdDSA).build();
+        let dice_chain_entry = CoseSign1Builder::new()
+            .protected(protected)
+            .payload(dice_chain_entry_payload_data)
+            .try_create_signature(&[], |input| {
+                let mut op = ec.begin_sign(private_key.clone(), Digest::None)?;
+                op.update(input)?;
+                op.finish()
+            })?
+            .build();
+        let dice_chain_entry_cbor = dice_chain_entry.to_cbor_value().map_err(CborError::from)?;
+
+        // Construct `DiceCertChain`
+        let dice_cert_chain = Value::Array(vec_try![cose_key_cbor, dice_chain_entry_cbor]?);
+        let dice_cert_chain_data = kmr_ta::rkp::serialize_cbor(&dice_cert_chain)?;
+
+        // Construct `UdsCerts` as an empty CBOR map
+        let uds_certs_data = kmr_ta::rkp::serialize_cbor(&Value::Map(Vec::new()))?;
+
+        let pub_dice_artifacts =
+            PubDiceArtifacts { dice_cert_chain: dice_cert_chain_data, uds_certs: uds_certs_data };
+
+        let dice_info = DiceInfo {
+            pub_dice_artifacts,
+            signing_algorithm: CsrSigningAlgorithm::EdDSA,
+            rpc_v2_test_cdi_priv: None,
+        };
+
+        *self.dice_artifacts.borrow_mut() = Some((dice_info, explicit!(private_key)?));
+
+        Ok(())
+    }
+}
diff --git a/host/commands/secure_env/rust/soft.rs b/host/commands/secure_env/rust/soft.rs
new file mode 100644
index 0000000..2f7aa38
--- /dev/null
+++ b/host/commands/secure_env/rust/soft.rs
@@ -0,0 +1,66 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+//! Software-only trait implementations using fake keys.
+
+use kmr_common::{
+    crypto,
+    crypto::{Hkdf, Rng},
+    Error,
+};
+use kmr_crypto_boring::{hmac::BoringHmac, rng::BoringRng};
+use kmr_ta::device::RetrieveKeyMaterial;
+
+/// Root key retrieval using hard-coded fake keys.
+pub struct Keys;
+
+impl RetrieveKeyMaterial for Keys {
+    fn root_kek(&self, _context: &[u8]) -> Result<crypto::OpaqueOr<crypto::hmac::Key>, Error> {
+        // Matches `MASTER_KEY` in system/keymaster/key_blob_utils/software_keyblobs.cpp
+        Ok(crypto::hmac::Key::new([0; 16].to_vec()).into())
+    }
+    fn kak(&self) -> Result<crypto::OpaqueOr<crypto::aes::Key>, Error> {
+        // Matches `kFakeKeyAgreementKey` in
+        // system/keymaster/km_openssl/soft_keymaster_enforcement.cpp.
+        Ok(crypto::aes::Key::Aes256([0; 32]).into())
+    }
+    fn unique_id_hbk(&self, _ckdf: &dyn crypto::Ckdf) -> Result<crypto::hmac::Key, Error> {
+        // Matches value used in system/keymaster/contexts/pure_soft_keymaster_context.cpp.
+        crypto::hmac::Key::new_from(b"MustBeRandomBits")
+    }
+}
+
+pub struct Derive {
+    hbk: Vec<u8>,
+}
+
+impl Default for Derive {
+    fn default() -> Self {
+        // Use random data as an emulation of a hardware-backed key.
+        let mut hbk = vec![0; 32];
+        let mut rng = BoringRng::default();
+        rng.fill_bytes(&mut hbk);
+        Self { hbk }
+    }
+}
+
+impl crate::rpc::DeriveBytes for Derive {
+    fn derive_bytes(&self, context: &[u8], output_len: usize) -> Result<Vec<u8>, Error> {
+        BoringHmac.hkdf(&[], &self.hbk, context, output_len)
+    }
+}
+
+/// RPC artifact retrieval using software fake key.
+pub type RpcArtifacts = crate::rpc::Artifacts<Derive>;
diff --git a/host/commands/secure_env/rust/tests.rs b/host/commands/secure_env/rust/tests.rs
new file mode 100644
index 0000000..b53fa1c
--- /dev/null
+++ b/host/commands/secure_env/rust/tests.rs
@@ -0,0 +1,7 @@
+//! Tests for Cuttlefish-specific code.
+
+#[test]
+fn test_signing_cert_parse() {
+    let sign_info = crate::attest::CertSignInfo::new();
+    kmr_tests::test_signing_cert_parse(sign_info, false);
+}
diff --git a/host/commands/secure_env/rust/tpm.rs b/host/commands/secure_env/rust/tpm.rs
new file mode 100644
index 0000000..864c029
--- /dev/null
+++ b/host/commands/secure_env/rust/tpm.rs
@@ -0,0 +1,193 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+//! Device trait implementations using the TPM.
+
+use kmr_common::{
+    crypto, crypto::SHA256_DIGEST_LEN, km_err, vec_try, vec_try_with_capacity, Error,
+    FallibleAllocExt,
+};
+use kmr_ta::device::DeviceHmac;
+
+pub const ROOT_KEK_MARKER: &[u8] = b"CF Root KEK";
+
+/// Device HMAC implementation that uses the TPM.
+#[derive(Clone)]
+pub struct TpmHmac {
+    /// Opaque pointer to a `TpmResourceManager`.
+    trm: *mut libc::c_void,
+}
+
+impl TpmHmac {
+    pub fn new(trm: *mut libc::c_void) -> Self {
+        Self { trm }
+    }
+    fn tpm_hmac(&self, data: &[u8]) -> Result<Vec<u8>, Error> {
+        let mut tag = vec_try![0; 32]?;
+
+        // Safety: all slices are valid with correct lengths.
+        let rc = unsafe {
+            secure_env_tpm::tpm_hmac(
+                self.trm,
+                data.as_ptr(),
+                data.len() as u32,
+                tag.as_mut_ptr(),
+                tag.len() as u32,
+            )
+        };
+        if rc == 0 {
+            Ok(tag)
+        } else {
+            Err(km_err!(UnknownError, "HMAC calculation failed"))
+        }
+    }
+
+    fn hkdf_expand(&self, info: &[u8], out_len: usize) -> Result<Vec<u8>, Error> {
+        // HKDF expand: feed the derivation info into HMAC (using the TPM key) repeatedly.
+        let n = (out_len + SHA256_DIGEST_LEN - 1) / SHA256_DIGEST_LEN;
+        if n > 256 {
+            return Err(km_err!(UnknownError, "overflow in hkdf"));
+        }
+        let mut t = vec_try_with_capacity!(SHA256_DIGEST_LEN)?;
+        let mut okm = vec_try_with_capacity!(n * SHA256_DIGEST_LEN)?;
+        let n = n as u8;
+        for idx in 0..n {
+            let mut input = vec_try_with_capacity!(t.len() + info.len() + 1)?;
+            input.extend_from_slice(&t);
+            input.extend_from_slice(info);
+            input.push(idx + 1);
+
+            t = self.tpm_hmac(&input)?;
+            okm.try_extend_from_slice(&t)?;
+        }
+        okm.truncate(out_len);
+        Ok(okm)
+    }
+}
+
+impl kmr_ta::device::DeviceHmac for TpmHmac {
+    fn hmac(&self, _imp: &dyn crypto::Hmac, data: &[u8]) -> Result<Vec<u8>, Error> {
+        self.tpm_hmac(data)
+    }
+}
+
+impl crate::rpc::DeriveBytes for TpmHmac {
+    fn derive_bytes(&self, context: &[u8], output_len: usize) -> Result<Vec<u8>, Error> {
+        self.hkdf_expand(context, output_len)
+    }
+}
+
+// TPM-backed implementation of key retrieval/management functionality.
+pub struct Keys {
+    tpm_hmac: TpmHmac,
+}
+
+impl Keys {
+    pub fn new(trm: *mut libc::c_void) -> Self {
+        Self { tpm_hmac: TpmHmac::new(trm) }
+    }
+}
+
+impl kmr_ta::device::RetrieveKeyMaterial for Keys {
+    fn root_kek(&self, _context: &[u8]) -> Result<crypto::OpaqueOr<crypto::hmac::Key>, Error> {
+        Ok(crypto::OpaqueOr::Opaque(crypto::OpaqueKeyMaterial(ROOT_KEK_MARKER.to_vec())))
+    }
+
+    fn kak(&self) -> Result<crypto::OpaqueOr<crypto::aes::Key>, Error> {
+        // Generate a TPM-bound shared secret to use as the base of HMAC key negotiation.
+        let k = self.tpm_hmac.tpm_hmac(b"TPM ISharedSecret")?;
+        let k: [u8; 32] =
+            k.try_into().map_err(|_e| km_err!(UnknownError, "unexpected HMAC size"))?;
+        Ok(crypto::aes::Key::Aes256(k).into())
+    }
+
+    fn hmac_key_agreed(&self, _key: &crypto::hmac::Key) -> Option<Box<dyn DeviceHmac>> {
+        // After `ISharedSecret` negotiation completes, the spec implies that the calculated HMAC
+        // key should be used by subsequent device HMAC calculations.  However, this implementation
+        // uses a TPM-HMAC key instead, so that HMAC calculations agree between KeyMint and
+        // Gatekeeper / ConfirmationUI.
+        // TODO(b/242838132): consider installing the calculated key into the TPM and using it
+        // thereafter.
+        Some(Box::new(self.tpm_hmac.clone()))
+    }
+
+    fn unique_id_hbk(&self, _ckdf: &dyn crypto::Ckdf) -> Result<crypto::hmac::Key, Error> {
+        // Generate a TPM-bound HBK to use for unique ID generation.
+        let mut k = self.tpm_hmac.tpm_hmac(b"TPM unique ID HBK")?;
+        k.truncate(16);
+        Ok(crypto::hmac::Key(k))
+    }
+}
+
+pub struct KeyDerivation {
+    tpm_hmac: TpmHmac,
+}
+
+impl KeyDerivation {
+    pub fn new(trm: *mut libc::c_void) -> Self {
+        Self { tpm_hmac: TpmHmac::new(trm) }
+    }
+}
+
+impl kmr_common::crypto::Hkdf for KeyDerivation {
+    fn hkdf(
+        &self,
+        _salt: &[u8],
+        _ikm: &[u8],
+        _info: &[u8],
+        _out_len: usize,
+    ) -> Result<Vec<u8>, Error> {
+        // HKDF normally performs an initial extract step to create a pseudo-random key (PRK) for
+        // use in the HKDF expand processing.  This implementation uses a TPM HMAC key for HKDF
+        // expand processing instead, and so we cannot do a full HKDF call.
+        Err(km_err!(UnknownError, "unexpected call to full hkdf opearation"))
+    }
+
+    fn extract(
+        &self,
+        _salt: &[u8],
+        _ikm: &[u8],
+    ) -> Result<crypto::OpaqueOr<crypto::hmac::Key>, Error> {
+        // Because we are using a TPM HMAC key for HKDF; there is nothing to extract
+        Err(km_err!(UnknownError, "unexpected call to hkdf extract"))
+    }
+
+    fn expand(
+        &self,
+        prk: &crypto::OpaqueOr<crypto::hmac::Key>,
+        info: &[u8],
+        out_len: usize,
+    ) -> Result<Vec<u8>, Error> {
+        let key_material = match prk {
+            crypto::OpaqueOr::Opaque(key_material) => &key_material.0,
+            _ => {
+                return Err(km_err!(
+                    UnknownError,
+                    "unexpected root kek type used in key derivation"
+                ))
+            }
+        };
+        if key_material != ROOT_KEK_MARKER {
+            // This code expects that the value from `Keys::root_kek()` above will be passed
+            // unmodified to this function in its (only) use as key derivation.  If this is not the
+            // case, then the assumptions below around TPM use may no longer be correct.
+            return Err(km_err!(UnknownError, "unexpected root kek in key derivation"));
+        }
+        self.tpm_hmac.hkdf_expand(info, out_len)
+    }
+}
+
+/// RPC artifact retrieval using key material derived from the TPM.
+pub type RpcArtifacts = crate::rpc::Artifacts<TpmHmac>;
diff --git a/host/commands/secure_env/secure_env.cpp b/host/commands/secure_env/secure_env.cpp
deleted file mode 100644
index 97dd48c..0000000
--- a/host/commands/secure_env/secure_env.cpp
+++ /dev/null
@@ -1,275 +0,0 @@
-//
-// Copyright (C) 2020 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-#include <thread>
-
-#include <android-base/logging.h>
-#include <fruit/fruit.h>
-#include <gflags/gflags.h>
-#include <keymaster/android_keymaster.h>
-#include <keymaster/contexts/pure_soft_keymaster_context.h>
-#include <keymaster/soft_keymaster_logger.h>
-#include <tss2/tss2_esys.h>
-#include <tss2/tss2_rc.h>
-
-#include "common/libs/fs/shared_fd.h"
-#include "common/libs/security/confui_sign.h"
-#include "common/libs/security/gatekeeper_channel.h"
-#include "common/libs/security/keymaster_channel.h"
-#include "host/commands/kernel_log_monitor/kernel_log_server.h"
-#include "host/commands/kernel_log_monitor/utils.h"
-#include "host/commands/secure_env/confui_sign_server.h"
-#include "host/commands/secure_env/device_tpm.h"
-#include "host/commands/secure_env/fragile_tpm_storage.h"
-#include "host/commands/secure_env/gatekeeper_responder.h"
-#include "host/commands/secure_env/in_process_tpm.h"
-#include "host/commands/secure_env/insecure_fallback_storage.h"
-#include "host/commands/secure_env/keymaster_responder.h"
-#include "host/commands/secure_env/proxy_keymaster_context.h"
-#include "host/commands/secure_env/soft_gatekeeper.h"
-#include "host/commands/secure_env/tpm_gatekeeper.h"
-#include "host/commands/secure_env/tpm_keymaster_context.h"
-#include "host/commands/secure_env/tpm_keymaster_enforcement.h"
-#include "host/commands/secure_env/tpm_resource_manager.h"
-#include "host/libs/config/logging.h"
-
-DEFINE_int32(confui_server_fd, -1, "A named socket to serve confirmation UI");
-DEFINE_int32(keymaster_fd_in, -1, "A pipe for keymaster communication");
-DEFINE_int32(keymaster_fd_out, -1, "A pipe for keymaster communication");
-DEFINE_int32(gatekeeper_fd_in, -1, "A pipe for gatekeeper communication");
-DEFINE_int32(gatekeeper_fd_out, -1, "A pipe for gatekeeper communication");
-DEFINE_int32(kernel_events_fd, -1,
-             "A pipe for monitoring events based on "
-             "messages written to the kernel log. This "
-             "is used by secure_env to monitor for "
-             "device reboots.");
-
-DEFINE_string(tpm_impl,
-              "in_memory",
-              "The TPM implementation. \"in_memory\" or \"host_device\"");
-
-DEFINE_string(keymint_impl, "tpm",
-              "The keymaster implementation. \"tpm\" or \"software\"");
-
-DEFINE_string(gatekeeper_impl, "tpm",
-              "The gatekeeper implementation. \"tpm\" or \"software\"");
-
-namespace cuttlefish {
-namespace {
-
-// Copied from AndroidKeymaster4Device
-constexpr size_t kOperationTableSize = 16;
-
-// Dup a command line file descriptor into a SharedFD.
-SharedFD DupFdFlag(gflags::int32 fd) {
-  CHECK(fd != -1);
-  SharedFD duped = SharedFD::Dup(fd);
-  CHECK(duped->IsOpen()) << "Could not dup output fd: " << duped->StrError();
-  // The original FD is intentionally kept open so that we can re-exec this
-  // process without having to do a bunch of argv book-keeping.
-  return duped;
-}
-
-// Re-launch this process with all the same flags it was originallys started
-// with.
-[[noreturn]] void ReExecSelf() {
-  // Allocate +1 entry for terminating nullptr.
-  std::vector<char*> argv(gflags::GetArgvs().size() + 1, nullptr);
-  for (size_t i = 0; i < gflags::GetArgvs().size(); ++i) {
-    argv[i] = strdup(gflags::GetArgvs()[i].c_str());
-    CHECK(argv[i] != nullptr) << "OOM";
-  }
-  execv("/proc/self/exe", argv.data());
-  char buf[128];
-  LOG(FATAL) << "Exec failed, secure_env is out of sync with the guest: "
-             << errno << "(" << strerror_r(errno, buf, sizeof(buf)) << ")";
-  abort();  // LOG(FATAL) isn't marked as noreturn
-}
-
-// Spin up a thread that monitors for a kernel loaded event, then re-execs
-// this process. This way, secure_env's boot tracking matches up with the guest.
-std::thread StartKernelEventMonitor(SharedFD kernel_events_fd) {
-  return std::thread([kernel_events_fd]() {
-    while (kernel_events_fd->IsOpen()) {
-      auto read_result = monitor::ReadEvent(kernel_events_fd);
-      CHECK(read_result.has_value()) << kernel_events_fd->StrError();
-      if (read_result->event == monitor::Event::BootloaderLoaded) {
-        LOG(DEBUG) << "secure_env detected guest reboot, restarting.";
-        ReExecSelf();
-      }
-    }
-  });
-}
-
-fruit::Component<fruit::Required<gatekeeper::SoftGateKeeper, TpmGatekeeper,
-                                 TpmResourceManager>,
-                 gatekeeper::GateKeeper, keymaster::KeymasterEnforcement>
-ChooseGatekeeperComponent() {
-  if (FLAGS_gatekeeper_impl == "software") {
-    return fruit::createComponent()
-        .bind<gatekeeper::GateKeeper, gatekeeper::SoftGateKeeper>()
-        .registerProvider([]() -> keymaster::KeymasterEnforcement* {
-          return new keymaster::SoftKeymasterEnforcement(64, 64);
-        });
-  } else if (FLAGS_gatekeeper_impl == "tpm") {
-    return fruit::createComponent()
-        .bind<gatekeeper::GateKeeper, TpmGatekeeper>()
-        .registerProvider(
-            [](TpmResourceManager& resource_manager,
-               TpmGatekeeper& gatekeeper) -> keymaster::KeymasterEnforcement* {
-              return new TpmKeymasterEnforcement(resource_manager, gatekeeper);
-            });
-  } else {
-    LOG(FATAL) << "Invalid gatekeeper implementation: "
-               << FLAGS_gatekeeper_impl;
-    abort();
-  }
-}
-
-fruit::Component<TpmResourceManager, gatekeeper::GateKeeper,
-                 keymaster::KeymasterEnforcement>
-SecureEnvComponent() {
-  return fruit::createComponent()
-      .registerProvider([]() -> Tpm* {  // fruit will take ownership
-        if (FLAGS_tpm_impl == "in_memory") {
-          return new InProcessTpm();
-        } else if (FLAGS_tpm_impl == "host_device") {
-          return new DeviceTpm("/dev/tpm0");
-        } else {
-          LOG(FATAL) << "Unknown TPM implementation: " << FLAGS_tpm_impl;
-          abort();
-        }
-      })
-      .registerProvider([](Tpm* tpm) {
-        if (tpm->TctiContext() == nullptr) {
-          LOG(FATAL) << "Unable to connect to TPM implementation.";
-        }
-        ESYS_CONTEXT* esys_ptr = nullptr;
-        std::unique_ptr<ESYS_CONTEXT, void (*)(ESYS_CONTEXT*)> esys(
-            nullptr, [](ESYS_CONTEXT* esys) { Esys_Finalize(&esys); });
-        auto rc = Esys_Initialize(&esys_ptr, tpm->TctiContext(), nullptr);
-        if (rc != TPM2_RC_SUCCESS) {
-          LOG(FATAL) << "Could not initialize esys: " << Tss2_RC_Decode(rc)
-                     << " (" << rc << ")";
-        }
-        esys.reset(esys_ptr);
-        return esys;
-      })
-      .registerProvider(
-          [](std::unique_ptr<ESYS_CONTEXT, void (*)(ESYS_CONTEXT*)>& esys) {
-            return new TpmResourceManager(
-                esys.get());  // fruit will take ownership
-          })
-      .registerProvider([](TpmResourceManager& resource_manager) {
-        return new FragileTpmStorage(resource_manager, "gatekeeper_secure");
-      })
-      .registerProvider([](TpmResourceManager& resource_manager) {
-        return new InsecureFallbackStorage(resource_manager,
-                                           "gatekeeper_insecure");
-      })
-      .registerProvider([](TpmResourceManager& resource_manager,
-                           FragileTpmStorage& secure_storage,
-                           InsecureFallbackStorage& insecure_storage) {
-        return new TpmGatekeeper(resource_manager, secure_storage,
-                                 insecure_storage);
-      })
-      .registerProvider([]() { return new gatekeeper::SoftGateKeeper(); })
-      .install(ChooseGatekeeperComponent);
-}
-
-}  // namespace
-
-int SecureEnvMain(int argc, char** argv) {
-  DefaultSubprocessLogging(argv);
-  gflags::ParseCommandLineFlags(&argc, &argv, true);
-  keymaster::SoftKeymasterLogger km_logger;
-
-  fruit::Injector<TpmResourceManager, gatekeeper::GateKeeper,
-                  keymaster::KeymasterEnforcement>
-      injector(SecureEnvComponent);
-  TpmResourceManager* resource_manager = injector.get<TpmResourceManager*>();
-  gatekeeper::GateKeeper* gatekeeper = injector.get<gatekeeper::GateKeeper*>();
-  keymaster::KeymasterEnforcement* keymaster_enforcement =
-      injector.get<keymaster::KeymasterEnforcement*>();
-
-  std::unique_ptr<keymaster::KeymasterContext> keymaster_context;
-  if (FLAGS_keymint_impl == "software") {
-    // TODO: See if this is the right KM version.
-    keymaster_context.reset(new keymaster::PureSoftKeymasterContext(
-        keymaster::KmVersion::KEYMINT_2, KM_SECURITY_LEVEL_SOFTWARE));
-  } else if (FLAGS_keymint_impl == "tpm") {
-    keymaster_context.reset(
-        new TpmKeymasterContext(*resource_manager, *keymaster_enforcement));
-  } else {
-    LOG(FATAL) << "Unknown keymaster implementation " << FLAGS_keymint_impl;
-    return -1;
-  }
-  // keymaster::AndroidKeymaster puts the context pointer into a UniquePtr,
-  // taking ownership.
-  keymaster::AndroidKeymaster keymaster{
-      new ProxyKeymasterContext(*keymaster_context), kOperationTableSize,
-      keymaster::MessageVersion(keymaster::KmVersion::KEYMINT_2,
-                                0 /* km_date */)};
-
-  auto confui_server_fd = DupFdFlag(FLAGS_confui_server_fd);
-  auto keymaster_in = DupFdFlag(FLAGS_keymaster_fd_in);
-  auto keymaster_out = DupFdFlag(FLAGS_keymaster_fd_out);
-  auto gatekeeper_in = DupFdFlag(FLAGS_gatekeeper_fd_in);
-  auto gatekeeper_out = DupFdFlag(FLAGS_gatekeeper_fd_out);
-  auto kernel_events_fd = DupFdFlag(FLAGS_kernel_events_fd);
-
-  std::vector<std::thread> threads;
-
-  threads.emplace_back([keymaster_in, keymaster_out, &keymaster]() {
-    while (true) {
-      KeymasterChannel keymaster_channel(keymaster_in, keymaster_out);
-
-      KeymasterResponder keymaster_responder(keymaster_channel, keymaster);
-
-      while (keymaster_responder.ProcessMessage()) {
-      }
-    }
-  });
-
-  threads.emplace_back([gatekeeper_in, gatekeeper_out, &gatekeeper]() {
-    while (true) {
-      GatekeeperChannel gatekeeper_channel(gatekeeper_in, gatekeeper_out);
-
-      GatekeeperResponder gatekeeper_responder(gatekeeper_channel, *gatekeeper);
-
-      while (gatekeeper_responder.ProcessMessage()) {
-      }
-    }
-  });
-
-  threads.emplace_back([confui_server_fd, resource_manager]() {
-    ConfUiSignServer confui_sign_server(*resource_manager, confui_server_fd);
-    // no return, infinite loop
-    confui_sign_server.MainLoop();
-  });
-  threads.emplace_back(StartKernelEventMonitor(kernel_events_fd));
-
-  for (auto& t : threads) {
-    t.join();
-  }
-
-  return 0;
-}
-
-}  // namespace cuttlefish
-
-int main(int argc, char** argv) {
-  return cuttlefish::SecureEnvMain(argc, argv);
-}
diff --git a/host/commands/secure_env/secure_env_linux_main.cpp b/host/commands/secure_env/secure_env_linux_main.cpp
new file mode 100644
index 0000000..ade41d3
--- /dev/null
+++ b/host/commands/secure_env/secure_env_linux_main.cpp
@@ -0,0 +1,304 @@
+//
+// Copyright (C) 2020 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include <thread>
+
+#include <android-base/logging.h>
+#include <android-base/strings.h>
+#include <fruit/fruit.h>
+#include <gflags/gflags.h>
+#include <keymaster/android_keymaster.h>
+#include <keymaster/contexts/pure_soft_keymaster_context.h>
+#include <keymaster/soft_keymaster_logger.h>
+#include <tss2/tss2_esys.h>
+#include <tss2/tss2_rc.h>
+
+#include "common/libs/fs/shared_fd.h"
+#include "common/libs/security/confui_sign.h"
+#include "common/libs/security/gatekeeper_channel_sharedfd.h"
+#include "common/libs/security/keymaster_channel_sharedfd.h"
+#include "host/commands/kernel_log_monitor/kernel_log_server.h"
+#include "host/commands/kernel_log_monitor/utils.h"
+#include "host/commands/secure_env/confui_sign_server.h"
+#include "host/commands/secure_env/device_tpm.h"
+#include "host/commands/secure_env/fragile_tpm_storage.h"
+#include "host/commands/secure_env/gatekeeper_responder.h"
+#include "host/commands/secure_env/in_process_tpm.h"
+#include "host/commands/secure_env/insecure_fallback_storage.h"
+#include "host/commands/secure_env/keymaster_responder.h"
+#include "host/commands/secure_env/proxy_keymaster_context.h"
+#include "host/commands/secure_env/rust/kmr_ta.h"
+#include "host/commands/secure_env/soft_gatekeeper.h"
+#include "host/commands/secure_env/tpm_gatekeeper.h"
+#include "host/commands/secure_env/tpm_keymaster_context.h"
+#include "host/commands/secure_env/tpm_keymaster_enforcement.h"
+#include "host/commands/secure_env/tpm_resource_manager.h"
+#include "host/libs/config/logging.h"
+
+DEFINE_int32(confui_server_fd, -1, "A named socket to serve confirmation UI");
+DEFINE_int32(keymaster_fd_in, -1, "A pipe for keymaster communication");
+DEFINE_int32(keymaster_fd_out, -1, "A pipe for keymaster communication");
+DEFINE_int32(gatekeeper_fd_in, -1, "A pipe for gatekeeper communication");
+DEFINE_int32(gatekeeper_fd_out, -1, "A pipe for gatekeeper communication");
+DEFINE_int32(kernel_events_fd, -1,
+             "A pipe for monitoring events based on "
+             "messages written to the kernel log. This "
+             "is used by secure_env to monitor for "
+             "device reboots.");
+
+DEFINE_string(tpm_impl, "in_memory",
+              "The TPM implementation. \"in_memory\" or \"host_device\"");
+
+DEFINE_string(keymint_impl, "tpm",
+              "The KeyMint implementation. \"tpm\", \"software\", \"rust-tpm\" "
+              "or \"rust-software\"");
+
+DEFINE_string(gatekeeper_impl, "tpm",
+              "The gatekeeper implementation. \"tpm\" or \"software\"");
+
+namespace cuttlefish {
+namespace {
+
+// Copied from AndroidKeymaster4Device
+constexpr size_t kOperationTableSize = 16;
+
+// Dup a command line file descriptor into a SharedFD.
+SharedFD DupFdFlag(gflags::int32 fd) {
+  CHECK(fd != -1);
+  SharedFD duped = SharedFD::Dup(fd);
+  CHECK(duped->IsOpen()) << "Could not dup output fd: " << duped->StrError();
+  // The original FD is intentionally kept open so that we can re-exec this
+  // process without having to do a bunch of argv book-keeping.
+  return duped;
+}
+
+// Re-launch this process with all the same flags it was originally started
+// with.
+[[noreturn]] void ReExecSelf() {
+  // Allocate +1 entry for terminating nullptr.
+  std::vector<char*> argv(gflags::GetArgvs().size() + 1, nullptr);
+  for (size_t i = 0; i < gflags::GetArgvs().size(); ++i) {
+    argv[i] = strdup(gflags::GetArgvs()[i].c_str());
+    CHECK(argv[i] != nullptr) << "OOM";
+  }
+  execv("/proc/self/exe", argv.data());
+  char buf[128];
+  LOG(FATAL) << "Exec failed, secure_env is out of sync with the guest: "
+             << errno << "(" << strerror_r(errno, buf, sizeof(buf)) << ")";
+  abort();  // LOG(FATAL) isn't marked as noreturn
+}
+
+// Spin up a thread that monitors for a kernel loaded event, then re-execs
+// this process. This way, secure_env's boot tracking matches up with the guest.
+std::thread StartKernelEventMonitor(SharedFD kernel_events_fd) {
+  return std::thread([kernel_events_fd]() {
+    while (kernel_events_fd->IsOpen()) {
+      auto read_result = monitor::ReadEvent(kernel_events_fd);
+      CHECK(read_result.has_value()) << kernel_events_fd->StrError();
+      if (read_result->event == monitor::Event::BootloaderLoaded) {
+        LOG(DEBUG) << "secure_env detected guest reboot, restarting.";
+        ReExecSelf();
+      }
+    }
+  });
+}
+
+fruit::Component<fruit::Required<gatekeeper::SoftGateKeeper, TpmGatekeeper,
+                                 TpmResourceManager>,
+                 gatekeeper::GateKeeper, keymaster::KeymasterEnforcement>
+ChooseGatekeeperComponent() {
+  if (FLAGS_gatekeeper_impl == "software") {
+    return fruit::createComponent()
+        .bind<gatekeeper::GateKeeper, gatekeeper::SoftGateKeeper>()
+        .registerProvider([]() -> keymaster::KeymasterEnforcement* {
+          return new keymaster::SoftKeymasterEnforcement(64, 64);
+        });
+  } else if (FLAGS_gatekeeper_impl == "tpm") {
+    return fruit::createComponent()
+        .bind<gatekeeper::GateKeeper, TpmGatekeeper>()
+        .registerProvider(
+            [](TpmResourceManager& resource_manager,
+               TpmGatekeeper& gatekeeper) -> keymaster::KeymasterEnforcement* {
+              return new TpmKeymasterEnforcement(resource_manager, gatekeeper);
+            });
+  } else {
+    LOG(FATAL) << "Invalid gatekeeper implementation: "
+               << FLAGS_gatekeeper_impl;
+    abort();
+  }
+}
+
+fruit::Component<TpmResourceManager, gatekeeper::GateKeeper,
+                 keymaster::KeymasterEnforcement>
+SecureEnvComponent() {
+  return fruit::createComponent()
+      .registerProvider([]() -> Tpm* {  // fruit will take ownership
+        if (FLAGS_tpm_impl == "in_memory") {
+          return new InProcessTpm();
+        } else if (FLAGS_tpm_impl == "host_device") {
+          return new DeviceTpm("/dev/tpm0");
+        } else {
+          LOG(FATAL) << "Unknown TPM implementation: " << FLAGS_tpm_impl;
+          abort();
+        }
+      })
+      .registerProvider([](Tpm* tpm) {
+        if (tpm->TctiContext() == nullptr) {
+          LOG(FATAL) << "Unable to connect to TPM implementation.";
+        }
+        ESYS_CONTEXT* esys_ptr = nullptr;
+        std::unique_ptr<ESYS_CONTEXT, void (*)(ESYS_CONTEXT*)> esys(
+            nullptr, [](ESYS_CONTEXT* esys) { Esys_Finalize(&esys); });
+        auto rc = Esys_Initialize(&esys_ptr, tpm->TctiContext(), nullptr);
+        if (rc != TPM2_RC_SUCCESS) {
+          LOG(FATAL) << "Could not initialize esys: " << Tss2_RC_Decode(rc)
+                     << " (" << rc << ")";
+        }
+        esys.reset(esys_ptr);
+        return esys;
+      })
+      .registerProvider(
+          [](std::unique_ptr<ESYS_CONTEXT, void (*)(ESYS_CONTEXT*)>& esys) {
+            return new TpmResourceManager(
+                esys.get());  // fruit will take ownership
+          })
+      .registerProvider([](TpmResourceManager& resource_manager) {
+        return new FragileTpmStorage(resource_manager, "gatekeeper_secure");
+      })
+      .registerProvider([](TpmResourceManager& resource_manager) {
+        return new InsecureFallbackStorage(resource_manager,
+                                           "gatekeeper_insecure");
+      })
+      .registerProvider([](TpmResourceManager& resource_manager,
+                           FragileTpmStorage& secure_storage,
+                           InsecureFallbackStorage& insecure_storage) {
+        return new TpmGatekeeper(resource_manager, secure_storage,
+                                 insecure_storage);
+      })
+      .registerProvider([]() { return new gatekeeper::SoftGateKeeper(); })
+      .install(ChooseGatekeeperComponent);
+}
+
+}  // namespace
+
+int SecureEnvMain(int argc, char** argv) {
+  DefaultSubprocessLogging(argv);
+  gflags::ParseCommandLineFlags(&argc, &argv, true);
+  keymaster::SoftKeymasterLogger km_logger;
+
+  fruit::Injector<TpmResourceManager, gatekeeper::GateKeeper,
+                  keymaster::KeymasterEnforcement>
+      injector(SecureEnvComponent);
+  TpmResourceManager* resource_manager = injector.get<TpmResourceManager*>();
+  gatekeeper::GateKeeper* gatekeeper = injector.get<gatekeeper::GateKeeper*>();
+  keymaster::KeymasterEnforcement* keymaster_enforcement =
+      injector.get<keymaster::KeymasterEnforcement*>();
+  std::unique_ptr<keymaster::KeymasterContext> keymaster_context;
+  std::unique_ptr<keymaster::AndroidKeymaster> keymaster;
+
+  std::vector<std::thread> threads;
+
+  if (android::base::StartsWith(FLAGS_keymint_impl, "rust-")) {
+    // Use the Rust reference implementation of KeyMint.
+    LOG(DEBUG) << "starting Rust KeyMint implementation";
+    int security_level;
+    if (FLAGS_keymint_impl == "rust-software") {
+      security_level = KM_SECURITY_LEVEL_SOFTWARE;
+    } else if (FLAGS_keymint_impl == "rust-tpm") {
+      security_level = KM_SECURITY_LEVEL_TRUSTED_ENVIRONMENT;
+    } else {
+      LOG(FATAL) << "Unknown keymaster implementation " << FLAGS_keymint_impl;
+      return -1;
+    }
+
+    int keymaster_in = FLAGS_keymaster_fd_in;
+    int keymaster_out = FLAGS_keymaster_fd_out;
+    TpmResourceManager* rm = resource_manager;
+    threads.emplace_back([rm, keymaster_in, keymaster_out, security_level]() {
+      kmr_ta_main(keymaster_in, keymaster_out, security_level, rm);
+    });
+
+  } else {
+    // Use the C++ reference implementation of KeyMint.
+    LOG(DEBUG) << "starting C++ KeyMint implementation";
+    if (FLAGS_keymint_impl == "software") {
+      // TODO: See if this is the right KM version.
+      keymaster_context.reset(new keymaster::PureSoftKeymasterContext(
+          keymaster::KmVersion::KEYMINT_3, KM_SECURITY_LEVEL_SOFTWARE));
+    } else if (FLAGS_keymint_impl == "tpm") {
+      keymaster_context.reset(
+          new TpmKeymasterContext(*resource_manager, *keymaster_enforcement));
+    } else {
+      LOG(FATAL) << "Unknown keymaster implementation " << FLAGS_keymint_impl;
+      return -1;
+    }
+    // keymaster::AndroidKeymaster puts the context pointer into a UniquePtr,
+    // taking ownership.
+    keymaster.reset(new keymaster::AndroidKeymaster(
+        new ProxyKeymasterContext(*keymaster_context), kOperationTableSize,
+        keymaster::MessageVersion(keymaster::KmVersion::KEYMINT_3,
+                                  0 /* km_date */)));
+
+    auto keymaster_in = DupFdFlag(FLAGS_keymaster_fd_in);
+    auto keymaster_out = DupFdFlag(FLAGS_keymaster_fd_out);
+    keymaster::AndroidKeymaster* borrowed_km = keymaster.get();
+    threads.emplace_back([keymaster_in, keymaster_out, borrowed_km]() {
+      while (true) {
+        SharedFdKeymasterChannel keymaster_channel(keymaster_in, keymaster_out);
+
+        KeymasterResponder keymaster_responder(keymaster_channel, *borrowed_km);
+
+        while (keymaster_responder.ProcessMessage()) {
+        }
+      }
+    });
+  }
+
+  auto gatekeeper_in = DupFdFlag(FLAGS_gatekeeper_fd_in);
+  auto gatekeeper_out = DupFdFlag(FLAGS_gatekeeper_fd_out);
+  threads.emplace_back([gatekeeper_in, gatekeeper_out, &gatekeeper]() {
+    while (true) {
+      SharedFdGatekeeperChannel gatekeeper_channel(gatekeeper_in,
+                                                   gatekeeper_out);
+
+      GatekeeperResponder gatekeeper_responder(gatekeeper_channel, *gatekeeper);
+
+      while (gatekeeper_responder.ProcessMessage()) {
+      }
+    }
+  });
+
+  auto confui_server_fd = DupFdFlag(FLAGS_confui_server_fd);
+  threads.emplace_back([confui_server_fd, resource_manager]() {
+    ConfUiSignServer confui_sign_server(*resource_manager, confui_server_fd);
+    // no return, infinite loop
+    confui_sign_server.MainLoop();
+  });
+
+  auto kernel_events_fd = DupFdFlag(FLAGS_kernel_events_fd);
+  threads.emplace_back(StartKernelEventMonitor(kernel_events_fd));
+
+  for (auto& t : threads) {
+    t.join();
+  }
+
+  return 0;
+}
+
+}  // namespace cuttlefish
+
+int main(int argc, char** argv) {
+  return cuttlefish::SecureEnvMain(argc, argv);
+}
diff --git a/host/commands/secure_env/secure_env_windows_lib.cpp b/host/commands/secure_env/secure_env_windows_lib.cpp
new file mode 100644
index 0000000..64f10fe
--- /dev/null
+++ b/host/commands/secure_env/secure_env_windows_lib.cpp
@@ -0,0 +1,140 @@
+//
+// Copyright (C) 2020 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include <windows.h>
+
+#include "host/commands/secure_env/secure_env_windows_lib.h"
+
+#include <thread>
+
+#include <android-base/logging.h>
+#include <keymaster/android_keymaster.h>
+#include <keymaster/contexts/pure_soft_keymaster_context.h>
+#include "common/libs/security/gatekeeper_channel_windows.h"
+#include "common/libs/security/keymaster_channel_windows.h"
+#include "host/commands/secure_env/fragile_tpm_storage.h"
+#include "host/commands/secure_env/gatekeeper_responder.h"
+#include "host/commands/secure_env/insecure_fallback_storage.h"
+#include "host/commands/secure_env/keymaster_responder.h"
+#include "host/commands/secure_env/soft_gatekeeper.h"
+#include "host/commands/secure_env/tpm_gatekeeper.h"
+#include "host/commands/secure_env/tpm_keymaster_context.h"
+#include "host/commands/secure_env/tpm_keymaster_enforcement.h"
+#include "host/commands/secure_env/tpm_resource_manager.h"
+
+namespace secure_env {
+namespace {
+// Copied from AndroidKeymaster4Device
+constexpr size_t kOperationTableSize = 16;
+
+}  // namespace
+
+bool StartSecureEnvWithHandles(HANDLE keymaster_pipe_handle,
+                               HANDLE gatekeeper_pipe_handle,
+                               bool /*use_tpm*/) {
+  // Start threads for gatekeeper and keymaster
+  std::thread keymaster_thread([=]() {
+    // keymaster::AndroidKeymaster puts keymaster_context into a UniquePtr,
+    // taking ownership.
+    keymaster::KeymasterContext* keymaster_context =
+        new keymaster::PureSoftKeymasterContext(
+            keymaster::KmVersion::KEYMASTER_4_1, KM_SECURITY_LEVEL_SOFTWARE);
+
+    // Setup software keymaster
+    std::unique_ptr<keymaster::AndroidKeymaster> keymaster_ptr(
+        new keymaster::AndroidKeymaster(keymaster_context,
+                                        kOperationTableSize));
+
+    std::unique_ptr<cuttlefish::KeymasterWindowsChannel> keymaster_channel =
+        cuttlefish::KeymasterWindowsChannel::Create(keymaster_pipe_handle);
+    if (!keymaster_channel) {
+      return;
+    }
+
+    cuttlefish::KeymasterResponder keymaster_responder(*keymaster_channel,
+                                                       *keymaster_ptr);
+
+    while (keymaster_responder.ProcessMessage()) {
+    }
+  });
+
+  std::thread gatekeeper_thread([=]() {
+    // Setup software gatekeeper
+    std::unique_ptr<gatekeeper::GateKeeper> gatekeeper_ptr(
+        new gatekeeper::SoftGateKeeper);
+
+    std::unique_ptr<cuttlefish::GatekeeperWindowsChannel> gatekeeper_channel =
+        cuttlefish::GatekeeperWindowsChannel::Create(gatekeeper_pipe_handle);
+    if (!gatekeeper_channel) {
+      return;
+    }
+
+    cuttlefish::GatekeeperResponder gatekeeper_responder(*gatekeeper_channel,
+                                                         *gatekeeper_ptr);
+
+    while (gatekeeper_responder.ProcessMessage()) {
+    }
+  });
+
+  keymaster_thread.join();
+  gatekeeper_thread.join();
+
+  return true;
+}
+
+bool StartSecureEnv(const char* keymaster_pipe, const char* gatekeeper_pipe,
+                    bool use_tpm) {
+  // Create the keymaster pipe
+  HANDLE keymaster_handle = CreateNamedPipeA(
+      keymaster_pipe,
+      /* dwOpenMode= */ PIPE_ACCESS_DUPLEX | FILE_FLAG_FIRST_PIPE_INSTANCE |
+          FILE_FLAG_OVERLAPPED,
+      /* dwPipeMode= */ PIPE_TYPE_BYTE | PIPE_READMODE_BYTE | PIPE_WAIT |
+          PIPE_REJECT_REMOTE_CLIENTS,
+      /* nMaxInstances= */ 1,
+      /* nOutBufferSize= */ 1024,  // The buffer sizes are only advisory.
+      /* nInBufferSize= */ 1024,
+      /* nDefaultTimeOut= */ 0,
+      /* lpSecurityAttributes= */ NULL);
+  if (keymaster_handle == INVALID_HANDLE_VALUE) {
+    LOG(ERROR) << "Error: Could not create keymaster pipe " << keymaster_pipe
+               << ". Got error code " << GetLastError();
+    return false;
+  }
+  LOG(INFO) << "Created keymaster pipe " << keymaster_pipe;
+
+  HANDLE gatekeeper_handle = CreateNamedPipeA(
+      gatekeeper_pipe,
+      /* dwOpenMode= */ PIPE_ACCESS_DUPLEX | FILE_FLAG_FIRST_PIPE_INSTANCE |
+          FILE_FLAG_OVERLAPPED,
+      /* dwPipeMode= */ PIPE_TYPE_BYTE | PIPE_READMODE_BYTE | PIPE_WAIT |
+          PIPE_REJECT_REMOTE_CLIENTS,
+      /* nMaxInstances= */ 1,
+      /* nOutBufferSize= */ 1024,  // The buffer sizes are only advisory.
+      /* nInBufferSize= */ 1024,
+      /* nDefaultTimeOut= */ 0,
+      /* lpSecurityAttributes= */ NULL);
+  if (gatekeeper_handle == INVALID_HANDLE_VALUE) {
+    LOG(ERROR) << "Error: Could not create gatekeeper pipe " << gatekeeper_pipe
+               << ". Got error code " << GetLastError();
+    return false;
+  }
+  LOG(INFO) << "Created gatekeeper pipe " << gatekeeper_pipe;
+
+  return StartSecureEnvWithHandles(keymaster_handle, gatekeeper_handle,
+                                   use_tpm);
+}
+
+}  // namespace secure_env
diff --git a/host/commands/secure_env/secure_env_windows_lib.h b/host/commands/secure_env/secure_env_windows_lib.h
new file mode 100644
index 0000000..20dfe96
--- /dev/null
+++ b/host/commands/secure_env/secure_env_windows_lib.h
@@ -0,0 +1,47 @@
+//
+// Copyright (C) 2023 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#pragma once
+
+#include <windows.h>
+
+#if !defined(SECURE_ENV_DLL)
+#define SECURE_ENV_DLL_SYMBOL
+#elif defined(SECURE_ENV_BUILD_DLL)
+#define SECURE_ENV_DLL_SYMBOL __declspec(dllexport)
+#else
+#define SECURE_ENV_DLL_SYMBOL __declspec(dllimport)
+#endif
+
+namespace secure_env {
+extern "C" {
+/* Starts and runs remote keymaster and gatekeeper in separate threads.
+ * All cryptography is performed in software. Returns on failure, or once
+ * the connections are dropped on success.
+ *
+ */
+SECURE_ENV_DLL_SYMBOL bool StartSecureEnv(const char* keymaster_pipe,
+                                          const char* gatekeeper_pipe,
+                                          bool use_tpm);
+
+/* Starts and runs remote keymaster and gatekeeper using handles to preexisting
+ * async named pipes. Returns on failure, or once the connections are dropped on
+ * success.
+ */
+SECURE_ENV_DLL_SYMBOL bool StartSecureEnvWithHandles(
+    HANDLE keymaster_pipe_handle, HANDLE gatekeeper_pipe_handle, bool use_tpm);
+}
+
+}  // namespace secure_env
diff --git a/host/commands/secure_env/secure_env_windows_main.cpp b/host/commands/secure_env/secure_env_windows_main.cpp
new file mode 100644
index 0000000..1c2d9ef
--- /dev/null
+++ b/host/commands/secure_env/secure_env_windows_main.cpp
@@ -0,0 +1,51 @@
+//
+// Copyright (C) 2023 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include <android-base/logging.h>
+#include <gflags/gflags.h>
+
+#include "host/commands/secure_env/secure_env_windows_lib.h"
+
+DEFINE_string(keymaster_pipe, "", "Keymaster pipe path");
+DEFINE_string(gatekeeper_pipe, "", "Gatekeeper pipe path");
+DEFINE_bool(use_tpm, false, "Whether to use TPM for cryptography primitives.");
+
+int main(int argc, char** argv) {
+  ::android::base::InitLogging(argv, android::base::StderrLogger);
+  gflags::ParseCommandLineFlags(&argc, &argv, true);
+
+  std::string keymaster_pipe = FLAGS_keymaster_pipe;
+  if (keymaster_pipe.empty()) {
+    LOG(FATAL) << "Keymaster pipe (--keymaster_pipe) not specified.";
+  }
+  std::string gatekeeper_pipe = FLAGS_gatekeeper_pipe;
+  if (gatekeeper_pipe.empty()) {
+    LOG(FATAL) << "Gatekeeper pipe (--gatekeeper_pipe) not specified.";
+  }
+
+  bool use_tpm = FLAGS_use_tpm;
+  if (keymaster_pipe.empty() || gatekeeper_pipe.empty()) {
+    LOG(ERROR) << "Invalid arguments. See --help for details.";
+    return 1;
+  }
+
+  // Start up secure_env and wait for its threads to exit before returning.
+  if (!secure_env::StartSecureEnv(keymaster_pipe.c_str(),
+                                  gatekeeper_pipe.c_str(), use_tpm)) {
+    return 1;
+  }
+
+  return 0;
+}
\ No newline at end of file
diff --git a/host/commands/secure_env/soft_gatekeeper.h b/host/commands/secure_env/soft_gatekeeper.h
index c3322e8..9a4c61d 100644
--- a/host/commands/secure_env/soft_gatekeeper.h
+++ b/host/commands/secure_env/soft_gatekeeper.h
@@ -89,10 +89,14 @@
     }
 
     virtual uint64_t GetMillisecondsSinceBoot() const {
+#ifdef _WIN32
+        return GetTickCount64();
+#else
         struct timespec time;
         int res = clock_gettime(CLOCK_BOOTTIME, &time);
         if (res < 0) return 0;
         return (time.tv_sec * 1000) + (time.tv_nsec / 1000 / 1000);
+#endif
     }
 
     virtual bool IsHardwareBacked() const { return false; }
diff --git a/host/commands/secure_env/tpm_attestation_record.cpp b/host/commands/secure_env/tpm_attestation_record.cpp
index 674570b..ec97d36 100644
--- a/host/commands/secure_env/tpm_attestation_record.cpp
+++ b/host/commands/secure_env/tpm_attestation_record.cpp
@@ -44,7 +44,7 @@
 }  // namespace
 
 TpmAttestationRecordContext::TpmAttestationRecordContext()
-    : keymaster::AttestationContext(::keymaster::KmVersion::KEYMINT_2),
+    : keymaster::AttestationContext(::keymaster::KmVersion::KEYMINT_3),
       vb_params_(MakeVbParams()),
       unique_id_hbk_(16) {
   RAND_bytes(unique_id_hbk_.data(), unique_id_hbk_.size());
@@ -54,19 +54,91 @@
   return KM_SECURITY_LEVEL_TRUSTED_ENVIRONMENT;
 }
 
+// Return true if entries match, false otherwise.
+bool matchAttestationId(keymaster_blob_t blob, const std::vector<uint8_t>& id) {
+  if (blob.data_length != id.size()) {
+    return false;
+  }
+  if (memcmp(blob.data, id.data(), id.size())) {
+    return false;
+  }
+  return true;
+}
+
 keymaster_error_t TpmAttestationRecordContext::VerifyAndCopyDeviceIds(
-    const AuthorizationSet& /*attestation_params*/,
-    AuthorizationSet* /*attestation*/) const {
-  LOG(DEBUG) << "TODO(schuffelen): Implement VerifyAndCopyDeviceIds";
-  return KM_ERROR_UNIMPLEMENTED;
+    const AuthorizationSet& attestation_params,
+    AuthorizationSet* attestation) const {
+  const AttestationIds& ids = attestation_ids_;
+  bool found_mismatch = false;
+  for (auto& entry : attestation_params) {
+    switch (entry.tag) {
+      case KM_TAG_ATTESTATION_ID_BRAND:
+        found_mismatch |= !matchAttestationId(entry.blob, ids.brand);
+        attestation->push_back(entry);
+        break;
+
+      case KM_TAG_ATTESTATION_ID_DEVICE:
+        found_mismatch |= !matchAttestationId(entry.blob, ids.device);
+        attestation->push_back(entry);
+        break;
+
+      case KM_TAG_ATTESTATION_ID_PRODUCT:
+        found_mismatch |= !matchAttestationId(entry.blob, ids.product);
+        attestation->push_back(entry);
+        break;
+
+      case KM_TAG_ATTESTATION_ID_SERIAL:
+        found_mismatch |= !matchAttestationId(entry.blob, ids.serial);
+        attestation->push_back(entry);
+        break;
+
+      case KM_TAG_ATTESTATION_ID_IMEI:
+        found_mismatch |= !matchAttestationId(entry.blob, ids.imei);
+        attestation->push_back(entry);
+        break;
+
+      case KM_TAG_ATTESTATION_ID_MEID:
+        found_mismatch |= !matchAttestationId(entry.blob, ids.meid);
+        attestation->push_back(entry);
+        break;
+
+      case KM_TAG_ATTESTATION_ID_MANUFACTURER:
+        found_mismatch |= !matchAttestationId(entry.blob, ids.manufacturer);
+        attestation->push_back(entry);
+        break;
+
+      case KM_TAG_ATTESTATION_ID_MODEL:
+        found_mismatch |= !matchAttestationId(entry.blob, ids.model);
+        attestation->push_back(entry);
+        break;
+
+      case KM_TAG_ATTESTATION_ID_SECOND_IMEI:
+        found_mismatch |= !matchAttestationId(entry.blob, ids.second_imei);
+        attestation->push_back(entry);
+        break;
+
+      default:
+        // Ignore non-ID tags.
+        break;
+    }
+  }
+
+  if (found_mismatch) {
+    attestation->Clear();
+    return KM_ERROR_CANNOT_ATTEST_IDS;
+  }
+
+  return KM_ERROR_OK;
 }
 
 keymaster::Buffer TpmAttestationRecordContext::GenerateUniqueId(
     uint64_t creation_date_time, const keymaster_blob_t& application_id,
     bool reset_since_rotation, keymaster_error_t* error) const {
-  *error = KM_ERROR_OK;
-  return keymaster::generate_unique_id(unique_id_hbk_, creation_date_time,
-                                       application_id, reset_since_rotation);
+  keymaster::Buffer unique_id;
+  *error = keymaster::generate_unique_id(unique_id_hbk_, creation_date_time,
+                                         application_id, reset_since_rotation,
+                                         &unique_id);
+  return unique_id;
 }
 
 const VerifiedBootParams* TpmAttestationRecordContext::GetVerifiedBootParams(
@@ -107,4 +179,28 @@
   vb_params_.device_locked = bootloader_state == "locked";
 }
 
+keymaster_error_t TpmAttestationRecordContext::SetAttestationIds(
+    const keymaster::SetAttestationIdsRequest& request) {
+  attestation_ids_.brand.assign(request.brand.begin(), request.brand.end());
+  attestation_ids_.device.assign(request.device.begin(), request.device.end());
+  attestation_ids_.product.assign(request.product.begin(),
+                                  request.product.end());
+  attestation_ids_.serial.assign(request.serial.begin(), request.serial.end());
+  attestation_ids_.imei.assign(request.imei.begin(), request.imei.end());
+  attestation_ids_.meid.assign(request.meid.begin(), request.meid.end());
+  attestation_ids_.manufacturer.assign(request.manufacturer.begin(),
+                                       request.manufacturer.end());
+  attestation_ids_.model.assign(request.model.begin(), request.model.end());
+
+  return KM_ERROR_OK;
+}
+
+keymaster_error_t TpmAttestationRecordContext::SetAttestationIdsKM3(
+    const keymaster::SetAttestationIdsKM3Request& request) {
+  SetAttestationIds(request.base);
+  attestation_ids_.second_imei.assign(request.second_imei.begin(),
+                                      request.second_imei.end());
+
+  return KM_ERROR_OK;
+}
 }  // namespace cuttlefish
diff --git a/host/commands/secure_env/tpm_attestation_record.h b/host/commands/secure_env/tpm_attestation_record.h
index dba0e91..9150d6b 100644
--- a/host/commands/secure_env/tpm_attestation_record.h
+++ b/host/commands/secure_env/tpm_attestation_record.h
@@ -22,10 +22,23 @@
 #include <string_view>
 #include <vector>
 
+#include <keymaster/android_keymaster_messages.h>
 #include <keymaster/attestation_context.h>
 
 namespace cuttlefish {
 
+struct AttestationIds {
+  std::vector<uint8_t> brand;
+  std::vector<uint8_t> device;
+  std::vector<uint8_t> product;
+  std::vector<uint8_t> serial;
+  std::vector<uint8_t> imei;
+  std::vector<uint8_t> meid;
+  std::vector<uint8_t> manufacturer;
+  std::vector<uint8_t> model;
+  std::vector<uint8_t> second_imei;
+};
+
 class TpmAttestationRecordContext : public keymaster::AttestationContext {
 public:
  TpmAttestationRecordContext();
@@ -46,11 +59,16 @@
  void SetVerifiedBootInfo(std::string_view verified_boot_state,
                           std::string_view bootloader_state,
                           const std::vector<uint8_t>& vbmeta_digest);
+ keymaster_error_t SetAttestationIds(
+     const keymaster::SetAttestationIdsRequest& request);
+ keymaster_error_t SetAttestationIdsKM3(
+     const keymaster::SetAttestationIdsKM3Request& request);
 
 private:
  std::vector<uint8_t> vbmeta_digest_;
  VerifiedBootParams vb_params_;
  std::vector<uint8_t> unique_id_hbk_;
+ AttestationIds attestation_ids_;
 };
 
 }  // namespace cuttlefish
diff --git a/host/commands/secure_env/tpm_ffi.cpp b/host/commands/secure_env/tpm_ffi.cpp
new file mode 100644
index 0000000..7ba83cf
--- /dev/null
+++ b/host/commands/secure_env/tpm_ffi.cpp
@@ -0,0 +1,78 @@
+//
+// Copyright (C) 2020 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "tpm_ffi.h"
+
+#include <android-base/logging.h>
+
+#include "host/commands/secure_env/tpm_hmac.h"
+#include "host/commands/secure_env/tpm_resource_manager.h"
+
+using cuttlefish::TpmResourceManager;
+
+extern "C" {
+
+uint32_t tpm_hmac(void* trm, const uint8_t* data, uint32_t data_len,
+                  uint8_t* tag, uint32_t tag_len) {
+  if (trm == nullptr) {
+    LOG(ERROR) << "No TPM resource manager provided";
+    return 1;
+  }
+  TpmResourceManager* resource_manager =
+      reinterpret_cast<TpmResourceManager*>(trm);
+  auto hmac =
+      TpmHmacWithContext(*resource_manager, "TpmHmac_context", data, data_len);
+  if (!hmac) {
+    LOG(ERROR) << "Could not calculate HMAC";
+    return 1;
+  } else if (hmac->size != tag_len) {
+    LOG(ERROR) << "HMAC size of " << hmac->size
+               << " different than expected tag len " << tag_len;
+    return 1;
+  }
+  memcpy(tag, hmac->buffer, tag_len);
+  return 0;
+}
+
+void secure_env_log(const char* file, unsigned int line, int severity,
+                    const char* tag, const char* msg) {
+  android::base::LogSeverity severity_enum;
+  switch (severity) {
+    case 0:
+      severity_enum = android::base::LogSeverity::VERBOSE;
+      break;
+    case 1:
+      severity_enum = android::base::LogSeverity::DEBUG;
+      break;
+    case 2:
+      severity_enum = android::base::LogSeverity::INFO;
+      break;
+    case 3:
+      severity_enum = android::base::LogSeverity::WARNING;
+      break;
+    default:
+    case 4:
+      severity_enum = android::base::LogSeverity::ERROR;
+      break;
+    case 5:
+      severity_enum = android::base::LogSeverity::FATAL_WITHOUT_ABORT;
+      break;
+    case 6:
+      severity_enum = android::base::LogSeverity::FATAL;
+      break;
+  }
+  android::base::LogMessage::LogLine(file, line, severity_enum, tag, msg);
+}
+}
diff --git a/host/commands/secure_env/tpm_ffi.h b/host/commands/secure_env/tpm_ffi.h
new file mode 100644
index 0000000..c533ca8
--- /dev/null
+++ b/host/commands/secure_env/tpm_ffi.h
@@ -0,0 +1,42 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef SECURE_ENV_TPM_FFI_H_
+#define SECURE_ENV_TPM_FFI_H_
+
+#include <stdint.h>
+
+// C-based entrypoints for accessing TPM functionality, for use by Rust code.
+// Rust equivalents are generated using `bindgen`.
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/**
+ * Provide an FFI-accessible version of `TpmHmacWithContext`.
+ * Returns 0 on success, 1 on error.
+ */
+uint32_t tpm_hmac(void* trm, const uint8_t* data, uint32_t data_len,
+                  uint8_t* tag, uint32_t tag_len);
+
+void secure_env_log(const char* file, unsigned int line, int severity,
+                    const char* tag, const char* msg);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif  // SECURE_ENV_TPM_FFI_H_
diff --git a/host/commands/secure_env/tpm_gatekeeper.cpp b/host/commands/secure_env/tpm_gatekeeper.cpp
index c8497d5..ae59415 100644
--- a/host/commands/secure_env/tpm_gatekeeper.cpp
+++ b/host/commands/secure_env/tpm_gatekeeper.cpp
@@ -24,6 +24,10 @@
 #include <tss2/tss2_mu.h>
 #include <tss2/tss2_rc.h>
 
+#ifdef _WIN32
+#include <sysinfoapi.h>
+#endif
+
 #include "host/commands/secure_env/primary_key_builder.h"
 #include "host/commands/secure_env/tpm_auth.h"
 #include "host/commands/secure_env/tpm_hmac.h"
@@ -95,21 +99,9 @@
     uint32_t length) const {
   memset(signature, 0, signature_length);
   std::string key_unique(reinterpret_cast<const char*>(key), key_length);
-  PrimaryKeyBuilder key_builder;
-  key_builder.UniqueData(key_unique);
-  key_builder.SigningKey();
-  auto key_slot = key_builder.CreateKey(resource_manager_);
-  if (!key_slot) {
-    LOG(ERROR) << "Unable to load signing key into TPM memory";
-    return;
-  }
+
   auto calculated_signature =
-      TpmHmac(
-          resource_manager_,
-          key_slot->get(),
-          TpmAuth(ESYS_TR_PASSWORD),
-          message,
-          length);
+      TpmHmacWithContext(resource_manager_, key_unique, message, length);
   if (!calculated_signature) {
     LOG(ERROR) << "Failure in calculating signature";
     return;
@@ -121,10 +113,14 @@
 }
 
 uint64_t TpmGatekeeper::GetMillisecondsSinceBoot() const {
+#ifdef _WIN32
+  return GetTickCount64();
+#else
   struct timespec time;
   int res = clock_gettime(CLOCK_BOOTTIME, &time);
   if (res < 0) return 0;
   return (time.tv_sec * 1000) + (time.tv_nsec / 1000 / 1000);
+#endif
 }
 
 gatekeeper::failure_record_t DefaultRecord(
diff --git a/host/commands/secure_env/tpm_hmac.cpp b/host/commands/secure_env/tpm_hmac.cpp
index b566659..6c1be6e 100644
--- a/host/commands/secure_env/tpm_hmac.cpp
+++ b/host/commands/secure_env/tpm_hmac.cpp
@@ -18,6 +18,7 @@
 #include <android-base/logging.h>
 #include <tss2/tss2_rc.h>
 
+#include "host/commands/secure_env/primary_key_builder.h"
 #include "host/commands/secure_env/tpm_resource_manager.h"
 
 namespace cuttlefish {
@@ -156,4 +157,19 @@
   return fn(resource_manager, key_handle, auth, data, data_size);
 }
 
+UniqueEsysPtr<TPM2B_DIGEST> TpmHmacWithContext(
+    TpmResourceManager& resource_manager, const std::string& context,
+    const uint8_t* data, size_t data_size) {
+  // Use the same context for all HMAC operations (ignoring the provided
+  // `context` parameter) to better comply with the KeyMint spec.
+  auto key_slot =
+      PrimaryKeyBuilder::CreateSigningKey(resource_manager, "TpmHmac_context");
+  if (!key_slot) {
+    LOG(ERROR) << "Could not make signing key for " << context;
+    return nullptr;
+  }
+  return TpmHmac(resource_manager, key_slot->get(), TpmAuth(ESYS_TR_PASSWORD),
+                 data, data_size);
+}
+
 }  // namespace cuttlefish
diff --git a/host/commands/secure_env/tpm_hmac.h b/host/commands/secure_env/tpm_hmac.h
index 09aa011..d06fbf6 100644
--- a/host/commands/secure_env/tpm_hmac.h
+++ b/host/commands/secure_env/tpm_hmac.h
@@ -16,6 +16,7 @@
 #pragma once
 
 #include <memory>
+#include <string>
 
 #include <tss2/tss2_esys.h>
 
@@ -52,4 +53,12 @@
     const uint8_t* data,
     size_t data_size);
 
+/**
+ * Returns a HMAC signature for `data` with a key created in the TPM with
+ * context / unique-data `context`.
+ */
+UniqueEsysPtr<TPM2B_DIGEST> TpmHmacWithContext(
+    TpmResourceManager& resource_manager, const std::string& context,
+    const uint8_t* data, size_t data_size);
+
 }  // namespace cuttlefish
diff --git a/host/commands/secure_env/tpm_key_blob_maker.cpp b/host/commands/secure_env/tpm_key_blob_maker.cpp
index 38236d8..961fd5e 100644
--- a/host/commands/secure_env/tpm_key_blob_maker.cpp
+++ b/host/commands/secure_env/tpm_key_blob_maker.cpp
@@ -82,6 +82,7 @@
       case KM_TAG_ATTESTATION_ID_BRAND:
       case KM_TAG_ATTESTATION_ID_DEVICE:
       case KM_TAG_ATTESTATION_ID_IMEI:
+      case KM_TAG_ATTESTATION_ID_SECOND_IMEI:
       case KM_TAG_ATTESTATION_ID_MANUFACTURER:
       case KM_TAG_ATTESTATION_ID_MEID:
       case KM_TAG_ATTESTATION_ID_MODEL:
diff --git a/host/commands/secure_env/tpm_keymaster_context.cpp b/host/commands/secure_env/tpm_keymaster_context.cpp
index 4a60915..bca81b8 100644
--- a/host/commands/secure_env/tpm_keymaster_context.cpp
+++ b/host/commands/secure_env/tpm_keymaster_context.cpp
@@ -29,7 +29,9 @@
 #include <keymaster/operation.h>
 #include <keymaster/wrapped_key.h>
 
+#include "host/commands/secure_env/primary_key_builder.h"
 #include "host/commands/secure_env/tpm_attestation_record.h"
+#include "host/commands/secure_env/tpm_hmac.h"
 #include "host/commands/secure_env/tpm_key_blob_maker.h"
 #include "host/commands/secure_env/tpm_random_source.h"
 #include "host/commands/secure_env/tpm_remote_provisioning_context.h"
@@ -571,6 +573,28 @@
   return error;
 }
 
+keymaster_error_t TpmKeymasterContext::CheckConfirmationToken(
+    const std::uint8_t* input_data, size_t input_data_size,
+    const uint8_t confirmation_token[keymaster::kConfirmationTokenSize]) const {
+  auto hmac = TpmHmacWithContext(resource_manager_, "confirmation_token",
+                                 input_data, input_data_size);
+  if (!hmac) {
+    LOG(ERROR) << "Could not calculate confirmation token hmac";
+    return KM_ERROR_UNKNOWN_ERROR;
+  }
+
+  CHECK(hmac->size == keymaster::kConfirmationTokenSize)
+      << "Hmac size for confirmation UI must be "
+      << keymaster::kConfirmationTokenSize;
+
+  std::vector<std::uint8_t> hmac_buffer(hmac->buffer,
+                                        hmac->buffer + hmac->size);
+
+  const auto is_equal =
+      std::equal(hmac_buffer.cbegin(), hmac_buffer.cend(), confirmation_token);
+  return is_equal ? KM_ERROR_OK : KM_ERROR_NO_USER_CONFIRMATION;
+}
+
 keymaster::RemoteProvisioningContext*
 TpmKeymasterContext::GetRemoteProvisioningContext() const {
   return remote_provisioning_context_.get();
diff --git a/host/commands/secure_env/tpm_keymaster_context.h b/host/commands/secure_env/tpm_keymaster_context.h
index dbcdcb4..528d0ed 100644
--- a/host/commands/secure_env/tpm_keymaster_context.h
+++ b/host/commands/secure_env/tpm_keymaster_context.h
@@ -116,6 +116,11 @@
       keymaster_key_format_t* wrapped_key_format,
       keymaster::KeymasterKeyBlob* wrapped_key_material) const override;
 
+  keymaster_error_t CheckConfirmationToken(
+      const std::uint8_t* input_data, size_t input_data_size,
+      const uint8_t confirmation_token[keymaster::kConfirmationTokenSize])
+      const;
+
   keymaster::RemoteProvisioningContext* GetRemoteProvisioningContext()
       const override;
 
@@ -127,6 +132,16 @@
   keymaster_error_t SetBootPatchlevel(uint32_t boot_patchlevel) override;
   std::optional<uint32_t> GetVendorPatchlevel() const override;
   std::optional<uint32_t> GetBootPatchlevel() const override;
+
+  keymaster_error_t SetAttestationIds(
+      const keymaster::SetAttestationIdsRequest& request) override {
+    return attestation_context_->SetAttestationIds(request);
+  }
+
+  keymaster_error_t SetAttestationIdsKM3(
+      const keymaster::SetAttestationIdsKM3Request& request) override {
+    return attestation_context_->SetAttestationIdsKM3(request);
+  }
 };
 
 }  // namespace cuttlefish
diff --git a/host/commands/secure_env/tpm_keymaster_enforcement.cpp b/host/commands/secure_env/tpm_keymaster_enforcement.cpp
index a5368c5..10b3164 100644
--- a/host/commands/secure_env/tpm_keymaster_enforcement.cpp
+++ b/host/commands/secure_env/tpm_keymaster_enforcement.cpp
@@ -18,6 +18,10 @@
 #include <android-base/endian.h>
 #include <android-base/logging.h>
 
+#ifdef _WIN32
+#include <sysinfoapi.h>
+#endif
+
 #include "host/commands/secure_env/primary_key_builder.h"
 #include "host/commands/secure_env/tpm_hmac.h"
 #include "host/commands/secure_env/tpm_key_blob_maker.h"
@@ -32,6 +36,7 @@
 using keymaster::km_id_t;
 using keymaster::VerifyAuthorizationRequest;
 using keymaster::VerifyAuthorizationResponse;
+
 namespace {
 inline bool operator==(const keymaster_blob_t& a, const keymaster_blob_t& b) {
   if (!a.data_length && !b.data_length) return true;
@@ -108,12 +113,16 @@
 }
 
 uint64_t TpmKeymasterEnforcement::get_current_time_ms() const {
+#ifdef _WIN32
+  return GetTickCount64();
+#else
   struct timespec tp;
   int err = clock_gettime(CLOCK_BOOTTIME, &tp);
   if (err) {
     return 0;
   }
   return timespec_to_ms(tp);
+#endif
 }
 
 keymaster_security_level_t TpmKeymasterEnforcement::SecurityLevel() const {
@@ -204,26 +213,14 @@
     }
   }
 
-  auto signing_key_builder = PrimaryKeyBuilder();
-  signing_key_builder.SigningKey();
-  signing_key_builder.UniqueData(std::string(unique_data, sizeof(unique_data)));
-  auto signing_key = signing_key_builder.CreateKey(resource_manager_);
-  if (!signing_key) {
-    LOG(ERROR) << "Could not make signing key for key id";
-    return KM_ERROR_UNKNOWN_ERROR;
-  }
-
   static const uint8_t signing_input[] = "Keymaster HMAC Verification";
-
-  auto hmac =
-      TpmHmac(resource_manager_, signing_key->get(), TpmAuth(ESYS_TR_PASSWORD),
-              signing_input, sizeof(signing_input));
-
+  auto hmac = TpmHmacWithContext(resource_manager_,
+                                 std::string(unique_data, sizeof(unique_data)),
+                                 signing_input, sizeof(signing_input));
   if (!hmac) {
     LOG(ERROR) << "Unable to complete signing check";
     return KM_ERROR_UNKNOWN_ERROR;
   }
-
   *sharingCheck = KeymasterBlob(hmac->buffer, hmac->size);
 
   return KM_ERROR_OK;
@@ -248,18 +245,9 @@
       .security_level = response.token.security_level,
   };
 
-  auto signing_key_builder = PrimaryKeyBuilder();
-  signing_key_builder.SigningKey();
-  signing_key_builder.UniqueData("verify_authorization");
-  auto signing_key = signing_key_builder.CreateKey(resource_manager_);
-  if (!signing_key) {
-    LOG(ERROR) << "Could not make signing key for verifying authorization";
-    return response;
-  }
-  auto hmac =
-      TpmHmac(resource_manager_, signing_key->get(), TpmAuth(ESYS_TR_PASSWORD),
-              reinterpret_cast<uint8_t*>(&verify_data), sizeof(verify_data));
-
+  auto hmac = TpmHmacWithContext(resource_manager_, "verify_authorization",
+                                 reinterpret_cast<uint8_t*>(&verify_data),
+                                 sizeof(verify_data));
   if (!hmac) {
     LOG(ERROR) << "Could not calculate verification hmac";
     return response;
@@ -278,19 +266,14 @@
   token->timestamp = get_current_time_ms();
   token->security_level = SecurityLevel();
   token->mac = KeymasterBlob();
-
-  auto signing_key_builder = PrimaryKeyBuilder();
-  signing_key_builder.SigningKey();
-  signing_key_builder.UniqueData("timestamp_token");
-  auto signing_key = signing_key_builder.CreateKey(resource_manager_);
-  if (!signing_key) {
-    LOG(ERROR) << "Could not make signing key for verifying authorization";
-    return KM_ERROR_UNKNOWN_ERROR;
-  }
   std::vector<uint8_t> token_buf_to_sign(token->SerializedSize(), 0);
+  token->Serialize(token_buf_to_sign.data(),
+                   token_buf_to_sign.data() + token_buf_to_sign.size());
+
   auto hmac =
-      TpmHmac(resource_manager_, signing_key->get(), TpmAuth(ESYS_TR_PASSWORD),
-              token_buf_to_sign.data(), token_buf_to_sign.size());
+      TpmHmacWithContext(resource_manager_, "timestamp_token",
+                         token_buf_to_sign.data(), token_buf_to_sign.size());
+
   if (!hmac) {
     LOG(ERROR) << "Could not calculate timestamp token hmac";
     return KM_ERROR_UNKNOWN_ERROR;
@@ -323,17 +306,9 @@
 
 bool TpmKeymasterEnforcement::CreateKeyId(const keymaster_key_blob_t& key_blob,
                                           km_id_t* keyid) const {
-  auto signing_key_builder = PrimaryKeyBuilder();
-  signing_key_builder.SigningKey();
-  signing_key_builder.UniqueData("key_id");
-  auto signing_key = signing_key_builder.CreateKey(resource_manager_);
-  if (!signing_key) {
-    LOG(ERROR) << "Could not make signing key for key id";
-    return false;
-  }
   auto hmac =
-      TpmHmac(resource_manager_, signing_key->get(), TpmAuth(ESYS_TR_PASSWORD),
-              key_blob.key_material, key_blob.key_material_size);
+      TpmHmacWithContext(resource_manager_, "key_id", key_blob.key_material,
+                         key_blob.key_material_size);
   if (!hmac) {
     LOG(ERROR) << "Failed to make a signature for a key id";
     return false;
diff --git a/host/commands/secure_env/tpm_remote_provisioning_context.cpp b/host/commands/secure_env/tpm_remote_provisioning_context.cpp
index 87bb172..9899303 100644
--- a/host/commands/secure_env/tpm_remote_provisioning_context.cpp
+++ b/host/commands/secure_env/tpm_remote_provisioning_context.cpp
@@ -43,16 +43,15 @@
 
 std::vector<uint8_t> TpmRemoteProvisioningContext::DeriveBytesFromHbk(
     const std::string& context, size_t num_bytes) const {
-  PrimaryKeyBuilder key_builder;
-  key_builder.SigningKey();
-  key_builder.UniqueData("HardwareBoundKey");
-  TpmObjectSlot key = key_builder.CreateKey(resource_manager_);
-
-  auto hbk =
-      TpmHmac(resource_manager_, key->get(), TpmAuth(ESYS_TR_PASSWORD),
-              reinterpret_cast<const uint8_t*>(context.data()), context.size());
-
   std::vector<uint8_t> result(num_bytes);
+  auto hbk = TpmHmacWithContext(
+      resource_manager_, "HardwareBoundKey",
+      reinterpret_cast<const uint8_t*>(context.data()), context.size());
+  if (!hbk) {
+    LOG(ERROR) << "Error calculating HMAC";
+    return result;
+  }
+
   if (!HKDF(result.data(), num_bytes,              //
             EVP_sha256(),                          //
             hbk->buffer, hbk->size,                //
@@ -60,14 +59,14 @@
             reinterpret_cast<const uint8_t*>(context.data()), context.size())) {
     // Should never fail. Even if it could the API has no way of reporting the
     // error.
-    LOG(ERROR) << "Error calculating HMAC: " << ERR_peek_last_error();
+    LOG(ERROR) << "Error calculating HKDF: " << ERR_peek_last_error();
   }
 
   return result;
 }
 
-std::unique_ptr<cppbor::Map> TpmRemoteProvisioningContext::CreateDeviceInfo()
-    const {
+std::unique_ptr<cppbor::Map> TpmRemoteProvisioningContext::CreateDeviceInfo(
+    uint32_t csrVersion) const {
   auto result = std::make_unique<cppbor::Map>();
   result->add(cppbor::Tstr("brand"), cppbor::Tstr("Google"));
   result->add(cppbor::Tstr("manufacturer"), cppbor::Tstr("Google"));
@@ -101,7 +100,10 @@
     result->add(cppbor::Tstr("vendor_patch_level"),
                 cppbor::Uint(*vendor_patchlevel_));
   }
-  result->add(cppbor::Tstr("version"), cppbor::Uint(2));
+  // "version" field was removed from DeviceInfo in CSR v3.
+  if (csrVersion < 3) {
+    result->add(cppbor::Tstr("version"), cppbor::Uint(csrVersion));
+  }
   result->add(cppbor::Tstr("fused"), cppbor::Uint(0));
   result->add(cppbor::Tstr("security_level"), cppbor::Tstr("tee"));
   result->canonicalize();
@@ -124,17 +126,17 @@
   }
   ED25519_keypair_from_seed(pubKey.data(), privKey.data(), seed.data());
 
+  const auto issuer_and_subject = "Cuttlefish secure env";
   auto coseKey = cppbor::Map()
                      .add(CoseKey::KEY_TYPE, OCTET_KEY_PAIR)
                      .add(CoseKey::ALGORITHM, EDDSA)
                      .add(CoseKey::CURVE, ED25519)
-                     .add(CoseKey::KEY_OPS, VERIFY)
                      .add(CoseKey::PUBKEY_X, pubKey)
                      .canonicalize();
   auto sign1Payload =
       cppbor::Map()
-          .add(1 /* Issuer */, "Issuer")
-          .add(2 /* Subject */, "Subject")
+          .add(1 /* Issuer */, issuer_and_subject)
+          .add(2 /* Subject */, issuer_and_subject)
           .add(-4670552 /* Subject Pub Key */, coseKey.encode())
           .add(-4670553 /* Key Usage (little-endian order) */,
                std::vector<uint8_t>{0x20} /* keyCertSign = 1<<5 */)
@@ -199,19 +201,9 @@
 std::optional<cppcose::HmacSha256>
 TpmRemoteProvisioningContext::GenerateHmacSha256(
     const cppcose::bytevec& input) const {
-  auto signing_key_builder = PrimaryKeyBuilder();
-  signing_key_builder.SigningKey();
-  signing_key_builder.UniqueData("Public Key Authentication Key");
-  auto signing_key = signing_key_builder.CreateKey(resource_manager_);
-  if (!signing_key) {
-    LOG(ERROR) << "Could not make MAC key for authenticating the pubkey";
-    return std::nullopt;
-  }
-
   auto tpm_digest =
-      TpmHmac(resource_manager_, signing_key->get(), TpmAuth(ESYS_TR_PASSWORD),
-              input.data(), input.size());
-
+      TpmHmacWithContext(resource_manager_, "Public Key Authentication Key",
+                         input.data(), input.size());
   if (!tpm_digest) {
     LOG(ERROR) << "Could not calculate hmac";
     return std::nullopt;
@@ -229,4 +221,34 @@
   return hmac;
 }
 
+void TpmRemoteProvisioningContext::GetHwInfo(
+    keymaster::GetHwInfoResponse* hwInfo) const {
+  hwInfo->version = 3;
+  hwInfo->rpcAuthorName = "Google";
+  hwInfo->supportedEekCurve = 0 /* CURVE_NONE */;
+  hwInfo->uniqueId = "remote keymint";
+  hwInfo->supportedNumKeysInCsr = 20;
+}
+
+cppcose::ErrMsgOr<cppbor::Array> TpmRemoteProvisioningContext::BuildCsr(
+    const std::vector<uint8_t>& challenge, cppbor::Array keysToSign) const {
+  uint32_t csrVersion = 3;
+  auto deviceInfo = std::move(*CreateDeviceInfo(csrVersion));
+  auto csrPayload = cppbor::Array()
+                        .add(csrVersion)
+                        .add("keymint" /* CertificateType */)
+                        .add(std::move(deviceInfo))
+                        .add(std::move(keysToSign));
+  auto signedDataPayload =
+      cppbor::Array().add(challenge).add(cppbor::Bstr(csrPayload.encode()));
+  auto signedData = constructCoseSign1(
+      devicePrivKey_, signedDataPayload.encode(), {} /* aad */);
+
+  return cppbor::Array()
+      .add(1 /* version */)
+      .add(cppbor::Map() /* UdsCerts */)
+      .add(std::move(*bcc_.clone()->asArray()) /* DiceCertChain */)
+      .add(std::move(*signedData) /* SignedData */);
+}
+
 }  // namespace cuttlefish
diff --git a/host/commands/secure_env/tpm_remote_provisioning_context.h b/host/commands/secure_env/tpm_remote_provisioning_context.h
index 9c9e359..726656f 100644
--- a/host/commands/secure_env/tpm_remote_provisioning_context.h
+++ b/host/commands/secure_env/tpm_remote_provisioning_context.h
@@ -33,13 +33,19 @@
   ~TpmRemoteProvisioningContext() override = default;
   std::vector<uint8_t> DeriveBytesFromHbk(const std::string& context,
                                           size_t numBytes) const override;
-  std::unique_ptr<cppbor::Map> CreateDeviceInfo() const override;
+  std::unique_ptr<cppbor::Map> CreateDeviceInfo(
+      uint32_t csrVersion) const override;
   cppcose::ErrMsgOr<std::vector<uint8_t>> BuildProtectedDataPayload(
       bool isTestMode,                     //
       const std::vector<uint8_t>& macKey,  //
       const std::vector<uint8_t>& aad) const override;
   std::optional<cppcose::HmacSha256> GenerateHmacSha256(
       const cppcose::bytevec& input) const override;
+  void GetHwInfo(keymaster::GetHwInfoResponse* hwInfo) const override;
+  cppcose::ErrMsgOr<cppbor::Array> BuildCsr(
+      const std::vector<uint8_t>& challenge,
+      cppbor::Array keysToSign) const override;
+
   std::pair<std::vector<uint8_t>, cppbor::Array> GenerateBcc(
       bool testMode) const;
   void SetSystemVersion(uint32_t os_version, uint32_t os_patchlevel);
diff --git a/host/commands/start/README.md b/host/commands/start/README.md
new file mode 100644
index 0000000..1e92874
--- /dev/null
+++ b/host/commands/start/README.md
@@ -0,0 +1,10 @@
+Entry point to launching one or more Cuttlefish virtual devices.
+`cvd_internal_start` invokes `assemble_cvd` once to set up all devices, and
+then invokes `run_cvd` once per device. Arguments / flags are passed through to
+`assemble_cvd` and `run_cvd`.
+
+The `cvd_internal_start` executable used to be named `launch_cvd`, and is still
+referred to as `launch_cvd` in some places.
+
+[![Linkage diagram](./linkage.png)](https://cs.android.com/android/platform/superproject/+/master:device/google/cuttlefish/host/commands/start/linkage.svg)
+
diff --git a/host/commands/start/filesystem_explorer.cc b/host/commands/start/filesystem_explorer.cc
index aac8ae6..e8f570a 100644
--- a/host/commands/start/filesystem_explorer.cc
+++ b/host/commands/start/filesystem_explorer.cc
@@ -30,26 +30,35 @@
 
 cuttlefish::FetcherConfig AvailableFilesReport() {
   std::string current_directory = cuttlefish::AbsolutePath(cuttlefish::CurrentDirectory());
+  cuttlefish::FetcherConfig config;
+
   if (cuttlefish::FileExists(current_directory + "/fetcher_config.json")) {
-    cuttlefish::FetcherConfig config;
     config.LoadFromFile(current_directory + "/fetcher_config.json");
     return config;
   }
 
+  // If needed check if `fetch_config.json` exists inside the $HOME directory.
+  // `assemble_cvd` will perform a similar check.
+  std::string home_directory =
+      cuttlefish::StringFromEnv("HOME", cuttlefish::CurrentDirectory());
+  std::string fetcher_config_path = home_directory + "/fetcher_config.json";
+  if (cuttlefish::FileExists(fetcher_config_path)) {
+    config.LoadFromFile(fetcher_config_path);
+    return config;
+  }
+
   std::set<std::string> files;
 
-  std::string psuedo_fetcher_dir =
-      cuttlefish::StringFromEnv("ANDROID_HOST_OUT",
-                         cuttlefish::StringFromEnv("HOME", current_directory));
-  std::string psuedo_fetcher_config =
-      psuedo_fetcher_dir + "/launcher_pseudo_fetcher_config.json";
-  files.insert(psuedo_fetcher_config);
+  std::string pseudo_fetcher_dir = cuttlefish::StringFromEnv(
+      "ANDROID_HOST_OUT", cuttlefish::StringFromEnv("HOME", current_directory));
+  std::string pseudo_fetcher_config =
+      pseudo_fetcher_dir + "/launcher_pseudo_fetcher_config.json";
+  files.insert(pseudo_fetcher_config);
 
-  cuttlefish::FetcherConfig config;
   config.RecordFlags();
   for (const auto& file : files) {
     config.add_cvd_file(cuttlefish::CvdFile(cuttlefish::FileSource::LOCAL_FILE, "", "", file));
   }
-  config.SaveToFile(psuedo_fetcher_config);
+  config.SaveToFile(pseudo_fetcher_config);
   return config;
 }
diff --git a/host/commands/start/flag_forwarder.cc b/host/commands/start/flag_forwarder.cc
index aa492c5..7cf92b0 100644
--- a/host/commands/start/flag_forwarder.cc
+++ b/host/commands/start/flag_forwarder.cc
@@ -17,9 +17,10 @@
 
 #include <cstring>
 
-#include <sstream>
 #include <map>
+#include <sstream>
 #include <string>
+#include <unordered_set>
 #include <vector>
 
 #include <gflags/gflags.h>
@@ -28,6 +29,7 @@
 
 #include "common/libs/fs/shared_buf.h"
 #include "common/libs/fs/shared_fd.h"
+#include "common/libs/utils/contains.h"
 #include "common/libs/utils/subprocess.h"
 
 /**
@@ -310,18 +312,67 @@
   }
 }
 
+// Hash table for repeatable flags (able to have repeated flag inputs)
+static std::unordered_set<std::string> kRepeatableFlags = {"custom_action_config",
+                                                    "custom_actions"};
+
 std::vector<std::string> FlagForwarder::ArgvForSubprocess(
-    const std::string& subprocess) const {
+    const std::string& subprocess, const std::vector<std::string>& args) const {
   std::vector<std::string> subprocess_argv;
+  std::map<std::string, std::vector<std::string>> name_to_value;
+
+  if (!args.empty()) {
+    for (int index = 0; index < args.size(); index++) {
+      std::string_view argument = args[index];
+      if (!android::base::ConsumePrefix(&argument, "-")) {
+        continue;
+      }
+      android::base::ConsumePrefix(&argument, "-");
+      std::size_t qual_pos = argument.find('=');
+      if (qual_pos == std::string::npos) {
+        // to handle error cases: --flag value and -flag value
+        // but it only apply to repeatable flag case
+        if (cuttlefish::Contains(kRepeatableFlags, argument)) {
+          // matched
+          LOG(FATAL) << subprocess
+                     << " has wrong flag input: " << args[index];
+        }
+        continue;
+      }
+      const std::string name(argument.substr(0, qual_pos));
+      const std::string value(
+          argument.substr(qual_pos + 1, argument.length() - qual_pos - 1));
+
+      if (cuttlefish::Contains(kRepeatableFlags, name)) {
+        // matched
+        if (!cuttlefish::Contains(name_to_value, name)) {
+          // this flag is new
+          std::vector<std::string> values;
+          name_to_value[name] = values;
+        }
+        name_to_value[name].push_back(value);
+      }
+    }
+  }
+
   for (const auto& flag : flags_) {
     if (flag->Subprocess() == subprocess) {
-      gflags::CommandLineFlagInfo flag_info =
-          gflags::GetCommandLineFlagInfoOrDie(flag->Name().c_str());
-      if (!flag_info.is_default) {
-        subprocess_argv.push_back("--" + flag->Name() + "=" + flag_info.current_value);
+      if (cuttlefish::Contains(kRepeatableFlags, flag->Name()) &&
+          cuttlefish::Contains(name_to_value, flag->Name())) {
+        // this is a repeatable flag with input values
+        for (const auto& value : name_to_value[flag->Name()]) {
+          subprocess_argv.push_back("--" + flag->Name() + "=" + value);
+        }
+      } else {
+        // normal case
+        gflags::CommandLineFlagInfo flag_info =
+            gflags::GetCommandLineFlagInfoOrDie(flag->Name().c_str());
+        if (!flag_info.is_default) {
+          subprocess_argv.push_back("--" + flag->Name() + "=" +
+                                    flag_info.current_value);
+        }
       }
     }
   }
   return subprocess_argv;
 }
-
diff --git a/host/commands/start/flag_forwarder.h b/host/commands/start/flag_forwarder.h
index b0f5cfe..0c003d4 100644
--- a/host/commands/start/flag_forwarder.h
+++ b/host/commands/start/flag_forwarder.h
@@ -35,5 +35,7 @@
   FlagForwarder& operator=(const FlagForwarder&) = delete;
 
   void UpdateFlagDefaults() const;
-  std::vector<std::string> ArgvForSubprocess(const std::string& subprocess) const;
+  std::vector<std::string> ArgvForSubprocess(
+      const std::string& subprocess,
+      const std::vector<std::string>& args = std::vector<std::string>()) const;
 };
diff --git a/host/commands/start/linkage.dot b/host/commands/start/linkage.dot
new file mode 100644
index 0000000..bfdc3a4
--- /dev/null
+++ b/host/commands/start/linkage.dot
@@ -0,0 +1,13 @@
+digraph {
+  assemble_cvd [URL = "https://cs.android.com/android/platform/superproject/+/master:device/google/cuttlefish/host/commands/assemble_cvd/"]
+  cvd [URL = "https://cs.android.com/android/platform/superproject/+/master:device/google/cuttlefish/host/commands/cvd/"]
+  cvd_internal_start [label = "launch_cvd / cvd_internal_start", URL = "https://cs.android.com/android/platform/superproject/+/master:device/google/cuttlefish/host/commands/start/"]
+  run_cvd [URL = "https://cs.android.com/android/platform/superproject/+/master:device/google/cuttlefish/host/commands/run_cvd/"]
+  user [label = "User CLI"]
+
+  user -> cvd
+  cvd -> cvd_internal_start
+  user -> cvd_internal_start
+  cvd_internal_start -> assemble_cvd
+  cvd_internal_start -> run_cvd
+}
diff --git a/host/commands/start/linkage.png b/host/commands/start/linkage.png
new file mode 100644
index 0000000..75fcada
--- /dev/null
+++ b/host/commands/start/linkage.png
Binary files differ
diff --git a/host/commands/start/linkage.svg b/host/commands/start/linkage.svg
new file mode 100644
index 0000000..e4ccb30
--- /dev/null
+++ b/host/commands/start/linkage.svg
@@ -0,0 +1,85 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN"
+ "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
+<!-- Generated by graphviz version 2.43.0 (0)
+ -->
+<!-- Title: %3 Pages: 1 -->
+<svg width="252pt" height="260pt"
+ viewBox="0.00 0.00 252.38 260.00" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
+<g id="graph0" class="graph" transform="scale(1 1) rotate(0) translate(4 256)">
+<title>%3</title>
+<polygon fill="white" stroke="transparent" points="-4,4 -4,-256 248.38,-256 248.38,4 -4,4"/>
+<!-- assemble_cvd -->
+<g id="node1" class="node">
+<title>assemble_cvd</title>
+<g id="a_node1"><a xlink:href="https://cs.android.com/android/platform/superproject/+/master:device/google/cuttlefish/host/commands/assemble_cvd/" xlink:title="assemble_cvd">
+<ellipse fill="none" stroke="black" cx="63.19" cy="-18" rx="60.39" ry="18"/>
+<text text-anchor="middle" x="63.19" y="-14.3" font-family="Times,serif" font-size="14.00">assemble_cvd</text>
+</a>
+</g>
+</g>
+<!-- cvd -->
+<g id="node2" class="node">
+<title>cvd</title>
+<g id="a_node2"><a xlink:href="https://cs.android.com/android/platform/superproject/+/master:device/google/cuttlefish/host/commands/cvd/" xlink:title="cvd">
+<ellipse fill="none" stroke="black" cx="95.19" cy="-162" rx="27" ry="18"/>
+<text text-anchor="middle" x="95.19" y="-158.3" font-family="Times,serif" font-size="14.00">cvd</text>
+</a>
+</g>
+</g>
+<!-- cvd_internal_start -->
+<g id="node3" class="node">
+<title>cvd_internal_start</title>
+<g id="a_node3"><a xlink:href="https://cs.android.com/android/platform/superproject/+/master:device/google/cuttlefish/host/commands/start/" xlink:title="launch_cvd / cvd_internal_start">
+<ellipse fill="none" stroke="black" cx="122.19" cy="-90" rx="122.38" ry="18"/>
+<text text-anchor="middle" x="122.19" y="-86.3" font-family="Times,serif" font-size="14.00">launch_cvd / cvd_internal_start</text>
+</a>
+</g>
+</g>
+<!-- cvd&#45;&gt;cvd_internal_start -->
+<g id="edge2" class="edge">
+<title>cvd&#45;&gt;cvd_internal_start</title>
+<path fill="none" stroke="black" d="M101.59,-144.41C104.62,-136.54 108.33,-126.93 111.76,-118.04"/>
+<polygon fill="black" stroke="black" points="115.13,-119.04 115.46,-108.45 108.6,-116.52 115.13,-119.04"/>
+</g>
+<!-- cvd_internal_start&#45;&gt;assemble_cvd -->
+<g id="edge4" class="edge">
+<title>cvd_internal_start&#45;&gt;assemble_cvd</title>
+<path fill="none" stroke="black" d="M107.91,-72.05C100.73,-63.54 91.9,-53.07 83.99,-43.68"/>
+<polygon fill="black" stroke="black" points="86.46,-41.18 77.34,-35.79 81.11,-45.69 86.46,-41.18"/>
+</g>
+<!-- run_cvd -->
+<g id="node4" class="node">
+<title>run_cvd</title>
+<g id="a_node4"><a xlink:href="https://cs.android.com/android/platform/superproject/+/master:device/google/cuttlefish/host/commands/run_cvd/" xlink:title="run_cvd">
+<ellipse fill="none" stroke="black" cx="181.19" cy="-18" rx="39.79" ry="18"/>
+<text text-anchor="middle" x="181.19" y="-14.3" font-family="Times,serif" font-size="14.00">run_cvd</text>
+</a>
+</g>
+</g>
+<!-- cvd_internal_start&#45;&gt;run_cvd -->
+<g id="edge5" class="edge">
+<title>cvd_internal_start&#45;&gt;run_cvd</title>
+<path fill="none" stroke="black" d="M136.47,-72.05C143.83,-63.33 152.92,-52.54 160.98,-42.98"/>
+<polygon fill="black" stroke="black" points="163.68,-45.21 167.44,-35.31 158.32,-40.7 163.68,-45.21"/>
+</g>
+<!-- user -->
+<g id="node5" class="node">
+<title>user</title>
+<ellipse fill="none" stroke="black" cx="122.19" cy="-234" rx="44.39" ry="18"/>
+<text text-anchor="middle" x="122.19" y="-230.3" font-family="Times,serif" font-size="14.00">User CLI</text>
+</g>
+<!-- user&#45;&gt;cvd -->
+<g id="edge1" class="edge">
+<title>user&#45;&gt;cvd</title>
+<path fill="none" stroke="black" d="M115.65,-216.05C112.57,-208.06 108.82,-198.33 105.37,-189.4"/>
+<polygon fill="black" stroke="black" points="108.53,-187.86 101.66,-179.79 102,-190.38 108.53,-187.86"/>
+</g>
+<!-- user&#45;&gt;cvd_internal_start -->
+<g id="edge3" class="edge">
+<title>user&#45;&gt;cvd_internal_start</title>
+<path fill="none" stroke="black" d="M125.84,-215.91C127.86,-205.57 130.17,-192.09 131.19,-180 132.53,-164.06 132.53,-159.94 131.19,-144 130.47,-135.5 129.12,-126.31 127.68,-118.01"/>
+<polygon fill="black" stroke="black" points="131.1,-117.29 125.84,-108.09 124.22,-118.56 131.1,-117.29"/>
+</g>
+</g>
+</svg>
diff --git a/host/commands/start/main.cc b/host/commands/start/main.cc
index c11ac67..a7308f3 100644
--- a/host/commands/start/main.cc
+++ b/host/commands/start/main.cc
@@ -13,22 +13,27 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
+#include <fstream>
 #include <iostream>
 #include <sstream>
-#include <fstream>
+#include <unordered_set>
 
-#include <gflags/gflags.h>
+#include <android-base/file.h>
 #include <android-base/logging.h>
+#include <android-base/parseint.h>
+#include <gflags/gflags.h>
 
 #include "common/libs/fs/shared_buf.h"
 #include "common/libs/fs/shared_fd.h"
+#include "common/libs/utils/files.h"
 #include "common/libs/utils/subprocess.h"
+#include "host/commands/assemble_cvd/flags_defaults.h"
 #include "host/commands/start/filesystem_explorer.h"
 #include "host/commands/start/flag_forwarder.h"
 #include "host/libs/config/cuttlefish_config.h"
-#include "host/libs/config/host_tools_version.h"
 #include "host/libs/config/fetcher_config.h"
-
+#include "host/libs/config/host_tools_version.h"
+#include "host/libs/config/instance_nums.h"
 /**
  * If stdin is a tty, that means a user is invoking launch_cvd on the command
  * line and wants automatic file detection for assemble_cvd.
@@ -39,25 +44,46 @@
  * Controllable with a flag for extraordinary scenarios such as running from a
  * daemon which closes its own stdin.
  */
-DEFINE_bool(run_file_discovery, true,
+DEFINE_bool(run_file_discovery, CF_DEFAULTS_RUN_FILE_DISCOVERY,
             "Whether to run file discovery or get input files from stdin.");
-DEFINE_int32(num_instances, 1, "Number of Android guests to launch");
-DEFINE_string(report_anonymous_usage_stats, "", "Report anonymous usage "
-            "statistics for metrics collection and analysis.");
-DEFINE_int32(base_instance_num,
-             cuttlefish::GetInstance(),
-             "The instance number of the device created. When `-num_instances N`"
-             " is used, N instance numbers are claimed starting at this number.");
-DEFINE_string(verbosity, "INFO", "Console logging verbosity. Options are VERBOSE,"
-                                 "DEBUG,INFO,WARNING,ERROR");
-DEFINE_string(file_verbosity, "DEBUG",
+DEFINE_int32(num_instances, CF_DEFAULTS_NUM_INSTANCES,
+             "Number of Android guests to launch");
+DEFINE_string(report_anonymous_usage_stats,
+              CF_DEFAULTS_REPORT_ANONYMOUS_USAGE_STATS,
+              "Report anonymous usage "
+              "statistics for metrics collection and analysis.");
+DEFINE_int32(
+    base_instance_num, CF_DEFAULTS_BASE_INSTANCE_NUM,
+    "The instance number of the device created. When `-num_instances N`"
+    " is used, N instance numbers are claimed starting at this number.");
+DEFINE_string(instance_nums, CF_DEFAULTS_INSTANCE_NUMS,
+              "A comma-separated list of instance numbers "
+              "to use. Mutually exclusive with base_instance_num.");
+DEFINE_string(verbosity, CF_DEFAULTS_VERBOSITY,
+              "Console logging verbosity. Options are VERBOSE,"
+              "DEBUG,INFO,WARNING,ERROR");
+DEFINE_string(file_verbosity, CF_DEFAULTS_FILE_VERBOSITY,
               "Log file logging verbosity. Options are VERBOSE,DEBUG,INFO,"
               "WARNING,ERROR");
+DEFINE_bool(use_overlay, CF_DEFAULTS_USE_OVERLAY,
+            "Capture disk writes an overlay. This is a "
+            "prerequisite for powerwash_cvd or multiple instances.");
 
 namespace {
 
-std::string kAssemblerBin = cuttlefish::HostBinaryPath("assemble_cvd");
-std::string kRunnerBin = cuttlefish::HostBinaryPath("run_cvd");
+std::string SubtoolPath(const std::string& subtool_base) {
+  auto my_own_dir = android::base::GetExecutableDirectory();
+  std::stringstream subtool_path_stream;
+  subtool_path_stream << my_own_dir << "/" << subtool_base;
+  auto subtool_path = subtool_path_stream.str();
+  if (my_own_dir.empty() || !cuttlefish::FileExists(subtool_path)) {
+    return cuttlefish::HostBinaryPath(subtool_base);
+  }
+  return subtool_path;
+}
+
+std::string kAssemblerBin = SubtoolPath("assemble_cvd");
+std::string kRunnerBin = SubtoolPath("run_cvd");
 
 cuttlefish::Subprocess StartAssembler(cuttlefish::SharedFD assembler_stdin,
                                cuttlefish::SharedFD assembler_stdout,
@@ -109,19 +135,31 @@
       }
     }
   }
+
+  std::cout << "===================================================================\n";
+  std::cout << "NOTICE:\n\n";
+  std::cout << "By using this Android Virtual Device, you agree to\n";
+  std::cout << "Google Terms of Service (https://policies.google.com/terms).\n";
+  std::cout << "The Google Privacy Policy (https://policies.google.com/privacy)\n";
+  std::cout << "describes how Google handles information generated as you use\n";
+  std::cout << "Google Services.";
   char ch = !use_metrics.empty() ? tolower(use_metrics.at(0)) : -1;
   if (ch != 'n') {
-    std::cout << "===================================================================\n";
-    std::cout << "NOTICE:\n\n";
-    std::cout << "We collect usage statistics in accordance with our\n"
-                 "Content Licenses (https://source.android.com/setup/start/licenses),\n"
-                 "Contributor License Agreement (https://cla.developers.google.com/),\n"
-                 "Privacy Policy (https://policies.google.com/privacy) and\n"
-                 "Terms of Service (https://policies.google.com/terms).\n";
-    std::cout << "===================================================================\n\n";
     if (use_metrics.empty()) {
-      std::cout << "Do you accept anonymous usage statistics reporting (Y/n)?: ";
+      std::cout << "\n===================================================================\n";
+      std::cout << "Automatically send diagnostic information to Google, such as crash\n";
+      std::cout << "reports and usage data from this Android Virtual Device. You can\n";
+      std::cout << "adjust this permission at any time by running\n";
+      std::cout << "\"launch_cvd -report_anonymous_usage_stats=n\". (Y/n)?:";
+    } else {
+      std::cout << " You can adjust the permission for sending\n";
+      std::cout << "diagnostic information to Google, such as crash reports and usage\n";
+      std::cout << "data from this Android Virtual Device, at any time by running\n";
+      std::cout << "\"launch_cvd -report_anonymous_usage_stats=n\"\n";
+      std::cout << "===================================================================\n\n";
     }
+  } else {
+    std::cout << "\n===================================================================\n\n";
   }
   for (;;) {
     switch (ch) {
@@ -160,6 +198,100 @@
   return true;
 }
 
+// Hash table for all bool flag names
+// Used to find bool flag and convert "flag"/"noflag" to "--flag=value"
+// This is the solution for vectorize bool flags in gFlags
+
+std::unordered_set<std::string> kBoolFlags = {"guest_enforce_security",
+                                              "use_random_serial",
+                                              "use_allocd",
+                                              "use_sdcard",
+                                              "pause_in_bootloader",
+                                              "daemon",
+                                              "enable_minimal_mode",
+                                              "enable_modem_simulator",
+                                              "console",
+                                              "enable_sandbox",
+                                              "restart_subprocesses",
+                                              "enable_gpu_udmabuf",
+                                              "enable_gpu_angle",
+                                              "enable_audio",
+                                              "start_gnss_proxy",
+                                              "enable_bootanimation",
+                                              "record_screen",
+                                              "protected_vm",
+                                              "enable_kernel_log",
+                                              "kgdb",
+                                              "start_webrtc",
+                                              "smt",
+                                              "vhost_net"};
+
+struct BooleanFlag {
+  bool is_bool_flag;
+  bool bool_flag_value;
+  std::string name;
+};
+BooleanFlag IsBoolArg(const std::string& argument) {
+  // Validate format
+  // we only deal with special bool case: -flag, --flag, -noflag, --noflag
+  // and convert to -flag=true, --flag=true, -flag=false, --flag=false
+  // others not in this format just return false
+  std::string_view name = argument;
+  if (!android::base::ConsumePrefix(&name, "-")) {
+    return {false, false, ""};
+  }
+  android::base::ConsumePrefix(&name, "-");
+  std::size_t found = name.find('=');
+  if (found != std::string::npos) {
+    // found "=", --flag=value case, it doesn't need convert
+    return {false, false, ""};
+  }
+
+  // Validate it is part of the set
+  std::string result_name(name);
+  std::string_view new_name = result_name;
+  if (result_name.length() == 0) {
+    return {false, false, ""};
+  }
+  if (kBoolFlags.find(result_name) != kBoolFlags.end()) {
+    // matched -flag, --flag
+    return {true, true, result_name};
+  } else if (android::base::ConsumePrefix(&new_name, "no")) {
+    // 2nd chance to check -noflag, --noflag
+    result_name = new_name;
+    if (kBoolFlags.find(result_name) != kBoolFlags.end()) {
+      // matched -noflag, --noflag
+      return {true, false, result_name};
+    }
+  }
+  // return status
+  return {false, false, ""};
+}
+
+std::string FormatBoolString(const std::string& name_str, bool value) {
+  std::string new_flag = "--" + name_str;
+  if (value) {
+    new_flag += "=true";
+  } else {
+    new_flag += "=false";
+  }
+  return new_flag;
+}
+
+bool OverrideBoolArg(std::vector<std::string>& args) {
+  bool overrided = false;
+  for (int index = 0; index < args.size(); index++) {
+    const std::string curr_arg = args[index];
+    BooleanFlag value = IsBoolArg(curr_arg);
+    if (value.is_bool_flag) {
+      // Override the value
+      args[index] = FormatBoolString(value.name, value.bool_flag_value);
+      overrided = true;
+    }
+  }
+  return overrided;
+}
+
 } // namespace
 
 int main(int argc, char** argv) {
@@ -167,6 +299,15 @@
 
   FlagForwarder forwarder({kAssemblerBin, kRunnerBin});
 
+  // Used to find bool flag and convert "flag"/"noflag" to "--flag=value"
+  // This is the solution for vectorize bool flags in gFlags
+  std::vector<std::string> args(argv + 1, argv + argc);
+  if (OverrideBoolArg(args)) {
+    for (int i = 1; i < argc; i++) {
+      argv[i] = &args[i-1][0]; // args[] start from 0
+    }
+  }
+
   gflags::ParseCommandLineNonHelpFlags(&argc, &argv, false);
 
   forwarder.UpdateFlagDefaults();
@@ -191,8 +332,34 @@
     cuttlefish::SharedFD::Pipe(&assembler_stdin, &launcher_report);
   }
 
-  auto instance_num_str = std::to_string(FLAGS_base_instance_num);
-  setenv("CUTTLEFISH_INSTANCE", instance_num_str.c_str(), /* overwrite */ 1);
+  auto instance_nums =
+      cuttlefish::InstanceNumsCalculator().FromGlobalGflags().Calculate();
+  if (!instance_nums.ok()) {
+    LOG(ERROR) << instance_nums.error().Message();
+    LOG(DEBUG) << instance_nums.error().Trace();
+    abort();
+  }
+
+  if (cuttlefish::CuttlefishConfig::ConfigExists()) {
+    auto previous_config = cuttlefish::CuttlefishConfig::Get();
+    CHECK(previous_config);
+    CHECK(!previous_config->Instances().empty());
+    auto previous_instance = previous_config->Instances()[0];
+    const auto& disks = previous_instance.virtual_disk_paths();
+    auto overlay = previous_instance.PerInstancePath("overlay.img");
+    auto used_overlay =
+        std::find(disks.begin(), disks.end(), overlay) != disks.end();
+    CHECK(used_overlay == FLAGS_use_overlay)
+        << "Cannot transition between different values of --use_overlay "
+        << "(Previous = " << used_overlay << ", current = " << FLAGS_use_overlay
+        << "). To fix this, delete \"" << previous_config->root_dir()
+        << "\" and any image files.";
+  }
+
+  CHECK(!instance_nums->empty()) << "Expected at least one instance";
+  auto instance_num_str = std::to_string(*instance_nums->begin());
+  setenv(cuttlefish::kCuttlefishInstanceEnvVarName, instance_num_str.c_str(),
+         /* overwrite */ 1);
 
 #if defined(__BIONIC__)
   // These environment variables are needed in case when Bionic is used.
@@ -204,9 +371,9 @@
 
   // SharedFDs are std::move-d in to avoid dangling references.
   // Removing the std::move will probably make run_cvd hang as its stdin never closes.
-  auto assemble_proc = StartAssembler(std::move(assembler_stdin),
-                                      std::move(assembler_stdout),
-                                      forwarder.ArgvForSubprocess(kAssemblerBin));
+  auto assemble_proc =
+      StartAssembler(std::move(assembler_stdin), std::move(assembler_stdout),
+                     forwarder.ArgvForSubprocess(kAssemblerBin, args));
 
   if (should_generate_report) {
     WriteFiles(AvailableFilesReport(), std::move(launcher_report));
@@ -228,11 +395,12 @@
   }
 
   std::vector<cuttlefish::Subprocess> runners;
-  for (int i = 0; i < FLAGS_num_instances; i++) {
+  for (const auto& instance_num : *instance_nums) {
     cuttlefish::SharedFD runner_stdin_in, runner_stdin_out;
     cuttlefish::SharedFD::Pipe(&runner_stdin_out, &runner_stdin_in);
-    std::string instance_name = std::to_string(i + FLAGS_base_instance_num);
-    setenv("CUTTLEFISH_INSTANCE", instance_name.c_str(), /* overwrite */ 1);
+    std::string instance_num_str = std::to_string(instance_num);
+    setenv(cuttlefish::kCuttlefishInstanceEnvVarName, instance_num_str.c_str(),
+           /* overwrite */ 1);
 
     auto run_proc = StartRunner(std::move(runner_stdin_out),
                                 forwarder.ArgvForSubprocess(kRunnerBin));
diff --git a/host/commands/status/Android.bp b/host/commands/status/Android.bp
index 4ce0e8c..edde718 100644
--- a/host/commands/status/Android.bp
+++ b/host/commands/status/Android.bp
@@ -24,16 +24,14 @@
         "main.cc",
     ],
     shared_libs: [
-        "libext2_blkid",
         "libbase",
         "libcuttlefish_fs",
         "libcuttlefish_utils",
-        "libfruit",
         "libjsoncpp",
     ],
     static_libs: [
+        "libcuttlefish_command_util",
         "libcuttlefish_host_config",
-        "libcuttlefish_vm_manager",
         "libgflags",
     ],
     defaults: ["cuttlefish_host", "cuttlefish_libicuuc"],
diff --git a/host/commands/status/main.cc b/host/commands/status/main.cc
index a479081..c4ecf53 100644
--- a/host/commands/status/main.cc
+++ b/host/commands/status/main.cc
@@ -14,161 +14,159 @@
  * limitations under the License.
  */
 
-#include <fcntl.h>
-#include <inttypes.h>
-#include <limits.h>
-#include <signal.h>
-#include <stdint.h>
-#include <stdio.h>
-#include <stdlib.h>
-#include <sys/stat.h>
-#include <sys/types.h>
-#include <sys/wait.h>
-#include <unistd.h>
-
-#include <algorithm>
+#include <cstdint>
 #include <cstdlib>
-#include <fstream>
-#include <iomanip>
 #include <iostream>
-#include <memory>
-#include <sstream>
 #include <string>
 #include <vector>
 
 #include <android-base/logging.h>
+#include <gflags/gflags.h>
+#include <json/value.h>
 
 #include "common/libs/fs/shared_fd.h"
-#include "common/libs/fs/shared_select.h"
-#include "common/libs/utils/environment.h"
+#include "common/libs/utils/files.h"
 #include "common/libs/utils/flag_parser.h"
+#include "common/libs/utils/result.h"
 #include "common/libs/utils/tee_logging.h"
 #include "host/commands/run_cvd/runner_defs.h"
+#include "host/libs/command_util/util.h"
 #include "host/libs/config/cuttlefish_config.h"
-#include "host/libs/vm_manager/vm_manager.h"
-
-#define CHECK_PRINT(print, condition, message)                               \
-  if (print) {                                                               \
-    if (!(condition)) {                                                      \
-      std::cout << "      Status: Stopped (" << message << ")" << std::endl; \
-      exit(0);                                                               \
-    }                                                                        \
-  } else                                                                     \
-    CHECK(condition) << message
 
 namespace cuttlefish {
+namespace {
 
-int CvdStatusMain(int argc, char** argv) {
-  ::android::base::InitLogging(argv, android::base::StderrLogger);
-  ::android::base::SetLogger(LogToStderrAndFiles({}));
+struct StatusFlags {
+  std::int32_t wait_for_launcher = 5;
+  std::string instance_name;
+  bool print = false;
+  bool all_instances = false;
+  bool help_xml = false;
+};
 
+Result<StatusFlags> GetFlagValues(int argc, char** argv) {
+  StatusFlags flag_values;
   std::vector<Flag> flags;
-
-  std::int32_t wait_for_launcher;
-  Json::Value device_info;
   flags.emplace_back(
-      GflagsCompatFlag("wait_for_launcher", wait_for_launcher)
+      GflagsCompatFlag("wait_for_launcher", flag_values.wait_for_launcher)
           .Help("How many seconds to wait for the launcher to respond to the "
                 "status command. A value of zero means wait indefinitely"));
-  std::string instance_name;
-  flags.emplace_back(GflagsCompatFlag("instance_name", instance_name)
-                         .Help("Name of the instance to check. If not "
-                               "provided, DefaultInstance is used."));
-  bool print;
-  flags.emplace_back(GflagsCompatFlag("print", print)
+  flags.emplace_back(
+      GflagsCompatFlag("instance_name", flag_values.instance_name)
+          .Help("Name of the instance to check. If not "
+                "provided, DefaultInstance is used."));
+  flags.emplace_back(GflagsCompatFlag("print", flag_values.print)
                          .Help("If provided, prints status and instance config "
                                "information to stdout instead of CHECK"));
-  bool all_instances;
   flags.emplace_back(
-      GflagsCompatFlag("all_instances", all_instances)
+      GflagsCompatFlag("all_instances", flag_values.all_instances)
           .Help("List all instances status and instance config information."));
-
   flags.emplace_back(HelpFlag(flags));
+  flags.emplace_back(HelpXmlFlag(flags, std::cout, flag_values.help_xml));
   flags.emplace_back(UnexpectedArgumentGuard());
 
   std::vector<std::string> args =
       ArgsToVec(argc - 1, argv + 1);  // Skip argv[0]
-  CHECK(ParseFlags(flags, args)) << "Could not process command line flags.";
+  CF_EXPECT(ParseFlags(flags, args), "Could not process command line flags.");
+  return flag_values;
+}
 
-  auto config = CuttlefishConfig::Get();
-  CHECK(config) << "Failed to obtain config object";
+struct WebAccessUrlParam {
+  std::string sig_server_addr;
+  std::string device_name;
+};
+std::string CalcWebAccessUrl(const WebAccessUrlParam& web_access_url_param) {
+  if (!FileIsSocket(web_access_url_param.sig_server_addr)) {
+    return "";
+  }
+  return std::string("https://") + "localhost" + ":" + "1443" + "/devices/" +
+         web_access_url_param.device_name + "/files" + "/client.html";
+}
 
-  auto instance_names = all_instances ? config->instance_names()
-                                      : std::vector<std::string>{instance_name};
+Json::Value PopulateDevicesInfoFromInstance(
+    const CuttlefishConfig& config,
+    const CuttlefishConfig::InstanceSpecific& instance_config) {
+  Json::Value device_info;
+  std::string device_name = instance_config.webrtc_device_id();
+  if (device_name.empty()) {
+    device_name = instance_config.instance_name();
+  }
+  device_info["assembly_dir"] = config.assembly_dir();
+  device_info["instance_name"] = device_name;
+  device_info["instance_dir"] = instance_config.instance_dir();
+  // 1443 is the port of the global webrtc "operator" service
+  device_info["web_access"] =
+      CalcWebAccessUrl({.sig_server_addr = config.sig_server_address(),
+                        .device_name = device_name});
+  device_info["adb_serial"] = instance_config.adb_ip_and_port();
+  device_info["webrtc_port"] = std::to_string(config.sig_server_port());
+  for (int i = 0; i < instance_config.display_configs().size(); i++) {
+    device_info["displays"][i] =
+        std::to_string(instance_config.display_configs()[i].width) + " x " +
+        std::to_string(instance_config.display_configs()[i].height) + " ( " +
+        std::to_string(instance_config.display_configs()[i].dpi) + " )";
+  }
+  device_info["status"] = "Running";
+  return device_info;
+}
+
+Result<void> CvdStatusMain(const StatusFlags& flag_values) {
+  const CuttlefishConfig* config =
+      CF_EXPECT(CuttlefishConfig::Get(), "Failed to obtain config object");
 
   Json::Value devices_info;
+  auto instance_names =
+      flag_values.all_instances
+          ? config->instance_names()
+          : std::vector<std::string>{flag_values.instance_name};
   for (int index = 0; index < instance_names.size(); index++) {
     std::string instance_name = instance_names[index];
-    auto instance = instance_name.empty()
-                        ? config->ForDefaultInstance()
-                        : config->ForInstanceName(instance_name);
-    auto monitor_path = instance.launcher_monitor_socket_path();
-    CHECK_PRINT(print, !monitor_path.empty(),
-                "No path to launcher monitor found");
+    auto instance_config = instance_name.empty()
+                               ? config->ForDefaultInstance()
+                               : config->ForInstanceName(instance_name);
+    SharedFD monitor_socket = CF_EXPECT(GetLauncherMonitorFromInstance(
+        instance_config, flag_values.wait_for_launcher));
 
-    auto monitor_socket = SharedFD::SocketLocalClient(
-        monitor_path.c_str(), false, SOCK_STREAM, wait_for_launcher);
-    CHECK_PRINT(print, monitor_socket->IsOpen(),
-                "Unable to connect to launcher monitor at " + monitor_path +
-                    ": " + monitor_socket->StrError());
+    LOG(INFO) << "Requesting status for instance "
+              << instance_config.instance_name();
+    CF_EXPECT(WriteLauncherAction(monitor_socket, LauncherAction::kStatus));
+    CF_EXPECT(WaitForRead(monitor_socket, flag_values.wait_for_launcher));
+    LauncherResponse status_response =
+        CF_EXPECT(ReadLauncherResponse(monitor_socket));
+    CF_EXPECT(
+        status_response == LauncherResponse::kSuccess,
+        "Received `" << static_cast<char>(status_response)
+                     << "` response from launcher monitor for status request");
 
-    auto request = LauncherAction::kStatus;
-    auto bytes_sent = monitor_socket->Send(&request, sizeof(request), 0);
-    CHECK_PRINT(print, bytes_sent > 0,
-                "Error sending launcher monitor the status command: " +
-                    monitor_socket->StrError());
-
-    // Perform a select with a timeout to guard against launcher hanging
-    SharedFDSet read_set;
-    read_set.Set(monitor_socket);
-    struct timeval timeout = {wait_for_launcher, 0};
-    int selected = Select(&read_set, nullptr, nullptr,
-                          wait_for_launcher <= 0 ? nullptr : &timeout);
-    CHECK_PRINT(
-        print, selected >= 0,
-        std::string("Failed communication with the launcher monitor: ") +
-            strerror(errno));
-    CHECK_PRINT(print, selected > 0,
-                "Timeout expired waiting for launcher monitor to respond");
-
-    LauncherResponse response;
-    auto bytes_recv = monitor_socket->Recv(&response, sizeof(response), 0);
-    CHECK_PRINT(
-        print, bytes_recv > 0,
-        std::string("Error receiving response from launcher monitor: ") +
-            monitor_socket->StrError());
-    CHECK_PRINT(print, response == LauncherResponse::kSuccess,
-                std::string("Received '") + static_cast<char>(response) +
-                    "' response from launcher monitor");
-    if (print) {
-      devices_info[index]["assembly_dir"] = config->assembly_dir();
-      devices_info[index]["instance_name"] = instance.instance_name();
-      devices_info[index]["instance_dir"] = instance.instance_dir();
-      devices_info[index]["web_access"] =
-          "https://" + config->sig_server_address() + ":" +
-          std::to_string(config->sig_server_port()) +
-          "/client.html?deviceId=" + instance.instance_name();
-      devices_info[index]["adb_serial"] = instance.adb_ip_and_port();
-      devices_info[index]["webrtc_port"] =
-          std::to_string(config->sig_server_port());
-      for (int i = 0; i < config->display_configs().size(); i++) {
-        devices_info[index]["displays"][i] =
-            std::to_string(config->display_configs()[i].width) + " x " +
-            std::to_string(config->display_configs()[i].height) + " ( " +
-            std::to_string(config->display_configs()[i].dpi) + " )";
-      }
-      devices_info[index]["status"] = "Running";
-      if (index == (instance_names.size() - 1)) {
-        std::cout << devices_info.toStyledString() << std::endl;
-      }
-    } else {
-      LOG(INFO) << "run_cvd is active.";
-    }
+    devices_info[index] =
+        PopulateDevicesInfoFromInstance(*config, instance_config);
+    LOG(INFO) << "run_cvd is active for instance "
+              << instance_config.instance_name();
   }
-  return 0;
+  if (flag_values.print) {
+    std::cout << devices_info.toStyledString();
+  }
+  return {};
 }
+
+}  // namespace
 }  // namespace cuttlefish
 
 int main(int argc, char** argv) {
-  return cuttlefish::CvdStatusMain(argc, argv);
+  ::android::base::InitLogging(argv, android::base::StderrLogger);
+  cuttlefish::Result<cuttlefish::StatusFlags> flag_result =
+      cuttlefish::GetFlagValues(argc, argv);
+  if (!flag_result.ok()) {
+    LOG(ERROR) << flag_result.error().Message();
+    LOG(DEBUG) << flag_result.error().Trace();
+    return EXIT_FAILURE;
+  }
+
+  auto result = cuttlefish::CvdStatusMain(flag_result.value());
+  if (!result.ok()) {
+    LOG(ERROR) << result.error().Message();
+    LOG(DEBUG) << result.error().Trace();
+    return EXIT_FAILURE;
+  }
+  return EXIT_SUCCESS;
 }
diff --git a/host/commands/stop/Android.bp b/host/commands/stop/Android.bp
index 7d705fd..73c3da5 100644
--- a/host/commands/stop/Android.bp
+++ b/host/commands/stop/Android.bp
@@ -24,17 +24,15 @@
         "main.cc",
     ],
     shared_libs: [
-        "libext2_blkid",
         "libbase",
         "libcuttlefish_fs",
         "libcuttlefish_utils",
         "libcuttlefish_allocd_utils",
-        "libfruit",
         "libjsoncpp",
     ],
     static_libs: [
+        "libcuttlefish_command_util",
         "libcuttlefish_host_config",
-        "libcuttlefish_vm_manager",
         "libgflags",
     ],
     defaults: ["cuttlefish_host", "cuttlefish_libicuuc"],
diff --git a/host/commands/stop/main.cc b/host/commands/stop/main.cc
index 44ed5ec..c617632 100644
--- a/host/commands/stop/main.cc
+++ b/host/commands/stop/main.cc
@@ -14,25 +14,13 @@
  * limitations under the License.
  */
 
-#include <dirent.h>
-#include <inttypes.h>
-#include <limits.h>
-#include <stdio.h>
-#include <stdint.h>
-#include <stdlib.h>
 #include <sys/types.h>
-#include <sys/stat.h>
-#include <sys/wait.h>
-#include <fcntl.h>
-#include <unistd.h>
-#include <signal.h>
 
-#include <algorithm>
+#include <cinttypes>
+#include <csignal>
+#include <cstdio>
 #include <cstdlib>
-#include <fstream>
-#include <iomanip>
-#include <memory>
-#include <sstream>
+#include <iostream>
 #include <string>
 #include <vector>
 
@@ -40,16 +28,14 @@
 #include <android-base/logging.h>
 
 #include "common/libs/fs/shared_fd.h"
-#include "common/libs/fs/shared_select.h"
-#include "common/libs/utils/environment.h"
 #include "common/libs/utils/files.h"
 #include "common/libs/utils/flag_parser.h"
 #include "common/libs/utils/result.h"
 #include "host/commands/run_cvd/runner_defs.h"
 #include "host/libs/allocd/request.h"
 #include "host/libs/allocd/utils.h"
+#include "host/libs/command_util/util.h"
 #include "host/libs/config/cuttlefish_config.h"
-#include "host/libs/vm_manager/vm_manager.h"
 
 namespace cuttlefish {
 namespace {
@@ -76,6 +62,7 @@
   return {
       config.assembly_dir(),
       instance.instance_dir(),
+      instance.instance_uds_dir(),
   };
 }
 
@@ -126,50 +113,33 @@
 }
 
 Result<void> CleanStopInstance(
-    const CuttlefishConfig::InstanceSpecific& instance,
-    std::int32_t wait_for_launcher) {
-  auto monitor_path = instance.launcher_monitor_socket_path();
-  CF_EXPECT(!monitor_path.empty(), "No path to launcher monitor found");
+    const CuttlefishConfig::InstanceSpecific& instance_config,
+    const std::int32_t wait_for_launcher) {
+  SharedFD monitor_socket = CF_EXPECT(
+      GetLauncherMonitorFromInstance(instance_config, wait_for_launcher));
 
-  auto monitor_socket = SharedFD::SocketLocalClient(
-      monitor_path.c_str(), false, SOCK_STREAM, wait_for_launcher);
-  CF_EXPECT(monitor_socket->IsOpen(),
-            "Unable to connect to launcher monitor at "
-                << monitor_path << ": " << monitor_socket->StrError());
+  LOG(INFO) << "Requesting stop";
+  CF_EXPECT(WriteLauncherAction(monitor_socket, LauncherAction::kStop));
+  CF_EXPECT(WaitForRead(monitor_socket, wait_for_launcher));
+  LauncherResponse stop_response =
+      CF_EXPECT(ReadLauncherResponse(monitor_socket));
+  CF_EXPECT(
+      stop_response == LauncherResponse::kSuccess,
+      "Received `" << static_cast<char>(stop_response)
+                   << "` response from launcher monitor for status request");
 
-  auto request = LauncherAction::kStop;
-  auto bytes_sent = monitor_socket->Send(&request, sizeof(request), 0);
-  CF_EXPECT(bytes_sent >= 0, "Error sending launcher monitor the stop command: "
-                                 << monitor_socket->StrError());
-
-  // Perform a select with a timeout to guard against launcher hanging
-  SharedFDSet read_set;
-  read_set.Set(monitor_socket);
-  struct timeval timeout = {wait_for_launcher, 0};
-  int selected = Select(&read_set, nullptr, nullptr,
-                        wait_for_launcher <= 0 ? nullptr : &timeout);
-  CF_EXPECT(selected >= 0, "Failed communication with the launcher monitor: "
-                               << strerror(errno));
-  CF_EXPECT(selected > 0, "Timeout expired waiting for launcher to respond");
-
-  LauncherResponse response;
-  auto bytes_recv = monitor_socket->Recv(&response, sizeof(response), 0);
-  CF_EXPECT(bytes_recv >= 0, "Error receiving response from launcher monitor: "
-                                 << monitor_socket->StrError());
-  CF_EXPECT(response == LauncherResponse::kSuccess,
-            "Received '" << static_cast<char>(response)
-                         << "' response from launcher monitor");
-  LOG(INFO) << "Successfully stopped device " << instance.instance_name()
-            << ": " << instance.adb_ip_and_port();
+  LOG(INFO) << "Successfully stopped device " << instance_config.instance_name()
+            << ": " << instance_config.adb_ip_and_port();
   return {};
 }
 
 int StopInstance(const CuttlefishConfig& config,
                  const CuttlefishConfig::InstanceSpecific& instance,
-                 std::int32_t wait_for_launcher) {
-  auto res = CleanStopInstance(instance, wait_for_launcher);
-  if (!res.ok()) {
-    LOG(ERROR) << "Clean stop failed: " << res.error();
+                 const std::int32_t wait_for_launcher) {
+  auto result = CleanStopInstance(instance, wait_for_launcher);
+  if (!result.ok()) {
+    LOG(ERROR) << "Clean stop failed: " << result.error().Message();
+    LOG(DEBUG) << "Clean stop failed: " << result.error().Trace();
     return FallBackStop(DirsForInstance(config, instance));
   }
   return 0;
@@ -198,34 +168,45 @@
   LOG(INFO) << "Stop Session operation: " << resp["config_status"];
 }
 
-int StopCvdMain(int argc, char** argv) {
-  ::android::base::InitLogging(argv, android::base::StderrLogger);
+struct FlagVaules {
+  std::int32_t wait_for_launcher;
+  bool clear_instance_dirs;
+  bool helpxml;
+};
 
-  std::vector<Flag> flags;
-
+FlagVaules GetFlagValues(int argc, char** argv) {
   std::int32_t wait_for_launcher = 5;
+  bool clear_instance_dirs = false;
+  std::vector<Flag> flags;
   flags.emplace_back(
       GflagsCompatFlag("wait_for_launcher", wait_for_launcher)
           .Help("How many seconds to wait for the launcher to respond to the "
                 "status command. A value of zero means wait indefinitely"));
-  bool clear_instance_dirs;
   flags.emplace_back(
       GflagsCompatFlag("clear_instance_dirs", clear_instance_dirs)
           .Help("If provided, deletes the instance dir after attempting to "
                 "stop each instance."));
   flags.emplace_back(HelpFlag(flags));
+  bool helpxml = false;
+  flags.emplace_back(HelpXmlFlag(flags, std::cout, helpxml));
   flags.emplace_back(UnexpectedArgumentGuard());
   std::vector<std::string> args =
       ArgsToVec(argc - 1, argv + 1);  // Skip argv[0]
-  CHECK(ParseFlags(flags, args)) << "Could not process command line flags.";
+  auto parse_result = ParseFlags(flags, args);
+  CHECK(parse_result || helpxml) << "Could not process command line flags.";
 
+  return {wait_for_launcher, clear_instance_dirs, helpxml};
+}
+
+int StopCvdMain(const std::int32_t wait_for_launcher,
+                const bool clear_instance_dirs) {
   auto config = CuttlefishConfig::Get();
   if (!config) {
     LOG(ERROR) << "Failed to obtain config object";
     return FallBackStop(FallbackDirs());
   }
 
-  int ret = 0;
+  int exit_code = 0;
   for (const auto& instance : config->Instances()) {
     auto session_id = instance.session_id();
     int exit_status = StopInstance(*config, instance, wait_for_launcher);
@@ -238,26 +219,38 @@
                    << kDefaultLocation << ": "
                    << allocd_sock->StrError();
       }
-
       ReleaseAllocdResources(allocd_sock, session_id);
     }
-    if (clear_instance_dirs) {
-      if (DirectoryExists(instance.instance_dir())) {
-        LOG(INFO) << "Deleting instance dir " << instance.instance_dir();
-        if (!RecursivelyRemoveDirectory(instance.instance_dir())) {
-          LOG(ERROR) << "Unable to rmdir " << instance.instance_dir();
-        }
+
+    if (clear_instance_dirs && DirectoryExists(instance.instance_dir())) {
+      LOG(INFO) << "Deleting instance dir " << instance.instance_dir();
+      if (!RecursivelyRemoveDirectory(instance.instance_dir())) {
+        LOG(ERROR) << "Unable to rmdir " << instance.instance_dir();
       }
     }
-    ret |= exit_status;
+    exit_code |= exit_status;
   }
-
-  return ret;
+  return exit_code;
 }
 
 } // namespace
 } // namespace cuttlefish
 
 int main(int argc, char** argv) {
-  return cuttlefish::StopCvdMain(argc, argv);
+  ::android::base::InitLogging(argv, android::base::StderrLogger);
+
+  const auto [wait_for_launcher, clear_instance_dirs, helpxml] =
+      cuttlefish::GetFlagValues(argc, argv);
+
+  if (helpxml) {
+    /*
+     * b/269925398
+     *
+     * CHECK(false) should not be executed if --helpxml is given.
+     * The return code does not have to be the same. It is good if
+     * CHECK(false) and --helpxml return the same return code.
+     */
+    return 134;
+  }
+  return cuttlefish::StopCvdMain(wait_for_launcher, clear_instance_dirs);
 }
diff --git a/host/commands/tcp_connector/Android.bp b/host/commands/tcp_connector/Android.bp
new file mode 100644
index 0000000..a3676bc
--- /dev/null
+++ b/host/commands/tcp_connector/Android.bp
@@ -0,0 +1,39 @@
+//
+// Copyright (C) 2021 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+cc_binary {
+    name: "tcp_connector",
+    srcs: [
+        "main.cpp",
+    ],
+    shared_libs: [
+        "libext2_blkid",
+        "libbase",
+        "libcuttlefish_fs",
+        "libjsoncpp",
+        "liblog",
+        "libcuttlefish_utils",
+    ],
+    static_libs: [
+        "libcuttlefish_host_config",
+        "libgflags",
+    ],
+    defaults: ["cuttlefish_buildhost_only"]
+}
diff --git a/host/commands/tcp_connector/OWNERS b/host/commands/tcp_connector/OWNERS
new file mode 100644
index 0000000..8a7b21c
--- /dev/null
+++ b/host/commands/tcp_connector/OWNERS
@@ -0,0 +1,4 @@
+include device/google/cuttlefish:/OWNERS
+include platform/packages/modules/Bluetooth:/OWNERS
+include platform/packages/modules/Uwb:/OWNERS
+jeongik@google.com
\ No newline at end of file
diff --git a/host/commands/tcp_connector/main.cpp b/host/commands/tcp_connector/main.cpp
new file mode 100644
index 0000000..2eb46a5
--- /dev/null
+++ b/host/commands/tcp_connector/main.cpp
@@ -0,0 +1,92 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <fcntl.h>
+#include <poll.h>
+#include <unistd.h>
+#include <ios>
+#include <mutex>
+
+#include <android-base/logging.h>
+#include <gflags/gflags.h>
+#include <thread>
+
+#include "common/libs/fs/shared_buf.h"
+#include "common/libs/fs/shared_fd.h"
+#include "host/libs/config/cuttlefish_config.h"
+#include "host/libs/config/logging.h"
+
+DEFINE_int32(fifo_in, -1, "A pipe for incoming communication");
+DEFINE_int32(fifo_out, -1, "A pipe for outgoing communication");
+DEFINE_int32(data_port, -1, "A port for datas");
+DEFINE_int32(buffer_size, -1, "The buffer size");
+
+void openSocket(cuttlefish::SharedFD* fd, int port) {
+  static std::mutex mutex;
+  std::unique_lock<std::mutex> lock(mutex);
+  *fd = cuttlefish::SharedFD::SocketLocalClient(port, SOCK_STREAM);
+}
+
+int main(int argc, char** argv) {
+  cuttlefish::DefaultSubprocessLogging(argv);
+  gflags::ParseCommandLineFlags(&argc, &argv, true);
+  auto fifo_in = cuttlefish::SharedFD::Dup(FLAGS_fifo_in);
+  if (!fifo_in->IsOpen()) {
+    LOG(ERROR) << "Error dupping fd " << FLAGS_fifo_in << ": "
+               << fifo_in->StrError();
+    return 1;
+  }
+  close(FLAGS_fifo_in);
+
+  auto fifo_out = cuttlefish::SharedFD::Dup(FLAGS_fifo_out);
+  if (!fifo_out->IsOpen()) {
+    LOG(ERROR) << "Error dupping fd " << FLAGS_fifo_out << ": "
+               << fifo_out->StrError();
+    return 1;
+  }
+  close(FLAGS_fifo_out);
+  cuttlefish::SharedFD sock;
+  openSocket(&sock, FLAGS_data_port);
+
+  auto guest_to_host = std::thread([&]() {
+    while (true) {
+      char buf[FLAGS_buffer_size];
+      auto read = fifo_in->Read(buf, sizeof(buf));
+      while (cuttlefish::WriteAll(sock, buf, read) == -1) {
+        LOG(ERROR) << "failed to write to socket, retry.";
+        // Wait for the host process to be ready
+        sleep(1);
+        openSocket(&sock, FLAGS_data_port);
+      }
+    }
+  });
+
+  auto host_to_guest = std::thread([&]() {
+    while (true) {
+      char buf[FLAGS_buffer_size];
+      auto read = sock->Read(buf, sizeof(buf));
+      if (read == -1) {
+        LOG(ERROR) << "failed to read from socket, retry.";
+        // Wait for the host process to be ready
+        sleep(1);
+        openSocket(&sock, FLAGS_data_port);
+        continue;
+      }
+      cuttlefish::WriteAll(fifo_out, buf, read);
+    }
+  });
+  guest_to_host.join();
+  host_to_guest.join();
+}
diff --git a/host/commands/test_gce_driver/cvd_test_gce_driver.cpp b/host/commands/test_gce_driver/cvd_test_gce_driver.cpp
index d037478..b3db89b 100644
--- a/host/commands/test_gce_driver/cvd_test_gce_driver.cpp
+++ b/host/commands/test_gce_driver/cvd_test_gce_driver.cpp
@@ -22,9 +22,11 @@
 #include <iomanip>
 #include <iostream>
 #include <iterator>
+#include <memory>
 #include <optional>
 #include <string>
 #include <unordered_map>
+#include <utility>
 
 #include <android-base/logging.h>
 #include <android-base/result.h>
@@ -45,8 +47,7 @@
 #include "host/commands/test_gce_driver/scoped_instance.h"
 #include "host/libs/web/build_api.h"
 #include "host/libs/web/credential_source.h"
-#include "host/libs/web/curl_wrapper.h"
-#include "host/libs/web/install_zip.h"
+#include "host/libs/web/http_client/http_client.h"
 
 #include "test_gce_driver.pb.h"
 
@@ -89,7 +90,7 @@
       if (clean_eof) {
         return {};
       } else if (!parsed) {
-        return Error() << "Failed to parse input message";
+        return CF_ERR("Failed to parse input message");
       }
       Result<void> handler_result;
       switch (msg.contents_case()) {
@@ -97,7 +98,7 @@
           test_gce_driver::TestMessage stream_end_msg;
           stream_end_msg.mutable_exit();  // Set this in the oneof
           if (!SerializeDelimitedToFileDescriptor(stream_end_msg, out_)) {
-            return Error() << "Failure while writing stream end message";
+            return CF_ERR("Failure while writing stream end message");
           }
           return {};
         }
@@ -118,19 +119,19 @@
         default: {
           std::string msg_txt;
           if (google::protobuf::TextFormat::PrintToString(msg, &msg_txt)) {
-            handler_result = Error()
-                             << "Unexpected message: \"" << msg_txt << "\"";
+            handler_result =
+                CF_ERR("Unexpected message: \"" << msg_txt << "\"");
           } else {
-            handler_result = Error() << "Unexpected message: (unprintable)";
+            handler_result = CF_ERR("Unexpected message: (unprintable)");
           }
         }
       }
       if (!handler_result.ok()) {
         test_gce_driver::TestMessage error_msg;
-        error_msg.mutable_error()->set_text(handler_result.error().message());
+        error_msg.mutable_error()->set_text(handler_result.error().Trace());
         CF_EXPECT(SerializeDelimitedToFileDescriptor(error_msg, out_),
                   "Failure while writing error message: (\n"
-                      << handler_result.error() << "\n)");
+                      << handler_result.error().Trace() << "\n)");
       }
       test_gce_driver::TestMessage stream_end_msg;
       stream_end_msg.mutable_stream_end();  // Set this in the oneof
@@ -242,7 +243,8 @@
       if (data == nullptr) {
         auto ssh = callback_state.instance->Ssh();
         if (!ssh.ok()) {
-          callback_state.result = CF_ERR("ssh command failed\n" << ssh.error());
+          callback_state.result = CF_ERR("ssh command failed\n"
+                                         << ssh.error().Trace());
           return false;
         }
 
@@ -271,7 +273,7 @@
         "Failed to send file: (\n"
             << (callback_state.result.ok()
                     ? "Unknown failure"
-                    : callback_state.result.error().message() + "\n)"));
+                    : callback_state.result.error().Trace() + "\n)"));
 
     callback_state.ssh_in->Close();
 
@@ -381,7 +383,7 @@
 
   static constexpr char COMPUTE_SCOPE[] =
       "https://www.googleapis.com/auth/compute";
-  auto curl = CurlWrapper::Create();
+  auto curl = HttpClient::CurlClient();
   auto gce_creds = CF_EXPECT(ServiceAccountOauthCredentialSource::FromJson(
       *curl, service_json, COMPUTE_SCOPE));
 
@@ -389,10 +391,11 @@
 
   static constexpr char BUILD_SCOPE[] =
       "https://www.googleapis.com/auth/androidbuild.internal";
-  auto build_creds = CF_EXPECT(ServiceAccountOauthCredentialSource::FromJson(
-      *curl, service_json, BUILD_SCOPE));
+  auto build_creds = std::make_unique<ServiceAccountOauthCredentialSource>(
+      CF_EXPECT(ServiceAccountOauthCredentialSource::FromJson(
+          *curl, service_json, BUILD_SCOPE)));
 
-  BuildApi build(*curl, &build_creds);
+  BuildApi build(std::move(curl), std::move(build_creds));
 
   ReadEvalPrintLoop executor(gce, build, STDIN_FILENO, STDOUT_FILENO,
                              internal_addresses);
@@ -406,5 +409,9 @@
 
 int main(int argc, char** argv) {
   auto res = cuttlefish::TestGceDriverMain(argc, argv);
-  CHECK(res.ok()) << "cvd_test_gce_driver failed: " << res.error();
+  if (res.ok()) {
+    return 0;
+  }
+  LOG(ERROR) << "cvd_test_gce_driver failed: " << res.error().Message();
+  LOG(DEBUG) << "cvd_test_gce_driver failed: " << res.error().Trace();
 }
diff --git a/host/commands/test_gce_driver/gce_api.cpp b/host/commands/test_gce_driver/gce_api.cpp
index 4922c59..9f67a6f 100644
--- a/host/commands/test_gce_driver/gce_api.cpp
+++ b/host/commands/test_gce_driver/gce_api.cpp
@@ -24,10 +24,6 @@
 #include <android-base/strings.h>
 
 #include "host/libs/web/credential_source.h"
-#include "host/libs/web/curl_wrapper.h"
-
-using android::base::Error;
-using android::base::Result;
 
 namespace cuttlefish {
 
@@ -130,10 +126,11 @@
 GceNetworkInterface::GceNetworkInterface(const Json::Value& data)
     : data_(data) {}
 
+constexpr char kNetwork[] = "network";
 constexpr char kGceNetworkAccessConfigs[] = "accessConfigs";
 GceNetworkInterface GceNetworkInterface::Default() {
   Json::Value json{Json::ValueType::objectValue};
-  json["network"] = "global/networks/default";
+  json[kNetwork] = "global/networks/default";
   Json::Value accessConfig{Json::ValueType::objectValue};
   accessConfig["type"] = "ONE_TO_ONE_NAT";
   accessConfig["name"] = "External NAT";
@@ -141,6 +138,35 @@
   return GceNetworkInterface(json);
 }
 
+std::optional<std::string> GceNetworkInterface::Network() const {
+  return OptStringMember(data_, kNetwork);
+}
+GceNetworkInterface& GceNetworkInterface::Network(
+    const std::string& network) & {
+  data_[kNetwork] = network;
+  return *this;
+}
+GceNetworkInterface GceNetworkInterface::Network(
+    const std::string& network) && {
+  data_[kNetwork] = network;
+  return *this;
+}
+
+constexpr char kSubnetwork[] = "subnetwork";
+std::optional<std::string> GceNetworkInterface::Subnetwork() const {
+  return OptStringMember(data_, kSubnetwork);
+}
+GceNetworkInterface& GceNetworkInterface::Subnetwork(
+    const std::string& subnetwork) & {
+  data_[kSubnetwork] = subnetwork;
+  return *this;
+}
+GceNetworkInterface GceNetworkInterface::Subnetwork(
+    const std::string& subnetwork) && {
+  data_[kSubnetwork] = subnetwork;
+  return *this;
+}
+
 constexpr char kGceNetworkExternalIp[] = "natIP";
 std::optional<std::string> GceNetworkInterface::ExternalIp() const {
   auto accessConfigs = OptArrayMember(data_, kGceNetworkAccessConfigs);
@@ -282,15 +308,15 @@
 
 const Json::Value& GceInstanceInfo::AsJson() const { return data_; }
 
-GceApi::GceApi(CurlWrapper& curl, CredentialSource& credentials,
+GceApi::GceApi(HttpClient& http_client, CredentialSource& credentials,
                const std::string& project)
-    : curl_(curl), credentials_(credentials), project_(project) {}
+    : http_client_(http_client), credentials_(credentials), project_(project) {}
 
-std::vector<std::string> GceApi::Headers() {
-  return {
-      "Authorization:Bearer " + credentials_.Credential(),
+Result<std::vector<std::string>> GceApi::Headers() {
+  return {{
+      "Authorization:Bearer " + CF_EXPECT(credentials_.Credential()),
       "Content-Type: application/json",
-  };
+  }};
 }
 
 class GceApi::Operation::Impl {
@@ -301,22 +327,20 @@
   }
 
   Result<bool> Run() {
-    auto initial_response = initial_request_();
-    if (!initial_response.ok()) {
-      return Error() << "Initial request failed: " << initial_response.error();
-    }
+    auto initial_response =
+        CF_EXPECT(initial_request_(), "Initial request failed: ");
 
-    auto url = OptStringMember(*initial_response, "selfLink");
+    auto url = OptStringMember(initial_response, "selfLink");
     if (!url) {
-      return Error() << "Operation " << *initial_response
-                     << " was missing `selfLink` field.";
+      return CF_ERR("Operation " << initial_response
+                                 << " was missing `selfLink` field.");
     }
     url = *url + "/wait";
     running_ = true;
 
     while (running_) {
-      auto response =
-          gce_api_.curl_.PostToJson(*url, std::string{""}, gce_api_.Headers());
+      auto response = CF_EXPECT(gce_api_.http_client_.PostToJson(
+          *url, std::string{""}, CF_EXPECT(gce_api_.Headers())));
       const auto& json = response.data;
       Json::Value errors;
       if (auto j_error = OptObjMember(json, "error"); j_error) {
@@ -331,13 +355,12 @@
       LOG(DEBUG) << "Requested operation status at \"" << *url
                  << "\", received " << json;
       if (!response.HttpSuccess() || errors != Json::Value()) {
-        return Error() << "Error accessing \"" << *url
-                       << "\". Errors: " << errors
-                       << ", Warnings: " << warnings;
+        return CF_ERR("Error accessing \"" << *url << "\". Errors: " << errors
+                                           << ", Warnings: " << warnings);
       }
       if (!json.isMember("status") ||
           json["status"].type() != Json::ValueType::stringValue) {
-        return Error() << json << " \"status\" field invalid";
+        return CF_ERR(json << " \"status\" field invalid");
       }
       if (json["status"] == "DONE") {
         return true;
@@ -388,14 +411,14 @@
   auto name = instance.Name();
   if (!name) {
     auto task = [json = instance.AsJson()]() -> Result<GceInstanceInfo> {
-      return Error() << "Missing a name for \"" << json << "\"";
+      return CF_ERR("Missing a name for \"" << json << "\"");
     };
     return std::async(std::launch::deferred, task);
   }
   auto zone = instance.Zone();
   if (!zone) {
     auto task = [json = instance.AsJson()]() -> Result<GceInstanceInfo> {
-      return Error() << "Missing a zone for \"" << json << "\"";
+      return CF_ERR("Missing a zone for \"" << json << "\"");
     };
     return std::async(std::launch::deferred, task);
   }
@@ -406,14 +429,15 @@
                                                  const std::string& name) {
   std::stringstream url;
   url << "https://compute.googleapis.com/compute/v1";
-  url << "/projects/" << curl_.UrlEscape(project_);
-  url << "/zones/" << curl_.UrlEscape(SanitizeZone(zone));
-  url << "/instances/" << curl_.UrlEscape(name);
+  url << "/projects/" << http_client_.UrlEscape(project_);
+  url << "/zones/" << http_client_.UrlEscape(SanitizeZone(zone));
+  url << "/instances/" << http_client_.UrlEscape(name);
   auto task = [this, url = url.str()]() -> Result<GceInstanceInfo> {
-    auto response = curl_.DownloadToJson(url, Headers());
+    auto response =
+        CF_EXPECT(http_client_.DownloadToJson(url, CF_EXPECT(Headers())));
     if (!response.HttpSuccess()) {
-      return Error() << "Failed to get instance info, received "
-                     << response.data << " with code " << response.http_code;
+      return CF_ERR("Failed to get instance info, received "
+                    << response.data << " with code " << response.http_code);
     }
     return GceInstanceInfo(response.data);
   };
@@ -424,7 +448,7 @@
   if (!request.isMember("zone") ||
       request["zone"].type() != Json::ValueType::stringValue) {
     auto task = [request]() -> Result<Json::Value> {
-      return Error() << "Missing a zone for \"" << request << "\"";
+      return CF_ERR("Missing a zone for \"" << request << "\"");
     };
     return Operation(
         std::unique_ptr<Operation::Impl>(new Operation::Impl(*this, task)));
@@ -434,15 +458,16 @@
   requestNoZone.removeMember("zone");
   std::stringstream url;
   url << "https://compute.googleapis.com/compute/v1";
-  url << "/projects/" << curl_.UrlEscape(project_);
-  url << "/zones/" << curl_.UrlEscape(SanitizeZone(zone));
+  url << "/projects/" << http_client_.UrlEscape(project_);
+  url << "/zones/" << http_client_.UrlEscape(SanitizeZone(zone));
   url << "/instances";
   url << "?requestId=" << RandomUuid();  // Avoid duplication on request retry
   auto task = [this, requestNoZone, url = url.str()]() -> Result<Json::Value> {
-    auto response = curl_.PostToJson(url, requestNoZone, Headers());
+    auto response = CF_EXPECT(
+        http_client_.PostToJson(url, requestNoZone, CF_EXPECT(Headers())));
     if (!response.HttpSuccess()) {
-      return Error() << "Failed to create instance: " << response.data
-                     << ". Sent request " << requestNoZone;
+      return CF_ERR("Failed to create instance: "
+                    << response.data << ". Sent request " << requestNoZone);
     }
     return response.data;
   };
@@ -458,15 +483,16 @@
                                 const std::string& name) {
   std::stringstream url;
   url << "https://compute.googleapis.com/compute/v1";
-  url << "/projects/" << curl_.UrlEscape(project_);
-  url << "/zones/" << curl_.UrlEscape(SanitizeZone(zone));
-  url << "/instances/" << curl_.UrlEscape(name);
+  url << "/projects/" << http_client_.UrlEscape(project_);
+  url << "/zones/" << http_client_.UrlEscape(SanitizeZone(zone));
+  url << "/instances/" << http_client_.UrlEscape(name);
   url << "/reset";
   url << "?requestId=" << RandomUuid();  // Avoid duplication on request retry
   auto task = [this, url = url.str()]() -> Result<Json::Value> {
-    auto response = curl_.PostToJson(url, Json::Value(), Headers());
+    auto response = CF_EXPECT(
+        http_client_.PostToJson(url, Json::Value(), CF_EXPECT(Headers())));
     if (!response.HttpSuccess()) {
-      return Error() << "Failed to create instance: " << response.data;
+      return CF_ERR("Failed to create instance: " << response.data);
     }
     return response.data;
   };
@@ -478,7 +504,7 @@
   auto name = instance.Name();
   if (!name) {
     auto task = [json = instance.AsJson()]() -> Result<Json::Value> {
-      return Error() << "Missing a name for \"" << json << "\"";
+      return CF_ERR("Missing a name for \"" << json << "\"");
     };
     return Operation(
         std::unique_ptr<Operation::Impl>(new Operation::Impl(*this, task)));
@@ -486,7 +512,7 @@
   auto zone = instance.Zone();
   if (!zone) {
     auto task = [json = instance.AsJson()]() -> Result<Json::Value> {
-      return Error() << "Missing a zone for \"" << json << "\"";
+      return CF_ERR("Missing a zone for \"" << json << "\"");
     };
     return Operation(
         std::unique_ptr<Operation::Impl>(new Operation::Impl(*this, task)));
@@ -498,14 +524,15 @@
                                  const std::string& name) {
   std::stringstream url;
   url << "https://compute.googleapis.com/compute/v1";
-  url << "/projects/" << curl_.UrlEscape(project_);
-  url << "/zones/" << curl_.UrlEscape(SanitizeZone(zone));
-  url << "/instances/" << curl_.UrlEscape(name);
+  url << "/projects/" << http_client_.UrlEscape(project_);
+  url << "/zones/" << http_client_.UrlEscape(SanitizeZone(zone));
+  url << "/instances/" << http_client_.UrlEscape(name);
   url << "?requestId=" << RandomUuid();  // Avoid duplication on request retry
   auto task = [this, url = url.str()]() -> Result<Json::Value> {
-    auto response = curl_.DeleteToJson(url, Headers());
+    auto response =
+        CF_EXPECT(http_client_.DeleteToJson(url, CF_EXPECT(Headers())));
     if (!response.HttpSuccess()) {
-      return Error() << "Failed to delete instance: " << response.data;
+      return CF_ERR("Failed to delete instance: " << response.data);
     }
     return response.data;
   };
@@ -517,7 +544,7 @@
   auto name = instance.Name();
   if (!name) {
     auto task = [json = instance.AsJson()]() -> Result<Json::Value> {
-      return Error() << "Missing a name for \"" << json << "\"";
+      return CF_ERR("Missing a name for \"" << json << "\"");
     };
     return Operation(
         std::unique_ptr<Operation::Impl>(new Operation::Impl(*this, task)));
@@ -525,7 +552,7 @@
   auto zone = instance.Zone();
   if (!zone) {
     auto task = [json = instance.AsJson()]() -> Result<Json::Value> {
-      return Error() << "Missing a zone for \"" << json << "\"";
+      return CF_ERR("Missing a zone for \"" << json << "\"");
     };
     return Operation(
         std::unique_ptr<Operation::Impl>(new Operation::Impl(*this, task)));
diff --git a/host/commands/test_gce_driver/gce_api.h b/host/commands/test_gce_driver/gce_api.h
index e4d605f..6aead4c 100644
--- a/host/commands/test_gce_driver/gce_api.h
+++ b/host/commands/test_gce_driver/gce_api.h
@@ -19,11 +19,11 @@
 #include <optional>
 #include <string>
 
-#include <android-base/result.h>
 #include <json/json.h>
 
+#include "common/libs/utils/result.h"
 #include "host/libs/web/credential_source.h"
-#include "host/libs/web/curl_wrapper.h"
+#include "host/libs/web/http_client/http_client.h"
 
 namespace cuttlefish {
 
@@ -58,6 +58,14 @@
 
   static GceNetworkInterface Default();
 
+  std::optional<std::string> Network() const;
+  GceNetworkInterface& Network(const std::string&) &;
+  GceNetworkInterface Network(const std::string&) &&;
+
+  std::optional<std::string> Subnetwork() const;
+  GceNetworkInterface& Subnetwork(const std::string&) &;
+  GceNetworkInterface Subnetwork(const std::string&) &&;
+
   std::optional<std::string> ExternalIp() const;
   std::optional<std::string> InternalIp() const;
 
@@ -110,7 +118,7 @@
     ~Operation();
     void StopWaiting();
     /// `true` means it waited to completion, `false` means it was cancelled
-    std::future<android::base::Result<bool>>& Future();
+    std::future<Result<bool>>& Future();
 
    private:
     class Impl;
@@ -119,13 +127,12 @@
     friend class GceApi;
   };
 
-  GceApi(CurlWrapper&, CredentialSource& credentials,
+  GceApi(HttpClient&, CredentialSource& credentials,
          const std::string& project);
 
-  std::future<android::base::Result<GceInstanceInfo>> Get(
-      const GceInstanceInfo&);
-  std::future<android::base::Result<GceInstanceInfo>> Get(
-      const std::string& zone, const std::string& name);
+  std::future<Result<GceInstanceInfo>> Get(const GceInstanceInfo&);
+  std::future<Result<GceInstanceInfo>> Get(const std::string& zone,
+                                           const std::string& name);
 
   Operation Insert(const Json::Value&);
   Operation Insert(const GceInstanceInfo&);
@@ -137,9 +144,9 @@
   Operation Delete(const GceInstanceInfo&);
 
  private:
-  std::vector<std::string> Headers();
+  Result<std::vector<std::string>> Headers();
 
-  CurlWrapper& curl_;
+  HttpClient& http_client_;
   CredentialSource& credentials_;
   std::string project_;
 };
diff --git a/host/commands/test_gce_driver/key_pair.cpp b/host/commands/test_gce_driver/key_pair.cpp
index 5435f50..38a5884 100644
--- a/host/commands/test_gce_driver/key_pair.cpp
+++ b/host/commands/test_gce_driver/key_pair.cpp
@@ -24,13 +24,10 @@
 #include <string>
 
 #include <android-base/logging.h>
-#include <android-base/result.h>
 
+#include "common/libs/utils/result.h"
 #include "common/libs/utils/subprocess.h"
 
-using android::base::Error;
-using android::base::Result;
-
 namespace cuttlefish {
 
 static int SslRecordErrCallback(const char* str, size_t len, void* data) {
@@ -55,21 +52,21 @@
     std::string error;
     if (!ctx) {
       ERR_print_errors_cb(SslRecordErrCallback, &error);
-      return Error() << "EVP_PKEY_CTX_new_id failed: " << error;
+      return CF_ERR("EVP_PKEY_CTX_new_id failed: " << error);
     }
     if (EVP_PKEY_keygen_init(ctx.get()) <= 0) {
       ERR_print_errors_cb(SslRecordErrCallback, &error);
-      return Error() << "EVP_PKEY_keygen_init failed: " << error;
+      return CF_ERR("EVP_PKEY_keygen_init failed: " << error);
     }
     if (EVP_PKEY_CTX_set_rsa_keygen_bits(ctx.get(), bytes) <= 0) {
       ERR_print_errors_cb(SslRecordErrCallback, &error);
-      return Error() << "EVP_PKEY_CTX_set_rsa_keygen_bits failed: " << error;
+      return CF_ERR("EVP_PKEY_CTX_set_rsa_keygen_bits failed: " << error);
     }
 
     EVP_PKEY* pkey = nullptr;
     if (EVP_PKEY_keygen(ctx.get(), &pkey) <= 0) {
       ERR_print_errors_cb(SslRecordErrCallback, &error);
-      return Error() << "EVP_PKEY_keygen failed: " << error;
+      return CF_ERR("EVP_PKEY_keygen failed: " << error);
     }
     return std::unique_ptr<KeyPair>{new BoringSslKeyPair(pkey)};
   }
@@ -79,18 +76,18 @@
     std::string error;
     if (!bo) {
       ERR_print_errors_cb(SslRecordErrCallback, &error);
-      return Error() << "BIO_new failed: " << error;
+      return CF_ERR("BIO_new failed: " << error);
     }
     if (!PEM_write_bio_PrivateKey(bo.get(), pkey_.get(), NULL, NULL, 0, 0,
                                   NULL)) {
       ERR_print_errors_cb(SslRecordErrCallback, &error);
-      return Error() << "PEM_write_bio_PrivateKey failed: " << error;
+      return CF_ERR("PEM_write_bio_PrivateKey failed: " << error);
     }
     std::string priv(BIO_pending(bo.get()), ' ');
     auto written = BIO_read(bo.get(), priv.data(), priv.size());
     if (written != priv.size()) {
-      return Error() << "Unexpected amount of data written: " << written
-                     << " != " << priv.size();
+      return CF_ERR("Unexpected amount of data written: " << written << " != "
+                                                          << priv.size());
     }
     return priv;
   }
@@ -100,18 +97,18 @@
     std::string error;
     if (!bo) {
       ERR_print_errors_cb(SslRecordErrCallback, &error);
-      return Error() << "BIO_new failed: " << error;
+      return CF_ERR("BIO_new failed: " << error);
     }
     if (!PEM_write_bio_PUBKEY(bo.get(), pkey_.get())) {
       ERR_print_errors_cb(SslRecordErrCallback, &error);
-      return Error() << "PEM_write_bio_PUBKEY failed: " << error;
+      return CF_ERR("PEM_write_bio_PUBKEY failed: " << error);
     }
 
     std::string priv(BIO_pending(bo.get()), ' ');
     auto written = BIO_read(bo.get(), priv.data(), priv.size());
     if (written != priv.size()) {
-      return Error() << "Unexpected amount of data written: " << written
-                     << " != " << priv.size();
+      return CF_ERR("Unexpected amount of data written: " << written << " != "
+                                                          << priv.size());
     }
     return priv;
   }
@@ -122,14 +119,11 @@
    * to convert the BoringSSL-generated RSA key without touching the filesystem.
    */
   Result<std::string> OpenSshPublicKey() const override {
-    auto pem_pubkey = PemPublicKey();
-    if (!pem_pubkey.ok()) {
-      return Error() << "Failed to get pem public key: " << pem_pubkey.error();
-    }
-    auto fd = SharedFD::MemfdCreateWithData("", *pem_pubkey);
-    if (!fd->IsOpen()) {
-      return Error() << "Could not create pubkey memfd: " << fd->StrError();
-    }
+    auto pem_pubkey =
+        CF_EXPECT(PemPublicKey(), "Failed to get pem public key: ");
+    auto fd = SharedFD::MemfdCreateWithData("", pem_pubkey);
+    CF_EXPECT(fd->IsOpen(),
+              "Could not create pubkey memfd: " << fd->StrError());
     Command cmd("/usr/bin/ssh-keygen");
     cmd.AddParameter("-i");
     cmd.AddParameter("-f");
@@ -139,11 +133,9 @@
     cmd.AddParameter("PKCS8");
     std::string out;
     std::string err;
-    auto ret = RunWithManagedStdio(std::move(cmd), nullptr, &out, &err);
-    if (ret != 0) {
-      return Error() << "Could not convert pem key to openssh key. "
-                     << "stdout=\"" << out << "\", stderr=\"" << err << "\"";
-    }
+    CF_EXPECT(RunWithManagedStdio(std::move(cmd), nullptr, &out, &err) == 0,
+              "Could not convert pem key to openssh key. "
+                  << "stdout=\"" << out << "\", stderr=\"" << err << "\"");
     return out;
   }
 
diff --git a/host/commands/test_gce_driver/key_pair.h b/host/commands/test_gce_driver/key_pair.h
index 570a7e6..ecc073f 100644
--- a/host/commands/test_gce_driver/key_pair.h
+++ b/host/commands/test_gce_driver/key_pair.h
@@ -18,19 +18,18 @@
 #include <memory>
 #include <string>
 
-#include <android-base/result.h>
+#include "common/libs/utils/result.h"
 
 namespace cuttlefish {
 
 struct KeyPair {
  public:
-  static android::base::Result<std::unique_ptr<KeyPair>> CreateRsa(
-      size_t bytes);
+  static Result<std::unique_ptr<KeyPair>> CreateRsa(size_t bytes);
   virtual ~KeyPair() = default;
 
-  virtual android::base::Result<std::string> PemPrivateKey() const = 0;
-  virtual android::base::Result<std::string> PemPublicKey() const = 0;
-  virtual android::base::Result<std::string> OpenSshPublicKey() const = 0;
+  virtual Result<std::string> PemPrivateKey() const = 0;
+  virtual Result<std::string> PemPublicKey() const = 0;
+  virtual Result<std::string> OpenSshPublicKey() const = 0;
 };
 
 };  // namespace cuttlefish
diff --git a/host/commands/test_gce_driver/scoped_instance.cpp b/host/commands/test_gce_driver/scoped_instance.cpp
index 3c13d91..f570974 100644
--- a/host/commands/test_gce_driver/scoped_instance.cpp
+++ b/host/commands/test_gce_driver/scoped_instance.cpp
@@ -22,12 +22,11 @@
 #include <string>
 
 #include <android-base/file.h>
-#include <android-base/result.h>
 
 #include "common/libs/fs/shared_buf.h"
-
-using android::base::Error;
-using android::base::Result;
+#include "common/libs/utils/result.h"
+#include "host/commands/test_gce_driver/gce_api.h"
+#include "host/commands/test_gce_driver/key_pair.h"
 
 namespace cuttlefish {
 
@@ -113,14 +112,20 @@
 Result<std::unique_ptr<ScopedGceInstance>> ScopedGceInstance::CreateDefault(
     GceApi& gce, const std::string& zone, const std::string& instance_name,
     bool internal) {
-  auto ssh_key = KeyPair::CreateRsa(4096);
-  if (!ssh_key.ok()) {
-    return Error() << "Could not create ssh key pair: " << ssh_key.error();
-  }
+  auto ssh_key =
+      CF_EXPECT(KeyPair::CreateRsa(4096), "Could not create ssh key pair");
+  auto ssh_pubkey =
+      CF_EXPECT(ssh_key->OpenSshPublicKey(), "Could get openssh format key: ");
 
-  auto ssh_pubkey = (*ssh_key)->OpenSshPublicKey();
-  if (!ssh_pubkey.ok()) {
-    return Error() << "Could get openssh format key: " << ssh_pubkey.error();
+  // TODO(schuffelen): Pass this through more layers to make it more general.
+  auto network_interface = GceNetworkInterface::Default();
+  if (internal) {
+    network_interface.Network(
+        "https://www.googleapis.com/compute/v1/projects/android-treehugger/"
+        "global/networks/cloud-tf-vpc");
+    network_interface.Subnetwork(
+        "https://www.googleapis.com/compute/v1/projects/android-treehugger/"
+        "regions/us-west1/subnetworks/cloud-tf-vpc");
   }
 
   auto default_instance_info =
@@ -128,8 +133,8 @@
           .Name(instance_name)
           .Zone(zone)
           .MachineType("zones/us-west1-a/machineTypes/n1-standard-4")
-          .AddMetadata("ssh-keys", "vsoc-01:" + *ssh_pubkey)
-          .AddNetworkInterface(GceNetworkInterface::Default())
+          .AddMetadata("ssh-keys", "vsoc-01:" + ssh_pubkey)
+          .AddNetworkInterface(std::move(network_interface))
           .AddDisk(
               GceInstanceDisk::EphemeralBootDisk()
                   .SourceImage(
@@ -140,12 +145,10 @@
           .AddScope("https://www.googleapis.com/auth/devstorage.read_only")
           .AddScope("https://www.googleapis.com/auth/logging.write");
 
-  auto creation = gce.Insert(default_instance_info).Future().get();
-  if (!creation.ok()) {
-    return Error() << "Failed to create instance: " << creation.error();
-  }
+  CF_EXPECT(gce.Insert(default_instance_info).Future().get(),
+            "Failed to create instance");
 
-  auto privkey = CF_EXPECT((*ssh_key)->PemPrivateKey());
+  auto privkey = CF_EXPECT(ssh_key->PemPrivateKey());
   std::unique_ptr<TemporaryFile> privkey_file(CF_EXPECT(new TemporaryFile()));
   auto fd_dup = SharedFD::Dup(privkey_file->fd);
   CF_EXPECT(fd_dup->IsOpen());
@@ -155,16 +158,10 @@
   std::unique_ptr<ScopedGceInstance> instance(new ScopedGceInstance(
       gce, default_instance_info, std::move(privkey_file), internal));
 
-  auto created_info = gce.Get(default_instance_info).get();
-  if (!created_info.ok()) {
-    return Error() << "Failed to get instance info: " << created_info.error();
-  }
-  instance->instance_ = *created_info;
+  auto created_info = CF_EXPECT(gce.Get(default_instance_info).get(),
+                                "Failed to get instance info: ");
 
-  auto ssh_ready = instance->EnforceSshReady();
-  if (!ssh_ready.ok()) {
-    return Error() << "Failed to access SSH on instance: " << ssh_ready.error();
-  }
+  CF_EXPECT(instance->EnforceSshReady(), "Failed to access SSH on instance");
   return instance;
 }
 
@@ -172,14 +169,11 @@
   std::string out;
   std::string err;
   for (int i = 0; i < 100; i++) {
-    auto ssh = Ssh();
-    if (!ssh.ok()) {
-      return Error() << "Failed to create ssh command: " << ssh.error();
-    }
+    auto ssh = CF_EXPECT(Ssh(), "Failed to create ssh command");
 
-    ssh->RemoteParameter("ls");
-    ssh->RemoteParameter("/");
-    auto command = ssh->Build();
+    ssh.RemoteParameter("ls");
+    ssh.RemoteParameter("/");
+    auto command = ssh.Build();
 
     out = "";
     err = "";
@@ -189,8 +183,8 @@
     }
   }
 
-  return Error() << "Failed to ssh to the instance. stdout=\"" << out
-                 << "\", stderr = \"" << err << "\"";
+  return CF_ERR("Failed to ssh to the instance. stdout=\""
+                << out << "\", stderr = \"" << err << "\"");
 }
 
 ScopedGceInstance::ScopedGceInstance(GceApi& gce,
@@ -205,7 +199,8 @@
 ScopedGceInstance::~ScopedGceInstance() {
   auto delete_ins = gce_.Delete(instance_).Future().get();
   if (!delete_ins.ok()) {
-    LOG(ERROR) << "Failed to delete instance: " << delete_ins.error();
+    LOG(ERROR) << "Failed to delete instance: " << delete_ins.error().Message();
+    LOG(DEBUG) << "Failed to delete instance: " << delete_ins.error().Trace();
   }
 }
 
diff --git a/host/commands/test_gce_driver/scoped_instance.h b/host/commands/test_gce_driver/scoped_instance.h
index f4ee87f..411175a 100644
--- a/host/commands/test_gce_driver/scoped_instance.h
+++ b/host/commands/test_gce_driver/scoped_instance.h
@@ -21,11 +21,10 @@
 #include <vector>
 
 #include <android-base/file.h>
-#include <android-base/result.h>
 
+#include "common/libs/utils/result.h"
 #include "common/libs/utils/subprocess.h"
 #include "host/commands/test_gce_driver/gce_api.h"
-#include "host/commands/test_gce_driver/key_pair.h"
 
 namespace cuttlefish {
 
@@ -70,20 +69,20 @@
 
 class ScopedGceInstance {
  public:
-  static android::base::Result<std::unique_ptr<ScopedGceInstance>>
-  CreateDefault(GceApi& gce, const std::string& zone,
-                const std::string& instance_name, bool internal_addresses);
+  static Result<std::unique_ptr<ScopedGceInstance>> CreateDefault(
+      GceApi& gce, const std::string& zone, const std::string& instance_name,
+      bool internal_addresses);
   ~ScopedGceInstance();
 
-  android::base::Result<SshCommand> Ssh();
-  android::base::Result<void> Reset();
+  Result<SshCommand> Ssh();
+  Result<void> Reset();
 
  private:
   ScopedGceInstance(GceApi& gce, const GceInstanceInfo& instance,
                     std::unique_ptr<TemporaryFile> privkey,
                     bool internal_addresses);
 
-  android::base::Result<void> EnforceSshReady();
+  Result<void> EnforceSshReady();
 
   GceApi& gce_;
   GceInstanceInfo instance_;
diff --git a/host/commands/wmediumd_control/Android.bp b/host/commands/wmediumd_control/Android.bp
index 0e58ec3..39dbfac 100644
--- a/host/commands/wmediumd_control/Android.bp
+++ b/host/commands/wmediumd_control/Android.bp
@@ -17,27 +17,27 @@
     default_applicable_licenses: ["Android-Apache-2.0"],
 }
 
-cc_binary {
+cc_binary_host {
     name: "wmediumd_control",
     srcs: [
         "main.cpp",
     ],
-    shared_libs: [
-        "libext2_blkid",
+    static_libs: [
+        "libbase",
         "libcuttlefish_fs",
         "libcuttlefish_utils",
-        "libbase",
-        "libfruit",
-        "libjsoncpp",
-        "libz",
-    ],
-    static_libs: [
         "libcuttlefish_host_config",
         "libcuttlefish_wmediumd_controller",
+        "libext2_blkid",
+        "libfruit",
         "libgflags",
+        "liblog",
+        "libjsoncpp",
+        "libz",
     ],
     header_libs: [
         "wmediumd_headers",
     ],
+    stl: "libc++_static",
     defaults: ["cuttlefish_host"],
 }
diff --git a/host/commands/wmediumd_control/main.cpp b/host/commands/wmediumd_control/main.cpp
index 63763b5..f0516df 100644
--- a/host/commands/wmediumd_control/main.cpp
+++ b/host/commands/wmediumd_control/main.cpp
@@ -15,6 +15,7 @@
  */
 
 #include <android-base/logging.h>
+#include <android-base/parsedouble.h>
 #include <android-base/parseint.h>
 #include <gflags/gflags.h>
 
@@ -27,6 +28,7 @@
 #include <vector>
 
 #include "host/libs/config/cuttlefish_config.h"
+#include "host/libs/wmediumd_controller/wmediumd_api_protocol.h"
 #include "host/libs/wmediumd_controller/wmediumd_controller.h"
 
 const std::string usageMessage =
@@ -44,52 +46,25 @@
     "    stop_pcap\n"
     "      stop packet capture\n\n"
     "    list_stations\n"
-    "      listing stations connected to wmediumd\n\n";
+    "      listing stations connected to wmediumd\n\n"
+    "    set_position mac xpos ypos\n"
+    "      set X, Y positions of specific station\n"
+    "      use -- before set_position if you want to set the position with "
+    "negative values\n"
+    "        e.g. wmediumd_control -- set_position 42:00:00:00:00:00 -1.0 "
+    "-2.0\n\n"
+    "    set_lci mac lci\n"
+    "      set LCI (latitude, longitude, altitude) of the specific station\n"
+    "      it's free-form string and may not match with other location nor"
+    "position information\n\n"
+    "    set_civicloc mac civicloc\n"
+    "      set CIVIC location (e.g. postal address) of the specific station\n"
+    "      it's free-form string and may not match with other location nor"
+    "position information\n";
 
 DEFINE_string(wmediumd_api_server, "",
               "Unix socket path of wmediumd api server");
 
-const int kMacAddrStringSize = 17;
-
-bool ValidMacAddr(const std::string& macAddr) {
-  if (macAddr.size() != kMacAddrStringSize) {
-    return false;
-  }
-
-  if (macAddr[2] != ':' || macAddr[5] != ':' || macAddr[8] != ':' ||
-      macAddr[11] != ':' || macAddr[14] != ':') {
-    return false;
-  }
-
-  for (int i = 0; i < kMacAddrStringSize; ++i) {
-    if ((i - 2) % 3 == 0) continue;
-    char c = macAddr[i];
-
-    if (isupper(c)) {
-      c = tolower(c);
-    }
-
-    if ((c < '0' || c > '9') && (c < 'a' || c > 'f')) return false;
-  }
-
-  return true;
-}
-
-std::string MacToString(const char* macAddr) {
-  std::stringstream result;
-
-  for (int i = 0; i < ETH_ALEN; i++) {
-    result << std::setfill('0') << std::setw(2) << std::right << std::hex
-           << static_cast<int>(static_cast<uint8_t>(macAddr[i]));
-
-    if (i != 5) {
-      result << ":";
-    }
-  }
-
-  return result.str();
-}
-
 bool HandleSetSnrCommand(cuttlefish::WmediumdController& client,
                          const std::vector<std::string>& args) {
   if (args.size() != 4) {
@@ -97,12 +72,12 @@
     return false;
   }
 
-  if (!ValidMacAddr(args[1])) {
+  if (!cuttlefish::ValidMacAddr(args[1])) {
     LOG(ERROR) << "error: invalid mac address " << args[1];
     return false;
   }
 
-  if (!ValidMacAddr(args[2])) {
+  if (!cuttlefish::ValidMacAddr(args[2])) {
     LOG(ERROR) << "error: invalid mac address " << args[2];
     return false;
   }
@@ -187,13 +162,18 @@
             << "\t"
             << "Y Pos"
             << "\t"
+            << "LCI"
+            << "\t"
+            << "CIVICLOC"
+            << "\t"
             << "TX Power" << std::endl;
 
   for (auto& station : stationList) {
-    std::cout << MacToString(station.addr) << "\t" << std::setprecision(1)
-              << std::fixed << station.x << "\t" << std::setprecision(1)
-              << std::fixed << station.y << "\t" << station.tx_power
-              << std::endl;
+    std::cout << cuttlefish::MacToString(station.addr) << "\t"
+              << std::setprecision(1) << std::fixed << station.x << "\t"
+              << std::setprecision(1) << std::fixed << station.y << "\t\""
+              << station.lci << "\"\t\"" << station.civicloc << "\"\t"
+              << station.tx_power << std::endl;
   }
 
   std::cout << std::endl;
@@ -201,6 +181,79 @@
   return true;
 }
 
+bool HandleSetPositionCommand(cuttlefish::WmediumdController& client,
+                              const std::vector<std::string>& args) {
+  if (args.size() != 4) {
+    LOG(ERROR) << "error: set_position must provide 3 options";
+    return false;
+  }
+
+  if (!cuttlefish::ValidMacAddr(args[1])) {
+    LOG(ERROR) << "error: invalid mac address " << args[1];
+    return false;
+  }
+
+  double x = 0;
+  double y = 0;
+
+  auto parseResultX = android::base::ParseDouble(args[2].c_str(), &x);
+  auto parseResultY = android::base::ParseDouble(args[3].c_str(), &y);
+
+  if (!parseResultX) {
+    LOG(ERROR) << "error: cannot parse X: " << args[2];
+    return false;
+  }
+
+  if (!parseResultY) {
+    LOG(ERROR) << "error: cannot parse Y: " << args[3];
+    return false;
+  }
+
+  if (!client.SetPosition(args[1], x, y)) {
+    return false;
+  }
+
+  return true;
+}
+
+bool HandleSetLciCommand(cuttlefish::WmediumdController& client,
+                         const std::vector<std::string>& args) {
+  if (args.size() != 3) {
+    LOG(ERROR) << "error: set_lci must provide 2 options";
+    return false;
+  }
+
+  if (!cuttlefish::ValidMacAddr(args[1])) {
+    LOG(ERROR) << "error: invalid mac address " << args[1];
+    return false;
+  }
+
+  if (!client.SetLci(args[1], args[2])) {
+    return false;
+  }
+
+  return true;
+}
+
+bool HandleSetCiviclocCommand(cuttlefish::WmediumdController& client,
+                              const std::vector<std::string>& args) {
+  if (args.size() != 3) {
+    LOG(ERROR) << "error: set_civicloc must provide 2 options";
+    return false;
+  }
+
+  if (!cuttlefish::ValidMacAddr(args[1])) {
+    LOG(ERROR) << "error: invalid mac address " << args[1];
+    return false;
+  }
+
+  if (!client.SetCivicloc(args[1], args[2])) {
+    return false;
+  }
+
+  return true;
+}
+
 int main(int argc, char** argv) {
   gflags::SetUsageMessage(usageMessage);
   gflags::ParseCommandLineFlags(&argc, &argv, true);
@@ -240,13 +293,15 @@
   auto commandMap =
       std::unordered_map<std::string,
                          std::function<bool(cuttlefish::WmediumdController&,
-                                            const std::vector<std::string>&)>>{{
-          {"set_snr", HandleSetSnrCommand},
-          {"reload_config", HandleReloadConfigCommand},
-          {"start_pcap", HandleStartPcapCommand},
-          {"stop_pcap", HandleStopPcapCommand},
-          {"list_stations", HandleListStationsCommand},
-      }};
+                                            const std::vector<std::string>&)>>{
+          {{"set_snr", HandleSetSnrCommand},
+           {"reload_config", HandleReloadConfigCommand},
+           {"start_pcap", HandleStartPcapCommand},
+           {"stop_pcap", HandleStopPcapCommand},
+           {"list_stations", HandleListStationsCommand},
+           {"set_position", HandleSetPositionCommand},
+           {"set_lci", HandleSetLciCommand},
+           {"set_civicloc", HandleSetCiviclocCommand}}};
 
   if (commandMap.find(args[0]) == std::end(commandMap)) {
     LOG(ERROR) << "error: command " << args[0] << " does not exist";
diff --git a/host/example_custom_actions/Android.bp b/host/example_custom_actions/Android.bp
index a213099..a671e26 100644
--- a/host/example_custom_actions/Android.bp
+++ b/host/example_custom_actions/Android.bp
@@ -1,3 +1,18 @@
+//
+// Copyright (C) 2020 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
 package {
     default_applicable_licenses: ["Android-Apache-2.0"],
 }
diff --git a/host/example_custom_actions/main.cpp b/host/example_custom_actions/main.cpp
index c6d3b3c..cac73ea 100644
--- a/host/example_custom_actions/main.cpp
+++ b/host/example_custom_actions/main.cpp
@@ -1,3 +1,18 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 #include <android-base/logging.h>
 #include <android-base/strings.h>
 #include <sys/socket.h>
diff --git a/host/frontend/adb_connector/README.md b/host/frontend/adb_connector/README.md
new file mode 100644
index 0000000..b907a77
--- /dev/null
+++ b/host/frontend/adb_connector/README.md
@@ -0,0 +1,6 @@
+Manages state in the adb server pertaining to a Cuttlefish device. The ADB
+server does not automatically discover Cuttlefish devices, so this process
+sends register and deregister messages to the ADB server to inform it of
+the device state.
+
+[![Linkage diagram](./doc/linkage.png)](https://cs.android.com/android/platform/superproject/+/master:device/google/cuttlefish/host/frontend/adb_connector/doc/linkage.svg)
diff --git a/host/frontend/adb_connector/adb_connection_maintainer.cpp b/host/frontend/adb_connector/adb_connection_maintainer.cpp
index 5f9ff1d..736b0d8 100644
--- a/host/frontend/adb_connector/adb_connection_maintainer.cpp
+++ b/host/frontend/adb_connector/adb_connection_maintainer.cpp
@@ -13,6 +13,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+#include "host/frontend/adb_connector/adb_connection_maintainer.h"
 
 #include <cctype>
 #include <iomanip>
@@ -24,9 +25,10 @@
 
 #include <unistd.h>
 
+#include "common/libs/fs/shared_buf.h"
 #include "common/libs/fs/shared_fd.h"
-#include "host/frontend/adb_connector/adb_connection_maintainer.h"
 
+namespace cuttlefish {
 namespace {
 
 std::string MakeMessage(const std::string& user_message) {
@@ -52,37 +54,6 @@
   return MakeMessage("host:disconnect:" + address);
 }
 
-// returns true if successfully sent the whole message
-bool SendAll(cuttlefish::SharedFD sock, const std::string& msg) {
-  ssize_t total_written{};
-  while (total_written < static_cast<ssize_t>(msg.size())) {
-    if (!sock->IsOpen()) {
-      return false;
-    }
-    auto just_written = sock->Send(msg.c_str() + total_written,
-                                   msg.size() - total_written, MSG_NOSIGNAL);
-    if (just_written <= 0) {
-      return false;
-    }
-    total_written += just_written;
-  }
-  return true;
-}
-
-std::string RecvAll(cuttlefish::SharedFD sock, const size_t count) {
-  size_t total_read{};
-  std::unique_ptr<char[]> data(new char[count]);
-  while (total_read < count) {
-    auto just_read = sock->Read(data.get() + total_read, count - total_read);
-    if (just_read <= 0) {
-      LOG(WARNING) << "adb daemon socket closed early";
-      return {};
-    }
-    total_read += just_read;
-  }
-  return {data.get(), count};
-}
-
 // Response will either be OKAY or FAIL
 constexpr char kAdbOkayStatusResponse[] = "OKAY";
 constexpr std::size_t kAdbStatusResponseLength =
@@ -93,7 +64,7 @@
 
 constexpr int kAdbDaemonPort = 5037;
 
-bool AdbSendMessage(cuttlefish::SharedFD sock, const std::string& message) {
+bool AdbSendMessage(const SharedFD& sock, const std::string& message) {
   if (!sock->IsOpen()) {
     return false;
   }
@@ -105,7 +76,7 @@
 }
 
 bool AdbSendMessage(const std::string& message) {
-  auto sock = cuttlefish::SharedFD::SocketLocalClient(kAdbDaemonPort, SOCK_STREAM);
+  auto sock = SharedFD::SocketLocalClient(kAdbDaemonPort, SOCK_STREAM);
   return AdbSendMessage(sock, message);
 }
 
@@ -123,7 +94,7 @@
 }
 
 // assumes the OKAY/FAIL status has already been read
-std::string RecvAdbResponse(cuttlefish::SharedFD sock) {
+std::string RecvAdbResponse(const SharedFD& sock) {
   auto length_as_hex_str = RecvAll(sock, kAdbMessageLengthLength);
   if (!IsInteger(length_as_hex_str)) {
     return {};
@@ -134,7 +105,7 @@
 
 // Returns a negative value if uptime result couldn't be read for
 // any reason.
-int RecvUptimeResult(cuttlefish::SharedFD sock) {
+int RecvUptimeResult(const SharedFD& sock) {
   std::vector<char> uptime_vec{};
   std::vector<char> just_read(16);
   do {
@@ -182,7 +153,7 @@
   // sleeps stabilize the communication.
   LOG(DEBUG) << "Watching for disconnect on " << address;
   while (true) {
-    auto sock = cuttlefish::SharedFD::SocketLocalClient(kAdbDaemonPort, SOCK_STREAM);
+    auto sock = SharedFD::SocketLocalClient(kAdbDaemonPort, SOCK_STREAM);
     if (!AdbSendMessage(sock, MakeTransportMessage(address))) {
       LOG(WARNING) << "transport message failed, response body: "
                    << RecvAdbResponse(sock);
@@ -208,9 +179,11 @@
 
 }  // namespace
 
-[[noreturn]] void cuttlefish::EstablishAndMaintainConnection(std::string address) {
+[[noreturn]] void EstablishAndMaintainConnection(const std::string& address) {
   while (true) {
     EstablishConnection(address);
     WaitForAdbDisconnection(address);
   }
 }
+
+}  // namespace cuttlefish
diff --git a/host/frontend/adb_connector/adb_connection_maintainer.h b/host/frontend/adb_connector/adb_connection_maintainer.h
index 23a7b44..b67dc0b 100644
--- a/host/frontend/adb_connector/adb_connection_maintainer.h
+++ b/host/frontend/adb_connector/adb_connection_maintainer.h
@@ -15,8 +15,10 @@
  */
 #pragma once
 
+#include <string>
+
 namespace cuttlefish {
 
-[[noreturn]] void EstablishAndMaintainConnection(std::string address);
+[[noreturn]] void EstablishAndMaintainConnection(const std::string& address);
 
 }  // namespace cuttlefish
diff --git a/host/frontend/adb_connector/doc/linkage.dot b/host/frontend/adb_connector/doc/linkage.dot
new file mode 100644
index 0000000..6dde284
--- /dev/null
+++ b/host/frontend/adb_connector/doc/linkage.dot
@@ -0,0 +1,23 @@
+graph {
+  adb_connector [label = < <B>adb_connector</B> >, penwidth = "2"]
+  adb_client [label = "ADB command line interface"]
+  adb_server [label = "ADB Server"]
+  user [label = "User input"]
+  vmm [label = "crosvm / qemu"]
+  host_vsock [label = "/dev/vhost_vsock", shape = "rectangle"]
+  subgraph cluster_android {
+    adb_daemon [label = "ADB Daemon"]
+    android_vsock [label = "/dev/vhost_vsock", shape = "rectangle"]
+    shell [label = "toybox / sh"]
+  }
+
+  run_cvd -- adb_connector
+  adb_connector -- adb_server
+  user -- adb_client
+  adb_client -- adb_server
+  android_vsock -- adb_daemon
+  vmm -- android_vsock
+  host_vsock -- vmm
+  adb_server -- host_vsock
+  adb_daemon -- shell
+}
diff --git a/host/frontend/adb_connector/doc/linkage.png b/host/frontend/adb_connector/doc/linkage.png
new file mode 100644
index 0000000..dbe52b7
--- /dev/null
+++ b/host/frontend/adb_connector/doc/linkage.png
Binary files differ
diff --git a/host/frontend/adb_connector/doc/linkage.svg b/host/frontend/adb_connector/doc/linkage.svg
new file mode 100644
index 0000000..c0a5641
--- /dev/null
+++ b/host/frontend/adb_connector/doc/linkage.svg
@@ -0,0 +1,124 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN"
+ "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
+<!-- Generated by graphviz version 2.43.0 (0)
+ -->
+<!-- Title: %3 Pages: 1 -->
+<svg width="402pt" height="564pt"
+ viewBox="0.00 0.00 401.83 564.00" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
+<g id="graph0" class="graph" transform="scale(1 1) rotate(0) translate(4 560)">
+<title>%3</title>
+<polygon fill="white" stroke="transparent" points="-4,4 -4,-560 397.83,-560 397.83,4 -4,4"/>
+<g id="clust1" class="cluster">
+<title>cluster_android</title>
+<polygon fill="none" stroke="black" points="149.34,-8 149.34,-204 289.34,-204 289.34,-8 149.34,-8"/>
+</g>
+<!-- adb_connector -->
+<g id="node1" class="node">
+<title>adb_connector</title>
+<ellipse fill="none" stroke="black" stroke-width="2" cx="322.34" cy="-466" rx="71.49" ry="18"/>
+<text text-anchor="start" x="275.34" y="-463.3" font-family="Times,serif" font-size="14.00"> </text>
+<text text-anchor="start" x="279.34" y="-463.3" font-family="Times,serif" font-weight="bold" font-size="14.00">adb_connector</text>
+<text text-anchor="start" x="365.34" y="-463.3" font-family="Times,serif" font-size="14.00"> </text>
+</g>
+<!-- adb_server -->
+<g id="node3" class="node">
+<title>adb_server</title>
+<ellipse fill="none" stroke="black" cx="219.34" cy="-394" rx="54.69" ry="18"/>
+<text text-anchor="middle" x="219.34" y="-390.3" font-family="Times,serif" font-size="14.00">ADB Server</text>
+</g>
+<!-- adb_connector&#45;&#45;adb_server -->
+<g id="edge2" class="edge">
+<title>adb_connector&#45;&#45;adb_server</title>
+<path fill="none" stroke="black" d="M298.71,-448.94C281.72,-437.4 258.95,-421.92 242.15,-410.5"/>
+</g>
+<!-- adb_client -->
+<g id="node2" class="node">
+<title>adb_client</title>
+<ellipse fill="none" stroke="black" cx="116.34" cy="-466" rx="116.18" ry="18"/>
+<text text-anchor="middle" x="116.34" y="-462.3" font-family="Times,serif" font-size="14.00">ADB command line interface</text>
+</g>
+<!-- adb_client&#45;&#45;adb_server -->
+<g id="edge4" class="edge">
+<title>adb_client&#45;&#45;adb_server</title>
+<path fill="none" stroke="black" d="M141.01,-448.23C157.91,-436.75 180.16,-421.62 196.63,-410.43"/>
+</g>
+<!-- host_vsock -->
+<g id="node6" class="node">
+<title>host_vsock</title>
+<polygon fill="none" stroke="black" points="274.84,-340 163.84,-340 163.84,-304 274.84,-304 274.84,-340"/>
+<text text-anchor="middle" x="219.34" y="-318.3" font-family="Times,serif" font-size="14.00">/dev/vhost_vsock</text>
+</g>
+<!-- adb_server&#45;&#45;host_vsock -->
+<g id="edge8" class="edge">
+<title>adb_server&#45;&#45;host_vsock</title>
+<path fill="none" stroke="black" d="M219.34,-375.7C219.34,-364.85 219.34,-350.92 219.34,-340.1"/>
+</g>
+<!-- user -->
+<g id="node4" class="node">
+<title>user</title>
+<ellipse fill="none" stroke="black" cx="116.34" cy="-538" rx="48.19" ry="18"/>
+<text text-anchor="middle" x="116.34" y="-534.3" font-family="Times,serif" font-size="14.00">User input</text>
+</g>
+<!-- user&#45;&#45;adb_client -->
+<g id="edge3" class="edge">
+<title>user&#45;&#45;adb_client</title>
+<path fill="none" stroke="black" d="M116.34,-519.7C116.34,-508.85 116.34,-494.92 116.34,-484.1"/>
+</g>
+<!-- vmm -->
+<g id="node5" class="node">
+<title>vmm</title>
+<ellipse fill="none" stroke="black" cx="219.34" cy="-250" rx="64.19" ry="18"/>
+<text text-anchor="middle" x="219.34" y="-246.3" font-family="Times,serif" font-size="14.00">crosvm / qemu</text>
+</g>
+<!-- android_vsock -->
+<g id="node8" class="node">
+<title>android_vsock</title>
+<polygon fill="none" stroke="black" points="274.84,-196 163.84,-196 163.84,-160 274.84,-160 274.84,-196"/>
+<text text-anchor="middle" x="219.34" y="-174.3" font-family="Times,serif" font-size="14.00">/dev/vhost_vsock</text>
+</g>
+<!-- vmm&#45;&#45;android_vsock -->
+<g id="edge6" class="edge">
+<title>vmm&#45;&#45;android_vsock</title>
+<path fill="none" stroke="black" d="M219.34,-231.7C219.34,-220.85 219.34,-206.92 219.34,-196.1"/>
+</g>
+<!-- host_vsock&#45;&#45;vmm -->
+<g id="edge7" class="edge">
+<title>host_vsock&#45;&#45;vmm</title>
+<path fill="none" stroke="black" d="M219.34,-303.7C219.34,-292.85 219.34,-278.92 219.34,-268.1"/>
+</g>
+<!-- adb_daemon -->
+<g id="node7" class="node">
+<title>adb_daemon</title>
+<ellipse fill="none" stroke="black" cx="219.34" cy="-106" rx="61.99" ry="18"/>
+<text text-anchor="middle" x="219.34" y="-102.3" font-family="Times,serif" font-size="14.00">ADB Daemon</text>
+</g>
+<!-- shell -->
+<g id="node9" class="node">
+<title>shell</title>
+<ellipse fill="none" stroke="black" cx="219.34" cy="-34" rx="50.09" ry="18"/>
+<text text-anchor="middle" x="219.34" y="-30.3" font-family="Times,serif" font-size="14.00">toybox / sh</text>
+</g>
+<!-- adb_daemon&#45;&#45;shell -->
+<g id="edge9" class="edge">
+<title>adb_daemon&#45;&#45;shell</title>
+<path fill="none" stroke="black" d="M219.34,-87.7C219.34,-76.85 219.34,-62.92 219.34,-52.1"/>
+</g>
+<!-- android_vsock&#45;&#45;adb_daemon -->
+<g id="edge5" class="edge">
+<title>android_vsock&#45;&#45;adb_daemon</title>
+<path fill="none" stroke="black" d="M219.34,-159.7C219.34,-148.85 219.34,-134.92 219.34,-124.1"/>
+</g>
+<!-- run_cvd -->
+<g id="node10" class="node">
+<title>run_cvd</title>
+<ellipse fill="none" stroke="black" cx="322.34" cy="-538" rx="39.79" ry="18"/>
+<text text-anchor="middle" x="322.34" y="-534.3" font-family="Times,serif" font-size="14.00">run_cvd</text>
+</g>
+<!-- run_cvd&#45;&#45;adb_connector -->
+<g id="edge1" class="edge">
+<title>run_cvd&#45;&#45;adb_connector</title>
+<path fill="none" stroke="black" d="M322.34,-519.7C322.34,-508.85 322.34,-494.92 322.34,-484.1"/>
+</g>
+</g>
+</svg>
diff --git a/host/frontend/adb_connector/main.cpp b/host/frontend/adb_connector/main.cpp
index aaca8cf..598d77e 100644
--- a/host/frontend/adb_connector/main.cpp
+++ b/host/frontend/adb_connector/main.cpp
@@ -36,9 +36,10 @@
 DEFINE_string(addresses, "", "Comma-separated list of addresses to "
                              "'adb connect' to");
 
+namespace cuttlefish {
 namespace {
 void LaunchConnectionMaintainerThread(const std::string& address) {
-  std::thread(cuttlefish::EstablishAndMaintainConnection, address).detach();
+  std::thread(EstablishAndMaintainConnection, address).detach();
 }
 
 std::vector<std::string> ParseAddressList(std::string ports) {
@@ -56,14 +57,20 @@
 
 }  // namespace
 
-int main(int argc, char* argv[]) {
-  cuttlefish::DefaultSubprocessLogging(argv);
+int AdbConnectorMain(int argc, char* argv[]) {
+  DefaultSubprocessLogging(argv);
   gflags::ParseCommandLineFlags(&argc, &argv, true);
   CHECK(!FLAGS_addresses.empty()) << "Must specify --addresses flag";
 
-  for (auto address : ParseAddressList(FLAGS_addresses)) {
+  for (const auto& address : ParseAddressList(FLAGS_addresses)) {
     LaunchConnectionMaintainerThread(address);
   }
 
   SleepForever();
 }
+
+}  // namespace cuttlefish
+
+int main(int argc, char* argv[]) {
+  return cuttlefish::AdbConnectorMain(argc, argv);
+}
diff --git a/host/frontend/webrtc/Android.bp b/host/frontend/webrtc/Android.bp
index d4384c5..c7d281c 100644
--- a/host/frontend/webrtc/Android.bp
+++ b/host/frontend/webrtc/Android.bp
@@ -17,68 +17,15 @@
     default_applicable_licenses: ["Android-Apache-2.0"],
 }
 
-cc_library_static {
-    name: "libcuttlefish_webrtc",
-    srcs: [
-        "lib/audio_device.cpp",
-        "lib/audio_track_source_impl.cpp",
-        "lib/camera_streamer.cpp",
-        "lib/client_handler.cpp",
-        "lib/keyboard.cpp",
-        "lib/local_recorder.cpp",
-        "lib/port_range_socket_factory.cpp",
-        "lib/streamer.cpp",
-        "lib/utils.cpp",
-        "lib/video_track_source_impl.cpp",
-        "lib/vp8only_encoder_factory.cpp",
-        "lib/server_connection.cpp",
-    ],
-    cflags: [
-        // libwebrtc headers need this
-        "-Wno-unused-parameter",
-        "-DWEBRTC_POSIX",
-        "-DWEBRTC_LINUX",
-    ],
-    header_libs: [
-        "webrtc_signaling_headers",
-        "libwebrtc_absl_headers",
-    ],
-    static_libs: [
-        "libsrtp2",
-        "libcuttlefish_host_config",
-        "libcuttlefish_screen_connector",
-        "libcuttlefish_wayland_server",
-        "libgflags",
-        "libdrm",
-        "libffi",
-        "libwayland_crosvm_gpu_display_extension_server_protocols",
-        "libwayland_extension_server_protocols",
-        "libwayland_server",
-        "libwebsockets",
-        "libcap",
-        "libcuttlefish_utils",
-        "libwebrtc",
-        "libwebrtc_absl_base",
-        "libwebrtc_absl_types",
-    ],
-    shared_libs: [
-        "libbase",
-        "libcn-cbor",
-        "libcuttlefish_fs",
-        "libfruit",
-        "libjsoncpp",
-        "libssl",
-        "libwebm_mkvmuxer",
-    ],
-    defaults: ["cuttlefish_buildhost_only"],
-}
-
 cc_binary_host {
     name: "webRTC",
     srcs: [
         "adb_handler.cpp",
         "audio_handler.cpp",
         "bluetooth_handler.cpp",
+        "location_handler.cpp",
+        "gpx_locations_handler.cpp",
+        "kml_locations_handler.cpp",
         "client_server.cpp",
         "connection_observer.cpp",
         "cvd_video_frame_buffer.cpp",
@@ -86,23 +33,19 @@
         "kernel_log_events_handler.cpp",
         "main.cpp",
     ],
+    cflags: [
+        // libwebrtc headers need this
+        "-Wno-unused-parameter",
+        "-D_XOPEN_SOURCE",
+        "-DWEBRTC_POSIX",
+        "-DWEBRTC_LINUX",
+    ],
     header_libs: [
         "webrtc_signaling_headers",
         "libwebrtc_absl_headers",
         "libcuttlefish_confui_host_headers",
     ],
     static_libs: [
-        "libwebrtc_absl_base",
-        "libwebrtc_absl_container",
-        "libwebrtc_absl_debugging",
-        "libwebrtc_absl_flags",
-        "libwebrtc_absl_hash",
-        "libwebrtc_absl_numeric",
-        "libwebrtc_absl_status",
-        "libwebrtc_absl_strings",
-        "libwebrtc_absl_synchronization",
-        "libwebrtc_absl_time",
-        "libwebrtc_absl_types",
         "libaom",
         "libcap",
         "libcn-cbor",
@@ -128,9 +71,12 @@
         "libwayland_extension_server_protocols",
         "libwayland_server",
         "libwebrtc",
-        "libcuttlefish_webrtc",
+        "libcuttlefish_webrtc_device",
+        "libcuttlefish_webrtc_common",
         "libwebsockets",
         "libyuv",
+        "libcvd_gnss_grpc_proxy",
+        "liblocation",
     ],
     shared_libs: [
         "libext2_blkid",
@@ -146,62 +92,10 @@
         "libvpx",
         "libyuv",
         "libwebm_mkvmuxer",
+        "libprotobuf-cpp-full",
+        "libgrpc++_unsecure",
+        "libxml2",
     ],
     defaults: ["cuttlefish_buildhost_only"],
 }
 
-prebuilt_usr_share_host {
-    name: "webrtc_client.html",
-    src: "client/client.html",
-    filename: "client.html",
-    sub_dir: "webrtc/assets",
-}
-
-prebuilt_usr_share_host {
-    name: "webrtc_style.css",
-    src: "client/style.css",
-    filename: "style.css",
-    sub_dir: "webrtc/assets",
-}
-
-prebuilt_usr_share_host {
-    name: "webrtc_controls.css",
-    src: "client/controls.css",
-    filename: "controls.css",
-    sub_dir: "webrtc/assets",
-}
-
-prebuilt_usr_share_host {
-    name: "webrtc_adb.js",
-    src: "client/js/adb.js",
-    filename: "adb.js",
-    sub_dir: "webrtc/assets/js",
-}
-
-prebuilt_usr_share_host {
-    name: "webrtc_cf.js",
-    src: "client/js/cf_webrtc.js",
-    filename: "cf_webrtc.js",
-    sub_dir: "webrtc/assets/js",
-}
-
-prebuilt_usr_share_host {
-    name: "webrtc_app.js",
-    src: "client/js/app.js",
-    filename: "app.js",
-    sub_dir: "webrtc/assets/js",
-}
-
-prebuilt_usr_share_host {
-    name: "webrtc_controls.js",
-    src: "client/js/controls.js",
-    filename: "controls.js",
-    sub_dir: "webrtc/assets/js",
-}
-
-prebuilt_usr_share_host {
-    name: "webrtc_rootcanal.js",
-    src: "client/js/rootcanal.js",
-    filename: "rootcanal.js",
-    sub_dir: "webrtc/assets/js",
-}
diff --git a/host/frontend/webrtc/README.md b/host/frontend/webrtc/README.md
new file mode 100644
index 0000000..2c44b4b
--- /dev/null
+++ b/host/frontend/webrtc/README.md
@@ -0,0 +1,8 @@
+Browser endpoint serving Android screen frames.
+
+[![Interface linkage diagram](./doc/interface.png)](https://cs.android.com/android/platform/superproject/+/master:device/google/cuttlefish/host/frontend/webrtc/doc/interface.svg)
+
+Not all configurations mentioned in the diagram are available everywhere. Some
+functionality is crosvm-specific and some is QEMU-specific.
+
+[![Graphics linkage diagram](./doc/graphics.png)](https://cs.android.com/android/platform/superproject/+/master:device/google/cuttlefish/host/frontend/webrtc/doc/graphics.svg)
diff --git a/host/frontend/webrtc/audio_handler.h b/host/frontend/webrtc/audio_handler.h
index fc4734a..a3ba710 100644
--- a/host/frontend/webrtc/audio_handler.h
+++ b/host/frontend/webrtc/audio_handler.h
@@ -21,8 +21,8 @@
 #include <thread>
 #include <vector>
 
-#include "host/frontend/webrtc/lib/audio_sink.h"
-#include "host/frontend/webrtc/lib/audio_source.h"
+#include "host/frontend/webrtc/libdevice/audio_sink.h"
+#include "host/frontend/webrtc/libcommon/audio_source.h"
 #include "host/libs/audio_connector/server.h"
 
 namespace cuttlefish {
diff --git a/host/frontend/webrtc/client/client.html b/host/frontend/webrtc/client/client.html
deleted file mode 100644
index 3ffb97b..0000000
--- a/host/frontend/webrtc/client/client.html
+++ /dev/null
@@ -1,158 +0,0 @@
-<?--
- Copyright (C) 2019 The Android Open Source Project
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
-      http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
- -->
-
-<html>
-    <head>
-        <link rel="stylesheet" type="text/css" href="style.css" >
-        <link rel="stylesheet" type="text/css" href="controls.css" >
-        <link rel="stylesheet" href="https://fonts.googleapis.com/icon?family=Material+Icons+Outlined">
-        <link rel="stylesheet" href="https://fonts.googleapis.com/icon?family=Material+Icons">
-        <script src="https://ajax.googleapis.com/ajax/libs/jquery/3.6.0/jquery.min.js"></script>
-    </head>
-
-    <body>
-      <div id="loader"></div>
-      <div id="error-message-div">
-        <h3 id="error-message" class="hidden">
-          <span class="material-icons close-btn">close</span>
-        </h3>
-      </div>
-      <section id="device-connection">
-        <div id='header'>
-          <div id='app-controls'>
-            <div id="keyboard-capture-control" title="Capture Keyboard"></div>
-            <div id="mic-capture-control" title="Capture Microphone"></div>
-            <div id="camera-control" title="Capture Camera"></div>
-            <div id="audio-playback-control" title="Play audio">
-              <audio id="device-audio"></audio>
-            </div>
-            <div id="record-video-control" title="Capture Display as Webm"></div>
-          </div>
-          <div id='status-div'>
-            <h3 id='status-message' class='connecting'>Connecting to device</h3>
-          </div>
-        </div>
-        <div id='controls-and-displays'>
-          <div id='control-panel-default-buttons' class='control-panel-column'>
-            <button id='device-details-button' title='Device Details' class='material-icons'>
-              settings
-            </button>
-            <button id='bluetooth-modal-button' title='Bluetooth console' class='material-icons'>
-              settings_bluetooth
-            </button>
-          </div>
-          <div id='control-panel-custom-buttons' class='control-panel-column'></div>
-          <div id='device-displays'>
-          </div>
-        </div>
-      </section>
-      <div id='device-details-modal' class='modal'>
-        <div id='device-details-modal-header' class='modal-header'>
-          <h2>Device Details</h2>
-          <button id='device-details-close' title='Close' class='material-icons modal-close'>close</button>
-        </div>
-        <hr>
-        <h3>Hardware Configuration</h3>
-        <span id='device-details-hardware'>unknown</span>
-      </div>
-
-      <div id='bluetooth-modal' class='modal-wrapper'>
-        <div id='bluetooth-prompt' class='modal'>
-          <div id='bluetooth-prompt-header' class='modal-header'>
-            <h2>Bluetooth</h2>
-            <button id='bluetooth-prompt-close' title='Close' class='material-icons modal-close'>close</button>
-          </div>
-          <div>
-            <div id='bluetooth-prompt-text' class='bluetooth-text'>
-            We have enabled a BT Wizard to simplify adding a<br>bluetooth device.<br>
-            Alternatively, you can enter the BT Console if you<br>want to exercise full control.</div><br>
-            <div class='bluetooth-button'>
-              <button id='bluetooth-prompt-wizard' title='Start Wizard' class='modal-button-highlight'>Start Wizard</button>
-              <button id='bluetooth-prompt-list' title='Device List' class='modal-button'>Device List</button>
-              <button id='bluetooth-prompt-console' title='BT Console' class='modal-button'>BT Console</button>
-            </div>
-          </div>
-        </div>
-        <div id='bluetooth-wizard' class='modal'>
-          <div id='bluetooth-wizard-modal-header' class='modal-header'>
-            <h2>BT Wizard</h2>
-            <button id='bluetooth-wizard-close' title='Close' class='material-icons modal-close'>close</button>
-          </div>
-          <div>
-            <div class='bluetooth-text-field'><input type="text" id='bluetooth-wizard-name' placeholder="Device Name"></input></div>
-            <div class='bluetooth-drop-down'>
-              <select id='bluetooth-wizard-type' validate-mac="true" required>
-                <option value="beacon">Beacon</option>
-                <option value="beacon_swarm">Beacon Swarm</option>
-                <!-- Disabled because they were "started but never finished" (according to mylesgw@)
-                <option value="car_kit">Car Kit</option>
-                <option value="classic">Classic</option> -->
-                <option value="keyboard">Keyboard</option>
-                <option value="remote_loopback">Remote Loopback</option>
-                <option value="scripted_beacon">Scripted Beacon</option>
-                <!-- Disabled because it will never show up in the UI
-                <option value="sniffer">Sniffer</option> -->
-              </select>
-            </div>
-            <div class='bluetooth-text-field'><input type="text" id='bluetooth-wizard-mac' placeholder="Device MAC" validate-mac="true" required></input><span></span></div>
-            <div class='bluetooth-button'>
-              <button id='bluetooth-wizard-device' title='Add Device' class='modal-button-highlight' disabled>Add Device</button>
-              <button id='bluetooth-wizard-cancel' title='Cancel' class='modal-button'>Cancel</button>
-            </div>
-          </div>
-        </div>
-        <div id='bluetooth-wizard-confirm' class='modal'>
-          <div id='bluetooth-wizard-confirm-header' class='modal-header'>
-            <h2>BT Wizard</h2>
-            <button id='bluetooth-wizard-confirm-close' title='Close' class='material-icons modal-close'>close</button>
-          </div>
-          <div id='bluetooth-wizard-text' class='bluetooth-text'>Device added. See device details below.</div><br>
-          <div class='bluetooth-text'>
-            <p>Name: <b>GKeyboard</b></p>
-            <p>Type: <b>Keyboard</b></p>
-            <p>MAC Addr: <b>be:ac:01:55:00:03</b></p>
-          </div>
-          <div class='bluetooth-button'><button id='bluetooth-wizard-another' title='Add Another' class='modal-button-highlight'>Add Another</button></div>
-        </div>
-        <div id='bluetooth-list' class='modal'>
-          <div id='bluetooth-list-header' class='modal-header'>
-            <h2>Device List</h2>
-            <button id='bluetooth-list-close' title='Close' class='material-icons modal-close'>close</button>
-          </div>
-          <div class='bluetooth-text'>
-            <div><button title="Delete" data-device-id="delete" class="bluetooth-list-trash material-icons">delete</button>GKeyboard | Keyboard | be:ac:01:55:00:03</div>
-            <div><button title="Delete" data-device-id="delete" class="bluetooth-list-trash material-icons">delete</button>GHeadphones | Audio | dc:fa:32:00:55:02</div>
-          </div>
-        </div>
-        <div id='bluetooth-console' class='modal'>
-          <div id='bluetooth-console-modal-header' class='modal-header'>
-            <h2>BT Console</h2>
-            <button id='bluetooth-console-close' title='Close' class='material-icons modal-close'>close</button>
-          </div>
-          <div>
-            <div colspan='2'><textarea id='bluetooth-console-view' readonly rows='10' cols='60'></textarea></div>
-            <div width='1'><p id='bluetooth-console-cmd-label'>Command:</p></div>
-            <div width='100'><input id='bluetooth-console-input' type='text'></input></div>
-          </div>
-        </div>
-      </div>
-       <script src="js/adb.js"></script>
-       <script src="js/rootcanal.js"></script>
-       <script src="js/cf_webrtc.js" type="module"></script>
-       <script src="js/controls.js"></script>
-       <script src="js/app.js"></script>
-    </body>
-</html>
diff --git a/host/frontend/webrtc/client/controls.css b/host/frontend/webrtc/client/controls.css
deleted file mode 100644
index a2dce53..0000000
--- a/host/frontend/webrtc/client/controls.css
+++ /dev/null
@@ -1,88 +0,0 @@
-/*
- * Copyright (C) 2021 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-.toggle-control {
-  padding-left: 2px;
-  padding-right: 7px;
-  border-radius: 10px;
-  background-color: #5f6368; /* Google grey 700 */
-  width: 80px;
-  height: 44px;
-}
-
-.toggle-control .toggle-control-icon {
-  position: relative;
-  display: inline-block;
-  float: left;
-  font-size: 44px;
-  color: #e8eaed;
-}
-
-.toggle-control .toggle-control-switch {
-  position: relative;
-  display: inline-block;
-  float:left;
-  width: 36px;
-  height: 21px;
-  top: 11px;
-}
-
-.toggle-control .toggle-control-switch input {
-  opacity: 0;
-  width: 0;
-  height: 0;
-}
-
-.toggle-control .toggle-control-slider {
-  position: absolute;
-  cursor: pointer;
-  top: 0;
-  left: 0;
-  right: 0;
-  bottom: 0;
-  -webkit-transition: .4s;
-  transition: .4s;
-  border-radius: 21px;
-  border: solid 4px;
-  border-color: #e8eaed;
-}
-
-.toggle-control .toggle-control-slider:before {
-  position: absolute;
-  content: "";
-  height: 12px;
-  width: 12px;
-  left: 1px;
-  bottom: 1px;
-  background-color: #e8eaed;
-  -webkit-transition: .4s;
-  transition: .4s;
-  border-radius: 50%;
-}
-
-.toggle-control input:checked + .toggle-control-slider {
-  background-color: #1c4587;
-}
-
-.toggle-control input:focus + .toggle-control-slider {
-  box-shadow: 0 0 1px #2196F3;
-}
-
-.toggle-control input:checked + .toggle-control-slider:before {
-  -webkit-transform: translateX(15px);
-  -ms-transform: translateX(15px);
-  transform: translateX(15px);
-}
diff --git a/host/frontend/webrtc/client/js/app.js b/host/frontend/webrtc/client/js/app.js
deleted file mode 100644
index f26862a..0000000
--- a/host/frontend/webrtc/client/js/app.js
+++ /dev/null
@@ -1,1031 +0,0 @@
-/*
- * Copyright (C) 2019 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-'use strict';
-
-async function ConnectDevice(deviceId, serverConnector) {
-  console.debug('Connect: ' + deviceId);
-  // Prepare messages in case of connection failure
-  let connectionAttemptDuration = 0;
-  const intervalMs = 15000;
-  let connectionInterval = setInterval(() => {
-    connectionAttemptDuration += intervalMs;
-    if (connectionAttemptDuration > 30000) {
-      showError(
-          'Connection should have occurred by now. ' +
-          'Please attempt to restart the guest device.');
-      clearInterval(connectionInterval);
-    } else if (connectionAttemptDuration > 15000) {
-      showWarning('Connection is taking longer than expected');
-    }
-  }, intervalMs);
-
-  let module = await import('./cf_webrtc.js');
-  let deviceConnection = await module.Connect(deviceId, serverConnector);
-  console.info('Connected to ' + deviceId);
-  clearInterval(connectionInterval);
-  return deviceConnection;
-}
-
-function setupMessages() {
-  let closeBtn = document.querySelector('#error-message .close-btn');
-  closeBtn.addEventListener('click', evt => {
-    evt.target.parentElement.className = 'hidden';
-  });
-}
-
-function showMessage(msg, className) {
-  let element = document.getElementById('error-message');
-  if (element.childNodes.length < 2) {
-    // First time, no text node yet
-    element.insertAdjacentText('afterBegin', msg);
-  } else {
-    element.childNodes[0].data = msg;
-  }
-  element.className = className;
-}
-
-function showWarning(msg) {
-  showMessage(msg, 'warning');
-}
-
-function showError(msg) {
-  showMessage(msg, 'error');
-}
-
-
-class DeviceDetailsUpdater {
-  #element;
-
-  constructor() {
-    this.#element = document.getElementById('device-details-hardware');
-  }
-
-  setHardwareDetailsText(text) {
-    this.#element.dataset.hardwareDetailsText = text;
-    return this;
-  }
-
-  setDeviceStateDetailsText(text) {
-    this.#element.dataset.deviceStateDetailsText = text;
-    return this;
-  }
-
-  update() {
-    this.#element.textContent =
-        [
-          this.#element.dataset.hardwareDetailsText,
-          this.#element.dataset.deviceStateDetailsText,
-        ].filter(e => e /*remove empty*/)
-            .join('\n');
-  }
-}  // DeviceDetailsUpdater
-
-class DeviceControlApp {
-  #deviceConnection = {};
-  #currentRotation = 0;
-  #displayDescriptions = [];
-  #buttons = {};
-  #recording = {};
-  #phys = {};
-  #deviceCount = 0;
-
-  constructor(deviceConnection) {
-    this.#deviceConnection = deviceConnection;
-  }
-
-  start() {
-    console.debug('Device description: ', this.#deviceConnection.description);
-    this.#deviceConnection.onControlMessage(msg => this.#onControlMessage(msg));
-    createToggleControl(
-        document.getElementById('keyboard-capture-control'), 'keyboard',
-        enabled => this.#onKeyboardCaptureToggle(enabled));
-    createToggleControl(
-        document.getElementById('mic-capture-control'), 'mic',
-        enabled => this.#onMicCaptureToggle(enabled));
-    createToggleControl(
-        document.getElementById('camera-control'), 'videocam',
-        enabled => this.#onCameraCaptureToggle(enabled));
-    createToggleControl(
-        document.getElementById('record-video-control'), 'movie_creation',
-        enabled => this.#onVideoCaptureToggle(enabled));
-    const audioElm = document.getElementById('device-audio');
-
-    let audioPlaybackCtrl = createToggleControl(
-        document.getElementById('audio-playback-control'), 'speaker',
-        enabled => this.#onAudioPlaybackToggle(enabled), !audioElm.paused);
-    // The audio element may start or stop playing at any time, this ensures the
-    // audio control always show the right state.
-    audioElm.onplay = () => audioPlaybackCtrl.Set(true);
-    audioElm.onpause = () => audioPlaybackCtrl.Set(false);
-
-    this.#showDeviceUI();
-  }
-
-  #showDeviceUI() {
-    window.onresize = evt => this.#resizeDeviceDisplays();
-    // Set up control panel buttons
-    this.#buttons = {};
-    this.#buttons['power'] = createControlPanelButton(
-        'power', 'Power', 'power_settings_new',
-        evt => this.#onControlPanelButton(evt));
-    this.#buttons['home'] = createControlPanelButton(
-        'home', 'Home', 'home', evt => this.#onControlPanelButton(evt));
-    this.#buttons['menu'] = createControlPanelButton(
-        'menu', 'Menu', 'menu', evt => this.#onControlPanelButton(evt));
-    this.#buttons['rotate'] = createControlPanelButton(
-        'rotate', 'Rotate', 'screen_rotation',
-        evt => this.#onRotateButton(evt));
-    this.#buttons['rotate'].adb = true;
-    this.#buttons['volumedown'] = createControlPanelButton(
-        'volumedown', 'Volume Down', 'volume_down',
-        evt => this.#onControlPanelButton(evt));
-    this.#buttons['volumeup'] = createControlPanelButton(
-        'volumeup', 'Volume Up', 'volume_up',
-        evt => this.#onControlPanelButton(evt));
-
-    createModalButton(
-        'device-details-button', 'device-details-modal',
-        'device-details-close');
-    createModalButton(
-        'bluetooth-modal-button', 'bluetooth-prompt',
-        'bluetooth-prompt-close');
-    createModalButton(
-        'bluetooth-prompt-wizard', 'bluetooth-wizard',
-        'bluetooth-wizard-close', 'bluetooth-prompt');
-    createModalButton(
-        'bluetooth-wizard-device', 'bluetooth-wizard-confirm',
-        'bluetooth-wizard-confirm-close', 'bluetooth-wizard');
-    createModalButton(
-        'bluetooth-wizard-another', 'bluetooth-wizard',
-        'bluetooth-wizard-close', 'bluetooth-wizard-confirm');
-    createModalButton(
-        'bluetooth-prompt-list', 'bluetooth-list',
-        'bluetooth-list-close', 'bluetooth-prompt');
-    createModalButton(
-        'bluetooth-prompt-console', 'bluetooth-console',
-        'bluetooth-console-close', 'bluetooth-prompt');
-    createModalButton(
-        'bluetooth-wizard-cancel', 'bluetooth-prompt',
-        'bluetooth-wizard-close', 'bluetooth-wizard');
-
-    positionModal('device-details-button', 'bluetooth-modal');
-    positionModal('device-details-button', 'bluetooth-prompt');
-    positionModal('device-details-button', 'bluetooth-wizard');
-    positionModal('device-details-button', 'bluetooth-wizard-confirm');
-    positionModal('device-details-button', 'bluetooth-list');
-    positionModal('device-details-button', 'bluetooth-console');
-
-    createButtonListener('bluetooth-prompt-list', null, this.#deviceConnection,
-      evt => this.#onRootCanalCommand(this.#deviceConnection, "list", evt));
-    createButtonListener('bluetooth-wizard-device', null, this.#deviceConnection,
-      evt => this.#onRootCanalCommand(this.#deviceConnection, "add", evt));
-    createButtonListener('bluetooth-list-trash', null, this.#deviceConnection,
-      evt => this.#onRootCanalCommand(this.#deviceConnection, "del", evt));
-    createButtonListener('bluetooth-prompt-wizard', null, this.#deviceConnection,
-      evt => this.#onRootCanalCommand(this.#deviceConnection, "list", evt));
-    createButtonListener('bluetooth-wizard-another', null, this.#deviceConnection,
-      evt => this.#onRootCanalCommand(this.#deviceConnection, "list", evt));
-
-    if (this.#deviceConnection.description.custom_control_panel_buttons.length >
-        0) {
-      document.getElementById('control-panel-custom-buttons').style.display =
-          'flex';
-      for (const button of this.#deviceConnection.description
-               .custom_control_panel_buttons) {
-        if (button.shell_command) {
-          // This button's command is handled by sending an ADB shell command.
-          this.#buttons[button.command] = createControlPanelButton(
-              button.command, button.title, button.icon_name,
-              e => this.#onCustomShellButton(button.shell_command, e),
-              'control-panel-custom-buttons');
-          this.#buttons[button.command].adb = true;
-        } else if (button.device_states) {
-          // This button corresponds to variable hardware device state(s).
-          this.#buttons[button.command] = createControlPanelButton(
-              button.command, button.title, button.icon_name,
-              this.#getCustomDeviceStateButtonCb(button.device_states),
-              'control-panel-custom-buttons');
-          for (const device_state of button.device_states) {
-            // hinge_angle is currently injected via an adb shell command that
-            // triggers a guest binary.
-            if ('hinge_angle_value' in device_state) {
-              this.#buttons[button.command].adb = true;
-            }
-          }
-        } else {
-          // This button's command is handled by custom action server.
-          this.#buttons[button.command] = createControlPanelButton(
-              button.command, button.title, button.icon_name,
-              evt => this.#onControlPanelButton(evt),
-              'control-panel-custom-buttons');
-        }
-      }
-    }
-
-    // Set up displays
-    this.#createDeviceDisplays();
-
-    // Set up audio
-    const deviceAudio = document.getElementById('device-audio');
-    for (const audio_desc of this.#deviceConnection.description.audio_streams) {
-      let stream_id = audio_desc.stream_id;
-      this.#deviceConnection.getStream(stream_id)
-          .then(stream => {
-            deviceAudio.srcObject = stream;
-            let playPromise = deviceAudio.play();
-            if (playPromise !== undefined) {
-              playPromise.catch(error => {
-                showWarning(
-                    'Audio playback is disabled, click on the speaker control to activate it');
-              });
-            }
-          })
-          .catch(e => console.error('Unable to get audio stream: ', e));
-    }
-
-    // Set up touch input
-    this.#startMouseTracking();
-
-    this.#updateDeviceHardwareDetails(
-        this.#deviceConnection.description.hardware);
-
-    // Show the error message and disable buttons when the WebRTC connection
-    // fails.
-    this.#deviceConnection.onConnectionStateChange(state => {
-      if (state == 'disconnected' || state == 'failed') {
-        this.#showWebrtcError();
-      }
-    });
-
-    let bluetoothConsole =
-        cmdConsole('bluetooth-console-view', 'bluetooth-console-input');
-    bluetoothConsole.addCommandListener(cmd => {
-      let inputArr = cmd.split(' ');
-      let command = inputArr[0];
-      inputArr.shift();
-      let args = inputArr;
-      this.#deviceConnection.sendBluetoothMessage(
-          createRootcanalMessage(command, args));
-    });
-    this.#deviceConnection.onBluetoothMessage(msg => {
-      let decoded = decodeRootcanalMessage(msg);
-      let deviceCount = btUpdateDeviceList(decoded);
-      if (deviceCount > 0) {
-        this.#deviceCount = deviceCount;
-        createButtonListener('bluetooth-list-trash', null, this.#deviceConnection,
-           evt => this.#onRootCanalCommand(this.#deviceConnection, "del", evt));
-      }
-      btUpdateAdded(decoded);
-      let phyList = btParsePhys(decoded);
-      if (phyList) {
-        this.#phys = phyList;
-      }
-      bluetoothConsole.addLine(decoded);
-    });
-  }
-
-  #onRootCanalCommand(deviceConnection, cmd, evt) {
-    if (cmd == "list") {
-      deviceConnection.sendBluetoothMessage(createRootcanalMessage("list", []));
-    }
-    if (cmd == "del") {
-      let id = evt.srcElement.getAttribute("data-device-id");
-      deviceConnection.sendBluetoothMessage(createRootcanalMessage("del", [id]));
-      deviceConnection.sendBluetoothMessage(createRootcanalMessage("list", []));
-    }
-    if (cmd == "add") {
-      let name = document.getElementById('bluetooth-wizard-name').value;
-      let type = document.getElementById('bluetooth-wizard-type').value;
-      if (type == "remote_loopback") {
-        deviceConnection.sendBluetoothMessage(createRootcanalMessage("add", [type]));
-      } else {
-        let mac = document.getElementById('bluetooth-wizard-mac').value;
-        deviceConnection.sendBluetoothMessage(createRootcanalMessage("add", [type, mac]));
-      }
-      let phyId = this.#phys["LOW_ENERGY"].toString();
-      if (type == "remote_loopback") {
-        phyId = this.#phys["BR_EDR"].toString();
-      }
-      let devId = this.#deviceCount.toString();
-      this.#deviceCount++;
-      deviceConnection.sendBluetoothMessage(createRootcanalMessage("add_device_to_phy", [devId, phyId]));
-    }
-  }
-
-  #showWebrtcError() {
-    document.getElementById('status-message').className = 'error';
-    document.getElementById('status-message').textContent =
-        'No connection to the guest device. ' +
-        'Please ensure the WebRTC process on the host machine is active.';
-    document.getElementById('status-message').style.visibility = 'visible';
-    const deviceDisplays = document.getElementById('device-displays');
-    deviceDisplays.style.display = 'none';
-    for (const [_, button] of Object.entries(this.#buttons)) {
-      button.disabled = true;
-    }
-  }
-
-  #takePhoto() {
-    const imageCapture = this.#deviceConnection.imageCapture;
-    if (imageCapture) {
-      const photoSettings = {
-        imageWidth: this.#deviceConnection.cameraWidth,
-        imageHeight: this.#deviceConnection.cameraHeight
-      };
-      imageCapture.takePhoto(photoSettings)
-          .then(blob => blob.arrayBuffer())
-          .then(buffer => this.#deviceConnection.sendOrQueueCameraData(buffer))
-          .catch(error => console.error(error));
-    }
-  }
-
-  #getCustomDeviceStateButtonCb(device_states) {
-    let states = device_states;
-    let index = 0;
-    return e => {
-      if (e.type == 'mousedown') {
-        // Reset any overridden device state.
-        adbShell('cmd device_state state reset');
-        // Send a device_state message for the current state.
-        let message = {
-          command: 'device_state',
-          ...states[index],
-        };
-        this.#deviceConnection.sendControlMessage(JSON.stringify(message));
-        console.debug('Control message sent: ', JSON.stringify(message));
-        let lidSwitchOpen = null;
-        if ('lid_switch_open' in states[index]) {
-          lidSwitchOpen = states[index].lid_switch_open;
-        }
-        let hingeAngle = null;
-        if ('hinge_angle_value' in states[index]) {
-          hingeAngle = states[index].hinge_angle_value;
-          // TODO(b/181157794): Use a custom Sensor HAL for hinge_angle
-          // injection instead of this guest binary.
-          adbShell(
-              '/vendor/bin/cuttlefish_sensor_injection hinge_angle ' +
-              states[index].hinge_angle_value);
-        }
-        // Update the Device Details view.
-        this.#updateDeviceStateDetails(lidSwitchOpen, hingeAngle);
-        // Cycle to the next state.
-        index = (index + 1) % states.length;
-      }
-    }
-  }
-
-  #resizeDeviceDisplays() {
-    // Padding between displays.
-    const deviceDisplayWidthPadding = 10;
-    // Padding for the display info above each display video.
-    const deviceDisplayHeightPadding = 38;
-
-    let deviceDisplayList = document.getElementsByClassName('device-display');
-    let deviceDisplayVideoList =
-        document.getElementsByClassName('device-display-video');
-    let deviceDisplayInfoList =
-        document.getElementsByClassName('device-display-info');
-
-    const deviceDisplays = document.getElementById('device-displays');
-    const rotationDegrees = this.#getTransformRotation(deviceDisplays);
-    const rotationRadians = rotationDegrees * Math.PI / 180;
-
-    // Auto-scale the screen based on window size.
-    let availableWidth = deviceDisplays.clientWidth;
-    let availableHeight = deviceDisplays.clientHeight - deviceDisplayHeightPadding;
-
-    // Reserve space for padding between the displays.
-    availableWidth = availableWidth -
-        (this.#displayDescriptions.length * deviceDisplayWidthPadding);
-
-    // Loop once over all of the displays to compute the total space needed.
-    let neededWidth = 0;
-    let neededHeight = 0;
-    for (let i = 0; i < deviceDisplayList.length; i++) {
-      let deviceDisplayDescription = this.#displayDescriptions[i];
-      let deviceDisplayVideo = deviceDisplayVideoList[i];
-
-      const originalDisplayWidth = deviceDisplayDescription.x_res;
-      const originalDisplayHeight = deviceDisplayDescription.y_res;
-
-      const neededBoundingBoxWidth =
-          Math.abs(Math.cos(rotationRadians) * originalDisplayWidth) +
-          Math.abs(Math.sin(rotationRadians) * originalDisplayHeight);
-      const neededBoundingBoxHeight =
-          Math.abs(Math.sin(rotationRadians) * originalDisplayWidth) +
-          Math.abs(Math.cos(rotationRadians) * originalDisplayHeight);
-
-      neededWidth = neededWidth + neededBoundingBoxWidth;
-      neededHeight = Math.max(neededHeight, neededBoundingBoxHeight);
-    }
-
-    const scaling =
-        Math.min(availableWidth / neededWidth, availableHeight / neededHeight);
-
-    // Loop again over all of the displays to set the sizes and positions.
-    let deviceDisplayLeftOffset = 0;
-    for (let i = 0; i < deviceDisplayList.length; i++) {
-      let deviceDisplay = deviceDisplayList[i];
-      let deviceDisplayVideo = deviceDisplayVideoList[i];
-      let deviceDisplayInfo = deviceDisplayInfoList[i];
-      let deviceDisplayDescription = this.#displayDescriptions[i];
-
-      let rotated = this.#currentRotation == 1 ? ' (Rotated)' : '';
-      deviceDisplayInfo.textContent = `Display ${i} - ` +
-          `${deviceDisplayDescription.x_res}x` +
-          `${deviceDisplayDescription.y_res} ` +
-          `(${deviceDisplayDescription.dpi} DPI)${rotated}`;
-
-      const originalDisplayWidth = deviceDisplayDescription.x_res;
-      const originalDisplayHeight = deviceDisplayDescription.y_res;
-
-      const scaledDisplayWidth = originalDisplayWidth * scaling;
-      const scaledDisplayHeight = originalDisplayHeight * scaling;
-
-      const neededBoundingBoxWidth =
-          Math.abs(Math.cos(rotationRadians) * originalDisplayWidth) +
-          Math.abs(Math.sin(rotationRadians) * originalDisplayHeight);
-      const neededBoundingBoxHeight =
-          Math.abs(Math.sin(rotationRadians) * originalDisplayWidth) +
-          Math.abs(Math.cos(rotationRadians) * originalDisplayHeight);
-
-      const scaledBoundingBoxWidth = neededBoundingBoxWidth * scaling;
-      const scaledBoundingBoxHeight = neededBoundingBoxHeight * scaling;
-
-      const offsetX = (scaledBoundingBoxWidth - scaledDisplayWidth) / 2;
-      const offsetY = (scaledBoundingBoxHeight - scaledDisplayHeight) / 2;
-
-      deviceDisplayVideo.style.width = scaledDisplayWidth;
-      deviceDisplayVideo.style.height = scaledDisplayHeight;
-      deviceDisplayVideo.style.transform = `translateX(${offsetX}px) ` +
-          `translateY(${offsetY}px) ` +
-          `rotateZ(${rotationDegrees}deg) `;
-
-      deviceDisplay.style.left = `${deviceDisplayLeftOffset}px`;
-      deviceDisplay.style.width = scaledBoundingBoxWidth;
-      deviceDisplay.style.height = scaledBoundingBoxHeight;
-
-      deviceDisplayLeftOffset = deviceDisplayLeftOffset + deviceDisplayWidthPadding +
-          scaledBoundingBoxWidth;
-    }
-  }
-
-  #getTransformRotation(element) {
-    if (!element.style.textIndent) {
-      return 0;
-    }
-    // Remove 'px' and convert to float.
-    return parseFloat(element.style.textIndent.slice(0, -2));
-  }
-
-  #onControlMessage(message) {
-    let message_data = JSON.parse(message.data);
-    console.debug('Control message received: ', message_data)
-    let metadata = message_data.metadata;
-    if (message_data.event == 'VIRTUAL_DEVICE_BOOT_STARTED') {
-      // Start the adb connection after receiving the BOOT_STARTED message.
-      // (This is after the adbd start message. Attempting to connect
-      // immediately after adbd starts causes issues.)
-      this.#initializeAdb();
-    }
-    if (message_data.event == 'VIRTUAL_DEVICE_SCREEN_CHANGED') {
-      if (metadata.rotation != this.#currentRotation) {
-        // Animate the screen rotation.
-        const targetRotation = metadata.rotation == 0 ? 0 : -90;
-
-        $('#device-displays')
-            .animate(
-                {
-                  textIndent: targetRotation,
-                },
-                {
-                  duration: 1000,
-                  step: (now, tween) => {
-                    this.#resizeDeviceDisplays();
-                  },
-                });
-      }
-
-      this.#currentRotation = metadata.rotation;
-    }
-    if (message_data.event == 'VIRTUAL_DEVICE_CAPTURE_IMAGE') {
-      if (this.#deviceConnection.cameraEnabled) {
-        this.#takePhoto();
-      }
-    }
-    if (message_data.event == 'VIRTUAL_DEVICE_DISPLAY_POWER_MODE_CHANGED') {
-      this.#updateDisplayVisibility(metadata.display, metadata.mode);
-    }
-  }
-
-  #updateDeviceStateDetails(lidSwitchOpen, hingeAngle) {
-    let deviceStateDetailsTextLines = [];
-    if (lidSwitchOpen != null) {
-      let state = lidSwitchOpen ? 'Opened' : 'Closed';
-      deviceStateDetailsTextLines.push(`Lid Switch - ${state}`);
-    }
-    if (hingeAngle != null) {
-      deviceStateDetailsTextLines.push(`Hinge Angle - ${hingeAngle}`);
-    }
-    let deviceStateDetailsText = deviceStateDetailsTextLines.join('\n');
-    new DeviceDetailsUpdater()
-        .setDeviceStateDetailsText(deviceStateDetailsText)
-        .update();
-  }
-
-  #updateDeviceHardwareDetails(hardware) {
-    let hardwareDetailsTextLines = [];
-    Object.keys(hardware).forEach((key) => {
-      let value = hardware[key];
-      hardwareDetailsTextLines.push(`${key} - ${value}`);
-    });
-
-    let hardwareDetailsText = hardwareDetailsTextLines.join('\n');
-    new DeviceDetailsUpdater()
-        .setHardwareDetailsText(hardwareDetailsText)
-        .update();
-  }
-
-  // Creates a <video> element and a <div> container element for each display.
-  // The extra <div> container elements are used to maintain the width and
-  // height of the device as the CSS 'transform' property used on the <video>
-  // element for rotating the device only affects the visuals of the element
-  // and not its layout.
-  #createDeviceDisplays() {
-    console.debug(
-        'Display descriptions: ', this.#deviceConnection.description.displays);
-    this.#displayDescriptions = this.#deviceConnection.description.displays;
-    let anyDisplayLoaded = false;
-    const deviceDisplays = document.getElementById('device-displays');
-    for (const deviceDisplayDescription of this.#displayDescriptions) {
-      let deviceDisplay = document.createElement('div');
-      deviceDisplay.classList.add('device-display');
-      // Start the screen as hidden. Only show when data is ready.
-      deviceDisplay.style.visibility = 'hidden';
-
-      let deviceDisplayInfo = document.createElement("div");
-      deviceDisplayInfo.classList.add("device-display-info");
-      deviceDisplayInfo.id = deviceDisplayDescription.stream_id + '_info';
-      deviceDisplay.appendChild(deviceDisplayInfo);
-
-      let deviceDisplayVideo = document.createElement('video');
-      deviceDisplayVideo.autoplay = true;
-      deviceDisplayVideo.muted = true;
-      deviceDisplayVideo.id = deviceDisplayDescription.stream_id;
-      deviceDisplayVideo.classList.add('device-display-video');
-      deviceDisplayVideo.addEventListener('loadeddata', (evt) => {
-        if (!anyDisplayLoaded) {
-          anyDisplayLoaded = true;
-          this.#onDeviceDisplayLoaded();
-        }
-      });
-      deviceDisplay.appendChild(deviceDisplayVideo);
-
-      deviceDisplays.appendChild(deviceDisplay);
-
-      let stream_id = deviceDisplayDescription.stream_id;
-      this.#deviceConnection.getStream(stream_id)
-          .then(stream => {
-            deviceDisplayVideo.srcObject = stream;
-          })
-          .catch(e => console.error('Unable to get display stream: ', e));
-    }
-  }
-
-  #initializeAdb() {
-    init_adb(
-        this.#deviceConnection, () => this.#showAdbConnected(),
-        () => this.#showAdbError());
-  }
-
-  #showAdbConnected() {
-    // Screen changed messages are not reported until after boot has completed.
-    // Certain default adb buttons change screen state, so wait for boot
-    // completion before enabling these buttons.
-    document.getElementById('status-message').className = 'connected';
-    document.getElementById('status-message').textContent =
-        'adb connection established successfully.';
-    setTimeout(() => {
-      document.getElementById('status-message').style.visibility = 'hidden';
-    }, 5000);
-    for (const [_, button] of Object.entries(this.#buttons)) {
-      if (button.adb) {
-        button.disabled = false;
-      }
-    }
-  }
-
-  #showAdbError() {
-    document.getElementById('status-message').className = 'error';
-    document.getElementById('status-message').textContent =
-        'adb connection failed.';
-    document.getElementById('status-message').style.visibility = 'visible';
-    for (const [_, button] of Object.entries(this.#buttons)) {
-      if (button.adb) {
-        button.disabled = true;
-      }
-    }
-  }
-
-  #onDeviceDisplayLoaded() {
-    document.getElementById('status-message').textContent =
-        'Awaiting bootup and adb connection. Please wait...';
-    this.#resizeDeviceDisplays();
-
-    let deviceDisplayList = document.getElementsByClassName('device-display');
-    for (const deviceDisplay of deviceDisplayList) {
-      deviceDisplay.style.visibility = 'visible';
-    }
-
-    // Enable the buttons after the screen is visible.
-    for (const [key, button] of Object.entries(this.#buttons)) {
-      if (!button.adb) {
-        button.disabled = false;
-      }
-    }
-    // Start the adb connection if it is not already started.
-    this.#initializeAdb();
-  }
-
-  #onRotateButton(e) {
-    // Attempt to init adb again, in case the initial connection failed.
-    // This succeeds immediately if already connected.
-    this.#initializeAdb();
-    if (e.type == 'mousedown') {
-      adbShell(
-          '/vendor/bin/cuttlefish_sensor_injection rotate ' +
-          (this.#currentRotation == 0 ? 'landscape' : 'portrait'))
-    }
-  }
-
-  #onControlPanelButton(e) {
-    if (e.type == 'mouseout' && e.which == 0) {
-      // Ignore mouseout events if no mouse button is pressed.
-      return;
-    }
-    this.#deviceConnection.sendControlMessage(JSON.stringify({
-      command: e.target.dataset.command,
-      button_state: e.type == 'mousedown' ? 'down' : 'up',
-    }));
-  }
-
-  #onKeyboardCaptureToggle(enabled) {
-    if (enabled) {
-      document.addEventListener('keydown', evt => this.#onKeyEvent(evt));
-      document.addEventListener('keyup', evt => this.#onKeyEvent(evt));
-    } else {
-      document.removeEventListener('keydown', evt => this.#onKeyEvent(evt));
-      document.removeEventListener('keyup', evt => this.#onKeyEvent(evt));
-    }
-  }
-
-  #onKeyEvent(e) {
-    e.preventDefault();
-    this.#deviceConnection.sendKeyEvent(e.code, e.type);
-  }
-
-  #startMouseTracking() {
-    let $this = this;
-    let mouseIsDown = false;
-    let mouseCtx = {
-      down: false,
-      touchIdSlotMap: new Map(),
-      touchSlots: [],
-    };
-    function onStartDrag(e) {
-      e.preventDefault();
-
-      // console.debug("mousedown at " + e.pageX + " / " + e.pageY);
-      mouseCtx.down = true;
-
-      $this.#sendEventUpdate(mouseCtx, e);
-    }
-
-    function onEndDrag(e) {
-      e.preventDefault();
-
-      // console.debug("mouseup at " + e.pageX + " / " + e.pageY);
-      mouseCtx.down = false;
-
-      $this.#sendEventUpdate(mouseCtx, e);
-    }
-
-    function onContinueDrag(e) {
-      e.preventDefault();
-
-      // console.debug("mousemove at " + e.pageX + " / " + e.pageY + ", down=" +
-      // mouseIsDown);
-      if (mouseCtx.down) {
-        $this.#sendEventUpdate(mouseCtx, e);
-      }
-    }
-
-    let deviceDisplayList = document.getElementsByClassName('device-display');
-    if (window.PointerEvent) {
-      for (const deviceDisplay of deviceDisplayList) {
-        deviceDisplay.addEventListener('pointerdown', onStartDrag);
-        deviceDisplay.addEventListener('pointermove', onContinueDrag);
-        deviceDisplay.addEventListener('pointerup', onEndDrag);
-      }
-    } else if (window.TouchEvent) {
-      for (const deviceDisplay of deviceDisplayList) {
-        deviceDisplay.addEventListener('touchstart', onStartDrag);
-        deviceDisplay.addEventListener('touchmove', onContinueDrag);
-        deviceDisplay.addEventListener('touchend', onEndDrag);
-      }
-    } else if (window.MouseEvent) {
-      for (const deviceDisplay of deviceDisplayList) {
-        deviceDisplay.addEventListener('mousedown', onStartDrag);
-        deviceDisplay.addEventListener('mousemove', onContinueDrag);
-        deviceDisplay.addEventListener('mouseup', onEndDrag);
-      }
-    }
-  }
-
-  #sendEventUpdate(ctx, e) {
-    let eventType = e.type.substring(0, 5);
-
-    // The <video> element:
-    const deviceDisplay = e.target;
-
-    // Before the first video frame arrives there is no way to know width and
-    // height of the device's screen, so turn every click into a click at 0x0.
-    // A click at that position is not more dangerous than anywhere else since
-    // the user is clicking blind anyways.
-    const videoWidth = deviceDisplay.videoWidth ? deviceDisplay.videoWidth : 1;
-    const videoHeight =
-        deviceDisplay.videoHeight ? deviceDisplay.videoHeight : 1;
-    const elementWidth =
-        deviceDisplay.offsetWidth ? deviceDisplay.offsetWidth : 1;
-    const elementHeight =
-        deviceDisplay.offsetHeight ? deviceDisplay.offsetHeight : 1;
-
-    // vh*ew > eh*vw? then scale h instead of w
-    const scaleHeight = videoHeight * elementWidth > videoWidth * elementHeight;
-    let elementScaling = 0, videoScaling = 0;
-    if (scaleHeight) {
-      elementScaling = elementHeight;
-      videoScaling = videoHeight;
-    } else {
-      elementScaling = elementWidth;
-      videoScaling = videoWidth;
-    }
-
-    // The screen uses the 'object-fit: cover' property in order to completely
-    // fill the element while maintaining the screen content's aspect ratio.
-    // Therefore:
-    // - If vh*ew > eh*vw, w is scaled so that content width == element width
-    // - Otherwise,        h is scaled so that content height == element height
-    const scaleWidth = videoHeight * elementWidth > videoWidth * elementHeight;
-
-    // Convert to coordinates relative to the video by scaling.
-    // (This matches the scaling used by 'object-fit: cover'.)
-    //
-    // This scaling is needed to translate from the in-browser x/y to the
-    // on-device x/y.
-    //   - When the device screen has not been resized, this is simple: scale
-    //     the coordinates based on the ratio between the input video size and
-    //     the in-browser size.
-    //   - When the device screen has been resized, this scaling is still needed
-    //     even though the in-browser size and device size are identical. This
-    //     is due to the way WindowManager handles a resized screen, resized via
-    //     `adb shell wm size`:
-    //       - The ABS_X and ABS_Y max values of the screen retain their
-    //         original values equal to the value set when launching the device
-    //         (which equals the video size here).
-    //       - The sent ABS_X and ABS_Y values need to be scaled based on the
-    //         ratio between the max size (video size) and in-browser size.
-    const scaling =
-        scaleWidth ? videoWidth / elementWidth : videoHeight / elementHeight;
-
-    let xArr = [];
-    let yArr = [];
-    let idArr = [];
-    let slotArr = [];
-
-    if (eventType == 'mouse' || eventType == 'point') {
-      xArr.push(e.offsetX);
-      yArr.push(e.offsetY);
-
-      let thisId = -1;
-      if (eventType == 'point') {
-        thisId = e.pointerId;
-      }
-
-      slotArr.push(0);
-      idArr.push(thisId);
-    } else if (eventType == 'touch') {
-      // touchstart: list of touch points that became active
-      // touchmove: list of touch points that changed
-      // touchend: list of touch points that were removed
-      let changes = e.changedTouches;
-      let rect = e.target.getBoundingClientRect();
-      for (let i = 0; i < changes.length; i++) {
-        xArr.push(changes[i].pageX - rect.left);
-        yArr.push(changes[i].pageY - rect.top);
-        if (ctx.touchIdSlotMap.has(changes[i].identifier)) {
-          let slot = ctx.touchIdSlotMap.get(changes[i].identifier);
-
-          slotArr.push(slot);
-          if (e.type == 'touchstart') {
-            // error
-            console.error('touchstart when already have slot');
-            return;
-          } else if (e.type == 'touchmove') {
-            idArr.push(changes[i].identifier);
-          } else if (e.type == 'touchend') {
-            ctx.touchSlots[slot] = false;
-            ctx.touchIdSlotMap.delete(changes[i].identifier);
-            idArr.push(-1);
-          }
-        } else {
-          if (e.type == 'touchstart') {
-            let slot = -1;
-            for (let j = 0; j < ctx.touchSlots.length; j++) {
-              if (!ctx.touchSlots[j]) {
-                slot = j;
-                break;
-              }
-            }
-            if (slot == -1) {
-              slot = ctx.touchSlots.length;
-              ctx.touchSlots.push(true);
-            }
-            slotArr.push(slot);
-            ctx.touchSlots[slot] = true;
-            ctx.touchIdSlotMap.set(changes[i].identifier, slot);
-            idArr.push(changes[i].identifier);
-          } else if (e.type == 'touchmove') {
-            // error
-            console.error('touchmove when no slot');
-            return;
-          } else if (e.type == 'touchend') {
-            // error
-            console.error('touchend when no slot');
-            return;
-          }
-        }
-      }
-    }
-
-    for (let i = 0; i < xArr.length; i++) {
-      xArr[i] = xArr[i] * scaling;
-      yArr[i] = yArr[i] * scaling;
-
-      // Substract the offset produced by the difference in aspect ratio, if
-      // any.
-      if (scaleWidth) {
-        // Width was scaled, leaving excess content height, so subtract from y.
-        yArr[i] -= (elementHeight * scaling - videoHeight) / 2;
-      } else {
-        // Height was scaled, leaving excess content width, so subtract from x.
-        xArr[i] -= (elementWidth * scaling - videoWidth) / 2;
-      }
-
-      xArr[i] = Math.trunc(xArr[i]);
-      yArr[i] = Math.trunc(yArr[i]);
-    }
-
-    // NOTE: Rotation is handled automatically because the CSS rotation through
-    // transforms also rotates the coordinates of events on the object.
-
-    const display_label = deviceDisplay.id;
-
-    this.#deviceConnection.sendMultiTouch(
-        {idArr, xArr, yArr, down: ctx.down, slotArr, display_label});
-  }
-
-  #updateDisplayVisibility(displayId, powerMode) {
-    const display = document.getElementById('display_' + displayId).parentElement;
-    if (display == null) {
-      console.error('Unknown display id: ' + displayId);
-      return;
-    }
-    powerMode = powerMode.toLowerCase();
-    switch (powerMode) {
-      case 'on':
-        display.style.visibility = 'visible';
-        break;
-      case 'off':
-        display.style.visibility = 'hidden';
-        break;
-      default:
-        console.error('Display ' + displayId + ' has unknown display power mode: ' + powerMode);
-    }
-  }
-
-  #onMicCaptureToggle(enabled) {
-    return this.#deviceConnection.useMic(enabled);
-  }
-
-  #onCameraCaptureToggle(enabled) {
-    return this.#deviceConnection.useCamera(enabled);
-  }
-
-  #getZeroPaddedString(value, desiredLength) {
-    const s = String(value);
-    return '0'.repeat(desiredLength - s.length) + s;
-  }
-
-  #getTimestampString() {
-    const now = new Date();
-    return [
-      now.getFullYear(),
-      this.#getZeroPaddedString(now.getMonth(), 2),
-      this.#getZeroPaddedString(now.getDay(), 2),
-      this.#getZeroPaddedString(now.getHours(), 2),
-      this.#getZeroPaddedString(now.getMinutes(), 2),
-      this.#getZeroPaddedString(now.getSeconds(), 2),
-    ].join('_');
-  }
-
-  #onVideoCaptureToggle(enabled) {
-    const recordToggle = document.getElementById('record-video-control');
-    if (enabled) {
-      let recorders = [];
-
-      const timestamp = this.#getTimestampString();
-
-      let deviceDisplayVideoList =
-        document.getElementsByClassName('device-display-video');
-      for (let i = 0; i < deviceDisplayVideoList.length; i++) {
-        const deviceDisplayVideo = deviceDisplayVideoList[i];
-
-        const recorder = new MediaRecorder(deviceDisplayVideo.captureStream());
-        const recordedData = [];
-
-        recorder.ondataavailable = event => recordedData.push(event.data);
-        recorder.onstop = event => {
-          const recording = new Blob(recordedData, { type: "video/webm" });
-
-          const downloadLink = document.createElement('a');
-          downloadLink.setAttribute('download', timestamp + '_display_' + i + '.webm');
-          downloadLink.setAttribute('href', URL.createObjectURL(recording));
-          downloadLink.click();
-        };
-
-        recorder.start();
-        recorders.push(recorder);
-      }
-      this.#recording['recorders'] = recorders;
-
-      recordToggle.style.backgroundColor = 'red';
-    } else {
-      for (const recorder of this.#recording['recorders']) {
-        recorder.stop();
-      }
-      recordToggle.style.backgroundColor = '';
-    }
-    return Promise.resolve(enabled);
-  }
-
-  #onAudioPlaybackToggle(enabled) {
-    const audioElem = document.getElementById('device-audio');
-    if (enabled) {
-      audioElem.play();
-    } else {
-      audioElem.pause();
-    }
-  }
-
-  #onCustomShellButton(shell_command, e) {
-    // Attempt to init adb again, in case the initial connection failed.
-    // This succeeds immediately if already connected.
-    this.#initializeAdb();
-    if (e.type == 'mousedown') {
-      adbShell(shell_command);
-    }
-  }
-}  // DeviceControlApp
-
-window.addEventListener("load", async evt => {
-  try {
-    setupMessages();
-    let connectorModule = await import('./server_connector.js');
-    let deviceConnection = await ConnectDevice(
-        connectorModule.deviceId(), await connectorModule.createConnector());
-    let deviceControlApp = new DeviceControlApp(deviceConnection);
-    deviceControlApp.start();
-    document.getElementById('device-connection').style.display = 'block';
-  } catch(err) {
-    console.error('Unable to connect: ', err);
-    showError(
-      'No connection to the guest device. ' +
-      'Please ensure the WebRTC process on the host machine is active.');
-  }
-  document.getElementById('loader').style.display = 'none';
-});
diff --git a/host/frontend/webrtc/client/js/cf_webrtc.js b/host/frontend/webrtc/client/js/cf_webrtc.js
deleted file mode 100644
index 5c91383..0000000
--- a/host/frontend/webrtc/client/js/cf_webrtc.js
+++ /dev/null
@@ -1,496 +0,0 @@
-/*
- * Copyright (C) 2019 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-function createDataChannel(pc, label, onMessage) {
-  console.debug('creating data channel: ' + label);
-  let dataChannel = pc.createDataChannel(label);
-  // Return an object with a send function like that of the dataChannel, but
-  // that only actually sends over the data channel once it has connected.
-  return {
-    channelPromise: new Promise((resolve, reject) => {
-      dataChannel.onopen = (event) => {
-        resolve(dataChannel);
-      };
-      dataChannel.onclose = () => {
-        console.debug(
-            'Data channel=' + label + ' state=' + dataChannel.readyState);
-      };
-      dataChannel.onmessage = onMessage ? onMessage : (msg) => {
-        console.debug('Data channel=' + label + ' data="' + msg.data + '"');
-      };
-      dataChannel.onerror = err => {
-        reject(err);
-      };
-    }),
-    send: function(msg) {
-      this.channelPromise = this.channelPromise.then(channel => {
-        channel.send(msg);
-        return channel;
-      })
-    },
-  };
-}
-
-function awaitDataChannel(pc, label, onMessage) {
-  console.debug('expecting data channel: ' + label);
-  // Return an object with a send function like that of the dataChannel, but
-  // that only actually sends over the data channel once it has connected.
-  return {
-    channelPromise: new Promise((resolve, reject) => {
-      let prev_ondatachannel = pc.ondatachannel;
-      pc.ondatachannel = ev => {
-        let dataChannel = ev.channel;
-        if (dataChannel.label == label) {
-          dataChannel.onopen = (event) => {
-            resolve(dataChannel);
-          };
-          dataChannel.onclose = () => {
-            console.debug(
-                'Data channel=' + label + ' state=' + dataChannel.readyState);
-          };
-          dataChannel.onmessage = onMessage ? onMessage : (msg) => {
-            console.debug('Data channel=' + label + ' data="' + msg.data + '"');
-          };
-          dataChannel.onerror = err => {
-            reject(err);
-          };
-        } else if (prev_ondatachannel) {
-          prev_ondatachannel(ev);
-        }
-      };
-    }),
-    send: function(msg) {
-      this.channelPromise = this.channelPromise.then(channel => {
-        channel.send(msg);
-        return channel;
-      })
-    },
-  };
-}
-
-class DeviceConnection {
-  #pc;
-  #control;
-  #description;
-
-  #cameraDataChannel;
-  #cameraInputQueue;
-  #controlChannel;
-  #inputChannel;
-  #adbChannel;
-  #bluetoothChannel;
-
-  #streams;
-  #streamPromiseResolvers;
-  #micSenders = [];
-  #cameraSenders = [];
-  #camera_res_x;
-  #camera_res_y;
-
-  #onAdbMessage;
-  #onControlMessage;
-  #onBluetoothMessage;
-
-  constructor(pc, control) {
-    this.#pc = pc;
-    this.#control = control;
-    this.#cameraDataChannel = pc.createDataChannel('camera-data-channel');
-    this.#cameraDataChannel.binaryType = 'arraybuffer';
-    this.#cameraInputQueue = new Array();
-    var self = this;
-    this.#cameraDataChannel.onbufferedamountlow = () => {
-      if (self.#cameraInputQueue.length > 0) {
-        self.sendCameraData(self.#cameraInputQueue.shift());
-      }
-    };
-    this.#inputChannel = createDataChannel(pc, 'input-channel');
-    this.#adbChannel = createDataChannel(pc, 'adb-channel', (msg) => {
-      if (this.#onAdbMessage) {
-        this.#onAdbMessage(msg.data);
-      } else {
-        console.error('Received unexpected ADB message');
-      }
-    });
-    this.#controlChannel = awaitDataChannel(pc, 'device-control', (msg) => {
-      if (this.#onControlMessage) {
-        this.#onControlMessage(msg);
-      } else {
-        console.error('Received unexpected Control message');
-      }
-    });
-    this.#bluetoothChannel =
-        createDataChannel(pc, 'bluetooth-channel', (msg) => {
-          if (this.#onBluetoothMessage) {
-            this.#onBluetoothMessage(msg.data);
-          } else {
-            console.error('Received unexpected Bluetooth message');
-          }
-        });
-    this.#streams = {};
-    this.#streamPromiseResolvers = {};
-
-    pc.addEventListener('track', e => {
-      console.debug('Got remote stream: ', e);
-      for (const stream of e.streams) {
-        this.#streams[stream.id] = stream;
-        if (this.#streamPromiseResolvers[stream.id]) {
-          for (let resolver of this.#streamPromiseResolvers[stream.id]) {
-            resolver();
-          }
-          delete this.#streamPromiseResolvers[stream.id];
-        }
-      }
-    });
-  }
-
-  set description(desc) {
-    this.#description = desc;
-  }
-
-  get description() {
-    return this.#description;
-  }
-
-  get imageCapture() {
-    if (this.#cameraSenders && this.#cameraSenders.length > 0) {
-      let track = this.#cameraSenders[0].track;
-      return new ImageCapture(track);
-    }
-    return undefined;
-  }
-
-  get cameraWidth() {
-    return this.#camera_res_x;
-  }
-
-  get cameraHeight() {
-    return this.#camera_res_y;
-  }
-
-  get cameraEnabled() {
-    return this.#cameraSenders && this.#cameraSenders.length > 0;
-  }
-
-  getStream(stream_id) {
-    return new Promise((resolve, reject) => {
-      if (this.#streams[stream_id]) {
-        resolve(this.#streams[stream_id]);
-      } else {
-        if (!this.#streamPromiseResolvers[stream_id]) {
-          this.#streamPromiseResolvers[stream_id] = [];
-        }
-        this.#streamPromiseResolvers[stream_id].push(resolve);
-      }
-    });
-  }
-
-  #sendJsonInput(evt) {
-    this.#inputChannel.send(JSON.stringify(evt));
-  }
-
-  sendMousePosition({x, y, down, display_label}) {
-    this.#sendJsonInput({
-      type: 'mouse',
-      down: down ? 1 : 0,
-      x,
-      y,
-      display_label,
-    });
-  }
-
-  // TODO (b/124121375): This should probably be an array of pointer events and
-  // have different properties.
-  sendMultiTouch({idArr, xArr, yArr, down, slotArr, display_label}) {
-    this.#sendJsonInput({
-      type: 'multi-touch',
-      id: idArr,
-      x: xArr,
-      y: yArr,
-      down: down ? 1 : 0,
-      slot: slotArr,
-      display_label: display_label,
-    });
-  }
-
-  sendKeyEvent(code, type) {
-    this.#sendJsonInput({type: 'keyboard', keycode: code, event_type: type});
-  }
-
-  disconnect() {
-    this.#pc.close();
-  }
-
-  // Sends binary data directly to the in-device adb daemon (skipping the host)
-  sendAdbMessage(msg) {
-    this.#adbChannel.send(msg);
-  }
-
-  // Provide a callback to receive data from the in-device adb daemon
-  onAdbMessage(cb) {
-    this.#onAdbMessage = cb;
-  }
-
-  // Send control commands to the device
-  sendControlMessage(msg) {
-    this.#controlChannel.send(msg);
-  }
-
-  async #useDevice(in_use, senders_arr, device_opt) {
-    // An empty array means no tracks are currently in use
-    if (senders_arr.length > 0 === !!in_use) {
-      console.warn('Device is already ' + (in_use ? '' : 'not ') + 'in use');
-      return in_use;
-    }
-    let renegotiation_needed = false;
-    if (in_use) {
-      try {
-        let stream = await navigator.mediaDevices.getUserMedia(device_opt);
-        stream.getTracks().forEach(track => {
-          console.info(`Using ${track.kind} device: ${track.label}`);
-          senders_arr.push(this.#pc.addTrack(track));
-          renegotiation_needed = true;
-        });
-      } catch (e) {
-        console.error('Failed to add stream to peer connection: ', e);
-        // Don't return yet, if there were errors some tracks may have been
-        // added so the connection should be renegotiated again.
-      }
-    } else {
-      for (const sender of senders_arr) {
-        console.info(
-            `Removing ${sender.track.kind} device: ${sender.track.label}`);
-        let track = sender.track;
-        track.stop();
-        this.#pc.removeTrack(sender);
-        renegotiation_needed = true;
-      }
-      // Empty the array passed by reference, just assigning [] won't do that.
-      senders_arr.length = 0;
-    }
-    if (renegotiation_needed) {
-      this.#control.renegotiateConnection();
-    }
-    // Return the new state
-    return senders_arr.length > 0;
-  }
-
-  async useMic(in_use) {
-    return this.#useDevice(in_use, this.#micSenders, {audio: true, video: false});
-  }
-
-  async useCamera(in_use) {
-    return this.#useDevice(in_use, this.#micSenders, {audio: false, video: true});
-  }
-
-  sendCameraResolution(stream) {
-    const cameraTracks = stream.getVideoTracks();
-    if (cameraTracks.length > 0) {
-      const settings = cameraTracks[0].getSettings();
-      this.#camera_res_x = settings.width;
-      this.#camera_res_y = settings.height;
-      this.sendControlMessage(JSON.stringify({
-        command: 'camera_settings',
-        width: settings.width,
-        height: settings.height,
-        frame_rate: settings.frameRate,
-        facing: settings.facingMode
-      }));
-    }
-  }
-
-  sendOrQueueCameraData(data) {
-    if (this.#cameraDataChannel.bufferedAmount > 0 ||
-        this.#cameraInputQueue.length > 0) {
-      this.#cameraInputQueue.push(data);
-    } else {
-      this.sendCameraData(data);
-    }
-  }
-
-  sendCameraData(data) {
-    const MAX_SIZE = 65535;
-    const END_MARKER = 'EOF';
-    for (let i = 0; i < data.byteLength; i += MAX_SIZE) {
-      // range is clamped to the valid index range
-      this.#cameraDataChannel.send(data.slice(i, i + MAX_SIZE));
-    }
-    this.#cameraDataChannel.send(END_MARKER);
-  }
-
-  // Provide a callback to receive control-related comms from the device
-  onControlMessage(cb) {
-    this.#onControlMessage = cb;
-  }
-
-  sendBluetoothMessage(msg) {
-    this.#bluetoothChannel.send(msg);
-  }
-
-  onBluetoothMessage(cb) {
-    this.#onBluetoothMessage = cb;
-  }
-
-  // Provide a callback to receive connectionstatechange states.
-  onConnectionStateChange(cb) {
-    this.#pc.addEventListener(
-        'connectionstatechange', evt => cb(this.#pc.connectionState));
-  }
-}
-
-class Controller {
-  #pc;
-  #serverConnector;
-
-  constructor(serverConnector) {
-    this.#serverConnector = serverConnector;
-    serverConnector.onDeviceMsg(msg => this.#onDeviceMessage(msg));
-  }
-
-  #onDeviceMessage(message) {
-    let type = message.type;
-    switch (type) {
-      case 'offer':
-        this.#onOffer({type: 'offer', sdp: message.sdp});
-        break;
-      case 'answer':
-        this.#onAnswer({type: 'answer', sdp: message.sdp});
-        break;
-      case 'ice-candidate':
-          this.#onIceCandidate(new RTCIceCandidate({
-            sdpMid: message.mid,
-            sdpMLineIndex: message.mLineIndex,
-            candidate: message.candidate
-          }));
-        break;
-      case 'error':
-        console.error('Device responded with error message: ', message.error);
-        break;
-      default:
-        console.error('Unrecognized message type from device: ', type);
-    }
-  }
-
-  async #sendClientDescription(desc) {
-    console.debug('sendClientDescription');
-    return this.#serverConnector.sendToDevice({type: 'answer', sdp: desc.sdp});
-  }
-
-  async #sendIceCandidate(candidate) {
-    console.debug('sendIceCandidate');
-    return this.#serverConnector.sendToDevice({type: 'ice-candidate', candidate});
-  }
-
-  async #onOffer(desc) {
-    console.debug('Remote description (offer): ', desc);
-    try {
-      await this.#pc.setRemoteDescription(desc);
-      let answer = await this.#pc.createAnswer();
-      console.debug('Answer: ', answer);
-      await this.#pc.setLocalDescription(answer);
-      await this.#sendClientDescription(answer);
-    } catch (e) {
-      console.error('Error processing remote description (offer)', e)
-      throw e;
-    }
-  }
-
-  async #onAnswer(answer) {
-    console.debug('Remote description (answer): ', answer);
-    try {
-      await this.#pc.setRemoteDescription(answer);
-    } catch (e) {
-      console.error('Error processing remote description (answer)', e)
-      throw e;
-    }
-  }
-
-  #onIceCandidate(iceCandidate) {
-    console.debug(`Remote ICE Candidate: `, iceCandidate);
-    this.#pc.addIceCandidate(iceCandidate);
-  }
-
-  ConnectDevice(pc) {
-    this.#pc = pc;
-    console.debug('ConnectDevice');
-    // ICE candidates will be generated when we add the offer. Adding it here
-    // instead of in _onOffer because this function is called once per peer
-    // connection, while _onOffer may be called more than once due to
-    // renegotiations.
-    this.#pc.addEventListener('icecandidate', evt => {
-      if (evt.candidate) this.#sendIceCandidate(evt.candidate);
-    });
-    this.#serverConnector.sendToDevice({type: 'request-offer'});
-  }
-
-  async renegotiateConnection() {
-    console.debug('Re-negotiating connection');
-    let offer = await this.#pc.createOffer();
-    console.debug('Local description (offer): ', offer);
-    await this.#pc.setLocalDescription(offer);
-    this.#serverConnector.sendToDevice({type: 'offer', sdp: offer.sdp});
-  }
-}
-
-function createPeerConnection(infra_config) {
-  let pc_config = {iceServers: []};
-  for (const stun of infra_config.ice_servers) {
-    pc_config.iceServers.push({urls: 'stun:' + stun});
-  }
-  let pc = new RTCPeerConnection(pc_config);
-
-  pc.addEventListener('icecandidate', evt => {
-    console.debug('Local ICE Candidate: ', evt.candidate);
-  });
-  pc.addEventListener('iceconnectionstatechange', evt => {
-    console.debug(`ICE State Change: ${pc.iceConnectionState}`);
-  });
-  pc.addEventListener(
-      'connectionstatechange',
-      evt => console.debug(
-          `WebRTC Connection State Change: ${pc.connectionState}`));
-  return pc;
-}
-
-export async function Connect(deviceId, serverConnector) {
-  let requestRet = await serverConnector.requestDevice(deviceId);
-  let deviceInfo = requestRet.deviceInfo;
-  let infraConfig = requestRet.infraConfig;
-  console.debug('Device available:');
-  console.debug(deviceInfo);
-  let pc_config = {iceServers: []};
-  if (infraConfig.ice_servers && infraConfig.ice_servers.length > 0) {
-    for (const server of infraConfig.ice_servers) {
-      pc_config.iceServers.push(server);
-    }
-  }
-  let pc = createPeerConnection(infraConfig);
-
-  let control = new Controller(serverConnector);
-  let deviceConnection = new DeviceConnection(pc, control);
-  deviceConnection.description = deviceInfo;
-
-  return new Promise((resolve, reject) => {
-    pc.addEventListener('connectionstatechange', evt => {
-      let state = pc.connectionState;
-      if (state == 'connected') {
-        resolve(deviceConnection);
-      } else if (state == 'failed') {
-        reject(evt);
-      }
-    });
-    control.ConnectDevice(pc);
-  });
-}
diff --git a/host/frontend/webrtc/client/js/controls.js b/host/frontend/webrtc/client/js/controls.js
deleted file mode 100644
index c9aaac4..0000000
--- a/host/frontend/webrtc/client/js/controls.js
+++ /dev/null
@@ -1,342 +0,0 @@
-/*
- * Copyright (C) 2021 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-// Creates a "toggle control", which is a stylized checkbox with an icon. The
-// onToggleCb callback is called every time the control changes state with the
-// new toggle position (true for ON) and is expected to return a promise of the
-// new toggle position which can resolve to the opposite position of the one
-// received if there was error.
-function createToggleControl(elm, iconName, onToggleCb, initialState = false) {
-  let icon = document.createElement('span');
-  icon.classList.add('toggle-control-icon');
-  icon.classList.add('material-icons-outlined');
-  if (iconName) {
-    icon.appendChild(document.createTextNode(iconName));
-  }
-  elm.appendChild(icon);
-  let toggle = document.createElement('label');
-  toggle.classList.add('toggle-control-switch');
-  let input = document.createElement('input');
-  input.type = 'checkbox';
-  input.checked = !!initialState;
-  input.onchange = e => {
-    let nextPr = onToggleCb(e.target.checked);
-    if (nextPr && 'then' in nextPr) {
-      nextPr.then(checked => {
-        e.target.checked = !!checked;
-      });
-    }
-  };
-  toggle.appendChild(input);
-  let slider = document.createElement('span');
-  slider.classList.add('toggle-control-slider');
-  toggle.appendChild(slider);
-  elm.classList.add('toggle-control');
-  elm.appendChild(toggle);
-  return {
-    // Sets the state of the toggle control. This only affects the
-    // visible state of the control in the UI, it doesn't affect the
-    // state of the underlying resources. It's most useful to make
-    // changes of said resources visible to the user.
-    Set: checked => input.checked = !!checked,
-  };
-}
-
-function createButtonListener(button_id_class, func,
-  deviceConnection, listener) {
-  let buttons = [];
-  let ele = document.getElementById(button_id_class);
-  if (ele != null) {
-    buttons.push(ele);
-  } else {
-    buttons = document.getElementsByClassName(button_id_class);
-  }
-  for (var button of buttons) {
-    if (func != null) {
-      button.onclick = func;
-    }
-    button.addEventListener('mousedown', listener);
-  }
-}
-
-function createInputListener(input_id, func, listener) {
-  input = document.getElementById(input_id);
-  if (func != null) {
-    input.oninput = func;
-  }
-  input.addEventListener('input', listener);
-}
-
-function validateMacAddress(val) {
-  var regex = /^([0-9A-Fa-f]{2}[:-]){5}([0-9A-Fa-f]{2})$/;
-  return (regex.test(val));
-}
-
-function validateMacWrapper() {
-  let type = document.getElementById('bluetooth-wizard-type').value;
-  let button = document.getElementById("bluetooth-wizard-device");
-  let macField = document.getElementById('bluetooth-wizard-mac');
-  if (this.id == 'bluetooth-wizard-type') {
-    if (type == "remote_loopback") {
-      button.disabled = false;
-      macField.setCustomValidity('');
-      macField.disabled = true;
-      macField.required = false;
-      macField.placeholder = 'N/A';
-      macField.value = '';
-      return;
-    }
-  }
-  macField.disabled = false;
-  macField.required = true;
-  macField.placeholder = 'Device MAC';
-  if (validateMacAddress($(macField).val())) {
-    button.disabled = false;
-    macField.setCustomValidity('');
-  } else {
-    button.disabled = true;
-    macField.setCustomValidity('MAC address invalid');
-  }
-}
-
-$('[validate-mac]').bind('input', validateMacWrapper);
-$('[validate-mac]').bind('select', validateMacWrapper);
-
-function parseDevice(device) {
-  let id, name, mac;
-  var regex = /([0-9]+):([^@ ]*)(@(([0-9A-Fa-f]{2}[:-]){5}([0-9A-Fa-f]{2})))?/;
-  if (regex.test(device)) {
-    let regexMatches = device.match(regex);
-    id = regexMatches[1];
-    name = regexMatches[2];
-    mac = regexMatches[4];
-  }
-  if (mac === undefined) {
-    mac = "";
-  }
-  return [id, name, mac];
-}
-
-function btUpdateAdded(devices) {
-  let deviceArr = devices.split('\r\n');
-  let [id, name, mac] = parseDevice(deviceArr[0]);
-  if (name) {
-    let div = document.getElementById('bluetooth-wizard-confirm').getElementsByClassName('bluetooth-text')[1];
-    div.innerHTML = "";
-    div.innerHTML += "<p>Name: <b>" + id + "</b></p>";
-    div.innerHTML += "<p>Type: <b>" + name + "</b></p>";
-    div.innerHTML += "<p>MAC Addr: <b>" + mac + "</b></p>";
-    return true;
-  }
-  return false;
-}
-
-function parsePhy(phy) {
-  let id = phy.substring(0, phy.indexOf(":"));
-  phy = phy.substring(phy.indexOf(":") + 1);
-  let name = phy.substring(0, phy.indexOf(":"));
-  let devices = phy.substring(phy.indexOf(":") + 1);
-  return [id, name, devices];
-}
-
-function btParsePhys(phys) {
-  if (phys.indexOf("Phys:") < 0) {
-    return null;
-  }
-  let phyDict = {};
-  phys = phys.split('Phys:')[1];
-  let phyArr = phys.split('\r\n');
-  for (var phy of phyArr.slice(1)) {
-    phy = phy.trim();
-    if (phy.length == 0 || phy.indexOf("deleted") >= 0) {
-      continue;
-    }
-    let [id, name, devices] = parsePhy(phy);
-    phyDict[name] = id;
-  }
-  return phyDict;
-}
-
-function btUpdateDeviceList(devices) {
-  let deviceArr = devices.split('\r\n');
-  if (deviceArr[0].indexOf("Devices:") >= 0) {
-    let div = document.getElementById('bluetooth-list').getElementsByClassName('bluetooth-text')[0];
-    div.innerHTML = "";
-    let count = 0;
-    for (var device of deviceArr.slice(1)) {
-      if (device.indexOf("Phys:") >= 0) {
-        break;
-      }
-      count++;
-      if (device.indexOf("deleted") >= 0) {
-        continue;
-      }
-      let [id, name, mac] = parseDevice(device);
-      let innerDiv = '<div><button title="Delete" data-device-id="'
-      innerDiv += id;
-      innerDiv += '" class="bluetooth-list-trash material-icons">delete</button>';
-      innerDiv += name;
-      if (mac) {
-        innerDiv += " | "
-        innerDiv += mac;
-      }
-      innerDiv += '</div>';
-      div.innerHTML += innerDiv;
-    }
-    return count;
-  }
-  return -1;
-}
-
-function createControlPanelButton(
-    command, title, icon_name, listener,
-    parent_id = 'control-panel-default-buttons') {
-  let button = document.createElement('button');
-  document.getElementById(parent_id).appendChild(button);
-  button.title = title;
-  button.dataset.command = command;
-  button.disabled = true;
-  // Capture mousedown/up/out commands instead of click to enable
-  // hold detection. mouseout is used to catch if the user moves the
-  // mouse outside the button while holding down.
-  button.addEventListener('mousedown', listener);
-  button.addEventListener('mouseup', listener);
-  button.addEventListener('mouseout', listener);
-  // Set the button image using Material Design icons.
-  // See http://google.github.io/material-design-icons
-  // and https://material.io/resources/icons
-  button.classList.add('material-icons');
-  button.innerHTML = icon_name;
-  return button;
-}
-
-function positionModal(button_id, modal_id) {
-  const modalButton = document.getElementById(button_id);
-  const modalDiv = document.getElementById(modal_id);
-
-  // Position the modal to the right of the show modal button.
-  modalDiv.style.top = modalButton.offsetTop;
-  modalDiv.style.left = modalButton.offsetWidth + 30;
-}
-
-function createModalButton(button_id, modal_id, close_id, hide_id) {
-  const modalButton = document.getElementById(button_id);
-  const modalDiv = document.getElementById(modal_id);
-  const modalHeader = modalDiv.querySelector('.modal-header');
-  const modalClose = document.getElementById(close_id);
-  const modalDivHide = document.getElementById(hide_id);
-
-  positionModal(button_id, modal_id);
-
-  function showHideModal(show) {
-    if (show) {
-      modalButton.classList.add('modal-button-opened')
-      modalDiv.style.display = 'block';
-    } else {
-      modalButton.classList.remove('modal-button-opened')
-      modalDiv.style.display = 'none';
-    }
-    if (modalDivHide != null) {
-      modalDivHide.style.display = 'none';
-    }
-  }
-  // Allow the show modal button to toggle the modal,
-  modalButton.addEventListener(
-      'click', evt => showHideModal(modalDiv.style.display != 'block'));
-  // but the close button always closes.
-  modalClose.addEventListener('click', evt => showHideModal(false));
-
-  // Allow the modal to be dragged by the header.
-  let modalOffsets = {
-    midDrag: false,
-    mouseDownOffsetX: null,
-    mouseDownOffsetY: null,
-  };
-  modalHeader.addEventListener('mousedown', evt => {
-    modalOffsets.midDrag = true;
-    // Store the offset of the mouse location from the
-    // modal's current location.
-    modalOffsets.mouseDownOffsetX = parseInt(modalDiv.style.left) - evt.clientX;
-    modalOffsets.mouseDownOffsetY = parseInt(modalDiv.style.top) - evt.clientY;
-  });
-  modalHeader.addEventListener('mousemove', evt => {
-    let offsets = modalOffsets;
-    if (offsets.midDrag) {
-      // Move the modal to the mouse location plus the
-      // offset calculated on the initial mouse-down.
-      modalDiv.style.left = evt.clientX + offsets.mouseDownOffsetX;
-      modalDiv.style.top = evt.clientY + offsets.mouseDownOffsetY;
-    }
-  });
-  document.addEventListener('mouseup', evt => {
-    modalOffsets.midDrag = false;
-  });
-}
-
-function cmdConsole(consoleViewName, consoleInputName) {
-  let consoleView = document.getElementById(consoleViewName);
-
-  let addString =
-      function(str) {
-    consoleView.value += str;
-    consoleView.scrollTop = consoleView.scrollHeight;
-  }
-
-  let addLine =
-      function(line) {
-    addString(line + '\r\n');
-  }
-
-  let commandCallbacks = [];
-
-  let addCommandListener =
-      function(f) {
-    commandCallbacks.push(f);
-  }
-
-  let onCommand =
-      function(cmd) {
-    cmd = cmd.trim();
-
-    if (cmd.length == 0) return;
-
-    commandCallbacks.forEach(f => {
-      f(cmd);
-    })
-  }
-
-  addCommandListener(cmd => addLine('>> ' + cmd));
-
-  let consoleInput = document.getElementById(consoleInputName);
-
-  consoleInput.addEventListener('keydown', e => {
-    if ((e.key && e.key == 'Enter') || e.keyCode == 13) {
-      let command = e.target.value;
-
-      e.target.value = '';
-
-      onCommand(command);
-    }
-  });
-
-  return {
-    consoleView: consoleView,
-    consoleInput: consoleInput,
-    addLine: addLine,
-    addString: addString,
-    addCommandListener: addCommandListener,
-  };
-}
diff --git a/host/frontend/webrtc/client/style.css b/host/frontend/webrtc/client/style.css
deleted file mode 100644
index 93aaa05..0000000
--- a/host/frontend/webrtc/client/style.css
+++ /dev/null
@@ -1,303 +0,0 @@
-/*
- * Copyright (C) 2019 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-body {
-  background-color:black;
-  margin: 0;
-  touch-action: none;
-  overscroll-behavior: none;
-}
-
-#device-connection {
-  display: none;
-  max-height: 100vh;
-}
-
-@keyframes spin {
-  0% { transform: rotate(0deg); }
-  100% { transform: rotate(360deg); }
-}
-
-#loader {
-  border-left: 12px solid #4285F4;
-  border-top: 12px solid #34A853;
-  border-right: 12px solid #FBBC05;
-  border-bottom: 12px solid #EA4335;
-  border-radius: 50%;
-  width: 70px;
-  height: 70px;
-  animation: spin 1.2s linear infinite;
-  margin: 100px;
-}
-
-/* Top header row. */
-
-#header {
-  height: 64px;
-  /* Items inside this use a row Flexbox.*/
-  display: flex;
-  align-items: center;
-}
-
-#camera-control {
-  display: none !important;
-}
-#record-video-control {
-  display: none !important;
-}
-
-#app-controls {
-  margin-left: 10px;
-}
-#app-controls > div {
-  display: inline-block;
-  position: relative;
-  margin-right: 6px;
-}
-#device-audio {
-  height: 44px;
-}
-
-#error-message-div {
-  flex-grow: 1;
-}
-#error-message {
-  color: white;
-  font-family: 'Open Sans', sans-serif;
-  padding: 10px;
-  margin: 10px;
-  border-radius: 10px;
-}
-#error-message .close-btn {
-  float: right;
-  cursor: pointer;
-}
-#error-message.hidden {
-  display: none;
-}
-#error-message.warning {
-  /* dark red */
-  background-color: #927836;
-}
-#error-message.error {
-  /* dark red */
-  background-color: #900000;
-}
-#status-div {
-  flex-grow: 1;
-}
-#status-message {
-  color: white;
-  font-family: 'Open Sans', sans-serif;
-  padding: 10px;
-  margin: 10px;
-}
-#status-message.connecting {
-  /* dark yellow */
-  background-color: #927836;
-}
-#status-message.error {
-  /* dark red */
-  background-color: #900000;
-}
-#status-message.connected {
-  /* dark green */
-  background-color: #007000;
-}
-
-/* Control panel buttons and device screen(s). */
-
-#controls-and-displays {
-  height: calc(100% - 84px);
-
-  /* Items inside this use a row Flexbox.*/
-  display: flex;
-}
-
-#controls-and-displays > div {
-  margin-left: 5px;
-  margin-right: 5px;
-}
-
-.modal {
-  /* Start out hidden, and use absolute positioning. */
-  display: none;
-  position: absolute;
-
-  border-radius: 10px;
-  padding: 20px;
-  padding-top: 1px;
-
-  background-color: #5f6368ea; /* Semi-transparent Google grey 500 */
-  color: white;
-  font-family: 'Open Sans', sans-serif;
-}
-.modal-header {
-  cursor: move;
-  /* Items inside this use a row Flexbox.*/
-  display: flex;
-  justify-content: space-between;
-}
-.modal-close {
-  color: white;
-  border: none;
-  outline: none;
-  background-color: transparent;
-}
-.modal-button, .modal-button-highlight {
-  background:    #e8eaed; /* Google grey 200 */
-  border-radius: 10px;
-  box-shadow:    1px 1px #444444;
-  padding:       10px 20px;
-  color:         #000000;
-  display:       inline-block;
-  font:          normal bold 14px/1 "Open Sans", sans-serif;
-  text-align:    center;
-}
-#bluetooth-wizard-mac:valid {
-  border: 2px solid black;
-}
-#bluetooth-wizard-mac:invalid {
-  border: 2px solid red;
-}
-#bluetooth-wizard-mac:invalid + span::before {
-  font-weight: bold;
-  content: 'X';
-  color: red;
-}
-#bluetooth-wizard-mac:valid + span::before {
-  font-weight: bold;
-  content: 'OK';
-  color: green;
-}
-.modal-button {
-  background:    #e8eaed; /* Google grey 200 */
-}
-.modal-button-highlight {
-  background:    #f4cccc;
-}
-#device-details-modal span {
-  white-space: pre;
-}
-#bluetooth-console-input {
-  width: 100%;
-}
-#bluetooth-console-cmd-label {
-  color: white;
-}
-.bluetooth-text, .bluetooth-text-bold, .bluetooth-text-field input {
-  font: normal 18px/1 "Open Sans", sans-serif;
-}
-.bluetooth-text, .bluetooth-text-bold {
-  color: white;
-}
-.bluetooth-text-bold {
-  font: bold;
-}
-.bluetooth-button {
-  text-align: center;
-}
-.bluetooth-drop-down select {
-  font: normal 18px/1 "Open Sans", sans-serif;
-  color: black;
-  width: 500px;
-  margin: 5px;
-  rows: 10;
-  columns: 60;
-}
-.bluetooth-text-field input {
-  color: black;
-  width: 500px;
-  margin: 5px;
-  rows: 10;
-  columns: 60;
-}
-.bluetooth-list-trash {
-  background:    #00000000;
-  border:        0px;
-  color:         #ffffff;
-}
-
-.control-panel-column {
-  width: 50px;
-  /* Items inside this use a column Flexbox.*/
-  display: flex;
-  flex-direction: column;
-}
-#control-panel-custom-buttons {
-  display: none;
-  /* Give the custom buttons column a blue background. */
-  background-color: #1c4587ff;
-  height: fit-content;
-  border-radius: 10px;
-}
-
-.control-panel-column button {
-  margin: 0px 0px 5px 0px;
-  height: 50px;
-  font-size: 32px;
-
-  color: #e8eaed; /* Google grey 200 */
-  border: none;
-  outline: none;
-  background-color: transparent;
-}
-.control-panel-column button:disabled {
-  color: #9aa0a6; /* Google grey 500 */
-}
-.control-panel-column button.modal-button-opened {
-  border-radius: 10px;
-  background-color: #5f6368; /* Google grey 700 */
-}
-
-#device-displays {
-  /* Take up the remaining width of the window.*/
-  flex-grow: 1;
-  /* Don't grow taller than the window.*/
-  max-height: 100vh;
-  /* Allows child elements to be positioned relative to this element. */
-  position: relative;
-}
-
-/*
- * Container <div> used to wrap each display's <video> element which is used for
- * maintaining each display's width and height while the display is potentially
- * rotating.
- */
-.device-display {
-  /* Prevents #device-displays from using this element when computing flex size. */
-  position: absolute;
-}
-
-/* Container <div> to show info about the individual display. */
-.device-display-info {
-  color: white;
-  /* dark green */
-  background-color: #007000;
-  font-family: 'Open Sans', sans-serif;
-  text-indent: 0px;
-  border-radius: 10px;
-  padding: 10px;
-  margin-bottom: 10px;
-}
-
-/* The actual <video> element for each display. */
-.device-display-video {
-  position: absolute;
-  left:  0px;
-  touch-action: none;
-  object-fit: cover;
-}
diff --git a/host/frontend/webrtc/connection_observer.cpp b/host/frontend/webrtc/connection_observer.cpp
index ec632b6..a33b89e 100644
--- a/host/frontend/webrtc/connection_observer.cpp
+++ b/host/frontend/webrtc/connection_observer.cpp
@@ -35,8 +35,10 @@
 #include "common/libs/fs/shared_buf.h"
 #include "host/frontend/webrtc/adb_handler.h"
 #include "host/frontend/webrtc/bluetooth_handler.h"
-#include "host/frontend/webrtc/lib/camera_controller.h"
-#include "host/frontend/webrtc/lib/utils.h"
+#include "host/frontend/webrtc/gpx_locations_handler.h"
+#include "host/frontend/webrtc/kml_locations_handler.h"
+#include "host/frontend/webrtc/libdevice/camera_controller.h"
+#include "host/frontend/webrtc/location_handler.h"
 #include "host/libs/config/cuttlefish_config.h"
 
 DECLARE_bool(write_virtio_input);
@@ -120,8 +122,7 @@
     }
   }
 
-  void OnConnected(std::function<void(const uint8_t *, size_t, bool)>
-                   /*ctrl_msg_sender*/) override {
+  void OnConnected() override {
     auto display_handler = weak_display_handler_.lock();
     if (display_handler) {
       std::thread th([this]() {
@@ -145,8 +146,9 @@
   void OnTouchEvent(const std::string &display_label, int x, int y,
                     bool down) override {
     if (confui_input_.IsConfUiActive()) {
-      ConfUiLog(DEBUG) << "delivering a touch event in confirmation UI mode";
-      confui_input_.TouchEvent(x, y, down);
+      if (down) {
+        confui_input_.TouchEvent(x, y, down);
+      }
       return;
     }
     auto buffer = GetEventBuffer();
@@ -180,10 +182,8 @@
 
       if (confui_input_.IsConfUiActive()) {
         if (down) {
-          ConfUiLog(DEBUG) << "Delivering event (" << x << ", " << y
-                           << ") to conf ui";
+          confui_input_.TouchEvent(this_x, this_y, down);
         }
-        confui_input_.TouchEvent(this_x, this_y, down);
         continue;
       }
 
@@ -219,7 +219,7 @@
 
   void OnKeyboardEvent(uint16_t code, bool down) override {
     if (confui_input_.IsConfUiActive()) {
-      ConfUiLog(DEBUG) << "keyboard event ignored in confirmation UI mode";
+      ConfUiLog(VERBOSE) << "keyboard event ignored in confirmation UI mode";
       return;
     }
 
@@ -235,7 +235,7 @@
                          buffer->size());
   }
 
-  void OnSwitchEvent(uint16_t code, bool state) override {
+  void OnSwitchEvent(uint16_t code, bool state) {
     auto buffer = GetEventBuffer();
     if (!buffer) {
       LOG(ERROR) << "Failed to allocate event buffer";
@@ -269,64 +269,38 @@
     kernel_log_subscription_id_ =
         kernel_log_events_handler_->AddSubscriber(control_message_sender);
   }
-  void OnControlMessage(const uint8_t* msg, size_t size) override {
-    Json::Value evt;
-    const char* msg_str = reinterpret_cast<const char*>(msg);
-    Json::CharReaderBuilder builder;
-    std::unique_ptr<Json::CharReader> json_reader(builder.newCharReader());
-    std::string errorMessage;
-    if (!json_reader->parse(msg_str, msg_str + size, &evt, &errorMessage)) {
-      LOG(ERROR) << "Received invalid JSON object over control channel: " << errorMessage;
-      return;
-    }
 
-    auto result = webrtc_streaming::ValidationResult::ValidateJsonObject(
-        evt, "command",
-        /*required_fields=*/{{"command", Json::ValueType::stringValue}},
-        /*optional_fields=*/
-        {
-            {"button_state", Json::ValueType::stringValue},
-            {"lid_switch_open", Json::ValueType::booleanValue},
-            {"hinge_angle_value", Json::ValueType::intValue},
-        });
-    if (!result.ok()) {
-      LOG(ERROR) << result.error();
-      return;
-    }
-    auto command = evt["command"].asString();
-
-    if (command == "device_state") {
-      if (evt.isMember("lid_switch_open")) {
-        // InputManagerService treats a value of 0 as open and 1 as closed, so
-        // invert the lid_switch_open value that is sent to the input device.
-        OnSwitchEvent(SW_LID, !evt["lid_switch_open"].asBool());
-      }
-      if (evt.isMember("hinge_angle_value")) {
-        // TODO(b/181157794) Propagate hinge angle sensor data using a custom
-        // Sensor HAL.
-      }
-      return;
-    } else if (command.rfind("camera_", 0) == 0 && camera_controller_) {
-      // Handle commands starting with "camera_" by camera controller
-      camera_controller_->HandleMessage(evt);
-      return;
-    }
-
-    auto button_state = evt["button_state"].asString();
-    LOG(VERBOSE) << "Control command: " << command << " (" << button_state
-                 << ")";
-    if (command == "power") {
-      OnKeyboardEvent(KEY_POWER, button_state == "down");
-    } else if (command == "home") {
-      OnKeyboardEvent(KEY_HOMEPAGE, button_state == "down");
-    } else if (command == "menu") {
-      OnKeyboardEvent(KEY_MENU, button_state == "down");
-    } else if (command == "volumedown") {
-      OnKeyboardEvent(KEY_VOLUMEDOWN, button_state == "down");
-    } else if (command == "volumeup") {
-      OnKeyboardEvent(KEY_VOLUMEUP, button_state == "down");
-    } else if (commands_to_custom_action_servers_.find(command) !=
-               commands_to_custom_action_servers_.end()) {
+  void OnLidStateChange(bool lid_open) override {
+    // InputManagerService treats a value of 0 as open and 1 as closed, so
+    // invert the lid_switch_open value that is sent to the input device.
+    OnSwitchEvent(SW_LID, !lid_open);
+  }
+  void OnHingeAngleChange(int /*hinge_angle*/) override {
+    // TODO(b/181157794) Propagate hinge angle sensor data using a custom
+    // Sensor HAL.
+  }
+  void OnPowerButton(bool button_down) override {
+    OnKeyboardEvent(KEY_POWER, button_down);
+  }
+  void OnBackButton(bool button_down) override {
+    OnKeyboardEvent(KEY_BACK, button_down);
+  }
+  void OnHomeButton(bool button_down) override {
+    OnKeyboardEvent(KEY_HOMEPAGE, button_down);
+  }
+  void OnMenuButton(bool button_down) override {
+    OnKeyboardEvent(KEY_MENU, button_down);
+  }
+  void OnVolumeDownButton(bool button_down) override {
+    OnKeyboardEvent(KEY_VOLUMEDOWN, button_down);
+  }
+  void OnVolumeUpButton(bool button_down) override {
+    OnKeyboardEvent(KEY_VOLUMEUP, button_down);
+  }
+  void OnCustomActionButton(const std::string &command,
+                            const std::string &button_state) override {
+    if (commands_to_custom_action_servers_.find(command) !=
+        commands_to_custom_action_servers_.end()) {
       // Simple protocol for commands forwarded to action servers:
       //   - Always 128 bytes
       //   - Format:   command:button_state
@@ -352,10 +326,71 @@
   void OnBluetoothMessage(const uint8_t *msg, size_t size) override {
     bluetooth_handler_->handleMessage(msg, size);
   }
+  void OnLocationChannelOpen(std::function<bool(const uint8_t *, size_t)>
+                                 location_message_sender) override {
+    LOG(VERBOSE) << "Location channel open";
+    auto config = cuttlefish::CuttlefishConfig::Get();
+    CHECK(config) << "Failed to get config";
+    location_handler_.reset(new cuttlefish::webrtc_streaming::LocationHandler(
+        location_message_sender));
+  }
+  void OnLocationMessage(const uint8_t *msg, size_t size) override {
+    std::string msgstr(msg, msg + size);
+
+    std::vector<std::string> inputs = android::base::Split(msgstr, ",");
+
+    if(inputs.size() != 3){
+      LOG(WARNING) << "Invalid location length , length = " << inputs.size();
+      return;
+    }
+
+    float longitude = std::stod(inputs.at(0));
+    float latitude  = std::stod(inputs.at(1));
+    float elevation = std::stod(inputs.at(2));
+    location_handler_->HandleMessage(longitude, latitude, elevation);
+  }
+
+  void OnKmlLocationsChannelOpen(std::function<bool(const uint8_t *, size_t)>
+                                     kml_locations_message_sender) override {
+    LOG(VERBOSE) << "Kml Locations channel open";
+    auto config = cuttlefish::CuttlefishConfig::Get();
+    CHECK(config) << "Failed to get config";
+    kml_locations_handler_.reset(
+        new cuttlefish::webrtc_streaming::KmlLocationsHandler(
+            kml_locations_message_sender));
+  }
+  void OnKmlLocationsMessage(const uint8_t *msg, size_t size) override {
+    kml_locations_handler_->HandleMessage(msg, size);
+  }
+
+  void OnGpxLocationsChannelOpen(std::function<bool(const uint8_t *, size_t)>
+                                     gpx_locations_message_sender) override {
+    LOG(VERBOSE) << "Gpx Locations channel open";
+    auto config = cuttlefish::CuttlefishConfig::Get();
+    CHECK(config) << "Failed to get config";
+    gpx_locations_handler_.reset(
+        new cuttlefish::webrtc_streaming::GpxLocationsHandler(
+            gpx_locations_message_sender));
+  }
+  void OnGpxLocationsMessage(const uint8_t *msg, size_t size) override {
+    gpx_locations_handler_->HandleMessage(msg, size);
+  }
+
+  void OnCameraControlMsg(const Json::Value& msg) override {
+    if (camera_controller_) {
+      camera_controller_->HandleMessage(msg);
+    } else {
+      LOG(VERBOSE) << "Camera control message received but no camera "
+                      "controller is available";
+    }
+  }
 
   void OnCameraData(const std::vector<char> &data) override {
     if (camera_controller_) {
       camera_controller_->HandleMessage(data);
+    } else {
+      LOG(VERBOSE)
+          << "Camera data received but no camera controller is available";
     }
   }
 
@@ -366,6 +401,12 @@
   std::shared_ptr<cuttlefish::webrtc_streaming::AdbHandler> adb_handler_;
   std::shared_ptr<cuttlefish::webrtc_streaming::BluetoothHandler>
       bluetooth_handler_;
+  std::shared_ptr<cuttlefish::webrtc_streaming::LocationHandler>
+      location_handler_;
+  std::shared_ptr<cuttlefish::webrtc_streaming::KmlLocationsHandler>
+      kml_locations_handler_;
+  std::shared_ptr<cuttlefish::webrtc_streaming::GpxLocationsHandler>
+      gpx_locations_handler_;
   std::map<std::string, cuttlefish::SharedFD> commands_to_custom_action_servers_;
   std::weak_ptr<DisplayHandler> weak_display_handler_;
   std::set<int32_t> active_touch_slots_;
diff --git a/host/frontend/webrtc/connection_observer.h b/host/frontend/webrtc/connection_observer.h
index f31472c..4305d0f 100644
--- a/host/frontend/webrtc/connection_observer.h
+++ b/host/frontend/webrtc/connection_observer.h
@@ -22,8 +22,8 @@
 #include "common/libs/fs/shared_fd.h"
 #include "host/frontend/webrtc/display_handler.h"
 #include "host/frontend/webrtc/kernel_log_events_handler.h"
-#include "host/frontend/webrtc/lib/camera_controller.h"
-#include "host/frontend/webrtc/lib/connection_observer.h"
+#include "host/frontend/webrtc/libdevice/camera_controller.h"
+#include "host/frontend/webrtc/libdevice/connection_observer.h"
 #include "host/libs/confui/host_virtual_input.h"
 
 namespace cuttlefish {
diff --git a/host/frontend/webrtc/cvd_video_frame_buffer.h b/host/frontend/webrtc/cvd_video_frame_buffer.h
index 84b4946..04f81aa 100644
--- a/host/frontend/webrtc/cvd_video_frame_buffer.h
+++ b/host/frontend/webrtc/cvd_video_frame_buffer.h
@@ -18,7 +18,7 @@
 
 #include <vector>
 
-#include "host/frontend/webrtc/lib/video_frame_buffer.h"
+#include "host/frontend/webrtc/libdevice/video_frame_buffer.h"
 
 namespace cuttlefish {
 
@@ -27,8 +27,9 @@
   CvdVideoFrameBuffer(int width, int height);
   CvdVideoFrameBuffer(CvdVideoFrameBuffer&& cvd_frame_buf) = default;
   CvdVideoFrameBuffer(const CvdVideoFrameBuffer& cvd_frame_buf) = default;
-  CvdVideoFrameBuffer& operator=(CvdVideoFrameBuffer&& cvd_frame_buf) = default;
-  CvdVideoFrameBuffer& operator=(const CvdVideoFrameBuffer& cvd_frame_buf) = default;
+  CvdVideoFrameBuffer& operator=(CvdVideoFrameBuffer&& cvd_frame_buf) = delete;
+  CvdVideoFrameBuffer& operator=(const CvdVideoFrameBuffer& cvd_frame_buf) =
+      delete;
   CvdVideoFrameBuffer() = delete;
 
   ~CvdVideoFrameBuffer() override;
diff --git a/host/frontend/webrtc/display_handler.cpp b/host/frontend/webrtc/display_handler.cpp
index 3de1516..9ebdc3f 100644
--- a/host/frontend/webrtc/display_handler.cpp
+++ b/host/frontend/webrtc/display_handler.cpp
@@ -22,12 +22,47 @@
 
 #include <libyuv.h>
 
+#include "host/frontend/webrtc/libdevice/streamer.h"
+
 namespace cuttlefish {
-DisplayHandler::DisplayHandler(
-    std::vector<std::shared_ptr<webrtc_streaming::VideoSink>> display_sinks,
-    ScreenConnector& screen_connector)
-    : display_sinks_(display_sinks), screen_connector_(screen_connector) {
+DisplayHandler::DisplayHandler(webrtc_streaming::Streamer& streamer,
+                               ScreenConnector& screen_connector)
+    : streamer_(streamer), screen_connector_(screen_connector) {
   screen_connector_.SetCallback(std::move(GetScreenConnectorCallback()));
+  screen_connector_.SetDisplayEventCallback([this](const DisplayEvent& event) {
+    std::visit(
+        [this](auto&& e) {
+          using T = std::decay_t<decltype(e)>;
+          if constexpr (std::is_same_v<DisplayCreatedEvent, T>) {
+            LOG(VERBOSE) << "Display:" << e.display_number << " created "
+                         << " w:" << e.display_width
+                         << " h:" << e.display_height;
+
+            const auto display_number = e.display_number;
+            const std::string display_id =
+                "display_" + std::to_string(e.display_number);
+            auto display = streamer_.AddDisplay(display_id, e.display_width,
+                                                e.display_height, 160, true);
+            if (!display) {
+              LOG(ERROR) << "Failed to create display.";
+              return;
+            }
+
+            display_sinks_[display_number] = display;
+          } else if constexpr (std::is_same_v<DisplayDestroyedEvent, T>) {
+            LOG(VERBOSE) << "Display:" << e.display_number << " destroyed.";
+
+            const auto display_number = e.display_number;
+            const auto display_id =
+                "display_" + std::to_string(e.display_number);
+            streamer_.RemoveDisplay(display_id);
+            display_sinks_.erase(display_number);
+          } else {
+            static_assert("Unhandled display event.");
+          }
+        },
+        event);
+  });
 }
 
 DisplayHandler::GenerateProcessedFrameCallback DisplayHandler::GetScreenConnectorCallback() {
@@ -88,7 +123,11 @@
         std::chrono::duration_cast<std::chrono::microseconds>(
             std::chrono::system_clock::now().time_since_epoch())
             .count();
-    display_sinks_[buffer_display]->OnFrame(buffer, time_stamp);
+
+    auto it = display_sinks_.find(buffer_display);
+    if (it != display_sinks_.end()) {
+      it->second->OnFrame(buffer, time_stamp);
+    }
   }
 }
 }  // namespace cuttlefish
diff --git a/host/frontend/webrtc/display_handler.h b/host/frontend/webrtc/display_handler.h
index 1411984..494602c 100644
--- a/host/frontend/webrtc/display_handler.h
+++ b/host/frontend/webrtc/display_handler.h
@@ -21,7 +21,7 @@
 #include <vector>
 
 #include "host/frontend/webrtc/cvd_video_frame_buffer.h"
-#include "host/frontend/webrtc/lib/video_sink.h"
+#include "host/frontend/webrtc/libdevice/video_sink.h"
 #include "host/libs/screen_connector/screen_connector.h"
 
 namespace cuttlefish {
@@ -47,14 +47,18 @@
   }
 };
 
+namespace webrtc_streaming {
+class Streamer;
+}  // namespace webrtc_streaming
+
 class DisplayHandler {
  public:
   using ScreenConnector = cuttlefish::ScreenConnector<WebRtcScProcessedFrame>;
   using GenerateProcessedFrameCallback = ScreenConnector::GenerateProcessedFrameCallback;
+  using WebRtcScProcessedFrame = cuttlefish::WebRtcScProcessedFrame;
 
-  DisplayHandler(
-      std::vector<std::shared_ptr<webrtc_streaming::VideoSink>> display_sinks,
-      ScreenConnector& screen_connector);
+  DisplayHandler(webrtc_streaming::Streamer& streamer,
+                 ScreenConnector& screen_connector);
   ~DisplayHandler() = default;
 
   [[noreturn]] void Loop();
@@ -62,7 +66,9 @@
 
  private:
   GenerateProcessedFrameCallback GetScreenConnectorCallback();
-  std::vector<std::shared_ptr<webrtc_streaming::VideoSink>> display_sinks_;
+  std::map<uint32_t, std::shared_ptr<webrtc_streaming::VideoSink>>
+      display_sinks_;
+  webrtc_streaming::Streamer& streamer_;
   ScreenConnector& screen_connector_;
   std::shared_ptr<webrtc_streaming::VideoFrameBuffer> last_buffer_;
   std::uint32_t last_buffer_display_ = 0;
diff --git a/host/frontend/webrtc/doc/graphics.dot b/host/frontend/webrtc/doc/graphics.dot
new file mode 100644
index 0000000..7cd2f92
--- /dev/null
+++ b/host/frontend/webrtc/doc/graphics.dot
@@ -0,0 +1,68 @@
+digraph {
+  rankdir = "BT"
+  subgraph clients {
+    rank = same;
+    browser [label = "Browser"]
+    vnc_client [label = "VNC Client"]
+  }
+  host_renderer [label = < <font color="blue">gfxstream</font> / virglrenderer >]
+  run_cvd
+  wayland_socket [label = "internal/frames.sock", shape = "rectangle"]
+  webrtc [label = < <b>webrtc</b> >, penwidth = 2]
+  vmm [label = < <font color="blue">crosvm</font> / <font color="red">qemu</font> >]
+
+  subgraph cluster_android {
+    label = "Cuttlefish VM"
+    subgraph devices {
+      rank = same;
+      drm_card [label = "/dev/dri/card0", shape = "rectangle"]
+      drm_render [label = "/dev/dri/renderD128", shape = "rectangle"]
+    }
+    subgraph hals {
+      rank = same;
+      angle
+      egl [shape = "rectangle"]
+      hwcomposer_drm [label = "hwcomposer.drm"]
+      hwcomposer_ranchu [label = "hwcomposer.ranchu"]
+      vulkan [shape = "rectangle"]
+    }
+    minigbm
+    mesa
+    swiftshader [label = "SwiftShader Pastel"]
+    surfaceflinger
+    vulkan_ranchu [color = "blue", label = "Gfxstream Vulkan"]
+  }
+
+  webrtc -> browser [dir = both]
+  vmm -> vnc_client [dir = both, color = "red"]
+
+  webrtc -> run_cvd [dir = back]
+
+  wayland_socket -> webrtc [dir = both, color = "blue", label = < <I>Wayland</I> >]
+  vmm -> wayland_socket [dir = both, color = "blue", label = < <I>Wayland</I> >]
+
+  { rank = same; host_renderer -> vmm [dir = both] }
+
+  drm_render -> vmm
+  drm_card -> vmm
+
+  minigbm -> drm_render
+  mesa -> drm_render
+  vulkan_ranchu -> drm_render
+  hwcomposer_drm -> drm_card
+  hwcomposer_ranchu -> drm_card
+
+  minigbm -> hwcomposer_drm [dir = both, label = "HIDL (graphics.allocator)", style = "dashed"]
+  hwcomposer_ranchu -> minigbm [dir = both, label = "AIDL (graphics.allocator)", style = "dashed"]
+  angle -> egl [dir = "back", label = "Symlink", style = "dashed"]
+  egl -> mesa [label = "Symlink", style = "dashed"]
+  vulkan -> angle [dir = "back", label = "Shared library"]
+  vulkan-> swiftshader  [label = "Symlink", style = "dashed"]
+  vulkan -> vulkan_ranchu [label = "Symlink", style = "dashed"]
+
+  surfaceflinger -> egl [label = "Shared library"]
+  surfaceflinger -> hwcomposer_ranchu [dir = both, label = "AIDL (graphics.composer3)", style = "dashed"]
+  surfaceflinger -> hwcomposer_drm [dir = both, label = "HIDL (graphics.composer)", style = "dashed"]
+  surfaceflinger -> vulkan [label = "Shared library"]
+  surfaceflinger -> minigbm [dir = both, label = "HIDL (graphics.allocator)"]
+}
diff --git a/host/frontend/webrtc/doc/graphics.png b/host/frontend/webrtc/doc/graphics.png
new file mode 100644
index 0000000..7eb6669
--- /dev/null
+++ b/host/frontend/webrtc/doc/graphics.png
Binary files differ
diff --git a/host/frontend/webrtc/doc/graphics.svg b/host/frontend/webrtc/doc/graphics.svg
new file mode 100644
index 0000000..8d2d11b
--- /dev/null
+++ b/host/frontend/webrtc/doc/graphics.svg
@@ -0,0 +1,318 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN"
+ "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
+<!-- Generated by graphviz version 2.43.0 (0)
+ -->
+<!-- Title: %3 Pages: 1 -->
+<svg width="891pt" height="650pt"
+ viewBox="0.00 0.00 891.00 650.00" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
+<g id="graph0" class="graph" transform="scale(1 1) rotate(0) translate(4 646)">
+<title>%3</title>
+<polygon fill="white" stroke="transparent" points="-4,4 -4,-646 887,-646 887,4 -4,4"/>
+<g id="clust2" class="cluster">
+<title>cluster_android</title>
+<polygon fill="none" stroke="black" points="8,-8 8,-330 875,-330 875,-8 8,-8"/>
+<text text-anchor="middle" x="441.5" y="-15.8" font-family="Times,serif" font-size="14.00">Cuttlefish VM</text>
+</g>
+<!-- browser -->
+<g id="node1" class="node">
+<title>browser</title>
+<ellipse fill="none" stroke="black" cx="458" cy="-624" rx="40.09" ry="18"/>
+<text text-anchor="middle" x="458" y="-620.3" font-family="Times,serif" font-size="14.00">Browser</text>
+</g>
+<!-- vnc_client -->
+<g id="node2" class="node">
+<title>vnc_client</title>
+<ellipse fill="none" stroke="black" cx="346" cy="-624" rx="53.09" ry="18"/>
+<text text-anchor="middle" x="346" y="-620.3" font-family="Times,serif" font-size="14.00">VNC Client</text>
+</g>
+<!-- host_renderer -->
+<g id="node3" class="node">
+<title>host_renderer</title>
+<ellipse fill="none" stroke="black" cx="167" cy="-377" rx="104.78" ry="18"/>
+<text text-anchor="start" x="94.5" y="-374.3" font-family="Times,serif" font-size="14.00"> </text>
+<text text-anchor="start" x="98.5" y="-374.3" font-family="Times,serif" font-size="14.00" fill="blue">gfxstream</text>
+<text text-anchor="start" x="153.5" y="-374.3" font-family="Times,serif" font-size="14.00"> / virglrenderer </text>
+</g>
+<!-- vmm -->
+<g id="node7" class="node">
+<title>vmm</title>
+<ellipse fill="none" stroke="black" cx="377" cy="-377" rx="70.39" ry="18"/>
+<text text-anchor="start" x="331" y="-374.3" font-family="Times,serif" font-size="14.00"> </text>
+<text text-anchor="start" x="335" y="-374.3" font-family="Times,serif" font-size="14.00" fill="blue">crosvm</text>
+<text text-anchor="start" x="376" y="-374.3" font-family="Times,serif" font-size="14.00"> / </text>
+<text text-anchor="start" x="388" y="-374.3" font-family="Times,serif" font-size="14.00" fill="red">qemu</text>
+<text text-anchor="start" x="419" y="-374.3" font-family="Times,serif" font-size="14.00"> </text>
+</g>
+<!-- host_renderer&#45;&gt;vmm -->
+<g id="edge6" class="edge">
+<title>host_renderer&#45;&gt;vmm</title>
+<path fill="none" stroke="black" d="M282.23,-377C286.9,-377 291.56,-377 296.23,-377"/>
+<polygon fill="black" stroke="black" points="282,-373.5 272,-377 282,-380.5 282,-373.5"/>
+<polygon fill="black" stroke="black" points="296.45,-380.5 306.45,-377 296.45,-373.5 296.45,-380.5"/>
+</g>
+<!-- run_cvd -->
+<g id="node4" class="node">
+<title>run_cvd</title>
+<ellipse fill="none" stroke="black" cx="556" cy="-624" rx="39.79" ry="18"/>
+<text text-anchor="middle" x="556" y="-620.3" font-family="Times,serif" font-size="14.00">run_cvd</text>
+</g>
+<!-- wayland_socket -->
+<g id="node5" class="node">
+<title>wayland_socket</title>
+<polygon fill="none" stroke="black" points="512,-482 384,-482 384,-446 512,-446 512,-482"/>
+<text text-anchor="middle" x="448" y="-460.3" font-family="Times,serif" font-size="14.00">internal/frames.sock</text>
+</g>
+<!-- webrtc -->
+<g id="node6" class="node">
+<title>webrtc</title>
+<ellipse fill="none" stroke="black" stroke-width="2" cx="458" cy="-551" rx="41.69" ry="18"/>
+<text text-anchor="start" x="434" y="-548.3" font-family="Times,serif" font-size="14.00"> </text>
+<text text-anchor="start" x="438" y="-548.3" font-family="Times,serif" font-weight="bold" font-size="14.00">webrtc</text>
+<text text-anchor="start" x="478" y="-548.3" font-family="Times,serif" font-size="14.00"> </text>
+</g>
+<!-- wayland_socket&#45;&gt;webrtc -->
+<g id="edge4" class="edge">
+<title>wayland_socket&#45;&gt;webrtc</title>
+<path fill="none" stroke="blue" d="M451.21,-492.27C452.36,-502.04 453.65,-513.06 454.8,-522.83"/>
+<polygon fill="blue" stroke="blue" points="454.67,-491.72 450.02,-482.2 447.72,-492.54 454.67,-491.72"/>
+<polygon fill="blue" stroke="blue" points="451.33,-523.3 455.98,-532.82 458.29,-522.48 451.33,-523.3"/>
+<text text-anchor="start" x="453" y="-504.8" font-family="Times,serif" font-size="14.00"> </text>
+<text text-anchor="start" x="457" y="-504.8" font-family="Times,serif" font-style="italic" font-size="14.00">Wayland</text>
+<text text-anchor="start" x="505" y="-504.8" font-family="Times,serif" font-size="14.00"> </text>
+</g>
+<!-- webrtc&#45;&gt;browser -->
+<g id="edge1" class="edge">
+<title>webrtc&#45;&gt;browser</title>
+<path fill="none" stroke="black" d="M458,-579.25C458,-584.72 458,-590.48 458,-595.95"/>
+<polygon fill="black" stroke="black" points="461.5,-579.19 458,-569.19 454.5,-579.19 461.5,-579.19"/>
+<polygon fill="black" stroke="black" points="454.5,-595.97 458,-605.97 461.5,-595.97 454.5,-595.97"/>
+</g>
+<!-- webrtc&#45;&gt;run_cvd -->
+<g id="edge3" class="edge">
+<title>webrtc&#45;&gt;run_cvd</title>
+<path fill="none" stroke="black" d="M486.65,-572.75C502.39,-584.16 521.54,-598.03 535.72,-608.31"/>
+<polygon fill="black" stroke="black" points="488.46,-569.74 478.31,-566.71 484.35,-575.41 488.46,-569.74"/>
+</g>
+<!-- vmm&#45;&gt;vnc_client -->
+<g id="edge2" class="edge">
+<title>vmm&#45;&gt;vnc_client</title>
+<path fill="none" stroke="red" d="M373.52,-405.47C367.55,-452.67 355.41,-548.65 349.45,-595.73"/>
+<polygon fill="red" stroke="red" points="377.04,-405.59 374.82,-395.23 370.09,-404.71 377.04,-405.59"/>
+<polygon fill="red" stroke="red" points="345.96,-595.43 348.18,-605.79 352.91,-596.31 345.96,-595.43"/>
+</g>
+<!-- vmm&#45;&gt;wayland_socket -->
+<g id="edge5" class="edge">
+<title>vmm&#45;&gt;wayland_socket</title>
+<path fill="none" stroke="blue" d="M397.51,-402.56C406.86,-413.75 417.91,-426.97 427.28,-438.19"/>
+<polygon fill="blue" stroke="blue" points="400.12,-400.22 391.03,-394.79 394.75,-404.71 400.12,-400.22"/>
+<polygon fill="blue" stroke="blue" points="424.66,-440.52 433.76,-445.95 430.03,-436.03 424.66,-440.52"/>
+<text text-anchor="start" x="416" y="-417.8" font-family="Times,serif" font-size="14.00"> </text>
+<text text-anchor="start" x="420" y="-417.8" font-family="Times,serif" font-style="italic" font-size="14.00">Wayland</text>
+<text text-anchor="start" x="468" y="-417.8" font-family="Times,serif" font-size="14.00"> </text>
+</g>
+<!-- drm_card -->
+<g id="node8" class="node">
+<title>drm_card</title>
+<polygon fill="none" stroke="black" points="283,-322 191,-322 191,-286 283,-286 283,-322"/>
+<text text-anchor="middle" x="237" y="-300.3" font-family="Times,serif" font-size="14.00">/dev/dri/card0</text>
+</g>
+<!-- drm_card&#45;&gt;vmm -->
+<g id="edge8" class="edge">
+<title>drm_card&#45;&gt;vmm</title>
+<path fill="none" stroke="black" d="M270.53,-322.01C290.69,-332.23 316.44,-345.29 337.56,-356"/>
+<polygon fill="black" stroke="black" points="336.1,-359.18 346.6,-360.59 339.27,-352.94 336.1,-359.18"/>
+</g>
+<!-- drm_render -->
+<g id="node9" class="node">
+<title>drm_render</title>
+<polygon fill="none" stroke="black" points="529.5,-322 402.5,-322 402.5,-286 529.5,-286 529.5,-322"/>
+<text text-anchor="middle" x="466" y="-300.3" font-family="Times,serif" font-size="14.00">/dev/dri/renderD128</text>
+</g>
+<!-- drm_render&#45;&gt;vmm -->
+<g id="edge7" class="edge">
+<title>drm_render&#45;&gt;vmm</title>
+<path fill="none" stroke="black" d="M444.46,-322.19C432.72,-331.55 418.06,-343.24 405.4,-353.34"/>
+<polygon fill="black" stroke="black" points="403.06,-350.73 397.43,-359.7 407.43,-356.2 403.06,-350.73"/>
+</g>
+<!-- angle -->
+<g id="node10" class="node">
+<title>angle</title>
+<ellipse fill="none" stroke="black" cx="719" cy="-144" rx="29.8" ry="18"/>
+<text text-anchor="middle" x="719" y="-140.3" font-family="Times,serif" font-size="14.00">angle</text>
+</g>
+<!-- egl -->
+<g id="node11" class="node">
+<title>egl</title>
+<polygon fill="none" stroke="black" points="867,-162 813,-162 813,-126 867,-126 867,-162"/>
+<text text-anchor="middle" x="840" y="-140.3" font-family="Times,serif" font-size="14.00">egl</text>
+</g>
+<!-- angle&#45;&gt;egl -->
+<g id="edge16" class="edge">
+<title>angle&#45;&gt;egl</title>
+<path fill="none" stroke="black" stroke-dasharray="5,2" d="M758.97,-144C776.67,-144 797.03,-144 812.91,-144"/>
+<polygon fill="black" stroke="black" points="758.91,-140.5 748.91,-144 758.91,-147.5 758.91,-140.5"/>
+<text text-anchor="middle" x="780.95" y="-129.8" font-family="Times,serif" font-size="14.00">Symlink</text>
+</g>
+<!-- mesa -->
+<g id="node16" class="node">
+<title>mesa</title>
+<ellipse fill="none" stroke="black" cx="776" cy="-231" rx="29.5" ry="18"/>
+<text text-anchor="middle" x="776" y="-227.3" font-family="Times,serif" font-size="14.00">mesa</text>
+</g>
+<!-- egl&#45;&gt;mesa -->
+<g id="edge17" class="edge">
+<title>egl&#45;&gt;mesa</title>
+<path fill="none" stroke="black" stroke-dasharray="5,2" d="M827.05,-162.2C817.55,-174.82 804.53,-192.1 794.02,-206.07"/>
+<polygon fill="black" stroke="black" points="791.07,-204.16 787.85,-214.26 796.66,-208.37 791.07,-204.16"/>
+<text text-anchor="middle" x="834.5" y="-183.8" font-family="Times,serif" font-size="14.00">Symlink</text>
+</g>
+<!-- hwcomposer_drm -->
+<g id="node12" class="node">
+<title>hwcomposer_drm</title>
+<ellipse fill="none" stroke="black" cx="89" cy="-144" rx="72.59" ry="18"/>
+<text text-anchor="middle" x="89" y="-140.3" font-family="Times,serif" font-size="14.00">hwcomposer.drm</text>
+</g>
+<!-- hwcomposer_drm&#45;&gt;drm_card -->
+<g id="edge12" class="edge">
+<title>hwcomposer_drm&#45;&gt;drm_card</title>
+<path fill="none" stroke="black" d="M87.83,-162.25C87.89,-172.4 89.35,-185.14 95,-195 117.09,-233.57 159.03,-262.84 191.63,-281.11"/>
+<polygon fill="black" stroke="black" points="190.1,-284.26 200.56,-285.97 193.45,-278.11 190.1,-284.26"/>
+</g>
+<!-- hwcomposer_ranchu -->
+<g id="node13" class="node">
+<title>hwcomposer_ranchu</title>
+<ellipse fill="none" stroke="black" cx="262" cy="-144" rx="81.79" ry="18"/>
+<text text-anchor="middle" x="262" y="-140.3" font-family="Times,serif" font-size="14.00">hwcomposer.ranchu</text>
+</g>
+<!-- hwcomposer_ranchu&#45;&gt;drm_card -->
+<g id="edge13" class="edge">
+<title>hwcomposer_ranchu&#45;&gt;drm_card</title>
+<path fill="none" stroke="black" d="M258.04,-162.08C256.73,-167.77 255.29,-174.16 254,-180 250.77,-194.64 249.39,-198.2 247,-213 243.63,-233.92 241.03,-257.76 239.33,-275.65"/>
+<polygon fill="black" stroke="black" points="235.84,-275.4 238.42,-285.68 242.81,-276.04 235.84,-275.4"/>
+</g>
+<!-- minigbm -->
+<g id="node15" class="node">
+<title>minigbm</title>
+<ellipse fill="none" stroke="black" cx="300" cy="-231" rx="43.59" ry="18"/>
+<text text-anchor="middle" x="300" y="-227.3" font-family="Times,serif" font-size="14.00">minigbm</text>
+</g>
+<!-- hwcomposer_ranchu&#45;&gt;minigbm -->
+<g id="edge15" class="edge">
+<title>hwcomposer_ranchu&#45;&gt;minigbm</title>
+<path fill="none" stroke="black" stroke-dasharray="5,2" d="M258.03,-172.56C258,-180.09 258.97,-188.08 262,-195 264.11,-199.81 267.24,-204.24 270.81,-208.23"/>
+<polygon fill="black" stroke="black" points="261.54,-172.56 258.65,-162.37 254.55,-172.14 261.54,-172.56"/>
+<polygon fill="black" stroke="black" points="268.47,-210.84 278.08,-215.32 273.36,-205.83 268.47,-210.84"/>
+<text text-anchor="middle" x="333" y="-183.8" font-family="Times,serif" font-size="14.00">AIDL (graphics.allocator)</text>
+</g>
+<!-- vulkan -->
+<g id="node14" class="node">
+<title>vulkan</title>
+<polygon fill="none" stroke="black" points="594,-162 540,-162 540,-126 594,-126 594,-162"/>
+<text text-anchor="middle" x="567" y="-140.3" font-family="Times,serif" font-size="14.00">vulkan</text>
+</g>
+<!-- vulkan&#45;&gt;angle -->
+<g id="edge18" class="edge">
+<title>vulkan&#45;&gt;angle</title>
+<path fill="none" stroke="black" d="M604.42,-144C630.44,-144 664.68,-144 688.94,-144"/>
+<polygon fill="black" stroke="black" points="604.17,-140.5 594.17,-144 604.17,-147.5 604.17,-140.5"/>
+<text text-anchor="middle" x="641.55" y="-129.8" font-family="Times,serif" font-size="14.00">Shared library</text>
+</g>
+<!-- swiftshader -->
+<g id="node17" class="node">
+<title>swiftshader</title>
+<ellipse fill="none" stroke="black" cx="439" cy="-231" rx="77.19" ry="18"/>
+<text text-anchor="middle" x="439" y="-227.3" font-family="Times,serif" font-size="14.00">SwiftShader Pastel</text>
+</g>
+<!-- vulkan&#45;&gt;swiftshader -->
+<g id="edge19" class="edge">
+<title>vulkan&#45;&gt;swiftshader</title>
+<path fill="none" stroke="black" stroke-dasharray="5,2" d="M541.1,-162.2C521.19,-175.42 493.56,-193.77 472.06,-208.04"/>
+<polygon fill="black" stroke="black" points="469.96,-205.24 463.56,-213.69 473.83,-211.07 469.96,-205.24"/>
+<text text-anchor="middle" x="533.5" y="-183.8" font-family="Times,serif" font-size="14.00">Symlink</text>
+</g>
+<!-- vulkan_ranchu -->
+<g id="node19" class="node">
+<title>vulkan_ranchu</title>
+<ellipse fill="none" stroke="blue" cx="610" cy="-231" rx="76.09" ry="18"/>
+<text text-anchor="middle" x="610" y="-227.3" font-family="Times,serif" font-size="14.00">Gfxstream Vulkan</text>
+</g>
+<!-- vulkan&#45;&gt;vulkan_ranchu -->
+<g id="edge20" class="edge">
+<title>vulkan&#45;&gt;vulkan_ranchu</title>
+<path fill="none" stroke="black" stroke-dasharray="5,2" d="M575.7,-162.2C581.7,-174.07 589.8,-190.07 596.62,-203.55"/>
+<polygon fill="black" stroke="black" points="593.67,-205.48 601.31,-212.82 599.92,-202.32 593.67,-205.48"/>
+<text text-anchor="middle" x="614.5" y="-183.8" font-family="Times,serif" font-size="14.00">Symlink</text>
+</g>
+<!-- minigbm&#45;&gt;drm_render -->
+<g id="edge9" class="edge">
+<title>minigbm&#45;&gt;drm_render</title>
+<path fill="none" stroke="black" d="M329.3,-244.53C353.46,-254.87 388.28,-269.76 416.72,-281.92"/>
+<polygon fill="black" stroke="black" points="415.53,-285.22 426.1,-285.93 418.28,-278.78 415.53,-285.22"/>
+</g>
+<!-- minigbm&#45;&gt;hwcomposer_drm -->
+<g id="edge14" class="edge">
+<title>minigbm&#45;&gt;hwcomposer_drm</title>
+<path fill="none" stroke="black" stroke-dasharray="5,2" d="M247.76,-225.65C199.66,-220.7 132.72,-211.14 112,-195 104.63,-189.26 99.54,-180.61 96.05,-172.12"/>
+<polygon fill="black" stroke="black" points="247.42,-229.13 257.72,-226.64 248.11,-222.17 247.42,-229.13"/>
+<polygon fill="black" stroke="black" points="99.26,-170.69 92.67,-162.39 92.65,-172.99 99.26,-170.69"/>
+<text text-anchor="middle" x="183" y="-183.8" font-family="Times,serif" font-size="14.00">HIDL (graphics.allocator)</text>
+</g>
+<!-- mesa&#45;&gt;drm_render -->
+<g id="edge10" class="edge">
+<title>mesa&#45;&gt;drm_render</title>
+<path fill="none" stroke="black" d="M749.03,-238.18C702.86,-248.75 607.16,-270.67 539.87,-286.08"/>
+<polygon fill="black" stroke="black" points="538.71,-282.76 529.74,-288.4 540.27,-289.58 538.71,-282.76"/>
+</g>
+<!-- surfaceflinger -->
+<g id="node18" class="node">
+<title>surfaceflinger</title>
+<ellipse fill="none" stroke="black" cx="400" cy="-57" rx="59.59" ry="18"/>
+<text text-anchor="middle" x="400" y="-53.3" font-family="Times,serif" font-size="14.00">surfaceflinger</text>
+</g>
+<!-- surfaceflinger&#45;&gt;egl -->
+<g id="edge21" class="edge">
+<title>surfaceflinger&#45;&gt;egl</title>
+<path fill="none" stroke="black" d="M449.56,-67.14C518.71,-79.96 648.01,-104.15 758,-126 772.69,-128.92 788.83,-132.24 802.88,-135.17"/>
+<polygon fill="black" stroke="black" points="802.19,-138.6 812.69,-137.22 803.62,-131.75 802.19,-138.6"/>
+<text text-anchor="middle" x="698" y="-96.8" font-family="Times,serif" font-size="14.00">Shared library</text>
+</g>
+<!-- surfaceflinger&#45;&gt;hwcomposer_drm -->
+<g id="edge23" class="edge">
+<title>surfaceflinger&#45;&gt;hwcomposer_drm</title>
+<path fill="none" stroke="black" stroke-dasharray="5,2" d="M331,-60.87C243.25,-65.23 101.11,-75.01 85,-93 79.53,-99.11 78.63,-107.5 79.67,-115.66"/>
+<polygon fill="black" stroke="black" points="331.33,-64.35 341.15,-60.37 330.99,-57.36 331.33,-64.35"/>
+<polygon fill="black" stroke="black" points="76.3,-116.65 81.84,-125.68 83.15,-115.17 76.3,-116.65"/>
+<text text-anchor="middle" x="159" y="-96.8" font-family="Times,serif" font-size="14.00">HIDL (graphics.composer)</text>
+</g>
+<!-- surfaceflinger&#45;&gt;hwcomposer_ranchu -->
+<g id="edge22" class="edge">
+<title>surfaceflinger&#45;&gt;hwcomposer_ranchu</title>
+<path fill="none" stroke="black" stroke-dasharray="5,2" d="M333.4,-64.32C295.3,-69.46 252.79,-78.34 241,-93 235.28,-100.11 236.91,-109.01 241.13,-117.32"/>
+<polygon fill="black" stroke="black" points="334.01,-67.77 343.49,-63.03 333.12,-60.82 334.01,-67.77"/>
+<polygon fill="black" stroke="black" points="238.27,-119.34 246.51,-125.99 244.21,-115.65 238.27,-119.34"/>
+<text text-anchor="middle" x="318" y="-96.8" font-family="Times,serif" font-size="14.00">AIDL (graphics.composer3)</text>
+</g>
+<!-- surfaceflinger&#45;&gt;vulkan -->
+<g id="edge24" class="edge">
+<title>surfaceflinger&#45;&gt;vulkan</title>
+<path fill="none" stroke="black" d="M429.09,-72.81C457.38,-87.2 500.42,-109.11 530.88,-124.62"/>
+<polygon fill="black" stroke="black" points="529.4,-127.79 539.9,-129.21 532.58,-121.55 529.4,-127.79"/>
+<text text-anchor="middle" x="532" y="-96.8" font-family="Times,serif" font-size="14.00">Shared library</text>
+</g>
+<!-- surfaceflinger&#45;&gt;minigbm -->
+<g id="edge25" class="edge">
+<title>surfaceflinger&#45;&gt;minigbm</title>
+<path fill="none" stroke="black" d="M398.38,-85.25C397.64,-92.68 396.56,-100.69 395,-108 393.24,-116.25 390.35,-117.68 389,-126 386.44,-141.79 383.55,-146.96 389,-162 392.55,-171.79 400.45,-170.21 404,-180 406.27,-186.27 407.89,-189.59 404,-195 403.01,-196.37 371.22,-206.96 343.05,-216.13"/>
+<polygon fill="black" stroke="black" points="401.87,-85.53 399.22,-75.27 394.89,-84.94 401.87,-85.53"/>
+<polygon fill="black" stroke="black" points="341.72,-212.89 333.29,-219.3 343.88,-219.54 341.72,-212.89"/>
+<text text-anchor="middle" x="460" y="-140.3" font-family="Times,serif" font-size="14.00">HIDL (graphics.allocator)</text>
+</g>
+<!-- vulkan_ranchu&#45;&gt;drm_render -->
+<g id="edge11" class="edge">
+<title>vulkan_ranchu&#45;&gt;drm_render</title>
+<path fill="none" stroke="black" d="M578.4,-247.58C558.26,-257.51 531.89,-270.51 509.75,-281.43"/>
+<polygon fill="black" stroke="black" points="507.97,-278.4 500.55,-285.96 511.07,-284.68 507.97,-278.4"/>
+</g>
+</g>
+</svg>
diff --git a/host/frontend/webrtc/doc/interface.dot b/host/frontend/webrtc/doc/interface.dot
new file mode 100644
index 0000000..7dcd035
--- /dev/null
+++ b/host/frontend/webrtc/doc/interface.dot
@@ -0,0 +1,70 @@
+digraph {
+  rankdir = "BT"
+  audio_server [label = "internal/audio_server.sock", shape = "rectangle"]
+  browser [label = "Browser"]
+  confirmationui_sign [label = "internal/confui_sign.sock", shape = "rectangle"]
+  gnss_grpc_proxy
+  gnss_grpc_server [label = "GNSS gRPC server", shape = "rectangle"]
+  host_bluetooth [label = "Host bluetooth (netsim / rootcanal)"]
+  host_confirmationui_in [label = "internal/confui_fifo_vm.in", shape = "rectangle"]
+  host_confirmationui_out [label = "internal/confui_fifo_vm.out", shape = "rectangle"]
+  keyboard_server [label = "internal/keyboard.sock", shape = "rectangle"]
+  touch_server [label = "internal/touch_N.sock", shape = "rectangle"]
+  run_cvd
+  operator_proxy
+  secure_env
+  socket_vsock_proxy
+  wayland_socket [label = "internal/frames.sock", shape = "rectangle"]
+  webrtc [label = < <B>webrtc</B> >, penwidth = 2]
+  vmm [label = "crosvm"]
+
+  subgraph cluster_host {
+    label = "cuttlefish-user Debian package"
+
+    operator [label = "Host Orchestrator"]
+    operator_socket [label = "/run/cuttlefish/operator", shape = "rectangle"]
+  }
+
+  subgraph cluster_android {
+    label = "Cuttlefish VM"
+    subgraph devices {
+      rank = same;
+      confirmationui_console [label = "/dev/hvc8", shape = "rectangle"]
+      keyboard_input [label = "/dev/input/event3 | keyboard", shape = "rectangle"]
+      touch_input [label = "/dev/input/event2 | touch", shape = "rectangle"]
+    }
+    adb_daemon [label = "ADB Daemon"]
+    confirmationui [label = "ConfirmationUI HAL"]
+  }
+
+  operator -> browser [label = < <I>HTTPS</I> (1443) >, dir = both]
+  webrtc -> browser [label = < <I>WebRTC</I> >, dir = both]
+  operator_proxy -> browser [label = < <I>HTTPS</I> (8443) >, dir = both]
+  operator -> operator_proxy [label = < <I>HTTP</I> (1080) >, dir = both]
+  operator_socket -> operator [label = < <I>JSON</I> >, dir = both]
+  webrtc -> operator_socket [label = < <I>JSON</I> >, dir = both]
+  webrtc -> run_cvd [dir = back]
+  webrtc -> host_bluetooth
+
+  webrtc -> confirmationui_sign -> secure_env [dir = both]
+  webrtc -> gnss_grpc_server -> gnss_grpc_proxy [dir = both, label = "TCP"]
+
+  adb_daemon -> socket_vsock_proxy -> webrtc [dir = both]
+
+  audio_server -> webrtc [dir = both]
+  vmm -> audio_server [dir = both]
+
+  vmm -> wayland_socket -> webrtc [label = < <I>Wayland</I> >, dir = both]
+
+  vmm -> keyboard_server -> webrtc [dir = back]
+
+  vmm -> touch_server -> webrtc [dir = back]
+
+  vmm -> host_confirmationui_out -> webrtc
+  vmm -> host_confirmationui_in -> webrtc [dir = back]
+
+  confirmationui -> confirmationui_console -> vmm [dir = both]
+
+  keyboard_input -> vmm
+  touch_input -> vmm
+}
diff --git a/host/frontend/webrtc/doc/interface.png b/host/frontend/webrtc/doc/interface.png
new file mode 100644
index 0000000..4fb643a
--- /dev/null
+++ b/host/frontend/webrtc/doc/interface.png
Binary files differ
diff --git a/host/frontend/webrtc/doc/interface.svg b/host/frontend/webrtc/doc/interface.svg
new file mode 100644
index 0000000..837f522
--- /dev/null
+++ b/host/frontend/webrtc/doc/interface.svg
@@ -0,0 +1,393 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN"
+ "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
+<!-- Generated by graphviz version 2.43.0 (0)
+ -->
+<!-- Title: %3 Pages: 1 -->
+<svg width="1089pt" height="772pt"
+ viewBox="0.00 0.00 1088.50 772.00" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
+<g id="graph0" class="graph" transform="scale(1 1) rotate(0) translate(4 768)">
+<title>%3</title>
+<polygon fill="white" stroke="transparent" points="-4,4 -4,-768 1084.5,-768 1084.5,4 -4,4"/>
+<g id="clust1" class="cluster">
+<title>cluster_host</title>
+<polygon fill="none" stroke="black" points="887.5,-436 887.5,-598 1072.5,-598 1072.5,-436 887.5,-436"/>
+<text text-anchor="middle" x="980" y="-443.8" font-family="Times,serif" font-size="14.00">cuttlefish&#45;user Debian package</text>
+</g>
+<g id="clust2" class="cluster">
+<title>cluster_android</title>
+<polygon fill="none" stroke="black" points="410.5,-8 410.5,-156 856.5,-156 856.5,-8 410.5,-8"/>
+<text text-anchor="middle" x="633.5" y="-15.8" font-family="Times,serif" font-size="14.00">Cuttlefish VM</text>
+</g>
+<!-- audio_server -->
+<g id="node1" class="node">
+<title>audio_server</title>
+<polygon fill="none" stroke="black" points="985,-308 826,-308 826,-272 985,-272 985,-308"/>
+<text text-anchor="middle" x="905.5" y="-286.3" font-family="Times,serif" font-size="14.00">internal/audio_server.sock</text>
+</g>
+<!-- webrtc -->
+<g id="node16" class="node">
+<title>webrtc</title>
+<ellipse fill="none" stroke="black" stroke-width="2" cx="592.5" cy="-377" rx="41.69" ry="18"/>
+<text text-anchor="start" x="568.5" y="-374.3" font-family="Times,serif" font-size="14.00"> </text>
+<text text-anchor="start" x="572.5" y="-374.3" font-family="Times,serif" font-weight="bold" font-size="14.00">webrtc</text>
+<text text-anchor="start" x="612.5" y="-374.3" font-family="Times,serif" font-size="14.00"> </text>
+</g>
+<!-- audio_server&#45;&gt;webrtc -->
+<g id="edge15" class="edge">
+<title>audio_server&#45;&gt;webrtc</title>
+<path fill="none" stroke="black" d="M840.59,-311.02C809.24,-320.48 771.02,-331.7 736.5,-341 703.86,-349.79 666.8,-358.77 638.45,-365.44"/>
+<polygon fill="black" stroke="black" points="841.96,-314.26 850.51,-308.01 839.93,-307.56 841.96,-314.26"/>
+<polygon fill="black" stroke="black" points="637.37,-362.1 628.43,-367.79 638.96,-368.91 637.37,-362.1"/>
+</g>
+<!-- browser -->
+<g id="node2" class="node">
+<title>browser</title>
+<ellipse fill="none" stroke="black" cx="865.5" cy="-746" rx="40.09" ry="18"/>
+<text text-anchor="middle" x="865.5" y="-742.3" font-family="Times,serif" font-size="14.00">Browser</text>
+</g>
+<!-- confirmationui_sign -->
+<g id="node3" class="node">
+<title>confirmationui_sign</title>
+<polygon fill="none" stroke="black" points="351.5,-503 197.5,-503 197.5,-467 351.5,-467 351.5,-503"/>
+<text text-anchor="middle" x="274.5" y="-481.3" font-family="Times,serif" font-size="14.00">internal/confui_sign.sock</text>
+</g>
+<!-- secure_env -->
+<g id="node13" class="node">
+<title>secure_env</title>
+<ellipse fill="none" stroke="black" cx="293.5" cy="-572" rx="50.09" ry="18"/>
+<text text-anchor="middle" x="293.5" y="-568.3" font-family="Times,serif" font-size="14.00">secure_env</text>
+</g>
+<!-- confirmationui_sign&#45;&gt;secure_env -->
+<g id="edge10" class="edge">
+<title>confirmationui_sign&#45;&gt;secure_env</title>
+<path fill="none" stroke="black" d="M280.59,-513.27C282.78,-523.04 285.24,-534.06 287.43,-543.83"/>
+<polygon fill="black" stroke="black" points="283.94,-512.2 278.34,-503.2 277.11,-513.72 283.94,-512.2"/>
+<polygon fill="black" stroke="black" points="284.06,-544.83 289.66,-553.82 290.89,-543.3 284.06,-544.83"/>
+</g>
+<!-- gnss_grpc_proxy -->
+<g id="node4" class="node">
+<title>gnss_grpc_proxy</title>
+<ellipse fill="none" stroke="black" cx="433.5" cy="-572" rx="71.49" ry="18"/>
+<text text-anchor="middle" x="433.5" y="-568.3" font-family="Times,serif" font-size="14.00">gnss_grpc_proxy</text>
+</g>
+<!-- gnss_grpc_server -->
+<g id="node5" class="node">
+<title>gnss_grpc_server</title>
+<polygon fill="none" stroke="black" points="493.5,-503 369.5,-503 369.5,-467 493.5,-467 493.5,-503"/>
+<text text-anchor="middle" x="431.5" y="-481.3" font-family="Times,serif" font-size="14.00">GNSS gRPC server</text>
+</g>
+<!-- gnss_grpc_server&#45;&gt;gnss_grpc_proxy -->
+<g id="edge12" class="edge">
+<title>gnss_grpc_server&#45;&gt;gnss_grpc_proxy</title>
+<path fill="none" stroke="black" d="M432.14,-513.27C432.37,-522.94 432.63,-533.83 432.85,-543.52"/>
+<polygon fill="black" stroke="black" points="435.64,-513.12 431.9,-503.2 428.64,-513.28 435.64,-513.12"/>
+<polygon fill="black" stroke="black" points="429.36,-543.91 433.1,-553.82 436.36,-543.74 429.36,-543.91"/>
+<text text-anchor="middle" x="445" y="-524.8" font-family="Times,serif" font-size="14.00">TCP</text>
+</g>
+<!-- host_bluetooth -->
+<g id="node6" class="node">
+<title>host_bluetooth</title>
+<ellipse fill="none" stroke="black" cx="646.5" cy="-485" rx="135.38" ry="18"/>
+<text text-anchor="middle" x="646.5" y="-481.3" font-family="Times,serif" font-size="14.00">Host bluetooth (netsim / rootcanal)</text>
+</g>
+<!-- host_confirmationui_in -->
+<g id="node7" class="node">
+<title>host_confirmationui_in</title>
+<polygon fill="none" stroke="black" points="161,-308 0,-308 0,-272 161,-272 161,-308"/>
+<text text-anchor="middle" x="80.5" y="-286.3" font-family="Times,serif" font-size="14.00">internal/confui_fifo_vm.in</text>
+</g>
+<!-- host_confirmationui_in&#45;&gt;webrtc -->
+<g id="edge26" class="edge">
+<title>host_confirmationui_in&#45;&gt;webrtc</title>
+<path fill="none" stroke="black" d="M171.57,-308.16C318.89,-333.82 481.03,-358.99 553.75,-370.11"/>
+<polygon fill="black" stroke="black" points="171.89,-304.67 161.43,-306.39 170.68,-311.56 171.89,-304.67"/>
+</g>
+<!-- host_confirmationui_out -->
+<g id="node8" class="node">
+<title>host_confirmationui_out</title>
+<polygon fill="none" stroke="black" points="347.5,-308 179.5,-308 179.5,-272 347.5,-272 347.5,-308"/>
+<text text-anchor="middle" x="263.5" y="-286.3" font-family="Times,serif" font-size="14.00">internal/confui_fifo_vm.out</text>
+</g>
+<!-- host_confirmationui_out&#45;&gt;webrtc -->
+<g id="edge24" class="edge">
+<title>host_confirmationui_out&#45;&gt;webrtc</title>
+<path fill="none" stroke="black" d="M329.68,-308.1C394.51,-324.85 491.52,-349.91 547.76,-364.44"/>
+<polygon fill="black" stroke="black" points="547.21,-367.91 557.77,-367.03 548.97,-361.14 547.21,-367.91"/>
+</g>
+<!-- keyboard_server -->
+<g id="node9" class="node">
+<title>keyboard_server</title>
+<polygon fill="none" stroke="black" points="505.5,-308 365.5,-308 365.5,-272 505.5,-272 505.5,-308"/>
+<text text-anchor="middle" x="435.5" y="-286.3" font-family="Times,serif" font-size="14.00">internal/keyboard.sock</text>
+</g>
+<!-- keyboard_server&#45;&gt;webrtc -->
+<g id="edge20" class="edge">
+<title>keyboard_server&#45;&gt;webrtc</title>
+<path fill="none" stroke="black" d="M476.3,-313.09C505.18,-328.72 542.97,-349.18 567.49,-362.46"/>
+<polygon fill="black" stroke="black" points="477.73,-309.88 467.27,-308.2 474.4,-316.04 477.73,-309.88"/>
+</g>
+<!-- touch_server -->
+<g id="node10" class="node">
+<title>touch_server</title>
+<polygon fill="none" stroke="black" points="661,-308 524,-308 524,-272 661,-272 661,-308"/>
+<text text-anchor="middle" x="592.5" y="-286.3" font-family="Times,serif" font-size="14.00">internal/touch_N.sock</text>
+</g>
+<!-- touch_server&#45;&gt;webrtc -->
+<g id="edge22" class="edge">
+<title>touch_server&#45;&gt;webrtc</title>
+<path fill="none" stroke="black" d="M592.5,-318.27C592.5,-331.58 592.5,-347.19 592.5,-358.82"/>
+<polygon fill="black" stroke="black" points="596,-318.2 592.5,-308.2 589,-318.2 596,-318.2"/>
+</g>
+<!-- run_cvd -->
+<g id="node11" class="node">
+<title>run_cvd</title>
+<ellipse fill="none" stroke="black" cx="839.5" cy="-485" rx="39.79" ry="18"/>
+<text text-anchor="middle" x="839.5" y="-481.3" font-family="Times,serif" font-size="14.00">run_cvd</text>
+</g>
+<!-- operator_proxy -->
+<g id="node12" class="node">
+<title>operator_proxy</title>
+<ellipse fill="none" stroke="black" cx="866.5" cy="-659" rx="64.99" ry="18"/>
+<text text-anchor="middle" x="866.5" y="-655.3" font-family="Times,serif" font-size="14.00">operator_proxy</text>
+</g>
+<!-- operator_proxy&#45;&gt;browser -->
+<g id="edge3" class="edge">
+<title>operator_proxy&#45;&gt;browser</title>
+<path fill="none" stroke="black" d="M866.18,-687.27C866.07,-696.94 865.94,-707.83 865.82,-717.52"/>
+<polygon fill="black" stroke="black" points="869.68,-687.24 866.3,-677.2 862.68,-687.16 869.68,-687.24"/>
+<polygon fill="black" stroke="black" points="862.32,-717.78 865.7,-727.82 869.32,-717.87 862.32,-717.78"/>
+<text text-anchor="start" x="865.5" y="-699.8" font-family="Times,serif" font-size="14.00"> </text>
+<text text-anchor="start" x="869.5" y="-699.8" font-family="Times,serif" font-style="italic" font-size="14.00">HTTPS</text>
+<text text-anchor="start" x="909.5" y="-699.8" font-family="Times,serif" font-size="14.00"> (8443) </text>
+</g>
+<!-- socket_vsock_proxy -->
+<g id="node14" class="node">
+<title>socket_vsock_proxy</title>
+<ellipse fill="none" stroke="black" cx="991.5" cy="-130" rx="83.39" ry="18"/>
+<text text-anchor="middle" x="991.5" y="-126.3" font-family="Times,serif" font-size="14.00">socket_vsock_proxy</text>
+</g>
+<!-- socket_vsock_proxy&#45;&gt;webrtc -->
+<g id="edge14" class="edge">
+<title>socket_vsock_proxy&#45;&gt;webrtc</title>
+<path fill="none" stroke="black" d="M1001.76,-157.9C1014.37,-196.06 1030.04,-265.94 994.5,-308 950.21,-360.41 742.41,-372.44 644.19,-375.19"/>
+<polygon fill="black" stroke="black" points="1004.91,-156.33 998.32,-148.03 998.3,-158.62 1004.91,-156.33"/>
+<polygon fill="black" stroke="black" points="644.09,-371.69 634.18,-375.45 644.27,-378.69 644.09,-371.69"/>
+</g>
+<!-- wayland_socket -->
+<g id="node15" class="node">
+<title>wayland_socket</title>
+<polygon fill="none" stroke="black" points="807.5,-308 679.5,-308 679.5,-272 807.5,-272 807.5,-308"/>
+<text text-anchor="middle" x="743.5" y="-286.3" font-family="Times,serif" font-size="14.00">internal/frames.sock</text>
+</g>
+<!-- wayland_socket&#45;&gt;webrtc -->
+<g id="edge18" class="edge">
+<title>wayland_socket&#45;&gt;webrtc</title>
+<path fill="none" stroke="black" d="M703.95,-313.26C679.64,-326.95 648.83,-344.29 625.77,-357.27"/>
+<polygon fill="black" stroke="black" points="705.95,-316.16 712.94,-308.2 702.51,-310.06 705.95,-316.16"/>
+<polygon fill="black" stroke="black" points="624.03,-354.23 617.03,-362.19 627.46,-360.33 624.03,-354.23"/>
+<text text-anchor="start" x="676.5" y="-330.8" font-family="Times,serif" font-size="14.00"> </text>
+<text text-anchor="start" x="680.5" y="-330.8" font-family="Times,serif" font-style="italic" font-size="14.00">Wayland</text>
+<text text-anchor="start" x="728.5" y="-330.8" font-family="Times,serif" font-size="14.00"> </text>
+</g>
+<!-- webrtc&#45;&gt;browser -->
+<g id="edge2" class="edge">
+<title>webrtc&#45;&gt;browser</title>
+<path fill="none" stroke="black" d="M540.91,-379.2C444.66,-382.42 242.54,-394.12 188.5,-436 170.23,-450.16 168.5,-460.89 168.5,-484 168.5,-484 168.5,-484 168.5,-660 168.5,-725.2 655.23,-740.79 815.21,-744.15"/>
+<polygon fill="black" stroke="black" points="541.12,-382.7 551,-378.88 540.9,-375.7 541.12,-382.7"/>
+<polygon fill="black" stroke="black" points="815.2,-747.65 825.27,-744.35 815.34,-740.65 815.2,-747.65"/>
+<text text-anchor="start" x="168.5" y="-569.3" font-family="Times,serif" font-size="14.00"> </text>
+<text text-anchor="start" x="172.5" y="-569.3" font-family="Times,serif" font-style="italic" font-size="14.00">WebRTC</text>
+<text text-anchor="start" x="221.5" y="-569.3" font-family="Times,serif" font-size="14.00"> </text>
+</g>
+<!-- webrtc&#45;&gt;confirmationui_sign -->
+<g id="edge9" class="edge">
+<title>webrtc&#45;&gt;confirmationui_sign</title>
+<path fill="none" stroke="black" d="M544.11,-386.23C496.36,-395.24 421.82,-411.72 360.5,-436 342.96,-442.95 324.56,-452.82 309.3,-461.81"/>
+<polygon fill="black" stroke="black" points="544.98,-389.63 554.17,-384.37 543.7,-382.75 544.98,-389.63"/>
+<polygon fill="black" stroke="black" points="307.48,-458.82 300.71,-466.97 311.09,-464.82 307.48,-458.82"/>
+</g>
+<!-- webrtc&#45;&gt;gnss_grpc_server -->
+<g id="edge11" class="edge">
+<title>webrtc&#45;&gt;gnss_grpc_server</title>
+<path fill="none" stroke="black" d="M561.23,-397.92C543.87,-409.03 521.92,-423.18 502.5,-436 490.11,-444.18 476.59,-453.27 464.71,-461.33"/>
+<polygon fill="black" stroke="black" points="563.31,-400.74 569.85,-392.4 559.54,-394.84 563.31,-400.74"/>
+<polygon fill="black" stroke="black" points="462.69,-458.47 456.38,-466.98 466.62,-464.26 462.69,-458.47"/>
+<text text-anchor="middle" x="549" y="-416.8" font-family="Times,serif" font-size="14.00">TCP</text>
+</g>
+<!-- webrtc&#45;&gt;host_bluetooth -->
+<g id="edge8" class="edge">
+<title>webrtc&#45;&gt;host_bluetooth</title>
+<path fill="none" stroke="black" d="M601.18,-395.03C609.84,-412.03 623.26,-438.38 633.25,-457.99"/>
+<polygon fill="black" stroke="black" points="630.19,-459.69 637.85,-467.01 636.42,-456.51 630.19,-459.69"/>
+</g>
+<!-- webrtc&#45;&gt;run_cvd -->
+<g id="edge7" class="edge">
+<title>webrtc&#45;&gt;run_cvd</title>
+<path fill="none" stroke="black" d="M641.51,-385.46C683.29,-393.38 743.82,-408.65 790.5,-436 804.47,-444.19 817.43,-457.22 826.5,-467.64"/>
+<polygon fill="black" stroke="black" points="642.05,-382 631.58,-383.65 640.79,-388.89 642.05,-382"/>
+</g>
+<!-- operator_socket -->
+<g id="node19" class="node">
+<title>operator_socket</title>
+<polygon fill="none" stroke="black" points="1038,-503 897,-503 897,-467 1038,-467 1038,-503"/>
+<text text-anchor="middle" x="967.5" y="-481.3" font-family="Times,serif" font-size="14.00">/run/cuttlefish/operator</text>
+</g>
+<!-- webrtc&#45;&gt;operator_socket -->
+<g id="edge6" class="edge">
+<title>webrtc&#45;&gt;operator_socket</title>
+<path fill="none" stroke="black" d="M643.01,-382.43C703.17,-388.93 805.89,-403.95 888.5,-436 904.95,-442.38 921.83,-452.13 935.75,-461.18"/>
+<polygon fill="black" stroke="black" points="643.32,-378.94 633.01,-381.38 642.59,-385.9 643.32,-378.94"/>
+<polygon fill="black" stroke="black" points="934.18,-464.34 944.44,-466.99 938.07,-458.52 934.18,-464.34"/>
+<text text-anchor="start" x="865.5" y="-417.8" font-family="Times,serif" font-size="14.00"> </text>
+<text text-anchor="start" x="869.5" y="-417.8" font-family="Times,serif" font-style="italic" font-size="14.00">JSON</text>
+<text text-anchor="start" x="901.5" y="-417.8" font-family="Times,serif" font-size="14.00"> </text>
+</g>
+<!-- vmm -->
+<g id="node17" class="node">
+<title>vmm</title>
+<ellipse fill="none" stroke="black" cx="592.5" cy="-203" rx="37.09" ry="18"/>
+<text text-anchor="middle" x="592.5" y="-199.3" font-family="Times,serif" font-size="14.00">crosvm</text>
+</g>
+<!-- vmm&#45;&gt;audio_server -->
+<g id="edge16" class="edge">
+<title>vmm&#45;&gt;audio_server</title>
+<path fill="none" stroke="black" d="M635.32,-213.82C664.03,-220.56 702.65,-229.88 736.5,-239 771.02,-248.3 809.24,-259.52 840.59,-268.98"/>
+<polygon fill="black" stroke="black" points="635.75,-210.33 625.22,-211.47 634.16,-217.15 635.75,-210.33"/>
+<polygon fill="black" stroke="black" points="839.93,-272.44 850.51,-271.99 841.96,-265.74 839.93,-272.44"/>
+</g>
+<!-- vmm&#45;&gt;host_confirmationui_in -->
+<g id="edge25" class="edge">
+<title>vmm&#45;&gt;host_confirmationui_in</title>
+<path fill="none" stroke="black" d="M547.38,-210.86C470.11,-222.71 307.51,-248.03 170.5,-272 167.52,-272.52 164.49,-273.06 161.43,-273.61"/>
+<polygon fill="black" stroke="black" points="548.35,-214.25 557.71,-209.28 547.3,-207.33 548.35,-214.25"/>
+</g>
+<!-- vmm&#45;&gt;host_confirmationui_out -->
+<g id="edge23" class="edge">
+<title>vmm&#45;&gt;host_confirmationui_out</title>
+<path fill="none" stroke="black" d="M560.66,-212.23C510.01,-225.31 409.69,-251.23 339.69,-269.31"/>
+<polygon fill="black" stroke="black" points="338.4,-266.03 329.6,-271.92 340.15,-272.81 338.4,-266.03"/>
+</g>
+<!-- vmm&#45;&gt;keyboard_server -->
+<g id="edge19" class="edge">
+<title>vmm&#45;&gt;keyboard_server</title>
+<path fill="none" stroke="black" d="M559.54,-221.84C532.35,-236.57 494.02,-257.32 467.18,-271.85"/>
+<polygon fill="black" stroke="black" points="561.53,-224.75 568.66,-216.91 558.19,-218.59 561.53,-224.75"/>
+</g>
+<!-- vmm&#45;&gt;touch_server -->
+<g id="edge21" class="edge">
+<title>vmm&#45;&gt;touch_server</title>
+<path fill="none" stroke="black" d="M592.5,-231.27C592.5,-244.58 592.5,-260.19 592.5,-271.82"/>
+<polygon fill="black" stroke="black" points="596,-231.2 592.5,-221.2 589,-231.2 596,-231.2"/>
+</g>
+<!-- vmm&#45;&gt;wayland_socket -->
+<g id="edge17" class="edge">
+<title>vmm&#45;&gt;wayland_socket</title>
+<path fill="none" stroke="black" d="M624.55,-222.04C647.86,-235.16 679.55,-253 704.37,-266.97"/>
+<polygon fill="black" stroke="black" points="626.19,-218.95 615.76,-217.09 622.76,-225.05 626.19,-218.95"/>
+<polygon fill="black" stroke="black" points="702.74,-270.07 713.17,-271.93 706.17,-263.97 702.74,-270.07"/>
+<text text-anchor="start" x="676.5" y="-243.8" font-family="Times,serif" font-size="14.00"> </text>
+<text text-anchor="start" x="680.5" y="-243.8" font-family="Times,serif" font-style="italic" font-size="14.00">Wayland</text>
+<text text-anchor="start" x="728.5" y="-243.8" font-family="Times,serif" font-size="14.00"> </text>
+</g>
+<!-- operator -->
+<g id="node18" class="node">
+<title>operator</title>
+<ellipse fill="none" stroke="black" cx="968.5" cy="-572" rx="73.39" ry="18"/>
+<text text-anchor="middle" x="968.5" y="-568.3" font-family="Times,serif" font-size="14.00">Host Orchestrator</text>
+</g>
+<!-- operator&#45;&gt;browser -->
+<g id="edge1" class="edge">
+<title>operator&#45;&gt;browser</title>
+<path fill="none" stroke="black" d="M973.44,-600.35C977.26,-630.36 978.44,-678.45 954.5,-710 944.54,-723.13 928.92,-731.34 913.54,-736.47"/>
+<polygon fill="black" stroke="black" points="976.87,-599.57 971.96,-590.18 969.94,-600.58 976.87,-599.57"/>
+<polygon fill="black" stroke="black" points="912.12,-733.24 903.52,-739.42 914.1,-739.95 912.12,-733.24"/>
+<text text-anchor="start" x="974.5" y="-656.3" font-family="Times,serif" font-size="14.00"> </text>
+<text text-anchor="start" x="978.5" y="-656.3" font-family="Times,serif" font-style="italic" font-size="14.00">HTTPS</text>
+<text text-anchor="start" x="1018.5" y="-656.3" font-family="Times,serif" font-size="14.00"> (1443) </text>
+</g>
+<!-- operator&#45;&gt;operator_proxy -->
+<g id="edge4" class="edge">
+<title>operator&#45;&gt;operator_proxy</title>
+<path fill="none" stroke="black" d="M900.95,-586.79C890.03,-591.81 879.9,-598.66 872.5,-608 867.52,-614.29 865.33,-622.49 864.59,-630.43"/>
+<polygon fill="black" stroke="black" points="902.33,-590.01 910.26,-582.99 899.68,-583.53 902.33,-590.01"/>
+<polygon fill="black" stroke="black" points="861.08,-630.45 864.31,-640.54 868.08,-630.63 861.08,-630.45"/>
+<text text-anchor="start" x="872.5" y="-612.8" font-family="Times,serif" font-size="14.00"> </text>
+<text text-anchor="start" x="876.5" y="-612.8" font-family="Times,serif" font-style="italic" font-size="14.00">HTTP</text>
+<text text-anchor="start" x="910.5" y="-612.8" font-family="Times,serif" font-size="14.00"> (1080) </text>
+</g>
+<!-- operator_socket&#45;&gt;operator -->
+<g id="edge5" class="edge">
+<title>operator_socket&#45;&gt;operator</title>
+<path fill="none" stroke="black" d="M967.82,-513.27C967.93,-522.94 968.06,-533.83 968.18,-543.52"/>
+<polygon fill="black" stroke="black" points="971.32,-513.16 967.7,-503.2 964.32,-513.24 971.32,-513.16"/>
+<polygon fill="black" stroke="black" points="964.68,-543.87 968.3,-553.82 971.68,-543.78 964.68,-543.87"/>
+<text text-anchor="start" x="967.5" y="-525.8" font-family="Times,serif" font-size="14.00"> </text>
+<text text-anchor="start" x="971.5" y="-525.8" font-family="Times,serif" font-style="italic" font-size="14.00">JSON</text>
+<text text-anchor="start" x="1003.5" y="-525.8" font-family="Times,serif" font-size="14.00"> </text>
+</g>
+<!-- confirmationui_console -->
+<g id="node20" class="node">
+<title>confirmationui_console</title>
+<polygon fill="none" stroke="black" points="848.5,-148 778.5,-148 778.5,-112 848.5,-112 848.5,-148"/>
+<text text-anchor="middle" x="813.5" y="-126.3" font-family="Times,serif" font-size="14.00">/dev/hvc8</text>
+</g>
+<!-- confirmationui_console&#45;&gt;vmm -->
+<g id="edge28" class="edge">
+<title>confirmationui_console&#45;&gt;vmm</title>
+<path fill="none" stroke="black" d="M777.08,-152.64C774.55,-153.85 772.01,-154.98 769.5,-156 725.67,-173.79 672.9,-186.45 636.45,-193.93"/>
+<polygon fill="black" stroke="black" points="778.81,-155.68 786.12,-148.01 775.62,-149.45 778.81,-155.68"/>
+<polygon fill="black" stroke="black" points="635.74,-190.5 626.62,-195.9 637.11,-197.37 635.74,-190.5"/>
+</g>
+<!-- keyboard_input -->
+<g id="node21" class="node">
+<title>keyboard_input</title>
+<polygon fill="none" stroke="black" points="590.5,-148 418.5,-148 418.5,-112 590.5,-112 590.5,-148"/>
+<text text-anchor="middle" x="504.5" y="-126.3" font-family="Times,serif" font-size="14.00">/dev/input/event3 | keyboard</text>
+</g>
+<!-- keyboard_input&#45;&gt;vmm -->
+<g id="edge29" class="edge">
+<title>keyboard_input&#45;&gt;vmm</title>
+<path fill="none" stroke="black" d="M525.8,-148.19C537.9,-157.95 553.16,-170.26 566.02,-180.64"/>
+<polygon fill="black" stroke="black" points="564.1,-183.58 574.08,-187.13 568.49,-178.13 564.1,-183.58"/>
+</g>
+<!-- touch_input -->
+<g id="node22" class="node">
+<title>touch_input</title>
+<polygon fill="none" stroke="black" points="760.5,-148 608.5,-148 608.5,-112 760.5,-112 760.5,-148"/>
+<text text-anchor="middle" x="684.5" y="-126.3" font-family="Times,serif" font-size="14.00">/dev/input/event2 | touch</text>
+</g>
+<!-- touch_input&#45;&gt;vmm -->
+<g id="edge30" class="edge">
+<title>touch_input&#45;&gt;vmm</title>
+<path fill="none" stroke="black" d="M662.23,-148.19C649.37,-158.11 633.1,-170.66 619.52,-181.15"/>
+<polygon fill="black" stroke="black" points="617.24,-178.49 611.46,-187.37 621.51,-184.03 617.24,-178.49"/>
+</g>
+<!-- adb_daemon -->
+<g id="node23" class="node">
+<title>adb_daemon</title>
+<ellipse fill="none" stroke="black" cx="786.5" cy="-57" rx="61.99" ry="18"/>
+<text text-anchor="middle" x="786.5" y="-53.3" font-family="Times,serif" font-size="14.00">ADB Daemon</text>
+</g>
+<!-- adb_daemon&#45;&gt;socket_vsock_proxy -->
+<g id="edge13" class="edge">
+<title>adb_daemon&#45;&gt;socket_vsock_proxy</title>
+<path fill="none" stroke="black" d="M834.07,-74.48C865.68,-85.42 907.17,-99.79 939.63,-111.03"/>
+<polygon fill="black" stroke="black" points="835.17,-71.15 824.58,-71.19 832.88,-77.77 835.17,-71.15"/>
+<polygon fill="black" stroke="black" points="938.79,-114.45 949.39,-114.41 941.08,-107.83 938.79,-114.45"/>
+</g>
+<!-- confirmationui -->
+<g id="node24" class="node">
+<title>confirmationui</title>
+<ellipse fill="none" stroke="black" cx="618.5" cy="-57" rx="88.28" ry="18"/>
+<text text-anchor="middle" x="618.5" y="-53.3" font-family="Times,serif" font-size="14.00">ConfirmationUI HAL</text>
+</g>
+<!-- confirmationui&#45;&gt;confirmationui_console -->
+<g id="edge27" class="edge">
+<title>confirmationui&#45;&gt;confirmationui_console</title>
+<path fill="none" stroke="black" d="M670.49,-76.23C697.38,-85.74 730.94,-97.74 768.76,-111.76"/>
+<polygon fill="black" stroke="black" points="671.51,-72.87 660.91,-72.84 669.17,-79.48 671.51,-72.87"/>
+<polygon fill="black" stroke="black" points="767.87,-115.17 778.47,-115.37 770.31,-108.61 767.87,-115.17"/>
+</g>
+</g>
+</svg>
diff --git a/host/frontend/webrtc/gpx_locations_handler.cpp b/host/frontend/webrtc/gpx_locations_handler.cpp
new file mode 100644
index 0000000..7fb2fdb
--- /dev/null
+++ b/host/frontend/webrtc/gpx_locations_handler.cpp
@@ -0,0 +1,69 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/frontend/webrtc/gpx_locations_handler.h"
+#include <android-base/logging.h>
+#include <unistd.h>
+#include "host/libs/config/cuttlefish_config.h"
+#include "host/libs/location/GnssClient.h"
+#include "host/libs/location/GpxParser.h"
+#include "string.h"
+
+#include <chrono>
+#include <iostream>
+#include <sstream>
+#include <thread>
+#include <vector>
+
+namespace cuttlefish {
+namespace webrtc_streaming {
+
+GpxLocationsHandler::GpxLocationsHandler(
+    std::function<void(const uint8_t *, size_t)> send_to_client) {}
+
+GpxLocationsHandler::~GpxLocationsHandler() {}
+
+void GpxLocationsHandler::HandleMessage(const uint8_t *msg, size_t len) {
+  LOG(DEBUG) << "ENTER GpxLocationsHandler handleMessage , size: " << len;
+  std::string error;
+  GpsFixArray coordinates;
+  if (!GpxParser::parseString((const char *)&msg[0], len, &coordinates,
+                              &error)) {
+    LOG(ERROR) << " Parsing Error: " << error << std::endl;
+    return;
+  }
+
+  LOG(DEBUG) << "Number of parsed points: " << coordinates.size() << std::endl;
+  auto config = CuttlefishConfig::Get();
+  if (!config) {
+    LOG(ERROR) << "Failed to obtain config object";
+    return;
+  }
+  auto instance = config->ForDefaultInstance();
+  auto server_port = instance.gnss_grpc_proxy_server_port();
+  std::string socket_name =
+      std::string("localhost:") + std::to_string(server_port);
+  LOG(DEBUG) << "Server port: " << server_port << " socket: " << socket_name
+             << std::endl;
+
+  GnssClient gpsclient(
+      grpc::CreateChannel(socket_name, grpc::InsecureChannelCredentials()));
+
+  auto reply = gpsclient.SendGpsLocations(1000, coordinates);
+}
+
+}  // namespace webrtc_streaming
+}  // namespace cuttlefish
diff --git a/host/frontend/webrtc/gpx_locations_handler.h b/host/frontend/webrtc/gpx_locations_handler.h
new file mode 100644
index 0000000..edc9e00
--- /dev/null
+++ b/host/frontend/webrtc/gpx_locations_handler.h
@@ -0,0 +1,33 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include "common/libs/fs/shared_select.h"
+
+namespace cuttlefish {
+namespace webrtc_streaming {
+
+struct GpxLocationsHandler {
+  explicit GpxLocationsHandler(
+      std::function<void(const uint8_t *, size_t)> send_to_client);
+
+  ~GpxLocationsHandler();
+
+  void HandleMessage(const uint8_t *msg, size_t len);
+};
+}  // namespace webrtc_streaming
+}  // namespace cuttlefish
diff --git a/host/frontend/webrtc/html_client/Android.bp b/host/frontend/webrtc/html_client/Android.bp
new file mode 100644
index 0000000..2476f15
--- /dev/null
+++ b/host/frontend/webrtc/html_client/Android.bp
@@ -0,0 +1,82 @@
+//
+// Copyright (C) 2020 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+prebuilt_usr_share_host {
+    name: "webrtc_client.html",
+    src: "client.html",
+    filename: "client.html",
+    sub_dir: "webrtc/assets",
+}
+
+prebuilt_usr_share_host {
+    name: "webrtc_style.css",
+    src: "style.css",
+    filename: "style.css",
+    sub_dir: "webrtc/assets",
+}
+
+prebuilt_usr_share_host {
+    name: "webrtc_controls.css",
+    src: "controls.css",
+    filename: "controls.css",
+    sub_dir: "webrtc/assets",
+}
+
+prebuilt_usr_share_host {
+    name: "webrtc_adb.js",
+    src: "js/adb.js",
+    filename: "adb.js",
+    sub_dir: "webrtc/assets/js",
+}
+
+prebuilt_usr_share_host {
+    name: "webrtc_cf.js",
+    src: "js/cf_webrtc.js",
+    filename: "cf_webrtc.js",
+    sub_dir: "webrtc/assets/js",
+}
+
+prebuilt_usr_share_host {
+    name: "webrtc_app.js",
+    src: "js/app.js",
+    filename: "app.js",
+    sub_dir: "webrtc/assets/js",
+}
+
+prebuilt_usr_share_host {
+    name: "webrtc_controls.js",
+    src: "js/controls.js",
+    filename: "controls.js",
+    sub_dir: "webrtc/assets/js",
+}
+
+prebuilt_usr_share_host {
+    name: "webrtc_rootcanal.js",
+    src: "js/rootcanal.js",
+    filename: "rootcanal.js",
+    sub_dir: "webrtc/assets/js",
+}
+
+prebuilt_usr_share_host {
+    name: "webrtc_location.js",
+    src: "js/location.js",
+    filename: "location.js",
+    sub_dir: "webrtc/assets/js",
+}
+
diff --git a/host/frontend/webrtc/html_client/client.html b/host/frontend/webrtc/html_client/client.html
new file mode 100644
index 0000000..02330ba
--- /dev/null
+++ b/host/frontend/webrtc/html_client/client.html
@@ -0,0 +1,220 @@
+<!--
+ Copyright (C) 2019 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ -->
+
+<html>
+    <head>
+        <link rel="stylesheet" type="text/css" href="style.css" >
+        <link rel="stylesheet" type="text/css" href="controls.css" >
+        <link rel="stylesheet" href="https://fonts.googleapis.com/icon?family=Material+Icons+Outlined">
+        <link rel="stylesheet" href="https://fonts.googleapis.com/icon?family=Material+Icons">
+        <script src="https://ajax.googleapis.com/ajax/libs/jquery/3.6.0/jquery.min.js"></script>
+    </head>
+
+    <body>
+      <div id="loader"></div>
+      <div id="error-message-div">
+        <h3 id="error-message" class="hidden">
+          <span class="material-icons close-btn">close</span>
+        </h3>
+      </div>
+      <section id="device-connection">
+        <audio id="device-audio"></audio>
+        <div id='controls-and-displays'>
+          <div id='control-panel-default-buttons' class='control-panel-column'>
+            <button id='power_btn' title='Power' disabled='true' class='material-icons'>power_settings_new</button>
+            <button id='device-details-button' title='Device Details' class='material-icons'>
+              settings
+            </button>
+            <button id='bluetooth-modal-button' title='Bluetooth console' class='material-icons'>
+              settings_bluetooth
+            </button>
+            <button id='back_btn' title='Back' disabled='true' class='material-icons'>arrow_back</button>
+            <button id='home_btn' title='Home' disabled='true' class='material-icons'>home</button>
+            <button id='menu_btn' title='Menu' disabled='true' class='material-icons'>menu</button>
+            <button id='rotate_left_btn' title='Rotate left' disabled='true' class='material-icons' data-adb="true">rotate_90_degrees_ccw</button>
+            <button id='rotate_right_btn' title='Rotate right' disabled='true' class='material-icons' data-adb="true">rotate_90_degrees_cw</button>
+            <button id='volume_up_btn' title='Volume up' disabled='true' class='material-icons'>volume_up</button>
+            <button id='volume_down_btn' title='Volume down' disabled='true' class='material-icons'>volume_down</button>
+            <button id='volume_off_btn' title='Volume off' class='material-icons'>volume_off</button>
+            <button id='camera_off_btn' title='Capture camera' class='material-icons'>videocam_off</button>
+            <button id='record_video_btn' title='Record screen' class='material-icons'>movie_creation</button>
+            <button id='mic_btn' title='Microphone' disabled='true' class='material-icons'>mic</button>
+            <button id='location-modal-button' title='location console' class='material-icons'>location_on </button>
+          </div>
+          <div id='control-panel-custom-buttons' class='control-panel-column'></div>
+          <!-- tabindex="-1" allows this element to capture keyboard events -->
+          <div id='device-displays' tabindex="-1">
+          </div>
+        </div>
+      </section>
+      <div id='device-details-modal' class='modal'>
+        <div id='device-details-modal-header' class='modal-header'>
+          <h2>Device Details</h2>
+          <button id='device-details-close' title='Close' class='material-icons modal-close'>close</button>
+        </div>
+        <hr>
+        <h3>Hardware Configuration</h3>
+        <span id='device-details-hardware'>unknown</span>
+      </div>
+
+      <div id='bluetooth-modal' class='modal-wrapper'>
+        <div id='bluetooth-prompt' class='modal'>
+          <div id='bluetooth-prompt-header' class='modal-header'>
+            <h2>Bluetooth</h2>
+            <button id='bluetooth-prompt-close' title='Close' class='material-icons modal-close'>close</button>
+          </div>
+          <div>
+            <div id='bluetooth-prompt-text' class='bluetooth-text'>
+            We have enabled a BT Wizard to simplify adding a<br>bluetooth device.<br>
+            Alternatively, you can enter the BT Console if you<br>want to exercise full control.</div><br>
+            <div class='bluetooth-button'>
+              <button id='bluetooth-prompt-wizard' title='Start Wizard' class='modal-button-highlight'>Start Wizard</button>
+              <button id='bluetooth-prompt-list' title='Device List' class='modal-button'>Device List</button>
+              <button id='bluetooth-prompt-console' title='BT Console' class='modal-button'>BT Console</button>
+            </div>
+          </div>
+        </div>
+        <div id='bluetooth-wizard' class='modal'>
+          <div id='bluetooth-wizard-modal-header' class='modal-header'>
+            <h2>BT Wizard</h2>
+            <button id='bluetooth-wizard-close' title='Close' class='material-icons modal-close'>close</button>
+          </div>
+          <div>
+            <div class='bluetooth-text-field'><input type="text" id='bluetooth-wizard-name' placeholder="Device Name"></input></div>
+            <div class='bluetooth-drop-down'>
+              <select id='bluetooth-wizard-type' validate-mac="true" required>
+                <option value="beacon">Beacon</option>
+                <option value="beacon_swarm">Beacon Swarm</option>
+                <!-- Disabled because they were "started but never finished" (according to mylesgw@)
+                <option value="car_kit">Car Kit</option>
+                <option value="classic">Classic</option> -->
+                <option value="keyboard">Keyboard</option>
+                <option value="remote_loopback">Remote Loopback</option>
+                <option value="scripted_beacon">Scripted Beacon</option>
+                <!-- Disabled because it will never show up in the UI
+                <option value="sniffer">Sniffer</option> -->
+              </select>
+            </div>
+            <div class='bluetooth-text-field'><input type="text" id='bluetooth-wizard-mac' placeholder="Device MAC" validate-mac="true" required></input><span></span></div>
+            <div class='bluetooth-button'>
+              <button id='bluetooth-wizard-device' title='Add Device' class='modal-button-highlight' disabled>Add Device</button>
+              <button id='bluetooth-wizard-cancel' title='Cancel' class='modal-button'>Cancel</button>
+            </div>
+          </div>
+        </div>
+        <div id='bluetooth-wizard-confirm' class='modal'>
+          <div id='bluetooth-wizard-confirm-header' class='modal-header'>
+            <h2>BT Wizard</h2>
+            <button id='bluetooth-wizard-confirm-close' title='Close' class='material-icons modal-close'>close</button>
+          </div>
+          <div id='bluetooth-wizard-text' class='bluetooth-text'>Device added. See device details below.</div><br>
+          <div class='bluetooth-text'>
+            <p>Name: <b>GKeyboard</b></p>
+            <p>Type: <b>Keyboard</b></p>
+            <p>MAC Addr: <b>be:ac:01:55:00:03</b></p>
+          </div>
+          <div class='bluetooth-button'><button id='bluetooth-wizard-another' title='Add Another' class='modal-button-highlight'>Add Another</button></div>
+        </div>
+        <div id='bluetooth-list' class='modal'>
+          <div id='bluetooth-list-header' class='modal-header'>
+            <h2>Device List</h2>
+            <button id='bluetooth-list-close' title='Close' class='material-icons modal-close'>close</button>
+          </div>
+          <div class='bluetooth-text'>
+            <div><button title="Delete" data-device-id="delete" class="bluetooth-list-trash material-icons">delete</button>GKeyboard | Keyboard | be:ac:01:55:00:03</div>
+            <div><button title="Delete" data-device-id="delete" class="bluetooth-list-trash material-icons">delete</button>GHeadphones | Audio | dc:fa:32:00:55:02</div>
+          </div>
+        </div>
+        <div id='bluetooth-console' class='modal'>
+          <div id='bluetooth-console-modal-header' class='modal-header'>
+            <h2>BT Console</h2>
+            <button id='bluetooth-console-close' title='Close' class='material-icons modal-close'>close</button>
+          </div>
+          <div>
+            <div colspan='2'><textarea id='bluetooth-console-view' readonly rows='10' cols='60'></textarea></div>
+            <div width='1'><p id='bluetooth-console-cmd-label'>Command:</p></div>
+            <div width='100'><input id='bluetooth-console-input' type='text'></input></div>
+      </div>
+    </div>
+  </div>
+
+  <div id='location-modal' class='modal-wrapper'>
+
+    <!-- location-prompt-modal modal -->
+    <div id='location-prompt-modal' class='modal'>
+      <div id='location-prompt-modal-header' class='modal-header'>
+        <h2>Location</h2>
+        <button id='location-prompt-modal-close' title='Close' class='material-icons modal-close'>close</button>
+      </div>
+      <div>
+        <div id='location-prompt-text' class='location-text'>
+          <div class='location-button'>
+            <button id='location-set-wizard' title='Set location' class='modal-button-highlight'>Set Location</button>
+            <button id='locations-import-wizard' title='Import locations' class='modal-button'>Import Locations</button>
+          </div>
+        </div>
+      </div>
+    </div>
+  </div>
+  <!-- location-set modal -->
+  <div id='location-set-modal' class='modal'>
+    <div id='location-set-modal-header' class='modal-header'>
+      <h2>Set Location</h2>
+      <button id='location-set-modal-close' title='Close' class='material-icons modal-close'>close</button>
+    </div>
+    <div>
+      <div class='location-text-field'><input type=number step=0.01 min="-180" max="180" value=-122.083 id='location-set-longitude' placeholder="Longitude" required></input></div>
+      <div class='location-text-field'><input type=number step=0.01 min="-90" max="90" value=37.415 id='location-set-latitude' placeholder="Latitude" required></input></div>
+      <div class='location-text-field'><input type=number step=0.01 value=0.0 id='location-set-altitude' placeholder="Elevation" required></input>
+    </div>
+      <div class='location-button'>
+        <button id='location-set-confirm' title='Confirm Location' class='modal-button-highlight'>Update Location</button>
+        <button id='location-set-cancel' title='Cancel' class='modal-button'>Cancel</button>
+      </div>
+    </div>
+  </div>
+
+  <!-- locations-import modal -->
+  <div id='locations-import-modal' class='modal'>
+    <div id='locations-import-modal-header' class='modal-header'>
+      <h2>Import Locations</h2>
+      <button id='locations-import-modal-close' title='Close' class='material-icons modal-close'>close</button>
+    </div>
+
+          <div class='location-text'>
+            <input type='file' id='locations_select_file' accept=".gpx, .kml">
+            <button id='locations-send-btn' title='Send Locations' class='modal-button-highlight'>Send Locations</button>
+
+          </div>
+
+  </div>
+
+       <script src="js/adb.js"></script>
+       <script src="js/location.js"></script>
+       <script src="js/rootcanal.js"></script>
+       <script src="js/cf_webrtc.js" type="module"></script>
+       <script src="js/controls.js"></script>
+       <script src="js/app.js"></script>
+       <template id="display-template">
+         <div class="device-display">
+           <div class="device-display-info"></div>
+           <div class="device-video-container">
+             <video autoplay muted class="device-display-video"></video>
+           </div>
+         </div>
+       </template>
+    </body>
+</html>
diff --git a/host/frontend/webrtc/html_client/controls.css b/host/frontend/webrtc/html_client/controls.css
new file mode 100644
index 0000000..4635484
--- /dev/null
+++ b/host/frontend/webrtc/html_client/controls.css
@@ -0,0 +1,33 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/* Dark theme (default) */
+body, body.dark-theme {
+  --toggle-off-bg: red;
+  --toggle-off-fg: white;
+}
+/* Light theme */
+body.light-theme {
+  --toggle-off-bg: red;
+  --toggle-off-fg: white;
+}
+
+.toggle-control.toggle-off {
+  background-color: var(--toggle-off-bg);
+  color: var(--toggle-off-fg);
+  border-radius: 10px;
+}
+
diff --git a/host/frontend/webrtc/client/js/adb.js b/host/frontend/webrtc/html_client/js/adb.js
similarity index 100%
rename from host/frontend/webrtc/client/js/adb.js
rename to host/frontend/webrtc/html_client/js/adb.js
diff --git a/host/frontend/webrtc/html_client/js/app.js b/host/frontend/webrtc/html_client/js/app.js
new file mode 100644
index 0000000..424ba90
--- /dev/null
+++ b/host/frontend/webrtc/html_client/js/app.js
@@ -0,0 +1,1176 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+'use strict';
+
+// Set the theme as soon as possible.
+// The 'theme' URL query parameter selects the CSS theme class on <body>:
+// 'light' or 'dark'. Any other value (or none) leaves the default styling.
+const params = new URLSearchParams(location.search);
+let theme = params.get('theme');
+if (theme === 'light') {
+  document.querySelector('body').classList.add('light-theme');
+} else if (theme === 'dark') {
+  document.querySelector('body').classList.add('dark-theme');
+}
+
+// Establishes the WebRTC connection to the device identified by deviceId,
+// periodically warning the user while the connection is pending.
+// Returns the device connection object produced by cf_webrtc.js Connect().
+async function ConnectDevice(deviceId, serverConnector) {
+  console.debug('Connect: ' + deviceId);
+  // Prepare messages in case of connection failure
+  let connectionAttemptDuration = 0;
+  const intervalMs = 15000;
+  let connectionInterval = setInterval(() => {
+    connectionAttemptDuration += intervalMs;
+    // NOTE(review): the first tick sets the counter to exactly 15000, so with
+    // the strict `>` comparisons the warning shows at ~30s and the error at
+    // ~45s — confirm this timing is intended (`>=` would fire one tick
+    // earlier).
+    if (connectionAttemptDuration > 30000) {
+      showError(
+          'Connection should have occurred by now. ' +
+          'Please attempt to restart the guest device.');
+      clearInterval(connectionInterval);
+    } else if (connectionAttemptDuration > 15000) {
+      showWarning('Connection is taking longer than expected');
+    }
+  }, intervalMs);
+
+  // Dynamic import keeps page load light until a connection is requested.
+  let module = await import('./cf_webrtc.js');
+  let deviceConnection = await module.Connect(deviceId, serverConnector);
+  console.info('Connected to ' + deviceId);
+  clearInterval(connectionInterval);
+  return deviceConnection;
+}
+
+function setupMessages() {
+  let closeBtn = document.querySelector('#error-message .close-btn');
+  closeBtn.addEventListener('click', evt => {
+    evt.target.parentElement.className = 'hidden';
+  });
+}
+
+// Shows msg in the #error-message banner with the given CSS class
+// ('info'/'warning'/'error'). If duration (ms) is given the banner
+// auto-hides after that long; a new call cancels any pending auto-hide.
+function showMessage(msg, className, duration) {
+  let element = document.getElementById('error-message');
+  // The pending auto-hide timer id is kept in the element's dataset (as a
+  // string; browsers coerce it back for clearTimeout).
+  let previousTimeout = element.dataset.timeout;
+  if (previousTimeout !== undefined) {
+    clearTimeout(previousTimeout);
+  }
+  if (element.childNodes.length < 2) {
+    // First time, no text node yet
+    element.insertAdjacentText('afterBegin', msg);
+  } else {
+    // Reuse the existing leading text node (the close button follows it).
+    element.childNodes[0].data = msg;
+  }
+  element.className = className;
+
+  if (duration !== undefined) {
+    element.dataset.timeout = setTimeout(() => {
+      element.className = 'hidden';
+    }, duration);
+  }
+}
+
+// Shows an informational banner; duration (ms) is optional.
+function showInfo(msg, duration) {
+  showMessage(msg, 'info', duration);
+}
+
+// Shows a warning banner; duration (ms) is optional.
+function showWarning(msg, duration) {
+  showMessage(msg, 'warning', duration);
+}
+
+// Shows an error banner; duration (ms) is optional.
+function showError(msg, duration) {
+  showMessage(msg, 'error', duration);
+}
+
+
+class DeviceDetailsUpdater {
+  #element;
+
+  constructor() {
+    this.#element = document.getElementById('device-details-hardware');
+  }
+
+  setHardwareDetailsText(text) {
+    this.#element.dataset.hardwareDetailsText = text;
+    return this;
+  }
+
+  setDeviceStateDetailsText(text) {
+    this.#element.dataset.deviceStateDetailsText = text;
+    return this;
+  }
+
+  update() {
+    this.#element.textContent =
+        [
+          this.#element.dataset.hardwareDetailsText,
+          this.#element.dataset.deviceStateDetailsText,
+        ].filter(e => e /*remove empty*/)
+            .join('\n');
+  }
+}  // DeviceDetailsUpdater
+
+// These classes provide the same interface as those from the server_connector,
+// but can't inherit from them because older versions of server_connector.js
+// don't provide them.
+// These classes are only meant to avoid having to check for null everytime.
+// No-op stand-in for the server connector's device-displays message, used
+// when the parent controller can't provide a real one.
+class EmptyDeviceDisplaysMessage {
+  addDisplay(display_id, width, height) {}
+  send() {}
+}
+
+// No-op stand-in for the parent controller; always hands out
+// EmptyDeviceDisplaysMessage instances.
+class EmptyParentController {
+  createDeviceDisplaysMessage(rotation) {
+    return new EmptyDeviceDisplaysMessage();
+  }
+}
+
+class DeviceControlApp {
+  #deviceConnection = {};       // connection object from cf_webrtc.js
+  #parentController = null;     // receives display layout/rotation reports
+  #currentRotation = 0;         // degrees, updated by #rotateDisplays
+  #currentScreenStyles = {};    // video element id -> last rotation keyframe
+  #displayDescriptions = [];    // not used in this view — verify before removal
+  #recording = {};              // not used in this view — verify before removal
+  #phys = {};                   // phy type name -> id, parsed from rootcanal
+  #deviceCount = 0;             // bluetooth device count from rootcanal list
+  #micActive = false;           // not used in this view — verify before removal
+  #adbConnected = false;        // set once the adb channel is established
+
+  // deviceConnection: established WebRTC connection to the device.
+  // parentController: used to report display info (may be an Empty* stub).
+  constructor(deviceConnection, parentController) {
+    this.#deviceConnection = deviceConnection;
+    this.#parentController = parentController;
+  }
+
+  // Entry point: hooks up control/toggle handlers, enables the buttons that
+  // don't need adb, then builds the rest of the device UI.
+  start() {
+    console.debug('Device description: ', this.#deviceConnection.description);
+    this.#deviceConnection.onControlMessage(msg => this.#onControlMessage(msg));
+    createToggleControl(
+        document.getElementById('camera_off_btn'),
+        enabled => this.#onCameraCaptureToggle(enabled));
+    createToggleControl(
+        document.getElementById('record_video_btn'),
+        enabled => this.#onVideoCaptureToggle(enabled));
+    const audioElm = document.getElementById('device-audio');
+
+    let audioPlaybackCtrl = createToggleControl(
+        document.getElementById('volume_off_btn'),
+        enabled => this.#onAudioPlaybackToggle(enabled), !audioElm.paused);
+    // The audio element may start or stop playing at any time, this ensures the
+    // audio control always show the right state.
+    audioElm.onplay = () => audioPlaybackCtrl.Set(true);
+    audioElm.onpause = () => audioPlaybackCtrl.Set(false);
+
+    // Enable non-ADB buttons, these buttons use data channels to communicate
+    // with the host, so they're ready to go as soon as the webrtc connection is
+    // established.
+    this.#getControlPanelButtons()
+        .filter(b => !b.dataset.adb)
+        .forEach(b => b.disabled = false);
+
+    this.#showDeviceUI();
+  }
+
+  // One-time UI setup performed once the WebRTC connection is up: wires the
+  // control panel buttons, all modals (details, bluetooth, location),
+  // custom buttons from the device description, displays, audio, keyboard
+  // capture, and the bluetooth/location message channels.
+  #showDeviceUI() {
+    // Set up control panel buttons
+    addMouseListeners(
+        document.querySelector('#power_btn'),
+        evt => this.#onControlPanelButton(evt, 'power'));
+    addMouseListeners(
+        document.querySelector('#back_btn'),
+        evt => this.#onControlPanelButton(evt, 'back'));
+    addMouseListeners(
+        document.querySelector('#home_btn'),
+        evt => this.#onControlPanelButton(evt, 'home'));
+    addMouseListeners(
+        document.querySelector('#menu_btn'),
+        evt => this.#onControlPanelButton(evt, 'menu'));
+    addMouseListeners(
+        document.querySelector('#rotate_left_btn'),
+        evt => this.#onRotateLeftButton(evt, 'rotate'));
+    addMouseListeners(
+        document.querySelector('#rotate_right_btn'),
+        evt => this.#onRotateRightButton(evt, 'rotate'));
+    addMouseListeners(
+        document.querySelector('#volume_up_btn'),
+        evt => this.#onControlPanelButton(evt, 'volumeup'));
+    addMouseListeners(
+        document.querySelector('#volume_down_btn'),
+        evt => this.#onControlPanelButton(evt, 'volumedown'));
+    addMouseListeners(
+        document.querySelector('#mic_btn'), evt => this.#onMicButton(evt));
+
+    // Modal open/close wiring; the 4th argument is the modal to hide when
+    // the new one opens.
+    createModalButton(
+        'device-details-button', 'device-details-modal',
+        'device-details-close');
+    createModalButton(
+        'bluetooth-modal-button', 'bluetooth-prompt', 'bluetooth-prompt-close');
+    createModalButton(
+        'bluetooth-prompt-wizard', 'bluetooth-wizard', 'bluetooth-wizard-close',
+        'bluetooth-prompt');
+    createModalButton(
+        'bluetooth-wizard-device', 'bluetooth-wizard-confirm',
+        'bluetooth-wizard-confirm-close', 'bluetooth-wizard');
+    createModalButton(
+        'bluetooth-wizard-another', 'bluetooth-wizard',
+        'bluetooth-wizard-close', 'bluetooth-wizard-confirm');
+    createModalButton(
+        'bluetooth-prompt-list', 'bluetooth-list', 'bluetooth-list-close',
+        'bluetooth-prompt');
+    createModalButton(
+        'bluetooth-prompt-console', 'bluetooth-console',
+        'bluetooth-console-close', 'bluetooth-prompt');
+    createModalButton(
+        'bluetooth-wizard-cancel', 'bluetooth-prompt', 'bluetooth-wizard-close',
+        'bluetooth-wizard');
+
+    createModalButton('location-modal-button', 'location-prompt-modal',
+        'location-prompt-modal-close');
+    createModalButton(
+        'location-set-wizard', 'location-set-modal', 'location-set-modal-close',
+        'location-prompt-modal');
+
+    createModalButton(
+        'locations-import-wizard', 'locations-import-modal', 'locations-import-modal-close',
+        'location-prompt-modal');
+    createModalButton(
+        'location-set-cancel', 'location-prompt-modal', 'location-set-modal-close',
+        'location-set-modal');
+
+    positionModal('device-details-button', 'bluetooth-modal');
+    positionModal('device-details-button', 'bluetooth-prompt');
+    positionModal('device-details-button', 'bluetooth-wizard');
+    positionModal('device-details-button', 'bluetooth-wizard-confirm');
+    positionModal('device-details-button', 'bluetooth-list');
+    positionModal('device-details-button', 'bluetooth-console');
+
+    positionModal('device-details-button', 'location-modal');
+    positionModal('device-details-button', 'location-prompt-modal');
+    positionModal('device-details-button', 'location-set-modal');
+    positionModal('device-details-button', 'locations-import-modal');
+
+    // Bluetooth (rootcanal) and location button handlers.
+    createButtonListener('bluetooth-prompt-list', null, this.#deviceConnection,
+      evt => this.#onRootCanalCommand(this.#deviceConnection, "list", evt));
+    createButtonListener('bluetooth-wizard-device', null, this.#deviceConnection,
+      evt => this.#onRootCanalCommand(this.#deviceConnection, "add", evt));
+    createButtonListener('bluetooth-list-trash', null, this.#deviceConnection,
+      evt => this.#onRootCanalCommand(this.#deviceConnection, "del", evt));
+    createButtonListener('bluetooth-prompt-wizard', null, this.#deviceConnection,
+      evt => this.#onRootCanalCommand(this.#deviceConnection, "list", evt));
+    createButtonListener('bluetooth-wizard-another', null, this.#deviceConnection,
+      evt => this.#onRootCanalCommand(this.#deviceConnection, "list", evt));
+
+    createButtonListener('locations-send-btn', null, this.#deviceConnection,
+      evt => this.#onImportLocationsFile(this.#deviceConnection,evt));
+
+    createButtonListener('location-set-confirm', null, this.#deviceConnection,
+      evt => this.#onSendLocation(this.#deviceConnection, evt));
+
+    // Custom buttons come from the device description; each one is backed by
+    // an adb shell command, a device state change, or a custom action server
+    // command.
+    if (this.#deviceConnection.description.custom_control_panel_buttons.length >
+        0) {
+      document.getElementById('control-panel-custom-buttons').style.display =
+          'flex';
+      for (const button of this.#deviceConnection.description
+               .custom_control_panel_buttons) {
+        if (button.shell_command) {
+          // This button's command is handled by sending an ADB shell command.
+          let element = createControlPanelButton(
+              button.title, button.icon_name,
+              e => this.#onCustomShellButton(button.shell_command, e),
+              'control-panel-custom-buttons');
+          element.dataset.adb = true;
+        } else if (button.device_states) {
+          // This button corresponds to variable hardware device state(s).
+          let element = createControlPanelButton(
+              button.title, button.icon_name,
+              this.#getCustomDeviceStateButtonCb(button.device_states),
+              'control-panel-custom-buttons');
+          for (const device_state of button.device_states) {
+            // hinge_angle is currently injected via an adb shell command that
+            // triggers a guest binary.
+            if ('hinge_angle_value' in device_state) {
+              element.dataset.adb = true;
+            }
+          }
+        } else {
+          // This button's command is handled by custom action server.
+          createControlPanelButton(
+              button.title, button.icon_name,
+              evt => this.#onControlPanelButton(evt, button.command),
+              'control-panel-custom-buttons');
+        }
+      }
+    }
+
+    // Set up displays
+    this.#updateDeviceDisplays();
+    this.#deviceConnection.onStreamChange(stream => this.#onStreamChange(stream));
+
+    // Set up audio
+    const deviceAudio = document.getElementById('device-audio');
+    for (const audio_desc of this.#deviceConnection.description.audio_streams) {
+      let stream_id = audio_desc.stream_id;
+      this.#deviceConnection.onStream(stream_id)
+          .then(stream => {
+            deviceAudio.srcObject = stream;
+            deviceAudio.play();
+          })
+          .catch(e => console.error('Unable to get audio stream: ', e));
+    }
+
+    // Set up keyboard capture
+    this.#startKeyboardCapture();
+
+    this.#updateDeviceHardwareDetails(
+        this.#deviceConnection.description.hardware);
+
+    // Show the error message and disable buttons when the WebRTC connection
+    // fails.
+    this.#deviceConnection.onConnectionStateChange(state => {
+      if (state == 'disconnected' || state == 'failed') {
+        this.#showWebrtcError();
+      }
+    });
+
+    // Bluetooth console: each typed line is "<command> <args...>", forwarded
+    // to rootcanal; responses update the device list and the console view.
+    let bluetoothConsole =
+        cmdConsole('bluetooth-console-view', 'bluetooth-console-input');
+    bluetoothConsole.addCommandListener(cmd => {
+      let inputArr = cmd.split(' ');
+      let command = inputArr[0];
+      inputArr.shift();
+      let args = inputArr;
+      this.#deviceConnection.sendBluetoothMessage(
+          createRootcanalMessage(command, args));
+    });
+    this.#deviceConnection.onBluetoothMessage(msg => {
+      let decoded = decodeRootcanalMessage(msg);
+      let deviceCount = btUpdateDeviceList(decoded);
+      console.debug("deviceCount= " +deviceCount);
+      console.debug("decoded= " +decoded);
+      if (deviceCount > 0) {
+        this.#deviceCount = deviceCount;
+        createButtonListener('bluetooth-list-trash', null, this.#deviceConnection,
+           evt => this.#onRootCanalCommand(this.#deviceConnection, "del", evt));
+      }
+      btUpdateAdded(decoded);
+      let phyList = btParsePhys(decoded);
+      if (phyList) {
+        this.#phys = phyList;
+      }
+      bluetoothConsole.addLine(decoded);
+    });
+
+    this.#deviceConnection.onLocationMessage(msg => {
+      console.debug("onLocationMessage = " +msg);
+    });
+  }
+
+  #onStreamChange(stream) {
+    let stream_id = stream.id;
+    if (stream_id.startsWith('display_')) {
+      this.#updateDeviceDisplays();
+    }
+  }
+
+  // Handles a bluetooth wizard/list button: sends the corresponding
+  // rootcanal command ("list", "del" or "add") over the bluetooth channel.
+  // Message order matters: "del" is followed by a fresh "list", and "add"
+  // is followed by attaching the new device to a phy.
+  #onRootCanalCommand(deviceConnection, cmd, evt) {
+
+    if (cmd == "list") {
+      deviceConnection.sendBluetoothMessage(createRootcanalMessage("list", []));
+    }
+    if (cmd == "del") {
+      // The clicked trash button carries the id of the device to remove.
+      let id = evt.srcElement.getAttribute("data-device-id");
+      deviceConnection.sendBluetoothMessage(createRootcanalMessage("del", [id]));
+      deviceConnection.sendBluetoothMessage(createRootcanalMessage("list", []));
+    }
+    if (cmd == "add") {
+      let name = document.getElementById('bluetooth-wizard-name').value;
+      let type = document.getElementById('bluetooth-wizard-type').value;
+      // remote_loopback devices take no MAC address; all others do.
+      if (type == "remote_loopback") {
+        deviceConnection.sendBluetoothMessage(createRootcanalMessage("add", [type]));
+      } else {
+        let mac = document.getElementById('bluetooth-wizard-mac').value;
+        deviceConnection.sendBluetoothMessage(createRootcanalMessage("add", [type, mac]));
+      }
+      // Attach the new device (its id is the current count) to the right phy.
+      let phyId = this.#phys["LOW_ENERGY"].toString();
+      if (type == "remote_loopback") {
+        phyId = this.#phys["BR_EDR"].toString();
+      }
+      let devId = this.#deviceCount.toString();
+      this.#deviceCount++;
+      deviceConnection.sendBluetoothMessage(createRootcanalMessage("add_device_to_phy", [devId, phyId]));
+    }
+  }
+
+  #onSendLocation(deviceConnection, evt) {
+
+    let longitude = document.getElementById('location-set-longitude').value;
+    let latitude = document.getElementById('location-set-latitude').value;
+    let altitude = document.getElementById('location-set-altitude').value;
+    if (longitude == null || longitude == '' || latitude == null  || latitude == ''||
+        altitude == null  || altitude == '') {
+      return;
+    }
+    let location_msg = longitude + "," +latitude + "," + altitude;
+    deviceConnection.sendLocationMessage(location_msg);
+  }
+  #onImportLocationsFile(deviceConnection, evt) {
+
+    function onLoad_send_kml_data(xml) {
+      deviceConnection.sendKmlLocationsMessage(xml);
+    }
+
+    function onLoad_send_gpx_data(xml) {
+      deviceConnection.sendGpxLocationsMessage(xml);
+    }
+
+    let file_selector=document.getElementById("locations_select_file");
+
+    if (!file_selector.files) {
+        alert("input parameter is not of file type");
+        return;
+    }
+
+    if (!file_selector.files[0]) {
+        alert("Please select a file ");
+        return;
+    }
+
+    var filename= file_selector.files[0];
+    if (filename.type.match('\gpx')) {
+      console.debug("import Gpx locations handling");
+      loadFile(onLoad_send_gpx_data);
+    } else if(filename.type.match('\kml')){
+      console.debug("import Kml locations handling");
+      loadFile(onLoad_send_kml_data);
+    }
+
+  }
+
+  #showWebrtcError() {
+    showError(
+        'No connection to the guest device.  Please ensure the WebRTC' +
+        'process on the host machine is active.');
+    const deviceDisplays = document.getElementById('device-displays');
+    deviceDisplays.style.display = 'none';
+    this.#getControlPanelButtons().forEach(b => b.disabled = true);
+  }
+
+  #getControlPanelButtons() {
+    return [
+      ...document.querySelectorAll('#control-panel-default-buttons button'),
+      ...document.querySelectorAll('#control-panel-custom-buttons button'),
+    ];
+  }
+
+  // Captures a single photo via the connection's ImageCapture object (if
+  // present) at the device's camera resolution and forwards the raw bytes
+  // over the camera data channel.
+  #takePhoto() {
+    const imageCapture = this.#deviceConnection.imageCapture;
+    if (imageCapture) {
+      const photoSettings = {
+        imageWidth: this.#deviceConnection.cameraWidth,
+        imageHeight: this.#deviceConnection.cameraHeight
+      };
+      imageCapture.takePhoto(photoSettings)
+          .then(blob => blob.arrayBuffer())
+          .then(buffer => this.#deviceConnection.sendOrQueueCameraData(buffer))
+          .catch(error => console.error(error));
+    }
+  }
+
+  // Returns a mouse handler that, on each mousedown, sends the next state
+  // from device_states to the device (cycling through the list), and keeps
+  // the Device Details view in sync. The cycle position lives in the
+  // closure, so each button has independent state.
+  #getCustomDeviceStateButtonCb(device_states) {
+    let states = device_states;
+    let index = 0;
+    return e => {
+      if (e.type == 'mousedown') {
+        // Reset any overridden device state.
+        adbShell('cmd device_state state reset');
+        // Send a device_state message for the current state.
+        let message = {
+          command: 'device_state',
+          ...states[index],
+        };
+        this.#deviceConnection.sendControlMessage(JSON.stringify(message));
+        console.debug('Control message sent: ', JSON.stringify(message));
+        let lidSwitchOpen = null;
+        if ('lid_switch_open' in states[index]) {
+          lidSwitchOpen = states[index].lid_switch_open;
+        }
+        let hingeAngle = null;
+        if ('hinge_angle_value' in states[index]) {
+          hingeAngle = states[index].hinge_angle_value;
+          // TODO(b/181157794): Use a custom Sensor HAL for hinge_angle
+          // injection instead of this guest binary.
+          adbShell(
+              '/vendor/bin/cuttlefish_sensor_injection hinge_angle ' +
+              states[index].hinge_angle_value);
+        }
+        // Update the Device Details view.
+        this.#updateDeviceStateDetails(lidSwitchOpen, hingeAngle);
+        // Cycle to the next state.
+        index = (index + 1) % states.length;
+      }
+    }
+  }
+
+  #rotateDisplays(rotation) {
+    if ((rotation - this.#currentRotation) % 360 == 0) {
+      return;
+    }
+
+    document.querySelectorAll('.device-display-video').forEach((v, i) => {
+      const stream = v.srcObject;
+      if (stream == null) {
+        console.error('Missing corresponding device display video stream', l);
+        return;
+      }
+
+      const streamVideoTracks = stream.getVideoTracks();
+      if (streamVideoTracks == null || streamVideoTracks.length == 0) {
+        return;
+      }
+
+      const streamSettings = stream.getVideoTracks()[0].getSettings();
+      const streamWidth = streamSettings.width;
+      const streamHeight = streamSettings.height;
+      if (streamWidth == 0 || streamHeight == 0) {
+        console.error('Stream dimensions not yet available?', stream);
+        return;
+      }
+
+      const aspectRatio = streamWidth / streamHeight;
+
+      let keyFrames = [];
+      let from = this.#currentScreenStyles[v.id];
+      if (from) {
+        // If the screen was already rotated, use that state as starting point,
+        // otherwise the animation will start at the element's default state.
+        keyFrames.push(from);
+      }
+      let to = getStyleAfterRotation(rotation, aspectRatio);
+      keyFrames.push(to);
+      v.animate(keyFrames, {duration: 400 /*ms*/, fill: 'forwards'});
+      this.#currentScreenStyles[v.id] = to;
+    });
+
+    this.#currentRotation = rotation;
+    this.#updateDeviceDisplaysInfo();
+  }
+
+  // Rebuilds each display's info label ("Display N WxH (Rotated Ddeg)") and
+  // reports the current display set plus rotation to the parent controller.
+  #updateDeviceDisplaysInfo() {
+    let labels = document.querySelectorAll('.device-display-info');
+
+    // #currentRotation is device's physical rotation and currently used to
+    // determine display's rotation. It would be obtained from device's
+    // accelerometer sensor.
+    let deviceDisplaysMessage =
+        this.#parentController.createDeviceDisplaysMessage(
+            this.#currentRotation);
+
+    labels.forEach(l => {
+      let deviceDisplay = l.closest('.device-display');
+      if (deviceDisplay == null) {
+        console.error('Missing corresponding device display', l);
+        return;
+      }
+
+      let deviceDisplayVideo =
+          deviceDisplay.querySelector('.device-display-video');
+      if (deviceDisplayVideo == null) {
+        console.error('Missing corresponding device display video', l);
+        return;
+      }
+
+      // Video element ids are 'display_<N>'; strip the prefix to recover N.
+      const DISPLAY_PREFIX = 'display_';
+      let displayId = deviceDisplayVideo.id;
+      if (displayId == null || !displayId.startsWith(DISPLAY_PREFIX)) {
+        console.error('Unexpected device display video id', displayId);
+        return;
+      }
+      displayId = displayId.slice(DISPLAY_PREFIX.length);
+
+      let stream = deviceDisplayVideo.srcObject;
+      if (stream == null) {
+        console.error('Missing corresponding device display video stream', l);
+        return;
+      }
+
+      let text = `Display ${displayId} `;
+
+      let streamVideoTracks = stream.getVideoTracks();
+      if (streamVideoTracks.length > 0) {
+        let streamSettings = stream.getVideoTracks()[0].getSettings();
+        // Width and height may not be available immediately after the track is
+        // added but before frames arrive.
+        if ('width' in streamSettings && 'height' in streamSettings) {
+          let streamWidth = streamSettings.width;
+          let streamHeight = streamSettings.height;
+
+          deviceDisplaysMessage.addDisplay(
+              displayId, streamWidth, streamHeight);
+
+          text += `${streamWidth}x${streamHeight}`;
+        }
+      }
+
+      if (this.#currentRotation != 0) {
+        text += ` (Rotated ${this.#currentRotation}deg)`
+      }
+
+      l.textContent = text;
+    });
+
+    deviceDisplaysMessage.send();
+  }
+
+  #onControlMessage(message) {
+    let message_data = JSON.parse(message.data);
+    console.debug('Control message received: ', message_data)
+    let metadata = message_data.metadata;
+    if (message_data.event == 'VIRTUAL_DEVICE_BOOT_STARTED') {
+      // Start the adb connection after receiving the BOOT_STARTED message.
+      // (This is after the adbd start message. Attempting to connect
+      // immediately after adbd starts causes issues.)
+      this.#initializeAdb();
+    }
+    if (message_data.event == 'VIRTUAL_DEVICE_SCREEN_CHANGED') {
+      this.#rotateDisplays(+metadata.rotation);
+    }
+    if (message_data.event == 'VIRTUAL_DEVICE_CAPTURE_IMAGE') {
+      if (this.#deviceConnection.cameraEnabled) {
+        this.#takePhoto();
+      }
+    }
+    if (message_data.event == 'VIRTUAL_DEVICE_DISPLAY_POWER_MODE_CHANGED') {
+      this.#updateDisplayVisibility(metadata.display, metadata.mode);
+    }
+  }
+
+  #updateDeviceStateDetails(lidSwitchOpen, hingeAngle) {
+    let deviceStateDetailsTextLines = [];
+    if (lidSwitchOpen != null) {
+      let state = lidSwitchOpen ? 'Opened' : 'Closed';
+      deviceStateDetailsTextLines.push(`Lid Switch - ${state}`);
+    }
+    if (hingeAngle != null) {
+      deviceStateDetailsTextLines.push(`Hinge Angle - ${hingeAngle}`);
+    }
+    let deviceStateDetailsText = deviceStateDetailsTextLines.join('\n');
+    new DeviceDetailsUpdater()
+        .setDeviceStateDetailsText(deviceStateDetailsText)
+        .update();
+  }
+
+  #updateDeviceHardwareDetails(hardware) {
+    let hardwareDetailsTextLines = [];
+    Object.keys(hardware).forEach((key) => {
+      let value = hardware[key];
+      hardwareDetailsTextLines.push(`${key} - ${value}`);
+    });
+
+    let hardwareDetailsText = hardwareDetailsTextLines.join('\n');
+    new DeviceDetailsUpdater()
+        .setHardwareDetailsText(hardwareDetailsText)
+        .update();
+  }
+
+  // Creates a <video> element and a <div> container element for each display.
+  // The extra <div> container elements are used to maintain the width and
+  // height of the device as the CSS 'transform' property used on the <video>
+  // element for rotating the device only affects the visuals of the element
+  // and not its layout.
+  // Idempotent: compares the set of active display_* streams against the
+  // <video> elements already in the DOM and only adds/removes the delta.
+  #updateDeviceDisplays() {
+    let anyDisplayLoaded = false;
+    const deviceDisplays = document.getElementById('device-displays');
+
+    const MAX_DISPLAYS = 16;
+    for (let i = 0; i < MAX_DISPLAYS; i++) {
+      const stream_id = 'display_' + i.toString();
+      const stream = this.#deviceConnection.getStream(stream_id);
+
+      let deviceDisplayVideo = document.querySelector('#' + stream_id);
+      const deviceDisplayIsPresent = deviceDisplayVideo != null;
+      const deviceDisplayStreamIsActive = stream != null && stream.active;
+      // Element presence already matches stream state: nothing to do.
+      if (deviceDisplayStreamIsActive == deviceDisplayIsPresent) {
+          continue;
+      }
+
+      if (deviceDisplayStreamIsActive) {
+        console.debug('Adding display', i);
+
+        let displayFragment =
+            document.querySelector('#display-template').content.cloneNode(true);
+
+        let deviceDisplayInfo =
+            displayFragment.querySelector('.device-display-info');
+        deviceDisplayInfo.id = stream_id + '_info';
+
+        deviceDisplayVideo = displayFragment.querySelector('video');
+        deviceDisplayVideo.id = stream_id;
+        deviceDisplayVideo.srcObject = stream;
+        deviceDisplayVideo.addEventListener('loadeddata', (evt) => {
+          if (!anyDisplayLoaded) {
+            anyDisplayLoaded = true;
+            this.#onDeviceDisplayLoaded();
+          }
+        });
+        deviceDisplayVideo.addEventListener('loadedmetadata', (evt) => {
+          this.#updateDeviceDisplaysInfo();
+        });
+
+        this.#addMouseTracking(deviceDisplayVideo);
+
+        deviceDisplays.appendChild(displayFragment);
+
+        // Confusingly, events for adding tracks occur on the peer connection
+        // but events for removing tracks occur on the stream.
+        stream.addEventListener('removetrack', evt => {
+          this.#updateDeviceDisplays();
+        });
+      } else {
+        console.debug('Removing display', i);
+
+        let deviceDisplay = deviceDisplayVideo.closest('.device-display');
+        if (deviceDisplay == null) {
+          console.error('Failed to find device display for ', stream_id);
+        } else {
+          deviceDisplays.removeChild(deviceDisplay);
+        }
+      }
+    }
+
+    this.#updateDeviceDisplaysInfo();
+  }
+
+  // Starts (or re-attempts) the adb connection over the WebRTC channel;
+  // callers in this file invoke it repeatedly, relying on it being safe to
+  // call when already connected.
+  #initializeAdb() {
+    init_adb(
+        this.#deviceConnection, () => this.#onAdbConnected(),
+        () => this.#showAdbError());
+  }
+
+  #onAdbConnected() {
+    if (this.#adbConnected) {
+       return;
+    }
+    // Screen changed messages are not reported until after boot has completed.
+    // Certain default adb buttons change screen state, so wait for boot
+    // completion before enabling these buttons.
+    showInfo('adb connection established successfully.', 5000);
+    this.#adbConnected = true;
+    this.#getControlPanelButtons()
+        .filter(b => b.dataset.adb)
+        .forEach(b => b.disabled = false);
+  }
+
+  #showAdbError() {
+    showError('adb connection failed.');
+    this.#getControlPanelButtons()
+        .filter(b => b.dataset.adb)
+        .forEach(b => b.disabled = true);
+  }
+
+  // Runs once the first display video has loaded data: reveals the display
+  // elements, refreshes their info labels, and kicks off adb.
+  #onDeviceDisplayLoaded() {
+    if (!this.#adbConnected) {
+      // ADB may have connected before, don't show this message in that case
+      showInfo('Awaiting bootup and adb connection. Please wait...', 10000);
+    }
+    this.#updateDeviceDisplaysInfo();
+
+    let deviceDisplayList = document.getElementsByClassName('device-display');
+    for (const deviceDisplay of deviceDisplayList) {
+      deviceDisplay.style.visibility = 'visible';
+    }
+
+    // Start the adb connection if it is not already started.
+    this.#initializeAdb();
+  }
+
+  #onRotateLeftButton(e) {
+    if (e.type == 'mousedown') {
+      this.#onRotateButton(this.#currentRotation + 90);
+    }
+  }
+
+  #onRotateRightButton(e) {
+    if (e.type == 'mousedown') {
+      this.#onRotateButton(this.#currentRotation - 90);
+    }
+  }
+
+  // Applies an absolute rotation (degrees): animates the local displays and
+  // injects the rotation into the guest via a sensor-injection binary.
+  #onRotateButton(rotation) {
+    // Attempt to init adb again, in case the initial connection failed.
+    // This succeeds immediately if already connected.
+    this.#initializeAdb();
+    this.#rotateDisplays(rotation);
+    adbShell(`/vendor/bin/cuttlefish_sensor_injection rotate ${rotation}`);
+  }
+
+  // Sends a press ('down') or release ('up') control message for the given
+  // command; any event type other than mousedown counts as a release.
+  #onControlPanelButton(e, command) {
+    if (e.type == 'mouseout' && e.which == 0) {
+      // Ignore mouseout events if no mouse button is pressed.
+      return;
+    }
+    this.#deviceConnection.sendControlMessage(JSON.stringify({
+      command: command,
+      button_state: e.type == 'mousedown' ? 'down' : 'up',
+    }));
+  }
+
+  #startKeyboardCapture() {
+    const deviceArea = document.querySelector('#device-displays');
+    deviceArea.addEventListener('keydown', evt => this.#onKeyEvent(evt));
+    deviceArea.addEventListener('keyup', evt => this.#onKeyEvent(evt));
+  }
+
+  // Relays a keyboard event (key code + event type) to the device.
+  #onKeyEvent(e) {
+    this.#deviceConnection.sendKeyEvent(e.code, e.type);
+  }
+
+  #addMouseTracking(displayDeviceVideo) {
+    let $this = this;
+    let mouseIsDown = false;
+    let mouseCtx = {
+      down: false,
+      touchIdSlotMap: new Map(),
+      touchSlots: [],
+    };
+    function onStartDrag(e) {
+      // Can't prevent event default behavior to allow the element gain focus
+      // when touched and start capturing keyboard input in the parent.
+      // console.debug("mousedown at " + e.pageX + " / " + e.pageY);
+      mouseCtx.down = true;
+
+      $this.#sendEventUpdate(mouseCtx, e);
+    }
+
+    function onEndDrag(e) {
+      // Can't prevent event default behavior to allow the element gain focus
+      // when touched and start capturing keyboard input in the parent.
+      // console.debug("mouseup at " + e.pageX + " / " + e.pageY);
+      mouseCtx.down = false;
+
+      $this.#sendEventUpdate(mouseCtx, e);
+    }
+
+    function onContinueDrag(e) {
+      // Can't prevent event default behavior to allow the element gain focus
+      // when touched and start capturing keyboard input in the parent.
+      // console.debug("mousemove at " + e.pageX + " / " + e.pageY + ", down=" +
+      // mouseIsDown);
+      if (mouseCtx.down) {
+        $this.#sendEventUpdate(mouseCtx, e);
+      }
+    }
+
+    if (window.PointerEvent) {
+      displayDeviceVideo.addEventListener('pointerdown', onStartDrag);
+      displayDeviceVideo.addEventListener('pointermove', onContinueDrag);
+      displayDeviceVideo.addEventListener('pointerup', onEndDrag);
+    } else if (window.TouchEvent) {
+      displayDeviceVideo.addEventListener('touchstart', onStartDrag);
+      displayDeviceVideo.addEventListener('touchmove', onContinueDrag);
+      displayDeviceVideo.addEventListener('touchend', onEndDrag);
+    } else if (window.MouseEvent) {
+      displayDeviceVideo.addEventListener('mousedown', onStartDrag);
+      displayDeviceVideo.addEventListener('mousemove', onContinueDrag);
+      displayDeviceVideo.addEventListener('mouseup', onEndDrag);
+    }
+  }
+
  // Translates a mouse/pointer/touch event on a display element into device
  // touch coordinates and sends them over the input channel as a multi-touch
  // update. `ctx` is the shared drag/slot state built in #addMouseTracking.
  #sendEventUpdate(ctx, e) {
    // First 5 chars of the event name: 'mouse', 'point' or 'touch'.
    let eventType = e.type.substring(0, 5);

    // The <video> element:
    const deviceDisplay = e.target;

    // Before the first video frame arrives there is no way to know width and
    // height of the device's screen, so turn every click into a click at 0x0.
    // A click at that position is not more dangerous than anywhere else since
    // the user is clicking blind anyways.
    const videoWidth = deviceDisplay.videoWidth ? deviceDisplay.videoWidth : 1;
    const elementWidth =
        deviceDisplay.offsetWidth ? deviceDisplay.offsetWidth : 1;
    const scaling = videoWidth / elementWidth;

    // Parallel arrays: one entry per touch point in this update.
    let xArr = [];
    let yArr = [];
    let idArr = [];
    let slotArr = [];

    if (eventType == 'mouse' || eventType == 'point') {
      // Single-pointer case: always slot 0; pointer events carry an id.
      xArr.push(e.offsetX);
      yArr.push(e.offsetY);

      let thisId = -1;
      if (eventType == 'point') {
        thisId = e.pointerId;
      }

      slotArr.push(0);
      idArr.push(thisId);
    } else if (eventType == 'touch') {
      // touchstart: list of touch points that became active
      // touchmove: list of touch points that changed
      // touchend: list of touch points that were removed
      let changes = e.changedTouches;
      let rect = e.target.getBoundingClientRect();
      for (let i = 0; i < changes.length; i++) {
        xArr.push(changes[i].pageX - rect.left);
        yArr.push(changes[i].pageY - rect.top);
        if (ctx.touchIdSlotMap.has(changes[i].identifier)) {
          // Known touch identifier: reuse its assigned slot.
          let slot = ctx.touchIdSlotMap.get(changes[i].identifier);

          slotArr.push(slot);
          if (e.type == 'touchstart') {
            // error
            console.error('touchstart when already have slot');
            return;
          } else if (e.type == 'touchmove') {
            idArr.push(changes[i].identifier);
          } else if (e.type == 'touchend') {
            // Touch lifted: free the slot and report id -1 to the device.
            ctx.touchSlots[slot] = false;
            ctx.touchIdSlotMap.delete(changes[i].identifier);
            idArr.push(-1);
          }
        } else {
          if (e.type == 'touchstart') {
            // New touch: claim the first free slot, growing the list if full.
            let slot = -1;
            for (let j = 0; j < ctx.touchSlots.length; j++) {
              if (!ctx.touchSlots[j]) {
                slot = j;
                break;
              }
            }
            if (slot == -1) {
              slot = ctx.touchSlots.length;
              ctx.touchSlots.push(true);
            }
            slotArr.push(slot);
            ctx.touchSlots[slot] = true;
            ctx.touchIdSlotMap.set(changes[i].identifier, slot);
            idArr.push(changes[i].identifier);
          } else if (e.type == 'touchmove') {
            // error
            console.error('touchmove when no slot');
            return;
          } else if (e.type == 'touchend') {
            // error
            console.error('touchend when no slot');
            return;
          }
        }
      }
    }

    // Scale browser element coordinates to device video coordinates.
    for (let i = 0; i < xArr.length; i++) {
      xArr[i] = Math.trunc(xArr[i] * scaling);
      yArr[i] = Math.trunc(yArr[i] * scaling);
    }

    // NOTE: Rotation is handled automatically because the CSS rotation through
    // transforms also rotates the coordinates of events on the object.

    const display_label = deviceDisplay.id;

    this.#deviceConnection.sendMultiTouch(
        {idArr, xArr, yArr, down: ctx.down, slotArr, display_label});
  }
+
+  #updateDisplayVisibility(displayId, powerMode) {
+    const displayVideo = document.getElementById('display_' + displayId);
+    if (displayVideo == null) {
+      console.error('Unknown display id: ' + displayId);
+      return;
+    }
+
+    const display = displayVideo.parentElement;
+    if (display == null) {
+      console.error('Unknown display id: ' + displayId);
+      return;
+    }
+    powerMode = powerMode.toLowerCase();
+    switch (powerMode) {
+      case 'on':
+        display.style.visibility = 'visible';
+        break;
+      case 'off':
+        display.style.visibility = 'hidden';
+        break;
+      default:
+        console.error('Display ' + displayId + ' has unknown display power mode: ' + powerMode);
+    }
+  }
+
+  #onMicButton(evt) {
+    let nextState = evt.type == 'mousedown';
+    if (this.#micActive == nextState) {
+      return;
+    }
+    this.#micActive = nextState;
+    this.#deviceConnection.useMic(nextState);
+  }
+
+  #onCameraCaptureToggle(enabled) {
+    return this.#deviceConnection.useCamera(enabled);
+  }
+
+  #getZeroPaddedString(value, desiredLength) {
+    const s = String(value);
+    return '0'.repeat(desiredLength - s.length) + s;
+  }
+
+  #getTimestampString() {
+    const now = new Date();
+    return [
+      now.getFullYear(),
+      this.#getZeroPaddedString(now.getMonth(), 2),
+      this.#getZeroPaddedString(now.getDay(), 2),
+      this.#getZeroPaddedString(now.getHours(), 2),
+      this.#getZeroPaddedString(now.getMinutes(), 2),
+      this.#getZeroPaddedString(now.getSeconds(), 2),
+    ].join('_');
+  }
+
  // Starts or stops recording of all device displays. While enabled, one
  // MediaRecorder per display captures its <video> stream; on stop each
  // recording is offered to the user as a .webm download. Returns a promise
  // of the (unchanged) requested state.
  #onVideoCaptureToggle(enabled) {
    const recordToggle = document.getElementById('record-video-control');
    if (enabled) {
      let recorders = [];

      // One timestamp shared by all files of this recording session.
      const timestamp = this.#getTimestampString();

      let deviceDisplayVideoList =
        document.getElementsByClassName('device-display-video');
      for (let i = 0; i < deviceDisplayVideoList.length; i++) {
        const deviceDisplayVideo = deviceDisplayVideoList[i];

        const recorder = new MediaRecorder(deviceDisplayVideo.captureStream());
        const recordedData = [];

        recorder.ondataavailable = event => recordedData.push(event.data);
        recorder.onstop = event => {
          const recording = new Blob(recordedData, { type: "video/webm" });

          // Trigger the download via a synthetic anchor click.
          const downloadLink = document.createElement('a');
          downloadLink.setAttribute('download', timestamp + '_display_' + i + '.webm');
          downloadLink.setAttribute('href', URL.createObjectURL(recording));
          downloadLink.click();
        };

        recorder.start();
        recorders.push(recorder);
      }
      this.#recording['recorders'] = recorders;

      recordToggle.style.backgroundColor = 'red';
    } else {
      // Stopping a recorder fires its onstop handler above.
      for (const recorder of this.#recording['recorders']) {
        recorder.stop();
      }
      recordToggle.style.backgroundColor = '';
    }
    return Promise.resolve(enabled);
  }
+
+  #onAudioPlaybackToggle(enabled) {
+    const audioElem = document.getElementById('device-audio');
+    if (enabled) {
+      audioElem.play();
+    } else {
+      audioElem.pause();
+    }
+  }
+
+  #onCustomShellButton(shell_command, e) {
+    // Attempt to init adb again, in case the initial connection failed.
+    // This succeeds immediately if already connected.
+    this.#initializeAdb();
+    if (e.type == 'mousedown') {
+      adbShell(shell_command);
+    }
+  }
+}  // DeviceControlApp
+
// Entry point: on page load, connect to the device named by the server
// connector and start the device control application. On failure an error is
// shown; in both cases the loading spinner is hidden.
window.addEventListener("load", async evt => {
  try {
    setupMessages();
    // The server connector implementation is deployment-specific, hence the
    // dynamic import.
    let connectorModule = await import('./server_connector.js');
    let deviceId = connectorModule.deviceId();
    document.title = deviceId;
    let deviceConnection =
        await ConnectDevice(deviceId, await connectorModule.createConnector());
    // A parent controller is optional; fall back to a no-op implementation.
    let parentController = null;
    if (connectorModule.createParentController) {
      parentController = connectorModule.createParentController();
    }
    if (!parentController) {
      parentController = new EmptyParentController();
    }
    let deviceControlApp = new DeviceControlApp(deviceConnection, parentController);
    deviceControlApp.start();
    document.getElementById('device-connection').style.display = 'block';
  } catch(err) {
    console.error('Unable to connect: ', err);
    showError(
      'No connection to the guest device. ' +
      'Please ensure the WebRTC process on the host machine is active.');
  }
  // Hide the spinner whether or not the connection succeeded.
  document.getElementById('loader').style.display = 'none';
});
+
// Computes the CSS needed to rotate a video element by rotationDeg while
// keeping it inside its parent. The formulas derive from these facts:
// * The video element's aspect ratio (ar) is fixed.
// * CSS rotations are centered on the geometrical center of the element.
// * The aspect ratio is the tangent of the angle between the left-top to
//   right-bottom diagonal (d) and the left side.
// * d = w/sin(arctan(ar)) = h/cos(arctan(ar)), with w = width and h = height.
// * After any rotation, the element's total width is the maximum size of the
//   projection of the diagonals on the X axis (Y axis for height).
// Deriving the formulas is left as an exercise to the reader.
function getStyleAfterRotation(rotationDeg, ar) {
  // Convert the rotation angle to radians and cache the trig values.
  const radians = Math.PI * rotationDeg / 180;
  const cos = Math.cos(radians);
  const sin = Math.sin(radians);

  // width <= parent_width / abs(cos(r) +/- sin(r)/ar)
  const widthDen =
      Math.max(Math.abs((sin / ar) + cos), Math.abs((sin / ar) - cos));
  const maxWidth = `calc(100% / ${widthDen})`;

  // height <= parent_height / abs(cos(r) +/- sin(r)*ar)
  const heightDen =
      Math.max(Math.abs(cos - (sin * ar)), Math.abs(cos + (sin * ar)));
  const maxHeight = `calc(100% / ${heightDen})`;

  // rotated_left >= left * (abs(cos(r) +/- sin(r)/ar) - 1) / 2
  const leftFactor =
      (Math.max(
           Math.abs(cos + (sin / ar)), Math.abs(cos - (sin / ar))) - 1) / 2;
  // rotated_top >= top * (abs(cos(r) +/- sin(r)*ar) - 1) / 2
  const rightFactor =
      (Math.max(
           Math.abs(cos - (sin * ar)), Math.abs(cos + (sin * ar))) - 1) / 2;

  // CSS rotations are in the opposite direction as Android screen rotations
  const transform = `translate(calc(100% * ${leftFactor}), calc(100% * ${
      rightFactor})) rotate(${-rotationDeg}deg)`;

  return {transform, maxWidth, maxHeight};
}
diff --git a/host/frontend/webrtc/html_client/js/cf_webrtc.js b/host/frontend/webrtc/html_client/js/cf_webrtc.js
new file mode 100644
index 0000000..b812694
--- /dev/null
+++ b/host/frontend/webrtc/html_client/js/cf_webrtc.js
@@ -0,0 +1,619 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
// Creates a data channel with the given label on the peer connection and
// returns a wrapper whose send() only delivers once the channel has opened
// (messages are chained onto channelPromise). onMessage, when given,
// receives incoming messages; otherwise they are merely logged.
function createDataChannel(pc, label, onMessage) {
  console.debug('creating data channel: ' + label);
  let dataChannel = pc.createDataChannel(label);
  const channelPromise = new Promise((resolve, reject) => {
    dataChannel.onopen = () => resolve(dataChannel);
    dataChannel.onerror = err => reject(err);
    dataChannel.onclose = () => {
      console.debug(
          'Data channel=' + label + ' state=' + dataChannel.readyState);
    };
    dataChannel.onmessage = onMessage ? onMessage : (msg) => {
      console.debug('Data channel=' + label + ' data="' + msg.data + '"');
    };
  });
  return {
    channelPromise: channelPromise,
    send: function(msg) {
      this.channelPromise = this.channelPromise.then(channel => {
        channel.send(msg);
        return channel;
      })
    },
  };
}
+
// Returns a wrapper for a data channel that the *device* (not this client)
// is expected to create with the given label. send() queues messages until
// the channel opens. Chains with any previously installed ondatachannel
// handler so several labels can be awaited on the same peer connection.
function awaitDataChannel(pc, label, onMessage) {
  console.debug('expecting data channel: ' + label);
  // Return an object with a send function like that of the dataChannel, but
  // that only actually sends over the data channel once it has connected.
  return {
    channelPromise: new Promise((resolve, reject) => {
      let prev_ondatachannel = pc.ondatachannel;
      pc.ondatachannel = ev => {
        let dataChannel = ev.channel;
        if (dataChannel.label == label) {
          dataChannel.onopen = (event) => {
            resolve(dataChannel);
          };
          dataChannel.onclose = () => {
            console.debug(
                'Data channel=' + label + ' state=' + dataChannel.readyState);
          };
          dataChannel.onmessage = onMessage ? onMessage : (msg) => {
            console.debug('Data channel=' + label + ' data="' + msg.data + '"');
          };
          dataChannel.onerror = err => {
            reject(err);
          };
        } else if (prev_ondatachannel) {
          // Not our label: delegate to the previously registered handler.
          prev_ondatachannel(ev);
        }
      };
    }),
    send: function(msg) {
      this.channelPromise = this.channelPromise.then(channel => {
        channel.send(msg);
        return channel;
      })
    },
  };
}
+
// Represents an established connection with a cuttlefish device. Wraps the
// peer connection's remote media streams and the data channels used for
// input, adb, control, bluetooth, camera frames and location messages.
// Fixes in this revision:
// * useCamera() now updates #cameraSenders (was mistakenly #micSenders, so
//   camera tracks were tracked in the mic list and imageCapture/cameraEnabled
//   never saw them).
// * onKmlLocationsMessage()/onGpxLocationsMessage() now install the callback
//   (they previously overwrote the channel object itself, breaking both
//   sending and receiving on those channels).
// * The GPX channel's error message no longer says "KML".
// * #useDevice's default requestedFn returns in_use (was `() => {in_use}`,
//   which returns undefined).
class DeviceConnection {
  #pc;
  #control;
  #description;

  #cameraDataChannel;
  #cameraInputQueue;
  #controlChannel;
  #inputChannel;
  #adbChannel;
  #bluetoothChannel;
  #locationChannel;
  #kmlLocationsChannel;
  #gpxLocationsChannel;

  // stream id -> MediaStream, plus resolvers for streams not yet received.
  #streams;
  #streamPromiseResolvers;
  #streamChangeCallback;
  // RTCRtpSenders for local capture tracks currently added to the pc.
  #micSenders = [];
  #cameraSenders = [];
  #camera_res_x;
  #camera_res_y;

  // Client-installed message callbacks, one per incoming data channel.
  #onAdbMessage;
  #onControlMessage;
  #onBluetoothMessage;
  #onLocationMessage;
  #onKmlLocationsMessage;
  #onGpxLocationsMessage;

  // Most recently *requested* state, which may differ transiently from the
  // actual device state while getUserMedia() is in flight.
  #micRequested = false;
  #cameraRequested = false;

  constructor(pc, control) {
    this.#pc = pc;
    this.#control = control;
    this.#cameraDataChannel = pc.createDataChannel('camera-data-channel');
    this.#cameraDataChannel.binaryType = 'arraybuffer';
    this.#cameraInputQueue = new Array();
    var self = this;
    // Drain the queued camera frames as buffered data is flushed.
    this.#cameraDataChannel.onbufferedamountlow = () => {
      if (self.#cameraInputQueue.length > 0) {
        self.sendCameraData(self.#cameraInputQueue.shift());
      }
    };
    this.#inputChannel = createDataChannel(pc, 'input-channel');
    this.#adbChannel = createDataChannel(pc, 'adb-channel', (msg) => {
      if (!this.#onAdbMessage) {
        console.error('Received unexpected ADB message');
        return;
      }
      this.#onAdbMessage(msg.data);
    });
    // The control channel is created by the device, so await it.
    this.#controlChannel = awaitDataChannel(pc, 'device-control', (msg) => {
      if (!this.#onControlMessage) {
        console.error('Received unexpected Control message');
        return;
      }
      this.#onControlMessage(msg);
    });
    this.#bluetoothChannel =
        createDataChannel(pc, 'bluetooth-channel', (msg) => {
          if (!this.#onBluetoothMessage) {
            console.error('Received unexpected Bluetooth message');
            return;
          }
          this.#onBluetoothMessage(msg.data);
        });
    this.#locationChannel =
        createDataChannel(pc, 'location-channel', (msg) => {
          if (!this.#onLocationMessage) {
            console.error('Received unexpected Location message');
            return;
          }
          this.#onLocationMessage(msg.data);
        });

    this.#kmlLocationsChannel =
        createDataChannel(pc, 'kml-locations-channel', (msg) => {
          if (!this.#onKmlLocationsMessage) {
            console.error('Received unexpected KML Locations message');
            return;
          }
          this.#onKmlLocationsMessage(msg.data);
        });

    this.#gpxLocationsChannel =
        createDataChannel(pc, 'gpx-locations-channel', (msg) => {
          if (!this.#onGpxLocationsMessage) {
            // Was a copy-paste of the KML message above.
            console.error('Received unexpected GPX Locations message');
            return;
          }
          this.#onGpxLocationsMessage(msg.data);
        });
    this.#streams = {};
    this.#streamPromiseResolvers = {};

    pc.addEventListener('track', e => {
      console.debug('Got remote stream: ', e);
      for (const stream of e.streams) {
        this.#streams[stream.id] = stream;
        // Wake up any onStream() callers waiting for this stream id.
        if (this.#streamPromiseResolvers[stream.id]) {
          for (let resolver of this.#streamPromiseResolvers[stream.id]) {
            resolver();
          }
          delete this.#streamPromiseResolvers[stream.id];
        }

        if (this.#streamChangeCallback) {
          this.#streamChangeCallback(stream);
        }
      }
    });
  }

  set description(desc) {
    this.#description = desc;
  }

  get description() {
    return this.#description;
  }

  // An ImageCapture for the active local camera track, or undefined when the
  // camera is not in use.
  get imageCapture() {
    if (this.#cameraSenders && this.#cameraSenders.length > 0) {
      let track = this.#cameraSenders[0].track;
      return new ImageCapture(track);
    }
    return undefined;
  }

  get cameraWidth() {
    return this.#camera_res_x;
  }

  get cameraHeight() {
    return this.#camera_res_y;
  }

  get cameraEnabled() {
    return this.#cameraSenders && this.#cameraSenders.length > 0;
  }

  // Returns the remote stream with the given id, or null if not received.
  getStream(stream_id) {
    if (stream_id in this.#streams) {
      return this.#streams[stream_id];
    }
    return null;
  }

  // Resolves with the stream of the given id once it arrives.
  onStream(stream_id) {
    return new Promise((resolve, reject) => {
      if (this.#streams[stream_id]) {
        resolve(this.#streams[stream_id]);
      } else {
        if (!this.#streamPromiseResolvers[stream_id]) {
          this.#streamPromiseResolvers[stream_id] = [];
        }
        this.#streamPromiseResolvers[stream_id].push(resolve);
      }
    });
  }

  onStreamChange(cb) {
    this.#streamChangeCallback = cb;
  }

  // Serializes an input event and sends it over the input channel.
  #sendJsonInput(evt) {
    this.#inputChannel.send(JSON.stringify(evt));
  }

  sendMousePosition({x, y, down, display_label}) {
    this.#sendJsonInput({
      type: 'mouse',
      down: down ? 1 : 0,
      x,
      y,
      display_label,
    });
  }

  // TODO (b/124121375): This should probably be an array of pointer events and
  // have different properties.
  sendMultiTouch({idArr, xArr, yArr, down, slotArr, display_label}) {
    this.#sendJsonInput({
      type: 'multi-touch',
      id: idArr,
      x: xArr,
      y: yArr,
      down: down ? 1 : 0,
      slot: slotArr,
      display_label: display_label,
    });
  }

  sendKeyEvent(code, type) {
    this.#sendJsonInput({type: 'keyboard', keycode: code, event_type: type});
  }

  disconnect() {
    this.#pc.close();
  }

  // Sends binary data directly to the in-device adb daemon (skipping the host)
  sendAdbMessage(msg) {
    this.#adbChannel.send(msg);
  }

  // Provide a callback to receive data from the in-device adb daemon
  onAdbMessage(cb) {
    this.#onAdbMessage = cb;
  }

  // Send control commands to the device
  sendControlMessage(msg) {
    this.#controlChannel.send(msg);
  }

  // Adds or removes local capture tracks (mic or camera) on the peer
  // connection and renegotiates when anything changed.
  //   in_use: desired state.
  //   senders_arr: the sender list to update; mutated in place.
  //   device_opt: getUserMedia() constraints.
  //   requestedFn: returns the most recently requested state (the user may
  //     toggle again while getUserMedia is in flight).
  //   enabledFn: called with the new stream before its tracks are added.
  // Returns the resulting state.
  async #useDevice(
      in_use, senders_arr, device_opt, requestedFn = () => in_use,
      enabledFn = (stream) => {}) {
    // An empty array means no tracks are currently in use
    if (senders_arr.length > 0 === !!in_use) {
      return in_use;
    }
    let renegotiation_needed = false;
    if (in_use) {
      try {
        let stream = await navigator.mediaDevices.getUserMedia(device_opt);
        // The user may have changed their mind by the time we obtain the
        // stream, check again
        if (!!in_use != requestedFn()) {
          return requestedFn();
        }
        enabledFn(stream);
        stream.getTracks().forEach(track => {
          console.info(`Using ${track.kind} device: ${track.label}`);
          senders_arr.push(this.#pc.addTrack(track));
          renegotiation_needed = true;
        });
      } catch (e) {
        console.error('Failed to add stream to peer connection: ', e);
        // Don't return yet, if there were errors some tracks may have been
        // added so the connection should be renegotiated again.
      }
    } else {
      for (const sender of senders_arr) {
        console.info(
            `Removing ${sender.track.kind} device: ${sender.track.label}`);
        let track = sender.track;
        track.stop();
        this.#pc.removeTrack(sender);
        renegotiation_needed = true;
      }
      // Empty the array passed by reference, just assigning [] won't do that.
      senders_arr.length = 0;
    }
    if (renegotiation_needed) {
      await this.#control.renegotiateConnection();
    }
    // Return the new state
    return senders_arr.length > 0;
  }

  // Turns the local microphone on/off. Returns the resulting state.
  async useMic(in_use) {
    if (this.#micRequested == !!in_use) {
      return in_use;
    }
    this.#micRequested = !!in_use;
    return this.#useDevice(
        in_use, this.#micSenders, {audio: true, video: false},
        () => this.#micRequested);
  }

  // Turns the local camera on/off. Returns the resulting state.
  async useCamera(in_use) {
    if (this.#cameraRequested == !!in_use) {
      return in_use;
    }
    this.#cameraRequested = !!in_use;
    // Fixed: camera tracks belong in #cameraSenders, not #micSenders.
    return this.#useDevice(
        in_use, this.#cameraSenders, {audio: false, video: true},
        () => this.#cameraRequested,
        (stream) => this.sendCameraResolution(stream));
  }

  // Reports the local camera's settings to the device so it can configure
  // the virtual camera accordingly.
  sendCameraResolution(stream) {
    const cameraTracks = stream.getVideoTracks();
    if (cameraTracks.length > 0) {
      const settings = cameraTracks[0].getSettings();
      this.#camera_res_x = settings.width;
      this.#camera_res_y = settings.height;
      this.sendControlMessage(JSON.stringify({
        command: 'camera_settings',
        width: settings.width,
        height: settings.height,
        frame_rate: settings.frameRate,
        facing: settings.facingMode
      }));
    }
  }

  // Sends a camera frame, or queues it while the channel's buffer is full
  // (the queue is drained from the onbufferedamountlow handler).
  sendOrQueueCameraData(data) {
    if (this.#cameraDataChannel.bufferedAmount > 0 ||
        this.#cameraInputQueue.length > 0) {
      this.#cameraInputQueue.push(data);
    } else {
      this.sendCameraData(data);
    }
  }

  // Sends one camera frame in MAX_SIZE chunks followed by an EOF marker.
  sendCameraData(data) {
    const MAX_SIZE = 65535;
    const END_MARKER = 'EOF';
    for (let i = 0; i < data.byteLength; i += MAX_SIZE) {
      // range is clamped to the valid index range
      this.#cameraDataChannel.send(data.slice(i, i + MAX_SIZE));
    }
    this.#cameraDataChannel.send(END_MARKER);
  }

  // Provide a callback to receive control-related comms from the device
  onControlMessage(cb) {
    this.#onControlMessage = cb;
  }

  sendBluetoothMessage(msg) {
    this.#bluetoothChannel.send(msg);
  }

  onBluetoothMessage(cb) {
    this.#onBluetoothMessage = cb;
  }

  sendLocationMessage(msg) {
    this.#locationChannel.send(msg);
  }

  onLocationMessage(cb) {
    this.#onLocationMessage = cb;
  }

  sendKmlLocationsMessage(msg) {
    this.#kmlLocationsChannel.send(msg);
  }

  // Provide a callback to receive KML location messages from the device.
  onKmlLocationsMessage(cb) {
    // Fixed: previously overwrote #kmlLocationsChannel (the channel object)
    // instead of installing the callback.
    this.#onKmlLocationsMessage = cb;
  }

  sendGpxLocationsMessage(msg) {
    this.#gpxLocationsChannel.send(msg);
  }

  // Provide a callback to receive GPX location messages from the device.
  onGpxLocationsMessage(cb) {
    // Fixed: previously overwrote #gpxLocationsChannel (the channel object)
    // instead of installing the callback.
    this.#onGpxLocationsMessage = cb;
  }

  // Provide a callback to receive connectionstatechange states.
  onConnectionStateChange(cb) {
    this.#pc.addEventListener(
        'connectionstatechange', evt => cb(this.#pc.connectionState));
  }
}
+
// Handles signaling for the peer connection: exchanges SDP offers/answers
// and ICE candidates with the device through the server connector, and
// serializes (re)negotiation attempts so they never run concurrently.
class Controller {
  #pc;
  #serverConnector;
  // Serializes state-changing work; see #onReadyToNegotiate.
  #connectedPr = Promise.resolve({});
  // A list of callbacks that need to be called when the remote description is
  // successfully added to the peer connection.
  #onRemoteDescriptionSetCbs = [];

  constructor(serverConnector) {
    this.#serverConnector = serverConnector;
    serverConnector.onDeviceMsg(msg => this.#onDeviceMessage(msg));
  }

  // Dispatches a signaling message received from the device.
  #onDeviceMessage(message) {
    let type = message.type;
    switch (type) {
      case 'offer':
        this.#onOffer({type: 'offer', sdp: message.sdp});
        break;
      case 'answer':
        this.#onRemoteDescription({type: 'answer', sdp: message.sdp});
        break;
      case 'ice-candidate':
          this.#onIceCandidate(new RTCIceCandidate({
            sdpMid: message.mid,
            sdpMLineIndex: message.mLineIndex,
            candidate: message.candidate
          }));
        break;
      case 'error':
        console.error('Device responded with error message: ', message.error);
        break;
      default:
        console.error('Unrecognized message type from device: ', type);
    }
  }

  // Sends this client's SDP answer to the device.
  async #sendClientDescription(desc) {
    console.debug('sendClientDescription');
    return this.#serverConnector.sendToDevice({type: 'answer', sdp: desc.sdp});
  }

  // Sends a locally gathered ICE candidate to the device.
  async #sendIceCandidate(candidate) {
    console.debug('sendIceCandidate');
    return this.#serverConnector.sendToDevice({type: 'ice-candidate', candidate});
  }

  // Handles an SDP offer from the device: installs it, then creates, sets
  // and sends back an answer.
  async #onOffer(desc) {
    try {
      await this.#onRemoteDescription(desc);
      let answer = await this.#pc.createAnswer();
      console.debug('Answer: ', answer);
      await this.#pc.setLocalDescription(answer);
      await this.#sendClientDescription(answer);
    } catch (e) {
      console.error('Error processing remote description (offer)', e)
      throw e;
    }
  }

  // Installs a remote description (offer or answer) on the peer connection
  // and notifies any callbacks waiting for that event.
  async #onRemoteDescription(desc) {
    console.debug(`Remote description (${desc.type}): `, desc);
    try {
      await this.#pc.setRemoteDescription(desc);
      for (const cb of this.#onRemoteDescriptionSetCbs) {
        cb();
      }
      this.#onRemoteDescriptionSetCbs = [];
    } catch (e) {
      console.error(`Error processing remote description (${desc.type})`, e)
      throw e;
    }
  }

  // Adds a remote ICE candidate to the peer connection.
  #onIceCandidate(iceCandidate) {
    console.debug(`Remote ICE Candidate: `, iceCandidate);
    this.#pc.addIceCandidate(iceCandidate);
  }

  // This effectively ensures work that changes connection state doesn't run
  // concurrently.
  // Returns a promise that resolves if the connection is successfully
  // established after the provided work is done.
  #onReadyToNegotiate(work_cb) {
    const connectedPr = this.#connectedPr.then(() => {
      const controller = new AbortController();
      const pr = new Promise((resolve, reject) => {
        // The promise resolves when the connection changes state to 'connected'
        // or when a remote description is set and the connection was already in
        // 'connected' state.
        this.#onRemoteDescriptionSetCbs.push(() => {
          if (this.#pc.connectionState == 'connected') {
            resolve({});
          }
        });
        this.#pc.addEventListener('connectionstatechange', evt => {
          let state = this.#pc.connectionState;
          if (state == 'connected') {
            resolve(evt);
          } else if (state == 'failed') {
            reject(evt);
          }
        }, {signal: controller.signal});
      });
      // Remove the listener once the promise fulfills.
      pr.finally(() => controller.abort());
      work_cb();
      // Don't return pr.finally() since that is never rejected.
      return pr;
    });
    // A failure is also a sign that renegotiation is possible again
    this.#connectedPr = connectedPr.catch(_ => {});
    return connectedPr;
  }

  // Requests an offer from the device and resolves once the peer connection
  // reaches the 'connected' state.
  async ConnectDevice(pc, infraConfig) {
    this.#pc = pc;
    console.debug('ConnectDevice');
    // ICE candidates will be generated when we add the offer. Adding it here
    // instead of in #onOffer because this function is called once per peer
    // connection, while #onOffer may be called more than once due to
    // renegotiations.
    this.#pc.addEventListener('icecandidate', evt => {
      if (evt.candidate) this.#sendIceCandidate(evt.candidate);
    });
    return this.#onReadyToNegotiate(_ => {
      this.#serverConnector.sendToDevice(
        {type: 'request-offer', ice_servers: infraConfig.ice_servers});
    });
  }

  // Starts a renegotiation (e.g. after tracks were added/removed) with this
  // client as the offerer.
  async renegotiateConnection() {
    return this.#onReadyToNegotiate(async () => {
      console.debug('Re-negotiating connection');
      let offer = await this.#pc.createOffer();
      console.debug('Local description (offer): ', offer);
      await this.#pc.setLocalDescription(offer);
      await this.#serverConnector.sendToDevice({type: 'offer', sdp: offer.sdp});
    });
  }
}
+
// Builds an RTCPeerConnection configured with the infra's ICE servers and
// attaches debug logging for candidate and state-change events.
function createPeerConnection(infra_config) {
  const pc = new RTCPeerConnection({iceServers: infra_config.ice_servers});

  pc.addEventListener('icecandidate', evt => {
    console.debug('Local ICE Candidate: ', evt.candidate);
  });
  pc.addEventListener('iceconnectionstatechange', evt => {
    console.debug(`ICE State Change: ${pc.iceConnectionState}`);
  });
  pc.addEventListener('connectionstatechange', evt => {
    console.debug(`WebRTC Connection State Change: ${pc.connectionState}`);
  });
  return pc;
}
+
// Requests the device with the given id from the server, establishes the
// WebRTC connection and resolves to a ready DeviceConnection.
export async function Connect(deviceId, serverConnector) {
  const requestRet = await serverConnector.requestDevice(deviceId);
  const deviceInfo = requestRet.deviceInfo;
  const infraConfig = requestRet.infraConfig;
  console.debug('Device available:');
  console.debug(deviceInfo);
  const pc = createPeerConnection(infraConfig);

  const control = new Controller(serverConnector);
  const deviceConnection = new DeviceConnection(pc, control);
  deviceConnection.description = deviceInfo;

  await control.ConnectDevice(pc, infraConfig);
  return deviceConnection;
}
diff --git a/host/frontend/webrtc/html_client/js/controls.js b/host/frontend/webrtc/html_client/js/controls.js
new file mode 100644
index 0000000..eb21a34
--- /dev/null
+++ b/host/frontend/webrtc/html_client/js/controls.js
@@ -0,0 +1,343 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Creates a "toggle control". The onToggleCb callback is called every time the
+// control changes state with the new toggle position (true for ON) and is
+// expected to return a promise of the new toggle position which can resolve to
+// the opposite position of the one received if there was error.
+function createToggleControl(elm, onToggleCb, initialState = false) {
+  elm.classList.add('toggle-control');
+  let offClass = 'toggle-off';
+  let onClass = 'toggle-on';
+  let state = !!initialState;
+  let toggle = {
+    // Sets the state of the toggle control. This only affects the
+    // visible state of the control in the UI, it doesn't affect the
+    // state of the underlying resources. It's most useful to make
+    // changes of said resources visible to the user.
+    Set: enabled => {
+      state = enabled;
+      if (enabled) {
+        elm.classList.remove(offClass);
+        elm.classList.add(onClass);
+      } else {
+        elm.classList.add(offClass);
+        elm.classList.remove(onClass);
+      }
+    }
+  };
+  toggle.Set(initialState);
+  addMouseListeners(elm, e => {
+    if (e.type != 'mousedown') {
+      return;
+    }
+    // Enable it if it's currently disabled
+    let enableNow = !state;
+    let nextPr = onToggleCb(enableNow);
+    if (nextPr && 'then' in nextPr) {
+      nextPr.then(enabled => toggle.Set(enabled));
+    }
+  });
+  return toggle;
+}
+
+function createButtonListener(button_id_class, func,
+  deviceConnection, listener) {
+  let buttons = [];
+  let ele = document.getElementById(button_id_class);
+  if (ele != null) {
+    buttons.push(ele);
+  } else {
+    buttons = document.getElementsByClassName(button_id_class);
+  }
+  for (var button of buttons) {
+    if (func != null) {
+      button.onclick = func;
+    }
+    button.addEventListener('mousedown', listener);
+  }
+}
+
+function createInputListener(input_id, func, listener) {
+  let input = document.getElementById(input_id);
+  if (func != null) {
+    input.oninput = func;
+  }
+  input.addEventListener('input', listener);
+}
+
+function validateMacAddress(val) {
+  var regex = /^([0-9A-Fa-f]{2}[:-]){5}([0-9A-Fa-f]{2})$/;
+  return (regex.test(val));
+}
+
+function validateMacWrapper() {
+  let type = document.getElementById('bluetooth-wizard-type').value;
+  let button = document.getElementById("bluetooth-wizard-device");
+  let macField = document.getElementById('bluetooth-wizard-mac');
+  if (this.id == 'bluetooth-wizard-type') {
+    if (type == "remote_loopback") {
+      button.disabled = false;
+      macField.setCustomValidity('');
+      macField.disabled = true;
+      macField.required = false;
+      macField.placeholder = 'N/A';
+      macField.value = '';
+      return;
+    }
+  }
+  macField.disabled = false;
+  macField.required = true;
+  macField.placeholder = 'Device MAC';
+  if (validateMacAddress($(macField).val())) {
+    button.disabled = false;
+    macField.setCustomValidity('');
+  } else {
+    button.disabled = true;
+    macField.setCustomValidity('MAC address invalid');
+  }
+}
+
+$('[validate-mac]').bind('input', validateMacWrapper);
+$('[validate-mac]').bind('select', validateMacWrapper);
+
+function parseDevice(device) {
+  let id, name, mac;
+  var regex = /([0-9]+):([^@ ]*)(@(([0-9A-Fa-f]{2}[:-]){5}([0-9A-Fa-f]{2})))?/;
+  if (regex.test(device)) {
+    let regexMatches = device.match(regex);
+    id = regexMatches[1];
+    name = regexMatches[2];
+    mac = regexMatches[4];
+  }
+  if (mac === undefined) {
+    mac = "";
+  }
+  return [id, name, mac];
+}
+
+function btUpdateAdded(devices) {
+  let deviceArr = devices.split('\r\n');
+  let [id, name, mac] = parseDevice(deviceArr[0]);
+  if (name) {
+    let div = document.getElementById('bluetooth-wizard-confirm').getElementsByClassName('bluetooth-text')[1];
+    div.innerHTML = "";
+    div.innerHTML += "<p>Name: <b>" + id + "</b></p>";
+    div.innerHTML += "<p>Type: <b>" + name + "</b></p>";
+    div.innerHTML += "<p>MAC Addr: <b>" + mac + "</b></p>";
+    return true;
+  }
+  return false;
+}
+
+function parsePhy(phy) {
+  let id = phy.substring(0, phy.indexOf(":"));
+  phy = phy.substring(phy.indexOf(":") + 1);
+  let name = phy.substring(0, phy.indexOf(":"));
+  let devices = phy.substring(phy.indexOf(":") + 1);
+  return [id, name, devices];
+}
+
+function btParsePhys(phys) {
+  if (phys.indexOf("Phys:") < 0) {
+    return null;
+  }
+  let phyDict = {};
+  phys = phys.split('Phys:')[1];
+  let phyArr = phys.split('\r\n');
+  for (var phy of phyArr.slice(1)) {
+    phy = phy.trim();
+    if (phy.length == 0 || phy.indexOf("deleted") >= 0) {
+      continue;
+    }
+    let [id, name, devices] = parsePhy(phy);
+    phyDict[name] = id;
+  }
+  return phyDict;
+}
+
+function btUpdateDeviceList(devices) {
+  let deviceArr = devices.split('\r\n');
+  if (deviceArr[0].indexOf("Devices:") >= 0) {
+    let div = document.getElementById('bluetooth-list').getElementsByClassName('bluetooth-text')[0];
+    div.innerHTML = "";
+    let count = 0;
+    for (var device of deviceArr.slice(1)) {
+      if (device.indexOf("Phys:") >= 0) {
+        break;
+      }
+      count++;
+      if (device.indexOf("deleted") >= 0) {
+        continue;
+      }
+      let [id, name, mac] = parseDevice(device);
+      let innerDiv = '<div><button title="Delete" data-device-id="'
+      innerDiv += id;
+      innerDiv += '" class="bluetooth-list-trash material-icons">delete</button>';
+      innerDiv += name;
+      if (mac) {
+        innerDiv += " | "
+        innerDiv += mac;
+      }
+      innerDiv += '</div>';
+      div.innerHTML += innerDiv;
+    }
+    return count;
+  }
+  return -1;
+}
+
+function addMouseListeners(button, listener) {
+  // Capture mousedown/up/out commands instead of click to enable
+  // hold detection. mouseout is used to catch if the user moves the
+  // mouse outside the button while holding down.
+  button.addEventListener('mousedown', listener);
+  button.addEventListener('mouseup', listener);
+  button.addEventListener('mouseout', listener);
+}
+
+function createControlPanelButton(
+    title, icon_name, listener, parent_id = 'control-panel-default-buttons') {
+  let button = document.createElement('button');
+  document.getElementById(parent_id).appendChild(button);
+  button.title = title;
+  button.disabled = true;
+  addMouseListeners(button, listener);
+  // Set the button image using Material Design icons.
+  // See http://google.github.io/material-design-icons
+  // and https://material.io/resources/icons
+  button.classList.add('material-icons');
+  button.innerHTML = icon_name;
+  return button;
+}
+
+function positionModal(button_id, modal_id) {
+  const modalButton = document.getElementById(button_id);
+  const modalDiv = document.getElementById(modal_id);
+
+  // Position the modal to the right of the show modal button.
+  modalDiv.style.top = modalButton.offsetTop + 'px';
+  modalDiv.style.left = (modalButton.offsetWidth + 30) + 'px';
+}
+
+function createModalButton(button_id, modal_id, close_id, hide_id) {
+  const modalButton = document.getElementById(button_id);
+  const modalDiv = document.getElementById(modal_id);
+  const modalHeader = modalDiv.querySelector('.modal-header');
+  const modalClose = document.getElementById(close_id);
+  const modalDivHide = document.getElementById(hide_id);
+
+  positionModal(button_id, modal_id);
+
+  function showHideModal(show) {
+    if (show) {
+      modalButton.classList.add('modal-button-opened')
+      modalDiv.style.display = 'block';
+    } else {
+      modalButton.classList.remove('modal-button-opened')
+      modalDiv.style.display = 'none';
+    }
+    if (modalDivHide != null) {
+      modalDivHide.style.display = 'none';
+    }
+  }
+  // Allow the show modal button to toggle the modal,
+  modalButton.addEventListener(
+      'click', evt => showHideModal(modalDiv.style.display != 'block'));
+  // but the close button always closes.
+  modalClose.addEventListener('click', evt => showHideModal(false));
+
+  // Allow the modal to be dragged by the header.
+  let modalOffsets = {
+    midDrag: false,
+    mouseDownOffsetX: null,
+    mouseDownOffsetY: null,
+  };
+  modalHeader.addEventListener('mousedown', evt => {
+    modalOffsets.midDrag = true;
+    // Store the offset of the mouse location from the
+    // modal's current location.
+    modalOffsets.mouseDownOffsetX = parseInt(modalDiv.style.left) - evt.clientX;
+    modalOffsets.mouseDownOffsetY = parseInt(modalDiv.style.top) - evt.clientY;
+  });
+  modalHeader.addEventListener('mousemove', evt => {
+    let offsets = modalOffsets;
+    if (offsets.midDrag) {
+      // Move the modal to the mouse location plus the
+      // offset calculated on the initial mouse-down.
+      modalDiv.style.left = (evt.clientX + offsets.mouseDownOffsetX) + 'px';
+      modalDiv.style.top = (evt.clientY + offsets.mouseDownOffsetY) + 'px';
+    }
+  });
+  document.addEventListener('mouseup', evt => {
+    modalOffsets.midDrag = false;
+  });
+}
+
+function cmdConsole(consoleViewName, consoleInputName) {
+  let consoleView = document.getElementById(consoleViewName);
+
+  let addString =
+      function(str) {
+    consoleView.value += str;
+    consoleView.scrollTop = consoleView.scrollHeight;
+  }
+
+  let addLine =
+      function(line) {
+    addString(line + '\r\n');
+  }
+
+  let commandCallbacks = [];
+
+  let addCommandListener =
+      function(f) {
+    commandCallbacks.push(f);
+  }
+
+  let onCommand =
+      function(cmd) {
+    cmd = cmd.trim();
+
+    if (cmd.length == 0) return;
+
+    commandCallbacks.forEach(f => {
+      f(cmd);
+    })
+  }
+
+  addCommandListener(cmd => addLine('>> ' + cmd));
+
+  let consoleInput = document.getElementById(consoleInputName);
+
+  consoleInput.addEventListener('keydown', e => {
+    if ((e.key && e.key == 'Enter') || e.keyCode == 13) {
+      let command = e.target.value;
+
+      e.target.value = '';
+
+      onCommand(command);
+    }
+  });
+
+  return {
+    consoleView: consoleView,
+    consoleInput: consoleInput,
+    addLine: addLine,
+    addString: addString,
+    addCommandListener: addCommandListener,
+  };
+}
diff --git a/host/frontend/webrtc/html_client/js/location.js b/host/frontend/webrtc/html_client/js/location.js
new file mode 100644
index 0000000..284ae22
--- /dev/null
+++ b/host/frontend/webrtc/html_client/js/location.js
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+'use strict';
+
+function loadFile(onload_func) {
+    let input = document.getElementById("locations_select_file");
+
+    if (!input.files[0]) {
+        alert("Please select a file ");
+        return;
+    }
+
+    let file = input.files[0];
+    let fr = new FileReader();
+    fr.addEventListener('error', () => {
+        alert("Error occurred reading file");
+    });
+
+    fr.addEventListener('load', () => {
+        console.debug("the location file is fully loaded");
+        onload_func(fr.result);
+    });
+
+    fr.readAsText(file);
+}
\ No newline at end of file
diff --git a/host/frontend/webrtc/client/js/rootcanal.js b/host/frontend/webrtc/html_client/js/rootcanal.js
similarity index 100%
rename from host/frontend/webrtc/client/js/rootcanal.js
rename to host/frontend/webrtc/html_client/js/rootcanal.js
diff --git a/host/frontend/webrtc/html_client/style.css b/host/frontend/webrtc/html_client/style.css
new file mode 100644
index 0000000..d07c29d
--- /dev/null
+++ b/host/frontend/webrtc/html_client/style.css
@@ -0,0 +1,369 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/* Dark theme, the default if none specified */
+body, body.dark-theme {
+  --main-bg: black;
+  --main-fg: white;
+  --error-bg: #900000; /* dark red */
+  --alert-bg: #927836; /* dark yellow */
+  --info-bg: #007000; /* dark green */
+  --modal-bg: #5f6368ea; /* Semi-transparent Google grey 500 */
+  --modal-button-bg: #e8eaed; /* Google grey 200 */
+  --modal-button-shadow: #444444;
+  --modal-button-fg: black;
+  --modal-button-border: black;
+  --modal-button-invalid-border: red;
+  --modal-button-highlight-bg: #f4cccc; /* light red */
+  --bt-label-fg: green;
+  --bt-label-invalid-fg: red;
+  --bt-action-bg: transparent;
+  --custom-button-bg: #1c4587ff; /* blue */
+  --button-fg: #e8eaed; /* Google grey 200 */
+  --button-bg: transparent;
+  --button-disabled-fg: #9aa0a6; /* Google grey 500 */
+  --button-pressed-border: #505050;
+  --button-pressed-bg: #303030;
+}
+/* End of dark theme */
+
+/* Light theme */
+body.light-theme {
+  --main-bg: #fafafa;
+  --main-fg: black;
+  --error-bg: #ea9da0; /* light red */
+  --alert-bg: #f3ef9e; /* light yellow */
+  --info-bg: #a5d5a5; /* light green */
+  --modal-bg: #d9d9d9ea; /* Semi-transparent Google grey 200 */
+  --modal-button-bg: #7b7b7b; /* Google grey 500 */
+  --modal-button-shadow: #666666;
+  --modal-button-fg: #fafafa;
+  --modal-button-border: #c4c4c4; /* Google grey 300 */
+  --modal-button-invalid-border: #c3413d; /*light red */
+  --modal-button-highlight-bg: #a05555; /* dark red-ish */
+  --bt-label-fg: green;
+  --bt-label-invalid-fg: #c3413d; /* light red */
+  --bt-action-bg: transparent;
+  --custom-button-bg: #8fc3ea; /* light blue */
+  --button-fg: #555555; /* Google grey 600 */
+  --button-bg: transparent;
+  --button-disabled-fg: #c4c4c4; /* Google grey 300 */
+  --button-pressed-border: #c4c4c4; /* Google grey 300 */
+  --button-pressed-bg: #d9d9d9; /* Google grey 200 */
+}
+/* End of light theme */
+
+body {
+  background-color: var(--main-bg);
+  margin: 0;
+  touch-action: none;
+  overscroll-behavior: none;
+}
+
+#device-connection {
+  display: none;
+  max-height: 100vh;
+}
+
+@keyframes spin {
+  0% { transform: rotate(0deg); }
+  100% { transform: rotate(360deg); }
+}
+
+#loader {
+  border-left: 12px solid #4285F4;
+  border-top: 12px solid #34A853;
+  border-right: 12px solid #FBBC05;
+  border-bottom: 12px solid #EA4335;
+  border-radius: 50%;
+  width: 70px;
+  height: 70px;
+  animation: spin 1.2s linear infinite;
+  margin: 100px;
+}
+
+/* Top header row. */
+
+#header {
+  height: 64px;
+  /* Items inside this use a row Flexbox.*/
+  display: flex;
+  align-items: center;
+}
+
+#camera_off_btn {
+  display: none !important;
+}
+#record_video_btn {
+  display: none !important;
+}
+
+#error-message-div {
+  position: absolute;
+  top: 0;
+  left: 0;
+  width: 100%;
+  z-index: 999;
+  opacity: 0.97;
+}
+#error-message {
+  color: var(--main-fg);
+  font-family: 'Open Sans', sans-serif;
+  padding: 10px;
+  margin: 10px;
+  border-radius: 10px;
+}
+#error-message .close-btn {
+  float: right;
+  cursor: pointer;
+}
+#error-message.hidden {
+  display: none;
+}
+#error-message.info {
+  background-color: var(--info-bg);
+}
+#error-message.warning {
+  background-color: var(--alert-bg);
+}
+#error-message.error {
+  background-color: var(--error-bg);
+}
+/* Control panel buttons and device screen(s). */
+
+#controls-and-displays {
+  height: 100%;
+
+  /* Items inside this use a row Flexbox.*/
+  display: flex;
+}
+
+#controls-and-displays > div {
+  margin-left: 5px;
+  margin-right: 5px;
+}
+
+.modal {
+  /* Start out hidden, and use absolute positioning. */
+  display: none;
+  position: absolute;
+
+  border-radius: 10px;
+  padding: 20px;
+  padding-top: 1px;
+
+  background-color: var(--modal-bg);
+  color: var(--main-fg);
+  font-family: 'Open Sans', sans-serif;
+}
+.modal-header {
+  cursor: move;
+  /* Items inside this use a row Flexbox.*/
+  display: flex;
+  justify-content: space-between;
+}
+.modal-close {
+  color: var(--main-fg);
+  border: none;
+  outline: none;
+  background-color: transparent;
+}
+.modal-button, .modal-button-highlight {
+  background:    var(--modal-button-bg);
+  border-radius: 10px;
+  box-shadow:    1px 1px var(--modal-button-shadow);
+  padding:       10px 20px;
+  color:         var(--modal-button-fg);
+  display:       inline-block;
+  font:          normal bold 14px/1 "Open Sans", sans-serif;
+  text-align:    center;
+}
+#bluetooth-wizard-mac:valid {
+  border: 2px solid var(--modal-button-border);
+}
+#bluetooth-wizard-mac:invalid {
+  border: 2px solid var(--modal-button-invalid-border);
+}
+#bluetooth-wizard-mac:invalid + span::before {
+  font-weight: bold;
+  content: 'X';
+  color: var(--bt-label-invalid-fg);
+}
+#bluetooth-wizard-mac:valid + span::before {
+  font-weight: bold;
+  content: 'OK';
+  color: var(--bt-label-fg);
+}
+.modal-button {
+  background: var(--modal-button-bg);
+}
+.modal-button-highlight {
+  background: var(--modal-button-highlight-bg);
+}
+#device-details-modal span {
+  white-space: pre;
+}
+#bluetooth-console-input {
+  width: 100%;
+}
+#bluetooth-console-cmd-label {
+  color: var(--main-fg);
+}
+.bluetooth-text, .bluetooth-text-bold, .bluetooth-text-field input {
+  font: normal 18px/1 "Open Sans", sans-serif;
+}
+.bluetooth-text, .bluetooth-text-bold {
+  color: var(--main-fg);
+}
+.bluetooth-text-bold {
+  font-weight: bold;
+}
+.bluetooth-button {
+  text-align: center;
+}
+.bluetooth-drop-down select {
+  font: normal 18px/1 "Open Sans", sans-serif;
+  color: var(--modal-button-fg);
+  width: 500px;
+  margin: 5px;
+  rows: 10;
+  columns: 60;
+}
+.bluetooth-text-field input {
+  color: var(--modal-button-fg);
+  width: 500px;
+  margin: 5px;
+  rows: 10;
+  columns: 60;
+}
+.bluetooth-list-trash {
+  background:    var(--bt-action-bg);
+  border:        0px;
+  color:         var(--main-fg);
+}
+
+.location-text, .location-text-bold, .location-text-field input {
+  font: normal 18px/1 "Open Sans", sans-serif;
+}
+.location-text, .location-text-bold {
+  color: var(--main-fg);
+}
+.location-text-bold {
+  font-weight: bold;
+}
+.location-button {
+  text-align: center;
+}
+
+.control-panel-column {
+  width: 50px;
+  /* Items inside this use a column Flexbox.*/
+  display: flex;
+  flex-direction: column;
+  padding: 1px;
+}
+#control-panel-custom-buttons {
+  display: none;
+  /* Give the custom buttons column a blue background. */
+  background-color: var(--custom-button-bg);
+  height: fit-content;
+  border-radius: 10px;
+}
+
+.control-panel-column button {
+  margin: 0px 1px 5px 1px;
+  height: 50px;
+  font-size: 32px;
+
+  color: var(--button-fg);
+  border: none;
+  border-radius: 10px;
+  outline: none;
+  background-color: var(--button-bg);
+}
+
+.control-panel-column button.modal-button-opened {
+  background-color: var(--modal-bg);
+}
+
+.control-panel-column button:disabled {
+  color: var(--button-disabled-fg);
+  cursor: not-allowed;
+}
+
+.control-panel-column button:enabled {
+  cursor: pointer;
+}
+
+.control-panel-column button:active {
+  margin: 0px 0px 5px 0px;
+  border: solid 1px var(--button-pressed-border);
+  background-color: var(--button-pressed-bg);
+}
+
+#device-displays {
+  /* Take up the remaining width of the window.*/
+  flex-grow: 1;
+  /* Don't grow taller than the window.*/
+  max-height: 100vh;
+  /* Allows child elements to be positioned relative to this element. */
+  position: relative;
+  display: flex;
+  flex-direction: row;
+  /* Don't show an outline when using the keyboard */
+  outline: 0px solid transparent;
+}
+
+/*
+ * Container <div> used to wrap each display's <video> element which is used for
+ * maintaining each display's width and height while the display is potentially
+ * rotating.
+ */
+.device-display {
+  position: relative;
+  display: flex;
+  flex-direction: column;
+  flex-grow: 1;
+  margin: 10px;
+  visibility: hidden;
+}
+
+.device-video-container {
+  flex-grow: 1;
+  overflow: hidden;
+}
+
+/* Container <div> to show info about the individual display. */
+.device-display-info {
+  color: var(--main-fg);
+  /* dark green */
+  background-color: var(--info-bg);
+  font-family: 'Open Sans', sans-serif;
+  text-indent: 0px;
+  border-radius: 10px;
+  padding: 10px;
+  margin-bottom: 10px;
+  flex-grow: 0;
+  flex-shrink: 0;
+}
+
+/* The actual <video> element for each display. */
+.device-display-video {
+  position: relative;
+  max-width: 100%;
+  max-height: 100%;
+  touch-action: none;
+}
diff --git a/host/frontend/webrtc/kernel_log_events_handler.cpp b/host/frontend/webrtc/kernel_log_events_handler.cpp
index e31e4da..7bd8e5a 100644
--- a/host/frontend/webrtc/kernel_log_events_handler.cpp
+++ b/host/frontend/webrtc/kernel_log_events_handler.cpp
@@ -99,6 +99,11 @@
 int KernelLogEventsHandler::AddSubscriber(
     std::function<void(const Json::Value&)> subscriber) {
   std::lock_guard<std::mutex> lock(subscribers_mtx_);
+  for (const auto& event : last_events_) {
+    // Deliver the last event of each type to the new subscriber so that it can
+    // show the correct state.
+    subscriber(event);
+  }
   subscribers_[++last_subscriber_id_] = subscriber;
   return last_subscriber_id_;
 }
@@ -110,6 +115,18 @@
 
 void KernelLogEventsHandler::DeliverEvent(const Json::Value& event) {
   std::lock_guard<std::mutex> lock(subscribers_mtx_);
+  // event["event"] is actually the type of the event.
+  // This would be more efficient with a set, but a list maintains the order in
+  // which events arrived. And for just a handful of elements the list can
+  // actually perform better.
+  for (auto it = last_events_.begin();
+       it != last_events_.end(); it++) {
+    if ((*it)["event"].asString() == event["event"].asString()) {
+      last_events_.erase(it);
+      break;
+    }
+  }
+  last_events_.push_back(event);
   for (const auto& entry : subscribers_) {
     entry.second(event);
   }
diff --git a/host/frontend/webrtc/kernel_log_events_handler.h b/host/frontend/webrtc/kernel_log_events_handler.h
index 5ce99aa..7f1d7e5 100644
--- a/host/frontend/webrtc/kernel_log_events_handler.h
+++ b/host/frontend/webrtc/kernel_log_events_handler.h
@@ -17,10 +17,11 @@
 #pragma once
 
 #include <atomic>
+#include <list>
+#include <map>
 #include <memory>
 #include <mutex>
 #include <thread>
-#include <map>
 
 #include <json/json.h>
 
@@ -47,6 +48,7 @@
   std::map<int, std::function<void(const Json::Value&)>> subscribers_;
   int last_subscriber_id_ = 0;
   std::mutex subscribers_mtx_;
+  std::list<Json::Value> last_events_;
 };
 
 }  // namespace cuttlefish
diff --git a/host/frontend/webrtc/kml_locations_handler.cpp b/host/frontend/webrtc/kml_locations_handler.cpp
new file mode 100644
index 0000000..a181b84
--- /dev/null
+++ b/host/frontend/webrtc/kml_locations_handler.cpp
@@ -0,0 +1,71 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/frontend/webrtc/kml_locations_handler.h"
+#include <android-base/logging.h>
+#include <unistd.h>
+#include "host/libs/config/cuttlefish_config.h"
+#include "host/libs/location/GnssClient.h"
+#include "host/libs/location/KmlParser.h"
+#include "string.h"
+
+#include <chrono>
+#include <iostream>
+#include <sstream>
+#include <thread>
+#include <vector>
+
+namespace cuttlefish {
+namespace webrtc_streaming {
+
+KmlLocationsHandler::KmlLocationsHandler(
+    std::function<void(const uint8_t *, size_t)> send_to_client) {}
+
+KmlLocationsHandler::~KmlLocationsHandler() {}
+
+void KmlLocationsHandler::HandleMessage(const uint8_t *msg, size_t len) {
+  LOG(DEBUG) << "ENTER KmlLocationsHandler handleMessage , size: " << len;
+  std::string error;
+  GpsFixArray coordinates;
+  if (!KmlParser::parseString((const char *)&msg[0], len, &coordinates,
+                              &error)) {
+    LOG(ERROR) << " Parsing Error: " << error << std::endl;
+    return;
+  }
+
+  LOG(DEBUG) << "Number of parsed points: " << coordinates.size() << std::endl;
+  auto config = CuttlefishConfig::Get();
+  if (!config) {
+    LOG(ERROR) << "Failed to obtain config object";
+    return;
+  }
+  auto instance = config->ForDefaultInstance();
+  auto server_port = instance.gnss_grpc_proxy_server_port();
+  std::string socket_name =
+      std::string("localhost:") + std::to_string(server_port);
+  LOG(DEBUG) << "Server port: " << server_port << " socket: " << socket_name
+             << std::endl;
+
+
+  GnssClient gpsclient(
+      grpc::CreateChannel(socket_name, grpc::InsecureChannelCredentials()));
+
+
+  auto reply = gpsclient.SendGpsLocations(1000,coordinates);
+}
+
+}  // namespace webrtc_streaming
+}  // namespace cuttlefish
diff --git a/host/frontend/webrtc/kml_locations_handler.h b/host/frontend/webrtc/kml_locations_handler.h
new file mode 100644
index 0000000..d8b2ffc
--- /dev/null
+++ b/host/frontend/webrtc/kml_locations_handler.h
@@ -0,0 +1,33 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include "common/libs/fs/shared_select.h"
+
+namespace cuttlefish {
+namespace webrtc_streaming {
+
+struct KmlLocationsHandler {
+  explicit KmlLocationsHandler(
+      std::function<void(const uint8_t *, size_t)> send_to_client);
+
+  ~KmlLocationsHandler();
+
+  void HandleMessage(const uint8_t *msg, size_t len);
+};
+}  // namespace webrtc_streaming
+}  // namespace cuttlefish
diff --git a/host/frontend/webrtc/lib/audio_device.cpp b/host/frontend/webrtc/lib/audio_device.cpp
deleted file mode 100644
index 2d96f72..0000000
--- a/host/frontend/webrtc/lib/audio_device.cpp
+++ /dev/null
@@ -1,247 +0,0 @@
-/*
- * Copyright (C) 2021 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "host/frontend/webrtc/lib/audio_device.h"
-
-#include <string.h>
-
-#include <android-base/logging.h>
-
-#include <chrono>
-#include <thread>
-
-namespace cuttlefish {
-namespace webrtc_streaming {
-
-CfAudioDeviceModule::CfAudioDeviceModule() {}
-
-int CfAudioDeviceModule::GetMoreAudioData(void* data, int bytes_per_sample,
-                                          int samples_per_channel,
-                                          int num_channels, int sample_rate,
-                                          bool& muted) {
-  muted = !playing_ || !audio_callback_;
-  if (muted) {
-    return 0;
-  }
-
-  size_t read_samples;
-  int64_t elapsed_time;
-  int64_t ntp_time_ms;
-  auto res = audio_callback_->NeedMorePlayData(
-      samples_per_channel, bytes_per_sample, num_channels, sample_rate, data,
-      read_samples, &elapsed_time, &ntp_time_ms);
-  if (res != 0) {
-    return res;
-  }
-  return read_samples / num_channels;
-}
-
-// Retrieve the currently utilized audio layer
-int32_t CfAudioDeviceModule::ActiveAudioLayer(AudioLayer* audioLayer) const {
-  return -1;
-}
-
-// Full-duplex transportation of PCM audio
-int32_t CfAudioDeviceModule::RegisterAudioCallback(
-    webrtc::AudioTransport* audio_callback) {
-  audio_callback_ = audio_callback;
-  return 0;
-}
-
-// Main initialization and termination
-int32_t CfAudioDeviceModule::Init() { return 0; }
-int32_t CfAudioDeviceModule::Terminate() { return 0; }
-bool CfAudioDeviceModule::Initialized() const { return true; }
-
-// Device enumeration
-int16_t CfAudioDeviceModule::PlayoutDevices() { return 1; }
-int16_t CfAudioDeviceModule::RecordingDevices() { return 1; }
-int32_t CfAudioDeviceModule::PlayoutDeviceName(
-    uint16_t index, char name[webrtc::kAdmMaxDeviceNameSize],
-    char guid[webrtc::kAdmMaxGuidSize]) {
-  if (index != 0) {
-    return -1;
-  }
-  constexpr auto device_name = "Cuttlefish Webrtc Audio";
-  constexpr auto device_guid = "Cuttlefish Webrtc Audio Device Id";
-  strncpy(name, device_name, webrtc::kAdmMaxDeviceNameSize);
-  name[webrtc::kAdmMaxDeviceNameSize - 1] = '\0';
-  strncpy(guid, device_guid, webrtc::kAdmMaxGuidSize);
-  guid[webrtc::kAdmMaxGuidSize - 1] = '\0';
-  return 0;
-}
-int32_t CfAudioDeviceModule::RecordingDeviceName(
-    uint16_t index, char name[webrtc::kAdmMaxDeviceNameSize],
-    char guid[webrtc::kAdmMaxGuidSize]) {
-  if (index != 0) {
-    return -1;
-  }
-  constexpr auto device_name = "Cuttlefish Webrtc Audio";
-  constexpr auto device_guid = "Cuttlefish Webrtc Audio Device Id";
-  strncpy(name, device_name, webrtc::kAdmMaxDeviceNameSize);
-  name[webrtc::kAdmMaxDeviceNameSize - 1] = '\0';
-  strncpy(guid, device_guid, webrtc::kAdmMaxGuidSize);
-  guid[webrtc::kAdmMaxGuidSize - 1] = '\0';
-  return 0;
-}
-
-// Device selection
-int32_t CfAudioDeviceModule::SetPlayoutDevice(uint16_t index) { return 0; }
-int32_t CfAudioDeviceModule::SetPlayoutDevice(WindowsDeviceType device) {
-  return -1;
-}
-int32_t CfAudioDeviceModule::SetRecordingDevice(uint16_t index) { return 0; }
-int32_t CfAudioDeviceModule::SetRecordingDevice(WindowsDeviceType device) {
-  return -1;
-}
-
-// Audio transport initialization
-int32_t CfAudioDeviceModule::PlayoutIsAvailable(bool* available) {
-  *available = true;
-  return 0;
-}
-int32_t CfAudioDeviceModule::InitPlayout() { return 0; }
-bool CfAudioDeviceModule::PlayoutIsInitialized() const { return true; }
-int32_t CfAudioDeviceModule::RecordingIsAvailable(bool* available) {
-  *available = 0;
-  return 0;
-}
-int32_t CfAudioDeviceModule::InitRecording() { return 0; }
-bool CfAudioDeviceModule::RecordingIsInitialized() const { return true; }
-
-// Audio transport control
-int32_t CfAudioDeviceModule::StartPlayout() {
-  playing_ = true;
-  return 0;
-}
-int32_t CfAudioDeviceModule::StopPlayout() {
-  playing_ = false;
-  return 0;
-}
-bool CfAudioDeviceModule::Playing() const { return playing_; }
-int32_t CfAudioDeviceModule::StartRecording() {
-  recording_ = true;
-  return 0;
-}
-int32_t CfAudioDeviceModule::StopRecording() {
-  recording_ = false;
-  return 0;
-}
-bool CfAudioDeviceModule::Recording() const { return recording_; }
-
-// Audio mixer initialization
-int32_t CfAudioDeviceModule::InitSpeaker() { return -1; }
-bool CfAudioDeviceModule::SpeakerIsInitialized() const { return false; }
-int32_t CfAudioDeviceModule::InitMicrophone() { return 0; }
-bool CfAudioDeviceModule::MicrophoneIsInitialized() const { return true; }
-
-// Speaker volume controls
-int32_t CfAudioDeviceModule::SpeakerVolumeIsAvailable(bool* available) {
-  *available = false;
-  return 0;
-}
-int32_t CfAudioDeviceModule::SetSpeakerVolume(uint32_t volume) { return -1; }
-int32_t CfAudioDeviceModule::SpeakerVolume(uint32_t* volume) const {
-  return -1;
-}
-int32_t CfAudioDeviceModule::MaxSpeakerVolume(uint32_t* maxVolume) const {
-  return -1;
-}
-int32_t CfAudioDeviceModule::MinSpeakerVolume(uint32_t* minVolume) const {
-  return -1;
-}
-
-// Microphone volume controls
-int32_t CfAudioDeviceModule::MicrophoneVolumeIsAvailable(bool* available) {
-  *available = false;
-  return 0;
-}
-int32_t CfAudioDeviceModule::SetMicrophoneVolume(uint32_t volume) { return -1; }
-int32_t CfAudioDeviceModule::MicrophoneVolume(uint32_t* volume) const {
-  return -1;
-}
-int32_t CfAudioDeviceModule::MaxMicrophoneVolume(uint32_t* maxVolume) const {
-  return -1;
-}
-int32_t CfAudioDeviceModule::MinMicrophoneVolume(uint32_t* minVolume) const {
-  return -1;
-}
-
-// Speaker mute control
-int32_t CfAudioDeviceModule::SpeakerMuteIsAvailable(bool* available) {
-  *available = false;
-  return 0;
-}
-int32_t CfAudioDeviceModule::SetSpeakerMute(bool enable) { return -1; }
-int32_t CfAudioDeviceModule::SpeakerMute(bool* enabled) const { return -1; }
-
-// Microphone mute control
-int32_t CfAudioDeviceModule::MicrophoneMuteIsAvailable(bool* available) {
-  *available = false;
-  return 0;
-}
-int32_t CfAudioDeviceModule::SetMicrophoneMute(bool enable) { return -1; }
-int32_t CfAudioDeviceModule::MicrophoneMute(bool* enabled) const { return -1; }
-
-// Stereo support
-int32_t CfAudioDeviceModule::StereoPlayoutIsAvailable(bool* available) const {
-  *available = true;
-  return 0;
-}
-int32_t CfAudioDeviceModule::SetStereoPlayout(bool enable) {
-  stereo_playout_enabled_ = enable;
-  return 0;
-}
-int32_t CfAudioDeviceModule::StereoPlayout(bool* enabled) const {
-  *enabled = stereo_playout_enabled_;
-  return 0;
-}
-int32_t CfAudioDeviceModule::StereoRecordingIsAvailable(bool* available) const {
-  *available = true;
-  return 0;
-}
-int32_t CfAudioDeviceModule::SetStereoRecording(bool enable) {
-  stereo_recording_enabled_ = enable;
-  return 0;
-}
-int32_t CfAudioDeviceModule::StereoRecording(bool* enabled) const {
-  *enabled = stereo_recording_enabled_;
-  return 0;
-}
-
-// Playout delay
-int32_t CfAudioDeviceModule::PlayoutDelay(uint16_t* delayMS) const {
-  // There is currently no way to estimate the real delay for thiese streams.
-  // Given that 10ms buffers are used almost everywhere in the pipeline we know
-  // the delay is at least 10ms, so that's the best guess here.
-  *delayMS = 10;
-  return 0;
-}
-
-// Only supported on Android.
-bool CfAudioDeviceModule::BuiltInAECIsAvailable() const { return false; }
-bool CfAudioDeviceModule::BuiltInAGCIsAvailable() const { return false; }
-bool CfAudioDeviceModule::BuiltInNSIsAvailable() const { return false; }
-
-// Enables the built-in audio effects. Only supported on Android.
-int32_t CfAudioDeviceModule::EnableBuiltInAEC(bool enable) { return -1; }
-int32_t CfAudioDeviceModule::EnableBuiltInAGC(bool enable) { return -1; }
-int32_t CfAudioDeviceModule::EnableBuiltInNS(bool enable) { return -1; }
-
-int32_t CfAudioDeviceModule::GetPlayoutUnderrunCount() const { return -1; }
-
-}  // namespace webrtc_streaming
-}  // namespace cuttlefish
diff --git a/host/frontend/webrtc/lib/audio_device.h b/host/frontend/webrtc/lib/audio_device.h
deleted file mode 100644
index 9b675ee..0000000
--- a/host/frontend/webrtc/lib/audio_device.h
+++ /dev/null
@@ -1,144 +0,0 @@
-/*
- * Copyright (C) 2021 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#pragma once
-
-#include <modules/audio_device/include/audio_device.h>
-
-#include <atomic>
-
-#include "host/frontend/webrtc/lib/audio_source.h"
-
-namespace cuttlefish {
-namespace webrtc_streaming {
-
-class CfAudioDeviceModule : public webrtc::AudioDeviceModule,
-                            public AudioSource {
- public:
-  CfAudioDeviceModule();
-  ~CfAudioDeviceModule() override = default;
-
-  // Returns number of frames if there is data available, 0 if the stream is not
-  // playing (no clients or the streams are muted), -1 on error.
-  int GetMoreAudioData(void* data, int bytes_per_samples, int samples_per_channel,
-                       int num_channels, int sample_rate, bool& muted) override;
-
-  // Retrieve the currently utilized audio layer
-  int32_t ActiveAudioLayer(AudioLayer* audioLayer) const override;
-
-  // Full-duplex transportation of PCM audio
-  int32_t RegisterAudioCallback(webrtc::AudioTransport* audioCallback) override;
-
-  // Main initialization and termination
-  int32_t Init() override;
-  int32_t Terminate() override;
-  bool Initialized() const override;
-
-  // Device enumeration
-  int16_t PlayoutDevices() override;
-  int16_t RecordingDevices() override;
-  int32_t PlayoutDeviceName(uint16_t index,
-                            char name[webrtc::kAdmMaxDeviceNameSize],
-                            char guid[webrtc::kAdmMaxGuidSize]) override;
-  int32_t RecordingDeviceName(uint16_t index,
-                              char name[webrtc::kAdmMaxDeviceNameSize],
-                              char guid[webrtc::kAdmMaxGuidSize]) override;
-
-  // Device selection
-  int32_t SetPlayoutDevice(uint16_t index) override;
-  int32_t SetPlayoutDevice(WindowsDeviceType device) override;
-  int32_t SetRecordingDevice(uint16_t index) override;
-  int32_t SetRecordingDevice(WindowsDeviceType device) override;
-
-  // Audio transport initialization
-  int32_t PlayoutIsAvailable(bool* available) override;
-  int32_t InitPlayout() override;
-  bool PlayoutIsInitialized() const override;
-  int32_t RecordingIsAvailable(bool* available) override;
-  int32_t InitRecording() override;
-  bool RecordingIsInitialized() const override;
-
-  // Audio transport control
-  int32_t StartPlayout() override;
-  int32_t StopPlayout() override;
-  bool Playing() const override;
-  int32_t StartRecording() override;
-  int32_t StopRecording() override;
-  bool Recording() const override;
-
-  // Audio mixer initialization
-  int32_t InitSpeaker() override;
-  bool SpeakerIsInitialized() const override;
-  int32_t InitMicrophone() override;
-  bool MicrophoneIsInitialized() const override;
-
-  // Speaker volume controls
-  int32_t SpeakerVolumeIsAvailable(bool* available) override;
-  int32_t SetSpeakerVolume(uint32_t volume) override;
-  int32_t SpeakerVolume(uint32_t* volume) const override;
-  int32_t MaxSpeakerVolume(uint32_t* maxVolume) const override;
-  int32_t MinSpeakerVolume(uint32_t* minVolume) const override;
-
-  // Microphone volume controls
-  int32_t MicrophoneVolumeIsAvailable(bool* available) override;
-  int32_t SetMicrophoneVolume(uint32_t volume) override;
-  int32_t MicrophoneVolume(uint32_t* volume) const override;
-  int32_t MaxMicrophoneVolume(uint32_t* maxVolume) const override;
-  int32_t MinMicrophoneVolume(uint32_t* minVolume) const override;
-
-  // Speaker mute control
-  int32_t SpeakerMuteIsAvailable(bool* available) override;
-  int32_t SetSpeakerMute(bool enable) override;
-  int32_t SpeakerMute(bool* enabled) const override;
-
-  // Microphone mute control
-  int32_t MicrophoneMuteIsAvailable(bool* available) override;
-  int32_t SetMicrophoneMute(bool enable) override;
-  int32_t MicrophoneMute(bool* enabled) const override;
-
-  // Stereo support
-  int32_t StereoPlayoutIsAvailable(bool* available) const override;
-  int32_t SetStereoPlayout(bool enable) override;
-  int32_t StereoPlayout(bool* enabled) const override;
-  int32_t StereoRecordingIsAvailable(bool* available) const override;
-  int32_t SetStereoRecording(bool enable) override;
-  int32_t StereoRecording(bool* enabled) const override;
-
-  // Playout delay
-  int32_t PlayoutDelay(uint16_t* delayMS) const override;
-
-  // Only supported on Android.
-  bool BuiltInAECIsAvailable() const override;
-  bool BuiltInAGCIsAvailable() const override;
-  bool BuiltInNSIsAvailable() const override;
-
-  // Enables the built-in audio effects. Only supported on Android.
-  int32_t EnableBuiltInAEC(bool enable) override;
-  int32_t EnableBuiltInAGC(bool enable) override;
-  int32_t EnableBuiltInNS(bool enable) override;
-
-  // Play underrun count. Only supported on Android (guest).
-  int32_t GetPlayoutUnderrunCount() const override;
-
- private:
-  webrtc::AudioTransport* audio_callback_ = nullptr;
-  bool stereo_playout_enabled_ = true;
-  bool stereo_recording_enabled_ = true;
-  std::atomic<bool> playing_ = false;
-  std::atomic<bool> recording_ = false;
-};
-}  // namespace webrtc_streaming
-}  // namespace cuttlefish
diff --git a/host/frontend/webrtc/lib/audio_sink.h b/host/frontend/webrtc/lib/audio_sink.h
deleted file mode 100644
index e54170b..0000000
--- a/host/frontend/webrtc/lib/audio_sink.h
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#pragma once
-
-#include <memory>
-
-#include "host/frontend/webrtc/lib/audio_frame_buffer.h"
-
-namespace cuttlefish {
-namespace webrtc_streaming {
-
-class AudioSink {
- public:
-  virtual ~AudioSink() = default;
-  virtual void OnFrame(std::shared_ptr<AudioFrameBuffer> frame,
-                       int64_t timestamp_us) = 0;
-};
-
-}  // namespace webrtc_streaming
-}  // namespace cuttlefish
diff --git a/host/frontend/webrtc/lib/audio_track_source_impl.cpp b/host/frontend/webrtc/lib/audio_track_source_impl.cpp
deleted file mode 100644
index 334cbb5..0000000
--- a/host/frontend/webrtc/lib/audio_track_source_impl.cpp
+++ /dev/null
@@ -1,77 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "host/frontend/webrtc/lib/audio_track_source_impl.h"
-
-#include <android-base/logging.h>
-
-namespace cuttlefish {
-namespace webrtc_streaming {
-
-void AudioTrackSourceImpl::SetVolume(double volume) {
-  std::lock_guard<std::mutex> lock(observers_mutex_);
-  for (auto observer : audio_observers_) {
-    observer->OnSetVolume(volume);
-  }
-}
-
-void AudioTrackSourceImpl::RegisterAudioObserver(AudioObserver* observer) {
-  std::lock_guard<std::mutex> lock(observers_mutex_);
-  audio_observers_.insert(observer);
-}
-void AudioTrackSourceImpl::UnregisterAudioObserver(AudioObserver* observer) {
-  std::lock_guard<std::mutex> lock(observers_mutex_);
-  audio_observers_.erase(observer);
-}
-
-void AudioTrackSourceImpl::AddSink(webrtc::AudioTrackSinkInterface* sink) {
-  std::lock_guard<std::mutex> lock(sinks_mutex_);
-  sinks_.insert(sink);
-}
-
-void AudioTrackSourceImpl::RemoveSink(webrtc::AudioTrackSinkInterface* sink) {
-  std::lock_guard<std::mutex> lock(sinks_mutex_);
-  sinks_.erase(sink);
-}
-
-const cricket::AudioOptions AudioTrackSourceImpl::options() const {
-  return cricket::AudioOptions();
-}
-
-void AudioTrackSourceImpl::OnFrame(std::shared_ptr<AudioFrameBuffer> frame,
-                                   int64_t timestamp_ms) {
-    std::lock_guard<std::mutex> lock(sinks_mutex_);
-    for (auto sink : sinks_) {
-      sink->OnData(frame->data(), frame->bits_per_sample(),
-                   frame->sample_rate(), frame->channels(), frame->frames(),
-                   timestamp_ms);
-    }
-}
-
-AudioTrackSourceImpl::SourceState AudioTrackSourceImpl::state() const {
-  return SourceState::kLive;
-}
-
-bool AudioTrackSourceImpl::remote() const { return false; }
-
-void AudioTrackSourceImpl::RegisterObserver(
-    webrtc::ObserverInterface* /*observer*/) {}
-
-void AudioTrackSourceImpl::UnregisterObserver(
-    webrtc::ObserverInterface* /*observer*/) {}
-
-}  // namespace webrtc_streaming
-}  // namespace cuttlefish
diff --git a/host/frontend/webrtc/lib/audio_track_source_impl.h b/host/frontend/webrtc/lib/audio_track_source_impl.h
deleted file mode 100644
index 0a72fe4..0000000
--- a/host/frontend/webrtc/lib/audio_track_source_impl.h
+++ /dev/null
@@ -1,87 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#pragma once
-
-#include <mutex>
-#include <set>
-
-#include <api/media_stream_interface.h>
-
-#include "host/frontend/webrtc/lib/audio_sink.h"
-
-namespace cuttlefish {
-namespace webrtc_streaming {
-
-class AudioTrackSourceImpl : public webrtc::AudioSourceInterface {
- public:
-  AudioTrackSourceImpl() = default;
-
-  // Sets the volume of the source. |volume| is in  the range of [0, 10].
-  void SetVolume(double volume) override;
-
-  void RegisterAudioObserver(AudioObserver* observer) override;
-  void UnregisterAudioObserver(AudioObserver* observer) override;
-
-  void AddSink(webrtc::AudioTrackSinkInterface* sink) override;
-  void RemoveSink(webrtc::AudioTrackSinkInterface* sink) override;
-
-  // Returns options for the AudioSource.
-  // (for some of the settings this approach is broken, e.g. setting
-  // audio network adaptation on the source is the wrong layer of abstraction).
-  virtual const cricket::AudioOptions options() const;
-
-  void OnFrame(std::shared_ptr<AudioFrameBuffer> frame, int64_t timestamp_ms);
-
-  // MediaSourceInterface implementation
-  SourceState state() const override;
-  bool remote() const override;
-
-  // NotifierInterface implementation
-  void RegisterObserver(webrtc::ObserverInterface* observer) override;
-  void UnregisterObserver(webrtc::ObserverInterface* observer) override;
-
- private:
-  std::set<AudioObserver*> audio_observers_;
-  std::mutex observers_mutex_;
-  std::set<webrtc::AudioTrackSinkInterface*> sinks_;
-  std::mutex sinks_mutex_;
-};
-
-// Wraps an AudioTrackSourceImpl as an implementation of the AudioSink
-// interface. This is needed as the AudioTrackSourceImpl is a reference counted
-// object that should only be referenced by rtc::scoped_refptr pointers, but the
-// AudioSink interface is not a reference counted object and therefore not
-// compatible with that kind of pointers. This class can be referenced by a
-// shared pointer and it in turn holds a scoped_refptr to the wrapped object.
-class AudioTrackSourceImplSinkWrapper : public AudioSink {
- public:
-  virtual ~AudioTrackSourceImplSinkWrapper() = default;
-
-  AudioTrackSourceImplSinkWrapper(rtc::scoped_refptr<AudioTrackSourceImpl> obj)
-      : track_source_impl_(obj) {}
-
-  void OnFrame(std::shared_ptr<AudioFrameBuffer> frame,
-               int64_t timestamp_ms) override {
-    track_source_impl_->OnFrame(frame, timestamp_ms);
-  }
-
- private:
-  rtc::scoped_refptr<AudioTrackSourceImpl> track_source_impl_;
-};
-
-}  // namespace webrtc_streaming
-}  // namespace cuttlefish
diff --git a/host/frontend/webrtc/lib/camera_streamer.h b/host/frontend/webrtc/lib/camera_streamer.h
deleted file mode 100644
index 3afed62..0000000
--- a/host/frontend/webrtc/lib/camera_streamer.h
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * Copyright (C) 2021 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#pragma once
-#include <api/video/i420_buffer.h>
-#include <api/video/video_frame.h>
-#include <api/video/video_sink_interface.h>
-#include <json/json.h>
-
-#include "common/libs/utils/vsock_connection.h"
-#include "host/frontend/webrtc/lib/camera_controller.h"
-
-#include <atomic>
-#include <mutex>
-#include <thread>
-#include <vector>
-
-namespace cuttlefish {
-namespace webrtc_streaming {
-
-class CameraStreamer : public rtc::VideoSinkInterface<webrtc::VideoFrame>,
-                       public CameraController {
- public:
-  CameraStreamer(unsigned int port, unsigned int cid);
-  ~CameraStreamer();
-
-  CameraStreamer(const CameraStreamer& other) = delete;
-  CameraStreamer& operator=(const CameraStreamer& other) = delete;
-
-  void OnFrame(const webrtc::VideoFrame& frame) override;
-
-  void HandleMessage(const Json::Value& message) override;
-  void HandleMessage(const std::vector<char>& message) override;
-
- private:
-  using Resolution = struct {
-    int32_t width;
-    int32_t height;
-  };
-  bool ForwardClientMessage(const Json::Value& message);
-  Resolution GetResolutionFromSettings(const Json::Value& settings);
-  bool VsockSendYUVFrame(const webrtc::I420BufferInterface* frame);
-  bool IsConnectionReady();
-  void StartReadLoop();
-  void Disconnect();
-  std::future<bool> pending_connection_;
-  VsockClientConnection cvd_connection_;
-  std::atomic<Resolution> resolution_;
-  std::mutex settings_mutex_;
-  std::string settings_buffer_;
-  std::mutex frame_mutex_;
-  std::mutex onframe_mutex_;
-  rtc::scoped_refptr<webrtc::I420Buffer> scaled_frame_;
-  unsigned int cid_;
-  unsigned int port_;
-  std::thread reader_thread_;
-  std::atomic<bool> camera_session_active_;
-};
-
-}  // namespace webrtc_streaming
-}  // namespace cuttlefish
diff --git a/host/frontend/webrtc/lib/client_handler.cpp b/host/frontend/webrtc/lib/client_handler.cpp
deleted file mode 100644
index 9be1ca0..0000000
--- a/host/frontend/webrtc/lib/client_handler.cpp
+++ /dev/null
@@ -1,930 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#define LOG_TAG "ClientHandler"
-
-#include "host/frontend/webrtc/lib/client_handler.h"
-
-#include <vector>
-
-#include <json/json.h>
-#include <json/writer.h>
-#include <netdb.h>
-#include <openssl/rand.h>
-
-#include <android-base/logging.h>
-
-#include "host/frontend/webrtc/lib/keyboard.h"
-#include "host/frontend/webrtc/lib/utils.h"
-#include "host/libs/config/cuttlefish_config.h"
-
-namespace cuttlefish {
-namespace webrtc_streaming {
-
-namespace {
-
-static constexpr auto kInputChannelLabel = "input-channel";
-static constexpr auto kAdbChannelLabel = "adb-channel";
-static constexpr auto kBluetoothChannelLabel = "bluetooth-channel";
-static constexpr auto kCameraDataChannelLabel = "camera-data-channel";
-static constexpr auto kCameraDataEof = "EOF";
-
-class CvdCreateSessionDescriptionObserver
-    : public webrtc::CreateSessionDescriptionObserver {
- public:
-  CvdCreateSessionDescriptionObserver(
-      std::weak_ptr<ClientHandler> client_handler)
-      : client_handler_(client_handler) {}
-
-  void OnSuccess(webrtc::SessionDescriptionInterface *desc) override {
-    auto client_handler = client_handler_.lock();
-    if (client_handler) {
-      client_handler->OnCreateSDPSuccess(desc);
-    }
-  }
-  void OnFailure(webrtc::RTCError error) override {
-    auto client_handler = client_handler_.lock();
-    if (client_handler) {
-      client_handler->OnCreateSDPFailure(error);
-    }
-  }
-
- private:
-  std::weak_ptr<ClientHandler> client_handler_;
-};
-
-class CvdSetSessionDescriptionObserver
-    : public webrtc::SetSessionDescriptionObserver {
- public:
-  CvdSetSessionDescriptionObserver(std::weak_ptr<ClientHandler> client_handler)
-      : client_handler_(client_handler) {}
-
-  void OnSuccess() override {
-    // local description set, nothing else to do
-  }
-  void OnFailure(webrtc::RTCError error) override {
-    auto client_handler = client_handler_.lock();
-    if (client_handler) {
-      client_handler->OnSetSDPFailure(error);
-    }
-  }
-
- private:
-  std::weak_ptr<ClientHandler> client_handler_;
-};
-
-class CvdOnSetRemoteDescription
-    : public webrtc::SetRemoteDescriptionObserverInterface {
- public:
-  CvdOnSetRemoteDescription(
-      std::function<void(webrtc::RTCError error)> on_error)
-      : on_error_(on_error) {}
-
-  void OnSetRemoteDescriptionComplete(webrtc::RTCError error) override {
-    on_error_(error);
-  }
-
- private:
-  std::function<void(webrtc::RTCError error)> on_error_;
-};
-
-}  // namespace
-
-// Video streams initiating in the client may be added and removed at unexpected
-// times, causing the webrtc objects to be destroyed and created every time.
-// This class hides away that complexity and allows to set up sinks only once.
-class ClientVideoTrackImpl : public ClientVideoTrackInterface {
- public:
-  void AddOrUpdateSink(rtc::VideoSinkInterface<webrtc::VideoFrame> *sink,
-                       const rtc::VideoSinkWants &wants) override {
-    sink_ = sink;
-    wants_ = wants;
-    if (video_track_) {
-      video_track_->AddOrUpdateSink(sink, wants);
-    }
-  }
-
-  void SetVideoTrack(webrtc::VideoTrackInterface *track) {
-    video_track_ = track;
-    if (sink_) {
-      video_track_->AddOrUpdateSink(sink_, wants_);
-    }
-  }
-
-  void UnsetVideoTrack(webrtc::VideoTrackInterface *track) {
-    if (track == video_track_) {
-      video_track_ = nullptr;
-    }
-  }
-
- private:
-  webrtc::VideoTrackInterface* video_track_;
-  rtc::VideoSinkInterface<webrtc::VideoFrame> *sink_ = nullptr;
-  rtc::VideoSinkWants wants_ = {};
-};
-
-class InputChannelHandler : public webrtc::DataChannelObserver {
- public:
-  InputChannelHandler(
-      rtc::scoped_refptr<webrtc::DataChannelInterface> input_channel,
-      std::shared_ptr<ConnectionObserver> observer);
-  ~InputChannelHandler() override;
-
-  void OnStateChange() override;
-  void OnMessage(const webrtc::DataBuffer &msg) override;
-
- private:
-  rtc::scoped_refptr<webrtc::DataChannelInterface> input_channel_;
-  std::shared_ptr<ConnectionObserver> observer_;
-};
-
-class AdbChannelHandler : public webrtc::DataChannelObserver {
- public:
-  AdbChannelHandler(
-      rtc::scoped_refptr<webrtc::DataChannelInterface> adb_channel,
-      std::shared_ptr<ConnectionObserver> observer);
-  ~AdbChannelHandler() override;
-
-  void OnStateChange() override;
-  void OnMessage(const webrtc::DataBuffer &msg) override;
-
- private:
-  rtc::scoped_refptr<webrtc::DataChannelInterface> adb_channel_;
-  std::shared_ptr<ConnectionObserver> observer_;
-  bool channel_open_reported_ = false;
-};
-
-class ControlChannelHandler : public webrtc::DataChannelObserver {
- public:
-  ControlChannelHandler(
-      rtc::scoped_refptr<webrtc::DataChannelInterface> control_channel,
-      std::shared_ptr<ConnectionObserver> observer);
-  ~ControlChannelHandler() override;
-
-  void OnStateChange() override;
-  void OnMessage(const webrtc::DataBuffer &msg) override;
-
-  void Send(const Json::Value &message);
-  void Send(const uint8_t *msg, size_t size, bool binary);
-
- private:
-  rtc::scoped_refptr<webrtc::DataChannelInterface> control_channel_;
-  std::shared_ptr<ConnectionObserver> observer_;
-};
-
-class BluetoothChannelHandler : public webrtc::DataChannelObserver {
- public:
-  BluetoothChannelHandler(
-      rtc::scoped_refptr<webrtc::DataChannelInterface> bluetooth_channel,
-      std::shared_ptr<ConnectionObserver> observer);
-  ~BluetoothChannelHandler() override;
-
-  void OnStateChange() override;
-  void OnMessage(const webrtc::DataBuffer &msg) override;
-
- private:
-  rtc::scoped_refptr<webrtc::DataChannelInterface> bluetooth_channel_;
-  std::shared_ptr<ConnectionObserver> observer_;
-  bool channel_open_reported_ = false;
-};
-
-class CameraChannelHandler : public webrtc::DataChannelObserver {
- public:
-  CameraChannelHandler(
-      rtc::scoped_refptr<webrtc::DataChannelInterface> bluetooth_channel,
-      std::shared_ptr<ConnectionObserver> observer);
-  ~CameraChannelHandler() override;
-
-  void OnStateChange() override;
-  void OnMessage(const webrtc::DataBuffer &msg) override;
-
- private:
-  rtc::scoped_refptr<webrtc::DataChannelInterface> camera_channel_;
-  std::shared_ptr<ConnectionObserver> observer_;
-  std::vector<char> receive_buffer_;
-};
-
-InputChannelHandler::InputChannelHandler(
-    rtc::scoped_refptr<webrtc::DataChannelInterface> input_channel,
-    std::shared_ptr<ConnectionObserver> observer)
-    : input_channel_(input_channel), observer_(observer) {
-  input_channel->RegisterObserver(this);
-}
-
-InputChannelHandler::~InputChannelHandler() {
-  input_channel_->UnregisterObserver();
-}
-
-void InputChannelHandler::OnStateChange() {
-  LOG(VERBOSE) << "Input channel state changed to "
-               << webrtc::DataChannelInterface::DataStateString(
-                      input_channel_->state());
-}
-
-void InputChannelHandler::OnMessage(const webrtc::DataBuffer &msg) {
-  if (msg.binary) {
-    // TODO (jemoreira) consider binary protocol to avoid JSON parsing overhead
-    LOG(ERROR) << "Received invalid (binary) data on input channel";
-    return;
-  }
-  auto size = msg.size();
-
-  Json::Value evt;
-  Json::CharReaderBuilder builder;
-  std::unique_ptr<Json::CharReader> json_reader(builder.newCharReader());
-  std::string errorMessage;
-  auto str = msg.data.cdata<char>();
-  if (!json_reader->parse(str, str + size, &evt, &errorMessage) < 0) {
-    LOG(ERROR) << "Received invalid JSON object over input channel: "
-               << errorMessage;
-    return;
-  }
-  if (!evt.isMember("type") || !evt["type"].isString()) {
-    LOG(ERROR) << "Input event doesn't have a valid 'type' field: "
-               << evt.toStyledString();
-    return;
-  }
-  auto event_type = evt["type"].asString();
-  if (event_type == "mouse") {
-    auto result =
-        ValidationResult::ValidateJsonObject(evt, "mouse",
-                           {{"down", Json::ValueType::intValue},
-                            {"x", Json::ValueType::intValue},
-                            {"y", Json::ValueType::intValue},
-                            {"display_label", Json::ValueType::stringValue}});
-    if (!result.ok()) {
-      LOG(ERROR) << result.error();
-      return;
-    }
-    auto label = evt["display_label"].asString();
-    int32_t down = evt["down"].asInt();
-    int32_t x = evt["x"].asInt();
-    int32_t y = evt["y"].asInt();
-
-    observer_->OnTouchEvent(label, x, y, down);
-  } else if (event_type == "multi-touch") {
-    auto result =
-        ValidationResult::ValidateJsonObject(evt, "multi-touch",
-                           {{"id", Json::ValueType::arrayValue},
-                            {"down", Json::ValueType::intValue},
-                            {"x", Json::ValueType::arrayValue},
-                            {"y", Json::ValueType::arrayValue},
-                            {"slot", Json::ValueType::arrayValue},
-                            {"display_label", Json::ValueType::stringValue}});
-    if (!result.ok()) {
-      LOG(ERROR) << result.error();
-      return;
-    }
-
-    auto label = evt["display_label"].asString();
-    auto idArr = evt["id"];
-    int32_t down = evt["down"].asInt();
-    auto xArr = evt["x"];
-    auto yArr = evt["y"];
-    auto slotArr = evt["slot"];
-    int size = evt["id"].size();
-
-    observer_->OnMultiTouchEvent(label, idArr, slotArr, xArr, yArr, down, size);
-  } else if (event_type == "keyboard") {
-    auto result =
-        ValidationResult::ValidateJsonObject(evt, "keyboard",
-                           {{"event_type", Json::ValueType::stringValue},
-                            {"keycode", Json::ValueType::stringValue}});
-    if (!result.ok()) {
-      LOG(ERROR) << result.error();
-      return;
-    }
-    auto down = evt["event_type"].asString() == std::string("keydown");
-    auto code = DomKeyCodeToLinux(evt["keycode"].asString());
-    observer_->OnKeyboardEvent(code, down);
-  } else {
-    LOG(ERROR) << "Unrecognized event type: " << event_type;
-    return;
-  }
-}
-
-AdbChannelHandler::AdbChannelHandler(
-    rtc::scoped_refptr<webrtc::DataChannelInterface> adb_channel,
-    std::shared_ptr<ConnectionObserver> observer)
-    : adb_channel_(adb_channel), observer_(observer) {
-  adb_channel->RegisterObserver(this);
-}
-
-AdbChannelHandler::~AdbChannelHandler() { adb_channel_->UnregisterObserver(); }
-
-void AdbChannelHandler::OnStateChange() {
-  LOG(VERBOSE) << "Adb channel state changed to "
-               << webrtc::DataChannelInterface::DataStateString(
-                      adb_channel_->state());
-}
-
-void AdbChannelHandler::OnMessage(const webrtc::DataBuffer &msg) {
-  // Report the adb channel as open on the first message received instead of at
-  // channel open, this avoids unnecessarily connecting to the adb daemon for
-  // clients that don't use ADB.
-  if (!channel_open_reported_) {
-    observer_->OnAdbChannelOpen([this](const uint8_t *msg, size_t size) {
-      webrtc::DataBuffer buffer(rtc::CopyOnWriteBuffer(msg, size),
-                                true /*binary*/);
-      // TODO (b/185832105): When the SCTP channel is congested data channel
-      // messages are buffered up to 16MB, when the buffer is full the channel
-      // is abruptly closed. Keep track of the buffered data to avoid losing the
-      // adb data channel.
-      adb_channel_->Send(buffer);
-      return true;
-    });
-    channel_open_reported_ = true;
-  }
-  observer_->OnAdbMessage(msg.data.cdata(), msg.size());
-}
-
-ControlChannelHandler::ControlChannelHandler(
-    rtc::scoped_refptr<webrtc::DataChannelInterface> control_channel,
-    std::shared_ptr<ConnectionObserver> observer)
-    : control_channel_(control_channel), observer_(observer) {
-  control_channel->RegisterObserver(this);
-  observer_->OnControlChannelOpen([this](const Json::Value& message) {
-    this->Send(message);
-    return true;
-  });
-}
-
-ControlChannelHandler::~ControlChannelHandler() {
-  control_channel_->UnregisterObserver();
-}
-
-void ControlChannelHandler::OnStateChange() {
-  LOG(VERBOSE) << "Control channel state changed to "
-               << webrtc::DataChannelInterface::DataStateString(
-                      control_channel_->state());
-}
-
-void ControlChannelHandler::OnMessage(const webrtc::DataBuffer &msg) {
-  observer_->OnControlMessage(msg.data.cdata(), msg.size());
-}
-
-void ControlChannelHandler::Send(const Json::Value& message) {
-  Json::StreamWriterBuilder factory;
-  std::string message_string = Json::writeString(factory, message);
-  Send(reinterpret_cast<const uint8_t*>(message_string.c_str()),
-       message_string.size(), /*binary=*/false);
-}
-
-void ControlChannelHandler::Send(const uint8_t *msg, size_t size, bool binary) {
-  webrtc::DataBuffer buffer(rtc::CopyOnWriteBuffer(msg, size), binary);
-  control_channel_->Send(buffer);
-}
-
-BluetoothChannelHandler::BluetoothChannelHandler(
-    rtc::scoped_refptr<webrtc::DataChannelInterface> bluetooth_channel,
-    std::shared_ptr<ConnectionObserver> observer)
-    : bluetooth_channel_(bluetooth_channel), observer_(observer) {
-  bluetooth_channel_->RegisterObserver(this);
-}
-
-BluetoothChannelHandler::~BluetoothChannelHandler() {
-  bluetooth_channel_->UnregisterObserver();
-}
-
-void BluetoothChannelHandler::OnStateChange() {
-  LOG(VERBOSE) << "Bluetooth channel state changed to "
-               << webrtc::DataChannelInterface::DataStateString(
-                      bluetooth_channel_->state());
-}
-
-void BluetoothChannelHandler::OnMessage(const webrtc::DataBuffer &msg) {
-  // Notify bluetooth channel opening when actually using the channel,
-  // it has the same reason with AdbChannelHandler::OnMessage,
-  // to avoid unnecessarily connection for Rootcanal.
-  if (channel_open_reported_ == false) {
-    channel_open_reported_ = true;
-    observer_->OnBluetoothChannelOpen([this](const uint8_t *msg, size_t size) {
-      webrtc::DataBuffer buffer(rtc::CopyOnWriteBuffer(msg, size),
-                                true /*binary*/);
-      // TODO (b/185832105): When the SCTP channel is congested data channel
-      // messages are buffered up to 16MB, when the buffer is full the channel
-      // is abruptly closed. Keep track of the buffered data to avoid losing the
-      // adb data channel.
-      bluetooth_channel_->Send(buffer);
-      return true;
-    });
-  }
-
-  observer_->OnBluetoothMessage(msg.data.cdata(), msg.size());
-}
-
-CameraChannelHandler::CameraChannelHandler(
-    rtc::scoped_refptr<webrtc::DataChannelInterface> camera_channel,
-    std::shared_ptr<ConnectionObserver> observer)
-    : camera_channel_(camera_channel), observer_(observer) {
-  camera_channel_->RegisterObserver(this);
-}
-
-CameraChannelHandler::~CameraChannelHandler() {
-  camera_channel_->UnregisterObserver();
-}
-
-void CameraChannelHandler::OnStateChange() {
-  LOG(VERBOSE) << "Camera channel state changed to "
-               << webrtc::DataChannelInterface::DataStateString(
-                      camera_channel_->state());
-}
-
-void CameraChannelHandler::OnMessage(const webrtc::DataBuffer &msg) {
-  auto msg_data = msg.data.cdata<char>();
-  if (msg.size() == strlen(kCameraDataEof) &&
-      !strncmp(msg_data, kCameraDataEof, msg.size())) {
-    // Send complete buffer to observer on EOF marker
-    observer_->OnCameraData(receive_buffer_);
-    receive_buffer_.clear();
-    return;
-  }
-  // Otherwise buffer up data
-  receive_buffer_.insert(receive_buffer_.end(), msg_data,
-                         msg_data + msg.size());
-}
-
-std::shared_ptr<ClientHandler> ClientHandler::Create(
-    int client_id, std::shared_ptr<ConnectionObserver> observer,
-    std::function<void(const Json::Value &)> send_to_client_cb,
-    std::function<void(bool)> on_connection_changed_cb) {
-  return std::shared_ptr<ClientHandler>(new ClientHandler(
-      client_id, observer, send_to_client_cb, on_connection_changed_cb));
-}
-
-ClientHandler::ClientHandler(
-    int client_id, std::shared_ptr<ConnectionObserver> observer,
-    std::function<void(const Json::Value &)> send_to_client_cb,
-    std::function<void(bool)> on_connection_changed_cb)
-    : client_id_(client_id),
-      observer_(observer),
-      send_to_client_(send_to_client_cb),
-      on_connection_changed_cb_(on_connection_changed_cb),
-      camera_track_(new ClientVideoTrackImpl()) {}
-
-ClientHandler::~ClientHandler() {
-  for (auto &data_channel : data_channels_) {
-    data_channel->UnregisterObserver();
-  }
-}
-
-bool ClientHandler::SetPeerConnection(
-    rtc::scoped_refptr<webrtc::PeerConnectionInterface> peer_connection) {
-  peer_connection_ = peer_connection;
-
-  // libwebrtc configures the video encoder with a start bitrate of just 300kbs
-  // which causes it to drop the first 4 frames it receives. Any value over 2Mbs
-  // will be capped at 2Mbs when passed to the encoder by the peer_connection
-  // object, so we pass the maximum possible value here.
-  webrtc::BitrateSettings bitrate_settings;
-  bitrate_settings.start_bitrate_bps = 2000000; // 2Mbs
-  peer_connection_->SetBitrate(bitrate_settings);
-  // At least one data channel needs to be created on the side that makes the
-  // SDP offer (the device) for data channels to be enabled at all.
-  // This channel is meant to carry control commands from the client.
-  auto control_channel = peer_connection_->CreateDataChannel(
-      "device-control", nullptr /* config */);
-  if (!control_channel) {
-    LOG(ERROR) << "Failed to create control data channel";
-    return false;
-  }
-  control_handler_.reset(new ControlChannelHandler(control_channel, observer_));
-  return true;
-}
-
-bool ClientHandler::AddDisplay(
-    rtc::scoped_refptr<webrtc::VideoTrackInterface> video_track,
-    const std::string &label) {
-  // Send each track as part of a different stream with the label as id
-  auto err_or_sender =
-      peer_connection_->AddTrack(video_track, {label} /* stream_id */);
-  if (!err_or_sender.ok()) {
-    LOG(ERROR) << "Failed to add video track to the peer connection";
-    return false;
-  }
-  // TODO (b/154138394): use the returned sender (err_or_sender.value()) to
-  // remove the display from the connection.
-  return true;
-}
-
-bool ClientHandler::AddAudio(
-    rtc::scoped_refptr<webrtc::AudioTrackInterface> audio_track,
-    const std::string &label) {
-  // Send each track as part of a different stream with the label as id
-  auto err_or_sender =
-      peer_connection_->AddTrack(audio_track, {label} /* stream_id */);
-  if (!err_or_sender.ok()) {
-    LOG(ERROR) << "Failed to add video track to the peer connection";
-    return false;
-  }
-  return true;
-}
-
-ClientVideoTrackInterface* ClientHandler::GetCameraStream() {
-  return camera_track_.get();
-}
-
-void ClientHandler::LogAndReplyError(const std::string &error_msg) const {
-  LOG(ERROR) << error_msg;
-  Json::Value reply;
-  reply["type"] = "error";
-  reply["error"] = error_msg;
-  send_to_client_(reply);
-}
-
-void ClientHandler::AddPendingIceCandidates() {
-  // Add any ice candidates that arrived before the remote description
-  for (auto& candidate: pending_ice_candidates_) {
-    peer_connection_->AddIceCandidate(std::move(candidate),
-                                      [this](webrtc::RTCError error) {
-                                        if (!error.ok()) {
-                                          LogAndReplyError(error.message());
-                                        }
-                                      });
-  }
-  pending_ice_candidates_.clear();
-}
-
-void ClientHandler::OnCreateSDPSuccess(
-    webrtc::SessionDescriptionInterface *desc) {
-  std::string offer_str;
-  desc->ToString(&offer_str);
-  std::string sdp_type = desc->type();
-  peer_connection_->SetLocalDescription(
-      // The peer connection wraps this raw pointer with a scoped_refptr, so
-      // it's guaranteed to be deleted at some point
-      new rtc::RefCountedObject<CvdSetSessionDescriptionObserver>(
-          weak_from_this()),
-      desc);
-  // The peer connection takes ownership of the description so it should not be
-  // used after this
-  desc = nullptr;
-
-  Json::Value reply;
-  reply["type"] = sdp_type;
-  reply["sdp"] = offer_str;
-
-  state_ = State::kAwaitingAnswer;
-  send_to_client_(reply);
-}
-
-void ClientHandler::OnCreateSDPFailure(webrtc::RTCError error) {
-  state_ = State::kFailed;
-  LogAndReplyError(error.message());
-  Close();
-}
-
-void ClientHandler::OnSetSDPFailure(webrtc::RTCError error) {
-  state_ = State::kFailed;
-  LogAndReplyError(error.message());
-  LOG(ERROR) << "Error setting local description: Either there is a bug in "
-                "libwebrtc or the local description was (incorrectly) modified "
-                "after creating it";
-  Close();
-}
-
-void ClientHandler::HandleMessage(const Json::Value &message) {
-  {
-    auto result = ValidationResult::ValidateJsonObject(message, "",
-                                     {{"type", Json::ValueType::stringValue}});
-    if (!result.ok()) {
-      LogAndReplyError(result.error());
-      return;
-    }
-  }
-  auto type = message["type"].asString();
-  if (type == "request-offer") {
-    // Can't check for state being different that kNew because renegotiation can
-    // start in any state after the answer is returned.
-    if (state_ == State::kCreatingOffer) {
-      // An offer has been requested already
-      LogAndReplyError("Multiple requests for offer received from single client");
-      return;
-    }
-    state_ = State::kCreatingOffer;
-    peer_connection_->CreateOffer(
-        // No memory leak here because this is a ref counted objects and the
-        // peer connection immediately wraps it with a scoped_refptr
-        new rtc::RefCountedObject<CvdCreateSessionDescriptionObserver>(
-            weak_from_this()),
-        webrtc::PeerConnectionInterface::RTCOfferAnswerOptions());
-    // The created offer wil be sent to the client on
-    // OnSuccess(webrtc::SessionDescriptionInterface* desc)
-  } else if (type == "offer") {
-    auto result = ValidationResult::ValidateJsonObject(
-        message, type, {{"sdp", Json::ValueType::stringValue}});
-    if (!result.ok()) {
-      LogAndReplyError(result.error());
-      return;
-    }
-    auto remote_desc_str = message["sdp"].asString();
-    auto remote_desc = webrtc::CreateSessionDescription(
-        webrtc::SdpType::kOffer, remote_desc_str, nullptr /*error*/);
-    if (!remote_desc) {
-      LogAndReplyError("Failed to parse answer.");
-      return;
-    }
-
-    rtc::scoped_refptr<webrtc::SetRemoteDescriptionObserverInterface> observer(
-        new rtc::RefCountedObject<
-            CvdOnSetRemoteDescription>([this](webrtc::RTCError error) {
-          if (!error.ok()) {
-            LogAndReplyError(error.message());
-            // The remote description was rejected, this client can't be
-            // trusted anymore.
-            Close();
-            return;
-          }
-          remote_description_added_ = true;
-          AddPendingIceCandidates();
-          peer_connection_->CreateAnswer(
-              // No memory leak here because this is a ref counted objects and
-              // the peer connection immediately wraps it with a scoped_refptr
-              new rtc::RefCountedObject<CvdCreateSessionDescriptionObserver>(
-                  weak_from_this()),
-              webrtc::PeerConnectionInterface::RTCOfferAnswerOptions());
-        }));
-    peer_connection_->SetRemoteDescription(std::move(remote_desc), observer);
-    state_ = State::kConnecting;
-  } else if (type == "answer") {
-    if (state_ != State::kAwaitingAnswer) {
-      LogAndReplyError("Received unexpected SDP answer");
-      return;
-    }
-    auto result = ValidationResult::ValidateJsonObject(message, type,
-                                     {{"sdp", Json::ValueType::stringValue}});
-    if (!result.ok()) {
-      LogAndReplyError(result.error());
-      return;
-    }
-    auto remote_desc_str = message["sdp"].asString();
-    auto remote_desc = webrtc::CreateSessionDescription(
-        webrtc::SdpType::kAnswer, remote_desc_str, nullptr /*error*/);
-    if (!remote_desc) {
-      LogAndReplyError("Failed to parse answer.");
-      return;
-    }
-    rtc::scoped_refptr<webrtc::SetRemoteDescriptionObserverInterface> observer(
-        new rtc::RefCountedObject<CvdOnSetRemoteDescription>(
-            [this](webrtc::RTCError error) {
-              if (!error.ok()) {
-                LogAndReplyError(error.message());
-                // The remote description was rejected, this client can't be
-                // trusted anymore.
-                Close();
-              }
-            }));
-    peer_connection_->SetRemoteDescription(std::move(remote_desc), observer);
-    remote_description_added_ = true;
-    AddPendingIceCandidates();
-    state_ = State::kConnecting;
-
-  } else if (type == "ice-candidate") {
-    {
-      auto result = ValidationResult::ValidateJsonObject(
-          message, type, {{"candidate", Json::ValueType::objectValue}});
-      if (!result.ok()) {
-        LogAndReplyError(result.error());
-        return;
-      }
-    }
-    auto candidate_json = message["candidate"];
-    {
-      auto result =
-          ValidationResult::ValidateJsonObject(candidate_json,
-                                               "ice-candidate/candidate",
-                             {
-                                 {"sdpMid", Json::ValueType::stringValue},
-                                 {"candidate", Json::ValueType::stringValue},
-                                 {"sdpMLineIndex", Json::ValueType::intValue},
-                             });
-      if (!result.ok()) {
-        LogAndReplyError(result.error());
-        return;
-      }
-    }
-    auto mid = candidate_json["sdpMid"].asString();
-    auto candidate_sdp = candidate_json["candidate"].asString();
-    auto line_index = candidate_json["sdpMLineIndex"].asInt();
-
-    std::unique_ptr<webrtc::IceCandidateInterface> candidate(
-        webrtc::CreateIceCandidate(mid, line_index, candidate_sdp,
-                                   nullptr /*error*/));
-    if (!candidate) {
-      LogAndReplyError("Failed to parse ICE candidate");
-      return;
-    }
-    if (remote_description_added_) {
-      peer_connection_->AddIceCandidate(std::move(candidate),
-                                        [this](webrtc::RTCError error) {
-                                          if (!error.ok()) {
-                                            LogAndReplyError(error.message());
-                                          }
-                                        });
-    } else {
-      // Store the ice candidate to be added later if it arrives before the
-      // remote description. This could happen if the client uses polling
-      // instead of websockets because the candidates are generated immediately
-      // after the remote (offer) description is set and the events and the ajax
-      // calls are asynchronous.
-      pending_ice_candidates_.push_back(std::move(candidate));
-    }
-  } else {
-    LogAndReplyError("Unknown client message type: " + type);
-    return;
-  }
-}
-
-void ClientHandler::Close() {
-  // We can't simply call peer_connection_->Close() here because this method
-  // could be called from one of the PeerConnectionObserver callbacks and that
-  // would lead to a deadlock (Close eventually tries to destroy an object that
-  // will then wait for the callback to return -> deadlock). Destroying the
-  // peer_connection_ has the same effect. The only alternative is to postpone
-  // that operation until after the callback returns.
-  on_connection_changed_cb_(false);
-}
-
-void ClientHandler::OnConnectionChange(
-    webrtc::PeerConnectionInterface::PeerConnectionState new_state) {
-  switch (new_state) {
-    case webrtc::PeerConnectionInterface::PeerConnectionState::kNew:
-      break;
-    case webrtc::PeerConnectionInterface::PeerConnectionState::kConnecting:
-      break;
-    case webrtc::PeerConnectionInterface::PeerConnectionState::kConnected:
-      LOG(VERBOSE) << "Client " << client_id_ << ": WebRTC connected";
-      state_ = State::kConnected;
-      observer_->OnConnected(
-          [this](const uint8_t *msg, size_t size, bool binary) {
-            control_handler_->Send(msg, size, binary);
-            return true;
-          });
-      on_connection_changed_cb_(true);
-      break;
-    case webrtc::PeerConnectionInterface::PeerConnectionState::kDisconnected:
-      LOG(VERBOSE) << "Client " << client_id_ << ": Connection disconnected";
-      Close();
-      break;
-    case webrtc::PeerConnectionInterface::PeerConnectionState::kFailed:
-      LOG(ERROR) << "Client " << client_id_ << ": Connection failed";
-      Close();
-      break;
-    case webrtc::PeerConnectionInterface::PeerConnectionState::kClosed:
-      LOG(VERBOSE) << "Client " << client_id_ << ": Connection closed";
-      Close();
-      break;
-  }
-}
-
-void ClientHandler::OnIceCandidate(
-    const webrtc::IceCandidateInterface *candidate) {
-  std::string candidate_sdp;
-  candidate->ToString(&candidate_sdp);
-  auto sdp_mid = candidate->sdp_mid();
-  auto line_index = candidate->sdp_mline_index();
-
-  Json::Value reply;
-  reply["type"] = "ice-candidate";
-  reply["mid"] = sdp_mid;
-  reply["mLineIndex"] = static_cast<Json::UInt64>(line_index);
-  reply["candidate"] = candidate_sdp;
-
-  send_to_client_(reply);
-}
-
-void ClientHandler::OnDataChannel(
-    rtc::scoped_refptr<webrtc::DataChannelInterface> data_channel) {
-  auto label = data_channel->label();
-  if (label == kInputChannelLabel) {
-    input_handler_.reset(new InputChannelHandler(data_channel, observer_));
-  } else if (label == kAdbChannelLabel) {
-    adb_handler_.reset(new AdbChannelHandler(data_channel, observer_));
-  } else if (label == kBluetoothChannelLabel) {
-    bluetooth_handler_.reset(
-        new BluetoothChannelHandler(data_channel, observer_));
-  } else if (label == kCameraDataChannelLabel) {
-    camera_data_handler_.reset(
-        new CameraChannelHandler(data_channel, observer_));
-  } else {
-    LOG(VERBOSE) << "Data channel connected: " << label;
-    data_channels_.push_back(data_channel);
-  }
-}
-
-void ClientHandler::OnRenegotiationNeeded() {
-  state_ = State::kNew;
-  LOG(VERBOSE) << "Client " << client_id_ << " needs renegotiation";
-}
-
-void ClientHandler::OnIceGatheringChange(
-    webrtc::PeerConnectionInterface::IceGatheringState new_state) {
-  std::string state_str;
-  switch (new_state) {
-    case webrtc::PeerConnectionInterface::IceGatheringState::kIceGatheringNew:
-      state_str = "NEW";
-      break;
-    case webrtc::PeerConnectionInterface::IceGatheringState::
-        kIceGatheringGathering:
-      state_str = "GATHERING";
-      break;
-    case webrtc::PeerConnectionInterface::IceGatheringState::
-        kIceGatheringComplete:
-      state_str = "COMPLETE";
-      break;
-    default:
-      state_str = "UNKNOWN";
-  }
-  LOG(VERBOSE) << "Client " << client_id_
-               << ": ICE Gathering state set to: " << state_str;
-}
-
-void ClientHandler::OnIceCandidateError(const std::string &host_candidate,
-                                        const std::string &url, int error_code,
-                                        const std::string &error_text) {
-  LOG(VERBOSE) << "Gathering of an ICE candidate (host candidate: "
-               << host_candidate << ", url: " << url
-               << ") failed: " << error_text;
-}
-
-void ClientHandler::OnIceCandidateError(const std::string &address, int port,
-                                        const std::string &url, int error_code,
-                                        const std::string &error_text) {
-  LOG(VERBOSE) << "Gathering of an ICE candidate (address: " << address
-               << ", port: " << port << ", url: " << url
-               << ") failed: " << error_text;
-}
-
-void ClientHandler::OnSignalingChange(
-    webrtc::PeerConnectionInterface::SignalingState new_state) {
-  // ignore
-}
-void ClientHandler::OnStandardizedIceConnectionChange(
-    webrtc::PeerConnectionInterface::IceConnectionState new_state) {
-  switch (new_state) {
-    case webrtc::PeerConnectionInterface::kIceConnectionNew:
-      LOG(DEBUG) << "ICE connection state: New";
-      break;
-    case webrtc::PeerConnectionInterface::kIceConnectionChecking:
-      LOG(DEBUG) << "ICE connection state: Checking";
-      break;
-    case webrtc::PeerConnectionInterface::kIceConnectionConnected:
-      LOG(DEBUG) << "ICE connection state: Connected";
-      break;
-    case webrtc::PeerConnectionInterface::kIceConnectionCompleted:
-      LOG(DEBUG) << "ICE connection state: Completed";
-      break;
-    case webrtc::PeerConnectionInterface::kIceConnectionFailed:
-      state_ = State::kFailed;
-      LOG(DEBUG) << "ICE connection state: Failed";
-      break;
-    case webrtc::PeerConnectionInterface::kIceConnectionDisconnected:
-      LOG(DEBUG) << "ICE connection state: Disconnected";
-      break;
-    case webrtc::PeerConnectionInterface::kIceConnectionClosed:
-      LOG(DEBUG) << "ICE connection state: Closed";
-      break;
-    case webrtc::PeerConnectionInterface::kIceConnectionMax:
-      LOG(DEBUG) << "ICE connection state: Max";
-      break;
-  }
-}
-void ClientHandler::OnIceCandidatesRemoved(
-    const std::vector<cricket::Candidate> &candidates) {
-  // ignore
-}
-void ClientHandler::OnTrack(
-    rtc::scoped_refptr<webrtc::RtpTransceiverInterface> transceiver) {
-  auto track = transceiver->receiver()->track();
-  if (track && track->kind() == webrtc::MediaStreamTrackInterface::kVideoKind) {
-    // It's ok to take the raw pointer here because we make sure to unset it
-    // when the track is removed
-    camera_track_->SetVideoTrack(
-        static_cast<webrtc::VideoTrackInterface *>(track.get()));
-  }
-}
-void ClientHandler::OnRemoveTrack(
-    rtc::scoped_refptr<webrtc::RtpReceiverInterface> receiver) {
-  auto track = receiver->track();
-  if (track && track->kind() == webrtc::MediaStreamTrackInterface::kVideoKind) {
-    // this only unsets if the track matches the one already in store
-    camera_track_->UnsetVideoTrack(
-        reinterpret_cast<webrtc::VideoTrackInterface *>(track.get()));
-  }
-}
-
-}  // namespace webrtc_streaming
-}  // namespace cuttlefish
diff --git a/host/frontend/webrtc/lib/client_handler.h b/host/frontend/webrtc/lib/client_handler.h
deleted file mode 100644
index ea58552..0000000
--- a/host/frontend/webrtc/lib/client_handler.h
+++ /dev/null
@@ -1,152 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#pragma once
-
-#include <functional>
-#include <memory>
-#include <optional>
-#include <sstream>
-#include <string>
-#include <utility>
-#include <vector>
-
-#include <json/json.h>
-
-#include <api/peer_connection_interface.h>
-#include <pc/video_track_source.h>
-
-#include "host/frontend/webrtc/lib/connection_observer.h"
-
-namespace cuttlefish {
-namespace webrtc_streaming {
-
-class InputChannelHandler;
-class AdbChannelHandler;
-class ControlChannelHandler;
-class BluetoothChannelHandler;
-class CameraChannelHandler;
-
-class ClientVideoTrackInterface;
-class ClientVideoTrackImpl;
-
-class ClientHandler : public webrtc::PeerConnectionObserver,
-                      public std::enable_shared_from_this<ClientHandler> {
- public:
-  static std::shared_ptr<ClientHandler> Create(
-      int client_id, std::shared_ptr<ConnectionObserver> observer,
-      std::function<void(const Json::Value&)> send_client_cb,
-      std::function<void(bool)> on_connection_changed_cb);
-  ~ClientHandler() override;
-
-  bool SetPeerConnection(
-      rtc::scoped_refptr<webrtc::PeerConnectionInterface> peer_connection);
-
-  bool AddDisplay(rtc::scoped_refptr<webrtc::VideoTrackInterface> track,
-                  const std::string& label);
-
-  bool AddAudio(rtc::scoped_refptr<webrtc::AudioTrackInterface> track,
-                  const std::string& label);
-
-  ClientVideoTrackInterface* GetCameraStream();
-
-  void HandleMessage(const Json::Value& client_message);
-
-  // CreateSessionDescriptionObserver implementation
-  void OnCreateSDPSuccess(webrtc::SessionDescriptionInterface* desc);
-  void OnCreateSDPFailure(webrtc::RTCError error);
-
-  // SetSessionDescriptionObserver implementation
-  void OnSetSDPFailure(webrtc::RTCError error);
-
-  // PeerConnectionObserver implementation
-  void OnSignalingChange(
-      webrtc::PeerConnectionInterface::SignalingState new_state) override;
-  void OnDataChannel(
-      rtc::scoped_refptr<webrtc::DataChannelInterface> data_channel) override;
-  void OnRenegotiationNeeded() override;
-  void OnStandardizedIceConnectionChange(
-      webrtc::PeerConnectionInterface::IceConnectionState new_state) override;
-  void OnConnectionChange(
-      webrtc::PeerConnectionInterface::PeerConnectionState new_state) override;
-  void OnIceGatheringChange(
-      webrtc::PeerConnectionInterface::IceGatheringState new_state) override;
-  void OnIceCandidate(const webrtc::IceCandidateInterface* candidate) override;
-  // Gathering of an ICE candidate failed.
-  // See https://w3c.github.io/webrtc-pc/#event-icecandidateerror
-  // |host_candidate| is a stringified socket address.
-  void OnIceCandidateError(const std::string& host_candidate,
-                           const std::string& url, int error_code,
-                           const std::string& error_text) override;
-  // Gathering of an ICE candidate failed.
-  // See https://w3c.github.io/webrtc-pc/#event-icecandidateerror
-  void OnIceCandidateError(const std::string& address, int port,
-                           const std::string& url, int error_code,
-                           const std::string& error_text) override;
-  void OnIceCandidatesRemoved(
-      const std::vector<cricket::Candidate>& candidates) override;
-  void OnTrack(
-      rtc::scoped_refptr<webrtc::RtpTransceiverInterface> transceiver) override;
-  void OnRemoveTrack(
-      rtc::scoped_refptr<webrtc::RtpReceiverInterface> receiver) override;
-
- private:
-  enum class State {
-      kNew,
-      kCreatingOffer,
-      kAwaitingAnswer,
-      kConnecting,
-      kConnected,
-      kFailed,
-  };
-  ClientHandler(int client_id, std::shared_ptr<ConnectionObserver> observer,
-                std::function<void(const Json::Value&)> send_client_cb,
-                std::function<void(bool)> on_connection_changed_cb);
-
-  // Intentionally private, disconnect the client by destroying the object.
-  void Close();
-
-  void LogAndReplyError(const std::string& error_msg) const;
-  void AddPendingIceCandidates();
-
-  int client_id_;
-  State state_ = State::kNew;
-  std::shared_ptr<ConnectionObserver> observer_;
-  std::function<void(const Json::Value&)> send_to_client_;
-  std::function<void(bool)> on_connection_changed_cb_;
-  rtc::scoped_refptr<webrtc::PeerConnectionInterface> peer_connection_;
-  std::vector<rtc::scoped_refptr<webrtc::DataChannelInterface>> data_channels_;
-  std::unique_ptr<InputChannelHandler> input_handler_;
-  std::unique_ptr<AdbChannelHandler> adb_handler_;
-  std::unique_ptr<ControlChannelHandler> control_handler_;
-  std::unique_ptr<BluetoothChannelHandler> bluetooth_handler_;
-  std::unique_ptr<CameraChannelHandler> camera_data_handler_;
-  std::unique_ptr<ClientVideoTrackImpl> camera_track_;
-  bool remote_description_added_ = false;
-  std::vector<std::unique_ptr<webrtc::IceCandidateInterface>>
-      pending_ice_candidates_;
-};
-
-class ClientVideoTrackInterface {
- public:
-  virtual ~ClientVideoTrackInterface() = default;
-  virtual void AddOrUpdateSink(
-      rtc::VideoSinkInterface<webrtc::VideoFrame>* sink,
-      const rtc::VideoSinkWants& wants) = 0;
-};
-
-}  // namespace webrtc_streaming
-}  // namespace cuttlefish
diff --git a/host/frontend/webrtc/lib/connection_observer.h b/host/frontend/webrtc/lib/connection_observer.h
deleted file mode 100644
index fe82549..0000000
--- a/host/frontend/webrtc/lib/connection_observer.h
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#pragma once
-
-#include <functional>
-
-#include <json/json.h>
-
-namespace cuttlefish {
-namespace webrtc_streaming {
-
-class ConnectionObserver {
- public:
-  ConnectionObserver() = default;
-  virtual ~ConnectionObserver() = default;
-
-  virtual void OnConnected(
-      std::function<void(const uint8_t*, size_t, bool)> ctrl_msg_sender) = 0;
-  virtual void OnTouchEvent(const std::string& display_label, int x, int y,
-                            bool down) = 0;
-  virtual void OnMultiTouchEvent(const std::string& label, Json::Value id, Json::Value slot,
-                                 Json::Value x, Json::Value y, bool down, int size) = 0;
-  virtual void OnKeyboardEvent(uint16_t keycode, bool down) = 0;
-  virtual void OnSwitchEvent(uint16_t code, bool state) = 0;
-  virtual void OnAdbChannelOpen(
-      std::function<bool(const uint8_t*, size_t)> adb_message_sender) = 0;
-  virtual void OnAdbMessage(const uint8_t* msg, size_t size) = 0;
-  virtual void OnControlChannelOpen(
-      std::function<bool(const Json::Value)> control_message_sender) = 0;
-  virtual void OnControlMessage(const uint8_t* msg, size_t size) = 0;
-  virtual void OnBluetoothChannelOpen(
-      std::function<bool(const uint8_t*, size_t)> bluetooth_message_sender) = 0;
-  virtual void OnBluetoothMessage(const uint8_t* msg, size_t size) = 0;
-  virtual void OnCameraData(const std::vector<char>& data) = 0;
-};
-
-class ConnectionObserverFactory {
- public:
-  virtual ~ConnectionObserverFactory() = default;
-  // Called when a new connection is requested
-  virtual std::shared_ptr<ConnectionObserver> CreateObserver() = 0;
-};
-
-}  // namespace webrtc_streaming
-}  // namespace cuttlefish
diff --git a/host/frontend/webrtc/lib/keyboard.cpp b/host/frontend/webrtc/lib/keyboard.cpp
deleted file mode 100644
index ee6b2a7..0000000
--- a/host/frontend/webrtc/lib/keyboard.cpp
+++ /dev/null
@@ -1,159 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "host/frontend/webrtc/lib/keyboard.h"
-
-#include <linux/input.h>
-
-#include <map>
-
-static const std::map<std::string, uint16_t> kDomToLinuxMapping = {
-    {"Backquote", KEY_GRAVE},
-    {"Backslash", KEY_BACKSLASH},
-    {"Backspace", KEY_BACKSPACE},
-    {"BracketLeft", KEY_LEFTBRACE},
-    {"BracketRight", KEY_RIGHTBRACE},
-    {"Comma", KEY_COMMA},
-    {"Digit0", KEY_0},
-    {"Digit1", KEY_1},
-    {"Digit2", KEY_2},
-    {"Digit3", KEY_3},
-    {"Digit4", KEY_4},
-    {"Digit5", KEY_5},
-    {"Digit6", KEY_6},
-    {"Digit7", KEY_7},
-    {"Digit8", KEY_8},
-    {"Digit9", KEY_9},
-    {"Equal", KEY_EQUAL},
-    {"IntlBackslash", KEY_BACKSLASH},
-    {"IntlRo", KEY_RO},
-    {"IntlYen", KEY_BACKSLASH},
-    {"KeyA", KEY_A},
-    {"KeyB", KEY_B},
-    {"KeyC", KEY_C},
-    {"KeyD", KEY_D},
-    {"KeyE", KEY_E},
-    {"KeyF", KEY_F},
-    {"KeyG", KEY_G},
-    {"KeyH", KEY_H},
-    {"KeyI", KEY_I},
-    {"KeyJ", KEY_J},
-    {"KeyK", KEY_K},
-    {"KeyL", KEY_L},
-    {"KeyM", KEY_M},
-    {"KeyN", KEY_N},
-    {"KeyO", KEY_O},
-    {"KeyP", KEY_P},
-    {"KeyQ", KEY_Q},
-    {"KeyR", KEY_R},
-    {"KeyS", KEY_S},
-    {"KeyT", KEY_T},
-    {"KeyU", KEY_U},
-    {"KeyV", KEY_V},
-    {"KeyW", KEY_W},
-    {"KeyX", KEY_X},
-    {"KeyY", KEY_Y},
-    {"KeyZ", KEY_Z},
-    {"Minus", KEY_MINUS},
-    {"Period", KEY_DOT},
-    {"Quote", KEY_APOSTROPHE},
-    {"Semicolon", KEY_SEMICOLON},
-    {"Slash", KEY_SLASH},
-    {"AltLeft", KEY_LEFTALT},
-    {"AltRight", KEY_RIGHTALT},
-    {"CapsLock", KEY_CAPSLOCK},
-    {"ContextMenu", KEY_CONTEXT_MENU},
-    {"ControlLeft", KEY_LEFTCTRL},
-    {"ControlRight", KEY_RIGHTCTRL},
-    {"Enter", KEY_ENTER},
-    {"MetaLeft", KEY_LEFTMETA},
-    {"MetaRight", KEY_RIGHTMETA},
-    {"ShiftLeft", KEY_LEFTSHIFT},
-    {"ShiftRight", KEY_RIGHTSHIFT},
-    {"Space", KEY_SPACE},
-    {"Tab", KEY_TAB},
-    {"Delete", KEY_DELETE},
-    {"End", KEY_END},
-    {"Help", KEY_HELP},
-    {"Home", KEY_HOME},
-    {"Insert", KEY_INSERT},
-    {"PageDown", KEY_PAGEDOWN},
-    {"PageUp", KEY_PAGEUP},
-    {"ArrowDown", KEY_DOWN},
-    {"ArrowLeft", KEY_LEFT},
-    {"ArrowRight", KEY_RIGHT},
-    {"ArrowUp", KEY_UP},
-
-    {"NumLock", KEY_NUMLOCK},
-    {"Numpad0", KEY_KP0},
-    {"Numpad1", KEY_KP1},
-    {"Numpad2", KEY_KP2},
-    {"Numpad3", KEY_KP3},
-    {"Numpad4", KEY_KP4},
-    {"Numpad5", KEY_KP5},
-    {"Numpad6", KEY_KP6},
-    {"Numpad7", KEY_KP7},
-    {"Numpad8", KEY_KP8},
-    {"Numpad9", KEY_KP9},
-    {"NumpadAdd", KEY_KPPLUS},
-    {"NumpadBackspace", KEY_BACKSPACE},
-    {"NumpadClear", KEY_CLEAR},
-    {"NumpadComma", KEY_KPCOMMA},
-    {"NumpadDecimal", KEY_KPDOT},
-    {"NumpadDivide", KEY_KPSLASH},
-    {"NumpadEnter", KEY_KPENTER},
-    {"NumpadEqual", KEY_KPEQUAL},
-    /*
-    {"NumpadClearEntry", },
-    {"NumpadHash", },
-    {"NumpadMemoryAdd", },
-    {"NumpadMemoryClear", },
-    {"NumpadMemoryRecall", },
-    {"NumpadMemoryStore", },
-    {"NumpadMemorySubtract", },
-    */
-    {"NumpadMultiply", KEY_KPASTERISK},
-    {"NumpadParenLeft", KEY_KPLEFTPAREN},
-    {"NumpadParenRight", KEY_KPRIGHTPAREN},
-    {"NumpadStar", KEY_KPASTERISK},
-    {"NumpadSubtract", KEY_KPMINUS},
-
-    {"Escape", KEY_ESC},
-    {"F1", KEY_F1},
-    {"F2", KEY_F2},
-    {"F3", KEY_F3},
-    {"F4", KEY_F4},
-    {"F5", KEY_F5},
-    {"F6", KEY_F6},
-    {"F7", KEY_F7},
-    {"F8", KEY_F8},
-    {"F9", KEY_F9},
-    {"F10", KEY_F10},
-    {"F11", KEY_F11},
-    {"F12", KEY_F12},
-    {"Fn", KEY_FN},
-    /*{"FnLock", },*/
-    {"PrintScreen", KEY_SYSRQ},
-    {"ScrollLock", KEY_SCROLLLOCK},
-    {"Pause", KEY_PAUSE}};
-
-uint16_t DomKeyCodeToLinux(const std::string& dom_KEY_code) {
-  const auto it = kDomToLinuxMapping.find(dom_KEY_code);
-  if (it == kDomToLinuxMapping.end()) {
-    return 0;
-  }
-  return it->second;
-}
diff --git a/host/frontend/webrtc/lib/local_recorder.cpp b/host/frontend/webrtc/lib/local_recorder.cpp
deleted file mode 100644
index 28f71b0..0000000
--- a/host/frontend/webrtc/lib/local_recorder.cpp
+++ /dev/null
@@ -1,288 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "host/frontend/webrtc/lib/local_recorder.h"
-
-#include <atomic>
-#include <chrono>
-#include <list>
-#include <mutex>
-#include <thread>
-#include <vector>
-
-#include <android-base/logging.h>
-#include <api/media_stream_interface.h>
-#include <api/rtp_parameters.h>
-#include <api/task_queue/default_task_queue_factory.h>
-#include <api/video/builtin_video_bitrate_allocator_factory.h>
-#include <api/video/video_stream_encoder_create.h>
-#include <api/video/video_stream_encoder_interface.h>
-#include <api/video_codecs/builtin_video_encoder_factory.h>
-#include <mkvmuxer/mkvmuxer.h>
-#include <mkvmuxer/mkvwriter.h>
-#include <system_wrappers/include/clock.h>
-
-namespace cuttlefish {
-namespace webrtc_streaming {
-
-constexpr double kRtpTicksPerSecond = 90000.;
-constexpr double kRtpTicksPerMs = kRtpTicksPerSecond / 1000.;
-constexpr double kRtpTicksPerUs = kRtpTicksPerMs / 1000.;
-constexpr double kRtpTicksPerNs = kRtpTicksPerUs / 1000.;
-
-class LocalRecorder::Display
-    : public webrtc::EncodedImageCallback
-    , public rtc::VideoSinkInterface<webrtc::VideoFrame> {
-public:
-  Display(LocalRecorder::Impl& impl);
-
-  void EncoderLoop();
-  void Stop();
-
-  // VideoSinkInterface
-  virtual void OnFrame(const webrtc::VideoFrame& frame) override;
-
-  // EncodedImageCallback
-  virtual webrtc::EncodedImageCallback::Result OnEncodedImage(
-      const webrtc::EncodedImage& encoded_image,
-      const webrtc::CodecSpecificInfo* codec_specific_info,
-      const webrtc::RTPFragmentationHeader* fragmentation) override;
-
-  LocalRecorder::Impl& impl_;
-  std::shared_ptr<webrtc::VideoTrackSourceInterface> source_;
-  std::unique_ptr<webrtc::VideoEncoder> video_encoder_;
-  uint64_t video_track_number_;
-
-  // TODO(schuffelen): Use a WebRTC task queue?
-  std::thread encoder_thread_;
-  std::condition_variable encoder_queue_signal_;
-  std::mutex encode_queue_mutex_;
-  std::list<webrtc::VideoFrame> encode_queue_;
-  std::atomic_bool encoder_running_ = true;
-};
-
-class LocalRecorder::Impl {
-public:
-  mkvmuxer::MkvWriter file_writer_;
-  mkvmuxer::Segment segment_;
-  std::unique_ptr<webrtc::VideoEncoderFactory> encoder_factory_;
-  std::mutex mkv_mutex_;
-  std::vector<std::unique_ptr<Display>> displays_;
-};
-
-/* static */
-std::unique_ptr<LocalRecorder> LocalRecorder::Create(
-    const std::string& filename) {
-  std::unique_ptr<Impl> impl(new Impl());
-
-  if (!impl->file_writer_.Open(filename.c_str())) {
-    LOG(ERROR) << "Failed to open \"" << filename << "\" to write a webm";
-    return {};
-  }
-
-  if (!impl->segment_.Init(&impl->file_writer_)) {
-    LOG(ERROR) << "Failed to initialize the mkvkmuxer segment";
-    return {};
-  }
-
-  impl->segment_.AccurateClusterDuration(true);
-  impl->segment_.set_estimate_file_duration(true);
-
-  impl->encoder_factory_ = webrtc::CreateBuiltinVideoEncoderFactory();
-  if (!impl->encoder_factory_) {
-    LOG(ERROR) << "Failed to create webRTC built-in video encoder factory";
-    return {};
-  }
-
-  return std::unique_ptr<LocalRecorder>(new LocalRecorder(std::move(impl)));
-}
-
-LocalRecorder::LocalRecorder(std::unique_ptr<LocalRecorder::Impl> impl)
-    : impl_(std::move(impl)) {
-}
-
-LocalRecorder::~LocalRecorder() = default;
-
-void LocalRecorder::AddDisplay(
-    size_t width,
-    size_t height,
-    std::shared_ptr<webrtc::VideoTrackSourceInterface> source) {
-  std::unique_ptr<Display> display(new Display(*impl_));
-  display->source_ = source;
-
-  display->video_encoder_ =
-      impl_->encoder_factory_->CreateVideoEncoder(webrtc::SdpVideoFormat("VP8"));
-  if (!display->video_encoder_) {
-    LOG(ERROR) << "Could not create vp8 video encoder";
-    return;
-  }
-  auto rc =
-      display->video_encoder_->RegisterEncodeCompleteCallback(display.get());
-  if (rc != 0) {
-    LOG(ERROR) << "Could not register encode complete callback";
-    return;
-  }
-  source->AddOrUpdateSink(display.get(), rtc::VideoSinkWants{});
-
-  webrtc::VideoCodec codec {};
-  memset(&codec, 0, sizeof(codec));
-  codec.codecType = webrtc::kVideoCodecVP8;
-  codec.width = width;
-  codec.height = height;
-  codec.startBitrate = 1000; // kilobits/sec
-  codec.maxBitrate = 2000;
-  codec.minBitrate = 0;
-  codec.maxFramerate = 60;
-  codec.active = true;
-  codec.qpMax = 56; // kDefaultMaxQp from simulcast_encoder_adapter.cc
-  codec.mode = webrtc::VideoCodecMode::kScreensharing;
-  codec.expect_encode_from_texture = false;
-  *codec.VP8() = webrtc::VideoEncoder::GetDefaultVp8Settings();
-
-  webrtc::VideoEncoder::Capabilities capabilities(false);
-  webrtc::VideoEncoder::Settings settings(capabilities, 1, 1 << 20);
-
-  rc = display->video_encoder_->InitEncode(&codec, settings);
-  if (rc != 0) {
-    LOG(ERROR) << "Failed to InitEncode";
-    return;
-  }
-
-  display->encoder_running_ = true;
-  display->encoder_thread_ = std::thread([](Display* display) {
-    display->EncoderLoop();
-  }, display.get());
-
-  std::lock_guard lock(impl_->mkv_mutex_);
-  display->video_track_number_ =
-      impl_->segment_.AddVideoTrack(width, height, 0);
-  if (display->video_track_number_ == 0) {
-    LOG(ERROR) << "Failed to add video track to webm muxer";
-    return;
-  }
-
-  impl_->displays_.emplace_back(std::move(display));
-}
-
-void LocalRecorder::Stop() {
-  for (auto& display : impl_->displays_) {
-    display->Stop();
-  }
-  impl_->displays_.clear();
-
-  std::lock_guard lock(impl_->mkv_mutex_);
-  impl_->segment_.Finalize();
-}
-
-LocalRecorder::Display::Display(LocalRecorder::Impl& impl) : impl_(impl) {
-}
-
-void LocalRecorder::Display::OnFrame(const webrtc::VideoFrame& frame) {
-  std::lock_guard queue_lock(encode_queue_mutex_);
-  static int kMaxQueuedFrames = 10;
-  if (encode_queue_.size() >= kMaxQueuedFrames) {
-    LOG(VERBOSE) << "Dropped frame, encoder queue too long";
-    return;
-  }
-  encode_queue_.push_back(frame);
-  encoder_queue_signal_.notify_one();
-}
-
-void LocalRecorder::Display::EncoderLoop() {
-  int frames_since_keyframe = 0;
-  std::chrono::time_point<std::chrono::steady_clock> start_timestamp;
-  auto last_keyframe_time = std::chrono::steady_clock::now();
-  while (encoder_running_) {
-    std::unique_ptr<webrtc::VideoFrame> frame;
-    {
-      std::unique_lock queue_lock(encode_queue_mutex_);
-      while (encode_queue_.size() == 0 && encoder_running_) {
-        encoder_queue_signal_.wait(queue_lock);
-      }
-      if (!encoder_running_) {
-        break;
-      }
-      frame = std::make_unique<webrtc::VideoFrame>(
-          std::move(encode_queue_.front()));
-      encode_queue_.pop_front();
-    }
-
-    auto now = std::chrono::steady_clock::now();
-    if (start_timestamp.time_since_epoch().count() == 0) {
-      start_timestamp = now;
-    }
-    auto timestamp_diff =
-        std::chrono::duration_cast<std::chrono::microseconds>(
-              now - start_timestamp);
-    frame->set_timestamp_us(timestamp_diff.count());
-    frame->set_timestamp(timestamp_diff.count() * kRtpTicksPerUs);
-
-    std::vector<webrtc::VideoFrameType> types;
-    auto time_since_keyframe = now - last_keyframe_time;
-    const auto min_keyframe_time = std::chrono::seconds(10);
-    if (frames_since_keyframe > 60 || time_since_keyframe > min_keyframe_time) {
-      last_keyframe_time = now;
-      frames_since_keyframe = 0;
-      types.push_back(webrtc::VideoFrameType::kVideoFrameKey);
-    } else {
-      types.push_back(webrtc::VideoFrameType::kVideoFrameDelta);
-    }
-    auto rc = video_encoder_->Encode(*frame, &types);
-    if (rc != 0) {
-      LOG(ERROR) << "Failed to encode frame";
-    }
-  }
-}
-
-void LocalRecorder::Display::Stop() {
-  encoder_running_ = false;
-  encoder_queue_signal_.notify_all();
-  if (encoder_thread_.joinable()) {
-    encoder_thread_.join();
-  }
-}
-
-webrtc::EncodedImageCallback::Result LocalRecorder::Display::OnEncodedImage(
-    const webrtc::EncodedImage& encoded_image,
-    const webrtc::CodecSpecificInfo* codec_specific_info,
-    const webrtc::RTPFragmentationHeader* fragmentation) {
-  uint64_t timestamp = encoded_image.Timestamp() / kRtpTicksPerNs;
-
-  std::lock_guard(impl_.mkv_mutex_);
-
-  bool is_key =
-      encoded_image._frameType == webrtc::VideoFrameType::kVideoFrameKey;
-  bool success = impl_.segment_.AddFrame(
-      encoded_image.data(),
-      encoded_image.size(),
-      video_track_number_,
-      timestamp,
-      is_key);
-
-  webrtc::EncodedImageCallback::Result result(
-      success
-          ? webrtc::EncodedImageCallback::Result::Error::OK
-          : webrtc::EncodedImageCallback::Result::Error::ERROR_SEND_FAILED);
-
-  if (success) {
-    result.frame_id = encoded_image.Timestamp();
-  }
-  return result;
-}
-
-} // namespace webrtc_streaming
-} // namespace cuttlefish
-
diff --git a/host/frontend/webrtc/lib/port_range_socket_factory.cpp b/host/frontend/webrtc/lib/port_range_socket_factory.cpp
deleted file mode 100644
index c997cbc..0000000
--- a/host/frontend/webrtc/lib/port_range_socket_factory.cpp
+++ /dev/null
@@ -1,84 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "host/frontend/webrtc/lib/port_range_socket_factory.h"
-
-#include <android-base/logging.h>
-
-namespace cuttlefish {
-namespace webrtc_streaming {
-
-namespace {
-
-std::pair<uint16_t, uint16_t> IntersectPortRanges(
-    std::pair<uint16_t, uint16_t> own_range, uint16_t min_port,
-    uint16_t max_port) {
-  if (own_range.first == own_range.second && own_range.first == 0) {
-    // No range configured
-    return {min_port, max_port};
-  }
-  if (min_port == max_port && max_port == 0) {
-    // No range requested, use configured
-    return own_range;
-  }
-  uint16_t own_min_port = own_range.first;
-  uint16_t own_max_port = own_range.second;
-
-  if (min_port > own_max_port || max_port < own_min_port) {
-    // Ranges don't intersect
-    LOG(WARNING) << "Port ranges don't intersect: requested=[" << min_port
-                 << "," << max_port << "], configured=[" << own_min_port << ","
-                 << own_max_port << "]";
-  }
-  return {std::max(min_port, own_min_port), std::min(max_port, own_max_port)};
-}
-
-}  // namespace
-
-PortRangeSocketFactory::PortRangeSocketFactory(
-    rtc::Thread* thread, std::pair<uint16_t, uint16_t> udp_port_range,
-    std::pair<uint16_t, uint16_t> tcp_port_range)
-    : rtc::BasicPacketSocketFactory(thread),
-      udp_port_range_(udp_port_range),
-      tcp_port_range_(tcp_port_range) {}
-
-rtc::AsyncPacketSocket* PortRangeSocketFactory::CreateUdpSocket(
-    const rtc::SocketAddress& local_address, uint16_t min_port,
-    uint16_t max_port) {
-  auto port_range = IntersectPortRanges(udp_port_range_, min_port, max_port);
-  if (port_range.second < port_range.first) {
-    // Own range doesn't intersect with requested range
-    return nullptr;
-  }
-  return rtc::BasicPacketSocketFactory::CreateUdpSocket(
-      local_address, port_range.first, port_range.second);
-}
-
-rtc::AsyncPacketSocket* PortRangeSocketFactory::CreateServerTcpSocket(
-    const rtc::SocketAddress& local_address, uint16_t min_port,
-    uint16_t max_port, int opts) {
-  auto port_range = IntersectPortRanges(tcp_port_range_, min_port, max_port);
-  if (port_range.second < port_range.first) {
-    // Own range doesn't intersect with requested range
-    return nullptr;
-  }
-
-  return rtc::BasicPacketSocketFactory::CreateServerTcpSocket(
-      local_address, port_range.first, port_range.second, opts);
-}
-
-}  // namespace webrtc_streaming
-}  // namespace cuttlefish
diff --git a/host/frontend/webrtc/lib/port_range_socket_factory.h b/host/frontend/webrtc/lib/port_range_socket_factory.h
deleted file mode 100644
index c5a34a1..0000000
--- a/host/frontend/webrtc/lib/port_range_socket_factory.h
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#pragma once
-
-#include <cinttypes>
-#include <utility>
-
-// This is not part of the webrtc api and therefore subject to change
-#include <p2p/base/basic_packet_socket_factory.h>
-
-namespace cuttlefish {
-namespace webrtc_streaming {
-
-// rtc::BasicPacketSocketFactory is not part of the webrtc api so only functions
-// from its upper class should be overridden here.
-class PortRangeSocketFactory : public rtc::BasicPacketSocketFactory {
- public:
-  PortRangeSocketFactory(rtc::Thread* thread,
-                         std::pair<uint16_t, uint16_t> udp_port_range,
-                         std::pair<uint16_t, uint16_t> tcp_port_range);
-
-  rtc::AsyncPacketSocket* CreateUdpSocket(
-      const rtc::SocketAddress& local_address, uint16_t min_port,
-      uint16_t max_port) override;
-
-  rtc::AsyncPacketSocket* CreateServerTcpSocket(
-      const rtc::SocketAddress& local_address, uint16_t min_port,
-      uint16_t max_port, int opts) override;
-
- private:
-  std::pair<uint16_t, uint16_t> udp_port_range_;
-  std::pair<uint16_t, uint16_t> tcp_port_range_;
-};
-
-}  // namespace webrtc_streaming
-}  // namespace cuttlefish
diff --git a/host/frontend/webrtc/lib/server_connection.cpp b/host/frontend/webrtc/lib/server_connection.cpp
deleted file mode 100644
index 314b6df..0000000
--- a/host/frontend/webrtc/lib/server_connection.cpp
+++ /dev/null
@@ -1,650 +0,0 @@
-//
-// Copyright (C) 2020 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-//
-
-#include "host/frontend/webrtc/lib/server_connection.h"
-
-#include <android-base/logging.h>
-#include <libwebsockets.h>
-
-#include "common/libs/fs/shared_fd.h"
-#include "common/libs/fs/shared_select.h"
-#include "common/libs/utils/files.h"
-
-namespace cuttlefish {
-namespace webrtc_streaming {
-
-// ServerConnection over Unix socket
-class UnixServerConnection : public ServerConnection {
- public:
-  UnixServerConnection(const std::string& addr,
-                       std::weak_ptr<ServerConnectionObserver> observer);
-  ~UnixServerConnection() override;
-
-  bool Send(const Json::Value& msg) override;
-
- private:
-  void Connect() override;
-  void StopThread();
-  void ReadLoop();
-
-  const std::string addr_;
-  SharedFD conn_;
-  std::mutex write_mtx_;
-  std::weak_ptr<ServerConnectionObserver> observer_;
-  // The event fd must be declared before the thread to ensure it's initialized
-  // before the thread starts and is safe to be accessed from it.
-  SharedFD thread_notifier_;
-  std::atomic_bool running_ = false;
-  std::thread thread_;
-};
-
-// ServerConnection using websockets
-class WsConnectionContext;
-
-class WsConnection : public std::enable_shared_from_this<WsConnection> {
- public:
-  struct CreateConnectionSul {
-    lws_sorted_usec_list_t sul = {};
-    std::weak_ptr<WsConnection> weak_this;
-  };
-
-  WsConnection(int port, const std::string& addr, const std::string& path,
-               ServerConfig::Security secure,
-               const std::vector<std::pair<std::string, std::string>>& headers,
-               std::weak_ptr<ServerConnectionObserver> observer,
-               std::shared_ptr<WsConnectionContext> context);
-
-  ~WsConnection();
-
-  void Connect();
-  bool Send(const Json::Value& msg);
-
-  void ConnectInner();
-
-  void OnError(const std::string& error);
-  void OnReceive(const uint8_t* data, size_t len, bool is_binary);
-  void OnOpen();
-  void OnClose();
-  void OnWriteable();
-
-  void AddHttpHeaders(unsigned char** p, unsigned char* end) const;
-
- private:
-  struct WsBuffer {
-    WsBuffer() = default;
-    WsBuffer(const uint8_t* data, size_t len, bool binary)
-        : buffer_(LWS_PRE + len), is_binary_(binary) {
-      memcpy(&buffer_[LWS_PRE], data, len);
-    }
-
-    uint8_t* data() { return &buffer_[LWS_PRE]; }
-    bool is_binary() const { return is_binary_; }
-    size_t size() const { return buffer_.size() - LWS_PRE; }
-
-   private:
-    std::vector<uint8_t> buffer_;
-    bool is_binary_;
-  };
-  bool Send(const uint8_t* data, size_t len, bool binary = false);
-
-  CreateConnectionSul extended_sul_;
-  struct lws* wsi_;
-  const int port_;
-  const std::string addr_;
-  const std::string path_;
-  const ServerConfig::Security security_;
-  const std::vector<std::pair<std::string, std::string>> headers_;
-
-  std::weak_ptr<ServerConnectionObserver> observer_;
-
-  // each element contains the data to be sent and whether it's binary or not
-  std::deque<WsBuffer> write_queue_;
-  std::mutex write_queue_mutex_;
-  // The connection object should not outlive the context object. This reference
-  // guarantees it.
-  std::shared_ptr<WsConnectionContext> context_;
-};
-
-class WsConnectionContext
-    : public std::enable_shared_from_this<WsConnectionContext> {
- public:
-  static std::shared_ptr<WsConnectionContext> Create();
-
-  WsConnectionContext(struct lws_context* lws_ctx);
-  ~WsConnectionContext();
-
-  std::unique_ptr<ServerConnection> CreateConnection(
-      int port, const std::string& addr, const std::string& path,
-      ServerConfig::Security secure,
-      std::weak_ptr<ServerConnectionObserver> observer,
-      const std::vector<std::pair<std::string, std::string>>& headers);
-
-  void RememberConnection(void*, std::weak_ptr<WsConnection>);
-  void ForgetConnection(void*);
-  std::shared_ptr<WsConnection> GetConnection(void*);
-
-  struct lws_context* lws_context() {
-    return lws_context_;
-  }
-
- private:
-  void Start();
-
-  std::map<void*, std::weak_ptr<WsConnection>> weak_by_ptr_;
-  std::mutex map_mutex_;
-  struct lws_context* lws_context_;
-  std::thread message_loop_;
-};
-
-std::unique_ptr<ServerConnection> ServerConnection::Connect(
-    const ServerConfig& conf,
-    std::weak_ptr<ServerConnectionObserver> observer) {
-  std::unique_ptr<ServerConnection> ret;
-  // If the provided address points to an existing UNIX socket in the file
-  // system connect to it, otherwise assume it's a network address and connect
-  // using websockets
-  if (FileIsSocket(conf.addr)) {
-    ret.reset(new UnixServerConnection(conf.addr, observer));
-  } else {
-    // This can be a local variable since the ws connection will keep a
-    // reference to it.
-    auto ws_context = WsConnectionContext::Create();
-    CHECK(ws_context) << "Failed to create websocket context";
-    ret = ws_context->CreateConnection(conf.port, conf.addr, conf.path,
-                                       conf.security, observer,
-                                       conf.http_headers);
-  }
-  ret->Connect();
-  return ret;
-}
-
-void ServerConnection::Reconnect() { Connect(); }
-
-// UnixServerConnection implementation
-
-UnixServerConnection::UnixServerConnection(
-    const std::string& addr, std::weak_ptr<ServerConnectionObserver> observer)
-    : addr_(addr), observer_(observer) {}
-
-UnixServerConnection::~UnixServerConnection() {
-  StopThread();
-}
-
-bool UnixServerConnection::Send(const Json::Value& msg) {
-  Json::StreamWriterBuilder factory;
-  auto str = Json::writeString(factory, msg);
-  std::lock_guard<std::mutex> lock(write_mtx_);
-  auto res =
-      conn_->Send(reinterpret_cast<const uint8_t*>(str.c_str()), str.size(), 0);
-  if (res < 0) {
-    LOG(ERROR) << "Failed to send data to signaling server: "
-               << conn_->StrError();
-    // Don't call OnError() here, the receiving thread probably did it already
-    // or is about to do it.
-  }
-  // A SOCK_SEQPACKET unix socket will send the entire message or fail, but it
-  // won't send a partial message.
-  return res == str.size();
-}
-
-void UnixServerConnection::Connect() {
-  // The thread could be running if this is a Reconnect
-  StopThread();
-
-  conn_ = SharedFD::SocketLocalClient(addr_, false, SOCK_SEQPACKET);
-  if (!conn_->IsOpen()) {
-    LOG(ERROR) << "Failed to connect to unix socket: " << conn_->StrError();
-    if (auto o = observer_.lock(); o) {
-      o->OnError("Failed to connect to unix socket");
-    }
-    return;
-  }
-  thread_notifier_ = SharedFD::Event();
-  if (!thread_notifier_->IsOpen()) {
-    LOG(ERROR) << "Failed to create eventfd for background thread: "
-               << thread_notifier_->StrError();
-    if (auto o = observer_.lock(); o) {
-      o->OnError("Failed to create eventfd for background thread");
-    }
-    return;
-  }
-  if (auto o = observer_.lock(); o) {
-    o->OnOpen();
-  }
-  // Start the thread
-  running_ = true;
-  thread_ = std::thread([this](){ReadLoop();});
-}
-
-void UnixServerConnection::StopThread() {
-  running_ = false;
-  if (!thread_notifier_->IsOpen()) {
-    // The thread won't be running if this isn't open
-    return;
-  }
-  if (thread_notifier_->EventfdWrite(1) < 0) {
-    LOG(ERROR) << "Failed to notify background thread, this thread may block";
-  }
-  if (thread_.joinable()) {
-    thread_.join();
-  }
-}
-
-void UnixServerConnection::ReadLoop() {
-  if (!thread_notifier_->IsOpen()) {
-    LOG(ERROR) << "The UnixServerConnection's background thread is unable to "
-                  "receive notifications so it can't run";
-    return;
-  }
-  std::vector<uint8_t> buffer(4096, 0);
-  while (running_) {
-    SharedFDSet rset;
-    rset.Set(thread_notifier_);
-    rset.Set(conn_);
-    auto res = Select(&rset, nullptr, nullptr, nullptr);
-    if (res < 0) {
-      LOG(ERROR) << "Failed to select from background thread";
-      break;
-    }
-    if (rset.IsSet(thread_notifier_)) {
-      eventfd_t val;
-      auto res = thread_notifier_->EventfdRead(&val);
-      if (res < 0) {
-        LOG(ERROR) << "Error reading from event fd: "
-                   << thread_notifier_->StrError();
-        break;
-      }
-    }
-    if (rset.IsSet(conn_)) {
-      auto size = conn_->Recv(buffer.data(), 0, MSG_TRUNC | MSG_PEEK);
-      if (size > buffer.size()) {
-        // Enlarge enough to accommodate size bytes and be a multiple of 4096
-        auto new_size = (size + 4095) & ~4095;
-        buffer.resize(new_size);
-      }
-      auto res = conn_->Recv(buffer.data(), buffer.size(), MSG_TRUNC);
-      if (res < 0) {
-        LOG(ERROR) << "Failed to read from server: " << conn_->StrError();
-        if (auto observer = observer_.lock(); observer) {
-          observer->OnError(conn_->StrError());
-        }
-        return;
-      }
-      if (res == 0) {
-        auto observer = observer_.lock();
-        if (observer) {
-          observer->OnClose();
-        }
-        break;
-      }
-      auto observer = observer_.lock();
-      if (observer) {
-        observer->OnReceive(buffer.data(), res, false);
-      }
-    }
-  }
-}
-
-// WsConnection implementation
-
-int LwsCallback(struct lws* wsi, enum lws_callback_reasons reason, void* user,
-                void* in, size_t len);
-void CreateConnectionCallback(lws_sorted_usec_list_t* sul);
-
-namespace {
-
-constexpr char kProtocolName[] = "cf-webrtc-device";
-constexpr int kBufferSize = 65536;
-
-const uint32_t backoff_ms[] = {1000, 2000, 3000, 4000, 5000};
-
-const lws_retry_bo_t kRetry = {
-    .retry_ms_table = backoff_ms,
-    .retry_ms_table_count = LWS_ARRAY_SIZE(backoff_ms),
-    .conceal_count = LWS_ARRAY_SIZE(backoff_ms),
-
-    .secs_since_valid_ping = 3,    /* force PINGs after secs idle */
-    .secs_since_valid_hangup = 10, /* hangup after secs idle */
-
-    .jitter_percent = 20,
-};
-
-const struct lws_protocols kProtocols[2] = {
-    {kProtocolName, LwsCallback, 0, kBufferSize, 0, NULL, 0},
-    {NULL, NULL, 0, 0, 0, NULL, 0}};
-
-}  // namespace
-
-std::shared_ptr<WsConnectionContext> WsConnectionContext::Create() {
-  struct lws_context_creation_info context_info = {};
-  context_info.port = CONTEXT_PORT_NO_LISTEN;
-  context_info.options = LWS_SERVER_OPTION_DO_SSL_GLOBAL_INIT;
-  context_info.protocols = kProtocols;
-  struct lws_context* lws_ctx = lws_create_context(&context_info);
-  if (!lws_ctx) {
-    return nullptr;
-  }
-  return std::shared_ptr<WsConnectionContext>(new WsConnectionContext(lws_ctx));
-}
-
-WsConnectionContext::WsConnectionContext(struct lws_context* lws_ctx)
-    : lws_context_(lws_ctx) {
-  Start();
-}
-
-WsConnectionContext::~WsConnectionContext() {
-  lws_context_destroy(lws_context_);
-  if (message_loop_.joinable()) {
-    message_loop_.join();
-  }
-}
-
-void WsConnectionContext::Start() {
-  message_loop_ = std::thread([this]() {
-    for (;;) {
-      if (lws_service(lws_context_, 0) < 0) {
-        break;
-      }
-    }
-  });
-}
-
-// This wrapper is needed because the ServerConnection objects are meant to be
-// referenced by std::unique_ptr but WsConnection needs to be referenced by
-// std::shared_ptr because it's also (weakly) referenced by the websocket
-// thread.
-class WsConnectionWrapper : public ServerConnection {
- public:
-  WsConnectionWrapper(std::shared_ptr<WsConnection> conn) : conn_(conn) {}
-
-  bool Send(const Json::Value& msg) override { return conn_->Send(msg); }
-
- private:
-  void Connect() override { return conn_->Connect(); }
-  std::shared_ptr<WsConnection> conn_;
-};
-
-std::unique_ptr<ServerConnection> WsConnectionContext::CreateConnection(
-    int port, const std::string& addr, const std::string& path,
-    ServerConfig::Security security,
-    std::weak_ptr<ServerConnectionObserver> observer,
-    const std::vector<std::pair<std::string, std::string>>& headers) {
-  return std::unique_ptr<ServerConnection>(
-      new WsConnectionWrapper(std::make_shared<WsConnection>(
-          port, addr, path, security, headers, observer, shared_from_this())));
-}
-
-std::shared_ptr<WsConnection> WsConnectionContext::GetConnection(void* raw) {
-  std::shared_ptr<WsConnection> connection;
-  {
-    std::lock_guard<std::mutex> lock(map_mutex_);
-    if (weak_by_ptr_.count(raw) == 0) {
-      return nullptr;
-    }
-    connection = weak_by_ptr_[raw].lock();
-    if (!connection) {
-      weak_by_ptr_.erase(raw);
-    }
-  }
-  return connection;
-}
-
-void WsConnectionContext::RememberConnection(void* raw,
-                                             std::weak_ptr<WsConnection> conn) {
-  std::lock_guard<std::mutex> lock(map_mutex_);
-  weak_by_ptr_.emplace(
-      std::pair<void*, std::weak_ptr<WsConnection>>(raw, conn));
-}
-
-void WsConnectionContext::ForgetConnection(void* raw) {
-  std::lock_guard<std::mutex> lock(map_mutex_);
-  weak_by_ptr_.erase(raw);
-}
-
-WsConnection::WsConnection(
-    int port, const std::string& addr, const std::string& path,
-    ServerConfig::Security security,
-    const std::vector<std::pair<std::string, std::string>>& headers,
-    std::weak_ptr<ServerConnectionObserver> observer,
-    std::shared_ptr<WsConnectionContext> context)
-    : port_(port),
-      addr_(addr),
-      path_(path),
-      security_(security),
-      headers_(headers),
-      observer_(observer),
-      context_(context) {}
-
-WsConnection::~WsConnection() {
-  context_->ForgetConnection(this);
-  // This will cause the callback to be called which will drop the connection
-  // after seeing the context doesn't remember this object
-  lws_callback_on_writable(wsi_);
-}
-
-void WsConnection::Connect() {
-  memset(&extended_sul_.sul, 0, sizeof(extended_sul_.sul));
-  extended_sul_.weak_this = weak_from_this();
-  lws_sul_schedule(context_->lws_context(), 0, &extended_sul_.sul,
-                   CreateConnectionCallback, 1);
-}
-
-void WsConnection::AddHttpHeaders(unsigned char** p, unsigned char* end) const {
-  for (const auto& header_entry : headers_) {
-    const auto& name = header_entry.first;
-    const auto& value = header_entry.second;
-    auto res = lws_add_http_header_by_name(
-        wsi_, reinterpret_cast<const unsigned char*>(name.c_str()),
-        reinterpret_cast<const unsigned char*>(value.c_str()), value.size(), p,
-        end);
-    if (res != 0) {
-      LOG(ERROR) << "Unable to add header: " << name;
-    }
-  }
-  if (!headers_.empty()) {
-    // Let LWS know we added some headers.
-    lws_client_http_body_pending(wsi_, 1);
-  }
-}
-
-void WsConnection::OnError(const std::string& error) {
-  auto observer = observer_.lock();
-  if (observer) {
-    observer->OnError(error);
-  }
-}
-void WsConnection::OnReceive(const uint8_t* data, size_t len, bool is_binary) {
-  auto observer = observer_.lock();
-  if (observer) {
-    observer->OnReceive(data, len, is_binary);
-  }
-}
-void WsConnection::OnOpen() {
-  auto observer = observer_.lock();
-  if (observer) {
-    observer->OnOpen();
-  }
-}
-void WsConnection::OnClose() {
-  auto observer = observer_.lock();
-  if (observer) {
-    observer->OnClose();
-  }
-}
-
-void WsConnection::OnWriteable() {
-  WsBuffer buffer;
-  {
-    std::lock_guard<std::mutex> lock(write_queue_mutex_);
-    if (write_queue_.size() == 0) {
-      return;
-    }
-    buffer = std::move(write_queue_.front());
-    write_queue_.pop_front();
-  }
-  auto flags = lws_write_ws_flags(
-      buffer.is_binary() ? LWS_WRITE_BINARY : LWS_WRITE_TEXT, true, true);
-  auto res = lws_write(wsi_, buffer.data(), buffer.size(),
-                       (enum lws_write_protocol)flags);
-  if (res != buffer.size()) {
-    LOG(WARNING) << "Unable to send the entire message!";
-  }
-}
-
-bool WsConnection::Send(const Json::Value& msg) {
-  Json::StreamWriterBuilder factory;
-  auto str = Json::writeString(factory, msg);
-  return Send(reinterpret_cast<const uint8_t*>(str.c_str()), str.size());
-}
-
-bool WsConnection::Send(const uint8_t* data, size_t len, bool binary) {
-  if (!wsi_) {
-    LOG(WARNING) << "Send called on an uninitialized connection!!";
-    return false;
-  }
-  WsBuffer buffer(data, len, binary);
-  {
-    std::lock_guard<std::mutex> lock(write_queue_mutex_);
-    write_queue_.emplace_back(std::move(buffer));
-  }
-
-  lws_callback_on_writable(wsi_);
-  return true;
-}
-
-int LwsCallback(struct lws* wsi, enum lws_callback_reasons reason, void* user,
-                void* in, size_t len) {
-  constexpr int DROP = -1;
-  constexpr int OK = 0;
-
-  // For some values of `reason`, `user` doesn't point to the value provided
-  // when the connection was created. This function object should be used with
-  // care.
-  auto with_connection =
-      [wsi, user](std::function<void(std::shared_ptr<WsConnection>)> cb) {
-        auto context = reinterpret_cast<WsConnectionContext*>(user);
-        auto connection = context->GetConnection(wsi);
-        if (!connection) {
-          return DROP;
-        }
-        cb(connection);
-        return OK;
-      };
-
-  switch (reason) {
-    case LWS_CALLBACK_CLIENT_CONNECTION_ERROR:
-      return with_connection([in](std::shared_ptr<WsConnection> connection) {
-        connection->OnError(in ? (char*)in : "(null)");
-      });
-
-    case LWS_CALLBACK_CLIENT_RECEIVE:
-      return with_connection(
-          [in, len, wsi](std::shared_ptr<WsConnection> connection) {
-            connection->OnReceive((const uint8_t*)in, len,
-                                  lws_frame_is_binary(wsi));
-          });
-
-    case LWS_CALLBACK_CLIENT_ESTABLISHED:
-      return with_connection([](std::shared_ptr<WsConnection> connection) {
-        connection->OnOpen();
-      });
-
-    case LWS_CALLBACK_CLIENT_CLOSED:
-      return with_connection([](std::shared_ptr<WsConnection> connection) {
-        connection->OnClose();
-      });
-
-    case LWS_CALLBACK_CLIENT_WRITEABLE:
-      return with_connection([](std::shared_ptr<WsConnection> connection) {
-        connection->OnWriteable();
-      });
-
-    case LWS_CALLBACK_CLIENT_APPEND_HANDSHAKE_HEADER:
-      return with_connection(
-          [in, len](std::shared_ptr<WsConnection> connection) {
-            auto p = reinterpret_cast<unsigned char**>(in);
-            auto end = (*p) + len;
-            connection->AddHttpHeaders(p, end);
-          });
-
-    case LWS_CALLBACK_CLIENT_HTTP_WRITEABLE:
-      // This callback is only called when we add additional HTTP headers, let
-      // LWS know we're done modifying the HTTP request.
-      lws_client_http_body_pending(wsi, 0);
-      return 0;
-
-    default:
-      LOG(VERBOSE) << "Unhandled value: " << reason;
-      return lws_callback_http_dummy(wsi, reason, user, in, len);
-  }
-}
-
-void CreateConnectionCallback(lws_sorted_usec_list_t* sul) {
-  std::shared_ptr<WsConnection> connection =
-      reinterpret_cast<WsConnection::CreateConnectionSul*>(sul)
-          ->weak_this.lock();
-  if (!connection) {
-    LOG(WARNING) << "The object was already destroyed by the time of the first "
-                 << "connection attempt. That's unusual.";
-    return;
-  }
-  connection->ConnectInner();
-}
-
-void WsConnection::ConnectInner() {
-  struct lws_client_connect_info connect_info;
-
-  memset(&connect_info, 0, sizeof(connect_info));
-
-  connect_info.context = context_->lws_context();
-  connect_info.port = port_;
-  connect_info.address = addr_.c_str();
-  connect_info.path = path_.c_str();
-  connect_info.host = connect_info.address;
-  connect_info.origin = connect_info.address;
-  switch (security_) {
-    case ServerConfig::Security::kAllowSelfSigned:
-      connect_info.ssl_connection = LCCSCF_ALLOW_SELFSIGNED |
-                                    LCCSCF_SKIP_SERVER_CERT_HOSTNAME_CHECK |
-                                    LCCSCF_USE_SSL;
-      break;
-    case ServerConfig::Security::kStrict:
-      connect_info.ssl_connection = LCCSCF_USE_SSL;
-      break;
-    case ServerConfig::Security::kInsecure:
-      connect_info.ssl_connection = 0;
-      break;
-  }
-  connect_info.protocol = "webrtc-operator";
-  connect_info.local_protocol_name = kProtocolName;
-  connect_info.pwsi = &wsi_;
-  connect_info.retry_and_idle_policy = &kRetry;
-  // There is no guarantee the connection object still exists when the callback
-  // is called. Put the context instead as the user data which is guaranteed to
-  // still exist and holds a weak ptr to the connection.
-  connect_info.userdata = context_.get();
-
-  if (lws_client_connect_via_info(&connect_info)) {
-    // wsi_ is not initialized until after the call to
-    // lws_client_connect_via_info(). Luckily, this is guaranteed to run before
-    // the protocol callback is called because it runs in the same loop.
-    context_->RememberConnection(wsi_, weak_from_this());
-  } else {
-    LOG(ERROR) << "Connection failed!";
-  }
-}
-
-}  // namespace webrtc_streaming
-}  // namespace cuttlefish
diff --git a/host/frontend/webrtc/lib/server_connection.h b/host/frontend/webrtc/lib/server_connection.h
deleted file mode 100644
index ab7111f..0000000
--- a/host/frontend/webrtc/lib/server_connection.h
+++ /dev/null
@@ -1,93 +0,0 @@
-//
-// Copyright (C) 2020 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-//
-
-#pragma once
-
-#include <string.h>
-
-#include <deque>
-#include <functional>
-#include <map>
-#include <memory>
-#include <mutex>
-#include <string>
-#include <thread>
-#include <vector>
-
-#include <json/json.h>
-#include <libwebsockets.h>
-
-namespace cuttlefish {
-namespace webrtc_streaming {
-
-struct ServerConfig {
-  enum class Security {
-    kInsecure,
-    kAllowSelfSigned,
-    kStrict,
-  };
-
-  // The ip address or domain name of the operator server.
-  std::string addr;
-  int port;
-  // The path component of the operator server's register url.
-  std::string path;
-  // The security level to use when connecting to the operator server.
-  Security security;
-  // A list of key value pairs to include as HTTP handshake headers when
-  // connecting to the operator.
-  std::vector<std::pair<std::string, std::string>> http_headers;
-};
-
-class ServerConnectionObserver {
- public:
-  virtual ~ServerConnectionObserver() = default;
-  // Called when the connection to the server has been established. This is the
-  // cue to start using Send().
-  virtual void OnOpen() = 0;
-  virtual void OnClose() = 0;
-  // Called when the connection to the server has failed with an unrecoverable
-  // error.
-  virtual void OnError(const std::string& error) = 0;
-  virtual void OnReceive(const uint8_t* msg, size_t length, bool is_binary) = 0;
-};
-
-// Represents a connection to the signaling server. When a connection is created
-// it connects with the server automatically but sends no info.
-// Only Send() can be called from multiple threads simultaneously. Reconnect(),
-// Send() and the destructor will run into race conditions if called
-// concurrently.
-class ServerConnection {
- public:
-  static std::unique_ptr<ServerConnection> Connect(
-      const ServerConfig& conf,
-      std::weak_ptr<ServerConnectionObserver> observer);
-
-  // Destroying the connection will disconnect from the signaling server and
-  // release any open fds.
-  virtual ~ServerConnection() = default;
-
-  // Sends data to the server encoded as JSON.
-  virtual bool Send(const Json::Value&) = 0;
-
-  // makes re-connect request
-  virtual void Reconnect();
-
- private:
-  virtual void Connect() = 0;
-};
-
-}  // namespace webrtc_streaming
-}  // namespace cuttlefish
diff --git a/host/frontend/webrtc/lib/streamer.cpp b/host/frontend/webrtc/lib/streamer.cpp
deleted file mode 100644
index 088b25b..0000000
--- a/host/frontend/webrtc/lib/streamer.cpp
+++ /dev/null
@@ -1,701 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "host/frontend/webrtc/lib/streamer.h"
-
-#include <android-base/logging.h>
-#include <json/json.h>
-
-#include <api/audio_codecs/audio_decoder_factory.h>
-#include <api/audio_codecs/audio_encoder_factory.h>
-#include <api/audio_codecs/builtin_audio_decoder_factory.h>
-#include <api/audio_codecs/builtin_audio_encoder_factory.h>
-#include <api/create_peerconnection_factory.h>
-#include <api/peer_connection_interface.h>
-#include <api/video_codecs/builtin_video_decoder_factory.h>
-#include <api/video_codecs/builtin_video_encoder_factory.h>
-#include <api/video_codecs/video_decoder_factory.h>
-#include <api/video_codecs/video_encoder_factory.h>
-#include <media/base/video_broadcaster.h>
-#include <pc/video_track_source.h>
-
-#include "host/frontend/webrtc/lib/audio_device.h"
-#include "host/frontend/webrtc/lib/audio_track_source_impl.h"
-#include "host/frontend/webrtc/lib/camera_streamer.h"
-#include "host/frontend/webrtc/lib/client_handler.h"
-#include "host/frontend/webrtc/lib/port_range_socket_factory.h"
-#include "host/frontend/webrtc/lib/video_track_source_impl.h"
-#include "host/frontend/webrtc/lib/vp8only_encoder_factory.h"
-#include "host/frontend/webrtc_operator/constants/signaling_constants.h"
-
-namespace cuttlefish {
-namespace webrtc_streaming {
-namespace {
-
-constexpr auto kStreamIdField = "stream_id";
-constexpr auto kXResField = "x_res";
-constexpr auto kYResField = "y_res";
-constexpr auto kDpiField = "dpi";
-constexpr auto kIsTouchField = "is_touch";
-constexpr auto kDisplaysField = "displays";
-constexpr auto kAudioStreamsField = "audio_streams";
-constexpr auto kHardwareField = "hardware";
-constexpr auto kControlPanelButtonCommand = "command";
-constexpr auto kControlPanelButtonTitle = "title";
-constexpr auto kControlPanelButtonIconName = "icon_name";
-constexpr auto kControlPanelButtonShellCommand = "shell_command";
-constexpr auto kControlPanelButtonDeviceStates = "device_states";
-constexpr auto kControlPanelButtonLidSwitchOpen = "lid_switch_open";
-constexpr auto kControlPanelButtonHingeAngleValue = "hinge_angle_value";
-constexpr auto kCustomControlPanelButtonsField = "custom_control_panel_buttons";
-
-constexpr int kRegistrationRetries = 3;
-constexpr int kRetryFirstIntervalMs = 1000;
-constexpr int kReconnectRetries = 100;
-constexpr int kReconnectIntervalMs = 1000;
-
-bool ParseMessage(const uint8_t* data, size_t length, Json::Value* msg_out) {
-  auto str = reinterpret_cast<const char*>(data);
-  Json::CharReaderBuilder builder;
-  std::unique_ptr<Json::CharReader> json_reader(builder.newCharReader());
-  std::string errorMessage;
-  return json_reader->parse(str, str + length, msg_out, &errorMessage);
-}
-
-std::unique_ptr<rtc::Thread> CreateAndStartThread(const std::string& name) {
-  auto thread = rtc::Thread::CreateWithSocketServer();
-  if (!thread) {
-    LOG(ERROR) << "Failed to create " << name << " thread";
-    return nullptr;
-  }
-  thread->SetName(name, nullptr);
-  if (!thread->Start()) {
-    LOG(ERROR) << "Failed to start " << name << " thread";
-    return nullptr;
-  }
-  return thread;
-}
-
-struct DisplayDescriptor {
-  int width;
-  int height;
-  int dpi;
-  bool touch_enabled;
-  rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source;
-};
-
-struct ControlPanelButtonDescriptor {
-  std::string command;
-  std::string title;
-  std::string icon_name;
-  std::optional<std::string> shell_command;
-  std::vector<DeviceState> device_states;
-};
-
-// TODO (jemoreira): move to a place in common with the signaling server
-struct OperatorServerConfig {
-  std::vector<webrtc::PeerConnectionInterface::IceServer> servers;
-};
-
-// Wraps a scoped_refptr pointer to an audio device module
-class AudioDeviceModuleWrapper : public AudioSource {
- public:
-  AudioDeviceModuleWrapper(
-      rtc::scoped_refptr<CfAudioDeviceModule> device_module)
-      : device_module_(device_module) {}
-  int GetMoreAudioData(void* data, int bytes_per_sample,
-                       int samples_per_channel, int num_channels,
-                       int sample_rate, bool& muted) override {
-    return device_module_->GetMoreAudioData(data, bytes_per_sample,
-                                            samples_per_channel, num_channels,
-                                            sample_rate, muted);
-  }
-
-  rtc::scoped_refptr<CfAudioDeviceModule> device_module() {
-    return device_module_;
-  }
-
- private:
-  rtc::scoped_refptr<CfAudioDeviceModule> device_module_;
-};
-
-}  // namespace
-
-
-class Streamer::Impl : public ServerConnectionObserver,
-                       public std::enable_shared_from_this<ServerConnectionObserver> {
- public:
-  std::shared_ptr<ClientHandler> CreateClientHandler(int client_id);
-
-  void Register(std::weak_ptr<OperatorObserver> observer);
-
-  void SendMessageToClient(int client_id, const Json::Value& msg);
-  void DestroyClientHandler(int client_id);
-  void SetupCameraForClient(int client_id);
-
-  // WsObserver
-  void OnOpen() override;
-  void OnClose() override;
-  void OnError(const std::string& error) override;
-  void OnReceive(const uint8_t* msg, size_t length, bool is_binary) override;
-
-  void HandleConfigMessage(const Json::Value& msg);
-  void HandleClientMessage(const Json::Value& server_message);
-
-  // All accesses to these variables happen from the signal_thread, so there is
-  // no need for extra synchronization mechanisms (mutex)
-  StreamerConfig config_;
-  OperatorServerConfig operator_config_;
-  std::unique_ptr<ServerConnection> server_connection_;
-  std::shared_ptr<ConnectionObserverFactory> connection_observer_factory_;
-  rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface>
-      peer_connection_factory_;
-  std::unique_ptr<rtc::Thread> network_thread_;
-  std::unique_ptr<rtc::Thread> worker_thread_;
-  std::unique_ptr<rtc::Thread> signal_thread_;
-  std::map<std::string, DisplayDescriptor> displays_;
-  std::map<std::string, rtc::scoped_refptr<AudioTrackSourceImpl>>
-      audio_sources_;
-  std::map<int, std::shared_ptr<ClientHandler>> clients_;
-  std::weak_ptr<OperatorObserver> operator_observer_;
-  std::map<std::string, std::string> hardware_;
-  std::vector<ControlPanelButtonDescriptor> custom_control_panel_buttons_;
-  std::shared_ptr<AudioDeviceModuleWrapper> audio_device_module_;
-  std::unique_ptr<CameraStreamer> camera_streamer_;
-  int registration_retries_left_ = kRegistrationRetries;
-  int retry_interval_ms_ = kRetryFirstIntervalMs;
-};
-
-Streamer::Streamer(std::unique_ptr<Streamer::Impl> impl)
-    : impl_(std::move(impl)) {}
-
-/* static */
-std::unique_ptr<Streamer> Streamer::Create(
-    const StreamerConfig& cfg,
-    std::shared_ptr<ConnectionObserverFactory> connection_observer_factory) {
-
-  rtc::LogMessage::LogToDebug(rtc::LS_ERROR);
-
-  std::unique_ptr<Streamer::Impl> impl(new Streamer::Impl());
-  impl->config_ = cfg;
-  impl->connection_observer_factory_ = connection_observer_factory;
-
-  impl->network_thread_ = CreateAndStartThread("network-thread");
-  impl->worker_thread_ = CreateAndStartThread("work-thread");
-  impl->signal_thread_ = CreateAndStartThread("signal-thread");
-  if (!impl->network_thread_ || !impl->worker_thread_ ||
-      !impl->signal_thread_) {
-    return nullptr;
-  }
-
-  impl->audio_device_module_ = std::make_shared<AudioDeviceModuleWrapper>(
-      rtc::scoped_refptr<CfAudioDeviceModule>(
-          new rtc::RefCountedObject<CfAudioDeviceModule>()));
-
-  impl->peer_connection_factory_ = webrtc::CreatePeerConnectionFactory(
-      impl->network_thread_.get(), impl->worker_thread_.get(),
-      impl->signal_thread_.get(), impl->audio_device_module_->device_module(),
-      webrtc::CreateBuiltinAudioEncoderFactory(),
-      webrtc::CreateBuiltinAudioDecoderFactory(),
-      std::make_unique<VP8OnlyEncoderFactory>(
-          webrtc::CreateBuiltinVideoEncoderFactory()),
-      webrtc::CreateBuiltinVideoDecoderFactory(), nullptr /* audio_mixer */,
-      nullptr /* audio_processing */);
-
-  if (!impl->peer_connection_factory_) {
-    LOG(ERROR) << "Failed to create peer connection factory";
-    return nullptr;
-  }
-
-  webrtc::PeerConnectionFactoryInterface::Options options;
-  // By default the loopback network is ignored, but generating candidates for
-  // it is useful when using TCP port forwarding.
-  options.network_ignore_mask = 0;
-  impl->peer_connection_factory_->SetOptions(options);
-
-  return std::unique_ptr<Streamer>(new Streamer(std::move(impl)));
-}
-
-std::shared_ptr<VideoSink> Streamer::AddDisplay(const std::string& label,
-                                                int width, int height, int dpi,
-                                                bool touch_enabled) {
-  // Usually called from an application thread
-  return impl_->signal_thread_->Invoke<std::shared_ptr<VideoSink>>(
-      RTC_FROM_HERE,
-      [this, &label, width, height, dpi,
-       touch_enabled]() -> std::shared_ptr<VideoSink> {
-        if (impl_->displays_.count(label)) {
-          LOG(ERROR) << "Display with same label already exists: " << label;
-          return nullptr;
-        }
-        rtc::scoped_refptr<VideoTrackSourceImpl> source(
-            new rtc::RefCountedObject<VideoTrackSourceImpl>(width, height));
-        impl_->displays_[label] = {width, height, dpi, touch_enabled, source};
-        return std::shared_ptr<VideoSink>(
-            new VideoTrackSourceImplSinkWrapper(source));
-      });
-}
-
-std::shared_ptr<AudioSink> Streamer::AddAudioStream(const std::string& label) {
-  // Usually called from an application thread
-  return impl_->signal_thread_->Invoke<std::shared_ptr<AudioSink>>(
-      RTC_FROM_HERE, [this, &label]() -> std::shared_ptr<AudioSink> {
-        if (impl_->audio_sources_.count(label)) {
-          LOG(ERROR) << "Audio stream with same label already exists: "
-                     << label;
-          return nullptr;
-        }
-        rtc::scoped_refptr<AudioTrackSourceImpl> source(
-            new rtc::RefCountedObject<AudioTrackSourceImpl>());
-        impl_->audio_sources_[label] = source;
-        return std::shared_ptr<AudioSink>(
-            new AudioTrackSourceImplSinkWrapper(source));
-      });
-}
-
-std::shared_ptr<AudioSource> Streamer::GetAudioSource() {
-  return impl_->audio_device_module_;
-}
-
-CameraController* Streamer::AddCamera(unsigned int port, unsigned int cid) {
-  impl_->camera_streamer_ = std::make_unique<CameraStreamer>(port, cid);
-  return impl_->camera_streamer_.get();
-}
-
-void Streamer::SetHardwareSpec(std::string key, std::string value) {
-  impl_->hardware_.emplace(key, value);
-}
-
-void Streamer::AddCustomControlPanelButton(const std::string& command,
-                                           const std::string& title,
-                                           const std::string& icon_name) {
-  ControlPanelButtonDescriptor button = {
-      .command = command, .title = title, .icon_name = icon_name};
-  impl_->custom_control_panel_buttons_.push_back(button);
-}
-
-void Streamer::AddCustomControlPanelButtonWithShellCommand(
-    const std::string& command, const std::string& title,
-    const std::string& icon_name, const std::string& shell_command) {
-  ControlPanelButtonDescriptor button = {
-      .command = command, .title = title, .icon_name = icon_name};
-  button.shell_command = shell_command;
-  impl_->custom_control_panel_buttons_.push_back(button);
-}
-
-void Streamer::AddCustomControlPanelButtonWithDeviceStates(
-    const std::string& command, const std::string& title,
-    const std::string& icon_name,
-    const std::vector<DeviceState>& device_states) {
-  ControlPanelButtonDescriptor button = {
-      .command = command, .title = title, .icon_name = icon_name};
-  button.device_states = device_states;
-  impl_->custom_control_panel_buttons_.push_back(button);
-}
-
-void Streamer::Register(std::weak_ptr<OperatorObserver> observer) {
-  // Usually called from an application thread
-  // No need to block the calling thread on this, the observer will be notified
-  // when the connection is established.
-  impl_->signal_thread_->PostTask(RTC_FROM_HERE, [this, observer]() {
-    impl_->Register(observer);
-  });
-}
-
-void Streamer::Unregister() {
-  // Usually called from an application thread.
-  impl_->signal_thread_->PostTask(
-      RTC_FROM_HERE, [this]() { impl_->server_connection_.reset(); });
-}
-
-void Streamer::RecordDisplays(LocalRecorder& recorder) {
-  for (auto& [key, display] : impl_->displays_) {
-    rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source =
-        display.source;
-    auto deleter = [](webrtc::VideoTrackSourceInterface* source) {
-      source->Release();
-    };
-    std::shared_ptr<webrtc::VideoTrackSourceInterface> source_shared(
-        source.release(), deleter);
-    recorder.AddDisplay(display.width, display.height, source_shared);
-  }
-}
-
-void Streamer::Impl::Register(std::weak_ptr<OperatorObserver> observer) {
-  operator_observer_ = observer;
-  // When the connection is established the OnOpen function will be called where
-  // the registration will take place
-  if (!server_connection_) {
-    server_connection_ =
-        ServerConnection::Connect(config_.operator_server, weak_from_this());
-  } else {
-    // in case connection attempt is retried, just call Reconnect().
-    // Recreating server_connection_ object will destroy existing WSConnection
-    // object and task re-scheduling will fail
-    server_connection_->Reconnect();
-  }
-}
-
-void Streamer::Impl::OnOpen() {
-  // Called from the websocket thread.
-  // Connected to operator.
-  signal_thread_->PostTask(RTC_FROM_HERE, [this]() {
-    Json::Value register_obj;
-    register_obj[cuttlefish::webrtc_signaling::kTypeField] =
-        cuttlefish::webrtc_signaling::kRegisterType;
-    register_obj[cuttlefish::webrtc_signaling::kDeviceIdField] =
-        config_.device_id;
-    CHECK(config_.client_files_port >= 0) << "Invalide device port provided";
-    register_obj[cuttlefish::webrtc_signaling::kDevicePortField] =
-        config_.client_files_port;
-
-    Json::Value device_info;
-    Json::Value displays(Json::ValueType::arrayValue);
-    // No need to synchronize with other accesses to display_ because all
-    // happens on signal_thread.
-    for (auto& entry : displays_) {
-      Json::Value display;
-      display[kStreamIdField] = entry.first;
-      display[kXResField] = entry.second.width;
-      display[kYResField] = entry.second.height;
-      display[kDpiField] = entry.second.dpi;
-      display[kIsTouchField] = true;
-      displays.append(display);
-    }
-    device_info[kDisplaysField] = displays;
-    Json::Value audio_streams(Json::ValueType::arrayValue);
-    for (auto& entry : audio_sources_) {
-      Json::Value audio;
-      audio[kStreamIdField] = entry.first;
-      audio_streams.append(audio);
-    }
-    device_info[kAudioStreamsField] = audio_streams;
-    Json::Value hardware;
-    for (const auto& [k, v] : hardware_) {
-      hardware[k] = v;
-    }
-    device_info[kHardwareField] = hardware;
-    Json::Value custom_control_panel_buttons(Json::arrayValue);
-    for (const auto& button : custom_control_panel_buttons_) {
-      Json::Value button_entry;
-      button_entry[kControlPanelButtonCommand] = button.command;
-      button_entry[kControlPanelButtonTitle] = button.title;
-      button_entry[kControlPanelButtonIconName] = button.icon_name;
-      if (button.shell_command) {
-        button_entry[kControlPanelButtonShellCommand] = *(button.shell_command);
-      } else if (!button.device_states.empty()) {
-        Json::Value device_states(Json::arrayValue);
-        for (const DeviceState& device_state : button.device_states) {
-          Json::Value device_state_entry;
-          if (device_state.lid_switch_open) {
-            device_state_entry[kControlPanelButtonLidSwitchOpen] =
-                *device_state.lid_switch_open;
-          }
-          if (device_state.hinge_angle_value) {
-            device_state_entry[kControlPanelButtonHingeAngleValue] =
-                *device_state.hinge_angle_value;
-          }
-          device_states.append(device_state_entry);
-        }
-        button_entry[kControlPanelButtonDeviceStates] = device_states;
-      }
-      custom_control_panel_buttons.append(button_entry);
-    }
-    device_info[kCustomControlPanelButtonsField] = custom_control_panel_buttons;
-    register_obj[cuttlefish::webrtc_signaling::kDeviceInfoField] = device_info;
-    server_connection_->Send(register_obj);
-    // Do this last as OnRegistered() is user code and may take some time to
-    // complete (although it shouldn't...)
-    auto observer = operator_observer_.lock();
-    if (observer) {
-      observer->OnRegistered();
-    }
-  });
-}
-
-void Streamer::Impl::OnClose() {
-  // Called from websocket thread
-  // The operator shouldn't close the connection with the client, it's up to the
-  // device to decide when to disconnect.
-  LOG(WARNING) << "Connection with server closed unexpectedly";
-  signal_thread_->PostTask(RTC_FROM_HERE, [this]() {
-    auto observer = operator_observer_.lock();
-    if (observer) {
-      observer->OnClose();
-    }
-  });
-  LOG(INFO) << "Trying to re-connect to operator..";
-  registration_retries_left_ = kReconnectRetries;
-  retry_interval_ms_ = kReconnectIntervalMs;
-  signal_thread_->PostDelayedTask(
-      RTC_FROM_HERE, [this]() { Register(operator_observer_); },
-      retry_interval_ms_);
-}
-
-void Streamer::Impl::OnError(const std::string& error) {
-  // Called from websocket thread.
-  if (registration_retries_left_) {
-    LOG(WARNING) << "Connection to operator failed (" << error << "), "
-                 << registration_retries_left_ << " retries left"
-                 << " (will retry in " << retry_interval_ms_ / 1000 << "s)";
-    --registration_retries_left_;
-    signal_thread_->PostDelayedTask(
-        RTC_FROM_HERE,
-        [this]() {
-          // Need to reconnect and register again with operator
-          Register(operator_observer_);
-        },
-        retry_interval_ms_);
-    retry_interval_ms_ *= 2;
-  } else {
-    LOG(ERROR) << "Error on connection with the operator: " << error;
-    signal_thread_->PostTask(RTC_FROM_HERE, [this]() {
-      auto observer = operator_observer_.lock();
-      if (observer) {
-        observer->OnError();
-      }
-    });
-  }
-}
-
-void Streamer::Impl::HandleConfigMessage(const Json::Value& server_message) {
-  CHECK(signal_thread_->IsCurrent())
-      << __FUNCTION__ << " called from the wrong thread";
-  if (server_message.isMember("ice_servers") &&
-      server_message["ice_servers"].isArray()) {
-    auto servers = server_message["ice_servers"];
-    operator_config_.servers.clear();
-    for (int server_idx = 0; server_idx < servers.size(); server_idx++) {
-      auto server = servers[server_idx];
-      webrtc::PeerConnectionInterface::IceServer ice_server;
-      if (!server.isMember("urls") || !server["urls"].isArray()) {
-        // The urls field is required
-        LOG(WARNING)
-            << "Invalid ICE server specification obtained from server: "
-            << server.toStyledString();
-        continue;
-      }
-      auto urls = server["urls"];
-      for (int url_idx = 0; url_idx < urls.size(); url_idx++) {
-        auto url = urls[url_idx];
-        if (!url.isString()) {
-          LOG(WARNING) << "Non string 'urls' field in ice server: "
-                       << url.toStyledString();
-          continue;
-        }
-        ice_server.urls.push_back(url.asString());
-        if (server.isMember("credential") && server["credential"].isString()) {
-          ice_server.password = server["credential"].asString();
-        }
-        if (server.isMember("username") && server["username"].isString()) {
-          ice_server.username = server["username"].asString();
-        }
-        operator_config_.servers.push_back(ice_server);
-      }
-    }
-  }
-}
-
-void Streamer::Impl::HandleClientMessage(const Json::Value& server_message) {
-  CHECK(signal_thread_->IsCurrent())
-      << __FUNCTION__ << " called from the wrong thread";
-  if (!server_message.isMember(cuttlefish::webrtc_signaling::kClientIdField) ||
-      !server_message[cuttlefish::webrtc_signaling::kClientIdField].isInt()) {
-    LOG(ERROR) << "Client message received without valid client id";
-    return;
-  }
-  auto client_id =
-      server_message[cuttlefish::webrtc_signaling::kClientIdField].asInt();
-  if (!server_message.isMember(cuttlefish::webrtc_signaling::kPayloadField)) {
-    LOG(WARNING) << "Received empty client message";
-    return;
-  }
-  auto client_message =
-      server_message[cuttlefish::webrtc_signaling::kPayloadField];
-  if (clients_.count(client_id) == 0) {
-    auto client_handler = CreateClientHandler(client_id);
-    if (!client_handler) {
-      LOG(ERROR) << "Failed to create a new client handler";
-      return;
-    }
-    clients_.emplace(client_id, client_handler);
-  }
-  auto client_handler = clients_[client_id];
-
-  client_handler->HandleMessage(client_message);
-}
-
-void Streamer::Impl::OnReceive(const uint8_t* msg, size_t length,
-                               bool is_binary) {
-  // Usually called from websocket thread.
-  Json::Value server_message;
-  // Once OnReceive returns the buffer can be destroyed/recycled at any time, so
-  // parse the data into a JSON object while still on the websocket thread.
-  if (is_binary || !ParseMessage(msg, length, &server_message)) {
-    LOG(ERROR) << "Received invalid JSON from server: '"
-               << (is_binary ? std::string("(binary_data)")
-                             : std::string(msg, msg + length))
-               << "'";
-    return;
-  }
-  // Transition to the signal thread before member variables are accessed.
-  signal_thread_->PostTask(RTC_FROM_HERE, [this, server_message]() {
-    if (!server_message.isMember(cuttlefish::webrtc_signaling::kTypeField) ||
-        !server_message[cuttlefish::webrtc_signaling::kTypeField].isString()) {
-      LOG(ERROR) << "No message_type field from server";
-      // Notify the caller
-      OnError(
-          "Invalid message received from operator: no message type field "
-          "present");
-      return;
-    }
-    auto type =
-        server_message[cuttlefish::webrtc_signaling::kTypeField].asString();
-    if (type == cuttlefish::webrtc_signaling::kConfigType) {
-      HandleConfigMessage(server_message);
-    } else if (type == cuttlefish::webrtc_signaling::kClientDisconnectType) {
-      if (!server_message.isMember(
-              cuttlefish::webrtc_signaling::kClientIdField) ||
-          !server_message.isMember(
-              cuttlefish::webrtc_signaling::kClientIdField)) {
-        LOG(ERROR) << "Invalid disconnect message received from server";
-        // Notify the caller
-        OnError("Invalid disconnect message: client_id is required");
-        return;
-      }
-      auto client_id =
-          server_message[cuttlefish::webrtc_signaling::kClientIdField].asInt();
-      LOG(INFO) << "Client " << client_id << " has disconnected.";
-      DestroyClientHandler(client_id);
-    } else if (type == cuttlefish::webrtc_signaling::kClientMessageType) {
-      HandleClientMessage(server_message);
-    } else {
-      LOG(ERROR) << "Unknown message type: " << type;
-      // Notify the caller
-      OnError("Invalid message received from operator: unknown message type");
-      return;
-    }
-  });
-}
-
-std::shared_ptr<ClientHandler> Streamer::Impl::CreateClientHandler(
-    int client_id) {
-  CHECK(signal_thread_->IsCurrent())
-      << __FUNCTION__ << " called from the wrong thread";
-  auto observer = connection_observer_factory_->CreateObserver();
-
-  auto client_handler = ClientHandler::Create(
-      client_id, observer,
-      [this, client_id](const Json::Value& msg) {
-        SendMessageToClient(client_id, msg);
-      },
-      [this, client_id](bool isOpen) {
-        if (isOpen) {
-          SetupCameraForClient(client_id);
-        } else {
-          DestroyClientHandler(client_id);
-        }
-      });
-
-  webrtc::PeerConnectionInterface::RTCConfiguration config;
-  config.sdp_semantics = webrtc::SdpSemantics::kUnifiedPlan;
-  config.enable_dtls_srtp = true;
-  config.servers.insert(config.servers.end(), operator_config_.servers.begin(),
-                        operator_config_.servers.end());
-  webrtc::PeerConnectionDependencies dependencies(client_handler.get());
-  // PortRangeSocketFactory's super class' constructor needs to be called on the
-  // network thread or have it as a parameter
-  dependencies.packet_socket_factory.reset(new PortRangeSocketFactory(
-      network_thread_.get(), config_.udp_port_range, config_.tcp_port_range));
-  auto peer_connection = peer_connection_factory_->CreatePeerConnection(
-      config, std::move(dependencies));
-
-  if (!peer_connection) {
-    LOG(ERROR) << "Failed to create peer connection";
-    return nullptr;
-  }
-
-  if (!client_handler->SetPeerConnection(std::move(peer_connection))) {
-    return nullptr;
-  }
-
-  for (auto& entry : displays_) {
-    auto& label = entry.first;
-    auto& video_source = entry.second.source;
-
-    auto video_track =
-        peer_connection_factory_->CreateVideoTrack(label, video_source.get());
-    client_handler->AddDisplay(video_track, label);
-  }
-
-  for (auto& entry : audio_sources_) {
-    auto& label = entry.first;
-    auto& audio_stream = entry.second;
-    auto audio_track =
-        peer_connection_factory_->CreateAudioTrack(label, audio_stream.get());
-    client_handler->AddAudio(audio_track, label);
-  }
-
-  return client_handler;
-}
-
-void Streamer::Impl::SendMessageToClient(int client_id,
-                                         const Json::Value& msg) {
-  LOG(VERBOSE) << "Sending to client: " << msg.toStyledString();
-  CHECK(signal_thread_->IsCurrent())
-      << __FUNCTION__ << " called from the wrong thread";
-  Json::Value wrapper;
-  wrapper[cuttlefish::webrtc_signaling::kPayloadField] = msg;
-  wrapper[cuttlefish::webrtc_signaling::kTypeField] =
-      cuttlefish::webrtc_signaling::kForwardType;
-  wrapper[cuttlefish::webrtc_signaling::kClientIdField] = client_id;
-  // This is safe to call from the webrtc threads because
-  // ServerConnection(s) are thread safe
-  server_connection_->Send(wrapper);
-}
-
-void Streamer::Impl::DestroyClientHandler(int client_id) {
-  // Usually called from signal thread, could be called from websocket thread or
-  // an application thread.
-  signal_thread_->PostTask(RTC_FROM_HERE, [this, client_id]() {
-    // This needs to be 'posted' to the thread instead of 'invoked'
-    // immediately for two reasons:
-    // * The client handler is destroyed by this code, it's generally a
-    // bad idea (though not necessarily wrong) to return to a member
-    // function of a destroyed object.
-    // * The client handler may call this from within a peer connection
-    // observer callback, destroying the client handler there leads to a
-    // deadlock.
-    clients_.erase(client_id);
-  });
-}
-
-void Streamer::Impl::SetupCameraForClient(int client_id) {
-  if (!camera_streamer_) {
-    return;
-  }
-  auto client_handler = clients_[client_id];
-  if (client_handler) {
-    auto camera_track = client_handler->GetCameraStream();
-    if (camera_track) {
-      camera_track->AddOrUpdateSink(camera_streamer_.get(),
-                                    rtc::VideoSinkWants());
-    }
-  }
-}
-
-}  // namespace webrtc_streaming
-}  // namespace cuttlefish
diff --git a/host/frontend/webrtc/lib/streamer.h b/host/frontend/webrtc/lib/streamer.h
deleted file mode 100644
index bc2297e..0000000
--- a/host/frontend/webrtc/lib/streamer.h
+++ /dev/null
@@ -1,124 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#pragma once
-
-#include <functional>
-#include <memory>
-#include <mutex>
-#include <optional>
-#include <string>
-#include <utility>
-#include <vector>
-
-#include "host/libs/config/custom_actions.h"
-
-#include "host/frontend/webrtc/lib/audio_sink.h"
-#include "host/frontend/webrtc/lib/audio_source.h"
-#include "host/frontend/webrtc/lib/camera_controller.h"
-#include "host/frontend/webrtc/lib/connection_observer.h"
-#include "host/frontend/webrtc/lib/local_recorder.h"
-#include "host/frontend/webrtc/lib/video_sink.h"
-#include "host/frontend/webrtc/lib/server_connection.h"
-
-namespace cuttlefish {
-namespace webrtc_streaming {
-
-class ClientHandler;
-
-struct StreamerConfig {
-  // The id with which to register with the operator server.
-  std::string device_id;
-  // The port on which the client files are being served
-  int client_files_port;
-  ServerConfig operator_server;
-  // The port ranges webrtc is allowed to use.
-  // [0,0] means all ports
-  std::pair<uint16_t, uint16_t> udp_port_range = {15550, 15558};
-  std::pair<uint16_t, uint16_t> tcp_port_range = {15550, 15558};
-};
-
-class OperatorObserver {
- public:
-  virtual ~OperatorObserver() = default;
-  // Called when the websocket connection with the operator is established.
-  virtual void OnRegistered() = 0;
-  // Called when the websocket connection with the operator is closed.
-  virtual void OnClose() = 0;
-  // Called when an error is encountered in the connection to the operator.
-  virtual void OnError() = 0;
-};
-
-class Streamer {
- public:
-  // The observer_factory will be used to create an observer for every new
-  // client connection. Unregister() needs to be called to stop accepting
-  // connections.
-  static std::unique_ptr<Streamer> Create(
-      const StreamerConfig& cfg,
-      std::shared_ptr<ConnectionObserverFactory> factory);
-  ~Streamer() = default;
-
-  std::shared_ptr<VideoSink> AddDisplay(const std::string& label, int width,
-                                        int height, int dpi,
-                                        bool touch_enabled);
-
-  void SetHardwareSpec(std::string key, std::string value);
-
-  template <typename V>
-  void SetHardwareSpec(std::string key, V value) {
-    SetHardwareSpec(key, std::to_string(value));
-  }
-
-  std::shared_ptr<AudioSink> AddAudioStream(const std::string& label);
-  // Grants access to streams originating on the client side. If there are
-  // multiple streams (either because one client sends more than one or there
-  // are several clients) the audio will be mixed and provided as a single
-  // stream here.
-  std::shared_ptr<AudioSource> GetAudioSource();
-
-  CameraController* AddCamera(unsigned int port, unsigned int cid);
-
-  // Add a custom button to the control panel.
-  void AddCustomControlPanelButton(const std::string& command,
-                                   const std::string& title,
-                                   const std::string& icon_name);
-  void AddCustomControlPanelButtonWithShellCommand(
-      const std::string& command, const std::string& title,
-      const std::string& icon_name, const std::string& shell_command);
-  void AddCustomControlPanelButtonWithDeviceStates(
-      const std::string& command, const std::string& title,
-      const std::string& icon_name,
-      const std::vector<DeviceState>& device_states);
-
-  // Register with the operator.
-  void Register(std::weak_ptr<OperatorObserver> operator_observer);
-  void Unregister();
-
-  void RecordDisplays(LocalRecorder& recorder);
- private:
-  /*
-   * Private Implementation idiom.
-   * https://en.cppreference.com/w/cpp/language/pimpl
-   */
-  class Impl;
-
-  Streamer(std::unique_ptr<Impl> impl);
-  std::shared_ptr<Impl> impl_;
-};
-
-}  // namespace webrtc_streaming
-}  // namespace cuttlefish
diff --git a/host/frontend/webrtc/lib/utils.cpp b/host/frontend/webrtc/lib/utils.cpp
deleted file mode 100644
index e84b897..0000000
--- a/host/frontend/webrtc/lib/utils.cpp
+++ /dev/null
@@ -1,76 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "host/frontend/webrtc/lib/utils.h"
-
-#include <map>
-
-#include <json/json.h>
-
-namespace cuttlefish {
-namespace webrtc_streaming {
-
-namespace {
-
-std::string ValidateField(const Json::Value &obj, const std::string &type,
-                          const std::string &field_name,
-                          const Json::ValueType &field_type, bool required) {
-  if (!obj.isObject()) {
-    return "Expected object with name-value pairs";
-  }
-  if (!obj.isMember(field_name) && !required) {
-    return "";
-  }
-  if (!(obj.isMember(field_name) &&
-        obj[field_name].isConvertibleTo(field_type))) {
-    std::string error_msg = "Expected a field named '";
-    error_msg += field_name + "' of type '";
-    error_msg += std::to_string(field_type);
-    error_msg += "'";
-    if (!type.empty()) {
-      error_msg += " in message of type '" + type + "'";
-    }
-    error_msg += ".";
-    return error_msg;
-  }
-  return "";
-}
-
-}  // namespace
-
-ValidationResult ValidationResult::ValidateJsonObject(
-    const Json::Value &obj, const std::string &type,
-    const std::map<std::string, Json::ValueType> &required_fields,
-    const std::map<std::string, Json::ValueType> &optional_fields) {
-  for (const auto &field_spec : required_fields) {
-    auto result =
-        ValidateField(obj, type, field_spec.first, field_spec.second, true);
-    if (!result.empty()) {
-      return {result};
-    }
-  }
-  for (const auto &field_spec : optional_fields) {
-    auto result =
-        ValidateField(obj, type, field_spec.first, field_spec.second, false);
-    if (!result.empty()) {
-      return {result};
-    }
-  }
-  return {};
-}
-
-}  // namespace webrtc_streaming
-}  // namespace cuttlefish
diff --git a/host/frontend/webrtc/lib/utils.h b/host/frontend/webrtc/lib/utils.h
deleted file mode 100644
index 169221c..0000000
--- a/host/frontend/webrtc/lib/utils.h
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#pragma once
-
-#include <map>
-#include <optional>
-
-#include <json/json.h>
-
-namespace cuttlefish {
-namespace webrtc_streaming {
-
-class ValidationResult {
- public:
-  ValidationResult() = default;
-  ValidationResult(const std::string &error) : error_(error) {}
-
-  // Helper method to ensure a json object has the required fields convertible
-  // to the appropriate types.
-  static ValidationResult ValidateJsonObject(
-      const Json::Value &obj, const std::string &type,
-      const std::map<std::string, Json::ValueType> &required_fields,
-      const std::map<std::string, Json::ValueType> &optional_fields = {});
-
-  bool ok() const { return !error_.has_value(); }
-  std::string error() const { return error_.value_or(""); }
-
- private:
-  std::optional<std::string> error_;
-};
-
-}  // namespace webrtc_streaming
-}  // namespace cuttlefish
diff --git a/host/frontend/webrtc/lib/video_sink.h b/host/frontend/webrtc/lib/video_sink.h
deleted file mode 100644
index 118a7c2..0000000
--- a/host/frontend/webrtc/lib/video_sink.h
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#pragma once
-
-#include <memory>
-
-#include "host/frontend/webrtc/lib/video_frame_buffer.h"
-
-namespace cuttlefish {
-namespace webrtc_streaming {
-
-class VideoSink {
- public:
-  virtual ~VideoSink() = default;
-  virtual void OnFrame(std::shared_ptr<VideoFrameBuffer> frame,
-                       int64_t timestamp_us) = 0;
-};
-
-}  // namespace webrtc_streaming
-}  // namespace cuttlefish
diff --git a/host/frontend/webrtc/lib/video_track_source_impl.cpp b/host/frontend/webrtc/lib/video_track_source_impl.cpp
deleted file mode 100644
index 6785e15..0000000
--- a/host/frontend/webrtc/lib/video_track_source_impl.cpp
+++ /dev/null
@@ -1,80 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "host/frontend/webrtc/lib/video_track_source_impl.h"
-
-#include <api/video/video_frame_buffer.h>
-
-namespace cuttlefish {
-namespace webrtc_streaming {
-
-namespace {
-
-class VideoFrameWrapper : public webrtc::I420BufferInterface {
- public:
-  VideoFrameWrapper(
-      std::shared_ptr<::cuttlefish::webrtc_streaming::VideoFrameBuffer>
-          frame_buffer)
-      : frame_buffer_(frame_buffer) {}
-  ~VideoFrameWrapper() override = default;
-  // From VideoFrameBuffer
-  int width() const override { return frame_buffer_->width(); }
-  int height() const override { return frame_buffer_->height(); }
-
-  // From class PlanarYuvBuffer
-  int StrideY() const override { return frame_buffer_->StrideY(); }
-  int StrideU() const override { return frame_buffer_->StrideU(); }
-  int StrideV() const override { return frame_buffer_->StrideV(); }
-
-  // From class PlanarYuv8Buffer
-  const uint8_t *DataY() const override { return frame_buffer_->DataY(); }
-  const uint8_t *DataU() const override { return frame_buffer_->DataU(); }
-  const uint8_t *DataV() const override { return frame_buffer_->DataV(); }
-
- private:
-  std::shared_ptr<::cuttlefish::webrtc_streaming::VideoFrameBuffer>
-      frame_buffer_;
-};
-
-}  // namespace
-
-VideoTrackSourceImpl::VideoTrackSourceImpl(int width, int height)
-    : webrtc::VideoTrackSource(false), width_(width), height_(height) {}
-
-void VideoTrackSourceImpl::OnFrame(std::shared_ptr<VideoFrameBuffer> frame,
-                                   int64_t timestamp_us) {
-  auto video_frame =
-      webrtc::VideoFrame::Builder()
-          .set_video_frame_buffer(
-              new rtc::RefCountedObject<VideoFrameWrapper>(frame))
-          .set_timestamp_us(timestamp_us)
-          .build();
-  broadcaster_.OnFrame(video_frame);
-}
-
-bool VideoTrackSourceImpl::GetStats(Stats *stats) {
-  stats->input_height = height_;
-  stats->input_width = width_;
-  return true;
-}
-
-bool VideoTrackSourceImpl::SupportsEncodedOutput() const { return false; }
-rtc::VideoSourceInterface<webrtc::VideoFrame> *VideoTrackSourceImpl::source() {
-  return &broadcaster_;
-}
-
-}  // namespace webrtc_streaming
-}  // namespace cuttlefish
diff --git a/host/frontend/webrtc/lib/video_track_source_impl.h b/host/frontend/webrtc/lib/video_track_source_impl.h
deleted file mode 100644
index b3861ac..0000000
--- a/host/frontend/webrtc/lib/video_track_source_impl.h
+++ /dev/null
@@ -1,77 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#pragma once
-
-#include <media/base/video_broadcaster.h>
-#include <pc/video_track_source.h>
-
-#include "host/frontend/webrtc/lib/video_sink.h"
-
-namespace cuttlefish {
-namespace webrtc_streaming {
-
-class VideoTrackSourceImpl : public webrtc::VideoTrackSource {
- public:
-  VideoTrackSourceImpl(int width, int height);
-
-  void OnFrame(std::shared_ptr<VideoFrameBuffer> frame, int64_t timestamp_us);
-
-  // Returns false if no stats are available, e.g, for a remote source, or a
-  // source which has not seen its first frame yet.
-  //
-  // Implementation should avoid blocking.
-  bool GetStats(Stats* stats) override;
-
-  bool SupportsEncodedOutput() const override;
-  void GenerateKeyFrame() override {}
-  void AddEncodedSink(
-      rtc::VideoSinkInterface<webrtc::RecordableEncodedFrame>* sink) override {}
-  void RemoveEncodedSink(
-      rtc::VideoSinkInterface<webrtc::RecordableEncodedFrame>* sink) override {}
-
-  rtc::VideoSourceInterface<webrtc::VideoFrame>* source() override;
-
- private:
-  int width_;
-  int height_;
-  rtc::VideoBroadcaster broadcaster_;
-};
-
-// Wraps a VideoTrackSourceImpl as an implementation of the VideoSink interface.
-// This is needed as the VideoTrackSourceImpl is a reference counted object that
-// should only be referenced by rtc::scoped_refptr pointers, but the
-// VideoSink interface is not a reference counted object and therefore not
-// compatible with that kind of pointers. This class can be referenced by a
-// shared pointer and it in turn holds a scoped_refptr to the wrapped object.
-class VideoTrackSourceImplSinkWrapper : public VideoSink {
- public:
-  virtual ~VideoTrackSourceImplSinkWrapper() = default;
-
-  VideoTrackSourceImplSinkWrapper(rtc::scoped_refptr<VideoTrackSourceImpl> obj)
-      : track_source_impl_(obj) {}
-
-  void OnFrame(std::shared_ptr<VideoFrameBuffer> frame,
-               int64_t timestamp_us) override {
-    track_source_impl_->OnFrame(frame, timestamp_us);
-  }
-
- private:
-  rtc::scoped_refptr<VideoTrackSourceImpl> track_source_impl_;
-};
-
-}  // namespace webrtc_streaming
-}  // namespace cuttlefish
diff --git a/host/frontend/webrtc/lib/vp8only_encoder_factory.cpp b/host/frontend/webrtc/lib/vp8only_encoder_factory.cpp
deleted file mode 100644
index ef69cfa..0000000
--- a/host/frontend/webrtc/lib/vp8only_encoder_factory.cpp
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "host/frontend/webrtc/lib/vp8only_encoder_factory.h"
-
-namespace cuttlefish {
-namespace webrtc_streaming {
-VP8OnlyEncoderFactory::VP8OnlyEncoderFactory(
-    std::unique_ptr<webrtc::VideoEncoderFactory> inner)
-    : inner_(std::move(inner)) {}
-
-std::vector<webrtc::SdpVideoFormat> VP8OnlyEncoderFactory::GetSupportedFormats()
-    const {
-  std::vector<webrtc::SdpVideoFormat> ret;
-  // Allow only VP8
-  for (auto& format : inner_->GetSupportedFormats()) {
-    if (format.name == "VP8") {
-      ret.push_back(format);
-    }
-  }
-  return ret;
-}
-
-webrtc::VideoEncoderFactory::CodecInfo VP8OnlyEncoderFactory::QueryVideoEncoder(
-    const webrtc::SdpVideoFormat& format) const {
-  return inner_->QueryVideoEncoder(format);
-}
-
-std::unique_ptr<webrtc::VideoEncoder> VP8OnlyEncoderFactory::CreateVideoEncoder(
-    const webrtc::SdpVideoFormat& format) {
-  return inner_->CreateVideoEncoder(format);
-}
-
-std::unique_ptr<webrtc::VideoEncoderFactory::EncoderSelectorInterface>
-VP8OnlyEncoderFactory::GetEncoderSelector() const {
-  return inner_->GetEncoderSelector();
-}
-
-}  // namespace webrtc_streaming
-}  // namespace cuttlefish
diff --git a/host/frontend/webrtc/lib/vp8only_encoder_factory.h b/host/frontend/webrtc/lib/vp8only_encoder_factory.h
deleted file mode 100644
index fcbdaeb..0000000
--- a/host/frontend/webrtc/lib/vp8only_encoder_factory.h
+++ /dev/null
@@ -1,44 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#pragma once
-
-#include <api/video_codecs/video_encoder_factory.h>
-#include <api/video_codecs/video_encoder.h>
-
-namespace cuttlefish {
-namespace webrtc_streaming {
-
-class VP8OnlyEncoderFactory : public webrtc::VideoEncoderFactory {
- public:
-  VP8OnlyEncoderFactory(std::unique_ptr<webrtc::VideoEncoderFactory> inner);
-
-  std::vector<webrtc::SdpVideoFormat> GetSupportedFormats() const override;
-
-  CodecInfo QueryVideoEncoder(
-      const webrtc::SdpVideoFormat& format) const override;
-
-  std::unique_ptr<webrtc::VideoEncoder> CreateVideoEncoder(
-      const webrtc::SdpVideoFormat& format) override;
-
-  std::unique_ptr<EncoderSelectorInterface> GetEncoderSelector() const override;
-
- private:
-  std::unique_ptr<webrtc::VideoEncoderFactory> inner_;
-};
-
-}  // namespace webrtc_streaming
-}  // namespace cuttlefish
diff --git a/host/frontend/webrtc/libcommon/Android.bp b/host/frontend/webrtc/libcommon/Android.bp
new file mode 100644
index 0000000..f6fe47d
--- /dev/null
+++ b/host/frontend/webrtc/libcommon/Android.bp
@@ -0,0 +1,48 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+cc_library_static {
+    name: "libcuttlefish_webrtc_common",
+    srcs: [
+        "audio_device.cpp",
+        "connection_controller.cpp",
+        "peer_connection_utils.cpp",
+        "port_range_socket_factory.cpp",
+        "vp8only_encoder_factory.cpp",
+        "utils.cpp",
+    ],
+    cflags: [
+        // libwebrtc headers need this
+        "-Wno-unused-parameter",
+        "-D_XOPEN_SOURCE",
+        "-DWEBRTC_POSIX",
+        "-DWEBRTC_LINUX",
+    ],
+    header_libs: [
+        "libwebrtc_absl_headers",
+    ],
+    static_libs: [
+        "libwebrtc",
+    ],
+    shared_libs: [
+        "libbase",
+        "libjsoncpp",
+    ],
+    defaults: ["cuttlefish_buildhost_only"],
+}
diff --git a/host/frontend/webrtc/libcommon/audio_device.cpp b/host/frontend/webrtc/libcommon/audio_device.cpp
new file mode 100644
index 0000000..cbbdd63
--- /dev/null
+++ b/host/frontend/webrtc/libcommon/audio_device.cpp
@@ -0,0 +1,247 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/frontend/webrtc/libcommon/audio_device.h"
+
+#include <string.h>
+
+#include <android-base/logging.h>
+
+#include <chrono>
+#include <thread>
+
+namespace cuttlefish {
+namespace webrtc_streaming {
+
+CfAudioDeviceModule::CfAudioDeviceModule() {}
+
+int CfAudioDeviceModule::GetMoreAudioData(void* data, int bytes_per_sample,
+                                          int samples_per_channel,
+                                          int num_channels, int sample_rate,
+                                          bool& muted) {
+  muted = !playing_ || !audio_callback_;
+  if (muted) {
+    return 0;
+  }
+
+  size_t read_samples;
+  int64_t elapsed_time;
+  int64_t ntp_time_ms;
+  auto res = audio_callback_->NeedMorePlayData(
+      samples_per_channel, bytes_per_sample, num_channels, sample_rate, data,
+      read_samples, &elapsed_time, &ntp_time_ms);
+  if (res != 0) {
+    return res;
+  }
+  return read_samples / num_channels;
+}
+
+// Retrieve the currently utilized audio layer
+int32_t CfAudioDeviceModule::ActiveAudioLayer(AudioLayer* audioLayer) const {
+  return -1;
+}
+
+// Full-duplex transportation of PCM audio
+int32_t CfAudioDeviceModule::RegisterAudioCallback(
+    webrtc::AudioTransport* audio_callback) {
+  audio_callback_ = audio_callback;
+  return 0;
+}
+
+// Main initialization and termination
+int32_t CfAudioDeviceModule::Init() { return 0; }
+int32_t CfAudioDeviceModule::Terminate() { return 0; }
+bool CfAudioDeviceModule::Initialized() const { return true; }
+
+// Device enumeration
+int16_t CfAudioDeviceModule::PlayoutDevices() { return 1; }
+int16_t CfAudioDeviceModule::RecordingDevices() { return 1; }
+int32_t CfAudioDeviceModule::PlayoutDeviceName(
+    uint16_t index, char name[webrtc::kAdmMaxDeviceNameSize],
+    char guid[webrtc::kAdmMaxGuidSize]) {
+  if (index != 0) {
+    return -1;
+  }
+  constexpr auto device_name = "Cuttlefish Webrtc Audio";
+  constexpr auto device_guid = "Cuttlefish Webrtc Audio Device Id";
+  strncpy(name, device_name, webrtc::kAdmMaxDeviceNameSize);
+  name[webrtc::kAdmMaxDeviceNameSize - 1] = '\0';
+  strncpy(guid, device_guid, webrtc::kAdmMaxGuidSize);
+  guid[webrtc::kAdmMaxGuidSize - 1] = '\0';
+  return 0;
+}
+int32_t CfAudioDeviceModule::RecordingDeviceName(
+    uint16_t index, char name[webrtc::kAdmMaxDeviceNameSize],
+    char guid[webrtc::kAdmMaxGuidSize]) {
+  if (index != 0) {
+    return -1;
+  }
+  constexpr auto device_name = "Cuttlefish Webrtc Audio";
+  constexpr auto device_guid = "Cuttlefish Webrtc Audio Device Id";
+  strncpy(name, device_name, webrtc::kAdmMaxDeviceNameSize);
+  name[webrtc::kAdmMaxDeviceNameSize - 1] = '\0';
+  strncpy(guid, device_guid, webrtc::kAdmMaxGuidSize);
+  guid[webrtc::kAdmMaxGuidSize - 1] = '\0';
+  return 0;
+}
+
+// Device selection
+int32_t CfAudioDeviceModule::SetPlayoutDevice(uint16_t index) { return 0; }
+int32_t CfAudioDeviceModule::SetPlayoutDevice(WindowsDeviceType device) {
+  return -1;
+}
+int32_t CfAudioDeviceModule::SetRecordingDevice(uint16_t index) { return 0; }
+int32_t CfAudioDeviceModule::SetRecordingDevice(WindowsDeviceType device) {
+  return -1;
+}
+
+// Audio transport initialization
+int32_t CfAudioDeviceModule::PlayoutIsAvailable(bool* available) {
+  *available = true;
+  return 0;
+}
+int32_t CfAudioDeviceModule::InitPlayout() { return 0; }
+bool CfAudioDeviceModule::PlayoutIsInitialized() const { return true; }
+int32_t CfAudioDeviceModule::RecordingIsAvailable(bool* available) {
+  *available = 0;
+  return 0;
+}
+int32_t CfAudioDeviceModule::InitRecording() { return 0; }
+bool CfAudioDeviceModule::RecordingIsInitialized() const { return true; }
+
+// Audio transport control
+int32_t CfAudioDeviceModule::StartPlayout() {
+  playing_ = true;
+  return 0;
+}
+int32_t CfAudioDeviceModule::StopPlayout() {
+  playing_ = false;
+  return 0;
+}
+bool CfAudioDeviceModule::Playing() const { return playing_; }
+int32_t CfAudioDeviceModule::StartRecording() {
+  recording_ = true;
+  return 0;
+}
+int32_t CfAudioDeviceModule::StopRecording() {
+  recording_ = false;
+  return 0;
+}
+bool CfAudioDeviceModule::Recording() const { return recording_; }
+
+// Audio mixer initialization
+int32_t CfAudioDeviceModule::InitSpeaker() { return -1; }
+bool CfAudioDeviceModule::SpeakerIsInitialized() const { return false; }
+int32_t CfAudioDeviceModule::InitMicrophone() { return 0; }
+bool CfAudioDeviceModule::MicrophoneIsInitialized() const { return true; }
+
+// Speaker volume controls
+int32_t CfAudioDeviceModule::SpeakerVolumeIsAvailable(bool* available) {
+  *available = false;
+  return 0;
+}
+int32_t CfAudioDeviceModule::SetSpeakerVolume(uint32_t volume) { return -1; }
+int32_t CfAudioDeviceModule::SpeakerVolume(uint32_t* volume) const {
+  return -1;
+}
+int32_t CfAudioDeviceModule::MaxSpeakerVolume(uint32_t* maxVolume) const {
+  return -1;
+}
+int32_t CfAudioDeviceModule::MinSpeakerVolume(uint32_t* minVolume) const {
+  return -1;
+}
+
+// Microphone volume controls
+int32_t CfAudioDeviceModule::MicrophoneVolumeIsAvailable(bool* available) {
+  *available = false;
+  return 0;
+}
+int32_t CfAudioDeviceModule::SetMicrophoneVolume(uint32_t volume) { return -1; }
+int32_t CfAudioDeviceModule::MicrophoneVolume(uint32_t* volume) const {
+  return -1;
+}
+int32_t CfAudioDeviceModule::MaxMicrophoneVolume(uint32_t* maxVolume) const {
+  return -1;
+}
+int32_t CfAudioDeviceModule::MinMicrophoneVolume(uint32_t* minVolume) const {
+  return -1;
+}
+
+// Speaker mute control
+int32_t CfAudioDeviceModule::SpeakerMuteIsAvailable(bool* available) {
+  *available = false;
+  return 0;
+}
+int32_t CfAudioDeviceModule::SetSpeakerMute(bool enable) { return -1; }
+int32_t CfAudioDeviceModule::SpeakerMute(bool* enabled) const { return -1; }
+
+// Microphone mute control
+int32_t CfAudioDeviceModule::MicrophoneMuteIsAvailable(bool* available) {
+  *available = false;
+  return 0;
+}
+int32_t CfAudioDeviceModule::SetMicrophoneMute(bool enable) { return -1; }
+int32_t CfAudioDeviceModule::MicrophoneMute(bool* enabled) const { return -1; }
+
+// Stereo support
+int32_t CfAudioDeviceModule::StereoPlayoutIsAvailable(bool* available) const {
+  *available = true;
+  return 0;
+}
+int32_t CfAudioDeviceModule::SetStereoPlayout(bool enable) {
+  stereo_playout_enabled_ = enable;
+  return 0;
+}
+int32_t CfAudioDeviceModule::StereoPlayout(bool* enabled) const {
+  *enabled = stereo_playout_enabled_;
+  return 0;
+}
+int32_t CfAudioDeviceModule::StereoRecordingIsAvailable(bool* available) const {
+  *available = true;
+  return 0;
+}
+int32_t CfAudioDeviceModule::SetStereoRecording(bool enable) {
+  stereo_recording_enabled_ = enable;
+  return 0;
+}
+int32_t CfAudioDeviceModule::StereoRecording(bool* enabled) const {
+  *enabled = stereo_recording_enabled_;
+  return 0;
+}
+
+// Playout delay
+int32_t CfAudioDeviceModule::PlayoutDelay(uint16_t* delayMS) const {
+  // There is currently no way to estimate the real delay for these streams.
+  // Given that 10ms buffers are used almost everywhere in the pipeline we know
+  // the delay is at least 10ms, so that's the best guess here.
+  *delayMS = 10;
+  return 0;
+}
+
+// Only supported on Android.
+bool CfAudioDeviceModule::BuiltInAECIsAvailable() const { return false; }
+bool CfAudioDeviceModule::BuiltInAGCIsAvailable() const { return false; }
+bool CfAudioDeviceModule::BuiltInNSIsAvailable() const { return false; }
+
+// Enables the built-in audio effects. Only supported on Android.
+int32_t CfAudioDeviceModule::EnableBuiltInAEC(bool enable) { return -1; }
+int32_t CfAudioDeviceModule::EnableBuiltInAGC(bool enable) { return -1; }
+int32_t CfAudioDeviceModule::EnableBuiltInNS(bool enable) { return -1; }
+
+int32_t CfAudioDeviceModule::GetPlayoutUnderrunCount() const { return -1; }
+
+}  // namespace webrtc_streaming
+}  // namespace cuttlefish
diff --git a/host/frontend/webrtc/libcommon/audio_device.h b/host/frontend/webrtc/libcommon/audio_device.h
new file mode 100644
index 0000000..fff0c5d
--- /dev/null
+++ b/host/frontend/webrtc/libcommon/audio_device.h
@@ -0,0 +1,144 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <modules/audio_device/include/audio_device.h>
+
+#include <atomic>
+
+#include "host/frontend/webrtc/libcommon/audio_source.h"
+
+namespace cuttlefish {
+namespace webrtc_streaming {
+
+class CfAudioDeviceModule : public webrtc::AudioDeviceModule,
+                            public AudioSource {
+ public:
+  CfAudioDeviceModule();
+  ~CfAudioDeviceModule() override = default;
+
+  // Returns number of frames if there is data available, 0 if the stream is not
+  // playing (no clients or the streams are muted), -1 on error.
+  int GetMoreAudioData(void* data, int bytes_per_samples, int samples_per_channel,
+                       int num_channels, int sample_rate, bool& muted) override;
+
+  // Retrieve the currently utilized audio layer
+  int32_t ActiveAudioLayer(AudioLayer* audioLayer) const override;
+
+  // Full-duplex transportation of PCM audio
+  int32_t RegisterAudioCallback(webrtc::AudioTransport* audioCallback) override;
+
+  // Main initialization and termination
+  int32_t Init() override;
+  int32_t Terminate() override;
+  bool Initialized() const override;
+
+  // Device enumeration
+  int16_t PlayoutDevices() override;
+  int16_t RecordingDevices() override;
+  int32_t PlayoutDeviceName(uint16_t index,
+                            char name[webrtc::kAdmMaxDeviceNameSize],
+                            char guid[webrtc::kAdmMaxGuidSize]) override;
+  int32_t RecordingDeviceName(uint16_t index,
+                              char name[webrtc::kAdmMaxDeviceNameSize],
+                              char guid[webrtc::kAdmMaxGuidSize]) override;
+
+  // Device selection
+  int32_t SetPlayoutDevice(uint16_t index) override;
+  int32_t SetPlayoutDevice(WindowsDeviceType device) override;
+  int32_t SetRecordingDevice(uint16_t index) override;
+  int32_t SetRecordingDevice(WindowsDeviceType device) override;
+
+  // Audio transport initialization
+  int32_t PlayoutIsAvailable(bool* available) override;
+  int32_t InitPlayout() override;
+  bool PlayoutIsInitialized() const override;
+  int32_t RecordingIsAvailable(bool* available) override;
+  int32_t InitRecording() override;
+  bool RecordingIsInitialized() const override;
+
+  // Audio transport control
+  int32_t StartPlayout() override;
+  int32_t StopPlayout() override;
+  bool Playing() const override;
+  int32_t StartRecording() override;
+  int32_t StopRecording() override;
+  bool Recording() const override;
+
+  // Audio mixer initialization
+  int32_t InitSpeaker() override;
+  bool SpeakerIsInitialized() const override;
+  int32_t InitMicrophone() override;
+  bool MicrophoneIsInitialized() const override;
+
+  // Speaker volume controls
+  int32_t SpeakerVolumeIsAvailable(bool* available) override;
+  int32_t SetSpeakerVolume(uint32_t volume) override;
+  int32_t SpeakerVolume(uint32_t* volume) const override;
+  int32_t MaxSpeakerVolume(uint32_t* maxVolume) const override;
+  int32_t MinSpeakerVolume(uint32_t* minVolume) const override;
+
+  // Microphone volume controls
+  int32_t MicrophoneVolumeIsAvailable(bool* available) override;
+  int32_t SetMicrophoneVolume(uint32_t volume) override;
+  int32_t MicrophoneVolume(uint32_t* volume) const override;
+  int32_t MaxMicrophoneVolume(uint32_t* maxVolume) const override;
+  int32_t MinMicrophoneVolume(uint32_t* minVolume) const override;
+
+  // Speaker mute control
+  int32_t SpeakerMuteIsAvailable(bool* available) override;
+  int32_t SetSpeakerMute(bool enable) override;
+  int32_t SpeakerMute(bool* enabled) const override;
+
+  // Microphone mute control
+  int32_t MicrophoneMuteIsAvailable(bool* available) override;
+  int32_t SetMicrophoneMute(bool enable) override;
+  int32_t MicrophoneMute(bool* enabled) const override;
+
+  // Stereo support
+  int32_t StereoPlayoutIsAvailable(bool* available) const override;
+  int32_t SetStereoPlayout(bool enable) override;
+  int32_t StereoPlayout(bool* enabled) const override;
+  int32_t StereoRecordingIsAvailable(bool* available) const override;
+  int32_t SetStereoRecording(bool enable) override;
+  int32_t StereoRecording(bool* enabled) const override;
+
+  // Playout delay
+  int32_t PlayoutDelay(uint16_t* delayMS) const override;
+
+  // Only supported on Android.
+  bool BuiltInAECIsAvailable() const override;
+  bool BuiltInAGCIsAvailable() const override;
+  bool BuiltInNSIsAvailable() const override;
+
+  // Enables the built-in audio effects. Only supported on Android.
+  int32_t EnableBuiltInAEC(bool enable) override;
+  int32_t EnableBuiltInAGC(bool enable) override;
+  int32_t EnableBuiltInNS(bool enable) override;
+
+  // Play underrun count. Only supported on Android (guest).
+  int32_t GetPlayoutUnderrunCount() const override;
+
+ private:
+  webrtc::AudioTransport* audio_callback_ = nullptr;
+  bool stereo_playout_enabled_ = true;
+  bool stereo_recording_enabled_ = true;
+  std::atomic<bool> playing_ = false;
+  std::atomic<bool> recording_ = false;
+};
+}  // namespace webrtc_streaming
+}  // namespace cuttlefish
diff --git a/host/frontend/webrtc/lib/audio_source.h b/host/frontend/webrtc/libcommon/audio_source.h
similarity index 100%
rename from host/frontend/webrtc/lib/audio_source.h
rename to host/frontend/webrtc/libcommon/audio_source.h
diff --git a/host/frontend/webrtc/libcommon/connection_controller.cpp b/host/frontend/webrtc/libcommon/connection_controller.cpp
new file mode 100644
index 0000000..c776258
--- /dev/null
+++ b/host/frontend/webrtc/libcommon/connection_controller.cpp
@@ -0,0 +1,446 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/frontend/webrtc/libcommon/connection_controller.h"
+
+#include <algorithm>
+#include <vector>
+
+#include <android-base/logging.h>
+
+#include "host/frontend/webrtc/libcommon/audio_device.h"
+#include "host/frontend/webrtc/libcommon/utils.h"
+
+namespace cuttlefish {
+namespace webrtc_streaming {
+
+// Different classes are needed because all the interfaces inherit from
+// classes providing the methods AddRef and Release, needed by scoped_ptr, which
+// cause ambiguity when a single class (i.e ConnectionController) implements all
+// of them.
+// It's safe for these classes to hold a reference to the ConnectionController
+// because it owns the peer connection, so it will never be destroyed before
+// these observers.
+class CreateSessionDescriptionObserverIntermediate
+    : public webrtc::CreateSessionDescriptionObserver {
+ public:
+  CreateSessionDescriptionObserverIntermediate(ConnectionController& controller)
+      : controller_(controller) {}
+
+  void OnSuccess(webrtc::SessionDescriptionInterface* desc) override {
+    controller_.OnCreateSDPSuccess(desc);
+  }
+  void OnFailure(webrtc::RTCError error) override {
+    controller_.OnCreateSDPFailure(error);
+  }
+
+ private:
+  ConnectionController& controller_;
+};
+
+class SetSessionDescriptionObserverIntermediate
+    : public webrtc::SetSessionDescriptionObserver {
+ public:
+  SetSessionDescriptionObserverIntermediate(ConnectionController& controller)
+      : controller_(controller) {}
+
+  void OnSuccess() override { controller_.OnSetLocalDescriptionSuccess(); }
+  void OnFailure(webrtc::RTCError error) override {
+    controller_.OnSetLocalDescriptionFailure(error);
+  }
+
+ private:
+  ConnectionController& controller_;
+};
+
+class SetRemoteDescriptionObserverIntermediate
+    : public webrtc::SetRemoteDescriptionObserverInterface {
+ public:
+  SetRemoteDescriptionObserverIntermediate(ConnectionController& controller)
+      : controller_(controller) {}
+
+  void OnSetRemoteDescriptionComplete(webrtc::RTCError error) override {
+    controller_.OnSetRemoteDescriptionComplete(error);
+  }
+
+ private:
+  ConnectionController& controller_;
+};
+
+ConnectionController::ConnectionController(
+    PeerSignalingHandler& sig_handler,
+    PeerConnectionBuilder& connection_builder,
+    ConnectionController::Observer& observer)
+    : sig_handler_(sig_handler),
+      connection_builder_(connection_builder),
+      observer_(observer) {}
+
+void ConnectionController::CreateOffer() {
+  // No memory leak here because this is a ref counted object and the
+  // peer connection immediately wraps it with a scoped_refptr
+  peer_connection_->CreateOffer(ThisAsCreateSDPObserver(), {} /*options*/);
+}
+
+Result<void> ConnectionController::RequestOffer(
+    const std::vector<webrtc::PeerConnectionInterface::IceServer>&
+        ice_servers) {
+  observer_.OnConnectionStateChange(
+      webrtc::PeerConnectionInterface::PeerConnectionState::kNew);
+  Json::Value msg;
+  msg["type"] = "request-offer";
+  if (!ice_servers.empty()) {
+    // Only include the ice servers in the message if non empty
+    msg["ice_servers"] = GenerateIceServersMessage(ice_servers);
+  }
+  CF_EXPECT(sig_handler_.SendMessage(msg),
+            "Failed to send the request-offer message to the device");
+  return {};
+}
+
+void ConnectionController::FailConnection(const std::string& message) {
+  Json::Value reply;
+  reply["type"] = "error";
+  reply["error"] = message;
+  sig_handler_.SendMessage(reply);
+  observer_.OnConnectionStateChange(CF_ERR(message));
+}
+
+void ConnectionController::AddPendingIceCandidates() {
+  // Add any ice candidates that arrived before the remote description
+  for (auto& candidate : pending_ice_candidates_) {
+    peer_connection_->AddIceCandidate(
+        std::move(candidate), [this](webrtc::RTCError error) {
+          if (!error.ok()) {
+            FailConnection(ToString(error.type()) + std::string(": ") +
+                           error.message());
+          }
+        });
+  }
+  pending_ice_candidates_.clear();
+}
+
+Result<void> ConnectionController::OnOfferRequestMsg(
+    const std::vector<webrtc::PeerConnectionInterface::IceServer>&
+        ice_servers) {
+  peer_connection_ = CF_EXPECT(connection_builder_.Build(*this, ice_servers),
+                               "Failed to create peer connection");
+  CreateOffer();
+  return {};
+}
+
+Result<void> ConnectionController::OnOfferMsg(
+    std::unique_ptr<webrtc::SessionDescriptionInterface> offer) {
+  peer_connection_->SetRemoteDescription(std::move(offer),
+                                         ThisAsSetRemoteSDPObserver());
+  return {};
+}
+
+Result<void> ConnectionController::OnAnswerMsg(
+    std::unique_ptr<webrtc::SessionDescriptionInterface> answer) {
+  peer_connection_->SetRemoteDescription(std::move(answer),
+                                         ThisAsSetRemoteSDPObserver());
+  return {};
+}
+
+Result<void> ConnectionController::OnIceCandidateMsg(
+    std::unique_ptr<webrtc::IceCandidateInterface> candidate) {
+  if (peer_connection_->remote_description()) {
+    peer_connection_->AddIceCandidate(
+        std::move(candidate), [this](webrtc::RTCError error) {
+          if (!error.ok()) {
+            FailConnection(ToString(error.type()) + std::string(": ") +
+                           error.message());
+          }
+        });
+  } else {
+    // Store the ice candidate to be added later if it arrives before the
+    // remote description. This could happen if the client uses polling
+    // instead of websockets because the candidates are generated immediately
+    // after the remote (offer) description is set and the events and the ajax
+    // calls are asynchronous.
+    pending_ice_candidates_.push_back(std::move(candidate));
+  }
+  return {};
+}
+
+Result<void> ConnectionController::OnErrorMsg(const std::string& msg) {
+  LOG(ERROR) << "Received error message from peer: " << msg;
+  return {};
+}
+
+void ConnectionController::OnCreateSDPSuccess(
+    webrtc::SessionDescriptionInterface* desc) {
+  std::string offer_str;
+  desc->ToString(&offer_str);
+  std::string sdp_type = desc->type();
+  peer_connection_->SetLocalDescription(ThisAsSetSDPObserver(), desc);
+  // The peer connection takes ownership of the description so it should not be
+  // used after this
+  desc = nullptr;
+
+  Json::Value reply;
+  reply["type"] = sdp_type;
+  reply["sdp"] = offer_str;
+
+  sig_handler_.SendMessage(reply);
+}
+
+void ConnectionController::OnCreateSDPFailure(const webrtc::RTCError& error) {
+  FailConnection(ToString(error.type()) + std::string(": ") + error.message());
+}
+
+void ConnectionController::OnSetLocalDescriptionSuccess() {
+  // local description set, nothing else to do
+}
+
+void ConnectionController::OnSetLocalDescriptionFailure(
+    const webrtc::RTCError& error) {
+  LOG(ERROR) << "Error setting local description: Either there is a bug in "
+                "libwebrtc or the local description was (incorrectly) modified "
+                "after creating it";
+  FailConnection(ToString(error.type()) + std::string(": ") + error.message());
+}
+
+void ConnectionController::OnSetRemoteDescriptionComplete(
+    const webrtc::RTCError& error) {
+  if (!error.ok()) {
+    // The remote description was rejected, can't connect to device.
+    FailConnection(ToString(error.type()) + std::string(": ") + error.message());
+    return;
+  }
+  AddPendingIceCandidates();
+  auto remote_desc = peer_connection_->remote_description();
+  CHECK(remote_desc) << "The remote description was just added successfully in "
+                        "this thread, so it can't be nullptr";
+  if (remote_desc->GetType() != webrtc::SdpType::kOffer) {
+    // Only create and send answer when the remote description is an offer.
+    return;
+  }
+  peer_connection_->CreateAnswer(ThisAsCreateSDPObserver(), {} /*options*/);
+}
+
+// No memory leaks with these because the peer_connection immediately wraps
+// these pointers with scoped_refptr.
+webrtc::CreateSessionDescriptionObserver*
+ConnectionController::ThisAsCreateSDPObserver() {
+  return new rtc::RefCountedObject<
+      CreateSessionDescriptionObserverIntermediate>(*this);
+}
+webrtc::SetSessionDescriptionObserver*
+ConnectionController::ThisAsSetSDPObserver() {
+  return new rtc::RefCountedObject<SetSessionDescriptionObserverIntermediate>(
+      *this);
+}
+rtc::scoped_refptr<webrtc::SetRemoteDescriptionObserverInterface>
+ConnectionController::ThisAsSetRemoteSDPObserver() {
+  return rtc::scoped_refptr<webrtc::SetRemoteDescriptionObserverInterface>(
+      new rtc::RefCountedObject<SetRemoteDescriptionObserverIntermediate>(
+          *this));
+}
+
+void ConnectionController::HandleSignalingMessage(const Json::Value& msg) {
+  auto result = HandleSignalingMessageInner(msg);
+  if (!result.ok()) {
+    LOG(ERROR) << result.error().Message();
+    LOG(DEBUG) << result.error().Trace();
+    FailConnection(result.error().Message());
+  }
+}
+
+Result<void> ConnectionController::HandleSignalingMessageInner(
+    const Json::Value& message) {
+  CF_EXPECT(ValidateJsonObject(message, "",
+                               {{"type", Json::ValueType::stringValue}}));
+  auto type = message["type"].asString();
+
+  if (type == "request-offer") {
+    auto ice_servers = CF_EXPECT(ParseIceServersMessage(message),
+                                 "Error parsing ice-servers field");
+    return OnOfferRequestMsg(ice_servers);
+  } else if (type == "offer") {
+    auto remote_desc = CF_EXPECT(
+        ParseSessionDescription(type, message, webrtc::SdpType::kOffer));
+    return OnOfferMsg(std::move(remote_desc));
+  } else if (type == "answer") {
+    auto remote_desc = CF_EXPECT(
+        ParseSessionDescription(type, message, webrtc::SdpType::kAnswer));
+    return OnAnswerMsg(std::move(remote_desc));
+  } else if (type == "ice-candidate") {
+    auto candidate = CF_EXPECT(ParseIceCandidate(type, message));
+    return OnIceCandidateMsg(std::move(candidate));
+  } else if (type == "error") {
+    return OnErrorMsg(CF_EXPECT(ParseError(type, message)));
+  } else {
+    return CF_ERR("Unknown client message type: " + type);
+  }
+}
+
+// Triggered when the SignalingState changed.
+void ConnectionController::OnSignalingChange(
+    webrtc::PeerConnectionInterface::SignalingState new_state) {
+  LOG(VERBOSE) << "Signaling state changed: " << new_state;
+}
+
+// Triggered when media is received on a new stream from remote peer.
+void ConnectionController::OnAddStream(
+    rtc::scoped_refptr<webrtc::MediaStreamInterface> stream) {
+  LOG(VERBOSE) << "Stream added: " << stream->id();
+}
+
+// Triggered when a remote peer closes a stream.
+void ConnectionController::OnRemoveStream(
+    rtc::scoped_refptr<webrtc::MediaStreamInterface> stream) {
+  LOG(VERBOSE) << "Stream removed: " << stream->id();
+}
+
+// Triggered when a remote peer opens a data channel.
+void ConnectionController::OnDataChannel(
+    rtc::scoped_refptr<webrtc::DataChannelInterface> data_channel) {
+  observer_.OnDataChannel(data_channel);
+}
+
+// Triggered when renegotiation is needed. For example, an ICE restart
+// has begun.
+void ConnectionController::OnRenegotiationNeeded() {
+  if (!peer_connection_) {
+    return;
+  }
+  CreateOffer();
+}
+
+// Called any time the standards-compliant IceConnectionState changes.
+void ConnectionController::OnStandardizedIceConnectionChange(
+    webrtc::PeerConnectionInterface::IceConnectionState new_state) {
+  switch (new_state) {
+    case webrtc::PeerConnectionInterface::kIceConnectionNew:
+      LOG(DEBUG) << "ICE connection state: New";
+      break;
+    case webrtc::PeerConnectionInterface::kIceConnectionChecking:
+      LOG(DEBUG) << "ICE connection state: Checking";
+      break;
+    case webrtc::PeerConnectionInterface::kIceConnectionConnected:
+      LOG(DEBUG) << "ICE connection state: Connected";
+      break;
+    case webrtc::PeerConnectionInterface::kIceConnectionCompleted:
+      LOG(DEBUG) << "ICE connection state: Completed";
+      break;
+    case webrtc::PeerConnectionInterface::kIceConnectionFailed:
+      LOG(DEBUG) << "ICE connection state: Failed";
+      break;
+    case webrtc::PeerConnectionInterface::kIceConnectionDisconnected:
+      LOG(DEBUG) << "ICE connection state: Disconnected";
+      break;
+    case webrtc::PeerConnectionInterface::kIceConnectionClosed:
+      LOG(DEBUG) << "ICE connection state: Closed";
+      break;
+    case webrtc::PeerConnectionInterface::kIceConnectionMax:
+      LOG(DEBUG) << "ICE connection state: Max";
+      break;
+    default:
+      LOG(DEBUG) << "ICE connection state: " << new_state;
+  }
+}
+
+// Called any time the PeerConnectionState changes.
+void ConnectionController::OnConnectionChange(
+    webrtc::PeerConnectionInterface::PeerConnectionState new_state) {
+  observer_.OnConnectionStateChange(new_state);
+}
+
+// Called any time the IceGatheringState changes.
+void ConnectionController::OnIceGatheringChange(
+    webrtc::PeerConnectionInterface::IceGatheringState new_state) {
+  std::string state_str;
+  switch (new_state) {
+    case webrtc::PeerConnectionInterface::IceGatheringState::kIceGatheringNew:
+      state_str = "NEW";
+      break;
+    case webrtc::PeerConnectionInterface::IceGatheringState::
+        kIceGatheringGathering:
+      state_str = "GATHERING";
+      break;
+    case webrtc::PeerConnectionInterface::IceGatheringState::
+        kIceGatheringComplete:
+      state_str = "COMPLETE";
+      break;
+    default:
+      state_str = "UNKNOWN";
+  }
+  LOG(VERBOSE) << "ICE Gathering state set to: " << state_str;
+}
+
+// A new ICE candidate has been gathered.
+void ConnectionController::OnIceCandidate(
+    const webrtc::IceCandidateInterface* candidate) {
+  std::string candidate_sdp;
+  candidate->ToString(&candidate_sdp);
+  auto sdp_mid = candidate->sdp_mid();
+  auto line_index = candidate->sdp_mline_index();
+
+  Json::Value reply;
+  reply["type"] = "ice-candidate";
+  reply["mid"] = sdp_mid;
+  reply["mLineIndex"] = static_cast<Json::UInt64>(line_index);
+  reply["candidate"] = candidate_sdp;
+
+  sig_handler_.SendMessage(reply);
+}
+
+// Gathering of an ICE candidate failed.
+// See https://w3c.github.io/webrtc-pc/#event-icecandidateerror
+void ConnectionController::OnIceCandidateError(const std::string& address,
+                                               int port, const std::string& url,
+                                               int error_code,
+                                               const std::string& error_text) {
+  LOG(VERBOSE) << "Gathering of an ICE candidate (address: " << address
+               << ", port: " << port << ", url: " << url
+               << ") failed: " << error_text;
+}
+
+// Ice candidates have been removed.
+void ConnectionController::OnIceCandidatesRemoved(
+    const std::vector<cricket::Candidate>&) {
+  // ignore
+}
+
+// This is called when signaling indicates a transceiver will be receiving
+// media from the remote endpoint. This is fired during a call to
+// SetRemoteDescription. The receiving track can be accessed by:
+// ConnectionController::|transceiver->receiver()->track()| and its
+// associated streams by |transceiver->receiver()->streams()|. Note: This will
+// only be called if Unified Plan semantics are specified. This behavior is
+// specified in section 2.2.8.2.5 of the "Set the RTCSessionDescription"
+// algorithm: https://w3c.github.io/webrtc-pc/#set-description
+void ConnectionController::OnTrack(
+    rtc::scoped_refptr<webrtc::RtpTransceiverInterface> transceiver) {
+  observer_.OnTrack(transceiver);
+}
+
+// Called when signaling indicates that media will no longer be received on a
+// track.
+// With Plan B semantics, the given receiver will have been removed from the
+// PeerConnection and the track muted.
+// With Unified Plan semantics, the receiver will remain but the transceiver
+// will have changed direction to either sendonly or inactive.
+// https://w3c.github.io/webrtc-pc/#process-remote-track-removal
+void ConnectionController::OnRemoveTrack(
+    rtc::scoped_refptr<webrtc::RtpReceiverInterface> receiver) {
+  observer_.OnRemoveTrack(receiver);
+}
+
+}  // namespace webrtc_streaming
+}  // namespace cuttlefish
+
diff --git a/host/frontend/webrtc/libcommon/connection_controller.h b/host/frontend/webrtc/libcommon/connection_controller.h
new file mode 100644
index 0000000..b034764
--- /dev/null
+++ b/host/frontend/webrtc/libcommon/connection_controller.h
@@ -0,0 +1,180 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <memory>
+
+#include <api/peer_connection_interface.h>
+#include <json/json.h>
+
+#include "common/libs/utils/result.h"
+#include "host/frontend/webrtc/libcommon/peer_signaling_handler.h"
+
+namespace cuttlefish {
+namespace webrtc_streaming {
+
+// Creating the peer connection is different on the client and device, but for
+// both the pc needs to be created during the signaling process.
+class PeerConnectionBuilder {
+ public:
+  virtual ~PeerConnectionBuilder() = default;
+  virtual Result<rtc::scoped_refptr<webrtc::PeerConnectionInterface>> Build(
+      webrtc::PeerConnectionObserver& observer,
+      const std::vector<webrtc::PeerConnectionInterface::IceServer>&
+          per_connection_servers) = 0;
+};
+
+// Encapsulates the signaling protocol, which is mostly the same for client and
+// device. Devices will create a connection controller for each new client and
+// simply provide implementations of the callbacks in
+// ConnectionController::Observer. Clients must additionally call RequestOffer
+// to start the signaling process.
+class ConnectionController : public webrtc::PeerConnectionObserver {
+ public:
+  // These callbacks will be called from the signaling thread. Implementations
+  // should return as soon as possible, particularly not blocking on IO.
+  // Implementations must never destroy the ConnectionController object from
+  // inside these callbacks as that would lead to undefined behavior.
+  // TODO (b/240578845): This avoids having to create an extra thread per
+  // client just to monitor changes in the device side, but opens problems by
+  // allowing it to react to state changes on a peer connection callback. The
+  // device already has code to avoid these issues, but the ideal solution
+  // would be for this callback to be posted to a thread or not to be used at
+  // all.
+  class Observer {
+   public:
+    virtual ~Observer() = default;
+    virtual void OnConnectionStateChange(
+        Result<webrtc::PeerConnectionInterface::PeerConnectionState>
+            status) = 0;
+
+    // Called when new media tracks are added to the peer connection.
+    virtual void OnTrack(
+        rtc::scoped_refptr<webrtc::RtpTransceiverInterface> transceiver) = 0;
+    // Called when media tracks are removed from the peer connection.
+    virtual void OnRemoveTrack(
+        rtc::scoped_refptr<webrtc::RtpReceiverInterface> receiver) = 0;
+    // Called when a data channel is added to the peer connection.
+    virtual void OnDataChannel(
+        rtc::scoped_refptr<webrtc::DataChannelInterface> data_channel) = 0;
+  };
+
+  ConnectionController(PeerSignalingHandler& sig_handler,
+                       PeerConnectionBuilder& connection_builder,
+                       Observer& observer);
+  ~ConnectionController() override = default;
+
+  // Sends a request-offer message to the peer to kickstart the signaling
+  // process.
+  Result<void> RequestOffer(
+      const std::vector<webrtc::PeerConnectionInterface::IceServer>&
+          ice_servers);
+
+  // This class doesn't poll for signaling messages from the server, instead
+  // users must do that themselves and provide them here for the connection
+  // controller to process them. As the result of this processing some callbacks
+  // may be called or new messages may be sent to the peer, most likely after
+  // the function returns, but that's not guaranteed.
+  void HandleSignalingMessage(const Json::Value& msg);
+
+  rtc::scoped_refptr<webrtc::PeerConnectionInterface> peer_connection() {
+    return peer_connection_;
+  }
+
+  // webrtc::PeerConnectionObserver implementation
+  void OnSignalingChange(
+      webrtc::PeerConnectionInterface::SignalingState new_state) override;
+  void OnAddStream(
+      rtc::scoped_refptr<webrtc::MediaStreamInterface> stream) override;
+  void OnRemoveStream(
+      rtc::scoped_refptr<webrtc::MediaStreamInterface> stream) override;
+  void OnDataChannel(
+      rtc::scoped_refptr<webrtc::DataChannelInterface> data_channel) override;
+  void OnRenegotiationNeeded() override;
+  void OnStandardizedIceConnectionChange(
+      webrtc::PeerConnectionInterface::IceConnectionState new_state) override;
+  void OnConnectionChange(
+      webrtc::PeerConnectionInterface::PeerConnectionState new_state) override;
+  void OnIceGatheringChange(
+      webrtc::PeerConnectionInterface::IceGatheringState new_state) override;
+  void OnIceCandidate(const webrtc::IceCandidateInterface* candidate) override;
+  void OnIceCandidateError(const std::string& address, int port,
+                           const std::string& url, int error_code,
+                           const std::string& error_text) override;
+  void OnIceCandidatesRemoved(
+      const std::vector<cricket::Candidate>& candidates) override;
+  // The following can be overridden but are not needed by either the device or
+  // client at the moment. void OnIceConnectionReceivingChange(bool receiving)
+  // override; void OnIceSelectedCandidatePairChanged( const
+  // cricket::CandidatePairChangeEvent& event) override; void OnAddTrack(
+  // rtc::scoped_refptr<webrtc::RtpReceiverInterface> receiver,
+  // const std::vector<rtc::scoped_refptr<webrtc::MediaStreamInterface>>&
+  // streams) override;
+  // void OnInterestingUsage(int usage_pattern) override;
+  void OnTrack(
+      rtc::scoped_refptr<webrtc::RtpTransceiverInterface> transceiver) override;
+  void OnRemoveTrack(
+      rtc::scoped_refptr<webrtc::RtpReceiverInterface> receiver) override;
+
+ private:
+  friend class CreateSessionDescriptionObserverIntermediate;
+  friend class SetSessionDescriptionObserverIntermediate;
+  friend class SetRemoteDescriptionObserverIntermediate;
+  void CreateOffer();
+  void AddPendingIceCandidates();
+  void FailConnection(const std::string& message);
+
+  Result<void> HandleSignalingMessageInner(const Json::Value& msg);
+  Result<void> OnOfferRequestMsg(
+      const std::vector<webrtc::PeerConnectionInterface::IceServer>&
+          ice_servers);
+  Result<void> OnOfferMsg(
+      std::unique_ptr<webrtc::SessionDescriptionInterface> offer);
+  Result<void> OnAnswerMsg(
+      std::unique_ptr<webrtc::SessionDescriptionInterface> offer);
+  Result<void> OnIceCandidateMsg(
+      std::unique_ptr<webrtc::IceCandidateInterface> ice_candidate);
+  Result<void> OnErrorMsg(const std::string& msg);
+
+  webrtc::CreateSessionDescriptionObserver* ThisAsCreateSDPObserver();
+  webrtc::SetSessionDescriptionObserver* ThisAsSetSDPObserver();
+  rtc::scoped_refptr<webrtc::SetRemoteDescriptionObserverInterface>
+  ThisAsSetRemoteSDPObserver();
+
+  void OnCreateSDPSuccess(webrtc::SessionDescriptionInterface* desc);
+  void OnCreateSDPFailure(const webrtc::RTCError& error);
+  void OnSetLocalDescriptionSuccess();
+  void OnSetLocalDescriptionFailure(const webrtc::RTCError& error);
+  void OnSetRemoteDescriptionComplete(const webrtc::RTCError& error);
+
+  PeerSignalingHandler& sig_handler_;
+  PeerConnectionBuilder& connection_builder_;
+  Observer& observer_;
+
+  rtc::scoped_refptr<webrtc::PeerConnectionInterface> peer_connection_;
+  std::vector<std::unique_ptr<webrtc::IceCandidateInterface>>
+      pending_ice_candidates_;
+
+  // To await for a connection to be established:
+  std::mutex status_mtx_;
+  std::condition_variable status_cond_var_;
+  Result<webrtc::PeerConnectionInterface::PeerConnectionState>
+      connection_status_;
+};
+
+}  // namespace webrtc_streaming
+}  // namespace cuttlefish
diff --git a/host/frontend/webrtc/libcommon/peer_connection_utils.cpp b/host/frontend/webrtc/libcommon/peer_connection_utils.cpp
new file mode 100644
index 0000000..2dce001
--- /dev/null
+++ b/host/frontend/webrtc/libcommon/peer_connection_utils.cpp
@@ -0,0 +1,92 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/frontend/webrtc/libcommon/peer_connection_utils.h"
+
+#include <api/audio_codecs/audio_decoder_factory.h>
+#include <api/audio_codecs/audio_encoder_factory.h>
+#include <api/audio_codecs/builtin_audio_decoder_factory.h>
+#include <api/audio_codecs/builtin_audio_encoder_factory.h>
+#include <api/create_peerconnection_factory.h>
+#include <api/peer_connection_interface.h>
+#include <api/video_codecs/builtin_video_decoder_factory.h>
+#include <api/video_codecs/builtin_video_encoder_factory.h>
+#include <api/video_codecs/video_decoder_factory.h>
+#include <api/video_codecs/video_encoder_factory.h>
+
+#include "host/frontend/webrtc/libcommon/audio_device.h"
+#include "host/frontend/webrtc/libcommon/vp8only_encoder_factory.h"
+
+namespace cuttlefish {
+namespace webrtc_streaming {
+
+Result<std::unique_ptr<rtc::Thread>> CreateAndStartThread(
+    const std::string& name) {
+  auto thread = rtc::Thread::CreateWithSocketServer();
+  CF_EXPECT(thread.get(), "Failed to create " << name << " thread");
+  thread->SetName(name, nullptr);
+  CF_EXPECT(thread->Start(), "Failed to start " << name << " thread");
+  return thread;
+}
+
+Result<rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface>>
+CreatePeerConnectionFactory(
+    rtc::Thread* network_thread, rtc::Thread* worker_thread,
+    rtc::Thread* signal_thread,
+    rtc::scoped_refptr<webrtc::AudioDeviceModule> audio_device_module) {
+  auto peer_connection_factory = webrtc::CreatePeerConnectionFactory(
+      network_thread, worker_thread, signal_thread, audio_device_module,
+      webrtc::CreateBuiltinAudioEncoderFactory(),
+      webrtc::CreateBuiltinAudioDecoderFactory(),
+      // Only VP8 is supported
+      std::make_unique<VP8OnlyEncoderFactory>(
+          webrtc::CreateBuiltinVideoEncoderFactory()),
+      webrtc::CreateBuiltinVideoDecoderFactory(), nullptr /* audio_mixer */,
+      nullptr /* audio_processing */);
+  CF_EXPECT(peer_connection_factory.get(),
+            "Failed to create peer connection factory");
+
+  webrtc::PeerConnectionFactoryInterface::Options options;
+  // By default the loopback network is ignored, but generating candidates for
+  // it is useful when using TCP port forwarding.
+  options.network_ignore_mask = 0;
+  peer_connection_factory->SetOptions(options);
+
+  return peer_connection_factory;
+}
+
+Result<rtc::scoped_refptr<webrtc::PeerConnectionInterface>>
+CreatePeerConnection(
+    rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface>
+        peer_connection_factory,
+    webrtc::PeerConnectionDependencies dependencies,
+    uint16_t min_port, uint16_t max_port,
+    const std::vector<webrtc::PeerConnectionInterface::IceServer>& servers) {
+  webrtc::PeerConnectionInterface::RTCConfiguration config;
+  config.sdp_semantics = webrtc::SdpSemantics::kUnifiedPlan;
+  config.servers.insert(config.servers.end(), servers.begin(), servers.end());
+  config.set_min_port(min_port);
+  config.set_max_port(max_port);
+  auto result = peer_connection_factory->CreatePeerConnectionOrError(
+      config, std::move(dependencies));
+
+  CF_EXPECT(result.ok(),
+            "Failed to create peer connection: " << result.error().message());
+  return result.MoveValue();
+}
+
+}  // namespace webrtc_streaming
+}  // namespace cuttlefish
diff --git a/host/frontend/webrtc/libcommon/peer_connection_utils.h b/host/frontend/webrtc/libcommon/peer_connection_utils.h
new file mode 100644
index 0000000..72cdf63
--- /dev/null
+++ b/host/frontend/webrtc/libcommon/peer_connection_utils.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+// TODO review includes
+#include <api/peer_connection_interface.h>
+
+#include "common/libs/utils/result.h"
+
+namespace cuttlefish {
+namespace webrtc_streaming {
+
+Result<std::unique_ptr<rtc::Thread>> CreateAndStartThread(
+    const std::string& name);
+
+Result<rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface>>
+CreatePeerConnectionFactory(
+    rtc::Thread* network_thread, rtc::Thread* worker_thread,
+    rtc::Thread* signal_thread,
+    rtc::scoped_refptr<webrtc::AudioDeviceModule> audio_device_module);
+
+// TODO(b/263528313): Use a packet socket factory instead of a port range.
+Result<rtc::scoped_refptr<webrtc::PeerConnectionInterface>>
+CreatePeerConnection(
+    rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface>
+        peer_connection_factory,
+    webrtc::PeerConnectionDependencies dependencies,
+    uint16_t min_port, uint16_t max_port,
+    const std::vector<webrtc::PeerConnectionInterface::IceServer>&
+        per_connection_servers);
+
+}  // namespace webrtc_streaming
+}  // namespace cuttlefish
+
diff --git a/host/frontend/webrtc/libcommon/peer_signaling_handler.h b/host/frontend/webrtc/libcommon/peer_signaling_handler.h
new file mode 100644
index 0000000..1f3aba8
--- /dev/null
+++ b/host/frontend/webrtc/libcommon/peer_signaling_handler.h
@@ -0,0 +1,37 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <json/json.h>
+
+#include "common/libs/utils/result.h"
+
+namespace cuttlefish {
+namespace webrtc_streaming {
+
+// Interface for an object capable of sending messages to the peer over a
+// signaling channel. More specifically, it can send 'forward' message types to
+// the signaling server.
+class PeerSignalingHandler {
+ public:
+  virtual ~PeerSignalingHandler() = default;
+
+  virtual Result<void> SendMessage(const Json::Value& msg) = 0;
+};
+
+}  // namespace webrtc_streaming
+}  // namespace cuttlefish
diff --git a/host/frontend/webrtc/libcommon/port_range_socket_factory.cpp b/host/frontend/webrtc/libcommon/port_range_socket_factory.cpp
new file mode 100644
index 0000000..f72a50f
--- /dev/null
+++ b/host/frontend/webrtc/libcommon/port_range_socket_factory.cpp
@@ -0,0 +1,84 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/frontend/webrtc/libcommon/port_range_socket_factory.h"
+
+#include <android-base/logging.h>
+
+namespace cuttlefish {
+namespace webrtc_streaming {
+
+namespace {
+
+std::pair<uint16_t, uint16_t> IntersectPortRanges(
+    std::pair<uint16_t, uint16_t> own_range, uint16_t min_port,
+    uint16_t max_port) {
+  if (own_range.first == own_range.second && own_range.first == 0) {
+    // No range configured
+    return {min_port, max_port};
+  }
+  if (min_port == max_port && max_port == 0) {
+    // No range requested, use configured
+    return own_range;
+  }
+  uint16_t own_min_port = own_range.first;
+  uint16_t own_max_port = own_range.second;
+
+  if (min_port > own_max_port || max_port < own_min_port) {
+    // Ranges don't intersect
+    LOG(WARNING) << "Port ranges don't intersect: requested=[" << min_port
+                 << "," << max_port << "], configured=[" << own_min_port << ","
+                 << own_max_port << "]";
+  }
+  return {std::max(min_port, own_min_port), std::min(max_port, own_max_port)};
+}
+
+}  // namespace
+
+PortRangeSocketFactory::PortRangeSocketFactory(
+    rtc::SocketFactory* socket_factory, std::pair<uint16_t, uint16_t> udp_port_range,
+    std::pair<uint16_t, uint16_t> tcp_port_range)
+    : rtc::BasicPacketSocketFactory(socket_factory),
+      udp_port_range_(udp_port_range),
+      tcp_port_range_(tcp_port_range) {}
+
+rtc::AsyncPacketSocket* PortRangeSocketFactory::CreateUdpSocket(
+    const rtc::SocketAddress& local_address, uint16_t min_port,
+    uint16_t max_port) {
+  auto port_range = IntersectPortRanges(udp_port_range_, min_port, max_port);
+  if (port_range.second < port_range.first) {
+    // Own range doesn't intersect with requested range
+    return nullptr;
+  }
+  return rtc::BasicPacketSocketFactory::CreateUdpSocket(
+      local_address, port_range.first, port_range.second);
+}
+
+rtc::AsyncListenSocket* PortRangeSocketFactory::CreateServerTcpSocket(
+    const rtc::SocketAddress& local_address, uint16_t min_port,
+    uint16_t max_port, int opts) {
+  auto port_range = IntersectPortRanges(tcp_port_range_, min_port, max_port);
+  if (port_range.second < port_range.first) {
+    // Own range doesn't intersect with requested range
+    return nullptr;
+  }
+
+  return rtc::BasicPacketSocketFactory::CreateServerTcpSocket(
+      local_address, port_range.first, port_range.second, opts);
+}
+
+}  // namespace webrtc_streaming
+}  // namespace cuttlefish
diff --git a/host/frontend/webrtc/libcommon/port_range_socket_factory.h b/host/frontend/webrtc/libcommon/port_range_socket_factory.h
new file mode 100644
index 0000000..d6a2e13
--- /dev/null
+++ b/host/frontend/webrtc/libcommon/port_range_socket_factory.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <cinttypes>
+#include <utility>
+
+// This is not part of the webrtc api and therefore subject to change
+#include <p2p/base/basic_packet_socket_factory.h>
+
+namespace cuttlefish {
+namespace webrtc_streaming {
+
+// rtc::BasicPacketSocketFactory is not part of the webrtc api so only functions
+// from its upper class should be overridden here.
+class PortRangeSocketFactory : public rtc::BasicPacketSocketFactory {
+ public:
+  PortRangeSocketFactory(rtc::SocketFactory* socket_factory,
+                         std::pair<uint16_t, uint16_t> udp_port_range,
+                         std::pair<uint16_t, uint16_t> tcp_port_range);
+
+  rtc::AsyncPacketSocket* CreateUdpSocket(
+      const rtc::SocketAddress& local_address, uint16_t min_port,
+      uint16_t max_port) override;
+
+  rtc::AsyncListenSocket* CreateServerTcpSocket(
+      const rtc::SocketAddress& local_address, uint16_t min_port,
+      uint16_t max_port, int opts) override;
+
+ private:
+  std::pair<uint16_t, uint16_t> udp_port_range_;
+  std::pair<uint16_t, uint16_t> tcp_port_range_;
+};
+
+}  // namespace webrtc_streaming
+}  // namespace cuttlefish
diff --git a/host/frontend/webrtc/libcommon/signaling_constants.h b/host/frontend/webrtc/libcommon/signaling_constants.h
new file mode 100644
index 0000000..6f068c4
--- /dev/null
+++ b/host/frontend/webrtc/libcommon/signaling_constants.h
@@ -0,0 +1,23 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+namespace cuttlefish {
+namespace webrtc_streaming {
+
+}  // namespace webrtc_streaming
+}  // namespace cuttlefish
diff --git a/host/frontend/webrtc/libcommon/utils.cpp b/host/frontend/webrtc/libcommon/utils.cpp
new file mode 100644
index 0000000..5122b32
--- /dev/null
+++ b/host/frontend/webrtc/libcommon/utils.cpp
@@ -0,0 +1,169 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/frontend/webrtc/libcommon/utils.h"
+
+#include <functional>
+#include <map>
+
+#include <json/json.h>
+
+namespace cuttlefish {
+namespace webrtc_streaming {
+
+namespace {
+
+Result<void> ValidateField(const Json::Value& obj, const std::string& type,
+                           const std::string& field_name,
+                           const Json::ValueType& field_type, bool required) {
+  CF_EXPECT(obj.isObject(), "Expected object with name-value pairs");
+  if (!obj.isMember(field_name) && !required) {
+    return {};
+  }
+  if (!(obj.isMember(field_name) &&
+        obj[field_name].isConvertibleTo(field_type))) {
+    std::string error_msg = "Expected a field named '";
+    error_msg += field_name + "' of type '";
+    error_msg += std::to_string(field_type);
+    error_msg += "'";
+    if (!type.empty()) {
+      error_msg += " in message of type '" + type + "'";
+    }
+    error_msg += ".";
+    return CF_ERR(error_msg);
+  }
+  return {};
+}
+
+template <typename T>
+Json::Value ToArray(const std::vector<T>& vec,
+                    std::function<Json::Value(const T&)> to_json) {
+  Json::Value arr(Json::ValueType::arrayValue);
+  for (const auto& t : vec) {
+    arr.append(to_json(t));
+  }
+  return arr;
+}
+
+}  // namespace
+
+Result<void> ValidateJsonObject(
+    const Json::Value& obj, const std::string& type,
+    const std::map<std::string, Json::ValueType>& required_fields,
+    const std::map<std::string, Json::ValueType>& optional_fields) {
+  for (const auto& field_spec : required_fields) {
+    CF_EXPECT(
+        ValidateField(obj, type, field_spec.first, field_spec.second, true));
+  }
+  for (const auto& field_spec : optional_fields) {
+    CF_EXPECT(
+        ValidateField(obj, type, field_spec.first, field_spec.second, false));
+  }
+  return {};
+}
+
+Result<std::unique_ptr<webrtc::SessionDescriptionInterface>>
+ParseSessionDescription(const std::string& type, const Json::Value& message,
+                        webrtc::SdpType sdp_type) {
+  CF_EXPECT(ValidateJsonObject(message, type,
+                               {{"sdp", Json::ValueType::stringValue}}));
+  auto remote_desc_str = message["sdp"].asString();
+  auto remote_desc =
+      webrtc::CreateSessionDescription(sdp_type, remote_desc_str);
+  CF_EXPECT(remote_desc.get(), "Failed to parse sdp.");
+  return remote_desc;
+}
+
+Result<std::unique_ptr<webrtc::IceCandidateInterface>> ParseIceCandidate(
+    const std::string& type, const Json::Value& message) {
+  CF_EXPECT(ValidateJsonObject(message, type,
+                               {{"candidate", Json::ValueType::objectValue}}));
+  auto candidate_json = message["candidate"];
+  CF_EXPECT(ValidateJsonObject(candidate_json, "ice-candidate/candidate",
+                               {
+                                   {"sdpMid", Json::ValueType::stringValue},
+                                   {"candidate", Json::ValueType::stringValue},
+                                   {"sdpMLineIndex", Json::ValueType::intValue},
+                               }));
+  auto mid = candidate_json["sdpMid"].asString();
+  auto candidate_sdp = candidate_json["candidate"].asString();
+  auto line_index = candidate_json["sdpMLineIndex"].asInt();
+
+  auto candidate =
+      std::unique_ptr<webrtc::IceCandidateInterface>(webrtc::CreateIceCandidate(
+          mid, line_index, candidate_sdp, nullptr /*error*/));
+  CF_EXPECT(candidate.get(), "Failed to parse ICE candidate");
+  return candidate;
+}
+
+Result<std::string> ParseError(const std::string& type,
+                               const Json::Value& message) {
+  CF_EXPECT(ValidateJsonObject(message, type,
+                               {{"error", Json::ValueType::stringValue}}));
+  return message["error"].asString();
+}
+
+Result<std::vector<webrtc::PeerConnectionInterface::IceServer>>
+ParseIceServersMessage(const Json::Value& message) {
+  std::vector<webrtc::PeerConnectionInterface::IceServer> ret;
+  if (!message.isMember("ice_servers") || !message["ice_servers"].isArray()) {
+    // The ice_servers field is optional in some messages
+    LOG(VERBOSE)
+        << "ice_servers field not present in json object or not an array";
+    return ret;
+  }
+  auto& servers = message["ice_servers"];
+  for (const auto& server : servers) {
+    webrtc::PeerConnectionInterface::IceServer ice_server;
+    CF_EXPECT(server.isMember("urls") && server["urls"].isArray(),
+              "ICE server specification missing urls field or not an array: "
+                  << server.toStyledString());
+    auto urls = server["urls"];
+    for (int url_idx = 0; url_idx < urls.size(); url_idx++) {
+      auto url = urls[url_idx];
+      CF_EXPECT(url.isString(), "Non string 'urls' field in ice server: "
+                                    << url.toStyledString());
+      ice_server.urls.push_back(url.asString());
+    }
+    if (server.isMember("credential") && server["credential"].isString()) {
+      ice_server.password = server["credential"].asString();
+    }
+    if (server.isMember("username") && server["username"].isString()) {
+      ice_server.username = server["username"].asString();
+    }
+    ret.push_back(ice_server);
+  }
+  return ret;
+}
+
+Json::Value GenerateIceServersMessage(
+    const std::vector<webrtc::PeerConnectionInterface::IceServer>&
+        ice_servers) {
+  return ToArray<webrtc::PeerConnectionInterface::IceServer>(
+      ice_servers,
+      [](const webrtc::PeerConnectionInterface::IceServer& ice_server) {
+        Json::Value server;
+        server["urls"] = ToArray<std::string>(
+            ice_server.urls,
+            [](const std::string& url) { return Json::Value(url); });
+        server["credential"] = ice_server.password;
+        server["username"] = ice_server.username;
+        return server;
+      });
+}
+
+}  // namespace webrtc_streaming
+}  // namespace cuttlefish
diff --git a/host/frontend/webrtc/libcommon/utils.h b/host/frontend/webrtc/libcommon/utils.h
new file mode 100644
index 0000000..c6f30db
--- /dev/null
+++ b/host/frontend/webrtc/libcommon/utils.h
@@ -0,0 +1,63 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <map>
+#include <memory>
+#include <vector>
+
+#include <json/json.h>
+
+#include <api/peer_connection_interface.h>
+
+#include "common/libs/utils/result.h"
+
+namespace cuttlefish {
+namespace webrtc_streaming {
+
+// Helper method to ensure a json object has the required fields convertible
+// to the appropriate types.
+Result<void> ValidateJsonObject(
+    const Json::Value& obj, const std::string& type,
+    const std::map<std::string, Json::ValueType>& required_fields,
+    const std::map<std::string, Json::ValueType>& optional_fields = {});
+
+// Parses a session description object from a JSON message.
+Result<std::unique_ptr<webrtc::SessionDescriptionInterface>>
+ParseSessionDescription(const std::string& type, const Json::Value& message,
+                        webrtc::SdpType sdp_type);
+
+// Parses an IceCandidate from a JSON message.
+Result<std::unique_ptr<webrtc::IceCandidateInterface>> ParseIceCandidate(
+    const std::string& type, const Json::Value& message);
+
+// Parses a JSON error message.
+Result<std::string> ParseError(const std::string& type,
+                               const Json::Value& message);
+
+// Checks if the message contains an "ice_servers" array field and parses it
+// into a vector of webrtc ICE servers. Returns an empty vector if the field
+// isn't present.
+Result<std::vector<webrtc::PeerConnectionInterface::IceServer>>
+ParseIceServersMessage(const Json::Value& message);
+
+// Generates a JSON message from a list of ICE servers.
+Json::Value GenerateIceServersMessage(
+    const std::vector<webrtc::PeerConnectionInterface::IceServer>& ice_servers);
+
+}  // namespace webrtc_streaming
+}  // namespace cuttlefish
diff --git a/host/frontend/webrtc/libcommon/vp8only_encoder_factory.cpp b/host/frontend/webrtc/libcommon/vp8only_encoder_factory.cpp
new file mode 100644
index 0000000..944f2e8
--- /dev/null
+++ b/host/frontend/webrtc/libcommon/vp8only_encoder_factory.cpp
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/frontend/webrtc/libcommon/vp8only_encoder_factory.h"
+
+namespace cuttlefish {
+namespace webrtc_streaming {
+VP8OnlyEncoderFactory::VP8OnlyEncoderFactory(
+    std::unique_ptr<webrtc::VideoEncoderFactory> inner)
+    : inner_(std::move(inner)) {}
+
+std::vector<webrtc::SdpVideoFormat> VP8OnlyEncoderFactory::GetSupportedFormats()
+    const {
+  std::vector<webrtc::SdpVideoFormat> ret;
+  // Allow only VP8
+  for (auto& format : inner_->GetSupportedFormats()) {
+    if (format.name == "VP8") {
+      ret.push_back(format);
+    }
+  }
+  return ret;
+}
+
+std::unique_ptr<webrtc::VideoEncoder> VP8OnlyEncoderFactory::CreateVideoEncoder(
+    const webrtc::SdpVideoFormat& format) {
+  return inner_->CreateVideoEncoder(format);
+}
+
+std::unique_ptr<webrtc::VideoEncoderFactory::EncoderSelectorInterface>
+VP8OnlyEncoderFactory::GetEncoderSelector() const {
+  return inner_->GetEncoderSelector();
+}
+
+}  // namespace webrtc_streaming
+}  // namespace cuttlefish
diff --git a/host/frontend/webrtc/libcommon/vp8only_encoder_factory.h b/host/frontend/webrtc/libcommon/vp8only_encoder_factory.h
new file mode 100644
index 0000000..87411e0
--- /dev/null
+++ b/host/frontend/webrtc/libcommon/vp8only_encoder_factory.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <api/video_codecs/video_encoder_factory.h>
+#include <api/video_codecs/video_encoder.h>
+
+namespace cuttlefish {
+namespace webrtc_streaming {
+
+class VP8OnlyEncoderFactory : public webrtc::VideoEncoderFactory {
+ public:
+  VP8OnlyEncoderFactory(std::unique_ptr<webrtc::VideoEncoderFactory> inner);
+
+  std::vector<webrtc::SdpVideoFormat> GetSupportedFormats() const override;
+
+  std::unique_ptr<webrtc::VideoEncoder> CreateVideoEncoder(
+      const webrtc::SdpVideoFormat& format) override;
+
+  std::unique_ptr<EncoderSelectorInterface> GetEncoderSelector() const override;
+
+ private:
+  std::unique_ptr<webrtc::VideoEncoderFactory> inner_;
+};
+
+}  // namespace webrtc_streaming
+}  // namespace cuttlefish
diff --git a/host/frontend/webrtc/libdevice/Android.bp b/host/frontend/webrtc/libdevice/Android.bp
new file mode 100644
index 0000000..b4277c2
--- /dev/null
+++ b/host/frontend/webrtc/libdevice/Android.bp
@@ -0,0 +1,77 @@
+//
+// Copyright (C) 2020 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+cc_library_static {
+    name: "libcuttlefish_webrtc_device",
+    srcs: [
+        "audio_track_source_impl.cpp",
+        "camera_streamer.cpp",
+        "client_handler.cpp",
+        "data_channels.cpp",
+        "keyboard.cpp",
+        "local_recorder.cpp",
+        "streamer.cpp",
+        "video_track_source_impl.cpp",
+        "server_connection.cpp",
+    ],
+    cflags: [
+        // libwebrtc headers need this
+        "-Wno-unused-parameter",
+        "-D_XOPEN_SOURCE",
+        "-DWEBRTC_POSIX",
+        "-DWEBRTC_LINUX",
+    ],
+    header_libs: [
+        "webrtc_signaling_headers",
+        "libwebrtc_absl_headers",
+    ],
+    static_libs: [
+        "libsrtp2",
+        "libcuttlefish_host_config",
+        "libcuttlefish_screen_connector",
+        "libcuttlefish_wayland_server",
+        "libcuttlefish_webrtc_common",
+        "libgflags",
+        "libdrm",
+        "libffi",
+        "libwayland_crosvm_gpu_display_extension_server_protocols",
+        "libwayland_extension_server_protocols",
+        "libwayland_server",
+        "libwebsockets",
+        "libcap",
+        "libcuttlefish_utils",
+        "libwebrtc",
+        "libcvd_gnss_grpc_proxy",
+        "liblocation",
+    ],
+    shared_libs: [
+        "libbase",
+        "libcn-cbor",
+        "libcuttlefish_fs",
+        "libfruit",
+        "libjsoncpp",
+        "libssl",
+        "libwebm_mkvmuxer",
+        "libprotobuf-cpp-full",
+        "libgrpc++_unsecure",
+        "libxml2",
+    ],
+    defaults: ["cuttlefish_buildhost_only"],
+}
+
diff --git a/host/frontend/webrtc/lib/audio_frame_buffer.h b/host/frontend/webrtc/libdevice/audio_frame_buffer.h
similarity index 100%
rename from host/frontend/webrtc/lib/audio_frame_buffer.h
rename to host/frontend/webrtc/libdevice/audio_frame_buffer.h
diff --git a/host/frontend/webrtc/libdevice/audio_sink.h b/host/frontend/webrtc/libdevice/audio_sink.h
new file mode 100644
index 0000000..1baa881
--- /dev/null
+++ b/host/frontend/webrtc/libdevice/audio_sink.h
@@ -0,0 +1,34 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <memory>
+
+#include "host/frontend/webrtc/libdevice/audio_frame_buffer.h"
+
+namespace cuttlefish {
+namespace webrtc_streaming {
+
+class AudioSink {
+ public:
+  virtual ~AudioSink() = default;
+  virtual void OnFrame(std::shared_ptr<AudioFrameBuffer> frame,
+                       int64_t timestamp_us) = 0;
+};
+
+}  // namespace webrtc_streaming
+}  // namespace cuttlefish
diff --git a/host/frontend/webrtc/libdevice/audio_track_source_impl.cpp b/host/frontend/webrtc/libdevice/audio_track_source_impl.cpp
new file mode 100644
index 0000000..72756e1
--- /dev/null
+++ b/host/frontend/webrtc/libdevice/audio_track_source_impl.cpp
@@ -0,0 +1,77 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/frontend/webrtc/libdevice/audio_track_source_impl.h"
+
+#include <android-base/logging.h>
+
+namespace cuttlefish {
+namespace webrtc_streaming {
+
+void AudioTrackSourceImpl::SetVolume(double volume) {
+  std::lock_guard<std::mutex> lock(observers_mutex_);
+  for (auto observer : audio_observers_) {
+    observer->OnSetVolume(volume);
+  }
+}
+
+void AudioTrackSourceImpl::RegisterAudioObserver(AudioObserver* observer) {
+  std::lock_guard<std::mutex> lock(observers_mutex_);
+  audio_observers_.insert(observer);
+}
+void AudioTrackSourceImpl::UnregisterAudioObserver(AudioObserver* observer) {
+  std::lock_guard<std::mutex> lock(observers_mutex_);
+  audio_observers_.erase(observer);
+}
+
+void AudioTrackSourceImpl::AddSink(webrtc::AudioTrackSinkInterface* sink) {
+  std::lock_guard<std::mutex> lock(sinks_mutex_);
+  sinks_.insert(sink);
+}
+
+void AudioTrackSourceImpl::RemoveSink(webrtc::AudioTrackSinkInterface* sink) {
+  std::lock_guard<std::mutex> lock(sinks_mutex_);
+  sinks_.erase(sink);
+}
+
+const cricket::AudioOptions AudioTrackSourceImpl::options() const {
+  return cricket::AudioOptions();
+}
+
+void AudioTrackSourceImpl::OnFrame(std::shared_ptr<AudioFrameBuffer> frame,
+                                   int64_t timestamp_ms) {
+    std::lock_guard<std::mutex> lock(sinks_mutex_);
+    for (auto sink : sinks_) {
+      sink->OnData(frame->data(), frame->bits_per_sample(),
+                   frame->sample_rate(), frame->channels(), frame->frames(),
+                   timestamp_ms);
+    }
+}
+
+AudioTrackSourceImpl::SourceState AudioTrackSourceImpl::state() const {
+  return SourceState::kLive;
+}
+
+bool AudioTrackSourceImpl::remote() const { return false; }
+
+void AudioTrackSourceImpl::RegisterObserver(
+    webrtc::ObserverInterface* /*observer*/) {}
+
+void AudioTrackSourceImpl::UnregisterObserver(
+    webrtc::ObserverInterface* /*observer*/) {}
+
+}  // namespace webrtc_streaming
+}  // namespace cuttlefish
diff --git a/host/frontend/webrtc/libdevice/audio_track_source_impl.h b/host/frontend/webrtc/libdevice/audio_track_source_impl.h
new file mode 100644
index 0000000..53dffea
--- /dev/null
+++ b/host/frontend/webrtc/libdevice/audio_track_source_impl.h
@@ -0,0 +1,87 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <mutex>
+#include <set>
+
+#include <api/media_stream_interface.h>
+
+#include "host/frontend/webrtc/libdevice/audio_sink.h"
+
+namespace cuttlefish {
+namespace webrtc_streaming {
+
+class AudioTrackSourceImpl : public webrtc::AudioSourceInterface {
+ public:
+  AudioTrackSourceImpl() = default;
+
+  // Sets the volume of the source. |volume| is in  the range of [0, 10].
+  void SetVolume(double volume) override;
+
+  void RegisterAudioObserver(AudioObserver* observer) override;
+  void UnregisterAudioObserver(AudioObserver* observer) override;
+
+  void AddSink(webrtc::AudioTrackSinkInterface* sink) override;
+  void RemoveSink(webrtc::AudioTrackSinkInterface* sink) override;
+
+  // Returns options for the AudioSource.
+  // (for some of the settings this approach is broken, e.g. setting
+  // audio network adaptation on the source is the wrong layer of abstraction).
+  virtual const cricket::AudioOptions options() const;
+
+  void OnFrame(std::shared_ptr<AudioFrameBuffer> frame, int64_t timestamp_ms);
+
+  // MediaSourceInterface implementation
+  SourceState state() const override;
+  bool remote() const override;
+
+  // NotifierInterface implementation
+  void RegisterObserver(webrtc::ObserverInterface* observer) override;
+  void UnregisterObserver(webrtc::ObserverInterface* observer) override;
+
+ private:
+  std::set<AudioObserver*> audio_observers_;
+  std::mutex observers_mutex_;
+  std::set<webrtc::AudioTrackSinkInterface*> sinks_;
+  std::mutex sinks_mutex_;
+};
+
+// Wraps an AudioTrackSourceImpl as an implementation of the AudioSink
+// interface. This is needed as the AudioTrackSourceImpl is a reference counted
+// object that should only be referenced by rtc::scoped_refptr pointers, but the
+// AudioSink interface is not a reference counted object and therefore not
+// compatible with that kind of pointers. This class can be referenced by a
+// shared pointer and it in turn holds a scoped_refptr to the wrapped object.
+class AudioTrackSourceImplSinkWrapper : public AudioSink {
+ public:
+  virtual ~AudioTrackSourceImplSinkWrapper() = default;
+
+  AudioTrackSourceImplSinkWrapper(rtc::scoped_refptr<AudioTrackSourceImpl> obj)
+      : track_source_impl_(obj) {}
+
+  void OnFrame(std::shared_ptr<AudioFrameBuffer> frame,
+               int64_t timestamp_ms) override {
+    track_source_impl_->OnFrame(frame, timestamp_ms);
+  }
+
+ private:
+  rtc::scoped_refptr<AudioTrackSourceImpl> track_source_impl_;
+};
+
+}  // namespace webrtc_streaming
+}  // namespace cuttlefish
diff --git a/host/frontend/webrtc/lib/camera_controller.h b/host/frontend/webrtc/libdevice/camera_controller.h
similarity index 100%
rename from host/frontend/webrtc/lib/camera_controller.h
rename to host/frontend/webrtc/libdevice/camera_controller.h
diff --git a/host/frontend/webrtc/lib/camera_streamer.cpp b/host/frontend/webrtc/libdevice/camera_streamer.cpp
similarity index 100%
rename from host/frontend/webrtc/lib/camera_streamer.cpp
rename to host/frontend/webrtc/libdevice/camera_streamer.cpp
diff --git a/host/frontend/webrtc/libdevice/camera_streamer.h b/host/frontend/webrtc/libdevice/camera_streamer.h
new file mode 100644
index 0000000..1e8bf8f
--- /dev/null
+++ b/host/frontend/webrtc/libdevice/camera_streamer.h
@@ -0,0 +1,74 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+#include <api/video/i420_buffer.h>
+#include <api/video/video_frame.h>
+#include <api/video/video_sink_interface.h>
+#include <json/json.h>
+
+#include "common/libs/utils/vsock_connection.h"
+#include "host/frontend/webrtc/libdevice/camera_controller.h"
+
+#include <atomic>
+#include <mutex>
+#include <thread>
+#include <vector>
+
+namespace cuttlefish {
+namespace webrtc_streaming {
+
+class CameraStreamer : public rtc::VideoSinkInterface<webrtc::VideoFrame>,
+                       public CameraController {
+ public:
+  CameraStreamer(unsigned int port, unsigned int cid);
+  ~CameraStreamer();
+
+  CameraStreamer(const CameraStreamer& other) = delete;
+  CameraStreamer& operator=(const CameraStreamer& other) = delete;
+
+  void OnFrame(const webrtc::VideoFrame& frame) override;
+
+  void HandleMessage(const Json::Value& message) override;
+  void HandleMessage(const std::vector<char>& message) override;
+
+ private:
+  using Resolution = struct {
+    int32_t width;
+    int32_t height;
+  };
+  bool ForwardClientMessage(const Json::Value& message);
+  Resolution GetResolutionFromSettings(const Json::Value& settings);
+  bool VsockSendYUVFrame(const webrtc::I420BufferInterface* frame);
+  bool IsConnectionReady();
+  void StartReadLoop();
+  void Disconnect();
+  std::future<bool> pending_connection_;
+  VsockClientConnection cvd_connection_;
+  std::atomic<Resolution> resolution_;
+  std::mutex settings_mutex_;
+  std::string settings_buffer_;
+  std::mutex frame_mutex_;
+  std::mutex onframe_mutex_;
+  rtc::scoped_refptr<webrtc::I420Buffer> scaled_frame_;
+  unsigned int cid_;
+  unsigned int port_;
+  std::thread reader_thread_;
+  std::atomic<bool> camera_session_active_;
+};
+
+}  // namespace webrtc_streaming
+}  // namespace cuttlefish
diff --git a/host/frontend/webrtc/libdevice/client_handler.cpp b/host/frontend/webrtc/libdevice/client_handler.cpp
new file mode 100644
index 0000000..607e38c
--- /dev/null
+++ b/host/frontend/webrtc/libdevice/client_handler.cpp
@@ -0,0 +1,281 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "ClientHandler"
+
+#include "host/frontend/webrtc/libdevice/client_handler.h"
+
+#include <netdb.h>
+#include <openssl/rand.h>
+
+#include <android-base/logging.h>
+
+#include "host/libs/config/cuttlefish_config.h"
+
+namespace cuttlefish {
+namespace webrtc_streaming {
+
+// Video streams initiating in the client may be added and removed at unexpected
+// times, causing the webrtc objects to be destroyed and created every time.
+// This class hides away that complexity and allows to set up sinks only once.
+class ClientVideoTrackImpl : public ClientVideoTrackInterface {
+ public:
+  void AddOrUpdateSink(rtc::VideoSinkInterface<webrtc::VideoFrame> *sink,
+                       const rtc::VideoSinkWants &wants) override {
+    sink_ = sink;
+    wants_ = wants;
+    if (video_track_) {
+      video_track_->AddOrUpdateSink(sink, wants);
+    }
+  }
+
+  void SetVideoTrack(webrtc::VideoTrackInterface *track) {
+    video_track_ = track;
+    if (sink_) {
+      video_track_->AddOrUpdateSink(sink_, wants_);
+    }
+  }
+
+  void UnsetVideoTrack(webrtc::VideoTrackInterface *track) {
+    if (track == video_track_) {
+      video_track_ = nullptr;
+    }
+  }
+
+ private:
+  webrtc::VideoTrackInterface* video_track_;
+  rtc::VideoSinkInterface<webrtc::VideoFrame> *sink_ = nullptr;
+  rtc::VideoSinkWants wants_ = {};
+};
+
+std::shared_ptr<ClientHandler> ClientHandler::Create(
+    int client_id, std::shared_ptr<ConnectionObserver> observer,
+    PeerConnectionBuilder &connection_builder,
+    std::function<void(const Json::Value &)> send_to_client_cb,
+    std::function<void(bool)> on_connection_changed_cb) {
+  return std::shared_ptr<ClientHandler>(
+      new ClientHandler(client_id, observer, connection_builder,
+                        send_to_client_cb, on_connection_changed_cb));
+}
+
+ClientHandler::ClientHandler(
+    int client_id, std::shared_ptr<ConnectionObserver> observer,
+    PeerConnectionBuilder &connection_builder,
+    std::function<void(const Json::Value &)> send_to_client_cb,
+    std::function<void(bool)> on_connection_changed_cb)
+    : client_id_(client_id),
+      observer_(observer),
+      send_to_client_(send_to_client_cb),
+      on_connection_changed_cb_(on_connection_changed_cb),
+      connection_builder_(connection_builder),
+      controller_(*this, *this, *this),
+      data_channels_handler_(observer),
+      camera_track_(new ClientVideoTrackImpl()) {}
+
+rtc::scoped_refptr<webrtc::RtpSenderInterface>
+ClientHandler::AddTrackToConnection(
+    rtc::scoped_refptr<webrtc::MediaStreamTrackInterface> track,
+    rtc::scoped_refptr<webrtc::PeerConnectionInterface> peer_connection,
+    const std::string &label) {
+  if (!peer_connection) {
+    return nullptr;
+  }
+  // Send each track as part of a different stream with the label as id
+  auto err_or_sender =
+      peer_connection->AddTrack(track, {label} /* stream_id */);
+  if (!err_or_sender.ok()) {
+    LOG(ERROR) << "Failed to add track to the peer connection";
+    return nullptr;
+  }
+  return err_or_sender.MoveValue();
+}
+
+bool ClientHandler::AddDisplay(
+    rtc::scoped_refptr<webrtc::VideoTrackInterface> video_track,
+    const std::string &label) {
+  auto [it, inserted] = displays_.emplace(label, DisplayTrackAndSender{
+                                                     .track = video_track,
+                                                 });
+  auto sender =
+      AddTrackToConnection(video_track, controller_.peer_connection(), label);
+  if (sender) {
+    DisplayTrackAndSender &info = it->second;
+    info.sender = sender;
+  }
+  // Succeed if the peer connection is null or the track was added
+  return controller_.peer_connection() == nullptr || sender;
+}
+
+bool ClientHandler::RemoveDisplay(const std::string &label) {
+  auto it = displays_.find(label);
+  if (it == displays_.end()) {
+    return false;
+  }
+
+  if (controller_.peer_connection()) {
+    DisplayTrackAndSender &info = it->second;
+
+    auto error = controller_.peer_connection()->RemoveTrackOrError(info.sender);
+    if (!error.ok()) {
+      LOG(ERROR) << "Failed to remove video track for display " << label << ": "
+                 << error.message();
+      return false;
+    }
+  }
+
+  displays_.erase(it);
+  return true;
+}
+
+bool ClientHandler::AddAudio(
+    rtc::scoped_refptr<webrtc::AudioTrackInterface> audio_track,
+    const std::string &label) {
+  audio_streams_.emplace_back(audio_track, label);
+  auto peer_connection = controller_.peer_connection();
+  if (!peer_connection) {
+    return true;
+  }
+  return AddTrackToConnection(audio_track, controller_.peer_connection(), label)
+      .get();
+}
+
+ClientVideoTrackInterface* ClientHandler::GetCameraStream() {
+  return camera_track_.get();
+}
+
+Result<void> ClientHandler::SendMessage(const Json::Value &msg) {
+  send_to_client_(msg);
+  return {};
+}
+
+Result<rtc::scoped_refptr<webrtc::PeerConnectionInterface>>
+ClientHandler::Build(
+    webrtc::PeerConnectionObserver &observer,
+    const std::vector<webrtc::PeerConnectionInterface::IceServer>
+        &per_connection_servers) {
+  auto peer_connection =
+      CF_EXPECT(connection_builder_.Build(observer, per_connection_servers));
+
+  // Re-add the video and audio tracks after the peer connection has been
+  // created
+  for (auto &[label, info] : displays_) {
+    info.sender =
+        CF_EXPECT(AddTrackToConnection(info.track, peer_connection, label).get());
+  }
+  // Add the audio tracks to the peer connection
+  for (auto &[audio_track, label] : audio_streams_) {
+    // Audio channels are never removed from the connection by the device, so
+    // it's ok to discard the returned sender here. The peer connection keeps
+    // track of it anyways.
+    CF_EXPECT(AddTrackToConnection(audio_track, peer_connection, label).get());
+  }
+
+  // libwebrtc configures the video encoder with a start bitrate of just 300kbs
+  // which causes it to drop the first 4 frames it receives. Any value over 2Mbs
+  // will be capped at 2Mbs when passed to the encoder by the peer_connection
+  // object, so we pass the maximum possible value here.
+  webrtc::BitrateSettings bitrate_settings;
+  bitrate_settings.start_bitrate_bps = 2000000;  // 2Mbs
+  peer_connection->SetBitrate(bitrate_settings);
+
+  // At least one data channel needs to be created on the side that creates the
+  // SDP offer (the device) for data channels to be enabled at all.
+  // This channel is meant to carry control commands from the client.
+  auto control_channel = peer_connection->CreateDataChannel(
+      kControlChannelLabel, nullptr /* config */);
+  CF_EXPECT(control_channel.get(), "Failed to create control data channel");
+
+  data_channels_handler_.OnDataChannelOpen(control_channel);
+
+  return peer_connection;
+}
+
+void ClientHandler::HandleMessage(const Json::Value &message) {
+  controller_.HandleSignalingMessage(message);
+}
+
+void ClientHandler::Close() {
+  // We can't simply call peer_connection_->Close() here because this method
+  // could be called from one of the PeerConnectionObserver callbacks and that
+  // would lead to a deadlock (Close eventually tries to destroy an object that
+  // will then wait for the callback to return -> deadlock). Destroying the
+  // peer_connection_ has the same effect. The only alternative is to postpone
+  // that operation until after the callback returns.
+  on_connection_changed_cb_(false);
+}
+
+void ClientHandler::OnConnectionStateChange(
+    Result<webrtc::PeerConnectionInterface::PeerConnectionState> new_state) {
+  if (!new_state.ok()) {
+    LOG(ERROR) << "Connection error: " << new_state.error().Message();
+    LOG(DEBUG) << new_state.error().Trace();
+    Close();
+    return;
+  }
+  switch (*new_state) {
+    case webrtc::PeerConnectionInterface::PeerConnectionState::kConnected:
+      LOG(VERBOSE) << "Client " << client_id_ << ": WebRTC connected";
+      observer_->OnConnected();
+      on_connection_changed_cb_(true);
+      break;
+    case webrtc::PeerConnectionInterface::PeerConnectionState::kDisconnected:
+      LOG(VERBOSE) << "Client " << client_id_ << ": Connection disconnected";
+      Close();
+      break;
+    case webrtc::PeerConnectionInterface::PeerConnectionState::kFailed:
+      LOG(ERROR) << "Client " << client_id_ << ": Connection failed";
+      Close();
+      break;
+    case webrtc::PeerConnectionInterface::PeerConnectionState::kClosed:
+      LOG(VERBOSE) << "Client " << client_id_ << ": Connection closed";
+      Close();
+      break;
+    case webrtc::PeerConnectionInterface::PeerConnectionState::kNew:
+      LOG(VERBOSE) << "Client " << client_id_ << ": Connection new";
+      break;
+    case webrtc::PeerConnectionInterface::PeerConnectionState::kConnecting:
+      LOG(VERBOSE) << "Client " << client_id_ << ": Connection started";
+      break;
+  }
+}
+
+void ClientHandler::OnDataChannel(
+    rtc::scoped_refptr<webrtc::DataChannelInterface> data_channel) {
+  data_channels_handler_.OnDataChannelOpen(data_channel);
+}
+
+void ClientHandler::OnTrack(
+    rtc::scoped_refptr<webrtc::RtpTransceiverInterface> transceiver) {
+  auto track = transceiver->receiver()->track();
+  if (track && track->kind() == webrtc::MediaStreamTrackInterface::kVideoKind) {
+    // It's ok to take the raw pointer here because we make sure to unset it
+    // when the track is removed
+    camera_track_->SetVideoTrack(
+        static_cast<webrtc::VideoTrackInterface *>(track.get()));
+  }
+}
+void ClientHandler::OnRemoveTrack(
+    rtc::scoped_refptr<webrtc::RtpReceiverInterface> receiver) {
+  auto track = receiver->track();
+  if (track && track->kind() == webrtc::MediaStreamTrackInterface::kVideoKind) {
+    // this only unsets if the track matches the one already in store
+    camera_track_->UnsetVideoTrack(
+        reinterpret_cast<webrtc::VideoTrackInterface *>(track.get()));
+  }
+}
+
+}  // namespace webrtc_streaming
+}  // namespace cuttlefish
diff --git a/host/frontend/webrtc/libdevice/client_handler.h b/host/frontend/webrtc/libdevice/client_handler.h
new file mode 100644
index 0000000..2eacfd1
--- /dev/null
+++ b/host/frontend/webrtc/libdevice/client_handler.h
@@ -0,0 +1,139 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <functional>
+#include <memory>
+#include <optional>
+#include <sstream>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include <json/json.h>
+
+#include <api/peer_connection_interface.h>
+#include <pc/video_track_source.h>
+
+#include "common/libs/utils/result.h"
+#include "host/frontend/webrtc/libcommon/connection_controller.h"
+#include "host/frontend/webrtc/libdevice/data_channels.h"
+#include "host/frontend/webrtc/libdevice/connection_observer.h"
+
+namespace cuttlefish {
+namespace webrtc_streaming {
+
+class InputChannelHandler;
+class AdbChannelHandler;
+class ControlChannelHandler;
+class BluetoothChannelHandler;
+class CameraChannelHandler;
+class LocationChannelHandler;
+class KmlLocationsChannelHandler;
+class GpxLocationsChannelHandler;
+
+class ClientVideoTrackInterface;
+class ClientVideoTrackImpl;
+class PeerConnectionBuilder;
+
+class ClientHandler : public ConnectionController::Observer,
+                      public PeerConnectionBuilder,
+                      public PeerSignalingHandler {
+ public:
+  static std::shared_ptr<ClientHandler> Create(
+      int client_id, std::shared_ptr<ConnectionObserver> observer,
+      PeerConnectionBuilder& connection_builder,
+      std::function<void(const Json::Value&)> send_client_cb,
+      std::function<void(bool)> on_connection_changed_cb);
+  ~ClientHandler() override = default;
+
+  bool AddDisplay(rtc::scoped_refptr<webrtc::VideoTrackInterface> track,
+                  const std::string& label);
+  bool RemoveDisplay(const std::string& label);
+
+  bool AddAudio(rtc::scoped_refptr<webrtc::AudioTrackInterface> track,
+                const std::string& label);
+
+  ClientVideoTrackInterface* GetCameraStream();
+
+  void HandleMessage(const Json::Value& client_message);
+
+  // ConnectionController::Observer implementation
+  void OnConnectionStateChange(
+      Result<webrtc::PeerConnectionInterface::PeerConnectionState> status) override;
+  void OnDataChannel(
+      rtc::scoped_refptr<webrtc::DataChannelInterface> data_channel) override;
+  void OnTrack(
+      rtc::scoped_refptr<webrtc::RtpTransceiverInterface> transceiver) override;
+  void OnRemoveTrack(
+      rtc::scoped_refptr<webrtc::RtpReceiverInterface> receiver) override;
+
+  // PeerSignalingHandling implementation
+  Result<void> SendMessage(const Json::Value& msg) override;
+
+  // PeerConnectionBuilder implementation
+  // Delegates on its own pc builder to create the pc and then adds the displays
+  // and other streams as required.
+  Result<rtc::scoped_refptr<webrtc::PeerConnectionInterface>> Build(
+      webrtc::PeerConnectionObserver& observer,
+      const std::vector<webrtc::PeerConnectionInterface::IceServer>&
+          per_connection_servers) override;
+
+ private:
+  ClientHandler(int client_id, std::shared_ptr<ConnectionObserver> observer,
+                PeerConnectionBuilder& connection_builder,
+                std::function<void(const Json::Value&)> send_client_cb,
+                std::function<void(bool)> on_connection_changed_cb);
+
+  // Intentionally private, disconnect the client by destroying the object.
+  void Close();
+
+  void LogAndReplyError(const std::string& error_msg) const;
+  Result<void> CreateOffer();
+  rtc::scoped_refptr<webrtc::RtpSenderInterface> AddTrackToConnection(
+      rtc::scoped_refptr<webrtc::MediaStreamTrackInterface> track,
+      rtc::scoped_refptr<webrtc::PeerConnectionInterface> peer_connection,
+      const std::string& label);
+
+  int client_id_;
+  std::shared_ptr<ConnectionObserver> observer_;
+  std::function<void(const Json::Value&)> send_to_client_;
+  std::function<void(bool)> on_connection_changed_cb_;
+  PeerConnectionBuilder& connection_builder_;
+  ConnectionController controller_;
+  DataChannelHandlers data_channels_handler_;
+  std::unique_ptr<ClientVideoTrackImpl> camera_track_;
+  struct DisplayTrackAndSender {
+    rtc::scoped_refptr<webrtc::VideoTrackInterface> track;
+    rtc::scoped_refptr<webrtc::RtpSenderInterface> sender;
+  };
+  std::map<std::string, DisplayTrackAndSender> displays_;
+  std::vector<
+      std::pair<rtc::scoped_refptr<webrtc::AudioTrackInterface>, std::string>>
+      audio_streams_;
+};
+
+class ClientVideoTrackInterface {
+ public:
+  virtual ~ClientVideoTrackInterface() = default;
+  virtual void AddOrUpdateSink(
+      rtc::VideoSinkInterface<webrtc::VideoFrame>* sink,
+      const rtc::VideoSinkWants& wants) = 0;
+};
+
+}  // namespace webrtc_streaming
+}  // namespace cuttlefish
diff --git a/host/frontend/webrtc/libdevice/connection_observer.h b/host/frontend/webrtc/libdevice/connection_observer.h
new file mode 100644
index 0000000..2db36d2
--- /dev/null
+++ b/host/frontend/webrtc/libdevice/connection_observer.h
@@ -0,0 +1,100 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <functional>
+
+#include <json/json.h>
+
+namespace cuttlefish {
+namespace webrtc_streaming {
+
+// The ConnectionObserver is the boundary between device specific code and
+// general WebRTC streaming code. Device specific code should be left to
+// implementations of this class while code that could be shared between any
+// device using this streaming library should remain in the library.
+// For example:
+// - Parsing JSON messages to obtain input events is common to all android
+// devices and should stay in the library.
+// - Sending input events to the device by writing to a socket is cuttlefish
+// specific and should be done in the ConnectionObserver implementation. Other
+// devices could choose to send those events over ADB for example. A good rule
+// of thumb is: if it was encoded client side in cf_webrtc.js it should be
+// decoded in the library.
+// NOTE(review): this header uses std::string, std::vector, std::function and
+// uint16_t but only includes <functional> and <json/json.h> — presumably the
+// rest arrives transitively; TODO confirm or add <string>, <vector> and
+// <cstdint> explicitly.
+class ConnectionObserver {
+ public:
+  ConnectionObserver() = default;
+  virtual ~ConnectionObserver() = default;
+
+  // Called once the peer connection with the client is established.
+  virtual void OnConnected() = 0;
+
+  // Input events decoded from the input data channel.
+  virtual void OnTouchEvent(const std::string& display_label, int x, int y,
+                            bool down) = 0;
+  virtual void OnMultiTouchEvent(const std::string& label, Json::Value id,
+                                 Json::Value slot, Json::Value x, Json::Value y,
+                                 bool down, int size) = 0;
+
+  virtual void OnKeyboardEvent(uint16_t keycode, bool down) = 0;
+
+  // ADB channel; the sender callback lets the implementation push replies
+  // back to the client.
+  virtual void OnAdbChannelOpen(
+      std::function<bool(const uint8_t*, size_t)> adb_message_sender) = 0;
+  virtual void OnAdbMessage(const uint8_t* msg, size_t size) = 0;
+
+  // Control channel: device state changes and hardware button presses.
+  virtual void OnControlChannelOpen(
+      std::function<bool(const Json::Value)> control_message_sender) = 0;
+  virtual void OnLidStateChange(bool lid_open) = 0;
+  virtual void OnHingeAngleChange(int hinge_angle) = 0;
+  virtual void OnPowerButton(bool button_down) = 0;
+  virtual void OnBackButton(bool button_down) = 0;
+  virtual void OnHomeButton(bool button_down) = 0;
+  virtual void OnMenuButton(bool button_down) = 0;
+  virtual void OnVolumeDownButton(bool button_down) = 0;
+  virtual void OnVolumeUpButton(bool button_down) = 0;
+  // Product-specific buttons not covered by the fixed set above.
+  virtual void OnCustomActionButton(const std::string& command,
+                                    const std::string& button_state) = 0;
+
+  virtual void OnCameraControlMsg(const Json::Value& msg) = 0;
+
+  // Bluetooth and location channels forward raw byte payloads.
+  virtual void OnBluetoothChannelOpen(
+      std::function<bool(const uint8_t*, size_t)> bluetooth_message_sender) = 0;
+  virtual void OnBluetoothMessage(const uint8_t* msg, size_t size) = 0;
+  virtual void OnLocationChannelOpen(
+      std::function<bool(const uint8_t*, size_t)> location_message_sender) = 0;
+  virtual void OnLocationMessage(const uint8_t* msg, size_t size) = 0;
+
+  virtual void OnKmlLocationsChannelOpen(
+      std::function<bool(const uint8_t*, size_t)>
+          kml_locations_message_sender) = 0;
+
+  virtual void OnGpxLocationsChannelOpen(
+      std::function<bool(const uint8_t*, size_t)>
+          gpx_locations_message_sender) = 0;
+  virtual void OnKmlLocationsMessage(const uint8_t* msg, size_t size) = 0;
+  virtual void OnGpxLocationsMessage(const uint8_t* msg, size_t size) = 0;
+
+  // Receives a complete camera data blob (assembled by the camera channel
+  // handler from the client's chunked uploads).
+  virtual void OnCameraData(const std::vector<char>& data) = 0;
+};
+
+// Produces one ConnectionObserver per client connection.
+class ConnectionObserverFactory {
+ public:
+  virtual ~ConnectionObserverFactory() = default;
+  // Called when a new connection is requested
+  virtual std::shared_ptr<ConnectionObserver> CreateObserver() = 0;
+};
+
+}  // namespace webrtc_streaming
+}  // namespace cuttlefish
diff --git a/host/frontend/webrtc/libdevice/data_channels.cpp b/host/frontend/webrtc/libdevice/data_channels.cpp
new file mode 100644
index 0000000..db88eed
--- /dev/null
+++ b/host/frontend/webrtc/libdevice/data_channels.cpp
@@ -0,0 +1,423 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/frontend/webrtc/libdevice/data_channels.h"
+
+#include <android-base/logging.h>
+
+#include "host/frontend/webrtc/libcommon/utils.h"
+#include "host/frontend/webrtc/libdevice/keyboard.h"
+
+namespace cuttlefish {
+namespace webrtc_streaming {
+
+// Base class for all data channel handlers. Implements
+// webrtc::DataChannelObserver, tracks whether the first message has arrived
+// (so subclasses can defer work until the channel is actually used), and
+// exposes helpers for sending binary or JSON data back to the client.
+class DataChannelHandler : public webrtc::DataChannelObserver {
+ public:
+  virtual ~DataChannelHandler() = default;
+
+  // Sends a raw buffer over the channel; `binary` selects the wire format.
+  bool Send(const uint8_t *msg, size_t size, bool binary);
+  // Serializes `message` and sends it as text.
+  bool Send(const Json::Value &message);
+
+  // webrtc::DataChannelObserver implementation
+  void OnStateChange() override;
+  void OnMessage(const webrtc::DataBuffer &msg) override;
+
+ protected:
+  // Provide access to the underlying data channel and the connection observer.
+  virtual rtc::scoped_refptr<webrtc::DataChannelInterface> channel() = 0;
+  virtual std::shared_ptr<ConnectionObserver> observer() = 0;
+
+  // Subclasses must override this to process messages.
+  virtual void OnMessageInner(const webrtc::DataBuffer &msg) = 0;
+  // Some subclasses may override this to defer some work until the channel is
+  // actually used.
+  virtual void OnFirstMessage() {}
+  virtual void OnStateChangeInner(webrtc::DataChannelInterface::DataState) {}
+
+  // Callback wrappers around Send(), suitable for handing to the observer.
+  std::function<bool(const uint8_t *, size_t len)> GetBinarySender() {
+    return [this](const uint8_t *msg, size_t size) {
+      return Send(msg, size, true /*binary*/);
+    };
+  }
+  std::function<bool(const Json::Value &)> GetJSONSender() {
+    return [this](const Json::Value &msg) { return Send(msg); };
+  }
+ private:
+  // Set by OnMessage() the first time data arrives; gates OnFirstMessage().
+  bool first_msg_received_ = false;
+};
+
+namespace {
+
+// Labels the client uses when opening data channels; the label selects which
+// handler DataChannelHandlers::OnDataChannelOpen() instantiates.
+static constexpr auto kInputChannelLabel = "input-channel";
+static constexpr auto kAdbChannelLabel = "adb-channel";
+static constexpr auto kBluetoothChannelLabel = "bluetooth-channel";
+static constexpr auto kCameraDataChannelLabel = "camera-data-channel";
+static constexpr auto kLocationDataChannelLabel = "location-channel";
+static constexpr auto kKmlLocationsDataChannelLabel = "kml-locations-channel";
+static constexpr auto kGpxLocationsDataChannelLabel = "gpx-locations-channel";
+// Marker the client sends to terminate a camera data blob.
+static constexpr auto kCameraDataEof = "EOF";
+
+// These classes use the Template pattern to minimize code repetition between
+// data channel handlers.
+
+// Handles the input data channel: parses JSON events sent by the client
+// (mouse, multi-touch, keyboard) and forwards them to the ConnectionObserver.
+class InputChannelHandler : public DataChannelHandler {
+ public:
+  void OnMessageInner(const webrtc::DataBuffer &msg) override {
+    if (msg.binary) {
+      // TODO (jemoreira) consider binary protocol to avoid JSON parsing
+      // overhead
+      LOG(ERROR) << "Received invalid (binary) data on input channel";
+      return;
+    }
+    auto size = msg.size();
+
+    Json::Value evt;
+    Json::CharReaderBuilder builder;
+    std::unique_ptr<Json::CharReader> json_reader(builder.newCharReader());
+    std::string errorMessage;
+    auto str = msg.data.cdata<char>();
+    // Fix: the previous condition was `!parse(...) < 0`, which compares a
+    // bool against 0 and is therefore always false, so malformed JSON was
+    // never rejected here. parse() returns false on failure (this matches
+    // the check in ControlChannelHandler::OnMessageInner).
+    if (!json_reader->parse(str, str + size, &evt, &errorMessage)) {
+      LOG(ERROR) << "Received invalid JSON object over input channel: "
+                 << errorMessage;
+      return;
+    }
+    if (!evt.isMember("type") || !evt["type"].isString()) {
+      LOG(ERROR) << "Input event doesn't have a valid 'type' field: "
+                 << evt.toStyledString();
+      return;
+    }
+    auto event_type = evt["type"].asString();
+    if (event_type == "mouse") {
+      // Single-pointer event; delivered as a touch to the named display.
+      auto result =
+          ValidateJsonObject(evt, "mouse",
+                             {{"down", Json::ValueType::intValue},
+                              {"x", Json::ValueType::intValue},
+                              {"y", Json::ValueType::intValue},
+                              {"display_label", Json::ValueType::stringValue}});
+      if (!result.ok()) {
+        LOG(ERROR) << result.error().Trace();
+        return;
+      }
+      auto label = evt["display_label"].asString();
+      int32_t down = evt["down"].asInt();
+      int32_t x = evt["x"].asInt();
+      int32_t y = evt["y"].asInt();
+
+      observer()->OnTouchEvent(label, x, y, down);
+    } else if (event_type == "multi-touch") {
+      // Parallel arrays: id/x/y/slot all have one entry per active pointer.
+      auto result =
+          ValidateJsonObject(evt, "multi-touch",
+                             {{"id", Json::ValueType::arrayValue},
+                              {"down", Json::ValueType::intValue},
+                              {"x", Json::ValueType::arrayValue},
+                              {"y", Json::ValueType::arrayValue},
+                              {"slot", Json::ValueType::arrayValue},
+                              {"display_label", Json::ValueType::stringValue}});
+      if (!result.ok()) {
+        LOG(ERROR) << result.error().Trace();
+        return;
+      }
+
+      auto label = evt["display_label"].asString();
+      auto idArr = evt["id"];
+      int32_t down = evt["down"].asInt();
+      auto xArr = evt["x"];
+      auto yArr = evt["y"];
+      auto slotArr = evt["slot"];
+      int size = evt["id"].size();
+
+      observer()->OnMultiTouchEvent(label, idArr, slotArr, xArr, yArr, down,
+                                    size);
+    } else if (event_type == "keyboard") {
+      auto result =
+          ValidateJsonObject(evt, "keyboard",
+                             {{"event_type", Json::ValueType::stringValue},
+                              {"keycode", Json::ValueType::stringValue}});
+      if (!result.ok()) {
+        LOG(ERROR) << result.error().Trace();
+        return;
+      }
+      // Translate the DOM key code string into a Linux input keycode.
+      auto down = evt["event_type"].asString() == std::string("keydown");
+      auto code = DomKeyCodeToLinux(evt["keycode"].asString());
+      observer()->OnKeyboardEvent(code, down);
+    } else {
+      LOG(ERROR) << "Unrecognized event type: " << event_type;
+      return;
+    }
+  }
+};
+
+// Handles the "device-control" channel: device state updates (lid, hinge)
+// and hardware button presses encoded as JSON commands.
+class ControlChannelHandler : public DataChannelHandler {
+ public:
+  void OnStateChangeInner(
+      webrtc::DataChannelInterface::DataState state) override {
+    // Unlike adb/bluetooth, the control channel is reported open immediately
+    // so the device can receive state updates before any client message.
+    if (state == webrtc::DataChannelInterface::kOpen) {
+      observer()->OnControlChannelOpen(GetJSONSender());
+    }
+  }
+  void OnMessageInner(const webrtc::DataBuffer &msg) override {
+    auto msg_str = msg.data.cdata<char>();
+    auto size = msg.size();
+    Json::Value evt;
+    Json::CharReaderBuilder builder;
+    std::unique_ptr<Json::CharReader> json_reader(builder.newCharReader());
+    std::string errorMessage;
+    if (!json_reader->parse(msg_str, msg_str + size, &evt, &errorMessage)) {
+      LOG(ERROR) << "Received invalid JSON object over control channel: "
+                 << errorMessage;
+      return;
+    }
+
+    // "command" is mandatory; the remaining fields depend on the command.
+    auto result = ValidateJsonObject(
+        evt, "command",
+        /*required_fields=*/{{"command", Json::ValueType::stringValue}},
+        /*optional_fields=*/
+        {
+            {"button_state", Json::ValueType::stringValue},
+            {"lid_switch_open", Json::ValueType::booleanValue},
+            {"hinge_angle_value", Json::ValueType::intValue},
+        });
+    if (!result.ok()) {
+      LOG(ERROR) << result.error().Trace();
+      return;
+    }
+    auto command = evt["command"].asString();
+
+    // Device state changes carry their payload in dedicated fields rather
+    // than in "button_state".
+    if (command == "device_state") {
+      if (evt.isMember("lid_switch_open")) {
+        observer()->OnLidStateChange(evt["lid_switch_open"].asBool());
+      }
+      if (evt.isMember("hinge_angle_value")) {
+        observer()->OnHingeAngleChange(evt["hinge_angle_value"].asInt());
+      }
+      return;
+    } else if (command.rfind("camera_", 0) == 0) {
+      // Commands prefixed with "camera_" are forwarded whole to the camera.
+      observer()->OnCameraControlMsg(evt);
+      return;
+    }
+
+    // "button_state" is optional; an absent member reads as "" here.
+    auto button_state = evt["button_state"].asString();
+    LOG(VERBOSE) << "Control command: " << command << " (" << button_state
+                 << ")";
+    if (command == "power") {
+      observer()->OnPowerButton(button_state == "down");
+    } else if (command == "back") {
+      observer()->OnBackButton(button_state == "down");
+    } else if (command == "home") {
+      observer()->OnHomeButton(button_state == "down");
+    } else if (command == "menu") {
+      observer()->OnMenuButton(button_state == "down");
+    } else if (command == "volumedown") {
+      observer()->OnVolumeDownButton(button_state == "down");
+    } else if (command == "volumeup") {
+      observer()->OnVolumeUpButton(button_state == "down");
+    } else {
+      // Anything else is treated as a product-specific custom action.
+      observer()->OnCustomActionButton(command, button_state);
+    }
+  }
+};
+
+// Relays raw ADB traffic between the client and the ConnectionObserver.
+class AdbChannelHandler : public DataChannelHandler {
+ public:
+  // Deferred open notification: the observer only learns about the adb
+  // channel once the client actually sends data, so clients that never use
+  // ADB don't cause a connection to the adb daemon.
+  void OnFirstMessage() override {
+    observer()->OnAdbChannelOpen(GetBinarySender());
+  }
+
+  // Forwards each message verbatim to the device-specific observer.
+  void OnMessageInner(const webrtc::DataBuffer &msg) override {
+    auto *payload = msg.data.cdata();
+    observer()->OnAdbMessage(payload, msg.size());
+  }
+};
+
+// Relays raw bluetooth traffic between the client and the observer.
+class BluetoothChannelHandler : public DataChannelHandler {
+ public:
+  // Deferred open notification (same rationale as AdbChannelHandler): only
+  // report the channel when it's actually used, avoiding an unnecessary
+  // connection to Rootcanal.
+  void OnFirstMessage() override {
+    observer()->OnBluetoothChannelOpen(GetBinarySender());
+  }
+
+  // Forwards each message verbatim to the device-specific observer.
+  void OnMessageInner(const webrtc::DataBuffer &msg) override {
+    auto *payload = msg.data.cdata();
+    observer()->OnBluetoothMessage(payload, msg.size());
+  }
+};
+
+// Accumulates chunked binary camera data from the client and delivers the
+// complete buffer to the observer when the "EOF" marker message arrives.
+class CameraChannelHandler : public DataChannelHandler {
+ public:
+  void OnMessageInner(const webrtc::DataBuffer &msg) override {
+    auto msg_data = msg.data.cdata<char>();
+    if (msg.size() == strlen(kCameraDataEof) &&
+        !strncmp(msg_data, kCameraDataEof, msg.size())) {
+      // Send complete buffer to observer on EOF marker
+      observer()->OnCameraData(receive_buffer_);
+      receive_buffer_.clear();
+      return;
+    }
+    // Otherwise buffer up data
+    receive_buffer_.insert(receive_buffer_.end(), msg_data,
+                           msg_data + msg.size());
+  }
+
+ private:
+  // Data accumulated since the last EOF marker.
+  std::vector<char> receive_buffer_;
+};
+
+// Relays raw location payloads from the client to the observer.
+class LocationChannelHandler : public DataChannelHandler {
+ public:
+  // Deferred open notification (same rationale as AdbChannelHandler): only
+  // report the channel when it's actually used, avoiding unnecessary
+  // connections.
+  void OnFirstMessage() override {
+    observer()->OnLocationChannelOpen(GetBinarySender());
+  }
+
+  // Forwards each message verbatim to the device-specific observer.
+  void OnMessageInner(const webrtc::DataBuffer &msg) override {
+    auto *payload = msg.data.cdata();
+    observer()->OnLocationMessage(payload, msg.size());
+  }
+};
+
+// Relays raw KML locations payloads from the client to the observer.
+class KmlLocationChannelHandler : public DataChannelHandler {
+ public:
+  void OnMessageInner(const webrtc::DataBuffer &msg) override {
+    observer()->OnKmlLocationsMessage(msg.data.cdata(), msg.size());
+  }
+  void OnFirstMessage() override {
+    // Notify the kml locations channel opening only when the channel is
+    // actually used (same rationale as AdbChannelHandler::OnFirstMessage),
+    // to avoid unnecessary connections.
+    observer()->OnKmlLocationsChannelOpen(GetBinarySender());
+  }
+};
+
+// Relays raw GPX locations payloads from the client to the observer.
+class GpxLocationChannelHandler : public DataChannelHandler {
+ public:
+  void OnMessageInner(const webrtc::DataBuffer &msg) override {
+    observer()->OnGpxLocationsMessage(msg.data.cdata(), msg.size());
+  }
+  void OnFirstMessage() override {
+    // Notify the gpx locations channel opening only when the channel is
+    // actually used (same rationale as AdbChannelHandler::OnFirstMessage),
+    // to avoid unnecessary connections.
+    observer()->OnGpxLocationsChannelOpen(GetBinarySender());
+  }
+};
+
+// Fallback handler for channels whose label is not recognized; incoming
+// messages are logged (with the channel's label) and dropped.
+class UnknownChannelHandler : public DataChannelHandler {
+ public:
+  void OnMessageInner(const webrtc::DataBuffer &) override {
+    LOG(WARNING) << "Message received on unknown channel: "
+                 << channel()->label();
+  }
+};
+
+// Binds a concrete handler type H to a specific data channel and observer.
+// Registers itself as the channel's observer on construction and
+// unregisters on destruction.
+template <typename H>
+class DataChannelHandlerImpl : public H {
+ public:
+  DataChannelHandlerImpl(
+      rtc::scoped_refptr<webrtc::DataChannelInterface> channel,
+      std::shared_ptr<ConnectionObserver> observer)
+      : channel_(channel), observer_(observer) {
+    channel->RegisterObserver(this);
+  }
+  ~DataChannelHandlerImpl() override { channel_->UnregisterObserver(); }
+
+ protected:
+  // DataChannelHandler implementation
+  rtc::scoped_refptr<webrtc::DataChannelInterface> channel() override {
+    return channel_;
+  }
+  std::shared_ptr<ConnectionObserver> observer() override { return observer_; }
+
+ private:
+  rtc::scoped_refptr<webrtc::DataChannelInterface> channel_;
+  std::shared_ptr<ConnectionObserver> observer_;
+};
+
+}  // namespace
+
+// Sends a raw buffer over the underlying data channel. Returns the result of
+// DataChannelInterface::Send().
+bool DataChannelHandler::Send(const uint8_t *msg, size_t size, bool binary) {
+  webrtc::DataBuffer buffer(rtc::CopyOnWriteBuffer(msg, size), binary);
+  // TODO (b/185832105): When the SCTP channel is congested data channel
+  // messages are buffered up to 16MB, when the buffer is full the channel
+  // is abruptly closed. Keep track of the buffered data to avoid losing the
+  // adb data channel.
+  return channel()->Send(buffer);
+}
+
+// Serializes a JSON message and sends it over the channel as text.
+bool DataChannelHandler::Send(const Json::Value &message) {
+  Json::StreamWriterBuilder writer_builder;
+  const std::string serialized = Json::writeString(writer_builder, message);
+  auto *bytes = reinterpret_cast<const uint8_t *>(serialized.c_str());
+  return Send(bytes, serialized.size(), /*binary=*/false);
+}
+
+// Logs every channel state transition and forwards it to the subclass hook.
+void DataChannelHandler::OnStateChange() {
+  LOG(VERBOSE) << channel()->label() << " channel state changed to "
+               << webrtc::DataChannelInterface::DataStateString(
+                      channel()->state());
+  OnStateChangeInner(channel()->state());
+}
+
+// Dispatches an incoming message to the subclass, firing the one-time
+// OnFirstMessage() hook before the very first message is handled.
+void DataChannelHandler::OnMessage(const webrtc::DataBuffer &msg) {
+  const bool is_first = !first_msg_received_;
+  first_msg_received_ = true;
+  if (is_first) {
+    OnFirstMessage();
+  }
+  OnMessageInner(msg);
+}
+
+// Stores the connection observer; the individual channel handlers are
+// created lazily in OnDataChannelOpen().
+DataChannelHandlers::DataChannelHandlers(
+    std::shared_ptr<ConnectionObserver> observer)
+    : observer_(observer) {}
+
+// Out-of-line destructor: DataChannelHandler is only forward-declared in the
+// header, so the unique_ptr members must be destroyed here.
+DataChannelHandlers::~DataChannelHandlers() {}
+
+// Instantiates the handler matching a newly opened data channel's label;
+// channels with unrecognized labels get a logging-only fallback handler so
+// their messages are not silently ignored.
+void DataChannelHandlers::OnDataChannelOpen(
+    rtc::scoped_refptr<webrtc::DataChannelInterface> channel) {
+  auto label = channel->label();
+  LOG(VERBOSE) << "Data channel connected: " << label;
+  if (label == kInputChannelLabel) {
+    input_ = std::make_unique<DataChannelHandlerImpl<InputChannelHandler>>(
+        channel, observer_);
+  } else if (label == kControlChannelLabel) {
+    control_ = std::make_unique<DataChannelHandlerImpl<ControlChannelHandler>>(
+        channel, observer_);
+  } else if (label == kAdbChannelLabel) {
+    adb_ = std::make_unique<DataChannelHandlerImpl<AdbChannelHandler>>(
+        channel, observer_);
+  } else if (label == kBluetoothChannelLabel) {
+    bluetooth_ =
+        std::make_unique<DataChannelHandlerImpl<BluetoothChannelHandler>>(
+            channel, observer_);
+  } else if (label == kCameraDataChannelLabel) {
+    camera_ = std::make_unique<DataChannelHandlerImpl<CameraChannelHandler>>(
+        channel, observer_);
+  } else if (label == kLocationDataChannelLabel) {
+    location_ =
+        std::make_unique<DataChannelHandlerImpl<LocationChannelHandler>>(
+            channel, observer_);
+  } else if (label == kKmlLocationsDataChannelLabel) {
+    kml_location_ =
+        std::make_unique<DataChannelHandlerImpl<KmlLocationChannelHandler>>(
+            channel, observer_);
+  } else if (label == kGpxLocationsDataChannelLabel) {
+    gpx_location_ =
+        std::make_unique<DataChannelHandlerImpl<GpxLocationChannelHandler>>(
+            channel, observer_);
+  } else {
+    unknown_channels_.push_back(
+        std::make_unique<DataChannelHandlerImpl<UnknownChannelHandler>>(
+            channel, observer_));
+  }
+}
+
+}  // namespace webrtc_streaming
+}  // namespace cuttlefish
diff --git a/host/frontend/webrtc/libdevice/data_channels.h b/host/frontend/webrtc/libdevice/data_channels.h
new file mode 100644
index 0000000..c4a9601
--- /dev/null
+++ b/host/frontend/webrtc/libdevice/data_channels.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <functional>
+#include <memory>
+
+#include <api/data_channel_interface.h>
+
+#include "host/frontend/webrtc/libdevice/connection_observer.h"
+
+namespace cuttlefish {
+namespace webrtc_streaming {
+
+constexpr auto kControlChannelLabel = "device-control";
+
+class DataChannelHandler;
+
+// Groups all data channel handlers.
+// Each handler is an implementation of the DataChannelHandler abstract class
+// providing custom message handlers and calling the appropriate methods on the
+// connection observer.
+class DataChannelHandlers {
+ public:
+  DataChannelHandlers(std::shared_ptr<ConnectionObserver> observer);
+  ~DataChannelHandlers();
+
+  void OnDataChannelOpen(
+      rtc::scoped_refptr<webrtc::DataChannelInterface> channel);
+
+ private:
+  std::unique_ptr<DataChannelHandler> input_;
+  std::unique_ptr<DataChannelHandler> control_;
+  std::unique_ptr<DataChannelHandler> adb_;
+  std::unique_ptr<DataChannelHandler> bluetooth_;
+  std::unique_ptr<DataChannelHandler> camera_;
+  std::unique_ptr<DataChannelHandler> location_;
+  std::unique_ptr<DataChannelHandler> kml_location_;
+  std::unique_ptr<DataChannelHandler> gpx_location_;
+  std::vector<std::unique_ptr<DataChannelHandler>> unknown_channels_;
+
+  std::shared_ptr<ConnectionObserver> observer_;
+};
+
+}  // namespace webrtc_streaming
+}  // namespace cuttlefish
diff --git a/host/frontend/webrtc/libdevice/keyboard.cpp b/host/frontend/webrtc/libdevice/keyboard.cpp
new file mode 100644
index 0000000..89de834
--- /dev/null
+++ b/host/frontend/webrtc/libdevice/keyboard.cpp
@@ -0,0 +1,159 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/frontend/webrtc/libdevice/keyboard.h"
+
+#include <linux/input.h>
+
+#include <map>
+
+// Maps DOM KeyboardEvent.code strings (as sent by the web client) to Linux
+// input-event keycodes from <linux/input.h>. Entries commented out below
+// have no mapping here.
+static const std::map<std::string, uint16_t> kDomToLinuxMapping = {
+    {"Backquote", KEY_GRAVE},
+    {"Backslash", KEY_BACKSLASH},
+    {"Backspace", KEY_BACKSPACE},
+    {"BracketLeft", KEY_LEFTBRACE},
+    {"BracketRight", KEY_RIGHTBRACE},
+    {"Comma", KEY_COMMA},
+    {"Digit0", KEY_0},
+    {"Digit1", KEY_1},
+    {"Digit2", KEY_2},
+    {"Digit3", KEY_3},
+    {"Digit4", KEY_4},
+    {"Digit5", KEY_5},
+    {"Digit6", KEY_6},
+    {"Digit7", KEY_7},
+    {"Digit8", KEY_8},
+    {"Digit9", KEY_9},
+    {"Equal", KEY_EQUAL},
+    {"IntlBackslash", KEY_BACKSLASH},
+    {"IntlRo", KEY_RO},
+    {"IntlYen", KEY_BACKSLASH},
+    {"KeyA", KEY_A},
+    {"KeyB", KEY_B},
+    {"KeyC", KEY_C},
+    {"KeyD", KEY_D},
+    {"KeyE", KEY_E},
+    {"KeyF", KEY_F},
+    {"KeyG", KEY_G},
+    {"KeyH", KEY_H},
+    {"KeyI", KEY_I},
+    {"KeyJ", KEY_J},
+    {"KeyK", KEY_K},
+    {"KeyL", KEY_L},
+    {"KeyM", KEY_M},
+    {"KeyN", KEY_N},
+    {"KeyO", KEY_O},
+    {"KeyP", KEY_P},
+    {"KeyQ", KEY_Q},
+    {"KeyR", KEY_R},
+    {"KeyS", KEY_S},
+    {"KeyT", KEY_T},
+    {"KeyU", KEY_U},
+    {"KeyV", KEY_V},
+    {"KeyW", KEY_W},
+    {"KeyX", KEY_X},
+    {"KeyY", KEY_Y},
+    {"KeyZ", KEY_Z},
+    {"Minus", KEY_MINUS},
+    {"Period", KEY_DOT},
+    {"Quote", KEY_APOSTROPHE},
+    {"Semicolon", KEY_SEMICOLON},
+    {"Slash", KEY_SLASH},
+    {"AltLeft", KEY_LEFTALT},
+    {"AltRight", KEY_RIGHTALT},
+    {"CapsLock", KEY_CAPSLOCK},
+    {"ContextMenu", KEY_CONTEXT_MENU},
+    {"ControlLeft", KEY_LEFTCTRL},
+    {"ControlRight", KEY_RIGHTCTRL},
+    {"Enter", KEY_ENTER},
+    {"MetaLeft", KEY_LEFTMETA},
+    {"MetaRight", KEY_RIGHTMETA},
+    {"ShiftLeft", KEY_LEFTSHIFT},
+    {"ShiftRight", KEY_RIGHTSHIFT},
+    {"Space", KEY_SPACE},
+    {"Tab", KEY_TAB},
+    {"Delete", KEY_DELETE},
+    {"End", KEY_END},
+    {"Help", KEY_HELP},
+    {"Home", KEY_HOME},
+    {"Insert", KEY_INSERT},
+    {"PageDown", KEY_PAGEDOWN},
+    {"PageUp", KEY_PAGEUP},
+    {"ArrowDown", KEY_DOWN},
+    {"ArrowLeft", KEY_LEFT},
+    {"ArrowRight", KEY_RIGHT},
+    {"ArrowUp", KEY_UP},
+
+    {"NumLock", KEY_NUMLOCK},
+    {"Numpad0", KEY_KP0},
+    {"Numpad1", KEY_KP1},
+    {"Numpad2", KEY_KP2},
+    {"Numpad3", KEY_KP3},
+    {"Numpad4", KEY_KP4},
+    {"Numpad5", KEY_KP5},
+    {"Numpad6", KEY_KP6},
+    {"Numpad7", KEY_KP7},
+    {"Numpad8", KEY_KP8},
+    {"Numpad9", KEY_KP9},
+    {"NumpadAdd", KEY_KPPLUS},
+    {"NumpadBackspace", KEY_BACKSPACE},
+    {"NumpadClear", KEY_CLEAR},
+    {"NumpadComma", KEY_KPCOMMA},
+    {"NumpadDecimal", KEY_KPDOT},
+    {"NumpadDivide", KEY_KPSLASH},
+    {"NumpadEnter", KEY_KPENTER},
+    {"NumpadEqual", KEY_KPEQUAL},
+    /*
+    {"NumpadClearEntry", },
+    {"NumpadHash", },
+    {"NumpadMemoryAdd", },
+    {"NumpadMemoryClear", },
+    {"NumpadMemoryRecall", },
+    {"NumpadMemoryStore", },
+    {"NumpadMemorySubtract", },
+    */
+    {"NumpadMultiply", KEY_KPASTERISK},
+    {"NumpadParenLeft", KEY_KPLEFTPAREN},
+    {"NumpadParenRight", KEY_KPRIGHTPAREN},
+    {"NumpadStar", KEY_KPASTERISK},
+    {"NumpadSubtract", KEY_KPMINUS},
+
+    {"Escape", KEY_ESC},
+    {"F1", KEY_F1},
+    {"F2", KEY_F2},
+    {"F3", KEY_F3},
+    {"F4", KEY_F4},
+    {"F5", KEY_F5},
+    {"F6", KEY_F6},
+    {"F7", KEY_F7},
+    {"F8", KEY_F8},
+    {"F9", KEY_F9},
+    {"F10", KEY_F10},
+    {"F11", KEY_F11},
+    {"F12", KEY_F12},
+    {"Fn", KEY_FN},
+    /*{"FnLock", },*/
+    {"PrintScreen", KEY_SYSRQ},
+    {"ScrollLock", KEY_SCROLLLOCK},
+    {"Pause", KEY_PAUSE}};
+
+// Translates a DOM KeyboardEvent.code string (e.g. "KeyA") into the matching
+// Linux input-event keycode; returns 0 for codes without a mapping.
+uint16_t DomKeyCodeToLinux(const std::string& dom_KEY_code) {
+  const auto entry = kDomToLinuxMapping.find(dom_KEY_code);
+  return entry != kDomToLinuxMapping.end() ? entry->second : 0;
+}
diff --git a/host/frontend/webrtc/lib/keyboard.h b/host/frontend/webrtc/libdevice/keyboard.h
similarity index 100%
rename from host/frontend/webrtc/lib/keyboard.h
rename to host/frontend/webrtc/libdevice/keyboard.h
diff --git a/host/frontend/webrtc/libdevice/local_recorder.cpp b/host/frontend/webrtc/libdevice/local_recorder.cpp
new file mode 100644
index 0000000..5d2ed6c
--- /dev/null
+++ b/host/frontend/webrtc/libdevice/local_recorder.cpp
@@ -0,0 +1,285 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/frontend/webrtc/libdevice/local_recorder.h"
+
+#include <atomic>
+#include <chrono>
+#include <list>
+#include <mutex>
+#include <thread>
+#include <vector>
+
+#include <android-base/logging.h>
+#include <api/media_stream_interface.h>
+#include <api/rtp_parameters.h>
+#include <api/task_queue/default_task_queue_factory.h>
+#include <api/video/builtin_video_bitrate_allocator_factory.h>
+#include <api/video_codecs/builtin_video_encoder_factory.h>
+#include <api/video_codecs/video_encoder.h>
+#include <mkvmuxer/mkvmuxer.h>
+#include <mkvmuxer/mkvwriter.h>
+#include <system_wrappers/include/clock.h>
+
+namespace cuttlefish {
+namespace webrtc_streaming {
+
+constexpr double kRtpTicksPerSecond = 90000.;
+constexpr double kRtpTicksPerMs = kRtpTicksPerSecond / 1000.;
+constexpr double kRtpTicksPerUs = kRtpTicksPerMs / 1000.;
+constexpr double kRtpTicksPerNs = kRtpTicksPerUs / 1000.;
+
+class LocalRecorder::Display
+    : public webrtc::EncodedImageCallback
+    , public rtc::VideoSinkInterface<webrtc::VideoFrame> {
+public:
+  Display(LocalRecorder::Impl& impl);
+
+  void EncoderLoop();
+  void Stop();
+
+  // VideoSinkInterface
+  virtual void OnFrame(const webrtc::VideoFrame& frame) override;
+
+  // EncodedImageCallback
+  virtual webrtc::EncodedImageCallback::Result OnEncodedImage(
+      const webrtc::EncodedImage& encoded_image,
+      const webrtc::CodecSpecificInfo* codec_specific_info) override;
+
+  LocalRecorder::Impl& impl_;
+  std::shared_ptr<webrtc::VideoTrackSourceInterface> source_;
+  std::unique_ptr<webrtc::VideoEncoder> video_encoder_;
+  uint64_t video_track_number_;
+
+  // TODO(schuffelen): Use a WebRTC task queue?
+  std::thread encoder_thread_;
+  std::condition_variable encoder_queue_signal_;
+  std::mutex encode_queue_mutex_;
+  std::list<webrtc::VideoFrame> encode_queue_;
+  std::atomic_bool encoder_running_ = true;
+};
+
+class LocalRecorder::Impl {
+public:
+  mkvmuxer::MkvWriter file_writer_;
+  mkvmuxer::Segment segment_;
+  std::unique_ptr<webrtc::VideoEncoderFactory> encoder_factory_;
+  std::mutex mkv_mutex_;
+  std::vector<std::unique_ptr<Display>> displays_;
+};
+
+/* static */
+std::unique_ptr<LocalRecorder> LocalRecorder::Create(
+    const std::string& filename) {
+  std::unique_ptr<Impl> impl(new Impl());
+
+  if (!impl->file_writer_.Open(filename.c_str())) {
+    LOG(ERROR) << "Failed to open \"" << filename << "\" to write a webm";
+    return {};
+  }
+
+  if (!impl->segment_.Init(&impl->file_writer_)) {
+    LOG(ERROR) << "Failed to initialize the mkvmuxer segment";
+    return {};
+  }
+
+  impl->segment_.AccurateClusterDuration(true);
+  impl->segment_.set_estimate_file_duration(true);
+
+  impl->encoder_factory_ = webrtc::CreateBuiltinVideoEncoderFactory();
+  if (!impl->encoder_factory_) {
+    LOG(ERROR) << "Failed to create webRTC built-in video encoder factory";
+    return {};
+  }
+
+  return std::unique_ptr<LocalRecorder>(new LocalRecorder(std::move(impl)));
+}
+
+LocalRecorder::LocalRecorder(std::unique_ptr<LocalRecorder::Impl> impl)
+    : impl_(std::move(impl)) {
+}
+
+LocalRecorder::~LocalRecorder() = default;
+
+void LocalRecorder::AddDisplay(
+    size_t width,
+    size_t height,
+    std::shared_ptr<webrtc::VideoTrackSourceInterface> source) {
+  std::unique_ptr<Display> display(new Display(*impl_));
+  display->source_ = source;
+
+  std::lock_guard lock(impl_->mkv_mutex_);
+  display->video_track_number_ =
+      impl_->segment_.AddVideoTrack(width, height, 0);
+  if (display->video_track_number_ == 0) {
+    LOG(ERROR) << "Failed to add video track to webm muxer";
+    return;
+  }
+
+  display->video_encoder_ =
+      impl_->encoder_factory_->CreateVideoEncoder(webrtc::SdpVideoFormat("VP8"));
+  if (!display->video_encoder_) {
+    LOG(ERROR) << "Could not create vp8 video encoder";
+    return;
+  }
+  auto rc =
+      display->video_encoder_->RegisterEncodeCompleteCallback(display.get());
+  if (rc != 0) {
+    LOG(ERROR) << "Could not register encode complete callback";
+    return;
+  }
+
+  webrtc::VideoCodec codec {};
+  memset(&codec, 0, sizeof(codec));
+  codec.codecType = webrtc::kVideoCodecVP8;
+  codec.width = width;
+  codec.height = height;
+  codec.startBitrate = 1000; // kilobits/sec
+  codec.maxBitrate = 2000;
+  codec.minBitrate = 0;
+  codec.maxFramerate = 60;
+  codec.active = true;
+  codec.qpMax = 56; // kDefaultMaxQp from simulcast_encoder_adapter.cc
+  codec.mode = webrtc::VideoCodecMode::kScreensharing;
+  codec.expect_encode_from_texture = false;
+  *codec.VP8() = webrtc::VideoEncoder::GetDefaultVp8Settings();
+
+  webrtc::VideoEncoder::Capabilities capabilities(false);
+  webrtc::VideoEncoder::Settings settings(capabilities, 1, 1 << 20);
+
+  rc = display->video_encoder_->InitEncode(&codec, settings);
+  if (rc != 0) {
+    LOG(ERROR) << "Failed to InitEncode";
+    return;
+  }
+  // Register the sink only after successful init; earlier returns would dangle
+  source->AddOrUpdateSink(display.get(), rtc::VideoSinkWants{});
+
+  display->encoder_running_ = true;
+  display->encoder_thread_ = std::thread([](Display* display) {
+    display->EncoderLoop();
+  }, display.get());
+
+  impl_->displays_.emplace_back(std::move(display));
+}
+
+void LocalRecorder::Stop() {
+  for (auto& display : impl_->displays_) {
+    display->Stop();
+  }
+  impl_->displays_.clear();
+
+  std::lock_guard lock(impl_->mkv_mutex_);
+  impl_->segment_.Finalize();
+}
+
+LocalRecorder::Display::Display(LocalRecorder::Impl& impl) : impl_(impl) {
+}
+
+void LocalRecorder::Display::OnFrame(const webrtc::VideoFrame& frame) {
+  std::lock_guard queue_lock(encode_queue_mutex_);
+  static constexpr size_t kMaxQueuedFrames = 10;
+  if (encode_queue_.size() >= kMaxQueuedFrames) {
+    LOG(VERBOSE) << "Dropped frame, encoder queue too long";
+    return;
+  }
+  encode_queue_.push_back(frame);
+  encoder_queue_signal_.notify_one();
+}
+
+void LocalRecorder::Display::EncoderLoop() {
+  int frames_since_keyframe = 0;
+  std::chrono::time_point<std::chrono::steady_clock> start_timestamp;
+  auto last_keyframe_time = std::chrono::steady_clock::now();
+  while (encoder_running_) {
+    std::unique_ptr<webrtc::VideoFrame> frame;
+    {
+      std::unique_lock queue_lock(encode_queue_mutex_);
+      while (encode_queue_.size() == 0 && encoder_running_) {
+        encoder_queue_signal_.wait(queue_lock);
+      }
+      if (!encoder_running_) {
+        break;
+      }
+      frame = std::make_unique<webrtc::VideoFrame>(
+          std::move(encode_queue_.front()));
+      encode_queue_.pop_front();
+    }
+
+    auto now = std::chrono::steady_clock::now();
+    if (start_timestamp.time_since_epoch().count() == 0) {
+      start_timestamp = now;
+    }
+    auto timestamp_diff =
+        std::chrono::duration_cast<std::chrono::microseconds>(
+              now - start_timestamp);
+    frame->set_timestamp_us(timestamp_diff.count());
+    frame->set_timestamp(timestamp_diff.count() * kRtpTicksPerUs);
+
+    std::vector<webrtc::VideoFrameType> types;
+    auto time_since_keyframe = now - last_keyframe_time;
+    const auto min_keyframe_time = std::chrono::seconds(10);
+    if (++frames_since_keyframe > 60 || time_since_keyframe > min_keyframe_time) {
+      last_keyframe_time = now;
+      frames_since_keyframe = 0;
+      types.push_back(webrtc::VideoFrameType::kVideoFrameKey);
+    } else {
+      types.push_back(webrtc::VideoFrameType::kVideoFrameDelta);
+    }
+    auto rc = video_encoder_->Encode(*frame, &types);
+    if (rc != 0) {
+      LOG(ERROR) << "Failed to encode frame";
+    }
+  }
+}
+
+void LocalRecorder::Display::Stop() {
+  encoder_running_ = false;
+  encoder_queue_signal_.notify_all();
+  if (encoder_thread_.joinable()) {
+    encoder_thread_.join();
+  }
+}
+
+webrtc::EncodedImageCallback::Result LocalRecorder::Display::OnEncodedImage(
+    const webrtc::EncodedImage& encoded_image,
+    const webrtc::CodecSpecificInfo* codec_specific_info) {
+  uint64_t timestamp = encoded_image.Timestamp() / kRtpTicksPerNs;
+
+  std::lock_guard<std::mutex> lock(impl_.mkv_mutex_);  // named, so it stays held
+
+  bool is_key =
+      encoded_image._frameType == webrtc::VideoFrameType::kVideoFrameKey;
+  bool success = impl_.segment_.AddFrame(
+      encoded_image.data(),
+      encoded_image.size(),
+      video_track_number_,
+      timestamp,
+      is_key);
+
+  webrtc::EncodedImageCallback::Result result(
+      success
+          ? webrtc::EncodedImageCallback::Result::Error::OK
+          : webrtc::EncodedImageCallback::Result::Error::ERROR_SEND_FAILED);
+
+  if (success) {
+    result.frame_id = encoded_image.Timestamp();
+  }
+  return result;
+}
+
+} // namespace webrtc_streaming
+} // namespace cuttlefish
+
diff --git a/host/frontend/webrtc/lib/local_recorder.h b/host/frontend/webrtc/libdevice/local_recorder.h
similarity index 100%
rename from host/frontend/webrtc/lib/local_recorder.h
rename to host/frontend/webrtc/libdevice/local_recorder.h
diff --git a/host/frontend/webrtc/libdevice/server_connection.cpp b/host/frontend/webrtc/libdevice/server_connection.cpp
new file mode 100644
index 0000000..5da692e
--- /dev/null
+++ b/host/frontend/webrtc/libdevice/server_connection.cpp
@@ -0,0 +1,609 @@
+//
+// Copyright (C) 2020 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+//
+
+#include "host/frontend/webrtc/libdevice/server_connection.h"
+
+#include <android-base/logging.h>
+#include <libwebsockets.h>
+
+#include "common/libs/fs/shared_fd.h"
+#include "common/libs/fs/shared_select.h"
+#include "common/libs/utils/files.h"
+
+namespace cuttlefish {
+namespace webrtc_streaming {
+
+// ServerConnection over Unix socket
+class UnixServerConnection : public ServerConnection {
+ public:
+  UnixServerConnection(const std::string& addr,
+                       std::weak_ptr<ServerConnectionObserver> observer);
+  ~UnixServerConnection() override;
+
+  bool Send(const Json::Value& msg) override;
+
+ private:
+  void Connect() override;
+  void StopThread();
+  void ReadLoop();
+
+  const std::string addr_;
+  SharedFD conn_;
+  std::mutex write_mtx_;
+  std::weak_ptr<ServerConnectionObserver> observer_;
+  // The event fd must be declared before the thread to ensure it's initialized
+  // before the thread starts and is safe to be accessed from it.
+  SharedFD thread_notifier_;
+  std::atomic_bool running_ = false;
+  std::thread thread_;
+};
+
+// ServerConnection using websockets
+class WsConnectionContext;
+
+class WsConnection : public std::enable_shared_from_this<WsConnection> {
+ public:
+  struct CreateConnectionSul {
+    lws_sorted_usec_list_t sul = {};
+    std::weak_ptr<WsConnection> weak_this;
+  };
+
+  WsConnection(int port, const std::string& addr, const std::string& path,
+               ServerConfig::Security secure,
+               std::weak_ptr<ServerConnectionObserver> observer,
+               std::shared_ptr<WsConnectionContext> context);
+
+  ~WsConnection();
+
+  void Connect();
+  bool Send(const Json::Value& msg);
+
+  void ConnectInner();
+
+  void OnError(const std::string& error);
+  void OnReceive(const uint8_t* data, size_t len, bool is_binary);
+  void OnOpen();
+  void OnClose();
+  void OnWriteable();
+
+ private:
+  struct WsBuffer {
+    WsBuffer() = default;
+    WsBuffer(const uint8_t* data, size_t len, bool binary)
+        : buffer_(LWS_PRE + len), is_binary_(binary) {
+      memcpy(&buffer_[LWS_PRE], data, len);
+    }
+
+    uint8_t* data() { return &buffer_[LWS_PRE]; }
+    bool is_binary() const { return is_binary_; }
+    size_t size() const { return buffer_.size() - LWS_PRE; }
+
+   private:
+    std::vector<uint8_t> buffer_;
+    bool is_binary_;
+  };
+  bool Send(const uint8_t* data, size_t len, bool binary = false);
+
+  CreateConnectionSul extended_sul_;
+  struct lws* wsi_ = nullptr;  // remains null until ConnectInner() succeeds
+  const int port_;
+  const std::string addr_;
+  const std::string path_;
+  const ServerConfig::Security security_;
+
+  std::weak_ptr<ServerConnectionObserver> observer_;
+
+  // each element contains the data to be sent and whether it's binary or not
+  std::deque<WsBuffer> write_queue_;
+  std::mutex write_queue_mutex_;
+  // The connection object should not outlive the context object. This reference
+  // guarantees it.
+  std::shared_ptr<WsConnectionContext> context_;
+};
+
+class WsConnectionContext
+    : public std::enable_shared_from_this<WsConnectionContext> {
+ public:
+  static std::shared_ptr<WsConnectionContext> Create();
+
+  WsConnectionContext(struct lws_context* lws_ctx);
+  ~WsConnectionContext();
+
+  std::unique_ptr<ServerConnection> CreateConnection(
+      int port, const std::string& addr, const std::string& path,
+      ServerConfig::Security secure,
+      std::weak_ptr<ServerConnectionObserver> observer);
+
+  void RememberConnection(void*, std::weak_ptr<WsConnection>);
+  void ForgetConnection(void*);
+  std::shared_ptr<WsConnection> GetConnection(void*);
+
+  struct lws_context* lws_context() {
+    return lws_context_;
+  }
+
+ private:
+  void Start();
+
+  std::map<void*, std::weak_ptr<WsConnection>> weak_by_ptr_;
+  std::mutex map_mutex_;
+  struct lws_context* lws_context_;
+  std::thread message_loop_;
+};
+
+std::unique_ptr<ServerConnection> ServerConnection::Connect(
+    const ServerConfig& conf,
+    std::weak_ptr<ServerConnectionObserver> observer) {
+  std::unique_ptr<ServerConnection> ret;
+  // If the provided address points to an existing UNIX socket in the file
+  // system connect to it, otherwise assume it's a network address and connect
+  // using websockets
+  if (FileIsSocket(conf.addr)) {
+    ret.reset(new UnixServerConnection(conf.addr, observer));
+  } else {
+    // This can be a local variable since the ws connection will keep a
+    // reference to it.
+    auto ws_context = WsConnectionContext::Create();
+    CHECK(ws_context) << "Failed to create websocket context";
+    ret = ws_context->CreateConnection(conf.port, conf.addr, conf.path,
+                                       conf.security, observer);
+  }
+  ret->Connect();
+  return ret;
+}
+
+void ServerConnection::Reconnect() { Connect(); }
+
+// UnixServerConnection implementation
+
+UnixServerConnection::UnixServerConnection(
+    const std::string& addr, std::weak_ptr<ServerConnectionObserver> observer)
+    : addr_(addr), observer_(observer) {}
+
+UnixServerConnection::~UnixServerConnection() {
+  StopThread();
+}
+
+bool UnixServerConnection::Send(const Json::Value& msg) {
+  Json::StreamWriterBuilder factory;
+  auto str = Json::writeString(factory, msg);
+  std::lock_guard<std::mutex> lock(write_mtx_);
+  auto res =
+      conn_->Send(reinterpret_cast<const uint8_t*>(str.c_str()), str.size(), 0);
+  if (res < 0) {
+    LOG(ERROR) << "Failed to send data to signaling server: "
+               << conn_->StrError();
+    // Don't call OnError() here, the receiving thread probably did it already
+    // or is about to do it.
+  }
+  // A SOCK_SEQPACKET unix socket will send the entire message or fail, but it
+  // won't send a partial message.
+  return res == str.size();
+}
+
+void UnixServerConnection::Connect() {
+  // The thread could be running if this is a Reconnect
+  StopThread();
+
+  conn_ = SharedFD::SocketLocalClient(addr_, false, SOCK_SEQPACKET);
+  if (!conn_->IsOpen()) {
+    LOG(ERROR) << "Failed to connect to unix socket: " << conn_->StrError();
+    if (auto o = observer_.lock(); o) {
+      o->OnError("Failed to connect to unix socket");
+    }
+    return;
+  }
+  thread_notifier_ = SharedFD::Event();
+  if (!thread_notifier_->IsOpen()) {
+    LOG(ERROR) << "Failed to create eventfd for background thread: "
+               << thread_notifier_->StrError();
+    if (auto o = observer_.lock(); o) {
+      o->OnError("Failed to create eventfd for background thread");
+    }
+    return;
+  }
+  if (auto o = observer_.lock(); o) {
+    o->OnOpen();
+  }
+  // Start the thread
+  running_ = true;
+  thread_ = std::thread([this](){ReadLoop();});
+}
+
+void UnixServerConnection::StopThread() {
+  running_ = false;
+  if (!thread_notifier_->IsOpen()) {
+    // The thread won't be running if this isn't open
+    return;
+  }
+  if (thread_notifier_->EventfdWrite(1) < 0) {
+    LOG(ERROR) << "Failed to notify background thread, this thread may block";
+  }
+  if (thread_.joinable()) {
+    thread_.join();
+  }
+}
+
+void UnixServerConnection::ReadLoop() {
+  if (!thread_notifier_->IsOpen()) {
+    LOG(ERROR) << "The UnixServerConnection's background thread is unable to "
+                  "receive notifications so it can't run";
+    return;
+  }
+  std::vector<uint8_t> buffer(4096, 0);
+  while (running_) {
+    SharedFDSet rset;
+    rset.Set(thread_notifier_);
+    rset.Set(conn_);
+    auto res = Select(&rset, nullptr, nullptr, nullptr);
+    if (res < 0) {
+      LOG(ERROR) << "Failed to select from background thread";
+      break;
+    }
+    if (rset.IsSet(thread_notifier_)) {
+      eventfd_t val;
+      auto res = thread_notifier_->EventfdRead(&val);
+      if (res < 0) {
+        LOG(ERROR) << "Error reading from event fd: "
+                   << thread_notifier_->StrError();
+        break;
+      }
+    }
+    if (rset.IsSet(conn_)) {
+      auto size = conn_->Recv(buffer.data(), 0, MSG_TRUNC | MSG_PEEK);
+      if (size > static_cast<decltype(size)>(buffer.size())) {
+        // Enlarge enough to accommodate size bytes and be a multiple of 4096
+        auto new_size = (size + 4095) & ~4095;
+        buffer.resize(new_size);
+      }
+      auto res = conn_->Recv(buffer.data(), buffer.size(), MSG_TRUNC);
+      if (res < 0) {
+        LOG(ERROR) << "Failed to read from server: " << conn_->StrError();
+        if (auto observer = observer_.lock(); observer) {
+          observer->OnError(conn_->StrError());
+        }
+        return;
+      }
+      if (res == 0) {
+        auto observer = observer_.lock();
+        if (observer) {
+          observer->OnClose();
+        }
+        break;
+      }
+      auto observer = observer_.lock();
+      if (observer) {
+        observer->OnReceive(buffer.data(), res, false);
+      }
+    }
+  }
+}
+
+// WsConnection implementation
+
+int LwsCallback(struct lws* wsi, enum lws_callback_reasons reason, void* user,
+                void* in, size_t len);
+void CreateConnectionCallback(lws_sorted_usec_list_t* sul);
+
+namespace {
+
+constexpr char kProtocolName[] = "cf-webrtc-device";
+constexpr int kBufferSize = 65536;
+
+const uint32_t backoff_ms[] = {1000, 2000, 3000, 4000, 5000};
+
+const lws_retry_bo_t kRetry = {
+    .retry_ms_table = backoff_ms,
+    .retry_ms_table_count = LWS_ARRAY_SIZE(backoff_ms),
+    .conceal_count = LWS_ARRAY_SIZE(backoff_ms),
+
+    .secs_since_valid_ping = 3,    /* force PINGs after secs idle */
+    .secs_since_valid_hangup = 10, /* hangup after secs idle */
+
+    .jitter_percent = 20,
+};
+
+const struct lws_protocols kProtocols[2] = {
+    {kProtocolName, LwsCallback, 0, kBufferSize, 0, NULL, 0},
+    {NULL, NULL, 0, 0, 0, NULL, 0}};
+
+}  // namespace
+
+std::shared_ptr<WsConnectionContext> WsConnectionContext::Create() {
+  struct lws_context_creation_info context_info = {};
+  context_info.port = CONTEXT_PORT_NO_LISTEN;
+  context_info.options = LWS_SERVER_OPTION_DO_SSL_GLOBAL_INIT;
+  context_info.protocols = kProtocols;
+  struct lws_context* lws_ctx = lws_create_context(&context_info);
+  if (!lws_ctx) {
+    return nullptr;
+  }
+  return std::shared_ptr<WsConnectionContext>(new WsConnectionContext(lws_ctx));
+}
+
+WsConnectionContext::WsConnectionContext(struct lws_context* lws_ctx)
+    : lws_context_(lws_ctx) {
+  Start();
+}
+
+WsConnectionContext::~WsConnectionContext() {
+  lws_context_destroy(lws_context_);
+  if (message_loop_.joinable()) {
+    message_loop_.join();
+  }
+}
+
+void WsConnectionContext::Start() {
+  message_loop_ = std::thread([this]() {
+    for (;;) {
+      if (lws_service(lws_context_, 0) < 0) {
+        break;
+      }
+    }
+  });
+}
+
+// This wrapper is needed because the ServerConnection objects are meant to be
+// referenced by std::unique_ptr but WsConnection needs to be referenced by
+// std::shared_ptr because it's also (weakly) referenced by the websocket
+// thread.
+class WsConnectionWrapper : public ServerConnection {
+ public:
+  WsConnectionWrapper(std::shared_ptr<WsConnection> conn) : conn_(conn) {}
+
+  bool Send(const Json::Value& msg) override { return conn_->Send(msg); }
+
+ private:
+  void Connect() override { return conn_->Connect(); }
+  std::shared_ptr<WsConnection> conn_;
+};
+
+std::unique_ptr<ServerConnection> WsConnectionContext::CreateConnection(
+    int port, const std::string& addr, const std::string& path,
+    ServerConfig::Security security,
+    std::weak_ptr<ServerConnectionObserver> observer) {
+  return std::unique_ptr<ServerConnection>(
+      new WsConnectionWrapper(std::make_shared<WsConnection>(
+          port, addr, path, security, observer, shared_from_this())));
+}
+
+std::shared_ptr<WsConnection> WsConnectionContext::GetConnection(void* raw) {
+  std::shared_ptr<WsConnection> connection;
+  {
+    std::lock_guard<std::mutex> lock(map_mutex_);
+    if (weak_by_ptr_.count(raw) == 0) {
+      return nullptr;
+    }
+    connection = weak_by_ptr_[raw].lock();
+    if (!connection) {
+      weak_by_ptr_.erase(raw);
+    }
+  }
+  return connection;
+}
+
+void WsConnectionContext::RememberConnection(void* raw,
+                                             std::weak_ptr<WsConnection> conn) {
+  std::lock_guard<std::mutex> lock(map_mutex_);
+  weak_by_ptr_.emplace(
+      std::pair<void*, std::weak_ptr<WsConnection>>(raw, conn));
+}
+
+void WsConnectionContext::ForgetConnection(void* raw) {
+  std::lock_guard<std::mutex> lock(map_mutex_);
+  weak_by_ptr_.erase(raw);
+}
+
+WsConnection::WsConnection(int port, const std::string& addr,
+                           const std::string& path,
+                           ServerConfig::Security security,
+                           std::weak_ptr<ServerConnectionObserver> observer,
+                           std::shared_ptr<WsConnectionContext> context)
+    : port_(port),
+      addr_(addr),
+      path_(path),
+      security_(security),
+      observer_(observer),
+      context_(context) {}
+
+WsConnection::~WsConnection() {
+  context_->ForgetConnection(this);
+  // This will cause the callback to be called which will drop the connection
+  // after seeing the context doesn't remember this object
+  if (wsi_) lws_callback_on_writable(wsi_);
+}
+
+void WsConnection::Connect() {
+  memset(&extended_sul_.sul, 0, sizeof(extended_sul_.sul));
+  extended_sul_.weak_this = weak_from_this();
+  lws_sul_schedule(context_->lws_context(), 0, &extended_sul_.sul,
+                   CreateConnectionCallback, 1);
+}
+
+void WsConnection::OnError(const std::string& error) {
+  auto observer = observer_.lock();
+  if (observer) {
+    observer->OnError(error);
+  }
+}
+void WsConnection::OnReceive(const uint8_t* data, size_t len, bool is_binary) {
+  auto observer = observer_.lock();
+  if (observer) {
+    observer->OnReceive(data, len, is_binary);
+  }
+}
+void WsConnection::OnOpen() {
+  auto observer = observer_.lock();
+  if (observer) {
+    observer->OnOpen();
+  }
+}
+void WsConnection::OnClose() {
+  auto observer = observer_.lock();
+  if (observer) {
+    observer->OnClose();
+  }
+}
+
+void WsConnection::OnWriteable() {
+  WsBuffer buffer;
+  {
+    std::lock_guard<std::mutex> lock(write_queue_mutex_);
+    if (write_queue_.size() == 0) {
+      return;
+    }
+    buffer = std::move(write_queue_.front());
+    write_queue_.pop_front();
+  }
+  auto flags = lws_write_ws_flags(
+      buffer.is_binary() ? LWS_WRITE_BINARY : LWS_WRITE_TEXT, true, true);
+  auto res = lws_write(wsi_, buffer.data(), buffer.size(),
+                       (enum lws_write_protocol)flags);
+  if (res != buffer.size()) {
+    LOG(WARNING) << "Unable to send the entire message!";
+  }
+}
+
+bool WsConnection::Send(const Json::Value& msg) {
+  Json::StreamWriterBuilder factory;
+  auto str = Json::writeString(factory, msg);
+  return Send(reinterpret_cast<const uint8_t*>(str.c_str()), str.size());
+}
+
+bool WsConnection::Send(const uint8_t* data, size_t len, bool binary) {
+  if (!wsi_) {
+    LOG(WARNING) << "Send called on an uninitialized connection!!";
+    return false;
+  }
+  WsBuffer buffer(data, len, binary);
+  {
+    std::lock_guard<std::mutex> lock(write_queue_mutex_);
+    write_queue_.emplace_back(std::move(buffer));
+  }
+
+  lws_callback_on_writable(wsi_);
+  return true;
+}
+
+int LwsCallback(struct lws* wsi, enum lws_callback_reasons reason, void* user,
+                void* in, size_t len) {
+  constexpr int DROP = -1;
+  constexpr int OK = 0;
+
+  // For some values of `reason`, `user` doesn't point to the value provided
+  // when the connection was created. This function object should be used with
+  // care.
+  auto with_connection =
+      [wsi, user](std::function<void(std::shared_ptr<WsConnection>)> cb) {
+        auto context = reinterpret_cast<WsConnectionContext*>(user);
+        auto connection = context->GetConnection(wsi);
+        if (!connection) {
+          return DROP;
+        }
+        cb(connection);
+        return OK;
+      };
+
+  switch (reason) {
+    case LWS_CALLBACK_CLIENT_CONNECTION_ERROR:
+      return with_connection([in](std::shared_ptr<WsConnection> connection) {
+        connection->OnError(in ? (char*)in : "(null)");
+      });
+
+    case LWS_CALLBACK_CLIENT_RECEIVE:
+      return with_connection(
+          [in, len, wsi](std::shared_ptr<WsConnection> connection) {
+            connection->OnReceive((const uint8_t*)in, len,
+                                  lws_frame_is_binary(wsi));
+          });
+
+    case LWS_CALLBACK_CLIENT_ESTABLISHED:
+      return with_connection([](std::shared_ptr<WsConnection> connection) {
+        connection->OnOpen();
+      });
+
+    case LWS_CALLBACK_CLIENT_CLOSED:
+      return with_connection([](std::shared_ptr<WsConnection> connection) {
+        connection->OnClose();
+      });
+
+    case LWS_CALLBACK_CLIENT_WRITEABLE:
+      return with_connection([](std::shared_ptr<WsConnection> connection) {
+        connection->OnWriteable();
+      });
+
+    default:
+      LOG(VERBOSE) << "Unhandled value: " << reason;
+      return lws_callback_http_dummy(wsi, reason, user, in, len);
+  }
+}
+
+void CreateConnectionCallback(lws_sorted_usec_list_t* sul) {
+  std::shared_ptr<WsConnection> connection =
+      reinterpret_cast<WsConnection::CreateConnectionSul*>(sul)
+          ->weak_this.lock();
+  if (!connection) {
+    LOG(WARNING) << "The object was already destroyed by the time of the first "
+                 << "connection attempt. That's unusual.";
+    return;
+  }
+  connection->ConnectInner();
+}
+
+void WsConnection::ConnectInner() {
+  struct lws_client_connect_info connect_info;
+
+  memset(&connect_info, 0, sizeof(connect_info));
+
+  connect_info.context = context_->lws_context();
+  connect_info.port = port_;
+  connect_info.address = addr_.c_str();
+  connect_info.path = path_.c_str();
+  connect_info.host = connect_info.address;
+  connect_info.origin = connect_info.address;
+  switch (security_) {
+    case ServerConfig::Security::kAllowSelfSigned:
+      connect_info.ssl_connection = LCCSCF_ALLOW_SELFSIGNED |
+                                    LCCSCF_SKIP_SERVER_CERT_HOSTNAME_CHECK |
+                                    LCCSCF_USE_SSL;
+      break;
+    case ServerConfig::Security::kStrict:
+      connect_info.ssl_connection = LCCSCF_USE_SSL;
+      break;
+    case ServerConfig::Security::kInsecure:
+      connect_info.ssl_connection = 0;
+      break;
+  }
+  connect_info.protocol = "webrtc-operator";
+  connect_info.local_protocol_name = kProtocolName;
+  connect_info.pwsi = &wsi_;
+  connect_info.retry_and_idle_policy = &kRetry;
+  // There is no guarantee the connection object still exists when the callback
+  // is called. Put the context instead as the user data which is guaranteed to
+  // still exist and holds a weak ptr to the connection.
+  connect_info.userdata = context_.get();
+
+  if (lws_client_connect_via_info(&connect_info)) {
+    // wsi_ is not initialized until after the call to
+    // lws_client_connect_via_info(). Luckily, this is guaranteed to run before
+    // the protocol callback is called because it runs in the same loop.
+    context_->RememberConnection(wsi_, weak_from_this());
+  } else {
+    LOG(ERROR) << "Connection failed!";
+  }
+}
+
+}  // namespace webrtc_streaming
+}  // namespace cuttlefish
diff --git a/host/frontend/webrtc/libdevice/server_connection.h b/host/frontend/webrtc/libdevice/server_connection.h
new file mode 100644
index 0000000..5b00c80
--- /dev/null
+++ b/host/frontend/webrtc/libdevice/server_connection.h
@@ -0,0 +1,90 @@
+//
+// Copyright (C) 2020 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+//
+
+#pragma once
+
+#include <string.h>
+
+#include <deque>
+#include <functional>
+#include <map>
+#include <memory>
+#include <mutex>
+#include <string>
+#include <thread>
+#include <vector>
+
+#include <json/json.h>
+#include <libwebsockets.h>
+
+namespace cuttlefish {
+namespace webrtc_streaming {
+
+struct ServerConfig {
+  enum class Security {
+    kInsecure,
+    kAllowSelfSigned,
+    kStrict,
+  };
+
+  // The ip address or domain name of the operator server.
+  std::string addr;
+  int port;
+  // The path component of the operator server's register url.
+  std::string path;
+  // The security level to use when connecting to the operator server.
+  Security security;
+};
+
+class ServerConnectionObserver {
+ public:
+  virtual ~ServerConnectionObserver() = default;
+  // Called when the connection to the server has been established. This is the
+  // cue to start using Send().
+  virtual void OnOpen() = 0;
+  virtual void OnClose() = 0;
+  // Called when the connection to the server has failed with an unrecoverable
+  // error.
+  virtual void OnError(const std::string& error) = 0;
+  virtual void OnReceive(const uint8_t* msg, size_t length, bool is_binary) = 0;
+};
+
+// Represents a connection to the signaling server. When a connection is created
+// it connects with the server automatically but sends no info.
+// Only Send() can be called from multiple threads simultaneously. Reconnect(),
+// Send() and the destructor will run into race conditions if called
+// concurrently.
+class ServerConnection {
+ public:
+  static std::unique_ptr<ServerConnection> Connect(
+      const ServerConfig& conf,
+      std::weak_ptr<ServerConnectionObserver> observer);
+
+  // Destroying the connection will disconnect from the signaling server and
+  // release any open fds.
+  virtual ~ServerConnection() = default;
+
+  // Sends data to the server encoded as JSON.
+  virtual bool Send(const Json::Value&) = 0;
+
+  // makes re-connect request
+  virtual void Reconnect();
+
+ private:
+  virtual void Connect() = 0;
+};
+
+}  // namespace webrtc_streaming
+}  // namespace cuttlefish
diff --git a/host/frontend/webrtc/libdevice/streamer.cpp b/host/frontend/webrtc/libdevice/streamer.cpp
new file mode 100644
index 0000000..44fde4e
--- /dev/null
+++ b/host/frontend/webrtc/libdevice/streamer.cpp
@@ -0,0 +1,694 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/frontend/webrtc/libdevice/streamer.h"
+
+#include <android-base/logging.h>
+#include <json/json.h>
+
+#include <api/audio_codecs/audio_decoder_factory.h>
+#include <api/audio_codecs/audio_encoder_factory.h>
+#include <api/audio_codecs/builtin_audio_decoder_factory.h>
+#include <api/audio_codecs/builtin_audio_encoder_factory.h>
+#include <api/create_peerconnection_factory.h>
+#include <api/peer_connection_interface.h>
+#include <api/video_codecs/builtin_video_decoder_factory.h>
+#include <api/video_codecs/builtin_video_encoder_factory.h>
+#include <api/video_codecs/video_decoder_factory.h>
+#include <api/video_codecs/video_encoder_factory.h>
+#include <media/base/video_broadcaster.h>
+#include <pc/video_track_source.h>
+
+#include "host/frontend/webrtc/libcommon/audio_device.h"
+#include "host/frontend/webrtc/libcommon/peer_connection_utils.h"
+#include "host/frontend/webrtc/libcommon/port_range_socket_factory.h"
+#include "host/frontend/webrtc/libcommon/utils.h"
+#include "host/frontend/webrtc/libcommon/vp8only_encoder_factory.h"
+#include "host/frontend/webrtc/libdevice/audio_track_source_impl.h"
+#include "host/frontend/webrtc/libdevice/camera_streamer.h"
+#include "host/frontend/webrtc/libdevice/client_handler.h"
+#include "host/frontend/webrtc/libdevice/video_track_source_impl.h"
+#include "host/frontend/webrtc_operator/constants/signaling_constants.h"
+
+namespace cuttlefish {
+namespace webrtc_streaming {
+namespace {
+
+constexpr auto kStreamIdField = "stream_id";
+constexpr auto kXResField = "x_res";
+constexpr auto kYResField = "y_res";
+constexpr auto kDpiField = "dpi";
+constexpr auto kIsTouchField = "is_touch";
+constexpr auto kDisplaysField = "displays";
+constexpr auto kAudioStreamsField = "audio_streams";
+constexpr auto kHardwareField = "hardware";
+constexpr auto kControlPanelButtonCommand = "command";
+constexpr auto kControlPanelButtonTitle = "title";
+constexpr auto kControlPanelButtonIconName = "icon_name";
+constexpr auto kControlPanelButtonShellCommand = "shell_command";
+constexpr auto kControlPanelButtonDeviceStates = "device_states";
+constexpr auto kControlPanelButtonLidSwitchOpen = "lid_switch_open";
+constexpr auto kControlPanelButtonHingeAngleValue = "hinge_angle_value";
+constexpr auto kCustomControlPanelButtonsField = "custom_control_panel_buttons";
+
+constexpr int kRegistrationRetries = 3;
+constexpr int kRetryFirstIntervalMs = 1000;
+constexpr int kReconnectRetries = 100;
+constexpr int kReconnectIntervalMs = 1000;
+
+bool ParseMessage(const uint8_t* data, size_t length, Json::Value* msg_out) {
+  auto str = reinterpret_cast<const char*>(data);
+  Json::CharReaderBuilder builder;
+  std::unique_ptr<Json::CharReader> json_reader(builder.newCharReader());
+  std::string errorMessage;
+  return json_reader->parse(str, str + length, msg_out, &errorMessage);
+}
+
+struct DisplayDescriptor {
+  int width;
+  int height;
+  int dpi;
+  bool touch_enabled;
+  rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source;
+};
+
+struct ControlPanelButtonDescriptor {
+  std::string command;
+  std::string title;
+  std::string icon_name;
+  std::optional<std::string> shell_command;
+  std::vector<DeviceState> device_states;
+};
+
+// TODO (jemoreira): move to a place in common with the signaling server
+struct OperatorServerConfig {
+  std::vector<webrtc::PeerConnectionInterface::IceServer> servers;
+};
+
+// Wraps a scoped_refptr pointer to an audio device module
+class AudioDeviceModuleWrapper : public AudioSource {
+ public:
+  AudioDeviceModuleWrapper(
+      rtc::scoped_refptr<CfAudioDeviceModule> device_module)
+      : device_module_(device_module) {}
+  int GetMoreAudioData(void* data, int bytes_per_sample,
+                       int samples_per_channel, int num_channels,
+                       int sample_rate, bool& muted) override {
+    return device_module_->GetMoreAudioData(data, bytes_per_sample,
+                                            samples_per_channel, num_channels,
+                                            sample_rate, muted);
+  }
+
+  rtc::scoped_refptr<CfAudioDeviceModule> device_module() {
+    return device_module_;
+  }
+
+ private:
+  rtc::scoped_refptr<CfAudioDeviceModule> device_module_;
+};
+
+}  // namespace
+
+
+class Streamer::Impl : public ServerConnectionObserver,
+                       public PeerConnectionBuilder,
+                       public std::enable_shared_from_this<ServerConnectionObserver> {
+ public:
+  std::shared_ptr<ClientHandler> CreateClientHandler(int client_id);
+
+  void Register(std::weak_ptr<OperatorObserver> observer);
+
+  void SendMessageToClient(int client_id, const Json::Value& msg);
+  void DestroyClientHandler(int client_id);
+  void SetupCameraForClient(int client_id);
+
+  // WsObserver
+  void OnOpen() override;
+  void OnClose() override;
+  void OnError(const std::string& error) override;
+  void OnReceive(const uint8_t* msg, size_t length, bool is_binary) override;
+
+  void HandleConfigMessage(const Json::Value& msg);
+  void HandleClientMessage(const Json::Value& server_message);
+
+  // PeerConnectionBuilder
+  Result<rtc::scoped_refptr<webrtc::PeerConnectionInterface>> Build(
+      webrtc::PeerConnectionObserver& observer,
+      const std::vector<webrtc::PeerConnectionInterface::IceServer>&
+          per_connection_servers) override;
+
+  // All accesses to these variables happen from the signal_thread, so there is
+  // no need for extra synchronization mechanisms (mutex)
+  StreamerConfig config_;
+  OperatorServerConfig operator_config_;
+  std::unique_ptr<ServerConnection> server_connection_;
+  std::shared_ptr<ConnectionObserverFactory> connection_observer_factory_;
+  rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface>
+      peer_connection_factory_;
+  std::unique_ptr<rtc::Thread> network_thread_;
+  std::unique_ptr<rtc::Thread> worker_thread_;
+  std::unique_ptr<rtc::Thread> signal_thread_;
+  std::map<std::string, DisplayDescriptor> displays_;
+  std::map<std::string, rtc::scoped_refptr<AudioTrackSourceImpl>>
+      audio_sources_;
+  std::map<int, std::shared_ptr<ClientHandler>> clients_;
+  std::weak_ptr<OperatorObserver> operator_observer_;
+  std::map<std::string, std::string> hardware_;
+  std::vector<ControlPanelButtonDescriptor> custom_control_panel_buttons_;
+  std::shared_ptr<AudioDeviceModuleWrapper> audio_device_module_;
+  std::unique_ptr<CameraStreamer> camera_streamer_;
+  int registration_retries_left_ = kRegistrationRetries;
+  int retry_interval_ms_ = kRetryFirstIntervalMs;
+  LocalRecorder* recorder_ = nullptr;
+};
+
+Streamer::Streamer(std::unique_ptr<Streamer::Impl> impl)
+    : impl_(std::move(impl)) {}
+
+/* static */
+std::unique_ptr<Streamer> Streamer::Create(
+    const StreamerConfig& cfg, LocalRecorder* recorder,
+    std::shared_ptr<ConnectionObserverFactory> connection_observer_factory) {
+  rtc::LogMessage::LogToDebug(rtc::LS_ERROR);
+
+  std::unique_ptr<Streamer::Impl> impl(new Streamer::Impl());
+  impl->config_ = cfg;
+  impl->recorder_ = recorder;
+  impl->connection_observer_factory_ = connection_observer_factory;
+
+  auto network_thread_result = CreateAndStartThread("network-thread");
+  if (!network_thread_result.ok()) {
+    LOG(ERROR) << network_thread_result.error().Trace();
+    return nullptr;
+  }
+  impl->network_thread_ = std::move(*network_thread_result);
+
+  auto worker_thread_result = CreateAndStartThread("worker-thread");
+  if (!worker_thread_result.ok()) {
+    LOG(ERROR) << worker_thread_result.error().Trace();
+    return nullptr;
+  }
+  impl->worker_thread_ = std::move(*worker_thread_result);
+
+  auto signal_thread_result = CreateAndStartThread("signal-thread");
+  if (!signal_thread_result.ok()) {
+    LOG(ERROR) << signal_thread_result.error().Trace();
+    return nullptr;
+  }
+  impl->signal_thread_ = std::move(*signal_thread_result);
+
+  impl->audio_device_module_ = std::make_shared<AudioDeviceModuleWrapper>(
+      rtc::scoped_refptr<CfAudioDeviceModule>(
+          new rtc::RefCountedObject<CfAudioDeviceModule>()));
+
+  auto result = CreatePeerConnectionFactory(
+      impl->network_thread_.get(), impl->worker_thread_.get(),
+      impl->signal_thread_.get(), impl->audio_device_module_->device_module());
+
+  if (!result.ok()) {
+    LOG(ERROR) << result.error().Trace();
+    return nullptr;
+  }
+  impl->peer_connection_factory_ = *result;
+
+  return std::unique_ptr<Streamer>(new Streamer(std::move(impl)));
+}
+
+std::shared_ptr<VideoSink> Streamer::AddDisplay(const std::string& label,
+                                                int width, int height, int dpi,
+                                                bool touch_enabled) {
+  // Usually called from an application thread
+  return impl_->signal_thread_->BlockingCall(
+      [this, &label, width, height, dpi,
+       touch_enabled]() -> std::shared_ptr<VideoSink> {
+        if (impl_->displays_.count(label)) {
+          LOG(ERROR) << "Display with same label already exists: " << label;
+          return nullptr;
+        }
+        rtc::scoped_refptr<VideoTrackSourceImpl> source(
+            new rtc::RefCountedObject<VideoTrackSourceImpl>(width, height));
+        impl_->displays_[label] = {width, height, dpi, touch_enabled, source};
+
+        auto video_track = impl_->peer_connection_factory_->CreateVideoTrack(
+            label, source.get());
+
+        for (auto& [_, client] : impl_->clients_) {
+          client->AddDisplay(video_track, label);
+        }
+
+        if (impl_->recorder_) {
+          rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source2 =
+              source;
+          auto deleter = [](webrtc::VideoTrackSourceInterface* source) {
+            source->Release();
+          };
+          std::shared_ptr<webrtc::VideoTrackSourceInterface> source_shared(
+              source2.release(), deleter);
+          impl_->recorder_->AddDisplay(width, height, source_shared);
+        }
+
+        return std::shared_ptr<VideoSink>(
+            new VideoTrackSourceImplSinkWrapper(source));
+      });
+}
+
+bool Streamer::RemoveDisplay(const std::string& label) {
+  // Usually called from an application thread
+  return impl_->signal_thread_->BlockingCall(
+      [this, &label]() -> bool {
+        for (auto& [_, client] : impl_->clients_) {
+          client->RemoveDisplay(label);
+        }
+
+        impl_->displays_.erase(label);
+        return true;
+      });
+}
+
+std::shared_ptr<AudioSink> Streamer::AddAudioStream(const std::string& label) {
+  // Usually called from an application thread
+  return impl_->signal_thread_->BlockingCall(
+      [this, &label]() -> std::shared_ptr<AudioSink> {
+        if (impl_->audio_sources_.count(label)) {
+          LOG(ERROR) << "Audio stream with same label already exists: "
+                     << label;
+          return nullptr;
+        }
+        rtc::scoped_refptr<AudioTrackSourceImpl> source(
+            new rtc::RefCountedObject<AudioTrackSourceImpl>());
+        impl_->audio_sources_[label] = source;
+        return std::shared_ptr<AudioSink>(
+            new AudioTrackSourceImplSinkWrapper(source));
+      });
+}
+
+std::shared_ptr<AudioSource> Streamer::GetAudioSource() {
+  return impl_->audio_device_module_;
+}
+
+CameraController* Streamer::AddCamera(unsigned int port, unsigned int cid) {
+  impl_->camera_streamer_ = std::make_unique<CameraStreamer>(port, cid);
+  return impl_->camera_streamer_.get();
+}
+
+void Streamer::SetHardwareSpec(std::string key, std::string value) {
+  impl_->hardware_.emplace(key, value);
+}
+
+void Streamer::AddCustomControlPanelButton(const std::string& command,
+                                           const std::string& title,
+                                           const std::string& icon_name) {
+  ControlPanelButtonDescriptor button = {
+      .command = command, .title = title, .icon_name = icon_name};
+  impl_->custom_control_panel_buttons_.push_back(button);
+}
+
+void Streamer::AddCustomControlPanelButtonWithShellCommand(
+    const std::string& command, const std::string& title,
+    const std::string& icon_name, const std::string& shell_command) {
+  ControlPanelButtonDescriptor button = {
+      .command = command, .title = title, .icon_name = icon_name};
+  button.shell_command = shell_command;
+  impl_->custom_control_panel_buttons_.push_back(button);
+}
+
+void Streamer::AddCustomControlPanelButtonWithDeviceStates(
+    const std::string& command, const std::string& title,
+    const std::string& icon_name,
+    const std::vector<DeviceState>& device_states) {
+  ControlPanelButtonDescriptor button = {
+      .command = command, .title = title, .icon_name = icon_name};
+  button.device_states = device_states;
+  impl_->custom_control_panel_buttons_.push_back(button);
+}
+
+void Streamer::Register(std::weak_ptr<OperatorObserver> observer) {
+  // Usually called from an application thread
+  // No need to block the calling thread on this, the observer will be notified
+  // when the connection is established.
+  impl_->signal_thread_->PostTask([this, observer]() {
+    impl_->Register(observer);
+  });
+}
+
+void Streamer::Unregister() {
+  // Usually called from an application thread.
+  impl_->signal_thread_->PostTask(
+      [this]() { impl_->server_connection_.reset(); });
+}
+
+void Streamer::Impl::Register(std::weak_ptr<OperatorObserver> observer) {
+  operator_observer_ = observer;
+  // When the connection is established the OnOpen function will be called where
+  // the registration will take place
+  if (!server_connection_) {
+    server_connection_ =
+        ServerConnection::Connect(config_.operator_server, weak_from_this());
+  } else {
+    // in case connection attempt is retried, just call Reconnect().
+    // Recreating server_connection_ object will destroy existing WSConnection
+    // object and task re-scheduling will fail
+    server_connection_->Reconnect();
+  }
+}
+
+void Streamer::Impl::OnOpen() {
+  // Called from the websocket thread.
+  // Connected to operator.
+  signal_thread_->PostTask([this]() {
+    Json::Value register_obj;
+    register_obj[cuttlefish::webrtc_signaling::kTypeField] =
+        cuttlefish::webrtc_signaling::kRegisterType;
+    register_obj[cuttlefish::webrtc_signaling::kDeviceIdField] =
+        config_.device_id;
+    CHECK(config_.client_files_port >= 0) << "Invalid device port provided";
+    register_obj[cuttlefish::webrtc_signaling::kDevicePortField] =
+        config_.client_files_port;
+
+    Json::Value device_info;
+    Json::Value displays(Json::ValueType::arrayValue);
+    // No need to synchronize with other accesses to display_ because all
+    // happens on signal_thread.
+    for (auto& entry : displays_) {
+      Json::Value display;
+      display[kStreamIdField] = entry.first;
+      display[kXResField] = entry.second.width;
+      display[kYResField] = entry.second.height;
+      display[kDpiField] = entry.second.dpi;
+      display[kIsTouchField] = true;
+      displays.append(display);
+    }
+    device_info[kDisplaysField] = displays;
+    Json::Value audio_streams(Json::ValueType::arrayValue);
+    for (auto& entry : audio_sources_) {
+      Json::Value audio;
+      audio[kStreamIdField] = entry.first;
+      audio_streams.append(audio);
+    }
+    device_info[kAudioStreamsField] = audio_streams;
+    Json::Value hardware;
+    for (const auto& [k, v] : hardware_) {
+      hardware[k] = v;
+    }
+    device_info[kHardwareField] = hardware;
+    Json::Value custom_control_panel_buttons(Json::arrayValue);
+    for (const auto& button : custom_control_panel_buttons_) {
+      Json::Value button_entry;
+      button_entry[kControlPanelButtonCommand] = button.command;
+      button_entry[kControlPanelButtonTitle] = button.title;
+      button_entry[kControlPanelButtonIconName] = button.icon_name;
+      if (button.shell_command) {
+        button_entry[kControlPanelButtonShellCommand] = *(button.shell_command);
+      } else if (!button.device_states.empty()) {
+        Json::Value device_states(Json::arrayValue);
+        for (const DeviceState& device_state : button.device_states) {
+          Json::Value device_state_entry;
+          if (device_state.lid_switch_open) {
+            device_state_entry[kControlPanelButtonLidSwitchOpen] =
+                *device_state.lid_switch_open;
+          }
+          if (device_state.hinge_angle_value) {
+            device_state_entry[kControlPanelButtonHingeAngleValue] =
+                *device_state.hinge_angle_value;
+          }
+          device_states.append(device_state_entry);
+        }
+        button_entry[kControlPanelButtonDeviceStates] = device_states;
+      }
+      custom_control_panel_buttons.append(button_entry);
+    }
+    device_info[kCustomControlPanelButtonsField] = custom_control_panel_buttons;
+    register_obj[cuttlefish::webrtc_signaling::kDeviceInfoField] = device_info;
+    server_connection_->Send(register_obj);
+    // Do this last as OnRegistered() is user code and may take some time to
+    // complete (although it shouldn't...)
+    auto observer = operator_observer_.lock();
+    if (observer) {
+      observer->OnRegistered();
+    }
+  });
+}
+
+void Streamer::Impl::OnClose() {
+  // Called from websocket thread
+  // The operator shouldn't close the connection with the client, it's up to the
+  // device to decide when to disconnect.
+  LOG(WARNING) << "Connection with server closed unexpectedly";
+  signal_thread_->PostTask([this]() {
+    auto observer = operator_observer_.lock();
+    if (observer) {
+      observer->OnClose();
+    }
+  });
+  LOG(INFO) << "Trying to re-connect to operator..";
+  registration_retries_left_ = kReconnectRetries;
+  retry_interval_ms_ = kReconnectIntervalMs;
+  signal_thread_->PostDelayedTask(
+      [this]() { Register(operator_observer_); },
+      webrtc::TimeDelta::Millis(retry_interval_ms_));
+}
+
+void Streamer::Impl::OnError(const std::string& error) {
+  // Called from websocket thread.
+  if (registration_retries_left_) {
+    LOG(WARNING) << "Connection to operator failed (" << error << "), "
+                 << registration_retries_left_ << " retries left"
+                 << " (will retry in " << retry_interval_ms_ / 1000 << "s)";
+    --registration_retries_left_;
+    signal_thread_->PostDelayedTask(
+        [this]() {
+          // Need to reconnect and register again with operator
+          Register(operator_observer_);
+        },
+        webrtc::TimeDelta::Millis(retry_interval_ms_));
+    retry_interval_ms_ *= 2;
+  } else {
+    LOG(ERROR) << "Error on connection with the operator: " << error;
+    signal_thread_->PostTask([this]() {
+      auto observer = operator_observer_.lock();
+      if (observer) {
+        observer->OnError();
+      }
+    });
+  }
+}
+
+void Streamer::Impl::HandleConfigMessage(const Json::Value& server_message) {
+  CHECK(signal_thread_->IsCurrent())
+      << __FUNCTION__ << " called from the wrong thread";
+  auto result = ParseIceServersMessage(server_message);
+  if (!result.ok()) {
+    LOG(WARNING) << "Failed to parse ice servers message from server: "
+                 << result.error().Trace();
+  }
+  operator_config_.servers = *result;
+}
+
+void Streamer::Impl::HandleClientMessage(const Json::Value& server_message) {
+  CHECK(signal_thread_->IsCurrent())
+      << __FUNCTION__ << " called from the wrong thread";
+  if (!server_message.isMember(cuttlefish::webrtc_signaling::kClientIdField) ||
+      !server_message[cuttlefish::webrtc_signaling::kClientIdField].isInt()) {
+    LOG(ERROR) << "Client message received without valid client id";
+    return;
+  }
+  auto client_id =
+      server_message[cuttlefish::webrtc_signaling::kClientIdField].asInt();
+  if (!server_message.isMember(cuttlefish::webrtc_signaling::kPayloadField)) {
+    LOG(WARNING) << "Received empty client message";
+    return;
+  }
+  auto client_message =
+      server_message[cuttlefish::webrtc_signaling::kPayloadField];
+  if (clients_.count(client_id) == 0) {
+    auto client_handler = CreateClientHandler(client_id);
+    if (!client_handler) {
+      LOG(ERROR) << "Failed to create a new client handler";
+      return;
+    }
+    clients_.emplace(client_id, client_handler);
+  }
+  auto client_handler = clients_[client_id];
+
+  client_handler->HandleMessage(client_message);
+}
+
+void Streamer::Impl::OnReceive(const uint8_t* msg, size_t length,
+                               bool is_binary) {
+  // Usually called from websocket thread.
+  Json::Value server_message;
+  // Once OnReceive returns the buffer can be destroyed/recycled at any time, so
+  // parse the data into a JSON object while still on the websocket thread.
+  if (is_binary || !ParseMessage(msg, length, &server_message)) {
+    LOG(ERROR) << "Received invalid JSON from server: '"
+               << (is_binary ? std::string("(binary_data)")
+                             : std::string(msg, msg + length))
+               << "'";
+    return;
+  }
+  // Transition to the signal thread before member variables are accessed.
+  signal_thread_->PostTask([this, server_message]() {
+    if (!server_message.isMember(cuttlefish::webrtc_signaling::kTypeField) ||
+        !server_message[cuttlefish::webrtc_signaling::kTypeField].isString()) {
+      LOG(ERROR) << "No message_type field from server";
+      // Notify the caller
+      OnError(
+          "Invalid message received from operator: no message type field "
+          "present");
+      return;
+    }
+    auto type =
+        server_message[cuttlefish::webrtc_signaling::kTypeField].asString();
+    if (type == cuttlefish::webrtc_signaling::kConfigType) {
+      HandleConfigMessage(server_message);
+    } else if (type == cuttlefish::webrtc_signaling::kClientDisconnectType) {
+      if (!server_message.isMember(
+              cuttlefish::webrtc_signaling::kClientIdField) ||
+          !server_message.isMember(
+              cuttlefish::webrtc_signaling::kClientIdField)) {
+        LOG(ERROR) << "Invalid disconnect message received from server";
+        // Notify the caller
+        OnError("Invalid disconnect message: client_id is required");
+        return;
+      }
+      auto client_id =
+          server_message[cuttlefish::webrtc_signaling::kClientIdField].asInt();
+      LOG(INFO) << "Client " << client_id << " has disconnected.";
+      DestroyClientHandler(client_id);
+    } else if (type == cuttlefish::webrtc_signaling::kClientMessageType) {
+      HandleClientMessage(server_message);
+    } else {
+      LOG(ERROR) << "Unknown message type: " << type;
+      // Notify the caller
+      OnError("Invalid message received from operator: unknown message type");
+      return;
+    }
+  });
+}
+
+std::shared_ptr<ClientHandler> Streamer::Impl::CreateClientHandler(
+    int client_id) {
+  CHECK(signal_thread_->IsCurrent())
+      << __FUNCTION__ << " called from the wrong thread";
+  auto observer = connection_observer_factory_->CreateObserver();
+
+  auto client_handler = ClientHandler::Create(
+      client_id, observer, *this,
+      [this, client_id](const Json::Value& msg) {
+        SendMessageToClient(client_id, msg);
+      },
+      [this, client_id](bool isOpen) {
+        if (isOpen) {
+          SetupCameraForClient(client_id);
+        } else {
+          DestroyClientHandler(client_id);
+        }
+      });
+
+  for (auto& entry : displays_) {
+    auto& label = entry.first;
+    auto& video_source = entry.second.source;
+
+    auto video_track =
+        peer_connection_factory_->CreateVideoTrack(label, video_source.get());
+    client_handler->AddDisplay(video_track, label);
+  }
+
+  for (auto& entry : audio_sources_) {
+    auto& label = entry.first;
+    auto& audio_stream = entry.second;
+    auto audio_track =
+        peer_connection_factory_->CreateAudioTrack(label, audio_stream.get());
+    client_handler->AddAudio(audio_track, label);
+  }
+
+  return client_handler;
+}
+
+Result<rtc::scoped_refptr<webrtc::PeerConnectionInterface>>
+Streamer::Impl::Build(
+    webrtc::PeerConnectionObserver& observer,
+    const std::vector<webrtc::PeerConnectionInterface::IceServer>&
+        per_connection_servers) {
+  webrtc::PeerConnectionDependencies dependencies(&observer);
+  auto servers = operator_config_.servers;
+  servers.insert(servers.end(), per_connection_servers.begin(),
+                 per_connection_servers.end());
+  if (config_.udp_port_range != config_.tcp_port_range) {
+    // libwebrtc removed the ability to provide a packet socket factory when
+    // creating a peer connection. They plan to provide that functionality with
+    // the peer connection factory, but that's currently incomplete (the packet
+    // socket factory is ignored by the peer connection factory). The only other
+    // choice to customize port ranges is through the port allocator config, but
+    // this is suboptimal as it only allows to specify a single port range that
+    // will be use for both tcp and udp ports.
+    LOG(WARNING) << "TCP and UDP port ranges differ, TCP connections may not "
+                    "work properly";
+  }
+  return CF_EXPECT(
+      CreatePeerConnection(peer_connection_factory_, std::move(dependencies),
+                           config_.udp_port_range.first,
+                           config_.udp_port_range.second, servers),
+      "Failed to build peer connection");
+}
+
+void Streamer::Impl::SendMessageToClient(int client_id,
+                                         const Json::Value& msg) {
+  LOG(VERBOSE) << "Sending to client: " << msg.toStyledString();
+  CHECK(signal_thread_->IsCurrent())
+      << __FUNCTION__ << " called from the wrong thread";
+  Json::Value wrapper;
+  wrapper[cuttlefish::webrtc_signaling::kPayloadField] = msg;
+  wrapper[cuttlefish::webrtc_signaling::kTypeField] =
+      cuttlefish::webrtc_signaling::kForwardType;
+  wrapper[cuttlefish::webrtc_signaling::kClientIdField] = client_id;
+  // This is safe to call from the webrtc threads because
+  // ServerConnection(s) are thread safe
+  server_connection_->Send(wrapper);
+}
+
+void Streamer::Impl::DestroyClientHandler(int client_id) {
+  // Usually called from signal thread, could be called from websocket thread or
+  // an application thread.
+  signal_thread_->PostTask([this, client_id]() {
+    // This needs to be 'posted' to the thread instead of 'invoked'
+    // immediately for two reasons:
+    // * The client handler is destroyed by this code, it's generally a
+    // bad idea (though not necessarily wrong) to return to a member
+    // function of a destroyed object.
+    // * The client handler may call this from within a peer connection
+    // observer callback, destroying the client handler there leads to a
+    // deadlock.
+    clients_.erase(client_id);
+  });
+}
+
+void Streamer::Impl::SetupCameraForClient(int client_id) {
+  if (!camera_streamer_) {
+    return;
+  }
+  auto client_handler = clients_[client_id];
+  if (client_handler) {
+    auto camera_track = client_handler->GetCameraStream();
+    if (camera_track) {
+      camera_track->AddOrUpdateSink(camera_streamer_.get(),
+                                    rtc::VideoSinkWants());
+    }
+  }
+}
+
+}  // namespace webrtc_streaming
+}  // namespace cuttlefish
diff --git a/host/frontend/webrtc/libdevice/streamer.h b/host/frontend/webrtc/libdevice/streamer.h
new file mode 100644
index 0000000..e050b73
--- /dev/null
+++ b/host/frontend/webrtc/libdevice/streamer.h
@@ -0,0 +1,124 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <functional>
+#include <memory>
+#include <mutex>
+#include <optional>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "host/libs/config/custom_actions.h"
+
+#include "host/frontend/webrtc/libcommon/audio_source.h"
+#include "host/frontend/webrtc/libdevice/audio_sink.h"
+#include "host/frontend/webrtc/libdevice/camera_controller.h"
+#include "host/frontend/webrtc/libdevice/connection_observer.h"
+#include "host/frontend/webrtc/libdevice/local_recorder.h"
+#include "host/frontend/webrtc/libdevice/video_sink.h"
+#include "host/frontend/webrtc/libdevice/server_connection.h"
+
+namespace cuttlefish {
+namespace webrtc_streaming {
+
+class ClientHandler;
+
+struct StreamerConfig {
+  // The id with which to register with the operator server.
+  std::string device_id;
+  // The port on which the client files are being served
+  int client_files_port;
+  ServerConfig operator_server;
+  // The port ranges webrtc is allowed to use.
+  // [0,0] means all ports
+  std::pair<uint16_t, uint16_t> udp_port_range = {15550, 15599};
+  std::pair<uint16_t, uint16_t> tcp_port_range = {15550, 15599};
+};
+
+class OperatorObserver {
+ public:
+  virtual ~OperatorObserver() = default;
+  // Called when the websocket connection with the operator is established.
+  virtual void OnRegistered() = 0;
+  // Called when the websocket connection with the operator is closed.
+  virtual void OnClose() = 0;
+  // Called when an error is encountered in the connection to the operator.
+  virtual void OnError() = 0;
+};
+
+class Streamer {
+ public:
+  // The observer_factory will be used to create an observer for every new
+  // client connection. Unregister() needs to be called to stop accepting
+  // connections.
+  static std::unique_ptr<Streamer> Create(
+      const StreamerConfig& cfg, LocalRecorder* recorder,
+      std::shared_ptr<ConnectionObserverFactory> factory);
+  ~Streamer() = default;
+
+  std::shared_ptr<VideoSink> AddDisplay(const std::string& label, int width,
+                                        int height, int dpi,
+                                        bool touch_enabled);
+  bool RemoveDisplay(const std::string& label);
+
+  void SetHardwareSpec(std::string key, std::string value);
+
+  template <typename V>
+  void SetHardwareSpec(std::string key, V value) {
+    SetHardwareSpec(key, std::to_string(value));
+  }
+
+  std::shared_ptr<AudioSink> AddAudioStream(const std::string& label);
+  // Grants access to streams originating on the client side. If there are
+  // multiple streams (either because one client sends more than one or there
+  // are several clients) the audio will be mixed and provided as a single
+  // stream here.
+  std::shared_ptr<AudioSource> GetAudioSource();
+
+  CameraController* AddCamera(unsigned int port, unsigned int cid);
+
+  // Add a custom button to the control panel.
+  void AddCustomControlPanelButton(const std::string& command,
+                                   const std::string& title,
+                                   const std::string& icon_name);
+  void AddCustomControlPanelButtonWithShellCommand(
+      const std::string& command, const std::string& title,
+      const std::string& icon_name, const std::string& shell_command);
+  void AddCustomControlPanelButtonWithDeviceStates(
+      const std::string& command, const std::string& title,
+      const std::string& icon_name,
+      const std::vector<DeviceState>& device_states);
+
+  // Register with the operator.
+  void Register(std::weak_ptr<OperatorObserver> operator_observer);
+  void Unregister();
+
+ private:
+  /*
+   * Private Implementation idiom.
+   * https://en.cppreference.com/w/cpp/language/pimpl
+   */
+  class Impl;
+
+  Streamer(std::unique_ptr<Impl> impl);
+  std::shared_ptr<Impl> impl_;
+};
+
+}  // namespace webrtc_streaming
+}  // namespace cuttlefish
diff --git a/host/frontend/webrtc/lib/video_frame_buffer.h b/host/frontend/webrtc/libdevice/video_frame_buffer.h
similarity index 100%
rename from host/frontend/webrtc/lib/video_frame_buffer.h
rename to host/frontend/webrtc/libdevice/video_frame_buffer.h
diff --git a/host/frontend/webrtc/libdevice/video_sink.h b/host/frontend/webrtc/libdevice/video_sink.h
new file mode 100644
index 0000000..14eee41
--- /dev/null
+++ b/host/frontend/webrtc/libdevice/video_sink.h
@@ -0,0 +1,34 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <memory>
+
+#include "host/frontend/webrtc/libdevice/video_frame_buffer.h"
+
+namespace cuttlefish {
+namespace webrtc_streaming {
+
+class VideoSink {
+ public:
+  virtual ~VideoSink() = default;
+  virtual void OnFrame(std::shared_ptr<VideoFrameBuffer> frame,
+                       int64_t timestamp_us) = 0;
+};
+
+}  // namespace webrtc_streaming
+}  // namespace cuttlefish
diff --git a/host/frontend/webrtc/libdevice/video_track_source_impl.cpp b/host/frontend/webrtc/libdevice/video_track_source_impl.cpp
new file mode 100644
index 0000000..8770f67
--- /dev/null
+++ b/host/frontend/webrtc/libdevice/video_track_source_impl.cpp
@@ -0,0 +1,80 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/frontend/webrtc/libdevice/video_track_source_impl.h"
+
+#include <api/video/video_frame_buffer.h>
+
+namespace cuttlefish {
+namespace webrtc_streaming {
+
+namespace {
+
+class VideoFrameWrapper : public webrtc::I420BufferInterface {
+ public:
+  VideoFrameWrapper(
+      std::shared_ptr<::cuttlefish::webrtc_streaming::VideoFrameBuffer>
+          frame_buffer)
+      : frame_buffer_(frame_buffer) {}
+  ~VideoFrameWrapper() override = default;
+  // From VideoFrameBuffer
+  int width() const override { return frame_buffer_->width(); }
+  int height() const override { return frame_buffer_->height(); }
+
+  // From class PlanarYuvBuffer
+  int StrideY() const override { return frame_buffer_->StrideY(); }
+  int StrideU() const override { return frame_buffer_->StrideU(); }
+  int StrideV() const override { return frame_buffer_->StrideV(); }
+
+  // From class PlanarYuv8Buffer
+  const uint8_t *DataY() const override { return frame_buffer_->DataY(); }
+  const uint8_t *DataU() const override { return frame_buffer_->DataU(); }
+  const uint8_t *DataV() const override { return frame_buffer_->DataV(); }
+
+ private:
+  std::shared_ptr<::cuttlefish::webrtc_streaming::VideoFrameBuffer>
+      frame_buffer_;
+};
+
+}  // namespace
+
+VideoTrackSourceImpl::VideoTrackSourceImpl(int width, int height)
+    : webrtc::VideoTrackSource(false), width_(width), height_(height) {}
+
+void VideoTrackSourceImpl::OnFrame(std::shared_ptr<VideoFrameBuffer> frame,
+                                   int64_t timestamp_us) {
+  auto video_frame =
+      webrtc::VideoFrame::Builder()
+          .set_video_frame_buffer(rtc::scoped_refptr<webrtc::VideoFrameBuffer>(
+              new rtc::RefCountedObject<VideoFrameWrapper>(frame)))
+          .set_timestamp_us(timestamp_us)
+          .build();
+  broadcaster_.OnFrame(video_frame);
+}
+
+bool VideoTrackSourceImpl::GetStats(Stats *stats) {
+  stats->input_height = height_;
+  stats->input_width = width_;
+  return true;
+}
+
+bool VideoTrackSourceImpl::SupportsEncodedOutput() const { return false; }
+rtc::VideoSourceInterface<webrtc::VideoFrame> *VideoTrackSourceImpl::source() {
+  return &broadcaster_;
+}
+
+}  // namespace webrtc_streaming
+}  // namespace cuttlefish
diff --git a/host/frontend/webrtc/libdevice/video_track_source_impl.h b/host/frontend/webrtc/libdevice/video_track_source_impl.h
new file mode 100644
index 0000000..0210117
--- /dev/null
+++ b/host/frontend/webrtc/libdevice/video_track_source_impl.h
@@ -0,0 +1,77 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <media/base/video_broadcaster.h>
+#include <pc/video_track_source.h>
+
+#include "host/frontend/webrtc/libdevice/video_sink.h"
+
+namespace cuttlefish {
+namespace webrtc_streaming {
+
+class VideoTrackSourceImpl : public webrtc::VideoTrackSource {
+ public:
+  VideoTrackSourceImpl(int width, int height);
+
+  void OnFrame(std::shared_ptr<VideoFrameBuffer> frame, int64_t timestamp_us);
+
+  // Returns false if no stats are available, e.g, for a remote source, or a
+  // source which has not seen its first frame yet.
+  //
+  // Implementation should avoid blocking.
+  bool GetStats(Stats* stats) override;
+
+  bool SupportsEncodedOutput() const override;
+  void GenerateKeyFrame() override {}
+  void AddEncodedSink(
+      rtc::VideoSinkInterface<webrtc::RecordableEncodedFrame>* sink) override {}
+  void RemoveEncodedSink(
+      rtc::VideoSinkInterface<webrtc::RecordableEncodedFrame>* sink) override {}
+
+  rtc::VideoSourceInterface<webrtc::VideoFrame>* source() override;
+
+ private:
+  int width_;
+  int height_;
+  rtc::VideoBroadcaster broadcaster_;
+};
+
+// Wraps a VideoTrackSourceImpl as an implementation of the VideoSink interface.
+// This is needed as the VideoTrackSourceImpl is a reference counted object that
+// should only be referenced by rtc::scoped_refptr pointers, but the
+// VideoSink interface is not a reference counted object and therefore not
+// compatible with that kind of pointers. This class can be referenced by a
+// shared pointer and it in turn holds a scoped_refptr to the wrapped object.
+class VideoTrackSourceImplSinkWrapper : public VideoSink {
+ public:
+  virtual ~VideoTrackSourceImplSinkWrapper() = default;
+
+  VideoTrackSourceImplSinkWrapper(rtc::scoped_refptr<VideoTrackSourceImpl> obj)
+      : track_source_impl_(obj) {}
+
+  void OnFrame(std::shared_ptr<VideoFrameBuffer> frame,
+               int64_t timestamp_us) override {
+    track_source_impl_->OnFrame(frame, timestamp_us);
+  }
+
+ private:
+  rtc::scoped_refptr<VideoTrackSourceImpl> track_source_impl_;
+};
+
+}  // namespace webrtc_streaming
+}  // namespace cuttlefish
diff --git a/host/frontend/webrtc/location_handler.cpp b/host/frontend/webrtc/location_handler.cpp
new file mode 100644
index 0000000..10557b1
--- /dev/null
+++ b/host/frontend/webrtc/location_handler.cpp
@@ -0,0 +1,63 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/frontend/webrtc/location_handler.h"
+#include <android-base/logging.h>
+#include <unistd.h>
+#include "host/libs/config/cuttlefish_config.h"
+#include "host/libs/location/GnssClient.h"
+
+#include <sstream>
+#include <vector>
+using namespace std;
+
+namespace cuttlefish {
+namespace webrtc_streaming {
+
+LocationHandler::LocationHandler(
+    std::function<void(const uint8_t *, size_t)> send_to_client) {}
+
+LocationHandler::~LocationHandler() {}
+
+void LocationHandler::HandleMessage(const float longitude,
+                                          const float latitude,
+                                          const float elevation) {
+  auto config = CuttlefishConfig::Get();
+  if (!config) {
+    LOG(ERROR) << "Failed to obtain config object";
+    return;
+  }
+  auto instance = config->ForDefaultInstance();
+  auto server_port = instance.gnss_grpc_proxy_server_port();
+  std::string socket_name =
+      std::string("localhost:") + std::to_string(server_port);
+  GnssClient gpsclient(
+      grpc::CreateChannel(socket_name, grpc::InsecureChannelCredentials()));
+
+  GpsFixArray coordinates;
+  GpsFix location;
+  location.longitude=longitude;
+  location.latitude=latitude;
+  location.elevation=elevation;
+  coordinates.push_back(location);
+
+  auto reply = gpsclient.SendGpsLocations(1000,coordinates);
+  LOG(INFO) << "Server port: " << server_port << " socket: " << socket_name
+            << std::endl;
+}
+
+}  // namespace webrtc_streaming
+}  // namespace cuttlefish
diff --git a/host/frontend/webrtc/location_handler.h b/host/frontend/webrtc/location_handler.h
new file mode 100644
index 0000000..65ef350
--- /dev/null
+++ b/host/frontend/webrtc/location_handler.h
@@ -0,0 +1,35 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include "common/libs/fs/shared_select.h"
+
+namespace cuttlefish {
+namespace webrtc_streaming {
+
+struct LocationHandler {
+  explicit LocationHandler(
+      std::function<void(const uint8_t *, size_t)> send_to_client);
+
+  ~LocationHandler();
+
+  void HandleMessage(const float longitude,
+                           const float latitude,
+                           const float elevation);
+};
+}  // namespace webrtc_streaming
+}  // namespace cuttlefish
diff --git a/host/frontend/webrtc/main.cpp b/host/frontend/webrtc/main.cpp
index b607dca..b9564a9 100644
--- a/host/frontend/webrtc/main.cpp
+++ b/host/frontend/webrtc/main.cpp
@@ -17,16 +17,13 @@
 #include <linux/input.h>
 
 #include <memory>
-#include <string>
-#include <utility>
-#include <vector>
 
 #include <android-base/logging.h>
 #include <android-base/strings.h>
+#include <fruit/fruit.h>
 #include <gflags/gflags.h>
 #include <libyuv.h>
 
-#include "common/libs/fs/shared_buf.h"
 #include "common/libs/fs/shared_fd.h"
 #include "common/libs/utils/files.h"
 #include "host/frontend/webrtc/audio_handler.h"
@@ -34,10 +31,10 @@
 #include "host/frontend/webrtc/connection_observer.h"
 #include "host/frontend/webrtc/display_handler.h"
 #include "host/frontend/webrtc/kernel_log_events_handler.h"
-#include "host/frontend/webrtc/lib/camera_controller.h"
-#include "host/frontend/webrtc/lib/local_recorder.h"
-#include "host/frontend/webrtc/lib/streamer.h"
-#include "host/frontend/webrtc/lib/video_sink.h"
+#include "host/frontend/webrtc/libdevice/camera_controller.h"
+#include "host/frontend/webrtc/libdevice/local_recorder.h"
+#include "host/frontend/webrtc/libdevice/streamer.h"
+#include "host/frontend/webrtc/libdevice/video_sink.h"
 #include "host/libs/audio_connector/server.h"
 #include "host/libs/config/cuttlefish_config.h"
 #include "host/libs/config/logging.h"
@@ -53,6 +50,10 @@
 DEFINE_int32(kernel_log_events_fd, -1,
              "An fd to listen on for kernel log events.");
 DEFINE_int32(command_fd, -1, "An fd to listen to for control messages");
+DEFINE_int32(confui_in_fd, -1,
+             "Confirmation UI virtio-console from host to guest");
+DEFINE_int32(confui_out_fd, -1,
+             "Confirmation UI virtio-console from guest to host");
 DEFINE_string(action_servers, "",
               "A comma-separated list of server_name:fd pairs, "
               "where each entry corresponds to one custom action server.");
@@ -86,47 +87,6 @@
     LOG(ERROR) << "Error encountered in connection with Operator";
   }
 };
-
-static std::vector<std::pair<std::string, std::string>> ParseHttpHeaders(
-    const std::string& path) {
-  auto fd = cuttlefish::SharedFD::Open(path, O_RDONLY);
-  if (!fd->IsOpen()) {
-    LOG(WARNING) << "Unable to open operator (signaling server) headers file, "
-                    "connecting to the operator will probably fail: "
-                 << fd->StrError();
-    return {};
-  }
-  std::string raw_headers;
-  auto res = cuttlefish::ReadAll(fd, &raw_headers);
-  if (res < 0) {
-    LOG(WARNING) << "Unable to open operator (signaling server) headers file, "
-                    "connecting to the operator will probably fail: "
-                 << fd->StrError();
-    return {};
-  }
-  std::vector<std::pair<std::string, std::string>> headers;
-  std::size_t raw_index = 0;
-  while (raw_index < raw_headers.size()) {
-    auto colon_pos = raw_headers.find(':', raw_index);
-    if (colon_pos == std::string::npos) {
-      LOG(ERROR)
-          << "Expected to find ':' in each line of the operator headers file";
-      break;
-    }
-    auto eol_pos = raw_headers.find('\n', colon_pos);
-    if (eol_pos == std::string::npos) {
-      eol_pos = raw_headers.size();
-    }
-    // If the file uses \r\n as line delimiters exclude the \r too.
-    auto eov_pos = raw_headers[eol_pos - 1] == '\r'? eol_pos - 1: eol_pos;
-    headers.emplace_back(
-        raw_headers.substr(raw_index, colon_pos + 1 - raw_index),
-        raw_headers.substr(colon_pos + 1, eov_pos - colon_pos - 1));
-    raw_index = eol_pos + 1;
-  }
-  return headers;
-}
-
 std::unique_ptr<cuttlefish::AudioServer> CreateAudioServer() {
   cuttlefish::SharedFD audio_server_fd =
       cuttlefish::SharedFD::Dup(FLAGS_audio_server_fd);
@@ -140,6 +100,21 @@
       .install(cuttlefish::CustomActionsComponent);
 };
 
+fruit::Component<
+    cuttlefish::ScreenConnector<DisplayHandler::WebRtcScProcessedFrame>,
+    cuttlefish::confui::HostServer, cuttlefish::confui::HostVirtualInput>
+CreateConfirmationUIComponent(
+    int* frames_fd, cuttlefish::confui::PipeConnectionPair* pipe_io_pair) {
+  using cuttlefish::ScreenConnectorFrameRenderer;
+  using ScreenConnector = cuttlefish::DisplayHandler::ScreenConnector;
+  return fruit::createComponent()
+      .bindInstance<
+          fruit::Annotated<cuttlefish::WaylandScreenConnector::FramesFd, int>>(
+          *frames_fd)
+      .bindInstance(*pipe_io_pair)
+      .bind<ScreenConnectorFrameRenderer, ScreenConnector>();
+}
+
 int main(int argc, char** argv) {
   cuttlefish::DefaultSubprocessLogging(argv);
   ::gflags::ParseCommandLineFlags(&argc, &argv, true);
@@ -174,6 +149,7 @@
       cuttlefish::SharedFD::Accept(*input_sockets.switches_server);
 
   std::vector<std::thread> touch_accepters;
+  touch_accepters.reserve(input_sockets.touch_servers.size());
   for (const auto& touch : input_sockets.touch_servers) {
     auto label = touch.first;
     touch_accepters.emplace_back([label, &input_sockets]() {
@@ -202,25 +178,36 @@
 
   auto cvd_config = cuttlefish::CuttlefishConfig::Get();
   auto instance = cvd_config->ForDefaultInstance();
-  auto& host_mode_ctrl = cuttlefish::HostModeCtrl::Get();
-  auto screen_connector_ptr = cuttlefish::DisplayHandler::ScreenConnector::Get(
-      FLAGS_frame_server_fd, host_mode_ctrl);
-  auto& screen_connector = *(screen_connector_ptr.get());
+
+  cuttlefish::confui::PipeConnectionPair conf_ui_comm_fd_pair{
+      .from_guest_ = cuttlefish::SharedFD::Dup(FLAGS_confui_out_fd),
+      .to_guest_ = cuttlefish::SharedFD::Dup(FLAGS_confui_in_fd)};
+  close(FLAGS_confui_in_fd);
+  close(FLAGS_confui_out_fd);
+
+  int frames_fd = FLAGS_frame_server_fd;
+  fruit::Injector<
+      cuttlefish::ScreenConnector<DisplayHandler::WebRtcScProcessedFrame>,
+      cuttlefish::confui::HostServer, cuttlefish::confui::HostVirtualInput>
+      conf_ui_components_injector(CreateConfirmationUIComponent,
+                                  std::addressof(frames_fd),
+                                  &conf_ui_comm_fd_pair);
+  auto& screen_connector =
+      conf_ui_components_injector.get<DisplayHandler::ScreenConnector&>();
+
   auto client_server = cuttlefish::ClientFilesServer::New(FLAGS_client_dir);
   CHECK(client_server) << "Failed to initialize client files server";
-
-  // create confirmation UI service, giving host_mode_ctrl and
-  // screen_connector
-  // keep this singleton object alive until the webRTC process ends
-  static auto& host_confui_server =
-      cuttlefish::confui::HostServer::Get(host_mode_ctrl, screen_connector);
+  auto& host_confui_server =
+      conf_ui_components_injector.get<cuttlefish::confui::HostServer&>();
+  auto& confui_virtual_input =
+      conf_ui_components_injector.get<cuttlefish::confui::HostVirtualInput&>();
 
   StreamerConfig streamer_config;
 
   streamer_config.device_id = instance.webrtc_device_id();
   streamer_config.client_files_port = client_server->port();
-  streamer_config.tcp_port_range = cvd_config->webrtc_tcp_port_range();
-  streamer_config.udp_port_range = cvd_config->webrtc_udp_port_range();
+  streamer_config.tcp_port_range = instance.webrtc_tcp_port_range();
+  streamer_config.udp_port_range = instance.webrtc_udp_port_range();
   streamer_config.operator_server.addr = cvd_config->sig_server_address();
   streamer_config.operator_server.port = cvd_config->sig_server_port();
   streamer_config.operator_server.path = cvd_config->sig_server_path();
@@ -234,42 +221,14 @@
         ServerConfig::Security::kInsecure;
   }
 
-  if (!cvd_config->sig_server_headers_path().empty()) {
-    streamer_config.operator_server.http_headers =
-        ParseHttpHeaders(cvd_config->sig_server_headers_path());
-  }
-
   KernelLogEventsHandler kernel_logs_event_handler(kernel_log_events_client);
   auto observer_factory = std::make_shared<CfConnectionObserverFactory>(
-      input_sockets, &kernel_logs_event_handler, host_confui_server);
+      input_sockets, &kernel_logs_event_handler, confui_virtual_input);
 
-  auto streamer = Streamer::Create(streamer_config, observer_factory);
-  CHECK(streamer) << "Could not create streamer";
-
-  uint32_t display_index = 0;
-  std::vector<std::shared_ptr<VideoSink>> displays;
-  for (const auto& display_config : cvd_config->display_configs()) {
-    const std::string display_id = "display_" + std::to_string(display_index);
-
-    auto display =
-        streamer->AddDisplay(display_id, display_config.width,
-                             display_config.height, display_config.dpi, true);
-    displays.push_back(display);
-
-    ++display_index;
-  }
-
-  auto display_handler =
-      std::make_shared<DisplayHandler>(std::move(displays), screen_connector);
-
-  if (instance.camera_server_port()) {
-    auto camera_controller = streamer->AddCamera(instance.camera_server_port(),
-                                                 instance.vsock_guest_cid());
-    observer_factory->SetCameraHandler(camera_controller);
-  }
-
+  // The recorder is created first, so displays added in callbacks to the
+  // Streamer can also be added to the LocalRecorder.
   std::unique_ptr<cuttlefish::webrtc_streaming::LocalRecorder> local_recorder;
-  if (cvd_config->record_screen()) {
+  if (instance.record_screen()) {
     int recording_num = 0;
     std::string recording_path;
     do {
@@ -280,29 +239,45 @@
     } while (cuttlefish::FileExists(recording_path));
     local_recorder = LocalRecorder::Create(recording_path);
     CHECK(local_recorder) << "Could not create local recorder";
+  }
 
-    streamer->RecordDisplays(*local_recorder);
+  auto streamer =
+      Streamer::Create(streamer_config, local_recorder.get(), observer_factory);
+  CHECK(streamer) << "Could not create streamer";
+
+  auto display_handler =
+      std::make_shared<DisplayHandler>(*streamer, screen_connector);
+
+  if (instance.camera_server_port()) {
+    auto camera_controller = streamer->AddCamera(instance.camera_server_port(),
+                                                 instance.vsock_guest_cid());
+    observer_factory->SetCameraHandler(camera_controller);
   }
 
   observer_factory->SetDisplayHandler(display_handler);
 
-  streamer->SetHardwareSpec("CPUs", cvd_config->cpus());
-  streamer->SetHardwareSpec("RAM", std::to_string(cvd_config->memory_mb()) + " mb");
+  streamer->SetHardwareSpec("CPUs", instance.cpus());
+  streamer->SetHardwareSpec("RAM", std::to_string(instance.memory_mb()) + " mb");
 
   std::string user_friendly_gpu_mode;
-  if (cvd_config->gpu_mode() == cuttlefish::kGpuModeGuestSwiftshader) {
+  if (instance.gpu_mode() == cuttlefish::kGpuModeGuestSwiftshader) {
     user_friendly_gpu_mode = "SwiftShader (Guest CPU Rendering)";
-  } else if (cvd_config->gpu_mode() == cuttlefish::kGpuModeDrmVirgl) {
-    user_friendly_gpu_mode = "VirglRenderer (Accelerated Host GPU Rendering)";
-  } else if (cvd_config->gpu_mode() == cuttlefish::kGpuModeGfxStream) {
-    user_friendly_gpu_mode = "Gfxstream (Accelerated Host GPU Rendering)";
+  } else if (instance.gpu_mode() == cuttlefish::kGpuModeDrmVirgl) {
+    user_friendly_gpu_mode =
+        "VirglRenderer (Accelerated Rendering using Host OpenGL)";
+  } else if (instance.gpu_mode() == cuttlefish::kGpuModeGfxstream) {
+    user_friendly_gpu_mode =
+        "Gfxstream (Accelerated Rendering using Host OpenGL and Vulkan)";
+  } else if (instance.gpu_mode() == cuttlefish::kGpuModeGfxstreamGuestAngle) {
+    user_friendly_gpu_mode =
+        "Gfxstream (Accelerated Rendering using Host Vulkan)";
   } else {
-    user_friendly_gpu_mode = cvd_config->gpu_mode();
+    user_friendly_gpu_mode = instance.gpu_mode();
   }
   streamer->SetHardwareSpec("GPU Mode", user_friendly_gpu_mode);
 
   std::shared_ptr<AudioHandler> audio_handler;
-  if (cvd_config->enable_audio()) {
+  if (instance.enable_audio()) {
     auto audio_stream = streamer->AddAudioStream("audio");
     auto audio_server = CreateAudioServer();
     auto audio_source = streamer->GetAudioSource();
@@ -336,53 +311,50 @@
 
   const auto& actions_provider =
       injector.get<cuttlefish::CustomActionConfigProvider&>();
-  for (const auto& custom_action : actions_provider.CustomActions()) {
-    if (custom_action.shell_command) {
-      if (custom_action.buttons.size() != 1) {
-        LOG(FATAL) << "Expected exactly one button for custom action command: "
-                   << *(custom_action.shell_command);
-      }
-      const auto button = custom_action.buttons[0];
-      streamer->AddCustomControlPanelButtonWithShellCommand(
-          button.command, button.title, button.icon_name,
-          *(custom_action.shell_command));
-    } else if (custom_action.server) {
-      if (action_server_fds.find(*(custom_action.server)) !=
-          action_server_fds.end()) {
-        LOG(INFO) << "Connecting to custom action server "
-                  << *(custom_action.server);
 
-        int fd = action_server_fds[*(custom_action.server)];
-        cuttlefish::SharedFD custom_action_server = cuttlefish::SharedFD::Dup(fd);
-        close(fd);
+  for (const auto& custom_action :
+       actions_provider.CustomShellActions(instance.id())) {
+    const auto button = custom_action.button;
+    streamer->AddCustomControlPanelButtonWithShellCommand(
+        button.command, button.title, button.icon_name,
+        custom_action.shell_command);
+  }
 
-        if (custom_action_server->IsOpen()) {
-          std::vector<std::string> commands_for_this_server;
-          for (const auto& button : custom_action.buttons) {
-            streamer->AddCustomControlPanelButton(button.command, button.title,
-                                                  button.icon_name);
-            commands_for_this_server.push_back(button.command);
-          }
-          observer_factory->AddCustomActionServer(custom_action_server,
-                                                  commands_for_this_server);
-        } else {
-          LOG(ERROR) << "Error connecting to custom action server: "
-                     << *(custom_action.server);
-        }
-      } else {
-        LOG(ERROR) << "Custom action server not provided as command line flag: "
-                   << *(custom_action.server);
-      }
-    } else if (!custom_action.device_states.empty()) {
-      if (custom_action.buttons.size() != 1) {
-        LOG(FATAL)
-            << "Expected exactly one button for custom action device states.";
-      }
-      const auto button = custom_action.buttons[0];
-      streamer->AddCustomControlPanelButtonWithDeviceStates(
-          button.command, button.title, button.icon_name,
-          custom_action.device_states);
+  for (const auto& custom_action :
+       actions_provider.CustomActionServers(instance.id())) {
+    if (action_server_fds.find(custom_action.server) ==
+        action_server_fds.end()) {
+      LOG(ERROR) << "Custom action server not provided as command line flag: "
+                 << custom_action.server;
+      continue;
     }
+    LOG(INFO) << "Connecting to custom action server " << custom_action.server;
+
+    int fd = action_server_fds[custom_action.server];
+    cuttlefish::SharedFD custom_action_server = cuttlefish::SharedFD::Dup(fd);
+    close(fd);
+
+    if (custom_action_server->IsOpen()) {
+      std::vector<std::string> commands_for_this_server;
+      for (const auto& button : custom_action.buttons) {
+        streamer->AddCustomControlPanelButton(button.command, button.title,
+                                              button.icon_name);
+        commands_for_this_server.push_back(button.command);
+      }
+      observer_factory->AddCustomActionServer(custom_action_server,
+                                              commands_for_this_server);
+    } else {
+      LOG(ERROR) << "Error connecting to custom action server: "
+                 << custom_action.server;
+    }
+  }
+
+  for (const auto& custom_action :
+       actions_provider.CustomDeviceStateActions(instance.id())) {
+    const auto button = custom_action.button;
+    streamer->AddCustomControlPanelButtonWithDeviceStates(
+        button.command, button.title, button.icon_name,
+        custom_action.device_states);
   }
 
   std::shared_ptr<cuttlefish::webrtc_streaming::OperatorObserver> operator_observer(
diff --git a/host/frontend/webrtc_operator/assets/js/index.js b/host/frontend/webrtc_operator/assets/js/index.js
index c04f308..49e2b1e 100644
--- a/host/frontend/webrtc_operator/assets/js/index.js
+++ b/host/frontend/webrtc_operator/assets/js/index.js
@@ -90,7 +90,7 @@
     return new Promise((resolve, reject) => {
       let client = window.open(`client.html?deviceId=${deviceId}`, deviceId);
       client.addEventListener('load', evt => {
-        console.log('loaded');
+        console.debug('loaded');
         resolve();
       });
     });
diff --git a/host/frontend/webrtc_operator/assets/js/server_connector.js b/host/frontend/webrtc_operator/assets/js/server_connector.js
index ff19a0a..9044a16 100644
--- a/host/frontend/webrtc_operator/assets/js/server_connector.js
+++ b/host/frontend/webrtc_operator/assets/js/server_connector.js
@@ -60,6 +60,13 @@
     }
   }
 
+  // Registers a callback to receive messages from the device. A race may occur
+  // if this is called after requestDevice() is called in which some device
+  // messages are lost.
+  onDeviceMsg(cb) {
+    throw 'Not implemented!';
+  }
+
   // Selects a particular device in the signaling server and opens the signaling
   // channel with it (but doesn't send any message to the device). Returns a
   // promise to an object with the following properties:
@@ -86,6 +93,55 @@
   }
 }
 
+// Returns real implementation for ParentController.
+export function createParentController() {
+  return null;
+}
+
+// ParentController object provides methods for sending information from device
+// UI to operator UI. This class is just an interface and real implementation is
+// at the operator side. This class shouldn't be instantiated directly.
+class ParentController {
+  constructor() {
+    if (this.constructor === ParentController) {
+      throw new Error('ParentController is an abstract class');
+    }
+  }
+
+  // Create and return a message object that contains display information of
+  // device. Created object can be sent to operator UI using send() method.
+  // rotation argument is device's physycan rotation so it will be commonly
+  // applied to all displays.
+  createDeviceDisplaysMessage(rotation) {
+    throw 'Not implemented';
+  }
+}
+
+// This class represents displays information for a device. This message is
+// intended to be sent to operator UI to determine panel size of device UI.
+// This is an abstract class and should not be instantiated directly. This
+// message is created using createDeviceDisplaysMessage method of
+// ParentController. Real implementation of this class is at operator side.
+export class DeviceDisplaysMessage {
+  constructor(parentController, rotation) {
+    if (this.constructor === DeviceDisplaysMessage) {
+      throw new Error('DeviceDisplaysMessage is an abstract class');
+    }
+  }
+
+  // Add a display information to deviceDisplays message.
+  addDisplay(display_id, width, height) {
+    throw 'Not implemented'
+  }
+
+  // Send DeviceDisplaysMessage created using createDeviceDisplaysMessage to
+  // operator UI. If operator UI does not exist (in the case device web page
+  // is opened directly), the message will just be ignored.
+  send() {
+    throw 'Not implemented'
+  }
+}
+
 // End of Server Connector Interface.
 
 // The following code is internal and shouldn't be accessed outside this file.
diff --git a/host/frontend/webrtc_operator/assets/style.css b/host/frontend/webrtc_operator/assets/style.css
index a594e99..f53166b 100644
--- a/host/frontend/webrtc_operator/assets/style.css
+++ b/host/frontend/webrtc_operator/assets/style.css
@@ -88,7 +88,7 @@
 /* Control panel buttons and device screen(s). */
 
 #controls-and-displays {
-  height: calc(100% - 84px);
+  height: 100%;
 
   /* Items inside this use a row Flexbox.*/
   display: flex;
diff --git a/host/frontend/webrtc_operator/certs/create_certs.sh b/host/frontend/webrtc_operator/certs/create_certs.sh
index fefc275..cca2af3 100755
--- a/host/frontend/webrtc_operator/certs/create_certs.sh
+++ b/host/frontend/webrtc_operator/certs/create_certs.sh
@@ -1,5 +1,19 @@
 #!/bin/sh
 
+# Copyright 2019 Google Inc. All rights reserved.
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 # As explained in
 #  https://gist.github.com/darrenjs/4645f115d10aa4b5cebf57483ec82eca
 
diff --git a/host/libs/allocd/Android.bp b/host/libs/allocd/Android.bp
index c624817..3c52507 100644
--- a/host/libs/allocd/Android.bp
+++ b/host/libs/allocd/Android.bp
@@ -41,7 +41,6 @@
         "resource.cpp",
     ],
     shared_libs: [
-        "libext2_blkid",
         "libbase",
         "libcuttlefish_fs",
         "libcuttlefish_utils",
@@ -61,7 +60,6 @@
         "test/client.cpp",
     ],
     shared_libs: [
-        "libext2_blkid",
         "libbase",
         "libcuttlefish_allocd_utils",
         "libcuttlefish_fs",
diff --git a/host/libs/allocd/alloc_utils.cpp b/host/libs/allocd/alloc_utils.cpp
index 8c91686..c788e2e 100644
--- a/host/libs/allocd/alloc_utils.cpp
+++ b/host/libs/allocd/alloc_utils.cpp
@@ -1,3 +1,18 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 #include "host/libs/allocd/alloc_utils.h"
 
 #include <cstdint>
diff --git a/host/libs/allocd/request.h b/host/libs/allocd/request.h
index bb088d5..184ecb5 100644
--- a/host/libs/allocd/request.h
+++ b/host/libs/allocd/request.h
@@ -44,7 +44,8 @@
 enum class IfaceType : uint16_t {
   Invalid = 0,  // an invalid interface
   mtap,         // mobile tap
-  wtap,         // wireless tap
+  wtap,         // bridged wireless tap
+  wifiap,       // non-bridged wireless tap
   etap,         // ethernet tap
   wbr,          // wireless bridge
   ebr           // ethernet bridge
diff --git a/host/libs/allocd/resource_manager.cpp b/host/libs/allocd/resource_manager.cpp
index aaa8fe0..56e05cd 100644
--- a/host/libs/allocd/resource_manager.cpp
+++ b/host/libs/allocd/resource_manager.cpp
@@ -85,20 +85,20 @@
     const char* idp = iface.c_str() + (iface.size() - 3);
     int small_id = atoi(idp);
     switch (ty) {
+      case IfaceType::wifiap:
+        // TODO(seungjaeyoo): Support AddInterface for wifiap
+        break;
       case IfaceType::mtap:
+        // TODO(seungjaeyoo): Support AddInterface for mtap using an IP
+        // prefix different from kMobileIp.
         res = std::make_shared<MobileIface>(iface, uid, small_id, resource_id,
                                             kMobileIp);
         allocatedIface = res->AcquireResource();
         pending_add_.insert({resource_id, res});
         break;
       case IfaceType::wtap: {
-        // TODO (paulkirth): change this to cvd-wbr, to test w/ today's
-        // debian package, this is required since the number of wireless
-        // bridges provided by the debian package has gone from 10 down to
-        // 1, but our debian packages in cloudtop are not up to date
-        auto w = std::make_shared<EthernetIface>(iface, uid, small_id,
-                                                 resource_id, "cvd-wbr-01",
-                                                 kWirelessIp);
+        auto w = std::make_shared<EthernetIface>(
+            iface, uid, small_id, resource_id, "cvd-wbr", kWirelessIp);
         w->SetUseEbtablesLegacy(use_ebtables_legacy_);
         w->SetHasIpv4(use_ipv4_bridge_);
         w->SetHasIpv6(use_ipv6_bridge_);
@@ -108,9 +108,8 @@
         break;
       }
       case IfaceType::etap: {
-        auto w = std::make_shared<EthernetIface>(iface, uid, small_id,
-                                                 resource_id, "cvd-ebr",
-                                                 kEthernetIp);
+        auto w = std::make_shared<EthernetIface>(
+            iface, uid, small_id, resource_id, "cvd-ebr", kEthernetIp);
         w->SetUseEbtablesLegacy(use_ebtables_legacy_);
         w->SetHasIpv4(use_ipv4_bridge_);
         w->SetHasIpv6(use_ipv6_bridge_);
@@ -148,7 +147,12 @@
   bool removedIface = false;
   if (isManagedIface) {
     switch (ty) {
+      case IfaceType::wifiap:
+        // TODO(seungjaeyoo): Support RemoveInterface for wifiap
+        break;
       case IfaceType::mtap: {
+        // TODO(seungjaeyoo): Support RemoveInterface for mtap using an IP
+        // prefix different from kMobileIp.
         const char* idp = iface.c_str() + (iface.size() - 3);
         int id = atoi(idp);
         removedIface = DestroyMobileIface(iface, id, kMobileIp);
@@ -171,7 +175,7 @@
     LOG(WARNING) << "Interface not managed: " << iface;
   }
 
-  if (isManagedIface) {
+  if (removedIface) {
     LOG(INFO) << "Removed interface: " << iface;
   } else {
     LOG(WARNING) << "Could not remove interface: " << iface;
diff --git a/host/libs/allocd/test/client.cpp b/host/libs/allocd/test/client.cpp
index 235f521..4b6ce37 100644
--- a/host/libs/allocd/test/client.cpp
+++ b/host/libs/allocd/test/client.cpp
@@ -76,7 +76,8 @@
     req["iface_type"] = "mtap";
     request_list.append(req);
     req["iface_type"] = "wtap";
-
+    request_list.append(req);
+    req["iface_type"] = "wifiap";
     request_list.append(req);
     config["config_request"]["request_list"] = request_list;
 
diff --git a/host/libs/allocd/utils.cpp b/host/libs/allocd/utils.cpp
index 64eb127..deab5c0 100644
--- a/host/libs/allocd/utils.cpp
+++ b/host/libs/allocd/utils.cpp
@@ -55,19 +55,15 @@
     {"invalid", RequestType::Invalid}};
 
 const std::map<std::string, IfaceType> StrToIfaceTyMap = {
-    {"invalid", IfaceType::Invalid},
-    {"mtap", IfaceType::mtap},
-    {"wtap", IfaceType::wtap},
-    {"etap", IfaceType::etap},
-    {"wbr", IfaceType::wbr},
+    {"invalid", IfaceType::Invalid}, {"mtap", IfaceType::mtap},
+    {"wtap", IfaceType::wtap},       {"wifiap", IfaceType::wifiap},
+    {"etap", IfaceType::etap},       {"wbr", IfaceType::wbr},
     {"ebr", IfaceType::ebr}};
 
 const std::map<IfaceType, std::string> IfaceTyToStrMap = {
-    {IfaceType::Invalid, "invalid"},
-    {IfaceType::mtap, "mtap"},
-    {IfaceType::wtap, "wtap"},
-    {IfaceType::etap, "etap"},
-    {IfaceType::wbr, "wbr"},
+    {IfaceType::Invalid, "invalid"}, {IfaceType::mtap, "mtap"},
+    {IfaceType::wtap, "wtap"},       {IfaceType::wifiap, "wifiap"},
+    {IfaceType::etap, "etap"},       {IfaceType::wbr, "wbr"},
     {IfaceType::ebr, "ebr"}};
 
 const std::map<RequestStatus, std::string> ReqStatusToStrMap = {
@@ -171,6 +167,8 @@
       return "mtap";
     case IfaceType::wtap:
       return "wtap";
+    case IfaceType::wifiap:
+      return "wifiap";
     case IfaceType::etap:
       return "etap";
     case IfaceType::wbr:
diff --git a/host/libs/audio_connector/server.cpp b/host/libs/audio_connector/server.cpp
index 4f0570c..ab74daa 100644
--- a/host/libs/audio_connector/server.cpp
+++ b/host/libs/audio_connector/server.cpp
@@ -349,7 +349,7 @@
 
 const volatile uint8_t* AudioClientConnection::TxBufferAt(size_t offset,
                                                           size_t len) const {
-  CHECK(offset < tx_shm_.len() && tx_shm_.len() - offset > len)
+  CHECK(tx_shm_.WithinBounds(offset, len))
       << "Tx buffer bounds outside the buffer area: " << offset << " " << len;
   const void* ptr = tx_shm_.get();
   return &reinterpret_cast<const volatile uint8_t*>(ptr)[offset];
@@ -357,7 +357,7 @@
 
 volatile uint8_t* AudioClientConnection::RxBufferAt(size_t offset,
                                                     size_t len) {
-  CHECK(offset < rx_shm_.len() && rx_shm_.len() - offset > len)
+  CHECK(rx_shm_.WithinBounds(offset, len))
       << "Rx buffer bounds outside the buffer area: " << offset << " " << len;
   void* ptr = rx_shm_.get();
   return &reinterpret_cast<volatile uint8_t*>(ptr)[offset];
diff --git a/host/libs/command_util/Android.bp b/host/libs/command_util/Android.bp
new file mode 100644
index 0000000..3b6aeb4
--- /dev/null
+++ b/host/libs/command_util/Android.bp
@@ -0,0 +1,35 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+cc_library_static {
+    name: "libcuttlefish_command_util",
+    srcs: [
+        "util.cc",
+    ],
+    shared_libs: [
+        "libcuttlefish_fs",
+        "libcuttlefish_utils",
+        "libjsoncpp",
+    ],
+    static_libs: [
+        "libbase",
+        "libcuttlefish_host_config",
+    ],
+    defaults: ["cuttlefish_host"],
+}
diff --git a/host/libs/command_util/util.cc b/host/libs/command_util/util.cc
new file mode 100644
index 0000000..65253f4
--- /dev/null
+++ b/host/libs/command_util/util.cc
@@ -0,0 +1,103 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/libs/command_util/util.h"
+
+#include "sys/time.h"
+#include "sys/types.h"
+
+#include <string>
+
+#include "common/libs/fs/shared_buf.h"
+#include "common/libs/fs/shared_fd.h"
+#include "common/libs/fs/shared_select.h"
+#include "common/libs/utils/result.h"
+#include "host/commands/run_cvd/runner_defs.h"
+#include "host/libs/config/cuttlefish_config.h"
+
+namespace cuttlefish {
+namespace {
+
+template <typename T>
+Result<T> ReadFromMonitor(const SharedFD& monitor_socket) {
+  T response;
+  ssize_t bytes_recv = ReadExactBinary(monitor_socket, &response);
+  CF_EXPECT(bytes_recv != 0, "Launcher socket closed unexpectedly");
+  CF_EXPECT(bytes_recv > 0, "Error receiving response from launcher monitor: "
+                                << monitor_socket->StrError());
+  CF_EXPECT(bytes_recv == sizeof(response),
+            "Launcher response not correct number of bytes");
+  return response;
+}
+
+}  // namespace
+
+Result<LauncherResponse> ReadLauncherResponse(const SharedFD& monitor_socket) {
+  return ReadFromMonitor<LauncherResponse>(monitor_socket);
+}
+
+Result<RunnerExitCodes> ReadExitCode(const SharedFD& monitor_socket) {
+  return ReadFromMonitor<RunnerExitCodes>(monitor_socket);
+}
+
+Result<SharedFD> GetLauncherMonitorFromInstance(
+    const CuttlefishConfig::InstanceSpecific& instance_config,
+    const int timeout_seconds) {
+  std::string monitor_path = instance_config.launcher_monitor_socket_path();
+  CF_EXPECT(!monitor_path.empty(), "No path to launcher monitor found");
+
+  SharedFD monitor_socket = SharedFD::SocketLocalClient(
+      monitor_path.c_str(), false, SOCK_STREAM, timeout_seconds);
+  CF_EXPECT(monitor_socket->IsOpen(),
+            "Unable to connect to launcher monitor at "
+                << monitor_path << ":" << monitor_socket->StrError());
+  return monitor_socket;
+}
+
+Result<SharedFD> GetLauncherMonitor(const CuttlefishConfig& config,
+                                    const int instance_num,
+                                    const int timeout_seconds) {
+  auto instance_config = config.ForInstance(instance_num);
+  return GetLauncherMonitorFromInstance(instance_config, timeout_seconds);
+}
+
+Result<void> WriteLauncherAction(const SharedFD& monitor_socket,
+                                 const LauncherAction request) {
+  ssize_t bytes_sent = WriteAllBinary(monitor_socket, &request);
+  CF_EXPECT(bytes_sent > 0, "Error sending launcher monitor the command: "
+                                << monitor_socket->StrError());
+  CF_EXPECT(bytes_sent == sizeof(request),
+            "Launcher did not send correct number of bytes");
+  return {};
+}
+
+Result<void> WaitForRead(const SharedFD& monitor_socket,
+                         const int timeout_seconds) {
+  // Perform a select with a timeout to guard against launcher hanging
+  SharedFDSet read_set;
+  read_set.Set(monitor_socket);
+  struct timeval timeout = {timeout_seconds, 0};
+  int select_result = Select(&read_set, nullptr, nullptr,
+                             timeout_seconds <= 0 ? nullptr : &timeout);
+  CF_EXPECT(select_result != 0,
+            "Timeout expired waiting for launcher monitor to respond");
+  CF_EXPECT(
+      select_result > 0,
+      "Failed communication with the launcher monitor: " << strerror(errno));
+  return {};
+}
+
+}  // namespace cuttlefish
diff --git a/host/libs/command_util/util.h b/host/libs/command_util/util.h
new file mode 100644
index 0000000..58cf1bf
--- /dev/null
+++ b/host/libs/command_util/util.h
@@ -0,0 +1,44 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include "common/libs/fs/shared_fd.h"
+#include "common/libs/utils/result.h"
+#include "host/commands/run_cvd/runner_defs.h"
+#include "host/libs/config/cuttlefish_config.h"
+
+namespace cuttlefish {
+
+Result<LauncherResponse> ReadLauncherResponse(const SharedFD& monitor_socket);
+
+Result<RunnerExitCodes> ReadExitCode(const SharedFD& monitor_socket);
+
+Result<SharedFD> GetLauncherMonitorFromInstance(
+    const CuttlefishConfig::InstanceSpecific& instance_config,
+    const int timeout_seconds);
+
+Result<SharedFD> GetLauncherMonitor(const CuttlefishConfig& config,
+                                    const int instance_num,
+                                    const int timeout_seconds);
+
+Result<void> WriteLauncherAction(const SharedFD& monitor_socket,
+                                 const LauncherAction request);
+
+Result<void> WaitForRead(const SharedFD& monitor_socket,
+                         const int timeout_seconds);
+
+}  // namespace cuttlefish
diff --git a/host/libs/config/Android.bp b/host/libs/config/Android.bp
index b3d0efb..97676d2 100644
--- a/host/libs/config/Android.bp
+++ b/host/libs/config/Android.bp
@@ -26,12 +26,15 @@
         "cuttlefish_config.cpp",
         "cuttlefish_config_instance.cpp",
         "data_image.cpp",
+        "esp.cpp",
         "feature.cpp",
         "fetcher_config.cpp",
         "host_tools_version.cpp",
         "kernel_args.cpp",
         "known_paths.cpp",
+        "instance_nums.cpp",
         "logging.cpp",
+        "openwrt_args.cpp",
     ],
     shared_libs: [
         "libext2_blkid",
diff --git a/host/libs/config/adb/data.cpp b/host/libs/config/adb/data.cpp
index 81db3e9..59a6a89 100644
--- a/host/libs/config/adb/data.cpp
+++ b/host/libs/config/adb/data.cpp
@@ -19,7 +19,7 @@
 #include <set>
 
 namespace cuttlefish {
-namespace {}
+namespace {
 
 class AdbConfigImpl : public AdbConfig {
  public:
@@ -46,6 +46,8 @@
   bool run_connector_;
 };
 
+}  // namespace
+
 fruit::Component<AdbConfig> AdbConfigComponent() {
   return fruit::createComponent().bind<AdbConfig, AdbConfigImpl>();
 }
diff --git a/host/libs/config/adb/launch.cpp b/host/libs/config/adb/launch.cpp
index 65f439b..d400d80 100644
--- a/host/libs/config/adb/launch.cpp
+++ b/host/libs/config/adb/launch.cpp
@@ -15,6 +15,16 @@
  */
 #include "host/libs/config/adb/adb.h"
 
+#include <string>
+#include <unordered_set>
+#include <utility>
+#include <vector>
+
+#include <fruit/fruit.h>
+
+#include "common/libs/utils/result.h"
+#include "host/commands/kernel_log_monitor/utils.h"
+#include "host/libs/config/command_source.h"
 #include "host/libs/config/cuttlefish_config.h"
 #include "host/libs/config/known_paths.h"
 
@@ -68,8 +78,7 @@
   INJECT(AdbConnector(const AdbHelper& helper)) : helper_(helper) {}
 
   // CommandSource
-  std::vector<Command> Commands() override {
-    Command console_forwarder_cmd(ConsoleForwarderBinary());
+  Result<std::vector<MonitorCommand>> Commands() override {
     Command adb_connector(AdbConnectorBinary());
     std::set<std::string> addresses;
 
@@ -89,9 +98,10 @@
     }
     address_arg.pop_back();
     adb_connector.AddParameter(address_arg);
-    std::vector<Command> commands;
+
+    std::vector<MonitorCommand> commands;
     commands.emplace_back(std::move(adb_connector));
-    return std::move(commands);
+    return commands;
   }
 
   // SetupFeature
@@ -107,7 +117,7 @@
   const AdbHelper& helper_;
 };
 
-class SocketVsockProxy : public CommandSource {
+class SocketVsockProxy : public CommandSource, public KernelLogPipeConsumer {
  public:
   INJECT(SocketVsockProxy(const AdbHelper& helper,
                           const CuttlefishConfig::InstanceSpecific& instance,
@@ -117,11 +127,10 @@
         log_pipe_provider_(log_pipe_provider) {}
 
   // CommandSource
-  std::vector<Command> Commands() override {
-    std::vector<Command> commands;
+  Result<std::vector<MonitorCommand>> Commands() override {
+    std::vector<MonitorCommand> commands;
     if (helper_.VsockTunnelEnabled()) {
       Command adb_tunnel(SocketVsockProxyBinary());
-      adb_tunnel.AddParameter("-adbd_events_fd=", kernel_log_pipe_);
       /**
        * This socket_vsock_proxy (a.k.a. sv proxy) runs on the host. It assumes
        * that another sv proxy runs inside the guest. see:
@@ -136,15 +145,17 @@
        * instance.adb_host_port()
        *
        */
-      adb_tunnel.AddParameter("--server=tcp");
-      adb_tunnel.AddParameter("--vsock_port=6520");
+      adb_tunnel.AddParameter("--events_fd=", kernel_log_pipe_);
+      adb_tunnel.AddParameter("--start_event_id=", monitor::Event::AdbdStarted);
+      adb_tunnel.AddParameter("--server_type=tcp");
       adb_tunnel.AddParameter("--server_fd=", tcp_server_);
-      adb_tunnel.AddParameter("--vsock_cid=", instance_.vsock_guest_cid());
+      adb_tunnel.AddParameter("--client_type=vsock");
+      adb_tunnel.AddParameter("--client_vsock_port=6520");
+      adb_tunnel.AddParameter("--client_vsock_id=", instance_.vsock_guest_cid());
       commands.emplace_back(std::move(adb_tunnel));
     }
     if (helper_.VsockHalfTunnelEnabled()) {
       Command adb_tunnel(SocketVsockProxyBinary());
-      adb_tunnel.AddParameter("-adbd_events_fd=", kernel_log_pipe_);
       /*
        * This socket_vsock_proxy (a.k.a. sv proxy) runs on the host, and
        * cooperates with the adbd inside the guest. See this file:
@@ -155,10 +166,14 @@
        * should be therefore tcp, and the port should differ from instance to
        * instance and be equal to instance.adb_host_port()
        */
-      adb_tunnel.AddParameter("--server=tcp");
-      adb_tunnel.AddParameter("--vsock_port=", 5555);
+      adb_tunnel.AddParameter("--events_fd=", kernel_log_pipe_);
+      adb_tunnel.AddParameter("--start_event_id=", monitor::Event::AdbdStarted);
+      adb_tunnel.AddParameter("--server_type=tcp");
       adb_tunnel.AddParameter("--server_fd=", tcp_server_);
-      adb_tunnel.AddParameter("--vsock_cid=", instance_.vsock_guest_cid());
+      adb_tunnel.AddParameter("--client_type=vsock");
+      adb_tunnel.AddParameter("--client_vsock_port=", 5555);
+      adb_tunnel.AddParameter("--client_vsock_id=", instance_.vsock_guest_cid());
+      adb_tunnel.AddParameter("--label=", "adb");
       commands.emplace_back(std::move(adb_tunnel));
     }
     return commands;
@@ -202,6 +217,7 @@
       .addMultibinding<CommandSource, AdbConnector>()
       .addMultibinding<CommandSource, SocketVsockProxy>()
       .addMultibinding<SetupFeature, AdbConnector>()
+      .addMultibinding<KernelLogPipeConsumer, SocketVsockProxy>()
       .addMultibinding<SetupFeature, SocketVsockProxy>();
 }
 
diff --git a/host/libs/config/adb/test.cpp b/host/libs/config/adb/test.cpp
index 3250568..d90a9d5 100644
--- a/host/libs/config/adb/test.cpp
+++ b/host/libs/config/adb/test.cpp
@@ -49,7 +49,7 @@
   };
   auto flags = injector.getMultibindings<FlagFeature>();
   auto processed = FlagFeature::ProcessFlags(flags, args);
-  ASSERT_TRUE(processed.ok()) << processed.error();
+  ASSERT_TRUE(processed.ok()) << processed.error().Trace();
   ASSERT_TRUE(args.empty());
 
   std::set<AdbMode> modes = {AdbMode::VsockTunnel, AdbMode::VsockHalfTunnel,
diff --git a/host/libs/config/bootconfig_args.cpp b/host/libs/config/bootconfig_args.cpp
index 00673f6..8c61953 100644
--- a/host/libs/config/bootconfig_args.cpp
+++ b/host/libs/config/bootconfig_args.cpp
@@ -37,148 +37,183 @@
 namespace {
 
 template <typename T>
-void AppendVector(std::vector<T>* destination, const std::vector<T>& source) {
-  destination->insert(destination->end(), source.begin(), source.end());
-}
-
-template <typename S, typename T>
-std::string concat(const S& s, const T& t) {
-  std::ostringstream os;
-  os << s << t;
-  return os.str();
+void AppendMapWithReplacement(T* destination, const T& source) {
+  for (const auto& [k, v] : source) {
+    (*destination)[k] = v;
+  }
 }
 
 // TODO(schuffelen): Move more of this into host/libs/vm_manager, as a
 // substitute for the vm_manager comparisons.
-std::vector<std::string> VmManagerBootconfig(const CuttlefishConfig& config) {
-  std::vector<std::string> vm_manager_cmdline;
-  if (config.console()) {
-    vm_manager_cmdline.push_back("androidboot.console=" + config.console_dev());
+Result<std::unordered_map<std::string, std::string>> VmManagerBootconfig(
+    const CuttlefishConfig::InstanceSpecific& instance) {
+  std::unordered_map<std::string, std::string> bootconfig_args;
+  if (instance.console()) {
+    bootconfig_args["androidboot.console"] = instance.console_dev();
+    bootconfig_args["androidboot.serialconsole"] = "1";
   } else {
     // Specify an invalid path under /dev, so the init process will disable the
     // console service due to the console not being found. On physical devices,
-    // it is enough to not specify androidboot.console= *and* not specify the
-    // console= kernel command line parameter, because the console and kernel
-    // dmesg are muxed. However, on cuttlefish, we don't need to mux, and would
-    // prefer to retain the kernel dmesg logging, so we must work around init
-    // falling back to the check for /dev/console (which we'll always have).
-    vm_manager_cmdline.push_back("androidboot.console=invalid");
+    // *and on older kernels* it is enough to not specify androidboot.console=
+    // *and* not specify the console= kernel command line parameter, because
+    // the console and kernel dmesg are muxed. However, on cuttlefish, we don't
+    // need to mux, and would prefer to retain the kernel dmesg logging, so we
+    // must work around init falling back to the check for /dev/console (which
+    // we'll always have).
+    //bootconfig_args["androidboot.console"] = "invalid";
+    // The bug above has been fixed in Android 14 and later so we can just
+    // specify androidboot.serialconsole=0 instead.
+    bootconfig_args["androidboot.serialconsole"] = "0";
   }
-  return vm_manager_cmdline;
+  return bootconfig_args;
 }
 
 }  // namespace
 
-std::vector<std::string> BootconfigArgsFromConfig(
+Result<std::unordered_map<std::string, std::string>> BootconfigArgsFromConfig(
     const CuttlefishConfig& config,
     const CuttlefishConfig::InstanceSpecific& instance) {
-  std::vector<std::string> bootconfig_args;
+  std::unordered_map<std::string, std::string> bootconfig_args;
 
-  AppendVector(&bootconfig_args, VmManagerBootconfig(config));
-  auto vmm = vm_manager::GetVmManager(config.vm_manager(), config.target_arch());
-  bootconfig_args.push_back(
-      vmm->ConfigureBootDevices(instance.virtual_disk_paths().size()));
-  AppendVector(&bootconfig_args, vmm->ConfigureGraphics(config));
+  AppendMapWithReplacement(&bootconfig_args,
+                           CF_EXPECT(VmManagerBootconfig(instance)));
 
-  bootconfig_args.push_back(
-      concat("androidboot.serialno=", instance.serial_number()));
+  auto vmm =
+      vm_manager::GetVmManager(config.vm_manager(), instance.target_arch());
+  AppendMapWithReplacement(&bootconfig_args,
+                           CF_EXPECT(vmm->ConfigureBootDevices(
+                               instance.virtual_disk_paths().size(),
+                               instance.hwcomposer() != kHwComposerNone)));
+
+  AppendMapWithReplacement(&bootconfig_args,
+                           CF_EXPECT(vmm->ConfigureGraphics(instance)));
+
+  bootconfig_args["androidboot.serialno"] = instance.serial_number();
+  bootconfig_args["androidboot.ddr_size"] =
+      std::to_string(instance.ddr_mem_mb()) + "MB";
 
   // TODO(b/131884992): update to specify multiple once supported.
-  const auto display_configs = config.display_configs();
-  CHECK_GE(display_configs.size(), 1);
-  bootconfig_args.push_back(
-      concat("androidboot.lcd_density=", display_configs[0].dpi));
+  const auto display_configs = instance.display_configs();
+  if (!display_configs.empty()) {
+    bootconfig_args["androidboot.lcd_density"] =
+        std::to_string(display_configs[0].dpi);
+  }
 
-  bootconfig_args.push_back(
-      concat("androidboot.setupwizard_mode=", config.setupwizard_mode()));
-  if (!config.guest_enforce_security()) {
-    bootconfig_args.push_back("androidboot.selinux=permissive");
+  bootconfig_args["androidboot.setupwizard_mode"] = instance.setupwizard_mode();
+
+  bootconfig_args["androidboot.enable_bootanimation"] =
+      std::to_string(instance.enable_bootanimation());
+
+  if (!instance.guest_enforce_security()) {
+    bootconfig_args["androidboot.selinux"] = "permissive";
   }
 
   if (instance.tombstone_receiver_port()) {
-    bootconfig_args.push_back(concat("androidboot.vsock_tombstone_port=",
-                                     instance.tombstone_receiver_port()));
+    bootconfig_args["androidboot.vsock_tombstone_port"] =
+        std::to_string(instance.tombstone_receiver_port());
   }
 
-  if (instance.confui_host_vsock_port()) {
-    bootconfig_args.push_back(concat("androidboot.vsock_confirmationui_port=",
-                                     instance.confui_host_vsock_port()));
-  }
+  const auto enable_confui =
+      (config.vm_manager() == QemuManager::name() ? 0 : 1);
+  bootconfig_args["androidboot.enable_confirmationui"] =
+      std::to_string(enable_confui);
 
   if (instance.config_server_port()) {
-    bootconfig_args.push_back(
-        concat("androidboot.cuttlefish_config_server_port=",
-               instance.config_server_port()));
+    bootconfig_args["androidboot.cuttlefish_config_server_port"] =
+        std::to_string(instance.config_server_port());
   }
 
   if (instance.keyboard_server_port()) {
-    bootconfig_args.push_back(concat("androidboot.vsock_keyboard_port=",
-                                     instance.keyboard_server_port()));
+    bootconfig_args["androidboot.vsock_keyboard_port"] =
+        std::to_string(instance.keyboard_server_port());
   }
 
   if (instance.touch_server_port()) {
-    bootconfig_args.push_back(
-        concat("androidboot.vsock_touch_port=", instance.touch_server_port()));
-  }
-
-  if (config.enable_vehicle_hal_grpc_server() &&
-      instance.vehicle_hal_server_port() &&
-      FileExists(VehicleHalGrpcServerBinary())) {
-    constexpr int vehicle_hal_server_cid = 2;
-    bootconfig_args.push_back(concat(
-        "androidboot.vendor.vehiclehal.server.cid=", vehicle_hal_server_cid));
-    bootconfig_args.push_back(
-        concat("androidboot.vendor.vehiclehal.server.port=",
-               instance.vehicle_hal_server_port()));
+    bootconfig_args["androidboot.vsock_touch_port"] =
+        std::to_string(instance.touch_server_port());
   }
 
   if (instance.audiocontrol_server_port()) {
-    bootconfig_args.push_back(
-        concat("androidboot.vendor.audiocontrol.server.cid=",
-               instance.vsock_guest_cid()));
-    bootconfig_args.push_back(
-        concat("androidboot.vendor.audiocontrol.server.port=",
-               instance.audiocontrol_server_port()));
+    bootconfig_args["androidboot.vendor.audiocontrol.server.cid"] =
+        std::to_string(instance.vsock_guest_cid());
+    bootconfig_args["androidboot.vendor.audiocontrol.server.port"] =
+        std::to_string(instance.audiocontrol_server_port());
+  }
+
+  if (!instance.enable_audio()) {
+    bootconfig_args["androidboot.audio.tinyalsa.ignore_output"] = "true";
+    bootconfig_args["androidboot.audio.tinyalsa.simulate_input"] = "true";
   }
 
   if (instance.camera_server_port()) {
-    bootconfig_args.push_back(concat("androidboot.vsock_camera_port=",
-                                     instance.camera_server_port()));
-    bootconfig_args.push_back(
-        concat("androidboot.vsock_camera_cid=", instance.vsock_guest_cid()));
+    bootconfig_args["androidboot.vsock_camera_port"] =
+        std::to_string(instance.camera_server_port());
+    bootconfig_args["androidboot.vsock_camera_cid"] =
+        std::to_string(instance.vsock_guest_cid());
   }
 
-  if (config.enable_modem_simulator() &&
+  if (instance.enable_modem_simulator() &&
       instance.modem_simulator_ports() != "") {
-    bootconfig_args.push_back(concat("androidboot.modem_simulator_ports=",
-                                     instance.modem_simulator_ports()));
+    bootconfig_args["androidboot.modem_simulator_ports"] =
+        instance.modem_simulator_ports();
   }
 
-  bootconfig_args.push_back(concat("androidboot.fstab_suffix=",
-                                   config.userdata_format()));
+  // Once all Cuttlefish kernel versions are at least 5.15, filename encryption
+  // will not need to be set conditionally. HCTR2 will always be available.
+  // At that point fstab.cf.f2fs.cts and fstab.cf.ext4.cts can be removed.
+  std::string fstab_suffix = fmt::format("cf.{}.{}", instance.userdata_format(),
+                                         instance.filename_encryption_mode());
 
-  bootconfig_args.push_back(
-      concat("androidboot.wifi_mac_prefix=", instance.wifi_mac_prefix()));
+  bootconfig_args["androidboot.fstab_suffix"] = fstab_suffix;
+
+  bootconfig_args["androidboot.wifi_mac_prefix"] =
+      std::to_string(instance.wifi_mac_prefix());
 
   // Non-native architecture implies a significantly slower execution speed, so
   // set a large timeout multiplier.
-  if (!IsHostCompatible(config.target_arch())) {
-    bootconfig_args.push_back("androidboot.hw_timeout_multiplier=50");
+  if (!IsHostCompatible(instance.target_arch())) {
+    bootconfig_args["androidboot.hw_timeout_multiplier"] = "50";
   }
 
   // TODO(b/217564326): improve this checks for a hypervisor in the VM.
-  if (config.target_arch() == Arch::X86 ||
-      config.target_arch() == Arch::X86_64) {
-    bootconfig_args.push_back(
-        concat("androidboot.hypervisor.version=cf-", config.vm_manager()));
-    bootconfig_args.push_back("androidboot.hypervisor.vm.supported=1");
-    bootconfig_args.push_back(
-        "androidboot.hypervisor.protected_vm.supported=0");
+  if (instance.target_arch() == Arch::X86 ||
+      instance.target_arch() == Arch::X86_64) {
+    bootconfig_args["androidboot.hypervisor.version"] =
+        "cf-" + config.vm_manager();
+    bootconfig_args["androidboot.hypervisor.vm.supported"] = "1";
+  } else {
+    bootconfig_args["androidboot.hypervisor.vm.supported"] = "0";
+  }
+  bootconfig_args["androidboot.hypervisor.protected_vm.supported"] = "0";
+  if (!instance.kernel_path().empty()) {
+    bootconfig_args["androidboot.kernel_hotswapped"] = "1";
+  }
+  if (!instance.initramfs_path().empty()) {
+    bootconfig_args["androidboot.ramdisk_hotswapped"] = "1";
   }
 
-  AppendVector(&bootconfig_args, config.extra_bootconfig_args());
+  for (const std::string& kv : config.extra_bootconfig_args()) {
+    if (kv.empty()) {
+      continue;
+    }
+    const auto& parts = android::base::Split(kv, "=");
+    CF_EXPECT_EQ(parts.size(), 2,
+                 "Failed to parse --extra_bootconfig_args: \"" << kv << "\"");
+    bootconfig_args[parts[0]] = parts[1];
+  }
 
   return bootconfig_args;
 }
 
+Result<std::string> BootconfigArgsString(
+    const std::unordered_map<std::string, std::string>& args,
+    const std::string& separator) {
+  std::vector<std::string> combined_args;
+  for (const auto& [k, v] : args) {
+    CF_EXPECT(!v.empty(), "Found empty bootconfig value for " << k);
+    combined_args.push_back(k + "=" + v);
+  }
+  return android::base::Join(combined_args, separator);
+}
+
 }  // namespace cuttlefish
diff --git a/host/libs/config/bootconfig_args.h b/host/libs/config/bootconfig_args.h
index 6c43204..6153ecf 100644
--- a/host/libs/config/bootconfig_args.h
+++ b/host/libs/config/bootconfig_args.h
@@ -17,14 +17,19 @@
 #pragma once
 
 #include <string>
-#include <vector>
+#include <unordered_map>
 
+#include "common/libs/utils/result.h"
 #include "host/libs/config/cuttlefish_config.h"
 
 namespace cuttlefish {
 
-std::vector<std::string> BootconfigArgsFromConfig(
+Result<std::unordered_map<std::string, std::string>> BootconfigArgsFromConfig(
     const CuttlefishConfig& config,
     const CuttlefishConfig::InstanceSpecific& instance);
 
+Result<std::string> BootconfigArgsString(
+    const std::unordered_map<std::string, std::string>& args,
+    const std::string& separator);
+
 }  // namespace cuttlefish
diff --git a/host/libs/config/command_source.h b/host/libs/config/command_source.h
index c4e3b23..9da1cbb 100644
--- a/host/libs/config/command_source.h
+++ b/host/libs/config/command_source.h
@@ -15,18 +15,29 @@
 
 #pragma once
 
-#include <fruit/fruit.h>
+#include <utility>
 #include <vector>
 
+#include <fruit/fruit.h>
+
+#include "common/libs/utils/result.h"
 #include "common/libs/utils/subprocess.h"
 #include "host/libs/config/feature.h"
 
 namespace cuttlefish {
 
+struct MonitorCommand {
+  Command command;
+  bool is_critical;
+
+  MonitorCommand(Command command, bool is_critical = false)
+      : command(std::move(command)), is_critical(is_critical) {}
+};
+
 class CommandSource : public virtual SetupFeature {
  public:
   virtual ~CommandSource() = default;
-  virtual std::vector<Command> Commands() = 0;
+  virtual Result<std::vector<MonitorCommand>> Commands() = 0;
 };
 
 }  // namespace cuttlefish
diff --git a/host/libs/config/config_flag.cpp b/host/libs/config/config_flag.cpp
index d7b5f00..d4d5157 100644
--- a/host/libs/config/config_flag.cpp
+++ b/host/libs/config/config_flag.cpp
@@ -26,10 +26,12 @@
 
 #include "common/libs/utils/files.h"
 #include "common/libs/utils/flag_parser.h"
+#include "host/commands/assemble_cvd/flags_defaults.h"
 #include "host/libs/config/cuttlefish_config.h"
 
 // To support other files that use this from gflags.
-DEFINE_string(system_image_dir, "", "");
+// TODO: Add a description to this flag
+DEFINE_string(system_image_dir, CF_DEFAULTS_SYSTEM_IMAGE_DIR, "");
 
 using gflags::FlagSettingMode::SET_FLAGS_DEFAULT;
 
@@ -97,8 +99,11 @@
   std::string Name() const override { return "ConfigReader"; }
   std::unordered_set<FlagFeature*> Dependencies() const override { return {}; }
   bool Process(std::vector<std::string>&) override {
-    for (const std::string& file :
-         DirectoryContents(DefaultHostArtifactsPath("etc/cvd_config"))) {
+    auto directory_contents_result =
+        DirectoryContents(DefaultHostArtifactsPath("etc/cvd_config"));
+    CHECK(directory_contents_result.ok())
+        << directory_contents_result.error().Message();
+    for (const std::string& file : *directory_contents_result) {
       std::string_view local_file(file);
       if (android::base::ConsumePrefix(&local_file, "cvd_config_") &&
           android::base::ConsumeSuffix(&local_file, ".json")) {
diff --git a/host/libs/config/custom_actions.cpp b/host/libs/config/custom_actions.cpp
index af5524e..a44a1f6 100644
--- a/host/libs/config/custom_actions.cpp
+++ b/host/libs/config/custom_actions.cpp
@@ -16,6 +16,7 @@
 #include "host/libs/config/custom_actions.h"
 
 #include <android-base/logging.h>
+#include <android-base/parseint.h>
 #include <android-base/strings.h>
 #include <json/json.h>
 
@@ -31,6 +32,7 @@
 namespace cuttlefish {
 namespace {
 
+const char* kCustomActionInstanceID = "instance_id";
 const char* kCustomActionShellCommand = "shell_command";
 const char* kCustomActionServer = "server";
 const char* kCustomActionDeviceStates = "device_states";
@@ -42,118 +44,136 @@
 const char* kCustomActionButtonTitle = "title";
 const char* kCustomActionButtonIconName = "icon_name";
 
-std::optional<CustomActionConfig> CustomActionConfigFromJson(
+CustomActionInstanceID GetCustomActionInstanceIDFromJson(
     const Json::Value& dictionary) {
-  bool has_shell_command = dictionary.isMember(kCustomActionShellCommand);
-  bool has_server = dictionary.isMember(kCustomActionServer);
-  bool has_device_states = dictionary.isMember(kCustomActionDeviceStates);
-  if (!!has_shell_command + !!has_server + !!has_device_states != 1) {
-    LOG(ERROR) << "Custom action must contain exactly one of shell_command, "
-               << "server, or device_states";
-    return {};
+  CustomActionInstanceID config;
+  config.instance_id = dictionary[kCustomActionInstanceID].asString();
+  return config;
+}
+
+CustomShellActionConfig GetCustomShellActionConfigFromJson(
+    const Json::Value& dictionary) {
+  CustomShellActionConfig config;
+  // Shell command with one button.
+  Json::Value button_entry = dictionary[kCustomActionButton];
+  config.button = {button_entry[kCustomActionButtonCommand].asString(),
+    button_entry[kCustomActionButtonTitle].asString(),
+    button_entry[kCustomActionButtonIconName].asString()};
+  config.shell_command = dictionary[kCustomActionShellCommand].asString();
+  return config;
+}
+
+CustomActionServerConfig GetCustomActionServerConfigFromJson(
+    const Json::Value& dictionary) {
+  CustomActionServerConfig config;
+  // Action server with possibly multiple buttons.
+  for (const Json::Value& button_entry : dictionary[kCustomActionButtons]) {
+    config.buttons.push_back(
+        {button_entry[kCustomActionButtonCommand].asString(),
+        button_entry[kCustomActionButtonTitle].asString(),
+        button_entry[kCustomActionButtonIconName].asString()});
   }
-  CustomActionConfig config;
-  if (has_shell_command) {
-    // Shell command with one button.
-    Json::Value button_entry = dictionary[kCustomActionButton];
-    config.buttons = {{button_entry[kCustomActionButtonCommand].asString(),
-                       button_entry[kCustomActionButtonTitle].asString(),
-                       button_entry[kCustomActionButtonIconName].asString()}};
-    config.shell_command = dictionary[kCustomActionShellCommand].asString();
-  } else if (has_server) {
-    // Action server with possibly multiple buttons.
-    for (const Json::Value& button_entry : dictionary[kCustomActionButtons]) {
-      ControlPanelButton button = {
-          button_entry[kCustomActionButtonCommand].asString(),
-          button_entry[kCustomActionButtonTitle].asString(),
-          button_entry[kCustomActionButtonIconName].asString()};
-      config.buttons.push_back(button);
+  config.server = dictionary[kCustomActionServer].asString();
+  return config;
+}
+
+CustomDeviceStateActionConfig GetCustomDeviceStateActionConfigFromJson(
+    const Json::Value& dictionary) {
+  CustomDeviceStateActionConfig config;
+  // Device state(s) with one button.
+  // Each button press cycles to the next state, then repeats to the first.
+  Json::Value button_entry = dictionary[kCustomActionButton];
+  config.button = {button_entry[kCustomActionButtonCommand].asString(),
+    button_entry[kCustomActionButtonTitle].asString(),
+    button_entry[kCustomActionButtonIconName].asString()};
+  for (const Json::Value& device_state_entry :
+      dictionary[kCustomActionDeviceStates]) {
+    DeviceState state;
+    if (device_state_entry.isMember(
+          kCustomActionDeviceStateLidSwitchOpen)) {
+      state.lid_switch_open =
+        device_state_entry[kCustomActionDeviceStateLidSwitchOpen].asBool();
     }
-    config.server = dictionary[kCustomActionServer].asString();
-  } else if (has_device_states) {
-    // Device state(s) with one button.
-    // Each button press cycles to the next state, then repeats to the first.
-    Json::Value button_entry = dictionary[kCustomActionButton];
-    config.buttons = {{button_entry[kCustomActionButtonCommand].asString(),
-                       button_entry[kCustomActionButtonTitle].asString(),
-                       button_entry[kCustomActionButtonIconName].asString()}};
-    for (const Json::Value& device_state_entry :
-         dictionary[kCustomActionDeviceStates]) {
-      DeviceState state;
-      if (device_state_entry.isMember(kCustomActionDeviceStateLidSwitchOpen)) {
-        state.lid_switch_open =
-            device_state_entry[kCustomActionDeviceStateLidSwitchOpen].asBool();
-      }
-      if (device_state_entry.isMember(
-              kCustomActionDeviceStateHingeAngleValue)) {
-        state.hinge_angle_value =
-            device_state_entry[kCustomActionDeviceStateHingeAngleValue].asInt();
-      }
-      config.device_states.push_back(state);
+    if (device_state_entry.isMember(
+          kCustomActionDeviceStateHingeAngleValue)) {
+      state.hinge_angle_value =
+        device_state_entry[kCustomActionDeviceStateHingeAngleValue].asInt();
     }
-  } else {
-    LOG(ERROR) << "Unknown custom action type.";
-    return {};
+    config.device_states.push_back(state);
   }
   return config;
 }
 
-Json::Value ToJson(const CustomActionConfig& custom_action) {
+Json::Value ToJson(const CustomActionInstanceID& custom_action) {
   Json::Value json;
-  if (custom_action.shell_command) {
-    // Shell command with one button.
-    json[kCustomActionShellCommand] = *custom_action.shell_command;
-    json[kCustomActionButton] = Json::Value();
-    json[kCustomActionButton][kCustomActionButtonCommand] =
-        custom_action.buttons[0].command;
-    json[kCustomActionButton][kCustomActionButtonTitle] =
-        custom_action.buttons[0].title;
-    json[kCustomActionButton][kCustomActionButtonIconName] =
-        custom_action.buttons[0].icon_name;
-  } else if (custom_action.server) {
-    // Action server with possibly multiple buttons.
-    json[kCustomActionServer] = *custom_action.server;
-    json[kCustomActionButtons] = Json::Value(Json::arrayValue);
-    for (const auto& button : custom_action.buttons) {
-      Json::Value button_entry;
-      button_entry[kCustomActionButtonCommand] = button.command;
-      button_entry[kCustomActionButtonTitle] = button.title;
-      button_entry[kCustomActionButtonIconName] = button.icon_name;
-      json[kCustomActionButtons].append(button_entry);
-    }
-  } else if (!custom_action.device_states.empty()) {
-    // Device state(s) with one button.
-    json[kCustomActionDeviceStates] = Json::Value(Json::arrayValue);
-    for (const auto& device_state : custom_action.device_states) {
-      Json::Value device_state_entry;
-      if (device_state.lid_switch_open) {
-        device_state_entry[kCustomActionDeviceStateLidSwitchOpen] =
-            *device_state.lid_switch_open;
-      }
-      if (device_state.hinge_angle_value) {
-        device_state_entry[kCustomActionDeviceStateHingeAngleValue] =
-            *device_state.hinge_angle_value;
-      }
-      json[kCustomActionDeviceStates].append(device_state_entry);
-    }
-    json[kCustomActionButton] = Json::Value();
-    json[kCustomActionButton][kCustomActionButtonCommand] =
-        custom_action.buttons[0].command;
-    json[kCustomActionButton][kCustomActionButtonTitle] =
-        custom_action.buttons[0].title;
-    json[kCustomActionButton][kCustomActionButtonIconName] =
-        custom_action.buttons[0].icon_name;
-  } else {
-    LOG(FATAL) << "Unknown custom action type.";
+  json[kCustomActionInstanceID] = custom_action.instance_id;
+  return json;
+}
+
+Json::Value ToJson(const CustomShellActionConfig& custom_action) {
+  Json::Value json;
+  // Shell command with one button.
+  json[kCustomActionShellCommand] = custom_action.shell_command;
+  json[kCustomActionButton] = Json::Value();
+  json[kCustomActionButton][kCustomActionButtonCommand] =
+      custom_action.button.command;
+  json[kCustomActionButton][kCustomActionButtonTitle] =
+      custom_action.button.title;
+  json[kCustomActionButton][kCustomActionButtonIconName] =
+      custom_action.button.icon_name;
+  return json;
+}
+
+Json::Value ToJson(const CustomActionServerConfig& custom_action) {
+  Json::Value json;
+  // Action server with possibly multiple buttons.
+  json[kCustomActionServer] = custom_action.server;
+  json[kCustomActionButtons] = Json::Value(Json::arrayValue);
+  for (const auto& button : custom_action.buttons) {
+    Json::Value button_entry;
+    button_entry[kCustomActionButtonCommand] = button.command;
+    button_entry[kCustomActionButtonTitle] = button.title;
+    button_entry[kCustomActionButtonIconName] = button.icon_name;
+    json[kCustomActionButtons].append(button_entry);
   }
   return json;
 }
 
+Json::Value ToJson(const CustomDeviceStateActionConfig& custom_action) {
+  Json::Value json;
+  // Device state(s) with one button.
+  json[kCustomActionDeviceStates] = Json::Value(Json::arrayValue);
+  for (const auto& device_state : custom_action.device_states) {
+    Json::Value device_state_entry;
+    if (device_state.lid_switch_open) {
+      device_state_entry[kCustomActionDeviceStateLidSwitchOpen] =
+          *device_state.lid_switch_open;
+    }
+    if (device_state.hinge_angle_value) {
+      device_state_entry[kCustomActionDeviceStateHingeAngleValue] =
+          *device_state.hinge_angle_value;
+    }
+    json[kCustomActionDeviceStates].append(device_state_entry);
+  }
+  json[kCustomActionButton] = Json::Value();
+  json[kCustomActionButton][kCustomActionButtonCommand] =
+      custom_action.button.command;
+  json[kCustomActionButton][kCustomActionButtonTitle] =
+      custom_action.button.title;
+  json[kCustomActionButton][kCustomActionButtonIconName] =
+      custom_action.button.icon_name;
+  return json;
+}
+
 std::string DefaultCustomActionConfig() {
   auto custom_action_config_dir =
       DefaultHostArtifactsPath("etc/cvd_custom_action_config");
   if (DirectoryExists(custom_action_config_dir)) {
-    auto custom_action_configs = DirectoryContents(custom_action_config_dir);
+    auto directory_contents_result =
+        DirectoryContents(custom_action_config_dir);
+    CHECK(directory_contents_result.ok())
+        << directory_contents_result.error().Trace();
+    auto custom_action_configs = std::move(*directory_contents_result);
     // Two entries are always . and ..
     if (custom_action_configs.size() > 3) {
       LOG(ERROR) << "Expected at most one custom action config in "
@@ -169,6 +189,18 @@
   return "";
 }
 
+int get_instance_order(const std::string& id_str) {
+  int instance_index = 0;
+  const auto& config = CuttlefishConfig::Get();
+  for (const auto& instance : config->Instances()) {
+    if (instance.id() == id_str) {
+      break;
+    }
+    instance_index++;
+  }
+  return instance_index;
+}
+
 class CustomActionConfigImpl : public CustomActionConfigProvider {
  public:
   INJECT(CustomActionConfigImpl(ConfigFlag& config)) : config_(config) {
@@ -178,14 +210,18 @@
         "build variable CVD_CUSTOM_ACTION_CONFIG. If this build variable is "
         "empty then the custom action config will be empty as well.");
     custom_action_config_flag_.Getter(
-        [this]() { return custom_action_config_; });
+        [this]() { return custom_action_config_[0]; });
     custom_action_config_flag_.Setter([this](const FlagMatch& match) {
-      if (!match.value.empty() && !FileExists(match.value)) {
+      if (!match.value.empty() &&
+          (match.value == "unset" || match.value == "\"unset\"")) {
+        custom_action_config_.push_back(DefaultCustomActionConfig());
+      } else if (!match.value.empty() && !FileExists(match.value)) {
         LOG(ERROR) << "custom_action_config file \"" << match.value << "\" "
                    << "does not exist.";
         return false;
+      } else {
+        custom_action_config_.push_back(match.value);
       }
-      custom_action_config_ = match.value;
       return true;
     });
     // TODO(schuffelen): Access ConfigFlag directly for these values.
@@ -198,6 +234,10 @@
         "combined with actions in --custom_action_config.");
     custom_actions_flag_.Setter([this](const FlagMatch& match) {
       // Load the custom action from the --config preset file.
+      if (match.value == "unset" || match.value == "\"unset\"") {
+        AddEmptyJsonCustomActionConfigs();
+        return true;
+      }
       Json::CharReaderBuilder builder;
       std::unique_ptr<Json::CharReader> reader(builder.newCharReader());
       std::string errorMessage;
@@ -212,15 +252,73 @@
     });
   }
 
-  const std::vector<CustomActionConfig>& CustomActions() const override {
-    return custom_actions_;
+  const std::vector<CustomShellActionConfig> CustomShellActions(
+      const std::string& id_str = std::string()) const override {
+    int instance_index = 0;
+    if (instance_actions_.empty()) {
+      // No Custom Action input, return empty vector
+      return {};
+    }
+
+    if (!id_str.empty()) {
+      instance_index = get_instance_order(id_str);
+    }
+    if (instance_index >= instance_actions_.size()) {
+      instance_index = 0;
+    }
+    return instance_actions_[instance_index].custom_shell_actions_;
+  }
+
+  const std::vector<CustomActionServerConfig> CustomActionServers(
+      const std::string& id_str = std::string()) const override {
+    int instance_index = 0;
+    if (instance_actions_.empty()) {
+      // No Custom Action input, return empty vector
+      return {};
+    }
+
+    if (!id_str.empty()) {
+      instance_index = get_instance_order(id_str);
+    }
+    if (instance_index >= instance_actions_.size()) {
+      instance_index = 0;
+    }
+    return instance_actions_[instance_index].custom_action_servers_;
+  }
+
+  const std::vector<CustomDeviceStateActionConfig> CustomDeviceStateActions(
+      const std::string& id_str = std::string()) const override {
+    int instance_index = 0;
+    if (instance_actions_.empty()) {
+      // No Custom Action input, return empty vector
+      return {};
+    }
+
+    if (!id_str.empty()) {
+      instance_index = get_instance_order(id_str);
+    }
+    if (instance_index >= instance_actions_.size()) {
+      instance_index = 0;
+    }
+    return instance_actions_[instance_index].custom_device_state_actions_;
   }
 
   // ConfigFragment
   Json::Value Serialize() const override {
     Json::Value actions_array(Json::arrayValue);
-    for (const auto& action : CustomActions()) {
-      actions_array.append(ToJson(action));
+    for (const auto& each_instance_actions_ : instance_actions_) {
+      actions_array.append(
+          ToJson(each_instance_actions_.custom_action_instance_id_));
+      for (const auto& action : each_instance_actions_.custom_shell_actions_) {
+        actions_array.append(ToJson(action));
+      }
+      for (const auto& action : each_instance_actions_.custom_action_servers_) {
+        actions_array.append(ToJson(action));
+      }
+      for (const auto& action :
+           each_instance_actions_.custom_device_state_actions_) {
+        actions_array.append(ToJson(action));
+      }
     }
     return actions_array;
   }
@@ -235,22 +333,31 @@
   }
 
   bool Process(std::vector<std::string>& args) override {
-    custom_action_config_ = DefaultCustomActionConfig();
     if (!ParseFlags(Flags(), args)) {
       return false;
     }
-    if (custom_action_config_ != "") {
-      Json::CharReaderBuilder builder;
-      std::ifstream ifs(custom_action_config_);
-      std::string errorMessage;
-      Json::Value custom_action_array(Json::arrayValue);
-      if (!Json::parseFromStream(builder, ifs, &custom_action_array,
-                                 &errorMessage)) {
-        LOG(ERROR) << "Could not read custom actions config file "
-                   << custom_action_config_ << ": " << errorMessage;
-        return false;
+    if (custom_action_config_.empty()) {
+      // no custom action flag input
+      custom_action_config_.push_back(DefaultCustomActionConfig());
+    }
+    for (const auto& config : custom_action_config_) {
+      if (config != "") {
+        Json::CharReaderBuilder builder;
+        std::ifstream ifs(config);
+        std::string errorMessage;
+        Json::Value custom_action_array(Json::arrayValue);
+        if (!Json::parseFromStream(builder, ifs, &custom_action_array,
+                                   &errorMessage)) {
+          LOG(ERROR) << "Could not read custom actions config file " << config
+                     << ": " << errorMessage;
+          return false;
+        }
+        if (!AddJsonCustomActionConfigs(custom_action_array)) {
+          return false;
+        }
+      } else {
+        AddEmptyJsonCustomActionConfigs();
       }
-      return AddJsonCustomActionConfigs(custom_action_array);
     }
     return true;
   }
@@ -259,33 +366,87 @@
   }
 
  private:
+  struct InstanceActions {
+    std::vector<CustomShellActionConfig> custom_shell_actions_;
+    std::vector<CustomActionServerConfig> custom_action_servers_;
+    std::vector<CustomDeviceStateActionConfig> custom_device_state_actions_;
+    CustomActionInstanceID custom_action_instance_id_;
+  };
+
   std::vector<Flag> Flags() const {
     return {custom_action_config_flag_, custom_actions_flag_};
   }
 
+  void AddEmptyJsonCustomActionConfigs() {
+    InstanceActions instance_action;
+    instance_action.custom_action_instance_id_.instance_id =
+        std::to_string(instance_actions_.size());
+    instance_actions_.push_back(instance_action);
+  }
+
   bool AddJsonCustomActionConfigs(const Json::Value& custom_action_array) {
     if (custom_action_array.type() != Json::arrayValue) {
       LOG(ERROR) << "Expected a JSON array of custom actions";
       return false;
     }
-    for (const auto& custom_action_json : custom_action_array) {
-      auto custom_action = CustomActionConfigFromJson(custom_action_json);
-      if (custom_action) {
-        custom_actions_.push_back(*custom_action);
+    InstanceActions instance_action;
+    instance_action.custom_action_instance_id_.instance_id = "-1";
+
+    for (const auto& custom_action : custom_action_array) {
+      // for multi-instances case, assume instance_id, shell_command,
+      // server and device_states comes together before next instance
+      bool has_instance_id = custom_action.isMember(kCustomActionInstanceID);
+      bool has_shell_command =
+          custom_action.isMember(kCustomActionShellCommand);
+      bool has_server = custom_action.isMember(kCustomActionServer);
+      bool has_device_states =
+          custom_action.isMember(kCustomActionDeviceStates);
+      if (!!has_shell_command + !!has_server + !!has_device_states +
+              !!has_instance_id !=
+          1) {
+        LOG(ERROR) << "Custom action must contain exactly one of "
+                      "shell_command, server, device_states or instance_id";
+        return false;
+      }
+
+      if (has_shell_command) {
+        auto config = GetCustomShellActionConfigFromJson(custom_action);
+        instance_action.custom_shell_actions_.push_back(config);
+      } else if (has_server) {
+        auto config = GetCustomActionServerConfigFromJson(custom_action);
+        instance_action.custom_action_servers_.push_back(config);
+      } else if (has_device_states) {
+        auto config = GetCustomDeviceStateActionConfigFromJson(custom_action);
+        instance_action.custom_device_state_actions_.push_back(config);
+      } else if (has_instance_id) {
+        auto config = GetCustomActionInstanceIDFromJson(custom_action);
+        if (instance_action.custom_action_instance_id_.instance_id != "-1") {
+          // already has instance id, start a new instance
+          instance_actions_.push_back(instance_action);
+          instance_action = InstanceActions();
+        }
+        instance_action.custom_action_instance_id_ = config;
       } else {
-        LOG(ERROR) << "Validation failed on a custom action";
+        LOG(ERROR) << "Unknown custom action type.";
         return false;
       }
     }
+    if (instance_action.custom_action_instance_id_.instance_id == "-1") {
+      // default id "-1" which means no instance id assigned yet
+      // at this time, just assign the # of instance as ID
+      instance_action.custom_action_instance_id_.instance_id =
+          std::to_string(instance_actions_.size());
+    }
+    instance_actions_.push_back(instance_action);
     return true;
   }
 
-  ConfigFlag& config_;
-  Flag custom_action_config_flag_;
-  std::string custom_action_config_;
-  Flag custom_actions_flag_;
-  std::vector<CustomActionConfig> custom_actions_;
-};
+    ConfigFlag& config_;
+    Flag custom_action_config_flag_;
+    std::vector<std::string> custom_action_config_;
+    Flag custom_actions_flag_;
+    std::vector<InstanceActions> instance_actions_;
+  };
 
 }  // namespace
 
diff --git a/host/libs/config/custom_actions.h b/host/libs/config/custom_actions.h
index 73f3901..53beefb 100644
--- a/host/libs/config/custom_actions.h
+++ b/host/libs/config/custom_actions.h
@@ -37,16 +37,33 @@
   std::optional<int> hinge_angle_value;
 };
 
-struct CustomActionConfig {
+struct CustomActionInstanceID {
+  std::string instance_id;
+};
+
+struct CustomShellActionConfig {
+  ControlPanelButton button;
+  std::string shell_command;
+};
+
+struct CustomActionServerConfig {
+  std::string server;
   std::vector<ControlPanelButton> buttons;
-  std::optional<std::string> shell_command;
-  std::optional<std::string> server;
+};
+
+struct CustomDeviceStateActionConfig {
+  ControlPanelButton button;
   std::vector<DeviceState> device_states;
 };
 
 class CustomActionConfigProvider : public FlagFeature, public ConfigFragment {
  public:
-  virtual const std::vector<CustomActionConfig>& CustomActions() const = 0;
+  virtual const std::vector<CustomShellActionConfig> CustomShellActions(
+      const std::string& id_str = std::string()) const = 0;
+  virtual const std::vector<CustomActionServerConfig> CustomActionServers(
+      const std::string& id_str = std::string()) const = 0;
+  virtual const std::vector<CustomDeviceStateActionConfig>
+  CustomDeviceStateActions(const std::string& id_str = std::string()) const = 0;
 };
 
 fruit::Component<fruit::Required<ConfigFlag>, CustomActionConfigProvider>
diff --git a/host/libs/config/cuttlefish_config.cpp b/host/libs/config/cuttlefish_config.cpp
index 4b169af..1b9e07b 100644
--- a/host/libs/config/cuttlefish_config.cpp
+++ b/host/libs/config/cuttlefish_config.cpp
@@ -60,17 +60,14 @@
 }
 
 int InstanceFromEnvironment() {
-  static constexpr char kInstanceEnvironmentVariable[] = "CUTTLEFISH_INSTANCE";
-
-  // CUTTLEFISH_INSTANCE environment variable
-  std::string instance_str = StringFromEnv(kInstanceEnvironmentVariable, "");
+  std::string instance_str = StringFromEnv(kCuttlefishInstanceEnvVarName, "");
   if (instance_str.empty()) {
     // Try to get it from the user instead
     instance_str = StringFromEnv("USER", "");
 
     if (instance_str.empty()) {
-      LOG(DEBUG) << "CUTTLEFISH_INSTANCE and USER unset, using instance id "
-                 << kDefaultInstance;
+      LOG(DEBUG) << kCuttlefishInstanceEnvVarName
+                 << " and USER unset, using instance id " << kDefaultInstance;
       return kDefaultInstance;
     }
     if (!android::base::StartsWith(instance_str, kVsocUserPrefix)) {
@@ -88,13 +85,16 @@
 }  // namespace
 
 const char* const kGpuModeAuto = "auto";
-const char* const kGpuModeGuestSwiftshader = "guest_swiftshader";
 const char* const kGpuModeDrmVirgl = "drm_virgl";
-const char* const kGpuModeGfxStream = "gfxstream";
+const char* const kGpuModeGfxstream = "gfxstream";
+const char* const kGpuModeGfxstreamGuestAngle = "gfxstream_guest_angle";
+const char* const kGpuModeGuestSwiftshader = "guest_swiftshader";
+const char* const kGpuModeNone = "none";
 
 const char* const kHwComposerAuto = "auto";
 const char* const kHwComposerDrm = "drm";
 const char* const kHwComposerRanchu = "ranchu";
+const char* const kHwComposerNone = "none";
 
 std::string DefaultEnvironmentPath(const char* environment_key,
                                    const char* default_value,
@@ -143,125 +143,6 @@
   (*dictionary_)[kVmManager] = name;
 }
 
-static constexpr char kGpuMode[] = "gpu_mode";
-std::string CuttlefishConfig::gpu_mode() const {
-  return (*dictionary_)[kGpuMode].asString();
-}
-void CuttlefishConfig::set_gpu_mode(const std::string& name) {
-  (*dictionary_)[kGpuMode] = name;
-}
-
-static constexpr char kGpuCaptureBinary[] = "gpu_capture_binary";
-std::string CuttlefishConfig::gpu_capture_binary() const {
-  return (*dictionary_)[kGpuCaptureBinary].asString();
-}
-void CuttlefishConfig::set_gpu_capture_binary(const std::string& name) {
-  (*dictionary_)[kGpuCaptureBinary] = name;
-}
-
-static constexpr char kHWComposer[] = "hwcomposer";
-std::string CuttlefishConfig::hwcomposer() const {
-  return (*dictionary_)[kHWComposer].asString();
-}
-void CuttlefishConfig::set_hwcomposer(const std::string& name) {
-  (*dictionary_)[kHWComposer] = name;
-}
-
-static constexpr char kEnableGpuUdmabuf[] = "enable_gpu_udmabuf";
-void CuttlefishConfig::set_enable_gpu_udmabuf(const bool enable_gpu_udmabuf) {
-  (*dictionary_)[kEnableGpuUdmabuf] = enable_gpu_udmabuf;
-}
-bool CuttlefishConfig::enable_gpu_udmabuf() const {
-  return (*dictionary_)[kEnableGpuUdmabuf].asBool();
-}
-
-static constexpr char kEnableGpuAngle[] = "enable_gpu_angle";
-void CuttlefishConfig::set_enable_gpu_angle(const bool enable_gpu_angle) {
-  (*dictionary_)[kEnableGpuAngle] = enable_gpu_angle;
-}
-bool CuttlefishConfig::enable_gpu_angle() const {
-  return (*dictionary_)[kEnableGpuAngle].asBool();
-}
-
-static constexpr char kCpus[] = "cpus";
-int CuttlefishConfig::cpus() const { return (*dictionary_)[kCpus].asInt(); }
-void CuttlefishConfig::set_cpus(int cpus) { (*dictionary_)[kCpus] = cpus; }
-
-static constexpr char kMemoryMb[] = "memory_mb";
-int CuttlefishConfig::memory_mb() const {
-  return (*dictionary_)[kMemoryMb].asInt();
-}
-void CuttlefishConfig::set_memory_mb(int memory_mb) {
-  (*dictionary_)[kMemoryMb] = memory_mb;
-}
-
-static constexpr char kDisplayConfigs[] = "display_configs";
-static constexpr char kXRes[] = "x_res";
-static constexpr char kYRes[] = "y_res";
-static constexpr char kDpi[] = "dpi";
-static constexpr char kRefreshRateHz[] = "refresh_rate_hz";
-std::vector<CuttlefishConfig::DisplayConfig>
-CuttlefishConfig::display_configs() const {
-  std::vector<DisplayConfig> display_configs;
-  for (auto& display_config_json : (*dictionary_)[kDisplayConfigs]) {
-    DisplayConfig display_config = {};
-    display_config.width = display_config_json[kXRes].asInt();
-    display_config.height = display_config_json[kYRes].asInt();
-    display_config.dpi = display_config_json[kDpi].asInt();
-    display_config.refresh_rate_hz =
-        display_config_json[kRefreshRateHz].asInt();
-    display_configs.emplace_back(std::move(display_config));
-  }
-  return display_configs;
-}
-void CuttlefishConfig::set_display_configs(
-    const std::vector<DisplayConfig>& display_configs) {
-  Json::Value display_configs_json(Json::arrayValue);
-
-  for (const DisplayConfig& display_configs : display_configs) {
-    Json::Value display_config_json(Json::objectValue);
-    display_config_json[kXRes] = display_configs.width;
-    display_config_json[kYRes] = display_configs.height;
-    display_config_json[kDpi] = display_configs.dpi;
-    display_config_json[kRefreshRateHz] = display_configs.refresh_rate_hz;
-    display_configs_json.append(display_config_json);
-  }
-
-  (*dictionary_)[kDisplayConfigs] = display_configs_json;
-}
-
-void CuttlefishConfig::SetPath(const std::string& key,
-                               const std::string& path) {
-  if (!path.empty()) {
-    (*dictionary_)[key] = AbsolutePath(path);
-  }
-}
-
-static constexpr char kGdbPort[] = "gdb_port";
-int CuttlefishConfig::gdb_port() const {
-  return (*dictionary_)[kGdbPort].asInt();
-}
-void CuttlefishConfig::set_gdb_port(int port) {
-  (*dictionary_)[kGdbPort] = port;
-}
-
-static constexpr char kDeprecatedBootCompleted[] = "deprecated_boot_completed";
-bool CuttlefishConfig::deprecated_boot_completed() const {
-  return (*dictionary_)[kDeprecatedBootCompleted].asBool();
-}
-void CuttlefishConfig::set_deprecated_boot_completed(
-    bool deprecated_boot_completed) {
-  (*dictionary_)[kDeprecatedBootCompleted] = deprecated_boot_completed;
-}
-
-static constexpr char kCuttlefishEnvPath[] = "cuttlefish_env_path";
-void CuttlefishConfig::set_cuttlefish_env_path(const std::string& path) {
-  SetPath(kCuttlefishEnvPath, path);
-}
-std::string CuttlefishConfig::cuttlefish_env_path() const {
-  return (*dictionary_)[kCuttlefishEnvPath].asString();
-}
-
 static SecureHal StringToSecureHal(std::string mode) {
   std::transform(mode.begin(), mode.end(), mode.begin(), ::tolower);
   if (mode == "keymint") {
@@ -289,22 +170,6 @@
   (*dictionary_)[kSecureHals] = hals_json_obj;
 }
 
-static constexpr char kSetupWizardMode[] = "setupwizard_mode";
-std::string CuttlefishConfig::setupwizard_mode() const {
-  return (*dictionary_)[kSetupWizardMode].asString();
-}
-void CuttlefishConfig::set_setupwizard_mode(const std::string& mode) {
-  (*dictionary_)[kSetupWizardMode] = mode;
-}
-
-static constexpr char kQemuBinaryDir[] = "qemu_binary_dir";
-std::string CuttlefishConfig::qemu_binary_dir() const {
-  return (*dictionary_)[kQemuBinaryDir].asString();
-}
-void CuttlefishConfig::set_qemu_binary_dir(const std::string& qemu_binary_dir) {
-  (*dictionary_)[kQemuBinaryDir] = qemu_binary_dir;
-}
-
 static constexpr char kCrosvmBinary[] = "crosvm_binary";
 std::string CuttlefishConfig::crosvm_binary() const {
   return (*dictionary_)[kCrosvmBinary].asString();
@@ -313,121 +178,12 @@
   (*dictionary_)[kCrosvmBinary] = crosvm_binary;
 }
 
-static constexpr char kGem5BinaryDir[] = "gem5_binary_dir";
-std::string CuttlefishConfig::gem5_binary_dir() const {
-  return (*dictionary_)[kGem5BinaryDir].asString();
+static constexpr char kGem5DebugFlags[] = "gem5_debug_flags";
+std::string CuttlefishConfig::gem5_debug_flags() const {
+  return (*dictionary_)[kGem5DebugFlags].asString();
 }
-void CuttlefishConfig::set_gem5_binary_dir(const std::string& gem5_binary_dir) {
-  (*dictionary_)[kGem5BinaryDir] = gem5_binary_dir;
-}
-
-static constexpr char kEnableGnssGrpcProxy[] = "enable_gnss_grpc_proxy";
-void CuttlefishConfig::set_enable_gnss_grpc_proxy(const bool enable_gnss_grpc_proxy) {
-  (*dictionary_)[kEnableGnssGrpcProxy] = enable_gnss_grpc_proxy;
-}
-bool CuttlefishConfig::enable_gnss_grpc_proxy() const {
-  return (*dictionary_)[kEnableGnssGrpcProxy].asBool();
-}
-
-static constexpr char kEnableSandbox[] = "enable_sandbox";
-void CuttlefishConfig::set_enable_sandbox(const bool enable_sandbox) {
-  (*dictionary_)[kEnableSandbox] = enable_sandbox;
-}
-bool CuttlefishConfig::enable_sandbox() const {
-  return (*dictionary_)[kEnableSandbox].asBool();
-}
-
-static constexpr char kSeccompPolicyDir[] = "seccomp_policy_dir";
-void CuttlefishConfig::set_seccomp_policy_dir(const std::string& seccomp_policy_dir) {
-  if (seccomp_policy_dir.empty()) {
-    (*dictionary_)[kSeccompPolicyDir] = seccomp_policy_dir;
-    return;
-  }
-  SetPath(kSeccompPolicyDir, seccomp_policy_dir);
-}
-std::string CuttlefishConfig::seccomp_policy_dir() const {
-  return (*dictionary_)[kSeccompPolicyDir].asString();
-}
-
-static constexpr char kEnableWebRTC[] = "enable_webrtc";
-void CuttlefishConfig::set_enable_webrtc(bool enable_webrtc) {
-  (*dictionary_)[kEnableWebRTC] = enable_webrtc;
-}
-bool CuttlefishConfig::enable_webrtc() const {
-  return (*dictionary_)[kEnableWebRTC].asBool();
-}
-
-static constexpr char kEnableVehicleHalServer[] = "enable_vehicle_hal_server";
-void CuttlefishConfig::set_enable_vehicle_hal_grpc_server(bool enable_vehicle_hal_grpc_server) {
-  (*dictionary_)[kEnableVehicleHalServer] = enable_vehicle_hal_grpc_server;
-}
-bool CuttlefishConfig::enable_vehicle_hal_grpc_server() const {
-  return (*dictionary_)[kEnableVehicleHalServer].asBool();
-}
-
-static constexpr char kWebRTCAssetsDir[] = "webrtc_assets_dir";
-void CuttlefishConfig::set_webrtc_assets_dir(const std::string& webrtc_assets_dir) {
-  (*dictionary_)[kWebRTCAssetsDir] = webrtc_assets_dir;
-}
-std::string CuttlefishConfig::webrtc_assets_dir() const {
-  return (*dictionary_)[kWebRTCAssetsDir].asString();
-}
-
-static constexpr char kWebRTCEnableADBWebSocket[] =
-    "webrtc_enable_adb_websocket";
-void CuttlefishConfig::set_webrtc_enable_adb_websocket(bool enable) {
-    (*dictionary_)[kWebRTCEnableADBWebSocket] = enable;
-}
-bool CuttlefishConfig::webrtc_enable_adb_websocket() const {
-    return (*dictionary_)[kWebRTCEnableADBWebSocket].asBool();
-}
-
-static constexpr char kRestartSubprocesses[] = "restart_subprocesses";
-bool CuttlefishConfig::restart_subprocesses() const {
-  return (*dictionary_)[kRestartSubprocesses].asBool();
-}
-void CuttlefishConfig::set_restart_subprocesses(bool restart_subprocesses) {
-  (*dictionary_)[kRestartSubprocesses] = restart_subprocesses;
-}
-
-static constexpr char kRunAsDaemon[] = "run_as_daemon";
-bool CuttlefishConfig::run_as_daemon() const {
-  return (*dictionary_)[kRunAsDaemon].asBool();
-}
-void CuttlefishConfig::set_run_as_daemon(bool run_as_daemon) {
-  (*dictionary_)[kRunAsDaemon] = run_as_daemon;
-}
-
-static constexpr char kDataPolicy[] = "data_policy";
-std::string CuttlefishConfig::data_policy() const {
-  return (*dictionary_)[kDataPolicy].asString();
-}
-void CuttlefishConfig::set_data_policy(const std::string& data_policy) {
-  (*dictionary_)[kDataPolicy] = data_policy;
-}
-
-static constexpr char kBlankDataImageMb[] = "blank_data_image_mb";
-int CuttlefishConfig::blank_data_image_mb() const {
-  return (*dictionary_)[kBlankDataImageMb].asInt();
-}
-void CuttlefishConfig::set_blank_data_image_mb(int blank_data_image_mb) {
-  (*dictionary_)[kBlankDataImageMb] = blank_data_image_mb;
-}
-
-static constexpr char kBootloader[] = "bootloader";
-std::string CuttlefishConfig::bootloader() const {
-  return (*dictionary_)[kBootloader].asString();
-}
-void CuttlefishConfig::set_bootloader(const std::string& bootloader) {
-  SetPath(kBootloader, bootloader);
-}
-
-static constexpr char kBootSlot[] = "boot_slot";
-void CuttlefishConfig::set_boot_slot(const std::string& boot_slot) {
-  (*dictionary_)[kBootSlot] = boot_slot;
-}
-std::string CuttlefishConfig::boot_slot() const {
-  return (*dictionary_)[kBootSlot].asString();
+void CuttlefishConfig::set_gem5_debug_flags(const std::string& gem5_debug_flags) {
+  (*dictionary_)[kGem5DebugFlags] = gem5_debug_flags;
 }
 
 static constexpr char kWebRTCCertsDir[] = "webrtc_certs_dir";
@@ -446,36 +202,6 @@
   return (*dictionary_)[kSigServerPort].asInt();
 }
 
-static constexpr char kWebrtcUdpPortRange[] = "webrtc_udp_port_range";
-void CuttlefishConfig::set_webrtc_udp_port_range(
-    std::pair<uint16_t, uint16_t> range) {
-  Json::Value arr(Json::ValueType::arrayValue);
-  arr[0] = range.first;
-  arr[1] = range.second;
-  (*dictionary_)[kWebrtcUdpPortRange] = arr;
-}
-std::pair<uint16_t, uint16_t> CuttlefishConfig::webrtc_udp_port_range() const {
-  std::pair<uint16_t, uint16_t> ret;
-  ret.first = (*dictionary_)[kWebrtcUdpPortRange][0].asInt();
-  ret.second = (*dictionary_)[kWebrtcUdpPortRange][1].asInt();
-  return ret;
-}
-
-static constexpr char kWebrtcTcpPortRange[] = "webrtc_tcp_port_range";
-void CuttlefishConfig::set_webrtc_tcp_port_range(
-    std::pair<uint16_t, uint16_t> range) {
-  Json::Value arr(Json::ValueType::arrayValue);
-  arr[0] = range.first;
-  arr[1] = range.second;
-  (*dictionary_)[kWebrtcTcpPortRange] = arr;
-}
-std::pair<uint16_t, uint16_t> CuttlefishConfig::webrtc_tcp_port_range() const {
-  std::pair<uint16_t, uint16_t> ret;
-  ret.first = (*dictionary_)[kWebrtcTcpPortRange][0].asInt();
-  ret.second = (*dictionary_)[kWebrtcTcpPortRange][1].asInt();
-  return ret;
-}
-
 static constexpr char kSigServerAddress[] = "webrtc_sig_server_addr";
 void CuttlefishConfig::set_sig_server_address(const std::string& addr) {
   (*dictionary_)[kSigServerAddress] = addr;
@@ -509,41 +235,6 @@
   return (*dictionary_)[kSigServerStrict].asBool();
 }
 
-static constexpr char kSigServerHeadersPath[] =
-    "webrtc_sig_server_headers_path";
-void CuttlefishConfig::set_sig_server_headers_path(const std::string& path) {
-  SetPath(kSigServerHeadersPath, path);
-}
-std::string CuttlefishConfig::sig_server_headers_path() const {
-  return (*dictionary_)[kSigServerHeadersPath].asString();
-}
-
-static constexpr char kRunModemSimulator[] = "enable_modem_simulator";
-bool CuttlefishConfig::enable_modem_simulator() const {
-  return (*dictionary_)[kRunModemSimulator].asBool();
-}
-void CuttlefishConfig::set_enable_modem_simulator(bool enable_modem_simulator) {
-  (*dictionary_)[kRunModemSimulator] = enable_modem_simulator;
-}
-
-static constexpr char kModemSimulatorInstanceNumber[] =
-    "modem_simulator_instance_number";
-void CuttlefishConfig::set_modem_simulator_instance_number(
-    int instance_number) {
-  (*dictionary_)[kModemSimulatorInstanceNumber] = instance_number;
-}
-int CuttlefishConfig::modem_simulator_instance_number() const {
-  return (*dictionary_)[kModemSimulatorInstanceNumber].asInt();
-}
-
-static constexpr char kModemSimulatorSimType[] = "modem_simulator_sim_type";
-void CuttlefishConfig::set_modem_simulator_sim_type(int sim_type) {
-  (*dictionary_)[kModemSimulatorSimType] = sim_type;
-}
-int CuttlefishConfig::modem_simulator_sim_type() const {
-  return (*dictionary_)[kModemSimulatorSimType].asInt();
-}
-
 static constexpr char kHostToolsVersion[] = "host_tools_version";
 void CuttlefishConfig::set_host_tools_version(
     const std::map<std::string, uint32_t>& versions) {
@@ -565,12 +256,28 @@
   return versions;
 }
 
-static constexpr char kGuestEnforceSecurity[] = "guest_enforce_security";
-void CuttlefishConfig::set_guest_enforce_security(bool guest_enforce_security) {
-  (*dictionary_)[kGuestEnforceSecurity] = guest_enforce_security;
+static constexpr char kenableHostUwb[] = "enable_host_uwb";
+void CuttlefishConfig::set_enable_host_uwb(bool enable_host_uwb) {
+  (*dictionary_)[kenableHostUwb] = enable_host_uwb;
 }
-bool CuttlefishConfig::guest_enforce_security() const {
-  return (*dictionary_)[kGuestEnforceSecurity].asBool();
+bool CuttlefishConfig::enable_host_uwb() const {
+  return (*dictionary_)[kenableHostUwb].asBool();
+}
+
+static constexpr char kenableHostUwbConnector[] = "enable_host_uwb_connector";
+void CuttlefishConfig::set_enable_host_uwb_connector(bool enable_host_uwb) {
+  (*dictionary_)[kenableHostUwbConnector] = enable_host_uwb;
+}
+bool CuttlefishConfig::enable_host_uwb_connector() const {
+  return (*dictionary_)[kenableHostUwbConnector].asBool();
+}
+
+static constexpr char kPicaUciPort[] = "pica_uci_port";
+int CuttlefishConfig::pica_uci_port() const {
+  return (*dictionary_)[kPicaUciPort].asInt();
+}
+void CuttlefishConfig::set_pica_uci_port(int pica_uci_port) {
+  (*dictionary_)[kPicaUciPort] = pica_uci_port;
 }
 
 static constexpr char kenableHostBluetooth[] = "enable_host_bluetooth";
@@ -581,6 +288,29 @@
   return (*dictionary_)[kenableHostBluetooth].asBool();
 }
 
+static constexpr char kenableHostBluetoothConnector[] = "enable_host_bluetooth_connector";
+void CuttlefishConfig::set_enable_host_bluetooth_connector(bool enable_host_bluetooth) {
+  (*dictionary_)[kenableHostBluetoothConnector] = enable_host_bluetooth;
+}
+bool CuttlefishConfig::enable_host_bluetooth_connector() const {
+  return (*dictionary_)[kenableHostBluetoothConnector].asBool();
+}
+
+static constexpr char kNetsimRadios[] = "netsim_radios";
+
+void CuttlefishConfig::netsim_radio_enable(NetsimRadio flag) {
+  if (dictionary_->isMember(kNetsimRadios)) {
+    // OR the radio to current set of radios
+    (*dictionary_)[kNetsimRadios] = (*dictionary_)[kNetsimRadios].asInt() | flag;
+  } else {
+    (*dictionary_)[kNetsimRadios] = flag;
+  }
+}
+
+bool CuttlefishConfig::netsim_radio_enabled(NetsimRadio flag) const {
+  return (*dictionary_)[kNetsimRadios].asInt() & flag;
+}
+
 static constexpr char kEnableMetrics[] = "enable_metrics";
 void CuttlefishConfig::set_enable_metrics(std::string enable_metrics) {
   (*dictionary_)[kEnableMetrics] = kUnknown;
@@ -643,65 +373,12 @@
   return bootconfig;
 }
 
-static constexpr char kRilDns[] = "ril_dns";
-void CuttlefishConfig::set_ril_dns(const std::string& ril_dns) {
-  (*dictionary_)[kRilDns] = ril_dns;
+static constexpr char kVirtioMac80211Hwsim[] = "virtio_mac80211_hwsim";
+void CuttlefishConfig::set_virtio_mac80211_hwsim(bool virtio_mac80211_hwsim) {
+  (*dictionary_)[kVirtioMac80211Hwsim] = virtio_mac80211_hwsim;
 }
-std::string CuttlefishConfig::ril_dns() const {
-  return (*dictionary_)[kRilDns].asString();
-}
-
-static constexpr char kKgdb[] = "kgdb";
-void CuttlefishConfig::set_kgdb(bool kgdb) {
-  (*dictionary_)[kKgdb] = kgdb;
-}
-bool CuttlefishConfig::kgdb() const {
-  return (*dictionary_)[kKgdb].asBool();
-}
-
-static constexpr char kEnableMinimalMode[] = "enable_minimal_mode";
-bool CuttlefishConfig::enable_minimal_mode() const {
-  return (*dictionary_)[kEnableMinimalMode].asBool();
-}
-void CuttlefishConfig::set_enable_minimal_mode(bool enable_minimal_mode) {
-  (*dictionary_)[kEnableMinimalMode] = enable_minimal_mode;
-}
-
-static constexpr char kConsole[] = "console";
-void CuttlefishConfig::set_console(bool console) {
-  (*dictionary_)[kConsole] = console;
-}
-bool CuttlefishConfig::console() const {
-  return (*dictionary_)[kConsole].asBool();
-}
-std::string CuttlefishConfig::console_dev() const {
-  auto can_use_virtio_console = !kgdb() && !use_bootloader();
-  std::string console_dev;
-  if (can_use_virtio_console ||
-      vm_manager() == vm_manager::Gem5Manager::name()) {
-    // If kgdb and the bootloader are disabled, the Android serial console
-    // spawns on a virtio-console port. If the bootloader is enabled, virtio
-    // console can't be used since uboot doesn't support it.
-    console_dev = "hvc1";
-  } else {
-    // crosvm ARM does not support ttyAMA. ttyAMA is a part of ARM arch.
-    Arch target = target_arch();
-    if ((target == Arch::Arm64 || target == Arch::Arm) &&
-        vm_manager() != vm_manager::CrosvmManager::name()) {
-      console_dev = "ttyAMA0";
-    } else {
-      console_dev = "ttyS0";
-    }
-  }
-  return console_dev;
-}
-
-static constexpr char kVhostNet[] = "vhost_net";
-void CuttlefishConfig::set_vhost_net(bool vhost_net) {
-  (*dictionary_)[kVhostNet] = vhost_net;
-}
-bool CuttlefishConfig::vhost_net() const {
-  return (*dictionary_)[kVhostNet].asBool();
+bool CuttlefishConfig::virtio_mac80211_hwsim() const {
+  return (*dictionary_)[kVirtioMac80211Hwsim].asBool();
 }
 
 static constexpr char kVhostUserMac80211Hwsim[] = "vhost_user_mac80211_hwsim";
@@ -744,6 +421,22 @@
   return (*dictionary_)[kWmediumdConfig].asString();
 }
 
+static constexpr char kRootcanalArgs[] = "rootcanal_args";
+void CuttlefishConfig::set_rootcanal_args(const std::string& rootcanal_args) {
+  Json::Value args_json_obj(Json::arrayValue);
+  for (const auto& arg : android::base::Split(rootcanal_args, " ")) {
+    args_json_obj.append(arg);
+  }
+  (*dictionary_)[kRootcanalArgs] = args_json_obj;
+}
+std::vector<std::string> CuttlefishConfig::rootcanal_args() const {
+  std::vector<std::string> rootcanal_args;
+  for (const Json::Value& arg : (*dictionary_)[kRootcanalArgs]) {
+    rootcanal_args.push_back(arg.asString());
+  }
+  return rootcanal_args;
+}
+
 static constexpr char kRootcanalHciPort[] = "rootcanal_hci_port";
 int CuttlefishConfig::rootcanal_hci_port() const {
   return (*dictionary_)[kRootcanalHciPort].asInt();
@@ -760,6 +453,15 @@
   (*dictionary_)[kRootcanalLinkPort] = rootcanal_link_port;
 }
 
+static constexpr char kRootcanalLinkBlePort[] = "rootcanal_link_ble_port";
+int CuttlefishConfig::rootcanal_link_ble_port() const {
+  return (*dictionary_)[kRootcanalLinkBlePort].asInt();
+}
+void CuttlefishConfig::set_rootcanal_link_ble_port(
+    int rootcanal_link_ble_port) {
+  (*dictionary_)[kRootcanalLinkBlePort] = rootcanal_link_ble_port;
+}
+
 static constexpr char kRootcanalTestPort[] = "rootcanal_test_port";
 int CuttlefishConfig::rootcanal_test_port() const {
   return (*dictionary_)[kRootcanalTestPort].asInt();
@@ -789,72 +491,6 @@
       DefaultHostArtifactsPath(rootcanal_default_commands_file);
 }
 
-static constexpr char kRecordScreen[] = "record_screen";
-void CuttlefishConfig::set_record_screen(bool record_screen) {
-  (*dictionary_)[kRecordScreen] = record_screen;
-}
-bool CuttlefishConfig::record_screen() const {
-  return (*dictionary_)[kRecordScreen].asBool();
-}
-
-static constexpr char kSmt[] = "smt";
-void CuttlefishConfig::set_smt(bool smt) {
-  (*dictionary_)[kSmt] = smt;
-}
-bool CuttlefishConfig::smt() const {
-  return (*dictionary_)[kSmt].asBool();
-}
-
-static constexpr char kEnableAudio[] = "enable_audio";
-void CuttlefishConfig::set_enable_audio(bool enable) {
-  (*dictionary_)[kEnableAudio] = enable;
-}
-bool CuttlefishConfig::enable_audio() const {
-  return (*dictionary_)[kEnableAudio].asBool();
-}
-
-static constexpr char kProtectedVm[] = "protected_vm";
-void CuttlefishConfig::set_protected_vm(bool protected_vm) {
-  (*dictionary_)[kProtectedVm] = protected_vm;
-}
-bool CuttlefishConfig::protected_vm() const {
-  return (*dictionary_)[kProtectedVm].asBool();
-}
-
-static constexpr char kTargetArch[] = "target_arch";
-void CuttlefishConfig::set_target_arch(Arch target_arch) {
-  (*dictionary_)[kTargetArch] = static_cast<int>(target_arch);
-}
-Arch CuttlefishConfig::target_arch() const {
-  return static_cast<Arch>((*dictionary_)[kTargetArch].asInt());
-}
-
-static constexpr char kBootconfigSupported[] = "bootconfig_supported";
-bool CuttlefishConfig::bootconfig_supported() const {
-  return (*dictionary_)[kBootconfigSupported].asBool();
-}
-void CuttlefishConfig::set_bootconfig_supported(bool bootconfig_supported) {
-  (*dictionary_)[kBootconfigSupported] = bootconfig_supported;
-}
-
-static constexpr char kUserdataFormat[] = "userdata_format";
-std::string CuttlefishConfig::userdata_format() const {
-  return (*dictionary_)[kUserdataFormat].asString();
-}
-void CuttlefishConfig::set_userdata_format(const std::string& userdata_format) {
-  auto fmt = userdata_format;
-  std::transform(fmt.begin(), fmt.end(), fmt.begin(), ::tolower);
-  (*dictionary_)[kUserdataFormat] = fmt;
-}
-
-static constexpr char kApImageDevPath[] = "ap_image_dev_path";
-std::string CuttlefishConfig::ap_image_dev_path() const {
-  return (*dictionary_)[kApImageDevPath].asString();
-}
-void CuttlefishConfig::set_ap_image_dev_path(const std::string& dev_path) {
-  (*dictionary_)[kApImageDevPath] = dev_path;
-}
-
 /*static*/ CuttlefishConfig* CuttlefishConfig::BuildConfigImpl(
     const std::string& path) {
   auto ret = new CuttlefishConfig();
@@ -942,8 +578,23 @@
   return AbsolutePath(assembly_dir() + "/" + file_name);
 }
 
-std::string CuttlefishConfig::os_composite_disk_path() const {
-  return AssemblyPath("os_composite.img");
+std::string CuttlefishConfig::instances_uds_dir() const {
+  // Try to use /tmp/cf_avd_{uid}/ as the UDS directory.
+  // If that path exists but is not accessible, fall back to the (legacy) instances directory.
+
+  auto defaultPath = AbsolutePath("/tmp/cf_avd_" + std::to_string(getuid()));
+
+  if (!DirectoryExists(defaultPath) ||
+      CanAccess(defaultPath, R_OK | W_OK | X_OK)) {
+    return defaultPath;
+  }
+
+  return instances_dir();
+}
+
+std::string CuttlefishConfig::InstancesUdsPath(
+    const std::string& file_name) const {
+  return AbsolutePath(instances_uds_dir() + "/" + file_name);
 }
 
 CuttlefishConfig::MutableInstanceSpecific CuttlefishConfig::ForInstance(int num) {
@@ -976,6 +627,7 @@
   std::vector<std::string> result;
   for (const auto& instance : Instances()) {
     result.push_back(instance.instance_dir());
+    result.push_back(instance.instance_uds_dir());
   }
   return result;
 }
diff --git a/host/libs/config/cuttlefish_config.h b/host/libs/config/cuttlefish_config.h
index 78861e0..16151b3 100644
--- a/host/libs/config/cuttlefish_config.h
+++ b/host/libs/config/cuttlefish_config.h
@@ -26,6 +26,7 @@
 #include <vector>
 
 #include "common/libs/utils/environment.h"
+#include "common/libs/utils/result.h"
 #include "host/libs/config/config_fragment.h"
 
 namespace Json {
@@ -38,6 +39,7 @@
 
 constexpr char kDefaultUuidPrefix[] = "699acfc4-c8c4-11e7-882b-5065f31dc1";
 constexpr char kCuttlefishConfigEnvVarName[] = "CUTTLEFISH_CONFIG_FILE";
+constexpr char kCuttlefishInstanceEnvVarName[] = "CUTTLEFISH_INSTANCE";
 constexpr char kVsocUserPrefix[] = "vsoc-";
 constexpr char kCvdNamePrefix[] = "cvd-";
 constexpr char kBootStartedMessage[] ="VIRTUAL_DEVICE_BOOT_STARTED";
@@ -49,10 +51,16 @@
     "VIRTUAL_DEVICE_NETWORK_WIFI_CONNECTED";
 constexpr char kEthernetConnectedMessage[] =
     "VIRTUAL_DEVICE_NETWORK_ETHERNET_CONNECTED";
+// TODO(b/131864854): Replace this with a string less likely to change
+constexpr char kAdbdStartedMessage[] =
+    "init: starting service 'adbd'...";
+constexpr char kFastbootdStartedMessage[] =
+    "init: starting service 'fastbootd'...";
 constexpr char kScreenChangedMessage[] = "VIRTUAL_DEVICE_SCREEN_CHANGED";
 constexpr char kDisplayPowerModeChangedMessage[] =
     "VIRTUAL_DEVICE_DISPLAY_POWER_MODE_CHANGED";
 constexpr char kInternalDirName[] = "internal";
+constexpr char kGrpcSocketDirName[] = "grpc_socket";
 constexpr char kSharedDirName[] = "shared";
 constexpr char kLogDirName[] = "logs";
 constexpr char kCrosvmVarEmptyDir[] = "/var/empty";
@@ -94,32 +102,12 @@
   std::string assembly_dir() const;
   std::string AssemblyPath(const std::string&) const;
 
-  std::string os_composite_disk_path() const;
+  std::string instances_uds_dir() const;
+  std::string InstancesUdsPath(const std::string&) const;
 
   std::string vm_manager() const;
   void set_vm_manager(const std::string& name);
 
-  std::string gpu_mode() const;
-  void set_gpu_mode(const std::string& name);
-
-  std::string gpu_capture_binary() const;
-  void set_gpu_capture_binary(const std::string&);
-
-  std::string hwcomposer() const;
-  void set_hwcomposer(const std::string&);
-
-  void set_enable_gpu_udmabuf(const bool enable_gpu_udmabuf);
-  bool enable_gpu_udmabuf() const;
-
-  void set_enable_gpu_angle(const bool enable_gpu_angle);
-  bool enable_gpu_angle() const;
-
-  int cpus() const;
-  void set_cpus(int cpus);
-
-  int memory_mb() const;
-  void set_memory_mb(int memory_mb);
-
   struct DisplayConfig {
     int width;
     int height;
@@ -127,84 +115,38 @@
     int refresh_rate_hz;
   };
 
-  std::vector<DisplayConfig> display_configs() const;
-  void set_display_configs(const std::vector<DisplayConfig>& display_configs);
-
-  int gdb_port() const;
-  void set_gdb_port(int gdb_port);
-
-  bool deprecated_boot_completed() const;
-  void set_deprecated_boot_completed(bool deprecated_boot_completed);
-
-  void set_cuttlefish_env_path(const std::string& path);
-  std::string cuttlefish_env_path() const;
-
   void set_secure_hals(const std::set<std::string>& hals);
   std::set<SecureHal> secure_hals() const;
 
-  void set_setupwizard_mode(const std::string& title);
-  std::string setupwizard_mode() const;
-
-  void set_qemu_binary_dir(const std::string& qemu_binary_dir);
-  std::string qemu_binary_dir() const;
-
   void set_crosvm_binary(const std::string& crosvm_binary);
   std::string crosvm_binary() const;
 
-  void set_gem5_binary_dir(const std::string& gem5_binary_dir);
-  std::string gem5_binary_dir() const;
+  void set_gem5_debug_flags(const std::string& gem5_debug_flags);
+  std::string gem5_debug_flags() const;
 
-  void set_enable_sandbox(const bool enable_sandbox);
-  bool enable_sandbox() const;
+  void set_enable_host_uwb(bool enable_host_uwb);
+  bool enable_host_uwb() const;
 
-  void set_seccomp_policy_dir(const std::string& seccomp_policy_dir);
-  std::string seccomp_policy_dir() const;
-
-  void set_enable_webrtc(bool enable_webrtc);
-  bool enable_webrtc() const;
-
-  void set_webrtc_assets_dir(const std::string& webrtc_assets_dir);
-  std::string webrtc_assets_dir() const;
-
-  void set_webrtc_enable_adb_websocket(bool enable);
-  bool webrtc_enable_adb_websocket() const;
-
-  void set_enable_vehicle_hal_grpc_server(bool enable_vhal_server);
-  bool enable_vehicle_hal_grpc_server() const;
-
-  void set_restart_subprocesses(bool restart_subprocesses);
-  bool restart_subprocesses() const;
-
-  void set_enable_gnss_grpc_proxy(const bool enable_gnss_grpc_proxy);
-  bool enable_gnss_grpc_proxy() const;
-
-  void set_run_as_daemon(bool run_as_daemon);
-  bool run_as_daemon() const;
-
-  void set_data_policy(const std::string& data_policy);
-  std::string data_policy() const;
-
-  void set_blank_data_image_mb(int blank_data_image_mb);
-  int blank_data_image_mb() const;
-
-  void set_bootloader(const std::string& bootloader_path);
-  std::string bootloader() const;
-
-  // TODO (b/163575714) add virtio console support to the bootloader so the
-  // virtio console path for the console device can be taken again. When that
-  // happens, this function can be deleted along with all the code paths it
-  // forces.
-  bool use_bootloader() const { return true; };
-
-  void set_boot_slot(const std::string& boot_slot);
-  std::string boot_slot() const;
-
-  void set_guest_enforce_security(bool guest_enforce_security);
-  bool guest_enforce_security() const;
+  void set_enable_host_uwb_connector(bool enable_host_uwb);
+  bool enable_host_uwb_connector() const;
 
   void set_enable_host_bluetooth(bool enable_host_bluetooth);
   bool enable_host_bluetooth() const;
 
+  // Bluetooth is enabled by bt_connector and rootcanal
+  void set_enable_host_bluetooth_connector(bool enable_host_bluetooth);
+  bool enable_host_bluetooth_connector() const;
+
+  // Flags for the set of radios that are connected to netsim
+  enum NetsimRadio {
+    Bluetooth = 0b00000001,
+    Wifi      = 0b00000010,
+    Uwb       = 0b00000100,
+  };
+
+  void netsim_radio_enable(NetsimRadio flag);
+  bool netsim_radio_enabled(NetsimRadio flag) const;
+
   enum Answer {
     kUnknown = 0,
     kYes,
@@ -232,14 +174,6 @@
   void set_sig_server_port(int port);
   int sig_server_port() const;
 
-  // The range of UDP ports available for webrtc sessions.
-  void set_webrtc_udp_port_range(std::pair<uint16_t, uint16_t> range);
-  std::pair<uint16_t, uint16_t> webrtc_udp_port_range() const;
-
-  // The range of TCP ports available for webrtc sessions.
-  void set_webrtc_tcp_port_range(std::pair<uint16_t, uint16_t> range);
-  std::pair<uint16_t, uint16_t> webrtc_tcp_port_range() const;
-
   // The address of the signaling server
   void set_sig_server_address(const std::string& addr);
   std::string sig_server_address() const;
@@ -259,42 +193,11 @@
   void set_sig_server_strict(bool strict);
   bool sig_server_strict() const;
 
-  // A file containing http headers to include in the connection to the
-  // signaling server
-  void set_sig_server_headers_path(const std::string& path);
-  std::string sig_server_headers_path() const;
-
-  // The dns address of mobile network (RIL)
-  void set_ril_dns(const std::string& ril_dns);
-  std::string ril_dns() const;
-
-  // KGDB configuration for kernel debugging
-  void set_kgdb(bool kgdb);
-  bool kgdb() const;
-
-  // Serial console
-  void set_console(bool console);
-  bool console() const;
-  std::string console_dev() const;
-
-  // Configuration flags for a minimal device
-  bool enable_minimal_mode() const;
-  void set_enable_minimal_mode(bool enable_minimal_mode);
-
-  void set_enable_modem_simulator(bool enable_modem_simulator);
-  bool enable_modem_simulator() const;
-
-  void set_modem_simulator_instance_number(int instance_numbers);
-  int modem_simulator_instance_number() const;
-
-  void set_modem_simulator_sim_type(int sim_type);
-  int modem_simulator_sim_type() const;
-
   void set_host_tools_version(const std::map<std::string, uint32_t>&);
   std::map<std::string, uint32_t> host_tools_version() const;
 
-  void set_vhost_net(bool vhost_net);
-  bool vhost_net() const;
+  void set_virtio_mac80211_hwsim(bool virtio_mac80211_hwsim);
+  bool virtio_mac80211_hwsim() const;
 
   void set_vhost_user_mac80211_hwsim(const std::string& path);
   std::string vhost_user_mac80211_hwsim() const;
@@ -311,12 +214,21 @@
   void set_wmediumd_config(const std::string& path);
   std::string wmediumd_config() const;
 
+  void set_pica_uci_port(int pica_uci_port);
+  int pica_uci_port() const;
+
+  void set_rootcanal_args(const std::string& rootcanal_args);
+  std::vector<std::string> rootcanal_args() const;
+
   void set_rootcanal_hci_port(int rootcanal_hci_port);
   int rootcanal_hci_port() const;
 
   void set_rootcanal_link_port(int rootcanal_link_port);
   int rootcanal_link_port() const;
 
+  void set_rootcanal_link_ble_port(int rootcanal_link_ble_port);
+  int rootcanal_link_ble_port() const;
+
   void set_rootcanal_test_port(int rootcanal_test_port);
   int rootcanal_test_port() const;
 
@@ -327,27 +239,6 @@
       const std::string& rootcanal_default_commands_file);
   std::string rootcanal_default_commands_file() const;
 
-  void set_record_screen(bool record_screen);
-  bool record_screen() const;
-
-  void set_smt(bool smt);
-  bool smt() const;
-
-  void set_enable_audio(bool enable);
-  bool enable_audio() const;
-
-  void set_protected_vm(bool protected_vm);
-  bool protected_vm() const;
-
-  void set_target_arch(Arch target_arch);
-  Arch target_arch() const;
-
-  void set_bootconfig_supported(bool bootconfig_supported);
-  bool bootconfig_supported() const;
-
-  void set_userdata_format(const std::string& userdata_format);
-  std::string userdata_format() const;
-
   // The path of an AP image in composite disk
   std::string ap_image_dev_path() const;
   void set_ap_image_dev_path(const std::string& dev_path);
@@ -395,12 +286,12 @@
     // Port number to connect to the touch server on the host. (Only
     // operational if QEMU is the vmm.)
     int touch_server_port() const;
-    // Port number to connect to the vehicle HAL server on the host
-    int vehicle_hal_server_port() const;
     // Port number to connect to the audiocontrol server on the guest
     int audiocontrol_server_port() const;
     // Port number to connect to the adb server on the host
     int adb_host_port() const;
+    // Port number to connect to the fastboot server on the host
+    int fastboot_host_port() const;
     // Device-specific ID to distinguish modem simulators. Must be 4 digits.
     int modem_simulator_host_id() const;
     // Port number to connect to the gnss grpc proxy server on the host
@@ -411,10 +302,18 @@
 
     std::string adb_device_name() const;
     std::string gnss_file_path() const;
+    std::string fixed_location_file_path() const;
     std::string mobile_bridge_name() const;
     std::string mobile_tap_name() const;
+    std::string mobile_mac() const;
+    std::string wifi_bridge_name() const;
     std::string wifi_tap_name() const;
+    std::string wifi_mac() const;
+    bool use_bridged_wifi_tap() const;
     std::string ethernet_tap_name() const;
+    std::string ethernet_bridge_name() const;
+    std::string ethernet_mac() const;
+    std::string ethernet_ipv6() const;
     uint32_t session_id() const;
     bool use_allocd() const;
     int vsock_guest_cid() const;
@@ -424,21 +323,30 @@
 
     // Returns the path to a file with the given name in the instance
     // directory..
-    std::string PerInstancePath(const char* file_name) const;
-    std::string PerInstanceInternalPath(const char* file_name) const;
+    std::string PerInstancePath(const std::string& file_name) const;
+    std::string PerInstanceInternalPath(const std::string& file_name) const;
     std::string PerInstanceLogPath(const std::string& file_name) const;
 
     std::string instance_dir() const;
 
     std::string instance_internal_dir() const;
 
+    // Returns the Unix domain socket path with the given name. Because of the
+    // length limitation on Unix domain socket names, it must live in a
+    // different directory than the normal per-instance path.
+    std::string PerInstanceUdsPath(const std::string& file_name) const;
+    std::string PerInstanceInternalUdsPath(const std::string& file_name) const;
+    std::string PerInstanceGrpcSocketPath(const std::string& socket_name) const;
+
+    std::string instance_uds_dir() const;
+
+    std::string instance_internal_uds_dir() const;
+
     std::string touch_socket_path(int screen_idx) const;
     std::string keyboard_socket_path() const;
     std::string switches_socket_path() const;
     std::string frames_socket_path() const;
 
-    int confui_host_vsock_port() const;
-
     std::string access_kregistry_path() const;
 
     std::string hwcomposer_pmem_path() const;
@@ -469,10 +377,34 @@
 
     std::string persistent_composite_disk_path() const;
 
+    std::string persistent_ap_composite_disk_path() const;
+
+    std::string os_composite_disk_path() const;
+
+    std::string ap_composite_disk_path() const;
+
     std::string uboot_env_image_path() const;
 
+    std::string ap_uboot_env_image_path() const;
+
+    std::string ap_esp_image_path() const;
+
+    std::string otheros_esp_image_path() const;
+
+    std::string otheros_esp_grub_config() const;
+
+    std::string ap_esp_grub_config() const;
+
     std::string audio_server_path() const;
 
+    enum class BootFlow {
+      Android,
+      Linux,
+      Fuchsia
+    };
+
+    BootFlow boot_flow() const;
+
     // modem simulator related
     std::string modem_simulator_ports() const;
 
@@ -493,8 +425,28 @@
     // Whether this instance should start a rootcanal instance
     bool start_rootcanal() const;
 
-    // Whether this instance should start an ap instance
-    bool start_ap() const;
+    // Whether this instance should start a pica instance
+    bool start_pica() const;
+
+    // Whether this instance should start a netsim instance
+    bool start_netsim() const;
+
+    enum class APBootFlow {
+      // Not starting AP at all (for example not the 1st instance)
+      None,
+      // Generating ESP and using U-BOOT to boot AP
+      Grub,
+      // Using the legacy way to boot AP in case we cannot generate an ESP image.
+      // Currently there is only one case where we cannot do it: when users
+      // have Ubuntu Bionic, which doesn't have monolithic binaries in the
+      // grub-efi-arm64-bin (for arm64) and grub-efi-ia32-bin (x86) deb packages.
+      // TODO(b/260337906): check whether it is possible to add grub binaries to AOSP
+      // to deliver the proper grub environment
+      // TODO(b/260338443): use grub-mkimage from grub-common in case we cannot overcome
+      // legal issues
+      LegacyDirect
+    };
+    APBootFlow ap_boot_flow() const;
 
     // Wifi MAC address inside the guest
     int wifi_mac_prefix() const;
@@ -505,7 +457,136 @@
 
     std::string vbmeta_path() const;
 
+    std::string ap_vbmeta_path() const;
+
     std::string id() const;
+
+    std::string gem5_binary_dir() const;
+
+    std::string gem5_checkpoint_dir() const;
+
+    // Serial console
+    bool console() const;
+    std::string console_dev() const;
+    bool enable_sandbox() const;
+
+    // KGDB configuration for kernel debugging
+    bool kgdb() const;
+
+    // TODO (b/163575714) add virtio console support to the bootloader so the
+    // virtio console path for the console device can be taken again. When that
+    // happens, this function can be deleted along with all the code paths it
+    // forces.
+    bool use_bootloader() const;
+
+    Arch target_arch() const;
+
+    int cpus() const;
+
+    std::string data_policy() const;
+
+    int blank_data_image_mb() const;
+
+    int gdb_port() const;
+
+    std::vector<DisplayConfig> display_configs() const;
+
+    std::string grpc_socket_path() const;
+    int memory_mb() const;
+    int ddr_mem_mb() const;
+    std::string setupwizard_mode() const;
+    std::string userdata_format() const;
+    bool guest_enforce_security() const;
+    bool use_sdcard() const;
+    bool pause_in_bootloader() const;
+    bool run_as_daemon() const;
+    bool enable_audio() const;
+    bool enable_gnss_grpc_proxy() const;
+    bool enable_bootanimation() const;
+    bool record_screen() const;
+    std::string gem5_debug_file() const;
+    bool protected_vm() const;
+    bool mte() const;
+    std::string boot_slot() const;
+
+    // Kernel and bootloader logging
+    bool enable_kernel_log() const;
+    bool vhost_net() const;
+
+    // The dns address of mobile network (RIL)
+    std::string ril_dns() const;
+
+    bool enable_webrtc() const;
+    std::string webrtc_assets_dir() const;
+
+    // The range of TCP ports available for webrtc sessions.
+    std::pair<uint16_t, uint16_t> webrtc_tcp_port_range() const;
+
+    // The range of UDP ports available for webrtc sessions.
+    std::pair<uint16_t, uint16_t> webrtc_udp_port_range() const;
+
+    bool smt() const;
+    std::string crosvm_binary() const;
+    std::string seccomp_policy_dir() const;
+    std::string qemu_binary_dir() const;
+
+    // Configuration flags for a minimal device
+    bool enable_minimal_mode() const;
+    bool enable_modem_simulator() const;
+    int modem_simulator_instance_number() const;
+    int modem_simulator_sim_type() const;
+
+    std::string gpu_mode() const;
+    std::string gpu_angle_feature_overrides_enabled() const;
+    std::string gpu_angle_feature_overrides_disabled() const;
+    std::string gpu_capture_binary() const;
+    bool enable_gpu_udmabuf() const;
+
+    std::string hwcomposer() const;
+
+    bool restart_subprocesses() const;
+
+    // android artifacts
+    std::string boot_image() const;
+    std::string new_boot_image() const;
+    std::string init_boot_image() const;
+    std::string data_image() const;
+    std::string super_image() const;
+    std::string new_super_image() const;
+    std::string misc_image() const;
+    std::string new_misc_image() const;
+    std::string misc_info_txt() const;
+    std::string metadata_image() const;
+    std::string new_metadata_image() const;
+    std::string vendor_boot_image() const;
+    std::string new_vendor_boot_image() const;
+    std::string vbmeta_image() const;
+    std::string vbmeta_system_image() const;
+    std::string vbmeta_vendor_dlkm_image() const;
+    std::string new_vbmeta_vendor_dlkm_image() const;
+
+    // otheros artifacts
+    std::string otheros_esp_image() const;
+
+    // linux artifacts for otheros flow
+    std::string linux_kernel_path() const;
+    std::string linux_initramfs_path() const;
+    std::string linux_root_image() const;
+
+    std::string fuchsia_zedboot_path() const;
+    std::string fuchsia_multiboot_bin_path() const;
+    std::string fuchsia_root_image() const;
+
+    std::string custom_partition_path() const;
+
+    int blank_metadata_image_mb() const;
+    int blank_sdcard_image_mb() const;
+    std::string bootloader() const;
+    std::string initramfs_path() const;
+    std::string kernel_path() const;
+    std::string guest_android_version() const;
+    bool bootconfig_supported() const;
+    std::string filename_encryption_mode() const;
   };
 
   // A view into an existing CuttlefishConfig object for a particular instance.
@@ -527,17 +608,23 @@
     void set_keyboard_server_port(int config_server_port);
     void set_gatekeeper_vsock_port(int gatekeeper_vsock_port);
     void set_keymaster_vsock_port(int keymaster_vsock_port);
-    void set_vehicle_hal_server_port(int vehicle_server_port);
     void set_audiocontrol_server_port(int audiocontrol_server_port);
     void set_adb_host_port(int adb_host_port);
     void set_modem_simulator_host_id(int modem_simulator_id);
     void set_adb_ip_and_port(const std::string& ip_port);
-    void set_confui_host_vsock_port(int confui_host_port);
+    void set_fastboot_host_port(int fastboot_host_port);
     void set_camera_server_port(int camera_server_port);
     void set_mobile_bridge_name(const std::string& mobile_bridge_name);
     void set_mobile_tap_name(const std::string& mobile_tap_name);
+    void set_mobile_mac(const std::string& mac);
+    void set_wifi_bridge_name(const std::string& wifi_bridge_name);
     void set_wifi_tap_name(const std::string& wifi_tap_name);
+    void set_wifi_mac(const std::string& mac);
+    void set_use_bridged_wifi_tap(bool use_bridged_wifi_tap);
     void set_ethernet_tap_name(const std::string& ethernet_tap_name);
+    void set_ethernet_bridge_name(const std::string& set_ethernet_bridge_name);
+    void set_ethernet_mac(const std::string& mac);
+    void set_ethernet_ipv6(const std::string& ip);
     void set_session_id(uint32_t session_id);
     void set_use_allocd(bool use_allocd);
     void set_vsock_guest_cid(int vsock_guest_cid);
@@ -550,19 +637,129 @@
     void set_start_webrtc_sig_server_proxy(bool start);
     void set_start_wmediumd(bool start);
     void set_start_rootcanal(bool start);
-    void set_start_ap(bool start);
+    void set_start_pica(bool start);
+    void set_start_netsim(bool start);
+    void set_ap_boot_flow(InstanceSpecific::APBootFlow flow);
     // Wifi MAC address inside the guest
     void set_wifi_mac_prefix(const int wifi_mac_prefix);
     // Gnss grpc proxy server port inside the host
     void set_gnss_grpc_proxy_server_port(int gnss_grpc_proxy_server_port);
     // Gnss grpc proxy local file path
     void set_gnss_file_path(const std::string &gnss_file_path);
+    void set_fixed_location_file_path(
+        const std::string& fixed_location_file_path);
+    void set_gem5_binary_dir(const std::string& gem5_binary_dir);
+    void set_gem5_checkpoint_dir(const std::string& gem5_checkpoint_dir);
+    // Serial console
+    void set_console(bool console);
+    void set_enable_sandbox(const bool enable_sandbox);
+    void set_kgdb(bool kgdb);
+    void set_target_arch(Arch target_arch);
+    void set_cpus(int cpus);
+    void set_data_policy(const std::string& data_policy);
+    void set_blank_data_image_mb(int blank_data_image_mb);
+    void set_gdb_port(int gdb_port);
+    void set_display_configs(const std::vector<DisplayConfig>& display_configs);
+    void set_memory_mb(int memory_mb);
+    void set_ddr_mem_mb(int ddr_mem_mb);
+    Result<void> set_setupwizard_mode(const std::string& title);
+    void set_userdata_format(const std::string& userdata_format);
+    void set_guest_enforce_security(bool guest_enforce_security);
+    void set_use_sdcard(bool use_sdcard);
+    void set_pause_in_bootloader(bool pause_in_bootloader);
+    void set_run_as_daemon(bool run_as_daemon);
+    void set_enable_audio(bool enable);
+    void set_enable_gnss_grpc_proxy(const bool enable_gnss_grpc_proxy);
+    void set_enable_bootanimation(const bool enable_bootanimation);
+    void set_record_screen(bool record_screen);
+    void set_gem5_debug_file(const std::string& gem5_debug_file);
+    void set_protected_vm(bool protected_vm);
+    void set_mte(bool mte);
+    void set_boot_slot(const std::string& boot_slot);
+    void set_grpc_socket_path(const std::string& sockets);
+
+    // Kernel and bootloader logging
+    void set_enable_kernel_log(bool enable_kernel_log);
+
+    void set_enable_webrtc(bool enable_webrtc);
+    void set_webrtc_assets_dir(const std::string& webrtc_assets_dir);
+
+    // The range of TCP ports available for webrtc sessions.
+    void set_webrtc_tcp_port_range(std::pair<uint16_t, uint16_t> range);
+
+    // The range of UDP ports available for webrtc sessions.
+    void set_webrtc_udp_port_range(std::pair<uint16_t, uint16_t> range);
+
+    void set_smt(bool smt);
+    void set_crosvm_binary(const std::string& crosvm_binary);
+    void set_seccomp_policy_dir(const std::string& seccomp_policy_dir);
+    void set_qemu_binary_dir(const std::string& qemu_binary_dir);
+
+    void set_vhost_net(bool vhost_net);
+
+    // The dns address of mobile network (RIL)
+    void set_ril_dns(const std::string& ril_dns);
+
+    // Configuration flags for a minimal device
+    void set_enable_minimal_mode(bool enable_minimal_mode);
+    void set_enable_modem_simulator(bool enable_modem_simulator);
+    void set_modem_simulator_instance_number(int instance_numbers);
+    void set_modem_simulator_sim_type(int sim_type);
+
+    void set_gpu_mode(const std::string& name);
+    void set_gpu_angle_feature_overrides_enabled(const std::string& overrides);
+    void set_gpu_angle_feature_overrides_disabled(const std::string& overrides);
+    void set_gpu_capture_binary(const std::string&);
+    void set_enable_gpu_udmabuf(const bool enable_gpu_udmabuf);
+
+    void set_hwcomposer(const std::string&);
+
+    void set_restart_subprocesses(bool restart_subprocesses);
+
+    // system image files
+    void set_boot_image(const std::string& boot_image);
+    void set_new_boot_image(const std::string& new_boot_image);
+    void set_init_boot_image(const std::string& init_boot_image);
+    void set_data_image(const std::string& data_image);
+    void set_super_image(const std::string& super_image);
+    void set_new_super_image(const std::string& super_image);
+    void set_misc_image(const std::string& misc_image);
+    void set_new_misc_image(const std::string& new_misc_image);
+    void set_misc_info_txt(const std::string& misc_image);
+    void set_metadata_image(const std::string& metadata_image);
+    void set_new_metadata_image(const std::string& new_metadata_image);
+    void set_vendor_boot_image(const std::string& vendor_boot_image);
+    void set_new_vendor_boot_image(const std::string& new_vendor_boot_image);
+    void set_vbmeta_image(const std::string& vbmeta_image);
+    void set_vbmeta_system_image(const std::string& vbmeta_system_image);
+    void set_vbmeta_vendor_dlkm_image(
+        const std::string& vbmeta_vendor_dlkm_image);
+    void set_new_vbmeta_vendor_dlkm_image(
+        const std::string& vbmeta_vendor_dlkm_image);
+    void set_otheros_esp_image(const std::string& otheros_esp_image);
+    void set_linux_kernel_path(const std::string& linux_kernel_path);
+    void set_linux_initramfs_path(const std::string& linux_initramfs_path);
+    void set_linux_root_image(const std::string& linux_root_image);
+    void set_fuchsia_zedboot_path(const std::string& fuchsia_zedboot_path);
+    void set_fuchsia_multiboot_bin_path(const std::string& fuchsia_multiboot_bin_path);
+    void set_fuchsia_root_image(const std::string& fuchsia_root_image);
+    void set_custom_partition_path(const std::string& custom_partition_path);
+    void set_blank_metadata_image_mb(int blank_metadata_image_mb);
+    void set_blank_sdcard_image_mb(int blank_sdcard_image_mb);
+    void set_bootloader(const std::string& bootloader);
+    void set_initramfs_path(const std::string& initramfs_path);
+    void set_kernel_path(const std::string& kernel_path);
+    void set_guest_android_version(const std::string& guest_android_version);
+    void set_bootconfig_supported(bool bootconfig_supported);
+    void set_filename_encryption_mode(const std::string& userdata_format);
+
+   private:
+    void SetPath(const std::string& key, const std::string& path);
   };
 
  private:
   std::unique_ptr<Json::Value> dictionary_;
 
-  void SetPath(const std::string& key, const std::string& path);
   bool LoadFromFile(const char* file);
   static CuttlefishConfig* BuildConfigImpl(const std::string& path);
 
@@ -570,8 +767,8 @@
   CuttlefishConfig& operator=(const CuttlefishConfig&) = delete;
 };
 
-// Returns the instance number as obtained from the CUTTLEFISH_INSTANCE
-// environment variable or the username.
+// Returns the instance number as obtained from the
+// *kCuttlefishInstanceEnvVarName environment variable or the username.
 int GetInstance();
 
 // Returns default Vsock CID, which is
@@ -608,12 +805,15 @@
 
 // GPU modes
 extern const char* const kGpuModeAuto;
-extern const char* const kGpuModeGuestSwiftshader;
 extern const char* const kGpuModeDrmVirgl;
-extern const char* const kGpuModeGfxStream;
+extern const char* const kGpuModeGfxstream;
+extern const char* const kGpuModeGfxstreamGuestAngle;
+extern const char* const kGpuModeGuestSwiftshader;
+extern const char* const kGpuModeNone;
 
 // HwComposer modes
 extern const char* const kHwComposerAuto;
 extern const char* const kHwComposerDrm;
 extern const char* const kHwComposerRanchu;
+extern const char* const kHwComposerNone;
 }  // namespace cuttlefish
diff --git a/host/libs/config/cuttlefish_config_instance.cpp b/host/libs/config/cuttlefish_config_instance.cpp
index 4f1dd33..cb6b732 100644
--- a/host/libs/config/cuttlefish_config_instance.cpp
+++ b/host/libs/config/cuttlefish_config_instance.cpp
@@ -20,10 +20,15 @@
 #include <json/json.h>
 
 #include "common/libs/utils/files.h"
+#include "common/libs/utils/flags_validator.h"
+#include "host/libs/vm_manager/crosvm_manager.h"
+#include "host/libs/vm_manager/gem5_manager.h"
 
 namespace cuttlefish {
 namespace {
 
+using APBootFlow = CuttlefishConfig::InstanceSpecific::APBootFlow;
+
 const char* kInstances = "instances";
 
 std::string IdToName(const std::string& id) { return kCvdNamePrefix + id; }
@@ -54,6 +59,269 @@
   return PerInstancePath(kInternalDirName);
 }
 
+std::string CuttlefishConfig::InstanceSpecific::instance_uds_dir() const {
+  return config_->InstancesUdsPath(IdToName(id_));
+}
+
+std::string CuttlefishConfig::InstanceSpecific::instance_internal_uds_dir()
+    const {
+  return PerInstanceUdsPath(kInternalDirName);
+}
+
+// TODO (b/163575714) add virtio console support to the bootloader so the
+// virtio console path for the console device can be taken again. When that
+// happens, this function can be deleted along with all the code paths it
+// forces.
+bool CuttlefishConfig::InstanceSpecific::use_bootloader() const {
+  return true;
+};
+
+// System image file paths, vectorized and moved into instance-specific config.
+static constexpr char kBootImage[] = "boot_image";
+std::string CuttlefishConfig::InstanceSpecific::boot_image() const {
+  return (*Dictionary())[kBootImage].asString();
+}
+void CuttlefishConfig::MutableInstanceSpecific::set_boot_image(
+    const std::string& boot_image) {
+  (*Dictionary())[kBootImage] = boot_image;
+}
+static constexpr char kNewBootImage[] = "new_boot_image";
+std::string CuttlefishConfig::InstanceSpecific::new_boot_image() const {
+  return (*Dictionary())[kNewBootImage].asString();
+}
+void CuttlefishConfig::MutableInstanceSpecific::set_new_boot_image(
+    const std::string& new_boot_image) {
+  (*Dictionary())[kNewBootImage] = new_boot_image;
+}
+static constexpr char kInitBootImage[] = "init_boot_image";
+std::string CuttlefishConfig::InstanceSpecific::init_boot_image() const {
+  return (*Dictionary())[kInitBootImage].asString();
+}
+void CuttlefishConfig::MutableInstanceSpecific::set_init_boot_image(
+    const std::string& init_boot_image) {
+  (*Dictionary())[kInitBootImage] = init_boot_image;
+}
+static constexpr char kDataImage[] = "data_image";
+std::string CuttlefishConfig::InstanceSpecific::data_image() const {
+  return (*Dictionary())[kDataImage].asString();
+}
+void CuttlefishConfig::MutableInstanceSpecific::set_data_image(
+    const std::string& data_image) {
+  (*Dictionary())[kDataImage] = data_image;
+}
+static constexpr char kSuperImage[] = "super_image";
+std::string CuttlefishConfig::InstanceSpecific::super_image() const {
+  return (*Dictionary())[kSuperImage].asString();
+}
+void CuttlefishConfig::MutableInstanceSpecific::set_super_image(
+    const std::string& super_image) {
+  (*Dictionary())[kSuperImage] = super_image;
+}
+static constexpr char kNewSuperImage[] = "new_super_image";
+std::string CuttlefishConfig::InstanceSpecific::new_super_image() const {
+  return (*Dictionary())[kNewSuperImage].asString();
+}
+void CuttlefishConfig::MutableInstanceSpecific::set_new_super_image(
+    const std::string& super_image) {
+  (*Dictionary())[kNewSuperImage] = super_image;
+}
+static constexpr char kMiscImage[] = "misc_image";
+std::string CuttlefishConfig::InstanceSpecific::misc_image() const {
+  return (*Dictionary())[kMiscImage].asString();
+}
+void CuttlefishConfig::MutableInstanceSpecific::set_misc_image(
+    const std::string& misc_image) {
+  (*Dictionary())[kMiscImage] = misc_image;
+}
+static constexpr char kNewMiscImage[] = "new_misc_image";
+std::string CuttlefishConfig::InstanceSpecific::new_misc_image() const {
+  return (*Dictionary())[kNewMiscImage].asString();
+}
+static constexpr char kMiscInfoTxt[] = "misc_info_txt";
+std::string CuttlefishConfig::InstanceSpecific::misc_info_txt() const {
+  return (*Dictionary())[kMiscInfoTxt].asString();
+}
+void CuttlefishConfig::MutableInstanceSpecific::set_misc_info_txt(
+    const std::string& misc_info) {
+  (*Dictionary())[kMiscInfoTxt] = misc_info;
+}
+void CuttlefishConfig::MutableInstanceSpecific::set_new_misc_image(
+    const std::string& new_misc_image) {
+  (*Dictionary())[kNewMiscImage] = new_misc_image;
+}
+static constexpr char kMetadataImage[] = "metadata_image";
+std::string CuttlefishConfig::InstanceSpecific::metadata_image() const {
+  return (*Dictionary())[kMetadataImage].asString();
+}
+void CuttlefishConfig::MutableInstanceSpecific::set_metadata_image(
+    const std::string& metadata_image) {
+  (*Dictionary())[kMetadataImage] = metadata_image;
+}
+static constexpr char kNewMetadataImage[] = "new_metadata_image";
+std::string CuttlefishConfig::InstanceSpecific::new_metadata_image() const {
+  return (*Dictionary())[kNewMetadataImage].asString();
+}
+void CuttlefishConfig::MutableInstanceSpecific::set_new_metadata_image(
+    const std::string& new_metadata_image) {
+  (*Dictionary())[kNewMetadataImage] = new_metadata_image;
+}
+static constexpr char kVendorBootImage[] = "vendor_boot_image";
+std::string CuttlefishConfig::InstanceSpecific::vendor_boot_image() const {
+  return (*Dictionary())[kVendorBootImage].asString();
+}
+void CuttlefishConfig::MutableInstanceSpecific::set_vendor_boot_image(
+    const std::string& vendor_boot_image) {
+  (*Dictionary())[kVendorBootImage] = vendor_boot_image;
+}
+static constexpr char kNewVendorBootImage[] = "new_vendor_boot_image";
+std::string CuttlefishConfig::InstanceSpecific::new_vendor_boot_image() const {
+  return (*Dictionary())[kNewVendorBootImage].asString();
+}
+void CuttlefishConfig::MutableInstanceSpecific::set_new_vendor_boot_image(
+    const std::string& new_vendor_boot_image) {
+  (*Dictionary())[kNewVendorBootImage] = new_vendor_boot_image;
+}
+static constexpr char kVbmetaImage[] = "vbmeta_image";
+std::string CuttlefishConfig::InstanceSpecific::vbmeta_image() const {
+  return (*Dictionary())[kVbmetaImage].asString();
+}
+void CuttlefishConfig::MutableInstanceSpecific::set_vbmeta_image(
+    const std::string& vbmeta_image) {
+  (*Dictionary())[kVbmetaImage] = vbmeta_image;
+}
+static constexpr char kVbmetaSystemImage[] = "vbmeta_system_image";
+std::string CuttlefishConfig::InstanceSpecific::vbmeta_system_image() const {
+  return (*Dictionary())[kVbmetaSystemImage].asString();
+}
+void CuttlefishConfig::MutableInstanceSpecific::set_vbmeta_system_image(
+    const std::string& vbmeta_system_image) {
+  (*Dictionary())[kVbmetaSystemImage] = vbmeta_system_image;
+}
+static constexpr char kVbmetaVendorDlkmImage[] = "vbmeta_vendor_dlkm_image";
+std::string CuttlefishConfig::InstanceSpecific::vbmeta_vendor_dlkm_image()
+    const {
+  return (*Dictionary())[kVbmetaVendorDlkmImage].asString();
+}
+void CuttlefishConfig::MutableInstanceSpecific::set_vbmeta_vendor_dlkm_image(
+    const std::string& image) {
+  (*Dictionary())[kVbmetaVendorDlkmImage] = image;
+}
+static constexpr char kNewVbmetaVendorDlkmImage[] =
+    "new_vbmeta_vendor_dlkm_image";
+std::string CuttlefishConfig::InstanceSpecific::new_vbmeta_vendor_dlkm_image()
+    const {
+  return (*Dictionary())[kNewVbmetaVendorDlkmImage].asString();
+}
+void CuttlefishConfig::MutableInstanceSpecific::
+    set_new_vbmeta_vendor_dlkm_image(const std::string& image) {
+  (*Dictionary())[kNewVbmetaVendorDlkmImage] = image;
+}
+static constexpr char kOtherosEspImage[] = "otheros_esp_image";
+std::string CuttlefishConfig::InstanceSpecific::otheros_esp_image() const {
+  return (*Dictionary())[kOtherosEspImage].asString();
+}
+void CuttlefishConfig::MutableInstanceSpecific::set_otheros_esp_image(
+    const std::string& otheros_esp_image) {
+  (*Dictionary())[kOtherosEspImage] = otheros_esp_image;
+}
+static constexpr char kLinuxKernelPath[] = "linux_kernel_path";
+std::string CuttlefishConfig::InstanceSpecific::linux_kernel_path() const {
+  return (*Dictionary())[kLinuxKernelPath].asString();
+}
+void CuttlefishConfig::MutableInstanceSpecific::set_linux_kernel_path(
+    const std::string& linux_kernel_path) {
+  (*Dictionary())[kLinuxKernelPath] = linux_kernel_path;
+}
+static constexpr char kLinuxInitramfsPath[] = "linux_initramfs_path";
+std::string CuttlefishConfig::InstanceSpecific::linux_initramfs_path() const {
+  return (*Dictionary())[kLinuxInitramfsPath].asString();
+}
+void CuttlefishConfig::MutableInstanceSpecific::set_linux_initramfs_path(
+    const std::string& linux_initramfs_path) {
+  (*Dictionary())[kLinuxInitramfsPath] = linux_initramfs_path;
+}
+static constexpr char kLinuxRootImage[] = "linux_root_image";
+std::string CuttlefishConfig::InstanceSpecific::linux_root_image() const {
+  return (*Dictionary())[kLinuxRootImage].asString();
+}
+void CuttlefishConfig::MutableInstanceSpecific::set_linux_root_image(
+    const std::string& linux_root_image) {
+  (*Dictionary())[kLinuxRootImage] = linux_root_image;
+}
+static constexpr char kFuchsiaZedbootPath[] = "fuchsia_zedboot_path";
+void CuttlefishConfig::MutableInstanceSpecific::set_fuchsia_zedboot_path(
+    const std::string& fuchsia_zedboot_path) {
+  (*Dictionary())[kFuchsiaZedbootPath] = fuchsia_zedboot_path;
+}
+std::string CuttlefishConfig::InstanceSpecific::fuchsia_zedboot_path() const {
+  return (*Dictionary())[kFuchsiaZedbootPath].asString();
+}
+static constexpr char kFuchsiaMultibootBinPath[] = "multiboot_bin_path";
+void CuttlefishConfig::MutableInstanceSpecific::set_fuchsia_multiboot_bin_path(
+    const std::string& fuchsia_multiboot_bin_path) {
+  (*Dictionary())[kFuchsiaMultibootBinPath] = fuchsia_multiboot_bin_path;
+}
+std::string CuttlefishConfig::InstanceSpecific::fuchsia_multiboot_bin_path() const {
+  return (*Dictionary())[kFuchsiaMultibootBinPath].asString();
+}
+static constexpr char kFuchsiaRootImage[] = "fuchsia_root_image";
+void CuttlefishConfig::MutableInstanceSpecific::set_fuchsia_root_image(
+    const std::string& fuchsia_root_image) {
+  (*Dictionary())[kFuchsiaRootImage] = fuchsia_root_image;
+}
+std::string CuttlefishConfig::InstanceSpecific::fuchsia_root_image() const {
+  return (*Dictionary())[kFuchsiaRootImage].asString();
+}
+static constexpr char kCustomPartitionPath[] = "custom_partition_path";
+void CuttlefishConfig::MutableInstanceSpecific::set_custom_partition_path(
+    const std::string& custom_partition_path) {
+  (*Dictionary())[kCustomPartitionPath] = custom_partition_path;
+}
+std::string CuttlefishConfig::InstanceSpecific::custom_partition_path() const {
+  return (*Dictionary())[kCustomPartitionPath].asString();
+}
+static constexpr char kBlankMetadataImageMb[] = "blank_metadata_image_mb";
+int CuttlefishConfig::InstanceSpecific::blank_metadata_image_mb() const {
+  return (*Dictionary())[kBlankMetadataImageMb].asInt();
+}
+void CuttlefishConfig::MutableInstanceSpecific::set_blank_metadata_image_mb(
+    int blank_metadata_image_mb) {
+  (*Dictionary())[kBlankMetadataImageMb] = blank_metadata_image_mb;
+}
+static constexpr char kBlankSdcardImageMb[] = "blank_sdcard_image_mb";
+int CuttlefishConfig::InstanceSpecific::blank_sdcard_image_mb() const {
+  return (*Dictionary())[kBlankSdcardImageMb].asInt();
+}
+void CuttlefishConfig::MutableInstanceSpecific::set_blank_sdcard_image_mb(
+    int blank_sdcard_image_mb) {
+  (*Dictionary())[kBlankSdcardImageMb] = blank_sdcard_image_mb;
+}
+static constexpr char kBootloader[] = "bootloader";
+std::string CuttlefishConfig::InstanceSpecific::bootloader() const {
+  return (*Dictionary())[kBootloader].asString();
+}
+void CuttlefishConfig::MutableInstanceSpecific::set_bootloader(
+    const std::string& bootloader) {
+  (*Dictionary())[kBootloader] = bootloader;
+}
+static constexpr char kInitramfsPath[] = "initramfs_path";
+std::string CuttlefishConfig::InstanceSpecific::initramfs_path() const {
+  return (*Dictionary())[kInitramfsPath].asString();
+}
+void CuttlefishConfig::MutableInstanceSpecific::set_initramfs_path(
+    const std::string& initramfs_path) {
+  (*Dictionary())[kInitramfsPath] = initramfs_path;
+}
+static constexpr char kKernelPath[] = "kernel_path";
+std::string CuttlefishConfig::InstanceSpecific::kernel_path() const {
+  return (*Dictionary())[kKernelPath].asString();
+}
+void CuttlefishConfig::MutableInstanceSpecific::set_kernel_path(
+    const std::string& kernel_path) {
+  (*Dictionary())[kKernelPath] = kernel_path;
+}
+// end of system image files
+
 static constexpr char kSerialNumber[] = "serial_number";
 std::string CuttlefishConfig::InstanceSpecific::serial_number() const {
   return (*Dictionary())[kSerialNumber].asString();
@@ -81,6 +349,35 @@
   (*Dictionary())[kVirtualDiskPaths] = virtual_disks_json_obj;
 }
 
+static constexpr char kGuestAndroidVersion[] = "guest_android_version";
+std::string CuttlefishConfig::InstanceSpecific::guest_android_version() const {
+  return (*Dictionary())[kGuestAndroidVersion].asString();
+}
+void CuttlefishConfig::MutableInstanceSpecific::set_guest_android_version(
+    const std::string& guest_android_version) {
+  (*Dictionary())[kGuestAndroidVersion] = guest_android_version;
+}
+
+static constexpr char kBootconfigSupported[] = "bootconfig_supported";
+bool CuttlefishConfig::InstanceSpecific::bootconfig_supported() const {
+  return (*Dictionary())[kBootconfigSupported].asBool();
+}
+void CuttlefishConfig::MutableInstanceSpecific::set_bootconfig_supported(
+    bool bootconfig_supported) {
+  (*Dictionary())[kBootconfigSupported] = bootconfig_supported;
+}
+
+static constexpr char kFilenameEncryptionMode[] = "filename_encryption_mode";
+std::string CuttlefishConfig::InstanceSpecific::filename_encryption_mode() const {
+  return (*Dictionary())[kFilenameEncryptionMode].asString();
+}
+void CuttlefishConfig::MutableInstanceSpecific::set_filename_encryption_mode(
+    const std::string& filename_encryption_mode) {
+  auto fmt = filename_encryption_mode;
+  std::transform(fmt.begin(), fmt.end(), fmt.begin(), ::tolower);
+  (*Dictionary())[kFilenameEncryptionMode] = fmt;
+}
+
 std::string CuttlefishConfig::InstanceSpecific::kernel_log_pipe_name() const {
   return AbsolutePath(PerInstanceInternalPath("kernel-log-pipe"));
 }
@@ -128,6 +425,518 @@
   (*Dictionary())[kGnssFilePath] = gnss_file_path;
 }
 
+static constexpr char kFixedLocationFilePath[] = "fixed_location_file_path";
+std::string CuttlefishConfig::InstanceSpecific::fixed_location_file_path()
+    const {
+  return (*Dictionary())[kFixedLocationFilePath].asString();
+}
+void CuttlefishConfig::MutableInstanceSpecific::set_fixed_location_file_path(
+    const std::string& fixed_location_file_path) {
+  (*Dictionary())[kFixedLocationFilePath] = fixed_location_file_path;
+}
+
+static constexpr char kGem5BinaryDir[] = "gem5_binary_dir";
+std::string CuttlefishConfig::InstanceSpecific::gem5_binary_dir() const {
+  return (*Dictionary())[kGem5BinaryDir].asString();
+}
+void CuttlefishConfig::MutableInstanceSpecific::set_gem5_binary_dir(
+    const std::string& gem5_binary_dir) {
+  (*Dictionary())[kGem5BinaryDir] = gem5_binary_dir;
+}
+
+static constexpr char kGem5CheckpointDir[] = "gem5_checkpoint_dir";
+std::string CuttlefishConfig::InstanceSpecific::gem5_checkpoint_dir() const {
+  return (*Dictionary())[kGem5CheckpointDir].asString();
+}
+void CuttlefishConfig::MutableInstanceSpecific::set_gem5_checkpoint_dir(
+    const std::string& gem5_checkpoint_dir) {
+  (*Dictionary())[kGem5CheckpointDir] = gem5_checkpoint_dir;
+}
+
+static constexpr char kKgdb[] = "kgdb";
+void CuttlefishConfig::MutableInstanceSpecific::set_kgdb(bool kgdb) {
+  (*Dictionary())[kKgdb] = kgdb;
+}
+bool CuttlefishConfig::InstanceSpecific::kgdb() const {
+  return (*Dictionary())[kKgdb].asBool();
+}
+
+static constexpr char kCpus[] = "cpus";
+void CuttlefishConfig::MutableInstanceSpecific::set_cpus(int cpus) { (*Dictionary())[kCpus] = cpus; }
+int CuttlefishConfig::InstanceSpecific::cpus() const { return (*Dictionary())[kCpus].asInt(); }
+
+static constexpr char kDataPolicy[] = "data_policy";
+void CuttlefishConfig::MutableInstanceSpecific::set_data_policy(
+    const std::string& data_policy) {
+  (*Dictionary())[kDataPolicy] = data_policy;
+}
+std::string CuttlefishConfig::InstanceSpecific::data_policy() const {
+  return (*Dictionary())[kDataPolicy].asString();
+}
+
+static constexpr char kBlankDataImageMb[] = "blank_data_image_mb";
+void CuttlefishConfig::MutableInstanceSpecific::set_blank_data_image_mb(
+    int blank_data_image_mb) {
+  (*Dictionary())[kBlankDataImageMb] = blank_data_image_mb;
+}
+int CuttlefishConfig::InstanceSpecific::blank_data_image_mb() const {
+  return (*Dictionary())[kBlankDataImageMb].asInt();
+}
+
+static constexpr char kGdbPort[] = "gdb_port";
+void CuttlefishConfig::MutableInstanceSpecific::set_gdb_port(int port) {
+  (*Dictionary())[kGdbPort] = port;
+}
+int CuttlefishConfig::InstanceSpecific::gdb_port() const {
+  return (*Dictionary())[kGdbPort].asInt();
+}
+
+static constexpr char kMemoryMb[] = "memory_mb";
+int CuttlefishConfig::InstanceSpecific::memory_mb() const {
+  return (*Dictionary())[kMemoryMb].asInt();
+}
+void CuttlefishConfig::MutableInstanceSpecific::set_memory_mb(int memory_mb) {
+  (*Dictionary())[kMemoryMb] = memory_mb;
+}
+
+static constexpr char kDdrMemMb[] = "ddr_mem_mb";
+int CuttlefishConfig::InstanceSpecific::ddr_mem_mb() const {
+  return (*Dictionary())[kDdrMemMb].asInt();
+}
+void CuttlefishConfig::MutableInstanceSpecific::set_ddr_mem_mb(int ddr_mem_mb) {
+  (*Dictionary())[kDdrMemMb] = ddr_mem_mb;
+}
+
+static constexpr char kSetupWizardMode[] = "setupwizard_mode";
+std::string CuttlefishConfig::InstanceSpecific::setupwizard_mode() const {
+  return (*Dictionary())[kSetupWizardMode].asString();
+}
+Result<void> CuttlefishConfig::MutableInstanceSpecific::set_setupwizard_mode(
+    const std::string& mode) {
+  CF_EXPECT(ValidateStupWizardMode(mode),
+            "setupwizard_mode flag has invalid value: " << mode);
+  (*Dictionary())[kSetupWizardMode] = mode;
+  return {};
+}
+
+static constexpr char kUserdataFormat[] = "userdata_format";
+std::string CuttlefishConfig::InstanceSpecific::userdata_format() const {
+  return (*Dictionary())[kUserdataFormat].asString();
+}
+void CuttlefishConfig::MutableInstanceSpecific::set_userdata_format(const std::string& userdata_format) {
+  auto fmt = userdata_format;
+  std::transform(fmt.begin(), fmt.end(), fmt.begin(), ::tolower);
+  (*Dictionary())[kUserdataFormat] = fmt;
+}
+
+static constexpr char kGuestEnforceSecurity[] = "guest_enforce_security";
+void CuttlefishConfig::MutableInstanceSpecific::set_guest_enforce_security(bool guest_enforce_security) {
+  (*Dictionary())[kGuestEnforceSecurity] = guest_enforce_security;
+}
+bool CuttlefishConfig::InstanceSpecific::guest_enforce_security() const {
+  return (*Dictionary())[kGuestEnforceSecurity].asBool();
+}
+
+static constexpr char kUseSdcard[] = "use_sdcard";
+void CuttlefishConfig::MutableInstanceSpecific::set_use_sdcard(bool use_sdcard) {
+  (*Dictionary())[kUseSdcard] = use_sdcard;
+}
+bool CuttlefishConfig::InstanceSpecific::use_sdcard() const {
+  return (*Dictionary())[kUseSdcard].asBool();
+}
+
+static constexpr char kPauseInBootloader[] = "pause_in_bootloader";
+void CuttlefishConfig::MutableInstanceSpecific::set_pause_in_bootloader(bool pause_in_bootloader) {
+  (*Dictionary())[kPauseInBootloader] = pause_in_bootloader;
+}
+bool CuttlefishConfig::InstanceSpecific::pause_in_bootloader() const {
+  return (*Dictionary())[kPauseInBootloader].asBool();
+}
+
+static constexpr char kRunAsDaemon[] = "run_as_daemon";
+bool CuttlefishConfig::InstanceSpecific::run_as_daemon() const {
+  return (*Dictionary())[kRunAsDaemon].asBool();
+}
+void CuttlefishConfig::MutableInstanceSpecific::set_run_as_daemon(bool run_as_daemon) {
+  (*Dictionary())[kRunAsDaemon] = run_as_daemon;
+}
+
+static constexpr char kEnableMinimalMode[] = "enable_minimal_mode";
+bool CuttlefishConfig::InstanceSpecific::enable_minimal_mode() const {
+  return (*Dictionary())[kEnableMinimalMode].asBool();
+}
+void CuttlefishConfig::MutableInstanceSpecific::set_enable_minimal_mode(
+    bool enable_minimal_mode) {
+  (*Dictionary())[kEnableMinimalMode] = enable_minimal_mode;
+}
+
+static constexpr char kRunModemSimulator[] = "enable_modem_simulator";
+bool CuttlefishConfig::InstanceSpecific::enable_modem_simulator() const {
+  return (*Dictionary())[kRunModemSimulator].asBool();
+}
+void CuttlefishConfig::MutableInstanceSpecific::set_enable_modem_simulator(
+    bool enable_modem_simulator) {
+  (*Dictionary())[kRunModemSimulator] = enable_modem_simulator;
+}
+
+static constexpr char kModemSimulatorInstanceNumber[] =
+    "modem_simulator_instance_number";
+void CuttlefishConfig::MutableInstanceSpecific::
+    set_modem_simulator_instance_number(int instance_number) {
+  (*Dictionary())[kModemSimulatorInstanceNumber] = instance_number;
+}
+int CuttlefishConfig::InstanceSpecific::modem_simulator_instance_number()
+    const {
+  return (*Dictionary())[kModemSimulatorInstanceNumber].asInt();
+}
+
+static constexpr char kModemSimulatorSimType[] = "modem_simulator_sim_type";
+void CuttlefishConfig::MutableInstanceSpecific::set_modem_simulator_sim_type(
+    int sim_type) {
+  (*Dictionary())[kModemSimulatorSimType] = sim_type;
+}
+int CuttlefishConfig::InstanceSpecific::modem_simulator_sim_type() const {
+  return (*Dictionary())[kModemSimulatorSimType].asInt();
+}
+
+static constexpr char kGpuMode[] = "gpu_mode";
+std::string CuttlefishConfig::InstanceSpecific::gpu_mode() const {
+  return (*Dictionary())[kGpuMode].asString();
+}
+void CuttlefishConfig::MutableInstanceSpecific::set_gpu_mode(const std::string& name) {
+  (*Dictionary())[kGpuMode] = name;
+}
+
+static constexpr char kGpuAngleFeatureOverridesEnabled[] =
+    "gpu_angle_feature_overrides_enabled";
+std::string
+CuttlefishConfig::InstanceSpecific::gpu_angle_feature_overrides_enabled()
+    const {
+  return (*Dictionary())[kGpuAngleFeatureOverridesEnabled].asString();
+}
+void CuttlefishConfig::MutableInstanceSpecific::
+    set_gpu_angle_feature_overrides_enabled(const std::string& overrides) {
+  (*Dictionary())[kGpuAngleFeatureOverridesEnabled] = overrides;
+}
+
+static constexpr char kGpuAngleFeatureOverridesDisabled[] =
+    "gpu_angle_feature_overrides_disabled";
+std::string
+CuttlefishConfig::InstanceSpecific::gpu_angle_feature_overrides_disabled()
+    const {
+  return (*Dictionary())[kGpuAngleFeatureOverridesDisabled].asString();
+}
+void CuttlefishConfig::MutableInstanceSpecific::
+    set_gpu_angle_feature_overrides_disabled(const std::string& overrides) {
+  (*Dictionary())[kGpuAngleFeatureOverridesDisabled] = overrides;
+}
+
+static constexpr char kGpuCaptureBinary[] = "gpu_capture_binary";
+std::string CuttlefishConfig::InstanceSpecific::gpu_capture_binary() const {
+  return (*Dictionary())[kGpuCaptureBinary].asString();
+}
+void CuttlefishConfig::MutableInstanceSpecific::set_gpu_capture_binary(const std::string& name) {
+  (*Dictionary())[kGpuCaptureBinary] = name;
+}
+
+static constexpr char kRestartSubprocesses[] = "restart_subprocesses";
+bool CuttlefishConfig::InstanceSpecific::restart_subprocesses() const {
+  return (*Dictionary())[kRestartSubprocesses].asBool();
+}
+void CuttlefishConfig::MutableInstanceSpecific::set_restart_subprocesses(bool restart_subprocesses) {
+  (*Dictionary())[kRestartSubprocesses] = restart_subprocesses;
+}
+
+static constexpr char kHWComposer[] = "hwcomposer";
+std::string CuttlefishConfig::InstanceSpecific::hwcomposer() const {
+  return (*Dictionary())[kHWComposer].asString();
+}
+void CuttlefishConfig::MutableInstanceSpecific::set_hwcomposer(const std::string& name) {
+  (*Dictionary())[kHWComposer] = name;
+}
+
+static constexpr char kEnableGpuUdmabuf[] = "enable_gpu_udmabuf";
+void CuttlefishConfig::MutableInstanceSpecific::set_enable_gpu_udmabuf(const bool enable_gpu_udmabuf) {
+  (*Dictionary())[kEnableGpuUdmabuf] = enable_gpu_udmabuf;
+}
+bool CuttlefishConfig::InstanceSpecific::enable_gpu_udmabuf() const {
+  return (*Dictionary())[kEnableGpuUdmabuf].asBool();
+}
+
+static constexpr char kEnableAudio[] = "enable_audio";
+void CuttlefishConfig::MutableInstanceSpecific::set_enable_audio(bool enable) {
+  (*Dictionary())[kEnableAudio] = enable;
+}
+bool CuttlefishConfig::InstanceSpecific::enable_audio() const {
+  return (*Dictionary())[kEnableAudio].asBool();
+}
+
+static constexpr char kEnableGnssGrpcProxy[] = "enable_gnss_grpc_proxy";
+void CuttlefishConfig::MutableInstanceSpecific::set_enable_gnss_grpc_proxy(const bool enable_gnss_grpc_proxy) {
+  (*Dictionary())[kEnableGnssGrpcProxy] = enable_gnss_grpc_proxy;
+}
+bool CuttlefishConfig::InstanceSpecific::enable_gnss_grpc_proxy() const {
+  return (*Dictionary())[kEnableGnssGrpcProxy].asBool();
+}
+
+static constexpr char kEnableBootAnimation[] = "enable_bootanimation";
+bool CuttlefishConfig::InstanceSpecific::enable_bootanimation() const {
+  return (*Dictionary())[kEnableBootAnimation].asBool();
+}
+void CuttlefishConfig::MutableInstanceSpecific::set_enable_bootanimation(
+    bool enable_bootanimation) {
+  (*Dictionary())[kEnableBootAnimation] = enable_bootanimation;
+}
+
+static constexpr char kRecordScreen[] = "record_screen";
+void CuttlefishConfig::MutableInstanceSpecific::set_record_screen(
+    bool record_screen) {
+  (*Dictionary())[kRecordScreen] = record_screen;
+}
+bool CuttlefishConfig::InstanceSpecific::record_screen() const {
+  return (*Dictionary())[kRecordScreen].asBool();
+}
+
+static constexpr char kGem5DebugFile[] = "gem5_debug_file";
+std::string CuttlefishConfig::InstanceSpecific::gem5_debug_file() const {
+  return (*Dictionary())[kGem5DebugFile].asString();
+}
+void CuttlefishConfig::MutableInstanceSpecific::set_gem5_debug_file(const std::string& gem5_debug_file) {
+  (*Dictionary())[kGem5DebugFile] = gem5_debug_file;
+}
+
+static constexpr char kProtectedVm[] = "protected_vm";
+void CuttlefishConfig::MutableInstanceSpecific::set_protected_vm(bool protected_vm) {
+  (*Dictionary())[kProtectedVm] = protected_vm;
+}
+bool CuttlefishConfig::InstanceSpecific::protected_vm() const {
+  return (*Dictionary())[kProtectedVm].asBool();
+}
+
+static constexpr char kMte[] = "mte";
+void CuttlefishConfig::MutableInstanceSpecific::set_mte(bool mte) {
+  (*Dictionary())[kMte] = mte;
+}
+bool CuttlefishConfig::InstanceSpecific::mte() const {
+  return (*Dictionary())[kMte].asBool();
+}
+
+static constexpr char kEnableKernelLog[] = "enable_kernel_log";
+void CuttlefishConfig::MutableInstanceSpecific::set_enable_kernel_log(bool enable_kernel_log) {
+  (*Dictionary())[kEnableKernelLog] = enable_kernel_log;
+}
+bool CuttlefishConfig::InstanceSpecific::enable_kernel_log() const {
+  return (*Dictionary())[kEnableKernelLog].asBool();
+}
+
+static constexpr char kBootSlot[] = "boot_slot";
+void CuttlefishConfig::MutableInstanceSpecific::set_boot_slot(const std::string& boot_slot) {
+  (*Dictionary())[kBootSlot] = boot_slot;
+}
+std::string CuttlefishConfig::InstanceSpecific::boot_slot() const {
+  return (*Dictionary())[kBootSlot].asString();
+}
+
+static constexpr char kEnableWebRTC[] = "enable_webrtc";
+void CuttlefishConfig::MutableInstanceSpecific::set_enable_webrtc(bool enable_webrtc) {
+  (*Dictionary())[kEnableWebRTC] = enable_webrtc;
+}
+bool CuttlefishConfig::InstanceSpecific::enable_webrtc() const {
+  return (*Dictionary())[kEnableWebRTC].asBool();
+}
+
+static constexpr char kWebRTCAssetsDir[] = "webrtc_assets_dir";
+void CuttlefishConfig::MutableInstanceSpecific::set_webrtc_assets_dir(const std::string& webrtc_assets_dir) {
+  (*Dictionary())[kWebRTCAssetsDir] = webrtc_assets_dir;
+}
+std::string CuttlefishConfig::InstanceSpecific::webrtc_assets_dir() const {
+  return (*Dictionary())[kWebRTCAssetsDir].asString();
+}
+
+static constexpr char kWebrtcTcpPortRange[] = "webrtc_tcp_port_range";
+void CuttlefishConfig::MutableInstanceSpecific::set_webrtc_tcp_port_range(
+    std::pair<uint16_t, uint16_t> range) {
+  Json::Value arr(Json::ValueType::arrayValue);
+  arr[0] = range.first;
+  arr[1] = range.second;
+  (*Dictionary())[kWebrtcTcpPortRange] = arr;
+}
+std::pair<uint16_t, uint16_t> CuttlefishConfig::InstanceSpecific::webrtc_tcp_port_range() const {
+  std::pair<uint16_t, uint16_t> ret;
+  ret.first = (*Dictionary())[kWebrtcTcpPortRange][0].asInt();
+  ret.second = (*Dictionary())[kWebrtcTcpPortRange][1].asInt();
+  return ret;
+}
+
+static constexpr char kWebrtcUdpPortRange[] = "webrtc_udp_port_range";
+void CuttlefishConfig::MutableInstanceSpecific::set_webrtc_udp_port_range(
+    std::pair<uint16_t, uint16_t> range) {
+  Json::Value arr(Json::ValueType::arrayValue);
+  arr[0] = range.first;
+  arr[1] = range.second;
+  (*Dictionary())[kWebrtcUdpPortRange] = arr;
+}
+std::pair<uint16_t, uint16_t> CuttlefishConfig::InstanceSpecific::webrtc_udp_port_range() const {
+  std::pair<uint16_t, uint16_t> ret;
+  ret.first = (*Dictionary())[kWebrtcUdpPortRange][0].asInt();
+  ret.second = (*Dictionary())[kWebrtcUdpPortRange][1].asInt();
+  return ret;
+}
+
+static constexpr char kGrpcConfig[] = "grpc_config";
+std::string CuttlefishConfig::InstanceSpecific::grpc_socket_path() const {
+  return (*Dictionary())[kGrpcConfig].asString();
+}
+
+void CuttlefishConfig::MutableInstanceSpecific::set_grpc_socket_path(
+    const std::string& socket_path) {
+  (*Dictionary())[kGrpcConfig] = socket_path;
+}
+
+static constexpr char kSmt[] = "smt";
+void CuttlefishConfig::MutableInstanceSpecific::set_smt(bool smt) {
+  (*Dictionary())[kSmt] = smt;
+}
+bool CuttlefishConfig::InstanceSpecific::smt() const {
+  return (*Dictionary())[kSmt].asBool();
+}
+
+static constexpr char kCrosvmBinary[] = "crosvm_binary";
+std::string CuttlefishConfig::InstanceSpecific::crosvm_binary() const {
+  return (*Dictionary())[kCrosvmBinary].asString();
+}
+void CuttlefishConfig::MutableInstanceSpecific::set_crosvm_binary(
+    const std::string& crosvm_binary) {
+  (*Dictionary())[kCrosvmBinary] = crosvm_binary;
+}
+
+void CuttlefishConfig::MutableInstanceSpecific::SetPath(
+    const std::string& key, const std::string& path) {
+  if (!path.empty()) {
+    (*Dictionary())[key] = AbsolutePath(path);
+  }
+}
+
+static constexpr char kSeccompPolicyDir[] = "seccomp_policy_dir";
+void CuttlefishConfig::MutableInstanceSpecific::set_seccomp_policy_dir(
+    const std::string& seccomp_policy_dir) {
+  if (seccomp_policy_dir.empty()) {
+    (*Dictionary())[kSeccompPolicyDir] = seccomp_policy_dir;
+    return;
+  }
+  SetPath(kSeccompPolicyDir, seccomp_policy_dir);
+}
+std::string CuttlefishConfig::InstanceSpecific::seccomp_policy_dir() const {
+  return (*Dictionary())[kSeccompPolicyDir].asString();
+}
+
+static constexpr char kQemuBinaryDir[] = "qemu_binary_dir";
+std::string CuttlefishConfig::InstanceSpecific::qemu_binary_dir() const {
+  return (*Dictionary())[kQemuBinaryDir].asString();
+}
+void CuttlefishConfig::MutableInstanceSpecific::set_qemu_binary_dir(
+    const std::string& qemu_binary_dir) {
+  (*Dictionary())[kQemuBinaryDir] = qemu_binary_dir;
+}
+
+static constexpr char kVhostNet[] = "vhost_net";
+void CuttlefishConfig::MutableInstanceSpecific::set_vhost_net(bool vhost_net) {
+  (*Dictionary())[kVhostNet] = vhost_net;
+}
+bool CuttlefishConfig::InstanceSpecific::vhost_net() const {
+  return (*Dictionary())[kVhostNet].asBool();
+}
+
+static constexpr char kRilDns[] = "ril_dns";
+void CuttlefishConfig::MutableInstanceSpecific::set_ril_dns(const std::string& ril_dns) {
+  (*Dictionary())[kRilDns] = ril_dns;
+}
+std::string CuttlefishConfig::InstanceSpecific::ril_dns() const {
+  return (*Dictionary())[kRilDns].asString();
+}
+
+static constexpr char kDisplayConfigs[] = "display_configs";
+static constexpr char kXRes[] = "x_res";
+static constexpr char kYRes[] = "y_res";
+static constexpr char kDpi[] = "dpi";
+static constexpr char kRefreshRateHz[] = "refresh_rate_hz";
+std::vector<CuttlefishConfig::DisplayConfig>
+CuttlefishConfig::InstanceSpecific::display_configs() const {
+  std::vector<DisplayConfig> display_configs;
+  for (auto& display_config_json : (*Dictionary())[kDisplayConfigs]) {
+    DisplayConfig display_config = {};
+    display_config.width = display_config_json[kXRes].asInt();
+    display_config.height = display_config_json[kYRes].asInt();
+    display_config.dpi = display_config_json[kDpi].asInt();
+    display_config.refresh_rate_hz =
+        display_config_json[kRefreshRateHz].asInt();
+    display_configs.emplace_back(display_config);
+  }
+  return display_configs;
+}
+void CuttlefishConfig::MutableInstanceSpecific::set_display_configs(
+    const std::vector<DisplayConfig>& display_configs) {
+  Json::Value display_configs_json(Json::arrayValue);
+
+  for (const DisplayConfig& display_configs : display_configs) {
+    Json::Value display_config_json(Json::objectValue);
+    display_config_json[kXRes] = display_configs.width;
+    display_config_json[kYRes] = display_configs.height;
+    display_config_json[kDpi] = display_configs.dpi;
+    display_config_json[kRefreshRateHz] = display_configs.refresh_rate_hz;
+    display_configs_json.append(display_config_json);
+  }
+
+  (*Dictionary())[kDisplayConfigs] = display_configs_json;
+}
+
+
+static constexpr char kTargetArch[] = "target_arch";
+void CuttlefishConfig::MutableInstanceSpecific::set_target_arch(
+    Arch target_arch) {
+  (*Dictionary())[kTargetArch] = static_cast<int>(target_arch);
+}
+Arch CuttlefishConfig::InstanceSpecific::target_arch() const {
+  return static_cast<Arch>((*Dictionary())[kTargetArch].asInt());
+}
+
+static constexpr char kEnableSandbox[] = "enable_sandbox";
+void CuttlefishConfig::MutableInstanceSpecific::set_enable_sandbox(const bool enable_sandbox) {
+  (*Dictionary())[kEnableSandbox] = enable_sandbox;
+}
+bool CuttlefishConfig::InstanceSpecific::enable_sandbox() const {
+  return (*Dictionary())[kEnableSandbox].asBool();
+}
+static constexpr char kConsole[] = "console";
+void CuttlefishConfig::MutableInstanceSpecific::set_console(bool console) {
+  (*Dictionary())[kConsole] = console;
+}
+bool CuttlefishConfig::InstanceSpecific::console() const {
+  return (*Dictionary())[kConsole].asBool();
+}
+std::string CuttlefishConfig::InstanceSpecific::console_dev() const {
+  auto can_use_virtio_console = !kgdb() && !use_bootloader();
+  std::string console_dev;
+  if (can_use_virtio_console ||
+      config_->vm_manager() == vm_manager::Gem5Manager::name()) {
+    // If kgdb and the bootloader are disabled, the Android serial console
+    // spawns on a virtio-console port. If the bootloader is enabled, virtio
+    // console can't be used since uboot doesn't support it.
+    console_dev = "hvc1";
+  } else {
+    // QEMU and Gem5 emulate pl011 on ARM/ARM64, but QEMU and crosvm on other
+    // architectures emulate ns16550a/uart8250 instead.
+    Arch target = target_arch();
+    if ((target == Arch::Arm64 || target == Arch::Arm) &&
+        config_->vm_manager() != vm_manager::CrosvmManager::name()) {
+      console_dev = "ttyAMA0";
+    } else {
+      console_dev = "ttyS0";
+    }
+  }
+  return console_dev;
+}
+
 std::string CuttlefishConfig::InstanceSpecific::logcat_pipe_name() const {
   return AbsolutePath(PerInstanceInternalPath("logcat-pipe"));
 }
@@ -154,7 +963,7 @@
 
 std::string CuttlefishConfig::InstanceSpecific::launcher_monitor_socket_path()
     const {
-  return AbsolutePath(PerInstancePath("launcher_monitor.sock"));
+  return AbsolutePath(PerInstanceUdsPath("launcher_monitor.sock"));
 }
 
 static constexpr char kModemSimulatorPorts[] = "modem_simulator_ports";
@@ -179,20 +988,78 @@
   return AbsolutePath(PerInstancePath("persistent_composite.img"));
 }
 
+std::string CuttlefishConfig::InstanceSpecific::persistent_ap_composite_disk_path()
+    const {
+  return AbsolutePath(PerInstancePath("ap_persistent_composite.img"));
+}
+
+std::string CuttlefishConfig::InstanceSpecific::os_composite_disk_path()
+    const {
+  return AbsolutePath(PerInstancePath("os_composite.img"));
+}
+
+std::string CuttlefishConfig::InstanceSpecific::ap_composite_disk_path()
+    const {
+  return AbsolutePath(PerInstancePath("ap_composite.img"));
+}
+
 std::string CuttlefishConfig::InstanceSpecific::vbmeta_path() const {
   return AbsolutePath(PerInstancePath("vbmeta.img"));
 }
 
+std::string CuttlefishConfig::InstanceSpecific::ap_vbmeta_path() const {
+  return AbsolutePath(PerInstancePath("ap_vbmeta.img"));
+}
+
 std::string CuttlefishConfig::InstanceSpecific::uboot_env_image_path() const {
   return AbsolutePath(PerInstancePath("uboot_env.img"));
 }
 
+std::string CuttlefishConfig::InstanceSpecific::ap_uboot_env_image_path() const {
+  return AbsolutePath(PerInstancePath("ap_uboot_env.img"));
+}
+
+std::string CuttlefishConfig::InstanceSpecific::otheros_esp_image_path() const {
+  return AbsolutePath(PerInstancePath("esp.img"));
+}
+
+std::string CuttlefishConfig::InstanceSpecific::ap_esp_image_path() const {
+  return AbsolutePath(PerInstancePath("ap_esp.img"));
+}
+
+std::string CuttlefishConfig::InstanceSpecific::otheros_esp_grub_config() const {
+  return AbsolutePath(PerInstancePath("grub.cfg"));
+}
+
+std::string CuttlefishConfig::InstanceSpecific::ap_esp_grub_config() const {
+  return AbsolutePath(PerInstancePath("ap_grub.cfg"));
+}
+
 static constexpr char kMobileBridgeName[] = "mobile_bridge_name";
 
 std::string CuttlefishConfig::InstanceSpecific::audio_server_path() const {
-  return AbsolutePath(PerInstanceInternalPath("audio_server.sock"));
+  return AbsolutePath(PerInstanceInternalUdsPath("audio_server.sock"));
 }
 
+CuttlefishConfig::InstanceSpecific::BootFlow CuttlefishConfig::InstanceSpecific::boot_flow() const {
+  const bool linux_flow_used = !linux_kernel_path().empty()
+    || !linux_initramfs_path().empty()
+    || !linux_root_image().empty();
+
+  const bool fuchsia_flow_used = !fuchsia_zedboot_path().empty()
+    || !fuchsia_root_image().empty()
+    || !fuchsia_multiboot_bin_path().empty();
+
+  if (linux_flow_used) {
+    return BootFlow::Linux;
+  }
+  if (fuchsia_flow_used) {
+    return BootFlow::Fuchsia;
+  }
+
+  return BootFlow::Android;
+ }
+
 std::string CuttlefishConfig::InstanceSpecific::mobile_bridge_name() const {
   return (*Dictionary())[kMobileBridgeName].asString();
 }
@@ -210,16 +1077,17 @@
   (*Dictionary())[kMobileTapName] = mobile_tap_name;
 }
 
-static constexpr char kConfUiHostPort[] = "confirmation_ui_host_port";
-int CuttlefishConfig::InstanceSpecific::confui_host_vsock_port() const {
-  return (*Dictionary())[kConfUiHostPort].asInt();
+static constexpr char kMobileMac[] = "mobile_mac";
+std::string CuttlefishConfig::InstanceSpecific::mobile_mac() const {
+  return (*Dictionary())[kMobileMac].asString();
+}
+void CuttlefishConfig::MutableInstanceSpecific::set_mobile_mac(
+    const std::string& mac) {
+  (*Dictionary())[kMobileMac] = mac;
 }
 
-void CuttlefishConfig::MutableInstanceSpecific::set_confui_host_vsock_port(
-    int port) {
-  (*Dictionary())[kConfUiHostPort] = port;
-}
-
+// TODO(b/199103204): remove this as well when
+// PRODUCT_ENFORCE_MAC80211_HWSIM is removed
 static constexpr char kWifiTapName[] = "wifi_tap_name";
 std::string CuttlefishConfig::InstanceSpecific::wifi_tap_name() const {
   return (*Dictionary())[kWifiTapName].asString();
@@ -229,6 +1097,33 @@
   (*Dictionary())[kWifiTapName] = wifi_tap_name;
 }
 
+static constexpr char kWifiBridgeName[] = "wifi_bridge_name";
+std::string CuttlefishConfig::InstanceSpecific::wifi_bridge_name() const {
+  return (*Dictionary())[kWifiBridgeName].asString();
+}
+void CuttlefishConfig::MutableInstanceSpecific::set_wifi_bridge_name(
+    const std::string& wifi_bridge_name) {
+  (*Dictionary())[kWifiBridgeName] = wifi_bridge_name;
+}
+
+static constexpr char kWifiMac[] = "wifi_mac";
+std::string CuttlefishConfig::InstanceSpecific::wifi_mac() const {
+  return (*Dictionary())[kWifiMac].asString();
+}
+void CuttlefishConfig::MutableInstanceSpecific::set_wifi_mac(
+    const std::string& mac) {
+  (*Dictionary())[kWifiMac] = mac;
+}
+
+static constexpr char kUseBridgedWifiTap[] = "use_bridged_wifi_tap";
+bool CuttlefishConfig::InstanceSpecific::use_bridged_wifi_tap() const {
+  return (*Dictionary())[kUseBridgedWifiTap].asBool();
+}
+void CuttlefishConfig::MutableInstanceSpecific::set_use_bridged_wifi_tap(
+    bool use_bridged_wifi_tap) {
+  (*Dictionary())[kUseBridgedWifiTap] = use_bridged_wifi_tap;
+}
+
 static constexpr char kEthernetTapName[] = "ethernet_tap_name";
 std::string CuttlefishConfig::InstanceSpecific::ethernet_tap_name() const {
   return (*Dictionary())[kEthernetTapName].asString();
@@ -238,6 +1133,33 @@
   (*Dictionary())[kEthernetTapName] = ethernet_tap_name;
 }
 
+static constexpr char kEthernetBridgeName[] = "ethernet_bridge_name";
+std::string CuttlefishConfig::InstanceSpecific::ethernet_bridge_name() const {
+  return (*Dictionary())[kEthernetBridgeName].asString();
+}
+void CuttlefishConfig::MutableInstanceSpecific::set_ethernet_bridge_name(
+    const std::string& ethernet_bridge_name) {
+  (*Dictionary())[kEthernetBridgeName] = ethernet_bridge_name;
+}
+
+static constexpr char kEthernetMac[] = "ethernet_mac";
+std::string CuttlefishConfig::InstanceSpecific::ethernet_mac() const {
+  return (*Dictionary())[kEthernetMac].asString();
+}
+void CuttlefishConfig::MutableInstanceSpecific::set_ethernet_mac(
+    const std::string& mac) {
+  (*Dictionary())[kEthernetMac] = mac;
+}
+
+static constexpr char kEthernetIPV6[] = "ethernet_ipv6";
+std::string CuttlefishConfig::InstanceSpecific::ethernet_ipv6() const {
+  return (*Dictionary())[kEthernetIPV6].asString();
+}
+void CuttlefishConfig::MutableInstanceSpecific::set_ethernet_ipv6(
+    const std::string& ip) {
+  (*Dictionary())[kEthernetIPV6] = ip;
+}
+
 static constexpr char kUseAllocd[] = "use_allocd";
 bool CuttlefishConfig::InstanceSpecific::use_allocd() const {
   return (*Dictionary())[kUseAllocd].asBool();
@@ -281,6 +1203,14 @@
   (*Dictionary())[kHostPort] = port;
 }
 
+static constexpr char kFastbootHostPort[] = "fastboot_host_port";
+int CuttlefishConfig::InstanceSpecific::fastboot_host_port() const {
+  return (*Dictionary())[kFastbootHostPort].asInt();
+}
+void CuttlefishConfig::MutableInstanceSpecific::set_fastboot_host_port(int port) {
+  (*Dictionary())[kFastbootHostPort] = port;
+}
+
 static constexpr char kModemSimulatorId[] = "modem_simulator_host_id";
 int CuttlefishConfig::InstanceSpecific::modem_simulator_host_id() const {
   return (*Dictionary())[kModemSimulatorId].asInt();
@@ -341,14 +1271,6 @@
   (*Dictionary())[kTombstoneReceiverPort] = tombstone_receiver_port;
 }
 
-static constexpr char kVehicleHalServerPort[] = "vehicle_hal_server_port";
-int CuttlefishConfig::InstanceSpecific::vehicle_hal_server_port() const {
-  return (*Dictionary())[kVehicleHalServerPort].asInt();
-}
-void CuttlefishConfig::MutableInstanceSpecific::set_vehicle_hal_server_port(int vehicle_hal_server_port) {
-  (*Dictionary())[kVehicleHalServerPort] = vehicle_hal_server_port;
-}
-
 static constexpr char kAudioControlServerPort[] = "audiocontrol_server_port";
 int CuttlefishConfig::InstanceSpecific::audiocontrol_server_port() const {
   return (*Dictionary())[kAudioControlServerPort].asInt();
@@ -417,30 +1339,47 @@
   return (*Dictionary())[kStartRootcanal].asBool();
 }
 
-static constexpr char kStartAp[] = "start_ap";
-void CuttlefishConfig::MutableInstanceSpecific::set_start_ap(bool start) {
-  (*Dictionary())[kStartAp] = start;
+static constexpr char kStartPica[] = "start_pica";
+void CuttlefishConfig::MutableInstanceSpecific::set_start_pica(
+    bool start) {
+  (*Dictionary())[kStartPica] = start;
 }
-bool CuttlefishConfig::InstanceSpecific::start_ap() const {
-  return (*Dictionary())[kStartAp].asBool();
+bool CuttlefishConfig::InstanceSpecific::start_pica() const {
+  return (*Dictionary())[kStartPica].asBool();
+}
+
+static constexpr char kStartNetsim[] = "start_netsim";
+void CuttlefishConfig::MutableInstanceSpecific::set_start_netsim(bool start) {
+  (*Dictionary())[kStartNetsim] = start;
+}
+bool CuttlefishConfig::InstanceSpecific::start_netsim() const {
+  return (*Dictionary())[kStartNetsim].asBool();
+}
+
+static constexpr char kApBootFlow[] = "ap_boot_flow";
+void CuttlefishConfig::MutableInstanceSpecific::set_ap_boot_flow(APBootFlow flow) {
+  (*Dictionary())[kApBootFlow] = static_cast<int>(flow);
+}
+APBootFlow CuttlefishConfig::InstanceSpecific::ap_boot_flow() const {
+  return static_cast<APBootFlow>((*Dictionary())[kApBootFlow].asInt());
 }
 
 std::string CuttlefishConfig::InstanceSpecific::touch_socket_path(
     int screen_idx) const {
-  return PerInstanceInternalPath(
+  return PerInstanceInternalUdsPath(
       ("touch_" + std::to_string(screen_idx) + ".sock").c_str());
 }
 
 std::string CuttlefishConfig::InstanceSpecific::keyboard_socket_path() const {
-  return PerInstanceInternalPath("keyboard.sock");
+  return PerInstanceInternalUdsPath("keyboard.sock");
 }
 
 std::string CuttlefishConfig::InstanceSpecific::switches_socket_path() const {
-  return PerInstanceInternalPath("switches.sock");
+  return PerInstanceInternalUdsPath("switches.sock");
 }
 
 std::string CuttlefishConfig::InstanceSpecific::frames_socket_path() const {
-  return PerInstanceInternalPath("frames.sock");
+  return PerInstanceInternalUdsPath("frames.sock");
 }
 
 static constexpr char kWifiMacPrefix[] = "wifi_mac_prefix";
@@ -462,12 +1401,12 @@
 }
 
 std::string CuttlefishConfig::InstanceSpecific::PerInstancePath(
-    const char* file_name) const {
+    const std::string& file_name) const {
   return (instance_dir() + "/") + file_name;
 }
 
 std::string CuttlefishConfig::InstanceSpecific::PerInstanceInternalPath(
-    const char* file_name) const {
+    const std::string& file_name) const {
   if (file_name[0] == '\0') {
     // Don't append a / if file_name is empty.
     return PerInstancePath(kInternalDirName);
@@ -476,6 +1415,31 @@
   return PerInstancePath(relative_path.c_str());
 }
 
+std::string CuttlefishConfig::InstanceSpecific::PerInstanceUdsPath(
+    const std::string& file_name) const {
+  return (instance_uds_dir() + "/") + file_name;
+}
+
+std::string CuttlefishConfig::InstanceSpecific::PerInstanceInternalUdsPath(
+    const std::string& file_name) const {
+  if (file_name[0] == '\0') {
+    // Don't append a / if file_name is empty.
+    return PerInstanceUdsPath(kInternalDirName);
+  }
+  auto relative_path = (std::string(kInternalDirName) + "/") + file_name;
+  return PerInstanceUdsPath(relative_path.c_str());
+}
+
+std::string CuttlefishConfig::InstanceSpecific::PerInstanceGrpcSocketPath(
+    const std::string& socket_name) const {
+  if (socket_name.size() == 0) {
+    // Don't append a / if file_name is empty.
+    return PerInstanceUdsPath(kGrpcSocketDirName);
+  }
+  auto relative_path = (std::string(kGrpcSocketDirName) + "/") + socket_name;
+  return PerInstanceUdsPath(relative_path.c_str());
+}
+
 std::string CuttlefishConfig::InstanceSpecific::PerInstanceLogPath(
     const std::string& file_name) const {
   if (file_name.size() == 0) {
diff --git a/host/libs/config/data_image.cpp b/host/libs/config/data_image.cpp
index 8932c7c..45dbad8 100644
--- a/host/libs/config/data_image.cpp
+++ b/host/libs/config/data_image.cpp
@@ -1,3 +1,18 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 #include "host/libs/config/data_image.h"
 
 #include <android-base/logging.h>
@@ -7,9 +22,12 @@
 
 #include "common/libs/fs/shared_buf.h"
 #include "common/libs/utils/files.h"
+#include "common/libs/utils/network.h"
 #include "common/libs/utils/result.h"
 #include "common/libs/utils/subprocess.h"
+#include "host/libs/config/esp.h"
 #include "host/libs/config/mbr.h"
+#include "host/libs/config/openwrt_args.h"
 #include "host/libs/vm_manager/gem5_manager.h"
 
 namespace cuttlefish {
@@ -23,33 +41,12 @@
 const int FSCK_ERROR_CORRECTED = 1;
 const int FSCK_ERROR_CORRECTED_REQUIRES_REBOOT = 2;
 
-// Currently the Cuttlefish bootloaders are built only for x86 (32-bit),
-// ARM (QEMU only, 32-bit) and AArch64 (64-bit), and U-Boot will hard-code
-// these search paths. Install all bootloaders to one of these paths.
-// NOTE: For now, just ignore the 32-bit ARM version, as Debian doesn't
-//       build an EFI monolith for this architecture.
-const std::string kBootPathIA32 = "EFI/BOOT/BOOTIA32.EFI";
-const std::string kBootPathAA64 = "EFI/BOOT/BOOTAA64.EFI";
-const std::string kM5 = "";
-
-// These are the paths Debian installs the monoliths to. If another distro
-// uses an alternative monolith path, add it to this table
-const std::pair<std::string, std::string> kGrubBlobTable[] = {
-    {"/usr/lib/grub/i386-efi/monolithic/grubia32.efi", kBootPathIA32},
-    {"/usr/lib/grub/arm64-efi/monolithic/grubaa64.efi", kBootPathAA64},
-};
-
-// M5 checkpoint required binary file
-const std::pair<std::string, std::string> kM5BlobTable[] = {
-    {"/tmp/m5", kM5},
-};
-
-bool ForceFsckImage(const CuttlefishConfig& config,
-                    const std::string& data_image) {
+bool ForceFsckImage(const std::string& data_image,
+                    const CuttlefishConfig::InstanceSpecific& instance) {
   std::string fsck_path;
-  if (config.userdata_format() == "f2fs") {
+  if (instance.userdata_format() == "f2fs") {
     fsck_path = HostBinaryPath("fsck.f2fs");
-  } else if (config.userdata_format() == "ext4") {
+  } else if (instance.userdata_format() == "ext4") {
     fsck_path = "/sbin/e2fsck";
   }
   int fsck_status = execute({fsck_path, "-y", "-f", data_image});
@@ -61,39 +58,8 @@
   return true;
 }
 
-bool NewfsMsdos(const std::string& data_image, int data_image_mb,
-                int offset_num_mb) {
-  off_t image_size_bytes = static_cast<off_t>(data_image_mb) << 20;
-  off_t offset_size_bytes = static_cast<off_t>(offset_num_mb) << 20;
-  image_size_bytes -= offset_size_bytes;
-  off_t image_size_sectors = image_size_bytes / 512;
-  auto newfs_msdos_path = HostBinaryPath("newfs_msdos");
-  return execute({newfs_msdos_path,
-                         "-F",
-                         "32",
-                         "-m",
-                         "0xf8",
-                         "-o",
-                         "0",
-                         "-c",
-                         "8",
-                         "-h",
-                         "255",
-                         "-u",
-                         "63",
-                         "-S",
-                         "512",
-                         "-s",
-                         std::to_string(image_size_sectors),
-                         "-C",
-                         std::to_string(data_image_mb) + "M",
-                         "-@",
-                         std::to_string(offset_size_bytes),
-                         data_image}) == 0;
-}
-
-bool ResizeImage(const CuttlefishConfig& config, const std::string& data_image,
-                 int data_image_mb) {
+bool ResizeImage(const std::string& data_image, int data_image_mb,
+                 const CuttlefishConfig::InstanceSpecific& instance) {
   auto file_mb = FileSize(data_image) >> 20;
   if (file_mb > data_image_mb) {
     LOG(ERROR) << data_image << " is already " << file_mb << " MB, will not "
@@ -110,14 +76,14 @@
                   << data_image << "` failed:" << fd->StrError();
       return false;
     }
-    bool fsck_success = ForceFsckImage(config, data_image);
+    bool fsck_success = ForceFsckImage(data_image, instance);
     if (!fsck_success) {
       return false;
     }
     std::string resize_path;
-    if (config.userdata_format() == "f2fs") {
+    if (instance.userdata_format() == "f2fs") {
       resize_path = HostBinaryPath("resize.f2fs");
-    } else if (config.userdata_format() == "ext4") {
+    } else if (instance.userdata_format() == "ext4") {
       resize_path = "/sbin/resize2fs";
     }
     int resize_status = execute({resize_path, data_image});
@@ -126,7 +92,7 @@
                  << resize_status;
       return false;
     }
-    fsck_success = ForceFsckImage(config, data_image);
+    fsck_success = ForceFsckImage(data_image, instance);
     if (!fsck_success) {
       return false;
     }
@@ -156,9 +122,9 @@
       return false;
     }
   } else if (image_fmt == "f2fs") {
-    auto make_f2fs_path = cuttlefish::HostBinaryPath("make_f2fs");
-    if (execute({make_f2fs_path, "-t", image_fmt, image, "-C", "utf8", "-O",
-             "compression,extra_attr,project_quota", "-g", "android"}) != 0) {
+    auto make_f2fs_path = HostBinaryPath("make_f2fs");
+    if (execute({make_f2fs_path, "-l", "data", image, "-C", "utf8", "-O",
+     "compression,extra_attr,project_quota,casefold", "-g", "android"}) != 0) {
       return false;
     }
   } else if (image_fmt == "sdcard") {
@@ -218,31 +184,11 @@
   return fs_type;
 }
 
-struct DataImageTag {};
-
-class FixedDataImagePath : public DataImagePath {
- public:
-  INJECT(FixedDataImagePath(ANNOTATED(DataImageTag, std::string) path))
-      : path_(path) {}
-
-  const std::string& Path() const override { return path_; }
-
- private:
-  std::string path_;
-};
-
-fruit::Component<DataImagePath> FixedDataImagePathComponent(
-    const std::string* path) {
-  return fruit::createComponent()
-      .bind<DataImagePath, FixedDataImagePath>()
-      .bindInstance<fruit::Annotated<DataImageTag, std::string>>(*path);
-}
-
 class InitializeDataImageImpl : public InitializeDataImage {
  public:
-  INJECT(InitializeDataImageImpl(const CuttlefishConfig& config,
-                                 DataImagePath& data_path))
-      : config_(config), data_path_(data_path) {}
+  INJECT(InitializeDataImageImpl(
+      const CuttlefishConfig::InstanceSpecific& instance))
+      : instance_(instance) {}
 
   // SetupFeature
   std::string Name() const override { return "InitializeDataImageImpl"; }
@@ -254,12 +200,17 @@
     auto action = ChooseAction();
     if (!action.ok()) {
       LOG(ERROR) << "Failed to select a userdata processing action: "
-                 << action.error();
+                 << action.error().Message();
+      LOG(DEBUG) << "Failed to select a userdata processing action: "
+                 << action.error().Trace();
       return false;
     }
     auto result = EvaluateAction(*action);
     if (!result.ok()) {
-      LOG(ERROR) << "Failed to evaluate userdata action: " << result.error();
+      LOG(ERROR) << "Failed to evaluate userdata action: "
+                 << result.error().Message();
+      LOG(DEBUG) << "Failed to evaluate userdata action: "
+                 << result.error().Trace();
       return false;
     }
     return true;
@@ -269,26 +220,31 @@
   enum class DataImageAction { kNoAction, kCreateImage, kResizeImage };
 
   Result<DataImageAction> ChooseAction() {
-    if (config_.data_policy() == kDataPolicyAlwaysCreate) {
+    if (instance_.data_policy() == kDataPolicyAlwaysCreate) {
       return DataImageAction::kCreateImage;
     }
-    if (!FileHasContent(data_path_.Path())) {
-      if (config_.data_policy() == kDataPolicyUseExisting) {
+    if (!FileHasContent(instance_.data_image())) {
+      if (instance_.data_policy() == kDataPolicyUseExisting) {
         return CF_ERR("A data image must exist to use -data_policy="
                       << kDataPolicyUseExisting);
-      } else if (config_.data_policy() == kDataPolicyResizeUpTo) {
-        return CF_ERR(data_path_.Path()
+      } else if (instance_.data_policy() == kDataPolicyResizeUpTo) {
+        return CF_ERR(instance_.data_image()
                       << " does not exist, but resizing was requested");
       }
       return DataImageAction::kCreateImage;
     }
-    if (GetFsType(data_path_.Path()) != config_.userdata_format()) {
-      CF_EXPECT(config_.data_policy() == kDataPolicyResizeUpTo,
+    if (instance_.data_policy() == kDataPolicyUseExisting) {
+      return DataImageAction::kNoAction;
+    }
+    auto current_fs_type = GetFsType(instance_.data_image());
+    if (current_fs_type != instance_.userdata_format()) {
+      CF_EXPECT(instance_.data_policy() != kDataPolicyResizeUpTo,
                 "Changing the fs format is incompatible with -data_policy="
-                    << kDataPolicyResizeUpTo);
+                    << kDataPolicyResizeUpTo << " (\"" << current_fs_type
+                    << "\" != \"" << instance_.userdata_format() << "\")");
       return DataImageAction::kCreateImage;
     }
-    if (config_.data_policy() == kDataPolicyResizeUpTo) {
+    if (instance_.data_policy() == kDataPolicyResizeUpTo) {
       return DataImageAction::kResizeImage;
     }
     return DataImageAction::kNoAction;
@@ -297,41 +253,40 @@
   Result<void> EvaluateAction(DataImageAction action) {
     switch (action) {
       case DataImageAction::kNoAction:
-        LOG(DEBUG) << data_path_.Path() << " exists. Not creating it.";
+        LOG(DEBUG) << instance_.data_image() << " exists. Not creating it.";
         return {};
       case DataImageAction::kCreateImage: {
-        RemoveFile(data_path_.Path());
-        CF_EXPECT(config_.blank_data_image_mb() != 0,
+        RemoveFile(instance_.data_image());
+        CF_EXPECT(instance_.blank_data_image_mb() != 0,
                   "Expected `-blank_data_image_mb` to be set for "
                   "image creation.");
-        CF_EXPECT(
-            CreateBlankImage(data_path_.Path(), config_.blank_data_image_mb(),
-                             config_.userdata_format()),
-            "Failed to create a blank image at \""
-                << data_path_.Path() << "\" with size "
-                << config_.blank_data_image_mb() << " and format \""
-                << config_.userdata_format() << "\"");
+        CF_EXPECT(CreateBlankImage(instance_.data_image(),
+                                   instance_.blank_data_image_mb(),
+                                   instance_.userdata_format()),
+                  "Failed to create a blank image at \""
+                      << instance_.data_image() << "\" with size "
+                      << instance_.blank_data_image_mb() << " and format \""
+                      << instance_.userdata_format() << "\"");
         return {};
       }
       case DataImageAction::kResizeImage: {
-        CF_EXPECT(config_.blank_data_image_mb() != 0,
+        CF_EXPECT(instance_.blank_data_image_mb() != 0,
                   "Expected `-blank_data_image_mb` to be set for "
                   "image resizing.");
-        CF_EXPECT(ResizeImage(config_, data_path_.Path(),
-                              config_.blank_data_image_mb()),
-                  "Failed to resize \"" << data_path_.Path() << "\" to "
-                                        << config_.blank_data_image_mb()
+        CF_EXPECT(ResizeImage(instance_.data_image(),
+                              instance_.blank_data_image_mb(), instance_),
+                  "Failed to resize \"" << instance_.data_image() << "\" to "
+                                        << instance_.blank_data_image_mb()
                                         << " MB");
         return {};
       }
     }
   }
 
-  const CuttlefishConfig& config_;
-  DataImagePath& data_path_;
+  const CuttlefishConfig::InstanceSpecific& instance_;
 };
 
-fruit::Component<fruit::Required<const CuttlefishConfig, DataImagePath>,
+fruit::Component<fruit::Required<const CuttlefishConfig::InstanceSpecific>,
                  InitializeDataImage>
 InitializeDataImageComponent() {
   return fruit::createComponent()
@@ -339,23 +294,11 @@
       .bind<InitializeDataImage, InitializeDataImageImpl>();
 }
 
-struct MiscImageTag {};
-
-class FixedMiscImagePath : public MiscImagePath {
- public:
-  INJECT(FixedMiscImagePath(ANNOTATED(MiscImageTag, std::string) path))
-      : path_(path) {}
-
-  const std::string& Path() const override { return path_; }
-
- private:
-  std::string path_;
-};
-
 class InitializeMiscImageImpl : public InitializeMiscImage {
  public:
-  INJECT(InitializeMiscImageImpl(MiscImagePath& misc_path))
-      : misc_path_(misc_path) {}
+  INJECT(InitializeMiscImageImpl(
+      const CuttlefishConfig::InstanceSpecific& instance))
+      : instance_(instance) {}
 
   // SetupFeature
   std::string Name() const override { return "InitializeMiscImageImpl"; }
@@ -364,17 +307,17 @@
  private:
   std::unordered_set<SetupFeature*> Dependencies() const override { return {}; }
   bool Setup() override {
-    bool misc_exists = FileHasContent(misc_path_.Path());
+    bool misc_exists = FileHasContent(instance_.misc_image());
 
     if (misc_exists) {
       LOG(DEBUG) << "misc partition image: use existing at \""
-                 << misc_path_.Path() << "\"";
+                 << instance_.misc_image() << "\"";
       return true;
     }
 
     LOG(DEBUG) << "misc partition image: creating empty at \""
-               << misc_path_.Path() << "\"";
-    if (!CreateBlankImage(misc_path_.Path(), 1 /* mb */, "none")) {
+               << instance_.misc_image() << "\"";
+    if (!CreateBlankImage(instance_.new_misc_image(), 1 /* mb */, "none")) {
       LOG(ERROR) << "Failed to create misc image";
       return false;
     }
@@ -382,185 +325,147 @@
   }
 
  private:
-  MiscImagePath& misc_path_;
+  const CuttlefishConfig::InstanceSpecific& instance_;
 };
 
-fruit::Component<MiscImagePath> FixedMiscImagePathComponent(
-    const std::string* path) {
-  return fruit::createComponent()
-      .bind<MiscImagePath, FixedMiscImagePath>()
-      .bindInstance<fruit::Annotated<MiscImageTag, std::string>>(*path);
-}
-
-fruit::Component<fruit::Required<MiscImagePath>, InitializeMiscImage>
+fruit::Component<fruit::Required<const CuttlefishConfig::InstanceSpecific>,
+                 InitializeMiscImage>
 InitializeMiscImageComponent() {
   return fruit::createComponent()
       .addMultibinding<SetupFeature, InitializeMiscImage>()
       .bind<InitializeMiscImage, InitializeMiscImageImpl>();
 }
 
-struct EspImageTag {};
-struct KernelPathTag {};
-struct InitRamFsTag {};
-struct RootFsTag {};
-struct ConfigTag {};
-
 class InitializeEspImageImpl : public InitializeEspImage {
  public:
-  INJECT(InitializeEspImageImpl(ANNOTATED(EspImageTag, std::string) esp_image,
-                                ANNOTATED(KernelPathTag, std::string)
-                                    kernel_path,
-                                ANNOTATED(InitRamFsTag, std::string)
-                                    initramfs_path,
-                                ANNOTATED(RootFsTag, std::string) rootfs_path,
-                                ANNOTATED(ConfigTag, const CuttlefishConfig *) config))
-      : esp_image_(esp_image),
-        kernel_path_(kernel_path),
-        initramfs_path_(initramfs_path),
-        rootfs_path_(rootfs_path),
-        config_(config){}
+  INJECT(InitializeEspImageImpl(
+      const CuttlefishConfig& config,
+      const CuttlefishConfig::InstanceSpecific& instance))
+      : config_(config), instance_(instance) {}
 
   // SetupFeature
   std::string Name() const override { return "InitializeEspImageImpl"; }
   std::unordered_set<SetupFeature*> Dependencies() const override { return {}; }
-  bool Enabled() const override { return !rootfs_path_.empty(); }
+
+  bool Enabled() const override {
+    return EspRequiredForBootFlow() || EspRequiredForAPBootFlow();
+  }
 
  protected:
   bool Setup() override {
-    bool esp_exists = FileHasContent(esp_image_);
-    if (esp_exists) {
-      LOG(DEBUG) << "esp partition image: use existing";
-      return true;
-    }
-
-    LOG(DEBUG) << "esp partition image: creating default";
-
-    // newfs_msdos won't make a partition smaller than 257 mb
-    // this should be enough for anybody..
-    auto tmp_esp_image = esp_image_ + ".tmp";
-    if (!NewfsMsdos(tmp_esp_image, 257 /* mb */, 0 /* mb (offset) */)) {
-      LOG(ERROR) << "Failed to create filesystem for " << tmp_esp_image;
-      return false;
-    }
-
-    // For licensing and build reproducibility reasons, pick up the bootloaders
-    // from the host Linux distribution (if present) and pack them into the
-    // automatically generated ESP. If the user wants their own bootloaders,
-    // they can use -esp_image=/path/to/esp.img to override, so we don't need
-    // to accommodate customizations of this packing process.
-
-    int success;
-    const std::pair<std::string, std::string> *kBlobTable;
-    std::size_t size;
-    // Skip GRUB on Gem5
-    if (config_->vm_manager() != vm_manager::Gem5Manager::name()){
-      // Currently we only support Debian based distributions, and GRUB is built
-      // for those distros to always load grub.cfg from EFI/debian/grub.cfg, and
-      // nowhere else. If you want to add support for other distros, make the
-      // extra directories below and copy the initial grub.cfg there as well
-      auto mmd = HostBinaryPath("mmd");
-      success =
-          execute({mmd, "-i", tmp_esp_image, "EFI", "EFI/BOOT", "EFI/debian"});
-      if (success != 0) {
-        LOG(ERROR) << "Failed to create directories in " << tmp_esp_image;
+    if (EspRequiredForAPBootFlow()) {
+      LOG(DEBUG) << "creating esp_image: " << instance_.ap_esp_image_path();
+      if (!BuildAPImage()) {
         return false;
       }
-      size = sizeof(kGrubBlobTable)/sizeof(const std::pair<std::string, std::string>);
-      kBlobTable = kGrubBlobTable;
-    } else {
-      size = sizeof(kM5BlobTable)/sizeof(const std::pair<std::string, std::string>);
-      kBlobTable = kM5BlobTable;
     }
-
-    // The grub binaries are small, so just copy all the architecture blobs
-    // we can find, which minimizes complexity. If the user removed the grub bin
-    // package from their system, the ESP will be empty and Other OS will not be
-    // supported
-    auto mcopy = HostBinaryPath("mcopy");
-    bool copied = false;
-    for (int i=0; i<size; i++) {
-      auto grub = kBlobTable[i];
-      if (!FileExists(grub.first)) {
-        continue;
-      }
-      success = execute({mcopy, "-o", "-i", tmp_esp_image, "-s", grub.first,
-                         "::" + grub.second});
-      if (success != 0) {
-        LOG(ERROR) << "Failed to copy " << grub.first << " to " << grub.second
-                   << " in " << tmp_esp_image;
-        return false;
-      }
-      copied = true;
-    }
-
-    if (!copied) {
-      LOG(ERROR) << "Binary dependencies were not found on this system; Other OS "
-                    "support will be broken";
-      return false;
-    }
-
-    // Skip Gem5 case. Gem5 will never be able to use bootloaders like grub.
-    if (config_->vm_manager() != vm_manager::Gem5Manager::name()){
-      auto grub_cfg = DefaultHostArtifactsPath("etc/grub/grub.cfg");
-      CHECK(FileExists(grub_cfg)) << "Missing file " << grub_cfg << "!";
-      success =
-          execute({mcopy, "-i", tmp_esp_image, "-s", grub_cfg, "::EFI/debian/"});
-      if (success != 0) {
-        LOG(ERROR) << "Failed to copy " << grub_cfg << " to " << tmp_esp_image;
+    const auto is_not_gem5 = config_.vm_manager() != vm_manager::Gem5Manager::name();
+    const auto esp_required_for_boot_flow = EspRequiredForBootFlow();
+    if (is_not_gem5 && esp_required_for_boot_flow) {
+      LOG(DEBUG) << "creating esp_image: " << instance_.otheros_esp_image_path();
+      if (!BuildOSImage()) {
         return false;
       }
     }
 
-    if (!kernel_path_.empty()) {
-      success = execute(
-          {mcopy, "-i", tmp_esp_image, "-s", kernel_path_, "::vmlinuz"});
-      if (success != 0) {
-        LOG(ERROR) << "Failed to copy " << kernel_path_ << " to "
-                   << tmp_esp_image;
-        return false;
-      }
-
-      if (!initramfs_path_.empty()) {
-        success = execute({mcopy, "-i", tmp_esp_image, "-s", initramfs_path_,
-                           "::initrd.img"});
-        if (success != 0) {
-          LOG(ERROR) << "Failed to copy " << initramfs_path_ << " to "
-                     << tmp_esp_image;
-          return false;
-        }
-      }
-    }
-
-    if (!cuttlefish::RenameFile(tmp_esp_image, esp_image_)) {
-      LOG(ERROR) << "Renaming " << tmp_esp_image << " to " << esp_image_
-                 << " failed";
-      return false;
-    }
     return true;
   }
 
  private:
-  std::string esp_image_;
-  std::string kernel_path_;
-  std::string initramfs_path_;
-  std::string rootfs_path_;
-  const CuttlefishConfig* config_;
+
+  bool EspRequiredForBootFlow() const {
+    const auto flow = instance_.boot_flow();
+    return flow == CuttlefishConfig::InstanceSpecific::BootFlow::Linux ||
+        flow == CuttlefishConfig::InstanceSpecific::BootFlow::Fuchsia;
+  }
+
+  bool EspRequiredForAPBootFlow() const {
+    return instance_.ap_boot_flow() == CuttlefishConfig::InstanceSpecific::APBootFlow::Grub;
+  }
+
+  bool BuildAPImage() {
+    auto linux = LinuxEspBuilder(instance_.ap_esp_image_path());
+    InitLinuxArgs(linux);
+
+    auto openwrt_args = OpenwrtArgsFromConfig(instance_);
+    for (auto& openwrt_arg : openwrt_args) {
+      linux.Argument(openwrt_arg.first, openwrt_arg.second);
+    }
+
+    linux.Root("/dev/vda2")
+         .Architecture(instance_.target_arch())
+         .Kernel(config_.ap_kernel_image());
+
+    return linux.Build();
+  }
+
+  bool BuildOSImage() {
+    switch (instance_.boot_flow()) {
+      case CuttlefishConfig::InstanceSpecific::BootFlow::Linux: {
+        auto linux = LinuxEspBuilder(instance_.otheros_esp_image_path());
+        InitLinuxArgs(linux);
+
+        linux.Root("/dev/vda2")
+             .Architecture(instance_.target_arch())
+             .Kernel(instance_.linux_kernel_path());
+
+        if (!instance_.linux_initramfs_path().empty()) {
+          linux.Initrd(instance_.linux_initramfs_path());
+        }
+
+        return linux.Build();
+      }
+      case CuttlefishConfig::InstanceSpecific::BootFlow::Fuchsia: {
+        auto fuchsia = FuchsiaEspBuilder(instance_.otheros_esp_image_path());
+        return fuchsia.Architecture(instance_.target_arch())
+                      .Zedboot(instance_.fuchsia_zedboot_path())
+                      .MultibootBinary(instance_.fuchsia_multiboot_bin_path())
+                      .Build();
+      }
+      default:
+        break;
+    }
+
+    return true;
+  }
+
+  void InitLinuxArgs(LinuxEspBuilder& linux) {
+    linux.Root("/dev/vda2");
+
+    linux.Argument("console", "hvc0")
+         .Argument("panic", "-1")
+         .Argument("noefi");
+
+    switch (instance_.target_arch()) {
+      case Arch::Arm:
+      case Arch::Arm64:
+        linux.Argument("console", "ttyAMA0");
+        break;
+      case Arch::RiscV64:
+        linux.Argument("console", "ttyS0");
+        break;
+      case Arch::X86:
+      case Arch::X86_64:
+        linux.Argument("console", "ttyS0")
+             .Argument("pnpacpi", "off")
+             .Argument("acpi", "noirq")
+             .Argument("reboot", "k")
+             .Argument("noexec", "off");
+        break;
+    }
+  }
+
+  const CuttlefishConfig& config_;
+  const CuttlefishConfig::InstanceSpecific& instance_;
 };
 
-fruit::Component<fruit::Required<const CuttlefishConfig>,
-    InitializeEspImage> InitializeEspImageComponent(
-    const std::string* esp_image, const std::string* kernel_path,
-    const std::string* initramfs_path, const std::string* rootfs_path,
-    const CuttlefishConfig* config) {
+fruit::Component<fruit::Required<const CuttlefishConfig,
+                                 const CuttlefishConfig::InstanceSpecific>,
+                 InitializeEspImage>
+InitializeEspImageComponent() {
   return fruit::createComponent()
       .addMultibinding<SetupFeature, InitializeEspImage>()
-      .bind<InitializeEspImage, InitializeEspImageImpl>()
-      .bindInstance<fruit::Annotated<EspImageTag, std::string>>(*esp_image)
-      .bindInstance<fruit::Annotated<KernelPathTag, std::string>>(*kernel_path)
-      .bindInstance<fruit::Annotated<InitRamFsTag, std::string>>(
-          *initramfs_path)
-      .bindInstance<fruit::Annotated<RootFsTag, std::string>>(*rootfs_path)
-      .bindInstance<fruit::Annotated<ConfigTag, CuttlefishConfig>>(*config);
+      .bind<InitializeEspImage, InitializeEspImageImpl>();
 }
 
 } // namespace cuttlefish
diff --git a/host/libs/config/data_image.h b/host/libs/config/data_image.h
index 0ae9fcc..aa608d7 100644
--- a/host/libs/config/data_image.h
+++ b/host/libs/config/data_image.h
@@ -1,7 +1,22 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 #pragma once
 
 #include <string>
-//
+
 #include <fruit/fruit.h>
 
 #include "host/libs/config/cuttlefish_config.h"
@@ -9,42 +24,26 @@
 
 namespace cuttlefish {
 
-class DataImagePath {
- public:
-  virtual ~DataImagePath() = default;
-  virtual const std::string& Path() const = 0;
-};
-
 class InitializeDataImage : public SetupFeature {};
 
-fruit::Component<DataImagePath> FixedDataImagePathComponent(
-    const std::string* path);
-fruit::Component<fruit::Required<const CuttlefishConfig, DataImagePath>,
+fruit::Component<fruit::Required<const CuttlefishConfig::InstanceSpecific>,
                  InitializeDataImage>
 InitializeDataImageComponent();
 
 class InitializeEspImage : public SetupFeature {};
 
-fruit::Component<fruit::Required<const CuttlefishConfig>,
-    InitializeEspImage> InitializeEspImageComponent(
-    const std::string* esp_image, const std::string* kernel_path,
-    const std::string* initramfs_path, const std::string* root_fs,
-    const CuttlefishConfig* config);
+fruit::Component<fruit::Required<const CuttlefishConfig,
+                                 const CuttlefishConfig::InstanceSpecific>,
+                 InitializeEspImage>
+InitializeEspImageComponent();
 
 bool CreateBlankImage(
     const std::string& image, int num_mb, const std::string& image_fmt);
 
-class MiscImagePath {
- public:
-  virtual ~MiscImagePath() = default;
-  virtual const std::string& Path() const = 0;
-};
-
 class InitializeMiscImage : public SetupFeature {};
 
-fruit::Component<MiscImagePath> FixedMiscImagePathComponent(
-    const std::string* path);
-fruit::Component<fruit::Required<MiscImagePath>, InitializeMiscImage>
+fruit::Component<fruit::Required<const CuttlefishConfig::InstanceSpecific>,
+                 InitializeMiscImage>
 InitializeMiscImageComponent();
 
 } // namespace cuttlefish
diff --git a/host/libs/config/esp.cpp b/host/libs/config/esp.cpp
new file mode 100644
index 0000000..12241eb
--- /dev/null
+++ b/host/libs/config/esp.cpp
@@ -0,0 +1,415 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include <sstream>
+
+#include "host/libs/config/esp.h"
+#include "common/libs/fs/shared_buf.h"
+#include "common/libs/utils/subprocess.h"
+#include "common/libs/utils/files.h"
+#include "host/libs/config/cuttlefish_config.h"
+
+namespace cuttlefish {
+
+bool NewfsMsdos(const std::string& data_image, int data_image_mb,
+                int offset_num_mb) {  // both sizes are in MiB
+  off_t image_size_bytes = static_cast<off_t>(data_image_mb) << 20;  // MiB -> bytes
+  off_t offset_size_bytes = static_cast<off_t>(offset_num_mb) << 20;  // MiB -> bytes
+  image_size_bytes -= offset_size_bytes;  // filesystem size excludes the leading offset
+  off_t image_size_sectors = image_size_bytes / 512;  // 512-byte sectors, matches -S below
+  auto newfs_msdos_path = HostBinaryPath("newfs_msdos");
+  return execute({newfs_msdos_path,
+                  "-F",
+                  "32",
+                  "-m",
+                  "0xf8",
+                  "-o",
+                  "0",
+                  "-c",
+                  "8",
+                  "-h",
+                  "255",
+                  "-u",
+                  "63",
+                  "-S",
+                  "512",
+                  "-s",
+                  std::to_string(image_size_sectors),
+                  "-C",
+                  std::to_string(data_image_mb) + "M",
+                  "-@",
+                  std::to_string(offset_size_bytes),  // start of the filesystem, in bytes
+                  data_image}) == 0;  // exit status 0 == success
+}
+
+bool CanGenerateEsp(Arch arch) {  // true if this host can produce an ESP for |arch|
+  switch (arch) {
+    case Arch::Arm:
+    case Arch::Arm64:
+    case Arch::RiscV64:
+      // TODO(b/260960328) : Migrate openwrt image for arm64 into
+      // APBootFlow::Grub.
+      return false;
+    case Arch::X86:
+    case Arch::X86_64: {
+      const auto x86_modules = std::string(kGrubModulesPath) + std::string(kGrubModulesX86Name);  // /usr/lib/grub/i386-efi
+      const auto modules_presented = all_of(kGrubModulesX86.begin(), kGrubModulesX86.end(),
+                                            [&](const std::string& m) {
+                                              return FileExists(x86_modules + m);
+                                            });
+      if (modules_presented) return true;  // can build the EFI binary via grub-mkimage
+
+      const auto monolith_presented = FileExists(kBootSrcPathIA32);  // distro-provided monolith fallback
+      return monolith_presented;
+    }
+  }
+
+  return false;
+}
+
+bool MsdosMakeDirectories(const std::string& image_path,
+                          const std::vector<std::string>& directories) {
+  auto mmd = HostBinaryPath("mmd");  // mtools: create directories inside a FAT image
+  std::vector<std::string> command {mmd, "-i", image_path};
+  command.insert(command.end(), directories.begin(), directories.end());
+
+  const auto success = execute(command);  // exit status; 0 means success
+  if (success != 0) {
+    return false;
+  }
+  return true;
+}
+
+bool CopyToMsdos(const std::string& image, const std::string& path,
+                 const std::string& destination) {  // destination uses mtools "::path" syntax at call sites
+  const auto mcopy = HostBinaryPath("mcopy");  // mtools copy into a FAT image
+  const auto success = execute({mcopy, "-o", "-i", image, "-s", path, destination});  // -o overwrite, -s recursive
+  if (success != 0) {
+    return false;
+  }
+  return true;
+}
+
+bool GrubMakeImage(const std::string& prefix, const std::string& format,
+                   const std::string& directory, const std::string& output,
+                   std::vector<std::string> modules) {  // builds a standalone GRUB EFI binary
+  std::vector<std::string> command = {"grub-mkimage", "--prefix", prefix,
+                                      "--format", format, "--directory", directory,
+                                      "--output", output};
+  std::move(modules.begin(), modules.end(), std::back_inserter(command));  // modules are positional args
+
+  const auto success = execute(command);  // note: relies on grub-mkimage being on PATH
+  return success == 0;
+}
+
+class EspBuilder final {  // queues dirs/files, then materializes them into a fresh FAT image
+ public:
+  EspBuilder() {}  // merge-source only: Build() rejects an empty image path
+  EspBuilder(std::string image_path): image_path_(std::move(image_path)) {}
+
+  EspBuilder& File(std::string from, std::string to, bool required) & {
+    files_.push_back(FileToAdd {std::move(from), std::move(to), required});  // copied at Build() time
+    return *this;
+  }
+
+  EspBuilder& File(std::string from, std::string to) & {
+    return File(std::move(from), std::move(to), false);  // optional: missing |from| is skipped
+  }
+
+  EspBuilder& Directory(std::string path) & {
+    directories_.push_back(std::move(path));
+    return *this;
+  }
+
+  EspBuilder& Merge(EspBuilder builder) & {  // steals the other builder's queued entries
+    std::move(builder.directories_.begin(), builder.directories_.end(),
+              std::back_inserter(directories_));
+    std::move(builder.files_.begin(), builder.files_.end(),
+              std::back_inserter(files_));
+    return *this;
+  }
+
+  bool Build() {
+    if (image_path_.empty()) {
+      LOG(ERROR) << "Image path is required to build ESP. Empty constructor is intended to "
+                 << "be used only for the merge functionality";
+      return false;
+    }
+
+    // newfs_msdos won't make a partition smaller than 257 mb
+    // this should be enough for anybody..
+    const auto tmp_esp_image = image_path_ + ".tmp";  // staged next to the final image
+    if (!NewfsMsdos(tmp_esp_image, 257 /* mb */, 0 /* mb (offset) */)) {
+      LOG(ERROR) << "Failed to create filesystem for " << tmp_esp_image;
+      return false;
+    }
+
+    if (!MsdosMakeDirectories(tmp_esp_image, directories_)) {
+      LOG(ERROR) << "Failed to create directories in " << tmp_esp_image;
+      return false;
+    }
+
+    for (const FileToAdd& file : files_) {
+      if (!FileExists(file.from)) {
+        if (file.required) {
+          LOG(ERROR) << "Failed to copy " << file.from << " to " << tmp_esp_image
+                    << ": File does not exist";
+          return false;
+        }
+        continue;  // optional files are silently skipped
+      }
+
+      if (!CopyToMsdos(tmp_esp_image, file.from, "::" + file.to)) {
+        LOG(ERROR) << "Failed to copy " << file.from << " to " << tmp_esp_image
+                  << ": mcopy execution failed";
+        return false;
+      }
+    }
+
+    if (!RenameFile(tmp_esp_image, image_path_).ok()) {  // publish under the final name
+      LOG(ERROR) << "Renaming " << tmp_esp_image << " to "
+                  << image_path_ << " failed";
+      return false;
+    }
+
+    return true;
+  }
+
+ private:
+  const std::string image_path_;  // empty for merge-only builders
+
+  struct FileToAdd {
+    std::string from;  // host path
+    std::string to;    // path inside the image
+    bool required;     // if true, Build() fails when |from| is absent
+  };
+  std::vector<std::string> directories_;
+  std::vector<FileToAdd> files_;
+};
+
+// Seeds an EspBuilder with the standard ESP layout and the per-arch
+// bootloader binaries. Callers Merge() a grub config in afterwards.
+EspBuilder PrepareESP(const std::string& image_path, Arch arch) {
+  auto builder = EspBuilder(image_path);
+  builder.Directory("EFI")
+         .Directory("EFI/BOOT")
+         .Directory("EFI/modules");
+
+  const auto efi_path = image_path + ".efi";  // scratch grub-mkimage output (x86 only)
+  switch (arch) {
+    case Arch::Arm:
+    case Arch::Arm64:
+      builder.File(kBootSrcPathAA64, kBootDestPathAA64, /* required */ true);
+      // Optional on arm64: the deb package does not ship the multiboot
+      // module there, so Fuchsia is not supported on arm64.
+      builder.File(kMultibootModuleSrcPathAA64, kMultibootModuleDestPathAA64,
+                    /* required */ false);
+      break;
+    case Arch::RiscV64:
+      // FIXME: Implement
+      break;
+    case Arch::X86:
+    case Arch::X86_64: {
+      const auto x86_modules = std::string(kGrubModulesPath) + std::string(kGrubModulesX86Name);
+
+      if (GrubMakeImage(kGrubConfigDestDirectoryPath, kGrubModulesX86Name,
+                        x86_modules, efi_path, kGrubModulesX86)) {
+        LOG(INFO) << "Loading grub_mkimage generated EFI binary";
+        builder.File(efi_path, kBootDestPathIA32, /* required */ true);
+      } else {
+        LOG(INFO) << "Loading prebuilt monolith EFI binary";
+        builder.File(kBootSrcPathIA32, kBootDestPathIA32, /* required */ true);
+        builder.File(kMultibootModuleSrcPathIA32, kMultibootModuleDestPathIA32,
+                     /* required */ true);
+      }
+      break;
+    }
+  }
+
+  return builder;  // plain return enables NRVO; std::move would force a copy (const member)
+}
+
+// TODO(b/260338443, b/260337906) remove ubuntu and debian variations
+// after migrating to grub-mkimage or adding grub binaries as a prebuilt
+EspBuilder AddGrubConfig(const std::string& config) {
+  auto builder = EspBuilder();  // merge-only builder, consumed via EspBuilder::Merge()
+
+  builder.Directory("boot")
+         .Directory("EFI/debian")
+         .Directory("EFI/ubuntu")
+         .Directory("boot/grub");
+
+  builder.File(config, kGrubDebianConfigDestPath, /*required*/ true)  // same config at every path a distro GRUB probes
+         .File(config, kGrubUbuntuConfigDestPath, /*required*/ true)
+         .File(config, kGrubConfigDestPath, /*required*/ true);
+
+  return builder;
+}
+
+LinuxEspBuilder& LinuxEspBuilder::Argument(std::string key, std::string value) & {
+  arguments_.push_back({std::move(key), std::move(value)});
+  return *this;
+}
+
+LinuxEspBuilder& LinuxEspBuilder::Argument(std::string value) & {
+  single_arguments_.push_back(std::move(value));
+  return *this;
+}
+
+LinuxEspBuilder& LinuxEspBuilder::Root(std::string root) & {
+  root_ = std::move(root);
+  return *this;
+}
+
+LinuxEspBuilder& LinuxEspBuilder::Kernel(std::string kernel) & {
+  kernel_ = std::move(kernel);
+  return *this;
+}
+
+LinuxEspBuilder& LinuxEspBuilder::Initrd(std::string initrd) & {
+  initrd_ = std::move(initrd);
+  return *this;
+}
+
+LinuxEspBuilder& LinuxEspBuilder::Architecture(Arch arch) & {
+  arch_ = arch;
+  return *this;
+}
+
+bool LinuxEspBuilder::Build() const {  // writes a bootable Linux ESP to image_path_
+  if (root_.empty()) {
+    LOG(ERROR) << "Root is required argument for LinuxEspBuilder";
+    return false;
+  }
+  if (kernel_.empty()) {
+    LOG(ERROR) << "Kernel esp path is required argument for LinuxEspBuilder";
+    return false;
+  }
+  if (!arch_) {
+    LOG(ERROR) << "Architecture is required argument for LinuxEspBuilder";
+    return false;
+  }
+
+  auto builder = PrepareESP(image_path_, *arch_);  // base layout + bootloader binaries
+
+  const auto tmp_grub_config = image_path_ + ".grub.cfg";  // staged next to the image
+  const auto config_file = SharedFD::Creat(tmp_grub_config, 0644);
+  if (!config_file->IsOpen()) {
+    LOG(ERROR) << "Cannot create temporary grub config: " << tmp_grub_config;
+    return false;
+  }
+
+  const auto dumped = DumpConfig();
+  if (WriteAll(config_file, dumped) != dumped.size()) {  // short write or error (-1)
+    LOG(ERROR) << "Failed to write grub config content to: " << tmp_grub_config;
+    return false;
+  }
+
+  builder.Merge(AddGrubConfig(tmp_grub_config));  // install grub.cfg at all known dest paths
+  builder.File(kernel_, kKernelDestPath, /*required*/ true);
+  if (!initrd_.empty()) {
+    builder.File(initrd_, kInitrdDestPath, /*required*/ true);
+  }
+
+  return builder.Build();
+}
+
+// Renders the grub.cfg text: one "Linux" entry with the queued kernel
+// arguments, ending the kernel line with root=, plus an optional initrd.
+std::string LinuxEspBuilder::DumpConfig() const {
+  std::ostringstream o;
+
+  o << "set timeout=0" << std::endl
+    << "menuentry \"Linux\" {" << std::endl
+    << "  linux " << kKernelDestPath << " ";
+
+  for (const auto& [key, value] : arguments_) {  // range-for: avoids int/size_t mismatch
+    o << key << "=" << value << " ";
+  }
+  for (const auto& arg : single_arguments_) {
+    o << arg << " ";
+  }
+  o << "root=" << root_ << std::endl;  // root= terminates the kernel command line
+  if (!initrd_.empty()) {
+    o << "  if [ -e " << kInitrdDestPath << " ]; then" << std::endl;
+    o << "    initrd " << kInitrdDestPath << std::endl;
+    o << "  fi" << std::endl;
+  }
+  o << "}" << std::endl;
+
+  return o.str();
+}
+
+FuchsiaEspBuilder& FuchsiaEspBuilder::MultibootBinary(std::string multiboot) & {
+  multiboot_bin_ = std::move(multiboot);
+  return *this;
+}
+
+FuchsiaEspBuilder& FuchsiaEspBuilder::Zedboot(std::string zedboot) & {
+  zedboot_ = std::move(zedboot);
+  return *this;
+}
+
+FuchsiaEspBuilder& FuchsiaEspBuilder::Architecture(Arch arch) & {
+  arch_ = arch;
+  return *this;
+}
+
+bool FuchsiaEspBuilder::Build() const {  // writes a Fuchsia multiboot ESP to image_path_
+  if (multiboot_bin_.empty()) {
+    LOG(ERROR) << "Multiboot esp path is required argument for FuchsiaEspBuilder";
+    return false;
+  }
+  if (zedboot_.empty()) {
+    LOG(ERROR) << "Zedboot esp path is required argument for FuchsiaEspBuilder";
+    return false;
+  }
+  if (!arch_) {
+    LOG(ERROR) << "Architecture is required argument for FuchsiaEspBuilder";
+    return false;
+  }
+
+  auto builder = PrepareESP(image_path_, *arch_);  // base layout + bootloader binaries
+
+  const auto tmp_grub_config = image_path_ + ".grub.cfg";  // staged next to the image
+  const auto config_file = SharedFD::Creat(tmp_grub_config, 0644);
+  if (!config_file->IsOpen()) {
+    LOG(ERROR) << "Cannot create temporary grub config: " << tmp_grub_config;
+    return false;
+  }
+
+  const auto dumped = DumpConfig();
+  if (WriteAll(config_file, dumped) != dumped.size()) {  // short write or error (-1)
+    LOG(ERROR) << "Failed to write grub config content to: " << tmp_grub_config;
+    return false;
+  }
+
+  builder.Merge(AddGrubConfig(tmp_grub_config));  // install grub.cfg at all known dest paths
+  builder.File(multiboot_bin_, kMultibootBinDestPath, /*required*/ true);
+  builder.File(zedboot_, kZedbootDestPath, /*required*/ true);
+
+  return builder.Build();
+}
+
+std::string FuchsiaEspBuilder::DumpConfig() const {  // grub.cfg: multiboot entry with zedboot.zbi module
+  std::ostringstream o;
+
+  o << "set timeout=0" << std::endl
+    << "menuentry \"Fuchsia\" {" << std::endl
+    << "  insmod " << kMultibootModuleDestPathIA32 << std::endl  // IA32/AA64 dest paths are the same string
+    << "  multiboot " << kMultibootBinDestPath << std::endl
+    << "  module " << kZedbootDestPath << std::endl
+    << "}" << std::endl;
+
+  return o.str();
+}
+
+} // namespace cuttlefish
diff --git a/host/libs/config/esp.h b/host/libs/config/esp.h
new file mode 100644
index 0000000..8cb782b
--- /dev/null
+++ b/host/libs/config/esp.h
@@ -0,0 +1,128 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#pragma once
+
+#include <utility>
+#include <optional>
+#include <string>
+#include <vector>
+
+#include "host/libs/config/cuttlefish_config.h"
+#include "common/libs/utils/environment.h"
+
+namespace cuttlefish {
+
+// For licensing and build reproducibility reasons, pick up the bootloaders
+// from the host Linux distribution (if present) and pack them into the
+// automatically generated ESP. If the user wants their own bootloaders,
+// they can use -esp_image=/path/to/esp.img to override, so we don't need
+// to accommodate customizations of this packing process.
+
+// Currently we only support Debian based distributions, and GRUB is built
+// for those distros to always load grub.cfg from EFI/debian/grub.cfg, and
+// nowhere else. If you want to add support for other distros, make the
+// extra directories below and copy the initial grub.cfg there as well
+//
+// Currently the Cuttlefish bootloaders are built only for x86 (32-bit),
+// ARM (QEMU only, 32-bit) and AArch64 (64-bit), and U-Boot will hard-code
+// these search paths. Install all bootloaders to one of these paths.
+// NOTE: For now, just ignore the 32-bit ARM version, as Debian doesn't
+//       build an EFI monolith for this architecture.
+// These are the paths Debian installs the monoliths to. If another distro
+// uses an alternative monolith path, add it to this table
+static constexpr char kBootSrcPathIA32[] = "/usr/lib/grub/i386-efi/monolithic/grubia32.efi";
+static constexpr char kBootDestPathIA32[] = "/EFI/BOOT/BOOTIA32.EFI";
+
+static constexpr char kBootSrcPathAA64[] = "/usr/lib/grub/arm64-efi/monolithic/grubaa64.efi";
+static constexpr char kBootDestPathAA64[] = "/EFI/BOOT/BOOTAA64.EFI";
+
+static constexpr char kMultibootModuleSrcPathIA32[] = "/usr/lib/grub/i386-efi/multiboot.mod";
+static constexpr char kMultibootModuleDestPathIA32[] = "/EFI/modules/multiboot.mod";
+
+static constexpr char kMultibootModuleSrcPathAA64[] = "/usr/lib/grub/arm64-efi/multiboot.mod";
+static constexpr char kMultibootModuleDestPathAA64[] = "/EFI/modules/multiboot.mod";
+
+static constexpr char kKernelDestPath[] = "/vmlinuz";
+static constexpr char kInitrdDestPath[] = "/initrd";
+static constexpr char kZedbootDestPath[] = "/zedboot.zbi";
+static constexpr char kMultibootBinDestPath[] = "/multiboot.bin";
+
+// TODO(b/260338443, b/260337906) remove ubuntu and debian variations
+// after migrating to grub-mkimage or adding grub binaries as a prebuilt
+static constexpr char kGrubDebianConfigDestPath[] = "/EFI/debian/grub.cfg";
+static constexpr char kGrubUbuntuConfigDestPath[] = "/EFI/ubuntu/grub.cfg";
+static constexpr char kGrubConfigDestDirectoryPath[] = "/boot/grub";
+static constexpr char kGrubConfigDestPath[] = "/boot/grub/grub.cfg";
+
+const std::vector<std::string> kGrubModulesX86 =
+    {"normal", "configfile", "linux", "linuxefi", "multiboot",
+     "ls", "cat", "help", "fat", "part_msdos", "part_gpt"};
+static constexpr char kGrubModulesPath[] = "/usr/lib/grub/";
+static constexpr char kGrubModulesX86Name[] = "i386-efi";
+static constexpr char kGrubModulesArm64Name[] = "arm64-efi";
+
+class LinuxEspBuilder final {
+ public:
+  LinuxEspBuilder() = delete;
+  LinuxEspBuilder(std::string image_path): image_path_(std::move(image_path)) {}
+
+  LinuxEspBuilder& Argument(std::string key, std::string value) &;
+  LinuxEspBuilder& Argument(std::string value) &;
+  LinuxEspBuilder& Root(std::string root) &;
+  LinuxEspBuilder& Kernel(std::string kernel) &;
+  LinuxEspBuilder& Initrd(std::string initrd) &;
+  LinuxEspBuilder& Architecture(Arch arch) &;
+
+  bool Build() const;
+
+ private:
+  std::string DumpConfig() const;
+
+  const std::string image_path_;
+  std::vector<std::pair<std::string, std::string>> arguments_;
+  std::vector<std::string> single_arguments_;
+  std::string root_;
+  std::string kernel_;
+  std::string initrd_;
+  std::optional<Arch> arch_;
+};
+
+class FuchsiaEspBuilder {
+ public:
+  FuchsiaEspBuilder() = delete;
+  FuchsiaEspBuilder(std::string image_path): image_path_(std::move(image_path)) {}
+
+  FuchsiaEspBuilder& MultibootBinary(std::string multiboot) &;
+  FuchsiaEspBuilder& Zedboot(std::string zedboot) &;
+  FuchsiaEspBuilder& Architecture(Arch arch) &;
+
+  bool Build() const;
+
+ private:
+  std::string DumpConfig() const;
+
+  const std::string image_path_;
+  std::string multiboot_bin_;
+  std::string zedboot_;
+  std::optional<Arch> arch_;
+};
+
+bool NewfsMsdos(const std::string& data_image, int data_image_mb,
+                int offset_num_mb);
+
+bool CanGenerateEsp(Arch arch);
+
+} // namespace cuttlefish
diff --git a/host/libs/config/fastboot/Android.bp b/host/libs/config/fastboot/Android.bp
new file mode 100644
index 0000000..45a4b89
--- /dev/null
+++ b/host/libs/config/fastboot/Android.bp
@@ -0,0 +1,34 @@
+//
+// Copyright (C) 2017 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+cc_library_static {
+    name: "libcuttlefish_host_config_fastboot",
+    srcs: [
+        "config.cpp",
+        "data.cpp",
+        "flags.cpp",
+        "launch.cpp"
+    ],
+    shared_libs: [
+        "libbase",
+        "libfruit",
+        "libjsoncpp",
+    ],
+    defaults: ["cuttlefish_host"],
+}
diff --git a/host/libs/config/fastboot/config.cpp b/host/libs/config/fastboot/config.cpp
new file mode 100644
index 0000000..e23bc36
--- /dev/null
+++ b/host/libs/config/fastboot/config.cpp
@@ -0,0 +1,64 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "host/libs/config/fastboot/fastboot.h"
+
+#include <android-base/logging.h>
+#include <json/json.h>
+
+#include "host/libs/config/config_fragment.h"
+
+namespace cuttlefish {
+namespace {
+
+class FastbootConfigFragmentImpl : public FastbootConfigFragment {  // (de)serializes FastbootConfig as JSON
+ public:
+  INJECT(FastbootConfigFragmentImpl(FastbootConfig& config)) : config_(config) {}
+
+  std::string Name() const override { return "FastbootConfigFragmentImpl"; }
+
+  Json::Value Serialize() const override {
+    Json::Value json;
+    json[kProxyFastboot] = config_.ProxyFastboot();
+    return json;
+  }
+
+  bool Deserialize(const Json::Value& json) override {
+    if (!json.isMember(kProxyFastboot) ||
+        json[kProxyFastboot].type() != Json::booleanValue) {
+      LOG(ERROR) << "Invalid value for " << kProxyFastboot;
+      return false;
+    }
+    if (!config_.SetProxyFastboot(json[kProxyFastboot].asBool())) {
+      LOG(ERROR) << "Failed to set whether to run the fastboot proxy";
+    }
+    return true;  // note: still returns true when Set fails (Set currently cannot fail)
+  }
+
+ private:
+  static constexpr char kProxyFastboot[] = "proxy_fastboot";  // JSON key for this fragment
+  FastbootConfig& config_;
+};
+
+}  // namespace
+
+fruit::Component<fruit::Required<FastbootConfig>, FastbootConfigFragment>
+FastbootConfigFragmentComponent() {
+  return fruit::createComponent()
+      .bind<FastbootConfigFragment, FastbootConfigFragmentImpl>()
+      .addMultibinding<ConfigFragment, FastbootConfigFragment>();
+}
+
+}  // namespace cuttlefish
diff --git a/host/libs/config/fastboot/data.cpp b/host/libs/config/fastboot/data.cpp
new file mode 100644
index 0000000..684a0b1
--- /dev/null
+++ b/host/libs/config/fastboot/data.cpp
@@ -0,0 +1,42 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "host/libs/config/fastboot/fastboot.h"
+
+namespace cuttlefish {
+namespace {
+
+// In-memory FastbootConfig backing store: a single "proxy fastboot?" bit.
+class FastbootConfigImpl : public FastbootConfig {
+ public:
+  INJECT(FastbootConfigImpl()) {}
+
+  bool ProxyFastboot() const override { return proxy_fastboot_; }
+
+  bool SetProxyFastboot(bool proxy) override {
+    proxy_fastboot_ = proxy;
+    return true;
+  }
+
+ private:
+  bool proxy_fastboot_ = true;  // was uninitialized (UB if read before Set); default mirrors flags.cpp
+};
+
+}  // namespace
+
+fruit::Component<FastbootConfig> FastbootConfigComponent() {
+  return fruit::createComponent().bind<FastbootConfig, FastbootConfigImpl>();
+}
+
+}  // namespace cuttlefish
diff --git a/host/libs/config/fastboot/fastboot.h b/host/libs/config/fastboot/fastboot.h
new file mode 100644
index 0000000..ddccf1d
--- /dev/null
+++ b/host/libs/config/fastboot/fastboot.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#pragma once
+
+#include <fruit/fruit.h>
+
+#include "host/libs/config/config_flag.h"
+#include "host/libs/config/config_fragment.h"
+#include "host/libs/config/cuttlefish_config.h"
+#include "host/libs/config/feature.h"
+#include "host/libs/config/kernel_log_pipe_provider.h"
+
+namespace cuttlefish {
+
+class FastbootConfig {
+ public:
+  virtual ~FastbootConfig() = default;
+
+  virtual bool ProxyFastboot() const = 0;
+  virtual bool SetProxyFastboot(bool) = 0;
+};
+
+class FastbootConfigFragment : public ConfigFragment {};
+class FastbootConfigFlag : public FlagFeature {};
+
+fruit::Component<FastbootConfig>
+FastbootConfigComponent();
+fruit::Component<fruit::Required<FastbootConfig, ConfigFlag>, FastbootConfigFlag>
+FastbootConfigFlagComponent();
+fruit::Component<fruit::Required<FastbootConfig>, FastbootConfigFragment>
+FastbootConfigFragmentComponent();
+fruit::Component<fruit::Required<const CuttlefishConfig::InstanceSpecific,
+                                 const FastbootConfig>>
+LaunchFastbootComponent();
+
+}  // namespace cuttlefish
diff --git a/host/libs/config/fastboot/flags.cpp b/host/libs/config/fastboot/flags.cpp
new file mode 100644
index 0000000..0247538
--- /dev/null
+++ b/host/libs/config/fastboot/flags.cpp
@@ -0,0 +1,70 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "host/libs/config/fastboot/fastboot.h"
+
+#include "common/libs/utils/flag_parser.h"
+#include "host/libs/config/config_flag.h"
+#include "host/libs/config/feature.h"
+
+namespace cuttlefish {
+namespace {
+
+// Parses --proxy_fastboot from the command line into FastbootConfig.
+class FastbootConfigFlagImpl : public FastbootConfigFlag {
+ public:
+  INJECT(FastbootConfigFlagImpl(FastbootConfig& config, ConfigFlag& config_flag))
+      : config_(config), config_flag_(config_flag) {}
+
+  std::string Name() const override { return "FastbootConfigFlagImpl"; }
+
+  std::unordered_set<FlagFeature*> Dependencies() const override {
+    return {static_cast<FlagFeature*>(&config_flag_)};
+  }
+
+  bool Process(std::vector<std::string>& args) override {
+    bool proxy_fastboot = true;  // proxy is on unless the flag disables it
+    Flag proxy_fastboot_flag = GflagsCompatFlag(kName, proxy_fastboot);
+    if (!ParseFlags({proxy_fastboot_flag}, args)) {
+      LOG(ERROR) << "Failed to parse proxy_fastboot config flags";
+      return false;
+    }
+    config_.SetProxyFastboot(proxy_fastboot);
+    return true;
+  }
+
+  bool WriteGflagsCompatHelpXml(std::ostream& out) const override {
+    bool proxy_fastboot = config_.ProxyFastboot();
+    Flag proxy_fastboot_flag = GflagsCompatFlag(kName, proxy_fastboot).Help(kHelp);
+    return WriteGflagsCompatXml({proxy_fastboot_flag}, out);
+  }
+
+ private:
+  static constexpr char kName[] = "proxy_fastboot";
+  static constexpr char kHelp[] = "Establish fastboot TCP proxy";  // typo fix: was "Enstablish"
+
+  FastbootConfig& config_;
+  ConfigFlag& config_flag_;
+};
+
+}  // namespace
+
+fruit::Component<fruit::Required<FastbootConfig, ConfigFlag>, FastbootConfigFlag>
+FastbootConfigFlagComponent() {
+  return fruit::createComponent()
+      .bind<FastbootConfigFlag, FastbootConfigFlagImpl>()
+      .addMultibinding<FlagFeature, FastbootConfigFlag>();
+}
+
+}  // namespace cuttlefish
diff --git a/host/libs/config/fastboot/launch.cpp b/host/libs/config/fastboot/launch.cpp
new file mode 100644
index 0000000..d035ed3
--- /dev/null
+++ b/host/libs/config/fastboot/launch.cpp
@@ -0,0 +1,83 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "host/libs/config/fastboot/fastboot.h"
+
+#include <utility>
+#include <vector>
+
+#include "common/libs/utils/result.h"
+#include "host/commands/kernel_log_monitor/utils.h"
+#include "host/libs/config/command_source.h"
+#include "host/libs/config/cuttlefish_config.h"
+#include "host/libs/config/known_paths.h"
+
+namespace cuttlefish {
+namespace {
+
+class FastbootProxy : public CommandSource {  // launches a host TCP <-> guest TCP fastboot tunnel
+ public:
+  INJECT(FastbootProxy(const CuttlefishConfig::InstanceSpecific& instance,
+                       const FastbootConfig& fastboot_config))
+      : instance_(instance),
+        fastboot_config_(fastboot_config) {}
+
+  Result<std::vector<MonitorCommand>> Commands() override {
+    const std::string ethernet_host = instance_.ethernet_ipv6() + "%" +
+                                      instance_.ethernet_bridge_name();  // IPv6 addr%iface scope syntax
+
+    Command tunnel(SocketVsockProxyBinary());
+    tunnel.AddParameter("--server_type=", "tcp");
+    tunnel.AddParameter("--server_tcp_port=", instance_.fastboot_host_port());
+    tunnel.AddParameter("--client_type=", "tcp");
+    tunnel.AddParameter("--client_tcp_host=", ethernet_host);
+    tunnel.AddParameter("--client_tcp_port=", "5554");  // guest-side fastboot TCP port (hard-coded)
+    tunnel.AddParameter("--label=", "fastboot");
+
+    std::vector<MonitorCommand> commands;
+    commands.emplace_back(std::move(tunnel));
+    return commands;
+  }
+
+  std::string Name() const override { return "FastbootProxy"; }
+  bool Enabled() const override {
+    return instance_.boot_flow() == CuttlefishConfig::InstanceSpecific::BootFlow::Android &&
+           fastboot_config_.ProxyFastboot();  // only for Android boot flow with proxying requested
+  }
+
+ private:
+  std::unordered_set<SetupFeature*> Dependencies() const override {
+    return {};
+  }
+
+  bool Setup() override {
+    return true;  // nothing to prepare; all work happens in Commands()
+  }
+
+  const CuttlefishConfig::InstanceSpecific& instance_;
+  const FastbootConfig& fastboot_config_;
+};
+
+}  // namespace
+
+fruit::Component<fruit::Required<const CuttlefishConfig::InstanceSpecific,
+                                 const FastbootConfig>>
+LaunchFastbootComponent() {
+  return fruit::createComponent()
+      .addMultibinding<CommandSource, FastbootProxy>()
+      .addMultibinding<SetupFeature, FastbootProxy>();
+}
+
+}  // namespace cuttlefish
diff --git a/host/libs/config/fetcher_config.cpp b/host/libs/config/fetcher_config.cpp
index e58c378..9bad58d 100644
--- a/host/libs/config/fetcher_config.cpp
+++ b/host/libs/config/fetcher_config.cpp
@@ -52,6 +52,12 @@
     return FileSource::LOCAL_FILE;
   } else if (source == "generated") {
     return FileSource::GENERATED;
+  } else if (source == "bootloader_build") {
+    return FileSource::BOOTLOADER_BUILD;
+  } else if (source == "boot_build") {
+    return FileSource::BOOT_BUILD;
+  } else if (source == "host_package_build") {
+    return FileSource::HOST_PACKAGE_BUILD;
   } else {
     return FileSource::UNKNOWN_PURPOSE;
   }
@@ -68,6 +74,12 @@
     return "local_file";
   } else if (source == FileSource::GENERATED) {
     return "generated";
+  } else if (source == FileSource::BOOTLOADER_BUILD) {
+    return "bootloader_build";
+  } else if (source == FileSource::BOOT_BUILD) {
+    return "boot_build";
+  } else if (source == FileSource::HOST_PACKAGE_BUILD) {
+    return "host_package_build";
   } else {
     return "unknown";
   }
diff --git a/host/libs/config/fetcher_config.h b/host/libs/config/fetcher_config.h
index c15134c..397c8f6 100644
--- a/host/libs/config/fetcher_config.h
+++ b/host/libs/config/fetcher_config.h
@@ -34,6 +34,8 @@
   LOCAL_FILE,
   GENERATED,
   BOOTLOADER_BUILD,
+  BOOT_BUILD,
+  HOST_PACKAGE_BUILD,
 };
 
 /*
diff --git a/host/libs/config/host_tools_version.cpp b/host/libs/config/host_tools_version.cpp
index 17fceb7..825eeeb 100644
--- a/host/libs/config/host_tools_version.cpp
+++ b/host/libs/config/host_tools_version.cpp
@@ -45,7 +45,9 @@
   if (!DirectoryExists(full_path)) {
     return {};
   }
-  std::vector<std::string> files = DirectoryContents(full_path);
+  auto files_result = DirectoryContents(full_path);
+  CHECK(files_result.ok()) << files_result.error().Trace();
+  std::vector<std::string> files = std::move(*files_result);
   for (auto it = files.begin(); it != files.end();) {
     if (*it == "." || *it == "..") {
       it = files.erase(it);
@@ -54,6 +56,7 @@
     }
   }
   std::vector<std::future<uint32_t>> calculations;
+  calculations.reserve(files.size());
   for (auto& file : files) {
     file = path + "/" + file; // mutate in place in files vector
     calculations.emplace_back(
diff --git a/host/libs/config/inject.h b/host/libs/config/inject.h
index 4e2a6e0..10664b1 100644
--- a/host/libs/config/inject.h
+++ b/host/libs/config/inject.h
@@ -16,64 +16,18 @@
 
 #pragma once
 
-#include <fruit/fruit.h>
 #include <type_traits>
 
+#include <fruit/fruit.h>
+
+#include "common/libs/utils/result.h"
+
 namespace cuttlefish {
 
-/**
- * This is a template helper to add bindings for a set of implementation
- * classes that may each be part of multiple multibindings. To be more specific,
- * for these example classes:
- *
- *   class ImplementationA : public IntX, IntY {};
- *   class ImplementationB : public IntY, IntZ {};
- *
- * can be installed with
- *
- *   using Deps = fruit::Required<...>;
- *   using Bases = Multibindings<Deps>::Bases<IntX, IntY, IntZ>;
- *   return fruit::createComponent()
- *     .install(Bases::Impls<ImplementationA, ImplementationB>);
- *
- * Note that not all implementations have to implement all interfaces. Invalid
- * combinations are filtered out at compile-time through SFINAE.
- */
-template <typename Deps>
-struct Multibindings {
-  /* SFINAE logic for an individual interface binding. The class does implement
-   * the interface, so add a multibinding. */
-  template <typename Base, typename Impl,
-            std::enable_if_t<std::is_base_of<Base, Impl>::value, bool> = true>
-  static fruit::Component<Deps> OneBaseOneImpl() {
-    return fruit::createComponent().addMultibinding<Base, Impl>();
-  }
-  /* SFINAE logic for an individual interface binding. The class does not
-   * implement the interface, so do not add a multibinding. */
-  template <typename Base, typename Impl,
-            std::enable_if_t<!std::is_base_of<Base, Impl>::value, bool> = true>
-  static fruit::Component<Deps> OneBaseOneImpl() {
-    return fruit::createComponent();
-  }
-
-  template <typename Base>
-  struct OneBase {
-    template <typename... ImplTypes>
-    static fruit::Component<Deps> Impls() {
-      return fruit::createComponent().installComponentFunctions(
-          fruit::componentFunction(OneBaseOneImpl<Base, ImplTypes>)...);
-    }
-  };
-
-  template <typename... BaseTypes>
-  struct Bases {
-    template <typename... ImplTypes>
-    static fruit::Component<Deps> Impls() {
-      return fruit::createComponent().installComponentFunctions(
-          fruit::componentFunction(
-              OneBase<BaseTypes>::template Impls<ImplTypes...>)...);
-    }
-  };
+class LateInjected {
+ public:
+  virtual ~LateInjected() = default;
+  virtual Result<void> LateInject(fruit::Injector<>& injector) = 0;
 };
 
 }  // namespace cuttlefish
diff --git a/host/libs/config/instance_nums.cpp b/host/libs/config/instance_nums.cpp
new file mode 100644
index 0000000..de641d5
--- /dev/null
+++ b/host/libs/config/instance_nums.cpp
@@ -0,0 +1,255 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "host/libs/config/instance_nums.h"
+
+#include <android-base/parseint.h>
+#include <android-base/strings.h>
+#include <gflags/gflags.h>
+
+#include "common/libs/utils/contains.h"
+#include "common/libs/utils/flag_parser.h"
+#include "host/libs/config/cuttlefish_config.h"
+
+namespace cuttlefish {
+
+// Failed result: The flag was specified in an invalid way
+// Empty optional: The flag was not specified
+// Present optional: The flag was specified with a valid value
+static Result<std::optional<std::int32_t>> ParseBaseInstanceFlag(
+    std::vector<std::string>& flags) {
+  int value = -1;
+  auto flag = GflagsCompatFlag("base_instance_num", value);
+  CF_EXPECT(flag.Parse(flags), "Flag parsing error");
+  return value > 0 ? value : std::optional<std::int32_t>();
+}
+
+// Failed result: The flag was specified in an invalid way
+// Empty optional: The flag was not specified
+// Present optional: The flag was specified with a valid value
+static Result<std::optional<std::int32_t>> ParseNumInstancesFlag(
+    std::vector<std::string>& flags) {
+  int value = -1;
+  auto flag = GflagsCompatFlag("num_instances", value);
+  CF_EXPECT(flag.Parse(flags), "Flag parsing error");
+  return value > 0 ? value : std::optional<std::int32_t>();
+}
+
+// Failed result: The flag was specified in an invalid way
+// Empty set: The flag was not specified
+// Set with members: The flag was specified with a valid value
+static Result<std::vector<std::int32_t>> ParseInstanceNums(
+    const std::string& instance_nums_str) {
+  if (instance_nums_str == "") {
+    return {};
+  }
+  std::vector<std::int32_t> instance_nums;
+  std::vector<std::string> split_str =
+      android::base::Split(instance_nums_str, ",");
+  std::set<std::int32_t> duplication_check_set;
+  for (const auto& instance_num_str : split_str) {
+    std::int32_t instance_num;
+    CF_EXPECT(android::base::ParseInt(instance_num_str.c_str(), &instance_num),
+              "Unable to parse \"" << instance_num_str << "\" in "
+                                   << "`--instance_nums=\"" << instance_nums_str
+                                   << "\"`");
+    CF_EXPECT(!Contains(duplication_check_set, instance_num),
+              instance_num << " is duplicated in --instance_nums flag.");
+    duplication_check_set.insert(instance_num);
+    instance_nums.push_back(instance_num);
+  }
+  return instance_nums;
+}
+
+// Failed result: The flag was specified in an invalid way
+// Empty set: The flag was not specified
+// Set with members: The flag was specified with a valid value
+static Result<std::vector<std::int32_t>> ParseInstanceNumsFlag(
+    std::vector<std::string>& flags) {
+  std::string value;
+  auto flag = GflagsCompatFlag("instance_nums", value);
+  CF_EXPECT(flag.Parse(flags), "Flag parsing error");
+  if (!value.empty()) {
+    return CF_EXPECT(ParseInstanceNums(value));
+  } else {
+    return {};
+  }
+}
+
+InstanceNumsCalculator& InstanceNumsCalculator::FromFlags(
+    const std::vector<std::string>& flags) & {
+  std::vector<std::string> flags_copy = flags;
+  TrySet(base_instance_num_, ParseBaseInstanceFlag(flags_copy));
+  TrySet(num_instances_, ParseNumInstancesFlag(flags_copy));
+  TrySet(instance_nums_, ParseInstanceNumsFlag(flags_copy));
+  return *this;
+}
+
+InstanceNumsCalculator InstanceNumsCalculator::FromFlags(
+    const std::vector<std::string>& flags) && {
+  return FromFlags(flags);
+}
+
+// Failed result: The flag was specified in an invalid way
+// Empty optional: The flag was not specified
+// Present optional: The flag was specified with a valid value
+static Result<std::optional<std::int32_t>> GflagsBaseInstanceFlag() {
+  gflags::CommandLineFlagInfo info;
+  if (!gflags::GetCommandLineFlagInfo("base_instance_num", &info)) {
+    return {};
+  }
+  if (info.is_default) {
+    return {};
+  }
+  CF_EXPECT(info.type == "int32");
+  return *reinterpret_cast<const std::int32_t*>(info.flag_ptr);
+}
+
+// Failed result: The flag was specified in an invalid way
+// Empty optional: The flag was not specified
+// Present optional: The flag was specified with a valid value
+static Result<std::optional<std::int32_t>> GflagsNumInstancesFlag() {
+  gflags::CommandLineFlagInfo info;
+  if (!gflags::GetCommandLineFlagInfo("num_instances", &info)) {
+    return {};
+  }
+  if (info.is_default) {
+    return {};
+  }
+  CF_EXPECT(info.type == "int32");
+  return *reinterpret_cast<const std::int32_t*>(info.flag_ptr);
+}
+
+// Failed result: The flag was specified in an invalid way
+// Empty set: The flag was not specified
+// Set with members: The flag was specified with a valid value
+static Result<std::vector<std::int32_t>> GflagsInstanceNumsFlag() {
+  gflags::CommandLineFlagInfo info;
+  if (!gflags::GetCommandLineFlagInfo("instance_nums", &info)) {
+    return {};
+  }
+  if (info.is_default) {
+    return {};
+  }
+  CF_EXPECT(info.type == "string");
+  auto contents = *reinterpret_cast<const std::string*>(info.flag_ptr);
+  return CF_EXPECT(ParseInstanceNums(contents));
+}
+
+InstanceNumsCalculator& InstanceNumsCalculator::FromGlobalGflags() & {
+  TrySet(base_instance_num_, GflagsBaseInstanceFlag());
+  TrySet(num_instances_, GflagsNumInstancesFlag());
+  TrySet(instance_nums_, GflagsInstanceNumsFlag());
+  return *this;
+}
+
+InstanceNumsCalculator InstanceNumsCalculator::FromGlobalGflags() && {
+  return FromGlobalGflags();
+}
+
+InstanceNumsCalculator& InstanceNumsCalculator::BaseInstanceNum(
+    std::int32_t num) & {
+  base_instance_num_ = num;
+  return *this;
+}
+InstanceNumsCalculator InstanceNumsCalculator::BaseInstanceNum(
+    std::int32_t num) && {
+  return BaseInstanceNum(num);
+}
+
+InstanceNumsCalculator& InstanceNumsCalculator::NumInstances(
+    std::int32_t num) & {
+  num_instances_ = num;
+  return *this;
+}
+InstanceNumsCalculator InstanceNumsCalculator::NumInstances(
+    std::int32_t num) && {
+  return NumInstances(num);
+}
+
+InstanceNumsCalculator& InstanceNumsCalculator::InstanceNums(
+    const std::string& nums) & {
+  TrySet(instance_nums_, ParseInstanceNums(nums));
+  return *this;
+}
+InstanceNumsCalculator InstanceNumsCalculator::InstanceNums(
+    const std::string& nums) && {
+  return InstanceNums(nums);
+}
+
+InstanceNumsCalculator& InstanceNumsCalculator::InstanceNums(
+    std::vector<std::int32_t> set) & {
+  instance_nums_ = std::move(set);
+  return *this;
+}
+InstanceNumsCalculator InstanceNumsCalculator::InstanceNums(
+    std::vector<std::int32_t> set) && {
+  return InstanceNums(std::move(set));
+}
+
+template <typename T>
+void InstanceNumsCalculator::TrySet(T& field, Result<T> result) {
+  if (result.ok()) {
+    field = std::move(*result);
+  } else {
+    // TODO(schuffelen): Combine both errors into one
+    setter_result_.error() = result.error();
+  }
+}
+
+Result<std::vector<std::int32_t>> InstanceNumsCalculator::CalculateFromFlags() {
+  CF_EXPECT(Result<void>(setter_result_));
+  std::optional<std::vector<std::int32_t>> instance_nums_opt;
+  if (!instance_nums_.empty()) {
+    instance_nums_opt = instance_nums_;
+  }
+  // exactly one of these two should be given
+  CF_EXPECT(!instance_nums_opt || !base_instance_num_,
+            "InstanceNums and BaseInstanceNum are mutually exclusive");
+  CF_EXPECT(instance_nums_opt || base_instance_num_,
+            "At least one of --instance_nums or --base_instance_num "
+                << "should be given to call CalculateFromFlags()");
+
+  if (instance_nums_opt) {
+    if (num_instances_) {
+      CF_EXPECT(instance_nums_.size() == *num_instances_);
+    }
+    CF_EXPECT(instance_nums_.size() > 0, "no instance nums");
+    return instance_nums_;
+  }
+
+  std::vector<std::int32_t> instance_nums;
+  for (int i = 0; i < num_instances_.value_or(1); i++) {
+    instance_nums.push_back(i + *base_instance_num_);
+  }
+  return instance_nums;
+}
+
+Result<std::vector<std::int32_t>> InstanceNumsCalculator::Calculate() {
+  CF_EXPECT(Result<void>(setter_result_));
+
+  if (!instance_nums_.empty() || base_instance_num_) {
+    return CalculateFromFlags();
+  }
+
+  std::vector<std::int32_t> instance_nums;
+  for (int i = 0; i < num_instances_.value_or(1); i++) {
+    instance_nums.push_back(i + GetInstance());
+  }
+  CF_EXPECT(instance_nums.size() > 0, "no instance nums");
+  return instance_nums;
+}
+
+}  // namespace cuttlefish
diff --git a/host/libs/config/instance_nums.h b/host/libs/config/instance_nums.h
new file mode 100644
index 0000000..548292d
--- /dev/null
+++ b/host/libs/config/instance_nums.h
@@ -0,0 +1,77 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#pragma once
+
+#include <cstdint>
+#include <optional>
+#include <set>
+#include <string>
+
+#include "common/libs/utils/result.h"
+
+namespace cuttlefish {
+
+class InstanceNumsCalculator {
+ public:
+  InstanceNumsCalculator& FromFlags(const std::vector<std::string>&) &;
+  InstanceNumsCalculator FromFlags(const std::vector<std::string>&) &&;
+
+  InstanceNumsCalculator& FromGlobalGflags() &;
+  InstanceNumsCalculator FromGlobalGflags() &&;
+
+  InstanceNumsCalculator& BaseInstanceNum(std::int32_t) &;
+  InstanceNumsCalculator BaseInstanceNum(std::int32_t) &&;
+
+  InstanceNumsCalculator& NumInstances(std::int32_t) &;
+  InstanceNumsCalculator NumInstances(std::int32_t) &&;
+
+  InstanceNumsCalculator& InstanceNums(const std::string&) &;
+  InstanceNumsCalculator InstanceNums(const std::string&) &&;
+
+  // if any element is duplicated, only the first one of them is taken.
+  //   E.g. InstanceNums({1, 2, 3, 2}) == InstanceNums({1, 2, 3})
+  // That is how the code was implemented in Android 14
+  InstanceNumsCalculator& InstanceNums(std::vector<std::int32_t>) &;
+  InstanceNumsCalculator InstanceNums(std::vector<std::int32_t>) &&;
+
+  /**
+   * Finds set of ids using the flags only.
+   *
+   * Especially, this calculates the base from --instance_nums and
+   * --base_instance_num only
+   *
+   * Processes such as cvd clients may see different user accounts,
+   * CUTTLEFISH_INSTANCE environment variable, etc, than the launcher
+   * effectively sees. This util method is still helpful for that.
+   */
+  Result<std::vector<std::int32_t>> CalculateFromFlags();
+
+  // Calculates the base from the --instance_nums, --base_instance_num,
+  // CUTTLEFISH_INSTANCE, suffix of the user account, and the default value.
+  // Then, figures out the set of ids.
+  Result<std::vector<std::int32_t>> Calculate();
+
+ private:
+  template <typename T>
+  void TrySet(T& field, Result<T> result);
+
+  Result<void> setter_result_;
+  std::optional<std::int32_t> base_instance_num_;
+  std::optional<std::int32_t> num_instances_;
+  std::vector<std::int32_t> instance_nums_;
+};
+
+}  // namespace cuttlefish
diff --git a/host/libs/config/kernel_args.cpp b/host/libs/config/kernel_args.cpp
index cbfdd5e..4046929 100644
--- a/host/libs/config/kernel_args.cpp
+++ b/host/libs/config/kernel_args.cpp
@@ -22,10 +22,8 @@
 #include <vector>
 
 #include "common/libs/utils/environment.h"
-#include "common/libs/utils/files.h"
 #include "host/libs/config/cuttlefish_config.h"
 #include "host/libs/vm_manager/qemu_manager.h"
-#include "host/libs/vm_manager/vm_manager.h"
 
 namespace cuttlefish {
 
@@ -40,22 +38,40 @@
 
 // TODO(schuffelen): Move more of this into host/libs/vm_manager, as a
 // substitute for the vm_manager comparisons.
-std::vector<std::string> VmManagerKernelCmdline(const CuttlefishConfig& config) {
+std::vector<std::string> VmManagerKernelCmdline(
+    const CuttlefishConfig& config,
+    const CuttlefishConfig::InstanceSpecific& instance) {
   std::vector<std::string> vm_manager_cmdline;
   if (config.vm_manager() == QemuManager::name()) {
-    vm_manager_cmdline.push_back("console=hvc0");
-    Arch target_arch = config.target_arch();
+    Arch target_arch = instance.target_arch();
     if (target_arch == Arch::Arm64 || target_arch == Arch::Arm) {
-      // To update the pl011 address:
-      // $ qemu-system-aarch64 -machine virt -cpu cortex-a57 -machine dumpdtb=virt.dtb
-      // $ dtc -O dts -o virt.dts -I dtb virt.dtb
-      // In the virt.dts file, look for a uart node
-      vm_manager_cmdline.push_back("earlycon=pl011,mmio32,0x9000000");
+      if (instance.enable_kernel_log()) {
+        vm_manager_cmdline.push_back("console=hvc0");
+
+        // To update the pl011 address:
+        // $ qemu-system-aarch64 -machine virt -cpu cortex-a57 -machine dumpdtb=virt.dtb
+        // $ dtc -O dts -o virt.dts -I dtb virt.dtb
+        // In the virt.dts file, look for a uart node
+        vm_manager_cmdline.push_back("earlycon=pl011,mmio32,0x9000000");
+      }
+    } else if (target_arch == Arch::RiscV64) {
+        vm_manager_cmdline.push_back("console=hvc0");
+
+        // To update the uart8250 address:
+        // $ qemu-system-riscv64 -machine virt -machine dumpdtb=virt.dtb
+        // $ dtc -O dts -o virt.dts -I dtb virt.dtb
+        // In the virt.dts file, look for a uart node
+        // Only 'mmio' mode works; mmio32 does not
+        vm_manager_cmdline.push_back("earlycon=uart8250,mmio,0x10000000");
     } else {
-      // To update the uart8250 address:
-      // $ qemu-system-x86_64 -kernel bzImage -serial stdio | grep ttyS0
-      // Only 'io' mode works; mmio and mmio32 do not
-      vm_manager_cmdline.push_back("earlycon=uart8250,io,0x3f8");
+      if (instance.enable_kernel_log()) {
+        vm_manager_cmdline.push_back("console=hvc0");
+
+        // To update the uart8250 address:
+        // $ qemu-system-x86_64 -kernel bzImage -serial stdio | grep ttyS0
+        // Only 'io' mode works; mmio and mmio32 do not
+        vm_manager_cmdline.push_back("earlycon=uart8250,io,0x3f8");
+      }
 
       // crosvm doesn't support ACPI PNP, but QEMU does. We need to disable
       // it on QEMU so that the ISA serial ports aren't claimed by ACPI, so
@@ -65,7 +81,8 @@
       // crosvm sets up the ramoops.xx= flags for us, but QEMU does not.
       // See external/crosvm/x86_64/src/lib.rs
       // this feature is not supported on aarch64
-      vm_manager_cmdline.push_back("ramoops.mem_address=0x100000000");
+      // check guest's /proc/iomem when you need to change mem_address or mem_size
+      vm_manager_cmdline.push_back("ramoops.mem_address=0x150000000");
       vm_manager_cmdline.push_back("ramoops.mem_size=0x200000");
       vm_manager_cmdline.push_back("ramoops.console_size=0x80000");
       vm_manager_cmdline.push_back("ramoops.record_size=0x80000");
@@ -73,9 +90,9 @@
     }
   }
 
-  if (config.console() && config.kgdb()) {
+  if (instance.console() && instance.kgdb()) {
     AppendVector(&vm_manager_cmdline, {"kgdboc_earlycon", "kgdbcon",
-                                       "kgdboc=" + config.console_dev()});
+                                       "kgdboc=" + instance.console_dev()});
   }
   return vm_manager_cmdline;
 }
@@ -83,9 +100,10 @@
 } // namespace
 
 std::vector<std::string> KernelCommandLineFromConfig(
-    const CuttlefishConfig& config) {
+    const CuttlefishConfig& config,
+    const CuttlefishConfig::InstanceSpecific& instance) {
   std::vector<std::string> kernel_cmdline;
-  AppendVector(&kernel_cmdline, VmManagerKernelCmdline(config));
+  AppendVector(&kernel_cmdline, VmManagerKernelCmdline(config, instance));
   AppendVector(&kernel_cmdline, config.extra_kernel_cmdline());
   return kernel_cmdline;
 }
diff --git a/host/libs/config/kernel_args.h b/host/libs/config/kernel_args.h
index 59b9e8e..f01ae1e 100644
--- a/host/libs/config/kernel_args.h
+++ b/host/libs/config/kernel_args.h
@@ -24,6 +24,7 @@
 namespace cuttlefish {
 
 std::vector<std::string> KernelCommandLineFromConfig(
-    const CuttlefishConfig& config);
+    const CuttlefishConfig& config,
+    const CuttlefishConfig::InstanceSpecific& instance);
 
 } // namespace cuttlefish
diff --git a/host/libs/config/kernel_log_pipe_provider.h b/host/libs/config/kernel_log_pipe_provider.h
index c9779ea..d91de1d 100644
--- a/host/libs/config/kernel_log_pipe_provider.h
+++ b/host/libs/config/kernel_log_pipe_provider.h
@@ -28,4 +28,7 @@
   virtual SharedFD KernelLogPipe() = 0;
 };
 
+/** Parent class tag for classes that inject KernelLogPipe. */
+class KernelLogPipeConsumer {};
+
 }  // namespace cuttlefish
diff --git a/host/libs/config/known_paths.cpp b/host/libs/config/known_paths.cpp
index f45137e..cccf7de 100644
--- a/host/libs/config/known_paths.cpp
+++ b/host/libs/config/known_paths.cpp
@@ -32,6 +32,8 @@
   return HostBinaryPath("console_forwarder");
 }
 
+std::string EchoServerBinary() { return HostBinaryPath("echo_server"); }
+
 std::string GnssGrpcProxyBinary() {
   return HostBinaryPath("gnss_grpc_proxy");
 }
@@ -52,23 +54,35 @@
   return HostBinaryPath("modem_simulator");
 }
 
-std::string RootCanalBinary() {
-  return HostBinaryPath("root-canal");
+std::string NetsimdBinary() { return HostBinaryPath("netsimd"); }
+
+std::string OpenwrtControlServerBinary() {
+  return HostBinaryPath("openwrt_control_server");
 }
 
+std::string PicaBinary() { return HostBinaryPath("pica"); }
+
+std::string ProcessRestarterBinary() {
+  return HostBinaryPath("process_restarter");
+}
+
+std::string RootCanalBinary() { return HostBinaryPath("root-canal"); }
+
+
+std::string SecureEnvBinary() { return HostBinaryPath("secure_env"); }
+
 std::string SocketVsockProxyBinary() {
   return HostBinaryPath("socket_vsock_proxy");
 }
 
+std::string StopCvdBinary() { return HostBinaryPath("stop_cvd"); }
+
+std::string TcpConnectorBinary() { return HostBinaryPath("tcp_connector"); }
+
 std::string TombstoneReceiverBinary() {
   return HostBinaryPath("tombstone_receiver");
 }
 
-std::string VehicleHalGrpcServerBinary() {
-  return HostBinaryPath(
-      "android.hardware.automotive.vehicle@2.0-virtualization-grpc-server");
-}
-
 std::string WebRtcBinary() {
   return HostBinaryPath("webRTC");
 }
diff --git a/host/libs/config/known_paths.h b/host/libs/config/known_paths.h
index 8c39e61..38d07d5 100644
--- a/host/libs/config/known_paths.h
+++ b/host/libs/config/known_paths.h
@@ -22,15 +22,22 @@
 std::string AdbConnectorBinary();
 std::string ConfigServerBinary();
 std::string ConsoleForwarderBinary();
+std::string EchoServerBinary();
 std::string GnssGrpcProxyBinary();
 std::string KernelLogMonitorBinary();
 std::string LogcatReceiverBinary();
 std::string MetricsBinary();
 std::string ModemSimulatorBinary();
+std::string NetsimdBinary();
+std::string OpenwrtControlServerBinary();
+std::string PicaBinary();
+std::string ProcessRestarterBinary();
 std::string RootCanalBinary();
+std::string SecureEnvBinary();
 std::string SocketVsockProxyBinary();
+std::string StopCvdBinary();
+std::string TcpConnectorBinary();
 std::string TombstoneReceiverBinary();
-std::string VehicleHalGrpcServerBinary();
 std::string WebRtcBinary();
 std::string WebRtcSigServerBinary();
 std::string WebRtcSigServerProxyBinary();
diff --git a/host/libs/config/logging.cpp b/host/libs/config/logging.cpp
index 9cf1705..cb2af81 100644
--- a/host/libs/config/logging.cpp
+++ b/host/libs/config/logging.cpp
@@ -17,14 +17,13 @@
 
 #include <android-base/logging.h>
 
-#include "common/libs/utils/tee_logging.h"
 #include "host/libs/config/cuttlefish_config.h"
 
 using android::base::SetLogger;
 
 namespace cuttlefish {
 
-void DefaultSubprocessLogging(char* argv[]) {
+void DefaultSubprocessLogging(char* argv[], MetadataLevel stderr_level) {
   ::android::base::InitLogging(argv, android::base::StderrLogger);
 
   auto config = CuttlefishConfig::Get();
@@ -38,10 +37,10 @@
     prefix = instance.instance_name() + ": ";
   }
 
-  if (config->run_as_daemon()) {
+  if (instance.run_as_daemon()) {
     SetLogger(LogToFiles({instance.launcher_log_path()}));
   } else {
-    SetLogger(LogToStderrAndFiles({instance.launcher_log_path()}, prefix));
+    SetLogger(LogToStderrAndFiles({instance.launcher_log_path()}, prefix, stderr_level));
   }
 }
 
diff --git a/host/libs/config/logging.h b/host/libs/config/logging.h
index 5d57c6f..8472539 100644
--- a/host/libs/config/logging.h
+++ b/host/libs/config/logging.h
@@ -15,8 +15,11 @@
 
 #pragma once
 
+#include "common/libs/utils/tee_logging.h"
+
 namespace cuttlefish {
 
-void DefaultSubprocessLogging(char* argv[]);
+void DefaultSubprocessLogging(char* argv[],
+                              MetadataLevel stderr_level = MetadataLevel::ONLY_MESSAGE);
 
 } // namespace cuttlefish
diff --git a/host/libs/config/openwrt_args.cpp b/host/libs/config/openwrt_args.cpp
new file mode 100644
index 0000000..8257559
--- /dev/null
+++ b/host/libs/config/openwrt_args.cpp
@@ -0,0 +1,67 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/libs/config/openwrt_args.h"
+
+namespace cuttlefish {
+
+namespace {
+
+std::string getIpAddress(int c_class, int d_class) {
+  return "192.168." + std::to_string(c_class) + "." + std::to_string(d_class);
+}
+
+}  // namespace
+
+std::unordered_map<std::string, std::string> OpenwrtArgsFromConfig(
+    const CuttlefishConfig::InstanceSpecific& instance) {
+  std::unordered_map<std::string, std::string> openwrt_args;
+  int instance_num = cuttlefish::GetInstance();
+
+  int c_class_base = (instance_num - 1) / 64;
+  int d_class_base = (instance_num - 1) % 64 * 4;
+
+  // IP address for OpenWRT is pre-defined in init script of android-cuttlefish
+  // github repository by using tap interfaces created with the script.
+  // (github) base/debian/cuttlefish-base.cuttlefish-host-resources.init
+  // The command 'crosvm run' uses openwrt_args for passing the arguments into
+  // /proc/cmdline of OpenWRT instance.
+  // (AOSP) device/google/cuttlefish/host/commands/run_cvd/launch/open_wrt.cpp
+  // In OpenWRT instance, the script 0_default_config reads /proc/cmdline so
+  // that it can apply arguments defined here.
+  // (AOSP) external/openwrt-prebuilts/shared/uci-defaults/0_default_config
+  if (instance.use_bridged_wifi_tap()) {
+    openwrt_args["bridged_wifi_tap"] = "true";
+    openwrt_args["wan_gateway"] = getIpAddress(96, 1);
+    // TODO(seungjaeyoo) : Remove config after using DHCP server outside OpenWRT
+    // instead.
+    openwrt_args["wan_ipaddr"] = getIpAddress(96, d_class_base + 2);
+    openwrt_args["wan_broadcast"] = getIpAddress(96, d_class_base + 3);
+
+  } else {
+    openwrt_args["bridged_wifi_tap"] = "false";
+    openwrt_args["wan_gateway"] =
+        getIpAddress(94 + c_class_base, d_class_base + 1);
+    openwrt_args["wan_ipaddr"] =
+        getIpAddress(94 + c_class_base, d_class_base + 2);
+    openwrt_args["wan_broadcast"] =
+        getIpAddress(94 + c_class_base, d_class_base + 3);
+  }
+
+  return openwrt_args;
+}
+
+}  // namespace cuttlefish
diff --git a/host/libs/config/openwrt_args.h b/host/libs/config/openwrt_args.h
new file mode 100644
index 0000000..84b6dd3
--- /dev/null
+++ b/host/libs/config/openwrt_args.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <string>
+#include <unordered_map>
+
+#include "host/libs/config/cuttlefish_config.h"
+
+namespace cuttlefish {
+
+std::unordered_map<std::string, std::string> OpenwrtArgsFromConfig(
+    const CuttlefishConfig::InstanceSpecific& instance);
+
+}  // namespace cuttlefish
diff --git a/host/libs/confui/Android.bp b/host/libs/confui/Android.bp
index 585684e..7270b37 100644
--- a/host/libs/confui/Android.bp
+++ b/host/libs/confui/Android.bp
@@ -36,6 +36,7 @@
         "host_renderer.cc",
         "host_server.cc",
         "host_utils.cc",
+        "host_virtual_input.cc",
         "secure_input.cc",
         "server_common.cc",
         "session.cc",
@@ -46,6 +47,7 @@
         "libcn-cbor",
         "libcuttlefish_fs",
         "libbase",
+        "libfruit",
         "libjsoncpp",
         "liblog",
         "libcrypto",
diff --git a/host/libs/confui/host_mode_ctrl.h b/host/libs/confui/host_mode_ctrl.h
index bf8b575..4a5fb51 100644
--- a/host/libs/confui/host_mode_ctrl.h
+++ b/host/libs/confui/host_mode_ctrl.h
@@ -22,6 +22,8 @@
 #include <functional>
 #include <mutex>
 
+#include <fruit/fruit.h>
+
 #include "common/libs/confui/confui.h"
 #include "host/libs/confui/host_utils.h"
 
@@ -36,7 +38,7 @@
 class HostModeCtrl {
  public:
   enum class ModeType : std::uint8_t { kAndroidMode = 55, kConfUI_Mode = 77 };
-
+  INJECT(HostModeCtrl()) : atomic_mode_(ModeType::kAndroidMode) {}
   /**
    * The thread that enqueues Android frames will call this to wait until
    * the mode is kAndroidMode
@@ -109,7 +111,6 @@
   }
 
  private:
-  HostModeCtrl() : atomic_mode_(ModeType::kAndroidMode) {}
   std::mutex mode_mtx_;
   std::condition_variable and_mode_cv_;
   std::condition_variable confui_mode_cv_;
diff --git a/host/libs/confui/host_renderer.cc b/host/libs/confui/host_renderer.cc
index 8bf218b..fe89a45 100644
--- a/host/libs/confui/host_renderer.cc
+++ b/host/libs/confui/host_renderer.cc
@@ -33,21 +33,158 @@
   return result << shift;
 }
 
-std::unique_ptr<ConfUiRenderer> ConfUiRenderer::GenerateRenderer(
-    const std::uint32_t display, const std::string& confirmation_msg,
-    const std::string& locale, const bool inverted, const bool magnified) {
-  ConfUiRenderer* raw_ptr = new ConfUiRenderer(display, confirmation_msg,
-                                               locale, inverted, magnified);
-  if (raw_ptr && raw_ptr->IsSetUpSuccessful()) {
-    return std::unique_ptr<ConfUiRenderer>(raw_ptr);
+/**
+ * create a raw frame for confirmation UI dialog
+ *
+ * Many rendering code borrowed from the following source
+ *  https://android.googlesource.com/trusty/app/confirmationui/+/0429cc7/src
+ */
+class ConfUiRendererImpl {
+  friend class ConfUiRenderer;
+
+ public:
+  using LabelConfMsg = teeui::LabelBody;
+
+ private:
+  static Result<std::unique_ptr<ConfUiRendererImpl>> GenerateRenderer(
+      const std::uint32_t display, const std::string& confirmation_msg,
+      const std::string& locale, const bool inverted, const bool magnified);
+
+  /**
+   * This does not repaint from scratch every time
+   *
+   * It does repaint its frame buffer only when w/h of
+   * current display has changed
+   */
+  std::unique_ptr<TeeUiFrameWrapper>& RenderRawFrame();
+
+  bool IsFrameReady() const { return raw_frame_ && !raw_frame_->IsEmpty(); }
+
+  bool IsInConfirm(const std::uint32_t x, const std::uint32_t y) {
+    return IsInside<teeui::LabelOK>(x, y);
   }
-  return nullptr;
+  bool IsInCancel(const std::uint32_t x, const std::uint32_t y) {
+    return IsInside<teeui::LabelCancel>(x, y);
+  }
+
+  bool IsSetUpSuccessful() const { return is_setup_well_; }
+  ConfUiRendererImpl(const std::uint32_t display,
+                     const std::string& confirmation_msg,
+                     const std::string& locale, const bool inverted,
+                     const bool magnified);
+
+  struct Boundary {            // inclusive but.. LayoutElement's size is float
+    std::uint32_t x, y, w, h;  // (x, y) is the top left
+  };
+
+  template <typename LayoutElement>
+  Boundary GetBoundary(LayoutElement&& e) const {
+    auto box = e.bounds_;
+    Boundary b;
+    // (x,y) is left top. so floor() makes sense
+    // w, h are width and height in float. perhaps ceiling makes more
+    // sense
+    b.x = static_cast<std::uint32_t>(box.x().floor().count());
+    b.y = static_cast<std::uint32_t>(box.y().floor().count());
+    b.w = static_cast<std::uint32_t>(box.w().ceil().count());
+    b.h = static_cast<std::uint32_t>(box.h().ceil().count());
+    return b;
+  }
+
+  template <typename Element>
+  bool IsInside(const std::uint32_t x, const std::uint32_t y) const {
+    auto box = GetBoundary(std::get<Element>(layout_));
+    if (x >= box.x && x <= box.x + box.w && y >= box.y && y <= box.y + box.h) {
+      return true;
+    }
+    return false;
+  }
+  // Essentially repaints from scratch; returns the new frame
+  // when successful, or nullptr otherwise
+  std::unique_ptr<TeeUiFrameWrapper> RepaintRawFrame(const int w, const int h);
+
+  bool InitLayout(const std::string& lang_id);
+  teeui::Error UpdateTranslations();
+  teeui::Error UpdateLocale();
+  void SetDeviceContext(const unsigned long long w, const unsigned long long h,
+                        bool is_inverted, bool is_magnified);
+
+  // a callback function to be effectively sent to TeeUI library
+  teeui::Error UpdatePixels(TeeUiFrameWrapper& buffer, std::uint32_t x,
+                            std::uint32_t y, teeui::Color color);
+
+  // second param is for type deduction
+  template <typename... Elements>
+  static teeui::Error drawElements(std::tuple<Elements...>& layout,
+                                   const teeui::PixelDrawer& drawPixel) {
+    // Error::operator|| is overloaded, so we don't get short circuit
+    // evaluation. But we get the first error that occurs. We will still try and
+    // draw the remaining elements in the order they appear in the layout tuple.
+    return (std::get<Elements>(layout).draw(drawPixel) || ...);
+  }
+  void UpdateColorScheme(const bool is_inverted);
+  template <typename Label>
+  auto SetText(const std::string& text) {
+    return std::get<Label>(layout_).setText(
+        {text.c_str(), text.c_str() + text.size()});
+  }
+
+  template <typename Label>
+  teeui::Error UpdateString();
+
+  std::uint32_t display_num_;
+  teeui::layout_t<teeui::ConfUILayout> layout_;
+  std::string lang_id_;
+  std::string prompt_text_;  // confirmation ui message
+
+  /**
+   * Potentially, the same frame could be requested multiple times.
+   *
+   * While another thread/caller is using this frame, the frame should
+   * be kept here, too, to be returned upon future requests.
+   *
+   */
+  std::unique_ptr<TeeUiFrameWrapper> raw_frame_;
+  std::uint32_t current_height_;
+  std::uint32_t current_width_;
+  teeui::Color color_bg_;
+  teeui::Color color_text_;
+  teeui::Color shield_color_;
+  bool is_inverted_;
+  bool is_magnified_;
+  teeui::context<teeui::ConfUIParameters> ctx_;
+  bool is_setup_well_;
+
+  static constexpr const teeui::Color kColorBackground = 0xffffffff;
+  static constexpr const teeui::Color kColorBackgroundInv = 0xff212121;
+  static constexpr const teeui::Color kColorDisabled = 0xffbdbdbd;
+  static constexpr const teeui::Color kColorDisabledInv = 0xff424242;
+  static constexpr const teeui::Color kColorEnabled = 0xff212121;
+  static constexpr const teeui::Color kColorEnabledInv = 0xffdedede;
+  static constexpr const teeui::Color kColorShield = 0xff778500;
+  static constexpr const teeui::Color kColorShieldInv = 0xffc4cb80;
+  static constexpr const teeui::Color kColorText = 0xff212121;
+  static constexpr const teeui::Color kColorTextInv = 0xffdedede;
+};
+
+Result<std::unique_ptr<ConfUiRendererImpl>>
+ConfUiRendererImpl::GenerateRenderer(const std::uint32_t display,
+                                     const std::string& confirmation_msg,
+                                     const std::string& locale,
+                                     const bool inverted,
+                                     const bool magnified) {
+  ConfUiRendererImpl* raw_ptr = new ConfUiRendererImpl(
+      display, confirmation_msg, locale, inverted, magnified);
+  CF_EXPECT(raw_ptr && raw_ptr->IsSetUpSuccessful(),
+            "Failed to create ConfUiRendererImpl");
+  return std::unique_ptr<ConfUiRendererImpl>(raw_ptr);
 }
 
 static int GetDpi(const int display_num = 0) {
   auto config = CuttlefishConfig::Get();
   CHECK(config) << "Config is Missing";
-  auto display_configs = config->display_configs();
+  auto instance = config->ForDefaultInstance();
+  auto display_configs = instance.display_configs();
   CHECK_GT(display_configs.size(), display_num)
       << "Invalid display number " << display_num;
   return display_configs[display_num].dpi;
@@ -68,10 +205,11 @@
  * proportionally
  *
  */
-ConfUiRenderer::ConfUiRenderer(const std::uint32_t display,
-                               const std::string& confirmation_msg,
-                               const std::string& locale, const bool inverted,
-                               const bool magnified)
+ConfUiRendererImpl::ConfUiRendererImpl(const std::uint32_t display,
+                                       const std::string& confirmation_msg,
+                                       const std::string& locale,
+                                       const bool inverted,
+                                       const bool magnified)
     : display_num_{display},
       lang_id_{locale},
       prompt_text_{confirmation_msg},
@@ -95,7 +233,7 @@
   is_setup_well_ = true;
 }
 
-teeui::Error ConfUiRenderer::UpdateLocale() {
+teeui::Error ConfUiRendererImpl::UpdateLocale() {
   using teeui::Error;
   teeui::localization::selectLangId(lang_id_.c_str());
   if (auto error = UpdateTranslations()) {
@@ -105,7 +243,7 @@
 }
 
 template <typename Label>
-teeui::Error ConfUiRenderer::UpdateString() {
+teeui::Error ConfUiRendererImpl::UpdateString() {
   using namespace teeui;
   const char* str;
   auto& label = std::get<Label>(layout_);
@@ -118,7 +256,7 @@
   return Error::OK;
 }
 
-teeui::Error ConfUiRenderer::UpdateTranslations() {
+teeui::Error ConfUiRendererImpl::UpdateTranslations() {
   using namespace teeui;
   if (auto error = UpdateString<LabelOK>()) {
     return error;
@@ -135,10 +273,10 @@
   return Error::OK;
 }
 
-void ConfUiRenderer::SetDeviceContext(const unsigned long long w,
-                                      const unsigned long long h,
-                                      const bool is_inverted,
-                                      const bool is_magnified) {
+void ConfUiRendererImpl::SetDeviceContext(const unsigned long long w,
+                                          const unsigned long long h,
+                                          const bool is_inverted,
+                                          const bool is_magnified) {
   using namespace teeui;
   const auto screen_width = operator""_px(w);
   const auto screen_height = operator""_px(h);
@@ -164,9 +302,9 @@
   }
 }
 
-teeui::Error ConfUiRenderer::UpdatePixels(TeeUiFrameWrapper& raw_frame,
-                                          std::uint32_t x, std::uint32_t y,
-                                          teeui::Color color) {
+teeui::Error ConfUiRendererImpl::UpdatePixels(TeeUiFrameWrapper& raw_frame,
+                                              std::uint32_t x, std::uint32_t y,
+                                              teeui::Color color) {
   auto buffer = raw_frame.data();
   const auto height = raw_frame.Height();
   const auto width = raw_frame.Width();
@@ -183,7 +321,7 @@
   return teeui::Error::OK;
 }
 
-void ConfUiRenderer::UpdateColorScheme(const bool is_inverted) {
+void ConfUiRendererImpl::UpdateColorScheme(const bool is_inverted) {
   using namespace teeui;
   color_text_ = is_inverted ? kColorDisabledInv : kColorDisabled;
   shield_color_ = is_inverted ? kColorShieldInv : kColorShield;
@@ -195,7 +333,7 @@
   return;
 }
 
-std::shared_ptr<TeeUiFrameWrapper> ConfUiRenderer::RenderRawFrame() {
+std::unique_ptr<TeeUiFrameWrapper>& ConfUiRendererImpl::RenderRawFrame() {
   /* we repaint only if one or more of the followng meet:
    *
    *  1. raw_frame_ is empty
@@ -209,7 +347,7 @@
     if (!new_frame) {
       // must repaint but failed
       raw_frame_ = nullptr;
-      return nullptr;
+      return raw_frame_;
     }
     // repainting from the scratch successful in a new frame
     raw_frame_ = std::move(new_frame);
@@ -219,7 +357,7 @@
   return raw_frame_;
 }
 
-std::unique_ptr<TeeUiFrameWrapper> ConfUiRenderer::RepaintRawFrame(
+std::unique_ptr<TeeUiFrameWrapper> ConfUiRendererImpl::RepaintRawFrame(
     const int w, const int h) {
   std::get<teeui::LabelOK>(layout_).setTextColor(kColorEnabled);
   std::get<teeui::LabelCancel>(layout_).setTextColor(kColorEnabled);
@@ -248,5 +386,52 @@
   return new_raw_frame;
 }
 
+ConfUiRenderer::ConfUiRenderer(ScreenConnectorFrameRenderer& screen_connector)
+    : screen_connector_{screen_connector} {}
+
+ConfUiRenderer::~ConfUiRenderer() {}
+
+Result<void> ConfUiRenderer::RenderDialog(
+    const std::uint32_t display_num, const std::string& prompt_text,
+    const std::string& locale, const std::vector<teeui::UIOption>& ui_options) {
+  renderer_impl_ = CF_EXPECT(ConfUiRendererImpl::GenerateRenderer(
+      display_num, prompt_text, locale, IsInverted(ui_options),
+      IsMagnified(ui_options)));
+  auto& teeui_frame = renderer_impl_->RenderRawFrame();
+  CF_EXPECT(teeui_frame != nullptr, "RenderRawFrame() failed.");
+  ConfUiLog(VERBOSE) << "actually trying to render the frame"
+                     << thread::GetName();
+  auto frame_width = teeui_frame->Width();
+  auto frame_height = teeui_frame->Height();
+  auto frame_stride_bytes = teeui_frame->ScreenStrideBytes();
+  auto frame_bytes = reinterpret_cast<std::uint8_t*>(teeui_frame->data());
+  CF_EXPECT(screen_connector_.RenderConfirmationUi(
+      display_num, frame_width, frame_height, frame_stride_bytes, frame_bytes));
+  return {};
+}
+
+bool ConfUiRenderer::IsInverted(
+    const std::vector<teeui::UIOption>& ui_options) const {
+  return Contains(ui_options, teeui::UIOption::AccessibilityInverted);
+}
+
+bool ConfUiRenderer::IsMagnified(
+    const std::vector<teeui::UIOption>& ui_options) const {
+  return Contains(ui_options, teeui::UIOption::AccessibilityMagnified);
+}
+
+bool ConfUiRenderer::IsInConfirm(const std::uint32_t x, const std::uint32_t y) {
+  if (!renderer_impl_) {
+    ConfUiLog(INFO) << "renderer_impl_ is nullptr";
+  }
+  return renderer_impl_ && renderer_impl_->IsInConfirm(x, y);
+}
+bool ConfUiRenderer::IsInCancel(const std::uint32_t x, const std::uint32_t y) {
+  if (!renderer_impl_) {
+    ConfUiLog(INFO) << "renderer_impl_ is nullptr";
+  }
+  return renderer_impl_ && renderer_impl_->IsInCancel(x, y);
+}
+
 }  // end of namespace confui
 }  // end of namespace cuttlefish
diff --git a/host/libs/confui/host_renderer.h b/host/libs/confui/host_renderer.h
index 6dac3e3..d356933 100644
--- a/host/libs/confui/host_renderer.h
+++ b/host/libs/confui/host_renderer.h
@@ -23,13 +23,16 @@
 #include <tuple>
 #include <vector>
 
+#include <android-base/logging.h>
 #include <freetype/ftglyph.h>  // $(croot)/external/freetype
+#include <fruit/fruit.h>
 #include <teeui/utils.h>       // $(croot)/system/teeui/libteeui/.../include
 
 #include "common/libs/confui/confui.h"
+#include "common/libs/utils/result.h"
 #include "host/libs/confui/layouts/layout.h"
 #include "host/libs/confui/server_common.h"
-#include "host/libs/screen_connector/screen_connector_common.h"
+#include "host/libs/screen_connector/screen_connector.h"
 
 namespace cuttlefish {
 namespace confui {
@@ -58,135 +61,24 @@
   TeeUiFrame teeui_frame_;
 };
 
-/**
- * create a raw frame for confirmation UI dialog
- *
- * Many rendering code borrowed from the following source
- *  https://android.googlesource.com/trusty/app/confirmationui/+/0429cc7/src
- */
+class ConfUiRendererImpl;
 class ConfUiRenderer {
  public:
-  using LabelConfMsg = teeui::LabelBody;
-
-  static std::unique_ptr<ConfUiRenderer> GenerateRenderer(
-      const std::uint32_t display, const std::string& confirmation_msg,
-      const std::string& locale, const bool inverted, const bool magnified);
-
-  /**
-   * this does not repaint from the scratch all the time
-   *
-   * It does repaint its frame buffer only when w/h of
-   * current display has changed
-   */
-  std::shared_ptr<TeeUiFrameWrapper> RenderRawFrame();
-
-  bool IsFrameReady() const { return raw_frame_ && !raw_frame_->IsEmpty(); }
-
-  bool IsInConfirm(const std::uint32_t x, const std::uint32_t y) {
-    return IsInside<teeui::LabelOK>(x, y);
-  }
-  bool IsInCancel(const std::uint32_t x, const std::uint32_t y) {
-    return IsInside<teeui::LabelCancel>(x, y);
-  }
+  INJECT(ConfUiRenderer(ScreenConnectorFrameRenderer& screen_connector));
+  ~ConfUiRenderer();
+  Result<void> RenderDialog(const std::uint32_t display_num,
+                            const std::string& prompt_text,
+                            const std::string& locale,
+                            const std::vector<teeui::UIOption>& ui_options);
+  bool IsInConfirm(const std::uint32_t x, const std::uint32_t y);
+  bool IsInCancel(const std::uint32_t x, const std::uint32_t y);
 
  private:
-  bool IsSetUpSuccessful() const { return is_setup_well_; }
-  ConfUiRenderer(const std::uint32_t display,
-                 const std::string& confirmation_msg, const std::string& locale,
-                 const bool inverted, const bool magnified);
-
-  struct Boundary {            // inclusive but.. LayoutElement's size is float
-    std::uint32_t x, y, w, h;  // (x, y) is the top left
-  };
-
-  template <typename LayoutElement>
-  Boundary GetBoundary(LayoutElement&& e) const {
-    auto box = e.bounds_;
-    Boundary b;
-    // (x,y) is left top. so floor() makes sense
-    // w, h are witdh and height in float. perhaps ceiling makes more
-    // sense
-    b.x = static_cast<std::uint32_t>(box.x().floor().count());
-    b.y = static_cast<std::uint32_t>(box.y().floor().count());
-    b.w = static_cast<std::uint32_t>(box.w().ceil().count());
-    b.h = static_cast<std::uint32_t>(box.h().ceil().count());
-    return b;
-  }
-
-  template <typename Element>
-  bool IsInside(const std::uint32_t x, const std::uint32_t y) const {
-    auto box = GetBoundary(std::get<Element>(layout_));
-    if (x >= box.x && x <= box.x + box.w && y >= box.y && y <= box.y + box.h) {
-      return true;
-    }
-    return false;
-  }
-  // essentially, to repaint from the scratch, so returns new frame
-  // when successful. Or, nullopt
-  std::unique_ptr<TeeUiFrameWrapper> RepaintRawFrame(const int w, const int h);
-
-  bool InitLayout(const std::string& lang_id);
-  teeui::Error UpdateTranslations();
-  teeui::Error UpdateLocale();
-  void SetDeviceContext(const unsigned long long w, const unsigned long long h,
-                        bool is_inverted, bool is_magnified);
-
-  // a callback function to be effectively sent to TeeUI library
-  teeui::Error UpdatePixels(TeeUiFrameWrapper& buffer, std::uint32_t x,
-                            std::uint32_t y, teeui::Color color);
-
-  // second param is for type deduction
-  template <typename... Elements>
-  static teeui::Error drawElements(std::tuple<Elements...>& layout,
-                                   const teeui::PixelDrawer& drawPixel) {
-    // Error::operator|| is overloaded, so we don't get short circuit
-    // evaluation. But we get the first error that occurs. We will still try and
-    // draw the remaining elements in the order they appear in the layout tuple.
-    return (std::get<Elements>(layout).draw(drawPixel) || ...);
-  }
-  void UpdateColorScheme(const bool is_inverted);
-  template <typename Label>
-  auto SetText(const std::string& text) {
-    return std::get<Label>(layout_).setText(
-        {text.c_str(), text.c_str() + text.size()});
-  }
-
-  template <typename Label>
-  teeui::Error UpdateString();
-
-  std::uint32_t display_num_;
-  teeui::layout_t<teeui::ConfUILayout> layout_;
-  std::string lang_id_;
-  std::string prompt_text_;  // confirmation ui message
-
-  /**
-   * Potentially, the same frame could be requested multiple times.
-   *
-   * While another thread/caller is using this frame, the frame should
-   * be kept here, too, to be returned upon future requests.
-   *
-   */
-  std::shared_ptr<TeeUiFrameWrapper> raw_frame_;
-  std::uint32_t current_height_;
-  std::uint32_t current_width_;
-  teeui::Color color_bg_;
-  teeui::Color color_text_;
-  teeui::Color shield_color_;
-  bool is_inverted_;
-  bool is_magnified_;
-  teeui::context<teeui::ConfUIParameters> ctx_;
-  bool is_setup_well_;
-
-  static constexpr const teeui::Color kColorBackground = 0xffffffff;
-  static constexpr const teeui::Color kColorBackgroundInv = 0xff212121;
-  static constexpr const teeui::Color kColorDisabled = 0xffbdbdbd;
-  static constexpr const teeui::Color kColorDisabledInv = 0xff424242;
-  static constexpr const teeui::Color kColorEnabled = 0xff212121;
-  static constexpr const teeui::Color kColorEnabledInv = 0xffdedede;
-  static constexpr const teeui::Color kColorShield = 0xff778500;
-  static constexpr const teeui::Color kColorShieldInv = 0xffc4cb80;
-  static constexpr const teeui::Color kColorText = 0xff212121;
-  static constexpr const teeui::Color kColorTextInv = 0xffdedede;
+  bool IsInverted(const std::vector<teeui::UIOption>& ui_options) const;
+  bool IsMagnified(const std::vector<teeui::UIOption>& ui_options) const;
+  ScreenConnectorFrameRenderer& screen_connector_;
+  std::unique_ptr<ConfUiRendererImpl> renderer_impl_;
 };
+
 }  // end of namespace confui
 }  // end of namespace cuttlefish
diff --git a/host/libs/confui/host_server.cc b/host/libs/confui/host_server.cc
index b510759..68bf3aa 100644
--- a/host/libs/confui/host_server.cc
+++ b/host/libs/confui/host_server.cc
@@ -16,8 +16,8 @@
 
 #include "host/libs/confui/host_server.h"
 
-#include <chrono>
 #include <functional>
+#include <memory>
 #include <optional>
 #include <tuple>
 
@@ -29,15 +29,16 @@
 
 namespace cuttlefish {
 namespace confui {
-static auto CuttlefishConfigDefaultInstance() {
-  auto config = cuttlefish::CuttlefishConfig::Get();
-  CHECK(config) << "Config must not be null";
-  return config->ForDefaultInstance();
+namespace {
+
+template <typename Derived, typename Base>
+std::unique_ptr<Derived> DowncastTo(std::unique_ptr<Base>&& base) {
+  Base* tmp = base.release();
+  Derived* derived = static_cast<Derived*>(tmp);
+  return std::unique_ptr<Derived>(derived);
 }
 
-static int HalHostVsockPort() {
-  return CuttlefishConfigDefaultInstance().confui_host_vsock_port();
-}
+}  // namespace
 
 /**
  * null if not user/touch, or wrap it and ConfUiSecure{Selection,Touch}Message
@@ -45,39 +46,30 @@
  * ConfUiMessage must NOT ConfUiSecure{Selection,Touch}Message types
  */
 static std::unique_ptr<ConfUiMessage> WrapWithSecureFlag(
-    const ConfUiMessage& base_msg, const bool secure) {
-  switch (base_msg.GetType()) {
+    std::unique_ptr<ConfUiMessage>&& base_msg, const bool secure) {
+  switch (base_msg->GetType()) {
     case ConfUiCmd::kUserInputEvent: {
-      const ConfUiUserSelectionMessage& as_selection =
-          static_cast<const ConfUiUserSelectionMessage&>(base_msg);
-      return ToSecureSelectionMessage(as_selection, secure);
+      auto as_selection =
+          DowncastTo<ConfUiUserSelectionMessage>(std::move(base_msg));
+      return ToSecureSelectionMessage(std::move(as_selection), secure);
     }
     case ConfUiCmd::kUserTouchEvent: {
-      const ConfUiUserTouchMessage& as_touch =
-          static_cast<const ConfUiUserTouchMessage&>(base_msg);
-      return ToSecureTouchMessage(as_touch, secure);
+      auto as_touch = DowncastTo<ConfUiUserTouchMessage>(std::move(base_msg));
+      return ToSecureTouchMessage(std::move(as_touch), secure);
     }
     default:
       return nullptr;
   }
 }
 
-HostServer& HostServer::Get(
-    HostModeCtrl& host_mode_ctrl,
-    cuttlefish::ScreenConnectorFrameRenderer& screen_connector) {
-  static HostServer host_server{host_mode_ctrl, screen_connector};
-  return host_server;
-}
-
-HostServer::HostServer(
-    cuttlefish::HostModeCtrl& host_mode_ctrl,
-    cuttlefish::ScreenConnectorFrameRenderer& screen_connector)
+HostServer::HostServer(HostModeCtrl& host_mode_ctrl,
+                       ConfUiRenderer& host_renderer,
+                       const PipeConnectionPair& fd_pair)
     : display_num_(0),
+      host_renderer_{host_renderer},
       host_mode_ctrl_(host_mode_ctrl),
-      screen_connector_{screen_connector},
-      hal_vsock_port_(HalHostVsockPort()) {
-  ConfUiLog(DEBUG) << "Confirmation UI Host session is listening on: "
-                   << hal_vsock_port_;
+      from_guest_fifo_fd_(fd_pair.from_guest_),
+      to_guest_fifo_fd_(fd_pair.to_guest_) {
   const size_t max_elements = 20;
   auto ignore_new =
       [](ThreadSafeQueue<std::unique_ptr<ConfUiMessage>>::QueueImpl*) {
@@ -90,12 +82,18 @@
       HostServer::Multiplexer::CreateQueue(max_elements, ignore_new));
 }
 
+bool HostServer::IsVirtioConsoleOpen() const {
+  return from_guest_fifo_fd_->IsOpen() && to_guest_fifo_fd_->IsOpen();
+}
+
+bool HostServer::CheckVirtioConsole() {
+  if (IsVirtioConsoleOpen()) return true;
+  ConfUiLog(FATAL) << "Virtio console is not open";
+  return false;
+}
+
 void HostServer::Start() {
-  guest_hal_socket_ =
-      cuttlefish::SharedFD::VsockServer(hal_vsock_port_, SOCK_STREAM);
-  if (!guest_hal_socket_->IsOpen()) {
-    ConfUiLog(FATAL) << "Confirmation UI host service mandates a server socket"
-                     << "to which the guest HAL to connect.";
+  if (!CheckVirtioConsole()) {
     return;
   }
   auto hal_cmd_fetching = [this]() { this->HalCmdFetcherLoop(); };
@@ -103,24 +101,20 @@
   hal_input_fetcher_thread_ =
       thread::RunThread("HalInputLoop", hal_cmd_fetching);
   main_loop_thread_ = thread::RunThread("MainLoop", main);
-  ConfUiLog(DEBUG) << "configured internal vsock based input.";
+  ConfUiLog(DEBUG) << "host service started.";
   return;
 }
 
 void HostServer::HalCmdFetcherLoop() {
   while (true) {
-    if (!hal_cli_socket_->IsOpen()) {
-      ConfUiLog(DEBUG) << "client is disconnected";
-      std::unique_lock<std::mutex> lk(socket_flag_mtx_);
-      hal_cli_socket_ = EstablishHalConnection();
-      is_socket_ok_ = true;
-      continue;
+    if (!CheckVirtioConsole()) {
+      return;
     }
-    auto msg = RecvConfUiMsg(hal_cli_socket_);
+    auto msg = RecvConfUiMsg(from_guest_fifo_fd_);
     if (!msg) {
       ConfUiLog(ERROR) << "Error in RecvConfUiMsg from HAL";
-      hal_cli_socket_->Close();
-      is_socket_ok_ = false;
+      // TODO(kwstephenkim): error handling
+      // either file is not open, or ill-formatted message
       continue;
     }
     /*
@@ -130,7 +124,8 @@
      * always guaranteed to be picked up reasonably soon.
      */
     constexpr bool is_secure = false;
-    auto to_override_if_user_input = WrapWithSecureFlag(*msg, is_secure);
+    auto to_override_if_user_input =
+        WrapWithSecureFlag(std::move(msg), is_secure);
     if (to_override_if_user_input) {
       msg = std::move(to_override_if_user_input);
     }
@@ -138,6 +133,12 @@
   }
 }
 
+/**
+ * Send inputs generated not by auto-tester but by the human users
+ *
+ * Send such inputs into the command queue consumed by the state machine
+ * in the main loop/current session.
+ */
 void HostServer::SendUserSelection(std::unique_ptr<ConfUiMessage>& input) {
   if (!curr_session_ ||
       curr_session_->GetState() != MainLoopState::kInSession ||
@@ -146,7 +147,7 @@
     return;
   }
   constexpr bool is_secure = true;
-  auto secure_input = WrapWithSecureFlag(*input, is_secure);
+  auto secure_input = WrapWithSecureFlag(std::move(input), is_secure);
   input_multiplexer_.Push(user_input_evt_q_id_, std::move(secure_input));
 }
 
@@ -156,9 +157,7 @@
   }
   std::unique_ptr<ConfUiMessage> input =
       std::make_unique<ConfUiUserTouchMessage>(GetCurrentSessionId(), x, y);
-  constexpr bool is_secure = true;
-  auto secure_input = WrapWithSecureFlag(*input, is_secure);
-  SendUserSelection(secure_input);
+  SendUserSelection(input);
 }
 
 void HostServer::UserAbortEvent() {
@@ -168,29 +167,7 @@
   std::unique_ptr<ConfUiMessage> input =
       std::make_unique<ConfUiUserSelectionMessage>(GetCurrentSessionId(),
                                                    UserResponse::kUserAbort);
-  constexpr bool is_secure = true;
-  auto secure_input = WrapWithSecureFlag(*input, is_secure);
-  SendUserSelection(secure_input);
-}
-
-bool HostServer::IsConfUiActive() {
-  if (!curr_session_) {
-    return false;
-  }
-  return curr_session_->IsConfUiActive();
-}
-
-SharedFD HostServer::EstablishHalConnection() {
-  using namespace std::chrono_literals;
-  while (true) {
-    ConfUiLog(VERBOSE) << "Waiting hal accepting";
-    auto new_cli = SharedFD::Accept(*guest_hal_socket_);
-    ConfUiLog(VERBOSE) << "hal client accepted";
-    if (new_cli->IsOpen()) {
-      return new_cli;
-    }
-    std::this_thread::sleep_for(500ms);
-  }
+  SendUserSelection(input);
 }
 
 // read the comments in the header file
@@ -227,6 +204,9 @@
       auto [x, y] = touch_event.GetLocation();
       const bool is_confirm = curr_session_->IsConfirm(x, y);
       const bool is_cancel = curr_session_->IsCancel(x, y);
+      ConfUiLog(INFO) << "Touch at [" << x << ", " << y << "] was "
+                      << (is_cancel ? "CANCEL"
+                                    : (is_confirm ? "CONFIRM" : "INVALID"));
       if (!is_confirm && !is_cancel) {
         // ignore, take the next input
         continue;
@@ -235,7 +215,8 @@
           std::make_unique<ConfUiUserSelectionMessage>(
               GetCurrentSessionId(),
               (is_confirm ? UserResponse::kConfirm : UserResponse::kCancel));
-      input_ptr = WrapWithSecureFlag(*tmp_input_ptr, touch_event.IsSecure());
+      input_ptr =
+          WrapWithSecureFlag(std::move(tmp_input_ptr), touch_event.IsSecure());
     }
     Transition(input_ptr);
 
@@ -249,8 +230,8 @@
 }
 
 std::shared_ptr<Session> HostServer::CreateSession(const std::string& name) {
-  return std::make_shared<Session>(name, display_num_, host_mode_ctrl_,
-                                   screen_connector_);
+  return std::make_shared<Session>(name, display_num_, host_renderer_,
+                                   host_mode_ctrl_);
 }
 
 static bool IsUserAbort(ConfUiMessage& msg) {
@@ -270,7 +251,7 @@
   FsmInput fsm_input = ToFsmInput(input);
   ConfUiLog(VERBOSE) << "Handling " << ToString(cmd);
   if (IsUserAbort(input)) {
-    curr_session_->UserAbort(hal_cli_socket_);
+    curr_session_->UserAbort(to_guest_fifo_fd_);
     return;
   }
 
@@ -278,7 +259,7 @@
     curr_session_->Abort();
     return;
   }
-  curr_session_->Transition(hal_cli_socket_, fsm_input, input);
+  curr_session_->Transition(to_guest_fifo_fd_, fsm_input, input);
 }
 
 }  // end of namespace confui
diff --git a/host/libs/confui/host_server.h b/host/libs/confui/host_server.h
index 904af33..8affafd 100644
--- a/host/libs/confui/host_server.h
+++ b/host/libs/confui/host_server.h
@@ -26,6 +26,7 @@
 #include <vector>
 
 #include <android-base/logging.h>
+#include <fruit/fruit.h>
 #include <teeui/utils.h>
 
 #include "common/libs/concurrency/multiplexer.h"
@@ -35,31 +36,30 @@
 #include "host/commands/kernel_log_monitor/utils.h"
 #include "host/libs/config/logging.h"
 #include "host/libs/confui/host_mode_ctrl.h"
-#include "host/libs/confui/host_virtual_input.h"
+#include "host/libs/confui/host_renderer.h"
 #include "host/libs/confui/server_common.h"
 #include "host/libs/confui/session.h"
-#include "host/libs/screen_connector/screen_connector.h"
 
 namespace cuttlefish {
 namespace confui {
-class HostServer : public HostVirtualInput {
+struct PipeConnectionPair {
+  SharedFD from_guest_;
+  SharedFD to_guest_;
+};
+
+class HostServer {
  public:
-  static HostServer& Get(
-      HostModeCtrl& host_mode_ctrl,
-      cuttlefish::ScreenConnectorFrameRenderer& screen_connector);
+  INJECT(HostServer(HostModeCtrl& host_mode_ctrl, ConfUiRenderer& host_renderer,
+                    const PipeConnectionPair& fd_pair));
 
   void Start();  // start this server itself
   virtual ~HostServer() {}
 
   // implement input interfaces. called by webRTC
-  void TouchEvent(const int x, const int y, const bool is_down) override;
-  void UserAbortEvent() override;
-  bool IsConfUiActive() override;
+  void TouchEvent(const int x, const int y, const bool is_down);
+  void UserAbortEvent();
 
  private:
-  explicit HostServer(
-      cuttlefish::HostModeCtrl& host_mode_ctrl,
-      cuttlefish::ScreenConnectorFrameRenderer& screen_connector);
   HostServer() = delete;
 
   /**
@@ -112,8 +112,9 @@
   [[noreturn]] void MainLoop();
   void HalCmdFetcherLoop();
 
-  SharedFD EstablishHalConnection();
-
+  bool IsVirtioConsoleOpen() const;
+  // If !IsVirtioConsoleOpen(), LOG(FATAL) and return false
+  bool CheckVirtioConsole();
   std::shared_ptr<Session> CreateSession(const std::string& session_name);
   void SendUserSelection(std::unique_ptr<ConfUiMessage>& input);
 
@@ -133,17 +134,13 @@
   }
 
   const std::uint32_t display_num_;
+  ConfUiRenderer& host_renderer_;
   HostModeCtrl& host_mode_ctrl_;
-  ScreenConnectorFrameRenderer& screen_connector_;
-
-  std::string input_socket_path_;
-  int hal_vsock_port_;
 
   std::shared_ptr<Session> curr_session_;
 
-  SharedFD guest_hal_socket_;
-  // ACCEPTED fd on guest_hal_socket_
-  SharedFD hal_cli_socket_;
+  SharedFD from_guest_fifo_fd_;
+  SharedFD to_guest_fifo_fd_;
 
   using Multiplexer =
       Multiplexer<std::unique_ptr<ConfUiMessage>,
@@ -162,10 +159,6 @@
 
   std::thread main_loop_thread_;
   std::thread hal_input_fetcher_thread_;
-
-  std::mutex socket_flag_mtx_;
-  std::condition_variable socket_flag_cv_;
-  bool is_socket_ok_;
 };
 
 }  // end of namespace confui
diff --git a/host/libs/confui/host_utils.cc b/host/libs/confui/host_utils.cc
index 6ff67b0..bc5d23e 100644
--- a/host/libs/confui/host_utils.cc
+++ b/host/libs/confui/host_utils.cc
@@ -21,7 +21,7 @@
 namespace thread {
 std::string ThreadTracer::Get(const std::thread::id tid) {
   std::lock_guard<std::mutex> lock(mtx_);
-  if (id2name_.find(tid) != id2name_.end()) {
+  if (Contains(id2name_, tid)) {
     return id2name_[tid];
   }
   std::stringstream ss;
@@ -31,7 +31,7 @@
 
 void ThreadTracer::Set(const std::string& name, const std::thread::id tid) {
   std::lock_guard<std::mutex> lock(mtx_);
-  if (name2id_.find(name) != name2id_.end()) {
+  if (Contains(name2id_, name)) {
     // has the name already
     if (name2id_[name] != tid) {  // used for another thread
       ConfUiLog(FATAL) << "Thread name is duplicated.";
@@ -39,7 +39,7 @@
     // name and id are already set correctly
     return;
   }
-  if (id2name_.find(tid) != id2name_.end()) {
+  if (Contains(id2name_, tid)) {
     // tid exists but has a different name
     name2id_.erase(id2name_[tid]);  // delete old_name -> tid map
   }
@@ -50,10 +50,10 @@
 
 std::optional<std::thread::id> ThreadTracer::Get(const std::string& name) {
   std::lock_guard<std::mutex> lock(mtx_);
-  if (name2id_.find(name) != name2id_.end()) {
+  if (Contains(name2id_, name)) {
     return {name2id_[name]};
   }
-  return std::nullopt;  // unknown
+  return std::nullopt;
 }
 
 ThreadTracer& GetThreadTracer() {
diff --git a/host/libs/confui/host_utils.h b/host/libs/confui/host_utils.h
index bd4004b..4037419 100644
--- a/host/libs/confui/host_utils.h
+++ b/host/libs/confui/host_utils.h
@@ -27,6 +27,7 @@
 #include <android-base/logging.h>
 
 #include "common/libs/confui/confui.h"
+#include "common/libs/utils/contains.h"
 #include "host/commands/kernel_log_monitor/utils.h"
 #include "host/libs/config/logging.h"
 
@@ -68,7 +69,7 @@
   template <typename F, typename... Args>
   std::thread RunThread(const std::string& name, F&& f, Args&&... args) {
     auto th = std::thread(std::forward<F>(f), std::forward<Args>(args)...);
-    if (name2id_.find(name) != name2id_.end()) {
+    if (Contains(name2id_, name)) {
       ConfUiLog(FATAL) << "Thread name is duplicated";
     }
     name2id_[name] = th.get_id();
diff --git a/host/libs/confui/host_virtual_input.cc b/host/libs/confui/host_virtual_input.cc
new file mode 100644
index 0000000..62d1b8d
--- /dev/null
+++ b/host/libs/confui/host_virtual_input.cc
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0f
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/libs/confui/host_virtual_input.h"
+
+#include <android-base/logging.h>
+
+namespace cuttlefish {
+namespace confui {
+
+HostVirtualInput::HostVirtualInput(HostServer& host_server,
+                                   HostModeCtrl& host_mode_ctrl)
+    : host_server_(host_server), host_mode_ctrl_(host_mode_ctrl) {}
+
+void HostVirtualInput::TouchEvent(const int x, const int y,
+                                  const bool is_down) {
+  std::string mode("Android Mode");
+  if (IsConfUiActive()) {
+    mode = std::string("Confirmation UI Mode");
+  }
+  if (is_down) {
+    ConfUiLog(INFO) << "TouchEvent occurs in " << mode << " at [" << x << ", "
+                    << y << "]";
+  }
+  host_server_.TouchEvent(x, y, is_down);
+}
+
+void HostVirtualInput::UserAbortEvent() { host_server_.UserAbortEvent(); }
+
+bool HostVirtualInput::IsConfUiActive() {
+  return host_mode_ctrl_.IsConfirmatioUiMode();
+}
+
+}  // namespace confui
+}  // namespace cuttlefish
diff --git a/host/libs/confui/host_virtual_input.h b/host/libs/confui/host_virtual_input.h
index 65ec577..1fcc84e 100644
--- a/host/libs/confui/host_virtual_input.h
+++ b/host/libs/confui/host_virtual_input.h
@@ -18,6 +18,10 @@
 
 #include <cstdint>
 
+#include <fruit/fruit.h>
+
+#include "host/libs/confui/host_server.h"
+
 namespace cuttlefish {
 namespace confui {
 enum class ConfUiKeys : std::uint32_t { Confirm = 7, Cancel = 8 };
@@ -28,11 +32,18 @@
  */
 class HostVirtualInput {
  public:
-  virtual void TouchEvent(const int x, const int y, const bool is_down) = 0;
-  virtual void UserAbortEvent() = 0;
-  virtual ~HostVirtualInput() = default;
+  INJECT(HostVirtualInput(HostServer& host_server,
+                          HostModeCtrl& host_mode_ctrl));
+
+  void TouchEvent(const int x, const int y, const bool is_down);
+  void UserAbortEvent();
+  ~HostVirtualInput() = default;
   // guarantees that if this returns true, it is confirmation UI mode
-  virtual bool IsConfUiActive() = 0;
+  bool IsConfUiActive();
+
+ private:
+  HostServer& host_server_;
+  HostModeCtrl& host_mode_ctrl_;
 };
 }  // namespace confui
 }  // namespace cuttlefish
diff --git a/host/libs/confui/secure_input.cc b/host/libs/confui/secure_input.cc
index 672ed3d..0ce6f22 100644
--- a/host/libs/confui/secure_input.cc
+++ b/host/libs/confui/secure_input.cc
@@ -18,15 +18,37 @@
 
 namespace cuttlefish {
 namespace confui {
+namespace {
+
+template <typename T>
+auto CheckAndReturnSessionId(const std::unique_ptr<T>& msg) {
+  CHECK(msg) << "ConfUiUserSelectionMessage must not be null";
+  return msg->GetSessionId();
+}
+
+}  // end of namespace
+
+ConfUiSecureUserSelectionMessage::ConfUiSecureUserSelectionMessage(
+    std::unique_ptr<ConfUiUserSelectionMessage>&& msg, const bool secure)
+    : ConfUiMessage(CheckAndReturnSessionId(msg)),
+      msg_(std::move(msg)),
+      is_secure_(secure) {}
+
+ConfUiSecureUserTouchMessage::ConfUiSecureUserTouchMessage(
+    std::unique_ptr<ConfUiUserTouchMessage>&& msg, const bool secure)
+    : ConfUiMessage(CheckAndReturnSessionId(msg)),
+      msg_(std::move(msg)),
+      is_secure_(secure) {}
 
 std::unique_ptr<ConfUiSecureUserSelectionMessage> ToSecureSelectionMessage(
-    const ConfUiUserSelectionMessage& msg, const bool secure) {
-  return std::make_unique<ConfUiSecureUserSelectionMessage>(msg, secure);
+    std::unique_ptr<ConfUiUserSelectionMessage>&& msg, const bool secure) {
+  return std::make_unique<ConfUiSecureUserSelectionMessage>(std::move(msg),
+                                                            secure);
 }
 
 std::unique_ptr<ConfUiSecureUserTouchMessage> ToSecureTouchMessage(
-    const ConfUiUserTouchMessage& msg, const bool secure) {
-  return std::make_unique<ConfUiSecureUserTouchMessage>(msg, secure);
+    std::unique_ptr<ConfUiUserTouchMessage>&& msg, const bool secure) {
+  return std::make_unique<ConfUiSecureUserTouchMessage>(std::move(msg), secure);
 }
 
 }  // end of namespace confui
diff --git a/host/libs/confui/secure_input.h b/host/libs/confui/secure_input.h
index 527fcd7..a56af5e 100644
--- a/host/libs/confui/secure_input.h
+++ b/host/libs/confui/secure_input.h
@@ -33,45 +33,43 @@
 namespace confui {
 class ConfUiSecureUserSelectionMessage : public ConfUiMessage {
  public:
-  ConfUiSecureUserSelectionMessage(const ConfUiUserSelectionMessage& msg,
-                                   const bool secure)
-      : ConfUiMessage(msg.GetSessionId()), msg_(msg), is_secure_(secure) {}
+  ConfUiSecureUserSelectionMessage(
+      std::unique_ptr<ConfUiUserSelectionMessage>&& msg, const bool secure);
   ConfUiSecureUserSelectionMessage() = delete;
   virtual ~ConfUiSecureUserSelectionMessage() = default;
-  std::string ToString() const override { return msg_.ToString(); }
-  ConfUiCmd GetType() const override { return msg_.GetType(); }
-  auto GetResponse() const { return msg_.GetResponse(); }
+  std::string ToString() const override { return msg_->ToString(); }
+  ConfUiCmd GetType() const override { return msg_->GetType(); }
+  auto GetResponse() const { return msg_->GetResponse(); }
   // SendOver is between guest and host, so it doesn't send the is_secure_
-  bool SendOver(SharedFD fd) override { return msg_.SendOver(fd); }
+  bool SendOver(SharedFD fd) override { return msg_->SendOver(fd); }
   bool IsSecure() const { return is_secure_; }
   // SetSecure() might be needed later on but not now.
 
  private:
-  ConfUiUserSelectionMessage msg_;
+  std::unique_ptr<ConfUiUserSelectionMessage> msg_;
   bool is_secure_;
 };
 
 class ConfUiSecureUserTouchMessage : public ConfUiMessage {
  public:
-  ConfUiSecureUserTouchMessage(const ConfUiUserTouchMessage& msg,
-                               const bool secure)
-      : ConfUiMessage(msg.GetSessionId()), msg_(msg), is_secure_(secure) {}
+  ConfUiSecureUserTouchMessage(std::unique_ptr<ConfUiUserTouchMessage>&& msg,
+                               const bool secure);
   virtual ~ConfUiSecureUserTouchMessage() = default;
-  std::string ToString() const override { return msg_.ToString(); }
-  ConfUiCmd GetType() const override { return msg_.GetType(); }
-  auto GetResponse() const { return msg_.GetResponse(); }
-  bool SendOver(SharedFD fd) override { return msg_.SendOver(fd); }
-  std::pair<int, int> GetLocation() { return msg_.GetLocation(); }
+  std::string ToString() const override { return msg_->ToString(); }
+  ConfUiCmd GetType() const override { return msg_->GetType(); }
+  auto GetResponse() const { return msg_->GetResponse(); }
+  bool SendOver(SharedFD fd) override { return msg_->SendOver(fd); }
+  std::pair<int, int> GetLocation() const { return msg_->GetLocation(); }
   bool IsSecure() const { return is_secure_; }
 
  private:
-  ConfUiUserTouchMessage msg_;
+  std::unique_ptr<ConfUiUserTouchMessage> msg_;
   bool is_secure_;
 };
 
 std::unique_ptr<ConfUiSecureUserSelectionMessage> ToSecureSelectionMessage(
-    const ConfUiUserSelectionMessage& msg, const bool secure);
+    std::unique_ptr<ConfUiUserSelectionMessage>&& msg, const bool secure);
 std::unique_ptr<ConfUiSecureUserTouchMessage> ToSecureTouchMessage(
-    const ConfUiUserTouchMessage& msg, const bool secure);
+    std::unique_ptr<ConfUiUserTouchMessage>&& msg, const bool secure);
 }  // end of namespace confui
 }  // end of namespace cuttlefish
diff --git a/host/libs/confui/session.cc b/host/libs/confui/session.cc
index 2b7069b..6da0420 100644
--- a/host/libs/confui/session.cc
+++ b/host/libs/confui/session.cc
@@ -18,19 +18,19 @@
 
 #include <algorithm>
 
+#include "common/libs/utils/contains.h"
 #include "host/libs/confui/secure_input.h"
 
 namespace cuttlefish {
 namespace confui {
 
 Session::Session(const std::string& session_name,
-                 const std::uint32_t display_num, HostModeCtrl& host_mode_ctrl,
-                 ScreenConnectorFrameRenderer& screen_connector,
-                 const std::string& locale)
+                 const std::uint32_t display_num, ConfUiRenderer& host_renderer,
+                 HostModeCtrl& host_mode_ctrl, const std::string& locale)
     : session_id_{session_name},
       display_num_{display_num},
+      renderer_{host_renderer},
       host_mode_ctrl_{host_mode_ctrl},
-      screen_connector_{screen_connector},
       locale_{locale},
       state_{MainLoopState::kInit},
       saved_state_{MainLoopState::kInit} {}
@@ -57,46 +57,14 @@
   return (right_now - *start_time_) >= GetGracePeriod();
 }
 
-bool Session::IsConfUiActive() const {
-  if (state_ == MainLoopState::kInSession ||
-      state_ == MainLoopState::kWaitStop) {
-    return true;
-  }
-  return false;
-}
-
-template <typename C, typename T>
-static bool Contains(const C& c, T&& item) {
-  auto itr = std::find(c.begin(), c.end(), std::forward<T>(item));
-  return itr != c.end();
-}
-
-bool Session::IsInverted() const {
-  return Contains(ui_options_, teeui::UIOption::AccessibilityInverted);
-}
-
-bool Session::IsMagnified() const {
-  return Contains(ui_options_, teeui::UIOption::AccessibilityMagnified);
-}
-
 bool Session::RenderDialog() {
-  renderer_ = ConfUiRenderer::GenerateRenderer(
-      display_num_, prompt_text_, locale_, IsInverted(), IsMagnified());
-  if (!renderer_) {
+  auto result =
+      renderer_.RenderDialog(display_num_, prompt_text_, locale_, ui_options_);
+  if (!result.ok()) {
+    LOG(ERROR) << result.error().Trace();
     return false;
   }
-  auto teeui_frame = renderer_->RenderRawFrame();
-  if (!teeui_frame) {
-    return false;
-  }
-  ConfUiLog(VERBOSE) << "actually trying to render the frame"
-                     << thread::GetName();
-  auto frame_width = teeui_frame->Width();
-  auto frame_height = teeui_frame->Height();
-  auto frame_stride_bytes = teeui_frame->ScreenStrideBytes();
-  auto frame_bytes = reinterpret_cast<std::uint8_t*>(teeui_frame->data());
-  return screen_connector_.RenderConfirmationUi(
-      display_num_, frame_width, frame_height, frame_stride_bytes, frame_bytes);
+  return true;
 }
 
 MainLoopState Session::Transition(SharedFD& hal_cli, const FsmInput fsm_input,
diff --git a/host/libs/confui/session.h b/host/libs/confui/session.h
index 1afccac..ad4046f 100644
--- a/host/libs/confui/session.h
+++ b/host/libs/confui/session.h
@@ -29,7 +29,6 @@
 #include "host/libs/confui/host_renderer.h"
 #include "host/libs/confui/server_common.h"
 #include "host/libs/confui/sign.h"
-#include "host/libs/screen_connector/screen_connector.h"
 
 namespace cuttlefish {
 namespace confui {
@@ -44,12 +43,9 @@
 class Session {
  public:
   Session(const std::string& session_name, const std::uint32_t display_num,
-          HostModeCtrl& host_mode_ctrl,
-          ScreenConnectorFrameRenderer& screen_connector,
+          ConfUiRenderer& host_renderer, HostModeCtrl& host_mode_ctrl,
           const std::string& locale = "en");
 
-  bool IsConfUiActive() const;
-
   std::string GetId() { return session_id_; }
 
   MainLoopState GetState() { return state_; }
@@ -78,12 +74,10 @@
   void CleanUp();
 
   bool IsConfirm(const int x, const int y) {
-    return renderer_->IsInConfirm(x, y);
+    return renderer_.IsInConfirm(x, y);
   }
 
-  bool IsCancel(const int x, const int y) {
-    return renderer_->IsInCancel(x, y);
-  }
+  bool IsCancel(const int x, const int y) { return renderer_.IsInCancel(x, y); }
 
   // tell if grace period has passed
   bool IsReadyForUserInput() const;
@@ -117,14 +111,10 @@
 
   void ScheduleToTerminate();
 
-  bool IsInverted() const;
-  bool IsMagnified() const;
-
   const std::string session_id_;
   const std::uint32_t display_num_;
-  std::unique_ptr<ConfUiRenderer> renderer_;
+  ConfUiRenderer& renderer_;
   HostModeCtrl& host_mode_ctrl_;
-  ScreenConnectorFrameRenderer& screen_connector_;
 
   // only context to save
   std::string prompt_text_;
diff --git a/host/libs/confui/sign.cc b/host/libs/confui/sign.cc
index 2c454e5..bbaac83 100644
--- a/host/libs/confui/sign.cc
+++ b/host/libs/confui/sign.cc
@@ -37,7 +37,7 @@
   auto config = cuttlefish::CuttlefishConfig::Get();
   CHECK(config) << "Config must not be null";
   auto instance = config->ForDefaultInstance();
-  return instance.PerInstanceInternalPath("confui_sign.sock");
+  return instance.PerInstanceInternalUdsPath("confui_sign.sock");
 }
 
 /**
diff --git a/host/libs/graphics_detector/Android.bp b/host/libs/graphics_detector/Android.bp
index 679825a..278bdeb 100644
--- a/host/libs/graphics_detector/Android.bp
+++ b/host/libs/graphics_detector/Android.bp
@@ -30,14 +30,41 @@
 }
 
 cc_library_static {
+    name: "libcuttlefish_graphics_configuration",
+    srcs: [
+        "graphics_configuration.cpp",
+    ],
+    shared_libs: [
+        "libbase",
+        "libjsoncpp",
+        "liblog",
+    ],
+    static_libs: [
+        "libcuttlefish_host_config",
+    ],
+    defaults: ["cuttlefish_host"],
+}
+
+cc_library_static {
     name: "libcuttlefish_graphics_detector",
     srcs: [
+        "egl.cpp",
+        "gles.cpp",
+        "img.cpp",
+        "lib.cpp",
         "graphics_detector.cpp",
+        "graphics_detector_gl.cpp",
+        "graphics_detector_vk.cpp",
+        "graphics_detector_vk_precision_qualifiers_on_yuv_samplers.cpp",
+        "subprocess.cpp",
+        "vk.cpp",
+    ],
+    local_include_dirs: [
+        "include",
     ],
     header_libs: [
         "egl_headers",
         "gl_headers",
-        "vulkan_headers",
     ],
     shared_libs: [
         "libbase",
@@ -46,18 +73,17 @@
     defaults: ["cuttlefish_host"],
 }
 
-cc_binary {
+cc_binary_host {
     name: "detect_graphics",
     srcs: [
         "detect_graphics.cpp",
     ],
-    shared_libs: [
+    static_libs: [
         "libbase",
         "liblog",
-    ],
-    static_libs: [
         "libcuttlefish_graphics_detector",
         "libgflags",
     ],
+    stl: "libc++_static",
     defaults: ["cuttlefish_host"],
 }
diff --git a/host/libs/graphics_detector/egl.cpp b/host/libs/graphics_detector/egl.cpp
new file mode 100644
index 0000000..29f03a9
--- /dev/null
+++ b/host/libs/graphics_detector/egl.cpp
@@ -0,0 +1,165 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/libs/graphics_detector/egl.h"
+
+#include <GLES/gl.h>
+#include <android-base/logging.h>
+#include <android-base/strings.h>
+
+namespace cuttlefish {
+namespace {
+
+constexpr const char kEglLib[] = "libEGL.so";
+constexpr const char kEglLibAlt[] = "libEGL.so.1";
+
+std::optional<Lib> LoadEglLib() {
+  for (const auto* possible_name : {kEglLib, kEglLibAlt}) {
+    auto lib_opt = Lib::Load(possible_name);
+    if (!lib_opt) {
+      LOG(VERBOSE) << "Failed to load " << possible_name;
+    } else {
+      LOG(VERBOSE) << "Loaded " << possible_name;
+      return std::move(lib_opt);
+    }
+  }
+  return std::nullopt;
+}
+
+}  // namespace
+
+/*static*/
+std::optional<Egl> Egl::Load() {
+  auto lib_opt = LoadEglLib();
+  if (!lib_opt) {
+    return std::nullopt;
+  }
+
+  Egl egl;
+  egl.lib_ = std::move(*lib_opt);
+
+#define LOAD_EGL_FUNCTION_POINTER(return_type, function_name, signature)       \
+  egl.function_name = reinterpret_cast<return_type(GL_APIENTRY*) signature>(   \
+      egl.lib_.GetSymbol(#function_name));                                     \
+  if (egl.function_name == nullptr) {                                          \
+    egl.function_name = reinterpret_cast<return_type(GL_APIENTRY*) signature>( \
+        egl.eglGetProcAddress(#function_name));                                \
+  }                                                                            \
+  if (egl.function_name == nullptr) {                                          \
+    LOG(VERBOSE) << "Failed to load EGL function: " << #function_name;         \
+  } else {                                                                     \
+    LOG(VERBOSE) << "Loaded EGL function: " << #function_name;                 \
+  }
+
+  FOR_EACH_EGL_FUNCTION(LOAD_EGL_FUNCTION_POINTER);
+
+  egl.Init();
+
+  return std::move(egl);
+}
+
+void Egl::Init() {
+  EGLDisplay display = eglGetDisplay(EGL_DEFAULT_DISPLAY);
+  if (display == EGL_NO_DISPLAY) {
+    LOG(FATAL) << "Failed to get default display";
+  }
+
+  EGLint client_version_major = 0;
+  EGLint client_version_minor = 0;
+  if (eglInitialize(display, &client_version_major, &client_version_minor) !=
+      EGL_TRUE) {
+    LOG(FATAL) << "Failed to initialize display.";
+    return;
+  }
+  LOG(VERBOSE) << "Found EGL client version " << client_version_major << "."
+               << client_version_minor;
+
+  const std::string vendor_string = eglQueryString(display, EGL_VENDOR);
+  if (vendor_string.empty()) {
+    LOG(FATAL) << "Failed to query vendor.";
+    return;
+  }
+  LOG(VERBOSE) << "Found EGL vendor: " << vendor_string;
+
+  const std::string extensions_string = eglQueryString(display, EGL_EXTENSIONS);
+  if (extensions_string.empty()) {
+    LOG(FATAL) << "Failed to query extensions.";
+    return;
+  }
+  LOG(VERBOSE) << "Found EGL extensions: " << extensions_string;
+
+  if (eglBindAPI(EGL_OPENGL_ES_API) == EGL_FALSE) {
+    LOG(FATAL) << "Failed to bind GLES API.";
+    return;
+  }
+
+  const EGLint attribs[] = {
+      // clang-format off
+    EGL_SURFACE_TYPE,     EGL_PBUFFER_BIT,
+    EGL_RENDERABLE_TYPE,  EGL_OPENGL_ES3_BIT,
+    EGL_RED_SIZE,         8,
+    EGL_GREEN_SIZE,       8,
+    EGL_BLUE_SIZE,        8,
+    EGL_ALPHA_SIZE,       8,
+    EGL_NONE,
+      // clang-format on
+  };
+
+  EGLConfig config;
+  EGLint num_configs = 0;
+  if (eglChooseConfig(display, attribs, &config, 1, &num_configs) != EGL_TRUE) {
+    LOG(FATAL) << "Failed to find matching framebuffer config.";
+    return;
+  }
+  LOG(VERBOSE) << "Found matching framebuffer config.";
+
+  const EGLint pbuffer_attribs[] = {
+      // clang-format off
+    EGL_WIDTH,  720,
+    EGL_HEIGHT, 720,
+    EGL_NONE,
+      // clang-format on
+  };
+
+  EGLSurface primary_surface =
+      eglCreatePbufferSurface(display, config, pbuffer_attribs);
+  if (primary_surface == EGL_NO_SURFACE) {
+    LOG(FATAL) << "Failed to create EGL surface.";
+    return;
+  }
+
+  const EGLint context_attribs[] = {
+      // clang-format off
+    EGL_CONTEXT_CLIENT_VERSION, 3,
+    EGL_NONE
+      // clang-format on
+  };
+
+  EGLContext primary_context =
+      eglCreateContext(display, config, EGL_NO_CONTEXT, context_attribs);
+  if (primary_context == EGL_NO_CONTEXT) {
+    LOG(FATAL) << "Failed to create EGL context.";
+    return;
+  }
+
+  if (eglMakeCurrent(display, primary_surface, primary_surface,
+                     primary_context) == EGL_FALSE) {
+    LOG(FATAL) << "Failed to make primary EGL context/surface current.";
+    return;
+  }
+}
+
+}  // namespace cuttlefish
\ No newline at end of file
diff --git a/host/libs/graphics_detector/egl.h b/host/libs/graphics_detector/egl.h
new file mode 100644
index 0000000..696eb00
--- /dev/null
+++ b/host/libs/graphics_detector/egl.h
@@ -0,0 +1,51 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <EGL/egl.h>
+#include <EGL/eglext.h>
+
+#include "host/libs/graphics_detector/egl_funcs.h"
+#include "host/libs/graphics_detector/lib.h"
+
+namespace cuttlefish {
+
+class Egl {
+ public:
+  static std::optional<Egl> Load();
+
+  Egl(const Egl&) = delete;
+  Egl& operator=(const Egl&) = delete;
+
+  Egl(Egl&&) = default;
+  Egl& operator=(Egl&&) = default;
+
+#define DECLARE_EGL_FUNCTION_MEMBER_POINTER(return_type, function_name, \
+                                            signature)                  \
+  return_type(EGLAPIENTRY* function_name) signature = nullptr;
+
+  FOR_EACH_EGL_FUNCTION(DECLARE_EGL_FUNCTION_MEMBER_POINTER);
+
+ private:
+  Egl() = default;
+
+  void Init();
+
+  Lib lib_;
+};
+
+}  // namespace cuttlefish
diff --git a/host/libs/graphics_detector/egl_funcs.h b/host/libs/graphics_detector/egl_funcs.h
new file mode 100644
index 0000000..3d783aa
--- /dev/null
+++ b/host/libs/graphics_detector/egl_funcs.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+// clang-format off
+#define FOR_EACH_EGL_FUNCTION(X) \
+  X(void*, eglGetProcAddress, (const char* procname)) \
+  X(const char*, eglQueryString, (EGLDisplay dpy, EGLint id)) \
+  X(EGLDisplay, eglGetPlatformDisplay, (EGLenum platform, void *native_display, const EGLAttrib *attrib_list)) \
+  X(EGLDisplay, eglGetPlatformDisplayEXT, (EGLenum platform, void *native_display, const EGLint *attrib_list)) \
+  X(EGLBoolean, eglBindAPI, (EGLenum api)) \
+  X(EGLBoolean, eglChooseConfig, (EGLDisplay display, EGLint const* attrib_list, EGLConfig* configs, EGLint config_size, EGLint* num_config))  \
+  X(EGLContext, eglCreateContext, (EGLDisplay display, EGLConfig config, EGLContext share_context, EGLint const* attrib_list)) \
+  X(EGLSurface, eglCreatePbufferSurface, (EGLDisplay display, EGLConfig config, EGLint const* attrib_list)) \
+  X(EGLBoolean, eglDestroyContext, (EGLDisplay display, EGLContext context)) \
+  X(EGLBoolean, eglDestroySurface, (EGLDisplay display, EGLSurface surface)) \
+  X(EGLBoolean, eglGetConfigAttrib, (EGLDisplay display, EGLConfig config, EGLint attribute, EGLint * value)) \
+  X(EGLDisplay, eglGetDisplay, (NativeDisplayType native_display)) \
+  X(EGLint, eglGetError, (void)) \
+  X(EGLBoolean, eglInitialize, (EGLDisplay display, EGLint * major, EGLint * minor)) \
+  X(EGLBoolean, eglTerminate, (EGLDisplay display)) \
+  X(EGLBoolean, eglMakeCurrent, (EGLDisplay display, EGLSurface draw, EGLSurface read, EGLContext context)) \
+  X(EGLBoolean, eglSwapBuffers, (EGLDisplay display, EGLSurface surface)) \
+  X(EGLSurface, eglCreateWindowSurface, (EGLDisplay display, EGLConfig config, EGLNativeWindowType native_window, EGLint const* attrib_list)) \
+  X(EGLBoolean, eglSwapInterval, (EGLDisplay display, EGLint interval)) \
+  X(void, eglSetBlobCacheFuncsANDROID, (EGLDisplay display, EGLSetBlobFuncANDROID set, EGLGetBlobFuncANDROID get)) \
+  X(EGLImage, eglCreateImageKHR, (EGLDisplay dpy, EGLContext ctx, EGLenum target, EGLClientBuffer buffer, const EGLint *attrib_list)) \
+  X(EGLBoolean, eglDestroyImageKHR, (EGLDisplay dpy, EGLImage image)) \
+  X(EGLImage, eglCreateImage, (EGLDisplay dpy, EGLContext ctx, EGLenum target, EGLClientBuffer buffer, const EGLint *attrib_list)) \
+  X(EGLBoolean, eglDestroyImage, (EGLDisplay dpy, EGLImage image))
+
+// clang-format on
\ No newline at end of file
diff --git a/host/libs/graphics_detector/gles.cpp b/host/libs/graphics_detector/gles.cpp
new file mode 100644
index 0000000..d8fbea6
--- /dev/null
+++ b/host/libs/graphics_detector/gles.cpp
@@ -0,0 +1,206 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/libs/graphics_detector/gles.h"
+
+#include <android-base/logging.h>
+#include <android-base/strings.h>
+
+namespace cuttlefish {
+namespace {
+
+constexpr const char kGles2Lib[] = "libGLESv2.so";
+
+static void GL_APIENTRY GlDebugCallback(GLenum, GLenum, GLuint, GLenum, GLsizei,
+                                        const GLchar* message, const void*) {
+  LOG(VERBOSE) << "GlDebugCallback message: " << message;
+}
+
+}  // namespace
+
+/*static*/ std::optional<Gles> Gles::Load() {
+  auto lib_opt = Lib::Load(kGles2Lib);
+  if (!lib_opt) {
+    return std::nullopt;
+  }
+
+  Gles gles;
+  gles.lib_ = std::move(*lib_opt);
+
+#define LOAD_GLES_FUNCTION_POINTER(return_type, function_name, signature, \
+                                   args)                                  \
+  gles.function_name = reinterpret_cast<return_type(*) signature>(        \
+      gles.lib_.GetSymbol(#function_name));                               \
+  if (gles.function_name == nullptr) {                                    \
+    gles.function_name = reinterpret_cast<return_type(*) signature>(      \
+        gles.lib_.GetSymbol(#function_name));                             \
+  }                                                                       \
+  if (gles.function_name == nullptr) {                                    \
+    gles.function_name = reinterpret_cast<return_type(*) signature>(      \
+        gles.lib_.GetSymbol(#function_name "OES"));                       \
+  }                                                                       \
+  if (gles.function_name == nullptr) {                                    \
+    gles.function_name = reinterpret_cast<return_type(*) signature>(      \
+        gles.lib_.GetSymbol(#function_name "EXT"));                       \
+  }                                                                       \
+  if (gles.function_name == nullptr) {                                    \
+    gles.function_name = reinterpret_cast<return_type(*) signature>(      \
+        gles.lib_.GetSymbol(#function_name "ARB"));                       \
+  }                                                                       \
+  if (gles.function_name == nullptr) {                                    \
+    LOG(VERBOSE) << "Failed to load GLES function: " << #function_name;   \
+  } else {                                                                \
+    LOG(VERBOSE) << "Loaded GLES function: " << #function_name;           \
+  }
+
+  FOR_EACH_GLES_FUNCTION(LOAD_GLES_FUNCTION_POINTER);
+
+  gles.Init();
+
+  return std::move(gles);
+}
+
+/*static*/ std::optional<Gles> Gles::LoadFromEgl(Egl* egl) {
+  auto lib_opt = Lib::Load(kGles2Lib);
+  if (!lib_opt) {
+    return std::nullopt;
+  }
+
+  Gles gles;
+
+#define LOAD_GLES_FUNCTION_POINTER_FROM_EGL(return_type, function_name, \
+                                            signature, args)            \
+  gles.function_name = reinterpret_cast<return_type(*) signature>(      \
+      egl->eglGetProcAddress(#function_name));                          \
+  if (gles.function_name == nullptr) {                                  \
+    LOG(VERBOSE) << "Failed to load GLES function: " << #function_name; \
+  } else {                                                              \
+    LOG(VERBOSE) << "Loaded GLES function: " << #function_name;         \
+  }
+
+  FOR_EACH_GLES_FUNCTION(LOAD_GLES_FUNCTION_POINTER_FROM_EGL);
+
+  gles.Init();
+
+  return std::move(gles);
+}
+
+void Gles::Init() {
+  const GLubyte* gles_vendor = glGetString(GL_VENDOR);
+  if (gles_vendor == nullptr) {
+    LOG(FATAL) << "Failed to get GLES vendor";
+    return;
+  }
+  LOG(VERBOSE) << "Found GLES vendor: " << gles_vendor;
+
+  const std::string gles_extensions_str =
+      reinterpret_cast<const char*>(glGetString(GL_EXTENSIONS));
+  if (gles_extensions_str.empty()) {
+    LOG(FATAL) << "Failed to get GLES extensions";
+    return;
+  }
+  std::vector<std::string> gles_extensions =
+      android::base::Split(gles_extensions_str, " ");
+  std::sort(gles_extensions.begin(), gles_extensions.end());
+  LOG(VERBOSE) << "Found GLES extensions:";
+  for (const std::string& gles_extension : gles_extensions) {
+    LOG(VERBOSE) << gles_extension;
+  }
+  LOG(VERBOSE) << "";
+
+  glEnable(GL_DEBUG_OUTPUT);
+  glEnable(GL_DEBUG_OUTPUT_SYNCHRONOUS);
+  glDebugMessageControl(GL_DONT_CARE, GL_DONT_CARE, GL_DEBUG_SEVERITY_HIGH, 0,
+                        nullptr, GL_TRUE);
+  glDebugMessageControl(GL_DONT_CARE, GL_DONT_CARE, GL_DEBUG_SEVERITY_MEDIUM, 0,
+                        nullptr, GL_TRUE);
+  glDebugMessageControl(GL_DONT_CARE, GL_DONT_CARE, GL_DEBUG_SEVERITY_LOW, 0,
+                        nullptr, GL_TRUE);
+  glDebugMessageControl(GL_DONT_CARE, GL_DONT_CARE,
+                        GL_DEBUG_SEVERITY_NOTIFICATION, 0, nullptr, GL_FALSE);
+  glDebugMessageCallback(&GlDebugCallback, nullptr);
+}
+
+std::optional<GLuint> Gles::CreateShader(GLenum shader_type,
+                                         const std::string& shader_source) {
+  GLuint shader = glCreateShader(shader_type);
+
+  const char* const shader_source_cstr = shader_source.c_str();
+  glShaderSource(shader, 1, &shader_source_cstr, nullptr);
+  glCompileShader(shader);
+
+  GLint status;
+  glGetShaderiv(shader, GL_COMPILE_STATUS, &status);
+
+  if (status != GL_TRUE) {
+    GLsizei log_length = 0;
+    glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &log_length);
+
+    std::vector<char> log(log_length + 1, 0);
+    glGetShaderInfoLog(shader, log_length, nullptr, log.data());
+    LOG(ERROR) << "Failed to compile shader: " << log.data();
+
+    glDeleteShader(shader);
+    return std::nullopt;
+  }
+
+  return shader;
+}
+
+std::optional<GLuint> Gles::CreateProgram(
+    const std::string& vert_shader_source,
+    const std::string& frag_shader_source) {
+  auto vert_shader_opt = CreateShader(GL_VERTEX_SHADER, vert_shader_source);
+  if (!vert_shader_opt) {
+    LOG(ERROR) << "Failed to create vert shader.";
+    return std::nullopt;
+  }
+  auto vert_shader = *vert_shader_opt;
+
+  auto frag_shader_opt = CreateShader(GL_FRAGMENT_SHADER, frag_shader_source);
+  if (!frag_shader_opt) {
+    LOG(ERROR) << "Failed to create frag shader.";
+    return std::nullopt;
+  }
+  auto frag_shader = *frag_shader_opt;
+
+  GLuint program = glCreateProgram();
+  glAttachShader(program, vert_shader);
+  glAttachShader(program, frag_shader);
+  glLinkProgram(program);
+
+  GLint status;
+  glGetProgramiv(program, GL_LINK_STATUS, &status);
+
+  if (status != GL_TRUE) {
+    GLsizei log_length = 0;
+    glGetProgramiv(program, GL_INFO_LOG_LENGTH, &log_length);
+
+    std::vector<char> log(log_length + 1, 0);
+    glGetProgramInfoLog(program, log_length, nullptr, log.data());
+    LOG(ERROR) << "Failed to link program: " << log.data();
+
+    glDeleteProgram(program);
+    return std::nullopt;
+  }
+
+  glDeleteShader(vert_shader);
+  glDeleteShader(frag_shader);
+
+  return program;
+}
+
+}  // namespace cuttlefish
diff --git a/host/libs/graphics_detector/gles.h b/host/libs/graphics_detector/gles.h
new file mode 100644
index 0000000..5b85719
--- /dev/null
+++ b/host/libs/graphics_detector/gles.h
@@ -0,0 +1,72 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <optional>
+
+#include <GLES/gl.h>
+#include <GLES2/gl2.h>
+#include <GLES2/gl2ext.h>
+#include <GLES3/gl3.h>
+#include <GLES3/gl3ext.h>
+
+#include "host/libs/graphics_detector/egl.h"
+#include "host/libs/graphics_detector/gles_funcs.h"
+#include "host/libs/graphics_detector/lib.h"
+
+namespace cuttlefish {
+
+#define CHECK_GL_ERROR()                                                      \
+  do {                                                                        \
+    if (GLenum error = gles->glGetError(); error != GL_NO_ERROR) {            \
+      LOG(ERROR) << __FILE__ << ":" << __LINE__ << ":" << __PRETTY_FUNCTION__ \
+                 << " found error: " << error;                                \
+    }                                                                         \
+  } while (0);
+
+// Dynamically-loaded dispatch table of GLES entry points plus shader/program
+// helpers. Function-pointer members are null until populated (see Load /
+// LoadFromEgl); `lib_` keeps the underlying library handle alive.
+class Gles {
+ public:
+  // Loads the GLES library and resolves all entry points; nullopt on failure.
+  static std::optional<Gles> Load();
+  // Resolves entry points through an already-initialized EGL instance.
+  static std::optional<Gles> LoadFromEgl(Egl* egl);
+
+  // Non-copyable: owns the library handle via `lib_`; move-only.
+  Gles(const Gles&) = delete;
+  Gles& operator=(const Gles&) = delete;
+
+  Gles(Gles&&) = default;
+  Gles& operator=(Gles&&) = default;
+
+  // Compiles `shader_source` as a shader of `shader_type` (e.g.
+  // GL_VERTEX_SHADER); returns nullopt (and logs the info log) on failure.
+  std::optional<GLuint> CreateShader(GLenum shader_type,
+                                     const std::string& shader_source);
+
+  // Compiles and links a program from the given vertex/fragment sources;
+  // returns nullopt (and logs the info log) on compile or link failure.
+  std::optional<GLuint> CreateProgram(const std::string& vert_shader_source,
+                                      const std::string& frag_shader_source);
+
+  // Expands to one null-initialized function-pointer member per GLES entry
+  // point, e.g. `GLenum (*glGetError)() = nullptr;`.
+#define DECLARE_GLES_FUNCTION_MEMBER_POINTER(return_type, function_name, \
+                                             signature, args)            \
+  return_type(*function_name) signature = nullptr;
+
+  FOR_EACH_GLES_FUNCTION(DECLARE_GLES_FUNCTION_MEMBER_POINTER);
+
+ private:
+  Gles() = default;
+
+  // Resolves all function-pointer members after the library is opened.
+  void Init();
+
+  Lib lib_;
+};
+
+}  // namespace cuttlefish
diff --git a/host/libs/graphics_detector/gles_funcs.h b/host/libs/graphics_detector/gles_funcs.h
new file mode 100644
index 0000000..3bb18ac
--- /dev/null
+++ b/host/libs/graphics_detector/gles_funcs.h
@@ -0,0 +1,429 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <GLES/gl.h>
+#include <GLES2/gl2.h>
+#include <GLES2/gl2ext.h>
+#include <GLES3/gl3.h>
+#include <GLES3/gl32.h>
+
+typedef const GLubyte* GLconstubyteptr;
+
+// clang-format off
+// X-macro table of entry points common across GLES versions. Each row is
+// X(return_type, name, parameter_signature, forwarding_argument_list).
+// NOTE(review): glGetTexImage and glPush/PopAttrib/glPush/PopClientAttrib are
+// desktop-GL entry points not present in GLES -- presumably tolerated as
+// unresolved (null) when loading a GLES-only library; confirm.
+#define FOR_EACH_GLES_COMMON_FUNCTION(X) \
+  X(void, glActiveTexture, (GLenum texture), (texture)) \
+  X(void, glBindBuffer, (GLenum target, GLuint buffer), (target, buffer)) \
+  X(void, glBindTexture, (GLenum target, GLuint texture), (target, texture)) \
+  X(void, glBlendFunc, (GLenum sfactor, GLenum dfactor), (sfactor, dfactor)) \
+  X(void, glBlendEquation, (GLenum mode), (mode)) \
+  X(void, glBlendEquationSeparate, (GLenum modeRGB, GLenum modeAlpha), (modeRGB, modeAlpha)) \
+  X(void, glBlendFuncSeparate, (GLenum srcRGB, GLenum dstRGB, GLenum srcAlpha, GLenum dstAlpha), (srcRGB, dstRGB, srcAlpha, dstAlpha)) \
+  X(void, glBufferData, (GLenum target, GLsizeiptr size, const GLvoid * data, GLenum usage), (target, size, data, usage)) \
+  X(void, glBufferSubData, (GLenum target, GLintptr offset, GLsizeiptr size, const GLvoid * data), (target, offset, size, data)) \
+  X(void, glClear, (GLbitfield mask), (mask)) \
+  X(void, glClearColor, (GLfloat red, GLfloat green, GLfloat blue, GLfloat alpha), (red, green, blue, alpha)) \
+  X(void, glClearDepthf, (GLfloat depth), (depth)) \
+  X(void, glClearStencil, (GLint s), (s)) \
+  X(void, glColorMask, (GLboolean red, GLboolean green, GLboolean blue, GLboolean alpha), (red, green, blue, alpha)) \
+  X(void, glCompressedTexImage2D, (GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLint border, GLsizei imageSize, const GLvoid * data), (target, level, internalformat, width, height, border, imageSize, data)) \
+  X(void, glCompressedTexSubImage2D, (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLsizei imageSize, const GLvoid * data), (target, level, xoffset, yoffset, width, height, format, imageSize, data)) \
+  X(void, glCopyTexImage2D, (GLenum target, GLint level, GLenum internalFormat, GLint x, GLint y, GLsizei width, GLsizei height, GLint border), (target, level, internalFormat, x, y, width, height, border)) \
+  X(void, glCopyTexSubImage2D, (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint x, GLint y, GLsizei width, GLsizei height), (target, level, xoffset, yoffset, x, y, width, height)) \
+  X(void, glCullFace, (GLenum mode), (mode)) \
+  X(void, glDeleteBuffers, (GLsizei n, const GLuint * buffers), (n, buffers)) \
+  X(void, glDeleteTextures, (GLsizei n, const GLuint * textures), (n, textures)) \
+  X(void, glDepthFunc, (GLenum func), (func)) \
+  X(void, glDepthMask, (GLboolean flag), (flag)) \
+  X(void, glDepthRangef, (GLfloat zNear, GLfloat zFar), (zNear, zFar)) \
+  X(void, glDisable, (GLenum cap), (cap)) \
+  X(void, glDrawArrays, (GLenum mode, GLint first, GLsizei count), (mode, first, count)) \
+  X(void, glDrawElements, (GLenum mode, GLsizei count, GLenum type, const GLvoid * indices), (mode, count, type, indices)) \
+  X(void, glEnable, (GLenum cap), (cap)) \
+  X(void, glFinish, (), ()) \
+  X(void, glFlush, (), ()) \
+  X(void, glFrontFace, (GLenum mode), (mode)) \
+  X(void, glGenBuffers, (GLsizei n, GLuint * buffers), (n, buffers)) \
+  X(void, glGenTextures, (GLsizei n, GLuint * textures), (n, textures)) \
+  X(void, glGetBooleanv, (GLenum pname, GLboolean * params), (pname, params)) \
+  X(void, glGetBufferParameteriv, (GLenum buffer, GLenum parameter, GLint * value), (buffer, parameter, value)) \
+  X(GLenum, glGetError, (), ()) \
+  X(void, glGetFloatv, (GLenum pname, GLfloat * params), (pname, params)) \
+  X(void, glGetIntegerv, (GLenum pname, GLint * params), (pname, params)) \
+  X(GLconstubyteptr, glGetString, (GLenum name), (name)) \
+  X(void, glTexParameterf, (GLenum target, GLenum pname, GLfloat param), (target, pname, param)) \
+  X(void, glTexParameterfv, (GLenum target, GLenum pname, const GLfloat * params), (target, pname, params)) \
+  X(void, glGetTexImage, (GLenum target, GLint level, GLenum format, GLenum type, GLvoid * pixels), (target, level, format, type, pixels)) \
+  X(void, glGetTexParameterfv, (GLenum target, GLenum pname, GLfloat * params), (target, pname, params)) \
+  X(void, glGetTexParameteriv, (GLenum target, GLenum pname, GLint * params), (target, pname, params)) \
+  X(void, glGetTexLevelParameteriv, (GLenum target, GLint level, GLenum pname, GLint * params), (target, level, pname, params)) \
+  X(void, glGetTexLevelParameterfv, (GLenum target, GLint level, GLenum pname, GLfloat * params), (target, level, pname, params)) \
+  X(void, glHint, (GLenum target, GLenum mode), (target, mode)) \
+  X(GLboolean, glIsBuffer, (GLuint buffer), (buffer)) \
+  X(GLboolean, glIsEnabled, (GLenum cap), (cap)) \
+  X(GLboolean, glIsTexture, (GLuint texture), (texture)) \
+  X(void, glLineWidth, (GLfloat width), (width)) \
+  X(void, glPolygonOffset, (GLfloat factor, GLfloat units), (factor, units)) \
+  X(void, glPixelStorei, (GLenum pname, GLint param), (pname, param)) \
+  X(void, glReadPixels, (GLint x, GLint y, GLsizei width, GLsizei height, GLenum format, GLenum type, GLvoid * pixels), (x, y, width, height, format, type, pixels)) \
+  X(void, glRenderbufferStorageMultisample, (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height), (target, samples, internalformat, width, height)) \
+  X(void, glSampleCoverage, (GLclampf value, GLboolean invert), (value, invert)) \
+  X(void, glScissor, (GLint x, GLint y, GLsizei width, GLsizei height), (x, y, width, height)) \
+  X(void, glStencilFunc, (GLenum func, GLint ref, GLuint mask), (func, ref, mask)) \
+  X(void, glStencilMask, (GLuint mask), (mask)) \
+  X(void, glStencilOp, (GLenum fail, GLenum zfail, GLenum zpass), (fail, zfail, zpass)) \
+  X(void, glTexImage2D, (GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLint border, GLenum format, GLenum type, const GLvoid * pixels), (target, level, internalformat, width, height, border, format, type, pixels)) \
+  X(void, glTexParameteri, (GLenum target, GLenum pname, GLint param), (target, pname, param)) \
+  X(void, glTexParameteriv, (GLenum target, GLenum pname, const GLint * params), (target, pname, params)) \
+  X(void, glTexSubImage2D, (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLenum type, const GLvoid * pixels), (target, level, xoffset, yoffset, width, height, format, type, pixels)) \
+  X(void, glViewport, (GLint x, GLint y, GLsizei width, GLsizei height), (x, y, width, height)) \
+  X(void, glPushAttrib, (GLbitfield mask), (mask)) \
+  X(void, glPushClientAttrib, (GLbitfield mask), (mask)) \
+  X(void, glPopAttrib, (), ()) \
+  X(void, glPopClientAttrib, (), ()) \
+
+// X-macro table of GLES1-only entry points. Each row is
+// X(return_type, name, parameter_signature, forwarding_argument_list).
+// Fix: glGetClipPlanef/glGetClipPlanex previously forwarded `eqn[4]`
+// (a single, out-of-bounds element) instead of the array `eqn` -- any
+// wrapper-generating expansion of this table would mis-forward (or fail to
+// compile) on those rows.
+#define FOR_EACH_GLES1_ONLY_FUNCTION(X) \
+  X(void, glAlphaFunc, (GLenum func, GLclampf ref), (func, ref)) \
+  X(void, glBegin, (GLenum mode), (mode)) \
+  X(void, glClientActiveTexture, (GLenum texture), (texture)) \
+  X(void, glClipPlane, (GLenum plane, const GLdouble * equation), (plane, equation)) \
+  X(void, glColor4d, (GLdouble red, GLdouble green, GLdouble blue, GLdouble alpha), (red, green, blue, alpha)) \
+  X(void, glColor4f, (GLfloat red, GLfloat green, GLfloat blue, GLfloat alpha), (red, green, blue, alpha)) \
+  X(void, glColor4fv, (const GLfloat * v), (v)) \
+  X(void, glColor4ub, (GLubyte red, GLubyte green, GLubyte blue, GLubyte alpha), (red, green, blue, alpha)) \
+  X(void, glColor4ubv, (const GLubyte * v), (v)) \
+  X(void, glColorPointer, (GLint size, GLenum type, GLsizei stride, const GLvoid * pointer), (size, type, stride, pointer)) \
+  X(void, glDisableClientState, (GLenum array), (array)) \
+  X(void, glEnableClientState, (GLenum array), (array)) \
+  X(void, glEnd, (), ()) \
+  X(void, glFogf, (GLenum pname, GLfloat param), (pname, param)) \
+  X(void, glFogfv, (GLenum pname, const GLfloat * params), (pname, params)) \
+  X(void, glFrustum, (GLdouble left, GLdouble right, GLdouble bottom, GLdouble top, GLdouble zNear, GLdouble zFar), (left, right, bottom, top, zNear, zFar)) \
+  X(void, glGetClipPlane, (GLenum plane, GLdouble * equation), (plane, equation)) \
+  X(void, glGetDoublev, (GLenum pname, GLdouble * params), (pname, params)) \
+  X(void, glGetLightfv, (GLenum light, GLenum pname, GLfloat * params), (light, pname, params)) \
+  X(void, glGetMaterialfv, (GLenum face, GLenum pname, GLfloat * params), (face, pname, params)) \
+  X(void, glGetPointerv, (GLenum pname, GLvoid* * params), (pname, params)) \
+  X(void, glGetTexEnvfv, (GLenum target, GLenum pname, GLfloat * params), (target, pname, params)) \
+  X(void, glGetTexEnviv, (GLenum target, GLenum pname, GLint * params), (target, pname, params)) \
+  X(void, glLightf, (GLenum light, GLenum pname, GLfloat param), (light, pname, param)) \
+  X(void, glLightfv, (GLenum light, GLenum pname, const GLfloat * params), (light, pname, params)) \
+  X(void, glLightModelf, (GLenum pname, GLfloat param), (pname, param)) \
+  X(void, glLightModelfv, (GLenum pname, const GLfloat * params), (pname, params)) \
+  X(void, glLoadIdentity, (), ()) \
+  X(void, glLoadMatrixf, (const GLfloat * m), (m)) \
+  X(void, glLogicOp, (GLenum opcode), (opcode)) \
+  X(void, glMaterialf, (GLenum face, GLenum pname, GLfloat param), (face, pname, param)) \
+  X(void, glMaterialfv, (GLenum face, GLenum pname, const GLfloat * params), (face, pname, params)) \
+  X(void, glMultiTexCoord2fv, (GLenum target, const GLfloat * v), (target, v)) \
+  X(void, glMultiTexCoord2sv, (GLenum target, const GLshort * v), (target, v)) \
+  X(void, glMultiTexCoord3fv, (GLenum target, const GLfloat * v), (target, v)) \
+  X(void, glMultiTexCoord3sv, (GLenum target, const GLshort * v), (target, v)) \
+  X(void, glMultiTexCoord4f, (GLenum target, GLfloat s, GLfloat t, GLfloat r, GLfloat q), (target, s, t, r, q)) \
+  X(void, glMultiTexCoord4fv, (GLenum target, const GLfloat * v), (target, v)) \
+  X(void, glMultiTexCoord4sv, (GLenum target, const GLshort * v), (target, v)) \
+  X(void, glMultMatrixf, (const GLfloat * m), (m)) \
+  X(void, glNormal3f, (GLfloat nx, GLfloat ny, GLfloat nz), (nx, ny, nz)) \
+  X(void, glNormal3fv, (const GLfloat * v), (v)) \
+  X(void, glNormal3sv, (const GLshort * v), (v)) \
+  X(void, glOrtho, (GLdouble left, GLdouble right, GLdouble bottom, GLdouble top, GLdouble zNear, GLdouble zFar), (left, right, bottom, top, zNear, zFar)) \
+  X(void, glPointParameterf, (GLenum param, GLfloat value), (param, value)) \
+  X(void, glPointParameterfv, (GLenum param, const GLfloat * values), (param, values)) \
+  X(void, glPointSize, (GLfloat size), (size)) \
+  X(void, glRotatef, (GLfloat angle, GLfloat x, GLfloat y, GLfloat z), (angle, x, y, z)) \
+  X(void, glScalef, (GLfloat x, GLfloat y, GLfloat z), (x, y, z)) \
+  X(void, glTexEnvf, (GLenum target, GLenum pname, GLfloat param), (target, pname, param)) \
+  X(void, glTexEnvfv, (GLenum target, GLenum pname, const GLfloat * params), (target, pname, params)) \
+  X(void, glMatrixMode, (GLenum mode), (mode)) \
+  X(void, glNormalPointer, (GLenum type, GLsizei stride, const GLvoid * pointer), (type, stride, pointer)) \
+  X(void, glPopMatrix, (), ()) \
+  X(void, glPushMatrix, (), ()) \
+  X(void, glShadeModel, (GLenum mode), (mode)) \
+  X(void, glTexCoordPointer, (GLint size, GLenum type, GLsizei stride, const GLvoid * pointer), (size, type, stride, pointer)) \
+  X(void, glTexEnvi, (GLenum target, GLenum pname, GLint param), (target, pname, param)) \
+  X(void, glTexEnviv, (GLenum target, GLenum pname, const GLint * params), (target, pname, params)) \
+  X(void, glTranslatef, (GLfloat x, GLfloat y, GLfloat z), (x, y, z)) \
+  X(void, glVertexPointer, (GLint size, GLenum type, GLsizei stride, const GLvoid * pointer), (size, type, stride, pointer)) \
+  X(void, glClipPlanef, (GLenum plane, const GLfloat * equation), (plane, equation)) \
+  X(void, glFrustumf, (GLfloat left, GLfloat right, GLfloat bottom, GLfloat top, GLfloat zNear, GLfloat zFar), (left, right, bottom, top, zNear, zFar)) \
+  X(void, glGetClipPlanef, (GLenum pname, GLfloat eqn[4]), (pname, eqn)) \
+  X(void, glOrthof, (GLfloat left, GLfloat right, GLfloat bottom, GLfloat top, GLfloat zNear, GLfloat zFar), (left, right, bottom, top, zNear, zFar)) \
+  X(void, glAlphaFuncx, (GLenum func, GLclampx ref), (func, ref)) \
+  X(void, glClearColorx, (GLclampx red, GLclampx green, GLclampx blue, GLclampx alpha), (red, green, blue, alpha)) \
+  X(void, glClearDepthx, (GLclampx depth), (depth)) \
+  X(void, glColor4x, (GLfixed red, GLfixed green, GLfixed blue, GLfixed alpha), (red, green, blue, alpha)) \
+  X(void, glDepthRangex, (GLclampx zNear, GLclampx zFar), (zNear, zFar)) \
+  X(void, glFogx, (GLenum pname, GLfixed param), (pname, param)) \
+  X(void, glFogxv, (GLenum pname, const GLfixed * params), (pname, params)) \
+  X(void, glFrustumx, (GLfixed left, GLfixed right, GLfixed bottom, GLfixed top, GLfixed zNear, GLfixed zFar), (left, right, bottom, top, zNear, zFar)) \
+  X(void, glClipPlanex, (GLenum pname, const GLfixed * eqn), (pname, eqn)) \
+  X(void, glGetFixedv, (GLenum pname, GLfixed * params), (pname, params)) \
+  X(void, glGetLightxv, (GLenum light, GLenum pname, GLfixed * params), (light, pname, params)) \
+  X(void, glGetMaterialxv, (GLenum face, GLenum pname, GLfixed * params), (face, pname, params)) \
+  X(void, glGetTexEnvxv, (GLenum env, GLenum pname, GLfixed * params), (env, pname, params)) \
+  X(void, glGetTexParameterxv, (GLenum target, GLenum pname, GLfixed * params), (target, pname, params)) \
+  X(void, glLightModelx, (GLenum pname, GLfixed param), (pname, param)) \
+  X(void, glLightModelxv, (GLenum pname, const GLfixed * params), (pname, params)) \
+  X(void, glLightx, (GLenum light, GLenum pname, GLfixed param), (light, pname, param)) \
+  X(void, glLightxv, (GLenum light, GLenum pname, const GLfixed * params), (light, pname, params)) \
+  X(void, glLineWidthx, (GLfixed width), (width)) \
+  X(void, glLoadMatrixx, (const GLfixed * m), (m)) \
+  X(void, glMaterialx, (GLenum face, GLenum pname, GLfixed param), (face, pname, param)) \
+  X(void, glMaterialxv, (GLenum face, GLenum pname, const GLfixed * params), (face, pname, params)) \
+  X(void, glMultMatrixx, (const GLfixed * m), (m)) \
+  X(void, glMultiTexCoord4x, (GLenum target, GLfixed s, GLfixed t, GLfixed r, GLfixed q), (target, s, t, r, q)) \
+  X(void, glNormal3x, (GLfixed nx, GLfixed ny, GLfixed nz), (nx, ny, nz)) \
+  X(void, glOrthox, (GLfixed left, GLfixed right, GLfixed bottom, GLfixed top, GLfixed zNear, GLfixed zFar), (left, right, bottom, top, zNear, zFar)) \
+  X(void, glPointParameterx, (GLenum pname, GLfixed param), (pname, param)) \
+  X(void, glPointParameterxv, (GLenum pname, const GLfixed * params), (pname, params)) \
+  X(void, glPointSizex, (GLfixed size), (size)) \
+  X(void, glPolygonOffsetx, (GLfixed factor, GLfixed units), (factor, units)) \
+  X(void, glRotatex, (GLfixed angle, GLfixed x, GLfixed y, GLfixed z), (angle, x, y, z)) \
+  X(void, glSampleCoveragex, (GLclampx value, GLboolean invert), (value, invert)) \
+  X(void, glScalex, (GLfixed x, GLfixed y, GLfixed z), (x, y, z)) \
+  X(void, glTexEnvx, (GLenum target, GLenum pname, GLfixed param), (target, pname, param)) \
+  X(void, glTexEnvxv, (GLenum target, GLenum pname, const GLfixed * params), (target, pname, params)) \
+  X(void, glTexParameterx, (GLenum target, GLenum pname, GLfixed param), (target, pname, param)) \
+  X(void, glTexParameterxv, (GLenum target, GLenum pname, const GLfixed * params), (target, pname, params)) \
+  X(void, glTranslatex, (GLfixed x, GLfixed y, GLfixed z), (x, y, z)) \
+  X(void, glGetClipPlanex, (GLenum pname, GLfixed eqn[4]), (pname, eqn)) \
+
+// X-macro table of GLES2-only entry points (shaders, programs, FBOs,
+// renderbuffers, vertex attributes, uniforms). Each row is
+// X(return_type, name, parameter_signature, forwarding_argument_list).
+#define FOR_EACH_GLES2_ONLY_FUNCTION(X) \
+  X(void, glBlendColor, (GLclampf red, GLclampf green, GLclampf blue, GLclampf alpha), (red, green, blue, alpha)) \
+  X(void, glStencilFuncSeparate, (GLenum face, GLenum func, GLint ref, GLuint mask), (face, func, ref, mask)) \
+  X(void, glStencilMaskSeparate, (GLenum face, GLuint mask), (face, mask)) \
+  X(void, glStencilOpSeparate, (GLenum face, GLenum fail, GLenum zfail, GLenum zpass), (face, fail, zfail, zpass)) \
+  X(GLboolean, glIsProgram, (GLuint program), (program)) \
+  X(GLboolean, glIsShader, (GLuint shader), (shader)) \
+  X(void, glVertexAttrib1f, (GLuint indx, GLfloat x), (indx, x)) \
+  X(void, glVertexAttrib1fv, (GLuint indx, const GLfloat* values), (indx, values)) \
+  X(void, glVertexAttrib2f, (GLuint indx, GLfloat x, GLfloat y), (indx, x, y)) \
+  X(void, glVertexAttrib2fv, (GLuint indx, const GLfloat* values), (indx, values)) \
+  X(void, glVertexAttrib3f, (GLuint indx, GLfloat x, GLfloat y, GLfloat z), (indx, x, y, z)) \
+  X(void, glVertexAttrib3fv, (GLuint indx, const GLfloat* values), (indx, values)) \
+  X(void, glVertexAttrib4f, (GLuint indx, GLfloat x, GLfloat y, GLfloat z, GLfloat w), (indx, x, y, z, w)) \
+  X(void, glVertexAttrib4fv, (GLuint indx, const GLfloat* values), (indx, values)) \
+  X(void, glVertexAttribPointer, (GLuint indx, GLint size, GLenum type, GLboolean normalized, GLsizei stride, const GLvoid* ptr), (indx, size, type, normalized, stride, ptr)) \
+  X(void, glDisableVertexAttribArray, (GLuint index), (index)) \
+  X(void, glEnableVertexAttribArray, (GLuint index), (index)) \
+  X(void, glGetVertexAttribfv, (GLuint index, GLenum pname, GLfloat* params), (index, pname, params)) \
+  X(void, glGetVertexAttribiv, (GLuint index, GLenum pname, GLint* params), (index, pname, params)) \
+  X(void, glGetVertexAttribPointerv, (GLuint index, GLenum pname, GLvoid** pointer), (index, pname, pointer)) \
+  X(void, glUniform1f, (GLint location, GLfloat x), (location, x)) \
+  X(void, glUniform1fv, (GLint location, GLsizei count, const GLfloat* v), (location, count, v)) \
+  X(void, glUniform1i, (GLint location, GLint x), (location, x)) \
+  X(void, glUniform1iv, (GLint location, GLsizei count, const GLint* v), (location, count, v)) \
+  X(void, glUniform2f, (GLint location, GLfloat x, GLfloat y), (location, x, y)) \
+  X(void, glUniform2fv, (GLint location, GLsizei count, const GLfloat* v), (location, count, v)) \
+  X(void, glUniform2i, (GLint location, GLint x, GLint y), (location, x, y)) \
+  X(void, glUniform2iv, (GLint location, GLsizei count, const GLint* v), (location, count, v)) \
+  X(void, glUniform3f, (GLint location, GLfloat x, GLfloat y, GLfloat z), (location, x, y, z)) \
+  X(void, glUniform3fv, (GLint location, GLsizei count, const GLfloat* v), (location, count, v)) \
+  X(void, glUniform3i, (GLint location, GLint x, GLint y, GLint z), (location, x, y, z)) \
+  X(void, glUniform3iv, (GLint location, GLsizei count, const GLint* v), (location, count, v)) \
+  X(void, glUniform4f, (GLint location, GLfloat x, GLfloat y, GLfloat z, GLfloat w), (location, x, y, z, w)) \
+  X(void, glUniform4fv, (GLint location, GLsizei count, const GLfloat* v), (location, count, v)) \
+  X(void, glUniform4i, (GLint location, GLint x, GLint y, GLint z, GLint w), (location, x, y, z, w)) \
+  X(void, glUniform4iv, (GLint location, GLsizei count, const GLint* v), (location, count, v)) \
+  X(void, glUniformMatrix2fv, (GLint location, GLsizei count, GLboolean transpose, const GLfloat* value), (location, count, transpose, value)) \
+  X(void, glUniformMatrix3fv, (GLint location, GLsizei count, GLboolean transpose, const GLfloat* value), (location, count, transpose, value)) \
+  X(void, glUniformMatrix4fv, (GLint location, GLsizei count, GLboolean transpose, const GLfloat* value), (location, count, transpose, value)) \
+  X(void, glAttachShader, (GLuint program, GLuint shader), (program, shader)) \
+  X(void, glBindAttribLocation, (GLuint program, GLuint index, const GLchar* name), (program, index, name)) \
+  X(void, glCompileShader, (GLuint shader), (shader)) \
+  X(GLuint, glCreateProgram, (), ()) \
+  X(GLuint, glCreateShader, (GLenum type), (type)) \
+  X(void, glDeleteProgram, (GLuint program), (program)) \
+  X(void, glDeleteShader, (GLuint shader), (shader)) \
+  X(void, glDetachShader, (GLuint program, GLuint shader), (program, shader)) \
+  X(void, glLinkProgram, (GLuint program), (program)) \
+  X(void, glUseProgram, (GLuint program), (program)) \
+  X(void, glValidateProgram, (GLuint program), (program)) \
+  X(void, glGetActiveAttrib, (GLuint program, GLuint index, GLsizei bufsize, GLsizei* length, GLint* size, GLenum* type, GLchar* name), (program, index, bufsize, length, size, type, name)) \
+  X(void, glGetActiveUniform, (GLuint program, GLuint index, GLsizei bufsize, GLsizei* length, GLint* size, GLenum* type, GLchar* name), (program, index, bufsize, length, size, type, name)) \
+  X(void, glGetAttachedShaders, (GLuint program, GLsizei maxcount, GLsizei* count, GLuint* shaders), (program, maxcount, count, shaders)) \
+  X(int, glGetAttribLocation, (GLuint program, const GLchar* name), (program, name)) \
+  X(void, glGetProgramiv, (GLuint program, GLenum pname, GLint* params), (program, pname, params)) \
+  X(void, glGetProgramInfoLog, (GLuint program, GLsizei bufsize, GLsizei* length, GLchar* infolog), (program, bufsize, length, infolog)) \
+  X(void, glGetShaderiv, (GLuint shader, GLenum pname, GLint* params), (shader, pname, params)) \
+  X(void, glGetShaderInfoLog, (GLuint shader, GLsizei bufsize, GLsizei* length, GLchar* infolog), (shader, bufsize, length, infolog)) \
+  X(void, glGetShaderSource, (GLuint shader, GLsizei bufsize, GLsizei* length, GLchar* source), (shader, bufsize, length, source)) \
+  X(void, glGetUniformfv, (GLuint program, GLint location, GLfloat* params), (program, location, params)) \
+  X(void, glGetUniformiv, (GLuint program, GLint location, GLint* params), (program, location, params)) \
+  X(int, glGetUniformLocation, (GLuint program, const GLchar* name), (program, name)) \
+  X(void, glShaderSource, (GLuint shader, GLsizei count, const GLchar* const* string, const GLint* length), (shader, count, string, length)) \
+  X(void, glBindFramebuffer, (GLenum target, GLuint framebuffer), (target, framebuffer)) \
+  X(void, glGenFramebuffers, (GLsizei n, GLuint* framebuffers), (n, framebuffers)) \
+  X(void, glFramebufferTexture2D, (GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level), (target, attachment, textarget, texture, level)) \
+  X(GLenum, glCheckFramebufferStatus, (GLenum target), (target)) \
+  X(GLboolean, glIsFramebuffer, (GLuint framebuffer), (framebuffer)) \
+  X(void, glDeleteFramebuffers, (GLsizei n, const GLuint* framebuffers), (n, framebuffers)) \
+  X(GLboolean, glIsRenderbuffer, (GLuint renderbuffer), (renderbuffer)) \
+  X(void, glBindRenderbuffer, (GLenum target, GLuint renderbuffer), (target, renderbuffer)) \
+  X(void, glDeleteRenderbuffers, (GLsizei n, const GLuint * renderbuffers), (n, renderbuffers)) \
+  X(void, glGenRenderbuffers, (GLsizei n, GLuint * renderbuffers), (n, renderbuffers)) \
+  X(void, glRenderbufferStorage, (GLenum target, GLenum internalformat, GLsizei width, GLsizei height), (target, internalformat, width, height)) \
+  X(void, glGetRenderbufferParameteriv, (GLenum target, GLenum pname, GLint * params), (target, pname, params)) \
+  X(void, glFramebufferRenderbuffer, (GLenum target, GLenum attachment, GLenum renderbuffertarget, GLuint renderbuffer), (target, attachment, renderbuffertarget, renderbuffer)) \
+  X(void, glGetFramebufferAttachmentParameteriv, (GLenum target, GLenum attachment, GLenum pname, GLint * params), (target, attachment, pname, params)) \
+  X(void, glGenerateMipmap, (GLenum target), (target)) \
+
+#define FOR_EACH_GLES3_ONLY_FUNCTION(X) \
+  X(GLconstubyteptr, glGetStringi, (GLenum name, GLint index), (name, index)) \
+  X(void, glGenVertexArrays, (GLsizei n, GLuint* arrays), (n, arrays)) \
+  X(void, glBindVertexArray, (GLuint array), (array)) \
+  X(void, glDeleteVertexArrays, (GLsizei n, const GLuint * arrays), (n, arrays)) \
+  X(GLboolean, glIsVertexArray, (GLuint array), (array)) \
+  X(void *, glMapBufferRange, (GLenum target, GLintptr offset, GLsizeiptr length, GLbitfield access), (target, offset, length, access)) \
+  X(GLboolean, glUnmapBuffer, (GLenum target), (target)) \
+  X(void, glFlushMappedBufferRange, (GLenum target, GLintptr offset, GLsizeiptr length), (target, offset, length)) \
+  X(void, glBindBufferRange, (GLenum target, GLuint index, GLuint buffer, GLintptr offset, GLsizeiptr size), (target, index, buffer, offset, size)) \
+  X(void, glBindBufferBase, (GLenum target, GLuint index, GLuint buffer), (target, index, buffer)) \
+  X(void, glCopyBufferSubData, (GLenum readtarget, GLenum writetarget, GLintptr readoffset, GLintptr writeoffset, GLsizeiptr size), (readtarget, writetarget, readoffset, writeoffset, size)) \
+  X(void, glClearBufferiv, (GLenum buffer, GLint drawBuffer, const GLint * value), (buffer, drawBuffer, value)) \
+  X(void, glClearBufferuiv, (GLenum buffer, GLint drawBuffer, const GLuint * value), (buffer, drawBuffer, value)) \
+  X(void, glClearBufferfv, (GLenum buffer, GLint drawBuffer, const GLfloat * value), (buffer, drawBuffer, value)) \
+  X(void, glClearBufferfi, (GLenum buffer, GLint drawBuffer, GLfloat depth, GLint stencil), (buffer, drawBuffer, depth, stencil)) \
+  X(void, glGetBufferParameteri64v, (GLenum target, GLenum value, GLint64 * data), (target, value, data)) \
+  X(void, glGetBufferPointerv, (GLenum target, GLenum pname, GLvoid ** params), (target, pname, params)) \
+  X(void, glUniformBlockBinding, (GLuint program, GLuint uniformBlockIndex, GLuint uniformBlockBinding), (program, uniformBlockIndex, uniformBlockBinding)) \
+  X(GLuint, glGetUniformBlockIndex, (GLuint program, const GLchar * uniformBlockName), (program, uniformBlockName)) \
+  X(void, glGetUniformIndices, (GLuint program, GLsizei uniformCount, const GLchar ** uniformNames, GLuint * uniformIndices), (program, uniformCount, uniformNames, uniformIndices)) \
+  X(void, glGetActiveUniformBlockiv, (GLuint program, GLuint uniformBlockIndex, GLenum pname, GLint * params), (program, uniformBlockIndex, pname, params)) \
+  X(void, glGetActiveUniformBlockName, (GLuint program, GLuint uniformBlockIndex, GLsizei bufSize, GLsizei * length, GLchar * uniformBlockName), (program, uniformBlockIndex, bufSize, length, uniformBlockName)) \
+  X(void, glUniform1ui, (GLint location, GLuint v0), (location, v0)) \
+  X(void, glUniform2ui, (GLint location, GLuint v0, GLuint v1), (location, v0, v1)) \
+  X(void, glUniform3ui, (GLint location, GLuint v0, GLuint v1, GLuint v2), (location, v0, v1, v2)) \
+  X(void, glUniform4ui, (GLint location, GLint v0, GLuint v1, GLuint v2, GLuint v3), (location, v0, v1, v2, v3)) \
+  X(void, glUniform1uiv, (GLint location, GLsizei count, const GLuint * value), (location, count, value)) \
+  X(void, glUniform2uiv, (GLint location, GLsizei count, const GLuint * value), (location, count, value)) \
+  X(void, glUniform3uiv, (GLint location, GLsizei count, const GLuint * value), (location, count, value)) \
+  X(void, glUniform4uiv, (GLint location, GLsizei count, const GLuint * value), (location, count, value)) \
+  X(void, glUniformMatrix2x3fv, (GLint location, GLsizei count, GLboolean transpose, const GLfloat * value), (location, count, transpose, value)) \
+  X(void, glUniformMatrix3x2fv, (GLint location, GLsizei count, GLboolean transpose, const GLfloat * value), (location, count, transpose, value)) \
+  X(void, glUniformMatrix2x4fv, (GLint location, GLsizei count, GLboolean transpose, const GLfloat * value), (location, count, transpose, value)) \
+  X(void, glUniformMatrix4x2fv, (GLint location, GLsizei count, GLboolean transpose, const GLfloat * value), (location, count, transpose, value)) \
+  X(void, glUniformMatrix3x4fv, (GLint location, GLsizei count, GLboolean transpose, const GLfloat * value), (location, count, transpose, value)) \
+  X(void, glUniformMatrix4x3fv, (GLint location, GLsizei count, GLboolean transpose, const GLfloat * value), (location, count, transpose, value)) \
+  X(void, glGetUniformuiv, (GLuint program, GLint location, GLuint * params), (program, location, params)) \
+  X(void, glGetActiveUniformsiv, (GLuint program, GLsizei uniformCount, const GLuint * uniformIndices, GLenum pname, GLint * params), (program, uniformCount, uniformIndices, pname, params)) \
+  X(void, glVertexAttribI4i, (GLuint index, GLint v0, GLint v1, GLint v2, GLint v3), (index, v0, v1, v2, v3)) \
+  X(void, glVertexAttribI4ui, (GLuint index, GLuint v0, GLuint v1, GLuint v2, GLuint v3), (index, v0, v1, v2, v3)) \
+  X(void, glVertexAttribI4iv, (GLuint index, const GLint * v), (index, v)) \
+  X(void, glVertexAttribI4uiv, (GLuint index, const GLuint * v), (index, v)) \
+  X(void, glVertexAttribIPointer, (GLuint index, GLint size, GLenum type, GLsizei stride, const GLvoid * pointer), (index, size, type, stride, pointer)) \
+  X(void, glGetVertexAttribIiv, (GLuint index, GLenum pname, GLint * params), (index, pname, params)) \
+  X(void, glGetVertexAttribIuiv, (GLuint index, GLenum pname, GLuint * params), (index, pname, params)) \
+  X(void, glVertexAttribDivisor, (GLuint index, GLuint divisor), (index, divisor)) \
+  X(void, glDrawArraysInstanced, (GLenum mode, GLint first, GLsizei count, GLsizei primcount), (mode, first, count, primcount)) \
+  X(void, glDrawElementsInstanced, (GLenum mode, GLsizei count, GLenum type, const void * indices, GLsizei primcount), (mode, count, type, indices, primcount)) \
+  X(void, glDrawRangeElements, (GLenum mode, GLuint start, GLuint end, GLsizei count, GLenum type, const GLvoid * indices), (mode, start, end, count, type, indices)) \
+  X(GLsync, glFenceSync, (GLenum condition, GLbitfield flags), (condition, flags)) \
+  X(GLenum, glClientWaitSync, (GLsync wait_on, GLbitfield flags, GLuint64 timeout), (wait_on, flags, timeout)) \
+  X(void, glWaitSync, (GLsync wait_on, GLbitfield flags, GLuint64 timeout), (wait_on, flags, timeout)) \
+  X(void, glDeleteSync, (GLsync to_delete), (to_delete)) \
+  X(GLboolean, glIsSync, (GLsync sync), (sync)) \
+  X(void, glGetSynciv, (GLsync sync, GLenum pname, GLsizei bufSize, GLsizei * length, GLint * values), (sync, pname, bufSize, length, values)) \
+  X(void, glDrawBuffers, (GLsizei n, const GLenum * bufs), (n, bufs)) \
+  X(void, glReadBuffer, (GLenum src), (src)) \
+  X(void, glBlitFramebuffer, (GLint srcX0, GLint srcY0, GLint srcX1, GLint srcY1, GLint dstX0, GLint dstY0, GLint dstX1, GLint dstY1, GLbitfield mask, GLenum filter), (srcX0, srcY0, srcX1, srcY1, dstX0, dstY0, dstX1, dstY1, mask, filter)) \
+  X(void, glInvalidateFramebuffer, (GLenum target, GLsizei numAttachments, const GLenum * attachments), (target, numAttachments, attachments)) \
+  X(void, glInvalidateSubFramebuffer, (GLenum target, GLsizei numAttachments, const GLenum * attachments, GLint x, GLint y, GLsizei width, GLsizei height), (target, numAttachments, attachments, x, y, width, height)) \
+  X(void, glFramebufferTextureLayer, (GLenum target, GLenum attachment, GLuint texture, GLint level, GLint layer), (target, attachment, texture, level, layer)) \
+  X(void, glGetInternalformativ, (GLenum target, GLenum internalformat, GLenum pname, GLsizei bufSize, GLint * params), (target, internalformat, pname, bufSize, params)) \
+  X(void, glTexStorage2D, (GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height), (target, levels, internalformat, width, height)) \
+  X(void, glBeginTransformFeedback, (GLenum primitiveMode), (primitiveMode)) \
+  X(void, glEndTransformFeedback, (), ()) \
+  X(void, glGenTransformFeedbacks, (GLsizei n, GLuint * ids), (n, ids)) \
+  X(void, glDeleteTransformFeedbacks, (GLsizei n, const GLuint * ids), (n, ids)) \
+  X(void, glBindTransformFeedback, (GLenum target, GLuint id), (target, id)) \
+  X(void, glPauseTransformFeedback, (), ()) \
+  X(void, glResumeTransformFeedback, (), ()) \
+  X(GLboolean, glIsTransformFeedback, (GLuint id), (id)) \
+  X(void, glTransformFeedbackVaryings, (GLuint program, GLsizei count, const char ** varyings, GLenum bufferMode), (program, count, varyings, bufferMode)) \
+  X(void, glGetTransformFeedbackVarying, (GLuint program, GLuint index, GLsizei bufSize, GLsizei * length, GLsizei * size, GLenum * type, char * name), (program, index, bufSize, length, size, type, name)) \
+  X(void, glGenSamplers, (GLsizei n, GLuint * samplers), (n, samplers)) \
+  X(void, glDeleteSamplers, (GLsizei n, const GLuint * samplers), (n, samplers)) \
+  X(void, glBindSampler, (GLuint unit, GLuint sampler), (unit, sampler)) \
+  X(void, glSamplerParameterf, (GLuint sampler, GLenum pname, GLfloat param), (sampler, pname, param)) \
+  X(void, glSamplerParameteri, (GLuint sampler, GLenum pname, GLint param), (sampler, pname, param)) \
+  X(void, glSamplerParameterfv, (GLuint sampler, GLenum pname, const GLfloat * params), (sampler, pname, params)) \
+  X(void, glSamplerParameteriv, (GLuint sampler, GLenum pname, const GLint * params), (sampler, pname, params)) \
+  X(void, glGetSamplerParameterfv, (GLuint sampler, GLenum pname, GLfloat * params), (sampler, pname, params)) \
+  X(void, glGetSamplerParameteriv, (GLuint sampler, GLenum pname, GLint * params), (sampler, pname, params)) \
+  X(GLboolean, glIsSampler, (GLuint sampler), (sampler)) \
+  X(void, glGenQueries, (GLsizei n, GLuint * queries), (n, queries)) \
+  X(void, glDeleteQueries, (GLsizei n, const GLuint * queries), (n, queries)) \
+  X(void, glBeginQuery, (GLenum target, GLuint query), (target, query)) \
+  X(void, glEndQuery, (GLenum target), (target)) \
+  X(void, glGetQueryiv, (GLenum target, GLenum pname, GLint * params), (target, pname, params)) \
+  X(void, glGetQueryObjectuiv, (GLuint query, GLenum pname, GLuint * params), (query, pname, params)) \
+  X(GLboolean, glIsQuery, (GLuint query), (query)) \
+  X(void, glProgramParameteri, (GLuint program, GLenum pname, GLint value), (program, pname, value)) \
+  X(void, glProgramBinary, (GLuint program, GLenum binaryFormat, const void * binary, GLsizei length), (program, binaryFormat, binary, length)) \
+  X(void, glGetProgramBinary, (GLuint program, GLsizei bufsize, GLsizei * length, GLenum * binaryFormat, void * binary), (program, bufsize, length, binaryFormat, binary)) \
+  X(GLint, glGetFragDataLocation, (GLuint program, const char * name), (program, name)) \
+  X(void, glGetInteger64v, (GLenum pname, GLint64 * data), (pname, data)) \
+  X(void, glGetIntegeri_v, (GLenum target, GLuint index, GLint * data), (target, index, data)) \
+  X(void, glGetInteger64i_v, (GLenum target, GLuint index, GLint64 * data), (target, index, data)) \
+  X(void, glTexImage3D, (GLenum target, GLint level, GLint internalFormat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLenum format, GLenum type, const GLvoid * data), (target, level, internalFormat, width, height, depth, border, format, type, data)) \
+  X(void, glTexStorage3D, (GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth), (target, levels, internalformat, width, height, depth)) \
+  X(void, glTexSubImage3D, (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLenum type, const GLvoid * data), (target, level, xoffset, yoffset, zoffset, width, height, depth, format, type, data)) \
+  X(void, glCompressedTexImage3D, (GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLsizei imageSize, const GLvoid * data), (target, level, internalformat, width, height, depth, border, imageSize, data)) \
+  X(void, glCompressedTexSubImage3D, (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLsizei imageSize, const GLvoid * data), (target, level, xoffset, yoffset, zoffset, width, height, depth, format, imageSize, data)) \
+  X(void, glCopyTexSubImage3D, (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLint x, GLint y, GLsizei width, GLsizei height), (target, level, xoffset, yoffset, zoffset, x, y, width, height)) \
+  X(void, glDebugMessageControl, (GLenum source, GLenum type, GLenum severity, GLsizei count, const GLuint * ids, GLboolean enabled), (source, type, severity, count, ids, enabled)) \
+  X(void, glDebugMessageInsert, (GLenum source, GLenum type, GLuint id, GLenum severity, GLsizei length, const GLchar * buf), (source, type, id, severity, length, buf)) \
+  X(void, glDebugMessageCallback, (GLDEBUGPROC callback, const void * userParam), (callback, userParam)) \
+  X(GLuint, glGetDebugMessageLog, (GLuint count, GLsizei bufSize, GLenum * sources, GLenum * types, GLuint * ids, GLenum * severities, GLsizei * lengths, GLchar * messageLog), (count, bufSize, sources, types, ids, severities, lengths, messageLog)) \
+  X(void, glPushDebugGroup, (GLenum source, GLuint id, GLsizei length, const GLchar* message), (source, id, length, message)) \
+  X(void, glPopDebugGroup, (), ()) \
+
+#define FOR_EACH_GLES_EXTENSION_FUNCTION(X) \
+  X(void, glImportMemoryFdEXT, (GLuint memory, GLuint64 size, GLenum handleType, GLint fd), (memory, size, handleType, fd)) \
+  X(void, glImportMemoryWin32HandleEXT, (GLuint memory, GLuint64 size, GLenum handleType, void* handle), (memory, size, handleType, handle)) \
+  X(void, glDeleteMemoryObjectsEXT, (GLsizei n, const GLuint * memoryObjects), (n, memoryObjects)) \
+  X(GLboolean, glIsMemoryObjectEXT, (GLuint memoryObject), (memoryObject)) \
+  X(void, glCreateMemoryObjectsEXT, (GLsizei n, GLuint * memoryObjects), (n, memoryObjects)) \
+  X(void, glMemoryObjectParameterivEXT, (GLuint memoryObject, GLenum pname, const GLint * params), (memoryObject, pname, params)) \
+  X(void, glGetMemoryObjectParameterivEXT, (GLuint memoryObject, GLenum pname, GLint * params), (memoryObject, pname, params)) \
+  X(void, glTexStorageMem2DEXT, (GLenum target, GLsizei levels, GLenum internalFormat, GLsizei width, GLsizei height, GLuint memory, GLuint64 offset), (target, levels, internalFormat, width, height, memory, offset)) \
+  X(void, glTexStorageMem2DMultisampleEXT, (GLenum target, GLsizei samples, GLenum internalFormat, GLsizei width, GLsizei height, GLboolean fixedSampleLocations, GLuint memory, GLuint64 offset), (target, samples, internalFormat, width, height, fixedSampleLocations, memory, offset)) \
+  X(void, glTexStorageMem3DEXT, (GLenum target, GLsizei levels, GLenum internalFormat, GLsizei width, GLsizei height, GLsizei depth, GLuint memory, GLuint64 offset), (target, levels, internalFormat, width, height, depth, memory, offset)) \
+  X(void, glTexStorageMem3DMultisampleEXT, (GLenum target, GLsizei samples, GLenum internalFormat, GLsizei width, GLsizei height, GLsizei depth, GLboolean fixedSampleLocations, GLuint memory, GLuint64 offset), (target, samples, internalFormat, width, height, depth, fixedSampleLocations, memory, offset)) \
+  X(void, glBufferStorageMemEXT, (GLenum target, GLsizeiptr size, GLuint memory, GLuint64 offset), (target, size, memory, offset)) \
+  X(void, glTexParameteriHOST, (GLenum target, GLenum pname, GLint param), (target, pname, param)) \
+  X(void, glImportSemaphoreFdEXT, (GLuint semaphore, GLenum handleType, GLint fd), (semaphore, handleType, fd)) \
+  X(void, glImportSemaphoreWin32HandleEXT, (GLuint semaphore, GLenum handleType, void* handle), (semaphore, handleType, handle)) \
+  X(void, glGenSemaphoresEXT, (GLsizei n, GLuint * semaphores), (n, semaphores)) \
+  X(void, glDeleteSemaphoresEXT, (GLsizei n, const GLuint * semaphores), (n, semaphores)) \
+  X(GLboolean, glIsSemaphoreEXT, (GLuint semaphore), (semaphore)) \
+  X(void, glSemaphoreParameterui64vEXT, (GLuint semaphore, GLenum pname, const GLuint64 * params), (semaphore, pname, params)) \
+  X(void, glGetSemaphoreParameterui64vEXT, (GLuint semaphore, GLenum pname, GLuint64 * params), (semaphore, pname, params)) \
+  X(void, glWaitSemaphoreEXT, (GLuint semaphore, GLuint numBufferBarriers, const GLuint * buffers, GLuint numTextureBarriers, const GLuint * textures, const GLenum * srcLayouts), (semaphore, numBufferBarriers, buffers, numTextureBarriers, textures, srcLayouts)) \
+  X(void, glSignalSemaphoreEXT, (GLuint semaphore, GLuint numBufferBarriers, const GLuint * buffers, GLuint numTextureBarriers, const GLuint * textures, const GLenum * dstLayouts), (semaphore, numBufferBarriers, buffers, numTextureBarriers, textures, dstLayouts)) \
+  X(void, glGetUnsignedBytevEXT, (GLenum pname, GLubyte* data), (pname, data)) \
+  X(void, glGetUnsignedBytei_vEXT, (GLenum target, GLuint index, GLubyte* data), (target, index, data)) \
+
+#define FOR_EACH_GLES_FUNCTION(X) \
+    FOR_EACH_GLES_COMMON_FUNCTION(X) \
+    FOR_EACH_GLES_EXTENSION_FUNCTION(X) \
+    FOR_EACH_GLES1_ONLY_FUNCTION(X) \
+    FOR_EACH_GLES2_ONLY_FUNCTION(X) \
+    FOR_EACH_GLES3_ONLY_FUNCTION(X)
+
+// clang-format on
diff --git a/host/libs/graphics_detector/graphics_configuration.cpp b/host/libs/graphics_detector/graphics_configuration.cpp
new file mode 100644
index 0000000..c5b93b7
--- /dev/null
+++ b/host/libs/graphics_detector/graphics_configuration.cpp
@@ -0,0 +1,126 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/libs/graphics_detector/graphics_configuration.h"
+
+#include <ostream>
+
+#include <android-base/strings.h>
+
+#include "host/libs/config/cuttlefish_config.h"
+
+namespace cuttlefish {
+namespace {
+
+struct AngleFeatures {
+  // Prefer linear filtering for YUV AHBs to pass
+  // android.media.decoder.cts.DecodeAccuracyTest.
+  bool prefer_linear_filtering_for_yuv = true;
+
+  // Map unspecified color spaces to PASS_THROUGH to pass
+  // android.media.codec.cts.DecodeEditEncodeTest and
+  // android.media.codec.cts.EncodeDecodeTest.
+  bool map_unspecified_color_space_to_pass_through = true;
+
+  // b/264575911: Nvidia seems to have issues with YUV samplers with
+  // 'lowp' and 'mediump' precision qualifiers.
+  bool ignore_precision_qualifiers = false;
+};
+
+std::ostream& operator<<(std::ostream& stream, const AngleFeatures& features) {
+  std::ios_base::fmtflags flags_backup(stream.flags());
+  stream << std::boolalpha;
+  stream << "ANGLE features: "
+         << "\n";
+  stream << " - prefer_linear_filtering_for_yuv: "
+         << features.prefer_linear_filtering_for_yuv << "\n";
+  stream << " - map_unspecified_color_space_to_pass_through: "
+         << features.map_unspecified_color_space_to_pass_through << "\n";
+  stream << " - ignore_precision_qualifiers: "
+         << features.ignore_precision_qualifiers << "\n";
+  stream.flags(flags_backup);
+  return stream;
+}
+
+AngleFeatures GetNeededAngleFeaturesBasedOnQuirks(
+    const RenderingMode mode, const GraphicsAvailability& availability) {
+  AngleFeatures features = {};
+  switch (mode) {
+    case RenderingMode::kGfxstream:
+      break;
+    case RenderingMode::kGfxstreamGuestAngle: {
+      if (availability
+              .vulkan_has_issue_with_precision_qualifiers_on_yuv_samplers) {
+        features.ignore_precision_qualifiers = true;
+      }
+      break;
+    }
+    case RenderingMode::kGuestSwiftShader:
+    case RenderingMode::kVirglRenderer:
+    case RenderingMode::kNone:
+      break;
+  }
+  return features;
+}
+
+}  // namespace
+
+Result<RenderingMode> GetRenderingMode(const std::string& mode) {
+  if (mode == std::string(kGpuModeDrmVirgl)) {
+    return RenderingMode::kVirglRenderer;
+  }
+  if (mode == std::string(kGpuModeGfxstream)) {
+    return RenderingMode::kGfxstream;
+  }
+  if (mode == std::string(kGpuModeGfxstreamGuestAngle)) {
+    return RenderingMode::kGfxstreamGuestAngle;
+  }
+  if (mode == std::string(kGpuModeGuestSwiftshader)) {
+    return RenderingMode::kGuestSwiftShader;
+  }
+  if (mode == std::string(kGpuModeNone)) {
+    return RenderingMode::kNone;
+  }
+  return CF_ERR("Unsupported rendering mode: " << mode);
+}
+
+Result<AngleFeatureOverrides> GetNeededAngleFeatures(
+    const RenderingMode mode, const GraphicsAvailability& availability) {
+  const AngleFeatures features =
+      GetNeededAngleFeaturesBasedOnQuirks(mode, availability);
+  LOG(DEBUG) << features;
+
+  std::vector<std::string> enable_feature_strings;
+  std::vector<std::string> disable_feature_strings;
+  if (features.prefer_linear_filtering_for_yuv) {
+    enable_feature_strings.push_back("preferLinearFilterForYUV");
+  }
+  if (features.map_unspecified_color_space_to_pass_through) {
+    enable_feature_strings.push_back("mapUnspecifiedColorSpaceToPassThrough");
+  }
+  if (features.ignore_precision_qualifiers) {
+    disable_feature_strings.push_back("enablePrecisionQualifiers");
+  }
+
+  return AngleFeatureOverrides{
+      .angle_feature_overrides_enabled =
+          android::base::Join(enable_feature_strings, ':'),
+      .angle_feature_overrides_disabled =
+          android::base::Join(disable_feature_strings, ':'),
+  };
+}
+
+}  // namespace cuttlefish
\ No newline at end of file
diff --git a/host/libs/graphics_detector/graphics_configuration.h b/host/libs/graphics_detector/graphics_configuration.h
new file mode 100644
index 0000000..0d54130
--- /dev/null
+++ b/host/libs/graphics_detector/graphics_configuration.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#pragma once
+
+#include <string>
+
+#include "common/libs/utils/result.h"
+#include "host/libs/graphics_detector/graphics_detector.h"
+
+namespace cuttlefish {
+
+enum class RenderingMode {
+  kNone,
+  kGuestSwiftShader,
+  kGfxstream,
+  kGfxstreamGuestAngle,
+  kVirglRenderer,
+};
+Result<RenderingMode> GetRenderingMode(const std::string& mode);
+
+struct AngleFeatureOverrides {
+  std::string angle_feature_overrides_enabled;
+  std::string angle_feature_overrides_disabled;
+};
+Result<AngleFeatureOverrides> GetNeededAngleFeatures(
+    RenderingMode mode, const GraphicsAvailability& availability);
+
+}  // namespace cuttlefish
\ No newline at end of file
diff --git a/host/libs/graphics_detector/graphics_detector.cpp b/host/libs/graphics_detector/graphics_detector.cpp
index 5776c41..a24df42 100644
--- a/host/libs/graphics_detector/graphics_detector.cpp
+++ b/host/libs/graphics_detector/graphics_detector.cpp
@@ -19,532 +19,16 @@
 #include <sstream>
 #include <vector>
 
-#include <EGL/egl.h>
-#include <EGL/eglext.h>
-#include <GLES2/gl2.h>
 #include <android-base/logging.h>
 #include <android-base/strings.h>
-#include <dlfcn.h>
-#include <sys/wait.h>
-#include <vulkan/vulkan.h>
+
+#include "host/libs/graphics_detector/graphics_detector_gl.h"
+#include "host/libs/graphics_detector/graphics_detector_vk.h"
+#include "host/libs/graphics_detector/graphics_detector_vk_precision_qualifiers_on_yuv_samplers.h"
 
 namespace cuttlefish {
 namespace {
 
-constexpr const char kEglLib[] = "libEGL.so.1";
-constexpr const char kGlLib[] = "libOpenGL.so.0";
-constexpr const char kGles1Lib[] = "libGLESv1_CM.so.1";
-constexpr const char kGles2Lib[] = "libGLESv2.so.2";
-constexpr const char kVulkanLib[] = "libvulkan.so.1";
-
-constexpr const char kSurfacelessContextExt[] = "EGL_KHR_surfaceless_context";
-
-class Closer {
-public:
-  Closer(std::function<void()> on_close) : on_close_(on_close) {}
-  ~Closer() { on_close_(); }
-
-private:
-  std::function<void()> on_close_;
-};
-
-struct LibraryCloser {
- public:
-  void operator()(void* library) { dlclose(library); }
-};
-
-using ManagedLibrary = std::unique_ptr<void, LibraryCloser>;
-
-void PopulateGlAvailability(GraphicsAvailability* availability) {
-  ManagedLibrary gl_lib(dlopen(kGlLib, RTLD_NOW | RTLD_LOCAL));
-  if (!gl_lib) {
-    LOG(DEBUG) << "Failed to dlopen " << kGlLib << ".";
-    return;
-  }
-  LOG(DEBUG) << "Loaded " << kGlLib << ".";
-  availability->has_gl = true;
-}
-
-void PopulateGles1Availability(GraphicsAvailability* availability) {
-  ManagedLibrary gles1_lib(dlopen(kGles1Lib, RTLD_NOW | RTLD_LOCAL));
-  if (!gles1_lib) {
-    LOG(DEBUG) << "Failed to dlopen " << kGles1Lib << ".";
-    return;
-  }
-  LOG(DEBUG) << "Loaded " << kGles1Lib << ".";
-  availability->has_gles1 = true;
-}
-
-void PopulateGles2Availability(GraphicsAvailability* availability) {
-  ManagedLibrary gles2_lib(dlopen(kGles2Lib, RTLD_NOW | RTLD_LOCAL));
-  if (!gles2_lib) {
-    LOG(DEBUG) << "Failed to dlopen " << kGles2Lib << ".";
-    return;
-  }
-  LOG(DEBUG) << "Loaded " << kGles2Lib << ".";
-  availability->has_gles2 = true;
-}
-
-void PopulateEglAvailability(GraphicsAvailability* availability) {
-  ManagedLibrary egllib(dlopen(kEglLib, RTLD_NOW | RTLD_LOCAL));
-  if (!egllib) {
-    LOG(DEBUG) << "Failed to dlopen " << kEglLib << ".";
-    return;
-  }
-  LOG(DEBUG) << "Loaded " << kEglLib << ".";
-  availability->has_egl = true;
-
-  PFNEGLGETPROCADDRESSPROC eglGetProcAddress =
-      reinterpret_cast<PFNEGLGETPROCADDRESSPROC>(
-          dlsym(egllib.get(), "eglGetProcAddress"));
-  if (eglGetProcAddress == nullptr) {
-    LOG(DEBUG) << "Failed to find function eglGetProcAddress.";
-    return;
-  }
-  LOG(DEBUG) << "Loaded eglGetProcAddress.";
-
-  // Some implementations have it so that eglGetProcAddress is only for
-  // loading EXT functions.
-  auto EglLoadFunction = [&](const char* name) {
-    void* func = dlsym(egllib.get(), name);
-    if (func == NULL) {
-      func = reinterpret_cast<void*>(eglGetProcAddress(name));
-    }
-    return func;
-  };
-
-  PFNEGLGETERRORPROC eglGetError =
-    reinterpret_cast<PFNEGLGETERRORPROC>(EglLoadFunction("eglGetError"));
-  if (eglGetError == nullptr) {
-    LOG(DEBUG) << "Failed to find function eglGetError.";
-    return;
-  }
-  LOG(DEBUG) << "Loaded eglGetError.";
-
-  PFNEGLGETDISPLAYPROC eglGetDisplay =
-    reinterpret_cast<PFNEGLGETDISPLAYPROC>(EglLoadFunction("eglGetDisplay"));
-  if (eglGetDisplay == nullptr) {
-    LOG(DEBUG) << "Failed to find function eglGetDisplay.";
-    return;
-  }
-  LOG(DEBUG) << "Loaded eglGetDisplay.";
-
-  PFNEGLQUERYSTRINGPROC eglQueryString =
-    reinterpret_cast<PFNEGLQUERYSTRINGPROC>(EglLoadFunction("eglQueryString"));
-  if (eglQueryString == nullptr) {
-    LOG(DEBUG) << "Failed to find function eglQueryString";
-    return;
-  }
-  LOG(DEBUG) << "Loaded eglQueryString.";
-
-  EGLDisplay display = eglGetDisplay(EGL_DEFAULT_DISPLAY);
-  if (display != EGL_NO_DISPLAY) {
-    LOG(DEBUG) << "Found default display.";
-  } else {
-    LOG(DEBUG) << "Failed to get default display. " << eglGetError()
-                 << ". Attempting to get surfaceless display via "
-                 << "eglGetPlatformDisplayEXT(EGL_PLATFORM_SURFACELESS_MESA)";
-
-    PFNEGLGETPLATFORMDISPLAYEXTPROC eglGetPlatformDisplayEXT =
-      reinterpret_cast<PFNEGLGETPLATFORMDISPLAYEXTPROC>(
-        EglLoadFunction("eglGetPlatformDisplayEXT"));
-    if (eglGetPlatformDisplayEXT == nullptr) {
-      LOG(DEBUG) << "Failed to find function eglGetPlatformDisplayEXT";
-    } else {
-      display = eglGetPlatformDisplayEXT(EGL_PLATFORM_SURFACELESS_MESA,
-                                         EGL_DEFAULT_DISPLAY, NULL);
-    }
-  }
-
-  if (display == EGL_NO_DISPLAY) {
-    LOG(DEBUG) << "Failed to find display.";
-    return;
-  }
-
-  PFNEGLINITIALIZEPROC eglInitialize =
-      reinterpret_cast<PFNEGLINITIALIZEPROC>(EglLoadFunction("eglInitialize"));
-  if (eglInitialize == nullptr) {
-    LOG(DEBUG) << "Failed to find function eglQueryString";
-    return;
-  }
-
-  EGLint client_version_major = 0;
-  EGLint client_version_minor = 0;
-  if (eglInitialize(display,
-                    &client_version_major,
-                    &client_version_minor) != EGL_TRUE) {
-    LOG(DEBUG) << "Failed to initialize display.";
-    return;
-  }
-  LOG(DEBUG) << "Initialized display.";
-
-  const std::string version_string = eglQueryString(display, EGL_VERSION);
-  if (version_string.empty()) {
-    LOG(DEBUG) << "Failed to query client version.";
-    return;
-  }
-  LOG(DEBUG) << "Found version: " << version_string;
-  availability->egl_version = version_string;
-
-  const std::string vendor_string = eglQueryString(display, EGL_VENDOR);
-  if (vendor_string.empty()) {
-    LOG(DEBUG) << "Failed to query vendor.";
-    return;
-  }
-  LOG(DEBUG) << "Found vendor: " << vendor_string;
-  availability->egl_vendor = vendor_string;
-
-  const std::string extensions_string = eglQueryString(display, EGL_EXTENSIONS);
-  if (extensions_string.empty()) {
-    LOG(DEBUG) << "Failed to query extensions.";
-    return;
-  }
-  LOG(DEBUG) << "Found extensions: " << extensions_string;
-  availability->egl_extensions = extensions_string;
-
-  if (extensions_string.find(kSurfacelessContextExt) == std::string::npos) {
-    LOG(DEBUG) << "Failed to find extension EGL_KHR_surfaceless_context.";
-    return;
-  }
-
-  const std::string display_apis_string = eglQueryString(display,
-                                                         EGL_CLIENT_APIS);
-  if (display_apis_string.empty()) {
-    LOG(DEBUG) << "Failed to query display apis.";
-    return;
-  }
-  LOG(DEBUG) << "Found display apis: " << display_apis_string;
-
-  PFNEGLBINDAPIPROC eglBindAPI =
-    reinterpret_cast<PFNEGLBINDAPIPROC>(EglLoadFunction("eglBindAPI"));
-  if (eglBindAPI == nullptr) {
-    LOG(DEBUG) << "Failed to find function eglBindAPI";
-    return;
-  }
-  LOG(DEBUG) << "Loaded eglBindAPI.";
-
-  if (eglBindAPI(EGL_OPENGL_ES_API) == EGL_FALSE) {
-    LOG(DEBUG) << "Failed to bind GLES API.";
-    return;
-  }
-  LOG(DEBUG) << "Bound GLES API.";
-
-  PFNEGLCHOOSECONFIGPROC eglChooseConfig =
-    reinterpret_cast<PFNEGLCHOOSECONFIGPROC>(
-      EglLoadFunction("eglChooseConfig"));
-  if (eglChooseConfig == nullptr) {
-    LOG(DEBUG) << "Failed to find function eglChooseConfig";
-    return;
-  }
-  LOG(DEBUG) << "Loaded eglChooseConfig.";
-
-  const EGLint framebuffer_config_attributes[] = {
-    EGL_SURFACE_TYPE, EGL_PBUFFER_BIT,
-    EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
-    EGL_RED_SIZE, 1,
-    EGL_GREEN_SIZE, 1,
-    EGL_BLUE_SIZE, 1,
-    EGL_ALPHA_SIZE, 0,
-    EGL_NONE,
-  };
-
-  EGLConfig framebuffer_config;
-  EGLint num_framebuffer_configs = 0;
-  if (eglChooseConfig(display,
-                      framebuffer_config_attributes,
-                      &framebuffer_config,
-                      1,
-                      &num_framebuffer_configs) != EGL_TRUE) {
-    LOG(DEBUG) << "Failed to find matching framebuffer config.";
-    return;
-  }
-  LOG(DEBUG) << "Found matching framebuffer config.";
-
-  PFNEGLCREATECONTEXTPROC eglCreateContext =
-    reinterpret_cast<PFNEGLCREATECONTEXTPROC>(
-      EglLoadFunction("eglCreateContext"));
-  if (eglCreateContext == nullptr) {
-    LOG(DEBUG) << "Failed to find function eglCreateContext";
-    return;
-  }
-  LOG(DEBUG) << "Loaded eglCreateContext.";
-
-  PFNEGLDESTROYCONTEXTPROC eglDestroyContext =
-    reinterpret_cast<PFNEGLDESTROYCONTEXTPROC>(
-      EglLoadFunction("eglDestroyContext"));
-  if (eglDestroyContext == nullptr) {
-    LOG(DEBUG) << "Failed to find function eglDestroyContext";
-    return;
-  }
-  LOG(DEBUG) << "Loaded eglDestroyContext.";
-
-  const EGLint context_attributes[] = {
-    EGL_CONTEXT_CLIENT_VERSION, 2,
-    EGL_NONE
-  };
-
-  EGLContext context = eglCreateContext(display,
-                                        framebuffer_config,
-                                        EGL_NO_CONTEXT,
-                                        context_attributes);
-  if (context == EGL_NO_CONTEXT) {
-    LOG(DEBUG) << "Failed to create EGL context.";
-    return;
-  }
-  LOG(DEBUG) << "Created EGL context.";
-  Closer context_closer([&]() { eglDestroyContext(display, context); });
-
-  PFNEGLMAKECURRENTPROC eglMakeCurrent =
-    reinterpret_cast<PFNEGLMAKECURRENTPROC>(EglLoadFunction("eglMakeCurrent"));
-  if (eglMakeCurrent == nullptr) {
-    LOG(DEBUG) << "Failed to find function eglMakeCurrent";
-    return;
-  }
-  LOG(DEBUG) << "Loaded eglMakeCurrent.";
-
-  if (eglMakeCurrent(display,
-                     EGL_NO_SURFACE,
-                     EGL_NO_SURFACE,
-                     context) != EGL_TRUE) {
-    LOG(DEBUG) << "Failed to make EGL context current.";
-    return;
-  }
-  LOG(DEBUG) << "Make EGL context current.";
-  availability->can_init_gles2_on_egl_surfaceless = true;
-
-  PFNGLGETSTRINGPROC glGetString =
-      reinterpret_cast<PFNGLGETSTRINGPROC>(eglGetProcAddress("glGetString"));
-
-  const GLubyte* gles2_vendor = glGetString(GL_VENDOR);
-  if (gles2_vendor == nullptr) {
-    LOG(DEBUG) << "Failed to query GLES2 vendor.";
-    return;
-  }
-  const std::string gles2_vendor_string((const char*)gles2_vendor);
-  LOG(DEBUG) << "Found GLES2 vendor: " << gles2_vendor_string;
-  availability->gles2_vendor = gles2_vendor_string;
-
-  const GLubyte* gles2_version = glGetString(GL_VERSION);
-  if (gles2_version == nullptr) {
-    LOG(DEBUG) << "Failed to query GLES2 vendor.";
-    return;
-  }
-  const std::string gles2_version_string((const char*)gles2_version);
-  LOG(DEBUG) << "Found GLES2 version: " << gles2_version_string;
-  availability->gles2_version = gles2_version_string;
-
-  const GLubyte* gles2_renderer = glGetString(GL_RENDERER);
-  if (gles2_renderer == nullptr) {
-    LOG(DEBUG) << "Failed to query GLES2 renderer.";
-    return;
-  }
-  const std::string gles2_renderer_string((const char*)gles2_renderer);
-  LOG(DEBUG) << "Found GLES2 renderer: " << gles2_renderer_string;
-  availability->gles2_renderer = gles2_renderer_string;
-
-  const GLubyte* gles2_extensions = glGetString(GL_EXTENSIONS);
-  if (gles2_extensions == nullptr) {
-    LOG(DEBUG) << "Failed to query GLES2 extensions.";
-    return;
-  }
-  const std::string gles2_extensions_string((const char*)gles2_extensions);
-  LOG(DEBUG) << "Found GLES2 extensions: " << gles2_extensions_string;
-  availability->gles2_extensions = gles2_extensions_string;
-}
-
-void PopulateVulkanAvailability(GraphicsAvailability* availability) {
-  ManagedLibrary vklib(dlopen(kVulkanLib, RTLD_NOW | RTLD_LOCAL));
-  if (!vklib) {
-    LOG(DEBUG) << "Failed to dlopen " << kVulkanLib << ".";
-    return;
-  }
-  LOG(DEBUG) << "Loaded " << kVulkanLib << ".";
-  availability->has_vulkan = true;
-
-  uint32_t instance_version = 0;
-
-  PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr =
-      reinterpret_cast<PFN_vkGetInstanceProcAddr>(
-          dlsym(vklib.get(), "vkGetInstanceProcAddr"));
-  if (vkGetInstanceProcAddr == nullptr) {
-    LOG(DEBUG) << "Failed to find symbol vkGetInstanceProcAddr.";
-    return;
-  }
-
-  PFN_vkEnumerateInstanceVersion vkEnumerateInstanceVersion =
-      reinterpret_cast<PFN_vkEnumerateInstanceVersion>(
-          dlsym(vklib.get(), "vkEnumerateInstanceVersion"));
-  if (vkEnumerateInstanceVersion == nullptr ||
-      vkEnumerateInstanceVersion(&instance_version) != VK_SUCCESS) {
-    instance_version = VK_API_VERSION_1_0;
-  }
-
-  PFN_vkCreateInstance vkCreateInstance =
-    reinterpret_cast<PFN_vkCreateInstance>(
-      vkGetInstanceProcAddr(VK_NULL_HANDLE, "vkCreateInstance"));
-  if (vkCreateInstance == nullptr) {
-    LOG(DEBUG) << "Failed to get function vkCreateInstance.";
-    return;
-  }
-
-  VkApplicationInfo application_info;
-  application_info.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
-  application_info.pNext = nullptr;
-  application_info.pApplicationName = "";
-  application_info.applicationVersion = 1;
-  application_info.pEngineName = "";
-  application_info.engineVersion = 1;
-  application_info.apiVersion = instance_version;
-
-  VkInstanceCreateInfo instance_create_info = {};
-  instance_create_info.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
-  instance_create_info.pNext = nullptr;
-  instance_create_info.flags = 0;
-  instance_create_info.pApplicationInfo = &application_info;
-  instance_create_info.enabledLayerCount = 0;
-  instance_create_info.ppEnabledLayerNames = nullptr;
-  instance_create_info.enabledExtensionCount = 0;
-  instance_create_info.ppEnabledExtensionNames = nullptr;
-
-  VkInstance instance = VK_NULL_HANDLE;
-  VkResult result = vkCreateInstance(&instance_create_info, nullptr, &instance);
-  if (result != VK_SUCCESS) {
-    if (result == VK_ERROR_OUT_OF_HOST_MEMORY) {
-      LOG(DEBUG) << "Failed to create Vulkan instance: "
-                   << "VK_ERROR_OUT_OF_HOST_MEMORY.";
-    } else if (result == VK_ERROR_OUT_OF_DEVICE_MEMORY) {
-      LOG(DEBUG) << "Failed to create Vulkan instance: "
-                   << "VK_ERROR_OUT_OF_DEVICE_MEMORY.";
-    } else if (result == VK_ERROR_INITIALIZATION_FAILED) {
-      LOG(DEBUG) << "Failed to create Vulkan instance: "
-                   << "VK_ERROR_INITIALIZATION_FAILED.";
-    } else if (result == VK_ERROR_LAYER_NOT_PRESENT) {
-      LOG(DEBUG) << "Failed to create Vulkan instance: "
-                   << "VK_ERROR_LAYER_NOT_PRESENT.";
-    } else if (result == VK_ERROR_EXTENSION_NOT_PRESENT) {
-      LOG(DEBUG) << "Failed to create Vulkan instance: "
-                   << "VK_ERROR_EXTENSION_NOT_PRESENT.";
-    } else if (result == VK_ERROR_INCOMPATIBLE_DRIVER) {
-      LOG(DEBUG) << "Failed to create Vulkan instance: "
-                   << "VK_ERROR_INCOMPATIBLE_DRIVER.";
-    } else {
-      LOG(DEBUG) << "Failed to create Vulkan instance.";
-    }
-    return;
-  }
-
-  PFN_vkDestroyInstance vkDestroyInstance =
-    reinterpret_cast<PFN_vkDestroyInstance>(
-      vkGetInstanceProcAddr(instance, "vkDestroyInstance"));
-  if (vkDestroyInstance == nullptr) {
-    LOG(DEBUG) << "Failed to get function vkDestroyInstance.";
-    return;
-  }
-
-  Closer instancecloser([&]() {vkDestroyInstance(instance, nullptr); });
-
-  PFN_vkEnumeratePhysicalDevices vkEnumeratePhysicalDevices =
-    reinterpret_cast<PFN_vkEnumeratePhysicalDevices>(
-      vkGetInstanceProcAddr(instance, "vkEnumeratePhysicalDevices"));
-  if (vkEnumeratePhysicalDevices == nullptr) {
-    LOG(DEBUG) << "Failed to "
-                 << "vkGetInstanceProcAddr(vkEnumeratePhysicalDevices).";
-    return;
-  }
-
-  PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties =
-    reinterpret_cast<PFN_vkGetPhysicalDeviceProperties>(
-      vkGetInstanceProcAddr(instance, "vkGetPhysicalDeviceProperties"));
-  if (vkGetPhysicalDeviceProperties == nullptr) {
-    LOG(DEBUG) << "Failed to "
-                 << "vkGetInstanceProcAddr(vkGetPhysicalDeviceProperties).";
-    return;
-  }
-
-  auto vkEnumerateDeviceExtensionProperties =
-    reinterpret_cast<PFN_vkEnumerateDeviceExtensionProperties>(
-      vkGetInstanceProcAddr(instance, "vkEnumerateDeviceExtensionProperties"));
-  if (vkEnumerateDeviceExtensionProperties == nullptr) {
-    LOG(DEBUG) << "Failed to "
-                 << "vkGetInstanceProcAddr("
-                 << "vkEnumerateDeviceExtensionProperties"
-                 << ").";
-    return;
-  }
-
-  uint32_t device_count = 0;
-  result = vkEnumeratePhysicalDevices(instance, &device_count, nullptr);
-  if (result != VK_SUCCESS) {
-    if (result == VK_INCOMPLETE) {
-      LOG(DEBUG) << "Failed to enumerate physical device count: "
-                   << "VK_INCOMPLETE";
-    } else if (result == VK_ERROR_OUT_OF_HOST_MEMORY) {
-      LOG(DEBUG) << "Failed to enumerate physical device count: "
-                   << "VK_ERROR_OUT_OF_HOST_MEMORY";
-    } else if (result == VK_ERROR_OUT_OF_DEVICE_MEMORY) {
-      LOG(DEBUG) << "Failed to enumerate physical device count: "
-                   << "VK_ERROR_OUT_OF_DEVICE_MEMORY";
-    } else if (result == VK_ERROR_INITIALIZATION_FAILED) {
-      LOG(DEBUG) << "Failed to enumerate physical device count: "
-                   << "VK_ERROR_INITIALIZATION_FAILED";
-    } else {
-      LOG(DEBUG) << "Failed to enumerate physical device count.";
-    }
-    return;
-  }
-
-  if (device_count == 0) {
-    LOG(DEBUG) << "No physical devices present.";
-    return;
-  }
-
-  std::vector<VkPhysicalDevice> devices(device_count, VK_NULL_HANDLE);
-  result = vkEnumeratePhysicalDevices(instance, &device_count, devices.data());
-  if (result != VK_SUCCESS) {
-    LOG(DEBUG) << "Failed to enumerate physical devices.";
-    return;
-  }
-
-  for (VkPhysicalDevice device : devices) {
-    VkPhysicalDeviceProperties device_properties = {};
-    vkGetPhysicalDeviceProperties(device, &device_properties);
-
-    LOG(DEBUG) << "Found physical device: " << device_properties.deviceName;
-
-    uint32_t device_extensions_count = 0;
-    vkEnumerateDeviceExtensionProperties(device,
-                                         nullptr,
-                                         &device_extensions_count,
-                                         nullptr);
-
-    std::vector<VkExtensionProperties> device_extensions;
-    device_extensions.resize(device_extensions_count);
-
-    vkEnumerateDeviceExtensionProperties(device,
-                                         nullptr,
-                                         &device_extensions_count,
-                                         device_extensions.data());
-
-    std::vector<const char*> device_extensions_strings;
-    for (const VkExtensionProperties& device_extension : device_extensions) {
-      device_extensions_strings.push_back(device_extension.extensionName);
-    }
-
-    std::string device_extensions_string =
-      android::base::Join(device_extensions_strings, ' ');
-
-    LOG(DEBUG) << "Found physical device extensions: "
-                 << device_extensions_string;
-
-    if (device_properties.deviceType == VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU) {
-      availability->has_discrete_gpu = true;
-      availability->discrete_gpu_device_name = device_properties.deviceName;
-      availability->discrete_gpu_device_extensions = device_extensions_string;
-      break;
-    }
-  }
-}
-
 std::string ToLower(const std::string& v) {
   std::string result = v;
   std::transform(result.begin(), result.end(), result.begin(),
@@ -557,47 +41,30 @@
   return lower_renderer.find("llvmpipe") != std::string::npos;
 }
 
-GraphicsAvailability GetGraphicsAvailability() {
-  GraphicsAvailability availability;
-
-  PopulateEglAvailability(&availability);
-  PopulateGlAvailability(&availability);
-  PopulateGles1Availability(&availability);
-  PopulateGles2Availability(&availability);
-  PopulateVulkanAvailability(&availability);
-
-  return availability;
-}
-
 }  // namespace
 
 bool ShouldEnableAcceleratedRendering(
     const GraphicsAvailability& availability) {
-  return availability.can_init_gles2_on_egl_surfaceless &&
-         !IsLikelySoftwareRenderer(availability.gles2_renderer) &&
+  const bool sufficient_gles2 =
+      availability.can_init_gles2_on_egl_surfaceless &&
+      !IsLikelySoftwareRenderer(availability.gles2_renderer);
+  const bool sufficient_gles3 =
+      availability.can_init_gles3_on_egl_surfaceless &&
+      !IsLikelySoftwareRenderer(availability.gles3_renderer);
+  return (sufficient_gles2 || sufficient_gles3) &&
          availability.has_discrete_gpu;
 }
 
-// Runs GetGraphicsAvailability() inside of a subprocess first to ensure that
-// GetGraphicsAvailability() can complete successfully without crashing
-// assemble_cvd. Configurations such as GCE instances without a GPU but with GPU
+// Runs various graphics tests inside of subprocesses first to ensure that
+// this function can complete successfully without crashing the Cuttlefish
+// launcher. Configurations such as GCE instances without a GPU but with GPU
 // drivers for example have seen crashes.
 GraphicsAvailability GetGraphicsAvailabilityWithSubprocessCheck() {
-  pid_t pid = fork();
-  if (pid == 0) {
-    GetGraphicsAvailability();
-    std::exit(0);
-  }
-  int status;
-  if (waitpid(pid, &status, 0) != pid) {
-    PLOG(DEBUG) << "Failed to wait for graphics check subprocess";
-    return GraphicsAvailability{};
-  }
-  if (WIFEXITED(status) && WEXITSTATUS(status) == 0) {
-    return GetGraphicsAvailability();
-  }
-  LOG(DEBUG) << "Subprocess for detect_graphics failed with " << status;
-  return GraphicsAvailability{};
+  GraphicsAvailability availability;
+  PopulateEglAndGlesAvailability(&availability);
+  PopulateVulkanAvailability(&availability);
+  PopulateVulkanPrecisionQualifiersOnYuvSamplersQuirk(&availability);
+  return availability;
 }
 
 std::ostream& operator<<(std::ostream& stream,
@@ -607,10 +74,9 @@
   stream << "Graphics Availability:\n";
 
   stream << "\n";
-  stream << "OpenGL lib available: " << availability.has_gl << "\n";
-  stream << "OpenGL ES1 lib available: " << availability.has_gles1 << "\n";
-  stream << "OpenGL ES2 lib available: " << availability.has_gles2 << "\n";
-  stream << "EGL lib available: " << availability.has_egl << "\n";
+  stream << "EGL available: " << availability.has_egl << "\n";
+  stream << "OpenGL ES 2 available: " << availability.has_gles2 << "\n";
+  stream << "OpenGL ES 3 available: " << availability.has_gles3 << "\n";
   stream << "Vulkan lib available: " << availability.has_vulkan << "\n";
 
   stream << "\n";
@@ -624,12 +90,18 @@
 
   stream << "GLES2 can init on surfaceless display: "
          << availability.can_init_gles2_on_egl_surfaceless << "\n";
-  stream << "\n";
   stream << "GLES2 vendor: " << availability.gles2_vendor << "\n";
   stream << "GLES2 version: " << availability.gles2_version << "\n";
   stream << "GLES2 renderer: " << availability.gles2_renderer << "\n";
   stream << "GLES2 extensions: " << availability.gles2_extensions << "\n";
 
+  stream << "GLES3 can init on surfaceless display: "
+         << availability.can_init_gles3_on_egl_surfaceless << "\n";
+  stream << "GLES3 vendor: " << availability.gles3_vendor << "\n";
+  stream << "GLES3 version: " << availability.gles3_version << "\n";
+  stream << "GLES3 renderer: " << availability.gles3_renderer << "\n";
+  stream << "GLES3 extensions: " << availability.gles3_extensions << "\n";
+
   stream << "\n";
   stream << "Vulkan discrete GPU detected: " << availability.has_discrete_gpu
          << "\n";
@@ -640,6 +112,11 @@
            << availability.discrete_gpu_device_extensions << "\n";
   }
 
+  stream
+      << "Vulkan has quirk with precision qualifiers on YUV samplers: "
+      << availability.vulkan_has_issue_with_precision_qualifiers_on_yuv_samplers
+      << "\n";
+
   stream << "\n";
   stream << "Accelerated rendering supported: "
          << ShouldEnableAcceleratedRendering(availability);
diff --git a/host/libs/graphics_detector/graphics_detector.h b/host/libs/graphics_detector/graphics_detector.h
index 1bacc3d..e852a33 100644
--- a/host/libs/graphics_detector/graphics_detector.h
+++ b/host/libs/graphics_detector/graphics_detector.h
@@ -21,14 +21,12 @@
 namespace cuttlefish {
 
 struct GraphicsAvailability {
-  bool has_gl = false;
-  bool has_gles1 = false;
-  bool has_gles2 = false;
   bool has_egl = false;
+  bool has_gles2 = false;
+  bool has_gles3 = false;
   bool has_vulkan = false;
 
   std::string egl_client_extensions;
-
   std::string egl_version;
   std::string egl_vendor;
   std::string egl_extensions;
@@ -39,9 +37,18 @@
   std::string gles2_renderer;
   std::string gles2_extensions;
 
+  bool can_init_gles3_on_egl_surfaceless = false;
+  std::string gles3_vendor;
+  std::string gles3_version;
+  std::string gles3_renderer;
+  std::string gles3_extensions;
+
   bool has_discrete_gpu = false;
   std::string discrete_gpu_device_name;
   std::string discrete_gpu_device_extensions;
+
+  // See b/264575911.
+  bool vulkan_has_issue_with_precision_qualifiers_on_yuv_samplers = false;
 };
 
 bool ShouldEnableAcceleratedRendering(const GraphicsAvailability& availability);
diff --git a/host/libs/graphics_detector/graphics_detector_gl.cpp b/host/libs/graphics_detector/graphics_detector_gl.cpp
new file mode 100644
index 0000000..5bb5310
--- /dev/null
+++ b/host/libs/graphics_detector/graphics_detector_gl.cpp
@@ -0,0 +1,282 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/libs/graphics_detector/graphics_detector_gl.h"
+
+#include <android-base/logging.h>
+#include <android-base/strings.h>
+
+#include "host/libs/graphics_detector/egl.h"
+#include "host/libs/graphics_detector/gles.h"
+#include "host/libs/graphics_detector/subprocess.h"
+
+namespace cuttlefish {
+namespace {
+
+constexpr const char kSurfacelessContextExt[] = "EGL_KHR_surfaceless_context";
+
+class Closer {
+ public:
+  Closer(std::function<void()> on_close) : on_close_(std::move(on_close)) {}
+  ~Closer() { on_close_(); }
+
+ private:
+  std::function<void()> on_close_;
+};
+
+void PopulateEglAndGlesAvailabilityImpl(GraphicsAvailability* availability) {
+  auto egl = Egl::Load();
+  if (!egl) {
+    LOG(VERBOSE) << "Failed to load EGL library.";
+    return;
+  }
+  LOG(VERBOSE) << "Loaded EGL library.";
+  availability->has_egl = true;
+
+  EGLDisplay display = egl->eglGetDisplay(EGL_DEFAULT_DISPLAY);
+  if (display != EGL_NO_DISPLAY) {
+    LOG(VERBOSE) << "Found default display.";
+  } else {
+    LOG(VERBOSE) << "Failed to get default display. " << egl->eglGetError()
+                 << ". Attempting to get surfaceless display via "
+                 << "eglGetPlatformDisplayEXT(EGL_PLATFORM_SURFACELESS_MESA)";
+
+    if (egl->eglGetPlatformDisplayEXT == nullptr) {
+      LOG(VERBOSE) << "Failed to find function eglGetPlatformDisplayEXT";
+    } else {
+      display = egl->eglGetPlatformDisplayEXT(EGL_PLATFORM_SURFACELESS_MESA,
+                                              EGL_DEFAULT_DISPLAY, NULL);
+    }
+  }
+
+  if (display == EGL_NO_DISPLAY) {
+    LOG(VERBOSE) << "Failed to find display.";
+    return;
+  }
+
+  EGLint client_version_major = 0;
+  EGLint client_version_minor = 0;
+  if (egl->eglInitialize(display, &client_version_major,
+                         &client_version_minor) != EGL_TRUE) {
+    LOG(VERBOSE) << "Failed to initialize display.";
+    return;
+  }
+  LOG(VERBOSE) << "Initialized display.";
+
+  const std::string version_string = egl->eglQueryString(display, EGL_VERSION);
+  if (version_string.empty()) {
+    LOG(VERBOSE) << "Failed to query client version.";
+    return;
+  }
+  LOG(VERBOSE) << "Found version: " << version_string;
+  availability->egl_version = version_string;
+
+  const std::string vendor_string = egl->eglQueryString(display, EGL_VENDOR);
+  if (vendor_string.empty()) {
+    LOG(VERBOSE) << "Failed to query vendor.";
+    return;
+  }
+  LOG(VERBOSE) << "Found vendor: " << vendor_string;
+  availability->egl_vendor = vendor_string;
+
+  const std::string extensions_string =
+      egl->eglQueryString(display, EGL_EXTENSIONS);
+  if (extensions_string.empty()) {
+    LOG(VERBOSE) << "Failed to query extensions.";
+    return;
+  }
+  LOG(VERBOSE) << "Found extensions: " << extensions_string;
+  availability->egl_extensions = extensions_string;
+
+  if (extensions_string.find(kSurfacelessContextExt) == std::string::npos) {
+    LOG(VERBOSE) << "Failed to find extension EGL_KHR_surfaceless_context.";
+    return;
+  }
+
+  const std::string display_apis_string =
+      egl->eglQueryString(display, EGL_CLIENT_APIS);
+  if (display_apis_string.empty()) {
+    LOG(VERBOSE) << "Failed to query display apis.";
+    return;
+  }
+  LOG(VERBOSE) << "Found display apis: " << display_apis_string;
+
+  if (egl->eglBindAPI(EGL_OPENGL_ES_API) == EGL_FALSE) {
+    LOG(VERBOSE) << "Failed to bind GLES API.";
+    return;
+  }
+  LOG(VERBOSE) << "Bound GLES API.";
+
+  const EGLint framebuffer_config_attributes[] = {
+      EGL_SURFACE_TYPE,
+      EGL_PBUFFER_BIT,
+      EGL_RENDERABLE_TYPE,
+      EGL_OPENGL_ES2_BIT,
+      EGL_RED_SIZE,
+      1,
+      EGL_GREEN_SIZE,
+      1,
+      EGL_BLUE_SIZE,
+      1,
+      EGL_ALPHA_SIZE,
+      0,
+      EGL_NONE,
+  };
+
+  EGLConfig framebuffer_config;
+  EGLint num_framebuffer_configs = 0;
+  if (egl->eglChooseConfig(display, framebuffer_config_attributes,
+                           &framebuffer_config, 1,
+                           &num_framebuffer_configs) != EGL_TRUE) {
+    LOG(VERBOSE) << "Failed to find matching framebuffer config.";
+    return;
+  }
+  LOG(VERBOSE) << "Found matching framebuffer config.";
+
+  const EGLint gles2_context_attributes[] = {EGL_CONTEXT_CLIENT_VERSION, 2,
+                                             EGL_NONE};
+  EGLContext gles2_context = egl->eglCreateContext(
+      display, framebuffer_config, EGL_NO_CONTEXT, gles2_context_attributes);
+  if (gles2_context == EGL_NO_CONTEXT) {
+    LOG(VERBOSE) << "Failed to create EGL context.";
+  } else {
+    LOG(VERBOSE) << "Created EGL context.";
+    Closer context_closer(
+        [&]() { egl->eglDestroyContext(display, gles2_context); });
+
+    if (egl->eglMakeCurrent(display, EGL_NO_SURFACE, EGL_NO_SURFACE,
+                            gles2_context) != EGL_TRUE) {
+      LOG(VERBOSE) << "Failed to make GLES2 context current.";
+      return;
+    }
+    LOG(VERBOSE) << "Make GLES2 context current.";
+    availability->can_init_gles2_on_egl_surfaceless = true;
+
+    auto gles = Gles::LoadFromEgl(&*egl);
+    if (!gles) {
+      LOG(VERBOSE) << "Failed to load GLES library.";
+      return;
+    }
+
+    const GLubyte* gles2_vendor = gles->glGetString(GL_VENDOR);
+    if (gles2_vendor == nullptr) {
+      LOG(VERBOSE) << "Failed to query GLES2 vendor.";
+      return;
+    }
+    const std::string gles2_vendor_string((const char*)gles2_vendor);
+    LOG(VERBOSE) << "Found GLES2 vendor: " << gles2_vendor_string;
+    availability->gles2_vendor = gles2_vendor_string;
+
+    const GLubyte* gles2_version = gles->glGetString(GL_VERSION);
+    if (gles2_version == nullptr) {
+      LOG(VERBOSE) << "Failed to query GLES2 vendor.";
+      return;
+    }
+    const std::string gles2_version_string((const char*)gles2_version);
+    LOG(VERBOSE) << "Found GLES2 version: " << gles2_version_string;
+    availability->gles2_version = gles2_version_string;
+
+    const GLubyte* gles2_renderer = gles->glGetString(GL_RENDERER);
+    if (gles2_renderer == nullptr) {
+      LOG(VERBOSE) << "Failed to query GLES2 renderer.";
+      return;
+    }
+    const std::string gles2_renderer_string((const char*)gles2_renderer);
+    LOG(VERBOSE) << "Found GLES2 renderer: " << gles2_renderer_string;
+    availability->gles2_renderer = gles2_renderer_string;
+
+    const GLubyte* gles2_extensions = gles->glGetString(GL_EXTENSIONS);
+    if (gles2_extensions == nullptr) {
+      LOG(VERBOSE) << "Failed to query GLES2 extensions.";
+      return;
+    }
+    const std::string gles2_extensions_string((const char*)gles2_extensions);
+    LOG(VERBOSE) << "Found GLES2 extensions: " << gles2_extensions_string;
+    availability->gles2_extensions = gles2_extensions_string;
+  }
+
+  const EGLint gles3_context_attributes[] = {EGL_CONTEXT_CLIENT_VERSION, 3,
+                                             EGL_NONE};
+  EGLContext gles3_context = egl->eglCreateContext(
+      display, framebuffer_config, EGL_NO_CONTEXT, gles3_context_attributes);
+  if (gles3_context == EGL_NO_CONTEXT) {
+    LOG(VERBOSE) << "Failed to create GLES3 context.";
+  } else {
+    LOG(VERBOSE) << "Created GLES3 context.";
+    Closer context_closer(
+        [&]() { egl->eglDestroyContext(display, gles3_context); });
+
+    if (egl->eglMakeCurrent(display, EGL_NO_SURFACE, EGL_NO_SURFACE,
+                            gles3_context) != EGL_TRUE) {
+      LOG(VERBOSE) << "Failed to make GLES3 context current.";
+      return;
+    }
+    LOG(VERBOSE) << "Make GLES3 context current.";
+    availability->can_init_gles3_on_egl_surfaceless = true;
+
+    auto gles = Gles::LoadFromEgl(&*egl);
+    if (!gles) {
+      LOG(VERBOSE) << "Failed to load GLES library.";
+      return;
+    }
+
+    const GLubyte* gles3_vendor = gles->glGetString(GL_VENDOR);
+    if (gles3_vendor == nullptr) {
+      LOG(VERBOSE) << "Failed to query GLES3 vendor.";
+      return;
+    }
+    const std::string gles3_vendor_string((const char*)gles3_vendor);
+    LOG(VERBOSE) << "Found GLES3 vendor: " << gles3_vendor_string;
+    availability->gles3_vendor = gles3_vendor_string;
+
+    const GLubyte* gles3_version = gles->glGetString(GL_VERSION);
+    if (gles3_version == nullptr) {
+      LOG(VERBOSE) << "Failed to query GLES2 vendor.";
+      return;
+    }
+    const std::string gles3_version_string((const char*)gles3_version);
+    LOG(VERBOSE) << "Found GLES3 version: " << gles3_version_string;
+    availability->gles3_version = gles3_version_string;
+
+    const GLubyte* gles3_renderer = gles->glGetString(GL_RENDERER);
+    if (gles3_renderer == nullptr) {
+      LOG(VERBOSE) << "Failed to query GLES3 renderer.";
+      return;
+    }
+    const std::string gles3_renderer_string((const char*)gles3_renderer);
+    LOG(VERBOSE) << "Found GLES3 renderer: " << gles3_renderer_string;
+    availability->gles3_renderer = gles3_renderer_string;
+
+    const GLubyte* gles3_extensions = gles->glGetString(GL_EXTENSIONS);
+    if (gles3_extensions == nullptr) {
+      LOG(VERBOSE) << "Failed to query GLES3 extensions.";
+      return;
+    }
+    const std::string gles3_extensions_string((const char*)gles3_extensions);
+    LOG(VERBOSE) << "Found GLES3 extensions: " << gles3_extensions_string;
+    availability->gles3_extensions = gles3_extensions_string;
+  }
+}
+
+}  // namespace
+
+void PopulateEglAndGlesAvailability(GraphicsAvailability* availability) {
+  DoWithSubprocessCheck("PopulateEglAndGlesAvailability", [&]() {
+    PopulateEglAndGlesAvailabilityImpl(availability);
+  });
+}
+
+}  // namespace cuttlefish
diff --git a/host/libs/graphics_detector/graphics_detector_gl.h b/host/libs/graphics_detector/graphics_detector_gl.h
new file mode 100644
index 0000000..2bf349e
--- /dev/null
+++ b/host/libs/graphics_detector/graphics_detector_gl.h
@@ -0,0 +1,24 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#pragma once
+
+#include "host/libs/graphics_detector/graphics_detector.h"
+
+namespace cuttlefish {
+
+void PopulateEglAndGlesAvailability(GraphicsAvailability* availability);
+
+}  // namespace cuttlefish
diff --git a/host/libs/graphics_detector/graphics_detector_vk.cpp b/host/libs/graphics_detector/graphics_detector_vk.cpp
new file mode 100644
index 0000000..45db0d9
--- /dev/null
+++ b/host/libs/graphics_detector/graphics_detector_vk.cpp
@@ -0,0 +1,71 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/libs/graphics_detector/graphics_detector_vk.h"
+
+#include <android-base/logging.h>
+#include <android-base/strings.h>
+
+#include "host/libs/graphics_detector/subprocess.h"
+#include "host/libs/graphics_detector/vk.h"
+
+namespace cuttlefish {
+namespace {
+
+vk::Result PopulateVulkanAvailabilityImpl(GraphicsAvailability* availability) {
+  auto vk = Vk::Load();
+  if (!vk) {
+    LOG(ERROR) << "Failed to Vulkan library.";
+    return vk::Result::eErrorInitializationFailed;
+  }
+  LOG(VERBOSE) << "Loaded Vulkan library.";
+  availability->has_vulkan = true;
+
+  const auto physical_devices =
+      VK_EXPECT_RESULT(vk::raii::PhysicalDevices::create(vk->vk_instance));
+  for (const auto& physical_device : physical_devices) {
+    const auto props = physical_device.getProperties();
+    if (props.deviceType != vk::PhysicalDeviceType::eDiscreteGpu) {
+      continue;
+    }
+
+    const auto exts = physical_device.enumerateDeviceExtensionProperties();
+
+    std::vector<std::string> exts_strs;
+    exts_strs.reserve(exts.size());
+    for (const auto& ext : exts) {
+      exts_strs.push_back(std::string(ext.extensionName));
+    }
+
+    availability->has_discrete_gpu = true;
+    availability->discrete_gpu_device_name = std::string(props.deviceName);
+    availability->discrete_gpu_device_extensions =
+        android::base::Join(exts_strs, ' ');
+    break;
+  }
+
+  return vk::Result::eSuccess;
+}
+
+}  // namespace
+
+void PopulateVulkanAvailability(GraphicsAvailability* availability) {
+  DoWithSubprocessCheck("PopulateVulkanAvailability", [&]() {
+    PopulateVulkanAvailabilityImpl(availability);
+  });
+}
+
+}  // namespace cuttlefish
diff --git a/host/libs/graphics_detector/graphics_detector_vk.h b/host/libs/graphics_detector/graphics_detector_vk.h
new file mode 100644
index 0000000..1aa4bc3
--- /dev/null
+++ b/host/libs/graphics_detector/graphics_detector_vk.h
@@ -0,0 +1,24 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#pragma once
+
+#include "host/libs/graphics_detector/graphics_detector.h"
+
+namespace cuttlefish {
+
+void PopulateVulkanAvailability(GraphicsAvailability* availability);
+
+}  // namespace cuttlefish
diff --git a/host/libs/graphics_detector/graphics_detector_vk_precision_qualifiers_on_yuv_samplers.cpp b/host/libs/graphics_detector/graphics_detector_vk_precision_qualifiers_on_yuv_samplers.cpp
new file mode 100644
index 0000000..92f816f
--- /dev/null
+++ b/host/libs/graphics_detector/graphics_detector_vk_precision_qualifiers_on_yuv_samplers.cpp
@@ -0,0 +1,488 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/libs/graphics_detector/graphics_detector_vk_precision_qualifiers_on_yuv_samplers.h"
+
+#include <vector>
+
+#include <android-base/logging.h>
+
+#include "host/libs/graphics_detector/img.h"
+#include "host/libs/graphics_detector/subprocess.h"
+#include "host/libs/graphics_detector/vk.h"
+
+namespace cuttlefish {
+namespace {
+
+// kBlitTextureVert
+#include "host/libs/graphics_detector/shaders/blit_texture.vert.inl"
+// kBlitTextureFrag
+#include "host/libs/graphics_detector/shaders/blit_texture.frag.inl"
+// kBlitTextureLowpFrag
+#include "host/libs/graphics_detector/shaders/blit_texture_lowp.frag.inl"
+// kBlitTextureMediumpFrag
+#include "host/libs/graphics_detector/shaders/blit_texture_mediump.frag.inl"
+// kBlitTextureHighpFrag
+#include "host/libs/graphics_detector/shaders/blit_texture_highp.frag.inl"
+
+vk::Result CanHandlePrecisionQualifierWithYuvSampler(
+    const std::vector<uint8_t>& blit_vert_shader_spirv,
+    const std::vector<uint8_t>& blit_frag_shader_spirv, bool* out_passed_test) {
+  std::optional<Vk> vk = Vk::Load(
+      /*instance_extensions=*/{},
+      /*instance_layers=*/{},
+      /*device_extensions=*/
+      {
+          VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME,
+      });
+  if (!vk) {
+    LOG(FATAL) << "Failed to load vk";
+  }
+
+  uint32_t texture_width = 32;
+  uint32_t texture_height = 32;
+  std::vector<uint8_t> texture_data_rgba8888;
+  FillWithColor(texture_width, texture_height,
+                /*red=*/0xFF,
+                /*green=*/0x00,
+                /*blue=*/0x00,
+                /*alpha=*/0xFF, &texture_data_rgba8888);
+
+  std::vector<uint8_t> texture_data_yuv420_y;
+  std::vector<uint8_t> texture_data_yuv420_u;
+  std::vector<uint8_t> texture_data_yuv420_v;
+  ConvertRGBA8888ToYUV420(texture_width, texture_height, texture_data_rgba8888,
+                          &texture_data_yuv420_y, &texture_data_yuv420_u,
+                          &texture_data_yuv420_v);
+
+#if 0
+    // Debugging can be easier with a larger image with more details.
+    texture_data_yuv420_y.clear();
+    texture_data_yuv420_u.clear();
+    texture_data_yuv420_v.clear();
+    LoadYUV420FromBitmapFile("custom.bmp",
+                             &texture_width,
+                             &texture_height,
+                             &texture_data_yuv420_y,
+                             &texture_data_yuv420_u,
+                             &texture_data_yuv420_v);
+#endif
+
+  Vk::YuvImageWithMemory sampled_image = VK_EXPECT_RESULT(vk->CreateYuvImage(
+      texture_width, texture_height,
+      vk::ImageUsageFlagBits::eSampled | vk::ImageUsageFlagBits::eTransferDst |
+          vk::ImageUsageFlagBits::eTransferSrc,
+      vk::MemoryPropertyFlagBits::eDeviceLocal,
+      vk::ImageLayout::eTransferDstOptimal));
+
+  VK_ASSERT(vk->LoadYuvImage(
+      sampled_image.image, texture_width, texture_height, texture_data_yuv420_y,
+      texture_data_yuv420_u, texture_data_yuv420_v,
+      /*current_layout=*/vk::ImageLayout::eTransferDstOptimal,
+      /*returned_layout=*/vk::ImageLayout::eShaderReadOnlyOptimal));
+
+  Vk::FramebufferWithAttachments framebuffer =
+      VK_EXPECT_RESULT(vk->CreateFramebuffer(
+          texture_width, texture_height,
+          /*color_attachment_format=*/vk::Format::eR8G8B8A8Unorm));
+
+  const vk::Sampler descriptor_set_0_binding_0_sampler =
+      *sampled_image.image_sampler;
+  const std::vector<vk::DescriptorSetLayoutBinding> descriptor_set_0_bindings =
+      {
+          vk::DescriptorSetLayoutBinding{
+              .binding = 0,
+              .descriptorType = vk::DescriptorType::eCombinedImageSampler,
+              .descriptorCount = 1,
+              .stageFlags = vk::ShaderStageFlagBits::eFragment,
+              .pImmutableSamplers = &descriptor_set_0_binding_0_sampler,
+          },
+      };
+  const vk::DescriptorSetLayoutCreateInfo descriptor_set_0_create_info = {
+      .bindingCount = static_cast<uint32_t>(descriptor_set_0_bindings.size()),
+      .pBindings = descriptor_set_0_bindings.data(),
+  };
+  auto descriptor_set_0_layout =
+      VK_EXPECT_RESULT(vk::raii::DescriptorSetLayout::create(
+          vk->vk_device, descriptor_set_0_create_info));
+
+  const std::vector<vk::DescriptorPoolSize> descriptor_pool_sizes = {
+      vk::DescriptorPoolSize{
+          .type = vk::DescriptorType::eCombinedImageSampler,
+          .descriptorCount = 1,
+      },
+  };
+  const vk::DescriptorPoolCreateInfo descriptor_pool_create_info = {
+      .flags = vk::DescriptorPoolCreateFlagBits::eFreeDescriptorSet,
+      .maxSets = 1,
+      .poolSizeCount = static_cast<uint32_t>(descriptor_pool_sizes.size()),
+      .pPoolSizes = descriptor_pool_sizes.data(),
+  };
+  auto descriptor_set_0_pool =
+      VK_EXPECT_RESULT(vk::raii::DescriptorPool::create(
+          vk->vk_device, descriptor_pool_create_info));
+
+  const vk::DescriptorSetLayout descriptor_set_0_layout_handle =
+      *descriptor_set_0_layout;
+  const vk::DescriptorSetAllocateInfo descriptor_set_allocate_info = {
+      .descriptorPool = *descriptor_set_0_pool,
+      .descriptorSetCount = 1,
+      .pSetLayouts = &descriptor_set_0_layout_handle,
+  };
+  auto descriptor_sets = VK_EXPECT_RESULT(vk::raii::DescriptorSets::create(
+      vk->vk_device, descriptor_set_allocate_info));
+  auto descriptor_set_0(std::move(descriptor_sets[0]));
+
+  const vk::DescriptorImageInfo descriptor_set_0_binding_0_image_info = {
+      .sampler = VK_NULL_HANDLE,
+      .imageView = *sampled_image.image_view,
+      .imageLayout = vk::ImageLayout::eShaderReadOnlyOptimal,
+  };
+  const std::vector<vk::WriteDescriptorSet> descriptor_set_0_writes = {
+      vk::WriteDescriptorSet{
+          .dstSet = *descriptor_set_0,
+          .dstBinding = 0,
+          .dstArrayElement = 0,
+          .descriptorCount = 1,
+          .descriptorType = vk::DescriptorType::eCombinedImageSampler,
+          .pImageInfo = &descriptor_set_0_binding_0_image_info,
+          .pBufferInfo = nullptr,
+          .pTexelBufferView = nullptr,
+      },
+  };
+  vk->vk_device.updateDescriptorSets(descriptor_set_0_writes, {});
+
+  const std::vector<vk::DescriptorSetLayout>
+      pipeline_layout_descriptor_set_layouts = {
+          *descriptor_set_0_layout,
+      };
+  const vk::PipelineLayoutCreateInfo pipeline_layout_create_info = {
+      .setLayoutCount =
+          static_cast<uint32_t>(pipeline_layout_descriptor_set_layouts.size()),
+      .pSetLayouts = pipeline_layout_descriptor_set_layouts.data(),
+  };
+  auto pipeline_layout = VK_EXPECT_RESULT(vk::raii::PipelineLayout::create(
+      vk->vk_device, pipeline_layout_create_info));
+
+  const vk::ShaderModuleCreateInfo vert_shader_create_info = {
+      .codeSize = static_cast<uint32_t>(blit_vert_shader_spirv.size()),
+      .pCode = reinterpret_cast<const uint32_t*>(blit_vert_shader_spirv.data()),
+  };
+  auto vert_shader_module = VK_EXPECT_RESULT(
+      vk::raii::ShaderModule::create(vk->vk_device, vert_shader_create_info));
+
+  const vk::ShaderModuleCreateInfo frag_shader_create_info = {
+      .codeSize = static_cast<uint32_t>(blit_frag_shader_spirv.size()),
+      .pCode = reinterpret_cast<const uint32_t*>(blit_frag_shader_spirv.data()),
+  };
+  auto frag_shader_module = VK_EXPECT_RESULT(
+      vk::raii::ShaderModule::create(vk->vk_device, frag_shader_create_info));
+
+  const std::vector<vk::PipelineShaderStageCreateInfo> pipeline_stages = {
+      vk::PipelineShaderStageCreateInfo{
+          .stage = vk::ShaderStageFlagBits::eVertex,
+          .module = *vert_shader_module,
+          .pName = "main",
+      },
+      vk::PipelineShaderStageCreateInfo{
+          .stage = vk::ShaderStageFlagBits::eFragment,
+          .module = *frag_shader_module,
+          .pName = "main",
+      },
+  };
+  const vk::PipelineVertexInputStateCreateInfo
+      pipeline_vertex_input_state_create_info = {};
+  const vk::PipelineInputAssemblyStateCreateInfo
+      pipeline_input_assembly_state_create_info = {
+          .topology = vk::PrimitiveTopology::eTriangleStrip,
+      };
+  const vk::PipelineViewportStateCreateInfo
+      pipeline_viewport_state_create_info = {
+          .viewportCount = 1,
+          .pViewports = nullptr,
+          .scissorCount = 1,
+          .pScissors = nullptr,
+      };
+  const vk::PipelineRasterizationStateCreateInfo
+      pipeline_rasterization_state_create_info = {
+          .depthClampEnable = VK_FALSE,
+          .rasterizerDiscardEnable = VK_FALSE,
+          .polygonMode = vk::PolygonMode::eFill,
+          .cullMode = {},
+          .frontFace = vk::FrontFace::eCounterClockwise,
+          .depthBiasEnable = VK_FALSE,
+          .depthBiasConstantFactor = 0.0f,
+          .depthBiasClamp = 0.0f,
+          .depthBiasSlopeFactor = 0.0f,
+          .lineWidth = 1.0f,
+      };
+  const vk::SampleMask pipeline_sample_mask = 65535;
+  const vk::PipelineMultisampleStateCreateInfo
+      pipeline_multisample_state_create_info = {
+          .rasterizationSamples = vk::SampleCountFlagBits::e1,
+          .sampleShadingEnable = VK_FALSE,
+          .minSampleShading = 1.0f,
+          .pSampleMask = &pipeline_sample_mask,
+          .alphaToCoverageEnable = VK_FALSE,
+          .alphaToOneEnable = VK_FALSE,
+      };
+  const vk::PipelineDepthStencilStateCreateInfo
+      pipeline_depth_stencil_state_create_info = {
+          .depthTestEnable = VK_FALSE,
+          .depthWriteEnable = VK_FALSE,
+          .depthCompareOp = vk::CompareOp::eLess,
+          .depthBoundsTestEnable = VK_FALSE,
+          .stencilTestEnable = VK_FALSE,
+          .front =
+              {
+                  .failOp = vk::StencilOp::eKeep,
+                  .passOp = vk::StencilOp::eKeep,
+                  .depthFailOp = vk::StencilOp::eKeep,
+                  .compareOp = vk::CompareOp::eAlways,
+                  .compareMask = 0,
+                  .writeMask = 0,
+                  .reference = 0,
+              },
+          .back =
+              {
+                  .failOp = vk::StencilOp::eKeep,
+                  .passOp = vk::StencilOp::eKeep,
+                  .depthFailOp = vk::StencilOp::eKeep,
+                  .compareOp = vk::CompareOp::eAlways,
+                  .compareMask = 0,
+                  .writeMask = 0,
+                  .reference = 0,
+              },
+          .minDepthBounds = 0.0f,
+          .maxDepthBounds = 0.0f,
+      };
+  const std::vector<vk::PipelineColorBlendAttachmentState>
+      pipeline_color_blend_attachments = {
+          vk::PipelineColorBlendAttachmentState{
+              .blendEnable = VK_FALSE,
+              .srcColorBlendFactor = vk::BlendFactor::eOne,
+              .dstColorBlendFactor = vk::BlendFactor::eOneMinusSrcAlpha,
+              .colorBlendOp = vk::BlendOp::eAdd,
+              .srcAlphaBlendFactor = vk::BlendFactor::eOne,
+              .dstAlphaBlendFactor = vk::BlendFactor::eOneMinusSrcAlpha,
+              .alphaBlendOp = vk::BlendOp::eAdd,
+              .colorWriteMask = vk::ColorComponentFlagBits::eR |
+                                vk::ColorComponentFlagBits::eG |
+                                vk::ColorComponentFlagBits::eB |
+                                vk::ColorComponentFlagBits::eA,
+          },
+      };
+  const vk::PipelineColorBlendStateCreateInfo
+      pipeline_color_blend_state_create_info = {
+          .logicOpEnable = VK_FALSE,
+          .logicOp = vk::LogicOp::eCopy,
+          .attachmentCount =
+              static_cast<uint32_t>(pipeline_color_blend_attachments.size()),
+          .pAttachments = pipeline_color_blend_attachments.data(),
+          .blendConstants = {{
+              0.0f,
+              0.0f,
+              0.0f,
+              0.0f,
+          }},
+      };
+  const std::vector<vk::DynamicState> pipeline_dynamic_states = {
+      vk::DynamicState::eViewport,
+      vk::DynamicState::eScissor,
+  };
+  const vk::PipelineDynamicStateCreateInfo pipeline_dynamic_state_create_info =
+      {
+          .dynamicStateCount =
+              static_cast<uint32_t>(pipeline_dynamic_states.size()),
+          .pDynamicStates = pipeline_dynamic_states.data(),
+      };
+  const vk::GraphicsPipelineCreateInfo pipeline_create_info = {
+      .stageCount = static_cast<uint32_t>(pipeline_stages.size()),
+      .pStages = pipeline_stages.data(),
+      .pVertexInputState = &pipeline_vertex_input_state_create_info,
+      .pInputAssemblyState = &pipeline_input_assembly_state_create_info,
+      .pTessellationState = nullptr,
+      .pViewportState = &pipeline_viewport_state_create_info,
+      .pRasterizationState = &pipeline_rasterization_state_create_info,
+      .pMultisampleState = &pipeline_multisample_state_create_info,
+      .pDepthStencilState = &pipeline_depth_stencil_state_create_info,
+      .pColorBlendState = &pipeline_color_blend_state_create_info,
+      .pDynamicState = &pipeline_dynamic_state_create_info,
+      .layout = *pipeline_layout,
+      .renderPass = *framebuffer.renderpass,
+      .subpass = 0,
+      .basePipelineHandle = VK_NULL_HANDLE,
+      .basePipelineIndex = 0,
+  };
+  auto pipeline = VK_EXPECT_RESULT(
+      vk::raii::Pipeline::create(vk->vk_device, nullptr, pipeline_create_info));
+
+  VK_RETURN_IF_NOT_SUCCESS(
+      vk->DoCommandsImmediate([&](vk::raii::CommandBuffer& command_buffer) {
+        const std::vector<vk::ClearValue> render_pass_begin_clear_values = {
+            vk::ClearValue{
+                .color =
+                    {
+                        .float32 = {{
+                            1.0f,
+                            0.0f,
+                            0.0f,
+                            1.0f,
+                        }},
+                    },
+            },
+        };
+        const vk::RenderPassBeginInfo render_pass_begin_info = {
+            .renderPass = *framebuffer.renderpass,
+            .framebuffer = *framebuffer.framebuffer,
+            .renderArea =
+                {
+                    .offset =
+                        {
+                            .x = 0,
+                            .y = 0,
+                        },
+                    .extent =
+                        {
+                            .width = texture_width,
+                            .height = texture_height,
+                        },
+                },
+            .clearValueCount =
+                static_cast<uint32_t>(render_pass_begin_clear_values.size()),
+            .pClearValues = render_pass_begin_clear_values.data(),
+        };
+        command_buffer.beginRenderPass(render_pass_begin_info,
+                                       vk::SubpassContents::eInline);
+
+        command_buffer.bindPipeline(vk::PipelineBindPoint::eGraphics,
+                                    *pipeline);
+
+        command_buffer.bindDescriptorSets(vk::PipelineBindPoint::eGraphics,
+                                          *pipeline_layout,
+                                          /*firstSet=*/0, {*descriptor_set_0},
+                                          /*dynamicOffsets=*/{});
+        const vk::Viewport viewport = {
+            .x = 0.0f,
+            .y = 0.0f,
+            .width = static_cast<float>(texture_width),
+            .height = static_cast<float>(texture_height),
+            .minDepth = 0.0f,
+            .maxDepth = 1.0f,
+        };
+        command_buffer.setViewport(0, {viewport});
+
+        const vk::Rect2D scissor = {
+            .offset =
+                {
+                    .x = 0,
+                    .y = 0,
+                },
+            .extent =
+                {
+                    .width = texture_width,
+                    .height = texture_height,
+                },
+        };
+        command_buffer.setScissor(0, {scissor});
+
+        command_buffer.draw(4, 1, 0, 0);
+
+        command_buffer.endRenderPass();
+        return vk::Result::eSuccess;
+      }));
+
+  std::vector<uint8_t> rendered_pixels;
+  VK_RETURN_IF_NOT_SUCCESS(vk->DownloadImage(
+      texture_width, texture_height, framebuffer.color_attachment->image,
+      vk::ImageLayout::eColorAttachmentOptimal,
+      vk::ImageLayout::eColorAttachmentOptimal, &rendered_pixels));
+#if 0
+    SaveRGBAToBitmapFile(texture_width,
+                         texture_height,
+                         rendered_pixels.data(),
+                         "rendered.bmp");
+#endif
+
+  *out_passed_test = ImagesAreSimilar(texture_width, texture_height,
+                                      texture_data_rgba8888, rendered_pixels);
+  return vk::Result::eSuccess;
+}
+
+void PopulateVulkanPrecisionQualifiersOnYuvSamplersQuirkImpl(
+    GraphicsAvailability* availability) {
+  struct ShaderCombo {
+    std::string name;
+    const std::vector<uint8_t>& vert;
+    const std::vector<uint8_t>& frag;
+  };
+  const std::vector<ShaderCombo> combos = {
+      ShaderCombo{
+          .name = "sampler2D has no precision qualifier",
+          .vert = kBlitTextureVert,
+          .frag = kBlitTextureFrag,
+      },
+      ShaderCombo{
+          .name = "sampler2D has a 'lowp' precision qualifier",
+          .vert = kBlitTextureVert,
+          .frag = kBlitTextureLowpFrag,
+      },
+      ShaderCombo{
+          .name = "sampler2D has a 'mediump' precision qualifier",
+          .vert = kBlitTextureVert,
+          .frag = kBlitTextureMediumpFrag,
+      },
+      ShaderCombo{
+          .name = "sampler2D has a 'highp' precision qualifier",
+          .vert = kBlitTextureVert,
+          .frag = kBlitTextureHighpFrag,
+      },
+  };
+  for (const auto& combo : combos) {
+    bool passed_test = false;
+    auto result = CanHandlePrecisionQualifierWithYuvSampler(
+        combo.vert, combo.frag, &passed_test);
+    if (result != vk::Result::eSuccess) {
+      LOG(ERROR) << "Failed to fully check if driver has issue when "
+                 << combo.name;
+      availability->vulkan_has_issue_with_precision_qualifiers_on_yuv_samplers =
+          true;
+      return;
+    }
+    if (!passed_test) {
+      LOG(ERROR) << "Driver has issue when " << combo.name;
+      availability->vulkan_has_issue_with_precision_qualifiers_on_yuv_samplers =
+          true;
+      return;
+    }
+  }
+}
+
+}  // namespace
+
+void PopulateVulkanPrecisionQualifiersOnYuvSamplersQuirk(
+    GraphicsAvailability* availability) {
+  auto result = DoWithSubprocessCheck(
+      "PopulateVulkanPrecisionQualifiersOnYuvSamplersQuirk", [&]() {
+        PopulateVulkanPrecisionQualifiersOnYuvSamplersQuirkImpl(availability);
+      });
+  if (result == SubprocessResult::kFailure) {
+    availability->vulkan_has_issue_with_precision_qualifiers_on_yuv_samplers =
+        true;
+  }
+}
+
+}  // namespace cuttlefish
\ No newline at end of file
diff --git a/host/libs/graphics_detector/graphics_detector_vk_precision_qualifiers_on_yuv_samplers.h b/host/libs/graphics_detector/graphics_detector_vk_precision_qualifiers_on_yuv_samplers.h
new file mode 100644
index 0000000..45bdb0d
--- /dev/null
+++ b/host/libs/graphics_detector/graphics_detector_vk_precision_qualifiers_on_yuv_samplers.h
@@ -0,0 +1,25 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#pragma once
+
+#include "host/libs/graphics_detector/graphics_detector.h"
+
+namespace cuttlefish {
+
+void PopulateVulkanPrecisionQualifiersOnYuvSamplersQuirk(
+    GraphicsAvailability* availability);
+
+}  // namespace cuttlefish
diff --git a/host/libs/graphics_detector/img.cpp b/host/libs/graphics_detector/img.cpp
new file mode 100644
index 0000000..3901e50
--- /dev/null
+++ b/host/libs/graphics_detector/img.cpp
@@ -0,0 +1,384 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/libs/graphics_detector/img.h"
+
+#include <fstream>
+#include <ostream>
+
+#include <android-base/logging.h>
+
+namespace cuttlefish {
+
+// Loads:
+//   rgba_pixels[0] = R for x:0 y:0
+//   rgba_pixels[1] = G for x:0 y:0
+//   rgba_pixels[2] = B for x:0 y:0
+//   rgba_pixels[3] = A for x:0 y:0
+void LoadRGBAFromBitmapFile(const std::string& filename, uint32_t* out_w,
+                            uint32_t* out_h, std::vector<uint8_t>* out_pixels) {
+  *out_w = 0;
+  *out_h = 0;
+  out_pixels->clear();
+
+  std::ifstream bitmap(filename, std::ofstream::in | std::ofstream::binary);
+  if (!bitmap.is_open()) {
+    LOG(ERROR) << "Failed to open " << filename;
+    return;
+  }
+
+  std::vector<char> bitmap_bytes((std::istreambuf_iterator<char>(bitmap)),
+                                 std::istreambuf_iterator<char>());
+
+  if (bitmap_bytes[0] != 0x42) {
+    LOG(ERROR) << "Invalid bitmap file?";
+    return;
+  }
+  if (bitmap_bytes[1] != 0x4D) {
+    LOG(ERROR) << "Invalid bitmap file?";
+    return;
+  }
+
+  auto ReadUint16AtByte = [&](const uint32_t offset) {
+    return *reinterpret_cast<uint16_t*>(&bitmap_bytes[offset]);
+  };
+  auto ReadUint32AtByte = [&](const uint32_t offset) {
+    return *reinterpret_cast<uint32_t*>(&bitmap_bytes[offset]);
+  };
+
+  uint32_t w = ReadUint32AtByte(18);
+  uint32_t h = ReadUint32AtByte(22);
+  LOG(ERROR) << "Loading " << filename << " w:" << w << " h:" << h;
+
+  uint32_t planes = ReadUint16AtByte(26);
+  if (planes != 1) {
+    LOG(ERROR) << "Unhandled number of planes: " << planes;
+    return;
+  }
+  uint32_t bits_per_pixel = ReadUint16AtByte(28);
+  if (bits_per_pixel != 32) {
+    LOG(ERROR) << "Unhandled number of bpp: " << bits_per_pixel;
+    return;
+  }
+
+  uint32_t r_channel_mask = ReadUint32AtByte(54);
+  uint32_t g_channel_mask = ReadUint32AtByte(58);
+  uint32_t b_channel_mask = ReadUint32AtByte(62);
+  uint32_t a_channel_mask = ReadUint32AtByte(66);
+
+  /*
+  LOG(ERROR) << " r_channel_mask:" << r_channel_mask
+             << " g_channel_mask:" << g_channel_mask
+             << " b_channel_mask:" << b_channel_mask
+             << " a_channel_mask:" << a_channel_mask;
+  */
+
+  *out_w = w;
+  *out_h = h;
+  out_pixels->clear();
+  out_pixels->reserve(w * h * 4);
+
+  uint32_t bitmap_headers_size = ReadUint32AtByte(10);
+  uint32_t bitmap_pixels_offset = bitmap_headers_size;
+
+  auto GetChannel = [](uint32_t pixel, uint32_t channel_mask) {
+    if (channel_mask == 0) {
+      return static_cast<uint8_t>(0xFF);
+    } else if (channel_mask == 0x000000FF) {
+      return static_cast<uint8_t>((pixel & channel_mask) >> 0);
+    } else if (channel_mask == 0x0000FF00) {
+      return static_cast<uint8_t>((pixel & channel_mask) >> 8);
+    } else if (channel_mask == 0x00FF0000) {
+      return static_cast<uint8_t>((pixel & channel_mask) >> 16);
+    } else if (channel_mask == 0xFF000000) {
+      return static_cast<uint8_t>((pixel & channel_mask) >> 24);
+    } else {
+      LOG(FATAL) << "Unhandled channel mask: " << channel_mask;
+      return static_cast<uint8_t>(0);
+    }
+  };
+
+  for (uint32_t y = 0; y < h; y++) {
+    uint32_t flipped_y = h - y - 1;
+    for (uint32_t x = 0; x < w; x++) {
+      uint32_t pixel_offset = (flipped_y * w * 4) + (x * 4);
+      uint32_t pixel = ReadUint32AtByte(bitmap_pixels_offset + pixel_offset);
+
+      uint8_t r = GetChannel(pixel, r_channel_mask);
+      uint8_t g = GetChannel(pixel, g_channel_mask);
+      uint8_t b = GetChannel(pixel, b_channel_mask);
+      uint8_t a = GetChannel(pixel, a_channel_mask);
+
+      out_pixels->push_back(r);
+      out_pixels->push_back(g);
+      out_pixels->push_back(b);
+      out_pixels->push_back(a);
+
+#if 0
+      LOG(ERROR) << " r_channel_mask:" << r_channel_mask
+                 << " g_channel_mask:" << g_channel_mask
+                 << " b_channel_mask:" << b_channel_mask
+                 << " a_channel_mask:" << a_channel_mask
+                 << " pixel:" << pixel;
+#endif
+#if 0
+      LOG(ERROR) << " x:" << x
+                 << " y:" << y
+                 << " r:" << (int)r
+                 << " g:" << (int)g
+                 << " b:" << (int)b
+                 << " a:" << (int)a;
+#endif
+    }
+  }
+}
+
+// Assumes:
+//   rgba_pixels[0] = R for x:0 y:0
+//   rgba_pixels[1] = G for x:0 y:0
+//   rgba_pixels[2] = B for x:0 y:0
+//   rgba_pixels[3] = A for x:0 y:0
+void SaveRGBAToBitmapFile(uint32_t w, uint32_t h, const uint8_t* rgba_pixels,
+                          const std::string& filename) {
+  std::ofstream bitmap(filename, std::ofstream::out | std::ofstream::binary);
+  if (!bitmap.is_open()) {
+    LOG(ERROR) << "Failed to open " << filename;
+    return;
+  }
+
+  static constexpr const uint32_t kBytesPerPixel = 4;
+  uint32_t bitmap_pixels_size = w * h * kBytesPerPixel;
+  uint32_t bitmap_header_size = 14;
+  uint32_t bitmap_dib_header_size = 108;
+  uint32_t bitmap_headers_size = bitmap_header_size + bitmap_dib_header_size;
+  uint32_t bitmap_file_size = bitmap_headers_size + bitmap_pixels_size;
+
+  auto WriteAsBytes = [&](const auto& value) {
+    bitmap.write(reinterpret_cast<const char*>(&value), sizeof(value));
+  };
+  auto WriteCharAsBytes = [&](const char value) { WriteAsBytes(value); };
+  auto WriteUint16AsBytes = [&](const uint16_t value) { WriteAsBytes(value); };
+  auto WriteUint32AsBytes = [&](const uint32_t value) { WriteAsBytes(value); };
+
+  WriteCharAsBytes(0x42);  // "B"
+  WriteCharAsBytes(0x4D);  // "M"
+  WriteUint32AsBytes(bitmap_file_size);
+  WriteCharAsBytes(0);                      // reserved 1
+  WriteCharAsBytes(0);                      // reserved 1
+  WriteCharAsBytes(0);                      // reserved 2
+  WriteCharAsBytes(0);                      // reserved 2
+  WriteUint32AsBytes(bitmap_headers_size);  // offset to actual pixel data
+  WriteUint32AsBytes(bitmap_dib_header_size);
+  WriteUint32AsBytes(w);
+  WriteUint32AsBytes(h);
+  WriteUint16AsBytes(1);                   // number of planes
+  WriteUint16AsBytes(32);                  // bits per pixel
+  WriteUint32AsBytes(0x03);                // compression/format
+  WriteUint32AsBytes(bitmap_pixels_size);  // image size
+  WriteUint32AsBytes(0);                   // horizontal print reset
+  WriteUint32AsBytes(0);                   // vertical print reset
+  WriteUint32AsBytes(0);                   // num_palette_colors
+  WriteUint32AsBytes(0);                   // num_important_colors
+  WriteUint32AsBytes(0x000000FF);          // red channel mask
+  WriteUint32AsBytes(0x0000FF00);          // green channel mask
+  WriteUint32AsBytes(0x00FF0000);          // blue channel mask
+  WriteUint32AsBytes(0xFF000000);          // alpha channel mask
+  WriteUint32AsBytes(0x206e6957);          // "win"
+  for (uint32_t i = 0; i < 36; i++) {
+    WriteCharAsBytes(0);
+  }                       // cie color space
+  WriteUint32AsBytes(0);  // "win"
+  WriteUint32AsBytes(0);  // "win"
+  WriteUint32AsBytes(0);  // "win"
+
+  uint32_t stride_bytes = w * 4;
+  for (uint32_t current_y = 0; current_y < h; current_y++) {
+    uint32_t flipped_y = h - current_y - 1;
+
+    const uint8_t* current_pixel = rgba_pixels + (stride_bytes * flipped_y);
+    for (uint32_t current_x = 0; current_x < w; current_x++) {
+      WriteAsBytes(*current_pixel);
+      ++current_pixel;
+      WriteAsBytes(*current_pixel);
+      ++current_pixel;
+      WriteAsBytes(*current_pixel);
+      ++current_pixel;
+      WriteAsBytes(*current_pixel);
+      ++current_pixel;
+    }
+  }
+
+  bitmap.close();
+  LOG(INFO) << "Saved bitmap to " << filename;
+}
+
+void LoadYUV420FromBitmapFile(const std::string& filename, uint32_t* out_w,
+                              uint32_t* out_h, std::vector<uint8_t>* out_y,
+                              std::vector<uint8_t>* out_u,
+                              std::vector<uint8_t>* out_v) {
+  std::vector<uint8_t> rgba;
+
+  LoadRGBAFromBitmapFile(filename, out_w, out_h, &rgba);
+
+  if (rgba.empty()) return;
+
+  ConvertRGBA8888ToYUV420(*out_w, *out_h, rgba, out_y, out_u, out_v);
+}
+
+void FillWithColor(uint32_t width, uint32_t height, uint8_t red, uint8_t green,
+                   uint8_t blue, uint8_t alpha,
+                   std::vector<uint8_t>* out_pixels) {
+  out_pixels->clear();
+  out_pixels->reserve(width * height * 4);
+  for (uint32_t y = 0; y < height; y++) {
+    for (uint32_t x = 0; x < width; x++) {
+      out_pixels->push_back(red);
+      out_pixels->push_back(green);
+      out_pixels->push_back(blue);
+      out_pixels->push_back(alpha);
+    }
+  }
+}
+
+namespace {
+
+uint8_t Clamp(int x) {
+  if (x > 255) {
+    return 255;
+  }
+  if (x < 0) {
+    return 0;
+  }
+  return static_cast<uint8_t>(x);
+}
+
+// BT.601 with "Studio Swing" / narrow range.
+void ConvertRGBA8888PixelToYUV(const uint8_t r, const uint8_t g,
+                               const uint8_t b, uint8_t* out_y, uint8_t* out_u,
+                               uint8_t* out_v) {
+  const int r_int = static_cast<int>(r);
+  const int g_int = static_cast<int>(g);
+  const int b_int = static_cast<int>(b);
+  *out_y =
+      Clamp((((66 * r_int) + (129 * g_int) + (25 * b_int) + 128) >> 8) + 16);
+  *out_u =
+      Clamp((((-38 * r_int) - (74 * g_int) + (112 * b_int) + 128) >> 8) + 128);
+  *out_v =
+      Clamp((((112 * r_int) - (94 * g_int) - (18 * b_int) + 128) >> 8) + 128);
+}
+
+}  // namespace
+
+void ConvertRGBA8888ToYUV420(uint32_t w, uint32_t h,
+                             const std::vector<uint8_t>& rgba_pixels,
+                             std::vector<uint8_t>* y_pixels,
+                             std::vector<uint8_t>* u_pixels,
+                             std::vector<uint8_t>* v_pixels) {
+  y_pixels->reserve(w * h);
+  u_pixels->reserve((w / 2) * (h / 2));
+  v_pixels->reserve((w / 2) * (h / 2));
+
+  const auto* input = rgba_pixels.data();
+  for (uint32_t y = 0; y < h; y++) {
+    for (uint32_t x = 0; x < w; x++) {
+      const uint8_t r = *input;
+      ++input;
+      const uint8_t g = *input;
+      ++input;
+      const uint8_t b = *input;
+      ++input;
+      // const uint8_t a = *input;
+      ++input;
+
+      uint8_t pixel_y;
+      uint8_t pixel_u;
+      uint8_t pixel_v;
+      ConvertRGBA8888PixelToYUV(r, g, b, &pixel_y, &pixel_u, &pixel_v);
+
+      y_pixels->push_back(pixel_y);
+      if ((x % 2 == 0) && (y % 2 == 0)) {
+        u_pixels->push_back(pixel_u);
+        v_pixels->push_back(pixel_v);
+      }
+    }
+  }
+}
+
+namespace {
+
+bool PixelsAreSimilar(uint32_t pixel1, uint32_t pixel2) {
+  const uint8_t* pixel1_rgba = reinterpret_cast<const uint8_t*>(&pixel1);
+  const uint8_t* pixel2_rgba = reinterpret_cast<const uint8_t*>(&pixel2);
+
+  constexpr const uint32_t kDefaultTolerance = 2;
+  for (uint32_t channel = 0; channel < 4; channel++) {
+    const uint8_t pixel1_channel = pixel1_rgba[channel];
+    const uint8_t pixel2_channel = pixel2_rgba[channel];
+    if ((std::max(pixel1_channel, pixel2_channel) -
+         std::min(pixel1_channel, pixel2_channel)) > kDefaultTolerance) {
+      return false;
+    }
+  }
+  return true;
+}
+
+}  // namespace
+
+bool ImagesAreSimilar(uint32_t width, uint32_t height,
+                      const std::vector<uint8_t>& image1_rgba8888,
+                      const std::vector<uint8_t>& image2_rgba8888) {
+  bool images_are_similar = true;
+
+  uint32_t reported_incorrect_pixels = 0;
+  constexpr const uint32_t kMaxReportedIncorrectPixels = 10;
+
+  const uint32_t* image1_pixels =
+      reinterpret_cast<const uint32_t*>(image1_rgba8888.data());
+  const uint32_t* image2_pixels =
+      reinterpret_cast<const uint32_t*>(image2_rgba8888.data());
+
+  for (uint32_t y = 0; y < width; y++) {
+    for (uint32_t x = 0; x < height; x++) {
+      const uint32_t image1_pixel = image1_pixels[y * height + x];
+      const uint32_t image2_pixel = image2_pixels[y * height + x];
+      if (!PixelsAreSimilar(image1_pixel, image2_pixel)) {
+        images_are_similar = false;
+        if (reported_incorrect_pixels < kMaxReportedIncorrectPixels) {
+          reported_incorrect_pixels++;
+          const uint8_t* image1_pixel_rgba =
+              reinterpret_cast<const uint8_t*>(&image1_pixel);
+          const uint8_t* image2_pixel_rgba =
+              reinterpret_cast<const uint8_t*>(&image2_pixel);
+          LOG(ERROR) << "Pixel comparison failed at (" << x << ", " << y << ") "
+                     << " with "
+                     << " r:" << static_cast<int>(image1_pixel_rgba[0])
+                     << " g:" << static_cast<int>(image1_pixel_rgba[1])
+                     << " b:" << static_cast<int>(image1_pixel_rgba[2])
+                     << " a:" << static_cast<int>(image1_pixel_rgba[3])
+                     << " versus "
+                     << " r:" << static_cast<int>(image2_pixel_rgba[0])
+                     << " g:" << static_cast<int>(image2_pixel_rgba[1])
+                     << " b:" << static_cast<int>(image2_pixel_rgba[2])
+                     << " a:" << static_cast<int>(image2_pixel_rgba[3]);
+        }
+      }
+    }
+  }
+
+  return images_are_similar;
+}
+
+}  // namespace cuttlefish
\ No newline at end of file
diff --git a/host/libs/graphics_detector/img.h b/host/libs/graphics_detector/img.h
new file mode 100644
index 0000000..bc012e8
--- /dev/null
+++ b/host/libs/graphics_detector/img.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#pragma once
+
+#include <string>
+#include <vector>
+
+namespace cuttlefish {
+
+void LoadRGBAFromBitmapFile(const std::string& filename, uint32_t* out_w,
+                            uint32_t* out_h, std::vector<uint8_t>* out_pixels);
+
+void SaveRGBAToBitmapFile(uint32_t w, uint32_t h, const uint8_t* rgba_pixels,
+                          const std::string& filename = "");
+
+void LoadYUV420FromBitmapFile(const std::string& filename, uint32_t* out_w,
+                              uint32_t* out_h, std::vector<uint8_t>* out_y,
+                              std::vector<uint8_t>* out_u,
+                              std::vector<uint8_t>* out_v);
+
+void FillWithColor(uint32_t width, uint32_t height, uint8_t red, uint8_t green,
+                   uint8_t blue, uint8_t alpha,
+                   std::vector<uint8_t>* out_pixels);
+
+void ConvertRGBA8888ToYUV420(uint32_t width, uint32_t height,
+                             const std::vector<uint8_t>& rgba_pixels,
+                             std::vector<uint8_t>* out_y_pixels,
+                             std::vector<uint8_t>* out_u_pixels,
+                             std::vector<uint8_t>* out_v_pixels);
+
+bool ImagesAreSimilar(uint32_t width, uint32_t height,
+                      const std::vector<uint8_t>& image1_rgba8888,
+                      const std::vector<uint8_t>& image2_rgba8888);
+
+}  // namespace cuttlefish
diff --git a/host/libs/graphics_detector/include/vk_video/vulkan_video_codec_h264std.h b/host/libs/graphics_detector/include/vk_video/vulkan_video_codec_h264std.h
new file mode 100644
index 0000000..d3ebec6
--- /dev/null
+++ b/host/libs/graphics_detector/include/vk_video/vulkan_video_codec_h264std.h
@@ -0,0 +1,310 @@
+#ifndef VULKAN_VIDEO_CODEC_H264STD_H_
+#define VULKAN_VIDEO_CODEC_H264STD_H_ 1
+
+/*
+** Copyright 2015-2022 The Khronos Group Inc.
+**
+** SPDX-License-Identifier: Apache-2.0
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+#define vulkan_video_codec_h264std 1
+#include <stdint.h>
+#define STD_VIDEO_H264_CPB_CNT_LIST_SIZE  32
+#define STD_VIDEO_H264_SCALING_LIST_4X4_NUM_LISTS 6
+#define STD_VIDEO_H264_SCALING_LIST_4X4_NUM_ELEMENTS 16
+#define STD_VIDEO_H264_SCALING_LIST_8X8_NUM_LISTS 6
+#define STD_VIDEO_H264_SCALING_LIST_8X8_NUM_ELEMENTS 64
+#define STD_VIDEO_H264_MAX_NUM_LIST_REF   32
+#define STD_VIDEO_H264_MAX_CHROMA_PLANES  2
+
+typedef enum StdVideoH264ChromaFormatIdc {
+    STD_VIDEO_H264_CHROMA_FORMAT_IDC_MONOCHROME = 0,
+    STD_VIDEO_H264_CHROMA_FORMAT_IDC_420 = 1,
+    STD_VIDEO_H264_CHROMA_FORMAT_IDC_422 = 2,
+    STD_VIDEO_H264_CHROMA_FORMAT_IDC_444 = 3,
+    STD_VIDEO_H264_CHROMA_FORMAT_IDC_INVALID = 0x7FFFFFFF,
+    STD_VIDEO_H264_CHROMA_FORMAT_IDC_MAX_ENUM = 0x7FFFFFFF
+} StdVideoH264ChromaFormatIdc;
+
+typedef enum StdVideoH264ProfileIdc {
+    STD_VIDEO_H264_PROFILE_IDC_BASELINE = 66,
+    STD_VIDEO_H264_PROFILE_IDC_MAIN = 77,
+    STD_VIDEO_H264_PROFILE_IDC_HIGH = 100,
+    STD_VIDEO_H264_PROFILE_IDC_HIGH_444_PREDICTIVE = 244,
+    STD_VIDEO_H264_PROFILE_IDC_INVALID = 0x7FFFFFFF,
+    STD_VIDEO_H264_PROFILE_IDC_MAX_ENUM = 0x7FFFFFFF
+} StdVideoH264ProfileIdc;
+
+typedef enum StdVideoH264LevelIdc {
+    STD_VIDEO_H264_LEVEL_IDC_1_0 = 0,
+    STD_VIDEO_H264_LEVEL_IDC_1_1 = 1,
+    STD_VIDEO_H264_LEVEL_IDC_1_2 = 2,
+    STD_VIDEO_H264_LEVEL_IDC_1_3 = 3,
+    STD_VIDEO_H264_LEVEL_IDC_2_0 = 4,
+    STD_VIDEO_H264_LEVEL_IDC_2_1 = 5,
+    STD_VIDEO_H264_LEVEL_IDC_2_2 = 6,
+    STD_VIDEO_H264_LEVEL_IDC_3_0 = 7,
+    STD_VIDEO_H264_LEVEL_IDC_3_1 = 8,
+    STD_VIDEO_H264_LEVEL_IDC_3_2 = 9,
+    STD_VIDEO_H264_LEVEL_IDC_4_0 = 10,
+    STD_VIDEO_H264_LEVEL_IDC_4_1 = 11,
+    STD_VIDEO_H264_LEVEL_IDC_4_2 = 12,
+    STD_VIDEO_H264_LEVEL_IDC_5_0 = 13,
+    STD_VIDEO_H264_LEVEL_IDC_5_1 = 14,
+    STD_VIDEO_H264_LEVEL_IDC_5_2 = 15,
+    STD_VIDEO_H264_LEVEL_IDC_6_0 = 16,
+    STD_VIDEO_H264_LEVEL_IDC_6_1 = 17,
+    STD_VIDEO_H264_LEVEL_IDC_6_2 = 18,
+    STD_VIDEO_H264_LEVEL_IDC_INVALID = 0x7FFFFFFF,
+    STD_VIDEO_H264_LEVEL_IDC_MAX_ENUM = 0x7FFFFFFF
+} StdVideoH264LevelIdc;
+
+typedef enum StdVideoH264PocType {
+    STD_VIDEO_H264_POC_TYPE_0 = 0,
+    STD_VIDEO_H264_POC_TYPE_1 = 1,
+    STD_VIDEO_H264_POC_TYPE_2 = 2,
+    STD_VIDEO_H264_POC_TYPE_INVALID = 0x7FFFFFFF,
+    STD_VIDEO_H264_POC_TYPE_MAX_ENUM = 0x7FFFFFFF
+} StdVideoH264PocType;
+
+typedef enum StdVideoH264AspectRatioIdc {
+    STD_VIDEO_H264_ASPECT_RATIO_IDC_UNSPECIFIED = 0,
+    STD_VIDEO_H264_ASPECT_RATIO_IDC_SQUARE = 1,
+    STD_VIDEO_H264_ASPECT_RATIO_IDC_12_11 = 2,
+    STD_VIDEO_H264_ASPECT_RATIO_IDC_10_11 = 3,
+    STD_VIDEO_H264_ASPECT_RATIO_IDC_16_11 = 4,
+    STD_VIDEO_H264_ASPECT_RATIO_IDC_40_33 = 5,
+    STD_VIDEO_H264_ASPECT_RATIO_IDC_24_11 = 6,
+    STD_VIDEO_H264_ASPECT_RATIO_IDC_20_11 = 7,
+    STD_VIDEO_H264_ASPECT_RATIO_IDC_32_11 = 8,
+    STD_VIDEO_H264_ASPECT_RATIO_IDC_80_33 = 9,
+    STD_VIDEO_H264_ASPECT_RATIO_IDC_18_11 = 10,
+    STD_VIDEO_H264_ASPECT_RATIO_IDC_15_11 = 11,
+    STD_VIDEO_H264_ASPECT_RATIO_IDC_64_33 = 12,
+    STD_VIDEO_H264_ASPECT_RATIO_IDC_160_99 = 13,
+    STD_VIDEO_H264_ASPECT_RATIO_IDC_4_3 = 14,
+    STD_VIDEO_H264_ASPECT_RATIO_IDC_3_2 = 15,
+    STD_VIDEO_H264_ASPECT_RATIO_IDC_2_1 = 16,
+    STD_VIDEO_H264_ASPECT_RATIO_IDC_EXTENDED_SAR = 255,
+    STD_VIDEO_H264_ASPECT_RATIO_IDC_INVALID = 0x7FFFFFFF,
+    STD_VIDEO_H264_ASPECT_RATIO_IDC_MAX_ENUM = 0x7FFFFFFF
+} StdVideoH264AspectRatioIdc;
+
+typedef enum StdVideoH264WeightedBipredIdc {
+    STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_DEFAULT = 0,
+    STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_EXPLICIT = 1,
+    STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_IMPLICIT = 2,
+    STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_INVALID = 0x7FFFFFFF,
+    STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_MAX_ENUM = 0x7FFFFFFF
+} StdVideoH264WeightedBipredIdc;
+
+typedef enum StdVideoH264ModificationOfPicNumsIdc {
+    STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_SHORT_TERM_SUBTRACT = 0,
+    STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_SHORT_TERM_ADD = 1,
+    STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_LONG_TERM = 2,
+    STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_END = 3,
+    STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_INVALID = 0x7FFFFFFF,
+    STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_MAX_ENUM = 0x7FFFFFFF
+} StdVideoH264ModificationOfPicNumsIdc;
+
+typedef enum StdVideoH264MemMgmtControlOp {
+    STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_END = 0,
+    STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_UNMARK_SHORT_TERM = 1,
+    STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_UNMARK_LONG_TERM = 2,
+    STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_MARK_LONG_TERM = 3,
+    STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_SET_MAX_LONG_TERM_INDEX = 4,
+    STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_UNMARK_ALL = 5,
+    STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_MARK_CURRENT_AS_LONG_TERM = 6,
+    STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_INVALID = 0x7FFFFFFF,
+    STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_MAX_ENUM = 0x7FFFFFFF
+} StdVideoH264MemMgmtControlOp;
+
+typedef enum StdVideoH264CabacInitIdc {
+    STD_VIDEO_H264_CABAC_INIT_IDC_0 = 0,
+    STD_VIDEO_H264_CABAC_INIT_IDC_1 = 1,
+    STD_VIDEO_H264_CABAC_INIT_IDC_2 = 2,
+    STD_VIDEO_H264_CABAC_INIT_IDC_INVALID = 0x7FFFFFFF,
+    STD_VIDEO_H264_CABAC_INIT_IDC_MAX_ENUM = 0x7FFFFFFF
+} StdVideoH264CabacInitIdc;
+
+typedef enum StdVideoH264DisableDeblockingFilterIdc {
+    STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_DISABLED = 0,
+    STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_ENABLED = 1,
+    STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_PARTIAL = 2,
+    STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_INVALID = 0x7FFFFFFF,
+    STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_MAX_ENUM = 0x7FFFFFFF
+} StdVideoH264DisableDeblockingFilterIdc;
+
+typedef enum StdVideoH264SliceType {
+    STD_VIDEO_H264_SLICE_TYPE_P = 0,
+    STD_VIDEO_H264_SLICE_TYPE_B = 1,
+    STD_VIDEO_H264_SLICE_TYPE_I = 2,
+    STD_VIDEO_H264_SLICE_TYPE_INVALID = 0x7FFFFFFF,
+    STD_VIDEO_H264_SLICE_TYPE_MAX_ENUM = 0x7FFFFFFF
+} StdVideoH264SliceType;
+
+typedef enum StdVideoH264PictureType {
+    STD_VIDEO_H264_PICTURE_TYPE_P = 0,
+    STD_VIDEO_H264_PICTURE_TYPE_B = 1,
+    STD_VIDEO_H264_PICTURE_TYPE_I = 2,
+    STD_VIDEO_H264_PICTURE_TYPE_IDR = 5,
+    STD_VIDEO_H264_PICTURE_TYPE_INVALID = 0x7FFFFFFF,
+    STD_VIDEO_H264_PICTURE_TYPE_MAX_ENUM = 0x7FFFFFFF
+} StdVideoH264PictureType;
+
+typedef enum StdVideoH264NonVclNaluType {
+    STD_VIDEO_H264_NON_VCL_NALU_TYPE_SPS = 0,
+    STD_VIDEO_H264_NON_VCL_NALU_TYPE_PPS = 1,
+    STD_VIDEO_H264_NON_VCL_NALU_TYPE_AUD = 2,
+    STD_VIDEO_H264_NON_VCL_NALU_TYPE_PREFIX = 3,
+    STD_VIDEO_H264_NON_VCL_NALU_TYPE_END_OF_SEQUENCE = 4,
+    STD_VIDEO_H264_NON_VCL_NALU_TYPE_END_OF_STREAM = 5,
+    STD_VIDEO_H264_NON_VCL_NALU_TYPE_PRECODED = 6,
+    STD_VIDEO_H264_NON_VCL_NALU_TYPE_INVALID = 0x7FFFFFFF,
+    STD_VIDEO_H264_NON_VCL_NALU_TYPE_MAX_ENUM = 0x7FFFFFFF
+} StdVideoH264NonVclNaluType;
+typedef struct StdVideoH264SpsVuiFlags {
+    uint32_t    aspect_ratio_info_present_flag : 1;
+    uint32_t    overscan_info_present_flag : 1;
+    uint32_t    overscan_appropriate_flag : 1;
+    uint32_t    video_signal_type_present_flag : 1;
+    uint32_t    video_full_range_flag : 1;
+    uint32_t    color_description_present_flag : 1;
+    uint32_t    chroma_loc_info_present_flag : 1;
+    uint32_t    timing_info_present_flag : 1;
+    uint32_t    fixed_frame_rate_flag : 1;
+    uint32_t    bitstream_restriction_flag : 1;
+    uint32_t    nal_hrd_parameters_present_flag : 1;
+    uint32_t    vcl_hrd_parameters_present_flag : 1;
+} StdVideoH264SpsVuiFlags;
+
+typedef struct StdVideoH264HrdParameters {
+    uint8_t     cpb_cnt_minus1;
+    uint8_t     bit_rate_scale;
+    uint8_t     cpb_size_scale;
+    uint8_t     reserved1;
+    uint32_t    bit_rate_value_minus1[STD_VIDEO_H264_CPB_CNT_LIST_SIZE];
+    uint32_t    cpb_size_value_minus1[STD_VIDEO_H264_CPB_CNT_LIST_SIZE];
+    uint8_t     cbr_flag[STD_VIDEO_H264_CPB_CNT_LIST_SIZE];
+    uint32_t    initial_cpb_removal_delay_length_minus1;
+    uint32_t    cpb_removal_delay_length_minus1;
+    uint32_t    dpb_output_delay_length_minus1;
+    uint32_t    time_offset_length;
+} StdVideoH264HrdParameters;
+
+typedef struct StdVideoH264SequenceParameterSetVui {
+    StdVideoH264SpsVuiFlags             flags;
+    StdVideoH264AspectRatioIdc          aspect_ratio_idc;
+    uint16_t                            sar_width;
+    uint16_t                            sar_height;
+    uint8_t                             video_format;
+    uint8_t                             colour_primaries;
+    uint8_t                             transfer_characteristics;
+    uint8_t                             matrix_coefficients;
+    uint32_t                            num_units_in_tick;
+    uint32_t                            time_scale;
+    uint8_t                             max_num_reorder_frames;
+    uint8_t                             max_dec_frame_buffering;
+    uint8_t                             chroma_sample_loc_type_top_field;
+    uint8_t                             chroma_sample_loc_type_bottom_field;
+    uint32_t                            reserved1;
+    const StdVideoH264HrdParameters*    pHrdParameters;
+} StdVideoH264SequenceParameterSetVui;
+
+typedef struct StdVideoH264SpsFlags {
+    uint32_t    constraint_set0_flag : 1;
+    uint32_t    constraint_set1_flag : 1;
+    uint32_t    constraint_set2_flag : 1;
+    uint32_t    constraint_set3_flag : 1;
+    uint32_t    constraint_set4_flag : 1;
+    uint32_t    constraint_set5_flag : 1;
+    uint32_t    direct_8x8_inference_flag : 1;
+    uint32_t    mb_adaptive_frame_field_flag : 1;
+    uint32_t    frame_mbs_only_flag : 1;
+    uint32_t    delta_pic_order_always_zero_flag : 1;
+    uint32_t    separate_colour_plane_flag : 1;
+    uint32_t    gaps_in_frame_num_value_allowed_flag : 1;
+    uint32_t    qpprime_y_zero_transform_bypass_flag : 1;
+    uint32_t    frame_cropping_flag : 1;
+    uint32_t    seq_scaling_matrix_present_flag : 1;
+    uint32_t    vui_parameters_present_flag : 1;
+} StdVideoH264SpsFlags;
+
+typedef struct StdVideoH264ScalingLists {
+    uint16_t    scaling_list_present_mask;
+    uint16_t    use_default_scaling_matrix_mask;
+    uint8_t     ScalingList4x4[STD_VIDEO_H264_SCALING_LIST_4X4_NUM_LISTS][STD_VIDEO_H264_SCALING_LIST_4X4_NUM_ELEMENTS];
+    uint8_t     ScalingList8x8[STD_VIDEO_H264_SCALING_LIST_8X8_NUM_LISTS][STD_VIDEO_H264_SCALING_LIST_8X8_NUM_ELEMENTS];
+} StdVideoH264ScalingLists;
+
+typedef struct StdVideoH264SequenceParameterSet {
+    StdVideoH264SpsFlags                          flags;
+    StdVideoH264ProfileIdc                        profile_idc;
+    StdVideoH264LevelIdc                          level_idc;
+    StdVideoH264ChromaFormatIdc                   chroma_format_idc;
+    uint8_t                                       seq_parameter_set_id;
+    uint8_t                                       bit_depth_luma_minus8;
+    uint8_t                                       bit_depth_chroma_minus8;
+    uint8_t                                       log2_max_frame_num_minus4;
+    StdVideoH264PocType                           pic_order_cnt_type;
+    int32_t                                       offset_for_non_ref_pic;
+    int32_t                                       offset_for_top_to_bottom_field;
+    uint8_t                                       log2_max_pic_order_cnt_lsb_minus4;
+    uint8_t                                       num_ref_frames_in_pic_order_cnt_cycle;
+    uint8_t                                       max_num_ref_frames;
+    uint8_t                                       reserved1;
+    uint32_t                                      pic_width_in_mbs_minus1;
+    uint32_t                                      pic_height_in_map_units_minus1;
+    uint32_t                                      frame_crop_left_offset;
+    uint32_t                                      frame_crop_right_offset;
+    uint32_t                                      frame_crop_top_offset;
+    uint32_t                                      frame_crop_bottom_offset;
+    uint32_t                                      reserved2;
+    const int32_t*                                pOffsetForRefFrame;
+    const StdVideoH264ScalingLists*               pScalingLists;
+    const StdVideoH264SequenceParameterSetVui*    pSequenceParameterSetVui;
+} StdVideoH264SequenceParameterSet;
+
+typedef struct StdVideoH264PpsFlags {
+    uint32_t    transform_8x8_mode_flag : 1;
+    uint32_t    redundant_pic_cnt_present_flag : 1;
+    uint32_t    constrained_intra_pred_flag : 1;
+    uint32_t    deblocking_filter_control_present_flag : 1;
+    uint32_t    weighted_pred_flag : 1;
+    uint32_t    bottom_field_pic_order_in_frame_present_flag : 1;
+    uint32_t    entropy_coding_mode_flag : 1;
+    uint32_t    pic_scaling_matrix_present_flag : 1;
+} StdVideoH264PpsFlags;
+
+typedef struct StdVideoH264PictureParameterSet {
+    StdVideoH264PpsFlags               flags;
+    uint8_t                            seq_parameter_set_id;
+    uint8_t                            pic_parameter_set_id;
+    uint8_t                            num_ref_idx_l0_default_active_minus1;
+    uint8_t                            num_ref_idx_l1_default_active_minus1;
+    StdVideoH264WeightedBipredIdc      weighted_bipred_idc;
+    int8_t                             pic_init_qp_minus26;
+    int8_t                             pic_init_qs_minus26;
+    int8_t                             chroma_qp_index_offset;
+    int8_t                             second_chroma_qp_index_offset;
+    const StdVideoH264ScalingLists*    pScalingLists;
+} StdVideoH264PictureParameterSet;
+
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/host/libs/graphics_detector/include/vk_video/vulkan_video_codec_h264std_decode.h b/host/libs/graphics_detector/include/vk_video/vulkan_video_codec_h264std_decode.h
new file mode 100644
index 0000000..b1e7942
--- /dev/null
+++ b/host/libs/graphics_detector/include/vk_video/vulkan_video_codec_h264std_decode.h
@@ -0,0 +1,75 @@
+#ifndef VULKAN_VIDEO_CODEC_H264STD_DECODE_H_
+#define VULKAN_VIDEO_CODEC_H264STD_DECODE_H_ 1
+
+/*
+** Copyright 2015-2022 The Khronos Group Inc.
+**
+** SPDX-License-Identifier: Apache-2.0
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+#define vulkan_video_codec_h264std_decode 1
+
+#define VK_STD_VULKAN_VIDEO_CODEC_H264_DECODE_API_VERSION_1_0_0 VK_MAKE_VIDEO_STD_VERSION(1, 0, 0)
+
+#define STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_LIST_SIZE 2
+#define VK_STD_VULKAN_VIDEO_CODEC_H264_DECODE_SPEC_VERSION VK_STD_VULKAN_VIDEO_CODEC_H264_DECODE_API_VERSION_1_0_0
+#define VK_STD_VULKAN_VIDEO_CODEC_H264_DECODE_EXTENSION_NAME "VK_STD_vulkan_video_codec_h264_decode"
+
+typedef enum StdVideoDecodeH264FieldOrderCount {
+    STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_TOP = 0,
+    STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_BOTTOM = 1,
+    STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_INVALID = 0x7FFFFFFF,
+    STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_MAX_ENUM = 0x7FFFFFFF
+} StdVideoDecodeH264FieldOrderCount;
+typedef struct StdVideoDecodeH264PictureInfoFlags {
+    uint32_t    field_pic_flag : 1;
+    uint32_t    is_intra : 1;
+    uint32_t    IdrPicFlag : 1;
+    uint32_t    bottom_field_flag : 1;
+    uint32_t    is_reference : 1;
+    uint32_t    complementary_field_pair : 1;
+} StdVideoDecodeH264PictureInfoFlags;
+
+typedef struct StdVideoDecodeH264PictureInfo {
+    StdVideoDecodeH264PictureInfoFlags    flags;
+    uint8_t                               seq_parameter_set_id;
+    uint8_t                               pic_parameter_set_id;
+    uint8_t                               reserved1;
+    uint8_t                               reserved2;
+    uint16_t                              frame_num;
+    uint16_t                              idr_pic_id;
+    int32_t                               PicOrderCnt[STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_LIST_SIZE];
+} StdVideoDecodeH264PictureInfo;
+
+typedef struct StdVideoDecodeH264ReferenceInfoFlags {
+    uint32_t    top_field_flag : 1;
+    uint32_t    bottom_field_flag : 1;
+    uint32_t    used_for_long_term_reference : 1;
+    uint32_t    is_non_existing : 1;
+} StdVideoDecodeH264ReferenceInfoFlags;
+
+typedef struct StdVideoDecodeH264ReferenceInfo {
+    StdVideoDecodeH264ReferenceInfoFlags    flags;
+    uint16_t                                FrameNum;
+    uint16_t                                reserved;
+    int32_t                                 PicOrderCnt[STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_LIST_SIZE];
+} StdVideoDecodeH264ReferenceInfo;
+
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/host/libs/graphics_detector/include/vk_video/vulkan_video_codec_h264std_encode.h b/host/libs/graphics_detector/include/vk_video/vulkan_video_codec_h264std_encode.h
new file mode 100644
index 0000000..7bd96aa
--- /dev/null
+++ b/host/libs/graphics_detector/include/vk_video/vulkan_video_codec_h264std_encode.h
@@ -0,0 +1,132 @@
+#ifndef VULKAN_VIDEO_CODEC_H264STD_ENCODE_H_
+#define VULKAN_VIDEO_CODEC_H264STD_ENCODE_H_ 1
+
+/*
+** Copyright 2015-2022 The Khronos Group Inc.
+**
+** SPDX-License-Identifier: Apache-2.0
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+#define vulkan_video_codec_h264std_encode 1
+// Vulkan 0.9 provisional Vulkan video H.264 encode std specification version number
+#define VK_STD_VULKAN_VIDEO_CODEC_H264_ENCODE_API_VERSION_0_9_8 VK_MAKE_VIDEO_STD_VERSION(0, 9, 8)
+
+#define VK_STD_VULKAN_VIDEO_CODEC_H264_ENCODE_SPEC_VERSION VK_STD_VULKAN_VIDEO_CODEC_H264_ENCODE_API_VERSION_0_9_8
+#define VK_STD_VULKAN_VIDEO_CODEC_H264_ENCODE_EXTENSION_NAME "VK_STD_vulkan_video_codec_h264_encode"
+typedef struct StdVideoEncodeH264WeightTableFlags {
+    uint32_t    luma_weight_l0_flag;
+    uint32_t    chroma_weight_l0_flag;
+    uint32_t    luma_weight_l1_flag;
+    uint32_t    chroma_weight_l1_flag;
+} StdVideoEncodeH264WeightTableFlags;
+
+typedef struct StdVideoEncodeH264WeightTable {
+    StdVideoEncodeH264WeightTableFlags    flags;
+    uint8_t                               luma_log2_weight_denom;
+    uint8_t                               chroma_log2_weight_denom;
+    int8_t                                luma_weight_l0[STD_VIDEO_H264_MAX_NUM_LIST_REF];
+    int8_t                                luma_offset_l0[STD_VIDEO_H264_MAX_NUM_LIST_REF];
+    int8_t                                chroma_weight_l0[STD_VIDEO_H264_MAX_NUM_LIST_REF][STD_VIDEO_H264_MAX_CHROMA_PLANES];
+    int8_t                                chroma_offset_l0[STD_VIDEO_H264_MAX_NUM_LIST_REF][STD_VIDEO_H264_MAX_CHROMA_PLANES];
+    int8_t                                luma_weight_l1[STD_VIDEO_H264_MAX_NUM_LIST_REF];
+    int8_t                                luma_offset_l1[STD_VIDEO_H264_MAX_NUM_LIST_REF];
+    int8_t                                chroma_weight_l1[STD_VIDEO_H264_MAX_NUM_LIST_REF][STD_VIDEO_H264_MAX_CHROMA_PLANES];
+    int8_t                                chroma_offset_l1[STD_VIDEO_H264_MAX_NUM_LIST_REF][STD_VIDEO_H264_MAX_CHROMA_PLANES];
+} StdVideoEncodeH264WeightTable;
+
+typedef struct StdVideoEncodeH264SliceHeaderFlags {
+    uint32_t    direct_spatial_mv_pred_flag : 1;
+    uint32_t    num_ref_idx_active_override_flag : 1;
+    uint32_t    no_output_of_prior_pics_flag : 1;
+    uint32_t    adaptive_ref_pic_marking_mode_flag : 1;
+    uint32_t    no_prior_references_available_flag : 1;
+} StdVideoEncodeH264SliceHeaderFlags;
+
+typedef struct StdVideoEncodeH264PictureInfoFlags {
+    uint32_t    idr_flag : 1;
+    uint32_t    is_reference_flag : 1;
+    uint32_t    used_for_long_term_reference : 1;
+} StdVideoEncodeH264PictureInfoFlags;
+
+typedef struct StdVideoEncodeH264ReferenceInfoFlags {
+    uint32_t    used_for_long_term_reference : 1;
+} StdVideoEncodeH264ReferenceInfoFlags;
+
+typedef struct StdVideoEncodeH264RefMgmtFlags {
+    uint32_t    ref_pic_list_modification_l0_flag : 1;
+    uint32_t    ref_pic_list_modification_l1_flag : 1;
+} StdVideoEncodeH264RefMgmtFlags;
+
+typedef struct StdVideoEncodeH264RefListModEntry {
+    StdVideoH264ModificationOfPicNumsIdc    modification_of_pic_nums_idc;
+    uint16_t                                abs_diff_pic_num_minus1;
+    uint16_t                                long_term_pic_num;
+} StdVideoEncodeH264RefListModEntry;
+
+typedef struct StdVideoEncodeH264RefPicMarkingEntry {
+    StdVideoH264MemMgmtControlOp    operation;
+    uint16_t                        difference_of_pic_nums_minus1;
+    uint16_t                        long_term_pic_num;
+    uint16_t                        long_term_frame_idx;
+    uint16_t                        max_long_term_frame_idx_plus1;
+} StdVideoEncodeH264RefPicMarkingEntry;
+
+typedef struct StdVideoEncodeH264RefMemMgmtCtrlOperations {
+    StdVideoEncodeH264RefMgmtFlags                 flags;
+    uint8_t                                        refList0ModOpCount;
+    const StdVideoEncodeH264RefListModEntry*       pRefList0ModOperations;
+    uint8_t                                        refList1ModOpCount;
+    const StdVideoEncodeH264RefListModEntry*       pRefList1ModOperations;
+    uint8_t                                        refPicMarkingOpCount;
+    const StdVideoEncodeH264RefPicMarkingEntry*    pRefPicMarkingOperations;
+} StdVideoEncodeH264RefMemMgmtCtrlOperations;
+
+typedef struct StdVideoEncodeH264PictureInfo {
+    StdVideoEncodeH264PictureInfoFlags    flags;
+    uint8_t                               seq_parameter_set_id;
+    uint8_t                               pic_parameter_set_id;
+    StdVideoH264PictureType               pictureType;
+    uint32_t                              frame_num;
+    int32_t                               PicOrderCnt;
+} StdVideoEncodeH264PictureInfo;
+
+typedef struct StdVideoEncodeH264ReferenceInfo {
+    StdVideoEncodeH264ReferenceInfoFlags    flags;
+    uint32_t                                FrameNum;
+    int32_t                                 PicOrderCnt;
+    uint16_t                                long_term_pic_num;
+    uint16_t                                long_term_frame_idx;
+} StdVideoEncodeH264ReferenceInfo;
+
+typedef struct StdVideoEncodeH264SliceHeader {
+    StdVideoEncodeH264SliceHeaderFlags        flags;
+    uint32_t                                  first_mb_in_slice;
+    StdVideoH264SliceType                     slice_type;
+    uint16_t                                  idr_pic_id;
+    uint8_t                                   num_ref_idx_l0_active_minus1;
+    uint8_t                                   num_ref_idx_l1_active_minus1;
+    StdVideoH264CabacInitIdc                  cabac_init_idc;
+    StdVideoH264DisableDeblockingFilterIdc    disable_deblocking_filter_idc;
+    int8_t                                    slice_alpha_c0_offset_div2;
+    int8_t                                    slice_beta_offset_div2;
+    const StdVideoEncodeH264WeightTable*      pWeightTable;
+} StdVideoEncodeH264SliceHeader;
+
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/host/libs/graphics_detector/include/vk_video/vulkan_video_codec_h265std.h b/host/libs/graphics_detector/include/vk_video/vulkan_video_codec_h265std.h
new file mode 100644
index 0000000..862f881
--- /dev/null
+++ b/host/libs/graphics_detector/include/vk_video/vulkan_video_codec_h265std.h
@@ -0,0 +1,443 @@
+#ifndef VULKAN_VIDEO_CODEC_H265STD_H_
+#define VULKAN_VIDEO_CODEC_H265STD_H_ 1
+
+/*
+** Copyright 2015-2022 The Khronos Group Inc.
+**
+** SPDX-License-Identifier: Apache-2.0
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+#define vulkan_video_codec_h265std 1
+#define STD_VIDEO_H265_SUBLAYERS_LIST_SIZE 7
+#define STD_VIDEO_H265_CPB_CNT_LIST_SIZE  32
+#define STD_VIDEO_H265_SCALING_LIST_4X4_NUM_LISTS 6
+#define STD_VIDEO_H265_SCALING_LIST_4X4_NUM_ELEMENTS 16
+#define STD_VIDEO_H265_SCALING_LIST_8X8_NUM_LISTS 6
+#define STD_VIDEO_H265_SCALING_LIST_8X8_NUM_ELEMENTS 64
+#define STD_VIDEO_H265_SCALING_LIST_16X16_NUM_LISTS 6
+#define STD_VIDEO_H265_SCALING_LIST_16X16_NUM_ELEMENTS 64
+#define STD_VIDEO_H265_SCALING_LIST_32X32_NUM_LISTS 2
+#define STD_VIDEO_H265_SCALING_LIST_32X32_NUM_ELEMENTS 64
+#define STD_VIDEO_H265_PREDICTOR_PALETTE_COMPONENTS_LIST_SIZE 3
+#define STD_VIDEO_H265_PREDICTOR_PALETTE_COMP_ENTRIES_LIST_SIZE 128
+#define STD_VIDEO_H265_MAX_DPB_SIZE       16
+#define STD_VIDEO_H265_MAX_LONG_TERM_REF_PICS_SPS 32
+#define STD_VIDEO_H265_CHROMA_QP_OFFSET_LIST_SIZE 6
+#define STD_VIDEO_H265_CHROMA_QP_OFFSET_TILE_COLS_LIST_SIZE 19
+#define STD_VIDEO_H265_CHROMA_QP_OFFSET_TILE_ROWS_LIST_SIZE 21
+#define STD_VIDEO_H265_MAX_NUM_LIST_REF   15
+#define STD_VIDEO_H265_MAX_CHROMA_PLANES  2
+#define STD_VIDEO_H265_MAX_SHORT_TERM_REF_PIC_SETS 64
+#define STD_VIDEO_H265_MAX_LONG_TERM_PICS 16
+#define STD_VIDEO_H265_MAX_DELTA_POC      48
+
+typedef enum StdVideoH265ChromaFormatIdc {
+    STD_VIDEO_H265_CHROMA_FORMAT_IDC_MONOCHROME = 0,
+    STD_VIDEO_H265_CHROMA_FORMAT_IDC_420 = 1,
+    STD_VIDEO_H265_CHROMA_FORMAT_IDC_422 = 2,
+    STD_VIDEO_H265_CHROMA_FORMAT_IDC_444 = 3,
+    STD_VIDEO_H265_CHROMA_FORMAT_IDC_INVALID = 0x7FFFFFFF,
+    STD_VIDEO_H265_CHROMA_FORMAT_IDC_MAX_ENUM = 0x7FFFFFFF
+} StdVideoH265ChromaFormatIdc;
+
+typedef enum StdVideoH265ProfileIdc {
+    STD_VIDEO_H265_PROFILE_IDC_MAIN = 1,
+    STD_VIDEO_H265_PROFILE_IDC_MAIN_10 = 2,
+    STD_VIDEO_H265_PROFILE_IDC_MAIN_STILL_PICTURE = 3,
+    STD_VIDEO_H265_PROFILE_IDC_FORMAT_RANGE_EXTENSIONS = 4,
+    STD_VIDEO_H265_PROFILE_IDC_SCC_EXTENSIONS = 9,
+    STD_VIDEO_H265_PROFILE_IDC_INVALID = 0x7FFFFFFF,
+    STD_VIDEO_H265_PROFILE_IDC_MAX_ENUM = 0x7FFFFFFF
+} StdVideoH265ProfileIdc;
+
+typedef enum StdVideoH265LevelIdc {
+    STD_VIDEO_H265_LEVEL_IDC_1_0 = 0,
+    STD_VIDEO_H265_LEVEL_IDC_2_0 = 1,
+    STD_VIDEO_H265_LEVEL_IDC_2_1 = 2,
+    STD_VIDEO_H265_LEVEL_IDC_3_0 = 3,
+    STD_VIDEO_H265_LEVEL_IDC_3_1 = 4,
+    STD_VIDEO_H265_LEVEL_IDC_4_0 = 5,
+    STD_VIDEO_H265_LEVEL_IDC_4_1 = 6,
+    STD_VIDEO_H265_LEVEL_IDC_5_0 = 7,
+    STD_VIDEO_H265_LEVEL_IDC_5_1 = 8,
+    STD_VIDEO_H265_LEVEL_IDC_5_2 = 9,
+    STD_VIDEO_H265_LEVEL_IDC_6_0 = 10,
+    STD_VIDEO_H265_LEVEL_IDC_6_1 = 11,
+    STD_VIDEO_H265_LEVEL_IDC_6_2 = 12,
+    STD_VIDEO_H265_LEVEL_IDC_INVALID = 0x7FFFFFFF,
+    STD_VIDEO_H265_LEVEL_IDC_MAX_ENUM = 0x7FFFFFFF
+} StdVideoH265LevelIdc;
+
+typedef enum StdVideoH265SliceType {
+    STD_VIDEO_H265_SLICE_TYPE_B = 0,
+    STD_VIDEO_H265_SLICE_TYPE_P = 1,
+    STD_VIDEO_H265_SLICE_TYPE_I = 2,
+    STD_VIDEO_H265_SLICE_TYPE_INVALID = 0x7FFFFFFF,
+    STD_VIDEO_H265_SLICE_TYPE_MAX_ENUM = 0x7FFFFFFF
+} StdVideoH265SliceType;
+
+typedef enum StdVideoH265PictureType {
+    STD_VIDEO_H265_PICTURE_TYPE_P = 0,
+    STD_VIDEO_H265_PICTURE_TYPE_B = 1,
+    STD_VIDEO_H265_PICTURE_TYPE_I = 2,
+    STD_VIDEO_H265_PICTURE_TYPE_IDR = 3,
+    STD_VIDEO_H265_PICTURE_TYPE_INVALID = 0x7FFFFFFF,
+    STD_VIDEO_H265_PICTURE_TYPE_MAX_ENUM = 0x7FFFFFFF
+} StdVideoH265PictureType;
+
+typedef enum StdVideoH265AspectRatioIdc {
+    STD_VIDEO_H265_ASPECT_RATIO_IDC_UNSPECIFIED = 0,
+    STD_VIDEO_H265_ASPECT_RATIO_IDC_SQUARE = 1,
+    STD_VIDEO_H265_ASPECT_RATIO_IDC_12_11 = 2,
+    STD_VIDEO_H265_ASPECT_RATIO_IDC_10_11 = 3,
+    STD_VIDEO_H265_ASPECT_RATIO_IDC_16_11 = 4,
+    STD_VIDEO_H265_ASPECT_RATIO_IDC_40_33 = 5,
+    STD_VIDEO_H265_ASPECT_RATIO_IDC_24_11 = 6,
+    STD_VIDEO_H265_ASPECT_RATIO_IDC_20_11 = 7,
+    STD_VIDEO_H265_ASPECT_RATIO_IDC_32_11 = 8,
+    STD_VIDEO_H265_ASPECT_RATIO_IDC_80_33 = 9,
+    STD_VIDEO_H265_ASPECT_RATIO_IDC_18_11 = 10,
+    STD_VIDEO_H265_ASPECT_RATIO_IDC_15_11 = 11,
+    STD_VIDEO_H265_ASPECT_RATIO_IDC_64_33 = 12,
+    STD_VIDEO_H265_ASPECT_RATIO_IDC_160_99 = 13,
+    STD_VIDEO_H265_ASPECT_RATIO_IDC_4_3 = 14,
+    STD_VIDEO_H265_ASPECT_RATIO_IDC_3_2 = 15,
+    STD_VIDEO_H265_ASPECT_RATIO_IDC_2_1 = 16,
+    STD_VIDEO_H265_ASPECT_RATIO_IDC_EXTENDED_SAR = 255,
+    STD_VIDEO_H265_ASPECT_RATIO_IDC_INVALID = 0x7FFFFFFF,
+    STD_VIDEO_H265_ASPECT_RATIO_IDC_MAX_ENUM = 0x7FFFFFFF
+} StdVideoH265AspectRatioIdc;
+typedef struct StdVideoH265DecPicBufMgr {
+    uint32_t    max_latency_increase_plus1[STD_VIDEO_H265_SUBLAYERS_LIST_SIZE];
+    uint8_t     max_dec_pic_buffering_minus1[STD_VIDEO_H265_SUBLAYERS_LIST_SIZE];
+    uint8_t     max_num_reorder_pics[STD_VIDEO_H265_SUBLAYERS_LIST_SIZE];
+} StdVideoH265DecPicBufMgr;
+
+typedef struct StdVideoH265SubLayerHrdParameters {
+    uint32_t    bit_rate_value_minus1[STD_VIDEO_H265_CPB_CNT_LIST_SIZE];
+    uint32_t    cpb_size_value_minus1[STD_VIDEO_H265_CPB_CNT_LIST_SIZE];
+    uint32_t    cpb_size_du_value_minus1[STD_VIDEO_H265_CPB_CNT_LIST_SIZE];
+    uint32_t    bit_rate_du_value_minus1[STD_VIDEO_H265_CPB_CNT_LIST_SIZE];
+    uint32_t    cbr_flag;
+} StdVideoH265SubLayerHrdParameters;
+
+typedef struct StdVideoH265HrdFlags {
+    uint32_t    nal_hrd_parameters_present_flag : 1;
+    uint32_t    vcl_hrd_parameters_present_flag : 1;
+    uint32_t    sub_pic_hrd_params_present_flag : 1;
+    uint32_t    sub_pic_cpb_params_in_pic_timing_sei_flag : 1;
+    uint32_t    fixed_pic_rate_general_flag : 8;
+    uint32_t    fixed_pic_rate_within_cvs_flag : 8;
+    uint32_t    low_delay_hrd_flag : 8;
+} StdVideoH265HrdFlags;
+
+typedef struct StdVideoH265HrdParameters {
+    StdVideoH265HrdFlags                        flags;
+    uint8_t                                     tick_divisor_minus2;
+    uint8_t                                     du_cpb_removal_delay_increment_length_minus1;
+    uint8_t                                     dpb_output_delay_du_length_minus1;
+    uint8_t                                     bit_rate_scale;
+    uint8_t                                     cpb_size_scale;
+    uint8_t                                     cpb_size_du_scale;
+    uint8_t                                     initial_cpb_removal_delay_length_minus1;
+    uint8_t                                     au_cpb_removal_delay_length_minus1;
+    uint8_t                                     dpb_output_delay_length_minus1;
+    uint8_t                                     cpb_cnt_minus1[STD_VIDEO_H265_SUBLAYERS_LIST_SIZE];
+    uint16_t                                    elemental_duration_in_tc_minus1[STD_VIDEO_H265_SUBLAYERS_LIST_SIZE];
+    uint16_t                                    reserved[3];
+    const StdVideoH265SubLayerHrdParameters*    pSubLayerHrdParametersNal;
+    const StdVideoH265SubLayerHrdParameters*    pSubLayerHrdParametersVcl;
+} StdVideoH265HrdParameters;
+
+typedef struct StdVideoH265VpsFlags {
+    uint32_t    vps_temporal_id_nesting_flag : 1;
+    uint32_t    vps_sub_layer_ordering_info_present_flag : 1;
+    uint32_t    vps_timing_info_present_flag : 1;
+    uint32_t    vps_poc_proportional_to_timing_flag : 1;
+} StdVideoH265VpsFlags;
+
+typedef struct StdVideoH265ProfileTierLevelFlags {
+    uint32_t    general_tier_flag : 1;
+    uint32_t    general_progressive_source_flag : 1;
+    uint32_t    general_interlaced_source_flag : 1;
+    uint32_t    general_non_packed_constraint_flag : 1;
+    uint32_t    general_frame_only_constraint_flag : 1;
+} StdVideoH265ProfileTierLevelFlags;
+
+typedef struct StdVideoH265ProfileTierLevel {
+    StdVideoH265ProfileTierLevelFlags    flags;
+    StdVideoH265ProfileIdc               general_profile_idc;
+    StdVideoH265LevelIdc                 general_level_idc;
+} StdVideoH265ProfileTierLevel;
+
+typedef struct StdVideoH265VideoParameterSet {
+    StdVideoH265VpsFlags                   flags;
+    uint8_t                                vps_video_parameter_set_id;
+    uint8_t                                vps_max_sub_layers_minus1;
+    uint8_t                                reserved1;
+    uint8_t                                reserved2;
+    uint32_t                               vps_num_units_in_tick;
+    uint32_t                               vps_time_scale;
+    uint32_t                               vps_num_ticks_poc_diff_one_minus1;
+    uint32_t                               reserved3;
+    const StdVideoH265DecPicBufMgr*        pDecPicBufMgr;
+    const StdVideoH265HrdParameters*       pHrdParameters;
+    const StdVideoH265ProfileTierLevel*    pProfileTierLevel;
+} StdVideoH265VideoParameterSet;
+
+typedef struct StdVideoH265ScalingLists {
+    uint8_t    ScalingList4x4[STD_VIDEO_H265_SCALING_LIST_4X4_NUM_LISTS][STD_VIDEO_H265_SCALING_LIST_4X4_NUM_ELEMENTS];
+    uint8_t    ScalingList8x8[STD_VIDEO_H265_SCALING_LIST_8X8_NUM_LISTS][STD_VIDEO_H265_SCALING_LIST_8X8_NUM_ELEMENTS];
+    uint8_t    ScalingList16x16[STD_VIDEO_H265_SCALING_LIST_16X16_NUM_LISTS][STD_VIDEO_H265_SCALING_LIST_16X16_NUM_ELEMENTS];
+    uint8_t    ScalingList32x32[STD_VIDEO_H265_SCALING_LIST_32X32_NUM_LISTS][STD_VIDEO_H265_SCALING_LIST_32X32_NUM_ELEMENTS];
+    uint8_t    ScalingListDCCoef16x16[STD_VIDEO_H265_SCALING_LIST_16X16_NUM_LISTS];
+    uint8_t    ScalingListDCCoef32x32[STD_VIDEO_H265_SCALING_LIST_32X32_NUM_LISTS];
+} StdVideoH265ScalingLists;
+
+typedef struct StdVideoH265SpsVuiFlags {
+    uint32_t    aspect_ratio_info_present_flag : 1;
+    uint32_t    overscan_info_present_flag : 1;
+    uint32_t    overscan_appropriate_flag : 1;
+    uint32_t    video_signal_type_present_flag : 1;
+    uint32_t    video_full_range_flag : 1;
+    uint32_t    colour_description_present_flag : 1;
+    uint32_t    chroma_loc_info_present_flag : 1;
+    uint32_t    neutral_chroma_indication_flag : 1;
+    uint32_t    field_seq_flag : 1;
+    uint32_t    frame_field_info_present_flag : 1;
+    uint32_t    default_display_window_flag : 1;
+    uint32_t    vui_timing_info_present_flag : 1;
+    uint32_t    vui_poc_proportional_to_timing_flag : 1;
+    uint32_t    vui_hrd_parameters_present_flag : 1;
+    uint32_t    bitstream_restriction_flag : 1;
+    uint32_t    tiles_fixed_structure_flag : 1;
+    uint32_t    motion_vectors_over_pic_boundaries_flag : 1;
+    uint32_t    restricted_ref_pic_lists_flag : 1;
+} StdVideoH265SpsVuiFlags;
+
+typedef struct StdVideoH265SequenceParameterSetVui {
+    StdVideoH265SpsVuiFlags             flags;
+    StdVideoH265AspectRatioIdc          aspect_ratio_idc;
+    uint16_t                            sar_width;
+    uint16_t                            sar_height;
+    uint8_t                             video_format;
+    uint8_t                             colour_primaries;
+    uint8_t                             transfer_characteristics;
+    uint8_t                             matrix_coeffs;
+    uint8_t                             chroma_sample_loc_type_top_field;
+    uint8_t                             chroma_sample_loc_type_bottom_field;
+    uint8_t                             reserved1;
+    uint8_t                             reserved2;
+    uint16_t                            def_disp_win_left_offset;
+    uint16_t                            def_disp_win_right_offset;
+    uint16_t                            def_disp_win_top_offset;
+    uint16_t                            def_disp_win_bottom_offset;
+    uint32_t                            vui_num_units_in_tick;
+    uint32_t                            vui_time_scale;
+    uint32_t                            vui_num_ticks_poc_diff_one_minus1;
+    uint16_t                            min_spatial_segmentation_idc;
+    uint16_t                            reserved3;
+    uint8_t                             max_bytes_per_pic_denom;
+    uint8_t                             max_bits_per_min_cu_denom;
+    uint8_t                             log2_max_mv_length_horizontal;
+    uint8_t                             log2_max_mv_length_vertical;
+    const StdVideoH265HrdParameters*    pHrdParameters;
+} StdVideoH265SequenceParameterSetVui;
+
+typedef struct StdVideoH265PredictorPaletteEntries {
+    uint16_t    PredictorPaletteEntries[STD_VIDEO_H265_PREDICTOR_PALETTE_COMPONENTS_LIST_SIZE][STD_VIDEO_H265_PREDICTOR_PALETTE_COMP_ENTRIES_LIST_SIZE];
+} StdVideoH265PredictorPaletteEntries;
+
+typedef struct StdVideoH265SpsFlags {
+    uint32_t    sps_temporal_id_nesting_flag : 1;
+    uint32_t    separate_colour_plane_flag : 1;
+    uint32_t    conformance_window_flag : 1;
+    uint32_t    sps_sub_layer_ordering_info_present_flag : 1;
+    uint32_t    scaling_list_enabled_flag : 1;
+    uint32_t    sps_scaling_list_data_present_flag : 1;
+    uint32_t    amp_enabled_flag : 1;
+    uint32_t    sample_adaptive_offset_enabled_flag : 1;
+    uint32_t    pcm_enabled_flag : 1;
+    uint32_t    pcm_loop_filter_disabled_flag : 1;
+    uint32_t    long_term_ref_pics_present_flag : 1;
+    uint32_t    sps_temporal_mvp_enabled_flag : 1;
+    uint32_t    strong_intra_smoothing_enabled_flag : 1;
+    uint32_t    vui_parameters_present_flag : 1;
+    uint32_t    sps_extension_present_flag : 1;
+    uint32_t    sps_range_extension_flag : 1;
+    uint32_t    transform_skip_rotation_enabled_flag : 1;
+    uint32_t    transform_skip_context_enabled_flag : 1;
+    uint32_t    implicit_rdpcm_enabled_flag : 1;
+    uint32_t    explicit_rdpcm_enabled_flag : 1;
+    uint32_t    extended_precision_processing_flag : 1;
+    uint32_t    intra_smoothing_disabled_flag : 1;
+    uint32_t    high_precision_offsets_enabled_flag : 1;
+    uint32_t    persistent_rice_adaptation_enabled_flag : 1;
+    uint32_t    cabac_bypass_alignment_enabled_flag : 1;
+    uint32_t    sps_scc_extension_flag : 1;
+    uint32_t    sps_curr_pic_ref_enabled_flag : 1;
+    uint32_t    palette_mode_enabled_flag : 1;
+    uint32_t    sps_palette_predictor_initializers_present_flag : 1;
+    uint32_t    intra_boundary_filtering_disabled_flag : 1;
+} StdVideoH265SpsFlags;
+
+typedef struct StdVideoH265ShortTermRefPicSetFlags {
+    uint32_t    inter_ref_pic_set_prediction_flag : 1;
+    uint32_t    delta_rps_sign : 1;
+} StdVideoH265ShortTermRefPicSetFlags;
+
+typedef struct StdVideoH265ShortTermRefPicSet {
+    StdVideoH265ShortTermRefPicSetFlags    flags;
+    uint32_t                               delta_idx_minus1;
+    uint16_t                               use_delta_flag;
+    uint16_t                               abs_delta_rps_minus1;
+    uint16_t                               used_by_curr_pic_flag;
+    uint16_t                               used_by_curr_pic_s0_flag;
+    uint16_t                               used_by_curr_pic_s1_flag;
+    uint16_t                               reserved1;
+    uint8_t                                reserved2;
+    uint8_t                                reserved3;
+    uint8_t                                num_negative_pics;
+    uint8_t                                num_positive_pics;
+    uint16_t                               delta_poc_s0_minus1[STD_VIDEO_H265_MAX_DPB_SIZE];
+    uint16_t                               delta_poc_s1_minus1[STD_VIDEO_H265_MAX_DPB_SIZE];
+} StdVideoH265ShortTermRefPicSet;
+
+typedef struct StdVideoH265LongTermRefPicsSps {
+    uint32_t    used_by_curr_pic_lt_sps_flag;
+    uint32_t    lt_ref_pic_poc_lsb_sps[STD_VIDEO_H265_MAX_LONG_TERM_REF_PICS_SPS];
+} StdVideoH265LongTermRefPicsSps;
+
+typedef struct StdVideoH265SequenceParameterSet {
+    StdVideoH265SpsFlags                          flags;
+    StdVideoH265ChromaFormatIdc                   chroma_format_idc;
+    uint32_t                                      pic_width_in_luma_samples;
+    uint32_t                                      pic_height_in_luma_samples;
+    uint8_t                                       sps_video_parameter_set_id;
+    uint8_t                                       sps_max_sub_layers_minus1;
+    uint8_t                                       sps_seq_parameter_set_id;
+    uint8_t                                       bit_depth_luma_minus8;
+    uint8_t                                       bit_depth_chroma_minus8;
+    uint8_t                                       log2_max_pic_order_cnt_lsb_minus4;
+    uint8_t                                       log2_min_luma_coding_block_size_minus3;
+    uint8_t                                       log2_diff_max_min_luma_coding_block_size;
+    uint8_t                                       log2_min_luma_transform_block_size_minus2;
+    uint8_t                                       log2_diff_max_min_luma_transform_block_size;
+    uint8_t                                       max_transform_hierarchy_depth_inter;
+    uint8_t                                       max_transform_hierarchy_depth_intra;
+    uint8_t                                       num_short_term_ref_pic_sets;
+    uint8_t                                       num_long_term_ref_pics_sps;
+    uint8_t                                       pcm_sample_bit_depth_luma_minus1;
+    uint8_t                                       pcm_sample_bit_depth_chroma_minus1;
+    uint8_t                                       log2_min_pcm_luma_coding_block_size_minus3;
+    uint8_t                                       log2_diff_max_min_pcm_luma_coding_block_size;
+    uint8_t                                       reserved1;
+    uint8_t                                       reserved2;
+    uint8_t                                       palette_max_size;
+    uint8_t                                       delta_palette_max_predictor_size;
+    uint8_t                                       motion_vector_resolution_control_idc;
+    uint8_t                                       sps_num_palette_predictor_initializers_minus1;
+    uint32_t                                      conf_win_left_offset;
+    uint32_t                                      conf_win_right_offset;
+    uint32_t                                      conf_win_top_offset;
+    uint32_t                                      conf_win_bottom_offset;
+    const StdVideoH265ProfileTierLevel*           pProfileTierLevel;
+    const StdVideoH265DecPicBufMgr*               pDecPicBufMgr;
+    const StdVideoH265ScalingLists*               pScalingLists;
+    const StdVideoH265ShortTermRefPicSet*         pShortTermRefPicSet;
+    const StdVideoH265LongTermRefPicsSps*         pLongTermRefPicsSps;
+    const StdVideoH265SequenceParameterSetVui*    pSequenceParameterSetVui;
+    const StdVideoH265PredictorPaletteEntries*    pPredictorPaletteEntries;
+} StdVideoH265SequenceParameterSet;
+
+typedef struct StdVideoH265PpsFlags {
+    uint32_t    dependent_slice_segments_enabled_flag : 1;
+    uint32_t    output_flag_present_flag : 1;
+    uint32_t    sign_data_hiding_enabled_flag : 1;
+    uint32_t    cabac_init_present_flag : 1;
+    uint32_t    constrained_intra_pred_flag : 1;
+    uint32_t    transform_skip_enabled_flag : 1;
+    uint32_t    cu_qp_delta_enabled_flag : 1;
+    uint32_t    pps_slice_chroma_qp_offsets_present_flag : 1;
+    uint32_t    weighted_pred_flag : 1;
+    uint32_t    weighted_bipred_flag : 1;
+    uint32_t    transquant_bypass_enabled_flag : 1;
+    uint32_t    tiles_enabled_flag : 1;
+    uint32_t    entropy_coding_sync_enabled_flag : 1;
+    uint32_t    uniform_spacing_flag : 1;
+    uint32_t    loop_filter_across_tiles_enabled_flag : 1;
+    uint32_t    pps_loop_filter_across_slices_enabled_flag : 1;
+    uint32_t    deblocking_filter_control_present_flag : 1;
+    uint32_t    deblocking_filter_override_enabled_flag : 1;
+    uint32_t    pps_deblocking_filter_disabled_flag : 1;
+    uint32_t    pps_scaling_list_data_present_flag : 1;
+    uint32_t    lists_modification_present_flag : 1;
+    uint32_t    slice_segment_header_extension_present_flag : 1;
+    uint32_t    pps_extension_present_flag : 1;
+    uint32_t    cross_component_prediction_enabled_flag : 1;
+    uint32_t    chroma_qp_offset_list_enabled_flag : 1;
+    uint32_t    pps_curr_pic_ref_enabled_flag : 1;
+    uint32_t    residual_adaptive_colour_transform_enabled_flag : 1;
+    uint32_t    pps_slice_act_qp_offsets_present_flag : 1;
+    uint32_t    pps_palette_predictor_initializers_present_flag : 1;
+    uint32_t    monochrome_palette_flag : 1;
+    uint32_t    pps_range_extension_flag : 1;
+} StdVideoH265PpsFlags;
+
+typedef struct StdVideoH265PictureParameterSet {
+    StdVideoH265PpsFlags                          flags;
+    uint8_t                                       pps_pic_parameter_set_id;
+    uint8_t                                       pps_seq_parameter_set_id;
+    uint8_t                                       sps_video_parameter_set_id;
+    uint8_t                                       num_extra_slice_header_bits;
+    uint8_t                                       num_ref_idx_l0_default_active_minus1;
+    uint8_t                                       num_ref_idx_l1_default_active_minus1;
+    int8_t                                        init_qp_minus26;
+    uint8_t                                       diff_cu_qp_delta_depth;
+    int8_t                                        pps_cb_qp_offset;
+    int8_t                                        pps_cr_qp_offset;
+    int8_t                                        pps_beta_offset_div2;
+    int8_t                                        pps_tc_offset_div2;
+    uint8_t                                       log2_parallel_merge_level_minus2;
+    uint8_t                                       log2_max_transform_skip_block_size_minus2;
+    uint8_t                                       diff_cu_chroma_qp_offset_depth;
+    uint8_t                                       chroma_qp_offset_list_len_minus1;
+    int8_t                                        cb_qp_offset_list[STD_VIDEO_H265_CHROMA_QP_OFFSET_LIST_SIZE];
+    int8_t                                        cr_qp_offset_list[STD_VIDEO_H265_CHROMA_QP_OFFSET_LIST_SIZE];
+    uint8_t                                       log2_sao_offset_scale_luma;
+    uint8_t                                       log2_sao_offset_scale_chroma;
+    int8_t                                        pps_act_y_qp_offset_plus5;
+    int8_t                                        pps_act_cb_qp_offset_plus5;
+    int8_t                                        pps_act_cr_qp_offset_plus3;
+    uint8_t                                       pps_num_palette_predictor_initializers;
+    uint8_t                                       luma_bit_depth_entry_minus8;
+    uint8_t                                       chroma_bit_depth_entry_minus8;
+    uint8_t                                       num_tile_columns_minus1;
+    uint8_t                                       num_tile_rows_minus1;
+    uint8_t                                       reserved1;
+    uint8_t                                       reserved2;
+    uint16_t                                      column_width_minus1[STD_VIDEO_H265_CHROMA_QP_OFFSET_TILE_COLS_LIST_SIZE];
+    uint16_t                                      row_height_minus1[STD_VIDEO_H265_CHROMA_QP_OFFSET_TILE_ROWS_LIST_SIZE];
+    uint32_t                                      reserved3;
+    const StdVideoH265ScalingLists*               pScalingLists;
+    const StdVideoH265PredictorPaletteEntries*    pPredictorPaletteEntries;
+} StdVideoH265PictureParameterSet;
+
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/host/libs/graphics_detector/include/vk_video/vulkan_video_codec_h265std_decode.h b/host/libs/graphics_detector/include/vk_video/vulkan_video_codec_h265std_decode.h
new file mode 100644
index 0000000..d8660d1
--- /dev/null
+++ b/host/libs/graphics_detector/include/vk_video/vulkan_video_codec_h265std_decode.h
@@ -0,0 +1,65 @@
+#ifndef VULKAN_VIDEO_CODEC_H265STD_DECODE_H_
+#define VULKAN_VIDEO_CODEC_H265STD_DECODE_H_ 1
+
+/*
+** Copyright 2015-2022 The Khronos Group Inc.
+**
+** SPDX-License-Identifier: Apache-2.0
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+#define vulkan_video_codec_h265std_decode 1
+
+#define VK_STD_VULKAN_VIDEO_CODEC_H265_DECODE_API_VERSION_1_0_0 VK_MAKE_VIDEO_STD_VERSION(1, 0, 0)
+
+#define STD_VIDEO_DECODE_H265_REF_PIC_SET_LIST_SIZE 8
+#define VK_STD_VULKAN_VIDEO_CODEC_H265_DECODE_SPEC_VERSION VK_STD_VULKAN_VIDEO_CODEC_H265_DECODE_API_VERSION_1_0_0
+#define VK_STD_VULKAN_VIDEO_CODEC_H265_DECODE_EXTENSION_NAME "VK_STD_vulkan_video_codec_h265_decode"
+typedef struct StdVideoDecodeH265PictureInfoFlags {
+    uint32_t    IrapPicFlag : 1;
+    uint32_t    IdrPicFlag  : 1;
+    uint32_t    IsReference : 1;
+    uint32_t    short_term_ref_pic_set_sps_flag : 1;
+} StdVideoDecodeH265PictureInfoFlags;
+
+typedef struct StdVideoDecodeH265PictureInfo {
+    StdVideoDecodeH265PictureInfoFlags    flags;
+    uint8_t                               sps_video_parameter_set_id;
+    uint8_t                               pps_seq_parameter_set_id;
+    uint8_t                               pps_pic_parameter_set_id;
+    uint8_t                               NumDeltaPocsOfRefRpsIdx;
+    int32_t                               PicOrderCntVal;
+    uint16_t                              NumBitsForSTRefPicSetInSlice;
+    uint16_t                              reserved;
+    uint8_t                               RefPicSetStCurrBefore[STD_VIDEO_DECODE_H265_REF_PIC_SET_LIST_SIZE];
+    uint8_t                               RefPicSetStCurrAfter[STD_VIDEO_DECODE_H265_REF_PIC_SET_LIST_SIZE];
+    uint8_t                               RefPicSetLtCurr[STD_VIDEO_DECODE_H265_REF_PIC_SET_LIST_SIZE];
+} StdVideoDecodeH265PictureInfo;
+
+typedef struct StdVideoDecodeH265ReferenceInfoFlags {
+    uint32_t    used_for_long_term_reference : 1;
+    uint32_t    unused_for_reference : 1;
+} StdVideoDecodeH265ReferenceInfoFlags;
+
+typedef struct StdVideoDecodeH265ReferenceInfo {
+    StdVideoDecodeH265ReferenceInfoFlags    flags;
+    int32_t                                 PicOrderCntVal;
+} StdVideoDecodeH265ReferenceInfo;
+
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/host/libs/graphics_detector/include/vk_video/vulkan_video_codec_h265std_encode.h b/host/libs/graphics_detector/include/vk_video/vulkan_video_codec_h265std_encode.h
new file mode 100644
index 0000000..5a419b1
--- /dev/null
+++ b/host/libs/graphics_detector/include/vk_video/vulkan_video_codec_h265std_encode.h
@@ -0,0 +1,146 @@
+#ifndef VULKAN_VIDEO_CODEC_H265STD_ENCODE_H_
+#define VULKAN_VIDEO_CODEC_H265STD_ENCODE_H_ 1
+
+/*
+** Copyright 2015-2022 The Khronos Group Inc.
+**
+** SPDX-License-Identifier: Apache-2.0
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+#define vulkan_video_codec_h265std_encode 1
+// Vulkan 0.9 provisional Vulkan video H.265 encode std specification version number
+#define VK_STD_VULKAN_VIDEO_CODEC_H265_ENCODE_API_VERSION_0_9_9 VK_MAKE_VIDEO_STD_VERSION(0, 9, 9)
+
+#define VK_STD_VULKAN_VIDEO_CODEC_H265_ENCODE_SPEC_VERSION VK_STD_VULKAN_VIDEO_CODEC_H265_ENCODE_API_VERSION_0_9_9
+#define VK_STD_VULKAN_VIDEO_CODEC_H265_ENCODE_EXTENSION_NAME "VK_STD_vulkan_video_codec_h265_encode"
+typedef struct StdVideoEncodeH265WeightTableFlags {
+    uint16_t    luma_weight_l0_flag;
+    uint16_t    chroma_weight_l0_flag;
+    uint16_t    luma_weight_l1_flag;
+    uint16_t    chroma_weight_l1_flag;
+} StdVideoEncodeH265WeightTableFlags;
+
+typedef struct StdVideoEncodeH265WeightTable {
+    StdVideoEncodeH265WeightTableFlags    flags;
+    uint8_t                               luma_log2_weight_denom;
+    int8_t                                delta_chroma_log2_weight_denom;
+    int8_t                                delta_luma_weight_l0[STD_VIDEO_H265_MAX_NUM_LIST_REF];
+    int8_t                                luma_offset_l0[STD_VIDEO_H265_MAX_NUM_LIST_REF];
+    int8_t                                delta_chroma_weight_l0[STD_VIDEO_H265_MAX_NUM_LIST_REF][STD_VIDEO_H265_MAX_CHROMA_PLANES];
+    int8_t                                delta_chroma_offset_l0[STD_VIDEO_H265_MAX_NUM_LIST_REF][STD_VIDEO_H265_MAX_CHROMA_PLANES];
+    int8_t                                delta_luma_weight_l1[STD_VIDEO_H265_MAX_NUM_LIST_REF];
+    int8_t                                luma_offset_l1[STD_VIDEO_H265_MAX_NUM_LIST_REF];
+    int8_t                                delta_chroma_weight_l1[STD_VIDEO_H265_MAX_NUM_LIST_REF][STD_VIDEO_H265_MAX_CHROMA_PLANES];
+    int8_t                                delta_chroma_offset_l1[STD_VIDEO_H265_MAX_NUM_LIST_REF][STD_VIDEO_H265_MAX_CHROMA_PLANES];
+} StdVideoEncodeH265WeightTable;
+
+typedef struct StdVideoEncodeH265SliceSegmentHeaderFlags {
+    uint32_t    first_slice_segment_in_pic_flag : 1;
+    uint32_t    no_output_of_prior_pics_flag : 1;
+    uint32_t    dependent_slice_segment_flag : 1;
+    uint32_t    pic_output_flag : 1;
+    uint32_t    short_term_ref_pic_set_sps_flag : 1;
+    uint32_t    slice_temporal_mvp_enable_flag : 1;
+    uint32_t    slice_sao_luma_flag : 1;
+    uint32_t    slice_sao_chroma_flag : 1;
+    uint32_t    num_ref_idx_active_override_flag : 1;
+    uint32_t    mvd_l1_zero_flag : 1;
+    uint32_t    cabac_init_flag : 1;
+    uint32_t    cu_chroma_qp_offset_enabled_flag : 1;
+    uint32_t    deblocking_filter_override_flag : 1;
+    uint32_t    slice_deblocking_filter_disabled_flag : 1;
+    uint32_t    collocated_from_l0_flag : 1;
+    uint32_t    slice_loop_filter_across_slices_enabled_flag : 1;
+} StdVideoEncodeH265SliceSegmentHeaderFlags;
+
+typedef struct StdVideoEncodeH265SliceSegmentLongTermRefPics {
+    uint8_t     num_long_term_sps;
+    uint8_t     num_long_term_pics;
+    uint8_t     lt_idx_sps[STD_VIDEO_H265_MAX_LONG_TERM_REF_PICS_SPS];
+    uint8_t     poc_lsb_lt[STD_VIDEO_H265_MAX_LONG_TERM_PICS];
+    uint16_t    used_by_curr_pic_lt_flag;
+    uint8_t     delta_poc_msb_present_flag[STD_VIDEO_H265_MAX_DELTA_POC];
+    uint8_t     delta_poc_msb_cycle_lt[STD_VIDEO_H265_MAX_DELTA_POC];
+} StdVideoEncodeH265SliceSegmentLongTermRefPics;
+
+typedef struct StdVideoEncodeH265SliceSegmentHeader {
+    StdVideoEncodeH265SliceSegmentHeaderFlags               flags;
+    StdVideoH265SliceType                                   slice_type;
+    uint32_t                                                slice_segment_address;
+    uint8_t                                                 short_term_ref_pic_set_idx;
+    uint8_t                                                 collocated_ref_idx;
+    uint8_t                                                 num_ref_idx_l0_active_minus1;
+    uint8_t                                                 num_ref_idx_l1_active_minus1;
+    uint8_t                                                 MaxNumMergeCand;
+    int8_t                                                  slice_cb_qp_offset;
+    int8_t                                                  slice_cr_qp_offset;
+    int8_t                                                  slice_beta_offset_div2;
+    int8_t                                                  slice_tc_offset_div2;
+    int8_t                                                  slice_act_y_qp_offset;
+    int8_t                                                  slice_act_cb_qp_offset;
+    int8_t                                                  slice_act_cr_qp_offset;
+    const StdVideoH265ShortTermRefPicSet*                   pShortTermRefPicSet;
+    const StdVideoEncodeH265SliceSegmentLongTermRefPics*    pLongTermRefPics;
+    const StdVideoEncodeH265WeightTable*                    pWeightTable;
+} StdVideoEncodeH265SliceSegmentHeader;
+
+typedef struct StdVideoEncodeH265ReferenceModificationFlags {
+    uint32_t    ref_pic_list_modification_flag_l0 : 1;
+    uint32_t    ref_pic_list_modification_flag_l1 : 1;
+} StdVideoEncodeH265ReferenceModificationFlags;
+
+typedef struct StdVideoEncodeH265ReferenceModifications {
+    StdVideoEncodeH265ReferenceModificationFlags    flags;
+    uint8_t                                         referenceList0ModificationsCount;
+    const uint8_t*                                  pReferenceList0Modifications;
+    uint8_t                                         referenceList1ModificationsCount;
+    const uint8_t*                                  pReferenceList1Modifications;
+} StdVideoEncodeH265ReferenceModifications;
+
+typedef struct StdVideoEncodeH265PictureInfoFlags {
+    uint32_t    is_reference_flag : 1;
+    uint32_t    IrapPicFlag : 1;
+    uint32_t    long_term_flag : 1;
+    uint32_t    discardable_flag : 1;
+    uint32_t    cross_layer_bla_flag : 1;
+} StdVideoEncodeH265PictureInfoFlags;
+
+typedef struct StdVideoEncodeH265PictureInfo {
+    StdVideoEncodeH265PictureInfoFlags    flags;
+    StdVideoH265PictureType               PictureType;
+    uint8_t                               sps_video_parameter_set_id;
+    uint8_t                               pps_seq_parameter_set_id;
+    uint8_t                               pps_pic_parameter_set_id;
+    int32_t                               PicOrderCntVal;
+    uint8_t                               TemporalId;
+} StdVideoEncodeH265PictureInfo;
+
+typedef struct StdVideoEncodeH265ReferenceInfoFlags {
+    uint32_t    used_for_long_term_reference : 1;
+    uint32_t    unused_for_reference : 1;
+} StdVideoEncodeH265ReferenceInfoFlags;
+
+typedef struct StdVideoEncodeH265ReferenceInfo {
+    StdVideoEncodeH265ReferenceInfoFlags    flags;
+    int32_t                                 PicOrderCntVal;
+    uint8_t                                 TemporalId;
+} StdVideoEncodeH265ReferenceInfo;
+
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/host/libs/graphics_detector/include/vk_video/vulkan_video_codecs_common.h b/host/libs/graphics_detector/include/vk_video/vulkan_video_codecs_common.h
new file mode 100644
index 0000000..1e49826
--- /dev/null
+++ b/host/libs/graphics_detector/include/vk_video/vulkan_video_codecs_common.h
@@ -0,0 +1,31 @@
+#ifndef VULKAN_VIDEO_CODECS_COMMON_H_
+#define VULKAN_VIDEO_CODECS_COMMON_H_ 1
+
+/*
+** Copyright 2015-2022 The Khronos Group Inc.
+**
+** SPDX-License-Identifier: Apache-2.0
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+#define vulkan_video_codecs_common 1
+#define VK_MAKE_VIDEO_STD_VERSION(major, minor, patch) \
+    ((((uint32_t)(major)) << 22) | (((uint32_t)(minor)) << 12) | ((uint32_t)(patch)))
+
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/host/libs/graphics_detector/include/vulkan-hpp/VulkanHppGenerator.cpp b/host/libs/graphics_detector/include/vulkan-hpp/VulkanHppGenerator.cpp
new file mode 100644
index 0000000..27a409e
--- /dev/null
+++ b/host/libs/graphics_detector/include/vulkan-hpp/VulkanHppGenerator.cpp
@@ -0,0 +1,14564 @@
+// Copyright(c) 2015-2020, NVIDIA CORPORATION. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// Modifications Copyright (C) 2023 The Android Open Source Project
+//   * Updated RAII to work with VULKAN_HPP_NO_EXCEPTIONS.
+
+#include "VulkanHppGenerator.hpp"
+
+#include <algorithm>
+#include <cassert>
+#include <fstream>
+#include <regex>
+#include <sstream>
+
+// Uncomment to generate function definitions with cpp comment saying
+// which function generated it.
+//#define DEBUG_GENERATOR 1
+
+void                                checkAttributes( int                                                  line,
+                                                     std::map<std::string, std::string> const &           attributes,
+                                                     std::map<std::string, std::set<std::string>> const & required,
+                                                     std::map<std::string, std::set<std::string>> const & optional );
+void                                checkElements( int                                               line,
+                                                   std::vector<tinyxml2::XMLElement const *> const & elements,
+                                                   std::map<std::string, bool> const &               required,
+                                                   std::set<std::string> const &                     optional = {} );
+void                                checkForError( bool condition, int line, std::string const & message );
+void                                checkForWarning( bool condition, int line, std::string const & message );
+std::string                         findTag( std::set<std::string> const & tags, std::string const & name, std::string const & postfix = "" );
+std::string                         generateCArraySizes( std::vector<std::string> const & sizes );
+std::pair<std::string, std::string> generateEnumSuffixes( std::string const & name, bool bitmask, std::set<std::string> const & tags );
+std::string generateEnumValueName( std::string const & enumName, std::string const & valueName, bool bitmask, std::set<std::string> const & tags );
+std::string generateNamespacedType( std::string const & type );
+std::string generateNoDiscard( bool returnsSomething, bool multiSuccessCodes, bool multiErrorCodes );
+std::string generateStandardArray( std::string const & type, std::vector<std::string> const & sizes );
+std::string generateStandardArrayWrapper( std::string const & type, std::vector<std::string> const & sizes );
+std::string generateSuccessCode( std::string const & code, std::set<std::string> const & tags );
+std::map<std::string, std::string> getAttributes( tinyxml2::XMLElement const * element );
+template <typename ElementContainer>
+std::vector<tinyxml2::XMLElement const *>        getChildElements( ElementContainer const * element );
+std::pair<std::vector<std::string>, std::string> readModifiers( tinyxml2::XMLNode const * node );
+std::string                                      readSnippet( std::string const & snippetFile );
+std::string                                      replaceWithMap( std::string const & input, std::map<std::string, std::string> replacements );
+std::string                                      startLowerCase( std::string const & input );
+std::string                                      startUpperCase( std::string const & input );
+std::string                                      stripPostfix( std::string const & value, std::string const & postfix );
+std::string                                      stripPluralS( std::string const & name, std::set<std::string> const & tags );
+std::string                                      stripPrefix( std::string const & value, std::string const & prefix );
+std::string                                      toCamelCase( std::string const & value );
+std::string                                      toUpperCase( std::string const & name );
+std::vector<std::string>                         tokenize( std::string const & tokenString, std::string const & separator );
+std::vector<std::string>                         tokenizeAny( std::string const & tokenString, std::string const & separators );
+std::string                                      toString( tinyxml2::XMLError error );
+std::string                                      trim( std::string const & input );
+std::string                                      trimEnd( std::string const & input );
+std::string                                      trimStars( std::string const & input );
+void                                             writeToFile( std::string const & str, std::string const & fileName );
+
+const std::set<std::string> altLens             = { "2*VK_UUID_SIZE", "codeSize / 4", "(rasterizationSamples + 31) / 32", "(samples + 31) / 32" };
+const std::set<std::string> specialPointerTypes = { "Display", "IDirectFB", "wl_display", "xcb_connection_t", "_screen_window" };
+
+//
+// VulkanHppGenerator public interface
+//
+
+VulkanHppGenerator::VulkanHppGenerator( tinyxml2::XMLDocument const & document )
+{
+  // insert the default "handle" without class (for createInstance, and such)
+  m_handles.insert( std::make_pair( "", HandleData( {}, "", false, 0 ) ) );
+
+  // read the document and check its correctness
+  int                                       line     = document.GetLineNum();
+  std::vector<tinyxml2::XMLElement const *> elements = getChildElements( &document );
+  checkElements( line, elements, { { "registry", true } } );
+  checkForError( elements.size() == 1, line, "encountered " + std::to_string( elements.size() ) + " elements named <registry> but only one is allowed" );
+  readRegistry( elements[0] );
+  checkCorrectness();
+
+  // add the commands to the respective handles
+  for ( auto & command : m_commands )
+  {
+    auto handleIt = m_handles.find( command.second.handle );
+    assert( handleIt != m_handles.end() );
+    assert( handleIt->second.commands.find( command.first ) == handleIt->second.commands.end() );
+    handleIt->second.commands.insert( command.first );
+
+    registerDeleter( command.first, command );
+  }
+
+  // some "FlagBits" enums are not specified, but needed for our "Flags" handling -> add them here
+  for ( auto & feature : m_features )
+  {
+    addMissingFlagBits( feature.second.requireData, feature.first );
+  }
+  for ( auto & extension : m_extensions )
+  {
+    addMissingFlagBits( extension.second.requireData, extension.first );
+  }
+
+  // determine the extensionsByNumber map
+  for ( auto extensionIt = m_extensions.begin(); extensionIt != m_extensions.end(); ++extensionIt )
+  {
+    int number = stoi( extensionIt->second.number );
+    assert( m_extensionsByNumber.find( number ) == m_extensionsByNumber.end() );
+    m_extensionsByNumber[number] = extensionIt;
+  }
+}
+
+void VulkanHppGenerator::generateVulkanEnumsHppFile() const
+{
+  std::string const vulkan_enums_hpp = std::string( BASE_PATH ) + "/vulkan/vulkan_enums.hpp";
+  std::cout << "VulkanHppGenerator: Generating " << vulkan_enums_hpp << " ..." << std::endl;
+
+  std::string const vulkanEnumsHppTemplate = R"(${licenseHeader}
+#ifndef VULKAN_ENUMS_HPP
+#  define VULKAN_ENUMS_HPP
+
+namespace VULKAN_HPP_NAMESPACE
+{
+  template <typename EnumType, EnumType value>
+  struct CppType
+  {};
+${enums}
+${indexTypeTraits}
+}   // namespace VULKAN_HPP_NAMESPACE
+#endif
+)";
+
+  std::string str = replaceWithMap(
+    vulkanEnumsHppTemplate, { { "enums", generateEnums() }, { "indexTypeTraits", generateIndexTypeTraits() }, { "licenseHeader", m_vulkanLicenseHeader } } );
+
+  writeToFile( str, vulkan_enums_hpp );
+}
+
+void VulkanHppGenerator::generateVulkanFormatTraitsHppFile() const
+{
+  std::string const vulkan_format_traits_hpp = std::string( BASE_PATH ) + "/vulkan/vulkan_format_traits.hpp";
+  std::cout << "VulkanHppGenerator: Generating " << vulkan_format_traits_hpp << " ..." << std::endl;
+
+  std::string const vulkanFormatTraitsHppTemplate = R"(${licenseHeader}
+#ifndef VULKAN_FORMAT_TRAITS_HPP
+#  define VULKAN_FORMAT_TRAITS_HPP
+
+#include <vulkan/vulkan.hpp>
+
+namespace VULKAN_HPP_NAMESPACE
+{
+${formatTraits}
+}   // namespace VULKAN_HPP_NAMESPACE
+#endif
+)";
+
+  std::string str = replaceWithMap( vulkanFormatTraitsHppTemplate, { { "formatTraits", generateFormatTraits() }, { "licenseHeader", m_vulkanLicenseHeader } } );
+
+  writeToFile( str, vulkan_format_traits_hpp );
+}
+
+void VulkanHppGenerator::generateVulkanFuncsHppFile() const
+{
+  std::string const vulkan_funcs_hpp = std::string( BASE_PATH ) + "/vulkan/vulkan_funcs.hpp";
+  std::cout << "VulkanHppGenerator: Generating " << vulkan_funcs_hpp << " ..." << std::endl;
+
+  std::string const vulkanFuncsHppTemplate = R"(${licenseHeader}
+#ifndef VULKAN_FUNCS_HPP
+#  define VULKAN_FUNCS_HPP
+
+namespace VULKAN_HPP_NAMESPACE
+{
+${commandDefinitions}
+}   // namespace VULKAN_HPP_NAMESPACE
+#endif
+)";
+
+  std::string str =
+    replaceWithMap( vulkanFuncsHppTemplate, { { "commandDefinitions", generateCommandDefinitions() }, { "licenseHeader", m_vulkanLicenseHeader } } );
+
+  writeToFile( str, vulkan_funcs_hpp );
+}
+
+void VulkanHppGenerator::generateVulkanHandlesHppFile() const
+{
+  std::string const vulkan_handles_hpp = std::string( BASE_PATH ) + "/vulkan/vulkan_handles.hpp";
+  std::cout << "VulkanHppGenerator: Generating " << vulkan_handles_hpp << " ..." << std::endl;
+
+  std::string const vulkanHandlesHppTemplate = R"(${licenseHeader}
+#ifndef VULKAN_HANDLES_HPP
+#  define VULKAN_HANDLES_HPP
+
+namespace VULKAN_HPP_NAMESPACE
+{
+${structForwardDeclarations}
+${handles}
+}   // namespace VULKAN_HPP_NAMESPACE
+#endif
+)";
+
+  std::string str = replaceWithMap(
+    vulkanHandlesHppTemplate,
+    { { "handles", generateHandles() }, { "licenseHeader", m_vulkanLicenseHeader }, { "structForwardDeclarations", generateStructForwardDeclarations() } } );
+
+  writeToFile( str, vulkan_handles_hpp );
+}
+
+void VulkanHppGenerator::generateVulkanHashHppFile() const
+{
+  std::string const vulkan_hash_hpp = std::string( BASE_PATH ) + "/vulkan/vulkan_hash.hpp";
+  std::cout << "VulkanHppGenerator: Generating " << vulkan_hash_hpp << " ..." << std::endl;
+
+  std::string const vulkanHandlesHppTemplate = R"(${licenseHeader}
+#ifndef VULKAN_HASH_HPP
+#  define VULKAN_HASH_HPP
+
+#include <vulkan/vulkan.hpp>
+
+namespace std
+{
+  //=======================================
+  //=== HASH structures for Flags types ===
+  //=======================================
+
+  template <typename BitType>
+  struct hash<VULKAN_HPP_NAMESPACE::Flags<BitType>>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::Flags<BitType> const & flags ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<typename std::underlying_type<BitType>::type>{}(
+        static_cast<typename std::underlying_type<BitType>::type>( flags ) );
+    }
+  };
+
+${handleHashStructures}
+${structHashStructures}
+} // namespace std
+#endif
+)";
+
+  std::string str = replaceWithMap( vulkanHandlesHppTemplate,
+                                    { { "handleHashStructures", generateHandleHashStructures() },
+                                      { "licenseHeader", m_vulkanLicenseHeader },
+                                      { "structHashStructures", generateStructHashStructures() } } );
+
+  writeToFile( str, vulkan_hash_hpp );
+}
+
+void VulkanHppGenerator::generateVulkanHppFile() const
+{
+  std::string const vulkan_hpp = std::string( BASE_PATH ) + "/vulkan/vulkan.hpp";
+  std::cout << "VulkanHppGenerator: Generating " << vulkan_hpp << " ... " << std::endl;
+
+  std::string const vulkanHppTemplate = R"(${licenseHeader}
+${includes}
+
+static_assert( VK_HEADER_VERSION == ${headerVersion}, "Wrong VK_HEADER_VERSION!" );
+
+// 32-bit vulkan is not typesafe for non-dispatchable handles, so don't allow copy constructors on this platform by default.
+// To enable this feature on 32-bit platforms please define VULKAN_HPP_TYPESAFE_CONVERSION
+${typesafeCheck}
+#  if !defined( VULKAN_HPP_TYPESAFE_CONVERSION )
+#    define VULKAN_HPP_TYPESAFE_CONVERSION
+#  endif
+#endif
+
+${defines}
+
+namespace VULKAN_HPP_NAMESPACE
+{
+${ArrayWrapper1D}
+${ArrayWrapper2D}
+${Flags}
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+${ArrayProxy}
+${ArrayProxyNoTemporaries}
+${StridedArrayProxy}
+${Optional}
+${StructureChain}
+${UniqueHandle}
+#endif  // VULKAN_HPP_DISABLE_ENHANCED_MODE
+
+${DispatchLoaderBase}
+${DispatchLoaderStatic}
+${DispatchLoaderDefault}
+#if !defined( VULKAN_HPP_NO_SMART_HANDLE )
+${ObjectDestroy}
+${ObjectFree}
+${ObjectRelease}
+${PoolFree}
+#endif // !VULKAN_HPP_NO_SMART_HANDLE
+${baseTypes}
+} // namespace VULKAN_HPP_NAMESPACE
+
+#include <vulkan/vulkan_enums.hpp>
+#if !defined( VULKAN_HPP_NO_TO_STRING )
+#include <vulkan/vulkan_to_string.hpp>
+#endif
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+namespace std
+{
+  template <>
+  struct is_error_code_enum<VULKAN_HPP_NAMESPACE::Result> : public true_type
+  {};
+}  // namespace std
+#endif
+
+namespace VULKAN_HPP_NAMESPACE
+{
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+${Exceptions}
+${resultExceptions}
+${throwResultException}
+#endif
+
+${ResultValue}
+${resultChecks}
+} // namespace VULKAN_HPP_NAMESPACE
+
+// clang-format off
+#include <vulkan/vulkan_handles.hpp>
+#include <vulkan/vulkan_structs.hpp>
+#include <vulkan/vulkan_funcs.hpp>
+// clang-format on
+
+namespace VULKAN_HPP_NAMESPACE
+{
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+${structExtendsStructs}
+#endif // VULKAN_HPP_DISABLE_ENHANCED_MODE
+
+${DynamicLoader}
+${DispatchLoaderDynamic}
+}   // namespace VULKAN_HPP_NAMESPACE
+#endif
+)";
+
+  std::string str = replaceWithMap( vulkanHppTemplate,
+                                    { { "ArrayProxy", readSnippet( "ArrayProxy.hpp" ) },
+                                      { "ArrayProxyNoTemporaries", readSnippet( "ArrayProxyNoTemporaries.hpp" ) },
+                                      { "ArrayWrapper1D", readSnippet( "ArrayWrapper1D.hpp" ) },
+                                      { "ArrayWrapper2D", readSnippet( "ArrayWrapper2D.hpp" ) },
+                                      { "baseTypes", generateBaseTypes() },
+                                      { "defines", readSnippet( "defines.hpp" ) },
+                                      { "DispatchLoaderBase", readSnippet( "DispatchLoaderBase.hpp" ) },
+                                      { "DispatchLoaderDefault", readSnippet( "DispatchLoaderDefault.hpp" ) },
+                                      { "DispatchLoaderDynamic", generateDispatchLoaderDynamic() },
+                                      { "DispatchLoaderStatic", generateDispatchLoaderStatic() },
+                                      { "DynamicLoader", readSnippet( "DynamicLoader.hpp" ) },
+                                      { "Exceptions", readSnippet( "Exceptions.hpp" ) },
+                                      { "Flags", readSnippet( "Flags.hpp" ) },
+                                      { "headerVersion", m_version },
+                                      { "includes", readSnippet( "includes.hpp" ) },
+                                      { "licenseHeader", m_vulkanLicenseHeader },
+                                      { "ObjectDestroy", readSnippet( "ObjectDestroy.hpp" ) },
+                                      { "ObjectFree", readSnippet( "ObjectFree.hpp" ) },
+                                      { "ObjectRelease", readSnippet( "ObjectRelease.hpp" ) },
+                                      { "Optional", readSnippet( "Optional.hpp" ) },
+                                      { "PoolFree", readSnippet( "PoolFree.hpp" ) },
+                                      { "resultChecks", readSnippet( "resultChecks.hpp" ) },
+                                      { "resultExceptions", generateResultExceptions() },
+                                      { "structExtendsStructs", generateStructExtendsStructs() },
+                                      { "ResultValue", readSnippet( "ResultValue.hpp" ) },
+                                      { "StridedArrayProxy", readSnippet( "StridedArrayProxy.hpp" ) },
+                                      { "StructureChain", readSnippet( "StructureChain.hpp" ) },
+                                      { "throwResultException", generateThrowResultException() },
+                                      { "typesafeCheck", m_typesafeCheck },
+                                      { "UniqueHandle", readSnippet( "UniqueHandle.hpp" ) } } );
+
+  writeToFile( str, vulkan_hpp );
+}
+
+void VulkanHppGenerator::generateVulkanRAIIHppFile() const
+{
+  std::string const vulkan_raii_hpp = std::string( BASE_PATH ) + "/vulkan/vulkan_raii.hpp";
+  std::cout << "VulkanHppGenerator: Generating " << vulkan_raii_hpp << " ..." << std::endl;
+
+  std::string const vulkanHandlesHppTemplate = R"(${licenseHeader}
+#ifndef VULKAN_RAII_HPP
+#define VULKAN_RAII_HPP
+
+#include <memory>
+#include <utility>  // std::exchange, std::forward
+#include <vulkan/vulkan.hpp>
+
+#include <android-base/expected.h>
+#include <android-base/logging.h>
+
+#if !defined( VULKAN_HPP_RAII_NAMESPACE )
+#  define VULKAN_HPP_RAII_NAMESPACE raii
+#endif
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+namespace VULKAN_HPP_NAMESPACE
+{
+  namespace VULKAN_HPP_RAII_NAMESPACE
+  {
+    template <class T, class U = T>
+    VULKAN_HPP_CONSTEXPR_14 VULKAN_HPP_INLINE T exchange( T & obj, U && newValue )
+    {
+#  if ( 14 <= VULKAN_HPP_CPP_VERSION )
+      return std::exchange<T>( obj, std::forward<U>( newValue ) );
+#  else
+      T oldValue = std::move( obj );
+      obj        = std::forward<U>( newValue );
+      return oldValue;
+#  endif
+    }
+
+${RAIIDispatchers}
+${RAIIHandles}
+${RAIICommandDefinitions}
+  } // namespace VULKAN_HPP_RAII_NAMESPACE
+}   // namespace VULKAN_HPP_NAMESPACE
+#endif
+#endif
+)";
+
+  std::string str = replaceWithMap( vulkanHandlesHppTemplate,
+                                    { { "licenseHeader", m_vulkanLicenseHeader },
+                                      { "RAIICommandDefinitions", generateRAIICommandDefinitions() },
+                                      { "RAIIDispatchers", generateRAIIDispatchers() },
+                                      { "RAIIHandles", generateRAIIHandles() } } );
+
+  writeToFile( str, vulkan_raii_hpp );
+}
+
+void VulkanHppGenerator::generateVulkanStaticAssertionsHppFile() const
+{
+  std::string const static_assertions_hpp = std::string( BASE_PATH ) + "/vulkan/vulkan_static_assertions.hpp";
+  std::cout << "VulkanHppGenerator: Generating " << static_assertions_hpp << " ..." << std::endl;
+
+  std::string const vulkanHandlesHppTemplate = R"(${licenseHeader}
+#ifndef VULKAN_STATIC_ASSERTIONS_HPP
+#  define VULKAN_STATIC_ASSERTIONS_HPP
+
+#include <vulkan/vulkan.hpp>
+
+//=========================
+//=== static_assertions ===
+//=========================
+
+${staticAssertions}
+#endif
+)";
+
+  std::string str =
+    replaceWithMap( vulkanHandlesHppTemplate, { { "licenseHeader", m_vulkanLicenseHeader }, { "staticAssertions", generateStaticAssertions() } } );
+
+  writeToFile( str, static_assertions_hpp );
+}
+
+void VulkanHppGenerator::generateVulkanStructsHppFile() const
+{
+  std::string const vulkan_structs_hpp = std::string( BASE_PATH ) + "/vulkan/vulkan_structs.hpp";
+  std::cout << "VulkanHppGenerator: Generating " << vulkan_structs_hpp << " ..." << std::endl;
+
+  std::string const vulkanHandlesHppTemplate = R"(${licenseHeader}
+#ifndef VULKAN_STRUCTS_HPP
+#  define VULKAN_STRUCTS_HPP
+
+#include <cstring>  // strcmp
+
+namespace VULKAN_HPP_NAMESPACE
+{
+${structs}
+}   // namespace VULKAN_HPP_NAMESPACE
+#endif
+)";
+
+  std::string str = replaceWithMap( vulkanHandlesHppTemplate, { { "licenseHeader", m_vulkanLicenseHeader }, { "structs", generateStructs() } } );
+
+  writeToFile( str, vulkan_structs_hpp );
+}
+
+void VulkanHppGenerator::generateVulkanToStringHppFile() const
+{
+  std::string const vulkan_to_string_hpp = std::string( BASE_PATH ) + "/vulkan/vulkan_to_string.hpp";
+  std::cout << "VulkanHppGenerator: Generating " << vulkan_to_string_hpp << "..." << std::endl;
+
+  std::string const vulkanHandlesHppTemplate = R"(${licenseHeader}
+#ifndef VULKAN_TO_STRING_HPP
+#  define VULKAN_TO_STRING_HPP
+
+#include <vulkan/vulkan_enums.hpp>
+
+#if __cpp_lib_format
+#  include <format>   // std::format
+#else
+#  include <sstream>  // std::stringstream
+#endif
+
+namespace VULKAN_HPP_NAMESPACE
+{
+${bitmasksToString}
+${enumsToString}
+} // namespace VULKAN_HPP_NAMESPACE
+#endif
+)";
+
+  std::string str = replaceWithMap(
+    vulkanHandlesHppTemplate,
+    { { "bitmasksToString", generateBitmasksToString() }, { "enumsToString", generateEnumsToString() }, { "licenseHeader", m_vulkanLicenseHeader } } );
+
+  writeToFile( str, vulkan_to_string_hpp );
+}
+
+void VulkanHppGenerator::prepareRAIIHandles()
+{
+  // filter out functions that are not usefull on this level of abstraction (like vkGetInstanceProcAddr)
+  // and all the construction and destruction functions, as they are used differently
+  assert( m_handles.begin()->first.empty() );
+  for ( auto handleIt = std::next( m_handles.begin() ); handleIt != m_handles.end(); ++handleIt )
+  {
+    handleIt->second.destructorIt = determineRAIIHandleDestructor( handleIt->first );
+    if ( handleIt->second.destructorIt != m_commands.end() )
+    {
+      m_RAIISpecialFunctions.insert( handleIt->second.destructorIt->first );
+    }
+    handleIt->second.constructorIts = determineRAIIHandleConstructors( handleIt->first, handleIt->second.destructorIt );
+  }
+
+  distributeSecondLevelCommands( m_RAIISpecialFunctions );
+}
+
+void VulkanHppGenerator::prepareVulkanFuncs()
+{
+  // rename a couple of function parameters to prevent this warning, treated as an error:
+  // warning C4458: declaration of 'objectType' hides class member
+  for ( auto & command : m_commands )
+  {
+    for ( auto & param : command.second.params )
+    {
+      if ( param.name == "objectType" )
+      {
+        param.name += "_";
+      }
+    }
+  }
+}
+
+//
+// VulkanHppGenerator private interface
+//
+
+void VulkanHppGenerator::addCommand( std::string const & name, CommandData & commandData )
+{
+  // find the handle this command is going to be associated to
+  checkForError( !commandData.params.empty(), commandData.xmlLine, "command <" + name + "> with no params" );
+  std::map<std::string, HandleData>::iterator handleIt = m_handles.find( commandData.params[0].type.type );
+  if ( handleIt == m_handles.end() )
+  {
+    handleIt = m_handles.begin();
+    assert( handleIt->first == "" );
+  }
+  commandData.handle = handleIt->first;
+
+  // add this command to the list of commands
+  checkForError( m_commands.insert( std::make_pair( name, commandData ) ).second, commandData.xmlLine, "already encountered command <" + name + ">" );
+}
+
+void VulkanHppGenerator::addMissingFlagBits( std::vector<RequireData> & requireData, std::string const & referencedIn )
+{
+  for ( auto & require : requireData )
+  {
+    std::vector<std::string> newTypes;
+    for ( auto const & type : require.types )
+    {
+      auto bitmaskIt = m_bitmasks.find( type );
+      if ( ( bitmaskIt != m_bitmasks.end() ) && bitmaskIt->second.requirements.empty() )
+      {
+        // generate the flagBits enum name out of the bitmask name: VkFooFlagsXXX -> VkFooFlagBitsXXX
+        size_t pos = bitmaskIt->first.find( "Flags" );
+        assert( pos != std::string::npos );
+        std::string flagBits = bitmaskIt->first.substr( 0, pos + 4 ) + "Bit" + bitmaskIt->first.substr( pos + 4 );
+
+        // as the bitmask's requirement is still empty, this flagBits should not be listed in the require list!
+        assert( std::find_if( require.types.begin(), require.types.end(), [&flagBits]( std::string const & type ) { return ( type == flagBits ); } ) ==
+                require.types.end() );
+
+        bitmaskIt->second.requirements = flagBits;
+
+        // some flagsBits are specified but never listed as required for any flags!
+        // so, even if this bitmask has no enum listed as required, it might still already exist in the enums list
+        auto enumIt = m_enums.find( flagBits );
+        if ( enumIt == m_enums.end() )
+        {
+          m_enums.insert( std::make_pair( flagBits, EnumData{ .isBitmask = true, .xmlLine = 0 } ) );
+
+          assert( m_types.find( flagBits ) == m_types.end() );
+          m_types.insert( std::make_pair( flagBits, TypeData{ .category = TypeCategory::Bitmask, .referencedIn = referencedIn } ) );
+        }
+        else
+        {
+          assert( m_types.find( flagBits ) != m_types.end() );
+          enumIt->second.isBitmask = true;
+        }
+
+        newTypes.push_back( flagBits );
+      }
+    }
+    // add all the newly created flagBits types to the require list as if they had been part of the vk.xml!
+    require.types.insert( require.types.end(), newTypes.begin(), newTypes.end() );
+  }
+}
+
+std::string VulkanHppGenerator::addTitleAndProtection( std::string const & title, std::string const & strIf, std::string const & strElse ) const
+{
+  std::string str;
+  if ( !strIf.empty() )
+  {
+    auto [enter, leave] = generateProtection( getProtectFromTitle( title ) );
+    str                 = "\n" + enter + "  //=== " + title + " ===\n" + strIf;
+    if ( !enter.empty() && !strElse.empty() )
+    {
+      str += "#else \n" + strElse;
+    }
+    str += leave;
+  }
+  return str;
+}
+
+bool VulkanHppGenerator::allVectorSizesSupported( std::vector<ParamData> const & params, std::map<size_t, VectorParamData> const & vectorParams ) const
+{
+  // check if all vector sizes are by value and their type is one of "uint32_t", "VkDeviceSize", or "VkSampleCountFlagBits"
+  return std::find_if_not( vectorParams.begin(),
+                           vectorParams.end(),
+                           [&params]( auto const & vpi )
+                           {
+                             return params[vpi.second.lenParam].type.isValue() &&
+                                    ( ( params[vpi.second.lenParam].type.type == "uint32_t" ) || ( params[vpi.second.lenParam].type.type == "VkDeviceSize" ) ||
+                                      ( params[vpi.second.lenParam].type.type == "VkSampleCountFlagBits" ) );
+                           } ) == vectorParams.end();
+}
+
+void VulkanHppGenerator::appendDispatchLoaderDynamicCommands( std::vector<RequireData> const & requireData,
+                                                              std::set<std::string> &          listedCommands,
+                                                              std::string const &              title,
+                                                              std::string &                    commandMembers,
+                                                              std::string &                    initialCommandAssignments,
+                                                              std::string &                    instanceCommandAssignments,
+                                                              std::string &                    deviceCommandAssignments ) const
+{
+  std::string members, initial, instance, device, placeholders;
+  for ( auto const & require : requireData )
+  {
+    for ( auto const & command : require.commands )
+    {
+      if ( listedCommands.insert( command ).second )
+      {
+        auto commandIt = m_commands.find( command );
+        assert( commandIt != m_commands.end() );
+
+        members += "    PFN_" + commandIt->first + " " + commandIt->first + " = 0;\n";
+        placeholders += "    PFN_dummy " + commandIt->first + "_placeholder = 0;\n";
+        if ( commandIt->second.handle.empty() )
+        {
+          initial += generateDispatchLoaderDynamicCommandAssignment( commandIt->first, commandIt->second, "NULL" );
+        }
+        else
+        {
+          instance += generateDispatchLoaderDynamicCommandAssignment( commandIt->first, commandIt->second, "instance" );
+          if ( isDeviceCommand( commandIt->second ) )
+          {
+            device += generateDispatchLoaderDynamicCommandAssignment( commandIt->first, commandIt->second, "device" );
+          }
+        }
+      }
+    }
+  }
+  auto [enter, leave] = generateProtection( getProtectFromTitle( title ) );
+  std::string header  = "\n" + enter + "  //=== " + title + " ===\n";
+  if ( !members.empty() )
+  {
+    commandMembers += header + members;
+    if ( !enter.empty() )
+    {
+      commandMembers += "#else\n" + placeholders;
+    }
+    commandMembers += leave;
+  }
+  if ( !initial.empty() )
+  {
+    initialCommandAssignments += header + initial + leave;
+  }
+  if ( !instance.empty() )
+  {
+    instanceCommandAssignments += header + instance + leave;
+  }
+  if ( !device.empty() )
+  {
+    deviceCommandAssignments += header + device + leave;
+  }
+}
+
+void VulkanHppGenerator::appendRAIIDispatcherCommands( std::vector<RequireData> const & requireData,
+                                                       std::set<std::string> &          listedCommands,
+                                                       std::string const &              title,
+                                                       std::string &                    contextInitializers,
+                                                       std::string &                    contextMembers,
+                                                       std::string &                    deviceAssignments,
+                                                       std::string &                    deviceMembers,
+                                                       std::string &                    instanceAssignments,
+                                                       std::string &                    instanceMembers ) const
+{
+  std::string ci, cm, da, dm, dmp, ia, im, imp;
+  for ( auto const & require : requireData )
+  {
+    for ( auto const & command : require.commands )
+    {
+      if ( listedCommands.insert( command ).second )
+      {
+        auto commandIt = m_commands.find( command );
+        assert( commandIt != m_commands.end() );
+        if ( commandIt->second.handle.empty() )
+        {
+          assert( commandIt->second.alias.empty() );
+
+          ci += ", " + commandIt->first + "( PFN_" + commandIt->first + "( getProcAddr( NULL, \"" + commandIt->first + "\" ) ) )";
+
+          cm += "      PFN_" + commandIt->first + " " + commandIt->first + " = 0;\n";
+        }
+        else if ( ( commandIt->second.handle == "VkDevice" ) || hasParentHandle( commandIt->second.handle, "VkDevice" ) )
+        {
+          da += "        " + commandIt->first + " = PFN_" + commandIt->first + "( vkGetDeviceProcAddr( device, \"" + commandIt->first + "\" ) );\n";
+          // if this is an alias'ed function, use it as a fallback for the original one
+          if ( !commandIt->second.alias.empty() )
+          {
+            da += "        if ( !" + commandIt->second.alias + " ) " + commandIt->second.alias + " = " + commandIt->first + ";\n";
+          }
+
+          dm += "      PFN_" + commandIt->first + " " + commandIt->first + " = 0;\n";
+          dmp += "      PFN_dummy " + commandIt->first + "_placeholder = 0;\n";
+        }
+        else
+        {
+          assert( ( commandIt->second.handle == "VkInstance" ) || hasParentHandle( commandIt->second.handle, "VkInstance" ) );
+
+          // filter out vkGetInstanceProcAddr, as starting with Vulkan 1.2 it can resolve itself only (!) with an
+          // instance nullptr !
+          if ( command != "vkGetInstanceProcAddr" )
+          {
+            ia += "        " + commandIt->first + " = PFN_" + commandIt->first + "( vkGetInstanceProcAddr( instance, \"" + commandIt->first + "\" ) );\n";
+            // if this is an alias'ed function, use it as a fallback for the original one
+            if ( !commandIt->second.alias.empty() )
+            {
+              ia += "        if ( !" + commandIt->second.alias + " ) " + commandIt->second.alias + " = " + commandIt->first + ";\n";
+            }
+          }
+
+          im += +"      PFN_" + commandIt->first + " " + commandIt->first + " = 0;\n";
+          imp += "      PFN_dummy " + commandIt->first + "_placeholder = 0;\n";
+        }
+      }
+    }
+  }
+  contextInitializers += addTitleAndProtection( title, ci );
+  contextMembers += addTitleAndProtection( title, cm );
+  deviceAssignments += addTitleAndProtection( title, da );
+  deviceMembers += addTitleAndProtection( title, dm, dmp );
+  instanceAssignments += addTitleAndProtection( title, ia );
+  instanceMembers += addTitleAndProtection( title, im, imp );
+}
+
+// Validate all bitmasks: each should be listed in some feature or extension, and a
+// bitmask naming a "requires" enum must reference a known enum.
+void VulkanHppGenerator::checkBitmaskCorrectness() const
+{
+  for ( auto const & [bitmaskName, bitmaskData] : m_bitmasks )
+  {
+    // an unreferenced bitmask is not strictly forbidden, but most likely unintended
+    auto typeIt = m_types.find( bitmaskName );
+    assert( typeIt != m_types.end() );
+    checkForError( !typeIt->second.referencedIn.empty(), bitmaskData.xmlLine, "bitmask <" + bitmaskName + "> not listed in any feature or extension" );
+
+    // the "requires" attribute, if present, has to name a known enum
+    if ( !bitmaskData.requirements.empty() )
+    {
+      checkForError( m_enums.find( bitmaskData.requirements ) != m_enums.end(),
+                     bitmaskData.xmlLine,
+                     "bitmask requires unknown <" + bitmaskData.requirements + ">" );
+    }
+  }
+}
+
+// Validate every command in the registry:
+//  - it is listed in some feature or extension
+//  - all of its error/success codes are known VkResult values or aliases
+//  - a command returning VkResult specifies successcodes
+//  - all parameter types and the return type are known types
+void VulkanHppGenerator::checkCommandCorrectness() const
+{
+  // prepare command checks by gathering all result codes and aliases into one set of resultCodes
+  auto resultIt = m_enums.find( "VkResult" );
+  assert( resultIt != m_enums.end() );
+  std::set<std::string> resultCodes;
+  for ( auto const & rc : resultIt->second.values )  // by reference: avoid copying EnumValueData
+  {
+    resultCodes.insert( rc.name );
+  }
+  for ( auto const & rc : resultIt->second.aliases )
+  {
+    resultCodes.insert( rc.first );
+  }
+
+  // command checks
+  for ( auto const & command : m_commands )
+  {
+    // check that a command is referenced somewhere
+    // I think, it's not forbidden to not reference a function, but it would probably be not intended?
+    checkForError( !command.second.referencedIn.empty(), command.second.xmlLine, "command <" + command.first + "> not listed in any feature or extension" );
+
+    // check for unknown error or success codes
+    for ( auto const & ec : command.second.errorCodes )
+    {
+      checkForError( resultCodes.find( ec ) != resultCodes.end(), command.second.xmlLine, "command uses unknown error code <" + ec + ">" );
+    }
+    for ( auto const & sc : command.second.successCodes )
+    {
+      checkForError( resultCodes.find( sc ) != resultCodes.end(), command.second.xmlLine, "command uses unknown success code <" + sc + ">" );
+    }
+
+    // check that functions returning a VkResult specify successcodes
+    checkForError( ( command.second.returnType != "VkResult" ) || !command.second.successCodes.empty(),
+                   command.second.xmlLine,
+                   "missing successcodes on command <" + command.first + "> returning VkResult!" );
+
+    // check that all parameter types as well as the return type are known types
+    // (message typo fixed: "comand" -> "command")
+    for ( auto const & p : command.second.params )
+    {
+      checkForError( m_types.find( p.type.type ) != m_types.end(), p.xmlLine, "command uses parameter of unknown type <" + p.type.type + ">" );
+    }
+    checkForError( m_types.find( command.second.returnType ) != m_types.end(),
+                   command.second.xmlLine,
+                   "command uses unknown return type <" + command.second.returnType + ">" );
+  }
+}
+
+// Entry point for all registry consistency checks; dispatches to one checker per category.
+void VulkanHppGenerator::checkCorrectness() const
+{
+  // -1: presumably a "no associated xml line" sentinel for checkForError -- confirm
+  checkForError( !m_vulkanLicenseHeader.empty(), -1, "missing license header" );
+  checkBitmaskCorrectness();
+  checkCommandCorrectness();
+  checkDefineCorrectness();
+  checkEnumCorrectness();
+  checkExtensionCorrectness();
+  checkFuncPointerCorrectness();
+  checkHandleCorrectness();
+  checkStructCorrectness();
+}
+
+// Validate all defines: a define that names a "requires" must reference another known define.
+void VulkanHppGenerator::checkDefineCorrectness() const
+{
+  for ( auto const & [defineName, defineData] : m_defines )
+  {
+    bool requireIsKnown = defineData.require.empty() || ( m_defines.find( defineData.require ) != m_defines.end() );
+    checkForError( requireIsKnown, defineData.xmlLine, "using undefined require <" + defineData.require + ">" );
+  }
+}
+
+// Validate all enums: referencing, alias validity, protection attributes, per-feature and
+// per-extension enum usage, and a final special check that every VkFormat value (other than
+// VK_FORMAT_UNDEFINED or an alias) has a format specification.
+void VulkanHppGenerator::checkEnumCorrectness() const
+{
+  for ( auto const & e : m_enums )
+  {
+    // check that a bitmask is referenced somewhere
+    // it's not forbidden to not reference a bitmask, and in fact that happens! So just warn here
+    auto typeIt = m_types.find( e.first );
+    assert( typeIt != m_types.end() );
+    checkForWarning( !typeIt->second.referencedIn.empty(), e.second.xmlLine, "enum <" + e.first + "> not listed in any feature or extension" );
+
+    // check that the aliasNames are known enum values or known aliases
+    for ( auto const & alias : e.second.aliases )
+    {
+      checkForError(
+        ( std::find_if( e.second.values.begin(), e.second.values.end(), [&alias]( EnumValueData const & evd ) { return evd.name == alias.second.name; } ) !=
+          e.second.values.end() ) ||
+          ( e.second.aliases.find( alias.second.name ) != e.second.aliases.end() ),
+        alias.second.xmlLine,
+        "enum <" + alias.first + "> uses unknown alias <" + alias.second.name + ">" );
+    }
+
+    // check that any protection fits to the corresponding extension
+    // a protected enum value must use exactly the protect string of its extension's platform
+    for ( auto const & v : e.second.values )
+    {
+      if ( !v.protect.empty() )
+      {
+        auto extIt = m_extensions.find( v.extension );
+        assert( extIt != m_extensions.end() );
+        auto platformIt = m_platforms.find( extIt->second.platform );
+        assert( platformIt != m_platforms.end() );
+        checkForError( v.protect == platformIt->second.protect,
+                       v.xmlLine,
+                       "attribute <protect> of enum value <" + v.name + "> is \"" + v.protect + "\" but corresponding extension <" + v.extension +
+                         "> belongs to platform <" + platformIt->first + "> with protection \"" + platformIt->second.protect + "\"" );
+      }
+    }
+  }
+
+  // enum checks by features and extensions (delegates to the requireData overload)
+  for ( auto & feature : m_features )
+  {
+    checkEnumCorrectness( feature.second.requireData );
+  }
+  for ( auto & ext : m_extensions )
+  {
+    checkEnumCorrectness( ext.second.requireData );
+  }
+
+  // special check for VkFormat
+  if ( !m_formats.empty() )
+  {
+    auto enumIt = m_enums.find( "VkFormat" );
+    assert( enumIt != m_enums.end() );
+    // skip the leading VK_FORMAT_UNDEFINED, which intentionally has no format specification
+    assert( enumIt->second.values.front().name == "VK_FORMAT_UNDEFINED" );
+    for ( auto enumValueIt = std::next( enumIt->second.values.begin() ); enumValueIt != enumIt->second.values.end(); ++enumValueIt )
+    {
+      auto formatIt = m_formats.find( enumValueIt->name );
+      if ( formatIt == m_formats.end() )
+      {
+        // a value that is merely the target of an alias needs no format specification of its own
+        auto aliasIt = std::find_if(
+          enumIt->second.aliases.begin(), enumIt->second.aliases.end(), [&enumValueIt]( auto const & ead ) { return ead.second.name == enumValueIt->name; } );
+        checkForError( aliasIt != enumIt->second.aliases.end(), enumValueIt->xmlLine, "missing format specification for <" + enumValueIt->name + ">" );
+      }
+    }
+  }
+}
+
+// Validate the bitmask/enum relationships for all types required by one feature or
+// extension (the requireData of the caller): a required bitmask's "requires" enum must be
+// referenced somewhere, and a required bitmask-enum must be tied to a bitmask of matching width.
+void VulkanHppGenerator::checkEnumCorrectness( std::vector<RequireData> const & requireData ) const
+{
+  for ( auto const & require : requireData )
+  {
+    for ( auto const & type : require.types )
+    {
+      auto typeIt = m_types.find( type );
+      assert( typeIt != m_types.end() );
+      switch ( typeIt->second.category )
+      {
+        case TypeCategory::Bitmask:
+          {
+            // check that each "require" listed for a bitmask is listed for a feature or an extension
+            auto bitmaskIt = m_bitmasks.find( type );
+            if ( bitmaskIt != m_bitmasks.end() )
+            {
+              // not for every bitmask is a "require" listed
+              if ( !bitmaskIt->second.requirements.empty() )
+              {
+                auto requireTypeIt = m_types.find( bitmaskIt->second.requirements );
+                assert( requireTypeIt != m_types.end() );
+                checkForError( !requireTypeIt->second.referencedIn.empty(),
+                               bitmaskIt->second.xmlLine,
+                               "bitmask <" + bitmaskIt->first + "> requires <" + bitmaskIt->second.requirements +
+                                 "> which is not listed for any feature or extension!" );
+              }
+            }
+            else
+            {
+              // every bitmask not listed in the m_bitmasks, should be an alias of such a thing
+              assert( std::find_if( m_bitmasks.begin(),
+                                    m_bitmasks.end(),
+                                    [&type]( std::pair<const std::string, BitmaskData> const & bd ) { return bd.second.alias == type; } ) != m_bitmasks.end() );
+            }
+          }
+          break;
+        case TypeCategory::Enum:
+          {
+            auto enumIt = m_enums.find( type );
+            if ( enumIt != m_enums.end() )
+            {
+              if ( enumIt->second.isBitmask )
+              {
+                // check that any enum of a bitmask is listed as "require" or "bitvalues" for a bitmask
+                auto bitmaskIt = std::find_if(
+                  m_bitmasks.begin(), m_bitmasks.end(), [&enumIt]( auto const & bitmask ) { return bitmask.second.requirements == enumIt->first; } );
+                checkForError( bitmaskIt != m_bitmasks.end(),
+                               enumIt->second.xmlLine,
+                               "enum <" + enumIt->first + "> is not listed as an requires or bitvalues for any bitmask in the types section" );
+
+                // check that bitwidth of the enum and type of the corresponding bitmask are equal
+                // NOTE(review): this dereferences bitmaskIt; safe only if checkForError above
+                // throws/aborts on failure, otherwise bitmaskIt could be m_bitmasks.end() -- confirm
+                checkForError( ( enumIt->second.bitwidth != "64" ) || ( bitmaskIt->second.type == "VkFlags64" ),
+                               enumIt->second.xmlLine,
+                               "enum <" + enumIt->first + "> is marked with bitwidth <64> but corresponding bitmask <" + bitmaskIt->first +
+                                 "> is not of type <VkFlags64>" );
+              }
+            }
+            else
+            {
+              // every enum not listed in the m_enums, should be an alias of such a thing
+              assert( std::find_if( m_enums.begin(),
+                                    m_enums.end(),
+                                    [&type]( std::pair<const std::string, EnumData> const & ed ) { return ed.second.alias == type; } ) != m_enums.end() );
+            }
+          }
+          break;
+        // other type categories carry no enum-related constraints here
+        default: break;
+      }
+    }
+  }
+}
+
+// Return true if constructorIts contains a "singular" constructor whose parameter list
+// matches that of the array constructor *constructorIt exactly, except that it lacks the
+// size parameter at position lenIt.
+bool VulkanHppGenerator::checkEquivalentSingularConstructor( std::vector<std::map<std::string, CommandData>::const_iterator> const & constructorIts,
+                                                             std::map<std::string, CommandData>::const_iterator                      constructorIt,
+                                                             std::vector<ParamData>::const_iterator                                  lenIt ) const
+{
+  // check, if there is no singular constructor with the very same arguments as this array constructor
+  // (besides the size, of course)
+  auto isEquivalentSingularConstructor = [constructorIt, lenIt]( std::map<std::string, CommandData>::const_iterator it )
+  {
+    // a singular candidate has exactly one parameter less (the missing size)
+    if ( it->second.params.size() + 1 != constructorIt->second.params.size() )
+    {
+      return false;
+    }
+    size_t lenIdx = std::distance( constructorIt->second.params.begin(), lenIt );
+    // walk both parameter lists in lock-step; j skips over the size parameter at lenIdx
+    for ( size_t i = 0, j = 0; i < it->second.params.size(); ++i, ++j )
+    {
+      assert( j < constructorIt->second.params.size() );
+      if ( j == lenIdx )
+      {
+        ++j;
+      }
+      if ( it->second.params[i].type.type != constructorIt->second.params[j].type.type )
+      {
+        return false;
+      }
+    }
+    return true;
+  };
+  return ( std::find_if( constructorIts.begin(), constructorIts.end(), isEquivalentSingularConstructor ) != constructorIts.end() );
+}
+
+// Validate per-extension metadata: deprecation/obsoletion/promotion targets and every
+// "depends" entry must name a known extension or a known feature (core version).
+void VulkanHppGenerator::checkExtensionCorrectness() const
+{
+  for ( auto const & extension : m_extensions )
+  {
+    // check for existence of any deprecation, obsoletion, or promotion
+    if ( !extension.second.deprecatedBy.empty() )
+    {
+      // bug fix: the message previously interpolated promotedTo instead of deprecatedBy
+      checkForError( ( m_extensions.find( extension.second.deprecatedBy ) != m_extensions.end() ) ||
+                       ( m_features.find( extension.second.deprecatedBy ) != m_features.end() ),
+                     extension.second.xmlLine,
+                     "extension deprecated by unknown extension/version <" + extension.second.deprecatedBy + ">" );
+    }
+    if ( !extension.second.obsoletedBy.empty() )
+    {
+      // bug fix: the message previously interpolated promotedTo instead of obsoletedBy
+      checkForError( ( m_extensions.find( extension.second.obsoletedBy ) != m_extensions.end() ) ||
+                       ( m_features.find( extension.second.obsoletedBy ) != m_features.end() ),
+                     extension.second.xmlLine,
+                     "extension obsoleted by unknown extension/version <" + extension.second.obsoletedBy + ">" );
+    }
+    if ( !extension.second.promotedTo.empty() )
+    {
+      checkForError( ( m_extensions.find( extension.second.promotedTo ) != m_extensions.end() ) ||
+                       ( m_features.find( extension.second.promotedTo ) != m_features.end() ),
+                     extension.second.xmlLine,
+                     "extension promoted to unknown extension/version <" + extension.second.promotedTo + ">" );
+    }
+
+    // check for existence of any requirement (the empty-depends guard was redundant)
+    for ( auto const & require : extension.second.requireData )
+    {
+      for ( auto const & depends : require.depends )
+      {
+        checkForError( ( m_features.find( depends ) != m_features.end() ) || ( m_extensions.find( depends ) != m_extensions.end() ),
+                       require.xmlLine,
+                       "extension <" + extension.first + "> lists an unknown depends <" + depends + ">" );
+      }
+    }
+  }
+}
+
+// Validate all funcpointers: the "requires" type (if any) and every argument type must be known.
+void VulkanHppGenerator::checkFuncPointerCorrectness() const
+{
+  for ( auto const & [fpName, fpData] : m_funcPointers )
+  {
+    if ( !fpData.requirements.empty() )
+    {
+      checkForError(
+        m_types.find( fpData.requirements ) != m_types.end(), fpData.xmlLine, "funcpointer requires unknown <" + fpData.requirements + ">" );
+    }
+    for ( auto const & argument : fpData.arguments )
+    {
+      checkForError( m_types.find( argument.type ) != m_types.end(), argument.xmlLine, "funcpointer argument of unknown type <" + argument.type + ">" );
+    }
+  }
+}
+
+// Validate all handles: each handle has a known parent and a known VkObjectType value, and
+// conversely each VkObjectType value (except VK_OBJECT_TYPE_UNKNOWN) is used by some handle.
+void VulkanHppGenerator::checkHandleCorrectness() const
+{
+  // prepare handle checks by getting the VkObjectType enum
+  auto objectTypeIt = m_enums.find( "VkObjectType" );
+  assert( objectTypeIt != m_enums.end() );
+
+  // handle checks
+  for ( auto const & handle : m_handles )
+  {
+    // check the existence of the parent
+    checkForError( m_handles.find( handle.second.parent ) != m_handles.end(),
+                   handle.second.xmlLine,
+                   "handle <" + handle.first + "> with unknown parent <" + handle.second.parent + ">" );
+
+    // check existence of objTypeEnum used with this handle type
+    // the empty-named root "handle" carries no objtypeenum and is skipped here
+    if ( !handle.first.empty() )
+    {
+      assert( !handle.second.objTypeEnum.empty() );
+      checkForError( std::find_if( objectTypeIt->second.values.begin(),
+                                   objectTypeIt->second.values.end(),
+                                   [&handle]( EnumValueData const & evd )
+                                   { return evd.name == handle.second.objTypeEnum; } ) != objectTypeIt->second.values.end(),
+                     handle.second.xmlLine,
+                     "handle <" + handle.first + "> specifies unknown \"objtypeenum\" <" + handle.second.objTypeEnum + ">" );
+    }
+  }
+
+  // check that all specified objectType values are used with a handle type
+  for ( auto const & objectTypeValue : objectTypeIt->second.values )
+  {
+    if ( objectTypeValue.name != "VK_OBJECT_TYPE_UNKNOWN" )
+    {
+      checkForError( std::find_if( m_handles.begin(),
+                                   m_handles.end(),
+                                   [&objectTypeValue]( std::pair<std::string, HandleData> const & hd )
+                                   { return hd.second.objTypeEnum == objectTypeValue.name; } ) != m_handles.end(),
+                     objectTypeValue.xmlLine,
+                     "VkObjectType value <" + objectTypeValue.name + "> not specified as \"objtypeenum\" for any handle" );
+    }
+  }
+}
+
+// Validate all structs: alias resolution, referencing, structExtends targets, member
+// correctness (delegated), and a final 1-1 check between declared sType values and the
+// non-reserved values of the VkStructureType enum.
+void VulkanHppGenerator::checkStructCorrectness() const
+{
+  // every struct alias must resolve to a known struct
+  for ( auto const & structAlias : m_structureAliases )
+  {
+    auto structIt = m_structures.find( structAlias.second.alias );
+    checkForError( structIt != m_structures.end(), structAlias.second.xmlLine, "unknown struct alias <" + structAlias.second.alias + ">" );
+  }
+
+  // inverse alias map: every key must be a known struct, otherwise report via the alias entry
+  for ( auto const & structAliasInverse : m_structureAliasesInverse )
+  {
+    auto structIt = m_structures.find( structAliasInverse.first );
+    if ( structIt == m_structures.end() )
+    {
+      assert( !structAliasInverse.second.empty() );
+      auto aliasIt = m_structureAliases.find( *structAliasInverse.second.begin() );
+      assert( aliasIt != m_structureAliases.end() );
+      checkForError( false, aliasIt->second.xmlLine, "struct <" + aliasIt->first + "> uses unknown alias <" + aliasIt->second.alias + ">" );
+    }
+  }
+
+  // sTypeValues collects all sType values encountered in struct members, for the check below
+  std::set<std::string> sTypeValues;
+  for ( auto const & structure : m_structures )
+  {
+    // check that a struct is referenced somewhere
+    // I think, it's not forbidden to not reference a struct, but it would probably be not intended?
+    auto typeIt = m_types.find( structure.first );
+    assert( typeIt != m_types.end() );
+    checkForError(
+      !typeIt->second.referencedIn.empty(), structure.second.xmlLine, "structure <" + structure.first + "> not listed in any feature or extension" );
+
+    // check for existence of all structs that are extended by this struct
+    for ( auto const & extend : structure.second.structExtends )
+    {
+      checkForError( ( m_structures.find( extend ) != m_structures.end() ) || ( m_structureAliases.find( extend ) != m_structureAliases.end() ),
+                     structure.second.xmlLine,
+                     "struct <" + structure.first + "> extends unknown <" + extend + ">" );
+    }
+
+    // checks on the members of a struct
+    checkStructMemberCorrectness( structure.first, structure.second.members, sTypeValues );
+  }
+
+  // enum VkStructureType checks (need to be after structure checks because of sTypeValues gathered there)
+  auto structureTypeIt = m_enums.find( "VkStructureType" );
+  assert( structureTypeIt != m_enums.end() );
+  for ( auto const & enumValue : structureTypeIt->second.values )
+  {
+    // the two LOADER_*_CREATE_INFO values are reserved for the loader and must not be used
+    if ( ( enumValue.name == "VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO" ) || ( enumValue.name == "VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO" ) )
+    {
+      checkForError(
+        sTypeValues.find( enumValue.name ) == sTypeValues.end(), enumValue.xmlLine, "Reserved VkStructureType enum value <" + enumValue.name + "> is used" );
+    }
+    else
+    {
+      // erase returns 1 iff the value was present, i.e. some struct uses it
+      checkForError( sTypeValues.erase( enumValue.name ) == 1, enumValue.xmlLine, "VkStructureType enum value <" + enumValue.name + "> never used" );
+    }
+  }
+  assert( sTypeValues.empty() );
+}
+
+// Check all members of the struct <structureName>:
+//  - Vk-typed members are listed in some feature/extension, and all member types are known
+//  - a member with a "selector" is a union whose member selections map onto the selector enum
+//  - array-size constants are known constants
+//  - fixed "value" attributes are valid (enum value exists / uint32_t is numeric), and
+//    sType values are unique across all structs (collected into sTypeValues for the caller)
+void VulkanHppGenerator::checkStructMemberCorrectness( std::string const &             structureName,
+                                                       std::vector<MemberData> const & members,
+                                                       std::set<std::string> &         sTypeValues ) const
+{
+  for ( auto const & member : members )
+  {
+    // check that all member types are required in some feature or extension
+    if ( member.type.type.starts_with( "Vk" ) )
+    {
+      auto typeIt = m_types.find( member.type.type );
+      assert( typeIt != m_types.end() );
+      checkForError( !typeIt->second.referencedIn.empty(),
+                     member.xmlLine,
+                     "struct member type <" + member.type.type + "> used in struct <" + structureName + "> is never listed for any feature or extension" );
+    }
+
+    // if a member specifies a selector, that member is a union and the selector is an enum
+    // check that there's a 1-1 connection between the specified selections and the values of that enum
+    if ( !member.selector.empty() )
+    {
+      auto selectorIt = findStructMemberIt( member.selector, members );
+      assert( selectorIt != members.end() );
+      auto selectorEnumIt = m_enums.find( selectorIt->type.type );
+      assert( selectorEnumIt != m_enums.end() );
+      auto unionIt = m_structures.find( member.type.type );
+      assert( ( unionIt != m_structures.end() ) && unionIt->second.isUnion );
+      for ( auto const & unionMember : unionIt->second.members )
+      {
+        // check that each union member has a selection, that is a value of the seleting enum
+        assert( !unionMember.selection.empty() );
+        for ( auto const & selection : unionMember.selection )
+        {
+          checkForError( std::find_if( selectorEnumIt->second.values.begin(),
+                                       selectorEnumIt->second.values.end(),
+                                       [&selection]( EnumValueData const & evd ) { return evd.name == selection; } ) != selectorEnumIt->second.values.end(),
+                         unionMember.xmlLine,
+                         "union member <" + unionMember.name + "> uses selection <" + selection + "> that is not part of the selector type <" +
+                           selectorIt->type.type + ">" );
+        }
+      }
+    }
+
+    // check that each member type is known
+    checkForError( m_types.find( member.type.type ) != m_types.end(), member.xmlLine, "struct member uses unknown type <" + member.type.type + ">" );
+
+    // check that any used constant is a known constant
+    if ( !member.usedConstant.empty() )
+    {
+      checkForError( m_constants.find( member.usedConstant ) != m_constants.end(),
+                     member.xmlLine,
+                     "struct member array size uses unknown constant <" + member.usedConstant + ">" );
+    }
+
+    // checks if a value is specified
+    if ( !member.value.empty() )
+    {
+      auto enumIt = m_enums.find( member.type.type );
+      if ( enumIt != m_enums.end() )
+      {
+        // check that the value exists in the specified enum
+        checkForError( std::find_if( enumIt->second.values.begin(),
+                                     enumIt->second.values.end(),
+                                     [&member]( auto const & evd ) { return member.value == evd.name; } ) != enumIt->second.values.end(),
+                       member.xmlLine,
+                       "value <" + member.value + "> for member <" + member.name + "> in structure <" + structureName + "> of enum type <" + member.type.type +
+                         "> not listed" );
+        // special handling for sType: no value should appear more than once
+        if ( member.name == "sType" )
+        {
+          checkForError( sTypeValues.insert( member.value ).second, member.xmlLine, "sType value <" + member.value + "> has been used before" );
+        }
+      }
+      else if ( member.type.type == "uint32_t" )
+      {
+        // check that a value for a uint32_t is all digits
+        checkForError( member.value.find_first_not_of( "0123456789" ) == std::string::npos,
+                       member.xmlLine,
+                       "value <" + member.value + "> for member <" + member.name + "> in structure <" + structureName + "> of type <" + member.type.type +
+                         "> is not a number" );
+      }
+      else
+      {
+        // don't know the type of the value -> error out
+        checkForError( false,
+                       member.xmlLine,
+                       "member <" + member.name + "> in structure <" + structureName + "> holds value <" + member.value + "> for an unhandled type <" +
+                         member.type.type + ">" );
+      }
+    }
+  }
+}
+
+// Combine the data types of the return parameters into the single return type of a
+// generated function: vector-returned parameters are wrapped in std::vector (with an
+// Allocator template argument unless raii or unique), and multiple return values are
+// combined into std::pair. For enumerating commands the counter is dropped.
+std::string VulkanHppGenerator::combineDataTypes( std::map<size_t, VectorParamData> const & vectorParams,
+                                                  std::vector<size_t> const &               returnParams,
+                                                  bool                                      enumerating,
+                                                  std::vector<std::string> const &          dataTypes,
+                                                  CommandFlavourFlags                       flavourFlags,
+                                                  bool                                      raii ) const
+{
+  assert( dataTypes.size() == returnParams.size() );
+
+  // wrap each vector-returned data type in std::vector<...>; singular flavour keeps the plain type
+  std::vector<std::string> modifiedDataTypes( dataTypes.size() );
+  for ( size_t i = 0; i < returnParams.size(); ++i )
+  {
+    auto vectorParamIt   = vectorParams.find( returnParams[i] );
+    modifiedDataTypes[i] = ( vectorParamIt == vectorParams.end() || ( flavourFlags & CommandFlavourFlagBits::singular ) )
+                           ? dataTypes[i]
+                           : ( "std::vector<" + dataTypes[i] +
+                               ( raii || ( flavourFlags & CommandFlavourFlagBits::unique )
+                                   ? ">"
+                                   : ( ", " + startUpperCase( stripPrefix( dataTypes[i], "VULKAN_HPP_NAMESPACE::" ) ) + "Allocator>" ) ) );
+  }
+
+  std::string combinedType;
+  switch ( modifiedDataTypes.size() )
+  {
+    case 0: combinedType = "void"; break;
+    case 1: combinedType = modifiedDataTypes[0]; break;
+    case 2:
+      // when enumerating, returnParams[0] is the counter of the vector at returnParams[1]
+      // and is not part of the combined return type
+      assert( !enumerating || ( ( vectorParams.find( returnParams[1] ) != vectorParams.end() ) &&
+                                ( vectorParams.find( returnParams[1] )->second.lenParam == returnParams[0] ) ) );
+      combinedType = enumerating ? modifiedDataTypes[1] : ( "std::pair<" + modifiedDataTypes[0] + ", " + modifiedDataTypes[1] + ">" );
+      break;
+    case 3:
+      // three return params only occur when enumerating two vectors sharing one counter
+      assert( enumerating && ( vectorParams.size() == 2 ) && ( vectorParams.begin()->first == returnParams[1] ) &&
+              ( vectorParams.begin()->second.lenParam == returnParams[0] ) && ( std::next( vectorParams.begin() )->first == returnParams[2] ) &&
+              ( std::next( vectorParams.begin() )->second.lenParam == returnParams[0] ) );
+      combinedType = "std::pair<" + modifiedDataTypes[1] + ", " + modifiedDataTypes[2] + ">";
+      break;
+    default: assert( false ); break;
+  }
+  return combinedType;
+}
+
+// Recursively check if <type> is a struct that has an array member, directly or nested.
+// The self-reference guard (memberIt->type.type != type) mirrors containsFuncPointer and
+// prevents infinite recursion on structs with members of their own type (pNext-style links).
+bool VulkanHppGenerator::containsArray( std::string const & type ) const
+{
+  auto structureIt = m_structures.find( type );
+  bool found       = false;
+  if ( structureIt != m_structures.end() )
+  {
+    for ( auto memberIt = structureIt->second.members.begin(); memberIt != structureIt->second.members.end() && !found; ++memberIt )
+    {
+      found = !memberIt->arraySizes.empty() || ( ( memberIt->type.type != type ) && containsArray( memberIt->type.type ) );
+    }
+  }
+  return found;
+}
+
+// Recursively check if <type> is a struct that has a funcpointer member, directly or nested.
+// The memberIt->type.type != type test prevents infinite recursion on structs with members
+// of their own type.
+bool VulkanHppGenerator::containsFuncPointer( std::string const & type ) const
+{
+  // a simple recursive check if a type contains a funcpointer
+  auto structureIt = m_structures.find( type );
+  bool found       = false;
+  if ( structureIt != m_structures.end() )
+  {
+    for ( auto memberIt = structureIt->second.members.begin(); memberIt != structureIt->second.members.end() && !found; ++memberIt )
+    {
+      found = ( m_funcPointers.find( memberIt->type.type ) != m_funcPointers.end() ) ||
+              ( ( memberIt->type.type != type ) && containsFuncPointer( memberIt->type.type ) );
+    }
+  }
+  return found;
+}
+
+// Return true iff any member is a plain (by-value, non-pointer) float or double.
+bool VulkanHppGenerator::containsFloatingPoints( std::vector<MemberData> const & members ) const
+{
+  return std::any_of( members.begin(),
+                      members.end(),
+                      []( MemberData const & member )
+                      { return ( ( member.type.type == "float" ) || ( member.type.type == "double" ) ) && member.type.isValue(); } );
+}
+
+// Recursively determine whether <type> is a union, or is a struct holding a union by value.
+bool VulkanHppGenerator::containsUnion( std::string const & type ) const
+{
+  auto structureIt = m_structures.find( type );
+  if ( structureIt == m_structures.end() )
+  {
+    return false;
+  }
+  if ( structureIt->second.isUnion )
+  {
+    return true;
+  }
+  // only by-value members can embed a union; pointer members do not count
+  for ( auto const & member : structureIt->second.members )
+  {
+    if ( member.type.isValue() && containsUnion( member.type.type ) )
+    {
+      return true;
+    }
+  }
+  return false;
+}
+
+// Collect the indices of all const-pointer parameters. A few special types come in as
+// non-const pointers but are semantically const, and are included as well.
+std::vector<size_t> VulkanHppGenerator::determineConstPointerParams( std::vector<ParamData> const & params ) const
+{
+  std::vector<size_t> constPointerParams;
+  for ( size_t idx = 0; idx < params.size(); ++idx )
+  {
+    ParamData const & param     = params[idx];
+    bool              isSpecial = param.type.isNonConstPointer() && ( specialPointerTypes.find( param.type.type ) != specialPointerTypes.end() );
+    if ( param.type.isConstPointer() || isSpecial )
+    {
+      constPointerParams.push_back( idx );
+    }
+  }
+  return constPointerParams;
+}
+
+// Determine the C++ data type for each return parameter:
+//  - a templated vector whose data and counter are both returned is treated as a byte blob
+//    (uint8_t), otherwise a templated parameter gets a "<Name>Type" template-parameter name
+//  - all other return parameters use their composed Vulkan-HPP type with the trailing '*'
+//    stripped (the pointer is the out-parameter mechanism, not part of the value type)
+std::vector<std::string> VulkanHppGenerator::determineDataTypes( std::vector<VulkanHppGenerator::ParamData> const & params,
+                                                                 std::map<size_t, VectorParamData> const &          vectorParams,
+                                                                 std::vector<size_t> const &                        returnParams,
+                                                                 std::set<size_t> const &                           templatedParams ) const
+{
+  std::vector<std::string> dataTypes;
+  for ( auto rp : returnParams )
+  {
+    if ( templatedParams.find( rp ) != templatedParams.end() )
+    {
+      auto vectorParamIt = vectorParams.find( rp );
+      if ( ( vectorParamIt != vectorParams.end() ) && ( std::find( returnParams.begin(), returnParams.end(), vectorParamIt->first ) != returnParams.end() ) &&
+           ( std::find( returnParams.begin(), returnParams.end(), vectorParamIt->second.lenParam ) != returnParams.end() ) )
+      {
+        dataTypes.push_back( "uint8_t" );
+      }
+      else
+      {
+        // e.g. parameter "pData" -> template type name "DataType"
+        dataTypes.push_back( ( stripPrefix( params[rp].name, "p" ) + "Type" ) );
+      }
+    }
+    else
+    {
+      dataTypes.push_back( trimEnd( stripPostfix( params[rp].type.compose( "VULKAN_HPP_NAMESPACE" ), "*" ) ) );
+    }
+  }
+  return dataTypes;
+}
+
+// Determine the index of the first parameter in the trailing run of parameters that are
+// either optional or skipped -- these are the ones that can carry default arguments.
+// Returns INVALID_INDEX if no trailing parameter qualifies.
+size_t VulkanHppGenerator::determineDefaultStartIndex( std::vector<ParamData> const & params, std::set<size_t> const & skippedParams ) const
+{
+  size_t defaultStartIndex = INVALID_INDEX;
+  int    i                 = static_cast<int>( params.size() ) - 1;
+  while ( 0 <= i )
+  {
+    // a mandatory, non-skipped parameter ends the trailing run
+    if ( !params[i].optional && ( skippedParams.find( i ) == skippedParams.end() ) )
+    {
+      break;
+    }
+    defaultStartIndex = i;
+    --i;
+  }
+  return defaultStartIndex;
+}
+
+// A command is considered to enumerate data if at least one vector parameter has both its data
+// pointer and its counter listed among the return parameters.
+bool VulkanHppGenerator::determineEnumeration( std::map<size_t, VectorParamData> const & vectorParams, std::vector<size_t> const & returnParams ) const
+{
+  auto isReturned = [&returnParams]( size_t idx ) { return std::find( returnParams.begin(), returnParams.end(), idx ) != returnParams.end(); };
+  return std::any_of( vectorParams.begin(),
+                      vectorParams.end(),
+                      [&isReturned]( auto const & vp ) { return isReturned( vp.first ) && isReturned( vp.second.lenParam ); } );
+}
+
+// Determine the number of leading arguments that are supplied by the dispatching object:
+// 0 -> the command is not bound to an instance or a device (the owning handle has no name)
+// 1 -> the command is bound to an instance or a device
+// 2 -> the command has been moved to a second-level handle
+size_t VulkanHppGenerator::determineInitialSkipCount( std::string const & command ) const
+{
+  auto commandIt = m_commands.find( command );
+  assert( commandIt != m_commands.end() );
+  auto handleIt = m_handles.find( commandIt->second.handle );
+  assert( handleIt != m_handles.end() );
+  if ( handleIt->second.commands.find( command ) != handleIt->second.commands.end() )
+  {
+    return handleIt->first.empty() ? 0 : 1;
+  }
+  // the command lives on the handle given by its second parameter
+  assert( 1 < commandIt->second.params.size() );
+  assert( m_handles.find( commandIt->second.params[1].type.type ) != m_handles.end() );
+  return 2;
+}
+
+// Collect the indices of all parameters that are potential return values: non-const pointers,
+// except for the special pointer types that always come as non-const pointers but are never
+// meant to be returned.
+std::vector<size_t> VulkanHppGenerator::determineReturnParams( std::vector<ParamData> const & params ) const
+{
+  std::vector<size_t> returnParamIndices;
+  for ( size_t idx = 0; idx < params.size(); ++idx )
+  {
+    auto const & typeInfo = params[idx].type;
+    if ( typeInfo.isNonConstPointer() && ( specialPointerTypes.find( typeInfo.type ) == specialPointerTypes.end() ) )
+    {
+      returnParamIndices.push_back( idx );
+    }
+  }
+  return returnParamIndices;
+}
+
+// Collect all commands that can act as a constructor for the given RAII handle type: commands
+// that return the handle via a non-const pointer AND provide every piece of information the
+// destructor needs (so the RAII wrapper can later destroy what it constructed).
+// destructorIt may be m_commands.end() for handle types without a destructor.
+std::vector<std::map<std::string, VulkanHppGenerator::CommandData>::const_iterator>
+  VulkanHppGenerator::determineRAIIHandleConstructors( std::string const & handleType, std::map<std::string, CommandData>::const_iterator destructorIt ) const
+{
+  std::vector<std::map<std::string, CommandData>::const_iterator> constructorIts;
+  // a candidate is any command with a non-const pointer parameter of the handle type
+  auto                                                            isConstructorCandidate = [&handleType]( std::pair<std::string, CommandData> const & cd )
+  {
+    return std::find_if( cd.second.params.begin(),
+                         cd.second.params.end(),
+                         [&handleType]( ParamData const & pd )
+                         { return ( pd.type.type == handleType ) && pd.type.isNonConstPointer(); } ) != cd.second.params.end();
+  };
+  for ( auto commandIt = m_commands.begin(); commandIt != m_commands.end(); )
+  {
+    // find the commands that get a non-const pointer to the handleType, that is, return a handle type
+    commandIt = std::find_if( commandIt, m_commands.end(), isConstructorCandidate );
+    if ( commandIt != m_commands.end() )
+    {
+      // only commands that provide all information needed for the destructor can be considered a constructor!
+      bool valid = true;
+      if ( destructorIt != m_commands.end() )
+      {
+        // get the destructors parameter to the handleType
+        auto desctructorHandleParamIt = std::find_if( destructorIt->second.params.begin(),
+                                                      destructorIt->second.params.end(),
+                                                      [&handleType]( ParamData const & pd ) { return pd.type.type == handleType; } );
+        assert( desctructorHandleParamIt != destructorIt->second.params.end() );
+
+        // lambda to check if a destructor parameter is a parameter of the constructor candidate
+        // (or it's just the len parameter, which is not needed for the constructor)
+        auto isConstructorCandidateParam = [&desctructorHandleParamIt, &commandIt, this]( ParamData const & destructorParam )
+        {
+          // check if the destructor param type equals this param type, or, if this param type is a struct, is part of
+          // that struct
+          auto isDestructorParamType = [&destructorParam, this]( ParamData const & pd )
+          {
+            if ( pd.type.type != destructorParam.type.type )
+            {
+              // check if the destructor param type equals a structure member type
+              auto structureIt = m_structures.find( pd.type.type );
+              return ( structureIt != m_structures.end() ) &&
+                     ( findStructMemberItByType( destructorParam.type.type, structureIt->second.members ) != structureIt->second.members.end() );
+            }
+            return true;
+          };
+
+          return ( destructorParam.name == desctructorHandleParamIt->len ) ||
+                 ( std::find_if( commandIt->second.params.begin(), commandIt->second.params.end(), isDestructorParamType ) != commandIt->second.params.end() );
+        };
+
+        // the constructor candidate is valid, if none of the (relevant) destructor parameters is missing in the
+        // constructor candidate params
+        valid = ( std::find_if_not( destructorIt->second.params.begin(), destructorIt->second.params.end(), isConstructorCandidateParam ) ==
+                  destructorIt->second.params.end() );
+      }
+      if ( valid )
+      {
+        // filter out alias functions
+        if ( commandIt->second.alias.empty() )
+        {
+          constructorIts.push_back( commandIt );
+        }
+      }
+      ++commandIt;
+    }
+  }
+  // every RAII handle type is expected to have at least one constructor command
+  assert( !constructorIts.empty() );
+  return constructorIts;
+}
+
+// Look up the canonical destruction command for a handle type, trying the usual naming schemes
+// in order: vkDestroyX, vkFreeXs, vkReleaseX, then a couple of hard-coded special cases.
+// Returns m_commands.end() for handle types that have no destructor at all.
+std::map<std::string, VulkanHppGenerator::CommandData>::const_iterator VulkanHppGenerator::determineRAIIHandleDestructor( std::string const & handleType ) const
+{
+  std::string const type = stripPrefix( handleType, "Vk" );
+  for ( auto const & candidate : { "vkDestroy" + type, "vkFree" + type + "s", "vkRelease" + type } )
+  {
+    auto destructorIt = m_commands.find( candidate );
+    if ( destructorIt != m_commands.end() )
+    {
+      return destructorIt;
+    }
+  }
+  if ( handleType == "VkDeviceMemory" )
+  {
+    // special handling for vkDeviceMemory
+    auto destructorIt = m_commands.find( "vkFreeMemory" );
+    assert( destructorIt != m_commands.end() );
+    return destructorIt;
+  }
+  if ( handleType == "VkDisplayKHR" )
+  {
+    // special handling for VkDisplayKHR
+    auto destructorIt = m_commands.find( "vkReleaseDisplayEXT" );
+    assert( destructorIt != m_commands.end() );
+    return destructorIt;
+  }
+  // these handle types are known to have no destructor
+  assert( ( handleType == "VkDisplayModeKHR" ) || ( handleType == "VkPhysicalDevice" ) || ( handleType == "VkQueue" ) );
+  return m_commands.end();
+}
+
+// For the "singular" flavour of a command, collect the return vector's length parameter together
+// with every vector parameter that shares this same length parameter.
+std::set<size_t> VulkanHppGenerator::determineSingularParams( size_t returnParam, std::map<size_t, VectorParamData> const & vectorParams ) const
+{
+  auto returnVectorIt = vectorParams.find( returnParam );
+  assert( returnVectorIt != vectorParams.end() );
+  size_t const     sharedLen = returnVectorIt->second.lenParam;
+  std::set<size_t> singularParams{ sharedLen };
+  for ( auto const & [paramIndex, vectorData] : vectorParams )
+  {
+    if ( vectorData.lenParam == sharedLen )
+    {
+      singularParams.insert( paramIndex );
+    }
+  }
+  return singularParams;
+}
+
+// Determine the set of parameters that do not appear in the enhanced function signature:
+// the initial handle parameter(s), length/stride parameters that are implied by an ArrayProxy,
+// and all return parameters (which become local variables / the return value).
+std::set<size_t> VulkanHppGenerator::determineSkippedParams( std::vector<ParamData> const &            params,
+                                                             size_t                                    initialSkipCount,
+                                                             std::map<size_t, VectorParamData> const & vectorParams,
+                                                             std::vector<size_t> const &               returnParams,
+                                                             bool                                      singular ) const
+{
+  // skip the initial skips (get fed by the object)
+  assert( initialSkipCount <= params.size() );
+  std::set<size_t> skippedParams;
+  for ( size_t i = 0; i < initialSkipCount; ++i )
+  {
+    skippedParams.insert( i );
+  }
+
+  // skip the size parameters (get derived from an array), and a stride parameter
+  for ( auto const & vpi : vectorParams )
+  {
+    assert( !params[vpi.first].len.empty() );
+    // the length is skipped when the vector is an input (not a return parameter) whose len is
+    // itself a parameter, or - in the singular flavour - when the length is passed by value
+    if ( ( ( std::find_if( returnParams.begin(), returnParams.end(), [&vpi]( size_t rpi ) { return vpi.first == rpi; } ) == returnParams.end() ) &&
+           isParam( params[vpi.first].len, params ) ) ||
+         ( singular && params[vpi.second.lenParam].type.isValue() ) )
+    {
+      skippedParams.insert( vpi.second.lenParam );
+    }
+    if ( vpi.second.strideParam != INVALID_INDEX )
+    {
+      skippedParams.insert( vpi.second.strideParam );
+    }
+  }
+
+  // skip the return parameters (get resolved by local variables to be returned)
+  skippedParams.insert( returnParams.begin(), returnParams.end() );
+
+  return skippedParams;
+}
+
+// Find a struct in m_structures whose complete member list is an exact prefix (by type and name)
+// of the given structure's members; returns its name, or "" if there is none.
+// Structures starting with an sType member are never considered.
+std::string VulkanHppGenerator::determineSubStruct( std::pair<std::string, StructureData> const & structure ) const
+{
+  if ( structure.second.members.front().name != "sType" )
+  {
+    // check if sd is a substruct of structure
+    auto isSubStruct = [&structure]( std::pair<std::string, StructureData> const & sd )
+    {
+      // member-by-member comparison of type and name
+      // NOTE: memberIt is captured by reference and advanced inside isMember, so the comparison
+      // walks both member lists in lock-step
+      auto memberIt = structure.second.members.begin();
+      auto isMember = [&memberIt]( MemberData const & md )
+      {
+        if ( ( md.type == memberIt->type ) && ( md.name == memberIt->name ) )
+        {
+          ++memberIt;
+          return true;
+        }
+        return false;
+      };
+
+      return ( sd.second.members.size() < structure.second.members.size() ) &&
+             ( std::find_if_not( sd.second.members.begin(), sd.second.members.end(), isMember ) == sd.second.members.end() );
+    };
+
+    // look for a struct in m_structures that starts identically to structure
+    auto structIt = std::find_if( m_structures.begin(), m_structures.end(), isSubStruct );
+    return ( structIt == m_structures.end() ) ? "" : structIt->first;
+  }
+  return "";
+}
+
+// Map each array-like parameter (one with a non-trivial len attribute) to the indices of the
+// parameters that hold its length and, if present, its stride.
+std::map<size_t, VulkanHppGenerator::VectorParamData> VulkanHppGenerator::determineVectorParams( std::vector<ParamData> const & params ) const
+{
+  std::map<size_t, VectorParamData> vectorParams;
+
+  // look for the parameters whose len equals the name of an other parameter
+  for ( size_t i = 0; i < params.size(); i++ )
+  {
+    if ( !params[i].len.empty() && ( params[i].len != "null-terminated" ) )
+    {
+      VectorParamData & vpd = vectorParams[i];
+
+      std::string len;
+      if ( altLens.find( params[i].len ) != altLens.end() )
+      {
+        // the only supported altlen expression so far; its length source is the "samples" parameter
+        checkForError( params[i].len == "(samples + 31) / 32", params[i].xmlLine, "unknown command parameter len <" + params[i].len + ">" );
+        len = "samples";
+      }
+      else
+      {
+        len = params[i].len;
+      }
+      // the length is either a parameter itself or a member of a struct parameter
+      auto lenIt =
+        std::find_if( params.begin(), params.end(), [&len, this]( auto const & pd ) { return ( len == pd.name ) || isLenByStructMember( len, pd ); } );
+      assert( lenIt != params.end() );
+      vpd.lenParam = std::distance( params.begin(), lenIt );
+      if ( !params[i].stride.empty() )
+      {
+        std::string const & stride   = params[i].stride;
+        auto                strideIt = std::find_if( params.begin(), params.end(), [&stride]( auto const & pd ) { return stride == pd.name; } );
+        assert( strideIt != params.end() );
+        vpd.strideParam = std::distance( params.begin(), strideIt );
+      }
+    }
+  }
+  return vectorParams;
+}
+
+// Collect the indices of all void-pointer parameters that are not double pointers
+// (i.e. any non-value "void" parameter whose postfix is not "**").
+std::set<size_t> VulkanHppGenerator::determineVoidPointerParams( std::vector<ParamData> const & params ) const
+{
+  std::set<size_t> voidPointerParams;
+  for ( size_t idx = 0; idx < params.size(); ++idx )
+  {
+    auto const & typeInfo = params[idx].type;
+    if ( ( typeInfo.type == "void" ) && !typeInfo.isValue() && ( typeInfo.postfix != "**" ) )
+    {
+      voidPointerParams.insert( idx );
+    }
+  }
+  return voidPointerParams;
+}
+
+// For the RAII handles, move commands whose second parameter is a required handle from the
+// first-level handle (instance/device) down to that second-level handle (Queue, Event, ...).
+// Commands in specialFunctions are left untouched. Note the erase-while-iterating pattern on
+// handle.second.commands: the iterator is only advanced when no command was moved.
+void VulkanHppGenerator::distributeSecondLevelCommands( std::set<std::string> const & specialFunctions )
+{
+  // distribute commands from instance/device to second-level handles, like Queue, Event,... for RAII handles
+  for ( auto & handle : m_handles )
+  {
+    if ( !handle.first.empty() )
+    {
+      for ( auto command = handle.second.commands.begin(); command != handle.second.commands.end(); )
+      {
+        bool foundCommand = false;
+        if ( specialFunctions.find( *command ) == specialFunctions.end() )
+        {
+          auto commandIt = m_commands.find( *command );
+          assert( commandIt != m_commands.end() );
+          assert( commandIt->second.params.front().type.type == handle.first );
+          if ( ( 1 < commandIt->second.params.size() ) && ( isHandleType( commandIt->second.params[1].type.type ) ) && !commandIt->second.params[1].optional )
+          {
+            auto handleIt = m_handles.find( commandIt->second.params[1].type.type );
+            assert( handleIt != m_handles.end() );
+            // filter out functions seem to fit due to taking handles as first and second argument, but the first argument is not the
+            // type to create the second one, and so it's unknown to the raii handle!
+            assert( !handleIt->second.constructorIts.empty() );
+            if ( ( *handleIt->second.constructorIts.begin() )->second.handle == handle.first )
+            {
+              // all constructors of the second-level handle must agree on the owning handle
+              assert( std::find_if( handleIt->second.constructorIts.begin(),
+                                    handleIt->second.constructorIts.end(),
+                                    [&handle]( auto const & constructorIt )
+                                    { return constructorIt->second.handle != handle.first; } ) == handleIt->second.constructorIts.end() );
+              handleIt->second.secondLevelCommands.insert( *command );
+              command      = handle.second.commands.erase( command );
+              foundCommand = true;
+            }
+          }
+        }
+        if ( !foundCommand )
+        {
+          ++command;
+        }
+      }
+    }
+  }
+}
+
+// Follow the alias chain starting at aliasName until a name is reached that is not an alias
+// itself; that name is the base name.
+std::string VulkanHppGenerator::findBaseName( std::string aliasName, std::map<std::string, EnumAliasData> const & aliases ) const
+{
+  std::string base = aliasName;
+  for ( auto aliasIt = aliases.find( base ); aliasIt != aliases.end(); aliasIt = aliases.find( base ) )
+  {
+    base = aliasIt->second.name;
+  }
+  return base;
+}
+
+// Locate a struct member by its name; returns memberData.end() when no member matches.
+std::vector<VulkanHppGenerator::MemberData>::const_iterator VulkanHppGenerator::findStructMemberIt( std::string const &             name,
+                                                                                                    std::vector<MemberData> const & memberData ) const
+{
+  auto hasName = [&name]( MemberData const & member ) { return member.name == name; };
+  return std::find_if( memberData.begin(), memberData.end(), hasName );
+}
+
+// Locate the first struct member with the given type; returns memberData.end() when none matches.
+std::vector<VulkanHppGenerator::MemberData>::const_iterator VulkanHppGenerator::findStructMemberItByType( std::string const &             type,
+                                                                                                          std::vector<MemberData> const & memberData ) const
+{
+  auto hasType = [&type]( MemberData const & member ) { return member.type.type == type; };
+  return std::find_if( memberData.begin(), memberData.end(), hasType );
+}
+
+// Generate the allocator template parameters for a command's enhanced signature.
+// Returns a pair of (allocatorTemplates, uniqueHandleAllocatorTemplates): for the "unique"
+// flavour the allocator templates have to appear after the Dispatch template, so they are moved
+// into the second member; otherwise the second member stays empty.
+std::pair<std::string, std::string> VulkanHppGenerator::generateAllocatorTemplates( std::vector<size_t> const &               returnParams,
+                                                                                    std::vector<std::string> const &          returnDataTypes,
+                                                                                    std::map<size_t, VectorParamData> const & vectorParams,
+                                                                                    CommandFlavourFlags                       flavourFlags,
+                                                                                    bool                                      definition ) const
+{
+  bool chained  = flavourFlags & CommandFlavourFlagBits::chained;
+  bool singular = flavourFlags & CommandFlavourFlagBits::singular;
+  bool unique   = flavourFlags & CommandFlavourFlagBits::unique;
+
+  assert( returnParams.size() == returnDataTypes.size() );
+  std::string allocatorTemplates;
+  // the singular flavour returns a single element, so it needs no allocator at all
+  if ( !singular )
+  {
+    for ( size_t i = 0; i < returnParams.size(); i++ )
+    {
+      // only vector-valued return parameters get an allocator template
+      if ( vectorParams.find( returnParams[i] ) != vectorParams.end() )
+      {
+        if ( chained )
+        {
+          allocatorTemplates += "typename StructureChainAllocator";
+          if ( !definition )
+          {
+            // default arguments appear in the declaration only
+            allocatorTemplates += " = std::allocator<StructureChain>";
+          }
+        }
+        else
+        {
+          allocatorTemplates += "typename " + startUpperCase( stripPrefix( returnDataTypes[i], "VULKAN_HPP_NAMESPACE::" ) ) + "Allocator";
+          if ( !definition )
+          {
+            allocatorTemplates += " = std::allocator<" + ( unique ? ( "UniqueHandle<" + returnDataTypes[i] + ", Dispatch>" ) : returnDataTypes[i] ) + ">";
+          }
+        }
+        allocatorTemplates += ", ";
+      }
+    }
+  }
+  std::string uniqueHandleAllocatorTemplates;
+  if ( unique && !allocatorTemplates.empty() )
+  {
+    // for unique handles the allocator templates follow the Dispatch template
+    uniqueHandleAllocatorTemplates = ", " + stripPostfix( allocatorTemplates, ", " );
+    allocatorTemplates.clear();
+  }
+  return std::make_pair( allocatorTemplates, uniqueHandleAllocatorTemplates );
+}
+
+// Generate the argument list of the enhanced version of a command: skipped parameters are
+// omitted, const pointers become references / Optional<> / std::string / ArrayProxy arguments,
+// non-const pointers become output references, and - depending on the flavour - allocator and
+// dispatcher arguments are appended. Default arguments are emitted in declarations only
+// (definition == false).
+std::string VulkanHppGenerator::generateArgumentListEnhanced( std::vector<ParamData> const &            params,
+                                                              std::vector<size_t> const &               returnParams,
+                                                              std::map<size_t, VectorParamData> const & vectorParams,
+                                                              std::set<size_t> const &                  skippedParams,
+                                                              std::set<size_t> const &                  singularParams,
+                                                              std::set<size_t> const &                  templatedParams,
+                                                              bool                                      definition,
+                                                              CommandFlavourFlags                       flavourFlags,
+                                                              bool                                      withDispatcher ) const
+{
+  bool withAllocators = flavourFlags & CommandFlavourFlagBits::withAllocator;
+
+  // with explicit allocators no parameter gets a default (~0 never compares <= a valid index)
+  size_t defaultStartIndex = withAllocators ? ~0 : determineDefaultStartIndex( params, skippedParams );
+
+  std::string argumentList;
+  bool        encounteredArgument = false;
+  for ( size_t i = 0; i < params.size(); ++i )
+  {
+    if ( skippedParams.find( i ) == skippedParams.end() )
+    {
+      if ( encounteredArgument )
+      {
+        argumentList += ", ";
+      }
+      bool hasDefaultAssignment = false;
+
+      std::string composedType = params[i].type.compose( "VULKAN_HPP_NAMESPACE" );
+
+      if ( singularParams.find( i ) != singularParams.end() )
+      {
+        // singular flavour: the vector argument collapses to a single element by reference
+        assert( !params[i].optional );
+        assert( params[i].type.isConstPointer() && !params[i].len.empty() && !isLenByStructMember( params[i].len, params ) &&
+                params[i].type.type.starts_with( "Vk" ) );
+        assert( !isHandleType( params[i].type.type ) );
+        assert( composedType.ends_with( " *" ) );
+        argumentList += stripPostfix( composedType, " *" ) + " & " + stripPluralS( startLowerCase( stripPrefix( params[i].name, "p" ) ), m_tags );
+      }
+      else if ( params[i].type.isConstPointer() )
+      {
+        assert( composedType.ends_with( " *" ) );
+        std::string name = startLowerCase( stripPrefix( params[i].name, "p" ) );
+        if ( params[i].len.empty() )
+        {
+          // single-element const pointer: pass by (optional) reference or as a templated type
+          assert( withDispatcher || !isHandleType( params[i].type.type ) );
+          assert( !params[i].type.prefix.empty() && ( params[i].type.postfix == "*" ) );
+          assert( params[i].arraySizes.empty() );
+          if ( params[i].type.type == "void" )
+          {
+            argumentList += ( templatedParams.find( i ) == templatedParams.end() ) ? ( composedType + " " + params[i].name )
+                                                                                   : ( stripPrefix( params[i].name, "p" ) + "Type const & " + name );
+          }
+          else if ( params[i].optional )
+          {
+            argumentList += "Optional<" + stripPostfix( composedType, " *" ) + "> " + name +
+                            ( ( definition || withAllocators ) ? "" : " VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT" );
+            hasDefaultAssignment = true;
+          }
+          else
+          {
+            argumentList += stripPostfix( composedType, " *" ) + " & " + name;
+          }
+        }
+        else
+        {
+          // a const-pointer with a non-empty len is either null-terminated (aka a string) or represented by an
+          // ArrayProxy
+          assert( params[i].arraySizes.empty() );
+          if ( params[i].len == "null-terminated" )
+          {
+            assert( params[i].type.type == "char" );
+            if ( params[i].optional )
+            {
+              argumentList +=
+                "Optional<const std::string> " + name + ( ( definition || withAllocators ) ? "" : " VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT" );
+              hasDefaultAssignment = true;
+            }
+            else
+            {
+              argumentList += "const std::string & " + name;
+            }
+          }
+          else
+          {
+            // an ArrayProxy also covers no data, so any optional flag can be ignored here
+            std::string type = stripPostfix( composedType, " *" );
+            size_t      pos  = type.find( "void" );
+            if ( pos != std::string::npos )
+            {
+              // a templated void buffer: substitute the element type for "void"
+              type.replace( pos, 4, stripPrefix( params[i].name, "p" ) + "Type" );
+            }
+            argumentList +=
+              std::string( "VULKAN_HPP_NAMESPACE::" ) + ( params[i].stride.empty() ? "" : "Strided" ) + "ArrayProxy<" + type + "> const & " + name;
+            if ( params[i].optional && !definition )
+            {
+              assert( params[i].stride.empty() );
+              argumentList += " VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT";
+              hasDefaultAssignment = true;
+            }
+          }
+        }
+      }
+      else if ( params[i].type.isNonConstPointer() )
+      {
+        // non-const pointer that was not skipped as a return parameter: an in/out reference
+        assert( withDispatcher || !isHandleType( params[i].type.type ) );
+        assert( params[i].len.empty() && !params[i].optional );
+        assert( composedType.ends_with( " *" ) );
+        argumentList += stripPostfix( composedType, " *" ) + " & " + params[i].name;
+      }
+      else
+      {
+        // plain value parameter, possibly with C array sizes
+        assert( params[i].type.isValue() );
+        argumentList += composedType + " " + params[i].name + generateCArraySizes( params[i].arraySizes );
+      }
+      argumentList += std::string( !definition && ( defaultStartIndex <= i ) && !hasDefaultAssignment ? " VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT" : "" );
+      encounteredArgument = true;
+    }
+  }
+  if ( withAllocators )
+  {
+    // append explicit allocator arguments for the withAllocator flavour
+    if ( flavourFlags & CommandFlavourFlagBits::chained )
+    {
+      if ( encounteredArgument )
+      {
+        argumentList += ", ";
+      }
+      argumentList += "StructureChainAllocator & structureChainAllocator";
+      encounteredArgument = true;
+    }
+    else
+    {
+      // one allocator per skipped vector return parameter
+      for ( auto sp : skippedParams )
+      {
+        if ( !params[sp].len.empty() )
+        {
+          if ( encounteredArgument )
+          {
+            argumentList += ", ";
+          }
+          std::string type;
+          if ( templatedParams.find( sp ) != templatedParams.end() )
+          {
+            auto vectorParamIt = vectorParams.find( sp );
+            if ( ( vectorParamIt != vectorParams.end() ) &&
+                 ( std::find( returnParams.begin(), returnParams.end(), vectorParamIt->first ) != returnParams.end() ) &&
+                 ( std::find( returnParams.begin(), returnParams.end(), vectorParamIt->second.lenParam ) != returnParams.end() ) )
+            {
+              // enumerated templated vector comes back as bytes (see determineDataTypes)
+              type = "Uint8_t";
+            }
+            else
+            {
+              type = stripPrefix( params[sp].name, "p" ) + "Type";
+            }
+          }
+          else
+          {
+            type = ( params[sp].type.type == "void" ) ? "Uint8_t" : startUpperCase( stripPrefix( params[sp].type.type, "Vk" ) );
+          }
+          argumentList += type + "Allocator & " + startLowerCase( type ) + "Allocator";
+          encounteredArgument = true;
+        }
+      }
+    }
+  }
+  if ( withDispatcher )
+  {
+    // the dispatcher is always the last argument; it gets a default in declarations only
+    if ( encounteredArgument )
+    {
+      argumentList += ", ";
+    }
+    argumentList += std::string( "Dispatch const & d" ) + ( definition ? "" : " VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT" );
+  }
+  return argumentList;
+}
+
+// Generate the argument list of the standard (C-like) version of a command: every non-skipped
+// parameter with its composed type, name and C array sizes, followed by the dispatcher argument.
+std::string VulkanHppGenerator::generateArgumentListStandard( std::vector<ParamData> const & params, std::set<size_t> const & skippedParams ) const
+{
+  std::string arguments;
+  size_t      idx = 0;
+  for ( auto const & param : params )
+  {
+    if ( skippedParams.find( idx ) == skippedParams.end() )
+    {
+      arguments += param.type.compose( "VULKAN_HPP_NAMESPACE" ) + " " + param.name + generateCArraySizes( param.arraySizes ) + ", ";
+    }
+    ++idx;
+  }
+  arguments += "Dispatch const & d ";
+  return arguments;
+}
+
+// Generate the template parameter list for a command's templated arguments ("pName" ->
+// "typename NameType"), or the StructureChain templates for the chained flavour. For RAII
+// functions (raii == true) the list is wrapped into a complete "template <...>" clause.
+std::string VulkanHppGenerator::generateArgumentTemplates( std::vector<ParamData> const &            params,
+                                                           std::vector<size_t> const &               returnParams,
+                                                           std::map<size_t, VectorParamData> const & vectorParams,
+                                                           std::set<size_t> const &                  templatedParams,
+                                                           CommandFlavourFlags                       flavourFlags,
+                                                           bool                                      raii ) const
+{
+  std::string argumentTemplates;
+  if ( !templatedParams.empty() )
+  {
+    assert( !( flavourFlags & CommandFlavourFlagBits::chained ) );
+    for ( auto t : templatedParams )
+    {
+      assert( params[t].name.starts_with( "p" ) );
+      auto vectorParamIt = vectorParams.find( t );
+      if ( ( vectorParamIt == vectorParams.end() ) || ( std::find( returnParams.begin(), returnParams.end(), vectorParamIt->first ) == returnParams.end() ) ||
+           ( std::find( returnParams.begin(), returnParams.end(), vectorParamIt->second.lenParam ) == returnParams.end() ) )
+      {
+        // only templated parameters that are not part of an enumeration are really templated
+        argumentTemplates += "typename " + stripPrefix( params[t].name, "p" ) + "Type, ";
+      }
+    }
+  }
+  else if ( flavourFlags & CommandFlavourFlagBits::chained )
+  {
+    // a single chained return uses the X/Y/Z... StructureChain form, multiple returns the generic one
+    argumentTemplates = ( returnParams.size() == 1 ) ? "typename X, typename Y, typename... Z, " : "typename StructureChain, ";
+  }
+  if ( !argumentTemplates.empty() && raii )
+  {
+    argumentTemplates = "template <" + stripPostfix( argumentTemplates, ", " ) + ">";
+  }
+  return argumentTemplates;
+}
+
+// Generate the "BASE TYPEs" section: one using-alias per Vulkan base type, except for VkFlags
+// and VkFlags64, which are represented by the generator's own Flags class.
+std::string VulkanHppGenerator::generateBaseTypes() const
+{
+  assert( !m_baseTypes.empty() );
+  const std::string basetypesTemplate = R"(
+  //==================
+  //=== BASE TYPEs ===
+  //==================
+
+${basetypes}
+)";
+
+  std::string basetypes;
+  for ( auto const & baseType : m_baseTypes )
+  {
+    // filter out VkFlags and VkFlags64, as they are mapped to our own Flags class
+    if ( ( baseType.first != "VkFlags" ) && ( baseType.first != "VkFlags64" ) )
+    {
+      basetypes += "  using " + stripPrefix( baseType.first, "Vk" ) + " = " + baseType.second.typeInfo.compose( "VULKAN_HPP_NAMESPACE" ) + ";\n";
+    }
+  }
+
+  return replaceWithMap( basetypesTemplate, { { "basetypes", basetypes } } );
+}
+
+// Generate the Flags alias, optional using-alias, and FlagTraits specialization for one bitmask.
+// surroundingProtect is the #define guard already active around this code; per-value protection
+// is only emitted when a value's protect differs from it.
+std::string VulkanHppGenerator::generateBitmask( std::map<std::string, BitmaskData>::const_iterator bitmaskIt, std::string const & surroundingProtect ) const
+{
+  auto bitmaskBitsIt = m_enums.find( bitmaskIt->second.requirements );
+  assert( bitmaskBitsIt != m_enums.end() );
+
+  std::string bitmaskName = stripPrefix( bitmaskIt->first, "Vk" );
+  std::string enumName    = stripPrefix( bitmaskBitsIt->first, "Vk" );
+  std::string alias       = bitmaskIt->second.alias.empty() ? "" : ( "  using " + stripPrefix( bitmaskIt->second.alias, "Vk" ) + " = " + bitmaskName + ";\n" );
+
+  std::string allFlags;
+  if ( bitmaskBitsIt->second.values.empty() )
+  {
+    // a bitmask without any values: allFlags is empty
+    allFlags = " {};";
+  }
+  else
+  {
+    // or-together all values, opening/closing protection blocks only where the protect changes
+    bool        encounteredFlag = false;
+    std::string previousEnter, previousLeave;
+    for ( auto const & value : bitmaskBitsIt->second.values )
+    {
+      // determine the values protect, if any
+      std::string valueProtect = getProtect( value );
+
+      // if the value's protect differs from the surrounding protect, generate protection code
+      std::string enter, leave;
+      if ( !valueProtect.empty() && ( valueProtect != surroundingProtect ) )
+      {
+        tie( enter, leave ) = generateProtection( valueProtect );
+      }
+      std::string valueName = generateEnumValueName( bitmaskBitsIt->first, value.name, true, m_tags );
+      allFlags +=
+        ( ( previousEnter != enter ) ? ( "\n" + previousLeave + enter ) : "\n" ) + "        " + ( encounteredFlag ? "| " : "  " ) + enumName + "::" + valueName;
+      encounteredFlag = true;
+      previousEnter   = enter;
+      previousLeave   = leave;
+    }
+    if ( !previousLeave.empty() )
+    {
+      // close the protection block left open by the last value
+      allFlags += "\n" + previousLeave;
+    }
+    allFlags += ";";
+  }
+
+  static const std::string bitmaskTemplate = R"(
+  using ${bitmaskName} = Flags<${enumName}>;
+${alias}
+
+  template <> struct FlagTraits<${enumName}>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ${bitmaskName} allFlags = ${allFlags}
+  };
+)";
+
+  return replaceWithMap( bitmaskTemplate, { { "alias", alias }, { "allFlags", allFlags }, { "bitmaskName", bitmaskName }, { "enumName", enumName } } );
+}
+
+// Generate the complete "BITMASKs to_string" section, walking all features first and then all
+// extensions (ordered by number); listedBitmasks prevents emitting the same bitmask twice.
+std::string VulkanHppGenerator::generateBitmasksToString() const
+{
+  const std::string bitmasksToStringTemplate = R"(
+  //==========================
+  //=== BITMASKs to_string ===
+  //==========================
+
+${bitmasksToString}
+)";
+
+  std::string           bitmasksToString;
+  std::set<std::string> listedBitmasks;
+  for ( auto const & feature : m_features )
+  {
+    bitmasksToString += generateBitmasksToString( feature.second.requireData, listedBitmasks, feature.first );
+  }
+  for ( auto const & extIt : m_extensionsByNumber )
+  {
+    bitmasksToString += generateBitmasksToString( extIt.second->second.requireData, listedBitmasks, extIt.second->first );
+  }
+
+  return replaceWithMap( bitmasksToStringTemplate, { { "bitmasksToString", bitmasksToString } } );
+}
+
+std::string VulkanHppGenerator::generateBitmasksToString( std::vector<RequireData> const & requireData,
+                                                          std::set<std::string> &          listedBitmasks,
+                                                          std::string const &              title ) const
+{
+  // Generate the to_string functions for all bitmasks required by the given feature/extension
+  // that have not been listed yet, wrapped into the title comment and its protection defines.
+  std::string toStringFunctions;
+  for ( auto const & require : requireData )
+  {
+    for ( auto const & type : require.types )
+    {
+      auto bitmaskIt = m_bitmasks.find( type );
+      // insert().second is false when this bitmask was already listed by an earlier require block
+      if ( ( bitmaskIt != m_bitmasks.end() ) && listedBitmasks.insert( type ).second )
+      {
+        toStringFunctions += generateBitmaskToString( bitmaskIt );
+      }
+    }
+  }
+  return addTitleAndProtection( title, toStringFunctions );
+}
+
+std::string VulkanHppGenerator::generateBitmaskToString( std::map<std::string, BitmaskData>::const_iterator bitmaskIt ) const
+{
+  // Generate the to_string function for the given bitmask, appending the name of every set
+  // single-bit value; per-value protection defines are emitted around the corresponding checks.
+  auto bitmaskBitsIt = m_enums.find( bitmaskIt->second.requirements );
+  assert( bitmaskBitsIt != m_enums.end() );
+
+  std::string bitmaskName = stripPrefix( bitmaskIt->first, "Vk" );
+  std::string enumName    = stripPrefix( bitmaskBitsIt->first, "Vk" );
+
+  std::string str;
+  if ( bitmaskBitsIt->second.values.empty() )
+  {
+    // no values at all: to_string unconditionally returns the empty-flags representation
+    // (made "static const" for consistency with the template in the else-branch)
+    static const std::string bitmaskToStringTemplate = R"(
+  VULKAN_HPP_INLINE std::string to_string( ${bitmaskName} )
+  {
+    return "{}";
+  }
+)";
+    str += replaceWithMap( bitmaskToStringTemplate, { { "bitmaskName", bitmaskName } } );
+  }
+  else
+  {
+    static const std::string bitmaskToStringTemplate = R"(
+  VULKAN_HPP_INLINE std::string to_string( ${bitmaskName} value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+${toStringChecks}
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+)";
+
+    std::string toStringChecks;
+    std::string previousEnter, previousLeave;
+    for ( auto const & value : bitmaskBitsIt->second.values )
+    {
+      auto [enter, leave]   = generateProtection( getProtect( value ) );
+      std::string valueName = generateEnumValueName( bitmaskBitsIt->first, value.name, true, m_tags );
+      if ( value.singleBit )
+      {
+        // close the previous protection scope and open a new one only when the protection changes
+        toStringChecks += ( ( previousEnter != enter ) ? ( previousLeave + enter ) : "" ) + "    if ( value & " + enumName + "::" + valueName +
+                          " ) result += \"" + valueName.substr( 1 ) + " | \";\n";
+      }
+      previousEnter = enter;
+      previousLeave = leave;
+    }
+    if ( !previousLeave.empty() )
+    {
+      // the last value was protected -> close its protection scope
+      // (the former resize() that stripped the trailing newline was a dead store, as
+      // previousLeave is never read again; it has been removed)
+      assert( previousLeave.ends_with( "\n" ) );
+      toStringChecks += previousLeave;
+    }
+
+    str += replaceWithMap( bitmaskToStringTemplate, { { "bitmaskName", bitmaskName }, { "toStringChecks", toStringChecks } } );
+  }
+
+  return str;
+}
+
+std::string VulkanHppGenerator::generateCallArgumentsEnhanced( CommandData const &      commandData,
+                                                               size_t                   initialSkipCount,
+                                                               bool                     nonConstPointerAsNullptr,
+                                                               std::set<size_t> const & singularParams,
+                                                               std::set<size_t> const & templatedParams,
+                                                               bool                     raiiHandleMemberFunction ) const
+{
+  // Compose the comma-separated argument list used to call the underlying vk-function from an
+  // enhanced wrapper. The first initialSkipCount parameters are not user-provided arguments but
+  // the handle(s) held by the wrapping class; the rest are generated per-parameter below.
+  assert( initialSkipCount <= commandData.params.size() );
+  std::string arguments;
+  bool        encounteredArgument = false;
+  if ( raiiHandleMemberFunction )
+  {
+    // RAII handle members are stored as wrapped types and have to be cast back to the C type
+    switch ( initialSkipCount )
+    {
+      case 1:
+        // just the owning handle, e.g. "static_cast<VkDevice>( m_device )"
+        assert( isHandleType( commandData.params[0].type.type ) && commandData.params[0].type.isValue() );
+        assert( commandData.params[0].arraySizes.empty() && commandData.params[0].len.empty() );
+        assert( commandData.params[0].type.type == commandData.handle );
+        arguments           = "static_cast<" + commandData.handle + ">( m_" + startLowerCase( stripPrefix( commandData.handle, "Vk" ) ) + " )";
+        encounteredArgument = true;
+        break;
+      case 2:
+        {
+          // the owning handle plus the handle the function operates on (looked up via the
+          // destructor information of that second handle)
+          assert( isHandleType( commandData.params[0].type.type ) && commandData.params[0].type.isValue() );
+          assert( commandData.params[0].arraySizes.empty() && commandData.params[0].len.empty() );
+          assert( commandData.params[0].type.type == commandData.handle );
+          auto handleIt = m_handles.find( commandData.params[1].type.type );
+          assert( handleIt != m_handles.end() );
+          arguments = "static_cast<" + commandData.handle + ">( m_" + startLowerCase( stripPrefix( commandData.handle, "Vk" ) ) + " )";
+
+          assert( commandData.params[1].type.isValue() && commandData.params[1].arraySizes.empty() && commandData.params[1].len.empty() );
+          arguments += ", static_cast<" + commandData.params[1].type.type + ">( m_" +
+                       generateRAIIHandleConstructorParamName( handleIt->first, handleIt->second.destructorIt ) + " )";
+          encounteredArgument = true;
+        }
+        break;
+    }
+  }
+  else
+  {
+    // non-RAII wrappers pass their member handles directly, e.g. "m_device"
+    for ( size_t i = 0; i < initialSkipCount; ++i )
+    {
+      if ( encounteredArgument )
+      {
+        arguments += ", ";
+      }
+      assert( isHandleType( commandData.params[i].type.type ) && commandData.params[i].type.isValue() );
+      assert( commandData.params[i].arraySizes.empty() && commandData.params[i].len.empty() );
+      arguments += "m_" + startLowerCase( stripPrefix( commandData.params[i].type.type, "Vk" ) );
+      encounteredArgument = true;
+    }
+  }
+  // the remaining parameters are the explicit arguments of the wrapper function
+  for ( size_t i = initialSkipCount; i < commandData.params.size(); ++i )
+  {
+    if ( encounteredArgument )
+    {
+      arguments += ", ";
+    }
+    arguments += generateCallArgumentEnhanced( commandData.params, i, nonConstPointerAsNullptr, singularParams, templatedParams );
+    encounteredArgument = true;
+  }
+  return arguments;
+}
+
+std::string VulkanHppGenerator::generateCallArgumentsRAIIFactory( std::vector<ParamData> const & params,
+                                                                  size_t                         initialSkipCount,
+                                                                  std::set<size_t> const &       skippedParams,
+                                                                  std::set<size_t> const &       singularParams ) const
+{
+  // Compose the argument list used when a RAII factory function is forwarded to: "*this"
+  // followed by all non-skipped parameters, except for the very last one.
+  assert( initialSkipCount <= params.size() );
+  std::string arguments = "*this";
+  for ( size_t i = initialSkipCount; i + 1 < params.size(); ++i )
+  {
+    if ( skippedParams.find( i ) != skippedParams.end() )
+    {
+      continue;
+    }
+    std::string argument = params[i].name;
+    if ( params[i].type.isValue() )
+    {
+      // value parameters keep their C name and are never singularized
+      assert( singularParams.find( i ) == singularParams.end() );
+    }
+    else
+    {
+      // pointer parameters are represented by their "p"-stripped C++ name
+      argument = startLowerCase( stripPrefix( argument, "p" ) );
+      if ( singularParams.find( i ) != singularParams.end() )
+      {
+        argument = stripPluralS( argument, m_tags );
+      }
+    }
+    arguments += ", " + argument;
+  }
+  return arguments;
+}
+
+std::string VulkanHppGenerator::generateCallArgumentsStandard( std::string const & handle, std::vector<ParamData> const & params ) const
+{
+  // Compose the argument list for the plain (non-enhanced) call of the vk-function: the member
+  // handle is passed as "m_<handle>", Vk-typed arguments are cast from their C++ wrappers.
+  std::vector<std::string> argumentList;
+  argumentList.reserve( params.size() );
+  for ( auto const & param : params )
+  {
+    if ( ( param.type.type == handle ) && param.type.isValue() )
+    {
+      // the implicitly provided member handle
+      assert( param.arraySizes.empty() && param.len.empty() );
+      argumentList.push_back( "m_" + startLowerCase( stripPrefix( param.type.type, "Vk" ) ) );
+    }
+    else if ( !param.type.type.starts_with( "Vk" ) )
+    {
+      // non-Vk types are passed through unchanged
+      argumentList.push_back( param.name );
+    }
+    else if ( !param.arraySizes.empty() )
+    {
+      // a C-style array of Vk-typed values decays to a pointer
+      assert( param.arraySizes.size() == 1 );
+      assert( param.type.isValue() );
+      assert( param.type.postfix.empty() );
+      argumentList.push_back( "reinterpret_cast<" + param.type.compose( "" ) + " *>( " + param.name + " )" );
+    }
+    else if ( param.type.isValue() )
+    {
+      argumentList.push_back( "static_cast<" + param.type.type + ">( " + param.name + " )" );
+    }
+    else
+    {
+      assert( !param.type.postfix.empty() );
+      argumentList.push_back( "reinterpret_cast<" + param.type.compose( "" ) + ">( " + param.name + " )" );
+    }
+  }
+  std::string arguments;
+  for ( auto const & argument : argumentList )
+  {
+    arguments += ( arguments.empty() ? "" : ", " ) + argument;
+  }
+  return arguments;
+}
+
+std::string VulkanHppGenerator::generateCallArgumentEnhanced( std::vector<ParamData> const & params,
+                                                              size_t                         paramIndex,
+                                                              bool                           nonConstPointerAsNullptr,
+                                                              std::set<size_t> const &       singularParams,
+                                                              std::set<size_t> const &       templatedParams ) const
+{
+  // Dispatch on the pointer-ness of the parameter. Some special pointer types are treated as
+  // const-pointers even though they are not syntactically const.
+  ParamData const & param            = params[paramIndex];
+  const bool        isSpecialPointer = ( specialPointerTypes.find( param.type.type ) != specialPointerTypes.end() );
+
+  std::string argument;
+  if ( param.type.isConstPointer() || isSpecialPointer )
+  {
+    argument = generateCallArgumentEnhancedConstPointer( param, paramIndex, singularParams, templatedParams );
+  }
+  else if ( param.type.isNonConstPointer() )
+  {
+    // a genuine non-const pointer, i.e. an output parameter
+    argument = generateCallArgumentEnhancedNonConstPointer( param, paramIndex, nonConstPointerAsNullptr, singularParams );
+  }
+  else
+  {
+    argument = generateCallArgumentEnhancedValue( params, paramIndex, singularParams );
+  }
+  assert( !argument.empty() );
+  return argument;
+}
+
+std::string VulkanHppGenerator::generateCallArgumentEnhancedConstPointer( ParamData const &        param,
+                                                                          size_t                   paramIndex,
+                                                                          std::set<size_t> const & singularParams,
+                                                                          std::set<size_t> const & templatedParams ) const
+{
+  // Generate the call argument for a const-pointer parameter: depending on its len attribute, it
+  // is represented on the C++ side by a single value, a string, or an array-like type, and has to
+  // be converted back to the C pointer the vk-function expects.
+  std::string argument;
+  std::string name = startLowerCase( stripPrefix( param.name, "p" ) );
+  if ( isHandleType( param.type.type ) && param.type.isValue() )
+  {
+    assert( !param.optional );
+    // if at all, this is the first argument, and it's the implicitly provided member handle
+    assert( paramIndex == 0 );
+    assert( param.arraySizes.empty() && param.len.empty() );
+    argument = "m_" + startLowerCase( stripPrefix( param.type.type, "Vk" ) );
+  }
+  else if ( param.len.empty() )
+  {
+    // this const-pointer parameter has no length, that is it's a const-pointer to a single value
+    if ( param.type.type == "void" )
+    {
+      // a templated void-pointer is represented by a typed value -> take its address and cast back
+      argument = ( templatedParams.find( paramIndex ) == templatedParams.end() )
+                 ? param.name
+                 : "reinterpret_cast<" + param.type.compose( "VULKAN_HPP_NAMESPACE" ) + ">( &" + name + " )";
+    }
+    else if ( param.optional )
+    {
+      // an optional value is held in a wrapper that converts to the pointer type via static_cast
+      // (presumably Optional<T> -- verify against the wrapper generation)
+      argument = "static_cast<" + param.type.compose( "VULKAN_HPP_NAMESPACE" ) + ">( " + name + " )";
+    }
+    else
+    {
+      argument = "&" + name;
+    }
+    if ( param.type.type.starts_with( "Vk" ) )
+    {
+      argument = "reinterpret_cast<" + param.type.compose( "" ) + ">( " + argument + " )";
+    }
+  }
+  else if ( param.len == "null-terminated" )
+  {
+    // this const-pointer parameter is "null-terminated", that is it's a string
+    assert( ( param.type.type == "char" ) && param.arraySizes.empty() );
+    if ( param.optional )
+    {
+      argument = name + " ? " + name + "->c_str() : nullptr";
+    }
+    else
+    {
+      argument = name + ".c_str()";
+    }
+  }
+  else
+  {
+    // this const-pointer parameter has some explicit length
+    if ( singularParams.find( paramIndex ) != singularParams.end() )
+    {
+      // the singular version gets a single element -> pass its (singularized) address
+      assert( !param.optional );
+      argument = "&" + stripPluralS( name, m_tags );
+    }
+    else
+    {
+      // this const-parameter is represented by some array, where data() also works with no data (optional)
+      argument = name + ".data()";
+    }
+    if ( param.type.type.starts_with( "Vk" ) || ( param.type.type == "void" ) )
+    {
+      argument = "reinterpret_cast<" + param.type.compose( "" ) + ">( " + argument + " )";
+    }
+  }
+  return argument;
+}
+
+std::string VulkanHppGenerator::generateCallArgumentEnhancedNonConstPointer( ParamData const &        param,
+                                                                             size_t                   paramIndex,
+                                                                             bool                     nonConstPointerAsNullptr,
+                                                                             std::set<size_t> const & singularParams ) const
+{
+  // Generate the call argument for a non-const pointer, i.e. an output parameter. It is backed by
+  // a local value or array in the wrapper; nonConstPointerAsNullptr makes array outputs nullptr
+  // (used for the size-query call of a two-call sequence, see generateCallSequence).
+  std::string argument;
+  std::string name = startLowerCase( stripPrefix( param.name, "p" ) );
+  if ( param.len.empty() )
+  {
+    // a single output value -> pass the address of the local variable
+    assert( param.arraySizes.empty() );
+    if ( param.type.type.starts_with( "Vk" ) )
+    {
+      argument = "reinterpret_cast<" + param.type.compose( "" ) + ">( &" + name + " )";
+    }
+    else
+    {
+      assert( !param.optional );
+      argument = "&" + name;
+    }
+  }
+  else
+  {
+    // the non-const pointer has a len -> it will be represented by some array
+    assert( param.arraySizes.empty() );
+    if ( nonConstPointerAsNullptr )
+    {
+      argument = "nullptr";
+    }
+    else
+    {
+      if ( singularParams.find( paramIndex ) != singularParams.end() )
+      {
+        // the singular version returns a single element -> pass its (singularized) address
+        argument = "&" + stripPluralS( name, m_tags );
+      }
+      else
+      {
+        // get the data of the array, which also covers no data -> no need to look at param.optional
+        argument = name + ".data()";
+      }
+      if ( param.type.type.starts_with( "Vk" ) || ( param.type.type == "void" ) )
+      {
+        argument = "reinterpret_cast<" + param.type.compose( "" ) + ">( " + argument + " )";
+      }
+    }
+  }
+  return argument;
+}
+
+std::string VulkanHppGenerator::generateCallArgumentEnhancedValue( std::vector<ParamData> const & params,
+                                                                   size_t                         paramIndex,
+                                                                   std::set<size_t> const &       singularParams ) const
+{
+  // Generate the call argument for a value parameter. Values that serve as the len or stride of
+  // another (array) parameter are not user-provided but derived from that array's size()/stride().
+  std::string       argument;
+  ParamData const & param = params[paramIndex];
+  assert( param.len.empty() );
+  if ( param.type.type.starts_with( "Vk" ) )
+  {
+    if ( param.arraySizes.empty() )
+    {
+      // a Vk-typed value that is the len of another parameter -> use that parameter's size
+      auto pointerIt = std::find_if( params.begin(), params.end(), [&param]( ParamData const & pd ) { return pd.len == param.name; } );
+      if ( pointerIt != params.end() )
+      {
+        assert( !param.optional );
+        argument = startLowerCase( stripPrefix( pointerIt->name, "p" ) ) + ".size()";
+        if ( pointerIt->type.type == "void" )
+        {
+          // void-arrays are sized in bytes, using the corresponding "<name>Type" template parameter
+          argument += " * sizeof( " + stripPrefix( pointerIt->name, "p" ) + "Type )";
+        }
+      }
+      else
+      {
+        // a plain Vk-typed value -> cast from the C++ wrapper type
+        argument = "static_cast<" + param.type.compose( "" ) + ">( " + param.name + " )";
+      }
+      // a Vk-typed value is never expected to be the stride of another parameter
+      assert( std::find_if( params.begin(), params.end(), [&param]( ParamData const & pd ) { return pd.stride == param.name; } ) == params.end() );
+    }
+    else
+    {
+      // a C-style array of Vk-typed values decays to a pointer
+      assert( !param.optional );
+      assert( param.arraySizes.size() == 1 );
+      assert( param.type.prefix == "const" );
+      argument = "reinterpret_cast<" + param.type.compose( "" ) + " *>( " + param.name + " )";
+    }
+  }
+  else
+  {
+    auto pointerIt = std::find_if( params.begin(), params.end(), [&param]( ParamData const & pd ) { return pd.len == param.name; } );
+    if ( pointerIt != params.end() )
+    {
+      // this parameter is the len of some other -> replace it with that parameter's size
+      assert( param.arraySizes.empty() );
+      assert( ( param.type.type == "size_t" ) || ( param.type.type == "uint32_t" ) );
+      if ( singularParams.find( paramIndex ) == singularParams.end() )
+      {
+        argument = startLowerCase( stripPrefix( pointerIt->name, "p" ) ) + ".size()";
+        if ( pointerIt->type.type == "void" )
+        {
+          argument += " * sizeof( " + stripPrefix( pointerIt->name, "p" ) + "Type )";
+        }
+      }
+      else
+      {
+        // singular version: exactly one element (or sizeof the element type for a void-array)
+        if ( pointerIt->type.type == "void" )
+        {
+          argument = "sizeof( " + stripPrefix( pointerIt->name, "p" ) + "Type )";
+        }
+        else
+        {
+          argument = "1";
+        }
+      }
+    }
+    else
+    {
+      assert( !param.optional );
+      assert( param.arraySizes.size() <= 1 );
+      pointerIt = std::find_if( params.begin(), params.end(), [&param]( ParamData const & pd ) { return pd.stride == param.name; } );
+      if ( pointerIt != params.end() )
+      {
+        // this parameter is the stride of some other -> replace it with that parameter's stride
+        assert( param.arraySizes.empty() );
+        assert( param.type.type == "uint32_t" );
+        argument = startLowerCase( stripPrefix( pointerIt->name, "p" ) ) + ".stride()";
+      }
+      else
+      {
+        // an ordinary value parameter -> pass it through unchanged
+        argument = param.name;
+      }
+    }
+  }
+  return argument;
+}
+
+std::string VulkanHppGenerator::generateCallSequence( std::string const &                       name,
+                                                      CommandData const &                       commandData,
+                                                      std::vector<size_t> const &               returnParams,
+                                                      std::map<size_t, VectorParamData> const & vectorParams,
+                                                      size_t                                    initialSkipCount,
+                                                      std::set<size_t> const &                  singularParams,
+                                                      std::set<size_t> const &                  templatedParams,
+                                                      CommandFlavourFlags                       flavourFlags,
+                                                      bool                                      raii ) const
+{
+  // Generate the actual call(s) of the vk-function inside an enhanced wrapper. For
+  // enumeration-style commands (a returned counter sizes a returned vector) a two-call sequence
+  // is generated: first query the count (vector pointers passed as nullptr), then fetch the data.
+  std::string dispatcher = raii ? "getDispatcher()->" : "d.";
+  // if at least one returnParam is a size value of a vector param (and no singular params), we need two calls
+  if ( singularParams.empty() &&
+       ( std::find_if( returnParams.begin(),
+                       returnParams.end(),
+                       [&vectorParams]( size_t rp )
+                       {
+                         return ( std::find_if( vectorParams.begin(), vectorParams.end(), [rp]( auto const & vp ) { return vp.second.lenParam == rp; } ) !=
+                                  vectorParams.end() );
+                       } ) != returnParams.end() ) )
+  {
+    // by convention here, returnParams[0] is the counter and returnParams[1] the vector it counts
+    auto vectorParamIt = vectorParams.find( returnParams[1] );
+    assert( ( vectorParamIt != vectorParams.end() ) && ( vectorParamIt->second.lenParam == returnParams[0] ) );
+
+    std::string firstCallArguments  = generateCallArgumentsEnhanced( commandData, initialSkipCount, true, {}, templatedParams, raii );
+    std::string secondCallArguments = generateCallArgumentsEnhanced( commandData, initialSkipCount, false, {}, templatedParams, raii );
+    std::string vectorName          = startLowerCase( stripPrefix( commandData.params[vectorParamIt->first].name, "p" ) );
+    std::string vectorSize          = startLowerCase( stripPrefix( commandData.params[vectorParamIt->second.lenParam].name, "p" ) );
+
+    if ( flavourFlags & CommandFlavourFlagBits::chained )
+    {
+      assert( vectorParams.size() == 1 );
+      // chained data needs some more handling!!
+      // each vector element's pNext has to be wired up to its structure chain before the second call
+      std::string vectorElementType = stripPostfix( commandData.params[vectorParamIt->first].type.compose( "VULKAN_HPP_NAMESPACE" ), " *" );
+
+      if ( commandData.returnType == "VkResult" )
+      {
+        // repeat the two-call sequence while the vector is too small (VK_INCOMPLETE)
+        const std::string callSequenceTemplate = R"(VkResult result;
+    do
+    {
+      result = ${dispatcher}${vkCommand}( ${firstCallArguments} );
+      if ( ( result == VK_SUCCESS ) && ${counterName} )
+      {
+        structureChains.resize( ${counterName} );
+        ${vectorName}.resize( ${counterName} );
+        for ( ${counterType} i = 0; i < ${counterName}; i++ )
+        {
+          ${vectorName}[i].pNext = structureChains[i].template get<${vectorElementType}>().pNext;
+        }
+        result = ${dispatcher}${vkCommand}( ${secondCallArguments} );
+      }
+    } while ( result == VK_INCOMPLETE );)";
+
+        return replaceWithMap( callSequenceTemplate,
+                               { { "counterName", startLowerCase( stripPrefix( commandData.params[vectorParamIt->second.lenParam].name, "p" ) ) },
+                                 { "counterType", commandData.params[vectorParamIt->second.lenParam].type.type },
+                                 { "dispatcher", dispatcher },
+                                 { "firstCallArguments", firstCallArguments },
+                                 { "secondCallArguments", secondCallArguments },
+                                 { "vectorElementType", vectorElementType },
+                                 { "vectorName", vectorName },
+                                 { "vkCommand", name } } );
+      }
+      else
+      {
+        // void-returning variant: a single two-call sequence suffices
+        const std::string callSequenceTemplate =
+          R"(${dispatcher}${vkCommand}( ${firstCallArguments} );
+    structureChains.resize( ${counterName} );
+    ${vectorName}.resize( ${counterName} );
+    for ( ${counterType} i = 0; i < ${counterName}; i++ )
+    {
+      ${vectorName}[i].pNext = structureChains[i].template get<${vectorElementType}>().pNext;
+    }
+    ${dispatcher}${vkCommand}( ${secondCallArguments} );)";
+
+        return replaceWithMap( callSequenceTemplate,
+                               { { "counterName", startLowerCase( stripPrefix( commandData.params[vectorParamIt->second.lenParam].name, "p" ) ) },
+                                 { "counterType", commandData.params[vectorParamIt->second.lenParam].type.type },
+                                 { "dispatcher", dispatcher },
+                                 { "firstCallArguments", firstCallArguments },
+                                 { "secondCallArguments", secondCallArguments },
+                                 { "vectorElementType", vectorElementType },
+                                 { "vectorName", vectorName },
+                                 { "vkCommand", name } } );
+      }
+    }
+    else if ( commandData.returnType == "VkResult" )
+    {
+      // the success codes are exactly { VK_SUCCESS, VK_INCOMPLETE } -> loop until the count is stable
+      assert( ( commandData.successCodes.size() == 2 ) && ( commandData.successCodes[0] == "VK_SUCCESS" ) &&
+              ( commandData.successCodes[1] == "VK_INCOMPLETE" ) );
+
+      // every returned vector is resized to its (also returned) counter between the two calls
+      std::string resizes;
+      for ( auto const & vp : vectorParams )
+      {
+        assert( ( std::find( returnParams.begin(), returnParams.end(), vp.first ) != returnParams.end() ) &&
+                ( std::find( returnParams.begin(), returnParams.end(), vp.second.lenParam ) != returnParams.end() ) );
+        resizes += startLowerCase( stripPrefix( commandData.params[vp.first].name, "p" ) ) + ".resize( " +
+                   startLowerCase( stripPrefix( commandData.params[vp.second.lenParam].name, "p" ) ) + " );\n";
+      }
+      resizes.pop_back();  // drop the trailing newline
+
+      std::string const callSequenceTemplate = R"(VkResult result;
+    do
+    {
+      result = ${dispatcher}${vkCommand}( ${firstCallArguments} );
+      if ( ( result == VK_SUCCESS ) && ${counterName} )
+      {
+        ${resizes}
+        result = ${dispatcher}${vkCommand}( ${secondCallArguments} );
+      }
+    } while ( result == VK_INCOMPLETE );)";
+
+      return replaceWithMap( callSequenceTemplate,
+                             { { "counterName", startLowerCase( stripPrefix( commandData.params[vectorParamIt->second.lenParam].name, "p" ) ) },
+                               { "dispatcher", dispatcher },
+                               { "firstCallArguments", firstCallArguments },
+                               { "secondCallArguments", secondCallArguments },
+                               { "resizes", resizes },
+                               { "vkCommand", name } } );
+    }
+    else
+    {
+      // no need to enumerate here, just two calls
+      assert( commandData.returnType == "void" );
+      std::string const callSequenceTemplate = R"(${dispatcher}${vkCommand}( ${firstCallArguments} );
+    ${vectorName}.resize( ${vectorSize} );
+    ${dispatcher}${vkCommand}( ${secondCallArguments} );)";
+
+      return replaceWithMap( callSequenceTemplate,
+                             { { "dispatcher", dispatcher },
+                               { "firstCallArguments", firstCallArguments },
+                               { "secondCallArguments", secondCallArguments },
+                               { "vectorName", vectorName },
+                               { "vectorSize", vectorSize },
+                               { "vkCommand", name } } );
+    }
+  }
+  else
+  {
+    // the simple case: a single call, with an optional result assignment
+    std::string const callSequenceTemplate = R"(${resultAssignment}${dispatcher}${vkCommand}( ${callArguments} );)";
+
+    std::string callArguments    = generateCallArgumentsEnhanced( commandData, initialSkipCount, false, singularParams, templatedParams, raii );
+    std::string resultAssignment = generateResultAssignment( commandData );
+
+    return replaceWithMap(
+      callSequenceTemplate,
+      { { "callArguments", callArguments }, { "dispatcher", dispatcher }, { "resultAssignment", resultAssignment }, { "vkCommand", name } } );
+  }
+}
+
+std::string VulkanHppGenerator::generateCommand( std::string const & name, CommandData const & commandData, size_t initialSkipCount, bool definition ) const
+{
+  // Top-level dispatcher: pick the generation function based on the command's return type, its
+  // success/error codes, and the number of return parameters.
+  if ( commandData.returnType == "VkResult" )
+  {
+    assert( !commandData.successCodes.empty() );
+    const bool singleSuccess = ( commandData.successCodes.size() == 1 );
+    const bool hasErrors     = !commandData.errorCodes.empty();
+    if ( singleSuccess )
+    {
+      return hasErrors ? generateCommandResultSingleSuccessWithErrors( name, commandData, initialSkipCount, definition )
+                       : generateCommandResultSingleSuccessNoErrors( name, commandData, initialSkipCount, definition );
+    }
+    return hasErrors ? generateCommandResultMultiSuccessWithErrors( name, commandData, initialSkipCount, definition )
+                     : generateCommandResultMultiSuccessNoErrors( name, commandData, initialSkipCount, definition );
+  }
+  if ( commandData.returnType != "void" )
+  {
+    return generateCommandValue( name, commandData, initialSkipCount, definition );
+  }
+
+  // void-returning commands are distinguished by their number of return (output) parameters
+  std::vector<size_t> returnParams = determineReturnParams( commandData.params );
+  switch ( returnParams.size() )
+  {
+    case 0: return generateCommandVoid0Return( name, commandData, initialSkipCount, definition );
+    case 1: return generateCommandVoid1Return( name, commandData, initialSkipCount, definition, returnParams[0] );
+    case 2: return generateCommandVoid2Return( name, commandData, initialSkipCount, definition, returnParams );
+  }
+
+  throw std::runtime_error( "Never encountered a function like <" + name + "> !" );
+}
+
+std::string VulkanHppGenerator::generateCommandDefinitions() const
+{
+  // Compose the definitions of all commands, grouped by feature first and by extension second.
+  // The listedCommands set prevents commands listed with more than one extension from being
+  // emitted twice.
+  std::set<std::string> listedCommands;
+  std::string           definitions;
+  for ( auto const & featureIt : m_features )
+  {
+    definitions += generateCommandDefinitions( featureIt.second.requireData, listedCommands, featureIt.first );
+  }
+  for ( auto const & extensionIt : m_extensionsByNumber )
+  {
+    definitions += generateCommandDefinitions( extensionIt.second->second.requireData, listedCommands, extensionIt.second->first );
+  }
+
+  static const std::string commandDefinitionsTemplate = R"(
+  //===========================
+  //=== COMMAND Definitions ===
+  //===========================
+
+${commandDefinitions}
+)";
+
+  return replaceWithMap( commandDefinitionsTemplate, { { "commandDefinitions", definitions } } );
+}
+
+std::string VulkanHppGenerator::generateCommandDefinitions( std::vector<RequireData> const & requireData,
+                                                            std::set<std::string> &          listedCommands,
+                                                            std::string const &              title ) const
+{
+  // Generate the definitions of all commands required by the given feature/extension that have
+  // not been listed yet, wrapped into the title comment and its protection defines.
+  std::string definitions;
+  for ( auto const & require : requireData )
+  {
+    for ( auto const & command : require.commands )
+    {
+      if ( !listedCommands.insert( command ).second )
+      {
+        continue;  // this command was already emitted for an earlier feature/extension
+      }
+      auto commandIt = m_commands.find( command );
+      assert( commandIt != m_commands.end() );
+      definitions += generateCommandDefinitions( command, commandIt->second.handle );
+    }
+  }
+  return addTitleAndProtection( title, definitions );
+}
+
+std::string VulkanHppGenerator::generateCommandDefinitions( std::string const & command, std::string const & handle ) const
+{
+  // Generate the definition of the given command. For vkDestroy*/vkFree*/
+  // vkReleasePerformanceConfigurationINTEL commands, an additional overload with the shortened
+  // name "destroy"/"free"/"release" is generated by textual substitution on the generated code.
+  auto commandIt = m_commands.find( command );
+  assert( commandIt != m_commands.end() );
+
+  std::string str = "\n" + generateCommand( commandIt->first, commandIt->second, handle.empty() ? 0 : 1, true );
+
+  // special handling for destroy functions, filter out alias functions
+  std::string commandName = generateCommandName( commandIt->first, commandIt->second.params, 1, m_tags );
+  if ( commandIt->second.alias.empty() &&
+       ( ( ( commandIt->first.substr( 2, 7 ) == "Destroy" ) && ( commandName != "destroy" ) ) || ( commandIt->first.substr( 2, 4 ) == "Free" ) ||
+         ( commandIt->first == "vkReleasePerformanceConfigurationINTEL" ) ) )
+  {
+    CommandData commandData = commandIt->second;
+    assert( ( 1 < commandData.params.size() ) && ( commandData.params[0].type.type == handle ) );
+    commandData.params[1].optional = false;  // make sure, the object to destroy/free/release is not optional in the shortened version!
+
+    std::string destroyCommandString = generateCommand( commandIt->first, commandData, handle.empty() ? 0 : 1, true );
+    std::string shortenedName;
+    if ( commandIt->first.substr( 2, 7 ) == "Destroy" )
+    {
+      shortenedName = "destroy";
+    }
+    else if ( commandIt->first.substr( 2, 4 ) == "Free" )
+    {
+      shortenedName = "free";
+    }
+    else
+    {
+      assert( commandIt->first == "vkReleasePerformanceConfigurationINTEL" );
+      shortenedName = "release";
+    }
+    // replace every occurrence of the full command name by the shortened one
+    size_t pos = destroyCommandString.find( commandName );
+    while ( pos != std::string::npos )
+    {
+      destroyCommandString.replace( pos, commandName.length(), shortenedName );
+      pos = destroyCommandString.find( commandName, pos );
+    }
+
+    // special handling for "free", to prevent interfering with MSVC debug free!
+    if ( shortenedName == "free" )
+    {
+      // parenthesize "<Handle>::free" so the macro-expanded "free" is not picked up
+      std::string toEncloseString = stripPrefix( handle, "Vk" ) + "::free";
+      std::string enclosedString  = "( " + toEncloseString + " )";
+      pos                         = destroyCommandString.find( toEncloseString );
+      while ( pos != std::string::npos )
+      {
+        destroyCommandString.replace( pos, toEncloseString.length(), enclosedString );
+        pos = destroyCommandString.find( toEncloseString, pos + enclosedString.length() );
+      }
+    }
+
+    // we need to remove the default argument for the first argument, to prevent ambiguities!
+    assert( 1 < commandIt->second.params.size() );
+    pos = destroyCommandString.find( commandIt->second.params[1].name );  // skip the standard version of the function
+    assert( pos != std::string::npos );
+    pos = destroyCommandString.find( commandIt->second.params[1].name,
+                                     pos + 1 );  // get the argument to destroy in the advanced version
+    assert( pos != std::string::npos );
+    pos = destroyCommandString.find( " VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT", pos );
+    if ( pos != std::string::npos )
+    {
+      destroyCommandString.erase( pos, strlen( " VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT" ) );
+    }
+    str += "\n" + destroyCommandString;
+  }
+  return str;
+}
+
+// Generate the "enhanced" flavour of a command: the C++-style wrapper in which output
+// parameters become return values, (count, pointer) pairs become vectors, and the VkResult
+// is checked/translated.
+//   name             : the Vulkan command name ("vkXyz")
+//   commandData      : the registry data parsed for that command
+//   initialSkipCount : number of leading parameters covered by the owning handle class
+//   definition       : true -> generate the full definition, false -> only the declaration
+//   vectorParams     : vector-parameter index -> data about its length parameter
+//   returnParams     : indices of the parameters returned by value from the wrapper
+//   flavourFlags     : flavour modifiers (singular, unique, chained, withAllocator, ...)
+std::string VulkanHppGenerator::generateCommandEnhanced( std::string const &                       name,
+                                                         CommandData const &                       commandData,
+                                                         size_t                                    initialSkipCount,
+                                                         bool                                      definition,
+                                                         std::map<size_t, VectorParamData> const & vectorParams,
+                                                         std::vector<size_t> const &               returnParams,
+                                                         CommandFlavourFlags                       flavourFlags ) const
+{
+  bool singular = flavourFlags & CommandFlavourFlagBits::singular;
+
+  assert( vectorParams.empty() || ( vectorParams.begin()->second.lenParam != INVALID_INDEX ) );
+  assert( !singular || !returnParams.empty() );  // if singular is true, then there is at least one returnParam !
+
+  // classify the parameters: skipped in the wrapper signature, templated void pointers, and
+  // (for the singular flavour) the vector parameters degraded to a single element
+  std::set<size_t> skippedParams = determineSkippedParams( commandData.params, initialSkipCount, vectorParams, returnParams, singular );
+  // special handling for vkGetMemoryHostPointerPropertiesEXT: here, we really need to stick with the const void * parameter !
+  std::set<size_t> templatedParams = ( name == "vkGetMemoryHostPointerPropertiesEXT" ) ? std::set<size_t>() : determineVoidPointerParams( commandData.params );
+  std::set<size_t> singularParams  = singular ? determineSingularParams( returnParams[0], vectorParams ) : std::set<size_t>();
+  std::pair<bool, std::map<size_t, std::vector<size_t>>> vectorSizeCheck =
+    needsVectorSizeCheck( commandData.params, vectorParams, returnParams, singularParams );
+  bool enumerating = determineEnumeration( vectorParams, returnParams );
+
+  // determine the C++ type(s) the wrapper returns and combine them into the final return type
+  std::vector<std::string> dataTypes = determineDataTypes( commandData.params, vectorParams, returnParams, templatedParams );
+  std::string              dataType  = combineDataTypes( vectorParams, returnParams, enumerating, dataTypes, flavourFlags, false );
+
+  // generate the textual building blocks that are common to declaration and definition
+  std::string argumentTemplates = generateArgumentTemplates( commandData.params, returnParams, vectorParams, templatedParams, flavourFlags, false );
+  auto [allocatorTemplates, uniqueHandleAllocatorTemplates] = generateAllocatorTemplates( returnParams, dataTypes, vectorParams, flavourFlags, definition );
+  std::string typenameCheck                                 = generateTypenameCheck( returnParams, vectorParams, definition, dataTypes, flavourFlags );
+  std::string nodiscard      = generateNoDiscard( !returnParams.empty(), 1 < commandData.successCodes.size(), 1 < commandData.errorCodes.size() );
+  std::string returnType     = generateReturnType( commandData, returnParams, vectorParams, flavourFlags, false, dataType );
+  std::string className      = initialSkipCount ? stripPrefix( commandData.params[initialSkipCount - 1].type.type, "Vk" ) : "";
+  std::string classSeparator = commandData.handle.empty() ? "" : "::";
+  std::string commandName    = generateCommandName( name, commandData.params, initialSkipCount, m_tags, flavourFlags );
+  std::string argumentList   = generateArgumentListEnhanced(
+    commandData.params, returnParams, vectorParams, skippedParams, singularParams, templatedParams, definition, flavourFlags, true );
+  std::string constString    = commandData.handle.empty() ? "" : " const";
+  std::string noexceptString = generateNoExcept( commandData.errorCodes, returnParams, vectorParams, flavourFlags, vectorSizeCheck.first, false );
+
+  if ( definition )
+  {
+    // generate the individual body parts, then fill them into the definition template below
+    std::string vectorSizeCheckString =
+      vectorSizeCheck.first ? generateVectorSizeCheck( name, commandData, initialSkipCount, vectorSizeCheck.second, skippedParams, false ) : "";
+    std::string returnVariable   = generateReturnVariable( commandData, returnParams, vectorParams, flavourFlags );
+    std::string dataDeclarations = generateDataDeclarations(
+      commandData, returnParams, vectorParams, templatedParams, flavourFlags, false, dataTypes, dataType, returnType, returnVariable );
+    std::string dataPreparation =
+      generateDataPreparation( commandData, initialSkipCount, returnParams, vectorParams, templatedParams, flavourFlags, enumerating );
+    std::string dataSizeChecks = generateDataSizeChecks( commandData, returnParams, dataTypes, vectorParams, templatedParams, singular );
+    std::string callSequence =
+      generateCallSequence( name, commandData, returnParams, vectorParams, initialSkipCount, singularParams, templatedParams, flavourFlags, false );
+    std::string resultCheck     = generateResultCheck( commandData, className, classSeparator, commandName, enumerating );
+    std::string returnStatement = generateReturnStatement( name,
+                                                           commandData,
+                                                           returnVariable,
+                                                           returnType,
+                                                           dataType,
+                                                           initialSkipCount,
+                                                           returnParams.empty() ? INVALID_INDEX : returnParams[0],
+                                                           flavourFlags,
+                                                           enumerating,
+                                                           false );
+
+    std::string const functionTemplate =
+      R"(  template <${argumentTemplates}${allocatorTemplates}typename Dispatch${uniqueHandleAllocatorTemplates}${typenameCheck}>
+  ${nodiscard}VULKAN_HPP_INLINE ${returnType} ${className}${classSeparator}${commandName}( ${argumentList} )${const}${noexcept}
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+${vectorSizeCheck}
+    ${dataSizeChecks}
+    ${dataDeclarations}
+    ${callSequence}
+    ${resultCheck}
+    ${dataPreparation}
+    ${returnStatement}
+  })";
+
+    return replaceWithMap( functionTemplate,
+                           { { "allocatorTemplates", allocatorTemplates },
+                             { "argumentList", argumentList },
+                             { "argumentTemplates", argumentTemplates },
+                             { "callSequence", callSequence },
+                             { "className", className },
+                             { "classSeparator", classSeparator },
+                             { "commandName", commandName },
+                             { "const", constString },
+                             { "dataDeclarations", dataDeclarations },
+                             { "dataPreparation", dataPreparation },
+                             { "dataSizeChecks", dataSizeChecks },
+                             { "nodiscard", nodiscard },
+                             { "noexcept", noexceptString },
+                             { "resultCheck", resultCheck },
+                             { "returnStatement", returnStatement },
+                             { "returnType", returnType },
+                             { "typenameCheck", typenameCheck },
+                             { "uniqueHandleAllocatorTemplates", uniqueHandleAllocatorTemplates },
+                             { "vectorSizeCheck", vectorSizeCheckString } } );
+  }
+  else
+  {
+    // declaration only: no body, and the Dispatch template parameter gets its default argument here
+    std::string const functionTemplate =
+      R"(    template <${argumentTemplates}${allocatorTemplates}typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE${uniqueHandleAllocatorTemplates}${typenameCheck}>
+    ${nodiscard}${returnType} ${commandName}( ${argumentList} )${const}${noexcept};)";
+
+    return replaceWithMap( functionTemplate,
+                           { { "allocatorTemplates", allocatorTemplates },
+                             { "argumentList", argumentList },
+                             { "argumentTemplates", argumentTemplates },
+                             { "commandName", commandName },
+                             { "const", commandData.handle.empty() ? "" : " const" },
+                             { "nodiscard", nodiscard },
+                             { "noexcept", noexceptString },
+                             { "returnType", returnType },
+                             { "typenameCheck", typenameCheck },
+                             { "uniqueHandleAllocatorTemplates", uniqueHandleAllocatorTemplates } } );
+  }
+}
+
+// Derive the C++ member-function name from the Vulkan command name: strip the "vk" prefix
+// and remove the type names of the skipped (handle) arguments from it; flavour flags may
+// then modify the name (singular -> drop plural "s", unique -> append "Unique").
+std::string VulkanHppGenerator::generateCommandName( std::string const &            vulkanCommandName,
+                                                     std::vector<ParamData> const & params,
+                                                     size_t                         initialSkipCount,
+                                                     std::set<std::string> const &  tags,
+                                                     CommandFlavourFlags            flavourFlags ) const
+{
+  std::string commandName( startLowerCase( stripPrefix( vulkanCommandName, "vk" ) ) );
+  // walk the skipped arguments from last to first; i is unsigned, so decrementing 0 wraps around and ends the loop
+  for ( size_t i = initialSkipCount - 1; i < initialSkipCount; --i )  // count down to zero, then wrap around and stop
+  {
+    std::string const & argumentType = params[i].type.type;
+    std::string         searchName   = stripPrefix( argumentType, "Vk" );
+    // strip a vendor tag (KHR, EXT, ...) from the argument type before searching for it in the command name
+    std::string         argumentTag  = findTag( tags, argumentType );
+    if ( !argumentTag.empty() )
+    {
+      searchName = stripPostfix( searchName, argumentTag );
+    }
+    // look for the argument type inside the command name, first capitalized, then lower-cased
+    size_t pos = commandName.find( searchName );
+    if ( pos == std::string::npos )
+    {
+      searchName = startLowerCase( searchName );
+      pos        = commandName.find( searchName );
+    }
+    if ( pos != std::string::npos )
+    {
+      size_t len = searchName.length();
+      if ( commandName.find( searchName + "s" ) == pos )
+      {
+        // filter out any plural of the searchName as well!
+        ++len;
+      }
+      commandName.erase( pos, len );
+    }
+    else if ( ( searchName == "commandBuffer" ) && commandName.starts_with( "cmd" ) )
+    {
+      // commands on a VkCommandBuffer are prefixed with "cmd" instead of the full type name
+      commandName.erase( 0, 3 );
+      pos = 0;
+    }
+    if ( pos == 0 )
+    {
+      // the removed part was at the very beginning -> re-establish lowerCamelCase
+      commandName = startLowerCase( commandName );
+    }
+    // if command and argument carry the same vendor tag, drop the now-redundant tag from the command name
+    std::string commandTag = findTag( tags, commandName );
+    if ( !argumentTag.empty() && ( argumentTag == commandTag ) )
+    {
+      commandName = stripPostfix( commandName, argumentTag );
+    }
+  }
+  if ( flavourFlags & CommandFlavourFlagBits::singular )
+  {
+    // the singular flavour operates on a single element -> drop the plural "s"
+    commandName = stripPluralS( commandName, m_tags );
+  }
+  if ( flavourFlags & CommandFlavourFlagBits::unique )
+  {
+    commandName += "Unique";
+  }
+  return commandName;
+}
+
+// Generate the flavours of a VkResult-returning command with multiple success codes and no
+// error codes. Only zero or two return parameters are handled; anything else yields "".
+std::string VulkanHppGenerator::generateCommandResultMultiSuccessNoErrors( std::string const & name,
+                                                                           CommandData const & commandData,
+                                                                           size_t              initialSkipCount,
+                                                                           bool                definition ) const
+{
+  std::vector<size_t> const returnParams = determineReturnParams( commandData.params );
+  if ( returnParams.empty() )
+  {
+    return generateCommandResultMultiSuccessNoErrors0Return( name, commandData, initialSkipCount, definition );
+  }
+  if ( returnParams.size() == 2 )
+  {
+    return generateCommandResultMultiSuccessNoErrors2Return( name, commandData, initialSkipCount, definition, returnParams );
+  }
+  return "";
+}
+
+std::string VulkanHppGenerator::generateCommandResultMultiSuccessNoErrors0Return( std::string const & name,
+                                                                                  CommandData const & commandData,
+                                                                                  size_t              initialSkipCount,
+                                                                                  bool                definition ) const
+{
+  std::map<size_t, VectorParamData> vectorParams = determineVectorParams( commandData.params );
+  if ( vectorParams.empty() )
+  {
+    std::vector<size_t> constPointerParams = determineConstPointerParams( commandData.params );
+    if ( constPointerParams.empty() )
+    {
+      return generateCommandSet( generateCommandStandard( name, commandData, initialSkipCount, definition ),
+                                 generateCommandEnhanced( name, commandData, initialSkipCount, definition, {}, {} ) );
+    }
+  }
+  return "";
+}
+
+std::string VulkanHppGenerator::generateCommandResultMultiSuccessNoErrors2Return(
+  std::string const & name, CommandData const & commandData, size_t initialSkipCount, bool definition, std::vector<size_t> const & returnParams ) const
+{
+  if ( ( commandData.successCodes.size() == 2 ) && ( commandData.successCodes[0] == "VK_SUCCESS" ) && ( commandData.successCodes[1] == "VK_INCOMPLETE" ) )
+  {
+    if ( ( commandData.params[returnParams[0]].type.type == "size_t" ) || ( commandData.params[returnParams[0]].type.type == "uint32_t" ) )
+    {
+      if ( ( commandData.params[returnParams[1]].type.type != "void" ) && !isHandleType( commandData.params[returnParams[1]].type.type ) &&
+           !isStructureChainAnchor( commandData.params[returnParams[1]].type.type ) )
+      {
+        std::map<size_t, VectorParamData> vectorParams = determineVectorParams( commandData.params );
+        if ( vectorParams.size() == 1 )
+        {
+          if ( returnParams[0] == vectorParams.begin()->second.lenParam )
+          {
+            if ( returnParams[1] == vectorParams.begin()->first )
+            {
+              return generateCommandSet(
+                definition,
+                generateCommandStandard( name, commandData, initialSkipCount, definition ),
+                { generateCommandEnhanced( name, commandData, initialSkipCount, definition, vectorParams, returnParams ),
+                  generateCommandEnhanced(
+                    name, commandData, initialSkipCount, definition, vectorParams, returnParams, CommandFlavourFlagBits::withAllocator ) } );
+            }
+          }
+        }
+      }
+    }
+  }
+  return "";
+}
+
+// Generate the flavours of a VkResult-returning command with multiple success codes and at
+// least one error code, dispatching on the number of return parameters (up to three).
+std::string VulkanHppGenerator::generateCommandResultMultiSuccessWithErrors( std::string const & name,
+                                                                             CommandData const & commandData,
+                                                                             size_t              initialSkipCount,
+                                                                             bool                definition ) const
+{
+  std::vector<size_t> const returnParams = determineReturnParams( commandData.params );
+  size_t const              returnCount  = returnParams.size();
+  if ( returnCount == 0 )
+  {
+    return generateCommandResultWithErrors0Return( name, commandData, initialSkipCount, definition );
+  }
+  if ( returnCount == 1 )
+  {
+    return generateCommandResultMultiSuccessWithErrors1Return( name, commandData, initialSkipCount, definition, returnParams[0] );
+  }
+  if ( returnCount == 2 )
+  {
+    return generateCommandResultMultiSuccessWithErrors2Return( name, commandData, initialSkipCount, definition, returnParams );
+  }
+  if ( returnCount == 3 )
+  {
+    return generateCommandResultMultiSuccessWithErrors3Return( name, commandData, initialSkipCount, definition, returnParams );
+  }
+  return "";
+}
+
+// Generate the flavours of a multi-success/with-errors command with exactly one return
+// parameter. The supported flavours depend on the type of that return parameter.
+std::string VulkanHppGenerator::generateCommandResultMultiSuccessWithErrors1Return(
+  std::string const & name, CommandData const & commandData, size_t initialSkipCount, bool definition, size_t returnParam ) const
+{
+  // a void return parameter is a raw data buffer, expected to be the single vector parameter with a value-typed length
+  if ( commandData.params[returnParam].type.type == "void" )
+  {
+    std::map<size_t, VectorParamData> vectorParams = determineVectorParams( commandData.params );
+    if ( vectorParams.size() == 1 )
+    {
+      if ( returnParam == vectorParams.begin()->first )
+      {
+        if ( commandData.params[vectorParams.begin()->second.lenParam].type.isValue() )
+        {
+          // generate the standard version plus the enhanced and the singular flavour
+          return generateCommandSet(
+            definition,
+            generateCommandStandard( name, commandData, initialSkipCount, definition ),
+            { generateCommandEnhanced( name, commandData, initialSkipCount, definition, vectorParams, { returnParam } ),
+              generateCommandEnhanced( name, commandData, initialSkipCount, definition, vectorParams, { returnParam }, CommandFlavourFlagBits::singular ) } );
+        }
+      }
+    }
+  }
+  // a returned handle vector, sized like a chain-anchor input vector, additionally gets unique flavours
+  else if ( isHandleType( commandData.params[returnParam].type.type ) )
+  {
+    std::map<size_t, VectorParamData> vectorParams = determineVectorParams( commandData.params );
+    if ( vectorParams.size() == 2 )
+    {
+      if ( returnParam == std::next( vectorParams.begin() )->first )
+      {
+        if ( vectorParams.begin()->second.lenParam == std::next( vectorParams.begin() )->second.lenParam )
+        {
+          if ( commandData.params[vectorParams.begin()->second.lenParam].type.type == "uint32_t" )
+          {
+            if ( isStructureChainAnchor( commandData.params[vectorParams.begin()->first].type.type ) )
+            {
+              // enhanced, withAllocator and singular flavours, each also as a unique-handle variant
+              return generateCommandSet(
+                definition,
+                generateCommandStandard( name, commandData, initialSkipCount, definition ),
+                { generateCommandEnhanced( name, commandData, initialSkipCount, definition, vectorParams, { returnParam } ),
+                  generateCommandEnhanced(
+                    name, commandData, initialSkipCount, definition, vectorParams, { returnParam }, CommandFlavourFlagBits::withAllocator ),
+                  generateCommandEnhanced( name, commandData, initialSkipCount, definition, vectorParams, { returnParam }, CommandFlavourFlagBits::singular ) },
+                { generateCommandEnhanced( name, commandData, initialSkipCount, definition, vectorParams, { returnParam }, CommandFlavourFlagBits::unique ),
+                  generateCommandEnhanced( name,
+                                           commandData,
+                                           initialSkipCount,
+                                           definition,
+                                           vectorParams,
+                                           { returnParam },
+                                           CommandFlavourFlagBits::unique | CommandFlavourFlagBits::withAllocator ),
+                  generateCommandEnhanced( name,
+                                           commandData,
+                                           initialSkipCount,
+                                           definition,
+                                           vectorParams,
+                                           { returnParam },
+                                           CommandFlavourFlagBits::unique | CommandFlavourFlagBits::singular ) } );
+            }
+          }
+        }
+      }
+    }
+  }
+  // a returned structure-chain anchor (without vectors) gets the enhanced plus the chained flavour
+  else if ( isStructureChainAnchor( commandData.params[returnParam].type.type ) )
+  {
+    std::map<size_t, VectorParamData> vectorParams = determineVectorParams( commandData.params );
+    if ( vectorParams.empty() )
+    {
+      return generateCommandSet(
+        definition,
+        generateCommandStandard( name, commandData, initialSkipCount, definition ),
+        { generateCommandEnhanced( name, commandData, initialSkipCount, definition, vectorParams, { returnParam } ),
+          generateCommandEnhanced( name, commandData, initialSkipCount, definition, vectorParams, { returnParam }, CommandFlavourFlagBits::chained ) } );
+    }
+  }
+  // any other (plain value) return type, without vectors: just the enhanced flavour
+  else
+  {
+    std::map<size_t, VectorParamData> vectorParams = determineVectorParams( commandData.params );
+    if ( vectorParams.empty() )
+    {
+      return generateCommandSet( definition,
+                                 generateCommandStandard( name, commandData, initialSkipCount, definition ),
+                                 { generateCommandEnhanced( name, commandData, initialSkipCount, definition, vectorParams, { returnParam } ) } );
+    }
+  }
+  return "";
+}
+
+// Generate the flavours of a multi-success/with-errors command with exactly two return
+// parameters: either the VK_SUCCESS/VK_INCOMPLETE enumeration idiom (counter + data vector),
+// or two independent non-array return values.
+std::string VulkanHppGenerator::generateCommandResultMultiSuccessWithErrors2Return(
+  std::string const & name, CommandData const & commandData, size_t initialSkipCount, bool definition, std::vector<size_t> const & returnParams ) const
+{
+  if ( ( commandData.successCodes.size() == 2 ) && ( commandData.successCodes[0] == "VK_SUCCESS" ) && ( commandData.successCodes[1] == "VK_INCOMPLETE" ) )
+  {
+    // enumeration idiom: the first return param is the counter, the second the enumerated vector
+    if ( ( commandData.params[returnParams[0]].type.type == "size_t" ) || ( commandData.params[returnParams[0]].type.type == "uint32_t" ) )
+    {
+      // an enumerated structure-chain vector additionally gets the chained flavours
+      if ( isStructureChainAnchor( commandData.params[returnParams[1]].type.type ) )
+      {
+        std::map<size_t, VectorParamData> vectorParams = determineVectorParams( commandData.params );
+        if ( vectorParams.size() == 1 )
+        {
+          if ( returnParams[0] == vectorParams.begin()->second.lenParam )
+          {
+            if ( returnParams[1] == vectorParams.begin()->first )
+            {
+              return generateCommandSet(
+                definition,
+                generateCommandStandard( name, commandData, initialSkipCount, definition ),
+                { generateCommandEnhanced( name, commandData, initialSkipCount, definition, vectorParams, returnParams ),
+                  generateCommandEnhanced( name, commandData, initialSkipCount, definition, vectorParams, returnParams, CommandFlavourFlagBits::withAllocator ),
+                  generateCommandEnhanced( name, commandData, initialSkipCount, definition, vectorParams, returnParams, CommandFlavourFlagBits::chained ),
+                  generateCommandEnhanced( name,
+                                           commandData,
+                                           initialSkipCount,
+                                           definition,
+                                           vectorParams,
+                                           returnParams,
+                                           CommandFlavourFlagBits::chained | CommandFlavourFlagBits::withAllocator ) } );
+            }
+          }
+        }
+      }
+      // any other enumerated element type gets the enhanced and the withAllocator flavour
+      else
+      {
+        std::map<size_t, VectorParamData> vectorParams = determineVectorParams( commandData.params );
+        if ( vectorParams.size() == 1 )
+        {
+          if ( returnParams[0] == vectorParams.begin()->second.lenParam )
+          {
+            if ( returnParams[1] == vectorParams.begin()->first )
+            {
+              return generateCommandSet(
+                definition,
+                generateCommandStandard( name, commandData, initialSkipCount, definition ),
+                { generateCommandEnhanced( name, commandData, initialSkipCount, definition, vectorParams, returnParams ),
+                  generateCommandEnhanced(
+                    name, commandData, initialSkipCount, definition, vectorParams, returnParams, CommandFlavourFlagBits::withAllocator ) } );
+            }
+          }
+        }
+      }
+    }
+    // two independent non-trivial value returns without any vector: just the enhanced flavour
+    else if ( ( commandData.params[returnParams[0]].type.type != "void" ) && !isHandleType( commandData.params[returnParams[0]].type.type ) &&
+              !isStructureChainAnchor( commandData.params[returnParams[0]].type.type ) )
+    {
+      if ( ( commandData.params[returnParams[1]].type.type != "void" ) && !isHandleType( commandData.params[returnParams[1]].type.type ) &&
+           !isStructureChainAnchor( commandData.params[returnParams[1]].type.type ) )
+      {
+        std::map<size_t, VectorParamData> vectorParams = determineVectorParams( commandData.params );
+        if ( vectorParams.empty() )
+        {
+          return generateCommandSet( definition,
+                                     generateCommandStandard( name, commandData, initialSkipCount, definition ),
+                                     { generateCommandEnhanced( name, commandData, initialSkipCount, definition, vectorParams, returnParams ) } );
+        }
+      }
+    }
+  }
+  return "";
+}
+
+// Generate the flavours of a multi-success/with-errors command with exactly three return
+// parameters. Handled pattern: a uint32_t counter plus two data vectors of non-trivial
+// value types, both sized by that counter.
+std::string VulkanHppGenerator::generateCommandResultMultiSuccessWithErrors3Return(
+  std::string const & name, CommandData const & commandData, size_t initialSkipCount, bool definition, std::vector<size_t> const & returnParams ) const
+{
+  if ( commandData.params[returnParams[0]].type.type != "uint32_t" )
+  {
+    return "";
+  }
+  // both data return params have to be non-void, non-handle, non-chain-anchor types
+  for ( size_t i = 1; i < 3; ++i )
+  {
+    std::string const & dataType = commandData.params[returnParams[i]].type.type;
+    if ( ( dataType == "void" ) || isHandleType( dataType ) || isStructureChainAnchor( dataType ) )
+    {
+      return "";
+    }
+  }
+  std::map<size_t, VectorParamData> vectorParams = determineVectorParams( commandData.params );
+  if ( vectorParams.size() != 2 )
+  {
+    return "";
+  }
+  auto const firstVector  = vectorParams.begin();
+  auto const secondVector = std::next( firstVector );
+  if ( ( firstVector->second.lenParam != secondVector->second.lenParam ) || ( returnParams[0] != firstVector->second.lenParam ) ||
+       ( returnParams[1] != firstVector->first ) || ( returnParams[2] != secondVector->first ) )
+  {
+    return "";
+  }
+  return generateCommandSet(
+    definition,
+    generateCommandStandard( name, commandData, initialSkipCount, definition ),
+    { generateCommandEnhanced( name, commandData, initialSkipCount, definition, vectorParams, returnParams ),
+      generateCommandEnhanced( name, commandData, initialSkipCount, definition, vectorParams, returnParams, CommandFlavourFlagBits::withAllocator ) } );
+}
+
+// Generate the flavours of a VkResult-returning command with a single success code and no
+// error codes, dispatching on the number of return parameters and vector parameters.
+std::string VulkanHppGenerator::generateCommandResultSingleSuccessNoErrors( std::string const & name,
+                                                                            CommandData const & commandData,
+                                                                            size_t              initialSkipCount,
+                                                                            bool                definition ) const
+{
+  std::vector<size_t>               returnParams = determineReturnParams( commandData.params );
+  std::map<size_t, VectorParamData> vectorParams = determineVectorParams( commandData.params );
+  switch ( returnParams.size() )
+  {
+    case 0:
+      // no return parameter: supported without vectors (at most one const-pointer param),
+      // or with a single handle vector of value-typed length
+      switch ( vectorParams.size() )
+      {
+        case 0:
+          {
+            std::vector<size_t> constPointerParams = determineConstPointerParams( commandData.params );
+            switch ( constPointerParams.size() )
+            {
+              case 0:
+                return generateCommandSet( generateCommandStandard( name, commandData, initialSkipCount, definition ),
+                                           generateCommandEnhanced( name, commandData, initialSkipCount, definition, {}, {} ) );
+              case 1:
+                return generateCommandSet( definition,
+                                           generateCommandStandard( name, commandData, initialSkipCount, definition ),
+                                           { generateCommandEnhanced( name, commandData, initialSkipCount, definition, {}, {} ) } );
+            }
+          }
+          break;
+        case 1:
+          if ( commandData.params[vectorParams.begin()->second.lenParam].type.isValue() )
+          {
+            if ( isHandleType( commandData.params[vectorParams.begin()->first].type.type ) )
+            {
+              return generateCommandSet( definition,
+                                         generateCommandStandard( name, commandData, initialSkipCount, definition ),
+                                         { generateCommandEnhanced( name, commandData, initialSkipCount, definition, vectorParams, {} ) } );
+            }
+          }
+          break;
+      }
+      break;
+    case 1:
+      // one return parameter: supported for a plain (non-void, non-handle, non-chain-anchor)
+      // value type without any vector parameter
+      if ( vectorParams.empty() )
+      {
+        if ( ( commandData.params[returnParams[0]].type.type != "void" ) && !isHandleType( commandData.params[returnParams[0]].type.type ) &&
+             !isStructureChainAnchor( commandData.params[returnParams[0]].type.type ) )
+        {
+          return generateCommandSet( definition,
+                                     generateCommandStandard( name, commandData, initialSkipCount, definition ),
+                                     { generateCommandEnhanced( name, commandData, initialSkipCount, definition, vectorParams, returnParams ) } );
+        }
+      }
+      break;
+  }
+  return "";
+}
+
+// Generate the flavours of a VkResult-returning command with a single success code and at
+// least one error code, dispatching on the number of return parameters (at most two).
+std::string VulkanHppGenerator::generateCommandResultSingleSuccessWithErrors( std::string const & name,
+                                                                              CommandData const & commandData,
+                                                                              size_t              initialSkipCount,
+                                                                              bool                definition ) const
+{
+  std::vector<size_t> const returnParams = determineReturnParams( commandData.params );
+  if ( returnParams.size() == 0 )
+  {
+    return generateCommandResultWithErrors0Return( name, commandData, initialSkipCount, definition );
+  }
+  if ( returnParams.size() == 1 )
+  {
+    return generateCommandResultSingleSuccessWithErrors1Return( name, commandData, initialSkipCount, definition, returnParams[0] );
+  }
+  if ( returnParams.size() == 2 )
+  {
+    return generateCommandResultSingleSuccessWithErrors2Return( name, commandData, initialSkipCount, definition, returnParams );
+  }
+  return "";
+}
+
+// Generate the flavours of a single-success/with-errors command with exactly one return
+// parameter, dispatching on the category of that parameter's type.
+std::string VulkanHppGenerator::generateCommandResultSingleSuccessWithErrors1Return(
+  std::string const & name, CommandData const & commandData, size_t initialSkipCount, bool definition, size_t returnParam ) const
+{
+  std::string const & returnParamType = commandData.params[returnParam].type.type;
+  if ( returnParamType == "void" )
+  {
+    return generateCommandResultSingleSuccessWithErrors1ReturnVoid( name, commandData, initialSkipCount, definition, returnParam );
+  }
+  if ( isHandleType( returnParamType ) )
+  {
+    return generateCommandResultSingleSuccessWithErrors1ReturnHandle( name, commandData, initialSkipCount, definition, returnParam );
+  }
+  if ( isStructureChainAnchor( returnParamType ) )
+  {
+    return generateCommandResultSingleSuccessWithErrors1ReturnChain( name, commandData, initialSkipCount, definition, returnParam );
+  }
+  return generateCommandResultSingleSuccessWithErrors1ReturnValue( name, commandData, initialSkipCount, definition, returnParam );
+}
+
+// Generate the flavours of a single-success/with-errors command returning one
+// structure-chain anchor: only supported without vector parameters; the enhanced and the
+// chained flavour are generated.
+std::string VulkanHppGenerator::generateCommandResultSingleSuccessWithErrors1ReturnChain(
+  std::string const & name, CommandData const & commandData, size_t initialSkipCount, bool definition, size_t returnParam ) const
+{
+  std::map<size_t, VectorParamData> const vectorParams = determineVectorParams( commandData.params );
+  if ( !vectorParams.empty() )
+  {
+    return "";
+  }
+  return generateCommandSet(
+    definition,
+    generateCommandStandard( name, commandData, initialSkipCount, definition ),
+    { generateCommandEnhanced( name, commandData, initialSkipCount, definition, vectorParams, { returnParam } ),
+      generateCommandEnhanced( name, commandData, initialSkipCount, definition, vectorParams, { returnParam }, CommandFlavourFlagBits::chained ) } );
+}
+
+// Single handle output parameter: dispatch on the number of vector parameters.
+// 0 vectors -> plain enhanced variant plus a "unique"-handle variant;
+// 1 or 2 vectors -> delegate to the dedicated helpers. Other counts yield "".
+std::string VulkanHppGenerator::generateCommandResultSingleSuccessWithErrors1ReturnHandle(
+  std::string const & name, CommandData const & commandData, size_t initialSkipCount, bool definition, size_t returnParam ) const
+{
+  std::map<size_t, VectorParamData> vectorParams = determineVectorParams( commandData.params );
+  switch ( vectorParams.size() )
+  {
+    case 0:
+      return generateCommandSet(
+        definition,
+        generateCommandStandard( name, commandData, initialSkipCount, definition ),
+        { generateCommandEnhanced( name, commandData, initialSkipCount, definition, vectorParams, { returnParam } ) },
+        { generateCommandEnhanced( name, commandData, initialSkipCount, definition, vectorParams, { returnParam }, CommandFlavourFlagBits::unique ) } );
+      break;
+    case 1:
+      return generateCommandResultSingleSuccessWithErrors1ReturnHandle1Vector(
+        name, commandData, initialSkipCount, definition, returnParam, *vectorParams.begin() );
+      break;
+    case 2:
+      return generateCommandResultSingleSuccessWithErrors1ReturnHandle2Vector( name, commandData, initialSkipCount, definition, returnParam, vectorParams );
+      break;
+  }
+  return "";
+}
+
+// Single handle output that is itself the one vector parameter, with the
+// vector's length given by a struct member of another parameter: emit the
+// standard variant, enhanced plain/withAllocator variants, and matching
+// unique-handle variants. Any other shape yields "" (not generated).
+std::string
+  VulkanHppGenerator::generateCommandResultSingleSuccessWithErrors1ReturnHandle1Vector( std::string const &                        name,
+                                                                                        CommandData const &                        commandData,
+                                                                                        size_t                                     initialSkipCount,
+                                                                                        bool                                       definition,
+                                                                                        size_t                                     returnParam,
+                                                                                        std::pair<size_t, VectorParamData> const & vectorParamIndex ) const
+{
+  if ( returnParam == vectorParamIndex.first )
+  {
+    if ( isLenByStructMember( commandData.params[vectorParamIndex.first].len, commandData.params[vectorParamIndex.second.lenParam] ) )
+    {
+      return generateCommandSet(
+        definition,
+        generateCommandStandard( name, commandData, initialSkipCount, definition ),
+        { generateCommandEnhanced( name, commandData, initialSkipCount, definition, { vectorParamIndex }, { returnParam } ),
+          generateCommandEnhanced(
+            name, commandData, initialSkipCount, definition, { vectorParamIndex }, { returnParam }, CommandFlavourFlagBits::withAllocator ) },
+        { generateCommandEnhanced( name, commandData, initialSkipCount, definition, { vectorParamIndex }, { returnParam }, CommandFlavourFlagBits::unique ),
+          generateCommandEnhanced( name,
+                                   commandData,
+                                   initialSkipCount,
+                                   definition,
+                                   { vectorParamIndex },
+                                   { returnParam },
+                                   CommandFlavourFlagBits::unique | CommandFlavourFlagBits::withAllocator ) } );
+    }
+  }
+  return "";
+}
+
+// Handle output with exactly two vector parameters. Only generated when the
+// return parameter is the second vector, both vectors share the same length
+// parameter passed by value, and the first vector holds non-void, non-handle
+// elements. Emits plain/withAllocator/singular enhanced variants plus their
+// unique-handle counterparts; any other shape yields "".
+std::string VulkanHppGenerator::generateCommandResultSingleSuccessWithErrors1ReturnHandle2Vector( std::string const &                       name,
+                                                                                                  CommandData const &                       commandData,
+                                                                                                  size_t                                    initialSkipCount,
+                                                                                                  bool                                      definition,
+                                                                                                  size_t                                    returnParam,
+                                                                                                  std::map<size_t, VectorParamData> const & vectorParams ) const
+{
+  if ( returnParam == std::next( vectorParams.begin() )->first )
+  {
+    if ( vectorParams.begin()->second.lenParam == std::next( vectorParams.begin() )->second.lenParam )
+    {
+      if ( commandData.params[vectorParams.begin()->second.lenParam].type.isValue() )
+      {
+        if ( ( commandData.params[vectorParams.begin()->first].type.type != "void" ) &&
+             !isHandleType( commandData.params[vectorParams.begin()->first].type.type ) )
+        {
+          return generateCommandSet(
+            definition,
+            generateCommandStandard( name, commandData, initialSkipCount, definition ),
+            { generateCommandEnhanced( name, commandData, initialSkipCount, definition, vectorParams, { returnParam } ),
+              generateCommandEnhanced( name, commandData, initialSkipCount, definition, vectorParams, { returnParam }, CommandFlavourFlagBits::withAllocator ),
+              generateCommandEnhanced( name, commandData, initialSkipCount, definition, vectorParams, { returnParam }, CommandFlavourFlagBits::singular ) },
+            { generateCommandEnhanced( name, commandData, initialSkipCount, definition, vectorParams, { returnParam }, CommandFlavourFlagBits::unique ),
+              generateCommandEnhanced( name,
+                                       commandData,
+                                       initialSkipCount,
+                                       definition,
+                                       vectorParams,
+                                       { returnParam },
+                                       CommandFlavourFlagBits::unique | CommandFlavourFlagBits::withAllocator ),
+              generateCommandEnhanced( name,
+                                       commandData,
+                                       initialSkipCount,
+                                       definition,
+                                       vectorParams,
+                                       { returnParam },
+                                       CommandFlavourFlagBits::singular | CommandFlavourFlagBits::unique ) } );
+        }
+      }
+    }
+  }
+  return "";
+}
+
+// Single plain-value output parameter: 0 vectors -> standard + one enhanced
+// variant; 2 vectors -> dedicated helper. Note there is deliberately no case 1
+// here, so one-vector commands fall through and return "" (not generated).
+std::string VulkanHppGenerator::generateCommandResultSingleSuccessWithErrors1ReturnValue(
+  std::string const & name, CommandData const & commandData, size_t initialSkipCount, bool definition, size_t returnParam ) const
+{
+  std::map<size_t, VectorParamData> vectorParams = determineVectorParams( commandData.params );
+  switch ( vectorParams.size() )
+  {
+    case 0:
+      return generateCommandSet( definition,
+                                 generateCommandStandard( name, commandData, initialSkipCount, definition ),
+                                 { generateCommandEnhanced( name, commandData, initialSkipCount, definition, vectorParams, { returnParam } ) } );
+    case 2:
+      return generateCommandResultSingleSuccessWithErrors1ReturnValue2Vectors( name, commandData, initialSkipCount, definition, returnParam, vectorParams );
+      break;
+  }
+  return "";
+}
+
+// Plain-value output with exactly two vector parameters. Only generated when
+// the return parameter is the second vector, both vectors share one uint32_t
+// length parameter, and the first vector's elements are neither void, nor a
+// handle, nor a structure-chain anchor. Emits plain/withAllocator/singular
+// enhanced variants; any other shape yields "".
+std::string VulkanHppGenerator::generateCommandResultSingleSuccessWithErrors1ReturnValue2Vectors( std::string const &                       name,
+                                                                                                  CommandData const &                       commandData,
+                                                                                                  size_t                                    initialSkipCount,
+                                                                                                  bool                                      definition,
+                                                                                                  size_t                                    returnParam,
+                                                                                                  std::map<size_t, VectorParamData> const & vectorParams ) const
+{
+  if ( returnParam == std::next( vectorParams.begin() )->first )
+  {
+    if ( vectorParams.begin()->second.lenParam == std::next( vectorParams.begin() )->second.lenParam )
+    {
+      if ( commandData.params[vectorParams.begin()->second.lenParam].type.type == "uint32_t" )
+      {
+        if ( ( commandData.params[vectorParams.begin()->first].type.type != "void" ) &&
+             !isHandleType( commandData.params[vectorParams.begin()->first].type.type ) &&
+             !isStructureChainAnchor( commandData.params[vectorParams.begin()->first].type.type ) )
+        {
+          return generateCommandSet(
+            definition,
+            generateCommandStandard( name, commandData, initialSkipCount, definition ),
+            { generateCommandEnhanced( name, commandData, initialSkipCount, definition, vectorParams, { returnParam } ),
+              generateCommandEnhanced( name, commandData, initialSkipCount, definition, vectorParams, { returnParam }, CommandFlavourFlagBits::withAllocator ),
+              generateCommandEnhanced( name, commandData, initialSkipCount, definition, vectorParams, { returnParam }, CommandFlavourFlagBits::singular ) } );
+        }
+      }
+    }
+  }
+  return "";
+}
+
+// Single void-pointer (untyped data) output parameter, dispatched on the
+// number of vector parameters:
+//   0 -> standard + one enhanced variant;
+//   1 -> only if the return param is that vector and its length is passed by
+//        value: plain + singular enhanced variants;
+//   2 -> only if the return param is the second vector, the vectors have
+//        distinct by-value length params, and the first vector holds handles:
+//        plain + singular enhanced variants.
+// Every other shape yields "" (not generated).
+std::string VulkanHppGenerator::generateCommandResultSingleSuccessWithErrors1ReturnVoid(
+  std::string const & name, CommandData const & commandData, size_t initialSkipCount, bool definition, size_t returnParam ) const
+{
+  std::map<size_t, VectorParamData> vectorParams = determineVectorParams( commandData.params );
+  switch ( vectorParams.size() )
+  {
+    case 0:
+      return generateCommandSet( definition,
+                                 generateCommandStandard( name, commandData, initialSkipCount, definition ),
+                                 { generateCommandEnhanced( name, commandData, initialSkipCount, definition, vectorParams, { returnParam } ) } );
+      break;
+    case 1:
+      if ( returnParam == vectorParams.begin()->first )
+      {
+        if ( commandData.params[vectorParams.begin()->second.lenParam].type.isValue() )
+        {
+          return generateCommandSet(
+            definition,
+            generateCommandStandard( name, commandData, initialSkipCount, definition ),
+            { generateCommandEnhanced( name, commandData, initialSkipCount, definition, vectorParams, { returnParam } ),
+              generateCommandEnhanced( name, commandData, initialSkipCount, definition, vectorParams, { returnParam }, CommandFlavourFlagBits::singular ) } );
+        }
+      }
+      break;
+    case 2:
+      if ( returnParam == std::next( vectorParams.begin() )->first )
+      {
+        if ( vectorParams.begin()->second.lenParam != std::next( vectorParams.begin() )->second.lenParam )
+        {
+          if ( commandData.params[vectorParams.begin()->second.lenParam].type.isValue() )
+          {
+            if ( isHandleType( commandData.params[vectorParams.begin()->first].type.type ) )
+            {
+              if ( commandData.params[std::next( vectorParams.begin() )->second.lenParam].type.isValue() )
+              {
+                return generateCommandSet(
+                  definition,
+                  generateCommandStandard( name, commandData, initialSkipCount, definition ),
+                  { generateCommandEnhanced( name, commandData, initialSkipCount, definition, vectorParams, { returnParam } ),
+                    generateCommandEnhanced(
+                      name, commandData, initialSkipCount, definition, vectorParams, { returnParam }, CommandFlavourFlagBits::singular ) } );
+              }
+            }
+          }
+        }
+      }
+      break;
+  }
+  return "";
+}
+
+// Two output parameters for a single-success-with-errors command. Only one
+// shape is supported: both returns are plain values (not void/handle/chain
+// anchor), there are exactly two vector parameters, the first return is the
+// second vector, the second return is not a vector, and both vectors share a
+// single by-value length parameter whose element type is a plain value.
+// Emits plain/withAllocator/singular enhanced variants; otherwise "".
+std::string VulkanHppGenerator::generateCommandResultSingleSuccessWithErrors2Return(
+  std::string const & name, CommandData const & commandData, size_t initialSkipCount, bool definition, std::vector<size_t> const & returnParams ) const
+{
+  if ( ( commandData.params[returnParams[0]].type.type != "void" ) && !isHandleType( commandData.params[returnParams[0]].type.type ) &&
+       !isStructureChainAnchor( commandData.params[returnParams[0]].type.type ) )
+  {
+    if ( ( commandData.params[returnParams[1]].type.type != "void" ) && !isHandleType( commandData.params[returnParams[1]].type.type ) &&
+         !isStructureChainAnchor( commandData.params[returnParams[1]].type.type ) )
+    {
+      std::map<size_t, VectorParamData> vectorParams = determineVectorParams( commandData.params );
+      if ( vectorParams.size() == 2 )
+      {
+        if ( returnParams[0] == std::next( vectorParams.begin() )->first )
+        {
+          if ( vectorParams.find( returnParams[1] ) == vectorParams.end() )
+          {
+            // the second return must not double as a length parameter of either vector
+            assert( ( returnParams[1] != vectorParams.begin()->second.lenParam ) && ( returnParams[1] != std::next( vectorParams.begin() )->second.lenParam ) );
+            if ( vectorParams.begin()->second.lenParam == std::next( vectorParams.begin() )->second.lenParam )
+            {
+              if ( commandData.params[vectorParams.begin()->second.lenParam].type.isValue() )
+              {
+                if ( ( commandData.params[vectorParams.begin()->first].type.type != "void" ) &&
+                     !isHandleType( commandData.params[vectorParams.begin()->first].type.type ) &&
+                     !isStructureChainAnchor( commandData.params[vectorParams.begin()->first].type.type ) )
+                {
+                  return generateCommandSet(
+                    definition,
+                    generateCommandStandard( name, commandData, initialSkipCount, definition ),
+                    { generateCommandEnhanced( name, commandData, initialSkipCount, definition, vectorParams, returnParams ),
+                      generateCommandEnhanced(
+                        name, commandData, initialSkipCount, definition, vectorParams, returnParams, CommandFlavourFlagBits::withAllocator ),
+                      generateCommandEnhanced(
+                        name, commandData, initialSkipCount, definition, vectorParams, returnParams, CommandFlavourFlagBits::singular ) } );
+                }
+              }
+            }
+          }
+        }
+      }
+    }
+  }
+  return "";
+}
+
+// Result-returning command with errors and no output parameter. With neither
+// vector nor const-pointer parameters, the two-string generateCommandSet
+// overload is used (note: it does not take 'definition' — presumably that
+// overload emits an #ifdef-selected pair; verify against its definition).
+// Otherwise, if all vector sizes are supported, the definition-aware overload
+// is used with a single enhanced variant; anything else yields "".
+std::string VulkanHppGenerator::generateCommandResultWithErrors0Return( std::string const & name,
+                                                                        CommandData const & commandData,
+                                                                        size_t              initialSkipCount,
+                                                                        bool                definition ) const
+{
+  std::map<size_t, VectorParamData> vectorParams = determineVectorParams( commandData.params );
+  if ( vectorParams.empty() && determineConstPointerParams( commandData.params ).empty() )
+  {
+    return generateCommandSet( generateCommandStandard( name, commandData, initialSkipCount, definition ),
+                               generateCommandEnhanced( name, commandData, initialSkipCount, definition, vectorParams, {} ) );
+  }
+  else if ( allVectorSizesSupported( commandData.params, vectorParams ) )
+  {
+    return generateCommandSet( definition,
+                               generateCommandStandard( name, commandData, initialSkipCount, definition ),
+                               { generateCommandEnhanced( name, commandData, initialSkipCount, definition, vectorParams, {} ) } );
+  }
+  return "";
+}
+
+// Assemble the full text block for one command: the standard variant, then the
+// enhanced variants inside #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE, then the
+// unique-handle variants nested inside #ifndef VULKAN_HPP_NO_SMART_HANDLE.
+// 'unique', when non-empty, must be parallel to 'enhanced' (one unique variant
+// per enhanced variant). Definitions get an extra blank line between entries;
+// declarations are packed tightly ('separator' is empty).
+std::string VulkanHppGenerator::generateCommandSet( bool                             definition,
+                                                    std::string const &              standard,
+                                                    std::vector<std::string> const & enhanced,
+                                                    std::vector<std::string> const & unique ) const
+{
+  assert( unique.empty() || ( enhanced.size() == unique.size() ) );
+
+  std::string commandSet = "\n" + standard;
+  if ( !enhanced.empty() )
+  {
+    // definitions are separated by a blank line, declarations are not
+    std::string separator = definition ? "\n" : "";
+    commandSet += separator + "\n#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE\n";
+    bool firstEnhanced = true;
+    for ( auto const & e : enhanced )
+    {
+      if ( !firstEnhanced )
+      {
+        commandSet += separator + "\n";
+      }
+      firstEnhanced = false;
+      commandSet += e;
+    }
+    if ( !unique.empty() )
+    {
+      commandSet += separator + "\n#  ifndef VULKAN_HPP_NO_SMART_HANDLE\n";
+      bool firstUnique = true;
+      for ( auto const & u : unique )
+      {
+        if ( !firstUnique )
+        {
+          commandSet += separator + "\n";
+        }
+        firstUnique = false;
+        commandSet += u;
+      }
+      commandSet += "\n#  endif /* VULKAN_HPP_NO_SMART_HANDLE */";
+    }
+    commandSet += "\n#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */";
+  }
+  commandSet += "\n";
+  return commandSet;
+}
+
+// Wrap a standard and an enhanced implementation of the same command into one
+// block that selects between them via VULKAN_HPP_DISABLE_ENHANCED_MODE.
+std::string VulkanHppGenerator::generateCommandSet( std::string const & standard, std::string const & enhanced ) const
+{
+  std::string const selectionTemplate = R"(
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+${commandStandard}
+#else
+${commandEnhanced}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+)";
+
+  std::map<std::string, std::string> replacements;
+  replacements["commandEnhanced"] = enhanced;
+  replacements["commandStandard"] = standard;
+  return replaceWithMap( selectionTemplate, replacements );
+}
+
+// Generate the "standard" (thin, non-enhanced) wrapper for a command: either
+// the out-of-class definition or the in-class declaration, depending on
+// 'definition'. The wrapper forwards to the dispatcher entry d.<name>(...),
+// casting a Vk-prefixed return type to its VULKAN_HPP equivalent.
+std::string
+  VulkanHppGenerator::generateCommandStandard( std::string const & name, CommandData const & commandData, size_t initialSkipCount, bool definition ) const
+{
+  std::set<size_t> skippedParams = determineSkippedParams( commandData.params, initialSkipCount, {}, {}, false );
+
+  std::string argumentList = generateArgumentListStandard( commandData.params, skippedParams );
+  std::string commandName  = generateCommandName( name, commandData.params, initialSkipCount, m_tags );
+  // [[nodiscard]] whenever there is more than one possible result code
+  std::string nodiscard    = ( 1 < commandData.successCodes.size() + commandData.errorCodes.size() ) ? "VULKAN_HPP_NODISCARD " : "";
+  std::string returnType   = stripPrefix( commandData.returnType, "Vk" );
+
+  if ( definition )
+  {
+    std::string functionBody = "d." + name + "( " + generateCallArgumentsStandard( commandData.handle, commandData.params ) + " )";
+    if ( commandData.returnType.starts_with( "Vk" ) )
+    {
+      // cast the C return type to the corresponding vk:: type
+      functionBody = "return static_cast<" + returnType + ">( " + functionBody + " )";
+    }
+    else if ( commandData.returnType != "void" )
+    {
+      functionBody = "return " + functionBody;
+    }
+
+    std::string const functionTemplate =
+      R"(  template <typename Dispatch>
+  ${nodiscard}VULKAN_HPP_INLINE ${returnType} ${className}${classSeparator}${commandName}( ${argumentList} )${const} VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    ${functionBody};
+  })";
+
+    return replaceWithMap( functionTemplate,
+                           { { "argumentList", argumentList },
+                             { "className", initialSkipCount ? stripPrefix( commandData.params[initialSkipCount - 1].type.type, "Vk" ) : "" },
+                             { "classSeparator", commandData.handle.empty() ? "" : "::" },
+                             { "commandName", commandName },
+                             { "const", commandData.handle.empty() ? "" : " const" },
+                             { "functionBody", functionBody },
+                             { "nodiscard", nodiscard },
+                             { "returnType", returnType } } );
+  }
+  else
+  {
+    std::string const functionTemplate =
+      R"(    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ${nodiscard}${returnType} ${commandName}( ${argumentList} VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT )${const} VULKAN_HPP_NOEXCEPT;)";
+
+    return replaceWithMap( functionTemplate,
+                           { { "argumentList", argumentList },
+                             { "commandName", commandName },
+                             { "const", commandData.handle.empty() ? "" : " const" },
+                             { "nodiscard", nodiscard },
+                             { "returnType", returnType } } );
+  }
+}
+
+// Generate wrappers for a command returning a plain (non-VkResult) value.
+// Only supported with no output parameters: with neither vectors nor
+// const-pointer params just the standard variant is emitted; with at most one
+// vector a single enhanced variant is added. Anything else yields "".
+std::string
+  VulkanHppGenerator::generateCommandValue( std::string const & name, CommandData const & commandData, size_t initialSkipCount, bool definition ) const
+{
+  std::vector<size_t> returnParams = determineReturnParams( commandData.params );
+  if ( returnParams.empty() )
+  {
+    std::map<size_t, VectorParamData> vectorParams = determineVectorParams( commandData.params );
+    if ( vectorParams.empty() && determineConstPointerParams( commandData.params ).empty() )
+    {
+      return generateCommandSet( definition, generateCommandStandard( name, commandData, initialSkipCount, definition ) );
+    }
+    else if ( vectorParams.size() <= 1 )
+    {
+      return generateCommandSet( definition,
+                                 generateCommandStandard( name, commandData, initialSkipCount, definition ),
+                                 { generateCommandEnhanced( name, commandData, initialSkipCount, definition, vectorParams, returnParams ) } );
+    }
+  }
+  return "";
+}
+
+// void-returning command with no output parameter: standard-only when there
+// are neither vector nor const-pointer parameters; standard plus one enhanced
+// variant when every vector size is of a supported counter type; otherwise "".
+std::string
+  VulkanHppGenerator::generateCommandVoid0Return( std::string const & name, CommandData const & commandData, size_t initialSkipCount, bool definition ) const
+{
+  std::map<size_t, VectorParamData> vectorParams = determineVectorParams( commandData.params );
+  if ( vectorParams.empty() && determineConstPointerParams( commandData.params ).empty() )
+  {
+    return generateCommandSet( definition, generateCommandStandard( name, commandData, initialSkipCount, definition ) );
+  }
+  else if ( allVectorSizesSupported( commandData.params, vectorParams ) )
+  {
+    // All the vectorParams have a counter by value, of type "uint32_t", "VkDeviceSize", or "VkSampleCountFlagBits" (!)
+    return generateCommandSet( definition,
+                               generateCommandStandard( name, commandData, initialSkipCount, definition ),
+                               { generateCommandEnhanced( name, commandData, initialSkipCount, definition, vectorParams, {} ) } );
+  }
+  return "";
+}
+
+// void-returning command with exactly one output parameter. Dispatches first
+// on the output's type (pointer-to-pointer, handle, chain anchor, void, other
+// value), then on the number of vector parameters. Unsupported combinations
+// fall through and yield "" (not generated).
+std::string VulkanHppGenerator::generateCommandVoid1Return(
+  std::string const & name, CommandData const & commandData, size_t initialSkipCount, bool definition, size_t returnParam ) const
+{
+  std::map<size_t, VectorParamData> vectorParams = determineVectorParams( commandData.params );
+  if ( commandData.params[returnParam].type.postfix == "**" )
+  {
+    // get a pointer to something
+    if ( commandData.params[returnParam].type.type == "void" )
+    {
+      if ( vectorParams.empty() )
+      {
+        return generateCommandSet( definition,
+                                   generateCommandStandard( name, commandData, initialSkipCount, definition ),
+                                   { generateCommandEnhanced( name, commandData, initialSkipCount, definition, {}, { returnParam } ) } );
+      }
+    }
+  }
+  else if ( isHandleType( commandData.params[returnParam].type.type ) )
+  {
+    if ( vectorParams.empty() )
+    {
+      return generateCommandSet( definition,
+                                 generateCommandStandard( name, commandData, initialSkipCount, definition ),
+                                 { generateCommandEnhanced( name, commandData, initialSkipCount, definition, {}, { returnParam } ) } );
+    }
+  }
+  else if ( isStructureChainAnchor( commandData.params[returnParam].type.type ) )
+  {
+    // chain anchors additionally get a StructureChain-returning ("chained") variant
+    if ( vectorParams.empty() )
+    {
+      return generateCommandSet(
+        definition,
+        generateCommandStandard( name, commandData, initialSkipCount, definition ),
+        { generateCommandEnhanced( name, commandData, initialSkipCount, definition, {}, { returnParam } ),
+          generateCommandEnhanced( name, commandData, initialSkipCount, definition, {}, { returnParam }, CommandFlavourFlagBits::chained ) } );
+    }
+  }
+  else if ( commandData.params[returnParam].type.type == "void" )
+  {
+    switch ( vectorParams.size() )
+    {
+      case 0:
+        return generateCommandSet( definition,
+                                   generateCommandStandard( name, commandData, initialSkipCount, definition ),
+                                   { generateCommandEnhanced( name, commandData, initialSkipCount, definition, vectorParams, { returnParam } ) } );
+      case 1:
+        if ( returnParam == vectorParams.begin()->first )
+        {
+          // only for commands whose name has no plural 's' to strip
+          if ( name == stripPluralS( name, m_tags ) )
+          {
+            return generateCommandSet(
+              definition,
+              generateCommandStandard( name, commandData, initialSkipCount, definition ),
+              { generateCommandEnhanced( name, commandData, initialSkipCount, definition, vectorParams, { returnParam }, CommandFlavourFlagBits::singular ) } );
+          }
+        }
+        break;
+    }
+  }
+  else
+  {
+    switch ( vectorParams.size() )
+    {
+      case 0:
+        return generateCommandSet( definition,
+                                   generateCommandStandard( name, commandData, initialSkipCount, definition ),
+                                   { generateCommandEnhanced( name, commandData, initialSkipCount, definition, {}, { returnParam } ) } );
+        break;
+      case 1:
+        if ( returnParam == vectorParams.begin()->first )
+        {
+          // you get a vector of stuff, with the size being one of the parameters
+          return generateCommandSet(
+            definition,
+            generateCommandStandard( name, commandData, initialSkipCount, definition ),
+            { generateCommandEnhanced( name, commandData, initialSkipCount, definition, vectorParams, { returnParam } ),
+              generateCommandEnhanced(
+                name, commandData, initialSkipCount, definition, vectorParams, { returnParam }, CommandFlavourFlagBits::withAllocator ) } );
+        }
+        else
+        {
+          // the vector is an input; supported only for plain-value elements
+          // whose length comes from a struct member
+          if ( !isHandleType( commandData.params[vectorParams.begin()->first].type.type ) &&
+               !isStructureChainAnchor( commandData.params[vectorParams.begin()->first].type.type ) &&
+               ( commandData.params[vectorParams.begin()->first].type.type != "void" ) )
+          {
+            if ( isLenByStructMember( commandData.params[vectorParams.begin()->first].len, commandData.params[vectorParams.begin()->second.lenParam] ) )
+            {
+              return generateCommandSet( definition,
+                                         generateCommandStandard( name, commandData, initialSkipCount, definition ),
+                                         { generateCommandEnhanced( name, commandData, initialSkipCount, definition, vectorParams, { returnParam } ) } );
+            }
+          }
+        }
+        break;
+    }
+  }
+  return "";
+}
+
+// void-returning command with two output parameters. Only the enumeration
+// pattern is supported: the first return is a uint32_t count that is the
+// length of the single vector parameter, and the second return is that vector.
+// Chain-anchor elements get plain/withAllocator/chained/chained+withAllocator
+// variants; other non-handle elements get plain/withAllocator. Otherwise "".
+std::string VulkanHppGenerator::generateCommandVoid2Return(
+  std::string const & name, CommandData const & commandData, size_t initialSkipCount, bool definition, std::vector<size_t> const & returnParams ) const
+{
+  if ( commandData.params[returnParams[0]].type.type == "uint32_t" )
+  {
+    std::map<size_t, VectorParamData> vectorParams = determineVectorParams( commandData.params );
+    if ( vectorParams.size() == 1 )
+    {
+      if ( returnParams[0] == vectorParams.begin()->second.lenParam )
+      {
+        if ( returnParams[1] == vectorParams.begin()->first )
+        {
+          if ( isStructureChainAnchor( commandData.params[returnParams[1]].type.type ) )
+          {
+            return generateCommandSet(
+              definition,
+              generateCommandStandard( name, commandData, initialSkipCount, definition ),
+              { generateCommandEnhanced( name, commandData, initialSkipCount, definition, vectorParams, returnParams ),
+                generateCommandEnhanced( name, commandData, initialSkipCount, definition, vectorParams, returnParams, CommandFlavourFlagBits::withAllocator ),
+                generateCommandEnhanced( name, commandData, initialSkipCount, definition, vectorParams, returnParams, CommandFlavourFlagBits::chained ),
+                generateCommandEnhanced( name,
+                                         commandData,
+                                         initialSkipCount,
+                                         definition,
+                                         vectorParams,
+                                         returnParams,
+                                         CommandFlavourFlagBits::chained | CommandFlavourFlagBits::withAllocator ) } );
+          }
+          else if ( !isHandleType( commandData.params[returnParams[1]].type.type ) )
+          {
+            return generateCommandSet(
+              definition,
+              generateCommandStandard( name, commandData, initialSkipCount, definition ),
+              { generateCommandEnhanced( name, commandData, initialSkipCount, definition, vectorParams, returnParams ),
+                generateCommandEnhanced(
+                  name, commandData, initialSkipCount, definition, vectorParams, returnParams, CommandFlavourFlagBits::withAllocator ) } );
+          }
+        }
+      }
+    }
+  }
+  return "";
+}
+
+// Return the constexpr prefix for a generated struct: "" for the two base
+// structures (which can't be constexpr), "VULKAN_HPP_CONSTEXPR_14 " when the
+// struct contains a union or an array, and "VULKAN_HPP_CONSTEXPR " otherwise.
+std::string VulkanHppGenerator::generateConstexprString( std::string const & structName ) const
+{
+  // structs with a VkBaseInStructure and VkBaseOutStructure can't be a constexpr!
+  if ( ( structName == "VkBaseInStructure" ) || ( structName == "VkBaseOutStructure" ) )
+  {
+    return "";
+  }
+  std::string constexprMacro = "VULKAN_HPP_CONSTEXPR";
+  constexprMacro += ( containsUnion( structName ) || containsArray( structName ) ) ? "_14 " : " ";
+  return constexprMacro;
+}
+
+// Generate the local variable declarations holding a command's output data,
+// dispatching on the number of return parameters (0..3). The three-return
+// case is asserted to be exactly the two-vectors-sharing-one-count pattern
+// with no templated params and none of the chained/singular/unique flavours.
+std::string VulkanHppGenerator::generateDataDeclarations( CommandData const &                       commandData,
+                                                          std::vector<size_t> const &               returnParams,
+                                                          std::map<size_t, VectorParamData> const & vectorParams,
+                                                          std::set<size_t> const &                  templatedParams,
+                                                          CommandFlavourFlags                       flavourFlags,
+                                                          bool                                      raii,
+                                                          std::vector<std::string> const &          dataTypes,
+                                                          std::string const &                       dataType,
+                                                          std::string const &                       returnType,
+                                                          std::string const &                       returnVariable ) const
+{
+  // one data type per return parameter
+  assert( dataTypes.size() == returnParams.size() );
+
+  switch ( returnParams.size() )
+  {
+    case 0: return "";  // no returnParams -> no data declarations
+    case 1:
+      return generateDataDeclarations1Return(
+        commandData, returnParams, vectorParams, templatedParams, flavourFlags, dataTypes, dataType, returnType, returnVariable );
+    case 2:
+      assert( !( flavourFlags & CommandFlavourFlagBits::unique ) );
+      return generateDataDeclarations2Returns( commandData, returnParams, vectorParams, flavourFlags, raii, dataTypes, dataType, returnVariable );
+    case 3:
+      assert( ( vectorParams.size() == 2 ) && ( returnParams[0] == vectorParams.begin()->second.lenParam ) &&
+              ( returnParams[1] == vectorParams.begin()->first ) && ( returnParams[2] == std::next( vectorParams.begin() )->first ) &&
+              ( returnParams[0] == std::next( vectorParams.begin() )->second.lenParam ) && templatedParams.empty() &&
+              !( flavourFlags & ( CommandFlavourFlagBits::chained | CommandFlavourFlagBits::singular | CommandFlavourFlagBits::unique ) ) );
+      return generateDataDeclarations3Returns( commandData, returnParams, flavourFlags, raii, dataTypes );
+    default: assert( false ); return "";
+  }
+}
+
+// Generate the data declaration for a command with exactly one return parameter:
+// either a plain value, an appropriately sized vector, or (for the "chained" flavour)
+// a StructureChain plus a reference to the struct embedded in it.
+std::string VulkanHppGenerator::generateDataDeclarations1Return( CommandData const &                       commandData,
+                                                                 std::vector<size_t> const &               returnParams,
+                                                                 std::map<size_t, VectorParamData> const & vectorParams,
+                                                                 std::set<size_t> const &                  templatedParams,
+                                                                 CommandFlavourFlags                       flavourFlags,
+                                                                 std::vector<std::string> const &          dataTypes,
+                                                                 std::string const &                       dataType,
+                                                                 std::string const &                       returnType,
+                                                                 std::string const &                       returnVariable ) const
+{
+  auto vectorParamIt = vectorParams.find( returnParams[0] );
+  if ( !( flavourFlags & CommandFlavourFlagBits::chained ) )
+  {
+    if ( ( vectorParamIt == vectorParams.end() ) || ( flavourFlags & CommandFlavourFlagBits::singular ) )
+    {
+      // non-vector (or singular) return: a single default-constructed value
+      std::string const dataDeclarationsTemplate = R"(${returnType} ${returnVariable};)";
+
+      return replaceWithMap( dataDeclarationsTemplate, { { "returnType", dataType }, { "returnVariable", returnVariable } } );
+    }
+    else
+    {
+      // vector return: construct with its size and, for the withAllocator flavour (but not
+      // unique, which uses its own allocator), the user-provided allocator
+      std::string allocator       = stripPrefix( dataTypes[0], "VULKAN_HPP_NAMESPACE::" ) + "Allocator";
+      std::string vectorAllocator = ( ( flavourFlags & CommandFlavourFlagBits::withAllocator ) && !( flavourFlags & CommandFlavourFlagBits::unique ) )
+                                    ? ( ", " + startLowerCase( allocator ) )
+                                    : "";
+      std::string vectorSize      = getVectorSize( commandData.params, vectorParams, returnParams[0], dataTypes[0], templatedParams );
+
+      std::string const dataDeclarationsTemplate = R"(${dataType} ${returnVariable}( ${vectorSize}${vectorAllocator} );)";
+
+      return replaceWithMap(
+        dataDeclarationsTemplate,
+        { { "dataType", dataType }, { "returnVariable", returnVariable }, { "vectorAllocator", vectorAllocator }, { "vectorSize", vectorSize } } );
+    }
+  }
+  else
+  {
+    // chained flavour: only supported for non-vector (or singular) returns
+    assert( ( vectorParamIt == vectorParams.end() ) || ( flavourFlags & CommandFlavourFlagBits::singular ) );
+
+    std::string dataVariable = startLowerCase( stripPrefix( commandData.params[returnParams[0]].name, "p" ) );
+
+    // declare the StructureChain and bind a reference to the contained struct of interest
+    std::string const dataDeclarationsTemplate = R"(${returnType} ${returnVariable};
+    ${dataType} & ${dataVariable} = ${returnVariable}.template get<${dataType}>();)";
+
+    return replaceWithMap( dataDeclarationsTemplate,
+                           { { "dataType", dataTypes[0] },
+                             { "dataVariable", dataVariable },
+                             { "returnType", ( commandData.returnType == "void" ) ? returnType : "StructureChain<X, Y, Z...>" },
+                             { "returnVariable", returnVariable } } );
+  }
+}
+
+// Generate the data declarations for a command with exactly two return parameters,
+// dispatching on how many of the command's parameters are vectors.
+std::string VulkanHppGenerator::generateDataDeclarations2Returns( CommandData const &                       commandData,
+                                                                  std::vector<size_t> const &               returnParams,
+                                                                  std::map<size_t, VectorParamData> const & vectorParams,
+                                                                  CommandFlavourFlags                       flavourFlags,
+                                                                  bool                                      raii,
+                                                                  std::vector<std::string> const &          dataTypes,
+                                                                  std::string const &                       dataType,
+                                                                  std::string const &                       returnVariable ) const
+{
+  bool chained       = flavourFlags & CommandFlavourFlagBits::chained;
+  bool singular      = flavourFlags & CommandFlavourFlagBits::singular;
+  bool withAllocator = flavourFlags & CommandFlavourFlagBits::withAllocator;
+
+  switch ( vectorParams.size() )
+  {
+    case 0:
+      // two independent scalar returns -> a std::pair with references onto its members
+      assert( !singular && !chained );
+      {
+        std::string firstDataVariable  = startLowerCase( stripPrefix( commandData.params[returnParams[0]].name, "p" ) );
+        std::string secondDataVariable = startLowerCase( stripPrefix( commandData.params[returnParams[1]].name, "p" ) );
+
+        std::string const dataDeclarationTemplate = R"(std::pair<${firstDataType},${secondDataType}> data;
+    ${firstDataType} & ${firstDataVariable} = data.first;
+    ${secondDataType} & ${secondDataVariable} = data.second;)";
+
+        return replaceWithMap( dataDeclarationTemplate,
+                               { { "firstDataType", dataTypes[0] },
+                                 { "firstDataVariable", firstDataVariable },
+                                 { "secondDataType", dataTypes[1] },
+                                 { "secondDataVariable", secondDataVariable } } );
+      }
+      break;
+    case 1:
+      // a counter and the vector it sizes (the enumerate pattern)
+      assert( ( returnParams[0] == vectorParams.begin()->second.lenParam ) && ( returnParams[1] == vectorParams.begin()->first ) && !singular );
+      {
+        std::string counterVariable = startLowerCase( stripPrefix( commandData.params[returnParams[0]].name, "p" ) );
+        if ( !chained )
+        {
+          std::string vectorAllocator = withAllocator ? ( "( " + startLowerCase( stripPrefix( dataTypes[1], "VULKAN_HPP_NAMESPACE::" ) ) + "Allocator )" ) : "";
+
+          std::string const dataDeclarationTemplate = R"(${returnType} ${returnVariable}${vectorAllocator};
+    ${counterType} ${counterVariable};)";
+
+          return replaceWithMap( dataDeclarationTemplate,
+                                 { { "counterType", dataTypes[0] },
+                                   { "counterVariable", counterVariable },
+                                   { "returnType", dataType },
+                                   { "returnVariable", returnVariable },
+                                   { "vectorAllocator", vectorAllocator } } );
+        }
+        else
+        {
+          // chained flavour: a vector of StructureChains plus the raw vector the API fills;
+          // raii variants don't take a StructureChainAllocator
+          std::string structureChainAllocator   = raii ? "" : ", StructureChainAllocator";
+          std::string structureChainInitializer = withAllocator ? ( "( structureChainAllocator )" ) : "";
+          std::string vectorVariable            = startLowerCase( stripPrefix( commandData.params[returnParams[1]].name, "p" ) );
+
+          std::string const dataDeclarationTemplate =
+            R"(std::vector<StructureChain${structureChainAllocator}> structureChains${structureChainInitializer};
+    std::vector<${vectorElementType}> ${vectorVariable};
+    ${counterType} ${counterVariable};)";
+
+          return replaceWithMap( dataDeclarationTemplate,
+                                 {
+                                   { "counterType", dataTypes[0] },
+                                   { "counterVariable", counterVariable },
+                                   { "structureChainAllocator", structureChainAllocator },
+                                   { "structureChainInitializer", structureChainInitializer },
+                                   { "vectorElementType", dataTypes[1] },
+                                   { "vectorVariable", vectorVariable },
+                                 } );
+        }
+      }
+      break;
+    case 2:
+      // first return is the second vector param; second return is a scalar
+      assert( ( returnParams[0] == std::next( vectorParams.begin() )->first ) && ( vectorParams.find( returnParams[1] ) == vectorParams.end() ) && !chained );
+      {
+        std::string firstDataVariable  = startLowerCase( stripPrefix( commandData.params[returnParams[0]].name, "p" ) );
+        std::string secondDataVariable = startLowerCase( stripPrefix( commandData.params[returnParams[1]].name, "p" ) );
+        if ( singular )
+        {
+          // singular flavour returns a single element, so drop the plural 's' from the name
+          firstDataVariable = stripPluralS( firstDataVariable, m_tags );
+
+          std::string const dataDeclarationTemplate = R"(std::pair<${firstDataType},${secondDataType}> data;
+    ${firstDataType} & ${firstDataVariable} = data.first;
+    ${secondDataType} & ${secondDataVariable} = data.second;)";
+
+          return replaceWithMap( dataDeclarationTemplate,
+                                 { { "firstDataType", dataTypes[0] },
+                                   { "firstDataVariable", firstDataVariable },
+                                   { "secondDataType", dataTypes[1] },
+                                   { "secondDataVariable", secondDataVariable } } );
+        }
+        else
+        {
+          std::string allocatorType       = raii ? "" : ( startUpperCase( stripPrefix( dataTypes[0], "VULKAN_HPP_NAMESPACE::" ) ) + "Allocator" );
+          std::string allocateInitializer = withAllocator ? ( ", " + startLowerCase( allocatorType ) ) : "";
+          if ( !raii )
+          {
+            allocatorType = ", " + allocatorType;
+          }
+          // vector is sized by the first (non-returned) vector parameter
+          std::string vectorSize = startLowerCase( stripPrefix( commandData.params[vectorParams.begin()->first].name, "p" ) ) + ".size()";
+
+          // piecewise construction: size (and allocator) for the vector, 0 for the scalar
+          std::string const dataDeclarationTemplate =
+            R"(std::pair<std::vector<${firstDataType}${allocatorType}>,${secondDataType}> data( std::piecewise_construct, std::forward_as_tuple( ${vectorSize}${allocateInitializer} ), std::forward_as_tuple( 0 ) );
+    std::vector<${firstDataType}${allocatorType}> & ${firstDataVariable} = data.first;
+    ${secondDataType} & ${secondDataVariable} = data.second;)";
+
+          return replaceWithMap( dataDeclarationTemplate,
+                                 { { "allocateInitializer", allocateInitializer },
+                                   { "allocatorType", allocatorType },
+                                   { "firstDataType", dataTypes[0] },
+                                   { "firstDataVariable", firstDataVariable },
+                                   { "secondDataType", dataTypes[1] },
+                                   { "secondDataVariable", secondDataVariable },
+                                   { "vectorSize", vectorSize } } );
+        }
+      }
+      break;
+    default: assert( false ); return "";
+  }
+}
+
+// Generate the data declarations for a command with three return parameters:
+// a counter plus two vectors (held together in a std::pair), as asserted by the caller.
+std::string VulkanHppGenerator::generateDataDeclarations3Returns( CommandData const &              commandData,
+                                                                  std::vector<size_t> const &      returnParams,
+                                                                  CommandFlavourFlags              flavourFlags,
+                                                                  bool                             raii,
+                                                                  std::vector<std::string> const & dataTypes ) const
+{
+  std::string counterVariable      = startLowerCase( stripPrefix( commandData.params[returnParams[0]].name, "p" ) );
+  std::string firstVectorVariable  = startLowerCase( stripPrefix( commandData.params[returnParams[1]].name, "p" ) );
+  std::string secondVectorVariable = startLowerCase( stripPrefix( commandData.params[returnParams[2]].name, "p" ) );
+  // raii variants take no allocators; otherwise optionally forward user allocators into the pair
+  std::string firstVectorAllocatorType, secondVectorAllocatorType, pairConstructor;
+  if ( !raii )
+  {
+    firstVectorAllocatorType  = startUpperCase( stripPrefix( dataTypes[1], "VULKAN_HPP_NAMESPACE::" ) ) + "Allocator";
+    secondVectorAllocatorType = startUpperCase( stripPrefix( dataTypes[2], "VULKAN_HPP_NAMESPACE::" ) ) + "Allocator";
+    pairConstructor           = ( flavourFlags & CommandFlavourFlagBits::withAllocator )
+                                ? ( "( std::piecewise_construct, std::forward_as_tuple( " + startLowerCase( firstVectorAllocatorType ) + " ), std::forward_as_tuple( " +
+                          startLowerCase( secondVectorAllocatorType ) + " ) )" )
+                                : "";
+    // prepend the separator only after the lower-cased names have been derived above
+    firstVectorAllocatorType  = ", " + firstVectorAllocatorType;
+    secondVectorAllocatorType = ", " + secondVectorAllocatorType;
+  }
+
+  std::string const dataDeclarationsTemplate =
+    R"(std::pair<std::vector<${firstVectorElementType}${firstVectorAllocatorType}>, std::vector<${secondVectorElementType}${secondVectorAllocatorType}>> data${pairConstructor};
+    std::vector<${firstVectorElementType}${firstVectorAllocatorType}> & ${firstVectorVariable} = data.first;
+    std::vector<${secondVectorElementType}${secondVectorAllocatorType}> & ${secondVectorVariable} = data.second;
+    ${counterType} ${counterVariable};)";
+
+  return replaceWithMap( dataDeclarationsTemplate,
+                         { { "counterType", dataTypes[0] },
+                           { "counterVariable", counterVariable },
+                           { "firstVectorAllocatorType", firstVectorAllocatorType },
+                           { "firstVectorElementType", dataTypes[1] },
+                           { "firstVectorVariable", firstVectorVariable },
+                           { "pairConstructor", pairConstructor },
+                           { "secondVectorAllocatorType", secondVectorAllocatorType },
+                           { "secondVectorElementType", dataTypes[2] },
+                           { "secondVectorVariable", secondVectorVariable } } );
+}
+
+// Generate the post-call code that massages the raw output into the values handed back to the
+// user: copying enumerated structs into StructureChains, resizing over-allocated vectors, or
+// wrapping returned handles into a vector of UniqueHandles. Returns "" when nothing is needed.
+std::string VulkanHppGenerator::generateDataPreparation( CommandData const &                       commandData,
+                                                         size_t                                    initialSkipCount,
+                                                         std::vector<size_t> const &               returnParams,
+                                                         std::map<size_t, VectorParamData> const & vectorParams,
+                                                         std::set<size_t> const &                  templatedParams,
+                                                         CommandFlavourFlags                       flavourFlags,
+                                                         bool                                      enumerating ) const
+{
+  bool chained  = flavourFlags & CommandFlavourFlagBits::chained;
+  bool singular = flavourFlags & CommandFlavourFlagBits::singular;
+  bool unique   = flavourFlags & CommandFlavourFlagBits::unique;
+
+  // look at the second return parameter (if any): is it a vector?
+  auto vectorParamIt = ( 1 < returnParams.size() ) ? vectorParams.find( returnParams[1] ) : vectorParams.end();
+  if ( vectorParamIt != vectorParams.end() )
+  {
+    assert( !unique );
+
+    std::string vectorName = startLowerCase( stripPrefix( commandData.params[vectorParamIt->first].name, "p" ) );
+
+    if ( chained )
+    {
+      // chained enumerate: copy each returned struct into its StructureChain slot
+      assert( !singular );
+      assert( templatedParams.empty() );
+      assert( returnParams.size() == 2 );
+      assert( vectorParams.find( returnParams[0] ) == vectorParams.end() );
+      assert( ( vectorParamIt != vectorParams.end() ) && ( vectorParamIt->second.lenParam == returnParams[0] ) );
+
+      std::string vectorElementType = stripPostfix( commandData.params[vectorParamIt->first].type.compose( "VULKAN_HPP_NAMESPACE" ), " *" );
+
+      if ( enumerating )
+      {
+        // the final count may be smaller than the allocation -> shrink first, then copy
+        std::string const dataPreparationTemplate =
+          R"(VULKAN_HPP_ASSERT( ${counterName} <= ${vectorName}.size() );
+      if ( ${counterName} < ${vectorName}.size() )
+      {
+        structureChains.resize( ${counterName} );
+      }
+      for ( ${counterType} i = 0; i < ${counterName}; i++ )
+      {
+        structureChains[i].template get<${vectorElementType}>() = ${vectorName}[i];
+      })";
+
+        return replaceWithMap( dataPreparationTemplate,
+                               { { "counterName", startLowerCase( stripPrefix( commandData.params[vectorParamIt->second.lenParam].name, "p" ) ) },
+                                 { "counterType", commandData.params[vectorParamIt->second.lenParam].type.type },
+                                 { "vectorElementType", vectorElementType },
+                                 { "vectorName", vectorName } } );
+      }
+      else
+      {
+        std::string const dataPreparationTemplate =
+          R"(for ( ${counterType} i = 0; i < ${counterName}; i++ )
+    {
+      structureChains[i].template get<${vectorElementType}>() = ${vectorName}[i];
+    })";
+
+        return replaceWithMap( dataPreparationTemplate,
+                               { { "counterName", startLowerCase( stripPrefix( commandData.params[vectorParamIt->second.lenParam].name, "p" ) ) },
+                                 { "counterType", commandData.params[vectorParamIt->second.lenParam].type.type },
+                                 { "vectorElementType", vectorElementType },
+                                 { "vectorName", vectorName } } );
+      }
+    }
+    else if ( enumerating )
+    {
+      // plain enumerate: shrink every returned vector to the actually returned count
+      assert( !singular );
+      assert( ( vectorParams.size() != 2 ) ||
+              ( ( vectorParams.begin()->first == returnParams[1] ) && ( vectorParams.begin()->second.lenParam == returnParams[0] ) &&
+                ( std::next( vectorParams.begin() )->first == returnParams[2] ) &&
+                ( std::next( vectorParams.begin() )->second.lenParam == returnParams[0] ) ) );
+
+      std::string resizes;
+      for ( auto const & vp : vectorParams )
+      {
+        assert( ( std::find( returnParams.begin(), returnParams.end(), vp.first ) != returnParams.end() ) &&
+                ( std::find( returnParams.begin(), returnParams.end(), vp.second.lenParam ) != returnParams.end() ) );
+        resizes += startLowerCase( stripPrefix( commandData.params[vp.first].name, "p" ) ) + ".resize( " +
+                   startLowerCase( stripPrefix( commandData.params[vp.second.lenParam].name, "p" ) ) + " );\n";
+      }
+      // drop the trailing newline added by the loop above
+      resizes.pop_back();
+
+      std::string const dataPreparationTemplate =
+        R"(VULKAN_HPP_ASSERT( ${counterName} <= ${vectorName}.size() );
+    if ( ${counterName} < ${vectorName}.size() )
+    {
+      ${resizes}
+    })";
+
+      return replaceWithMap( dataPreparationTemplate,
+                             { { "counterName", startLowerCase( stripPrefix( commandData.params[vectorParamIt->second.lenParam].name, "p" ) ) },
+                               { "resizes", resizes },
+                               { "vectorName", vectorName } } );
+    }
+  }
+  else if ( unique && !singular && ( returnParams.size() == 1 ) && ( vectorParams.find( returnParams[0] ) != vectorParams.end() ) )
+  {
+    // unique flavour on a returned handle vector: wrap every handle into a UniqueHandle with
+    // the appropriate deleter (ObjectDestroy, or PoolFree for pool-allocated handles)
+    assert( !enumerating );
+    std::string              className = initialSkipCount ? stripPrefix( commandData.params[initialSkipCount - 1].type.type, "Vk" ) : "";
+    std::string              deleterDefinition;
+    // a len of the form "x->y" indicates pool-based allocation
+    std::vector<std::string> lenParts = tokenize( commandData.params[returnParams[0]].len, "->" );
+    switch ( lenParts.size() )
+    {
+      case 1: deleterDefinition = "ObjectDestroy<" + className + ", Dispatch> deleter( *this, allocator, d )"; break;
+      case 2:
+        {
+          auto vpiIt = vectorParams.find( returnParams[0] );
+          assert( vpiIt != vectorParams.end() );
+          std::string poolType, poolName;
+          std::tie( poolType, poolName ) = getPoolTypeAndName( commandData.params[vpiIt->second.lenParam].type.type );
+          assert( !poolType.empty() );
+          poolType          = stripPrefix( poolType, "Vk" );
+          poolName          = startLowerCase( stripPrefix( lenParts[0], "p" ) ) + "." + poolName;
+          deleterDefinition = "PoolFree<" + className + ", " + poolType + ", Dispatch> deleter( *this, " + poolName + ", d )";
+        }
+        break;
+    }
+
+    std::string handleType       = stripPrefix( commandData.params[returnParams[0]].type.type, "Vk" );
+    std::string uniqueVectorName = "unique" + stripPrefix( commandData.params[returnParams[0]].name, "p" );
+    std::string vectorAllocator  = ( flavourFlags & CommandFlavourFlagBits::withAllocator ) ? ( "( " + startLowerCase( handleType ) + "Allocator )" ) : "";
+    std::string vectorName       = startLowerCase( stripPrefix( commandData.params[returnParams[0]].name, "p" ) );
+    std::string elementName      = stripPluralS( vectorName, m_tags );
+    std::string vectorSize = getVectorSize( commandData.params, vectorParams, returnParams[0], commandData.params[returnParams[0]].type.type, templatedParams );
+
+    std::string const dataPreparationTemplate =
+      R"(std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::${handleType}, Dispatch>, ${handleType}Allocator> ${uniqueVectorName}${vectorAllocator};
+    ${uniqueVectorName}.reserve( ${vectorSize} );
+    ${deleterDefinition};
+    for ( auto const & ${elementName} : ${vectorName} )
+    {
+      ${uniqueVectorName}.push_back( UniqueHandle<${handleType}, Dispatch>( ${elementName}, deleter ) );
+    })";
+
+    return replaceWithMap( dataPreparationTemplate,
+                           { { "elementName", elementName },
+                             { "deleterDefinition", deleterDefinition },
+                             { "handleType", handleType },
+                             { "uniqueVectorName", uniqueVectorName },
+                             { "vectorAllocator", vectorAllocator },
+                             { "vectorName", vectorName },
+                             { "vectorSize", vectorSize } } );
+  }
+  return "";
+}
+
+// Generate VULKAN_HPP_ASSERT checks that a returned byte count is an exact multiple of the
+// templated element size. Only emitted for non-singular, templated vector returns whose
+// length parameter is not itself a return parameter.
+std::string VulkanHppGenerator::generateDataSizeChecks( CommandData const &                       commandData,
+                                                        std::vector<size_t> const &               returnParams,
+                                                        std::vector<std::string> const &          returnParamTypes,
+                                                        std::map<size_t, VectorParamData> const & vectorParams,
+                                                        std::set<size_t> const &                  templatedParams,
+                                                        bool                                      singular ) const
+{
+  assert( returnParams.size() == returnParamTypes.size() );
+  if ( singular )
+  {
+    return "";  // the singular flavour returns one element; no size check needed
+  }
+
+  const std::string checkTemplate = R"(    VULKAN_HPP_ASSERT( ${dataSize} % sizeof( ${dataType} ) == 0 );)";
+  std::string       checks;
+  for ( size_t idx = 0; idx < returnParams.size(); ++idx )
+  {
+    auto vpIt = vectorParams.find( returnParams[idx] );
+    if ( ( vpIt != vectorParams.end() ) && ( templatedParams.find( returnParams[idx] ) != templatedParams.end() ) &&
+         ( std::find( returnParams.begin(), returnParams.end(), vpIt->second.lenParam ) == returnParams.end() ) )
+    {
+      checks += replaceWithMap( checkTemplate,
+                                { { "dataSize", commandData.params[vpIt->second.lenParam].name }, { "dataType", returnParamTypes[idx] } } );
+    }
+  }
+
+  return checks;
+}
+
+// Generate the complete DispatchLoaderDynamic class: one PFN member per command plus the
+// init() overloads that resolve them via vkGetInstanceProcAddr / vkGetDeviceProcAddr.
+std::string VulkanHppGenerator::generateDispatchLoaderDynamic() const
+{
+  // note: everything inside this raw string is emitted verbatim into vulkan.hpp
+  const std::string dispatchLoaderDynamicTemplate = R"(
+  using PFN_dummy = void ( * )();
+
+  class DispatchLoaderDynamic : public DispatchLoaderBase
+  {
+  public:
+${commandMembers}
+
+  public:
+    DispatchLoaderDynamic() VULKAN_HPP_NOEXCEPT = default;
+    DispatchLoaderDynamic( DispatchLoaderDynamic const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DispatchLoaderDynamic(PFN_vkGetInstanceProcAddr getInstanceProcAddr) VULKAN_HPP_NOEXCEPT
+    {
+      init(getInstanceProcAddr);
+    }
+
+    void init( PFN_vkGetInstanceProcAddr getInstanceProcAddr ) VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT(getInstanceProcAddr);
+
+      vkGetInstanceProcAddr = getInstanceProcAddr;
+
+${initialCommandAssignments}
+    }
+
+    // This interface does not require a linked vulkan library.
+    DispatchLoaderDynamic( VkInstance                instance,
+                           PFN_vkGetInstanceProcAddr getInstanceProcAddr,
+                           VkDevice                  device            = {},
+                           PFN_vkGetDeviceProcAddr   getDeviceProcAddr = nullptr ) VULKAN_HPP_NOEXCEPT
+    {
+      init( instance, getInstanceProcAddr, device, getDeviceProcAddr );
+    }
+
+    // This interface does not require a linked vulkan library.
+    void init( VkInstance                instance,
+               PFN_vkGetInstanceProcAddr getInstanceProcAddr,
+               VkDevice                  device              = {},
+               PFN_vkGetDeviceProcAddr /*getDeviceProcAddr*/ = nullptr ) VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT(instance && getInstanceProcAddr);
+      vkGetInstanceProcAddr = getInstanceProcAddr;
+      init( VULKAN_HPP_NAMESPACE::Instance(instance) );
+      if (device) {
+        init( VULKAN_HPP_NAMESPACE::Device(device) );
+      }
+    }
+
+    void init( VULKAN_HPP_NAMESPACE::Instance instanceCpp ) VULKAN_HPP_NOEXCEPT
+    {
+      VkInstance instance = static_cast<VkInstance>(instanceCpp);
+
+${instanceCommandAssignments}
+    }
+
+    void init( VULKAN_HPP_NAMESPACE::Device deviceCpp ) VULKAN_HPP_NOEXCEPT
+    {
+      VkDevice device = static_cast<VkDevice>(deviceCpp);
+
+${deviceCommandAssignments}
+    }
+
+    template <typename DynamicLoader>
+    void init(VULKAN_HPP_NAMESPACE::Instance const & instance, VULKAN_HPP_NAMESPACE::Device const & device, DynamicLoader const & dl) VULKAN_HPP_NOEXCEPT
+    {
+      PFN_vkGetInstanceProcAddr getInstanceProcAddr = dl.template getProcAddress<PFN_vkGetInstanceProcAddr>("vkGetInstanceProcAddr");
+      PFN_vkGetDeviceProcAddr getDeviceProcAddr = dl.template getProcAddress<PFN_vkGetDeviceProcAddr>("vkGetDeviceProcAddr");
+      init(static_cast<VkInstance>(instance), getInstanceProcAddr, static_cast<VkDevice>(device), device ? getDeviceProcAddr : nullptr);
+    }
+
+    template <typename DynamicLoader
+#if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL
+      = VULKAN_HPP_NAMESPACE::DynamicLoader
+#endif
+    >
+    void init(VULKAN_HPP_NAMESPACE::Instance const & instance, VULKAN_HPP_NAMESPACE::Device const & device) VULKAN_HPP_NOEXCEPT
+    {
+      static DynamicLoader dl;
+      init(instance, device, dl);
+    }
+  };)";
+
+  // collect members and assignments from core features first, then from extensions
+  std::string           commandMembers, deviceCommandAssignments, initialCommandAssignments, instanceCommandAssignments;
+  std::set<std::string> listedCommands;  // some commands are listed with more than one extension!
+  for ( auto const & feature : m_features )
+  {
+    appendDispatchLoaderDynamicCommands( feature.second.requireData,
+                                         listedCommands,
+                                         feature.first,
+                                         commandMembers,
+                                         initialCommandAssignments,
+                                         instanceCommandAssignments,
+                                         deviceCommandAssignments );
+  }
+  for ( auto const & extIt : m_extensionsByNumber )
+  {
+    appendDispatchLoaderDynamicCommands( extIt.second->second.requireData,
+                                         listedCommands,
+                                         extIt.second->first,
+                                         commandMembers,
+                                         initialCommandAssignments,
+                                         instanceCommandAssignments,
+                                         deviceCommandAssignments );
+  }
+
+  return replaceWithMap( dispatchLoaderDynamicTemplate,
+                         { { "commandMembers", commandMembers },
+                           { "deviceCommandAssignments", deviceCommandAssignments },
+                           { "initialCommandAssignments", initialCommandAssignments },
+                           { "instanceCommandAssignments", instanceCommandAssignments } } );
+}
+
+// Generate the DispatchLoaderStatic class, which forwards directly to the prototypes of a
+// linked vulkan library (and is therefore guarded by VK_NO_PROTOTYPES).
+std::string VulkanHppGenerator::generateDispatchLoaderStatic() const
+{
+  const std::string dispatchLoaderStaticTemplate = R"(
+#if !defined( VK_NO_PROTOTYPES )
+  class DispatchLoaderStatic : public DispatchLoaderBase
+  {
+  public:
+${commands}
+  };
+#endif
+)";
+
+  // some commands are listed with more than one feature/extension, so track what was emitted
+  std::set<std::string> seenCommands;
+  std::string           generatedCommands;
+  for ( auto const & featureIt : m_features )
+  {
+    generatedCommands += generateDispatchLoaderStaticCommands( featureIt.second.requireData, seenCommands, featureIt.first );
+  }
+  for ( auto const & extensionIt : m_extensionsByNumber )
+  {
+    generatedCommands += generateDispatchLoaderStaticCommands( extensionIt.second->second.requireData, seenCommands, extensionIt.second->first );
+  }
+
+  return replaceWithMap( dispatchLoaderStaticTemplate, { { "commands", generatedCommands } } );
+}
+
+// Generate an additional, shortened overload ("destroy" / "( free )" / "release") for the
+// destroy/free/release commands, by textually rewriting the generated full-name command.
+// Returns "" for commands that don't qualify (aliases, or names that already shorten to "destroy").
+std::string VulkanHppGenerator::generateDestroyCommand( std::string const & name, CommandData const & commandData ) const
+{
+  // special handling for destroy functions, filter out alias functions
+  std::string commandName = generateCommandName( name, commandData.params, 1, m_tags );
+  if ( commandData.alias.empty() && ( ( ( name.substr( 2, 7 ) == "Destroy" ) && ( commandName != "destroy" ) ) || ( name.substr( 2, 4 ) == "Free" ) ||
+                                      ( name == "vkReleasePerformanceConfigurationINTEL" ) ) )
+  {
+    assert( 1 < commandData.params.size() );
+    // make sure, the object to destroy/free/release is not optional in the shortened version!
+    CommandData localCommandData        = commandData;
+    localCommandData.params[1].optional = false;
+
+    std::string destroyCommandString = generateCommand( name, localCommandData, 1, false );
+    std::string shortenedName;
+    if ( name.substr( 2, 7 ) == "Destroy" )
+    {
+      shortenedName = "destroy";
+    }
+    else if ( name.substr( 2, 4 ) == "Free" )
+    {
+      // enclose "free" in parentheses to prevent interference with MSVC debug free
+      shortenedName = "( free )";
+    }
+    else
+    {
+      assert( name == "vkReleasePerformanceConfigurationINTEL" );
+      shortenedName = "release";
+    }
+    // replace every occurrence of the full command name with the shortened one
+    size_t pos = destroyCommandString.find( commandName );
+    while ( pos != std::string::npos )
+    {
+      destroyCommandString.replace( pos, commandName.length(), shortenedName );
+      pos = destroyCommandString.find( commandName, pos );
+    }
+    // we need to remove the default argument for the first argument, to prevent ambiguities!
+    assert( 1 < localCommandData.params.size() );
+    pos = destroyCommandString.find( localCommandData.params[1].name );  // skip the standard version of the function
+    assert( pos != std::string::npos );
+    pos = destroyCommandString.find( localCommandData.params[1].name,
+                                     pos + 1 );  // get the argument to destroy in the advanced version
+    assert( pos != std::string::npos );
+    pos = destroyCommandString.find( " VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT", pos );
+    if ( pos != std::string::npos )
+    {
+      destroyCommandString.erase( pos, strlen( " VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT" ) );
+    }
+    return "\n" + destroyCommandString;
+  }
+  return "";
+}
+
+std::string VulkanHppGenerator::generateDispatchLoaderDynamicCommandAssignment( std::string const & commandName,
+                                                                                CommandData const & commandData,
+                                                                                std::string const & firstArg ) const
+{
+  if ( commandName == "vkGetInstanceProcAddr" )
+  {
+    // Don't overwite vkGetInstanceProcAddr with NULL.
+    return "";
+  }
+  std::string str = "      " + commandName + " = PFN_" + commandName + "( vkGet" + ( ( firstArg == "device" ) ? "Device" : "Instance" ) + "ProcAddr( " +
+                    firstArg + ", \"" + commandName + "\" ) );\n";
+  // if this is an alias'ed function, use it as a fallback for the original one
+  if ( !commandData.alias.empty() )
+  {
+    str += "      if ( !" + commandData.alias + " ) " + commandData.alias + " = " + commandName + ";\n";
+  }
+  return str;
+}
+
+std::string VulkanHppGenerator::generateDispatchLoaderStaticCommands( std::vector<RequireData> const & requireData,
+                                                                      std::set<std::string> &          listedCommands,
+                                                                      std::string const &              title ) const
+{
+  std::string str;
+  for ( auto const & require : requireData )
+  {
+    for ( auto const & command : require.commands )
+    {
+      // some commands are listed for multiple extensions !
+      if ( listedCommands.insert( command ).second )
+      {
+        auto commandIt = m_commands.find( command );
+        assert( commandIt != m_commands.end() );
+
+        str += "\n";
+        std::string parameterList, parameters;
+        assert( !commandIt->second.params.empty() );
+        for ( auto param : commandIt->second.params )
+        {
+          parameterList += param.type.compose( "" ) + " " + param.name + generateCArraySizes( param.arraySizes ) + ", ";
+          parameters += param.name + ", ";
+        }
+        assert( parameterList.ends_with( ", " ) && parameters.ends_with( ", " ) );
+        parameterList.resize( parameterList.size() - 2 );
+        parameters.resize( parameters.size() - 2 );
+
+        const std::string commandTemplate = R"(
+    ${returnType} ${commandName}( ${parameterList} ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::${commandName}( ${parameters} );
+    }
+)";
+
+        str += replaceWithMap( commandTemplate,
+                               { { "commandName", commandIt->first },
+                                 { "parameterList", parameterList },
+                                 { "parameters", parameters },
+                                 { "returnType", commandIt->second.returnType } } );
+      }
+    }
+  }
+  return addTitleAndProtection( title, str );
+}
+
std::string VulkanHppGenerator::generateEnum( std::pair<std::string, EnumData> const & enumData, std::string const & surroundingProtect ) const
{
  // Generates the "enum class" declaration for one Vulkan enum, including per-value protection defines,
  // alias values, a possible "using" alias for the enum itself and - for bitmask enums - the bitmask wrapper.
  std::string baseType, bitmask;
  if ( enumData.second.isBitmask )
  {
    // a bitmask enum gets the corresponding VkFlags type as its base type
    auto bitmaskIt =
      std::find_if( m_bitmasks.begin(), m_bitmasks.end(), [&enumData]( auto const & bitmask ) { return bitmask.second.requirements == enumData.first; } );
    assert( bitmaskIt != m_bitmasks.end() );
    baseType = " : " + bitmaskIt->first;
    bitmask  = generateBitmask( bitmaskIt, surroundingProtect );
  }

  std::string                        enumValues, previousEnter, previousLeave;
  std::map<std::string, std::string> valueToNameMap;
  for ( auto const & value : enumData.second.values )
  {
    // determine the values protect, if any
    std::string valueProtect = getProtect( value );

    // if the value's protect differs from the surrounding protect, generate protection code
    std::string enter, leave;
    if ( !valueProtect.empty() && ( valueProtect != surroundingProtect ) )
    {
      tie( enter, leave ) = generateProtection( valueProtect );
    }
    // only close the previous protection block and open a new one when the protect actually changes,
    // so consecutive values with the same protect share one block
    if ( previousEnter != enter )
    {
      enumValues += previousLeave + enter;
    }
    std::string valueName = generateEnumValueName( enumData.first, value.name, enumData.second.isBitmask, m_tags );
    enumValues += "    " + valueName + " = " + value.name + ",\n";
    assert( valueToNameMap.find( valueName ) == valueToNameMap.end() );
    valueToNameMap[valueName] = value.name;

    previousEnter = enter;
    previousLeave = leave;
  }
  enumValues += previousLeave;

  for ( auto const & alias : enumData.second.aliases )
  {
    std::string aliasName =
      generateEnumValueName( enumData.second.alias.empty() ? enumData.first : enumData.second.alias, alias.first, enumData.second.isBitmask, m_tags );
    // make sure to only list alias values that differ from all previous values
    auto valueToNameIt = valueToNameMap.find( aliasName );
    if ( valueToNameIt == valueToNameMap.end() )
    {
#if !defined( NDEBUG )
      // debug-only check: follow the alias chain down to a real enum value and assert that value is unprotected
      auto enumIt = std::find_if(
        enumData.second.values.begin(), enumData.second.values.end(), [&alias]( EnumValueData const & evd ) { return alias.second.name == evd.name; } );
      if ( enumIt == enumData.second.values.end() )
      {
        auto aliasIt = enumData.second.aliases.find( alias.second.name );
        assert( aliasIt != enumData.second.aliases.end() );
        auto nextAliasIt = enumData.second.aliases.find( aliasIt->second.name );
        while ( nextAliasIt != enumData.second.aliases.end() )
        {
          aliasIt     = nextAliasIt;
          nextAliasIt = enumData.second.aliases.find( aliasIt->second.name );
        }
        enumIt = std::find_if(
          enumData.second.values.begin(), enumData.second.values.end(), [&aliasIt]( EnumValueData const & evd ) { return aliasIt->second.name == evd.name; } );
      }
      assert( enumIt != enumData.second.values.end() );
      assert( enumIt->extension.empty() || generateProtection( getProtectFromTitle( enumIt->extension ) ).first.empty() );
#endif
      enumValues += "    " + aliasName + " = " + alias.first + ",\n";

      // map the aliasName to the name of the base
      std::string baseName = findBaseName( alias.second.name, enumData.second.aliases );
      assert( std::find_if( enumData.second.values.begin(),
                            enumData.second.values.end(),
                            [&baseName]( EnumValueData const & evd ) { return evd.name == baseName; } ) != enumData.second.values.end() );
      valueToNameMap[aliasName] = baseName;
    }
#if !defined( NDEBUG )
    else
    {
      // verify, that the identical value represents the identical name
      std::string baseName = findBaseName( alias.second.name, enumData.second.aliases );
      assert( std::find_if( enumData.second.values.begin(),
                            enumData.second.values.end(),
                            [&baseName]( EnumValueData const & evd ) { return evd.name == baseName; } ) != enumData.second.values.end() );
      assert( baseName == valueToNameIt->second );
    }
#endif
  }
  if ( !enumValues.empty() )
  {
    // strip the trailing comma after the last value and re-wrap the list for the template below
    size_t pos = enumValues.rfind( ',' );
    assert( pos != std::string::npos );
    enumValues.erase( pos, 1 );
    enumValues = "\n" + enumValues + "  ";
  }

  std::string enumUsing;
  if ( !enumData.second.alias.empty() )
  {
    // the enum got renamed in the spec: provide the old name as an alias
    enumUsing += "  using " + stripPrefix( enumData.second.alias, "Vk" ) + " = " + stripPrefix( enumData.first, "Vk" ) + ";\n";
  }

  const std::string enumTemplate = R"(  enum class ${enumName}${baseType}
  {${enumValues}};
${enumUsing}${bitmask})";

  return replaceWithMap( enumTemplate,
                         { { "baseType", baseType },
                           { "bitmask", bitmask },
                           { "enumName", stripPrefix( enumData.first, "Vk" ) },
                           { "enumUsing", enumUsing },
                           { "enumValues", enumValues } } );
}
+
+std::string VulkanHppGenerator::generateEnums() const
+{
+  const std::string enumsTemplate = R"(
+  //=============
+  //=== ENUMs ===
+  //=============
+
+${enums}
+)";
+
+  std::string           enums;
+  std::set<std::string> listedEnums;
+  for ( auto const & feature : m_features )
+  {
+    enums += generateEnums( feature.second.requireData, listedEnums, feature.first );
+  }
+  for ( auto const & extIt : m_extensionsByNumber )
+  {
+    enums += generateEnums( extIt.second->second.requireData, listedEnums, extIt.second->first );
+  }
+
+  return replaceWithMap( enumsTemplate, { { "enums", enums } } );
+}
+
+std::string
+  VulkanHppGenerator::generateEnums( std::vector<RequireData> const & requireData, std::set<std::string> & listedEnums, std::string const & title ) const
+{
+  std::string surroundingProtect = getProtectFromTitle( title );
+  std::string str;
+  for ( auto const & require : requireData )
+  {
+    for ( auto const & type : require.types )
+    {
+      auto enumIt = m_enums.find( type );
+      if ( ( enumIt != m_enums.end() ) && ( listedEnums.find( type ) == listedEnums.end() ) )
+      {
+        listedEnums.insert( type );
+        str += "\n";
+        str += generateEnum( *enumIt, surroundingProtect );
+      }
+    }
+  }
+  return addTitleAndProtection( title, str );
+}
+
+std::string VulkanHppGenerator::generateEnumsToString() const
+{
+  // start with toHexString, which is used in all the to_string functions here!
+  const std::string enumsToStringTemplate = R"(
+  //=======================
+  //=== ENUMs to_string ===
+  //=======================
+
+  VULKAN_HPP_INLINE std::string toHexString( uint32_t value )
+  {
+#if __cpp_lib_format
+    return std::format( "{:x}", value );
+#else
+    std::stringstream stream;
+    stream << std::hex << value;
+    return stream.str();
+#endif
+  }
+
+${enumsToString}
+)";
+
+  std::string           enumsToString;
+  std::set<std::string> listedEnums;
+  for ( auto const & feature : m_features )
+  {
+    enumsToString += generateEnumsToString( feature.second.requireData, listedEnums, feature.first );
+  }
+  for ( auto const & extIt : m_extensionsByNumber )
+  {
+    enumsToString += generateEnumsToString( extIt.second->second.requireData, listedEnums, extIt.second->first );
+  }
+
+  return replaceWithMap( enumsToStringTemplate, { { "enumsToString", enumsToString } } );
+}
+
+std::string VulkanHppGenerator::generateEnumsToString( std::vector<RequireData> const & requireData,
+                                                       std::set<std::string> &          listedEnums,
+                                                       std::string const &              title ) const
+{
+  std::string str;
+  for ( auto const & require : requireData )
+  {
+    for ( auto const & type : require.types )
+    {
+      auto enumIt = m_enums.find( type );
+      if ( ( enumIt != m_enums.end() ) && ( listedEnums.find( type ) == listedEnums.end() ) )
+      {
+        listedEnums.insert( type );
+
+        str += "\n";
+        str += generateEnumToString( *enumIt );
+      }
+    }
+  }
+  return addTitleAndProtection( title, str );
+}
+
+std::string VulkanHppGenerator::generateEnumInitializer( TypeInfo const &                   type,
+                                                         std::vector<std::string> const &   arraySizes,
+                                                         std::vector<EnumValueData> const & values,
+                                                         bool                               bitmask ) const
+{
+  // enum arguments might need special initialization
+  assert( type.prefix.empty() && !values.empty() );
+  std::string valueName = generateEnumValueName( type.type, values.front().name, bitmask, m_tags );
+  std::string value     = generateNamespacedType( type.type ) + "::" + valueName;
+  std::string str;
+  if ( arraySizes.empty() )
+  {
+    str += value;
+  }
+  else
+  {
+    assert( arraySizes.size() == 1 );
+    auto constIt = m_constants.find( arraySizes[0] );
+    int  count   = std::stoi( ( constIt == m_constants.end() ) ? arraySizes[0] : constIt->second );
+    assert( 1 < count );
+    str += "{ { " + value;
+    for ( int i = 1; i < count; i++ )
+    {
+      str += ", " + value;
+    }
+    str += " } }";
+  }
+  return str;
+}
+
std::string VulkanHppGenerator::generateEnumToString( std::pair<std::string, EnumData> const & enumData ) const
{
  // Generates the to_string overload for one enum: a switch over all (possibly protected) values, or a
  // constant "(void)" result for an enum without any values.
  std::string enumName = stripPrefix( enumData.first, "Vk" );
  std::string functionBody;
  if ( enumData.second.values.empty() )
  {
    functionBody = R"x(    return "(void)";)x";
  }
  else
  {
    std::string cases, previousEnter, previousLeave;
    for ( auto const & value : enumData.second.values )
    {
      auto [enter, leave] = generateProtection( getProtect( value ) );
      // only close the previous protection block and open a new one when the protect actually changes,
      // so consecutive values with the same protect share one block
      if ( previousEnter != enter )
      {
        cases += previousLeave + enter;
      }

      const std::string caseTemplate = R"(      case ${enumName}::e${valueName} : return "${valueName}";
)";
      // substr( 1 ) strips the leading 'e' of the generated value name; the template re-adds it in the case label
      cases += replaceWithMap(
        caseTemplate,
        { { "enumName", enumName }, { "valueName", generateEnumValueName( enumData.first, value.name, enumData.second.isBitmask, m_tags ).substr( 1 ) } } );

      previousEnter = enter;
      previousLeave = leave;
    }
    cases += previousLeave;

    const std::string functionBodyTemplate =
      R"x(    switch ( value )
    {
${cases}      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
)x";

    functionBody = replaceWithMap( functionBodyTemplate, { { "cases", cases } } );
  }

  const std::string enumToStringTemplate = R"(
  VULKAN_HPP_INLINE std::string to_string( ${enumName}${argument} )
  {
${functionBody}
  }
)";

  // a value-less enum gets no argument name, avoiding an "unused parameter" warning in the generated code
  return replaceWithMap( enumToStringTemplate,
                         { { "argument", enumData.second.values.empty() ? "" : " value" }, { "enumName", enumName }, { "functionBody", functionBody } } );
}
+
+std::string VulkanHppGenerator::generateFailureCheck( std::vector<std::string> const & successCodes ) const
+{
+  assert( !successCodes.empty() );
+  std::string failureCheck = "result != " + generateSuccessCode( successCodes[0], m_tags );
+  if ( 1 < successCodes.size() )
+  {
+    failureCheck = "( " + failureCheck + " )";
+    for ( size_t i = 1; i < successCodes.size(); ++i )
+    {
+      failureCheck += "&& ( result != " + generateSuccessCode( successCodes[i], m_tags ) + " )";
+    }
+  }
+  return failureCheck;
+}
+
std::string VulkanHppGenerator::generateFormatTraits() const
{
  // Generates the complete "Format Traits" section: one constexpr query function per trait, each a switch
  // over the relevant VkFormat values, filled from the format data gathered out of the spec.
  if ( m_formats.empty() )
  {
    return "";
  }

  const std::string formatTraitsTemplate = R"(
  //=====================
  //=== Format Traits ===
  //=====================

  // The three-dimensional extent of a texel block.
  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 std::array<uint8_t, 3> blockExtent( VULKAN_HPP_NAMESPACE::Format format )
  {
    switch( format )
    {
${blockExtentCases}
      default: return {{1, 1, 1 }};
    }
  }

  // The texel block size in bytes.
  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t blockSize( VULKAN_HPP_NAMESPACE::Format format )
  {
    switch( format )
    {
${blockSizeCases}
      default : VULKAN_HPP_ASSERT( false ); return 0;
    }
  }

  // The class of the format (can't be just named "class"!)
  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 char const * compatibilityClass( VULKAN_HPP_NAMESPACE::Format format )
  {
    switch( format )
    {
${classCases}
      default : VULKAN_HPP_ASSERT( false ); return "";
    }
  }

  // The number of bits in this component, if not compressed, otherwise 0.
  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t componentBits( VULKAN_HPP_NAMESPACE::Format format, uint8_t component )
  {
    switch( format )
    {
${componentBitsCases}
      default: return 0;
    }
  }

  // The number of components of this format.
  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t componentCount( VULKAN_HPP_NAMESPACE::Format format )
  {
    switch( format )
    {
${componentCountCases}
      default: return 0;
    }
  }

  // The name of the component
  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 char const * componentName( VULKAN_HPP_NAMESPACE::Format format, uint8_t component )
  {
    switch( format )
    {
${componentNameCases}
      default: return "";
    }
  }

  // The numeric format of the component
  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 char const * componentNumericFormat( VULKAN_HPP_NAMESPACE::Format format, uint8_t component )
  {
    switch( format )
    {
${componentNumericFormatCases}
      default: return "";
    }
  }

  // The plane this component lies in.
  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t componentPlaneIndex( VULKAN_HPP_NAMESPACE::Format format, uint8_t component )
  {
    switch( format )
    {
${componentPlaneIndexCases}
      default: return 0;
    }
  }

  // True, if the components of this format are compressed, otherwise false.
  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 bool componentsAreCompressed( VULKAN_HPP_NAMESPACE::Format format )
  {
    switch( format )
    {
${componentsAreCompressedCases}
        return true;
      default: return false;
    }
  }

  // A textual description of the compression scheme, or an empty string if it is not compressed
  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 char const * compressionScheme( VULKAN_HPP_NAMESPACE::Format format )
  {
    switch( format )
    {
${compressionSchemeCases}
      default: return "";
    }
  }

  // True, if this format is a compressed one.
  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 bool isCompressed( VULKAN_HPP_NAMESPACE::Format format )
  {
    return ( *VULKAN_HPP_NAMESPACE::compressionScheme( format ) != 0 );
  }

  // The number of bits into which the format is packed. A single image element in this format
  // can be stored in the same space as a scalar type of this bit width.
  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t packed( VULKAN_HPP_NAMESPACE::Format format )
  {
    switch( format )
    {
${packedCases}
      default: return 0;
    }
  }

  // The single-plane format that this plane is compatible with.
  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 VULKAN_HPP_NAMESPACE::Format planeCompatibleFormat( VULKAN_HPP_NAMESPACE::Format format, uint8_t plane )
  {
    switch( format )
    {
${planeCompatibleCases}
      default: VULKAN_HPP_ASSERT( plane == 0 ); return format;
    }
  }

  // The number of image planes of this format.
  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t planeCount( VULKAN_HPP_NAMESPACE::Format format )
  {
    switch( format )
    {
${planeCountCases}
      default: return 1;
    }
  }

  // The relative height of this plane. A value of k means that this plane is 1/k the height of the overall format.
  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t planeHeightDivisor( VULKAN_HPP_NAMESPACE::Format format, uint8_t plane )
  {
    switch( format )
    {
${planeHeightDivisorCases}
      default: VULKAN_HPP_ASSERT( plane == 0 ); return 1;
    }
  }

  // The relative width of this plane. A value of k means that this plane is 1/k the width of the overall format.
  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t planeWidthDivisor( VULKAN_HPP_NAMESPACE::Format format, uint8_t plane )
  {
    switch( format )
    {
${planeWidthDivisorCases}
      default: VULKAN_HPP_ASSERT( plane == 0 ); return 1;
    }
  }

  // The number of texels in a texel block.
  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t texelsPerBlock( VULKAN_HPP_NAMESPACE::Format format )
  {
    switch( format )
    {
${texelsPerBlockCases}
      default: VULKAN_HPP_ASSERT( false ); return 0;
    }
  }
)";

  // walk all VkFormat values, skipping VK_FORMAT_UNDEFINED (asserted to be the first one); every remaining
  // format is expected to have trait data in m_formats
  auto formatIt = m_enums.find( "VkFormat" );
  assert( formatIt != m_enums.end() );
  assert( formatIt->second.values.front().name == "VK_FORMAT_UNDEFINED" );

  std::string blockSizeCases, blockExtentCases, classCases, componentBitsCases, componentCountCases, componentNameCases, componentNumericFormatCases,
    componentPlaneIndexCases, componentsAreCompressedCases, compressionSchemeCases, packedCases, planeCompatibleCases, planeCountCases, planeHeightDivisorCases,
    planeWidthDivisorCases, texelsPerBlockCases;
  for ( auto formatValuesIt = std::next( formatIt->second.values.begin() ); formatValuesIt != formatIt->second.values.end(); ++formatValuesIt )
  {
    auto traitIt = m_formats.find( formatValuesIt->name );
    assert( traitIt != m_formats.end() );
    // the case label shared by all trait switches for this format
    std::string caseString = "      case VULKAN_HPP_NAMESPACE::Format::" + generateEnumValueName( "VkFormat", traitIt->first, false, m_tags ) + ":";

    blockSizeCases += caseString + " return " + traitIt->second.blockSize + ";\n";

    // only formats that specify a block extent get a case; the others fall into the default {1,1,1}
    if ( !traitIt->second.blockExtent.empty() )
    {
      std::vector<std::string> blockExtent = tokenize( traitIt->second.blockExtent, "," );
      assert( blockExtent.size() == 3 );
      blockExtentCases += caseString + " return {{ " + blockExtent[0] + ", " + blockExtent[1] + ", " + blockExtent[2] + " }};\n";
    }

    classCases += caseString + " return \"" + traitIt->second.classAttribute + "\";\n";

    // componentBits only applies to uncompressed formats; compressed ones fall into the default returning 0
    if ( traitIt->second.components.front().bits != "compressed" )
    {
      const std::string componentBitsCaseTemplate = R"(${caseString}
        switch( component )
        {
${componentCases}
          default: VULKAN_HPP_ASSERT( false ); return 0;
        }
)";

      std::string componentCases;
      for ( size_t i = 0; i < traitIt->second.components.size(); ++i )
      {
        componentCases += "          case " + std::to_string( i ) + ": return " + traitIt->second.components[i].bits + ";\n";
      }
      // drop the trailing newline before substituting into the template
      componentCases.pop_back();
      componentBitsCases += replaceWithMap( componentBitsCaseTemplate, { { "caseString", caseString }, { "componentCases", componentCases } } );
    }

    componentCountCases += caseString + " return " + std::to_string( traitIt->second.components.size() ) + ";\n";

    {
      const std::string componentNameCaseTemplate = R"(${caseString}
        switch( component )
        {
${componentCases}
          default: VULKAN_HPP_ASSERT( false ); return "";
        }
)";

      std::string componentCases;
      for ( size_t i = 0; i < traitIt->second.components.size(); ++i )
      {
        componentCases += "          case " + std::to_string( i ) + ": return \"" + traitIt->second.components[i].name + "\";\n";
      }
      componentCases.pop_back();
      componentNameCases += replaceWithMap( componentNameCaseTemplate, { { "caseString", caseString }, { "componentCases", componentCases } } );
    }

    {
      const std::string componentNumericFormatCaseTemplate = R"(${caseString}
        switch( component )
        {
${componentCases}
          default: VULKAN_HPP_ASSERT( false ); return "";
        }
)";

      std::string componentCases;
      for ( size_t i = 0; i < traitIt->second.components.size(); ++i )
      {
        componentCases += "          case " + std::to_string( i ) + ": return \"" + traitIt->second.components[i].numericFormat + "\";\n";
      }
      componentCases.pop_back();
      componentNumericFormatCases +=
        replaceWithMap( componentNumericFormatCaseTemplate, { { "caseString", caseString }, { "componentCases", componentCases } } );
    }

    // only multi-planar formats list a plane index per component
    if ( !traitIt->second.components.front().planeIndex.empty() )
    {
      const std::string componentPlaneIndexCaseTemplate = R"(${caseString}
        switch( component )
        {
${componentCases}
          default: VULKAN_HPP_ASSERT( false ); return 0;
        }
)";

      std::string componentCases;
      for ( size_t i = 0; i < traitIt->second.components.size(); ++i )
      {
        componentCases += "          case " + std::to_string( i ) + ": return " + traitIt->second.components[i].planeIndex + ";\n";
      }
      componentCases.pop_back();
      componentPlaneIndexCases += replaceWithMap( componentPlaneIndexCaseTemplate, { { "caseString", caseString }, { "componentCases", componentCases } } );
    }

    // compressed formats share a fall-through "return true" in componentsAreCompressed, so just the label here
    if ( traitIt->second.components.front().bits == "compressed" )
    {
      componentsAreCompressedCases += caseString + "\n";
    }

    if ( !traitIt->second.compressed.empty() )
    {
      compressionSchemeCases += caseString + " return \"" + traitIt->second.compressed + "\";\n";
    }

    if ( !traitIt->second.packed.empty() )
    {
      packedCases += caseString + " return " + traitIt->second.packed + ";\n";
    }

    // the plane-related traits are only generated for formats that actually list planes
    if ( !traitIt->second.planes.empty() )
    {
      const std::string planeCompatibleCaseTemplate = R"(${caseString}
        switch( plane )
        {
${compatibleCases}
          default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
        }
)";

      const std::string planeHeightDivisorCaseTemplate = R"(${caseString}
        switch( plane )
        {
${heightDivisorCases}
          default: VULKAN_HPP_ASSERT( false ); return 1;
        }
)";

      const std::string planeWidthDivisorCaseTemplate = R"(${caseString}
        switch( plane )
        {
${widthDivisorCases}
          default: VULKAN_HPP_ASSERT( false ); return 1;
        }
)";

      std::string compatibleCases, heightDivisorCases, widthDivisorCases;
      for ( size_t i = 0; i < traitIt->second.planes.size(); ++i )
      {
        compatibleCases += "          case " + std::to_string( i ) + ": return VULKAN_HPP_NAMESPACE::Format::" +
                           generateEnumValueName( "VkFormat", traitIt->second.planes[i].compatible, false, m_tags ) + ";\n";
        heightDivisorCases += "          case " + std::to_string( i ) + ": return " + traitIt->second.planes[i].heightDivisor + ";\n";
        widthDivisorCases += "          case " + std::to_string( i ) + ": return " + traitIt->second.planes[i].widthDivisor + ";\n";
      }
      compatibleCases.pop_back();
      heightDivisorCases.pop_back();
      widthDivisorCases.pop_back();

      planeCompatibleCases += replaceWithMap( planeCompatibleCaseTemplate, { { "caseString", caseString }, { "compatibleCases", compatibleCases } } );

      planeCountCases += caseString + " return " + std::to_string( traitIt->second.planes.size() ) + ";\n";

      planeHeightDivisorCases +=
        replaceWithMap( planeHeightDivisorCaseTemplate, { { "caseString", caseString }, { "heightDivisorCases", heightDivisorCases } } );

      planeWidthDivisorCases += replaceWithMap( planeWidthDivisorCaseTemplate, { { "caseString", caseString }, { "widthDivisorCases", widthDivisorCases } } );
    }

    texelsPerBlockCases += caseString + " return " + traitIt->second.texelsPerBlock + ";\n";
  }

  return replaceWithMap( formatTraitsTemplate,
                         { { "blockExtentCases", blockExtentCases },
                           { "blockSizeCases", blockSizeCases },
                           { "classCases", classCases },
                           { "componentBitsCases", componentBitsCases },
                           { "componentCountCases", componentCountCases },
                           { "componentNameCases", componentNameCases },
                           { "componentNumericFormatCases", componentNumericFormatCases },
                           { "componentPlaneIndexCases", componentPlaneIndexCases },
                           { "componentsAreCompressedCases", componentsAreCompressedCases },
                           { "compressionSchemeCases", compressionSchemeCases },
                           { "packedCases", packedCases },
                           { "planeCompatibleCases", planeCompatibleCases },
                           { "planeCountCases", planeCountCases },
                           { "planeHeightDivisorCases", planeHeightDivisorCases },
                           { "planeWidthDivisorCases", planeWidthDivisorCases },
                           { "texelsPerBlockCases", texelsPerBlockCases } } );
}
+
+std::string VulkanHppGenerator::generateFunctionPointerCheck( std::string const & function, std::string const & referencedIn ) const
+{
+  std::string functionPointerCheck;
+  if ( m_extensions.find( referencedIn ) != m_extensions.end() )
+  {
+    std::string message  = "Function <" + function + "> needs extension <" + referencedIn + "> enabled!";
+    functionPointerCheck = "VULKAN_HPP_ASSERT( getDispatcher()->" + function + " && \"" + message + "\" );";
+  }
+  return functionPointerCheck;
+}
+
+std::string VulkanHppGenerator::generateHandle( std::pair<std::string, HandleData> const & handleData, std::set<std::string> & listedHandles ) const
+{
+  assert( listedHandles.find( handleData.first ) == listedHandles.end() );
+
+  // first check for any handle that needs to be listed before this one
+  std::string str = generateHandleDependencies( handleData, listedHandles );
+
+  // list the commands of this handle
+  if ( handleData.first.empty() )
+  {
+    // the free functions, not bound to any handle
+    str += generateHandleEmpty( handleData.second );
+  }
+  else
+  {
+    // append any forward declaration of Deleters used by this handle
+    if ( !handleData.second.childrenHandles.empty() )
+    {
+      str += generateUniqueTypes( handleData.first, handleData.second.childrenHandles );
+    }
+    else if ( handleData.first == "VkPhysicalDevice" )
+    {
+      // special handling for class Device, as it's created from PhysicalDevice, but destroys itself
+      str += generateUniqueTypes( "", { "VkDevice" } );
+    }
+
+    // list all the commands that are mapped to members of this class
+    std::string commands = generateHandleCommandDeclarations( handleData.second.commands );
+
+    // create CPPType template specialization and the debugReportObjectType
+    std::string valueName = handleData.second.objTypeEnum;
+    valueName             = valueName.replace( 3, 0, "DEBUG_REPORT_" ) + "_EXT";
+    auto enumIt           = m_enums.find( "VkDebugReportObjectTypeEXT" );
+    assert( enumIt != m_enums.end() );
+    auto valueIt =
+      std::find_if( enumIt->second.values.begin(), enumIt->second.values.end(), [&valueName]( EnumValueData const & evd ) { return valueName == evd.name; } );
+
+    std::string className = stripPrefix( handleData.first, "Vk" );
+    std::string cppType, debugReportObjectType;
+    if ( valueIt == enumIt->second.values.end() )
+    {
+      debugReportObjectType = "eUnknown";
+    }
+    else
+    {
+      static const std::string cppTypeFromDebugReportObjectTypeEXTTemplate = R"(
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::e${className}>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::${className};
+  };
+)";
+      cppType               = replaceWithMap( cppTypeFromDebugReportObjectTypeEXTTemplate, { { "className", className } } );
+      debugReportObjectType = generateEnumValueName( enumIt->first, valueIt->name, false, m_tags );
+    }
+
+    auto [enter, leave] = generateProtection( handleData.second.alias.empty() ? getProtectFromType( handleData.first ) : "" );
+
+    assert( !handleData.second.objTypeEnum.empty() );
+    enumIt = m_enums.find( "VkObjectType" );
+    assert( enumIt != m_enums.end() );
+    valueIt = std::find_if( enumIt->second.values.begin(),
+                            enumIt->second.values.end(),
+                            [&handleData]( EnumValueData const & evd ) { return evd.name == handleData.second.objTypeEnum; } );
+    assert( valueIt != enumIt->second.values.end() );
+    std::string usingAlias;
+    if ( !handleData.second.alias.empty() )
+    {
+      usingAlias += "  using " + stripPrefix( handleData.second.alias, "Vk" ) + " = " + stripPrefix( handleData.first, "Vk" ) + ";\n";
+    }
+
+    const std::string typesafeExplicitKeyword          = handleData.second.isDispatchable ? "" : "VULKAN_HPP_TYPESAFE_EXPLICIT ";
+    const std::string typesafeConversionConditional    = handleData.second.isDispatchable ? "" : "#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)\n";
+    const std::string typesafeConversionConditionalEnd = handleData.second.isDispatchable ? "" : "#endif\n";
+
+    static const std::string templateString = R"(
+${enter}  class ${className}
+  {
+  public:
+    using CType = Vk${className};
+    using NativeType = Vk${className};
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::${objTypeEnum};
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::${debugReportObjectType};
+
+  public:
+    VULKAN_HPP_CONSTEXPR ${className}() = default;
+    VULKAN_HPP_CONSTEXPR ${className}( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {}
+    ${typesafeExplicitKeyword}${className}( Vk${className} ${memberName} ) VULKAN_HPP_NOEXCEPT
+      : m_${memberName}( ${memberName} )
+    {}
+
+${typesafeConversionConditional}    ${className} & operator=(Vk${className} ${memberName}) VULKAN_HPP_NOEXCEPT
+    {
+      m_${memberName} = ${memberName};
+      return *this;
+    }
+${typesafeConversionConditionalEnd}
+    ${className} & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_${memberName} = {};
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ${className} const & ) const = default;
+#else
+    bool operator==( ${className} const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_${memberName} == rhs.m_${memberName};
+    }
+
+    bool operator!=(${className} const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_${memberName} != rhs.m_${memberName};
+    }
+
+    bool operator<(${className} const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_${memberName} < rhs.m_${memberName};
+    }
+#endif
+${commands}
+    ${typesafeExplicitKeyword}operator Vk${className}() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_${memberName};
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_${memberName} != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_${memberName} == VK_NULL_HANDLE;
+    }
+
+  private:
+    Vk${className} m_${memberName} = {};
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::${objTypeEnum}>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::${className};
+  };
+
+${CppType}
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::${className}>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+${usingAlias}${leave})";
+
+    str += replaceWithMap( templateString,
+                           { { "className", className },
+                             { "commands", commands },
+                             { "CppType", cppType },
+                             { "debugReportObjectType", debugReportObjectType },
+                             { "enter", enter },
+                             { "leave", leave },
+                             { "memberName", startLowerCase( stripPrefix( handleData.first, "Vk" ) ) },
+                             { "objTypeEnum", generateEnumValueName( enumIt->first, valueIt->name, false, m_tags ) },
+                             { "usingAlias", usingAlias },
+                             { "typesafeExplicitKeyword", typesafeExplicitKeyword },
+                             { "typesafeConversionConditional", typesafeConversionConditional },
+                             { "typesafeConversionConditionalEnd", typesafeConversionConditionalEnd } } );
+  }
+
+  listedHandles.insert( handleData.first );
+  return str;
+}
+
+// Generates the in-class member function declarations for all <commands> of a handle class.
+// Commands are grouped first by core feature (API version), then by extension in extension-number
+// order; each non-empty group is headed by a "//=== <title> ===" banner, and extension groups are
+// additionally wrapped in their platform-protection #if/#endif guards.
+std::string VulkanHppGenerator::generateHandleCommandDeclarations( std::set<std::string> const & commands ) const
+{
+  std::string           str;
+  std::set<std::string> listedCommands;  // some commands are listed with more than one extension!
+  for ( auto const & feature : m_features )
+  {
+    // pick the subset of <commands> that this feature requires and that has not been listed yet
+    std::vector<std::string> commandNames = selectCommandsByHandle( feature.second.requireData, commands, listedCommands );
+    if ( !commandNames.empty() )
+    {
+      str += "\n  //=== " + feature.first + " ===\n";
+      for ( auto const & command : commandNames )
+      {
+        auto commandIt = m_commands.find( command );
+        assert( commandIt != m_commands.end() );
+
+        // declaration of the command itself, plus a destroy/free overload where applicable
+        str += "\n";
+        str += generateCommand( commandIt->first, commandIt->second, 1, false );
+        str += generateDestroyCommand( commandIt->first, commandIt->second );
+      }
+    }
+  }
+  for ( auto const & extIt : m_extensionsByNumber )
+  {
+    std::vector<std::string> commandNames = selectCommandsByHandle( extIt.second->second.requireData, commands, listedCommands );
+    if ( !commandNames.empty() )
+    {
+      // extension blocks may need platform protection (e.g. VK_USE_PLATFORM_WIN32_KHR)
+      auto [enter, leave] = generateProtection( getProtectFromTitle( extIt.second->first ) );
+      str += "\n" + enter + "  //=== " + extIt.second->first + " ===\n";
+      for ( auto const & command : commandNames )
+      {
+        auto commandIt = m_commands.find( command );
+        assert( commandIt != m_commands.end() );
+
+        // NOTE(review): commandString and commandName are computed but never used here — candidates for removal
+        std::string commandString;
+        std::string commandName = generateCommandName( commandIt->first, commandIt->second.params, 1, m_tags );
+        str += "\n";
+        str += generateCommand( commandIt->first, commandIt->second, 1, false );
+        str += generateDestroyCommand( commandIt->first, commandIt->second );
+      }
+      str += leave;
+    }
+  }
+  return str;
+}
+
+// Generates the class definitions of all handle types that appear as parameters of this handle's
+// commands and have not been emitted yet, so every handle type is defined before it is referenced.
+// <listedHandles> tracks (and is updated with) the handles already generated; recursion happens
+// via generateHandle.
+std::string VulkanHppGenerator::generateHandleDependencies( std::pair<std::string, HandleData> const & handleData, std::set<std::string> & listedHandles ) const
+{
+  std::string str;
+  for ( auto const & command : handleData.second.commands )
+  {
+    auto commandIt = m_commands.find( command );
+    assert( commandIt != m_commands.end() );
+    for ( auto const & parameter : commandIt->second.params )
+    {
+      auto handleIt = m_handles.find( parameter.type.type );
+      // emit a dependency only if the parameter is a handle type, is not this handle itself,
+      // and has not already been listed
+      if ( ( handleIt != m_handles.end() ) && ( parameter.type.type != handleData.first ) &&
+           ( listedHandles.find( parameter.type.type ) == listedHandles.end() ) )
+      {
+        str += generateHandle( *handleIt, listedHandles );
+      }
+    }
+  }
+  return str;
+}
+
+// Generates the free (non-member) functions attached to the "empty" root handle — the commands
+// that have no dispatchable handle as their first parameter (e.g. vkCreateInstance,
+// vkEnumerateInstanceVersion). Only core features are scanned; the NDEBUG-guarded loop at the end
+// asserts that no extension contributes commands to the empty handle.
+std::string VulkanHppGenerator::generateHandleEmpty( HandleData const & handleData ) const
+{
+  std::string           str;
+  std::set<std::string> listedCommands;  // some commands are listed with more than one extension !
+  for ( auto const & feature : m_features )
+  {
+    std::vector<std::string> commands = selectCommandsByHandle( feature.second.requireData, handleData.commands, listedCommands );
+    if ( !commands.empty() )
+    {
+      str += "\n  //=== " + feature.first + " ===\n";
+      for ( auto const & command : commands )
+      {
+        auto commandIt = m_commands.find( command );
+        assert( commandIt != m_commands.end() );
+        if ( commandIt->first == "vkCreateInstance" )
+        {
+          // special handling for createInstance, as we need to explicitly place the forward declarations and the
+          // deleter classes here
+#if !defined( NDEBUG )
+          auto handleIt = m_handles.find( "" );
+          assert( ( handleIt != m_handles.end() ) && ( handleIt->second.childrenHandles.size() == 2 ) );
+          assert( handleIt->second.childrenHandles.find( "VkInstance" ) != handleIt->second.childrenHandles.end() );
+#endif
+
+          // emit UniqueInstance & friends before the createInstance declaration that uses them
+          str += generateUniqueTypes( "", { "VkInstance" } );
+        }
+        str += "\n";
+
+        str += generateCommand( commandIt->first, commandIt->second, 0, false );
+      }
+    }
+  }
+#if !defined( NDEBUG )
+  // sanity check: extensions must not add commands to the empty handle
+  for ( auto const & extIt : m_extensionsByNumber )
+  {
+    assert( selectCommandsByHandle( extIt.second->second.requireData, handleData.commands, listedCommands ).empty() );
+  }
+#endif
+  return str;
+}
+
+// Generates std::hash specializations for every handle type required by one feature or extension
+// (<requireData>); each specialization just forwards to the hash of the underlying C handle.
+// The result is wrapped with the "//=== <title> ===" banner and protection guards via
+// addTitleAndProtection.
+std::string VulkanHppGenerator::generateHandleHashStructures( std::vector<RequireData> const & requireData, std::string const & title ) const
+{
+  const std::string hashTemplate = R"(
+  template <> struct hash<VULKAN_HPP_NAMESPACE::${type}>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::${type} const & ${name}) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<Vk${type}>{}(static_cast<Vk${type}>(${name}));
+    }
+  };
+)";
+
+  std::string str;
+  for ( auto const & require : requireData )
+  {
+    for ( auto const & type : require.types )
+    {
+      // only handle types get a hash specialization; other required types are skipped
+      auto handleIt = m_handles.find( type );
+      if ( handleIt != m_handles.end() )
+      {
+        std::string handleType = stripPrefix( handleIt->first, "Vk" );
+        std::string handleName = startLowerCase( handleType );
+        str += replaceWithMap( hashTemplate, { { "name", handleName }, { "type", handleType } } );
+      }
+    }
+  }
+  return addTitleAndProtection( title, str );
+}
+
+// Generates the complete "HASH structures for handles" section: std::hash specializations for all
+// handles, grouped by core feature first and then by extension in extension-number order.
+std::string VulkanHppGenerator::generateHandleHashStructures() const
+{
+  const std::string hashesTemplate = R"(
+  //===================================
+  //=== HASH structures for handles ===
+  //===================================
+
+${hashes}
+)";
+
+  std::string hashes;
+  for ( auto const & feature : m_features )
+  {
+    hashes += generateHandleHashStructures( feature.second.requireData, feature.first );
+  }
+  for ( auto const & extIt : m_extensionsByNumber )
+  {
+    hashes += generateHandleHashStructures( extIt.second->second.requireData, extIt.second->first );
+  }
+  return replaceWithMap( hashesTemplate, { { "hashes", hashes } } );
+}
+
+// Generates the complete "HANDLEs" section: the isVulkanHandleType primary template (defaulting
+// to false) followed by the class definition of every handle. generateHandle pulls in dependent
+// handles first and records what has been emitted in <listedHandles>, so each handle is generated
+// exactly once and in dependency order.
+std::string VulkanHppGenerator::generateHandles() const
+{
+  // Note: reordering structs or handles by features and extensions is not possible!
+  std::string str = R"(
+  //===============
+  //=== HANDLEs ===
+  //===============
+
+  template <typename Type>
+  struct isVulkanHandleType
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = false;
+  };
+)";
+
+  std::set<std::string> listedHandles;
+  for ( auto const & handle : m_handles )
+  {
+    if ( listedHandles.find( handle.first ) == listedHandles.end() )
+    {
+      str += generateHandle( handle, listedHandles );
+    }
+  }
+  return str;
+}
+
+// Generates the "Index Type Traits" section: for each VkIndexType enum value named eUint<N>...,
+// derives the corresponding C++ type uint<N>_t and emits
+//  - an IndexTypeValue<cppType> specialization (type -> enum; only for the FIRST enum value per
+//    cppType, since the mapping is not necessarily invertible), and
+//  - a CppType<IndexType, value> specialization (enum -> type; always emitted).
+// eNone* values have no associated C++ type and are skipped.
+std::string VulkanHppGenerator::generateIndexTypeTraits() const
+{
+  const std::string indexTypeTraitsTemplate = R"(
+  //=========================
+  //=== Index Type Traits ===
+  //=========================
+
+  template<typename T>
+  struct IndexTypeValue
+  {};
+
+${indexTypeTraits}
+)";
+
+  auto indexType = m_enums.find( "VkIndexType" );
+  assert( indexType != m_enums.end() );
+
+  std::string           indexTypeTraits;
+  std::set<std::string> listedCppTypes;  // cppTypes that already got an IndexTypeValue specialization
+  for ( auto const & value : indexType->second.values )
+  {
+    std::string valueName = generateEnumValueName( indexType->first, value.name, false, m_tags );
+    std::string cppType;
+    if ( !valueName.starts_with( "eNone" ) )
+    {
+      // get the bit count out of the value Name (8, 16, 32, ... ) and generate the cppType (uint8_t,...)
+      assert( valueName.starts_with( "eUint" ) );
+      auto beginDigit = valueName.begin() + strlen( "eUint" );
+      assert( isdigit( *beginDigit ) );
+      auto endDigit = std::find_if_not( beginDigit, valueName.end(), []( std::string::value_type c ) { return isdigit( c ); } );
+      cppType       = "uint" + valueName.substr( strlen( "eUint" ), endDigit - beginDigit ) + "_t";
+    }
+
+    if ( !cppType.empty() )
+    {
+      if ( listedCppTypes.insert( cppType ).second )
+      {
+        // IndexType traits aren't necessarily invertible.
+        // The Type -> Enum translation will only occur for the first prefixed enum value.
+        // A hypothetical extension to this enum with a conflicting prefix will use the core spec value.
+        const std::string typeToEnumTemplate = R"(
+  template <>
+  struct IndexTypeValue<${cppType}>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR IndexType value = IndexType::${valueName};
+  };
+)";
+        indexTypeTraits += replaceWithMap( typeToEnumTemplate, { { "cppType", cppType }, { "valueName", valueName } } );
+      }
+
+      // Enum -> Type translations are always able to occur.
+      const std::string enumToTypeTemplate = R"(
+  template <>
+  struct CppType<IndexType, IndexType::${valueName}>
+  {
+    using Type = ${cppType};
+  };
+)";
+      indexTypeTraits += replaceWithMap( enumToTypeTemplate, { { "cppType", cppType }, { "valueName", valueName } } );
+    }
+  }
+
+  return replaceWithMap( indexTypeTraitsTemplate, { { "indexTypeTraits", indexTypeTraits } } );
+}
+
+// Generates the constructor-initializer expression for a structure's "len" member <mit> (e.g.
+// a count field), derived from the size() of the ArrayProxy-style constructor argument(s) listed
+// in <litit>. With mutualExclusiveLens, several arrays share one len and at most one may be
+// non-empty, so a chain of "!a_.empty() ? a_.size() : ..." ternaries picks the first non-empty
+// one. Special cases: a "codeSize / 4" length is scaled back by "* 4", and a single-indirection
+// void* array is scaled by "* sizeof(T)". The result is cast to the member's type unless it
+// already is size_t.
+std::string VulkanHppGenerator::generateLenInitializer(
+  std::vector<MemberData>::const_iterator                                                                                 mit,
+  std::map<std::vector<MemberData>::const_iterator, std::vector<std::vector<MemberData>::const_iterator>>::const_iterator litit,
+  bool                                                                                                                    mutualExclusiveLens ) const
+{
+  std::string initializer;
+  if ( mutualExclusiveLens )
+  {
+    // there are multiple mutually exclusive arrays related to this len
+    for ( size_t i = 0; i + 1 < litit->second.size(); i++ )
+    {
+      auto        arrayIt      = litit->second[i];
+      std::string argumentName = startLowerCase( stripPrefix( arrayIt->name, "p" ) ) + "_";
+      initializer += "!" + argumentName + ".empty() ? " + argumentName + ".size() : ";
+    }
+    // the last array is the unconditional fallback of the ternary chain
+    auto        arrayIt      = litit->second.back();
+    std::string argumentName = startLowerCase( stripPrefix( arrayIt->name, "p" ) ) + "_";
+    initializer += argumentName + ".size()";
+  }
+  else
+  {
+    auto arrayIt = litit->second.front();
+    assert( ( arrayIt->len.front() == litit->first->name ) || ( ( arrayIt->len.front() == "codeSize / 4" ) && ( litit->first->name == "codeSize" ) ) );
+
+    assert( arrayIt->name.starts_with( "p" ) );
+    std::string argumentName = startLowerCase( stripPrefix( arrayIt->name, "p" ) ) + "_";
+
+    assert( mit->type.prefix.empty() && mit->type.postfix.empty() );
+    initializer = argumentName + ".size()";
+    if ( arrayIt->len.front() == "codeSize / 4" )
+    {
+      // the spec counts code size in bytes while the array holds uint32_t words
+      initializer += " * 4";
+    }
+    if ( ( arrayIt->type.type == "void" ) &&
+         ( std::count_if( arrayIt->type.postfix.begin(), arrayIt->type.postfix.end(), []( char c ) { return c == '*'; } ) < 2 ) )
+    {
+      // a void* array is exposed as ArrayProxy<T>, so its byte size is element count * sizeof(T)
+      initializer += " * sizeof(T)";
+    }
+  }
+  if ( mit->type.type != "size_t" )
+  {
+    // .size() yields size_t; narrow explicitly to the member's type (typically uint32_t)
+    initializer = "static_cast<" + mit->type.type + ">( " + initializer + " )";
+  }
+  return initializer;
+}
+
+// Derives a parameter/variable name from a type: strips the "Vk" prefix, then either prepends "p"
+// for a (single-indirection) pointer type or lowercases the first character for a value type.
+std::string VulkanHppGenerator::generateName( TypeInfo const & typeInfo ) const
+{
+  std::string name = stripPrefix( typeInfo.type, "Vk" );
+  assert( typeInfo.prefix.find( '*' ) == std::string::npos );
+  if ( typeInfo.postfix.find( '*' ) != std::string::npos )
+  {
+    // only single-pointer types are supported here
+    assert( typeInfo.postfix.find_first_of( '*' ) == typeInfo.postfix.find_last_of( '*' ) );
+    name = "p" + name;
+  }
+  else
+  {
+    name = startLowerCase( name );
+  }
+  return name;
+}
+
+// Determines the noexcept suffix for a generated command: " VULKAN_HPP_NOEXCEPT" when the command
+// cannot throw, " VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS" when only the vector-size checks could
+// throw (non-RAII flavour), or "" otherwise.
+std::string VulkanHppGenerator::generateNoExcept( std::vector<std::string> const &          errorCodes,
+                                                  std::vector<size_t> const &               returnParams,
+                                                  std::map<size_t, VectorParamData> const & vectorParams,
+                                                  CommandFlavourFlags                       flavourFlags,
+                                                  bool                                      vectorSizeCheck,
+                                                  bool                                      raii ) const
+{
+  // noexcept is only possible with no error codes, and the return param (if any) is not a vector param (unless it's the singular version)
+  return ( errorCodes.empty() &&
+           ( ( flavourFlags & CommandFlavourFlagBits::singular ) || returnParams.empty() ||
+             ( std::find_if( returnParams.begin(),
+                             returnParams.end(),
+                             [&vectorParams]( size_t rp ) { return vectorParams.find( rp ) != vectorParams.end(); } ) == returnParams.end() ) ) )
+         ? ( vectorSizeCheck ? ( raii ? "" : " VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS" ) : " VULKAN_HPP_NOEXCEPT" )
+         : "";
+}
+
+// Generates the deleter expression used when constructing a UniqueHandle from the result of
+// <commandName>: the deleter class (ObjectRelease / ObjectFree / ObjectDestroy) is chosen from
+// the command's verb, and the parent handle is derived from the command's class unless the
+// created object is device-parented. Throws for "Acquire"/"Get" commands outside the known
+// allow-list, so new spec entries get flagged instead of silently mishandled.
+std::string VulkanHppGenerator::generateObjectDeleter( std::string const & commandName,
+                                                       CommandData const & commandData,
+                                                       size_t              initialSkipCount,
+                                                       size_t              returnParam ) const
+{
+  std::string objectDeleter, allocator;
+  if ( ( commandName.find( "Acquire" ) != std::string::npos ) || ( commandName.find( "Get" ) != std::string::npos ) )
+  {
+    // only these Acquire/Get commands are known to hand out objects that are released, not destroyed
+    if ( ( commandName == "vkAcquirePerformanceConfigurationINTEL" ) || ( commandName == "vkGetRandROutputDisplayEXT" ) ||
+         ( commandName == "vkGetWinrtDisplayNV" ) || ( commandName == "vkGetDrmDisplayEXT" ) )
+    {
+      objectDeleter = "ObjectRelease";
+    }
+    else
+    {
+      throw std::runtime_error( "Found " + commandName + " which requires special handling for the object deleter" );
+    }
+  }
+  else if ( commandName.find( "Allocate" ) != std::string::npos )
+  {
+    // Allocate* commands pair with a Free and take an allocator
+    objectDeleter = "ObjectFree";
+    allocator     = "allocator, ";
+  }
+  else
+  {
+    assert( ( commandName.find( "Create" ) != std::string::npos ) || ( commandName.find( "Register" ) != std::string::npos ) );
+    objectDeleter = "ObjectDestroy";
+    allocator     = "allocator, ";
+  }
+  // the parent is the class the command belongs to, except for device-parented objects (NoParent)
+  std::string className  = initialSkipCount ? stripPrefix( commandData.params[initialSkipCount - 1].type.type, "Vk" ) : "";
+  std::string parentName = ( className.empty() || ( commandData.params[returnParam].type.type == "VkDevice" ) ) ? "NoParent" : className;
+  return objectDeleter + "<" + parentName + ", Dispatch>( " + ( ( parentName == "NoParent" ) ? "" : "*this, " ) + allocator + "d )";
+}
+
+// Returns the pair of guard lines ("#if defined( protect )", "#endif") wrapping platform-protected
+// code, or a pair of empty strings when no protection is needed.
+std::pair<std::string, std::string> VulkanHppGenerator::generateProtection( std::string const & protect ) const
+{
+  return protect.empty() ? std::make_pair( "", "" ) : std::make_pair( "#if defined( " + protect + " )\n", "#endif /*" + protect + "*/\n" );
+}
+
+// Generates the complete "COMMAND Definitions" section of the RAII header: definitions of all
+// RAII member functions, grouped by core feature first and then by extension in extension-number
+// order, with <listedCommands> preventing duplicates across overlapping extensions.
+std::string VulkanHppGenerator::generateRAIICommandDefinitions() const
+{
+  const std::string commandDefinitionsTemplate = R"(
+  //===========================
+  //=== COMMAND Definitions ===
+  //===========================
+
+${commandDefinitions}
+)";
+
+  std::string           commandDefinitions;
+  std::set<std::string> listedCommands;  // some commands are listed with more than one extension!
+  for ( auto const & feature : m_features )
+  {
+    commandDefinitions += generateRAIICommandDefinitions( feature.second.requireData, listedCommands, feature.first );
+  }
+  for ( auto const & extIt : m_extensionsByNumber )
+  {
+    commandDefinitions += generateRAIICommandDefinitions( extIt.second->second.requireData, listedCommands, extIt.second->first );
+  }
+
+  return replaceWithMap( commandDefinitionsTemplate, { { "commandDefinitions", commandDefinitions } } );
+}
+
+// Generates the RAII command definitions for one feature or extension (<requireData>), skipping
+// commands already emitted (tracked in <listedCommands>); wraps the result with the <title>
+// banner and protection guards via addTitleAndProtection.
+std::string VulkanHppGenerator::generateRAIICommandDefinitions( std::vector<RequireData> const & requireData,
+                                                                std::set<std::string> &          listedCommands,
+                                                                std::string const &              title ) const
+{
+  std::string str;
+  for ( auto const & require : requireData )
+  {
+    for ( auto const & command : require.commands )
+    {
+      // insert returns false for duplicates, so each command is generated exactly once
+      if ( listedCommands.insert( command ).second )
+      {
+        str += generateRAIIHandleCommand( command, determineInitialSkipCount( command ), true );
+      }
+    }
+  }
+  return addTitleAndProtection( title, str );
+}
+
+// Generates the three RAII dispatcher classes (ContextDispatcher, InstanceDispatcher,
+// DeviceDispatcher). The per-level initializer lists, member declarations, and function-pointer
+// assignments are collected from all features and extensions by appendRAIIDispatcherCommands and
+// then substituted into the three class templates.
+std::string VulkanHppGenerator::generateRAIIDispatchers() const
+{
+  std::string contextInitializers, contextMembers, deviceAssignments, deviceMembers, instanceAssignments, instanceMembers;
+
+  std::set<std::string> listedCommands;
+  for ( auto const & feature : m_features )
+  {
+    appendRAIIDispatcherCommands( feature.second.requireData,
+                                  listedCommands,
+                                  feature.first,
+                                  contextInitializers,
+                                  contextMembers,
+                                  deviceAssignments,
+                                  deviceMembers,
+                                  instanceAssignments,
+                                  instanceMembers );
+  }
+  for ( auto const & extension : m_extensions )
+  {
+    appendRAIIDispatcherCommands( extension.second.requireData,
+                                  listedCommands,
+                                  extension.first,
+                                  contextInitializers,
+                                  contextMembers,
+                                  deviceAssignments,
+                                  deviceMembers,
+                                  instanceAssignments,
+                                  instanceMembers );
+  }
+
+  // ContextDispatcher: instance-independent entry points, resolved from vkGetInstanceProcAddr
+  std::string contextDispatcherTemplate = R"(
+    class ContextDispatcher : public DispatchLoaderBase
+    {
+    public:
+      ContextDispatcher( PFN_vkGetInstanceProcAddr getProcAddr )
+        : vkGetInstanceProcAddr( getProcAddr )${contextInitializers}
+      {}
+
+    public:
+      PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = 0;
+${contextMembers}
+    };
+)";
+
+  std::string str = replaceWithMap( contextDispatcherTemplate, { { "contextInitializers", contextInitializers }, { "contextMembers", contextMembers } } );
+
+  // InstanceDispatcher: instance-level entry points; also resolves vkGetDeviceProcAddr for
+  // later construction of a DeviceDispatcher
+  std::string instanceDispatcherTemplate = R"(
+    class InstanceDispatcher : public DispatchLoaderBase
+    {
+    public:
+      InstanceDispatcher( PFN_vkGetInstanceProcAddr getProcAddr, VkInstance instance )
+        : vkGetInstanceProcAddr( getProcAddr )
+      {
+${instanceAssignments}
+        vkGetDeviceProcAddr =
+          PFN_vkGetDeviceProcAddr( vkGetInstanceProcAddr( instance, "vkGetDeviceProcAddr" ) );
+      }
+
+    public:
+${instanceMembers}
+      PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr = 0;
+    };
+)";
+
+  str += replaceWithMap( instanceDispatcherTemplate, { { "instanceAssignments", instanceAssignments }, { "instanceMembers", instanceMembers } } );
+
+  // DeviceDispatcher: device-level entry points, resolved from vkGetDeviceProcAddr
+  std::string deviceDispatcherTemplate = R"(
+    class DeviceDispatcher : public DispatchLoaderBase
+    {
+    public:
+      DeviceDispatcher( PFN_vkGetDeviceProcAddr getProcAddr, VkDevice device ) : vkGetDeviceProcAddr( getProcAddr )
+      {
+${deviceAssignments}
+      }
+
+    public:
+${deviceMembers}
+    };
+)";
+
+  str += replaceWithMap( deviceDispatcherTemplate, { { "deviceAssignments", deviceAssignments }, { "deviceMembers", deviceMembers } } );
+  return str;
+}
+
+// Generates the complete RAII wrapper class for one handle: its static creates, constructors,
+// destructor/clear/release, move operations, parent/dispatcher accessors, swap, and member
+// function declarations — plus, for handles that can be constructed as arrays, a companion
+// "<Type>s" class deriving from std::vector. Handles already listed in <listedHandles> are
+// skipped; rescheduleRAIIHandle first pulls in handles this one depends on.
+std::string VulkanHppGenerator::generateRAIIHandle( std::pair<std::string, HandleData> const & handle,
+                                                    std::set<std::string> &                    listedHandles,
+                                                    std::set<std::string> const &              specialFunctions ) const
+{
+  std::string str;
+  if ( listedHandles.find( handle.first ) == listedHandles.end() )
+  {
+    // generate any not-yet-listed handles this handle depends on, first
+    rescheduleRAIIHandle( str, handle, listedHandles, specialFunctions );
+
+    // aliased handles are not protected; otherwise use the handle type's protection define
+    auto [enter, leave]    = generateProtection( handle.second.alias.empty() ? getProtectFromType( handle.first ) : "" );
+    std::string handleType = stripPrefix( handle.first, "Vk" );
+    std::string handleName = generateRAIIHandleConstructorParamName( handle.first, handle.second.destructorIt );
+
+    auto [singularStaticCreates, arrayStaticCreates] = generateRAIIHandleStaticCreates( handle );
+    auto [singularConstructors, arrayConstructors] = generateRAIIHandleConstructors( handle );
+
+    auto [clearMembers, getConstructorSuccessCode, memberVariables, moveConstructorInitializerList, moveAssignmentInstructions, swapMembers, releaseMembers] =
+      generateRAIIHandleDetails( handle );
+
+    std::string declarations = generateRAIIHandleCommandDeclarations( handle, specialFunctions );
+
+    // look up the VkObjectType value for this handle (e.g. eInstance)
+    assert( !handle.second.objTypeEnum.empty() );
+    auto enumIt = m_enums.find( "VkObjectType" );
+    assert( enumIt != m_enums.end() );
+    auto valueIt = std::find_if(
+      enumIt->second.values.begin(), enumIt->second.values.end(), [&handle]( EnumValueData const & evd ) { return evd.name == handle.second.objTypeEnum; } );
+    assert( valueIt != enumIt->second.values.end() );
+    std::string objTypeEnum = generateEnumValueName( enumIt->first, valueIt->name, false, m_tags );
+
+    // derive the matching VkDebugReportObjectTypeEXT value by splicing "DEBUG_REPORT_" into the
+    // VkObjectType name ("VK_OBJECT_TYPE_..." -> "VK_DEBUG_REPORT_OBJECT_TYPE_..._EXT");
+    // fall back to eUnknown if no such value exists
+    enumIt = m_enums.find( "VkDebugReportObjectTypeEXT" );
+    assert( enumIt != m_enums.end() );
+    std::string valueName = handle.second.objTypeEnum;
+    valueName             = valueName.replace( 3, 0, "DEBUG_REPORT_" ) + "_EXT";
+    valueIt =
+      std::find_if( enumIt->second.values.begin(), enumIt->second.values.end(), [&valueName]( EnumValueData const & evd ) { return valueName == evd.name; } );
+    std::string debugReportObjectType =
+      ( valueIt != enumIt->second.values.end() ) ? generateEnumValueName( enumIt->first, valueIt->name, false, m_tags ) : "eUnknown";
+
+    // device-parented handles use the DeviceDispatcher, everything else the InstanceDispatcher
+    std::string dispatcherType = ( ( handle.first == "VkDevice" ) || ( handle.second.constructorIts.front()->second.params.front().type.type == "VkDevice" ) )
+                                 ? "VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher"
+                                 : "VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher";
+
+    // non-root handles with a destructor get a getter for their parent handle (taken from the
+    // destructor's first parameter)
+    std::string getParent;
+    if ( ( handle.first != "VkInstance" ) && ( handle.first != "VkDevice" ) && ( handle.second.destructorIt != m_commands.end() ) )
+    {
+      assert( !handle.second.destructorIt->second.params.empty() );
+      std::string parentType = stripPrefix( handle.second.destructorIt->second.params.front().type.type, "Vk" );
+      getParent              = "    VULKAN_HPP_NAMESPACE::" + parentType + " get" + parentType + "() const\n";
+      getParent += "    {\n";
+      getParent += "      return m_" + handle.second.destructorIt->second.params.front().name + ";\n";
+      getParent += "    }\n";
+    }
+
+    std::string assignmentOperator, copyConstructor;
+    if ( handle.second.destructorIt == m_commands.end() )
+    {
+      // allow copy constructor and assignment operator for classes without destructor
+      std::string const copyConstructorTemplate =
+        R"(      ${handleType}( ${handleType} const & rhs ) : m_${handleName}( rhs.m_${handleName} ), m_dispatcher( rhs.m_dispatcher ) {})";
+      copyConstructor += replaceWithMap( copyConstructorTemplate, { { "handleName", handleName }, { "handleType", handleType } } );
+
+      std::string assignmentOperatorTemplate = R"(      ${handleType} & operator=( ${handleType} const & rhs )
+      {
+        m_${handleName} = rhs.m_${handleName};
+        m_dispatcher    = rhs.m_dispatcher;
+        return *this;
+      })";
+      assignmentOperator += replaceWithMap( assignmentOperatorTemplate, { { "handleName", handleName }, { "handleType", handleType } } );
+    }
+    else
+    {
+      // owning handles are move-only: delete copy construction and copy assignment
+      std::string const copyConstructorTemplate = R"(      ${handleType}( ${handleType} const & ) = delete;)";
+      copyConstructor += replaceWithMap( copyConstructorTemplate, { { "handleType", handleType } } );
+
+      std::string const assignmentOperatorTemplate = R"(      ${handleType} & operator=( ${handleType} const & ) = delete;)";
+      assignmentOperator += replaceWithMap( assignmentOperatorTemplate, { { "handleType", handleType } } );
+    }
+
+    const std::string handleTemplate = R"(
+${enter}  class ${handleType}
+  {
+  public:
+    using CType = Vk${handleType};
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::${objTypeEnum};
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::${debugReportObjectType};
+
+  public:
+${singularStaticCreates}
+${singularConstructors}
+
+    ${handleType}( std::nullptr_t ) {}
+
+    ~${handleType}()
+    {
+      clear();
+    }
+
+    ${handleType}() = delete;
+${copyConstructor}
+    ${handleType}( ${handleType} && rhs ) VULKAN_HPP_NOEXCEPT
+      : ${moveConstructorInitializerList}
+    {}
+${assignmentOperator}
+    ${handleType} & operator=( ${handleType} && rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      if ( this != &rhs )
+      {
+${moveAssignmentInstructions}
+      }
+      return *this;
+    }
+
+    VULKAN_HPP_NAMESPACE::${handleType} const & operator*() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_${handleName};
+    }
+
+    void clear() VULKAN_HPP_NOEXCEPT
+    {
+${clearMembers}
+    }
+
+    VULKAN_HPP_NAMESPACE::${handleType} release()
+    {
+${releaseMembers}
+    }
+
+${getConstructorSuccessCode}
+${getParent}
+    ${dispatcherType} const * getDispatcher() const
+    {
+      VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+      return ${getDispatcherReturn}m_dispatcher;
+    }
+
+    void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::${handleType} & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+${swapMembers}
+    }
+
+${memberFunctionsDeclarations}
+
+  private:
+
+    ${memberVariables}
+  };
+${leave})";
+
+    str += replaceWithMap( handleTemplate,
+                           { { "assignmentOperator", assignmentOperator },
+                             { "clearMembers", clearMembers },
+                             { "copyConstructor", copyConstructor },
+                             { "debugReportObjectType", debugReportObjectType },
+                             { "dispatcherType", dispatcherType },
+                             { "enter", enter },
+                             { "getConstructorSuccessCode", getConstructorSuccessCode },
+                             { "getDispatcherReturn", ( handleType == "Device" ) || ( handleType == "Instance" ) ? "&*" : "" },
+                             { "getParent", getParent },
+                             { "handleName", handleName },
+                             { "handleType", handleType },
+                             { "leave", leave },
+                             { "memberFunctionsDeclarations", declarations },
+                             { "memberVariables", memberVariables },
+                             { "moveAssignmentInstructions", moveAssignmentInstructions },
+                             { "moveConstructorInitializerList", moveConstructorInitializerList },
+                             { "objTypeEnum", objTypeEnum },
+                             { "releaseMembers", releaseMembers },
+                             { "singularConstructors", singularConstructors },
+                             { "singularStaticCreates", singularStaticCreates },
+                             { "swapMembers", swapMembers } } );
+
+    if ( !arrayConstructors.empty() )
+    {
+      // it's a handle class with a friendly handles class
+      const std::string handlesTemplate = R"(
+${enter}  class ${handleType}s : public std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::${handleType}>
+  {
+  public:
+    ${arrayStaticCreates}
+    ${arrayConstructors}
+    ${handleType}s( std::nullptr_t ) {}
+
+    ${handleType}s() = delete;
+    ${handleType}s( ${handleType}s const & ) = delete;
+    ${handleType}s( ${handleType}s && rhs ) = default;
+    ${handleType}s & operator=( ${handleType}s const & ) = delete;
+    ${handleType}s & operator=( ${handleType}s && rhs ) = default;
+  };
+${leave}
+)";
+
+      str += replaceWithMap( handlesTemplate,
+                             { { "arrayConstructors", arrayConstructors },
+                               { "arrayStaticCreates", arrayStaticCreates },
+                               { "enter", enter },
+                               { "handleType", handleType },
+                               { "leave", leave } } );
+    }
+  }
+  return str;
+}
+
+// Generate declaration or definition for one RAII handle member function.
+// Special functions (handled elsewhere) yield an empty string; any other command
+// that no generator recognizes is a hard error.
+std::string VulkanHppGenerator::generateRAIIHandleCommand( std::string const & command, size_t initialSkipCount, bool definition ) const
+{
+  if ( m_RAIISpecialFunctions.find( command ) != m_RAIISpecialFunctions.end() )
+  {
+    return "";
+  }
+
+  auto commandIt = m_commands.find( command );
+  assert( commandIt != m_commands.end() );
+
+  // dispatch on the C return type of the command
+  std::string const & returnType = commandIt->second.returnType;
+  std::string         generated;
+  if ( returnType == "VkResult" )
+  {
+    generated = generateRAIIHandleCommandResult( commandIt, initialSkipCount, definition );
+  }
+  else if ( returnType == "void" )
+  {
+    generated = generateRAIIHandleCommandVoid( commandIt, initialSkipCount, definition );
+  }
+  else
+  {
+    generated = generateRAIIHandleCommandValue( commandIt, initialSkipCount, definition );
+  }
+
+  if ( generated.empty() )
+  {
+    throw std::runtime_error( "Never encountered a command like <" + command + "> !" );
+  }
+  return generated;
+}
+
+// Generate the in-class declarations of all RAII member functions for the given handle:
+// first-level commands (the handle is the command's dispatch object) and second-level
+// commands (the handle appears one level below), grouped per feature and per extension.
+// Commands in specialFunctions (constructors/destructor/etc.) are skipped here.
+std::string VulkanHppGenerator::generateRAIIHandleCommandDeclarations( std::pair<std::string, HandleData> const & handle,
+                                                                       std::set<std::string> const &              specialFunctions ) const
+{
+  std::string           functionDeclarations;
+  std::set<std::string> listedCommands;  // some commands are listed with more than one extension !
+  for ( auto const & feature : m_features )
+  {
+    std::vector<std::string> firstLevelCommands, secondLevelCommands;
+
+    for ( auto const & require : feature.second.requireData )
+    {
+      for ( auto const & command : require.commands )
+      {
+        if ( specialFunctions.find( command ) == specialFunctions.end() )
+        {
+          if ( handle.second.commands.find( command ) != handle.second.commands.end() )
+          {
+            // a command of the handle itself; a core feature is expected to list it only once
+            assert( listedCommands.find( command ) == listedCommands.end() );
+            listedCommands.insert( command );
+            firstLevelCommands.push_back( command );
+          }
+          else if ( handle.second.secondLevelCommands.find( command ) != handle.second.secondLevelCommands.end() )
+          {
+            assert( listedCommands.find( command ) == listedCommands.end() );
+            listedCommands.insert( command );
+            // second-level commands only make sense on a real handle, not on the Context
+            assert( !handle.first.empty() );
+            secondLevelCommands.push_back( command );
+          }
+        }
+      }
+    }
+    if ( !firstLevelCommands.empty() || !secondLevelCommands.empty() )
+    {
+      functionDeclarations += "\n  //=== " + feature.first + " ===\n";
+      for ( auto const & command : firstLevelCommands )
+      {
+        // the Context (empty handle name) has no dispatch-object parameter to skip
+        functionDeclarations += generateRAIIHandleCommand( command, handle.first.empty() ? 0 : 1, false );
+      }
+      for ( auto const & command : secondLevelCommands )
+      {
+        assert( !handle.first.empty() );
+        functionDeclarations += generateRAIIHandleCommand( command, 2, false );
+      }
+    }
+  }
+
+  // extensions may re-list a command that a feature (or an earlier extension) already
+  // provided, hence the listedCommands check here instead of an assert
+  for ( auto const & extIt : m_extensionsByNumber )
+  {
+    std::vector<std::string> firstLevelCommands, secondLevelCommands;
+    for ( auto & req : extIt.second->second.requireData )
+    {
+      for ( auto const & command : req.commands )
+      {
+        if ( ( specialFunctions.find( command ) == specialFunctions.end() ) && ( listedCommands.find( command ) == listedCommands.end() ) )
+        {
+          if ( handle.second.commands.find( command ) != handle.second.commands.end() )
+          {
+            listedCommands.insert( command );
+            firstLevelCommands.push_back( command );
+          }
+          else if ( handle.second.secondLevelCommands.find( command ) != handle.second.secondLevelCommands.end() )
+          {
+            listedCommands.insert( command );
+            secondLevelCommands.push_back( command );
+          }
+        }
+      }
+    }
+    if ( !firstLevelCommands.empty() || !secondLevelCommands.empty() )
+    {
+      std::string enter, leave;
+      if ( extIt.second->first != m_types.find( handle.first )->second.referencedIn )
+      {
+        // wrap the declarations in the extension's platform-protection guard, unless the
+        // handle type itself is declared inside that very extension
+        std::tie( enter, leave ) = generateProtection( getProtectFromTitle( extIt.second->first ) );
+      }
+      functionDeclarations += "\n" + enter + "  //=== " + extIt.second->first + " ===\n";
+      for ( auto const & command : firstLevelCommands )
+      {
+        functionDeclarations += generateRAIIHandleCommand( command, handle.first.empty() ? 0 : 1, false );
+      }
+      for ( auto const & command : secondLevelCommands )
+      {
+        assert( !handle.first.empty() );
+        functionDeclarations += generateRAIIHandleCommand( command, 2, false );
+      }
+      functionDeclarations += leave;
+    }
+  }
+  return functionDeclarations;
+}
+
+// Generate the "enhanced" flavour of a RAII member function (declaration or definition,
+// depending on 'definition'): arguments are analyzed to skip output/size parameters,
+// optionally reduce a vector to a singular element, template void-pointer data, and
+// assemble the full call sequence with result check and return statement.
+std::string VulkanHppGenerator::generateRAIIHandleCommandEnhanced( std::map<std::string, CommandData>::const_iterator commandIt,
+                                                                   size_t                                             initialSkipCount,
+                                                                   std::vector<size_t> const &                        returnParams,
+                                                                   std::map<size_t, VectorParamData> const &          vectorParams,
+                                                                   bool                                               definition,
+                                                                   CommandFlavourFlags                                flavourFlags ) const
+{
+  bool singular = flavourFlags & CommandFlavourFlagBits::singular;
+
+  std::set<size_t> skippedParams  = determineSkippedParams( commandIt->second.params, initialSkipCount, vectorParams, returnParams, singular );
+  std::set<size_t> singularParams = singular ? determineSingularParams( returnParams[0], vectorParams ) : std::set<size_t>();
+  // special handling for vkGetMemoryHostPointerPropertiesEXT: here, we really need to stick with the const void * parameter !
+  std::set<size_t> templatedParams =
+    ( commandIt->first == "vkGetMemoryHostPointerPropertiesEXT" ) ? std::set<size_t>() : determineVoidPointerParams( commandIt->second.params );
+
+  bool                     enumerating = determineEnumeration( vectorParams, returnParams );
+  std::vector<std::string> dataTypes   = determineDataTypes( commandIt->second.params, vectorParams, returnParams, templatedParams );
+  std::string              dataType    = combineDataTypes( vectorParams, returnParams, enumerating, dataTypes, flavourFlags, true );
+
+  // pieces shared by both the declaration and the definition
+  std::string argumentTemplates = generateArgumentTemplates( commandIt->second.params, returnParams, vectorParams, templatedParams, flavourFlags, true );
+  std::string argumentList      = generateArgumentListEnhanced(
+    commandIt->second.params, returnParams, vectorParams, skippedParams, singularParams, templatedParams, definition, flavourFlags, false );
+  std::string commandName = generateCommandName( commandIt->first, commandIt->second.params, initialSkipCount, m_tags, flavourFlags );
+  std::string nodiscard =
+    generateNoDiscard( !returnParams.empty() || ( ( commandIt->second.returnType != "VkResult" ) && ( commandIt->second.returnType != "void" ) ),
+                       1 < commandIt->second.successCodes.size(),
+                       false );
+  std::pair<bool, std::map<size_t, std::vector<size_t>>> vectorSizeCheck =
+    needsVectorSizeCheck( commandIt->second.params, vectorParams, returnParams, singularParams );
+  std::string noexceptString = generateNoExcept( commandIt->second.errorCodes, returnParams, vectorParams, flavourFlags, vectorSizeCheck.first, true );
+  std::string returnType     = generateReturnType( commandIt->second, returnParams, vectorParams, flavourFlags, true, dataType );
+
+  // optional generator-debugging marker, emitted into the generated code as a comment
+  std::string debugHelper = "";
+  #ifdef DEBUG_GENERATOR
+  debugHelper = "/*" + std::string(__FUNCTION__) + "*/";
+  #endif
+
+  if ( definition )
+  {
+    std::string const definitionTemplate =
+      R"(
+  ${argumentTemplates}
+  ${nodiscard} VULKAN_HPP_INLINE ${returnType} ${className}::${commandName}( ${argumentList} ) const ${noexcept} ${debugHelper}
+  {
+${functionPointerCheck}
+${vectorSizeCheck}
+    ${dataSizeChecks}
+    ${dataDeclarations}
+    ${callSequence}
+    ${resultCheck}
+    ${dataPreparation}
+    ${returnStatement}
+  }
+)";
+
+    std::string callSequence = generateCallSequence(
+      commandIt->first, commandIt->second, returnParams, vectorParams, initialSkipCount, singularParams, templatedParams, flavourFlags, true );
+    // initialSkipCount == 0 means the function lives on the Context, otherwise the class is
+    // derived from the type of the parameter right before the skipped ones
+    std::string className        = initialSkipCount ? stripPrefix( commandIt->second.params[initialSkipCount - 1].type.type, "Vk" ) : "Context";
+    std::string returnVariable   = generateReturnVariable( commandIt->second, returnParams, vectorParams, flavourFlags );
+    std::string dataDeclarations = generateDataDeclarations(
+      commandIt->second, returnParams, vectorParams, templatedParams, flavourFlags, true, dataTypes, dataType, returnType, returnVariable );
+    std::string dataPreparation =
+      generateDataPreparation( commandIt->second, initialSkipCount, returnParams, vectorParams, templatedParams, flavourFlags, enumerating );
+    std::string dataSizeChecks  = generateDataSizeChecks( commandIt->second, returnParams, dataTypes, vectorParams, templatedParams, singular );
+    std::string resultCheck     = generateResultCheck( commandIt->second, className, "::", commandName, enumerating );
+    std::string returnStatement = generateReturnStatement( commandIt->first,
+                                                           commandIt->second,
+                                                           returnVariable,
+                                                           returnType,
+                                                           dataType,
+                                                           initialSkipCount,
+                                                           returnParams.empty() ? INVALID_INDEX : returnParams[0],
+                                                           flavourFlags,
+                                                           enumerating,
+                                                           true );
+    std::string vectorSizeCheckString =
+      vectorSizeCheck.first ? generateRAIIHandleVectorSizeCheck( commandIt->first, commandIt->second, initialSkipCount, vectorSizeCheck.second, skippedParams )
+                            : "";
+
+    return replaceWithMap( definitionTemplate,
+                           { { "argumentList", argumentList },
+                             { "argumentTemplates", argumentTemplates },
+                             { "callSequence", callSequence },
+                             { "className", className },
+                             { "commandName", commandName },
+                             { "dataDeclarations", dataDeclarations },
+                             { "dataPreparation", dataPreparation },
+                             { "dataSizeChecks", dataSizeChecks },
+                             { "debugHelper", debugHelper },
+                             { "functionPointerCheck", generateFunctionPointerCheck( commandIt->first, commandIt->second.referencedIn ) },
+                             { "nodiscard", nodiscard },
+                             { "noexcept", noexceptString },
+                             { "resultCheck", resultCheck },
+                             { "returnStatement", returnStatement },
+                             { "returnType", returnType } } );
+  }
+  else
+  {
+    // NOTE(review): debugHelper is computed above but not used in the declaration template,
+    // unlike generateRAIIHandleCommandFactory — presumably intentional; confirm upstream
+    std::string const declarationTemplate =
+      R"(
+    ${argumentTemplates}
+    ${nodiscard} ${returnType} ${commandName}( ${argumentList} ) const ${noexcept};
+)";
+
+    return replaceWithMap( declarationTemplate,
+                           { { "argumentList", argumentList },
+                             { "argumentTemplates", argumentTemplates },
+                             { "commandName", commandName },
+                             { "nodiscard", nodiscard },
+                             { "noexcept", noexceptString },
+                             { "returnType", returnType } } );
+  }
+}
+
+// Generate a factory member function that returns a new RAII handle (or a vector of them)
+// by forwarding the call arguments to the handle's constructor. Only generated with
+// exceptions enabled (#ifndef VULKAN_HPP_NO_EXCEPTIONS in the emitted code), since the
+// constructor reports failure by throwing.
+std::string VulkanHppGenerator::generateRAIIHandleCommandFactory( std::map<std::string, CommandData>::const_iterator commandIt,
+                                                                  size_t                                             initialSkipCount,
+                                                                  std::vector<size_t> const &                        returnParams,
+                                                                  std::map<size_t, VectorParamData> const &          vectorParams,
+                                                                  bool                                               definition,
+                                                                  CommandFlavourFlags                                flavourFlags ) const
+{
+  bool singular = flavourFlags & CommandFlavourFlagBits::singular;
+
+  // the (last) return parameter must be a handle; the only supported multi-return shape is
+  // the (count, vector-of-handles) enumeration pattern
+  assert( isHandleType( commandIt->second.params[returnParams.back()].type.type ) );
+  assert( ( returnParams.size() == 1 ) ||
+          ( ( returnParams.size() == 2 ) && ( vectorParams.size() == 1 ) && ( returnParams[0] == vectorParams.begin()->second.lenParam ) &&
+            ( returnParams[1] == vectorParams.begin()->first ) ) );
+
+  std::set<size_t> skippedParams  = determineSkippedParams( commandIt->second.params, initialSkipCount, vectorParams, returnParams, singular );
+  std::set<size_t> singularParams = singular ? determineSingularParams( returnParams.back(), vectorParams ) : std::set<size_t>();
+  std::string      argumentList   = generateRAIIHandleCommandFactoryArgumentList( commandIt->second.params, skippedParams, definition, singular );
+  std::string      commandName    = generateCommandName( commandIt->first, commandIt->second.params, initialSkipCount, m_tags, flavourFlags );
+  std::string      handleType     = stripPostfix( commandIt->second.params[returnParams.back()].type.compose( "VULKAN_HPP_RAII_NAMESPACE" ), " *" );
+  std::string      returnType     = handleType;
+  if ( ( vectorParams.find( returnParams.back() ) != vectorParams.end() ) && !singular )
+  {
+    // vector case: return std::vector<Handle> and construct via the plural "Handles" class
+    returnType = "std::vector<" + handleType + ">";
+    handleType += "s";
+  }
+
+  // optional generator-debugging marker, emitted into the generated code as a comment
+  std::string debugHelper = "";
+  #ifdef DEBUG_GENERATOR
+  debugHelper = "/*" + std::string(__FUNCTION__) + "*/";
+  #endif
+
+  if ( definition )
+  {
+    std::string callArguments = generateCallArgumentsRAIIFactory( commandIt->second.params, initialSkipCount, skippedParams, singularParams );
+    std::string className     = initialSkipCount ? stripPrefix( commandIt->second.params[initialSkipCount - 1].type.type, "Vk" ) : "Context";
+
+    std::string const definitionTemplate =
+      R"(
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ${returnType} ${className}::${commandName}( ${argumentList} ) const ${debugHelper}
+  {
+    return ${handleType}( ${callArguments} );
+  }
+#endif
+)";
+
+    return replaceWithMap( definitionTemplate,
+                           { { "argumentList", argumentList },
+                             { "callArguments", callArguments },
+                             { "className", className },
+                             { "commandName", commandName },
+                             { "debugHelper", debugHelper },
+                             { "handleType", handleType },
+                             { "returnType", returnType } } );
+  }
+  else
+  {
+    std::string const declarationTemplate =
+      R"(
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD ${returnType} ${commandName}( ${argumentList} ) const; ${debugHelper}
+#endif
+)";
+
+    return replaceWithMap( declarationTemplate,
+                           { { "argumentList", argumentList },
+                           { "commandName", commandName },
+                           { "debugHelper", debugHelper },
+                           { "returnType", returnType } } );
+  }
+}
+
+// Build the comma-separated argument list of a factory function from all parameters that
+// are not in skippedParams, delegating each one to generateRAIIHandleConstructorArgument.
+std::string VulkanHppGenerator::generateRAIIHandleCommandFactoryArgumentList( std::vector<ParamData> const & params,
+                                                                              std::set<size_t> const &       skippedParams,
+                                                                              bool                           definition,
+                                                                              bool                           singular ) const
+{
+  std::string argumentList;
+  bool        first = true;
+  for ( size_t idx = 0; idx < params.size(); ++idx )
+  {
+    if ( skippedParams.find( idx ) != skippedParams.end() )
+    {
+      continue;
+    }
+    if ( !first )
+    {
+      argumentList += ", ";
+    }
+    argumentList += generateRAIIHandleConstructorArgument( params[idx], definition, singular, false );
+    first = false;
+  }
+  return argumentList;
+}
+
+// Dispatch a VkResult-returning command to one of four generators, selected by
+// (single vs. multiple success codes) x (with vs. without error codes).
+std::string VulkanHppGenerator::generateRAIIHandleCommandResult( std::map<std::string, CommandData>::const_iterator commandIt,
+                                                                 size_t                                             initialSkipCount,
+                                                                 bool                                               definition ) const
+{
+  assert( !commandIt->second.successCodes.empty() );
+  bool singleSuccess = ( commandIt->second.successCodes.size() == 1 );
+  bool hasErrors     = !commandIt->second.errorCodes.empty();
+  if ( singleSuccess )
+  {
+    return hasErrors ? generateRAIIHandleCommandResultSingleSuccessWithErrors( commandIt, initialSkipCount, definition )
+                     : generateRAIIHandleCommandResultSingleSuccessNoErrors( commandIt, initialSkipCount, definition );
+  }
+  return hasErrors ? generateRAIIHandleCommandResultMultiSuccessWithErrors( commandIt, initialSkipCount, definition )
+                   : generateRAIIHandleCommandResultMultiSuccessNoErrors( commandIt, initialSkipCount, definition );
+}
+
+std::string VulkanHppGenerator::generateRAIIHandleCommandResultMultiSuccessNoErrors( std::map<std::string, CommandData>::const_iterator commandIt,
+                                                                                     size_t                                             initialSkipCount,
+                                                                                     bool                                               definition ) const
+{
+  std::vector<size_t> returnParams = determineReturnParams( commandIt->second.params );
+  switch ( returnParams.size() )
+  {
+    case 0:
+      return generateRAIIHandleCommandEnhanced( commandIt, initialSkipCount, returnParams, determineVectorParams( commandIt->second.params ), definition );
+    case 2: return generateRAIIHandleCommandResultMultiSuccessNoErrors2Return( commandIt, initialSkipCount, definition, returnParams );
+  }
+  return "";
+}
+
+// Two return parameters with success codes VK_SUCCESS/VK_INCOMPLETE: supports the usual
+// enumeration pattern — one vector whose count is the first return parameter (uint32_t or
+// size_t) and whose data is the second, with a non-void, non-handle, non-chain element.
+std::string VulkanHppGenerator::generateRAIIHandleCommandResultMultiSuccessNoErrors2Return( std::map<std::string, CommandData>::const_iterator commandIt,
+                                                                                            size_t                                             initialSkipCount,
+                                                                                            bool                                               definition,
+                                                                                            std::vector<size_t> const & returnParams ) const
+{
+  if ( ( commandIt->second.successCodes.size() != 2 ) || ( commandIt->second.successCodes[0] != "VK_SUCCESS" ) ||
+       ( commandIt->second.successCodes[1] != "VK_INCOMPLETE" ) )
+  {
+    return "";
+  }
+  std::map<size_t, VectorParamData> vectorParams = determineVectorParams( commandIt->second.params );
+  if ( ( vectorParams.size() != 1 ) || ( returnParams[0] != vectorParams.begin()->second.lenParam ) ||
+       ( returnParams[1] != vectorParams.begin()->first ) )
+  {
+    return "";
+  }
+  std::string const & counterType = commandIt->second.params[returnParams[0]].type.type;
+  std::string const & elementType = commandIt->second.params[returnParams[1]].type.type;
+  if ( ( ( counterType == "uint32_t" ) || ( counterType == "size_t" ) ) && ( elementType != "void" ) && !isHandleType( elementType ) &&
+       !isStructureChainAnchor( elementType ) )
+  {
+    return generateRAIIHandleCommandEnhanced( commandIt, initialSkipCount, returnParams, vectorParams, definition );
+  }
+  return "";
+}
+
+std::string VulkanHppGenerator::generateRAIIHandleCommandResultMultiSuccessWithErrors( std::map<std::string, CommandData>::const_iterator commandIt,
+                                                                                       size_t                                             initialSkipCount,
+                                                                                       bool                                               definition ) const
+{
+  std::vector<size_t> returnParams = determineReturnParams( commandIt->second.params );
+  switch ( returnParams.size() )
+  {
+    case 0:
+      return generateRAIIHandleCommandEnhanced( commandIt, initialSkipCount, returnParams, determineVectorParams( commandIt->second.params ), definition );
+      break;
+    case 1: return generateRAIIHandleCommandResultMultiSuccessWithErrors1Return( commandIt, initialSkipCount, definition, returnParams[0] ); break;
+    case 2: return generateRAIIHandleCommandResultMultiSuccessWithErrors2Return( commandIt, initialSkipCount, definition, returnParams ); break;
+    case 3: return generateRAIIHandleCommandResultMultiSuccessWithErrors3Return( commandIt, initialSkipCount, definition, returnParams ); break;
+  }
+  return "";
+}
+
+// One return parameter, multiple success codes, with errors. Three supported shapes:
+//  - void* data vector: enhanced flavour plus its singular variant
+//  - handle vector (two-vector pattern with a structure-chain input vector): factory
+//    flavour plus its singular variant
+//  - plain value (no vectors): enhanced flavour, plus chained variant for chain anchors
+std::string VulkanHppGenerator::generateRAIIHandleCommandResultMultiSuccessWithErrors1Return( std::map<std::string, CommandData>::const_iterator commandIt,
+                                                                                              size_t initialSkipCount,
+                                                                                              bool   definition,
+                                                                                              size_t returnParam ) const
+{
+  if ( commandIt->second.params[returnParam].type.type == "void" )
+  {
+    std::map<size_t, VectorParamData> vectorParams = determineVectorParams( commandIt->second.params );
+    if ( vectorParams.size() == 1 )
+    {
+      if ( returnParam == vectorParams.begin()->first )
+      {
+        if ( commandIt->second.params[vectorParams.begin()->second.lenParam].type.type == "size_t" )
+        {
+          // generate both the vector version and the singular (single-element) version
+          return generateRAIIHandleCommandEnhanced( commandIt, initialSkipCount, { returnParam }, vectorParams, definition ) +
+                 generateRAIIHandleCommandEnhanced( commandIt, initialSkipCount, { returnParam }, vectorParams, definition, CommandFlavourFlagBits::singular );
+        }
+      }
+    }
+  }
+  else if ( isHandleType( commandIt->second.params[returnParam].type.type ) )
+  {
+    std::map<size_t, VectorParamData> vectorParams = determineVectorParams( commandIt->second.params );
+    if ( vectorParams.size() == 2 )
+    {
+      // the returned handles are the second vector; the first vector holds the inputs
+      if ( returnParam == std::next( vectorParams.begin() )->first )
+      {
+        if ( commandIt->second.params[vectorParams.begin()->second.lenParam].type.type == "uint32_t" )
+        {
+          if ( isStructureChainAnchor( commandIt->second.params[vectorParams.begin()->first].type.type ) )
+          {
+            return generateRAIIHandleCommandFactory( commandIt, initialSkipCount, { returnParam }, vectorParams, definition ) +
+                   generateRAIIHandleCommandFactory( commandIt, initialSkipCount, { returnParam }, vectorParams, definition, CommandFlavourFlagBits::singular );
+          }
+        }
+      }
+    }
+  }
+  else
+  {
+    std::map<size_t, VectorParamData> vectorParams = determineVectorParams( commandIt->second.params );
+    if ( vectorParams.empty() )
+    {
+      std::string str = generateRAIIHandleCommandEnhanced( commandIt, initialSkipCount, { returnParam }, vectorParams, definition );
+      if ( isStructureChainAnchor( commandIt->second.params[returnParam].type.type ) )
+      {
+        // structure-chain anchors additionally get a StructureChain-returning variant
+        str += generateRAIIHandleCommandEnhanced( commandIt, initialSkipCount, { returnParam }, vectorParams, definition, CommandFlavourFlagBits::chained );
+      }
+      return str;
+    }
+  }
+  return "";
+}
+
+// Two return parameters, success codes VK_SUCCESS/VK_INCOMPLETE, with errors.
+// Supported shapes: two independent value returns (no vectors), or the enumeration
+// pattern (count + vector). Enumerated handles go through the factory flavour, except
+// vkGetSwapchainImagesKHR, which deliberately stays with the enhanced flavour.
+std::string VulkanHppGenerator::generateRAIIHandleCommandResultMultiSuccessWithErrors2Return( std::map<std::string, CommandData>::const_iterator commandIt,
+                                                                                              size_t                      initialSkipCount,
+                                                                                              bool                        definition,
+                                                                                              std::vector<size_t> const & returnParams ) const
+{
+  if ( ( commandIt->second.successCodes.size() == 2 ) && ( commandIt->second.successCodes[0] == "VK_SUCCESS" ) &&
+       ( commandIt->second.successCodes[1] == "VK_INCOMPLETE" ) )
+  {
+    std::map<size_t, VectorParamData> vectorParams = determineVectorParams( commandIt->second.params );
+    switch ( vectorParams.size() )
+    {
+      case 0:
+        // two plain value returns: both must be non-void, non-handle, non-chain types
+        if ( ( commandIt->second.params[returnParams[0]].type.type != "void" ) && !isHandleType( commandIt->second.params[returnParams[0]].type.type ) &&
+             !isStructureChainAnchor( commandIt->second.params[returnParams[0]].type.type ) )
+        {
+          if ( ( commandIt->second.params[returnParams[1]].type.type != "void" ) && !isHandleType( commandIt->second.params[returnParams[1]].type.type ) &&
+               !isStructureChainAnchor( commandIt->second.params[returnParams[1]].type.type ) )
+          {
+            return generateRAIIHandleCommandEnhanced( commandIt, initialSkipCount, returnParams, vectorParams, definition );
+          }
+        }
+        break;
+      case 1:
+        // enumeration pattern: returnParams[0] is the count, returnParams[1] the vector
+        if ( returnParams[0] == vectorParams.begin()->second.lenParam )
+        {
+          if ( returnParams[1] == vectorParams.begin()->first )
+          {
+            if ( ( commandIt->second.params[returnParams[0]].type.type == "uint32_t" ) || ( commandIt->second.params[returnParams[0]].type.type == "size_t" ) )
+            {
+              // needs some very special handling of "vkGetSwapchainImagesKHR" !!
+              if ( isHandleType( commandIt->second.params[returnParams[1]].type.type ) && ( commandIt->first != "vkGetSwapchainImagesKHR" ) )
+              {
+                return generateRAIIHandleCommandFactory( commandIt, initialSkipCount, returnParams, vectorParams, definition );
+              }
+              else
+              {
+                std::string str = generateRAIIHandleCommandEnhanced( commandIt, initialSkipCount, returnParams, vectorParams, definition );
+                if ( isStructureChainAnchor( commandIt->second.params[returnParams[1]].type.type ) )
+                {
+                  // chain-anchored elements additionally get a StructureChain-returning variant
+                  str +=
+                    generateRAIIHandleCommandEnhanced( commandIt, initialSkipCount, returnParams, vectorParams, definition, CommandFlavourFlagBits::chained );
+                }
+                return str;
+              }
+            }
+          }
+        }
+        break;
+    }
+  }
+  return "";
+}
+
+// Three return parameters, with errors. Only one shape is supported: a uint32_t count
+// driving two parallel output vectors (same length parameter), both holding non-void,
+// non-handle, non-chain elements, with success codes VK_SUCCESS/VK_INCOMPLETE.
+std::string VulkanHppGenerator::generateRAIIHandleCommandResultMultiSuccessWithErrors3Return( std::map<std::string, CommandData>::const_iterator commandIt,
+                                                                                              size_t                      initialSkipCount,
+                                                                                              bool                        definition,
+                                                                                              std::vector<size_t> const & returnParams ) const
+{
+  if ( commandIt->second.params[returnParams[0]].type.type == "uint32_t" )
+  {
+    if ( ( commandIt->second.params[returnParams[1]].type.type != "void" ) && !isHandleType( commandIt->second.params[returnParams[1]].type.type ) &&
+         !isStructureChainAnchor( commandIt->second.params[returnParams[1]].type.type ) )
+    {
+      if ( ( commandIt->second.params[returnParams[2]].type.type != "void" ) && !isHandleType( commandIt->second.params[returnParams[2]].type.type ) &&
+           !isStructureChainAnchor( commandIt->second.params[returnParams[2]].type.type ) )
+      {
+        std::map<size_t, VectorParamData> vectorParams = determineVectorParams( commandIt->second.params );
+        if ( vectorParams.size() == 2 )
+        {
+          // returnParams[0] counts both vectors; returnParams[1]/[2] are the vectors' data
+          if ( returnParams[0] == vectorParams.begin()->second.lenParam )
+          {
+            if ( returnParams[1] == vectorParams.begin()->first )
+            {
+              if ( returnParams[2] == std::next( vectorParams.begin() )->first )
+              {
+                if ( vectorParams.begin()->second.lenParam == std::next( vectorParams.begin() )->second.lenParam )
+                {
+                  if ( ( commandIt->second.successCodes.size() == 2 ) && ( commandIt->second.successCodes[0] == "VK_SUCCESS" ) &&
+                       ( commandIt->second.successCodes[1] == "VK_INCOMPLETE" ) )
+                  {
+                    return generateRAIIHandleCommandEnhanced( commandIt, initialSkipCount, returnParams, vectorParams, definition );
+                  }
+                }
+              }
+            }
+          }
+        }
+      }
+    }
+  }
+  return "";
+}
+
+std::string VulkanHppGenerator::generateRAIIHandleCommandResultSingleSuccessNoErrors( std::map<std::string, CommandData>::const_iterator commandIt,
+                                                                                      size_t                                             initialSkipCount,
+                                                                                      bool                                               definition ) const
+{
+  std::vector<size_t> returnParams = determineReturnParams( commandIt->second.params );
+  if ( returnParams.size() < 2 )
+  {
+    std::map<size_t, VectorParamData> vectorParams = determineVectorParams( commandIt->second.params );
+    return generateRAIIHandleCommandEnhanced( commandIt, initialSkipCount, returnParams, vectorParams, definition );
+  }
+  return "";
+}
+
+std::string VulkanHppGenerator::generateRAIIHandleCommandResultSingleSuccessWithErrors( std::map<std::string, CommandData>::const_iterator commandIt,
+                                                                                        size_t                                             initialSkipCount,
+                                                                                        bool                                               definition ) const
+{
+  // Dispatch on the number of return parameters of the command; more than two
+  // return parameters are not supported here.
+  std::vector<size_t> const returnParams = determineReturnParams( commandIt->second.params );
+  if ( returnParams.empty() )
+  {
+    return generateRAIIHandleCommandEnhanced( commandIt, initialSkipCount, returnParams, determineVectorParams( commandIt->second.params ), definition );
+  }
+  else if ( returnParams.size() == 1 )
+  {
+    return generateRAIIHandleCommandResultSingleSuccessWithErrors1Return( commandIt, initialSkipCount, definition, returnParams[0] );
+  }
+  else if ( returnParams.size() == 2 )
+  {
+    return generateRAIIHandleCommandResultSingleSuccessWithErrors2Return( commandIt, initialSkipCount, definition, returnParams );
+  }
+  return "";
+}
+
+std::string VulkanHppGenerator::generateRAIIHandleCommandResultSingleSuccessWithErrors1Return( std::map<std::string, CommandData>::const_iterator commandIt,
+                                                                                               size_t initialSkipCount,
+                                                                                               bool   definition,
+                                                                                               size_t returnParam ) const
+{
+  // Dispatch on the type of the single return parameter: void data, a handle,
+  // a structure-chain anchor, or a plain value.
+  std::string const & returnType = commandIt->second.params[returnParam].type.type;
+  if ( returnType == "void" )
+  {
+    return generateRAIIHandleCommandResultSingleSuccessWithErrors1ReturnVoid( commandIt, initialSkipCount, definition, returnParam );
+  }
+  if ( isHandleType( returnType ) )
+  {
+    return generateRAIIHandleCommandResultSingleSuccessWithErrors1ReturnHandle( commandIt, initialSkipCount, definition, returnParam );
+  }
+  if ( isStructureChainAnchor( returnType ) )
+  {
+    return generateRAIIHandleCommandResultSingleSuccessWithErrors1ReturnChain( commandIt, initialSkipCount, definition, returnParam );
+  }
+  return generateRAIIHandleCommandResultSingleSuccessWithErrors1ReturnValue( commandIt, initialSkipCount, definition, returnParam );
+}
+
+std::string VulkanHppGenerator::generateRAIIHandleCommandResultSingleSuccessWithErrors1ReturnChain(
+  std::map<std::string, CommandData>::const_iterator commandIt, size_t initialSkipCount, bool definition, size_t returnParam ) const
+{
+  // A structure-chain return parameter is only supported without any vector
+  // parameters; generate both the plain and the chained flavour then.
+  std::map<size_t, VectorParamData> vectorParams = determineVectorParams( commandIt->second.params );
+  if ( !vectorParams.empty() )
+  {
+    return "";
+  }
+  std::string str = generateRAIIHandleCommandEnhanced( commandIt, initialSkipCount, { returnParam }, vectorParams, definition );
+  str += generateRAIIHandleCommandEnhanced( commandIt, initialSkipCount, { returnParam }, vectorParams, definition, CommandFlavourFlagBits::chained );
+  return str;
+}
+
+std::string VulkanHppGenerator::generateRAIIHandleCommandResultSingleSuccessWithErrors1ReturnHandle(
+  std::map<std::string, CommandData>::const_iterator commandIt, size_t initialSkipCount, bool definition, size_t returnParam ) const
+{
+  // Generate the RAII factory function(s) for a command whose single return parameter is a handle.
+  // Supported constellations: no vector parameters; one vector whose length comes from a structure;
+  // or two vectors sharing a count, returning the second one. Anything else yields "".
+  std::map<size_t, VectorParamData> vectorParams = determineVectorParams( commandIt->second.params );
+  switch ( vectorParams.size() )
+  {
+    case 0: return generateRAIIHandleCommandFactory( commandIt, initialSkipCount, { returnParam }, vectorParams, definition );
+    case 1:
+      // the returned handle must be the vector parameter itself ...
+      if ( returnParam == vectorParams.begin()->first )
+      {
+        // ... and its length must be specified by a member of a structure parameter
+        // (e.g. a count inside an allocate-info struct)
+        if ( m_structures.find( commandIt->second.params[vectorParams.begin()->second.lenParam].type.type ) != m_structures.end() )
+        {
+          return generateRAIIHandleCommandFactory( commandIt, initialSkipCount, { returnParam }, vectorParams, definition );
+        }
+      }
+      break;
+    case 2:
+      // the returned handle must be the second vector parameter ...
+      if ( returnParam == std::next( vectorParams.begin() )->first )
+      {
+        // ... both vectors must share the same length parameter ...
+        if ( vectorParams.begin()->second.lenParam == std::next( vectorParams.begin() )->second.lenParam )
+        {
+          // ... which has to be a plain uint32_t count
+          if ( commandIt->second.params[vectorParams.begin()->second.lenParam].type.type == "uint32_t" )
+          {
+            // the input vector must hold neither raw void data nor handles;
+            // generate the array factory plus its singular flavour
+            if ( ( commandIt->second.params[vectorParams.begin()->first].type.type != "void" ) &&
+                 !isHandleType( commandIt->second.params[vectorParams.begin()->first].type.type ) )
+            {
+              return generateRAIIHandleCommandFactory( commandIt, initialSkipCount, { returnParam }, vectorParams, definition ) +
+                     generateRAIIHandleCommandFactory(
+                       commandIt, initialSkipCount, { returnParam }, vectorParams, definition, CommandFlavourFlagBits::singular );
+            }
+          }
+        }
+      }
+      break;
+  }
+  // unsupported parameter constellation -> nothing generated
+  return "";
+}
+
+std::string VulkanHppGenerator::generateRAIIHandleCommandResultSingleSuccessWithErrors1ReturnValue(
+  std::map<std::string, CommandData>::const_iterator commandIt, size_t initialSkipCount, bool definition, size_t returnParam ) const
+{
+  // A plain-value return parameter is handled either without any vector
+  // parameters, or with exactly two of them; other counts are unsupported.
+  std::map<size_t, VectorParamData> vectorParams = determineVectorParams( commandIt->second.params );
+  if ( vectorParams.empty() )
+  {
+    return generateRAIIHandleCommandEnhanced( commandIt, initialSkipCount, { returnParam }, vectorParams, definition );
+  }
+  if ( vectorParams.size() == 2 )
+  {
+    return generateRAIIHandleCommandResultSingleSuccessWithErrors1ReturnValue2Vectors( commandIt, initialSkipCount, definition, returnParam, vectorParams );
+  }
+  return "";
+}
+
+std::string
+  VulkanHppGenerator::generateRAIIHandleCommandResultSingleSuccessWithErrors1ReturnValue2Vectors( std::map<std::string, CommandData>::const_iterator commandIt,
+                                                                                                  size_t                                    initialSkipCount,
+                                                                                                  bool                                      definition,
+                                                                                                  size_t                                    returnParam,
+                                                                                                  std::map<size_t, VectorParamData> const & vectorParams ) const
+{
+  // Generate the enhanced command for a value-returning command with exactly two vector
+  // parameters: the returned value must be the second vector, both vectors must share one
+  // uint32_t count, and the first (input) vector must hold plain structures. On a match the
+  // array version plus its singular flavour is generated; otherwise "".
+  if ( returnParam == std::next( vectorParams.begin() )->first )
+  {
+    // both vectors share the same length parameter ...
+    if ( vectorParams.begin()->second.lenParam == std::next( vectorParams.begin() )->second.lenParam )
+    {
+      // ... which has to be a plain uint32_t count
+      if ( commandIt->second.params[vectorParams.begin()->second.lenParam].type.type == "uint32_t" )
+      {
+        // the input vector must hold neither void data, handles, nor structure-chain anchors
+        if ( ( commandIt->second.params[vectorParams.begin()->first].type.type != "void" ) &&
+             !isHandleType( commandIt->second.params[vectorParams.begin()->first].type.type ) &&
+             !isStructureChainAnchor( commandIt->second.params[vectorParams.begin()->first].type.type ) )
+        {
+          return generateRAIIHandleCommandEnhanced( commandIt, initialSkipCount, { returnParam }, vectorParams, definition ) +
+                 generateRAIIHandleCommandEnhanced( commandIt, initialSkipCount, { returnParam }, vectorParams, definition, CommandFlavourFlagBits::singular );
+        }
+      }
+    }
+  }
+  // unsupported parameter constellation -> nothing generated
+  return "";
+}
+
+std::string VulkanHppGenerator::generateRAIIHandleCommandResultSingleSuccessWithErrors1ReturnVoid( std::map<std::string, CommandData>::const_iterator commandIt,
+                                                                                                   size_t initialSkipCount,
+                                                                                                   bool   definition,
+                                                                                                   size_t returnParam ) const
+{
+  // Generate the enhanced command(s) for a command returning raw (void) data.
+  // Supported constellations: no vectors; the return is a size_t-sized data vector;
+  // or two vectors where the return is the second, size_t-sized one and the first is
+  // a uint32_t-counted vector of handles. Anything else yields "".
+  std::map<size_t, VectorParamData> vectorParams = determineVectorParams( commandIt->second.params );
+  switch ( vectorParams.size() )
+  {
+    case 0: return generateRAIIHandleCommandEnhanced( commandIt, initialSkipCount, { returnParam }, vectorParams, definition );
+    case 1:
+      // the returned data must be the vector parameter itself ...
+      if ( returnParam == vectorParams.begin()->first )
+      {
+        // ... with a byte-count length of type size_t
+        if ( commandIt->second.params[vectorParams.begin()->second.lenParam].type.type == "size_t" )
+        {
+          return generateRAIIHandleCommandEnhanced( commandIt, initialSkipCount, { returnParam }, vectorParams, definition ) +
+                 generateRAIIHandleCommandEnhanced( commandIt, initialSkipCount, { returnParam }, vectorParams, definition, CommandFlavourFlagBits::singular );
+        }
+      }
+      break;
+    case 2:
+      // the returned data must be the second vector parameter ...
+      if ( returnParam == std::next( vectorParams.begin() )->first )
+      {
+        // ... the first vector is counted by a uint32_t ...
+        if ( commandIt->second.params[vectorParams.begin()->second.lenParam].type.type == "uint32_t" )
+        {
+          // ... and holds handles ...
+          if ( isHandleType( commandIt->second.params[vectorParams.begin()->first].type.type ) )
+          {
+            // ... while the returned data vector is sized in bytes (size_t)
+            if ( commandIt->second.params[std::next( vectorParams.begin() )->second.lenParam].type.type == "size_t" )
+            {
+              return generateRAIIHandleCommandEnhanced( commandIt, initialSkipCount, { returnParam }, vectorParams, definition ) +
+                     generateRAIIHandleCommandEnhanced(
+                       commandIt, initialSkipCount, { returnParam }, vectorParams, definition, CommandFlavourFlagBits::singular );
+            }
+          }
+        }
+      }
+      break;
+  }
+  // unsupported parameter constellation -> nothing generated
+  return "";
+}
+
+std::string VulkanHppGenerator::generateRAIIHandleCommandResultSingleSuccessWithErrors2Return( std::map<std::string, CommandData>::const_iterator commandIt,
+                                                                                               size_t                      initialSkipCount,
+                                                                                               bool                        definition,
+                                                                                               std::vector<size_t> const & returnParams ) const
+{
+  // Generate the enhanced command for a command with exactly two return parameters,
+  // both of type uint64_t: one returned as a vector (paired with an input vector of
+  // the same uint32_t count), the other as a single value. Anything else yields "".
+  if ( commandIt->second.params[returnParams[0]].type.type == "uint64_t" )
+  {
+    if ( commandIt->second.params[returnParams[1]].type.type == "uint64_t" )
+    {
+      std::map<size_t, VectorParamData> vectorParams = determineVectorParams( commandIt->second.params );
+      if ( vectorParams.size() == 2 )
+      {
+        // the first return parameter must be the second vector ...
+        if ( returnParams[0] == std::next( vectorParams.begin() )->first )
+        {
+          assert( returnParams[1] != std::next( vectorParams.begin() )->second.lenParam );
+          // ... and the second return parameter must not be a vector itself
+          if ( returnParams[1] != vectorParams.begin()->first )
+          {
+            assert( returnParams[1] != vectorParams.begin()->second.lenParam );
+            // both vectors must share the same length parameter ...
+            if ( vectorParams.begin()->second.lenParam == std::next( vectorParams.begin() )->second.lenParam )
+            {
+              // ... which has to be a plain uint32_t count
+              if ( commandIt->second.params[vectorParams.begin()->second.lenParam].type.type == "uint32_t" )
+              {
+                // the input vector must hold neither void data, handles, nor structure-chain anchors
+                if ( ( commandIt->second.params[vectorParams.begin()->first].type.type != "void" ) &&
+                     !isHandleType( commandIt->second.params[vectorParams.begin()->first].type.type ) &&
+                     !isStructureChainAnchor( commandIt->second.params[vectorParams.begin()->first].type.type ) )
+                {
+                  // two returns and two vectors! But one input vector, one output vector of the same size,
+                  // and one output value
+                  return generateRAIIHandleCommandEnhanced( commandIt, initialSkipCount, returnParams, vectorParams, definition ) +
+                         generateRAIIHandleCommandEnhanced(
+                           commandIt, initialSkipCount, returnParams, vectorParams, definition, CommandFlavourFlagBits::singular );
+                }
+              }
+            }
+          }
+        }
+      }
+    }
+  }
+  // unsupported parameter constellation -> nothing generated
+  return "";
+}
+
+std::string VulkanHppGenerator::generateRAIIHandleCommandValue( std::map<std::string, CommandData>::const_iterator commandIt,
+                                                                size_t                                             initialSkipCount,
+                                                                bool                                               definition ) const
+{
+  // Commands returning a plain value are only supported without return
+  // parameters and with at most one vector parameter.
+  std::vector<size_t> const returnParams = determineReturnParams( commandIt->second.params );
+  if ( !returnParams.empty() )
+  {
+    return "";
+  }
+  std::map<size_t, VectorParamData> const vectorParams = determineVectorParams( commandIt->second.params );
+  if ( 1 < vectorParams.size() )
+  {
+    return "";
+  }
+  return generateRAIIHandleCommandEnhanced( commandIt, initialSkipCount, returnParams, vectorParams, definition );
+}
+
+std::string VulkanHppGenerator::generateRAIIHandleCommandVoid( std::map<std::string, CommandData>::const_iterator commandIt,
+                                                               size_t                                             initialSkipCount,
+                                                               bool                                               definition ) const
+{
+  // Generate the RAII member function(s) for a command with return type void,
+  // dispatching on its number of return parameters (0, 1, or 2) and on the
+  // type of those parameters. Unsupported constellations yield "".
+  std::vector<size_t>               returnParams = determineReturnParams( commandIt->second.params );
+  std::map<size_t, VectorParamData> vectorParams = determineVectorParams( commandIt->second.params );
+  switch ( returnParams.size() )
+  {
+    case 0: return generateRAIIHandleCommandEnhanced( commandIt, initialSkipCount, returnParams, vectorParams, definition );
+    case 1:
+      if ( commandIt->second.params[returnParams[0]].type.type == "void" )
+      {
+        if ( commandIt->second.params[returnParams[0]].type.postfix == "**" )
+        {
+          // get a pointer to something
+          if ( vectorParams.empty() )
+          {
+            return generateRAIIHandleCommandEnhanced( commandIt, initialSkipCount, returnParams, vectorParams, definition );
+          }
+        }
+        else
+        {
+          switch ( vectorParams.size() )
+          {
+            case 0: return generateRAIIHandleCommandEnhanced( commandIt, initialSkipCount, returnParams, vectorParams, definition );
+            case 1:
+              {
+                // void data returned as a vector: only the singular flavour is generated
+                auto returnVectorParamIt = vectorParams.find( returnParams[0] );
+                if ( returnVectorParamIt != vectorParams.end() )
+                {
+                  return generateRAIIHandleCommandEnhanced(
+                    commandIt, initialSkipCount, returnParams, vectorParams, definition, CommandFlavourFlagBits::singular );
+                }
+              }
+              break;
+          }
+        }
+      }
+      else if ( isHandleType( commandIt->second.params[returnParams[0]].type.type ) )
+      {
+        // a single returned handle without any vectors -> a factory function
+        if ( vectorParams.empty() )
+        {
+          return generateRAIIHandleCommandFactory( commandIt, initialSkipCount, returnParams, vectorParams, definition );
+        }
+      }
+      else
+      {
+        // a single returned value; it must not be a vector parameter itself
+        auto returnVectorParamIt = vectorParams.find( returnParams[0] );
+        if ( returnVectorParamIt == vectorParams.end() )
+        {
+          std::string str = generateRAIIHandleCommandEnhanced( commandIt, initialSkipCount, returnParams, vectorParams, definition );
+          // structure-chain anchors additionally get the chained flavour
+          if ( isStructureChainAnchor( commandIt->second.params[returnParams[0]].type.type ) )
+          {
+            str += generateRAIIHandleCommandEnhanced( commandIt, initialSkipCount, returnParams, vectorParams, definition, CommandFlavourFlagBits::chained );
+          }
+          return str;
+        }
+      }
+      break;
+    case 2:
+      // two return parameters: the enumerate pattern -- a uint32_t count plus the
+      // vector it counts
+      if ( commandIt->second.params[returnParams[0]].type.type == "uint32_t" )
+      {
+        if ( vectorParams.size() == 1 )
+        {
+          if ( returnParams[0] == vectorParams.begin()->second.lenParam )
+          {
+            if ( returnParams[1] == vectorParams.begin()->first )
+            {
+              std::string str = generateRAIIHandleCommandEnhanced( commandIt, initialSkipCount, returnParams, vectorParams, definition );
+              // structure-chain anchors additionally get the chained flavour
+              if ( isStructureChainAnchor( commandIt->second.params[returnParams[1]].type.type ) )
+              {
+                str +=
+                  generateRAIIHandleCommandEnhanced( commandIt, initialSkipCount, returnParams, vectorParams, definition, CommandFlavourFlagBits::chained );
+              }
+              return str;
+            }
+          }
+        }
+      }
+      break;
+  }
+  // unsupported parameter constellation -> nothing generated
+  return "";
+}
+
+std::pair<std::string, std::string>
+  VulkanHppGenerator::generateRAIIHandleStaticCreate( std::pair<std::string, HandleData> const &                             handle,
+                                                      std::map<std::string, VulkanHppGenerator::CommandData>::const_iterator constructorIt,
+                                                      std::string const &                                                    enter,
+                                                      std::string const &                                                    leave ) const
+{
+  // Generate the static create method(s) of a RAII handle from a construction
+  // command; returns the (singular, array) pair of generated strings.
+  std::string singularConstructor, arrayConstructor;
+
+  if ( constructorIt->second.returnType == "VkResult" )
+  {
+    std::tie( singularConstructor, arrayConstructor ) = generateRAIIHandleStaticCreateResult( handle, constructorIt, enter, leave );
+  }
+  else if ( constructorIt->second.returnType == "void" )
+  {
+    // If the return type of the construct method does not return a VkResult, and thus can not fail,
+    // and thus does not throw an exception, a static create method is not needed.
+  }
+  else
+  {
+    // Both strings are still default-empty at this point, so the former guard
+    // `if ( singularConstructor.empty() && arrayConstructor.empty() )` was always
+    // true: an unknown return type is unconditionally an error.
+    throw std::runtime_error( "Never encountered a function like <" + constructorIt->first + "> !" );
+  }
+  return std::make_pair( singularConstructor, arrayConstructor );
+}
+
+
+std::pair<std::string, std::string>
+  VulkanHppGenerator::generateRAIIHandleConstructor( std::pair<std::string, HandleData> const &                             handle,
+                                                     std::map<std::string, VulkanHppGenerator::CommandData>::const_iterator constructorIt,
+                                                     std::string const &                                                    enter,
+                                                     std::string const &                                                    leave ) const
+{
+  // Generate the constructor(s) of a RAII handle from a construction command,
+  // dispatching on the command's return type; returns (singular, array).
+  std::pair<std::string, std::string> constructors;
+  if ( constructorIt->second.returnType == "VkResult" )
+  {
+    constructors = generateRAIIHandleConstructorResult( handle, constructorIt, enter, leave );
+  }
+  else if ( constructorIt->second.returnType == "void" )
+  {
+    constructors = generateRAIIHandleConstructorVoid( handle, constructorIt, enter, leave );
+  }
+  // an unrecognized return type, or a command no generator matched, is an error
+  if ( constructors.first.empty() && constructors.second.empty() )
+  {
+    throw std::runtime_error( "Never encountered a function like <" + constructorIt->first + "> !" );
+  }
+  return constructors;
+}
+
+std::pair<std::string, std::string>
+  VulkanHppGenerator::generateRAIIHandleStaticCreate1Return2Vector( std::pair<std::string, HandleData> const &         handle,
+                                                                   std::map<std::string, CommandData>::const_iterator constructorIt,
+                                                                   std::string const &                                enter,
+                                                                   std::string const &                                leave,
+                                                                   size_t                                             returnParam,
+                                                                   std::map<size_t, VectorParamData> const &          vectorParams ) const
+{
+  // Generate the static create methods for a construction command with one return
+  // parameter and two vector parameters: the returned handles are the second vector,
+  // both vectors share one uint32_t count, and the input vector holds structures.
+  // Returns (singular create, array create); ("", "") if the shape does not match.
+  if ( returnParam == std::next( vectorParams.begin() )->first )
+  {
+    if ( vectorParams.begin()->second.lenParam == std::next( vectorParams.begin() )->second.lenParam )
+    {
+      if ( constructorIt->second.params[vectorParams.begin()->second.lenParam].type.type == "uint32_t" )
+      {
+        // the input vector must hold neither raw void data nor handles
+        if ( ( constructorIt->second.params[vectorParams.begin()->first].type.type != "void" ) &&
+             !isHandleType( constructorIt->second.params[vectorParams.begin()->first].type.type ) )
+        {
+          std::string singularConstructor;
+          auto        lenParamIt    = constructorIt->second.params.begin() + vectorParams.begin()->second.lenParam;
+          auto        handleParamIt = constructorIt->second.params.begin() + std::next( vectorParams.begin() )->first;
+          // only emit a singular create if no other constructor already provides an
+          // equivalent one
+          if ( !checkEquivalentSingularConstructor( handle.second.constructorIts, constructorIt, lenParamIt ) )
+          {
+            singularConstructor = generateRAIIHandleStaticCreateVectorSingular( handle, constructorIt, handleParamIt, enter, leave );
+          }
+          return std::make_pair( singularConstructor, generateRAIIHandleStaticCreateVector( handle, constructorIt, handleParamIt, enter, leave ) );
+        }
+      }
+    }
+  }
+  return std::make_pair( "", "" );
+}
+
+std::pair<std::string, std::string>
+  VulkanHppGenerator::generateRAIIHandleConstructor1Return2Vector( std::pair<std::string, HandleData> const &         handle,
+                                                                   std::map<std::string, CommandData>::const_iterator constructorIt,
+                                                                   std::string const &                                enter,
+                                                                   std::string const &                                leave,
+                                                                   size_t                                             returnParam,
+                                                                   std::map<size_t, VectorParamData> const &          vectorParams ) const
+{
+  // Generate the constructors for a construction command with one return parameter and
+  // two vector parameters (same shape as generateRAIIHandleStaticCreate1Return2Vector,
+  // but emitting constructors instead of static creates): the returned handles are the
+  // second vector, both vectors share one uint32_t count, and the input vector holds
+  // structures. Returns (singular, array); ("", "") if the shape does not match.
+  if ( returnParam == std::next( vectorParams.begin() )->first )
+  {
+    if ( vectorParams.begin()->second.lenParam == std::next( vectorParams.begin() )->second.lenParam )
+    {
+      if ( constructorIt->second.params[vectorParams.begin()->second.lenParam].type.type == "uint32_t" )
+      {
+        // the input vector must hold neither raw void data nor handles
+        if ( ( constructorIt->second.params[vectorParams.begin()->first].type.type != "void" ) &&
+             !isHandleType( constructorIt->second.params[vectorParams.begin()->first].type.type ) )
+        {
+          std::string singularConstructor;
+          auto        lenParamIt    = constructorIt->second.params.begin() + vectorParams.begin()->second.lenParam;
+          auto        handleParamIt = constructorIt->second.params.begin() + std::next( vectorParams.begin() )->first;
+          // only emit a singular constructor if no other constructor already provides
+          // an equivalent one
+          if ( !checkEquivalentSingularConstructor( handle.second.constructorIts, constructorIt, lenParamIt ) )
+          {
+            singularConstructor = generateRAIIHandleConstructorVectorSingular( handle, constructorIt, handleParamIt, enter, leave );
+          }
+          return std::make_pair( singularConstructor, generateRAIIHandleConstructorVector( handle, constructorIt, handleParamIt, enter, leave ) );
+        }
+      }
+    }
+  }
+  return std::make_pair( "", "" );
+}
+
+std::pair<std::string, std::string> VulkanHppGenerator::generateRAIIHandleStaticCreates( std::pair<std::string, HandleData> const & handle ) const
+{
+  // Accumulate the static create methods over all construction commands of the
+  // handle; returns (singular creates, array creates).
+  auto [enter, leave] = generateProtection( handle.second.alias.empty() ? getProtectFromType( handle.first ) : "" );
+
+  std::string singularConstructors, arrayConstructors;
+  for ( auto constructorIt : handle.second.constructorIts )
+  {
+    // there is a non-const parameter with handle type : the to-be-constructed handle
+
+    // check for additional enter/leave guards for the constructors
+    auto [constructorEnter, constructorLeave] = generateProtection( getProtectFromTitle( constructorIt->second.referencedIn ) );
+    if ( constructorEnter == enter )
+    {
+      // protected by the same guard as the handle itself -> no extra guard needed
+      constructorEnter.clear();
+      constructorLeave.clear();
+    }
+
+    std::pair<std::string, std::string> constructors = generateRAIIHandleStaticCreate( handle, constructorIt, constructorEnter, constructorLeave );
+    singularConstructors += constructors.first;
+    arrayConstructors += constructors.second;
+  }
+  return std::make_pair( singularConstructors, arrayConstructors );
+}
+
+std::pair<std::string, std::string> VulkanHppGenerator::generateRAIIHandleConstructors( std::pair<std::string, HandleData> const & handle ) const
+{
+  // Accumulate the constructors over all construction commands of the handle and
+  // append the take-ownership constructor; returns (singular, array) constructors.
+  auto [enter, leave] = generateProtection( handle.second.alias.empty() ? getProtectFromType( handle.first ) : "" );
+
+  std::string singularConstructors, arrayConstructors;
+  for ( auto constructorIt : handle.second.constructorIts )
+  {
+    // there is a non-const parameter with handle type : the to-be-constructed handle
+
+    // check for additional enter/leave guards for the constructors
+    auto [constructorEnter, constructorLeave] = generateProtection( getProtectFromTitle( constructorIt->second.referencedIn ) );
+    if ( constructorEnter == enter )
+    {
+      // protected by the same guard as the handle itself -> no extra guard needed
+      constructorEnter.clear();
+      constructorLeave.clear();
+    }
+
+    std::pair<std::string, std::string> constructors = generateRAIIHandleConstructor( handle, constructorIt, constructorEnter, constructorLeave );
+    singularConstructors += constructors.first;
+    arrayConstructors += constructors.second;
+  }
+  singularConstructors += generateRAIIHandleConstructorTakeOwnership( handle );
+  return std::make_pair( singularConstructors, arrayConstructors );
+}
+
+std::string VulkanHppGenerator::generateRAIIHandleConstructorArgument( ParamData const & param, bool definition, bool singular, bool takesOwnership ) const
+{
+  // Generate the declaration text of one constructor argument, mapping the C parameter
+  // to its C++ binding form (Optional<>, const&, ArrayProxy<>, RAII handle reference, ...).
+  // definition: suppress default arguments in the out-of-class definition;
+  // singular: use the singular (de-pluralized) name for a vector argument;
+  // takesOwnership: pass raw handles by value instead of as RAII references.
+  std::string argument;
+  if ( param.type.isConstPointer() )
+  {
+    assert( param.type.type.starts_with( "Vk" ) );
+    assert( param.name.starts_with( "p" ) );
+    // strip the Hungarian "p" prefix: pCreateInfo -> createInfo
+    std::string argumentName = startLowerCase( stripPrefix( param.name, "p" ) );
+    std::string argumentType = generateNamespacedType( param.type.type );
+    if ( param.optional )
+    {
+      // an optional const pointer becomes an Optional<> with a nullptr default (declaration only)
+      assert( param.len.empty() );
+      argument = "VULKAN_HPP_NAMESPACE::Optional<const " + argumentType + "> " + argumentName + ( definition ? "" : " = nullptr" );
+    }
+    else if ( param.len.empty() )
+    {
+      // a non-array const pointer becomes a const reference
+      argument = argumentType + " const & " + argumentName;
+    }
+    else if ( singular )
+    {
+      // the singular flavour takes a single element by const reference
+      argument = argumentType + " const & " + stripPluralS( argumentName, m_tags );
+    }
+    else
+    {
+      // an array argument becomes an ArrayProxy
+      argument = "VULKAN_HPP_NAMESPACE::ArrayProxy<" + argumentType + "> const & " + argumentName;
+    }
+  }
+  else if ( specialPointerTypes.find( param.type.type ) != specialPointerTypes.end() )
+  {
+    // special pointer types (non-Vk output-style types) are passed as plain references
+    assert( !param.optional );
+    assert( param.type.isNonConstPointer() );
+    argument = param.type.type + " & " + param.name;
+  }
+  else if ( ( param.type.isValue() ) && isHandleType( param.type.type ) )
+  {
+    if ( takesOwnership )
+    {
+      // the take-ownership constructor receives the raw C handle by value
+      assert( !param.optional );
+      argument = param.type.type + " " + param.name;
+    }
+    else
+    {
+      // otherwise, handles are passed as (optionally Optional<>) RAII wrapper references
+      argument = "VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::" + stripPrefix( param.type.type, "Vk" );
+      if ( param.optional )
+      {
+        argument = "VULKAN_HPP_NAMESPACE::Optional<const " + argument + ">";
+      }
+      argument += " const & " + param.name;
+    }
+  }
+  else
+  {
+    // plain values and C arrays; C arrays are mapped to std::array const&
+    assert( !param.optional );
+    if ( param.arraySizes.empty() )
+    {
+      argument = param.type.compose( "VULKAN_HPP_NAMESPACE" ) + " ";
+    }
+    else
+    {
+      argument = generateStandardArray( param.type.compose( "VULKAN_HPP_NAMESPACE" ), param.arraySizes ) + " const & ";
+    }
+    argument += param.name;
+  }
+  return argument;
+}
+
+std::string VulkanHppGenerator::generateRAIIHandleConstructorArguments( std::pair<std::string, HandleData> const &                             handle,
+                                                                        std::map<std::string, VulkanHppGenerator::CommandData>::const_iterator constructorIt,
+                                                                        bool                                                                   singular,
+                                                                        bool takesOwnership ) const
+{
+  // Generate the full argument list of a RAII handle constructor: the parent handle
+  // first, then (for the take-ownership constructor) the raw C handle, then the
+  // remaining parameters of the construction command, skipping parameters that are
+  // implied (parent, the constructed handle itself, and count parameters).
+  auto [parentType, parentName] = getParentTypeAndName( handle );
+
+  std::string arguments = "VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::" + parentType + " const & " + parentName;
+  if ( takesOwnership )
+  {
+    arguments += ", " + handle.first + " " + generateRAIIHandleConstructorParamName( handle.first, handle.second.destructorIt );
+  }
+
+  if ( constructorIt != m_commands.end() )
+  {
+    parentType = "Vk" + parentType;
+    bool skip  = skipLeadingGrandParent( handle );
+    for ( size_t i = skip ? 1 : 0; i < constructorIt->second.params.size(); i++ )
+    {
+      ParamData const & param = constructorIt->second.params[i];
+      // filter parent and handle type
+      if ( ( param.type.type != parentType ) && ( param.type.type != handle.first ) )
+      {
+        // the specialPointerTypes are considered const-pointers!
+        if ( param.type.isNonConstPointer() && ( specialPointerTypes.find( param.type.type ) == specialPointerTypes.end() ) )
+        {
+          // this is supposed to be the returned size on an enumeration function!
+#if !defined( NDEBUG )
+          assert( param.type.type == "uint32_t" );
+          auto typeIt = std::find_if( constructorIt->second.params.begin(),
+                                      constructorIt->second.params.end(),
+                                      [&handle]( ParamData const & pd ) { return pd.type.type == handle.first; } );
+          assert( typeIt != constructorIt->second.params.end() );
+          assert( typeIt->len == param.name );
+#endif
+          continue;
+        }
+        else if ( std::find_if( constructorIt->second.params.begin(),
+                                constructorIt->second.params.end(),
+                                [&param]( ParamData const & pd ) { return pd.len == param.name; } ) != constructorIt->second.params.end() )
+        {
+          // this is the len of an other parameter, which will be mapped to an ArrayProxy
+          assert( param.type.isValue() && ( param.type.type == "uint32_t" ) );
+          assert( param.arraySizes.empty() && param.len.empty() && !param.optional );
+          continue;
+        }
+        // everything else becomes a real constructor argument
+        arguments += ", " + generateRAIIHandleConstructorArgument( param, false, singular, takesOwnership );
+      }
+    }
+  }
+  return arguments;
+}
+
+// Generates the argument list for the Vulkan C-API call issued from inside a RAII
+// handle constructor / static create() helper.
+//   nonConstPointerAsNullptr  - emit "nullptr" for the returned-handle pointer
+//                               (used for the counting call of two-step enumerations)
+//   singularParams            - indices of parameters reduced to a single element in
+//                               singular variants
+//   allocatorIsMemberVariable - read the allocator from m_allocator instead of the
+//                               local "allocator" argument
+//   handleParamsAreMembers    - address the returned handle via its "m_"-prefixed member
+std::string
+  VulkanHppGenerator::generateRAIIHandleConstructorCallArguments( std::pair<std::string, HandleData> const &                             handle,
+                                                                  std::map<std::string, VulkanHppGenerator::CommandData>::const_iterator constructorIt,
+                                                                  bool                     nonConstPointerAsNullptr,
+                                                                  std::set<size_t> const & singularParams,
+                                                                  bool                     allocatorIsMemberVariable,
+                                                                  bool                     handleParamsAreMembers ) const
+{
+  std::string arguments;
+  bool        encounteredArgument = false;
+  size_t      i                   = 0;
+  if ( skipLeadingGrandParent( handle ) )
+  {
+    // first parameter is the grand-parent handle: fetch it from the parent (param[1])
+    assert( ( 1 < constructorIt->second.params.size() ) && ( m_handles.find( constructorIt->second.params[0].type.type ) != m_handles.end() ) &&
+            ( m_handles.find( constructorIt->second.params[1].type.type ) != m_handles.end() ) );
+    arguments += "static_cast<" + constructorIt->second.params[0].type.type + ">( " + constructorIt->second.params[1].name + ".get" +
+                 stripPrefix( constructorIt->second.params[0].type.type, "Vk" ) + "() )";
+    encounteredArgument = true;
+    i                   = 1;
+  }
+  for ( ; i < constructorIt->second.params.size(); ++i )
+  {
+    ParamData const & param = constructorIt->second.params[i];
+    if ( encounteredArgument )
+    {
+      arguments += ", ";
+    }
+    if ( param.type.type == handle.first )
+    {
+      // the out-parameter that receives the newly created handle(s)
+      assert( param.type.isNonConstPointer() && param.arraySizes.empty() );
+      if ( param.len.empty() || !singularParams.empty() )
+      {
+        // single handle: take the address of the (possibly member) handle variable
+        assert( !param.optional );
+        assert( singularParams.empty() || ( param.len == constructorIt->second.params[*singularParams.begin()].name ) );
+        std::string paramName = generateRAIIHandleConstructorParamName( handle.first, handle.second.destructorIt );
+        if (handleParamsAreMembers) {
+          paramName = "m_" + paramName;
+        }
+        arguments += "reinterpret_cast<" + handle.first + "*>( &" + paramName + " )";
+      }
+      else if ( nonConstPointerAsNullptr )
+      {
+        // counting call of an enumeration: no output buffer yet
+        arguments += "nullptr";
+      }
+      else
+      {
+        // fill call of an enumeration: pass the local vector's data pointer
+        arguments += startLowerCase( stripPrefix( param.name, "p" ) ) + ".data()";
+      }
+    }
+    else if ( param.type.type == "VkAllocationCallbacks" )
+    {
+      // the (always optional) allocator, either a member or the constructor argument
+      assert( param.optional );
+      if ( allocatorIsMemberVariable )
+      {
+        arguments += "reinterpret_cast<const VkAllocationCallbacks *>( m_allocator )";
+      }
+      else
+      {
+        arguments += "reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )";
+      }
+    }
+    else if ( m_handles.find( param.type.type ) != m_handles.end() )
+    {
+      // other handle-typed parameters are unwrapped to the C handle by value
+      assert( param.type.isValue() && param.arraySizes.empty() && param.len.empty() );
+      if ( param.optional )
+      {
+        arguments += param.name + " ? static_cast<" + param.type.type + ">( **" + param.name + " ) : 0";
+      }
+      else
+      {
+        arguments += "static_cast<" + param.type.type + ">( *" + param.name + " )";
+      }
+    }
+    else
+    {
+      // any remaining parameter goes through the generic enhanced-call-argument path
+      assert( !param.optional );
+      arguments += generateCallArgumentEnhanced( constructorIt->second.params, i, nonConstPointerAsNullptr, singularParams, {} );
+    }
+    encounteredArgument = true;
+  }
+  return arguments;
+}
+
+// Generates a static create() factory for the two-step "enumerate" pattern: a counting
+// call followed by a fill call, repeated while VK_INCOMPLETE is returned. On failure the
+// VULKAN_HPP_NAMESPACE::Result is propagated via android::base::unexpected instead of
+// throwing (contrast generateRAIIHandleConstructorEnumerate, which throws).
+std::string VulkanHppGenerator::generateRAIIHandleStaticCreateEnumerate( std::pair<std::string, HandleData> const &                             handle,
+                                                                         std::map<std::string, VulkanHppGenerator::CommandData>::const_iterator constructorIt,
+                                                                         std::vector<ParamData>::const_iterator                                 handleParamIt,
+                                                                         std::vector<ParamData>::const_iterator                                 lenParamIt,
+                                                                         std::string const &                                                    enter,
+                                                                         std::string const &                                                    leave ) const
+{
+  std::string handleConstructorArguments = generateRAIIHandleSingularConstructorArguments( handle, constructorIt );
+  std::string handleType                 = stripPrefix( handle.first, "Vk" );
+  // device-level handles use the DeviceDispatcher, everything else the InstanceDispatcher
+  std::string dispatcherType             = hasParentHandle( handle.first, "VkDevice" ) ? "VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher"
+                                                                                       : "VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher";
+  // optionally tag the generated code with the name of this generator function
+  std::string debugHelper = "";
+  #ifdef DEBUG_GENERATOR
+  debugHelper = "/*" + std::string(__FUNCTION__) + "*/";
+  #endif
+
+  const std::string constructorTemplate =
+    R"(
+${enter}
+    static android::base::expected<${handleType}s, VULKAN_HPP_NAMESPACE::Result> create( ${constructorArguments} ) ${debugHelper}
+    {
+      ${dispatcherType} const * dispatcher = ${parentName}.getDispatcher();
+      std::vector<${vectorElementType}> ${vectorName};
+      ${counterType} ${counterName};
+      VULKAN_HPP_NAMESPACE::Result result;
+      do
+      {
+        result = static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->${constructorCall}( ${firstCallArguments} ) );
+        if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && ${counterName} )
+        {
+          ${vectorName}.resize( ${counterName} );
+          result = static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->${constructorCall}( ${secondCallArguments} ) );
+        }
+      } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        return android::base::unexpected(result);
+      }
+      ${handleType}s ret(nullptr);
+      ret.reserve( ${counterName} );
+      for ( auto const & ${handleName} : ${vectorName} )
+      {
+        ret.emplace_back( ${parentName}, ${handleConstructorArguments} );
+      }
+      return std::move(ret);
+    }
+${leave})";
+
+  // firstCallArguments passes nullptr for the output vector (counting call),
+  // secondCallArguments passes the resized vector's data (fill call)
+  return replaceWithMap( constructorTemplate,
+                         { { "constructorArguments", generateRAIIHandleConstructorArguments( handle, constructorIt, false, false ) },
+                           { "constructorCall", constructorIt->first },
+                           { "counterName", startLowerCase( stripPrefix( lenParamIt->name, "p" ) ) },
+                           { "counterType", lenParamIt->type.type },
+                           { "debugHelper", debugHelper },
+                           { "dispatcherType", dispatcherType },
+                           { "enter", enter },
+                           { "firstCallArguments", generateRAIIHandleConstructorCallArguments( handle, constructorIt, true, {}, true, true ) },
+                           { "handleConstructorArguments", handleConstructorArguments },
+                           { "handleName", startLowerCase( handleType ) },
+                           { "handleType", handleType },
+                           { "leave", leave },
+                           { "parentName", constructorIt->second.params.front().name },
+                           { "secondCallArguments", generateRAIIHandleConstructorCallArguments( handle, constructorIt, false, {}, true, true ) },
+                           { "vectorElementType", handleParamIt->type.type },
+                           { "vectorName", startLowerCase( stripPrefix( handleParamIt->name, "p" ) ) } } );
+}
+
+// Throwing-constructor counterpart of generateRAIIHandleStaticCreateEnumerate: the same
+// count-then-fill loop, but failure goes through throwResultException, so the generated
+// constructor is guarded by VULKAN_HPP_NO_EXCEPTIONS. Keep the two templates in sync.
+std::string VulkanHppGenerator::generateRAIIHandleConstructorEnumerate( std::pair<std::string, HandleData> const &                             handle,
+                                                                        std::map<std::string, VulkanHppGenerator::CommandData>::const_iterator constructorIt,
+                                                                        std::vector<ParamData>::const_iterator                                 handleParamIt,
+                                                                        std::vector<ParamData>::const_iterator                                 lenParamIt,
+                                                                        std::string const &                                                    enter,
+                                                                        std::string const &                                                    leave ) const
+{
+  std::string handleConstructorArguments = generateRAIIHandleSingularConstructorArguments( handle, constructorIt );
+  std::string handleType                 = stripPrefix( handle.first, "Vk" );
+  // device-level handles use the DeviceDispatcher, everything else the InstanceDispatcher
+  std::string dispatcherType             = hasParentHandle( handle.first, "VkDevice" ) ? "VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher"
+                                                                                       : "VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher";
+
+  const std::string constructorTemplate =
+    R"(
+${enter}
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    ${handleType}s( ${constructorArguments} )
+    {
+      ${dispatcherType} const * dispatcher = ${parentName}.getDispatcher();
+      std::vector<${vectorElementType}> ${vectorName};
+      ${counterType} ${counterName};
+      VULKAN_HPP_NAMESPACE::Result result;
+      do
+      {
+        result = static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->${constructorCall}( ${firstCallArguments} ) );
+        if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && ${counterName} )
+        {
+          ${vectorName}.resize( ${counterName} );
+          result = static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->${constructorCall}( ${secondCallArguments} ) );
+        }
+      } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
+      if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        VULKAN_HPP_ASSERT( ${counterName} <= ${vectorName}.size() );
+        this->reserve( ${counterName} );
+        for ( auto const & ${handleName} : ${vectorName} )
+        {
+          this->emplace_back( ${parentName}, ${handleConstructorArguments} );
+        }
+      }
+      else
+      {
+        throwResultException( result, "${constructorCall}" );
+      }
+    }
+#endif
+${leave})";
+
+  // firstCallArguments passes nullptr for the output vector (counting call),
+  // secondCallArguments passes the resized vector's data (fill call)
+  return replaceWithMap( constructorTemplate,
+                         { { "constructorArguments", generateRAIIHandleConstructorArguments( handle, constructorIt, false, false ) },
+                           { "constructorCall", constructorIt->first },
+                           { "counterName", startLowerCase( stripPrefix( lenParamIt->name, "p" ) ) },
+                           { "counterType", lenParamIt->type.type },
+                           { "dispatcherType", dispatcherType },
+                           { "enter", enter },
+                           { "firstCallArguments", generateRAIIHandleConstructorCallArguments( handle, constructorIt, true, {}, true, true ) },
+                           { "handleConstructorArguments", handleConstructorArguments },
+                           { "handleName", startLowerCase( handleType ) },
+                           { "handleType", handleType },
+                           { "leave", leave },
+                           { "parentName", constructorIt->second.params.front().name },
+                           { "secondCallArguments", generateRAIIHandleConstructorCallArguments( handle, constructorIt, false, {}, true, true ) },
+                           { "vectorElementType", handleParamIt->type.type },
+                           { "vectorName", startLowerCase( stripPrefix( handleParamIt->name, "p" ) ) } } );
+}
+
+// Builds the member-initializer list for a RAII handle constructor. The member set is
+// driven by the destructor's parameters: the parent handle, the handle itself (only when
+// ownership is taken), the allocator, and any further handles the destructor needs.
+// Values come from a matching constructor parameter or, failing that, from a struct
+// member of one of the constructor's struct parameters. Returns the list without a
+// trailing ", " (or an empty string).
+std::string
+  VulkanHppGenerator::generateRAIIHandleConstructorInitializationList( std::pair<std::string, HandleData> const &                             handle,
+                                                                       std::map<std::string, VulkanHppGenerator::CommandData>::const_iterator constructorIt,
+                                                                       std::map<std::string, VulkanHppGenerator::CommandData>::const_iterator destructorIt,
+                                                                       bool takesOwnership ) const
+{
+  auto [parentType, parentName] = getParentTypeAndName( handle );
+  std::string handleName        = generateRAIIHandleConstructorParamName( handle.first, destructorIt );
+
+  std::string initializationList;
+  if ( destructorIt != m_commands.end() )
+  {
+    for ( auto destructorParam : destructorIt->second.params )
+    {
+      if ( destructorParam.type.type == "Vk" + parentType )
+      {
+        // unwrap the RAII parent to its C handle
+        initializationList += "m_" + parentName + "( *" + parentName + " ), ";
+      }
+      else if ( destructorParam.type.type == handle.first )
+      {
+        // the handle member is only initialized when this constructor takes ownership
+        if ( takesOwnership )
+        {
+          initializationList += "m_" + handleName + "( " + handleName + " ), ";
+        }
+      }
+      else if ( destructorParam.type.type == "VkAllocationCallbacks" )
+      {
+        assert( destructorParam.type.isConstPointer() && destructorParam.arraySizes.empty() && destructorParam.len.empty() && destructorParam.optional );
+        initializationList += "m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), ";
+      }
+      else if ( isHandleType( destructorParam.type.type ) )
+      {
+        // an extra handle the destructor needs: find its value among the constructor params
+        assert( destructorParam.type.isValue() && destructorParam.arraySizes.empty() && destructorParam.len.empty() && !destructorParam.optional );
+        initializationList += "m_" + destructorParam.name + "( ";
+        auto constructorParamIt = std::find_if( constructorIt->second.params.begin(),
+                                                constructorIt->second.params.end(),
+                                                [&destructorParam]( ParamData const & pd ) { return pd.type.type == destructorParam.type.type; } );
+        if ( constructorParamIt != constructorIt->second.params.end() )
+        {
+          assert( constructorParamIt->type.isValue() && constructorParamIt->arraySizes.empty() && constructorParamIt->len.empty() &&
+                  !constructorParamIt->optional );
+          if ( constructorParamIt->type.type == "Vk" + parentType )
+          {
+            // the parent is a RAII wrapper here and must be dereferenced
+            initializationList += "*";
+          }
+          initializationList += constructorParamIt->name;
+        }
+        else
+        {
+          // not a direct parameter: look for it as a member of a struct parameter
+#if !defined( NDEBUG )
+          bool found = false;  // only backs the assert below
+#endif
+          for ( auto constructorParam : constructorIt->second.params )
+          {
+            auto structureIt = m_structures.find( constructorParam.type.type );
+            if ( structureIt != m_structures.end() )
+            {
+              auto structureMemberIt = findStructMemberItByType( destructorParam.type.type, structureIt->second.members );
+              if ( structureMemberIt != structureIt->second.members.end() )
+              {
+                assert( constructorParam.type.isConstPointer() && constructorParam.arraySizes.empty() && constructorParam.len.empty() &&
+                        !constructorParam.optional );
+                initializationList += startLowerCase( stripPrefix( constructorParam.name, "p" ) ) + "." + structureMemberIt->name;
+#if !defined( NDEBUG )
+                found = true;
+#endif
+                break;
+              }
+            }
+          }
+          assert( found );
+        }
+        initializationList += " ), ";
+      }
+      else
+      {
+        // we can ignore all other parameters here !
+      }
+    }
+  }
+  else
+  {
+    // no destructor: only the grand-parent (for second-level commands) and, when owning,
+    // the handle itself need to be initialized
+    if ( !handle.second.secondLevelCommands.empty() )
+    {
+      assert( !handle.second.constructorIts.empty() );
+#if !defined( NDEBUG )
+      auto constructorCommandIt = m_commands.find( handle.second.constructorIts.front()->first );
+#endif
+      assert( ( constructorCommandIt != m_commands.end() ) && ( 1 < constructorCommandIt->second.params.size() ) );
+      assert( std::next( constructorCommandIt->second.params.begin() )->type.type == "Vk" + parentType );
+
+      auto commandIt = m_commands.find( *handle.second.secondLevelCommands.begin() );
+      assert( ( commandIt != m_commands.end() ) && ( 1 < commandIt->second.params.size() ) );
+      assert( commandIt->second.params.front().type.type == constructorCommandIt->second.params.front().type.type );
+      assert( std::next( commandIt->second.params.begin() )->type.type == handle.first );
+
+      std::string grandParentType = stripPrefix( commandIt->second.params.front().type.type, "Vk" );
+      initializationList += "m_" + startLowerCase( grandParentType ) + "( " + parentName + ".get" + grandParentType + "() ), ";
+    }
+    if ( takesOwnership )
+    {
+      initializationList += "m_" + handleName + "( " + handleName + " ), ";
+    }
+  }
+  // strip the trailing ", "
+  return initializationList.empty() ? initializationList : initializationList.substr( 0, initializationList.size() - 2 );
+}
+
+// Derives the constructor-parameter name for a handle of the given C type. If the
+// destructor references that type, the name is taken from the (unique) matching
+// destructor parameter: pointer-style names are de-pluralized and stripped of their
+// "p" prefix, value-style names are used as-is. Otherwise the name is derived from
+// the type itself (strip "Vk", lower-case the first letter).
+std::string VulkanHppGenerator::generateRAIIHandleConstructorParamName( std::string const &                                type,
+                                                                        std::map<std::string, CommandData>::const_iterator destructorIt ) const
+{
+  if ( destructorIt != m_commands.end() )
+  {
+    auto const & params   = destructorIt->second.params;
+    auto         sameType = [&type]( ParamData const & pd ) { return pd.type.type == type; };
+    auto         matchIt  = std::find_if( params.begin(), params.end(), sameType );
+    if ( matchIt != params.end() )
+    {
+      // the destructor must reference this type exactly once
+      assert( std::find_if( std::next( matchIt ), params.end(), sameType ) == params.end() );
+      if ( matchIt->type.isValue() )
+      {
+        return matchIt->name;
+      }
+      return startLowerCase( stripPrefix( stripPluralS( matchIt->name, m_tags ), "p" ) );
+    }
+  }
+  // no destructor parameter of this type: derive the name from the type
+  return startLowerCase( stripPrefix( type, "Vk" ) );
+}
+
+// Selects and generates the static create() helper(s) for a constructor command that
+// returns VkResult, dispatching on the number of success codes, return parameters, and
+// vector parameters. Returns a pair of code strings; judging by the make_pair calls
+// below, the first member targets the single-handle class and the second the plural
+// "...s" class — NOTE(review): inferred from usage, confirm against the call site.
+// Unhandled combinations yield a pair of empty strings.
+std::pair<std::string, std::string> VulkanHppGenerator::generateRAIIHandleStaticCreateResult( std::pair<std::string, HandleData> const &         handle,
+                                                                                              std::map<std::string, CommandData>::const_iterator constructorIt,
+                                                                                              std::string const &                                enter,
+                                                                                              std::string const &                                leave ) const
+{
+  assert( !constructorIt->second.successCodes.empty() );
+  assert( constructorIt->second.successCodes[0] == "VK_SUCCESS" );
+  switch ( constructorIt->second.successCodes.size() )
+  {
+    case 1:
+      // VK_SUCCESS only
+      if ( !constructorIt->second.errorCodes.empty() )
+      {
+        std::vector<size_t> returnParams = determineReturnParams( constructorIt->second.params );
+        if ( returnParams.size() == 1 )
+        {
+          assert( isHandleType( constructorIt->second.params[returnParams[0]].type.type ) );
+          std::map<size_t, VectorParamData> vectorParams = determineVectorParams( constructorIt->second.params );
+          switch ( vectorParams.size() )
+          {
+            case 0:
+              // plain single-handle creation
+              return std::make_pair( generateRAIIHandleStaticCreateResultSingleSuccessWithErrors1Return0Vector( handle, constructorIt, enter, leave ), "" );
+            case 1:
+              // vector of handles whose length comes from a struct member
+              if ( returnParams[0] == vectorParams.begin()->first )
+              {
+                if ( isLenByStructMember( constructorIt->second.params[vectorParams.begin()->first].len,
+                                          constructorIt->second.params[vectorParams.begin()->second.lenParam] ) )
+                {
+                  auto handleParamIt = constructorIt->second.params.begin() + returnParams[0];
+                  return std::make_pair( "", generateRAIIHandleStaticCreateVector( handle, constructorIt, handleParamIt, enter, leave ) );
+                }
+              }
+              break;
+            case 2: return generateRAIIHandleStaticCreate1Return2Vector( handle, constructorIt, enter, leave, returnParams[0], vectorParams );
+          }
+        }
+      }
+      break;
+    case 2:
+      // VK_SUCCESS plus one more success code (e.g. VK_INCOMPLETE for enumerations)
+      if ( !constructorIt->second.errorCodes.empty() )
+      {
+        std::vector<size_t> returnParams = determineReturnParams( constructorIt->second.params );
+        switch ( returnParams.size() )
+        {
+          case 1:
+            assert( isHandleType( constructorIt->second.params[returnParams[0]].type.type ) );
+            {
+              std::map<size_t, VectorParamData> vectorParams = determineVectorParams( constructorIt->second.params );
+              if ( vectorParams.size() == 2 )
+              {
+                return generateRAIIHandleStaticCreate1Return2Vector( handle, constructorIt, enter, leave, returnParams[0], vectorParams );
+              }
+            }
+            break;
+          case 2:
+            // two-step enumeration: a returned counter plus the returned handle vector
+            if ( constructorIt->second.params[returnParams[0]].type.type == "uint32_t" )
+            {
+              assert( isHandleType( constructorIt->second.params[returnParams[1]].type.type ) );
+              std::map<size_t, VectorParamData> vectorParams = determineVectorParams( constructorIt->second.params );
+              if ( vectorParams.size() == 1 )
+              {
+                if ( returnParams[0] == vectorParams.begin()->second.lenParam )
+                {
+                  assert( returnParams[1] == vectorParams.begin()->first );
+                  assert( constructorIt->second.successCodes[1] == "VK_INCOMPLETE" );
+                  auto lenParamIt    = constructorIt->second.params.begin() + returnParams[0];
+                  auto handleParamIt = constructorIt->second.params.begin() + returnParams[1];
+                  return std::make_pair( "", generateRAIIHandleStaticCreateEnumerate( handle, constructorIt, handleParamIt, lenParamIt, enter, leave ) );
+                }
+              }
+            }
+            break;
+        }
+      }
+      break;
+    case 4:
+      // commands with four success codes (e.g. swapchain acquire style)
+      if ( !constructorIt->second.errorCodes.empty() )
+      {
+        std::vector<size_t> returnParams = determineReturnParams( constructorIt->second.params );
+        if ( returnParams.size() == 1 )
+        {
+          assert( isHandleType( constructorIt->second.params[returnParams[0]].type.type ) );
+          std::map<size_t, VectorParamData> vectorParams = determineVectorParams( constructorIt->second.params );
+          if ( vectorParams.size() == 2 )
+          {
+            return generateRAIIHandleStaticCreate1Return2Vector( handle, constructorIt, enter, leave, returnParams[0], vectorParams );
+          }
+        }
+      }
+      break;
+  }
+  // unsupported combination: emit nothing
+  return std::make_pair( "", "" );
+}
+
+// Throwing-constructor counterpart of generateRAIIHandleStaticCreateResult: identical
+// dispatch on success codes / return params / vector params, but delegating to the
+// constructor-generating helpers instead of the static-create ones. Keep the two
+// switch structures in sync when adding new shapes.
+std::pair<std::string, std::string> VulkanHppGenerator::generateRAIIHandleConstructorResult( std::pair<std::string, HandleData> const &         handle,
+                                                                                             std::map<std::string, CommandData>::const_iterator constructorIt,
+                                                                                             std::string const &                                enter,
+                                                                                             std::string const &                                leave ) const
+{
+  assert( !constructorIt->second.successCodes.empty() );
+  assert( constructorIt->second.successCodes[0] == "VK_SUCCESS" );
+  switch ( constructorIt->second.successCodes.size() )
+  {
+    case 1:
+      // VK_SUCCESS only
+      if ( !constructorIt->second.errorCodes.empty() )
+      {
+        std::vector<size_t> returnParams = determineReturnParams( constructorIt->second.params );
+        if ( returnParams.size() == 1 )
+        {
+          assert( isHandleType( constructorIt->second.params[returnParams[0]].type.type ) );
+          std::map<size_t, VectorParamData> vectorParams = determineVectorParams( constructorIt->second.params );
+          switch ( vectorParams.size() )
+          {
+            case 0:
+              // plain single-handle constructor
+              return std::make_pair( generateRAIIHandleConstructorResultSingleSuccessWithErrors1Return0Vector( handle, constructorIt, enter, leave ), "" );
+            case 1:
+              // vector of handles whose length comes from a struct member
+              if ( returnParams[0] == vectorParams.begin()->first )
+              {
+                if ( isLenByStructMember( constructorIt->second.params[vectorParams.begin()->first].len,
+                                          constructorIt->second.params[vectorParams.begin()->second.lenParam] ) )
+                {
+                  auto handleParamIt = constructorIt->second.params.begin() + returnParams[0];
+                  return std::make_pair( "", generateRAIIHandleConstructorVector( handle, constructorIt, handleParamIt, enter, leave ) );
+                }
+              }
+              break;
+            case 2: return generateRAIIHandleConstructor1Return2Vector( handle, constructorIt, enter, leave, returnParams[0], vectorParams );
+          }
+        }
+      }
+      break;
+    case 2:
+      // VK_SUCCESS plus one more success code (e.g. VK_INCOMPLETE for enumerations)
+      if ( !constructorIt->second.errorCodes.empty() )
+      {
+        std::vector<size_t> returnParams = determineReturnParams( constructorIt->second.params );
+        switch ( returnParams.size() )
+        {
+          case 1:
+            assert( isHandleType( constructorIt->second.params[returnParams[0]].type.type ) );
+            {
+              std::map<size_t, VectorParamData> vectorParams = determineVectorParams( constructorIt->second.params );
+              if ( vectorParams.size() == 2 )
+              {
+                return generateRAIIHandleConstructor1Return2Vector( handle, constructorIt, enter, leave, returnParams[0], vectorParams );
+              }
+            }
+            break;
+          case 2:
+            // two-step enumeration: a returned counter plus the returned handle vector
+            if ( constructorIt->second.params[returnParams[0]].type.type == "uint32_t" )
+            {
+              assert( isHandleType( constructorIt->second.params[returnParams[1]].type.type ) );
+              std::map<size_t, VectorParamData> vectorParams = determineVectorParams( constructorIt->second.params );
+              if ( vectorParams.size() == 1 )
+              {
+                if ( returnParams[0] == vectorParams.begin()->second.lenParam )
+                {
+                  assert( returnParams[1] == vectorParams.begin()->first );
+                  assert( constructorIt->second.successCodes[1] == "VK_INCOMPLETE" );
+                  auto lenParamIt    = constructorIt->second.params.begin() + returnParams[0];
+                  auto handleParamIt = constructorIt->second.params.begin() + returnParams[1];
+                  return std::make_pair( "", generateRAIIHandleConstructorEnumerate( handle, constructorIt, handleParamIt, lenParamIt, enter, leave ) );
+                }
+              }
+            }
+            break;
+        }
+      }
+      break;
+    case 4:
+      // commands with four success codes (e.g. swapchain acquire style)
+      if ( !constructorIt->second.errorCodes.empty() )
+      {
+        std::vector<size_t> returnParams = determineReturnParams( constructorIt->second.params );
+        if ( returnParams.size() == 1 )
+        {
+          assert( isHandleType( constructorIt->second.params[returnParams[0]].type.type ) );
+          std::map<size_t, VectorParamData> vectorParams = determineVectorParams( constructorIt->second.params );
+          if ( vectorParams.size() == 2 )
+          {
+            return generateRAIIHandleConstructor1Return2Vector( handle, constructorIt, enter, leave, returnParams[0], vectorParams );
+          }
+        }
+      }
+      break;
+  }
+  // unsupported combination: emit nothing
+  return std::make_pair( "", "" );
+}
+
+// Maps one command parameter onto the argument expression used when a static create()
+// helper forwards to the corresponding RAII constructor.
+//   param    - the command parameter to map
+//   singular - true when generating the singular variant (arrays collapse to one element)
+// Fixes vs. the previous revision: removed the unused local `argumentType` (a dead call
+// to generateNamespacedType) and collapsed three byte-identical branches; the asserts
+// they carried are preserved with equivalent conditions.
+std::string VulkanHppGenerator::generateRAIIHandleStaticCreateToConstructorArgument( ParamData const & param, bool singular ) const
+{
+  std::string argument;
+  if ( param.type.isConstPointer() )
+  {
+    assert( param.type.type.starts_with( "Vk" ) );
+    assert( param.name.starts_with( "p" ) );
+    // const pointers are exposed under a de-pointered camelCase name (drop the "p")
+    std::string argumentName = startLowerCase( stripPrefix( param.name, "p" ) );
+    // an optional const pointer must not carry a length
+    assert( !param.optional || param.len.empty() );
+    if ( singular && !param.len.empty() )
+    {
+      // singular variant: use the de-pluralized element name instead of the array name
+      argument = stripPluralS( argumentName, m_tags );
+    }
+    else
+    {
+      argument = argumentName;
+    }
+  }
+  else if ( specialPointerTypes.find( param.type.type ) != specialPointerTypes.end() )
+  {
+    // special pointer types are passed through by their original name
+    assert( !param.optional );
+    assert( param.type.isNonConstPointer() );
+    argument = param.name;
+  }
+  else
+  {
+    // values and handles are passed through by name; only value-type handles may be optional
+    assert( ( param.type.isValue() && isHandleType( param.type.type ) ) || !param.optional );
+    argument = param.name;
+  }
+  return argument;
+}
+
+// Builds the argument list with which a static create() helper invokes the RAII
+// constructor: the parent name, then the local handle variable, then every constructor
+// parameter that is not implied — the parent, the handle itself, enumeration counters,
+// and ArrayProxy length parameters are all skipped.
+std::string VulkanHppGenerator::generateRAIIHandleStaticCreateToConstructorArguments(
+  std::pair<std::string, HandleData> const & handle,
+  std::map<std::string, VulkanHppGenerator::CommandData>::const_iterator constructorIt ) const
+{
+  auto [parentType, parentName] = getParentTypeAndName( handle );
+
+  std::string arguments = parentName;
+
+  // the freshly created handle is passed under its constructor-parameter name
+  arguments += ", " + generateRAIIHandleConstructorParamName( handle.first, handle.second.destructorIt );
+
+  if ( constructorIt != m_commands.end() )
+  {
+    // compare against the C type name ("Vk" + type) below
+    parentType = "Vk" + parentType;
+    bool skip  = skipLeadingGrandParent( handle );
+    for ( size_t i = skip ? 1 : 0; i < constructorIt->second.params.size(); i++ )
+    {
+      ParamData const & param = constructorIt->second.params[i];
+      // filter parent and handle type
+      if ( ( param.type.type != parentType ) && ( param.type.type != handle.first ) )
+      {
+        // the specialPointerTypes are considered const-pointers!
+        if ( param.type.isNonConstPointer() && ( specialPointerTypes.find( param.type.type ) == specialPointerTypes.end() ) )
+        {
+          // this is supposed to be the returned size on an enumeration function!
+#if !defined( NDEBUG )
+          assert( param.type.type == "uint32_t" );
+          auto typeIt = std::find_if( constructorIt->second.params.begin(),
+                                      constructorIt->second.params.end(),
+                                      [&handle]( ParamData const & pd ) { return pd.type.type == handle.first; } );
+          assert( typeIt != constructorIt->second.params.end() );
+          assert( typeIt->len == param.name );
+#endif
+          continue;
+        }
+        else if ( std::find_if( constructorIt->second.params.begin(),
+                                constructorIt->second.params.end(),
+                                [&param]( ParamData const & pd ) { return pd.len == param.name; } ) != constructorIt->second.params.end() )
+        {
+          // this is the len of an other parameter, which will be mapped to an ArrayProxy
+          assert( param.type.isValue() && ( param.type.type == "uint32_t" ) );
+          assert( param.arraySizes.empty() && param.len.empty() && !param.optional );
+          continue;
+        }
+        arguments += ", " + generateRAIIHandleStaticCreateToConstructorArgument(param, false);
+      }
+    }
+  }
+  return arguments;
+}
+
+// Generates a static create() helper for the simplest shape: a single success code with
+// error codes, one returned handle, and no vector parameters. Failure is reported via
+// android::base::unexpected rather than by throwing; on success the RAII wrapper is
+// constructed from the locally created C handle.
+std::string VulkanHppGenerator::generateRAIIHandleStaticCreateResultSingleSuccessWithErrors1Return0Vector(
+  std::pair<std::string, HandleData> const &                             handle,
+  std::map<std::string, VulkanHppGenerator::CommandData>::const_iterator constructorIt,
+  std::string const &                                                    enter,
+  std::string const &                                                    leave ) const
+{
+  auto [parentType, parentName] = getParentTypeAndName( handle );
+
+  std::string getDispatcher = parentName + ".getDispatcher()";
+
+  // parameter list of the generated create() function
+  std::string staticCreateArguments = generateRAIIHandleConstructorArguments( handle, constructorIt, false, false );
+
+  // arguments for the underlying C-API call (allocator and handle are locals here)
+  std::string callArguments = generateRAIIHandleConstructorCallArguments( handle, constructorIt, false, {}, false, false );
+
+  // local variable that receives the raw C handle
+  std::string localParamType = handle.first;
+  std::string localParamName = generateRAIIHandleConstructorParamName( handle.first, handle.second.destructorIt );
+
+  // arguments forwarded from create() to the RAII constructor
+  std::string callConstructorArguments =  generateRAIIHandleStaticCreateToConstructorArguments( handle, handle.second.destructorIt );
+
+  // optionally tag the generated code with the name of this generator function
+  std::string debugHelper = "";
+  #ifdef DEBUG_GENERATOR
+  debugHelper = "/*" + std::string(__FUNCTION__) + "*/";
+  #endif
+
+  const std::string constructorTemplate =
+    R"(
+${enter}
+    static android::base::expected<${handleType}, VULKAN_HPP_NAMESPACE::Result> create( ${staticCreateArguments} ) ${debugHelper}
+    {
+      ${localParamType} ${localParamName};
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( ${getDispatcher}->${constructorCall}( ${callArguments} ) );
+      if ( ${failureCheck} )
+      {
+        return android::base::unexpected(result);
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::${handleType}(${callConstructorArguments});
+    }
+${leave})";
+
+  return replaceWithMap( constructorTemplate,
+                         { { "callArguments", callArguments },
+                           { "staticCreateArguments", staticCreateArguments },
+                           { "callConstructorArguments", callConstructorArguments },
+                           { "constructorCall", constructorIt->first },
+                           { "debugHelper", debugHelper },
+                           { "enter", enter },
+                           { "failureCheck", generateFailureCheck( constructorIt->second.successCodes ) },
+                           { "getDispatcher", getDispatcher },
+                           { "leave", leave },
+                           { "localParamName", localParamName },
+                           { "localParamType", localParamType },
+                           { "handleType", stripPrefix( handle.first, "Vk" ) } } );
+}
+
+// Generate the throwing constructor (guarded by VULKAN_HPP_NO_EXCEPTIONS) for the same command
+// shape as the static-create variant: single success code, one returned handle, no vector output.
+// On failure the generated code calls throwResultException instead of returning an error value.
+std::string VulkanHppGenerator::generateRAIIHandleConstructorResultSingleSuccessWithErrors1Return0Vector(
+  std::pair<std::string, HandleData> const &                             handle,
+  std::map<std::string, VulkanHppGenerator::CommandData>::const_iterator constructorIt,
+  std::string const &                                                    enter,
+  std::string const &                                                    leave ) const
+{
+  auto [parentType, parentName] = getParentTypeAndName( handle );
+
+  std::string getDispatcher = parentName + ".getDispatcher()";
+  std::string dispatcherInitializer, dispatcherInit;
+  // VkInstance and VkDevice bootstrap their own dispatcher from the freshly created handle (after
+  // the native call), while every other handle type copies its parent's dispatcher in the member
+  // initializer list.
+  if ( ( handle.first != "VkInstance" ) && ( handle.first != "VkDevice" ) )
+  {
+    dispatcherInitializer = "m_dispatcher( " + getDispatcher + " )";
+  }
+  else
+  {
+    std::string handleType = stripPrefix( handle.first, "Vk" );
+    dispatcherInit         = "\n        m_dispatcher.reset( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::" + handleType + "Dispatcher( " + parentName +
+                     ".getDispatcher()->vkGet" + handleType + "ProcAddr, static_cast<" + handle.first + ">( m_" + startLowerCase( handleType ) + " ) ) );";
+  }
+
+  std::string constructorArguments = generateRAIIHandleConstructorArguments( handle, constructorIt, false, false );
+
+  std::string callArguments = generateRAIIHandleConstructorCallArguments( handle, constructorIt, false, {}, handle.second.destructorIt != m_commands.end(), false );
+
+  std::string initializationList = generateRAIIHandleConstructorInitializationList( handle, constructorIt, handle.second.destructorIt, false );
+  // Separate the initialization list from the dispatcher initializer only when both are present.
+  if ( !initializationList.empty() && !dispatcherInitializer.empty() )
+  {
+    initializationList += ", ";
+  }
+
+  const std::string constructorTemplate =
+    R"(
+${enter}
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    ${handleType}( ${constructorArguments} )
+      : ${initializationList}${dispatcherInitializer}
+    {
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( ${getDispatcher}->${constructorCall}( ${callArguments} ) );
+      if ( ${failureCheck} )
+      {
+        throwResultException( result, "${constructorCall}" );
+      }${dispatcherInit}
+    }
+#endif
+${leave})";
+
+  return replaceWithMap( constructorTemplate,
+                         { { "callArguments", callArguments },
+                           { "constructorArguments", constructorArguments },
+                           { "constructorCall", constructorIt->first },
+                           { "dispatcherInitializer", dispatcherInitializer },
+                           { "dispatcherInit", dispatcherInit },
+                           { "enter", enter },
+                           { "failureCheck", generateFailureCheck( constructorIt->second.successCodes ) },
+                           { "getDispatcher", getDispatcher },
+                           { "leave", leave },
+                           { "handleType", stripPrefix( handle.first, "Vk" ) },
+                           { "initializationList", initializationList } } );
+}
+
+// Generate the constructor that takes ownership of an already-created native handle; no native
+// create call is emitted.  If the handle's creation commands can return multiple success codes
+// (other than VK_INCOMPLETE), the constructor additionally accepts the success code that produced
+// the handle and stores it in m_constructorSuccessCode.
+std::string VulkanHppGenerator::generateRAIIHandleConstructorTakeOwnership( std::pair<std::string, HandleData> const & handle ) const
+{
+  std::string handleType = stripPrefix( handle.first, "Vk" );
+  std::string handleName = startLowerCase( handleType );
+
+  auto [parentType, parentName] = getParentTypeAndName( handle );
+
+  std::string constructorArguments = generateRAIIHandleConstructorArguments( handle, handle.second.destructorIt, false, true );
+  std::string initializationList   = generateRAIIHandleConstructorInitializationList( handle, handle.second.destructorIt, handle.second.destructorIt, true );
+  assert( !handle.second.constructorIts.empty() );
+  if ( 1 < handle.second.constructorIts[0]->second.successCodes.size() && ( handle.second.constructorIts[0]->second.successCodes[1] != "VK_INCOMPLETE" ) )
+  {
+#if !defined( NDEBUG )
+    // All constructors of this handle are expected to agree on having multiple success codes.
+    for ( size_t i = 1; i < handle.second.constructorIts.size(); ++i )
+    {
+      assert( 1 < handle.second.constructorIts[i]->second.successCodes.size() );
+    }
+#endif
+    constructorArguments += ", VULKAN_HPP_NAMESPACE::Result successCode = VULKAN_HPP_NAMESPACE::Result::eSuccess";
+    initializationList += ", m_constructorSuccessCode( successCode )";
+  }
+
+  // Non-Instance/Device handles share the parent's dispatcher via the initializer list.
+  std::string dispatcherInitializer;
+  if ( ( handle.first != "VkInstance" ) && ( handle.first != "VkDevice" ) )
+  {
+    dispatcherInitializer = "m_dispatcher( " + parentName + ".getDispatcher() )";
+  }
+  if ( !initializationList.empty() && !dispatcherInitializer.empty() )
+  {
+    initializationList += ", ";
+  }
+
+  // Instance/Device own their dispatcher and must initialize it from the wrapped handle instead.
+  std::string dispatcherInit;
+  if ( ( handle.first == "VkDevice" ) || ( handle.first == "VkInstance" ) )
+  {
+    dispatcherInit = "\n        m_dispatcher.reset( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::" + handleType + "Dispatcher( " + parentName +
+                     ".getDispatcher()->vkGet" + handleType + "ProcAddr, static_cast<" + handle.first + ">( m_" + startLowerCase( handleType ) + " ) ) );";
+  }
+
+  const std::string constructorTemplate =
+    R"(
+    ${handleType}( ${constructorArguments} )
+      : ${initializationList}${dispatcherInitializer}
+    {${dispatcherInit}}
+)";
+
+  return replaceWithMap( constructorTemplate,
+                         { { "constructorArguments", constructorArguments },
+                           { "dispatcherInitializer", dispatcherInitializer },
+                           { "dispatcherInit", dispatcherInit },
+                           { "handleType", handleType },
+                           { "initializationList", initializationList } } );
+}
+
+// Generate a non-throwing static `create` factory for commands that return a vector of handles.
+// On success the generated code wraps each native handle in a RAII element and returns the
+// container; on failure it returns android::base::unexpected(result).
+std::string VulkanHppGenerator::generateRAIIHandleStaticCreateVector( std::pair<std::string, HandleData> const &                             handle,
+                                                                      std::map<std::string, VulkanHppGenerator::CommandData>::const_iterator constructorIt,
+                                                                      std::vector<ParamData>::const_iterator                                 handleParamIt,
+                                                                      std::string const &                                                    enter,
+                                                                      std::string const &                                                    leave ) const
+{
+  // Determine the expression for the number of returned handles: the `len` attribute either names
+  // a sibling count parameter, or a struct member spelled "pStruct->member".
+  std::string vectorSize;
+  auto        lenIt = std::find_if( constructorIt->second.params.begin(),
+                                    constructorIt->second.params.end(),
+                                    [&handleParamIt]( ParamData const & pd ) { return pd.name == handleParamIt->len; } );
+  if ( lenIt == constructorIt->second.params.end() )
+  {
+    // len has the form "pStruct->member": size is read from a member of a struct parameter.
+    std::vector<std::string> lenParts = tokenize( handleParamIt->len, "->" );
+    assert( lenParts.size() == 2 );
+    lenIt = std::find_if(
+      constructorIt->second.params.begin(), constructorIt->second.params.end(), [&lenParts]( ParamData const & pd ) { return pd.name == lenParts[0]; } );
+#if !defined( NDEBUG )
+    assert( lenIt != constructorIt->second.params.end() );
+    auto structureIt = m_structures.find( lenIt->type.type );
+    assert( structureIt != m_structures.end() );
+    assert( isStructMember( lenParts[1], structureIt->second.members ) );
+    assert( constructorIt->second.successCodes.size() == 1 );
+#endif
+    vectorSize = startLowerCase( stripPrefix( lenParts[0], "p" ) ) + "." + lenParts[1];
+  }
+  else
+  {
+    // len names a count parameter: size is taken from the sibling input array sharing that count.
+    auto arrayIt = std::find_if( constructorIt->second.params.begin(),
+                                 constructorIt->second.params.end(),
+                                 [&lenIt, &handleParamIt]( ParamData const & pd ) { return ( pd.len == lenIt->name ) && ( pd.name != handleParamIt->name ); } );
+    assert( arrayIt != constructorIt->second.params.end() );
+    vectorSize = startLowerCase( stripPrefix( arrayIt->name, "p" ) ) + ".size()";
+  }
+
+  std::string handleConstructorArguments = generateRAIIHandleSingularConstructorArguments( handle, constructorIt );
+  std::string handleType                 = stripPrefix( handle.first, "Vk" );
+  // With multiple success codes, the actual result is forwarded to each wrapped element.
+  std::string successCodePassToElement   = ( 1 < constructorIt->second.successCodes.size() ) ? ", result" : "";
+
+  // Optionally tag generated code with the generator function's name to ease debugging the generator.
+  std::string debugHelper = "";
+  #ifdef DEBUG_GENERATOR
+  debugHelper = "/*" + std::string(__FUNCTION__) + "*/";
+  #endif
+
+  const std::string constructorTemplate =
+    R"(
+${enter}
+    static android::base::expected<${handleType}s, VULKAN_HPP_NAMESPACE::Result> create( ${staticCreateArguments} ) ${debugHelper}
+    {
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * dispatcher = ${parentName}.getDispatcher();
+      std::vector<${vectorElementType}> ${vectorName}( ${vectorSize} );
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->${constructorCall}( ${callArguments} ) );
+      if ( ${successCheck} )
+      {
+        VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::${handleType}s ret(nullptr);
+        ret.reserve( ${vectorSize} );
+        for ( auto const & ${handleName} : ${vectorName} )
+        {
+          ret.emplace_back( ${parentName}, ${handleConstructorArguments}${successCodePassToElement} );
+        }
+        return std::move(ret);
+      }
+      else
+      {
+        return android::base::unexpected(result);
+      }
+    }
+${leave})";
+
+  return replaceWithMap( constructorTemplate,
+                         { { "callArguments", generateRAIIHandleConstructorCallArguments( handle, constructorIt, false, {}, false, true ) },
+                           { "staticCreateArguments", generateRAIIHandleConstructorArguments( handle, constructorIt, false, false ) },
+                           { "constructorCall", constructorIt->first },
+                           { "debugHelper", debugHelper },
+                           { "enter", enter },
+                           { "handleConstructorArguments", handleConstructorArguments },
+                           { "handleName", startLowerCase( handleType ) },
+                           { "handleType", handleType },
+                           { "leave", leave },
+                           { "parentName", constructorIt->second.params.front().name },
+                           { "successCheck", generateSuccessCheck( constructorIt->second.successCodes ) },
+                           { "successCodePassToElement", successCodePassToElement },
+                           { "vectorElementType", handleParamIt->type.type },
+                           { "vectorName", startLowerCase( stripPrefix( handleParamIt->name, "p" ) ) },
+                           { "vectorSize", vectorSize } } );
+}
+
+// Generate the throwing constructor for the vector-of-handles container class; the vector-size
+// determination mirrors generateRAIIHandleStaticCreateVector, but the generated code populates
+// `this` and throws on failure instead of returning an expected<> value.
+std::string VulkanHppGenerator::generateRAIIHandleConstructorVector( std::pair<std::string, HandleData> const &                             handle,
+                                                                     std::map<std::string, VulkanHppGenerator::CommandData>::const_iterator constructorIt,
+                                                                     std::vector<ParamData>::const_iterator                                 handleParamIt,
+                                                                     std::string const &                                                    enter,
+                                                                     std::string const &                                                    leave ) const
+{
+  // Determine the expression for the number of returned handles: the `len` attribute either names
+  // a sibling count parameter, or a struct member spelled "pStruct->member".
+  std::string vectorSize;
+  auto        lenIt = std::find_if( constructorIt->second.params.begin(),
+                             constructorIt->second.params.end(),
+                             [&handleParamIt]( ParamData const & pd ) { return pd.name == handleParamIt->len; } );
+  if ( lenIt == constructorIt->second.params.end() )
+  {
+    // len has the form "pStruct->member": size is read from a member of a struct parameter.
+    std::vector<std::string> lenParts = tokenize( handleParamIt->len, "->" );
+    assert( lenParts.size() == 2 );
+    lenIt = std::find_if(
+      constructorIt->second.params.begin(), constructorIt->second.params.end(), [&lenParts]( ParamData const & pd ) { return pd.name == lenParts[0]; } );
+#if !defined( NDEBUG )
+    assert( lenIt != constructorIt->second.params.end() );
+    auto structureIt = m_structures.find( lenIt->type.type );
+    assert( structureIt != m_structures.end() );
+    assert( isStructMember( lenParts[1], structureIt->second.members ) );
+    assert( constructorIt->second.successCodes.size() == 1 );
+#endif
+    vectorSize = startLowerCase( stripPrefix( lenParts[0], "p" ) ) + "." + lenParts[1];
+  }
+  else
+  {
+    // len names a count parameter: size is taken from the sibling input array sharing that count.
+    auto arrayIt = std::find_if( constructorIt->second.params.begin(),
+                                 constructorIt->second.params.end(),
+                                 [&lenIt, &handleParamIt]( ParamData const & pd ) { return ( pd.len == lenIt->name ) && ( pd.name != handleParamIt->name ); } );
+    assert( arrayIt != constructorIt->second.params.end() );
+    vectorSize = startLowerCase( stripPrefix( arrayIt->name, "p" ) ) + ".size()";
+  }
+
+  std::string handleConstructorArguments = generateRAIIHandleSingularConstructorArguments( handle, constructorIt );
+  std::string handleType                 = stripPrefix( handle.first, "Vk" );
+  // With multiple success codes, the actual result is forwarded to each wrapped element.
+  std::string successCodePassToElement   = ( 1 < constructorIt->second.successCodes.size() ) ? ", result" : "";
+
+  // Optionally tag generated code with the generator function's name to ease debugging the generator.
+  std::string debugHelper = "";
+  #ifdef DEBUG_GENERATOR
+  debugHelper = "/*" + std::string(__FUNCTION__) + "*/";
+  #endif
+
+  const std::string constructorTemplate =
+    R"(
+${enter}
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    ${handleType}s( ${constructorArguments} ) ${debugHelper}
+    {
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * dispatcher = ${parentName}.getDispatcher();
+      std::vector<${vectorElementType}> ${vectorName}( ${vectorSize} );
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->${constructorCall}( ${callArguments} ) );
+      if ( ${successCheck} )
+      {
+        this->reserve( ${vectorSize} );
+        for ( auto const & ${handleName} : ${vectorName} )
+        {
+          this->emplace_back( ${parentName}, ${handleConstructorArguments}${successCodePassToElement} );
+        }
+      }
+      else
+      {
+        throwResultException( result, "${constructorCall}" );
+      }
+    }
+#endif
+${leave})";
+
+  return replaceWithMap( constructorTemplate,
+                         { { "callArguments", generateRAIIHandleConstructorCallArguments( handle, constructorIt, false, {}, false, true ) },
+                           { "constructorArguments", generateRAIIHandleConstructorArguments( handle, constructorIt, false, false ) },
+                           { "constructorCall", constructorIt->first },
+                           { "debugHelper", debugHelper },
+                           { "enter", enter },
+                           { "handleConstructorArguments", handleConstructorArguments },
+                           { "handleName", startLowerCase( handleType ) },
+                           { "handleType", handleType },
+                           { "leave", leave },
+                           { "parentName", constructorIt->second.params.front().name },
+                           { "successCheck", generateSuccessCheck( constructorIt->second.successCodes ) },
+                           { "successCodePassToElement", successCodePassToElement },
+                           { "vectorElementType", handleParamIt->type.type },
+                           { "vectorName", startLowerCase( stripPrefix( handleParamIt->name, "p" ) ) },
+                           { "vectorSize", vectorSize } } );
+}
+
+// Generate a non-throwing static `create` factory for the singular variant of a vector-returning
+// command: exactly one handle is requested, so the generated function returns a single RAII
+// handle (wrapped in android::base::expected) instead of a container.
+std::string
+  VulkanHppGenerator::generateRAIIHandleStaticCreateVectorSingular( std::pair<std::string, HandleData> const &                             handle,
+                                                                    std::map<std::string, VulkanHppGenerator::CommandData>::const_iterator constructorIt,
+                                                                    std::vector<ParamData>::const_iterator                                 handleParamIt,
+                                                                    std::string const &                                                    enter,
+                                                                    std::string const &                                                    leave ) const
+{
+  // Indices of parameters that are passed as single elements rather than arrays in this overload.
+  size_t                            returnParam    = static_cast<size_t>( std::distance( constructorIt->second.params.begin(), handleParamIt ) );
+  std::map<size_t, VectorParamData> vectorParams   = determineVectorParams( constructorIt->second.params );
+  std::set<size_t>                  singularParams = determineSingularParams( returnParam, vectorParams );
+
+  auto [parentType, parentName] = getParentTypeAndName( handle );
+  std::string getDispatcher = parentName + ".getDispatcher()";
+
+  std::string callArguments      = generateRAIIHandleConstructorCallArguments( handle, constructorIt, false, singularParams, false, false );
+  std::string failureCheck = generateFailureCheck( constructorIt->second.successCodes );
+
+  // Local variable in the generated code receiving the newly created native handle.
+  std::string localParamType = handle.first;
+  std::string localParamName = generateRAIIHandleConstructorParamName( handle.first, handle.second.destructorIt );
+
+  // Arguments forwarded to the RAII wrapper constructor on success.
+  std::string callConstructorArguments =  generateRAIIHandleStaticCreateToConstructorArguments( handle, handle.second.destructorIt );
+
+  // Optionally tag generated code with the generator function's name to ease debugging the generator.
+  std::string debugHelper = "";
+  #ifdef DEBUG_GENERATOR
+  debugHelper = "/*" + std::string(__FUNCTION__) + "*/";
+  #endif
+
+  const std::string singularConstructorTemplate =
+    R"(
+${enter}
+    static android::base::expected<${handleType}, VULKAN_HPP_NAMESPACE::Result> create( ${staticCreateArguments} ) ${debugHelper}
+    {
+      ${localParamType} ${localParamName};
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( ${getDispatcher}->${constructorCall}( ${callArguments} ) );
+      if ( ${failureCheck} )
+      {
+        return android::base::unexpected(result);
+      }
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::${handleType}(${callConstructorArguments});
+    }
+${leave})";
+
+  return replaceWithMap( singularConstructorTemplate,
+                         { { "callArguments", callArguments },
+                           { "callConstructorArguments", callConstructorArguments },
+                           { "staticCreateArguments", generateRAIIHandleConstructorArguments( handle, constructorIt, true, false ) },
+                           { "constructorCall", constructorIt->first },
+                           { "debugHelper", debugHelper },
+                           { "enter", enter },
+                           { "failureCheck", failureCheck },
+                           { "getDispatcher", getDispatcher },
+                           { "leave", leave },
+                           { "localParamType", localParamType },
+                           { "localParamName", localParamName },
+                           { "handleType", stripPrefix( handle.first, "Vk" ) } } );
+}
+
+// Generate the throwing singular constructor for a vector-returning command.  The native result
+// is stored in m_constructorSuccessCode, so the generated failure check is rewritten to test that
+// member instead of a local `result`.
+std::string
+  VulkanHppGenerator::generateRAIIHandleConstructorVectorSingular( std::pair<std::string, HandleData> const &                             handle,
+                                                                   std::map<std::string, VulkanHppGenerator::CommandData>::const_iterator constructorIt,
+                                                                   std::vector<ParamData>::const_iterator                                 handleParamIt,
+                                                                   std::string const &                                                    enter,
+                                                                   std::string const &                                                    leave ) const
+{
+  // Indices of parameters that are passed as single elements rather than arrays in this overload.
+  size_t                            returnParam    = static_cast<size_t>( std::distance( constructorIt->second.params.begin(), handleParamIt ) );
+  std::map<size_t, VectorParamData> vectorParams   = determineVectorParams( constructorIt->second.params );
+  std::set<size_t>                  singularParams = determineSingularParams( returnParam, vectorParams );
+
+  std::string callArguments      = generateRAIIHandleConstructorCallArguments( handle, constructorIt, false, singularParams, true, true );
+  std::string initializationList = generateRAIIHandleConstructorInitializationList( handle, constructorIt, handle.second.destructorIt, false );
+  assert( !initializationList.empty() );
+  std::string failureCheck = generateFailureCheck( constructorIt->second.successCodes );
+  // NOTE(review): this textually replaces every occurrence of the substring "result" in the
+  // failure check with "m_constructorSuccessCode" — fine for the checks generated today, but
+  // fragile should a success-code name ever contain "result".
+  failureCheck             = std::regex_replace( failureCheck, std::regex( "result" ), "m_constructorSuccessCode" );
+
+  // Optionally tag generated code with the generator function's name to ease debugging the generator.
+  std::string debugHelper = "";
+  #ifdef DEBUG_GENERATOR
+  debugHelper = "/*" + std::string(__FUNCTION__) + "*/";
+  #endif
+
+  const std::string singularConstructorTemplate =
+    R"(
+${enter}
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    ${handleType}( ${constructorArguments} ) ${debugHelper}
+      : ${initializationList}, m_dispatcher( ${firstArgument}.getDispatcher() )
+    {
+      m_constructorSuccessCode = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->${constructorCall}( ${callArguments} ) );
+      if ( ${failureCheck} )
+      {
+        throwResultException( m_constructorSuccessCode, "${constructorCall}" );
+      }
+    }
+#endif
+${leave})";
+
+  return replaceWithMap( singularConstructorTemplate,
+                         { { "initializationList", initializationList },
+                           { "callArguments", callArguments },
+                           { "constructorArguments", generateRAIIHandleConstructorArguments( handle, constructorIt, true, false ) },
+                           { "constructorCall", constructorIt->first },
+                           { "debugHelper", debugHelper },
+                           { "enter", enter },
+                           { "firstArgument", constructorIt->second.params[0].name },
+                           { "failureCheck", failureCheck },
+                           { "leave", leave },
+                           { "handleType", stripPrefix( handle.first, "Vk" ) } } );
+}
+
+std::pair<std::string, std::string> VulkanHppGenerator::generateRAIIHandleConstructorVoid( std::pair<std::string, HandleData> const &         handle,
+                                                                                           std::map<std::string, CommandData>::const_iterator constructorIt,
+                                                                                           std::string const &                                enter,
+                                                                                           std::string const &                                leave ) const
+{
+  assert( constructorIt->second.successCodes.empty() && constructorIt->second.errorCodes.empty() );
+  std::vector<size_t> returnParams = determineReturnParams( constructorIt->second.params );
+  if ( returnParams.size() == 1 )
+  {
+    assert( isHandleType( constructorIt->second.params[returnParams[0]].type.type ) );
+    std::map<size_t, VectorParamData> vectorParams = determineVectorParams( constructorIt->second.params );
+    if ( vectorParams.empty() )
+    {
+      return std::make_pair( generateRAIIHandleConstructorVoid1Return0Vector( handle, constructorIt, enter, leave ), "" );
+    }
+  }
+  return std::make_pair( "", "" );
+}
+
+// Generate the constructor for a handle whose construction command returns void: the generated
+// code simply forwards to the native call — no result, no failure handling, no exception guard.
+std::string
+  VulkanHppGenerator::generateRAIIHandleConstructorVoid1Return0Vector( std::pair<std::string, HandleData> const &                             handle,
+                                                                       std::map<std::string, VulkanHppGenerator::CommandData>::const_iterator constructorIt,
+                                                                       std::string const &                                                    enter,
+                                                                       std::string const &                                                    leave ) const
+{
+  // Append a separator so the dispatcher initializer in the template follows the member initializers.
+  std::string initializers = generateRAIIHandleConstructorInitializationList( handle, constructorIt, handle.second.destructorIt, false );
+  if ( !initializers.empty() )
+  {
+    initializers += ", ";
+  }
+
+  const std::string constructorTemplate =
+    R"(
+${enter}    ${handleType}( ${constructorArguments} )
+      : ${initializationList}m_dispatcher( ${firstArgument}.getDispatcher() )
+    {
+      getDispatcher()->${constructorCall}( ${callArguments} );
+    }
+${leave})";
+
+  return replaceWithMap( constructorTemplate,
+                         { { "callArguments", generateRAIIHandleConstructorCallArguments( handle, constructorIt, false, {}, true, true ) },
+                           { "constructorArguments", generateRAIIHandleConstructorArguments( handle, constructorIt, false, false ) },
+                           { "constructorCall", constructorIt->first },
+                           { "enter", enter },
+                           { "firstArgument", constructorIt->second.params[0].name },
+                           { "leave", leave },
+                           { "handleType", stripPrefix( handle.first, "Vk" ) },
+                           { "initializationList", initializers } } );
+}
+
+// Generate the vk::raii::Context class, the root object of the RAII hierarchy: it owns the
+// dynamic loader (when VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL is set) and the ContextDispatcher,
+// and is move-only.  Member function declarations are produced by
+// generateRAIIHandleCommandDeclarations; specialFunctions presumably excludes commands handled
+// elsewhere — confirm against that helper.
+std::string VulkanHppGenerator::generateRAIIHandleContext( std::pair<std::string, HandleData> const & handle,
+                                                           std::set<std::string> const &              specialFunctions ) const
+{
+  const std::string contextTemplate = R"(
+    class Context
+    {
+    public:
+#if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL
+      Context()
+        : m_dispatcher( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ContextDispatcher(
+            m_dynamicLoader.getProcAddress<PFN_vkGetInstanceProcAddr>( "vkGetInstanceProcAddr" ) ) )
+#else
+      Context( PFN_vkGetInstanceProcAddr getInstanceProcAddr )
+        : m_dispatcher( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ContextDispatcher( getInstanceProcAddr ) )
+#endif
+      {}
+
+      ~Context() = default;
+
+      Context( Context const & ) = delete;
+      Context( Context && rhs ) VULKAN_HPP_NOEXCEPT
+#if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL
+        : m_dynamicLoader( std::move( rhs.m_dynamicLoader ) )
+        , m_dispatcher( rhs.m_dispatcher.release() )
+#else
+        : m_dispatcher( rhs.m_dispatcher.release() )
+#endif
+      {}
+      Context & operator=( Context const & ) = delete;
+      Context & operator=( Context && rhs ) VULKAN_HPP_NOEXCEPT
+      {
+        if ( this != &rhs )
+        {
+#if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL
+          m_dynamicLoader = std::move( rhs.m_dynamicLoader );
+#endif
+          m_dispatcher.reset( rhs.m_dispatcher.release() );
+        }
+        return *this;
+      }
+
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ContextDispatcher const * getDispatcher() const
+      {
+        VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+        return &*m_dispatcher;
+      }
+
+      void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Context & rhs )
+      {
+#if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL
+        std::swap( m_dynamicLoader, rhs.m_dynamicLoader );
+#endif
+        m_dispatcher.swap( rhs.m_dispatcher );
+      }
+
+${memberFunctionDeclarations}
+
+    private:
+#if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL
+      VULKAN_HPP_NAMESPACE::DynamicLoader                                                 m_dynamicLoader;
+#endif
+      std::unique_ptr<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ContextDispatcher> m_dispatcher;
+    };
+
+)";
+
+  // The only template substitution is the set of generated member function declarations.
+  return replaceWithMap( contextTemplate, { { "memberFunctionDeclarations", generateRAIIHandleCommandDeclarations( handle, specialFunctions ) } } );
+}
+
+// Build the argument list for the native destroy/free call emitted in a RAII handle's clear().
+// Each destructor parameter is mapped to a stored member (m_<name>), to the stored allocation
+// callbacks, or — for a bare uint32_t count of a destroyed array — to the literal "1", since the
+// RAII wrapper always destroys exactly one element.
+std::string VulkanHppGenerator::generateRAIIHandleDestructorCallArguments( std::string const &                                handleType,
+                                                                           std::map<std::string, CommandData>::const_iterator destructorIt ) const
+{
+  std::string arguments;
+  bool        encounteredArgument = false;
+  // Iterate by const reference: ParamData holds several strings, so copying it per iteration
+  // (as the previous `auto param` did) is needless work.
+  for ( auto const & param : destructorIt->second.params )
+  {
+    if ( encounteredArgument )
+    {
+      arguments += ", ";
+    }
+    if ( param.type.type == handleType )
+    {
+      // The handle being destroyed: passed by value, or via pointer to the stored member.
+      std::string handleName = param.name;
+      if ( param.type.isValue() )
+      {
+        arguments += "static_cast<" + handleType + ">( m_" + handleName + " )";
+      }
+      else
+      {
+        arguments += "reinterpret_cast<" + handleType + " const *>( &m_" + stripPluralS( startLowerCase( stripPrefix( handleName, "p" ) ), m_tags ) + " )";
+      }
+    }
+    else if ( param.type.type == "VkAllocationCallbacks" )
+    {
+      // vk::AllocationCallbacks is stored as a member of the handle class
+      arguments += "reinterpret_cast<const VkAllocationCallbacks *>( m_allocator )";
+    }
+    else if ( isHandleType( param.type.type ) )
+    {
+      // Some other handle (e.g. the parent or a pool) that the wrapper stores as a member.
+      assert( param.arraySizes.empty() );
+      std::string argument = "m_" + param.name;
+      if ( param.type.isValue() )
+      {
+        arguments += "static_cast<" + param.type.type + ">( " + argument + " )";
+      }
+      else
+      {
+        assert( param.type.isConstPointer() );
+        assert( !param.len.empty() &&
+                ( std::find_if( destructorIt->second.params.begin(),
+                                destructorIt->second.params.end(),
+                                [&param]( ParamData const & pd ) { return pd.name == param.len; } ) != destructorIt->second.params.end() ) );
+        arguments += "reinterpret_cast<" + param.type.type + " const *>( &" + argument + " )";
+      }
+    }
+    else
+    {
+      // The only remaining case: a uint32_t count of some array parameter; a single RAII handle
+      // always destroys exactly one element.
+      assert( ( param.type.type == "uint32_t" ) && param.type.isValue() && param.arraySizes.empty() && param.len.empty() && !param.optional );
+      assert( std::find_if( destructorIt->second.params.begin(),
+                            destructorIt->second.params.end(),
+                            [&param]( ParamData const & pd ) { return pd.len == param.name; } ) != destructorIt->second.params.end() );
+      arguments += "1";
+    }
+    encounteredArgument = true;
+  }
+  return arguments;
+}
+
+// Assemble the per-handle code fragments used by the RAII handle class template:
+// the clear() body, the optional getConstructorSuccessCode() accessor, the member
+// variable declarations, the move-constructor initializer list, the move-assignment
+// statements, the swap() body, and the release() body.  They are returned as a tuple
+// in the order ( clearMembers, getConstructorSuccessCode, memberVariables,
+// moveConstructorInitializerList, moveAssignmentInstructions, swapMembers, releaseMembers ).
+std::tuple<std::string, std::string, std::string, std::string, std::string, std::string, std::string>
+  VulkanHppGenerator::generateRAIIHandleDetails( std::pair<std::string, HandleData> const & handle ) const
+{
+  // Only handles whose constructors can return more than one success code store that
+  // code and expose it via getConstructorSuccessCode().
+  std::string getConstructorSuccessCode;
+  bool        multiSuccessCodeContructor = isMultiSuccessCodeConstructor( handle.second.constructorIts );
+  if ( multiSuccessCodeContructor )
+  {
+    getConstructorSuccessCode = R"(
+    VULKAN_HPP_NAMESPACE::Result getConstructorSuccessCode() const
+    {
+      return m_constructorSuccessCode;
+    }
+)";
+  }
+
+  auto [parentType, parentName] = getParentTypeAndName( handle );
+
+  std::string handleName = generateRAIIHandleConstructorParamName( handle.first, handle.second.destructorIt );
+
+  std::string clearMembers, moveConstructorInitializerList, moveAssignmentInstructions, memberVariables, swapMembers, releaseMembers;
+
+  if ( handle.second.destructorIt != m_commands.end() )
+  {
+    // The handle has a destructor command: clear() calls it (guarded against a null
+    // handle), and every destructor parameter that is neither a count of another
+    // parameter nor redundant becomes a member of the RAII class.
+    moveAssignmentInstructions = "          clear();";
+
+    clearMembers = "        if ( m_" + handleName + " )\n";
+    clearMembers += "        {\n";
+    clearMembers += "          getDispatcher()->" + handle.second.destructorIt->first + "( " +
+                    generateRAIIHandleDestructorCallArguments( handle.first, handle.second.destructorIt ) + " );\n";
+    clearMembers += "        }";
+    for ( auto const & destructorParam : handle.second.destructorIt->second.params )
+    {
+      std::string memberName, memberType;
+      if ( destructorParam.type.type == "Vk" + parentType )
+      {
+        // the parent handle (e.g. Device) is stored under its own name
+        memberName = parentName;
+        memberType = "VULKAN_HPP_NAMESPACE::" + parentType;
+      }
+      else if ( destructorParam.type.type == handle.first )
+      {
+        // the handle itself
+        memberName = handleName;
+        memberType = generateNamespacedType( handle.first );
+      }
+      else if ( std::find_if( handle.second.destructorIt->second.params.begin(),
+                              handle.second.destructorIt->second.params.end(),
+                              [&destructorParam]( ParamData const & pd )
+                              { return pd.len == destructorParam.name; } ) == handle.second.destructorIt->second.params.end() )
+      {
+        // any other parameter that is not used as the length of another parameter is
+        // kept as an additional member (e.g. the allocation callbacks)
+        std::string name = destructorParam.name;
+        if ( !destructorParam.type.isValue() )
+        {
+          name = startLowerCase( stripPrefix( name, "p" ) );
+        }
+        memberName = name;
+        memberType = destructorParam.type.compose( "VULKAN_HPP_NAMESPACE" );
+      }
+      if ( !memberName.empty() )
+      {
+        clearMembers += "\n      m_" + memberName + " = nullptr;";
+        moveConstructorInitializerList += "m_" + memberName + "( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_" + memberName + ", {} ) ), ";
+        moveAssignmentInstructions +=
+          "\n          m_" + memberName + " = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_" + memberName + ", {} );";
+        memberVariables += "\n    " + memberType + " m_" + memberName + " = {};";
+        swapMembers += "\n      std::swap( m_" + memberName + ", rhs.m_" + memberName + " );";
+        if ( destructorParam.type.type != handle.first )
+        {
+          // release() gives up ownership of the handle only; the other members are reset
+          releaseMembers += "\n      m_" + memberName + " = nullptr;";
+        }
+      }
+    }
+  }
+  else
+  {
+    // No destructor command for this handle.
+    if ( !handle.second.secondLevelCommands.empty() )
+    {
+      // A handle with second-level commands still keeps the handle it was constructed
+      // from (the first parameter of its first constructor) as a member.
+      assert( !handle.second.constructorIts.empty() );
+      assert( !handle.second.constructorIts.front()->second.params.empty() );
+      auto const & frontType = handle.second.constructorIts.front()->second.params.front().type.type;
+      assert( isHandleType( frontType ) );
+#if !defined( NDEBUG )
+      auto handleIt = m_handles.find( "Vk" + parentType );
+#endif
+      assert( handleIt != m_handles.end() );
+      assert( handleIt->second.parent == frontType );
+      std::string frontName = handle.second.constructorIts.front()->second.params.front().name;
+
+      clearMembers += "\n        m_" + frontName + " = nullptr;";
+      moveConstructorInitializerList = "m_" + frontName + "( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_" + frontName + ", {} ) ), ";
+      moveAssignmentInstructions =
+        "\n          m_" + frontName + " = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_" + frontName + ", {} );";
+      memberVariables = "\n    VULKAN_HPP_NAMESPACE::" + stripPrefix( frontType, "Vk" ) + " m_" + frontName + " = {};";
+      swapMembers     = "\n      std::swap( m_" + frontName + ", rhs.m_" + frontName + " );";
+      releaseMembers += "\n        m_" + frontName + " = nullptr;";
+    }
+    clearMembers += "\n        m_" + handleName + " = nullptr;";
+    moveConstructorInitializerList += "m_" + handleName + "( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_" + handleName + ", {} ) ), ";
+    moveAssignmentInstructions +=
+      "\n          m_" + handleName + " = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_" + handleName + ", {} );";
+    memberVariables += "\n    " + generateNamespacedType( handle.first ) + " m_" + handleName + " = {};";
+    swapMembers += "\n      std::swap( m_" + handleName + ", rhs.m_" + handleName + " );";
+  }
+
+  if ( multiSuccessCodeContructor )
+  {
+    // the stored constructor success code takes part in clear/move/swap/release as well
+    clearMembers += "\n        m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown;";
+    memberVariables += "\n    VULKAN_HPP_NAMESPACE::Result m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown;";
+    swapMembers += "\n      std::swap( m_constructorSuccessCode, rhs.m_constructorSuccessCode );";
+    moveConstructorInitializerList +=
+      "m_constructorSuccessCode( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_constructorSuccessCode, {} ) ), ";
+    moveAssignmentInstructions +=
+      "\n          m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_constructorSuccessCode, {} );";
+    releaseMembers += "\n        m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown;";
+  }
+
+  // Instance and Device own their dispatcher via unique_ptr; every other handle holds a
+  // plain pointer to the dispatcher of the Device or Instance it was created from.
+  if ( handle.first == "VkInstance" )
+  {
+    memberVariables += "\n      std::unique_ptr<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher> m_dispatcher;";
+  }
+  else if ( handle.first == "VkDevice" )
+  {
+    memberVariables += "\n      std::unique_ptr<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher> m_dispatcher;";
+  }
+  else if ( handle.second.constructorIts.front()->second.params.front().type.type == "VkDevice" )
+  {
+    memberVariables += "\n      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;";
+  }
+  else
+  {
+    memberVariables += "\n      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * m_dispatcher = nullptr;";
+  }
+  clearMembers += "\n        m_dispatcher = nullptr;";
+  swapMembers += "\n      std::swap( m_dispatcher, rhs.m_dispatcher );";
+  releaseMembers += "\n        m_dispatcher = nullptr;";
+  // release() hands the raw handle back to the caller
+  releaseMembers += "\n        return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_" + handleName + ", nullptr );";
+
+  if ( ( handle.first == "VkInstance" ) || ( handle.first == "VkDevice" ) )
+  {
+    // unique_ptr members are moved via release()/reset() instead of exchange()
+    moveConstructorInitializerList += "m_dispatcher( rhs.m_dispatcher.release() )";
+    moveAssignmentInstructions += "\n        m_dispatcher.reset( rhs.m_dispatcher.release() );";
+  }
+  else
+  {
+    moveConstructorInitializerList += "m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )";
+    moveAssignmentInstructions += "\n        m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );";
+  }
+
+  return std::make_tuple(
+    clearMembers, getConstructorSuccessCode, memberVariables, moveConstructorInitializerList, moveAssignmentInstructions, swapMembers, releaseMembers );
+}
+
+// Generate a "class Foo;" forward declaration for every handle type referenced by the
+// given require blocks, wrapped in the protection guards belonging to the given title.
+std::string VulkanHppGenerator::generateRAIIHandleForwardDeclarations( std::vector<RequireData> const & requireData, std::string const & title ) const
+{
+  std::string declarations;
+  for ( auto const & requireBlock : requireData )
+  {
+    for ( auto const & typeName : requireBlock.types )
+    {
+      auto const handleEntry = m_handles.find( typeName );
+      if ( handleEntry == m_handles.end() )
+      {
+        continue;  // not a handle type -> nothing to forward-declare
+      }
+      declarations += "  class " + stripPrefix( handleEntry->first, "Vk" ) + ";\n";
+    }
+  }
+  return addTitleAndProtection( title, declarations );
+}
+
+// Generate the complete RAII handles section: forward declarations for all handle
+// classes (collected per feature and per extension), followed by the handle class
+// definitions themselves.
+std::string VulkanHppGenerator::generateRAIIHandles() const
+{
+  const std::string raiiHandlesTemplate = R"(
+  //========================================
+  //=== RAII HANDLE forward declarations ===
+  //========================================
+
+${forwardDeclarations}
+
+  //====================
+  //=== RAII HANDLES ===
+  //====================
+
+${raiiHandles}
+)";
+
+  std::string forwardDeclarations;
+  for ( auto const & feature : m_features )
+  {
+    forwardDeclarations += generateRAIIHandleForwardDeclarations( feature.second.requireData, feature.first );
+  }
+  for ( auto const & extIt : m_extensionsByNumber )
+  {
+    forwardDeclarations += generateRAIIHandleForwardDeclarations( extIt.second->second.requireData, extIt.second->first );
+  }
+
+  std::set<std::string> listedHandles;
+  auto                  handleIt = m_handles.begin();
+  // The first entry of m_handles has an empty key; it is fed into
+  // generateRAIIHandleContext(), all real handles follow after it.
+  assert( handleIt->first.empty() );
+  std::string raiiHandles = generateRAIIHandleContext( *handleIt, m_RAIISpecialFunctions );
+  for ( ++handleIt; handleIt != m_handles.end(); ++handleIt )
+  {
+    raiiHandles += generateRAIIHandle( *handleIt, listedHandles, m_RAIISpecialFunctions );
+  }
+  return replaceWithMap( raiiHandlesTemplate, { { "forwardDeclarations", forwardDeclarations }, { "raiiHandles", raiiHandles } } );
+}
+
+// Build the argument list for a RAII handle's singular constructor: the lower-cased
+// handle name first, followed by the destructor parameters that are needed later on —
+// taken either directly from the constructor's own parameters, or fished out of a
+// structure passed to the constructor.
+std::string VulkanHppGenerator::generateRAIIHandleSingularConstructorArguments( std::pair<std::string, HandleData> const &         handle,
+                                                                                std::map<std::string, CommandData>::const_iterator constructorIt ) const
+{
+  std::string arguments = startLowerCase( stripPrefix( handle.first, "Vk" ) );
+  if ( handle.second.destructorIt != m_commands.end() )
+  {
+    auto [parentType, parentName] = getParentTypeAndName( handle );
+    parentType                    = "Vk" + parentType;
+    for ( auto const & destructorParam : handle.second.destructorIt->second.params )
+    {
+      // skip the parent, the handle itself, and any parameter that merely holds the
+      // length of another destructor parameter
+      if ( ( destructorParam.type.type != parentType ) && ( destructorParam.type.type != handle.first ) &&
+           ( std::find_if( handle.second.destructorIt->second.params.begin(),
+                           handle.second.destructorIt->second.params.end(),
+                           [&destructorParam]( ParamData const & pd )
+                           { return pd.len == destructorParam.name; } ) == handle.second.destructorIt->second.params.end() ) )
+      {
+        if ( std::find_if( constructorIt->second.params.begin(),
+                           constructorIt->second.params.end(),
+                           [&destructorParam]( ParamData const & pd )
+                           { return pd.type.type == destructorParam.type.type; } ) != constructorIt->second.params.end() )
+        {
+          // the destructor parameter's type also appears among the constructor's
+          // parameters, so it can be forwarded directly
+          if ( isHandleType( destructorParam.type.type ) )
+          {
+            assert( destructorParam.type.isValue() );
+            arguments += ", static_cast<" + destructorParam.type.type + ">( *" + destructorParam.name + " )";
+          }
+          else
+          {
+            assert( destructorParam.type.type == "VkAllocationCallbacks" );
+            arguments += ", allocator";
+          }
+        }
+        else
+        {
+          // otherwise, look for a structure among the constructor's parameters that
+          // has a member of the required type, and pass that member along
+#if !defined( NDEBUG )
+          bool found = false;
+#endif
+          for ( auto const & constructorParam : constructorIt->second.params )
+          {
+            auto structureIt = m_structures.find( constructorParam.type.type );
+            if ( structureIt != m_structures.end() )
+            {
+              auto memberIt = findStructMemberItByType( destructorParam.type.type, structureIt->second.members );
+              if ( memberIt != structureIt->second.members.end() )
+              {
+#if !defined( NDEBUG )
+                found = true;
+#endif
+                assert( !constructorParam.type.isValue() );
+                std::string argument = startLowerCase( stripPrefix( constructorParam.name, "p" ) ) + "." + memberIt->name;
+                if ( isHandleType( memberIt->type.type ) )
+                {
+                  argument = "static_cast<" + memberIt->type.type + ">( " + argument + " )";
+                }
+                arguments += ", " + argument;
+                break;
+              }
+            }
+          }
+          assert( found );
+        }
+      }
+    }
+  }
+  return arguments;
+}
+
+// Generate the size checks comparing the vector arguments of a RAII command.  On a
+// size mismatch, the generated code throws a LogicError when exceptions are enabled,
+// and logs fatally (LOG(FATAL)) otherwise.
+std::string VulkanHppGenerator::generateRAIIHandleVectorSizeCheck( std::string const &                           name,
+                                                                   CommandData const &                           commandData,
+                                                                   size_t                                        initialSkipCount,
+                                                                   std::map<size_t, std::vector<size_t>> const & countToVectorMap,
+                                                                   std::set<size_t> const &                      skippedParams ) const
+{
+  // two vectors share a count parameter -> their sizes have to agree
+  std::string const throwTemplate =
+    R"#(    if ( ${zeroSizeCheck}${firstVectorName}.size() != ${secondVectorName}.size() )
+  {
+    #ifndef VULKAN_HPP_NO_EXCEPTIONS
+    throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::${className}::${commandName}: ${firstVectorName}.size() != ${secondVectorName}.size()" );
+    #else
+    LOG(FATAL) << VULKAN_HPP_NAMESPACE_STRING "::${className}::${commandName}: ${firstVectorName}.size() != ${secondVectorName}.size()";
+    #endif
+  })#";
+
+  // same check for void-typed vectors: compare byte sizes via sizeof of the data types
+  std::string const throwTemplateVoid =
+    R"#(    if ( ${zeroSizeCheck}${firstVectorName}.size() * sizeof( ${firstDataType} ) != ${secondVectorName}.size() * sizeof( ${secondDataType} ) )
+  {
+    #ifndef VULKAN_HPP_NO_EXCEPTIONS
+    throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::${className}::${commandName}: ${firstVectorName}.size() * sizeof( ${firstDataType} ) != ${secondVectorName}.size() * sizeof( ${secondDataType} )" );
+    #else
+    LOG(FATAL) << VULKAN_HPP_NAMESPACE_STRING "::${className}::${commandName}: ${firstVectorName}.size() * sizeof( ${firstDataType} ) != ${secondVectorName}.size() * sizeof( ${secondDataType} )";
+    #endif
+  })#";
+
+  // a single vector whose length is given by a struct member ("pXxx->member")
+  std::string const throwTemplateByLen = R"#(    if ( ${vectorName}.size() != ${sizeValue} )
+    {
+      #ifndef VULKAN_HPP_NO_EXCEPTIONS
+      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::${className}::${commandName}: ${vectorName}.size() != ${sizeValue}" );
+      #else
+      LOG(FATAL) << VULKAN_HPP_NAMESPACE_STRING "::${className}::${commandName}: ${vectorName}.size() != ${sizeValue}";
+      #endif
+    })#";
+
+  std::string className   = stripPrefix( commandData.params[initialSkipCount - 1].type.type, "Vk" );
+  std::string commandName = generateCommandName( name, commandData.params, initialSkipCount, m_tags );
+
+  // countToVectorMap maps the index of a count parameter to the indices of the
+  // vector parameters it describes
+  std::string sizeChecks;
+  for ( auto const & cvm : countToVectorMap )
+  {
+    size_t      defaultStartIndex = determineDefaultStartIndex( commandData.params, skippedParams );
+    std::string firstVectorName   = startLowerCase( stripPrefix( commandData.params[cvm.second[0]].name, "p" ) );
+
+    if ( cvm.second.size() == 1 )
+    {
+      assert( isLenByStructMember( commandData.params[cvm.second[0]].len, commandData.params[cvm.first] ) );
+
+      // len has the form "pStruct->member"; split it and compare against that member
+      std::vector<std::string> lenParts = tokenize( commandData.params[cvm.second[0]].len, "->" );
+      assert( lenParts.size() == 2 );
+      std::string sizeValue = startLowerCase( stripPrefix( lenParts[0], "p" ) ) + "." + lenParts[1];
+      sizeChecks += replaceWithMap(
+        throwTemplateByLen, { { "className", className }, { "commandName", commandName }, { "sizeValue", sizeValue }, { "vectorName", firstVectorName } } );
+    }
+    else
+    {
+      // several vectors share this count: each one has to match the size of the first
+      for ( size_t i = 1; i < cvm.second.size(); i++ )
+      {
+        std::string secondVectorName  = startLowerCase( stripPrefix( commandData.params[cvm.second[i]].name, "p" ) );
+        bool        withZeroSizeCheck = commandData.params[cvm.second[i]].optional && ( defaultStartIndex <= cvm.second[i] );
+        if ( commandData.params[cvm.second[0]].type.type == "void" )
+        {
+          assert( commandData.params[cvm.second[i]].type.type == "void" );
+          std::string firstDataType  = stripPrefix( commandData.params[cvm.second[0]].name, "p" ) + "Type";
+          std::string secondDataType = stripPrefix( commandData.params[cvm.second[i]].name, "p" ) + "Type";
+          sizeChecks += replaceWithMap( throwTemplateVoid,
+                                        { { "firstDataType", firstDataType },
+                                          { "firstVectorName", firstVectorName },
+                                          { "className", className },
+                                          { "commandName", commandName },
+                                          { "secondDataType", secondDataType },
+                                          { "secondVectorName", secondVectorName },
+                                          { "zeroSizeCheck", withZeroSizeCheck ? ( "!" + secondVectorName + ".empty() && " ) : "" } } );
+        }
+        else
+        {
+          sizeChecks += replaceWithMap( throwTemplate,
+                                        { { "firstVectorName", firstVectorName },
+                                          { "className", className },
+                                          { "commandName", commandName },
+                                          { "secondVectorName", secondVectorName },
+                                          { "zeroSizeCheck", withZeroSizeCheck ? ( "!" + secondVectorName + ".empty() && " ) : "" } } );
+        }
+        if ( i + 1 < cvm.second.size() )
+        {
+          sizeChecks += "\n";
+        }
+      }
+    }
+  }
+  if ( !sizeChecks.empty() )
+  {
+    sizeChecks += "\n";
+  }
+
+  return sizeChecks;
+}
+
+// Generate the left-hand side of a native call ("<type> result = "), or an empty
+// string when the call's return value is not stored: either the command returns void,
+// or it returns a VkResult with a single success code and no error codes.
+std::string VulkanHppGenerator::generateResultAssignment( CommandData const & commandData ) const
+{
+  bool const alwaysSucceeds =
+    ( commandData.returnType == "VkResult" ) && ( commandData.successCodes.size() == 1 ) && commandData.errorCodes.empty();
+  if ( ( commandData.returnType == "void" ) || alwaysSucceeds )
+  {
+    return "";
+  }
+  return commandData.returnType + " result = ";
+}
+
+// Generate the resultCheck(...) call validating a command's VkResult against its
+// success codes; commands without error codes need no check and yield an empty string.
+std::string VulkanHppGenerator::generateResultCheck(
+  CommandData const & commandData, std::string const & className, std::string const & classSeparator, std::string commandName, bool enumerating ) const
+{
+  if ( commandData.errorCodes.empty() )
+  {
+    return "";
+  }
+
+  std::string const successCodeList = generateSuccessCodeList( commandData.successCodes, enumerating );
+
+  std::string const resultCheckTemplate =
+    R"(resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::${className}${classSeparator}${commandName}"${successCodeList} );)";
+
+  return replaceWithMap(
+    resultCheckTemplate,
+    { { "className", className }, { "classSeparator", classSeparator }, { "commandName", commandName }, { "successCodeList", successCodeList } } );
+}
+
+// Intended only for `enum class Result`!
+// Generates one exception class (derived from SystemError) per VK_ERROR_* value of
+// VkResult, each wrapped in its protection guards.
+std::string VulkanHppGenerator::generateResultExceptions() const
+{
+  const std::string templateString = R"(
+${enter}  class ${className} : public SystemError
+  {
+  public:
+    ${className}( std::string const & message )
+      : SystemError( make_error_code( ${enumName}::${enumMemberName} ), message ) {}
+    ${className}( char const * message )
+      : SystemError( make_error_code( ${enumName}::${enumMemberName} ), message ) {}
+  };
+${leave})";
+
+  auto const  enumIt = m_enums.find( "VkResult" );
+  std::string str;
+  for ( auto const & value : enumIt->second.values )
+  {
+    // only VK_ERROR_* codes get a dedicated exception class
+    if ( !value.name.starts_with( "VK_ERROR" ) )
+    {
+      continue;
+    }
+    auto [enter, leave]         = generateProtection( getProtect( value ) );
+    std::string const valueName = generateEnumValueName( enumIt->first, value.name, false, m_tags );
+    str += replaceWithMap( templateString,
+                           { { "className", stripPrefix( valueName, "eError" ) + "Error" },
+                             { "enter", enter },
+                             { "enumName", stripPrefix( enumIt->first, "Vk" ) },
+                             { "enumMemberName", valueName },
+                             { "leave", leave } } );
+  }
+  return str;
+}
+
+// Generate the return statement terminating a generated command; the shape depends on
+// the native return type, the number of success/error codes, the unique flavour, and
+// whether the raii or the standard interface is being generated.  An empty string
+// means no return statement is emitted.
+std::string VulkanHppGenerator::generateReturnStatement( std::string const & commandName,
+                                                         CommandData const & commandData,
+                                                         std::string const & returnVariable,
+                                                         std::string const & returnType,
+                                                         std::string const & dataType,
+                                                         size_t              initialSkipCount,
+                                                         size_t              returnParam,
+                                                         CommandFlavourFlags flavourFlags,
+                                                         bool                enumerating,
+                                                         bool                raii ) const
+{
+  bool unique = flavourFlags & CommandFlavourFlagBits::unique;
+
+  std::string returnStatement;
+  if ( commandData.returnType.starts_with( "Vk" ) )
+  {
+    if ( ( commandData.successCodes.size() == 1 ) || enumerating )
+    {
+      // single success code (or an enumerating command): just hand back the data,
+      // possibly wrapped by createResultValueType
+      assert( commandData.successCodes[0] == "VK_SUCCESS" );
+      if ( raii || commandData.errorCodes.empty() )
+      {
+        if ( !returnVariable.empty() )
+        {
+          returnStatement = "return " + returnVariable + ";";
+        }
+      }
+      else
+      {
+        if ( returnVariable.empty() )
+        {
+          assert( !unique );
+          returnStatement = "return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );";
+        }
+        else if ( unique )
+        {
+          // unique flavour: wrap the returned handle(s) into UniqueHandle(s)
+          assert( returnParam != INVALID_INDEX );
+          returnStatement = "return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), ";
+          if ( dataType.starts_with( "std::" ) )
+          {
+            // a vector of handles has already been wrapped into "unique<Name>"
+            returnStatement += "std::move( unique" + startUpperCase( returnVariable ) + " )";
+          }
+          else
+          {
+            returnStatement += "UniqueHandle<" + dataType + ", Dispatch>( " + returnVariable + ", " +
+                               generateObjectDeleter( commandName, commandData, initialSkipCount, returnParam ) + " )";
+          }
+          returnStatement += " );";
+        }
+        else
+        {
+          returnStatement = "return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), " + returnVariable + " );";
+        }
+      }
+    }
+    else
+    {
+      // multiple success codes: the result is part of the return value
+      if ( returnVariable.empty() )
+      {
+        assert( !unique );
+        returnStatement = "return static_cast<VULKAN_HPP_NAMESPACE::" + stripPrefix( commandData.returnType, "Vk" ) + ">( result );";
+      }
+      else if ( unique )
+      {
+        assert( returnParam != INVALID_INDEX );
+        assert( returnType.starts_with( "ResultValue<" ) && returnType.ends_with( ">" ) );
+        returnStatement = "return " + returnType + "( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), ";
+        if ( dataType.starts_with( "std::" ) )
+        {
+          returnStatement += "std::move( unique" + startUpperCase( returnVariable ) + " )";
+        }
+        else
+        {
+          returnStatement += "UniqueHandle<" + dataType + ", Dispatch>( " + returnVariable + ", " +
+                             generateObjectDeleter( commandName, commandData, initialSkipCount, returnParam ) + " )";
+        }
+        returnStatement += " );";
+      }
+      else
+      {
+        // raii returns a std::pair<Result, data>, the standard interface a ResultValue
+        assert( returnType.starts_with( raii ? "std::pair<VULKAN_HPP_NAMESPACE::Result, " : "ResultValue<" ) && returnType.ends_with( ">" ) );
+        returnStatement =
+          "return " + ( raii ? "std::make_pair" : returnType ) + "( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), " + returnVariable + " );";
+      }
+    }
+  }
+  else
+  {
+    // non-Vk native return type: pass "result" or the return variable straight through
+    assert( !unique );
+    if ( returnVariable.empty() )
+    {
+      if ( commandData.returnType != "void" )
+      {
+        returnStatement = "return result;";
+      }
+    }
+    else
+    {
+      returnStatement = "return " + returnVariable + ";";
+    }
+  }
+  return returnStatement;
+}
+
+// Determine the C++ return type of a generated command, taking into account the
+// chained (StructureChain) and unique (UniqueHandle) flavours, the number of success
+// codes, and the differences between the raii and the standard interfaces.
+//   commandData  - the command's parsed data (native return type, success/error codes)
+//   returnParams - indices of the command's output parameters
+//   vectorParams - mapping of vector parameters to their count parameters
+//   flavourFlags - chained/unique flavour selection
+//   raii         - true when generating the raii interface
+//   dataType     - the (composed) data type of the output
+std::string VulkanHppGenerator::generateReturnType( CommandData const &                       commandData,
+                                                    std::vector<size_t> const &               returnParams,
+                                                    std::map<size_t, VectorParamData> const & vectorParams,
+                                                    CommandFlavourFlags                       flavourFlags,
+                                                    bool                                      raii,
+                                                    std::string const &                       dataType ) const
+{
+  bool chained = flavourFlags & CommandFlavourFlagBits::chained;
+  bool unique  = flavourFlags & CommandFlavourFlagBits::unique;
+
+  std::string modifiedDataType = dataType;
+  if ( chained )
+  {
+    // chained data is returned as StructureChain(s) instead of the plain struct type
+    assert( !unique );
+    modifiedDataType = dataType.starts_with( "std::vector" )
+                       ? ( std::string( "std::vector<StructureChain" ) + ( raii ? "" : ", StructureChainAllocator" ) + ">" )
+                       : "StructureChain<X, Y, Z...>";
+  }
+  else if ( unique )
+  {
+    assert( !chained );
+    assert( ( returnParams.size() == 1 ) && isHandleType( commandData.params[returnParams[0]].type.type ) );
+    if ( dataType.starts_with( "std::vector" ) )
+    {
+      // wrap the vector's element type into a UniqueHandle.
+      // Fix: assert on the find() result *before* adding 1 — the previous code
+      // asserted ( find( '<' ) + 1 ) != npos, which can never fire since npos + 1
+      // wraps around to 0.
+      auto const openAngle = dataType.find( '<' );
+      assert( openAngle != std::string::npos );
+      auto const from = openAngle + 1;
+      auto const to   = dataType.find( '>', from );
+      assert( to == dataType.length() - 1 );
+      std::string type = dataType.substr( from, to - from );
+      assert( type.starts_with( "VULKAN_HPP_NAMESPACE::" ) );
+      modifiedDataType.replace( from, to - from, "UniqueHandle<" + type + ", Dispatch>, " + stripPrefix( type, "VULKAN_HPP_NAMESPACE::" ) + "Allocator" );
+    }
+    else
+    {
+      assert( !dataType.starts_with( "std::" ) );
+      modifiedDataType = "UniqueHandle<" + dataType + ", Dispatch>";
+    }
+  }
+
+  std::string returnType;
+  if ( ( 1 < commandData.successCodes.size() ) && returnParams.empty() && !chained )
+  {
+    // multiple success codes but no output parameter -> return the Result itself
+    assert( ( commandData.returnType == "VkResult" ) && !unique );
+    returnType = "VULKAN_HPP_NAMESPACE::Result";
+  }
+  else if ( ( commandData.returnType != "VkResult" ) && ( commandData.returnType != "void" ) )
+  {
+    // a non-Result, non-void native return type is passed through (namespaced if Vk-prefixed)
+    assert( returnParams.empty() && !chained && !unique );
+    if ( commandData.returnType.starts_with( "Vk" ) )
+    {
+      returnType = generateNamespacedType( commandData.returnType );
+    }
+    else
+    {
+      returnType = commandData.returnType;
+    }
+  }
+  else if ( ( commandData.returnType == "void" ) ||
+            ( ( commandData.returnType == "VkResult" ) && ( commandData.successCodes.size() == 1 ) && ( commandData.errorCodes.empty() || raii ) ) )
+  {
+    // void commands, and VkResult commands that cannot fail (or whose errors are
+    // thrown in raii mode), return the data directly
+    assert( !unique );
+    assert( ( commandData.returnType != "void" ) || ( returnParams.size() <= 2 ) );
+    returnType = modifiedDataType;
+  }
+  else
+  {
+    assert( commandData.returnType == "VkResult" );
+    assert( !commandData.successCodes.empty() && ( commandData.successCodes[0] == "VK_SUCCESS" ) );
+    if ( ( 1 < commandData.successCodes.size() ) && ( ( returnParams.size() == 1 ) || ( ( returnParams.size() == 2 ) && vectorParams.empty() ) ) )
+    {
+      // multiple success codes: the caller needs both the Result and the data
+      assert( !commandData.errorCodes.empty() );
+      returnType = ( raii ? "std::pair<VULKAN_HPP_NAMESPACE::Result, " : "ResultValue<" ) + modifiedDataType + ">";
+    }
+    else
+    {
+      assert(
+        ( ( commandData.successCodes.size() == 1 ) || ( ( commandData.successCodes.size() == 2 ) && ( commandData.successCodes[1] == "VK_INCOMPLETE" ) ) ) &&
+        ( returnParams.size() <= 3 ) );
+      returnType = raii ? modifiedDataType : ( "typename ResultValueType<" + modifiedDataType + ">::type" );
+    }
+  }
+  return returnType;
+}
+
+// Determine the name of the local variable that holds a generated command's output:
+// "structureChain(s)" for chained flavours, the lower-cased, de-pointered name of the
+// return parameter (stripped of its plural 's' for singular flavours), or simply
+// "data" when several return values are combined.  Empty when there is no output.
+std::string VulkanHppGenerator::generateReturnVariable( CommandData const &                       commandData,
+                                                        std::vector<size_t> const &               returnParams,
+                                                        std::map<size_t, VectorParamData> const & vectorParams,
+                                                        CommandFlavourFlags                       flavourFlags ) const
+{
+  bool chained  = flavourFlags & CommandFlavourFlagBits::chained;
+  bool singular = flavourFlags & CommandFlavourFlagBits::singular;
+
+  std::string returnVariable;
+  switch ( returnParams.size() )
+  {
+    case 0: break;  // no return variable
+    case 1:
+      if ( chained )
+      {
+        returnVariable = "structureChain";
+      }
+      else
+      {
+        // derive the name from the output parameter, e.g. "pImage" -> "image"
+        returnVariable = startLowerCase( stripPrefix( commandData.params[returnParams[0]].name, "p" ) );
+        if ( singular )
+        {
+          returnVariable = stripPluralS( returnVariable, m_tags );
+        }
+      }
+      break;
+    case 2:
+      if ( chained )
+      {
+        returnVariable = "structureChains";
+      }
+      else
+      {
+        auto vectorParamIt = vectorParams.find( returnParams[1] );
+        if ( vectorParamIt == vectorParams.end() )
+        {  // the return variable is simply named "data", and holds the multi-return value stuff
+          returnVariable = "data";
+        }
+        else
+        {
+          // a count/vector pair: name the variable after the vector parameter
+          assert( vectorParamIt->second.lenParam == returnParams[0] );
+          assert( !singular );
+          returnVariable = startLowerCase( stripPrefix( commandData.params[returnParams[1]].name, "p" ) );
+        }
+      }
+      break;
+    case 3:
+      // one count parameter describing two vectors -> combined into "data"
+      assert( !chained && !singular );
+      assert( ( vectorParams.size() == 2 ) && ( vectorParams.begin()->first == returnParams[1] ) &&
+              ( vectorParams.begin()->second.lenParam == returnParams[0] ) && ( std::next( vectorParams.begin() )->first == returnParams[2] ) &&
+              ( std::next( vectorParams.begin() )->second.lenParam == returnParams[0] ) );
+      returnVariable = "data";
+      break;
+  }
+  return returnVariable;
+}
+
// Generates the size-consistency checks embedded in the enhanced struct constructors
// (the ones taking ArrayProxyNoTemporaries arguments).
// - arrayIts: iterators to all struct members that share a single len member
// - structName: the (Vk-stripped) struct name, used in the exception message
// - mutualExclusiveLens: if true, exactly one of the arrays may be non-empty;
//   otherwise all arrays must have identical sizes
// The generated text asserts via VULKAN_HPP_ASSERT when VULKAN_HPP_NO_EXCEPTIONS is
// defined, and throws LogicError otherwise. Returns "" when there is at most one array.
std::string VulkanHppGenerator::generateSizeCheck( std::vector<std::vector<MemberData>::const_iterator> const & arrayIts,
                                                   std::string const &                                          structName,
                                                   bool                                                         mutualExclusiveLens ) const
{
  std::string sizeCheck;
  if ( 1 < arrayIts.size() )
  {
    static const std::string throwTextTemplate = R"(      if ( ${throwCheck} )
      {
        throw LogicError( VULKAN_HPP_NAMESPACE_STRING"::${structName}::${structName}: ${throwCheck}" );
      }
)";

    std::string assertionText, throwText;
    if ( mutualExclusiveLens )
    {
      // exactly one of the arrays has to be non-empty
      // build a sum of "!<array>_.empty()" terms and require it to be at most/exactly one
      std::string sum;
      for ( auto it : arrayIts )
      {
        sum += "!" + startLowerCase( stripPrefix( it->name, "p" ) ) + "_.empty() + ";
      }
      sum.resize( sum.size() - 3 );  // drop the trailing " + "
      assertionText += "      VULKAN_HPP_ASSERT( ( " + sum + " ) <= 1);\n";
      throwText += replaceWithMap( throwTextTemplate, { { "structName", structName }, { "throwCheck", "1 < ( " + sum + " )" } } );
    }
    else
    {
      // all arrays have to be equally sized: emit a pairwise check for every pair
      for ( size_t first = 0; first + 1 < arrayIts.size(); ++first )
      {
        assert( arrayIts[first]->name.starts_with( "p" ) );
        std::string firstName = startLowerCase( stripPrefix( arrayIts[first]->name, "p" ) ) + "_";
        for ( auto second = first + 1; second < arrayIts.size(); ++second )
        {
          assert( arrayIts[second]->name.starts_with( "p" ) );
          std::string secondName     = startLowerCase( stripPrefix( arrayIts[second]->name, "p" ) ) + "_";
          std::string assertionCheck = firstName + ".size() == " + secondName + ".size()";
          std::string throwCheck     = firstName + ".size() != " + secondName + ".size()";
          // an optional array may be empty; only compare sizes when the optional array is non-empty
          // (note: the second-array guard is prepended before the first-array guard on purpose,
          // so the first-array condition ends up leftmost in the generated expression)
          if ( ( !arrayIts[first]->optional.empty() && arrayIts[first]->optional.front() ) ||
               ( !arrayIts[second]->optional.empty() && arrayIts[second]->optional.front() ) )
          {
            assertionCheck = "( " + assertionCheck + " )";
            throwCheck     = "( " + throwCheck + " )";
            if ( !arrayIts[second]->optional.empty() && arrayIts[second]->optional.front() )
            {
              assertionCheck = secondName + ".empty() || " + assertionCheck;
              throwCheck     = "!" + secondName + ".empty() && " + throwCheck;
            }
            if ( !arrayIts[first]->optional.empty() && arrayIts[first]->optional.front() )
            {
              assertionCheck = firstName + ".empty() || " + assertionCheck;
              throwCheck     = "!" + firstName + ".empty() && " + throwCheck;
            }
          }
          assertionText += "      VULKAN_HPP_ASSERT( " + assertionCheck + " );\n";
          throwText += replaceWithMap( throwTextTemplate, { { "structName", structName }, { "throwCheck", throwCheck } } );
        }
      }
    }
    sizeCheck += "\n#ifdef VULKAN_HPP_NO_EXCEPTIONS\n" + assertionText + "#else\n" + throwText + "#endif /*VULKAN_HPP_NO_EXCEPTIONS*/\n" + "    ";
  }
  return sizeCheck;
}
+
+std::string VulkanHppGenerator::generateStaticAssertions() const
+{
+  std::string staticAssertions;
+  for ( auto const & feature : m_features )
+  {
+    staticAssertions += generateStaticAssertions( feature.second.requireData, feature.first );
+  }
+  for ( auto const & extIt : m_extensionsByNumber )
+  {
+    staticAssertions += generateStaticAssertions( extIt.second->second.requireData, extIt.second->first );
+  }
+  return staticAssertions;
+}
+
+std::string VulkanHppGenerator::generateStaticAssertions( std::vector<RequireData> const & requireData, std::string const & title ) const
+{
+  std::string str;
+  for ( auto const & require : requireData )
+  {
+    for ( auto const & type : require.types )
+    {
+      auto handleIt = m_handles.find( type );
+      if ( handleIt != m_handles.end() )
+      {
+        std::string const assertionTemplate = R"(
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::${className} ) == sizeof( Vk${className} ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::${className}>::value, "${className} is not nothrow_move_constructible!" );
+)";
+        str += replaceWithMap( assertionTemplate, { { "className", stripPrefix( handleIt->first, "Vk" ) } } );
+      }
+      else
+      {
+        auto structIt = m_structures.find( type );
+        if ( structIt != m_structures.end() )
+        {
+          std::string const assertionTemplate = R"(
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::${structureType} ) == sizeof( Vk${structureType} ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::${structureType}>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::${structureType}>::value, "${structureType} is not nothrow_move_constructible!" );
+)";
+          str += replaceWithMap( assertionTemplate, { { "structureType", stripPrefix( structIt->first, "Vk" ) } } );
+        }
+      }
+    }
+  }
+  return addTitleAndProtection( title, str );
+}
+
+std::string VulkanHppGenerator::generateStruct( std::pair<std::string, StructureData> const & structure, std::set<std::string> & listedStructs ) const
+{
+  assert( listedStructs.find( structure.first ) == listedStructs.end() );
+
+  std::string str;
+  for ( auto const & member : structure.second.members )
+  {
+    auto structIt = m_structures.find( member.type.type );
+    if ( ( structIt != m_structures.end() ) && ( structure.first != member.type.type ) && ( listedStructs.find( member.type.type ) == listedStructs.end() ) )
+    {
+      str += generateStruct( *structIt, listedStructs );
+    }
+  }
+
+  if ( !structure.second.subStruct.empty() )
+  {
+    auto structureIt = m_structures.find( structure.second.subStruct );
+    if ( ( structureIt != m_structures.end() ) && ( listedStructs.find( structureIt->first ) == listedStructs.end() ) )
+    {
+      str += generateStruct( *structureIt, listedStructs );
+    }
+  }
+
+  if ( structure.second.isUnion )
+  {
+    str += generateUnion( structure );
+  }
+  else
+  {
+    str += generateStructure( structure );
+  }
+
+  listedStructs.insert( structure.first );
+  return str;
+}
+
// Generates operator==, operator!= and (when possible) operator<=> for a struct wrapper.
// Members are compared element-wise; char-pointer members are compared as null-terminated
// strings, and unknown "Requires"-category types fall back to memcmp. When every member
// supports default comparison, operator== uses reflect() (under VULKAN_HPP_USE_REFLECT)
// and operator<=> is defaulted. The spaceship operator is omitted for structs containing
// function pointers. Ordering category is partial_ordering iff a member is floating-point.
std::string VulkanHppGenerator::generateStructCompareOperators( std::pair<std::string, StructureData> const & structData ) const
{
  // types in this list are compared with their own operator==; everything else of
  // category "Requires" (external/opaque types) is compared via memcmp
  static const std::set<std::string> simpleTypes = { "char",   "double",  "DWORD",    "float",    "HANDLE",  "HINSTANCE", "HMONITOR",
                                                     "HWND",   "int",     "int8_t",   "int16_t",  "int32_t", "int64_t",   "LPCWSTR",
                                                     "size_t", "uint8_t", "uint16_t", "uint32_t", "uint64_t" };
  // two structs are compared by comparing each of the elements
  std::string compareMembers, spaceshipMembers;
  std::string intro             = "";
  bool        nonDefaultCompare = false;
  std::string spaceshipOrdering = containsFloatingPoints( structData.second.members ) ? "std::partial_ordering" : "std::strong_ordering";
  for ( size_t i = 0; i < structData.second.members.size(); i++ )
  {
    MemberData const & member = structData.second.members[i];
    auto               typeIt = m_types.find( member.type.type );
    assert( typeIt != m_types.end() );
    if ( ( typeIt->second.category == TypeCategory::Requires ) && member.type.postfix.empty() && ( simpleTypes.find( member.type.type ) == simpleTypes.end() ) )
    {
      nonDefaultCompare = true;
      // this type might support operator==() or operator<=>()... that is, use memcmp
      compareMembers += intro + "( memcmp( &" + member.name + ", &rhs." + member.name + ", sizeof( " + member.type.type + " ) ) == 0 )";

      static const std::string spaceshipMemberTemplate =
        R"(      if ( auto cmp = memcmp( &${name}, &rhs.${name}, sizeof( ${type} ) ); cmp != 0 )
        return ( cmp < 0 ) ? ${ordering}::less : ${ordering}::greater;
)";
      spaceshipMembers +=
        replaceWithMap( spaceshipMemberTemplate, { { "name", member.name }, { "ordering", spaceshipOrdering }, { "type", member.type.type } } );
    }
    else if ( member.type.type == "char" && !member.len.empty() )
    {
      // compare null-terminated strings
      nonDefaultCompare = true;
      assert( member.len.size() < 3 );
      if ( member.len.size() == 1 )
      {
        // a single null-terminated string: equal if same pointer or strcmp == 0
        assert( member.len[0] == "null-terminated" );
        compareMembers += intro + "( ( " + member.name + " == rhs." + member.name + " ) || ( strcmp( " + member.name + ", rhs." + member.name + " ) == 0 ) )";

        static const std::string spaceshipMemberTemplate =
          R"(     if ( ${name} != rhs.${name} )
        if ( auto cmp = strcmp( ${name}, rhs.${name} ); cmp != 0 )
          return ( cmp < 0 ) ? ${ordering}::less : ${ordering}::greater;
)";
        spaceshipMembers += replaceWithMap( spaceshipMemberTemplate, { { "name", member.name }, { "ordering", spaceshipOrdering } } );
      }
      else
      {
        // an array of null-terminated strings of length member.len[0]
        assert( member.len[1] == "null-terminated" );
        assert( ( member.type.prefix == "const" ) && ( member.type.postfix == "* const *" ) );
        static const std::string compareMemberTemplate =
          R"(std::equal( ${name}, ${name} + ${count}, rhs.${name}, []( char const * left, char const * right ) { return ( left == right ) || ( strcmp( left, right ) == 0 ); } ))";
        compareMembers += intro + replaceWithMap( compareMemberTemplate, { { "count", member.len[0] }, { "name", member.name } } );

        static const std::string spaceshipMemberTemplate = R"(      for ( size_t i = 0; i < ${count}; ++i )
      {
        if ( ${name}[i] != rhs.${name}[i] )
          if ( auto cmp = strcmp( ${name}[i], rhs.${name}[i] ); cmp != 0 )
            return cmp < 0 ? ${ordering}::less : ${ordering}::greater;
      }
)";
        spaceshipMembers +=
          replaceWithMap( spaceshipMemberTemplate, { { "count", member.len[0] }, { "name", member.name }, { "ordering", spaceshipOrdering } } );
      }
    }
    else
    {
      // for all others, we use the operator== of that type
      compareMembers += intro + "( " + member.name + " == rhs." + member.name + " )";
      spaceshipMembers += "      if ( auto cmp = " + member.name + " <=> rhs." + member.name + "; cmp != 0 ) return cmp;\n";
    }
    intro = "\n          && ";  // from the second member on, chain comparisons with &&
  }

  std::string structName = stripPrefix( structData.first, "Vk" );

  std::string compareBody, spaceshipOperator, spaceshipOperatorIf, spaceshipOperatorElse, spaceshipOperatorEndif;
  if ( nonDefaultCompare )
  {
    // at least one member needs a hand-rolled comparison -> spell out operator== completely
    compareBody = "      return " + compareMembers + ";";

    if ( !containsFuncPointer( structData.first ) )
    {
      static const std::string spaceshipOperatorTemplate =
        R"(    ${ordering} operator<=>( ${name} const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
${spaceshipMembers}
      return ${ordering}::equivalent;
    })";
      spaceshipOperatorIf = "#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)";
      spaceshipOperator =
        replaceWithMap( spaceshipOperatorTemplate, { { "name", structName }, { "ordering", spaceshipOrdering }, { "spaceshipMembers", spaceshipMembers } } );
      // here, operator== is emitted unconditionally, so the guard ends right after operator<=>
      spaceshipOperatorElse  = "#endif\n";
      spaceshipOperatorEndif = "";
    }
  }
  else
  {
    // use reflection only if VULKAN_HPP_USE_REFLECT is defined
    static const std::string compareBodyTemplate = R"(#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ${compareMembers};
#endif)";
    compareBody                                  = replaceWithMap( compareBodyTemplate, { { "compareMembers", compareMembers } } );

    if ( !containsFuncPointer( structData.first ) )
    {
      // defaulted spaceship replaces == and != entirely, so they only appear in the #else branch
      spaceshipOperatorIf    = "#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)";
      spaceshipOperator      = "auto operator<=>( " + structName + " const & ) const = default;";
      spaceshipOperatorElse  = "#else";
      spaceshipOperatorEndif = "#endif\n";
    }
  }

  static const std::string compareTemplate = R"(
${spaceshipOperatorIf}
${spaceshipOperator}
${spaceshipOperatorElse}
    bool operator==( ${name} const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
${compareBody}
    }

    bool operator!=( ${name} const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
${spaceshipOperatorEndif})";

  return replaceWithMap( compareTemplate,
                         { { "name", structName },
                           { "compareBody", compareBody },
                           { "spaceshipOperator", spaceshipOperator },
                           { "spaceshipOperatorElse", spaceshipOperatorElse },
                           { "spaceshipOperatorEndif", spaceshipOperatorEndif },
                           { "spaceshipOperatorIf", spaceshipOperatorIf } } );
}
+
+std::string VulkanHppGenerator::generateStructConstructors( std::pair<std::string, StructureData> const & structData ) const
+{
+  // the constructor with all the elements as arguments, with defaults
+  // and the simple copy constructor from the corresponding vulkan structure
+  static const std::string constructors = R"(${constexpr}${structName}(${arguments}) VULKAN_HPP_NOEXCEPT
+    ${initializers}
+    {}
+
+    ${constexpr}${structName}( ${structName} const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ${structName}( Vk${structName} const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ${structName}( *reinterpret_cast<${structName} const *>( &rhs ) )
+    {}
+)";
+
+  std::string arguments, initializers;
+  bool        listedArgument = false;
+  bool        firstArgument  = true;
+  for ( auto const & member : structData.second.members )
+  {
+    // gather the arguments
+    std::string argument = generateStructConstructorArgument( listedArgument, member, true );
+    if ( !argument.empty() )
+    {
+      listedArgument = true;
+      arguments += argument;
+    }
+
+    // gather the initializers; skip members with exactly one legal value
+    if ( member.value.empty() )
+    {
+      initializers += std::string( firstArgument ? ": " : ", " ) + member.name + "( " + member.name + "_ )";
+      firstArgument = false;
+    }
+  }
+  auto pNextIt = std::find_if( structData.second.members.begin(), structData.second.members.end(), []( MemberData const & md ) { return md.name == "pNext"; } );
+  if ( pNextIt != structData.second.members.end() )
+  {
+    // add pNext as a last optional argument to the constructor
+    arguments += std::string( listedArgument ? ", " : "" ) + pNextIt->type.compose( "VULKAN_HPP_NAMESPACE" ) + " pNext_ = nullptr";
+  }
+
+  std::string str = replaceWithMap( constructors,
+                                    { { "arguments", arguments },
+                                      { "constexpr", generateConstexprString( structData.first ) },
+                                      { "initializers", initializers },
+                                      { "structName", stripPrefix( structData.first, "Vk" ) } } );
+
+  str += generateStructConstructorsEnhanced( structData );
+  return str;
+}
+
// Generates the "enhanced" constructor of a struct wrapper, taking
// ArrayProxyNoTemporaries arguments for every member with a len attribute;
// the corresponding len members are then initialized from the proxies' sizes.
// Returns "" if the struct has no such member. The generated constructor is
// guarded by VULKAN_HPP_DISABLE_ENHANCED_MODE and contains the size checks
// produced by generateSizeCheck when multiple arrays share one len member.
std::string VulkanHppGenerator::generateStructConstructorsEnhanced( std::pair<std::string, StructureData> const & structData ) const
{
  if ( std::find_if( structData.second.members.begin(),
                     structData.second.members.end(),
                     [this, &structData]( MemberData const & md ) { return hasLen( structData.second.members, md ); } ) != structData.second.members.end() )
  {
    // map from len-members to all the array members using that len
    std::map<std::vector<MemberData>::const_iterator, std::vector<std::vector<MemberData>::const_iterator>> lenIts;
    for ( auto mit = structData.second.members.begin(); mit != structData.second.members.end(); ++mit )
    {
      if ( hasLen( structData.second.members, *mit ) )
      {
        // special case: "codeSize / 4" (shader module code) is driven by the codeSize member
        std::string lenName = ( mit->len.front() == "codeSize / 4" ) ? "codeSize" : mit->len.front();
        auto        lenIt   = findStructMemberIt( lenName, structData.second.members );
        assert( lenIt != mit );
        lenIts[lenIt].push_back( mit );
      }
    }

    std::string arguments, initializers;
    bool        listedArgument = false;
    bool        firstArgument  = true;
    bool        arrayListed    = false;  // once an array argument is listed, later arguments get "= {}" defaults
    std::string templateHeader, sizeChecks;
    for ( auto mit = structData.second.members.begin(); mit != structData.second.members.end(); ++mit )
    {
      // gather the initializers
      if ( mit->name == "pNext" )  // for pNext, we just get the initializer... the argument is added at the end
      {
        initializers += std::string( firstArgument ? ":" : "," ) + " pNext( pNext_ )";
        firstArgument = false;
      }
      else if ( mit->value.empty() )  // skip constant members
      {
        auto litit = lenIts.find( mit );
        if ( litit != lenIts.end() )
        {
          // len arguments just have an initializer, from the ArrayProxyNoTemporaries size
          initializers +=
            ( firstArgument ? ": " : ", " ) + mit->name + "( " + generateLenInitializer( mit, litit, structData.second.mutualExclusiveLens ) + " )";
          sizeChecks += generateSizeCheck( litit->second, stripPrefix( structData.first, "Vk" ), structData.second.mutualExclusiveLens );
        }
        else if ( hasLen( structData.second.members, *mit ) )
        {
          // array member: exposed as an ArrayProxyNoTemporaries of the pointee type
          assert( mit->name.starts_with( "p" ) );
          std::string argumentName = startLowerCase( stripPrefix( mit->name, "p" ) ) + "_";

          assert( mit->type.postfix.ends_with( "*" ) );
          std::string argumentType = trimEnd( stripPostfix( mit->type.compose( "VULKAN_HPP_NAMESPACE" ), "*" ) );
          if ( ( mit->type.type == "void" ) && ( argumentType.find( '*' ) == std::string::npos ) )
          {
            // the argument after stripping one pointer is just void
            assert( templateHeader.empty() );
            templateHeader = "    template <typename T>\n";

            size_t pos = argumentType.find( "void" );
            assert( pos != std::string::npos );
            argumentType.replace( pos, strlen( "void" ), "T" );
          }

          arguments += listedArgument ? ", " : "";
          arguments += "VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<" + argumentType + "> const & " + argumentName;
          if ( arrayListed )
          {
            arguments += " = {}";
          }
          listedArgument = true;
          arrayListed    = true;

          initializers += ( firstArgument ? ": " : ", " ) + mit->name + "( " + argumentName + ".data() )";
        }
        else
        {
          // plain member: same handling as in the basic constructor (no default before the first array)
          std::string argument = generateStructConstructorArgument( listedArgument, *mit, arrayListed );
          if ( !argument.empty() )
          {
            listedArgument = true;
            arguments += argument;
          }
          initializers += ( firstArgument ? ": " : ", " ) + mit->name + "( " + mit->name + "_ )";
        }
        firstArgument = false;
      }
    }

    auto pNextIt =
      std::find_if( structData.second.members.begin(), structData.second.members.end(), []( MemberData const & md ) { return md.name == "pNext"; } );
    if ( pNextIt != structData.second.members.end() )
    {
      // add pNext as a last optional argument to the constructor
      arguments += std::string( listedArgument ? ", " : "" ) + pNextIt->type.compose( "VULKAN_HPP_NAMESPACE" ) + " pNext_ = nullptr";
    }

    static const std::string constructorTemplate = R"(
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
${templateHeader}    ${structName}( ${arguments} )
    ${initializers}
    {${sizeChecks}}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
)";

    return replaceWithMap( constructorTemplate,
                           { { "arguments", arguments },
                             { "initializers", initializers },
                             { "sizeChecks", sizeChecks },
                             { "structName", stripPrefix( structData.first, "Vk" ) },
                             { "templateHeader", templateHeader } } );
  }
  return "";
}
+
+std::string VulkanHppGenerator::generateStructConstructorArgument( bool listedArgument, MemberData const & memberData, bool withDefault ) const
+{
+  // skip members 'pNext' and members with a specified value, as they are never explicitly set
+  std::string str;
+  if ( ( memberData.name != "pNext" ) && memberData.value.empty() )
+  {
+    str += ( listedArgument ? ( ", " ) : "" );
+    if ( memberData.arraySizes.empty() )
+    {
+      str += memberData.type.compose( "VULKAN_HPP_NAMESPACE" ) + " ";
+    }
+    else
+    {
+      str += generateStandardArray( memberData.type.compose( "VULKAN_HPP_NAMESPACE" ), memberData.arraySizes ) + " const & ";
+    }
+    str += memberData.name + "_";
+
+    if ( withDefault )
+    {
+      str += " = ";
+      auto enumIt = m_enums.find( memberData.type.type );
+      if ( enumIt != m_enums.end() && memberData.type.postfix.empty() )
+      {
+        str += generateEnumInitializer( memberData.type, memberData.arraySizes, enumIt->second.values, enumIt->second.isBitmask );
+      }
+      else
+      {
+        assert( memberData.value.empty() );
+        // all the rest can be initialized with just {}
+        str += "{}";
+      }
+    }
+  }
+  return str;
+}
+
+std::string VulkanHppGenerator::generateStructHashStructure( std::pair<std::string, StructureData> const & structure,
+                                                             std::set<std::string> &                       listedStructs ) const
+{
+  assert( listedStructs.find( structure.first ) == listedStructs.end() );
+
+  std::string str;
+  for ( auto const & member : structure.second.members )
+  {
+    auto structIt = m_structures.find( member.type.type );
+    if ( ( structIt != m_structures.end() ) && ( structure.first != member.type.type ) && ( listedStructs.find( member.type.type ) == listedStructs.end() ) )
+    {
+      str += generateStructHashStructure( *structIt, listedStructs );
+    }
+  }
+
+  if ( !containsUnion( structure.first ) )
+  {
+    static const std::string hashTemplate = R"(
+  ${enter}template <> struct hash<VULKAN_HPP_NAMESPACE::${structureType}>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::${structureType} const & ${structureName}) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+${hashSum}
+      return seed;
+    }
+  };
+${leave})";
+
+    auto [enter, leave] =
+      generateProtection( m_structureAliasesInverse.find( structure.first ) == m_structureAliasesInverse.end() ? getProtectFromType( structure.first ) : "" );
+
+    std::string structureType = stripPrefix( structure.first, "Vk" );
+    std::string structureName = startLowerCase( structureType );
+    str += replaceWithMap( hashTemplate,
+                           { { "enter", enter },
+                             { "hashSum", generateStructHashSum( structureName, structure.second.members ) },
+                             { "leave", leave },
+                             { "structureName", structureName },
+                             { "structureType", structureType } } );
+  }
+
+  listedStructs.insert( structure.first );
+  return str;
+}
+
+std::string VulkanHppGenerator::generateStructHashStructures() const
+{
+  const std::string hashesTemplate = R"(
+#if 14 <= VULKAN_HPP_CPP_VERSION
+  //======================================
+  //=== HASH structures for structures ===
+  //======================================
+
+#  if !defined( VULKAN_HPP_HASH_COMBINE )
+#    define VULKAN_HPP_HASH_COMBINE( seed, value ) \
+      seed ^= std::hash<std::decay<decltype( value )>::type>{}( value ) + 0x9e3779b9 + ( seed << 6 ) + ( seed >> 2 )
+#  endif
+
+${hashes}
+#endif    // 14 <= VULKAN_HPP_CPP_VERSION
+)";
+
+  // Note reordering structs or handles by features and extensions is not possible!
+  std::set<std::string> listedStructs;
+  std::string           hashes;
+  for ( auto const & structure : m_structures )
+  {
+    if ( listedStructs.find( structure.first ) == listedStructs.end() )
+    {
+      hashes += generateStructHashStructure( structure, listedStructs );
+    }
+  }
+  return replaceWithMap( hashesTemplate, { { "hashes", hashes } } );
+}
+
// Generates the body of a std::hash specialization for one struct: a sequence of
// VULKAN_HPP_HASH_COMBINE lines folding every member into 'seed'. Fixed-size arrays
// (up to two dimensions) are hashed element-wise; char-pointer members with a len
// attribute are hashed character-by-character as null-terminated strings.
// The trailing newline is stripped, as the surrounding template supplies it.
std::string VulkanHppGenerator::generateStructHashSum( std::string const & structName, std::vector<MemberData> const & members ) const
{
  std::string hashSum;
  for ( auto const & member : members )
  {
    if ( !member.arraySizes.empty() )
    {
      // fixed-size array member: one loop per dimension (at most two supported)
      assert( member.arraySizes.size() < 3 );
      hashSum += "    for ( size_t i = 0; i < " + member.arraySizes[0] + "; ++i )\n";
      hashSum += "    {\n";
      if ( member.arraySizes.size() == 1 )
      {
        hashSum += "      VULKAN_HPP_HASH_COMBINE( seed, " + structName + "." + member.name + "[i] );\n";
      }
      else
      {
        hashSum += "      for ( size_t j=0; j < " + member.arraySizes[1] + "; ++j )\n";
        hashSum += "      {\n";
        hashSum += "        VULKAN_HPP_HASH_COMBINE( seed, " + structName + "." + member.name + "[i][j] );\n";
        hashSum += "      }\n";
      }
      hashSum += "    }\n";
    }
    else if ( member.type.type == "char" && !member.len.empty() )
    {
      assert( member.len.size() < 3 );
      if ( member.len.size() == 1 )
      {
        // a single null-terminated string: hash every character up to the terminator
        assert( member.len[0] == "null-terminated" );
        hashSum += "    for ( const char* p = " + structName + "." + member.name + "; *p != '\\0'; ++p )\n";
        hashSum += "    {\n";
        hashSum += "      VULKAN_HPP_HASH_COMBINE( seed, *p );\n";
        hashSum += "    }\n";
      }
      else
      {
        // an array of member.len[0] null-terminated strings
        assert( member.len[1] == "null-terminated" );
        hashSum += "    for ( size_t i = 0; i < " + structName + "." + member.len[0] + "; ++i )\n";
        hashSum += "    {\n";
        hashSum += "        for ( const char* p = " + structName + "." + member.name + "[i]; *p != '\\0'; ++p )\n";
        hashSum += "        {\n";
        hashSum += "          VULKAN_HPP_HASH_COMBINE( seed, *p );\n";
        hashSum += "        }\n";
        hashSum += "    }\n";
      }
    }
    else
    {
      // everything else is hashed directly via its std::hash specialization
      hashSum += "    VULKAN_HPP_HASH_COMBINE( seed, " + structName + "." + member.name + " );\n";
    }
  }
  assert( !hashSum.empty() );
  return hashSum.substr( 0, hashSum.size() - 1 );
}
+
+std::string VulkanHppGenerator::generateStructs() const
+{
+  const std::string structsTemplate = R"(
+  //===============
+  //=== STRUCTS ===
+  //===============
+
+${structs}
+)";
+
+  // Note reordering structs or handles by features and extensions is not possible!
+  std::set<std::string> listedStructs;
+  std::string           structs;
+  for ( auto const & structure : m_structures )
+  {
+    if ( listedStructs.find( structure.first ) == listedStructs.end() )
+    {
+      structs += generateStruct( structure, listedStructs );
+    }
+  }
+  return replaceWithMap( structsTemplate, { { "structs", structs } } );
+}
+
// Generates the complete wrapper class for one (non-union) struct: constructors and
// setters, conversion operators to/from the native Vk struct, reflect(), comparison
// operators, the member variables, a CppType mapping from its StructureType value,
// and using-aliases for any aliased names — all wrapped in the struct's protection
// guards.
std::string VulkanHppGenerator::generateStructure( std::pair<std::string, StructureData> const & structure ) const
{
  // aliased structs get no protection of their own
  auto [enter, leave] =
    generateProtection( m_structureAliasesInverse.find( structure.first ) == m_structureAliasesInverse.end() ? getProtectFromType( structure.first ) : "" );

  std::string str = "\n" + enter;

  static const std::string constructorsTemplate = R"(
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
${constructors}
${subConstructors}
    ${structName} & operator=( ${structName} const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    ${structName} & operator=( Vk${structName} const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::${structName} const *>( &rhs );
      return *this;
    }
)";

  std::string constructorsAndSetters = replaceWithMap( constructorsTemplate,
                                                       { { "constructors", generateStructConstructors( structure ) },
                                                         { "structName", stripPrefix( structure.first, "Vk" ) },
                                                         { "subConstructors", generateStructSubConstructor( structure ) } } );

  if ( !structure.second.returnedOnly )
  {
    // only structs that are not returnedOnly get setters!
    constructorsAndSetters += "\n#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )";
    for ( size_t i = 0; i < structure.second.members.size(); i++ )
    {
      constructorsAndSetters += generateStructSetter( stripPrefix( structure.first, "Vk" ), structure.second.members, i );
    }
    constructorsAndSetters += "#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/\n";
  }

  std::string structureType = stripPrefix( structure.first, "Vk" );
  // the member variables
  std::string members, memberNames, memberTypes, sTypeValue;
  std::tie( members, memberNames, memberTypes, sTypeValue ) = generateStructMembers( structure );

  // reflect is meaningful for structs only, filter out unions
  std::string reflect;
  if ( !structure.second.isUnion )
  {
    // use reflection only if VULKAN_HPP_USE_REFLECT is defined
    static const std::string reflectTemplate = R"(
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<${memberTypes}>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( ${memberNames} );
    }
#endif
)";

    reflect = replaceWithMap( reflectTemplate, { { "memberNames", memberNames }, { "memberTypes", memberTypes } } );
  }

  // operator==() and operator!=()
  // only structs without a union as a member can have a meaningful == and != operation; we filter them out
  std::string compareOperators;
  if ( !containsUnion( structure.first ) )
  {
    compareOperators += generateStructCompareOperators( structure );
  }

  static const std::string structureTemplate = R"(  struct ${structureType}
  {
    using NativeType = Vk${structureType};

${allowDuplicate}
${typeValue}
${constructorsAndSetters}

    operator Vk${structureType} const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const Vk${structureType}*>( this );
    }

    operator Vk${structureType} &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<Vk${structureType}*>( this );
    }
${reflect}
${compareOperators}
    public:
${members}
  };
)";

  // only structs with an sType member get allowDuplicate/structureType statics
  std::string allowDuplicate, typeValue;
  if ( !sTypeValue.empty() )
  {
    allowDuplicate = std::string( "    static const bool allowDuplicate = " ) + ( structure.second.allowDuplicate ? "true;" : "false;" );
    typeValue      = "    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::" + sTypeValue + ";\n";
  }
  str += replaceWithMap( structureTemplate,
                         { { "allowDuplicate", allowDuplicate },
                           { "constructorsAndSetters", constructorsAndSetters },
                           { "compareOperators", compareOperators },
                           { "members", members },
                           { "reflect", reflect },
                           { "structureType", structureType },
                           { "typeValue", typeValue } } );

  if ( !sTypeValue.empty() )
  {
    // map the StructureType enum value back to this type
    std::string cppTypeTemplate = R"(
  template <>
  struct CppType<StructureType, StructureType::${sTypeValue}>
  {
    using Type = ${structureType};
  };
)";
    str += replaceWithMap( cppTypeTemplate, { { "sTypeValue", sTypeValue }, { "structureType", structureType } } );
  }

  // emit using-aliases for all aliases of this struct
  auto aliasIt = m_structureAliasesInverse.find( structure.first );
  if ( aliasIt != m_structureAliasesInverse.end() )
  {
    for ( std::string const & alias : aliasIt->second )
    {
      str += "  using " + stripPrefix( alias, "Vk" ) + " = " + structureType + ";\n";
    }
  }

  str += leave;
  return str;
}
+
+std::string VulkanHppGenerator::generateStructExtendsStructs() const
+{
+  // Assemble the StructExtends trait specializations: first for every core feature, then for
+  // every extension in extension-number order. A shared set of already-listed structs prevents
+  // a struct from being emitted twice.
+  const std::string structExtendsTemplate = R"(
+  //=======================
+  //=== STRUCTS EXTENDS ===
+  //=======================
+
+${structExtends})";
+
+  std::set<std::string> alreadyListed;
+  std::string           generated;
+  for ( auto const & featureIt : m_features )
+  {
+    generated += generateStructExtendsStructs( featureIt.second.requireData, alreadyListed, featureIt.first );
+  }
+  for ( auto const & extensionIt : m_extensionsByNumber )
+  {
+    generated += generateStructExtendsStructs( extensionIt.second->second.requireData, alreadyListed, extensionIt.second->first );
+  }
+
+  return replaceWithMap( structExtendsTemplate, { { "structExtends", generated } } );
+}
+
+std::string VulkanHppGenerator::generateStructExtendsStructs( std::vector<RequireData> const & requireData,
+                                                              std::set<std::string> &          listedStructs,
+                                                              std::string const &              title ) const
+{
+  // For every struct required by the given feature/extension, emit one StructExtends trait
+  // specialization per structure it is allowed to extend (per its structextends attribute),
+  // wrapped in the appropriate protection defines.
+  std::string str;
+  for ( auto const & require : requireData )
+  {
+    for ( auto const & type : require.types )
+    {
+      auto structIt = m_structures.find( type );
+      if ( structIt != m_structures.end() )
+      {
+        assert( listedStructs.find( type ) == listedStructs.end() );
+        listedStructs.insert( type );
+
+        auto [enter, leave] = generateProtection( getProtectFromTitle( title ) );
+
+        // append all allowed structure chains
+        for ( auto extendName : structIt->second.structExtends )
+        {
+          std::map<std::string, StructureData>::const_iterator itExtend = m_structures.find( extendName );
+          if ( itExtend == m_structures.end() )
+          {
+            // look if the extendName actually is an alias of some other structure
+            auto aliasIt = m_structureAliases.find( extendName );
+            if ( aliasIt != m_structureAliases.end() )
+            {
+              itExtend = m_structures.find( aliasIt->second.alias );
+            }
+          }
+          // the extended struct must resolve to a known structure (directly or via alias);
+          // dereferencing an end iterator below would be undefined behavior
+          assert( itExtend != m_structures.end() );
+
+          // the extending struct and the extended struct may live under different protection
+          // defines; only emit the extended struct's protection where it differs
+          auto [subEnter, subLeave] = generateProtection(
+            m_structureAliasesInverse.find( itExtend->first ) == m_structureAliasesInverse.end() ? getProtectFromType( itExtend->first ) : "" );
+
+          if ( enter != subEnter )
+          {
+            str += subEnter;
+          }
+
+          str += "  template <> struct StructExtends<" + stripPrefix( structIt->first, "Vk" ) + ", " + stripPrefix( extendName, "Vk" ) +
+                 ">{ enum { value = true }; };\n";
+
+          if ( leave != subLeave )
+          {
+            str += subLeave;
+          }
+        }
+      }
+    }
+  }
+  return addTitleAndProtection( title, str );
+}
+
+std::string VulkanHppGenerator::generateStructForwardDeclarations() const
+{
+  // Assemble forward declarations for all structs and unions: first per core feature, then per
+  // extension in extension-number order.
+  const std::string forwardDeclarationsTemplate = R"(
+  //===================================
+  //=== STRUCT forward declarations ===
+  //===================================
+
+${forwardDeclarations}
+)";
+
+  std::string declarations;
+  for ( auto const & featureIt : m_features )
+  {
+    declarations += generateStructForwardDeclarations( featureIt.second.requireData, featureIt.first );
+  }
+  for ( auto const & extensionIt : m_extensionsByNumber )
+  {
+    declarations += generateStructForwardDeclarations( extensionIt.second->second.requireData, extensionIt.second->first );
+  }
+
+  return replaceWithMap( forwardDeclarationsTemplate, { { "forwardDeclarations", declarations } } );
+}
+
+std::string VulkanHppGenerator::generateStructForwardDeclarations( std::vector<RequireData> const & requireData, std::string const & title ) const
+{
+  // Emit a forward declaration ("struct Xxx;" or "union Xxx;") for every struct type required by
+  // the given feature/extension, plus "using" aliases for each of its alias names.
+  std::string declarations;
+  for ( auto const & requireIt : requireData )
+  {
+    for ( auto const & typeName : requireIt.types )
+    {
+      auto structIt = m_structures.find( typeName );
+      if ( structIt == m_structures.end() )
+      {
+        continue;  // only struct/union types get a forward declaration here
+      }
+      std::string const strippedName = stripPrefix( structIt->first, "Vk" );
+      declarations += ( structIt->second.isUnion ? "  union " : "  struct " ) + strippedName + ";\n";
+      auto aliasesIt = m_structureAliasesInverse.find( typeName );
+      if ( aliasesIt != m_structureAliasesInverse.end() )
+      {
+        for ( auto const & aliasName : aliasesIt->second )
+        {
+          declarations += "  using " + stripPrefix( aliasName, "Vk" ) + " = " + strippedName + ";\n";
+        }
+      }
+    }
+  }
+  return addTitleAndProtection( title, declarations );
+}
+
+std::tuple<std::string, std::string, std::string, std::string>
+  VulkanHppGenerator::generateStructMembers( std::pair<std::string, StructureData> const & structData ) const
+{
+  // Generates the member-variable declarations of a struct wrapper class.
+  // Returns a tuple of:
+  //   - members:     the full member declaration lines (type, name, bitfield/default initializer)
+  //   - memberNames: comma-separated list of the member names
+  //   - memberTypes: comma-separated list of "<type> const &" entries (used for reflection)
+  //   - sTypeValue:  the enum value name assigned to the sType member, or empty if there is none
+  std::string members, memberNames, memberTypes, sTypeValue;
+  for ( auto const & member : structData.second.members )
+  {
+    members += "    ";
+    std::string type;
+    if ( !member.bitCount.empty() && member.type.type.starts_with( "Vk" ) )
+    {
+      // Vk-typed bitfield members keep the raw C type; a wrapped type cannot be a bitfield.
+      assert( member.type.prefix.empty() && member.type.postfix.empty() );  // never encountered a different case
+      type = member.type.type;
+    }
+    else if ( member.arraySizes.empty() )
+    {
+      type = member.type.compose( "VULKAN_HPP_NAMESPACE" );
+    }
+    else
+    {
+      // array members are wrapped into the ArrayWrapper helper type
+      assert( member.type.prefix.empty() && member.type.postfix.empty() );
+      type = generateStandardArrayWrapper( member.type.compose( "VULKAN_HPP_NAMESPACE" ), member.arraySizes );
+    }
+    members += type + " " + member.name;
+    if ( !member.value.empty() )
+    {
+      // special handling for members with legal value: use it as the default
+      members += " = ";
+      if ( member.type.type == "uint32_t" )
+      {
+        members += member.value;
+      }
+      else
+      {
+        // the fixed value names an enum constant; translate it to its C++ enum value name
+        auto enumIt = m_enums.find( member.type.type );
+        assert( enumIt != m_enums.end() );
+        std::string enumValue = member.value;
+        auto        valueIt   = std::find_if(
+          enumIt->second.values.begin(), enumIt->second.values.end(), [&enumValue]( EnumValueData const & evd ) { return enumValue == evd.name; } );
+        assert( valueIt != enumIt->second.values.end() );
+        std::string valueName = generateEnumValueName( enumIt->first, valueIt->name, enumIt->second.isBitmask, m_tags );
+        members += stripPrefix( member.type.type, "Vk" ) + "::" + valueName;
+        if ( member.name == "sType" )
+        {
+          // remember the sType value so the caller can emit structureType/CppType specializations
+          sTypeValue = valueName;
+        }
+      }
+    }
+    else
+    {
+      // as we don't have any meaningful default initialization values, everything can be initialized by just '{}'
+      // !
+      assert( member.arraySizes.empty() || member.bitCount.empty() );
+      if ( !member.bitCount.empty() )
+      {
+        members += " : " + member.bitCount;  // except for bitfield members, where no default member initialization
+                                             // is supported (up to C++20)
+      }
+      else
+      {
+        members += " = ";
+        auto enumIt = m_enums.find( member.type.type );
+        if ( member.arraySizes.empty() && ( enumIt != m_enums.end() ) && member.type.postfix.empty() )
+        {
+          // plain enum members default to their first value (or zero for bitmasks)
+          members += generateEnumInitializer( member.type, member.arraySizes, enumIt->second.values, enumIt->second.isBitmask );
+        }
+        else
+        {
+          members += "{}";
+        }
+      }
+    }
+    members += ";\n";
+
+    memberNames += member.name + ", ";
+    memberTypes += type + " const &, ";
+  }
+  return std::make_tuple( members, stripPostfix( memberNames, ", " ), stripPostfix( memberTypes, ", " ), sTypeValue );
+}
+
+std::string VulkanHppGenerator::generateStructSetter( std::string const & structureName, std::vector<MemberData> const & memberData, size_t index ) const
+{
+  // Generates the fluent setter(s) for one struct member (memberData[index]):
+  //   - a plain set<Member>() setter for every member except sType, and
+  //   - an additional ArrayProxyNoTemporaries-based setter when the member is a pointer with an
+  //     associated length member, updating both the pointer and the length.
+  std::string        str;
+  MemberData const & member = memberData[index];
+  if ( member.type.type != "VkStructureType" )  // filter out StructureType, which is supposed to be immutable !
+  {
+    static const std::string templateString = R"(
+    ${constexpr}${structureName} & set${MemberName}( ${memberType} ${reference}${memberName}_ ) VULKAN_HPP_NOEXCEPT
+    {
+      ${assignment};
+      return *this;
+    }
+)";
+
+    std::string memberType         = member.arraySizes.empty() ? member.type.compose( "VULKAN_HPP_NAMESPACE" )
+                                                               : generateStandardArray( member.type.compose( "VULKAN_HPP_NAMESPACE" ), member.arraySizes );
+    // Vk-typed bitfield members are stored with the raw C type, so assigning the wrapped type
+    // needs a reinterpret_cast; such an assignment cannot be constexpr.
+    bool        isReinterpretation = !member.bitCount.empty() && member.type.type.starts_with( "Vk" );
+    std::string assignment;
+    if ( isReinterpretation )
+    {
+      assignment = member.name + " = " + "*reinterpret_cast<" + member.type.type + "*>(&" + member.name + "_)";
+    }
+    else
+    {
+      assignment = member.name + " = " + member.name + "_";
+    }
+
+    str +=
+      replaceWithMap( templateString,
+                      { { "assignment", assignment },
+                        { "constexpr", isReinterpretation ? "" : "VULKAN_HPP_CONSTEXPR_14 " },
+                        { "memberName", member.name },
+                        { "MemberName", startUpperCase( member.name ) },
+                        { "memberType", memberType },
+                        { "reference", ( member.type.postfix.empty() && ( m_structures.find( member.type.type ) != m_structures.end() ) ) ? "const & " : "" },
+                        { "structureName", structureName } } );
+
+    if ( hasLen( memberData, member ) )
+    {
+      // pointer member with an associated length member: additionally generate an
+      // ArrayProxyNoTemporaries setter that fills pointer and length in one call
+      assert( member.name.front() == 'p' );
+      std::string arrayName = startLowerCase( stripPrefix( member.name, "p" ) );
+
+      std::string lenName, lenValue;
+      if ( member.len[0] == "codeSize / 4" )
+      {
+        // special case (shader modules): the spec expresses the length as codeSize / 4
+        lenName  = "codeSize";
+        lenValue = arrayName + "_.size() * 4";
+      }
+      else
+      {
+        lenName  = member.len[0];
+        lenValue = arrayName + "_.size()";
+      }
+
+      assert( memberType.back() == '*' );
+      memberType = trimEnd( stripPostfix( memberType, "*" ) );
+
+      std::string templateHeader;
+      if ( ( member.type.type == "void" ) && ( memberType.find( '*' ) == std::string::npos ) )
+      {
+        // a single-pointer void member becomes a templated setter over element type T,
+        // scaling the length by sizeof(T)
+        assert( templateHeader.empty() );
+        templateHeader = "template <typename T>\n    ";
+
+        size_t pos = memberType.find( "void" );
+        assert( pos != std::string::npos );
+        memberType.replace( pos, strlen( "void" ), "T" );
+
+        lenValue += " * sizeof(T)";
+      }
+
+      // cast the computed size to the length member's type where it is not size_t
+      auto lenMember = findStructMemberIt( lenName, memberData );
+      assert( lenMember != memberData.end() && lenMember->type.prefix.empty() && lenMember->type.postfix.empty() );
+      if ( lenMember->type.type != "size_t" )
+      {
+        lenValue = "static_cast<" + lenMember->type.type + ">( " + lenValue + " )";
+      }
+
+      static const std::string setArrayTemplate = R"(
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    ${templateHeader}${structureName} & set${ArrayName}( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<${memberType}> const & ${arrayName}_ ) VULKAN_HPP_NOEXCEPT
+    {
+      ${lenName} = ${lenValue};
+      ${memberName} = ${arrayName}_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+)";
+
+      str += replaceWithMap( setArrayTemplate,
+                             { { "arrayName", arrayName },
+                               { "ArrayName", startUpperCase( arrayName ) },
+                               { "lenName", lenName },
+                               { "lenValue", lenValue },
+                               { "memberName", member.name },
+                               { "memberType", memberType },
+                               { "structureName", structureName },
+                               { "templateHeader", templateHeader } } );
+    }
+  }
+  return str;
+}
+
+std::string VulkanHppGenerator::generateStructSubConstructor( std::pair<std::string, StructureData> const & structData ) const
+{
+  // If this struct has a designated sub-struct (its leading members mirror another struct's
+  // members), generates an explicit constructor that takes that sub-struct plus the remaining
+  // members as arguments. Returns an empty string when there is no sub-struct.
+  if ( !structData.second.subStruct.empty() )
+  {
+    auto const & subStruct = m_structures.find( structData.second.subStruct );
+    assert( subStruct != m_structures.end() );
+
+    std::string subStructArgumentName = startLowerCase( stripPrefix( subStruct->first, "Vk" ) );
+
+    // copy the leading members element-wise from the sub-struct argument
+    std::string subCopies;
+    bool        firstArgument = true;
+    for ( size_t i = 0; i < subStruct->second.members.size(); i++ )
+    {
+      assert( structData.second.members[i].arraySizes.empty() );
+      static const std::string subCopiesTemplate =
+        R"(      ${separator} ${structMemberName}( ${subStructArgumentName}.${subStructMemberName} )
+)";
+      subCopies += replaceWithMap( subCopiesTemplate,
+                                   { { "separator", firstArgument ? ":" : "," },
+                                     { "structMemberName", structData.second.members[i].name },
+                                     { "subStructMemberName", subStruct->second.members[i].name },
+                                     { "subStructArgumentName", subStructArgumentName } } );
+      firstArgument = false;
+    }
+
+    // the remaining members become ordinary constructor arguments; listedArgument starts out
+    // true because the sub-struct argument already precedes them, so the very first extra
+    // argument needs a separator as well
+    std::string subArguments;
+    bool        listedArgument = true;
+    for ( size_t i = subStruct->second.members.size(); i < structData.second.members.size(); i++ )
+    {
+      std::string argument = generateStructConstructorArgument( listedArgument, structData.second.members[i], true );
+      if ( !argument.empty() )
+      {
+        listedArgument = true;
+        subArguments += argument;
+      }
+
+      assert( structData.second.members[i].arraySizes.empty() );
+      static const std::string subCopiesTemplate = R"(    , ${memberName}( ${memberName}_ )
+)";
+      subCopies += replaceWithMap( subCopiesTemplate, { { "memberName", structData.second.members[i].name } } );
+    }
+
+    static const std::string subStructConstructorTemplate = R"(
+    explicit ${structName}( ${subStructName} const & ${subStructArgumentName}${subArguments} )
+${subCopies}    {}
+)";
+    return replaceWithMap( subStructConstructorTemplate,
+                           { { "structName", stripPrefix( structData.first, "Vk" ) },
+                             { "subArguments", subArguments },
+                             { "subCopies", subCopies },
+                             { "subStructArgumentName", subStructArgumentName },
+                             { "subStructName", stripPrefix( subStruct->first, "Vk" ) } } );
+  }
+  return "";
+}
+
+std::string VulkanHppGenerator::generateSuccessCheck( std::vector<std::string> const & successCodes ) const
+{
+  // Builds the boolean expression that tests 'result' against all listed success codes; a single
+  // code yields a plain comparison, multiple codes an or-chain of parenthesized comparisons.
+  assert( !successCodes.empty() );
+  std::string check = "result == " + generateSuccessCode( successCodes[0], m_tags );
+  if ( successCodes.size() > 1 )
+  {
+    check = "( " + check + " )";
+    for ( size_t idx = 1; idx < successCodes.size(); ++idx )
+    {
+      check += "|| ( result == " + generateSuccessCode( successCodes[idx], m_tags ) + " )";
+    }
+  }
+  return check;
+}
+
+std::string VulkanHppGenerator::generateSuccessCodeList( std::vector<std::string> const & successCodes, bool enumerating ) const
+{
+  // Builds the ", { eA, eB, ... }" argument listing all success codes; empty when there is at
+  // most one success code or when the command is an enumerating one.
+  std::string codeList;
+  if ( !enumerating && ( successCodes.size() > 1 ) )
+  {
+    codeList = ", { " + generateSuccessCode( successCodes[0], m_tags );
+    for ( size_t idx = 1; idx < successCodes.size(); ++idx )
+    {
+      codeList += ", " + generateSuccessCode( successCodes[idx], m_tags );
+    }
+    codeList += " }";
+  }
+  return codeList;
+}
+
+std::string VulkanHppGenerator::generateThrowResultException() const
+{
+  // Generates the throwResultException() helper that maps each VK_ERROR_* value of VkResult to
+  // the corresponding exception type; any other result falls through to a generic SystemError.
+  auto enumIt = m_enums.find( "VkResult" );
+  assert( enumIt != m_enums.end() );  // VkResult is a mandatory part of the spec
+
+  std::string cases;
+  for ( auto const & value : enumIt->second.values )
+  {
+    if ( value.name.starts_with( "VK_ERROR" ) )
+    {
+      auto [enter, leave]   = generateProtection( getProtect( value ) );
+      std::string valueName = generateEnumValueName( enumIt->first, value.name, false, m_tags );
+      cases += enter + "      case Result::" + valueName + ": throw " + stripPrefix( valueName, "eError" ) + "Error( message );\n" + leave;
+    }
+  }
+  assert( !cases.empty() );  // VkResult always holds VK_ERROR_* values
+  if ( !cases.empty() )
+  {
+    cases.pop_back();  // remove last newline; pop_back on an empty string would be undefined behavior
+  }
+
+  const std::string throwTemplate = R"(
+  namespace
+  {
+    [[noreturn]] void throwResultException( Result result, char const * message )
+    {
+      switch ( result )
+      {
+${cases}
+        default: throw SystemError( make_error_code( result ) );
+      }
+    }
+  })";
+  return replaceWithMap( throwTemplate, { { "cases", cases } } );
+}
+
+std::string VulkanHppGenerator::generateTypenameCheck( std::vector<size_t> const &               returnParams,
+                                                       std::map<size_t, VectorParamData> const & vectorParams,
+                                                       bool                                      definition,
+                                                       std::vector<std::string> const &          dataTypes,
+                                                       CommandFlavourFlags                       flavourFlags ) const
+{
+  // For the withAllocator (non-singular) command flavour, generates the extra template
+  // parameters "typename B<i>" for each vector return parameter, together with an
+  // std::enable_if SFINAE check that each B<i>'s value_type matches the element type. In the
+  // declaration (definition == false) the B<i> parameters get the corresponding allocator as
+  // default, and the enable_if parameter gets a "= 0" default.
+  std::string typenameCheck, enableIf;
+  if ( !( flavourFlags & CommandFlavourFlagBits::singular ) && ( flavourFlags & CommandFlavourFlagBits::withAllocator ) )
+  {
+    for ( size_t i = 0; i < returnParams.size(); ++i )
+    {
+      if ( vectorParams.find( returnParams[i] ) != vectorParams.end() )
+      {
+        // chained commands return StructureChains; otherwise the element type comes from dataTypes
+        std::string elementType = ( flavourFlags & CommandFlavourFlagBits::chained ) ? "StructureChain" : stripPrefix( dataTypes[i], "VULKAN_HPP_NAMESPACE::" );
+        std::string extendedElementType = elementType;
+        if ( flavourFlags & CommandFlavourFlagBits::unique )
+        {
+          extendedElementType = "UniqueHandle<" + elementType + ", Dispatch>";
+        }
+        std::string index = std::to_string( i );
+        if ( definition )
+        {
+          typenameCheck += ", typename B" + index;
+        }
+        else
+        {
+          typenameCheck += ", typename B" + index + " = " + startUpperCase( elementType ) + "Allocator";
+        }
+        enableIf += enableIf.empty() ? ", typename std::enable_if<" : " && ";
+        enableIf += "std::is_same<typename B" + index + "::value_type, " + extendedElementType + ">::value";
+      }
+    }
+    // with these flavour flags there must be at least one vector return parameter; the runtime
+    // check below keeps release builds safe nonetheless
+    assert( !typenameCheck.empty() );
+    if ( !typenameCheck.empty() )
+    {
+      typenameCheck += enableIf + ", int>::type";
+      if ( !definition )
+      {
+        typenameCheck += " = 0";
+      }
+    }
+  }
+  return typenameCheck;
+}
+
+std::string VulkanHppGenerator::generateUnion( std::pair<std::string, StructureData> const & structure ) const
+{
+  // Generates the complete C++ wrapper for a Vulkan union type: one constructor per distinct
+  // member type, per-member setters, conversion operators to the native Vk type, and the member
+  // variables (guarded by unrestricted-union support where Vk types are involved).
+  auto [enter, leave] =
+    generateProtection( m_structureAliasesInverse.find( structure.first ) == m_structureAliasesInverse.end() ? getProtectFromType( structure.first ) : "" );
+  std::string unionName = stripPrefix( structure.first, "Vk" );
+
+  bool               firstMember = true;
+  std::set<TypeInfo> listedTypes;  // create just one constructor per different type !
+  std::string        constructors;
+  for ( auto memberIt = structure.second.members.begin(); memberIt != structure.second.members.end(); ++memberIt )
+  {
+    if ( listedTypes.insert( memberIt->type ).second )
+    {
+      // VkBool32 is aliased to uint32_t. Don't create a VkBool32 constructor if the union also contains a
+      // uint32_t constructor.
+      if ( memberIt->type.type == "VkBool32" )
+      {
+        if ( findStructMemberItByType( "uint32_t", structure.second.members ) != structure.second.members.end() )
+        {
+          continue;
+        }
+      }
+
+      // if several members share this type, the constructor argument is named after the type,
+      // not after any single member
+      bool multipleType =
+        ( std::find_if( std::next( memberIt ),
+                        structure.second.members.end(),
+                        [memberIt]( MemberData const & member ) { return member.type == memberIt->type; } ) != structure.second.members.end() );
+      std::string memberType = ( memberIt->arraySizes.empty() )
+                               ? memberIt->type.compose( "VULKAN_HPP_NAMESPACE" )
+                               : ( "const " + generateStandardArray( memberIt->type.compose( "VULKAN_HPP_NAMESPACE" ), memberIt->arraySizes ) + "&" );
+
+      // In a majority of cases this can be constexpr in C++11 as well, however, determining when exactly
+      // that is the case is a lot more involved and probably not worth it.
+      static const std::string constructorTemplate = R"(
+    VULKAN_HPP_CONSTEXPR_14 ${unionName}( ${memberType} ${argumentName}_${defaultAssignment} )
+      : ${memberName}( ${argumentName}_ )
+    {})";
+
+      // only the very first constructor gets a defaulted argument, making it the default constructor
+      constructors += ( firstMember ? "" : "\n" ) + replaceWithMap( constructorTemplate,
+                                                                    { { "argumentName", multipleType ? generateName( memberIt->type ) : memberIt->name },
+                                                                      { "defaultAssignment", firstMember ? " = {}" : "" },
+                                                                      { "memberName", memberIt->name },
+                                                                      { "memberType", memberType },
+                                                                      { "unionName", stripPrefix( structure.first, "Vk" ) } } );
+      firstMember = false;
+
+      if ( !memberIt->arraySizes.empty() )
+      {
+        // array members additionally get an element-wise constructor
+        assert( !multipleType );
+        assert( memberIt->arraySizes.size() == 1 );
+        int size = std::stoi( memberIt->arraySizes[0] );
+        assert( std::to_string( size ) == memberIt->arraySizes[0] );
+        std::string arguments, callArguments;
+        bool        firstArgument = true;
+        for ( int i = 0; i < size; i++ )
+        {
+          if ( !firstArgument )
+          {
+            arguments += ", ";
+            callArguments += ", ";
+          }
+          std::string argumentIndex = std::to_string( i );
+          arguments += memberIt->type.type + " " + memberIt->name + "_" + argumentIndex;
+          callArguments += memberIt->name + "_" + argumentIndex;
+          firstArgument = false;
+        }
+
+        static const std::string constructorBySequenceTemplate = R"(
+    VULKAN_HPP_CONSTEXPR ${unionName}( ${arguments} )
+      : ${memberName}{ { { ${callArguments} } } }
+    {})";
+
+        constructors += "\n" + replaceWithMap( constructorBySequenceTemplate,
+                                               { { "arguments", arguments },
+                                                 { "callArguments", callArguments },
+                                                 { "memberName", memberIt->name },
+                                                 { "unionName", stripPrefix( structure.first, "Vk" ) } } );
+      }
+    }
+  }
+
+  // one setter per union element
+  std::string setters;
+  for ( size_t i = 0; i < structure.second.members.size(); i++ )
+  {
+    setters += generateStructSetter( stripPrefix( structure.first, "Vk" ), structure.second.members, i );
+  }
+  // filter out leading and trailing newline
+  setters = setters.substr( 1, setters.length() - 2 );
+
+  // the union member variables
+  std::string members;
+  // if there's at least one Vk... type in this union, check for unrestricted unions support
+  bool needsUnrestrictedUnions =
+    ( std::find_if( structure.second.members.begin(),
+                    structure.second.members.end(),
+                    []( MemberData const & member ) { return member.type.type.starts_with( "Vk" ); } ) != structure.second.members.end() );
+  if ( needsUnrestrictedUnions )
+  {
+    members += "#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS\n";
+  }
+  for ( auto const & member : structure.second.members )
+  {
+    // wrapped member declarations (used directly, or inside the unrestricted-unions branch)
+    static const std::string memberTemplate = R"(    ${memberType} ${memberName};
+)";
+    members +=
+      replaceWithMap( memberTemplate,
+                      { { "memberName", member.name },
+                        { "memberType",
+                          member.arraySizes.empty() ? member.type.compose( "VULKAN_HPP_NAMESPACE" )
+                                                    : generateStandardArrayWrapper( member.type.compose( "VULKAN_HPP_NAMESPACE" ), member.arraySizes ) } } );
+  }
+  if ( needsUnrestrictedUnions )
+  {
+    // fallback branch: plain C types with C array sizes where unrestricted unions are unavailable
+    members += "#else\n";
+    for ( auto const & member : structure.second.members )
+    {
+      static const std::string memberTemplate = R"(    ${memberType} ${memberName}${arraySizes};
+)";
+      members += replaceWithMap(
+        memberTemplate,
+        { { "arraySizes", generateCArraySizes( member.arraySizes ) }, { "memberName", member.name }, { "memberType", member.type.compose( "" ) } } );
+    }
+    members += "#endif  /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/\n";
+  }
+
+  static const std::string unionTemplate = R"(
+${enter}  union ${unionName}
+  {
+    using NativeType = Vk${unionName};
+#if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )
+${constructors}
+#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/
+
+#if !defined( VULKAN_HPP_NO_UNION_SETTERS )
+${setters}
+#endif /*VULKAN_HPP_NO_UNION_SETTERS*/
+
+    operator Vk${unionName} const &() const
+    {
+      return *reinterpret_cast<const Vk${unionName}*>( this );
+    }
+
+    operator Vk${unionName} &()
+    {
+      return *reinterpret_cast<Vk${unionName}*>( this );
+    }
+
+${members}
+  };
+${leave})";
+
+  return replaceWithMap(
+    unionTemplate,
+    { { "constructors", constructors }, { "enter", enter }, { "leave", leave }, { "members", members }, { "setters", setters }, { "unionName", unionName } } );
+}
+
+std::string VulkanHppGenerator::generateUniqueTypes( std::string const & parentType, std::set<std::string> const & childrenTypes ) const
+{
+  // Generates the UniqueHandleTraits specializations and Unique<Type> aliases for all handle
+  // types owned by parentType, wrapped in the VULKAN_HPP_NO_SMART_HANDLE guard. An empty
+  // parentType means the handles have no parent (e.g. VkInstance).
+  std::string childrenTraits;
+  for ( auto const & childType : childrenTypes )
+  {
+    auto handleIt = m_handles.find( childType );
+    assert( handleIt != m_handles.end() );
+
+    std::string type = stripPrefix( childType, "Vk" );
+
+    auto [enter, leave] = generateProtection( handleIt->second.alias.empty() ? getProtectFromType( handleIt->first ) : "" );
+
+    // aliased handles additionally get a Unique<Alias> alias
+    std::string aliasHandle;
+    if ( !handleIt->second.alias.empty() )
+    {
+      static const std::string aliasHandleTemplate =
+        R"(  using Unique${aliasType} = UniqueHandle<${type}, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+)";
+
+      aliasHandle += replaceWithMap( aliasHandleTemplate, { { "aliasType", stripPrefix( handleIt->second.alias, "Vk" ) }, { "type", type } } );
+    }
+
+    static const std::string traitsTemplate = R"(${enter}  template <typename Dispatch>
+  class UniqueHandleTraits<${type}, Dispatch>
+  {
+  public:
+    using deleter = ${deleterType}${deleterAction}<${deleterParent}${deleterPool}, Dispatch>;
+  };
+  using Unique${type} = UniqueHandle<${type}, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+${aliasHandle}${leave})";
+
+    // the deleter is derived from the handle's delete command ("vkFree..." vs "vkDestroy...")
+    // and whether deletion goes through a pool
+    childrenTraits += replaceWithMap( traitsTemplate,
+                                      { { "aliasHandle", aliasHandle },
+                                        { "deleterAction", ( handleIt->second.deleteCommand.substr( 2, 4 ) == "Free" ) ? "Free" : "Destroy" },
+                                        { "deleterParent", parentType.empty() ? "NoParent" : stripPrefix( parentType, "Vk" ) },
+                                        { "deleterPool", handleIt->second.deletePool.empty() ? "" : ", " + stripPrefix( handleIt->second.deletePool, "Vk" ) },
+                                        { "deleterType", handleIt->second.deletePool.empty() ? "Object" : "Pool" },
+                                        { "enter", enter },
+                                        { "leave", leave },
+                                        { "type", type } } );
+  }
+
+  static const std::string uniqueTypeTemplate = R"(
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+${parentClass}${childrenTraits}#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+)";
+
+  return replaceWithMap(
+    uniqueTypeTemplate,
+    { { "childrenTraits", childrenTraits }, { "parentClass", parentType.empty() ? "" : ( "  class " + stripPrefix( parentType, "Vk" ) + ";\n" ) } } );
+}
+
+std::string VulkanHppGenerator::generateVectorSizeCheck( std::string const &                           name,
+                                                         CommandData const &                           commandData,
+                                                         size_t                                        initialSkipCount,
+                                                         std::map<size_t, std::vector<size_t>> const & countToVectorMap,
+                                                         std::set<size_t> const &                      skippedParams,
+                                                         bool                                          onlyThrows ) const
+{
+  std::string const assertTemplate = "    VULKAN_HPP_ASSERT( ${zeroSizeCheck}${firstVectorName}.size() == ${secondVectorName}.size() );";
+  std::string const assertTemplateVoid =
+    "    VULKAN_HPP_ASSERT( ${zeroSizeCheck}${firstVectorName}.size() * sizeof( ${firstDataType} ) == ${secondVectorName}.size() * sizeof( ${secondDataType} ) );";
+  std::string const assertTemplateByLen = "    VULKAN_HPP_ASSERT( ${vectorName}.size() == ${sizeValue} );";
+  std::string const throwTemplate =
+    R"#(    if ( ${zeroSizeCheck}${firstVectorName}.size() != ${secondVectorName}.size() )
+  {
+    throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::${className}::${commandName}: ${firstVectorName}.size() != ${secondVectorName}.size()" );
+  })#";
+  std::string const throwTemplateVoid =
+    R"#(    if ( ${zeroSizeCheck}${firstVectorName}.size() * sizeof( ${firstDataType} ) != ${secondVectorName}.size() * sizeof( ${secondDataType} ) )
+  {
+    throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::${className}::${commandName}: ${firstVectorName}.size() * sizeof( ${firstDataType} ) != ${secondVectorName}.size() * sizeof( ${secondDataType} )" );
+  })#";
+  std::string const throwTemplateByLen = R"#(    if ( ${vectorName}.size() != ${sizeValue} )
+    {
+      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::${className}::${commandName}: ${vectorName}.size() != ${sizeValue}" );
+    })#";
+
+  std::string className   = stripPrefix( commandData.params[initialSkipCount - 1].type.type, "Vk" );
+  std::string commandName = generateCommandName( name, commandData.params, initialSkipCount, m_tags );
+
+  std::string assertions, throws;
+  for ( auto const & cvm : countToVectorMap )
+  {
+    size_t      defaultStartIndex = determineDefaultStartIndex( commandData.params, skippedParams );
+    std::string firstVectorName   = startLowerCase( stripPrefix( commandData.params[cvm.second[0]].name, "p" ) );
+
+    if ( cvm.second.size() == 1 )
+    {
+      assert( isLenByStructMember( commandData.params[cvm.second[0]].len, commandData.params[cvm.first] ) );
+
+      std::vector<std::string> lenParts = tokenize( commandData.params[cvm.second[0]].len, "->" );
+      assert( lenParts.size() == 2 );
+      std::string sizeValue = startLowerCase( stripPrefix( lenParts[0], "p" ) ) + "." + lenParts[1];
+      assertions += replaceWithMap( assertTemplateByLen, { { "sizeValue", sizeValue }, { "vectorName", firstVectorName } } );
+      throws += replaceWithMap( throwTemplateByLen,
+                                { { "className", className }, { "commandName", commandName }, { "sizeValue", sizeValue }, { "vectorName", firstVectorName } } );
+    }
+    else
+    {
+      for ( size_t i = 1; i < cvm.second.size(); i++ )
+      {
+        std::string secondVectorName  = startLowerCase( stripPrefix( commandData.params[cvm.second[i]].name, "p" ) );
+        bool        withZeroSizeCheck = commandData.params[cvm.second[i]].optional && ( defaultStartIndex <= cvm.second[i] );
+        if ( commandData.params[cvm.second[0]].type.type == "void" )
+        {
+          assert( commandData.params[cvm.second[i]].type.type == "void" );
+          std::string firstDataType  = stripPrefix( commandData.params[cvm.second[0]].name, "p" ) + "Type";
+          std::string secondDataType = stripPrefix( commandData.params[cvm.second[i]].name, "p" ) + "Type";
+          assertions += replaceWithMap( assertTemplateVoid,
+                                        { { "firstDataType", firstDataType },
+                                          { "firstVectorName", firstVectorName },
+                                          { "secondDataType", secondDataType },
+                                          { "secondVectorName", secondVectorName },
+                                          { "zeroSizeCheck", withZeroSizeCheck ? ( secondVectorName + ".empty() || " ) : "" } } );
+          throws += replaceWithMap( throwTemplateVoid,
+                                    { { "firstDataType", firstDataType },
+                                      { "firstVectorName", firstVectorName },
+                                      { "className", className },
+                                      { "commandName", commandName },
+                                      { "secondDataType", secondDataType },
+                                      { "secondVectorName", secondVectorName },
+                                      { "zeroSizeCheck", withZeroSizeCheck ? ( "!" + secondVectorName + ".empty() && " ) : "" } } );
+        }
+        else
+        {
+          assertions += replaceWithMap( assertTemplate,
+                                        { { "firstVectorName", firstVectorName },
+                                          { "secondVectorName", secondVectorName },
+                                          { "zeroSizeCheck", withZeroSizeCheck ? ( secondVectorName + ".empty() || " ) : "" } } );
+          throws += replaceWithMap( throwTemplate,
+                                    { { "firstVectorName", firstVectorName },
+                                      { "className", className },
+                                      { "commandName", commandName },
+                                      { "secondVectorName", secondVectorName },
+                                      { "zeroSizeCheck", withZeroSizeCheck ? ( "!" + secondVectorName + ".empty() && " ) : "" } } );
+        }
+        if ( i + 1 < cvm.second.size() )
+        {
+          assertions += "\n";
+          throws += "\n";
+        }
+      }
+    }
+  }
+
+  std::string const sizeCheckTemplate =
+    R"(#ifdef VULKAN_HPP_NO_EXCEPTIONS
+${assertions}
+#else
+${throws}
+#endif  /*VULKAN_HPP_NO_EXCEPTIONS*/)";
+
+  return onlyThrows ? throws : replaceWithMap( sizeCheckTemplate, { { "assertions", assertions }, { "throws", throws } } );
+}
+
+std::pair<std::string, std::string> VulkanHppGenerator::getParentTypeAndName( std::pair<std::string, HandleData> const & handle ) const
+{
+  // Determine the type and variable name of the parent of the given handle: VkInstance is rooted in
+  // the "Context"/"context" pair; any other handle derives its parent from the first handle-typed
+  // parameter of its first listed constructor.
+  std::string parentType, parentName;
+  if ( handle.first == "VkInstance" )
+  {
+    parentType = "Context";
+    parentName = "context";
+  }
+  else
+  {
+    // some constructors list a grandparent handle first; skip it when skipLeadingGrandParent says so
+    bool skip = skipLeadingGrandParent( handle );
+    assert( !handle.second.constructorIts.empty() && ( ( skip ? 1u : 0u ) < handle.second.constructorIts.front()->second.params.size() ) );
+    auto const & param = handle.second.constructorIts.front()->second.params[skip ? 1 : 0];
+    // the parent parameter must be a handle passed by value
+    assert( isHandleType( param.type.type ) && param.type.isValue() );
+    parentType = stripPrefix( param.type.type, "Vk" );
+    parentName = param.name;
+  }
+  return std::make_pair( parentType, parentName );
+}
+
+std::string VulkanHppGenerator::getPlatform( std::string const & title ) const
+{
+  // Core features carry no platform; only extensions may be platform-specific.
+  if ( m_features.find( title ) != m_features.end() )
+  {
+    return "";
+  }
+  auto extensionIt = m_extensions.find( title );
+  assert( extensionIt != m_extensions.end() );
+  return extensionIt->second.platform;
+}
+
+std::pair<std::string, std::string> VulkanHppGenerator::getPoolTypeAndName( std::string const & type ) const
+{
+  // For the given struct type, find its single member whose name contains "Pool" and return that
+  // member's type and name.
+  auto structIt = m_structures.find( type );
+  assert( structIt != m_structures.end() );
+  auto memberIt = std::find_if(
+    structIt->second.members.begin(), structIt->second.members.end(), []( MemberData const & md ) { return md.name.find( "Pool" ) != std::string::npos; } );
+  assert( memberIt != structIt->second.members.end() );
+  // debug-only: there must be exactly one such "Pool" member
+  assert( std::find_if( std::next( memberIt ),
+                        structIt->second.members.end(),
+                        []( MemberData const & md ) { return md.name.find( "Pool" ) != std::string::npos; } ) == structIt->second.members.end() );
+  return std::make_pair( memberIt->type.type, memberIt->name );
+}
+
+std::string VulkanHppGenerator::getProtect( EnumValueData const & evd ) const
+{
+  // Return the protect (preprocessor guard) of an enum value: its explicit protect if set, otherwise
+  // the protect derived from the title of the extension introducing it; if both exist they must agree.
+  assert( evd.protect.empty() || ( evd.protect == getProtectFromTitle( evd.extension ) ) );
+  return evd.protect.empty() ? getProtectFromTitle( evd.extension ) : evd.protect;
+}
+
+std::string VulkanHppGenerator::getProtectFromPlatform( std::string const & platform ) const
+{
+  // Look up the protect define registered for the given platform; unknown platforms map to "".
+  auto platformIt = m_platforms.find( platform );
+  if ( platformIt == m_platforms.end() )
+  {
+    return "";
+  }
+  return platformIt->second.protect;
+}
+
+std::string VulkanHppGenerator::getProtectFromTitle( std::string const & title ) const
+{
+  // Core features are never guarded by a platform define.
+  if ( m_features.find( title ) != m_features.end() )
+  {
+    return "";
+  }
+  // For extensions, derive the protect from the extension's platform; an unknown title yields "".
+  auto extensionIt = m_extensions.find( title );
+  return ( extensionIt == m_extensions.end() ) ? "" : getProtectFromPlatform( extensionIt->second.platform );
+}
+
+std::string VulkanHppGenerator::getProtectFromType( std::string const & type ) const
+{
+  // Derive the protect define of a type from the feature/extension it is referenced in.
+  auto typeIt = m_types.find( type );
+  assert( typeIt != m_types.end() );
+  return getProtectFromTitle( typeIt->second.referencedIn );
+}
+
+std::string VulkanHppGenerator::getVectorSize( std::vector<ParamData> const &            params,
+                                               std::map<size_t, VectorParamData> const & vectorParams,
+                                               size_t                                    returnParam,
+                                               std::string const &                       returnParamType,
+                                               std::set<size_t> const &                  templatedParams ) const
+{
+  // Build the size expression for the vector returned via <returnParam>, driven by that param's "len"
+  // spec: either a plain length parameter ("count") or a struct-member reference ("pInfo->count").
+  std::string              vectorSize;
+  std::vector<std::string> lenParts = tokenize( params[returnParam].len, "->" );
+  switch ( lenParts.size() )
+  {
+    case 1:
+      {
+        // len names a parameter of the command
+        std::string const & len = lenParts[0];
+        size_t              lenIdx =
+          std::distance( params.begin(), std::find_if( params.begin(), params.end(), [&len]( ParamData const & pd ) { return pd.name == len; } ) );
+        assert( lenIdx < params.size() );
+        // look for the len, not being the len of the return param, but of an other vector param
+        auto lenVectorParamIt =
+          std::find_if( vectorParams.begin(),
+                        vectorParams.end(),
+                        [&lenIdx, &returnParam]( auto const & vpi ) { return ( vpi.first != returnParam ) && ( vpi.second.lenParam == lenIdx ); } );
+        if ( lenVectorParamIt == vectorParams.end() )
+        {
+          // the len param does not size another vector: use it directly ...
+          vectorSize = lenParts[0];
+          if ( templatedParams.find( returnParam ) != templatedParams.end() )
+          {
+            // ... divided by the element size for templated params (their len is expressed in bytes,
+            // as implied by this scaling)
+            vectorSize += " / sizeof( " + returnParamType + " )";
+          }
+        }
+        else
+        {
+          // the len param sizes another vector param as well: reuse that vector's size()
+          assert( templatedParams.find( returnParam ) == templatedParams.end() );
+          vectorSize = startLowerCase( stripPrefix( params[lenVectorParamIt->first].name, "p" ) ) + ".size()";
+        }
+      }
+      break;
+    case 2:
+      // len is a struct-member reference "pStruct->member": access via the lowercased struct argument
+      assert( vectorParams.find( returnParam ) != vectorParams.end() );
+      vectorSize = startLowerCase( stripPrefix( lenParts[0], "p" ) ) + "." + lenParts[1];
+      break;
+    default: assert( false ); break;
+  }
+  assert( !vectorSize.empty() );
+  return vectorSize;
+}
+
+bool VulkanHppGenerator::hasLen( std::vector<MemberData> const & members, MemberData const & md ) const
+{
+  // True if member <md> carries a usable "len" spec: not "null-terminated", and either not a known
+  // altLen expression or the special-cased "codeSize / 4". The len must additionally either not name a
+  // sibling member, or name one that is a plain value (not a pointer).
+  if ( !md.len.empty() && !( md.len[0] == "null-terminated" ) && ( ( altLens.find( md.len[0] ) == altLens.end() ) || ( md.len[0] == "codeSize / 4" ) ) )
+  {
+    auto lenIt = findStructMemberIt( md.len.front(), members );
+    return ( lenIt == members.end() ) || lenIt->type.isValue();
+  }
+  return false;
+}
+
+bool VulkanHppGenerator::hasParentHandle( std::string const & handle, std::string const & parent ) const
+{
+  // Walk up the chain of parent handles starting at <handle>; true if <parent> appears anywhere
+  // along the chain. The walk ends at a handle with an empty parent.
+  for ( std::string current = handle; !current.empty(); )
+  {
+    auto handleIt = m_handles.find( current );
+    assert( handleIt != m_handles.end() );
+    if ( handleIt->second.parent == parent )
+    {
+      return true;
+    }
+    current = handleIt->second.parent;
+  }
+  return false;
+}
+
+bool VulkanHppGenerator::isDeviceCommand( CommandData const & commandData ) const
+{
+  // A device-level command is dispatched on a handle whose first parameter is a known handle type
+  // other than VkInstance or VkPhysicalDevice.
+  if ( commandData.handle.empty() || commandData.params.empty() )
+  {
+    return false;
+  }
+  std::string const & firstParamType = commandData.params[0].type.type;
+  return ( m_handles.find( firstParamType ) != m_handles.end() ) && ( firstParamType != "VkInstance" ) && ( firstParamType != "VkPhysicalDevice" );
+}
+
+bool VulkanHppGenerator::isHandleType( std::string const & type ) const
+{
+  // Only "Vk"-prefixed names can be handle types; check the handle map directly first, then by alias.
+  if ( !type.starts_with( "Vk" ) )
+  {
+    return false;
+  }
+  if ( m_handles.find( type ) != m_handles.end() )
+  {
+    return true;
+  }
+  return std::find_if( m_handles.begin(), m_handles.end(), [&type]( std::pair<std::string, HandleData> const & hd ) { return hd.second.alias == type; } ) !=
+         m_handles.end();
+}
+
+bool VulkanHppGenerator::isLenByStructMember( std::string const & name, std::vector<ParamData> const & params ) const
+{
+  // True if <name> has the form "param->member" (or legacy "param::member") where "param" is one of
+  // the given command parameters; in debug builds, verifies the referenced struct has such a member.
+  // check if name specifies a member of a struct
+  std::vector<std::string> nameParts = tokenize( name, "->" );
+  if ( nameParts.size() == 1 )
+  {
+    // older versions of vk.xml used the notation parameter::member
+    nameParts = tokenize( name, "::" );
+  }
+  if ( nameParts.size() == 2 )
+  {
+    auto paramIt = std::find_if( params.begin(), params.end(), [&n = nameParts[0]]( ParamData const & pd ) { return pd.name == n; } );
+    if ( paramIt != params.end() )
+    {
+#if !defined( NDEBUG )
+      auto structureIt = m_structures.find( paramIt->type.type );
+      assert( structureIt != m_structures.end() );
+      assert( isStructMember( nameParts[1], structureIt->second.members ) );
+#endif
+      return true;
+    }
+  }
+  return false;
+}
+
+bool VulkanHppGenerator::isLenByStructMember( std::string const & name, ParamData const & param ) const
+{
+  // True if <name> denotes a member of the struct referenced by <param>, i.e. has the form
+  // "param->member" (or the legacy "param::member" notation used by older versions of vk.xml).
+  std::vector<std::string> parts = tokenize( name, "->" );
+  if ( parts.size() == 1 )
+  {
+    parts = tokenize( name, "::" );
+  }
+  if ( ( parts.size() != 2 ) || ( parts[0] != param.name ) )
+  {
+    return false;
+  }
+#if !defined( NDEBUG )
+  // debug builds verify that the referenced struct actually has such a member
+  auto structureIt = m_structures.find( param.type.type );
+  assert( structureIt != m_structures.end() );
+  assert( isStructMember( parts[1], structureIt->second.members ) );
+#endif
+  return true;
+}
+
+bool VulkanHppGenerator::isMultiSuccessCodeConstructor( std::vector<std::map<std::string, CommandData>::const_iterator> const & constructorIts ) const
+{
+  // A constructor set counts as "multi success code" if its first constructor has more than two
+  // success codes, or exactly two where the second is not VK_INCOMPLETE.
+  bool ok = !constructorIts.empty();
+  if ( ok )
+  {
+    auto constructorIt = constructorIts.begin();
+    ok                 = ( 2 < ( *constructorIt )->second.successCodes.size() ) ||
+         ( ( ( *constructorIt )->second.successCodes.size() == 2 ) && ( ( *constructorIt )->second.successCodes[1] != "VK_INCOMPLETE" ) );
+#if !defined( NDEBUG )
+    // All remaining constructors must agree with the classification of the first one.
+    // Fixed: the previous assert read `ok == A || B`, which parses as `( ok == A ) || B` because
+    // `==` binds tighter than `||`, making the check vacuous whenever B held. The parentheses
+    // below restore the intended `ok == ( A || B )`, mirroring the computation of ok above.
+    for ( constructorIt = std::next( constructorIt ); constructorIt != constructorIts.end(); ++constructorIt )
+    {
+      assert( ok == ( ( 2 < ( *constructorIt )->second.successCodes.size() ) ||
+                      ( ( ( *constructorIt )->second.successCodes.size() == 2 ) && ( ( *constructorIt )->second.successCodes[1] != "VK_INCOMPLETE" ) ) ) );
+    }
+#endif
+  }
+  return ok;
+}
+
+bool VulkanHppGenerator::isParam( std::string const & name, std::vector<ParamData> const & params ) const
+{
+  // True if any of the given command parameters is named <name>.
+  return std::any_of( params.begin(), params.end(), [&name]( ParamData const & pd ) { return pd.name == name; } );
+}
+
+bool VulkanHppGenerator::isStructMember( std::string const & name, std::vector<MemberData> const & memberData ) const
+{
+  // True if <memberData> contains a member named <name>.
+  return findStructMemberIt( name, memberData ) != memberData.end();
+}
+
+bool VulkanHppGenerator::isStructureChainAnchor( std::string const & type ) const
+{
+  // True if <type> (a "Vk"-prefixed struct, possibly an alias) is listed in m_extendedStructs,
+  // i.e. it can act as the anchor of a structure chain.
+  if ( type.starts_with( "Vk" ) )
+  {
+    auto it = m_structures.find( type );
+    if ( it == m_structures.end() )
+    {
+      // resolve a struct alias to the aliased struct
+      auto aliasIt = m_structureAliases.find( type );
+      if ( aliasIt != m_structureAliases.end() )
+      {
+        it = m_structures.find( aliasIt->second.alias );
+      }
+    }
+    if ( it != m_structures.end() )
+    {
+      return m_extendedStructs.find( it->first ) != m_extendedStructs.end();
+    }
+  }
+  return false;
+}
+
+std::pair<bool, std::map<size_t, std::vector<size_t>>> VulkanHppGenerator::needsVectorSizeCheck( std::vector<ParamData> const &            params,
+                                                                                                 std::map<size_t, VectorParamData> const & vectorParams,
+                                                                                                 std::vector<size_t> const &               returnParams,
+                                                                                                 std::set<size_t> const & singularParams ) const
+{
+  // Map each len-parameter index to the (non-returned) vector params it sizes. A size check is needed
+  // when some len sizes more than one vector, or a single vector's len is given by a struct member.
+  std::map<size_t, std::vector<size_t>> countToVectorMap;
+  for ( auto const & vpi : vectorParams )
+  {
+    // skip vectors without a len param, returned vectors, and singular len params (unless the len is
+    // given by a struct member, in which case the check is still required)
+    if ( ( vpi.second.lenParam != INVALID_INDEX ) && ( std::find( returnParams.begin(), returnParams.end(), vpi.first ) == returnParams.end() ) &&
+         ( ( singularParams.find( vpi.second.lenParam ) == singularParams.end() ) ||
+           isLenByStructMember( params[vpi.first].len, params[vpi.second.lenParam] ) ) )
+    {
+      countToVectorMap[vpi.second.lenParam].push_back( vpi.first );
+    }
+  }
+  return std::make_pair( std::find_if( countToVectorMap.begin(),
+                                       countToVectorMap.end(),
+                                       [this, &params]( auto const & cvm ) {
+                                         return ( 1 < cvm.second.size() ) || isLenByStructMember( params[cvm.second[0]].len, params[cvm.first] );
+                                       } ) != countToVectorMap.end(),
+                         countToVectorMap );
+}
+
+void VulkanHppGenerator::readCommands( tinyxml2::XMLElement const * element )
+{
+  // Parse the <commands> section of the registry; its only allowed children are <command> elements.
+  int const line = element->GetLineNum();
+  checkAttributes( line, getAttributes( element ), {}, { { "comment", {} } } );
+
+  std::vector<tinyxml2::XMLElement const *> children = getChildElements( element );
+  checkElements( line, children, { { "command", false } } );
+  for ( auto const & child : children )
+  {
+    assert( child->Value() == std::string( "command" ) );
+    readCommandsCommand( child );
+  }
+}
+
+void VulkanHppGenerator::readCommandsCommand( tinyxml2::XMLElement const * element )
+{
+  // Parse one <command> element: either an alias of an existing command (attributes alias+name only),
+  // or a full command definition with <proto> and <param> children.
+  int                                line       = element->GetLineNum();
+  std::map<std::string, std::string> attributes = getAttributes( element );
+  if ( attributes.find( "alias" ) != attributes.end() )
+  {
+    // for command aliases, create a copy of the aliased command
+    checkAttributes( line,
+                     attributes,
+                     {},
+                     {
+                       { "alias", {} },
+                       { "name", {} },
+                     } );
+    checkElements( line, getChildElements( element ), {} );
+
+    std::string alias, name;
+    for ( auto const & attribute : attributes )
+    {
+      if ( attribute.first == "alias" )
+      {
+        alias = attribute.second;
+      }
+      else if ( attribute.first == "name" )
+      {
+        name = attribute.second;
+        checkForError( name.starts_with( "vk" ), line, "name <" + name + "> should begin with <vk>" );
+      }
+    }
+
+    // register a copy of the aliased command under the new name, remembering its origin
+    auto commandIt = m_commands.find( alias );
+    checkForError( commandIt != m_commands.end(), line, "missing command <" + alias + ">" );
+    CommandData commandData = commandIt->second;
+    commandData.alias       = alias;
+    commandData.xmlLine     = line;
+    addCommand( name, commandData );
+  }
+  else
+  {
+    // full command definition: validate the known attributes and their allowed values
+    checkAttributes( line,
+                     attributes,
+                     {},
+                     { { "cmdbufferlevel", { "primary", "secondary" } },
+                       { "comment", {} },
+                       { "errorcodes", {} },
+                       { "queues", { "compute", "decode", "encode", "graphics", "opticalflow", "sparse_binding", "transfer" } },
+                       { "renderpass", { "both", "inside", "outside" } },
+                       { "successcodes", {} },
+                       { "tasks", { "action", "indirection", "state", "synchronization" } },
+                       { "videocoding", { "both", "inside", "outside" } } } );
+
+    std::vector<tinyxml2::XMLElement const *> children = getChildElements( element );
+    checkElements( line, children, { { "param", false }, { "proto", true } }, { "implicitexternsyncparams" } );
+
+    CommandData commandData( line );
+    for ( auto const & attribute : attributes )
+    {
+      if ( attribute.first == "errorcodes" )
+      {
+        commandData.errorCodes = tokenize( attribute.second, "," );
+        // errorCodes are checked in checkCorrectness after complete reading
+      }
+      else if ( attribute.first == "successcodes" )
+      {
+        commandData.successCodes = tokenize( attribute.second, "," );
+        // successCodes are checked in checkCorrectness after complete reading
+      }
+    }
+
+    // read the prototype (command name and return type) and all parameters
+    std::string name;
+    for ( auto child : children )
+    {
+      std::string value = child->Value();
+      if ( value == "param" )
+      {
+        // result.first is false for params to be skipped (e.g. vulkansc-only)
+        std::pair<bool, ParamData> result = readCommandsCommandParam( child, commandData.params );
+        if ( result.first )
+        {
+          commandData.params.push_back( result.second );
+        }
+      }
+      else if ( value == "proto" )
+      {
+        std::tie( name, commandData.returnType ) = readCommandsCommandProto( child );
+      }
+    }
+    assert( !name.empty() );
+    // error/success codes only make sense for VkResult-returning commands
+    checkForError( ( commandData.returnType == "VkResult" ) || commandData.errorCodes.empty(),
+                   line,
+                   "command <" + name + "> does not return a VkResult but specifies errorcodes" );
+    checkForError( ( commandData.returnType == "VkResult" ) || commandData.successCodes.empty(),
+                   line,
+                   "command <" + name + "> does not return a VkResult but specifies successcodes" );
+    // a stride attribute must name another parameter of this command
+    for ( auto const & param : commandData.params )
+    {
+      checkForError( param.stride.empty() || isParam( param.stride, commandData.params ),
+                     param.xmlLine,
+                     "attribute <stride> holds an unknown value <" + param.stride + ">" );
+    }
+
+    addCommand( name, commandData );
+  }
+}
+
+std::pair<bool, VulkanHppGenerator::ParamData> VulkanHppGenerator::readCommandsCommandParam( tinyxml2::XMLElement const *   element,
+                                                                                             std::vector<ParamData> const & params )
+{
+  // Parse one <param> element of a command. Returns {true, paramData} for a parameter to keep, or
+  // {false, ...} when the parameter is marked api="vulkansc" and has to be skipped.
+  int                                line       = element->GetLineNum();
+  std::map<std::string, std::string> attributes = getAttributes( element );
+  checkAttributes( line,
+                   attributes,
+                   {},
+                   { { "altlen", {} },
+                     { "api", { "vulkan", "vulkansc" } },
+                     { "externsync", {} },
+                     { "len", {} },
+                     { "noautovalidity", { "true" } },
+                     { "objecttype", { "objectType" } },
+                     { "optional", { "false", "true" } },
+                     { "stride", {} },
+                     { "validstructs", {} } } );
+
+  ParamData paramData( line );
+  for ( auto attribute : attributes )
+  {
+    if ( attribute.first == "altlen" )
+    {
+      // altlen takes precedence over len; attributes iterate alphabetically, so altlen is seen first
+      assert( paramData.len.empty() );
+      paramData.len = attribute.second;
+      checkForError( altLens.find( paramData.len ) != altLens.end(), line, "attribute <altlen> holds unknown value <" + paramData.len + ">" );
+    }
+    else if ( attribute.first == "api" )
+    {
+      if ( attribute.second == "vulkansc" )
+      {
+        return std::make_pair( false, paramData );  // skip stuff marked as "vulkansc" !
+      }
+      assert( attribute.second == "vulkan" );
+    }
+    else if ( attribute.first == "len" )
+    {
+      // only honored when no altlen was given for this parameter
+      if ( paramData.len.empty() )
+      {
+        checkForError( ( attribute.second == "null-terminated" ) || isParam( attribute.second, params ) || isLenByStructMember( attribute.second, params ),
+                       line,
+                       "attribute <len> holds an unknown value <" + attribute.second + ">" );
+        paramData.len = attribute.second;
+      }
+    }
+    else if ( attribute.first == "stride" )
+    {
+      // checked against the other params later, in readCommandsCommand
+      paramData.stride = attribute.second;
+    }
+    else if ( attribute.first == "optional" )
+    {
+      paramData.optional = ( attribute.second == "true" );
+    }
+    else if ( attribute.first == "validstructs" )
+    {
+      std::vector<std::string> validStructs = tokenize( attribute.second, "," );
+      for ( auto const & vs : validStructs )
+      {
+        checkForError( m_structures.find( vs ) != m_structures.end(), line, "unknown struct <" + vs + "> listed in attribute <validstructs>" );
+      }
+    }
+  }
+
+  // read the parameter's name and type information from the element's children
+  NameData nameData;
+  std::tie( nameData, paramData.type ) = readNameAndType( element );
+
+  checkForError( m_types.find( paramData.type.type ) != m_types.end(), line, "unknown type <" + paramData.type.type + ">" );
+  checkForError( paramData.type.prefix.empty() || ( paramData.type.prefix == "const" ) || ( paramData.type.prefix == "const struct" ) ||
+                   ( paramData.type.prefix == "struct" ),
+                 line,
+                 "unexpected type prefix <" + paramData.type.prefix + ">" );
+  checkForError( paramData.type.postfix.empty() || ( paramData.type.postfix == "*" ) || ( paramData.type.postfix == "**" ) ||
+                   ( paramData.type.postfix == "* const *" ),
+                 line,
+                 "unexpected type postfix <" + paramData.type.postfix + ">" );
+  // parameter names must be unique within a command
+  checkForError( std::find_if( params.begin(), params.end(), [&name = nameData.name]( ParamData const & pd ) { return pd.name == name; } ) == params.end(),
+                 line,
+                 "command param <" + nameData.name + "> already used" );
+  paramData.name       = nameData.name;
+  paramData.arraySizes = nameData.arraySizes;
+
+  return std::make_pair( true, paramData );
+}
+
+std::pair<std::string, std::string> VulkanHppGenerator::readCommandsCommandProto( tinyxml2::XMLElement const * element )
+{
+  // Parse the <proto> element of a command and return its {name, return type}; the name must start
+  // with "vk", be array-free, not yet registered, and the return type must be known and unadorned.
+  int line = element->GetLineNum();
+  checkAttributes( line, getAttributes( element ), {}, {} );
+
+  auto [nameData, typeInfo] = readNameAndType( element );
+
+  checkForError( nameData.name.starts_with( "vk" ), line, "name <" + nameData.name + "> does not begin with <vk>" );
+  checkForError( nameData.arraySizes.empty(), line, "name <" + nameData.name + "> with unsupported arraySizes" );
+  checkForError( m_types.find( typeInfo.type ) != m_types.end(), line, "unknown type <" + typeInfo.type + ">" );
+  checkForError( typeInfo.prefix.empty(), line, "unexpected type prefix <" + typeInfo.prefix + ">" );
+  checkForError( typeInfo.postfix.empty(), line, "unexpected type postfix <" + typeInfo.postfix + ">" );
+  checkForError( m_commands.find( nameData.name ) == m_commands.end(), line, "command <" + nameData.name + "> already specified" );
+
+  return std::make_pair( nameData.name, typeInfo.type );
+}
+
+void VulkanHppGenerator::readEnums( tinyxml2::XMLElement const * element )
+{
+  // Parse one <enums> block: either the special "API Constants" group (plain constants), or a real
+  // enum/bitmask whose values are attached to a previously declared EnumData entry.
+  int                                line       = element->GetLineNum();
+  std::map<std::string, std::string> attributes = getAttributes( element );
+  checkAttributes( line, attributes, { { "name", {} } }, { { "bitwidth", { "64" } }, { "comment", {} }, { "type", { "bitmask", "enum" } } } );
+  std::vector<tinyxml2::XMLElement const *> children = getChildElements( element );
+
+  std::string bitwidth, name, type;
+  for ( auto const & attribute : attributes )
+  {
+    if ( attribute.first == "bitwidth" )
+    {
+      bitwidth = attribute.second;
+    }
+    else if ( attribute.first == "name" )
+    {
+      name = attribute.second;
+    }
+    else if ( attribute.first == "type" )
+    {
+      type = attribute.second;
+    }
+  }
+  assert( !name.empty() );
+
+  if ( name == "API Constants" )
+  {
+    // this block holds plain constants, not enum values
+    checkElements( line, children, { { "enum", false } }, {} );
+    for ( auto const & child : children )
+    {
+      readEnumsConstant( child );
+    }
+  }
+  else
+  {
+    checkElements( line, children, {}, { "comment", "enum", "unused" } );
+    checkForError( !type.empty(), line, "enum without type" );
+
+    // get the EnumData entry in enum map
+    std::map<std::string, EnumData>::iterator enumIt = m_enums.find( name );
+    if ( enumIt == m_enums.end() )
+    {
+      // the name might be an alias of a registered enum
+      enumIt =
+        std::find_if( m_enums.begin(), m_enums.end(), [&name]( std::pair<std::string, EnumData> const & enumData ) { return enumData.second.alias == name; } );
+    }
+    checkForError( enumIt != m_enums.end(), line, "enum <" + name + "> is not listed as enum in the types section" );
+    checkForError( enumIt->second.values.empty(), line, "enum <" + name + "> already holds values" );
+
+    // mark it as a bitmask, if it is one
+    bool bitmask = ( type == "bitmask" );
+    if ( bitmask )
+    {
+      checkForError( name.find( "FlagBits" ) != std::string::npos, line, "bitmask <" + name + "> does not contain <FlagBits>" );
+    }
+    enumIt->second.isBitmask = bitmask;
+    enumIt->second.bitwidth  = bitwidth;
+
+    // read the names of the enum values
+    for ( auto child : children )
+    {
+      std::string value = child->Value();
+      if ( value == "comment" )
+      {
+        readComment( child );
+      }
+      else if ( value == "enum" )
+      {
+        readEnumsEnum( child, enumIt );
+      }
+    }
+  }
+}
+
+void VulkanHppGenerator::readEnumsConstant( tinyxml2::XMLElement const * element )
+{
+  // Parse one <enum> element of the "API Constants" block: a named constant that carries either a
+  // literal value or an alias to an already-registered constant (exactly one of the two).
+  int                                line       = element->GetLineNum();
+  std::map<std::string, std::string> attributes = getAttributes( element );
+  checkAttributes( line, attributes, { { "name", {} } }, { { "alias", {} }, { "comment", {} }, { "type", {} }, { "value", {} } } );
+  checkElements( line, getChildElements( element ), {} );
+
+  std::string alias, name, value;
+  for ( auto const & attribute : attributes )
+  {
+    if ( attribute.first == "alias" )
+    {
+      checkForError( m_constants.find( attribute.second ) != m_constants.end(), line, "unknown enum constant alias <" + attribute.second + ">" );
+      alias = attribute.second;
+    }
+    else if ( attribute.first == "name" )
+    {
+      checkForError( m_constants.find( attribute.second ) == m_constants.end(), line, "already specified enum constant <" + attribute.second + ">" );
+      name = attribute.second;
+    }
+    else if ( attribute.first == "value" )
+    {
+      checkForError( !attribute.second.empty(), line, "value of enum constant is empty" );
+      value = attribute.second;
+    }
+  }
+  // exactly one of alias / value must be set
+  checkForError( alias.empty() != value.empty(), line, "for enum <" + name + "> either alias or value need to be specified" );
+  // an aliased constant resolves to the aliased constant's (already registered) value
+  m_constants[name] = alias.empty() ? value : m_constants[alias];
+}
+
+void VulkanHppGenerator::readEnumsEnum( tinyxml2::XMLElement const * element, std::map<std::string, EnumData>::iterator enumIt )
+{
+  // Parse one <enum> child of an <enums> block: either an alias of another value of this enum, or a
+  // plain value specified via "bitpos" or "value" (exactly one of the two).
+  int                                line       = element->GetLineNum();
+  std::map<std::string, std::string> attributes = getAttributes( element );
+  if ( attributes.find( "alias" ) != attributes.end() )
+  {
+    checkAttributes( line, attributes, { { "alias", {} }, { "name", {} } }, { { "api", { "vulkan", "vulkansc" } }, { "comment", {} } } );
+    checkElements( line, getChildElements( element ), {} );
+
+    // NOTE(review): bitpos and value are declared but never assigned in this branch
+    std::string alias, bitpos, name, value;
+    for ( auto const & attribute : attributes )
+    {
+      if ( attribute.first == "alias" )
+      {
+        alias = attribute.second;
+      }
+      else if ( attribute.first == "api" )
+      {
+        if ( attribute.second == "vulkansc" )
+        {
+          return;  // skip stuff marked as "vulkansc" !
+        }
+        assert( attribute.second == "vulkan" );
+      }
+      else if ( attribute.first == "name" )
+      {
+        name = attribute.second;
+      }
+    }
+    assert( !name.empty() );
+
+    enumIt->second.addEnumAlias( line, name, alias );
+  }
+  else
+  {
+    checkAttributes( line, attributes, { { "name", {} } }, { { "bitpos", {} }, { "comment", {} }, { "value", {} } } );
+    checkElements( line, getChildElements( element ), {} );
+
+    // NOTE(review): alias is unused here, and protect is never assigned (always passed empty below)
+    std::string alias, bitpos, name, protect, value;
+    for ( auto const & attribute : attributes )
+    {
+      if ( attribute.first == "bitpos" )
+      {
+        bitpos = attribute.second;
+      }
+      else if ( attribute.first == "name" )
+      {
+        name = attribute.second;
+      }
+      else if ( attribute.first == "value" )
+      {
+        value = attribute.second;
+      }
+    }
+
+    // the value name must start with the prefix derived from the enum's name
+    std::string prefix = generateEnumSuffixes( enumIt->first, enumIt->second.isBitmask, m_tags ).first;
+    checkForError( name.starts_with( prefix ), line, "encountered enum value <" + name + "> that does not begin with expected prefix <" + prefix + ">" );
+
+    // exactly one of bitpos / value must be given
+    checkForError( bitpos.empty() ^ value.empty(), line, "invalid set of attributes for enum <" + name + ">" );
+    // NOTE(review): the parsed value/bitpos string is not forwarded here (last argument is "") —
+    // confirm addEnumValue intentionally ignores the literal value at this stage
+    enumIt->second.addEnumValue( line, name, protect, !bitpos.empty(), "" );
+  }
+}
+
+std::string VulkanHppGenerator::readComment( tinyxml2::XMLElement const * element )
+{
+  // Parse a <comment> element (no attributes, no children) and return its text content.
+  int line = element->GetLineNum();
+  checkAttributes( line, getAttributes( element ), {}, {} );
+  checkElements( line, getChildElements( element ), {} );
+
+  // tinyxml2::XMLElement::GetText() returns nullptr for an element without a text child; constructing
+  // the returned std::string from nullptr is undefined behavior, so map that case to an empty string.
+  char const * text = element->GetText();
+  return text ? text : "";
+}
+
+void VulkanHppGenerator::readExtensions( tinyxml2::XMLElement const * element )
+{
+  // Parse the <extensions> section: a required "comment" attribute and <extension> children only.
+  int const line = element->GetLineNum();
+  checkAttributes( line, getAttributes( element ), { { "comment", {} } }, {} );
+
+  std::vector<tinyxml2::XMLElement const *> children = getChildElements( element );
+  checkElements( line, children, { { "extension", false } } );
+  for ( auto const & child : children )
+  {
+    assert( child->Value() == std::string( "extension" ) );
+    readExtensionsExtension( child );
+  }
+}
+
+// Parses one <extension> element of the registry's <extensions> block: validates attributes and
+// children, records supported extensions in m_extensions (including tag and depends checks), and
+// dispatches each <require> child — to the "skipped" path when the extension is not supported.
+void VulkanHppGenerator::readExtensionsExtension( tinyxml2::XMLElement const * element )
+{
+  int                                       line       = element->GetLineNum();
+  std::map<std::string, std::string>        attributes = getAttributes( element );
+  std::vector<tinyxml2::XMLElement const *> children   = getChildElements( element );
+
+  checkAttributes( line,
+                   attributes,
+                   { { "name", {} }, { "number", {} }, { "supported", { "disabled", "vulkan", "vulkansc" } } },
+                   { { "author", {} },
+                     { "comment", {} },
+                     { "contact", {} },
+                     { "depends", {} },
+                     { "deprecatedby", {} },
+                     { "obsoletedby", {} },
+                     { "platform", {} },
+                     { "promotedto", {} },
+                     { "provisional", { "true" } },
+                     { "requires", {} },
+                     { "requiresCore", {} },
+                     { "sortorder", { "1" } },
+                     { "specialuse", { "cadsupport", "d3demulation", "debugging", "devtools", "glemulation" } },
+                     { "type", { "device", "instance" } } } );
+  checkElements( line, children, { { "require", false } } );
+
+  std::string              deprecatedBy, name, number, obsoletedBy, platform, promotedTo;
+  std::vector<std::string> depends;
+  bool                     supported = false;
+  for ( auto const & attribute : attributes )
+  {
+    if ( attribute.first == "deprecatedby" )
+    {
+      deprecatedBy = attribute.second;
+    }
+    else if ( attribute.first == "name" )
+    {
+      name = attribute.second;
+      checkForError( m_extensions.find( name ) == m_extensions.end(), line, "already encountered extension <" + name + ">" );
+    }
+    else if ( attribute.first == "number" )
+    {
+      number = attribute.second;
+    }
+    else if ( attribute.first == "obsoletedby" )
+    {
+      obsoletedBy = attribute.second;
+    }
+    else if ( attribute.first == "platform" )
+    {
+      platform = attribute.second;
+      checkForError( m_platforms.find( platform ) != m_platforms.end(), line, "unknown platform <" + platform + ">" );
+    }
+    else if ( attribute.first == "provisional" )
+    {
+      if ( platform.empty() )
+      {
+        // for now, having the attribute provisional="true" implies attribute platform="provisional" to get
+        // stuff protected by VK_ENABLE_BETA_EXTENSIONS
+        platform = "provisional";
+      }
+      checkForError( platform == "provisional",
+                     line,
+                     "while attribute <provisional> is set to \"true\", attribute <platform> is not set to \"provisional\" but to \"" + platform + "\"" );
+    }
+    else if ( ( attribute.first == "depends" ) || ( attribute.first == "requires" ) )
+    {
+      // we don't care about the logical implications of ',' and '+' here, we're just interested to get the depends strings
+      depends = tokenizeAny( attribute.second, ",+" );
+    }
+    else if ( attribute.first == "requiresCore" )
+    {
+      // the referenced core version must be one of the features read before
+      std::string const & requiresCore = attribute.second;
+      checkForError( std::find_if( m_features.begin(),
+                                   m_features.end(),
+                                   [&requiresCore]( std::pair<std::string, FeatureData> const & feature )
+                                   { return feature.second.number == requiresCore; } ) != m_features.end(),
+                     line,
+                     "unknown feature number <" + attribute.second + ">" );
+    }
+    else if ( attribute.first == "supported" )
+    {
+      // only the "vulkan" API counts here; extensions supported by "vulkansc" only are treated as unsupported
+      std::vector<std::string> api = tokenize( attribute.second, "," );
+      supported                    = ( std::find( api.begin(), api.end(), "vulkan" ) != api.end() );
+    }
+  }
+
+  // only supported extensions are registered; unsupported ones still get their <require> children skipped below
+  auto extensionIt = m_extensions.end();
+  if ( supported )
+  {
+    extensionIt = m_extensions.insert( std::make_pair( name, ExtensionData( line, deprecatedBy, number, obsoletedBy, platform, promotedTo ) ) ).first;
+    for ( auto const & d : depends )
+    {
+      checkForError( extensionIt->second.depends.insert( d ).second, line, "required depends <" + d + "> already listed" );
+    }
+
+    // extract the tag from the name, which is supposed to look like VK_<tag>_<other>
+    size_t tagStart = name.find( '_' );
+    checkForError( tagStart != std::string::npos, line, "name <" + name + "> is missing an underscore '_'" );
+    size_t tagEnd = name.find( '_', tagStart + 1 );
+    checkForError( tagEnd != std::string::npos, line, "name <" + name + "> is missing an underscore '_'" );
+    std::string tag = name.substr( tagStart + 1, tagEnd - tagStart - 1 );
+    checkForError( m_tags.find( tag ) != m_tags.end(), line, "name <" + name + "> is using an unknown tag <" + tag + ">" );
+  }
+
+  for ( auto child : children )
+  {
+    assert( child->Value() == std::string( "require" ) );
+    if ( supported )
+    {
+      readExtensionsExtensionRequire( child, extensionIt );
+    }
+    else
+    {
+      readExtensionsExtensionRequireSkipped( child );
+    }
+  }
+}
+
+// Parses a <require> element of a supported extension: resolves its depends/extension/feature
+// attributes, reads the contained commands/enums/types, and appends a RequireData entry to the
+// extension when at least one command or type was gathered.
+void VulkanHppGenerator::readExtensionsExtensionRequire( tinyxml2::XMLElement const * element, std::map<std::string, ExtensionData>::iterator extensionIt )
+{
+  int                                line       = element->GetLineNum();
+  std::map<std::string, std::string> attributes = getAttributes( element );
+  checkAttributes( line, attributes, {}, { { "depends", {} }, { "extension", {} }, { "feature", {} } } );
+  std::vector<tinyxml2::XMLElement const *> children = getChildElements( element );
+  checkElements( line, children, {}, { "command", "comment", "enum", "type" } );
+
+  std::vector<std::string> depends;
+  for ( auto const & attribute : attributes )
+  {
+    if ( ( attribute.first == "depends" ) || ( attribute.first == "extension" ) )
+    {
+      assert( depends.empty() );
+      depends = tokenizeAny( attribute.second, ",+" );
+      // each dependency may appear in at most one of this extension's require blocks
+      for ( auto const & d : depends )
+      {
+        checkForError( std::find_if( extensionIt->second.requireData.begin(),
+                                     extensionIt->second.requireData.end(),
+                                     [&d]( RequireData const & rd ) { return std::find( rd.depends.begin(), rd.depends.end(), d ) != rd.depends.end(); } ) ==
+                         extensionIt->second.requireData.end(),
+                       line,
+                       "required extension <" + d + "> already listed" );
+      }
+    }
+    else
+    {
+      assert( attribute.first == "feature" );
+      if ( m_features.find( attribute.second ) != m_features.end() )
+      {
+        assert( depends.empty() );
+        depends.push_back( attribute.second );
+      }
+      else
+      {
+        // a require tied to a skipped (non-"vulkan") feature is itself skipped completely
+        checkForError( m_skippedFeatures.find( attribute.second ) != m_skippedFeatures.end(), line, "unknown feature <" + attribute.second + ">" );
+        readExtensionsExtensionRequireSkipped( element );
+        return;
+      }
+    }
+  }
+
+  RequireData requireData( line, depends );
+  bool        requireDataEmpty = true;
+  for ( auto child : children )
+  {
+    std::string value = child->Value();
+    if ( value == "command" )
+    {
+      readExtensionsExtensionRequireCommand( child, extensionIt->first, requireData );
+      requireDataEmpty = false;
+    }
+    else if ( value == "comment" )
+    {
+      readComment( child );
+    }
+    else if ( value == "enum" )
+    {
+      readRequireEnum( child, extensionIt->first );
+    }
+    else if ( value == "type" )
+    {
+      readExtensionsExtensionRequireType( child, extensionIt->first, requireData );
+      requireDataEmpty = false;
+    }
+  }
+  // only record the require block if it brought in at least one command or type
+  if ( !requireDataEmpty )
+  {
+    extensionIt->second.requireData.push_back( requireData );
+  }
+}
+
+// Parses a <command> inside an extension's <require>: the named command must already be known;
+// it is attributed to this extension (or checked for platform consistency if already attributed)
+// and appended to requireData.commands.
+void VulkanHppGenerator::readExtensionsExtensionRequireCommand( tinyxml2::XMLElement const * element,
+                                                                std::string const &          extensionName,
+                                                                RequireData &                requireData )
+{
+  int                                line       = element->GetLineNum();
+  std::map<std::string, std::string> attributes = getAttributes( element );
+  checkAttributes( line, attributes, { { "name", {} } }, { { "comment", {} } } );
+  checkElements( line, getChildElements( element ), {} );
+
+  std::string name;
+  for ( auto const & attribute : attributes )
+  {
+    if ( attribute.first == "name" )
+    {
+      name = attribute.second;
+    }
+  }
+  assert( !name.empty() );
+
+  // mark this command as being part of this extension
+  auto commandIt = m_commands.find( name );
+  checkForError(
+    commandIt != m_commands.end(), line, "command <" + name + "> marked as required in extension <" + extensionName + "> was not listed before as a command!" );
+  if ( commandIt->second.referencedIn.empty() )
+  {
+    commandIt->second.referencedIn = extensionName;
+  }
+  else
+  {
+    // a command referenced by multiple extensions must be guarded by the same platform define in all of them
+    checkForError( getPlatform( commandIt->second.referencedIn ) == getPlatform( extensionName ),
+                   line,
+                   "command <" + name + "> is referenced in extensions <" + commandIt->second.referencedIn + "> and <" + extensionName +
+                     "> and thus protected by different platforms <" + getPlatform( commandIt->second.referencedIn ) + "> and <" +
+                     getPlatform( extensionName ) + ">!" );
+  }
+  assert( std::find( requireData.commands.begin(), requireData.commands.end(), name ) == requireData.commands.end() );
+  requireData.commands.push_back( name );
+}
+
+// Handles a <require> element of an unsupported extension: contained commands and types are
+// registered as skipped instead of being added to the generator state.
+void VulkanHppGenerator::readExtensionsExtensionRequireSkipped( tinyxml2::XMLElement const * element )
+{
+  int const                          line  = element->GetLineNum();
+  std::map<std::string, std::string> attrs = getAttributes( element );
+  checkAttributes( line, attrs, {}, { { "comment", {} }, { "depends", {} }, { "extension", {} }, { "feature", {} } } );
+  std::vector<tinyxml2::XMLElement const *> childElements = getChildElements( element );
+  checkElements( line, childElements, {}, { "command", "comment", "enum", "type" } );
+
+  for ( auto const & childElement : childElements )
+  {
+    std::string const childValue = childElement->Value();
+    if ( childValue == "type" )
+    {
+      readRequireTypeSkipped( childElement );
+    }
+    else if ( childValue == "command" )
+    {
+      readRequireCommandSkipped( childElement );
+    }
+    // "comment" and "enum" children carry no information needed for a skipped require
+  }
+}
+
+// Parses a <type> inside an extension's <require>: the type must already be known; on its first
+// reference it is attributed to this extension and added to requireData.types, otherwise the
+// platform guards of the referencing extensions must match.
+void VulkanHppGenerator::readExtensionsExtensionRequireType( tinyxml2::XMLElement const * element,
+                                                             std::string const &          extensionName,
+                                                             RequireData &                requireData )
+{
+  int                                line       = element->GetLineNum();
+  std::map<std::string, std::string> attributes = getAttributes( element );
+  checkAttributes( line, attributes, { { "name", {} } }, { { "comment", {} } } );
+  checkElements( line, getChildElements( element ), {} );
+
+  std::string name;
+  for ( auto const & attribute : attributes )
+  {
+    if ( attribute.first == "name" )
+    {
+      name = attribute.second;
+    }
+  }
+  assert( !name.empty() );
+
+  auto typeIt = m_types.find( name );
+  checkForError( typeIt != m_types.end(), line, "failed to find required type <" + name + ">" );
+  if ( typeIt->second.referencedIn.empty() )
+  {
+    // first reference wins: the type is recorded only for the first extension requiring it
+    typeIt->second.referencedIn = extensionName;
+    assert( std::find( requireData.types.begin(), requireData.types.end(), name ) == requireData.types.end() );
+    requireData.types.push_back( name );
+  }
+  else
+  {
+    checkForError( getPlatform( typeIt->second.referencedIn ) == getPlatform( extensionName ),
+                   line,
+                   "type <" + name + "> is referenced in extensions <" + typeIt->second.referencedIn + "> and <" + extensionName +
+                     "> and thus protected by different platforms <" + getPlatform( typeIt->second.referencedIn ) + "> and <" + getPlatform( extensionName ) +
+                     ">!" );
+  }
+}
+
+// Parses a <feature> element (a core API version): "vulkan" features are validated (name must be
+// VK_VERSION_<major>_<minor>) and registered with their <require> children; features of other APIs
+// (vulkansc) are recorded as skipped and their requires are processed by the skipping path.
+void VulkanHppGenerator::readFeature( tinyxml2::XMLElement const * element )
+{
+  int                                line       = element->GetLineNum();
+  std::map<std::string, std::string> attributes = getAttributes( element );
+  checkAttributes( line, attributes, { { "api", { "vulkan", "vulkansc" } }, { "comment", {} }, { "name", {} }, { "number", {} } }, {} );
+  std::vector<tinyxml2::XMLElement const *> children = getChildElements( element );
+  checkElements( line, children, { { "require", false } }, { "remove" } );
+
+  std::string name, number, modifiedNumber;
+  for ( auto const & attribute : attributes )
+  {
+    if ( attribute.first == "name" )
+    {
+      name = attribute.second;
+    }
+    else if ( attribute.first == "number" )
+    {
+      // "1.3" -> "1_3", to compare against the feature name suffix below
+      number         = attribute.second;
+      modifiedNumber = number;
+      std::replace( modifiedNumber.begin(), modifiedNumber.end(), '.', '_' );
+    }
+  }
+  assert( !name.empty() && !number.empty() );
+
+  auto attributeIt = attributes.find( "api" );
+  assert( attributeIt != attributes.end() );
+  std::vector<std::string> api = tokenize( attributeIt->second, "," );
+  if ( std::find( api.begin(), api.end(), "vulkan" ) != api.end() )
+  {
+    checkForError( name == "VK_VERSION_" + modifiedNumber, line, "unexpected formatting of name <" + name + ">" );
+    checkForError( m_features.find( name ) == m_features.end(), line, "already specified feature <" + name + ">" );
+    assert( m_skippedFeatures.find( name ) == m_skippedFeatures.end() );
+
+    auto featureIt = m_features.insert( std::make_pair( name, number ) ).first;
+    for ( auto child : children )
+    {
+      std::string value = child->Value();
+      if ( value == "remove" )
+      {
+        // <remove> is only expected for non-"vulkan" APIs and should never reach this branch
+        checkForError( false, line, "unsupported child <remove>: should be filtered by attribute <api>" );
+      }
+      else if ( value == "require" )
+      {
+        readFeatureRequire( child, featureIt );
+      }
+    }
+  }
+  else
+  {
+    // skip this feature
+    checkForError( name == "VKSC_VERSION_" + modifiedNumber, line, "unexpected formatting of name <" + name + ">" );
+    checkForError( m_skippedFeatures.insert( name ).second, line, "already specified skipped feature <" + name + ">" );
+    assert( m_features.find( name ) == m_features.end() );
+
+    for ( auto child : children )
+    {
+      std::string value = child->Value();
+      if ( value == "require" )
+      {
+        readFeatureRequireSkipped( child );
+      }
+    }
+  }
+}
+
+// Parses a <require> element of a supported feature: reads the contained commands/enums/types and
+// appends a RequireData entry to the feature when at least one command or type was gathered.
+void VulkanHppGenerator::readFeatureRequire( tinyxml2::XMLElement const * element, std::map<std::string, FeatureData>::iterator featureIt )
+{
+  int line = element->GetLineNum();
+  checkAttributes( line, getAttributes( element ), {}, { { "comment", {} } } );
+  std::vector<tinyxml2::XMLElement const *> children = getChildElements( element );
+  checkElements( line, children, {}, { "command", "comment", "enum", "type" } );
+
+  // NOTE(review): the single empty-string entry in depends appears to denote "no dependency" for
+  // feature-level requires — confirm against RequireData's consumers
+  RequireData requireData( line, { "" } );
+  bool        requireDataEmpty = true;
+  for ( auto child : children )
+  {
+    std::string value = child->Value();
+    if ( value == "command" )
+    {
+      readFeatureRequireCommand( child, featureIt, requireData );
+      requireDataEmpty = false;
+    }
+    else if ( value == "comment" )
+    {
+      readComment( child );
+    }
+    else if ( value == "enum" )
+    {
+      readRequireEnum( child, "" );
+    }
+    else if ( value == "type" )
+    {
+      readFeatureRequireType( child, featureIt, requireData );
+      requireDataEmpty = false;
+    }
+  }
+  // only record the require block if it brought in at least one command or type
+  if ( !requireDataEmpty )
+  {
+    featureIt->second.requireData.push_back( requireData );
+  }
+}
+
+// Processes a <command> inside a skipped feature's <require>: the command must be known and not
+// yet referenced by any feature or extension; it is then removed from the set of generated commands.
+void VulkanHppGenerator::readFeatureRequireCommandSkipped( tinyxml2::XMLElement const * element )
+{
+  int                                line       = element->GetLineNum();
+  std::map<std::string, std::string> attributes = getAttributes( element );
+  checkAttributes( line, attributes, {}, { { "name", {} } } );
+
+  // "name" is only optional to checkAttributes, so guard against dereferencing a missing attribute
+  auto nameIt = attributes.find( "name" );
+  checkForError( nameIt != attributes.end(), line, "missing attribute <name>" );
+  std::string name = nameIt->second;
+
+  auto commandIt = m_commands.find( name );
+  checkForError( commandIt != m_commands.end(), line, "unknown required command <" + name + ">" );
+  checkForError( commandIt->second.referencedIn.empty(), line, "command <" + name + "> already listed with feature <" + commandIt->second.referencedIn + ">" );
+
+  // drop the command so no bindings are generated for it
+  m_commands.erase( commandIt );
+}
+
+// Processes a <command> inside a supported feature's <require>: the command must exist and must not
+// be claimed by another feature/extension yet; it is tagged with this feature and appended to
+// requireData.commands.
+void VulkanHppGenerator::readFeatureRequireCommand( tinyxml2::XMLElement const *                 element,
+                                                    std::map<std::string, FeatureData>::iterator featureIt,
+                                                    RequireData &                                requireData )
+{
+  int                                line       = element->GetLineNum();
+  std::map<std::string, std::string> attributes = getAttributes( element );
+  checkAttributes( line, attributes, {}, { { "name", {} } } );
+
+  // "name" is only optional to checkAttributes, so guard against dereferencing a missing attribute
+  auto nameIt = attributes.find( "name" );
+  checkForError( nameIt != attributes.end(), line, "missing attribute <name>" );
+  std::string name = nameIt->second;
+
+  auto commandIt = m_commands.find( name );
+  checkForError( commandIt != m_commands.end(), line, "feature <" + featureIt->first + "> requires unknown command <" + name + ">" );
+  checkForError( commandIt->second.referencedIn.empty(), line, "command <" + name + "> already listed with feature <" + commandIt->second.referencedIn + ">" );
+
+  // remember which feature first referenced this command
+  commandIt->second.referencedIn = featureIt->first;
+
+  assert( std::find( requireData.commands.begin(), requireData.commands.end(), name ) == requireData.commands.end() );
+  requireData.commands.push_back( name );
+}
+
+// Handles a <require> element of a skipped (non-"vulkan") feature: contained commands, enums, and
+// types are unregistered / marked as skipped instead of being added to the generator state.
+void VulkanHppGenerator::readFeatureRequireSkipped( tinyxml2::XMLElement const * element )
+{
+  int const line = element->GetLineNum();
+  checkAttributes( line, getAttributes( element ), {}, { { "comment", {} } } );
+  std::vector<tinyxml2::XMLElement const *> childElements = getChildElements( element );
+  checkElements( line, childElements, {}, { "command", "comment", "enum", "type" } );
+
+  for ( auto const & childElement : childElements )
+  {
+    std::string const childValue = childElement->Value();
+    if ( childValue == "command" )
+    {
+      readFeatureRequireCommandSkipped( childElement );
+    }
+    else if ( childValue == "enum" )
+    {
+      readRequireEnumSkipped( childElement );
+    }
+    else if ( childValue == "type" )
+    {
+      readRequireTypeSkipped( childElement );
+    }
+    // "comment" children are intentionally ignored here
+  }
+}
+
+// Processes a <type> inside a supported feature's <require>: includes (vk_platform) and defines
+// (VK_API_VERSION) are ignored; other types must be known, get tagged with this feature, and are
+// appended to requireData.types.
+void VulkanHppGenerator::readFeatureRequireType( tinyxml2::XMLElement const *                 element,
+                                                 std::map<std::string, FeatureData>::iterator featureIt,
+                                                 RequireData &                                requireData )
+{
+  int                                line       = element->GetLineNum();
+  std::map<std::string, std::string> attributes = getAttributes( element );
+  checkAttributes( line, attributes, {}, { { "comment", {} }, { "name", {} } } );
+  checkElements( line, getChildElements( element ), {} );
+
+  // "name" is only optional to checkAttributes, so guard against dereferencing a missing attribute
+  auto nameIt = attributes.find( "name" );
+  checkForError( nameIt != attributes.end(), line, "missing attribute <name>" );
+  std::string name = nameIt->second;
+
+  auto requireTypeIt = std::find_if( requireData.types.begin(), requireData.types.end(), [&name]( std::string const & type ) { return type == name; } );
+  checkForError( requireTypeIt == requireData.types.end(), line, "type <" + name + "> already listed for this feature!" );
+
+  // some types are in fact includes (like vk_platform) or defines (like VK_API_VERSION)
+  if ( ( m_defines.find( name ) == m_defines.end() ) && ( m_includes.find( name ) == m_includes.end() ) )
+  {
+    auto typeIt = m_types.find( name );
+    checkForError( typeIt != m_types.end(), line, "feature <" + featureIt->first + "> requires unknown type <" + name + ">" );
+    checkForError( typeIt->second.referencedIn.empty() || ( typeIt->second.referencedIn == featureIt->first ),
+                   line,
+                   "type <" + name + "> already listed on feature <" + typeIt->second.referencedIn + ">" );
+    typeIt->second.referencedIn = featureIt->first;
+
+    requireData.types.push_back( name );
+  }
+}
+
+// Parses the registry's <formats> element by forwarding every <format> child.
+void VulkanHppGenerator::readFormats( tinyxml2::XMLElement const * element )
+{
+  int const line = element->GetLineNum();
+  checkAttributes( line, getAttributes( element ), {}, {} );
+  std::vector<tinyxml2::XMLElement const *> formatElements = getChildElements( element );
+  checkElements( line, formatElements, { { "format", false } } );
+
+  for ( auto const & formatElement : formatElements )
+  {
+    readFormatsFormat( formatElement );
+  }
+}
+
+// Parses one <format> element: validates attributes against the allowed value sets, checks the name
+// against the VkFormat enum, registers the format in m_formats, reads its component/plane/
+// spirvimageformat children, and cross-checks compressed-bits and plane consistency.
+void VulkanHppGenerator::readFormatsFormat( tinyxml2::XMLElement const * element )
+{
+  int                                line       = element->GetLineNum();
+  std::map<std::string, std::string> attributes = getAttributes( element );
+  checkAttributes( line,
+                   attributes,
+                   { { "blockSize", { "1", "2", "3", "4", "5", "6", "8", "12", "16", "24", "32" } },
+                     { "class", {} },
+                     { "name", {} },
+                     { "texelsPerBlock", { "1", "16", "20", "25", "30", "36", "40", "48", "50", "60", "64", "80", "100", "120", "144" } } },
+                   { { "blockExtent", { "1", "2", "4", "5", "6", "8", "10", "12" } },
+                     { "chroma", { "420", "422", "444" } },
+                     { "compressed", { "ASTC HDR", "ASTC LDR", "BC", "EAC", "ETC", "ETC2", "PVRTC" } },
+                     { "packed", { "8", "16", "32" } } } );
+  std::vector<tinyxml2::XMLElement const *> children = getChildElements( element );
+  checkElements( line, children, { { "component", false } }, { "plane", "spirvimageformat" } );
+
+  FormatData  format( line );
+  std::string name;
+  for ( auto const & attribute : attributes )
+  {
+    if ( attribute.first == "blockExtent" )
+    {
+      checkForError( tokenize( attribute.second, "," ).size() == 3, line, "unexpected number of elements in attribute <blockExtent>" );
+      format.blockExtent = attribute.second;
+    }
+    // was a bare "if", breaking the else-if chain and re-evaluating the remaining branches
+    else if ( attribute.first == "blockSize" )
+    {
+      format.blockSize = attribute.second;
+    }
+    else if ( attribute.first == "chroma" )
+    {
+      format.chroma = attribute.second;
+    }
+    else if ( attribute.first == "class" )
+    {
+      format.classAttribute = attribute.second;
+    }
+    else if ( attribute.first == "compressed" )
+    {
+      format.compressed = attribute.second;
+    }
+    else if ( attribute.first == "name" )
+    {
+      name = attribute.second;
+    }
+    else if ( attribute.first == "packed" )
+    {
+      format.packed = attribute.second;
+    }
+    else if ( attribute.first == "texelsPerBlock" )
+    {
+      format.texelsPerBlock = attribute.second;
+    }
+  }
+
+  // the format name must be a value or an alias of the VkFormat enum
+  auto formatIt = m_enums.find( "VkFormat" );
+  assert( formatIt != m_enums.end() );
+
+  checkForError( std::find_if( formatIt->second.values.begin(),
+                               formatIt->second.values.end(),
+                               [&name]( EnumValueData const & evd ) { return evd.name == name; } ) != formatIt->second.values.end() ||
+                   ( formatIt->second.aliases.find( name ) != formatIt->second.aliases.end() ),
+                 line,
+                 "encountered unknown format <" + name + ">" );
+  auto [it, inserted] = m_formats.insert( std::make_pair( name, format ) );
+  checkForError( inserted, line, "format <" + name + "> already specified on line " + std::to_string( it->second.xmlLine ) );
+
+  for ( auto child : children )
+  {
+    std::string value = child->Value();
+    if ( value == "component" )
+    {
+      readFormatsFormatComponent( child, it->second );
+    }
+    else if ( value == "plane" )
+    {
+      readFormatsFormatPlane( child, it->second );
+    }
+    else if ( value == "spirvimageformat" )
+    {
+      readFormatsFormatSPIRVImageFormat( child, it->second );
+    }
+  }
+
+  // consistency: either all components are "compressed", or none is expected to be
+  if ( it->second.components.front().bits == "compressed" )
+  {
+    for ( auto componentIt = std::next( it->second.components.begin() ); componentIt != it->second.components.end(); ++componentIt )
+    {
+      checkForError( componentIt->bits == "compressed", line, "component is expected to be marked as compressed in attribute <bits>" );
+    }
+  }
+  // consistency: if one component has a planeIndex, all must, and the plane count must match
+  if ( !it->second.components.front().planeIndex.empty() )
+  {
+    for ( auto componentIt = std::next( it->second.components.begin() ); componentIt != it->second.components.end(); ++componentIt )
+    {
+      checkForError( !componentIt->planeIndex.empty(), line, "component is expected to have a planeIndex" );
+    }
+    size_t planeCount = 1 + std::stoi( it->second.components.back().planeIndex );
+    checkForError( it->second.planes.size() == planeCount, line, "number of planes does not fit to largest planeIndex of the components" );
+  }
+}
+
+// Parses a <component> child of a <format> and appends a ComponentData entry to formatData;
+// a component may only be marked "compressed" when the format itself is a compressed one.
+void VulkanHppGenerator::readFormatsFormatComponent( tinyxml2::XMLElement const * element, FormatData & formatData )
+{
+  int                                line       = element->GetLineNum();
+  std::map<std::string, std::string> attributes = getAttributes( element );
+  checkAttributes( line,
+                   attributes,
+                   { { "bits", { "1", "2", "4", "5", "6", "8", "9", "10", "11", "12", "16", "24", "32", "64", "compressed" } },
+                     { "name", {} },
+                     { "numericFormat", { "SFLOAT", "SINT", "SNORM", "SRGB", "SSCALED", "UFLOAT", "UINT", "UNORM", "USCALED" } } },
+                   { { "planeIndex", { "0", "1", "2" } } } );
+  checkElements( line, getChildElements( element ), {} );
+
+  formatData.components.emplace_back( line );
+  ComponentData & component = formatData.components.back();
+  for ( auto const & attribute : attributes )
+  {
+    if ( attribute.first == "bits" )
+    {
+      checkForError(
+        ( attribute.second != "compressed" ) || !formatData.compressed.empty(), line, "component of a not compressed format is marked as compressed" );
+      component.bits = attribute.second;
+    }
+    else if ( attribute.first == "name" )
+    {
+      component.name = attribute.second;
+    }
+    else if ( attribute.first == "numericFormat" )
+    {
+      component.numericFormat = attribute.second;
+    }
+    else if ( attribute.first == "planeIndex" )
+    {
+      component.planeIndex = attribute.second;
+    }
+  }
+}
+
+// Parses a <plane> child of a <format> and appends a PlaneData entry to formatData; the compatible
+// format must be a VkFormat value and the plane's index must match its position in the list.
+void VulkanHppGenerator::readFormatsFormatPlane( tinyxml2::XMLElement const * element, FormatData & formatData )
+{
+  int                                line       = element->GetLineNum();
+  std::map<std::string, std::string> attributes = getAttributes( element );
+  checkAttributes(
+    line, attributes, { { "compatible", {} }, { "index", { "0", "1", "2" } }, { "heightDivisor", { "1", "2" } }, { "widthDivisor", { "1", "2" } } }, {} );
+  checkElements( line, getChildElements( element ), {} );
+
+  formatData.planes.emplace_back( line );
+  PlaneData & plane = formatData.planes.back();
+  for ( auto const & attribute : attributes )
+  {
+    if ( attribute.first == "compatible" )
+    {
+      plane.compatible = attribute.second;
+      auto formatIt    = m_enums.find( "VkFormat" );
+      assert( formatIt != m_enums.end() );
+      checkForError( std::find_if( formatIt->second.values.begin(),
+                                   formatIt->second.values.end(),
+                                   [&plane]( EnumValueData const & evd ) { return evd.name == plane.compatible; } ) != formatIt->second.values.end(),
+                     line,
+                     "encountered unknown format <" + plane.compatible + ">" );
+    }
+    else if ( attribute.first == "index" )
+    {
+      // planes must be listed in index order: the index equals the position of this (just appended) plane
+      size_t index = std::stoi( attribute.second );
+      checkForError( index + 1 == formatData.planes.size(), line, "unexpected index <" + attribute.second + ">" );
+    }
+    else if ( attribute.first == "heightDivisor" )
+    {
+      plane.heightDivisor = attribute.second;
+    }
+    else if ( attribute.first == "widthDivisor" )
+    {
+      plane.widthDivisor = attribute.second;
+    }
+  }
+}
+
+// Reads a <spirvimageformat> child of a <format>, storing its name; at most one is allowed per format.
+void VulkanHppGenerator::readFormatsFormatSPIRVImageFormat( tinyxml2::XMLElement const * element, FormatData & formatData )
+{
+  int const                          line  = element->GetLineNum();
+  std::map<std::string, std::string> attrs = getAttributes( element );
+  checkAttributes( line, attrs, { { "name", {} } }, {} );
+  checkElements( line, getChildElements( element ), {} );
+
+  for ( auto const & attr : attrs )
+  {
+    assert( attr.first == "name" );
+    checkForError( formatData.spirvImageFormat.empty(), line, "spirvimageformat <" + attr.second + "> already specified" );
+    formatData.spirvImageFormat = attr.second;
+  }
+}
+
+// Reads the <name> and <type> children of an element (e.g. a struct member or command proto) and
+// returns them as a (NameData, TypeInfo) pair; <enum> children denote array sizes, which must be
+// bracketed as "[<enum>]" and refer to a known constant.
+std::pair<VulkanHppGenerator::NameData, VulkanHppGenerator::TypeInfo> VulkanHppGenerator::readNameAndType( tinyxml2::XMLElement const * element )
+{
+  int                                       line     = element->GetLineNum();
+  std::vector<tinyxml2::XMLElement const *> children = getChildElements( element );
+  checkElements( line, children, { { "name", true } }, { { "enum" }, { "type" } } );
+
+  NameData nameData;
+  TypeInfo typeInfo;
+  for ( auto child : children )
+  {
+    line = child->GetLineNum();
+    checkAttributes( line, getAttributes( child ), {}, {} );
+    checkElements( line, getChildElements( child ), {} );
+
+    std::string value = child->Value();
+    if ( value == "enum" )
+    {
+      // an <enum> array size must be surrounded by literal "[" and "]" text siblings
+      nameData.arraySizes.push_back( child->GetText() );
+      checkForError( child->PreviousSibling() && ( strcmp( child->PreviousSibling()->Value(), "[" ) == 0 ) && child->NextSibling() &&
+                       ( strcmp( child->NextSibling()->Value(), "]" ) == 0 ),
+                     line,
+                     // fixed typo in error message: "specifiation" -> "specification"
+                     std::string( "array specification is ill-formatted: <" ) + nameData.arraySizes.back() + ">" );
+      checkForError(
+        m_constants.find( nameData.arraySizes.back() ) != m_constants.end(), line, "using unknown enum value <" + nameData.arraySizes.back() + ">" );
+    }
+    else if ( value == "name" )
+    {
+      nameData.name = child->GetText();
+      std::string bitCount;
+      // trailing text after <name> may carry C-style array sizes; bit fields are not supported here
+      std::tie( nameData.arraySizes, bitCount ) = readModifiers( child->NextSibling() );
+      checkForError( bitCount.empty(), line, "name <" + nameData.name + "> with unsupported bitCount <" + bitCount + ">" );
+    }
+    else if ( value == "type" )
+    {
+      typeInfo = readTypeInfo( child );
+    }
+  }
+  return std::make_pair( nameData, typeInfo );
+}
+
+// Parses the registry's <platforms> element by forwarding every <platform> child.
+void VulkanHppGenerator::readPlatforms( tinyxml2::XMLElement const * element )
+{
+  int const line = element->GetLineNum();
+  checkAttributes( line, getAttributes( element ), { { "comment", {} } }, {} );
+  std::vector<tinyxml2::XMLElement const *> platformElements = getChildElements( element );
+  checkElements( line, platformElements, { { "platform", false } } );
+
+  for ( auto const & platformElement : platformElements )
+  {
+    readPlatformsPlatform( platformElement );
+  }
+}
+
+// Parses one <platform> element: both its name and its protect define must be non-empty and unique
+// across all platforms; the pair is stored in m_platforms.
+void VulkanHppGenerator::readPlatformsPlatform( tinyxml2::XMLElement const * element )
+{
+  int                                line       = element->GetLineNum();
+  std::map<std::string, std::string> attributes = getAttributes( element );
+  checkAttributes( line, attributes, { { "comment", {} }, { "name", {} }, { "protect", {} } }, {} );
+  checkElements( line, getChildElements( element ), {} );
+
+  std::string name, protect;
+  for ( auto const & attribute : attributes )
+  {
+    if ( attribute.first == "name" )
+    {
+      name = attribute.second;
+      checkForError( !name.empty(), line, "attribute <name> is empty" );
+    }
+    else if ( attribute.first == "protect" )
+    {
+      protect = attribute.second;
+      checkForError( !protect.empty(), line, "attribute <protect> is empty" );
+    }
+  }
+  assert( !name.empty() && !protect.empty() );
+
+  // protect defines must be unique across platforms, as must the platform names themselves
+  checkForError( std::find_if( m_platforms.begin(),
+                               m_platforms.end(),
+                               [&protect]( std::pair<std::string, PlatformData> const & p ) { return p.second.protect == protect; } ) == m_platforms.end(),
+                 line,
+                 "platform protect <" + protect + "> already specified" );
+  checkForError( m_platforms.insert( std::make_pair( name, PlatformData( protect ) ) ).second, line, "platform name <" + name + "> already specified" );
+}
+
+// Parses the top-level <registry> element: validates which child blocks must appear (and how often)
+// and dispatches each one to its dedicated reader. Children are processed in document order, which
+// matters since later readers (features, extensions) validate against state set up by earlier ones.
+void VulkanHppGenerator::readRegistry( tinyxml2::XMLElement const * element )
+{
+  int line = element->GetLineNum();
+  checkAttributes( line, getAttributes( element ), {}, {} );
+
+  std::vector<tinyxml2::XMLElement const *> children = getChildElements( element );
+  checkElements( line,
+                 children,
+                 { { "commands", true },
+                   { "comment", false },
+                   { "enums", false },
+                   { "extensions", true },
+                   { "feature", false },
+                   { "platforms", true },
+                   { "spirvcapabilities", true },
+                   { "spirvextensions", true },
+                   { "tags", true },
+                   { "types", true } },
+                 { "formats" } );
+  for ( auto child : children )
+  {
+    const std::string value = child->Value();
+    if ( value == "commands" )
+    {
+      readCommands( child );
+    }
+    else if ( value == "comment" )
+    {
+      // a top-level comment starting with "\nCopyright" is taken as the license header for generated files
+      std::string comment = readComment( child );
+      if ( comment.find( "\nCopyright" ) == 0 )
+      {
+        setVulkanLicenseHeader( child->GetLineNum(), comment );
+      }
+    }
+    else if ( value == "enums" )
+    {
+      readEnums( child );
+    }
+    else if ( value == "extensions" )
+    {
+      readExtensions( child );
+    }
+    else if ( value == "feature" )
+    {
+      readFeature( child );
+    }
+    else if ( value == "formats" )
+    {
+      readFormats( child );
+    }
+    else if ( value == "platforms" )
+    {
+      readPlatforms( child );
+    }
+    else if ( value == "spirvcapabilities" )
+    {
+      readSPIRVCapabilities( child );
+    }
+    else if ( value == "spirvextensions" )
+    {
+      readSPIRVExtensions( child );
+    }
+    else if ( value == "tags" )
+    {
+      readTags( child );
+    }
+    else if ( value == "types" )
+    {
+      readTypes( child );
+    }
+  }
+}
+
+void VulkanHppGenerator::readRequireCommandSkipped( tinyxml2::XMLElement const * element )  // mark a required <command> as skipped and remove it from the active command set
+{
+  int                                line       = element->GetLineNum();
+  std::map<std::string, std::string> attributes = getAttributes( element );
+  checkAttributes( line, attributes, { { "name", {} } }, { { "comment", {} } } );  // "name" is required, "comment" optional
+  checkElements( line, getChildElements( element ), {} );  // no child elements expected
+
+  std::string name = attributes.find( "name" )->second;
+
+  // some commands might be skipped by multiple extensions!
+  auto commandIt = m_commands.find( name );
+  if ( commandIt != m_commands.end() )
+  {
+    checkForError( m_skippedCommands.insert( name ).second, line, "to be skipped command <" + name + "> is already marked as skipped" );
+    m_commands.erase( commandIt );  // move the command out of the active set
+  }
+  else
+  {
+    checkForError( m_skippedCommands.find( name ) != m_skippedCommands.end(),  // not active: it must at least be a known already-skipped command
+                   line,
+                   "to be skipped command <" + name + "> is neither listed as command nor as skipped command" );
+  }
+}
+
+void VulkanHppGenerator::readRequireEnum( tinyxml2::XMLElement const * element, std::string const & extensionName )  // parse an <enum> inside a require block: either an alias or a new value/bitpos/offset
+{
+  int                                line       = element->GetLineNum();
+  std::map<std::string, std::string> attributes = getAttributes( element );
+  if ( attributes.find( "alias" ) != attributes.end() )  // alias form: <enum alias=... name=... [extends=...]>
+  {
+    checkAttributes( line, attributes, { { "alias", {} }, { "name", {} } }, { { "api", { "vulkan", "vulkansc" } }, { "comment", {} }, { "extends", {} } } );
+    checkElements( line, getChildElements( element ), {} );
+
+    std::string alias, bitpos, name, extends, extnumber, offset, value;
+    for ( auto const & attribute : attributes )
+    {
+      if ( attribute.first == "alias" )
+      {
+        alias = attribute.second;
+      }
+      if ( attribute.first == "api" )  // NOTE(review): plain 'if' (not 'else if') — benign here, as "alias" != "api"
+      {
+        if ( attribute.second == "vulkansc" )
+        {
+          return;  // skip stuff marked as "vulkansc" !
+        }
+        assert( attribute.second == "vulkan" );
+      }
+      else if ( attribute.first == "extends" )
+      {
+        extends = attribute.second;
+      }
+      else if ( attribute.first == "name" )
+      {
+        name = attribute.second;
+      }
+    }
+
+    if ( !extends.empty() )
+    {
+      auto enumIt = m_enums.find( extends );
+      checkForError( enumIt != m_enums.end(), line, "feature extends unknown enum <" + extends + ">" );
+
+      // add this enum name to the list of aliases
+      enumIt->second.addEnumAlias( line, name, alias );
+    }
+  }
+  else
+  {
+    checkAttributes( line,
+                     attributes,
+                     { { "name", {} } },
+                     { { "api", { "vulkan", "vulkansc" } },
+                       { "bitpos", {} },
+                       { "comment", {} },
+                       { "extends", {} },
+                       { "dir", { "-" } },
+                       { "extnumber", {} },
+                       { "offset", {} },
+                       { "protect", { "VK_ENABLE_BETA_EXTENSIONS" } },
+                       { "value", {} } } );
+    checkElements( line, getChildElements( element ), {} );
+
+    std::string bitpos, name, extends, offset, protect, value;
+    for ( auto const & attribute : attributes )
+    {
+      if ( attribute.first == "api" )
+      {
+        if ( attribute.second == "vulkansc" )
+        {
+          return;  // skip stuff marked as "vulkansc" !
+        }
+        assert( attribute.second == "vulkan" );
+      }
+      else if ( attribute.first == "bitpos" )
+      {
+        bitpos = attribute.second;
+      }
+      else if ( attribute.first == "extends" )
+      {
+        extends = attribute.second;
+      }
+      else if ( attribute.first == "name" )
+      {
+        name = attribute.second;
+      }
+      else if ( attribute.first == "offset" )
+      {
+        offset = attribute.second;
+      }
+      else if ( attribute.first == "protect" )
+      {
+        protect = attribute.second;
+      }
+      else if ( attribute.first == "value" )
+      {
+        value = attribute.second;
+      }
+    }
+
+    if ( !extends.empty() )
+    {
+      auto enumIt = m_enums.find( extends );
+      if ( enumIt == m_enums.end() )
+      {
+        // need to re-add a previously removed enum !!
+        enumIt = m_skippedEnums.find( extends );
+        checkForError( enumIt != m_skippedEnums.end(), line, "feature extends unknown enum <" + extends + ">" );
+        enumIt = m_enums.insert( *enumIt ).first;  // re-activate the previously skipped enum
+
+        auto typeIt = m_skippedTypes.find( extends );
+        assert( ( m_types.find( extends ) == m_types.end() ) || ( typeIt != m_skippedTypes.end() ) );
+        typeIt->second.referencedIn = extensionName;  // NOTE(review): typeIt is dereferenced without an end() check — guarded only by the assert above; verify this holds in release builds
+        m_types[extends]            = typeIt->second;
+        m_skippedTypes.erase( typeIt );
+      }
+
+      // add this enum name to the list of values
+      checkForError( bitpos.empty() + offset.empty() + value.empty() == 2,  // bool sum: exactly one of bitpos/offset/value must be non-empty
+                     line,
+                     "exactly one out of bitpos = <" + bitpos + ">, offset = <" + offset + ">, and value = <" + value + "> are supposed to be empty" );
+      enumIt->second.addEnumValue( element->GetLineNum(), name, protect, !bitpos.empty(), extensionName );
+    }
+    else if ( value.empty() )
+    {
+      checkForError( m_constants.find( name ) != m_constants.end(), line, "unknown required enum <" + name + ">" );  // bare name with no value: must be a known constant
+    }
+  }
+}
+
+void VulkanHppGenerator::readRequireEnumSkipped( tinyxml2::XMLElement const * element )  // handle an <enum> require entry that is to be skipped; scrubs skipped VkResult values from command error codes
+{
+  int                                line       = element->GetLineNum();
+  std::map<std::string, std::string> attributes = getAttributes( element );
+  checkAttributes( line,
+                   attributes,
+                   { { "name", {} } },
+                   { { "alias", {} },
+                     { "bitpos", {} },
+                     { "comment", {} },
+                     { "extends", {} },
+                     { "dir", { "-" } },
+                     { "extnumber", {} },
+                     { "offset", {} },
+                     { "protect", {} },
+                     { "value", {} } } );
+  checkElements( line, getChildElements( element ), {} );
+
+  std::string extends, name;
+  for ( auto const & attribute : attributes )
+  {
+    if ( attribute.first == "extends" )
+    {
+      extends = attribute.second;
+    }
+    else if ( attribute.first == "name" )
+    {
+      name = attribute.second;
+    }
+  }
+  assert( !name.empty() );
+
+  if ( extends == "VkResult" )  // only values extending VkResult need clean-up here; all other skips are ignored
+  {
+    // check that the to be skipped enum value is not already listed
+    auto enumIt = m_enums.find( extends );
+    assert( enumIt != m_enums.end() );
+    auto valueIt =
+      std::find_if( enumIt->second.values.begin(), enumIt->second.values.end(), [&name]( EnumValueData const & evd ) { return evd.name == name; } );
+    checkForError( valueIt == enumIt->second.values.end(),
+                   line,
+                   "to be skipped enum value <" + name + "> extending enum <" + extends + "> is regularly specified for that enum" );
+
+    // look for all the errorCodes (and successCodes) and remove this enum value!
+    for ( auto & command : m_commands )
+    {
+      auto errorCodeIt = std::find( command.second.errorCodes.begin(), command.second.errorCodes.end(), name );
+      if ( errorCodeIt != command.second.errorCodes.end() )
+      {
+        command.second.errorCodes.erase( errorCodeIt );  // drop the skipped result from this command's error codes
+      }
+      assert( std::find( command.second.successCodes.begin(), command.second.successCodes.end(), name ) == command.second.successCodes.end() );  // skipped results are never expected among success codes
+    }
+  }
+}
+
+void VulkanHppGenerator::readRequireTypeSkipped( tinyxml2::XMLElement const * element )  // move a required-but-skipped <type> out of the active maps into the skipped bookkeeping
+{
+  int                                line       = element->GetLineNum();
+  std::map<std::string, std::string> attributes = getAttributes( element );
+  checkAttributes( line, attributes, { { "name", {} } }, { { "comment", {} } } );
+  checkElements( line, getChildElements( element ), {} );
+
+  std::string name = attributes.find( "name" )->second;
+
+  // some types are not really types, but defines
+  auto typeIt = m_types.find( name );
+  if ( typeIt != m_types.end() )
+  {
+    assert( typeIt->second.referencedIn.empty() );
+    assert( m_skippedTypes.find( name ) == m_skippedTypes.end() );
+
+    switch ( typeIt->second.category )  // remove the type from its category-specific map as well
+    {
+      case TypeCategory::Bitmask:
+        assert( m_bitmasks.find( name ) != m_bitmasks.end() );
+        m_bitmasks.erase( name );
+        break;
+      case TypeCategory::Enum:
+        {
+          auto enumIt = m_enums.find( name );
+          assert( enumIt != m_enums.end() );
+          assert( m_skippedEnums.find( name ) == m_skippedEnums.end() );
+          m_skippedEnums[name] = enumIt->second;  // enums are kept aside so a later extension can re-activate them
+          m_enums.erase( enumIt );
+        }
+        break;
+      case TypeCategory::FuncPointer:
+        assert( m_funcPointers.find( name ) != m_funcPointers.end() );
+        m_funcPointers.erase( name );
+        break;
+      case TypeCategory::Handle:
+        assert( m_handles.find( name ) != m_handles.end() );
+        m_handles.erase( name );
+        break;
+      case TypeCategory::Struct:
+        assert( m_structures.find( name ) != m_structures.end() );
+        m_structures.erase( name );
+        break;
+      default: assert( false ); break;
+    }
+
+    m_skippedTypes[name] = typeIt->second;  // remember the type data for potential re-activation
+    m_types.erase( typeIt );
+  }
+}
+
+void VulkanHppGenerator::readSPIRVCapabilities( tinyxml2::XMLElement const * element )  // parse the <spirvcapabilities> section: a comment attribute plus <spirvcapability> children
+{
+  int                                line       = element->GetLineNum();
+  std::map<std::string, std::string> attributes = getAttributes( element );
+  checkAttributes( line, attributes, { { "comment", {} } }, {} );
+  std::vector<tinyxml2::XMLElement const *> children = getChildElements( element );
+  checkElements( line, children, {}, { "spirvcapability" } );
+
+  for ( auto child : children )
+  {
+    assert( child->Value() == std::string( "spirvcapability" ) );
+    readSPIRVCapabilitiesSPIRVCapability( child );
+  }
+}
+
+void VulkanHppGenerator::readSPIRVCapabilitiesSPIRVCapability( tinyxml2::XMLElement const * element )  // parse one <spirvcapability>: a name plus a list of <enable> children
+{
+  int                                line       = element->GetLineNum();
+  std::map<std::string, std::string> attributes = getAttributes( element );
+  checkAttributes( line, attributes, { { "name", {} } }, {} );
+  std::vector<tinyxml2::XMLElement const *> children = getChildElements( element );
+  checkElements( line, children, {}, { "enable" } );
+
+  for ( auto child : children )
+  {
+    assert( child->Value() == std::string( "enable" ) );
+    readSPIRVCapabilitiesSPIRVCapabilityEnable( child );
+  }
+}
+
+void VulkanHppGenerator::readSPIRVCapabilitiesSPIRVCapabilityEnable( tinyxml2::XMLElement const * element )  // dispatch an <enable> by which discriminating attribute it carries
+{
+  int                                line       = element->GetLineNum();
+  std::map<std::string, std::string> attributes = getAttributes( element );
+  checkElements( line, getChildElements( element ), {}, {} );
+
+  if ( attributes.find( "extension" ) != attributes.end() )  // enabled by an extension
+  {
+    readSPIRVCapabilitiesSPIRVCapabilityEnableExtension( line, attributes );
+  }
+  else if ( attributes.find( "property" ) != attributes.end() )  // enabled by a device property member/value
+  {
+    readSPIRVCapabilitiesSPIRVCapabilityEnableProperty( line, attributes );
+  }
+  else if ( attributes.find( "struct" ) != attributes.end() )  // enabled by a feature struct member
+  {
+    readSPIRVCapabilitiesSPIRVCapabilityEnableStruct( line, attributes );
+  }
+  else if ( attributes.find( "version" ) != attributes.end() )  // enabled by a core Vulkan version
+  {
+    readSPIRVCapabilitiesSPIRVCapabilityEnableVersion( line, attributes );
+  }
+  else
+  {
+    checkForError( false, line, "unknown set of attributes specified for SPIR-V capability" );
+  }
+}
+
+void VulkanHppGenerator::readSPIRVCapabilitiesSPIRVCapabilityEnableExtension( int xmlLine, std::map<std::string, std::string> const & attributes )  // validate an <enable extension=...>: the extension must be known
+{
+  checkAttributes( xmlLine, attributes, { { "extension", {} } }, {} );
+
+  checkForError( attributes.size() == 1, xmlLine, "unexpected attributes in addition to <extension> specified for SPIR-V capability" );
+  for ( auto const & attribute : attributes )  // loop runs exactly once, per the size check above
+  {
+    assert( attribute.first == "extension" );
+    checkForError(
+      m_extensions.find( attribute.second ) != m_extensions.end(), xmlLine, "unknown extension <" + attribute.second + "> specified for SPIR-V capability" );
+  }
+}
+
+void VulkanHppGenerator::readSPIRVCapabilitiesSPIRVCapabilityEnableProperty( int xmlLine, std::map<std::string, std::string> const & attributes )  // validate an <enable property=... member=... value=...>: struct, member and value must all resolve
+{
+  checkAttributes( xmlLine, attributes, { { "member", {} }, { "property", {} }, { "requires", {} }, { "value", {} } }, {} );
+
+  std::string member, property, value;
+  for ( auto const & attribute : attributes )
+  {
+    if ( attribute.first == "member" )
+    {
+      member = attribute.second;
+    }
+    else if ( attribute.first == "property" )
+    {
+      property = attribute.second;
+    }
+    if ( attribute.first == "requires" )  // NOTE(review): plain 'if' breaks the else-if chain — benign, since "requires" never equals "member"/"property"
+    {
+      std::vector<std::string> requiresAttribute = tokenize( attribute.second, "," );
+      for ( auto const & r : requiresAttribute )  // each comma-separated entry must be a known feature or extension
+      {
+        checkForError( ( m_features.find( r ) != m_features.end() ) || ( m_extensions.find( r ) != m_extensions.end() ),
+                       xmlLine,
+                       "unknown requires <" + r + "> specified for SPIR-V capability" );
+      }
+    }
+    else if ( attribute.first == "value" )
+    {
+      value = attribute.second;
+    }
+  }
+  assert( !member.empty() && !property.empty() && !value.empty() );
+
+  auto propertyIt = m_structures.find( property );
+  checkForError( propertyIt != m_structures.end(), xmlLine, "unknown property <" + property + "> specified for SPIR-V capability" );
+  auto memberIt = findStructMemberIt( member, propertyIt->second.members );
+  checkForError( memberIt != propertyIt->second.members.end(), xmlLine, "unknown member <" + member + "> specified for SPIR-V capability" );
+  if ( memberIt->type.type == "VkBool32" )  // boolean members only accept VK_FALSE / VK_TRUE
+  {
+    checkForError( ( value == "VK_FALSE" ) || ( value == "VK_TRUE" ),
+                   xmlLine,
+                   "unknown value <" + value + "> for boolean member <" + member + "> specified for SPIR-V capability" );
+  }
+  else
+  {
+    auto bitmaskIt = m_bitmasks.find( memberIt->type.type );  // non-boolean members must be bitmasks whose required enum lists the value
+    checkForError( bitmaskIt != m_bitmasks.end(), xmlLine, "attribute member = <" + member + "> specified for SPIR-V capability is not a bitmask" );
+    assert( !bitmaskIt->second.requirements.empty() );
+    auto enumIt = m_enums.find( bitmaskIt->second.requirements );
+    checkForError( enumIt != m_enums.end(),
+                   xmlLine,
+                   "attribute member = <" + member + "> specified for SPIR-V capability requires an unknown enum <" + bitmaskIt->second.requirements + ">" );
+    auto valueIt =
+      std::find_if( enumIt->second.values.begin(), enumIt->second.values.end(), [&value]( EnumValueData const & evd ) { return evd.name == value; } );
+    checkForError( valueIt != enumIt->second.values.end(), xmlLine, "unknown attribute value = <" + value + "> specified for SPIR-V capability" );
+  }
+}
+
+void VulkanHppGenerator::readSPIRVCapabilitiesSPIRVCapabilityEnableStruct( int xmlLine, std::map<std::string, std::string> const & attributes )  // validate an <enable struct=... feature=...>: struct must be known and a feature attribute must accompany it
+{
+  checkAttributes( xmlLine, attributes, { { "feature", {} }, { "struct", {} } }, { { "alias", {} }, { "requires", {} } } );
+
+  for ( auto const & attribute : attributes )
+  {
+    if ( attribute.first == "requires" )
+    {
+      std::vector<std::string> requiresAttribute = tokenize( attribute.second, "," );
+      for ( auto const & r : requiresAttribute )  // each comma-separated entry must be a known feature or extension
+      {
+        checkForError( ( m_features.find( r ) != m_features.end() ) || ( m_extensions.find( r ) != m_extensions.end() ),
+                       xmlLine,
+                       "unknown requires <" + r + "> specified for SPIR-V capability" );
+      }
+    }
+    else if ( attribute.first == "struct" )
+    {
+      checkForError( ( m_structures.find( attribute.second ) != m_structures.end() ) ||
+                       ( m_structureAliases.find( attribute.second ) != m_structureAliases.end() ),  // struct may be listed under its alias
+                     xmlLine,
+                     "unknown structure <" + attribute.second + "> specified for SPIR-V capability" );
+      checkForError( attributes.find( "feature" ) != attributes.end(),
+                     xmlLine,
+                     "missing feature attribute for SPIR-V capability specified with struct <" + attribute.second + ">" );
+    }
+    else
+    {
+      assert( ( attribute.first == "alias" ) || ( attribute.first == "feature" ) );  // remaining attributes need no validation here
+    }
+  }
+}
+
+void VulkanHppGenerator::readSPIRVCapabilitiesSPIRVCapabilityEnableVersion( int xmlLine, std::map<std::string, std::string> const & attributes )  // validate an <enable version=...>: must name a known VK_VERSION_x_y feature
+{
+  checkAttributes( xmlLine, attributes, { { "version", {} } }, {} );
+
+  checkForError( attributes.size() == 1, xmlLine, "unexpected attributes in addition to <version> specified for SPIR-V capability" );
+  for ( auto const & attribute : attributes )  // loop runs exactly once, per the size check above
+  {
+    assert( attribute.first == "version" );
+    std::string feature = attribute.second;
+    if ( feature.starts_with( "VK_API_" ) )
+    {
+      feature.erase( 3, 4 );  // remove "API_" from the version -> VK_VERSION_x_y
+    }
+    checkForError( feature.starts_with( "VK_VERSION_" ), xmlLine, "unknown version <" + attribute.second + "> specified for SPIR-V capability" );
+    checkForError( m_features.find( feature ) != m_features.end(), xmlLine, "unknown version <" + attribute.second + "> specified for SPIR-V capability" );
+  }
+}
+
+void VulkanHppGenerator::readSPIRVExtensions( tinyxml2::XMLElement const * element )  // parse the <spirvextensions> section: a comment attribute plus <spirvextension> children
+{
+  int                                line       = element->GetLineNum();
+  std::map<std::string, std::string> attributes = getAttributes( element );
+  checkAttributes( line, attributes, { { "comment", {} } }, {} );
+  std::vector<tinyxml2::XMLElement const *> children = getChildElements( element );
+  checkElements( line, children, {}, { "spirvextension" } );
+
+  for ( auto child : children )
+  {
+    assert( child->Value() == std::string( "spirvextension" ) );
+    readSPIRVExtensionsExtension( child );
+  }
+}
+
+void VulkanHppGenerator::readSPIRVExtensionsExtension( tinyxml2::XMLElement const * element )  // parse one <spirvextension>: a name plus a list of <enable> children
+{
+  int                                line       = element->GetLineNum();
+  std::map<std::string, std::string> attributes = getAttributes( element );
+  checkAttributes( line, attributes, { { "name", {} } }, {} );
+  std::vector<tinyxml2::XMLElement const *> children = getChildElements( element );
+  checkElements( line, children, {}, { "enable" } );
+
+  for ( auto child : children )
+  {
+    assert( child->Value() == std::string( "enable" ) );
+    readSPIRVExtensionsExtensionEnable( child );
+  }
+}
+
+void VulkanHppGenerator::readSPIRVExtensionsExtensionEnable( tinyxml2::XMLElement const * element )  // validate an SPIR-V extension <enable>: either a known extension or a known core version
+{
+  int                                line       = element->GetLineNum();
+  std::map<std::string, std::string> attributes = getAttributes( element );
+  checkAttributes( line, attributes, {}, { { "extension", {} }, { "version", {} } } );
+  checkElements( line, getChildElements( element ), {}, {} );
+
+  checkForError( !attributes.empty(), line, "no version or extension specified for SPIR-V extension" );
+  for ( auto const & attribute : attributes )
+  {
+    if ( attribute.first == "extension" )
+    {
+      checkForError(
+        m_extensions.find( attribute.second ) != m_extensions.end(), line, "unknown extension <" + attribute.second + "> specified for SPIR-V extension" );
+    }
+    else
+    {
+      assert( attribute.first == "version" );
+      std::string feature = attribute.second;
+      if ( feature.starts_with( "VK_API_" ) )
+      {
+        feature.erase( 3, 4 );  // remove "API_" from the version -> VK_VERSION_x_y
+      }
+      checkForError( feature.starts_with( "VK_VERSION_" ), line, "unknown version <" + attribute.second + "> specified for SPIR-V extension" );
+      checkForError( m_features.find( feature ) != m_features.end(), line, "unknown version <" + attribute.second + "> specified for SPIR-V extension" );
+    }
+  }
+}
+
+void VulkanHppGenerator::readTags( tinyxml2::XMLElement const * element )  // parse the <tags> section: a comment attribute plus one or more <tag> children
+{
+  int line = element->GetLineNum();
+  checkAttributes( line, getAttributes( element ), { { "comment", {} } }, {} );
+  std::vector<tinyxml2::XMLElement const *> children = getChildElements( element );
+  checkElements( line, children, { { "tag", false } } );
+
+  for ( auto child : children )
+  {
+    readTagsTag( child );
+  }
+}
+
+void VulkanHppGenerator::readTagsTag( tinyxml2::XMLElement const * element )  // parse one <tag> and register its unique name; author/contact are accepted but unused
+{
+  int                                line       = element->GetLineNum();
+  std::map<std::string, std::string> attributes = getAttributes( element );
+  checkAttributes( line, attributes, { { "author", {} }, { "contact", {} }, { "name", {} } }, {} );
+  checkElements( line, getChildElements( element ), {} );
+
+  for ( auto const & attribute : attributes )
+  {
+    if ( attribute.first == "name" )
+    {
+      checkForError( m_tags.find( attribute.second ) == m_tags.end(), line, "tag named <" + attribute.second + "> has already been specified" );
+      m_tags.insert( attribute.second );
+    }
+    else
+    {
+      checkForError( ( attribute.first == "author" ) || ( attribute.first == "contact" ), line, "unknown attribute <" + attribute.first + ">" );
+    }
+  }
+}
+
+void VulkanHppGenerator::readTypes( tinyxml2::XMLElement const * element )  // parse the <types> section, dispatching <type> children and consuming <comment> children
+{
+  int line = element->GetLineNum();
+  checkAttributes( line, getAttributes( element ), { { "comment", {} } }, {} );
+  std::vector<tinyxml2::XMLElement const *> children = getChildElements( element );
+  checkElements( line, children, { { "comment", false }, { "type", false } } );
+
+  for ( auto child : children )
+  {
+    std::string value = child->Value();
+    if ( value == "comment" )
+    {
+      readComment( child );  // validated but otherwise ignored
+    }
+    else
+    {
+      assert( value == "type" );
+      readTypesType( child );
+    }
+  }
+}
+
+void VulkanHppGenerator::readTypesType( tinyxml2::XMLElement const * element )  // dispatch one <type> element on its category attribute (or requires/name fallback)
+{
+  int                                line       = element->GetLineNum();
+  std::map<std::string, std::string> attributes = getAttributes( element );
+
+  auto categoryIt = attributes.find( "category" );
+  if ( categoryIt != attributes.end() )
+  {
+    if ( categoryIt->second == "basetype" )
+    {
+      readTypesTypeBasetype( element, attributes );
+    }
+    else if ( categoryIt->second == "bitmask" )
+    {
+      readTypesTypeBitmask( element, attributes );
+    }
+    else if ( categoryIt->second == "define" )
+    {
+      readTypesTypeDefine( element, attributes );
+    }
+    else if ( categoryIt->second == "enum" )
+    {
+      readTypesTypeEnum( element, attributes );
+    }
+    else if ( categoryIt->second == "funcpointer" )
+    {
+      readTypesTypeFuncpointer( element, attributes );
+    }
+    else if ( categoryIt->second == "handle" )
+    {
+      readTypesTypeHandle( element, attributes );
+    }
+    else if ( categoryIt->second == "include" )
+    {
+      readTypesTypeInclude( element, attributes );
+    }
+    else if ( categoryIt->second == "struct" )
+    {
+      readTypesTypeStruct( element, false, attributes );  // false: not a union
+    }
+    else
+    {
+      checkForError( categoryIt->second == "union", element->GetLineNum(), "unknown type category <" + categoryIt->second + ">" );
+      readTypesTypeStruct( element, true, attributes );  // unions share the struct reader, flagged as union
+    }
+  }
+  else
+  {
+    auto requiresIt = attributes.find( "requires" );
+    if ( requiresIt != attributes.end() )
+    {
+      readTypesTypeRequires( element, attributes );
+    }
+    else
+    {
+      checkForError( ( attributes.size() == 1 ) && ( attributes.begin()->first == "name" ) && ( attributes.begin()->second == "int" ), line, "unknown type" );  // the lone category-less, requires-less type accepted is name="int"
+      checkForError( m_types.insert( std::make_pair( attributes.begin()->second, TypeData{ .category = TypeCategory::Unknown } ) ).second,
+                     line,
+                     "type <" + attributes.begin()->second + "> already specified" );
+    }
+  }
+}
+
+void VulkanHppGenerator::readTypesTypeBasetype( tinyxml2::XMLElement const * element, std::map<std::string, std::string> const & attributes )  // parse a <type category="basetype"> and register it in m_baseTypes/m_types
+{
+  int line = element->GetLineNum();
+  checkAttributes( line, attributes, { { "category", { "basetype" } } }, {} );
+
+  NameData nameData;
+  TypeInfo typeInfo;
+  std::tie( nameData, typeInfo ) = readNameAndType( element );
+
+  if ( typeInfo.prefix == "typedef" )
+  {
+    // remove redundant typeInfo.prefix "typedef"
+    typeInfo.prefix.clear();
+  }
+
+  checkForError( nameData.arraySizes.empty(), line, "name <" + nameData.name + "> with unsupported arraySizes" );
+  checkForError( typeInfo.prefix.empty(), line, "unexpected type prefix <" + typeInfo.prefix + ">" );
+  checkForError( typeInfo.postfix.empty() || ( typeInfo.postfix == "*" ), line, "unexpected type postfix <" + typeInfo.postfix + ">" );
+
+  if ( !typeInfo.type.empty() )  // basetypes without an underlying type are only registered in m_types
+  {
+    checkForError( m_baseTypes.insert( std::make_pair( nameData.name, BaseTypeData( typeInfo, line ) ) ).second,
+                   line,
+                   "basetype <" + nameData.name + "> already specified" );
+  }
+  checkForError( m_types.insert( std::make_pair( nameData.name, TypeData{ .category = TypeCategory::BaseType } ) ).second,
+                 line,
+                 "basetype <" + nameData.name + "> already specified as a type" );
+}
+
+void VulkanHppGenerator::readTypesTypeBitmask( tinyxml2::XMLElement const * element, std::map<std::string, std::string> const & attributes )  // parse a <type category="bitmask">, either as an alias of an existing bitmask or as a new one
+{
+  int line = element->GetLineNum();
+
+  auto aliasIt = attributes.find( "alias" );
+  if ( aliasIt != attributes.end() )  // alias form: <type category="bitmask" alias=... name=.../>
+  {
+    checkAttributes( line, attributes, { { "alias", {} }, { "category", { "bitmask" } }, { "name", {} } }, {} );
+    checkElements( line, getChildElements( element ), {} );
+
+    std::string alias, name;
+    for ( auto const & attribute : attributes )
+    {
+      if ( attribute.first == "alias" )
+      {
+        alias = attribute.second;
+      }
+      else if ( attribute.first == "name" )
+      {
+        name = attribute.second;
+      }
+    }
+
+    auto bitmasksIt = m_bitmasks.find( alias );
+    checkForError( bitmasksIt != m_bitmasks.end(), line, "missing alias <" + alias + ">." );
+    checkForError(
+      bitmasksIt->second.alias.empty(), line, "alias for bitmask <" + bitmasksIt->first + "> already specified as <" + bitmasksIt->second.alias + ">" );  // at most one alias per bitmask is supported
+    bitmasksIt->second.alias = name;
+    checkForError( m_types.insert( std::make_pair( name, TypeData{ .category = TypeCategory::Bitmask } ) ).second,
+                   line,
+                   "aliased bitmask <" + name + "> already specified as a type" );
+  }
+  else
+  {
+    checkAttributes( line, attributes, { { "category", { "bitmask" } } }, { { "api", { "vulkan", "vulkansc" } }, { "bitvalues", {} }, { "requires", {} } } );
+
+    std::string bitvalues, requirements;
+    for ( auto const & attribute : attributes )
+    {
+      if ( attribute.first == "api" )
+      {
+        if ( attribute.second == "vulkansc" )
+        {
+          return;  // skip stuff marked as "vulkansc" !
+        }
+        assert( attribute.second == "vulkan" );
+      }
+      else if ( attribute.first == "bitvalues" )
+      {
+        bitvalues = attribute.second;
+      }
+      else if ( attribute.first == "requires" )
+      {
+        requirements = attribute.second;
+      }
+    }
+
+    NameData nameData;
+    TypeInfo typeInfo;
+    std::tie( nameData, typeInfo ) = readNameAndType( element );
+
+    checkForError( nameData.name.starts_with( "Vk" ), line, "name <" + nameData.name + "> does not begin with <Vk>" );
+    checkForError( nameData.arraySizes.empty(), line, "name <" + nameData.name + "> with unsupported arraySizes" );
+    checkForWarning( ( typeInfo.type == "VkFlags" ) || ( typeInfo.type == "VkFlags64" ), line, "unexpected bitmask type <" + typeInfo.type + ">" );  // only a warning: new flag widths should not hard-fail generation
+    checkForError( typeInfo.prefix == "typedef", line, "unexpected type prefix <" + typeInfo.prefix + ">" );
+    checkForError( typeInfo.postfix.empty(), line, "unexpected type postfix <" + typeInfo.postfix + ">" );
+    checkForError( bitvalues.empty() || requirements.empty(), line, "attributes <bitvalues> and <requires> are both specified" );  // the two attributes are mutually exclusive
+    checkForError( ( typeInfo.type != "VkFlags64" ) || !bitvalues.empty(), line, "bitmask of type <VkFlags64> needs attribute bitvalues to be set" );
+
+    if ( !bitvalues.empty() )
+    {
+      requirements = bitvalues;  // bitvalues acts as the requirements for 64-bit bitmasks
+    }
+    m_bitmasks.insert( std::make_pair( nameData.name, BitmaskData( requirements, typeInfo.type, line ) ) );
+    checkForError( m_types.insert( std::make_pair( nameData.name, TypeData{ .category = TypeCategory::Bitmask } ) ).second,
+                   line,
+                   "bitmask <" + nameData.name + "> already specified as a type" );
+  }
+}
+
+void VulkanHppGenerator::readTypesTypeDefine( tinyxml2::XMLElement const * element, std::map<std::string, std::string> const & attributes )  // parse a <type category="define">: harvest special defines (typesafe check, header version) and register the name
+{
+  int line = element->GetLineNum();
+  checkAttributes( line, attributes, { { "category", { "define" } } }, { { "api", { "vulkan", "vulkansc" } }, { "name", {} }, { "requires", {} } } );
+
+  std::string name, require;
+  for ( auto const & attribute : attributes )
+  {
+    if ( attribute.first == "api" )
+    {
+      if ( attribute.second == "vulkansc" )
+      {
+        return;  // skip stuff marked as "vulkansc" !
+      }
+      assert( attribute.second == "vulkan" );
+    }
+    else if ( attribute.first == "name" )
+    {
+      name = attribute.second;
+    }
+    else if ( attribute.first == "requires" )
+    {
+      require = attribute.second;
+    }
+  }
+
+  if ( !name.empty() )  // name given as an attribute: the element text is the define's body
+  {
+    checkForError( !element->FirstChildElement(), line, "unknown formatting of type category=define name <" + name + ">" );
+    checkForError( element->LastChild() && element->LastChild()->ToText() && element->LastChild()->ToText()->Value(),
+                   line,
+                   "unknown formatting of type category=define named <" + name + ">" );
+
+    // filter out the check for the different types of VK_DEFINE_NON_DISPATCHABLE_HANDLE
+    if ( name == "VK_USE_64_BIT_PTR_DEFINES" )
+    {
+      m_typesafeCheck = "#if ( VK_USE_64_BIT_PTR_DEFINES == 1 )";
+    }
+    else if ( ( name == "VK_DEFINE_NON_DISPATCHABLE_HANDLE" ) && ( m_typesafeCheck.empty() ) )  // fallback only if VK_USE_64_BIT_PTR_DEFINES was not seen first
+    {
+      std::string text  = element->LastChild()->ToText()->Value();
+      size_t      start = text.find( "#if defined(__LP64__)" );
+      checkForError( start != std::string::npos, line, "unexpected text in type category=define named <" + name + ">" );
+      size_t end = text.find_first_of( "\r\n", start + 1 );
+      checkForError( end != std::string::npos, line, "unexpected text in type category=define named <" + name + ">" );
+      m_typesafeCheck = text.substr( start, end - start );  // capture the "#if defined(__LP64__)..." preprocessor line
+    }
+  }
+  else if ( element->GetText() )  // no name attribute: the name is given as a <name> child element
+  {
+    std::string text = element->GetText();
+    if ( ( text.find( "class" ) != std::string::npos ) || ( text.find( "struct" ) != std::string::npos ) )
+    {
+      // here are a couple of structs as defines, which really are types!
+      tinyxml2::XMLElement const * child = element->FirstChildElement();
+      checkForError( child && ( strcmp( child->Value(), "name" ) == 0 ) && child->GetText(), line, "unexpected formatting of type category=define" );
+      name = child->GetText();
+      checkForError(
+        m_types.insert( std::make_pair( name, TypeData{ .category = TypeCategory::Define } ) ).second, line, "type <" + name + "> has already been speficied" );  // NOTE(review): typo "speficied" in this runtime message — fix upstream
+    }
+    else
+    {
+      tinyxml2::XMLElement const * child = element->FirstChildElement();
+      checkForError( child && !child->FirstAttribute() && ( strcmp( child->Value(), "name" ) == 0 ) && child->GetText(),
+                     line,
+                     "unknown formatting of type category define" );
+      name = trim( child->GetText() );
+      if ( name == "VK_HEADER_VERSION" )
+      {
+        m_version = trimEnd( element->LastChild()->ToText()->Value() );  // record the header version number for the generated files
+      }
+      // ignore all the other defines
+      checkForWarning( !child->NextSiblingElement() ||
+                         ( child->NextSiblingElement() && !child->NextSiblingElement()->FirstAttribute() &&
+                           ( strcmp( child->NextSiblingElement()->Value(), "type" ) == 0 ) && !child->NextSiblingElement()->NextSiblingElement() ),
+                       line,
+                       "unknown formatting of type category define" );
+    }
+  }
+
+  assert( !name.empty() );
+  checkForError( m_defines.insert( { name, { require, line } } ).second, line, "define <" + name + "> has already been specified" );
+}
+
+// Read a type element with category="enum": either a new enum type, or an alias of an already
+// registered one. Registers the name in m_enums (new enums) and in m_types (always).
+void VulkanHppGenerator::readTypesTypeEnum( tinyxml2::XMLElement const * element, std::map<std::string, std::string> const & attributes )
+{
+  int xmlLine = element->GetLineNum();
+  checkAttributes( xmlLine, attributes, { { "category", { "enum" } }, { "name", {} } }, { { "alias", {} } } );
+  checkElements( xmlLine, getChildElements( element ), {} );
+
+  // pick up the optional alias attribute first (std::map iterates "alias" before "name")
+  std::string aliasName;
+  auto        aliasAttribute = attributes.find( "alias" );
+  if ( aliasAttribute != attributes.end() )
+  {
+    aliasName = aliasAttribute->second;
+    checkForError( !aliasName.empty(), xmlLine, "enum with empty alias" );
+  }
+  std::string enumName = attributes.find( "name" )->second;
+  checkForError( !enumName.empty(), xmlLine, "enum with empty name" );
+  checkForError( m_enums.find( enumName ) == m_enums.end(), xmlLine, "enum <" + enumName + "> already specified" );
+  assert( !enumName.empty() );
+
+  if ( aliasName.empty() )
+  {
+    // a brand-new enum type
+    checkForError( m_enums.insert( std::make_pair( enumName, EnumData{ .xmlLine = xmlLine } ) ).second, xmlLine, "enum <" + enumName + "> already specified" );
+  }
+  else
+  {
+    // an alias: attach the new name to the aliased enum
+    auto enumIt = m_enums.find( aliasName );
+    checkForError( enumIt != m_enums.end(), xmlLine, "enum with unknown alias <" + aliasName + ">" );
+    checkForError( enumIt->second.alias.empty(), xmlLine, "enum <" + enumIt->first + "> already has an alias <" + enumIt->second.alias + ">" );
+    enumIt->second.alias = enumName;
+  }
+  checkForError(
+    m_types.insert( std::make_pair( enumName, TypeData{ .category = TypeCategory::Enum } ) ).second, xmlLine, "enum <" + enumName + "> already specified as a type" );
+}
+
+// Read a type element with category="funcpointer" (e.g. PFN_vkAllocationFunction): registers the
+// function pointer in m_funcPointers and m_types, and collects its argument types from the <type>
+// children, extracting each argument name from the raw text following the <type> element.
+void VulkanHppGenerator::readTypesTypeFuncpointer( tinyxml2::XMLElement const * element, std::map<std::string, std::string> const & attributes )
+{
+  int line = element->GetLineNum();
+  checkAttributes( line, attributes, { { "category", { "funcpointer" } } }, { { "requires", {} } } );
+  std::vector<tinyxml2::XMLElement const *> children = getChildElements( element );
+  checkElements( line, children, { { "name", true } }, { "type" } );
+
+  std::string requirements;
+  for ( auto const & attribute : attributes )
+  {
+    if ( attribute.first == "requires" )
+    {
+      requirements = attribute.second;
+    }
+  }
+
+  auto                  funcPointerIt = m_funcPointers.end();
+  std::set<std::string> argumentNames;
+  for ( auto const & child : children )
+  {
+    std::string value     = child->Value();
+    int         childLine = child->GetLineNum();
+    if ( value == "name" )
+    {
+      // the <name> child comes before any <type> child, so funcPointerIt is valid below
+      std::string name = child->GetText();
+      checkForError( !name.empty(), childLine, "funcpointer with empty name" );
+      checkForError( m_funcPointers.find( name ) == m_funcPointers.end(), childLine, "funcpointer <" + name + "> already specified" );
+      funcPointerIt = m_funcPointers.insert( std::make_pair( name, FuncPointerData( requirements, line ) ) ).first;
+      checkForError( m_types.insert( std::make_pair( name, TypeData{ .category = TypeCategory::FuncPointer } ) ).second,
+                     childLine,
+                     "funcpointer <" + name + "> already specified as a type" );
+    }
+    else if ( value == "type" )
+    {
+      assert( funcPointerIt != m_funcPointers.end() );
+      std::string type = child->GetText();
+      funcPointerIt->second.arguments.push_back( { type, childLine } );
+
+      // the argument name lives in the raw text after the <type> element, e.g. "* pUserData,"
+      // NOTE(review): assumes a text sibling always follows a <type> child — sibling is
+      // dereferenced without a null check; confirm against the vk.xml schema
+      auto         sibling      = child->NextSibling();
+      char const * siblingValue = sibling->Value();
+      assert( siblingValue != nullptr );
+      std::string argumentName = siblingValue;
+      // strip leading pointer/space decoration, then cut at the end of the argument name
+      argumentName             = argumentName.substr( argumentName.find_first_not_of( "* " ) );
+      argumentName             = argumentName.substr( 0, argumentName.find_first_of( ",)" ) );
+      checkForError( argumentNames.insert( argumentName ).second,
+                     childLine,
+                     "funcpointer <" + funcPointerIt->first + "> already has an argument named <" + argumentName + ">" );
+    }
+  }
+}
+
+// Read a type element with category="handle": either an alias of an already registered handle, or
+// a new (dispatchable or non-dispatchable) handle definition. Registers the name in m_handles and
+// m_types.
+void VulkanHppGenerator::readTypesTypeHandle( tinyxml2::XMLElement const * element, std::map<std::string, std::string> const & attributes )
+{
+  int line = element->GetLineNum();
+
+  auto aliasIt = attributes.find( "alias" );
+  if ( aliasIt != attributes.end() )
+  {
+    // alias case: just attach the new name as an alias to the already registered handle
+    checkAttributes( line, attributes, { { "alias", {} }, { "category", { "handle" } }, { "name", {} } }, {} );
+    checkElements( line, getChildElements( element ), {} );
+
+    auto handlesIt = m_handles.find( aliasIt->second );
+    checkForError( handlesIt != m_handles.end(), line, "using unspecified alias <" + aliasIt->second + ">." );
+    checkForError( handlesIt->second.alias.empty(), line, "handle <" + handlesIt->first + "> already has an alias <" + handlesIt->second.alias + ">" );
+    handlesIt->second.alias = attributes.find( "name" )->second;
+    checkForError( m_types.insert( std::make_pair( handlesIt->second.alias, TypeData{ .category = TypeCategory::Handle } ) ).second,
+                   line,
+                   "handle alias <" + handlesIt->second.alias + "> already specified as a type" );
+  }
+  else
+  {
+    checkAttributes( line, attributes, { { "category", { "handle" } } }, { { "objtypeenum", {} }, { "parent", {} } } );
+
+    std::string objTypeEnum, parent;
+    for ( auto const & attribute : attributes )
+    {
+      if ( attribute.first == "objtypeenum" )
+      {
+        objTypeEnum = attribute.second;
+      }
+      else if ( attribute.first == "parent" )
+      {
+        parent = attribute.second;
+      }
+    }
+
+    // the name and the macro used (VK_DEFINE_HANDLE vs. VK_DEFINE_NON_DISPATCHABLE_HANDLE) come
+    // from the child elements
+    NameData nameData;
+    TypeInfo typeInfo;
+    std::tie( nameData, typeInfo ) = readNameAndType( element );
+    const bool isDispatchable      = typeInfo.type == "VK_DEFINE_HANDLE";
+
+    checkForError( nameData.name.starts_with( "Vk" ), line, "name <" + nameData.name + "> does not begin with <Vk>" );
+    checkForError( nameData.arraySizes.empty(), line, "name <" + nameData.name + "> with unsupported arraySizes" );
+    checkForError( ( typeInfo.type == "VK_DEFINE_HANDLE" ) || ( typeInfo.type == "VK_DEFINE_NON_DISPATCHABLE_HANDLE" ),
+                   line,
+                   "handle with invalid type <" + typeInfo.type + ">" );
+    checkForError( typeInfo.prefix.empty(), line, "unexpected type prefix <" + typeInfo.prefix + ">" );
+    checkForError( typeInfo.postfix == "(", line, "unexpected type postfix <" + typeInfo.postfix + ">" );
+    checkForError( !objTypeEnum.empty(), line, "handle <" + nameData.name + "> does not specify attribute \"objtypeenum\"" );
+
+    // fixed typo in the error message: "mulitple" -> "multiple"
+    checkForError( parent.find( ',' ) == std::string::npos, line, "multiple parents specified for handle <" + nameData.name + ">" );
+    checkForError( m_handles.insert( std::make_pair( nameData.name, HandleData( parent, objTypeEnum, isDispatchable, line ) ) ).second,
+                   line,
+                   "handle <" + nameData.name + "> already specified" );
+    checkForError( m_types.insert( std::make_pair( nameData.name, TypeData{ .category = TypeCategory::Handle } ) ).second,
+                   line,
+                   "handle <" + nameData.name + "> already specified as a type" );
+  }
+}
+
+// Read a type element with category="include": just record the include's name in m_includes so
+// that "requires" references can be validated later.
+void VulkanHppGenerator::readTypesTypeInclude( tinyxml2::XMLElement const * element, std::map<std::string, std::string> const & attributes )
+{
+  int line = element->GetLineNum();
+  checkAttributes( line, attributes, { { "category", { "include" } }, { "name", {} } }, {} );
+  checkElements( line, getChildElements( element ), {} );
+
+  std::string name = attributes.find( "name" )->second;
+  // use the already captured line instead of calling element->GetLineNum() a second time
+  checkForError( m_includes.insert( name ).second, line, "include named <" + name + "> already specified" );
+}
+
+// Read a type element that has no category but a "requires" attribute: registers the type in
+// m_types and checks that the required include has been listed before.
+void VulkanHppGenerator::readTypesTypeRequires( tinyxml2::XMLElement const * element, std::map<std::string, std::string> const & attributes )
+{
+  int line = element->GetLineNum();
+  checkAttributes( line, attributes, { { "name", {} }, { "requires", {} } }, {} );
+  checkElements( line, getChildElements( element ), {} );
+
+  // iterate by const reference: the original copied each pair; every sibling function uses
+  // "auto const &" here
+  for ( auto const & attribute : attributes )
+  {
+    if ( attribute.first == "name" )
+    {
+      checkForError( m_types.insert( std::make_pair( attribute.second, TypeData{ .category = TypeCategory::Requires } ) ).second,
+                     line,
+                     "type named <" + attribute.second + "> already specified" );
+    }
+    else
+    {
+      assert( attribute.first == "requires" );
+      checkForError( m_includes.find( attribute.second ) != m_includes.end(), line, "type requires unknown include <" + attribute.second + ">" );
+    }
+  }
+}
+
+// Read a type element with category="struct" or "union": either an alias of an existing structure,
+// or a full definition with its members. Registers the name in m_structures (or the alias maps) and
+// in m_types, and performs consistency checks on members sharing a "len" attribute.
+void VulkanHppGenerator::readTypesTypeStruct( tinyxml2::XMLElement const * element, bool isUnion, std::map<std::string, std::string> const & attributes )
+{
+  int line = element->GetLineNum();
+  if ( attributes.find( "alias" ) != attributes.end() )
+  {
+    // alias case: record the alias relation in both directions
+    checkAttributes( line, attributes, { { "alias", {} }, { "category", { "struct" } }, { "name", {} } }, {} );
+    checkElements( line, getChildElements( element ), {}, {} );
+
+    std::string alias, name;
+    for ( auto const & attribute : attributes )
+    {
+      if ( attribute.first == "alias" )
+      {
+        alias = attribute.second;
+      }
+      else if ( attribute.first == "name" )
+      {
+        name = attribute.second;
+      }
+    }
+
+    checkForError(
+      m_structureAliases.insert( std::make_pair( name, StructureAliasData( alias, line ) ) ).second, line, "structure alias <" + name + "> already used" );
+    checkForError( m_structureAliasesInverse[alias].insert( name ).second, line, "structure alias <" + name + "> already used with structure <" + alias + ">" );
+    checkForError( m_types.insert( std::make_pair( name, TypeData{ .category = TypeCategory::Struct } ) ).second,
+                   line,
+                   "struct <" + name + "> already specified as a type" );
+  }
+  else
+  {
+    checkAttributes( line,
+                     attributes,
+                     { { "category", { isUnion ? "union" : "struct" } }, { "name", {} } },
+                     { { "allowduplicate", { "false", "true" } }, { "comment", {} }, { "returnedonly", { "true" } }, { "structextends", {} } } );
+    std::vector<tinyxml2::XMLElement const *> children = getChildElements( element );
+    checkElements( line, children, {}, { "member", "comment" } );
+
+    std::string              category, name;
+    std::vector<std::string> structExtends;
+    bool                     allowDuplicate = false;
+    bool                     returnedOnly   = false;
+    for ( auto const & attribute : attributes )
+    {
+      if ( attribute.first == "allowduplicate" )
+      {
+        allowDuplicate = ( attribute.second == "true" );
+      }
+      else if ( attribute.first == "category" )
+      {
+        category = attribute.second;
+      }
+      else if ( attribute.first == "name" )
+      {
+        name = attribute.second;
+      }
+      else if ( attribute.first == "returnedonly" )
+      {
+        checkForError( attribute.second == "true", line, "unknown value for attribute returnedonly: <" + attribute.second + ">" );
+        returnedOnly = true;
+      }
+      else if ( attribute.first == "structextends" )
+      {
+        structExtends = tokenize( attribute.second, "," );
+      }
+    }
+    assert( !name.empty() );
+    // make this warn a check, as soon as vk.xml has been fixed on attribute "allowduplicate" !
+    checkForWarning( !allowDuplicate || !structExtends.empty(), line, "attribute <allowduplicate> is true, but no structures are listed in <structextends>" );
+
+    // fixed typo in the error message: "specfied" -> "specified"
+    checkForError( m_structures.find( name ) == m_structures.end(), line, "struct <" + name + "> already specified" );
+    std::map<std::string, StructureData>::iterator it = m_structures.insert( std::make_pair( name, StructureData( structExtends, line ) ) ).first;
+    it->second.allowDuplicate                         = allowDuplicate;
+    it->second.isUnion                                = isUnion;
+    it->second.returnedOnly                           = returnedOnly;
+
+    // read the members (and any interleaved comments)
+    for ( auto child : children )
+    {
+      std::string value = child->Value();
+      if ( value == "comment" )
+      {
+        readComment( child );
+      }
+      else if ( value == "member" )
+      {
+        readTypesTypeStructMember( child, it->second.members, isUnion );
+      }
+    }
+    it->second.subStruct = determineSubStruct( *it );
+
+    // check if multiple structure members use the very same (not empty) len attribute
+    // Note: even though the arrays are not marked as optional, they still might be mutually exclusive (like in
+    // VkWriteDescriptorSet)! That is, there's not enough information available in vk.xml to decide on that, so we
+    // need this external knowledge!
+    static std::set<std::string> mutualExclusiveStructs = {
+      "VkAccelerationStructureBuildGeometryInfoKHR", "VkAccelerationStructureTrianglesOpacityMicromapEXT", "VkMicromapBuildInfoEXT", "VkWriteDescriptorSet"
+    };
+    static std::set<std::string> multipleLenStructs = { "VkImageConstraintsInfoFUCHSIA",
+                                                        "VkIndirectCommandsLayoutTokenNV",
+                                                        "VkPresentInfoKHR",
+                                                        "VkSemaphoreWaitInfo",
+                                                        "VkSubmitInfo",
+                                                        "VkSubpassDescription",
+                                                        "VkSubpassDescription2",
+                                                        "VkWin32KeyedMutexAcquireReleaseInfoKHR",
+                                                        "VkWin32KeyedMutexAcquireReleaseInfoNV" };
+    bool                         warned             = false;
+    for ( auto m0It = it->second.members.begin(); !warned && ( m0It != it->second.members.end() ); ++m0It )
+    {
+      if ( !m0It->len.empty() && ( m0It->len.front() != "null-terminated" ) )
+      {
+        for ( auto m1It = std::next( m0It ); !warned && ( m1It != it->second.members.end() ); ++m1It )
+        {
+          if ( !m1It->len.empty() && ( m0It->len.front() == m1It->len.front() ) )
+          {
+            if ( mutualExclusiveStructs.find( it->first ) != mutualExclusiveStructs.end() )
+            {
+              it->second.mutualExclusiveLens = true;
+            }
+            else
+            {
+              // warn only once per structure
+              checkForWarning(
+                multipleLenStructs.find( it->first ) != multipleLenStructs.end(),
+                line,
+                "Encountered structure <" + it->first +
+                  "> with multiple members referencing the same member for len. Need to be checked if they are supposed to be mutually exclusive." );
+              warned = true;
+            }
+          }
+        }
+      }
+    }
+
+    m_extendedStructs.insert( structExtends.begin(), structExtends.end() );
+    checkForError(
+      m_types.insert( std::make_pair( name, TypeData{ .category = ( category == "struct" ) ? TypeCategory::Struct : TypeCategory::Union } ) ).second,
+      line,
+      "struct <" + name + "> already specified as a type" );  // log type and alias in m_types
+  }
+}
+
+// Read a single <member> element of a struct or union: parses all known member attributes
+// (len/altlen, optional, selection/selector, values, ...) and the name/type/enum children, then
+// appends the resulting MemberData to members. Members marked api="vulkansc" are skipped.
+void VulkanHppGenerator::readTypesTypeStructMember( tinyxml2::XMLElement const * element, std::vector<MemberData> & members, bool isUnion )
+{
+  int                                line       = element->GetLineNum();
+  std::map<std::string, std::string> attributes = getAttributes( element );
+  checkAttributes( line,
+                   attributes,
+                   {},
+                   { { "altlen", {} },
+                     { "api", { "vulkan", "vulkansc" } },
+                     { "externsync", { "true" } },
+                     { "len", {} },
+                     { "limittype", { "bitmask", "bits", "exact", "max", "min", "mul", "noauto", "pot", "range", "struct" } },
+                     { "noautovalidity", { "true" } },
+                     { "objecttype", { "objectType" } },
+                     { "optional", { "false", "true" } },
+                     { "selection", {} },
+                     { "selector", {} },
+                     { "values", {} } } );
+  std::vector<tinyxml2::XMLElement const *> children = getChildElements( element );
+  checkElements( line, children, { { "name", true }, { "type", true } }, { "comment", "enum" } );
+
+  MemberData memberData( line );
+
+  for ( auto const & attribute : attributes )
+  {
+    if ( attribute.first == "api" )
+    {
+      if ( attribute.second == "vulkansc" )
+      {
+        return;  // skip stuff marked as "vulkansc" !
+      }
+      assert( attribute.second == "vulkan" );
+    }
+    else if ( attribute.first == "altlen" )
+    {
+      // "altlen" wins over "len" (map iteration visits "altlen" first)
+      assert( memberData.len.empty() );
+      memberData.len = tokenize( attribute.second, "," );
+      checkForError( memberData.len.size() == 1, line, "member attribute <altlen> holds unknown number of data: " + std::to_string( memberData.len.size() ) );
+      checkForError( altLens.find( memberData.len[0] ) != altLens.end(), line, "member attribute <altlen> holds unknown value <" + memberData.len[0] + ">" );
+    }
+    else if ( attribute.first == "len" )
+    {
+      if ( memberData.len.empty() )
+      {
+        memberData.len = tokenize( attribute.second, "," );
+        checkForError( !memberData.len.empty() && ( memberData.len.size() <= 2 ),
+                       line,
+                       "member attribute <len> holds unknown number of data: " + std::to_string( memberData.len.size() ) );
+        // the first len entry is either a previously read member or the literal "null-terminated"
+        auto lenMember = findStructMemberIt( memberData.len[0], members );
+        checkForError( lenMember != members.end() || ( memberData.len[0] == "null-terminated" ),
+                       line,
+                       "member attribute <len> holds unknown value <" + memberData.len[0] + ">" );
+        if ( lenMember != members.end() )
+        {
+          checkForError( lenMember->type.prefix.empty(),
+                         line,
+                         "member attribute <len> references a member of unexpected type <" + lenMember->type.compose( "VULKAN_HPP_NAMESPACE" ) + ">" );
+        }
+        if ( 1 < memberData.len.size() )
+        {
+          checkForError( ( memberData.len[1] == "1" ) || ( memberData.len[1] == "null-terminated" ),
+                         line,
+                         "member attribute <len> holds unknown second value <" + memberData.len[1] + ">" );
+        }
+      }
+    }
+    else if ( attribute.first == "noautovalidity" )
+    {
+      memberData.noAutoValidity = ( attribute.second == "true" );
+    }
+    else if ( attribute.first == "optional" )
+    {
+      std::vector<std::string> optional = tokenize( attribute.second, "," );
+      memberData.optional.reserve( optional.size() );
+      for ( auto const & o : optional )
+      {
+        memberData.optional.push_back( o == "true" );
+      }
+    }
+    else if ( attribute.first == "selection" )
+    {
+      checkForError( isUnion, line, "attribute <selection> is used with a non-union structure." );
+      memberData.selection = tokenize( attribute.second, "," );
+    }
+    else if ( attribute.first == "selector" )
+    {
+      memberData.selector = attribute.second;
+      auto selectorIt     = findStructMemberIt( memberData.selector, members );
+      checkForError( selectorIt != members.end(), line, "member attribute <selector> holds unknown value <" + memberData.selector + ">" );
+      checkForError( m_enums.find( selectorIt->type.type ) != m_enums.end(),
+                     line,
+                     "member attribute <selector> references unknown enum type <" + selectorIt->type.type + ">" );
+    }
+    else if ( attribute.first == "values" )
+    {
+      std::vector<std::string> values = tokenize( attribute.second, "," );
+      // report the offending attribute value (was erroneously printing the attribute name)
+      checkForError( values.size() == 1, line, "attribute \"values\" holds multiple values <" + attribute.second + ">, but it's expected to hold just one" );
+      memberData.value = values[0];
+    }
+  }
+
+  // the children fill in the member's name, type, and any array-size enum
+  for ( auto child : children )
+  {
+    std::string value = child->Value();
+    if ( value == "enum" )
+    {
+      readTypesTypeStructMemberEnum( child, memberData );
+    }
+    else if ( value == "name" )
+    {
+      readTypesTypeStructMemberName( child, memberData, members );
+    }
+    else if ( value == "type" )
+    {
+      readTypesTypeStructMemberType( child, memberData );
+    }
+  }
+
+  members.push_back( memberData );
+}
+
+// Read the <enum> child of a struct member: it names a constant used as an array size, and must be
+// bracketed by literal "[" and "]" text nodes (e.g. "pipelineCacheUUID[VK_UUID_SIZE]").
+void VulkanHppGenerator::readTypesTypeStructMemberEnum( tinyxml2::XMLElement const * element, MemberData & memberData )
+{
+  int line = element->GetLineNum();
+  checkAttributes( line, getAttributes( element ), {}, {} );
+  checkElements( line, getChildElements( element ), {}, {} );
+
+  std::string enumString = element->GetText();
+
+  // fixed typo in the error message: "specifiation" -> "specification"
+  checkForError( element->PreviousSibling() && ( strcmp( element->PreviousSibling()->Value(), "[" ) == 0 ) && element->NextSibling() &&
+                   ( strcmp( element->NextSibling()->Value(), "]" ) == 0 ),
+                 line,
+                 std::string( "structure member array specification is ill-formatted: <" ) + enumString + ">" );
+
+  memberData.arraySizes.push_back( enumString );
+  // each member may use at most one named constant as an array size
+  checkForError( memberData.usedConstant.empty(), line, "struct already holds a constant <" + memberData.usedConstant + ">" );
+  memberData.usedConstant = enumString;
+}
+
+// Read the <name> child of a struct member, together with any array-size or bitfield modifiers
+// that follow it in the raw text.
+void VulkanHppGenerator::readTypesTypeStructMemberName( tinyxml2::XMLElement const * element, MemberData & memberData, std::vector<MemberData> const & members )
+{
+  int xmlLine = element->GetLineNum();
+  checkAttributes( xmlLine, getAttributes( element ), {}, {} );
+  checkElements( xmlLine, getChildElements( element ), {}, {} );
+
+  std::string memberName = element->GetText();
+  checkForError( !isStructMember( memberName, members ), xmlLine, "structure member name <" + memberName + "> already used" );
+  memberData.name = memberName;
+
+  // the text following the name element may carry array sizes ("[2]") or a bit count (":8")
+  std::tie( memberData.arraySizes, memberData.bitCount ) = readModifiers( element->NextSibling() );
+}
+
+// Read the <type> child of a struct member; the surrounding prefix/postfix text (const, *, ...)
+// is picked up by readTypeInfo.
+void VulkanHppGenerator::readTypesTypeStructMemberType( tinyxml2::XMLElement const * element, MemberData & memberData )
+{
+  int xmlLine = element->GetLineNum();
+  checkAttributes( xmlLine, getAttributes( element ), {}, {} );
+  checkElements( xmlLine, getChildElements( element ), {}, {} );
+  memberData.type = readTypeInfo( element );
+}
+
+// Assemble a TypeInfo from a <type> element: the text before it (e.g. "const"), the type name
+// itself, and the text after it (e.g. "*").
+VulkanHppGenerator::TypeInfo VulkanHppGenerator::readTypeInfo( tinyxml2::XMLElement const * element ) const
+{
+  TypeInfo info;
+
+  tinyxml2::XMLNode const * before = element->PreviousSibling();
+  if ( before && before->ToText() )
+  {
+    info.prefix = trim( before->Value() );
+  }
+
+  info.type = element->GetText();
+
+  tinyxml2::XMLNode const * after = element->NextSibling();
+  if ( after && after->ToText() )
+  {
+    info.postfix = trimStars( trimEnd( after->Value() ) );
+  }
+  return info;
+}
+
+// If the given command is a vkDestroy* or vkFree* command, register it as the delete command of
+// the handle it destroys, and record that handle as a child of the "key" handle (the first
+// parameter, or "" for commands destroying a top-level handle).
+void VulkanHppGenerator::registerDeleter( std::string const & name, std::pair<std::string, CommandData> const & commandData )
+{
+  // substr offsets skip the leading "vk" of the command name
+  if ( ( commandData.first.substr( 2, 7 ) == "Destroy" ) || ( commandData.first.substr( 2, 4 ) == "Free" ) )
+  {
+    std::string key;
+    size_t      valueIndex;
+    switch ( commandData.second.params.size() )
+    {
+      case 2:
+      case 3:
+        // "vkDestroyX( [parent,] handle, pAllocator )": the handle is the second to last parameter
+        assert( commandData.second.params.back().type.type == "VkAllocationCallbacks" );
+        key        = ( commandData.second.params.size() == 2 ) ? "" : commandData.second.params[0].type.type;
+        valueIndex = commandData.second.params.size() - 2;
+        break;
+      case 4:
+        // "vkFreeX( parent, pool, count, pHandles )": handles are released back into a pool
+        key        = commandData.second.params[0].type.type;
+        valueIndex = 3;
+        assert( m_handles.find( commandData.second.params[valueIndex].type.type ) != m_handles.end() );
+        m_handles.find( commandData.second.params[valueIndex].type.type )->second.deletePool = commandData.second.params[1].type.type;
+        break;
+      default: assert( false ); valueIndex = 0;
+    }
+    // the destroyed handle becomes a child of the key handle ...
+    auto keyHandleIt = m_handles.find( key );
+    assert( keyHandleIt != m_handles.end() );
+    keyHandleIt->second.childrenHandles.insert( commandData.second.params[valueIndex].type.type );
+
+    // ... and remembers this command as its delete command
+    auto handleIt = m_handles.find( commandData.second.params[valueIndex].type.type );
+    assert( handleIt != m_handles.end() );
+    handleIt->second.deleteCommand = name;
+  }
+}
+
+// Emit the RAII wrapper for this handle's dependencies first: its parent handle, and any handle
+// type appearing among its constructors' parameters, so that every handle is generated after the
+// handles it depends on. Each emitted handle is recorded in listedHandles.
+void VulkanHppGenerator::rescheduleRAIIHandle( std::string &                              str,
+                                               std::pair<std::string, HandleData> const & handle,
+                                               std::set<std::string> &                    listedHandles,
+                                               std::set<std::string> const &              specialFunctions ) const
+{
+  listedHandles.insert( handle.first );
+
+  std::string const & parentName = handle.second.parent;
+  if ( !parentName.empty() && ( listedHandles.find( parentName ) == listedHandles.end() ) )
+  {
+    auto parentIt = m_handles.find( parentName );
+    assert( parentIt != m_handles.end() );
+    str += generateRAIIHandle( *parentIt, listedHandles, specialFunctions );
+  }
+
+  // constructor arguments that are themselves handles have to be listed before this handle, too
+  for ( auto const & ctorIt : handle.second.constructorIts )
+  {
+    for ( auto const & param : ctorIt->second.params )
+    {
+      auto dependencyIt = m_handles.find( param.type.type );
+      if ( ( dependencyIt != m_handles.end() ) && ( listedHandles.find( param.type.type ) == listedHandles.end() ) )
+      {
+        str += generateRAIIHandle( *dependencyIt, listedHandles, specialFunctions );
+      }
+    }
+  }
+}
+
+// Collect, in order of appearance, all commands from requireData that belong to handleCommands and
+// have not been listed before; every selected command is recorded in listedCommands.
+std::vector<std::string> VulkanHppGenerator::selectCommandsByHandle( std::vector<RequireData> const & requireData,
+                                                                     std::set<std::string> const &    handleCommands,
+                                                                     std::set<std::string> &          listedCommands ) const
+{
+  std::vector<std::string> selected;
+  for ( auto const & requirement : requireData )
+  {
+    for ( auto const & commandName : requirement.commands )
+    {
+      bool belongsToHandle = ( handleCommands.find( commandName ) != handleCommands.end() );
+      // insert().second is false for commands already listed, de-duplicating across requirements
+      if ( belongsToHandle && listedCommands.insert( commandName ).second )
+      {
+        selected.push_back( commandName );
+      }
+    }
+  }
+  return selected;
+}
+
+// Store the copyright comment from vk.xml as the license header of the generated files: prefix
+// every line with "// ", strip trailing whitespace, and append a generated-file notice. Only one
+// copyright comment may occur in the registry.
+void VulkanHppGenerator::setVulkanLicenseHeader( int line, std::string const & comment )
+{
+  checkForError( m_vulkanLicenseHeader.empty(), line, "second encounter of a Copyright comment" );
+  m_vulkanLicenseHeader = comment;
+
+  // turn every line after the first into a C++ line comment
+  size_t newlinePos = m_vulkanLicenseHeader.find( '\n' );
+  while ( newlinePos != std::string::npos )
+  {
+    m_vulkanLicenseHeader.replace( newlinePos, 1, "\n// " );
+    newlinePos = m_vulkanLicenseHeader.find( '\n', newlinePos + 1 );
+  }
+  // remove any trailing spaces
+  m_vulkanLicenseHeader = trimEnd( m_vulkanLicenseHeader );
+
+  // and add a little message on our own
+  m_vulkanLicenseHeader += "\n\n// This header is generated from the Khronos Vulkan XML API Registry.";
+  m_vulkanLicenseHeader = trim( m_vulkanLicenseHeader ) + "\n";
+}
+
+// Determine whether the leading constructor parameter of this handle is its grandparent handle
+// (first parameter is a handle, second parameter is the handle's parent, and the first parameter's
+// type equals the parent's parent); such a leading parameter can be skipped in the generated RAII
+// constructor.
+bool VulkanHppGenerator::skipLeadingGrandParent( std::pair<std::string, HandleData> const & handle ) const
+{
+  bool skip = false;
+  assert( !handle.second.constructorIts.empty() );
+  auto constructorIt = handle.second.constructorIts.begin();
+  // first parameter is a handle and the second parameter is this handle's parent?
+  if ( ( 1 < ( *constructorIt )->second.params.size() ) && isHandleType( ( *constructorIt )->second.params[0].type.type ) &&
+       ( ( *constructorIt )->second.params[1].type.type == handle.second.parent ) )
+  {
+    auto parentIt = m_handles.find( handle.second.parent );
+    assert( parentIt != m_handles.end() );
+    // the grandparent pattern holds when the first parameter is the parent's own parent
+    skip = ( ( *constructorIt )->second.params[0].type.type == parentIt->second.parent );
+#if !defined( NDEBUG )
+    // all other constructors are expected to show the very same leading parameter types
+    for ( auto it = std::next( constructorIt ); it != handle.second.constructorIts.end(); ++it )
+    {
+      assert( ( *it )->second.params[0].type.type == ( *constructorIt )->second.params[0].type.type );
+      assert( !skip || ( ( *it )->second.params[1].type.type == ( *constructorIt )->second.params[1].type.type ) );
+    }
+#endif
+  }
+  return skip;
+}
+
+// Map a TypeCategory back to the category string used in vk.xml (for diagnostics).
+std::string VulkanHppGenerator::toString( TypeCategory category )
+{
+  switch ( category )
+  {
+    case TypeCategory::Bitmask: return "bitmask";
+    case TypeCategory::BaseType: return "basetype";
+    case TypeCategory::Define: return "define";
+    case TypeCategory::Enum: return "enum";
+    case TypeCategory::FuncPointer: return "funcpointer";
+    case TypeCategory::Handle: return "handle";
+    case TypeCategory::Requires: return "requires";
+    case TypeCategory::Struct: return "struct";
+    case TypeCategory::Union: return "union";
+    case TypeCategory::Unknown: return "unknown";  // fixed typo: was "unkown"
+    default: assert( false ); return "";
+  }
+}
+
+// Record that <name> is an alias of the enum value <aliasName>. Re-listing the identical alias is
+// tolerated (the insert is a no-op); aliasing a different value under the same name is an error.
+void VulkanHppGenerator::EnumData::addEnumAlias( int line, std::string const & name, std::string const & aliasName )
+{
+  auto existingIt  = aliases.find( name );
+  bool consistent  = ( existingIt == aliases.end() ) || ( existingIt->second.name == aliasName );
+  checkForError( consistent, line, "enum alias <" + name + "> already listed for a different enum value" );
+  aliases.insert( std::make_pair( name, EnumAliasData( aliasName, line ) ) );
+}
+
+// Append a new enum value unless a value with the same name has already been listed (duplicates
+// are silently ignored, keeping the first occurrence).
+void VulkanHppGenerator::EnumData::addEnumValue(
+  int line, std::string const & valueName, std::string const & protect, bool bitpos, std::string const & extension )
+{
+  bool alreadyListed = std::any_of( values.begin(), values.end(), [&valueName]( EnumValueData const & evd ) { return evd.name == valueName; } );
+  if ( !alreadyListed )
+  {
+    values.emplace_back( line, valueName, protect, extension, bitpos );
+  }
+}
+
+// Compose the full C++ type string: "<prefix> <type> <postfix>", with the "Vk" prefix of the type
+// replaced by the given namespace (when non-empty) for Vulkan types.
+std::string VulkanHppGenerator::TypeInfo::compose( std::string const & nameSpace ) const
+{
+  std::string composed = prefix;
+  if ( !prefix.empty() )
+  {
+    composed += " ";
+  }
+  if ( nameSpace.empty() )
+  {
+    composed += type;
+  }
+  else
+  {
+    if ( type.substr( 0, 2 ) == "Vk" )
+    {
+      composed += nameSpace + "::";
+    }
+    composed += stripPrefix( type, "Vk" );
+  }
+  if ( !postfix.empty() )
+  {
+    composed += " ";
+  }
+  return composed + postfix;
+}
+
+// trivial constructor: store the dependency list and the xml line this require block was read from
+VulkanHppGenerator::RequireData::RequireData( int line, std::vector<std::string> const & depends_ ) : depends( depends_ ), xmlLine( line ) {}
+
+//
+// VulkanHppGenerator local functions
+//
+
+// check the validity of an attributes map
+// line       : the line in the xml file where the attributes are listed
+// attributes : the map of name/value pairs of the encountered attributes
+// required   : the required attributes, with a set of allowed values per attribute
+// optional   : the optional attributes, with a set of allowed values per attribute
+void checkAttributes( int                                                  line,
+                      std::map<std::string, std::string> const &           attributes,
+                      std::map<std::string, std::set<std::string>> const & required,
+                      std::map<std::string, std::set<std::string>> const & optional )
+{
+  // check if all required attributes are included and if there is a set of allowed values, check if the actual
+  // value is part of that set
+  for ( auto const & r : required )
+  {
+    auto attributesIt = attributes.find( r.first );
+    checkForError( attributesIt != attributes.end(), line, "missing attribute <" + r.first + ">" );
+    if ( !r.second.empty() )
+    {
+      std::vector<std::string> values = tokenize( attributesIt->second, "," );
+      for ( auto const & v : values )
+      {
+        checkForError( r.second.find( v ) != r.second.end(), line, "unexpected attribute value <" + v + "> in attribute <" + attributesIt->first + ">" );
+      }
+    }
+  }
+  // check that every attribute that is not required is at least optional, and if there is a set of
+  // allowed values, check that the actual value is part of that set
+  for ( auto const & a : attributes )
+  {
+    if ( required.find( a.first ) == required.end() )
+    {
+      auto optionalIt = optional.find( a.first );
+      if ( optionalIt == optional.end() )
+      {
+        checkForWarning( false, line, "unknown attribute <" + a.first + ">" );
+        continue;
+      }
+      if ( !optionalIt->second.empty() )
+      {
+        std::vector<std::string> values = tokenize( a.second, "," );
+        for ( auto const & v : values )
+        {
+          checkForWarning(
+            optionalIt->second.find( v ) != optionalIt->second.end(), line, "unexpected attribute value <" + v + "> in attribute <" + a.first + ">" );
+        }
+      }
+    }
+  }
+}
+
+void checkElements( int                                               line,
+                    std::vector<tinyxml2::XMLElement const *> const & elements,
+                    std::map<std::string, bool> const &               required,
+                    std::set<std::string> const &                     optional )
+{
+  std::map<std::string, size_t> encountered;
+  for ( auto const & e : elements )
+  {
+    std::string value = e->Value();
+    encountered[value]++;
+    checkForWarning(
+      ( required.find( value ) != required.end() ) || ( optional.find( value ) != optional.end() ), e->GetLineNum(), "unknown element <" + value + ">" );
+  }
+  for ( auto const & r : required )
+  {
+    auto encounteredIt = encountered.find( r.first );
+    checkForError( encounteredIt != encountered.end(), line, "missing required element <" + r.first + ">" );
+    // check: r.second (means: required exactly once) => (encounteredIt->second == 1)
+    checkForError( !r.second || ( encounteredIt->second == 1 ),
+                   line,
+                   "required element <" + r.first + "> is supposed to be listed exactly once, but is listed " + std::to_string( encounteredIt->second ) );
+  }
+}
+
+void checkForError( bool condition, int line, std::string const & message )
+{
+  if ( !condition )
+  {
+    throw std::runtime_error( "VulkanHppGenerator: Spec error on line " + std::to_string( line ) + ": " + message );
+  }
+}
+
+void checkForWarning( bool condition, int line, std::string const & message )
+{
+  if ( !condition )
+  {
+    std::cerr << "VulkanHppGenerator: Spec warning on line " << std::to_string( line ) << ": " << message << "!" << std::endl;
+  }
+}
+
+std::string findTag( std::set<std::string> const & tags, std::string const & name, std::string const & postfix )
+{
+  auto tagIt = std::find_if( tags.begin(), tags.end(), [&name, &postfix]( std::string const & t ) { return name.ends_with( t + postfix ); } );
+  return ( tagIt != tags.end() ) ? *tagIt : "";
+}
+
+std::string generateCArraySizes( std::vector<std::string> const & sizes )
+{
+  std::string arraySizes;
+  for ( auto const & s : sizes )
+  {
+    arraySizes += "[" + s + "]";
+  }
+  return arraySizes;
+}
+
+std::pair<std::string, std::string> generateEnumSuffixes( std::string const & name, bool bitmask, std::set<std::string> const & tags )
+{
+  std::string prefix, postfix;
+  if ( name == "VkResult" )
+  {
+    prefix = "VK_";
+  }
+  else
+  {
+    if ( bitmask )
+    {
+      // for a bitmask enum, cut off the trailing "FlagBits" and any vendor tag, then convert the
+      // shortened name to upper case to form the value-name prefix
+      size_t pos = name.find( "FlagBits" );
+      assert( pos != std::string::npos );
+      std::string shortenedName = name;
+      shortenedName.erase( pos, strlen( "FlagBits" ) );
+      std::string tag = findTag( tags, shortenedName );
+      prefix          = toUpperCase( stripPostfix( shortenedName, tag ) ) + "_";
+    }
+    else
+    {
+      // for a non-bitmask enum, convert the name to upper case
+      prefix = toUpperCase( name ) + "_";
+    }
+
+    // if the enum name contains a tag move it from the prefix to the postfix to generate correct enum value
+    // names.
+    for ( auto const & tag : tags )
+    {
+      if ( prefix.ends_with( tag + "_" ) )
+      {
+        prefix.erase( prefix.length() - tag.length() - 1 );
+        postfix = "_" + tag;
+        break;
+      }
+      else if ( name.ends_with( tag ) )
+      {
+        postfix = "_" + tag;
+        break;
+      }
+    }
+  }
+
+  return std::make_pair( prefix, postfix );
+}
+
+std::string generateEnumValueName( std::string const & enumName, std::string const & valueName, bool bitmask, std::set<std::string> const & tags )
+{
+  std::string prefix, postfix;
+  std::tie( prefix, postfix ) = generateEnumSuffixes( enumName, bitmask, tags );
+  std::string tag             = findTag( tags, valueName, "" );
+  if ( postfix == "_" + tag )
+  {
+    tag = findTag( tags, valueName, postfix );
+  }
+
+  std::string result = "e" + toCamelCase( stripPostfix( stripPrefix( valueName, prefix ), postfix ) );
+  if ( bitmask )
+  {
+    size_t pos = result.find( "Bit" );
+    if ( pos != std::string::npos )
+    {
+      result.erase( pos, 3 );
+    }
+  }
+  if ( !tag.empty() && ( result.substr( result.length() - tag.length() ) == toCamelCase( tag ) ) )
+  {
+    result = result.substr( 0, result.length() - tag.length() ) + tag;
+  }
+  return result;
+}
+
+std::string generateNamespacedType( std::string const & type )
+{
+  return type.starts_with( "Vk" ) ? ( "VULKAN_HPP_NAMESPACE::" + stripPrefix( type, "Vk" ) ) : type;
+}
+
+std::string generateNoDiscard( bool returnsSomething, bool multiSuccessCodes, bool multiErrorCodes )
+{
+  return ( returnsSomething || multiSuccessCodes ) ? "VULKAN_HPP_NODISCARD " : ( multiErrorCodes ? "VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS " : "" );
+}
+
+std::string generateStandardArray( std::string const & type, std::vector<std::string> const & sizes )
+{
+  std::string arrayString = "std::array<" + type + "," + sizes.back() + ">";
+  for ( size_t i = sizes.size() - 2; i < sizes.size(); i-- )
+  {
+    arrayString = "std::array<" + arrayString + "," + sizes[i] + ">";
+  }
+  return arrayString;
+}
+
+std::string generateStandardArrayWrapper( std::string const & type, std::vector<std::string> const & sizes )
+{
+  std::string arrayString = "VULKAN_HPP_NAMESPACE::ArrayWrapper" + std::to_string( sizes.size() ) + "D<" + type;
+  for ( auto const & size : sizes )
+  {
+    arrayString += ", " + size;
+  }
+  arrayString += ">";
+  return arrayString;
+}
+
+std::string generateSuccessCode( std::string const & code, std::set<std::string> const & tags )
+{
+  std::string tag = findTag( tags, code );
+  // on each success code: prepend 'VULKAN_HPP_NAMESPACE::Result::e', strip "VK_" and a tag, convert it to camel
+  // case, and add the tag again
+  return "VULKAN_HPP_NAMESPACE::Result::e" + toCamelCase( stripPostfix( stripPrefix( code, "VK_" ), tag ) ) + tag;
+}
+
+std::map<std::string, std::string> getAttributes( tinyxml2::XMLElement const * element )
+{
+  std::map<std::string, std::string> attributes;
+  for ( auto attribute = element->FirstAttribute(); attribute; attribute = attribute->Next() )
+  {
+    assert( attributes.find( attribute->Name() ) == attributes.end() );
+    attributes[attribute->Name()] = attribute->Value();
+  }
+  return attributes;
+}
+
+template <typename ElementContainer>
+std::vector<tinyxml2::XMLElement const *> getChildElements( ElementContainer const * element )
+{
+  std::vector<tinyxml2::XMLElement const *> childElements;
+  for ( tinyxml2::XMLElement const * childElement = element->FirstChildElement(); childElement; childElement = childElement->NextSiblingElement() )
+  {
+    childElements.push_back( childElement );
+  }
+  return childElements;
+}
+
+std::pair<std::vector<std::string>, std::string> readModifiers( tinyxml2::XMLNode const * node )
+{
+  std::vector<std::string> arraySizes;
+  std::string              bitCount;
+  if ( node && node->ToText() )
+  {
+    // following the name there might be some array size
+    std::string value = node->Value();
+    assert( !value.empty() );
+    if ( value[0] == '[' )
+    {
+      std::string::size_type endPos = 0;
+      while ( endPos + 1 != value.length() )
+      {
+        std::string::size_type startPos = value.find( '[', endPos );
+        checkForError( startPos != std::string::npos, node->GetLineNum(), "could not find '[' in <" + value + ">" );
+        endPos = value.find( ']', startPos );
+        checkForError( endPos != std::string::npos, node->GetLineNum(), "could not find ']' in <" + value + ">" );
+        checkForError( startPos + 2 <= endPos, node->GetLineNum(), "missing content between '[' and ']' in <" + value + ">" );
+        arraySizes.push_back( value.substr( startPos + 1, endPos - startPos - 1 ) );
+      }
+    }
+    else if ( value[0] == ':' )
+    {
+      bitCount = value.substr( 1 );
+    }
+    else
+    {
+      checkForError( ( value[0] == ';' ) || ( value[0] == ')' ), node->GetLineNum(), "unknown modifier <" + value + ">" );
+    }
+  }
+  return std::make_pair( arraySizes, bitCount );
+}
+
+std::string readSnippet( std::string const & snippetFile )
+{
+  std::ifstream ifs( std::string( BASE_PATH ) + "/snippets/" + snippetFile );
+  assert( !ifs.fail() );
+  std::ostringstream oss;
+  oss << ifs.rdbuf();
+  return oss.str();
+}
+
+std::string replaceWithMap( std::string const & input, std::map<std::string, std::string> replacements )
+{
+  // This will match ${someVariable} and contain someVariable in match group 1
+  std::regex re( R"(\$\{([^\}]+)\})" );
+  auto       it  = std::sregex_iterator( input.begin(), input.end(), re );
+  auto       end = std::sregex_iterator();
+
+  // No match, just return the original string
+  if ( it == end )
+  {
+    assert( replacements.empty() );
+    return input;
+  }
+
+#if !defined( NDEBUG )
+  std::set<std::string> matchedReplacements;
+#endif
+
+  std::string result = "";
+  while ( it != end )
+  {
+    std::smatch match         = *it;
+    auto        itReplacement = replacements.find( match[1].str() );
+    assert( itReplacement != replacements.end() );
+#if !defined( NDEBUG )
+    matchedReplacements.insert( match[1].str() );
+#endif
+
+    result += match.prefix().str() + ( ( itReplacement != replacements.end() ) ? itReplacement->second : match[0].str() );
+    ++it;
+
+    // we've passed the last match. Append the rest of the original string
+    if ( it == end )
+    {
+      result += match.suffix().str();
+    }
+  }
+#if !defined( NDEBUG )
+  std::set<std::string> missedReplacements;
+  for ( auto r : replacements )
+  {
+    if ( matchedReplacements.find( r.first ) == matchedReplacements.end() )
+    {
+      missedReplacements.insert( r.first );
+    }
+  }
+  assert( missedReplacements.empty() );
+#endif
+  return result;
+}
+
+std::string startLowerCase( std::string const & input )
+{
+  assert( !input.empty() );
+  return static_cast<char>( tolower( input[0] ) ) + input.substr( 1 );
+}
+
+std::string startUpperCase( std::string const & input )
+{
+  assert( !input.empty() );
+  return static_cast<char>( toupper( input[0] ) ) + input.substr( 1 );
+}
+
+std::string stripPostfix( std::string const & value, std::string const & postfix )
+{
+  std::string strippedValue = value;
+  if ( strippedValue.ends_with( postfix ) )
+  {
+    strippedValue.erase( strippedValue.length() - postfix.length() );
+  }
+  return strippedValue;
+}
+
+std::string stripPluralS( std::string const & name, std::set<std::string> const & tags )
+{
+  std::string strippedName = name;
+  std::string tag          = findTag( tags, name );
+  if ( strippedName.ends_with( "s" + tag ) )
+  {
+    size_t pos = strippedName.rfind( 's' );
+    if ( ( 2 <= pos ) && ( strippedName.substr( pos - 2, 3 ) == "ies" ) )
+    {
+      strippedName.replace( pos - 2, 3, "y" );
+    }
+    else
+    {
+      strippedName.erase( pos, 1 );
+    }
+  }
+  return strippedName;
+}
+
+std::string stripPrefix( std::string const & value, std::string const & prefix )
+{
+  std::string strippedValue = value;
+  if ( strippedValue.starts_with( prefix ) )
+  {
+    strippedValue.erase( 0, prefix.length() );
+  }
+  return strippedValue;
+}
+
+std::string toCamelCase( std::string const & value )
+{
+  assert( !value.empty() && ( isupper( value[0] ) || isdigit( value[0] ) ) );
+  std::string result;
+  result.reserve( value.size() );
+  bool keepUpper = true;
+  for ( auto c : value )
+  {
+    if ( c == '_' )
+    {
+      keepUpper = true;
+    }
+    else if ( isdigit( c ) )
+    {
+      keepUpper = true;
+      result.push_back( c );
+    }
+    else if ( keepUpper )
+    {
+      result.push_back( c );
+      keepUpper = false;
+    }
+    else
+    {
+      result.push_back( static_cast<char>( tolower( c ) ) );
+    }
+  }
+  return result;
+}
+
+std::string toUpperCase( std::string const & name )
+{
+  std::string convertedName;
+  bool        previousIsLowerCase = false;
+  bool        previousIsDigit     = false;
+  for ( auto c : name )
+  {
+    if ( ( isupper( c ) && ( previousIsLowerCase || previousIsDigit ) ) || ( isdigit( c ) && previousIsLowerCase ) )
+    {
+      convertedName.push_back( '_' );
+    }
+    convertedName.push_back( static_cast<char>( toupper( c ) ) );
+    previousIsLowerCase = !!islower( c );
+    previousIsDigit     = !!isdigit( c );
+  }
+  return convertedName;
+}
+
+std::vector<std::string> tokenize( std::string const & tokenString, std::string const & separator )
+{
+  std::vector<std::string> tokens;
+  if ( !tokenString.empty() )
+  {
+    size_t start = 0, end;
+    do
+    {
+      end = tokenString.find( separator, start );
+      if ( start != end )
+      {
+        tokens.push_back( trim( tokenString.substr( start, end - start ) ) );
+      }
+      start = end + separator.length();
+    } while ( end != std::string::npos );
+  }
+  return tokens;
+}
+
+std::vector<std::string> tokenizeAny( std::string const & tokenString, std::string const & separators )
+{
+  std::vector<std::string> tokens;
+  if ( !tokenString.empty() )
+  {
+    size_t start = 0, end;
+    do
+    {
+      end = tokenString.find_first_of( separators, start );
+      if ( start != end )
+      {
+        tokens.push_back( trim( tokenString.substr( start, end - start ) ) );
+      }
+      start = end + 1;
+    } while ( end != std::string::npos );
+  }
+  return tokens;
+}
+
+std::string trim( std::string const & input )
+{
+  std::string result = input;
+  result.erase( result.begin(), std::find_if( result.begin(), result.end(), []( char c ) { return !std::isspace( c ); } ) );
+  result.erase( std::find_if( result.rbegin(), result.rend(), []( char c ) { return !std::isspace( c ); } ).base(), result.end() );
+  return result;
+}
+
+std::string trimEnd( std::string const & input )
+{
+  std::string result = input;
+  result.erase( std::find_if( result.rbegin(), result.rend(), []( char c ) { return !std::isspace( c ); } ).base(), result.end() );
+  return result;
+}
+
+std::string trimStars( std::string const & input )
+{
+  std::string result = input;
+  size_t      pos    = result.find( '*' );
+  while ( pos != std::string::npos )
+  {
+    if ( ( 0 < pos ) && ( result[pos - 1] != ' ' ) && ( result[pos - 1] != '*' ) )
+    {
+      result.insert( pos, 1, ' ' );
+      ++pos;
+    }
+    else if ( ( pos < result.length() - 1 ) && ( result[pos + 1] != ' ' ) && ( result[pos + 1] != '*' ) )
+    {
+      result.insert( pos + 1, 1, ' ' );
+    }
+    pos = result.find( '*', pos + 1 );
+  }
+  return result;
+}
+
+void writeToFile( std::string const & str, std::string const & fileName )
+{
+  std::ofstream ofs( fileName );
+  assert( !ofs.fail() );
+  ofs << str;
+  ofs.close();
+
+#if defined( CLANG_FORMAT_EXECUTABLE )
+  std::cout << "VulkanHppGenerator: Formatting " << fileName << " ..." << std::endl;
+  std::string commandString = "\"" CLANG_FORMAT_EXECUTABLE "\" -i --style=file " + fileName;
+  int         ret           = std::system( commandString.c_str() );
+  if ( ret != 0 )
+  {
+    std::cout << "VulkanHppGenerator: failed to format file " << fileName << " with error <" << ret << ">\n";
+  }
+#endif
+}
+
+std::string toString( tinyxml2::XMLError error )
+{
+  switch ( error )
+  {
+    case tinyxml2::XML_SUCCESS: return "XML_SUCCESS";
+    case tinyxml2::XML_NO_ATTRIBUTE: return "XML_NO_ATTRIBUTE";
+    case tinyxml2::XML_WRONG_ATTRIBUTE_TYPE: return "XML_WRONG_ATTRIBUTE_TYPE";
+    case tinyxml2::XML_ERROR_FILE_NOT_FOUND: return "XML_ERROR_FILE_NOT_FOUND";
+    case tinyxml2::XML_ERROR_FILE_COULD_NOT_BE_OPENED: return "XML_ERROR_FILE_COULD_NOT_BE_OPENED";
+    case tinyxml2::XML_ERROR_FILE_READ_ERROR: return "XML_ERROR_FILE_READ_ERROR";
+    case tinyxml2::XML_ERROR_PARSING_ELEMENT: return "XML_ERROR_PARSING_ELEMENT";
+    case tinyxml2::XML_ERROR_PARSING_ATTRIBUTE: return "XML_ERROR_PARSING_ATTRIBUTE";
+    case tinyxml2::XML_ERROR_PARSING_TEXT: return "XML_ERROR_PARSING_TEXT";
+    case tinyxml2::XML_ERROR_PARSING_CDATA: return "XML_ERROR_PARSING_CDATA";
+    case tinyxml2::XML_ERROR_PARSING_COMMENT: return "XML_ERROR_PARSING_COMMENT";
+    case tinyxml2::XML_ERROR_PARSING_DECLARATION: return "XML_ERROR_PARSING_DECLARATION";
+    case tinyxml2::XML_ERROR_PARSING_UNKNOWN: return "XML_ERROR_PARSING_UNKNOWN";
+    case tinyxml2::XML_ERROR_EMPTY_DOCUMENT: return "XML_ERROR_EMPTY_DOCUMENT";
+    case tinyxml2::XML_ERROR_MISMATCHED_ELEMENT: return "XML_ERROR_MISMATCHED_ELEMENT";
+    case tinyxml2::XML_ERROR_PARSING: return "XML_ERROR_PARSING";
+    case tinyxml2::XML_CAN_NOT_CONVERT_TEXT: return "XML_CAN_NOT_CONVERT_TEXT";
+    case tinyxml2::XML_NO_TEXT_NODE: return "XML_NO_TEXT_NODE";
+    default: return "unknown error code <" + std::to_string( error ) + ">";
+  }
+}
+
+int main( int argc, char ** argv )
+{
+  try
+  {
+    tinyxml2::XMLDocument doc;
+
+    std::string filename = ( argc == 1 ) ? VK_SPEC : argv[1];
+
+#if defined( CLANG_FORMAT_EXECUTABLE )
+    std::cout << "VulkanHppGenerator: Found ";
+    std::string commandString = "\"" CLANG_FORMAT_EXECUTABLE "\" --version ";
+    int         ret           = std::system( commandString.c_str() );
+    if ( ret != 0 )
+    {
+      std::cout << "VulkanHppGenerator: failed to determine clang_format version with error <" << ret << ">\n";
+    }
+#endif
+
+    std::cout << "VulkanHppGenerator: Loading " << filename << std::endl;
+    tinyxml2::XMLError error = doc.LoadFile( filename.c_str() );
+    if ( error != tinyxml2::XML_SUCCESS )
+    {
+      std::cout << "VulkanHppGenerator: failed to load file " << filename << " with error <" << toString( error ) << ">" << std::endl;
+      return -1;
+    }
+
+    std::cout << "VulkanHppGenerator: Parsing " << filename << std::endl;
+    VulkanHppGenerator generator( doc );
+
+    generator.generateVulkanHppFile();
+    generator.generateVulkanEnumsHppFile();
+    generator.generateVulkanFormatTraitsHppFile();
+    generator.prepareVulkanFuncs();
+    generator.generateVulkanFuncsHppFile();
+    generator.generateVulkanHandlesHppFile();
+    generator.generateVulkanHashHppFile();
+    generator.prepareRAIIHandles();
+    generator.generateVulkanRAIIHppFile();
+    generator.generateVulkanStaticAssertionsHppFile();
+    generator.generateVulkanStructsHppFile();
+    generator.generateVulkanToStringHppFile();
+
+#if !defined( CLANG_FORMAT_EXECUTABLE )
+    std::cout << "VulkanHppGenerator: could not find clang-format. The generated files will not be formatted accordingly.\n";
+#endif
+  }
+  catch ( std::exception const & e )
+  {
+    std::cout << "caught exception: " << e.what() << std::endl;
+    return -1;
+  }
+  catch ( ... )
+  {
+    std::cout << "caught unknown exception" << std::endl;
+    return -1;
+  }
+}
diff --git a/host/libs/graphics_detector/include/vulkan-hpp/VulkanHppGenerator.hpp b/host/libs/graphics_detector/include/vulkan-hpp/VulkanHppGenerator.hpp
new file mode 100644
index 0000000..3f0e232
--- /dev/null
+++ b/host/libs/graphics_detector/include/vulkan-hpp/VulkanHppGenerator.hpp
@@ -0,0 +1,1117 @@
+// Copyright(c) 2015-2019, NVIDIA CORPORATION. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#pragma once
+
+#include <iostream>
+#include <map>
+#include <set>
+#include <tinyxml2.h>
+#include <vector>
+
+const size_t INVALID_INDEX = (size_t)~0;
+
+template <typename BitType>
+class Flags
+{
+public:
+  using MaskType = typename std::underlying_type<BitType>::type;
+
+  constexpr Flags() noexcept : m_mask( 0 ) {}
+
+  constexpr Flags( BitType bit ) noexcept : m_mask( static_cast<MaskType>( bit ) ) {}
+
+  constexpr explicit Flags( MaskType flags ) noexcept : m_mask( flags ) {}
+
+  constexpr bool operator!() const noexcept
+  {
+    return !m_mask;
+  }
+
+  constexpr bool operator&( BitType const & rhs ) const noexcept
+  {
+    return m_mask & static_cast<MaskType>( rhs );
+  }
+
+  constexpr Flags<BitType> operator&( Flags<BitType> const & rhs ) const noexcept
+  {
+    return Flags<BitType>( m_mask & rhs.m_mask );
+  }
+
+  constexpr Flags<BitType> operator|( Flags<BitType> const & rhs ) const noexcept
+  {
+    return Flags<BitType>( m_mask | rhs.m_mask );
+  }
+
+private:
+  MaskType m_mask;
+};
+
+enum class CommandFlavourFlagBits : uint8_t
+{
+  chained       = 1 << 0,
+  singular      = 1 << 1,
+  unique        = 1 << 2,
+  withAllocator = 1 << 3
+};
+using CommandFlavourFlags = Flags<CommandFlavourFlagBits>;
+
+constexpr CommandFlavourFlags operator|( CommandFlavourFlagBits const & lhs, CommandFlavourFlagBits const & rhs ) noexcept
+{
+  return CommandFlavourFlags( lhs ) | CommandFlavourFlags( rhs );
+}
+
+class VulkanHppGenerator
+{
+public:
+  VulkanHppGenerator( tinyxml2::XMLDocument const & document );
+
+  void generateVulkanEnumsHppFile() const;
+  void generateVulkanFormatTraitsHppFile() const;
+  void generateVulkanFuncsHppFile() const;
+  void generateVulkanHandlesHppFile() const;
+  void generateVulkanHashHppFile() const;
+  void generateVulkanHppFile() const;
+  void generateVulkanRAIIHppFile() const;
+  void generateVulkanStaticAssertionsHppFile() const;
+  void generateVulkanStructsHppFile() const;
+  void generateVulkanToStringHppFile() const;
+  void prepareRAIIHandles();
+  void prepareVulkanFuncs();
+
+private:
+  struct TypeInfo
+  {
+    std::string compose( std::string const & nameSpace ) const;
+
+    bool operator==( TypeInfo const & rhs ) const
+    {
+      return ( prefix == rhs.prefix ) && ( type == rhs.type ) && ( postfix == rhs.postfix );
+    }
+
+    bool operator!=( TypeInfo const & rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    bool operator<( TypeInfo const & rhs ) const
+    {
+      return ( prefix < rhs.prefix ) || ( ( prefix == rhs.prefix ) && ( ( type < rhs.type ) || ( ( type == rhs.type ) && ( postfix < rhs.postfix ) ) ) );
+    }
+
+    bool isConstPointer() const
+    {
+      return ( prefix.find( "const" ) != std::string::npos ) && ( postfix.find( '*' ) != std::string::npos );
+    }
+
+    bool isNonConstPointer() const
+    {
+      return ( prefix.find( "const" ) == std::string::npos ) && ( postfix.find( '*' ) != std::string::npos );
+    }
+
+    bool isValue() const
+    {
+      return ( ( prefix.find( '*' ) == std::string::npos ) && ( postfix.find( '*' ) == std::string::npos ) );
+    }
+
+    std::string prefix;
+    std::string type;
+    std::string postfix;
+  };
+
+  struct BaseTypeData
+  {
+    BaseTypeData( TypeInfo const & typeInfo_, int line ) : typeInfo( typeInfo_ ), xmlLine( line ) {}
+
+    TypeInfo typeInfo;
+    int      xmlLine;
+  };
+
+  struct BitmaskData
+  {
+    BitmaskData( std::string const & r, std::string const & t, int line ) : requirements( r ), type( t ), xmlLine( line ) {}
+
+    std::string requirements;
+    std::string type;
+    std::string alias;
+    int         xmlLine;
+  };
+
+  struct NameData
+  {
+    std::string              name;
+    std::vector<std::string> arraySizes;
+  };
+
+  struct ParamData
+  {
+    ParamData( int line ) : optional( false ), xmlLine( line ) {}
+
+    TypeInfo                 type;
+    std::string              name;
+    std::vector<std::string> arraySizes;
+    std::string              len;
+    bool                     optional;
+    std::string              stride;
+    int                      xmlLine;
+  };
+
+  struct CommandData
+  {
+    CommandData( int line ) : xmlLine( line ) {}
+
+    std::string              alias;
+    std::vector<std::string> errorCodes;
+    std::string              handle;
+    std::vector<ParamData>   params;
+    std::string              referencedIn;
+    std::string              returnType;
+    std::vector<std::string> successCodes;
+    int                      xmlLine;
+  };
+
+  struct DefineData
+  {
+    DefineData( std::string const & require_, int line ) : require( require_ ), xmlLine( line ) {}
+
+    std::string require;
+    int         xmlLine;
+  };
+
+  struct EnumAliasData
+  {
+    EnumAliasData( std::string const & name_, int line ) : name( name_ ), xmlLine( line ) {}
+
+    std::string name;
+    int         xmlLine;
+  };
+
+  struct EnumValueData
+  {
+    EnumValueData( int line, std::string const & name_, std::string const & protect_, std::string const & extension_, bool singleBit_ )
+      : name( name_ ), extension( extension_ ), protect( protect_ ), singleBit( singleBit_ ), xmlLine( line )
+    {
+    }
+
+    std::string name;
+    std::string extension;
+    std::string protect;
+    bool        singleBit;
+    int         xmlLine;
+  };
+
+  struct EnumData
+  {
+    void addEnumAlias( int line, std::string const & name, std::string const & alias );
+    void addEnumValue( int line, std::string const & valueName, std::string const & protect, bool bitpos, std::string const & extension );
+
+    std::string                          alias     = {};  // alias for this enum
+    std::map<std::string, EnumAliasData> aliases   = {};  // aliases for the values
+    std::string                          bitwidth  = {};
+    bool                                 isBitmask = false;
+    std::vector<EnumValueData>           values    = {};
+    int                                  xmlLine   = 0;
+  };
+
+  struct RequireData
+  {
+    RequireData( int line, std::vector<std::string> const & depends_ );
+
+    std::vector<std::string> depends;
+    std::vector<std::string> commands;
+    std::vector<std::string> types;
+    int                      xmlLine;
+  };
+
+  struct FeatureData
+  {
+    FeatureData( std::string const & number_ ) : number( number_ ) {}
+
+    std::string              number;
+    std::vector<RequireData> requireData;
+  };
+
+  struct ExtensionData
+  {
+    ExtensionData( int                 line,
+                   std::string const & deprecatedBy_,
+                   std::string const & number_,
+                   std::string const & obsoletedBy_,
+                   std::string const & platform_,
+                   std::string const & promotedTo_ )
+      : deprecatedBy( deprecatedBy_ ), number( number_ ), obsoletedBy( obsoletedBy_ ), platform( platform_ ), promotedTo( promotedTo_ ), xmlLine( line )
+    {
+    }
+
+    std::string              deprecatedBy;
+    std::string              number;
+    std::string              obsoletedBy;
+    std::string              platform;
+    std::string              promotedTo;
+    std::set<std::string>    depends;
+    std::vector<RequireData> requireData;
+    int                      xmlLine;
+  };
+
+  struct SkippedExtensionData
+  {
+    SkippedExtensionData( int line, std::string const & platform_ ) : platform( platform_ ), xmlLine( line ) {}
+
+    std::string platform;
+    int         xmlLine;
+  };
+
+  struct ComponentData
+  {
+    ComponentData( int line ) : xmlLine( line ) {}
+
+    std::string bits;
+    std::string name;
+    std::string numericFormat;
+    std::string planeIndex;
+    int         xmlLine;
+  };
+
+  struct PlaneData
+  {
+    PlaneData( int line ) : xmlLine( line ) {}
+
+    std::string compatible;
+    std::string heightDivisor;
+    std::string widthDivisor;
+    int         xmlLine;
+  };
+
+  struct FormatData
+  {
+    FormatData( int line ) : xmlLine( line ) {}
+
+    std::string                blockExtent;
+    std::string                blockSize;
+    std::string                chroma;
+    std::string                classAttribute;
+    std::vector<ComponentData> components;
+    std::string                compressed;
+    std::string                packed;
+    std::vector<PlaneData>     planes;
+    std::string                spirvImageFormat;
+    std::string                texelsPerBlock;
+    int                        xmlLine;
+  };
+
+  struct FuncPointerArgumentData
+  {
+    FuncPointerArgumentData( std::string const & t, int line ) : type( t ), xmlLine( line ) {}
+
+    std::string type;
+    int         xmlLine;
+  };
+
+  struct FuncPointerData
+  {
+    FuncPointerData( std::string const & r, int line ) : requirements( r ), xmlLine( line ) {}
+
+    std::vector<FuncPointerArgumentData> arguments;
+    std::string                          requirements;
+    int                                  xmlLine;
+  };
+
+  struct HandleData
+  {
+    HandleData( std::string const & p, std::string const & objType, bool isDispatchable, int line )
+      : objTypeEnum( objType ), parent( p ), isDispatchable( isDispatchable ), xmlLine( line )
+    {
+    }
+
+    std::string           alias;
+    std::set<std::string> childrenHandles;
+    std::set<std::string> commands;
+    std::string           deleteCommand;
+    std::string           deletePool;
+    std::string           objTypeEnum;
+    std::string           parent;
+    std::set<std::string> secondLevelCommands;
+    bool                  isDispatchable;
+    int                   xmlLine;
+
+    // RAII data
+    std::map<std::string, CommandData>::const_iterator              destructorIt;
+    std::vector<std::map<std::string, CommandData>::const_iterator> constructorIts;
+  };
+
+  // One member of a struct or union type; most fields mirror attributes of the
+  // corresponding <member> element in the XML registry and are filled in after
+  // construction.
+  struct MemberData
+  {
+    MemberData( int line ) : xmlLine( line ) {}
+
+    TypeInfo                 type;                    // full type information of this member
+    std::string              name;                    // member name
+    std::vector<std::string> arraySizes;              // fixed array dimensions, if this member is an array
+    std::string              bitCount;                // bit-field width, if this member is a bit-field
+    std::vector<std::string> len;                     // 'len' attribute parts (length specifiers for pointer members)
+    bool                     noAutoValidity = false;  // 'noautovalidity' attribute
+    std::vector<bool>        optional;                // 'optional' attribute parts
+    std::vector<std::string> selection;               // 'selection' attribute parts (union member selection values, presumably)
+    std::string              selector;                // 'selector' attribute (member that selects a union alternative, presumably)
+    std::string              value;                   // fixed value, e.g. the sType enum value
+    std::string              usedConstant;            // constant referenced by arraySizes/len, if any
+    int                      xmlLine;                 // line in the XML registry this was parsed from
+  };
+
+  // A platform entry from the registry; only its protect define is needed
+  // (the macro wrapped around platform-specific declarations, e.g. VK_USE_PLATFORM_*).
+  struct PlatformData
+  {
+    PlatformData( std::string const & protect_ ) : protect( protect_ ) {}
+
+    std::string protect;  // preprocessor guard for this platform
+  };
+
+  // An alias of a structure type (maps the alias name to the aliased struct).
+  struct StructureAliasData
+  {
+    StructureAliasData( std::string const & alias_, int line ) : alias( alias_ ), xmlLine( line ) {}
+
+    std::string alias;    // name of the aliased structure
+    int         xmlLine;  // line in the XML registry this was parsed from
+  };
+
+  // Everything collected about a structure or union type; members and flags
+  // other than structExtends/xmlLine are filled in after construction.
+  struct StructureData
+  {
+    StructureData( std::vector<std::string> const & extends, int line ) : structExtends( extends ), xmlLine( line ) {}
+
+    bool                     allowDuplicate      = false;  // 'allowduplicate' attribute: may appear more than once in a pNext chain
+    bool                     isUnion             = false;  // true if this is a union, not a struct
+    bool                     returnedOnly        = false;  // 'returnedonly' attribute: filled by the implementation, never by the app
+    bool                     mutualExclusiveLens = false;  // members' len attributes are mutually exclusive (presumably)
+    std::vector<MemberData>  members;                      // the struct/union members, in declaration order
+    std::vector<std::string> structExtends;                // structs whose pNext chain this struct may extend
+    std::string              subStruct;                    // a struct this one starts with (a prefix sub-structure), if any
+    int                      xmlLine;                      // line in the XML registry this was parsed from
+  };
+
+  // Category of a type listed in the registry, mirroring the 'category'
+  // attribute of <type> elements (Unknown for types without one).
+  enum class TypeCategory
+  {
+    Bitmask,
+    BaseType,
+    Define,
+    Enum,
+    FuncPointer,
+    Handle,
+    Requires,
+    Struct,
+    Union,
+    Unknown
+  };
+
+  // Per-type bookkeeping: its category and the feature/extension that
+  // references it (empty if not referenced, presumably).
+  struct TypeData
+  {
+    TypeCategory category     = TypeCategory::Unknown;
+    std::string  referencedIn = {};
+  };
+
+  // For a vector-valued command parameter: indices of the parameters holding
+  // its length and stride; INVALID_INDEX (declared elsewhere) means "none".
+  struct VectorParamData
+  {
+    size_t lenParam    = INVALID_INDEX;
+    size_t strideParam = INVALID_INDEX;
+  };
+
+private:
+  // ---- Internal helpers (declarations only; definitions are elsewhere). ----
+  // Registry construction: add parsed elements into the generator's data maps.
+  void        addCommand( std::string const & name, CommandData & commandData );
+  void        addMissingFlagBits( std::vector<RequireData> & requireData, std::string const & referencedIn );
+  std::string addTitleAndProtection( std::string const & title, std::string const & strIf, std::string const & strElse = {} ) const;
+  bool        allVectorSizesSupported( std::vector<ParamData> const & params, std::map<size_t, VectorParamData> const & vectorParams ) const;
+  void        appendDispatchLoaderDynamicCommands( std::vector<RequireData> const & requireData,
+                                                   std::set<std::string> &          listedCommands,
+                                                   std::string const &              title,
+                                                   std::string &                    commandMembers,
+                                                   std::string &                    initialCommandAssignments,
+                                                   std::string &                    instanceCommandAssignments,
+                                                   std::string &                    deviceCommandAssignments ) const;
+  void        appendRAIIDispatcherCommands( std::vector<RequireData> const & requireData,
+                                            std::set<std::string> &          listedCommands,
+                                            std::string const &              title,
+                                            std::string &                    contextInitializers,
+                                            std::string &                    contextMembers,
+                                            std::string &                    deviceAssignments,
+                                            std::string &                    deviceMembers,
+                                            std::string &                    instanceAssignments,
+                                            std::string &                    instanceMembers ) const;
+  // Post-parse consistency checks over the parsed registry data.
+  void        checkBitmaskCorrectness() const;
+  void        checkCommandCorrectness() const;
+  void        checkCorrectness() const;
+  void        checkDefineCorrectness() const;
+  void        checkEnumCorrectness() const;
+  void        checkEnumCorrectness( std::vector<RequireData> const & requireData ) const;
+  bool        checkEquivalentSingularConstructor( std::vector<std::map<std::string, CommandData>::const_iterator> const & constructorIts,
+                                                  std::map<std::string, CommandData>::const_iterator                      constructorIt,
+                                                  std::vector<ParamData>::const_iterator                                  lenIt ) const;
+  void        checkExtensionCorrectness() const;
+  void        checkFuncPointerCorrectness() const;
+  void        checkHandleCorrectness() const;
+  void        checkStructCorrectness() const;
+  void checkStructMemberCorrectness( std::string const & structureName, std::vector<MemberData> const & members, std::set<std::string> & sTypeValues ) const;
+  // Analysis helpers: classify command parameters (return / vector / skipped /
+  // singular / templated ...) and look up struct members.
+  std::string              combineDataTypes( std::map<size_t, VectorParamData> const & vectorParams,
+                                             std::vector<size_t> const &               returnParams,
+                                             bool                                      enumerating,
+                                             std::vector<std::string> const &          dataTypes,
+                                             CommandFlavourFlags                       flavourFlags,
+                                             bool                                      raii ) const;
+  bool                     containsArray( std::string const & type ) const;
+  bool                     containsFuncPointer( std::string const & type ) const;
+  bool                     containsFloatingPoints( std::vector<MemberData> const & members ) const;
+  bool                     containsUnion( std::string const & type ) const;
+  std::vector<size_t>      determineConstPointerParams( std::vector<ParamData> const & params ) const;
+  std::vector<std::string> determineDataTypes( std::vector<VulkanHppGenerator::ParamData> const & params,
+                                               std::map<size_t, VectorParamData> const &          vectorParams,
+                                               std::vector<size_t> const &                        returnParams,
+                                               std::set<size_t> const &                           templatedParams ) const;
+  size_t                   determineDefaultStartIndex( std::vector<ParamData> const & params, std::set<size_t> const & skippedParams ) const;
+  bool                     determineEnumeration( std::map<size_t, VectorParamData> const & vectorParams, std::vector<size_t> const & returnParams ) const;
+  size_t                   determineInitialSkipCount( std::string const & command ) const;
+  std::vector<size_t>      determineReturnParams( std::vector<ParamData> const & params ) const;
+  std::vector<std::map<std::string, CommandData>::const_iterator>
+    determineRAIIHandleConstructors( std::string const & handleType, std::map<std::string, CommandData>::const_iterator destructorIt ) const;
+  std::map<std::string, CommandData>::const_iterator determineRAIIHandleDestructor( std::string const & handleType ) const;
+  std::set<size_t>                        determineSingularParams( size_t returnParam, std::map<size_t, VectorParamData> const & vectorParams ) const;
+  std::set<size_t>                        determineSkippedParams( std::vector<ParamData> const &            params,
+                                                                  size_t                                    initialSkipCount,
+                                                                  std::map<size_t, VectorParamData> const & vectorParams,
+                                                                  std::vector<size_t> const &               returnParam,
+                                                                  bool                                      singular ) const;
+  std::string                             determineSubStruct( std::pair<std::string, StructureData> const & structure ) const;
+  std::map<size_t, VectorParamData>       determineVectorParams( std::vector<ParamData> const & params ) const;
+  std::set<size_t>                        determineVoidPointerParams( std::vector<ParamData> const & params ) const;
+  void                                    distributeSecondLevelCommands( std::set<std::string> const & specialFunctions );
+  std::string                             findBaseName( std::string aliasName, std::map<std::string, EnumAliasData> const & aliases ) const;
+  std::vector<MemberData>::const_iterator findStructMemberIt( std::string const & name, std::vector<MemberData> const & memberData ) const;
+  std::vector<MemberData>::const_iterator findStructMemberItByType( std::string const & type, std::vector<MemberData> const & memberData ) const;
+  // Code generation: allocator/argument/template lists, base types, and
+  // bitmask-related output (all return the generated C++ text as a string).
+  std::pair<std::string, std::string>     generateAllocatorTemplates( std::vector<size_t> const &               returnParams,
+                                                                      std::vector<std::string> const &          returnDataTypes,
+                                                                      std::map<size_t, VectorParamData> const & vectorParams,
+                                                                      CommandFlavourFlags                       flavourFlags,
+                                                                      bool                                      definition ) const;
+  std::string                             generateArgumentListEnhanced( std::vector<ParamData> const &            params,
+                                                                        std::vector<size_t> const &               returnParams,
+                                                                        std::map<size_t, VectorParamData> const & vectorParams,
+                                                                        std::set<size_t> const &                  skippedParams,
+                                                                        std::set<size_t> const &                  singularParams,
+                                                                        std::set<size_t> const &                  templatedParams,
+                                                                        bool                                      definition,
+                                                                        CommandFlavourFlags                       flavourFlags,
+                                                                        bool                                      withDispatcher ) const;
+  std::string                             generateArgumentListStandard( std::vector<ParamData> const & params, std::set<size_t> const & skippedParams ) const;
+  std::string                             generateArgumentTemplates( std::vector<ParamData> const &            params,
+                                                                     std::vector<size_t> const &               returnParams,
+                                                                     std::map<size_t, VectorParamData> const & vectorParams,
+                                                                     std::set<size_t> const &                  templatedParams,
+                                                                     CommandFlavourFlags                       flavourFlags,
+                                                                     bool                                      raii ) const;
+  std::string                             generateBaseTypes() const;
+  std::string generateBitmask( std::map<std::string, BitmaskData>::const_iterator bitmaskIt, std::string const & surroundingProtect ) const;
+  std::string generateBitmasksToString() const;
+  std::string generateBitmasksToString( std::vector<RequireData> const & requireData, std::set<std::string> & listedBitmasks, std::string const & title ) const;
+  std::string generateBitmaskToString( std::map<std::string, BitmaskData>::const_iterator bitmaskIt ) const;
+  // Code generation: the argument lists and call sequence used inside a
+  // generated command's body.
+  std::string generateCallArgumentsEnhanced( CommandData const &      commandData,
+                                             size_t                   initialSkipCount,
+                                             bool                     nonConstPointerAsNullptr,
+                                             std::set<size_t> const & singularParams,
+                                             std::set<size_t> const & templatedParams,
+                                             bool                     raiiHandleMemberFunction ) const;
+  std::string generateCallArgumentsRAIIFactory( std::vector<ParamData> const & params,
+                                                size_t                         initialSkipCount,
+                                                std::set<size_t> const &       skippedParams,
+                                                std::set<size_t> const &       singularParams ) const;
+  std::string generateCallArgumentsStandard( std::string const & handle, std::vector<ParamData> const & params ) const;
+  std::string generateCallArgumentEnhanced( std::vector<ParamData> const & params,
+                                            size_t                         paramIndex,
+                                            bool                           nonConstPointerAsNullptr,
+                                            std::set<size_t> const &       singularParams,
+                                            std::set<size_t> const &       templatedParams ) const;
+  std::string generateCallArgumentEnhancedConstPointer( ParamData const &        param,
+                                                        size_t                   paramIndex,
+                                                        std::set<size_t> const & singularParams,
+                                                        std::set<size_t> const & templatedParams ) const;
+  std::string generateCallArgumentEnhancedNonConstPointer( ParamData const &        param,
+                                                           size_t                   paramIndex,
+                                                           bool                     nonConstPointerAsNullptr,
+                                                           std::set<size_t> const & singularParams ) const;
+  std::string generateCallArgumentEnhancedValue( std::vector<ParamData> const & params, size_t paramIndex, std::set<size_t> const & singularParams ) const;
+  std::string generateCallSequence( std::string const &                       name,
+                                    CommandData const &                       commandData,
+                                    std::vector<size_t> const &               returnParams,
+                                    std::map<size_t, VectorParamData> const & vectorParams,
+                                    size_t                                    initialSkipCount,
+                                    std::set<size_t> const &                  singularParams,
+                                    std::set<size_t> const &                  templatedParams,
+                                    CommandFlavourFlags                       flavourFlags,
+                                    bool                                      raii ) const;
+  std::string generateChainTemplates( std::vector<size_t> const & returnParams, bool chained ) const;
+  // Code generation: the generateCommand* family. Variants are split by result
+  // type (Result / value / void), success/error code combinations, and the
+  // number and kind of return parameters (handle / value / void / chain / vector).
+  std::string generateCommand( std::string const & name, CommandData const & commandData, size_t initialSkipCount, bool definition ) const;
+  std::string generateCommandDefinitions() const;
+  std::string
+    generateCommandDefinitions( std::vector<RequireData> const & requireData, std::set<std::string> & listedCommands, std::string const & title ) const;
+  std::string generateCommandDefinitions( std::string const & command, std::string const & handle ) const;
+  std::string generateCommandEnhanced( std::string const &                       name,
+                                       CommandData const &                       commandData,
+                                       size_t                                    initialSkipCount,
+                                       bool                                      definition,
+                                       std::map<size_t, VectorParamData> const & vectorParams,
+                                       std::vector<size_t> const &               returnParams,
+                                       CommandFlavourFlags                       flavourFlags = {} ) const;
+  std::string generateCommandName( std::string const &            vulkanCommandName,
+                                   std::vector<ParamData> const & params,
+                                   size_t                         initialSkipCount,
+                                   std::set<std::string> const &  tags,
+                                   CommandFlavourFlags            flavourFlags = {} ) const;
+  std::string
+    generateCommandResultMultiSuccessNoErrors( std::string const & name, CommandData const & commandData, size_t initialSkipCount, bool definition ) const;
+  std::string generateCommandResultMultiSuccessNoErrors0Return( std::string const & name,
+                                                                CommandData const & commandData,
+                                                                size_t              initialSkipCount,
+                                                                bool                definition ) const;
+  std::string generateCommandResultMultiSuccessNoErrors2Return(
+    std::string const & name, CommandData const & commandData, size_t initialSkipCount, bool definition, std::vector<size_t> const & returnParams ) const;
+  std::string
+    generateCommandResultMultiSuccessWithErrors( std::string const & name, CommandData const & commandData, size_t initialSkipCount, bool definition ) const;
+  std::string generateCommandResultMultiSuccessWithErrors1Return(
+    std::string const & name, CommandData const & commandData, size_t initialSkipCount, bool definition, size_t returnParam ) const;
+  std::string generateCommandResultMultiSuccessWithErrors2Return(
+    std::string const & name, CommandData const & commandData, size_t initialSkipCount, bool definition, std::vector<size_t> const & returnParamIndices ) const;
+  std::string generateCommandResultMultiSuccessWithErrors3Return(
+    std::string const & name, CommandData const & commandData, size_t initialSkipCount, bool definition, std::vector<size_t> const & returnParamIndices ) const;
+  std::string
+    generateCommandResultSingleSuccessNoErrors( std::string const & name, CommandData const & commandData, size_t initialSkipCount, bool definition ) const;
+  std::string
+    generateCommandResultSingleSuccessWithErrors( std::string const & name, CommandData const & commandData, size_t initialSkipCount, bool definition ) const;
+  std::string generateCommandResultSingleSuccessWithErrors1Return(
+    std::string const & name, CommandData const & commandData, size_t initialSkipCount, bool definition, size_t returnParam ) const;
+  std::string generateCommandResultSingleSuccessWithErrors1ReturnChain(
+    std::string const & name, CommandData const & commandData, size_t initialSkipCount, bool definition, size_t returnParam ) const;
+  std::string generateCommandResultSingleSuccessWithErrors1ReturnHandle(
+    std::string const & name, CommandData const & commandData, size_t initialSkipCount, bool definition, size_t returnParam ) const;
+  std::string generateCommandResultSingleSuccessWithErrors1ReturnHandle1Vector( std::string const &                        name,
+                                                                                CommandData const &                        commandData,
+                                                                                size_t                                     initialSkipCount,
+                                                                                bool                                       definition,
+                                                                                size_t                                     returnParam,
+                                                                                std::pair<size_t, VectorParamData> const & vectorParamIndex ) const;
+  std::string generateCommandResultSingleSuccessWithErrors1ReturnHandle2Vector( std::string const &                       name,
+                                                                                CommandData const &                       commandData,
+                                                                                size_t                                    initialSkipCount,
+                                                                                bool                                      definition,
+                                                                                size_t                                    returnParam,
+                                                                                std::map<size_t, VectorParamData> const & vectorParamIndices ) const;
+  std::string generateCommandResultSingleSuccessWithErrors1ReturnValue(
+    std::string const & name, CommandData const & commandData, size_t initialSkipCount, bool definition, size_t returnParam ) const;
+  std::string generateCommandResultSingleSuccessWithErrors1ReturnValue2Vectors( std::string const &                       name,
+                                                                                CommandData const &                       commandData,
+                                                                                size_t                                    initialSkipCount,
+                                                                                bool                                      definition,
+                                                                                size_t                                    returnParam,
+                                                                                std::map<size_t, VectorParamData> const & vectorParamIndices ) const;
+  std::string generateCommandResultSingleSuccessWithErrors1ReturnVoid(
+    std::string const & name, CommandData const & commandData, size_t initialSkipCount, bool definition, size_t returnParam ) const;
+  std::string generateCommandResultSingleSuccessWithErrors2Return(
+    std::string const & name, CommandData const & commandData, size_t initialSkipCount, bool definition, std::vector<size_t> const & returnParamIndices ) const;
+  std::string
+    generateCommandResultWithErrors0Return( std::string const & name, CommandData const & commandData, size_t initialSkipCount, bool definition ) const;
+  std::string generateCommandSet( bool                             definition,
+                                  std::string const &              standard,
+                                  std::vector<std::string> const & enhanced = {},
+                                  std::vector<std::string> const & unique   = {} ) const;
+  std::string generateCommandSet( std::string const & standard, std::string const & enhanced ) const;
+  std::string generateCommandStandard( std::string const & name, CommandData const & commandData, size_t initialSkipCount, bool definition ) const;
+  std::string generateCommandValue( std::string const & name, CommandData const & commandData, size_t initialSkipCount, bool definition ) const;
+  std::string generateCommandVoid0Return( std::string const & name, CommandData const & commandData, size_t initialSkipCount, bool definition ) const;
+  std::string
+    generateCommandVoid1Return( std::string const & name, CommandData const & commandData, size_t initialSkipCount, bool definition, size_t returnParam ) const;
+  std::string generateCommandVoid2Return(
+    std::string const & name, CommandData const & commandData, size_t initialSkipCount, bool definition, std::vector<size_t> const & returnParamIndices ) const;
+  // Code generation: local data declarations, data preparation, and size checks
+  // emitted inside a generated command's body.
+  std::string generateConstexprString( std::string const & structName ) const;
+  std::string generateDataDeclarations( CommandData const &                       commandData,
+                                        std::vector<size_t> const &               returnParams,
+                                        std::map<size_t, VectorParamData> const & vectorParams,
+                                        std::set<size_t> const &                  templatedParams,
+                                        CommandFlavourFlags                       flavourFlags,
+                                        bool                                      raii,
+                                        std::vector<std::string> const &          dataTypes,
+                                        std::string const &                       dataType,
+                                        std::string const &                       returnType,
+                                        std::string const &                       returnVariable ) const;
+  std::string generateDataDeclarations1Return( CommandData const &                       commandData,
+                                               std::vector<size_t> const &               returnParams,
+                                               std::map<size_t, VectorParamData> const & vectorParams,
+                                               std::set<size_t> const &                  templatedParams,
+                                               CommandFlavourFlags                       flavourFlags,
+                                               std::vector<std::string> const &          dataTypes,
+                                               std::string const &                       dataType,
+                                               std::string const &                       returnType,
+                                               std::string const &                       returnVariable ) const;
+  std::string generateDataDeclarations2Returns( CommandData const &                       commandData,
+                                                std::vector<size_t> const &               returnParams,
+                                                std::map<size_t, VectorParamData> const & vectorParams,
+                                                CommandFlavourFlags                       flavourFlags,
+                                                bool                                      raii,
+                                                std::vector<std::string> const &          dataTypes,
+                                                std::string const &                       dataType,
+                                                std::string const &                       returnVariable ) const;
+  std::string generateDataDeclarations3Returns( CommandData const &              commandData,
+                                                std::vector<size_t> const &      returnParams,
+                                                CommandFlavourFlags              flavourFlags,
+                                                bool                             raii,
+                                                std::vector<std::string> const & dataTypes ) const;
+  std::string generateDataPreparation( CommandData const &                       commandData,
+                                       size_t                                    initialSkipCount,
+                                       std::vector<size_t> const &               returnParams,
+                                       std::map<size_t, VectorParamData> const & vectorParams,
+                                       std::set<size_t> const &                  templatedParams,
+                                       CommandFlavourFlags                       flavourFlags,
+                                       bool                                      enumerating ) const;
+  std::string generateDataSizeChecks( CommandData const &                       commandData,
+                                      std::vector<size_t> const &               returnParams,
+                                      std::vector<std::string> const &          returnParamTypes,
+                                      std::map<size_t, VectorParamData> const & vectorParams,
+                                      std::set<size_t> const &                  templatedParams,
+                                      bool                                      singular ) const;
+  // Code generation: dispatch loaders, enums (and their to_string helpers),
+  // format traits, handles, and the RAII wrapper layer.
+  std::string generateDispatchLoaderDynamic() const;  // uses vkGet*ProcAddress to get function pointers
+  std::string generateDispatchLoaderStatic() const;   // uses exported symbols from loader
+  std::string generateDestroyCommand( std::string const & name, CommandData const & commandData ) const;
+  std::string
+    generateDispatchLoaderDynamicCommandAssignment( std::string const & commandName, CommandData const & commandData, std::string const & firstArg ) const;
+  std::string generateDispatchLoaderStaticCommands( std::vector<RequireData> const & requireData,
+                                                    std::set<std::string> &          listedCommands,
+                                                    std::string const &              title ) const;
+  std::string generateEnum( std::pair<std::string, EnumData> const & enumData, std::string const & surroundingProtect ) const;
+  std::string generateEnums() const;
+  std::string generateEnums( std::vector<RequireData> const & requireData, std::set<std::string> & listedEnums, std::string const & title ) const;
+  std::string generateEnumsToString() const;
+  std::string generateEnumsToString( std::vector<RequireData> const & requireData, std::set<std::string> & listedEnums, std::string const & title ) const;
+  std::string generateEnumInitializer( TypeInfo const &                   type,
+                                       std::vector<std::string> const &   arraySizes,
+                                       std::vector<EnumValueData> const & values,
+                                       bool                               bitmask ) const;
+  std::string generateEnumToString( std::pair<std::string, EnumData> const & enumData ) const;
+  std::string generateFailureCheck( std::vector<std::string> const & successCodes ) const;
+  std::string generateFormatTraits() const;
+  std::string generateFunctionPointerCheck( std::string const & function, std::string const & referencedIn ) const;
+  std::string generateHandle( std::pair<std::string, HandleData> const & handle, std::set<std::string> & listedHandles ) const;
+  std::string generateHandleCommandDeclarations( std::set<std::string> const & commands ) const;
+  std::string generateHandleDependencies( std::pair<std::string, HandleData> const & handle, std::set<std::string> & listedHandles ) const;
+  std::string generateHandleEmpty( HandleData const & handleData ) const;
+  std::string generateHandleHashStructures( std::vector<RequireData> const & requireData, std::string const & title ) const;
+  std::string generateHandleHashStructures() const;
+  std::string generateHandles() const;
+  std::string generateIndexTypeTraits() const;
+  std::string
+              generateLenInitializer( std::vector<MemberData>::const_iterator                                                                                 mit,
+                                      std::map<std::vector<MemberData>::const_iterator, std::vector<std::vector<MemberData>::const_iterator>>::const_iterator litit,
+                                      bool mutualExclusiveLens ) const;
+  std::string generateName( TypeInfo const & typeInfo ) const;
+  std::string generateNoExcept( std::vector<std::string> const &          errorCodes,
+                                std::vector<size_t> const &               returnParams,
+                                std::map<size_t, VectorParamData> const & vectorParams,
+                                CommandFlavourFlags                       flavourFlags,
+                                bool                                      vectorSizeCheck,
+                                bool                                      raii ) const;
+  std::string generateObjectDeleter( std::string const & commandName, CommandData const & commandData, size_t initialSkipCount, size_t returnParam ) const;
+  std::pair<std::string, std::string> generateProtection( std::string const & protect ) const;
+  std::string                         generateRAIICommandDefinitions() const;
+  std::string
+    generateRAIICommandDefinitions( std::vector<RequireData> const & requireData, std::set<std::string> & listedCommands, std::string const & title ) const;
+  std::string generateRAIIDispatchers() const;
+  std::string generateRAIIHandle( std::pair<std::string, HandleData> const & handle,
+                                  std::set<std::string> &                    listedHandles,
+                                  std::set<std::string> const &              specialFunctions ) const;
+  std::string generateRAIIHandleCommand( std::string const & command, size_t initialSkipCount, bool definition ) const;
+  std::string generateRAIIHandleCommandDeclarations( std::pair<std::string, HandleData> const & handle, std::set<std::string> const & specialFunctions ) const;
+  std::string generateRAIIHandleCommandEnhanced( std::map<std::string, CommandData>::const_iterator commandIt,
+                                                 size_t                                             initialSkipCount,
+                                                 std::vector<size_t> const &                        returnParams,
+                                                 std::map<size_t, VectorParamData> const &          vectorParamIndices,
+                                                 bool                                               definition,
+                                                 CommandFlavourFlags                                flavourFlags = {} ) const;
+  std::string generateRAIIHandleCommandFactory( std::map<std::string, CommandData>::const_iterator commandIt,
+                                                size_t                                             initialSkipCount,
+                                                std::vector<size_t> const &                        returnParams,
+                                                std::map<size_t, VectorParamData> const &          vectorParams,
+                                                bool                                               definition,
+                                                CommandFlavourFlags                                flavourFlags = {} ) const;
+  std::string generateRAIIHandleCommandFactoryArgumentList( std::vector<ParamData> const & params,
+                                                            std::set<size_t> const &       skippedParams,
+                                                            bool                           definition,
+                                                            bool                           singular ) const;
+  std::string generateRAIIHandleCommandResult( std::map<std::string, CommandData>::const_iterator commandIt, size_t initialSkipCount, bool definition ) const;
+  std::string generateRAIIHandleCommandResultMultiSuccessNoErrors( std::map<std::string, CommandData>::const_iterator commandIt,
+                                                                   size_t                                             initialSkipCount,
+                                                                   bool                                               definition ) const;
+  std::string generateRAIIHandleCommandResultMultiSuccessNoErrors2Return( std::map<std::string, CommandData>::const_iterator commandIt,
+                                                                          size_t                                             initialSkipCount,
+                                                                          bool                                               definition,
+                                                                          std::vector<size_t> const &                        returnParams ) const;
+  std::string generateRAIIHandleCommandResultMultiSuccessWithErrors( std::map<std::string, CommandData>::const_iterator commandIt,
+                                                                     size_t                                             initialSkipCount,
+                                                                     bool                                               definition ) const;
+  std::string generateRAIIHandleCommandResultMultiSuccessWithErrors1Return( std::map<std::string, CommandData>::const_iterator commandIt,
+                                                                            size_t                                             initialSkipCount,
+                                                                            bool                                               definition,
+                                                                            size_t                                             returnParam ) const;
+  std::string generateRAIIHandleCommandResultMultiSuccessWithErrors2Return( std::map<std::string, CommandData>::const_iterator commandIt,
+                                                                            size_t                                             initialSkipCount,
+                                                                            bool                                               definition,
+                                                                            std::vector<size_t> const &                        returnParamIndices ) const;
+  std::string generateRAIIHandleCommandResultMultiSuccessWithErrors3Return( std::map<std::string, CommandData>::const_iterator commandIt,
+                                                                            size_t                                             initialSkipCount,
+                                                                            bool                                               definition,
+                                                                            std::vector<size_t> const &                        returnParamIndices ) const;
+  std::string generateRAIIHandleCommandResultSingleSuccessNoErrors( std::map<std::string, CommandData>::const_iterator commandIt,
+                                                                    size_t                                             initialSkipCount,
+                                                                    bool                                               definition ) const;
+  std::string generateRAIIHandleCommandResultSingleSuccessWithErrors( std::map<std::string, CommandData>::const_iterator commandIt,
+                                                                      size_t                                             initialSkipCount,
+                                                                      bool                                               definition ) const;
+  std::string generateRAIIHandleCommandResultSingleSuccessWithErrors1Return( std::map<std::string, CommandData>::const_iterator commandIt,
+                                                                             size_t                                             initialSkipCount,
+                                                                             bool                                               definition,
+                                                                             size_t                                             returnParam ) const;
+  std::string generateRAIIHandleCommandResultSingleSuccessWithErrors1ReturnChain( std::map<std::string, CommandData>::const_iterator commandIt,
+                                                                                  size_t                                             initialSkipCount,
+                                                                                  bool                                               definition,
+                                                                                  size_t                                             returnParam ) const;
+  std::string generateRAIIHandleCommandResultSingleSuccessWithErrors1ReturnHandle( std::map<std::string, CommandData>::const_iterator commandIt,
+                                                                                   size_t                                             initialSkipCount,
+                                                                                   bool                                               definition,
+                                                                                   size_t                                             returnParam ) const;
+  std::string generateRAIIHandleCommandResultSingleSuccessWithErrors1ReturnValue( std::map<std::string, CommandData>::const_iterator commandIt,
+                                                                                  size_t                                             initialSkipCount,
+                                                                                  bool                                               definition,
+                                                                                  size_t                                             returnParam ) const;
+  std::string generateRAIIHandleCommandResultSingleSuccessWithErrors1ReturnValue2Vectors( std::map<std::string, CommandData>::const_iterator commandIt,
+                                                                                          size_t                                             initialSkipCount,
+                                                                                          bool                                               definition,
+                                                                                          size_t                                             returnParam,
+                                                                                          std::map<size_t, VectorParamData> const & vectorParams ) const;
+  std::string generateRAIIHandleCommandResultSingleSuccessWithErrors1ReturnVoid( std::map<std::string, CommandData>::const_iterator commandIt,
+                                                                                 size_t                                             initialSkipCount,
+                                                                                 bool                                               definition,
+                                                                                 size_t                                             returnParam ) const;
+  std::string generateRAIIHandleCommandResultSingleSuccessWithErrors2Return( std::map<std::string, CommandData>::const_iterator commandIt,
+                                                                             size_t                                             initialSkipCount,
+                                                                             bool                                               definition,
+                                                                             std::vector<size_t> const &                        returnParamIndices ) const;
+  std::string generateRAIIHandleCommandValue( std::map<std::string, CommandData>::const_iterator commandIt, size_t initialSkipCount, bool definition ) const;
+  std::string generateRAIIHandleCommandVoid( std::map<std::string, CommandData>::const_iterator commandIt, size_t initialSkipCount, bool definition ) const;
+  std::pair<std::string, std::string> generateRAIIHandleStaticCreate( std::pair<std::string, HandleData> const &         handle,
+                                                                      std::map<std::string, CommandData>::const_iterator constructorIt,
+                                                                      std::string const &                                enter,
+                                                                      std::string const &                                leave ) const;
+
+  std::pair<std::string, std::string> generateRAIIHandleConstructor( std::pair<std::string, HandleData> const &         handle,
+                                                                     std::map<std::string, CommandData>::const_iterator constructorIt,
+                                                                     std::string const &                                enter,
+                                                                     std::string const &                                leave ) const;
+  std::pair<std::string, std::string> generateRAIIHandleStaticCreate1Return2Vector( std::pair<std::string, HandleData> const &         handle,
+                                                                                    std::map<std::string, CommandData>::const_iterator constructorIt,
+                                                                                    std::string const &                                enter,
+                                                                                    std::string const &                                leave,
+                                                                                    size_t                                             returnParam,
+                                                                                    std::map<size_t, VectorParamData> const & vectorParamIndices ) const;
+  std::pair<std::string, std::string> generateRAIIHandleConstructor1Return2Vector( std::pair<std::string, HandleData> const &         handle,
+                                                                                   std::map<std::string, CommandData>::const_iterator constructorIt,
+                                                                                   std::string const &                                enter,
+                                                                                   std::string const &                                leave,
+                                                                                   size_t                                             returnParam,
+                                                                                   std::map<size_t, VectorParamData> const & vectorParamIndices ) const;
+  std::pair<std::string, std::string> generateRAIIHandleStaticCreates( std::pair<std::string, HandleData> const & handle ) const;
+  std::pair<std::string, std::string> generateRAIIHandleConstructors( std::pair<std::string, HandleData> const & handle ) const;
+
+  std::string generateRAIIHandleStaticCreateToConstructorArgument( ParamData const & param,
+                                                                   bool singular ) const;
+  std::string generateRAIIHandleStaticCreateToConstructorArguments( std::pair<std::string, HandleData> const & handle,
+                                                                    std::map<std::string, VulkanHppGenerator::CommandData>::const_iterator constructorIt ) const;
+
+  std::string generateRAIIHandleConstructorArgument( ParamData const & param, bool definition, bool singular, bool takesOwnership ) const;
+  std::string generateRAIIHandleConstructorArguments( std::pair<std::string, HandleData> const &                             handle,
+                                                      std::map<std::string, VulkanHppGenerator::CommandData>::const_iterator constructorIt,
+                                                      bool                                                                   singular,
+                                                      bool                                                                   takesOwnership ) const;
+  std::string generateRAIIHandleConstructorCallArguments( std::pair<std::string, HandleData> const &                             handle,
+                                                          std::map<std::string, VulkanHppGenerator::CommandData>::const_iterator constructorIt,
+                                                          bool                                                                   nonConstPointerAsNullptr,
+                                                          std::set<size_t> const &                                               singularParams,
+                                                          bool allocatorIsMemberVariable,
+                                                          bool handleParamsAreMembers ) const;
+  std::string generateRAIIHandleStaticCreateEnumerate( std::pair<std::string, HandleData> const &         handle,
+                                                       std::map<std::string, CommandData>::const_iterator constructorIt,
+                                                       std::vector<ParamData>::const_iterator             handleParamIt,
+                                                       std::vector<ParamData>::const_iterator             lenParamIt,
+                                                       std::string const &                                enter,
+                                                       std::string const &                                leave ) const;
+  std::string generateRAIIHandleConstructorEnumerate( std::pair<std::string, HandleData> const &         handle,
+                                                      std::map<std::string, CommandData>::const_iterator constructorIt,
+                                                      std::vector<ParamData>::const_iterator             handleParamIt,
+                                                      std::vector<ParamData>::const_iterator             lenParamIt,
+                                                      std::string const &                                enter,
+                                                      std::string const &                                leave ) const;
+  std::string generateRAIIHandleConstructorInitializationList( std::pair<std::string, HandleData> const &         handle,
+                                                               std::map<std::string, CommandData>::const_iterator constructorIt,
+                                                               std::map<std::string, CommandData>::const_iterator destructorIt,
+                                                               bool                                               takesOwnership ) const;
+  std::string generateRAIIHandleConstructorParamName( std::string const & type, std::map<std::string, CommandData>::const_iterator destructorIt ) const;
+  std::pair<std::string, std::string> generateRAIIHandleStaticCreateResult( std::pair<std::string, HandleData> const &         handle,
+                                                                            std::map<std::string, CommandData>::const_iterator constructorIt,
+                                                                            std::string const &                                enter,
+                                                                            std::string const &                                leave ) const;
+  std::pair<std::string, std::string> generateRAIIHandleConstructorResult( std::pair<std::string, HandleData> const &         handle,
+                                                                           std::map<std::string, CommandData>::const_iterator constructorIt,
+                                                                           std::string const &                                enter,
+                                                                           std::string const &                                leave ) const;
+  std::string generateRAIIHandleStaticCreateResultSingleSuccessWithErrors1Return0Vector( std::pair<std::string, HandleData> const &         handle,
+                                                                                         std::map<std::string, CommandData>::const_iterator constructorIt,
+                                                                                         std::string const &                                enter,
+                                                                                         std::string const &                                leave ) const;
+  std::string generateRAIIHandleConstructorResultSingleSuccessWithErrors1Return0Vector( std::pair<std::string, HandleData> const &         handle,
+                                                                                        std::map<std::string, CommandData>::const_iterator constructorIt,
+                                                                                        std::string const &                                enter,
+                                                                                        std::string const &                                leave ) const;
+  std::string generateRAIIHandleConstructorTakeOwnership( std::pair<std::string, HandleData> const & handle ) const;
+  std::string generateRAIIHandleStaticCreateVector( std::pair<std::string, HandleData> const &         handle,
+                                                    std::map<std::string, CommandData>::const_iterator constructorIt,
+                                                    std::vector<ParamData>::const_iterator             handleParamIt,
+                                                    std::string const &                                enter,
+                                                    std::string const &                                leave ) const;
+  std::string generateRAIIHandleConstructorVector( std::pair<std::string, HandleData> const &         handle,
+                                                   std::map<std::string, CommandData>::const_iterator constructorIt,
+                                                   std::vector<ParamData>::const_iterator             handleParamIt,
+                                                   std::string const &                                enter,
+                                                   std::string const &                                leave ) const;
+  std::string generateRAIIHandleStaticCreateVectorSingular( std::pair<std::string, HandleData> const &         handle,
+                                                            std::map<std::string, CommandData>::const_iterator constructorIt,
+                                                            std::vector<ParamData>::const_iterator             handleParamIt,
+                                                            std::string const &                                enter,
+                                                            std::string const &                                leave ) const;
+  std::string generateRAIIHandleConstructorVectorSingular( std::pair<std::string, HandleData> const &         handle,
+                                                           std::map<std::string, CommandData>::const_iterator constructorIt,
+                                                           std::vector<ParamData>::const_iterator             handleParamIt,
+                                                           std::string const &                                enter,
+                                                           std::string const &                                leave ) const;
+  std::pair<std::string, std::string> generateRAIIHandleConstructorVoid( std::pair<std::string, HandleData> const &         handle,
+                                                                         std::map<std::string, CommandData>::const_iterator constructorIt,
+                                                                         std::string const &                                enter,
+                                                                         std::string const &                                leave ) const;
+  std::string                         generateRAIIHandleConstructorVoid1Return0Vector( std::pair<std::string, HandleData> const &         handle,
+                                                                                       std::map<std::string, CommandData>::const_iterator constructorIt,
+                                                                                       std::string const &                                enter,
+                                                                                       std::string const &                                leave ) const;
+  std::string generateRAIIHandleContext( std::pair<std::string, HandleData> const & handle, std::set<std::string> const & specialFunctions ) const;
+  std::string generateRAIIHandleDestructorCallArguments( std::string const &                                handleType,
+                                                         std::map<std::string, CommandData>::const_iterator destructorIt ) const;
+  std::tuple<std::string, std::string, std::string, std::string, std::string, std::string, std::string>
+              generateRAIIHandleDetails( std::pair<std::string, HandleData> const & handle ) const;
+  std::string generateRAIIHandleForwardDeclarations( std::vector<RequireData> const & requireData, std::string const & title ) const;
+  std::string generateRAIIHandles() const;
+  std::string generateRAIIHandleSingularConstructorArguments( std::pair<std::string, HandleData> const &         handle,
+                                                              std::map<std::string, CommandData>::const_iterator constructorIt ) const;
+  std::string generateRAIIHandleVectorSizeCheck( std::string const &                           name,
+                                                 CommandData const &                           commandData,
+                                                 size_t                                        initialSkipCount,
+                                                 std::map<size_t, std::vector<size_t>> const & countToVectorMap,
+                                                 std::set<size_t> const &                      skippedParams ) const;
+  std::string generateResultAssignment( CommandData const & commandData ) const;
+  std::string generateResultCheck(
+    CommandData const & commandData, std::string const & className, std::string const & classSeparator, std::string commandName, bool enumerating ) const;
+  std::string generateResultExceptions() const;
+  std::string generateReturnStatement( std::string const & commandName,
+                                       CommandData const & commandData,
+                                       std::string const & returnVariable,
+                                       std::string const & returnType,
+                                       std::string const & dataType,
+                                       size_t              initialSkipCount,
+                                       size_t              returnParam,
+                                       CommandFlavourFlags flavourFlags,
+                                       bool                enumerating,
+                                       bool                raii ) const;
+  std::string generateReturnType( CommandData const &                       commandData,
+                                  std::vector<size_t> const &               returnParams,
+                                  std::map<size_t, VectorParamData> const & vectorParams,
+                                  CommandFlavourFlags                       flavourFlags,
+                                  bool                                      raii,
+                                  std::string const &                       dataType ) const;
+  std::string generateReturnVariable( CommandData const &                       commandData,
+                                      std::vector<size_t> const &               returnParams,
+                                      std::map<size_t, VectorParamData> const & vectorParams,
+                                      CommandFlavourFlags                       flavourFlags ) const;
+  std::string
+    generateSizeCheck( std::vector<std::vector<MemberData>::const_iterator> const & arrayIts, std::string const & structName, bool mutualExclusiveLens ) const;
+  std::string generateStaticAssertions() const;
+  std::string generateStaticAssertions( std::vector<RequireData> const & requireData, std::string const & title ) const;
+  std::string generateStruct( std::pair<std::string, StructureData> const & structure, std::set<std::string> & listedStructs ) const;
+  std::string generateStructCompareOperators( std::pair<std::string, StructureData> const & structure ) const;
+  std::string generateStructConstructors( std::pair<std::string, StructureData> const & structData ) const;
+  std::string generateStructConstructorsEnhanced( std::pair<std::string, StructureData> const & structData ) const;
+  std::string generateStructConstructorArgument( bool listedArgument, MemberData const & memberData, bool withDefault ) const;
+  std::string generateStructHashStructure( std::pair<std::string, StructureData> const & structure, std::set<std::string> & listedStructs ) const;
+  std::string generateStructHashStructures() const;
+  std::string generateStructHashSum( std::string const & structName, std::vector<MemberData> const & members ) const;
+  std::string generateStructs() const;
+  std::string generateStructure( std::pair<std::string, StructureData> const & structure ) const;
+  std::string generateStructExtendsStructs() const;
+  std::string
+    generateStructExtendsStructs( std::vector<RequireData> const & requireData, std::set<std::string> & listedStructs, std::string const & title ) const;
+  std::string generateStructForwardDeclarations() const;
+  std::string generateStructForwardDeclarations( std::vector<RequireData> const & requireData, std::string const & title ) const;
+  std::tuple<std::string, std::string, std::string, std::string> generateStructMembers( std::pair<std::string, StructureData> const & structData ) const;
+  std::string                         generateStructSetter( std::string const & structureName, std::vector<MemberData> const & memberData, size_t index ) const;
+  std::string                         generateStructSubConstructor( std::pair<std::string, StructureData> const & structData ) const;
+  std::string                         generateSuccessCheck( std::vector<std::string> const & successCodes ) const;
+  std::string                         generateSuccessCodeList( std::vector<std::string> const & successCodes, bool enumerating ) const;
+  std::string                         generateThrowResultException() const;
+  std::string                         generateTypenameCheck( std::vector<size_t> const &               returnParams,
+                                                             std::map<size_t, VectorParamData> const & vectorParams,
+                                                             bool                                      definition,
+                                                             std::vector<std::string> const &          dataTypes,
+                                                             CommandFlavourFlags                       flavourFlags ) const;
+  std::string                         generateUnion( std::pair<std::string, StructureData> const & structure ) const;
+  std::string                         generateUniqueTypes( std::string const & parentType, std::set<std::string> const & childrenTypes ) const;
+  std::string                         generateVectorSizeCheck( std::string const &                           name,
+                                                               CommandData const &                           commandData,
+                                                               size_t                                        initialSkipCount,
+                                                               std::map<size_t, std::vector<size_t>> const & countToVectorMap,
+                                                               std::set<size_t> const &                      skippedParams,
+                                                               bool                                          onlyThrows ) const;
+  std::pair<std::string, std::string> getParentTypeAndName( std::pair<std::string, HandleData> const & handle ) const;
+  std::string                         getPlatform( std::string const & title ) const;
+  std::pair<std::string, std::string> getPoolTypeAndName( std::string const & type ) const;
+  std::string                         getProtect( EnumValueData const & evd ) const;
+  std::string                         getProtectFromPlatform( std::string const & platform ) const;
+  std::string                         getProtectFromTitle( std::string const & title ) const;
+  std::string                         getProtectFromType( std::string const & type ) const;
+  std::string                         getVectorSize( std::vector<ParamData> const &            params,
+                                                     std::map<size_t, VectorParamData> const & vectorParamIndices,
+                                                     size_t                                    returnParam,
+                                                     std::string const &                       returnParamType,
+                                                     std::set<size_t> const &                  templatedParams ) const;
+  bool                                hasLen( std::vector<MemberData> const & members, MemberData const & md ) const;
+  bool                                hasParentHandle( std::string const & handle, std::string const & parent ) const;
+  bool                                isDeviceCommand( CommandData const & commandData ) const;
+  bool                                isHandleType( std::string const & type ) const;
+  bool                                isLenByStructMember( std::string const & name, std::vector<ParamData> const & params ) const;
+  bool                                isLenByStructMember( std::string const & name, ParamData const & param ) const;
+  bool isMultiSuccessCodeConstructor( std::vector<std::map<std::string, CommandData>::const_iterator> const & constructorIts ) const;
+  bool isParam( std::string const & name, std::vector<ParamData> const & params ) const;
+  bool isStructMember( std::string const & name, std::vector<MemberData> const & memberData ) const;
+  bool isStructureChainAnchor( std::string const & type ) const;
+  std::pair<bool, std::map<size_t, std::vector<size_t>>> needsVectorSizeCheck( std::vector<ParamData> const &            params,
+                                                                               std::map<size_t, VectorParamData> const & vectorParams,
+                                                                               std::vector<size_t> const &               returnParams,
+                                                                               std::set<size_t> const &                  singularParams ) const;
+  void                                                   readCommands( tinyxml2::XMLElement const * element );
+  void                                                   readCommandsCommand( tinyxml2::XMLElement const * element );
+  std::pair<bool, ParamData>          readCommandsCommandParam( tinyxml2::XMLElement const * element, std::vector<ParamData> const & params );
+  std::pair<std::string, std::string> readCommandsCommandProto( tinyxml2::XMLElement const * element );
+  std::string                         readComment( tinyxml2::XMLElement const * element );
+  void                                readEnums( tinyxml2::XMLElement const * element );
+  void                                readEnumsConstant( tinyxml2::XMLElement const * element );
+  void                                readEnumsEnum( tinyxml2::XMLElement const * element, std::map<std::string, EnumData>::iterator enumIt );
+  void                                readExtensions( tinyxml2::XMLElement const * element );
+  void                                readExtensionsExtension( tinyxml2::XMLElement const * element );
+  void readExtensionsExtensionRequire( tinyxml2::XMLElement const * element, std::map<std::string, ExtensionData>::iterator extensionIt );
+  void readExtensionsExtensionRequireCommand( tinyxml2::XMLElement const * element, std::string const & extensionName, RequireData & requireData );
+  void readExtensionsExtensionRequireSkipped( tinyxml2::XMLElement const * element );
+  void readExtensionsExtensionRequireType( tinyxml2::XMLElement const * element, std::string const & extensionName, RequireData & requireData );
+  void readFeature( tinyxml2::XMLElement const * element );
+  void readFeatureRequire( tinyxml2::XMLElement const * element, std::map<std::string, FeatureData>::iterator featureIt );
+  void readFeatureRequireCommand( tinyxml2::XMLElement const * element, std::map<std::string, FeatureData>::iterator featureIt, RequireData & requireData );
+  void readFeatureRequireCommandSkipped( tinyxml2::XMLElement const * element );
+  void readFeatureRequireSkipped( tinyxml2::XMLElement const * element );
+  void readFeatureRequireType( tinyxml2::XMLElement const * element, std::map<std::string, FeatureData>::iterator featureIt, RequireData & requireData );
+  void readFormats( tinyxml2::XMLElement const * element );
+  void readFormatsFormat( tinyxml2::XMLElement const * element );
+  void readFormatsFormatComponent( tinyxml2::XMLElement const * element, FormatData & formatData );
+  void readFormatsFormatPlane( tinyxml2::XMLElement const * element, FormatData & formatData );
+  void readFormatsFormatSPIRVImageFormat( tinyxml2::XMLElement const * element, FormatData & formatData );
+  std::pair<NameData, TypeInfo> readNameAndType( tinyxml2::XMLElement const * elements );
+  void                          readPlatforms( tinyxml2::XMLElement const * element );
+  void                          readPlatformsPlatform( tinyxml2::XMLElement const * element );
+  void                          readRegistry( tinyxml2::XMLElement const * element );
+  void                          readRequireCommandSkipped( tinyxml2::XMLElement const * element );
+  void                          readRequireEnum( tinyxml2::XMLElement const * element, std::string const & extensionName );
+  void                          readRequireEnumSkipped( tinyxml2::XMLElement const * element );
+  void                          readRequireTypeSkipped( tinyxml2::XMLElement const * element );
+  void                          readSPIRVCapabilities( tinyxml2::XMLElement const * element );
+  void                          readSPIRVCapabilitiesSPIRVCapability( tinyxml2::XMLElement const * element );
+  void                          readSPIRVCapabilitiesSPIRVCapabilityEnable( tinyxml2::XMLElement const * element );
+  void                          readSPIRVCapabilitiesSPIRVCapabilityEnableExtension( int xmlLine, std::map<std::string, std::string> const & attributes );
+  void                          readSPIRVCapabilitiesSPIRVCapabilityEnableProperty( int xmlLine, std::map<std::string, std::string> const & attributes );
+  void                          readSPIRVCapabilitiesSPIRVCapabilityEnableStruct( int xmlLine, std::map<std::string, std::string> const & attributes );
+  void                          readSPIRVCapabilitiesSPIRVCapabilityEnableVersion( int xmlLine, std::map<std::string, std::string> const & attributes );
+  void                          readSPIRVExtensions( tinyxml2::XMLElement const * element );
+  void                          readSPIRVExtensionsExtension( tinyxml2::XMLElement const * element );
+  void                          readSPIRVExtensionsExtensionEnable( tinyxml2::XMLElement const * element );
+  void                          readTags( tinyxml2::XMLElement const * element );
+  void                          readTagsTag( tinyxml2::XMLElement const * element );
+  void                          readTypes( tinyxml2::XMLElement const * element );
+  void                          readTypesType( tinyxml2::XMLElement const * element );
+  void                          readTypesTypeBasetype( tinyxml2::XMLElement const * element, std::map<std::string, std::string> const & attributes );
+  void                          readTypesTypeBitmask( tinyxml2::XMLElement const * element, std::map<std::string, std::string> const & attributes );
+  void                          readTypesTypeDefine( tinyxml2::XMLElement const * element, std::map<std::string, std::string> const & attributes );
+  void                          readTypesTypeEnum( tinyxml2::XMLElement const * element, std::map<std::string, std::string> const & attributes );
+  void                          readTypesTypeFuncpointer( tinyxml2::XMLElement const * element, std::map<std::string, std::string> const & attributes );
+  void                          readTypesTypeHandle( tinyxml2::XMLElement const * element, std::map<std::string, std::string> const & attributes );
+  void                          readTypesTypeInclude( tinyxml2::XMLElement const * element, std::map<std::string, std::string> const & attributes );
+  void                          readTypesTypeRequires( tinyxml2::XMLElement const * element, std::map<std::string, std::string> const & attributes );
+  void     readTypesTypeStruct( tinyxml2::XMLElement const * element, bool isUnion, std::map<std::string, std::string> const & attributes );
+  void     readTypesTypeStructMember( tinyxml2::XMLElement const * element, std::vector<MemberData> & members, bool isUnion );
+  void     readTypesTypeStructMemberEnum( tinyxml2::XMLElement const * element, MemberData & memberData );
+  void     readTypesTypeStructMemberName( tinyxml2::XMLElement const * element, MemberData & memberData, std::vector<MemberData> const & members );
+  void     readTypesTypeStructMemberType( tinyxml2::XMLElement const * element, MemberData & memberData );
+  TypeInfo readTypeInfo( tinyxml2::XMLElement const * element ) const;
+  void     registerDeleter( std::string const & name, std::pair<std::string, CommandData> const & commandData );
+  void     rescheduleRAIIHandle( std::string &                              str,
+                                 std::pair<std::string, HandleData> const & handle,
+                                 std::set<std::string> &                    listedHandles,
+                                 std::set<std::string> const &              specialFunctions ) const;
+  std::vector<std::string> selectCommandsByHandle( std::vector<RequireData> const & requireData,
+                                                   std::set<std::string> const &    handleCommands,
+                                                   std::set<std::string> &          listedCommands ) const;
+  void                     setVulkanLicenseHeader( int line, std::string const & comment );
+  bool                     skipLeadingGrandParent( std::pair<std::string, HandleData> const & handle ) const;
+  std::string              toString( TypeCategory category );
+
+private:
+  std::map<std::string, BaseTypeData>                                 m_baseTypes;
+  std::map<std::string, BitmaskData>                                  m_bitmasks;
+  std::map<std::string, CommandData>                                  m_commands;
+  std::map<std::string, std::string>                                  m_constants;
+  std::map<std::string, DefineData>                                   m_defines;
+  std::map<std::string, EnumData>                                     m_enums;
+  std::set<std::string>                                               m_extendedStructs;  // structs which are referenced by the structextends tag
+  std::map<std::string, ExtensionData>                                m_extensions;
+  std::map<int, std::map<std::string, ExtensionData>::const_iterator> m_extensionsByNumber;
+  std::map<std::string, FeatureData>                                  m_features;
+  std::map<std::string, FormatData>                                   m_formats;
+  std::map<std::string, FuncPointerData>                              m_funcPointers;
+  std::map<std::string, HandleData>                                   m_handles;
+  std::set<std::string>                                               m_includes;
+  std::map<std::string, PlatformData>                                 m_platforms;
+  std::set<std::string>                                               m_RAIISpecialFunctions;
+  std::map<std::string, EnumData>                                     m_skippedEnums;
+  std::set<std::string>                                               m_skippedCommands;
+  std::set<std::string>                                               m_skippedFeatures;
+  std::map<std::string, TypeData>                                     m_skippedTypes;
+  std::map<std::string, StructureData>                                m_structures;
+  std::map<std::string, StructureAliasData>                           m_structureAliases;
+  std::map<std::string, std::set<std::string>>                        m_structureAliasesInverse;
+  std::set<std::string>                                               m_tags;
+  std::map<std::string, TypeData>                                     m_types;
+  std::string                                                         m_typesafeCheck;
+  std::string                                                         m_version;
+  std::string                                                         m_vulkanLicenseHeader;
+};
diff --git a/host/libs/graphics_detector/include/vulkan/vk_icd.h b/host/libs/graphics_detector/include/vulkan/vk_icd.h
new file mode 100644
index 0000000..1133fa3
--- /dev/null
+++ b/host/libs/graphics_detector/include/vulkan/vk_icd.h
@@ -0,0 +1,258 @@
+//
+// File: vk_icd.h
+//
+/*
+ * Copyright (c) 2015-2023 LunarG, Inc.
+ * Copyright (c) 2015-2023 The Khronos Group Inc.
+ * Copyright (c) 2015-2023 Valve Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#pragma once
+
+#include "vulkan.h"
+#include <stdbool.h>
+
+// Loader-ICD version negotiation API.  Versions add the following features:
+//   Version 0 - Initial.  Doesn't support vk_icdGetInstanceProcAddr
+//               or vk_icdNegotiateLoaderICDInterfaceVersion.
+//   Version 1 - Add support for vk_icdGetInstanceProcAddr.
+//   Version 2 - Add Loader/ICD Interface version negotiation
+//               via vk_icdNegotiateLoaderICDInterfaceVersion.
+//   Version 3 - Add ICD creation/destruction of KHR_surface objects.
+//   Version 4 - Add unknown physical device extension querying via
+//               vk_icdGetPhysicalDeviceProcAddr.
+//   Version 5 - Tells ICDs that the loader is now paying attention to the
+//               application version of Vulkan passed into the ApplicationInfo
+//               structure during vkCreateInstance.  This will tell the ICD
+//               that if the loader is older, it should automatically fail a
+//               call for any API version > 1.0.  Otherwise, the loader will
+//               manually determine if it can support the expected version.
+//   Version 6 - Add support for vk_icdEnumerateAdapterPhysicalDevices.
+//   Version 7 - If an ICD supports any of the following functions, they must be
+//               queryable with vk_icdGetInstanceProcAddr:
+//                   vk_icdNegotiateLoaderICDInterfaceVersion
+//                   vk_icdGetPhysicalDeviceProcAddr
+//                   vk_icdEnumerateAdapterPhysicalDevices (Windows only)
+//               In addition, these functions no longer need to be exported directly.
+//               This version allows drivers provided through the extension
+//               VK_LUNARG_direct_driver_loading be able to support the entire
+//               Driver-Loader interface.
+
+#define CURRENT_LOADER_ICD_INTERFACE_VERSION 7
+#define MIN_SUPPORTED_LOADER_ICD_INTERFACE_VERSION 0
+#define MIN_PHYS_DEV_EXTENSION_ICD_INTERFACE_VERSION 4
+
+// Old typedefs that don't follow a proper naming convention but are preserved for compatibility
+typedef VkResult(VKAPI_PTR *PFN_vkNegotiateLoaderICDInterfaceVersion)(uint32_t *pVersion);
+// This is defined in vk_layer.h which will be found by the loader, but if an ICD is building against this
+// file directly, it won't be found.
+#ifndef PFN_GetPhysicalDeviceProcAddr
+typedef PFN_vkVoidFunction(VKAPI_PTR *PFN_GetPhysicalDeviceProcAddr)(VkInstance instance, const char *pName);
+#endif
+
+// Typedefs for loader/ICD interface
+typedef VkResult (VKAPI_PTR *PFN_vk_icdNegotiateLoaderICDInterfaceVersion)(uint32_t* pVersion);
+typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_vk_icdGetInstanceProcAddr)(VkInstance instance, const char* pName);
+typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_vk_icdGetPhysicalDeviceProcAddr)(VkInstance instance, const char* pName);
+#if defined(VK_USE_PLATFORM_WIN32_KHR)
+typedef VkResult (VKAPI_PTR *PFN_vk_icdEnumerateAdapterPhysicalDevices)(VkInstance instance, LUID adapterLUID,
+    uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices);
+#endif
+
+// Prototypes for loader/ICD interface
+#if !defined(VK_NO_PROTOTYPES)
+#ifdef __cplusplus
+extern "C" {
+#endif
+    VKAPI_ATTR VkResult VKAPI_CALL vk_icdNegotiateLoaderICDInterfaceVersion(uint32_t* pVersion);
+    VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_icdGetInstanceProcAddr(VkInstance instance, const char* pName);
+    VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_icdGetPhysicalDeviceProcAddr(VkInstance instance, const char* pName);
+#if defined(VK_USE_PLATFORM_WIN32_KHR)
+    VKAPI_ATTR VkResult VKAPI_CALL vk_icdEnumerateAdapterPhysicalDevices(VkInstance instance, LUID adapterLUID,
+        uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices);
+#endif
+#ifdef __cplusplus
+}
+#endif
+#endif
+
+/*
+ * The ICD must reserve space for a pointer for the loader's dispatch
+ * table, at the start of <each object>.
+ * The ICD must initialize this variable using the SET_LOADER_MAGIC_VALUE macro.
+ */
+
+#define ICD_LOADER_MAGIC 0x01CDC0DE
+
+typedef union {
+    uintptr_t loaderMagic;
+    void *loaderData;
+} VK_LOADER_DATA;
+
+static inline void set_loader_magic_value(void *pNewObject) {
+    VK_LOADER_DATA *loader_info = (VK_LOADER_DATA *)pNewObject;
+    loader_info->loaderMagic = ICD_LOADER_MAGIC;
+}
+
+static inline bool valid_loader_magic_value(void *pNewObject) {
+    const VK_LOADER_DATA *loader_info = (VK_LOADER_DATA *)pNewObject;
+    return (loader_info->loaderMagic & 0xffffffff) == ICD_LOADER_MAGIC;
+}
+
+/*
+ * Windows and Linux ICDs will treat VkSurfaceKHR as a pointer to a struct that
+ * contains the platform-specific connection and surface information.
+ */
+typedef enum {
+    VK_ICD_WSI_PLATFORM_MIR,
+    VK_ICD_WSI_PLATFORM_WAYLAND,
+    VK_ICD_WSI_PLATFORM_WIN32,
+    VK_ICD_WSI_PLATFORM_XCB,
+    VK_ICD_WSI_PLATFORM_XLIB,
+    VK_ICD_WSI_PLATFORM_ANDROID,
+    VK_ICD_WSI_PLATFORM_MACOS,
+    VK_ICD_WSI_PLATFORM_IOS,
+    VK_ICD_WSI_PLATFORM_DISPLAY,
+    VK_ICD_WSI_PLATFORM_HEADLESS,
+    VK_ICD_WSI_PLATFORM_METAL,
+    VK_ICD_WSI_PLATFORM_DIRECTFB,
+    VK_ICD_WSI_PLATFORM_VI,
+    VK_ICD_WSI_PLATFORM_GGP,
+    VK_ICD_WSI_PLATFORM_SCREEN,
+    VK_ICD_WSI_PLATFORM_FUCHSIA,
+} VkIcdWsiPlatform;
+
+typedef struct {
+    VkIcdWsiPlatform platform;
+} VkIcdSurfaceBase;
+
+#ifdef VK_USE_PLATFORM_MIR_KHR
+typedef struct {
+    VkIcdSurfaceBase base;
+    MirConnection *connection;
+    MirSurface *mirSurface;
+} VkIcdSurfaceMir;
+#endif  // VK_USE_PLATFORM_MIR_KHR
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+typedef struct {
+    VkIcdSurfaceBase base;
+    struct wl_display *display;
+    struct wl_surface *surface;
+} VkIcdSurfaceWayland;
+#endif  // VK_USE_PLATFORM_WAYLAND_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+typedef struct {
+    VkIcdSurfaceBase base;
+    HINSTANCE hinstance;
+    HWND hwnd;
+} VkIcdSurfaceWin32;
+#endif  // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+typedef struct {
+    VkIcdSurfaceBase base;
+    xcb_connection_t *connection;
+    xcb_window_t window;
+} VkIcdSurfaceXcb;
+#endif  // VK_USE_PLATFORM_XCB_KHR
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+typedef struct {
+    VkIcdSurfaceBase base;
+    Display *dpy;
+    Window window;
+} VkIcdSurfaceXlib;
+#endif  // VK_USE_PLATFORM_XLIB_KHR
+
+#ifdef VK_USE_PLATFORM_DIRECTFB_EXT
+typedef struct {
+    VkIcdSurfaceBase base;
+    IDirectFB *dfb;
+    IDirectFBSurface *surface;
+} VkIcdSurfaceDirectFB;
+#endif  // VK_USE_PLATFORM_DIRECTFB_EXT
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+typedef struct {
+    VkIcdSurfaceBase base;
+    struct ANativeWindow *window;
+} VkIcdSurfaceAndroid;
+#endif  // VK_USE_PLATFORM_ANDROID_KHR
+
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+typedef struct {
+    VkIcdSurfaceBase base;
+    const void *pView;
+} VkIcdSurfaceMacOS;
+#endif  // VK_USE_PLATFORM_MACOS_MVK
+
+#ifdef VK_USE_PLATFORM_IOS_MVK
+typedef struct {
+    VkIcdSurfaceBase base;
+    const void *pView;
+} VkIcdSurfaceIOS;
+#endif  // VK_USE_PLATFORM_IOS_MVK
+
+#ifdef VK_USE_PLATFORM_GGP
+typedef struct {
+    VkIcdSurfaceBase base;
+    GgpStreamDescriptor streamDescriptor;
+} VkIcdSurfaceGgp;
+#endif  // VK_USE_PLATFORM_GGP
+
+typedef struct {
+    VkIcdSurfaceBase base;
+    VkDisplayModeKHR displayMode;
+    uint32_t planeIndex;
+    uint32_t planeStackIndex;
+    VkSurfaceTransformFlagBitsKHR transform;
+    float globalAlpha;
+    VkDisplayPlaneAlphaFlagBitsKHR alphaMode;
+    VkExtent2D imageExtent;
+} VkIcdSurfaceDisplay;
+
+typedef struct {
+    VkIcdSurfaceBase base;
+} VkIcdSurfaceHeadless;
+
+#ifdef VK_USE_PLATFORM_METAL_EXT
+typedef struct {
+    VkIcdSurfaceBase base;
+    const CAMetalLayer *pLayer;
+} VkIcdSurfaceMetal;
+#endif // VK_USE_PLATFORM_METAL_EXT
+
+#ifdef VK_USE_PLATFORM_VI_NN
+typedef struct {
+    VkIcdSurfaceBase base;
+    void *window;
+} VkIcdSurfaceVi;
+#endif // VK_USE_PLATFORM_VI_NN
+
+#ifdef VK_USE_PLATFORM_SCREEN_QNX
+typedef struct {
+    VkIcdSurfaceBase base;
+    struct _screen_context *context;
+    struct _screen_window *window;
+} VkIcdSurfaceScreen;
+#endif  // VK_USE_PLATFORM_SCREEN_QNX
+
+#ifdef VK_USE_PLATFORM_FUCHSIA
+typedef struct {
+  VkIcdSurfaceBase base;
+} VkIcdSurfaceImagePipe;
+#endif // VK_USE_PLATFORM_FUCHSIA
diff --git a/host/libs/graphics_detector/include/vulkan/vk_layer.h b/host/libs/graphics_detector/include/vulkan/vk_layer.h
new file mode 100644
index 0000000..6bd1c9a
--- /dev/null
+++ b/host/libs/graphics_detector/include/vulkan/vk_layer.h
@@ -0,0 +1,211 @@
+//
+// File: vk_layer.h
+//
+/*
+ * Copyright (c) 2015-2023 LunarG, Inc.
+ * Copyright (c) 2015-2023 The Khronos Group Inc.
+ * Copyright (c) 2015-2023 Valve Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#pragma once
+
+/* Need to define dispatch table
+ * Core struct can then have ptr to dispatch table at the top
+ * Along with object ptrs for current and next OBJ
+ */
+
+#include "vulkan_core.h"
+
+#if defined(__GNUC__) && __GNUC__ >= 4
+#define VK_LAYER_EXPORT __attribute__((visibility("default")))
+#elif defined(__SUNPRO_C) && (__SUNPRO_C >= 0x590)
+#define VK_LAYER_EXPORT __attribute__((visibility("default")))
+#else
+#define VK_LAYER_EXPORT
+#endif
+
+#define MAX_NUM_UNKNOWN_EXTS 250
+
+ // Loader-Layer version negotiation API.  Versions add the following features:
+ //   Versions 0/1 - Initial.  Doesn't support vk_layerGetPhysicalDeviceProcAddr
+ //                  or vk_icdNegotiateLoaderLayerInterfaceVersion.
+ //   Version 2    - Add support for vk_layerGetPhysicalDeviceProcAddr and
+ //                  vk_icdNegotiateLoaderLayerInterfaceVersion.
+#define CURRENT_LOADER_LAYER_INTERFACE_VERSION 2
+#define MIN_SUPPORTED_LOADER_LAYER_INTERFACE_VERSION 1
+
+#define VK_CURRENT_CHAIN_VERSION 1
+
+// Typedef for use in the interfaces below
+typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_GetPhysicalDeviceProcAddr)(VkInstance instance, const char* pName);
+
+// Version negotiation values
+typedef enum VkNegotiateLayerStructType {
+    LAYER_NEGOTIATE_UNINTIALIZED = 0,
+    LAYER_NEGOTIATE_INTERFACE_STRUCT = 1,
+} VkNegotiateLayerStructType;
+
+// Version negotiation structures
+typedef struct VkNegotiateLayerInterface {
+    VkNegotiateLayerStructType sType;
+    void *pNext;
+    uint32_t loaderLayerInterfaceVersion;
+    PFN_vkGetInstanceProcAddr pfnGetInstanceProcAddr;
+    PFN_vkGetDeviceProcAddr pfnGetDeviceProcAddr;
+    PFN_GetPhysicalDeviceProcAddr pfnGetPhysicalDeviceProcAddr;
+} VkNegotiateLayerInterface;
+
+// Version negotiation functions
+typedef VkResult (VKAPI_PTR *PFN_vkNegotiateLoaderLayerInterfaceVersion)(VkNegotiateLayerInterface *pVersionStruct);
+
+// Function prototype for unknown physical device extension command
+typedef VkResult(VKAPI_PTR *PFN_PhysDevExt)(VkPhysicalDevice phys_device);
+
+// ------------------------------------------------------------------------------------------------
+// CreateInstance and CreateDevice support structures
+
+/* Sub type of structure for instance and device loader ext of CreateInfo.
+ * When sType == VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO
+ * or sType == VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO
+ * then VkLayerFunction indicates struct type pointed to by pNext
+ */
+typedef enum VkLayerFunction_ {
+    VK_LAYER_LINK_INFO = 0,
+    VK_LOADER_DATA_CALLBACK = 1,
+    VK_LOADER_LAYER_CREATE_DEVICE_CALLBACK = 2,
+    VK_LOADER_FEATURES = 3,
+} VkLayerFunction;
+
+typedef struct VkLayerInstanceLink_ {
+    struct VkLayerInstanceLink_ *pNext;
+    PFN_vkGetInstanceProcAddr pfnNextGetInstanceProcAddr;
+    PFN_GetPhysicalDeviceProcAddr pfnNextGetPhysicalDeviceProcAddr;
+} VkLayerInstanceLink;
+
+/*
+ * When creating the device chain the loader needs to pass
+ * down information about its device structure needed at
+ * the end of the chain. Passing the data via the
+ * VkLayerDeviceInfo avoids issues with finding the
+ * exact instance being used.
+ */
+typedef struct VkLayerDeviceInfo_ {
+    void *device_info;
+    PFN_vkGetInstanceProcAddr pfnNextGetInstanceProcAddr;
+} VkLayerDeviceInfo;
+
+typedef VkResult (VKAPI_PTR *PFN_vkSetInstanceLoaderData)(VkInstance instance,
+        void *object);
+typedef VkResult (VKAPI_PTR *PFN_vkSetDeviceLoaderData)(VkDevice device,
+        void *object);
+typedef VkResult (VKAPI_PTR *PFN_vkLayerCreateDevice)(VkInstance instance, VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo *pCreateInfo,
+						      const VkAllocationCallbacks *pAllocator, VkDevice *pDevice, PFN_vkGetInstanceProcAddr layerGIPA, PFN_vkGetDeviceProcAddr *nextGDPA);
+typedef void (VKAPI_PTR *PFN_vkLayerDestroyDevice)(VkDevice physicalDevice, const VkAllocationCallbacks *pAllocator, PFN_vkDestroyDevice destroyFunction);
+
+typedef enum VkLoaderFeastureFlagBits {
+    VK_LOADER_FEATURE_PHYSICAL_DEVICE_SORTING = 0x00000001,
+} VkLoaderFlagBits;
+typedef VkFlags VkLoaderFeatureFlags;
+
+typedef struct {
+    VkStructureType sType; // VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO
+    const void *pNext;
+    VkLayerFunction function;
+    union {
+        VkLayerInstanceLink *pLayerInfo;
+        PFN_vkSetInstanceLoaderData pfnSetInstanceLoaderData;
+        struct {
+	        PFN_vkLayerCreateDevice pfnLayerCreateDevice;
+	        PFN_vkLayerDestroyDevice pfnLayerDestroyDevice;
+	    } layerDevice;
+        VkLoaderFeatureFlags loaderFeatures;
+    } u;
+} VkLayerInstanceCreateInfo;
+
+typedef struct VkLayerDeviceLink_ {
+    struct VkLayerDeviceLink_ *pNext;
+    PFN_vkGetInstanceProcAddr pfnNextGetInstanceProcAddr;
+    PFN_vkGetDeviceProcAddr pfnNextGetDeviceProcAddr;
+} VkLayerDeviceLink;
+
+typedef struct {
+    VkStructureType sType; // VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO
+    const void *pNext;
+    VkLayerFunction function;
+    union {
+        VkLayerDeviceLink *pLayerInfo;
+        PFN_vkSetDeviceLoaderData pfnSetDeviceLoaderData;
+    } u;
+} VkLayerDeviceCreateInfo;
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+VKAPI_ATTR VkResult VKAPI_CALL vkNegotiateLoaderLayerInterfaceVersion(VkNegotiateLayerInterface *pVersionStruct);
+
+typedef enum VkChainType {
+    VK_CHAIN_TYPE_UNKNOWN = 0,
+    VK_CHAIN_TYPE_ENUMERATE_INSTANCE_EXTENSION_PROPERTIES = 1,
+    VK_CHAIN_TYPE_ENUMERATE_INSTANCE_LAYER_PROPERTIES = 2,
+    VK_CHAIN_TYPE_ENUMERATE_INSTANCE_VERSION = 3,
+} VkChainType;
+
+typedef struct VkChainHeader {
+    VkChainType type;
+    uint32_t version;
+    uint32_t size;
+} VkChainHeader;
+
+typedef struct VkEnumerateInstanceExtensionPropertiesChain {
+    VkChainHeader header;
+    VkResult(VKAPI_PTR *pfnNextLayer)(const struct VkEnumerateInstanceExtensionPropertiesChain *, const char *, uint32_t *,
+                                      VkExtensionProperties *);
+    const struct VkEnumerateInstanceExtensionPropertiesChain *pNextLink;
+
+#if defined(__cplusplus)
+    inline VkResult CallDown(const char *pLayerName, uint32_t *pPropertyCount, VkExtensionProperties *pProperties) const {
+        return pfnNextLayer(pNextLink, pLayerName, pPropertyCount, pProperties);
+    }
+#endif
+} VkEnumerateInstanceExtensionPropertiesChain;
+
+typedef struct VkEnumerateInstanceLayerPropertiesChain {
+    VkChainHeader header;
+    VkResult(VKAPI_PTR *pfnNextLayer)(const struct VkEnumerateInstanceLayerPropertiesChain *, uint32_t *, VkLayerProperties *);
+    const struct VkEnumerateInstanceLayerPropertiesChain *pNextLink;
+
+#if defined(__cplusplus)
+    inline VkResult CallDown(uint32_t *pPropertyCount, VkLayerProperties *pProperties) const {
+        return pfnNextLayer(pNextLink, pPropertyCount, pProperties);
+    }
+#endif
+} VkEnumerateInstanceLayerPropertiesChain;
+
+typedef struct VkEnumerateInstanceVersionChain {
+    VkChainHeader header;
+    VkResult(VKAPI_PTR *pfnNextLayer)(const struct VkEnumerateInstanceVersionChain *, uint32_t *);
+    const struct VkEnumerateInstanceVersionChain *pNextLink;
+
+#if defined(__cplusplus)
+    inline VkResult CallDown(uint32_t *pApiVersion) const {
+        return pfnNextLayer(pNextLink, pApiVersion);
+    }
+#endif
+} VkEnumerateInstanceVersionChain;
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/host/libs/graphics_detector/include/vulkan/vk_platform.h b/host/libs/graphics_detector/include/vulkan/vk_platform.h
new file mode 100644
index 0000000..3ff8c5d
--- /dev/null
+++ b/host/libs/graphics_detector/include/vulkan/vk_platform.h
@@ -0,0 +1,84 @@
+//
+// File: vk_platform.h
+//
+/*
+** Copyright 2014-2022 The Khronos Group Inc.
+**
+** SPDX-License-Identifier: Apache-2.0
+*/
+
+
+#ifndef VK_PLATFORM_H_
+#define VK_PLATFORM_H_
+
+#ifdef __cplusplus
+extern "C"
+{
+#endif // __cplusplus
+
+/*
+***************************************************************************************************
+*   Platform-specific directives and type declarations
+***************************************************************************************************
+*/
+
+/* Platform-specific calling convention macros.
+ *
+ * Platforms should define these so that Vulkan clients call Vulkan commands
+ * with the same calling conventions that the Vulkan implementation expects.
+ *
+ * VKAPI_ATTR - Placed before the return type in function declarations.
+ *              Useful for C++11 and GCC/Clang-style function attribute syntax.
+ * VKAPI_CALL - Placed after the return type in function declarations.
+ *              Useful for MSVC-style calling convention syntax.
+ * VKAPI_PTR  - Placed between the '(' and '*' in function pointer types.
+ *
+ * Function declaration:  VKAPI_ATTR void VKAPI_CALL vkCommand(void);
+ * Function pointer type: typedef void (VKAPI_PTR *PFN_vkCommand)(void);
+ */
+#if defined(_WIN32)
+    // On Windows, Vulkan commands use the stdcall convention
+    #define VKAPI_ATTR
+    #define VKAPI_CALL __stdcall
+    #define VKAPI_PTR  VKAPI_CALL
+#elif defined(__ANDROID__) && defined(__ARM_ARCH) && __ARM_ARCH < 7
+    #error "Vulkan is not supported for the 'armeabi' NDK ABI"
+#elif defined(__ANDROID__) && defined(__ARM_ARCH) && __ARM_ARCH >= 7 && defined(__ARM_32BIT_STATE)
+    // On Android 32-bit ARM targets, Vulkan functions use the "hardfloat"
+    // calling convention, i.e. float parameters are passed in registers. This
+    // is true even if the rest of the application passes floats on the stack,
+    // as it does by default when compiling for the armeabi-v7a NDK ABI.
+    #define VKAPI_ATTR __attribute__((pcs("aapcs-vfp")))
+    #define VKAPI_CALL
+    #define VKAPI_PTR  VKAPI_ATTR
+#else
+    // On other platforms, use the default calling convention
+    #define VKAPI_ATTR
+    #define VKAPI_CALL
+    #define VKAPI_PTR
+#endif
+
+#if !defined(VK_NO_STDDEF_H)
+    #include <stddef.h>
+#endif // !defined(VK_NO_STDDEF_H)
+
+#if !defined(VK_NO_STDINT_H)
+    #if defined(_MSC_VER) && (_MSC_VER < 1600)
+        typedef signed   __int8  int8_t;
+        typedef unsigned __int8  uint8_t;
+        typedef signed   __int16 int16_t;
+        typedef unsigned __int16 uint16_t;
+        typedef signed   __int32 int32_t;
+        typedef unsigned __int32 uint32_t;
+        typedef signed   __int64 int64_t;
+        typedef unsigned __int64 uint64_t;
+    #else
+        #include <stdint.h>
+    #endif
+#endif // !defined(VK_NO_STDINT_H)
+
+#ifdef __cplusplus
+} // extern "C"
+#endif // __cplusplus
+
+#endif
diff --git a/host/libs/graphics_detector/include/vulkan/vulkan.h b/host/libs/graphics_detector/include/vulkan/vulkan.h
new file mode 100644
index 0000000..3510ac9
--- /dev/null
+++ b/host/libs/graphics_detector/include/vulkan/vulkan.h
@@ -0,0 +1,91 @@
+#ifndef VULKAN_H_
+#define VULKAN_H_ 1
+
+/*
+** Copyright 2015-2022 The Khronos Group Inc.
+**
+** SPDX-License-Identifier: Apache-2.0
+*/
+
+#include "vk_platform.h"
+#include "vulkan_core.h"
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+#include "vulkan_android.h"
+#endif
+
+#ifdef VK_USE_PLATFORM_FUCHSIA
+#include <zircon/types.h>
+#include "vulkan_fuchsia.h"
+#endif
+
+#ifdef VK_USE_PLATFORM_IOS_MVK
+#include "vulkan_ios.h"
+#endif
+
+
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+#include "vulkan_macos.h"
+#endif
+
+#ifdef VK_USE_PLATFORM_METAL_EXT
+#include "vulkan_metal.h"
+#endif
+
+#ifdef VK_USE_PLATFORM_VI_NN
+#include "vulkan_vi.h"
+#endif
+
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+#include "vulkan_wayland.h"
+#endif
+
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+#include <windows.h>
+#include "vulkan_win32.h"
+#endif
+
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+#include <xcb/xcb.h>
+#include "vulkan_xcb.h"
+#endif
+
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+#include <X11/Xlib.h>
+#include "vulkan_xlib.h"
+#endif
+
+
+#ifdef VK_USE_PLATFORM_DIRECTFB_EXT
+#include <directfb.h>
+#include "vulkan_directfb.h"
+#endif
+
+
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+#include <X11/Xlib.h>
+#include <X11/extensions/Xrandr.h>
+#include "vulkan_xlib_xrandr.h"
+#endif
+
+
+#ifdef VK_USE_PLATFORM_GGP
+#include <ggp_c/vulkan_types.h>
+#include "vulkan_ggp.h"
+#endif
+
+
+#ifdef VK_USE_PLATFORM_SCREEN_QNX
+#include <screen/screen.h>
+#include "vulkan_screen.h"
+#endif
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+#include "vulkan_beta.h"
+#endif
+
+#endif // VULKAN_H_
diff --git a/host/libs/graphics_detector/include/vulkan/vulkan.hpp b/host/libs/graphics_detector/include/vulkan/vulkan.hpp
new file mode 100644
index 0000000..1b506d9
--- /dev/null
+++ b/host/libs/graphics_detector/include/vulkan/vulkan.hpp
@@ -0,0 +1,10388 @@
+// Copyright 2015-2022 The Khronos Group Inc.
+// 
+// SPDX-License-Identifier: Apache-2.0 OR MIT
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#ifndef VULKAN_HPP
+#define VULKAN_HPP
+
+#if defined( _MSVC_LANG )
+#  define VULKAN_HPP_CPLUSPLUS _MSVC_LANG
+#else
+#  define VULKAN_HPP_CPLUSPLUS __cplusplus
+#endif
+
+#if 201703L < VULKAN_HPP_CPLUSPLUS
+#  define VULKAN_HPP_CPP_VERSION 20
+#elif 201402L < VULKAN_HPP_CPLUSPLUS
+#  define VULKAN_HPP_CPP_VERSION 17
+#elif 201103L < VULKAN_HPP_CPLUSPLUS
+#  define VULKAN_HPP_CPP_VERSION 14
+#elif 199711L < VULKAN_HPP_CPLUSPLUS
+#  define VULKAN_HPP_CPP_VERSION 11
+#else
+#  error "vulkan.hpp needs at least c++ standard version 11"
+#endif
+
+#include <algorithm>
+#include <array>   // ArrayWrapperND
+#include <string>  // std::string
+#include <vulkan/vulkan.h>
+#if 17 <= VULKAN_HPP_CPP_VERSION
+#  include <string_view>    // std::string_view
+#endif
+
+#if defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+#  if !defined( VULKAN_HPP_NO_SMART_HANDLE )
+#    define VULKAN_HPP_NO_SMART_HANDLE
+#  endif
+#else
+#  include <tuple>  // std::tie
+#  include <vector> // std::vector
+#endif
+
+#if !defined( VULKAN_HPP_NO_EXCEPTIONS )
+#  include <system_error>  // std::is_error_code_enum
+#endif
+
+#if defined( VULKAN_HPP_NO_CONSTRUCTORS )
+#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+#    define VULKAN_HPP_NO_STRUCT_CONSTRUCTORS
+#  endif
+#  if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )
+#    define VULKAN_HPP_NO_UNION_CONSTRUCTORS
+#  endif
+#endif
+
+#if defined( VULKAN_HPP_NO_SETTERS )
+#  if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+#    define VULKAN_HPP_NO_STRUCT_SETTERS
+#  endif
+#  if !defined( VULKAN_HPP_NO_UNION_SETTERS )
+#    define VULKAN_HPP_NO_UNION_SETTERS
+#  endif
+#endif
+
+#if !defined( VULKAN_HPP_ASSERT )
+#  include <cassert>
+#  define VULKAN_HPP_ASSERT assert
+#endif
+
+#if !defined( VULKAN_HPP_ASSERT_ON_RESULT )
+#  define VULKAN_HPP_ASSERT_ON_RESULT VULKAN_HPP_ASSERT
+#endif
+
+#if !defined( VULKAN_HPP_STATIC_ASSERT )
+# define VULKAN_HPP_STATIC_ASSERT static_assert
+#endif
+
+#if !defined( VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL )
+#  define VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL 1
+#endif
+
+#if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL == 1
+#  if defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNXNTO__ ) || defined(__Fuchsia__)
+#    include <dlfcn.h>
+#  elif defined( _WIN32 )
+typedef struct HINSTANCE__ * HINSTANCE;
+#    if defined( _WIN64 )
+typedef int64_t( __stdcall * FARPROC )();
+#    else
+typedef int( __stdcall * FARPROC )();
+#    endif
+extern "C" __declspec( dllimport ) HINSTANCE __stdcall LoadLibraryA( char const * lpLibFileName );
+extern "C" __declspec( dllimport ) int __stdcall FreeLibrary( HINSTANCE hLibModule );
+extern "C" __declspec( dllimport ) FARPROC __stdcall GetProcAddress( HINSTANCE hModule, const char * lpProcName );
+#  endif
+#endif
+
+#if !defined( __has_include )
+#  define __has_include( x ) false
+#endif
+
+#if ( 201907 <= __cpp_lib_three_way_comparison ) && __has_include( <compare> ) && !defined( VULKAN_HPP_NO_SPACESHIP_OPERATOR )
+#  define VULKAN_HPP_HAS_SPACESHIP_OPERATOR
+#endif
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+#  include <compare>
+#endif
+
+#if ( 201803 <= __cpp_lib_span )
+#  define VULKAN_HPP_SUPPORT_SPAN
+#  include <span>
+#endif
+
+
+static_assert( VK_HEADER_VERSION ==  239, "Wrong VK_HEADER_VERSION!" );
+
+// 32-bit vulkan is not typesafe for non-dispatchable handles, so don't allow copy constructors on this platform by default.
+// To enable this feature on 32-bit platforms please define VULKAN_HPP_TYPESAFE_CONVERSION
+#if ( VK_USE_64_BIT_PTR_DEFINES == 1 )
+#  if !defined( VULKAN_HPP_TYPESAFE_CONVERSION )
+#    define VULKAN_HPP_TYPESAFE_CONVERSION
+#  endif
+#endif
+
+// <tuple> includes <sys/sysmacros.h> through some other header
+// this results in major(x) being resolved to gnu_dev_major(x)
+// which is an expression in a constructor initializer list.
+#if defined( major )
+#  undef major
+#endif
+#if defined( minor )
+#  undef minor
+#endif
+
+// Windows defines MemoryBarrier which is deprecated and collides
+// with the VULKAN_HPP_NAMESPACE::MemoryBarrier struct.
+#if defined( MemoryBarrier )
+#  undef MemoryBarrier
+#endif
+
+#if !defined( VULKAN_HPP_HAS_UNRESTRICTED_UNIONS )
+#  if defined( __clang__ )
+#    if __has_feature( cxx_unrestricted_unions )
+#      define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
+#    endif
+#  elif defined( __GNUC__ )
+#    define GCC_VERSION ( __GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__ )
+#    if 40600 <= GCC_VERSION
+#      define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
+#    endif
+#  elif defined( _MSC_VER )
+#    if 1900 <= _MSC_VER
+#      define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
+#    endif
+#  endif
+#endif
+
+#if !defined( VULKAN_HPP_INLINE )
+#  if defined( __clang__ )
+#    if __has_attribute( always_inline )
+#      define VULKAN_HPP_INLINE __attribute__( ( always_inline ) ) __inline__
+#    else
+#      define VULKAN_HPP_INLINE inline
+#    endif
+#  elif defined( __GNUC__ )
+#    define VULKAN_HPP_INLINE __attribute__( ( always_inline ) ) __inline__
+#  elif defined( _MSC_VER )
+#    define VULKAN_HPP_INLINE inline
+#  else
+#    define VULKAN_HPP_INLINE inline
+#  endif
+#endif
+
+#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
+#  define VULKAN_HPP_TYPESAFE_EXPLICIT
+#else
+#  define VULKAN_HPP_TYPESAFE_EXPLICIT explicit
+#endif
+
+#if defined( __cpp_constexpr )
+#  define VULKAN_HPP_CONSTEXPR constexpr
+#  if __cpp_constexpr >= 201304
+#    define VULKAN_HPP_CONSTEXPR_14 constexpr
+#  else
+#    define VULKAN_HPP_CONSTEXPR_14
+#  endif
+#  define VULKAN_HPP_CONST_OR_CONSTEXPR constexpr
+#else
+#  define VULKAN_HPP_CONSTEXPR
+#  define VULKAN_HPP_CONSTEXPR_14
+#  define VULKAN_HPP_CONST_OR_CONSTEXPR const
+#endif
+
+#if !defined( VULKAN_HPP_NOEXCEPT )
+#  if defined( _MSC_VER ) && ( _MSC_VER <= 1800 )
+#    define VULKAN_HPP_NOEXCEPT
+#  else
+#    define VULKAN_HPP_NOEXCEPT     noexcept
+#    define VULKAN_HPP_HAS_NOEXCEPT 1
+#    if defined( VULKAN_HPP_NO_EXCEPTIONS )
+#      define VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS noexcept
+#    else
+#      define VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
+#    endif
+#  endif
+#endif
+
+#if 14 <= VULKAN_HPP_CPP_VERSION
+#  define VULKAN_HPP_DEPRECATED( msg ) [[deprecated( msg )]]
+#else
+#  define VULKAN_HPP_DEPRECATED( msg )
+#endif
+
+#if ( 17 <= VULKAN_HPP_CPP_VERSION ) && !defined( VULKAN_HPP_NO_NODISCARD_WARNINGS )
+#  define VULKAN_HPP_NODISCARD [[nodiscard]]
+#  if defined( VULKAN_HPP_NO_EXCEPTIONS )
+#    define VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS [[nodiscard]]
+#  else
+#    define VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
+#  endif
+#else
+#  define VULKAN_HPP_NODISCARD
+#  define VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
+#endif
+
+#if !defined( VULKAN_HPP_NAMESPACE )
+#  define VULKAN_HPP_NAMESPACE vk
+#endif
+
+#define VULKAN_HPP_STRINGIFY2( text ) #text
+#define VULKAN_HPP_STRINGIFY( text )  VULKAN_HPP_STRINGIFY2( text )
+#define VULKAN_HPP_NAMESPACE_STRING   VULKAN_HPP_STRINGIFY( VULKAN_HPP_NAMESPACE )
+
+
+namespace VULKAN_HPP_NAMESPACE
+{
+  template <typename T, size_t N>
+  class ArrayWrapper1D : public std::array<T, N>
+  {
+  public:
+    VULKAN_HPP_CONSTEXPR ArrayWrapper1D() VULKAN_HPP_NOEXCEPT
+      : std::array<T, N>()
+    {}
+
+    VULKAN_HPP_CONSTEXPR ArrayWrapper1D( std::array<T, N> const & data ) VULKAN_HPP_NOEXCEPT
+      : std::array<T, N>( data )
+    {}
+
+#if ( VK_USE_64_BIT_PTR_DEFINES == 0 )
+    // on 32 bit compiles, needs overloads on index type int to resolve ambiguities
+    VULKAN_HPP_CONSTEXPR T const & operator[]( int index ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::array<T, N>::operator[]( index );
+    }
+
+    T & operator[]( int index ) VULKAN_HPP_NOEXCEPT
+    {
+      return std::array<T, N>::operator[]( index );
+    }
+#endif
+
+    operator T const * () const VULKAN_HPP_NOEXCEPT
+    {
+      return this->data();
+    }
+
+    operator T * () VULKAN_HPP_NOEXCEPT
+    {
+      return this->data();
+    }
+
+    template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
+    operator std::string() const
+    {
+      return std::string( this->data() );
+    }
+
+#if 17 <= VULKAN_HPP_CPP_VERSION
+    template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
+    operator std::string_view() const
+    {
+      return std::string_view( this->data() );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
+    std::strong_ordering operator<=>( ArrayWrapper1D<char, N> const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return *static_cast<std::array<char, N> const *>( this ) <=> *static_cast<std::array<char, N> const *>( &rhs );
+    }
+#else
+    template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
+    bool operator<( ArrayWrapper1D<char, N> const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return *static_cast<std::array<char, N> const *>( this ) < *static_cast<std::array<char, N> const *>( &rhs );
+    }
+
+    template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
+    bool operator<=( ArrayWrapper1D<char, N> const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return *static_cast<std::array<char, N> const *>( this ) <= *static_cast<std::array<char, N> const *>( &rhs );
+    }
+
+    template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
+    bool operator>( ArrayWrapper1D<char, N> const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return *static_cast<std::array<char, N> const *>( this ) > *static_cast<std::array<char, N> const *>( &rhs );
+    }
+
+    template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
+    bool operator>=( ArrayWrapper1D<char, N> const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return *static_cast<std::array<char, N> const *>( this ) >= *static_cast<std::array<char, N> const *>( &rhs );
+    }
+#endif
+
+    template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
+    bool operator==( ArrayWrapper1D<char, N> const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return *static_cast<std::array<char, N> const *>( this ) == *static_cast<std::array<char, N> const *>( &rhs );
+    }
+
+    template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
+    bool operator!=( ArrayWrapper1D<char, N> const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return *static_cast<std::array<char, N> const *>( this ) != *static_cast<std::array<char, N> const *>( &rhs );
+    }
+  };
+
+  // specialization of relational operators between std::string and arrays of chars
+  template <size_t N>
+  bool operator<( std::string const & lhs, ArrayWrapper1D<char, N> const & rhs ) VULKAN_HPP_NOEXCEPT
+  {
+    return lhs < rhs.data();
+  }
+
+  template <size_t N>
+  bool operator<=( std::string const & lhs, ArrayWrapper1D<char, N> const & rhs ) VULKAN_HPP_NOEXCEPT
+  {
+    return lhs <= rhs.data();
+  }
+
+  template <size_t N>
+  bool operator>( std::string const & lhs, ArrayWrapper1D<char, N> const & rhs ) VULKAN_HPP_NOEXCEPT
+  {
+    return lhs > rhs.data();
+  }
+
+  template <size_t N>
+  bool operator>=( std::string const & lhs, ArrayWrapper1D<char, N> const & rhs ) VULKAN_HPP_NOEXCEPT
+  {
+    return lhs >= rhs.data();
+  }
+
+  template <size_t N>
+  bool operator==( std::string const & lhs, ArrayWrapper1D<char, N> const & rhs ) VULKAN_HPP_NOEXCEPT
+  {
+    return lhs == rhs.data();
+  }
+
+  template <size_t N>
+  bool operator!=( std::string const & lhs, ArrayWrapper1D<char, N> const & rhs ) VULKAN_HPP_NOEXCEPT
+  {
+    return lhs != rhs.data();
+  }
+
+  template <typename T, size_t N, size_t M>
+  class ArrayWrapper2D : public std::array<ArrayWrapper1D<T, M>, N>
+  {
+  public:
+    VULKAN_HPP_CONSTEXPR ArrayWrapper2D() VULKAN_HPP_NOEXCEPT
+      : std::array<ArrayWrapper1D<T, M>, N>()
+    {}
+
+    VULKAN_HPP_CONSTEXPR ArrayWrapper2D( std::array<std::array<T, M>, N> const & data ) VULKAN_HPP_NOEXCEPT
+      : std::array<ArrayWrapper1D<T, M>, N>( *reinterpret_cast<std::array<ArrayWrapper1D<T, M>, N> const *>( &data ) )
+    {}
+  };
+
+  template <typename FlagBitsType>
+  struct FlagTraits
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = false;
+  };
+
+  template <typename BitType>
+  class Flags
+  {
+  public:
+    using MaskType = typename std::underlying_type<BitType>::type;
+
+    // constructors
+    VULKAN_HPP_CONSTEXPR Flags() VULKAN_HPP_NOEXCEPT
+      : m_mask( 0 )
+    {}
+
+    VULKAN_HPP_CONSTEXPR Flags( BitType bit ) VULKAN_HPP_NOEXCEPT
+      : m_mask( static_cast<MaskType>( bit ) )
+    {}
+
+    VULKAN_HPP_CONSTEXPR Flags( Flags<BitType> const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VULKAN_HPP_CONSTEXPR explicit Flags( MaskType flags ) VULKAN_HPP_NOEXCEPT
+      : m_mask( flags )
+    {}
+
+    // relational operators
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( Flags<BitType> const & ) const = default;
+#else
+    VULKAN_HPP_CONSTEXPR bool operator<( Flags<BitType> const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_mask < rhs.m_mask;
+    }
+
+    VULKAN_HPP_CONSTEXPR bool operator<=( Flags<BitType> const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_mask <= rhs.m_mask;
+    }
+
+    VULKAN_HPP_CONSTEXPR bool operator>( Flags<BitType> const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_mask > rhs.m_mask;
+    }
+
+    VULKAN_HPP_CONSTEXPR bool operator>=( Flags<BitType> const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_mask >= rhs.m_mask;
+    }
+
+    VULKAN_HPP_CONSTEXPR bool operator==( Flags<BitType> const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_mask == rhs.m_mask;
+    }
+
+    VULKAN_HPP_CONSTEXPR bool operator!=( Flags<BitType> const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_mask != rhs.m_mask;
+    }
+#endif
+
+    // logical operator
+    VULKAN_HPP_CONSTEXPR bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return !m_mask;
+    }
+
+    // bitwise operators
+    VULKAN_HPP_CONSTEXPR Flags<BitType> operator&( Flags<BitType> const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return Flags<BitType>( m_mask & rhs.m_mask );
+    }
+
+    VULKAN_HPP_CONSTEXPR Flags<BitType> operator|( Flags<BitType> const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return Flags<BitType>( m_mask | rhs.m_mask );
+    }
+
+    VULKAN_HPP_CONSTEXPR Flags<BitType> operator^( Flags<BitType> const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return Flags<BitType>( m_mask ^ rhs.m_mask );
+    }
+
+    VULKAN_HPP_CONSTEXPR Flags<BitType> operator~() const VULKAN_HPP_NOEXCEPT
+    {
+      return Flags<BitType>( m_mask ^ FlagTraits<BitType>::allFlags.m_mask );
+    }
+
+    // assignment operators
+    VULKAN_HPP_CONSTEXPR_14 Flags<BitType> & operator=( Flags<BitType> const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VULKAN_HPP_CONSTEXPR_14 Flags<BitType> & operator|=( Flags<BitType> const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      m_mask |= rhs.m_mask;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 Flags<BitType> & operator&=( Flags<BitType> const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      m_mask &= rhs.m_mask;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 Flags<BitType> & operator^=( Flags<BitType> const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      m_mask ^= rhs.m_mask;
+      return *this;
+    }
+
+    // cast operators
+    explicit VULKAN_HPP_CONSTEXPR operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return !!m_mask;
+    }
+
+    explicit VULKAN_HPP_CONSTEXPR operator MaskType() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_mask;
+    }
+
+#if defined( VULKAN_HPP_FLAGS_MASK_TYPE_AS_PUBLIC )
+  public:
+#else
+  private:
+#endif
+    MaskType m_mask;
+  };
+
+#if !defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+  // relational operators only needed for pre C++20
+  template <typename BitType>
+  VULKAN_HPP_CONSTEXPR bool operator<( BitType bit, Flags<BitType> const & flags ) VULKAN_HPP_NOEXCEPT
+  {
+    return flags.operator>( bit );
+  }
+
+  template <typename BitType>
+  VULKAN_HPP_CONSTEXPR bool operator<=( BitType bit, Flags<BitType> const & flags ) VULKAN_HPP_NOEXCEPT
+  {
+    return flags.operator>=( bit );
+  }
+
+  template <typename BitType>
+  VULKAN_HPP_CONSTEXPR bool operator>( BitType bit, Flags<BitType> const & flags ) VULKAN_HPP_NOEXCEPT
+  {
+    return flags.operator<( bit );
+  }
+
+  template <typename BitType>
+  VULKAN_HPP_CONSTEXPR bool operator>=( BitType bit, Flags<BitType> const & flags ) VULKAN_HPP_NOEXCEPT
+  {
+    return flags.operator<=( bit );
+  }
+
+  template <typename BitType>
+  VULKAN_HPP_CONSTEXPR bool operator==( BitType bit, Flags<BitType> const & flags ) VULKAN_HPP_NOEXCEPT
+  {
+    return flags.operator==( bit );
+  }
+
+  template <typename BitType>
+  VULKAN_HPP_CONSTEXPR bool operator!=( BitType bit, Flags<BitType> const & flags ) VULKAN_HPP_NOEXCEPT
+  {
+    return flags.operator!=( bit );
+  }
+#endif
+
+  // bitwise operators
+  template <typename BitType>
+  VULKAN_HPP_CONSTEXPR Flags<BitType> operator&( BitType bit, Flags<BitType> const & flags ) VULKAN_HPP_NOEXCEPT
+  {
+    return flags.operator&( bit );
+  }
+
+  template <typename BitType>
+  VULKAN_HPP_CONSTEXPR Flags<BitType> operator|( BitType bit, Flags<BitType> const & flags ) VULKAN_HPP_NOEXCEPT
+  {
+    return flags.operator|( bit );
+  }
+
+  template <typename BitType>
+  VULKAN_HPP_CONSTEXPR Flags<BitType> operator^( BitType bit, Flags<BitType> const & flags ) VULKAN_HPP_NOEXCEPT
+  {
+    return flags.operator^( bit );
+  }
+
+  // bitwise operators on BitType
+  template <typename BitType, typename std::enable_if<FlagTraits<BitType>::isBitmask, bool>::type = true>
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR Flags<BitType> operator&(BitType lhs, BitType rhs) VULKAN_HPP_NOEXCEPT
+  {
+    return Flags<BitType>( lhs ) & rhs;
+  }
+
+  template <typename BitType, typename std::enable_if<FlagTraits<BitType>::isBitmask, bool>::type = true>
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR Flags<BitType> operator|(BitType lhs, BitType rhs) VULKAN_HPP_NOEXCEPT
+  {
+    return Flags<BitType>( lhs ) | rhs;
+  }
+
+  template <typename BitType, typename std::enable_if<FlagTraits<BitType>::isBitmask, bool>::type = true>
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR Flags<BitType> operator^(BitType lhs, BitType rhs) VULKAN_HPP_NOEXCEPT
+  {
+    return Flags<BitType>( lhs ) ^ rhs;
+  }
+
+  template <typename BitType, typename std::enable_if<FlagTraits<BitType>::isBitmask, bool>::type = true>
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR Flags<BitType> operator~( BitType bit ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( Flags<BitType>( bit ) );
+  }
+
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+  template <typename T>
+  class ArrayProxy
+  {
+  public:
+    VULKAN_HPP_CONSTEXPR ArrayProxy() VULKAN_HPP_NOEXCEPT
+      : m_count( 0 )
+      , m_ptr( nullptr )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ArrayProxy( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_count( 0 )
+      , m_ptr( nullptr )
+    {}
+
+    ArrayProxy( T const & value ) VULKAN_HPP_NOEXCEPT
+      : m_count( 1 )
+      , m_ptr( &value )
+    {}
+
+    ArrayProxy( uint32_t count, T const * ptr ) VULKAN_HPP_NOEXCEPT
+      : m_count( count )
+      , m_ptr( ptr )
+    {}
+
+    template <std::size_t C>
+    ArrayProxy( T const ( &ptr )[C] ) VULKAN_HPP_NOEXCEPT
+      : m_count( C )
+      , m_ptr( ptr )
+    {}
+
+#  if __GNUC__ >= 9
+#    pragma GCC diagnostic push
+#    pragma GCC diagnostic ignored "-Winit-list-lifetime"
+#  endif
+
+    ArrayProxy( std::initializer_list<T> const & list ) VULKAN_HPP_NOEXCEPT
+      : m_count( static_cast<uint32_t>( list.size() ) )
+      , m_ptr( list.begin() )
+    {}
+
+    template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
+    ArrayProxy( std::initializer_list<typename std::remove_const<T>::type> const & list ) VULKAN_HPP_NOEXCEPT
+      : m_count( static_cast<uint32_t>( list.size() ) )
+      , m_ptr( list.begin() )
+    {
+    }
+
+#if __GNUC__ >= 9
+#    pragma GCC diagnostic pop
+#  endif
+
+    // Any type with a .data() return type implicitly convertible to T*, and a .size() return type implicitly
+    // convertible to size_t. The const version can capture temporaries, with lifetime ending at end of statement.
+    template <typename V,
+              typename std::enable_if<
+                std::is_convertible<decltype( std::declval<V>().data() ), T *>::value &&
+                std::is_convertible<decltype( std::declval<V>().size() ), std::size_t>::value>::type * = nullptr>
+    ArrayProxy( V const & v ) VULKAN_HPP_NOEXCEPT
+      : m_count( static_cast<uint32_t>( v.size() ) )
+      , m_ptr( v.data() )
+    {}
+
+    const T * begin() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_ptr;
+    }
+
+    const T * end() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_ptr + m_count;
+    }
+
+    const T & front() const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( m_count && m_ptr );
+      return *m_ptr;
+    }
+
+    const T & back() const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( m_count && m_ptr );
+      return *( m_ptr + m_count - 1 );
+    }
+
+    bool empty() const VULKAN_HPP_NOEXCEPT
+    {
+      return ( m_count == 0 );
+    }
+
+    uint32_t size() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_count;
+    }
+
+    T const * data() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_ptr;
+    }
+
+  private:
+    uint32_t  m_count;
+    T const * m_ptr;
+  };
+
+  template <typename T>
+  class ArrayProxyNoTemporaries
+  {
+  public:
+    VULKAN_HPP_CONSTEXPR ArrayProxyNoTemporaries() VULKAN_HPP_NOEXCEPT
+      : m_count( 0 )
+      , m_ptr( nullptr )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ArrayProxyNoTemporaries( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_count( 0 )
+      , m_ptr( nullptr )
+    {}
+
+    ArrayProxyNoTemporaries( T & value ) VULKAN_HPP_NOEXCEPT
+      : m_count( 1 )
+      , m_ptr( &value )
+    {}
+
+    template <typename V>
+    ArrayProxyNoTemporaries( V && value ) = delete;
+
+    template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
+    ArrayProxyNoTemporaries( typename std::remove_const<T>::type & value ) VULKAN_HPP_NOEXCEPT
+      : m_count( 1 )
+      , m_ptr( &value )
+    {}
+
+    template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
+    ArrayProxyNoTemporaries( typename std::remove_const<T>::type && value ) = delete;
+
+    ArrayProxyNoTemporaries( uint32_t count, T * ptr ) VULKAN_HPP_NOEXCEPT
+      : m_count( count )
+      , m_ptr( ptr )
+    {}
+
+    template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
+    ArrayProxyNoTemporaries( uint32_t count, typename std::remove_const<T>::type * ptr ) VULKAN_HPP_NOEXCEPT
+      : m_count( count )
+      , m_ptr( ptr )
+    {}
+
+    template <std::size_t C>
+    ArrayProxyNoTemporaries( T (& ptr)[C] ) VULKAN_HPP_NOEXCEPT
+      : m_count( C )
+      , m_ptr( ptr )
+    {}
+
+    template <std::size_t C>
+    ArrayProxyNoTemporaries( T (&& ptr)[C] ) = delete;
+
+    template <std::size_t C, typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
+    ArrayProxyNoTemporaries( typename std::remove_const<T>::type (& ptr)[C] ) VULKAN_HPP_NOEXCEPT
+      : m_count( C )
+      , m_ptr( ptr )
+    {}
+
+    template <std::size_t C, typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
+    ArrayProxyNoTemporaries( typename std::remove_const<T>::type (&& ptr)[C] ) = delete;
+
+    ArrayProxyNoTemporaries( std::initializer_list<T> const & list ) VULKAN_HPP_NOEXCEPT
+      : m_count( static_cast<uint32_t>( list.size() ) )
+      , m_ptr( list.begin() )
+    {}
+
+    ArrayProxyNoTemporaries( std::initializer_list<T> const && list ) = delete;
+
+    template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
+    ArrayProxyNoTemporaries( std::initializer_list<typename std::remove_const<T>::type> const & list )
+      VULKAN_HPP_NOEXCEPT
+      : m_count( static_cast<uint32_t>( list.size() ) )
+      , m_ptr( list.begin() )
+    {}
+
+    template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
+    ArrayProxyNoTemporaries( std::initializer_list<typename std::remove_const<T>::type> const && list ) = delete;
+
+    ArrayProxyNoTemporaries( std::initializer_list<T> & list ) VULKAN_HPP_NOEXCEPT
+      : m_count( static_cast<uint32_t>( list.size() ) )
+      , m_ptr( list.begin() )
+    {}
+
+    ArrayProxyNoTemporaries( std::initializer_list<T> && list ) = delete;
+
+    template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
+    ArrayProxyNoTemporaries( std::initializer_list<typename std::remove_const<T>::type> & list ) VULKAN_HPP_NOEXCEPT
+      : m_count( static_cast<uint32_t>( list.size() ) )
+      , m_ptr( list.begin() )
+    {}
+
+    template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
+    ArrayProxyNoTemporaries( std::initializer_list<typename std::remove_const<T>::type> && list ) = delete;
+
+    // Any type with a .data() return type implicitly convertible to T*, and a .size() return type implicitly convertible to size_t.
+    template <typename V,
+              typename std::enable_if<
+                std::is_convertible<decltype( std::declval<V>().data() ), T *>::value &&
+                std::is_convertible<decltype( std::declval<V>().size() ), std::size_t>::value>::type * = nullptr>
+    ArrayProxyNoTemporaries( V & v ) VULKAN_HPP_NOEXCEPT
+      : m_count( static_cast<uint32_t>( v.size() ) )
+      , m_ptr( v.data() )
+    {}
+
+    const T * begin() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_ptr;
+    }
+
+    const T * end() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_ptr + m_count;
+    }
+
+    const T & front() const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( m_count && m_ptr );
+      return *m_ptr;
+    }
+
+    const T & back() const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( m_count && m_ptr );
+      return *( m_ptr + m_count - 1 );
+    }
+
+    bool empty() const VULKAN_HPP_NOEXCEPT
+    {
+      return ( m_count == 0 );
+    }
+
+    uint32_t size() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_count;
+    }
+
+    T * data() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_ptr;
+    }
+
+  private:
+    uint32_t m_count;
+    T *      m_ptr;
+  };
+
+  // View over `size()` elements of T that are spaced m_stride bytes apart in
+  // memory (e.g. a member embedded in an array of larger structs). Inherits
+  // ArrayProxy privately and re-exports the accessors that are stride-agnostic.
+  template <typename T>
+  class StridedArrayProxy : protected ArrayProxy<T>
+  {
+  public:
+    using ArrayProxy<T>::ArrayProxy;
+
+    // count elements starting at ptr, each `stride` bytes apart.
+    // Debug-asserts that the stride is at least sizeof(T).
+    StridedArrayProxy( uint32_t count, T const * ptr, uint32_t stride ) VULKAN_HPP_NOEXCEPT
+      : ArrayProxy<T>( count, ptr )
+      , m_stride( stride )
+    {
+      VULKAN_HPP_ASSERT( sizeof( T ) <= stride );
+    }
+
+    using ArrayProxy<T>::begin;
+
+    // One past the last element, computed via the byte stride rather than T.
+    // NOTE(review): static_cast from `T const *` to `uint8_t const *` is
+    // ill-formed for class types if this member is ever instantiated; newer
+    // upstream Vulkan-Hpp casts through void const* — verify against upstream.
+    const T * end() const VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<T const *>( static_cast<uint8_t const *>( begin() ) + size() * m_stride );
+    }
+
+    using ArrayProxy<T>::front;
+
+    // Last element, located by byte offset (size()-1)*stride from begin().
+    // NOTE(review): same static_cast concern as end() above.
+    const T & back() const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( begin() && size() );
+      return *reinterpret_cast<T const *>( static_cast<uint8_t const *>( begin() ) + ( size() - 1 ) * m_stride );
+    }
+
+    using ArrayProxy<T>::empty;
+    using ArrayProxy<T>::size;
+    using ArrayProxy<T>::data;
+
+    // Byte distance between consecutive elements.
+    uint32_t stride() const
+    {
+      return m_stride;
+    }
+
+  private:
+    uint32_t m_stride = sizeof( T );  // defaults to densely-packed elements
+  };
+
+  // Nullable wrapper around a reference/pointer to RefType. Used for optional
+  // parameters: callers may pass a reference, a raw pointer, or nullptr, and
+  // the wrapper converts back to the raw pointer the C API expects.
+  template <typename RefType>
+  class Optional
+  {
+  public:
+    Optional( RefType & reference ) VULKAN_HPP_NOEXCEPT
+    {
+      m_ptr = &reference;
+    }
+    Optional( RefType * ptr ) VULKAN_HPP_NOEXCEPT
+    {
+      m_ptr = ptr;
+    }
+    Optional( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_ptr = nullptr;
+    }
+
+    // Implicit decay to the stored (possibly null) pointer.
+    operator RefType *() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_ptr;
+    }
+    RefType const * operator->() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_ptr;
+    }
+    // True when a value is present.
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return !!m_ptr;
+    }
+
+  private:
+    RefType * m_ptr;  // non-owning; null means "absent"
+  };
+
+  // Primary template: by default no structure X may appear in the pNext chain
+  // of structure Y. Generated specializations (elsewhere in this header) set
+  // value = true for each legal extension pairing.
+  template <typename X, typename Y>
+  struct StructExtends
+  {
+    enum
+    {
+      value = false
+    };
+  };
+
+  // Compile-time membership test: valid == true iff Type is one of the listed
+  // chain element types. Base case: an empty pack contains nothing.
+  template <typename Type, class...>
+  struct IsPartOfStructureChain
+  {
+    static const bool valid = false;
+  };
+
+  // Recursive case: match the head or recurse into the tail of the pack.
+  template <typename Type, typename Head, typename... Tail>
+  struct IsPartOfStructureChain<Type, Head, Tail...>
+  {
+    static const bool valid = std::is_same<Type, Head>::value || IsPartOfStructureChain<Type, Tail...>::valid;
+  };
+
+  // True iff T equals any of the first Index+1 elements of the pack, checked
+  // by walking the tuple indices downwards from Index.
+  template <size_t Index, typename T, typename... ChainElements>
+  struct StructureChainContains
+  {
+    static const bool value =
+      std::is_same<T, typename std::tuple_element<Index, std::tuple<ChainElements...>>::type>::value ||
+      StructureChainContains<Index - 1, T, ChainElements...>::value;
+  };
+
+  // Recursion terminator: only element 0 remains to compare.
+  template <typename T, typename... ChainElements>
+  struct StructureChainContains<0, T, ChainElements...>
+  {
+    static const bool value =
+      std::is_same<T, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value;
+  };
+
+  // Validates a StructureChain element-by-element: each element at Index > 0
+  // must (a) be a legal pNext extension of the head element (StructExtends)
+  // and (b) not duplicate an earlier element unless it declares allowDuplicate.
+  template <size_t Index, typename... ChainElements>
+  struct StructureChainValidation
+  {
+    using TestType = typename std::tuple_element<Index, std::tuple<ChainElements...>>::type;
+    static const bool valid =
+      StructExtends<TestType, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value &&
+      ( TestType::allowDuplicate || !StructureChainContains<Index - 1, TestType, ChainElements...>::value ) &&
+      StructureChainValidation<Index - 1, ChainElements...>::valid;
+  };
+
+  // The head element (Index 0) is always valid by definition.
+  template <typename... ChainElements>
+  struct StructureChainValidation<0, ChainElements...>
+  {
+    static const bool valid = true;
+  };
+
+  // A tuple of Vulkan structures whose pNext pointers are kept linked in tuple
+  // order. Validity of the chain (legal extensions, no illegal duplicates) is
+  // enforced at compile time via StructureChainValidation; elements can be
+  // unlinked from / relinked into the runtime pNext chain individually.
+  template <typename... ChainElements>
+  class StructureChain : public std::tuple<ChainElements...>
+  {
+  public:
+    // Default-constructs all elements and links them head-to-tail.
+    StructureChain() VULKAN_HPP_NOEXCEPT
+    {
+      static_assert( StructureChainValidation<sizeof...( ChainElements ) - 1, ChainElements...>::valid,
+                     "The structure chain is not valid!" );
+      link<sizeof...( ChainElements ) - 1>();
+    }
+
+    // Copies rhs, then rebases every pNext pointer from rhs's storage onto
+    // this object's storage (same offsets, different base address).
+    StructureChain( StructureChain const & rhs ) VULKAN_HPP_NOEXCEPT : std::tuple<ChainElements...>( rhs )
+    {
+      static_assert( StructureChainValidation<sizeof...( ChainElements ) - 1, ChainElements...>::valid,
+                     "The structure chain is not valid!" );
+      link( &std::get<0>( *this ),
+            &std::get<0>( rhs ),
+            reinterpret_cast<VkBaseOutStructure *>( &std::get<0>( *this ) ),
+            reinterpret_cast<VkBaseInStructure const *>( &std::get<0>( rhs ) ) );
+    }
+
+    // Move-constructs the tuple, then rebases pNext pointers as in the copy
+    // constructor (the moved-from elements still hold their old addresses).
+    StructureChain( StructureChain && rhs ) VULKAN_HPP_NOEXCEPT
+      : std::tuple<ChainElements...>( std::forward<std::tuple<ChainElements...>>( rhs ) )
+    {
+      static_assert( StructureChainValidation<sizeof...( ChainElements ) - 1, ChainElements...>::valid,
+                     "The structure chain is not valid!" );
+      link( &std::get<0>( *this ),
+            &std::get<0>( rhs ),
+            reinterpret_cast<VkBaseOutStructure *>( &std::get<0>( *this ) ),
+            reinterpret_cast<VkBaseInStructure const *>( &std::get<0>( rhs ) ) );
+    }
+
+    // Constructs from individual element values, then links head-to-tail
+    // (any pNext values the caller set in elems are overwritten).
+    StructureChain( ChainElements const &... elems ) VULKAN_HPP_NOEXCEPT : std::tuple<ChainElements...>( elems... )
+    {
+      static_assert( StructureChainValidation<sizeof...( ChainElements ) - 1, ChainElements...>::valid,
+                     "The structure chain is not valid!" );
+      link<sizeof...( ChainElements ) - 1>();
+    }
+
+    // Copy-assigns the tuple, then rebases pNext pointers onto this storage.
+    StructureChain & operator=( StructureChain const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      std::tuple<ChainElements...>::operator=( rhs );
+      link( &std::get<0>( *this ),
+            &std::get<0>( rhs ),
+            reinterpret_cast<VkBaseOutStructure *>( &std::get<0>( *this ) ),
+            reinterpret_cast<VkBaseInStructure const *>( &std::get<0>( rhs ) ) );
+      return *this;
+    }
+
+    // Move assignment is deleted; only copy assignment (with relinking) exists.
+    StructureChain & operator=( StructureChain && rhs ) = delete;
+
+    // Access the Which-th occurrence of element type T (defaults: head, 0).
+    template <typename T = typename std::tuple_element<0, std::tuple<ChainElements...>>::type, size_t Which = 0>
+    T & get() VULKAN_HPP_NOEXCEPT
+    {
+      return std::get<ChainElementIndex<0, T, Which, void, ChainElements...>::value>(
+        static_cast<std::tuple<ChainElements...> &>( *this ) );
+    }
+
+    // Const overload of the typed get().
+    template <typename T = typename std::tuple_element<0, std::tuple<ChainElements...>>::type, size_t Which = 0>
+    T const & get() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::get<ChainElementIndex<0, T, Which, void, ChainElements...>::value>(
+        static_cast<std::tuple<ChainElements...> const &>( *this ) );
+    }
+
+    // Fetch several element types at once as a tuple of references.
+    template <typename T0, typename T1, typename... Ts>
+    std::tuple<T0 &, T1 &, Ts &...> get() VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( get<T0>(), get<T1>(), get<Ts>()... );
+    }
+
+    // Const overload of the multi-element get().
+    template <typename T0, typename T1, typename... Ts>
+    std::tuple<T0 const &, T1 const &, Ts const &...> get() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( get<T0>(), get<T1>(), get<Ts>()... );
+    }
+
+    // isLinked for the head element: the head is always part of the chain.
+    template <typename ClassType, size_t Which = 0>
+    typename std::enable_if<
+      std::is_same<ClassType, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value &&
+        ( Which == 0 ),
+      bool>::type
+      isLinked() const VULKAN_HPP_NOEXCEPT
+    {
+      return true;
+    }
+
+    // isLinked for non-head elements: walk the runtime pNext chain and report
+    // whether this element is currently reachable from the head.
+    template <typename ClassType, size_t Which = 0>
+    typename std::enable_if<
+      !std::is_same<ClassType, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value ||
+        ( Which != 0 ),
+      bool>::type
+      isLinked() const VULKAN_HPP_NOEXCEPT
+    {
+      static_assert( IsPartOfStructureChain<ClassType, ChainElements...>::valid,
+                     "Can't unlink Structure that's not part of this StructureChain!" );
+      return isLinked( reinterpret_cast<VkBaseInStructure const *>( &get<ClassType, Which>() ) );
+    }
+
+    // Re-inserts a previously unlinked element directly after the head.
+    // Debug-asserts that the element is not already linked.
+    template <typename ClassType, size_t Which = 0>
+    typename std::enable_if<
+      !std::is_same<ClassType, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value ||
+        ( Which != 0 ),
+      void>::type relink() VULKAN_HPP_NOEXCEPT
+    {
+      static_assert( IsPartOfStructureChain<ClassType, ChainElements...>::valid,
+                     "Can't relink Structure that's not part of this StructureChain!" );
+      auto pNext = reinterpret_cast<VkBaseInStructure *>( &get<ClassType, Which>() );
+      VULKAN_HPP_ASSERT( !isLinked( pNext ) );
+      auto & headElement = std::get<0>( static_cast<std::tuple<ChainElements...> &>( *this ) );
+      pNext->pNext       = reinterpret_cast<VkBaseInStructure const *>( headElement.pNext );
+      headElement.pNext  = pNext;
+    }
+
+    // Removes an element from the runtime pNext chain (the element object
+    // itself stays in the tuple and can be relinked later).
+    template <typename ClassType, size_t Which = 0>
+    typename std::enable_if<
+      !std::is_same<ClassType, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value ||
+        ( Which != 0 ),
+      void>::type unlink() VULKAN_HPP_NOEXCEPT
+    {
+      static_assert( IsPartOfStructureChain<ClassType, ChainElements...>::valid,
+                     "Can't unlink Structure that's not part of this StructureChain!" );
+      unlink( reinterpret_cast<VkBaseOutStructure const *>( &get<ClassType, Which>() ) );
+    }
+
+  private:
+    // Maps (element type T, occurrence Which) to its tuple index. The unnamed
+    // 4th parameter is an enable_if slot used to steer the specializations.
+    template <int Index, typename T, int Which, typename, class First, class... Types>
+    struct ChainElementIndex : ChainElementIndex<Index + 1, T, Which, void, Types...>
+    {};
+
+    // Head type differs from T: advance to the next element.
+    template <int Index, typename T, int Which, class First, class... Types>
+    struct ChainElementIndex<Index,
+                             T,
+                             Which,
+                             typename std::enable_if<!std::is_same<T, First>::value, void>::type,
+                             First,
+                             Types...> : ChainElementIndex<Index + 1, T, Which, void, Types...>
+    {};
+
+    // Head type matches T but this is not yet the wanted occurrence:
+    // decrement Which and keep scanning.
+    template <int Index, typename T, int Which, class First, class... Types>
+    struct ChainElementIndex<Index,
+                             T,
+                             Which,
+                             typename std::enable_if<std::is_same<T, First>::value, void>::type,
+                             First,
+                             Types...> : ChainElementIndex<Index + 1, T, Which - 1, void, Types...>
+    {};
+
+    // Head type matches T and Which reached 0: Index is the answer.
+    template <int Index, typename T, class First, class... Types>
+    struct ChainElementIndex<Index,
+                             T,
+                             0,
+                             typename std::enable_if<std::is_same<T, First>::value, void>::type,
+                             First,
+                             Types...> : std::integral_constant<int, Index>
+    {};
+
+    // Runtime scan of the pNext chain starting at the head element.
+    bool isLinked( VkBaseInStructure const * pNext ) const VULKAN_HPP_NOEXCEPT
+    {
+      VkBaseInStructure const * elementPtr = reinterpret_cast<VkBaseInStructure const *>(
+        &std::get<0>( static_cast<std::tuple<ChainElements...> const &>( *this ) ) );
+      while ( elementPtr )
+      {
+        if ( elementPtr->pNext == pNext )
+        {
+          return true;
+        }
+        elementPtr = elementPtr->pNext;
+      }
+      return false;
+    }
+
+    // Compile-time recursion: point element Index-1's pNext at element Index.
+    template <size_t Index>
+    typename std::enable_if<Index != 0, void>::type link() VULKAN_HPP_NOEXCEPT
+    {
+      auto & x = std::get<Index - 1>( static_cast<std::tuple<ChainElements...> &>( *this ) );
+      x.pNext  = &std::get<Index>( static_cast<std::tuple<ChainElements...> &>( *this ) );
+      link<Index - 1>();
+    }
+
+    // Recursion terminator for link<Index>().
+    template <size_t Index>
+    typename std::enable_if<Index == 0, void>::type link() VULKAN_HPP_NOEXCEPT
+    {}
+
+    // Rebases a source chain onto this object's storage: for every link in
+    // src, the same byte offset from dstBase becomes the corresponding dst
+    // link. Preserves any unlinked-element gaps from the source chain.
+    void link( void * dstBase, void const * srcBase, VkBaseOutStructure * dst, VkBaseInStructure const * src )
+    {
+      while ( src->pNext )
+      {
+        std::ptrdiff_t offset =
+          reinterpret_cast<char const *>( src->pNext ) - reinterpret_cast<char const *>( srcBase );
+        dst->pNext = reinterpret_cast<VkBaseOutStructure *>( reinterpret_cast<char *>( dstBase ) + offset );
+        dst        = dst->pNext;
+        src        = src->pNext;
+      }
+      dst->pNext = nullptr;
+    }
+
+    // Removes pNext from the runtime chain by repointing its predecessor.
+    void unlink( VkBaseOutStructure const * pNext ) VULKAN_HPP_NOEXCEPT
+    {
+      VkBaseOutStructure * elementPtr =
+        reinterpret_cast<VkBaseOutStructure *>( &std::get<0>( static_cast<std::tuple<ChainElements...> &>( *this ) ) );
+      while ( elementPtr && ( elementPtr->pNext != pNext ) )
+      {
+        elementPtr = elementPtr->pNext;
+      }
+      if ( elementPtr )
+      {
+        elementPtr->pNext = pNext->pNext;
+      }
+      else
+      {
+        VULKAN_HPP_ASSERT( false );  // fires, if the ClassType member has already been unlinked !
+      }
+    }
+  };
+
+#if !defined( VULKAN_HPP_NO_SMART_HANDLE )
+  // Forward declaration; per-handle specializations (generated elsewhere in
+  // this header) provide the `deleter` type UniqueHandle derives from.
+  template <typename Type, typename Dispatch>
+  class UniqueHandleTraits;
+
+  // unique_ptr-style RAII owner for a Vulkan handle. The deleter is stored as
+  // a base class (empty-base optimization) and its destroy() is invoked on a
+  // non-null handle at destruction and on reset(). Move-only.
+  template <typename Type, typename Dispatch>
+  class UniqueHandle : public UniqueHandleTraits<Type, Dispatch>::deleter
+  {
+  private:
+    using Deleter = typename UniqueHandleTraits<Type, Dispatch>::deleter;
+
+  public:
+    using element_type = Type;
+
+    // Owns nothing.
+    UniqueHandle()
+      : Deleter()
+      , m_value()
+    {}
+
+    // Takes ownership of `value`, to be destroyed with `deleter`.
+    explicit UniqueHandle( Type const & value, Deleter const & deleter = Deleter() ) VULKAN_HPP_NOEXCEPT
+      : Deleter( deleter )
+      , m_value( value )
+    {}
+
+    UniqueHandle( UniqueHandle const & ) = delete;
+
+    // Transfers ownership; `other` is left empty via release().
+    UniqueHandle( UniqueHandle && other ) VULKAN_HPP_NOEXCEPT
+      : Deleter( std::move( static_cast<Deleter &>( other ) ) )
+      , m_value( other.release() )
+    {}
+
+    // Destroys the owned handle, if any, through the deleter base.
+    ~UniqueHandle() VULKAN_HPP_NOEXCEPT
+    {
+      if ( m_value )
+      {
+        this->destroy( m_value );
+      }
+    }
+
+    UniqueHandle & operator=( UniqueHandle const & ) = delete;
+
+    // Destroys the current handle (via reset) and adopts other's handle
+    // and deleter.
+    UniqueHandle & operator=( UniqueHandle && other ) VULKAN_HPP_NOEXCEPT
+    {
+      reset( other.release() );
+      *static_cast<Deleter *>( this ) = std::move( static_cast<Deleter &>( other ) );
+      return *this;
+    }
+
+    // True when a handle is owned.
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_value.operator bool();
+    }
+
+    Type const * operator->() const VULKAN_HPP_NOEXCEPT
+    {
+      return &m_value;
+    }
+
+    Type * operator->() VULKAN_HPP_NOEXCEPT
+    {
+      return &m_value;
+    }
+
+    Type const & operator*() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_value;
+    }
+
+    Type & operator*() VULKAN_HPP_NOEXCEPT
+    {
+      return m_value;
+    }
+
+    // Access the owned handle without transferring ownership.
+    const Type & get() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_value;
+    }
+
+    Type & get() VULKAN_HPP_NOEXCEPT
+    {
+      return m_value;
+    }
+
+    // Destroys the current handle (unless equal to `value`) and owns `value`.
+    void reset( Type const & value = Type() ) VULKAN_HPP_NOEXCEPT
+    {
+      if ( m_value != value )
+      {
+        if ( m_value )
+        {
+          this->destroy( m_value );
+        }
+        m_value = value;
+      }
+    }
+
+    // Relinquishes ownership: returns the handle and leaves this empty.
+    Type release() VULKAN_HPP_NOEXCEPT
+    {
+      Type value = m_value;
+      m_value    = nullptr;
+      return value;
+    }
+
+    // Exchanges handle and deleter state with rhs.
+    void swap( UniqueHandle<Type, Dispatch> & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      std::swap( m_value, rhs.m_value );
+      std::swap( static_cast<Deleter &>( *this ), static_cast<Deleter &>( rhs ) );
+    }
+
+  private:
+    Type m_value;  // the owned handle; default/empty when not owning
+  };
+
+  // Copies the raw handles out of a vector of unique handles. Ownership is
+  // NOT transferred (uses get(), not release()); the unique handles must
+  // outlive any use of the returned raw handles.
+  template <typename UniqueType>
+  VULKAN_HPP_INLINE std::vector<typename UniqueType::element_type>
+                    uniqueToRaw( std::vector<UniqueType> const & handles )
+  {
+    std::vector<typename UniqueType::element_type> newBuffer( handles.size() );
+    std::transform( handles.begin(), handles.end(), newBuffer.begin(), []( UniqueType const & handle ) { return handle.get(); } );
+    return newBuffer;
+  }
+
+  // Free-function swap so UniqueHandle works with ADL-based swap idioms.
+  template <typename Type, typename Dispatch>
+  VULKAN_HPP_INLINE void swap( UniqueHandle<Type, Dispatch> & lhs,
+                               UniqueHandle<Type, Dispatch> & rhs ) VULKAN_HPP_NOEXCEPT
+  {
+    lhs.swap( rhs );
+  }
+#endif
+#endif  // VULKAN_HPP_DISABLE_ENHANCED_MODE
+
+  // Common base for the dispatch loaders. In debug builds (!NDEBUG) it records
+  // the VK_HEADER_VERSION it was compiled against plus a validity flag; the
+  // nullptr_t constructor produces an intentionally invalid loader.
+  class DispatchLoaderBase
+  {
+  public:
+    DispatchLoaderBase() = default;
+    // Constructs an invalid (debug-flagged) loader from nullptr.
+    DispatchLoaderBase( std::nullptr_t )
+#if !defined( NDEBUG )
+      : m_valid( false )
+#endif
+    {}
+
+#if !defined( NDEBUG )
+    // Header version captured at construction; asserts the loader is valid.
+    size_t getVkHeaderVersion() const
+    {
+      VULKAN_HPP_ASSERT( m_valid );
+      return vkHeaderVersion;
+    }
+
+  private:
+    size_t vkHeaderVersion = VK_HEADER_VERSION;
+    bool   m_valid         = true;
+#endif
+  };
+
+
+#if !defined( VK_NO_PROTOTYPES )
+  class DispatchLoaderStatic : public DispatchLoaderBase
+  {
+  public:
+
+  //=== VK_VERSION_1_0 ===
+
+
+    VkResult vkCreateInstance( const VkInstanceCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkInstance * pInstance ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateInstance( pCreateInfo, pAllocator, pInstance );
+    }
+
+
+    void vkDestroyInstance( VkInstance instance, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyInstance( instance, pAllocator );
+    }
+
+
+    VkResult vkEnumeratePhysicalDevices( VkInstance instance, uint32_t * pPhysicalDeviceCount, VkPhysicalDevice * pPhysicalDevices ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkEnumeratePhysicalDevices( instance, pPhysicalDeviceCount, pPhysicalDevices );
+    }
+
+
+    void vkGetPhysicalDeviceFeatures( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures * pFeatures ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceFeatures( physicalDevice, pFeatures );
+    }
+
+
+    void vkGetPhysicalDeviceFormatProperties( VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties * pFormatProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceFormatProperties( physicalDevice, format, pFormatProperties );
+    }
+
+
+    VkResult vkGetPhysicalDeviceImageFormatProperties( VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties * pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceImageFormatProperties( physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties );
+    }
+
+
+    void vkGetPhysicalDeviceProperties( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties * pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceProperties( physicalDevice, pProperties );
+    }
+
+
+    void vkGetPhysicalDeviceQueueFamilyProperties( VkPhysicalDevice physicalDevice, uint32_t * pQueueFamilyPropertyCount, VkQueueFamilyProperties * pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceQueueFamilyProperties( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties );
+    }
+
+
+    void vkGetPhysicalDeviceMemoryProperties( VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties * pMemoryProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceMemoryProperties( physicalDevice, pMemoryProperties );
+    }
+
+
+    PFN_vkVoidFunction vkGetInstanceProcAddr( VkInstance instance, const char * pName ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetInstanceProcAddr( instance, pName );
+    }
+
+
+    PFN_vkVoidFunction vkGetDeviceProcAddr( VkDevice device, const char * pName ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceProcAddr( device, pName );
+    }
+
+
+    VkResult vkCreateDevice( VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkDevice * pDevice ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateDevice( physicalDevice, pCreateInfo, pAllocator, pDevice );
+    }
+
+
+    void vkDestroyDevice( VkDevice device, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyDevice( device, pAllocator );
+    }
+
+
+    VkResult vkEnumerateInstanceExtensionProperties( const char * pLayerName, uint32_t * pPropertyCount, VkExtensionProperties * pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, pProperties );
+    }
+
+
+    VkResult vkEnumerateDeviceExtensionProperties( VkPhysicalDevice physicalDevice, const char * pLayerName, uint32_t * pPropertyCount, VkExtensionProperties * pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkEnumerateDeviceExtensionProperties( physicalDevice, pLayerName, pPropertyCount, pProperties );
+    }
+
+
+    VkResult vkEnumerateInstanceLayerProperties( uint32_t * pPropertyCount, VkLayerProperties * pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkEnumerateInstanceLayerProperties( pPropertyCount, pProperties );
+    }
+
+
+    VkResult vkEnumerateDeviceLayerProperties( VkPhysicalDevice physicalDevice, uint32_t * pPropertyCount, VkLayerProperties * pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkEnumerateDeviceLayerProperties( physicalDevice, pPropertyCount, pProperties );
+    }
+
+
+    void vkGetDeviceQueue( VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue * pQueue ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceQueue( device, queueFamilyIndex, queueIndex, pQueue );
+    }
+
+
+    VkResult vkQueueSubmit( VkQueue queue, uint32_t submitCount, const VkSubmitInfo * pSubmits, VkFence fence ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkQueueSubmit( queue, submitCount, pSubmits, fence );
+    }
+
+
+    VkResult vkQueueWaitIdle( VkQueue queue ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkQueueWaitIdle( queue );
+    }
+
+
+    VkResult vkDeviceWaitIdle( VkDevice device ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDeviceWaitIdle( device );
+    }
+
+
+    VkResult vkAllocateMemory( VkDevice device, const VkMemoryAllocateInfo * pAllocateInfo, const VkAllocationCallbacks * pAllocator, VkDeviceMemory * pMemory ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkAllocateMemory( device, pAllocateInfo, pAllocator, pMemory );
+    }
+
+
+    void vkFreeMemory( VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkFreeMemory( device, memory, pAllocator );
+    }
+
+
+    VkResult vkMapMemory( VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void ** ppData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkMapMemory( device, memory, offset, size, flags, ppData );
+    }
+
+
+    void vkUnmapMemory( VkDevice device, VkDeviceMemory memory ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkUnmapMemory( device, memory );
+    }
+
+
+    VkResult vkFlushMappedMemoryRanges( VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange * pMemoryRanges ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkFlushMappedMemoryRanges( device, memoryRangeCount, pMemoryRanges );
+    }
+
+
+    VkResult vkInvalidateMappedMemoryRanges( VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange * pMemoryRanges ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkInvalidateMappedMemoryRanges( device, memoryRangeCount, pMemoryRanges );
+    }
+
+
+    void vkGetDeviceMemoryCommitment( VkDevice device, VkDeviceMemory memory, VkDeviceSize * pCommittedMemoryInBytes ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceMemoryCommitment( device, memory, pCommittedMemoryInBytes );
+    }
+
+
+    VkResult vkBindBufferMemory( VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkBindBufferMemory( device, buffer, memory, memoryOffset );
+    }
+
+
+    VkResult vkBindImageMemory( VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkBindImageMemory( device, image, memory, memoryOffset );
+    }
+
+
+    void vkGetBufferMemoryRequirements( VkDevice device, VkBuffer buffer, VkMemoryRequirements * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetBufferMemoryRequirements( device, buffer, pMemoryRequirements );
+    }
+
+
+    void vkGetImageMemoryRequirements( VkDevice device, VkImage image, VkMemoryRequirements * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetImageMemoryRequirements( device, image, pMemoryRequirements );
+    }
+
+
+    void vkGetImageSparseMemoryRequirements( VkDevice device, VkImage image, uint32_t * pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetImageSparseMemoryRequirements( device, image, pSparseMemoryRequirementCount, pSparseMemoryRequirements );
+    }
+
+
+    void vkGetPhysicalDeviceSparseImageFormatProperties( VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t * pPropertyCount, VkSparseImageFormatProperties * pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceSparseImageFormatProperties( physicalDevice, format, type, samples, usage, tiling, pPropertyCount, pProperties );
+    }
+
+
+    VkResult vkQueueBindSparse( VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo * pBindInfo, VkFence fence ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkQueueBindSparse( queue, bindInfoCount, pBindInfo, fence );
+    }
+
+
+    VkResult vkCreateFence( VkDevice device, const VkFenceCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkFence * pFence ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateFence( device, pCreateInfo, pAllocator, pFence );
+    }
+
+
+    void vkDestroyFence( VkDevice device, VkFence fence, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyFence( device, fence, pAllocator );
+    }
+
+
+    VkResult vkResetFences( VkDevice device, uint32_t fenceCount, const VkFence * pFences ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkResetFences( device, fenceCount, pFences );
+    }
+
+
+    VkResult vkGetFenceStatus( VkDevice device, VkFence fence ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetFenceStatus( device, fence );
+    }
+
+
+    VkResult vkWaitForFences( VkDevice device, uint32_t fenceCount, const VkFence * pFences, VkBool32 waitAll, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkWaitForFences( device, fenceCount, pFences, waitAll, timeout );
+    }
+
+
+    VkResult vkCreateSemaphore( VkDevice device, const VkSemaphoreCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkSemaphore * pSemaphore ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateSemaphore( device, pCreateInfo, pAllocator, pSemaphore );
+    }
+
+
+    void vkDestroySemaphore( VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroySemaphore( device, semaphore, pAllocator );
+    }
+
+
+    VkResult vkCreateEvent( VkDevice device, const VkEventCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkEvent * pEvent ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateEvent( device, pCreateInfo, pAllocator, pEvent );
+    }
+
+
+    void vkDestroyEvent( VkDevice device, VkEvent event, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyEvent( device, event, pAllocator );
+    }
+
+
+    VkResult vkGetEventStatus( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetEventStatus( device, event );
+    }
+
+
+    VkResult vkSetEvent( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkSetEvent( device, event );
+    }
+
+
+    VkResult vkResetEvent( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkResetEvent( device, event );
+    }
+
+
+    VkResult vkCreateQueryPool( VkDevice device, const VkQueryPoolCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkQueryPool * pQueryPool ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateQueryPool( device, pCreateInfo, pAllocator, pQueryPool );
+    }
+
+
+    void vkDestroyQueryPool( VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyQueryPool( device, queryPool, pAllocator );
+    }
+
+
+    VkResult vkGetQueryPoolResults( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void * pData, VkDeviceSize stride, VkQueryResultFlags flags ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetQueryPoolResults( device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags );
+    }
+
+
+    VkResult vkCreateBuffer( VkDevice device, const VkBufferCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkBuffer * pBuffer ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateBuffer( device, pCreateInfo, pAllocator, pBuffer );
+    }
+
+
+    void vkDestroyBuffer( VkDevice device, VkBuffer buffer, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyBuffer( device, buffer, pAllocator );
+    }
+
+
+    VkResult vkCreateBufferView( VkDevice device, const VkBufferViewCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkBufferView * pView ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateBufferView( device, pCreateInfo, pAllocator, pView );
+    }
+
+
+    void vkDestroyBufferView( VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyBufferView( device, bufferView, pAllocator );
+    }
+
+
+    VkResult vkCreateImage( VkDevice device, const VkImageCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkImage * pImage ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateImage( device, pCreateInfo, pAllocator, pImage );
+    }
+
+
+    void vkDestroyImage( VkDevice device, VkImage image, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyImage( device, image, pAllocator );
+    }
+
+
+    void vkGetImageSubresourceLayout( VkDevice device, VkImage image, const VkImageSubresource * pSubresource, VkSubresourceLayout * pLayout ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetImageSubresourceLayout( device, image, pSubresource, pLayout );
+    }
+
+
+    VkResult vkCreateImageView( VkDevice device, const VkImageViewCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkImageView * pView ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateImageView( device, pCreateInfo, pAllocator, pView );
+    }
+
+
+    void vkDestroyImageView( VkDevice device, VkImageView imageView, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyImageView( device, imageView, pAllocator );
+    }
+
+
+    VkResult vkCreateShaderModule( VkDevice device, const VkShaderModuleCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkShaderModule * pShaderModule ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateShaderModule( device, pCreateInfo, pAllocator, pShaderModule );
+    }
+
+
+    void vkDestroyShaderModule( VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyShaderModule( device, shaderModule, pAllocator );
+    }
+
+
+    VkResult vkCreatePipelineCache( VkDevice device, const VkPipelineCacheCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkPipelineCache * pPipelineCache ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreatePipelineCache( device, pCreateInfo, pAllocator, pPipelineCache );
+    }
+
+
+    void vkDestroyPipelineCache( VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyPipelineCache( device, pipelineCache, pAllocator );
+    }
+
+
+    VkResult vkGetPipelineCacheData( VkDevice device, VkPipelineCache pipelineCache, size_t * pDataSize, void * pData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPipelineCacheData( device, pipelineCache, pDataSize, pData );
+    }
+
+
+    VkResult vkMergePipelineCaches( VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, const VkPipelineCache * pSrcCaches ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkMergePipelineCaches( device, dstCache, srcCacheCount, pSrcCaches );
+    }
+
+
+    VkResult vkCreateGraphicsPipelines( VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo * pCreateInfos, const VkAllocationCallbacks * pAllocator, VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateGraphicsPipelines( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines );
+    }
+
+
+    VkResult vkCreateComputePipelines( VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo * pCreateInfos, const VkAllocationCallbacks * pAllocator, VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateComputePipelines( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines );
+    }
+
+
+    void vkDestroyPipeline( VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyPipeline( device, pipeline, pAllocator );
+    }
+
+
+    VkResult vkCreatePipelineLayout( VkDevice device, const VkPipelineLayoutCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkPipelineLayout * pPipelineLayout ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreatePipelineLayout( device, pCreateInfo, pAllocator, pPipelineLayout );
+    }
+
+
+    void vkDestroyPipelineLayout( VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyPipelineLayout( device, pipelineLayout, pAllocator );
+    }
+
+
+    VkResult vkCreateSampler( VkDevice device, const VkSamplerCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkSampler * pSampler ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateSampler( device, pCreateInfo, pAllocator, pSampler );
+    }
+
+
+    void vkDestroySampler( VkDevice device, VkSampler sampler, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroySampler( device, sampler, pAllocator );
+    }
+
+
+    VkResult vkCreateDescriptorSetLayout( VkDevice device, const VkDescriptorSetLayoutCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkDescriptorSetLayout * pSetLayout ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateDescriptorSetLayout( device, pCreateInfo, pAllocator, pSetLayout );
+    }
+
+
+    void vkDestroyDescriptorSetLayout( VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyDescriptorSetLayout( device, descriptorSetLayout, pAllocator );
+    }
+
+
+    VkResult vkCreateDescriptorPool( VkDevice device, const VkDescriptorPoolCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkDescriptorPool * pDescriptorPool ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateDescriptorPool( device, pCreateInfo, pAllocator, pDescriptorPool );
+    }
+
+
+    void vkDestroyDescriptorPool( VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyDescriptorPool( device, descriptorPool, pAllocator );
+    }
+
+
+    VkResult vkResetDescriptorPool( VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkResetDescriptorPool( device, descriptorPool, flags );
+    }
+
+
+    VkResult vkAllocateDescriptorSets( VkDevice device, const VkDescriptorSetAllocateInfo * pAllocateInfo, VkDescriptorSet * pDescriptorSets ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkAllocateDescriptorSets( device, pAllocateInfo, pDescriptorSets );
+    }
+
+
+    VkResult vkFreeDescriptorSets( VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount, const VkDescriptorSet * pDescriptorSets ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkFreeDescriptorSets( device, descriptorPool, descriptorSetCount, pDescriptorSets );
+    }
+
+
+    void vkUpdateDescriptorSets( VkDevice device, uint32_t descriptorWriteCount, const VkWriteDescriptorSet * pDescriptorWrites, uint32_t descriptorCopyCount, const VkCopyDescriptorSet * pDescriptorCopies ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkUpdateDescriptorSets( device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies );
+    }
+
+
+    VkResult vkCreateFramebuffer( VkDevice device, const VkFramebufferCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkFramebuffer * pFramebuffer ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateFramebuffer( device, pCreateInfo, pAllocator, pFramebuffer );
+    }
+
+
+    void vkDestroyFramebuffer( VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyFramebuffer( device, framebuffer, pAllocator );
+    }
+
+
+    VkResult vkCreateRenderPass( VkDevice device, const VkRenderPassCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkRenderPass * pRenderPass ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateRenderPass( device, pCreateInfo, pAllocator, pRenderPass );
+    }
+
+
+    void vkDestroyRenderPass( VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyRenderPass( device, renderPass, pAllocator );
+    }
+
+
+    void vkGetRenderAreaGranularity( VkDevice device, VkRenderPass renderPass, VkExtent2D * pGranularity ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetRenderAreaGranularity( device, renderPass, pGranularity );
+    }
+
+
+    VkResult vkCreateCommandPool( VkDevice device, const VkCommandPoolCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkCommandPool * pCommandPool ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateCommandPool( device, pCreateInfo, pAllocator, pCommandPool );
+    }
+
+
+    void vkDestroyCommandPool( VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyCommandPool( device, commandPool, pAllocator );
+    }
+
+
+    VkResult vkResetCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkResetCommandPool( device, commandPool, flags );
+    }
+
+
+    VkResult vkAllocateCommandBuffers( VkDevice device, const VkCommandBufferAllocateInfo * pAllocateInfo, VkCommandBuffer * pCommandBuffers ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkAllocateCommandBuffers( device, pAllocateInfo, pCommandBuffers );
+    }
+
+
+    void vkFreeCommandBuffers( VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount, const VkCommandBuffer * pCommandBuffers ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkFreeCommandBuffers( device, commandPool, commandBufferCount, pCommandBuffers );
+    }
+
+
+    VkResult vkBeginCommandBuffer( VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo * pBeginInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkBeginCommandBuffer( commandBuffer, pBeginInfo );
+    }
+
+
+    VkResult vkEndCommandBuffer( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkEndCommandBuffer( commandBuffer );
+    }
+
+
+    VkResult vkResetCommandBuffer( VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkResetCommandBuffer( commandBuffer, flags );
+    }
+
+
+    void vkCmdBindPipeline( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBindPipeline( commandBuffer, pipelineBindPoint, pipeline );
+    }
+
+
+    void vkCmdSetViewport( VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport * pViewports ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetViewport( commandBuffer, firstViewport, viewportCount, pViewports );
+    }
+
+
+    void vkCmdSetScissor( VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D * pScissors ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetScissor( commandBuffer, firstScissor, scissorCount, pScissors );
+    }
+
+
+    void vkCmdSetLineWidth( VkCommandBuffer commandBuffer, float lineWidth ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetLineWidth( commandBuffer, lineWidth );
+    }
+
+
+    void vkCmdSetDepthBias( VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetDepthBias( commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
+    }
+
+
+    void vkCmdSetBlendConstants( VkCommandBuffer commandBuffer, const float blendConstants[4] ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetBlendConstants( commandBuffer, blendConstants );
+    }
+
+
+    void vkCmdSetDepthBounds( VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetDepthBounds( commandBuffer, minDepthBounds, maxDepthBounds );
+    }
+
+
+    void vkCmdSetStencilCompareMask( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetStencilCompareMask( commandBuffer, faceMask, compareMask );
+    }
+
+
+    void vkCmdSetStencilWriteMask( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetStencilWriteMask( commandBuffer, faceMask, writeMask );
+    }
+
+
+    void vkCmdSetStencilReference( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetStencilReference( commandBuffer, faceMask, reference );
+    }
+
+
+    void vkCmdBindDescriptorSets( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VkDescriptorSet * pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t * pDynamicOffsets ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBindDescriptorSets( commandBuffer, pipelineBindPoint, layout, firstSet, descriptorSetCount, pDescriptorSets, dynamicOffsetCount, pDynamicOffsets );
+    }
+
+
+    void vkCmdBindIndexBuffer( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBindIndexBuffer( commandBuffer, buffer, offset, indexType );
+    }
+
+
+    void vkCmdBindVertexBuffers( VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer * pBuffers, const VkDeviceSize * pOffsets ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBindVertexBuffers( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets );
+    }
+
+
+    void vkCmdDraw( VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDraw( commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance );
+    }
+
+
+    void vkCmdDrawIndexed( VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawIndexed( commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance );
+    }
+
+
+    void vkCmdDrawIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawIndirect( commandBuffer, buffer, offset, drawCount, stride );
+    }
+
+
+    void vkCmdDrawIndexedIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawIndexedIndirect( commandBuffer, buffer, offset, drawCount, stride );
+    }
+
+
+    void vkCmdDispatch( VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDispatch( commandBuffer, groupCountX, groupCountY, groupCountZ );
+    }
+
+
+    void vkCmdDispatchIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDispatchIndirect( commandBuffer, buffer, offset );
+    }
+
+
+    void vkCmdCopyBuffer( VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy * pRegions ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCopyBuffer( commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions );
+    }
+
+
+    void vkCmdCopyImage( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy * pRegions ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCopyImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions );
+    }
+
+
+    void vkCmdBlitImage( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit * pRegions, VkFilter filter ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBlitImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter );
+    }
+
+
+    void vkCmdCopyBufferToImage( VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkBufferImageCopy * pRegions ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCopyBufferToImage( commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions );
+    }
+
+
+    void vkCmdCopyImageToBuffer( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy * pRegions ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCopyImageToBuffer( commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions );
+    }
+
+
+    void vkCmdUpdateBuffer( VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void * pData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdUpdateBuffer( commandBuffer, dstBuffer, dstOffset, dataSize, pData );
+    }
+
+
+    void vkCmdFillBuffer( VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdFillBuffer( commandBuffer, dstBuffer, dstOffset, size, data );
+    }
+
+
+    void vkCmdClearColorImage( VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue * pColor, uint32_t rangeCount, const VkImageSubresourceRange * pRanges ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdClearColorImage( commandBuffer, image, imageLayout, pColor, rangeCount, pRanges );
+    }
+
+
+    void vkCmdClearDepthStencilImage( VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue * pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange * pRanges ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdClearDepthStencilImage( commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges );
+    }
+
+
+    void vkCmdClearAttachments( VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkClearAttachment * pAttachments, uint32_t rectCount, const VkClearRect * pRects ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdClearAttachments( commandBuffer, attachmentCount, pAttachments, rectCount, pRects );
+    }
+
+
+    void vkCmdResolveImage( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageResolve * pRegions ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdResolveImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions );
+    }
+
+
+    void vkCmdSetEvent( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetEvent( commandBuffer, event, stageMask );
+    }
+
+
+    void vkCmdResetEvent( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdResetEvent( commandBuffer, event, stageMask );
+    }
+
+
+    void vkCmdWaitEvents( VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent * pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VkMemoryBarrier * pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier * pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier * pImageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdWaitEvents( commandBuffer, eventCount, pEvents, srcStageMask, dstStageMask, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers );
+    }
+
+
+    void vkCmdPipelineBarrier( VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VkMemoryBarrier * pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier * pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier * pImageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdPipelineBarrier( commandBuffer, srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers );
+    }
+
+
+    void vkCmdBeginQuery( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBeginQuery( commandBuffer, queryPool, query, flags );
+    }
+
+
+    void vkCmdEndQuery( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdEndQuery( commandBuffer, queryPool, query );
+    }
+
+
+    void vkCmdResetQueryPool( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdResetQueryPool( commandBuffer, queryPool, firstQuery, queryCount );
+    }
+
+
+    void vkCmdWriteTimestamp( VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdWriteTimestamp( commandBuffer, pipelineStage, queryPool, query );
+    }
+
+
+    void vkCmdCopyQueryPoolResults( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride, VkQueryResultFlags flags ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCopyQueryPoolResults( commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags );
+    }
+
+
+    void vkCmdPushConstants( VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void * pValues ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdPushConstants( commandBuffer, layout, stageFlags, offset, size, pValues );
+    }
+
+
+    void vkCmdBeginRenderPass( VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo * pRenderPassBegin, VkSubpassContents contents ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBeginRenderPass( commandBuffer, pRenderPassBegin, contents );
+    }
+
+
+    void vkCmdNextSubpass( VkCommandBuffer commandBuffer, VkSubpassContents contents ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdNextSubpass( commandBuffer, contents );
+    }
+
+
+    void vkCmdEndRenderPass( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdEndRenderPass( commandBuffer );
+    }
+
+
+    void vkCmdExecuteCommands( VkCommandBuffer commandBuffer, uint32_t commandBufferCount, const VkCommandBuffer * pCommandBuffers ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdExecuteCommands( commandBuffer, commandBufferCount, pCommandBuffers );
+    }
+
+  //=== VK_VERSION_1_1 ===
+
+
+    VkResult vkEnumerateInstanceVersion( uint32_t * pApiVersion ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkEnumerateInstanceVersion( pApiVersion );
+    }
+
+
+    VkResult vkBindBufferMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkBindBufferMemory2( device, bindInfoCount, pBindInfos );
+    }
+
+
+    VkResult vkBindImageMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkBindImageMemory2( device, bindInfoCount, pBindInfos );
+    }
+
+
+    void vkGetDeviceGroupPeerMemoryFeatures( VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags * pPeerMemoryFeatures ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceGroupPeerMemoryFeatures( device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures );
+    }
+
+
+    void vkCmdSetDeviceMask( VkCommandBuffer commandBuffer, uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetDeviceMask( commandBuffer, deviceMask );
+    }
+
+
+    void vkCmdDispatchBase( VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDispatchBase( commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
+    }
+
+
+    VkResult vkEnumeratePhysicalDeviceGroups( VkInstance instance, uint32_t * pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkEnumeratePhysicalDeviceGroups( instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties );
+    }
+
+
+    void vkGetImageMemoryRequirements2( VkDevice device, const VkImageMemoryRequirementsInfo2 * pInfo, VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetImageMemoryRequirements2( device, pInfo, pMemoryRequirements );
+    }
+
+
+    void vkGetBufferMemoryRequirements2( VkDevice device, const VkBufferMemoryRequirementsInfo2 * pInfo, VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetBufferMemoryRequirements2( device, pInfo, pMemoryRequirements );
+    }
+
+
+    void vkGetImageSparseMemoryRequirements2( VkDevice device, const VkImageSparseMemoryRequirementsInfo2 * pInfo, uint32_t * pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetImageSparseMemoryRequirements2( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements );
+    }
+
+
+    void vkGetPhysicalDeviceFeatures2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2 * pFeatures ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceFeatures2( physicalDevice, pFeatures );
+    }
+
+
+    void vkGetPhysicalDeviceProperties2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceProperties2( physicalDevice, pProperties );
+    }
+
+
+    void vkGetPhysicalDeviceFormatProperties2( VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2 * pFormatProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceFormatProperties2( physicalDevice, format, pFormatProperties );
+    }
+
+
+    VkResult vkGetPhysicalDeviceImageFormatProperties2( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2 * pImageFormatInfo, VkImageFormatProperties2 * pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceImageFormatProperties2( physicalDevice, pImageFormatInfo, pImageFormatProperties );
+    }
+
+
+    void vkGetPhysicalDeviceQueueFamilyProperties2( VkPhysicalDevice physicalDevice, uint32_t * pQueueFamilyPropertyCount, VkQueueFamilyProperties2 * pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceQueueFamilyProperties2( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties );
+    }
+
+
+    void vkGetPhysicalDeviceMemoryProperties2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2 * pMemoryProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceMemoryProperties2( physicalDevice, pMemoryProperties );
+    }
+
+
+    void vkGetPhysicalDeviceSparseImageFormatProperties2( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, uint32_t * pPropertyCount, VkSparseImageFormatProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceSparseImageFormatProperties2( physicalDevice, pFormatInfo, pPropertyCount, pProperties );
+    }
+
+
+    void vkTrimCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkTrimCommandPool( device, commandPool, flags );
+    }
+
+
+    void vkGetDeviceQueue2( VkDevice device, const VkDeviceQueueInfo2 * pQueueInfo, VkQueue * pQueue ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceQueue2( device, pQueueInfo, pQueue );
+    }
+
+
+    VkResult vkCreateSamplerYcbcrConversion( VkDevice device, const VkSamplerYcbcrConversionCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkSamplerYcbcrConversion * pYcbcrConversion ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateSamplerYcbcrConversion( device, pCreateInfo, pAllocator, pYcbcrConversion );
+    }
+
+
+    void vkDestroySamplerYcbcrConversion( VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroySamplerYcbcrConversion( device, ycbcrConversion, pAllocator );
+    }
+
+
+    VkResult vkCreateDescriptorUpdateTemplate( VkDevice device, const VkDescriptorUpdateTemplateCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkDescriptorUpdateTemplate * pDescriptorUpdateTemplate ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateDescriptorUpdateTemplate( device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate );
+    }
+
+
+    void vkDestroyDescriptorUpdateTemplate( VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyDescriptorUpdateTemplate( device, descriptorUpdateTemplate, pAllocator );
+    }
+
+
+    void vkUpdateDescriptorSetWithTemplate( VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void * pData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkUpdateDescriptorSetWithTemplate( device, descriptorSet, descriptorUpdateTemplate, pData );
+    }
+
+
+    void vkGetPhysicalDeviceExternalBufferProperties( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo * pExternalBufferInfo, VkExternalBufferProperties * pExternalBufferProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceExternalBufferProperties( physicalDevice, pExternalBufferInfo, pExternalBufferProperties );
+    }
+
+
+    void vkGetPhysicalDeviceExternalFenceProperties( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo * pExternalFenceInfo, VkExternalFenceProperties * pExternalFenceProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceExternalFenceProperties( physicalDevice, pExternalFenceInfo, pExternalFenceProperties );
+    }
+
+
+    void vkGetPhysicalDeviceExternalSemaphoreProperties( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, VkExternalSemaphoreProperties * pExternalSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceExternalSemaphoreProperties( physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties );
+    }
+
+
+    void vkGetDescriptorSetLayoutSupport( VkDevice device, const VkDescriptorSetLayoutCreateInfo * pCreateInfo, VkDescriptorSetLayoutSupport * pSupport ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDescriptorSetLayoutSupport( device, pCreateInfo, pSupport );
+    }
+
+  //=== VK_VERSION_1_2 ===
+
+
+    void vkCmdDrawIndirectCount( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawIndirectCount( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
+    }
+
+
+    void vkCmdDrawIndexedIndirectCount( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawIndexedIndirectCount( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
+    }
+
+
+    VkResult vkCreateRenderPass2( VkDevice device, const VkRenderPassCreateInfo2 * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkRenderPass * pRenderPass ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateRenderPass2( device, pCreateInfo, pAllocator, pRenderPass );
+    }
+
+
+    void vkCmdBeginRenderPass2( VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo * pRenderPassBegin, const VkSubpassBeginInfo * pSubpassBeginInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBeginRenderPass2( commandBuffer, pRenderPassBegin, pSubpassBeginInfo );
+    }
+
+
+    void vkCmdNextSubpass2( VkCommandBuffer commandBuffer, const VkSubpassBeginInfo * pSubpassBeginInfo, const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdNextSubpass2( commandBuffer, pSubpassBeginInfo, pSubpassEndInfo );
+    }
+
+
+    void vkCmdEndRenderPass2( VkCommandBuffer commandBuffer, const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdEndRenderPass2( commandBuffer, pSubpassEndInfo );
+    }
+
+
+    void vkResetQueryPool( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkResetQueryPool( device, queryPool, firstQuery, queryCount );
+    }
+
+
+    VkResult vkGetSemaphoreCounterValue( VkDevice device, VkSemaphore semaphore, uint64_t * pValue ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetSemaphoreCounterValue( device, semaphore, pValue );
+    }
+
+
+    VkResult vkWaitSemaphores( VkDevice device, const VkSemaphoreWaitInfo * pWaitInfo, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkWaitSemaphores( device, pWaitInfo, timeout );
+    }
+
+
+    VkResult vkSignalSemaphore( VkDevice device, const VkSemaphoreSignalInfo * pSignalInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkSignalSemaphore( device, pSignalInfo );
+    }
+
+
+    VkDeviceAddress vkGetBufferDeviceAddress( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetBufferDeviceAddress( device, pInfo );
+    }
+
+
+    uint64_t vkGetBufferOpaqueCaptureAddress( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetBufferOpaqueCaptureAddress( device, pInfo );
+    }
+
+
+    uint64_t vkGetDeviceMemoryOpaqueCaptureAddress( VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceMemoryOpaqueCaptureAddress( device, pInfo );
+    }
+
+  //=== VK_VERSION_1_3 ===
+
+
+    VkResult vkGetPhysicalDeviceToolProperties( VkPhysicalDevice physicalDevice, uint32_t * pToolCount, VkPhysicalDeviceToolProperties * pToolProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceToolProperties( physicalDevice, pToolCount, pToolProperties );
+    }
+
+
+    VkResult vkCreatePrivateDataSlot( VkDevice device, const VkPrivateDataSlotCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkPrivateDataSlot * pPrivateDataSlot ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreatePrivateDataSlot( device, pCreateInfo, pAllocator, pPrivateDataSlot );
+    }
+
+
+    void vkDestroyPrivateDataSlot( VkDevice device, VkPrivateDataSlot privateDataSlot, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyPrivateDataSlot( device, privateDataSlot, pAllocator );
+    }
+
+
+    VkResult vkSetPrivateData( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t data ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkSetPrivateData( device, objectType, objectHandle, privateDataSlot, data );
+    }
+
+
+    void vkGetPrivateData( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t * pData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPrivateData( device, objectType, objectHandle, privateDataSlot, pData );
+    }
+
+
+    void vkCmdSetEvent2( VkCommandBuffer commandBuffer, VkEvent event, const VkDependencyInfo * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetEvent2( commandBuffer, event, pDependencyInfo );
+    }
+
+
+    void vkCmdResetEvent2( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2 stageMask ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdResetEvent2( commandBuffer, event, stageMask );
+    }
+
+
+    void vkCmdWaitEvents2( VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent * pEvents, const VkDependencyInfo * pDependencyInfos ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdWaitEvents2( commandBuffer, eventCount, pEvents, pDependencyInfos );
+    }
+
+
+    void vkCmdPipelineBarrier2( VkCommandBuffer commandBuffer, const VkDependencyInfo * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdPipelineBarrier2( commandBuffer, pDependencyInfo );
+    }
+
+
+    void vkCmdWriteTimestamp2( VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkQueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdWriteTimestamp2( commandBuffer, stage, queryPool, query );
+    }
+
+
+    VkResult vkQueueSubmit2( VkQueue queue, uint32_t submitCount, const VkSubmitInfo2 * pSubmits, VkFence fence ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkQueueSubmit2( queue, submitCount, pSubmits, fence );
+    }
+
+
+    void vkCmdCopyBuffer2( VkCommandBuffer commandBuffer, const VkCopyBufferInfo2 * pCopyBufferInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCopyBuffer2( commandBuffer, pCopyBufferInfo );
+    }
+
+
+    void vkCmdCopyImage2( VkCommandBuffer commandBuffer, const VkCopyImageInfo2 * pCopyImageInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCopyImage2( commandBuffer, pCopyImageInfo );
+    }
+
+
+    void vkCmdCopyBufferToImage2( VkCommandBuffer commandBuffer, const VkCopyBufferToImageInfo2 * pCopyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCopyBufferToImage2( commandBuffer, pCopyBufferToImageInfo );
+    }
+
+
+    void vkCmdCopyImageToBuffer2( VkCommandBuffer commandBuffer, const VkCopyImageToBufferInfo2 * pCopyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCopyImageToBuffer2( commandBuffer, pCopyImageToBufferInfo );
+    }
+
+
+    void vkCmdBlitImage2( VkCommandBuffer commandBuffer, const VkBlitImageInfo2 * pBlitImageInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBlitImage2( commandBuffer, pBlitImageInfo );
+    }
+
+
+    void vkCmdResolveImage2( VkCommandBuffer commandBuffer, const VkResolveImageInfo2 * pResolveImageInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdResolveImage2( commandBuffer, pResolveImageInfo );
+    }
+
+
+    void vkCmdBeginRendering( VkCommandBuffer commandBuffer, const VkRenderingInfo * pRenderingInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBeginRendering( commandBuffer, pRenderingInfo );
+    }
+
+
+    void vkCmdEndRendering( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdEndRendering( commandBuffer );
+    }
+
+
+    void vkCmdSetCullMode( VkCommandBuffer commandBuffer, VkCullModeFlags cullMode ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetCullMode( commandBuffer, cullMode );
+    }
+
+
+    void vkCmdSetFrontFace( VkCommandBuffer commandBuffer, VkFrontFace frontFace ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetFrontFace( commandBuffer, frontFace );
+    }
+
+
+    void vkCmdSetPrimitiveTopology( VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetPrimitiveTopology( commandBuffer, primitiveTopology );
+    }
+
+
+    void vkCmdSetViewportWithCount( VkCommandBuffer commandBuffer, uint32_t viewportCount, const VkViewport * pViewports ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetViewportWithCount( commandBuffer, viewportCount, pViewports );
+    }
+
+
+    void vkCmdSetScissorWithCount( VkCommandBuffer commandBuffer, uint32_t scissorCount, const VkRect2D * pScissors ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetScissorWithCount( commandBuffer, scissorCount, pScissors );
+    }
+
+
+    void vkCmdBindVertexBuffers2( VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer * pBuffers, const VkDeviceSize * pOffsets, const VkDeviceSize * pSizes, const VkDeviceSize * pStrides ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBindVertexBuffers2( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes, pStrides );
+    }
+
+
+    void vkCmdSetDepthTestEnable( VkCommandBuffer commandBuffer, VkBool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetDepthTestEnable( commandBuffer, depthTestEnable );
+    }
+
+
+    void vkCmdSetDepthWriteEnable( VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetDepthWriteEnable( commandBuffer, depthWriteEnable );
+    }
+
+
+    void vkCmdSetDepthCompareOp( VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetDepthCompareOp( commandBuffer, depthCompareOp );
+    }
+
+
+    void vkCmdSetDepthBoundsTestEnable( VkCommandBuffer commandBuffer, VkBool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetDepthBoundsTestEnable( commandBuffer, depthBoundsTestEnable );
+    }
+
+
+    void vkCmdSetStencilTestEnable( VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetStencilTestEnable( commandBuffer, stencilTestEnable );
+    }
+
+
+    void vkCmdSetStencilOp( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, VkStencilOp failOp, VkStencilOp passOp, VkStencilOp depthFailOp, VkCompareOp compareOp ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetStencilOp( commandBuffer, faceMask, failOp, passOp, depthFailOp, compareOp );
+    }
+
+
+    void vkCmdSetRasterizerDiscardEnable( VkCommandBuffer commandBuffer, VkBool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetRasterizerDiscardEnable( commandBuffer, rasterizerDiscardEnable );
+    }
+
+
+    void vkCmdSetDepthBiasEnable( VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetDepthBiasEnable( commandBuffer, depthBiasEnable );
+    }
+
+
+    void vkCmdSetPrimitiveRestartEnable( VkCommandBuffer commandBuffer, VkBool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetPrimitiveRestartEnable( commandBuffer, primitiveRestartEnable );
+    }
+
+
+    void vkGetDeviceBufferMemoryRequirements( VkDevice device, const VkDeviceBufferMemoryRequirements * pInfo, VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceBufferMemoryRequirements( device, pInfo, pMemoryRequirements );
+    }
+
+
+    void vkGetDeviceImageMemoryRequirements( VkDevice device, const VkDeviceImageMemoryRequirements * pInfo, VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceImageMemoryRequirements( device, pInfo, pMemoryRequirements );
+    }
+
+
+    void vkGetDeviceImageSparseMemoryRequirements( VkDevice device, const VkDeviceImageMemoryRequirements * pInfo, uint32_t * pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceImageSparseMemoryRequirements( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements );
+    }
+
+  //=== VK_KHR_surface ===
+
+
+    void vkDestroySurfaceKHR( VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroySurfaceKHR( instance, surface, pAllocator );
+    }
+
+
+    VkResult vkGetPhysicalDeviceSurfaceSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, VkSurfaceKHR surface, VkBool32 * pSupported ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceSurfaceSupportKHR( physicalDevice, queueFamilyIndex, surface, pSupported );
+    }
+
+
+    VkResult vkGetPhysicalDeviceSurfaceCapabilitiesKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilitiesKHR * pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceSurfaceCapabilitiesKHR( physicalDevice, surface, pSurfaceCapabilities );
+    }
+
+
+    VkResult vkGetPhysicalDeviceSurfaceFormatsKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t * pSurfaceFormatCount, VkSurfaceFormatKHR * pSurfaceFormats ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceSurfaceFormatsKHR( physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats );
+    }
+
+
+    VkResult vkGetPhysicalDeviceSurfacePresentModesKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t * pPresentModeCount, VkPresentModeKHR * pPresentModes ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceSurfacePresentModesKHR( physicalDevice, surface, pPresentModeCount, pPresentModes );
+    }
+
+  //=== VK_KHR_swapchain ===
+
+
+    VkResult vkCreateSwapchainKHR( VkDevice device, const VkSwapchainCreateInfoKHR * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkSwapchainKHR * pSwapchain ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateSwapchainKHR( device, pCreateInfo, pAllocator, pSwapchain );
+    }
+
+
+    void vkDestroySwapchainKHR( VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroySwapchainKHR( device, swapchain, pAllocator );
+    }
+
+
+    VkResult vkGetSwapchainImagesKHR( VkDevice device, VkSwapchainKHR swapchain, uint32_t * pSwapchainImageCount, VkImage * pSwapchainImages ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetSwapchainImagesKHR( device, swapchain, pSwapchainImageCount, pSwapchainImages );
+    }
+
+
+    VkResult vkAcquireNextImageKHR( VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t * pImageIndex ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkAcquireNextImageKHR( device, swapchain, timeout, semaphore, fence, pImageIndex );
+    }
+
+
+    VkResult vkQueuePresentKHR( VkQueue queue, const VkPresentInfoKHR * pPresentInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkQueuePresentKHR( queue, pPresentInfo );
+    }
+
+
+    VkResult vkGetDeviceGroupPresentCapabilitiesKHR( VkDevice device, VkDeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceGroupPresentCapabilitiesKHR( device, pDeviceGroupPresentCapabilities );
+    }
+
+
+    VkResult vkGetDeviceGroupSurfacePresentModesKHR( VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHR * pModes ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceGroupSurfacePresentModesKHR( device, surface, pModes );
+    }
+
+
+    VkResult vkGetPhysicalDevicePresentRectanglesKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t * pRectCount, VkRect2D * pRects ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDevicePresentRectanglesKHR( physicalDevice, surface, pRectCount, pRects );
+    }
+
+
+    VkResult vkAcquireNextImage2KHR( VkDevice device, const VkAcquireNextImageInfoKHR * pAcquireInfo, uint32_t * pImageIndex ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkAcquireNextImage2KHR( device, pAcquireInfo, pImageIndex );
+    }
+
+  //=== VK_KHR_display ===
+
+
+    VkResult vkGetPhysicalDeviceDisplayPropertiesKHR( VkPhysicalDevice physicalDevice, uint32_t * pPropertyCount, VkDisplayPropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceDisplayPropertiesKHR( physicalDevice, pPropertyCount, pProperties );
+    }
+
+
+    VkResult vkGetPhysicalDeviceDisplayPlanePropertiesKHR( VkPhysicalDevice physicalDevice, uint32_t * pPropertyCount, VkDisplayPlanePropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceDisplayPlanePropertiesKHR( physicalDevice, pPropertyCount, pProperties );
+    }
+
+
+    VkResult vkGetDisplayPlaneSupportedDisplaysKHR( VkPhysicalDevice physicalDevice, uint32_t planeIndex, uint32_t * pDisplayCount, VkDisplayKHR * pDisplays ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDisplayPlaneSupportedDisplaysKHR( physicalDevice, planeIndex, pDisplayCount, pDisplays );
+    }
+
+
+    VkResult vkGetDisplayModePropertiesKHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t * pPropertyCount, VkDisplayModePropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDisplayModePropertiesKHR( physicalDevice, display, pPropertyCount, pProperties );
+    }
+
+
+    VkResult vkCreateDisplayModeKHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, const VkDisplayModeCreateInfoKHR * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkDisplayModeKHR * pMode ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateDisplayModeKHR( physicalDevice, display, pCreateInfo, pAllocator, pMode );
+    }
+
+
+    VkResult vkGetDisplayPlaneCapabilitiesKHR( VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode, uint32_t planeIndex, VkDisplayPlaneCapabilitiesKHR * pCapabilities ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDisplayPlaneCapabilitiesKHR( physicalDevice, mode, planeIndex, pCapabilities );
+    }
+
+
+    VkResult vkCreateDisplayPlaneSurfaceKHR( VkInstance instance, const VkDisplaySurfaceCreateInfoKHR * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateDisplayPlaneSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
+    }
+
+  //=== VK_KHR_display_swapchain ===
+
+
+    VkResult vkCreateSharedSwapchainsKHR( VkDevice device, uint32_t swapchainCount, const VkSwapchainCreateInfoKHR * pCreateInfos, const VkAllocationCallbacks * pAllocator, VkSwapchainKHR * pSwapchains ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateSharedSwapchainsKHR( device, swapchainCount, pCreateInfos, pAllocator, pSwapchains );
+    }
+
+#if defined( VK_USE_PLATFORM_XLIB_KHR )
+  //=== VK_KHR_xlib_surface ===
+
+
+    VkResult vkCreateXlibSurfaceKHR( VkInstance instance, const VkXlibSurfaceCreateInfoKHR * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateXlibSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
+    }
+
+
+    VkBool32 vkGetPhysicalDeviceXlibPresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, Display * dpy, VisualID visualID ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceXlibPresentationSupportKHR( physicalDevice, queueFamilyIndex, dpy, visualID );
+    }
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+#if defined( VK_USE_PLATFORM_XCB_KHR )
+  //=== VK_KHR_xcb_surface ===
+
+
+    VkResult vkCreateXcbSurfaceKHR( VkInstance instance, const VkXcbSurfaceCreateInfoKHR * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateXcbSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
+    }
+
+
+    VkBool32 vkGetPhysicalDeviceXcbPresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, xcb_connection_t * connection, xcb_visualid_t visual_id ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceXcbPresentationSupportKHR( physicalDevice, queueFamilyIndex, connection, visual_id );
+    }
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
+  //=== VK_KHR_wayland_surface ===
+
+
+    VkResult vkCreateWaylandSurfaceKHR( VkInstance instance, const VkWaylandSurfaceCreateInfoKHR * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateWaylandSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
+    }
+
+
+    VkBool32 vkGetPhysicalDeviceWaylandPresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, struct wl_display * display ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceWaylandPresentationSupportKHR( physicalDevice, queueFamilyIndex, display );
+    }
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  //=== VK_KHR_android_surface ===
+
+
+    VkResult vkCreateAndroidSurfaceKHR( VkInstance instance, const VkAndroidSurfaceCreateInfoKHR * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateAndroidSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
+    }
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_win32_surface ===
+
+
+    VkResult vkCreateWin32SurfaceKHR( VkInstance instance, const VkWin32SurfaceCreateInfoKHR * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateWin32SurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
+    }
+
+
+    VkBool32 vkGetPhysicalDeviceWin32PresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceWin32PresentationSupportKHR( physicalDevice, queueFamilyIndex );
+    }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_EXT_debug_report ===
+
+
+    VkResult vkCreateDebugReportCallbackEXT( VkInstance instance, const VkDebugReportCallbackCreateInfoEXT * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkDebugReportCallbackEXT * pCallback ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateDebugReportCallbackEXT( instance, pCreateInfo, pAllocator, pCallback );
+    }
+
+
+    void vkDestroyDebugReportCallbackEXT( VkInstance instance, VkDebugReportCallbackEXT callback, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyDebugReportCallbackEXT( instance, callback, pAllocator );
+    }
+
+
+    void vkDebugReportMessageEXT( VkInstance instance, VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char * pLayerPrefix, const char * pMessage ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDebugReportMessageEXT( instance, flags, objectType, object, location, messageCode, pLayerPrefix, pMessage );
+    }
+
+  //=== VK_EXT_debug_marker ===
+
+
+    VkResult vkDebugMarkerSetObjectTagEXT( VkDevice device, const VkDebugMarkerObjectTagInfoEXT * pTagInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDebugMarkerSetObjectTagEXT( device, pTagInfo );
+    }
+
+
+    VkResult vkDebugMarkerSetObjectNameEXT( VkDevice device, const VkDebugMarkerObjectNameInfoEXT * pNameInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDebugMarkerSetObjectNameEXT( device, pNameInfo );
+    }
+
+
+    void vkCmdDebugMarkerBeginEXT( VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDebugMarkerBeginEXT( commandBuffer, pMarkerInfo );
+    }
+
+
+    void vkCmdDebugMarkerEndEXT( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDebugMarkerEndEXT( commandBuffer );
+    }
+
+
+    void vkCmdDebugMarkerInsertEXT( VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDebugMarkerInsertEXT( commandBuffer, pMarkerInfo );
+    }
+
+  //=== VK_KHR_video_queue ===
+
+
+    VkResult vkGetPhysicalDeviceVideoCapabilitiesKHR( VkPhysicalDevice physicalDevice, const VkVideoProfileInfoKHR * pVideoProfile, VkVideoCapabilitiesKHR * pCapabilities ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceVideoCapabilitiesKHR( physicalDevice, pVideoProfile, pCapabilities );
+    }
+
+
+    VkResult vkGetPhysicalDeviceVideoFormatPropertiesKHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceVideoFormatInfoKHR * pVideoFormatInfo, uint32_t * pVideoFormatPropertyCount, VkVideoFormatPropertiesKHR * pVideoFormatProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceVideoFormatPropertiesKHR( physicalDevice, pVideoFormatInfo, pVideoFormatPropertyCount, pVideoFormatProperties );
+    }
+
+
+    VkResult vkCreateVideoSessionKHR( VkDevice device, const VkVideoSessionCreateInfoKHR * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkVideoSessionKHR * pVideoSession ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateVideoSessionKHR( device, pCreateInfo, pAllocator, pVideoSession );
+    }
+
+
+    void vkDestroyVideoSessionKHR( VkDevice device, VkVideoSessionKHR videoSession, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyVideoSessionKHR( device, videoSession, pAllocator );
+    }
+
+
+    VkResult vkGetVideoSessionMemoryRequirementsKHR( VkDevice device, VkVideoSessionKHR videoSession, uint32_t * pMemoryRequirementsCount, VkVideoSessionMemoryRequirementsKHR * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetVideoSessionMemoryRequirementsKHR( device, videoSession, pMemoryRequirementsCount, pMemoryRequirements );
+    }
+
+
+    VkResult vkBindVideoSessionMemoryKHR( VkDevice device, VkVideoSessionKHR videoSession, uint32_t bindSessionMemoryInfoCount, const VkBindVideoSessionMemoryInfoKHR * pBindSessionMemoryInfos ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkBindVideoSessionMemoryKHR( device, videoSession, bindSessionMemoryInfoCount, pBindSessionMemoryInfos );
+    }
+
+
+    VkResult vkCreateVideoSessionParametersKHR( VkDevice device, const VkVideoSessionParametersCreateInfoKHR * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkVideoSessionParametersKHR * pVideoSessionParameters ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateVideoSessionParametersKHR( device, pCreateInfo, pAllocator, pVideoSessionParameters );
+    }
+
+
+    VkResult vkUpdateVideoSessionParametersKHR( VkDevice device, VkVideoSessionParametersKHR videoSessionParameters, const VkVideoSessionParametersUpdateInfoKHR * pUpdateInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkUpdateVideoSessionParametersKHR( device, videoSessionParameters, pUpdateInfo );
+    }
+
+
+    void vkDestroyVideoSessionParametersKHR( VkDevice device, VkVideoSessionParametersKHR videoSessionParameters, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyVideoSessionParametersKHR( device, videoSessionParameters, pAllocator );
+    }
+
+
+    void vkCmdBeginVideoCodingKHR( VkCommandBuffer commandBuffer, const VkVideoBeginCodingInfoKHR * pBeginInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBeginVideoCodingKHR( commandBuffer, pBeginInfo );
+    }
+
+
+    void vkCmdEndVideoCodingKHR( VkCommandBuffer commandBuffer, const VkVideoEndCodingInfoKHR * pEndCodingInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdEndVideoCodingKHR( commandBuffer, pEndCodingInfo );
+    }
+
+
+    void vkCmdControlVideoCodingKHR( VkCommandBuffer commandBuffer, const VkVideoCodingControlInfoKHR * pCodingControlInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdControlVideoCodingKHR( commandBuffer, pCodingControlInfo );
+    }
+
+  //=== VK_KHR_video_decode_queue ===
+
+
+    void vkCmdDecodeVideoKHR( VkCommandBuffer commandBuffer, const VkVideoDecodeInfoKHR * pDecodeInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDecodeVideoKHR( commandBuffer, pDecodeInfo );
+    }
+
+  //=== VK_EXT_transform_feedback ===
+
+
+    void vkCmdBindTransformFeedbackBuffersEXT( VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer * pBuffers, const VkDeviceSize * pOffsets, const VkDeviceSize * pSizes ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBindTransformFeedbackBuffersEXT( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes );
+    }
+
+
+    void vkCmdBeginTransformFeedbackEXT( VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer * pCounterBuffers, const VkDeviceSize * pCounterBufferOffsets ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBeginTransformFeedbackEXT( commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets );
+    }
+
+
+    void vkCmdEndTransformFeedbackEXT( VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer * pCounterBuffers, const VkDeviceSize * pCounterBufferOffsets ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdEndTransformFeedbackEXT( commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets );
+    }
+
+
+    void vkCmdBeginQueryIndexedEXT( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags, uint32_t index ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBeginQueryIndexedEXT( commandBuffer, queryPool, query, flags, index );
+    }
+
+
+    void vkCmdEndQueryIndexedEXT( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, uint32_t index ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdEndQueryIndexedEXT( commandBuffer, queryPool, query, index );
+    }
+
+
+    void vkCmdDrawIndirectByteCountEXT( VkCommandBuffer commandBuffer, uint32_t instanceCount, uint32_t firstInstance, VkBuffer counterBuffer, VkDeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawIndirectByteCountEXT( commandBuffer, instanceCount, firstInstance, counterBuffer, counterBufferOffset, counterOffset, vertexStride );
+    }
+
+  //=== VK_NVX_binary_import ===
+
+
+    VkResult vkCreateCuModuleNVX( VkDevice device, const VkCuModuleCreateInfoNVX * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkCuModuleNVX * pModule ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateCuModuleNVX( device, pCreateInfo, pAllocator, pModule );
+    }
+
+
+    VkResult vkCreateCuFunctionNVX( VkDevice device, const VkCuFunctionCreateInfoNVX * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkCuFunctionNVX * pFunction ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateCuFunctionNVX( device, pCreateInfo, pAllocator, pFunction );
+    }
+
+
+    void vkDestroyCuModuleNVX( VkDevice device, VkCuModuleNVX module, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyCuModuleNVX( device, module, pAllocator );
+    }
+
+
+    void vkDestroyCuFunctionNVX( VkDevice device, VkCuFunctionNVX function, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyCuFunctionNVX( device, function, pAllocator );
+    }
+
+
+    void vkCmdCuLaunchKernelNVX( VkCommandBuffer commandBuffer, const VkCuLaunchInfoNVX * pLaunchInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCuLaunchKernelNVX( commandBuffer, pLaunchInfo );
+    }
+
+  //=== VK_NVX_image_view_handle ===
+
+
+    uint32_t vkGetImageViewHandleNVX( VkDevice device, const VkImageViewHandleInfoNVX * pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetImageViewHandleNVX( device, pInfo );
+    }
+
+
+    VkResult vkGetImageViewAddressNVX( VkDevice device, VkImageView imageView, VkImageViewAddressPropertiesNVX * pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetImageViewAddressNVX( device, imageView, pProperties );
+    }
+
+  //=== VK_AMD_draw_indirect_count ===
+
+
+    void vkCmdDrawIndirectCountAMD( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawIndirectCountAMD( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
+    }
+
+
+    void vkCmdDrawIndexedIndirectCountAMD( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawIndexedIndirectCountAMD( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
+    }
+
+  //=== VK_AMD_shader_info ===
+
+
+    VkResult vkGetShaderInfoAMD( VkDevice device, VkPipeline pipeline, VkShaderStageFlagBits shaderStage, VkShaderInfoTypeAMD infoType, size_t * pInfoSize, void * pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetShaderInfoAMD( device, pipeline, shaderStage, infoType, pInfoSize, pInfo );
+    }
+
+  //=== VK_KHR_dynamic_rendering ===
+
+
+    void vkCmdBeginRenderingKHR( VkCommandBuffer commandBuffer, const VkRenderingInfo * pRenderingInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBeginRenderingKHR( commandBuffer, pRenderingInfo );
+    }
+
+
+    void vkCmdEndRenderingKHR( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdEndRenderingKHR( commandBuffer );
+    }
+
+#if defined( VK_USE_PLATFORM_GGP )
+  //=== VK_GGP_stream_descriptor_surface ===
+
+
+    VkResult vkCreateStreamDescriptorSurfaceGGP( VkInstance instance, const VkStreamDescriptorSurfaceCreateInfoGGP * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateStreamDescriptorSurfaceGGP( instance, pCreateInfo, pAllocator, pSurface );
+    }
+#endif /*VK_USE_PLATFORM_GGP*/
+
+  //=== VK_NV_external_memory_capabilities ===
+
+
+    VkResult vkGetPhysicalDeviceExternalImageFormatPropertiesNV( VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkExternalMemoryHandleTypeFlagsNV externalHandleType, VkExternalImageFormatPropertiesNV * pExternalImageFormatProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceExternalImageFormatPropertiesNV( physicalDevice, format, type, tiling, usage, flags, externalHandleType, pExternalImageFormatProperties );
+    }
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_NV_external_memory_win32 ===
+
+
+    VkResult vkGetMemoryWin32HandleNV( VkDevice device, VkDeviceMemory memory, VkExternalMemoryHandleTypeFlagsNV handleType, HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetMemoryWin32HandleNV( device, memory, handleType, pHandle );
+    }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_get_physical_device_properties2 ===
+
+
+    void vkGetPhysicalDeviceFeatures2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2 * pFeatures ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceFeatures2KHR( physicalDevice, pFeatures );
+    }
+
+
+    void vkGetPhysicalDeviceProperties2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceProperties2KHR( physicalDevice, pProperties );
+    }
+
+
+    void vkGetPhysicalDeviceFormatProperties2KHR( VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2 * pFormatProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceFormatProperties2KHR( physicalDevice, format, pFormatProperties );
+    }
+
+
+    VkResult vkGetPhysicalDeviceImageFormatProperties2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2 * pImageFormatInfo, VkImageFormatProperties2 * pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceImageFormatProperties2KHR( physicalDevice, pImageFormatInfo, pImageFormatProperties );
+    }
+
+
+    void vkGetPhysicalDeviceQueueFamilyProperties2KHR( VkPhysicalDevice physicalDevice, uint32_t * pQueueFamilyPropertyCount, VkQueueFamilyProperties2 * pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceQueueFamilyProperties2KHR( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties );
+    }
+
+
+    void vkGetPhysicalDeviceMemoryProperties2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2 * pMemoryProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceMemoryProperties2KHR( physicalDevice, pMemoryProperties );
+    }
+
+
+    void vkGetPhysicalDeviceSparseImageFormatProperties2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, uint32_t * pPropertyCount, VkSparseImageFormatProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceSparseImageFormatProperties2KHR( physicalDevice, pFormatInfo, pPropertyCount, pProperties );
+    }
+
+  //=== VK_KHR_device_group ===
+
+
+    void vkGetDeviceGroupPeerMemoryFeaturesKHR( VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags * pPeerMemoryFeatures ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceGroupPeerMemoryFeaturesKHR( device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures );
+    }
+
+
+    void vkCmdSetDeviceMaskKHR( VkCommandBuffer commandBuffer, uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetDeviceMaskKHR( commandBuffer, deviceMask );
+    }
+
+
+    void vkCmdDispatchBaseKHR( VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDispatchBaseKHR( commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
+    }
+
+#if defined( VK_USE_PLATFORM_VI_NN )
+  //=== VK_NN_vi_surface ===
+
+
+    VkResult vkCreateViSurfaceNN( VkInstance instance, const VkViSurfaceCreateInfoNN * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateViSurfaceNN( instance, pCreateInfo, pAllocator, pSurface );
+    }
+#endif /*VK_USE_PLATFORM_VI_NN*/
+
+  //=== VK_KHR_maintenance1 ===
+
+
+    void vkTrimCommandPoolKHR( VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkTrimCommandPoolKHR( device, commandPool, flags );
+    }
+
+  //=== VK_KHR_device_group_creation ===
+
+
+    VkResult vkEnumeratePhysicalDeviceGroupsKHR( VkInstance instance, uint32_t * pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkEnumeratePhysicalDeviceGroupsKHR( instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties );
+    }
+
+  //=== VK_KHR_external_memory_capabilities ===
+
+
+    void vkGetPhysicalDeviceExternalBufferPropertiesKHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo * pExternalBufferInfo, VkExternalBufferProperties * pExternalBufferProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceExternalBufferPropertiesKHR( physicalDevice, pExternalBufferInfo, pExternalBufferProperties );
+    }
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_external_memory_win32 ===
+
+
+    VkResult vkGetMemoryWin32HandleKHR( VkDevice device, const VkMemoryGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetMemoryWin32HandleKHR( device, pGetWin32HandleInfo, pHandle );
+    }
+
+
+    VkResult vkGetMemoryWin32HandlePropertiesKHR( VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, VkMemoryWin32HandlePropertiesKHR * pMemoryWin32HandleProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetMemoryWin32HandlePropertiesKHR( device, handleType, handle, pMemoryWin32HandleProperties );
+    }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_external_memory_fd ===
+
+
+    VkResult vkGetMemoryFdKHR( VkDevice device, const VkMemoryGetFdInfoKHR * pGetFdInfo, int * pFd ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetMemoryFdKHR( device, pGetFdInfo, pFd );
+    }
+
+
+    VkResult vkGetMemoryFdPropertiesKHR( VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, int fd, VkMemoryFdPropertiesKHR * pMemoryFdProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetMemoryFdPropertiesKHR( device, handleType, fd, pMemoryFdProperties );
+    }
+
+  //=== VK_KHR_external_semaphore_capabilities ===
+
+
+    void vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, VkExternalSemaphoreProperties * pExternalSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties );
+    }
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_external_semaphore_win32 ===
+
+
+    VkResult vkImportSemaphoreWin32HandleKHR( VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkImportSemaphoreWin32HandleKHR( device, pImportSemaphoreWin32HandleInfo );
+    }
+
+
+    VkResult vkGetSemaphoreWin32HandleKHR( VkDevice device, const VkSemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetSemaphoreWin32HandleKHR( device, pGetWin32HandleInfo, pHandle );
+    }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_external_semaphore_fd ===
+
+
+    VkResult vkImportSemaphoreFdKHR( VkDevice device, const VkImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkImportSemaphoreFdKHR( device, pImportSemaphoreFdInfo );
+    }
+
+
+    VkResult vkGetSemaphoreFdKHR( VkDevice device, const VkSemaphoreGetFdInfoKHR * pGetFdInfo, int * pFd ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetSemaphoreFdKHR( device, pGetFdInfo, pFd );
+    }
+
+  //=== VK_KHR_push_descriptor ===
+
+
+    void vkCmdPushDescriptorSetKHR( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VkWriteDescriptorSet * pDescriptorWrites ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdPushDescriptorSetKHR( commandBuffer, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites );
+    }
+
+
+    void vkCmdPushDescriptorSetWithTemplateKHR( VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplate descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set, const void * pData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdPushDescriptorSetWithTemplateKHR( commandBuffer, descriptorUpdateTemplate, layout, set, pData );
+    }
+
+  //=== VK_EXT_conditional_rendering ===
+
+
+    void vkCmdBeginConditionalRenderingEXT( VkCommandBuffer commandBuffer, const VkConditionalRenderingBeginInfoEXT * pConditionalRenderingBegin ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBeginConditionalRenderingEXT( commandBuffer, pConditionalRenderingBegin );
+    }
+
+
+    void vkCmdEndConditionalRenderingEXT( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdEndConditionalRenderingEXT( commandBuffer );
+    }
+
+  //=== VK_KHR_descriptor_update_template ===
+
+
+    VkResult vkCreateDescriptorUpdateTemplateKHR( VkDevice device, const VkDescriptorUpdateTemplateCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkDescriptorUpdateTemplate * pDescriptorUpdateTemplate ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateDescriptorUpdateTemplateKHR( device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate );
+    }
+
+
+    void vkDestroyDescriptorUpdateTemplateKHR( VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyDescriptorUpdateTemplateKHR( device, descriptorUpdateTemplate, pAllocator );
+    }
+
+
+    void vkUpdateDescriptorSetWithTemplateKHR( VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void * pData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkUpdateDescriptorSetWithTemplateKHR( device, descriptorSet, descriptorUpdateTemplate, pData );
+    }
+
+  //=== VK_NV_clip_space_w_scaling ===
+
+
+    void vkCmdSetViewportWScalingNV( VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewportWScalingNV * pViewportWScalings ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetViewportWScalingNV( commandBuffer, firstViewport, viewportCount, pViewportWScalings );
+    }
+
+  //=== VK_EXT_direct_mode_display ===
+
+
+    VkResult vkReleaseDisplayEXT( VkPhysicalDevice physicalDevice, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkReleaseDisplayEXT( physicalDevice, display );
+    }
+
+#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT )
+  //=== VK_EXT_acquire_xlib_display ===
+
+
+    VkResult vkAcquireXlibDisplayEXT( VkPhysicalDevice physicalDevice, Display * dpy, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkAcquireXlibDisplayEXT( physicalDevice, dpy, display );
+    }
+
+
+    VkResult vkGetRandROutputDisplayEXT( VkPhysicalDevice physicalDevice, Display * dpy, RROutput rrOutput, VkDisplayKHR * pDisplay ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetRandROutputDisplayEXT( physicalDevice, dpy, rrOutput, pDisplay );
+    }
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+
+  //=== VK_EXT_display_surface_counter ===
+
+
+    VkResult vkGetPhysicalDeviceSurfaceCapabilities2EXT( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilities2EXT * pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceSurfaceCapabilities2EXT( physicalDevice, surface, pSurfaceCapabilities );
+    }
+
+  //=== VK_EXT_display_control ===
+
+
+    VkResult vkDisplayPowerControlEXT( VkDevice device, VkDisplayKHR display, const VkDisplayPowerInfoEXT * pDisplayPowerInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDisplayPowerControlEXT( device, display, pDisplayPowerInfo );
+    }
+
+
+    VkResult vkRegisterDeviceEventEXT( VkDevice device, const VkDeviceEventInfoEXT * pDeviceEventInfo, const VkAllocationCallbacks * pAllocator, VkFence * pFence ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkRegisterDeviceEventEXT( device, pDeviceEventInfo, pAllocator, pFence );
+    }
+
+
+    VkResult vkRegisterDisplayEventEXT( VkDevice device, VkDisplayKHR display, const VkDisplayEventInfoEXT * pDisplayEventInfo, const VkAllocationCallbacks * pAllocator, VkFence * pFence ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkRegisterDisplayEventEXT( device, display, pDisplayEventInfo, pAllocator, pFence );
+    }
+
+
+    VkResult vkGetSwapchainCounterEXT( VkDevice device, VkSwapchainKHR swapchain, VkSurfaceCounterFlagBitsEXT counter, uint64_t * pCounterValue ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetSwapchainCounterEXT( device, swapchain, counter, pCounterValue );
+    }
+
+  //=== VK_GOOGLE_display_timing ===
+
+
+    VkResult vkGetRefreshCycleDurationGOOGLE( VkDevice device, VkSwapchainKHR swapchain, VkRefreshCycleDurationGOOGLE * pDisplayTimingProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetRefreshCycleDurationGOOGLE( device, swapchain, pDisplayTimingProperties );
+    }
+
+
+    VkResult vkGetPastPresentationTimingGOOGLE( VkDevice device, VkSwapchainKHR swapchain, uint32_t * pPresentationTimingCount, VkPastPresentationTimingGOOGLE * pPresentationTimings ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPastPresentationTimingGOOGLE( device, swapchain, pPresentationTimingCount, pPresentationTimings );
+    }
+
+  //=== VK_EXT_discard_rectangles ===
+
+
+    void vkCmdSetDiscardRectangleEXT( VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VkRect2D * pDiscardRectangles ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetDiscardRectangleEXT( commandBuffer, firstDiscardRectangle, discardRectangleCount, pDiscardRectangles );
+    }
+
+  //=== VK_EXT_hdr_metadata ===
+
+
+    void vkSetHdrMetadataEXT( VkDevice device, uint32_t swapchainCount, const VkSwapchainKHR * pSwapchains, const VkHdrMetadataEXT * pMetadata ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkSetHdrMetadataEXT( device, swapchainCount, pSwapchains, pMetadata );
+    }
+
+  //=== VK_KHR_create_renderpass2 ===
+
+
+    VkResult vkCreateRenderPass2KHR( VkDevice device, const VkRenderPassCreateInfo2 * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkRenderPass * pRenderPass ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateRenderPass2KHR( device, pCreateInfo, pAllocator, pRenderPass );
+    }
+
+
+    void vkCmdBeginRenderPass2KHR( VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo * pRenderPassBegin, const VkSubpassBeginInfo * pSubpassBeginInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBeginRenderPass2KHR( commandBuffer, pRenderPassBegin, pSubpassBeginInfo );
+    }
+
+
+    void vkCmdNextSubpass2KHR( VkCommandBuffer commandBuffer, const VkSubpassBeginInfo * pSubpassBeginInfo, const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdNextSubpass2KHR( commandBuffer, pSubpassBeginInfo, pSubpassEndInfo );
+    }
+
+
+    void vkCmdEndRenderPass2KHR( VkCommandBuffer commandBuffer, const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdEndRenderPass2KHR( commandBuffer, pSubpassEndInfo );
+    }
+
+  //=== VK_KHR_shared_presentable_image ===
+
+
+    VkResult vkGetSwapchainStatusKHR( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetSwapchainStatusKHR( device, swapchain );
+    }
+
+  //=== VK_KHR_external_fence_capabilities ===
+
+
+    void vkGetPhysicalDeviceExternalFencePropertiesKHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo * pExternalFenceInfo, VkExternalFenceProperties * pExternalFenceProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceExternalFencePropertiesKHR( physicalDevice, pExternalFenceInfo, pExternalFenceProperties );
+    }
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_external_fence_win32 ===
+
+
+    VkResult vkImportFenceWin32HandleKHR( VkDevice device, const VkImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkImportFenceWin32HandleKHR( device, pImportFenceWin32HandleInfo );
+    }
+
+
+    VkResult vkGetFenceWin32HandleKHR( VkDevice device, const VkFenceGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetFenceWin32HandleKHR( device, pGetWin32HandleInfo, pHandle );
+    }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_external_fence_fd ===
+
+
+    VkResult vkImportFenceFdKHR( VkDevice device, const VkImportFenceFdInfoKHR * pImportFenceFdInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkImportFenceFdKHR( device, pImportFenceFdInfo );
+    }
+
+
+    VkResult vkGetFenceFdKHR( VkDevice device, const VkFenceGetFdInfoKHR * pGetFdInfo, int * pFd ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetFenceFdKHR( device, pGetFdInfo, pFd );
+    }
+
+  //=== VK_KHR_performance_query ===
+
+
+    VkResult vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, uint32_t * pCounterCount, VkPerformanceCounterKHR * pCounters, VkPerformanceCounterDescriptionKHR * pCounterDescriptions ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( physicalDevice, queueFamilyIndex, pCounterCount, pCounters, pCounterDescriptions );
+    }
+
+
+    void vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( VkPhysicalDevice physicalDevice, const VkQueryPoolPerformanceCreateInfoKHR * pPerformanceQueryCreateInfo, uint32_t * pNumPasses ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( physicalDevice, pPerformanceQueryCreateInfo, pNumPasses );
+    }
+
+
+    VkResult vkAcquireProfilingLockKHR( VkDevice device, const VkAcquireProfilingLockInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkAcquireProfilingLockKHR( device, pInfo );
+    }
+
+
+    void vkReleaseProfilingLockKHR( VkDevice device ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkReleaseProfilingLockKHR( device );
+    }
+
+  //=== VK_KHR_get_surface_capabilities2 ===
+
+
+    VkResult vkGetPhysicalDeviceSurfaceCapabilities2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, VkSurfaceCapabilities2KHR * pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceSurfaceCapabilities2KHR( physicalDevice, pSurfaceInfo, pSurfaceCapabilities );
+    }
+
+
+    VkResult vkGetPhysicalDeviceSurfaceFormats2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, uint32_t * pSurfaceFormatCount, VkSurfaceFormat2KHR * pSurfaceFormats ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceSurfaceFormats2KHR( physicalDevice, pSurfaceInfo, pSurfaceFormatCount, pSurfaceFormats );
+    }
+
+  //=== VK_KHR_get_display_properties2 ===
+
+
+    VkResult vkGetPhysicalDeviceDisplayProperties2KHR( VkPhysicalDevice physicalDevice, uint32_t * pPropertyCount, VkDisplayProperties2KHR * pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceDisplayProperties2KHR( physicalDevice, pPropertyCount, pProperties );
+    }
+
+
+    VkResult vkGetPhysicalDeviceDisplayPlaneProperties2KHR( VkPhysicalDevice physicalDevice, uint32_t * pPropertyCount, VkDisplayPlaneProperties2KHR * pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceDisplayPlaneProperties2KHR( physicalDevice, pPropertyCount, pProperties );
+    }
+
+
+    VkResult vkGetDisplayModeProperties2KHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t * pPropertyCount, VkDisplayModeProperties2KHR * pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDisplayModeProperties2KHR( physicalDevice, display, pPropertyCount, pProperties );
+    }
+
+
+    VkResult vkGetDisplayPlaneCapabilities2KHR( VkPhysicalDevice physicalDevice, const VkDisplayPlaneInfo2KHR * pDisplayPlaneInfo, VkDisplayPlaneCapabilities2KHR * pCapabilities ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDisplayPlaneCapabilities2KHR( physicalDevice, pDisplayPlaneInfo, pCapabilities );
+    }
+
+#if defined( VK_USE_PLATFORM_IOS_MVK )
+  //=== VK_MVK_ios_surface ===
+
+
+    VkResult vkCreateIOSSurfaceMVK( VkInstance instance, const VkIOSSurfaceCreateInfoMVK * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateIOSSurfaceMVK( instance, pCreateInfo, pAllocator, pSurface );
+    }
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+#if defined( VK_USE_PLATFORM_MACOS_MVK )
+  //=== VK_MVK_macos_surface ===
+
+
+    VkResult vkCreateMacOSSurfaceMVK( VkInstance instance, const VkMacOSSurfaceCreateInfoMVK * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateMacOSSurfaceMVK( instance, pCreateInfo, pAllocator, pSurface );
+    }
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+
+  //=== VK_EXT_debug_utils ===
+
+
+    VkResult vkSetDebugUtilsObjectNameEXT( VkDevice device, const VkDebugUtilsObjectNameInfoEXT * pNameInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkSetDebugUtilsObjectNameEXT( device, pNameInfo );
+    }
+
+
+    VkResult vkSetDebugUtilsObjectTagEXT( VkDevice device, const VkDebugUtilsObjectTagInfoEXT * pTagInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkSetDebugUtilsObjectTagEXT( device, pTagInfo );
+    }
+
+
+    void vkQueueBeginDebugUtilsLabelEXT( VkQueue queue, const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkQueueBeginDebugUtilsLabelEXT( queue, pLabelInfo );
+    }
+
+
+    void vkQueueEndDebugUtilsLabelEXT( VkQueue queue ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkQueueEndDebugUtilsLabelEXT( queue );
+    }
+
+
+    void vkQueueInsertDebugUtilsLabelEXT( VkQueue queue, const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkQueueInsertDebugUtilsLabelEXT( queue, pLabelInfo );
+    }
+
+
+    void vkCmdBeginDebugUtilsLabelEXT( VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBeginDebugUtilsLabelEXT( commandBuffer, pLabelInfo );
+    }
+
+
+    void vkCmdEndDebugUtilsLabelEXT( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdEndDebugUtilsLabelEXT( commandBuffer );
+    }
+
+
+    void vkCmdInsertDebugUtilsLabelEXT( VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdInsertDebugUtilsLabelEXT( commandBuffer, pLabelInfo );
+    }
+
+
+    VkResult vkCreateDebugUtilsMessengerEXT( VkInstance instance, const VkDebugUtilsMessengerCreateInfoEXT * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkDebugUtilsMessengerEXT * pMessenger ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateDebugUtilsMessengerEXT( instance, pCreateInfo, pAllocator, pMessenger );
+    }
+
+
+    void vkDestroyDebugUtilsMessengerEXT( VkInstance instance, VkDebugUtilsMessengerEXT messenger, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyDebugUtilsMessengerEXT( instance, messenger, pAllocator );
+    }
+
+
+    void vkSubmitDebugUtilsMessageEXT( VkInstance instance, VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT messageTypes, const VkDebugUtilsMessengerCallbackDataEXT * pCallbackData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkSubmitDebugUtilsMessageEXT( instance, messageSeverity, messageTypes, pCallbackData );
+    }
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  //=== VK_ANDROID_external_memory_android_hardware_buffer ===
+
+
+    VkResult vkGetAndroidHardwareBufferPropertiesANDROID( VkDevice device, const struct AHardwareBuffer * buffer, VkAndroidHardwareBufferPropertiesANDROID * pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetAndroidHardwareBufferPropertiesANDROID( device, buffer, pProperties );
+    }
+
+
+    VkResult vkGetMemoryAndroidHardwareBufferANDROID( VkDevice device, const VkMemoryGetAndroidHardwareBufferInfoANDROID * pInfo, struct AHardwareBuffer ** pBuffer ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetMemoryAndroidHardwareBufferANDROID( device, pInfo, pBuffer );
+    }
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+  //=== VK_EXT_sample_locations ===
+
+
+    void vkCmdSetSampleLocationsEXT( VkCommandBuffer commandBuffer, const VkSampleLocationsInfoEXT * pSampleLocationsInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetSampleLocationsEXT( commandBuffer, pSampleLocationsInfo );
+    }
+
+
+    void vkGetPhysicalDeviceMultisamplePropertiesEXT( VkPhysicalDevice physicalDevice, VkSampleCountFlagBits samples, VkMultisamplePropertiesEXT * pMultisampleProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceMultisamplePropertiesEXT( physicalDevice, samples, pMultisampleProperties );
+    }
+
+  //=== VK_KHR_get_memory_requirements2 ===
+
+
+    void vkGetImageMemoryRequirements2KHR( VkDevice device, const VkImageMemoryRequirementsInfo2 * pInfo, VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetImageMemoryRequirements2KHR( device, pInfo, pMemoryRequirements );
+    }
+
+
+    void vkGetBufferMemoryRequirements2KHR( VkDevice device, const VkBufferMemoryRequirementsInfo2 * pInfo, VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetBufferMemoryRequirements2KHR( device, pInfo, pMemoryRequirements );
+    }
+
+
+    void vkGetImageSparseMemoryRequirements2KHR( VkDevice device, const VkImageSparseMemoryRequirementsInfo2 * pInfo, uint32_t * pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetImageSparseMemoryRequirements2KHR( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements );
+    }
+
+  //=== VK_KHR_acceleration_structure ===
+
+
+    VkResult vkCreateAccelerationStructureKHR( VkDevice device, const VkAccelerationStructureCreateInfoKHR * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkAccelerationStructureKHR * pAccelerationStructure ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateAccelerationStructureKHR( device, pCreateInfo, pAllocator, pAccelerationStructure );
+    }
+
+
+    void vkDestroyAccelerationStructureKHR( VkDevice device, VkAccelerationStructureKHR accelerationStructure, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyAccelerationStructureKHR( device, accelerationStructure, pAllocator );
+    }
+
+
+    void vkCmdBuildAccelerationStructuresKHR( VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR * pInfos, const VkAccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBuildAccelerationStructuresKHR( commandBuffer, infoCount, pInfos, ppBuildRangeInfos );
+    }
+
+
+    void vkCmdBuildAccelerationStructuresIndirectKHR( VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR * pInfos, const VkDeviceAddress * pIndirectDeviceAddresses, const uint32_t * pIndirectStrides, const uint32_t * const * ppMaxPrimitiveCounts ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBuildAccelerationStructuresIndirectKHR( commandBuffer, infoCount, pInfos, pIndirectDeviceAddresses, pIndirectStrides, ppMaxPrimitiveCounts );
+    }
+
+
+    VkResult vkBuildAccelerationStructuresKHR( VkDevice device, VkDeferredOperationKHR deferredOperation, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR * pInfos, const VkAccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkBuildAccelerationStructuresKHR( device, deferredOperation, infoCount, pInfos, ppBuildRangeInfos );
+    }
+
+
+    VkResult vkCopyAccelerationStructureKHR( VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCopyAccelerationStructureKHR( device, deferredOperation, pInfo );
+    }
+
+
+    VkResult vkCopyAccelerationStructureToMemoryKHR( VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyAccelerationStructureToMemoryInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCopyAccelerationStructureToMemoryKHR( device, deferredOperation, pInfo );
+    }
+
+
+    VkResult vkCopyMemoryToAccelerationStructureKHR( VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyMemoryToAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCopyMemoryToAccelerationStructureKHR( device, deferredOperation, pInfo );
+    }
+
+
+    VkResult vkWriteAccelerationStructuresPropertiesKHR( VkDevice device, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR * pAccelerationStructures, VkQueryType queryType, size_t dataSize, void * pData, size_t stride ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkWriteAccelerationStructuresPropertiesKHR( device, accelerationStructureCount, pAccelerationStructures, queryType, dataSize, pData, stride );
+    }
+
+
+    void vkCmdCopyAccelerationStructureKHR( VkCommandBuffer commandBuffer, const VkCopyAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCopyAccelerationStructureKHR( commandBuffer, pInfo );
+    }
+
+
+    void vkCmdCopyAccelerationStructureToMemoryKHR( VkCommandBuffer commandBuffer, const VkCopyAccelerationStructureToMemoryInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCopyAccelerationStructureToMemoryKHR( commandBuffer, pInfo );
+    }
+
+
+    void vkCmdCopyMemoryToAccelerationStructureKHR( VkCommandBuffer commandBuffer, const VkCopyMemoryToAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCopyMemoryToAccelerationStructureKHR( commandBuffer, pInfo );
+    }
+
+
+    VkDeviceAddress vkGetAccelerationStructureDeviceAddressKHR( VkDevice device, const VkAccelerationStructureDeviceAddressInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetAccelerationStructureDeviceAddressKHR( device, pInfo );
+    }
+
+
+    void vkCmdWriteAccelerationStructuresPropertiesKHR( VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR * pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdWriteAccelerationStructuresPropertiesKHR( commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery );
+    }
+
+
+    void vkGetDeviceAccelerationStructureCompatibilityKHR( VkDevice device, const VkAccelerationStructureVersionInfoKHR * pVersionInfo, VkAccelerationStructureCompatibilityKHR * pCompatibility ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceAccelerationStructureCompatibilityKHR( device, pVersionInfo, pCompatibility );
+    }
+
+
+    void vkGetAccelerationStructureBuildSizesKHR( VkDevice device, VkAccelerationStructureBuildTypeKHR buildType, const VkAccelerationStructureBuildGeometryInfoKHR * pBuildInfo, const uint32_t * pMaxPrimitiveCounts, VkAccelerationStructureBuildSizesInfoKHR * pSizeInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetAccelerationStructureBuildSizesKHR( device, buildType, pBuildInfo, pMaxPrimitiveCounts, pSizeInfo );
+    }
+
+  //=== VK_KHR_sampler_ycbcr_conversion ===
+
+
+    VkResult vkCreateSamplerYcbcrConversionKHR( VkDevice device, const VkSamplerYcbcrConversionCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkSamplerYcbcrConversion * pYcbcrConversion ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateSamplerYcbcrConversionKHR( device, pCreateInfo, pAllocator, pYcbcrConversion );
+    }
+
+
+    void vkDestroySamplerYcbcrConversionKHR( VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroySamplerYcbcrConversionKHR( device, ycbcrConversion, pAllocator );
+    }
+
+  //=== VK_KHR_bind_memory2 ===
+
+
+    VkResult vkBindBufferMemory2KHR( VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkBindBufferMemory2KHR( device, bindInfoCount, pBindInfos );
+    }
+
+
+    VkResult vkBindImageMemory2KHR( VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkBindImageMemory2KHR( device, bindInfoCount, pBindInfos );
+    }
+
+  //=== VK_EXT_image_drm_format_modifier ===
+
+
+    VkResult vkGetImageDrmFormatModifierPropertiesEXT( VkDevice device, VkImage image, VkImageDrmFormatModifierPropertiesEXT * pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetImageDrmFormatModifierPropertiesEXT( device, image, pProperties );
+    }
+
+  //=== VK_EXT_validation_cache ===
+
+
+    VkResult vkCreateValidationCacheEXT( VkDevice device, const VkValidationCacheCreateInfoEXT * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkValidationCacheEXT * pValidationCache ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateValidationCacheEXT( device, pCreateInfo, pAllocator, pValidationCache );
+    }
+
+
+    void vkDestroyValidationCacheEXT( VkDevice device, VkValidationCacheEXT validationCache, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyValidationCacheEXT( device, validationCache, pAllocator );
+    }
+
+
+    VkResult vkMergeValidationCachesEXT( VkDevice device, VkValidationCacheEXT dstCache, uint32_t srcCacheCount, const VkValidationCacheEXT * pSrcCaches ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkMergeValidationCachesEXT( device, dstCache, srcCacheCount, pSrcCaches );
+    }
+
+
+    VkResult vkGetValidationCacheDataEXT( VkDevice device, VkValidationCacheEXT validationCache, size_t * pDataSize, void * pData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetValidationCacheDataEXT( device, validationCache, pDataSize, pData );
+    }
+
+  //=== VK_NV_shading_rate_image ===
+
+
+    void vkCmdBindShadingRateImageNV( VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBindShadingRateImageNV( commandBuffer, imageView, imageLayout );
+    }
+
+
+    void vkCmdSetViewportShadingRatePaletteNV( VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkShadingRatePaletteNV * pShadingRatePalettes ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetViewportShadingRatePaletteNV( commandBuffer, firstViewport, viewportCount, pShadingRatePalettes );
+    }
+
+
+    void vkCmdSetCoarseSampleOrderNV( VkCommandBuffer commandBuffer, VkCoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VkCoarseSampleOrderCustomNV * pCustomSampleOrders ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetCoarseSampleOrderNV( commandBuffer, sampleOrderType, customSampleOrderCount, pCustomSampleOrders );
+    }
+
+  //=== VK_NV_ray_tracing ===
+
+
+    VkResult vkCreateAccelerationStructureNV( VkDevice device, const VkAccelerationStructureCreateInfoNV * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkAccelerationStructureNV * pAccelerationStructure ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateAccelerationStructureNV( device, pCreateInfo, pAllocator, pAccelerationStructure );
+    }
+
+
+    void vkDestroyAccelerationStructureNV( VkDevice device, VkAccelerationStructureNV accelerationStructure, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyAccelerationStructureNV( device, accelerationStructure, pAllocator );
+    }
+
+
+    void vkGetAccelerationStructureMemoryRequirementsNV( VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV * pInfo, VkMemoryRequirements2KHR * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetAccelerationStructureMemoryRequirementsNV( device, pInfo, pMemoryRequirements );
+    }
+
+
+    VkResult vkBindAccelerationStructureMemoryNV( VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV * pBindInfos ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkBindAccelerationStructureMemoryNV( device, bindInfoCount, pBindInfos );
+    }
+
+
+    void vkCmdBuildAccelerationStructureNV( VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV * pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset, VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBuildAccelerationStructureNV( commandBuffer, pInfo, instanceData, instanceOffset, update, dst, src, scratch, scratchOffset );
+    }
+
+
+    void vkCmdCopyAccelerationStructureNV( VkCommandBuffer commandBuffer, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkCopyAccelerationStructureModeKHR mode ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCopyAccelerationStructureNV( commandBuffer, dst, src, mode );
+    }
+
+
+    void vkCmdTraceRaysNV( VkCommandBuffer commandBuffer, VkBuffer raygenShaderBindingTableBuffer, VkDeviceSize raygenShaderBindingOffset, VkBuffer missShaderBindingTableBuffer, VkDeviceSize missShaderBindingOffset, VkDeviceSize missShaderBindingStride, VkBuffer hitShaderBindingTableBuffer, VkDeviceSize hitShaderBindingOffset, VkDeviceSize hitShaderBindingStride, VkBuffer callableShaderBindingTableBuffer, VkDeviceSize callableShaderBindingOffset, VkDeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdTraceRaysNV( commandBuffer, raygenShaderBindingTableBuffer, raygenShaderBindingOffset, missShaderBindingTableBuffer, missShaderBindingOffset, missShaderBindingStride, hitShaderBindingTableBuffer, hitShaderBindingOffset, hitShaderBindingStride, callableShaderBindingTableBuffer, callableShaderBindingOffset, callableShaderBindingStride, width, height, depth );
+    }
+
+
+    VkResult vkCreateRayTracingPipelinesNV( VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoNV * pCreateInfos, const VkAllocationCallbacks * pAllocator, VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateRayTracingPipelinesNV( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines );
+    }
+
+
+    VkResult vkGetRayTracingShaderGroupHandlesNV( VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void * pData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetRayTracingShaderGroupHandlesNV( device, pipeline, firstGroup, groupCount, dataSize, pData );
+    }
+
+
+    VkResult vkGetAccelerationStructureHandleNV( VkDevice device, VkAccelerationStructureNV accelerationStructure, size_t dataSize, void * pData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetAccelerationStructureHandleNV( device, accelerationStructure, dataSize, pData );
+    }
+
+
+    void vkCmdWriteAccelerationStructuresPropertiesNV( VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureNV * pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdWriteAccelerationStructuresPropertiesNV( commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery );
+    }
+
+
+    VkResult vkCompileDeferredNV( VkDevice device, VkPipeline pipeline, uint32_t shader ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCompileDeferredNV( device, pipeline, shader );
+    }
+
+  //=== VK_KHR_maintenance3 ===
+
+
+    void vkGetDescriptorSetLayoutSupportKHR( VkDevice device, const VkDescriptorSetLayoutCreateInfo * pCreateInfo, VkDescriptorSetLayoutSupport * pSupport ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDescriptorSetLayoutSupportKHR( device, pCreateInfo, pSupport );
+    }
+
+  //=== VK_KHR_draw_indirect_count ===
+
+
+    void vkCmdDrawIndirectCountKHR( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawIndirectCountKHR( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
+    }
+
+
+    void vkCmdDrawIndexedIndirectCountKHR( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawIndexedIndirectCountKHR( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
+    }
+
+  //=== VK_EXT_external_memory_host ===
+
+
+    VkResult vkGetMemoryHostPointerPropertiesEXT( VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, const void * pHostPointer, VkMemoryHostPointerPropertiesEXT * pMemoryHostPointerProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetMemoryHostPointerPropertiesEXT( device, handleType, pHostPointer, pMemoryHostPointerProperties );
+    }
+
+  //=== VK_AMD_buffer_marker ===
+
+
+    void vkCmdWriteBufferMarkerAMD( VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdWriteBufferMarkerAMD( commandBuffer, pipelineStage, dstBuffer, dstOffset, marker );
+    }
+
+  //=== VK_EXT_calibrated_timestamps ===
+
+
+    VkResult vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( VkPhysicalDevice physicalDevice, uint32_t * pTimeDomainCount, VkTimeDomainEXT * pTimeDomains ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( physicalDevice, pTimeDomainCount, pTimeDomains );
+    }
+
+
+    VkResult vkGetCalibratedTimestampsEXT( VkDevice device, uint32_t timestampCount, const VkCalibratedTimestampInfoEXT * pTimestampInfos, uint64_t * pTimestamps, uint64_t * pMaxDeviation ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetCalibratedTimestampsEXT( device, timestampCount, pTimestampInfos, pTimestamps, pMaxDeviation );
+    }
+
+  //=== VK_NV_mesh_shader ===
+
+
+    void vkCmdDrawMeshTasksNV( VkCommandBuffer commandBuffer, uint32_t taskCount, uint32_t firstTask ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawMeshTasksNV( commandBuffer, taskCount, firstTask );
+    }
+
+
+    void vkCmdDrawMeshTasksIndirectNV( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawMeshTasksIndirectNV( commandBuffer, buffer, offset, drawCount, stride );
+    }
+
+
+    void vkCmdDrawMeshTasksIndirectCountNV( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawMeshTasksIndirectCountNV( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
+    }
+
+  //=== VK_NV_scissor_exclusive ===
+
+
+    void vkCmdSetExclusiveScissorNV( VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VkRect2D * pExclusiveScissors ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetExclusiveScissorNV( commandBuffer, firstExclusiveScissor, exclusiveScissorCount, pExclusiveScissors );
+    }
+
+  //=== VK_NV_device_diagnostic_checkpoints ===
+
+
+    void vkCmdSetCheckpointNV( VkCommandBuffer commandBuffer, const void * pCheckpointMarker ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetCheckpointNV( commandBuffer, pCheckpointMarker );
+    }
+
+
+    void vkGetQueueCheckpointDataNV( VkQueue queue, uint32_t * pCheckpointDataCount, VkCheckpointDataNV * pCheckpointData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetQueueCheckpointDataNV( queue, pCheckpointDataCount, pCheckpointData );
+    }
+
+  //=== VK_KHR_timeline_semaphore ===
+
+
+    VkResult vkGetSemaphoreCounterValueKHR( VkDevice device, VkSemaphore semaphore, uint64_t * pValue ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetSemaphoreCounterValueKHR( device, semaphore, pValue );
+    }
+
+
+    VkResult vkWaitSemaphoresKHR( VkDevice device, const VkSemaphoreWaitInfo * pWaitInfo, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkWaitSemaphoresKHR( device, pWaitInfo, timeout );
+    }
+
+
+    VkResult vkSignalSemaphoreKHR( VkDevice device, const VkSemaphoreSignalInfo * pSignalInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkSignalSemaphoreKHR( device, pSignalInfo );
+    }
+
+  //=== VK_INTEL_performance_query ===
+
+
+    VkResult vkInitializePerformanceApiINTEL( VkDevice device, const VkInitializePerformanceApiInfoINTEL * pInitializeInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkInitializePerformanceApiINTEL( device, pInitializeInfo );
+    }
+
+
+    void vkUninitializePerformanceApiINTEL( VkDevice device ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkUninitializePerformanceApiINTEL( device );
+    }
+
+
+    VkResult vkCmdSetPerformanceMarkerINTEL( VkCommandBuffer commandBuffer, const VkPerformanceMarkerInfoINTEL * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetPerformanceMarkerINTEL( commandBuffer, pMarkerInfo );
+    }
+
+
+    VkResult vkCmdSetPerformanceStreamMarkerINTEL( VkCommandBuffer commandBuffer, const VkPerformanceStreamMarkerInfoINTEL * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetPerformanceStreamMarkerINTEL( commandBuffer, pMarkerInfo );
+    }
+
+
+    VkResult vkCmdSetPerformanceOverrideINTEL( VkCommandBuffer commandBuffer, const VkPerformanceOverrideInfoINTEL * pOverrideInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetPerformanceOverrideINTEL( commandBuffer, pOverrideInfo );
+    }
+
+
+    VkResult vkAcquirePerformanceConfigurationINTEL( VkDevice device, const VkPerformanceConfigurationAcquireInfoINTEL * pAcquireInfo, VkPerformanceConfigurationINTEL * pConfiguration ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkAcquirePerformanceConfigurationINTEL( device, pAcquireInfo, pConfiguration );
+    }
+
+
+    VkResult vkReleasePerformanceConfigurationINTEL( VkDevice device, VkPerformanceConfigurationINTEL configuration ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkReleasePerformanceConfigurationINTEL( device, configuration );
+    }
+
+
+    VkResult vkQueueSetPerformanceConfigurationINTEL( VkQueue queue, VkPerformanceConfigurationINTEL configuration ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkQueueSetPerformanceConfigurationINTEL( queue, configuration );
+    }
+
+
+    VkResult vkGetPerformanceParameterINTEL( VkDevice device, VkPerformanceParameterTypeINTEL parameter, VkPerformanceValueINTEL * pValue ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPerformanceParameterINTEL( device, parameter, pValue );
+    }
+
+  //=== VK_AMD_display_native_hdr ===
+
+
+    void vkSetLocalDimmingAMD( VkDevice device, VkSwapchainKHR swapChain, VkBool32 localDimmingEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkSetLocalDimmingAMD( device, swapChain, localDimmingEnable );
+    }
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_imagepipe_surface ===
+
+
+    VkResult vkCreateImagePipeSurfaceFUCHSIA( VkInstance instance, const VkImagePipeSurfaceCreateInfoFUCHSIA * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateImagePipeSurfaceFUCHSIA( instance, pCreateInfo, pAllocator, pSurface );
+    }
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+  //=== VK_EXT_metal_surface ===
+
+
+    VkResult vkCreateMetalSurfaceEXT( VkInstance instance, const VkMetalSurfaceCreateInfoEXT * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateMetalSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface );
+    }
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  //=== VK_KHR_fragment_shading_rate ===
+
+
+    VkResult vkGetPhysicalDeviceFragmentShadingRatesKHR( VkPhysicalDevice physicalDevice, uint32_t * pFragmentShadingRateCount, VkPhysicalDeviceFragmentShadingRateKHR * pFragmentShadingRates ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceFragmentShadingRatesKHR( physicalDevice, pFragmentShadingRateCount, pFragmentShadingRates );
+    }
+
+
+    void vkCmdSetFragmentShadingRateKHR( VkCommandBuffer commandBuffer, const VkExtent2D * pFragmentSize, const VkFragmentShadingRateCombinerOpKHR combinerOps[2] ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetFragmentShadingRateKHR( commandBuffer, pFragmentSize, combinerOps );
+    }
+
+  //=== VK_EXT_buffer_device_address ===
+
+
+    VkDeviceAddress vkGetBufferDeviceAddressEXT( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetBufferDeviceAddressEXT( device, pInfo );
+    }
+
+  //=== VK_EXT_tooling_info ===
+
+
+    VkResult vkGetPhysicalDeviceToolPropertiesEXT( VkPhysicalDevice physicalDevice, uint32_t * pToolCount, VkPhysicalDeviceToolProperties * pToolProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceToolPropertiesEXT( physicalDevice, pToolCount, pToolProperties );
+    }
+
+  //=== VK_KHR_present_wait ===
+
+
+    VkResult vkWaitForPresentKHR( VkDevice device, VkSwapchainKHR swapchain, uint64_t presentId, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkWaitForPresentKHR( device, swapchain, presentId, timeout );
+    }
+
+  //=== VK_NV_cooperative_matrix ===
+
+
+    VkResult vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( VkPhysicalDevice physicalDevice, uint32_t * pPropertyCount, VkCooperativeMatrixPropertiesNV * pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( physicalDevice, pPropertyCount, pProperties );
+    }
+
+  //=== VK_NV_coverage_reduction_mode ===
+
+
+    VkResult vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( VkPhysicalDevice physicalDevice, uint32_t * pCombinationCount, VkFramebufferMixedSamplesCombinationNV * pCombinations ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( physicalDevice, pCombinationCount, pCombinations );
+    }
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_EXT_full_screen_exclusive ===
+
+
+    VkResult vkGetPhysicalDeviceSurfacePresentModes2EXT( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, uint32_t * pPresentModeCount, VkPresentModeKHR * pPresentModes ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceSurfacePresentModes2EXT( physicalDevice, pSurfaceInfo, pPresentModeCount, pPresentModes );
+    }
+
+
+    VkResult vkAcquireFullScreenExclusiveModeEXT( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkAcquireFullScreenExclusiveModeEXT( device, swapchain );
+    }
+
+
+    VkResult vkReleaseFullScreenExclusiveModeEXT( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkReleaseFullScreenExclusiveModeEXT( device, swapchain );
+    }
+
+
+    VkResult vkGetDeviceGroupSurfacePresentModes2EXT( VkDevice device, const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, VkDeviceGroupPresentModeFlagsKHR * pModes ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceGroupSurfacePresentModes2EXT( device, pSurfaceInfo, pModes );
+    }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_EXT_headless_surface ===
+
+
+    VkResult vkCreateHeadlessSurfaceEXT( VkInstance instance, const VkHeadlessSurfaceCreateInfoEXT * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateHeadlessSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface );
+    }
+
+  //=== VK_KHR_buffer_device_address ===
+
+
+    VkDeviceAddress vkGetBufferDeviceAddressKHR( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetBufferDeviceAddressKHR( device, pInfo );
+    }
+
+
+    uint64_t vkGetBufferOpaqueCaptureAddressKHR( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetBufferOpaqueCaptureAddressKHR( device, pInfo );
+    }
+
+
+    uint64_t vkGetDeviceMemoryOpaqueCaptureAddressKHR( VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceMemoryOpaqueCaptureAddressKHR( device, pInfo );
+    }
+
+  //=== VK_EXT_line_rasterization ===
+
+
+    void vkCmdSetLineStippleEXT( VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetLineStippleEXT( commandBuffer, lineStippleFactor, lineStipplePattern );
+    }
+
+  //=== VK_EXT_host_query_reset ===
+
+
+    void vkResetQueryPoolEXT( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkResetQueryPoolEXT( device, queryPool, firstQuery, queryCount );
+    }
+
+  //=== VK_EXT_extended_dynamic_state ===
+
+
+    void vkCmdSetCullModeEXT( VkCommandBuffer commandBuffer, VkCullModeFlags cullMode ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetCullModeEXT( commandBuffer, cullMode );
+    }
+
+
+    void vkCmdSetFrontFaceEXT( VkCommandBuffer commandBuffer, VkFrontFace frontFace ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetFrontFaceEXT( commandBuffer, frontFace );
+    }
+
+
+    void vkCmdSetPrimitiveTopologyEXT( VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetPrimitiveTopologyEXT( commandBuffer, primitiveTopology );
+    }
+
+
+    void vkCmdSetViewportWithCountEXT( VkCommandBuffer commandBuffer, uint32_t viewportCount, const VkViewport * pViewports ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetViewportWithCountEXT( commandBuffer, viewportCount, pViewports );
+    }
+
+
+    void vkCmdSetScissorWithCountEXT( VkCommandBuffer commandBuffer, uint32_t scissorCount, const VkRect2D * pScissors ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetScissorWithCountEXT( commandBuffer, scissorCount, pScissors );
+    }
+
+
+    void vkCmdBindVertexBuffers2EXT( VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer * pBuffers, const VkDeviceSize * pOffsets, const VkDeviceSize * pSizes, const VkDeviceSize * pStrides ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBindVertexBuffers2EXT( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes, pStrides );
+    }
+
+
+    void vkCmdSetDepthTestEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetDepthTestEnableEXT( commandBuffer, depthTestEnable );
+    }
+
+
+    void vkCmdSetDepthWriteEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetDepthWriteEnableEXT( commandBuffer, depthWriteEnable );
+    }
+
+
+    void vkCmdSetDepthCompareOpEXT( VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetDepthCompareOpEXT( commandBuffer, depthCompareOp );
+    }
+
+
+    void vkCmdSetDepthBoundsTestEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetDepthBoundsTestEnableEXT( commandBuffer, depthBoundsTestEnable );
+    }
+
+
+    void vkCmdSetStencilTestEnableEXT( VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetStencilTestEnableEXT( commandBuffer, stencilTestEnable );
+    }
+
+
+    void vkCmdSetStencilOpEXT( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, VkStencilOp failOp, VkStencilOp passOp, VkStencilOp depthFailOp, VkCompareOp compareOp ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetStencilOpEXT( commandBuffer, faceMask, failOp, passOp, depthFailOp, compareOp );
+    }
+
+  //=== VK_KHR_deferred_host_operations ===
+
+
+    VkResult vkCreateDeferredOperationKHR( VkDevice device, const VkAllocationCallbacks * pAllocator, VkDeferredOperationKHR * pDeferredOperation ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateDeferredOperationKHR( device, pAllocator, pDeferredOperation );
+    }
+
+
+    void vkDestroyDeferredOperationKHR( VkDevice device, VkDeferredOperationKHR operation, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyDeferredOperationKHR( device, operation, pAllocator );
+    }
+
+
+    uint32_t vkGetDeferredOperationMaxConcurrencyKHR( VkDevice device, VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeferredOperationMaxConcurrencyKHR( device, operation );
+    }
+
+
+    VkResult vkGetDeferredOperationResultKHR( VkDevice device, VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeferredOperationResultKHR( device, operation );
+    }
+
+
+    VkResult vkDeferredOperationJoinKHR( VkDevice device, VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDeferredOperationJoinKHR( device, operation );
+    }
+
+  //=== VK_KHR_pipeline_executable_properties ===
+
+
+    VkResult vkGetPipelineExecutablePropertiesKHR( VkDevice device, const VkPipelineInfoKHR * pPipelineInfo, uint32_t * pExecutableCount, VkPipelineExecutablePropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPipelineExecutablePropertiesKHR( device, pPipelineInfo, pExecutableCount, pProperties );
+    }
+
+
+    VkResult vkGetPipelineExecutableStatisticsKHR( VkDevice device, const VkPipelineExecutableInfoKHR * pExecutableInfo, uint32_t * pStatisticCount, VkPipelineExecutableStatisticKHR * pStatistics ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPipelineExecutableStatisticsKHR( device, pExecutableInfo, pStatisticCount, pStatistics );
+    }
+
+
+    VkResult vkGetPipelineExecutableInternalRepresentationsKHR( VkDevice device, const VkPipelineExecutableInfoKHR * pExecutableInfo, uint32_t * pInternalRepresentationCount, VkPipelineExecutableInternalRepresentationKHR * pInternalRepresentations ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPipelineExecutableInternalRepresentationsKHR( device, pExecutableInfo, pInternalRepresentationCount, pInternalRepresentations );
+    }
+
+  //=== VK_EXT_swapchain_maintenance1 ===
+
+
+    VkResult vkReleaseSwapchainImagesEXT( VkDevice device, const VkReleaseSwapchainImagesInfoEXT * pReleaseInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkReleaseSwapchainImagesEXT( device, pReleaseInfo );
+    }
+
+  //=== VK_NV_device_generated_commands ===
+
+
+    void vkGetGeneratedCommandsMemoryRequirementsNV( VkDevice device, const VkGeneratedCommandsMemoryRequirementsInfoNV * pInfo, VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetGeneratedCommandsMemoryRequirementsNV( device, pInfo, pMemoryRequirements );
+    }
+
+
+    void vkCmdPreprocessGeneratedCommandsNV( VkCommandBuffer commandBuffer, const VkGeneratedCommandsInfoNV * pGeneratedCommandsInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdPreprocessGeneratedCommandsNV( commandBuffer, pGeneratedCommandsInfo );
+    }
+
+
+    void vkCmdExecuteGeneratedCommandsNV( VkCommandBuffer commandBuffer, VkBool32 isPreprocessed, const VkGeneratedCommandsInfoNV * pGeneratedCommandsInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdExecuteGeneratedCommandsNV( commandBuffer, isPreprocessed, pGeneratedCommandsInfo );
+    }
+
+
+    void vkCmdBindPipelineShaderGroupNV( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline, uint32_t groupIndex ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBindPipelineShaderGroupNV( commandBuffer, pipelineBindPoint, pipeline, groupIndex );
+    }
+
+
+    VkResult vkCreateIndirectCommandsLayoutNV( VkDevice device, const VkIndirectCommandsLayoutCreateInfoNV * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkIndirectCommandsLayoutNV * pIndirectCommandsLayout ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateIndirectCommandsLayoutNV( device, pCreateInfo, pAllocator, pIndirectCommandsLayout );
+    }
+
+
+    void vkDestroyIndirectCommandsLayoutNV( VkDevice device, VkIndirectCommandsLayoutNV indirectCommandsLayout, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyIndirectCommandsLayoutNV( device, indirectCommandsLayout, pAllocator );
+    }
+
+  //=== VK_EXT_acquire_drm_display ===
+
+
+    VkResult vkAcquireDrmDisplayEXT( VkPhysicalDevice physicalDevice, int32_t drmFd, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkAcquireDrmDisplayEXT( physicalDevice, drmFd, display );
+    }
+
+
+    VkResult vkGetDrmDisplayEXT( VkPhysicalDevice physicalDevice, int32_t drmFd, uint32_t connectorId, VkDisplayKHR * display ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDrmDisplayEXT( physicalDevice, drmFd, connectorId, display );
+    }
+
+  //=== VK_EXT_private_data ===
+
+
+    VkResult vkCreatePrivateDataSlotEXT( VkDevice device, const VkPrivateDataSlotCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkPrivateDataSlot * pPrivateDataSlot ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreatePrivateDataSlotEXT( device, pCreateInfo, pAllocator, pPrivateDataSlot );
+    }
+
+
+    void vkDestroyPrivateDataSlotEXT( VkDevice device, VkPrivateDataSlot privateDataSlot, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyPrivateDataSlotEXT( device, privateDataSlot, pAllocator );
+    }
+
+
+    VkResult vkSetPrivateDataEXT( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t data ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkSetPrivateDataEXT( device, objectType, objectHandle, privateDataSlot, data );
+    }
+
+
+    void vkGetPrivateDataEXT( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t * pData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPrivateDataEXT( device, objectType, objectHandle, privateDataSlot, pData );
+    }
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  //=== VK_KHR_video_encode_queue ===
+
+
+    void vkCmdEncodeVideoKHR( VkCommandBuffer commandBuffer, const VkVideoEncodeInfoKHR * pEncodeInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdEncodeVideoKHR( commandBuffer, pEncodeInfo );
+    }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+  //=== VK_EXT_metal_objects ===
+
+
+    void vkExportMetalObjectsEXT( VkDevice device, VkExportMetalObjectsInfoEXT * pMetalObjectsInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkExportMetalObjectsEXT( device, pMetalObjectsInfo );
+    }
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  //=== VK_KHR_synchronization2 ===
+
+
+    void vkCmdSetEvent2KHR( VkCommandBuffer commandBuffer, VkEvent event, const VkDependencyInfo * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetEvent2KHR( commandBuffer, event, pDependencyInfo );
+    }
+
+
+    void vkCmdResetEvent2KHR( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2 stageMask ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdResetEvent2KHR( commandBuffer, event, stageMask );
+    }
+
+
+    void vkCmdWaitEvents2KHR( VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent * pEvents, const VkDependencyInfo * pDependencyInfos ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdWaitEvents2KHR( commandBuffer, eventCount, pEvents, pDependencyInfos );
+    }
+
+
+    void vkCmdPipelineBarrier2KHR( VkCommandBuffer commandBuffer, const VkDependencyInfo * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdPipelineBarrier2KHR( commandBuffer, pDependencyInfo );
+    }
+
+
+    void vkCmdWriteTimestamp2KHR( VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkQueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdWriteTimestamp2KHR( commandBuffer, stage, queryPool, query );
+    }
+
+
+    VkResult vkQueueSubmit2KHR( VkQueue queue, uint32_t submitCount, const VkSubmitInfo2 * pSubmits, VkFence fence ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkQueueSubmit2KHR( queue, submitCount, pSubmits, fence );
+    }
+
+
+    void vkCmdWriteBufferMarker2AMD( VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdWriteBufferMarker2AMD( commandBuffer, stage, dstBuffer, dstOffset, marker );
+    }
+
+
+    void vkGetQueueCheckpointData2NV( VkQueue queue, uint32_t * pCheckpointDataCount, VkCheckpointData2NV * pCheckpointData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetQueueCheckpointData2NV( queue, pCheckpointDataCount, pCheckpointData );
+    }
+
+  //=== VK_EXT_descriptor_buffer ===
+
+
+    void vkGetDescriptorSetLayoutSizeEXT( VkDevice device, VkDescriptorSetLayout layout, VkDeviceSize * pLayoutSizeInBytes ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDescriptorSetLayoutSizeEXT( device, layout, pLayoutSizeInBytes );
+    }
+
+
+    void vkGetDescriptorSetLayoutBindingOffsetEXT( VkDevice device, VkDescriptorSetLayout layout, uint32_t binding, VkDeviceSize * pOffset ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDescriptorSetLayoutBindingOffsetEXT( device, layout, binding, pOffset );
+    }
+
+
+    void vkGetDescriptorEXT( VkDevice device, const VkDescriptorGetInfoEXT * pDescriptorInfo, size_t dataSize, void * pDescriptor ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDescriptorEXT( device, pDescriptorInfo, dataSize, pDescriptor );
+    }
+
+
+    void vkCmdBindDescriptorBuffersEXT( VkCommandBuffer commandBuffer, uint32_t bufferCount, const VkDescriptorBufferBindingInfoEXT * pBindingInfos ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBindDescriptorBuffersEXT( commandBuffer, bufferCount, pBindingInfos );
+    }
+
+
+    void vkCmdSetDescriptorBufferOffsetsEXT( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t setCount, const uint32_t * pBufferIndices, const VkDeviceSize * pOffsets ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetDescriptorBufferOffsetsEXT( commandBuffer, pipelineBindPoint, layout, firstSet, setCount, pBufferIndices, pOffsets );
+    }
+
+
+    void vkCmdBindDescriptorBufferEmbeddedSamplersEXT( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBindDescriptorBufferEmbeddedSamplersEXT( commandBuffer, pipelineBindPoint, layout, set );
+    }
+
+
+    VkResult vkGetBufferOpaqueCaptureDescriptorDataEXT( VkDevice device, const VkBufferCaptureDescriptorDataInfoEXT * pInfo, void * pData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetBufferOpaqueCaptureDescriptorDataEXT( device, pInfo, pData );
+    }
+
+
+    VkResult vkGetImageOpaqueCaptureDescriptorDataEXT( VkDevice device, const VkImageCaptureDescriptorDataInfoEXT * pInfo, void * pData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetImageOpaqueCaptureDescriptorDataEXT( device, pInfo, pData );
+    }
+
+
+    VkResult vkGetImageViewOpaqueCaptureDescriptorDataEXT( VkDevice device, const VkImageViewCaptureDescriptorDataInfoEXT * pInfo, void * pData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetImageViewOpaqueCaptureDescriptorDataEXT( device, pInfo, pData );
+    }
+
+
+    VkResult vkGetSamplerOpaqueCaptureDescriptorDataEXT( VkDevice device, const VkSamplerCaptureDescriptorDataInfoEXT * pInfo, void * pData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetSamplerOpaqueCaptureDescriptorDataEXT( device, pInfo, pData );
+    }
+
+
+    VkResult vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( VkDevice device, const VkAccelerationStructureCaptureDescriptorDataInfoEXT * pInfo, void * pData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( device, pInfo, pData );
+    }
+
+  //=== VK_NV_fragment_shading_rate_enums ===
+
+
+    void vkCmdSetFragmentShadingRateEnumNV( VkCommandBuffer commandBuffer, VkFragmentShadingRateNV shadingRate, const VkFragmentShadingRateCombinerOpKHR combinerOps[2] ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetFragmentShadingRateEnumNV( commandBuffer, shadingRate, combinerOps );
+    }
+
+  //=== VK_EXT_mesh_shader ===
+
+
+    void vkCmdDrawMeshTasksEXT( VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawMeshTasksEXT( commandBuffer, groupCountX, groupCountY, groupCountZ );
+    }
+
+
+    void vkCmdDrawMeshTasksIndirectEXT( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawMeshTasksIndirectEXT( commandBuffer, buffer, offset, drawCount, stride );
+    }
+
+
+    void vkCmdDrawMeshTasksIndirectCountEXT( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawMeshTasksIndirectCountEXT( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
+    }
+
+  //=== VK_KHR_copy_commands2 ===
+
+
+    void vkCmdCopyBuffer2KHR( VkCommandBuffer commandBuffer, const VkCopyBufferInfo2 * pCopyBufferInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCopyBuffer2KHR( commandBuffer, pCopyBufferInfo );
+    }
+
+
+    void vkCmdCopyImage2KHR( VkCommandBuffer commandBuffer, const VkCopyImageInfo2 * pCopyImageInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCopyImage2KHR( commandBuffer, pCopyImageInfo );
+    }
+
+
+    void vkCmdCopyBufferToImage2KHR( VkCommandBuffer commandBuffer, const VkCopyBufferToImageInfo2 * pCopyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCopyBufferToImage2KHR( commandBuffer, pCopyBufferToImageInfo );
+    }
+
+
+    void vkCmdCopyImageToBuffer2KHR( VkCommandBuffer commandBuffer, const VkCopyImageToBufferInfo2 * pCopyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCopyImageToBuffer2KHR( commandBuffer, pCopyImageToBufferInfo );
+    }
+
+
+    void vkCmdBlitImage2KHR( VkCommandBuffer commandBuffer, const VkBlitImageInfo2 * pBlitImageInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBlitImage2KHR( commandBuffer, pBlitImageInfo );
+    }
+
+
+    void vkCmdResolveImage2KHR( VkCommandBuffer commandBuffer, const VkResolveImageInfo2 * pResolveImageInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdResolveImage2KHR( commandBuffer, pResolveImageInfo );
+    }
+
+  //=== VK_EXT_image_compression_control ===
+
+
+    void vkGetImageSubresourceLayout2EXT( VkDevice device, VkImage image, const VkImageSubresource2EXT * pSubresource, VkSubresourceLayout2EXT * pLayout ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetImageSubresourceLayout2EXT( device, image, pSubresource, pLayout );
+    }
+
+  //=== VK_EXT_device_fault ===
+
+
+    VkResult vkGetDeviceFaultInfoEXT( VkDevice device, VkDeviceFaultCountsEXT * pFaultCounts, VkDeviceFaultInfoEXT * pFaultInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceFaultInfoEXT( device, pFaultCounts, pFaultInfo );
+    }
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_NV_acquire_winrt_display ===
+
+
+    VkResult vkAcquireWinrtDisplayNV( VkPhysicalDevice physicalDevice, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkAcquireWinrtDisplayNV( physicalDevice, display );
+    }
+
+
+    VkResult vkGetWinrtDisplayNV( VkPhysicalDevice physicalDevice, uint32_t deviceRelativeId, VkDisplayKHR * pDisplay ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetWinrtDisplayNV( physicalDevice, deviceRelativeId, pDisplay );
+    }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
+  //=== VK_EXT_directfb_surface ===
+
+
+    VkResult vkCreateDirectFBSurfaceEXT( VkInstance instance, const VkDirectFBSurfaceCreateInfoEXT * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateDirectFBSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface );
+    }
+
+
+    VkBool32 vkGetPhysicalDeviceDirectFBPresentationSupportEXT( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, IDirectFB * dfb ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceDirectFBPresentationSupportEXT( physicalDevice, queueFamilyIndex, dfb );
+    }
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+
+  //=== VK_KHR_ray_tracing_pipeline ===
+
+
+    void vkCmdTraceRaysKHR( VkCommandBuffer commandBuffer, const VkStridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, const VkStridedDeviceAddressRegionKHR * pMissShaderBindingTable, const VkStridedDeviceAddressRegionKHR * pHitShaderBindingTable, const VkStridedDeviceAddressRegionKHR * pCallableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdTraceRaysKHR( commandBuffer, pRaygenShaderBindingTable, pMissShaderBindingTable, pHitShaderBindingTable, pCallableShaderBindingTable, width, height, depth );
+    }
+
+
+    VkResult vkCreateRayTracingPipelinesKHR( VkDevice device, VkDeferredOperationKHR deferredOperation, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoKHR * pCreateInfos, const VkAllocationCallbacks * pAllocator, VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateRayTracingPipelinesKHR( device, deferredOperation, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines );
+    }
+
+
+    VkResult vkGetRayTracingShaderGroupHandlesKHR( VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void * pData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetRayTracingShaderGroupHandlesKHR( device, pipeline, firstGroup, groupCount, dataSize, pData );
+    }
+
+
+    VkResult vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void * pData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( device, pipeline, firstGroup, groupCount, dataSize, pData );
+    }
+
+
+    void vkCmdTraceRaysIndirectKHR( VkCommandBuffer commandBuffer, const VkStridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, const VkStridedDeviceAddressRegionKHR * pMissShaderBindingTable, const VkStridedDeviceAddressRegionKHR * pHitShaderBindingTable, const VkStridedDeviceAddressRegionKHR * pCallableShaderBindingTable, VkDeviceAddress indirectDeviceAddress ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdTraceRaysIndirectKHR( commandBuffer, pRaygenShaderBindingTable, pMissShaderBindingTable, pHitShaderBindingTable, pCallableShaderBindingTable, indirectDeviceAddress );
+    }
+
+
+    VkDeviceSize vkGetRayTracingShaderGroupStackSizeKHR( VkDevice device, VkPipeline pipeline, uint32_t group, VkShaderGroupShaderKHR groupShader ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetRayTracingShaderGroupStackSizeKHR( device, pipeline, group, groupShader );
+    }
+
+
+    void vkCmdSetRayTracingPipelineStackSizeKHR( VkCommandBuffer commandBuffer, uint32_t pipelineStackSize ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetRayTracingPipelineStackSizeKHR( commandBuffer, pipelineStackSize );
+    }
+
+  //=== VK_EXT_vertex_input_dynamic_state ===
+
+
+    void vkCmdSetVertexInputEXT( VkCommandBuffer commandBuffer, uint32_t vertexBindingDescriptionCount, const VkVertexInputBindingDescription2EXT * pVertexBindingDescriptions, uint32_t vertexAttributeDescriptionCount, const VkVertexInputAttributeDescription2EXT * pVertexAttributeDescriptions ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetVertexInputEXT( commandBuffer, vertexBindingDescriptionCount, pVertexBindingDescriptions, vertexAttributeDescriptionCount, pVertexAttributeDescriptions );
+    }
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_external_memory ===
+
+
+    VkResult vkGetMemoryZirconHandleFUCHSIA( VkDevice device, const VkMemoryGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, zx_handle_t * pZirconHandle ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetMemoryZirconHandleFUCHSIA( device, pGetZirconHandleInfo, pZirconHandle );
+    }
+
+
+    VkResult vkGetMemoryZirconHandlePropertiesFUCHSIA( VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, zx_handle_t zirconHandle, VkMemoryZirconHandlePropertiesFUCHSIA * pMemoryZirconHandleProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetMemoryZirconHandlePropertiesFUCHSIA( device, handleType, zirconHandle, pMemoryZirconHandleProperties );
+    }
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_external_semaphore ===
+
+
+    VkResult vkImportSemaphoreZirconHandleFUCHSIA( VkDevice device, const VkImportSemaphoreZirconHandleInfoFUCHSIA * pImportSemaphoreZirconHandleInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkImportSemaphoreZirconHandleFUCHSIA( device, pImportSemaphoreZirconHandleInfo );
+    }
+
+
+    VkResult vkGetSemaphoreZirconHandleFUCHSIA( VkDevice device, const VkSemaphoreGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, zx_handle_t * pZirconHandle ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetSemaphoreZirconHandleFUCHSIA( device, pGetZirconHandleInfo, pZirconHandle );
+    }
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_buffer_collection ===
+
+
+    VkResult vkCreateBufferCollectionFUCHSIA( VkDevice device, const VkBufferCollectionCreateInfoFUCHSIA * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkBufferCollectionFUCHSIA * pCollection ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateBufferCollectionFUCHSIA( device, pCreateInfo, pAllocator, pCollection );
+    }
+
+
+    VkResult vkSetBufferCollectionImageConstraintsFUCHSIA( VkDevice device, VkBufferCollectionFUCHSIA collection, const VkImageConstraintsInfoFUCHSIA * pImageConstraintsInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkSetBufferCollectionImageConstraintsFUCHSIA( device, collection, pImageConstraintsInfo );
+    }
+
+
+    VkResult vkSetBufferCollectionBufferConstraintsFUCHSIA( VkDevice device, VkBufferCollectionFUCHSIA collection, const VkBufferConstraintsInfoFUCHSIA * pBufferConstraintsInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkSetBufferCollectionBufferConstraintsFUCHSIA( device, collection, pBufferConstraintsInfo );
+    }
+
+
+    void vkDestroyBufferCollectionFUCHSIA( VkDevice device, VkBufferCollectionFUCHSIA collection, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyBufferCollectionFUCHSIA( device, collection, pAllocator );
+    }
+
+
+    VkResult vkGetBufferCollectionPropertiesFUCHSIA( VkDevice device, VkBufferCollectionFUCHSIA collection, VkBufferCollectionPropertiesFUCHSIA * pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetBufferCollectionPropertiesFUCHSIA( device, collection, pProperties );
+    }
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  //=== VK_HUAWEI_subpass_shading ===
+
+
+    VkResult vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( VkDevice device, VkRenderPass renderpass, VkExtent2D * pMaxWorkgroupSize ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( device, renderpass, pMaxWorkgroupSize );
+    }
+
+
+    void vkCmdSubpassShadingHUAWEI( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSubpassShadingHUAWEI( commandBuffer );
+    }
+
+  //=== VK_HUAWEI_invocation_mask ===
+
+
+    void vkCmdBindInvocationMaskHUAWEI( VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBindInvocationMaskHUAWEI( commandBuffer, imageView, imageLayout );
+    }
+
+  //=== VK_NV_external_memory_rdma ===
+
+
+    VkResult vkGetMemoryRemoteAddressNV( VkDevice device, const VkMemoryGetRemoteAddressInfoNV * pMemoryGetRemoteAddressInfo, VkRemoteAddressNV * pAddress ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetMemoryRemoteAddressNV( device, pMemoryGetRemoteAddressInfo, pAddress );
+    }
+
+  //=== VK_EXT_pipeline_properties ===
+
+
+    VkResult vkGetPipelinePropertiesEXT( VkDevice device, const VkPipelineInfoEXT * pPipelineInfo, VkBaseOutStructure * pPipelineProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPipelinePropertiesEXT( device, pPipelineInfo, pPipelineProperties );
+    }
+
+  //=== VK_EXT_extended_dynamic_state2 ===
+
+
+    void vkCmdSetPatchControlPointsEXT( VkCommandBuffer commandBuffer, uint32_t patchControlPoints ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetPatchControlPointsEXT( commandBuffer, patchControlPoints );
+    }
+
+
+    void vkCmdSetRasterizerDiscardEnableEXT( VkCommandBuffer commandBuffer, VkBool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetRasterizerDiscardEnableEXT( commandBuffer, rasterizerDiscardEnable );
+    }
+
+
+    void vkCmdSetDepthBiasEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetDepthBiasEnableEXT( commandBuffer, depthBiasEnable );
+    }
+
+
+    void vkCmdSetLogicOpEXT( VkCommandBuffer commandBuffer, VkLogicOp logicOp ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetLogicOpEXT( commandBuffer, logicOp );
+    }
+
+
+    void vkCmdSetPrimitiveRestartEnableEXT( VkCommandBuffer commandBuffer, VkBool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetPrimitiveRestartEnableEXT( commandBuffer, primitiveRestartEnable );
+    }
+
+#if defined( VK_USE_PLATFORM_SCREEN_QNX )
+  //=== VK_QNX_screen_surface ===
+
+
+    VkResult vkCreateScreenSurfaceQNX( VkInstance instance, const VkScreenSurfaceCreateInfoQNX * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateScreenSurfaceQNX( instance, pCreateInfo, pAllocator, pSurface );
+    }
+
+
+    VkBool32 vkGetPhysicalDeviceScreenPresentationSupportQNX( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, struct _screen_window * window ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceScreenPresentationSupportQNX( physicalDevice, queueFamilyIndex, window );
+    }
+#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+
+  //=== VK_EXT_color_write_enable ===
+
+
+    void vkCmdSetColorWriteEnableEXT( VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkBool32 * pColorWriteEnables ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetColorWriteEnableEXT( commandBuffer, attachmentCount, pColorWriteEnables );
+    }
+
+  //=== VK_KHR_ray_tracing_maintenance1 ===
+
+
+    void vkCmdTraceRaysIndirect2KHR( VkCommandBuffer commandBuffer, VkDeviceAddress indirectDeviceAddress ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdTraceRaysIndirect2KHR( commandBuffer, indirectDeviceAddress );
+    }
+
+  //=== VK_EXT_multi_draw ===
+
+
+    void vkCmdDrawMultiEXT( VkCommandBuffer commandBuffer, uint32_t drawCount, const VkMultiDrawInfoEXT * pVertexInfo, uint32_t instanceCount, uint32_t firstInstance, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawMultiEXT( commandBuffer, drawCount, pVertexInfo, instanceCount, firstInstance, stride );
+    }
+
+
+    void vkCmdDrawMultiIndexedEXT( VkCommandBuffer commandBuffer, uint32_t drawCount, const VkMultiDrawIndexedInfoEXT * pIndexInfo, uint32_t instanceCount, uint32_t firstInstance, uint32_t stride, const int32_t * pVertexOffset ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawMultiIndexedEXT( commandBuffer, drawCount, pIndexInfo, instanceCount, firstInstance, stride, pVertexOffset );
+    }
+
+  //=== VK_EXT_opacity_micromap ===
+
+
+    VkResult vkCreateMicromapEXT( VkDevice device, const VkMicromapCreateInfoEXT * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkMicromapEXT * pMicromap ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateMicromapEXT( device, pCreateInfo, pAllocator, pMicromap );
+    }
+
+
+    void vkDestroyMicromapEXT( VkDevice device, VkMicromapEXT micromap, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyMicromapEXT( device, micromap, pAllocator );
+    }
+
+
+    void vkCmdBuildMicromapsEXT( VkCommandBuffer commandBuffer, uint32_t infoCount, const VkMicromapBuildInfoEXT * pInfos ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBuildMicromapsEXT( commandBuffer, infoCount, pInfos );
+    }
+
+
+    VkResult vkBuildMicromapsEXT( VkDevice device, VkDeferredOperationKHR deferredOperation, uint32_t infoCount, const VkMicromapBuildInfoEXT * pInfos ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkBuildMicromapsEXT( device, deferredOperation, infoCount, pInfos );
+    }
+
+
+    VkResult vkCopyMicromapEXT( VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyMicromapInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCopyMicromapEXT( device, deferredOperation, pInfo );
+    }
+
+
+    VkResult vkCopyMicromapToMemoryEXT( VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyMicromapToMemoryInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCopyMicromapToMemoryEXT( device, deferredOperation, pInfo );
+    }
+
+
+    VkResult vkCopyMemoryToMicromapEXT( VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyMemoryToMicromapInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCopyMemoryToMicromapEXT( device, deferredOperation, pInfo );
+    }
+
+
+    VkResult vkWriteMicromapsPropertiesEXT( VkDevice device, uint32_t micromapCount, const VkMicromapEXT * pMicromaps, VkQueryType queryType, size_t dataSize, void * pData, size_t stride ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkWriteMicromapsPropertiesEXT( device, micromapCount, pMicromaps, queryType, dataSize, pData, stride );
+    }
+
+
+    void vkCmdCopyMicromapEXT( VkCommandBuffer commandBuffer, const VkCopyMicromapInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCopyMicromapEXT( commandBuffer, pInfo );
+    }
+
+
+    void vkCmdCopyMicromapToMemoryEXT( VkCommandBuffer commandBuffer, const VkCopyMicromapToMemoryInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCopyMicromapToMemoryEXT( commandBuffer, pInfo );
+    }
+
+
+    void vkCmdCopyMemoryToMicromapEXT( VkCommandBuffer commandBuffer, const VkCopyMemoryToMicromapInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCopyMemoryToMicromapEXT( commandBuffer, pInfo );
+    }
+
+
+    void vkCmdWriteMicromapsPropertiesEXT( VkCommandBuffer commandBuffer, uint32_t micromapCount, const VkMicromapEXT * pMicromaps, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdWriteMicromapsPropertiesEXT( commandBuffer, micromapCount, pMicromaps, queryType, queryPool, firstQuery );
+    }
+
+
+    void vkGetDeviceMicromapCompatibilityEXT( VkDevice device, const VkMicromapVersionInfoEXT * pVersionInfo, VkAccelerationStructureCompatibilityKHR * pCompatibility ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceMicromapCompatibilityEXT( device, pVersionInfo, pCompatibility );
+    }
+
+
+    void vkGetMicromapBuildSizesEXT( VkDevice device, VkAccelerationStructureBuildTypeKHR buildType, const VkMicromapBuildInfoEXT * pBuildInfo, VkMicromapBuildSizesInfoEXT * pSizeInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetMicromapBuildSizesEXT( device, buildType, pBuildInfo, pSizeInfo );
+    }
+
+  //=== VK_HUAWEI_cluster_culling_shader ===
+
+
+    void vkCmdDrawClusterHUAWEI( VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawClusterHUAWEI( commandBuffer, groupCountX, groupCountY, groupCountZ );
+    }
+
+
+    void vkCmdDrawClusterIndirectHUAWEI( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawClusterIndirectHUAWEI( commandBuffer, buffer, offset );
+    }
+
+  //=== VK_EXT_pageable_device_local_memory ===
+
+
+    void vkSetDeviceMemoryPriorityEXT( VkDevice device, VkDeviceMemory memory, float priority ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkSetDeviceMemoryPriorityEXT( device, memory, priority );
+    }
+
+  //=== VK_KHR_maintenance4 ===
+
+
+    void vkGetDeviceBufferMemoryRequirementsKHR( VkDevice device, const VkDeviceBufferMemoryRequirements * pInfo, VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceBufferMemoryRequirementsKHR( device, pInfo, pMemoryRequirements );
+    }
+
+
+    void vkGetDeviceImageMemoryRequirementsKHR( VkDevice device, const VkDeviceImageMemoryRequirements * pInfo, VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceImageMemoryRequirementsKHR( device, pInfo, pMemoryRequirements );
+    }
+
+
+    void vkGetDeviceImageSparseMemoryRequirementsKHR( VkDevice device, const VkDeviceImageMemoryRequirements * pInfo, uint32_t * pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceImageSparseMemoryRequirementsKHR( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements );
+    }
+
+  //=== VK_VALVE_descriptor_set_host_mapping ===
+
+
+    void vkGetDescriptorSetLayoutHostMappingInfoVALVE( VkDevice device, const VkDescriptorSetBindingReferenceVALVE * pBindingReference, VkDescriptorSetLayoutHostMappingInfoVALVE * pHostMapping ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDescriptorSetLayoutHostMappingInfoVALVE( device, pBindingReference, pHostMapping );
+    }
+
+
+    void vkGetDescriptorSetHostMappingVALVE( VkDevice device, VkDescriptorSet descriptorSet, void ** ppData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDescriptorSetHostMappingVALVE( device, descriptorSet, ppData );
+    }
+
+  // NOTE(review): auto-generated Vulkan-Hpp static-dispatch members — each one
+  // forwards verbatim to the identically named global Vulkan C entry point
+  // (prototypes from <vulkan/vulkan.h>). Do not hand-edit; regenerate instead.
+  //=== VK_NV_copy_memory_indirect ===
+
+
+    void vkCmdCopyMemoryIndirectNV( VkCommandBuffer commandBuffer, VkDeviceAddress copyBufferAddress, uint32_t copyCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCopyMemoryIndirectNV( commandBuffer, copyBufferAddress, copyCount, stride );
+    }
+
+
+    void vkCmdCopyMemoryToImageIndirectNV( VkCommandBuffer commandBuffer, VkDeviceAddress copyBufferAddress, uint32_t copyCount, uint32_t stride, VkImage dstImage, VkImageLayout dstImageLayout, const VkImageSubresourceLayers * pImageSubresources ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCopyMemoryToImageIndirectNV( commandBuffer, copyBufferAddress, copyCount, stride, dstImage, dstImageLayout, pImageSubresources );
+    }
+
+  //=== VK_NV_memory_decompression ===
+
+
+    void vkCmdDecompressMemoryNV( VkCommandBuffer commandBuffer, uint32_t decompressRegionCount, const VkDecompressMemoryRegionNV * pDecompressMemoryRegions ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDecompressMemoryNV( commandBuffer, decompressRegionCount, pDecompressMemoryRegions );
+    }
+
+
+    void vkCmdDecompressMemoryIndirectCountNV( VkCommandBuffer commandBuffer, VkDeviceAddress indirectCommandsAddress, VkDeviceAddress indirectCommandsCountAddress, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDecompressMemoryIndirectCountNV( commandBuffer, indirectCommandsAddress, indirectCommandsCountAddress, stride );
+    }
+
+  //=== VK_EXT_extended_dynamic_state3 ===
+
+
+    void vkCmdSetTessellationDomainOriginEXT( VkCommandBuffer commandBuffer, VkTessellationDomainOrigin domainOrigin ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetTessellationDomainOriginEXT( commandBuffer, domainOrigin );
+    }
+
+
+    void vkCmdSetDepthClampEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthClampEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetDepthClampEnableEXT( commandBuffer, depthClampEnable );
+    }
+
+
+    void vkCmdSetPolygonModeEXT( VkCommandBuffer commandBuffer, VkPolygonMode polygonMode ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetPolygonModeEXT( commandBuffer, polygonMode );
+    }
+
+
+    void vkCmdSetRasterizationSamplesEXT( VkCommandBuffer commandBuffer, VkSampleCountFlagBits rasterizationSamples ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetRasterizationSamplesEXT( commandBuffer, rasterizationSamples );
+    }
+
+
+    void vkCmdSetSampleMaskEXT( VkCommandBuffer commandBuffer, VkSampleCountFlagBits samples, const VkSampleMask * pSampleMask ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetSampleMaskEXT( commandBuffer, samples, pSampleMask );
+    }
+
+
+    void vkCmdSetAlphaToCoverageEnableEXT( VkCommandBuffer commandBuffer, VkBool32 alphaToCoverageEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetAlphaToCoverageEnableEXT( commandBuffer, alphaToCoverageEnable );
+    }
+
+
+    void vkCmdSetAlphaToOneEnableEXT( VkCommandBuffer commandBuffer, VkBool32 alphaToOneEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetAlphaToOneEnableEXT( commandBuffer, alphaToOneEnable );
+    }
+
+
+    void vkCmdSetLogicOpEnableEXT( VkCommandBuffer commandBuffer, VkBool32 logicOpEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetLogicOpEnableEXT( commandBuffer, logicOpEnable );
+    }
+
+
+    void vkCmdSetColorBlendEnableEXT( VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, const VkBool32 * pColorBlendEnables ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetColorBlendEnableEXT( commandBuffer, firstAttachment, attachmentCount, pColorBlendEnables );
+    }
+
+
+    void vkCmdSetColorBlendEquationEXT( VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, const VkColorBlendEquationEXT * pColorBlendEquations ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetColorBlendEquationEXT( commandBuffer, firstAttachment, attachmentCount, pColorBlendEquations );
+    }
+
+
+    void vkCmdSetColorWriteMaskEXT( VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, const VkColorComponentFlags * pColorWriteMasks ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetColorWriteMaskEXT( commandBuffer, firstAttachment, attachmentCount, pColorWriteMasks );
+    }
+
+
+    void vkCmdSetRasterizationStreamEXT( VkCommandBuffer commandBuffer, uint32_t rasterizationStream ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetRasterizationStreamEXT( commandBuffer, rasterizationStream );
+    }
+
+
+    void vkCmdSetConservativeRasterizationModeEXT( VkCommandBuffer commandBuffer, VkConservativeRasterizationModeEXT conservativeRasterizationMode ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetConservativeRasterizationModeEXT( commandBuffer, conservativeRasterizationMode );
+    }
+
+
+    void vkCmdSetExtraPrimitiveOverestimationSizeEXT( VkCommandBuffer commandBuffer, float extraPrimitiveOverestimationSize ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetExtraPrimitiveOverestimationSizeEXT( commandBuffer, extraPrimitiveOverestimationSize );
+    }
+
+
+    void vkCmdSetDepthClipEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthClipEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetDepthClipEnableEXT( commandBuffer, depthClipEnable );
+    }
+
+
+    void vkCmdSetSampleLocationsEnableEXT( VkCommandBuffer commandBuffer, VkBool32 sampleLocationsEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetSampleLocationsEnableEXT( commandBuffer, sampleLocationsEnable );
+    }
+
+
+    void vkCmdSetColorBlendAdvancedEXT( VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, const VkColorBlendAdvancedEXT * pColorBlendAdvanced ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetColorBlendAdvancedEXT( commandBuffer, firstAttachment, attachmentCount, pColorBlendAdvanced );
+    }
+
+
+    void vkCmdSetProvokingVertexModeEXT( VkCommandBuffer commandBuffer, VkProvokingVertexModeEXT provokingVertexMode ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetProvokingVertexModeEXT( commandBuffer, provokingVertexMode );
+    }
+
+
+    void vkCmdSetLineRasterizationModeEXT( VkCommandBuffer commandBuffer, VkLineRasterizationModeEXT lineRasterizationMode ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetLineRasterizationModeEXT( commandBuffer, lineRasterizationMode );
+    }
+
+
+    void vkCmdSetLineStippleEnableEXT( VkCommandBuffer commandBuffer, VkBool32 stippledLineEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetLineStippleEnableEXT( commandBuffer, stippledLineEnable );
+    }
+
+
+    void vkCmdSetDepthClipNegativeOneToOneEXT( VkCommandBuffer commandBuffer, VkBool32 negativeOneToOne ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetDepthClipNegativeOneToOneEXT( commandBuffer, negativeOneToOne );
+    }
+
+
+    void vkCmdSetViewportWScalingEnableNV( VkCommandBuffer commandBuffer, VkBool32 viewportWScalingEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetViewportWScalingEnableNV( commandBuffer, viewportWScalingEnable );
+    }
+
+
+    void vkCmdSetViewportSwizzleNV( VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewportSwizzleNV * pViewportSwizzles ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetViewportSwizzleNV( commandBuffer, firstViewport, viewportCount, pViewportSwizzles );
+    }
+
+
+    void vkCmdSetCoverageToColorEnableNV( VkCommandBuffer commandBuffer, VkBool32 coverageToColorEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetCoverageToColorEnableNV( commandBuffer, coverageToColorEnable );
+    }
+
+
+    void vkCmdSetCoverageToColorLocationNV( VkCommandBuffer commandBuffer, uint32_t coverageToColorLocation ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetCoverageToColorLocationNV( commandBuffer, coverageToColorLocation );
+    }
+
+
+    void vkCmdSetCoverageModulationModeNV( VkCommandBuffer commandBuffer, VkCoverageModulationModeNV coverageModulationMode ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetCoverageModulationModeNV( commandBuffer, coverageModulationMode );
+    }
+
+
+    void vkCmdSetCoverageModulationTableEnableNV( VkCommandBuffer commandBuffer, VkBool32 coverageModulationTableEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetCoverageModulationTableEnableNV( commandBuffer, coverageModulationTableEnable );
+    }
+
+
+    void vkCmdSetCoverageModulationTableNV( VkCommandBuffer commandBuffer, uint32_t coverageModulationTableCount, const float * pCoverageModulationTable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetCoverageModulationTableNV( commandBuffer, coverageModulationTableCount, pCoverageModulationTable );
+    }
+
+
+    void vkCmdSetShadingRateImageEnableNV( VkCommandBuffer commandBuffer, VkBool32 shadingRateImageEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetShadingRateImageEnableNV( commandBuffer, shadingRateImageEnable );
+    }
+
+
+    void vkCmdSetRepresentativeFragmentTestEnableNV( VkCommandBuffer commandBuffer, VkBool32 representativeFragmentTestEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetRepresentativeFragmentTestEnableNV( commandBuffer, representativeFragmentTestEnable );
+    }
+
+
+    void vkCmdSetCoverageReductionModeNV( VkCommandBuffer commandBuffer, VkCoverageReductionModeNV coverageReductionMode ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetCoverageReductionModeNV( commandBuffer, coverageReductionMode );
+    }
+
+  //=== VK_EXT_shader_module_identifier ===
+
+
+    void vkGetShaderModuleIdentifierEXT( VkDevice device, VkShaderModule shaderModule, VkShaderModuleIdentifierEXT * pIdentifier ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetShaderModuleIdentifierEXT( device, shaderModule, pIdentifier );
+    }
+
+
+    void vkGetShaderModuleCreateInfoIdentifierEXT( VkDevice device, const VkShaderModuleCreateInfo * pCreateInfo, VkShaderModuleIdentifierEXT * pIdentifier ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetShaderModuleCreateInfoIdentifierEXT( device, pCreateInfo, pIdentifier );
+    }
+
+  //=== VK_NV_optical_flow ===
+
+
+    VkResult vkGetPhysicalDeviceOpticalFlowImageFormatsNV( VkPhysicalDevice physicalDevice, const VkOpticalFlowImageFormatInfoNV * pOpticalFlowImageFormatInfo, uint32_t * pFormatCount, VkOpticalFlowImageFormatPropertiesNV * pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceOpticalFlowImageFormatsNV( physicalDevice, pOpticalFlowImageFormatInfo, pFormatCount, pImageFormatProperties );
+    }
+
+
+    VkResult vkCreateOpticalFlowSessionNV( VkDevice device, const VkOpticalFlowSessionCreateInfoNV * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkOpticalFlowSessionNV * pSession ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateOpticalFlowSessionNV( device, pCreateInfo, pAllocator, pSession );
+    }
+
+
+    void vkDestroyOpticalFlowSessionNV( VkDevice device, VkOpticalFlowSessionNV session, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyOpticalFlowSessionNV( device, session, pAllocator );
+    }
+
+
+    VkResult vkBindOpticalFlowSessionImageNV( VkDevice device, VkOpticalFlowSessionNV session, VkOpticalFlowSessionBindingPointNV bindingPoint, VkImageView view, VkImageLayout layout ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkBindOpticalFlowSessionImageNV( device, session, bindingPoint, view, layout );
+    }
+
+
+    void vkCmdOpticalFlowExecuteNV( VkCommandBuffer commandBuffer, VkOpticalFlowSessionNV session, const VkOpticalFlowExecuteInfoNV * pExecuteInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdOpticalFlowExecuteNV( commandBuffer, session, pExecuteInfo );
+    }
+
+  //=== VK_QCOM_tile_properties ===
+
+
+    VkResult vkGetFramebufferTilePropertiesQCOM( VkDevice device, VkFramebuffer framebuffer, uint32_t * pPropertiesCount, VkTilePropertiesQCOM * pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetFramebufferTilePropertiesQCOM( device, framebuffer, pPropertiesCount, pProperties );
+    }
+
+
+    VkResult vkGetDynamicRenderingTilePropertiesQCOM( VkDevice device, const VkRenderingInfo * pRenderingInfo, VkTilePropertiesQCOM * pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDynamicRenderingTilePropertiesQCOM( device, pRenderingInfo, pProperties );
+    }
+
+  };
+#endif
+
+  // Forward declaration; the dynamic dispatcher class is defined elsewhere.
+  class DispatchLoaderDynamic;
+#if !defined(VULKAN_HPP_DISPATCH_LOADER_DYNAMIC)
+  // Default: use the dynamic loader when the Vulkan C prototypes are not
+  // available (VK_NO_PROTOTYPES), otherwise the static one. User-overridable.
+# if defined( VK_NO_PROTOTYPES )
+#  define VULKAN_HPP_DISPATCH_LOADER_DYNAMIC 1
+# else
+#  define VULKAN_HPP_DISPATCH_LOADER_DYNAMIC 0
+# endif
+#endif
+
+// Select the export/import decoration for the shared default dispatcher
+// symbol: dllexport/dllimport under MSVC, default visibility under
+// clang/gcc when exporting, empty otherwise.
+#if !defined( VULKAN_HPP_STORAGE_API )
+#  if defined( VULKAN_HPP_STORAGE_SHARED )
+#    if defined( _MSC_VER )
+#      if defined( VULKAN_HPP_STORAGE_SHARED_EXPORT )
+#        define VULKAN_HPP_STORAGE_API __declspec( dllexport )
+#      else
+#        define VULKAN_HPP_STORAGE_API __declspec( dllimport )
+#      endif
+#    elif defined( __clang__ ) || defined( __GNUC__ )
+#      if defined( VULKAN_HPP_STORAGE_SHARED_EXPORT )
+#        define VULKAN_HPP_STORAGE_API __attribute__( ( visibility( "default" ) ) )
+#      else
+#        define VULKAN_HPP_STORAGE_API
+#      endif
+#    else
+#      define VULKAN_HPP_STORAGE_API
+#      pragma warning Unknown import / export semantics
+#    endif
+#  else
+#    define VULKAN_HPP_STORAGE_API
+#  endif
+#endif
+
+// Define the default dispatcher expression: either the (user-instantiated)
+// global DispatchLoaderDynamic, or a lazily constructed function-local
+// DispatchLoaderStatic singleton.
+#if !defined( VULKAN_HPP_DEFAULT_DISPATCHER )
+#  if VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1
+#    define VULKAN_HPP_DEFAULT_DISPATCHER ::VULKAN_HPP_NAMESPACE::defaultDispatchLoaderDynamic
+#    define VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE                     \
+      namespace VULKAN_HPP_NAMESPACE                                               \
+      {                                                                            \
+        VULKAN_HPP_STORAGE_API DispatchLoaderDynamic defaultDispatchLoaderDynamic; \
+      }
+  extern VULKAN_HPP_STORAGE_API DispatchLoaderDynamic defaultDispatchLoaderDynamic;
+#  else
+  // Meyers-singleton accessor: constructed on first use, no global init order issues.
+  static inline ::VULKAN_HPP_NAMESPACE::DispatchLoaderStatic & getDispatchLoaderStatic()
+  {
+    static ::VULKAN_HPP_NAMESPACE::DispatchLoaderStatic dls;
+    return dls;
+  }
+#    define VULKAN_HPP_DEFAULT_DISPATCHER ::VULKAN_HPP_NAMESPACE::getDispatchLoaderStatic()
+#    define VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE
+#  endif
+#endif
+
+// Type of the default dispatcher, matching the expression selected above.
+#if !defined( VULKAN_HPP_DEFAULT_DISPATCHER_TYPE )
+#  if VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1
+#    define VULKAN_HPP_DEFAULT_DISPATCHER_TYPE ::VULKAN_HPP_NAMESPACE::DispatchLoaderDynamic
+#  else
+#    define VULKAN_HPP_DEFAULT_DISPATCHER_TYPE ::VULKAN_HPP_NAMESPACE::DispatchLoaderStatic
+#  endif
+#endif
+
+// When a default dispatcher exists, enable default arguments ({} / nullptr /
+// the dispatcher itself) on the C++ API; otherwise all must be passed explicitly.
+#if defined( VULKAN_HPP_NO_DEFAULT_DISPATCHER )
+#  define VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT
+#  define VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT
+#  define VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT
+#else
+#  define VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT         = {}
+#  define VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT = nullptr
+#  define VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT       = VULKAN_HPP_DEFAULT_DISPATCHER
+#endif
+
+#if !defined( VULKAN_HPP_NO_SMART_HANDLE )
+  struct AllocationCallbacks;
+
+  // Deleter policy for UniqueHandle: calls owner.destroy( handle, allocator,
+  // dispatch ). Stores owner/allocator by value and dispatcher by pointer.
+  template <typename OwnerType, typename Dispatch>
+  class ObjectDestroy
+  {
+  public:
+    ObjectDestroy() = default;
+
+    ObjectDestroy( OwnerType owner,
+                   Optional<const AllocationCallbacks> allocationCallbacks
+                                             VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+                   Dispatch const & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT
+      : m_owner( owner )
+      , m_allocationCallbacks( allocationCallbacks )
+      , m_dispatch( &dispatch )
+    {}
+
+    OwnerType getOwner() const VULKAN_HPP_NOEXCEPT { return m_owner; }
+    Optional<const AllocationCallbacks> getAllocator() const VULKAN_HPP_NOEXCEPT { return m_allocationCallbacks; }
+
+  protected:
+    // Invoked by the owning smart handle; requires a non-null owner and dispatcher.
+    template <typename T>
+    void destroy(T t) VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( m_owner && m_dispatch );
+      m_owner.destroy( t, m_allocationCallbacks, *m_dispatch );
+    }
+
+  private:
+    OwnerType                           m_owner               = {};
+    Optional<const AllocationCallbacks> m_allocationCallbacks = nullptr;
+    Dispatch const *                    m_dispatch            = nullptr;
+  };
+
+  class NoParent;
+
+  // Specialization for parentless handles (e.g. top-level objects with no
+  // owner): the handle destroys itself via t.destroy( allocator, dispatch ).
+  template <typename Dispatch>
+  class ObjectDestroy<NoParent, Dispatch>
+  {
+  public:
+    ObjectDestroy() = default;
+
+    ObjectDestroy( Optional<const AllocationCallbacks> allocationCallbacks,
+                   Dispatch const & dispatch           VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT
+      : m_allocationCallbacks( allocationCallbacks )
+      , m_dispatch( &dispatch )
+    {}
+
+    Optional<const AllocationCallbacks> getAllocator() const VULKAN_HPP_NOEXCEPT { return m_allocationCallbacks; }
+
+  protected:
+    // No owner here: the handle itself carries the destroy member.
+    template <typename T>
+    void destroy(T t) VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( m_dispatch );
+      t.destroy( m_allocationCallbacks, *m_dispatch );
+    }
+
+  private:
+    Optional<const AllocationCallbacks> m_allocationCallbacks = nullptr;
+    Dispatch const *                    m_dispatch            = nullptr;
+  };
+
+  // Deleter policy that frees rather than destroys: calls owner.free( handle,
+  // allocator, dispatch ). The extra parentheses around m_owner.free guard
+  // against a macro named `free`.
+  template <typename OwnerType, typename Dispatch>
+  class ObjectFree
+  {
+  public:
+    ObjectFree() = default;
+
+    ObjectFree( OwnerType                                               owner,
+                Optional<const AllocationCallbacks> allocationCallbacks VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+                Dispatch const & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT
+      : m_owner( owner )
+      , m_allocationCallbacks( allocationCallbacks )
+      , m_dispatch( &dispatch )
+    {}
+
+    OwnerType getOwner() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_owner;
+    }
+
+    Optional<const AllocationCallbacks> getAllocator() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_allocationCallbacks;
+    }
+
+  protected:
+    template <typename T>
+    void destroy( T t ) VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( m_owner && m_dispatch );
+      ( m_owner.free )( t, m_allocationCallbacks, *m_dispatch );
+    }
+
+  private:
+    OwnerType                           m_owner               = {};
+    Optional<const AllocationCallbacks> m_allocationCallbacks = nullptr;
+    Dispatch const *                    m_dispatch            = nullptr;
+  };
+
+  // Deleter policy for handles that are released back to their owner (no
+  // allocation callbacks involved): calls owner.release( handle, dispatch ).
+  template <typename OwnerType, typename Dispatch>
+  class ObjectRelease
+  {
+  public:
+    ObjectRelease() = default;
+
+    ObjectRelease( OwnerType                 owner,
+                   Dispatch const & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT
+      : m_owner( owner )
+      , m_dispatch( &dispatch )
+    {}
+
+    OwnerType getOwner() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_owner;
+    }
+
+  protected:
+    template <typename T>
+    void destroy( T t ) VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( m_owner && m_dispatch );
+      m_owner.release( t, *m_dispatch );
+    }
+
+  private:
+    OwnerType        m_owner    = {};
+    Dispatch const * m_dispatch = nullptr;
+  };
+
+  // Deleter policy for pool-allocated handles (command buffers, descriptor
+  // sets): calls owner.free( pool, handle, dispatch ). Note destroy() does not
+  // assert non-null members, unlike the other deleter policies above.
+  template <typename OwnerType, typename PoolType, typename Dispatch>
+  class PoolFree
+  {
+    public:
+      PoolFree() = default;
+
+    PoolFree( OwnerType                 owner,
+              PoolType                  pool,
+              Dispatch const & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT
+        : m_owner( owner )
+        , m_pool( pool )
+        , m_dispatch( &dispatch )
+      {}
+
+      OwnerType getOwner() const VULKAN_HPP_NOEXCEPT { return m_owner; }
+      PoolType getPool() const VULKAN_HPP_NOEXCEPT { return m_pool; }
+
+    protected:
+      template <typename T>
+      void destroy(T t) VULKAN_HPP_NOEXCEPT
+      {
+        ( m_owner.free )( m_pool, t, *m_dispatch );
+      }
+
+    private:
+      OwnerType        m_owner    = OwnerType();
+      PoolType         m_pool     = PoolType();
+      Dispatch const * m_dispatch = nullptr;
+  };
+
+#endif // !VULKAN_HPP_NO_SMART_HANDLE
+
+  //==================
+  //=== BASE TYPEs ===
+  //==================
+
+  // C++ aliases mirroring the corresponding C typedefs in vulkan_core.h.
+  using Bool32 = uint32_t;
+  using DeviceAddress = uint64_t;
+  using DeviceSize = uint64_t;
+  using RemoteAddressNV = void *;
+  using SampleMask = uint32_t;
+
+
+} // namespace VULKAN_HPP_NAMESPACE
+
+#include <vulkan/vulkan_enums.hpp>
+#if !defined( VULKAN_HPP_NO_TO_STRING )
+#include <vulkan/vulkan_to_string.hpp>
+#endif
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+// Register vk::Result with <system_error> so it can convert to std::error_code.
+namespace std
+{
+  template <>
+  struct is_error_code_enum<VULKAN_HPP_NAMESPACE::Result> : public true_type
+  {};
+}  // namespace std
+#endif
+
+namespace VULKAN_HPP_NAMESPACE
+{
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  // std::error_category for vk::Result: names the category and renders a
+  // Result value to text (falls back to the raw integer if to_string is disabled).
+  class ErrorCategoryImpl : public std::error_category
+  {
+    public:
+    virtual const char* name() const VULKAN_HPP_NOEXCEPT override { return VULKAN_HPP_NAMESPACE_STRING"::Result"; }
+    virtual std::string message(int ev) const override
+    {
+#if defined( VULKAN_HPP_NO_TO_STRING )
+      return std::to_string( ev );
+#else
+      return VULKAN_HPP_NAMESPACE::to_string(static_cast<VULKAN_HPP_NAMESPACE::Result>(ev));
+#endif
+    }
+  };
+
+  // Abstract common base of all Vulkan-Hpp exceptions; lets callers catch
+  // every vk error with a single `catch ( vk::Error const & )`.
+  class Error
+  {
+    public:
+    Error() VULKAN_HPP_NOEXCEPT = default;
+    Error(const Error&) VULKAN_HPP_NOEXCEPT = default;
+    virtual ~Error() VULKAN_HPP_NOEXCEPT = default;
+
+    virtual const char* what() const VULKAN_HPP_NOEXCEPT = 0;
+  };
+
+  // Programming-error exception; what() delegates to std::logic_error.
+  class LogicError : public Error, public std::logic_error
+  {
+    public:
+    explicit LogicError( const std::string& what )
+      : Error(), std::logic_error(what) {}
+    explicit LogicError( char const * what )
+      : Error(), std::logic_error(what) {}
+
+    virtual const char* what() const VULKAN_HPP_NOEXCEPT { return std::logic_error::what(); }
+  };
+
+  // Runtime-error exception carrying a std::error_code (built from a
+  // vk::Result); base of all the per-Result error classes below.
+  class SystemError : public Error, public std::system_error
+  {
+    public:
+    SystemError( std::error_code ec )
+      : Error(), std::system_error(ec) {}
+    SystemError( std::error_code ec, std::string const & what )
+      : Error(), std::system_error(ec, what) {}
+    SystemError( std::error_code ec, char const * what )
+      : Error(), std::system_error(ec, what) {}
+    SystemError( int ev, std::error_category const & ecat )
+      : Error(), std::system_error(ev, ecat) {}
+    SystemError( int ev, std::error_category const & ecat, std::string const & what)
+      : Error(), std::system_error(ev, ecat, what) {}
+    SystemError( int ev, std::error_category const & ecat, char const * what)
+      : Error(), std::system_error(ev, ecat, what) {}
+
+    virtual const char* what() const VULKAN_HPP_NOEXCEPT { return std::system_error::what(); }
+  };
+
+  // Singleton accessor for the vk::Result error category.
+  VULKAN_HPP_INLINE const std::error_category& errorCategory() VULKAN_HPP_NOEXCEPT
+  {
+    static ErrorCategoryImpl instance;
+    return instance;
+  }
+
+  // ADL hooks used by <system_error> via the is_error_code_enum
+  // specialization registered earlier in this header.
+  VULKAN_HPP_INLINE std::error_code make_error_code(Result e) VULKAN_HPP_NOEXCEPT
+  {
+    return std::error_code(static_cast<int>(e), errorCategory());
+  }
+
+  VULKAN_HPP_INLINE std::error_condition make_error_condition(Result e) VULKAN_HPP_NOEXCEPT
+  {
+    return std::error_condition(static_cast<int>(e), errorCategory());
+  }
+
+
+  class OutOfHostMemoryError : public SystemError
+  {
+  public:
+    OutOfHostMemoryError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorOutOfHostMemory ), message ) {}
+    OutOfHostMemoryError( char const * message )
+      : SystemError( make_error_code( Result::eErrorOutOfHostMemory ), message ) {}
+  };
+
+  class OutOfDeviceMemoryError : public SystemError
+  {
+  public:
+    OutOfDeviceMemoryError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorOutOfDeviceMemory ), message ) {}
+    OutOfDeviceMemoryError( char const * message )
+      : SystemError( make_error_code( Result::eErrorOutOfDeviceMemory ), message ) {}
+  };
+
+  class InitializationFailedError : public SystemError
+  {
+  public:
+    InitializationFailedError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorInitializationFailed ), message ) {}
+    InitializationFailedError( char const * message )
+      : SystemError( make_error_code( Result::eErrorInitializationFailed ), message ) {}
+  };
+
+  class DeviceLostError : public SystemError
+  {
+  public:
+    DeviceLostError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorDeviceLost ), message ) {}
+    DeviceLostError( char const * message )
+      : SystemError( make_error_code( Result::eErrorDeviceLost ), message ) {}
+  };
+
+  class MemoryMapFailedError : public SystemError
+  {
+  public:
+    MemoryMapFailedError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorMemoryMapFailed ), message ) {}
+    MemoryMapFailedError( char const * message )
+      : SystemError( make_error_code( Result::eErrorMemoryMapFailed ), message ) {}
+  };
+
+  class LayerNotPresentError : public SystemError
+  {
+  public:
+    LayerNotPresentError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorLayerNotPresent ), message ) {}
+    LayerNotPresentError( char const * message )
+      : SystemError( make_error_code( Result::eErrorLayerNotPresent ), message ) {}
+  };
+
+  class ExtensionNotPresentError : public SystemError
+  {
+  public:
+    ExtensionNotPresentError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorExtensionNotPresent ), message ) {}
+    ExtensionNotPresentError( char const * message )
+      : SystemError( make_error_code( Result::eErrorExtensionNotPresent ), message ) {}
+  };
+
+  class FeatureNotPresentError : public SystemError
+  {
+  public:
+    FeatureNotPresentError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorFeatureNotPresent ), message ) {}
+    FeatureNotPresentError( char const * message )
+      : SystemError( make_error_code( Result::eErrorFeatureNotPresent ), message ) {}
+  };
+
+  class IncompatibleDriverError : public SystemError
+  {
+  public:
+    IncompatibleDriverError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorIncompatibleDriver ), message ) {}
+    IncompatibleDriverError( char const * message )
+      : SystemError( make_error_code( Result::eErrorIncompatibleDriver ), message ) {}
+  };
+
+  class TooManyObjectsError : public SystemError
+  {
+  public:
+    TooManyObjectsError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorTooManyObjects ), message ) {}
+    TooManyObjectsError( char const * message )
+      : SystemError( make_error_code( Result::eErrorTooManyObjects ), message ) {}
+  };
+
+  class FormatNotSupportedError : public SystemError
+  {
+  public:
+    FormatNotSupportedError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorFormatNotSupported ), message ) {}
+    FormatNotSupportedError( char const * message )
+      : SystemError( make_error_code( Result::eErrorFormatNotSupported ), message ) {}
+  };
+
+  class FragmentedPoolError : public SystemError
+  {
+  public:
+    FragmentedPoolError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorFragmentedPool ), message ) {}
+    FragmentedPoolError( char const * message )
+      : SystemError( make_error_code( Result::eErrorFragmentedPool ), message ) {}
+  };
+
+  class UnknownError : public SystemError
+  {
+  public:
+    UnknownError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorUnknown ), message ) {}
+    UnknownError( char const * message )
+      : SystemError( make_error_code( Result::eErrorUnknown ), message ) {}
+  };
+
+  class OutOfPoolMemoryError : public SystemError
+  {
+  public:
+    OutOfPoolMemoryError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorOutOfPoolMemory ), message ) {}
+    OutOfPoolMemoryError( char const * message )
+      : SystemError( make_error_code( Result::eErrorOutOfPoolMemory ), message ) {}
+  };
+
+  class InvalidExternalHandleError : public SystemError
+  {
+  public:
+    InvalidExternalHandleError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorInvalidExternalHandle ), message ) {}
+    InvalidExternalHandleError( char const * message )
+      : SystemError( make_error_code( Result::eErrorInvalidExternalHandle ), message ) {}
+  };
+
+  class FragmentationError : public SystemError
+  {
+  public:
+    FragmentationError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorFragmentation ), message ) {}
+    FragmentationError( char const * message )
+      : SystemError( make_error_code( Result::eErrorFragmentation ), message ) {}
+  };
+
+  class InvalidOpaqueCaptureAddressError : public SystemError
+  {
+  public:
+    InvalidOpaqueCaptureAddressError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorInvalidOpaqueCaptureAddress ), message ) {}
+    InvalidOpaqueCaptureAddressError( char const * message )
+      : SystemError( make_error_code( Result::eErrorInvalidOpaqueCaptureAddress ), message ) {}
+  };
+
+  class SurfaceLostKHRError : public SystemError
+  {
+  public:
+    SurfaceLostKHRError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorSurfaceLostKHR ), message ) {}
+    SurfaceLostKHRError( char const * message )
+      : SystemError( make_error_code( Result::eErrorSurfaceLostKHR ), message ) {}
+  };
+
+  class NativeWindowInUseKHRError : public SystemError
+  {
+  public:
+    NativeWindowInUseKHRError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorNativeWindowInUseKHR ), message ) {}
+    NativeWindowInUseKHRError( char const * message )
+      : SystemError( make_error_code( Result::eErrorNativeWindowInUseKHR ), message ) {}
+  };
+
+  class OutOfDateKHRError : public SystemError
+  {
+  public:
+    OutOfDateKHRError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorOutOfDateKHR ), message ) {}
+    OutOfDateKHRError( char const * message )
+      : SystemError( make_error_code( Result::eErrorOutOfDateKHR ), message ) {}
+  };
+
+  class IncompatibleDisplayKHRError : public SystemError
+  {
+  public:
+    IncompatibleDisplayKHRError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorIncompatibleDisplayKHR ), message ) {}
+    IncompatibleDisplayKHRError( char const * message )
+      : SystemError( make_error_code( Result::eErrorIncompatibleDisplayKHR ), message ) {}
+  };
+
+  class ValidationFailedEXTError : public SystemError
+  {
+  public:
+    ValidationFailedEXTError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorValidationFailedEXT ), message ) {}
+    ValidationFailedEXTError( char const * message )
+      : SystemError( make_error_code( Result::eErrorValidationFailedEXT ), message ) {}
+  };
+
+  class InvalidShaderNVError : public SystemError
+  {
+  public:
+    InvalidShaderNVError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorInvalidShaderNV ), message ) {}
+    InvalidShaderNVError( char const * message )
+      : SystemError( make_error_code( Result::eErrorInvalidShaderNV ), message ) {}
+  };
+
+  class ImageUsageNotSupportedKHRError : public SystemError
+  {
+  public:
+    ImageUsageNotSupportedKHRError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorImageUsageNotSupportedKHR ), message ) {}
+    ImageUsageNotSupportedKHRError( char const * message )
+      : SystemError( make_error_code( Result::eErrorImageUsageNotSupportedKHR ), message ) {}
+  };
+
+  class VideoPictureLayoutNotSupportedKHRError : public SystemError
+  {
+  public:
+    VideoPictureLayoutNotSupportedKHRError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorVideoPictureLayoutNotSupportedKHR ), message ) {}
+    VideoPictureLayoutNotSupportedKHRError( char const * message )
+      : SystemError( make_error_code( Result::eErrorVideoPictureLayoutNotSupportedKHR ), message ) {}
+  };
+
+  class VideoProfileOperationNotSupportedKHRError : public SystemError
+  {
+  public:
+    VideoProfileOperationNotSupportedKHRError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorVideoProfileOperationNotSupportedKHR ), message ) {}
+    VideoProfileOperationNotSupportedKHRError( char const * message )
+      : SystemError( make_error_code( Result::eErrorVideoProfileOperationNotSupportedKHR ), message ) {}
+  };
+
+  class VideoProfileFormatNotSupportedKHRError : public SystemError
+  {
+  public:
+    VideoProfileFormatNotSupportedKHRError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorVideoProfileFormatNotSupportedKHR ), message ) {}
+    VideoProfileFormatNotSupportedKHRError( char const * message )
+      : SystemError( make_error_code( Result::eErrorVideoProfileFormatNotSupportedKHR ), message ) {}
+  };
+
+  // Thrown when a call returns Result::eErrorVideoProfileCodecNotSupportedKHR.
+  class VideoProfileCodecNotSupportedKHRError : public SystemError
+  {
+  public:
+    VideoProfileCodecNotSupportedKHRError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorVideoProfileCodecNotSupportedKHR ), message ) {}
+    VideoProfileCodecNotSupportedKHRError( char const * message )
+      : SystemError( make_error_code( Result::eErrorVideoProfileCodecNotSupportedKHR ), message ) {}
+  };
+
+  // Thrown when a call returns Result::eErrorVideoStdVersionNotSupportedKHR.
+  class VideoStdVersionNotSupportedKHRError : public SystemError
+  {
+  public:
+    VideoStdVersionNotSupportedKHRError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorVideoStdVersionNotSupportedKHR ), message ) {}
+    VideoStdVersionNotSupportedKHRError( char const * message )
+      : SystemError( make_error_code( Result::eErrorVideoStdVersionNotSupportedKHR ), message ) {}
+  };
+
+  // Thrown when a call returns Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT.
+  class InvalidDrmFormatModifierPlaneLayoutEXTError : public SystemError
+  {
+  public:
+    InvalidDrmFormatModifierPlaneLayoutEXTError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT ), message ) {}
+    InvalidDrmFormatModifierPlaneLayoutEXTError( char const * message )
+      : SystemError( make_error_code( Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT ), message ) {}
+  };
+
+  // Thrown when a call returns Result::eErrorNotPermittedKHR.
+  class NotPermittedKHRError : public SystemError
+  {
+  public:
+    NotPermittedKHRError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorNotPermittedKHR ), message ) {}
+    NotPermittedKHRError( char const * message )
+      : SystemError( make_error_code( Result::eErrorNotPermittedKHR ), message ) {}
+  };
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  // Thrown when a call returns Result::eErrorFullScreenExclusiveModeLostEXT.
+  // Only compiled for Win32 platforms, matching the guarded case in throwResultException.
+  class FullScreenExclusiveModeLostEXTError : public SystemError
+  {
+  public:
+    FullScreenExclusiveModeLostEXTError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorFullScreenExclusiveModeLostEXT ), message ) {}
+    FullScreenExclusiveModeLostEXTError( char const * message )
+      : SystemError( make_error_code( Result::eErrorFullScreenExclusiveModeLostEXT ), message ) {}
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  // Thrown when a call returns Result::eErrorCompressionExhaustedEXT.
+  class CompressionExhaustedEXTError : public SystemError
+  {
+  public:
+    CompressionExhaustedEXTError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorCompressionExhaustedEXT ), message ) {}
+    CompressionExhaustedEXTError( char const * message )
+      : SystemError( make_error_code( Result::eErrorCompressionExhaustedEXT ), message ) {}
+  };
+
+
+  namespace
+  {
+    // Dispatches an error Result code to its dedicated exception class and throws it.
+    // Any code without a dedicated class (including success codes, which callers are
+    // expected to filter out first — see resultCheck) falls through to a plain
+    // SystemError carrying the corresponding error_code. Never returns.
+    [[noreturn]] void throwResultException( Result result, char const * message )
+    {
+      switch ( result )
+      {
+      case Result::eErrorOutOfHostMemory: throw OutOfHostMemoryError( message );
+      case Result::eErrorOutOfDeviceMemory: throw OutOfDeviceMemoryError( message );
+      case Result::eErrorInitializationFailed: throw InitializationFailedError( message );
+      case Result::eErrorDeviceLost: throw DeviceLostError( message );
+      case Result::eErrorMemoryMapFailed: throw MemoryMapFailedError( message );
+      case Result::eErrorLayerNotPresent: throw LayerNotPresentError( message );
+      case Result::eErrorExtensionNotPresent: throw ExtensionNotPresentError( message );
+      case Result::eErrorFeatureNotPresent: throw FeatureNotPresentError( message );
+      case Result::eErrorIncompatibleDriver: throw IncompatibleDriverError( message );
+      case Result::eErrorTooManyObjects: throw TooManyObjectsError( message );
+      case Result::eErrorFormatNotSupported: throw FormatNotSupportedError( message );
+      case Result::eErrorFragmentedPool: throw FragmentedPoolError( message );
+      case Result::eErrorUnknown: throw UnknownError( message );
+      case Result::eErrorOutOfPoolMemory: throw OutOfPoolMemoryError( message );
+      case Result::eErrorInvalidExternalHandle: throw InvalidExternalHandleError( message );
+      case Result::eErrorFragmentation: throw FragmentationError( message );
+      case Result::eErrorInvalidOpaqueCaptureAddress: throw InvalidOpaqueCaptureAddressError( message );
+      case Result::eErrorSurfaceLostKHR: throw SurfaceLostKHRError( message );
+      case Result::eErrorNativeWindowInUseKHR: throw NativeWindowInUseKHRError( message );
+      case Result::eErrorOutOfDateKHR: throw OutOfDateKHRError( message );
+      case Result::eErrorIncompatibleDisplayKHR: throw IncompatibleDisplayKHRError( message );
+      case Result::eErrorValidationFailedEXT: throw ValidationFailedEXTError( message );
+      case Result::eErrorInvalidShaderNV: throw InvalidShaderNVError( message );
+      case Result::eErrorImageUsageNotSupportedKHR: throw ImageUsageNotSupportedKHRError( message );
+      case Result::eErrorVideoPictureLayoutNotSupportedKHR: throw VideoPictureLayoutNotSupportedKHRError( message );
+      case Result::eErrorVideoProfileOperationNotSupportedKHR: throw VideoProfileOperationNotSupportedKHRError( message );
+      case Result::eErrorVideoProfileFormatNotSupportedKHR: throw VideoProfileFormatNotSupportedKHRError( message );
+      case Result::eErrorVideoProfileCodecNotSupportedKHR: throw VideoProfileCodecNotSupportedKHRError( message );
+      case Result::eErrorVideoStdVersionNotSupportedKHR: throw VideoStdVersionNotSupportedKHRError( message );
+      case Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT: throw InvalidDrmFormatModifierPlaneLayoutEXTError( message );
+      case Result::eErrorNotPermittedKHR: throw NotPermittedKHRError( message );
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+      case Result::eErrorFullScreenExclusiveModeLostEXT: throw FullScreenExclusiveModeLostEXTError( message );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+      case Result::eErrorCompressionExhaustedEXT: throw CompressionExhaustedEXTError( message );
+        default: throw SystemError( make_error_code( result ) );
+      }
+    }
+  }
+#endif
+
+  // Consumes a value to suppress unused-variable warnings; used by createResultValueType
+  // and resultCheck below for the branch where the Result is intentionally discarded.
+  template <typename T> void ignore(T const &) VULKAN_HPP_NOEXCEPT {}
+
+  // Pairs a Result code with a returned value, for APIs that yield both.
+  // The noexcept specification mirrors whether constructing T from the argument can throw.
+  template <typename T>
+  struct ResultValue
+  {
+#ifdef VULKAN_HPP_HAS_NOEXCEPT
+    ResultValue( Result r, T & v ) VULKAN_HPP_NOEXCEPT(VULKAN_HPP_NOEXCEPT(T(v)))
+#else
+    ResultValue( Result r, T & v )
+#endif
+      : result( r )
+      , value( v )
+    {}
+
+#ifdef VULKAN_HPP_HAS_NOEXCEPT
+    ResultValue( Result r, T && v ) VULKAN_HPP_NOEXCEPT(VULKAN_HPP_NOEXCEPT(T(std::move(v))))
+#else
+    ResultValue( Result r, T && v )
+#endif
+      : result( r )
+      , value( std::move( v ) )
+    {}
+
+    Result  result;
+    T       value;
+
+    // Allows unpacking via std::tie( result, value ) — yields references to the members.
+    operator std::tuple<Result&, T&>() VULKAN_HPP_NOEXCEPT { return std::tuple<Result&, T&>(result, value); }
+  };
+
+#if !defined( VULKAN_HPP_NO_SMART_HANDLE )
+  // Specialization for a single UniqueHandle: move-only, so instead of the tuple
+  // conversion operator it offers asTuple(), which moves the handle out.
+  template <typename Type, typename Dispatch>
+  struct ResultValue<UniqueHandle<Type, Dispatch>>
+  {
+#ifdef VULKAN_HPP_HAS_NOEXCEPT
+    ResultValue(Result r, UniqueHandle<Type, Dispatch> && v) VULKAN_HPP_NOEXCEPT
+#else
+    ResultValue(Result r, UniqueHandle<Type, Dispatch> && v)
+#endif
+      : result(r)
+      , value(std::move(v))
+    {}
+
+    // NOTE: moves `value` out — the ResultValue must not be used afterwards.
+    std::tuple<Result, UniqueHandle<Type, Dispatch>> asTuple()
+    {
+      return std::make_tuple( result, std::move( value ) );
+    }
+
+    Result                        result;
+    UniqueHandle<Type, Dispatch>  value;
+  };
+
+  // Specialization for a vector of UniqueHandles; same move-out semantics as above.
+  template <typename Type, typename Dispatch>
+  struct ResultValue<std::vector<UniqueHandle<Type, Dispatch>>>
+  {
+#  ifdef VULKAN_HPP_HAS_NOEXCEPT
+    ResultValue( Result r, std::vector<UniqueHandle<Type, Dispatch>> && v ) VULKAN_HPP_NOEXCEPT
+#  else
+    ResultValue( Result r, std::vector<UniqueHandle<Type, Dispatch>> && v )
+#  endif
+      : result( r )
+      , value( std::move( v ) )
+    {}
+
+    // NOTE: moves `value` out — the ResultValue must not be used afterwards.
+    std::tuple<Result, std::vector<UniqueHandle<Type, Dispatch>>> asTuple()
+    {
+      return std::make_tuple( result, std::move( value ) );
+    }
+
+    Result                                    result;
+    std::vector<UniqueHandle<Type, Dispatch>> value;
+  };
+#endif
+
+  // Selects the return type of enhanced-mode calls: with exceptions enabled the Result
+  // is reported by throwing, so only T (or void) is returned; with
+  // VULKAN_HPP_NO_EXCEPTIONS the Result must travel with the value (ResultValue<T>),
+  // or alone (Result) for void-returning calls.
+  template <typename T>
+  struct ResultValueType
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    typedef ResultValue<T>  type;
+#else
+    typedef T               type;
+#endif
+  };
+
+  // Void specialization: Result alone when exceptions are disabled, nothing otherwise.
+  template <>
+  struct ResultValueType<void>
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    typedef Result type;
+#else
+    typedef void   type;
+#endif
+  };
+
+  // Wraps a bare Result for a void-returning call: passes it through when exceptions
+  // are disabled, otherwise discards it (errors were already thrown by resultCheck).
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type createResultValueType( Result result )
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    return result;
+#else
+    ignore( result );
+#endif
+  }
+
+  // Combines a Result with an lvalue payload into the configured return type:
+  // ResultValue<T> when exceptions are disabled, the plain value otherwise.
+  template <typename T>
+  VULKAN_HPP_INLINE typename ResultValueType<T>::type createResultValueType( Result result, T & data )
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    return ResultValue<T>( result, data );
+#else
+    ignore( result );
+    return data;
+#endif
+  }
+
+  // Rvalue overload of the above: moves the payload into the returned
+  // ResultValue<T> (no-exceptions build) or out to the caller directly.
+  template <typename T>
+  VULKAN_HPP_INLINE typename ResultValueType<T>::type createResultValueType( Result result, T && data )
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    return ResultValue<T>( result, std::move( data ) );
+#else
+    ignore( result );
+    return std::move( data );
+#endif
+  }
+
+  // Verifies a call succeeded. With exceptions enabled, anything other than eSuccess
+  // is turned into the matching exception via throwResultException; with
+  // VULKAN_HPP_NO_EXCEPTIONS it only asserts (and the asserts' arguments are
+  // explicitly ignore()'d in case VULKAN_HPP_ASSERT_ON_RESULT expands to nothing).
+  VULKAN_HPP_INLINE void resultCheck( Result result, char const * message )
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    ignore( result );  // just in case VULKAN_HPP_ASSERT_ON_RESULT is empty
+    ignore( message );
+    VULKAN_HPP_ASSERT_ON_RESULT( result == Result::eSuccess );
+#else
+    if ( result != Result::eSuccess )
+    {
+      throwResultException( result, message );
+    }
+#endif
+  }
+
+  // Overload for calls with several acceptable success codes (e.g. more than just
+  // eSuccess): throws (or asserts, in no-exceptions builds) only when `result` is
+  // not found in `successCodes`.
+  VULKAN_HPP_INLINE void resultCheck( Result result, char const * message, std::initializer_list<Result> successCodes )
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    ignore( result );  // just in case VULKAN_HPP_ASSERT_ON_RESULT is empty
+    ignore( message );
+    ignore( successCodes );  // just in case VULKAN_HPP_ASSERT_ON_RESULT is empty
+    VULKAN_HPP_ASSERT_ON_RESULT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() );
+#else
+    if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() )
+    {
+      throwResultException( result, message );
+    }
+#endif
+  }
+} // namespace VULKAN_HPP_NAMESPACE
+
+// clang-format off
+#include <vulkan/vulkan_handles.hpp>
+#include <vulkan/vulkan_structs.hpp>
+#include <vulkan/vulkan_funcs.hpp>
+// clang-format on
+
+namespace VULKAN_HPP_NAMESPACE
+{
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+
+  //=======================
+  //=== STRUCTS EXTENDS ===
+  //=======================
+
+
+  //=== VK_VERSION_1_0 ===
+  template <> struct StructExtends<ShaderModuleCreateInfo, PipelineShaderStageCreateInfo>{ enum { value = true }; };
+
+  //=== VK_VERSION_1_1 ===
+  template <> struct StructExtends<PhysicalDeviceSubgroupProperties, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevice16BitStorageFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevice16BitStorageFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<MemoryDedicatedRequirements, MemoryRequirements2>{ enum { value = true }; };
+  template <> struct StructExtends<MemoryDedicatedAllocateInfo, MemoryAllocateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<MemoryAllocateFlagsInfo, MemoryAllocateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<DeviceGroupRenderPassBeginInfo, RenderPassBeginInfo>{ enum { value = true }; };
+  template <> struct StructExtends<DeviceGroupRenderPassBeginInfo, RenderingInfo>{ enum { value = true }; };
+  template <> struct StructExtends<DeviceGroupCommandBufferBeginInfo, CommandBufferBeginInfo>{ enum { value = true }; };
+  template <> struct StructExtends<DeviceGroupSubmitInfo, SubmitInfo>{ enum { value = true }; };
+  template <> struct StructExtends<DeviceGroupBindSparseInfo, BindSparseInfo>{ enum { value = true }; };
+  template <> struct StructExtends<BindBufferMemoryDeviceGroupInfo, BindBufferMemoryInfo>{ enum { value = true }; };
+  template <> struct StructExtends<BindImageMemoryDeviceGroupInfo, BindImageMemoryInfo>{ enum { value = true }; };
+  template <> struct StructExtends<DeviceGroupDeviceCreateInfo, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceFeatures2, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevicePointClippingProperties, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<RenderPassInputAttachmentAspectCreateInfo, RenderPassCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ImageViewUsageCreateInfo, ImageViewCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineTessellationDomainOriginStateCreateInfo, PipelineTessellationStateCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<RenderPassMultiviewCreateInfo, RenderPassCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceMultiviewFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceMultiviewFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceMultiviewProperties, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceVariablePointersFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceVariablePointersFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceProtectedMemoryFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceProtectedMemoryFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceProtectedMemoryProperties, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<ProtectedSubmitInfo, SubmitInfo>{ enum { value = true }; };
+  template <> struct StructExtends<SamplerYcbcrConversionInfo, SamplerCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<SamplerYcbcrConversionInfo, ImageViewCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<BindImagePlaneMemoryInfo, BindImageMemoryInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ImagePlaneMemoryRequirementsInfo, ImageMemoryRequirementsInfo2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceSamplerYcbcrConversionFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceSamplerYcbcrConversionFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<SamplerYcbcrConversionImageFormatProperties, ImageFormatProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceExternalImageFormatInfo, PhysicalDeviceImageFormatInfo2>{ enum { value = true }; };
+  template <> struct StructExtends<ExternalImageFormatProperties, ImageFormatProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceIDProperties, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<ExternalMemoryImageCreateInfo, ImageCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ExternalMemoryBufferCreateInfo, BufferCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ExportMemoryAllocateInfo, MemoryAllocateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ExportFenceCreateInfo, FenceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ExportSemaphoreCreateInfo, SemaphoreCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceMaintenance3Properties, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderDrawParametersFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderDrawParametersFeatures, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_VERSION_1_2 ===
+  template <> struct StructExtends<PhysicalDeviceVulkan11Features, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceVulkan11Features, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceVulkan11Properties, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceVulkan12Features, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceVulkan12Features, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceVulkan12Properties, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<ImageFormatListCreateInfo, ImageCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ImageFormatListCreateInfo, SwapchainCreateInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<ImageFormatListCreateInfo, PhysicalDeviceImageFormatInfo2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevice8BitStorageFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevice8BitStorageFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceDriverProperties, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderAtomicInt64Features, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderAtomicInt64Features, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderFloat16Int8Features, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderFloat16Int8Features, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceFloatControlsProperties, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<DescriptorSetLayoutBindingFlagsCreateInfo, DescriptorSetLayoutCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceDescriptorIndexingFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceDescriptorIndexingFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceDescriptorIndexingProperties, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<DescriptorSetVariableDescriptorCountAllocateInfo, DescriptorSetAllocateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<DescriptorSetVariableDescriptorCountLayoutSupport, DescriptorSetLayoutSupport>{ enum { value = true }; };
+  template <> struct StructExtends<SubpassDescriptionDepthStencilResolve, SubpassDescription2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceDepthStencilResolveProperties, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceScalarBlockLayoutFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceScalarBlockLayoutFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ImageStencilUsageCreateInfo, ImageCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ImageStencilUsageCreateInfo, PhysicalDeviceImageFormatInfo2>{ enum { value = true }; };
+  template <> struct StructExtends<SamplerReductionModeCreateInfo, SamplerCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceSamplerFilterMinmaxProperties, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceVulkanMemoryModelFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceVulkanMemoryModelFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceImagelessFramebufferFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceImagelessFramebufferFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<FramebufferAttachmentsCreateInfo, FramebufferCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<RenderPassAttachmentBeginInfo, RenderPassBeginInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceUniformBufferStandardLayoutFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceUniformBufferStandardLayoutFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderSubgroupExtendedTypesFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderSubgroupExtendedTypesFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceSeparateDepthStencilLayoutsFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceSeparateDepthStencilLayoutsFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<AttachmentReferenceStencilLayout, AttachmentReference2>{ enum { value = true }; };
+  template <> struct StructExtends<AttachmentDescriptionStencilLayout, AttachmentDescription2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceHostQueryResetFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceHostQueryResetFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceTimelineSemaphoreFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceTimelineSemaphoreFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceTimelineSemaphoreProperties, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<SemaphoreTypeCreateInfo, SemaphoreCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<SemaphoreTypeCreateInfo, PhysicalDeviceExternalSemaphoreInfo>{ enum { value = true }; };
+  template <> struct StructExtends<TimelineSemaphoreSubmitInfo, SubmitInfo>{ enum { value = true }; };
+  template <> struct StructExtends<TimelineSemaphoreSubmitInfo, BindSparseInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceBufferDeviceAddressFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceBufferDeviceAddressFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<BufferOpaqueCaptureAddressCreateInfo, BufferCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<MemoryOpaqueCaptureAddressAllocateInfo, MemoryAllocateInfo>{ enum { value = true }; };
+
+  //=== VK_VERSION_1_3 ===
+  template <> struct StructExtends<PhysicalDeviceVulkan13Features, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceVulkan13Features, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceVulkan13Properties, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineCreationFeedbackCreateInfo, GraphicsPipelineCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineCreationFeedbackCreateInfo, ComputePipelineCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineCreationFeedbackCreateInfo, RayTracingPipelineCreateInfoNV>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineCreationFeedbackCreateInfo, RayTracingPipelineCreateInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderTerminateInvocationFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderTerminateInvocationFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderDemoteToHelperInvocationFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderDemoteToHelperInvocationFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevicePrivateDataFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevicePrivateDataFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<DevicePrivateDataCreateInfo, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevicePipelineCreationCacheControlFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevicePipelineCreationCacheControlFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<MemoryBarrier2, SubpassDependency2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceSynchronization2Features, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceSynchronization2Features, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceImageRobustnessFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceImageRobustnessFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceSubgroupSizeControlFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceSubgroupSizeControlFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceSubgroupSizeControlProperties, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineShaderStageRequiredSubgroupSizeCreateInfo, PipelineShaderStageCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceInlineUniformBlockFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceInlineUniformBlockFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceInlineUniformBlockProperties, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<WriteDescriptorSetInlineUniformBlock, WriteDescriptorSet>{ enum { value = true }; };
+  template <> struct StructExtends<DescriptorPoolInlineUniformBlockCreateInfo, DescriptorPoolCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceTextureCompressionASTCHDRFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceTextureCompressionASTCHDRFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineRenderingCreateInfo, GraphicsPipelineCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceDynamicRenderingFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceDynamicRenderingFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<CommandBufferInheritanceRenderingInfo, CommandBufferInheritanceInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderIntegerDotProductFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderIntegerDotProductFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderIntegerDotProductProperties, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceTexelBufferAlignmentProperties, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<FormatProperties3, FormatProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceMaintenance4Features, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceMaintenance4Features, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceMaintenance4Properties, PhysicalDeviceProperties2>{ enum { value = true }; };
+
+  //=== VK_KHR_swapchain ===
+  template <> struct StructExtends<ImageSwapchainCreateInfoKHR, ImageCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<BindImageMemorySwapchainInfoKHR, BindImageMemoryInfo>{ enum { value = true }; };
+  template <> struct StructExtends<DeviceGroupPresentInfoKHR, PresentInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<DeviceGroupSwapchainCreateInfoKHR, SwapchainCreateInfoKHR>{ enum { value = true }; };
+
+  //=== VK_KHR_display_swapchain ===
+  template <> struct StructExtends<DisplayPresentInfoKHR, PresentInfoKHR>{ enum { value = true }; };
+
+  //=== VK_EXT_debug_report ===
+  template <> struct StructExtends<DebugReportCallbackCreateInfoEXT, InstanceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_AMD_rasterization_order ===
+  template <> struct StructExtends<PipelineRasterizationStateRasterizationOrderAMD, PipelineRasterizationStateCreateInfo>{ enum { value = true }; };
+
+  //=== VK_KHR_video_queue ===
+  template <> struct StructExtends<QueueFamilyQueryResultStatusPropertiesKHR, QueueFamilyProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<QueueFamilyVideoPropertiesKHR, QueueFamilyProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<VideoProfileInfoKHR, QueryPoolCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<VideoProfileListInfoKHR, PhysicalDeviceImageFormatInfo2>{ enum { value = true }; };
+  template <> struct StructExtends<VideoProfileListInfoKHR, PhysicalDeviceVideoFormatInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<VideoProfileListInfoKHR, ImageCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<VideoProfileListInfoKHR, BufferCreateInfo>{ enum { value = true }; };
+
+  //=== VK_KHR_video_decode_queue ===
+  template <> struct StructExtends<VideoDecodeCapabilitiesKHR, VideoCapabilitiesKHR>{ enum { value = true }; };
+  template <> struct StructExtends<VideoDecodeUsageInfoKHR, VideoProfileInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<VideoDecodeUsageInfoKHR, QueryPoolCreateInfo>{ enum { value = true }; };
+
+  //=== VK_NV_dedicated_allocation ===
+  template <> struct StructExtends<DedicatedAllocationImageCreateInfoNV, ImageCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<DedicatedAllocationBufferCreateInfoNV, BufferCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<DedicatedAllocationMemoryAllocateInfoNV, MemoryAllocateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_transform_feedback ===
+  template <> struct StructExtends<PhysicalDeviceTransformFeedbackFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceTransformFeedbackFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceTransformFeedbackPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineRasterizationStateStreamCreateInfoEXT, PipelineRasterizationStateCreateInfo>{ enum { value = true }; };
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  //=== VK_EXT_video_encode_h264 ===
+  template <> struct StructExtends<VideoEncodeH264CapabilitiesEXT, VideoCapabilitiesKHR>{ enum { value = true }; };
+  template <> struct StructExtends<VideoEncodeH264SessionParametersCreateInfoEXT, VideoSessionParametersCreateInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<VideoEncodeH264SessionParametersAddInfoEXT, VideoSessionParametersUpdateInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<VideoEncodeH264VclFrameInfoEXT, VideoEncodeInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<VideoEncodeH264EmitPictureParametersInfoEXT, VideoEncodeInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<VideoEncodeH264ProfileInfoEXT, VideoProfileInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<VideoEncodeH264ProfileInfoEXT, QueryPoolCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<VideoEncodeH264RateControlInfoEXT, VideoCodingControlInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<VideoEncodeH264RateControlLayerInfoEXT, VideoCodingControlInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<VideoEncodeH264RateControlLayerInfoEXT, VideoEncodeRateControlLayerInfoKHR>{ enum { value = true }; };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  //=== VK_EXT_video_encode_h265 ===
+  template <> struct StructExtends<VideoEncodeH265CapabilitiesEXT, VideoCapabilitiesKHR>{ enum { value = true }; };
+  template <> struct StructExtends<VideoEncodeH265SessionParametersCreateInfoEXT, VideoSessionParametersCreateInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<VideoEncodeH265SessionParametersAddInfoEXT, VideoSessionParametersUpdateInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<VideoEncodeH265VclFrameInfoEXT, VideoEncodeInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<VideoEncodeH265EmitPictureParametersInfoEXT, VideoEncodeInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<VideoEncodeH265ProfileInfoEXT, VideoProfileInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<VideoEncodeH265ProfileInfoEXT, QueryPoolCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<VideoEncodeH265RateControlInfoEXT, VideoCodingControlInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<VideoEncodeH265RateControlLayerInfoEXT, VideoCodingControlInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<VideoEncodeH265RateControlLayerInfoEXT, VideoEncodeRateControlLayerInfoKHR>{ enum { value = true }; };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  //=== VK_KHR_video_decode_h264 ===
+  template <> struct StructExtends<VideoDecodeH264ProfileInfoKHR, VideoProfileInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<VideoDecodeH264ProfileInfoKHR, QueryPoolCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<VideoDecodeH264CapabilitiesKHR, VideoCapabilitiesKHR>{ enum { value = true }; };
+  template <> struct StructExtends<VideoDecodeH264SessionParametersCreateInfoKHR, VideoSessionParametersCreateInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<VideoDecodeH264SessionParametersAddInfoKHR, VideoSessionParametersUpdateInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<VideoDecodeH264PictureInfoKHR, VideoDecodeInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<VideoDecodeH264DpbSlotInfoKHR, VideoReferenceSlotInfoKHR>{ enum { value = true }; };
+
+  //=== VK_AMD_texture_gather_bias_lod ===
+  template <> struct StructExtends<TextureLODGatherFormatPropertiesAMD, ImageFormatProperties2>{ enum { value = true }; };
+
+  //=== VK_KHR_dynamic_rendering ===
+  template <> struct StructExtends<RenderingFragmentShadingRateAttachmentInfoKHR, RenderingInfo>{ enum { value = true }; };
+  template <> struct StructExtends<RenderingFragmentDensityMapAttachmentInfoEXT, RenderingInfo>{ enum { value = true }; };
+  template <> struct StructExtends<AttachmentSampleCountInfoAMD, CommandBufferInheritanceInfo>{ enum { value = true }; };
+  template <> struct StructExtends<AttachmentSampleCountInfoAMD, GraphicsPipelineCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<MultiviewPerViewAttributesInfoNVX, CommandBufferInheritanceInfo>{ enum { value = true }; };
+  template <> struct StructExtends<MultiviewPerViewAttributesInfoNVX, GraphicsPipelineCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<MultiviewPerViewAttributesInfoNVX, RenderingInfo>{ enum { value = true }; };
+
+  //=== VK_NV_corner_sampled_image ===
+  template <> struct StructExtends<PhysicalDeviceCornerSampledImageFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceCornerSampledImageFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_NV_external_memory ===
+  template <> struct StructExtends<ExternalMemoryImageCreateInfoNV, ImageCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ExportMemoryAllocateInfoNV, MemoryAllocateInfo>{ enum { value = true }; };
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_NV_external_memory_win32 ===
+  template <> struct StructExtends<ImportMemoryWin32HandleInfoNV, MemoryAllocateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ExportMemoryWin32HandleInfoNV, MemoryAllocateInfo>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_NV_win32_keyed_mutex ===
+  template <> struct StructExtends<Win32KeyedMutexAcquireReleaseInfoNV, SubmitInfo>{ enum { value = true }; };
+  template <> struct StructExtends<Win32KeyedMutexAcquireReleaseInfoNV, SubmitInfo2>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_EXT_validation_flags ===
+  template <> struct StructExtends<ValidationFlagsEXT, InstanceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_astc_decode_mode ===
+  template <> struct StructExtends<ImageViewASTCDecodeModeEXT, ImageViewCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceASTCDecodeFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceASTCDecodeFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_pipeline_robustness ===
+  template <> struct StructExtends<PhysicalDevicePipelineRobustnessFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevicePipelineRobustnessFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevicePipelineRobustnessPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineRobustnessCreateInfoEXT, GraphicsPipelineCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineRobustnessCreateInfoEXT, ComputePipelineCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineRobustnessCreateInfoEXT, PipelineShaderStageCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineRobustnessCreateInfoEXT, RayTracingPipelineCreateInfoKHR>{ enum { value = true }; };
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_external_memory_win32 ===
+  template <> struct StructExtends<ImportMemoryWin32HandleInfoKHR, MemoryAllocateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ExportMemoryWin32HandleInfoKHR, MemoryAllocateInfo>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_external_memory_fd ===
+  template <> struct StructExtends<ImportMemoryFdInfoKHR, MemoryAllocateInfo>{ enum { value = true }; };
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_win32_keyed_mutex ===
+  template <> struct StructExtends<Win32KeyedMutexAcquireReleaseInfoKHR, SubmitInfo>{ enum { value = true }; };
+  template <> struct StructExtends<Win32KeyedMutexAcquireReleaseInfoKHR, SubmitInfo2>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_external_semaphore_win32 ===
+  template <> struct StructExtends<ExportSemaphoreWin32HandleInfoKHR, SemaphoreCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<D3D12FenceSubmitInfoKHR, SubmitInfo>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_push_descriptor ===
+  template <> struct StructExtends<PhysicalDevicePushDescriptorPropertiesKHR, PhysicalDeviceProperties2>{ enum { value = true }; };
+
+  //=== VK_EXT_conditional_rendering ===
+  template <> struct StructExtends<PhysicalDeviceConditionalRenderingFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceConditionalRenderingFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<CommandBufferInheritanceConditionalRenderingInfoEXT, CommandBufferInheritanceInfo>{ enum { value = true }; };
+
+  //=== VK_KHR_incremental_present ===
+  template <> struct StructExtends<PresentRegionsKHR, PresentInfoKHR>{ enum { value = true }; };
+
+  //=== VK_NV_clip_space_w_scaling ===
+  template <> struct StructExtends<PipelineViewportWScalingStateCreateInfoNV, PipelineViewportStateCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_display_control ===
+  template <> struct StructExtends<SwapchainCounterCreateInfoEXT, SwapchainCreateInfoKHR>{ enum { value = true }; };
+
+  //=== VK_GOOGLE_display_timing ===
+  template <> struct StructExtends<PresentTimesInfoGOOGLE, PresentInfoKHR>{ enum { value = true }; };
+
+  //=== VK_NVX_multiview_per_view_attributes ===
+  template <> struct StructExtends<PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX, PhysicalDeviceProperties2>{ enum { value = true }; };
+
+  //=== VK_NV_viewport_swizzle ===
+  template <> struct StructExtends<PipelineViewportSwizzleStateCreateInfoNV, PipelineViewportStateCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_discard_rectangles ===
+  template <> struct StructExtends<PhysicalDeviceDiscardRectanglePropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineDiscardRectangleStateCreateInfoEXT, GraphicsPipelineCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_conservative_rasterization ===
+  template <> struct StructExtends<PhysicalDeviceConservativeRasterizationPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineRasterizationConservativeStateCreateInfoEXT, PipelineRasterizationStateCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_depth_clip_enable ===
+  template <> struct StructExtends<PhysicalDeviceDepthClipEnableFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceDepthClipEnableFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineRasterizationDepthClipStateCreateInfoEXT, PipelineRasterizationStateCreateInfo>{ enum { value = true }; };
+
+  //=== VK_KHR_shared_presentable_image ===
+  template <> struct StructExtends<SharedPresentSurfaceCapabilitiesKHR, SurfaceCapabilities2KHR>{ enum { value = true }; };
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_external_fence_win32 ===
+  template <> struct StructExtends<ExportFenceWin32HandleInfoKHR, FenceCreateInfo>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_performance_query ===
+  template <> struct StructExtends<PhysicalDevicePerformanceQueryFeaturesKHR, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevicePerformanceQueryFeaturesKHR, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevicePerformanceQueryPropertiesKHR, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<QueryPoolPerformanceCreateInfoKHR, QueryPoolCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PerformanceQuerySubmitInfoKHR, SubmitInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PerformanceQuerySubmitInfoKHR, SubmitInfo2>{ enum { value = true }; };
+
+  //=== VK_EXT_debug_utils ===
+  template <> struct StructExtends<DebugUtilsMessengerCreateInfoEXT, InstanceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<DebugUtilsObjectNameInfoEXT, PipelineShaderStageCreateInfo>{ enum { value = true }; };
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  //=== VK_ANDROID_external_memory_android_hardware_buffer ===
+  template <> struct StructExtends<AndroidHardwareBufferUsageANDROID, ImageFormatProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<AndroidHardwareBufferFormatPropertiesANDROID, AndroidHardwareBufferPropertiesANDROID>{ enum { value = true }; };
+  template <> struct StructExtends<ImportAndroidHardwareBufferInfoANDROID, MemoryAllocateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ExternalFormatANDROID, ImageCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ExternalFormatANDROID, SamplerYcbcrConversionCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<AndroidHardwareBufferFormatProperties2ANDROID, AndroidHardwareBufferPropertiesANDROID>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+  //=== VK_EXT_sample_locations ===
+  template <> struct StructExtends<SampleLocationsInfoEXT, ImageMemoryBarrier>{ enum { value = true }; };
+  template <> struct StructExtends<SampleLocationsInfoEXT, ImageMemoryBarrier2>{ enum { value = true }; };
+  template <> struct StructExtends<RenderPassSampleLocationsBeginInfoEXT, RenderPassBeginInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineSampleLocationsStateCreateInfoEXT, PipelineMultisampleStateCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceSampleLocationsPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+
+  //=== VK_EXT_blend_operation_advanced ===
+  template <> struct StructExtends<PhysicalDeviceBlendOperationAdvancedFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceBlendOperationAdvancedFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceBlendOperationAdvancedPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineColorBlendAdvancedStateCreateInfoEXT, PipelineColorBlendStateCreateInfo>{ enum { value = true }; };
+
+  //=== VK_NV_fragment_coverage_to_color ===
+  template <> struct StructExtends<PipelineCoverageToColorStateCreateInfoNV, PipelineMultisampleStateCreateInfo>{ enum { value = true }; };
+
+  //=== VK_KHR_acceleration_structure ===
+  template <> struct StructExtends<WriteDescriptorSetAccelerationStructureKHR, WriteDescriptorSet>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceAccelerationStructureFeaturesKHR, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceAccelerationStructureFeaturesKHR, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceAccelerationStructurePropertiesKHR, PhysicalDeviceProperties2>{ enum { value = true }; };
+
+  //=== VK_NV_framebuffer_mixed_samples ===
+  template <> struct StructExtends<PipelineCoverageModulationStateCreateInfoNV, PipelineMultisampleStateCreateInfo>{ enum { value = true }; };
+
+  //=== VK_NV_shader_sm_builtins ===
+  template <> struct StructExtends<PhysicalDeviceShaderSMBuiltinsPropertiesNV, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderSMBuiltinsFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderSMBuiltinsFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_image_drm_format_modifier ===
+  template <> struct StructExtends<DrmFormatModifierPropertiesListEXT, FormatProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceImageDrmFormatModifierInfoEXT, PhysicalDeviceImageFormatInfo2>{ enum { value = true }; };
+  template <> struct StructExtends<ImageDrmFormatModifierListCreateInfoEXT, ImageCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ImageDrmFormatModifierExplicitCreateInfoEXT, ImageCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<DrmFormatModifierPropertiesList2EXT, FormatProperties2>{ enum { value = true }; };
+
+  //=== VK_EXT_validation_cache ===
+  template <> struct StructExtends<ShaderModuleValidationCacheCreateInfoEXT, ShaderModuleCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ShaderModuleValidationCacheCreateInfoEXT, PipelineShaderStageCreateInfo>{ enum { value = true }; };
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  //=== VK_KHR_portability_subset ===
+  template <> struct StructExtends<PhysicalDevicePortabilitySubsetFeaturesKHR, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevicePortabilitySubsetFeaturesKHR, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevicePortabilitySubsetPropertiesKHR, PhysicalDeviceProperties2>{ enum { value = true }; };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  //=== VK_NV_shading_rate_image ===
+  template <> struct StructExtends<PipelineViewportShadingRateImageStateCreateInfoNV, PipelineViewportStateCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShadingRateImageFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShadingRateImageFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShadingRateImagePropertiesNV, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineViewportCoarseSampleOrderStateCreateInfoNV, PipelineViewportStateCreateInfo>{ enum { value = true }; };
+
+  //=== VK_NV_ray_tracing ===
+  template <> struct StructExtends<WriteDescriptorSetAccelerationStructureNV, WriteDescriptorSet>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceRayTracingPropertiesNV, PhysicalDeviceProperties2>{ enum { value = true }; };
+
+  //=== VK_NV_representative_fragment_test ===
+  template <> struct StructExtends<PhysicalDeviceRepresentativeFragmentTestFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceRepresentativeFragmentTestFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineRepresentativeFragmentTestStateCreateInfoNV, GraphicsPipelineCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_filter_cubic ===
+  template <> struct StructExtends<PhysicalDeviceImageViewImageFormatInfoEXT, PhysicalDeviceImageFormatInfo2>{ enum { value = true }; };
+  template <> struct StructExtends<FilterCubicImageViewImageFormatPropertiesEXT, ImageFormatProperties2>{ enum { value = true }; };
+
+  //=== VK_EXT_external_memory_host ===
+  template <> struct StructExtends<ImportMemoryHostPointerInfoEXT, MemoryAllocateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceExternalMemoryHostPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+
+  //=== VK_KHR_shader_clock ===
+  template <> struct StructExtends<PhysicalDeviceShaderClockFeaturesKHR, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderClockFeaturesKHR, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_AMD_pipeline_compiler_control ===
+  template <> struct StructExtends<PipelineCompilerControlCreateInfoAMD, GraphicsPipelineCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineCompilerControlCreateInfoAMD, ComputePipelineCreateInfo>{ enum { value = true }; };
+
+  //=== VK_AMD_shader_core_properties ===
+  template <> struct StructExtends<PhysicalDeviceShaderCorePropertiesAMD, PhysicalDeviceProperties2>{ enum { value = true }; };
+
+  //=== VK_KHR_video_decode_h265 ===
+  template <> struct StructExtends<VideoDecodeH265ProfileInfoKHR, VideoProfileInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<VideoDecodeH265ProfileInfoKHR, QueryPoolCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<VideoDecodeH265CapabilitiesKHR, VideoCapabilitiesKHR>{ enum { value = true }; };
+  template <> struct StructExtends<VideoDecodeH265SessionParametersCreateInfoKHR, VideoSessionParametersCreateInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<VideoDecodeH265SessionParametersAddInfoKHR, VideoSessionParametersUpdateInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<VideoDecodeH265PictureInfoKHR, VideoDecodeInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<VideoDecodeH265DpbSlotInfoKHR, VideoReferenceSlotInfoKHR>{ enum { value = true }; };
+
+  //=== VK_KHR_global_priority ===
+  template <> struct StructExtends<DeviceQueueGlobalPriorityCreateInfoKHR, DeviceQueueCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceGlobalPriorityQueryFeaturesKHR, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceGlobalPriorityQueryFeaturesKHR, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<QueueFamilyGlobalPriorityPropertiesKHR, QueueFamilyProperties2>{ enum { value = true }; };
+
+  //=== VK_AMD_memory_overallocation_behavior ===
+  template <> struct StructExtends<DeviceMemoryOverallocationCreateInfoAMD, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_vertex_attribute_divisor ===
+  template <> struct StructExtends<PhysicalDeviceVertexAttributeDivisorPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineVertexInputDivisorStateCreateInfoEXT, PipelineVertexInputStateCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceVertexAttributeDivisorFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceVertexAttributeDivisorFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+
+#if defined( VK_USE_PLATFORM_GGP )
+  //=== VK_GGP_frame_token ===
+  template <> struct StructExtends<PresentFrameTokenGGP, PresentInfoKHR>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_GGP*/
+
+  //=== VK_NV_compute_shader_derivatives ===
+  template <> struct StructExtends<PhysicalDeviceComputeShaderDerivativesFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceComputeShaderDerivativesFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_NV_mesh_shader ===
+  template <> struct StructExtends<PhysicalDeviceMeshShaderFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceMeshShaderFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceMeshShaderPropertiesNV, PhysicalDeviceProperties2>{ enum { value = true }; };
+
+  //=== VK_NV_shader_image_footprint ===
+  template <> struct StructExtends<PhysicalDeviceShaderImageFootprintFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderImageFootprintFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_NV_scissor_exclusive ===
+  template <> struct StructExtends<PipelineViewportExclusiveScissorStateCreateInfoNV, PipelineViewportStateCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceExclusiveScissorFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceExclusiveScissorFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_NV_device_diagnostic_checkpoints ===
+  template <> struct StructExtends<QueueFamilyCheckpointPropertiesNV, QueueFamilyProperties2>{ enum { value = true }; };
+
+  //=== VK_INTEL_shader_integer_functions2 ===
+  template <> struct StructExtends<PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_INTEL_performance_query ===
+  template <> struct StructExtends<QueryPoolPerformanceQueryCreateInfoINTEL, QueryPoolCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_pci_bus_info ===
+  template <> struct StructExtends<PhysicalDevicePCIBusInfoPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+
+  //=== VK_AMD_display_native_hdr ===
+  template <> struct StructExtends<DisplayNativeHdrSurfaceCapabilitiesAMD, SurfaceCapabilities2KHR>{ enum { value = true }; };
+  template <> struct StructExtends<SwapchainDisplayNativeHdrCreateInfoAMD, SwapchainCreateInfoKHR>{ enum { value = true }; };
+
+  //=== VK_EXT_fragment_density_map ===
+  template <> struct StructExtends<PhysicalDeviceFragmentDensityMapFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceFragmentDensityMapFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceFragmentDensityMapPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<RenderPassFragmentDensityMapCreateInfoEXT, RenderPassCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<RenderPassFragmentDensityMapCreateInfoEXT, RenderPassCreateInfo2>{ enum { value = true }; };
+
+  //=== VK_KHR_fragment_shading_rate ===
+  template <> struct StructExtends<FragmentShadingRateAttachmentInfoKHR, SubpassDescription2>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineFragmentShadingRateStateCreateInfoKHR, GraphicsPipelineCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceFragmentShadingRateFeaturesKHR, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceFragmentShadingRateFeaturesKHR, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceFragmentShadingRatePropertiesKHR, PhysicalDeviceProperties2>{ enum { value = true }; };
+
+  //=== VK_AMD_shader_core_properties2 ===
+  template <> struct StructExtends<PhysicalDeviceShaderCoreProperties2AMD, PhysicalDeviceProperties2>{ enum { value = true }; };
+
+  //=== VK_AMD_device_coherent_memory ===
+  template <> struct StructExtends<PhysicalDeviceCoherentMemoryFeaturesAMD, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceCoherentMemoryFeaturesAMD, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_shader_image_atomic_int64 ===
+  template <> struct StructExtends<PhysicalDeviceShaderImageAtomicInt64FeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderImageAtomicInt64FeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_memory_budget ===
+  template <> struct StructExtends<PhysicalDeviceMemoryBudgetPropertiesEXT, PhysicalDeviceMemoryProperties2>{ enum { value = true }; };
+
+  //=== VK_EXT_memory_priority ===
+  template <> struct StructExtends<PhysicalDeviceMemoryPriorityFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceMemoryPriorityFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<MemoryPriorityAllocateInfoEXT, MemoryAllocateInfo>{ enum { value = true }; };
+
+  //=== VK_KHR_surface_protected_capabilities ===
+  template <> struct StructExtends<SurfaceProtectedCapabilitiesKHR, SurfaceCapabilities2KHR>{ enum { value = true }; };
+
+  //=== VK_NV_dedicated_allocation_image_aliasing ===
+  template <> struct StructExtends<PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_buffer_device_address ===
+  template <> struct StructExtends<PhysicalDeviceBufferDeviceAddressFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceBufferDeviceAddressFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<BufferDeviceAddressCreateInfoEXT, BufferCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_validation_features ===
+  template <> struct StructExtends<ValidationFeaturesEXT, InstanceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_KHR_present_wait ===
+  template <> struct StructExtends<PhysicalDevicePresentWaitFeaturesKHR, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevicePresentWaitFeaturesKHR, DeviceCreateInfo>{ enum { value = true }; };
+
+  // NOTE(review): every `StructExtends<Extending, Base>` specialization in this
+  // table sets `value = true` to record, at compile time, that `Extending` is a
+  // legal extension of `Base` — in Vulkan terms, presumably that `Extending` may
+  // appear in `Base`'s pNext chain (matches the `structextends` attribute in the
+  // Vulkan registry; confirm against Vulkan-Hpp's StructureChain docs). This
+  // block is auto-generated from vk.xml: do not hand-edit individual entries.
+  // Platform-specific entries are guarded by the same preprocessor symbols as
+  // the structs they reference (VK_USE_PLATFORM_WIN32_KHR, _METAL_EXT,
+  // _FUCHSIA, VK_ENABLE_BETA_EXTENSIONS).
+
+  //=== VK_NV_cooperative_matrix ===
+  template <> struct StructExtends<PhysicalDeviceCooperativeMatrixFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceCooperativeMatrixFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceCooperativeMatrixPropertiesNV, PhysicalDeviceProperties2>{ enum { value = true }; };
+
+  //=== VK_NV_coverage_reduction_mode ===
+  template <> struct StructExtends<PhysicalDeviceCoverageReductionModeFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceCoverageReductionModeFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineCoverageReductionStateCreateInfoNV, PipelineMultisampleStateCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_fragment_shader_interlock ===
+  template <> struct StructExtends<PhysicalDeviceFragmentShaderInterlockFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceFragmentShaderInterlockFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_ycbcr_image_arrays ===
+  template <> struct StructExtends<PhysicalDeviceYcbcrImageArraysFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceYcbcrImageArraysFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_provoking_vertex ===
+  template <> struct StructExtends<PhysicalDeviceProvokingVertexFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceProvokingVertexFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceProvokingVertexPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineRasterizationProvokingVertexStateCreateInfoEXT, PipelineRasterizationStateCreateInfo>{ enum { value = true }; };
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_EXT_full_screen_exclusive ===
+  template <> struct StructExtends<SurfaceFullScreenExclusiveInfoEXT, PhysicalDeviceSurfaceInfo2KHR>{ enum { value = true }; };
+  template <> struct StructExtends<SurfaceFullScreenExclusiveInfoEXT, SwapchainCreateInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<SurfaceCapabilitiesFullScreenExclusiveEXT, SurfaceCapabilities2KHR>{ enum { value = true }; };
+  template <> struct StructExtends<SurfaceFullScreenExclusiveWin32InfoEXT, PhysicalDeviceSurfaceInfo2KHR>{ enum { value = true }; };
+  template <> struct StructExtends<SurfaceFullScreenExclusiveWin32InfoEXT, SwapchainCreateInfoKHR>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_EXT_line_rasterization ===
+  template <> struct StructExtends<PhysicalDeviceLineRasterizationFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceLineRasterizationFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceLineRasterizationPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineRasterizationLineStateCreateInfoEXT, PipelineRasterizationStateCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_shader_atomic_float ===
+  template <> struct StructExtends<PhysicalDeviceShaderAtomicFloatFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderAtomicFloatFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_index_type_uint8 ===
+  template <> struct StructExtends<PhysicalDeviceIndexTypeUint8FeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceIndexTypeUint8FeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_extended_dynamic_state ===
+  template <> struct StructExtends<PhysicalDeviceExtendedDynamicStateFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceExtendedDynamicStateFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_KHR_pipeline_executable_properties ===
+  template <> struct StructExtends<PhysicalDevicePipelineExecutablePropertiesFeaturesKHR, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevicePipelineExecutablePropertiesFeaturesKHR, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_shader_atomic_float2 ===
+  template <> struct StructExtends<PhysicalDeviceShaderAtomicFloat2FeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderAtomicFloat2FeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_surface_maintenance1 ===
+  template <> struct StructExtends<SurfacePresentModeEXT, PhysicalDeviceSurfaceInfo2KHR>{ enum { value = true }; };
+  template <> struct StructExtends<SurfacePresentScalingCapabilitiesEXT, SurfaceCapabilities2KHR>{ enum { value = true }; };
+  template <> struct StructExtends<SurfacePresentModeCompatibilityEXT, SurfaceCapabilities2KHR>{ enum { value = true }; };
+
+  //=== VK_EXT_swapchain_maintenance1 ===
+  template <> struct StructExtends<PhysicalDeviceSwapchainMaintenance1FeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceSwapchainMaintenance1FeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<SwapchainPresentFenceInfoEXT, PresentInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<SwapchainPresentModesCreateInfoEXT, SwapchainCreateInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<SwapchainPresentModeInfoEXT, PresentInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<SwapchainPresentScalingCreateInfoEXT, SwapchainCreateInfoKHR>{ enum { value = true }; };
+
+  //=== VK_NV_device_generated_commands ===
+  template <> struct StructExtends<PhysicalDeviceDeviceGeneratedCommandsPropertiesNV, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceDeviceGeneratedCommandsFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceDeviceGeneratedCommandsFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<GraphicsPipelineShaderGroupsCreateInfoNV, GraphicsPipelineCreateInfo>{ enum { value = true }; };
+
+  //=== VK_NV_inherited_viewport_scissor ===
+  template <> struct StructExtends<PhysicalDeviceInheritedViewportScissorFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceInheritedViewportScissorFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<CommandBufferInheritanceViewportScissorInfoNV, CommandBufferInheritanceInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_texel_buffer_alignment ===
+  template <> struct StructExtends<PhysicalDeviceTexelBufferAlignmentFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceTexelBufferAlignmentFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_QCOM_render_pass_transform ===
+  template <> struct StructExtends<RenderPassTransformBeginInfoQCOM, RenderPassBeginInfo>{ enum { value = true }; };
+  template <> struct StructExtends<CommandBufferInheritanceRenderPassTransformInfoQCOM, CommandBufferInheritanceInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_device_memory_report ===
+  template <> struct StructExtends<PhysicalDeviceDeviceMemoryReportFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceDeviceMemoryReportFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<DeviceDeviceMemoryReportCreateInfoEXT, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_robustness2 ===
+  template <> struct StructExtends<PhysicalDeviceRobustness2FeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceRobustness2FeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceRobustness2PropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+
+  //=== VK_EXT_custom_border_color ===
+  template <> struct StructExtends<SamplerCustomBorderColorCreateInfoEXT, SamplerCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceCustomBorderColorPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceCustomBorderColorFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceCustomBorderColorFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_KHR_pipeline_library ===
+  template <> struct StructExtends<PipelineLibraryCreateInfoKHR, GraphicsPipelineCreateInfo>{ enum { value = true }; };
+
+  //=== VK_NV_present_barrier ===
+  template <> struct StructExtends<PhysicalDevicePresentBarrierFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevicePresentBarrierFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<SurfaceCapabilitiesPresentBarrierNV, SurfaceCapabilities2KHR>{ enum { value = true }; };
+  template <> struct StructExtends<SwapchainPresentBarrierCreateInfoNV, SwapchainCreateInfoKHR>{ enum { value = true }; };
+
+  //=== VK_KHR_present_id ===
+  template <> struct StructExtends<PresentIdKHR, PresentInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevicePresentIdFeaturesKHR, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevicePresentIdFeaturesKHR, DeviceCreateInfo>{ enum { value = true }; };
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  //=== VK_KHR_video_encode_queue ===
+  template <> struct StructExtends<VideoEncodeCapabilitiesKHR, VideoCapabilitiesKHR>{ enum { value = true }; };
+  template <> struct StructExtends<VideoEncodeUsageInfoKHR, VideoProfileInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<VideoEncodeUsageInfoKHR, QueryPoolCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<VideoEncodeRateControlInfoKHR, VideoCodingControlInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<VideoEncodeRateControlLayerInfoKHR, VideoCodingControlInfoKHR>{ enum { value = true }; };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  //=== VK_NV_device_diagnostics_config ===
+  template <> struct StructExtends<PhysicalDeviceDiagnosticsConfigFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceDiagnosticsConfigFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<DeviceDiagnosticsConfigCreateInfoNV, DeviceCreateInfo>{ enum { value = true }; };
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+  //=== VK_EXT_metal_objects ===
+  template <> struct StructExtends<ExportMetalObjectCreateInfoEXT, InstanceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ExportMetalObjectCreateInfoEXT, MemoryAllocateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ExportMetalObjectCreateInfoEXT, ImageCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ExportMetalObjectCreateInfoEXT, ImageViewCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ExportMetalObjectCreateInfoEXT, BufferViewCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ExportMetalObjectCreateInfoEXT, SemaphoreCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ExportMetalObjectCreateInfoEXT, EventCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ExportMetalDeviceInfoEXT, ExportMetalObjectsInfoEXT>{ enum { value = true }; };
+  template <> struct StructExtends<ExportMetalCommandQueueInfoEXT, ExportMetalObjectsInfoEXT>{ enum { value = true }; };
+  template <> struct StructExtends<ExportMetalBufferInfoEXT, ExportMetalObjectsInfoEXT>{ enum { value = true }; };
+  template <> struct StructExtends<ImportMetalBufferInfoEXT, MemoryAllocateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ExportMetalTextureInfoEXT, ExportMetalObjectsInfoEXT>{ enum { value = true }; };
+  template <> struct StructExtends<ImportMetalTextureInfoEXT, ImageCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ExportMetalIOSurfaceInfoEXT, ExportMetalObjectsInfoEXT>{ enum { value = true }; };
+  template <> struct StructExtends<ImportMetalIOSurfaceInfoEXT, ImageCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ExportMetalSharedEventInfoEXT, ExportMetalObjectsInfoEXT>{ enum { value = true }; };
+  template <> struct StructExtends<ImportMetalSharedEventInfoEXT, SemaphoreCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ImportMetalSharedEventInfoEXT, EventCreateInfo>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  //=== VK_KHR_synchronization2 ===
+  template <> struct StructExtends<QueueFamilyCheckpointProperties2NV, QueueFamilyProperties2>{ enum { value = true }; };
+
+  //=== VK_EXT_descriptor_buffer ===
+  template <> struct StructExtends<PhysicalDeviceDescriptorBufferPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceDescriptorBufferFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceDescriptorBufferFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<DescriptorBufferBindingPushDescriptorBufferHandleEXT, DescriptorBufferBindingInfoEXT>{ enum { value = true }; };
+  template <> struct StructExtends<OpaqueCaptureDescriptorDataCreateInfoEXT, BufferCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<OpaqueCaptureDescriptorDataCreateInfoEXT, ImageCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<OpaqueCaptureDescriptorDataCreateInfoEXT, ImageViewCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<OpaqueCaptureDescriptorDataCreateInfoEXT, SamplerCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<OpaqueCaptureDescriptorDataCreateInfoEXT, AccelerationStructureCreateInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<OpaqueCaptureDescriptorDataCreateInfoEXT, AccelerationStructureCreateInfoNV>{ enum { value = true }; };
+
+  //=== VK_EXT_graphics_pipeline_library ===
+  template <> struct StructExtends<PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<GraphicsPipelineLibraryCreateInfoEXT, GraphicsPipelineCreateInfo>{ enum { value = true }; };
+
+  //=== VK_AMD_shader_early_and_late_fragment_tests ===
+  template <> struct StructExtends<PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_KHR_fragment_shader_barycentric ===
+  template <> struct StructExtends<PhysicalDeviceFragmentShaderBarycentricFeaturesKHR, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceFragmentShaderBarycentricFeaturesKHR, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceFragmentShaderBarycentricPropertiesKHR, PhysicalDeviceProperties2>{ enum { value = true }; };
+
+  //=== VK_KHR_shader_subgroup_uniform_control_flow ===
+  template <> struct StructExtends<PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_NV_fragment_shading_rate_enums ===
+  template <> struct StructExtends<PhysicalDeviceFragmentShadingRateEnumsFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceFragmentShadingRateEnumsFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceFragmentShadingRateEnumsPropertiesNV, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineFragmentShadingRateEnumStateCreateInfoNV, GraphicsPipelineCreateInfo>{ enum { value = true }; };
+
+  //=== VK_NV_ray_tracing_motion_blur ===
+  template <> struct StructExtends<AccelerationStructureGeometryMotionTrianglesDataNV, AccelerationStructureGeometryTrianglesDataKHR>{ enum { value = true }; };
+  template <> struct StructExtends<AccelerationStructureMotionInfoNV, AccelerationStructureCreateInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceRayTracingMotionBlurFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceRayTracingMotionBlurFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_mesh_shader ===
+  template <> struct StructExtends<PhysicalDeviceMeshShaderFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceMeshShaderFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceMeshShaderPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+
+  //=== VK_EXT_ycbcr_2plane_444_formats ===
+  template <> struct StructExtends<PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_fragment_density_map2 ===
+  template <> struct StructExtends<PhysicalDeviceFragmentDensityMap2FeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceFragmentDensityMap2FeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceFragmentDensityMap2PropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+
+  //=== VK_QCOM_rotated_copy_commands ===
+  template <> struct StructExtends<CopyCommandTransformInfoQCOM, BufferImageCopy2>{ enum { value = true }; };
+  template <> struct StructExtends<CopyCommandTransformInfoQCOM, ImageBlit2>{ enum { value = true }; };
+
+  //=== VK_KHR_workgroup_memory_explicit_layout ===
+  template <> struct StructExtends<PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_image_compression_control ===
+  template <> struct StructExtends<PhysicalDeviceImageCompressionControlFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceImageCompressionControlFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ImageCompressionControlEXT, ImageCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ImageCompressionControlEXT, SwapchainCreateInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<ImageCompressionControlEXT, PhysicalDeviceImageFormatInfo2>{ enum { value = true }; };
+  template <> struct StructExtends<ImageCompressionPropertiesEXT, ImageFormatProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<ImageCompressionPropertiesEXT, SurfaceFormat2KHR>{ enum { value = true }; };
+  template <> struct StructExtends<ImageCompressionPropertiesEXT, SubresourceLayout2EXT>{ enum { value = true }; };
+
+  //=== VK_EXT_attachment_feedback_loop_layout ===
+  template <> struct StructExtends<PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_4444_formats ===
+  template <> struct StructExtends<PhysicalDevice4444FormatsFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevice4444FormatsFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_device_fault ===
+  template <> struct StructExtends<PhysicalDeviceFaultFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceFaultFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_rgba10x6_formats ===
+  template <> struct StructExtends<PhysicalDeviceRGBA10X6FormatsFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceRGBA10X6FormatsFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_KHR_ray_tracing_pipeline ===
+  template <> struct StructExtends<PhysicalDeviceRayTracingPipelineFeaturesKHR, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceRayTracingPipelineFeaturesKHR, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceRayTracingPipelinePropertiesKHR, PhysicalDeviceProperties2>{ enum { value = true }; };
+
+  //=== VK_KHR_ray_query ===
+  template <> struct StructExtends<PhysicalDeviceRayQueryFeaturesKHR, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceRayQueryFeaturesKHR, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_vertex_input_dynamic_state ===
+  template <> struct StructExtends<PhysicalDeviceVertexInputDynamicStateFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceVertexInputDynamicStateFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_physical_device_drm ===
+  template <> struct StructExtends<PhysicalDeviceDrmPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+
+  //=== VK_EXT_device_address_binding_report ===
+  template <> struct StructExtends<PhysicalDeviceAddressBindingReportFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceAddressBindingReportFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<DeviceAddressBindingCallbackDataEXT, DebugUtilsMessengerCallbackDataEXT>{ enum { value = true }; };
+
+  //=== VK_EXT_depth_clip_control ===
+  template <> struct StructExtends<PhysicalDeviceDepthClipControlFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceDepthClipControlFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineViewportDepthClipControlCreateInfoEXT, PipelineViewportStateCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_primitive_topology_list_restart ===
+  template <> struct StructExtends<PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_external_memory ===
+  template <> struct StructExtends<ImportMemoryZirconHandleInfoFUCHSIA, MemoryAllocateInfo>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_buffer_collection ===
+  template <> struct StructExtends<ImportMemoryBufferCollectionFUCHSIA, MemoryAllocateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<BufferCollectionImageCreateInfoFUCHSIA, ImageCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<BufferCollectionBufferCreateInfoFUCHSIA, BufferCreateInfo>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  //=== VK_HUAWEI_subpass_shading ===
+  template <> struct StructExtends<SubpassShadingPipelineCreateInfoHUAWEI, ComputePipelineCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceSubpassShadingFeaturesHUAWEI, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceSubpassShadingFeaturesHUAWEI, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceSubpassShadingPropertiesHUAWEI, PhysicalDeviceProperties2>{ enum { value = true }; };
+
+  //=== VK_HUAWEI_invocation_mask ===
+  template <> struct StructExtends<PhysicalDeviceInvocationMaskFeaturesHUAWEI, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceInvocationMaskFeaturesHUAWEI, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_NV_external_memory_rdma ===
+  template <> struct StructExtends<PhysicalDeviceExternalMemoryRDMAFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceExternalMemoryRDMAFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_pipeline_properties ===
+  template <> struct StructExtends<PhysicalDevicePipelinePropertiesFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevicePipelinePropertiesFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_multisampled_render_to_single_sampled ===
+  template <> struct StructExtends<PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<SubpassResolvePerformanceQueryEXT, FormatProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<MultisampledRenderToSingleSampledInfoEXT, SubpassDescription2>{ enum { value = true }; };
+  template <> struct StructExtends<MultisampledRenderToSingleSampledInfoEXT, RenderingInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_extended_dynamic_state2 ===
+  template <> struct StructExtends<PhysicalDeviceExtendedDynamicState2FeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceExtendedDynamicState2FeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_color_write_enable ===
+  template <> struct StructExtends<PhysicalDeviceColorWriteEnableFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceColorWriteEnableFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineColorWriteCreateInfoEXT, PipelineColorBlendStateCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_primitives_generated_query ===
+  template <> struct StructExtends<PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_KHR_ray_tracing_maintenance1 ===
+  template <> struct StructExtends<PhysicalDeviceRayTracingMaintenance1FeaturesKHR, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceRayTracingMaintenance1FeaturesKHR, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_image_view_min_lod ===
+  template <> struct StructExtends<PhysicalDeviceImageViewMinLodFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceImageViewMinLodFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ImageViewMinLodCreateInfoEXT, ImageViewCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_multi_draw ===
+  template <> struct StructExtends<PhysicalDeviceMultiDrawFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceMultiDrawFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceMultiDrawPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+
+  //=== VK_EXT_image_2d_view_of_3d ===
+  template <> struct StructExtends<PhysicalDeviceImage2DViewOf3DFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceImage2DViewOf3DFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_opacity_micromap ===
+  template <> struct StructExtends<PhysicalDeviceOpacityMicromapFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceOpacityMicromapFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceOpacityMicromapPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<AccelerationStructureTrianglesOpacityMicromapEXT, AccelerationStructureGeometryTrianglesDataKHR>{ enum { value = true }; };
+
+  //=== VK_HUAWEI_cluster_culling_shader ===
+  template <> struct StructExtends<PhysicalDeviceClusterCullingShaderFeaturesHUAWEI, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceClusterCullingShaderFeaturesHUAWEI, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceClusterCullingShaderPropertiesHUAWEI, PhysicalDeviceProperties2>{ enum { value = true }; };
+
+  //=== VK_EXT_border_color_swizzle ===
+  template <> struct StructExtends<PhysicalDeviceBorderColorSwizzleFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceBorderColorSwizzleFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<SamplerBorderColorComponentMappingCreateInfoEXT, SamplerCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_pageable_device_local_memory ===
+  template <> struct StructExtends<PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_VALVE_descriptor_set_host_mapping ===
+  template <> struct StructExtends<PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_depth_clamp_zero_one ===
+  template <> struct StructExtends<PhysicalDeviceDepthClampZeroOneFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceDepthClampZeroOneFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_non_seamless_cube_map ===
+  template <> struct StructExtends<PhysicalDeviceNonSeamlessCubeMapFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceNonSeamlessCubeMapFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_QCOM_fragment_density_map_offset ===
+  template <> struct StructExtends<PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<SubpassFragmentDensityMapOffsetEndInfoQCOM, SubpassEndInfo>{ enum { value = true }; };
+
+  //=== VK_NV_copy_memory_indirect ===
+  template <> struct StructExtends<PhysicalDeviceCopyMemoryIndirectFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceCopyMemoryIndirectFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceCopyMemoryIndirectPropertiesNV, PhysicalDeviceProperties2>{ enum { value = true }; };
+
+  //=== VK_NV_memory_decompression ===
+  template <> struct StructExtends<PhysicalDeviceMemoryDecompressionFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceMemoryDecompressionFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceMemoryDecompressionPropertiesNV, PhysicalDeviceProperties2>{ enum { value = true }; };
+
+  //=== VK_NV_linear_color_attachment ===
+  template <> struct StructExtends<PhysicalDeviceLinearColorAttachmentFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceLinearColorAttachmentFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_image_compression_control_swapchain ===
+  template <> struct StructExtends<PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_QCOM_image_processing ===
+  template <> struct StructExtends<ImageViewSampleWeightCreateInfoQCOM, ImageViewCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceImageProcessingFeaturesQCOM, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceImageProcessingFeaturesQCOM, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceImageProcessingPropertiesQCOM, PhysicalDeviceProperties2>{ enum { value = true }; };
+
+  //=== VK_EXT_extended_dynamic_state3 ===
+  template <> struct StructExtends<PhysicalDeviceExtendedDynamicState3FeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceExtendedDynamicState3FeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceExtendedDynamicState3PropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+
+  //=== VK_EXT_subpass_merge_feedback ===
+  template <> struct StructExtends<PhysicalDeviceSubpassMergeFeedbackFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceSubpassMergeFeedbackFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<RenderPassCreationControlEXT, RenderPassCreateInfo2>{ enum { value = true }; };
+  template <> struct StructExtends<RenderPassCreationControlEXT, SubpassDescription2>{ enum { value = true }; };
+  template <> struct StructExtends<RenderPassCreationFeedbackCreateInfoEXT, RenderPassCreateInfo2>{ enum { value = true }; };
+  template <> struct StructExtends<RenderPassSubpassFeedbackCreateInfoEXT, SubpassDescription2>{ enum { value = true }; };
+
+  //=== VK_LUNARG_direct_driver_loading ===
+  template <> struct StructExtends<DirectDriverLoadingListLUNARG, InstanceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_shader_module_identifier ===
+  template <> struct StructExtends<PhysicalDeviceShaderModuleIdentifierFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderModuleIdentifierFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderModuleIdentifierPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineShaderStageModuleIdentifierCreateInfoEXT, PipelineShaderStageCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_rasterization_order_attachment_access ===
+  template <> struct StructExtends<PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_NV_optical_flow ===
+  template <> struct StructExtends<PhysicalDeviceOpticalFlowFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceOpticalFlowFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceOpticalFlowPropertiesNV, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<OpticalFlowImageFormatInfoNV, PhysicalDeviceImageFormatInfo2>{ enum { value = true }; };
+  template <> struct StructExtends<OpticalFlowImageFormatInfoNV, ImageCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<OpticalFlowSessionCreatePrivateDataInfoNV, OpticalFlowSessionCreateInfoNV>{ enum { value = true }; };
+
+  //=== VK_EXT_legacy_dithering ===
+  template <> struct StructExtends<PhysicalDeviceLegacyDitheringFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceLegacyDitheringFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_pipeline_protected_access ===
+  template <> struct StructExtends<PhysicalDevicePipelineProtectedAccessFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevicePipelineProtectedAccessFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_QCOM_tile_properties ===
+  template <> struct StructExtends<PhysicalDeviceTilePropertiesFeaturesQCOM, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceTilePropertiesFeaturesQCOM, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_SEC_amigo_profiling ===
+  template <> struct StructExtends<PhysicalDeviceAmigoProfilingFeaturesSEC, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceAmigoProfilingFeaturesSEC, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<AmigoProfilingSubmitInfoSEC, SubmitInfo>{ enum { value = true }; };
+
+  //=== VK_QCOM_multiview_per_view_viewports ===
+  template <> struct StructExtends<PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_NV_ray_tracing_invocation_reorder ===
+  template <> struct StructExtends<PhysicalDeviceRayTracingInvocationReorderPropertiesNV, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceRayTracingInvocationReorderFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceRayTracingInvocationReorderFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_mutable_descriptor_type ===
+  template <> struct StructExtends<PhysicalDeviceMutableDescriptorTypeFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceMutableDescriptorTypeFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<MutableDescriptorTypeCreateInfoEXT, DescriptorSetLayoutCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<MutableDescriptorTypeCreateInfoEXT, DescriptorPoolCreateInfo>{ enum { value = true }; };
+
+  //=== VK_ARM_shader_core_builtins ===
+  template <> struct StructExtends<PhysicalDeviceShaderCoreBuiltinsFeaturesARM, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderCoreBuiltinsFeaturesARM, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderCoreBuiltinsPropertiesARM, PhysicalDeviceProperties2>{ enum { value = true }; };
+
+#endif // VULKAN_HPP_DISABLE_ENHANCED_MODE
+
+#if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL
+  class DynamicLoader
+  {
+  public:
+#  ifdef VULKAN_HPP_NO_EXCEPTIONS
+    DynamicLoader( std::string const & vulkanLibraryName = {} ) VULKAN_HPP_NOEXCEPT
+#  else
+    DynamicLoader( std::string const & vulkanLibraryName = {} )
+#  endif
+    {
+      if ( !vulkanLibraryName.empty() )
+      {
+#  if defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNXNTO__ ) || defined(__Fuchsia__)
+        m_library = dlopen( vulkanLibraryName.c_str(), RTLD_NOW | RTLD_LOCAL );
+#  elif defined( _WIN32 )
+        m_library = ::LoadLibraryA( vulkanLibraryName.c_str() );
+#  else
+#    error unsupported platform
+#  endif
+      }
+      else
+      {
+#  if defined( __unix__ ) || defined( __QNXNTO__ ) || defined(__Fuchsia__)
+        m_library = dlopen( "libvulkan.so", RTLD_NOW | RTLD_LOCAL );
+        if ( m_library == nullptr )
+        {
+          m_library = dlopen( "libvulkan.so.1", RTLD_NOW | RTLD_LOCAL );
+        }
+#  elif defined( __APPLE__ )
+        m_library = dlopen( "libvulkan.dylib", RTLD_NOW | RTLD_LOCAL );
+#  elif defined( _WIN32 )
+        m_library = ::LoadLibraryA( "vulkan-1.dll" );
+#  else
+#    error unsupported platform
+#  endif
+      }
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+      if ( m_library == nullptr )
+      {
+        // NOTE there should be an InitializationFailedError, but msvc insists on the symbol does not exist within the scope of this function.
+        throw std::runtime_error( "Failed to load vulkan library!" );
+      }
+#endif
+    }
+
+    DynamicLoader( DynamicLoader const & ) = delete;
+
+    DynamicLoader( DynamicLoader && other ) VULKAN_HPP_NOEXCEPT : m_library(other.m_library)
+    {
+      other.m_library = nullptr;
+    }
+
+    DynamicLoader &operator=( DynamicLoader const & ) = delete;
+
+    DynamicLoader &operator=( DynamicLoader && other ) VULKAN_HPP_NOEXCEPT
+    {
+      std::swap(m_library, other.m_library);
+      return *this;
+    }
+
+    ~DynamicLoader() VULKAN_HPP_NOEXCEPT
+    {
+      if ( m_library )
+      {
+#  if defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNXNTO__ ) || defined(__Fuchsia__)
+        dlclose( m_library );
+#  elif defined( _WIN32 )
+        ::FreeLibrary( m_library );
+#  else
+#    error unsupported platform
+#  endif
+      }
+    }
+
+    template <typename T>
+    T getProcAddress( const char* function ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNXNTO__ ) || defined(__Fuchsia__)
+      return (T)dlsym( m_library, function );
+#  elif defined( _WIN32 )
+      return (T)::GetProcAddress( m_library, function );
+#  else
+#    error unsupported platform
+#  endif
+    }
+
+    bool success() const VULKAN_HPP_NOEXCEPT { return m_library != nullptr; }
+
+  private:
+#  if defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNXNTO__ ) || defined(__Fuchsia__)
+    void * m_library;
+#  elif defined( _WIN32 )
+    ::HINSTANCE m_library;
+#  else
+#    error unsupported platform
+#  endif
+  };
+#endif
+
+
+  using PFN_dummy = void ( * )();
+
+  class DispatchLoaderDynamic : public DispatchLoaderBase
+  {
+  public:
+
+  //=== VK_VERSION_1_0 ===
+    PFN_vkCreateInstance vkCreateInstance = 0;
+    PFN_vkDestroyInstance vkDestroyInstance = 0;
+    PFN_vkEnumeratePhysicalDevices vkEnumeratePhysicalDevices = 0;
+    PFN_vkGetPhysicalDeviceFeatures vkGetPhysicalDeviceFeatures = 0;
+    PFN_vkGetPhysicalDeviceFormatProperties vkGetPhysicalDeviceFormatProperties = 0;
+    PFN_vkGetPhysicalDeviceImageFormatProperties vkGetPhysicalDeviceImageFormatProperties = 0;
+    PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties = 0;
+    PFN_vkGetPhysicalDeviceQueueFamilyProperties vkGetPhysicalDeviceQueueFamilyProperties = 0;
+    PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties = 0;
+    PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = 0;
+    PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr = 0;
+    PFN_vkCreateDevice vkCreateDevice = 0;
+    PFN_vkDestroyDevice vkDestroyDevice = 0;
+    PFN_vkEnumerateInstanceExtensionProperties vkEnumerateInstanceExtensionProperties = 0;
+    PFN_vkEnumerateDeviceExtensionProperties vkEnumerateDeviceExtensionProperties = 0;
+    PFN_vkEnumerateInstanceLayerProperties vkEnumerateInstanceLayerProperties = 0;
+    PFN_vkEnumerateDeviceLayerProperties vkEnumerateDeviceLayerProperties = 0;
+    PFN_vkGetDeviceQueue vkGetDeviceQueue = 0;
+    PFN_vkQueueSubmit vkQueueSubmit = 0;
+    PFN_vkQueueWaitIdle vkQueueWaitIdle = 0;
+    PFN_vkDeviceWaitIdle vkDeviceWaitIdle = 0;
+    PFN_vkAllocateMemory vkAllocateMemory = 0;
+    PFN_vkFreeMemory vkFreeMemory = 0;
+    PFN_vkMapMemory vkMapMemory = 0;
+    PFN_vkUnmapMemory vkUnmapMemory = 0;
+    PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges = 0;
+    PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges = 0;
+    PFN_vkGetDeviceMemoryCommitment vkGetDeviceMemoryCommitment = 0;
+    PFN_vkBindBufferMemory vkBindBufferMemory = 0;
+    PFN_vkBindImageMemory vkBindImageMemory = 0;
+    PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements = 0;
+    PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements = 0;
+    PFN_vkGetImageSparseMemoryRequirements vkGetImageSparseMemoryRequirements = 0;
+    PFN_vkGetPhysicalDeviceSparseImageFormatProperties vkGetPhysicalDeviceSparseImageFormatProperties = 0;
+    PFN_vkQueueBindSparse vkQueueBindSparse = 0;
+    PFN_vkCreateFence vkCreateFence = 0;
+    PFN_vkDestroyFence vkDestroyFence = 0;
+    PFN_vkResetFences vkResetFences = 0;
+    PFN_vkGetFenceStatus vkGetFenceStatus = 0;
+    PFN_vkWaitForFences vkWaitForFences = 0;
+    PFN_vkCreateSemaphore vkCreateSemaphore = 0;
+    PFN_vkDestroySemaphore vkDestroySemaphore = 0;
+    PFN_vkCreateEvent vkCreateEvent = 0;
+    PFN_vkDestroyEvent vkDestroyEvent = 0;
+    PFN_vkGetEventStatus vkGetEventStatus = 0;
+    PFN_vkSetEvent vkSetEvent = 0;
+    PFN_vkResetEvent vkResetEvent = 0;
+    PFN_vkCreateQueryPool vkCreateQueryPool = 0;
+    PFN_vkDestroyQueryPool vkDestroyQueryPool = 0;
+    PFN_vkGetQueryPoolResults vkGetQueryPoolResults = 0;
+    PFN_vkCreateBuffer vkCreateBuffer = 0;
+    PFN_vkDestroyBuffer vkDestroyBuffer = 0;
+    PFN_vkCreateBufferView vkCreateBufferView = 0;
+    PFN_vkDestroyBufferView vkDestroyBufferView = 0;
+    PFN_vkCreateImage vkCreateImage = 0;
+    PFN_vkDestroyImage vkDestroyImage = 0;
+    PFN_vkGetImageSubresourceLayout vkGetImageSubresourceLayout = 0;
+    PFN_vkCreateImageView vkCreateImageView = 0;
+    PFN_vkDestroyImageView vkDestroyImageView = 0;
+    PFN_vkCreateShaderModule vkCreateShaderModule = 0;
+    PFN_vkDestroyShaderModule vkDestroyShaderModule = 0;
+    PFN_vkCreatePipelineCache vkCreatePipelineCache = 0;
+    PFN_vkDestroyPipelineCache vkDestroyPipelineCache = 0;
+    PFN_vkGetPipelineCacheData vkGetPipelineCacheData = 0;
+    PFN_vkMergePipelineCaches vkMergePipelineCaches = 0;
+    PFN_vkCreateGraphicsPipelines vkCreateGraphicsPipelines = 0;
+    PFN_vkCreateComputePipelines vkCreateComputePipelines = 0;
+    PFN_vkDestroyPipeline vkDestroyPipeline = 0;
+    PFN_vkCreatePipelineLayout vkCreatePipelineLayout = 0;
+    PFN_vkDestroyPipelineLayout vkDestroyPipelineLayout = 0;
+    PFN_vkCreateSampler vkCreateSampler = 0;
+    PFN_vkDestroySampler vkDestroySampler = 0;
+    PFN_vkCreateDescriptorSetLayout vkCreateDescriptorSetLayout = 0;
+    PFN_vkDestroyDescriptorSetLayout vkDestroyDescriptorSetLayout = 0;
+    PFN_vkCreateDescriptorPool vkCreateDescriptorPool = 0;
+    PFN_vkDestroyDescriptorPool vkDestroyDescriptorPool = 0;
+    PFN_vkResetDescriptorPool vkResetDescriptorPool = 0;
+    PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets = 0;
+    PFN_vkFreeDescriptorSets vkFreeDescriptorSets = 0;
+    PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets = 0;
+    PFN_vkCreateFramebuffer vkCreateFramebuffer = 0;
+    PFN_vkDestroyFramebuffer vkDestroyFramebuffer = 0;
+    PFN_vkCreateRenderPass vkCreateRenderPass = 0;
+    PFN_vkDestroyRenderPass vkDestroyRenderPass = 0;
+    PFN_vkGetRenderAreaGranularity vkGetRenderAreaGranularity = 0;
+    PFN_vkCreateCommandPool vkCreateCommandPool = 0;
+    PFN_vkDestroyCommandPool vkDestroyCommandPool = 0;
+    PFN_vkResetCommandPool vkResetCommandPool = 0;
+    PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers = 0;
+    PFN_vkFreeCommandBuffers vkFreeCommandBuffers = 0;
+    PFN_vkBeginCommandBuffer vkBeginCommandBuffer = 0;
+    PFN_vkEndCommandBuffer vkEndCommandBuffer = 0;
+    PFN_vkResetCommandBuffer vkResetCommandBuffer = 0;
+    PFN_vkCmdBindPipeline vkCmdBindPipeline = 0;
+    PFN_vkCmdSetViewport vkCmdSetViewport = 0;
+    PFN_vkCmdSetScissor vkCmdSetScissor = 0;
+    PFN_vkCmdSetLineWidth vkCmdSetLineWidth = 0;
+    PFN_vkCmdSetDepthBias vkCmdSetDepthBias = 0;
+    PFN_vkCmdSetBlendConstants vkCmdSetBlendConstants = 0;
+    PFN_vkCmdSetDepthBounds vkCmdSetDepthBounds = 0;
+    PFN_vkCmdSetStencilCompareMask vkCmdSetStencilCompareMask = 0;
+    PFN_vkCmdSetStencilWriteMask vkCmdSetStencilWriteMask = 0;
+    PFN_vkCmdSetStencilReference vkCmdSetStencilReference = 0;
+    PFN_vkCmdBindDescriptorSets vkCmdBindDescriptorSets = 0;
+    PFN_vkCmdBindIndexBuffer vkCmdBindIndexBuffer = 0;
+    PFN_vkCmdBindVertexBuffers vkCmdBindVertexBuffers = 0;
+    PFN_vkCmdDraw vkCmdDraw = 0;
+    PFN_vkCmdDrawIndexed vkCmdDrawIndexed = 0;
+    PFN_vkCmdDrawIndirect vkCmdDrawIndirect = 0;
+    PFN_vkCmdDrawIndexedIndirect vkCmdDrawIndexedIndirect = 0;
+    PFN_vkCmdDispatch vkCmdDispatch = 0;
+    PFN_vkCmdDispatchIndirect vkCmdDispatchIndirect = 0;
+    PFN_vkCmdCopyBuffer vkCmdCopyBuffer = 0;
+    PFN_vkCmdCopyImage vkCmdCopyImage = 0;
+    PFN_vkCmdBlitImage vkCmdBlitImage = 0;
+    PFN_vkCmdCopyBufferToImage vkCmdCopyBufferToImage = 0;
+    PFN_vkCmdCopyImageToBuffer vkCmdCopyImageToBuffer = 0;
+    PFN_vkCmdUpdateBuffer vkCmdUpdateBuffer = 0;
+    PFN_vkCmdFillBuffer vkCmdFillBuffer = 0;
+    PFN_vkCmdClearColorImage vkCmdClearColorImage = 0;
+    PFN_vkCmdClearDepthStencilImage vkCmdClearDepthStencilImage = 0;
+    PFN_vkCmdClearAttachments vkCmdClearAttachments = 0;
+    PFN_vkCmdResolveImage vkCmdResolveImage = 0;
+    PFN_vkCmdSetEvent vkCmdSetEvent = 0;
+    PFN_vkCmdResetEvent vkCmdResetEvent = 0;
+    PFN_vkCmdWaitEvents vkCmdWaitEvents = 0;
+    PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier = 0;
+    PFN_vkCmdBeginQuery vkCmdBeginQuery = 0;
+    PFN_vkCmdEndQuery vkCmdEndQuery = 0;
+    PFN_vkCmdResetQueryPool vkCmdResetQueryPool = 0;
+    PFN_vkCmdWriteTimestamp vkCmdWriteTimestamp = 0;
+    PFN_vkCmdCopyQueryPoolResults vkCmdCopyQueryPoolResults = 0;
+    PFN_vkCmdPushConstants vkCmdPushConstants = 0;
+    PFN_vkCmdBeginRenderPass vkCmdBeginRenderPass = 0;
+    PFN_vkCmdNextSubpass vkCmdNextSubpass = 0;
+    PFN_vkCmdEndRenderPass vkCmdEndRenderPass = 0;
+    PFN_vkCmdExecuteCommands vkCmdExecuteCommands = 0;
+
+  //=== VK_VERSION_1_1 ===
+    PFN_vkEnumerateInstanceVersion vkEnumerateInstanceVersion = 0;
+    PFN_vkBindBufferMemory2 vkBindBufferMemory2 = 0;
+    PFN_vkBindImageMemory2 vkBindImageMemory2 = 0;
+    PFN_vkGetDeviceGroupPeerMemoryFeatures vkGetDeviceGroupPeerMemoryFeatures = 0;
+    PFN_vkCmdSetDeviceMask vkCmdSetDeviceMask = 0;
+    PFN_vkCmdDispatchBase vkCmdDispatchBase = 0;
+    PFN_vkEnumeratePhysicalDeviceGroups vkEnumeratePhysicalDeviceGroups = 0;
+    PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2 = 0;
+    PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2 = 0;
+    PFN_vkGetImageSparseMemoryRequirements2 vkGetImageSparseMemoryRequirements2 = 0;
+    PFN_vkGetPhysicalDeviceFeatures2 vkGetPhysicalDeviceFeatures2 = 0;
+    PFN_vkGetPhysicalDeviceProperties2 vkGetPhysicalDeviceProperties2 = 0;
+    PFN_vkGetPhysicalDeviceFormatProperties2 vkGetPhysicalDeviceFormatProperties2 = 0;
+    PFN_vkGetPhysicalDeviceImageFormatProperties2 vkGetPhysicalDeviceImageFormatProperties2 = 0;
+    PFN_vkGetPhysicalDeviceQueueFamilyProperties2 vkGetPhysicalDeviceQueueFamilyProperties2 = 0;
+    PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2 = 0;
+    PFN_vkGetPhysicalDeviceSparseImageFormatProperties2 vkGetPhysicalDeviceSparseImageFormatProperties2 = 0;
+    PFN_vkTrimCommandPool vkTrimCommandPool = 0;
+    PFN_vkGetDeviceQueue2 vkGetDeviceQueue2 = 0;
+    PFN_vkCreateSamplerYcbcrConversion vkCreateSamplerYcbcrConversion = 0;
+    PFN_vkDestroySamplerYcbcrConversion vkDestroySamplerYcbcrConversion = 0;
+    PFN_vkCreateDescriptorUpdateTemplate vkCreateDescriptorUpdateTemplate = 0;
+    PFN_vkDestroyDescriptorUpdateTemplate vkDestroyDescriptorUpdateTemplate = 0;
+    PFN_vkUpdateDescriptorSetWithTemplate vkUpdateDescriptorSetWithTemplate = 0;
+    PFN_vkGetPhysicalDeviceExternalBufferProperties vkGetPhysicalDeviceExternalBufferProperties = 0;
+    PFN_vkGetPhysicalDeviceExternalFenceProperties vkGetPhysicalDeviceExternalFenceProperties = 0;
+    PFN_vkGetPhysicalDeviceExternalSemaphoreProperties vkGetPhysicalDeviceExternalSemaphoreProperties = 0;
+    PFN_vkGetDescriptorSetLayoutSupport vkGetDescriptorSetLayoutSupport = 0;
+
+  //=== VK_VERSION_1_2 ===
+    PFN_vkCmdDrawIndirectCount vkCmdDrawIndirectCount = 0;
+    PFN_vkCmdDrawIndexedIndirectCount vkCmdDrawIndexedIndirectCount = 0;
+    PFN_vkCreateRenderPass2 vkCreateRenderPass2 = 0;
+    PFN_vkCmdBeginRenderPass2 vkCmdBeginRenderPass2 = 0;
+    PFN_vkCmdNextSubpass2 vkCmdNextSubpass2 = 0;
+    PFN_vkCmdEndRenderPass2 vkCmdEndRenderPass2 = 0;
+    PFN_vkResetQueryPool vkResetQueryPool = 0;
+    PFN_vkGetSemaphoreCounterValue vkGetSemaphoreCounterValue = 0;
+    PFN_vkWaitSemaphores vkWaitSemaphores = 0;
+    PFN_vkSignalSemaphore vkSignalSemaphore = 0;
+    PFN_vkGetBufferDeviceAddress vkGetBufferDeviceAddress = 0;
+    PFN_vkGetBufferOpaqueCaptureAddress vkGetBufferOpaqueCaptureAddress = 0;
+    PFN_vkGetDeviceMemoryOpaqueCaptureAddress vkGetDeviceMemoryOpaqueCaptureAddress = 0;
+
+  //=== VK_VERSION_1_3 ===
+    PFN_vkGetPhysicalDeviceToolProperties vkGetPhysicalDeviceToolProperties = 0;
+    PFN_vkCreatePrivateDataSlot vkCreatePrivateDataSlot = 0;
+    PFN_vkDestroyPrivateDataSlot vkDestroyPrivateDataSlot = 0;
+    PFN_vkSetPrivateData vkSetPrivateData = 0;
+    PFN_vkGetPrivateData vkGetPrivateData = 0;
+    PFN_vkCmdSetEvent2 vkCmdSetEvent2 = 0;
+    PFN_vkCmdResetEvent2 vkCmdResetEvent2 = 0;
+    PFN_vkCmdWaitEvents2 vkCmdWaitEvents2 = 0;
+    PFN_vkCmdPipelineBarrier2 vkCmdPipelineBarrier2 = 0;
+    PFN_vkCmdWriteTimestamp2 vkCmdWriteTimestamp2 = 0;
+    PFN_vkQueueSubmit2 vkQueueSubmit2 = 0;
+    PFN_vkCmdCopyBuffer2 vkCmdCopyBuffer2 = 0;
+    PFN_vkCmdCopyImage2 vkCmdCopyImage2 = 0;
+    PFN_vkCmdCopyBufferToImage2 vkCmdCopyBufferToImage2 = 0;
+    PFN_vkCmdCopyImageToBuffer2 vkCmdCopyImageToBuffer2 = 0;
+    PFN_vkCmdBlitImage2 vkCmdBlitImage2 = 0;
+    PFN_vkCmdResolveImage2 vkCmdResolveImage2 = 0;
+    PFN_vkCmdBeginRendering vkCmdBeginRendering = 0;
+    PFN_vkCmdEndRendering vkCmdEndRendering = 0;
+    PFN_vkCmdSetCullMode vkCmdSetCullMode = 0;
+    PFN_vkCmdSetFrontFace vkCmdSetFrontFace = 0;
+    PFN_vkCmdSetPrimitiveTopology vkCmdSetPrimitiveTopology = 0;
+    PFN_vkCmdSetViewportWithCount vkCmdSetViewportWithCount = 0;
+    PFN_vkCmdSetScissorWithCount vkCmdSetScissorWithCount = 0;
+    PFN_vkCmdBindVertexBuffers2 vkCmdBindVertexBuffers2 = 0;
+    PFN_vkCmdSetDepthTestEnable vkCmdSetDepthTestEnable = 0;
+    PFN_vkCmdSetDepthWriteEnable vkCmdSetDepthWriteEnable = 0;
+    PFN_vkCmdSetDepthCompareOp vkCmdSetDepthCompareOp = 0;
+    PFN_vkCmdSetDepthBoundsTestEnable vkCmdSetDepthBoundsTestEnable = 0;
+    PFN_vkCmdSetStencilTestEnable vkCmdSetStencilTestEnable = 0;
+    PFN_vkCmdSetStencilOp vkCmdSetStencilOp = 0;
+    PFN_vkCmdSetRasterizerDiscardEnable vkCmdSetRasterizerDiscardEnable = 0;
+    PFN_vkCmdSetDepthBiasEnable vkCmdSetDepthBiasEnable = 0;
+    PFN_vkCmdSetPrimitiveRestartEnable vkCmdSetPrimitiveRestartEnable = 0;
+    PFN_vkGetDeviceBufferMemoryRequirements vkGetDeviceBufferMemoryRequirements = 0;
+    PFN_vkGetDeviceImageMemoryRequirements vkGetDeviceImageMemoryRequirements = 0;
+    PFN_vkGetDeviceImageSparseMemoryRequirements vkGetDeviceImageSparseMemoryRequirements = 0;
+
+  //=== VK_KHR_surface ===
+    PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR = 0;
+    PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR = 0;
+    PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR vkGetPhysicalDeviceSurfaceCapabilitiesKHR = 0;
+    PFN_vkGetPhysicalDeviceSurfaceFormatsKHR vkGetPhysicalDeviceSurfaceFormatsKHR = 0;
+    PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR = 0;
+
+  //=== VK_KHR_swapchain ===
+    PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR = 0;
+    PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR = 0;
+    PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR = 0;
+    PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR = 0;
+    PFN_vkQueuePresentKHR vkQueuePresentKHR = 0;
+    PFN_vkGetDeviceGroupPresentCapabilitiesKHR vkGetDeviceGroupPresentCapabilitiesKHR = 0;
+    PFN_vkGetDeviceGroupSurfacePresentModesKHR vkGetDeviceGroupSurfacePresentModesKHR = 0;
+    PFN_vkGetPhysicalDevicePresentRectanglesKHR vkGetPhysicalDevicePresentRectanglesKHR = 0;
+    PFN_vkAcquireNextImage2KHR vkAcquireNextImage2KHR = 0;
+
+  //=== VK_KHR_display ===
+    PFN_vkGetPhysicalDeviceDisplayPropertiesKHR vkGetPhysicalDeviceDisplayPropertiesKHR = 0;
+    PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR vkGetPhysicalDeviceDisplayPlanePropertiesKHR = 0;
+    PFN_vkGetDisplayPlaneSupportedDisplaysKHR vkGetDisplayPlaneSupportedDisplaysKHR = 0;
+    PFN_vkGetDisplayModePropertiesKHR vkGetDisplayModePropertiesKHR = 0;
+    PFN_vkCreateDisplayModeKHR vkCreateDisplayModeKHR = 0;
+    PFN_vkGetDisplayPlaneCapabilitiesKHR vkGetDisplayPlaneCapabilitiesKHR = 0;
+    PFN_vkCreateDisplayPlaneSurfaceKHR vkCreateDisplayPlaneSurfaceKHR = 0;
+
+  //=== VK_KHR_display_swapchain ===
+    PFN_vkCreateSharedSwapchainsKHR vkCreateSharedSwapchainsKHR = 0;
+
+#if defined( VK_USE_PLATFORM_XLIB_KHR )
+  //=== VK_KHR_xlib_surface ===
+    PFN_vkCreateXlibSurfaceKHR vkCreateXlibSurfaceKHR = 0;
+    PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR vkGetPhysicalDeviceXlibPresentationSupportKHR = 0;
+#else
+    PFN_dummy vkCreateXlibSurfaceKHR_placeholder = 0;
+    PFN_dummy vkGetPhysicalDeviceXlibPresentationSupportKHR_placeholder = 0;
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+#if defined( VK_USE_PLATFORM_XCB_KHR )
+  //=== VK_KHR_xcb_surface ===
+    PFN_vkCreateXcbSurfaceKHR vkCreateXcbSurfaceKHR = 0;
+    PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR vkGetPhysicalDeviceXcbPresentationSupportKHR = 0;
+#else
+    PFN_dummy vkCreateXcbSurfaceKHR_placeholder = 0;
+    PFN_dummy vkGetPhysicalDeviceXcbPresentationSupportKHR_placeholder = 0;
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
+  //=== VK_KHR_wayland_surface ===
+    PFN_vkCreateWaylandSurfaceKHR vkCreateWaylandSurfaceKHR = 0;
+    PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR vkGetPhysicalDeviceWaylandPresentationSupportKHR = 0;
+#else
+    PFN_dummy vkCreateWaylandSurfaceKHR_placeholder = 0;
+    PFN_dummy vkGetPhysicalDeviceWaylandPresentationSupportKHR_placeholder = 0;
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  //=== VK_KHR_android_surface ===
+    PFN_vkCreateAndroidSurfaceKHR vkCreateAndroidSurfaceKHR = 0;
+#else
+    PFN_dummy vkCreateAndroidSurfaceKHR_placeholder = 0;
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_win32_surface ===
+    PFN_vkCreateWin32SurfaceKHR vkCreateWin32SurfaceKHR = 0;
+    PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR vkGetPhysicalDeviceWin32PresentationSupportKHR = 0;
+#else
+    PFN_dummy vkCreateWin32SurfaceKHR_placeholder = 0;
+    PFN_dummy vkGetPhysicalDeviceWin32PresentationSupportKHR_placeholder = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_EXT_debug_report ===
+    PFN_vkCreateDebugReportCallbackEXT vkCreateDebugReportCallbackEXT = 0;
+    PFN_vkDestroyDebugReportCallbackEXT vkDestroyDebugReportCallbackEXT = 0;
+    PFN_vkDebugReportMessageEXT vkDebugReportMessageEXT = 0;
+
+  //=== VK_EXT_debug_marker ===
+    PFN_vkDebugMarkerSetObjectTagEXT vkDebugMarkerSetObjectTagEXT = 0;
+    PFN_vkDebugMarkerSetObjectNameEXT vkDebugMarkerSetObjectNameEXT = 0;
+    PFN_vkCmdDebugMarkerBeginEXT vkCmdDebugMarkerBeginEXT = 0;
+    PFN_vkCmdDebugMarkerEndEXT vkCmdDebugMarkerEndEXT = 0;
+    PFN_vkCmdDebugMarkerInsertEXT vkCmdDebugMarkerInsertEXT = 0;
+
+  //=== VK_KHR_video_queue ===
+    PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR vkGetPhysicalDeviceVideoCapabilitiesKHR = 0;
+    PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR vkGetPhysicalDeviceVideoFormatPropertiesKHR = 0;
+    PFN_vkCreateVideoSessionKHR vkCreateVideoSessionKHR = 0;
+    PFN_vkDestroyVideoSessionKHR vkDestroyVideoSessionKHR = 0;
+    PFN_vkGetVideoSessionMemoryRequirementsKHR vkGetVideoSessionMemoryRequirementsKHR = 0;
+    PFN_vkBindVideoSessionMemoryKHR vkBindVideoSessionMemoryKHR = 0;
+    PFN_vkCreateVideoSessionParametersKHR vkCreateVideoSessionParametersKHR = 0;
+    PFN_vkUpdateVideoSessionParametersKHR vkUpdateVideoSessionParametersKHR = 0;
+    PFN_vkDestroyVideoSessionParametersKHR vkDestroyVideoSessionParametersKHR = 0;
+    PFN_vkCmdBeginVideoCodingKHR vkCmdBeginVideoCodingKHR = 0;
+    PFN_vkCmdEndVideoCodingKHR vkCmdEndVideoCodingKHR = 0;
+    PFN_vkCmdControlVideoCodingKHR vkCmdControlVideoCodingKHR = 0;
+
+  //=== VK_KHR_video_decode_queue ===
+    PFN_vkCmdDecodeVideoKHR vkCmdDecodeVideoKHR = 0;
+
+  //=== VK_EXT_transform_feedback ===
+    PFN_vkCmdBindTransformFeedbackBuffersEXT vkCmdBindTransformFeedbackBuffersEXT = 0;
+    PFN_vkCmdBeginTransformFeedbackEXT vkCmdBeginTransformFeedbackEXT = 0;
+    PFN_vkCmdEndTransformFeedbackEXT vkCmdEndTransformFeedbackEXT = 0;
+    PFN_vkCmdBeginQueryIndexedEXT vkCmdBeginQueryIndexedEXT = 0;
+    PFN_vkCmdEndQueryIndexedEXT vkCmdEndQueryIndexedEXT = 0;
+    PFN_vkCmdDrawIndirectByteCountEXT vkCmdDrawIndirectByteCountEXT = 0;
+
+  //=== VK_NVX_binary_import ===
+    PFN_vkCreateCuModuleNVX vkCreateCuModuleNVX = 0;
+    PFN_vkCreateCuFunctionNVX vkCreateCuFunctionNVX = 0;
+    PFN_vkDestroyCuModuleNVX vkDestroyCuModuleNVX = 0;
+    PFN_vkDestroyCuFunctionNVX vkDestroyCuFunctionNVX = 0;
+    PFN_vkCmdCuLaunchKernelNVX vkCmdCuLaunchKernelNVX = 0;
+
+  //=== VK_NVX_image_view_handle ===
+    PFN_vkGetImageViewHandleNVX vkGetImageViewHandleNVX = 0;
+    PFN_vkGetImageViewAddressNVX vkGetImageViewAddressNVX = 0;
+
+  //=== VK_AMD_draw_indirect_count ===
+    PFN_vkCmdDrawIndirectCountAMD vkCmdDrawIndirectCountAMD = 0;
+    PFN_vkCmdDrawIndexedIndirectCountAMD vkCmdDrawIndexedIndirectCountAMD = 0;
+
+  //=== VK_AMD_shader_info ===
+    PFN_vkGetShaderInfoAMD vkGetShaderInfoAMD = 0;
+
+  //=== VK_KHR_dynamic_rendering ===
+    PFN_vkCmdBeginRenderingKHR vkCmdBeginRenderingKHR = 0;
+    PFN_vkCmdEndRenderingKHR vkCmdEndRenderingKHR = 0;
+
+#if defined( VK_USE_PLATFORM_GGP )
+  //=== VK_GGP_stream_descriptor_surface ===
+    PFN_vkCreateStreamDescriptorSurfaceGGP vkCreateStreamDescriptorSurfaceGGP = 0;
+#else
+    PFN_dummy vkCreateStreamDescriptorSurfaceGGP_placeholder = 0;
+#endif /*VK_USE_PLATFORM_GGP*/
+
+  //=== VK_NV_external_memory_capabilities ===
+    PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV vkGetPhysicalDeviceExternalImageFormatPropertiesNV = 0;
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_NV_external_memory_win32 ===
+    PFN_vkGetMemoryWin32HandleNV vkGetMemoryWin32HandleNV = 0;
+#else
+    PFN_dummy vkGetMemoryWin32HandleNV_placeholder = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_get_physical_device_properties2 ===
+    PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR = 0;
+    PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR = 0;
+    PFN_vkGetPhysicalDeviceFormatProperties2KHR vkGetPhysicalDeviceFormatProperties2KHR = 0;
+    PFN_vkGetPhysicalDeviceImageFormatProperties2KHR vkGetPhysicalDeviceImageFormatProperties2KHR = 0;
+    PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR vkGetPhysicalDeviceQueueFamilyProperties2KHR = 0;
+    PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR = 0;
+    PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR vkGetPhysicalDeviceSparseImageFormatProperties2KHR = 0;
+
+  //=== VK_KHR_device_group ===
+    PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR vkGetDeviceGroupPeerMemoryFeaturesKHR = 0;
+    PFN_vkCmdSetDeviceMaskKHR vkCmdSetDeviceMaskKHR = 0;
+    PFN_vkCmdDispatchBaseKHR vkCmdDispatchBaseKHR = 0;
+
+#if defined( VK_USE_PLATFORM_VI_NN )
+  //=== VK_NN_vi_surface ===
+    PFN_vkCreateViSurfaceNN vkCreateViSurfaceNN = 0;
+#else
+    PFN_dummy vkCreateViSurfaceNN_placeholder = 0;
+#endif /*VK_USE_PLATFORM_VI_NN*/
+
+  //=== VK_KHR_maintenance1 ===
+    PFN_vkTrimCommandPoolKHR vkTrimCommandPoolKHR = 0;
+
+  //=== VK_KHR_device_group_creation ===
+    PFN_vkEnumeratePhysicalDeviceGroupsKHR vkEnumeratePhysicalDeviceGroupsKHR = 0;
+
+  //=== VK_KHR_external_memory_capabilities ===
+    PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR vkGetPhysicalDeviceExternalBufferPropertiesKHR = 0;
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_external_memory_win32 ===
+    PFN_vkGetMemoryWin32HandleKHR vkGetMemoryWin32HandleKHR = 0;
+    PFN_vkGetMemoryWin32HandlePropertiesKHR vkGetMemoryWin32HandlePropertiesKHR = 0;
+#else
+    PFN_dummy vkGetMemoryWin32HandleKHR_placeholder = 0;
+    PFN_dummy vkGetMemoryWin32HandlePropertiesKHR_placeholder = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_external_memory_fd ===
+    PFN_vkGetMemoryFdKHR vkGetMemoryFdKHR = 0;
+    PFN_vkGetMemoryFdPropertiesKHR vkGetMemoryFdPropertiesKHR = 0;
+
+  //=== VK_KHR_external_semaphore_capabilities ===
+    PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = 0;
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_external_semaphore_win32 ===
+    PFN_vkImportSemaphoreWin32HandleKHR vkImportSemaphoreWin32HandleKHR = 0;
+    PFN_vkGetSemaphoreWin32HandleKHR vkGetSemaphoreWin32HandleKHR = 0;
+#else
+    PFN_dummy vkImportSemaphoreWin32HandleKHR_placeholder = 0;
+    PFN_dummy vkGetSemaphoreWin32HandleKHR_placeholder = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_external_semaphore_fd ===
+    PFN_vkImportSemaphoreFdKHR vkImportSemaphoreFdKHR = 0;
+    PFN_vkGetSemaphoreFdKHR vkGetSemaphoreFdKHR = 0;
+
+  //=== VK_KHR_push_descriptor ===
+    PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR = 0;
+    PFN_vkCmdPushDescriptorSetWithTemplateKHR vkCmdPushDescriptorSetWithTemplateKHR = 0;
+
+  //=== VK_EXT_conditional_rendering ===
+    PFN_vkCmdBeginConditionalRenderingEXT vkCmdBeginConditionalRenderingEXT = 0;
+    PFN_vkCmdEndConditionalRenderingEXT vkCmdEndConditionalRenderingEXT = 0;
+
+  //=== VK_KHR_descriptor_update_template ===
+    PFN_vkCreateDescriptorUpdateTemplateKHR vkCreateDescriptorUpdateTemplateKHR = 0;
+    PFN_vkDestroyDescriptorUpdateTemplateKHR vkDestroyDescriptorUpdateTemplateKHR = 0;
+    PFN_vkUpdateDescriptorSetWithTemplateKHR vkUpdateDescriptorSetWithTemplateKHR = 0;
+
+  //=== VK_NV_clip_space_w_scaling ===
+    PFN_vkCmdSetViewportWScalingNV vkCmdSetViewportWScalingNV = 0;
+
+  //=== VK_EXT_direct_mode_display ===
+    PFN_vkReleaseDisplayEXT vkReleaseDisplayEXT = 0;
+
+#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT )
+  //=== VK_EXT_acquire_xlib_display ===
+    PFN_vkAcquireXlibDisplayEXT vkAcquireXlibDisplayEXT = 0;
+    PFN_vkGetRandROutputDisplayEXT vkGetRandROutputDisplayEXT = 0;
+#else
+    PFN_dummy vkAcquireXlibDisplayEXT_placeholder = 0;
+    PFN_dummy vkGetRandROutputDisplayEXT_placeholder = 0;
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+
+  //=== VK_EXT_display_surface_counter ===
+    PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT vkGetPhysicalDeviceSurfaceCapabilities2EXT = 0;
+
+  //=== VK_EXT_display_control ===
+    PFN_vkDisplayPowerControlEXT vkDisplayPowerControlEXT = 0;
+    PFN_vkRegisterDeviceEventEXT vkRegisterDeviceEventEXT = 0;
+    PFN_vkRegisterDisplayEventEXT vkRegisterDisplayEventEXT = 0;
+    PFN_vkGetSwapchainCounterEXT vkGetSwapchainCounterEXT = 0;
+
+  //=== VK_GOOGLE_display_timing ===
+    PFN_vkGetRefreshCycleDurationGOOGLE vkGetRefreshCycleDurationGOOGLE = 0;
+    PFN_vkGetPastPresentationTimingGOOGLE vkGetPastPresentationTimingGOOGLE = 0;
+
+  //=== VK_EXT_discard_rectangles ===
+    PFN_vkCmdSetDiscardRectangleEXT vkCmdSetDiscardRectangleEXT = 0;
+
+  //=== VK_EXT_hdr_metadata ===
+    PFN_vkSetHdrMetadataEXT vkSetHdrMetadataEXT = 0;
+
+  //=== VK_KHR_create_renderpass2 ===
+    PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR = 0;
+    PFN_vkCmdBeginRenderPass2KHR vkCmdBeginRenderPass2KHR = 0;
+    PFN_vkCmdNextSubpass2KHR vkCmdNextSubpass2KHR = 0;
+    PFN_vkCmdEndRenderPass2KHR vkCmdEndRenderPass2KHR = 0;
+
+  //=== VK_KHR_shared_presentable_image ===
+    PFN_vkGetSwapchainStatusKHR vkGetSwapchainStatusKHR = 0;
+
+  //=== VK_KHR_external_fence_capabilities ===
+    PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR vkGetPhysicalDeviceExternalFencePropertiesKHR = 0;
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_external_fence_win32 ===
+    PFN_vkImportFenceWin32HandleKHR vkImportFenceWin32HandleKHR = 0;
+    PFN_vkGetFenceWin32HandleKHR vkGetFenceWin32HandleKHR = 0;
+#else
+    PFN_dummy vkImportFenceWin32HandleKHR_placeholder = 0;
+    PFN_dummy vkGetFenceWin32HandleKHR_placeholder = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_external_fence_fd ===
+    PFN_vkImportFenceFdKHR vkImportFenceFdKHR = 0;
+    PFN_vkGetFenceFdKHR vkGetFenceFdKHR = 0;
+
+  //=== VK_KHR_performance_query ===
+    PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = 0;
+    PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = 0;
+    PFN_vkAcquireProfilingLockKHR vkAcquireProfilingLockKHR = 0;
+    PFN_vkReleaseProfilingLockKHR vkReleaseProfilingLockKHR = 0;
+
+  //=== VK_KHR_get_surface_capabilities2 ===
+    PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR vkGetPhysicalDeviceSurfaceCapabilities2KHR = 0;
+    PFN_vkGetPhysicalDeviceSurfaceFormats2KHR vkGetPhysicalDeviceSurfaceFormats2KHR = 0;
+
+  //=== VK_KHR_get_display_properties2 ===
+    PFN_vkGetPhysicalDeviceDisplayProperties2KHR vkGetPhysicalDeviceDisplayProperties2KHR = 0;
+    PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR vkGetPhysicalDeviceDisplayPlaneProperties2KHR = 0;
+    PFN_vkGetDisplayModeProperties2KHR vkGetDisplayModeProperties2KHR = 0;
+    PFN_vkGetDisplayPlaneCapabilities2KHR vkGetDisplayPlaneCapabilities2KHR = 0;
+
+#if defined( VK_USE_PLATFORM_IOS_MVK )
+  //=== VK_MVK_ios_surface ===
+    PFN_vkCreateIOSSurfaceMVK vkCreateIOSSurfaceMVK = 0;
+#else
+    PFN_dummy vkCreateIOSSurfaceMVK_placeholder = 0;
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+#if defined( VK_USE_PLATFORM_MACOS_MVK )
+  //=== VK_MVK_macos_surface ===
+    PFN_vkCreateMacOSSurfaceMVK vkCreateMacOSSurfaceMVK = 0;
+#else
+    PFN_dummy vkCreateMacOSSurfaceMVK_placeholder = 0;
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+
+  //=== VK_EXT_debug_utils ===
+    PFN_vkSetDebugUtilsObjectNameEXT vkSetDebugUtilsObjectNameEXT = 0;
+    PFN_vkSetDebugUtilsObjectTagEXT vkSetDebugUtilsObjectTagEXT = 0;
+    PFN_vkQueueBeginDebugUtilsLabelEXT vkQueueBeginDebugUtilsLabelEXT = 0;
+    PFN_vkQueueEndDebugUtilsLabelEXT vkQueueEndDebugUtilsLabelEXT = 0;
+    PFN_vkQueueInsertDebugUtilsLabelEXT vkQueueInsertDebugUtilsLabelEXT = 0;
+    PFN_vkCmdBeginDebugUtilsLabelEXT vkCmdBeginDebugUtilsLabelEXT = 0;
+    PFN_vkCmdEndDebugUtilsLabelEXT vkCmdEndDebugUtilsLabelEXT = 0;
+    PFN_vkCmdInsertDebugUtilsLabelEXT vkCmdInsertDebugUtilsLabelEXT = 0;
+    PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT = 0;
+    PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT = 0;
+    PFN_vkSubmitDebugUtilsMessageEXT vkSubmitDebugUtilsMessageEXT = 0;
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  //=== VK_ANDROID_external_memory_android_hardware_buffer ===
+    PFN_vkGetAndroidHardwareBufferPropertiesANDROID vkGetAndroidHardwareBufferPropertiesANDROID = 0;
+    PFN_vkGetMemoryAndroidHardwareBufferANDROID vkGetMemoryAndroidHardwareBufferANDROID = 0;
+#else
+    PFN_dummy vkGetAndroidHardwareBufferPropertiesANDROID_placeholder = 0;
+    PFN_dummy vkGetMemoryAndroidHardwareBufferANDROID_placeholder = 0;
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+  //=== VK_EXT_sample_locations ===
+    PFN_vkCmdSetSampleLocationsEXT vkCmdSetSampleLocationsEXT = 0;
+    PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT vkGetPhysicalDeviceMultisamplePropertiesEXT = 0;
+
+  //=== VK_KHR_get_memory_requirements2 ===
+    PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR = 0;
+    PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR = 0;
+    PFN_vkGetImageSparseMemoryRequirements2KHR vkGetImageSparseMemoryRequirements2KHR = 0;
+
+  //=== VK_KHR_acceleration_structure ===
+    PFN_vkCreateAccelerationStructureKHR vkCreateAccelerationStructureKHR = 0;
+    PFN_vkDestroyAccelerationStructureKHR vkDestroyAccelerationStructureKHR = 0;
+    PFN_vkCmdBuildAccelerationStructuresKHR vkCmdBuildAccelerationStructuresKHR = 0;
+    PFN_vkCmdBuildAccelerationStructuresIndirectKHR vkCmdBuildAccelerationStructuresIndirectKHR = 0;
+    PFN_vkBuildAccelerationStructuresKHR vkBuildAccelerationStructuresKHR = 0;
+    PFN_vkCopyAccelerationStructureKHR vkCopyAccelerationStructureKHR = 0;
+    PFN_vkCopyAccelerationStructureToMemoryKHR vkCopyAccelerationStructureToMemoryKHR = 0;
+    PFN_vkCopyMemoryToAccelerationStructureKHR vkCopyMemoryToAccelerationStructureKHR = 0;
+    PFN_vkWriteAccelerationStructuresPropertiesKHR vkWriteAccelerationStructuresPropertiesKHR = 0;
+    PFN_vkCmdCopyAccelerationStructureKHR vkCmdCopyAccelerationStructureKHR = 0;
+    PFN_vkCmdCopyAccelerationStructureToMemoryKHR vkCmdCopyAccelerationStructureToMemoryKHR = 0;
+    PFN_vkCmdCopyMemoryToAccelerationStructureKHR vkCmdCopyMemoryToAccelerationStructureKHR = 0;
+    PFN_vkGetAccelerationStructureDeviceAddressKHR vkGetAccelerationStructureDeviceAddressKHR = 0;
+    PFN_vkCmdWriteAccelerationStructuresPropertiesKHR vkCmdWriteAccelerationStructuresPropertiesKHR = 0;
+    PFN_vkGetDeviceAccelerationStructureCompatibilityKHR vkGetDeviceAccelerationStructureCompatibilityKHR = 0;
+    PFN_vkGetAccelerationStructureBuildSizesKHR vkGetAccelerationStructureBuildSizesKHR = 0;
+
+  //=== VK_KHR_sampler_ycbcr_conversion ===
+    PFN_vkCreateSamplerYcbcrConversionKHR vkCreateSamplerYcbcrConversionKHR = 0;
+    PFN_vkDestroySamplerYcbcrConversionKHR vkDestroySamplerYcbcrConversionKHR = 0;
+
+  //=== VK_KHR_bind_memory2 ===
+    PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR = 0;
+    PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR = 0;
+
+  //=== VK_EXT_image_drm_format_modifier ===
+    PFN_vkGetImageDrmFormatModifierPropertiesEXT vkGetImageDrmFormatModifierPropertiesEXT = 0;
+
+  //=== VK_EXT_validation_cache ===
+    PFN_vkCreateValidationCacheEXT vkCreateValidationCacheEXT = 0;
+    PFN_vkDestroyValidationCacheEXT vkDestroyValidationCacheEXT = 0;
+    PFN_vkMergeValidationCachesEXT vkMergeValidationCachesEXT = 0;
+    PFN_vkGetValidationCacheDataEXT vkGetValidationCacheDataEXT = 0;
+
+  //=== VK_NV_shading_rate_image ===
+    PFN_vkCmdBindShadingRateImageNV vkCmdBindShadingRateImageNV = 0;
+    PFN_vkCmdSetViewportShadingRatePaletteNV vkCmdSetViewportShadingRatePaletteNV = 0;
+    PFN_vkCmdSetCoarseSampleOrderNV vkCmdSetCoarseSampleOrderNV = 0;
+
+  //=== VK_NV_ray_tracing ===
+    PFN_vkCreateAccelerationStructureNV vkCreateAccelerationStructureNV = 0;
+    PFN_vkDestroyAccelerationStructureNV vkDestroyAccelerationStructureNV = 0;
+    PFN_vkGetAccelerationStructureMemoryRequirementsNV vkGetAccelerationStructureMemoryRequirementsNV = 0;
+    PFN_vkBindAccelerationStructureMemoryNV vkBindAccelerationStructureMemoryNV = 0;
+    PFN_vkCmdBuildAccelerationStructureNV vkCmdBuildAccelerationStructureNV = 0;
+    PFN_vkCmdCopyAccelerationStructureNV vkCmdCopyAccelerationStructureNV = 0;
+    PFN_vkCmdTraceRaysNV vkCmdTraceRaysNV = 0;
+    PFN_vkCreateRayTracingPipelinesNV vkCreateRayTracingPipelinesNV = 0;
+    PFN_vkGetRayTracingShaderGroupHandlesNV vkGetRayTracingShaderGroupHandlesNV = 0;
+    PFN_vkGetAccelerationStructureHandleNV vkGetAccelerationStructureHandleNV = 0;
+    PFN_vkCmdWriteAccelerationStructuresPropertiesNV vkCmdWriteAccelerationStructuresPropertiesNV = 0;
+    PFN_vkCompileDeferredNV vkCompileDeferredNV = 0;
+
+  //=== VK_KHR_maintenance3 ===
+    PFN_vkGetDescriptorSetLayoutSupportKHR vkGetDescriptorSetLayoutSupportKHR = 0;
+
+  //=== VK_KHR_draw_indirect_count ===
+    PFN_vkCmdDrawIndirectCountKHR vkCmdDrawIndirectCountKHR = 0;
+    PFN_vkCmdDrawIndexedIndirectCountKHR vkCmdDrawIndexedIndirectCountKHR = 0;
+
+  //=== VK_EXT_external_memory_host ===
+    PFN_vkGetMemoryHostPointerPropertiesEXT vkGetMemoryHostPointerPropertiesEXT = 0;
+
+  //=== VK_AMD_buffer_marker ===
+    PFN_vkCmdWriteBufferMarkerAMD vkCmdWriteBufferMarkerAMD = 0;
+
+  //=== VK_EXT_calibrated_timestamps ===
+    PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = 0;
+    PFN_vkGetCalibratedTimestampsEXT vkGetCalibratedTimestampsEXT = 0;
+
+  //=== VK_NV_mesh_shader ===
+    PFN_vkCmdDrawMeshTasksNV vkCmdDrawMeshTasksNV = 0;
+    PFN_vkCmdDrawMeshTasksIndirectNV vkCmdDrawMeshTasksIndirectNV = 0;
+    PFN_vkCmdDrawMeshTasksIndirectCountNV vkCmdDrawMeshTasksIndirectCountNV = 0;
+
+  //=== VK_NV_scissor_exclusive ===
+    PFN_vkCmdSetExclusiveScissorNV vkCmdSetExclusiveScissorNV = 0;
+
+  //=== VK_NV_device_diagnostic_checkpoints ===
+    PFN_vkCmdSetCheckpointNV vkCmdSetCheckpointNV = 0;
+    PFN_vkGetQueueCheckpointDataNV vkGetQueueCheckpointDataNV = 0;
+
+  //=== VK_KHR_timeline_semaphore ===
+    PFN_vkGetSemaphoreCounterValueKHR vkGetSemaphoreCounterValueKHR = 0;
+    PFN_vkWaitSemaphoresKHR vkWaitSemaphoresKHR = 0;
+    PFN_vkSignalSemaphoreKHR vkSignalSemaphoreKHR = 0;
+
+  //=== VK_INTEL_performance_query ===
+    PFN_vkInitializePerformanceApiINTEL vkInitializePerformanceApiINTEL = 0;
+    PFN_vkUninitializePerformanceApiINTEL vkUninitializePerformanceApiINTEL = 0;
+    PFN_vkCmdSetPerformanceMarkerINTEL vkCmdSetPerformanceMarkerINTEL = 0;
+    PFN_vkCmdSetPerformanceStreamMarkerINTEL vkCmdSetPerformanceStreamMarkerINTEL = 0;
+    PFN_vkCmdSetPerformanceOverrideINTEL vkCmdSetPerformanceOverrideINTEL = 0;
+    PFN_vkAcquirePerformanceConfigurationINTEL vkAcquirePerformanceConfigurationINTEL = 0;
+    PFN_vkReleasePerformanceConfigurationINTEL vkReleasePerformanceConfigurationINTEL = 0;
+    PFN_vkQueueSetPerformanceConfigurationINTEL vkQueueSetPerformanceConfigurationINTEL = 0;
+    PFN_vkGetPerformanceParameterINTEL vkGetPerformanceParameterINTEL = 0;
+
+  //=== VK_AMD_display_native_hdr ===
+    PFN_vkSetLocalDimmingAMD vkSetLocalDimmingAMD = 0;
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_imagepipe_surface ===
+    PFN_vkCreateImagePipeSurfaceFUCHSIA vkCreateImagePipeSurfaceFUCHSIA = 0;
+#else
+    PFN_dummy vkCreateImagePipeSurfaceFUCHSIA_placeholder = 0;
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+  //=== VK_EXT_metal_surface ===
+    PFN_vkCreateMetalSurfaceEXT vkCreateMetalSurfaceEXT = 0;
+#else
+    PFN_dummy vkCreateMetalSurfaceEXT_placeholder = 0;
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  //=== VK_KHR_fragment_shading_rate ===
+    PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR vkGetPhysicalDeviceFragmentShadingRatesKHR = 0;
+    PFN_vkCmdSetFragmentShadingRateKHR vkCmdSetFragmentShadingRateKHR = 0;
+
+  //=== VK_EXT_buffer_device_address ===
+    PFN_vkGetBufferDeviceAddressEXT vkGetBufferDeviceAddressEXT = 0;
+
+  //=== VK_EXT_tooling_info ===
+    PFN_vkGetPhysicalDeviceToolPropertiesEXT vkGetPhysicalDeviceToolPropertiesEXT = 0;
+
+  //=== VK_KHR_present_wait ===
+    PFN_vkWaitForPresentKHR vkWaitForPresentKHR = 0;
+
+  //=== VK_NV_cooperative_matrix ===
+    PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = 0;
+
+  //=== VK_NV_coverage_reduction_mode ===
+    PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = 0;
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_EXT_full_screen_exclusive ===
+    PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT vkGetPhysicalDeviceSurfacePresentModes2EXT = 0;
+    PFN_vkAcquireFullScreenExclusiveModeEXT vkAcquireFullScreenExclusiveModeEXT = 0;
+    PFN_vkReleaseFullScreenExclusiveModeEXT vkReleaseFullScreenExclusiveModeEXT = 0;
+    PFN_vkGetDeviceGroupSurfacePresentModes2EXT vkGetDeviceGroupSurfacePresentModes2EXT = 0;
+#else
+    PFN_dummy vkGetPhysicalDeviceSurfacePresentModes2EXT_placeholder = 0;
+    PFN_dummy vkAcquireFullScreenExclusiveModeEXT_placeholder = 0;
+    PFN_dummy vkReleaseFullScreenExclusiveModeEXT_placeholder = 0;
+    PFN_dummy vkGetDeviceGroupSurfacePresentModes2EXT_placeholder = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_EXT_headless_surface ===
+    PFN_vkCreateHeadlessSurfaceEXT vkCreateHeadlessSurfaceEXT = 0;
+
+  //=== VK_KHR_buffer_device_address ===
+    PFN_vkGetBufferDeviceAddressKHR vkGetBufferDeviceAddressKHR = 0;
+    PFN_vkGetBufferOpaqueCaptureAddressKHR vkGetBufferOpaqueCaptureAddressKHR = 0;
+    PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR vkGetDeviceMemoryOpaqueCaptureAddressKHR = 0;
+
+  //=== VK_EXT_line_rasterization ===
+    PFN_vkCmdSetLineStippleEXT vkCmdSetLineStippleEXT = 0;
+
+  //=== VK_EXT_host_query_reset ===
+    PFN_vkResetQueryPoolEXT vkResetQueryPoolEXT = 0;
+
+  //=== VK_EXT_extended_dynamic_state ===
+    PFN_vkCmdSetCullModeEXT vkCmdSetCullModeEXT = 0;
+    PFN_vkCmdSetFrontFaceEXT vkCmdSetFrontFaceEXT = 0;
+    PFN_vkCmdSetPrimitiveTopologyEXT vkCmdSetPrimitiveTopologyEXT = 0;
+    PFN_vkCmdSetViewportWithCountEXT vkCmdSetViewportWithCountEXT = 0;
+    PFN_vkCmdSetScissorWithCountEXT vkCmdSetScissorWithCountEXT = 0;
+    PFN_vkCmdBindVertexBuffers2EXT vkCmdBindVertexBuffers2EXT = 0;
+    PFN_vkCmdSetDepthTestEnableEXT vkCmdSetDepthTestEnableEXT = 0;
+    PFN_vkCmdSetDepthWriteEnableEXT vkCmdSetDepthWriteEnableEXT = 0;
+    PFN_vkCmdSetDepthCompareOpEXT vkCmdSetDepthCompareOpEXT = 0;
+    PFN_vkCmdSetDepthBoundsTestEnableEXT vkCmdSetDepthBoundsTestEnableEXT = 0;
+    PFN_vkCmdSetStencilTestEnableEXT vkCmdSetStencilTestEnableEXT = 0;
+    PFN_vkCmdSetStencilOpEXT vkCmdSetStencilOpEXT = 0;
+
+  //=== VK_KHR_deferred_host_operations ===
+    PFN_vkCreateDeferredOperationKHR vkCreateDeferredOperationKHR = 0;
+    PFN_vkDestroyDeferredOperationKHR vkDestroyDeferredOperationKHR = 0;
+    PFN_vkGetDeferredOperationMaxConcurrencyKHR vkGetDeferredOperationMaxConcurrencyKHR = 0;
+    PFN_vkGetDeferredOperationResultKHR vkGetDeferredOperationResultKHR = 0;
+    PFN_vkDeferredOperationJoinKHR vkDeferredOperationJoinKHR = 0;
+
+  //=== VK_KHR_pipeline_executable_properties ===
+    PFN_vkGetPipelineExecutablePropertiesKHR vkGetPipelineExecutablePropertiesKHR = 0;
+    PFN_vkGetPipelineExecutableStatisticsKHR vkGetPipelineExecutableStatisticsKHR = 0;
+    PFN_vkGetPipelineExecutableInternalRepresentationsKHR vkGetPipelineExecutableInternalRepresentationsKHR = 0;
+
+  //=== VK_EXT_swapchain_maintenance1 ===
+    PFN_vkReleaseSwapchainImagesEXT vkReleaseSwapchainImagesEXT = 0;
+
+  //=== VK_NV_device_generated_commands ===
+    PFN_vkGetGeneratedCommandsMemoryRequirementsNV vkGetGeneratedCommandsMemoryRequirementsNV = 0;
+    PFN_vkCmdPreprocessGeneratedCommandsNV vkCmdPreprocessGeneratedCommandsNV = 0;
+    PFN_vkCmdExecuteGeneratedCommandsNV vkCmdExecuteGeneratedCommandsNV = 0;
+    PFN_vkCmdBindPipelineShaderGroupNV vkCmdBindPipelineShaderGroupNV = 0;
+    PFN_vkCreateIndirectCommandsLayoutNV vkCreateIndirectCommandsLayoutNV = 0;
+    PFN_vkDestroyIndirectCommandsLayoutNV vkDestroyIndirectCommandsLayoutNV = 0;
+
+  //=== VK_EXT_acquire_drm_display ===
+    PFN_vkAcquireDrmDisplayEXT vkAcquireDrmDisplayEXT = 0;
+    PFN_vkGetDrmDisplayEXT vkGetDrmDisplayEXT = 0;
+
+  //=== VK_EXT_private_data ===
+    PFN_vkCreatePrivateDataSlotEXT vkCreatePrivateDataSlotEXT = 0;
+    PFN_vkDestroyPrivateDataSlotEXT vkDestroyPrivateDataSlotEXT = 0;
+    PFN_vkSetPrivateDataEXT vkSetPrivateDataEXT = 0;
+    PFN_vkGetPrivateDataEXT vkGetPrivateDataEXT = 0;
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  //=== VK_KHR_video_encode_queue ===
+    PFN_vkCmdEncodeVideoKHR vkCmdEncodeVideoKHR = 0;
+#else
+    PFN_dummy vkCmdEncodeVideoKHR_placeholder = 0;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+  //=== VK_EXT_metal_objects ===
+    PFN_vkExportMetalObjectsEXT vkExportMetalObjectsEXT = 0;
+#else
+    PFN_dummy vkExportMetalObjectsEXT_placeholder = 0;
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  //=== VK_KHR_synchronization2 ===
+    PFN_vkCmdSetEvent2KHR vkCmdSetEvent2KHR = 0;
+    PFN_vkCmdResetEvent2KHR vkCmdResetEvent2KHR = 0;
+    PFN_vkCmdWaitEvents2KHR vkCmdWaitEvents2KHR = 0;
+    PFN_vkCmdPipelineBarrier2KHR vkCmdPipelineBarrier2KHR = 0;
+    PFN_vkCmdWriteTimestamp2KHR vkCmdWriteTimestamp2KHR = 0;
+    PFN_vkQueueSubmit2KHR vkQueueSubmit2KHR = 0;
+    PFN_vkCmdWriteBufferMarker2AMD vkCmdWriteBufferMarker2AMD = 0;
+    PFN_vkGetQueueCheckpointData2NV vkGetQueueCheckpointData2NV = 0;
+
+  //=== VK_EXT_descriptor_buffer ===
+    PFN_vkGetDescriptorSetLayoutSizeEXT vkGetDescriptorSetLayoutSizeEXT = 0;
+    PFN_vkGetDescriptorSetLayoutBindingOffsetEXT vkGetDescriptorSetLayoutBindingOffsetEXT = 0;
+    PFN_vkGetDescriptorEXT vkGetDescriptorEXT = 0;
+    PFN_vkCmdBindDescriptorBuffersEXT vkCmdBindDescriptorBuffersEXT = 0;
+    PFN_vkCmdSetDescriptorBufferOffsetsEXT vkCmdSetDescriptorBufferOffsetsEXT = 0;
+    PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT vkCmdBindDescriptorBufferEmbeddedSamplersEXT = 0;
+    PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT vkGetBufferOpaqueCaptureDescriptorDataEXT = 0;
+    PFN_vkGetImageOpaqueCaptureDescriptorDataEXT vkGetImageOpaqueCaptureDescriptorDataEXT = 0;
+    PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT vkGetImageViewOpaqueCaptureDescriptorDataEXT = 0;
+    PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT vkGetSamplerOpaqueCaptureDescriptorDataEXT = 0;
+    PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT = 0;
+
+  //=== VK_NV_fragment_shading_rate_enums ===
+    PFN_vkCmdSetFragmentShadingRateEnumNV vkCmdSetFragmentShadingRateEnumNV = 0;
+
+  //=== VK_EXT_mesh_shader ===
+    PFN_vkCmdDrawMeshTasksEXT vkCmdDrawMeshTasksEXT = 0;
+    PFN_vkCmdDrawMeshTasksIndirectEXT vkCmdDrawMeshTasksIndirectEXT = 0;
+    PFN_vkCmdDrawMeshTasksIndirectCountEXT vkCmdDrawMeshTasksIndirectCountEXT = 0;
+
+  //=== VK_KHR_copy_commands2 ===
+    PFN_vkCmdCopyBuffer2KHR vkCmdCopyBuffer2KHR = 0;
+    PFN_vkCmdCopyImage2KHR vkCmdCopyImage2KHR = 0;
+    PFN_vkCmdCopyBufferToImage2KHR vkCmdCopyBufferToImage2KHR = 0;
+    PFN_vkCmdCopyImageToBuffer2KHR vkCmdCopyImageToBuffer2KHR = 0;
+    PFN_vkCmdBlitImage2KHR vkCmdBlitImage2KHR = 0;
+    PFN_vkCmdResolveImage2KHR vkCmdResolveImage2KHR = 0;
+
+  //=== VK_EXT_image_compression_control ===
+    PFN_vkGetImageSubresourceLayout2EXT vkGetImageSubresourceLayout2EXT = 0;
+
+  //=== VK_EXT_device_fault ===
+    PFN_vkGetDeviceFaultInfoEXT vkGetDeviceFaultInfoEXT = 0;
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_NV_acquire_winrt_display ===
+    PFN_vkAcquireWinrtDisplayNV vkAcquireWinrtDisplayNV = 0;
+    PFN_vkGetWinrtDisplayNV vkGetWinrtDisplayNV = 0;
+#else
+    PFN_dummy vkAcquireWinrtDisplayNV_placeholder = 0;
+    PFN_dummy vkGetWinrtDisplayNV_placeholder = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
+  //=== VK_EXT_directfb_surface ===
+    PFN_vkCreateDirectFBSurfaceEXT vkCreateDirectFBSurfaceEXT = 0;
+    PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT vkGetPhysicalDeviceDirectFBPresentationSupportEXT = 0;
+#else
+    PFN_dummy vkCreateDirectFBSurfaceEXT_placeholder = 0;
+    PFN_dummy vkGetPhysicalDeviceDirectFBPresentationSupportEXT_placeholder = 0;
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+
+  //=== VK_KHR_ray_tracing_pipeline ===
+    PFN_vkCmdTraceRaysKHR vkCmdTraceRaysKHR = 0;
+    PFN_vkCreateRayTracingPipelinesKHR vkCreateRayTracingPipelinesKHR = 0;
+    PFN_vkGetRayTracingShaderGroupHandlesKHR vkGetRayTracingShaderGroupHandlesKHR = 0;
+    PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = 0;
+    PFN_vkCmdTraceRaysIndirectKHR vkCmdTraceRaysIndirectKHR = 0;
+    PFN_vkGetRayTracingShaderGroupStackSizeKHR vkGetRayTracingShaderGroupStackSizeKHR = 0;
+    PFN_vkCmdSetRayTracingPipelineStackSizeKHR vkCmdSetRayTracingPipelineStackSizeKHR = 0;
+
+  //=== VK_EXT_vertex_input_dynamic_state ===
+    PFN_vkCmdSetVertexInputEXT vkCmdSetVertexInputEXT = 0;
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_external_memory ===
+    PFN_vkGetMemoryZirconHandleFUCHSIA vkGetMemoryZirconHandleFUCHSIA = 0;
+    PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA vkGetMemoryZirconHandlePropertiesFUCHSIA = 0;
+#else
+    PFN_dummy vkGetMemoryZirconHandleFUCHSIA_placeholder = 0;
+    PFN_dummy vkGetMemoryZirconHandlePropertiesFUCHSIA_placeholder = 0;
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_external_semaphore ===
+    PFN_vkImportSemaphoreZirconHandleFUCHSIA vkImportSemaphoreZirconHandleFUCHSIA = 0;
+    PFN_vkGetSemaphoreZirconHandleFUCHSIA vkGetSemaphoreZirconHandleFUCHSIA = 0;
+#else
+    PFN_dummy vkImportSemaphoreZirconHandleFUCHSIA_placeholder = 0;
+    PFN_dummy vkGetSemaphoreZirconHandleFUCHSIA_placeholder = 0;
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_buffer_collection ===
+    PFN_vkCreateBufferCollectionFUCHSIA vkCreateBufferCollectionFUCHSIA = 0;
+    PFN_vkSetBufferCollectionImageConstraintsFUCHSIA vkSetBufferCollectionImageConstraintsFUCHSIA = 0;
+    PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA vkSetBufferCollectionBufferConstraintsFUCHSIA = 0;
+    PFN_vkDestroyBufferCollectionFUCHSIA vkDestroyBufferCollectionFUCHSIA = 0;
+    PFN_vkGetBufferCollectionPropertiesFUCHSIA vkGetBufferCollectionPropertiesFUCHSIA = 0;
+#else
+    PFN_dummy vkCreateBufferCollectionFUCHSIA_placeholder = 0;
+    PFN_dummy vkSetBufferCollectionImageConstraintsFUCHSIA_placeholder = 0;
+    PFN_dummy vkSetBufferCollectionBufferConstraintsFUCHSIA_placeholder = 0;
+    PFN_dummy vkDestroyBufferCollectionFUCHSIA_placeholder = 0;
+    PFN_dummy vkGetBufferCollectionPropertiesFUCHSIA_placeholder = 0;
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  //=== VK_HUAWEI_subpass_shading ===
+    PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = 0;
+    PFN_vkCmdSubpassShadingHUAWEI vkCmdSubpassShadingHUAWEI = 0;
+
+  //=== VK_HUAWEI_invocation_mask ===
+    PFN_vkCmdBindInvocationMaskHUAWEI vkCmdBindInvocationMaskHUAWEI = 0;
+
+  //=== VK_NV_external_memory_rdma ===
+    PFN_vkGetMemoryRemoteAddressNV vkGetMemoryRemoteAddressNV = 0;
+
+  //=== VK_EXT_pipeline_properties ===
+    PFN_vkGetPipelinePropertiesEXT vkGetPipelinePropertiesEXT = 0;
+
+  //=== VK_EXT_extended_dynamic_state2 ===
+    PFN_vkCmdSetPatchControlPointsEXT vkCmdSetPatchControlPointsEXT = 0;
+    PFN_vkCmdSetRasterizerDiscardEnableEXT vkCmdSetRasterizerDiscardEnableEXT = 0;
+    PFN_vkCmdSetDepthBiasEnableEXT vkCmdSetDepthBiasEnableEXT = 0;
+    PFN_vkCmdSetLogicOpEXT vkCmdSetLogicOpEXT = 0;
+    PFN_vkCmdSetPrimitiveRestartEnableEXT vkCmdSetPrimitiveRestartEnableEXT = 0;
+
+#if defined( VK_USE_PLATFORM_SCREEN_QNX )
+  //=== VK_QNX_screen_surface ===
+    PFN_vkCreateScreenSurfaceQNX vkCreateScreenSurfaceQNX = 0;
+    PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX vkGetPhysicalDeviceScreenPresentationSupportQNX = 0;
+#else
+    PFN_dummy vkCreateScreenSurfaceQNX_placeholder = 0;
+    PFN_dummy vkGetPhysicalDeviceScreenPresentationSupportQNX_placeholder = 0;
+#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+
+  //=== VK_EXT_color_write_enable ===
+    PFN_vkCmdSetColorWriteEnableEXT vkCmdSetColorWriteEnableEXT = 0;
+
+  //=== VK_KHR_ray_tracing_maintenance1 ===
+    PFN_vkCmdTraceRaysIndirect2KHR vkCmdTraceRaysIndirect2KHR = 0;
+
+  //=== VK_EXT_multi_draw ===
+    PFN_vkCmdDrawMultiEXT vkCmdDrawMultiEXT = 0;
+    PFN_vkCmdDrawMultiIndexedEXT vkCmdDrawMultiIndexedEXT = 0;
+
+  //=== VK_EXT_opacity_micromap ===
+    PFN_vkCreateMicromapEXT vkCreateMicromapEXT = 0;
+    PFN_vkDestroyMicromapEXT vkDestroyMicromapEXT = 0;
+    PFN_vkCmdBuildMicromapsEXT vkCmdBuildMicromapsEXT = 0;
+    PFN_vkBuildMicromapsEXT vkBuildMicromapsEXT = 0;
+    PFN_vkCopyMicromapEXT vkCopyMicromapEXT = 0;
+    PFN_vkCopyMicromapToMemoryEXT vkCopyMicromapToMemoryEXT = 0;
+    PFN_vkCopyMemoryToMicromapEXT vkCopyMemoryToMicromapEXT = 0;
+    PFN_vkWriteMicromapsPropertiesEXT vkWriteMicromapsPropertiesEXT = 0;
+    PFN_vkCmdCopyMicromapEXT vkCmdCopyMicromapEXT = 0;
+    PFN_vkCmdCopyMicromapToMemoryEXT vkCmdCopyMicromapToMemoryEXT = 0;
+    PFN_vkCmdCopyMemoryToMicromapEXT vkCmdCopyMemoryToMicromapEXT = 0;
+    PFN_vkCmdWriteMicromapsPropertiesEXT vkCmdWriteMicromapsPropertiesEXT = 0;
+    PFN_vkGetDeviceMicromapCompatibilityEXT vkGetDeviceMicromapCompatibilityEXT = 0;
+    PFN_vkGetMicromapBuildSizesEXT vkGetMicromapBuildSizesEXT = 0;
+
+  //=== VK_HUAWEI_cluster_culling_shader ===
+    PFN_vkCmdDrawClusterHUAWEI vkCmdDrawClusterHUAWEI = 0;
+    PFN_vkCmdDrawClusterIndirectHUAWEI vkCmdDrawClusterIndirectHUAWEI = 0;
+
+  //=== VK_EXT_pageable_device_local_memory ===
+    PFN_vkSetDeviceMemoryPriorityEXT vkSetDeviceMemoryPriorityEXT = 0;
+
+  //=== VK_KHR_maintenance4 ===
+    PFN_vkGetDeviceBufferMemoryRequirementsKHR vkGetDeviceBufferMemoryRequirementsKHR = 0;
+    PFN_vkGetDeviceImageMemoryRequirementsKHR vkGetDeviceImageMemoryRequirementsKHR = 0;
+    // NOTE(review): tail of the per-command function-pointer member table of
+    // DispatchLoaderDynamic. Every entry is value-initialized to 0 (null) here
+    // and is filled in by the init() overloads below via vkGetInstanceProcAddr;
+    // an entry stays null when the loader does not expose the command.
+    PFN_vkGetDeviceImageSparseMemoryRequirementsKHR vkGetDeviceImageSparseMemoryRequirementsKHR = 0;
+
+  //=== VK_VALVE_descriptor_set_host_mapping ===
+    PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE vkGetDescriptorSetLayoutHostMappingInfoVALVE = 0;
+    PFN_vkGetDescriptorSetHostMappingVALVE vkGetDescriptorSetHostMappingVALVE = 0;
+
+  //=== VK_NV_copy_memory_indirect ===
+    PFN_vkCmdCopyMemoryIndirectNV vkCmdCopyMemoryIndirectNV = 0;
+    PFN_vkCmdCopyMemoryToImageIndirectNV vkCmdCopyMemoryToImageIndirectNV = 0;
+
+  //=== VK_NV_memory_decompression ===
+    PFN_vkCmdDecompressMemoryNV vkCmdDecompressMemoryNV = 0;
+    PFN_vkCmdDecompressMemoryIndirectCountNV vkCmdDecompressMemoryIndirectCountNV = 0;
+
+  //=== VK_EXT_extended_dynamic_state3 ===
+    PFN_vkCmdSetTessellationDomainOriginEXT vkCmdSetTessellationDomainOriginEXT = 0;
+    PFN_vkCmdSetDepthClampEnableEXT vkCmdSetDepthClampEnableEXT = 0;
+    PFN_vkCmdSetPolygonModeEXT vkCmdSetPolygonModeEXT = 0;
+    PFN_vkCmdSetRasterizationSamplesEXT vkCmdSetRasterizationSamplesEXT = 0;
+    PFN_vkCmdSetSampleMaskEXT vkCmdSetSampleMaskEXT = 0;
+    PFN_vkCmdSetAlphaToCoverageEnableEXT vkCmdSetAlphaToCoverageEnableEXT = 0;
+    PFN_vkCmdSetAlphaToOneEnableEXT vkCmdSetAlphaToOneEnableEXT = 0;
+    PFN_vkCmdSetLogicOpEnableEXT vkCmdSetLogicOpEnableEXT = 0;
+    PFN_vkCmdSetColorBlendEnableEXT vkCmdSetColorBlendEnableEXT = 0;
+    PFN_vkCmdSetColorBlendEquationEXT vkCmdSetColorBlendEquationEXT = 0;
+    PFN_vkCmdSetColorWriteMaskEXT vkCmdSetColorWriteMaskEXT = 0;
+    PFN_vkCmdSetRasterizationStreamEXT vkCmdSetRasterizationStreamEXT = 0;
+    PFN_vkCmdSetConservativeRasterizationModeEXT vkCmdSetConservativeRasterizationModeEXT = 0;
+    PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT vkCmdSetExtraPrimitiveOverestimationSizeEXT = 0;
+    PFN_vkCmdSetDepthClipEnableEXT vkCmdSetDepthClipEnableEXT = 0;
+    PFN_vkCmdSetSampleLocationsEnableEXT vkCmdSetSampleLocationsEnableEXT = 0;
+    PFN_vkCmdSetColorBlendAdvancedEXT vkCmdSetColorBlendAdvancedEXT = 0;
+    PFN_vkCmdSetProvokingVertexModeEXT vkCmdSetProvokingVertexModeEXT = 0;
+    PFN_vkCmdSetLineRasterizationModeEXT vkCmdSetLineRasterizationModeEXT = 0;
+    PFN_vkCmdSetLineStippleEnableEXT vkCmdSetLineStippleEnableEXT = 0;
+    PFN_vkCmdSetDepthClipNegativeOneToOneEXT vkCmdSetDepthClipNegativeOneToOneEXT = 0;
+    PFN_vkCmdSetViewportWScalingEnableNV vkCmdSetViewportWScalingEnableNV = 0;
+    PFN_vkCmdSetViewportSwizzleNV vkCmdSetViewportSwizzleNV = 0;
+    PFN_vkCmdSetCoverageToColorEnableNV vkCmdSetCoverageToColorEnableNV = 0;
+    PFN_vkCmdSetCoverageToColorLocationNV vkCmdSetCoverageToColorLocationNV = 0;
+    PFN_vkCmdSetCoverageModulationModeNV vkCmdSetCoverageModulationModeNV = 0;
+    PFN_vkCmdSetCoverageModulationTableEnableNV vkCmdSetCoverageModulationTableEnableNV = 0;
+    PFN_vkCmdSetCoverageModulationTableNV vkCmdSetCoverageModulationTableNV = 0;
+    PFN_vkCmdSetShadingRateImageEnableNV vkCmdSetShadingRateImageEnableNV = 0;
+    PFN_vkCmdSetRepresentativeFragmentTestEnableNV vkCmdSetRepresentativeFragmentTestEnableNV = 0;
+    PFN_vkCmdSetCoverageReductionModeNV vkCmdSetCoverageReductionModeNV = 0;
+
+  //=== VK_EXT_shader_module_identifier ===
+    PFN_vkGetShaderModuleIdentifierEXT vkGetShaderModuleIdentifierEXT = 0;
+    PFN_vkGetShaderModuleCreateInfoIdentifierEXT vkGetShaderModuleCreateInfoIdentifierEXT = 0;
+
+  //=== VK_NV_optical_flow ===
+    PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV vkGetPhysicalDeviceOpticalFlowImageFormatsNV = 0;
+    PFN_vkCreateOpticalFlowSessionNV vkCreateOpticalFlowSessionNV = 0;
+    PFN_vkDestroyOpticalFlowSessionNV vkDestroyOpticalFlowSessionNV = 0;
+    PFN_vkBindOpticalFlowSessionImageNV vkBindOpticalFlowSessionImageNV = 0;
+    PFN_vkCmdOpticalFlowExecuteNV vkCmdOpticalFlowExecuteNV = 0;
+
+  //=== VK_QCOM_tile_properties ===
+    PFN_vkGetFramebufferTilePropertiesQCOM vkGetFramebufferTilePropertiesQCOM = 0;
+    PFN_vkGetDynamicRenderingTilePropertiesQCOM vkGetDynamicRenderingTilePropertiesQCOM = 0;
+
+
+  public:
+    // Default construction leaves every function pointer null (see the member
+    // initializers above); call one of the init() overloads before dispatching.
+    // Copy construction copies an already-loaded table verbatim.
+    DispatchLoaderDynamic() VULKAN_HPP_NOEXCEPT = default;
+    DispatchLoaderDynamic( DispatchLoaderDynamic const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Convenience constructor: immediately loads the global (instance-less)
+    // entry points from the supplied vkGetInstanceProcAddr.
+    DispatchLoaderDynamic(PFN_vkGetInstanceProcAddr getInstanceProcAddr) VULKAN_HPP_NOEXCEPT
+    {
+      init(getInstanceProcAddr);
+    }
+
+    // Loads only the commands that may be queried with a NULL instance:
+    // the VK_VERSION_1_0 pre-instance entry points plus
+    // vkEnumerateInstanceVersion (1.1). Instance- and device-level commands
+    // remain null until one of the instance-taking init() overloads runs.
+    // Asserts (rather than failing) on a null getInstanceProcAddr.
+    void init( PFN_vkGetInstanceProcAddr getInstanceProcAddr ) VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT(getInstanceProcAddr);
+
+      // Keep the loader entry point itself so later init() calls can reuse it.
+      vkGetInstanceProcAddr = getInstanceProcAddr;
+
+
+  //=== VK_VERSION_1_0 ===
+      vkCreateInstance = PFN_vkCreateInstance( vkGetInstanceProcAddr( NULL, "vkCreateInstance" ) );
+      vkEnumerateInstanceExtensionProperties = PFN_vkEnumerateInstanceExtensionProperties( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceExtensionProperties" ) );
+      vkEnumerateInstanceLayerProperties = PFN_vkEnumerateInstanceLayerProperties( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceLayerProperties" ) );
+
+  //=== VK_VERSION_1_1 ===
+      vkEnumerateInstanceVersion = PFN_vkEnumerateInstanceVersion( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceVersion" ) );
+
+    }
+
+    // This interface does not require a linked vulkan library.
+    // Constructs and fully initializes the dispatch table in one step by
+    // forwarding to the four-argument init() below. device and
+    // getDeviceProcAddr are optional; see init() for how they are used.
+    DispatchLoaderDynamic( VkInstance                instance,
+                           PFN_vkGetInstanceProcAddr getInstanceProcAddr,
+                           VkDevice                  device            = {},
+                           PFN_vkGetDeviceProcAddr   getDeviceProcAddr = nullptr ) VULKAN_HPP_NOEXCEPT
+    {
+      init( instance, getInstanceProcAddr, device, getDeviceProcAddr );
+    }
+
+    // This interface does not require a linked vulkan library.
+    // Loads instance-level commands via init(Instance), then — only when a
+    // device handle is supplied — additionally loads device-level commands
+    // via init(Device). Note the fourth parameter's name is commented out:
+    // getDeviceProcAddr is accepted for interface compatibility but ignored
+    // by this overload (device commands are resolved through the instance).
+    // Asserts on a null instance or getInstanceProcAddr.
+    void init( VkInstance                instance,
+               PFN_vkGetInstanceProcAddr getInstanceProcAddr,
+               VkDevice                  device              = {},
+               PFN_vkGetDeviceProcAddr /*getDeviceProcAddr*/ = nullptr ) VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT(instance && getInstanceProcAddr);
+      vkGetInstanceProcAddr = getInstanceProcAddr;
+      init( VULKAN_HPP_NAMESPACE::Instance(instance) );
+      if (device) {
+        init( VULKAN_HPP_NAMESPACE::Device(device) );
+      }
+    }
+
+    void init( VULKAN_HPP_NAMESPACE::Instance instanceCpp ) VULKAN_HPP_NOEXCEPT
+    {
+      VkInstance instance = static_cast<VkInstance>(instanceCpp);
+
+
+  //=== VK_VERSION_1_0 ===
+      vkDestroyInstance = PFN_vkDestroyInstance( vkGetInstanceProcAddr( instance, "vkDestroyInstance" ) );
+      vkEnumeratePhysicalDevices = PFN_vkEnumeratePhysicalDevices( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDevices" ) );
+      vkGetPhysicalDeviceFeatures = PFN_vkGetPhysicalDeviceFeatures( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures" ) );
+      vkGetPhysicalDeviceFormatProperties = PFN_vkGetPhysicalDeviceFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties" ) );
+      vkGetPhysicalDeviceImageFormatProperties = PFN_vkGetPhysicalDeviceImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties" ) );
+      vkGetPhysicalDeviceProperties = PFN_vkGetPhysicalDeviceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties" ) );
+      vkGetPhysicalDeviceQueueFamilyProperties = PFN_vkGetPhysicalDeviceQueueFamilyProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties" ) );
+      vkGetPhysicalDeviceMemoryProperties = PFN_vkGetPhysicalDeviceMemoryProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties" ) );
+      vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetInstanceProcAddr( instance, "vkGetDeviceProcAddr" ) );
+      vkCreateDevice = PFN_vkCreateDevice( vkGetInstanceProcAddr( instance, "vkCreateDevice" ) );
+      vkDestroyDevice = PFN_vkDestroyDevice( vkGetInstanceProcAddr( instance, "vkDestroyDevice" ) );
+      vkEnumerateDeviceExtensionProperties = PFN_vkEnumerateDeviceExtensionProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceExtensionProperties" ) );
+      vkEnumerateDeviceLayerProperties = PFN_vkEnumerateDeviceLayerProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceLayerProperties" ) );
+      vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetInstanceProcAddr( instance, "vkGetDeviceQueue" ) );
+      vkQueueSubmit = PFN_vkQueueSubmit( vkGetInstanceProcAddr( instance, "vkQueueSubmit" ) );
+      vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetInstanceProcAddr( instance, "vkQueueWaitIdle" ) );
+      vkDeviceWaitIdle = PFN_vkDeviceWaitIdle( vkGetInstanceProcAddr( instance, "vkDeviceWaitIdle" ) );
+      vkAllocateMemory = PFN_vkAllocateMemory( vkGetInstanceProcAddr( instance, "vkAllocateMemory" ) );
+      vkFreeMemory = PFN_vkFreeMemory( vkGetInstanceProcAddr( instance, "vkFreeMemory" ) );
+      vkMapMemory = PFN_vkMapMemory( vkGetInstanceProcAddr( instance, "vkMapMemory" ) );
+      vkUnmapMemory = PFN_vkUnmapMemory( vkGetInstanceProcAddr( instance, "vkUnmapMemory" ) );
+      vkFlushMappedMemoryRanges = PFN_vkFlushMappedMemoryRanges( vkGetInstanceProcAddr( instance, "vkFlushMappedMemoryRanges" ) );
+      vkInvalidateMappedMemoryRanges = PFN_vkInvalidateMappedMemoryRanges( vkGetInstanceProcAddr( instance, "vkInvalidateMappedMemoryRanges" ) );
+      vkGetDeviceMemoryCommitment = PFN_vkGetDeviceMemoryCommitment( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryCommitment" ) );
+      vkBindBufferMemory = PFN_vkBindBufferMemory( vkGetInstanceProcAddr( instance, "vkBindBufferMemory" ) );
+      vkBindImageMemory = PFN_vkBindImageMemory( vkGetInstanceProcAddr( instance, "vkBindImageMemory" ) );
+      vkGetBufferMemoryRequirements = PFN_vkGetBufferMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements" ) );
+      vkGetImageMemoryRequirements = PFN_vkGetImageMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements" ) );
+      vkGetImageSparseMemoryRequirements = PFN_vkGetImageSparseMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements" ) );
+      vkGetPhysicalDeviceSparseImageFormatProperties = PFN_vkGetPhysicalDeviceSparseImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties" ) );
+      vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetInstanceProcAddr( instance, "vkQueueBindSparse" ) );
+      vkCreateFence = PFN_vkCreateFence( vkGetInstanceProcAddr( instance, "vkCreateFence" ) );
+      vkDestroyFence = PFN_vkDestroyFence( vkGetInstanceProcAddr( instance, "vkDestroyFence" ) );
+      vkResetFences = PFN_vkResetFences( vkGetInstanceProcAddr( instance, "vkResetFences" ) );
+      vkGetFenceStatus = PFN_vkGetFenceStatus( vkGetInstanceProcAddr( instance, "vkGetFenceStatus" ) );
+      vkWaitForFences = PFN_vkWaitForFences( vkGetInstanceProcAddr( instance, "vkWaitForFences" ) );
+      vkCreateSemaphore = PFN_vkCreateSemaphore( vkGetInstanceProcAddr( instance, "vkCreateSemaphore" ) );
+      vkDestroySemaphore = PFN_vkDestroySemaphore( vkGetInstanceProcAddr( instance, "vkDestroySemaphore" ) );
+      vkCreateEvent = PFN_vkCreateEvent( vkGetInstanceProcAddr( instance, "vkCreateEvent" ) );
+      vkDestroyEvent = PFN_vkDestroyEvent( vkGetInstanceProcAddr( instance, "vkDestroyEvent" ) );
+      vkGetEventStatus = PFN_vkGetEventStatus( vkGetInstanceProcAddr( instance, "vkGetEventStatus" ) );
+      vkSetEvent = PFN_vkSetEvent( vkGetInstanceProcAddr( instance, "vkSetEvent" ) );
+      vkResetEvent = PFN_vkResetEvent( vkGetInstanceProcAddr( instance, "vkResetEvent" ) );
+      vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetInstanceProcAddr( instance, "vkCreateQueryPool" ) );
+      vkDestroyQueryPool = PFN_vkDestroyQueryPool( vkGetInstanceProcAddr( instance, "vkDestroyQueryPool" ) );
+      vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetInstanceProcAddr( instance, "vkGetQueryPoolResults" ) );
+      vkCreateBuffer = PFN_vkCreateBuffer( vkGetInstanceProcAddr( instance, "vkCreateBuffer" ) );
+      vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetInstanceProcAddr( instance, "vkDestroyBuffer" ) );
+      vkCreateBufferView = PFN_vkCreateBufferView( vkGetInstanceProcAddr( instance, "vkCreateBufferView" ) );
+      vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetInstanceProcAddr( instance, "vkDestroyBufferView" ) );
+      vkCreateImage = PFN_vkCreateImage( vkGetInstanceProcAddr( instance, "vkCreateImage" ) );
+      vkDestroyImage = PFN_vkDestroyImage( vkGetInstanceProcAddr( instance, "vkDestroyImage" ) );
+      vkGetImageSubresourceLayout = PFN_vkGetImageSubresourceLayout( vkGetInstanceProcAddr( instance, "vkGetImageSubresourceLayout" ) );
+      vkCreateImageView = PFN_vkCreateImageView( vkGetInstanceProcAddr( instance, "vkCreateImageView" ) );
+      vkDestroyImageView = PFN_vkDestroyImageView( vkGetInstanceProcAddr( instance, "vkDestroyImageView" ) );
+      vkCreateShaderModule = PFN_vkCreateShaderModule( vkGetInstanceProcAddr( instance, "vkCreateShaderModule" ) );
+      vkDestroyShaderModule = PFN_vkDestroyShaderModule( vkGetInstanceProcAddr( instance, "vkDestroyShaderModule" ) );
+      vkCreatePipelineCache = PFN_vkCreatePipelineCache( vkGetInstanceProcAddr( instance, "vkCreatePipelineCache" ) );
+      vkDestroyPipelineCache = PFN_vkDestroyPipelineCache( vkGetInstanceProcAddr( instance, "vkDestroyPipelineCache" ) );
+      vkGetPipelineCacheData = PFN_vkGetPipelineCacheData( vkGetInstanceProcAddr( instance, "vkGetPipelineCacheData" ) );
+      vkMergePipelineCaches = PFN_vkMergePipelineCaches( vkGetInstanceProcAddr( instance, "vkMergePipelineCaches" ) );
+      vkCreateGraphicsPipelines = PFN_vkCreateGraphicsPipelines( vkGetInstanceProcAddr( instance, "vkCreateGraphicsPipelines" ) );
+      vkCreateComputePipelines = PFN_vkCreateComputePipelines( vkGetInstanceProcAddr( instance, "vkCreateComputePipelines" ) );
+      vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetInstanceProcAddr( instance, "vkDestroyPipeline" ) );
+      vkCreatePipelineLayout = PFN_vkCreatePipelineLayout( vkGetInstanceProcAddr( instance, "vkCreatePipelineLayout" ) );
+      vkDestroyPipelineLayout = PFN_vkDestroyPipelineLayout( vkGetInstanceProcAddr( instance, "vkDestroyPipelineLayout" ) );
+      vkCreateSampler = PFN_vkCreateSampler( vkGetInstanceProcAddr( instance, "vkCreateSampler" ) );
+      vkDestroySampler = PFN_vkDestroySampler( vkGetInstanceProcAddr( instance, "vkDestroySampler" ) );
+      vkCreateDescriptorSetLayout = PFN_vkCreateDescriptorSetLayout( vkGetInstanceProcAddr( instance, "vkCreateDescriptorSetLayout" ) );
+      vkDestroyDescriptorSetLayout = PFN_vkDestroyDescriptorSetLayout( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorSetLayout" ) );
+      vkCreateDescriptorPool = PFN_vkCreateDescriptorPool( vkGetInstanceProcAddr( instance, "vkCreateDescriptorPool" ) );
+      vkDestroyDescriptorPool = PFN_vkDestroyDescriptorPool( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorPool" ) );
+      vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetInstanceProcAddr( instance, "vkResetDescriptorPool" ) );
+      vkAllocateDescriptorSets = PFN_vkAllocateDescriptorSets( vkGetInstanceProcAddr( instance, "vkAllocateDescriptorSets" ) );
+      vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( vkGetInstanceProcAddr( instance, "vkFreeDescriptorSets" ) );
+      vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSets" ) );
+      vkCreateFramebuffer = PFN_vkCreateFramebuffer( vkGetInstanceProcAddr( instance, "vkCreateFramebuffer" ) );
+      vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( vkGetInstanceProcAddr( instance, "vkDestroyFramebuffer" ) );
+      vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetInstanceProcAddr( instance, "vkCreateRenderPass" ) );
+      vkDestroyRenderPass = PFN_vkDestroyRenderPass( vkGetInstanceProcAddr( instance, "vkDestroyRenderPass" ) );
+      vkGetRenderAreaGranularity = PFN_vkGetRenderAreaGranularity( vkGetInstanceProcAddr( instance, "vkGetRenderAreaGranularity" ) );
+      vkCreateCommandPool = PFN_vkCreateCommandPool( vkGetInstanceProcAddr( instance, "vkCreateCommandPool" ) );
+      vkDestroyCommandPool = PFN_vkDestroyCommandPool( vkGetInstanceProcAddr( instance, "vkDestroyCommandPool" ) );
+      vkResetCommandPool = PFN_vkResetCommandPool( vkGetInstanceProcAddr( instance, "vkResetCommandPool" ) );
+      vkAllocateCommandBuffers = PFN_vkAllocateCommandBuffers( vkGetInstanceProcAddr( instance, "vkAllocateCommandBuffers" ) );
+      vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( vkGetInstanceProcAddr( instance, "vkFreeCommandBuffers" ) );
+      vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetInstanceProcAddr( instance, "vkBeginCommandBuffer" ) );
+      vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetInstanceProcAddr( instance, "vkEndCommandBuffer" ) );
+      vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetInstanceProcAddr( instance, "vkResetCommandBuffer" ) );
+      vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetInstanceProcAddr( instance, "vkCmdBindPipeline" ) );
+      vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetInstanceProcAddr( instance, "vkCmdSetViewport" ) );
+      vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetInstanceProcAddr( instance, "vkCmdSetScissor" ) );
+      vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetInstanceProcAddr( instance, "vkCmdSetLineWidth" ) );
+      vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBias" ) );
+      vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants( vkGetInstanceProcAddr( instance, "vkCmdSetBlendConstants" ) );
+      vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBounds" ) );
+      vkCmdSetStencilCompareMask = PFN_vkCmdSetStencilCompareMask( vkGetInstanceProcAddr( instance, "vkCmdSetStencilCompareMask" ) );
+      vkCmdSetStencilWriteMask = PFN_vkCmdSetStencilWriteMask( vkGetInstanceProcAddr( instance, "vkCmdSetStencilWriteMask" ) );
+      vkCmdSetStencilReference = PFN_vkCmdSetStencilReference( vkGetInstanceProcAddr( instance, "vkCmdSetStencilReference" ) );
+      vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorSets" ) );
+      vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetInstanceProcAddr( instance, "vkCmdBindIndexBuffer" ) );
+      vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers( vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers" ) );
+      vkCmdDraw = PFN_vkCmdDraw( vkGetInstanceProcAddr( instance, "vkCmdDraw" ) );
+      vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexed" ) );
+      vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirect" ) );
+      vkCmdDrawIndexedIndirect = PFN_vkCmdDrawIndexedIndirect( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirect" ) );
+      vkCmdDispatch = PFN_vkCmdDispatch( vkGetInstanceProcAddr( instance, "vkCmdDispatch" ) );
+      vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( vkGetInstanceProcAddr( instance, "vkCmdDispatchIndirect" ) );
+      vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetInstanceProcAddr( instance, "vkCmdCopyBuffer" ) );
+      vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetInstanceProcAddr( instance, "vkCmdCopyImage" ) );
+      vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetInstanceProcAddr( instance, "vkCmdBlitImage" ) );
+      vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage( vkGetInstanceProcAddr( instance, "vkCmdCopyBufferToImage" ) );
+      vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer( vkGetInstanceProcAddr( instance, "vkCmdCopyImageToBuffer" ) );
+      vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetInstanceProcAddr( instance, "vkCmdUpdateBuffer" ) );
+      vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetInstanceProcAddr( instance, "vkCmdFillBuffer" ) );
+      vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetInstanceProcAddr( instance, "vkCmdClearColorImage" ) );
+      vkCmdClearDepthStencilImage = PFN_vkCmdClearDepthStencilImage( vkGetInstanceProcAddr( instance, "vkCmdClearDepthStencilImage" ) );
+      vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetInstanceProcAddr( instance, "vkCmdClearAttachments" ) );
+      vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetInstanceProcAddr( instance, "vkCmdResolveImage" ) );
+      vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetInstanceProcAddr( instance, "vkCmdSetEvent" ) );
+      vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetInstanceProcAddr( instance, "vkCmdResetEvent" ) );
+      vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetInstanceProcAddr( instance, "vkCmdWaitEvents" ) );
+      vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetInstanceProcAddr( instance, "vkCmdPipelineBarrier" ) );
+      vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetInstanceProcAddr( instance, "vkCmdBeginQuery" ) );
+      vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetInstanceProcAddr( instance, "vkCmdEndQuery" ) );
+      vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetInstanceProcAddr( instance, "vkCmdResetQueryPool" ) );
+      vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetInstanceProcAddr( instance, "vkCmdWriteTimestamp" ) );
+      vkCmdCopyQueryPoolResults = PFN_vkCmdCopyQueryPoolResults( vkGetInstanceProcAddr( instance, "vkCmdCopyQueryPoolResults" ) );
+      vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetInstanceProcAddr( instance, "vkCmdPushConstants" ) );
+      vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass" ) );
+      vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass" ) );
+      vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass" ) );
+      vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetInstanceProcAddr( instance, "vkCmdExecuteCommands" ) );
+
+  //=== VK_VERSION_1_1 ===
+      vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetInstanceProcAddr( instance, "vkBindBufferMemory2" ) );
+      vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetInstanceProcAddr( instance, "vkBindImageMemory2" ) );
+      vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPeerMemoryFeatures" ) );
+      vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetInstanceProcAddr( instance, "vkCmdSetDeviceMask" ) );
+      vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetInstanceProcAddr( instance, "vkCmdDispatchBase" ) );
+      vkEnumeratePhysicalDeviceGroups = PFN_vkEnumeratePhysicalDeviceGroups( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroups" ) );
+      vkGetImageMemoryRequirements2 = PFN_vkGetImageMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements2" ) );
+      vkGetBufferMemoryRequirements2 = PFN_vkGetBufferMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements2" ) );
+      vkGetImageSparseMemoryRequirements2 = PFN_vkGetImageSparseMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements2" ) );
+      vkGetPhysicalDeviceFeatures2 = PFN_vkGetPhysicalDeviceFeatures2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2" ) );
+      vkGetPhysicalDeviceProperties2 = PFN_vkGetPhysicalDeviceProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2" ) );
+      vkGetPhysicalDeviceFormatProperties2 = PFN_vkGetPhysicalDeviceFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2" ) );
+      vkGetPhysicalDeviceImageFormatProperties2 = PFN_vkGetPhysicalDeviceImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2" ) );
+      vkGetPhysicalDeviceQueueFamilyProperties2 = PFN_vkGetPhysicalDeviceQueueFamilyProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2" ) );
+      vkGetPhysicalDeviceMemoryProperties2 = PFN_vkGetPhysicalDeviceMemoryProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2" ) );
+      vkGetPhysicalDeviceSparseImageFormatProperties2 = PFN_vkGetPhysicalDeviceSparseImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2" ) );
+      vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetInstanceProcAddr( instance, "vkTrimCommandPool" ) );
+      vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetInstanceProcAddr( instance, "vkGetDeviceQueue2" ) );
+      vkCreateSamplerYcbcrConversion = PFN_vkCreateSamplerYcbcrConversion( vkGetInstanceProcAddr( instance, "vkCreateSamplerYcbcrConversion" ) );
+      vkDestroySamplerYcbcrConversion = PFN_vkDestroySamplerYcbcrConversion( vkGetInstanceProcAddr( instance, "vkDestroySamplerYcbcrConversion" ) );
+      vkCreateDescriptorUpdateTemplate = PFN_vkCreateDescriptorUpdateTemplate( vkGetInstanceProcAddr( instance, "vkCreateDescriptorUpdateTemplate" ) );
+      vkDestroyDescriptorUpdateTemplate = PFN_vkDestroyDescriptorUpdateTemplate( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorUpdateTemplate" ) );
+      vkUpdateDescriptorSetWithTemplate = PFN_vkUpdateDescriptorSetWithTemplate( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSetWithTemplate" ) );
+      vkGetPhysicalDeviceExternalBufferProperties = PFN_vkGetPhysicalDeviceExternalBufferProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferProperties" ) );
+      vkGetPhysicalDeviceExternalFenceProperties = PFN_vkGetPhysicalDeviceExternalFenceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFenceProperties" ) );
+      vkGetPhysicalDeviceExternalSemaphoreProperties = PFN_vkGetPhysicalDeviceExternalSemaphoreProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphoreProperties" ) );
+      vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSupport" ) );
+
+  //=== VK_VERSION_1_2 ===
+      vkCmdDrawIndirectCount = PFN_vkCmdDrawIndirectCount( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCount" ) );
+      vkCmdDrawIndexedIndirectCount = PFN_vkCmdDrawIndexedIndirectCount( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCount" ) );
+      vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetInstanceProcAddr( instance, "vkCreateRenderPass2" ) );
+      vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass2" ) );
+      vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass2" ) );
+      vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass2" ) );
+      vkResetQueryPool = PFN_vkResetQueryPool( vkGetInstanceProcAddr( instance, "vkResetQueryPool" ) );
+      vkGetSemaphoreCounterValue = PFN_vkGetSemaphoreCounterValue( vkGetInstanceProcAddr( instance, "vkGetSemaphoreCounterValue" ) );
+      vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetInstanceProcAddr( instance, "vkWaitSemaphores" ) );
+      vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetInstanceProcAddr( instance, "vkSignalSemaphore" ) );
+      vkGetBufferDeviceAddress = PFN_vkGetBufferDeviceAddress( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddress" ) );
+      vkGetBufferOpaqueCaptureAddress = PFN_vkGetBufferOpaqueCaptureAddress( vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureAddress" ) );
+      vkGetDeviceMemoryOpaqueCaptureAddress = PFN_vkGetDeviceMemoryOpaqueCaptureAddress( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryOpaqueCaptureAddress" ) );
+
+  //=== VK_VERSION_1_3 ===
+      vkGetPhysicalDeviceToolProperties = PFN_vkGetPhysicalDeviceToolProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolProperties" ) );
+      vkCreatePrivateDataSlot = PFN_vkCreatePrivateDataSlot( vkGetInstanceProcAddr( instance, "vkCreatePrivateDataSlot" ) );
+      vkDestroyPrivateDataSlot = PFN_vkDestroyPrivateDataSlot( vkGetInstanceProcAddr( instance, "vkDestroyPrivateDataSlot" ) );
+      vkSetPrivateData = PFN_vkSetPrivateData( vkGetInstanceProcAddr( instance, "vkSetPrivateData" ) );
+      vkGetPrivateData = PFN_vkGetPrivateData( vkGetInstanceProcAddr( instance, "vkGetPrivateData" ) );
+      vkCmdSetEvent2 = PFN_vkCmdSetEvent2( vkGetInstanceProcAddr( instance, "vkCmdSetEvent2" ) );
+      vkCmdResetEvent2 = PFN_vkCmdResetEvent2( vkGetInstanceProcAddr( instance, "vkCmdResetEvent2" ) );
+      vkCmdWaitEvents2 = PFN_vkCmdWaitEvents2( vkGetInstanceProcAddr( instance, "vkCmdWaitEvents2" ) );
+      vkCmdPipelineBarrier2 = PFN_vkCmdPipelineBarrier2( vkGetInstanceProcAddr( instance, "vkCmdPipelineBarrier2" ) );
+      vkCmdWriteTimestamp2 = PFN_vkCmdWriteTimestamp2( vkGetInstanceProcAddr( instance, "vkCmdWriteTimestamp2" ) );
+      vkQueueSubmit2 = PFN_vkQueueSubmit2( vkGetInstanceProcAddr( instance, "vkQueueSubmit2" ) );
+      vkCmdCopyBuffer2 = PFN_vkCmdCopyBuffer2( vkGetInstanceProcAddr( instance, "vkCmdCopyBuffer2" ) );
+      vkCmdCopyImage2 = PFN_vkCmdCopyImage2( vkGetInstanceProcAddr( instance, "vkCmdCopyImage2" ) );
+      vkCmdCopyBufferToImage2 = PFN_vkCmdCopyBufferToImage2( vkGetInstanceProcAddr( instance, "vkCmdCopyBufferToImage2" ) );
+      vkCmdCopyImageToBuffer2 = PFN_vkCmdCopyImageToBuffer2( vkGetInstanceProcAddr( instance, "vkCmdCopyImageToBuffer2" ) );
+      vkCmdBlitImage2 = PFN_vkCmdBlitImage2( vkGetInstanceProcAddr( instance, "vkCmdBlitImage2" ) );
+      vkCmdResolveImage2 = PFN_vkCmdResolveImage2( vkGetInstanceProcAddr( instance, "vkCmdResolveImage2" ) );
+      vkCmdBeginRendering = PFN_vkCmdBeginRendering( vkGetInstanceProcAddr( instance, "vkCmdBeginRendering" ) );
+      vkCmdEndRendering = PFN_vkCmdEndRendering( vkGetInstanceProcAddr( instance, "vkCmdEndRendering" ) );
+      vkCmdSetCullMode = PFN_vkCmdSetCullMode( vkGetInstanceProcAddr( instance, "vkCmdSetCullMode" ) );
+      vkCmdSetFrontFace = PFN_vkCmdSetFrontFace( vkGetInstanceProcAddr( instance, "vkCmdSetFrontFace" ) );
+      vkCmdSetPrimitiveTopology = PFN_vkCmdSetPrimitiveTopology( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveTopology" ) );
+      vkCmdSetViewportWithCount = PFN_vkCmdSetViewportWithCount( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWithCount" ) );
+      vkCmdSetScissorWithCount = PFN_vkCmdSetScissorWithCount( vkGetInstanceProcAddr( instance, "vkCmdSetScissorWithCount" ) );
+      vkCmdBindVertexBuffers2 = PFN_vkCmdBindVertexBuffers2( vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers2" ) );
+      vkCmdSetDepthTestEnable = PFN_vkCmdSetDepthTestEnable( vkGetInstanceProcAddr( instance, "vkCmdSetDepthTestEnable" ) );
+      vkCmdSetDepthWriteEnable = PFN_vkCmdSetDepthWriteEnable( vkGetInstanceProcAddr( instance, "vkCmdSetDepthWriteEnable" ) );
+      vkCmdSetDepthCompareOp = PFN_vkCmdSetDepthCompareOp( vkGetInstanceProcAddr( instance, "vkCmdSetDepthCompareOp" ) );
+      vkCmdSetDepthBoundsTestEnable = PFN_vkCmdSetDepthBoundsTestEnable( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBoundsTestEnable" ) );
+      vkCmdSetStencilTestEnable = PFN_vkCmdSetStencilTestEnable( vkGetInstanceProcAddr( instance, "vkCmdSetStencilTestEnable" ) );
+      vkCmdSetStencilOp = PFN_vkCmdSetStencilOp( vkGetInstanceProcAddr( instance, "vkCmdSetStencilOp" ) );
+      vkCmdSetRasterizerDiscardEnable = PFN_vkCmdSetRasterizerDiscardEnable( vkGetInstanceProcAddr( instance, "vkCmdSetRasterizerDiscardEnable" ) );
+      vkCmdSetDepthBiasEnable = PFN_vkCmdSetDepthBiasEnable( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBiasEnable" ) );
+      vkCmdSetPrimitiveRestartEnable = PFN_vkCmdSetPrimitiveRestartEnable( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveRestartEnable" ) );
+      vkGetDeviceBufferMemoryRequirements = PFN_vkGetDeviceBufferMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetDeviceBufferMemoryRequirements" ) );
+      vkGetDeviceImageMemoryRequirements = PFN_vkGetDeviceImageMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetDeviceImageMemoryRequirements" ) );
+      vkGetDeviceImageSparseMemoryRequirements = PFN_vkGetDeviceImageSparseMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetDeviceImageSparseMemoryRequirements" ) );
+
+  //=== VK_KHR_surface ===
+      vkDestroySurfaceKHR = PFN_vkDestroySurfaceKHR( vkGetInstanceProcAddr( instance, "vkDestroySurfaceKHR" ) );
+      vkGetPhysicalDeviceSurfaceSupportKHR = PFN_vkGetPhysicalDeviceSurfaceSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceSupportKHR" ) );
+      vkGetPhysicalDeviceSurfaceCapabilitiesKHR = PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR" ) );
+      vkGetPhysicalDeviceSurfaceFormatsKHR = PFN_vkGetPhysicalDeviceSurfaceFormatsKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormatsKHR" ) );
+      vkGetPhysicalDeviceSurfacePresentModesKHR = PFN_vkGetPhysicalDeviceSurfacePresentModesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModesKHR" ) );
+
+  //=== VK_KHR_swapchain ===
+      vkCreateSwapchainKHR = PFN_vkCreateSwapchainKHR( vkGetInstanceProcAddr( instance, "vkCreateSwapchainKHR" ) );
+      vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR( vkGetInstanceProcAddr( instance, "vkDestroySwapchainKHR" ) );
+      vkGetSwapchainImagesKHR = PFN_vkGetSwapchainImagesKHR( vkGetInstanceProcAddr( instance, "vkGetSwapchainImagesKHR" ) );
+      vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( vkGetInstanceProcAddr( instance, "vkAcquireNextImageKHR" ) );
+      vkQueuePresentKHR = PFN_vkQueuePresentKHR( vkGetInstanceProcAddr( instance, "vkQueuePresentKHR" ) );
+      vkGetDeviceGroupPresentCapabilitiesKHR = PFN_vkGetDeviceGroupPresentCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPresentCapabilitiesKHR" ) );
+      vkGetDeviceGroupSurfacePresentModesKHR = PFN_vkGetDeviceGroupSurfacePresentModesKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupSurfacePresentModesKHR" ) );
+      vkGetPhysicalDevicePresentRectanglesKHR = PFN_vkGetPhysicalDevicePresentRectanglesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDevicePresentRectanglesKHR" ) );
+      vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR( vkGetInstanceProcAddr( instance, "vkAcquireNextImage2KHR" ) );
+
+  //=== VK_KHR_display ===
+      vkGetPhysicalDeviceDisplayPropertiesKHR = PFN_vkGetPhysicalDeviceDisplayPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPropertiesKHR" ) );
+      vkGetPhysicalDeviceDisplayPlanePropertiesKHR = PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlanePropertiesKHR" ) );
+      vkGetDisplayPlaneSupportedDisplaysKHR = PFN_vkGetDisplayPlaneSupportedDisplaysKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneSupportedDisplaysKHR" ) );
+      vkGetDisplayModePropertiesKHR = PFN_vkGetDisplayModePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModePropertiesKHR" ) );
+      vkCreateDisplayModeKHR = PFN_vkCreateDisplayModeKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayModeKHR" ) );
+      vkGetDisplayPlaneCapabilitiesKHR = PFN_vkGetDisplayPlaneCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilitiesKHR" ) );
+      vkCreateDisplayPlaneSurfaceKHR = PFN_vkCreateDisplayPlaneSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayPlaneSurfaceKHR" ) );
+
+  //=== VK_KHR_display_swapchain ===
+      vkCreateSharedSwapchainsKHR = PFN_vkCreateSharedSwapchainsKHR( vkGetInstanceProcAddr( instance, "vkCreateSharedSwapchainsKHR" ) );
+
+#if defined( VK_USE_PLATFORM_XLIB_KHR )
+  //=== VK_KHR_xlib_surface ===
+      vkCreateXlibSurfaceKHR = PFN_vkCreateXlibSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXlibSurfaceKHR" ) );
+      vkGetPhysicalDeviceXlibPresentationSupportKHR = PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXlibPresentationSupportKHR" ) );
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+#if defined( VK_USE_PLATFORM_XCB_KHR )
+  //=== VK_KHR_xcb_surface ===
+      vkCreateXcbSurfaceKHR = PFN_vkCreateXcbSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXcbSurfaceKHR" ) );
+      vkGetPhysicalDeviceXcbPresentationSupportKHR = PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXcbPresentationSupportKHR" ) );
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
+  //=== VK_KHR_wayland_surface ===
+      vkCreateWaylandSurfaceKHR = PFN_vkCreateWaylandSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWaylandSurfaceKHR" ) );
+      vkGetPhysicalDeviceWaylandPresentationSupportKHR = PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWaylandPresentationSupportKHR" ) );
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  //=== VK_KHR_android_surface ===
+      vkCreateAndroidSurfaceKHR = PFN_vkCreateAndroidSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateAndroidSurfaceKHR" ) );
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_win32_surface ===
+      vkCreateWin32SurfaceKHR = PFN_vkCreateWin32SurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWin32SurfaceKHR" ) );
+      vkGetPhysicalDeviceWin32PresentationSupportKHR = PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWin32PresentationSupportKHR" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_EXT_debug_report ===
+      vkCreateDebugReportCallbackEXT = PFN_vkCreateDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugReportCallbackEXT" ) );
+      vkDestroyDebugReportCallbackEXT = PFN_vkDestroyDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugReportCallbackEXT" ) );
+      vkDebugReportMessageEXT = PFN_vkDebugReportMessageEXT( vkGetInstanceProcAddr( instance, "vkDebugReportMessageEXT" ) );
+
+  //=== VK_EXT_debug_marker ===
+      vkDebugMarkerSetObjectTagEXT = PFN_vkDebugMarkerSetObjectTagEXT( vkGetInstanceProcAddr( instance, "vkDebugMarkerSetObjectTagEXT" ) );
+      vkDebugMarkerSetObjectNameEXT = PFN_vkDebugMarkerSetObjectNameEXT( vkGetInstanceProcAddr( instance, "vkDebugMarkerSetObjectNameEXT" ) );
+      vkCmdDebugMarkerBeginEXT = PFN_vkCmdDebugMarkerBeginEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerBeginEXT" ) );
+      vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerEndEXT" ) );
+      vkCmdDebugMarkerInsertEXT = PFN_vkCmdDebugMarkerInsertEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerInsertEXT" ) );
+
+  //=== VK_KHR_video_queue ===
+      vkGetPhysicalDeviceVideoCapabilitiesKHR = PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoCapabilitiesKHR" ) );
+      vkGetPhysicalDeviceVideoFormatPropertiesKHR = PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoFormatPropertiesKHR" ) );
+      vkCreateVideoSessionKHR = PFN_vkCreateVideoSessionKHR( vkGetInstanceProcAddr( instance, "vkCreateVideoSessionKHR" ) );
+      vkDestroyVideoSessionKHR = PFN_vkDestroyVideoSessionKHR( vkGetInstanceProcAddr( instance, "vkDestroyVideoSessionKHR" ) );
+      vkGetVideoSessionMemoryRequirementsKHR = PFN_vkGetVideoSessionMemoryRequirementsKHR( vkGetInstanceProcAddr( instance, "vkGetVideoSessionMemoryRequirementsKHR" ) );
+      vkBindVideoSessionMemoryKHR = PFN_vkBindVideoSessionMemoryKHR( vkGetInstanceProcAddr( instance, "vkBindVideoSessionMemoryKHR" ) );
+      vkCreateVideoSessionParametersKHR = PFN_vkCreateVideoSessionParametersKHR( vkGetInstanceProcAddr( instance, "vkCreateVideoSessionParametersKHR" ) );
+      vkUpdateVideoSessionParametersKHR = PFN_vkUpdateVideoSessionParametersKHR( vkGetInstanceProcAddr( instance, "vkUpdateVideoSessionParametersKHR" ) );
+      vkDestroyVideoSessionParametersKHR = PFN_vkDestroyVideoSessionParametersKHR( vkGetInstanceProcAddr( instance, "vkDestroyVideoSessionParametersKHR" ) );
+      vkCmdBeginVideoCodingKHR = PFN_vkCmdBeginVideoCodingKHR( vkGetInstanceProcAddr( instance, "vkCmdBeginVideoCodingKHR" ) );
+      vkCmdEndVideoCodingKHR = PFN_vkCmdEndVideoCodingKHR( vkGetInstanceProcAddr( instance, "vkCmdEndVideoCodingKHR" ) );
+      vkCmdControlVideoCodingKHR = PFN_vkCmdControlVideoCodingKHR( vkGetInstanceProcAddr( instance, "vkCmdControlVideoCodingKHR" ) );
+
+  //=== VK_KHR_video_decode_queue ===
+      vkCmdDecodeVideoKHR = PFN_vkCmdDecodeVideoKHR( vkGetInstanceProcAddr( instance, "vkCmdDecodeVideoKHR" ) );
+
+  //=== VK_EXT_transform_feedback ===
+      vkCmdBindTransformFeedbackBuffersEXT = PFN_vkCmdBindTransformFeedbackBuffersEXT( vkGetInstanceProcAddr( instance, "vkCmdBindTransformFeedbackBuffersEXT" ) );
+      vkCmdBeginTransformFeedbackEXT = PFN_vkCmdBeginTransformFeedbackEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginTransformFeedbackEXT" ) );
+      vkCmdEndTransformFeedbackEXT = PFN_vkCmdEndTransformFeedbackEXT( vkGetInstanceProcAddr( instance, "vkCmdEndTransformFeedbackEXT" ) );
+      vkCmdBeginQueryIndexedEXT = PFN_vkCmdBeginQueryIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginQueryIndexedEXT" ) );
+      vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdEndQueryIndexedEXT" ) );
+      vkCmdDrawIndirectByteCountEXT = PFN_vkCmdDrawIndirectByteCountEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectByteCountEXT" ) );
+
+  //=== VK_NVX_binary_import ===
+      vkCreateCuModuleNVX = PFN_vkCreateCuModuleNVX( vkGetInstanceProcAddr( instance, "vkCreateCuModuleNVX" ) );
+      vkCreateCuFunctionNVX = PFN_vkCreateCuFunctionNVX( vkGetInstanceProcAddr( instance, "vkCreateCuFunctionNVX" ) );
+      vkDestroyCuModuleNVX = PFN_vkDestroyCuModuleNVX( vkGetInstanceProcAddr( instance, "vkDestroyCuModuleNVX" ) );
+      vkDestroyCuFunctionNVX = PFN_vkDestroyCuFunctionNVX( vkGetInstanceProcAddr( instance, "vkDestroyCuFunctionNVX" ) );
+      vkCmdCuLaunchKernelNVX = PFN_vkCmdCuLaunchKernelNVX( vkGetInstanceProcAddr( instance, "vkCmdCuLaunchKernelNVX" ) );
+
+  //=== VK_NVX_image_view_handle ===
+      vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetInstanceProcAddr( instance, "vkGetImageViewHandleNVX" ) );
+      vkGetImageViewAddressNVX = PFN_vkGetImageViewAddressNVX( vkGetInstanceProcAddr( instance, "vkGetImageViewAddressNVX" ) );
+
+  //=== VK_AMD_draw_indirect_count ===
+      vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCountAMD" ) );
+      if ( !vkCmdDrawIndirectCount ) vkCmdDrawIndirectCount = vkCmdDrawIndirectCountAMD;
+      vkCmdDrawIndexedIndirectCountAMD = PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCountAMD" ) );
+      if ( !vkCmdDrawIndexedIndirectCount ) vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountAMD;
+
+  //=== VK_AMD_shader_info ===
+      vkGetShaderInfoAMD = PFN_vkGetShaderInfoAMD( vkGetInstanceProcAddr( instance, "vkGetShaderInfoAMD" ) );
+
+  //=== VK_KHR_dynamic_rendering ===
+      vkCmdBeginRenderingKHR = PFN_vkCmdBeginRenderingKHR( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderingKHR" ) );
+      if ( !vkCmdBeginRendering ) vkCmdBeginRendering = vkCmdBeginRenderingKHR;
+      vkCmdEndRenderingKHR = PFN_vkCmdEndRenderingKHR( vkGetInstanceProcAddr( instance, "vkCmdEndRenderingKHR" ) );
+      if ( !vkCmdEndRendering ) vkCmdEndRendering = vkCmdEndRenderingKHR;
+
+#if defined( VK_USE_PLATFORM_GGP )
+  //=== VK_GGP_stream_descriptor_surface ===
+      vkCreateStreamDescriptorSurfaceGGP = PFN_vkCreateStreamDescriptorSurfaceGGP( vkGetInstanceProcAddr( instance, "vkCreateStreamDescriptorSurfaceGGP" ) );
+#endif /*VK_USE_PLATFORM_GGP*/
+
+  //=== VK_NV_external_memory_capabilities ===
+      vkGetPhysicalDeviceExternalImageFormatPropertiesNV = PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalImageFormatPropertiesNV" ) );
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_NV_external_memory_win32 ===
+      vkGetMemoryWin32HandleNV = PFN_vkGetMemoryWin32HandleNV( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandleNV" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_get_physical_device_properties2 ===
+      vkGetPhysicalDeviceFeatures2KHR = PFN_vkGetPhysicalDeviceFeatures2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2KHR" ) );
+      if ( !vkGetPhysicalDeviceFeatures2 ) vkGetPhysicalDeviceFeatures2 = vkGetPhysicalDeviceFeatures2KHR;
+      vkGetPhysicalDeviceProperties2KHR = PFN_vkGetPhysicalDeviceProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2KHR" ) );
+      if ( !vkGetPhysicalDeviceProperties2 ) vkGetPhysicalDeviceProperties2 = vkGetPhysicalDeviceProperties2KHR;
+      vkGetPhysicalDeviceFormatProperties2KHR = PFN_vkGetPhysicalDeviceFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2KHR" ) );
+      if ( !vkGetPhysicalDeviceFormatProperties2 ) vkGetPhysicalDeviceFormatProperties2 = vkGetPhysicalDeviceFormatProperties2KHR;
+      vkGetPhysicalDeviceImageFormatProperties2KHR = PFN_vkGetPhysicalDeviceImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2KHR" ) );
+      if ( !vkGetPhysicalDeviceImageFormatProperties2 ) vkGetPhysicalDeviceImageFormatProperties2 = vkGetPhysicalDeviceImageFormatProperties2KHR;
+      vkGetPhysicalDeviceQueueFamilyProperties2KHR = PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2KHR" ) );
+      if ( !vkGetPhysicalDeviceQueueFamilyProperties2 ) vkGetPhysicalDeviceQueueFamilyProperties2 = vkGetPhysicalDeviceQueueFamilyProperties2KHR;
+      vkGetPhysicalDeviceMemoryProperties2KHR = PFN_vkGetPhysicalDeviceMemoryProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2KHR" ) );
+      if ( !vkGetPhysicalDeviceMemoryProperties2 ) vkGetPhysicalDeviceMemoryProperties2 = vkGetPhysicalDeviceMemoryProperties2KHR;
+      vkGetPhysicalDeviceSparseImageFormatProperties2KHR = PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2KHR" ) );
+      if ( !vkGetPhysicalDeviceSparseImageFormatProperties2 ) vkGetPhysicalDeviceSparseImageFormatProperties2 = vkGetPhysicalDeviceSparseImageFormatProperties2KHR;
+
+  //=== VK_KHR_device_group ===
+      vkGetDeviceGroupPeerMemoryFeaturesKHR = PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) );
+      if ( !vkGetDeviceGroupPeerMemoryFeatures ) vkGetDeviceGroupPeerMemoryFeatures = vkGetDeviceGroupPeerMemoryFeaturesKHR;
+      vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR( vkGetInstanceProcAddr( instance, "vkCmdSetDeviceMaskKHR" ) );
+      if ( !vkCmdSetDeviceMask ) vkCmdSetDeviceMask = vkCmdSetDeviceMaskKHR;
+      vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( vkGetInstanceProcAddr( instance, "vkCmdDispatchBaseKHR" ) );
+      if ( !vkCmdDispatchBase ) vkCmdDispatchBase = vkCmdDispatchBaseKHR;
+
+#if defined( VK_USE_PLATFORM_VI_NN )
+  //=== VK_NN_vi_surface ===
+      vkCreateViSurfaceNN = PFN_vkCreateViSurfaceNN( vkGetInstanceProcAddr( instance, "vkCreateViSurfaceNN" ) );
+#endif /*VK_USE_PLATFORM_VI_NN*/
+
+  //=== VK_KHR_maintenance1 ===
+      vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetInstanceProcAddr( instance, "vkTrimCommandPoolKHR" ) );
+      if ( !vkTrimCommandPool ) vkTrimCommandPool = vkTrimCommandPoolKHR;
+
+  //=== VK_KHR_device_group_creation ===
+      vkEnumeratePhysicalDeviceGroupsKHR = PFN_vkEnumeratePhysicalDeviceGroupsKHR( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroupsKHR" ) );
+      if ( !vkEnumeratePhysicalDeviceGroups ) vkEnumeratePhysicalDeviceGroups = vkEnumeratePhysicalDeviceGroupsKHR;
+
+  //=== VK_KHR_external_memory_capabilities ===
+      vkGetPhysicalDeviceExternalBufferPropertiesKHR = PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferPropertiesKHR" ) );
+      if ( !vkGetPhysicalDeviceExternalBufferProperties ) vkGetPhysicalDeviceExternalBufferProperties = vkGetPhysicalDeviceExternalBufferPropertiesKHR;
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_external_memory_win32 ===
+      vkGetMemoryWin32HandleKHR = PFN_vkGetMemoryWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandleKHR" ) );
+      vkGetMemoryWin32HandlePropertiesKHR = PFN_vkGetMemoryWin32HandlePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandlePropertiesKHR" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_external_memory_fd ===
+      vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryFdKHR" ) );
+      vkGetMemoryFdPropertiesKHR = PFN_vkGetMemoryFdPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryFdPropertiesKHR" ) );
+
+  //=== VK_KHR_external_semaphore_capabilities ===
+      vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR" ) );
+      if ( !vkGetPhysicalDeviceExternalSemaphoreProperties ) vkGetPhysicalDeviceExternalSemaphoreProperties = vkGetPhysicalDeviceExternalSemaphorePropertiesKHR;
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_external_semaphore_win32 ===
+      vkImportSemaphoreWin32HandleKHR = PFN_vkImportSemaphoreWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkImportSemaphoreWin32HandleKHR" ) );
+      vkGetSemaphoreWin32HandleKHR = PFN_vkGetSemaphoreWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreWin32HandleKHR" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_external_semaphore_fd ===
+      vkImportSemaphoreFdKHR = PFN_vkImportSemaphoreFdKHR( vkGetInstanceProcAddr( instance, "vkImportSemaphoreFdKHR" ) );
+      vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreFdKHR" ) );
+
+  //=== VK_KHR_push_descriptor ===
+      vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetKHR" ) );
+      vkCmdPushDescriptorSetWithTemplateKHR = PFN_vkCmdPushDescriptorSetWithTemplateKHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetWithTemplateKHR" ) );
+
+  //=== VK_EXT_conditional_rendering ===
+      vkCmdBeginConditionalRenderingEXT = PFN_vkCmdBeginConditionalRenderingEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginConditionalRenderingEXT" ) );
+      vkCmdEndConditionalRenderingEXT = PFN_vkCmdEndConditionalRenderingEXT( vkGetInstanceProcAddr( instance, "vkCmdEndConditionalRenderingEXT" ) );
+
+  //=== VK_KHR_descriptor_update_template ===
+      vkCreateDescriptorUpdateTemplateKHR = PFN_vkCreateDescriptorUpdateTemplateKHR( vkGetInstanceProcAddr( instance, "vkCreateDescriptorUpdateTemplateKHR" ) );
+      if ( !vkCreateDescriptorUpdateTemplate ) vkCreateDescriptorUpdateTemplate = vkCreateDescriptorUpdateTemplateKHR;
+      vkDestroyDescriptorUpdateTemplateKHR = PFN_vkDestroyDescriptorUpdateTemplateKHR( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorUpdateTemplateKHR" ) );
+      if ( !vkDestroyDescriptorUpdateTemplate ) vkDestroyDescriptorUpdateTemplate = vkDestroyDescriptorUpdateTemplateKHR;
+      vkUpdateDescriptorSetWithTemplateKHR = PFN_vkUpdateDescriptorSetWithTemplateKHR( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSetWithTemplateKHR" ) );
+      if ( !vkUpdateDescriptorSetWithTemplate ) vkUpdateDescriptorSetWithTemplate = vkUpdateDescriptorSetWithTemplateKHR;
+
+  //=== VK_NV_clip_space_w_scaling ===
+      vkCmdSetViewportWScalingNV = PFN_vkCmdSetViewportWScalingNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWScalingNV" ) );
+
+  //=== VK_EXT_direct_mode_display ===
+      vkReleaseDisplayEXT = PFN_vkReleaseDisplayEXT( vkGetInstanceProcAddr( instance, "vkReleaseDisplayEXT" ) );
+
+#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT )
+  //=== VK_EXT_acquire_xlib_display ===
+      vkAcquireXlibDisplayEXT = PFN_vkAcquireXlibDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireXlibDisplayEXT" ) );
+      vkGetRandROutputDisplayEXT = PFN_vkGetRandROutputDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetRandROutputDisplayEXT" ) );
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+
+  //=== VK_EXT_display_surface_counter ===
+      vkGetPhysicalDeviceSurfaceCapabilities2EXT = PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2EXT" ) );
+
+  //=== VK_EXT_display_control ===
+      vkDisplayPowerControlEXT = PFN_vkDisplayPowerControlEXT( vkGetInstanceProcAddr( instance, "vkDisplayPowerControlEXT" ) );
+      vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT( vkGetInstanceProcAddr( instance, "vkRegisterDeviceEventEXT" ) );
+      vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT( vkGetInstanceProcAddr( instance, "vkRegisterDisplayEventEXT" ) );
+      vkGetSwapchainCounterEXT = PFN_vkGetSwapchainCounterEXT( vkGetInstanceProcAddr( instance, "vkGetSwapchainCounterEXT" ) );
+
+  //=== VK_GOOGLE_display_timing ===
+      vkGetRefreshCycleDurationGOOGLE = PFN_vkGetRefreshCycleDurationGOOGLE( vkGetInstanceProcAddr( instance, "vkGetRefreshCycleDurationGOOGLE" ) );
+      vkGetPastPresentationTimingGOOGLE = PFN_vkGetPastPresentationTimingGOOGLE( vkGetInstanceProcAddr( instance, "vkGetPastPresentationTimingGOOGLE" ) );
+
+  //=== VK_EXT_discard_rectangles ===
+      vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDiscardRectangleEXT" ) );
+
+  //=== VK_EXT_hdr_metadata ===
+      vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetInstanceProcAddr( instance, "vkSetHdrMetadataEXT" ) );
+
+  //=== VK_KHR_create_renderpass2 ===
+      vkCreateRenderPass2KHR = PFN_vkCreateRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCreateRenderPass2KHR" ) );
+      if ( !vkCreateRenderPass2 ) vkCreateRenderPass2 = vkCreateRenderPass2KHR;
+      vkCmdBeginRenderPass2KHR = PFN_vkCmdBeginRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass2KHR" ) );
+      if ( !vkCmdBeginRenderPass2 ) vkCmdBeginRenderPass2 = vkCmdBeginRenderPass2KHR;
+      vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass2KHR" ) );
+      if ( !vkCmdNextSubpass2 ) vkCmdNextSubpass2 = vkCmdNextSubpass2KHR;
+      vkCmdEndRenderPass2KHR = PFN_vkCmdEndRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass2KHR" ) );
+      if ( !vkCmdEndRenderPass2 ) vkCmdEndRenderPass2 = vkCmdEndRenderPass2KHR;
+
+  //=== VK_KHR_shared_presentable_image ===
+      vkGetSwapchainStatusKHR = PFN_vkGetSwapchainStatusKHR( vkGetInstanceProcAddr( instance, "vkGetSwapchainStatusKHR" ) );
+
+  //=== VK_KHR_external_fence_capabilities ===
+      vkGetPhysicalDeviceExternalFencePropertiesKHR = PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFencePropertiesKHR" ) );
+      if ( !vkGetPhysicalDeviceExternalFenceProperties ) vkGetPhysicalDeviceExternalFenceProperties = vkGetPhysicalDeviceExternalFencePropertiesKHR;
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_external_fence_win32 ===
+      vkImportFenceWin32HandleKHR = PFN_vkImportFenceWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkImportFenceWin32HandleKHR" ) );
+      vkGetFenceWin32HandleKHR = PFN_vkGetFenceWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetFenceWin32HandleKHR" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_external_fence_fd ===
+      vkImportFenceFdKHR = PFN_vkImportFenceFdKHR( vkGetInstanceProcAddr( instance, "vkImportFenceFdKHR" ) );
+      vkGetFenceFdKHR = PFN_vkGetFenceFdKHR( vkGetInstanceProcAddr( instance, "vkGetFenceFdKHR" ) );
+
+  //=== VK_KHR_performance_query ===
+      vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR" ) );
+      vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR" ) );
+      vkAcquireProfilingLockKHR = PFN_vkAcquireProfilingLockKHR( vkGetInstanceProcAddr( instance, "vkAcquireProfilingLockKHR" ) );
+      vkReleaseProfilingLockKHR = PFN_vkReleaseProfilingLockKHR( vkGetInstanceProcAddr( instance, "vkReleaseProfilingLockKHR" ) );
+
+  //=== VK_KHR_get_surface_capabilities2 ===
+      vkGetPhysicalDeviceSurfaceCapabilities2KHR = PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2KHR" ) );
+      vkGetPhysicalDeviceSurfaceFormats2KHR = PFN_vkGetPhysicalDeviceSurfaceFormats2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormats2KHR" ) );
+
+  //=== VK_KHR_get_display_properties2 ===
+      vkGetPhysicalDeviceDisplayProperties2KHR = PFN_vkGetPhysicalDeviceDisplayProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayProperties2KHR" ) );
+      vkGetPhysicalDeviceDisplayPlaneProperties2KHR = PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlaneProperties2KHR" ) );
+      vkGetDisplayModeProperties2KHR = PFN_vkGetDisplayModeProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModeProperties2KHR" ) );
+      vkGetDisplayPlaneCapabilities2KHR = PFN_vkGetDisplayPlaneCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilities2KHR" ) );
+
+#if defined( VK_USE_PLATFORM_IOS_MVK )
+  //=== VK_MVK_ios_surface ===
+      vkCreateIOSSurfaceMVK = PFN_vkCreateIOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateIOSSurfaceMVK" ) );
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+#if defined( VK_USE_PLATFORM_MACOS_MVK )
+  //=== VK_MVK_macos_surface ===
+      vkCreateMacOSSurfaceMVK = PFN_vkCreateMacOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateMacOSSurfaceMVK" ) );
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+
+  //=== VK_EXT_debug_utils ===
+      vkSetDebugUtilsObjectNameEXT = PFN_vkSetDebugUtilsObjectNameEXT( vkGetInstanceProcAddr( instance, "vkSetDebugUtilsObjectNameEXT" ) );
+      vkSetDebugUtilsObjectTagEXT = PFN_vkSetDebugUtilsObjectTagEXT( vkGetInstanceProcAddr( instance, "vkSetDebugUtilsObjectTagEXT" ) );
+      vkQueueBeginDebugUtilsLabelEXT = PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueBeginDebugUtilsLabelEXT" ) );
+      vkQueueEndDebugUtilsLabelEXT = PFN_vkQueueEndDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueEndDebugUtilsLabelEXT" ) );
+      vkQueueInsertDebugUtilsLabelEXT = PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueInsertDebugUtilsLabelEXT" ) );
+      vkCmdBeginDebugUtilsLabelEXT = PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginDebugUtilsLabelEXT" ) );
+      vkCmdEndDebugUtilsLabelEXT = PFN_vkCmdEndDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdEndDebugUtilsLabelEXT" ) );
+      vkCmdInsertDebugUtilsLabelEXT = PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdInsertDebugUtilsLabelEXT" ) );
+      vkCreateDebugUtilsMessengerEXT = PFN_vkCreateDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugUtilsMessengerEXT" ) );
+      vkDestroyDebugUtilsMessengerEXT = PFN_vkDestroyDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugUtilsMessengerEXT" ) );
+      vkSubmitDebugUtilsMessageEXT = PFN_vkSubmitDebugUtilsMessageEXT( vkGetInstanceProcAddr( instance, "vkSubmitDebugUtilsMessageEXT" ) );
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  //=== VK_ANDROID_external_memory_android_hardware_buffer ===
+      vkGetAndroidHardwareBufferPropertiesANDROID = PFN_vkGetAndroidHardwareBufferPropertiesANDROID( vkGetInstanceProcAddr( instance, "vkGetAndroidHardwareBufferPropertiesANDROID" ) );
+      vkGetMemoryAndroidHardwareBufferANDROID = PFN_vkGetMemoryAndroidHardwareBufferANDROID( vkGetInstanceProcAddr( instance, "vkGetMemoryAndroidHardwareBufferANDROID" ) );
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+  //=== VK_EXT_sample_locations ===
+      vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetInstanceProcAddr( instance, "vkCmdSetSampleLocationsEXT" ) );
+      vkGetPhysicalDeviceMultisamplePropertiesEXT = PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMultisamplePropertiesEXT" ) );
+
+  //=== VK_KHR_get_memory_requirements2 ===
+      vkGetImageMemoryRequirements2KHR = PFN_vkGetImageMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements2KHR" ) );
+      if ( !vkGetImageMemoryRequirements2 ) vkGetImageMemoryRequirements2 = vkGetImageMemoryRequirements2KHR;
+      vkGetBufferMemoryRequirements2KHR = PFN_vkGetBufferMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements2KHR" ) );
+      if ( !vkGetBufferMemoryRequirements2 ) vkGetBufferMemoryRequirements2 = vkGetBufferMemoryRequirements2KHR;
+      vkGetImageSparseMemoryRequirements2KHR = PFN_vkGetImageSparseMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements2KHR" ) );
+      if ( !vkGetImageSparseMemoryRequirements2 ) vkGetImageSparseMemoryRequirements2 = vkGetImageSparseMemoryRequirements2KHR;
+
+  //=== VK_KHR_acceleration_structure ===
+      vkCreateAccelerationStructureKHR = PFN_vkCreateAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCreateAccelerationStructureKHR" ) );
+      vkDestroyAccelerationStructureKHR = PFN_vkDestroyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkDestroyAccelerationStructureKHR" ) );
+      vkCmdBuildAccelerationStructuresKHR = PFN_vkCmdBuildAccelerationStructuresKHR( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructuresKHR" ) );
+      vkCmdBuildAccelerationStructuresIndirectKHR = PFN_vkCmdBuildAccelerationStructuresIndirectKHR( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructuresIndirectKHR" ) );
+      vkBuildAccelerationStructuresKHR = PFN_vkBuildAccelerationStructuresKHR( vkGetInstanceProcAddr( instance, "vkBuildAccelerationStructuresKHR" ) );
+      vkCopyAccelerationStructureKHR = PFN_vkCopyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCopyAccelerationStructureKHR" ) );
+      vkCopyAccelerationStructureToMemoryKHR = PFN_vkCopyAccelerationStructureToMemoryKHR( vkGetInstanceProcAddr( instance, "vkCopyAccelerationStructureToMemoryKHR" ) );
+      vkCopyMemoryToAccelerationStructureKHR = PFN_vkCopyMemoryToAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCopyMemoryToAccelerationStructureKHR" ) );
+      vkWriteAccelerationStructuresPropertiesKHR = PFN_vkWriteAccelerationStructuresPropertiesKHR( vkGetInstanceProcAddr( instance, "vkWriteAccelerationStructuresPropertiesKHR" ) );
+      vkCmdCopyAccelerationStructureKHR = PFN_vkCmdCopyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureKHR" ) );
+      vkCmdCopyAccelerationStructureToMemoryKHR = PFN_vkCmdCopyAccelerationStructureToMemoryKHR( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureToMemoryKHR" ) );
+      vkCmdCopyMemoryToAccelerationStructureKHR = PFN_vkCmdCopyMemoryToAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCmdCopyMemoryToAccelerationStructureKHR" ) );
+      vkGetAccelerationStructureDeviceAddressKHR = PFN_vkGetAccelerationStructureDeviceAddressKHR( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureDeviceAddressKHR" ) );
+      vkCmdWriteAccelerationStructuresPropertiesKHR = PFN_vkCmdWriteAccelerationStructuresPropertiesKHR( vkGetInstanceProcAddr( instance, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) );
+      vkGetDeviceAccelerationStructureCompatibilityKHR = PFN_vkGetDeviceAccelerationStructureCompatibilityKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceAccelerationStructureCompatibilityKHR" ) );
+      vkGetAccelerationStructureBuildSizesKHR = PFN_vkGetAccelerationStructureBuildSizesKHR( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureBuildSizesKHR" ) );
+
+  //=== VK_KHR_sampler_ycbcr_conversion ===
+      vkCreateSamplerYcbcrConversionKHR = PFN_vkCreateSamplerYcbcrConversionKHR( vkGetInstanceProcAddr( instance, "vkCreateSamplerYcbcrConversionKHR" ) );
+      if ( !vkCreateSamplerYcbcrConversion ) vkCreateSamplerYcbcrConversion = vkCreateSamplerYcbcrConversionKHR;
+      vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR( vkGetInstanceProcAddr( instance, "vkDestroySamplerYcbcrConversionKHR" ) );
+      if ( !vkDestroySamplerYcbcrConversion ) vkDestroySamplerYcbcrConversion = vkDestroySamplerYcbcrConversionKHR;
+
+  //=== VK_KHR_bind_memory2 ===
+      vkBindBufferMemory2KHR = PFN_vkBindBufferMemory2KHR( vkGetInstanceProcAddr( instance, "vkBindBufferMemory2KHR" ) );
+      if ( !vkBindBufferMemory2 ) vkBindBufferMemory2 = vkBindBufferMemory2KHR;
+      vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR( vkGetInstanceProcAddr( instance, "vkBindImageMemory2KHR" ) );
+      if ( !vkBindImageMemory2 ) vkBindImageMemory2 = vkBindImageMemory2KHR;
+
+  //=== VK_EXT_image_drm_format_modifier ===
+      vkGetImageDrmFormatModifierPropertiesEXT = PFN_vkGetImageDrmFormatModifierPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetImageDrmFormatModifierPropertiesEXT" ) );
+
+  //=== VK_EXT_validation_cache ===
+      vkCreateValidationCacheEXT = PFN_vkCreateValidationCacheEXT( vkGetInstanceProcAddr( instance, "vkCreateValidationCacheEXT" ) );
+      vkDestroyValidationCacheEXT = PFN_vkDestroyValidationCacheEXT( vkGetInstanceProcAddr( instance, "vkDestroyValidationCacheEXT" ) );
+      vkMergeValidationCachesEXT = PFN_vkMergeValidationCachesEXT( vkGetInstanceProcAddr( instance, "vkMergeValidationCachesEXT" ) );
+      vkGetValidationCacheDataEXT = PFN_vkGetValidationCacheDataEXT( vkGetInstanceProcAddr( instance, "vkGetValidationCacheDataEXT" ) );
+
+  //=== VK_NV_shading_rate_image ===
+      vkCmdBindShadingRateImageNV = PFN_vkCmdBindShadingRateImageNV( vkGetInstanceProcAddr( instance, "vkCmdBindShadingRateImageNV" ) );
+      vkCmdSetViewportShadingRatePaletteNV = PFN_vkCmdSetViewportShadingRatePaletteNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportShadingRatePaletteNV" ) );
+      vkCmdSetCoarseSampleOrderNV = PFN_vkCmdSetCoarseSampleOrderNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoarseSampleOrderNV" ) );
+
+  //=== VK_NV_ray_tracing ===
+      vkCreateAccelerationStructureNV = PFN_vkCreateAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCreateAccelerationStructureNV" ) );
+      vkDestroyAccelerationStructureNV = PFN_vkDestroyAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkDestroyAccelerationStructureNV" ) );
+      vkGetAccelerationStructureMemoryRequirementsNV = PFN_vkGetAccelerationStructureMemoryRequirementsNV( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureMemoryRequirementsNV" ) );
+      vkBindAccelerationStructureMemoryNV = PFN_vkBindAccelerationStructureMemoryNV( vkGetInstanceProcAddr( instance, "vkBindAccelerationStructureMemoryNV" ) );
+      vkCmdBuildAccelerationStructureNV = PFN_vkCmdBuildAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructureNV" ) );
+      vkCmdCopyAccelerationStructureNV = PFN_vkCmdCopyAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureNV" ) );
+      vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysNV" ) );
+      vkCreateRayTracingPipelinesNV = PFN_vkCreateRayTracingPipelinesNV( vkGetInstanceProcAddr( instance, "vkCreateRayTracingPipelinesNV" ) );
+      vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupHandlesNV" ) );
+      if ( !vkGetRayTracingShaderGroupHandlesKHR ) vkGetRayTracingShaderGroupHandlesKHR = vkGetRayTracingShaderGroupHandlesNV;
+      vkGetAccelerationStructureHandleNV = PFN_vkGetAccelerationStructureHandleNV( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureHandleNV" ) );
+      vkCmdWriteAccelerationStructuresPropertiesNV = PFN_vkCmdWriteAccelerationStructuresPropertiesNV( vkGetInstanceProcAddr( instance, "vkCmdWriteAccelerationStructuresPropertiesNV" ) );
+      vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetInstanceProcAddr( instance, "vkCompileDeferredNV" ) );
+
+  //=== VK_KHR_maintenance3 ===
+      vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSupportKHR" ) );
+      if ( !vkGetDescriptorSetLayoutSupport ) vkGetDescriptorSetLayoutSupport = vkGetDescriptorSetLayoutSupportKHR;
+
+  //=== VK_KHR_draw_indirect_count ===
+      vkCmdDrawIndirectCountKHR = PFN_vkCmdDrawIndirectCountKHR( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCountKHR" ) );
+      if ( !vkCmdDrawIndirectCount ) vkCmdDrawIndirectCount = vkCmdDrawIndirectCountKHR;
+      vkCmdDrawIndexedIndirectCountKHR = PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCountKHR" ) );
+      if ( !vkCmdDrawIndexedIndirectCount ) vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountKHR;
+
+  //=== VK_EXT_external_memory_host ===
+      vkGetMemoryHostPointerPropertiesEXT = PFN_vkGetMemoryHostPointerPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetMemoryHostPointerPropertiesEXT" ) );
+
+  //=== VK_AMD_buffer_marker ===
+      vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetInstanceProcAddr( instance, "vkCmdWriteBufferMarkerAMD" ) );
+
+  //=== VK_EXT_calibrated_timestamps ===
+      vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsEXT" ) );
+      vkGetCalibratedTimestampsEXT = PFN_vkGetCalibratedTimestampsEXT( vkGetInstanceProcAddr( instance, "vkGetCalibratedTimestampsEXT" ) );
+
+  //=== VK_NV_mesh_shader ===
+      vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksNV" ) );
+      vkCmdDrawMeshTasksIndirectNV = PFN_vkCmdDrawMeshTasksIndirectNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectNV" ) );
+      vkCmdDrawMeshTasksIndirectCountNV = PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectCountNV" ) );
+
+  //=== VK_NV_scissor_exclusive ===
+      vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV( vkGetInstanceProcAddr( instance, "vkCmdSetExclusiveScissorNV" ) );
+
+  //=== VK_NV_device_diagnostic_checkpoints ===
+      vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetInstanceProcAddr( instance, "vkCmdSetCheckpointNV" ) );
+      vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetInstanceProcAddr( instance, "vkGetQueueCheckpointDataNV" ) );
+
+  //=== VK_KHR_timeline_semaphore ===
+      vkGetSemaphoreCounterValueKHR = PFN_vkGetSemaphoreCounterValueKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreCounterValueKHR" ) );
+      if ( !vkGetSemaphoreCounterValue ) vkGetSemaphoreCounterValue = vkGetSemaphoreCounterValueKHR;
+      vkWaitSemaphoresKHR = PFN_vkWaitSemaphoresKHR( vkGetInstanceProcAddr( instance, "vkWaitSemaphoresKHR" ) );
+      if ( !vkWaitSemaphores ) vkWaitSemaphores = vkWaitSemaphoresKHR;
+      vkSignalSemaphoreKHR = PFN_vkSignalSemaphoreKHR( vkGetInstanceProcAddr( instance, "vkSignalSemaphoreKHR" ) );
+      if ( !vkSignalSemaphore ) vkSignalSemaphore = vkSignalSemaphoreKHR;
+
+  //=== VK_INTEL_performance_query ===
+      vkInitializePerformanceApiINTEL = PFN_vkInitializePerformanceApiINTEL( vkGetInstanceProcAddr( instance, "vkInitializePerformanceApiINTEL" ) );
+      vkUninitializePerformanceApiINTEL = PFN_vkUninitializePerformanceApiINTEL( vkGetInstanceProcAddr( instance, "vkUninitializePerformanceApiINTEL" ) );
+      vkCmdSetPerformanceMarkerINTEL = PFN_vkCmdSetPerformanceMarkerINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceMarkerINTEL" ) );
+      vkCmdSetPerformanceStreamMarkerINTEL = PFN_vkCmdSetPerformanceStreamMarkerINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceStreamMarkerINTEL" ) );
+      vkCmdSetPerformanceOverrideINTEL = PFN_vkCmdSetPerformanceOverrideINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceOverrideINTEL" ) );
+      vkAcquirePerformanceConfigurationINTEL = PFN_vkAcquirePerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkAcquirePerformanceConfigurationINTEL" ) );
+      vkReleasePerformanceConfigurationINTEL = PFN_vkReleasePerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkReleasePerformanceConfigurationINTEL" ) );
+      vkQueueSetPerformanceConfigurationINTEL = PFN_vkQueueSetPerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkQueueSetPerformanceConfigurationINTEL" ) );
+      vkGetPerformanceParameterINTEL = PFN_vkGetPerformanceParameterINTEL( vkGetInstanceProcAddr( instance, "vkGetPerformanceParameterINTEL" ) );
+
+  //=== VK_AMD_display_native_hdr ===
+      vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetInstanceProcAddr( instance, "vkSetLocalDimmingAMD" ) );
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_imagepipe_surface ===
+      vkCreateImagePipeSurfaceFUCHSIA = PFN_vkCreateImagePipeSurfaceFUCHSIA( vkGetInstanceProcAddr( instance, "vkCreateImagePipeSurfaceFUCHSIA" ) );
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+  //=== VK_EXT_metal_surface ===
+      vkCreateMetalSurfaceEXT = PFN_vkCreateMetalSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateMetalSurfaceEXT" ) );
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  //=== VK_KHR_fragment_shading_rate ===
+      vkGetPhysicalDeviceFragmentShadingRatesKHR = PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFragmentShadingRatesKHR" ) );
+      vkCmdSetFragmentShadingRateKHR = PFN_vkCmdSetFragmentShadingRateKHR( vkGetInstanceProcAddr( instance, "vkCmdSetFragmentShadingRateKHR" ) );
+
+  //=== VK_EXT_buffer_device_address ===
+      vkGetBufferDeviceAddressEXT = PFN_vkGetBufferDeviceAddressEXT( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddressEXT" ) );
+      if ( !vkGetBufferDeviceAddress ) vkGetBufferDeviceAddress = vkGetBufferDeviceAddressEXT;
+
+  //=== VK_EXT_tooling_info ===
+      vkGetPhysicalDeviceToolPropertiesEXT = PFN_vkGetPhysicalDeviceToolPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolPropertiesEXT" ) );
+      if ( !vkGetPhysicalDeviceToolProperties ) vkGetPhysicalDeviceToolProperties = vkGetPhysicalDeviceToolPropertiesEXT;
+
+  //=== VK_KHR_present_wait ===
+      vkWaitForPresentKHR = PFN_vkWaitForPresentKHR( vkGetInstanceProcAddr( instance, "vkWaitForPresentKHR" ) );
+
+  //=== VK_NV_cooperative_matrix ===
+      vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesNV" ) );
+
+  //=== VK_NV_coverage_reduction_mode ===
+      vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV" ) );
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_EXT_full_screen_exclusive ===
+      vkGetPhysicalDeviceSurfacePresentModes2EXT = PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModes2EXT" ) );
+      vkAcquireFullScreenExclusiveModeEXT = PFN_vkAcquireFullScreenExclusiveModeEXT( vkGetInstanceProcAddr( instance, "vkAcquireFullScreenExclusiveModeEXT" ) );
+      vkReleaseFullScreenExclusiveModeEXT = PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetInstanceProcAddr( instance, "vkReleaseFullScreenExclusiveModeEXT" ) );
+      vkGetDeviceGroupSurfacePresentModes2EXT = PFN_vkGetDeviceGroupSurfacePresentModes2EXT( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupSurfacePresentModes2EXT" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_EXT_headless_surface ===
+      vkCreateHeadlessSurfaceEXT = PFN_vkCreateHeadlessSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateHeadlessSurfaceEXT" ) );
+
+  //=== VK_KHR_buffer_device_address ===
+      vkGetBufferDeviceAddressKHR = PFN_vkGetBufferDeviceAddressKHR( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddressKHR" ) );
+      if ( !vkGetBufferDeviceAddress ) vkGetBufferDeviceAddress = vkGetBufferDeviceAddressKHR;
+      vkGetBufferOpaqueCaptureAddressKHR = PFN_vkGetBufferOpaqueCaptureAddressKHR( vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureAddressKHR" ) );
+      if ( !vkGetBufferOpaqueCaptureAddress ) vkGetBufferOpaqueCaptureAddress = vkGetBufferOpaqueCaptureAddressKHR;
+      vkGetDeviceMemoryOpaqueCaptureAddressKHR = PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) );
+      if ( !vkGetDeviceMemoryOpaqueCaptureAddress ) vkGetDeviceMemoryOpaqueCaptureAddress = vkGetDeviceMemoryOpaqueCaptureAddressKHR;
+
+  //=== VK_EXT_line_rasterization ===
+      vkCmdSetLineStippleEXT = PFN_vkCmdSetLineStippleEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLineStippleEXT" ) );
+
+  //=== VK_EXT_host_query_reset ===
+      vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetInstanceProcAddr( instance, "vkResetQueryPoolEXT" ) );
+      if ( !vkResetQueryPool ) vkResetQueryPool = vkResetQueryPoolEXT;
+
+  //=== VK_EXT_extended_dynamic_state ===
+      vkCmdSetCullModeEXT = PFN_vkCmdSetCullModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetCullModeEXT" ) );
+      if ( !vkCmdSetCullMode ) vkCmdSetCullMode = vkCmdSetCullModeEXT;
+      vkCmdSetFrontFaceEXT = PFN_vkCmdSetFrontFaceEXT( vkGetInstanceProcAddr( instance, "vkCmdSetFrontFaceEXT" ) );
+      if ( !vkCmdSetFrontFace ) vkCmdSetFrontFace = vkCmdSetFrontFaceEXT;
+      vkCmdSetPrimitiveTopologyEXT = PFN_vkCmdSetPrimitiveTopologyEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveTopologyEXT" ) );
+      if ( !vkCmdSetPrimitiveTopology ) vkCmdSetPrimitiveTopology = vkCmdSetPrimitiveTopologyEXT;
+      vkCmdSetViewportWithCountEXT = PFN_vkCmdSetViewportWithCountEXT( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWithCountEXT" ) );
+      if ( !vkCmdSetViewportWithCount ) vkCmdSetViewportWithCount = vkCmdSetViewportWithCountEXT;
+      vkCmdSetScissorWithCountEXT = PFN_vkCmdSetScissorWithCountEXT( vkGetInstanceProcAddr( instance, "vkCmdSetScissorWithCountEXT" ) );
+      if ( !vkCmdSetScissorWithCount ) vkCmdSetScissorWithCount = vkCmdSetScissorWithCountEXT;
+      vkCmdBindVertexBuffers2EXT = PFN_vkCmdBindVertexBuffers2EXT( vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers2EXT" ) );
+      if ( !vkCmdBindVertexBuffers2 ) vkCmdBindVertexBuffers2 = vkCmdBindVertexBuffers2EXT;
+      vkCmdSetDepthTestEnableEXT = PFN_vkCmdSetDepthTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthTestEnableEXT" ) );
+      if ( !vkCmdSetDepthTestEnable ) vkCmdSetDepthTestEnable = vkCmdSetDepthTestEnableEXT;
+      vkCmdSetDepthWriteEnableEXT = PFN_vkCmdSetDepthWriteEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthWriteEnableEXT" ) );
+      if ( !vkCmdSetDepthWriteEnable ) vkCmdSetDepthWriteEnable = vkCmdSetDepthWriteEnableEXT;
+      vkCmdSetDepthCompareOpEXT = PFN_vkCmdSetDepthCompareOpEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthCompareOpEXT" ) );
+      if ( !vkCmdSetDepthCompareOp ) vkCmdSetDepthCompareOp = vkCmdSetDepthCompareOpEXT;
+      vkCmdSetDepthBoundsTestEnableEXT = PFN_vkCmdSetDepthBoundsTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBoundsTestEnableEXT" ) );
+      if ( !vkCmdSetDepthBoundsTestEnable ) vkCmdSetDepthBoundsTestEnable = vkCmdSetDepthBoundsTestEnableEXT;
+      vkCmdSetStencilTestEnableEXT = PFN_vkCmdSetStencilTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetStencilTestEnableEXT" ) );
+      if ( !vkCmdSetStencilTestEnable ) vkCmdSetStencilTestEnable = vkCmdSetStencilTestEnableEXT;
+      vkCmdSetStencilOpEXT = PFN_vkCmdSetStencilOpEXT( vkGetInstanceProcAddr( instance, "vkCmdSetStencilOpEXT" ) );
+      if ( !vkCmdSetStencilOp ) vkCmdSetStencilOp = vkCmdSetStencilOpEXT;
+
+  //=== VK_KHR_deferred_host_operations ===
+      vkCreateDeferredOperationKHR = PFN_vkCreateDeferredOperationKHR( vkGetInstanceProcAddr( instance, "vkCreateDeferredOperationKHR" ) );
+      vkDestroyDeferredOperationKHR = PFN_vkDestroyDeferredOperationKHR( vkGetInstanceProcAddr( instance, "vkDestroyDeferredOperationKHR" ) );
+      vkGetDeferredOperationMaxConcurrencyKHR = PFN_vkGetDeferredOperationMaxConcurrencyKHR( vkGetInstanceProcAddr( instance, "vkGetDeferredOperationMaxConcurrencyKHR" ) );
+      vkGetDeferredOperationResultKHR = PFN_vkGetDeferredOperationResultKHR( vkGetInstanceProcAddr( instance, "vkGetDeferredOperationResultKHR" ) );
+      vkDeferredOperationJoinKHR = PFN_vkDeferredOperationJoinKHR( vkGetInstanceProcAddr( instance, "vkDeferredOperationJoinKHR" ) );
+
+  //=== VK_KHR_pipeline_executable_properties ===
+      vkGetPipelineExecutablePropertiesKHR = PFN_vkGetPipelineExecutablePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutablePropertiesKHR" ) );
+      vkGetPipelineExecutableStatisticsKHR = PFN_vkGetPipelineExecutableStatisticsKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutableStatisticsKHR" ) );
+      vkGetPipelineExecutableInternalRepresentationsKHR = PFN_vkGetPipelineExecutableInternalRepresentationsKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutableInternalRepresentationsKHR" ) );
+
+  //=== VK_EXT_swapchain_maintenance1 ===
+      vkReleaseSwapchainImagesEXT = PFN_vkReleaseSwapchainImagesEXT( vkGetInstanceProcAddr( instance, "vkReleaseSwapchainImagesEXT" ) );
+
+  //=== VK_NV_device_generated_commands ===
+      vkGetGeneratedCommandsMemoryRequirementsNV = PFN_vkGetGeneratedCommandsMemoryRequirementsNV( vkGetInstanceProcAddr( instance, "vkGetGeneratedCommandsMemoryRequirementsNV" ) );
+      vkCmdPreprocessGeneratedCommandsNV = PFN_vkCmdPreprocessGeneratedCommandsNV( vkGetInstanceProcAddr( instance, "vkCmdPreprocessGeneratedCommandsNV" ) );
+      vkCmdExecuteGeneratedCommandsNV = PFN_vkCmdExecuteGeneratedCommandsNV( vkGetInstanceProcAddr( instance, "vkCmdExecuteGeneratedCommandsNV" ) );
+      vkCmdBindPipelineShaderGroupNV = PFN_vkCmdBindPipelineShaderGroupNV( vkGetInstanceProcAddr( instance, "vkCmdBindPipelineShaderGroupNV" ) );
+      vkCreateIndirectCommandsLayoutNV = PFN_vkCreateIndirectCommandsLayoutNV( vkGetInstanceProcAddr( instance, "vkCreateIndirectCommandsLayoutNV" ) );
+      vkDestroyIndirectCommandsLayoutNV = PFN_vkDestroyIndirectCommandsLayoutNV( vkGetInstanceProcAddr( instance, "vkDestroyIndirectCommandsLayoutNV" ) );
+
+  //=== VK_EXT_acquire_drm_display ===
+      vkAcquireDrmDisplayEXT = PFN_vkAcquireDrmDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireDrmDisplayEXT" ) );
+      vkGetDrmDisplayEXT = PFN_vkGetDrmDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetDrmDisplayEXT" ) );
+
+  //=== VK_EXT_private_data ===
+      vkCreatePrivateDataSlotEXT = PFN_vkCreatePrivateDataSlotEXT( vkGetInstanceProcAddr( instance, "vkCreatePrivateDataSlotEXT" ) );
+      if ( !vkCreatePrivateDataSlot ) vkCreatePrivateDataSlot = vkCreatePrivateDataSlotEXT;
+      vkDestroyPrivateDataSlotEXT = PFN_vkDestroyPrivateDataSlotEXT( vkGetInstanceProcAddr( instance, "vkDestroyPrivateDataSlotEXT" ) );
+      if ( !vkDestroyPrivateDataSlot ) vkDestroyPrivateDataSlot = vkDestroyPrivateDataSlotEXT;
+      vkSetPrivateDataEXT = PFN_vkSetPrivateDataEXT( vkGetInstanceProcAddr( instance, "vkSetPrivateDataEXT" ) );
+      if ( !vkSetPrivateData ) vkSetPrivateData = vkSetPrivateDataEXT;
+      vkGetPrivateDataEXT = PFN_vkGetPrivateDataEXT( vkGetInstanceProcAddr( instance, "vkGetPrivateDataEXT" ) );
+      if ( !vkGetPrivateData ) vkGetPrivateData = vkGetPrivateDataEXT;
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  //=== VK_KHR_video_encode_queue ===
+      vkCmdEncodeVideoKHR = PFN_vkCmdEncodeVideoKHR( vkGetInstanceProcAddr( instance, "vkCmdEncodeVideoKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+  //=== VK_EXT_metal_objects ===
+      vkExportMetalObjectsEXT = PFN_vkExportMetalObjectsEXT( vkGetInstanceProcAddr( instance, "vkExportMetalObjectsEXT" ) );
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  //=== VK_KHR_synchronization2 ===
+      vkCmdSetEvent2KHR = PFN_vkCmdSetEvent2KHR( vkGetInstanceProcAddr( instance, "vkCmdSetEvent2KHR" ) );
+      if ( !vkCmdSetEvent2 ) vkCmdSetEvent2 = vkCmdSetEvent2KHR;
+      vkCmdResetEvent2KHR = PFN_vkCmdResetEvent2KHR( vkGetInstanceProcAddr( instance, "vkCmdResetEvent2KHR" ) );
+      if ( !vkCmdResetEvent2 ) vkCmdResetEvent2 = vkCmdResetEvent2KHR;
+      vkCmdWaitEvents2KHR = PFN_vkCmdWaitEvents2KHR( vkGetInstanceProcAddr( instance, "vkCmdWaitEvents2KHR" ) );
+      if ( !vkCmdWaitEvents2 ) vkCmdWaitEvents2 = vkCmdWaitEvents2KHR;
+      vkCmdPipelineBarrier2KHR = PFN_vkCmdPipelineBarrier2KHR( vkGetInstanceProcAddr( instance, "vkCmdPipelineBarrier2KHR" ) );
+      if ( !vkCmdPipelineBarrier2 ) vkCmdPipelineBarrier2 = vkCmdPipelineBarrier2KHR;
+      vkCmdWriteTimestamp2KHR = PFN_vkCmdWriteTimestamp2KHR( vkGetInstanceProcAddr( instance, "vkCmdWriteTimestamp2KHR" ) );
+      if ( !vkCmdWriteTimestamp2 ) vkCmdWriteTimestamp2 = vkCmdWriteTimestamp2KHR;
+      vkQueueSubmit2KHR = PFN_vkQueueSubmit2KHR( vkGetInstanceProcAddr( instance, "vkQueueSubmit2KHR" ) );
+      if ( !vkQueueSubmit2 ) vkQueueSubmit2 = vkQueueSubmit2KHR;
+      vkCmdWriteBufferMarker2AMD = PFN_vkCmdWriteBufferMarker2AMD( vkGetInstanceProcAddr( instance, "vkCmdWriteBufferMarker2AMD" ) );
+      vkGetQueueCheckpointData2NV = PFN_vkGetQueueCheckpointData2NV( vkGetInstanceProcAddr( instance, "vkGetQueueCheckpointData2NV" ) );
+
+  //=== VK_EXT_descriptor_buffer ===
+      vkGetDescriptorSetLayoutSizeEXT = PFN_vkGetDescriptorSetLayoutSizeEXT( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSizeEXT" ) );
+      vkGetDescriptorSetLayoutBindingOffsetEXT = PFN_vkGetDescriptorSetLayoutBindingOffsetEXT( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutBindingOffsetEXT" ) );
+      vkGetDescriptorEXT = PFN_vkGetDescriptorEXT( vkGetInstanceProcAddr( instance, "vkGetDescriptorEXT" ) );
+      vkCmdBindDescriptorBuffersEXT = PFN_vkCmdBindDescriptorBuffersEXT( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorBuffersEXT" ) );
+      vkCmdSetDescriptorBufferOffsetsEXT = PFN_vkCmdSetDescriptorBufferOffsetsEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDescriptorBufferOffsetsEXT" ) );
+      vkCmdBindDescriptorBufferEmbeddedSamplersEXT = PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorBufferEmbeddedSamplersEXT" ) );
+      vkGetBufferOpaqueCaptureDescriptorDataEXT = PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT( vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureDescriptorDataEXT" ) );
+      vkGetImageOpaqueCaptureDescriptorDataEXT = PFN_vkGetImageOpaqueCaptureDescriptorDataEXT( vkGetInstanceProcAddr( instance, "vkGetImageOpaqueCaptureDescriptorDataEXT" ) );
+      vkGetImageViewOpaqueCaptureDescriptorDataEXT = PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT( vkGetInstanceProcAddr( instance, "vkGetImageViewOpaqueCaptureDescriptorDataEXT" ) );
+      vkGetSamplerOpaqueCaptureDescriptorDataEXT = PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT( vkGetInstanceProcAddr( instance, "vkGetSamplerOpaqueCaptureDescriptorDataEXT" ) );
+      vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT = PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT" ) );
+
+  //=== VK_NV_fragment_shading_rate_enums ===
+      vkCmdSetFragmentShadingRateEnumNV = PFN_vkCmdSetFragmentShadingRateEnumNV( vkGetInstanceProcAddr( instance, "vkCmdSetFragmentShadingRateEnumNV" ) );
+
+  //=== VK_EXT_mesh_shader ===
+      vkCmdDrawMeshTasksEXT = PFN_vkCmdDrawMeshTasksEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksEXT" ) );
+      vkCmdDrawMeshTasksIndirectEXT = PFN_vkCmdDrawMeshTasksIndirectEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectEXT" ) );
+      vkCmdDrawMeshTasksIndirectCountEXT = PFN_vkCmdDrawMeshTasksIndirectCountEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectCountEXT" ) );
+
+  //=== VK_KHR_copy_commands2 ===
+      vkCmdCopyBuffer2KHR = PFN_vkCmdCopyBuffer2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyBuffer2KHR" ) );
+      if ( !vkCmdCopyBuffer2 ) vkCmdCopyBuffer2 = vkCmdCopyBuffer2KHR;
+      vkCmdCopyImage2KHR = PFN_vkCmdCopyImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyImage2KHR" ) );
+      if ( !vkCmdCopyImage2 ) vkCmdCopyImage2 = vkCmdCopyImage2KHR;
+      vkCmdCopyBufferToImage2KHR = PFN_vkCmdCopyBufferToImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyBufferToImage2KHR" ) );
+      if ( !vkCmdCopyBufferToImage2 ) vkCmdCopyBufferToImage2 = vkCmdCopyBufferToImage2KHR;
+      vkCmdCopyImageToBuffer2KHR = PFN_vkCmdCopyImageToBuffer2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyImageToBuffer2KHR" ) );
+      if ( !vkCmdCopyImageToBuffer2 ) vkCmdCopyImageToBuffer2 = vkCmdCopyImageToBuffer2KHR;
+      vkCmdBlitImage2KHR = PFN_vkCmdBlitImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdBlitImage2KHR" ) );
+      if ( !vkCmdBlitImage2 ) vkCmdBlitImage2 = vkCmdBlitImage2KHR;
+      vkCmdResolveImage2KHR = PFN_vkCmdResolveImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdResolveImage2KHR" ) );
+      if ( !vkCmdResolveImage2 ) vkCmdResolveImage2 = vkCmdResolveImage2KHR;
+
+  //=== VK_EXT_image_compression_control ===
+      vkGetImageSubresourceLayout2EXT = PFN_vkGetImageSubresourceLayout2EXT( vkGetInstanceProcAddr( instance, "vkGetImageSubresourceLayout2EXT" ) );
+
+  //=== VK_EXT_device_fault ===
+      vkGetDeviceFaultInfoEXT = PFN_vkGetDeviceFaultInfoEXT( vkGetInstanceProcAddr( instance, "vkGetDeviceFaultInfoEXT" ) );
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_NV_acquire_winrt_display ===
+      vkAcquireWinrtDisplayNV = PFN_vkAcquireWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkAcquireWinrtDisplayNV" ) );
+      vkGetWinrtDisplayNV = PFN_vkGetWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkGetWinrtDisplayNV" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
+  //=== VK_EXT_directfb_surface ===
+      vkCreateDirectFBSurfaceEXT = PFN_vkCreateDirectFBSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateDirectFBSurfaceEXT" ) );
+      vkGetPhysicalDeviceDirectFBPresentationSupportEXT = PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDirectFBPresentationSupportEXT" ) );
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+
+  //=== VK_KHR_ray_tracing_pipeline ===
+      vkCmdTraceRaysKHR = PFN_vkCmdTraceRaysKHR( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysKHR" ) );
+      vkCreateRayTracingPipelinesKHR = PFN_vkCreateRayTracingPipelinesKHR( vkGetInstanceProcAddr( instance, "vkCreateRayTracingPipelinesKHR" ) );
+      vkGetRayTracingShaderGroupHandlesKHR = PFN_vkGetRayTracingShaderGroupHandlesKHR( vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupHandlesKHR" ) );
+      vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( vkGetInstanceProcAddr( instance, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) );
+      vkCmdTraceRaysIndirectKHR = PFN_vkCmdTraceRaysIndirectKHR( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysIndirectKHR" ) );
+      vkGetRayTracingShaderGroupStackSizeKHR = PFN_vkGetRayTracingShaderGroupStackSizeKHR( vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupStackSizeKHR" ) );
+      vkCmdSetRayTracingPipelineStackSizeKHR = PFN_vkCmdSetRayTracingPipelineStackSizeKHR( vkGetInstanceProcAddr( instance, "vkCmdSetRayTracingPipelineStackSizeKHR" ) );
+
+  //=== VK_EXT_vertex_input_dynamic_state ===
+      vkCmdSetVertexInputEXT = PFN_vkCmdSetVertexInputEXT( vkGetInstanceProcAddr( instance, "vkCmdSetVertexInputEXT" ) );
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_external_memory ===
+      vkGetMemoryZirconHandleFUCHSIA = PFN_vkGetMemoryZirconHandleFUCHSIA( vkGetInstanceProcAddr( instance, "vkGetMemoryZirconHandleFUCHSIA" ) );
+      vkGetMemoryZirconHandlePropertiesFUCHSIA = PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA( vkGetInstanceProcAddr( instance, "vkGetMemoryZirconHandlePropertiesFUCHSIA" ) );
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_external_semaphore ===
+      vkImportSemaphoreZirconHandleFUCHSIA = PFN_vkImportSemaphoreZirconHandleFUCHSIA( vkGetInstanceProcAddr( instance, "vkImportSemaphoreZirconHandleFUCHSIA" ) );
+      vkGetSemaphoreZirconHandleFUCHSIA = PFN_vkGetSemaphoreZirconHandleFUCHSIA( vkGetInstanceProcAddr( instance, "vkGetSemaphoreZirconHandleFUCHSIA" ) );
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_buffer_collection ===
+      vkCreateBufferCollectionFUCHSIA = PFN_vkCreateBufferCollectionFUCHSIA( vkGetInstanceProcAddr( instance, "vkCreateBufferCollectionFUCHSIA" ) );
+      vkSetBufferCollectionImageConstraintsFUCHSIA = PFN_vkSetBufferCollectionImageConstraintsFUCHSIA( vkGetInstanceProcAddr( instance, "vkSetBufferCollectionImageConstraintsFUCHSIA" ) );
+      vkSetBufferCollectionBufferConstraintsFUCHSIA = PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA( vkGetInstanceProcAddr( instance, "vkSetBufferCollectionBufferConstraintsFUCHSIA" ) );
+      vkDestroyBufferCollectionFUCHSIA = PFN_vkDestroyBufferCollectionFUCHSIA( vkGetInstanceProcAddr( instance, "vkDestroyBufferCollectionFUCHSIA" ) );
+      vkGetBufferCollectionPropertiesFUCHSIA = PFN_vkGetBufferCollectionPropertiesFUCHSIA( vkGetInstanceProcAddr( instance, "vkGetBufferCollectionPropertiesFUCHSIA" ) );
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  //=== VK_HUAWEI_subpass_shading ===
+      vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( vkGetInstanceProcAddr( instance, "vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI" ) );
+      vkCmdSubpassShadingHUAWEI = PFN_vkCmdSubpassShadingHUAWEI( vkGetInstanceProcAddr( instance, "vkCmdSubpassShadingHUAWEI" ) );
+
+  //=== VK_HUAWEI_invocation_mask ===
+      vkCmdBindInvocationMaskHUAWEI = PFN_vkCmdBindInvocationMaskHUAWEI( vkGetInstanceProcAddr( instance, "vkCmdBindInvocationMaskHUAWEI" ) );
+
+  //=== VK_NV_external_memory_rdma ===
+      vkGetMemoryRemoteAddressNV = PFN_vkGetMemoryRemoteAddressNV( vkGetInstanceProcAddr( instance, "vkGetMemoryRemoteAddressNV" ) );
+
+  //=== VK_EXT_pipeline_properties ===
+      vkGetPipelinePropertiesEXT = PFN_vkGetPipelinePropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPipelinePropertiesEXT" ) );
+
+  //=== VK_EXT_extended_dynamic_state2 ===
+      vkCmdSetPatchControlPointsEXT = PFN_vkCmdSetPatchControlPointsEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPatchControlPointsEXT" ) );
+      vkCmdSetRasterizerDiscardEnableEXT = PFN_vkCmdSetRasterizerDiscardEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetRasterizerDiscardEnableEXT" ) );
+      if ( !vkCmdSetRasterizerDiscardEnable ) vkCmdSetRasterizerDiscardEnable = vkCmdSetRasterizerDiscardEnableEXT;
+      vkCmdSetDepthBiasEnableEXT = PFN_vkCmdSetDepthBiasEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBiasEnableEXT" ) );
+      if ( !vkCmdSetDepthBiasEnable ) vkCmdSetDepthBiasEnable = vkCmdSetDepthBiasEnableEXT;
+      vkCmdSetLogicOpEXT = PFN_vkCmdSetLogicOpEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLogicOpEXT" ) );
+      vkCmdSetPrimitiveRestartEnableEXT = PFN_vkCmdSetPrimitiveRestartEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveRestartEnableEXT" ) );
+      if ( !vkCmdSetPrimitiveRestartEnable ) vkCmdSetPrimitiveRestartEnable = vkCmdSetPrimitiveRestartEnableEXT;
+
+#if defined( VK_USE_PLATFORM_SCREEN_QNX )
+  //=== VK_QNX_screen_surface ===
+      vkCreateScreenSurfaceQNX = PFN_vkCreateScreenSurfaceQNX( vkGetInstanceProcAddr( instance, "vkCreateScreenSurfaceQNX" ) );
+      vkGetPhysicalDeviceScreenPresentationSupportQNX = PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceScreenPresentationSupportQNX" ) );
+#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+
+  //=== VK_EXT_color_write_enable ===
+      vkCmdSetColorWriteEnableEXT = PFN_vkCmdSetColorWriteEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetColorWriteEnableEXT" ) );
+
+  //=== VK_KHR_ray_tracing_maintenance1 ===
+      vkCmdTraceRaysIndirect2KHR = PFN_vkCmdTraceRaysIndirect2KHR( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysIndirect2KHR" ) );
+
+  //=== VK_EXT_multi_draw ===
+      vkCmdDrawMultiEXT = PFN_vkCmdDrawMultiEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMultiEXT" ) );
+      vkCmdDrawMultiIndexedEXT = PFN_vkCmdDrawMultiIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMultiIndexedEXT" ) );
+
+  //=== VK_EXT_opacity_micromap ===
+      vkCreateMicromapEXT = PFN_vkCreateMicromapEXT( vkGetInstanceProcAddr( instance, "vkCreateMicromapEXT" ) );
+      vkDestroyMicromapEXT = PFN_vkDestroyMicromapEXT( vkGetInstanceProcAddr( instance, "vkDestroyMicromapEXT" ) );
+      vkCmdBuildMicromapsEXT = PFN_vkCmdBuildMicromapsEXT( vkGetInstanceProcAddr( instance, "vkCmdBuildMicromapsEXT" ) );
+      vkBuildMicromapsEXT = PFN_vkBuildMicromapsEXT( vkGetInstanceProcAddr( instance, "vkBuildMicromapsEXT" ) );
+      vkCopyMicromapEXT = PFN_vkCopyMicromapEXT( vkGetInstanceProcAddr( instance, "vkCopyMicromapEXT" ) );
+      vkCopyMicromapToMemoryEXT = PFN_vkCopyMicromapToMemoryEXT( vkGetInstanceProcAddr( instance, "vkCopyMicromapToMemoryEXT" ) );
+      vkCopyMemoryToMicromapEXT = PFN_vkCopyMemoryToMicromapEXT( vkGetInstanceProcAddr( instance, "vkCopyMemoryToMicromapEXT" ) );
+      vkWriteMicromapsPropertiesEXT = PFN_vkWriteMicromapsPropertiesEXT( vkGetInstanceProcAddr( instance, "vkWriteMicromapsPropertiesEXT" ) );
+      vkCmdCopyMicromapEXT = PFN_vkCmdCopyMicromapEXT( vkGetInstanceProcAddr( instance, "vkCmdCopyMicromapEXT" ) );
+      vkCmdCopyMicromapToMemoryEXT = PFN_vkCmdCopyMicromapToMemoryEXT( vkGetInstanceProcAddr( instance, "vkCmdCopyMicromapToMemoryEXT" ) );
+      vkCmdCopyMemoryToMicromapEXT = PFN_vkCmdCopyMemoryToMicromapEXT( vkGetInstanceProcAddr( instance, "vkCmdCopyMemoryToMicromapEXT" ) );
+      vkCmdWriteMicromapsPropertiesEXT = PFN_vkCmdWriteMicromapsPropertiesEXT( vkGetInstanceProcAddr( instance, "vkCmdWriteMicromapsPropertiesEXT" ) );
+      vkGetDeviceMicromapCompatibilityEXT = PFN_vkGetDeviceMicromapCompatibilityEXT( vkGetInstanceProcAddr( instance, "vkGetDeviceMicromapCompatibilityEXT" ) );
+      vkGetMicromapBuildSizesEXT = PFN_vkGetMicromapBuildSizesEXT( vkGetInstanceProcAddr( instance, "vkGetMicromapBuildSizesEXT" ) );
+
+  //=== VK_HUAWEI_cluster_culling_shader ===
+      vkCmdDrawClusterHUAWEI = PFN_vkCmdDrawClusterHUAWEI( vkGetInstanceProcAddr( instance, "vkCmdDrawClusterHUAWEI" ) );
+      vkCmdDrawClusterIndirectHUAWEI = PFN_vkCmdDrawClusterIndirectHUAWEI( vkGetInstanceProcAddr( instance, "vkCmdDrawClusterIndirectHUAWEI" ) );
+
+  //=== VK_EXT_pageable_device_local_memory ===
+      vkSetDeviceMemoryPriorityEXT = PFN_vkSetDeviceMemoryPriorityEXT( vkGetInstanceProcAddr( instance, "vkSetDeviceMemoryPriorityEXT" ) );
+
+  //=== VK_KHR_maintenance4 ===
+      vkGetDeviceBufferMemoryRequirementsKHR = PFN_vkGetDeviceBufferMemoryRequirementsKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceBufferMemoryRequirementsKHR" ) );
+      if ( !vkGetDeviceBufferMemoryRequirements ) vkGetDeviceBufferMemoryRequirements = vkGetDeviceBufferMemoryRequirementsKHR;
+      vkGetDeviceImageMemoryRequirementsKHR = PFN_vkGetDeviceImageMemoryRequirementsKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceImageMemoryRequirementsKHR" ) );
+      if ( !vkGetDeviceImageMemoryRequirements ) vkGetDeviceImageMemoryRequirements = vkGetDeviceImageMemoryRequirementsKHR;
+      vkGetDeviceImageSparseMemoryRequirementsKHR = PFN_vkGetDeviceImageSparseMemoryRequirementsKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceImageSparseMemoryRequirementsKHR" ) );
+      if ( !vkGetDeviceImageSparseMemoryRequirements ) vkGetDeviceImageSparseMemoryRequirements = vkGetDeviceImageSparseMemoryRequirementsKHR;
+
+  //=== VK_VALVE_descriptor_set_host_mapping ===
+      vkGetDescriptorSetLayoutHostMappingInfoVALVE = PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutHostMappingInfoVALVE" ) );
+      vkGetDescriptorSetHostMappingVALVE = PFN_vkGetDescriptorSetHostMappingVALVE( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetHostMappingVALVE" ) );
+
+  //=== VK_NV_copy_memory_indirect ===
+      vkCmdCopyMemoryIndirectNV = PFN_vkCmdCopyMemoryIndirectNV( vkGetInstanceProcAddr( instance, "vkCmdCopyMemoryIndirectNV" ) );
+      vkCmdCopyMemoryToImageIndirectNV = PFN_vkCmdCopyMemoryToImageIndirectNV( vkGetInstanceProcAddr( instance, "vkCmdCopyMemoryToImageIndirectNV" ) );
+
+  //=== VK_NV_memory_decompression ===
+      vkCmdDecompressMemoryNV = PFN_vkCmdDecompressMemoryNV( vkGetInstanceProcAddr( instance, "vkCmdDecompressMemoryNV" ) );
+      vkCmdDecompressMemoryIndirectCountNV = PFN_vkCmdDecompressMemoryIndirectCountNV( vkGetInstanceProcAddr( instance, "vkCmdDecompressMemoryIndirectCountNV" ) );
+
+  //=== VK_EXT_extended_dynamic_state3 ===
+      vkCmdSetTessellationDomainOriginEXT = PFN_vkCmdSetTessellationDomainOriginEXT( vkGetInstanceProcAddr( instance, "vkCmdSetTessellationDomainOriginEXT" ) );
+      vkCmdSetDepthClampEnableEXT = PFN_vkCmdSetDepthClampEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthClampEnableEXT" ) );
+      vkCmdSetPolygonModeEXT = PFN_vkCmdSetPolygonModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPolygonModeEXT" ) );
+      vkCmdSetRasterizationSamplesEXT = PFN_vkCmdSetRasterizationSamplesEXT( vkGetInstanceProcAddr( instance, "vkCmdSetRasterizationSamplesEXT" ) );
+      vkCmdSetSampleMaskEXT = PFN_vkCmdSetSampleMaskEXT( vkGetInstanceProcAddr( instance, "vkCmdSetSampleMaskEXT" ) );
+      vkCmdSetAlphaToCoverageEnableEXT = PFN_vkCmdSetAlphaToCoverageEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetAlphaToCoverageEnableEXT" ) );
+      vkCmdSetAlphaToOneEnableEXT = PFN_vkCmdSetAlphaToOneEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetAlphaToOneEnableEXT" ) );
+      vkCmdSetLogicOpEnableEXT = PFN_vkCmdSetLogicOpEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLogicOpEnableEXT" ) );
+      vkCmdSetColorBlendEnableEXT = PFN_vkCmdSetColorBlendEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetColorBlendEnableEXT" ) );
+      vkCmdSetColorBlendEquationEXT = PFN_vkCmdSetColorBlendEquationEXT( vkGetInstanceProcAddr( instance, "vkCmdSetColorBlendEquationEXT" ) );
+      vkCmdSetColorWriteMaskEXT = PFN_vkCmdSetColorWriteMaskEXT( vkGetInstanceProcAddr( instance, "vkCmdSetColorWriteMaskEXT" ) );
+      vkCmdSetRasterizationStreamEXT = PFN_vkCmdSetRasterizationStreamEXT( vkGetInstanceProcAddr( instance, "vkCmdSetRasterizationStreamEXT" ) );
+      vkCmdSetConservativeRasterizationModeEXT = PFN_vkCmdSetConservativeRasterizationModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetConservativeRasterizationModeEXT" ) );
+      vkCmdSetExtraPrimitiveOverestimationSizeEXT = PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetExtraPrimitiveOverestimationSizeEXT" ) );
+      vkCmdSetDepthClipEnableEXT = PFN_vkCmdSetDepthClipEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthClipEnableEXT" ) );
+      vkCmdSetSampleLocationsEnableEXT = PFN_vkCmdSetSampleLocationsEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetSampleLocationsEnableEXT" ) );
+      vkCmdSetColorBlendAdvancedEXT = PFN_vkCmdSetColorBlendAdvancedEXT( vkGetInstanceProcAddr( instance, "vkCmdSetColorBlendAdvancedEXT" ) );
+      vkCmdSetProvokingVertexModeEXT = PFN_vkCmdSetProvokingVertexModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetProvokingVertexModeEXT" ) );
+      vkCmdSetLineRasterizationModeEXT = PFN_vkCmdSetLineRasterizationModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLineRasterizationModeEXT" ) );
+      vkCmdSetLineStippleEnableEXT = PFN_vkCmdSetLineStippleEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLineStippleEnableEXT" ) );
+      vkCmdSetDepthClipNegativeOneToOneEXT = PFN_vkCmdSetDepthClipNegativeOneToOneEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthClipNegativeOneToOneEXT" ) );
+      vkCmdSetViewportWScalingEnableNV = PFN_vkCmdSetViewportWScalingEnableNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWScalingEnableNV" ) );
+      vkCmdSetViewportSwizzleNV = PFN_vkCmdSetViewportSwizzleNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportSwizzleNV" ) );
+      vkCmdSetCoverageToColorEnableNV = PFN_vkCmdSetCoverageToColorEnableNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoverageToColorEnableNV" ) );
+      vkCmdSetCoverageToColorLocationNV = PFN_vkCmdSetCoverageToColorLocationNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoverageToColorLocationNV" ) );
+      vkCmdSetCoverageModulationModeNV = PFN_vkCmdSetCoverageModulationModeNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoverageModulationModeNV" ) );
+      vkCmdSetCoverageModulationTableEnableNV = PFN_vkCmdSetCoverageModulationTableEnableNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoverageModulationTableEnableNV" ) );
+      vkCmdSetCoverageModulationTableNV = PFN_vkCmdSetCoverageModulationTableNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoverageModulationTableNV" ) );
+      vkCmdSetShadingRateImageEnableNV = PFN_vkCmdSetShadingRateImageEnableNV( vkGetInstanceProcAddr( instance, "vkCmdSetShadingRateImageEnableNV" ) );
+      vkCmdSetRepresentativeFragmentTestEnableNV = PFN_vkCmdSetRepresentativeFragmentTestEnableNV( vkGetInstanceProcAddr( instance, "vkCmdSetRepresentativeFragmentTestEnableNV" ) );
+      vkCmdSetCoverageReductionModeNV = PFN_vkCmdSetCoverageReductionModeNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoverageReductionModeNV" ) );
+
+  //=== VK_EXT_shader_module_identifier ===
+      vkGetShaderModuleIdentifierEXT = PFN_vkGetShaderModuleIdentifierEXT( vkGetInstanceProcAddr( instance, "vkGetShaderModuleIdentifierEXT" ) );
+      vkGetShaderModuleCreateInfoIdentifierEXT = PFN_vkGetShaderModuleCreateInfoIdentifierEXT( vkGetInstanceProcAddr( instance, "vkGetShaderModuleCreateInfoIdentifierEXT" ) );
+
+  //=== VK_NV_optical_flow ===
+      vkGetPhysicalDeviceOpticalFlowImageFormatsNV = PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceOpticalFlowImageFormatsNV" ) );
+      vkCreateOpticalFlowSessionNV = PFN_vkCreateOpticalFlowSessionNV( vkGetInstanceProcAddr( instance, "vkCreateOpticalFlowSessionNV" ) );
+      vkDestroyOpticalFlowSessionNV = PFN_vkDestroyOpticalFlowSessionNV( vkGetInstanceProcAddr( instance, "vkDestroyOpticalFlowSessionNV" ) );
+      vkBindOpticalFlowSessionImageNV = PFN_vkBindOpticalFlowSessionImageNV( vkGetInstanceProcAddr( instance, "vkBindOpticalFlowSessionImageNV" ) );
+      vkCmdOpticalFlowExecuteNV = PFN_vkCmdOpticalFlowExecuteNV( vkGetInstanceProcAddr( instance, "vkCmdOpticalFlowExecuteNV" ) );
+
+  //=== VK_QCOM_tile_properties ===
+      vkGetFramebufferTilePropertiesQCOM = PFN_vkGetFramebufferTilePropertiesQCOM( vkGetInstanceProcAddr( instance, "vkGetFramebufferTilePropertiesQCOM" ) );
+      vkGetDynamicRenderingTilePropertiesQCOM = PFN_vkGetDynamicRenderingTilePropertiesQCOM( vkGetInstanceProcAddr( instance, "vkGetDynamicRenderingTilePropertiesQCOM" ) );
+
+    }
+
+    void init( VULKAN_HPP_NAMESPACE::Device deviceCpp ) VULKAN_HPP_NOEXCEPT
+    {
+      VkDevice device = static_cast<VkDevice>(deviceCpp);
+
+
+  //=== VK_VERSION_1_0 ===
+      vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetDeviceProcAddr( device, "vkGetDeviceProcAddr" ) );
+      vkDestroyDevice = PFN_vkDestroyDevice( vkGetDeviceProcAddr( device, "vkDestroyDevice" ) );
+      vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetDeviceProcAddr( device, "vkGetDeviceQueue" ) );
+      vkQueueSubmit = PFN_vkQueueSubmit( vkGetDeviceProcAddr( device, "vkQueueSubmit" ) );
+      vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetDeviceProcAddr( device, "vkQueueWaitIdle" ) );
+      vkDeviceWaitIdle = PFN_vkDeviceWaitIdle( vkGetDeviceProcAddr( device, "vkDeviceWaitIdle" ) );
+      vkAllocateMemory = PFN_vkAllocateMemory( vkGetDeviceProcAddr( device, "vkAllocateMemory" ) );
+      vkFreeMemory = PFN_vkFreeMemory( vkGetDeviceProcAddr( device, "vkFreeMemory" ) );
+      vkMapMemory = PFN_vkMapMemory( vkGetDeviceProcAddr( device, "vkMapMemory" ) );
+      vkUnmapMemory = PFN_vkUnmapMemory( vkGetDeviceProcAddr( device, "vkUnmapMemory" ) );
+      vkFlushMappedMemoryRanges = PFN_vkFlushMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkFlushMappedMemoryRanges" ) );
+      vkInvalidateMappedMemoryRanges = PFN_vkInvalidateMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkInvalidateMappedMemoryRanges" ) );
+      vkGetDeviceMemoryCommitment = PFN_vkGetDeviceMemoryCommitment( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryCommitment" ) );
+      vkBindBufferMemory = PFN_vkBindBufferMemory( vkGetDeviceProcAddr( device, "vkBindBufferMemory" ) );
+      vkBindImageMemory = PFN_vkBindImageMemory( vkGetDeviceProcAddr( device, "vkBindImageMemory" ) );
+      vkGetBufferMemoryRequirements = PFN_vkGetBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements" ) );
+      vkGetImageMemoryRequirements = PFN_vkGetImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements" ) );
+      vkGetImageSparseMemoryRequirements = PFN_vkGetImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements" ) );
+      vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetDeviceProcAddr( device, "vkQueueBindSparse" ) );
+      vkCreateFence = PFN_vkCreateFence( vkGetDeviceProcAddr( device, "vkCreateFence" ) );
+      vkDestroyFence = PFN_vkDestroyFence( vkGetDeviceProcAddr( device, "vkDestroyFence" ) );
+      vkResetFences = PFN_vkResetFences( vkGetDeviceProcAddr( device, "vkResetFences" ) );
+      vkGetFenceStatus = PFN_vkGetFenceStatus( vkGetDeviceProcAddr( device, "vkGetFenceStatus" ) );
+      vkWaitForFences = PFN_vkWaitForFences( vkGetDeviceProcAddr( device, "vkWaitForFences" ) );
+      vkCreateSemaphore = PFN_vkCreateSemaphore( vkGetDeviceProcAddr( device, "vkCreateSemaphore" ) );
+      vkDestroySemaphore = PFN_vkDestroySemaphore( vkGetDeviceProcAddr( device, "vkDestroySemaphore" ) );
+      vkCreateEvent = PFN_vkCreateEvent( vkGetDeviceProcAddr( device, "vkCreateEvent" ) );
+      vkDestroyEvent = PFN_vkDestroyEvent( vkGetDeviceProcAddr( device, "vkDestroyEvent" ) );
+      vkGetEventStatus = PFN_vkGetEventStatus( vkGetDeviceProcAddr( device, "vkGetEventStatus" ) );
+      vkSetEvent = PFN_vkSetEvent( vkGetDeviceProcAddr( device, "vkSetEvent" ) );
+      vkResetEvent = PFN_vkResetEvent( vkGetDeviceProcAddr( device, "vkResetEvent" ) );
+      vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetDeviceProcAddr( device, "vkCreateQueryPool" ) );
+      vkDestroyQueryPool = PFN_vkDestroyQueryPool( vkGetDeviceProcAddr( device, "vkDestroyQueryPool" ) );
+      vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetDeviceProcAddr( device, "vkGetQueryPoolResults" ) );
+      vkCreateBuffer = PFN_vkCreateBuffer( vkGetDeviceProcAddr( device, "vkCreateBuffer" ) );
+      vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetDeviceProcAddr( device, "vkDestroyBuffer" ) );
+      vkCreateBufferView = PFN_vkCreateBufferView( vkGetDeviceProcAddr( device, "vkCreateBufferView" ) );
+      vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetDeviceProcAddr( device, "vkDestroyBufferView" ) );
+      vkCreateImage = PFN_vkCreateImage( vkGetDeviceProcAddr( device, "vkCreateImage" ) );
+      vkDestroyImage = PFN_vkDestroyImage( vkGetDeviceProcAddr( device, "vkDestroyImage" ) );
+      vkGetImageSubresourceLayout = PFN_vkGetImageSubresourceLayout( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout" ) );
+      vkCreateImageView = PFN_vkCreateImageView( vkGetDeviceProcAddr( device, "vkCreateImageView" ) );
+      vkDestroyImageView = PFN_vkDestroyImageView( vkGetDeviceProcAddr( device, "vkDestroyImageView" ) );
+      vkCreateShaderModule = PFN_vkCreateShaderModule( vkGetDeviceProcAddr( device, "vkCreateShaderModule" ) );
+      vkDestroyShaderModule = PFN_vkDestroyShaderModule( vkGetDeviceProcAddr( device, "vkDestroyShaderModule" ) );
+      vkCreatePipelineCache = PFN_vkCreatePipelineCache( vkGetDeviceProcAddr( device, "vkCreatePipelineCache" ) );
+      vkDestroyPipelineCache = PFN_vkDestroyPipelineCache( vkGetDeviceProcAddr( device, "vkDestroyPipelineCache" ) );
+      vkGetPipelineCacheData = PFN_vkGetPipelineCacheData( vkGetDeviceProcAddr( device, "vkGetPipelineCacheData" ) );
+      vkMergePipelineCaches = PFN_vkMergePipelineCaches( vkGetDeviceProcAddr( device, "vkMergePipelineCaches" ) );
+      vkCreateGraphicsPipelines = PFN_vkCreateGraphicsPipelines( vkGetDeviceProcAddr( device, "vkCreateGraphicsPipelines" ) );
+      vkCreateComputePipelines = PFN_vkCreateComputePipelines( vkGetDeviceProcAddr( device, "vkCreateComputePipelines" ) );
+      vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetDeviceProcAddr( device, "vkDestroyPipeline" ) );
+      vkCreatePipelineLayout = PFN_vkCreatePipelineLayout( vkGetDeviceProcAddr( device, "vkCreatePipelineLayout" ) );
+      vkDestroyPipelineLayout = PFN_vkDestroyPipelineLayout( vkGetDeviceProcAddr( device, "vkDestroyPipelineLayout" ) );
+      vkCreateSampler = PFN_vkCreateSampler( vkGetDeviceProcAddr( device, "vkCreateSampler" ) );
+      vkDestroySampler = PFN_vkDestroySampler( vkGetDeviceProcAddr( device, "vkDestroySampler" ) );
+      vkCreateDescriptorSetLayout = PFN_vkCreateDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkCreateDescriptorSetLayout" ) );
+      vkDestroyDescriptorSetLayout = PFN_vkDestroyDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkDestroyDescriptorSetLayout" ) );
+      vkCreateDescriptorPool = PFN_vkCreateDescriptorPool( vkGetDeviceProcAddr( device, "vkCreateDescriptorPool" ) );
+      vkDestroyDescriptorPool = PFN_vkDestroyDescriptorPool( vkGetDeviceProcAddr( device, "vkDestroyDescriptorPool" ) );
+      vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetDeviceProcAddr( device, "vkResetDescriptorPool" ) );
+      vkAllocateDescriptorSets = PFN_vkAllocateDescriptorSets( vkGetDeviceProcAddr( device, "vkAllocateDescriptorSets" ) );
+      vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( vkGetDeviceProcAddr( device, "vkFreeDescriptorSets" ) );
+      vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSets" ) );
+      vkCreateFramebuffer = PFN_vkCreateFramebuffer( vkGetDeviceProcAddr( device, "vkCreateFramebuffer" ) );
+      vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( vkGetDeviceProcAddr( device, "vkDestroyFramebuffer" ) );
+      vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetDeviceProcAddr( device, "vkCreateRenderPass" ) );
+      vkDestroyRenderPass = PFN_vkDestroyRenderPass( vkGetDeviceProcAddr( device, "vkDestroyRenderPass" ) );
+      vkGetRenderAreaGranularity = PFN_vkGetRenderAreaGranularity( vkGetDeviceProcAddr( device, "vkGetRenderAreaGranularity" ) );
+      vkCreateCommandPool = PFN_vkCreateCommandPool( vkGetDeviceProcAddr( device, "vkCreateCommandPool" ) );
+      vkDestroyCommandPool = PFN_vkDestroyCommandPool( vkGetDeviceProcAddr( device, "vkDestroyCommandPool" ) );
+      vkResetCommandPool = PFN_vkResetCommandPool( vkGetDeviceProcAddr( device, "vkResetCommandPool" ) );
+      vkAllocateCommandBuffers = PFN_vkAllocateCommandBuffers( vkGetDeviceProcAddr( device, "vkAllocateCommandBuffers" ) );
+      vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( vkGetDeviceProcAddr( device, "vkFreeCommandBuffers" ) );
+      vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetDeviceProcAddr( device, "vkBeginCommandBuffer" ) );
+      vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetDeviceProcAddr( device, "vkEndCommandBuffer" ) );
+      vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetDeviceProcAddr( device, "vkResetCommandBuffer" ) );
+      vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetDeviceProcAddr( device, "vkCmdBindPipeline" ) );
+      vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetDeviceProcAddr( device, "vkCmdSetViewport" ) );
+      vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetDeviceProcAddr( device, "vkCmdSetScissor" ) );
+      vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetDeviceProcAddr( device, "vkCmdSetLineWidth" ) );
+      vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetDeviceProcAddr( device, "vkCmdSetDepthBias" ) );
+      vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants( vkGetDeviceProcAddr( device, "vkCmdSetBlendConstants" ) );
+      vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetDeviceProcAddr( device, "vkCmdSetDepthBounds" ) );
+      vkCmdSetStencilCompareMask = PFN_vkCmdSetStencilCompareMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilCompareMask" ) );
+      vkCmdSetStencilWriteMask = PFN_vkCmdSetStencilWriteMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilWriteMask" ) );
+      vkCmdSetStencilReference = PFN_vkCmdSetStencilReference( vkGetDeviceProcAddr( device, "vkCmdSetStencilReference" ) );
+      vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets" ) );
+      vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer" ) );
+      vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers" ) );
+      vkCmdDraw = PFN_vkCmdDraw( vkGetDeviceProcAddr( device, "vkCmdDraw" ) );
+      vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetDeviceProcAddr( device, "vkCmdDrawIndexed" ) );
+      vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndirect" ) );
+      vkCmdDrawIndexedIndirect = PFN_vkCmdDrawIndexedIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirect" ) );
+      vkCmdDispatch = PFN_vkCmdDispatch( vkGetDeviceProcAddr( device, "vkCmdDispatch" ) );
+      vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( vkGetDeviceProcAddr( device, "vkCmdDispatchIndirect" ) );
+      vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer" ) );
+      vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetDeviceProcAddr( device, "vkCmdCopyImage" ) );
+      vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetDeviceProcAddr( device, "vkCmdBlitImage" ) );
+      vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage" ) );
+      vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer" ) );
+      vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetDeviceProcAddr( device, "vkCmdUpdateBuffer" ) );
+      vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetDeviceProcAddr( device, "vkCmdFillBuffer" ) );
+      vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetDeviceProcAddr( device, "vkCmdClearColorImage" ) );
+      vkCmdClearDepthStencilImage = PFN_vkCmdClearDepthStencilImage( vkGetDeviceProcAddr( device, "vkCmdClearDepthStencilImage" ) );
+      vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetDeviceProcAddr( device, "vkCmdClearAttachments" ) );
+      vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetDeviceProcAddr( device, "vkCmdResolveImage" ) );
+      vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetDeviceProcAddr( device, "vkCmdSetEvent" ) );
+      vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetDeviceProcAddr( device, "vkCmdResetEvent" ) );
+      vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetDeviceProcAddr( device, "vkCmdWaitEvents" ) );
+      vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier" ) );
+      vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetDeviceProcAddr( device, "vkCmdBeginQuery" ) );
+      vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetDeviceProcAddr( device, "vkCmdEndQuery" ) );
+      vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetDeviceProcAddr( device, "vkCmdResetQueryPool" ) );
+      vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp" ) );
+      vkCmdCopyQueryPoolResults = PFN_vkCmdCopyQueryPoolResults( vkGetDeviceProcAddr( device, "vkCmdCopyQueryPoolResults" ) );
+      vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetDeviceProcAddr( device, "vkCmdPushConstants" ) );
+      vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass" ) );
+      vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetDeviceProcAddr( device, "vkCmdNextSubpass" ) );
+      vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass" ) );
+      vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetDeviceProcAddr( device, "vkCmdExecuteCommands" ) );
+
+  //=== VK_VERSION_1_1 ===
+      vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetDeviceProcAddr( device, "vkBindBufferMemory2" ) );
+      vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetDeviceProcAddr( device, "vkBindImageMemory2" ) );
+      vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeatures" ) );
+      vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMask" ) );
+      vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetDeviceProcAddr( device, "vkCmdDispatchBase" ) );
+      vkGetImageMemoryRequirements2 = PFN_vkGetImageMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2" ) );
+      vkGetBufferMemoryRequirements2 = PFN_vkGetBufferMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2" ) );
+      vkGetImageSparseMemoryRequirements2 = PFN_vkGetImageSparseMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2" ) );
+      vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetDeviceProcAddr( device, "vkTrimCommandPool" ) );
+      vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetDeviceProcAddr( device, "vkGetDeviceQueue2" ) );
+      vkCreateSamplerYcbcrConversion = PFN_vkCreateSamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversion" ) );
+      vkDestroySamplerYcbcrConversion = PFN_vkDestroySamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversion" ) );
+      vkCreateDescriptorUpdateTemplate = PFN_vkCreateDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplate" ) );
+      vkDestroyDescriptorUpdateTemplate = PFN_vkDestroyDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplate" ) );
+      vkUpdateDescriptorSetWithTemplate = PFN_vkUpdateDescriptorSetWithTemplate( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplate" ) );
+      vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupport" ) );
+
+  //=== VK_VERSION_1_2 ===
+      vkCmdDrawIndirectCount = PFN_vkCmdDrawIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCount" ) );
+      vkCmdDrawIndexedIndirectCount = PFN_vkCmdDrawIndexedIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCount" ) );
+      vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetDeviceProcAddr( device, "vkCreateRenderPass2" ) );
+      vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2" ) );
+      vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2" ) );
+      vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2" ) );
+      vkResetQueryPool = PFN_vkResetQueryPool( vkGetDeviceProcAddr( device, "vkResetQueryPool" ) );
+      vkGetSemaphoreCounterValue = PFN_vkGetSemaphoreCounterValue( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValue" ) );
+      vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetDeviceProcAddr( device, "vkWaitSemaphores" ) );
+      vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetDeviceProcAddr( device, "vkSignalSemaphore" ) );
+      vkGetBufferDeviceAddress = PFN_vkGetBufferDeviceAddress( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddress" ) );
+      vkGetBufferOpaqueCaptureAddress = PFN_vkGetBufferOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddress" ) );
+      vkGetDeviceMemoryOpaqueCaptureAddress = PFN_vkGetDeviceMemoryOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddress" ) );
+
+  //=== VK_VERSION_1_3 ===
+      vkCreatePrivateDataSlot = PFN_vkCreatePrivateDataSlot( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlot" ) );
+      vkDestroyPrivateDataSlot = PFN_vkDestroyPrivateDataSlot( vkGetDeviceProcAddr( device, "vkDestroyPrivateDataSlot" ) );
+      vkSetPrivateData = PFN_vkSetPrivateData( vkGetDeviceProcAddr( device, "vkSetPrivateData" ) );
+      vkGetPrivateData = PFN_vkGetPrivateData( vkGetDeviceProcAddr( device, "vkGetPrivateData" ) );
+      vkCmdSetEvent2 = PFN_vkCmdSetEvent2( vkGetDeviceProcAddr( device, "vkCmdSetEvent2" ) );
+      vkCmdResetEvent2 = PFN_vkCmdResetEvent2( vkGetDeviceProcAddr( device, "vkCmdResetEvent2" ) );
+      vkCmdWaitEvents2 = PFN_vkCmdWaitEvents2( vkGetDeviceProcAddr( device, "vkCmdWaitEvents2" ) );
+      vkCmdPipelineBarrier2 = PFN_vkCmdPipelineBarrier2( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier2" ) );
+      vkCmdWriteTimestamp2 = PFN_vkCmdWriteTimestamp2( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp2" ) );
+      vkQueueSubmit2 = PFN_vkQueueSubmit2( vkGetDeviceProcAddr( device, "vkQueueSubmit2" ) );
+      vkCmdCopyBuffer2 = PFN_vkCmdCopyBuffer2( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer2" ) );
+      vkCmdCopyImage2 = PFN_vkCmdCopyImage2( vkGetDeviceProcAddr( device, "vkCmdCopyImage2" ) );
+      vkCmdCopyBufferToImage2 = PFN_vkCmdCopyBufferToImage2( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage2" ) );
+      vkCmdCopyImageToBuffer2 = PFN_vkCmdCopyImageToBuffer2( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer2" ) );
+      vkCmdBlitImage2 = PFN_vkCmdBlitImage2( vkGetDeviceProcAddr( device, "vkCmdBlitImage2" ) );
+      vkCmdResolveImage2 = PFN_vkCmdResolveImage2( vkGetDeviceProcAddr( device, "vkCmdResolveImage2" ) );
+      vkCmdBeginRendering = PFN_vkCmdBeginRendering( vkGetDeviceProcAddr( device, "vkCmdBeginRendering" ) );
+      vkCmdEndRendering = PFN_vkCmdEndRendering( vkGetDeviceProcAddr( device, "vkCmdEndRendering" ) );
+      vkCmdSetCullMode = PFN_vkCmdSetCullMode( vkGetDeviceProcAddr( device, "vkCmdSetCullMode" ) );
+      vkCmdSetFrontFace = PFN_vkCmdSetFrontFace( vkGetDeviceProcAddr( device, "vkCmdSetFrontFace" ) );
+      vkCmdSetPrimitiveTopology = PFN_vkCmdSetPrimitiveTopology( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveTopology" ) );
+      vkCmdSetViewportWithCount = PFN_vkCmdSetViewportWithCount( vkGetDeviceProcAddr( device, "vkCmdSetViewportWithCount" ) );
+      vkCmdSetScissorWithCount = PFN_vkCmdSetScissorWithCount( vkGetDeviceProcAddr( device, "vkCmdSetScissorWithCount" ) );
+      vkCmdBindVertexBuffers2 = PFN_vkCmdBindVertexBuffers2( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers2" ) );
+      vkCmdSetDepthTestEnable = PFN_vkCmdSetDepthTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthTestEnable" ) );
+      vkCmdSetDepthWriteEnable = PFN_vkCmdSetDepthWriteEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthWriteEnable" ) );
+      vkCmdSetDepthCompareOp = PFN_vkCmdSetDepthCompareOp( vkGetDeviceProcAddr( device, "vkCmdSetDepthCompareOp" ) );
+      vkCmdSetDepthBoundsTestEnable = PFN_vkCmdSetDepthBoundsTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthBoundsTestEnable" ) );
+      vkCmdSetStencilTestEnable = PFN_vkCmdSetStencilTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetStencilTestEnable" ) );
+      vkCmdSetStencilOp = PFN_vkCmdSetStencilOp( vkGetDeviceProcAddr( device, "vkCmdSetStencilOp" ) );
+      vkCmdSetRasterizerDiscardEnable = PFN_vkCmdSetRasterizerDiscardEnable( vkGetDeviceProcAddr( device, "vkCmdSetRasterizerDiscardEnable" ) );
+      vkCmdSetDepthBiasEnable = PFN_vkCmdSetDepthBiasEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthBiasEnable" ) );
+      vkCmdSetPrimitiveRestartEnable = PFN_vkCmdSetPrimitiveRestartEnable( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveRestartEnable" ) );
+      vkGetDeviceBufferMemoryRequirements = PFN_vkGetDeviceBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceBufferMemoryRequirements" ) );
+      vkGetDeviceImageMemoryRequirements = PFN_vkGetDeviceImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceImageMemoryRequirements" ) );
+      vkGetDeviceImageSparseMemoryRequirements = PFN_vkGetDeviceImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceImageSparseMemoryRequirements" ) );
+
+  //=== VK_KHR_swapchain ===
+      vkCreateSwapchainKHR = PFN_vkCreateSwapchainKHR( vkGetDeviceProcAddr( device, "vkCreateSwapchainKHR" ) );
+      vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR( vkGetDeviceProcAddr( device, "vkDestroySwapchainKHR" ) );
+      vkGetSwapchainImagesKHR = PFN_vkGetSwapchainImagesKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainImagesKHR" ) );
+      vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( vkGetDeviceProcAddr( device, "vkAcquireNextImageKHR" ) );
+      vkQueuePresentKHR = PFN_vkQueuePresentKHR( vkGetDeviceProcAddr( device, "vkQueuePresentKHR" ) );
+      vkGetDeviceGroupPresentCapabilitiesKHR = PFN_vkGetDeviceGroupPresentCapabilitiesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPresentCapabilitiesKHR" ) );
+      vkGetDeviceGroupSurfacePresentModesKHR = PFN_vkGetDeviceGroupSurfacePresentModesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModesKHR" ) );
+      vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR( vkGetDeviceProcAddr( device, "vkAcquireNextImage2KHR" ) );
+
+  //=== VK_KHR_display_swapchain ===
+      vkCreateSharedSwapchainsKHR = PFN_vkCreateSharedSwapchainsKHR( vkGetDeviceProcAddr( device, "vkCreateSharedSwapchainsKHR" ) );
+
+  //=== VK_EXT_debug_marker ===
+      vkDebugMarkerSetObjectTagEXT = PFN_vkDebugMarkerSetObjectTagEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectTagEXT" ) );
+      vkDebugMarkerSetObjectNameEXT = PFN_vkDebugMarkerSetObjectNameEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectNameEXT" ) );
+      vkCmdDebugMarkerBeginEXT = PFN_vkCmdDebugMarkerBeginEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerBeginEXT" ) );
+      vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerEndEXT" ) );
+      vkCmdDebugMarkerInsertEXT = PFN_vkCmdDebugMarkerInsertEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerInsertEXT" ) );
+
+  //=== VK_KHR_video_queue ===
+      vkCreateVideoSessionKHR = PFN_vkCreateVideoSessionKHR( vkGetDeviceProcAddr( device, "vkCreateVideoSessionKHR" ) );
+      vkDestroyVideoSessionKHR = PFN_vkDestroyVideoSessionKHR( vkGetDeviceProcAddr( device, "vkDestroyVideoSessionKHR" ) );
+      vkGetVideoSessionMemoryRequirementsKHR = PFN_vkGetVideoSessionMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetVideoSessionMemoryRequirementsKHR" ) );
+      vkBindVideoSessionMemoryKHR = PFN_vkBindVideoSessionMemoryKHR( vkGetDeviceProcAddr( device, "vkBindVideoSessionMemoryKHR" ) );
+      vkCreateVideoSessionParametersKHR = PFN_vkCreateVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkCreateVideoSessionParametersKHR" ) );
+      vkUpdateVideoSessionParametersKHR = PFN_vkUpdateVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkUpdateVideoSessionParametersKHR" ) );
+      vkDestroyVideoSessionParametersKHR = PFN_vkDestroyVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkDestroyVideoSessionParametersKHR" ) );
+      vkCmdBeginVideoCodingKHR = PFN_vkCmdBeginVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdBeginVideoCodingKHR" ) );
+      vkCmdEndVideoCodingKHR = PFN_vkCmdEndVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdEndVideoCodingKHR" ) );
+      vkCmdControlVideoCodingKHR = PFN_vkCmdControlVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdControlVideoCodingKHR" ) );
+
+  //=== VK_KHR_video_decode_queue ===
+      vkCmdDecodeVideoKHR = PFN_vkCmdDecodeVideoKHR( vkGetDeviceProcAddr( device, "vkCmdDecodeVideoKHR" ) );
+
+  //=== VK_EXT_transform_feedback ===
+      vkCmdBindTransformFeedbackBuffersEXT = PFN_vkCmdBindTransformFeedbackBuffersEXT( vkGetDeviceProcAddr( device, "vkCmdBindTransformFeedbackBuffersEXT" ) );
+      vkCmdBeginTransformFeedbackEXT = PFN_vkCmdBeginTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdBeginTransformFeedbackEXT" ) );
+      vkCmdEndTransformFeedbackEXT = PFN_vkCmdEndTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdEndTransformFeedbackEXT" ) );
+      vkCmdBeginQueryIndexedEXT = PFN_vkCmdBeginQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdBeginQueryIndexedEXT" ) );
+      vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdEndQueryIndexedEXT" ) );
+      vkCmdDrawIndirectByteCountEXT = PFN_vkCmdDrawIndirectByteCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectByteCountEXT" ) );
+
+  //=== VK_NVX_binary_import ===
+      vkCreateCuModuleNVX = PFN_vkCreateCuModuleNVX( vkGetDeviceProcAddr( device, "vkCreateCuModuleNVX" ) );
+      vkCreateCuFunctionNVX = PFN_vkCreateCuFunctionNVX( vkGetDeviceProcAddr( device, "vkCreateCuFunctionNVX" ) );
+      vkDestroyCuModuleNVX = PFN_vkDestroyCuModuleNVX( vkGetDeviceProcAddr( device, "vkDestroyCuModuleNVX" ) );
+      vkDestroyCuFunctionNVX = PFN_vkDestroyCuFunctionNVX( vkGetDeviceProcAddr( device, "vkDestroyCuFunctionNVX" ) );
+      vkCmdCuLaunchKernelNVX = PFN_vkCmdCuLaunchKernelNVX( vkGetDeviceProcAddr( device, "vkCmdCuLaunchKernelNVX" ) );
+
+  //=== VK_NVX_image_view_handle ===
+      vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetDeviceProcAddr( device, "vkGetImageViewHandleNVX" ) );
+      vkGetImageViewAddressNVX = PFN_vkGetImageViewAddressNVX( vkGetDeviceProcAddr( device, "vkGetImageViewAddressNVX" ) );
+
+  //=== VK_AMD_draw_indirect_count ===
+      vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountAMD" ) );
+      if ( !vkCmdDrawIndirectCount ) vkCmdDrawIndirectCount = vkCmdDrawIndirectCountAMD;
+      vkCmdDrawIndexedIndirectCountAMD = PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountAMD" ) );
+      if ( !vkCmdDrawIndexedIndirectCount ) vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountAMD;
+
+  //=== VK_AMD_shader_info ===
+      vkGetShaderInfoAMD = PFN_vkGetShaderInfoAMD( vkGetDeviceProcAddr( device, "vkGetShaderInfoAMD" ) );
+
+  //=== VK_KHR_dynamic_rendering ===
+      vkCmdBeginRenderingKHR = PFN_vkCmdBeginRenderingKHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderingKHR" ) );
+      if ( !vkCmdBeginRendering ) vkCmdBeginRendering = vkCmdBeginRenderingKHR;
+      vkCmdEndRenderingKHR = PFN_vkCmdEndRenderingKHR( vkGetDeviceProcAddr( device, "vkCmdEndRenderingKHR" ) );
+      if ( !vkCmdEndRendering ) vkCmdEndRendering = vkCmdEndRenderingKHR;
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_NV_external_memory_win32 ===
+      vkGetMemoryWin32HandleNV = PFN_vkGetMemoryWin32HandleNV( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleNV" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_device_group ===
+      vkGetDeviceGroupPeerMemoryFeaturesKHR = PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) );
+      if ( !vkGetDeviceGroupPeerMemoryFeatures ) vkGetDeviceGroupPeerMemoryFeatures = vkGetDeviceGroupPeerMemoryFeaturesKHR;
+      vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMaskKHR" ) );
+      if ( !vkCmdSetDeviceMask ) vkCmdSetDeviceMask = vkCmdSetDeviceMaskKHR;
+      vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( vkGetDeviceProcAddr( device, "vkCmdDispatchBaseKHR" ) );
+      if ( !vkCmdDispatchBase ) vkCmdDispatchBase = vkCmdDispatchBaseKHR;
+
+  //=== VK_KHR_maintenance1 ===
+      vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetDeviceProcAddr( device, "vkTrimCommandPoolKHR" ) );
+      if ( !vkTrimCommandPool ) vkTrimCommandPool = vkTrimCommandPoolKHR;
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_external_memory_win32 ===
+      vkGetMemoryWin32HandleKHR = PFN_vkGetMemoryWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleKHR" ) );
+      vkGetMemoryWin32HandlePropertiesKHR = PFN_vkGetMemoryWin32HandlePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandlePropertiesKHR" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_external_memory_fd ===
+      vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdKHR" ) );
+      vkGetMemoryFdPropertiesKHR = PFN_vkGetMemoryFdPropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdPropertiesKHR" ) );
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_external_semaphore_win32 ===
+      vkImportSemaphoreWin32HandleKHR = PFN_vkImportSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreWin32HandleKHR" ) );
+      vkGetSemaphoreWin32HandleKHR = PFN_vkGetSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreWin32HandleKHR" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_external_semaphore_fd ===
+      vkImportSemaphoreFdKHR = PFN_vkImportSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreFdKHR" ) );
+      vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreFdKHR" ) );
+
+  //=== VK_KHR_push_descriptor ===
+      vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetKHR" ) );
+      vkCmdPushDescriptorSetWithTemplateKHR = PFN_vkCmdPushDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplateKHR" ) );
+
+  //=== VK_EXT_conditional_rendering ===
+      vkCmdBeginConditionalRenderingEXT = PFN_vkCmdBeginConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdBeginConditionalRenderingEXT" ) );
+      vkCmdEndConditionalRenderingEXT = PFN_vkCmdEndConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdEndConditionalRenderingEXT" ) );
+
+  //=== VK_KHR_descriptor_update_template ===
+      vkCreateDescriptorUpdateTemplateKHR = PFN_vkCreateDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplateKHR" ) );
+      if ( !vkCreateDescriptorUpdateTemplate ) vkCreateDescriptorUpdateTemplate = vkCreateDescriptorUpdateTemplateKHR;
+      vkDestroyDescriptorUpdateTemplateKHR = PFN_vkDestroyDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplateKHR" ) );
+      if ( !vkDestroyDescriptorUpdateTemplate ) vkDestroyDescriptorUpdateTemplate = vkDestroyDescriptorUpdateTemplateKHR;
+      vkUpdateDescriptorSetWithTemplateKHR = PFN_vkUpdateDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplateKHR" ) );
+      if ( !vkUpdateDescriptorSetWithTemplate ) vkUpdateDescriptorSetWithTemplate = vkUpdateDescriptorSetWithTemplateKHR;
+
+  //=== VK_NV_clip_space_w_scaling ===
+      vkCmdSetViewportWScalingNV = PFN_vkCmdSetViewportWScalingNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingNV" ) );
+
+  //=== VK_EXT_display_control ===
+      vkDisplayPowerControlEXT = PFN_vkDisplayPowerControlEXT( vkGetDeviceProcAddr( device, "vkDisplayPowerControlEXT" ) );
+      vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDeviceEventEXT" ) );
+      vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDisplayEventEXT" ) );
+      vkGetSwapchainCounterEXT = PFN_vkGetSwapchainCounterEXT( vkGetDeviceProcAddr( device, "vkGetSwapchainCounterEXT" ) );
+
+  //=== VK_GOOGLE_display_timing ===
+      vkGetRefreshCycleDurationGOOGLE = PFN_vkGetRefreshCycleDurationGOOGLE( vkGetDeviceProcAddr( device, "vkGetRefreshCycleDurationGOOGLE" ) );
+      vkGetPastPresentationTimingGOOGLE = PFN_vkGetPastPresentationTimingGOOGLE( vkGetDeviceProcAddr( device, "vkGetPastPresentationTimingGOOGLE" ) );
+
+  //=== VK_EXT_discard_rectangles ===
+      vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleEXT" ) );
+
+  //=== VK_EXT_hdr_metadata ===
+      vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetDeviceProcAddr( device, "vkSetHdrMetadataEXT" ) );
+
+  //=== VK_KHR_create_renderpass2 ===
+      vkCreateRenderPass2KHR = PFN_vkCreateRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCreateRenderPass2KHR" ) );
+      if ( !vkCreateRenderPass2 ) vkCreateRenderPass2 = vkCreateRenderPass2KHR;
+      vkCmdBeginRenderPass2KHR = PFN_vkCmdBeginRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2KHR" ) );
+      if ( !vkCmdBeginRenderPass2 ) vkCmdBeginRenderPass2 = vkCmdBeginRenderPass2KHR;
+      vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2KHR" ) );
+      if ( !vkCmdNextSubpass2 ) vkCmdNextSubpass2 = vkCmdNextSubpass2KHR;
+      vkCmdEndRenderPass2KHR = PFN_vkCmdEndRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2KHR" ) );
+      if ( !vkCmdEndRenderPass2 ) vkCmdEndRenderPass2 = vkCmdEndRenderPass2KHR;
+
+  //=== VK_KHR_shared_presentable_image ===
+      vkGetSwapchainStatusKHR = PFN_vkGetSwapchainStatusKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainStatusKHR" ) );
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_external_fence_win32 ===
+      vkImportFenceWin32HandleKHR = PFN_vkImportFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportFenceWin32HandleKHR" ) );
+      vkGetFenceWin32HandleKHR = PFN_vkGetFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetFenceWin32HandleKHR" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_external_fence_fd ===
+      vkImportFenceFdKHR = PFN_vkImportFenceFdKHR( vkGetDeviceProcAddr( device, "vkImportFenceFdKHR" ) );
+      vkGetFenceFdKHR = PFN_vkGetFenceFdKHR( vkGetDeviceProcAddr( device, "vkGetFenceFdKHR" ) );
+
+  //=== VK_KHR_performance_query ===
+      vkAcquireProfilingLockKHR = PFN_vkAcquireProfilingLockKHR( vkGetDeviceProcAddr( device, "vkAcquireProfilingLockKHR" ) );
+      vkReleaseProfilingLockKHR = PFN_vkReleaseProfilingLockKHR( vkGetDeviceProcAddr( device, "vkReleaseProfilingLockKHR" ) );
+
+  //=== VK_EXT_debug_utils ===
+      vkSetDebugUtilsObjectNameEXT = PFN_vkSetDebugUtilsObjectNameEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectNameEXT" ) );
+      vkSetDebugUtilsObjectTagEXT = PFN_vkSetDebugUtilsObjectTagEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectTagEXT" ) );
+      vkQueueBeginDebugUtilsLabelEXT = PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueBeginDebugUtilsLabelEXT" ) );
+      vkQueueEndDebugUtilsLabelEXT = PFN_vkQueueEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueEndDebugUtilsLabelEXT" ) );
+      vkQueueInsertDebugUtilsLabelEXT = PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueInsertDebugUtilsLabelEXT" ) );
+      vkCmdBeginDebugUtilsLabelEXT = PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdBeginDebugUtilsLabelEXT" ) );
+      vkCmdEndDebugUtilsLabelEXT = PFN_vkCmdEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdEndDebugUtilsLabelEXT" ) );
+      vkCmdInsertDebugUtilsLabelEXT = PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdInsertDebugUtilsLabelEXT" ) );
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  //=== VK_ANDROID_external_memory_android_hardware_buffer ===
+      vkGetAndroidHardwareBufferPropertiesANDROID = PFN_vkGetAndroidHardwareBufferPropertiesANDROID( vkGetDeviceProcAddr( device, "vkGetAndroidHardwareBufferPropertiesANDROID" ) );
+      vkGetMemoryAndroidHardwareBufferANDROID = PFN_vkGetMemoryAndroidHardwareBufferANDROID( vkGetDeviceProcAddr( device, "vkGetMemoryAndroidHardwareBufferANDROID" ) );
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+  //=== VK_EXT_sample_locations ===
+      vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEXT" ) );
+
+  //=== VK_KHR_get_memory_requirements2 ===
+      vkGetImageMemoryRequirements2KHR = PFN_vkGetImageMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2KHR" ) );
+      if ( !vkGetImageMemoryRequirements2 ) vkGetImageMemoryRequirements2 = vkGetImageMemoryRequirements2KHR;
+      vkGetBufferMemoryRequirements2KHR = PFN_vkGetBufferMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2KHR" ) );
+      if ( !vkGetBufferMemoryRequirements2 ) vkGetBufferMemoryRequirements2 = vkGetBufferMemoryRequirements2KHR;
+      vkGetImageSparseMemoryRequirements2KHR = PFN_vkGetImageSparseMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2KHR" ) );
+      if ( !vkGetImageSparseMemoryRequirements2 ) vkGetImageSparseMemoryRequirements2 = vkGetImageSparseMemoryRequirements2KHR;
+
+  //=== VK_KHR_acceleration_structure ===
+      vkCreateAccelerationStructureKHR = PFN_vkCreateAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureKHR" ) );
+      vkDestroyAccelerationStructureKHR = PFN_vkDestroyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureKHR" ) );
+      vkCmdBuildAccelerationStructuresKHR = PFN_vkCmdBuildAccelerationStructuresKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructuresKHR" ) );
+      vkCmdBuildAccelerationStructuresIndirectKHR = PFN_vkCmdBuildAccelerationStructuresIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructuresIndirectKHR" ) );
+      vkBuildAccelerationStructuresKHR = PFN_vkBuildAccelerationStructuresKHR( vkGetDeviceProcAddr( device, "vkBuildAccelerationStructuresKHR" ) );
+      vkCopyAccelerationStructureKHR = PFN_vkCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureKHR" ) );
+      vkCopyAccelerationStructureToMemoryKHR = PFN_vkCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureToMemoryKHR" ) );
+      vkCopyMemoryToAccelerationStructureKHR = PFN_vkCopyMemoryToAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyMemoryToAccelerationStructureKHR" ) );
+      vkWriteAccelerationStructuresPropertiesKHR = PFN_vkWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, "vkWriteAccelerationStructuresPropertiesKHR" ) );
+      vkCmdCopyAccelerationStructureKHR = PFN_vkCmdCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureKHR" ) );
+      vkCmdCopyAccelerationStructureToMemoryKHR = PFN_vkCmdCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureToMemoryKHR" ) );
+      vkCmdCopyMemoryToAccelerationStructureKHR = PFN_vkCmdCopyMemoryToAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToAccelerationStructureKHR" ) );
+      vkGetAccelerationStructureDeviceAddressKHR = PFN_vkGetAccelerationStructureDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureDeviceAddressKHR" ) );
+      vkCmdWriteAccelerationStructuresPropertiesKHR = PFN_vkCmdWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) );
+      vkGetDeviceAccelerationStructureCompatibilityKHR = PFN_vkGetDeviceAccelerationStructureCompatibilityKHR( vkGetDeviceProcAddr( device, "vkGetDeviceAccelerationStructureCompatibilityKHR" ) );
+      vkGetAccelerationStructureBuildSizesKHR = PFN_vkGetAccelerationStructureBuildSizesKHR( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureBuildSizesKHR" ) );
+
+  //=== VK_KHR_sampler_ycbcr_conversion ===
+      vkCreateSamplerYcbcrConversionKHR = PFN_vkCreateSamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversionKHR" ) );
+      if ( !vkCreateSamplerYcbcrConversion ) vkCreateSamplerYcbcrConversion = vkCreateSamplerYcbcrConversionKHR;
+      vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversionKHR" ) );
+      if ( !vkDestroySamplerYcbcrConversion ) vkDestroySamplerYcbcrConversion = vkDestroySamplerYcbcrConversionKHR;
+
+  //=== VK_KHR_bind_memory2 ===
+      vkBindBufferMemory2KHR = PFN_vkBindBufferMemory2KHR( vkGetDeviceProcAddr( device, "vkBindBufferMemory2KHR" ) );
+      if ( !vkBindBufferMemory2 ) vkBindBufferMemory2 = vkBindBufferMemory2KHR;
+      vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR( vkGetDeviceProcAddr( device, "vkBindImageMemory2KHR" ) );
+      if ( !vkBindImageMemory2 ) vkBindImageMemory2 = vkBindImageMemory2KHR;
+
+  //=== VK_EXT_image_drm_format_modifier ===
+      vkGetImageDrmFormatModifierPropertiesEXT = PFN_vkGetImageDrmFormatModifierPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetImageDrmFormatModifierPropertiesEXT" ) );
+
+  //=== VK_EXT_validation_cache ===
+      vkCreateValidationCacheEXT = PFN_vkCreateValidationCacheEXT( vkGetDeviceProcAddr( device, "vkCreateValidationCacheEXT" ) );
+      vkDestroyValidationCacheEXT = PFN_vkDestroyValidationCacheEXT( vkGetDeviceProcAddr( device, "vkDestroyValidationCacheEXT" ) );
+      vkMergeValidationCachesEXT = PFN_vkMergeValidationCachesEXT( vkGetDeviceProcAddr( device, "vkMergeValidationCachesEXT" ) );
+      vkGetValidationCacheDataEXT = PFN_vkGetValidationCacheDataEXT( vkGetDeviceProcAddr( device, "vkGetValidationCacheDataEXT" ) );
+
+  //=== VK_NV_shading_rate_image ===
+      vkCmdBindShadingRateImageNV = PFN_vkCmdBindShadingRateImageNV( vkGetDeviceProcAddr( device, "vkCmdBindShadingRateImageNV" ) );
+      vkCmdSetViewportShadingRatePaletteNV = PFN_vkCmdSetViewportShadingRatePaletteNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportShadingRatePaletteNV" ) );
+      vkCmdSetCoarseSampleOrderNV = PFN_vkCmdSetCoarseSampleOrderNV( vkGetDeviceProcAddr( device, "vkCmdSetCoarseSampleOrderNV" ) );
+
+  //=== VK_NV_ray_tracing ===
+      vkCreateAccelerationStructureNV = PFN_vkCreateAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureNV" ) );
+      vkDestroyAccelerationStructureNV = PFN_vkDestroyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureNV" ) );
+      vkGetAccelerationStructureMemoryRequirementsNV = PFN_vkGetAccelerationStructureMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureMemoryRequirementsNV" ) );
+      vkBindAccelerationStructureMemoryNV = PFN_vkBindAccelerationStructureMemoryNV( vkGetDeviceProcAddr( device, "vkBindAccelerationStructureMemoryNV" ) );
+      vkCmdBuildAccelerationStructureNV = PFN_vkCmdBuildAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructureNV" ) );
+      vkCmdCopyAccelerationStructureNV = PFN_vkCmdCopyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureNV" ) );
+      vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetDeviceProcAddr( device, "vkCmdTraceRaysNV" ) );
+      vkCreateRayTracingPipelinesNV = PFN_vkCreateRayTracingPipelinesNV( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesNV" ) );
+      vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesNV" ) );
+      if ( !vkGetRayTracingShaderGroupHandlesKHR ) vkGetRayTracingShaderGroupHandlesKHR = vkGetRayTracingShaderGroupHandlesNV;
+      vkGetAccelerationStructureHandleNV = PFN_vkGetAccelerationStructureHandleNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureHandleNV" ) );
+      vkCmdWriteAccelerationStructuresPropertiesNV = PFN_vkCmdWriteAccelerationStructuresPropertiesNV( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesNV" ) );
+      vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetDeviceProcAddr( device, "vkCompileDeferredNV" ) );
+
+  //=== VK_KHR_maintenance3 ===
+      vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupportKHR" ) );
+      if ( !vkGetDescriptorSetLayoutSupport ) vkGetDescriptorSetLayoutSupport = vkGetDescriptorSetLayoutSupportKHR;
+
+  //=== VK_KHR_draw_indirect_count ===
+      vkCmdDrawIndirectCountKHR = PFN_vkCmdDrawIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountKHR" ) );
+      if ( !vkCmdDrawIndirectCount ) vkCmdDrawIndirectCount = vkCmdDrawIndirectCountKHR;
+      vkCmdDrawIndexedIndirectCountKHR = PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountKHR" ) );
+      if ( !vkCmdDrawIndexedIndirectCount ) vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountKHR;
+
+  //=== VK_EXT_external_memory_host ===
+      vkGetMemoryHostPointerPropertiesEXT = PFN_vkGetMemoryHostPointerPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetMemoryHostPointerPropertiesEXT" ) );
+
+  //=== VK_AMD_buffer_marker ===
+      vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarkerAMD" ) );
+
+  //=== VK_EXT_calibrated_timestamps ===
+      vkGetCalibratedTimestampsEXT = PFN_vkGetCalibratedTimestampsEXT( vkGetDeviceProcAddr( device, "vkGetCalibratedTimestampsEXT" ) );
+
+  //=== VK_NV_mesh_shader ===
+      vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksNV" ) );
+      vkCmdDrawMeshTasksIndirectNV = PFN_vkCmdDrawMeshTasksIndirectNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectNV" ) );
+      vkCmdDrawMeshTasksIndirectCountNV = PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountNV" ) );
+
+  //=== VK_NV_scissor_exclusive ===
+      vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV( vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorNV" ) );
+
+  //=== VK_NV_device_diagnostic_checkpoints ===
+      vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetDeviceProcAddr( device, "vkCmdSetCheckpointNV" ) );
+      vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointDataNV" ) );
+
+  //=== VK_KHR_timeline_semaphore ===
+      vkGetSemaphoreCounterValueKHR = PFN_vkGetSemaphoreCounterValueKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValueKHR" ) );
+      if ( !vkGetSemaphoreCounterValue ) vkGetSemaphoreCounterValue = vkGetSemaphoreCounterValueKHR;
+      vkWaitSemaphoresKHR = PFN_vkWaitSemaphoresKHR( vkGetDeviceProcAddr( device, "vkWaitSemaphoresKHR" ) );
+      if ( !vkWaitSemaphores ) vkWaitSemaphores = vkWaitSemaphoresKHR;
+      vkSignalSemaphoreKHR = PFN_vkSignalSemaphoreKHR( vkGetDeviceProcAddr( device, "vkSignalSemaphoreKHR" ) );
+      if ( !vkSignalSemaphore ) vkSignalSemaphore = vkSignalSemaphoreKHR;
+
+  //=== VK_INTEL_performance_query ===
+      vkInitializePerformanceApiINTEL = PFN_vkInitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkInitializePerformanceApiINTEL" ) );
+      vkUninitializePerformanceApiINTEL = PFN_vkUninitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkUninitializePerformanceApiINTEL" ) );
+      vkCmdSetPerformanceMarkerINTEL = PFN_vkCmdSetPerformanceMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceMarkerINTEL" ) );
+      vkCmdSetPerformanceStreamMarkerINTEL = PFN_vkCmdSetPerformanceStreamMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceStreamMarkerINTEL" ) );
+      vkCmdSetPerformanceOverrideINTEL = PFN_vkCmdSetPerformanceOverrideINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceOverrideINTEL" ) );
+      vkAcquirePerformanceConfigurationINTEL = PFN_vkAcquirePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkAcquirePerformanceConfigurationINTEL" ) );
+      vkReleasePerformanceConfigurationINTEL = PFN_vkReleasePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkReleasePerformanceConfigurationINTEL" ) );
+      vkQueueSetPerformanceConfigurationINTEL = PFN_vkQueueSetPerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkQueueSetPerformanceConfigurationINTEL" ) );
+      vkGetPerformanceParameterINTEL = PFN_vkGetPerformanceParameterINTEL( vkGetDeviceProcAddr( device, "vkGetPerformanceParameterINTEL" ) );
+
+  //=== VK_AMD_display_native_hdr ===
+      vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetDeviceProcAddr( device, "vkSetLocalDimmingAMD" ) );
+
+  //=== VK_KHR_fragment_shading_rate ===
+      vkCmdSetFragmentShadingRateKHR = PFN_vkCmdSetFragmentShadingRateKHR( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateKHR" ) );
+
+  //=== VK_EXT_buffer_device_address ===
+      vkGetBufferDeviceAddressEXT = PFN_vkGetBufferDeviceAddressEXT( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressEXT" ) );
+      if ( !vkGetBufferDeviceAddress ) vkGetBufferDeviceAddress = vkGetBufferDeviceAddressEXT;
+
+  //=== VK_KHR_present_wait ===
+      vkWaitForPresentKHR = PFN_vkWaitForPresentKHR( vkGetDeviceProcAddr( device, "vkWaitForPresentKHR" ) );
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_EXT_full_screen_exclusive ===
+      vkAcquireFullScreenExclusiveModeEXT = PFN_vkAcquireFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkAcquireFullScreenExclusiveModeEXT" ) );
+      vkReleaseFullScreenExclusiveModeEXT = PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkReleaseFullScreenExclusiveModeEXT" ) );
+      vkGetDeviceGroupSurfacePresentModes2EXT = PFN_vkGetDeviceGroupSurfacePresentModes2EXT( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModes2EXT" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_buffer_device_address ===
+      vkGetBufferDeviceAddressKHR = PFN_vkGetBufferDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressKHR" ) );
+      if ( !vkGetBufferDeviceAddress ) vkGetBufferDeviceAddress = vkGetBufferDeviceAddressKHR;
+      vkGetBufferOpaqueCaptureAddressKHR = PFN_vkGetBufferOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddressKHR" ) );
+      if ( !vkGetBufferOpaqueCaptureAddress ) vkGetBufferOpaqueCaptureAddress = vkGetBufferOpaqueCaptureAddressKHR;
+      vkGetDeviceMemoryOpaqueCaptureAddressKHR = PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) );
+      if ( !vkGetDeviceMemoryOpaqueCaptureAddress ) vkGetDeviceMemoryOpaqueCaptureAddress = vkGetDeviceMemoryOpaqueCaptureAddressKHR;
+
+  //=== VK_EXT_line_rasterization ===
+      vkCmdSetLineStippleEXT = PFN_vkCmdSetLineStippleEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleEXT" ) );
+
+  //=== VK_EXT_host_query_reset ===
+      vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetDeviceProcAddr( device, "vkResetQueryPoolEXT" ) );
+      if ( !vkResetQueryPool ) vkResetQueryPool = vkResetQueryPoolEXT;
+
+  //=== VK_EXT_extended_dynamic_state ===
+      vkCmdSetCullModeEXT = PFN_vkCmdSetCullModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetCullModeEXT" ) );
+      if ( !vkCmdSetCullMode ) vkCmdSetCullMode = vkCmdSetCullModeEXT;
+      vkCmdSetFrontFaceEXT = PFN_vkCmdSetFrontFaceEXT( vkGetDeviceProcAddr( device, "vkCmdSetFrontFaceEXT" ) );
+      if ( !vkCmdSetFrontFace ) vkCmdSetFrontFace = vkCmdSetFrontFaceEXT;
+      vkCmdSetPrimitiveTopologyEXT = PFN_vkCmdSetPrimitiveTopologyEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveTopologyEXT" ) );
+      if ( !vkCmdSetPrimitiveTopology ) vkCmdSetPrimitiveTopology = vkCmdSetPrimitiveTopologyEXT;
+      vkCmdSetViewportWithCountEXT = PFN_vkCmdSetViewportWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetViewportWithCountEXT" ) );
+      if ( !vkCmdSetViewportWithCount ) vkCmdSetViewportWithCount = vkCmdSetViewportWithCountEXT;
+      vkCmdSetScissorWithCountEXT = PFN_vkCmdSetScissorWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetScissorWithCountEXT" ) );
+      if ( !vkCmdSetScissorWithCount ) vkCmdSetScissorWithCount = vkCmdSetScissorWithCountEXT;
+      vkCmdBindVertexBuffers2EXT = PFN_vkCmdBindVertexBuffers2EXT( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers2EXT" ) );
+      if ( !vkCmdBindVertexBuffers2 ) vkCmdBindVertexBuffers2 = vkCmdBindVertexBuffers2EXT;
+      vkCmdSetDepthTestEnableEXT = PFN_vkCmdSetDepthTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthTestEnableEXT" ) );
+      if ( !vkCmdSetDepthTestEnable ) vkCmdSetDepthTestEnable = vkCmdSetDepthTestEnableEXT;
+      vkCmdSetDepthWriteEnableEXT = PFN_vkCmdSetDepthWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthWriteEnableEXT" ) );
+      if ( !vkCmdSetDepthWriteEnable ) vkCmdSetDepthWriteEnable = vkCmdSetDepthWriteEnableEXT;
+      vkCmdSetDepthCompareOpEXT = PFN_vkCmdSetDepthCompareOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthCompareOpEXT" ) );
+      if ( !vkCmdSetDepthCompareOp ) vkCmdSetDepthCompareOp = vkCmdSetDepthCompareOpEXT;
+      vkCmdSetDepthBoundsTestEnableEXT = PFN_vkCmdSetDepthBoundsTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBoundsTestEnableEXT" ) );
+      if ( !vkCmdSetDepthBoundsTestEnable ) vkCmdSetDepthBoundsTestEnable = vkCmdSetDepthBoundsTestEnableEXT;
+      vkCmdSetStencilTestEnableEXT = PFN_vkCmdSetStencilTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilTestEnableEXT" ) );
+      if ( !vkCmdSetStencilTestEnable ) vkCmdSetStencilTestEnable = vkCmdSetStencilTestEnableEXT;
+      vkCmdSetStencilOpEXT = PFN_vkCmdSetStencilOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilOpEXT" ) );
+      if ( !vkCmdSetStencilOp ) vkCmdSetStencilOp = vkCmdSetStencilOpEXT;
+
+  //=== VK_KHR_deferred_host_operations ===
+      vkCreateDeferredOperationKHR = PFN_vkCreateDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkCreateDeferredOperationKHR" ) );
+      vkDestroyDeferredOperationKHR = PFN_vkDestroyDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkDestroyDeferredOperationKHR" ) );
+      vkGetDeferredOperationMaxConcurrencyKHR = PFN_vkGetDeferredOperationMaxConcurrencyKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationMaxConcurrencyKHR" ) );
+      vkGetDeferredOperationResultKHR = PFN_vkGetDeferredOperationResultKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationResultKHR" ) );
+      vkDeferredOperationJoinKHR = PFN_vkDeferredOperationJoinKHR( vkGetDeviceProcAddr( device, "vkDeferredOperationJoinKHR" ) );
+
+  //=== VK_KHR_pipeline_executable_properties ===
+      vkGetPipelineExecutablePropertiesKHR = PFN_vkGetPipelineExecutablePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutablePropertiesKHR" ) );
+      vkGetPipelineExecutableStatisticsKHR = PFN_vkGetPipelineExecutableStatisticsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableStatisticsKHR" ) );
+      vkGetPipelineExecutableInternalRepresentationsKHR = PFN_vkGetPipelineExecutableInternalRepresentationsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableInternalRepresentationsKHR" ) );
+
+  //=== VK_EXT_swapchain_maintenance1 ===
+      vkReleaseSwapchainImagesEXT = PFN_vkReleaseSwapchainImagesEXT( vkGetDeviceProcAddr( device, "vkReleaseSwapchainImagesEXT" ) );
+
+  //=== VK_NV_device_generated_commands ===
+      vkGetGeneratedCommandsMemoryRequirementsNV = PFN_vkGetGeneratedCommandsMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetGeneratedCommandsMemoryRequirementsNV" ) );
+      vkCmdPreprocessGeneratedCommandsNV = PFN_vkCmdPreprocessGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdPreprocessGeneratedCommandsNV" ) );
+      vkCmdExecuteGeneratedCommandsNV = PFN_vkCmdExecuteGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdExecuteGeneratedCommandsNV" ) );
+      vkCmdBindPipelineShaderGroupNV = PFN_vkCmdBindPipelineShaderGroupNV( vkGetDeviceProcAddr( device, "vkCmdBindPipelineShaderGroupNV" ) );
+      vkCreateIndirectCommandsLayoutNV = PFN_vkCreateIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkCreateIndirectCommandsLayoutNV" ) );
+      vkDestroyIndirectCommandsLayoutNV = PFN_vkDestroyIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkDestroyIndirectCommandsLayoutNV" ) );
+
+  //=== VK_EXT_private_data ===
+      vkCreatePrivateDataSlotEXT = PFN_vkCreatePrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlotEXT" ) );
+      if ( !vkCreatePrivateDataSlot ) vkCreatePrivateDataSlot = vkCreatePrivateDataSlotEXT;
+      vkDestroyPrivateDataSlotEXT = PFN_vkDestroyPrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkDestroyPrivateDataSlotEXT" ) );
+      if ( !vkDestroyPrivateDataSlot ) vkDestroyPrivateDataSlot = vkDestroyPrivateDataSlotEXT;
+      vkSetPrivateDataEXT = PFN_vkSetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkSetPrivateDataEXT" ) );
+      if ( !vkSetPrivateData ) vkSetPrivateData = vkSetPrivateDataEXT;
+      vkGetPrivateDataEXT = PFN_vkGetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkGetPrivateDataEXT" ) );
+      if ( !vkGetPrivateData ) vkGetPrivateData = vkGetPrivateDataEXT;
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  //=== VK_KHR_video_encode_queue ===
+      vkCmdEncodeVideoKHR = PFN_vkCmdEncodeVideoKHR( vkGetDeviceProcAddr( device, "vkCmdEncodeVideoKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+  //=== VK_EXT_metal_objects ===
+      vkExportMetalObjectsEXT = PFN_vkExportMetalObjectsEXT( vkGetDeviceProcAddr( device, "vkExportMetalObjectsEXT" ) );
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  //=== VK_KHR_synchronization2 ===
+      vkCmdSetEvent2KHR = PFN_vkCmdSetEvent2KHR( vkGetDeviceProcAddr( device, "vkCmdSetEvent2KHR" ) );
+      if ( !vkCmdSetEvent2 ) vkCmdSetEvent2 = vkCmdSetEvent2KHR;
+      vkCmdResetEvent2KHR = PFN_vkCmdResetEvent2KHR( vkGetDeviceProcAddr( device, "vkCmdResetEvent2KHR" ) );
+      if ( !vkCmdResetEvent2 ) vkCmdResetEvent2 = vkCmdResetEvent2KHR;
+      vkCmdWaitEvents2KHR = PFN_vkCmdWaitEvents2KHR( vkGetDeviceProcAddr( device, "vkCmdWaitEvents2KHR" ) );
+      if ( !vkCmdWaitEvents2 ) vkCmdWaitEvents2 = vkCmdWaitEvents2KHR;
+      vkCmdPipelineBarrier2KHR = PFN_vkCmdPipelineBarrier2KHR( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier2KHR" ) );
+      if ( !vkCmdPipelineBarrier2 ) vkCmdPipelineBarrier2 = vkCmdPipelineBarrier2KHR;
+      vkCmdWriteTimestamp2KHR = PFN_vkCmdWriteTimestamp2KHR( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp2KHR" ) );
+      if ( !vkCmdWriteTimestamp2 ) vkCmdWriteTimestamp2 = vkCmdWriteTimestamp2KHR;
+      vkQueueSubmit2KHR = PFN_vkQueueSubmit2KHR( vkGetDeviceProcAddr( device, "vkQueueSubmit2KHR" ) );
+      if ( !vkQueueSubmit2 ) vkQueueSubmit2 = vkQueueSubmit2KHR;
+      vkCmdWriteBufferMarker2AMD = PFN_vkCmdWriteBufferMarker2AMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarker2AMD" ) );
+      vkGetQueueCheckpointData2NV = PFN_vkGetQueueCheckpointData2NV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointData2NV" ) );
+
+  //=== VK_EXT_descriptor_buffer ===
+      vkGetDescriptorSetLayoutSizeEXT = PFN_vkGetDescriptorSetLayoutSizeEXT( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSizeEXT" ) );
+      vkGetDescriptorSetLayoutBindingOffsetEXT = PFN_vkGetDescriptorSetLayoutBindingOffsetEXT( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutBindingOffsetEXT" ) );
+      vkGetDescriptorEXT = PFN_vkGetDescriptorEXT( vkGetDeviceProcAddr( device, "vkGetDescriptorEXT" ) );
+      vkCmdBindDescriptorBuffersEXT = PFN_vkCmdBindDescriptorBuffersEXT( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorBuffersEXT" ) );
+      vkCmdSetDescriptorBufferOffsetsEXT = PFN_vkCmdSetDescriptorBufferOffsetsEXT( vkGetDeviceProcAddr( device, "vkCmdSetDescriptorBufferOffsetsEXT" ) );
+      vkCmdBindDescriptorBufferEmbeddedSamplersEXT = PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorBufferEmbeddedSamplersEXT" ) );
+      vkGetBufferOpaqueCaptureDescriptorDataEXT = PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureDescriptorDataEXT" ) );
+      vkGetImageOpaqueCaptureDescriptorDataEXT = PFN_vkGetImageOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetImageOpaqueCaptureDescriptorDataEXT" ) );
+      vkGetImageViewOpaqueCaptureDescriptorDataEXT = PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetImageViewOpaqueCaptureDescriptorDataEXT" ) );
+      vkGetSamplerOpaqueCaptureDescriptorDataEXT = PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetSamplerOpaqueCaptureDescriptorDataEXT" ) );
+      vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT = PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT" ) );
+
+  //=== VK_NV_fragment_shading_rate_enums ===
+      vkCmdSetFragmentShadingRateEnumNV = PFN_vkCmdSetFragmentShadingRateEnumNV( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateEnumNV" ) );
+
+  //=== VK_EXT_mesh_shader ===
+      vkCmdDrawMeshTasksEXT = PFN_vkCmdDrawMeshTasksEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksEXT" ) );
+      vkCmdDrawMeshTasksIndirectEXT = PFN_vkCmdDrawMeshTasksIndirectEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectEXT" ) );
+      vkCmdDrawMeshTasksIndirectCountEXT = PFN_vkCmdDrawMeshTasksIndirectCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountEXT" ) );
+
+  //=== VK_KHR_copy_commands2 ===
+      vkCmdCopyBuffer2KHR = PFN_vkCmdCopyBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer2KHR" ) );
+      if ( !vkCmdCopyBuffer2 ) vkCmdCopyBuffer2 = vkCmdCopyBuffer2KHR;
+      vkCmdCopyImage2KHR = PFN_vkCmdCopyImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImage2KHR" ) );
+      if ( !vkCmdCopyImage2 ) vkCmdCopyImage2 = vkCmdCopyImage2KHR;
+      vkCmdCopyBufferToImage2KHR = PFN_vkCmdCopyBufferToImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage2KHR" ) );
+      if ( !vkCmdCopyBufferToImage2 ) vkCmdCopyBufferToImage2 = vkCmdCopyBufferToImage2KHR;
+      vkCmdCopyImageToBuffer2KHR = PFN_vkCmdCopyImageToBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer2KHR" ) );
+      if ( !vkCmdCopyImageToBuffer2 ) vkCmdCopyImageToBuffer2 = vkCmdCopyImageToBuffer2KHR;
+      vkCmdBlitImage2KHR = PFN_vkCmdBlitImage2KHR( vkGetDeviceProcAddr( device, "vkCmdBlitImage2KHR" ) );
+      if ( !vkCmdBlitImage2 ) vkCmdBlitImage2 = vkCmdBlitImage2KHR;
+      vkCmdResolveImage2KHR = PFN_vkCmdResolveImage2KHR( vkGetDeviceProcAddr( device, "vkCmdResolveImage2KHR" ) );
+      if ( !vkCmdResolveImage2 ) vkCmdResolveImage2 = vkCmdResolveImage2KHR;
+
+  //=== VK_EXT_image_compression_control ===
+      vkGetImageSubresourceLayout2EXT = PFN_vkGetImageSubresourceLayout2EXT( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2EXT" ) );
+
+  //=== VK_EXT_device_fault ===
+      vkGetDeviceFaultInfoEXT = PFN_vkGetDeviceFaultInfoEXT( vkGetDeviceProcAddr( device, "vkGetDeviceFaultInfoEXT" ) );
+
+  //=== VK_KHR_ray_tracing_pipeline ===
+      vkCmdTraceRaysKHR = PFN_vkCmdTraceRaysKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysKHR" ) );
+      vkCreateRayTracingPipelinesKHR = PFN_vkCreateRayTracingPipelinesKHR( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesKHR" ) );
+      vkGetRayTracingShaderGroupHandlesKHR = PFN_vkGetRayTracingShaderGroupHandlesKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesKHR" ) );
+      vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) );
+      vkCmdTraceRaysIndirectKHR = PFN_vkCmdTraceRaysIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysIndirectKHR" ) );
+      vkGetRayTracingShaderGroupStackSizeKHR = PFN_vkGetRayTracingShaderGroupStackSizeKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupStackSizeKHR" ) );
+      vkCmdSetRayTracingPipelineStackSizeKHR = PFN_vkCmdSetRayTracingPipelineStackSizeKHR( vkGetDeviceProcAddr( device, "vkCmdSetRayTracingPipelineStackSizeKHR" ) );
+
+  //=== VK_EXT_vertex_input_dynamic_state ===
+      vkCmdSetVertexInputEXT = PFN_vkCmdSetVertexInputEXT( vkGetDeviceProcAddr( device, "vkCmdSetVertexInputEXT" ) );
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_external_memory ===
+      vkGetMemoryZirconHandleFUCHSIA = PFN_vkGetMemoryZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkGetMemoryZirconHandleFUCHSIA" ) );
+      vkGetMemoryZirconHandlePropertiesFUCHSIA = PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA( vkGetDeviceProcAddr( device, "vkGetMemoryZirconHandlePropertiesFUCHSIA" ) );
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_external_semaphore ===
+      vkImportSemaphoreZirconHandleFUCHSIA = PFN_vkImportSemaphoreZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkImportSemaphoreZirconHandleFUCHSIA" ) );
+      vkGetSemaphoreZirconHandleFUCHSIA = PFN_vkGetSemaphoreZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkGetSemaphoreZirconHandleFUCHSIA" ) );
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_buffer_collection ===
+      vkCreateBufferCollectionFUCHSIA = PFN_vkCreateBufferCollectionFUCHSIA( vkGetDeviceProcAddr( device, "vkCreateBufferCollectionFUCHSIA" ) );
+      vkSetBufferCollectionImageConstraintsFUCHSIA = PFN_vkSetBufferCollectionImageConstraintsFUCHSIA( vkGetDeviceProcAddr( device, "vkSetBufferCollectionImageConstraintsFUCHSIA" ) );
+      vkSetBufferCollectionBufferConstraintsFUCHSIA = PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA( vkGetDeviceProcAddr( device, "vkSetBufferCollectionBufferConstraintsFUCHSIA" ) );
+      vkDestroyBufferCollectionFUCHSIA = PFN_vkDestroyBufferCollectionFUCHSIA( vkGetDeviceProcAddr( device, "vkDestroyBufferCollectionFUCHSIA" ) );
+      vkGetBufferCollectionPropertiesFUCHSIA = PFN_vkGetBufferCollectionPropertiesFUCHSIA( vkGetDeviceProcAddr( device, "vkGetBufferCollectionPropertiesFUCHSIA" ) );
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  //=== VK_HUAWEI_subpass_shading ===
+      vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( vkGetDeviceProcAddr( device, "vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI" ) );
+      vkCmdSubpassShadingHUAWEI = PFN_vkCmdSubpassShadingHUAWEI( vkGetDeviceProcAddr( device, "vkCmdSubpassShadingHUAWEI" ) );
+
+  //=== VK_HUAWEI_invocation_mask ===
+      vkCmdBindInvocationMaskHUAWEI = PFN_vkCmdBindInvocationMaskHUAWEI( vkGetDeviceProcAddr( device, "vkCmdBindInvocationMaskHUAWEI" ) );
+
+  //=== VK_NV_external_memory_rdma ===
+      vkGetMemoryRemoteAddressNV = PFN_vkGetMemoryRemoteAddressNV( vkGetDeviceProcAddr( device, "vkGetMemoryRemoteAddressNV" ) );
+
+  //=== VK_EXT_pipeline_properties ===
+      vkGetPipelinePropertiesEXT = PFN_vkGetPipelinePropertiesEXT( vkGetDeviceProcAddr( device, "vkGetPipelinePropertiesEXT" ) );
+
+  //=== VK_EXT_extended_dynamic_state2 ===
+      vkCmdSetPatchControlPointsEXT = PFN_vkCmdSetPatchControlPointsEXT( vkGetDeviceProcAddr( device, "vkCmdSetPatchControlPointsEXT" ) );
+      vkCmdSetRasterizerDiscardEnableEXT = PFN_vkCmdSetRasterizerDiscardEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizerDiscardEnableEXT" ) );
+      if ( !vkCmdSetRasterizerDiscardEnable ) vkCmdSetRasterizerDiscardEnable = vkCmdSetRasterizerDiscardEnableEXT;
+      vkCmdSetDepthBiasEnableEXT = PFN_vkCmdSetDepthBiasEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBiasEnableEXT" ) );
+      if ( !vkCmdSetDepthBiasEnable ) vkCmdSetDepthBiasEnable = vkCmdSetDepthBiasEnableEXT;
+      vkCmdSetLogicOpEXT = PFN_vkCmdSetLogicOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetLogicOpEXT" ) );
+      vkCmdSetPrimitiveRestartEnableEXT = PFN_vkCmdSetPrimitiveRestartEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveRestartEnableEXT" ) );
+      if ( !vkCmdSetPrimitiveRestartEnable ) vkCmdSetPrimitiveRestartEnable = vkCmdSetPrimitiveRestartEnableEXT;
+
+  //=== VK_EXT_color_write_enable ===
+      vkCmdSetColorWriteEnableEXT = PFN_vkCmdSetColorWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorWriteEnableEXT" ) );
+
+  //=== VK_KHR_ray_tracing_maintenance1 ===
+      vkCmdTraceRaysIndirect2KHR = PFN_vkCmdTraceRaysIndirect2KHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysIndirect2KHR" ) );
+
+  //=== VK_EXT_multi_draw ===
+      vkCmdDrawMultiEXT = PFN_vkCmdDrawMultiEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMultiEXT" ) );
+      vkCmdDrawMultiIndexedEXT = PFN_vkCmdDrawMultiIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMultiIndexedEXT" ) );
+
+  //=== VK_EXT_opacity_micromap ===
+      vkCreateMicromapEXT = PFN_vkCreateMicromapEXT( vkGetDeviceProcAddr( device, "vkCreateMicromapEXT" ) );
+      vkDestroyMicromapEXT = PFN_vkDestroyMicromapEXT( vkGetDeviceProcAddr( device, "vkDestroyMicromapEXT" ) );
+      vkCmdBuildMicromapsEXT = PFN_vkCmdBuildMicromapsEXT( vkGetDeviceProcAddr( device, "vkCmdBuildMicromapsEXT" ) );
+      vkBuildMicromapsEXT = PFN_vkBuildMicromapsEXT( vkGetDeviceProcAddr( device, "vkBuildMicromapsEXT" ) );
+      vkCopyMicromapEXT = PFN_vkCopyMicromapEXT( vkGetDeviceProcAddr( device, "vkCopyMicromapEXT" ) );
+      vkCopyMicromapToMemoryEXT = PFN_vkCopyMicromapToMemoryEXT( vkGetDeviceProcAddr( device, "vkCopyMicromapToMemoryEXT" ) );
+      vkCopyMemoryToMicromapEXT = PFN_vkCopyMemoryToMicromapEXT( vkGetDeviceProcAddr( device, "vkCopyMemoryToMicromapEXT" ) );
+      vkWriteMicromapsPropertiesEXT = PFN_vkWriteMicromapsPropertiesEXT( vkGetDeviceProcAddr( device, "vkWriteMicromapsPropertiesEXT" ) );
+      vkCmdCopyMicromapEXT = PFN_vkCmdCopyMicromapEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMicromapEXT" ) );
+      vkCmdCopyMicromapToMemoryEXT = PFN_vkCmdCopyMicromapToMemoryEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMicromapToMemoryEXT" ) );
+      vkCmdCopyMemoryToMicromapEXT = PFN_vkCmdCopyMemoryToMicromapEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToMicromapEXT" ) );
+      vkCmdWriteMicromapsPropertiesEXT = PFN_vkCmdWriteMicromapsPropertiesEXT( vkGetDeviceProcAddr( device, "vkCmdWriteMicromapsPropertiesEXT" ) );
+      vkGetDeviceMicromapCompatibilityEXT = PFN_vkGetDeviceMicromapCompatibilityEXT( vkGetDeviceProcAddr( device, "vkGetDeviceMicromapCompatibilityEXT" ) );
+      vkGetMicromapBuildSizesEXT = PFN_vkGetMicromapBuildSizesEXT( vkGetDeviceProcAddr( device, "vkGetMicromapBuildSizesEXT" ) );
+
+  //=== VK_HUAWEI_cluster_culling_shader ===
+      vkCmdDrawClusterHUAWEI = PFN_vkCmdDrawClusterHUAWEI( vkGetDeviceProcAddr( device, "vkCmdDrawClusterHUAWEI" ) );
+      vkCmdDrawClusterIndirectHUAWEI = PFN_vkCmdDrawClusterIndirectHUAWEI( vkGetDeviceProcAddr( device, "vkCmdDrawClusterIndirectHUAWEI" ) );
+
+  //=== VK_EXT_pageable_device_local_memory ===
+      vkSetDeviceMemoryPriorityEXT = PFN_vkSetDeviceMemoryPriorityEXT( vkGetDeviceProcAddr( device, "vkSetDeviceMemoryPriorityEXT" ) );
+
+  //=== VK_KHR_maintenance4 ===
+      vkGetDeviceBufferMemoryRequirementsKHR = PFN_vkGetDeviceBufferMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceBufferMemoryRequirementsKHR" ) );
+      if ( !vkGetDeviceBufferMemoryRequirements ) vkGetDeviceBufferMemoryRequirements = vkGetDeviceBufferMemoryRequirementsKHR;
+      vkGetDeviceImageMemoryRequirementsKHR = PFN_vkGetDeviceImageMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageMemoryRequirementsKHR" ) );
+      if ( !vkGetDeviceImageMemoryRequirements ) vkGetDeviceImageMemoryRequirements = vkGetDeviceImageMemoryRequirementsKHR;
+      vkGetDeviceImageSparseMemoryRequirementsKHR = PFN_vkGetDeviceImageSparseMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageSparseMemoryRequirementsKHR" ) );
+      if ( !vkGetDeviceImageSparseMemoryRequirements ) vkGetDeviceImageSparseMemoryRequirements = vkGetDeviceImageSparseMemoryRequirementsKHR;
+
+  //=== VK_VALVE_descriptor_set_host_mapping ===
+      vkGetDescriptorSetLayoutHostMappingInfoVALVE = PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutHostMappingInfoVALVE" ) );
+      vkGetDescriptorSetHostMappingVALVE = PFN_vkGetDescriptorSetHostMappingVALVE( vkGetDeviceProcAddr( device, "vkGetDescriptorSetHostMappingVALVE" ) );
+
+  //=== VK_NV_copy_memory_indirect ===
+      vkCmdCopyMemoryIndirectNV = PFN_vkCmdCopyMemoryIndirectNV( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryIndirectNV" ) );
+      vkCmdCopyMemoryToImageIndirectNV = PFN_vkCmdCopyMemoryToImageIndirectNV( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToImageIndirectNV" ) );
+
+  //=== VK_NV_memory_decompression ===
+      vkCmdDecompressMemoryNV = PFN_vkCmdDecompressMemoryNV( vkGetDeviceProcAddr( device, "vkCmdDecompressMemoryNV" ) );
+      vkCmdDecompressMemoryIndirectCountNV = PFN_vkCmdDecompressMemoryIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDecompressMemoryIndirectCountNV" ) );
+
+  //=== VK_EXT_extended_dynamic_state3 ===
+      vkCmdSetTessellationDomainOriginEXT = PFN_vkCmdSetTessellationDomainOriginEXT( vkGetDeviceProcAddr( device, "vkCmdSetTessellationDomainOriginEXT" ) );
+      vkCmdSetDepthClampEnableEXT = PFN_vkCmdSetDepthClampEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClampEnableEXT" ) );
+      vkCmdSetPolygonModeEXT = PFN_vkCmdSetPolygonModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetPolygonModeEXT" ) );
+      vkCmdSetRasterizationSamplesEXT = PFN_vkCmdSetRasterizationSamplesEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizationSamplesEXT" ) );
+      vkCmdSetSampleMaskEXT = PFN_vkCmdSetSampleMaskEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleMaskEXT" ) );
+      vkCmdSetAlphaToCoverageEnableEXT = PFN_vkCmdSetAlphaToCoverageEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAlphaToCoverageEnableEXT" ) );
+      vkCmdSetAlphaToOneEnableEXT = PFN_vkCmdSetAlphaToOneEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAlphaToOneEnableEXT" ) );
+      vkCmdSetLogicOpEnableEXT = PFN_vkCmdSetLogicOpEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetLogicOpEnableEXT" ) );
+      vkCmdSetColorBlendEnableEXT = PFN_vkCmdSetColorBlendEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendEnableEXT" ) );
+      vkCmdSetColorBlendEquationEXT = PFN_vkCmdSetColorBlendEquationEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendEquationEXT" ) );
+      vkCmdSetColorWriteMaskEXT = PFN_vkCmdSetColorWriteMaskEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorWriteMaskEXT" ) );
+      vkCmdSetRasterizationStreamEXT = PFN_vkCmdSetRasterizationStreamEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizationStreamEXT" ) );
+      vkCmdSetConservativeRasterizationModeEXT = PFN_vkCmdSetConservativeRasterizationModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetConservativeRasterizationModeEXT" ) );
+      vkCmdSetExtraPrimitiveOverestimationSizeEXT = PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT( vkGetDeviceProcAddr( device, "vkCmdSetExtraPrimitiveOverestimationSizeEXT" ) );
+      vkCmdSetDepthClipEnableEXT = PFN_vkCmdSetDepthClipEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClipEnableEXT" ) );
+      vkCmdSetSampleLocationsEnableEXT = PFN_vkCmdSetSampleLocationsEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEnableEXT" ) );
+      vkCmdSetColorBlendAdvancedEXT = PFN_vkCmdSetColorBlendAdvancedEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendAdvancedEXT" ) );
+      vkCmdSetProvokingVertexModeEXT = PFN_vkCmdSetProvokingVertexModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetProvokingVertexModeEXT" ) );
+      vkCmdSetLineRasterizationModeEXT = PFN_vkCmdSetLineRasterizationModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineRasterizationModeEXT" ) );
+      vkCmdSetLineStippleEnableEXT = PFN_vkCmdSetLineStippleEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleEnableEXT" ) );
+      vkCmdSetDepthClipNegativeOneToOneEXT = PFN_vkCmdSetDepthClipNegativeOneToOneEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClipNegativeOneToOneEXT" ) );
+      vkCmdSetViewportWScalingEnableNV = PFN_vkCmdSetViewportWScalingEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingEnableNV" ) );
+      vkCmdSetViewportSwizzleNV = PFN_vkCmdSetViewportSwizzleNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportSwizzleNV" ) );
+      vkCmdSetCoverageToColorEnableNV = PFN_vkCmdSetCoverageToColorEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageToColorEnableNV" ) );
+      vkCmdSetCoverageToColorLocationNV = PFN_vkCmdSetCoverageToColorLocationNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageToColorLocationNV" ) );
+      vkCmdSetCoverageModulationModeNV = PFN_vkCmdSetCoverageModulationModeNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationModeNV" ) );
+      vkCmdSetCoverageModulationTableEnableNV = PFN_vkCmdSetCoverageModulationTableEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationTableEnableNV" ) );
+      vkCmdSetCoverageModulationTableNV = PFN_vkCmdSetCoverageModulationTableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationTableNV" ) );
+      vkCmdSetShadingRateImageEnableNV = PFN_vkCmdSetShadingRateImageEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetShadingRateImageEnableNV" ) );
+      vkCmdSetRepresentativeFragmentTestEnableNV = PFN_vkCmdSetRepresentativeFragmentTestEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetRepresentativeFragmentTestEnableNV" ) );
+      vkCmdSetCoverageReductionModeNV = PFN_vkCmdSetCoverageReductionModeNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageReductionModeNV" ) );
+
+  //=== VK_EXT_shader_module_identifier ===
+      vkGetShaderModuleIdentifierEXT = PFN_vkGetShaderModuleIdentifierEXT( vkGetDeviceProcAddr( device, "vkGetShaderModuleIdentifierEXT" ) );
+      vkGetShaderModuleCreateInfoIdentifierEXT = PFN_vkGetShaderModuleCreateInfoIdentifierEXT( vkGetDeviceProcAddr( device, "vkGetShaderModuleCreateInfoIdentifierEXT" ) );
+
+  //=== VK_NV_optical_flow ===
+      vkCreateOpticalFlowSessionNV = PFN_vkCreateOpticalFlowSessionNV( vkGetDeviceProcAddr( device, "vkCreateOpticalFlowSessionNV" ) );
+      vkDestroyOpticalFlowSessionNV = PFN_vkDestroyOpticalFlowSessionNV( vkGetDeviceProcAddr( device, "vkDestroyOpticalFlowSessionNV" ) );
+      vkBindOpticalFlowSessionImageNV = PFN_vkBindOpticalFlowSessionImageNV( vkGetDeviceProcAddr( device, "vkBindOpticalFlowSessionImageNV" ) );
+      vkCmdOpticalFlowExecuteNV = PFN_vkCmdOpticalFlowExecuteNV( vkGetDeviceProcAddr( device, "vkCmdOpticalFlowExecuteNV" ) );
+
+  //=== VK_QCOM_tile_properties ===
+      vkGetFramebufferTilePropertiesQCOM = PFN_vkGetFramebufferTilePropertiesQCOM( vkGetDeviceProcAddr( device, "vkGetFramebufferTilePropertiesQCOM" ) );
+      vkGetDynamicRenderingTilePropertiesQCOM = PFN_vkGetDynamicRenderingTilePropertiesQCOM( vkGetDeviceProcAddr( device, "vkGetDynamicRenderingTilePropertiesQCOM" ) );
+
+    }
+
+    template <typename DynamicLoader>
+    void init(VULKAN_HPP_NAMESPACE::Instance const & instance, VULKAN_HPP_NAMESPACE::Device const & device, DynamicLoader const & dl) VULKAN_HPP_NOEXCEPT
+    {
+      PFN_vkGetInstanceProcAddr getInstanceProcAddr = dl.template getProcAddress<PFN_vkGetInstanceProcAddr>("vkGetInstanceProcAddr");
+      PFN_vkGetDeviceProcAddr getDeviceProcAddr = dl.template getProcAddress<PFN_vkGetDeviceProcAddr>("vkGetDeviceProcAddr");
+      init(static_cast<VkInstance>(instance), getInstanceProcAddr, static_cast<VkDevice>(device), device ? getDeviceProcAddr : nullptr);
+    }
+
+    template <typename DynamicLoader
+#if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL
+      = VULKAN_HPP_NAMESPACE::DynamicLoader
+#endif
+    >
+    void init(VULKAN_HPP_NAMESPACE::Instance const & instance, VULKAN_HPP_NAMESPACE::Device const & device) VULKAN_HPP_NOEXCEPT
+    {
+      static DynamicLoader dl;
+      init(instance, device, dl);
+    }
+  };
+}   // namespace VULKAN_HPP_NAMESPACE
+#endif
diff --git a/host/libs/graphics_detector/include/vulkan/vulkan_android.h b/host/libs/graphics_detector/include/vulkan/vulkan_android.h
new file mode 100644
index 0000000..11f5397
--- /dev/null
+++ b/host/libs/graphics_detector/include/vulkan/vulkan_android.h
@@ -0,0 +1,125 @@
+#ifndef VULKAN_ANDROID_H_
+#define VULKAN_ANDROID_H_ 1
+
+/*
+** Copyright 2015-2022 The Khronos Group Inc.
+**
+** SPDX-License-Identifier: Apache-2.0
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+#define VK_KHR_android_surface 1
+struct ANativeWindow;
+#define VK_KHR_ANDROID_SURFACE_SPEC_VERSION 6
+#define VK_KHR_ANDROID_SURFACE_EXTENSION_NAME "VK_KHR_android_surface"
+typedef VkFlags VkAndroidSurfaceCreateFlagsKHR;
+typedef struct VkAndroidSurfaceCreateInfoKHR {
+    VkStructureType                   sType;
+    const void*                       pNext;
+    VkAndroidSurfaceCreateFlagsKHR    flags;
+    struct ANativeWindow*             window;
+} VkAndroidSurfaceCreateInfoKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateAndroidSurfaceKHR)(VkInstance instance, const VkAndroidSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateAndroidSurfaceKHR(
+    VkInstance                                  instance,
+    const VkAndroidSurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+#endif
+
+
+#define VK_ANDROID_external_memory_android_hardware_buffer 1
+struct AHardwareBuffer;
+#define VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_SPEC_VERSION 5
+#define VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME "VK_ANDROID_external_memory_android_hardware_buffer"
+typedef struct VkAndroidHardwareBufferUsageANDROID {
+    VkStructureType    sType;
+    void*              pNext;
+    uint64_t           androidHardwareBufferUsage;
+} VkAndroidHardwareBufferUsageANDROID;
+
+typedef struct VkAndroidHardwareBufferPropertiesANDROID {
+    VkStructureType    sType;
+    void*              pNext;
+    VkDeviceSize       allocationSize;
+    uint32_t           memoryTypeBits;
+} VkAndroidHardwareBufferPropertiesANDROID;
+
+typedef struct VkAndroidHardwareBufferFormatPropertiesANDROID {
+    VkStructureType                  sType;
+    void*                            pNext;
+    VkFormat                         format;
+    uint64_t                         externalFormat;
+    VkFormatFeatureFlags             formatFeatures;
+    VkComponentMapping               samplerYcbcrConversionComponents;
+    VkSamplerYcbcrModelConversion    suggestedYcbcrModel;
+    VkSamplerYcbcrRange              suggestedYcbcrRange;
+    VkChromaLocation                 suggestedXChromaOffset;
+    VkChromaLocation                 suggestedYChromaOffset;
+} VkAndroidHardwareBufferFormatPropertiesANDROID;
+
+typedef struct VkImportAndroidHardwareBufferInfoANDROID {
+    VkStructureType            sType;
+    const void*                pNext;
+    struct AHardwareBuffer*    buffer;
+} VkImportAndroidHardwareBufferInfoANDROID;
+
+typedef struct VkMemoryGetAndroidHardwareBufferInfoANDROID {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkDeviceMemory     memory;
+} VkMemoryGetAndroidHardwareBufferInfoANDROID;
+
+typedef struct VkExternalFormatANDROID {
+    VkStructureType    sType;
+    void*              pNext;
+    uint64_t           externalFormat;
+} VkExternalFormatANDROID;
+
+typedef struct VkAndroidHardwareBufferFormatProperties2ANDROID {
+    VkStructureType                  sType;
+    void*                            pNext;
+    VkFormat                         format;
+    uint64_t                         externalFormat;
+    VkFormatFeatureFlags2            formatFeatures;
+    VkComponentMapping               samplerYcbcrConversionComponents;
+    VkSamplerYcbcrModelConversion    suggestedYcbcrModel;
+    VkSamplerYcbcrRange              suggestedYcbcrRange;
+    VkChromaLocation                 suggestedXChromaOffset;
+    VkChromaLocation                 suggestedYChromaOffset;
+} VkAndroidHardwareBufferFormatProperties2ANDROID;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetAndroidHardwareBufferPropertiesANDROID)(VkDevice device, const struct AHardwareBuffer* buffer, VkAndroidHardwareBufferPropertiesANDROID* pProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryAndroidHardwareBufferANDROID)(VkDevice device, const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetAndroidHardwareBufferPropertiesANDROID(
+    VkDevice                                    device,
+    const struct AHardwareBuffer*               buffer,
+    VkAndroidHardwareBufferPropertiesANDROID*   pProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryAndroidHardwareBufferANDROID(
+    VkDevice                                    device,
+    const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo,
+    struct AHardwareBuffer**                    pBuffer);
+#endif
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/host/libs/graphics_detector/include/vulkan/vulkan_beta.h b/host/libs/graphics_detector/include/vulkan/vulkan_beta.h
new file mode 100644
index 0000000..cfeda0e
--- /dev/null
+++ b/host/libs/graphics_detector/include/vulkan/vulkan_beta.h
@@ -0,0 +1,553 @@
+#ifndef VULKAN_BETA_H_
+#define VULKAN_BETA_H_ 1
+
+/*
+** Copyright 2015-2022 The Khronos Group Inc.
+**
+** SPDX-License-Identifier: Apache-2.0
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+#define VK_KHR_portability_subset 1
+#define VK_KHR_PORTABILITY_SUBSET_SPEC_VERSION 1
+#define VK_KHR_PORTABILITY_SUBSET_EXTENSION_NAME "VK_KHR_portability_subset"
+typedef struct VkPhysicalDevicePortabilitySubsetFeaturesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           constantAlphaColorBlendFactors;
+    VkBool32           events;
+    VkBool32           imageViewFormatReinterpretation;
+    VkBool32           imageViewFormatSwizzle;
+    VkBool32           imageView2DOn3DImage;
+    VkBool32           multisampleArrayImage;
+    VkBool32           mutableComparisonSamplers;
+    VkBool32           pointPolygons;
+    VkBool32           samplerMipLodBias;
+    VkBool32           separateStencilMaskRef;
+    VkBool32           shaderSampleRateInterpolationFunctions;
+    VkBool32           tessellationIsolines;
+    VkBool32           tessellationPointMode;
+    VkBool32           triangleFans;
+    VkBool32           vertexAttributeAccessBeyondStride;
+} VkPhysicalDevicePortabilitySubsetFeaturesKHR;
+
+typedef struct VkPhysicalDevicePortabilitySubsetPropertiesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           minVertexInputBindingStrideAlignment;
+} VkPhysicalDevicePortabilitySubsetPropertiesKHR;
+
+
+
+#define VK_KHR_video_encode_queue 1
+#define VK_KHR_VIDEO_ENCODE_QUEUE_SPEC_VERSION 7
+#define VK_KHR_VIDEO_ENCODE_QUEUE_EXTENSION_NAME "VK_KHR_video_encode_queue"
+
+typedef enum VkVideoEncodeTuningModeKHR {
+    VK_VIDEO_ENCODE_TUNING_MODE_DEFAULT_KHR = 0,
+    VK_VIDEO_ENCODE_TUNING_MODE_HIGH_QUALITY_KHR = 1,
+    VK_VIDEO_ENCODE_TUNING_MODE_LOW_LATENCY_KHR = 2,
+    VK_VIDEO_ENCODE_TUNING_MODE_ULTRA_LOW_LATENCY_KHR = 3,
+    VK_VIDEO_ENCODE_TUNING_MODE_LOSSLESS_KHR = 4,
+    VK_VIDEO_ENCODE_TUNING_MODE_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkVideoEncodeTuningModeKHR;
+typedef VkFlags VkVideoEncodeFlagsKHR;
+
+typedef enum VkVideoEncodeCapabilityFlagBitsKHR {
+    VK_VIDEO_ENCODE_CAPABILITY_PRECEDING_EXTERNALLY_ENCODED_BYTES_BIT_KHR = 0x00000001,
+    VK_VIDEO_ENCODE_CAPABILITY_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkVideoEncodeCapabilityFlagBitsKHR;
+typedef VkFlags VkVideoEncodeCapabilityFlagsKHR;
+
+typedef enum VkVideoEncodeRateControlModeFlagBitsKHR {
+    VK_VIDEO_ENCODE_RATE_CONTROL_MODE_NONE_BIT_KHR = 0,
+    VK_VIDEO_ENCODE_RATE_CONTROL_MODE_CBR_BIT_KHR = 1,
+    VK_VIDEO_ENCODE_RATE_CONTROL_MODE_VBR_BIT_KHR = 2,
+    VK_VIDEO_ENCODE_RATE_CONTROL_MODE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkVideoEncodeRateControlModeFlagBitsKHR;
+typedef VkFlags VkVideoEncodeRateControlModeFlagsKHR;
+
+typedef enum VkVideoEncodeUsageFlagBitsKHR {
+    VK_VIDEO_ENCODE_USAGE_DEFAULT_KHR = 0,
+    VK_VIDEO_ENCODE_USAGE_TRANSCODING_BIT_KHR = 0x00000001,
+    VK_VIDEO_ENCODE_USAGE_STREAMING_BIT_KHR = 0x00000002,
+    VK_VIDEO_ENCODE_USAGE_RECORDING_BIT_KHR = 0x00000004,
+    VK_VIDEO_ENCODE_USAGE_CONFERENCING_BIT_KHR = 0x00000008,
+    VK_VIDEO_ENCODE_USAGE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkVideoEncodeUsageFlagBitsKHR;
+typedef VkFlags VkVideoEncodeUsageFlagsKHR;
+
+typedef enum VkVideoEncodeContentFlagBitsKHR {
+    VK_VIDEO_ENCODE_CONTENT_DEFAULT_KHR = 0,
+    VK_VIDEO_ENCODE_CONTENT_CAMERA_BIT_KHR = 0x00000001,
+    VK_VIDEO_ENCODE_CONTENT_DESKTOP_BIT_KHR = 0x00000002,
+    VK_VIDEO_ENCODE_CONTENT_RENDERED_BIT_KHR = 0x00000004,
+    VK_VIDEO_ENCODE_CONTENT_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkVideoEncodeContentFlagBitsKHR;
+typedef VkFlags VkVideoEncodeContentFlagsKHR;
+typedef VkFlags VkVideoEncodeRateControlFlagsKHR;
+typedef struct VkVideoEncodeInfoKHR {
+    VkStructureType                       sType;
+    const void*                           pNext;
+    VkVideoEncodeFlagsKHR                 flags;
+    uint32_t                              qualityLevel;
+    VkBuffer                              dstBitstreamBuffer;
+    VkDeviceSize                          dstBitstreamBufferOffset;
+    VkDeviceSize                          dstBitstreamBufferMaxRange;
+    VkVideoPictureResourceInfoKHR         srcPictureResource;
+    const VkVideoReferenceSlotInfoKHR*    pSetupReferenceSlot;
+    uint32_t                              referenceSlotCount;
+    const VkVideoReferenceSlotInfoKHR*    pReferenceSlots;
+    uint32_t                              precedingExternallyEncodedBytes;
+} VkVideoEncodeInfoKHR;
+
+typedef struct VkVideoEncodeCapabilitiesKHR {
+    VkStructureType                         sType;
+    void*                                   pNext;
+    VkVideoEncodeCapabilityFlagsKHR         flags;
+    VkVideoEncodeRateControlModeFlagsKHR    rateControlModes;
+    uint8_t                                 rateControlLayerCount;
+    uint8_t                                 qualityLevelCount;
+    VkExtent2D                              inputImageDataFillAlignment;
+} VkVideoEncodeCapabilitiesKHR;
+
+typedef struct VkVideoEncodeUsageInfoKHR {
+    VkStructureType                 sType;
+    const void*                     pNext;
+    VkVideoEncodeUsageFlagsKHR      videoUsageHints;
+    VkVideoEncodeContentFlagsKHR    videoContentHints;
+    VkVideoEncodeTuningModeKHR      tuningMode;
+} VkVideoEncodeUsageInfoKHR;
+
+typedef struct VkVideoEncodeRateControlLayerInfoKHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           averageBitrate;
+    uint32_t           maxBitrate;
+    uint32_t           frameRateNumerator;
+    uint32_t           frameRateDenominator;
+    uint32_t           virtualBufferSizeInMs;
+    uint32_t           initialVirtualBufferSizeInMs;
+} VkVideoEncodeRateControlLayerInfoKHR;
+
+typedef struct VkVideoEncodeRateControlInfoKHR {
+    VkStructureType                                sType;
+    const void*                                    pNext;
+    VkVideoEncodeRateControlFlagsKHR               flags;
+    VkVideoEncodeRateControlModeFlagBitsKHR        rateControlMode;
+    uint8_t                                        layerCount;
+    const VkVideoEncodeRateControlLayerInfoKHR*    pLayerConfigs;
+} VkVideoEncodeRateControlInfoKHR;
+
+typedef void (VKAPI_PTR *PFN_vkCmdEncodeVideoKHR)(VkCommandBuffer commandBuffer, const VkVideoEncodeInfoKHR* pEncodeInfo);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdEncodeVideoKHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkVideoEncodeInfoKHR*                 pEncodeInfo);
+#endif
+
+
+#define VK_EXT_video_encode_h264 1
+#include "vk_video/vulkan_video_codec_h264std.h"
+#include "vk_video/vulkan_video_codec_h264std_encode.h"
+#define VK_EXT_VIDEO_ENCODE_H264_SPEC_VERSION 9
+#define VK_EXT_VIDEO_ENCODE_H264_EXTENSION_NAME "VK_EXT_video_encode_h264"
+
+typedef enum VkVideoEncodeH264RateControlStructureEXT {
+    VK_VIDEO_ENCODE_H264_RATE_CONTROL_STRUCTURE_UNKNOWN_EXT = 0,
+    VK_VIDEO_ENCODE_H264_RATE_CONTROL_STRUCTURE_FLAT_EXT = 1,
+    VK_VIDEO_ENCODE_H264_RATE_CONTROL_STRUCTURE_DYADIC_EXT = 2,
+    VK_VIDEO_ENCODE_H264_RATE_CONTROL_STRUCTURE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkVideoEncodeH264RateControlStructureEXT;
+
+typedef enum VkVideoEncodeH264CapabilityFlagBitsEXT {
+    VK_VIDEO_ENCODE_H264_CAPABILITY_DIRECT_8X8_INFERENCE_ENABLED_BIT_EXT = 0x00000001,
+    VK_VIDEO_ENCODE_H264_CAPABILITY_DIRECT_8X8_INFERENCE_DISABLED_BIT_EXT = 0x00000002,
+    VK_VIDEO_ENCODE_H264_CAPABILITY_SEPARATE_COLOUR_PLANE_BIT_EXT = 0x00000004,
+    VK_VIDEO_ENCODE_H264_CAPABILITY_QPPRIME_Y_ZERO_TRANSFORM_BYPASS_BIT_EXT = 0x00000008,
+    VK_VIDEO_ENCODE_H264_CAPABILITY_SCALING_LISTS_BIT_EXT = 0x00000010,
+    VK_VIDEO_ENCODE_H264_CAPABILITY_HRD_COMPLIANCE_BIT_EXT = 0x00000020,
+    VK_VIDEO_ENCODE_H264_CAPABILITY_CHROMA_QP_OFFSET_BIT_EXT = 0x00000040,
+    VK_VIDEO_ENCODE_H264_CAPABILITY_SECOND_CHROMA_QP_OFFSET_BIT_EXT = 0x00000080,
+    VK_VIDEO_ENCODE_H264_CAPABILITY_PIC_INIT_QP_MINUS26_BIT_EXT = 0x00000100,
+    VK_VIDEO_ENCODE_H264_CAPABILITY_WEIGHTED_PRED_BIT_EXT = 0x00000200,
+    VK_VIDEO_ENCODE_H264_CAPABILITY_WEIGHTED_BIPRED_EXPLICIT_BIT_EXT = 0x00000400,
+    VK_VIDEO_ENCODE_H264_CAPABILITY_WEIGHTED_BIPRED_IMPLICIT_BIT_EXT = 0x00000800,
+    VK_VIDEO_ENCODE_H264_CAPABILITY_WEIGHTED_PRED_NO_TABLE_BIT_EXT = 0x00001000,
+    VK_VIDEO_ENCODE_H264_CAPABILITY_TRANSFORM_8X8_BIT_EXT = 0x00002000,
+    VK_VIDEO_ENCODE_H264_CAPABILITY_CABAC_BIT_EXT = 0x00004000,
+    VK_VIDEO_ENCODE_H264_CAPABILITY_CAVLC_BIT_EXT = 0x00008000,
+    VK_VIDEO_ENCODE_H264_CAPABILITY_DEBLOCKING_FILTER_DISABLED_BIT_EXT = 0x00010000,
+    VK_VIDEO_ENCODE_H264_CAPABILITY_DEBLOCKING_FILTER_ENABLED_BIT_EXT = 0x00020000,
+    VK_VIDEO_ENCODE_H264_CAPABILITY_DEBLOCKING_FILTER_PARTIAL_BIT_EXT = 0x00040000,
+    VK_VIDEO_ENCODE_H264_CAPABILITY_DISABLE_DIRECT_SPATIAL_MV_PRED_BIT_EXT = 0x00080000,
+    VK_VIDEO_ENCODE_H264_CAPABILITY_MULTIPLE_SLICE_PER_FRAME_BIT_EXT = 0x00100000,
+    VK_VIDEO_ENCODE_H264_CAPABILITY_SLICE_MB_COUNT_BIT_EXT = 0x00200000,
+    VK_VIDEO_ENCODE_H264_CAPABILITY_ROW_UNALIGNED_SLICE_BIT_EXT = 0x00400000,
+    VK_VIDEO_ENCODE_H264_CAPABILITY_DIFFERENT_SLICE_TYPE_BIT_EXT = 0x00800000,
+    VK_VIDEO_ENCODE_H264_CAPABILITY_B_FRAME_IN_L1_LIST_BIT_EXT = 0x01000000,
+    VK_VIDEO_ENCODE_H264_CAPABILITY_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkVideoEncodeH264CapabilityFlagBitsEXT;
+typedef VkFlags VkVideoEncodeH264CapabilityFlagsEXT;
+
+typedef enum VkVideoEncodeH264InputModeFlagBitsEXT {
+    VK_VIDEO_ENCODE_H264_INPUT_MODE_FRAME_BIT_EXT = 0x00000001,
+    VK_VIDEO_ENCODE_H264_INPUT_MODE_SLICE_BIT_EXT = 0x00000002,
+    VK_VIDEO_ENCODE_H264_INPUT_MODE_NON_VCL_BIT_EXT = 0x00000004,
+    VK_VIDEO_ENCODE_H264_INPUT_MODE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkVideoEncodeH264InputModeFlagBitsEXT;
+typedef VkFlags VkVideoEncodeH264InputModeFlagsEXT;
+
+typedef enum VkVideoEncodeH264OutputModeFlagBitsEXT {
+    VK_VIDEO_ENCODE_H264_OUTPUT_MODE_FRAME_BIT_EXT = 0x00000001,
+    VK_VIDEO_ENCODE_H264_OUTPUT_MODE_SLICE_BIT_EXT = 0x00000002,
+    VK_VIDEO_ENCODE_H264_OUTPUT_MODE_NON_VCL_BIT_EXT = 0x00000004,
+    VK_VIDEO_ENCODE_H264_OUTPUT_MODE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkVideoEncodeH264OutputModeFlagBitsEXT;
+typedef VkFlags VkVideoEncodeH264OutputModeFlagsEXT;
+typedef struct VkVideoEncodeH264CapabilitiesEXT {
+    VkStructureType                        sType;
+    void*                                  pNext;
+    VkVideoEncodeH264CapabilityFlagsEXT    flags;
+    VkVideoEncodeH264InputModeFlagsEXT     inputModeFlags;
+    VkVideoEncodeH264OutputModeFlagsEXT    outputModeFlags;
+    uint8_t                                maxPPictureL0ReferenceCount;
+    uint8_t                                maxBPictureL0ReferenceCount;
+    uint8_t                                maxL1ReferenceCount;
+    VkBool32                               motionVectorsOverPicBoundariesFlag;
+    uint32_t                               maxBytesPerPicDenom;
+    uint32_t                               maxBitsPerMbDenom;
+    uint32_t                               log2MaxMvLengthHorizontal;
+    uint32_t                               log2MaxMvLengthVertical;
+} VkVideoEncodeH264CapabilitiesEXT;
+
+typedef struct VkVideoEncodeH264SessionParametersAddInfoEXT {
+    VkStructureType                            sType;
+    const void*                                pNext;
+    uint32_t                                   stdSPSCount;
+    const StdVideoH264SequenceParameterSet*    pStdSPSs;
+    uint32_t                                   stdPPSCount;
+    const StdVideoH264PictureParameterSet*     pStdPPSs;
+} VkVideoEncodeH264SessionParametersAddInfoEXT;
+
+typedef struct VkVideoEncodeH264SessionParametersCreateInfoEXT {
+    VkStructureType                                        sType;
+    const void*                                            pNext;
+    uint32_t                                               maxStdSPSCount;
+    uint32_t                                               maxStdPPSCount;
+    const VkVideoEncodeH264SessionParametersAddInfoEXT*    pParametersAddInfo;
+} VkVideoEncodeH264SessionParametersCreateInfoEXT;
+
+typedef struct VkVideoEncodeH264DpbSlotInfoEXT {
+    VkStructureType                           sType;
+    const void*                               pNext;
+    int8_t                                    slotIndex;
+    const StdVideoEncodeH264ReferenceInfo*    pStdReferenceInfo;
+} VkVideoEncodeH264DpbSlotInfoEXT;
+
+typedef struct VkVideoEncodeH264ReferenceListsInfoEXT {
+    VkStructureType                                      sType;
+    const void*                                          pNext;
+    uint8_t                                              referenceList0EntryCount;
+    const VkVideoEncodeH264DpbSlotInfoEXT*               pReferenceList0Entries;
+    uint8_t                                              referenceList1EntryCount;
+    const VkVideoEncodeH264DpbSlotInfoEXT*               pReferenceList1Entries;
+    const StdVideoEncodeH264RefMemMgmtCtrlOperations*    pMemMgmtCtrlOperations;
+} VkVideoEncodeH264ReferenceListsInfoEXT;
+
+typedef struct VkVideoEncodeH264NaluSliceInfoEXT {
+    VkStructureType                                  sType;
+    const void*                                      pNext;
+    uint32_t                                         mbCount;
+    const VkVideoEncodeH264ReferenceListsInfoEXT*    pReferenceFinalLists;
+    const StdVideoEncodeH264SliceHeader*             pSliceHeaderStd;
+} VkVideoEncodeH264NaluSliceInfoEXT;
+
+typedef struct VkVideoEncodeH264VclFrameInfoEXT {
+    VkStructureType                                  sType;
+    const void*                                      pNext;
+    const VkVideoEncodeH264ReferenceListsInfoEXT*    pReferenceFinalLists;
+    uint32_t                                         naluSliceEntryCount;
+    const VkVideoEncodeH264NaluSliceInfoEXT*         pNaluSliceEntries;
+    const StdVideoEncodeH264PictureInfo*             pCurrentPictureInfo;
+} VkVideoEncodeH264VclFrameInfoEXT;
+
+typedef struct VkVideoEncodeH264EmitPictureParametersInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint8_t            spsId;
+    VkBool32           emitSpsEnable;
+    uint32_t           ppsIdEntryCount;
+    const uint8_t*     ppsIdEntries;
+} VkVideoEncodeH264EmitPictureParametersInfoEXT;
+
+typedef struct VkVideoEncodeH264ProfileInfoEXT {
+    VkStructureType           sType;
+    const void*               pNext;
+    StdVideoH264ProfileIdc    stdProfileIdc;
+} VkVideoEncodeH264ProfileInfoEXT;
+
+typedef struct VkVideoEncodeH264RateControlInfoEXT {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    uint32_t                                    gopFrameCount;
+    uint32_t                                    idrPeriod;
+    uint32_t                                    consecutiveBFrameCount;
+    VkVideoEncodeH264RateControlStructureEXT    rateControlStructure;
+    uint8_t                                     temporalLayerCount;
+} VkVideoEncodeH264RateControlInfoEXT;
+
+typedef struct VkVideoEncodeH264QpEXT {
+    int32_t    qpI;
+    int32_t    qpP;
+    int32_t    qpB;
+} VkVideoEncodeH264QpEXT;
+
+typedef struct VkVideoEncodeH264FrameSizeEXT {
+    uint32_t    frameISize;
+    uint32_t    framePSize;
+    uint32_t    frameBSize;
+} VkVideoEncodeH264FrameSizeEXT;
+
+typedef struct VkVideoEncodeH264RateControlLayerInfoEXT {
+    VkStructureType                  sType;
+    const void*                      pNext;
+    uint8_t                          temporalLayerId;
+    VkBool32                         useInitialRcQp;
+    VkVideoEncodeH264QpEXT           initialRcQp;
+    VkBool32                         useMinQp;
+    VkVideoEncodeH264QpEXT           minQp;
+    VkBool32                         useMaxQp;
+    VkVideoEncodeH264QpEXT           maxQp;
+    VkBool32                         useMaxFrameSize;
+    VkVideoEncodeH264FrameSizeEXT    maxFrameSize;
+} VkVideoEncodeH264RateControlLayerInfoEXT;
+
+
+
+#define VK_EXT_video_encode_h265 1
+#include "vk_video/vulkan_video_codec_h265std.h"
+#include "vk_video/vulkan_video_codec_h265std_encode.h"
+#define VK_EXT_VIDEO_ENCODE_H265_SPEC_VERSION 9
+#define VK_EXT_VIDEO_ENCODE_H265_EXTENSION_NAME "VK_EXT_video_encode_h265"
+
+typedef enum VkVideoEncodeH265RateControlStructureEXT {
+    VK_VIDEO_ENCODE_H265_RATE_CONTROL_STRUCTURE_UNKNOWN_EXT = 0,
+    VK_VIDEO_ENCODE_H265_RATE_CONTROL_STRUCTURE_FLAT_EXT = 1,
+    VK_VIDEO_ENCODE_H265_RATE_CONTROL_STRUCTURE_DYADIC_EXT = 2,
+    VK_VIDEO_ENCODE_H265_RATE_CONTROL_STRUCTURE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkVideoEncodeH265RateControlStructureEXT;
+
+typedef enum VkVideoEncodeH265CapabilityFlagBitsEXT {
+    VK_VIDEO_ENCODE_H265_CAPABILITY_SEPARATE_COLOUR_PLANE_BIT_EXT = 0x00000001,
+    VK_VIDEO_ENCODE_H265_CAPABILITY_SCALING_LISTS_BIT_EXT = 0x00000002,
+    VK_VIDEO_ENCODE_H265_CAPABILITY_SAMPLE_ADAPTIVE_OFFSET_ENABLED_BIT_EXT = 0x00000004,
+    VK_VIDEO_ENCODE_H265_CAPABILITY_PCM_ENABLE_BIT_EXT = 0x00000008,
+    VK_VIDEO_ENCODE_H265_CAPABILITY_SPS_TEMPORAL_MVP_ENABLED_BIT_EXT = 0x00000010,
+    VK_VIDEO_ENCODE_H265_CAPABILITY_HRD_COMPLIANCE_BIT_EXT = 0x00000020,
+    VK_VIDEO_ENCODE_H265_CAPABILITY_INIT_QP_MINUS26_BIT_EXT = 0x00000040,
+    VK_VIDEO_ENCODE_H265_CAPABILITY_LOG2_PARALLEL_MERGE_LEVEL_MINUS2_BIT_EXT = 0x00000080,
+    VK_VIDEO_ENCODE_H265_CAPABILITY_SIGN_DATA_HIDING_ENABLED_BIT_EXT = 0x00000100,
+    VK_VIDEO_ENCODE_H265_CAPABILITY_TRANSFORM_SKIP_ENABLED_BIT_EXT = 0x00000200,
+    VK_VIDEO_ENCODE_H265_CAPABILITY_TRANSFORM_SKIP_DISABLED_BIT_EXT = 0x00000400,
+    VK_VIDEO_ENCODE_H265_CAPABILITY_PPS_SLICE_CHROMA_QP_OFFSETS_PRESENT_BIT_EXT = 0x00000800,
+    VK_VIDEO_ENCODE_H265_CAPABILITY_WEIGHTED_PRED_BIT_EXT = 0x00001000,
+    VK_VIDEO_ENCODE_H265_CAPABILITY_WEIGHTED_BIPRED_BIT_EXT = 0x00002000,
+    VK_VIDEO_ENCODE_H265_CAPABILITY_WEIGHTED_PRED_NO_TABLE_BIT_EXT = 0x00004000,
+    VK_VIDEO_ENCODE_H265_CAPABILITY_TRANSQUANT_BYPASS_ENABLED_BIT_EXT = 0x00008000,
+    VK_VIDEO_ENCODE_H265_CAPABILITY_ENTROPY_CODING_SYNC_ENABLED_BIT_EXT = 0x00010000,
+    VK_VIDEO_ENCODE_H265_CAPABILITY_DEBLOCKING_FILTER_OVERRIDE_ENABLED_BIT_EXT = 0x00020000,
+    VK_VIDEO_ENCODE_H265_CAPABILITY_MULTIPLE_TILE_PER_FRAME_BIT_EXT = 0x00040000,
+    VK_VIDEO_ENCODE_H265_CAPABILITY_MULTIPLE_SLICE_PER_TILE_BIT_EXT = 0x00080000,
+    VK_VIDEO_ENCODE_H265_CAPABILITY_MULTIPLE_TILE_PER_SLICE_BIT_EXT = 0x00100000,
+    VK_VIDEO_ENCODE_H265_CAPABILITY_SLICE_SEGMENT_CTB_COUNT_BIT_EXT = 0x00200000,
+    VK_VIDEO_ENCODE_H265_CAPABILITY_ROW_UNALIGNED_SLICE_SEGMENT_BIT_EXT = 0x00400000,
+    VK_VIDEO_ENCODE_H265_CAPABILITY_DEPENDENT_SLICE_SEGMENT_BIT_EXT = 0x00800000,
+    VK_VIDEO_ENCODE_H265_CAPABILITY_DIFFERENT_SLICE_TYPE_BIT_EXT = 0x01000000,
+    VK_VIDEO_ENCODE_H265_CAPABILITY_B_FRAME_IN_L1_LIST_BIT_EXT = 0x02000000,
+    VK_VIDEO_ENCODE_H265_CAPABILITY_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkVideoEncodeH265CapabilityFlagBitsEXT;
+typedef VkFlags VkVideoEncodeH265CapabilityFlagsEXT;
+
+typedef enum VkVideoEncodeH265InputModeFlagBitsEXT {
+    VK_VIDEO_ENCODE_H265_INPUT_MODE_FRAME_BIT_EXT = 0x00000001,
+    VK_VIDEO_ENCODE_H265_INPUT_MODE_SLICE_SEGMENT_BIT_EXT = 0x00000002,
+    VK_VIDEO_ENCODE_H265_INPUT_MODE_NON_VCL_BIT_EXT = 0x00000004,
+    VK_VIDEO_ENCODE_H265_INPUT_MODE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkVideoEncodeH265InputModeFlagBitsEXT;
+typedef VkFlags VkVideoEncodeH265InputModeFlagsEXT;
+
+typedef enum VkVideoEncodeH265OutputModeFlagBitsEXT {
+    VK_VIDEO_ENCODE_H265_OUTPUT_MODE_FRAME_BIT_EXT = 0x00000001,
+    VK_VIDEO_ENCODE_H265_OUTPUT_MODE_SLICE_SEGMENT_BIT_EXT = 0x00000002,
+    VK_VIDEO_ENCODE_H265_OUTPUT_MODE_NON_VCL_BIT_EXT = 0x00000004,
+    VK_VIDEO_ENCODE_H265_OUTPUT_MODE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkVideoEncodeH265OutputModeFlagBitsEXT;
+typedef VkFlags VkVideoEncodeH265OutputModeFlagsEXT;
+
+typedef enum VkVideoEncodeH265CtbSizeFlagBitsEXT {
+    VK_VIDEO_ENCODE_H265_CTB_SIZE_16_BIT_EXT = 0x00000001,
+    VK_VIDEO_ENCODE_H265_CTB_SIZE_32_BIT_EXT = 0x00000002,
+    VK_VIDEO_ENCODE_H265_CTB_SIZE_64_BIT_EXT = 0x00000004,
+    VK_VIDEO_ENCODE_H265_CTB_SIZE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkVideoEncodeH265CtbSizeFlagBitsEXT;
+typedef VkFlags VkVideoEncodeH265CtbSizeFlagsEXT;
+
+typedef enum VkVideoEncodeH265TransformBlockSizeFlagBitsEXT {
+    VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_4_BIT_EXT = 0x00000001,
+    VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_8_BIT_EXT = 0x00000002,
+    VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_16_BIT_EXT = 0x00000004,
+    VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_32_BIT_EXT = 0x00000008,
+    VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkVideoEncodeH265TransformBlockSizeFlagBitsEXT;
+typedef VkFlags VkVideoEncodeH265TransformBlockSizeFlagsEXT;
+typedef struct VkVideoEncodeH265CapabilitiesEXT {
+    VkStructureType                                sType;
+    void*                                          pNext;
+    VkVideoEncodeH265CapabilityFlagsEXT            flags;
+    VkVideoEncodeH265InputModeFlagsEXT             inputModeFlags;
+    VkVideoEncodeH265OutputModeFlagsEXT            outputModeFlags;
+    VkVideoEncodeH265CtbSizeFlagsEXT               ctbSizes;
+    VkVideoEncodeH265TransformBlockSizeFlagsEXT    transformBlockSizes;
+    uint8_t                                        maxPPictureL0ReferenceCount;
+    uint8_t                                        maxBPictureL0ReferenceCount;
+    uint8_t                                        maxL1ReferenceCount;
+    uint8_t                                        maxSubLayersCount;
+    uint8_t                                        minLog2MinLumaCodingBlockSizeMinus3;
+    uint8_t                                        maxLog2MinLumaCodingBlockSizeMinus3;
+    uint8_t                                        minLog2MinLumaTransformBlockSizeMinus2;
+    uint8_t                                        maxLog2MinLumaTransformBlockSizeMinus2;
+    uint8_t                                        minMaxTransformHierarchyDepthInter;
+    uint8_t                                        maxMaxTransformHierarchyDepthInter;
+    uint8_t                                        minMaxTransformHierarchyDepthIntra;
+    uint8_t                                        maxMaxTransformHierarchyDepthIntra;
+    uint8_t                                        maxDiffCuQpDeltaDepth;
+    uint8_t                                        minMaxNumMergeCand;
+    uint8_t                                        maxMaxNumMergeCand;
+} VkVideoEncodeH265CapabilitiesEXT;
+
+typedef struct VkVideoEncodeH265SessionParametersAddInfoEXT {
+    VkStructureType                            sType;
+    const void*                                pNext;
+    uint32_t                                   stdVPSCount;
+    const StdVideoH265VideoParameterSet*       pStdVPSs;
+    uint32_t                                   stdSPSCount;
+    const StdVideoH265SequenceParameterSet*    pStdSPSs;
+    uint32_t                                   stdPPSCount;
+    const StdVideoH265PictureParameterSet*     pStdPPSs;
+} VkVideoEncodeH265SessionParametersAddInfoEXT;
+
+typedef struct VkVideoEncodeH265SessionParametersCreateInfoEXT {
+    VkStructureType                                        sType;
+    const void*                                            pNext;
+    uint32_t                                               maxStdVPSCount;
+    uint32_t                                               maxStdSPSCount;
+    uint32_t                                               maxStdPPSCount;
+    const VkVideoEncodeH265SessionParametersAddInfoEXT*    pParametersAddInfo;
+} VkVideoEncodeH265SessionParametersCreateInfoEXT;
+
+typedef struct VkVideoEncodeH265DpbSlotInfoEXT {
+    VkStructureType                           sType;
+    const void*                               pNext;
+    int8_t                                    slotIndex;
+    const StdVideoEncodeH265ReferenceInfo*    pStdReferenceInfo;
+} VkVideoEncodeH265DpbSlotInfoEXT;
+
+typedef struct VkVideoEncodeH265ReferenceListsInfoEXT {
+    VkStructureType                                    sType;
+    const void*                                        pNext;
+    uint8_t                                            referenceList0EntryCount;
+    const VkVideoEncodeH265DpbSlotInfoEXT*             pReferenceList0Entries;
+    uint8_t                                            referenceList1EntryCount;
+    const VkVideoEncodeH265DpbSlotInfoEXT*             pReferenceList1Entries;
+    const StdVideoEncodeH265ReferenceModifications*    pReferenceModifications;
+} VkVideoEncodeH265ReferenceListsInfoEXT;
+
+typedef struct VkVideoEncodeH265NaluSliceSegmentInfoEXT {
+    VkStructureType                                  sType;
+    const void*                                      pNext;
+    uint32_t                                         ctbCount;
+    const VkVideoEncodeH265ReferenceListsInfoEXT*    pReferenceFinalLists;
+    const StdVideoEncodeH265SliceSegmentHeader*      pSliceSegmentHeaderStd;
+} VkVideoEncodeH265NaluSliceSegmentInfoEXT;
+
+typedef struct VkVideoEncodeH265VclFrameInfoEXT {
+    VkStructureType                                    sType;
+    const void*                                        pNext;
+    const VkVideoEncodeH265ReferenceListsInfoEXT*      pReferenceFinalLists;
+    uint32_t                                           naluSliceSegmentEntryCount;
+    const VkVideoEncodeH265NaluSliceSegmentInfoEXT*    pNaluSliceSegmentEntries;
+    const StdVideoEncodeH265PictureInfo*               pCurrentPictureInfo;
+} VkVideoEncodeH265VclFrameInfoEXT;
+
+typedef struct VkVideoEncodeH265EmitPictureParametersInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint8_t            vpsId;
+    uint8_t            spsId;
+    VkBool32           emitVpsEnable;
+    VkBool32           emitSpsEnable;
+    uint32_t           ppsIdEntryCount;
+    const uint8_t*     ppsIdEntries;
+} VkVideoEncodeH265EmitPictureParametersInfoEXT;
+
+typedef struct VkVideoEncodeH265ProfileInfoEXT {
+    VkStructureType           sType;
+    const void*               pNext;
+    StdVideoH265ProfileIdc    stdProfileIdc;
+} VkVideoEncodeH265ProfileInfoEXT;
+
+typedef struct VkVideoEncodeH265RateControlInfoEXT {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    uint32_t                                    gopFrameCount;
+    uint32_t                                    idrPeriod;
+    uint32_t                                    consecutiveBFrameCount;
+    VkVideoEncodeH265RateControlStructureEXT    rateControlStructure;
+    uint8_t                                     subLayerCount;
+} VkVideoEncodeH265RateControlInfoEXT;
+
+typedef struct VkVideoEncodeH265QpEXT {
+    int32_t    qpI;
+    int32_t    qpP;
+    int32_t    qpB;
+} VkVideoEncodeH265QpEXT;
+
+typedef struct VkVideoEncodeH265FrameSizeEXT {
+    uint32_t    frameISize;
+    uint32_t    framePSize;
+    uint32_t    frameBSize;
+} VkVideoEncodeH265FrameSizeEXT;
+
+typedef struct VkVideoEncodeH265RateControlLayerInfoEXT {
+    VkStructureType                  sType;
+    const void*                      pNext;
+    uint8_t                          temporalId;
+    VkBool32                         useInitialRcQp;
+    VkVideoEncodeH265QpEXT           initialRcQp;
+    VkBool32                         useMinQp;
+    VkVideoEncodeH265QpEXT           minQp;
+    VkBool32                         useMaxQp;
+    VkVideoEncodeH265QpEXT           maxQp;
+    VkBool32                         useMaxFrameSize;
+    VkVideoEncodeH265FrameSizeEXT    maxFrameSize;
+} VkVideoEncodeH265RateControlLayerInfoEXT;
+
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/host/libs/graphics_detector/include/vulkan/vulkan_core.h b/host/libs/graphics_detector/include/vulkan/vulkan_core.h
new file mode 100644
index 0000000..4c73571
--- /dev/null
+++ b/host/libs/graphics_detector/include/vulkan/vulkan_core.h
@@ -0,0 +1,16911 @@
+#ifndef VULKAN_CORE_H_
+#define VULKAN_CORE_H_ 1
+
+/*
+** Copyright 2015-2022 The Khronos Group Inc.
+**
+** SPDX-License-Identifier: Apache-2.0
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+#define VK_VERSION_1_0 1
+#include "vk_platform.h"
+
+#define VK_DEFINE_HANDLE(object) typedef struct object##_T* object;
+
+
+#ifndef VK_USE_64_BIT_PTR_DEFINES
+    #if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__)
+        #define VK_USE_64_BIT_PTR_DEFINES 1
+    #else
+        #define VK_USE_64_BIT_PTR_DEFINES 0
+    #endif
+#endif
+
+
+#ifndef VK_DEFINE_NON_DISPATCHABLE_HANDLE
+    #if (VK_USE_64_BIT_PTR_DEFINES==1)
+        #if (defined(__cplusplus) && (__cplusplus >= 201103L)) || (defined(_MSVC_LANG) && (_MSVC_LANG >= 201103L))
+            #define VK_NULL_HANDLE nullptr
+        #else
+            #define VK_NULL_HANDLE ((void*)0)
+        #endif
+    #else
+        #define VK_NULL_HANDLE 0ULL
+    #endif
+#endif
+#ifndef VK_NULL_HANDLE
+    #define VK_NULL_HANDLE 0
+#endif
+
+
+#ifndef VK_DEFINE_NON_DISPATCHABLE_HANDLE
+    #if (VK_USE_64_BIT_PTR_DEFINES==1)
+        #define VK_DEFINE_NON_DISPATCHABLE_HANDLE(object) typedef struct object##_T *object;
+    #else
+        #define VK_DEFINE_NON_DISPATCHABLE_HANDLE(object) typedef uint64_t object;
+    #endif
+#endif
+
+// DEPRECATED: This define is deprecated. VK_MAKE_API_VERSION should be used instead.
+#define VK_MAKE_VERSION(major, minor, patch) \
+    ((((uint32_t)(major)) << 22) | (((uint32_t)(minor)) << 12) | ((uint32_t)(patch)))
+
+// DEPRECATED: This define has been removed. Specific version defines (e.g. VK_API_VERSION_1_0), or the VK_MAKE_VERSION macro, should be used instead.
+//#define VK_API_VERSION VK_MAKE_VERSION(1, 0, 0) // Patch version should always be set to 0
+
+#define VK_MAKE_API_VERSION(variant, major, minor, patch) \
+    ((((uint32_t)(variant)) << 29) | (((uint32_t)(major)) << 22) | (((uint32_t)(minor)) << 12) | ((uint32_t)(patch)))
+
+// Vulkan 1.0 version number
+#define VK_API_VERSION_1_0 VK_MAKE_API_VERSION(0, 1, 0, 0)// Patch version should always be set to 0
+
+// Version of this file
+#define VK_HEADER_VERSION 239
+
+// Complete version of this file
+#define VK_HEADER_VERSION_COMPLETE VK_MAKE_API_VERSION(0, 1, 3, VK_HEADER_VERSION)
+
+// DEPRECATED: This define is deprecated. VK_API_VERSION_MAJOR should be used instead.
+#define VK_VERSION_MAJOR(version) ((uint32_t)(version) >> 22)
+
+// DEPRECATED: This define is deprecated. VK_API_VERSION_MINOR should be used instead.
+#define VK_VERSION_MINOR(version) (((uint32_t)(version) >> 12) & 0x3FFU)
+
+// DEPRECATED: This define is deprecated. VK_API_VERSION_PATCH should be used instead.
+#define VK_VERSION_PATCH(version) ((uint32_t)(version) & 0xFFFU)
+
+#define VK_API_VERSION_VARIANT(version) ((uint32_t)(version) >> 29)
+#define VK_API_VERSION_MAJOR(version) (((uint32_t)(version) >> 22) & 0x7FU)
+#define VK_API_VERSION_MINOR(version) (((uint32_t)(version) >> 12) & 0x3FFU)
+#define VK_API_VERSION_PATCH(version) ((uint32_t)(version) & 0xFFFU)
+typedef uint32_t VkBool32;
+typedef uint64_t VkDeviceAddress;
+typedef uint64_t VkDeviceSize;
+typedef uint32_t VkFlags;
+typedef uint32_t VkSampleMask;
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkBuffer)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkImage)
+VK_DEFINE_HANDLE(VkInstance)
+VK_DEFINE_HANDLE(VkPhysicalDevice)
+VK_DEFINE_HANDLE(VkDevice)
+VK_DEFINE_HANDLE(VkQueue)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSemaphore)
+VK_DEFINE_HANDLE(VkCommandBuffer)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkFence)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDeviceMemory)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkEvent)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkQueryPool)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkBufferView)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkImageView)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkShaderModule)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipelineCache)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipelineLayout)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipeline)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkRenderPass)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorSetLayout)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSampler)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorSet)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorPool)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkFramebuffer)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkCommandPool)
+#define VK_ATTACHMENT_UNUSED              (~0U)
+#define VK_FALSE                          0U
+#define VK_LOD_CLAMP_NONE                 1000.0F
+#define VK_QUEUE_FAMILY_IGNORED           (~0U)
+#define VK_REMAINING_ARRAY_LAYERS         (~0U)
+#define VK_REMAINING_MIP_LEVELS           (~0U)
+#define VK_SUBPASS_EXTERNAL               (~0U)
+#define VK_TRUE                           1U
+#define VK_WHOLE_SIZE                     (~0ULL)
+#define VK_MAX_MEMORY_TYPES               32U
+#define VK_MAX_PHYSICAL_DEVICE_NAME_SIZE  256U
+#define VK_UUID_SIZE                      16U
+#define VK_MAX_EXTENSION_NAME_SIZE        256U
+#define VK_MAX_DESCRIPTION_SIZE           256U
+#define VK_MAX_MEMORY_HEAPS               16U
+
+typedef enum VkResult {
+    VK_SUCCESS = 0,
+    VK_NOT_READY = 1,
+    VK_TIMEOUT = 2,
+    VK_EVENT_SET = 3,
+    VK_EVENT_RESET = 4,
+    VK_INCOMPLETE = 5,
+    VK_ERROR_OUT_OF_HOST_MEMORY = -1,
+    VK_ERROR_OUT_OF_DEVICE_MEMORY = -2,
+    VK_ERROR_INITIALIZATION_FAILED = -3,
+    VK_ERROR_DEVICE_LOST = -4,
+    VK_ERROR_MEMORY_MAP_FAILED = -5,
+    VK_ERROR_LAYER_NOT_PRESENT = -6,
+    VK_ERROR_EXTENSION_NOT_PRESENT = -7,
+    VK_ERROR_FEATURE_NOT_PRESENT = -8,
+    VK_ERROR_INCOMPATIBLE_DRIVER = -9,
+    VK_ERROR_TOO_MANY_OBJECTS = -10,
+    VK_ERROR_FORMAT_NOT_SUPPORTED = -11,
+    VK_ERROR_FRAGMENTED_POOL = -12,
+    VK_ERROR_UNKNOWN = -13,
+    VK_ERROR_OUT_OF_POOL_MEMORY = -1000069000,
+    VK_ERROR_INVALID_EXTERNAL_HANDLE = -1000072003,
+    VK_ERROR_FRAGMENTATION = -1000161000,
+    VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS = -1000257000,
+    VK_PIPELINE_COMPILE_REQUIRED = 1000297000,
+    VK_ERROR_SURFACE_LOST_KHR = -1000000000,
+    VK_ERROR_NATIVE_WINDOW_IN_USE_KHR = -1000000001,
+    VK_SUBOPTIMAL_KHR = 1000001003,
+    VK_ERROR_OUT_OF_DATE_KHR = -1000001004,
+    VK_ERROR_INCOMPATIBLE_DISPLAY_KHR = -1000003001,
+    VK_ERROR_VALIDATION_FAILED_EXT = -1000011001,
+    VK_ERROR_INVALID_SHADER_NV = -1000012000,
+    VK_ERROR_IMAGE_USAGE_NOT_SUPPORTED_KHR = -1000023000,
+    VK_ERROR_VIDEO_PICTURE_LAYOUT_NOT_SUPPORTED_KHR = -1000023001,
+    VK_ERROR_VIDEO_PROFILE_OPERATION_NOT_SUPPORTED_KHR = -1000023002,
+    VK_ERROR_VIDEO_PROFILE_FORMAT_NOT_SUPPORTED_KHR = -1000023003,
+    VK_ERROR_VIDEO_PROFILE_CODEC_NOT_SUPPORTED_KHR = -1000023004,
+    VK_ERROR_VIDEO_STD_VERSION_NOT_SUPPORTED_KHR = -1000023005,
+    VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT = -1000158000,
+    VK_ERROR_NOT_PERMITTED_KHR = -1000174001,
+    VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT = -1000255000,
+    VK_THREAD_IDLE_KHR = 1000268000,
+    VK_THREAD_DONE_KHR = 1000268001,
+    VK_OPERATION_DEFERRED_KHR = 1000268002,
+    VK_OPERATION_NOT_DEFERRED_KHR = 1000268003,
+    VK_ERROR_COMPRESSION_EXHAUSTED_EXT = -1000338000,
+    VK_ERROR_OUT_OF_POOL_MEMORY_KHR = VK_ERROR_OUT_OF_POOL_MEMORY,
+    VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR = VK_ERROR_INVALID_EXTERNAL_HANDLE,
+    VK_ERROR_FRAGMENTATION_EXT = VK_ERROR_FRAGMENTATION,
+    VK_ERROR_NOT_PERMITTED_EXT = VK_ERROR_NOT_PERMITTED_KHR,
+    VK_ERROR_INVALID_DEVICE_ADDRESS_EXT = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS,
+    VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS_KHR = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS,
+    VK_PIPELINE_COMPILE_REQUIRED_EXT = VK_PIPELINE_COMPILE_REQUIRED,
+    VK_ERROR_PIPELINE_COMPILE_REQUIRED_EXT = VK_PIPELINE_COMPILE_REQUIRED,
+    VK_RESULT_MAX_ENUM = 0x7FFFFFFF
+} VkResult;
+
+typedef enum VkStructureType {
+    VK_STRUCTURE_TYPE_APPLICATION_INFO = 0,
+    VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO = 1,
+    VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO = 2,
+    VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO = 3,
+    VK_STRUCTURE_TYPE_SUBMIT_INFO = 4,
+    VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO = 5,
+    VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE = 6,
+    VK_STRUCTURE_TYPE_BIND_SPARSE_INFO = 7,
+    VK_STRUCTURE_TYPE_FENCE_CREATE_INFO = 8,
+    VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO = 9,
+    VK_STRUCTURE_TYPE_EVENT_CREATE_INFO = 10,
+    VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO = 11,
+    VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO = 12,
+    VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO = 13,
+    VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO = 14,
+    VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO = 15,
+    VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO = 16,
+    VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO = 17,
+    VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO = 18,
+    VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO = 19,
+    VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO = 20,
+    VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO = 21,
+    VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO = 22,
+    VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO = 23,
+    VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO = 24,
+    VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO = 25,
+    VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO = 26,
+    VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO = 27,
+    VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO = 28,
+    VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO = 29,
+    VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO = 30,
+    VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO = 31,
+    VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO = 32,
+    VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO = 33,
+    VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO = 34,
+    VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET = 35,
+    VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET = 36,
+    VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO = 37,
+    VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO = 38,
+    VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO = 39,
+    VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO = 40,
+    VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO = 41,
+    VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO = 42,
+    VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO = 43,
+    VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER = 44,
+    VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER = 45,
+    VK_STRUCTURE_TYPE_MEMORY_BARRIER = 46,
+    VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO = 47,
+    VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO = 48,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES = 1000094000,
+    VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO = 1000157000,
+    VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO = 1000157001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES = 1000083000,
+    VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS = 1000127000,
+    VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO = 1000127001,
+    VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO = 1000060000,
+    VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO = 1000060003,
+    VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO = 1000060004,
+    VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO = 1000060005,
+    VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO = 1000060006,
+    VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO = 1000060013,
+    VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO = 1000060014,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES = 1000070000,
+    VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO = 1000070001,
+    VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2 = 1000146000,
+    VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2 = 1000146001,
+    VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2 = 1000146002,
+    VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2 = 1000146003,
+    VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2 = 1000146004,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2 = 1000059000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2 = 1000059001,
+    VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2 = 1000059002,
+    VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2 = 1000059003,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2 = 1000059004,
+    VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2 = 1000059005,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2 = 1000059006,
+    VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2 = 1000059007,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2 = 1000059008,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES = 1000117000,
+    VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO = 1000117001,
+    VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO = 1000117002,
+    VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO = 1000117003,
+    VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO = 1000053000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES = 1000053001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES = 1000053002,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES = 1000120000,
+    VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO = 1000145000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES = 1000145001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES = 1000145002,
+    VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2 = 1000145003,
+    VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO = 1000156000,
+    VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO = 1000156001,
+    VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO = 1000156002,
+    VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO = 1000156003,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES = 1000156004,
+    VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES = 1000156005,
+    VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO = 1000085000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO = 1000071000,
+    VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES = 1000071001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO = 1000071002,
+    VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES = 1000071003,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES = 1000071004,
+    VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO = 1000072000,
+    VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO = 1000072001,
+    VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO = 1000072002,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO = 1000112000,
+    VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES = 1000112001,
+    VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO = 1000113000,
+    VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO = 1000077000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO = 1000076000,
+    VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES = 1000076001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES = 1000168000,
+    VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT = 1000168001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES = 1000063000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES = 49,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES = 50,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES = 51,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_PROPERTIES = 52,
+    VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO = 1000147000,
+    VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2 = 1000109000,
+    VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2 = 1000109001,
+    VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2 = 1000109002,
+    VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2 = 1000109003,
+    VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2 = 1000109004,
+    VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO = 1000109005,
+    VK_STRUCTURE_TYPE_SUBPASS_END_INFO = 1000109006,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES = 1000177000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES = 1000196000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES = 1000180000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES = 1000082000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES = 1000197000,
+    VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO = 1000161000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES = 1000161001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES = 1000161002,
+    VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO = 1000161003,
+    VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT = 1000161004,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES = 1000199000,
+    VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE = 1000199001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES = 1000221000,
+    VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO = 1000246000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES = 1000130000,
+    VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO = 1000130001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES = 1000211000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES = 1000108000,
+    VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO = 1000108001,
+    VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO = 1000108002,
+    VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO = 1000108003,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES = 1000253000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES = 1000175000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES = 1000241000,
+    VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT = 1000241001,
+    VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT = 1000241002,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES = 1000261000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES = 1000207000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES = 1000207001,
+    VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO = 1000207002,
+    VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO = 1000207003,
+    VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO = 1000207004,
+    VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO = 1000207005,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES = 1000257000,
+    VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO = 1000244001,
+    VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO = 1000257002,
+    VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO = 1000257003,
+    VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO = 1000257004,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_FEATURES = 53,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_PROPERTIES = 54,
+    VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO = 1000192000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES = 1000215000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES = 1000245000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES = 1000276000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES = 1000295000,
+    VK_STRUCTURE_TYPE_DEVICE_PRIVATE_DATA_CREATE_INFO = 1000295001,
+    VK_STRUCTURE_TYPE_PRIVATE_DATA_SLOT_CREATE_INFO = 1000295002,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES = 1000297000,
+    VK_STRUCTURE_TYPE_MEMORY_BARRIER_2 = 1000314000,
+    VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER_2 = 1000314001,
+    VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2 = 1000314002,
+    VK_STRUCTURE_TYPE_DEPENDENCY_INFO = 1000314003,
+    VK_STRUCTURE_TYPE_SUBMIT_INFO_2 = 1000314004,
+    VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO = 1000314005,
+    VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO = 1000314006,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES = 1000314007,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES = 1000325000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES = 1000335000,
+    VK_STRUCTURE_TYPE_COPY_BUFFER_INFO_2 = 1000337000,
+    VK_STRUCTURE_TYPE_COPY_IMAGE_INFO_2 = 1000337001,
+    VK_STRUCTURE_TYPE_COPY_BUFFER_TO_IMAGE_INFO_2 = 1000337002,
+    VK_STRUCTURE_TYPE_COPY_IMAGE_TO_BUFFER_INFO_2 = 1000337003,
+    VK_STRUCTURE_TYPE_BLIT_IMAGE_INFO_2 = 1000337004,
+    VK_STRUCTURE_TYPE_RESOLVE_IMAGE_INFO_2 = 1000337005,
+    VK_STRUCTURE_TYPE_BUFFER_COPY_2 = 1000337006,
+    VK_STRUCTURE_TYPE_IMAGE_COPY_2 = 1000337007,
+    VK_STRUCTURE_TYPE_IMAGE_BLIT_2 = 1000337008,
+    VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2 = 1000337009,
+    VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2 = 1000337010,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES = 1000225000,
+    VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO = 1000225001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES = 1000225002,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES = 1000138000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES = 1000138001,
+    VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK = 1000138002,
+    VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO = 1000138003,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES = 1000066000,
+    VK_STRUCTURE_TYPE_RENDERING_INFO = 1000044000,
+    VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_INFO = 1000044001,
+    VK_STRUCTURE_TYPE_PIPELINE_RENDERING_CREATE_INFO = 1000044002,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES = 1000044003,
+    VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDERING_INFO = 1000044004,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_FEATURES = 1000280000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES = 1000280001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES = 1000281001,
+    VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_3 = 1000360000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES = 1000413000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES = 1000413001,
+    VK_STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS = 1000413002,
+    VK_STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS = 1000413003,
+    VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR = 1000001000,
+    VK_STRUCTURE_TYPE_PRESENT_INFO_KHR = 1000001001,
+    VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR = 1000060007,
+    VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR = 1000060008,
+    VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR = 1000060009,
+    VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR = 1000060010,
+    VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR = 1000060011,
+    VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR = 1000060012,
+    VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR = 1000002000,
+    VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR = 1000002001,
+    VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR = 1000003000,
+    VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR = 1000004000,
+    VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR = 1000005000,
+    VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR = 1000006000,
+    VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR = 1000008000,
+    VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR = 1000009000,
+    VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT = 1000011000,
+    VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD = 1000018000,
+    VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT = 1000022000,
+    VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT = 1000022001,
+    VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT = 1000022002,
+    VK_STRUCTURE_TYPE_VIDEO_PROFILE_INFO_KHR = 1000023000,
+    VK_STRUCTURE_TYPE_VIDEO_CAPABILITIES_KHR = 1000023001,
+    VK_STRUCTURE_TYPE_VIDEO_PICTURE_RESOURCE_INFO_KHR = 1000023002,
+    VK_STRUCTURE_TYPE_VIDEO_SESSION_MEMORY_REQUIREMENTS_KHR = 1000023003,
+    VK_STRUCTURE_TYPE_BIND_VIDEO_SESSION_MEMORY_INFO_KHR = 1000023004,
+    VK_STRUCTURE_TYPE_VIDEO_SESSION_CREATE_INFO_KHR = 1000023005,
+    VK_STRUCTURE_TYPE_VIDEO_SESSION_PARAMETERS_CREATE_INFO_KHR = 1000023006,
+    VK_STRUCTURE_TYPE_VIDEO_SESSION_PARAMETERS_UPDATE_INFO_KHR = 1000023007,
+    VK_STRUCTURE_TYPE_VIDEO_BEGIN_CODING_INFO_KHR = 1000023008,
+    VK_STRUCTURE_TYPE_VIDEO_END_CODING_INFO_KHR = 1000023009,
+    VK_STRUCTURE_TYPE_VIDEO_CODING_CONTROL_INFO_KHR = 1000023010,
+    VK_STRUCTURE_TYPE_VIDEO_REFERENCE_SLOT_INFO_KHR = 1000023011,
+    VK_STRUCTURE_TYPE_QUEUE_FAMILY_VIDEO_PROPERTIES_KHR = 1000023012,
+    VK_STRUCTURE_TYPE_VIDEO_PROFILE_LIST_INFO_KHR = 1000023013,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_FORMAT_INFO_KHR = 1000023014,
+    VK_STRUCTURE_TYPE_VIDEO_FORMAT_PROPERTIES_KHR = 1000023015,
+    VK_STRUCTURE_TYPE_QUEUE_FAMILY_QUERY_RESULT_STATUS_PROPERTIES_KHR = 1000023016,
+    VK_STRUCTURE_TYPE_VIDEO_DECODE_INFO_KHR = 1000024000,
+    VK_STRUCTURE_TYPE_VIDEO_DECODE_CAPABILITIES_KHR = 1000024001,
+    VK_STRUCTURE_TYPE_VIDEO_DECODE_USAGE_INFO_KHR = 1000024002,
+    VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV = 1000026000,
+    VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV = 1000026001,
+    VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV = 1000026002,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT = 1000028000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT = 1000028001,
+    VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT = 1000028002,
+    VK_STRUCTURE_TYPE_CU_MODULE_CREATE_INFO_NVX = 1000029000,
+    VK_STRUCTURE_TYPE_CU_FUNCTION_CREATE_INFO_NVX = 1000029001,
+    VK_STRUCTURE_TYPE_CU_LAUNCH_INFO_NVX = 1000029002,
+    VK_STRUCTURE_TYPE_IMAGE_VIEW_HANDLE_INFO_NVX = 1000030000,
+    VK_STRUCTURE_TYPE_IMAGE_VIEW_ADDRESS_PROPERTIES_NVX = 1000030001,
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_CAPABILITIES_EXT = 1000038000,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_CREATE_INFO_EXT = 1000038001,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_ADD_INFO_EXT = 1000038002,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_VCL_FRAME_INFO_EXT = 1000038003,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_DPB_SLOT_INFO_EXT = 1000038004,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_NALU_SLICE_INFO_EXT = 1000038005,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_EMIT_PICTURE_PARAMETERS_INFO_EXT = 1000038006,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_PROFILE_INFO_EXT = 1000038007,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_RATE_CONTROL_INFO_EXT = 1000038008,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_RATE_CONTROL_LAYER_INFO_EXT = 1000038009,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_REFERENCE_LISTS_INFO_EXT = 1000038010,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_CAPABILITIES_EXT = 1000039000,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_CREATE_INFO_EXT = 1000039001,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_ADD_INFO_EXT = 1000039002,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_VCL_FRAME_INFO_EXT = 1000039003,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_DPB_SLOT_INFO_EXT = 1000039004,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_NALU_SLICE_SEGMENT_INFO_EXT = 1000039005,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_EMIT_PICTURE_PARAMETERS_INFO_EXT = 1000039006,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_PROFILE_INFO_EXT = 1000039007,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_REFERENCE_LISTS_INFO_EXT = 1000039008,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_RATE_CONTROL_INFO_EXT = 1000039009,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_RATE_CONTROL_LAYER_INFO_EXT = 1000039010,
+#endif
+    VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_CAPABILITIES_KHR = 1000040000,
+    VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PICTURE_INFO_KHR = 1000040001,
+    VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PROFILE_INFO_KHR = 1000040003,
+    VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_PARAMETERS_CREATE_INFO_KHR = 1000040004,
+    VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_PARAMETERS_ADD_INFO_KHR = 1000040005,
+    VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_DPB_SLOT_INFO_KHR = 1000040006,
+    VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD = 1000041000,
+    VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR = 1000044006,
+    VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_INFO_EXT = 1000044007,
+    VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_AMD = 1000044008,
+    VK_STRUCTURE_TYPE_MULTIVIEW_PER_VIEW_ATTRIBUTES_INFO_NVX = 1000044009,
+    VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP = 1000049000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV = 1000050000,
+    VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV = 1000056000,
+    VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV = 1000056001,
+    VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV = 1000057000,
+    VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV = 1000057001,
+    VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV = 1000058000,
+    VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT = 1000061000,
+    VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN = 1000062000,
+    VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT = 1000067000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT = 1000067001,
+    VK_STRUCTURE_TYPE_PIPELINE_ROBUSTNESS_CREATE_INFO_EXT = 1000068000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_FEATURES_EXT = 1000068001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_PROPERTIES_EXT = 1000068002,
+    VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR = 1000073000,
+    VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR = 1000073001,
+    VK_STRUCTURE_TYPE_MEMORY_WIN32_HANDLE_PROPERTIES_KHR = 1000073002,
+    VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR = 1000073003,
+    VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR = 1000074000,
+    VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR = 1000074001,
+    VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR = 1000074002,
+    VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR = 1000075000,
+    VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR = 1000078000,
+    VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR = 1000078001,
+    VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR = 1000078002,
+    VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR = 1000078003,
+    VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR = 1000079000,
+    VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR = 1000079001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR = 1000080000,
+    VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT = 1000081000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT = 1000081001,
+    VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT = 1000081002,
+    VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR = 1000084000,
+    VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV = 1000087000,
+    VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT = 1000090000,
+    VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT = 1000091000,
+    VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT = 1000091001,
+    VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT = 1000091002,
+    VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT = 1000091003,
+    VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE = 1000092000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX = 1000097000,
+    VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV = 1000098000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT = 1000099000,
+    VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT = 1000099001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT = 1000101000,
+    VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT = 1000101001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT = 1000102000,
+    VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT = 1000102001,
+    VK_STRUCTURE_TYPE_HDR_METADATA_EXT = 1000105000,
+    VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR = 1000111000,
+    VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR = 1000114000,
+    VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR = 1000114001,
+    VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR = 1000114002,
+    VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR = 1000115000,
+    VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR = 1000115001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_FEATURES_KHR = 1000116000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_PROPERTIES_KHR = 1000116001,
+    VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR = 1000116002,
+    VK_STRUCTURE_TYPE_PERFORMANCE_QUERY_SUBMIT_INFO_KHR = 1000116003,
+    VK_STRUCTURE_TYPE_ACQUIRE_PROFILING_LOCK_INFO_KHR = 1000116004,
+    VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_KHR = 1000116005,
+    VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_DESCRIPTION_KHR = 1000116006,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR = 1000119000,
+    VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR = 1000119001,
+    VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR = 1000119002,
+    VK_STRUCTURE_TYPE_DISPLAY_PROPERTIES_2_KHR = 1000121000,
+    VK_STRUCTURE_TYPE_DISPLAY_PLANE_PROPERTIES_2_KHR = 1000121001,
+    VK_STRUCTURE_TYPE_DISPLAY_MODE_PROPERTIES_2_KHR = 1000121002,
+    VK_STRUCTURE_TYPE_DISPLAY_PLANE_INFO_2_KHR = 1000121003,
+    VK_STRUCTURE_TYPE_DISPLAY_PLANE_CAPABILITIES_2_KHR = 1000121004,
+    VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK = 1000122000,
+    VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK = 1000123000,
+    VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT = 1000128000,
+    VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_TAG_INFO_EXT = 1000128001,
+    VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT = 1000128002,
+    VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT = 1000128003,
+    VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT = 1000128004,
+    VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID = 1000129000,
+    VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID = 1000129001,
+    VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID = 1000129002,
+    VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID = 1000129003,
+    VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID = 1000129004,
+    VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID = 1000129005,
+    VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_2_ANDROID = 1000129006,
+    VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT = 1000143000,
+    VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT = 1000143001,
+    VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT = 1000143002,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT = 1000143003,
+    VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT = 1000143004,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT = 1000148000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT = 1000148001,
+    VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT = 1000148002,
+    VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV = 1000149000,
+    VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR = 1000150007,
+    VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_GEOMETRY_INFO_KHR = 1000150000,
+    VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_DEVICE_ADDRESS_INFO_KHR = 1000150002,
+    VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_AABBS_DATA_KHR = 1000150003,
+    VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_INSTANCES_DATA_KHR = 1000150004,
+    VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_TRIANGLES_DATA_KHR = 1000150005,
+    VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_KHR = 1000150006,
+    VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_VERSION_INFO_KHR = 1000150009,
+    VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_INFO_KHR = 1000150010,
+    VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_TO_MEMORY_INFO_KHR = 1000150011,
+    VK_STRUCTURE_TYPE_COPY_MEMORY_TO_ACCELERATION_STRUCTURE_INFO_KHR = 1000150012,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_FEATURES_KHR = 1000150013,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_PROPERTIES_KHR = 1000150014,
+    VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_KHR = 1000150017,
+    VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_SIZES_INFO_KHR = 1000150020,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_FEATURES_KHR = 1000347000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_PROPERTIES_KHR = 1000347001,
+    VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_KHR = 1000150015,
+    VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_KHR = 1000150016,
+    VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_INTERFACE_CREATE_INFO_KHR = 1000150018,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_QUERY_FEATURES_KHR = 1000348013,
+    VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV = 1000152000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV = 1000154000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV = 1000154001,
+    VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT = 1000158000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT = 1000158002,
+    VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT = 1000158003,
+    VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT = 1000158004,
+    VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT = 1000158005,
+    VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_2_EXT = 1000158006,
+    VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT = 1000160000,
+    VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT = 1000160001,
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_FEATURES_KHR = 1000163000,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_PROPERTIES_KHR = 1000163001,
+#endif
+    VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV = 1000164000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV = 1000164001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV = 1000164002,
+    VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV = 1000164005,
+    VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV = 1000165000,
+    VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV = 1000165001,
+    VK_STRUCTURE_TYPE_GEOMETRY_NV = 1000165003,
+    VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV = 1000165004,
+    VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV = 1000165005,
+    VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV = 1000165006,
+    VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV = 1000165007,
+    VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV = 1000165008,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV = 1000165009,
+    VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV = 1000165011,
+    VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV = 1000165012,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV = 1000166000,
+    VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV = 1000166001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT = 1000170000,
+    VK_STRUCTURE_TYPE_FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT = 1000170001,
+    VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT = 1000178000,
+    VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT = 1000178001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT = 1000178002,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR = 1000181000,
+    VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD = 1000183000,
+    VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT = 1000184000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD = 1000185000,
+    VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_CAPABILITIES_KHR = 1000187000,
+    VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_CREATE_INFO_KHR = 1000187001,
+    VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_ADD_INFO_KHR = 1000187002,
+    VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PROFILE_INFO_KHR = 1000187003,
+    VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PICTURE_INFO_KHR = 1000187004,
+    VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_DPB_SLOT_INFO_KHR = 1000187005,
+    VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_KHR = 1000174000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_KHR = 1000388000,
+    VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_KHR = 1000388001,
+    VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD = 1000189000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT = 1000190000,
+    VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT = 1000190001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT = 1000190002,
+    VK_STRUCTURE_TYPE_PRESENT_FRAME_TOKEN_GGP = 1000191000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV = 1000201000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV = 1000202000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV = 1000202001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV = 1000204000,
+    VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV = 1000205000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV = 1000205002,
+    VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV = 1000206000,
+    VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV = 1000206001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL = 1000209000,
+    VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_QUERY_CREATE_INFO_INTEL = 1000210000,
+    VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL = 1000210001,
+    VK_STRUCTURE_TYPE_PERFORMANCE_MARKER_INFO_INTEL = 1000210002,
+    VK_STRUCTURE_TYPE_PERFORMANCE_STREAM_MARKER_INFO_INTEL = 1000210003,
+    VK_STRUCTURE_TYPE_PERFORMANCE_OVERRIDE_INFO_INTEL = 1000210004,
+    VK_STRUCTURE_TYPE_PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL = 1000210005,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT = 1000212000,
+    VK_STRUCTURE_TYPE_DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD = 1000213000,
+    VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD = 1000213001,
+    VK_STRUCTURE_TYPE_IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA = 1000214000,
+    VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT = 1000217000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT = 1000218000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT = 1000218001,
+    VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT = 1000218002,
+    VK_STRUCTURE_TYPE_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR = 1000226000,
+    VK_STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_STATE_CREATE_INFO_KHR = 1000226001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_PROPERTIES_KHR = 1000226002,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_FEATURES_KHR = 1000226003,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_KHR = 1000226004,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD = 1000227000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD = 1000229000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_ATOMIC_INT64_FEATURES_EXT = 1000234000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT = 1000237000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT = 1000238000,
+    VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT = 1000238001,
+    VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR = 1000239000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV = 1000240000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT = 1000244000,
+    VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT = 1000244002,
+    VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT = 1000247000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_WAIT_FEATURES_KHR = 1000248000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV = 1000249000,
+    VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_NV = 1000249001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_NV = 1000249002,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV = 1000250000,
+    VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_REDUCTION_STATE_CREATE_INFO_NV = 1000250001,
+    VK_STRUCTURE_TYPE_FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV = 1000250002,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT = 1000251000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT = 1000252000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT = 1000254000,
+    VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_PROVOKING_VERTEX_STATE_CREATE_INFO_EXT = 1000254001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_PROPERTIES_EXT = 1000254002,
+    VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT = 1000255000,
+    VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT = 1000255002,
+    VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT = 1000255001,
+    VK_STRUCTURE_TYPE_HEADLESS_SURFACE_CREATE_INFO_EXT = 1000256000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT = 1000259000,
+    VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT = 1000259001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT = 1000259002,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_FEATURES_EXT = 1000260000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT = 1000265000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_FEATURES_EXT = 1000267000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR = 1000269000,
+    VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR = 1000269001,
+    VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR = 1000269002,
+    VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR = 1000269003,
+    VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR = 1000269004,
+    VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR = 1000269005,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_2_FEATURES_EXT = 1000273000,
+    VK_STRUCTURE_TYPE_SURFACE_PRESENT_MODE_EXT = 1000274000,
+    VK_STRUCTURE_TYPE_SURFACE_PRESENT_SCALING_CAPABILITIES_EXT = 1000274001,
+    VK_STRUCTURE_TYPE_SURFACE_PRESENT_MODE_COMPATIBILITY_EXT = 1000274002,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SWAPCHAIN_MAINTENANCE_1_FEATURES_EXT = 1000275000,
+    VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_FENCE_INFO_EXT = 1000275001,
+    VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_MODES_CREATE_INFO_EXT = 1000275002,
+    VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_MODE_INFO_EXT = 1000275003,
+    VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_SCALING_CREATE_INFO_EXT = 1000275004,
+    VK_STRUCTURE_TYPE_RELEASE_SWAPCHAIN_IMAGES_INFO_EXT = 1000275005,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_PROPERTIES_NV = 1000277000,
+    VK_STRUCTURE_TYPE_GRAPHICS_SHADER_GROUP_CREATE_INFO_NV = 1000277001,
+    VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_SHADER_GROUPS_CREATE_INFO_NV = 1000277002,
+    VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_TOKEN_NV = 1000277003,
+    VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NV = 1000277004,
+    VK_STRUCTURE_TYPE_GENERATED_COMMANDS_INFO_NV = 1000277005,
+    VK_STRUCTURE_TYPE_GENERATED_COMMANDS_MEMORY_REQUIREMENTS_INFO_NV = 1000277006,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_FEATURES_NV = 1000277007,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INHERITED_VIEWPORT_SCISSOR_FEATURES_NV = 1000278000,
+    VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_VIEWPORT_SCISSOR_INFO_NV = 1000278001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT = 1000281000,
+    VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDER_PASS_TRANSFORM_INFO_QCOM = 1000282000,
+    VK_STRUCTURE_TYPE_RENDER_PASS_TRANSFORM_BEGIN_INFO_QCOM = 1000282001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_MEMORY_REPORT_FEATURES_EXT = 1000284000,
+    VK_STRUCTURE_TYPE_DEVICE_DEVICE_MEMORY_REPORT_CREATE_INFO_EXT = 1000284001,
+    VK_STRUCTURE_TYPE_DEVICE_MEMORY_REPORT_CALLBACK_DATA_EXT = 1000284002,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_FEATURES_EXT = 1000286000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_PROPERTIES_EXT = 1000286001,
+    VK_STRUCTURE_TYPE_SAMPLER_CUSTOM_BORDER_COLOR_CREATE_INFO_EXT = 1000287000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_PROPERTIES_EXT = 1000287001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT = 1000287002,
+    VK_STRUCTURE_TYPE_PIPELINE_LIBRARY_CREATE_INFO_KHR = 1000290000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_BARRIER_FEATURES_NV = 1000292000,
+    VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_PRESENT_BARRIER_NV = 1000292001,
+    VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_BARRIER_CREATE_INFO_NV = 1000292002,
+    VK_STRUCTURE_TYPE_PRESENT_ID_KHR = 1000294000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_ID_FEATURES_KHR = 1000294001,
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_STRUCTURE_TYPE_VIDEO_ENCODE_INFO_KHR = 1000299000,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_STRUCTURE_TYPE_VIDEO_ENCODE_RATE_CONTROL_INFO_KHR = 1000299001,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_STRUCTURE_TYPE_VIDEO_ENCODE_RATE_CONTROL_LAYER_INFO_KHR = 1000299002,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_STRUCTURE_TYPE_VIDEO_ENCODE_CAPABILITIES_KHR = 1000299003,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_STRUCTURE_TYPE_VIDEO_ENCODE_USAGE_INFO_KHR = 1000299004,
+#endif
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DIAGNOSTICS_CONFIG_FEATURES_NV = 1000300000,
+    VK_STRUCTURE_TYPE_DEVICE_DIAGNOSTICS_CONFIG_CREATE_INFO_NV = 1000300001,
+    VK_STRUCTURE_TYPE_EXPORT_METAL_OBJECT_CREATE_INFO_EXT = 1000311000,
+    VK_STRUCTURE_TYPE_EXPORT_METAL_OBJECTS_INFO_EXT = 1000311001,
+    VK_STRUCTURE_TYPE_EXPORT_METAL_DEVICE_INFO_EXT = 1000311002,
+    VK_STRUCTURE_TYPE_EXPORT_METAL_COMMAND_QUEUE_INFO_EXT = 1000311003,
+    VK_STRUCTURE_TYPE_EXPORT_METAL_BUFFER_INFO_EXT = 1000311004,
+    VK_STRUCTURE_TYPE_IMPORT_METAL_BUFFER_INFO_EXT = 1000311005,
+    VK_STRUCTURE_TYPE_EXPORT_METAL_TEXTURE_INFO_EXT = 1000311006,
+    VK_STRUCTURE_TYPE_IMPORT_METAL_TEXTURE_INFO_EXT = 1000311007,
+    VK_STRUCTURE_TYPE_EXPORT_METAL_IO_SURFACE_INFO_EXT = 1000311008,
+    VK_STRUCTURE_TYPE_IMPORT_METAL_IO_SURFACE_INFO_EXT = 1000311009,
+    VK_STRUCTURE_TYPE_EXPORT_METAL_SHARED_EVENT_INFO_EXT = 1000311010,
+    VK_STRUCTURE_TYPE_IMPORT_METAL_SHARED_EVENT_INFO_EXT = 1000311011,
+    VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV = 1000314008,
+    VK_STRUCTURE_TYPE_CHECKPOINT_DATA_2_NV = 1000314009,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_PROPERTIES_EXT = 1000316000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_DENSITY_MAP_PROPERTIES_EXT = 1000316001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_FEATURES_EXT = 1000316002,
+    VK_STRUCTURE_TYPE_DESCRIPTOR_ADDRESS_INFO_EXT = 1000316003,
+    VK_STRUCTURE_TYPE_DESCRIPTOR_GET_INFO_EXT = 1000316004,
+    VK_STRUCTURE_TYPE_BUFFER_CAPTURE_DESCRIPTOR_DATA_INFO_EXT = 1000316005,
+    VK_STRUCTURE_TYPE_IMAGE_CAPTURE_DESCRIPTOR_DATA_INFO_EXT = 1000316006,
+    VK_STRUCTURE_TYPE_IMAGE_VIEW_CAPTURE_DESCRIPTOR_DATA_INFO_EXT = 1000316007,
+    VK_STRUCTURE_TYPE_SAMPLER_CAPTURE_DESCRIPTOR_DATA_INFO_EXT = 1000316008,
+    VK_STRUCTURE_TYPE_OPAQUE_CAPTURE_DESCRIPTOR_DATA_CREATE_INFO_EXT = 1000316010,
+    VK_STRUCTURE_TYPE_DESCRIPTOR_BUFFER_BINDING_INFO_EXT = 1000316011,
+    VK_STRUCTURE_TYPE_DESCRIPTOR_BUFFER_BINDING_PUSH_DESCRIPTOR_BUFFER_HANDLE_EXT = 1000316012,
+    VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CAPTURE_DESCRIPTOR_DATA_INFO_EXT = 1000316009,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_FEATURES_EXT = 1000320000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_PROPERTIES_EXT = 1000320001,
+    VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_LIBRARY_CREATE_INFO_EXT = 1000320002,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_EARLY_AND_LATE_FRAGMENT_TESTS_FEATURES_AMD = 1000321000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_KHR = 1000203000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_PROPERTIES_KHR = 1000322000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_FEATURES_KHR = 1000323000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_PROPERTIES_NV = 1000326000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_FEATURES_NV = 1000326001,
+    VK_STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_ENUM_STATE_CREATE_INFO_NV = 1000326002,
+    VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_MOTION_TRIANGLES_DATA_NV = 1000327000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_MOTION_BLUR_FEATURES_NV = 1000327001,
+    VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MOTION_INFO_NV = 1000327002,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_EXT = 1000328000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_EXT = 1000328001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_2_PLANE_444_FORMATS_FEATURES_EXT = 1000330000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_FEATURES_EXT = 1000332000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_PROPERTIES_EXT = 1000332001,
+    VK_STRUCTURE_TYPE_COPY_COMMAND_TRANSFORM_INFO_QCOM = 1000333000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_FEATURES_KHR = 1000336000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_FEATURES_EXT = 1000338000,
+    VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_CONTROL_EXT = 1000338001,
+    VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2_EXT = 1000338002,
+    VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2_EXT = 1000338003,
+    VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_PROPERTIES_EXT = 1000338004,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_FEATURES_EXT = 1000339000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_4444_FORMATS_FEATURES_EXT = 1000340000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FAULT_FEATURES_EXT = 1000341000,
+    VK_STRUCTURE_TYPE_DEVICE_FAULT_COUNTS_EXT = 1000341001,
+    VK_STRUCTURE_TYPE_DEVICE_FAULT_INFO_EXT = 1000341002,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RGBA10X6_FORMATS_FEATURES_EXT = 1000344000,
+    VK_STRUCTURE_TYPE_DIRECTFB_SURFACE_CREATE_INFO_EXT = 1000346000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_INPUT_DYNAMIC_STATE_FEATURES_EXT = 1000352000,
+    VK_STRUCTURE_TYPE_VERTEX_INPUT_BINDING_DESCRIPTION_2_EXT = 1000352001,
+    VK_STRUCTURE_TYPE_VERTEX_INPUT_ATTRIBUTE_DESCRIPTION_2_EXT = 1000352002,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRM_PROPERTIES_EXT = 1000353000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ADDRESS_BINDING_REPORT_FEATURES_EXT = 1000354000,
+    VK_STRUCTURE_TYPE_DEVICE_ADDRESS_BINDING_CALLBACK_DATA_EXT = 1000354001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_CONTROL_FEATURES_EXT = 1000355000,
+    VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_DEPTH_CLIP_CONTROL_CREATE_INFO_EXT = 1000355001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIMITIVE_TOPOLOGY_LIST_RESTART_FEATURES_EXT = 1000356000,
+    VK_STRUCTURE_TYPE_IMPORT_MEMORY_ZIRCON_HANDLE_INFO_FUCHSIA = 1000364000,
+    VK_STRUCTURE_TYPE_MEMORY_ZIRCON_HANDLE_PROPERTIES_FUCHSIA = 1000364001,
+    VK_STRUCTURE_TYPE_MEMORY_GET_ZIRCON_HANDLE_INFO_FUCHSIA = 1000364002,
+    VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_ZIRCON_HANDLE_INFO_FUCHSIA = 1000365000,
+    VK_STRUCTURE_TYPE_SEMAPHORE_GET_ZIRCON_HANDLE_INFO_FUCHSIA = 1000365001,
+    VK_STRUCTURE_TYPE_BUFFER_COLLECTION_CREATE_INFO_FUCHSIA = 1000366000,
+    VK_STRUCTURE_TYPE_IMPORT_MEMORY_BUFFER_COLLECTION_FUCHSIA = 1000366001,
+    VK_STRUCTURE_TYPE_BUFFER_COLLECTION_IMAGE_CREATE_INFO_FUCHSIA = 1000366002,
+    VK_STRUCTURE_TYPE_BUFFER_COLLECTION_PROPERTIES_FUCHSIA = 1000366003,
+    VK_STRUCTURE_TYPE_BUFFER_CONSTRAINTS_INFO_FUCHSIA = 1000366004,
+    VK_STRUCTURE_TYPE_BUFFER_COLLECTION_BUFFER_CREATE_INFO_FUCHSIA = 1000366005,
+    VK_STRUCTURE_TYPE_IMAGE_CONSTRAINTS_INFO_FUCHSIA = 1000366006,
+    VK_STRUCTURE_TYPE_IMAGE_FORMAT_CONSTRAINTS_INFO_FUCHSIA = 1000366007,
+    VK_STRUCTURE_TYPE_SYSMEM_COLOR_SPACE_FUCHSIA = 1000366008,
+    VK_STRUCTURE_TYPE_BUFFER_COLLECTION_CONSTRAINTS_INFO_FUCHSIA = 1000366009,
+    VK_STRUCTURE_TYPE_SUBPASS_SHADING_PIPELINE_CREATE_INFO_HUAWEI = 1000369000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBPASS_SHADING_FEATURES_HUAWEI = 1000369001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBPASS_SHADING_PROPERTIES_HUAWEI = 1000369002,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INVOCATION_MASK_FEATURES_HUAWEI = 1000370000,
+    VK_STRUCTURE_TYPE_MEMORY_GET_REMOTE_ADDRESS_INFO_NV = 1000371000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_RDMA_FEATURES_NV = 1000371001,
+    VK_STRUCTURE_TYPE_PIPELINE_PROPERTIES_IDENTIFIER_EXT = 1000372000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROPERTIES_FEATURES_EXT = 1000372001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_FEATURES_EXT = 1000376000,
+    VK_STRUCTURE_TYPE_SUBPASS_RESOLVE_PERFORMANCE_QUERY_EXT = 1000376001,
+    VK_STRUCTURE_TYPE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_INFO_EXT = 1000376002,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_2_FEATURES_EXT = 1000377000,
+    VK_STRUCTURE_TYPE_SCREEN_SURFACE_CREATE_INFO_QNX = 1000378000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COLOR_WRITE_ENABLE_FEATURES_EXT = 1000381000,
+    VK_STRUCTURE_TYPE_PIPELINE_COLOR_WRITE_CREATE_INFO_EXT = 1000381001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIMITIVES_GENERATED_QUERY_FEATURES_EXT = 1000382000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_MAINTENANCE_1_FEATURES_KHR = 1000386000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_MIN_LOD_FEATURES_EXT = 1000391000,
+    VK_STRUCTURE_TYPE_IMAGE_VIEW_MIN_LOD_CREATE_INFO_EXT = 1000391001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTI_DRAW_FEATURES_EXT = 1000392000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTI_DRAW_PROPERTIES_EXT = 1000392001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_2D_VIEW_OF_3D_FEATURES_EXT = 1000393000,
+    VK_STRUCTURE_TYPE_MICROMAP_BUILD_INFO_EXT = 1000396000,
+    VK_STRUCTURE_TYPE_MICROMAP_VERSION_INFO_EXT = 1000396001,
+    VK_STRUCTURE_TYPE_COPY_MICROMAP_INFO_EXT = 1000396002,
+    VK_STRUCTURE_TYPE_COPY_MICROMAP_TO_MEMORY_INFO_EXT = 1000396003,
+    VK_STRUCTURE_TYPE_COPY_MEMORY_TO_MICROMAP_INFO_EXT = 1000396004,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPACITY_MICROMAP_FEATURES_EXT = 1000396005,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPACITY_MICROMAP_PROPERTIES_EXT = 1000396006,
+    VK_STRUCTURE_TYPE_MICROMAP_CREATE_INFO_EXT = 1000396007,
+    VK_STRUCTURE_TYPE_MICROMAP_BUILD_SIZES_INFO_EXT = 1000396008,
+    VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_TRIANGLES_OPACITY_MICROMAP_EXT = 1000396009,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CLUSTER_CULLING_SHADER_FEATURES_HUAWEI = 1000404000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CLUSTER_CULLING_SHADER_PROPERTIES_HUAWEI = 1000404001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BORDER_COLOR_SWIZZLE_FEATURES_EXT = 1000411000,
+    VK_STRUCTURE_TYPE_SAMPLER_BORDER_COLOR_COMPONENT_MAPPING_CREATE_INFO_EXT = 1000411001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PAGEABLE_DEVICE_LOCAL_MEMORY_FEATURES_EXT = 1000412000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_SET_HOST_MAPPING_FEATURES_VALVE = 1000420000,
+    VK_STRUCTURE_TYPE_DESCRIPTOR_SET_BINDING_REFERENCE_VALVE = 1000420001,
+    VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_HOST_MAPPING_INFO_VALVE = 1000420002,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLAMP_ZERO_ONE_FEATURES_EXT = 1000421000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_NON_SEAMLESS_CUBE_MAP_FEATURES_EXT = 1000422000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_FEATURES_QCOM = 1000425000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_PROPERTIES_QCOM = 1000425001,
+    VK_STRUCTURE_TYPE_SUBPASS_FRAGMENT_DENSITY_MAP_OFFSET_END_INFO_QCOM = 1000425002,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_FEATURES_NV = 1000426000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_PROPERTIES_NV = 1000426001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_DECOMPRESSION_FEATURES_NV = 1000427000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_DECOMPRESSION_PROPERTIES_NV = 1000427001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINEAR_COLOR_ATTACHMENT_FEATURES_NV = 1000430000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_FEATURES_EXT = 1000437000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_FEATURES_QCOM = 1000440000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_PROPERTIES_QCOM = 1000440001,
+    VK_STRUCTURE_TYPE_IMAGE_VIEW_SAMPLE_WEIGHT_CREATE_INFO_QCOM = 1000440002,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_FEATURES_EXT = 1000455000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_PROPERTIES_EXT = 1000455001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBPASS_MERGE_FEEDBACK_FEATURES_EXT = 1000458000,
+    VK_STRUCTURE_TYPE_RENDER_PASS_CREATION_CONTROL_EXT = 1000458001,
+    VK_STRUCTURE_TYPE_RENDER_PASS_CREATION_FEEDBACK_CREATE_INFO_EXT = 1000458002,
+    VK_STRUCTURE_TYPE_RENDER_PASS_SUBPASS_FEEDBACK_CREATE_INFO_EXT = 1000458003,
+    VK_STRUCTURE_TYPE_DIRECT_DRIVER_LOADING_INFO_LUNARG = 1000459000,
+    VK_STRUCTURE_TYPE_DIRECT_DRIVER_LOADING_LIST_LUNARG = 1000459001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_MODULE_IDENTIFIER_FEATURES_EXT = 1000462000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_MODULE_IDENTIFIER_PROPERTIES_EXT = 1000462001,
+    VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_MODULE_IDENTIFIER_CREATE_INFO_EXT = 1000462002,
+    VK_STRUCTURE_TYPE_SHADER_MODULE_IDENTIFIER_EXT = 1000462003,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_EXT = 1000342000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPTICAL_FLOW_FEATURES_NV = 1000464000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPTICAL_FLOW_PROPERTIES_NV = 1000464001,
+    VK_STRUCTURE_TYPE_OPTICAL_FLOW_IMAGE_FORMAT_INFO_NV = 1000464002,
+    VK_STRUCTURE_TYPE_OPTICAL_FLOW_IMAGE_FORMAT_PROPERTIES_NV = 1000464003,
+    VK_STRUCTURE_TYPE_OPTICAL_FLOW_SESSION_CREATE_INFO_NV = 1000464004,
+    VK_STRUCTURE_TYPE_OPTICAL_FLOW_EXECUTE_INFO_NV = 1000464005,
+    VK_STRUCTURE_TYPE_OPTICAL_FLOW_SESSION_CREATE_PRIVATE_DATA_INFO_NV = 1000464010,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_DITHERING_FEATURES_EXT = 1000465000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROTECTED_ACCESS_FEATURES_EXT = 1000466000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TILE_PROPERTIES_FEATURES_QCOM = 1000484000,
+    VK_STRUCTURE_TYPE_TILE_PROPERTIES_QCOM = 1000484001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_AMIGO_PROFILING_FEATURES_SEC = 1000485000,
+    VK_STRUCTURE_TYPE_AMIGO_PROFILING_SUBMIT_INFO_SEC = 1000485001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_VIEWPORTS_FEATURES_QCOM = 1000488000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_INVOCATION_REORDER_FEATURES_NV = 1000490000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_INVOCATION_REORDER_PROPERTIES_NV = 1000490001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_EXT = 1000351000,
+    VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT = 1000351002,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_FEATURES_ARM = 1000497000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_PROPERTIES_ARM = 1000497001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES,
+    VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT,
+    VK_STRUCTURE_TYPE_RENDERING_INFO_KHR = VK_STRUCTURE_TYPE_RENDERING_INFO,
+    VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_INFO_KHR = VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_INFO,
+    VK_STRUCTURE_TYPE_PIPELINE_RENDERING_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_PIPELINE_RENDERING_CREATE_INFO,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES,
+    VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDERING_INFO_KHR = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDERING_INFO,
+    VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_NV = VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_AMD,
+    VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2,
+    VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2_KHR = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2,
+    VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2_KHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2,
+    VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2,
+    VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2_KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2,
+    VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO,
+    VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO,
+    VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO,
+    VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO,
+    VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO,
+    VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO_KHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO,
+    VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO_KHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES,
+    VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO,
+    VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES_KHR = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO,
+    VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES_KHR = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES,
+    VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO,
+    VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO,
+    VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO,
+    VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES_KHR = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES,
+    VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT16_INT8_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES,
+    VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO,
+    VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES2_EXT = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES,
+    VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO,
+    VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO,
+    VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR = VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO,
+    VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2,
+    VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2,
+    VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2,
+    VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2_KHR = VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2,
+    VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2,
+    VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR = VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO,
+    VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR = VK_STRUCTURE_TYPE_SUBPASS_END_INFO,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO,
+    VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES_KHR = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES,
+    VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES,
+    VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO,
+    VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO,
+    VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES_KHR,
+    VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS,
+    VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES,
+    VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES,
+    VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK,
+    VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO,
+    VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2,
+    VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2,
+    VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2_KHR = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2,
+    VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2,
+    VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2_KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2,
+    VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO,
+    VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO,
+    VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO_KHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO,
+    VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO_KHR = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO,
+    VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO_KHR = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES,
+    VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES_KHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES,
+    VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO,
+    VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO,
+    VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES,
+    VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO,
+    VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES,
+    VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT_KHR = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT,
+    VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_KHR,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES,
+    VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES,
+    VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_KHR,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES,
+    VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO,
+    VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO,
+    VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO_KHR = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO,
+    VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO_KHR = VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO,
+    VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL = VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_QUERY_CREATE_INFO_INTEL,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES,
+    VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES,
+    VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT_KHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT,
+    VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT_KHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_ADDRESS_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT,
+    VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_EXT = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES,
+    VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES,
+    VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_KHR = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO,
+    VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO,
+    VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO_KHR = VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO,
+    VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_FEATURES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES,
+    VK_STRUCTURE_TYPE_DEVICE_PRIVATE_DATA_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_DEVICE_PRIVATE_DATA_CREATE_INFO,
+    VK_STRUCTURE_TYPE_PRIVATE_DATA_SLOT_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_PRIVATE_DATA_SLOT_CREATE_INFO,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES,
+    VK_STRUCTURE_TYPE_MEMORY_BARRIER_2_KHR = VK_STRUCTURE_TYPE_MEMORY_BARRIER_2,
+    VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER_2_KHR = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER_2,
+    VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2_KHR = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2,
+    VK_STRUCTURE_TYPE_DEPENDENCY_INFO_KHR = VK_STRUCTURE_TYPE_DEPENDENCY_INFO,
+    VK_STRUCTURE_TYPE_SUBMIT_INFO_2_KHR = VK_STRUCTURE_TYPE_SUBMIT_INFO_2,
+    VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO_KHR = VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO,
+    VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO_KHR = VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES,
+    VK_STRUCTURE_TYPE_COPY_BUFFER_INFO_2_KHR = VK_STRUCTURE_TYPE_COPY_BUFFER_INFO_2,
+    VK_STRUCTURE_TYPE_COPY_IMAGE_INFO_2_KHR = VK_STRUCTURE_TYPE_COPY_IMAGE_INFO_2,
+    VK_STRUCTURE_TYPE_COPY_BUFFER_TO_IMAGE_INFO_2_KHR = VK_STRUCTURE_TYPE_COPY_BUFFER_TO_IMAGE_INFO_2,
+    VK_STRUCTURE_TYPE_COPY_IMAGE_TO_BUFFER_INFO_2_KHR = VK_STRUCTURE_TYPE_COPY_IMAGE_TO_BUFFER_INFO_2,
+    VK_STRUCTURE_TYPE_BLIT_IMAGE_INFO_2_KHR = VK_STRUCTURE_TYPE_BLIT_IMAGE_INFO_2,
+    VK_STRUCTURE_TYPE_RESOLVE_IMAGE_INFO_2_KHR = VK_STRUCTURE_TYPE_RESOLVE_IMAGE_INFO_2,
+    VK_STRUCTURE_TYPE_BUFFER_COPY_2_KHR = VK_STRUCTURE_TYPE_BUFFER_COPY_2,
+    VK_STRUCTURE_TYPE_IMAGE_COPY_2_KHR = VK_STRUCTURE_TYPE_IMAGE_COPY_2,
+    VK_STRUCTURE_TYPE_IMAGE_BLIT_2_KHR = VK_STRUCTURE_TYPE_IMAGE_BLIT_2,
+    VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2_KHR = VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2,
+    VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2_KHR = VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_ARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_EXT,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_VALVE = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_EXT,
+    VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_VALVE = VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT,
+    VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_3_KHR = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_3,
+    VK_STRUCTURE_TYPE_PIPELINE_INFO_EXT = VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_KHR,
+    VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_EXT = VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_KHR,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES,
+    VK_STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS_KHR = VK_STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS,
+    VK_STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS_KHR = VK_STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS,
+    VK_STRUCTURE_TYPE_MAX_ENUM = 0x7FFFFFFF
+} VkStructureType;
+
+// Version tag stored at the start of a pipeline-cache data blob.
+// Only version ONE (= 1) is defined in this header.
+typedef enum VkPipelineCacheHeaderVersion {
+    VK_PIPELINE_CACHE_HEADER_VERSION_ONE = 1,
+    // Sentinel; value chosen so the enum needs a full 32-bit representation.
+    VK_PIPELINE_CACHE_HEADER_VERSION_MAX_ENUM = 0x7FFFFFFF
+} VkPipelineCacheHeaderVersion;
+
+// Layouts an image (or image subresource) can be in. Values 0-8 are core
+// Vulkan 1.0; the 1000xxxyyy values were added by extensions/promoted
+// features (encoded as 1000000000 + ext_number*1000 + offset, per the
+// Vulkan registry numbering scheme); the trailing *_KHR/_NV names are
+// backwards-compatibility aliases of the promoted core values.
+typedef enum VkImageLayout {
+    VK_IMAGE_LAYOUT_UNDEFINED = 0,
+    VK_IMAGE_LAYOUT_GENERAL = 1,
+    VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL = 2,
+    VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL = 3,
+    VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL = 4,
+    VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL = 5,
+    VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL = 6,
+    VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL = 7,
+    VK_IMAGE_LAYOUT_PREINITIALIZED = 8,
+    // Extension-assigned values (promoted to core in later Vulkan versions).
+    VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL = 1000117000,
+    VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL = 1000117001,
+    VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL = 1000241000,
+    VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL = 1000241001,
+    VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL = 1000241002,
+    VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL = 1000241003,
+    VK_IMAGE_LAYOUT_READ_ONLY_OPTIMAL = 1000314000,
+    VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL = 1000314001,
+    VK_IMAGE_LAYOUT_PRESENT_SRC_KHR = 1000001002,
+    VK_IMAGE_LAYOUT_VIDEO_DECODE_DST_KHR = 1000024000,
+    VK_IMAGE_LAYOUT_VIDEO_DECODE_SRC_KHR = 1000024001,
+    VK_IMAGE_LAYOUT_VIDEO_DECODE_DPB_KHR = 1000024002,
+    VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR = 1000111000,
+    VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT = 1000218000,
+    VK_IMAGE_LAYOUT_FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR = 1000164003,
+    // Video-encode layouts are provisional: exposed only when the
+    // application defines VK_ENABLE_BETA_EXTENSIONS.
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_IMAGE_LAYOUT_VIDEO_ENCODE_DST_KHR = 1000299000,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_IMAGE_LAYOUT_VIDEO_ENCODE_SRC_KHR = 1000299001,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_IMAGE_LAYOUT_VIDEO_ENCODE_DPB_KHR = 1000299002,
+#endif
+    VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT = 1000339000,
+    // Legacy aliases kept for source compatibility with pre-promotion code.
+    VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL,
+    VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL,
+    VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV = VK_IMAGE_LAYOUT_FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR,
+    VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL,
+    VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL,
+    VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR = VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL,
+    VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR = VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL,
+    VK_IMAGE_LAYOUT_READ_ONLY_OPTIMAL_KHR = VK_IMAGE_LAYOUT_READ_ONLY_OPTIMAL,
+    VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL_KHR = VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL,
+    // Sentinel; value chosen so the enum needs a full 32-bit representation.
+    VK_IMAGE_LAYOUT_MAX_ENUM = 0x7FFFFFFF
+} VkImageLayout;
+
+// Runtime type tags for Vulkan handles (used e.g. by debug-utils naming).
+// Values 0-25 are core object types; the 1000xxxyyy values were assigned
+// by the extensions that introduced the corresponding handle types.
+typedef enum VkObjectType {
+    VK_OBJECT_TYPE_UNKNOWN = 0,
+    VK_OBJECT_TYPE_INSTANCE = 1,
+    VK_OBJECT_TYPE_PHYSICAL_DEVICE = 2,
+    VK_OBJECT_TYPE_DEVICE = 3,
+    VK_OBJECT_TYPE_QUEUE = 4,
+    VK_OBJECT_TYPE_SEMAPHORE = 5,
+    VK_OBJECT_TYPE_COMMAND_BUFFER = 6,
+    VK_OBJECT_TYPE_FENCE = 7,
+    VK_OBJECT_TYPE_DEVICE_MEMORY = 8,
+    VK_OBJECT_TYPE_BUFFER = 9,
+    VK_OBJECT_TYPE_IMAGE = 10,
+    VK_OBJECT_TYPE_EVENT = 11,
+    VK_OBJECT_TYPE_QUERY_POOL = 12,
+    VK_OBJECT_TYPE_BUFFER_VIEW = 13,
+    VK_OBJECT_TYPE_IMAGE_VIEW = 14,
+    VK_OBJECT_TYPE_SHADER_MODULE = 15,
+    VK_OBJECT_TYPE_PIPELINE_CACHE = 16,
+    VK_OBJECT_TYPE_PIPELINE_LAYOUT = 17,
+    VK_OBJECT_TYPE_RENDER_PASS = 18,
+    VK_OBJECT_TYPE_PIPELINE = 19,
+    VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT = 20,
+    VK_OBJECT_TYPE_SAMPLER = 21,
+    VK_OBJECT_TYPE_DESCRIPTOR_POOL = 22,
+    VK_OBJECT_TYPE_DESCRIPTOR_SET = 23,
+    VK_OBJECT_TYPE_FRAMEBUFFER = 24,
+    VK_OBJECT_TYPE_COMMAND_POOL = 25,
+    // Extension-assigned values (some later promoted to core).
+    VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION = 1000156000,
+    VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE = 1000085000,
+    VK_OBJECT_TYPE_PRIVATE_DATA_SLOT = 1000295000,
+    VK_OBJECT_TYPE_SURFACE_KHR = 1000000000,
+    VK_OBJECT_TYPE_SWAPCHAIN_KHR = 1000001000,
+    VK_OBJECT_TYPE_DISPLAY_KHR = 1000002000,
+    VK_OBJECT_TYPE_DISPLAY_MODE_KHR = 1000002001,
+    VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT = 1000011000,
+    VK_OBJECT_TYPE_VIDEO_SESSION_KHR = 1000023000,
+    VK_OBJECT_TYPE_VIDEO_SESSION_PARAMETERS_KHR = 1000023001,
+    VK_OBJECT_TYPE_CU_MODULE_NVX = 1000029000,
+    VK_OBJECT_TYPE_CU_FUNCTION_NVX = 1000029001,
+    VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT = 1000128000,
+    VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR = 1000150000,
+    VK_OBJECT_TYPE_VALIDATION_CACHE_EXT = 1000160000,
+    VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV = 1000165000,
+    VK_OBJECT_TYPE_PERFORMANCE_CONFIGURATION_INTEL = 1000210000,
+    VK_OBJECT_TYPE_DEFERRED_OPERATION_KHR = 1000268000,
+    VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NV = 1000277000,
+    VK_OBJECT_TYPE_BUFFER_COLLECTION_FUCHSIA = 1000366000,
+    VK_OBJECT_TYPE_MICROMAP_EXT = 1000396000,
+    VK_OBJECT_TYPE_OPTICAL_FLOW_SESSION_NV = 1000464000,
+    // Legacy aliases kept for source compatibility with pre-promotion code.
+    VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE,
+    VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION,
+    VK_OBJECT_TYPE_PRIVATE_DATA_SLOT_EXT = VK_OBJECT_TYPE_PRIVATE_DATA_SLOT,
+    // Sentinel; value chosen so the enum needs a full 32-bit representation.
+    VK_OBJECT_TYPE_MAX_ENUM = 0x7FFFFFFF
+} VkObjectType;
+
+// Vendor identifiers in the 0x10001+ range.
+// NOTE(review): presumably Khronos-allocated IDs for vendors that lack a
+// PCI vendor ID — confirm against the Vulkan specification's VkVendorId
+// description.
+typedef enum VkVendorId {
+    VK_VENDOR_ID_VIV = 0x10001,
+    VK_VENDOR_ID_VSI = 0x10002,
+    VK_VENDOR_ID_KAZAN = 0x10003,
+    VK_VENDOR_ID_CODEPLAY = 0x10004,
+    VK_VENDOR_ID_MESA = 0x10005,
+    VK_VENDOR_ID_POCL = 0x10006,
+    // Sentinel; value chosen so the enum needs a full 32-bit representation.
+    VK_VENDOR_ID_MAX_ENUM = 0x7FFFFFFF
+} VkVendorId;
+
+// Lifetime scope of a host allocation, passed to VkAllocationCallbacks so
+// the application's allocator knows how long the memory is expected to live
+// (per-command up to per-instance).
+typedef enum VkSystemAllocationScope {
+    VK_SYSTEM_ALLOCATION_SCOPE_COMMAND = 0,
+    VK_SYSTEM_ALLOCATION_SCOPE_OBJECT = 1,
+    VK_SYSTEM_ALLOCATION_SCOPE_CACHE = 2,
+    VK_SYSTEM_ALLOCATION_SCOPE_DEVICE = 3,
+    VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE = 4,
+    // Sentinel; value chosen so the enum needs a full 32-bit representation.
+    VK_SYSTEM_ALLOCATION_SCOPE_MAX_ENUM = 0x7FFFFFFF
+} VkSystemAllocationScope;
+
+// Category reported to the internal-allocation notification callbacks of
+// VkAllocationCallbacks; only EXECUTABLE (= 0) is defined in this header.
+typedef enum VkInternalAllocationType {
+    VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE = 0,
+    // Sentinel; value chosen so the enum needs a full 32-bit representation.
+    VK_INTERNAL_ALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
+} VkInternalAllocationType;
+
+typedef enum VkFormat {
+    VK_FORMAT_UNDEFINED = 0,
+    VK_FORMAT_R4G4_UNORM_PACK8 = 1,
+    VK_FORMAT_R4G4B4A4_UNORM_PACK16 = 2,
+    VK_FORMAT_B4G4R4A4_UNORM_PACK16 = 3,
+    VK_FORMAT_R5G6B5_UNORM_PACK16 = 4,
+    VK_FORMAT_B5G6R5_UNORM_PACK16 = 5,
+    VK_FORMAT_R5G5B5A1_UNORM_PACK16 = 6,
+    VK_FORMAT_B5G5R5A1_UNORM_PACK16 = 7,
+    VK_FORMAT_A1R5G5B5_UNORM_PACK16 = 8,
+    VK_FORMAT_R8_UNORM = 9,
+    VK_FORMAT_R8_SNORM = 10,
+    VK_FORMAT_R8_USCALED = 11,
+    VK_FORMAT_R8_SSCALED = 12,
+    VK_FORMAT_R8_UINT = 13,
+    VK_FORMAT_R8_SINT = 14,
+    VK_FORMAT_R8_SRGB = 15,
+    VK_FORMAT_R8G8_UNORM = 16,
+    VK_FORMAT_R8G8_SNORM = 17,
+    VK_FORMAT_R8G8_USCALED = 18,
+    VK_FORMAT_R8G8_SSCALED = 19,
+    VK_FORMAT_R8G8_UINT = 20,
+    VK_FORMAT_R8G8_SINT = 21,
+    VK_FORMAT_R8G8_SRGB = 22,
+    VK_FORMAT_R8G8B8_UNORM = 23,
+    VK_FORMAT_R8G8B8_SNORM = 24,
+    VK_FORMAT_R8G8B8_USCALED = 25,
+    VK_FORMAT_R8G8B8_SSCALED = 26,
+    VK_FORMAT_R8G8B8_UINT = 27,
+    VK_FORMAT_R8G8B8_SINT = 28,
+    VK_FORMAT_R8G8B8_SRGB = 29,
+    VK_FORMAT_B8G8R8_UNORM = 30,
+    VK_FORMAT_B8G8R8_SNORM = 31,
+    VK_FORMAT_B8G8R8_USCALED = 32,
+    VK_FORMAT_B8G8R8_SSCALED = 33,
+    VK_FORMAT_B8G8R8_UINT = 34,
+    VK_FORMAT_B8G8R8_SINT = 35,
+    VK_FORMAT_B8G8R8_SRGB = 36,
+    VK_FORMAT_R8G8B8A8_UNORM = 37,
+    VK_FORMAT_R8G8B8A8_SNORM = 38,
+    VK_FORMAT_R8G8B8A8_USCALED = 39,
+    VK_FORMAT_R8G8B8A8_SSCALED = 40,
+    VK_FORMAT_R8G8B8A8_UINT = 41,
+    VK_FORMAT_R8G8B8A8_SINT = 42,
+    VK_FORMAT_R8G8B8A8_SRGB = 43,
+    VK_FORMAT_B8G8R8A8_UNORM = 44,
+    VK_FORMAT_B8G8R8A8_SNORM = 45,
+    VK_FORMAT_B8G8R8A8_USCALED = 46,
+    VK_FORMAT_B8G8R8A8_SSCALED = 47,
+    VK_FORMAT_B8G8R8A8_UINT = 48,
+    VK_FORMAT_B8G8R8A8_SINT = 49,
+    VK_FORMAT_B8G8R8A8_SRGB = 50,
+    VK_FORMAT_A8B8G8R8_UNORM_PACK32 = 51,
+    VK_FORMAT_A8B8G8R8_SNORM_PACK32 = 52,
+    VK_FORMAT_A8B8G8R8_USCALED_PACK32 = 53,
+    VK_FORMAT_A8B8G8R8_SSCALED_PACK32 = 54,
+    VK_FORMAT_A8B8G8R8_UINT_PACK32 = 55,
+    VK_FORMAT_A8B8G8R8_SINT_PACK32 = 56,
+    VK_FORMAT_A8B8G8R8_SRGB_PACK32 = 57,
+    VK_FORMAT_A2R10G10B10_UNORM_PACK32 = 58,
+    VK_FORMAT_A2R10G10B10_SNORM_PACK32 = 59,
+    VK_FORMAT_A2R10G10B10_USCALED_PACK32 = 60,
+    VK_FORMAT_A2R10G10B10_SSCALED_PACK32 = 61,
+    VK_FORMAT_A2R10G10B10_UINT_PACK32 = 62,
+    VK_FORMAT_A2R10G10B10_SINT_PACK32 = 63,
+    VK_FORMAT_A2B10G10R10_UNORM_PACK32 = 64,
+    VK_FORMAT_A2B10G10R10_SNORM_PACK32 = 65,
+    VK_FORMAT_A2B10G10R10_USCALED_PACK32 = 66,
+    VK_FORMAT_A2B10G10R10_SSCALED_PACK32 = 67,
+    VK_FORMAT_A2B10G10R10_UINT_PACK32 = 68,
+    VK_FORMAT_A2B10G10R10_SINT_PACK32 = 69,
+    VK_FORMAT_R16_UNORM = 70,
+    VK_FORMAT_R16_SNORM = 71,
+    VK_FORMAT_R16_USCALED = 72,
+    VK_FORMAT_R16_SSCALED = 73,
+    VK_FORMAT_R16_UINT = 74,
+    VK_FORMAT_R16_SINT = 75,
+    VK_FORMAT_R16_SFLOAT = 76,
+    VK_FORMAT_R16G16_UNORM = 77,
+    VK_FORMAT_R16G16_SNORM = 78,
+    VK_FORMAT_R16G16_USCALED = 79,
+    VK_FORMAT_R16G16_SSCALED = 80,
+    VK_FORMAT_R16G16_UINT = 81,
+    VK_FORMAT_R16G16_SINT = 82,
+    VK_FORMAT_R16G16_SFLOAT = 83,
+    VK_FORMAT_R16G16B16_UNORM = 84,
+    VK_FORMAT_R16G16B16_SNORM = 85,
+    VK_FORMAT_R16G16B16_USCALED = 86,
+    VK_FORMAT_R16G16B16_SSCALED = 87,
+    VK_FORMAT_R16G16B16_UINT = 88,
+    VK_FORMAT_R16G16B16_SINT = 89,
+    VK_FORMAT_R16G16B16_SFLOAT = 90,
+    VK_FORMAT_R16G16B16A16_UNORM = 91,
+    VK_FORMAT_R16G16B16A16_SNORM = 92,
+    VK_FORMAT_R16G16B16A16_USCALED = 93,
+    VK_FORMAT_R16G16B16A16_SSCALED = 94,
+    VK_FORMAT_R16G16B16A16_UINT = 95,
+    VK_FORMAT_R16G16B16A16_SINT = 96,
+    VK_FORMAT_R16G16B16A16_SFLOAT = 97,
+    VK_FORMAT_R32_UINT = 98,
+    VK_FORMAT_R32_SINT = 99,
+    VK_FORMAT_R32_SFLOAT = 100,
+    VK_FORMAT_R32G32_UINT = 101,
+    VK_FORMAT_R32G32_SINT = 102,
+    VK_FORMAT_R32G32_SFLOAT = 103,
+    VK_FORMAT_R32G32B32_UINT = 104,
+    VK_FORMAT_R32G32B32_SINT = 105,
+    VK_FORMAT_R32G32B32_SFLOAT = 106,
+    VK_FORMAT_R32G32B32A32_UINT = 107,
+    VK_FORMAT_R32G32B32A32_SINT = 108,
+    VK_FORMAT_R32G32B32A32_SFLOAT = 109,
+    VK_FORMAT_R64_UINT = 110,
+    VK_FORMAT_R64_SINT = 111,
+    VK_FORMAT_R64_SFLOAT = 112,
+    VK_FORMAT_R64G64_UINT = 113,
+    VK_FORMAT_R64G64_SINT = 114,
+    VK_FORMAT_R64G64_SFLOAT = 115,
+    VK_FORMAT_R64G64B64_UINT = 116,
+    VK_FORMAT_R64G64B64_SINT = 117,
+    VK_FORMAT_R64G64B64_SFLOAT = 118,
+    VK_FORMAT_R64G64B64A64_UINT = 119,
+    VK_FORMAT_R64G64B64A64_SINT = 120,
+    VK_FORMAT_R64G64B64A64_SFLOAT = 121,
+    VK_FORMAT_B10G11R11_UFLOAT_PACK32 = 122,
+    VK_FORMAT_E5B9G9R9_UFLOAT_PACK32 = 123,
+    VK_FORMAT_D16_UNORM = 124,
+    VK_FORMAT_X8_D24_UNORM_PACK32 = 125,
+    VK_FORMAT_D32_SFLOAT = 126,
+    VK_FORMAT_S8_UINT = 127,
+    VK_FORMAT_D16_UNORM_S8_UINT = 128,
+    VK_FORMAT_D24_UNORM_S8_UINT = 129,
+    VK_FORMAT_D32_SFLOAT_S8_UINT = 130,
+    VK_FORMAT_BC1_RGB_UNORM_BLOCK = 131,
+    VK_FORMAT_BC1_RGB_SRGB_BLOCK = 132,
+    VK_FORMAT_BC1_RGBA_UNORM_BLOCK = 133,
+    VK_FORMAT_BC1_RGBA_SRGB_BLOCK = 134,
+    VK_FORMAT_BC2_UNORM_BLOCK = 135,
+    VK_FORMAT_BC2_SRGB_BLOCK = 136,
+    VK_FORMAT_BC3_UNORM_BLOCK = 137,
+    VK_FORMAT_BC3_SRGB_BLOCK = 138,
+    VK_FORMAT_BC4_UNORM_BLOCK = 139,
+    VK_FORMAT_BC4_SNORM_BLOCK = 140,
+    VK_FORMAT_BC5_UNORM_BLOCK = 141,
+    VK_FORMAT_BC5_SNORM_BLOCK = 142,
+    VK_FORMAT_BC6H_UFLOAT_BLOCK = 143,
+    VK_FORMAT_BC6H_SFLOAT_BLOCK = 144,
+    VK_FORMAT_BC7_UNORM_BLOCK = 145,
+    VK_FORMAT_BC7_SRGB_BLOCK = 146,
+    VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK = 147,
+    VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK = 148,
+    VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK = 149,
+    VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK = 150,
+    VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK = 151,
+    VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK = 152,
+    VK_FORMAT_EAC_R11_UNORM_BLOCK = 153,
+    VK_FORMAT_EAC_R11_SNORM_BLOCK = 154,
+    VK_FORMAT_EAC_R11G11_UNORM_BLOCK = 155,
+    VK_FORMAT_EAC_R11G11_SNORM_BLOCK = 156,
+    VK_FORMAT_ASTC_4x4_UNORM_BLOCK = 157,
+    VK_FORMAT_ASTC_4x4_SRGB_BLOCK = 158,
+    VK_FORMAT_ASTC_5x4_UNORM_BLOCK = 159,
+    VK_FORMAT_ASTC_5x4_SRGB_BLOCK = 160,
+    VK_FORMAT_ASTC_5x5_UNORM_BLOCK = 161,
+    VK_FORMAT_ASTC_5x5_SRGB_BLOCK = 162,
+    VK_FORMAT_ASTC_6x5_UNORM_BLOCK = 163,
+    VK_FORMAT_ASTC_6x5_SRGB_BLOCK = 164,
+    VK_FORMAT_ASTC_6x6_UNORM_BLOCK = 165,
+    VK_FORMAT_ASTC_6x6_SRGB_BLOCK = 166,
+    VK_FORMAT_ASTC_8x5_UNORM_BLOCK = 167,
+    VK_FORMAT_ASTC_8x5_SRGB_BLOCK = 168,
+    VK_FORMAT_ASTC_8x6_UNORM_BLOCK = 169,
+    VK_FORMAT_ASTC_8x6_SRGB_BLOCK = 170,
+    VK_FORMAT_ASTC_8x8_UNORM_BLOCK = 171,
+    VK_FORMAT_ASTC_8x8_SRGB_BLOCK = 172,
+    VK_FORMAT_ASTC_10x5_UNORM_BLOCK = 173,
+    VK_FORMAT_ASTC_10x5_SRGB_BLOCK = 174,
+    VK_FORMAT_ASTC_10x6_UNORM_BLOCK = 175,
+    VK_FORMAT_ASTC_10x6_SRGB_BLOCK = 176,
+    VK_FORMAT_ASTC_10x8_UNORM_BLOCK = 177,
+    VK_FORMAT_ASTC_10x8_SRGB_BLOCK = 178,
+    VK_FORMAT_ASTC_10x10_UNORM_BLOCK = 179,
+    VK_FORMAT_ASTC_10x10_SRGB_BLOCK = 180,
+    VK_FORMAT_ASTC_12x10_UNORM_BLOCK = 181,
+    VK_FORMAT_ASTC_12x10_SRGB_BLOCK = 182,
+    VK_FORMAT_ASTC_12x12_UNORM_BLOCK = 183,
+    VK_FORMAT_ASTC_12x12_SRGB_BLOCK = 184,
+    VK_FORMAT_G8B8G8R8_422_UNORM = 1000156000,
+    VK_FORMAT_B8G8R8G8_422_UNORM = 1000156001,
+    VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM = 1000156002,
+    VK_FORMAT_G8_B8R8_2PLANE_420_UNORM = 1000156003,
+    VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM = 1000156004,
+    VK_FORMAT_G8_B8R8_2PLANE_422_UNORM = 1000156005,
+    VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM = 1000156006,
+    VK_FORMAT_R10X6_UNORM_PACK16 = 1000156007,
+    VK_FORMAT_R10X6G10X6_UNORM_2PACK16 = 1000156008,
+    VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16 = 1000156009,
+    VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16 = 1000156010,
+    VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16 = 1000156011,
+    VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16 = 1000156012,
+    VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16 = 1000156013,
+    VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16 = 1000156014,
+    VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16 = 1000156015,
+    VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16 = 1000156016,
+    VK_FORMAT_R12X4_UNORM_PACK16 = 1000156017,
+    VK_FORMAT_R12X4G12X4_UNORM_2PACK16 = 1000156018,
+    VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16 = 1000156019,
+    VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16 = 1000156020,
+    VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16 = 1000156021,
+    VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16 = 1000156022,
+    VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16 = 1000156023,
+    VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16 = 1000156024,
+    VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16 = 1000156025,
+    VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16 = 1000156026,
+    VK_FORMAT_G16B16G16R16_422_UNORM = 1000156027,
+    VK_FORMAT_B16G16R16G16_422_UNORM = 1000156028,
+    VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM = 1000156029,
+    VK_FORMAT_G16_B16R16_2PLANE_420_UNORM = 1000156030,
+    VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM = 1000156031,
+    VK_FORMAT_G16_B16R16_2PLANE_422_UNORM = 1000156032,
+    VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM = 1000156033,
+    VK_FORMAT_G8_B8R8_2PLANE_444_UNORM = 1000330000,
+    VK_FORMAT_G10X6_B10X6R10X6_2PLANE_444_UNORM_3PACK16 = 1000330001,
+    VK_FORMAT_G12X4_B12X4R12X4_2PLANE_444_UNORM_3PACK16 = 1000330002,
+    VK_FORMAT_G16_B16R16_2PLANE_444_UNORM = 1000330003,
+    VK_FORMAT_A4R4G4B4_UNORM_PACK16 = 1000340000,
+    VK_FORMAT_A4B4G4R4_UNORM_PACK16 = 1000340001,
+    VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK = 1000066000,
+    VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK = 1000066001,
+    VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK = 1000066002,
+    VK_FORMAT_ASTC_6x5_SFLOAT_BLOCK = 1000066003,
+    VK_FORMAT_ASTC_6x6_SFLOAT_BLOCK = 1000066004,
+    VK_FORMAT_ASTC_8x5_SFLOAT_BLOCK = 1000066005,
+    VK_FORMAT_ASTC_8x6_SFLOAT_BLOCK = 1000066006,
+    VK_FORMAT_ASTC_8x8_SFLOAT_BLOCK = 1000066007,
+    VK_FORMAT_ASTC_10x5_SFLOAT_BLOCK = 1000066008,
+    VK_FORMAT_ASTC_10x6_SFLOAT_BLOCK = 1000066009,
+    VK_FORMAT_ASTC_10x8_SFLOAT_BLOCK = 1000066010,
+    VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK = 1000066011,
+    VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK = 1000066012,
+    VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK = 1000066013,
+    VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG = 1000054000,
+    VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG = 1000054001,
+    VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG = 1000054002,
+    VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG = 1000054003,
+    VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG = 1000054004,
+    VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG = 1000054005,
+    VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG = 1000054006,
+    VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG = 1000054007,
+    VK_FORMAT_R16G16_S10_5_NV = 1000464000,
+    VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK,
+    VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK,
+    VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK,
+    VK_FORMAT_ASTC_6x5_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_6x5_SFLOAT_BLOCK,
+    VK_FORMAT_ASTC_6x6_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_6x6_SFLOAT_BLOCK,
+    VK_FORMAT_ASTC_8x5_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_8x5_SFLOAT_BLOCK,
+    VK_FORMAT_ASTC_8x6_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_8x6_SFLOAT_BLOCK,
+    VK_FORMAT_ASTC_8x8_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_8x8_SFLOAT_BLOCK,
+    VK_FORMAT_ASTC_10x5_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_10x5_SFLOAT_BLOCK,
+    VK_FORMAT_ASTC_10x6_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_10x6_SFLOAT_BLOCK,
+    VK_FORMAT_ASTC_10x8_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_10x8_SFLOAT_BLOCK,
+    VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK,
+    VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK,
+    VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK,
+    VK_FORMAT_G8B8G8R8_422_UNORM_KHR = VK_FORMAT_G8B8G8R8_422_UNORM,
+    VK_FORMAT_B8G8R8G8_422_UNORM_KHR = VK_FORMAT_B8G8R8G8_422_UNORM,
+    VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM_KHR = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM,
+    VK_FORMAT_G8_B8R8_2PLANE_420_UNORM_KHR = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM,
+    VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM_KHR = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM,
+    VK_FORMAT_G8_B8R8_2PLANE_422_UNORM_KHR = VK_FORMAT_G8_B8R8_2PLANE_422_UNORM,
+    VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM_KHR = VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM,
+    VK_FORMAT_R10X6_UNORM_PACK16_KHR = VK_FORMAT_R10X6_UNORM_PACK16,
+    VK_FORMAT_R10X6G10X6_UNORM_2PACK16_KHR = VK_FORMAT_R10X6G10X6_UNORM_2PACK16,
+    VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16_KHR = VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16,
+    VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16_KHR = VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16,
+    VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16_KHR = VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16,
+    VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16_KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16,
+    VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16_KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16,
+    VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16_KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16,
+    VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16_KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16,
+    VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16_KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16,
+    VK_FORMAT_R12X4_UNORM_PACK16_KHR = VK_FORMAT_R12X4_UNORM_PACK16,
+    VK_FORMAT_R12X4G12X4_UNORM_2PACK16_KHR = VK_FORMAT_R12X4G12X4_UNORM_2PACK16,
+    VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16_KHR = VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16,
+    VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16_KHR = VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16,
+    VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16_KHR = VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16,
+    VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16_KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16,
+    VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16_KHR = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16,
+    VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16_KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16,
+    VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16_KHR = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16,
+    VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16_KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16,
+    VK_FORMAT_G16B16G16R16_422_UNORM_KHR = VK_FORMAT_G16B16G16R16_422_UNORM,
+    VK_FORMAT_B16G16R16G16_422_UNORM_KHR = VK_FORMAT_B16G16R16G16_422_UNORM,
+    VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM_KHR = VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM,
+    VK_FORMAT_G16_B16R16_2PLANE_420_UNORM_KHR = VK_FORMAT_G16_B16R16_2PLANE_420_UNORM,
+    VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM_KHR = VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM,
+    VK_FORMAT_G16_B16R16_2PLANE_422_UNORM_KHR = VK_FORMAT_G16_B16R16_2PLANE_422_UNORM,
+    VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM_KHR = VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM,
+    VK_FORMAT_G8_B8R8_2PLANE_444_UNORM_EXT = VK_FORMAT_G8_B8R8_2PLANE_444_UNORM,
+    VK_FORMAT_G10X6_B10X6R10X6_2PLANE_444_UNORM_3PACK16_EXT = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_444_UNORM_3PACK16,
+    VK_FORMAT_G12X4_B12X4R12X4_2PLANE_444_UNORM_3PACK16_EXT = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_444_UNORM_3PACK16,
+    VK_FORMAT_G16_B16R16_2PLANE_444_UNORM_EXT = VK_FORMAT_G16_B16R16_2PLANE_444_UNORM,
+    VK_FORMAT_A4R4G4B4_UNORM_PACK16_EXT = VK_FORMAT_A4R4G4B4_UNORM_PACK16,
+    VK_FORMAT_A4B4G4R4_UNORM_PACK16_EXT = VK_FORMAT_A4B4G4R4_UNORM_PACK16,
+    VK_FORMAT_MAX_ENUM = 0x7FFFFFFF
+} VkFormat;
+
+typedef enum VkImageTiling {
+    VK_IMAGE_TILING_OPTIMAL = 0,
+    VK_IMAGE_TILING_LINEAR = 1,
+    VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT = 1000158000,
+    VK_IMAGE_TILING_MAX_ENUM = 0x7FFFFFFF
+} VkImageTiling;
+
+typedef enum VkImageType {
+    VK_IMAGE_TYPE_1D = 0,
+    VK_IMAGE_TYPE_2D = 1,
+    VK_IMAGE_TYPE_3D = 2,
+    VK_IMAGE_TYPE_MAX_ENUM = 0x7FFFFFFF
+} VkImageType;
+
+typedef enum VkPhysicalDeviceType {
+    VK_PHYSICAL_DEVICE_TYPE_OTHER = 0,
+    VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU = 1,
+    VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU = 2,
+    VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU = 3,
+    VK_PHYSICAL_DEVICE_TYPE_CPU = 4,
+    VK_PHYSICAL_DEVICE_TYPE_MAX_ENUM = 0x7FFFFFFF
+} VkPhysicalDeviceType;
+
+typedef enum VkQueryType {
+    VK_QUERY_TYPE_OCCLUSION = 0,
+    VK_QUERY_TYPE_PIPELINE_STATISTICS = 1,
+    VK_QUERY_TYPE_TIMESTAMP = 2,
+    VK_QUERY_TYPE_RESULT_STATUS_ONLY_KHR = 1000023000,
+    VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT = 1000028004,
+    VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR = 1000116000,
+    VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR = 1000150000,
+    VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR = 1000150001,
+    VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV = 1000165000,
+    VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL = 1000210000,
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_QUERY_TYPE_VIDEO_ENCODE_BITSTREAM_BUFFER_RANGE_KHR = 1000299000,
+#endif
+    VK_QUERY_TYPE_MESH_PRIMITIVES_GENERATED_EXT = 1000328000,
+    VK_QUERY_TYPE_PRIMITIVES_GENERATED_EXT = 1000382000,
+    VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_BOTTOM_LEVEL_POINTERS_KHR = 1000386000,
+    VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SIZE_KHR = 1000386001,
+    VK_QUERY_TYPE_MICROMAP_SERIALIZATION_SIZE_EXT = 1000396000,
+    VK_QUERY_TYPE_MICROMAP_COMPACTED_SIZE_EXT = 1000396001,
+    VK_QUERY_TYPE_MAX_ENUM = 0x7FFFFFFF
+} VkQueryType;
+
+typedef enum VkSharingMode {
+    VK_SHARING_MODE_EXCLUSIVE = 0,
+    VK_SHARING_MODE_CONCURRENT = 1,
+    VK_SHARING_MODE_MAX_ENUM = 0x7FFFFFFF
+} VkSharingMode;
+
+typedef enum VkComponentSwizzle {
+    VK_COMPONENT_SWIZZLE_IDENTITY = 0,
+    VK_COMPONENT_SWIZZLE_ZERO = 1,
+    VK_COMPONENT_SWIZZLE_ONE = 2,
+    VK_COMPONENT_SWIZZLE_R = 3,
+    VK_COMPONENT_SWIZZLE_G = 4,
+    VK_COMPONENT_SWIZZLE_B = 5,
+    VK_COMPONENT_SWIZZLE_A = 6,
+    VK_COMPONENT_SWIZZLE_MAX_ENUM = 0x7FFFFFFF
+} VkComponentSwizzle;
+
+typedef enum VkImageViewType {
+    VK_IMAGE_VIEW_TYPE_1D = 0,
+    VK_IMAGE_VIEW_TYPE_2D = 1,
+    VK_IMAGE_VIEW_TYPE_3D = 2,
+    VK_IMAGE_VIEW_TYPE_CUBE = 3,
+    VK_IMAGE_VIEW_TYPE_1D_ARRAY = 4,
+    VK_IMAGE_VIEW_TYPE_2D_ARRAY = 5,
+    VK_IMAGE_VIEW_TYPE_CUBE_ARRAY = 6,
+    VK_IMAGE_VIEW_TYPE_MAX_ENUM = 0x7FFFFFFF
+} VkImageViewType;
+
+typedef enum VkBlendFactor {
+    VK_BLEND_FACTOR_ZERO = 0,
+    VK_BLEND_FACTOR_ONE = 1,
+    VK_BLEND_FACTOR_SRC_COLOR = 2,
+    VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR = 3,
+    VK_BLEND_FACTOR_DST_COLOR = 4,
+    VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR = 5,
+    VK_BLEND_FACTOR_SRC_ALPHA = 6,
+    VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA = 7,
+    VK_BLEND_FACTOR_DST_ALPHA = 8,
+    VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA = 9,
+    VK_BLEND_FACTOR_CONSTANT_COLOR = 10,
+    VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR = 11,
+    VK_BLEND_FACTOR_CONSTANT_ALPHA = 12,
+    VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA = 13,
+    VK_BLEND_FACTOR_SRC_ALPHA_SATURATE = 14,
+    VK_BLEND_FACTOR_SRC1_COLOR = 15,
+    VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR = 16,
+    VK_BLEND_FACTOR_SRC1_ALPHA = 17,
+    VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA = 18,
+    VK_BLEND_FACTOR_MAX_ENUM = 0x7FFFFFFF
+} VkBlendFactor;
+
+typedef enum VkBlendOp {
+    VK_BLEND_OP_ADD = 0,
+    VK_BLEND_OP_SUBTRACT = 1,
+    VK_BLEND_OP_REVERSE_SUBTRACT = 2,
+    VK_BLEND_OP_MIN = 3,
+    VK_BLEND_OP_MAX = 4,
+    VK_BLEND_OP_ZERO_EXT = 1000148000,
+    VK_BLEND_OP_SRC_EXT = 1000148001,
+    VK_BLEND_OP_DST_EXT = 1000148002,
+    VK_BLEND_OP_SRC_OVER_EXT = 1000148003,
+    VK_BLEND_OP_DST_OVER_EXT = 1000148004,
+    VK_BLEND_OP_SRC_IN_EXT = 1000148005,
+    VK_BLEND_OP_DST_IN_EXT = 1000148006,
+    VK_BLEND_OP_SRC_OUT_EXT = 1000148007,
+    VK_BLEND_OP_DST_OUT_EXT = 1000148008,
+    VK_BLEND_OP_SRC_ATOP_EXT = 1000148009,
+    VK_BLEND_OP_DST_ATOP_EXT = 1000148010,
+    VK_BLEND_OP_XOR_EXT = 1000148011,
+    VK_BLEND_OP_MULTIPLY_EXT = 1000148012,
+    VK_BLEND_OP_SCREEN_EXT = 1000148013,
+    VK_BLEND_OP_OVERLAY_EXT = 1000148014,
+    VK_BLEND_OP_DARKEN_EXT = 1000148015,
+    VK_BLEND_OP_LIGHTEN_EXT = 1000148016,
+    VK_BLEND_OP_COLORDODGE_EXT = 1000148017,
+    VK_BLEND_OP_COLORBURN_EXT = 1000148018,
+    VK_BLEND_OP_HARDLIGHT_EXT = 1000148019,
+    VK_BLEND_OP_SOFTLIGHT_EXT = 1000148020,
+    VK_BLEND_OP_DIFFERENCE_EXT = 1000148021,
+    VK_BLEND_OP_EXCLUSION_EXT = 1000148022,
+    VK_BLEND_OP_INVERT_EXT = 1000148023,
+    VK_BLEND_OP_INVERT_RGB_EXT = 1000148024,
+    VK_BLEND_OP_LINEARDODGE_EXT = 1000148025,
+    VK_BLEND_OP_LINEARBURN_EXT = 1000148026,
+    VK_BLEND_OP_VIVIDLIGHT_EXT = 1000148027,
+    VK_BLEND_OP_LINEARLIGHT_EXT = 1000148028,
+    VK_BLEND_OP_PINLIGHT_EXT = 1000148029,
+    VK_BLEND_OP_HARDMIX_EXT = 1000148030,
+    VK_BLEND_OP_HSL_HUE_EXT = 1000148031,
+    VK_BLEND_OP_HSL_SATURATION_EXT = 1000148032,
+    VK_BLEND_OP_HSL_COLOR_EXT = 1000148033,
+    VK_BLEND_OP_HSL_LUMINOSITY_EXT = 1000148034,
+    VK_BLEND_OP_PLUS_EXT = 1000148035,
+    VK_BLEND_OP_PLUS_CLAMPED_EXT = 1000148036,
+    VK_BLEND_OP_PLUS_CLAMPED_ALPHA_EXT = 1000148037,
+    VK_BLEND_OP_PLUS_DARKER_EXT = 1000148038,
+    VK_BLEND_OP_MINUS_EXT = 1000148039,
+    VK_BLEND_OP_MINUS_CLAMPED_EXT = 1000148040,
+    VK_BLEND_OP_CONTRAST_EXT = 1000148041,
+    VK_BLEND_OP_INVERT_OVG_EXT = 1000148042,
+    VK_BLEND_OP_RED_EXT = 1000148043,
+    VK_BLEND_OP_GREEN_EXT = 1000148044,
+    VK_BLEND_OP_BLUE_EXT = 1000148045,
+    VK_BLEND_OP_MAX_ENUM = 0x7FFFFFFF
+} VkBlendOp;
+
+typedef enum VkCompareOp {
+    VK_COMPARE_OP_NEVER = 0,
+    VK_COMPARE_OP_LESS = 1,
+    VK_COMPARE_OP_EQUAL = 2,
+    VK_COMPARE_OP_LESS_OR_EQUAL = 3,
+    VK_COMPARE_OP_GREATER = 4,
+    VK_COMPARE_OP_NOT_EQUAL = 5,
+    VK_COMPARE_OP_GREATER_OR_EQUAL = 6,
+    VK_COMPARE_OP_ALWAYS = 7,
+    VK_COMPARE_OP_MAX_ENUM = 0x7FFFFFFF
+} VkCompareOp;
+
+typedef enum VkDynamicState {
+    VK_DYNAMIC_STATE_VIEWPORT = 0,
+    VK_DYNAMIC_STATE_SCISSOR = 1,
+    VK_DYNAMIC_STATE_LINE_WIDTH = 2,
+    VK_DYNAMIC_STATE_DEPTH_BIAS = 3,
+    VK_DYNAMIC_STATE_BLEND_CONSTANTS = 4,
+    VK_DYNAMIC_STATE_DEPTH_BOUNDS = 5,
+    VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK = 6,
+    VK_DYNAMIC_STATE_STENCIL_WRITE_MASK = 7,
+    VK_DYNAMIC_STATE_STENCIL_REFERENCE = 8,
+    VK_DYNAMIC_STATE_CULL_MODE = 1000267000,
+    VK_DYNAMIC_STATE_FRONT_FACE = 1000267001,
+    VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY = 1000267002,
+    VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT = 1000267003,
+    VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT = 1000267004,
+    VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE = 1000267005,
+    VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE = 1000267006,
+    VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE = 1000267007,
+    VK_DYNAMIC_STATE_DEPTH_COMPARE_OP = 1000267008,
+    VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE = 1000267009,
+    VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE = 1000267010,
+    VK_DYNAMIC_STATE_STENCIL_OP = 1000267011,
+    VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE = 1000377001,
+    VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE = 1000377002,
+    VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE = 1000377004,
+    VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV = 1000087000,
+    VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT = 1000099000,
+    VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT = 1000143000,
+    VK_DYNAMIC_STATE_RAY_TRACING_PIPELINE_STACK_SIZE_KHR = 1000347000,
+    VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV = 1000164004,
+    VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV = 1000164006,
+    VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV = 1000205001,
+    VK_DYNAMIC_STATE_FRAGMENT_SHADING_RATE_KHR = 1000226000,
+    VK_DYNAMIC_STATE_LINE_STIPPLE_EXT = 1000259000,
+    VK_DYNAMIC_STATE_VERTEX_INPUT_EXT = 1000352000,
+    VK_DYNAMIC_STATE_PATCH_CONTROL_POINTS_EXT = 1000377000,
+    VK_DYNAMIC_STATE_LOGIC_OP_EXT = 1000377003,
+    VK_DYNAMIC_STATE_COLOR_WRITE_ENABLE_EXT = 1000381000,
+    VK_DYNAMIC_STATE_TESSELLATION_DOMAIN_ORIGIN_EXT = 1000455002,
+    VK_DYNAMIC_STATE_DEPTH_CLAMP_ENABLE_EXT = 1000455003,
+    VK_DYNAMIC_STATE_POLYGON_MODE_EXT = 1000455004,
+    VK_DYNAMIC_STATE_RASTERIZATION_SAMPLES_EXT = 1000455005,
+    VK_DYNAMIC_STATE_SAMPLE_MASK_EXT = 1000455006,
+    VK_DYNAMIC_STATE_ALPHA_TO_COVERAGE_ENABLE_EXT = 1000455007,
+    VK_DYNAMIC_STATE_ALPHA_TO_ONE_ENABLE_EXT = 1000455008,
+    VK_DYNAMIC_STATE_LOGIC_OP_ENABLE_EXT = 1000455009,
+    VK_DYNAMIC_STATE_COLOR_BLEND_ENABLE_EXT = 1000455010,
+    VK_DYNAMIC_STATE_COLOR_BLEND_EQUATION_EXT = 1000455011,
+    VK_DYNAMIC_STATE_COLOR_WRITE_MASK_EXT = 1000455012,
+    VK_DYNAMIC_STATE_RASTERIZATION_STREAM_EXT = 1000455013,
+    VK_DYNAMIC_STATE_CONSERVATIVE_RASTERIZATION_MODE_EXT = 1000455014,
+    VK_DYNAMIC_STATE_EXTRA_PRIMITIVE_OVERESTIMATION_SIZE_EXT = 1000455015,
+    VK_DYNAMIC_STATE_DEPTH_CLIP_ENABLE_EXT = 1000455016,
+    VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_ENABLE_EXT = 1000455017,
+    VK_DYNAMIC_STATE_COLOR_BLEND_ADVANCED_EXT = 1000455018,
+    VK_DYNAMIC_STATE_PROVOKING_VERTEX_MODE_EXT = 1000455019,
+    VK_DYNAMIC_STATE_LINE_RASTERIZATION_MODE_EXT = 1000455020,
+    VK_DYNAMIC_STATE_LINE_STIPPLE_ENABLE_EXT = 1000455021,
+    VK_DYNAMIC_STATE_DEPTH_CLIP_NEGATIVE_ONE_TO_ONE_EXT = 1000455022,
+    VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_ENABLE_NV = 1000455023,
+    VK_DYNAMIC_STATE_VIEWPORT_SWIZZLE_NV = 1000455024,
+    VK_DYNAMIC_STATE_COVERAGE_TO_COLOR_ENABLE_NV = 1000455025,
+    VK_DYNAMIC_STATE_COVERAGE_TO_COLOR_LOCATION_NV = 1000455026,
+    VK_DYNAMIC_STATE_COVERAGE_MODULATION_MODE_NV = 1000455027,
+    VK_DYNAMIC_STATE_COVERAGE_MODULATION_TABLE_ENABLE_NV = 1000455028,
+    VK_DYNAMIC_STATE_COVERAGE_MODULATION_TABLE_NV = 1000455029,
+    VK_DYNAMIC_STATE_SHADING_RATE_IMAGE_ENABLE_NV = 1000455030,
+    VK_DYNAMIC_STATE_REPRESENTATIVE_FRAGMENT_TEST_ENABLE_NV = 1000455031,
+    VK_DYNAMIC_STATE_COVERAGE_REDUCTION_MODE_NV = 1000455032,
+    VK_DYNAMIC_STATE_CULL_MODE_EXT = VK_DYNAMIC_STATE_CULL_MODE,
+    VK_DYNAMIC_STATE_FRONT_FACE_EXT = VK_DYNAMIC_STATE_FRONT_FACE,
+    VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT = VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY,
+    VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT = VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT,
+    VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT = VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT,
+    VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT = VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE,
+    VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE_EXT = VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE,
+    VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE_EXT = VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE,
+    VK_DYNAMIC_STATE_DEPTH_COMPARE_OP_EXT = VK_DYNAMIC_STATE_DEPTH_COMPARE_OP,
+    VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE_EXT = VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE,
+    VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE_EXT = VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE,
+    VK_DYNAMIC_STATE_STENCIL_OP_EXT = VK_DYNAMIC_STATE_STENCIL_OP,
+    VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE_EXT = VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE,
+    VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE_EXT = VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE,
+    VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE_EXT = VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE,
+    VK_DYNAMIC_STATE_MAX_ENUM = 0x7FFFFFFF
+} VkDynamicState;
+
+typedef enum VkFrontFace {
+    VK_FRONT_FACE_COUNTER_CLOCKWISE = 0,
+    VK_FRONT_FACE_CLOCKWISE = 1,
+    VK_FRONT_FACE_MAX_ENUM = 0x7FFFFFFF
+} VkFrontFace;
+
+typedef enum VkVertexInputRate {
+    VK_VERTEX_INPUT_RATE_VERTEX = 0,
+    VK_VERTEX_INPUT_RATE_INSTANCE = 1,
+    VK_VERTEX_INPUT_RATE_MAX_ENUM = 0x7FFFFFFF
+} VkVertexInputRate;
+
+typedef enum VkPrimitiveTopology {
+    VK_PRIMITIVE_TOPOLOGY_POINT_LIST = 0,
+    VK_PRIMITIVE_TOPOLOGY_LINE_LIST = 1,
+    VK_PRIMITIVE_TOPOLOGY_LINE_STRIP = 2,
+    VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST = 3,
+    VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP = 4,
+    VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN = 5,
+    VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY = 6,
+    VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY = 7,
+    VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY = 8,
+    VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY = 9,
+    VK_PRIMITIVE_TOPOLOGY_PATCH_LIST = 10,
+    VK_PRIMITIVE_TOPOLOGY_MAX_ENUM = 0x7FFFFFFF
+} VkPrimitiveTopology;
+
+typedef enum VkPolygonMode {
+    VK_POLYGON_MODE_FILL = 0,
+    VK_POLYGON_MODE_LINE = 1,
+    VK_POLYGON_MODE_POINT = 2,
+    VK_POLYGON_MODE_FILL_RECTANGLE_NV = 1000153000,
+    VK_POLYGON_MODE_MAX_ENUM = 0x7FFFFFFF
+} VkPolygonMode;
+
+typedef enum VkStencilOp {
+    VK_STENCIL_OP_KEEP = 0,
+    VK_STENCIL_OP_ZERO = 1,
+    VK_STENCIL_OP_REPLACE = 2,
+    VK_STENCIL_OP_INCREMENT_AND_CLAMP = 3,
+    VK_STENCIL_OP_DECREMENT_AND_CLAMP = 4,
+    VK_STENCIL_OP_INVERT = 5,
+    VK_STENCIL_OP_INCREMENT_AND_WRAP = 6,
+    VK_STENCIL_OP_DECREMENT_AND_WRAP = 7,
+    VK_STENCIL_OP_MAX_ENUM = 0x7FFFFFFF
+} VkStencilOp;
+
+typedef enum VkLogicOp {
+    VK_LOGIC_OP_CLEAR = 0,
+    VK_LOGIC_OP_AND = 1,
+    VK_LOGIC_OP_AND_REVERSE = 2,
+    VK_LOGIC_OP_COPY = 3,
+    VK_LOGIC_OP_AND_INVERTED = 4,
+    VK_LOGIC_OP_NO_OP = 5,
+    VK_LOGIC_OP_XOR = 6,
+    VK_LOGIC_OP_OR = 7,
+    VK_LOGIC_OP_NOR = 8,
+    VK_LOGIC_OP_EQUIVALENT = 9,
+    VK_LOGIC_OP_INVERT = 10,
+    VK_LOGIC_OP_OR_REVERSE = 11,
+    VK_LOGIC_OP_COPY_INVERTED = 12,
+    VK_LOGIC_OP_OR_INVERTED = 13,
+    VK_LOGIC_OP_NAND = 14,
+    VK_LOGIC_OP_SET = 15,
+    VK_LOGIC_OP_MAX_ENUM = 0x7FFFFFFF
+} VkLogicOp;
+
+typedef enum VkBorderColor {
+    VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK = 0,
+    VK_BORDER_COLOR_INT_TRANSPARENT_BLACK = 1,
+    VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK = 2,
+    VK_BORDER_COLOR_INT_OPAQUE_BLACK = 3,
+    VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE = 4,
+    VK_BORDER_COLOR_INT_OPAQUE_WHITE = 5,
+    VK_BORDER_COLOR_FLOAT_CUSTOM_EXT = 1000287003,
+    VK_BORDER_COLOR_INT_CUSTOM_EXT = 1000287004,
+    VK_BORDER_COLOR_MAX_ENUM = 0x7FFFFFFF
+} VkBorderColor;
+
+typedef enum VkFilter {
+    VK_FILTER_NEAREST = 0,
+    VK_FILTER_LINEAR = 1,
+    VK_FILTER_CUBIC_EXT = 1000015000,
+    VK_FILTER_CUBIC_IMG = VK_FILTER_CUBIC_EXT,
+    VK_FILTER_MAX_ENUM = 0x7FFFFFFF
+} VkFilter;
+
+typedef enum VkSamplerAddressMode {
+    VK_SAMPLER_ADDRESS_MODE_REPEAT = 0,
+    VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT = 1,
+    VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE = 2,
+    VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER = 3,
+    VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE = 4,
+    VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE_KHR = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE,
+    VK_SAMPLER_ADDRESS_MODE_MAX_ENUM = 0x7FFFFFFF
+} VkSamplerAddressMode;
+
+typedef enum VkSamplerMipmapMode {
+    VK_SAMPLER_MIPMAP_MODE_NEAREST = 0,
+    VK_SAMPLER_MIPMAP_MODE_LINEAR = 1,
+    VK_SAMPLER_MIPMAP_MODE_MAX_ENUM = 0x7FFFFFFF
+} VkSamplerMipmapMode;
+
+typedef enum VkDescriptorType {
+    VK_DESCRIPTOR_TYPE_SAMPLER = 0,
+    VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER = 1,
+    VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE = 2,
+    VK_DESCRIPTOR_TYPE_STORAGE_IMAGE = 3,
+    VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER = 4,
+    VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER = 5,
+    VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER = 6,
+    VK_DESCRIPTOR_TYPE_STORAGE_BUFFER = 7,
+    VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC = 8,
+    VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC = 9,
+    VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT = 10,
+    VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK = 1000138000,
+    VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR = 1000150000,
+    VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV = 1000165000,
+    VK_DESCRIPTOR_TYPE_SAMPLE_WEIGHT_IMAGE_QCOM = 1000440000,
+    VK_DESCRIPTOR_TYPE_BLOCK_MATCH_IMAGE_QCOM = 1000440001,
+    VK_DESCRIPTOR_TYPE_MUTABLE_EXT = 1000351000,
+    VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK,
+    VK_DESCRIPTOR_TYPE_MUTABLE_VALVE = VK_DESCRIPTOR_TYPE_MUTABLE_EXT,
+    VK_DESCRIPTOR_TYPE_MAX_ENUM = 0x7FFFFFFF
+} VkDescriptorType;
+
+typedef enum VkAttachmentLoadOp {
+    VK_ATTACHMENT_LOAD_OP_LOAD = 0,
+    VK_ATTACHMENT_LOAD_OP_CLEAR = 1,
+    VK_ATTACHMENT_LOAD_OP_DONT_CARE = 2,
+    VK_ATTACHMENT_LOAD_OP_NONE_EXT = 1000400000,
+    VK_ATTACHMENT_LOAD_OP_MAX_ENUM = 0x7FFFFFFF
+} VkAttachmentLoadOp;
+
+typedef enum VkAttachmentStoreOp {
+    VK_ATTACHMENT_STORE_OP_STORE = 0,
+    VK_ATTACHMENT_STORE_OP_DONT_CARE = 1,
+    VK_ATTACHMENT_STORE_OP_NONE = 1000301000,
+    VK_ATTACHMENT_STORE_OP_NONE_KHR = VK_ATTACHMENT_STORE_OP_NONE,
+    VK_ATTACHMENT_STORE_OP_NONE_QCOM = VK_ATTACHMENT_STORE_OP_NONE,
+    VK_ATTACHMENT_STORE_OP_NONE_EXT = VK_ATTACHMENT_STORE_OP_NONE,
+    VK_ATTACHMENT_STORE_OP_MAX_ENUM = 0x7FFFFFFF
+} VkAttachmentStoreOp;
+
+typedef enum VkPipelineBindPoint {
+    VK_PIPELINE_BIND_POINT_GRAPHICS = 0,
+    VK_PIPELINE_BIND_POINT_COMPUTE = 1,
+    VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR = 1000165000,
+    VK_PIPELINE_BIND_POINT_SUBPASS_SHADING_HUAWEI = 1000369003,
+    VK_PIPELINE_BIND_POINT_RAY_TRACING_NV = VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR,
+    VK_PIPELINE_BIND_POINT_MAX_ENUM = 0x7FFFFFFF
+} VkPipelineBindPoint;
+
+typedef enum VkCommandBufferLevel {
+    VK_COMMAND_BUFFER_LEVEL_PRIMARY = 0,
+    VK_COMMAND_BUFFER_LEVEL_SECONDARY = 1,
+    VK_COMMAND_BUFFER_LEVEL_MAX_ENUM = 0x7FFFFFFF
+} VkCommandBufferLevel;
+
+typedef enum VkIndexType {
+    VK_INDEX_TYPE_UINT16 = 0,
+    VK_INDEX_TYPE_UINT32 = 1,
+    VK_INDEX_TYPE_NONE_KHR = 1000165000,
+    VK_INDEX_TYPE_UINT8_EXT = 1000265000,
+    VK_INDEX_TYPE_NONE_NV = VK_INDEX_TYPE_NONE_KHR,
+    VK_INDEX_TYPE_MAX_ENUM = 0x7FFFFFFF
+} VkIndexType;
+
+typedef enum VkSubpassContents {
+    VK_SUBPASS_CONTENTS_INLINE = 0,
+    VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS = 1,
+    VK_SUBPASS_CONTENTS_MAX_ENUM = 0x7FFFFFFF
+} VkSubpassContents;
+
+typedef enum VkAccessFlagBits {
+    VK_ACCESS_INDIRECT_COMMAND_READ_BIT = 0x00000001,
+    VK_ACCESS_INDEX_READ_BIT = 0x00000002,
+    VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT = 0x00000004,
+    VK_ACCESS_UNIFORM_READ_BIT = 0x00000008,
+    VK_ACCESS_INPUT_ATTACHMENT_READ_BIT = 0x00000010,
+    VK_ACCESS_SHADER_READ_BIT = 0x00000020,
+    VK_ACCESS_SHADER_WRITE_BIT = 0x00000040,
+    VK_ACCESS_COLOR_ATTACHMENT_READ_BIT = 0x00000080,
+    VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT = 0x00000100,
+    VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT = 0x00000200,
+    VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT = 0x00000400,
+    VK_ACCESS_TRANSFER_READ_BIT = 0x00000800,
+    VK_ACCESS_TRANSFER_WRITE_BIT = 0x00001000,
+    VK_ACCESS_HOST_READ_BIT = 0x00002000,
+    VK_ACCESS_HOST_WRITE_BIT = 0x00004000,
+    VK_ACCESS_MEMORY_READ_BIT = 0x00008000,
+    VK_ACCESS_MEMORY_WRITE_BIT = 0x00010000,
+    VK_ACCESS_NONE = 0,
+    VK_ACCESS_TRANSFORM_FEEDBACK_WRITE_BIT_EXT = 0x02000000,
+    VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT = 0x04000000,
+    VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT = 0x08000000,
+    VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT = 0x00100000,
+    VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT = 0x00080000,
+    VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_KHR = 0x00200000,
+    VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_KHR = 0x00400000,
+    VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT = 0x01000000,
+    VK_ACCESS_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR = 0x00800000,
+    VK_ACCESS_COMMAND_PREPROCESS_READ_BIT_NV = 0x00020000,
+    VK_ACCESS_COMMAND_PREPROCESS_WRITE_BIT_NV = 0x00040000,
+    VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV = VK_ACCESS_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR,
+    VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_KHR,
+    VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_KHR,
+    VK_ACCESS_NONE_KHR = VK_ACCESS_NONE,
+    VK_ACCESS_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkAccessFlagBits;
+typedef VkFlags VkAccessFlags;
+
+typedef enum VkImageAspectFlagBits {
+    VK_IMAGE_ASPECT_COLOR_BIT = 0x00000001,
+    VK_IMAGE_ASPECT_DEPTH_BIT = 0x00000002,
+    VK_IMAGE_ASPECT_STENCIL_BIT = 0x00000004,
+    VK_IMAGE_ASPECT_METADATA_BIT = 0x00000008,
+    VK_IMAGE_ASPECT_PLANE_0_BIT = 0x00000010,
+    VK_IMAGE_ASPECT_PLANE_1_BIT = 0x00000020,
+    VK_IMAGE_ASPECT_PLANE_2_BIT = 0x00000040,
+    VK_IMAGE_ASPECT_NONE = 0,
+    VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT = 0x00000080,
+    VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT = 0x00000100,
+    VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT = 0x00000200,
+    VK_IMAGE_ASPECT_MEMORY_PLANE_3_BIT_EXT = 0x00000400,
+    VK_IMAGE_ASPECT_PLANE_0_BIT_KHR = VK_IMAGE_ASPECT_PLANE_0_BIT,
+    VK_IMAGE_ASPECT_PLANE_1_BIT_KHR = VK_IMAGE_ASPECT_PLANE_1_BIT,
+    VK_IMAGE_ASPECT_PLANE_2_BIT_KHR = VK_IMAGE_ASPECT_PLANE_2_BIT,
+    VK_IMAGE_ASPECT_NONE_KHR = VK_IMAGE_ASPECT_NONE,
+    VK_IMAGE_ASPECT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkImageAspectFlagBits;
+typedef VkFlags VkImageAspectFlags;
+
+typedef enum VkFormatFeatureFlagBits {
+    VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT = 0x00000001,
+    VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT = 0x00000002,
+    VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT = 0x00000004,
+    VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT = 0x00000008,
+    VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT = 0x00000010,
+    VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT = 0x00000020,
+    VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT = 0x00000040,
+    VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT = 0x00000080,
+    VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT = 0x00000100,
+    VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT = 0x00000200,
+    VK_FORMAT_FEATURE_BLIT_SRC_BIT = 0x00000400,
+    VK_FORMAT_FEATURE_BLIT_DST_BIT = 0x00000800,
+    VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT = 0x00001000,
+    VK_FORMAT_FEATURE_TRANSFER_SRC_BIT = 0x00004000,
+    VK_FORMAT_FEATURE_TRANSFER_DST_BIT = 0x00008000,
+    VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT = 0x00020000,
+    VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT = 0x00040000,
+    VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT = 0x00080000,
+    VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT = 0x00100000,
+    VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT = 0x00200000,
+    VK_FORMAT_FEATURE_DISJOINT_BIT = 0x00400000,
+    VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT = 0x00800000,
+    VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT = 0x00010000,
+    VK_FORMAT_FEATURE_VIDEO_DECODE_OUTPUT_BIT_KHR = 0x02000000,
+    VK_FORMAT_FEATURE_VIDEO_DECODE_DPB_BIT_KHR = 0x04000000,
+    VK_FORMAT_FEATURE_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR = 0x20000000,
+    VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT = 0x00002000,
+    VK_FORMAT_FEATURE_FRAGMENT_DENSITY_MAP_BIT_EXT = 0x01000000,
+    VK_FORMAT_FEATURE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x40000000,
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_FORMAT_FEATURE_VIDEO_ENCODE_INPUT_BIT_KHR = 0x08000000,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_FORMAT_FEATURE_VIDEO_ENCODE_DPB_BIT_KHR = 0x10000000,
+#endif
+    VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT,
+    VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT,
+    VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR = VK_FORMAT_FEATURE_TRANSFER_DST_BIT,
+    VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT,
+    VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT_KHR = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT,
+    VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT,
+    VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT,
+    VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT_KHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT,
+    VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT_KHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT,
+    VK_FORMAT_FEATURE_DISJOINT_BIT_KHR = VK_FORMAT_FEATURE_DISJOINT_BIT,
+    VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT_KHR = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT,
+    VK_FORMAT_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkFormatFeatureFlagBits;
+typedef VkFlags VkFormatFeatureFlags;
+
+typedef enum VkImageCreateFlagBits {
+    VK_IMAGE_CREATE_SPARSE_BINDING_BIT = 0x00000001,
+    VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT = 0x00000002,
+    VK_IMAGE_CREATE_SPARSE_ALIASED_BIT = 0x00000004,
+    VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT = 0x00000008,
+    VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT = 0x00000010,
+    VK_IMAGE_CREATE_ALIAS_BIT = 0x00000400,
+    VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT = 0x00000040,
+    VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT = 0x00000020,
+    VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT = 0x00000080,
+    VK_IMAGE_CREATE_EXTENDED_USAGE_BIT = 0x00000100,
+    VK_IMAGE_CREATE_PROTECTED_BIT = 0x00000800,
+    VK_IMAGE_CREATE_DISJOINT_BIT = 0x00000200,
+    VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV = 0x00002000,
+    VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT = 0x00001000,
+    VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT = 0x00004000,
+    VK_IMAGE_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT = 0x00010000,
+    VK_IMAGE_CREATE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_BIT_EXT = 0x00040000,
+    VK_IMAGE_CREATE_2D_VIEW_COMPATIBLE_BIT_EXT = 0x00020000,
+    VK_IMAGE_CREATE_FRAGMENT_DENSITY_MAP_OFFSET_BIT_QCOM = 0x00008000,
+    VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT,
+    VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT,
+    VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT,
+    VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT,
+    VK_IMAGE_CREATE_DISJOINT_BIT_KHR = VK_IMAGE_CREATE_DISJOINT_BIT,
+    VK_IMAGE_CREATE_ALIAS_BIT_KHR = VK_IMAGE_CREATE_ALIAS_BIT,
+    VK_IMAGE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkImageCreateFlagBits;
+typedef VkFlags VkImageCreateFlags;
+
+typedef enum VkSampleCountFlagBits {
+    VK_SAMPLE_COUNT_1_BIT = 0x00000001,
+    VK_SAMPLE_COUNT_2_BIT = 0x00000002,
+    VK_SAMPLE_COUNT_4_BIT = 0x00000004,
+    VK_SAMPLE_COUNT_8_BIT = 0x00000008,
+    VK_SAMPLE_COUNT_16_BIT = 0x00000010,
+    VK_SAMPLE_COUNT_32_BIT = 0x00000020,
+    VK_SAMPLE_COUNT_64_BIT = 0x00000040,
+    VK_SAMPLE_COUNT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkSampleCountFlagBits;
+typedef VkFlags VkSampleCountFlags;
+
+typedef enum VkImageUsageFlagBits {
+    VK_IMAGE_USAGE_TRANSFER_SRC_BIT = 0x00000001,
+    VK_IMAGE_USAGE_TRANSFER_DST_BIT = 0x00000002,
+    VK_IMAGE_USAGE_SAMPLED_BIT = 0x00000004,
+    VK_IMAGE_USAGE_STORAGE_BIT = 0x00000008,
+    VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT = 0x00000010,
+    VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT = 0x00000020,
+    VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT = 0x00000040,
+    VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT = 0x00000080,
+    VK_IMAGE_USAGE_VIDEO_DECODE_DST_BIT_KHR = 0x00000400,
+    VK_IMAGE_USAGE_VIDEO_DECODE_SRC_BIT_KHR = 0x00000800,
+    VK_IMAGE_USAGE_VIDEO_DECODE_DPB_BIT_KHR = 0x00001000,
+    VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT = 0x00000200,
+    VK_IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x00000100,
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_IMAGE_USAGE_VIDEO_ENCODE_DST_BIT_KHR = 0x00002000,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_IMAGE_USAGE_VIDEO_ENCODE_SRC_BIT_KHR = 0x00004000,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_IMAGE_USAGE_VIDEO_ENCODE_DPB_BIT_KHR = 0x00008000,
+#endif
+    VK_IMAGE_USAGE_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT = 0x00080000,
+    VK_IMAGE_USAGE_INVOCATION_MASK_BIT_HUAWEI = 0x00040000,
+    VK_IMAGE_USAGE_SAMPLE_WEIGHT_BIT_QCOM = 0x00100000,
+    VK_IMAGE_USAGE_SAMPLE_BLOCK_MATCH_BIT_QCOM = 0x00200000,
+    VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV = VK_IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR,
+    VK_IMAGE_USAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkImageUsageFlagBits;
+typedef VkFlags VkImageUsageFlags;
+
+typedef enum VkInstanceCreateFlagBits {
+    VK_INSTANCE_CREATE_ENUMERATE_PORTABILITY_BIT_KHR = 0x00000001,
+    VK_INSTANCE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkInstanceCreateFlagBits;
+typedef VkFlags VkInstanceCreateFlags;
+
+typedef enum VkMemoryHeapFlagBits {
+    VK_MEMORY_HEAP_DEVICE_LOCAL_BIT = 0x00000001,
+    VK_MEMORY_HEAP_MULTI_INSTANCE_BIT = 0x00000002,
+    VK_MEMORY_HEAP_MULTI_INSTANCE_BIT_KHR = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT,
+    VK_MEMORY_HEAP_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkMemoryHeapFlagBits;
+typedef VkFlags VkMemoryHeapFlags;
+
+typedef enum VkMemoryPropertyFlagBits {
+    VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT = 0x00000001,
+    VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT = 0x00000002,
+    VK_MEMORY_PROPERTY_HOST_COHERENT_BIT = 0x00000004,
+    VK_MEMORY_PROPERTY_HOST_CACHED_BIT = 0x00000008,
+    VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT = 0x00000010,
+    VK_MEMORY_PROPERTY_PROTECTED_BIT = 0x00000020,
+    VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD = 0x00000040,
+    VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD = 0x00000080,
+    VK_MEMORY_PROPERTY_RDMA_CAPABLE_BIT_NV = 0x00000100,
+    VK_MEMORY_PROPERTY_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkMemoryPropertyFlagBits;
+typedef VkFlags VkMemoryPropertyFlags;
+
+typedef enum VkQueueFlagBits {
+    VK_QUEUE_GRAPHICS_BIT = 0x00000001,
+    VK_QUEUE_COMPUTE_BIT = 0x00000002,
+    VK_QUEUE_TRANSFER_BIT = 0x00000004,
+    VK_QUEUE_SPARSE_BINDING_BIT = 0x00000008,
+    VK_QUEUE_PROTECTED_BIT = 0x00000010,
+    VK_QUEUE_VIDEO_DECODE_BIT_KHR = 0x00000020,
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_QUEUE_VIDEO_ENCODE_BIT_KHR = 0x00000040,
+#endif
+    VK_QUEUE_OPTICAL_FLOW_BIT_NV = 0x00000100,
+    VK_QUEUE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkQueueFlagBits;
+typedef VkFlags VkQueueFlags;
+typedef VkFlags VkDeviceCreateFlags;
+
+typedef enum VkDeviceQueueCreateFlagBits {
+    VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT = 0x00000001,
+    VK_DEVICE_QUEUE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkDeviceQueueCreateFlagBits;
+typedef VkFlags VkDeviceQueueCreateFlags;
+
+typedef enum VkPipelineStageFlagBits {
+    VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT = 0x00000001,
+    VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT = 0x00000002,
+    VK_PIPELINE_STAGE_VERTEX_INPUT_BIT = 0x00000004,
+    VK_PIPELINE_STAGE_VERTEX_SHADER_BIT = 0x00000008,
+    VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT = 0x00000010,
+    VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT = 0x00000020,
+    VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT = 0x00000040,
+    VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT = 0x00000080,
+    VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT = 0x00000100,
+    VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT = 0x00000200,
+    VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT = 0x00000400,
+    VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT = 0x00000800,
+    VK_PIPELINE_STAGE_TRANSFER_BIT = 0x00001000,
+    VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT = 0x00002000,
+    VK_PIPELINE_STAGE_HOST_BIT = 0x00004000,
+    VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT = 0x00008000,
+    VK_PIPELINE_STAGE_ALL_COMMANDS_BIT = 0x00010000,
+    VK_PIPELINE_STAGE_NONE = 0,
+    VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT = 0x01000000,
+    VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT = 0x00040000,
+    VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_KHR = 0x02000000,
+    VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR = 0x00200000,
+    VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT = 0x00800000,
+    VK_PIPELINE_STAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x00400000,
+    VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_NV = 0x00020000,
+    VK_PIPELINE_STAGE_TASK_SHADER_BIT_EXT = 0x00080000,
+    VK_PIPELINE_STAGE_MESH_SHADER_BIT_EXT = 0x00100000,
+    VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV = VK_PIPELINE_STAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR,
+    VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR,
+    VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV = VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_KHR,
+    VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV = VK_PIPELINE_STAGE_TASK_SHADER_BIT_EXT,
+    VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV = VK_PIPELINE_STAGE_MESH_SHADER_BIT_EXT,
+    VK_PIPELINE_STAGE_NONE_KHR = VK_PIPELINE_STAGE_NONE,
+    VK_PIPELINE_STAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkPipelineStageFlagBits;
+typedef VkFlags VkPipelineStageFlags;
+typedef VkFlags VkMemoryMapFlags;
+
+typedef enum VkSparseMemoryBindFlagBits {
+    VK_SPARSE_MEMORY_BIND_METADATA_BIT = 0x00000001,
+    VK_SPARSE_MEMORY_BIND_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkSparseMemoryBindFlagBits;
+typedef VkFlags VkSparseMemoryBindFlags;
+
+typedef enum VkSparseImageFormatFlagBits {
+    VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT = 0x00000001,
+    VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT = 0x00000002,
+    VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT = 0x00000004,
+    VK_SPARSE_IMAGE_FORMAT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkSparseImageFormatFlagBits;
+typedef VkFlags VkSparseImageFormatFlags;
+
+typedef enum VkFenceCreateFlagBits {
+    VK_FENCE_CREATE_SIGNALED_BIT = 0x00000001,
+    VK_FENCE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkFenceCreateFlagBits;
+typedef VkFlags VkFenceCreateFlags;
+typedef VkFlags VkSemaphoreCreateFlags;
+
+typedef enum VkEventCreateFlagBits {
+    VK_EVENT_CREATE_DEVICE_ONLY_BIT = 0x00000001,
+    VK_EVENT_CREATE_DEVICE_ONLY_BIT_KHR = VK_EVENT_CREATE_DEVICE_ONLY_BIT,
+    VK_EVENT_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkEventCreateFlagBits;
+typedef VkFlags VkEventCreateFlags;
+
+typedef enum VkQueryPipelineStatisticFlagBits {
+    VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT = 0x00000001,
+    VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT = 0x00000002,
+    VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT = 0x00000004,
+    VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT = 0x00000008,
+    VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT = 0x00000010,
+    VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT = 0x00000020,
+    VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT = 0x00000040,
+    VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT = 0x00000080,
+    VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT = 0x00000100,
+    VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT = 0x00000200,
+    VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT = 0x00000400,
+    VK_QUERY_PIPELINE_STATISTIC_TASK_SHADER_INVOCATIONS_BIT_EXT = 0x00000800,
+    VK_QUERY_PIPELINE_STATISTIC_MESH_SHADER_INVOCATIONS_BIT_EXT = 0x00001000,
+    VK_QUERY_PIPELINE_STATISTIC_CLUSTER_CULLING_SHADER_INVOCATIONS_BIT_HUAWEI = 0x00002000,
+    VK_QUERY_PIPELINE_STATISTIC_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkQueryPipelineStatisticFlagBits;
+typedef VkFlags VkQueryPipelineStatisticFlags;
+typedef VkFlags VkQueryPoolCreateFlags;
+
+typedef enum VkQueryResultFlagBits {
+    VK_QUERY_RESULT_64_BIT = 0x00000001,
+    VK_QUERY_RESULT_WAIT_BIT = 0x00000002,
+    VK_QUERY_RESULT_WITH_AVAILABILITY_BIT = 0x00000004,
+    VK_QUERY_RESULT_PARTIAL_BIT = 0x00000008,
+    VK_QUERY_RESULT_WITH_STATUS_BIT_KHR = 0x00000010,
+    VK_QUERY_RESULT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkQueryResultFlagBits;
+typedef VkFlags VkQueryResultFlags;
+
+typedef enum VkBufferCreateFlagBits {
+    VK_BUFFER_CREATE_SPARSE_BINDING_BIT = 0x00000001,
+    VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT = 0x00000002,
+    VK_BUFFER_CREATE_SPARSE_ALIASED_BIT = 0x00000004,
+    VK_BUFFER_CREATE_PROTECTED_BIT = 0x00000008,
+    VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT = 0x00000010,
+    VK_BUFFER_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT = 0x00000020,
+    VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT,
+    VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT,
+    VK_BUFFER_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkBufferCreateFlagBits;
+typedef VkFlags VkBufferCreateFlags;
+
+typedef enum VkBufferUsageFlagBits {
+    VK_BUFFER_USAGE_TRANSFER_SRC_BIT = 0x00000001,
+    VK_BUFFER_USAGE_TRANSFER_DST_BIT = 0x00000002,
+    VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT = 0x00000004,
+    VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT = 0x00000008,
+    VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT = 0x00000010,
+    VK_BUFFER_USAGE_STORAGE_BUFFER_BIT = 0x00000020,
+    VK_BUFFER_USAGE_INDEX_BUFFER_BIT = 0x00000040,
+    VK_BUFFER_USAGE_VERTEX_BUFFER_BIT = 0x00000080,
+    VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT = 0x00000100,
+    VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT = 0x00020000,
+    VK_BUFFER_USAGE_VIDEO_DECODE_SRC_BIT_KHR = 0x00002000,
+    VK_BUFFER_USAGE_VIDEO_DECODE_DST_BIT_KHR = 0x00004000,
+    VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT = 0x00000800,
+    VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT = 0x00001000,
+    VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT = 0x00000200,
+    VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_BIT_KHR = 0x00080000,
+    VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_STORAGE_BIT_KHR = 0x00100000,
+    VK_BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR = 0x00000400,
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_BUFFER_USAGE_VIDEO_ENCODE_DST_BIT_KHR = 0x00008000,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_BUFFER_USAGE_VIDEO_ENCODE_SRC_BIT_KHR = 0x00010000,
+#endif
+    VK_BUFFER_USAGE_SAMPLER_DESCRIPTOR_BUFFER_BIT_EXT = 0x00200000,
+    VK_BUFFER_USAGE_RESOURCE_DESCRIPTOR_BUFFER_BIT_EXT = 0x00400000,
+    VK_BUFFER_USAGE_PUSH_DESCRIPTORS_DESCRIPTOR_BUFFER_BIT_EXT = 0x04000000,
+    VK_BUFFER_USAGE_MICROMAP_BUILD_INPUT_READ_ONLY_BIT_EXT = 0x00800000,
+    VK_BUFFER_USAGE_MICROMAP_STORAGE_BIT_EXT = 0x01000000,
+    VK_BUFFER_USAGE_RAY_TRACING_BIT_NV = VK_BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR,
+    VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT,
+    VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_KHR = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT,
+    VK_BUFFER_USAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkBufferUsageFlagBits;
+typedef VkFlags VkBufferUsageFlags;
+typedef VkFlags VkBufferViewCreateFlags;
+
+typedef enum VkImageViewCreateFlagBits {
+    VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DYNAMIC_BIT_EXT = 0x00000001,
+    VK_IMAGE_VIEW_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT = 0x00000004,
+    VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DEFERRED_BIT_EXT = 0x00000002,
+    VK_IMAGE_VIEW_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkImageViewCreateFlagBits;
+typedef VkFlags VkImageViewCreateFlags;
+typedef VkFlags VkShaderModuleCreateFlags;
+
+typedef enum VkPipelineCacheCreateFlagBits {
+    VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT = 0x00000001,
+    VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT_EXT = VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT,
+    VK_PIPELINE_CACHE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkPipelineCacheCreateFlagBits;
+typedef VkFlags VkPipelineCacheCreateFlags;
+
+typedef enum VkColorComponentFlagBits {
+    VK_COLOR_COMPONENT_R_BIT = 0x00000001,
+    VK_COLOR_COMPONENT_G_BIT = 0x00000002,
+    VK_COLOR_COMPONENT_B_BIT = 0x00000004,
+    VK_COLOR_COMPONENT_A_BIT = 0x00000008,
+    VK_COLOR_COMPONENT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkColorComponentFlagBits;
+typedef VkFlags VkColorComponentFlags;
+
+typedef enum VkPipelineCreateFlagBits {
+    VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT = 0x00000001,
+    VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT = 0x00000002,
+    VK_PIPELINE_CREATE_DERIVATIVE_BIT = 0x00000004,
+    VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT = 0x00000008,
+    VK_PIPELINE_CREATE_DISPATCH_BASE_BIT = 0x00000010,
+    VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT = 0x00000100,
+    VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT = 0x00000200,
+    VK_PIPELINE_CREATE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x00200000,
+    VK_PIPELINE_CREATE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT = 0x00400000,
+    VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR = 0x00004000,
+    VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR = 0x00008000,
+    VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR = 0x00010000,
+    VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR = 0x00020000,
+    VK_PIPELINE_CREATE_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR = 0x00001000,
+    VK_PIPELINE_CREATE_RAY_TRACING_SKIP_AABBS_BIT_KHR = 0x00002000,
+    VK_PIPELINE_CREATE_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR = 0x00080000,
+    VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV = 0x00000020,
+    VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR = 0x00000040,
+    VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR = 0x00000080,
+    VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV = 0x00040000,
+    VK_PIPELINE_CREATE_LIBRARY_BIT_KHR = 0x00000800,
+    VK_PIPELINE_CREATE_DESCRIPTOR_BUFFER_BIT_EXT = 0x20000000,
+    VK_PIPELINE_CREATE_RETAIN_LINK_TIME_OPTIMIZATION_INFO_BIT_EXT = 0x00800000,
+    VK_PIPELINE_CREATE_LINK_TIME_OPTIMIZATION_BIT_EXT = 0x00000400,
+    VK_PIPELINE_CREATE_RAY_TRACING_ALLOW_MOTION_BIT_NV = 0x00100000,
+    VK_PIPELINE_CREATE_COLOR_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT = 0x02000000,
+    VK_PIPELINE_CREATE_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT = 0x04000000,
+    VK_PIPELINE_CREATE_RAY_TRACING_OPACITY_MICROMAP_BIT_EXT = 0x01000000,
+    VK_PIPELINE_CREATE_NO_PROTECTED_ACCESS_BIT_EXT = 0x08000000,
+    VK_PIPELINE_CREATE_PROTECTED_ACCESS_ONLY_BIT_EXT = 0x40000000,
+    VK_PIPELINE_CREATE_DISPATCH_BASE = VK_PIPELINE_CREATE_DISPATCH_BASE_BIT,
+    VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = VK_PIPELINE_CREATE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR,
+    VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT = VK_PIPELINE_CREATE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT,
+    VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT,
+    VK_PIPELINE_CREATE_DISPATCH_BASE_KHR = VK_PIPELINE_CREATE_DISPATCH_BASE,
+    VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_EXT = VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT,
+    VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT = VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT,
+    VK_PIPELINE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkPipelineCreateFlagBits;
+typedef VkFlags VkPipelineCreateFlags;
+
+typedef enum VkPipelineShaderStageCreateFlagBits {
+    VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT = 0x00000001,
+    VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT = 0x00000002,
+    VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT = VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT,
+    VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT = VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT,
+    VK_PIPELINE_SHADER_STAGE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkPipelineShaderStageCreateFlagBits;
+typedef VkFlags VkPipelineShaderStageCreateFlags;
+
+typedef enum VkShaderStageFlagBits {
+    VK_SHADER_STAGE_VERTEX_BIT = 0x00000001,
+    VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT = 0x00000002,
+    VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT = 0x00000004,
+    VK_SHADER_STAGE_GEOMETRY_BIT = 0x00000008,
+    VK_SHADER_STAGE_FRAGMENT_BIT = 0x00000010,
+    VK_SHADER_STAGE_COMPUTE_BIT = 0x00000020,
+    VK_SHADER_STAGE_ALL_GRAPHICS = 0x0000001F,
+    VK_SHADER_STAGE_ALL = 0x7FFFFFFF,
+    VK_SHADER_STAGE_RAYGEN_BIT_KHR = 0x00000100,
+    VK_SHADER_STAGE_ANY_HIT_BIT_KHR = 0x00000200,
+    VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR = 0x00000400,
+    VK_SHADER_STAGE_MISS_BIT_KHR = 0x00000800,
+    VK_SHADER_STAGE_INTERSECTION_BIT_KHR = 0x00001000,
+    VK_SHADER_STAGE_CALLABLE_BIT_KHR = 0x00002000,
+    VK_SHADER_STAGE_TASK_BIT_EXT = 0x00000040,
+    VK_SHADER_STAGE_MESH_BIT_EXT = 0x00000080,
+    VK_SHADER_STAGE_SUBPASS_SHADING_BIT_HUAWEI = 0x00004000,
+    VK_SHADER_STAGE_CLUSTER_CULLING_BIT_HUAWEI = 0x00080000,
+    VK_SHADER_STAGE_RAYGEN_BIT_NV = VK_SHADER_STAGE_RAYGEN_BIT_KHR,
+    VK_SHADER_STAGE_ANY_HIT_BIT_NV = VK_SHADER_STAGE_ANY_HIT_BIT_KHR,
+    VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV = VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR,
+    VK_SHADER_STAGE_MISS_BIT_NV = VK_SHADER_STAGE_MISS_BIT_KHR,
+    VK_SHADER_STAGE_INTERSECTION_BIT_NV = VK_SHADER_STAGE_INTERSECTION_BIT_KHR,
+    VK_SHADER_STAGE_CALLABLE_BIT_NV = VK_SHADER_STAGE_CALLABLE_BIT_KHR,
+    VK_SHADER_STAGE_TASK_BIT_NV = VK_SHADER_STAGE_TASK_BIT_EXT,
+    VK_SHADER_STAGE_MESH_BIT_NV = VK_SHADER_STAGE_MESH_BIT_EXT,
+    VK_SHADER_STAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkShaderStageFlagBits;
+
+typedef enum VkCullModeFlagBits {
+    VK_CULL_MODE_NONE = 0,
+    VK_CULL_MODE_FRONT_BIT = 0x00000001,
+    VK_CULL_MODE_BACK_BIT = 0x00000002,
+    VK_CULL_MODE_FRONT_AND_BACK = 0x00000003,
+    VK_CULL_MODE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkCullModeFlagBits;
+typedef VkFlags VkCullModeFlags;
+typedef VkFlags VkPipelineVertexInputStateCreateFlags;
+typedef VkFlags VkPipelineInputAssemblyStateCreateFlags;
+typedef VkFlags VkPipelineTessellationStateCreateFlags;
+typedef VkFlags VkPipelineViewportStateCreateFlags;
+typedef VkFlags VkPipelineRasterizationStateCreateFlags;
+typedef VkFlags VkPipelineMultisampleStateCreateFlags;
+
+typedef enum VkPipelineDepthStencilStateCreateFlagBits {
+    VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_EXT = 0x00000001,
+    VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_EXT = 0x00000002,
+    VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_ARM = VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_EXT,
+    VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_ARM = VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_EXT,
+    VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkPipelineDepthStencilStateCreateFlagBits;
+typedef VkFlags VkPipelineDepthStencilStateCreateFlags;
+
+typedef enum VkPipelineColorBlendStateCreateFlagBits {
+    VK_PIPELINE_COLOR_BLEND_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_BIT_EXT = 0x00000001,
+    VK_PIPELINE_COLOR_BLEND_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_BIT_ARM = VK_PIPELINE_COLOR_BLEND_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_BIT_EXT,
+    VK_PIPELINE_COLOR_BLEND_STATE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkPipelineColorBlendStateCreateFlagBits;
+typedef VkFlags VkPipelineColorBlendStateCreateFlags;
+typedef VkFlags VkPipelineDynamicStateCreateFlags;
+
+typedef enum VkPipelineLayoutCreateFlagBits {
+    VK_PIPELINE_LAYOUT_CREATE_INDEPENDENT_SETS_BIT_EXT = 0x00000002,
+    VK_PIPELINE_LAYOUT_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkPipelineLayoutCreateFlagBits;
+typedef VkFlags VkPipelineLayoutCreateFlags;
+typedef VkFlags VkShaderStageFlags;
+
+typedef enum VkSamplerCreateFlagBits {
+    VK_SAMPLER_CREATE_SUBSAMPLED_BIT_EXT = 0x00000001,
+    VK_SAMPLER_CREATE_SUBSAMPLED_COARSE_RECONSTRUCTION_BIT_EXT = 0x00000002,
+    VK_SAMPLER_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT = 0x00000008,
+    VK_SAMPLER_CREATE_NON_SEAMLESS_CUBE_MAP_BIT_EXT = 0x00000004,
+    VK_SAMPLER_CREATE_IMAGE_PROCESSING_BIT_QCOM = 0x00000010,
+    VK_SAMPLER_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkSamplerCreateFlagBits;
+typedef VkFlags VkSamplerCreateFlags;
+
+typedef enum VkDescriptorPoolCreateFlagBits {
+    VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT = 0x00000001,
+    VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT = 0x00000002,
+    VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_EXT = 0x00000004,
+    VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT = VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT,
+    VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_VALVE = VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_EXT,
+    VK_DESCRIPTOR_POOL_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkDescriptorPoolCreateFlagBits;
+typedef VkFlags VkDescriptorPoolCreateFlags;
+typedef VkFlags VkDescriptorPoolResetFlags;
+
+typedef enum VkDescriptorSetLayoutCreateFlagBits {
+    VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT = 0x00000002,
+    VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR = 0x00000001,
+    VK_DESCRIPTOR_SET_LAYOUT_CREATE_DESCRIPTOR_BUFFER_BIT_EXT = 0x00000010,
+    VK_DESCRIPTOR_SET_LAYOUT_CREATE_EMBEDDED_IMMUTABLE_SAMPLERS_BIT_EXT = 0x00000020,
+    VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_EXT = 0x00000004,
+    VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT,
+    VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_VALVE = VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_EXT,
+    VK_DESCRIPTOR_SET_LAYOUT_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkDescriptorSetLayoutCreateFlagBits;
+typedef VkFlags VkDescriptorSetLayoutCreateFlags;
+
+typedef enum VkAttachmentDescriptionFlagBits {
+    VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT = 0x00000001,
+    VK_ATTACHMENT_DESCRIPTION_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkAttachmentDescriptionFlagBits;
+typedef VkFlags VkAttachmentDescriptionFlags;
+
+typedef enum VkDependencyFlagBits {
+    VK_DEPENDENCY_BY_REGION_BIT = 0x00000001,
+    VK_DEPENDENCY_DEVICE_GROUP_BIT = 0x00000004,
+    VK_DEPENDENCY_VIEW_LOCAL_BIT = 0x00000002,
+    VK_DEPENDENCY_FEEDBACK_LOOP_BIT_EXT = 0x00000008,
+    VK_DEPENDENCY_VIEW_LOCAL_BIT_KHR = VK_DEPENDENCY_VIEW_LOCAL_BIT,
+    VK_DEPENDENCY_DEVICE_GROUP_BIT_KHR = VK_DEPENDENCY_DEVICE_GROUP_BIT,
+    VK_DEPENDENCY_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkDependencyFlagBits;
+typedef VkFlags VkDependencyFlags;
+
+typedef enum VkFramebufferCreateFlagBits {
+    VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT = 0x00000001,
+    VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT,
+    VK_FRAMEBUFFER_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkFramebufferCreateFlagBits;
+typedef VkFlags VkFramebufferCreateFlags;
+
+typedef enum VkRenderPassCreateFlagBits {
+    VK_RENDER_PASS_CREATE_TRANSFORM_BIT_QCOM = 0x00000002,
+    VK_RENDER_PASS_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkRenderPassCreateFlagBits;
+typedef VkFlags VkRenderPassCreateFlags;
+
+typedef enum VkSubpassDescriptionFlagBits {
+    VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX = 0x00000001,
+    VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX = 0x00000002,
+    VK_SUBPASS_DESCRIPTION_FRAGMENT_REGION_BIT_QCOM = 0x00000004,
+    VK_SUBPASS_DESCRIPTION_SHADER_RESOLVE_BIT_QCOM = 0x00000008,
+    VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_COLOR_ACCESS_BIT_EXT = 0x00000010,
+    VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_EXT = 0x00000020,
+    VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_EXT = 0x00000040,
+    VK_SUBPASS_DESCRIPTION_ENABLE_LEGACY_DITHERING_BIT_EXT = 0x00000080,
+    VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_COLOR_ACCESS_BIT_ARM = VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_COLOR_ACCESS_BIT_EXT,
+    VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_ARM = VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_EXT,
+    VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_ARM = VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_EXT,
+    VK_SUBPASS_DESCRIPTION_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkSubpassDescriptionFlagBits;
+typedef VkFlags VkSubpassDescriptionFlags;
+
+typedef enum VkCommandPoolCreateFlagBits {
+    VK_COMMAND_POOL_CREATE_TRANSIENT_BIT = 0x00000001,
+    VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT = 0x00000002,
+    VK_COMMAND_POOL_CREATE_PROTECTED_BIT = 0x00000004,
+    VK_COMMAND_POOL_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkCommandPoolCreateFlagBits;
+typedef VkFlags VkCommandPoolCreateFlags;
+
+typedef enum VkCommandPoolResetFlagBits {
+    VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT = 0x00000001,
+    VK_COMMAND_POOL_RESET_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkCommandPoolResetFlagBits;
+typedef VkFlags VkCommandPoolResetFlags;
+
+typedef enum VkCommandBufferUsageFlagBits {
+    VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT = 0x00000001,
+    VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT = 0x00000002,
+    VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT = 0x00000004,
+    VK_COMMAND_BUFFER_USAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkCommandBufferUsageFlagBits;
+typedef VkFlags VkCommandBufferUsageFlags;
+
+typedef enum VkQueryControlFlagBits {
+    VK_QUERY_CONTROL_PRECISE_BIT = 0x00000001,
+    VK_QUERY_CONTROL_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkQueryControlFlagBits;
+typedef VkFlags VkQueryControlFlags;
+
+typedef enum VkCommandBufferResetFlagBits {
+    VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT = 0x00000001,
+    VK_COMMAND_BUFFER_RESET_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkCommandBufferResetFlagBits;
+typedef VkFlags VkCommandBufferResetFlags;
+
+typedef enum VkStencilFaceFlagBits {
+    VK_STENCIL_FACE_FRONT_BIT = 0x00000001,
+    VK_STENCIL_FACE_BACK_BIT = 0x00000002,
+    VK_STENCIL_FACE_FRONT_AND_BACK = 0x00000003,
+    VK_STENCIL_FRONT_AND_BACK = VK_STENCIL_FACE_FRONT_AND_BACK,
+    VK_STENCIL_FACE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkStencilFaceFlagBits;
+typedef VkFlags VkStencilFaceFlags;
+typedef struct VkExtent2D {
+    uint32_t    width;
+    uint32_t    height;
+} VkExtent2D;
+
+typedef struct VkExtent3D {
+    uint32_t    width;
+    uint32_t    height;
+    uint32_t    depth;
+} VkExtent3D;
+
+typedef struct VkOffset2D {
+    int32_t    x;
+    int32_t    y;
+} VkOffset2D;
+
+typedef struct VkOffset3D {
+    int32_t    x;
+    int32_t    y;
+    int32_t    z;
+} VkOffset3D;
+
+typedef struct VkRect2D {
+    VkOffset2D    offset;
+    VkExtent2D    extent;
+} VkRect2D;
+
+typedef struct VkBaseInStructure {
+    VkStructureType                    sType;
+    const struct VkBaseInStructure*    pNext;
+} VkBaseInStructure;
+
+typedef struct VkBaseOutStructure {
+    VkStructureType               sType;
+    struct VkBaseOutStructure*    pNext;
+} VkBaseOutStructure;
+
+typedef struct VkBufferMemoryBarrier {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkAccessFlags      srcAccessMask;
+    VkAccessFlags      dstAccessMask;
+    uint32_t           srcQueueFamilyIndex;
+    uint32_t           dstQueueFamilyIndex;
+    VkBuffer           buffer;
+    VkDeviceSize       offset;
+    VkDeviceSize       size;
+} VkBufferMemoryBarrier;
+
+typedef struct VkDispatchIndirectCommand {
+    uint32_t    x;
+    uint32_t    y;
+    uint32_t    z;
+} VkDispatchIndirectCommand;
+
+typedef struct VkDrawIndexedIndirectCommand {
+    uint32_t    indexCount;
+    uint32_t    instanceCount;
+    uint32_t    firstIndex;
+    int32_t     vertexOffset;
+    uint32_t    firstInstance;
+} VkDrawIndexedIndirectCommand;
+
+typedef struct VkDrawIndirectCommand {
+    uint32_t    vertexCount;
+    uint32_t    instanceCount;
+    uint32_t    firstVertex;
+    uint32_t    firstInstance;
+} VkDrawIndirectCommand;
+
+typedef struct VkImageSubresourceRange {
+    VkImageAspectFlags    aspectMask;
+    uint32_t              baseMipLevel;
+    uint32_t              levelCount;
+    uint32_t              baseArrayLayer;
+    uint32_t              layerCount;
+} VkImageSubresourceRange;
+
+typedef struct VkImageMemoryBarrier {
+    VkStructureType            sType;
+    const void*                pNext;
+    VkAccessFlags              srcAccessMask;
+    VkAccessFlags              dstAccessMask;
+    VkImageLayout              oldLayout;
+    VkImageLayout              newLayout;
+    uint32_t                   srcQueueFamilyIndex;
+    uint32_t                   dstQueueFamilyIndex;
+    VkImage                    image;
+    VkImageSubresourceRange    subresourceRange;
+} VkImageMemoryBarrier;
+
+typedef struct VkMemoryBarrier {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkAccessFlags      srcAccessMask;
+    VkAccessFlags      dstAccessMask;
+} VkMemoryBarrier;
+
+typedef struct VkPipelineCacheHeaderVersionOne {
+    uint32_t                        headerSize;
+    VkPipelineCacheHeaderVersion    headerVersion;
+    uint32_t                        vendorID;
+    uint32_t                        deviceID;
+    uint8_t                         pipelineCacheUUID[VK_UUID_SIZE];
+} VkPipelineCacheHeaderVersionOne;
+
+typedef void* (VKAPI_PTR *PFN_vkAllocationFunction)(
+    void*                                       pUserData,
+    size_t                                      size,
+    size_t                                      alignment,
+    VkSystemAllocationScope                     allocationScope);
+
+typedef void (VKAPI_PTR *PFN_vkFreeFunction)(
+    void*                                       pUserData,
+    void*                                       pMemory);
+
+typedef void (VKAPI_PTR *PFN_vkInternalAllocationNotification)(
+    void*                                       pUserData,
+    size_t                                      size,
+    VkInternalAllocationType                    allocationType,
+    VkSystemAllocationScope                     allocationScope);
+
+typedef void (VKAPI_PTR *PFN_vkInternalFreeNotification)(
+    void*                                       pUserData,
+    size_t                                      size,
+    VkInternalAllocationType                    allocationType,
+    VkSystemAllocationScope                     allocationScope);
+
+typedef void* (VKAPI_PTR *PFN_vkReallocationFunction)(
+    void*                                       pUserData,
+    void*                                       pOriginal,
+    size_t                                      size,
+    size_t                                      alignment,
+    VkSystemAllocationScope                     allocationScope);
+
+typedef void (VKAPI_PTR *PFN_vkVoidFunction)(void);
+typedef struct VkAllocationCallbacks {
+    void*                                   pUserData;
+    PFN_vkAllocationFunction                pfnAllocation;
+    PFN_vkReallocationFunction              pfnReallocation;
+    PFN_vkFreeFunction                      pfnFree;
+    PFN_vkInternalAllocationNotification    pfnInternalAllocation;
+    PFN_vkInternalFreeNotification          pfnInternalFree;
+} VkAllocationCallbacks;
+
+typedef struct VkApplicationInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    const char*        pApplicationName;
+    uint32_t           applicationVersion;
+    const char*        pEngineName;
+    uint32_t           engineVersion;
+    uint32_t           apiVersion;
+} VkApplicationInfo;
+
+typedef struct VkFormatProperties {
+    VkFormatFeatureFlags    linearTilingFeatures;
+    VkFormatFeatureFlags    optimalTilingFeatures;
+    VkFormatFeatureFlags    bufferFeatures;
+} VkFormatProperties;
+
+typedef struct VkImageFormatProperties {
+    VkExtent3D            maxExtent;
+    uint32_t              maxMipLevels;
+    uint32_t              maxArrayLayers;
+    VkSampleCountFlags    sampleCounts;
+    VkDeviceSize          maxResourceSize;
+} VkImageFormatProperties;
+
+typedef struct VkInstanceCreateInfo {
+    VkStructureType             sType;
+    const void*                 pNext;
+    VkInstanceCreateFlags       flags;
+    const VkApplicationInfo*    pApplicationInfo;
+    uint32_t                    enabledLayerCount;
+    const char* const*          ppEnabledLayerNames;
+    uint32_t                    enabledExtensionCount;
+    const char* const*          ppEnabledExtensionNames;
+} VkInstanceCreateInfo;
+
+typedef struct VkMemoryHeap {
+    VkDeviceSize         size;
+    VkMemoryHeapFlags    flags;
+} VkMemoryHeap;
+
+typedef struct VkMemoryType {
+    VkMemoryPropertyFlags    propertyFlags;
+    uint32_t                 heapIndex;
+} VkMemoryType;
+
+typedef struct VkPhysicalDeviceFeatures {
+    VkBool32    robustBufferAccess;
+    VkBool32    fullDrawIndexUint32;
+    VkBool32    imageCubeArray;
+    VkBool32    independentBlend;
+    VkBool32    geometryShader;
+    VkBool32    tessellationShader;
+    VkBool32    sampleRateShading;
+    VkBool32    dualSrcBlend;
+    VkBool32    logicOp;
+    VkBool32    multiDrawIndirect;
+    VkBool32    drawIndirectFirstInstance;
+    VkBool32    depthClamp;
+    VkBool32    depthBiasClamp;
+    VkBool32    fillModeNonSolid;
+    VkBool32    depthBounds;
+    VkBool32    wideLines;
+    VkBool32    largePoints;
+    VkBool32    alphaToOne;
+    VkBool32    multiViewport;
+    VkBool32    samplerAnisotropy;
+    VkBool32    textureCompressionETC2;
+    VkBool32    textureCompressionASTC_LDR;
+    VkBool32    textureCompressionBC;
+    VkBool32    occlusionQueryPrecise;
+    VkBool32    pipelineStatisticsQuery;
+    VkBool32    vertexPipelineStoresAndAtomics;
+    VkBool32    fragmentStoresAndAtomics;
+    VkBool32    shaderTessellationAndGeometryPointSize;
+    VkBool32    shaderImageGatherExtended;
+    VkBool32    shaderStorageImageExtendedFormats;
+    VkBool32    shaderStorageImageMultisample;
+    VkBool32    shaderStorageImageReadWithoutFormat;
+    VkBool32    shaderStorageImageWriteWithoutFormat;
+    VkBool32    shaderUniformBufferArrayDynamicIndexing;
+    VkBool32    shaderSampledImageArrayDynamicIndexing;
+    VkBool32    shaderStorageBufferArrayDynamicIndexing;
+    VkBool32    shaderStorageImageArrayDynamicIndexing;
+    VkBool32    shaderClipDistance;
+    VkBool32    shaderCullDistance;
+    VkBool32    shaderFloat64;
+    VkBool32    shaderInt64;
+    VkBool32    shaderInt16;
+    VkBool32    shaderResourceResidency;
+    VkBool32    shaderResourceMinLod;
+    VkBool32    sparseBinding;
+    VkBool32    sparseResidencyBuffer;
+    VkBool32    sparseResidencyImage2D;
+    VkBool32    sparseResidencyImage3D;
+    VkBool32    sparseResidency2Samples;
+    VkBool32    sparseResidency4Samples;
+    VkBool32    sparseResidency8Samples;
+    VkBool32    sparseResidency16Samples;
+    VkBool32    sparseResidencyAliased;
+    VkBool32    variableMultisampleRate;
+    VkBool32    inheritedQueries;
+} VkPhysicalDeviceFeatures;
+
+typedef struct VkPhysicalDeviceLimits {
+    uint32_t              maxImageDimension1D;
+    uint32_t              maxImageDimension2D;
+    uint32_t              maxImageDimension3D;
+    uint32_t              maxImageDimensionCube;
+    uint32_t              maxImageArrayLayers;
+    uint32_t              maxTexelBufferElements;
+    uint32_t              maxUniformBufferRange;
+    uint32_t              maxStorageBufferRange;
+    uint32_t              maxPushConstantsSize;
+    uint32_t              maxMemoryAllocationCount;
+    uint32_t              maxSamplerAllocationCount;
+    VkDeviceSize          bufferImageGranularity;
+    VkDeviceSize          sparseAddressSpaceSize;
+    uint32_t              maxBoundDescriptorSets;
+    uint32_t              maxPerStageDescriptorSamplers;
+    uint32_t              maxPerStageDescriptorUniformBuffers;
+    uint32_t              maxPerStageDescriptorStorageBuffers;
+    uint32_t              maxPerStageDescriptorSampledImages;
+    uint32_t              maxPerStageDescriptorStorageImages;
+    uint32_t              maxPerStageDescriptorInputAttachments;
+    uint32_t              maxPerStageResources;
+    uint32_t              maxDescriptorSetSamplers;
+    uint32_t              maxDescriptorSetUniformBuffers;
+    uint32_t              maxDescriptorSetUniformBuffersDynamic;
+    uint32_t              maxDescriptorSetStorageBuffers;
+    uint32_t              maxDescriptorSetStorageBuffersDynamic;
+    uint32_t              maxDescriptorSetSampledImages;
+    uint32_t              maxDescriptorSetStorageImages;
+    uint32_t              maxDescriptorSetInputAttachments;
+    uint32_t              maxVertexInputAttributes;
+    uint32_t              maxVertexInputBindings;
+    uint32_t              maxVertexInputAttributeOffset;
+    uint32_t              maxVertexInputBindingStride;
+    uint32_t              maxVertexOutputComponents;
+    uint32_t              maxTessellationGenerationLevel;
+    uint32_t              maxTessellationPatchSize;
+    uint32_t              maxTessellationControlPerVertexInputComponents;
+    uint32_t              maxTessellationControlPerVertexOutputComponents;
+    uint32_t              maxTessellationControlPerPatchOutputComponents;
+    uint32_t              maxTessellationControlTotalOutputComponents;
+    uint32_t              maxTessellationEvaluationInputComponents;
+    uint32_t              maxTessellationEvaluationOutputComponents;
+    uint32_t              maxGeometryShaderInvocations;
+    uint32_t              maxGeometryInputComponents;
+    uint32_t              maxGeometryOutputComponents;
+    uint32_t              maxGeometryOutputVertices;
+    uint32_t              maxGeometryTotalOutputComponents;
+    uint32_t              maxFragmentInputComponents;
+    uint32_t              maxFragmentOutputAttachments;
+    uint32_t              maxFragmentDualSrcAttachments;
+    uint32_t              maxFragmentCombinedOutputResources;
+    uint32_t              maxComputeSharedMemorySize;
+    uint32_t              maxComputeWorkGroupCount[3];
+    uint32_t              maxComputeWorkGroupInvocations;
+    uint32_t              maxComputeWorkGroupSize[3];
+    uint32_t              subPixelPrecisionBits;
+    uint32_t              subTexelPrecisionBits;
+    uint32_t              mipmapPrecisionBits;
+    uint32_t              maxDrawIndexedIndexValue;
+    uint32_t              maxDrawIndirectCount;
+    float                 maxSamplerLodBias;
+    float                 maxSamplerAnisotropy;
+    uint32_t              maxViewports;
+    uint32_t              maxViewportDimensions[2];
+    float                 viewportBoundsRange[2];
+    uint32_t              viewportSubPixelBits;
+    size_t                minMemoryMapAlignment;
+    VkDeviceSize          minTexelBufferOffsetAlignment;
+    VkDeviceSize          minUniformBufferOffsetAlignment;
+    VkDeviceSize          minStorageBufferOffsetAlignment;
+    int32_t               minTexelOffset;
+    uint32_t              maxTexelOffset;
+    int32_t               minTexelGatherOffset;
+    uint32_t              maxTexelGatherOffset;
+    float                 minInterpolationOffset;
+    float                 maxInterpolationOffset;
+    uint32_t              subPixelInterpolationOffsetBits;
+    uint32_t              maxFramebufferWidth;
+    uint32_t              maxFramebufferHeight;
+    uint32_t              maxFramebufferLayers;
+    VkSampleCountFlags    framebufferColorSampleCounts;
+    VkSampleCountFlags    framebufferDepthSampleCounts;
+    VkSampleCountFlags    framebufferStencilSampleCounts;
+    VkSampleCountFlags    framebufferNoAttachmentsSampleCounts;
+    uint32_t              maxColorAttachments;
+    VkSampleCountFlags    sampledImageColorSampleCounts;
+    VkSampleCountFlags    sampledImageIntegerSampleCounts;
+    VkSampleCountFlags    sampledImageDepthSampleCounts;
+    VkSampleCountFlags    sampledImageStencilSampleCounts;
+    VkSampleCountFlags    storageImageSampleCounts;
+    uint32_t              maxSampleMaskWords;
+    VkBool32              timestampComputeAndGraphics;
+    float                 timestampPeriod;
+    uint32_t              maxClipDistances;
+    uint32_t              maxCullDistances;
+    uint32_t              maxCombinedClipAndCullDistances;
+    uint32_t              discreteQueuePriorities;
+    float                 pointSizeRange[2];
+    float                 lineWidthRange[2];
+    float                 pointSizeGranularity;
+    float                 lineWidthGranularity;
+    VkBool32              strictLines;
+    VkBool32              standardSampleLocations;
+    VkDeviceSize          optimalBufferCopyOffsetAlignment;
+    VkDeviceSize          optimalBufferCopyRowPitchAlignment;
+    VkDeviceSize          nonCoherentAtomSize;
+} VkPhysicalDeviceLimits;
+
+typedef struct VkPhysicalDeviceMemoryProperties {
+    uint32_t        memoryTypeCount;
+    VkMemoryType    memoryTypes[VK_MAX_MEMORY_TYPES];
+    uint32_t        memoryHeapCount;
+    VkMemoryHeap    memoryHeaps[VK_MAX_MEMORY_HEAPS];
+} VkPhysicalDeviceMemoryProperties;
+
+typedef struct VkPhysicalDeviceSparseProperties {
+    VkBool32    residencyStandard2DBlockShape;
+    VkBool32    residencyStandard2DMultisampleBlockShape;
+    VkBool32    residencyStandard3DBlockShape;
+    VkBool32    residencyAlignedMipSize;
+    VkBool32    residencyNonResidentStrict;
+} VkPhysicalDeviceSparseProperties;
+
+typedef struct VkPhysicalDeviceProperties {
+    uint32_t                            apiVersion;
+    uint32_t                            driverVersion;
+    uint32_t                            vendorID;
+    uint32_t                            deviceID;
+    VkPhysicalDeviceType                deviceType;
+    char                                deviceName[VK_MAX_PHYSICAL_DEVICE_NAME_SIZE];
+    uint8_t                             pipelineCacheUUID[VK_UUID_SIZE];
+    VkPhysicalDeviceLimits              limits;
+    VkPhysicalDeviceSparseProperties    sparseProperties;
+} VkPhysicalDeviceProperties;
+
+typedef struct VkQueueFamilyProperties {
+    VkQueueFlags    queueFlags;
+    uint32_t        queueCount;
+    uint32_t        timestampValidBits;
+    VkExtent3D      minImageTransferGranularity;
+} VkQueueFamilyProperties;
+
+typedef struct VkDeviceQueueCreateInfo {
+    VkStructureType             sType;
+    const void*                 pNext;
+    VkDeviceQueueCreateFlags    flags;
+    uint32_t                    queueFamilyIndex;
+    uint32_t                    queueCount;
+    const float*                pQueuePriorities;
+} VkDeviceQueueCreateInfo;
+
+typedef struct VkDeviceCreateInfo {
+    VkStructureType                    sType;
+    const void*                        pNext;
+    VkDeviceCreateFlags                flags;
+    uint32_t                           queueCreateInfoCount;
+    const VkDeviceQueueCreateInfo*     pQueueCreateInfos;
+    uint32_t                           enabledLayerCount;
+    const char* const*                 ppEnabledLayerNames;
+    uint32_t                           enabledExtensionCount;
+    const char* const*                 ppEnabledExtensionNames;
+    const VkPhysicalDeviceFeatures*    pEnabledFeatures;
+} VkDeviceCreateInfo;
+
+typedef struct VkExtensionProperties {
+    char        extensionName[VK_MAX_EXTENSION_NAME_SIZE];
+    uint32_t    specVersion;
+} VkExtensionProperties;
+
+typedef struct VkLayerProperties {
+    char        layerName[VK_MAX_EXTENSION_NAME_SIZE];
+    uint32_t    specVersion;
+    uint32_t    implementationVersion;
+    char        description[VK_MAX_DESCRIPTION_SIZE];
+} VkLayerProperties;
+
+typedef struct VkSubmitInfo {
+    VkStructureType                sType;
+    const void*                    pNext;
+    uint32_t                       waitSemaphoreCount;
+    const VkSemaphore*             pWaitSemaphores;
+    const VkPipelineStageFlags*    pWaitDstStageMask;
+    uint32_t                       commandBufferCount;
+    const VkCommandBuffer*         pCommandBuffers;
+    uint32_t                       signalSemaphoreCount;
+    const VkSemaphore*             pSignalSemaphores;
+} VkSubmitInfo;
+
+typedef struct VkMappedMemoryRange {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkDeviceMemory     memory;
+    VkDeviceSize       offset;
+    VkDeviceSize       size;
+} VkMappedMemoryRange;
+
+typedef struct VkMemoryAllocateInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkDeviceSize       allocationSize;
+    uint32_t           memoryTypeIndex;
+} VkMemoryAllocateInfo;
+
+typedef struct VkMemoryRequirements {
+    VkDeviceSize    size;
+    VkDeviceSize    alignment;
+    uint32_t        memoryTypeBits;
+} VkMemoryRequirements;
+
+typedef struct VkSparseMemoryBind {
+    VkDeviceSize               resourceOffset;
+    VkDeviceSize               size;
+    VkDeviceMemory             memory;
+    VkDeviceSize               memoryOffset;
+    VkSparseMemoryBindFlags    flags;
+} VkSparseMemoryBind;
+
+typedef struct VkSparseBufferMemoryBindInfo {
+    VkBuffer                     buffer;
+    uint32_t                     bindCount;
+    const VkSparseMemoryBind*    pBinds;
+} VkSparseBufferMemoryBindInfo;
+
+typedef struct VkSparseImageOpaqueMemoryBindInfo {
+    VkImage                      image;
+    uint32_t                     bindCount;
+    const VkSparseMemoryBind*    pBinds;
+} VkSparseImageOpaqueMemoryBindInfo;
+
+typedef struct VkImageSubresource {
+    VkImageAspectFlags    aspectMask;
+    uint32_t              mipLevel;
+    uint32_t              arrayLayer;
+} VkImageSubresource;
+
+typedef struct VkSparseImageMemoryBind {
+    VkImageSubresource         subresource;
+    VkOffset3D                 offset;
+    VkExtent3D                 extent;
+    VkDeviceMemory             memory;
+    VkDeviceSize               memoryOffset;
+    VkSparseMemoryBindFlags    flags;
+} VkSparseImageMemoryBind;
+
+typedef struct VkSparseImageMemoryBindInfo {
+    VkImage                           image;
+    uint32_t                          bindCount;
+    const VkSparseImageMemoryBind*    pBinds;
+} VkSparseImageMemoryBindInfo;
+
+typedef struct VkBindSparseInfo {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    uint32_t                                    waitSemaphoreCount;
+    const VkSemaphore*                          pWaitSemaphores;
+    uint32_t                                    bufferBindCount;
+    const VkSparseBufferMemoryBindInfo*         pBufferBinds;
+    uint32_t                                    imageOpaqueBindCount;
+    const VkSparseImageOpaqueMemoryBindInfo*    pImageOpaqueBinds;
+    uint32_t                                    imageBindCount;
+    const VkSparseImageMemoryBindInfo*          pImageBinds;
+    uint32_t                                    signalSemaphoreCount;
+    const VkSemaphore*                          pSignalSemaphores;
+} VkBindSparseInfo;
+
+typedef struct VkSparseImageFormatProperties {
+    VkImageAspectFlags          aspectMask;
+    VkExtent3D                  imageGranularity;
+    VkSparseImageFormatFlags    flags;
+} VkSparseImageFormatProperties;
+
+typedef struct VkSparseImageMemoryRequirements {
+    VkSparseImageFormatProperties    formatProperties;
+    uint32_t                         imageMipTailFirstLod;
+    VkDeviceSize                     imageMipTailSize;
+    VkDeviceSize                     imageMipTailOffset;
+    VkDeviceSize                     imageMipTailStride;
+} VkSparseImageMemoryRequirements;
+
+typedef struct VkFenceCreateInfo {
+    VkStructureType       sType;
+    const void*           pNext;
+    VkFenceCreateFlags    flags;
+} VkFenceCreateInfo;
+
+typedef struct VkSemaphoreCreateInfo {
+    VkStructureType           sType;
+    const void*               pNext;
+    VkSemaphoreCreateFlags    flags;
+} VkSemaphoreCreateInfo;
+
+typedef struct VkEventCreateInfo {
+    VkStructureType       sType;
+    const void*           pNext;
+    VkEventCreateFlags    flags;
+} VkEventCreateInfo;
+
+typedef struct VkQueryPoolCreateInfo {
+    VkStructureType                  sType;
+    const void*                      pNext;
+    VkQueryPoolCreateFlags           flags;
+    VkQueryType                      queryType;
+    uint32_t                         queryCount;
+    VkQueryPipelineStatisticFlags    pipelineStatistics;
+} VkQueryPoolCreateInfo;
+
+typedef struct VkBufferCreateInfo {
+    VkStructureType        sType;
+    const void*            pNext;
+    VkBufferCreateFlags    flags;
+    VkDeviceSize           size;
+    VkBufferUsageFlags     usage;
+    VkSharingMode          sharingMode;
+    uint32_t               queueFamilyIndexCount;
+    const uint32_t*        pQueueFamilyIndices;
+} VkBufferCreateInfo;
+
+typedef struct VkBufferViewCreateInfo {
+    VkStructureType            sType;
+    const void*                pNext;
+    VkBufferViewCreateFlags    flags;
+    VkBuffer                   buffer;
+    VkFormat                   format;
+    VkDeviceSize               offset;
+    VkDeviceSize               range;
+} VkBufferViewCreateInfo;
+
+typedef struct VkImageCreateInfo {
+    VkStructureType          sType;
+    const void*              pNext;
+    VkImageCreateFlags       flags;
+    VkImageType              imageType;
+    VkFormat                 format;
+    VkExtent3D               extent;
+    uint32_t                 mipLevels;
+    uint32_t                 arrayLayers;
+    VkSampleCountFlagBits    samples;
+    VkImageTiling            tiling;
+    VkImageUsageFlags        usage;
+    VkSharingMode            sharingMode;
+    uint32_t                 queueFamilyIndexCount;
+    const uint32_t*          pQueueFamilyIndices;
+    VkImageLayout            initialLayout;
+} VkImageCreateInfo;
+
+typedef struct VkSubresourceLayout {
+    VkDeviceSize    offset;
+    VkDeviceSize    size;
+    VkDeviceSize    rowPitch;
+    VkDeviceSize    arrayPitch;
+    VkDeviceSize    depthPitch;
+} VkSubresourceLayout;
+
+typedef struct VkComponentMapping {
+    VkComponentSwizzle    r;
+    VkComponentSwizzle    g;
+    VkComponentSwizzle    b;
+    VkComponentSwizzle    a;
+} VkComponentMapping;
+
+typedef struct VkImageViewCreateInfo {
+    VkStructureType            sType;
+    const void*                pNext;
+    VkImageViewCreateFlags     flags;
+    VkImage                    image;
+    VkImageViewType            viewType;
+    VkFormat                   format;
+    VkComponentMapping         components;
+    VkImageSubresourceRange    subresourceRange;
+} VkImageViewCreateInfo;
+
+typedef struct VkShaderModuleCreateInfo {
+    VkStructureType              sType;
+    const void*                  pNext;
+    VkShaderModuleCreateFlags    flags;
+    size_t                       codeSize;
+    const uint32_t*              pCode;
+} VkShaderModuleCreateInfo;
+
+typedef struct VkPipelineCacheCreateInfo {
+    VkStructureType               sType;
+    const void*                   pNext;
+    VkPipelineCacheCreateFlags    flags;
+    size_t                        initialDataSize;
+    const void*                   pInitialData;
+} VkPipelineCacheCreateInfo;
+
+typedef struct VkSpecializationMapEntry {
+    uint32_t    constantID;
+    uint32_t    offset;
+    size_t      size;
+} VkSpecializationMapEntry;
+
+typedef struct VkSpecializationInfo {
+    uint32_t                           mapEntryCount;
+    const VkSpecializationMapEntry*    pMapEntries;
+    size_t                             dataSize;
+    const void*                        pData;
+} VkSpecializationInfo;
+
+typedef struct VkPipelineShaderStageCreateInfo {
+    VkStructureType                     sType;
+    const void*                         pNext;
+    VkPipelineShaderStageCreateFlags    flags;
+    VkShaderStageFlagBits               stage;
+    VkShaderModule                      module;
+    const char*                         pName;
+    const VkSpecializationInfo*         pSpecializationInfo;
+} VkPipelineShaderStageCreateInfo;
+
+typedef struct VkComputePipelineCreateInfo {
+    VkStructureType                    sType;
+    const void*                        pNext;
+    VkPipelineCreateFlags              flags;
+    VkPipelineShaderStageCreateInfo    stage;
+    VkPipelineLayout                   layout;
+    VkPipeline                         basePipelineHandle;
+    int32_t                            basePipelineIndex;
+} VkComputePipelineCreateInfo;
+
+typedef struct VkVertexInputBindingDescription {
+    uint32_t             binding;
+    uint32_t             stride;
+    VkVertexInputRate    inputRate;
+} VkVertexInputBindingDescription;
+
+typedef struct VkVertexInputAttributeDescription {
+    uint32_t    location;
+    uint32_t    binding;
+    VkFormat    format;
+    uint32_t    offset;
+} VkVertexInputAttributeDescription;
+
+typedef struct VkPipelineVertexInputStateCreateInfo {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    VkPipelineVertexInputStateCreateFlags       flags;
+    uint32_t                                    vertexBindingDescriptionCount;
+    const VkVertexInputBindingDescription*      pVertexBindingDescriptions;
+    uint32_t                                    vertexAttributeDescriptionCount;
+    const VkVertexInputAttributeDescription*    pVertexAttributeDescriptions;
+} VkPipelineVertexInputStateCreateInfo;
+
+typedef struct VkPipelineInputAssemblyStateCreateInfo {
+    VkStructureType                            sType;
+    const void*                                pNext;
+    VkPipelineInputAssemblyStateCreateFlags    flags;
+    VkPrimitiveTopology                        topology;
+    VkBool32                                   primitiveRestartEnable;
+} VkPipelineInputAssemblyStateCreateInfo;
+
+typedef struct VkPipelineTessellationStateCreateInfo {
+    VkStructureType                           sType;
+    const void*                               pNext;
+    VkPipelineTessellationStateCreateFlags    flags;
+    uint32_t                                  patchControlPoints;
+} VkPipelineTessellationStateCreateInfo;
+
+typedef struct VkViewport {
+    float    x;
+    float    y;
+    float    width;
+    float    height;
+    float    minDepth;
+    float    maxDepth;
+} VkViewport;
+
+typedef struct VkPipelineViewportStateCreateInfo {
+    VkStructureType                       sType;
+    const void*                           pNext;
+    VkPipelineViewportStateCreateFlags    flags;
+    uint32_t                              viewportCount;
+    const VkViewport*                     pViewports;
+    uint32_t                              scissorCount;
+    const VkRect2D*                       pScissors;
+} VkPipelineViewportStateCreateInfo;
+
+typedef struct VkPipelineRasterizationStateCreateInfo {
+    VkStructureType                            sType;
+    const void*                                pNext;
+    VkPipelineRasterizationStateCreateFlags    flags;
+    VkBool32                                   depthClampEnable;
+    VkBool32                                   rasterizerDiscardEnable;
+    VkPolygonMode                              polygonMode;
+    VkCullModeFlags                            cullMode;
+    VkFrontFace                                frontFace;
+    VkBool32                                   depthBiasEnable;
+    float                                      depthBiasConstantFactor;
+    float                                      depthBiasClamp;
+    float                                      depthBiasSlopeFactor;
+    float                                      lineWidth;
+} VkPipelineRasterizationStateCreateInfo;
+
+typedef struct VkPipelineMultisampleStateCreateInfo {
+    VkStructureType                          sType;
+    const void*                              pNext;
+    VkPipelineMultisampleStateCreateFlags    flags;
+    VkSampleCountFlagBits                    rasterizationSamples;
+    VkBool32                                 sampleShadingEnable;
+    float                                    minSampleShading;
+    const VkSampleMask*                      pSampleMask;
+    VkBool32                                 alphaToCoverageEnable;
+    VkBool32                                 alphaToOneEnable;
+} VkPipelineMultisampleStateCreateInfo;
+
+typedef struct VkStencilOpState {
+    VkStencilOp    failOp;
+    VkStencilOp    passOp;
+    VkStencilOp    depthFailOp;
+    VkCompareOp    compareOp;
+    uint32_t       compareMask;
+    uint32_t       writeMask;
+    uint32_t       reference;
+} VkStencilOpState;
+
+typedef struct VkPipelineDepthStencilStateCreateInfo {
+    VkStructureType                           sType;
+    const void*                               pNext;
+    VkPipelineDepthStencilStateCreateFlags    flags;
+    VkBool32                                  depthTestEnable;
+    VkBool32                                  depthWriteEnable;
+    VkCompareOp                               depthCompareOp;
+    VkBool32                                  depthBoundsTestEnable;
+    VkBool32                                  stencilTestEnable;
+    VkStencilOpState                          front;
+    VkStencilOpState                          back;
+    float                                     minDepthBounds;
+    float                                     maxDepthBounds;
+} VkPipelineDepthStencilStateCreateInfo;
+
+typedef struct VkPipelineColorBlendAttachmentState {
+    VkBool32                 blendEnable;
+    VkBlendFactor            srcColorBlendFactor;
+    VkBlendFactor            dstColorBlendFactor;
+    VkBlendOp                colorBlendOp;
+    VkBlendFactor            srcAlphaBlendFactor;
+    VkBlendFactor            dstAlphaBlendFactor;
+    VkBlendOp                alphaBlendOp;
+    VkColorComponentFlags    colorWriteMask;
+} VkPipelineColorBlendAttachmentState;
+
+typedef struct VkPipelineColorBlendStateCreateInfo {
+    VkStructureType                               sType;
+    const void*                                   pNext;
+    VkPipelineColorBlendStateCreateFlags          flags;
+    VkBool32                                      logicOpEnable;
+    VkLogicOp                                     logicOp;
+    uint32_t                                      attachmentCount;
+    const VkPipelineColorBlendAttachmentState*    pAttachments;
+    float                                         blendConstants[4];
+} VkPipelineColorBlendStateCreateInfo;
+
+typedef struct VkPipelineDynamicStateCreateInfo {
+    VkStructureType                      sType;
+    const void*                          pNext;
+    VkPipelineDynamicStateCreateFlags    flags;
+    uint32_t                             dynamicStateCount;
+    const VkDynamicState*                pDynamicStates;
+} VkPipelineDynamicStateCreateInfo;
+
+typedef struct VkGraphicsPipelineCreateInfo {
+    VkStructureType                                  sType;
+    const void*                                      pNext;
+    VkPipelineCreateFlags                            flags;
+    uint32_t                                         stageCount;
+    const VkPipelineShaderStageCreateInfo*           pStages;
+    const VkPipelineVertexInputStateCreateInfo*      pVertexInputState;
+    const VkPipelineInputAssemblyStateCreateInfo*    pInputAssemblyState;
+    const VkPipelineTessellationStateCreateInfo*     pTessellationState;
+    const VkPipelineViewportStateCreateInfo*         pViewportState;
+    const VkPipelineRasterizationStateCreateInfo*    pRasterizationState;
+    const VkPipelineMultisampleStateCreateInfo*      pMultisampleState;
+    const VkPipelineDepthStencilStateCreateInfo*     pDepthStencilState;
+    const VkPipelineColorBlendStateCreateInfo*       pColorBlendState;
+    const VkPipelineDynamicStateCreateInfo*          pDynamicState;
+    VkPipelineLayout                                 layout;
+    VkRenderPass                                     renderPass;
+    uint32_t                                         subpass;
+    VkPipeline                                       basePipelineHandle;
+    int32_t                                          basePipelineIndex;
+} VkGraphicsPipelineCreateInfo;
+
+typedef struct VkPushConstantRange {
+    VkShaderStageFlags    stageFlags;
+    uint32_t              offset;
+    uint32_t              size;
+} VkPushConstantRange;
+
+typedef struct VkPipelineLayoutCreateInfo {
+    VkStructureType                 sType;
+    const void*                     pNext;
+    VkPipelineLayoutCreateFlags     flags;
+    uint32_t                        setLayoutCount;
+    const VkDescriptorSetLayout*    pSetLayouts;
+    uint32_t                        pushConstantRangeCount;
+    const VkPushConstantRange*      pPushConstantRanges;
+} VkPipelineLayoutCreateInfo;
+
+typedef struct VkSamplerCreateInfo {
+    VkStructureType         sType;
+    const void*             pNext;
+    VkSamplerCreateFlags    flags;
+    VkFilter                magFilter;
+    VkFilter                minFilter;
+    VkSamplerMipmapMode     mipmapMode;
+    VkSamplerAddressMode    addressModeU;
+    VkSamplerAddressMode    addressModeV;
+    VkSamplerAddressMode    addressModeW;
+    float                   mipLodBias;
+    VkBool32                anisotropyEnable;
+    float                   maxAnisotropy;
+    VkBool32                compareEnable;
+    VkCompareOp             compareOp;
+    float                   minLod;
+    float                   maxLod;
+    VkBorderColor           borderColor;
+    VkBool32                unnormalizedCoordinates;
+} VkSamplerCreateInfo;
+
+typedef struct VkCopyDescriptorSet {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkDescriptorSet    srcSet;
+    uint32_t           srcBinding;
+    uint32_t           srcArrayElement;
+    VkDescriptorSet    dstSet;
+    uint32_t           dstBinding;
+    uint32_t           dstArrayElement;
+    uint32_t           descriptorCount;
+} VkCopyDescriptorSet;
+
+typedef struct VkDescriptorBufferInfo {
+    VkBuffer        buffer;
+    VkDeviceSize    offset;
+    VkDeviceSize    range;
+} VkDescriptorBufferInfo;
+
+typedef struct VkDescriptorImageInfo {
+    VkSampler        sampler;
+    VkImageView      imageView;
+    VkImageLayout    imageLayout;
+} VkDescriptorImageInfo;
+
+typedef struct VkDescriptorPoolSize {
+    VkDescriptorType    type;
+    uint32_t            descriptorCount;
+} VkDescriptorPoolSize;
+
+typedef struct VkDescriptorPoolCreateInfo {
+    VkStructureType                sType;
+    const void*                    pNext;
+    VkDescriptorPoolCreateFlags    flags;
+    uint32_t                       maxSets;
+    uint32_t                       poolSizeCount;
+    const VkDescriptorPoolSize*    pPoolSizes;
+} VkDescriptorPoolCreateInfo;
+
+typedef struct VkDescriptorSetAllocateInfo {
+    VkStructureType                 sType;
+    const void*                     pNext;
+    VkDescriptorPool                descriptorPool;
+    uint32_t                        descriptorSetCount;
+    const VkDescriptorSetLayout*    pSetLayouts;
+} VkDescriptorSetAllocateInfo;
+
+typedef struct VkDescriptorSetLayoutBinding {
+    uint32_t              binding;
+    VkDescriptorType      descriptorType;
+    uint32_t              descriptorCount;
+    VkShaderStageFlags    stageFlags;
+    const VkSampler*      pImmutableSamplers;
+} VkDescriptorSetLayoutBinding;
+
+typedef struct VkDescriptorSetLayoutCreateInfo {
+    VkStructureType                        sType;
+    const void*                            pNext;
+    VkDescriptorSetLayoutCreateFlags       flags;
+    uint32_t                               bindingCount;
+    const VkDescriptorSetLayoutBinding*    pBindings;
+} VkDescriptorSetLayoutCreateInfo;
+
+typedef struct VkWriteDescriptorSet {
+    VkStructureType                  sType;
+    const void*                      pNext;
+    VkDescriptorSet                  dstSet;
+    uint32_t                         dstBinding;
+    uint32_t                         dstArrayElement;
+    uint32_t                         descriptorCount;
+    VkDescriptorType                 descriptorType;
+    const VkDescriptorImageInfo*     pImageInfo;
+    const VkDescriptorBufferInfo*    pBufferInfo;
+    const VkBufferView*              pTexelBufferView;
+} VkWriteDescriptorSet;
+
+typedef struct VkAttachmentDescription {
+    VkAttachmentDescriptionFlags    flags;
+    VkFormat                        format;
+    VkSampleCountFlagBits           samples;
+    VkAttachmentLoadOp              loadOp;
+    VkAttachmentStoreOp             storeOp;
+    VkAttachmentLoadOp              stencilLoadOp;
+    VkAttachmentStoreOp             stencilStoreOp;
+    VkImageLayout                   initialLayout;
+    VkImageLayout                   finalLayout;
+} VkAttachmentDescription;
+
+typedef struct VkAttachmentReference {
+    uint32_t         attachment;
+    VkImageLayout    layout;
+} VkAttachmentReference;
+
+typedef struct VkFramebufferCreateInfo {
+    VkStructureType             sType;
+    const void*                 pNext;
+    VkFramebufferCreateFlags    flags;
+    VkRenderPass                renderPass;
+    uint32_t                    attachmentCount;
+    const VkImageView*          pAttachments;
+    uint32_t                    width;
+    uint32_t                    height;
+    uint32_t                    layers;
+} VkFramebufferCreateInfo;
+
+typedef struct VkSubpassDescription {
+    VkSubpassDescriptionFlags       flags;
+    VkPipelineBindPoint             pipelineBindPoint;
+    uint32_t                        inputAttachmentCount;
+    const VkAttachmentReference*    pInputAttachments;
+    uint32_t                        colorAttachmentCount;
+    const VkAttachmentReference*    pColorAttachments;
+    const VkAttachmentReference*    pResolveAttachments;
+    const VkAttachmentReference*    pDepthStencilAttachment;
+    uint32_t                        preserveAttachmentCount;
+    const uint32_t*                 pPreserveAttachments;
+} VkSubpassDescription;
+
+typedef struct VkSubpassDependency {
+    uint32_t                srcSubpass;
+    uint32_t                dstSubpass;
+    VkPipelineStageFlags    srcStageMask;
+    VkPipelineStageFlags    dstStageMask;
+    VkAccessFlags           srcAccessMask;
+    VkAccessFlags           dstAccessMask;
+    VkDependencyFlags       dependencyFlags;
+} VkSubpassDependency;
+
+typedef struct VkRenderPassCreateInfo {
+    VkStructureType                   sType;
+    const void*                       pNext;
+    VkRenderPassCreateFlags           flags;
+    uint32_t                          attachmentCount;
+    const VkAttachmentDescription*    pAttachments;
+    uint32_t                          subpassCount;
+    const VkSubpassDescription*       pSubpasses;
+    uint32_t                          dependencyCount;
+    const VkSubpassDependency*        pDependencies;
+} VkRenderPassCreateInfo;
+
+typedef struct VkCommandPoolCreateInfo {
+    VkStructureType             sType;
+    const void*                 pNext;
+    VkCommandPoolCreateFlags    flags;
+    uint32_t                    queueFamilyIndex;
+} VkCommandPoolCreateInfo;
+
+typedef struct VkCommandBufferAllocateInfo {
+    VkStructureType         sType;
+    const void*             pNext;
+    VkCommandPool           commandPool;
+    VkCommandBufferLevel    level;
+    uint32_t                commandBufferCount;
+} VkCommandBufferAllocateInfo;
+
+typedef struct VkCommandBufferInheritanceInfo {
+    VkStructureType                  sType;
+    const void*                      pNext;
+    VkRenderPass                     renderPass;
+    uint32_t                         subpass;
+    VkFramebuffer                    framebuffer;
+    VkBool32                         occlusionQueryEnable;
+    VkQueryControlFlags              queryFlags;
+    VkQueryPipelineStatisticFlags    pipelineStatistics;
+} VkCommandBufferInheritanceInfo;
+
+typedef struct VkCommandBufferBeginInfo {
+    VkStructureType                          sType;
+    const void*                              pNext;
+    VkCommandBufferUsageFlags                flags;
+    const VkCommandBufferInheritanceInfo*    pInheritanceInfo;
+} VkCommandBufferBeginInfo;
+
+typedef struct VkBufferCopy {
+    VkDeviceSize    srcOffset;
+    VkDeviceSize    dstOffset;
+    VkDeviceSize    size;
+} VkBufferCopy;
+
+typedef struct VkImageSubresourceLayers {
+    VkImageAspectFlags    aspectMask;
+    uint32_t              mipLevel;
+    uint32_t              baseArrayLayer;
+    uint32_t              layerCount;
+} VkImageSubresourceLayers;
+
+typedef struct VkBufferImageCopy {
+    VkDeviceSize                bufferOffset;
+    uint32_t                    bufferRowLength;
+    uint32_t                    bufferImageHeight;
+    VkImageSubresourceLayers    imageSubresource;
+    VkOffset3D                  imageOffset;
+    VkExtent3D                  imageExtent;
+} VkBufferImageCopy;
+
+typedef union VkClearColorValue {
+    float       float32[4];
+    int32_t     int32[4];
+    uint32_t    uint32[4];
+} VkClearColorValue;
+
+typedef struct VkClearDepthStencilValue {
+    float       depth;
+    uint32_t    stencil;
+} VkClearDepthStencilValue;
+
+typedef union VkClearValue {
+    VkClearColorValue           color;
+    VkClearDepthStencilValue    depthStencil;
+} VkClearValue;
+
+typedef struct VkClearAttachment {
+    VkImageAspectFlags    aspectMask;
+    uint32_t              colorAttachment;
+    VkClearValue          clearValue;
+} VkClearAttachment;
+
+typedef struct VkClearRect {
+    VkRect2D    rect;
+    uint32_t    baseArrayLayer;
+    uint32_t    layerCount;
+} VkClearRect;
+
+typedef struct VkImageBlit {
+    VkImageSubresourceLayers    srcSubresource;
+    VkOffset3D                  srcOffsets[2];
+    VkImageSubresourceLayers    dstSubresource;
+    VkOffset3D                  dstOffsets[2];
+} VkImageBlit;
+
+typedef struct VkImageCopy {
+    VkImageSubresourceLayers    srcSubresource;
+    VkOffset3D                  srcOffset;
+    VkImageSubresourceLayers    dstSubresource;
+    VkOffset3D                  dstOffset;
+    VkExtent3D                  extent;
+} VkImageCopy;
+
+typedef struct VkImageResolve {
+    VkImageSubresourceLayers    srcSubresource;
+    VkOffset3D                  srcOffset;
+    VkImageSubresourceLayers    dstSubresource;
+    VkOffset3D                  dstOffset;
+    VkExtent3D                  extent;
+} VkImageResolve;
+
+typedef struct VkRenderPassBeginInfo {
+    VkStructureType        sType;
+    const void*            pNext;
+    VkRenderPass           renderPass;
+    VkFramebuffer          framebuffer;
+    VkRect2D               renderArea;
+    uint32_t               clearValueCount;
+    const VkClearValue*    pClearValues;
+} VkRenderPassBeginInfo;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateInstance)(const VkInstanceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkInstance* pInstance);
+typedef void (VKAPI_PTR *PFN_vkDestroyInstance)(VkInstance instance, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkEnumeratePhysicalDevices)(VkInstance instance, uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFeatures)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* pFeatures);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFormatProperties)(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties* pFormatProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceImageFormatProperties)(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceProperties)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties* pProperties);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceQueueFamilyProperties)(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties* pQueueFamilyProperties);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceMemoryProperties)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties* pMemoryProperties);
+typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_vkGetInstanceProcAddr)(VkInstance instance, const char* pName);
+typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_vkGetDeviceProcAddr)(VkDevice device, const char* pName);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateDevice)(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDevice* pDevice);
+typedef void (VKAPI_PTR *PFN_vkDestroyDevice)(VkDevice device, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkEnumerateInstanceExtensionProperties)(const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkEnumerateDeviceExtensionProperties)(VkPhysicalDevice physicalDevice, const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkEnumerateInstanceLayerProperties)(uint32_t* pPropertyCount, VkLayerProperties* pProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkEnumerateDeviceLayerProperties)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkLayerProperties* pProperties);
+typedef void (VKAPI_PTR *PFN_vkGetDeviceQueue)(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue* pQueue);
+typedef VkResult (VKAPI_PTR *PFN_vkQueueSubmit)(VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence);
+typedef VkResult (VKAPI_PTR *PFN_vkQueueWaitIdle)(VkQueue queue);
+typedef VkResult (VKAPI_PTR *PFN_vkDeviceWaitIdle)(VkDevice device);
+typedef VkResult (VKAPI_PTR *PFN_vkAllocateMemory)(VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo, const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory);
+typedef void (VKAPI_PTR *PFN_vkFreeMemory)(VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkMapMemory)(VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void** ppData);
+typedef void (VKAPI_PTR *PFN_vkUnmapMemory)(VkDevice device, VkDeviceMemory memory);
+typedef VkResult (VKAPI_PTR *PFN_vkFlushMappedMemoryRanges)(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges);
+typedef VkResult (VKAPI_PTR *PFN_vkInvalidateMappedMemoryRanges)(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges);
+typedef void (VKAPI_PTR *PFN_vkGetDeviceMemoryCommitment)(VkDevice device, VkDeviceMemory memory, VkDeviceSize* pCommittedMemoryInBytes);
+typedef VkResult (VKAPI_PTR *PFN_vkBindBufferMemory)(VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset);
+typedef VkResult (VKAPI_PTR *PFN_vkBindImageMemory)(VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset);
+typedef void (VKAPI_PTR *PFN_vkGetBufferMemoryRequirements)(VkDevice device, VkBuffer buffer, VkMemoryRequirements* pMemoryRequirements);
+typedef void (VKAPI_PTR *PFN_vkGetImageMemoryRequirements)(VkDevice device, VkImage image, VkMemoryRequirements* pMemoryRequirements);
+typedef void (VKAPI_PTR *PFN_vkGetImageSparseMemoryRequirements)(VkDevice device, VkImage image, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements* pSparseMemoryRequirements);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceSparseImageFormatProperties)(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t* pPropertyCount, VkSparseImageFormatProperties* pProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkQueueBindSparse)(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateFence)(VkDevice device, const VkFenceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence);
+typedef void (VKAPI_PTR *PFN_vkDestroyFence)(VkDevice device, VkFence fence, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkResetFences)(VkDevice device, uint32_t fenceCount, const VkFence* pFences);
+typedef VkResult (VKAPI_PTR *PFN_vkGetFenceStatus)(VkDevice device, VkFence fence);
+typedef VkResult (VKAPI_PTR *PFN_vkWaitForFences)(VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkBool32 waitAll, uint64_t timeout);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateSemaphore)(VkDevice device, const VkSemaphoreCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSemaphore* pSemaphore);
+typedef void (VKAPI_PTR *PFN_vkDestroySemaphore)(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateEvent)(VkDevice device, const VkEventCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkEvent* pEvent);
+typedef void (VKAPI_PTR *PFN_vkDestroyEvent)(VkDevice device, VkEvent event, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkGetEventStatus)(VkDevice device, VkEvent event);
+typedef VkResult (VKAPI_PTR *PFN_vkSetEvent)(VkDevice device, VkEvent event);
+typedef VkResult (VKAPI_PTR *PFN_vkResetEvent)(VkDevice device, VkEvent event);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateQueryPool)(VkDevice device, const VkQueryPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkQueryPool* pQueryPool);
+typedef void (VKAPI_PTR *PFN_vkDestroyQueryPool)(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkGetQueryPoolResults)(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, VkDeviceSize stride, VkQueryResultFlags flags);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateBuffer)(VkDevice device, const VkBufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer);
+typedef void (VKAPI_PTR *PFN_vkDestroyBuffer)(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateBufferView)(VkDevice device, const VkBufferViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBufferView* pView);
+typedef void (VKAPI_PTR *PFN_vkDestroyBufferView)(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateImage)(VkDevice device, const VkImageCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImage* pImage);
+typedef void (VKAPI_PTR *PFN_vkDestroyImage)(VkDevice device, VkImage image, const VkAllocationCallbacks* pAllocator);
+typedef void (VKAPI_PTR *PFN_vkGetImageSubresourceLayout)(VkDevice device, VkImage image, const VkImageSubresource* pSubresource, VkSubresourceLayout* pLayout);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateImageView)(VkDevice device, const VkImageViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImageView* pView);
+typedef void (VKAPI_PTR *PFN_vkDestroyImageView)(VkDevice device, VkImageView imageView, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateShaderModule)(VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule);
+typedef void (VKAPI_PTR *PFN_vkDestroyShaderModule)(VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkCreatePipelineCache)(VkDevice device, const VkPipelineCacheCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineCache* pPipelineCache);
+typedef void (VKAPI_PTR *PFN_vkDestroyPipelineCache)(VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkGetPipelineCacheData)(VkDevice device, VkPipelineCache pipelineCache, size_t* pDataSize, void* pData);
+typedef VkResult (VKAPI_PTR *PFN_vkMergePipelineCaches)(VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, const VkPipelineCache* pSrcCaches);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateGraphicsPipelines)(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateComputePipelines)(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines);
+typedef void (VKAPI_PTR *PFN_vkDestroyPipeline)(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkCreatePipelineLayout)(VkDevice device, const VkPipelineLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineLayout* pPipelineLayout);
+typedef void (VKAPI_PTR *PFN_vkDestroyPipelineLayout)(VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateSampler)(VkDevice device, const VkSamplerCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSampler* pSampler);
+typedef void (VKAPI_PTR *PFN_vkDestroySampler)(VkDevice device, VkSampler sampler, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateDescriptorSetLayout)(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorSetLayout* pSetLayout);
+typedef void (VKAPI_PTR *PFN_vkDestroyDescriptorSetLayout)(VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateDescriptorPool)(VkDevice device, const VkDescriptorPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorPool* pDescriptorPool);
+typedef void (VKAPI_PTR *PFN_vkDestroyDescriptorPool)(VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkResetDescriptorPool)(VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags);
+typedef VkResult (VKAPI_PTR *PFN_vkAllocateDescriptorSets)(VkDevice device, const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets);
+typedef VkResult (VKAPI_PTR *PFN_vkFreeDescriptorSets)(VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets);
+typedef void (VKAPI_PTR *PFN_vkUpdateDescriptorSets)(VkDevice device, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const VkCopyDescriptorSet* pDescriptorCopies);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateFramebuffer)(VkDevice device, const VkFramebufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFramebuffer* pFramebuffer);
+typedef void (VKAPI_PTR *PFN_vkDestroyFramebuffer)(VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateRenderPass)(VkDevice device, const VkRenderPassCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass);
+typedef void (VKAPI_PTR *PFN_vkDestroyRenderPass)(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks* pAllocator);
+typedef void (VKAPI_PTR *PFN_vkGetRenderAreaGranularity)(VkDevice device, VkRenderPass renderPass, VkExtent2D* pGranularity);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateCommandPool)(VkDevice device, const VkCommandPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCommandPool* pCommandPool);
+typedef void (VKAPI_PTR *PFN_vkDestroyCommandPool)(VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkResetCommandPool)(VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags);
+typedef VkResult (VKAPI_PTR *PFN_vkAllocateCommandBuffers)(VkDevice device, const VkCommandBufferAllocateInfo* pAllocateInfo, VkCommandBuffer* pCommandBuffers);
+typedef void (VKAPI_PTR *PFN_vkFreeCommandBuffers)(VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers);
+typedef VkResult (VKAPI_PTR *PFN_vkBeginCommandBuffer)(VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo* pBeginInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkEndCommandBuffer)(VkCommandBuffer commandBuffer);
+typedef VkResult (VKAPI_PTR *PFN_vkResetCommandBuffer)(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags);
+typedef void (VKAPI_PTR *PFN_vkCmdBindPipeline)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline);
+typedef void (VKAPI_PTR *PFN_vkCmdSetViewport)(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport* pViewports);
+typedef void (VKAPI_PTR *PFN_vkCmdSetScissor)(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D* pScissors);
+typedef void (VKAPI_PTR *PFN_vkCmdSetLineWidth)(VkCommandBuffer commandBuffer, float lineWidth);
+typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBias)(VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor);
+typedef void (VKAPI_PTR *PFN_vkCmdSetBlendConstants)(VkCommandBuffer commandBuffer, const float blendConstants[4]);
+typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBounds)(VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds);
+typedef void (VKAPI_PTR *PFN_vkCmdSetStencilCompareMask)(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask);
+typedef void (VKAPI_PTR *PFN_vkCmdSetStencilWriteMask)(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask);
+typedef void (VKAPI_PTR *PFN_vkCmdSetStencilReference)(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference);
+typedef void (VKAPI_PTR *PFN_vkCmdBindDescriptorSets)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets);
+typedef void (VKAPI_PTR *PFN_vkCmdBindIndexBuffer)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType);
+typedef void (VKAPI_PTR *PFN_vkCmdBindVertexBuffers)(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets);
+typedef void (VKAPI_PTR *PFN_vkCmdDraw)(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance);
+typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexed)(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance);
+typedef void (VKAPI_PTR *PFN_vkCmdDrawIndirect)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride);
+typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexedIndirect)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride);
+typedef void (VKAPI_PTR *PFN_vkCmdDispatch)(VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ);
+typedef void (VKAPI_PTR *PFN_vkCmdDispatchIndirect)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset);
+typedef void (VKAPI_PTR *PFN_vkCmdCopyBuffer)(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy* pRegions);
+typedef void (VKAPI_PTR *PFN_vkCmdCopyImage)(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy* pRegions);
+typedef void (VKAPI_PTR *PFN_vkCmdBlitImage)(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit* pRegions, VkFilter filter);
+typedef void (VKAPI_PTR *PFN_vkCmdCopyBufferToImage)(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkBufferImageCopy* pRegions);
+typedef void (VKAPI_PTR *PFN_vkCmdCopyImageToBuffer)(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy* pRegions);
+typedef void (VKAPI_PTR *PFN_vkCmdUpdateBuffer)(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void* pData);
+typedef void (VKAPI_PTR *PFN_vkCmdFillBuffer)(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data);
+typedef void (VKAPI_PTR *PFN_vkCmdClearColorImage)(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue* pColor, uint32_t rangeCount, const VkImageSubresourceRange* pRanges);
+typedef void (VKAPI_PTR *PFN_vkCmdClearDepthStencilImage)(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange* pRanges);
+typedef void (VKAPI_PTR *PFN_vkCmdClearAttachments)(VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkClearAttachment* pAttachments, uint32_t rectCount, const VkClearRect* pRects);
+typedef void (VKAPI_PTR *PFN_vkCmdResolveImage)(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageResolve* pRegions);
+typedef void (VKAPI_PTR *PFN_vkCmdSetEvent)(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask);
+typedef void (VKAPI_PTR *PFN_vkCmdResetEvent)(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask);
+typedef void (VKAPI_PTR *PFN_vkCmdWaitEvents)(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers);
+typedef void (VKAPI_PTR *PFN_vkCmdPipelineBarrier)(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers);
+typedef void (VKAPI_PTR *PFN_vkCmdBeginQuery)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags);
+typedef void (VKAPI_PTR *PFN_vkCmdEndQuery)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query);
+typedef void (VKAPI_PTR *PFN_vkCmdResetQueryPool)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount);
+typedef void (VKAPI_PTR *PFN_vkCmdWriteTimestamp)(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t query);
+typedef void (VKAPI_PTR *PFN_vkCmdCopyQueryPoolResults)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride, VkQueryResultFlags flags);
+typedef void (VKAPI_PTR *PFN_vkCmdPushConstants)(VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues);
+typedef void (VKAPI_PTR *PFN_vkCmdBeginRenderPass)(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, VkSubpassContents contents);
+typedef void (VKAPI_PTR *PFN_vkCmdNextSubpass)(VkCommandBuffer commandBuffer, VkSubpassContents contents);
+typedef void (VKAPI_PTR *PFN_vkCmdEndRenderPass)(VkCommandBuffer commandBuffer);
+typedef void (VKAPI_PTR *PFN_vkCmdExecuteCommands)(VkCommandBuffer commandBuffer, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateInstance(
+    const VkInstanceCreateInfo*                 pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkInstance*                                 pInstance);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyInstance(
+    VkInstance                                  instance,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDevices(
+    VkInstance                                  instance,
+    uint32_t*                                   pPhysicalDeviceCount,
+    VkPhysicalDevice*                           pPhysicalDevices);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFeatures(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceFeatures*                   pFeatures);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFormatProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkFormatProperties*                         pFormatProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceImageFormatProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkImageType                                 type,
+    VkImageTiling                               tiling,
+    VkImageUsageFlags                           usage,
+    VkImageCreateFlags                          flags,
+    VkImageFormatProperties*                    pImageFormatProperties);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceProperties*                 pProperties);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyProperties(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pQueueFamilyPropertyCount,
+    VkQueueFamilyProperties*                    pQueueFamilyProperties);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceMemoryProperties*           pMemoryProperties);
+
+VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetInstanceProcAddr(
+    VkInstance                                  instance,
+    const char*                                 pName);
+
+VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetDeviceProcAddr(
+    VkDevice                                    device,
+    const char*                                 pName);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateDevice(
+    VkPhysicalDevice                            physicalDevice,
+    const VkDeviceCreateInfo*                   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDevice*                                   pDevice);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyDevice(
+    VkDevice                                    device,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceExtensionProperties(
+    const char*                                 pLayerName,
+    uint32_t*                                   pPropertyCount,
+    VkExtensionProperties*                      pProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceExtensionProperties(
+    VkPhysicalDevice                            physicalDevice,
+    const char*                                 pLayerName,
+    uint32_t*                                   pPropertyCount,
+    VkExtensionProperties*                      pProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceLayerProperties(
+    uint32_t*                                   pPropertyCount,
+    VkLayerProperties*                          pProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceLayerProperties(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkLayerProperties*                          pProperties);
+
+VKAPI_ATTR void VKAPI_CALL vkGetDeviceQueue(
+    VkDevice                                    device,
+    uint32_t                                    queueFamilyIndex,
+    uint32_t                                    queueIndex,
+    VkQueue*                                    pQueue);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkQueueSubmit(
+    VkQueue                                     queue,
+    uint32_t                                    submitCount,
+    const VkSubmitInfo*                         pSubmits,
+    VkFence                                     fence);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkQueueWaitIdle(
+    VkQueue                                     queue);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkDeviceWaitIdle(
+    VkDevice                                    device);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkAllocateMemory(
+    VkDevice                                    device,
+    const VkMemoryAllocateInfo*                 pAllocateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDeviceMemory*                             pMemory);
+
+VKAPI_ATTR void VKAPI_CALL vkFreeMemory(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkMapMemory(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    VkDeviceSize                                offset,
+    VkDeviceSize                                size,
+    VkMemoryMapFlags                            flags,
+    void**                                      ppData);
+
+VKAPI_ATTR void VKAPI_CALL vkUnmapMemory(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkFlushMappedMemoryRanges(
+    VkDevice                                    device,
+    uint32_t                                    memoryRangeCount,
+    const VkMappedMemoryRange*                  pMemoryRanges);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkInvalidateMappedMemoryRanges(
+    VkDevice                                    device,
+    uint32_t                                    memoryRangeCount,
+    const VkMappedMemoryRange*                  pMemoryRanges);
+
+VKAPI_ATTR void VKAPI_CALL vkGetDeviceMemoryCommitment(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    VkDeviceSize*                               pCommittedMemoryInBytes);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkBindBufferMemory(
+    VkDevice                                    device,
+    VkBuffer                                    buffer,
+    VkDeviceMemory                              memory,
+    VkDeviceSize                                memoryOffset);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory(
+    VkDevice                                    device,
+    VkImage                                     image,
+    VkDeviceMemory                              memory,
+    VkDeviceSize                                memoryOffset);
+
+VKAPI_ATTR void VKAPI_CALL vkGetBufferMemoryRequirements(
+    VkDevice                                    device,
+    VkBuffer                                    buffer,
+    VkMemoryRequirements*                       pMemoryRequirements);
+
+VKAPI_ATTR void VKAPI_CALL vkGetImageMemoryRequirements(
+    VkDevice                                    device,
+    VkImage                                     image,
+    VkMemoryRequirements*                       pMemoryRequirements);
+
+VKAPI_ATTR void VKAPI_CALL vkGetImageSparseMemoryRequirements(
+    VkDevice                                    device,
+    VkImage                                     image,
+    uint32_t*                                   pSparseMemoryRequirementCount,
+    VkSparseImageMemoryRequirements*            pSparseMemoryRequirements);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceSparseImageFormatProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkImageType                                 type,
+    VkSampleCountFlagBits                       samples,
+    VkImageUsageFlags                           usage,
+    VkImageTiling                               tiling,
+    uint32_t*                                   pPropertyCount,
+    VkSparseImageFormatProperties*              pProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkQueueBindSparse(
+    VkQueue                                     queue,
+    uint32_t                                    bindInfoCount,
+    const VkBindSparseInfo*                     pBindInfo,
+    VkFence                                     fence);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateFence(
+    VkDevice                                    device,
+    const VkFenceCreateInfo*                    pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFence*                                    pFence);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyFence(
+    VkDevice                                    device,
+    VkFence                                     fence,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkResetFences(
+    VkDevice                                    device,
+    uint32_t                                    fenceCount,
+    const VkFence*                              pFences);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetFenceStatus(
+    VkDevice                                    device,
+    VkFence                                     fence);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkWaitForFences(
+    VkDevice                                    device,
+    uint32_t                                    fenceCount,
+    const VkFence*                              pFences,
+    VkBool32                                    waitAll,
+    uint64_t                                    timeout);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateSemaphore(
+    VkDevice                                    device,
+    const VkSemaphoreCreateInfo*                pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSemaphore*                                pSemaphore);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroySemaphore(
+    VkDevice                                    device,
+    VkSemaphore                                 semaphore,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateEvent(
+    VkDevice                                    device,
+    const VkEventCreateInfo*                    pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkEvent*                                    pEvent);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyEvent(
+    VkDevice                                    device,
+    VkEvent                                     event,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetEventStatus(
+    VkDevice                                    device,
+    VkEvent                                     event);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkSetEvent(
+    VkDevice                                    device,
+    VkEvent                                     event);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkResetEvent(
+    VkDevice                                    device,
+    VkEvent                                     event);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateQueryPool(
+    VkDevice                                    device,
+    const VkQueryPoolCreateInfo*                pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkQueryPool*                                pQueryPool);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyQueryPool(
+    VkDevice                                    device,
+    VkQueryPool                                 queryPool,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetQueryPoolResults(
+    VkDevice                                    device,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount,
+    size_t                                      dataSize,
+    void*                                       pData,
+    VkDeviceSize                                stride,
+    VkQueryResultFlags                          flags);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateBuffer(
+    VkDevice                                    device,
+    const VkBufferCreateInfo*                   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkBuffer*                                   pBuffer);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyBuffer(
+    VkDevice                                    device,
+    VkBuffer                                    buffer,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateBufferView(
+    VkDevice                                    device,
+    const VkBufferViewCreateInfo*               pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkBufferView*                               pView);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyBufferView(
+    VkDevice                                    device,
+    VkBufferView                                bufferView,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateImage(
+    VkDevice                                    device,
+    const VkImageCreateInfo*                    pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkImage*                                    pImage);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyImage(
+    VkDevice                                    device,
+    VkImage                                     image,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR void VKAPI_CALL vkGetImageSubresourceLayout(
+    VkDevice                                    device,
+    VkImage                                     image,
+    const VkImageSubresource*                   pSubresource,
+    VkSubresourceLayout*                        pLayout);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateImageView(
+    VkDevice                                    device,
+    const VkImageViewCreateInfo*                pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkImageView*                                pView);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyImageView(
+    VkDevice                                    device,
+    VkImageView                                 imageView,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateShaderModule(
+    VkDevice                                    device,
+    const VkShaderModuleCreateInfo*             pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkShaderModule*                             pShaderModule);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyShaderModule(
+    VkDevice                                    device,
+    VkShaderModule                              shaderModule,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreatePipelineCache(
+    VkDevice                                    device,
+    const VkPipelineCacheCreateInfo*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipelineCache*                            pPipelineCache);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyPipelineCache(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineCacheData(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    size_t*                                     pDataSize,
+    void*                                       pData);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkMergePipelineCaches(
+    VkDevice                                    device,
+    VkPipelineCache                             dstCache,
+    uint32_t                                    srcCacheCount,
+    const VkPipelineCache*                      pSrcCaches);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateGraphicsPipelines(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    createInfoCount,
+    const VkGraphicsPipelineCreateInfo*         pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipeline*                                 pPipelines);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateComputePipelines(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    createInfoCount,
+    const VkComputePipelineCreateInfo*          pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipeline*                                 pPipelines);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyPipeline(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreatePipelineLayout(
+    VkDevice                                    device,
+    const VkPipelineLayoutCreateInfo*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipelineLayout*                           pPipelineLayout);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyPipelineLayout(
+    VkDevice                                    device,
+    VkPipelineLayout                            pipelineLayout,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateSampler(
+    VkDevice                                    device,
+    const VkSamplerCreateInfo*                  pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSampler*                                  pSampler);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroySampler(
+    VkDevice                                    device,
+    VkSampler                                   sampler,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorSetLayout(
+    VkDevice                                    device,
+    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorSetLayout*                      pSetLayout);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorSetLayout(
+    VkDevice                                    device,
+    VkDescriptorSetLayout                       descriptorSetLayout,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorPool(
+    VkDevice                                    device,
+    const VkDescriptorPoolCreateInfo*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorPool*                           pDescriptorPool);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorPool(
+    VkDevice                                    device,
+    VkDescriptorPool                            descriptorPool,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkResetDescriptorPool(
+    VkDevice                                    device,
+    VkDescriptorPool                            descriptorPool,
+    VkDescriptorPoolResetFlags                  flags);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkAllocateDescriptorSets(
+    VkDevice                                    device,
+    const VkDescriptorSetAllocateInfo*          pAllocateInfo,
+    VkDescriptorSet*                            pDescriptorSets);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkFreeDescriptorSets(
+    VkDevice                                    device,
+    VkDescriptorPool                            descriptorPool,
+    uint32_t                                    descriptorSetCount,
+    const VkDescriptorSet*                      pDescriptorSets);
+
+VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSets(
+    VkDevice                                    device,
+    uint32_t                                    descriptorWriteCount,
+    const VkWriteDescriptorSet*                 pDescriptorWrites,
+    uint32_t                                    descriptorCopyCount,
+    const VkCopyDescriptorSet*                  pDescriptorCopies);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateFramebuffer(
+    VkDevice                                    device,
+    const VkFramebufferCreateInfo*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFramebuffer*                              pFramebuffer);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyFramebuffer(
+    VkDevice                                    device,
+    VkFramebuffer                               framebuffer,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateRenderPass(
+    VkDevice                                    device,
+    const VkRenderPassCreateInfo*               pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkRenderPass*                               pRenderPass);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyRenderPass(
+    VkDevice                                    device,
+    VkRenderPass                                renderPass,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR void VKAPI_CALL vkGetRenderAreaGranularity(
+    VkDevice                                    device,
+    VkRenderPass                                renderPass,
+    VkExtent2D*                                 pGranularity);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateCommandPool(
+    VkDevice                                    device,
+    const VkCommandPoolCreateInfo*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkCommandPool*                              pCommandPool);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyCommandPool(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkResetCommandPool(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    VkCommandPoolResetFlags                     flags);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkAllocateCommandBuffers(
+    VkDevice                                    device,
+    const VkCommandBufferAllocateInfo*          pAllocateInfo,
+    VkCommandBuffer*                            pCommandBuffers);
+
+VKAPI_ATTR void VKAPI_CALL vkFreeCommandBuffers(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    uint32_t                                    commandBufferCount,
+    const VkCommandBuffer*                      pCommandBuffers);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkBeginCommandBuffer(
+    VkCommandBuffer                             commandBuffer,
+    const VkCommandBufferBeginInfo*             pBeginInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkEndCommandBuffer(
+    VkCommandBuffer                             commandBuffer);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkResetCommandBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkCommandBufferResetFlags                   flags);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBindPipeline(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineBindPoint                         pipelineBindPoint,
+    VkPipeline                                  pipeline);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetViewport(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstViewport,
+    uint32_t                                    viewportCount,
+    const VkViewport*                           pViewports);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetScissor(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstScissor,
+    uint32_t                                    scissorCount,
+    const VkRect2D*                             pScissors);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetLineWidth(
+    VkCommandBuffer                             commandBuffer,
+    float                                       lineWidth);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBias(
+    VkCommandBuffer                             commandBuffer,
+    float                                       depthBiasConstantFactor,
+    float                                       depthBiasClamp,
+    float                                       depthBiasSlopeFactor);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetBlendConstants(
+    VkCommandBuffer                             commandBuffer,
+    const float                                 blendConstants[4]);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBounds(
+    VkCommandBuffer                             commandBuffer,
+    float                                       minDepthBounds,
+    float                                       maxDepthBounds);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilCompareMask(
+    VkCommandBuffer                             commandBuffer,
+    VkStencilFaceFlags                          faceMask,
+    uint32_t                                    compareMask);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilWriteMask(
+    VkCommandBuffer                             commandBuffer,
+    VkStencilFaceFlags                          faceMask,
+    uint32_t                                    writeMask);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilReference(
+    VkCommandBuffer                             commandBuffer,
+    VkStencilFaceFlags                          faceMask,
+    uint32_t                                    reference);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBindDescriptorSets(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineBindPoint                         pipelineBindPoint,
+    VkPipelineLayout                            layout,
+    uint32_t                                    firstSet,
+    uint32_t                                    descriptorSetCount,
+    const VkDescriptorSet*                      pDescriptorSets,
+    uint32_t                                    dynamicOffsetCount,
+    const uint32_t*                             pDynamicOffsets);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBindIndexBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkIndexType                                 indexType);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBindVertexBuffers(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstBinding,
+    uint32_t                                    bindingCount,
+    const VkBuffer*                             pBuffers,
+    const VkDeviceSize*                         pOffsets);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDraw(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    vertexCount,
+    uint32_t                                    instanceCount,
+    uint32_t                                    firstVertex,
+    uint32_t                                    firstInstance);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexed(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    indexCount,
+    uint32_t                                    instanceCount,
+    uint32_t                                    firstIndex,
+    int32_t                                     vertexOffset,
+    uint32_t                                    firstInstance);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirect(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    uint32_t                                    drawCount,
+    uint32_t                                    stride);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirect(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    uint32_t                                    drawCount,
+    uint32_t                                    stride);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDispatch(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    groupCountX,
+    uint32_t                                    groupCountY,
+    uint32_t                                    groupCountZ);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDispatchIndirect(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    srcBuffer,
+    VkBuffer                                    dstBuffer,
+    uint32_t                                    regionCount,
+    const VkBufferCopy*                         pRegions);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkImageCopy*                          pRegions);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBlitImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkImageBlit*                          pRegions,
+    VkFilter                                    filter);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyBufferToImage(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    srcBuffer,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkBufferImageCopy*                    pRegions);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyImageToBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkBuffer                                    dstBuffer,
+    uint32_t                                    regionCount,
+    const VkBufferImageCopy*                    pRegions);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdUpdateBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    VkDeviceSize                                dataSize,
+    const void*                                 pData);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdFillBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    VkDeviceSize                                size,
+    uint32_t                                    data);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdClearColorImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     image,
+    VkImageLayout                               imageLayout,
+    const VkClearColorValue*                    pColor,
+    uint32_t                                    rangeCount,
+    const VkImageSubresourceRange*              pRanges);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdClearDepthStencilImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     image,
+    VkImageLayout                               imageLayout,
+    const VkClearDepthStencilValue*             pDepthStencil,
+    uint32_t                                    rangeCount,
+    const VkImageSubresourceRange*              pRanges);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdClearAttachments(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    attachmentCount,
+    const VkClearAttachment*                    pAttachments,
+    uint32_t                                    rectCount,
+    const VkClearRect*                          pRects);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdResolveImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkImageResolve*                       pRegions);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetEvent(
+    VkCommandBuffer                             commandBuffer,
+    VkEvent                                     event,
+    VkPipelineStageFlags                        stageMask);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdResetEvent(
+    VkCommandBuffer                             commandBuffer,
+    VkEvent                                     event,
+    VkPipelineStageFlags                        stageMask);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdWaitEvents(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    eventCount,
+    const VkEvent*                              pEvents,
+    VkPipelineStageFlags                        srcStageMask,
+    VkPipelineStageFlags                        dstStageMask,
+    uint32_t                                    memoryBarrierCount,
+    const VkMemoryBarrier*                      pMemoryBarriers,
+    uint32_t                                    bufferMemoryBarrierCount,
+    const VkBufferMemoryBarrier*                pBufferMemoryBarriers,
+    uint32_t                                    imageMemoryBarrierCount,
+    const VkImageMemoryBarrier*                 pImageMemoryBarriers);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdPipelineBarrier(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineStageFlags                        srcStageMask,
+    VkPipelineStageFlags                        dstStageMask,
+    VkDependencyFlags                           dependencyFlags,
+    uint32_t                                    memoryBarrierCount,
+    const VkMemoryBarrier*                      pMemoryBarriers,
+    uint32_t                                    bufferMemoryBarrierCount,
+    const VkBufferMemoryBarrier*                pBufferMemoryBarriers,
+    uint32_t                                    imageMemoryBarrierCount,
+    const VkImageMemoryBarrier*                 pImageMemoryBarriers);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBeginQuery(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query,
+    VkQueryControlFlags                         flags);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdEndQuery(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdResetQueryPool(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdWriteTimestamp(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineStageFlagBits                     pipelineStage,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyQueryPoolResults(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    VkDeviceSize                                stride,
+    VkQueryResultFlags                          flags);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdPushConstants(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineLayout                            layout,
+    VkShaderStageFlags                          stageFlags,
+    uint32_t                                    offset,
+    uint32_t                                    size,
+    const void*                                 pValues);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBeginRenderPass(
+    VkCommandBuffer                             commandBuffer,
+    const VkRenderPassBeginInfo*                pRenderPassBegin,
+    VkSubpassContents                           contents);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdNextSubpass(
+    VkCommandBuffer                             commandBuffer,
+    VkSubpassContents                           contents);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderPass(
+    VkCommandBuffer                             commandBuffer);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdExecuteCommands(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    commandBufferCount,
+    const VkCommandBuffer*                      pCommandBuffers);
+#endif
+
+
+#define VK_VERSION_1_1 1
+// Vulkan 1.1 version number
+#define VK_API_VERSION_1_1 VK_MAKE_API_VERSION(0, 1, 1, 0)// Patch version should always be set to 0
+
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSamplerYcbcrConversion)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorUpdateTemplate)
+#define VK_MAX_DEVICE_GROUP_SIZE          32U
+#define VK_LUID_SIZE                      8U
+#define VK_QUEUE_FAMILY_EXTERNAL          (~1U)
+
+typedef enum VkPointClippingBehavior {
+    VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES = 0,
+    VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY = 1,
+    VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES_KHR = VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES,
+    VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY_KHR = VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY,
+    VK_POINT_CLIPPING_BEHAVIOR_MAX_ENUM = 0x7FFFFFFF
+} VkPointClippingBehavior;
+
+typedef enum VkTessellationDomainOrigin {
+    VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT = 0,
+    VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT = 1,
+    VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT_KHR = VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT,
+    VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT_KHR = VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT,
+    VK_TESSELLATION_DOMAIN_ORIGIN_MAX_ENUM = 0x7FFFFFFF
+} VkTessellationDomainOrigin;
+
+typedef enum VkSamplerYcbcrModelConversion {
+    VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY = 0,
+    VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY = 1,
+    VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709 = 2,
+    VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601 = 3,
+    VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020 = 4,
+    VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY_KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY,
+    VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY_KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY,
+    VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709_KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709,
+    VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601_KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601,
+    VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020_KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020,
+    VK_SAMPLER_YCBCR_MODEL_CONVERSION_MAX_ENUM = 0x7FFFFFFF
+} VkSamplerYcbcrModelConversion;
+
+typedef enum VkSamplerYcbcrRange {
+    VK_SAMPLER_YCBCR_RANGE_ITU_FULL = 0,
+    VK_SAMPLER_YCBCR_RANGE_ITU_NARROW = 1,
+    VK_SAMPLER_YCBCR_RANGE_ITU_FULL_KHR = VK_SAMPLER_YCBCR_RANGE_ITU_FULL,
+    VK_SAMPLER_YCBCR_RANGE_ITU_NARROW_KHR = VK_SAMPLER_YCBCR_RANGE_ITU_NARROW,
+    VK_SAMPLER_YCBCR_RANGE_MAX_ENUM = 0x7FFFFFFF
+} VkSamplerYcbcrRange;
+
+typedef enum VkChromaLocation {
+    VK_CHROMA_LOCATION_COSITED_EVEN = 0,
+    VK_CHROMA_LOCATION_MIDPOINT = 1,
+    VK_CHROMA_LOCATION_COSITED_EVEN_KHR = VK_CHROMA_LOCATION_COSITED_EVEN,
+    VK_CHROMA_LOCATION_MIDPOINT_KHR = VK_CHROMA_LOCATION_MIDPOINT,
+    VK_CHROMA_LOCATION_MAX_ENUM = 0x7FFFFFFF
+} VkChromaLocation;
+
+typedef enum VkDescriptorUpdateTemplateType {
+    VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET = 0,
+    VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR = 1,
+    VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET_KHR = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET,
+    VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_MAX_ENUM = 0x7FFFFFFF
+} VkDescriptorUpdateTemplateType;
+
+typedef enum VkSubgroupFeatureFlagBits {
+    VK_SUBGROUP_FEATURE_BASIC_BIT = 0x00000001,
+    VK_SUBGROUP_FEATURE_VOTE_BIT = 0x00000002,
+    VK_SUBGROUP_FEATURE_ARITHMETIC_BIT = 0x00000004,
+    VK_SUBGROUP_FEATURE_BALLOT_BIT = 0x00000008,
+    VK_SUBGROUP_FEATURE_SHUFFLE_BIT = 0x00000010,
+    VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT = 0x00000020,
+    VK_SUBGROUP_FEATURE_CLUSTERED_BIT = 0x00000040,
+    VK_SUBGROUP_FEATURE_QUAD_BIT = 0x00000080,
+    VK_SUBGROUP_FEATURE_PARTITIONED_BIT_NV = 0x00000100,
+    VK_SUBGROUP_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkSubgroupFeatureFlagBits;
+typedef VkFlags VkSubgroupFeatureFlags;
+
+typedef enum VkPeerMemoryFeatureFlagBits {
+    VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT = 0x00000001,
+    VK_PEER_MEMORY_FEATURE_COPY_DST_BIT = 0x00000002,
+    VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT = 0x00000004,
+    VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT = 0x00000008,
+    VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT_KHR = VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT,
+    VK_PEER_MEMORY_FEATURE_COPY_DST_BIT_KHR = VK_PEER_MEMORY_FEATURE_COPY_DST_BIT,
+    VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT_KHR = VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT,
+    VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT_KHR = VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT,
+    VK_PEER_MEMORY_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkPeerMemoryFeatureFlagBits;
+typedef VkFlags VkPeerMemoryFeatureFlags;
+
+typedef enum VkMemoryAllocateFlagBits {
+    VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT = 0x00000001,
+    VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT = 0x00000002,
+    VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT = 0x00000004,
+    VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT_KHR = VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT,
+    VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT,
+    VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT,
+    VK_MEMORY_ALLOCATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkMemoryAllocateFlagBits;
+typedef VkFlags VkMemoryAllocateFlags;
+typedef VkFlags VkCommandPoolTrimFlags;
+typedef VkFlags VkDescriptorUpdateTemplateCreateFlags;
+
+typedef enum VkExternalMemoryHandleTypeFlagBits {
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT = 0x00000001,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT = 0x00000002,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT = 0x00000004,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT = 0x00000008,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT = 0x00000010,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT = 0x00000020,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT = 0x00000040,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT = 0x00000200,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID = 0x00000400,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT = 0x00000080,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT = 0x00000100,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_ZIRCON_VMO_BIT_FUCHSIA = 0x00000800,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_RDMA_ADDRESS_BIT_NV = 0x00001000,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkExternalMemoryHandleTypeFlagBits;
+typedef VkFlags VkExternalMemoryHandleTypeFlags;
+
+typedef enum VkExternalMemoryFeatureFlagBits {
+    VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT = 0x00000001,
+    VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT = 0x00000002,
+    VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT = 0x00000004,
+    VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_KHR = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT,
+    VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_KHR = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT,
+    VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_KHR = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT,
+    VK_EXTERNAL_MEMORY_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkExternalMemoryFeatureFlagBits;
+typedef VkFlags VkExternalMemoryFeatureFlags;
+
+typedef enum VkExternalFenceHandleTypeFlagBits {
+    VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT = 0x00000001,
+    VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT = 0x00000002,
+    VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT = 0x00000004,
+    VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT = 0x00000008,
+    VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT,
+    VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT,
+    VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT,
+    VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT,
+    VK_EXTERNAL_FENCE_HANDLE_TYPE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkExternalFenceHandleTypeFlagBits;
+typedef VkFlags VkExternalFenceHandleTypeFlags;
+
+typedef enum VkExternalFenceFeatureFlagBits {
+    VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT = 0x00000001,
+    VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT = 0x00000002,
+    VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT_KHR = VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT,
+    VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT_KHR = VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT,
+    VK_EXTERNAL_FENCE_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkExternalFenceFeatureFlagBits;
+typedef VkFlags VkExternalFenceFeatureFlags;
+
+typedef enum VkFenceImportFlagBits {
+    VK_FENCE_IMPORT_TEMPORARY_BIT = 0x00000001,
+    VK_FENCE_IMPORT_TEMPORARY_BIT_KHR = VK_FENCE_IMPORT_TEMPORARY_BIT,
+    VK_FENCE_IMPORT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkFenceImportFlagBits;
+typedef VkFlags VkFenceImportFlags;
+
+typedef enum VkSemaphoreImportFlagBits {
+    VK_SEMAPHORE_IMPORT_TEMPORARY_BIT = 0x00000001,
+    VK_SEMAPHORE_IMPORT_TEMPORARY_BIT_KHR = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT,
+    VK_SEMAPHORE_IMPORT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkSemaphoreImportFlagBits;
+typedef VkFlags VkSemaphoreImportFlags;
+
+typedef enum VkExternalSemaphoreHandleTypeFlagBits {
+    VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT = 0x00000001,
+    VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT = 0x00000002,
+    VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT = 0x00000004,
+    VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT = 0x00000008,
+    VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT = 0x00000010,
+    VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_ZIRCON_EVENT_BIT_FUCHSIA = 0x00000080,
+    VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D11_FENCE_BIT = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT,
+    VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT,
+    VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT,
+    VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT,
+    VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT_KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT,
+    VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT,
+    VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkExternalSemaphoreHandleTypeFlagBits;
+typedef VkFlags VkExternalSemaphoreHandleTypeFlags;
+
+typedef enum VkExternalSemaphoreFeatureFlagBits {
+    VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT = 0x00000001,
+    VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT = 0x00000002,
+    VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT_KHR = VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT,
+    VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT_KHR = VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT,
+    VK_EXTERNAL_SEMAPHORE_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkExternalSemaphoreFeatureFlagBits;
+typedef VkFlags VkExternalSemaphoreFeatureFlags;
+typedef struct VkPhysicalDeviceSubgroupProperties {
+    VkStructureType           sType;
+    void*                     pNext;
+    uint32_t                  subgroupSize;
+    VkShaderStageFlags        supportedStages;
+    VkSubgroupFeatureFlags    supportedOperations;
+    VkBool32                  quadOperationsInAllStages;
+} VkPhysicalDeviceSubgroupProperties;
+
+typedef struct VkBindBufferMemoryInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkBuffer           buffer;
+    VkDeviceMemory     memory;
+    VkDeviceSize       memoryOffset;
+} VkBindBufferMemoryInfo;
+
+typedef struct VkBindImageMemoryInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkImage            image;
+    VkDeviceMemory     memory;
+    VkDeviceSize       memoryOffset;
+} VkBindImageMemoryInfo;
+
+typedef struct VkPhysicalDevice16BitStorageFeatures {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           storageBuffer16BitAccess;
+    VkBool32           uniformAndStorageBuffer16BitAccess;
+    VkBool32           storagePushConstant16;
+    VkBool32           storageInputOutput16;
+} VkPhysicalDevice16BitStorageFeatures;
+
+typedef struct VkMemoryDedicatedRequirements {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           prefersDedicatedAllocation;
+    VkBool32           requiresDedicatedAllocation;
+} VkMemoryDedicatedRequirements;
+
+typedef struct VkMemoryDedicatedAllocateInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkImage            image;
+    VkBuffer           buffer;
+} VkMemoryDedicatedAllocateInfo;
+
+typedef struct VkMemoryAllocateFlagsInfo {
+    VkStructureType          sType;
+    const void*              pNext;
+    VkMemoryAllocateFlags    flags;
+    uint32_t                 deviceMask;
+} VkMemoryAllocateFlagsInfo;
+
+typedef struct VkDeviceGroupRenderPassBeginInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           deviceMask;
+    uint32_t           deviceRenderAreaCount;
+    const VkRect2D*    pDeviceRenderAreas;
+} VkDeviceGroupRenderPassBeginInfo;
+
+typedef struct VkDeviceGroupCommandBufferBeginInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           deviceMask;
+} VkDeviceGroupCommandBufferBeginInfo;
+
+typedef struct VkDeviceGroupSubmitInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           waitSemaphoreCount;
+    const uint32_t*    pWaitSemaphoreDeviceIndices;
+    uint32_t           commandBufferCount;
+    const uint32_t*    pCommandBufferDeviceMasks;
+    uint32_t           signalSemaphoreCount;
+    const uint32_t*    pSignalSemaphoreDeviceIndices;
+} VkDeviceGroupSubmitInfo;
+
+typedef struct VkDeviceGroupBindSparseInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           resourceDeviceIndex;
+    uint32_t           memoryDeviceIndex;
+} VkDeviceGroupBindSparseInfo;
+
+typedef struct VkBindBufferMemoryDeviceGroupInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           deviceIndexCount;
+    const uint32_t*    pDeviceIndices;
+} VkBindBufferMemoryDeviceGroupInfo;
+
+typedef struct VkBindImageMemoryDeviceGroupInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           deviceIndexCount;
+    const uint32_t*    pDeviceIndices;
+    uint32_t           splitInstanceBindRegionCount;
+    const VkRect2D*    pSplitInstanceBindRegions;
+} VkBindImageMemoryDeviceGroupInfo;
+
+typedef struct VkPhysicalDeviceGroupProperties {
+    VkStructureType     sType;
+    void*               pNext;
+    uint32_t            physicalDeviceCount;
+    VkPhysicalDevice    physicalDevices[VK_MAX_DEVICE_GROUP_SIZE];
+    VkBool32            subsetAllocation;
+} VkPhysicalDeviceGroupProperties;
+
+typedef struct VkDeviceGroupDeviceCreateInfo {
+    VkStructureType            sType;
+    const void*                pNext;
+    uint32_t                   physicalDeviceCount;
+    const VkPhysicalDevice*    pPhysicalDevices;
+} VkDeviceGroupDeviceCreateInfo;
+
+typedef struct VkBufferMemoryRequirementsInfo2 {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkBuffer           buffer;
+} VkBufferMemoryRequirementsInfo2;
+
+typedef struct VkImageMemoryRequirementsInfo2 {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkImage            image;
+} VkImageMemoryRequirementsInfo2;
+
+typedef struct VkImageSparseMemoryRequirementsInfo2 {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkImage            image;
+} VkImageSparseMemoryRequirementsInfo2;
+
+typedef struct VkMemoryRequirements2 {
+    VkStructureType         sType;
+    void*                   pNext;
+    VkMemoryRequirements    memoryRequirements;
+} VkMemoryRequirements2;
+
+typedef struct VkSparseImageMemoryRequirements2 {
+    VkStructureType                    sType;
+    void*                              pNext;
+    VkSparseImageMemoryRequirements    memoryRequirements;
+} VkSparseImageMemoryRequirements2;
+
+typedef struct VkPhysicalDeviceFeatures2 {
+    VkStructureType             sType;
+    void*                       pNext;
+    VkPhysicalDeviceFeatures    features;
+} VkPhysicalDeviceFeatures2;
+
+typedef struct VkPhysicalDeviceProperties2 {
+    VkStructureType               sType;
+    void*                         pNext;
+    VkPhysicalDeviceProperties    properties;
+} VkPhysicalDeviceProperties2;
+
+typedef struct VkFormatProperties2 {
+    VkStructureType       sType;
+    void*                 pNext;
+    VkFormatProperties    formatProperties;
+} VkFormatProperties2;
+
+typedef struct VkImageFormatProperties2 {
+    VkStructureType            sType;
+    void*                      pNext;
+    VkImageFormatProperties    imageFormatProperties;
+} VkImageFormatProperties2;
+
+typedef struct VkPhysicalDeviceImageFormatInfo2 {
+    VkStructureType       sType;
+    const void*           pNext;
+    VkFormat              format;
+    VkImageType           type;
+    VkImageTiling         tiling;
+    VkImageUsageFlags     usage;
+    VkImageCreateFlags    flags;
+} VkPhysicalDeviceImageFormatInfo2;
+
+typedef struct VkQueueFamilyProperties2 {
+    VkStructureType            sType;
+    void*                      pNext;
+    VkQueueFamilyProperties    queueFamilyProperties;
+} VkQueueFamilyProperties2;
+
+typedef struct VkPhysicalDeviceMemoryProperties2 {
+    VkStructureType                     sType;
+    void*                               pNext;
+    VkPhysicalDeviceMemoryProperties    memoryProperties;
+} VkPhysicalDeviceMemoryProperties2;
+
+typedef struct VkSparseImageFormatProperties2 {
+    VkStructureType                  sType;
+    void*                            pNext;
+    VkSparseImageFormatProperties    properties;
+} VkSparseImageFormatProperties2;
+
+typedef struct VkPhysicalDeviceSparseImageFormatInfo2 {
+    VkStructureType          sType;
+    const void*              pNext;
+    VkFormat                 format;
+    VkImageType              type;
+    VkSampleCountFlagBits    samples;
+    VkImageUsageFlags        usage;
+    VkImageTiling            tiling;
+} VkPhysicalDeviceSparseImageFormatInfo2;
+
+typedef struct VkPhysicalDevicePointClippingProperties {
+    VkStructureType            sType;
+    void*                      pNext;
+    VkPointClippingBehavior    pointClippingBehavior;
+} VkPhysicalDevicePointClippingProperties;
+
+typedef struct VkInputAttachmentAspectReference {
+    uint32_t              subpass;
+    uint32_t              inputAttachmentIndex;
+    VkImageAspectFlags    aspectMask;
+} VkInputAttachmentAspectReference;
+
+typedef struct VkRenderPassInputAttachmentAspectCreateInfo {
+    VkStructureType                            sType;
+    const void*                                pNext;
+    uint32_t                                   aspectReferenceCount;
+    const VkInputAttachmentAspectReference*    pAspectReferences;
+} VkRenderPassInputAttachmentAspectCreateInfo;
+
+typedef struct VkImageViewUsageCreateInfo {
+    VkStructureType      sType;
+    const void*          pNext;
+    VkImageUsageFlags    usage;
+} VkImageViewUsageCreateInfo;
+
+typedef struct VkPipelineTessellationDomainOriginStateCreateInfo {
+    VkStructureType               sType;
+    const void*                   pNext;
+    VkTessellationDomainOrigin    domainOrigin;
+} VkPipelineTessellationDomainOriginStateCreateInfo;
+
+typedef struct VkRenderPassMultiviewCreateInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           subpassCount;
+    const uint32_t*    pViewMasks;
+    uint32_t           dependencyCount;
+    const int32_t*     pViewOffsets;
+    uint32_t           correlationMaskCount;
+    const uint32_t*    pCorrelationMasks;
+} VkRenderPassMultiviewCreateInfo;
+
+typedef struct VkPhysicalDeviceMultiviewFeatures {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           multiview;
+    VkBool32           multiviewGeometryShader;
+    VkBool32           multiviewTessellationShader;
+} VkPhysicalDeviceMultiviewFeatures;
+
+typedef struct VkPhysicalDeviceMultiviewProperties {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           maxMultiviewViewCount;
+    uint32_t           maxMultiviewInstanceIndex;
+} VkPhysicalDeviceMultiviewProperties;
+
+typedef struct VkPhysicalDeviceVariablePointersFeatures {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           variablePointersStorageBuffer;
+    VkBool32           variablePointers;
+} VkPhysicalDeviceVariablePointersFeatures;
+
+typedef VkPhysicalDeviceVariablePointersFeatures VkPhysicalDeviceVariablePointerFeatures;
+
+typedef struct VkPhysicalDeviceProtectedMemoryFeatures {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           protectedMemory;
+} VkPhysicalDeviceProtectedMemoryFeatures;
+
+typedef struct VkPhysicalDeviceProtectedMemoryProperties {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           protectedNoFault;
+} VkPhysicalDeviceProtectedMemoryProperties;
+
+typedef struct VkDeviceQueueInfo2 {
+    VkStructureType             sType;
+    const void*                 pNext;
+    VkDeviceQueueCreateFlags    flags;
+    uint32_t                    queueFamilyIndex;
+    uint32_t                    queueIndex;
+} VkDeviceQueueInfo2;
+
+typedef struct VkProtectedSubmitInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkBool32           protectedSubmit;
+} VkProtectedSubmitInfo;
+
+typedef struct VkSamplerYcbcrConversionCreateInfo {
+    VkStructureType                  sType;
+    const void*                      pNext;
+    VkFormat                         format;
+    VkSamplerYcbcrModelConversion    ycbcrModel;
+    VkSamplerYcbcrRange              ycbcrRange;
+    VkComponentMapping               components;
+    VkChromaLocation                 xChromaOffset;
+    VkChromaLocation                 yChromaOffset;
+    VkFilter                         chromaFilter;
+    VkBool32                         forceExplicitReconstruction;
+} VkSamplerYcbcrConversionCreateInfo;
+
+typedef struct VkSamplerYcbcrConversionInfo {
+    VkStructureType             sType;
+    const void*                 pNext;
+    VkSamplerYcbcrConversion    conversion;
+} VkSamplerYcbcrConversionInfo;
+
+typedef struct VkBindImagePlaneMemoryInfo {
+    VkStructureType          sType;
+    const void*              pNext;
+    VkImageAspectFlagBits    planeAspect;
+} VkBindImagePlaneMemoryInfo;
+
+typedef struct VkImagePlaneMemoryRequirementsInfo {
+    VkStructureType          sType;
+    const void*              pNext;
+    VkImageAspectFlagBits    planeAspect;
+} VkImagePlaneMemoryRequirementsInfo;
+
+typedef struct VkPhysicalDeviceSamplerYcbcrConversionFeatures {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           samplerYcbcrConversion;
+} VkPhysicalDeviceSamplerYcbcrConversionFeatures;
+
+typedef struct VkSamplerYcbcrConversionImageFormatProperties {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           combinedImageSamplerDescriptorCount;
+} VkSamplerYcbcrConversionImageFormatProperties;
+
+typedef struct VkDescriptorUpdateTemplateEntry {
+    uint32_t            dstBinding;
+    uint32_t            dstArrayElement;
+    uint32_t            descriptorCount;
+    VkDescriptorType    descriptorType;
+    size_t              offset;
+    size_t              stride;
+} VkDescriptorUpdateTemplateEntry;
+
+typedef struct VkDescriptorUpdateTemplateCreateInfo {
+    VkStructureType                           sType;
+    const void*                               pNext;
+    VkDescriptorUpdateTemplateCreateFlags     flags;
+    uint32_t                                  descriptorUpdateEntryCount;
+    const VkDescriptorUpdateTemplateEntry*    pDescriptorUpdateEntries;
+    VkDescriptorUpdateTemplateType            templateType;
+    VkDescriptorSetLayout                     descriptorSetLayout;
+    VkPipelineBindPoint                       pipelineBindPoint;
+    VkPipelineLayout                          pipelineLayout;
+    uint32_t                                  set;
+} VkDescriptorUpdateTemplateCreateInfo;
+
+typedef struct VkExternalMemoryProperties {
+    VkExternalMemoryFeatureFlags       externalMemoryFeatures;
+    VkExternalMemoryHandleTypeFlags    exportFromImportedHandleTypes;
+    VkExternalMemoryHandleTypeFlags    compatibleHandleTypes;
+} VkExternalMemoryProperties;
+
+typedef struct VkPhysicalDeviceExternalImageFormatInfo {
+    VkStructureType                       sType;
+    const void*                           pNext;
+    VkExternalMemoryHandleTypeFlagBits    handleType;
+} VkPhysicalDeviceExternalImageFormatInfo;
+
+typedef struct VkExternalImageFormatProperties {
+    VkStructureType               sType;
+    void*                         pNext;
+    VkExternalMemoryProperties    externalMemoryProperties;
+} VkExternalImageFormatProperties;
+
+typedef struct VkPhysicalDeviceExternalBufferInfo {
+    VkStructureType                       sType;
+    const void*                           pNext;
+    VkBufferCreateFlags                   flags;
+    VkBufferUsageFlags                    usage;
+    VkExternalMemoryHandleTypeFlagBits    handleType;
+} VkPhysicalDeviceExternalBufferInfo;
+
+typedef struct VkExternalBufferProperties {
+    VkStructureType               sType;
+    void*                         pNext;
+    VkExternalMemoryProperties    externalMemoryProperties;
+} VkExternalBufferProperties;
+
+typedef struct VkPhysicalDeviceIDProperties {
+    VkStructureType    sType;
+    void*              pNext;
+    uint8_t            deviceUUID[VK_UUID_SIZE];
+    uint8_t            driverUUID[VK_UUID_SIZE];
+    uint8_t            deviceLUID[VK_LUID_SIZE];
+    uint32_t           deviceNodeMask;
+    VkBool32           deviceLUIDValid;
+} VkPhysicalDeviceIDProperties;
+
+typedef struct VkExternalMemoryImageCreateInfo {
+    VkStructureType                    sType;
+    const void*                        pNext;
+    VkExternalMemoryHandleTypeFlags    handleTypes;
+} VkExternalMemoryImageCreateInfo;
+
+typedef struct VkExternalMemoryBufferCreateInfo {
+    VkStructureType                    sType;
+    const void*                        pNext;
+    VkExternalMemoryHandleTypeFlags    handleTypes;
+} VkExternalMemoryBufferCreateInfo;
+
+typedef struct VkExportMemoryAllocateInfo {
+    VkStructureType                    sType;
+    const void*                        pNext;
+    VkExternalMemoryHandleTypeFlags    handleTypes;
+} VkExportMemoryAllocateInfo;
+
+typedef struct VkPhysicalDeviceExternalFenceInfo {
+    VkStructureType                      sType;
+    const void*                          pNext;
+    VkExternalFenceHandleTypeFlagBits    handleType;
+} VkPhysicalDeviceExternalFenceInfo;
+
+typedef struct VkExternalFenceProperties {
+    VkStructureType                   sType;
+    void*                             pNext;
+    VkExternalFenceHandleTypeFlags    exportFromImportedHandleTypes;
+    VkExternalFenceHandleTypeFlags    compatibleHandleTypes;
+    VkExternalFenceFeatureFlags       externalFenceFeatures;
+} VkExternalFenceProperties;
+
+typedef struct VkExportFenceCreateInfo {
+    VkStructureType                   sType;
+    const void*                       pNext;
+    VkExternalFenceHandleTypeFlags    handleTypes;
+} VkExportFenceCreateInfo;
+
+typedef struct VkExportSemaphoreCreateInfo {
+    VkStructureType                       sType;
+    const void*                           pNext;
+    VkExternalSemaphoreHandleTypeFlags    handleTypes;
+} VkExportSemaphoreCreateInfo;
+
+typedef struct VkPhysicalDeviceExternalSemaphoreInfo {
+    VkStructureType                          sType;
+    const void*                              pNext;
+    VkExternalSemaphoreHandleTypeFlagBits    handleType;
+} VkPhysicalDeviceExternalSemaphoreInfo;
+
+typedef struct VkExternalSemaphoreProperties {
+    VkStructureType                       sType;
+    void*                                 pNext;
+    VkExternalSemaphoreHandleTypeFlags    exportFromImportedHandleTypes;
+    VkExternalSemaphoreHandleTypeFlags    compatibleHandleTypes;
+    VkExternalSemaphoreFeatureFlags       externalSemaphoreFeatures;
+} VkExternalSemaphoreProperties;
+
+typedef struct VkPhysicalDeviceMaintenance3Properties {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           maxPerSetDescriptors;
+    VkDeviceSize       maxMemoryAllocationSize;
+} VkPhysicalDeviceMaintenance3Properties;
+
+typedef struct VkDescriptorSetLayoutSupport {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           supported;
+} VkDescriptorSetLayoutSupport;
+
+typedef struct VkPhysicalDeviceShaderDrawParametersFeatures {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           shaderDrawParameters;
+} VkPhysicalDeviceShaderDrawParametersFeatures;
+
+typedef VkPhysicalDeviceShaderDrawParametersFeatures VkPhysicalDeviceShaderDrawParameterFeatures;
+
+typedef VkResult (VKAPI_PTR *PFN_vkEnumerateInstanceVersion)(uint32_t* pApiVersion);
+typedef VkResult (VKAPI_PTR *PFN_vkBindBufferMemory2)(VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos);
+typedef VkResult (VKAPI_PTR *PFN_vkBindImageMemory2)(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos);
+typedef void (VKAPI_PTR *PFN_vkGetDeviceGroupPeerMemoryFeatures)(VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures);
+typedef void (VKAPI_PTR *PFN_vkCmdSetDeviceMask)(VkCommandBuffer commandBuffer, uint32_t deviceMask);
+typedef void (VKAPI_PTR *PFN_vkCmdDispatchBase)(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ);
+typedef VkResult (VKAPI_PTR *PFN_vkEnumeratePhysicalDeviceGroups)(VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties);
+typedef void (VKAPI_PTR *PFN_vkGetImageMemoryRequirements2)(VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements);
+typedef void (VKAPI_PTR *PFN_vkGetBufferMemoryRequirements2)(VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements);
+typedef void (VKAPI_PTR *PFN_vkGetImageSparseMemoryRequirements2)(VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFeatures2)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceProperties2)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFormatProperties2)(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceImageFormatProperties2)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceQueueFamilyProperties2)(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceMemoryProperties2)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceSparseImageFormatProperties2)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties);
+typedef void (VKAPI_PTR *PFN_vkTrimCommandPool)(VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags);
+typedef void (VKAPI_PTR *PFN_vkGetDeviceQueue2)(VkDevice device, const VkDeviceQueueInfo2* pQueueInfo, VkQueue* pQueue);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateSamplerYcbcrConversion)(VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion);
+typedef void (VKAPI_PTR *PFN_vkDestroySamplerYcbcrConversion)(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateDescriptorUpdateTemplate)(VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate);
+typedef void (VKAPI_PTR *PFN_vkDestroyDescriptorUpdateTemplate)(VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator);
+typedef void (VKAPI_PTR *PFN_vkUpdateDescriptorSetWithTemplate)(VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalBufferProperties)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalFenceProperties)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalSemaphoreProperties)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties);
+typedef void (VKAPI_PTR *PFN_vkGetDescriptorSetLayoutSupport)(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceVersion(
+    uint32_t*                                   pApiVersion);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkBindBufferMemory2(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindBufferMemoryInfo*               pBindInfos);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory2(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindImageMemoryInfo*                pBindInfos);
+
+VKAPI_ATTR void VKAPI_CALL vkGetDeviceGroupPeerMemoryFeatures(
+    VkDevice                                    device,
+    uint32_t                                    heapIndex,
+    uint32_t                                    localDeviceIndex,
+    uint32_t                                    remoteDeviceIndex,
+    VkPeerMemoryFeatureFlags*                   pPeerMemoryFeatures);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetDeviceMask(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    deviceMask);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDispatchBase(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    baseGroupX,
+    uint32_t                                    baseGroupY,
+    uint32_t                                    baseGroupZ,
+    uint32_t                                    groupCountX,
+    uint32_t                                    groupCountY,
+    uint32_t                                    groupCountZ);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDeviceGroups(
+    VkInstance                                  instance,
+    uint32_t*                                   pPhysicalDeviceGroupCount,
+    VkPhysicalDeviceGroupProperties*            pPhysicalDeviceGroupProperties);
+
+VKAPI_ATTR void VKAPI_CALL vkGetImageMemoryRequirements2(
+    VkDevice                                    device,
+    const VkImageMemoryRequirementsInfo2*       pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements);
+
+VKAPI_ATTR void VKAPI_CALL vkGetBufferMemoryRequirements2(
+    VkDevice                                    device,
+    const VkBufferMemoryRequirementsInfo2*      pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements);
+
+VKAPI_ATTR void VKAPI_CALL vkGetImageSparseMemoryRequirements2(
+    VkDevice                                    device,
+    const VkImageSparseMemoryRequirementsInfo2* pInfo,
+    uint32_t*                                   pSparseMemoryRequirementCount,
+    VkSparseImageMemoryRequirements2*           pSparseMemoryRequirements);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFeatures2(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceFeatures2*                  pFeatures);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceProperties2*                pProperties);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFormatProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkFormatProperties2*                        pFormatProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceImageFormatProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceImageFormatInfo2*     pImageFormatInfo,
+    VkImageFormatProperties2*                   pImageFormatProperties);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pQueueFamilyPropertyCount,
+    VkQueueFamilyProperties2*                   pQueueFamilyProperties);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceMemoryProperties2*          pMemoryProperties);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceSparseImageFormatProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo,
+    uint32_t*                                   pPropertyCount,
+    VkSparseImageFormatProperties2*             pProperties);
+
+VKAPI_ATTR void VKAPI_CALL vkTrimCommandPool(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    VkCommandPoolTrimFlags                      flags);
+
+VKAPI_ATTR void VKAPI_CALL vkGetDeviceQueue2(
+    VkDevice                                    device,
+    const VkDeviceQueueInfo2*                   pQueueInfo,
+    VkQueue*                                    pQueue);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateSamplerYcbcrConversion(
+    VkDevice                                    device,
+    const VkSamplerYcbcrConversionCreateInfo*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSamplerYcbcrConversion*                   pYcbcrConversion);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroySamplerYcbcrConversion(
+    VkDevice                                    device,
+    VkSamplerYcbcrConversion                    ycbcrConversion,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorUpdateTemplate(
+    VkDevice                                    device,
+    const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorUpdateTemplate*                 pDescriptorUpdateTemplate);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorUpdateTemplate(
+    VkDevice                                    device,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSetWithTemplate(
+    VkDevice                                    device,
+    VkDescriptorSet                             descriptorSet,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const void*                                 pData);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalBufferProperties(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalBufferInfo*   pExternalBufferInfo,
+    VkExternalBufferProperties*                 pExternalBufferProperties);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalFenceProperties(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalFenceInfo*    pExternalFenceInfo,
+    VkExternalFenceProperties*                  pExternalFenceProperties);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalSemaphoreProperties(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo,
+    VkExternalSemaphoreProperties*              pExternalSemaphoreProperties);
+
+VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetLayoutSupport(
+    VkDevice                                    device,
+    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
+    VkDescriptorSetLayoutSupport*               pSupport);
+#endif
+
+
+#define VK_VERSION_1_2 1
+// Vulkan 1.2 version number
+#define VK_API_VERSION_1_2 VK_MAKE_API_VERSION(0, 1, 2, 0)// Patch version should always be set to 0
+
+#define VK_MAX_DRIVER_NAME_SIZE           256U
+#define VK_MAX_DRIVER_INFO_SIZE           256U
+
+typedef enum VkDriverId {
+    VK_DRIVER_ID_AMD_PROPRIETARY = 1,
+    VK_DRIVER_ID_AMD_OPEN_SOURCE = 2,
+    VK_DRIVER_ID_MESA_RADV = 3,
+    VK_DRIVER_ID_NVIDIA_PROPRIETARY = 4,
+    VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS = 5,
+    VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA = 6,
+    VK_DRIVER_ID_IMAGINATION_PROPRIETARY = 7,
+    VK_DRIVER_ID_QUALCOMM_PROPRIETARY = 8,
+    VK_DRIVER_ID_ARM_PROPRIETARY = 9,
+    VK_DRIVER_ID_GOOGLE_SWIFTSHADER = 10,
+    VK_DRIVER_ID_GGP_PROPRIETARY = 11,
+    VK_DRIVER_ID_BROADCOM_PROPRIETARY = 12,
+    VK_DRIVER_ID_MESA_LLVMPIPE = 13,
+    VK_DRIVER_ID_MOLTENVK = 14,
+    VK_DRIVER_ID_COREAVI_PROPRIETARY = 15,
+    VK_DRIVER_ID_JUICE_PROPRIETARY = 16,
+    VK_DRIVER_ID_VERISILICON_PROPRIETARY = 17,
+    VK_DRIVER_ID_MESA_TURNIP = 18,
+    VK_DRIVER_ID_MESA_V3DV = 19,
+    VK_DRIVER_ID_MESA_PANVK = 20,
+    VK_DRIVER_ID_SAMSUNG_PROPRIETARY = 21,
+    VK_DRIVER_ID_MESA_VENUS = 22,
+    VK_DRIVER_ID_MESA_DOZEN = 23,
+    VK_DRIVER_ID_MESA_NVK = 24,
+    VK_DRIVER_ID_IMAGINATION_OPEN_SOURCE_MESA = 25,
+    VK_DRIVER_ID_AMD_PROPRIETARY_KHR = VK_DRIVER_ID_AMD_PROPRIETARY,
+    VK_DRIVER_ID_AMD_OPEN_SOURCE_KHR = VK_DRIVER_ID_AMD_OPEN_SOURCE,
+    VK_DRIVER_ID_MESA_RADV_KHR = VK_DRIVER_ID_MESA_RADV,
+    VK_DRIVER_ID_NVIDIA_PROPRIETARY_KHR = VK_DRIVER_ID_NVIDIA_PROPRIETARY,
+    VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS_KHR = VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS,
+    VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA_KHR = VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA,
+    VK_DRIVER_ID_IMAGINATION_PROPRIETARY_KHR = VK_DRIVER_ID_IMAGINATION_PROPRIETARY,
+    VK_DRIVER_ID_QUALCOMM_PROPRIETARY_KHR = VK_DRIVER_ID_QUALCOMM_PROPRIETARY,
+    VK_DRIVER_ID_ARM_PROPRIETARY_KHR = VK_DRIVER_ID_ARM_PROPRIETARY,
+    VK_DRIVER_ID_GOOGLE_SWIFTSHADER_KHR = VK_DRIVER_ID_GOOGLE_SWIFTSHADER,
+    VK_DRIVER_ID_GGP_PROPRIETARY_KHR = VK_DRIVER_ID_GGP_PROPRIETARY,
+    VK_DRIVER_ID_BROADCOM_PROPRIETARY_KHR = VK_DRIVER_ID_BROADCOM_PROPRIETARY,
+    VK_DRIVER_ID_MAX_ENUM = 0x7FFFFFFF
+} VkDriverId;
+
+typedef enum VkShaderFloatControlsIndependence {
+    VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY = 0,
+    VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL = 1,
+    VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE = 2,
+    VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY_KHR = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY,
+    VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL_KHR = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL,
+    VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE_KHR = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE,
+    VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_MAX_ENUM = 0x7FFFFFFF
+} VkShaderFloatControlsIndependence;
+
+typedef enum VkSamplerReductionMode {
+    VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE = 0,
+    VK_SAMPLER_REDUCTION_MODE_MIN = 1,
+    VK_SAMPLER_REDUCTION_MODE_MAX = 2,
+    VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT = VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE,
+    VK_SAMPLER_REDUCTION_MODE_MIN_EXT = VK_SAMPLER_REDUCTION_MODE_MIN,
+    VK_SAMPLER_REDUCTION_MODE_MAX_EXT = VK_SAMPLER_REDUCTION_MODE_MAX,
+    VK_SAMPLER_REDUCTION_MODE_MAX_ENUM = 0x7FFFFFFF
+} VkSamplerReductionMode;
+
+typedef enum VkSemaphoreType {
+    VK_SEMAPHORE_TYPE_BINARY = 0,
+    VK_SEMAPHORE_TYPE_TIMELINE = 1,
+    VK_SEMAPHORE_TYPE_BINARY_KHR = VK_SEMAPHORE_TYPE_BINARY,
+    VK_SEMAPHORE_TYPE_TIMELINE_KHR = VK_SEMAPHORE_TYPE_TIMELINE,
+    VK_SEMAPHORE_TYPE_MAX_ENUM = 0x7FFFFFFF
+} VkSemaphoreType;
+
+typedef enum VkResolveModeFlagBits {
+    VK_RESOLVE_MODE_NONE = 0,
+    VK_RESOLVE_MODE_SAMPLE_ZERO_BIT = 0x00000001,
+    VK_RESOLVE_MODE_AVERAGE_BIT = 0x00000002,
+    VK_RESOLVE_MODE_MIN_BIT = 0x00000004,
+    VK_RESOLVE_MODE_MAX_BIT = 0x00000008,
+    VK_RESOLVE_MODE_NONE_KHR = VK_RESOLVE_MODE_NONE,
+    VK_RESOLVE_MODE_SAMPLE_ZERO_BIT_KHR = VK_RESOLVE_MODE_SAMPLE_ZERO_BIT,
+    VK_RESOLVE_MODE_AVERAGE_BIT_KHR = VK_RESOLVE_MODE_AVERAGE_BIT,
+    VK_RESOLVE_MODE_MIN_BIT_KHR = VK_RESOLVE_MODE_MIN_BIT,
+    VK_RESOLVE_MODE_MAX_BIT_KHR = VK_RESOLVE_MODE_MAX_BIT,
+    VK_RESOLVE_MODE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkResolveModeFlagBits;
+typedef VkFlags VkResolveModeFlags;
+
+typedef enum VkDescriptorBindingFlagBits {
+    VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT = 0x00000001,
+    VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT = 0x00000002,
+    VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT = 0x00000004,
+    VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT = 0x00000008,
+    VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT = VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT,
+    VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT = VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT,
+    VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT = VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT,
+    VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT = VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT,
+    VK_DESCRIPTOR_BINDING_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkDescriptorBindingFlagBits;
+typedef VkFlags VkDescriptorBindingFlags;
+
+typedef enum VkSemaphoreWaitFlagBits {
+    VK_SEMAPHORE_WAIT_ANY_BIT = 0x00000001,
+    VK_SEMAPHORE_WAIT_ANY_BIT_KHR = VK_SEMAPHORE_WAIT_ANY_BIT,
+    VK_SEMAPHORE_WAIT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkSemaphoreWaitFlagBits;
+typedef VkFlags VkSemaphoreWaitFlags;
+typedef struct VkPhysicalDeviceVulkan11Features {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           storageBuffer16BitAccess;
+    VkBool32           uniformAndStorageBuffer16BitAccess;
+    VkBool32           storagePushConstant16;
+    VkBool32           storageInputOutput16;
+    VkBool32           multiview;
+    VkBool32           multiviewGeometryShader;
+    VkBool32           multiviewTessellationShader;
+    VkBool32           variablePointersStorageBuffer;
+    VkBool32           variablePointers;
+    VkBool32           protectedMemory;
+    VkBool32           samplerYcbcrConversion;
+    VkBool32           shaderDrawParameters;
+} VkPhysicalDeviceVulkan11Features;
+
+typedef struct VkPhysicalDeviceVulkan11Properties {
+    VkStructureType            sType;
+    void*                      pNext;
+    uint8_t                    deviceUUID[VK_UUID_SIZE];
+    uint8_t                    driverUUID[VK_UUID_SIZE];
+    uint8_t                    deviceLUID[VK_LUID_SIZE];
+    uint32_t                   deviceNodeMask;
+    VkBool32                   deviceLUIDValid;
+    uint32_t                   subgroupSize;
+    VkShaderStageFlags         subgroupSupportedStages;
+    VkSubgroupFeatureFlags     subgroupSupportedOperations;
+    VkBool32                   subgroupQuadOperationsInAllStages;
+    VkPointClippingBehavior    pointClippingBehavior;
+    uint32_t                   maxMultiviewViewCount;
+    uint32_t                   maxMultiviewInstanceIndex;
+    VkBool32                   protectedNoFault;
+    uint32_t                   maxPerSetDescriptors;
+    VkDeviceSize               maxMemoryAllocationSize;
+} VkPhysicalDeviceVulkan11Properties;
+
+typedef struct VkPhysicalDeviceVulkan12Features {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           samplerMirrorClampToEdge;
+    VkBool32           drawIndirectCount;
+    VkBool32           storageBuffer8BitAccess;
+    VkBool32           uniformAndStorageBuffer8BitAccess;
+    VkBool32           storagePushConstant8;
+    VkBool32           shaderBufferInt64Atomics;
+    VkBool32           shaderSharedInt64Atomics;
+    VkBool32           shaderFloat16;
+    VkBool32           shaderInt8;
+    VkBool32           descriptorIndexing;
+    VkBool32           shaderInputAttachmentArrayDynamicIndexing;
+    VkBool32           shaderUniformTexelBufferArrayDynamicIndexing;
+    VkBool32           shaderStorageTexelBufferArrayDynamicIndexing;
+    VkBool32           shaderUniformBufferArrayNonUniformIndexing;
+    VkBool32           shaderSampledImageArrayNonUniformIndexing;
+    VkBool32           shaderStorageBufferArrayNonUniformIndexing;
+    VkBool32           shaderStorageImageArrayNonUniformIndexing;
+    VkBool32           shaderInputAttachmentArrayNonUniformIndexing;
+    VkBool32           shaderUniformTexelBufferArrayNonUniformIndexing;
+    VkBool32           shaderStorageTexelBufferArrayNonUniformIndexing;
+    VkBool32           descriptorBindingUniformBufferUpdateAfterBind;
+    VkBool32           descriptorBindingSampledImageUpdateAfterBind;
+    VkBool32           descriptorBindingStorageImageUpdateAfterBind;
+    VkBool32           descriptorBindingStorageBufferUpdateAfterBind;
+    VkBool32           descriptorBindingUniformTexelBufferUpdateAfterBind;
+    VkBool32           descriptorBindingStorageTexelBufferUpdateAfterBind;
+    VkBool32           descriptorBindingUpdateUnusedWhilePending;
+    VkBool32           descriptorBindingPartiallyBound;
+    VkBool32           descriptorBindingVariableDescriptorCount;
+    VkBool32           runtimeDescriptorArray;
+    VkBool32           samplerFilterMinmax;
+    VkBool32           scalarBlockLayout;
+    VkBool32           imagelessFramebuffer;
+    VkBool32           uniformBufferStandardLayout;
+    VkBool32           shaderSubgroupExtendedTypes;
+    VkBool32           separateDepthStencilLayouts;
+    VkBool32           hostQueryReset;
+    VkBool32           timelineSemaphore;
+    VkBool32           bufferDeviceAddress;
+    VkBool32           bufferDeviceAddressCaptureReplay;
+    VkBool32           bufferDeviceAddressMultiDevice;
+    VkBool32           vulkanMemoryModel;
+    VkBool32           vulkanMemoryModelDeviceScope;
+    VkBool32           vulkanMemoryModelAvailabilityVisibilityChains;
+    VkBool32           shaderOutputViewportIndex;
+    VkBool32           shaderOutputLayer;
+    VkBool32           subgroupBroadcastDynamicId;
+} VkPhysicalDeviceVulkan12Features;
+
+typedef struct VkConformanceVersion {
+    uint8_t    major;
+    uint8_t    minor;
+    uint8_t    subminor;
+    uint8_t    patch;
+} VkConformanceVersion;
+
+typedef struct VkPhysicalDeviceVulkan12Properties {
+    VkStructureType                      sType;
+    void*                                pNext;
+    VkDriverId                           driverID;
+    char                                 driverName[VK_MAX_DRIVER_NAME_SIZE];
+    char                                 driverInfo[VK_MAX_DRIVER_INFO_SIZE];
+    VkConformanceVersion                 conformanceVersion;
+    VkShaderFloatControlsIndependence    denormBehaviorIndependence;
+    VkShaderFloatControlsIndependence    roundingModeIndependence;
+    VkBool32                             shaderSignedZeroInfNanPreserveFloat16;
+    VkBool32                             shaderSignedZeroInfNanPreserveFloat32;
+    VkBool32                             shaderSignedZeroInfNanPreserveFloat64;
+    VkBool32                             shaderDenormPreserveFloat16;
+    VkBool32                             shaderDenormPreserveFloat32;
+    VkBool32                             shaderDenormPreserveFloat64;
+    VkBool32                             shaderDenormFlushToZeroFloat16;
+    VkBool32                             shaderDenormFlushToZeroFloat32;
+    VkBool32                             shaderDenormFlushToZeroFloat64;
+    VkBool32                             shaderRoundingModeRTEFloat16;
+    VkBool32                             shaderRoundingModeRTEFloat32;
+    VkBool32                             shaderRoundingModeRTEFloat64;
+    VkBool32                             shaderRoundingModeRTZFloat16;
+    VkBool32                             shaderRoundingModeRTZFloat32;
+    VkBool32                             shaderRoundingModeRTZFloat64;
+    uint32_t                             maxUpdateAfterBindDescriptorsInAllPools;
+    VkBool32                             shaderUniformBufferArrayNonUniformIndexingNative;
+    VkBool32                             shaderSampledImageArrayNonUniformIndexingNative;
+    VkBool32                             shaderStorageBufferArrayNonUniformIndexingNative;
+    VkBool32                             shaderStorageImageArrayNonUniformIndexingNative;
+    VkBool32                             shaderInputAttachmentArrayNonUniformIndexingNative;
+    VkBool32                             robustBufferAccessUpdateAfterBind;
+    VkBool32                             quadDivergentImplicitLod;
+    uint32_t                             maxPerStageDescriptorUpdateAfterBindSamplers;
+    uint32_t                             maxPerStageDescriptorUpdateAfterBindUniformBuffers;
+    uint32_t                             maxPerStageDescriptorUpdateAfterBindStorageBuffers;
+    uint32_t                             maxPerStageDescriptorUpdateAfterBindSampledImages;
+    uint32_t                             maxPerStageDescriptorUpdateAfterBindStorageImages;
+    uint32_t                             maxPerStageDescriptorUpdateAfterBindInputAttachments;
+    uint32_t                             maxPerStageUpdateAfterBindResources;
+    uint32_t                             maxDescriptorSetUpdateAfterBindSamplers;
+    uint32_t                             maxDescriptorSetUpdateAfterBindUniformBuffers;
+    uint32_t                             maxDescriptorSetUpdateAfterBindUniformBuffersDynamic;
+    uint32_t                             maxDescriptorSetUpdateAfterBindStorageBuffers;
+    uint32_t                             maxDescriptorSetUpdateAfterBindStorageBuffersDynamic;
+    uint32_t                             maxDescriptorSetUpdateAfterBindSampledImages;
+    uint32_t                             maxDescriptorSetUpdateAfterBindStorageImages;
+    uint32_t                             maxDescriptorSetUpdateAfterBindInputAttachments;
+    VkResolveModeFlags                   supportedDepthResolveModes;
+    VkResolveModeFlags                   supportedStencilResolveModes;
+    VkBool32                             independentResolveNone;
+    VkBool32                             independentResolve;
+    VkBool32                             filterMinmaxSingleComponentFormats;
+    VkBool32                             filterMinmaxImageComponentMapping;
+    uint64_t                             maxTimelineSemaphoreValueDifference;
+    VkSampleCountFlags                   framebufferIntegerColorSampleCounts;
+} VkPhysicalDeviceVulkan12Properties;
+
+typedef struct VkImageFormatListCreateInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           viewFormatCount;
+    const VkFormat*    pViewFormats;
+} VkImageFormatListCreateInfo;
+
+typedef struct VkAttachmentDescription2 {
+    VkStructureType                 sType;
+    const void*                     pNext;
+    VkAttachmentDescriptionFlags    flags;
+    VkFormat                        format;
+    VkSampleCountFlagBits           samples;
+    VkAttachmentLoadOp              loadOp;
+    VkAttachmentStoreOp             storeOp;
+    VkAttachmentLoadOp              stencilLoadOp;
+    VkAttachmentStoreOp             stencilStoreOp;
+    VkImageLayout                   initialLayout;
+    VkImageLayout                   finalLayout;
+} VkAttachmentDescription2;
+
+typedef struct VkAttachmentReference2 {
+    VkStructureType       sType;
+    const void*           pNext;
+    uint32_t              attachment;
+    VkImageLayout         layout;
+    VkImageAspectFlags    aspectMask;
+} VkAttachmentReference2;
+
+typedef struct VkSubpassDescription2 {
+    VkStructureType                  sType;
+    const void*                      pNext;
+    VkSubpassDescriptionFlags        flags;
+    VkPipelineBindPoint              pipelineBindPoint;
+    uint32_t                         viewMask;
+    uint32_t                         inputAttachmentCount;
+    const VkAttachmentReference2*    pInputAttachments;
+    uint32_t                         colorAttachmentCount;
+    const VkAttachmentReference2*    pColorAttachments;
+    const VkAttachmentReference2*    pResolveAttachments;
+    const VkAttachmentReference2*    pDepthStencilAttachment;
+    uint32_t                         preserveAttachmentCount;
+    const uint32_t*                  pPreserveAttachments;
+} VkSubpassDescription2;
+
+typedef struct VkSubpassDependency2 {
+    VkStructureType         sType;
+    const void*             pNext;
+    uint32_t                srcSubpass;
+    uint32_t                dstSubpass;
+    VkPipelineStageFlags    srcStageMask;
+    VkPipelineStageFlags    dstStageMask;
+    VkAccessFlags           srcAccessMask;
+    VkAccessFlags           dstAccessMask;
+    VkDependencyFlags       dependencyFlags;
+    int32_t                 viewOffset;
+} VkSubpassDependency2;
+
+typedef struct VkRenderPassCreateInfo2 {
+    VkStructureType                    sType;
+    const void*                        pNext;
+    VkRenderPassCreateFlags            flags;
+    uint32_t                           attachmentCount;
+    const VkAttachmentDescription2*    pAttachments;
+    uint32_t                           subpassCount;
+    const VkSubpassDescription2*       pSubpasses;
+    uint32_t                           dependencyCount;
+    const VkSubpassDependency2*        pDependencies;
+    uint32_t                           correlatedViewMaskCount;
+    const uint32_t*                    pCorrelatedViewMasks;
+} VkRenderPassCreateInfo2;
+
+typedef struct VkSubpassBeginInfo {
+    VkStructureType      sType;
+    const void*          pNext;
+    VkSubpassContents    contents;
+} VkSubpassBeginInfo;
+
+typedef struct VkSubpassEndInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+} VkSubpassEndInfo;
+
+typedef struct VkPhysicalDevice8BitStorageFeatures {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           storageBuffer8BitAccess;
+    VkBool32           uniformAndStorageBuffer8BitAccess;
+    VkBool32           storagePushConstant8;
+} VkPhysicalDevice8BitStorageFeatures;
+
+typedef struct VkPhysicalDeviceDriverProperties {
+    VkStructureType         sType;
+    void*                   pNext;
+    VkDriverId              driverID;
+    char                    driverName[VK_MAX_DRIVER_NAME_SIZE];
+    char                    driverInfo[VK_MAX_DRIVER_INFO_SIZE];
+    VkConformanceVersion    conformanceVersion;
+} VkPhysicalDeviceDriverProperties;
+
+typedef struct VkPhysicalDeviceShaderAtomicInt64Features {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           shaderBufferInt64Atomics;
+    VkBool32           shaderSharedInt64Atomics;
+} VkPhysicalDeviceShaderAtomicInt64Features;
+
+typedef struct VkPhysicalDeviceShaderFloat16Int8Features {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           shaderFloat16;
+    VkBool32           shaderInt8;
+} VkPhysicalDeviceShaderFloat16Int8Features;
+
+typedef struct VkPhysicalDeviceFloatControlsProperties {
+    VkStructureType                      sType;
+    void*                                pNext;
+    VkShaderFloatControlsIndependence    denormBehaviorIndependence;
+    VkShaderFloatControlsIndependence    roundingModeIndependence;
+    VkBool32                             shaderSignedZeroInfNanPreserveFloat16;
+    VkBool32                             shaderSignedZeroInfNanPreserveFloat32;
+    VkBool32                             shaderSignedZeroInfNanPreserveFloat64;
+    VkBool32                             shaderDenormPreserveFloat16;
+    VkBool32                             shaderDenormPreserveFloat32;
+    VkBool32                             shaderDenormPreserveFloat64;
+    VkBool32                             shaderDenormFlushToZeroFloat16;
+    VkBool32                             shaderDenormFlushToZeroFloat32;
+    VkBool32                             shaderDenormFlushToZeroFloat64;
+    VkBool32                             shaderRoundingModeRTEFloat16;
+    VkBool32                             shaderRoundingModeRTEFloat32;
+    VkBool32                             shaderRoundingModeRTEFloat64;
+    VkBool32                             shaderRoundingModeRTZFloat16;
+    VkBool32                             shaderRoundingModeRTZFloat32;
+    VkBool32                             shaderRoundingModeRTZFloat64;
+} VkPhysicalDeviceFloatControlsProperties;
+
+typedef struct VkDescriptorSetLayoutBindingFlagsCreateInfo {
+    VkStructureType                    sType;
+    const void*                        pNext;
+    uint32_t                           bindingCount;
+    const VkDescriptorBindingFlags*    pBindingFlags;
+} VkDescriptorSetLayoutBindingFlagsCreateInfo;
+
+typedef struct VkPhysicalDeviceDescriptorIndexingFeatures {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           shaderInputAttachmentArrayDynamicIndexing;
+    VkBool32           shaderUniformTexelBufferArrayDynamicIndexing;
+    VkBool32           shaderStorageTexelBufferArrayDynamicIndexing;
+    VkBool32           shaderUniformBufferArrayNonUniformIndexing;
+    VkBool32           shaderSampledImageArrayNonUniformIndexing;
+    VkBool32           shaderStorageBufferArrayNonUniformIndexing;
+    VkBool32           shaderStorageImageArrayNonUniformIndexing;
+    VkBool32           shaderInputAttachmentArrayNonUniformIndexing;
+    VkBool32           shaderUniformTexelBufferArrayNonUniformIndexing;
+    VkBool32           shaderStorageTexelBufferArrayNonUniformIndexing;
+    VkBool32           descriptorBindingUniformBufferUpdateAfterBind;
+    VkBool32           descriptorBindingSampledImageUpdateAfterBind;
+    VkBool32           descriptorBindingStorageImageUpdateAfterBind;
+    VkBool32           descriptorBindingStorageBufferUpdateAfterBind;
+    VkBool32           descriptorBindingUniformTexelBufferUpdateAfterBind;
+    VkBool32           descriptorBindingStorageTexelBufferUpdateAfterBind;
+    VkBool32           descriptorBindingUpdateUnusedWhilePending;
+    VkBool32           descriptorBindingPartiallyBound;
+    VkBool32           descriptorBindingVariableDescriptorCount;
+    VkBool32           runtimeDescriptorArray;
+} VkPhysicalDeviceDescriptorIndexingFeatures;
+
+typedef struct VkPhysicalDeviceDescriptorIndexingProperties {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           maxUpdateAfterBindDescriptorsInAllPools;
+    VkBool32           shaderUniformBufferArrayNonUniformIndexingNative;
+    VkBool32           shaderSampledImageArrayNonUniformIndexingNative;
+    VkBool32           shaderStorageBufferArrayNonUniformIndexingNative;
+    VkBool32           shaderStorageImageArrayNonUniformIndexingNative;
+    VkBool32           shaderInputAttachmentArrayNonUniformIndexingNative;
+    VkBool32           robustBufferAccessUpdateAfterBind;
+    VkBool32           quadDivergentImplicitLod;
+    uint32_t           maxPerStageDescriptorUpdateAfterBindSamplers;
+    uint32_t           maxPerStageDescriptorUpdateAfterBindUniformBuffers;
+    uint32_t           maxPerStageDescriptorUpdateAfterBindStorageBuffers;
+    uint32_t           maxPerStageDescriptorUpdateAfterBindSampledImages;
+    uint32_t           maxPerStageDescriptorUpdateAfterBindStorageImages;
+    uint32_t           maxPerStageDescriptorUpdateAfterBindInputAttachments;
+    uint32_t           maxPerStageUpdateAfterBindResources;
+    uint32_t           maxDescriptorSetUpdateAfterBindSamplers;
+    uint32_t           maxDescriptorSetUpdateAfterBindUniformBuffers;
+    uint32_t           maxDescriptorSetUpdateAfterBindUniformBuffersDynamic;
+    uint32_t           maxDescriptorSetUpdateAfterBindStorageBuffers;
+    uint32_t           maxDescriptorSetUpdateAfterBindStorageBuffersDynamic;
+    uint32_t           maxDescriptorSetUpdateAfterBindSampledImages;
+    uint32_t           maxDescriptorSetUpdateAfterBindStorageImages;
+    uint32_t           maxDescriptorSetUpdateAfterBindInputAttachments;
+} VkPhysicalDeviceDescriptorIndexingProperties;
+
+typedef struct VkDescriptorSetVariableDescriptorCountAllocateInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           descriptorSetCount;
+    const uint32_t*    pDescriptorCounts;
+} VkDescriptorSetVariableDescriptorCountAllocateInfo;
+
+typedef struct VkDescriptorSetVariableDescriptorCountLayoutSupport {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           maxVariableDescriptorCount;
+} VkDescriptorSetVariableDescriptorCountLayoutSupport;
+
+typedef struct VkSubpassDescriptionDepthStencilResolve {
+    VkStructureType                  sType;
+    const void*                      pNext;
+    VkResolveModeFlagBits            depthResolveMode;
+    VkResolveModeFlagBits            stencilResolveMode;
+    const VkAttachmentReference2*    pDepthStencilResolveAttachment;
+} VkSubpassDescriptionDepthStencilResolve;
+
+typedef struct VkPhysicalDeviceDepthStencilResolveProperties {
+    VkStructureType       sType;
+    void*                 pNext;
+    VkResolveModeFlags    supportedDepthResolveModes;
+    VkResolveModeFlags    supportedStencilResolveModes;
+    VkBool32              independentResolveNone;
+    VkBool32              independentResolve;
+} VkPhysicalDeviceDepthStencilResolveProperties;
+
+typedef struct VkPhysicalDeviceScalarBlockLayoutFeatures {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           scalarBlockLayout;
+} VkPhysicalDeviceScalarBlockLayoutFeatures;
+
+typedef struct VkImageStencilUsageCreateInfo {
+    VkStructureType      sType;
+    const void*          pNext;
+    VkImageUsageFlags    stencilUsage;
+} VkImageStencilUsageCreateInfo;
+
+typedef struct VkSamplerReductionModeCreateInfo {
+    VkStructureType           sType;
+    const void*               pNext;
+    VkSamplerReductionMode    reductionMode;
+} VkSamplerReductionModeCreateInfo;
+
+typedef struct VkPhysicalDeviceSamplerFilterMinmaxProperties {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           filterMinmaxSingleComponentFormats;
+    VkBool32           filterMinmaxImageComponentMapping;
+} VkPhysicalDeviceSamplerFilterMinmaxProperties;
+
+typedef struct VkPhysicalDeviceVulkanMemoryModelFeatures {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           vulkanMemoryModel;
+    VkBool32           vulkanMemoryModelDeviceScope;
+    VkBool32           vulkanMemoryModelAvailabilityVisibilityChains;
+} VkPhysicalDeviceVulkanMemoryModelFeatures;
+
+typedef struct VkPhysicalDeviceImagelessFramebufferFeatures {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           imagelessFramebuffer;
+} VkPhysicalDeviceImagelessFramebufferFeatures;
+
+typedef struct VkFramebufferAttachmentImageInfo {
+    VkStructureType       sType;
+    const void*           pNext;
+    VkImageCreateFlags    flags;
+    VkImageUsageFlags     usage;
+    uint32_t              width;
+    uint32_t              height;
+    uint32_t              layerCount;
+    uint32_t              viewFormatCount;
+    const VkFormat*       pViewFormats;
+} VkFramebufferAttachmentImageInfo;
+
+typedef struct VkFramebufferAttachmentsCreateInfo {
+    VkStructureType                            sType;
+    const void*                                pNext;
+    uint32_t                                   attachmentImageInfoCount;
+    const VkFramebufferAttachmentImageInfo*    pAttachmentImageInfos;
+} VkFramebufferAttachmentsCreateInfo;
+
+typedef struct VkRenderPassAttachmentBeginInfo {
+    VkStructureType       sType;
+    const void*           pNext;
+    uint32_t              attachmentCount;
+    const VkImageView*    pAttachments;
+} VkRenderPassAttachmentBeginInfo;
+
+typedef struct VkPhysicalDeviceUniformBufferStandardLayoutFeatures {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           uniformBufferStandardLayout;
+} VkPhysicalDeviceUniformBufferStandardLayoutFeatures;
+
+typedef struct VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           shaderSubgroupExtendedTypes;
+} VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures;
+
+typedef struct VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           separateDepthStencilLayouts;
+} VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures;
+
+typedef struct VkAttachmentReferenceStencilLayout {
+    VkStructureType    sType;
+    void*              pNext;
+    VkImageLayout      stencilLayout;
+} VkAttachmentReferenceStencilLayout;
+
+typedef struct VkAttachmentDescriptionStencilLayout {
+    VkStructureType    sType;
+    void*              pNext;
+    VkImageLayout      stencilInitialLayout;
+    VkImageLayout      stencilFinalLayout;
+} VkAttachmentDescriptionStencilLayout;
+
+typedef struct VkPhysicalDeviceHostQueryResetFeatures {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           hostQueryReset;
+} VkPhysicalDeviceHostQueryResetFeatures;
+
+typedef struct VkPhysicalDeviceTimelineSemaphoreFeatures {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           timelineSemaphore;
+} VkPhysicalDeviceTimelineSemaphoreFeatures;
+
+typedef struct VkPhysicalDeviceTimelineSemaphoreProperties {
+    VkStructureType    sType;
+    void*              pNext;
+    uint64_t           maxTimelineSemaphoreValueDifference;
+} VkPhysicalDeviceTimelineSemaphoreProperties;
+
+typedef struct VkSemaphoreTypeCreateInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkSemaphoreType    semaphoreType;
+    uint64_t           initialValue;
+} VkSemaphoreTypeCreateInfo;
+
+typedef struct VkTimelineSemaphoreSubmitInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           waitSemaphoreValueCount;
+    const uint64_t*    pWaitSemaphoreValues;
+    uint32_t           signalSemaphoreValueCount;
+    const uint64_t*    pSignalSemaphoreValues;
+} VkTimelineSemaphoreSubmitInfo;
+
+typedef struct VkSemaphoreWaitInfo {
+    VkStructureType         sType;
+    const void*             pNext;
+    VkSemaphoreWaitFlags    flags;
+    uint32_t                semaphoreCount;
+    const VkSemaphore*      pSemaphores;
+    const uint64_t*         pValues;
+} VkSemaphoreWaitInfo;
+
+typedef struct VkSemaphoreSignalInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkSemaphore        semaphore;
+    uint64_t           value;
+} VkSemaphoreSignalInfo;
+
+typedef struct VkPhysicalDeviceBufferDeviceAddressFeatures {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           bufferDeviceAddress;
+    VkBool32           bufferDeviceAddressCaptureReplay;
+    VkBool32           bufferDeviceAddressMultiDevice;
+} VkPhysicalDeviceBufferDeviceAddressFeatures;
+
+typedef struct VkBufferDeviceAddressInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkBuffer           buffer;
+} VkBufferDeviceAddressInfo;
+
+typedef struct VkBufferOpaqueCaptureAddressCreateInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint64_t           opaqueCaptureAddress;
+} VkBufferOpaqueCaptureAddressCreateInfo;
+
+typedef struct VkMemoryOpaqueCaptureAddressAllocateInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint64_t           opaqueCaptureAddress;
+} VkMemoryOpaqueCaptureAddressAllocateInfo;
+
+typedef struct VkDeviceMemoryOpaqueCaptureAddressInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkDeviceMemory     memory;
+} VkDeviceMemoryOpaqueCaptureAddressInfo;
+
+typedef void (VKAPI_PTR *PFN_vkCmdDrawIndirectCount)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride);
+typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexedIndirectCount)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateRenderPass2)(VkDevice device, const VkRenderPassCreateInfo2* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass);
+typedef void (VKAPI_PTR *PFN_vkCmdBeginRenderPass2)(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo*      pRenderPassBegin, const VkSubpassBeginInfo*      pSubpassBeginInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdNextSubpass2)(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo*      pSubpassBeginInfo, const VkSubpassEndInfo*        pSubpassEndInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdEndRenderPass2)(VkCommandBuffer commandBuffer, const VkSubpassEndInfo*        pSubpassEndInfo);
+typedef void (VKAPI_PTR *PFN_vkResetQueryPool)(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount);
+typedef VkResult (VKAPI_PTR *PFN_vkGetSemaphoreCounterValue)(VkDevice device, VkSemaphore semaphore, uint64_t* pValue);
+typedef VkResult (VKAPI_PTR *PFN_vkWaitSemaphores)(VkDevice device, const VkSemaphoreWaitInfo* pWaitInfo, uint64_t timeout);
+typedef VkResult (VKAPI_PTR *PFN_vkSignalSemaphore)(VkDevice device, const VkSemaphoreSignalInfo* pSignalInfo);
+typedef VkDeviceAddress (VKAPI_PTR *PFN_vkGetBufferDeviceAddress)(VkDevice device, const VkBufferDeviceAddressInfo* pInfo);
+typedef uint64_t (VKAPI_PTR *PFN_vkGetBufferOpaqueCaptureAddress)(VkDevice device, const VkBufferDeviceAddressInfo* pInfo);
+typedef uint64_t (VKAPI_PTR *PFN_vkGetDeviceMemoryOpaqueCaptureAddress)(VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirectCount(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirectCount(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateRenderPass2(
+    VkDevice                                    device,
+    const VkRenderPassCreateInfo2*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkRenderPass*                               pRenderPass);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBeginRenderPass2(
+    VkCommandBuffer                             commandBuffer,
+    const VkRenderPassBeginInfo*                pRenderPassBegin,
+    const VkSubpassBeginInfo*                   pSubpassBeginInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdNextSubpass2(
+    VkCommandBuffer                             commandBuffer,
+    const VkSubpassBeginInfo*                   pSubpassBeginInfo,
+    const VkSubpassEndInfo*                     pSubpassEndInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderPass2(
+    VkCommandBuffer                             commandBuffer,
+    const VkSubpassEndInfo*                     pSubpassEndInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkResetQueryPool(
+    VkDevice                                    device,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreCounterValue(
+    VkDevice                                    device,
+    VkSemaphore                                 semaphore,
+    uint64_t*                                   pValue);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkWaitSemaphores(
+    VkDevice                                    device,
+    const VkSemaphoreWaitInfo*                  pWaitInfo,
+    uint64_t                                    timeout);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkSignalSemaphore(
+    VkDevice                                    device,
+    const VkSemaphoreSignalInfo*                pSignalInfo);
+
+VKAPI_ATTR VkDeviceAddress VKAPI_CALL vkGetBufferDeviceAddress(
+    VkDevice                                    device,
+    const VkBufferDeviceAddressInfo*            pInfo);
+
+VKAPI_ATTR uint64_t VKAPI_CALL vkGetBufferOpaqueCaptureAddress(
+    VkDevice                                    device,
+    const VkBufferDeviceAddressInfo*            pInfo);
+
+VKAPI_ATTR uint64_t VKAPI_CALL vkGetDeviceMemoryOpaqueCaptureAddress(
+    VkDevice                                    device,
+    const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo);
+#endif
+
+
+#define VK_VERSION_1_3 1
+// Vulkan 1.3 version number
+#define VK_API_VERSION_1_3 VK_MAKE_API_VERSION(0, 1, 3, 0)// Patch version should always be set to 0
+
+typedef uint64_t VkFlags64;
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPrivateDataSlot)
+
+typedef enum VkPipelineCreationFeedbackFlagBits {
+    VK_PIPELINE_CREATION_FEEDBACK_VALID_BIT = 0x00000001,
+    VK_PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT = 0x00000002,
+    VK_PIPELINE_CREATION_FEEDBACK_BASE_PIPELINE_ACCELERATION_BIT = 0x00000004,
+    VK_PIPELINE_CREATION_FEEDBACK_VALID_BIT_EXT = VK_PIPELINE_CREATION_FEEDBACK_VALID_BIT,
+    VK_PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT_EXT = VK_PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT,
+    VK_PIPELINE_CREATION_FEEDBACK_BASE_PIPELINE_ACCELERATION_BIT_EXT = VK_PIPELINE_CREATION_FEEDBACK_BASE_PIPELINE_ACCELERATION_BIT,
+    VK_PIPELINE_CREATION_FEEDBACK_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkPipelineCreationFeedbackFlagBits;
+typedef VkFlags VkPipelineCreationFeedbackFlags;
+
+typedef enum VkToolPurposeFlagBits {
+    VK_TOOL_PURPOSE_VALIDATION_BIT = 0x00000001,
+    VK_TOOL_PURPOSE_PROFILING_BIT = 0x00000002,
+    VK_TOOL_PURPOSE_TRACING_BIT = 0x00000004,
+    VK_TOOL_PURPOSE_ADDITIONAL_FEATURES_BIT = 0x00000008,
+    VK_TOOL_PURPOSE_MODIFYING_FEATURES_BIT = 0x00000010,
+    VK_TOOL_PURPOSE_DEBUG_REPORTING_BIT_EXT = 0x00000020,
+    VK_TOOL_PURPOSE_DEBUG_MARKERS_BIT_EXT = 0x00000040,
+    VK_TOOL_PURPOSE_VALIDATION_BIT_EXT = VK_TOOL_PURPOSE_VALIDATION_BIT,
+    VK_TOOL_PURPOSE_PROFILING_BIT_EXT = VK_TOOL_PURPOSE_PROFILING_BIT,
+    VK_TOOL_PURPOSE_TRACING_BIT_EXT = VK_TOOL_PURPOSE_TRACING_BIT,
+    VK_TOOL_PURPOSE_ADDITIONAL_FEATURES_BIT_EXT = VK_TOOL_PURPOSE_ADDITIONAL_FEATURES_BIT,
+    VK_TOOL_PURPOSE_MODIFYING_FEATURES_BIT_EXT = VK_TOOL_PURPOSE_MODIFYING_FEATURES_BIT,
+    VK_TOOL_PURPOSE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkToolPurposeFlagBits;
+typedef VkFlags VkToolPurposeFlags;
+typedef VkFlags VkPrivateDataSlotCreateFlags;
+typedef VkFlags64 VkPipelineStageFlags2;
+
+// Flag bits for VkPipelineStageFlagBits2
+typedef VkFlags64 VkPipelineStageFlagBits2;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_NONE = 0ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_NONE_KHR = 0ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT = 0x00000001ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR = 0x00000001ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT = 0x00000002ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT_KHR = 0x00000002ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT = 0x00000004ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT_KHR = 0x00000004ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VERTEX_SHADER_BIT = 0x00000008ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VERTEX_SHADER_BIT_KHR = 0x00000008ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT = 0x00000010ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT_KHR = 0x00000010ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT = 0x00000020ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT_KHR = 0x00000020ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT = 0x00000040ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT_KHR = 0x00000040ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT = 0x00000080ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT_KHR = 0x00000080ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT = 0x00000100ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT_KHR = 0x00000100ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT = 0x00000200ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT_KHR = 0x00000200ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT = 0x00000400ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR = 0x00000400ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT = 0x00000800ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT_KHR = 0x00000800ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT = 0x00001000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT_KHR = 0x00001000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TRANSFER_BIT = 0x00001000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR = 0x00001000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_BOTTOM_OF_PIPE_BIT = 0x00002000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_BOTTOM_OF_PIPE_BIT_KHR = 0x00002000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_HOST_BIT = 0x00004000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_HOST_BIT_KHR = 0x00004000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT = 0x00008000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR = 0x00008000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT = 0x00010000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR = 0x00010000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COPY_BIT = 0x100000000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COPY_BIT_KHR = 0x100000000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_RESOLVE_BIT = 0x200000000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_RESOLVE_BIT_KHR = 0x200000000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_BLIT_BIT = 0x400000000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_BLIT_BIT_KHR = 0x400000000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_CLEAR_BIT = 0x800000000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_CLEAR_BIT_KHR = 0x800000000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_INDEX_INPUT_BIT = 0x1000000000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_INDEX_INPUT_BIT_KHR = 0x1000000000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VERTEX_ATTRIBUTE_INPUT_BIT = 0x2000000000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VERTEX_ATTRIBUTE_INPUT_BIT_KHR = 0x2000000000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_PRE_RASTERIZATION_SHADERS_BIT = 0x4000000000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_PRE_RASTERIZATION_SHADERS_BIT_KHR = 0x4000000000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VIDEO_DECODE_BIT_KHR = 0x04000000ULL;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VIDEO_ENCODE_BIT_KHR = 0x08000000ULL;
+#endif
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT = 0x01000000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT = 0x00040000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_NV = 0x00020000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x00400000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_SHADING_RATE_IMAGE_BIT_NV = 0x00400000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR = 0x02000000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_KHR = 0x00200000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_NV = 0x00200000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_NV = 0x02000000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_FRAGMENT_DENSITY_PROCESS_BIT_EXT = 0x00800000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_NV = 0x00080000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_NV = 0x00100000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_EXT = 0x00080000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_EXT = 0x00100000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_SUBPASS_SHADING_BIT_HUAWEI = 0x8000000000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_INVOCATION_MASK_BIT_HUAWEI = 0x10000000000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_COPY_BIT_KHR = 0x10000000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_MICROMAP_BUILD_BIT_EXT = 0x40000000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_CLUSTER_CULLING_SHADER_BIT_HUAWEI = 0x20000000000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_OPTICAL_FLOW_BIT_NV = 0x20000000ULL;
+
+typedef VkFlags64 VkAccessFlags2;
+
+// Flag bits for VkAccessFlagBits2
+typedef VkFlags64 VkAccessFlagBits2;
+static const VkAccessFlagBits2 VK_ACCESS_2_NONE = 0ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_NONE_KHR = 0ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_INDIRECT_COMMAND_READ_BIT = 0x00000001ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_INDIRECT_COMMAND_READ_BIT_KHR = 0x00000001ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_INDEX_READ_BIT = 0x00000002ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_INDEX_READ_BIT_KHR = 0x00000002ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_VERTEX_ATTRIBUTE_READ_BIT = 0x00000004ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_VERTEX_ATTRIBUTE_READ_BIT_KHR = 0x00000004ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_UNIFORM_READ_BIT = 0x00000008ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_UNIFORM_READ_BIT_KHR = 0x00000008ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT = 0x00000010ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT_KHR = 0x00000010ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_READ_BIT = 0x00000020ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_READ_BIT_KHR = 0x00000020ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_WRITE_BIT = 0x00000040ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_WRITE_BIT_KHR = 0x00000040ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_COLOR_ATTACHMENT_READ_BIT = 0x00000080ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_COLOR_ATTACHMENT_READ_BIT_KHR = 0x00000080ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT = 0x00000100ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT_KHR = 0x00000100ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_READ_BIT = 0x00000200ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_READ_BIT_KHR = 0x00000200ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT = 0x00000400ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT_KHR = 0x00000400ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_TRANSFER_READ_BIT = 0x00000800ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_TRANSFER_READ_BIT_KHR = 0x00000800ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_TRANSFER_WRITE_BIT = 0x00001000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR = 0x00001000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_HOST_READ_BIT = 0x00002000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_HOST_READ_BIT_KHR = 0x00002000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_HOST_WRITE_BIT = 0x00004000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_HOST_WRITE_BIT_KHR = 0x00004000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_MEMORY_READ_BIT = 0x00008000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_MEMORY_READ_BIT_KHR = 0x00008000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_MEMORY_WRITE_BIT = 0x00010000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_MEMORY_WRITE_BIT_KHR = 0x00010000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_SAMPLED_READ_BIT = 0x100000000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_SAMPLED_READ_BIT_KHR = 0x100000000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_STORAGE_READ_BIT = 0x200000000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_STORAGE_READ_BIT_KHR = 0x200000000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT = 0x400000000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT_KHR = 0x400000000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_VIDEO_DECODE_READ_BIT_KHR = 0x800000000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_VIDEO_DECODE_WRITE_BIT_KHR = 0x1000000000ULL;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+static const VkAccessFlagBits2 VK_ACCESS_2_VIDEO_ENCODE_READ_BIT_KHR = 0x2000000000ULL;
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+static const VkAccessFlagBits2 VK_ACCESS_2_VIDEO_ENCODE_WRITE_BIT_KHR = 0x4000000000ULL;
+#endif
+static const VkAccessFlagBits2 VK_ACCESS_2_TRANSFORM_FEEDBACK_WRITE_BIT_EXT = 0x02000000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT = 0x04000000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT = 0x08000000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_CONDITIONAL_RENDERING_READ_BIT_EXT = 0x00100000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_COMMAND_PREPROCESS_READ_BIT_NV = 0x00020000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_COMMAND_PREPROCESS_WRITE_BIT_NV = 0x00040000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR = 0x00800000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_SHADING_RATE_IMAGE_READ_BIT_NV = 0x00800000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_KHR = 0x00200000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_KHR = 0x00400000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_NV = 0x00200000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_NV = 0x00400000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_FRAGMENT_DENSITY_MAP_READ_BIT_EXT = 0x01000000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT = 0x00080000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_DESCRIPTOR_BUFFER_READ_BIT_EXT = 0x20000000000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_INVOCATION_MASK_READ_BIT_HUAWEI = 0x8000000000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_BINDING_TABLE_READ_BIT_KHR = 0x10000000000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_MICROMAP_READ_BIT_EXT = 0x100000000000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_MICROMAP_WRITE_BIT_EXT = 0x200000000000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_OPTICAL_FLOW_READ_BIT_NV = 0x40000000000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_OPTICAL_FLOW_WRITE_BIT_NV = 0x80000000000ULL;
+
+
+typedef enum VkSubmitFlagBits {
+    VK_SUBMIT_PROTECTED_BIT = 0x00000001,
+    VK_SUBMIT_PROTECTED_BIT_KHR = VK_SUBMIT_PROTECTED_BIT,
+    VK_SUBMIT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkSubmitFlagBits;
+typedef VkFlags VkSubmitFlags;
+
+typedef enum VkRenderingFlagBits {
+    VK_RENDERING_CONTENTS_SECONDARY_COMMAND_BUFFERS_BIT = 0x00000001,
+    VK_RENDERING_SUSPENDING_BIT = 0x00000002,
+    VK_RENDERING_RESUMING_BIT = 0x00000004,
+    VK_RENDERING_ENABLE_LEGACY_DITHERING_BIT_EXT = 0x00000008,
+    VK_RENDERING_CONTENTS_SECONDARY_COMMAND_BUFFERS_BIT_KHR = VK_RENDERING_CONTENTS_SECONDARY_COMMAND_BUFFERS_BIT,
+    VK_RENDERING_SUSPENDING_BIT_KHR = VK_RENDERING_SUSPENDING_BIT,
+    VK_RENDERING_RESUMING_BIT_KHR = VK_RENDERING_RESUMING_BIT,
+    VK_RENDERING_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkRenderingFlagBits;
+typedef VkFlags VkRenderingFlags;
+typedef VkFlags64 VkFormatFeatureFlags2;
+
+// Flag bits for VkFormatFeatureFlagBits2
+typedef VkFlags64 VkFormatFeatureFlagBits2;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_BIT = 0x00000001ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_BIT_KHR = 0x00000001ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_IMAGE_BIT = 0x00000002ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_IMAGE_BIT_KHR = 0x00000002ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_IMAGE_ATOMIC_BIT = 0x00000004ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_IMAGE_ATOMIC_BIT_KHR = 0x00000004ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_UNIFORM_TEXEL_BUFFER_BIT = 0x00000008ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_UNIFORM_TEXEL_BUFFER_BIT_KHR = 0x00000008ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_TEXEL_BUFFER_BIT = 0x00000010ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_TEXEL_BUFFER_BIT_KHR = 0x00000010ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_TEXEL_BUFFER_ATOMIC_BIT = 0x00000020ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_TEXEL_BUFFER_ATOMIC_BIT_KHR = 0x00000020ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_VERTEX_BUFFER_BIT = 0x00000040ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_VERTEX_BUFFER_BIT_KHR = 0x00000040ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BIT = 0x00000080ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BIT_KHR = 0x00000080ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BLEND_BIT = 0x00000100ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BLEND_BIT_KHR = 0x00000100ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_DEPTH_STENCIL_ATTACHMENT_BIT = 0x00000200ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_DEPTH_STENCIL_ATTACHMENT_BIT_KHR = 0x00000200ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_BLIT_SRC_BIT = 0x00000400ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_BLIT_SRC_BIT_KHR = 0x00000400ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_BLIT_DST_BIT = 0x00000800ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_BLIT_DST_BIT_KHR = 0x00000800ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_LINEAR_BIT = 0x00001000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_LINEAR_BIT_KHR = 0x00001000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_CUBIC_BIT = 0x00002000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT = 0x00002000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_TRANSFER_SRC_BIT = 0x00004000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_TRANSFER_SRC_BIT_KHR = 0x00004000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_TRANSFER_DST_BIT = 0x00008000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_TRANSFER_DST_BIT_KHR = 0x00008000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_MINMAX_BIT = 0x00010000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_MINMAX_BIT_KHR = 0x00010000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_MIDPOINT_CHROMA_SAMPLES_BIT = 0x00020000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_MIDPOINT_CHROMA_SAMPLES_BIT_KHR = 0x00020000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT = 0x00040000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR = 0x00040000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT = 0x00080000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR = 0x00080000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT = 0x00100000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT_KHR = 0x00100000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT = 0x00200000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT_KHR = 0x00200000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_DISJOINT_BIT = 0x00400000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_DISJOINT_BIT_KHR = 0x00400000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_COSITED_CHROMA_SAMPLES_BIT = 0x00800000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_COSITED_CHROMA_SAMPLES_BIT_KHR = 0x00800000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_READ_WITHOUT_FORMAT_BIT = 0x80000000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_READ_WITHOUT_FORMAT_BIT_KHR = 0x80000000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_WRITE_WITHOUT_FORMAT_BIT = 0x100000000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_WRITE_WITHOUT_FORMAT_BIT_KHR = 0x100000000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_DEPTH_COMPARISON_BIT = 0x200000000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_DEPTH_COMPARISON_BIT_KHR = 0x200000000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_VIDEO_DECODE_OUTPUT_BIT_KHR = 0x02000000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_VIDEO_DECODE_DPB_BIT_KHR = 0x04000000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR = 0x20000000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_FRAGMENT_DENSITY_MAP_BIT_EXT = 0x01000000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x40000000ULL;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_VIDEO_ENCODE_INPUT_BIT_KHR = 0x08000000ULL;
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_VIDEO_ENCODE_DPB_BIT_KHR = 0x10000000ULL;
+#endif
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_LINEAR_COLOR_ATTACHMENT_BIT_NV = 0x4000000000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_WEIGHT_IMAGE_BIT_QCOM = 0x400000000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_WEIGHT_SAMPLED_IMAGE_BIT_QCOM = 0x800000000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_BLOCK_MATCHING_BIT_QCOM = 0x1000000000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_BOX_FILTER_SAMPLED_BIT_QCOM = 0x2000000000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_OPTICAL_FLOW_IMAGE_BIT_NV = 0x10000000000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_OPTICAL_FLOW_VECTOR_BIT_NV = 0x20000000000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_OPTICAL_FLOW_COST_BIT_NV = 0x40000000000ULL;
+
+typedef struct VkPhysicalDeviceVulkan13Features {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           robustImageAccess;
+    VkBool32           inlineUniformBlock;
+    VkBool32           descriptorBindingInlineUniformBlockUpdateAfterBind;
+    VkBool32           pipelineCreationCacheControl;
+    VkBool32           privateData;
+    VkBool32           shaderDemoteToHelperInvocation;
+    VkBool32           shaderTerminateInvocation;
+    VkBool32           subgroupSizeControl;
+    VkBool32           computeFullSubgroups;
+    VkBool32           synchronization2;
+    VkBool32           textureCompressionASTC_HDR;
+    VkBool32           shaderZeroInitializeWorkgroupMemory;
+    VkBool32           dynamicRendering;
+    VkBool32           shaderIntegerDotProduct;
+    VkBool32           maintenance4;
+} VkPhysicalDeviceVulkan13Features;
+
+typedef struct VkPhysicalDeviceVulkan13Properties {
+    VkStructureType       sType;
+    void*                 pNext;
+    uint32_t              minSubgroupSize;
+    uint32_t              maxSubgroupSize;
+    uint32_t              maxComputeWorkgroupSubgroups;
+    VkShaderStageFlags    requiredSubgroupSizeStages;
+    uint32_t              maxInlineUniformBlockSize;
+    uint32_t              maxPerStageDescriptorInlineUniformBlocks;
+    uint32_t              maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks;
+    uint32_t              maxDescriptorSetInlineUniformBlocks;
+    uint32_t              maxDescriptorSetUpdateAfterBindInlineUniformBlocks;
+    uint32_t              maxInlineUniformTotalSize;
+    VkBool32              integerDotProduct8BitUnsignedAccelerated;
+    VkBool32              integerDotProduct8BitSignedAccelerated;
+    VkBool32              integerDotProduct8BitMixedSignednessAccelerated;
+    VkBool32              integerDotProduct4x8BitPackedUnsignedAccelerated;
+    VkBool32              integerDotProduct4x8BitPackedSignedAccelerated;
+    VkBool32              integerDotProduct4x8BitPackedMixedSignednessAccelerated;
+    VkBool32              integerDotProduct16BitUnsignedAccelerated;
+    VkBool32              integerDotProduct16BitSignedAccelerated;
+    VkBool32              integerDotProduct16BitMixedSignednessAccelerated;
+    VkBool32              integerDotProduct32BitUnsignedAccelerated;
+    VkBool32              integerDotProduct32BitSignedAccelerated;
+    VkBool32              integerDotProduct32BitMixedSignednessAccelerated;
+    VkBool32              integerDotProduct64BitUnsignedAccelerated;
+    VkBool32              integerDotProduct64BitSignedAccelerated;
+    VkBool32              integerDotProduct64BitMixedSignednessAccelerated;
+    VkBool32              integerDotProductAccumulatingSaturating8BitUnsignedAccelerated;
+    VkBool32              integerDotProductAccumulatingSaturating8BitSignedAccelerated;
+    VkBool32              integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated;
+    VkBool32              integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated;
+    VkBool32              integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated;
+    VkBool32              integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated;
+    VkBool32              integerDotProductAccumulatingSaturating16BitUnsignedAccelerated;
+    VkBool32              integerDotProductAccumulatingSaturating16BitSignedAccelerated;
+    VkBool32              integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated;
+    VkBool32              integerDotProductAccumulatingSaturating32BitUnsignedAccelerated;
+    VkBool32              integerDotProductAccumulatingSaturating32BitSignedAccelerated;
+    VkBool32              integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated;
+    VkBool32              integerDotProductAccumulatingSaturating64BitUnsignedAccelerated;
+    VkBool32              integerDotProductAccumulatingSaturating64BitSignedAccelerated;
+    VkBool32              integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated;
+    VkDeviceSize          storageTexelBufferOffsetAlignmentBytes;
+    VkBool32              storageTexelBufferOffsetSingleTexelAlignment;
+    VkDeviceSize          uniformTexelBufferOffsetAlignmentBytes;
+    VkBool32              uniformTexelBufferOffsetSingleTexelAlignment;
+    VkDeviceSize          maxBufferSize;
+} VkPhysicalDeviceVulkan13Properties;
+
+typedef struct VkPipelineCreationFeedback {
+    VkPipelineCreationFeedbackFlags    flags;
+    uint64_t                           duration;
+} VkPipelineCreationFeedback;
+
+typedef struct VkPipelineCreationFeedbackCreateInfo {
+    VkStructureType                sType;
+    const void*                    pNext;
+    VkPipelineCreationFeedback*    pPipelineCreationFeedback;
+    uint32_t                       pipelineStageCreationFeedbackCount;
+    VkPipelineCreationFeedback*    pPipelineStageCreationFeedbacks;
+} VkPipelineCreationFeedbackCreateInfo;
+
+typedef struct VkPhysicalDeviceShaderTerminateInvocationFeatures {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           shaderTerminateInvocation;
+} VkPhysicalDeviceShaderTerminateInvocationFeatures;
+
+typedef struct VkPhysicalDeviceToolProperties {
+    VkStructureType       sType;
+    void*                 pNext;
+    char                  name[VK_MAX_EXTENSION_NAME_SIZE];
+    char                  version[VK_MAX_EXTENSION_NAME_SIZE];
+    VkToolPurposeFlags    purposes;
+    char                  description[VK_MAX_DESCRIPTION_SIZE];
+    char                  layer[VK_MAX_EXTENSION_NAME_SIZE];
+} VkPhysicalDeviceToolProperties;
+
+typedef struct VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           shaderDemoteToHelperInvocation;
+} VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures;
+
+typedef struct VkPhysicalDevicePrivateDataFeatures {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           privateData;
+} VkPhysicalDevicePrivateDataFeatures;
+
+typedef struct VkDevicePrivateDataCreateInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           privateDataSlotRequestCount;
+} VkDevicePrivateDataCreateInfo;
+
+typedef struct VkPrivateDataSlotCreateInfo {
+    VkStructureType                 sType;
+    const void*                     pNext;
+    VkPrivateDataSlotCreateFlags    flags;
+} VkPrivateDataSlotCreateInfo;
+
+typedef struct VkPhysicalDevicePipelineCreationCacheControlFeatures {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           pipelineCreationCacheControl;
+} VkPhysicalDevicePipelineCreationCacheControlFeatures;
+
+typedef struct VkMemoryBarrier2 {
+    VkStructureType          sType;
+    const void*              pNext;
+    VkPipelineStageFlags2    srcStageMask;
+    VkAccessFlags2           srcAccessMask;
+    VkPipelineStageFlags2    dstStageMask;
+    VkAccessFlags2           dstAccessMask;
+} VkMemoryBarrier2;
+
+typedef struct VkBufferMemoryBarrier2 {
+    VkStructureType          sType;
+    const void*              pNext;
+    VkPipelineStageFlags2    srcStageMask;
+    VkAccessFlags2           srcAccessMask;
+    VkPipelineStageFlags2    dstStageMask;
+    VkAccessFlags2           dstAccessMask;
+    uint32_t                 srcQueueFamilyIndex;
+    uint32_t                 dstQueueFamilyIndex;
+    VkBuffer                 buffer;
+    VkDeviceSize             offset;
+    VkDeviceSize             size;
+} VkBufferMemoryBarrier2;
+
+typedef struct VkImageMemoryBarrier2 {
+    VkStructureType            sType;
+    const void*                pNext;
+    VkPipelineStageFlags2      srcStageMask;
+    VkAccessFlags2             srcAccessMask;
+    VkPipelineStageFlags2      dstStageMask;
+    VkAccessFlags2             dstAccessMask;
+    VkImageLayout              oldLayout;
+    VkImageLayout              newLayout;
+    uint32_t                   srcQueueFamilyIndex;
+    uint32_t                   dstQueueFamilyIndex;
+    VkImage                    image;
+    VkImageSubresourceRange    subresourceRange;
+} VkImageMemoryBarrier2;
+
+typedef struct VkDependencyInfo {
+    VkStructureType                  sType;
+    const void*                      pNext;
+    VkDependencyFlags                dependencyFlags;
+    uint32_t                         memoryBarrierCount;
+    const VkMemoryBarrier2*          pMemoryBarriers;
+    uint32_t                         bufferMemoryBarrierCount;
+    const VkBufferMemoryBarrier2*    pBufferMemoryBarriers;
+    uint32_t                         imageMemoryBarrierCount;
+    const VkImageMemoryBarrier2*     pImageMemoryBarriers;
+} VkDependencyInfo;
+
+typedef struct VkSemaphoreSubmitInfo {
+    VkStructureType          sType;
+    const void*              pNext;
+    VkSemaphore              semaphore;
+    uint64_t                 value;
+    VkPipelineStageFlags2    stageMask;
+    uint32_t                 deviceIndex;
+} VkSemaphoreSubmitInfo;
+
+typedef struct VkCommandBufferSubmitInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkCommandBuffer    commandBuffer;
+    uint32_t           deviceMask;
+} VkCommandBufferSubmitInfo;
+
+typedef struct VkSubmitInfo2 {
+    VkStructureType                     sType;
+    const void*                         pNext;
+    VkSubmitFlags                       flags;
+    uint32_t                            waitSemaphoreInfoCount;
+    const VkSemaphoreSubmitInfo*        pWaitSemaphoreInfos;
+    uint32_t                            commandBufferInfoCount;
+    const VkCommandBufferSubmitInfo*    pCommandBufferInfos;
+    uint32_t                            signalSemaphoreInfoCount;
+    const VkSemaphoreSubmitInfo*        pSignalSemaphoreInfos;
+} VkSubmitInfo2;
+
+typedef struct VkPhysicalDeviceSynchronization2Features {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           synchronization2;
+} VkPhysicalDeviceSynchronization2Features;
+
+typedef struct VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           shaderZeroInitializeWorkgroupMemory;
+} VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures;
+
+typedef struct VkPhysicalDeviceImageRobustnessFeatures {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           robustImageAccess;
+} VkPhysicalDeviceImageRobustnessFeatures;
+
+// Parameter structures for the extended "2" transfer commands
+// (vkCmdCopyBuffer2 .. vkCmdResolveImage2, declared further down). Each
+// *Info2 struct carries the src/dst resources plus a counted pRegions array
+// of the matching per-region struct.
+typedef struct VkBufferCopy2 {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkDeviceSize       srcOffset;
+    VkDeviceSize       dstOffset;
+    VkDeviceSize       size;
+} VkBufferCopy2;
+
+typedef struct VkCopyBufferInfo2 {
+    VkStructureType         sType;
+    const void*             pNext;
+    VkBuffer                srcBuffer;
+    VkBuffer                dstBuffer;
+    uint32_t                regionCount;
+    const VkBufferCopy2*    pRegions;
+} VkCopyBufferInfo2;
+
+typedef struct VkImageCopy2 {
+    VkStructureType             sType;
+    const void*                 pNext;
+    VkImageSubresourceLayers    srcSubresource;
+    VkOffset3D                  srcOffset;
+    VkImageSubresourceLayers    dstSubresource;
+    VkOffset3D                  dstOffset;
+    VkExtent3D                  extent;
+} VkImageCopy2;
+
+typedef struct VkCopyImageInfo2 {
+    VkStructureType        sType;
+    const void*            pNext;
+    VkImage                srcImage;
+    VkImageLayout          srcImageLayout;
+    VkImage                dstImage;
+    VkImageLayout          dstImageLayout;
+    uint32_t               regionCount;
+    const VkImageCopy2*    pRegions;
+} VkCopyImageInfo2;
+
+// Shared region struct for both buffer-to-image and image-to-buffer copies.
+typedef struct VkBufferImageCopy2 {
+    VkStructureType             sType;
+    const void*                 pNext;
+    VkDeviceSize                bufferOffset;
+    uint32_t                    bufferRowLength;
+    uint32_t                    bufferImageHeight;
+    VkImageSubresourceLayers    imageSubresource;
+    VkOffset3D                  imageOffset;
+    VkExtent3D                  imageExtent;
+} VkBufferImageCopy2;
+
+typedef struct VkCopyBufferToImageInfo2 {
+    VkStructureType              sType;
+    const void*                  pNext;
+    VkBuffer                     srcBuffer;
+    VkImage                      dstImage;
+    VkImageLayout                dstImageLayout;
+    uint32_t                     regionCount;
+    const VkBufferImageCopy2*    pRegions;
+} VkCopyBufferToImageInfo2;
+
+typedef struct VkCopyImageToBufferInfo2 {
+    VkStructureType              sType;
+    const void*                  pNext;
+    VkImage                      srcImage;
+    VkImageLayout                srcImageLayout;
+    VkBuffer                     dstBuffer;
+    uint32_t                     regionCount;
+    const VkBufferImageCopy2*    pRegions;
+} VkCopyImageToBufferInfo2;
+
+// Blit regions use two corner offsets per side (srcOffsets/dstOffsets pairs)
+// rather than an offset + extent.
+typedef struct VkImageBlit2 {
+    VkStructureType             sType;
+    const void*                 pNext;
+    VkImageSubresourceLayers    srcSubresource;
+    VkOffset3D                  srcOffsets[2];
+    VkImageSubresourceLayers    dstSubresource;
+    VkOffset3D                  dstOffsets[2];
+} VkImageBlit2;
+
+typedef struct VkBlitImageInfo2 {
+    VkStructureType        sType;
+    const void*            pNext;
+    VkImage                srcImage;
+    VkImageLayout          srcImageLayout;
+    VkImage                dstImage;
+    VkImageLayout          dstImageLayout;
+    uint32_t               regionCount;
+    const VkImageBlit2*    pRegions;
+    VkFilter               filter;
+} VkBlitImageInfo2;
+
+typedef struct VkImageResolve2 {
+    VkStructureType             sType;
+    const void*                 pNext;
+    VkImageSubresourceLayers    srcSubresource;
+    VkOffset3D                  srcOffset;
+    VkImageSubresourceLayers    dstSubresource;
+    VkOffset3D                  dstOffset;
+    VkExtent3D                  extent;
+} VkImageResolve2;
+
+typedef struct VkResolveImageInfo2 {
+    VkStructureType           sType;
+    const void*               pNext;
+    VkImage                   srcImage;
+    VkImageLayout             srcImageLayout;
+    VkImage                   dstImage;
+    VkImageLayout             dstImageLayout;
+    uint32_t                  regionCount;
+    const VkImageResolve2*    pRegions;
+} VkResolveImageInfo2;
+
+// Subgroup-size-control, inline-uniform-block and ASTC-HDR structures.
+// NOTE(review): these appear to be the core (suffix-less) versions of the
+// corresponding EXT structs — confirm against the Vulkan 1.3 spec.
+typedef struct VkPhysicalDeviceSubgroupSizeControlFeatures {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           subgroupSizeControl;
+    VkBool32           computeFullSubgroups;
+} VkPhysicalDeviceSubgroupSizeControlFeatures;
+
+typedef struct VkPhysicalDeviceSubgroupSizeControlProperties {
+    VkStructureType       sType;
+    void*                 pNext;
+    uint32_t              minSubgroupSize;
+    uint32_t              maxSubgroupSize;
+    uint32_t              maxComputeWorkgroupSubgroups;
+    VkShaderStageFlags    requiredSubgroupSizeStages;
+} VkPhysicalDeviceSubgroupSizeControlProperties;
+
+typedef struct VkPipelineShaderStageRequiredSubgroupSizeCreateInfo {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           requiredSubgroupSize;
+} VkPipelineShaderStageRequiredSubgroupSizeCreateInfo;
+
+typedef struct VkPhysicalDeviceInlineUniformBlockFeatures {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           inlineUniformBlock;
+    VkBool32           descriptorBindingInlineUniformBlockUpdateAfterBind;
+} VkPhysicalDeviceInlineUniformBlockFeatures;
+
+typedef struct VkPhysicalDeviceInlineUniformBlockProperties {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           maxInlineUniformBlockSize;
+    uint32_t           maxPerStageDescriptorInlineUniformBlocks;
+    uint32_t           maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks;
+    uint32_t           maxDescriptorSetInlineUniformBlocks;
+    uint32_t           maxDescriptorSetUpdateAfterBindInlineUniformBlocks;
+} VkPhysicalDeviceInlineUniformBlockProperties;
+
+// Inline uniform block payload for vkUpdateDescriptorSets-style writes:
+// dataSize bytes read from pData.
+typedef struct VkWriteDescriptorSetInlineUniformBlock {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           dataSize;
+    const void*        pData;
+} VkWriteDescriptorSetInlineUniformBlock;
+
+typedef struct VkDescriptorPoolInlineUniformBlockCreateInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           maxInlineUniformBlockBindings;
+} VkDescriptorPoolInlineUniformBlockCreateInfo;
+
+typedef struct VkPhysicalDeviceTextureCompressionASTCHDRFeatures {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           textureCompressionASTC_HDR;
+} VkPhysicalDeviceTextureCompressionASTCHDRFeatures;
+
+// Dynamic-rendering structures. VkRenderingInfo is the argument of
+// vkCmdBeginRendering (declared below); the *CreateInfo and
+// *InheritanceRenderingInfo structs mirror its attachment-format fields for
+// pipeline creation and secondary-command-buffer inheritance respectively.
+typedef struct VkRenderingAttachmentInfo {
+    VkStructureType          sType;
+    const void*              pNext;
+    VkImageView              imageView;
+    VkImageLayout            imageLayout;
+    VkResolveModeFlagBits    resolveMode;
+    VkImageView              resolveImageView;
+    VkImageLayout            resolveImageLayout;
+    VkAttachmentLoadOp       loadOp;
+    VkAttachmentStoreOp      storeOp;
+    VkClearValue             clearValue;
+} VkRenderingAttachmentInfo;
+
+typedef struct VkRenderingInfo {
+    VkStructureType                     sType;
+    const void*                         pNext;
+    VkRenderingFlags                    flags;
+    VkRect2D                            renderArea;
+    uint32_t                            layerCount;
+    uint32_t                            viewMask;
+    uint32_t                            colorAttachmentCount;
+    const VkRenderingAttachmentInfo*    pColorAttachments;
+    const VkRenderingAttachmentInfo*    pDepthAttachment;
+    const VkRenderingAttachmentInfo*    pStencilAttachment;
+} VkRenderingInfo;
+
+typedef struct VkPipelineRenderingCreateInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           viewMask;
+    uint32_t           colorAttachmentCount;
+    const VkFormat*    pColorAttachmentFormats;
+    VkFormat           depthAttachmentFormat;
+    VkFormat           stencilAttachmentFormat;
+} VkPipelineRenderingCreateInfo;
+
+typedef struct VkPhysicalDeviceDynamicRenderingFeatures {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           dynamicRendering;
+} VkPhysicalDeviceDynamicRenderingFeatures;
+
+typedef struct VkCommandBufferInheritanceRenderingInfo {
+    VkStructureType          sType;
+    const void*              pNext;
+    VkRenderingFlags         flags;
+    uint32_t                 viewMask;
+    uint32_t                 colorAttachmentCount;
+    const VkFormat*          pColorAttachmentFormats;
+    VkFormat                 depthAttachmentFormat;
+    VkFormat                 stencilAttachmentFormat;
+    VkSampleCountFlagBits    rasterizationSamples;
+} VkCommandBufferInheritanceRenderingInfo;
+
+// Shader integer dot-product feature/property structures and texel-buffer
+// alignment limits. The property struct enumerates one acceleration flag per
+// (bit-width x signedness x saturating-accumulation) combination.
+typedef struct VkPhysicalDeviceShaderIntegerDotProductFeatures {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           shaderIntegerDotProduct;
+} VkPhysicalDeviceShaderIntegerDotProductFeatures;
+
+typedef struct VkPhysicalDeviceShaderIntegerDotProductProperties {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           integerDotProduct8BitUnsignedAccelerated;
+    VkBool32           integerDotProduct8BitSignedAccelerated;
+    VkBool32           integerDotProduct8BitMixedSignednessAccelerated;
+    VkBool32           integerDotProduct4x8BitPackedUnsignedAccelerated;
+    VkBool32           integerDotProduct4x8BitPackedSignedAccelerated;
+    VkBool32           integerDotProduct4x8BitPackedMixedSignednessAccelerated;
+    VkBool32           integerDotProduct16BitUnsignedAccelerated;
+    VkBool32           integerDotProduct16BitSignedAccelerated;
+    VkBool32           integerDotProduct16BitMixedSignednessAccelerated;
+    VkBool32           integerDotProduct32BitUnsignedAccelerated;
+    VkBool32           integerDotProduct32BitSignedAccelerated;
+    VkBool32           integerDotProduct32BitMixedSignednessAccelerated;
+    VkBool32           integerDotProduct64BitUnsignedAccelerated;
+    VkBool32           integerDotProduct64BitSignedAccelerated;
+    VkBool32           integerDotProduct64BitMixedSignednessAccelerated;
+    VkBool32           integerDotProductAccumulatingSaturating8BitUnsignedAccelerated;
+    VkBool32           integerDotProductAccumulatingSaturating8BitSignedAccelerated;
+    VkBool32           integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated;
+    VkBool32           integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated;
+    VkBool32           integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated;
+    VkBool32           integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated;
+    VkBool32           integerDotProductAccumulatingSaturating16BitUnsignedAccelerated;
+    VkBool32           integerDotProductAccumulatingSaturating16BitSignedAccelerated;
+    VkBool32           integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated;
+    VkBool32           integerDotProductAccumulatingSaturating32BitUnsignedAccelerated;
+    VkBool32           integerDotProductAccumulatingSaturating32BitSignedAccelerated;
+    VkBool32           integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated;
+    VkBool32           integerDotProductAccumulatingSaturating64BitUnsignedAccelerated;
+    VkBool32           integerDotProductAccumulatingSaturating64BitSignedAccelerated;
+    VkBool32           integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated;
+} VkPhysicalDeviceShaderIntegerDotProductProperties;
+
+typedef struct VkPhysicalDeviceTexelBufferAlignmentProperties {
+    VkStructureType    sType;
+    void*              pNext;
+    VkDeviceSize       storageTexelBufferOffsetAlignmentBytes;
+    VkBool32           storageTexelBufferOffsetSingleTexelAlignment;
+    VkDeviceSize       uniformTexelBufferOffsetAlignmentBytes;
+    VkBool32           uniformTexelBufferOffsetSingleTexelAlignment;
+} VkPhysicalDeviceTexelBufferAlignmentProperties;
+
+// Format/maintenance property structs, plus the input structs for
+// vkGetDeviceBufferMemoryRequirements / vkGetDeviceImageMemoryRequirements
+// (declared below), which take a *CreateInfo instead of a created handle so
+// requirements can be queried before the resource exists.
+typedef struct VkFormatProperties3 {
+    VkStructureType          sType;
+    void*                    pNext;
+    VkFormatFeatureFlags2    linearTilingFeatures;
+    VkFormatFeatureFlags2    optimalTilingFeatures;
+    VkFormatFeatureFlags2    bufferFeatures;
+} VkFormatProperties3;
+
+typedef struct VkPhysicalDeviceMaintenance4Features {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           maintenance4;
+} VkPhysicalDeviceMaintenance4Features;
+
+typedef struct VkPhysicalDeviceMaintenance4Properties {
+    VkStructureType    sType;
+    void*              pNext;
+    VkDeviceSize       maxBufferSize;
+} VkPhysicalDeviceMaintenance4Properties;
+
+typedef struct VkDeviceBufferMemoryRequirements {
+    VkStructureType              sType;
+    const void*                  pNext;
+    const VkBufferCreateInfo*    pCreateInfo;
+} VkDeviceBufferMemoryRequirements;
+
+typedef struct VkDeviceImageMemoryRequirements {
+    VkStructureType             sType;
+    const void*                 pNext;
+    const VkImageCreateInfo*    pCreateInfo;
+    VkImageAspectFlagBits       planeAspect;
+} VkDeviceImageMemoryRequirements;
+
+// Function-pointer typedefs for the commands declared in this section, used
+// when entry points are resolved dynamically (e.g. with VK_NO_PROTOTYPES
+// defined). Intra-line spacing is kept exactly as in the upstream header.
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceToolProperties)(VkPhysicalDevice physicalDevice, uint32_t* pToolCount, VkPhysicalDeviceToolProperties* pToolProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkCreatePrivateDataSlot)(VkDevice device, const VkPrivateDataSlotCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPrivateDataSlot* pPrivateDataSlot);
+typedef void (VKAPI_PTR *PFN_vkDestroyPrivateDataSlot)(VkDevice device, VkPrivateDataSlot privateDataSlot, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkSetPrivateData)(VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t data);
+typedef void (VKAPI_PTR *PFN_vkGetPrivateData)(VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t* pData);
+typedef void (VKAPI_PTR *PFN_vkCmdSetEvent2)(VkCommandBuffer                   commandBuffer, VkEvent                                             event, const VkDependencyInfo*                             pDependencyInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdResetEvent2)(VkCommandBuffer                   commandBuffer, VkEvent                                             event, VkPipelineStageFlags2               stageMask);
+typedef void (VKAPI_PTR *PFN_vkCmdWaitEvents2)(VkCommandBuffer                   commandBuffer, uint32_t                                            eventCount, const VkEvent*                     pEvents, const VkDependencyInfo*            pDependencyInfos);
+typedef void (VKAPI_PTR *PFN_vkCmdPipelineBarrier2)(VkCommandBuffer                   commandBuffer, const VkDependencyInfo*                             pDependencyInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdWriteTimestamp2)(VkCommandBuffer                   commandBuffer, VkPipelineStageFlags2               stage, VkQueryPool                                         queryPool, uint32_t                                            query);
+typedef VkResult (VKAPI_PTR *PFN_vkQueueSubmit2)(VkQueue                           queue, uint32_t                            submitCount, const VkSubmitInfo2*              pSubmits, VkFence           fence);
+typedef void (VKAPI_PTR *PFN_vkCmdCopyBuffer2)(VkCommandBuffer commandBuffer, const VkCopyBufferInfo2* pCopyBufferInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdCopyImage2)(VkCommandBuffer commandBuffer, const VkCopyImageInfo2* pCopyImageInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdCopyBufferToImage2)(VkCommandBuffer commandBuffer, const VkCopyBufferToImageInfo2* pCopyBufferToImageInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdCopyImageToBuffer2)(VkCommandBuffer commandBuffer, const VkCopyImageToBufferInfo2* pCopyImageToBufferInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdBlitImage2)(VkCommandBuffer commandBuffer, const VkBlitImageInfo2* pBlitImageInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdResolveImage2)(VkCommandBuffer commandBuffer, const VkResolveImageInfo2* pResolveImageInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdBeginRendering)(VkCommandBuffer                   commandBuffer, const VkRenderingInfo*                              pRenderingInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdEndRendering)(VkCommandBuffer                   commandBuffer);
+typedef void (VKAPI_PTR *PFN_vkCmdSetCullMode)(VkCommandBuffer commandBuffer, VkCullModeFlags cullMode);
+typedef void (VKAPI_PTR *PFN_vkCmdSetFrontFace)(VkCommandBuffer commandBuffer, VkFrontFace frontFace);
+typedef void (VKAPI_PTR *PFN_vkCmdSetPrimitiveTopology)(VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology);
+typedef void (VKAPI_PTR *PFN_vkCmdSetViewportWithCount)(VkCommandBuffer commandBuffer, uint32_t viewportCount, const VkViewport* pViewports);
+typedef void (VKAPI_PTR *PFN_vkCmdSetScissorWithCount)(VkCommandBuffer commandBuffer, uint32_t scissorCount, const VkRect2D* pScissors);
+typedef void (VKAPI_PTR *PFN_vkCmdBindVertexBuffers2)(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes, const VkDeviceSize* pStrides);
+typedef void (VKAPI_PTR *PFN_vkCmdSetDepthTestEnable)(VkCommandBuffer commandBuffer, VkBool32 depthTestEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetDepthWriteEnable)(VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetDepthCompareOp)(VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp);
+typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBoundsTestEnable)(VkCommandBuffer commandBuffer, VkBool32 depthBoundsTestEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetStencilTestEnable)(VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetStencilOp)(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, VkStencilOp failOp, VkStencilOp passOp, VkStencilOp depthFailOp, VkCompareOp compareOp);
+typedef void (VKAPI_PTR *PFN_vkCmdSetRasterizerDiscardEnable)(VkCommandBuffer commandBuffer, VkBool32 rasterizerDiscardEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBiasEnable)(VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetPrimitiveRestartEnable)(VkCommandBuffer commandBuffer, VkBool32 primitiveRestartEnable);
+typedef void (VKAPI_PTR *PFN_vkGetDeviceBufferMemoryRequirements)(VkDevice device, const VkDeviceBufferMemoryRequirements* pInfo, VkMemoryRequirements2* pMemoryRequirements);
+typedef void (VKAPI_PTR *PFN_vkGetDeviceImageMemoryRequirements)(VkDevice device, const VkDeviceImageMemoryRequirements* pInfo, VkMemoryRequirements2* pMemoryRequirements);
+typedef void (VKAPI_PTR *PFN_vkGetDeviceImageSparseMemoryRequirements)(VkDevice device, const VkDeviceImageMemoryRequirements* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements);
+
+// Prototypes for the same commands as the PFN_* typedefs above; compiled only
+// when the application has not defined VK_NO_PROTOTYPES.
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceToolProperties(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pToolCount,
+    VkPhysicalDeviceToolProperties*             pToolProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreatePrivateDataSlot(
+    VkDevice                                    device,
+    const VkPrivateDataSlotCreateInfo*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPrivateDataSlot*                          pPrivateDataSlot);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyPrivateDataSlot(
+    VkDevice                                    device,
+    VkPrivateDataSlot                           privateDataSlot,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkSetPrivateData(
+    VkDevice                                    device,
+    VkObjectType                                objectType,
+    uint64_t                                    objectHandle,
+    VkPrivateDataSlot                           privateDataSlot,
+    uint64_t                                    data);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPrivateData(
+    VkDevice                                    device,
+    VkObjectType                                objectType,
+    uint64_t                                    objectHandle,
+    VkPrivateDataSlot                           privateDataSlot,
+    uint64_t*                                   pData);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetEvent2(
+    VkCommandBuffer                             commandBuffer,
+    VkEvent                                     event,
+    const VkDependencyInfo*                     pDependencyInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdResetEvent2(
+    VkCommandBuffer                             commandBuffer,
+    VkEvent                                     event,
+    VkPipelineStageFlags2                       stageMask);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdWaitEvents2(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    eventCount,
+    const VkEvent*                              pEvents,
+    const VkDependencyInfo*                     pDependencyInfos);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdPipelineBarrier2(
+    VkCommandBuffer                             commandBuffer,
+    const VkDependencyInfo*                     pDependencyInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdWriteTimestamp2(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineStageFlags2                       stage,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkQueueSubmit2(
+    VkQueue                                     queue,
+    uint32_t                                    submitCount,
+    const VkSubmitInfo2*                        pSubmits,
+    VkFence                                     fence);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyBuffer2(
+    VkCommandBuffer                             commandBuffer,
+    const VkCopyBufferInfo2*                    pCopyBufferInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyImage2(
+    VkCommandBuffer                             commandBuffer,
+    const VkCopyImageInfo2*                     pCopyImageInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyBufferToImage2(
+    VkCommandBuffer                             commandBuffer,
+    const VkCopyBufferToImageInfo2*             pCopyBufferToImageInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyImageToBuffer2(
+    VkCommandBuffer                             commandBuffer,
+    const VkCopyImageToBufferInfo2*             pCopyImageToBufferInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBlitImage2(
+    VkCommandBuffer                             commandBuffer,
+    const VkBlitImageInfo2*                     pBlitImageInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdResolveImage2(
+    VkCommandBuffer                             commandBuffer,
+    const VkResolveImageInfo2*                  pResolveImageInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBeginRendering(
+    VkCommandBuffer                             commandBuffer,
+    const VkRenderingInfo*                      pRenderingInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdEndRendering(
+    VkCommandBuffer                             commandBuffer);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetCullMode(
+    VkCommandBuffer                             commandBuffer,
+    VkCullModeFlags                             cullMode);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetFrontFace(
+    VkCommandBuffer                             commandBuffer,
+    VkFrontFace                                 frontFace);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetPrimitiveTopology(
+    VkCommandBuffer                             commandBuffer,
+    VkPrimitiveTopology                         primitiveTopology);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportWithCount(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    viewportCount,
+    const VkViewport*                           pViewports);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetScissorWithCount(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    scissorCount,
+    const VkRect2D*                             pScissors);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBindVertexBuffers2(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstBinding,
+    uint32_t                                    bindingCount,
+    const VkBuffer*                             pBuffers,
+    const VkDeviceSize*                         pOffsets,
+    const VkDeviceSize*                         pSizes,
+    const VkDeviceSize*                         pStrides);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthTestEnable(
+    VkCommandBuffer                             commandBuffer,
+    VkBool32                                    depthTestEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthWriteEnable(
+    VkCommandBuffer                             commandBuffer,
+    VkBool32                                    depthWriteEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthCompareOp(
+    VkCommandBuffer                             commandBuffer,
+    VkCompareOp                                 depthCompareOp);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBoundsTestEnable(
+    VkCommandBuffer                             commandBuffer,
+    VkBool32                                    depthBoundsTestEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilTestEnable(
+    VkCommandBuffer                             commandBuffer,
+    VkBool32                                    stencilTestEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilOp(
+    VkCommandBuffer                             commandBuffer,
+    VkStencilFaceFlags                          faceMask,
+    VkStencilOp                                 failOp,
+    VkStencilOp                                 passOp,
+    VkStencilOp                                 depthFailOp,
+    VkCompareOp                                 compareOp);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetRasterizerDiscardEnable(
+    VkCommandBuffer                             commandBuffer,
+    VkBool32                                    rasterizerDiscardEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBiasEnable(
+    VkCommandBuffer                             commandBuffer,
+    VkBool32                                    depthBiasEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetPrimitiveRestartEnable(
+    VkCommandBuffer                             commandBuffer,
+    VkBool32                                    primitiveRestartEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkGetDeviceBufferMemoryRequirements(
+    VkDevice                                    device,
+    const VkDeviceBufferMemoryRequirements*     pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements);
+
+VKAPI_ATTR void VKAPI_CALL vkGetDeviceImageMemoryRequirements(
+    VkDevice                                    device,
+    const VkDeviceImageMemoryRequirements*      pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements);
+
+VKAPI_ATTR void VKAPI_CALL vkGetDeviceImageSparseMemoryRequirements(
+    VkDevice                                    device,
+    const VkDeviceImageMemoryRequirements*      pInfo,
+    uint32_t*                                   pSparseMemoryRequirementCount,
+    VkSparseImageMemoryRequirements2*           pSparseMemoryRequirements);
+#endif
+
+
+// --- VK_KHR_surface ---------------------------------------------------------
+// VkSurfaceKHR is a non-dispatchable handle (per VK_DEFINE_NON_DISPATCHABLE_HANDLE).
+#define VK_KHR_surface 1
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSurfaceKHR)
+#define VK_KHR_SURFACE_SPEC_VERSION       25
+#define VK_KHR_SURFACE_EXTENSION_NAME     "VK_KHR_surface"
+
+// Presentation modes for VK_KHR_surface. NOTE(review): the 1000111xxx values
+// follow Vulkan's extension-enum numbering scheme.
+typedef enum VkPresentModeKHR {
+    VK_PRESENT_MODE_IMMEDIATE_KHR = 0,
+    VK_PRESENT_MODE_MAILBOX_KHR = 1,
+    VK_PRESENT_MODE_FIFO_KHR = 2,
+    VK_PRESENT_MODE_FIFO_RELAXED_KHR = 3,
+    VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR = 1000111000,
+    VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR = 1000111001,
+    VK_PRESENT_MODE_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkPresentModeKHR;
+
+// Color spaces; the two value-equal entries near the end are
+// backwards-compatibility aliases of earlier names.
+typedef enum VkColorSpaceKHR {
+    VK_COLOR_SPACE_SRGB_NONLINEAR_KHR = 0,
+    VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT = 1000104001,
+    VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT = 1000104002,
+    VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT = 1000104003,
+    VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT = 1000104004,
+    VK_COLOR_SPACE_BT709_LINEAR_EXT = 1000104005,
+    VK_COLOR_SPACE_BT709_NONLINEAR_EXT = 1000104006,
+    VK_COLOR_SPACE_BT2020_LINEAR_EXT = 1000104007,
+    VK_COLOR_SPACE_HDR10_ST2084_EXT = 1000104008,
+    VK_COLOR_SPACE_DOLBYVISION_EXT = 1000104009,
+    VK_COLOR_SPACE_HDR10_HLG_EXT = 1000104010,
+    VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT = 1000104011,
+    VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT = 1000104012,
+    VK_COLOR_SPACE_PASS_THROUGH_EXT = 1000104013,
+    VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT = 1000104014,
+    VK_COLOR_SPACE_DISPLAY_NATIVE_AMD = 1000213000,
+    VK_COLORSPACE_SRGB_NONLINEAR_KHR = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR,
+    VK_COLOR_SPACE_DCI_P3_LINEAR_EXT = VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT,
+    VK_COLOR_SPACE_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkColorSpaceKHR;
+
+// Surface transform / composite-alpha flag bits and the query-result structs
+// returned by the vkGetPhysicalDeviceSurface* commands below.
+// Note: VkSurfaceCapabilitiesKHR and VkSurfaceFormatKHR have no sType/pNext,
+// so they cannot be extended via a pNext chain.
+typedef enum VkSurfaceTransformFlagBitsKHR {
+    VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR = 0x00000001,
+    VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR = 0x00000002,
+    VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR = 0x00000004,
+    VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR = 0x00000008,
+    VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR = 0x00000010,
+    VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR = 0x00000020,
+    VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR = 0x00000040,
+    VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR = 0x00000080,
+    VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR = 0x00000100,
+    VK_SURFACE_TRANSFORM_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkSurfaceTransformFlagBitsKHR;
+
+typedef enum VkCompositeAlphaFlagBitsKHR {
+    VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR = 0x00000001,
+    VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR = 0x00000002,
+    VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR = 0x00000004,
+    VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR = 0x00000008,
+    VK_COMPOSITE_ALPHA_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkCompositeAlphaFlagBitsKHR;
+typedef VkFlags VkCompositeAlphaFlagsKHR;
+typedef VkFlags VkSurfaceTransformFlagsKHR;
+typedef struct VkSurfaceCapabilitiesKHR {
+    uint32_t                         minImageCount;
+    uint32_t                         maxImageCount;
+    VkExtent2D                       currentExtent;
+    VkExtent2D                       minImageExtent;
+    VkExtent2D                       maxImageExtent;
+    uint32_t                         maxImageArrayLayers;
+    VkSurfaceTransformFlagsKHR       supportedTransforms;
+    VkSurfaceTransformFlagBitsKHR    currentTransform;
+    VkCompositeAlphaFlagsKHR         supportedCompositeAlpha;
+    VkImageUsageFlags                supportedUsageFlags;
+} VkSurfaceCapabilitiesKHR;
+
+typedef struct VkSurfaceFormatKHR {
+    VkFormat           format;
+    VkColorSpaceKHR    colorSpace;
+} VkSurfaceFormatKHR;
+
+// Function-pointer typedefs for the VK_KHR_surface commands.
+typedef void (VKAPI_PTR *PFN_vkDestroySurfaceKHR)(VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, VkSurfaceKHR surface, VkBool32* pSupported);
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilitiesKHR* pSurfaceCapabilities);
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceFormatsKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pSurfaceFormatCount, VkSurfaceFormatKHR* pSurfaceFormats);
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfacePresentModesKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes);
+
+// VK_KHR_surface prototypes (omitted when VK_NO_PROTOTYPES is defined).
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkDestroySurfaceKHR(
+    VkInstance                                  instance,
+    VkSurfaceKHR                                surface,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    VkSurfaceKHR                                surface,
+    VkBool32*                                   pSupported);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilitiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    VkSurfaceCapabilitiesKHR*                   pSurfaceCapabilities);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceFormatsKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    uint32_t*                                   pSurfaceFormatCount,
+    VkSurfaceFormatKHR*                         pSurfaceFormats);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfacePresentModesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    uint32_t*                                   pPresentModeCount,
+    VkPresentModeKHR*                           pPresentModes);
+#endif
+
+
+#define VK_KHR_swapchain 1
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSwapchainKHR)
+#define VK_KHR_SWAPCHAIN_SPEC_VERSION     70
+#define VK_KHR_SWAPCHAIN_EXTENSION_NAME   "VK_KHR_swapchain"
+
+typedef enum VkSwapchainCreateFlagBitsKHR {
+    VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR = 0x00000001,
+    VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR = 0x00000002,
+    VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR = 0x00000004,
+    VK_SWAPCHAIN_CREATE_DEFERRED_MEMORY_ALLOCATION_BIT_EXT = 0x00000008,
+    VK_SWAPCHAIN_CREATE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkSwapchainCreateFlagBitsKHR;
+typedef VkFlags VkSwapchainCreateFlagsKHR;
+
+typedef enum VkDeviceGroupPresentModeFlagBitsKHR {
+    VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR = 0x00000001,
+    VK_DEVICE_GROUP_PRESENT_MODE_REMOTE_BIT_KHR = 0x00000002,
+    VK_DEVICE_GROUP_PRESENT_MODE_SUM_BIT_KHR = 0x00000004,
+    VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_MULTI_DEVICE_BIT_KHR = 0x00000008,
+    VK_DEVICE_GROUP_PRESENT_MODE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkDeviceGroupPresentModeFlagBitsKHR;
+typedef VkFlags VkDeviceGroupPresentModeFlagsKHR;
+typedef struct VkSwapchainCreateInfoKHR {
+    VkStructureType                  sType;
+    const void*                      pNext;
+    VkSwapchainCreateFlagsKHR        flags;
+    VkSurfaceKHR                     surface;
+    uint32_t                         minImageCount;
+    VkFormat                         imageFormat;
+    VkColorSpaceKHR                  imageColorSpace;
+    VkExtent2D                       imageExtent;
+    uint32_t                         imageArrayLayers;
+    VkImageUsageFlags                imageUsage;
+    VkSharingMode                    imageSharingMode;
+    uint32_t                         queueFamilyIndexCount;
+    const uint32_t*                  pQueueFamilyIndices;
+    VkSurfaceTransformFlagBitsKHR    preTransform;
+    VkCompositeAlphaFlagBitsKHR      compositeAlpha;
+    VkPresentModeKHR                 presentMode;
+    VkBool32                         clipped;
+    VkSwapchainKHR                   oldSwapchain;
+} VkSwapchainCreateInfoKHR;
+
+typedef struct VkPresentInfoKHR {
+    VkStructureType          sType;
+    const void*              pNext;
+    uint32_t                 waitSemaphoreCount;
+    const VkSemaphore*       pWaitSemaphores;
+    uint32_t                 swapchainCount;
+    const VkSwapchainKHR*    pSwapchains;
+    const uint32_t*          pImageIndices;
+    VkResult*                pResults;
+} VkPresentInfoKHR;
+
+typedef struct VkImageSwapchainCreateInfoKHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkSwapchainKHR     swapchain;
+} VkImageSwapchainCreateInfoKHR;
+
+typedef struct VkBindImageMemorySwapchainInfoKHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkSwapchainKHR     swapchain;
+    uint32_t           imageIndex;
+} VkBindImageMemorySwapchainInfoKHR;
+
+typedef struct VkAcquireNextImageInfoKHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkSwapchainKHR     swapchain;
+    uint64_t           timeout;
+    VkSemaphore        semaphore;
+    VkFence            fence;
+    uint32_t           deviceMask;
+} VkAcquireNextImageInfoKHR;
+
+typedef struct VkDeviceGroupPresentCapabilitiesKHR {
+    VkStructureType                     sType;
+    void*                               pNext;
+    uint32_t                            presentMask[VK_MAX_DEVICE_GROUP_SIZE];
+    VkDeviceGroupPresentModeFlagsKHR    modes;
+} VkDeviceGroupPresentCapabilitiesKHR;
+
+typedef struct VkDeviceGroupPresentInfoKHR {
+    VkStructureType                        sType;
+    const void*                            pNext;
+    uint32_t                               swapchainCount;
+    const uint32_t*                        pDeviceMasks;
+    VkDeviceGroupPresentModeFlagBitsKHR    mode;
+} VkDeviceGroupPresentInfoKHR;
+
+typedef struct VkDeviceGroupSwapchainCreateInfoKHR {
+    VkStructureType                     sType;
+    const void*                         pNext;
+    VkDeviceGroupPresentModeFlagsKHR    modes;
+} VkDeviceGroupSwapchainCreateInfoKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateSwapchainKHR)(VkDevice device, const VkSwapchainCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchain);
+typedef void (VKAPI_PTR *PFN_vkDestroySwapchainKHR)(VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkGetSwapchainImagesKHR)(VkDevice device, VkSwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VkImage* pSwapchainImages);
+typedef VkResult (VKAPI_PTR *PFN_vkAcquireNextImageKHR)(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t* pImageIndex);
+typedef VkResult (VKAPI_PTR *PFN_vkQueuePresentKHR)(VkQueue queue, const VkPresentInfoKHR* pPresentInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkGetDeviceGroupPresentCapabilitiesKHR)(VkDevice device, VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities);
+typedef VkResult (VKAPI_PTR *PFN_vkGetDeviceGroupSurfacePresentModesKHR)(VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHR* pModes);
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDevicePresentRectanglesKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pRectCount, VkRect2D* pRects);
+typedef VkResult (VKAPI_PTR *PFN_vkAcquireNextImage2KHR)(VkDevice device, const VkAcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateSwapchainKHR(
+    VkDevice                                    device,
+    const VkSwapchainCreateInfoKHR*             pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSwapchainKHR*                             pSwapchain);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroySwapchainKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainImagesKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    uint32_t*                                   pSwapchainImageCount,
+    VkImage*                                    pSwapchainImages);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkAcquireNextImageKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    uint64_t                                    timeout,
+    VkSemaphore                                 semaphore,
+    VkFence                                     fence,
+    uint32_t*                                   pImageIndex);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkQueuePresentKHR(
+    VkQueue                                     queue,
+    const VkPresentInfoKHR*                     pPresentInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupPresentCapabilitiesKHR(
+    VkDevice                                    device,
+    VkDeviceGroupPresentCapabilitiesKHR*        pDeviceGroupPresentCapabilities);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupSurfacePresentModesKHR(
+    VkDevice                                    device,
+    VkSurfaceKHR                                surface,
+    VkDeviceGroupPresentModeFlagsKHR*           pModes);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDevicePresentRectanglesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    uint32_t*                                   pRectCount,
+    VkRect2D*                                   pRects);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkAcquireNextImage2KHR(
+    VkDevice                                    device,
+    const VkAcquireNextImageInfoKHR*            pAcquireInfo,
+    uint32_t*                                   pImageIndex);
+#endif
+
+
+#define VK_KHR_display 1
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDisplayKHR)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDisplayModeKHR)
+#define VK_KHR_DISPLAY_SPEC_VERSION       23
+#define VK_KHR_DISPLAY_EXTENSION_NAME     "VK_KHR_display"
+typedef VkFlags VkDisplayModeCreateFlagsKHR;
+
+typedef enum VkDisplayPlaneAlphaFlagBitsKHR {
+    VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR = 0x00000001,
+    VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR = 0x00000002,
+    VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR = 0x00000004,
+    VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR = 0x00000008,
+    VK_DISPLAY_PLANE_ALPHA_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkDisplayPlaneAlphaFlagBitsKHR;
+typedef VkFlags VkDisplayPlaneAlphaFlagsKHR;
+typedef VkFlags VkDisplaySurfaceCreateFlagsKHR;
+typedef struct VkDisplayModeParametersKHR {
+    VkExtent2D    visibleRegion;
+    uint32_t      refreshRate;
+} VkDisplayModeParametersKHR;
+
+typedef struct VkDisplayModeCreateInfoKHR {
+    VkStructureType                sType;
+    const void*                    pNext;
+    VkDisplayModeCreateFlagsKHR    flags;
+    VkDisplayModeParametersKHR     parameters;
+} VkDisplayModeCreateInfoKHR;
+
+typedef struct VkDisplayModePropertiesKHR {
+    VkDisplayModeKHR              displayMode;
+    VkDisplayModeParametersKHR    parameters;
+} VkDisplayModePropertiesKHR;
+
+typedef struct VkDisplayPlaneCapabilitiesKHR {
+    VkDisplayPlaneAlphaFlagsKHR    supportedAlpha;
+    VkOffset2D                     minSrcPosition;
+    VkOffset2D                     maxSrcPosition;
+    VkExtent2D                     minSrcExtent;
+    VkExtent2D                     maxSrcExtent;
+    VkOffset2D                     minDstPosition;
+    VkOffset2D                     maxDstPosition;
+    VkExtent2D                     minDstExtent;
+    VkExtent2D                     maxDstExtent;
+} VkDisplayPlaneCapabilitiesKHR;
+
+typedef struct VkDisplayPlanePropertiesKHR {
+    VkDisplayKHR    currentDisplay;
+    uint32_t        currentStackIndex;
+} VkDisplayPlanePropertiesKHR;
+
+typedef struct VkDisplayPropertiesKHR {
+    VkDisplayKHR                  display;
+    const char*                   displayName;
+    VkExtent2D                    physicalDimensions;
+    VkExtent2D                    physicalResolution;
+    VkSurfaceTransformFlagsKHR    supportedTransforms;
+    VkBool32                      planeReorderPossible;
+    VkBool32                      persistentContent;
+} VkDisplayPropertiesKHR;
+
+typedef struct VkDisplaySurfaceCreateInfoKHR {
+    VkStructureType                   sType;
+    const void*                       pNext;
+    VkDisplaySurfaceCreateFlagsKHR    flags;
+    VkDisplayModeKHR                  displayMode;
+    uint32_t                          planeIndex;
+    uint32_t                          planeStackIndex;
+    VkSurfaceTransformFlagBitsKHR     transform;
+    float                             globalAlpha;
+    VkDisplayPlaneAlphaFlagBitsKHR    alphaMode;
+    VkExtent2D                        imageExtent;
+} VkDisplaySurfaceCreateInfoKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceDisplayPropertiesKHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPropertiesKHR* pProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlanePropertiesKHR* pProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayPlaneSupportedDisplaysKHR)(VkPhysicalDevice physicalDevice, uint32_t planeIndex, uint32_t* pDisplayCount, VkDisplayKHR* pDisplays);
+typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayModePropertiesKHR)(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModePropertiesKHR* pProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateDisplayModeKHR)(VkPhysicalDevice physicalDevice, VkDisplayKHR display, const VkDisplayModeCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDisplayModeKHR* pMode);
+typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayPlaneCapabilitiesKHR)(VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode, uint32_t planeIndex, VkDisplayPlaneCapabilitiesKHR* pCapabilities);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateDisplayPlaneSurfaceKHR)(VkInstance instance, const VkDisplaySurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayPropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayPropertiesKHR*                     pProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayPlanePropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayPlanePropertiesKHR*                pProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneSupportedDisplaysKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    planeIndex,
+    uint32_t*                                   pDisplayCount,
+    VkDisplayKHR*                               pDisplays);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayModePropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayModePropertiesKHR*                 pProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateDisplayModeKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display,
+    const VkDisplayModeCreateInfoKHR*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDisplayModeKHR*                           pMode);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneCapabilitiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayModeKHR                            mode,
+    uint32_t                                    planeIndex,
+    VkDisplayPlaneCapabilitiesKHR*              pCapabilities);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateDisplayPlaneSurfaceKHR(
+    VkInstance                                  instance,
+    const VkDisplaySurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+#endif
+
+
+#define VK_KHR_display_swapchain 1
+#define VK_KHR_DISPLAY_SWAPCHAIN_SPEC_VERSION 10
+#define VK_KHR_DISPLAY_SWAPCHAIN_EXTENSION_NAME "VK_KHR_display_swapchain"
+typedef struct VkDisplayPresentInfoKHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkRect2D           srcRect;
+    VkRect2D           dstRect;
+    VkBool32           persistent;
+} VkDisplayPresentInfoKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateSharedSwapchainsKHR)(VkDevice device, uint32_t swapchainCount, const VkSwapchainCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchains);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateSharedSwapchainsKHR(
+    VkDevice                                    device,
+    uint32_t                                    swapchainCount,
+    const VkSwapchainCreateInfoKHR*             pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSwapchainKHR*                             pSwapchains);
+#endif
+
+
+#define VK_KHR_sampler_mirror_clamp_to_edge 1
+#define VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_SPEC_VERSION 3
+#define VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_EXTENSION_NAME "VK_KHR_sampler_mirror_clamp_to_edge"
+
+
+#define VK_KHR_video_queue 1
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkVideoSessionKHR)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkVideoSessionParametersKHR)
+#define VK_KHR_VIDEO_QUEUE_SPEC_VERSION   8
+#define VK_KHR_VIDEO_QUEUE_EXTENSION_NAME "VK_KHR_video_queue"
+
+typedef enum VkQueryResultStatusKHR {
+    VK_QUERY_RESULT_STATUS_ERROR_KHR = -1,
+    VK_QUERY_RESULT_STATUS_NOT_READY_KHR = 0,
+    VK_QUERY_RESULT_STATUS_COMPLETE_KHR = 1,
+    VK_QUERY_RESULT_STATUS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkQueryResultStatusKHR;
+
+typedef enum VkVideoCodecOperationFlagBitsKHR {
+    VK_VIDEO_CODEC_OPERATION_NONE_KHR = 0,
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_VIDEO_CODEC_OPERATION_ENCODE_H264_BIT_EXT = 0x00010000,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_VIDEO_CODEC_OPERATION_ENCODE_H265_BIT_EXT = 0x00020000,
+#endif
+    VK_VIDEO_CODEC_OPERATION_DECODE_H264_BIT_KHR = 0x00000001,
+    VK_VIDEO_CODEC_OPERATION_DECODE_H265_BIT_KHR = 0x00000002,
+    VK_VIDEO_CODEC_OPERATION_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkVideoCodecOperationFlagBitsKHR;
+typedef VkFlags VkVideoCodecOperationFlagsKHR;
+
+typedef enum VkVideoChromaSubsamplingFlagBitsKHR {
+    VK_VIDEO_CHROMA_SUBSAMPLING_INVALID_KHR = 0,
+    VK_VIDEO_CHROMA_SUBSAMPLING_MONOCHROME_BIT_KHR = 0x00000001,
+    VK_VIDEO_CHROMA_SUBSAMPLING_420_BIT_KHR = 0x00000002,
+    VK_VIDEO_CHROMA_SUBSAMPLING_422_BIT_KHR = 0x00000004,
+    VK_VIDEO_CHROMA_SUBSAMPLING_444_BIT_KHR = 0x00000008,
+    VK_VIDEO_CHROMA_SUBSAMPLING_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkVideoChromaSubsamplingFlagBitsKHR;
+typedef VkFlags VkVideoChromaSubsamplingFlagsKHR;
+
+typedef enum VkVideoComponentBitDepthFlagBitsKHR {
+    VK_VIDEO_COMPONENT_BIT_DEPTH_INVALID_KHR = 0,
+    VK_VIDEO_COMPONENT_BIT_DEPTH_8_BIT_KHR = 0x00000001,
+    VK_VIDEO_COMPONENT_BIT_DEPTH_10_BIT_KHR = 0x00000004,
+    VK_VIDEO_COMPONENT_BIT_DEPTH_12_BIT_KHR = 0x00000010,
+    VK_VIDEO_COMPONENT_BIT_DEPTH_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkVideoComponentBitDepthFlagBitsKHR;
+typedef VkFlags VkVideoComponentBitDepthFlagsKHR;
+
+typedef enum VkVideoCapabilityFlagBitsKHR {
+    VK_VIDEO_CAPABILITY_PROTECTED_CONTENT_BIT_KHR = 0x00000001,
+    VK_VIDEO_CAPABILITY_SEPARATE_REFERENCE_IMAGES_BIT_KHR = 0x00000002,
+    VK_VIDEO_CAPABILITY_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkVideoCapabilityFlagBitsKHR;
+typedef VkFlags VkVideoCapabilityFlagsKHR;
+
+typedef enum VkVideoSessionCreateFlagBitsKHR {
+    VK_VIDEO_SESSION_CREATE_PROTECTED_CONTENT_BIT_KHR = 0x00000001,
+    VK_VIDEO_SESSION_CREATE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkVideoSessionCreateFlagBitsKHR;
+typedef VkFlags VkVideoSessionCreateFlagsKHR;
+typedef VkFlags VkVideoSessionParametersCreateFlagsKHR;
+typedef VkFlags VkVideoBeginCodingFlagsKHR;
+typedef VkFlags VkVideoEndCodingFlagsKHR;
+
+typedef enum VkVideoCodingControlFlagBitsKHR {
+    VK_VIDEO_CODING_CONTROL_RESET_BIT_KHR = 0x00000001,
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_VIDEO_CODING_CONTROL_ENCODE_RATE_CONTROL_BIT_KHR = 0x00000002,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_VIDEO_CODING_CONTROL_ENCODE_RATE_CONTROL_LAYER_BIT_KHR = 0x00000004,
+#endif
+    VK_VIDEO_CODING_CONTROL_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkVideoCodingControlFlagBitsKHR;
+typedef VkFlags VkVideoCodingControlFlagsKHR;
+typedef struct VkQueueFamilyQueryResultStatusPropertiesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           queryResultStatusSupport;
+} VkQueueFamilyQueryResultStatusPropertiesKHR;
+
+typedef struct VkQueueFamilyVideoPropertiesKHR {
+    VkStructureType                  sType;
+    void*                            pNext;
+    VkVideoCodecOperationFlagsKHR    videoCodecOperations;
+} VkQueueFamilyVideoPropertiesKHR;
+
+typedef struct VkVideoProfileInfoKHR {
+    VkStructureType                     sType;
+    const void*                         pNext;
+    VkVideoCodecOperationFlagBitsKHR    videoCodecOperation;
+    VkVideoChromaSubsamplingFlagsKHR    chromaSubsampling;
+    VkVideoComponentBitDepthFlagsKHR    lumaBitDepth;
+    VkVideoComponentBitDepthFlagsKHR    chromaBitDepth;
+} VkVideoProfileInfoKHR;
+
+typedef struct VkVideoProfileListInfoKHR {
+    VkStructureType                 sType;
+    const void*                     pNext;
+    uint32_t                        profileCount;
+    const VkVideoProfileInfoKHR*    pProfiles;
+} VkVideoProfileListInfoKHR;
+
+typedef struct VkVideoCapabilitiesKHR {
+    VkStructureType              sType;
+    void*                        pNext;
+    VkVideoCapabilityFlagsKHR    flags;
+    VkDeviceSize                 minBitstreamBufferOffsetAlignment;
+    VkDeviceSize                 minBitstreamBufferSizeAlignment;
+    VkExtent2D                   pictureAccessGranularity;
+    VkExtent2D                   minCodedExtent;
+    VkExtent2D                   maxCodedExtent;
+    uint32_t                     maxDpbSlots;
+    uint32_t                     maxActiveReferencePictures;
+    VkExtensionProperties        stdHeaderVersion;
+} VkVideoCapabilitiesKHR;
+
+typedef struct VkPhysicalDeviceVideoFormatInfoKHR {
+    VkStructureType      sType;
+    const void*          pNext;
+    VkImageUsageFlags    imageUsage;
+} VkPhysicalDeviceVideoFormatInfoKHR;
+
+typedef struct VkVideoFormatPropertiesKHR {
+    VkStructureType       sType;
+    void*                 pNext;
+    VkFormat              format;
+    VkComponentMapping    componentMapping;
+    VkImageCreateFlags    imageCreateFlags;
+    VkImageType           imageType;
+    VkImageTiling         imageTiling;
+    VkImageUsageFlags     imageUsageFlags;
+} VkVideoFormatPropertiesKHR;
+
+typedef struct VkVideoPictureResourceInfoKHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkOffset2D         codedOffset;
+    VkExtent2D         codedExtent;
+    uint32_t           baseArrayLayer;
+    VkImageView        imageViewBinding;
+} VkVideoPictureResourceInfoKHR;
+
+typedef struct VkVideoReferenceSlotInfoKHR {
+    VkStructureType                         sType;
+    const void*                             pNext;
+    int32_t                                 slotIndex;
+    const VkVideoPictureResourceInfoKHR*    pPictureResource;
+} VkVideoReferenceSlotInfoKHR;
+
+typedef struct VkVideoSessionMemoryRequirementsKHR {
+    VkStructureType         sType;
+    void*                   pNext;
+    uint32_t                memoryBindIndex;
+    VkMemoryRequirements    memoryRequirements;
+} VkVideoSessionMemoryRequirementsKHR;
+
+typedef struct VkBindVideoSessionMemoryInfoKHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           memoryBindIndex;
+    VkDeviceMemory     memory;
+    VkDeviceSize       memoryOffset;
+    VkDeviceSize       memorySize;
+} VkBindVideoSessionMemoryInfoKHR;
+
+typedef struct VkVideoSessionCreateInfoKHR {
+    VkStructureType                 sType;
+    const void*                     pNext;
+    uint32_t                        queueFamilyIndex;
+    VkVideoSessionCreateFlagsKHR    flags;
+    const VkVideoProfileInfoKHR*    pVideoProfile;
+    VkFormat                        pictureFormat;
+    VkExtent2D                      maxCodedExtent;
+    VkFormat                        referencePictureFormat;
+    uint32_t                        maxDpbSlots;
+    uint32_t                        maxActiveReferencePictures;
+    const VkExtensionProperties*    pStdHeaderVersion;
+} VkVideoSessionCreateInfoKHR;
+
+typedef struct VkVideoSessionParametersCreateInfoKHR {
+    VkStructureType                           sType;
+    const void*                               pNext;
+    VkVideoSessionParametersCreateFlagsKHR    flags;
+    VkVideoSessionParametersKHR               videoSessionParametersTemplate;
+    VkVideoSessionKHR                         videoSession;
+} VkVideoSessionParametersCreateInfoKHR;
+
+typedef struct VkVideoSessionParametersUpdateInfoKHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           updateSequenceCount;
+} VkVideoSessionParametersUpdateInfoKHR;
+
+typedef struct VkVideoBeginCodingInfoKHR {
+    VkStructureType                       sType;
+    const void*                           pNext;
+    VkVideoBeginCodingFlagsKHR            flags;
+    VkVideoSessionKHR                     videoSession;
+    VkVideoSessionParametersKHR           videoSessionParameters;
+    uint32_t                              referenceSlotCount;
+    const VkVideoReferenceSlotInfoKHR*    pReferenceSlots;
+} VkVideoBeginCodingInfoKHR;
+
+typedef struct VkVideoEndCodingInfoKHR {
+    VkStructureType             sType;
+    const void*                 pNext;
+    VkVideoEndCodingFlagsKHR    flags;
+} VkVideoEndCodingInfoKHR;
+
+typedef struct VkVideoCodingControlInfoKHR {
+    VkStructureType                 sType;
+    const void*                     pNext;
+    VkVideoCodingControlFlagsKHR    flags;
+} VkVideoCodingControlInfoKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR)(VkPhysicalDevice physicalDevice, const VkVideoProfileInfoKHR* pVideoProfile, VkVideoCapabilitiesKHR* pCapabilities);
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceVideoFormatInfoKHR* pVideoFormatInfo, uint32_t* pVideoFormatPropertyCount, VkVideoFormatPropertiesKHR* pVideoFormatProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateVideoSessionKHR)(VkDevice device, const VkVideoSessionCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkVideoSessionKHR* pVideoSession);
+typedef void (VKAPI_PTR *PFN_vkDestroyVideoSessionKHR)(VkDevice device, VkVideoSessionKHR videoSession, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkGetVideoSessionMemoryRequirementsKHR)(VkDevice device, VkVideoSessionKHR videoSession, uint32_t* pMemoryRequirementsCount, VkVideoSessionMemoryRequirementsKHR* pMemoryRequirements);
+typedef VkResult (VKAPI_PTR *PFN_vkBindVideoSessionMemoryKHR)(VkDevice device, VkVideoSessionKHR videoSession, uint32_t bindSessionMemoryInfoCount, const VkBindVideoSessionMemoryInfoKHR* pBindSessionMemoryInfos);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateVideoSessionParametersKHR)(VkDevice device, const VkVideoSessionParametersCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkVideoSessionParametersKHR* pVideoSessionParameters);
+typedef VkResult (VKAPI_PTR *PFN_vkUpdateVideoSessionParametersKHR)(VkDevice device, VkVideoSessionParametersKHR videoSessionParameters, const VkVideoSessionParametersUpdateInfoKHR* pUpdateInfo);
+typedef void (VKAPI_PTR *PFN_vkDestroyVideoSessionParametersKHR)(VkDevice device, VkVideoSessionParametersKHR videoSessionParameters, const VkAllocationCallbacks* pAllocator);
+typedef void (VKAPI_PTR *PFN_vkCmdBeginVideoCodingKHR)(VkCommandBuffer commandBuffer, const VkVideoBeginCodingInfoKHR* pBeginInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdEndVideoCodingKHR)(VkCommandBuffer commandBuffer, const VkVideoEndCodingInfoKHR* pEndCodingInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdControlVideoCodingKHR)(VkCommandBuffer commandBuffer, const VkVideoCodingControlInfoKHR* pCodingControlInfo);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceVideoCapabilitiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkVideoProfileInfoKHR*                pVideoProfile,
+    VkVideoCapabilitiesKHR*                     pCapabilities);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceVideoFormatPropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceVideoFormatInfoKHR*   pVideoFormatInfo,
+    uint32_t*                                   pVideoFormatPropertyCount,
+    VkVideoFormatPropertiesKHR*                 pVideoFormatProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateVideoSessionKHR(
+    VkDevice                                    device,
+    const VkVideoSessionCreateInfoKHR*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkVideoSessionKHR*                          pVideoSession);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyVideoSessionKHR(
+    VkDevice                                    device,
+    VkVideoSessionKHR                           videoSession,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetVideoSessionMemoryRequirementsKHR(
+    VkDevice                                    device,
+    VkVideoSessionKHR                           videoSession,
+    uint32_t*                                   pMemoryRequirementsCount,
+    VkVideoSessionMemoryRequirementsKHR*        pMemoryRequirements);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkBindVideoSessionMemoryKHR(
+    VkDevice                                    device,
+    VkVideoSessionKHR                           videoSession,
+    uint32_t                                    bindSessionMemoryInfoCount,
+    const VkBindVideoSessionMemoryInfoKHR*      pBindSessionMemoryInfos);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateVideoSessionParametersKHR(
+    VkDevice                                    device,
+    const VkVideoSessionParametersCreateInfoKHR* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkVideoSessionParametersKHR*                pVideoSessionParameters);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkUpdateVideoSessionParametersKHR(
+    VkDevice                                    device,
+    VkVideoSessionParametersKHR                 videoSessionParameters,
+    const VkVideoSessionParametersUpdateInfoKHR* pUpdateInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyVideoSessionParametersKHR(
+    VkDevice                                    device,
+    VkVideoSessionParametersKHR                 videoSessionParameters,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBeginVideoCodingKHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkVideoBeginCodingInfoKHR*            pBeginInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdEndVideoCodingKHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkVideoEndCodingInfoKHR*              pEndCodingInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdControlVideoCodingKHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkVideoCodingControlInfoKHR*          pCodingControlInfo);
+#endif
+
+
+#define VK_KHR_video_decode_queue 1
+#define VK_KHR_VIDEO_DECODE_QUEUE_SPEC_VERSION 7
+#define VK_KHR_VIDEO_DECODE_QUEUE_EXTENSION_NAME "VK_KHR_video_decode_queue"
+
+typedef enum VkVideoDecodeCapabilityFlagBitsKHR {
+    VK_VIDEO_DECODE_CAPABILITY_DPB_AND_OUTPUT_COINCIDE_BIT_KHR = 0x00000001,
+    VK_VIDEO_DECODE_CAPABILITY_DPB_AND_OUTPUT_DISTINCT_BIT_KHR = 0x00000002,
+    VK_VIDEO_DECODE_CAPABILITY_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkVideoDecodeCapabilityFlagBitsKHR;
+typedef VkFlags VkVideoDecodeCapabilityFlagsKHR;
+
+typedef enum VkVideoDecodeUsageFlagBitsKHR {
+    VK_VIDEO_DECODE_USAGE_DEFAULT_KHR = 0,
+    VK_VIDEO_DECODE_USAGE_TRANSCODING_BIT_KHR = 0x00000001,
+    VK_VIDEO_DECODE_USAGE_OFFLINE_BIT_KHR = 0x00000002,
+    VK_VIDEO_DECODE_USAGE_STREAMING_BIT_KHR = 0x00000004,
+    VK_VIDEO_DECODE_USAGE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkVideoDecodeUsageFlagBitsKHR;
+typedef VkFlags VkVideoDecodeUsageFlagsKHR;
+typedef VkFlags VkVideoDecodeFlagsKHR;
+typedef struct VkVideoDecodeCapabilitiesKHR {
+    VkStructureType                    sType;
+    void*                              pNext;
+    VkVideoDecodeCapabilityFlagsKHR    flags;
+} VkVideoDecodeCapabilitiesKHR;
+
+typedef struct VkVideoDecodeUsageInfoKHR {
+    VkStructureType               sType;
+    const void*                   pNext;
+    VkVideoDecodeUsageFlagsKHR    videoUsageHints;
+} VkVideoDecodeUsageInfoKHR;
+
+typedef struct VkVideoDecodeInfoKHR {
+    VkStructureType                       sType;
+    const void*                           pNext;
+    VkVideoDecodeFlagsKHR                 flags;
+    VkBuffer                              srcBuffer;
+    VkDeviceSize                          srcBufferOffset;
+    VkDeviceSize                          srcBufferRange;
+    VkVideoPictureResourceInfoKHR         dstPictureResource;
+    const VkVideoReferenceSlotInfoKHR*    pSetupReferenceSlot;
+    uint32_t                              referenceSlotCount;
+    const VkVideoReferenceSlotInfoKHR*    pReferenceSlots;
+} VkVideoDecodeInfoKHR;
+
+typedef void (VKAPI_PTR *PFN_vkCmdDecodeVideoKHR)(VkCommandBuffer commandBuffer, const VkVideoDecodeInfoKHR* pDecodeInfo);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdDecodeVideoKHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkVideoDecodeInfoKHR*                 pDecodeInfo);
+#endif
+
+
+#define VK_KHR_video_decode_h264 1
+#include "vk_video/vulkan_video_codec_h264std.h"
+#include "vk_video/vulkan_video_codec_h264std_decode.h"
+#define VK_KHR_VIDEO_DECODE_H264_SPEC_VERSION 8
+#define VK_KHR_VIDEO_DECODE_H264_EXTENSION_NAME "VK_KHR_video_decode_h264"
+
+typedef enum VkVideoDecodeH264PictureLayoutFlagBitsKHR {
+    VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_PROGRESSIVE_KHR = 0,
+    VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_INTERLACED_INTERLEAVED_LINES_BIT_KHR = 0x00000001,
+    VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_INTERLACED_SEPARATE_PLANES_BIT_KHR = 0x00000002,
+    VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkVideoDecodeH264PictureLayoutFlagBitsKHR;
+typedef VkFlags VkVideoDecodeH264PictureLayoutFlagsKHR;
+typedef struct VkVideoDecodeH264ProfileInfoKHR {
+    VkStructureType                              sType;
+    const void*                                  pNext;
+    StdVideoH264ProfileIdc                       stdProfileIdc;
+    VkVideoDecodeH264PictureLayoutFlagBitsKHR    pictureLayout;
+} VkVideoDecodeH264ProfileInfoKHR;
+
+typedef struct VkVideoDecodeH264CapabilitiesKHR {
+    VkStructureType         sType;
+    void*                   pNext;
+    StdVideoH264LevelIdc    maxLevelIdc;
+    VkOffset2D              fieldOffsetGranularity;
+} VkVideoDecodeH264CapabilitiesKHR;
+
+typedef struct VkVideoDecodeH264SessionParametersAddInfoKHR {
+    VkStructureType                            sType;
+    const void*                                pNext;
+    uint32_t                                   stdSPSCount;
+    const StdVideoH264SequenceParameterSet*    pStdSPSs;
+    uint32_t                                   stdPPSCount;
+    const StdVideoH264PictureParameterSet*     pStdPPSs;
+} VkVideoDecodeH264SessionParametersAddInfoKHR;
+
+typedef struct VkVideoDecodeH264SessionParametersCreateInfoKHR {
+    VkStructureType                                        sType;
+    const void*                                            pNext;
+    uint32_t                                               maxStdSPSCount;
+    uint32_t                                               maxStdPPSCount;
+    const VkVideoDecodeH264SessionParametersAddInfoKHR*    pParametersAddInfo;
+} VkVideoDecodeH264SessionParametersCreateInfoKHR;
+
+typedef struct VkVideoDecodeH264PictureInfoKHR {
+    VkStructureType                         sType;
+    const void*                             pNext;
+    const StdVideoDecodeH264PictureInfo*    pStdPictureInfo;
+    uint32_t                                sliceCount;
+    const uint32_t*                         pSliceOffsets;
+} VkVideoDecodeH264PictureInfoKHR;
+
+typedef struct VkVideoDecodeH264DpbSlotInfoKHR {
+    VkStructureType                           sType;
+    const void*                               pNext;
+    const StdVideoDecodeH264ReferenceInfo*    pStdReferenceInfo;
+} VkVideoDecodeH264DpbSlotInfoKHR;
+
+
+
+#define VK_KHR_dynamic_rendering 1
+#define VK_KHR_DYNAMIC_RENDERING_SPEC_VERSION 1
+#define VK_KHR_DYNAMIC_RENDERING_EXTENSION_NAME "VK_KHR_dynamic_rendering"
+typedef VkRenderingFlags VkRenderingFlagsKHR;
+
+typedef VkRenderingFlagBits VkRenderingFlagBitsKHR;
+
+typedef VkRenderingInfo VkRenderingInfoKHR;
+
+typedef VkRenderingAttachmentInfo VkRenderingAttachmentInfoKHR;
+
+typedef VkPipelineRenderingCreateInfo VkPipelineRenderingCreateInfoKHR;
+
+typedef VkPhysicalDeviceDynamicRenderingFeatures VkPhysicalDeviceDynamicRenderingFeaturesKHR;
+
+typedef VkCommandBufferInheritanceRenderingInfo VkCommandBufferInheritanceRenderingInfoKHR;
+
+typedef struct VkRenderingFragmentShadingRateAttachmentInfoKHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkImageView        imageView;
+    VkImageLayout      imageLayout;
+    VkExtent2D         shadingRateAttachmentTexelSize;
+} VkRenderingFragmentShadingRateAttachmentInfoKHR;
+
+typedef struct VkRenderingFragmentDensityMapAttachmentInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkImageView        imageView;
+    VkImageLayout      imageLayout;
+} VkRenderingFragmentDensityMapAttachmentInfoEXT;
+
+typedef struct VkAttachmentSampleCountInfoAMD {
+    VkStructureType                 sType;
+    const void*                     pNext;
+    uint32_t                        colorAttachmentCount;
+    const VkSampleCountFlagBits*    pColorAttachmentSamples;
+    VkSampleCountFlagBits           depthStencilAttachmentSamples;
+} VkAttachmentSampleCountInfoAMD;
+
+typedef VkAttachmentSampleCountInfoAMD VkAttachmentSampleCountInfoNV;
+
+typedef struct VkMultiviewPerViewAttributesInfoNVX {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkBool32           perViewAttributes;
+    VkBool32           perViewAttributesPositionXOnly;
+} VkMultiviewPerViewAttributesInfoNVX;
+
+typedef void (VKAPI_PTR *PFN_vkCmdBeginRenderingKHR)(VkCommandBuffer                   commandBuffer, const VkRenderingInfo*                              pRenderingInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdEndRenderingKHR)(VkCommandBuffer                   commandBuffer);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdBeginRenderingKHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkRenderingInfo*                      pRenderingInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderingKHR(
+    VkCommandBuffer                             commandBuffer);
+#endif
+
+
+#define VK_KHR_multiview 1
+#define VK_KHR_MULTIVIEW_SPEC_VERSION     1
+#define VK_KHR_MULTIVIEW_EXTENSION_NAME   "VK_KHR_multiview"
+typedef VkRenderPassMultiviewCreateInfo VkRenderPassMultiviewCreateInfoKHR;
+
+typedef VkPhysicalDeviceMultiviewFeatures VkPhysicalDeviceMultiviewFeaturesKHR;
+
+typedef VkPhysicalDeviceMultiviewProperties VkPhysicalDeviceMultiviewPropertiesKHR;
+
+
+
+#define VK_KHR_get_physical_device_properties2 1
+#define VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_SPEC_VERSION 2
+#define VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME "VK_KHR_get_physical_device_properties2"
+typedef VkPhysicalDeviceFeatures2 VkPhysicalDeviceFeatures2KHR;
+
+typedef VkPhysicalDeviceProperties2 VkPhysicalDeviceProperties2KHR;
+
+typedef VkFormatProperties2 VkFormatProperties2KHR;
+
+typedef VkImageFormatProperties2 VkImageFormatProperties2KHR;
+
+typedef VkPhysicalDeviceImageFormatInfo2 VkPhysicalDeviceImageFormatInfo2KHR;
+
+typedef VkQueueFamilyProperties2 VkQueueFamilyProperties2KHR;
+
+typedef VkPhysicalDeviceMemoryProperties2 VkPhysicalDeviceMemoryProperties2KHR;
+
+typedef VkSparseImageFormatProperties2 VkSparseImageFormatProperties2KHR;
+
+typedef VkPhysicalDeviceSparseImageFormatInfo2 VkPhysicalDeviceSparseImageFormatInfo2KHR;
+
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFeatures2KHR)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceProperties2KHR)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFormatProperties2KHR)(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceImageFormatProperties2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR)(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceMemoryProperties2KHR)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFeatures2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceFeatures2*                  pFeatures);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceProperties2*                pProperties);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFormatProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkFormatProperties2*                        pFormatProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceImageFormatProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceImageFormatInfo2*     pImageFormatInfo,
+    VkImageFormatProperties2*                   pImageFormatProperties);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pQueueFamilyPropertyCount,
+    VkQueueFamilyProperties2*                   pQueueFamilyProperties);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceMemoryProperties2*          pMemoryProperties);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceSparseImageFormatProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo,
+    uint32_t*                                   pPropertyCount,
+    VkSparseImageFormatProperties2*             pProperties);
+#endif
+
+
+#define VK_KHR_device_group 1
+#define VK_KHR_DEVICE_GROUP_SPEC_VERSION  4
+#define VK_KHR_DEVICE_GROUP_EXTENSION_NAME "VK_KHR_device_group"
+typedef VkPeerMemoryFeatureFlags VkPeerMemoryFeatureFlagsKHR;
+
+typedef VkPeerMemoryFeatureFlagBits VkPeerMemoryFeatureFlagBitsKHR;
+
+typedef VkMemoryAllocateFlags VkMemoryAllocateFlagsKHR;
+
+typedef VkMemoryAllocateFlagBits VkMemoryAllocateFlagBitsKHR;
+
+typedef VkMemoryAllocateFlagsInfo VkMemoryAllocateFlagsInfoKHR;
+
+typedef VkDeviceGroupRenderPassBeginInfo VkDeviceGroupRenderPassBeginInfoKHR;
+
+typedef VkDeviceGroupCommandBufferBeginInfo VkDeviceGroupCommandBufferBeginInfoKHR;
+
+typedef VkDeviceGroupSubmitInfo VkDeviceGroupSubmitInfoKHR;
+
+typedef VkDeviceGroupBindSparseInfo VkDeviceGroupBindSparseInfoKHR;
+
+typedef VkBindBufferMemoryDeviceGroupInfo VkBindBufferMemoryDeviceGroupInfoKHR;
+
+typedef VkBindImageMemoryDeviceGroupInfo VkBindImageMemoryDeviceGroupInfoKHR;
+
+typedef void (VKAPI_PTR *PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR)(VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures);
+typedef void (VKAPI_PTR *PFN_vkCmdSetDeviceMaskKHR)(VkCommandBuffer commandBuffer, uint32_t deviceMask);
+typedef void (VKAPI_PTR *PFN_vkCmdDispatchBaseKHR)(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkGetDeviceGroupPeerMemoryFeaturesKHR(
+    VkDevice                                    device,
+    uint32_t                                    heapIndex,
+    uint32_t                                    localDeviceIndex,
+    uint32_t                                    remoteDeviceIndex,
+    VkPeerMemoryFeatureFlags*                   pPeerMemoryFeatures);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetDeviceMaskKHR(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    deviceMask);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDispatchBaseKHR(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    baseGroupX,
+    uint32_t                                    baseGroupY,
+    uint32_t                                    baseGroupZ,
+    uint32_t                                    groupCountX,
+    uint32_t                                    groupCountY,
+    uint32_t                                    groupCountZ);
+#endif
+
+
+#define VK_KHR_shader_draw_parameters 1
+#define VK_KHR_SHADER_DRAW_PARAMETERS_SPEC_VERSION 1
+#define VK_KHR_SHADER_DRAW_PARAMETERS_EXTENSION_NAME "VK_KHR_shader_draw_parameters"
+
+
+#define VK_KHR_maintenance1 1
+#define VK_KHR_MAINTENANCE_1_SPEC_VERSION 2
+#define VK_KHR_MAINTENANCE_1_EXTENSION_NAME "VK_KHR_maintenance1"
+#define VK_KHR_MAINTENANCE1_SPEC_VERSION  VK_KHR_MAINTENANCE_1_SPEC_VERSION
+#define VK_KHR_MAINTENANCE1_EXTENSION_NAME VK_KHR_MAINTENANCE_1_EXTENSION_NAME
+typedef VkCommandPoolTrimFlags VkCommandPoolTrimFlagsKHR;
+
+typedef void (VKAPI_PTR *PFN_vkTrimCommandPoolKHR)(VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkTrimCommandPoolKHR(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    VkCommandPoolTrimFlags                      flags);
+#endif
+
+
+#define VK_KHR_device_group_creation 1
+#define VK_KHR_DEVICE_GROUP_CREATION_SPEC_VERSION 1
+#define VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME "VK_KHR_device_group_creation"
+#define VK_MAX_DEVICE_GROUP_SIZE_KHR      VK_MAX_DEVICE_GROUP_SIZE
+typedef VkPhysicalDeviceGroupProperties VkPhysicalDeviceGroupPropertiesKHR;
+
+typedef VkDeviceGroupDeviceCreateInfo VkDeviceGroupDeviceCreateInfoKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkEnumeratePhysicalDeviceGroupsKHR)(VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDeviceGroupsKHR(
+    VkInstance                                  instance,
+    uint32_t*                                   pPhysicalDeviceGroupCount,
+    VkPhysicalDeviceGroupProperties*            pPhysicalDeviceGroupProperties);
+#endif
+
+
+#define VK_KHR_external_memory_capabilities 1
+#define VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_SPEC_VERSION 1
+#define VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME "VK_KHR_external_memory_capabilities"
+#define VK_LUID_SIZE_KHR                  VK_LUID_SIZE
+typedef VkExternalMemoryHandleTypeFlags VkExternalMemoryHandleTypeFlagsKHR;
+
+typedef VkExternalMemoryHandleTypeFlagBits VkExternalMemoryHandleTypeFlagBitsKHR;
+
+typedef VkExternalMemoryFeatureFlags VkExternalMemoryFeatureFlagsKHR;
+
+typedef VkExternalMemoryFeatureFlagBits VkExternalMemoryFeatureFlagBitsKHR;
+
+typedef VkExternalMemoryProperties VkExternalMemoryPropertiesKHR;
+
+typedef VkPhysicalDeviceExternalImageFormatInfo VkPhysicalDeviceExternalImageFormatInfoKHR;
+
+typedef VkExternalImageFormatProperties VkExternalImageFormatPropertiesKHR;
+
+typedef VkPhysicalDeviceExternalBufferInfo VkPhysicalDeviceExternalBufferInfoKHR;
+
+typedef VkExternalBufferProperties VkExternalBufferPropertiesKHR;
+
+typedef VkPhysicalDeviceIDProperties VkPhysicalDeviceIDPropertiesKHR;
+
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalBufferPropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalBufferInfo*   pExternalBufferInfo,
+    VkExternalBufferProperties*                 pExternalBufferProperties);
+#endif
+
+
+#define VK_KHR_external_memory 1
+#define VK_KHR_EXTERNAL_MEMORY_SPEC_VERSION 1
+#define VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME "VK_KHR_external_memory"
+#define VK_QUEUE_FAMILY_EXTERNAL_KHR      VK_QUEUE_FAMILY_EXTERNAL
+typedef VkExternalMemoryImageCreateInfo VkExternalMemoryImageCreateInfoKHR;
+
+typedef VkExternalMemoryBufferCreateInfo VkExternalMemoryBufferCreateInfoKHR;
+
+typedef VkExportMemoryAllocateInfo VkExportMemoryAllocateInfoKHR;
+
+
+
+#define VK_KHR_external_memory_fd 1
+#define VK_KHR_EXTERNAL_MEMORY_FD_SPEC_VERSION 1
+#define VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME "VK_KHR_external_memory_fd"
+typedef struct VkImportMemoryFdInfoKHR {
+    VkStructureType                       sType;
+    const void*                           pNext;
+    VkExternalMemoryHandleTypeFlagBits    handleType;
+    int                                   fd;
+} VkImportMemoryFdInfoKHR;
+
+typedef struct VkMemoryFdPropertiesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           memoryTypeBits;
+} VkMemoryFdPropertiesKHR;
+
+typedef struct VkMemoryGetFdInfoKHR {
+    VkStructureType                       sType;
+    const void*                           pNext;
+    VkDeviceMemory                        memory;
+    VkExternalMemoryHandleTypeFlagBits    handleType;
+} VkMemoryGetFdInfoKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryFdKHR)(VkDevice device, const VkMemoryGetFdInfoKHR* pGetFdInfo, int* pFd);
+typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryFdPropertiesKHR)(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, int fd, VkMemoryFdPropertiesKHR* pMemoryFdProperties);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryFdKHR(
+    VkDevice                                    device,
+    const VkMemoryGetFdInfoKHR*                 pGetFdInfo,
+    int*                                        pFd);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryFdPropertiesKHR(
+    VkDevice                                    device,
+    VkExternalMemoryHandleTypeFlagBits          handleType,
+    int                                         fd,
+    VkMemoryFdPropertiesKHR*                    pMemoryFdProperties);
+#endif
+
+
+#define VK_KHR_external_semaphore_capabilities 1
+#define VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_SPEC_VERSION 1
+#define VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME "VK_KHR_external_semaphore_capabilities"
+typedef VkExternalSemaphoreHandleTypeFlags VkExternalSemaphoreHandleTypeFlagsKHR;
+
+typedef VkExternalSemaphoreHandleTypeFlagBits VkExternalSemaphoreHandleTypeFlagBitsKHR;
+
+typedef VkExternalSemaphoreFeatureFlags VkExternalSemaphoreFeatureFlagsKHR;
+
+typedef VkExternalSemaphoreFeatureFlagBits VkExternalSemaphoreFeatureFlagBitsKHR;
+
+typedef VkPhysicalDeviceExternalSemaphoreInfo VkPhysicalDeviceExternalSemaphoreInfoKHR;
+
+typedef VkExternalSemaphoreProperties VkExternalSemaphorePropertiesKHR;
+
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalSemaphorePropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo,
+    VkExternalSemaphoreProperties*              pExternalSemaphoreProperties);
+#endif
+
+
+#define VK_KHR_external_semaphore 1
+#define VK_KHR_EXTERNAL_SEMAPHORE_SPEC_VERSION 1
+#define VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME "VK_KHR_external_semaphore"
+typedef VkSemaphoreImportFlags VkSemaphoreImportFlagsKHR;
+
+typedef VkSemaphoreImportFlagBits VkSemaphoreImportFlagBitsKHR;
+
+typedef VkExportSemaphoreCreateInfo VkExportSemaphoreCreateInfoKHR;
+
+
+
+#define VK_KHR_external_semaphore_fd 1
+#define VK_KHR_EXTERNAL_SEMAPHORE_FD_SPEC_VERSION 1
+#define VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME "VK_KHR_external_semaphore_fd"
+typedef struct VkImportSemaphoreFdInfoKHR {
+    VkStructureType                          sType;
+    const void*                              pNext;
+    VkSemaphore                              semaphore;
+    VkSemaphoreImportFlags                   flags;
+    VkExternalSemaphoreHandleTypeFlagBits    handleType;
+    int                                      fd;
+} VkImportSemaphoreFdInfoKHR;
+
+typedef struct VkSemaphoreGetFdInfoKHR {
+    VkStructureType                          sType;
+    const void*                              pNext;
+    VkSemaphore                              semaphore;
+    VkExternalSemaphoreHandleTypeFlagBits    handleType;
+} VkSemaphoreGetFdInfoKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkImportSemaphoreFdKHR)(VkDevice device, const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkGetSemaphoreFdKHR)(VkDevice device, const VkSemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkImportSemaphoreFdKHR(
+    VkDevice                                    device,
+    const VkImportSemaphoreFdInfoKHR*           pImportSemaphoreFdInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreFdKHR(
+    VkDevice                                    device,
+    const VkSemaphoreGetFdInfoKHR*              pGetFdInfo,
+    int*                                        pFd);
+#endif
+
+
+#define VK_KHR_push_descriptor 1
+#define VK_KHR_PUSH_DESCRIPTOR_SPEC_VERSION 2
+#define VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME "VK_KHR_push_descriptor"
+typedef struct VkPhysicalDevicePushDescriptorPropertiesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           maxPushDescriptors;
+} VkPhysicalDevicePushDescriptorPropertiesKHR;
+
+typedef void (VKAPI_PTR *PFN_vkCmdPushDescriptorSetKHR)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites);
+typedef void (VKAPI_PTR *PFN_vkCmdPushDescriptorSetWithTemplateKHR)(VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplate descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set, const void* pData);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdPushDescriptorSetKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineBindPoint                         pipelineBindPoint,
+    VkPipelineLayout                            layout,
+    uint32_t                                    set,
+    uint32_t                                    descriptorWriteCount,
+    const VkWriteDescriptorSet*                 pDescriptorWrites);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdPushDescriptorSetWithTemplateKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    VkPipelineLayout                            layout,
+    uint32_t                                    set,
+    const void*                                 pData);
+#endif
+
+
+#define VK_KHR_shader_float16_int8 1
+#define VK_KHR_SHADER_FLOAT16_INT8_SPEC_VERSION 1
+#define VK_KHR_SHADER_FLOAT16_INT8_EXTENSION_NAME "VK_KHR_shader_float16_int8"
+typedef VkPhysicalDeviceShaderFloat16Int8Features VkPhysicalDeviceShaderFloat16Int8FeaturesKHR;
+
+typedef VkPhysicalDeviceShaderFloat16Int8Features VkPhysicalDeviceFloat16Int8FeaturesKHR;
+
+
+
+#define VK_KHR_16bit_storage 1
+#define VK_KHR_16BIT_STORAGE_SPEC_VERSION 1
+#define VK_KHR_16BIT_STORAGE_EXTENSION_NAME "VK_KHR_16bit_storage"
+typedef VkPhysicalDevice16BitStorageFeatures VkPhysicalDevice16BitStorageFeaturesKHR;
+
+
+
+#define VK_KHR_incremental_present 1
+#define VK_KHR_INCREMENTAL_PRESENT_SPEC_VERSION 2
+#define VK_KHR_INCREMENTAL_PRESENT_EXTENSION_NAME "VK_KHR_incremental_present"
+typedef struct VkRectLayerKHR {
+    VkOffset2D    offset;
+    VkExtent2D    extent;
+    uint32_t      layer;
+} VkRectLayerKHR;
+
+typedef struct VkPresentRegionKHR {
+    uint32_t                 rectangleCount;
+    const VkRectLayerKHR*    pRectangles;
+} VkPresentRegionKHR;
+
+typedef struct VkPresentRegionsKHR {
+    VkStructureType              sType;
+    const void*                  pNext;
+    uint32_t                     swapchainCount;
+    const VkPresentRegionKHR*    pRegions;
+} VkPresentRegionsKHR;
+
+
+
+#define VK_KHR_descriptor_update_template 1
+typedef VkDescriptorUpdateTemplate VkDescriptorUpdateTemplateKHR;
+
+#define VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_SPEC_VERSION 1
+#define VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_EXTENSION_NAME "VK_KHR_descriptor_update_template"
+typedef VkDescriptorUpdateTemplateType VkDescriptorUpdateTemplateTypeKHR;
+
+typedef VkDescriptorUpdateTemplateCreateFlags VkDescriptorUpdateTemplateCreateFlagsKHR;
+
+typedef VkDescriptorUpdateTemplateEntry VkDescriptorUpdateTemplateEntryKHR;
+
+typedef VkDescriptorUpdateTemplateCreateInfo VkDescriptorUpdateTemplateCreateInfoKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateDescriptorUpdateTemplateKHR)(VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate);
+typedef void (VKAPI_PTR *PFN_vkDestroyDescriptorUpdateTemplateKHR)(VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator);
+typedef void (VKAPI_PTR *PFN_vkUpdateDescriptorSetWithTemplateKHR)(VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorUpdateTemplateKHR(
+    VkDevice                                    device,
+    const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorUpdateTemplate*                 pDescriptorUpdateTemplate);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorUpdateTemplateKHR(
+    VkDevice                                    device,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSetWithTemplateKHR(
+    VkDevice                                    device,
+    VkDescriptorSet                             descriptorSet,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const void*                                 pData);
+#endif
+
+
+#define VK_KHR_imageless_framebuffer 1
+#define VK_KHR_IMAGELESS_FRAMEBUFFER_SPEC_VERSION 1
+#define VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME "VK_KHR_imageless_framebuffer"
+typedef VkPhysicalDeviceImagelessFramebufferFeatures VkPhysicalDeviceImagelessFramebufferFeaturesKHR;
+
+typedef VkFramebufferAttachmentsCreateInfo VkFramebufferAttachmentsCreateInfoKHR;
+
+typedef VkFramebufferAttachmentImageInfo VkFramebufferAttachmentImageInfoKHR;
+
+typedef VkRenderPassAttachmentBeginInfo VkRenderPassAttachmentBeginInfoKHR;
+
+
+
+#define VK_KHR_create_renderpass2 1
+#define VK_KHR_CREATE_RENDERPASS_2_SPEC_VERSION 1
+#define VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME "VK_KHR_create_renderpass2"
+typedef VkRenderPassCreateInfo2 VkRenderPassCreateInfo2KHR;
+
+typedef VkAttachmentDescription2 VkAttachmentDescription2KHR;
+
+typedef VkAttachmentReference2 VkAttachmentReference2KHR;
+
+typedef VkSubpassDescription2 VkSubpassDescription2KHR;
+
+typedef VkSubpassDependency2 VkSubpassDependency2KHR;
+
+typedef VkSubpassBeginInfo VkSubpassBeginInfoKHR;
+
+typedef VkSubpassEndInfo VkSubpassEndInfoKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateRenderPass2KHR)(VkDevice device, const VkRenderPassCreateInfo2* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass);
+typedef void (VKAPI_PTR *PFN_vkCmdBeginRenderPass2KHR)(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo*      pRenderPassBegin, const VkSubpassBeginInfo*      pSubpassBeginInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdNextSubpass2KHR)(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo*      pSubpassBeginInfo, const VkSubpassEndInfo*        pSubpassEndInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdEndRenderPass2KHR)(VkCommandBuffer commandBuffer, const VkSubpassEndInfo*        pSubpassEndInfo);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateRenderPass2KHR(
+    VkDevice                                    device,
+    const VkRenderPassCreateInfo2*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkRenderPass*                               pRenderPass);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBeginRenderPass2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkRenderPassBeginInfo*                pRenderPassBegin,
+    const VkSubpassBeginInfo*                   pSubpassBeginInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdNextSubpass2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkSubpassBeginInfo*                   pSubpassBeginInfo,
+    const VkSubpassEndInfo*                     pSubpassEndInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderPass2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkSubpassEndInfo*                     pSubpassEndInfo);
+#endif
+
+
+#define VK_KHR_shared_presentable_image 1
+#define VK_KHR_SHARED_PRESENTABLE_IMAGE_SPEC_VERSION 1
+#define VK_KHR_SHARED_PRESENTABLE_IMAGE_EXTENSION_NAME "VK_KHR_shared_presentable_image"
+typedef struct VkSharedPresentSurfaceCapabilitiesKHR {
+    VkStructureType      sType;
+    void*                pNext;
+    VkImageUsageFlags    sharedPresentSupportedUsageFlags;
+} VkSharedPresentSurfaceCapabilitiesKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetSwapchainStatusKHR)(VkDevice device, VkSwapchainKHR swapchain);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainStatusKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain);
+#endif
+
+
+#define VK_KHR_external_fence_capabilities 1
+#define VK_KHR_EXTERNAL_FENCE_CAPABILITIES_SPEC_VERSION 1
+#define VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME "VK_KHR_external_fence_capabilities"
+typedef VkExternalFenceHandleTypeFlags VkExternalFenceHandleTypeFlagsKHR;
+
+typedef VkExternalFenceHandleTypeFlagBits VkExternalFenceHandleTypeFlagBitsKHR;
+
+typedef VkExternalFenceFeatureFlags VkExternalFenceFeatureFlagsKHR;
+
+typedef VkExternalFenceFeatureFlagBits VkExternalFenceFeatureFlagBitsKHR;
+
+typedef VkPhysicalDeviceExternalFenceInfo VkPhysicalDeviceExternalFenceInfoKHR;
+
+typedef VkExternalFenceProperties VkExternalFencePropertiesKHR;
+
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalFencePropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalFenceInfo*    pExternalFenceInfo,
+    VkExternalFenceProperties*                  pExternalFenceProperties);
+#endif
+
+
+#define VK_KHR_external_fence 1
+#define VK_KHR_EXTERNAL_FENCE_SPEC_VERSION 1
+#define VK_KHR_EXTERNAL_FENCE_EXTENSION_NAME "VK_KHR_external_fence"
+typedef VkFenceImportFlags VkFenceImportFlagsKHR;
+
+typedef VkFenceImportFlagBits VkFenceImportFlagBitsKHR;
+
+typedef VkExportFenceCreateInfo VkExportFenceCreateInfoKHR;
+
+
+
+#define VK_KHR_external_fence_fd 1
+#define VK_KHR_EXTERNAL_FENCE_FD_SPEC_VERSION 1
+#define VK_KHR_EXTERNAL_FENCE_FD_EXTENSION_NAME "VK_KHR_external_fence_fd"
+typedef struct VkImportFenceFdInfoKHR {
+    VkStructureType                      sType;
+    const void*                          pNext;
+    VkFence                              fence;
+    VkFenceImportFlags                   flags;
+    VkExternalFenceHandleTypeFlagBits    handleType;
+    int                                  fd;
+} VkImportFenceFdInfoKHR;
+
+typedef struct VkFenceGetFdInfoKHR {
+    VkStructureType                      sType;
+    const void*                          pNext;
+    VkFence                              fence;
+    VkExternalFenceHandleTypeFlagBits    handleType;
+} VkFenceGetFdInfoKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkImportFenceFdKHR)(VkDevice device, const VkImportFenceFdInfoKHR* pImportFenceFdInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkGetFenceFdKHR)(VkDevice device, const VkFenceGetFdInfoKHR* pGetFdInfo, int* pFd);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkImportFenceFdKHR(
+    VkDevice                                    device,
+    const VkImportFenceFdInfoKHR*               pImportFenceFdInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetFenceFdKHR(
+    VkDevice                                    device,
+    const VkFenceGetFdInfoKHR*                  pGetFdInfo,
+    int*                                        pFd);
+#endif
+
+
+#define VK_KHR_performance_query 1
+#define VK_KHR_PERFORMANCE_QUERY_SPEC_VERSION 1
+#define VK_KHR_PERFORMANCE_QUERY_EXTENSION_NAME "VK_KHR_performance_query"
+
+typedef enum VkPerformanceCounterUnitKHR {
+    VK_PERFORMANCE_COUNTER_UNIT_GENERIC_KHR = 0,
+    VK_PERFORMANCE_COUNTER_UNIT_PERCENTAGE_KHR = 1,
+    VK_PERFORMANCE_COUNTER_UNIT_NANOSECONDS_KHR = 2,
+    VK_PERFORMANCE_COUNTER_UNIT_BYTES_KHR = 3,
+    VK_PERFORMANCE_COUNTER_UNIT_BYTES_PER_SECOND_KHR = 4,
+    VK_PERFORMANCE_COUNTER_UNIT_KELVIN_KHR = 5,
+    VK_PERFORMANCE_COUNTER_UNIT_WATTS_KHR = 6,
+    VK_PERFORMANCE_COUNTER_UNIT_VOLTS_KHR = 7,
+    VK_PERFORMANCE_COUNTER_UNIT_AMPS_KHR = 8,
+    VK_PERFORMANCE_COUNTER_UNIT_HERTZ_KHR = 9,
+    VK_PERFORMANCE_COUNTER_UNIT_CYCLES_KHR = 10,
+    VK_PERFORMANCE_COUNTER_UNIT_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkPerformanceCounterUnitKHR;
+
+typedef enum VkPerformanceCounterScopeKHR {
+    VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_BUFFER_KHR = 0,
+    VK_PERFORMANCE_COUNTER_SCOPE_RENDER_PASS_KHR = 1,
+    VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_KHR = 2,
+    VK_QUERY_SCOPE_COMMAND_BUFFER_KHR = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_BUFFER_KHR,
+    VK_QUERY_SCOPE_RENDER_PASS_KHR = VK_PERFORMANCE_COUNTER_SCOPE_RENDER_PASS_KHR,
+    VK_QUERY_SCOPE_COMMAND_KHR = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_KHR,
+    VK_PERFORMANCE_COUNTER_SCOPE_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkPerformanceCounterScopeKHR;
+
+typedef enum VkPerformanceCounterStorageKHR {
+    VK_PERFORMANCE_COUNTER_STORAGE_INT32_KHR = 0,
+    VK_PERFORMANCE_COUNTER_STORAGE_INT64_KHR = 1,
+    VK_PERFORMANCE_COUNTER_STORAGE_UINT32_KHR = 2,
+    VK_PERFORMANCE_COUNTER_STORAGE_UINT64_KHR = 3,
+    VK_PERFORMANCE_COUNTER_STORAGE_FLOAT32_KHR = 4,
+    VK_PERFORMANCE_COUNTER_STORAGE_FLOAT64_KHR = 5,
+    VK_PERFORMANCE_COUNTER_STORAGE_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkPerformanceCounterStorageKHR;
+
+typedef enum VkPerformanceCounterDescriptionFlagBitsKHR {
+    VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_BIT_KHR = 0x00000001,
+    VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_BIT_KHR = 0x00000002,
+    VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_KHR = VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_BIT_KHR,
+    VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_KHR = VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_BIT_KHR,
+    VK_PERFORMANCE_COUNTER_DESCRIPTION_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkPerformanceCounterDescriptionFlagBitsKHR;
+typedef VkFlags VkPerformanceCounterDescriptionFlagsKHR;
+
+typedef enum VkAcquireProfilingLockFlagBitsKHR {
+    VK_ACQUIRE_PROFILING_LOCK_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkAcquireProfilingLockFlagBitsKHR;
+typedef VkFlags VkAcquireProfilingLockFlagsKHR;
+typedef struct VkPhysicalDevicePerformanceQueryFeaturesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           performanceCounterQueryPools;
+    VkBool32           performanceCounterMultipleQueryPools;
+} VkPhysicalDevicePerformanceQueryFeaturesKHR;
+
+typedef struct VkPhysicalDevicePerformanceQueryPropertiesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           allowCommandBufferQueryCopies;
+} VkPhysicalDevicePerformanceQueryPropertiesKHR;
+
+typedef struct VkPerformanceCounterKHR {
+    VkStructureType                   sType;
+    void*                             pNext;
+    VkPerformanceCounterUnitKHR       unit;
+    VkPerformanceCounterScopeKHR      scope;
+    VkPerformanceCounterStorageKHR    storage;
+    uint8_t                           uuid[VK_UUID_SIZE];
+} VkPerformanceCounterKHR;
+
+typedef struct VkPerformanceCounterDescriptionKHR {
+    VkStructureType                            sType;
+    void*                                      pNext;
+    VkPerformanceCounterDescriptionFlagsKHR    flags;
+    char                                       name[VK_MAX_DESCRIPTION_SIZE];
+    char                                       category[VK_MAX_DESCRIPTION_SIZE];
+    char                                       description[VK_MAX_DESCRIPTION_SIZE];
+} VkPerformanceCounterDescriptionKHR;
+
+typedef struct VkQueryPoolPerformanceCreateInfoKHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           queueFamilyIndex;
+    uint32_t           counterIndexCount;
+    const uint32_t*    pCounterIndices;
+} VkQueryPoolPerformanceCreateInfoKHR;
+
+typedef union VkPerformanceCounterResultKHR {
+    int32_t     int32;
+    int64_t     int64;
+    uint32_t    uint32;
+    uint64_t    uint64;
+    float       float32;
+    double      float64;
+} VkPerformanceCounterResultKHR;
+
+typedef struct VkAcquireProfilingLockInfoKHR {
+    VkStructureType                   sType;
+    const void*                       pNext;
+    VkAcquireProfilingLockFlagsKHR    flags;
+    uint64_t                          timeout;
+} VkAcquireProfilingLockInfoKHR;
+
+typedef struct VkPerformanceQuerySubmitInfoKHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           counterPassIndex;
+} VkPerformanceQuerySubmitInfoKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, uint32_t* pCounterCount, VkPerformanceCounterKHR* pCounters, VkPerformanceCounterDescriptionKHR* pCounterDescriptions);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR)(VkPhysicalDevice physicalDevice, const VkQueryPoolPerformanceCreateInfoKHR* pPerformanceQueryCreateInfo, uint32_t* pNumPasses);
+typedef VkResult (VKAPI_PTR *PFN_vkAcquireProfilingLockKHR)(VkDevice device, const VkAcquireProfilingLockInfoKHR* pInfo);
+typedef void (VKAPI_PTR *PFN_vkReleaseProfilingLockKHR)(VkDevice device);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    uint32_t*                                   pCounterCount,
+    VkPerformanceCounterKHR*                    pCounters,
+    VkPerformanceCounterDescriptionKHR*         pCounterDescriptions);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkQueryPoolPerformanceCreateInfoKHR*  pPerformanceQueryCreateInfo,
+    uint32_t*                                   pNumPasses);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkAcquireProfilingLockKHR(
+    VkDevice                                    device,
+    const VkAcquireProfilingLockInfoKHR*        pInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkReleaseProfilingLockKHR(
+    VkDevice                                    device);
+#endif
+
+
+#define VK_KHR_maintenance2 1
+#define VK_KHR_MAINTENANCE_2_SPEC_VERSION 1
+#define VK_KHR_MAINTENANCE_2_EXTENSION_NAME "VK_KHR_maintenance2"
+#define VK_KHR_MAINTENANCE2_SPEC_VERSION  VK_KHR_MAINTENANCE_2_SPEC_VERSION
+#define VK_KHR_MAINTENANCE2_EXTENSION_NAME VK_KHR_MAINTENANCE_2_EXTENSION_NAME
+typedef VkPointClippingBehavior VkPointClippingBehaviorKHR;
+
+typedef VkTessellationDomainOrigin VkTessellationDomainOriginKHR;
+
+typedef VkPhysicalDevicePointClippingProperties VkPhysicalDevicePointClippingPropertiesKHR;
+
+typedef VkRenderPassInputAttachmentAspectCreateInfo VkRenderPassInputAttachmentAspectCreateInfoKHR;
+
+typedef VkInputAttachmentAspectReference VkInputAttachmentAspectReferenceKHR;
+
+typedef VkImageViewUsageCreateInfo VkImageViewUsageCreateInfoKHR;
+
+typedef VkPipelineTessellationDomainOriginStateCreateInfo VkPipelineTessellationDomainOriginStateCreateInfoKHR;
+
+
+
+#define VK_KHR_get_surface_capabilities2 1
+#define VK_KHR_GET_SURFACE_CAPABILITIES_2_SPEC_VERSION 1
+#define VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME "VK_KHR_get_surface_capabilities2"
+typedef struct VkPhysicalDeviceSurfaceInfo2KHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkSurfaceKHR       surface;
+} VkPhysicalDeviceSurfaceInfo2KHR;
+
+typedef struct VkSurfaceCapabilities2KHR {
+    VkStructureType             sType;
+    void*                       pNext;
+    VkSurfaceCapabilitiesKHR    surfaceCapabilities;
+} VkSurfaceCapabilities2KHR;
+
+typedef struct VkSurfaceFormat2KHR {
+    VkStructureType       sType;
+    void*                 pNext;
+    VkSurfaceFormatKHR    surfaceFormat;
+} VkSurfaceFormat2KHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VkSurfaceCapabilities2KHR* pSurfaceCapabilities);
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceFormats2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, VkSurfaceFormat2KHR* pSurfaceFormats);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilities2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSurfaceInfo2KHR*      pSurfaceInfo,
+    VkSurfaceCapabilities2KHR*                  pSurfaceCapabilities);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceFormats2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSurfaceInfo2KHR*      pSurfaceInfo,
+    uint32_t*                                   pSurfaceFormatCount,
+    VkSurfaceFormat2KHR*                        pSurfaceFormats);
+#endif
+
+
+#define VK_KHR_variable_pointers 1
+#define VK_KHR_VARIABLE_POINTERS_SPEC_VERSION 1
+#define VK_KHR_VARIABLE_POINTERS_EXTENSION_NAME "VK_KHR_variable_pointers"
+typedef VkPhysicalDeviceVariablePointersFeatures VkPhysicalDeviceVariablePointerFeaturesKHR;
+
+typedef VkPhysicalDeviceVariablePointersFeatures VkPhysicalDeviceVariablePointersFeaturesKHR;
+
+
+
+#define VK_KHR_get_display_properties2 1
+#define VK_KHR_GET_DISPLAY_PROPERTIES_2_SPEC_VERSION 1
+#define VK_KHR_GET_DISPLAY_PROPERTIES_2_EXTENSION_NAME "VK_KHR_get_display_properties2"
+typedef struct VkDisplayProperties2KHR {
+    VkStructureType           sType;
+    void*                     pNext;
+    VkDisplayPropertiesKHR    displayProperties;
+} VkDisplayProperties2KHR;
+
+typedef struct VkDisplayPlaneProperties2KHR {
+    VkStructureType                sType;
+    void*                          pNext;
+    VkDisplayPlanePropertiesKHR    displayPlaneProperties;
+} VkDisplayPlaneProperties2KHR;
+
+typedef struct VkDisplayModeProperties2KHR {
+    VkStructureType               sType;
+    void*                         pNext;
+    VkDisplayModePropertiesKHR    displayModeProperties;
+} VkDisplayModeProperties2KHR;
+
+typedef struct VkDisplayPlaneInfo2KHR {
+    VkStructureType     sType;
+    const void*         pNext;
+    VkDisplayModeKHR    mode;
+    uint32_t            planeIndex;
+} VkDisplayPlaneInfo2KHR;
+
+typedef struct VkDisplayPlaneCapabilities2KHR {
+    VkStructureType                  sType;
+    void*                            pNext;
+    VkDisplayPlaneCapabilitiesKHR    capabilities;
+} VkDisplayPlaneCapabilities2KHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceDisplayProperties2KHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayProperties2KHR* pProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlaneProperties2KHR* pProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayModeProperties2KHR)(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModeProperties2KHR* pProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayPlaneCapabilities2KHR)(VkPhysicalDevice physicalDevice, const VkDisplayPlaneInfo2KHR* pDisplayPlaneInfo, VkDisplayPlaneCapabilities2KHR* pCapabilities);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayProperties2KHR*                    pProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayPlaneProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayPlaneProperties2KHR*               pProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayModeProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayModeProperties2KHR*                pProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneCapabilities2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkDisplayPlaneInfo2KHR*               pDisplayPlaneInfo,
+    VkDisplayPlaneCapabilities2KHR*             pCapabilities);
+#endif
+
+
+#define VK_KHR_dedicated_allocation 1
+#define VK_KHR_DEDICATED_ALLOCATION_SPEC_VERSION 3
+#define VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME "VK_KHR_dedicated_allocation"
+typedef VkMemoryDedicatedRequirements VkMemoryDedicatedRequirementsKHR;
+
+typedef VkMemoryDedicatedAllocateInfo VkMemoryDedicatedAllocateInfoKHR;
+
+
+
+#define VK_KHR_storage_buffer_storage_class 1
+#define VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_SPEC_VERSION 1
+#define VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_EXTENSION_NAME "VK_KHR_storage_buffer_storage_class"
+
+
+#define VK_KHR_relaxed_block_layout 1
+#define VK_KHR_RELAXED_BLOCK_LAYOUT_SPEC_VERSION 1
+#define VK_KHR_RELAXED_BLOCK_LAYOUT_EXTENSION_NAME "VK_KHR_relaxed_block_layout"
+
+
+#define VK_KHR_get_memory_requirements2 1
+#define VK_KHR_GET_MEMORY_REQUIREMENTS_2_SPEC_VERSION 1
+#define VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME "VK_KHR_get_memory_requirements2"
+typedef VkBufferMemoryRequirementsInfo2 VkBufferMemoryRequirementsInfo2KHR;
+
+typedef VkImageMemoryRequirementsInfo2 VkImageMemoryRequirementsInfo2KHR;
+
+typedef VkImageSparseMemoryRequirementsInfo2 VkImageSparseMemoryRequirementsInfo2KHR;
+
+typedef VkMemoryRequirements2 VkMemoryRequirements2KHR;
+
+typedef VkSparseImageMemoryRequirements2 VkSparseImageMemoryRequirements2KHR;
+
+typedef void (VKAPI_PTR *PFN_vkGetImageMemoryRequirements2KHR)(VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements);
+typedef void (VKAPI_PTR *PFN_vkGetBufferMemoryRequirements2KHR)(VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements);
+typedef void (VKAPI_PTR *PFN_vkGetImageSparseMemoryRequirements2KHR)(VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkGetImageMemoryRequirements2KHR(
+    VkDevice                                    device,
+    const VkImageMemoryRequirementsInfo2*       pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements);
+
+VKAPI_ATTR void VKAPI_CALL vkGetBufferMemoryRequirements2KHR(
+    VkDevice                                    device,
+    const VkBufferMemoryRequirementsInfo2*      pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements);
+
+VKAPI_ATTR void VKAPI_CALL vkGetImageSparseMemoryRequirements2KHR(
+    VkDevice                                    device,
+    const VkImageSparseMemoryRequirementsInfo2* pInfo,
+    uint32_t*                                   pSparseMemoryRequirementCount,
+    VkSparseImageMemoryRequirements2*           pSparseMemoryRequirements);
+#endif
+
+
+#define VK_KHR_image_format_list 1
+#define VK_KHR_IMAGE_FORMAT_LIST_SPEC_VERSION 1
+#define VK_KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME "VK_KHR_image_format_list"
+typedef VkImageFormatListCreateInfo VkImageFormatListCreateInfoKHR;
+
+
+
+#define VK_KHR_sampler_ycbcr_conversion 1
+typedef VkSamplerYcbcrConversion VkSamplerYcbcrConversionKHR;
+
+#define VK_KHR_SAMPLER_YCBCR_CONVERSION_SPEC_VERSION 14
+#define VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME "VK_KHR_sampler_ycbcr_conversion"
+typedef VkSamplerYcbcrModelConversion VkSamplerYcbcrModelConversionKHR;
+
+typedef VkSamplerYcbcrRange VkSamplerYcbcrRangeKHR;
+
+typedef VkChromaLocation VkChromaLocationKHR;
+
+typedef VkSamplerYcbcrConversionCreateInfo VkSamplerYcbcrConversionCreateInfoKHR;
+
+typedef VkSamplerYcbcrConversionInfo VkSamplerYcbcrConversionInfoKHR;
+
+typedef VkBindImagePlaneMemoryInfo VkBindImagePlaneMemoryInfoKHR;
+
+typedef VkImagePlaneMemoryRequirementsInfo VkImagePlaneMemoryRequirementsInfoKHR;
+
+typedef VkPhysicalDeviceSamplerYcbcrConversionFeatures VkPhysicalDeviceSamplerYcbcrConversionFeaturesKHR;
+
+typedef VkSamplerYcbcrConversionImageFormatProperties VkSamplerYcbcrConversionImageFormatPropertiesKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateSamplerYcbcrConversionKHR)(VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion);
+typedef void (VKAPI_PTR *PFN_vkDestroySamplerYcbcrConversionKHR)(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateSamplerYcbcrConversionKHR(
+    VkDevice                                    device,
+    const VkSamplerYcbcrConversionCreateInfo*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSamplerYcbcrConversion*                   pYcbcrConversion);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroySamplerYcbcrConversionKHR(
+    VkDevice                                    device,
+    VkSamplerYcbcrConversion                    ycbcrConversion,
+    const VkAllocationCallbacks*                pAllocator);
+#endif
+
+
+#define VK_KHR_bind_memory2 1
+#define VK_KHR_BIND_MEMORY_2_SPEC_VERSION 1
+#define VK_KHR_BIND_MEMORY_2_EXTENSION_NAME "VK_KHR_bind_memory2"
+typedef VkBindBufferMemoryInfo VkBindBufferMemoryInfoKHR;
+
+typedef VkBindImageMemoryInfo VkBindImageMemoryInfoKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkBindBufferMemory2KHR)(VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos);
+typedef VkResult (VKAPI_PTR *PFN_vkBindImageMemory2KHR)(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkBindBufferMemory2KHR(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindBufferMemoryInfo*               pBindInfos);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory2KHR(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindImageMemoryInfo*                pBindInfos);
+#endif
+
+
+#define VK_KHR_maintenance3 1
+#define VK_KHR_MAINTENANCE_3_SPEC_VERSION 1
+#define VK_KHR_MAINTENANCE_3_EXTENSION_NAME "VK_KHR_maintenance3"
+#define VK_KHR_MAINTENANCE3_SPEC_VERSION  VK_KHR_MAINTENANCE_3_SPEC_VERSION
+#define VK_KHR_MAINTENANCE3_EXTENSION_NAME VK_KHR_MAINTENANCE_3_EXTENSION_NAME
+typedef VkPhysicalDeviceMaintenance3Properties VkPhysicalDeviceMaintenance3PropertiesKHR;
+
+typedef VkDescriptorSetLayoutSupport VkDescriptorSetLayoutSupportKHR;
+
+typedef void (VKAPI_PTR *PFN_vkGetDescriptorSetLayoutSupportKHR)(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetLayoutSupportKHR(
+    VkDevice                                    device,
+    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
+    VkDescriptorSetLayoutSupport*               pSupport);
+#endif
+
+
+#define VK_KHR_draw_indirect_count 1
+#define VK_KHR_DRAW_INDIRECT_COUNT_SPEC_VERSION 1
+#define VK_KHR_DRAW_INDIRECT_COUNT_EXTENSION_NAME "VK_KHR_draw_indirect_count"
+typedef void (VKAPI_PTR *PFN_vkCmdDrawIndirectCountKHR)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride);
+typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexedIndirectCountKHR)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirectCountKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirectCountKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride);
+#endif
+
+
+#define VK_KHR_shader_subgroup_extended_types 1
+#define VK_KHR_SHADER_SUBGROUP_EXTENDED_TYPES_SPEC_VERSION 1
+#define VK_KHR_SHADER_SUBGROUP_EXTENDED_TYPES_EXTENSION_NAME "VK_KHR_shader_subgroup_extended_types"
+typedef VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR;
+
+
+
+#define VK_KHR_8bit_storage 1
+#define VK_KHR_8BIT_STORAGE_SPEC_VERSION  1
+#define VK_KHR_8BIT_STORAGE_EXTENSION_NAME "VK_KHR_8bit_storage"
+typedef VkPhysicalDevice8BitStorageFeatures VkPhysicalDevice8BitStorageFeaturesKHR;
+
+
+
+#define VK_KHR_shader_atomic_int64 1
+#define VK_KHR_SHADER_ATOMIC_INT64_SPEC_VERSION 1
+#define VK_KHR_SHADER_ATOMIC_INT64_EXTENSION_NAME "VK_KHR_shader_atomic_int64"
+typedef VkPhysicalDeviceShaderAtomicInt64Features VkPhysicalDeviceShaderAtomicInt64FeaturesKHR;
+
+
+
+#define VK_KHR_shader_clock 1
+#define VK_KHR_SHADER_CLOCK_SPEC_VERSION  1
+#define VK_KHR_SHADER_CLOCK_EXTENSION_NAME "VK_KHR_shader_clock"
+typedef struct VkPhysicalDeviceShaderClockFeaturesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           shaderSubgroupClock;
+    VkBool32           shaderDeviceClock;
+} VkPhysicalDeviceShaderClockFeaturesKHR;
+
+
+
+#define VK_KHR_video_decode_h265 1
+#include "vk_video/vulkan_video_codec_h265std.h"
+#include "vk_video/vulkan_video_codec_h265std_decode.h"
+#define VK_KHR_VIDEO_DECODE_H265_SPEC_VERSION 7
+#define VK_KHR_VIDEO_DECODE_H265_EXTENSION_NAME "VK_KHR_video_decode_h265"
+typedef struct VkVideoDecodeH265ProfileInfoKHR {
+    VkStructureType           sType;
+    const void*               pNext;
+    StdVideoH265ProfileIdc    stdProfileIdc;
+} VkVideoDecodeH265ProfileInfoKHR;
+
+typedef struct VkVideoDecodeH265CapabilitiesKHR {
+    VkStructureType         sType;
+    void*                   pNext;
+    StdVideoH265LevelIdc    maxLevelIdc;
+} VkVideoDecodeH265CapabilitiesKHR;
+
+typedef struct VkVideoDecodeH265SessionParametersAddInfoKHR {
+    VkStructureType                            sType;
+    const void*                                pNext;
+    uint32_t                                   stdVPSCount;
+    const StdVideoH265VideoParameterSet*       pStdVPSs;
+    uint32_t                                   stdSPSCount;
+    const StdVideoH265SequenceParameterSet*    pStdSPSs;
+    uint32_t                                   stdPPSCount;
+    const StdVideoH265PictureParameterSet*     pStdPPSs;
+} VkVideoDecodeH265SessionParametersAddInfoKHR;
+
+typedef struct VkVideoDecodeH265SessionParametersCreateInfoKHR {
+    VkStructureType                                        sType;
+    const void*                                            pNext;
+    uint32_t                                               maxStdVPSCount;
+    uint32_t                                               maxStdSPSCount;
+    uint32_t                                               maxStdPPSCount;
+    const VkVideoDecodeH265SessionParametersAddInfoKHR*    pParametersAddInfo;
+} VkVideoDecodeH265SessionParametersCreateInfoKHR;
+
+typedef struct VkVideoDecodeH265PictureInfoKHR {
+    VkStructureType                   sType;
+    const void*                       pNext;
+    StdVideoDecodeH265PictureInfo*    pStdPictureInfo;
+    uint32_t                          sliceSegmentCount;
+    const uint32_t*                   pSliceSegmentOffsets;
+} VkVideoDecodeH265PictureInfoKHR;
+
+typedef struct VkVideoDecodeH265DpbSlotInfoKHR {
+    VkStructureType                           sType;
+    const void*                               pNext;
+    const StdVideoDecodeH265ReferenceInfo*    pStdReferenceInfo;
+} VkVideoDecodeH265DpbSlotInfoKHR;
+
+
+
+#define VK_KHR_global_priority 1
+#define VK_MAX_GLOBAL_PRIORITY_SIZE_KHR   16U
+#define VK_KHR_GLOBAL_PRIORITY_SPEC_VERSION 1
+#define VK_KHR_GLOBAL_PRIORITY_EXTENSION_NAME "VK_KHR_global_priority"
+
+typedef enum VkQueueGlobalPriorityKHR {
+    VK_QUEUE_GLOBAL_PRIORITY_LOW_KHR = 128,
+    VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_KHR = 256,
+    VK_QUEUE_GLOBAL_PRIORITY_HIGH_KHR = 512,
+    VK_QUEUE_GLOBAL_PRIORITY_REALTIME_KHR = 1024,
+    VK_QUEUE_GLOBAL_PRIORITY_LOW_EXT = VK_QUEUE_GLOBAL_PRIORITY_LOW_KHR,
+    VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_EXT = VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_KHR,
+    VK_QUEUE_GLOBAL_PRIORITY_HIGH_EXT = VK_QUEUE_GLOBAL_PRIORITY_HIGH_KHR,
+    VK_QUEUE_GLOBAL_PRIORITY_REALTIME_EXT = VK_QUEUE_GLOBAL_PRIORITY_REALTIME_KHR,
+    VK_QUEUE_GLOBAL_PRIORITY_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkQueueGlobalPriorityKHR;
+typedef struct VkDeviceQueueGlobalPriorityCreateInfoKHR {
+    VkStructureType             sType;
+    const void*                 pNext;
+    VkQueueGlobalPriorityKHR    globalPriority;
+} VkDeviceQueueGlobalPriorityCreateInfoKHR;
+
+typedef struct VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           globalPriorityQuery;
+} VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR;
+
+typedef struct VkQueueFamilyGlobalPriorityPropertiesKHR {
+    VkStructureType             sType;
+    void*                       pNext;
+    uint32_t                    priorityCount;
+    VkQueueGlobalPriorityKHR    priorities[VK_MAX_GLOBAL_PRIORITY_SIZE_KHR];
+} VkQueueFamilyGlobalPriorityPropertiesKHR;
+
+
+
+#define VK_KHR_driver_properties 1
+#define VK_KHR_DRIVER_PROPERTIES_SPEC_VERSION 1
+#define VK_KHR_DRIVER_PROPERTIES_EXTENSION_NAME "VK_KHR_driver_properties"
+#define VK_MAX_DRIVER_NAME_SIZE_KHR       VK_MAX_DRIVER_NAME_SIZE
+#define VK_MAX_DRIVER_INFO_SIZE_KHR       VK_MAX_DRIVER_INFO_SIZE
+typedef VkDriverId VkDriverIdKHR;
+
+typedef VkConformanceVersion VkConformanceVersionKHR;
+
+typedef VkPhysicalDeviceDriverProperties VkPhysicalDeviceDriverPropertiesKHR;
+
+
+
+#define VK_KHR_shader_float_controls 1
+#define VK_KHR_SHADER_FLOAT_CONTROLS_SPEC_VERSION 4
+#define VK_KHR_SHADER_FLOAT_CONTROLS_EXTENSION_NAME "VK_KHR_shader_float_controls"
+typedef VkShaderFloatControlsIndependence VkShaderFloatControlsIndependenceKHR;
+
+typedef VkPhysicalDeviceFloatControlsProperties VkPhysicalDeviceFloatControlsPropertiesKHR;
+
+
+
+#define VK_KHR_depth_stencil_resolve 1
+#define VK_KHR_DEPTH_STENCIL_RESOLVE_SPEC_VERSION 1
+#define VK_KHR_DEPTH_STENCIL_RESOLVE_EXTENSION_NAME "VK_KHR_depth_stencil_resolve"
+typedef VkResolveModeFlagBits VkResolveModeFlagBitsKHR;
+
+typedef VkResolveModeFlags VkResolveModeFlagsKHR;
+
+typedef VkSubpassDescriptionDepthStencilResolve VkSubpassDescriptionDepthStencilResolveKHR;
+
+typedef VkPhysicalDeviceDepthStencilResolveProperties VkPhysicalDeviceDepthStencilResolvePropertiesKHR;
+
+
+
+#define VK_KHR_swapchain_mutable_format 1
+#define VK_KHR_SWAPCHAIN_MUTABLE_FORMAT_SPEC_VERSION 1
+#define VK_KHR_SWAPCHAIN_MUTABLE_FORMAT_EXTENSION_NAME "VK_KHR_swapchain_mutable_format"
+
+
+#define VK_KHR_timeline_semaphore 1
+#define VK_KHR_TIMELINE_SEMAPHORE_SPEC_VERSION 2
+#define VK_KHR_TIMELINE_SEMAPHORE_EXTENSION_NAME "VK_KHR_timeline_semaphore"
+typedef VkSemaphoreType VkSemaphoreTypeKHR;
+
+typedef VkSemaphoreWaitFlagBits VkSemaphoreWaitFlagBitsKHR;
+
+typedef VkSemaphoreWaitFlags VkSemaphoreWaitFlagsKHR;
+
+typedef VkPhysicalDeviceTimelineSemaphoreFeatures VkPhysicalDeviceTimelineSemaphoreFeaturesKHR;
+
+typedef VkPhysicalDeviceTimelineSemaphoreProperties VkPhysicalDeviceTimelineSemaphorePropertiesKHR;
+
+typedef VkSemaphoreTypeCreateInfo VkSemaphoreTypeCreateInfoKHR;
+
+typedef VkTimelineSemaphoreSubmitInfo VkTimelineSemaphoreSubmitInfoKHR;
+
+typedef VkSemaphoreWaitInfo VkSemaphoreWaitInfoKHR;
+
+typedef VkSemaphoreSignalInfo VkSemaphoreSignalInfoKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetSemaphoreCounterValueKHR)(VkDevice device, VkSemaphore semaphore, uint64_t* pValue);
+typedef VkResult (VKAPI_PTR *PFN_vkWaitSemaphoresKHR)(VkDevice device, const VkSemaphoreWaitInfo* pWaitInfo, uint64_t timeout);
+typedef VkResult (VKAPI_PTR *PFN_vkSignalSemaphoreKHR)(VkDevice device, const VkSemaphoreSignalInfo* pSignalInfo);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreCounterValueKHR(
+    VkDevice                                    device,
+    VkSemaphore                                 semaphore,
+    uint64_t*                                   pValue);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkWaitSemaphoresKHR(
+    VkDevice                                    device,
+    const VkSemaphoreWaitInfo*                  pWaitInfo,
+    uint64_t                                    timeout);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkSignalSemaphoreKHR(
+    VkDevice                                    device,
+    const VkSemaphoreSignalInfo*                pSignalInfo);
+#endif
+
+
+#define VK_KHR_vulkan_memory_model 1
+#define VK_KHR_VULKAN_MEMORY_MODEL_SPEC_VERSION 3
+#define VK_KHR_VULKAN_MEMORY_MODEL_EXTENSION_NAME "VK_KHR_vulkan_memory_model"
+typedef VkPhysicalDeviceVulkanMemoryModelFeatures VkPhysicalDeviceVulkanMemoryModelFeaturesKHR;
+
+
+
+#define VK_KHR_shader_terminate_invocation 1
+#define VK_KHR_SHADER_TERMINATE_INVOCATION_SPEC_VERSION 1
+#define VK_KHR_SHADER_TERMINATE_INVOCATION_EXTENSION_NAME "VK_KHR_shader_terminate_invocation"
+typedef VkPhysicalDeviceShaderTerminateInvocationFeatures VkPhysicalDeviceShaderTerminateInvocationFeaturesKHR;
+
+
+
+#define VK_KHR_fragment_shading_rate 1
+#define VK_KHR_FRAGMENT_SHADING_RATE_SPEC_VERSION 2
+#define VK_KHR_FRAGMENT_SHADING_RATE_EXTENSION_NAME "VK_KHR_fragment_shading_rate"
+
+typedef enum VkFragmentShadingRateCombinerOpKHR {
+    VK_FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR = 0,
+    VK_FRAGMENT_SHADING_RATE_COMBINER_OP_REPLACE_KHR = 1,
+    VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MIN_KHR = 2,
+    VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MAX_KHR = 3,
+    VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MUL_KHR = 4,
+    VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkFragmentShadingRateCombinerOpKHR;
+typedef struct VkFragmentShadingRateAttachmentInfoKHR {
+    VkStructureType                  sType;
+    const void*                      pNext;
+    const VkAttachmentReference2*    pFragmentShadingRateAttachment;
+    VkExtent2D                       shadingRateAttachmentTexelSize;
+} VkFragmentShadingRateAttachmentInfoKHR;
+
+typedef struct VkPipelineFragmentShadingRateStateCreateInfoKHR {
+    VkStructureType                       sType;
+    const void*                           pNext;
+    VkExtent2D                            fragmentSize;
+    VkFragmentShadingRateCombinerOpKHR    combinerOps[2];
+} VkPipelineFragmentShadingRateStateCreateInfoKHR;
+
+typedef struct VkPhysicalDeviceFragmentShadingRateFeaturesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           pipelineFragmentShadingRate;
+    VkBool32           primitiveFragmentShadingRate;
+    VkBool32           attachmentFragmentShadingRate;
+} VkPhysicalDeviceFragmentShadingRateFeaturesKHR;
+
+typedef struct VkPhysicalDeviceFragmentShadingRatePropertiesKHR {
+    VkStructureType          sType;
+    void*                    pNext;
+    VkExtent2D               minFragmentShadingRateAttachmentTexelSize;
+    VkExtent2D               maxFragmentShadingRateAttachmentTexelSize;
+    uint32_t                 maxFragmentShadingRateAttachmentTexelSizeAspectRatio;
+    VkBool32                 primitiveFragmentShadingRateWithMultipleViewports;
+    VkBool32                 layeredShadingRateAttachments;
+    VkBool32                 fragmentShadingRateNonTrivialCombinerOps;
+    VkExtent2D               maxFragmentSize;
+    uint32_t                 maxFragmentSizeAspectRatio;
+    uint32_t                 maxFragmentShadingRateCoverageSamples;
+    VkSampleCountFlagBits    maxFragmentShadingRateRasterizationSamples;
+    VkBool32                 fragmentShadingRateWithShaderDepthStencilWrites;
+    VkBool32                 fragmentShadingRateWithSampleMask;
+    VkBool32                 fragmentShadingRateWithShaderSampleMask;
+    VkBool32                 fragmentShadingRateWithConservativeRasterization;
+    VkBool32                 fragmentShadingRateWithFragmentShaderInterlock;
+    VkBool32                 fragmentShadingRateWithCustomSampleLocations;
+    VkBool32                 fragmentShadingRateStrictMultiplyCombiner;
+} VkPhysicalDeviceFragmentShadingRatePropertiesKHR;
+
+typedef struct VkPhysicalDeviceFragmentShadingRateKHR {
+    VkStructureType       sType;
+    void*                 pNext;
+    VkSampleCountFlags    sampleCounts;
+    VkExtent2D            fragmentSize;
+} VkPhysicalDeviceFragmentShadingRateKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR)(VkPhysicalDevice physicalDevice, uint32_t* pFragmentShadingRateCount, VkPhysicalDeviceFragmentShadingRateKHR* pFragmentShadingRates);
+typedef void (VKAPI_PTR *PFN_vkCmdSetFragmentShadingRateKHR)(VkCommandBuffer           commandBuffer, const VkExtent2D*                           pFragmentSize, const VkFragmentShadingRateCombinerOpKHR    combinerOps[2]);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceFragmentShadingRatesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pFragmentShadingRateCount,
+    VkPhysicalDeviceFragmentShadingRateKHR*     pFragmentShadingRates);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetFragmentShadingRateKHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkExtent2D*                           pFragmentSize,
+    const VkFragmentShadingRateCombinerOpKHR    combinerOps[2]);
+#endif
+
+
+#define VK_KHR_spirv_1_4 1
+#define VK_KHR_SPIRV_1_4_SPEC_VERSION     1
+#define VK_KHR_SPIRV_1_4_EXTENSION_NAME   "VK_KHR_spirv_1_4"
+
+
+#define VK_KHR_surface_protected_capabilities 1
+#define VK_KHR_SURFACE_PROTECTED_CAPABILITIES_SPEC_VERSION 1
+#define VK_KHR_SURFACE_PROTECTED_CAPABILITIES_EXTENSION_NAME "VK_KHR_surface_protected_capabilities"
+typedef struct VkSurfaceProtectedCapabilitiesKHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkBool32           supportsProtected;
+} VkSurfaceProtectedCapabilitiesKHR;
+
+
+
+#define VK_KHR_separate_depth_stencil_layouts 1
+#define VK_KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_SPEC_VERSION 1
+#define VK_KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_EXTENSION_NAME "VK_KHR_separate_depth_stencil_layouts"
+typedef VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR;
+
+typedef VkAttachmentReferenceStencilLayout VkAttachmentReferenceStencilLayoutKHR;
+
+typedef VkAttachmentDescriptionStencilLayout VkAttachmentDescriptionStencilLayoutKHR;
+
+
+
+#define VK_KHR_present_wait 1
+#define VK_KHR_PRESENT_WAIT_SPEC_VERSION  1
+#define VK_KHR_PRESENT_WAIT_EXTENSION_NAME "VK_KHR_present_wait"
+typedef struct VkPhysicalDevicePresentWaitFeaturesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           presentWait;
+} VkPhysicalDevicePresentWaitFeaturesKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkWaitForPresentKHR)(VkDevice device, VkSwapchainKHR swapchain, uint64_t presentId, uint64_t timeout);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkWaitForPresentKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    uint64_t                                    presentId,
+    uint64_t                                    timeout);
+#endif
+
+
+#define VK_KHR_uniform_buffer_standard_layout 1
+#define VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_SPEC_VERSION 1
+#define VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_EXTENSION_NAME "VK_KHR_uniform_buffer_standard_layout"
+typedef VkPhysicalDeviceUniformBufferStandardLayoutFeatures VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR;
+
+
+
+#define VK_KHR_buffer_device_address 1
+#define VK_KHR_BUFFER_DEVICE_ADDRESS_SPEC_VERSION 1
+#define VK_KHR_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME "VK_KHR_buffer_device_address"
+typedef VkPhysicalDeviceBufferDeviceAddressFeatures VkPhysicalDeviceBufferDeviceAddressFeaturesKHR;
+
+typedef VkBufferDeviceAddressInfo VkBufferDeviceAddressInfoKHR;
+
+typedef VkBufferOpaqueCaptureAddressCreateInfo VkBufferOpaqueCaptureAddressCreateInfoKHR;
+
+typedef VkMemoryOpaqueCaptureAddressAllocateInfo VkMemoryOpaqueCaptureAddressAllocateInfoKHR;
+
+typedef VkDeviceMemoryOpaqueCaptureAddressInfo VkDeviceMemoryOpaqueCaptureAddressInfoKHR;
+
+typedef VkDeviceAddress (VKAPI_PTR *PFN_vkGetBufferDeviceAddressKHR)(VkDevice device, const VkBufferDeviceAddressInfo* pInfo);
+typedef uint64_t (VKAPI_PTR *PFN_vkGetBufferOpaqueCaptureAddressKHR)(VkDevice device, const VkBufferDeviceAddressInfo* pInfo);
+typedef uint64_t (VKAPI_PTR *PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR)(VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkDeviceAddress VKAPI_CALL vkGetBufferDeviceAddressKHR(
+    VkDevice                                    device,
+    const VkBufferDeviceAddressInfo*            pInfo);
+
+VKAPI_ATTR uint64_t VKAPI_CALL vkGetBufferOpaqueCaptureAddressKHR(
+    VkDevice                                    device,
+    const VkBufferDeviceAddressInfo*            pInfo);
+
+VKAPI_ATTR uint64_t VKAPI_CALL vkGetDeviceMemoryOpaqueCaptureAddressKHR(
+    VkDevice                                    device,
+    const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo);
+#endif
+
+
+#define VK_KHR_deferred_host_operations 1
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDeferredOperationKHR)
+#define VK_KHR_DEFERRED_HOST_OPERATIONS_SPEC_VERSION 4
+#define VK_KHR_DEFERRED_HOST_OPERATIONS_EXTENSION_NAME "VK_KHR_deferred_host_operations"
+typedef VkResult (VKAPI_PTR *PFN_vkCreateDeferredOperationKHR)(VkDevice device, const VkAllocationCallbacks* pAllocator, VkDeferredOperationKHR* pDeferredOperation);
+typedef void (VKAPI_PTR *PFN_vkDestroyDeferredOperationKHR)(VkDevice device, VkDeferredOperationKHR operation, const VkAllocationCallbacks* pAllocator);
+typedef uint32_t (VKAPI_PTR *PFN_vkGetDeferredOperationMaxConcurrencyKHR)(VkDevice device, VkDeferredOperationKHR operation);
+typedef VkResult (VKAPI_PTR *PFN_vkGetDeferredOperationResultKHR)(VkDevice device, VkDeferredOperationKHR operation);
+typedef VkResult (VKAPI_PTR *PFN_vkDeferredOperationJoinKHR)(VkDevice device, VkDeferredOperationKHR operation);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateDeferredOperationKHR(
+    VkDevice                                    device,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDeferredOperationKHR*                     pDeferredOperation);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyDeferredOperationKHR(
+    VkDevice                                    device,
+    VkDeferredOperationKHR                      operation,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR uint32_t VKAPI_CALL vkGetDeferredOperationMaxConcurrencyKHR(
+    VkDevice                                    device,
+    VkDeferredOperationKHR                      operation);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetDeferredOperationResultKHR(
+    VkDevice                                    device,
+    VkDeferredOperationKHR                      operation);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkDeferredOperationJoinKHR(
+    VkDevice                                    device,
+    VkDeferredOperationKHR                      operation);
+#endif
+
+
+#define VK_KHR_pipeline_executable_properties 1
+#define VK_KHR_PIPELINE_EXECUTABLE_PROPERTIES_SPEC_VERSION 1
+#define VK_KHR_PIPELINE_EXECUTABLE_PROPERTIES_EXTENSION_NAME "VK_KHR_pipeline_executable_properties"
+
+typedef enum VkPipelineExecutableStatisticFormatKHR {
+    VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_BOOL32_KHR = 0,
+    VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_INT64_KHR = 1,
+    VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_UINT64_KHR = 2,
+    VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_FLOAT64_KHR = 3,
+    VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkPipelineExecutableStatisticFormatKHR;
+typedef struct VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           pipelineExecutableInfo;
+} VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR;
+
+typedef struct VkPipelineInfoKHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkPipeline         pipeline;
+} VkPipelineInfoKHR;
+
+typedef struct VkPipelineExecutablePropertiesKHR {
+    VkStructureType       sType;
+    void*                 pNext;
+    VkShaderStageFlags    stages;
+    char                  name[VK_MAX_DESCRIPTION_SIZE];
+    char                  description[VK_MAX_DESCRIPTION_SIZE];
+    uint32_t              subgroupSize;
+} VkPipelineExecutablePropertiesKHR;
+
+typedef struct VkPipelineExecutableInfoKHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkPipeline         pipeline;
+    uint32_t           executableIndex;
+} VkPipelineExecutableInfoKHR;
+
+typedef union VkPipelineExecutableStatisticValueKHR {
+    VkBool32    b32;
+    int64_t     i64;
+    uint64_t    u64;
+    double      f64;
+} VkPipelineExecutableStatisticValueKHR;
+
+typedef struct VkPipelineExecutableStatisticKHR {
+    VkStructureType                           sType;
+    void*                                     pNext;
+    char                                      name[VK_MAX_DESCRIPTION_SIZE];
+    char                                      description[VK_MAX_DESCRIPTION_SIZE];
+    VkPipelineExecutableStatisticFormatKHR    format;
+    VkPipelineExecutableStatisticValueKHR     value;
+} VkPipelineExecutableStatisticKHR;
+
+typedef struct VkPipelineExecutableInternalRepresentationKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    char               name[VK_MAX_DESCRIPTION_SIZE];
+    char               description[VK_MAX_DESCRIPTION_SIZE];
+    VkBool32           isText;
+    size_t             dataSize;
+    void*              pData;
+} VkPipelineExecutableInternalRepresentationKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetPipelineExecutablePropertiesKHR)(VkDevice                        device, const VkPipelineInfoKHR*        pPipelineInfo, uint32_t* pExecutableCount, VkPipelineExecutablePropertiesKHR* pProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkGetPipelineExecutableStatisticsKHR)(VkDevice                        device, const VkPipelineExecutableInfoKHR*  pExecutableInfo, uint32_t* pStatisticCount, VkPipelineExecutableStatisticKHR* pStatistics);
+typedef VkResult (VKAPI_PTR *PFN_vkGetPipelineExecutableInternalRepresentationsKHR)(VkDevice                        device, const VkPipelineExecutableInfoKHR*  pExecutableInfo, uint32_t* pInternalRepresentationCount, VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineExecutablePropertiesKHR(
+    VkDevice                                    device,
+    const VkPipelineInfoKHR*                    pPipelineInfo,
+    uint32_t*                                   pExecutableCount,
+    VkPipelineExecutablePropertiesKHR*          pProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineExecutableStatisticsKHR(
+    VkDevice                                    device,
+    const VkPipelineExecutableInfoKHR*          pExecutableInfo,
+    uint32_t*                                   pStatisticCount,
+    VkPipelineExecutableStatisticKHR*           pStatistics);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineExecutableInternalRepresentationsKHR(
+    VkDevice                                    device,
+    const VkPipelineExecutableInfoKHR*          pExecutableInfo,
+    uint32_t*                                   pInternalRepresentationCount,
+    VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations);
+#endif
+
+
+#define VK_KHR_shader_integer_dot_product 1
+#define VK_KHR_SHADER_INTEGER_DOT_PRODUCT_SPEC_VERSION 1
+#define VK_KHR_SHADER_INTEGER_DOT_PRODUCT_EXTENSION_NAME "VK_KHR_shader_integer_dot_product"
+typedef VkPhysicalDeviceShaderIntegerDotProductFeatures VkPhysicalDeviceShaderIntegerDotProductFeaturesKHR;
+
+typedef VkPhysicalDeviceShaderIntegerDotProductProperties VkPhysicalDeviceShaderIntegerDotProductPropertiesKHR;
+
+
+
+#define VK_KHR_pipeline_library 1
+#define VK_KHR_PIPELINE_LIBRARY_SPEC_VERSION 1
+#define VK_KHR_PIPELINE_LIBRARY_EXTENSION_NAME "VK_KHR_pipeline_library"
+typedef struct VkPipelineLibraryCreateInfoKHR {
+    VkStructureType      sType;
+    const void*          pNext;
+    uint32_t             libraryCount;
+    const VkPipeline*    pLibraries;
+} VkPipelineLibraryCreateInfoKHR;
+
+
+
+#define VK_KHR_shader_non_semantic_info 1
+#define VK_KHR_SHADER_NON_SEMANTIC_INFO_SPEC_VERSION 1
+#define VK_KHR_SHADER_NON_SEMANTIC_INFO_EXTENSION_NAME "VK_KHR_shader_non_semantic_info"
+
+
+#define VK_KHR_present_id 1
+#define VK_KHR_PRESENT_ID_SPEC_VERSION    1
+#define VK_KHR_PRESENT_ID_EXTENSION_NAME  "VK_KHR_present_id"
+typedef struct VkPresentIdKHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           swapchainCount;
+    const uint64_t*    pPresentIds;
+} VkPresentIdKHR;
+
+typedef struct VkPhysicalDevicePresentIdFeaturesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           presentId;
+} VkPhysicalDevicePresentIdFeaturesKHR;
+
+
+
+#define VK_KHR_synchronization2 1
+#define VK_KHR_SYNCHRONIZATION_2_SPEC_VERSION 1
+#define VK_KHR_SYNCHRONIZATION_2_EXTENSION_NAME "VK_KHR_synchronization2"
+typedef VkPipelineStageFlags2 VkPipelineStageFlags2KHR;
+
+typedef VkPipelineStageFlagBits2 VkPipelineStageFlagBits2KHR;
+
+typedef VkAccessFlags2 VkAccessFlags2KHR;
+
+typedef VkAccessFlagBits2 VkAccessFlagBits2KHR;
+
+typedef VkSubmitFlagBits VkSubmitFlagBitsKHR;
+
+typedef VkSubmitFlags VkSubmitFlagsKHR;
+
+typedef VkMemoryBarrier2 VkMemoryBarrier2KHR;
+
+typedef VkBufferMemoryBarrier2 VkBufferMemoryBarrier2KHR;
+
+typedef VkImageMemoryBarrier2 VkImageMemoryBarrier2KHR;
+
+typedef VkDependencyInfo VkDependencyInfoKHR;
+
+typedef VkSubmitInfo2 VkSubmitInfo2KHR;
+
+typedef VkSemaphoreSubmitInfo VkSemaphoreSubmitInfoKHR;
+
+typedef VkCommandBufferSubmitInfo VkCommandBufferSubmitInfoKHR;
+
+typedef VkPhysicalDeviceSynchronization2Features VkPhysicalDeviceSynchronization2FeaturesKHR;
+
+typedef struct VkQueueFamilyCheckpointProperties2NV {
+    VkStructureType          sType;
+    void*                    pNext;
+    VkPipelineStageFlags2    checkpointExecutionStageMask;
+} VkQueueFamilyCheckpointProperties2NV;
+
+typedef struct VkCheckpointData2NV {
+    VkStructureType          sType;
+    void*                    pNext;
+    VkPipelineStageFlags2    stage;
+    void*                    pCheckpointMarker;
+} VkCheckpointData2NV;
+
+typedef void (VKAPI_PTR *PFN_vkCmdSetEvent2KHR)(VkCommandBuffer                   commandBuffer, VkEvent                                             event, const VkDependencyInfo*                             pDependencyInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdResetEvent2KHR)(VkCommandBuffer                   commandBuffer, VkEvent                                             event, VkPipelineStageFlags2               stageMask);
+typedef void (VKAPI_PTR *PFN_vkCmdWaitEvents2KHR)(VkCommandBuffer                   commandBuffer, uint32_t                                            eventCount, const VkEvent*                     pEvents, const VkDependencyInfo*            pDependencyInfos);
+typedef void (VKAPI_PTR *PFN_vkCmdPipelineBarrier2KHR)(VkCommandBuffer                   commandBuffer, const VkDependencyInfo*                             pDependencyInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdWriteTimestamp2KHR)(VkCommandBuffer                   commandBuffer, VkPipelineStageFlags2               stage, VkQueryPool                                         queryPool, uint32_t                                            query);
+typedef VkResult (VKAPI_PTR *PFN_vkQueueSubmit2KHR)(VkQueue                           queue, uint32_t                            submitCount, const VkSubmitInfo2*              pSubmits, VkFence           fence);
+typedef void (VKAPI_PTR *PFN_vkCmdWriteBufferMarker2AMD)(VkCommandBuffer                   commandBuffer, VkPipelineStageFlags2               stage, VkBuffer                                            dstBuffer, VkDeviceSize                                        dstOffset, uint32_t                                            marker);
+typedef void (VKAPI_PTR *PFN_vkGetQueueCheckpointData2NV)(VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointData2NV* pCheckpointData);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdSetEvent2KHR(
+    VkCommandBuffer                             commandBuffer,
+    VkEvent                                     event,
+    const VkDependencyInfo*                     pDependencyInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdResetEvent2KHR(
+    VkCommandBuffer                             commandBuffer,
+    VkEvent                                     event,
+    VkPipelineStageFlags2                       stageMask);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdWaitEvents2KHR(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    eventCount,
+    const VkEvent*                              pEvents,
+    const VkDependencyInfo*                     pDependencyInfos);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdPipelineBarrier2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkDependencyInfo*                     pDependencyInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdWriteTimestamp2KHR(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineStageFlags2                       stage,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkQueueSubmit2KHR(
+    VkQueue                                     queue,
+    uint32_t                                    submitCount,
+    const VkSubmitInfo2*                        pSubmits,
+    VkFence                                     fence);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdWriteBufferMarker2AMD(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineStageFlags2                       stage,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    uint32_t                                    marker);
+
+VKAPI_ATTR void VKAPI_CALL vkGetQueueCheckpointData2NV(
+    VkQueue                                     queue,
+    uint32_t*                                   pCheckpointDataCount,
+    VkCheckpointData2NV*                        pCheckpointData);
+#endif
+
+
+#define VK_KHR_fragment_shader_barycentric 1
+#define VK_KHR_FRAGMENT_SHADER_BARYCENTRIC_SPEC_VERSION 1
+#define VK_KHR_FRAGMENT_SHADER_BARYCENTRIC_EXTENSION_NAME "VK_KHR_fragment_shader_barycentric"
+typedef struct VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           fragmentShaderBarycentric;
+} VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR;
+
+typedef struct VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           triStripVertexOrderIndependentOfProvokingVertex;
+} VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR;
+
+
+
+#define VK_KHR_shader_subgroup_uniform_control_flow 1
+#define VK_KHR_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_SPEC_VERSION 1
+#define VK_KHR_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_EXTENSION_NAME "VK_KHR_shader_subgroup_uniform_control_flow"
+typedef struct VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           shaderSubgroupUniformControlFlow;
+} VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR;
+
+
+
+#define VK_KHR_zero_initialize_workgroup_memory 1
+#define VK_KHR_ZERO_INITIALIZE_WORKGROUP_MEMORY_SPEC_VERSION 1
+#define VK_KHR_ZERO_INITIALIZE_WORKGROUP_MEMORY_EXTENSION_NAME "VK_KHR_zero_initialize_workgroup_memory"
+typedef VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR;
+
+
+
+#define VK_KHR_workgroup_memory_explicit_layout 1
+#define VK_KHR_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_SPEC_VERSION 1
+#define VK_KHR_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_EXTENSION_NAME "VK_KHR_workgroup_memory_explicit_layout"
+typedef struct VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           workgroupMemoryExplicitLayout;
+    VkBool32           workgroupMemoryExplicitLayoutScalarBlockLayout;
+    VkBool32           workgroupMemoryExplicitLayout8BitAccess;
+    VkBool32           workgroupMemoryExplicitLayout16BitAccess;
+} VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR;
+
+
+
+#define VK_KHR_copy_commands2 1
+#define VK_KHR_COPY_COMMANDS_2_SPEC_VERSION 1
+#define VK_KHR_COPY_COMMANDS_2_EXTENSION_NAME "VK_KHR_copy_commands2"
+typedef VkCopyBufferInfo2 VkCopyBufferInfo2KHR;
+
+typedef VkCopyImageInfo2 VkCopyImageInfo2KHR;
+
+typedef VkCopyBufferToImageInfo2 VkCopyBufferToImageInfo2KHR;
+
+typedef VkCopyImageToBufferInfo2 VkCopyImageToBufferInfo2KHR;
+
+typedef VkBlitImageInfo2 VkBlitImageInfo2KHR;
+
+typedef VkResolveImageInfo2 VkResolveImageInfo2KHR;
+
+typedef VkBufferCopy2 VkBufferCopy2KHR;
+
+typedef VkImageCopy2 VkImageCopy2KHR;
+
+typedef VkImageBlit2 VkImageBlit2KHR;
+
+typedef VkBufferImageCopy2 VkBufferImageCopy2KHR;
+
+typedef VkImageResolve2 VkImageResolve2KHR;
+
+typedef void (VKAPI_PTR *PFN_vkCmdCopyBuffer2KHR)(VkCommandBuffer commandBuffer, const VkCopyBufferInfo2* pCopyBufferInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdCopyImage2KHR)(VkCommandBuffer commandBuffer, const VkCopyImageInfo2* pCopyImageInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdCopyBufferToImage2KHR)(VkCommandBuffer commandBuffer, const VkCopyBufferToImageInfo2* pCopyBufferToImageInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdCopyImageToBuffer2KHR)(VkCommandBuffer commandBuffer, const VkCopyImageToBufferInfo2* pCopyImageToBufferInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdBlitImage2KHR)(VkCommandBuffer commandBuffer, const VkBlitImageInfo2* pBlitImageInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdResolveImage2KHR)(VkCommandBuffer commandBuffer, const VkResolveImageInfo2* pResolveImageInfo);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyBuffer2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkCopyBufferInfo2*                    pCopyBufferInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyImage2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkCopyImageInfo2*                     pCopyImageInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyBufferToImage2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkCopyBufferToImageInfo2*             pCopyBufferToImageInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyImageToBuffer2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkCopyImageToBufferInfo2*             pCopyImageToBufferInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBlitImage2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkBlitImageInfo2*                     pBlitImageInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdResolveImage2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkResolveImageInfo2*                  pResolveImageInfo);
+#endif
+
+
+#define VK_KHR_format_feature_flags2 1
+#define VK_KHR_FORMAT_FEATURE_FLAGS_2_SPEC_VERSION 2
+#define VK_KHR_FORMAT_FEATURE_FLAGS_2_EXTENSION_NAME "VK_KHR_format_feature_flags2"
+typedef VkFormatFeatureFlags2 VkFormatFeatureFlags2KHR;
+
+typedef VkFormatFeatureFlagBits2 VkFormatFeatureFlagBits2KHR;
+
+typedef VkFormatProperties3 VkFormatProperties3KHR;
+
+
+
+#define VK_KHR_ray_tracing_maintenance1 1
+#define VK_KHR_RAY_TRACING_MAINTENANCE_1_SPEC_VERSION 1
+#define VK_KHR_RAY_TRACING_MAINTENANCE_1_EXTENSION_NAME "VK_KHR_ray_tracing_maintenance1"
+typedef struct VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           rayTracingMaintenance1;
+    VkBool32           rayTracingPipelineTraceRaysIndirect2;
+} VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR;
+
+typedef struct VkTraceRaysIndirectCommand2KHR {
+    VkDeviceAddress    raygenShaderRecordAddress;
+    VkDeviceSize       raygenShaderRecordSize;
+    VkDeviceAddress    missShaderBindingTableAddress;
+    VkDeviceSize       missShaderBindingTableSize;
+    VkDeviceSize       missShaderBindingTableStride;
+    VkDeviceAddress    hitShaderBindingTableAddress;
+    VkDeviceSize       hitShaderBindingTableSize;
+    VkDeviceSize       hitShaderBindingTableStride;
+    VkDeviceAddress    callableShaderBindingTableAddress;
+    VkDeviceSize       callableShaderBindingTableSize;
+    VkDeviceSize       callableShaderBindingTableStride;
+    uint32_t           width;
+    uint32_t           height;
+    uint32_t           depth;
+} VkTraceRaysIndirectCommand2KHR;
+
+typedef void (VKAPI_PTR *PFN_vkCmdTraceRaysIndirect2KHR)(VkCommandBuffer commandBuffer, VkDeviceAddress indirectDeviceAddress);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdTraceRaysIndirect2KHR(
+    VkCommandBuffer                             commandBuffer,
+    VkDeviceAddress                             indirectDeviceAddress);
+#endif
+
+
+#define VK_KHR_portability_enumeration 1
+#define VK_KHR_PORTABILITY_ENUMERATION_SPEC_VERSION 1
+#define VK_KHR_PORTABILITY_ENUMERATION_EXTENSION_NAME "VK_KHR_portability_enumeration"
+
+
+#define VK_KHR_maintenance4 1
+#define VK_KHR_MAINTENANCE_4_SPEC_VERSION 2
+#define VK_KHR_MAINTENANCE_4_EXTENSION_NAME "VK_KHR_maintenance4"
+typedef VkPhysicalDeviceMaintenance4Features VkPhysicalDeviceMaintenance4FeaturesKHR;
+
+typedef VkPhysicalDeviceMaintenance4Properties VkPhysicalDeviceMaintenance4PropertiesKHR;
+
+typedef VkDeviceBufferMemoryRequirements VkDeviceBufferMemoryRequirementsKHR;
+
+typedef VkDeviceImageMemoryRequirements VkDeviceImageMemoryRequirementsKHR;
+
+typedef void (VKAPI_PTR *PFN_vkGetDeviceBufferMemoryRequirementsKHR)(VkDevice device, const VkDeviceBufferMemoryRequirements* pInfo, VkMemoryRequirements2* pMemoryRequirements);
+typedef void (VKAPI_PTR *PFN_vkGetDeviceImageMemoryRequirementsKHR)(VkDevice device, const VkDeviceImageMemoryRequirements* pInfo, VkMemoryRequirements2* pMemoryRequirements);
+typedef void (VKAPI_PTR *PFN_vkGetDeviceImageSparseMemoryRequirementsKHR)(VkDevice device, const VkDeviceImageMemoryRequirements* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkGetDeviceBufferMemoryRequirementsKHR(
+    VkDevice                                    device,
+    const VkDeviceBufferMemoryRequirements*     pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements);
+
+VKAPI_ATTR void VKAPI_CALL vkGetDeviceImageMemoryRequirementsKHR(
+    VkDevice                                    device,
+    const VkDeviceImageMemoryRequirements*      pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements);
+
+VKAPI_ATTR void VKAPI_CALL vkGetDeviceImageSparseMemoryRequirementsKHR(
+    VkDevice                                    device,
+    const VkDeviceImageMemoryRequirements*      pInfo,
+    uint32_t*                                   pSparseMemoryRequirementCount,
+    VkSparseImageMemoryRequirements2*           pSparseMemoryRequirements);
+#endif
+
+
+#define VK_EXT_debug_report 1
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDebugReportCallbackEXT)
+#define VK_EXT_DEBUG_REPORT_SPEC_VERSION  10
+#define VK_EXT_DEBUG_REPORT_EXTENSION_NAME "VK_EXT_debug_report"
+
+typedef enum VkDebugReportObjectTypeEXT {
+    VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT = 0,
+    VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT = 1,
+    VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT = 2,
+    VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT = 3,
+    VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT = 4,
+    VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT = 5,
+    VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT = 6,
+    VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT = 7,
+    VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT = 8,
+    VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT = 9,
+    VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT = 10,
+    VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT = 11,
+    VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT = 12,
+    VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT = 13,
+    VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT = 14,
+    VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT = 15,
+    VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT = 16,
+    VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT = 17,
+    VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT = 18,
+    VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT = 19,
+    VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT = 20,
+    VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT = 21,
+    VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT = 22,
+    VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT = 23,
+    VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT = 24,
+    VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT = 25,
+    VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT = 26,
+    VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT = 27,
+    VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT = 28,
+    VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT = 29,
+    VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT = 30,
+    VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT = 33,
+    VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT = 1000156000,
+    VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT = 1000085000,
+    VK_DEBUG_REPORT_OBJECT_TYPE_CU_MODULE_NVX_EXT = 1000029000,
+    VK_DEBUG_REPORT_OBJECT_TYPE_CU_FUNCTION_NVX_EXT = 1000029001,
+    VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR_EXT = 1000150000,
+    VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT = 1000165000,
+    VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_COLLECTION_FUCHSIA_EXT = 1000366000,
+    VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT,
+    VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT,
+    VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT,
+    VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT,
+    VK_DEBUG_REPORT_OBJECT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkDebugReportObjectTypeEXT;
+
+typedef enum VkDebugReportFlagBitsEXT {
+    VK_DEBUG_REPORT_INFORMATION_BIT_EXT = 0x00000001,
+    VK_DEBUG_REPORT_WARNING_BIT_EXT = 0x00000002,
+    VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT = 0x00000004,
+    VK_DEBUG_REPORT_ERROR_BIT_EXT = 0x00000008,
+    VK_DEBUG_REPORT_DEBUG_BIT_EXT = 0x00000010,
+    VK_DEBUG_REPORT_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkDebugReportFlagBitsEXT;
+typedef VkFlags VkDebugReportFlagsEXT;
+typedef VkBool32 (VKAPI_PTR *PFN_vkDebugReportCallbackEXT)(
+    VkDebugReportFlagsEXT                       flags,
+    VkDebugReportObjectTypeEXT                  objectType,
+    uint64_t                                    object,
+    size_t                                      location,
+    int32_t                                     messageCode,
+    const char*                                 pLayerPrefix,
+    const char*                                 pMessage,
+    void*                                       pUserData);
+
+typedef struct VkDebugReportCallbackCreateInfoEXT {
+    VkStructureType                 sType;
+    const void*                     pNext;
+    VkDebugReportFlagsEXT           flags;
+    PFN_vkDebugReportCallbackEXT    pfnCallback;
+    void*                           pUserData;
+} VkDebugReportCallbackCreateInfoEXT;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateDebugReportCallbackEXT)(VkInstance instance, const VkDebugReportCallbackCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugReportCallbackEXT* pCallback);
+typedef void (VKAPI_PTR *PFN_vkDestroyDebugReportCallbackEXT)(VkInstance instance, VkDebugReportCallbackEXT callback, const VkAllocationCallbacks* pAllocator);
+typedef void (VKAPI_PTR *PFN_vkDebugReportMessageEXT)(VkInstance instance, VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateDebugReportCallbackEXT(
+    VkInstance                                  instance,
+    const VkDebugReportCallbackCreateInfoEXT*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDebugReportCallbackEXT*                   pCallback);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyDebugReportCallbackEXT(
+    VkInstance                                  instance,
+    VkDebugReportCallbackEXT                    callback,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR void VKAPI_CALL vkDebugReportMessageEXT(
+    VkInstance                                  instance,
+    VkDebugReportFlagsEXT                       flags,
+    VkDebugReportObjectTypeEXT                  objectType,
+    uint64_t                                    object,
+    size_t                                      location,
+    int32_t                                     messageCode,
+    const char*                                 pLayerPrefix,
+    const char*                                 pMessage);
+#endif
+
+
+#define VK_NV_glsl_shader 1
+#define VK_NV_GLSL_SHADER_SPEC_VERSION    1
+#define VK_NV_GLSL_SHADER_EXTENSION_NAME  "VK_NV_glsl_shader"
+
+
+#define VK_EXT_depth_range_unrestricted 1
+#define VK_EXT_DEPTH_RANGE_UNRESTRICTED_SPEC_VERSION 1
+#define VK_EXT_DEPTH_RANGE_UNRESTRICTED_EXTENSION_NAME "VK_EXT_depth_range_unrestricted"
+
+
+#define VK_IMG_filter_cubic 1
+#define VK_IMG_FILTER_CUBIC_SPEC_VERSION  1
+#define VK_IMG_FILTER_CUBIC_EXTENSION_NAME "VK_IMG_filter_cubic"
+
+
+#define VK_AMD_rasterization_order 1
+#define VK_AMD_RASTERIZATION_ORDER_SPEC_VERSION 1
+#define VK_AMD_RASTERIZATION_ORDER_EXTENSION_NAME "VK_AMD_rasterization_order"
+
+typedef enum VkRasterizationOrderAMD {
+    VK_RASTERIZATION_ORDER_STRICT_AMD = 0,
+    VK_RASTERIZATION_ORDER_RELAXED_AMD = 1,
+    VK_RASTERIZATION_ORDER_MAX_ENUM_AMD = 0x7FFFFFFF
+} VkRasterizationOrderAMD;
+typedef struct VkPipelineRasterizationStateRasterizationOrderAMD {
+    VkStructureType            sType;
+    const void*                pNext;
+    VkRasterizationOrderAMD    rasterizationOrder;
+} VkPipelineRasterizationStateRasterizationOrderAMD;
+
+
+
+#define VK_AMD_shader_trinary_minmax 1
+#define VK_AMD_SHADER_TRINARY_MINMAX_SPEC_VERSION 1
+#define VK_AMD_SHADER_TRINARY_MINMAX_EXTENSION_NAME "VK_AMD_shader_trinary_minmax"
+
+
+#define VK_AMD_shader_explicit_vertex_parameter 1
+#define VK_AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_SPEC_VERSION 1
+#define VK_AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_EXTENSION_NAME "VK_AMD_shader_explicit_vertex_parameter"
+
+
+#define VK_EXT_debug_marker 1
+#define VK_EXT_DEBUG_MARKER_SPEC_VERSION  4
+#define VK_EXT_DEBUG_MARKER_EXTENSION_NAME "VK_EXT_debug_marker"
+typedef struct VkDebugMarkerObjectNameInfoEXT {
+    VkStructureType               sType;
+    const void*                   pNext;
+    VkDebugReportObjectTypeEXT    objectType;
+    uint64_t                      object;
+    const char*                   pObjectName;
+} VkDebugMarkerObjectNameInfoEXT;
+
+typedef struct VkDebugMarkerObjectTagInfoEXT {
+    VkStructureType               sType;
+    const void*                   pNext;
+    VkDebugReportObjectTypeEXT    objectType;
+    uint64_t                      object;
+    uint64_t                      tagName;
+    size_t                        tagSize;
+    const void*                   pTag;
+} VkDebugMarkerObjectTagInfoEXT;
+
+typedef struct VkDebugMarkerMarkerInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    const char*        pMarkerName;
+    float              color[4];
+} VkDebugMarkerMarkerInfoEXT;
+
+typedef VkResult (VKAPI_PTR *PFN_vkDebugMarkerSetObjectTagEXT)(VkDevice device, const VkDebugMarkerObjectTagInfoEXT* pTagInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkDebugMarkerSetObjectNameEXT)(VkDevice device, const VkDebugMarkerObjectNameInfoEXT* pNameInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdDebugMarkerBeginEXT)(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdDebugMarkerEndEXT)(VkCommandBuffer commandBuffer);
+typedef void (VKAPI_PTR *PFN_vkCmdDebugMarkerInsertEXT)(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkDebugMarkerSetObjectTagEXT(
+    VkDevice                                    device,
+    const VkDebugMarkerObjectTagInfoEXT*        pTagInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkDebugMarkerSetObjectNameEXT(
+    VkDevice                                    device,
+    const VkDebugMarkerObjectNameInfoEXT*       pNameInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDebugMarkerBeginEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugMarkerMarkerInfoEXT*           pMarkerInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDebugMarkerEndEXT(
+    VkCommandBuffer                             commandBuffer);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDebugMarkerInsertEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugMarkerMarkerInfoEXT*           pMarkerInfo);
+#endif
+
+
+#define VK_AMD_gcn_shader 1
+#define VK_AMD_GCN_SHADER_SPEC_VERSION    1
+#define VK_AMD_GCN_SHADER_EXTENSION_NAME  "VK_AMD_gcn_shader"
+
+
+#define VK_NV_dedicated_allocation 1
+#define VK_NV_DEDICATED_ALLOCATION_SPEC_VERSION 1
+#define VK_NV_DEDICATED_ALLOCATION_EXTENSION_NAME "VK_NV_dedicated_allocation"
+typedef struct VkDedicatedAllocationImageCreateInfoNV {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkBool32           dedicatedAllocation;
+} VkDedicatedAllocationImageCreateInfoNV;
+
+typedef struct VkDedicatedAllocationBufferCreateInfoNV {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkBool32           dedicatedAllocation;
+} VkDedicatedAllocationBufferCreateInfoNV;
+
+typedef struct VkDedicatedAllocationMemoryAllocateInfoNV {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkImage            image;
+    VkBuffer           buffer;
+} VkDedicatedAllocationMemoryAllocateInfoNV;
+
+
+
+#define VK_EXT_transform_feedback 1
+#define VK_EXT_TRANSFORM_FEEDBACK_SPEC_VERSION 1
+#define VK_EXT_TRANSFORM_FEEDBACK_EXTENSION_NAME "VK_EXT_transform_feedback"
+typedef VkFlags VkPipelineRasterizationStateStreamCreateFlagsEXT;
+typedef struct VkPhysicalDeviceTransformFeedbackFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           transformFeedback;
+    VkBool32           geometryStreams;
+} VkPhysicalDeviceTransformFeedbackFeaturesEXT;
+
+typedef struct VkPhysicalDeviceTransformFeedbackPropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           maxTransformFeedbackStreams;
+    uint32_t           maxTransformFeedbackBuffers;
+    VkDeviceSize       maxTransformFeedbackBufferSize;
+    uint32_t           maxTransformFeedbackStreamDataSize;
+    uint32_t           maxTransformFeedbackBufferDataSize;
+    uint32_t           maxTransformFeedbackBufferDataStride;
+    VkBool32           transformFeedbackQueries;
+    VkBool32           transformFeedbackStreamsLinesTriangles;
+    VkBool32           transformFeedbackRasterizationStreamSelect;
+    VkBool32           transformFeedbackDraw;
+} VkPhysicalDeviceTransformFeedbackPropertiesEXT;
+
+typedef struct VkPipelineRasterizationStateStreamCreateInfoEXT {
+    VkStructureType                                     sType;
+    const void*                                         pNext;
+    VkPipelineRasterizationStateStreamCreateFlagsEXT    flags;
+    uint32_t                                            rasterizationStream;
+} VkPipelineRasterizationStateStreamCreateInfoEXT;
+
+typedef void (VKAPI_PTR *PFN_vkCmdBindTransformFeedbackBuffersEXT)(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes);
+typedef void (VKAPI_PTR *PFN_vkCmdBeginTransformFeedbackEXT)(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets);
+typedef void (VKAPI_PTR *PFN_vkCmdEndTransformFeedbackEXT)(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets);
+typedef void (VKAPI_PTR *PFN_vkCmdBeginQueryIndexedEXT)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags, uint32_t index);
+typedef void (VKAPI_PTR *PFN_vkCmdEndQueryIndexedEXT)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, uint32_t index);
+typedef void (VKAPI_PTR *PFN_vkCmdDrawIndirectByteCountEXT)(VkCommandBuffer commandBuffer, uint32_t instanceCount, uint32_t firstInstance, VkBuffer counterBuffer, VkDeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdBindTransformFeedbackBuffersEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstBinding,
+    uint32_t                                    bindingCount,
+    const VkBuffer*                             pBuffers,
+    const VkDeviceSize*                         pOffsets,
+    const VkDeviceSize*                         pSizes);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBeginTransformFeedbackEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstCounterBuffer,
+    uint32_t                                    counterBufferCount,
+    const VkBuffer*                             pCounterBuffers,
+    const VkDeviceSize*                         pCounterBufferOffsets);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdEndTransformFeedbackEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstCounterBuffer,
+    uint32_t                                    counterBufferCount,
+    const VkBuffer*                             pCounterBuffers,
+    const VkDeviceSize*                         pCounterBufferOffsets);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBeginQueryIndexedEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query,
+    VkQueryControlFlags                         flags,
+    uint32_t                                    index);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdEndQueryIndexedEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query,
+    uint32_t                                    index);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirectByteCountEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    instanceCount,
+    uint32_t                                    firstInstance,
+    VkBuffer                                    counterBuffer,
+    VkDeviceSize                                counterBufferOffset,
+    uint32_t                                    counterOffset,
+    uint32_t                                    vertexStride);
+#endif
+
+
+#define VK_NVX_binary_import 1
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkCuModuleNVX)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkCuFunctionNVX)
+#define VK_NVX_BINARY_IMPORT_SPEC_VERSION 1
+#define VK_NVX_BINARY_IMPORT_EXTENSION_NAME "VK_NVX_binary_import"
+typedef struct VkCuModuleCreateInfoNVX {
+    VkStructureType    sType;
+    const void*        pNext;
+    size_t             dataSize;
+    const void*        pData;
+} VkCuModuleCreateInfoNVX;
+
+typedef struct VkCuFunctionCreateInfoNVX {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkCuModuleNVX      module;
+    const char*        pName;
+} VkCuFunctionCreateInfoNVX;
+
+typedef struct VkCuLaunchInfoNVX {
+    VkStructureType        sType;
+    const void*            pNext;
+    VkCuFunctionNVX        function;
+    uint32_t               gridDimX;
+    uint32_t               gridDimY;
+    uint32_t               gridDimZ;
+    uint32_t               blockDimX;
+    uint32_t               blockDimY;
+    uint32_t               blockDimZ;
+    uint32_t               sharedMemBytes;
+    size_t                 paramCount;
+    const void* const *    pParams;
+    size_t                 extraCount;
+    const void* const *    pExtras;
+} VkCuLaunchInfoNVX;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateCuModuleNVX)(VkDevice device, const VkCuModuleCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCuModuleNVX* pModule);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateCuFunctionNVX)(VkDevice device, const VkCuFunctionCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCuFunctionNVX* pFunction);
+typedef void (VKAPI_PTR *PFN_vkDestroyCuModuleNVX)(VkDevice device, VkCuModuleNVX module, const VkAllocationCallbacks* pAllocator);
+typedef void (VKAPI_PTR *PFN_vkDestroyCuFunctionNVX)(VkDevice device, VkCuFunctionNVX function, const VkAllocationCallbacks* pAllocator);
+typedef void (VKAPI_PTR *PFN_vkCmdCuLaunchKernelNVX)(VkCommandBuffer commandBuffer, const VkCuLaunchInfoNVX* pLaunchInfo);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateCuModuleNVX(
+    VkDevice                                    device,
+    const VkCuModuleCreateInfoNVX*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkCuModuleNVX*                              pModule);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateCuFunctionNVX(
+    VkDevice                                    device,
+    const VkCuFunctionCreateInfoNVX*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkCuFunctionNVX*                            pFunction);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyCuModuleNVX(
+    VkDevice                                    device,
+    VkCuModuleNVX                               module,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyCuFunctionNVX(
+    VkDevice                                    device,
+    VkCuFunctionNVX                             function,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCuLaunchKernelNVX(
+    VkCommandBuffer                             commandBuffer,
+    const VkCuLaunchInfoNVX*                    pLaunchInfo);
+#endif
+
+
+#define VK_NVX_image_view_handle 1
+#define VK_NVX_IMAGE_VIEW_HANDLE_SPEC_VERSION 2
+#define VK_NVX_IMAGE_VIEW_HANDLE_EXTENSION_NAME "VK_NVX_image_view_handle"
+typedef struct VkImageViewHandleInfoNVX {
+    VkStructureType     sType;
+    const void*         pNext;
+    VkImageView         imageView;
+    VkDescriptorType    descriptorType;
+    VkSampler           sampler;
+} VkImageViewHandleInfoNVX;
+
+typedef struct VkImageViewAddressPropertiesNVX {
+    VkStructureType    sType;
+    void*              pNext;
+    VkDeviceAddress    deviceAddress;
+    VkDeviceSize       size;
+} VkImageViewAddressPropertiesNVX;
+
+typedef uint32_t (VKAPI_PTR *PFN_vkGetImageViewHandleNVX)(VkDevice device, const VkImageViewHandleInfoNVX* pInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkGetImageViewAddressNVX)(VkDevice device, VkImageView imageView, VkImageViewAddressPropertiesNVX* pProperties);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR uint32_t VKAPI_CALL vkGetImageViewHandleNVX(
+    VkDevice                                    device,
+    const VkImageViewHandleInfoNVX*             pInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetImageViewAddressNVX(
+    VkDevice                                    device,
+    VkImageView                                 imageView,
+    VkImageViewAddressPropertiesNVX*            pProperties);
+#endif
+
+
+#define VK_AMD_draw_indirect_count 1
+#define VK_AMD_DRAW_INDIRECT_COUNT_SPEC_VERSION 2
+#define VK_AMD_DRAW_INDIRECT_COUNT_EXTENSION_NAME "VK_AMD_draw_indirect_count"
+typedef void (VKAPI_PTR *PFN_vkCmdDrawIndirectCountAMD)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride);
+typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexedIndirectCountAMD)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirectCountAMD(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirectCountAMD(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride);
+#endif
+
+
+#define VK_AMD_negative_viewport_height 1
+#define VK_AMD_NEGATIVE_VIEWPORT_HEIGHT_SPEC_VERSION 1
+#define VK_AMD_NEGATIVE_VIEWPORT_HEIGHT_EXTENSION_NAME "VK_AMD_negative_viewport_height"
+
+
+#define VK_AMD_gpu_shader_half_float 1
+#define VK_AMD_GPU_SHADER_HALF_FLOAT_SPEC_VERSION 2
+#define VK_AMD_GPU_SHADER_HALF_FLOAT_EXTENSION_NAME "VK_AMD_gpu_shader_half_float"
+
+
+#define VK_AMD_shader_ballot 1
+#define VK_AMD_SHADER_BALLOT_SPEC_VERSION 1
+#define VK_AMD_SHADER_BALLOT_EXTENSION_NAME "VK_AMD_shader_ballot"
+
+
+#define VK_AMD_texture_gather_bias_lod 1
+#define VK_AMD_TEXTURE_GATHER_BIAS_LOD_SPEC_VERSION 1
+#define VK_AMD_TEXTURE_GATHER_BIAS_LOD_EXTENSION_NAME "VK_AMD_texture_gather_bias_lod"
+typedef struct VkTextureLODGatherFormatPropertiesAMD {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           supportsTextureGatherLODBiasAMD;
+} VkTextureLODGatherFormatPropertiesAMD;
+
+
+
+#define VK_AMD_shader_info 1
+#define VK_AMD_SHADER_INFO_SPEC_VERSION   1
+#define VK_AMD_SHADER_INFO_EXTENSION_NAME "VK_AMD_shader_info"
+
+typedef enum VkShaderInfoTypeAMD {
+    VK_SHADER_INFO_TYPE_STATISTICS_AMD = 0,
+    VK_SHADER_INFO_TYPE_BINARY_AMD = 1,
+    VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD = 2,
+    VK_SHADER_INFO_TYPE_MAX_ENUM_AMD = 0x7FFFFFFF
+} VkShaderInfoTypeAMD;
+typedef struct VkShaderResourceUsageAMD {
+    uint32_t    numUsedVgprs;
+    uint32_t    numUsedSgprs;
+    uint32_t    ldsSizePerLocalWorkGroup;
+    size_t      ldsUsageSizeInBytes;
+    size_t      scratchMemUsageInBytes;
+} VkShaderResourceUsageAMD;
+
+typedef struct VkShaderStatisticsInfoAMD {
+    VkShaderStageFlags          shaderStageMask;
+    VkShaderResourceUsageAMD    resourceUsage;
+    uint32_t                    numPhysicalVgprs;
+    uint32_t                    numPhysicalSgprs;
+    uint32_t                    numAvailableVgprs;
+    uint32_t                    numAvailableSgprs;
+    uint32_t                    computeWorkGroupSize[3];
+} VkShaderStatisticsInfoAMD;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetShaderInfoAMD)(VkDevice device, VkPipeline pipeline, VkShaderStageFlagBits shaderStage, VkShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetShaderInfoAMD(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    VkShaderStageFlagBits                       shaderStage,
+    VkShaderInfoTypeAMD                         infoType,
+    size_t*                                     pInfoSize,
+    void*                                       pInfo);
+#endif
+
+
+#define VK_AMD_shader_image_load_store_lod 1
+#define VK_AMD_SHADER_IMAGE_LOAD_STORE_LOD_SPEC_VERSION 1
+#define VK_AMD_SHADER_IMAGE_LOAD_STORE_LOD_EXTENSION_NAME "VK_AMD_shader_image_load_store_lod"
+
+
+#define VK_NV_corner_sampled_image 1
+#define VK_NV_CORNER_SAMPLED_IMAGE_SPEC_VERSION 2
+#define VK_NV_CORNER_SAMPLED_IMAGE_EXTENSION_NAME "VK_NV_corner_sampled_image"
+typedef struct VkPhysicalDeviceCornerSampledImageFeaturesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           cornerSampledImage;
+} VkPhysicalDeviceCornerSampledImageFeaturesNV;
+
+
+
+#define VK_IMG_format_pvrtc 1
+#define VK_IMG_FORMAT_PVRTC_SPEC_VERSION  1
+#define VK_IMG_FORMAT_PVRTC_EXTENSION_NAME "VK_IMG_format_pvrtc"
+
+
+#define VK_NV_external_memory_capabilities 1
+#define VK_NV_EXTERNAL_MEMORY_CAPABILITIES_SPEC_VERSION 1
+#define VK_NV_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME "VK_NV_external_memory_capabilities"
+
+typedef enum VkExternalMemoryHandleTypeFlagBitsNV {
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV = 0x00000001,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV = 0x00000002,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_BIT_NV = 0x00000004,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV = 0x00000008,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF
+} VkExternalMemoryHandleTypeFlagBitsNV;
+typedef VkFlags VkExternalMemoryHandleTypeFlagsNV;
+
+typedef enum VkExternalMemoryFeatureFlagBitsNV {
+    VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV = 0x00000001,
+    VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV = 0x00000002,
+    VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV = 0x00000004,
+    VK_EXTERNAL_MEMORY_FEATURE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF
+} VkExternalMemoryFeatureFlagBitsNV;
+typedef VkFlags VkExternalMemoryFeatureFlagsNV;
+typedef struct VkExternalImageFormatPropertiesNV {
+    VkImageFormatProperties              imageFormatProperties;
+    VkExternalMemoryFeatureFlagsNV       externalMemoryFeatures;
+    VkExternalMemoryHandleTypeFlagsNV    exportFromImportedHandleTypes;
+    VkExternalMemoryHandleTypeFlagsNV    compatibleHandleTypes;
+} VkExternalImageFormatPropertiesNV;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV)(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkExternalMemoryHandleTypeFlagsNV externalHandleType, VkExternalImageFormatPropertiesNV* pExternalImageFormatProperties);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceExternalImageFormatPropertiesNV(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkImageType                                 type,
+    VkImageTiling                               tiling,
+    VkImageUsageFlags                           usage,
+    VkImageCreateFlags                          flags,
+    VkExternalMemoryHandleTypeFlagsNV           externalHandleType,
+    VkExternalImageFormatPropertiesNV*          pExternalImageFormatProperties);
+#endif
+
+
+#define VK_NV_external_memory 1
+#define VK_NV_EXTERNAL_MEMORY_SPEC_VERSION 1
+#define VK_NV_EXTERNAL_MEMORY_EXTENSION_NAME "VK_NV_external_memory"
+typedef struct VkExternalMemoryImageCreateInfoNV {
+    VkStructureType                      sType;
+    const void*                          pNext;
+    VkExternalMemoryHandleTypeFlagsNV    handleTypes;
+} VkExternalMemoryImageCreateInfoNV;
+
+typedef struct VkExportMemoryAllocateInfoNV {
+    VkStructureType                      sType;
+    const void*                          pNext;
+    VkExternalMemoryHandleTypeFlagsNV    handleTypes;
+} VkExportMemoryAllocateInfoNV;
+
+
+
+#define VK_EXT_validation_flags 1
+#define VK_EXT_VALIDATION_FLAGS_SPEC_VERSION 2
+#define VK_EXT_VALIDATION_FLAGS_EXTENSION_NAME "VK_EXT_validation_flags"
+
+typedef enum VkValidationCheckEXT {
+    VK_VALIDATION_CHECK_ALL_EXT = 0,
+    VK_VALIDATION_CHECK_SHADERS_EXT = 1,
+    VK_VALIDATION_CHECK_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkValidationCheckEXT;
+typedef struct VkValidationFlagsEXT {
+    VkStructureType                sType;
+    const void*                    pNext;
+    uint32_t                       disabledValidationCheckCount;
+    const VkValidationCheckEXT*    pDisabledValidationChecks;
+} VkValidationFlagsEXT;
+
+
+
+#define VK_EXT_shader_subgroup_ballot 1
+#define VK_EXT_SHADER_SUBGROUP_BALLOT_SPEC_VERSION 1
+#define VK_EXT_SHADER_SUBGROUP_BALLOT_EXTENSION_NAME "VK_EXT_shader_subgroup_ballot"
+
+
+#define VK_EXT_shader_subgroup_vote 1
+#define VK_EXT_SHADER_SUBGROUP_VOTE_SPEC_VERSION 1
+#define VK_EXT_SHADER_SUBGROUP_VOTE_EXTENSION_NAME "VK_EXT_shader_subgroup_vote"
+
+
+#define VK_EXT_texture_compression_astc_hdr 1
+#define VK_EXT_TEXTURE_COMPRESSION_ASTC_HDR_SPEC_VERSION 1
+#define VK_EXT_TEXTURE_COMPRESSION_ASTC_HDR_EXTENSION_NAME "VK_EXT_texture_compression_astc_hdr"
+typedef VkPhysicalDeviceTextureCompressionASTCHDRFeatures VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT;
+
+
+
+#define VK_EXT_astc_decode_mode 1
+#define VK_EXT_ASTC_DECODE_MODE_SPEC_VERSION 1
+#define VK_EXT_ASTC_DECODE_MODE_EXTENSION_NAME "VK_EXT_astc_decode_mode"
+typedef struct VkImageViewASTCDecodeModeEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkFormat           decodeMode;
+} VkImageViewASTCDecodeModeEXT;
+
+typedef struct VkPhysicalDeviceASTCDecodeFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           decodeModeSharedExponent;
+} VkPhysicalDeviceASTCDecodeFeaturesEXT;
+
+
+
+#define VK_EXT_pipeline_robustness 1
+#define VK_EXT_PIPELINE_ROBUSTNESS_SPEC_VERSION 1
+#define VK_EXT_PIPELINE_ROBUSTNESS_EXTENSION_NAME "VK_EXT_pipeline_robustness"
+
+typedef enum VkPipelineRobustnessBufferBehaviorEXT {
+    VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_DEVICE_DEFAULT_EXT = 0,
+    VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_DISABLED_EXT = 1,
+    VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_EXT = 2,
+    VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_2_EXT = 3,
+    VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkPipelineRobustnessBufferBehaviorEXT;
+
+typedef enum VkPipelineRobustnessImageBehaviorEXT {
+    VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_DEVICE_DEFAULT_EXT = 0,
+    VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_DISABLED_EXT = 1,
+    VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_ROBUST_IMAGE_ACCESS_EXT = 2,
+    VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_ROBUST_IMAGE_ACCESS_2_EXT = 3,
+    VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkPipelineRobustnessImageBehaviorEXT;
+typedef struct VkPhysicalDevicePipelineRobustnessFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           pipelineRobustness;
+} VkPhysicalDevicePipelineRobustnessFeaturesEXT;
+
+typedef struct VkPhysicalDevicePipelineRobustnessPropertiesEXT {
+    VkStructureType                          sType;
+    void*                                    pNext;
+    VkPipelineRobustnessBufferBehaviorEXT    defaultRobustnessStorageBuffers;
+    VkPipelineRobustnessBufferBehaviorEXT    defaultRobustnessUniformBuffers;
+    VkPipelineRobustnessBufferBehaviorEXT    defaultRobustnessVertexInputs;
+    VkPipelineRobustnessImageBehaviorEXT     defaultRobustnessImages;
+} VkPhysicalDevicePipelineRobustnessPropertiesEXT;
+
+typedef struct VkPipelineRobustnessCreateInfoEXT {
+    VkStructureType                          sType;
+    const void*                              pNext;
+    VkPipelineRobustnessBufferBehaviorEXT    storageBuffers;
+    VkPipelineRobustnessBufferBehaviorEXT    uniformBuffers;
+    VkPipelineRobustnessBufferBehaviorEXT    vertexInputs;
+    VkPipelineRobustnessImageBehaviorEXT     images;
+} VkPipelineRobustnessCreateInfoEXT;
+
+
+
+#define VK_EXT_conditional_rendering 1
+#define VK_EXT_CONDITIONAL_RENDERING_SPEC_VERSION 2
+#define VK_EXT_CONDITIONAL_RENDERING_EXTENSION_NAME "VK_EXT_conditional_rendering"
+
+typedef enum VkConditionalRenderingFlagBitsEXT {
+    VK_CONDITIONAL_RENDERING_INVERTED_BIT_EXT = 0x00000001,
+    VK_CONDITIONAL_RENDERING_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkConditionalRenderingFlagBitsEXT;
+typedef VkFlags VkConditionalRenderingFlagsEXT;
+typedef struct VkConditionalRenderingBeginInfoEXT {
+    VkStructureType                   sType;
+    const void*                       pNext;
+    VkBuffer                          buffer;
+    VkDeviceSize                      offset;
+    VkConditionalRenderingFlagsEXT    flags;
+} VkConditionalRenderingBeginInfoEXT;
+
+typedef struct VkPhysicalDeviceConditionalRenderingFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           conditionalRendering;
+    VkBool32           inheritedConditionalRendering;
+} VkPhysicalDeviceConditionalRenderingFeaturesEXT;
+
+typedef struct VkCommandBufferInheritanceConditionalRenderingInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkBool32           conditionalRenderingEnable;
+} VkCommandBufferInheritanceConditionalRenderingInfoEXT;
+
+typedef void (VKAPI_PTR *PFN_vkCmdBeginConditionalRenderingEXT)(VkCommandBuffer commandBuffer, const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin);
+typedef void (VKAPI_PTR *PFN_vkCmdEndConditionalRenderingEXT)(VkCommandBuffer commandBuffer);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdBeginConditionalRenderingEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkConditionalRenderingBeginInfoEXT*   pConditionalRenderingBegin);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdEndConditionalRenderingEXT(
+    VkCommandBuffer                             commandBuffer);
+#endif
+
+
+#define VK_NV_clip_space_w_scaling 1
+#define VK_NV_CLIP_SPACE_W_SCALING_SPEC_VERSION 1
+#define VK_NV_CLIP_SPACE_W_SCALING_EXTENSION_NAME "VK_NV_clip_space_w_scaling"
+typedef struct VkViewportWScalingNV {
+    float    xcoeff;
+    float    ycoeff;
+} VkViewportWScalingNV;
+
+typedef struct VkPipelineViewportWScalingStateCreateInfoNV {
+    VkStructureType                sType;
+    const void*                    pNext;
+    VkBool32                       viewportWScalingEnable;
+    uint32_t                       viewportCount;
+    const VkViewportWScalingNV*    pViewportWScalings;
+} VkPipelineViewportWScalingStateCreateInfoNV;
+
+typedef void (VKAPI_PTR *PFN_vkCmdSetViewportWScalingNV)(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewportWScalingNV* pViewportWScalings);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportWScalingNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstViewport,
+    uint32_t                                    viewportCount,
+    const VkViewportWScalingNV*                 pViewportWScalings);
+#endif
+
+
+#define VK_EXT_direct_mode_display 1
+#define VK_EXT_DIRECT_MODE_DISPLAY_SPEC_VERSION 1
+#define VK_EXT_DIRECT_MODE_DISPLAY_EXTENSION_NAME "VK_EXT_direct_mode_display"
+typedef VkResult (VKAPI_PTR *PFN_vkReleaseDisplayEXT)(VkPhysicalDevice physicalDevice, VkDisplayKHR display);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkReleaseDisplayEXT(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display);
+#endif
+
+
+#define VK_EXT_display_surface_counter 1
+#define VK_EXT_DISPLAY_SURFACE_COUNTER_SPEC_VERSION 1
+#define VK_EXT_DISPLAY_SURFACE_COUNTER_EXTENSION_NAME "VK_EXT_display_surface_counter"
+
+typedef enum VkSurfaceCounterFlagBitsEXT {
+    VK_SURFACE_COUNTER_VBLANK_BIT_EXT = 0x00000001,
+    VK_SURFACE_COUNTER_VBLANK_EXT = VK_SURFACE_COUNTER_VBLANK_BIT_EXT,
+    VK_SURFACE_COUNTER_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkSurfaceCounterFlagBitsEXT;
+typedef VkFlags VkSurfaceCounterFlagsEXT;
+typedef struct VkSurfaceCapabilities2EXT {
+    VkStructureType                  sType;
+    void*                            pNext;
+    uint32_t                         minImageCount;
+    uint32_t                         maxImageCount;
+    VkExtent2D                       currentExtent;
+    VkExtent2D                       minImageExtent;
+    VkExtent2D                       maxImageExtent;
+    uint32_t                         maxImageArrayLayers;
+    VkSurfaceTransformFlagsKHR       supportedTransforms;
+    VkSurfaceTransformFlagBitsKHR    currentTransform;
+    VkCompositeAlphaFlagsKHR         supportedCompositeAlpha;
+    VkImageUsageFlags                supportedUsageFlags;
+    VkSurfaceCounterFlagsEXT         supportedSurfaceCounters;
+} VkSurfaceCapabilities2EXT;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilities2EXT* pSurfaceCapabilities);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilities2EXT(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    VkSurfaceCapabilities2EXT*                  pSurfaceCapabilities);
+#endif
+
+
+#define VK_EXT_display_control 1
+#define VK_EXT_DISPLAY_CONTROL_SPEC_VERSION 1
+#define VK_EXT_DISPLAY_CONTROL_EXTENSION_NAME "VK_EXT_display_control"
+
+typedef enum VkDisplayPowerStateEXT {
+    VK_DISPLAY_POWER_STATE_OFF_EXT = 0,
+    VK_DISPLAY_POWER_STATE_SUSPEND_EXT = 1,
+    VK_DISPLAY_POWER_STATE_ON_EXT = 2,
+    VK_DISPLAY_POWER_STATE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkDisplayPowerStateEXT;
+
+typedef enum VkDeviceEventTypeEXT {
+    VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT = 0,
+    VK_DEVICE_EVENT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkDeviceEventTypeEXT;
+
+typedef enum VkDisplayEventTypeEXT {
+    VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT = 0,
+    VK_DISPLAY_EVENT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkDisplayEventTypeEXT;
+typedef struct VkDisplayPowerInfoEXT {
+    VkStructureType           sType;
+    const void*               pNext;
+    VkDisplayPowerStateEXT    powerState;
+} VkDisplayPowerInfoEXT;
+
+typedef struct VkDeviceEventInfoEXT {
+    VkStructureType         sType;
+    const void*             pNext;
+    VkDeviceEventTypeEXT    deviceEvent;
+} VkDeviceEventInfoEXT;
+
+typedef struct VkDisplayEventInfoEXT {
+    VkStructureType          sType;
+    const void*              pNext;
+    VkDisplayEventTypeEXT    displayEvent;
+} VkDisplayEventInfoEXT;
+
+typedef struct VkSwapchainCounterCreateInfoEXT {
+    VkStructureType             sType;
+    const void*                 pNext;
+    VkSurfaceCounterFlagsEXT    surfaceCounters;
+} VkSwapchainCounterCreateInfoEXT;
+
+typedef VkResult (VKAPI_PTR *PFN_vkDisplayPowerControlEXT)(VkDevice device, VkDisplayKHR display, const VkDisplayPowerInfoEXT* pDisplayPowerInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkRegisterDeviceEventEXT)(VkDevice device, const VkDeviceEventInfoEXT* pDeviceEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence);
+typedef VkResult (VKAPI_PTR *PFN_vkRegisterDisplayEventEXT)(VkDevice device, VkDisplayKHR display, const VkDisplayEventInfoEXT* pDisplayEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence);
+typedef VkResult (VKAPI_PTR *PFN_vkGetSwapchainCounterEXT)(VkDevice device, VkSwapchainKHR swapchain, VkSurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkDisplayPowerControlEXT(
+    VkDevice                                    device,
+    VkDisplayKHR                                display,
+    const VkDisplayPowerInfoEXT*                pDisplayPowerInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkRegisterDeviceEventEXT(
+    VkDevice                                    device,
+    const VkDeviceEventInfoEXT*                 pDeviceEventInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFence*                                    pFence);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkRegisterDisplayEventEXT(
+    VkDevice                                    device,
+    VkDisplayKHR                                display,
+    const VkDisplayEventInfoEXT*                pDisplayEventInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFence*                                    pFence);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainCounterEXT(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    VkSurfaceCounterFlagBitsEXT                 counter,
+    uint64_t*                                   pCounterValue);
+#endif
+
+
+#define VK_GOOGLE_display_timing 1
+#define VK_GOOGLE_DISPLAY_TIMING_SPEC_VERSION 1
+#define VK_GOOGLE_DISPLAY_TIMING_EXTENSION_NAME "VK_GOOGLE_display_timing"
+typedef struct VkRefreshCycleDurationGOOGLE {
+    uint64_t    refreshDuration;
+} VkRefreshCycleDurationGOOGLE;
+
+typedef struct VkPastPresentationTimingGOOGLE {
+    uint32_t    presentID;
+    uint64_t    desiredPresentTime;
+    uint64_t    actualPresentTime;
+    uint64_t    earliestPresentTime;
+    uint64_t    presentMargin;
+} VkPastPresentationTimingGOOGLE;
+
+typedef struct VkPresentTimeGOOGLE {
+    uint32_t    presentID;
+    uint64_t    desiredPresentTime;
+} VkPresentTimeGOOGLE;
+
+typedef struct VkPresentTimesInfoGOOGLE {
+    VkStructureType               sType;
+    const void*                   pNext;
+    uint32_t                      swapchainCount;
+    const VkPresentTimeGOOGLE*    pTimes;
+} VkPresentTimesInfoGOOGLE;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetRefreshCycleDurationGOOGLE)(VkDevice device, VkSwapchainKHR swapchain, VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkGetPastPresentationTimingGOOGLE)(VkDevice device, VkSwapchainKHR swapchain, uint32_t* pPresentationTimingCount, VkPastPresentationTimingGOOGLE* pPresentationTimings);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetRefreshCycleDurationGOOGLE(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    VkRefreshCycleDurationGOOGLE*               pDisplayTimingProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPastPresentationTimingGOOGLE(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    uint32_t*                                   pPresentationTimingCount,
+    VkPastPresentationTimingGOOGLE*             pPresentationTimings);
+#endif
+
+
+#define VK_NV_sample_mask_override_coverage 1
+#define VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_SPEC_VERSION 1
+#define VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_EXTENSION_NAME "VK_NV_sample_mask_override_coverage"
+
+
+#define VK_NV_geometry_shader_passthrough 1
+#define VK_NV_GEOMETRY_SHADER_PASSTHROUGH_SPEC_VERSION 1
+#define VK_NV_GEOMETRY_SHADER_PASSTHROUGH_EXTENSION_NAME "VK_NV_geometry_shader_passthrough"
+
+
+#define VK_NV_viewport_array2 1
+#define VK_NV_VIEWPORT_ARRAY_2_SPEC_VERSION 1
+#define VK_NV_VIEWPORT_ARRAY_2_EXTENSION_NAME "VK_NV_viewport_array2"
+#define VK_NV_VIEWPORT_ARRAY2_SPEC_VERSION VK_NV_VIEWPORT_ARRAY_2_SPEC_VERSION
+#define VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME VK_NV_VIEWPORT_ARRAY_2_EXTENSION_NAME
+
+
+#define VK_NVX_multiview_per_view_attributes 1
+#define VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_SPEC_VERSION 1
+#define VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_EXTENSION_NAME "VK_NVX_multiview_per_view_attributes"
+typedef struct VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           perViewPositionAllComponents;
+} VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX;
+
+
+
+#define VK_NV_viewport_swizzle 1
+#define VK_NV_VIEWPORT_SWIZZLE_SPEC_VERSION 1
+#define VK_NV_VIEWPORT_SWIZZLE_EXTENSION_NAME "VK_NV_viewport_swizzle"
+
+typedef enum VkViewportCoordinateSwizzleNV {
+    VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV = 0,
+    VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_X_NV = 1,
+    VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Y_NV = 2,
+    VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Y_NV = 3,
+    VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Z_NV = 4,
+    VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Z_NV = 5,
+    VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_W_NV = 6,
+    VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV = 7,
+    VK_VIEWPORT_COORDINATE_SWIZZLE_MAX_ENUM_NV = 0x7FFFFFFF
+} VkViewportCoordinateSwizzleNV;
+typedef VkFlags VkPipelineViewportSwizzleStateCreateFlagsNV;
+typedef struct VkViewportSwizzleNV {
+    VkViewportCoordinateSwizzleNV    x;
+    VkViewportCoordinateSwizzleNV    y;
+    VkViewportCoordinateSwizzleNV    z;
+    VkViewportCoordinateSwizzleNV    w;
+} VkViewportSwizzleNV;
+
+typedef struct VkPipelineViewportSwizzleStateCreateInfoNV {
+    VkStructureType                                sType;
+    const void*                                    pNext;
+    VkPipelineViewportSwizzleStateCreateFlagsNV    flags;
+    uint32_t                                       viewportCount;
+    const VkViewportSwizzleNV*                     pViewportSwizzles;
+} VkPipelineViewportSwizzleStateCreateInfoNV;
+
+
+
+#define VK_EXT_discard_rectangles 1
+#define VK_EXT_DISCARD_RECTANGLES_SPEC_VERSION 1
+#define VK_EXT_DISCARD_RECTANGLES_EXTENSION_NAME "VK_EXT_discard_rectangles"
+
+typedef enum VkDiscardRectangleModeEXT {
+    VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT = 0,
+    VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT = 1,
+    VK_DISCARD_RECTANGLE_MODE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkDiscardRectangleModeEXT;
+typedef VkFlags VkPipelineDiscardRectangleStateCreateFlagsEXT;
+typedef struct VkPhysicalDeviceDiscardRectanglePropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           maxDiscardRectangles;
+} VkPhysicalDeviceDiscardRectanglePropertiesEXT;
+
+typedef struct VkPipelineDiscardRectangleStateCreateInfoEXT {
+    VkStructureType                                  sType;
+    const void*                                      pNext;
+    VkPipelineDiscardRectangleStateCreateFlagsEXT    flags;
+    VkDiscardRectangleModeEXT                        discardRectangleMode;
+    uint32_t                                         discardRectangleCount;
+    const VkRect2D*                                  pDiscardRectangles;
+} VkPipelineDiscardRectangleStateCreateInfoEXT;
+
+typedef void (VKAPI_PTR *PFN_vkCmdSetDiscardRectangleEXT)(VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VkRect2D* pDiscardRectangles);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdSetDiscardRectangleEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstDiscardRectangle,
+    uint32_t                                    discardRectangleCount,
+    const VkRect2D*                             pDiscardRectangles);
+#endif
+
+
+#define VK_EXT_conservative_rasterization 1
+#define VK_EXT_CONSERVATIVE_RASTERIZATION_SPEC_VERSION 1
+#define VK_EXT_CONSERVATIVE_RASTERIZATION_EXTENSION_NAME "VK_EXT_conservative_rasterization"
+
+typedef enum VkConservativeRasterizationModeEXT {
+    VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT = 0,
+    VK_CONSERVATIVE_RASTERIZATION_MODE_OVERESTIMATE_EXT = 1,
+    VK_CONSERVATIVE_RASTERIZATION_MODE_UNDERESTIMATE_EXT = 2,
+    VK_CONSERVATIVE_RASTERIZATION_MODE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkConservativeRasterizationModeEXT;
+typedef VkFlags VkPipelineRasterizationConservativeStateCreateFlagsEXT;
+typedef struct VkPhysicalDeviceConservativeRasterizationPropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    float              primitiveOverestimationSize;
+    float              maxExtraPrimitiveOverestimationSize;
+    float              extraPrimitiveOverestimationSizeGranularity;
+    VkBool32           primitiveUnderestimation;
+    VkBool32           conservativePointAndLineRasterization;
+    VkBool32           degenerateTrianglesRasterized;
+    VkBool32           degenerateLinesRasterized;
+    VkBool32           fullyCoveredFragmentShaderInputVariable;
+    VkBool32           conservativeRasterizationPostDepthCoverage;
+} VkPhysicalDeviceConservativeRasterizationPropertiesEXT;
+
+typedef struct VkPipelineRasterizationConservativeStateCreateInfoEXT {
+    VkStructureType                                           sType;
+    const void*                                               pNext;
+    VkPipelineRasterizationConservativeStateCreateFlagsEXT    flags;
+    VkConservativeRasterizationModeEXT                        conservativeRasterizationMode;
+    float                                                     extraPrimitiveOverestimationSize;
+} VkPipelineRasterizationConservativeStateCreateInfoEXT;
+
+
+
+#define VK_EXT_depth_clip_enable 1
+#define VK_EXT_DEPTH_CLIP_ENABLE_SPEC_VERSION 1
+#define VK_EXT_DEPTH_CLIP_ENABLE_EXTENSION_NAME "VK_EXT_depth_clip_enable"
+typedef VkFlags VkPipelineRasterizationDepthClipStateCreateFlagsEXT;
+typedef struct VkPhysicalDeviceDepthClipEnableFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           depthClipEnable;
+} VkPhysicalDeviceDepthClipEnableFeaturesEXT;
+
+typedef struct VkPipelineRasterizationDepthClipStateCreateInfoEXT {
+    VkStructureType                                        sType;
+    const void*                                            pNext;
+    VkPipelineRasterizationDepthClipStateCreateFlagsEXT    flags;
+    VkBool32                                               depthClipEnable;
+} VkPipelineRasterizationDepthClipStateCreateInfoEXT;
+
+
+
+#define VK_EXT_swapchain_colorspace 1
+#define VK_EXT_SWAPCHAIN_COLOR_SPACE_SPEC_VERSION 4
+#define VK_EXT_SWAPCHAIN_COLOR_SPACE_EXTENSION_NAME "VK_EXT_swapchain_colorspace"
+
+
+#define VK_EXT_hdr_metadata 1
+#define VK_EXT_HDR_METADATA_SPEC_VERSION  2
+#define VK_EXT_HDR_METADATA_EXTENSION_NAME "VK_EXT_hdr_metadata"
+typedef struct VkXYColorEXT {
+    float    x;
+    float    y;
+} VkXYColorEXT;
+
+typedef struct VkHdrMetadataEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkXYColorEXT       displayPrimaryRed;
+    VkXYColorEXT       displayPrimaryGreen;
+    VkXYColorEXT       displayPrimaryBlue;
+    VkXYColorEXT       whitePoint;
+    float              maxLuminance;
+    float              minLuminance;
+    float              maxContentLightLevel;
+    float              maxFrameAverageLightLevel;
+} VkHdrMetadataEXT;
+
+typedef void (VKAPI_PTR *PFN_vkSetHdrMetadataEXT)(VkDevice device, uint32_t swapchainCount, const VkSwapchainKHR* pSwapchains, const VkHdrMetadataEXT* pMetadata);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkSetHdrMetadataEXT(
+    VkDevice                                    device,
+    uint32_t                                    swapchainCount,
+    const VkSwapchainKHR*                       pSwapchains,
+    const VkHdrMetadataEXT*                     pMetadata);
+#endif
+
+
+#define VK_EXT_external_memory_dma_buf 1
+#define VK_EXT_EXTERNAL_MEMORY_DMA_BUF_SPEC_VERSION 1
+#define VK_EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME "VK_EXT_external_memory_dma_buf"
+
+
+#define VK_EXT_queue_family_foreign 1
+#define VK_EXT_QUEUE_FAMILY_FOREIGN_SPEC_VERSION 1
+#define VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME "VK_EXT_queue_family_foreign"
+#define VK_QUEUE_FAMILY_FOREIGN_EXT       (~2U)
+
+
+#define VK_EXT_debug_utils 1
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDebugUtilsMessengerEXT)
+#define VK_EXT_DEBUG_UTILS_SPEC_VERSION   2
+#define VK_EXT_DEBUG_UTILS_EXTENSION_NAME "VK_EXT_debug_utils"
+typedef VkFlags VkDebugUtilsMessengerCallbackDataFlagsEXT;
+
+typedef enum VkDebugUtilsMessageSeverityFlagBitsEXT {
+    VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT = 0x00000001,
+    VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT = 0x00000010,
+    VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT = 0x00000100,
+    VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT = 0x00001000,
+    VK_DEBUG_UTILS_MESSAGE_SEVERITY_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkDebugUtilsMessageSeverityFlagBitsEXT;
+
+typedef enum VkDebugUtilsMessageTypeFlagBitsEXT {
+    VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT = 0x00000001,
+    VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT = 0x00000002,
+    VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT = 0x00000004,
+    VK_DEBUG_UTILS_MESSAGE_TYPE_DEVICE_ADDRESS_BINDING_BIT_EXT = 0x00000008,
+    VK_DEBUG_UTILS_MESSAGE_TYPE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkDebugUtilsMessageTypeFlagBitsEXT;
+typedef VkFlags VkDebugUtilsMessageTypeFlagsEXT;
+typedef VkFlags VkDebugUtilsMessageSeverityFlagsEXT;
+typedef VkFlags VkDebugUtilsMessengerCreateFlagsEXT;
+typedef struct VkDebugUtilsLabelEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    const char*        pLabelName;
+    float              color[4];
+} VkDebugUtilsLabelEXT;
+
+typedef struct VkDebugUtilsObjectNameInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkObjectType       objectType;
+    uint64_t           objectHandle;
+    const char*        pObjectName;
+} VkDebugUtilsObjectNameInfoEXT;
+
+typedef struct VkDebugUtilsMessengerCallbackDataEXT {
+    VkStructureType                              sType;
+    const void*                                  pNext;
+    VkDebugUtilsMessengerCallbackDataFlagsEXT    flags;
+    const char*                                  pMessageIdName;
+    int32_t                                      messageIdNumber;
+    const char*                                  pMessage;
+    uint32_t                                     queueLabelCount;
+    const VkDebugUtilsLabelEXT*                  pQueueLabels;
+    uint32_t                                     cmdBufLabelCount;
+    const VkDebugUtilsLabelEXT*                  pCmdBufLabels;
+    uint32_t                                     objectCount;
+    const VkDebugUtilsObjectNameInfoEXT*         pObjects;
+} VkDebugUtilsMessengerCallbackDataEXT;
+
+typedef VkBool32 (VKAPI_PTR *PFN_vkDebugUtilsMessengerCallbackEXT)(
+    VkDebugUtilsMessageSeverityFlagBitsEXT           messageSeverity,
+    VkDebugUtilsMessageTypeFlagsEXT                  messageTypes,
+    const VkDebugUtilsMessengerCallbackDataEXT*      pCallbackData,
+    void*                                            pUserData);
+
+typedef struct VkDebugUtilsMessengerCreateInfoEXT {
+    VkStructureType                         sType;
+    const void*                             pNext;
+    VkDebugUtilsMessengerCreateFlagsEXT     flags;
+    VkDebugUtilsMessageSeverityFlagsEXT     messageSeverity;
+    VkDebugUtilsMessageTypeFlagsEXT         messageType;
+    PFN_vkDebugUtilsMessengerCallbackEXT    pfnUserCallback;
+    void*                                   pUserData;
+} VkDebugUtilsMessengerCreateInfoEXT;
+
+typedef struct VkDebugUtilsObjectTagInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkObjectType       objectType;
+    uint64_t           objectHandle;
+    uint64_t           tagName;
+    size_t             tagSize;
+    const void*        pTag;
+} VkDebugUtilsObjectTagInfoEXT;
+
+typedef VkResult (VKAPI_PTR *PFN_vkSetDebugUtilsObjectNameEXT)(VkDevice device, const VkDebugUtilsObjectNameInfoEXT* pNameInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkSetDebugUtilsObjectTagEXT)(VkDevice device, const VkDebugUtilsObjectTagInfoEXT* pTagInfo);
+typedef void (VKAPI_PTR *PFN_vkQueueBeginDebugUtilsLabelEXT)(VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo);
+typedef void (VKAPI_PTR *PFN_vkQueueEndDebugUtilsLabelEXT)(VkQueue queue);
+typedef void (VKAPI_PTR *PFN_vkQueueInsertDebugUtilsLabelEXT)(VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdBeginDebugUtilsLabelEXT)(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdEndDebugUtilsLabelEXT)(VkCommandBuffer commandBuffer);
+typedef void (VKAPI_PTR *PFN_vkCmdInsertDebugUtilsLabelEXT)(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateDebugUtilsMessengerEXT)(VkInstance instance, const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugUtilsMessengerEXT* pMessenger);
+typedef void (VKAPI_PTR *PFN_vkDestroyDebugUtilsMessengerEXT)(VkInstance instance, VkDebugUtilsMessengerEXT messenger, const VkAllocationCallbacks* pAllocator);
+typedef void (VKAPI_PTR *PFN_vkSubmitDebugUtilsMessageEXT)(VkInstance instance, VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT messageTypes, const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkSetDebugUtilsObjectNameEXT(
+    VkDevice                                    device,
+    const VkDebugUtilsObjectNameInfoEXT*        pNameInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkSetDebugUtilsObjectTagEXT(
+    VkDevice                                    device,
+    const VkDebugUtilsObjectTagInfoEXT*         pTagInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkQueueBeginDebugUtilsLabelEXT(
+    VkQueue                                     queue,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkQueueEndDebugUtilsLabelEXT(
+    VkQueue                                     queue);
+
+VKAPI_ATTR void VKAPI_CALL vkQueueInsertDebugUtilsLabelEXT(
+    VkQueue                                     queue,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBeginDebugUtilsLabelEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdEndDebugUtilsLabelEXT(
+    VkCommandBuffer                             commandBuffer);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdInsertDebugUtilsLabelEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateDebugUtilsMessengerEXT(
+    VkInstance                                  instance,
+    const VkDebugUtilsMessengerCreateInfoEXT*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDebugUtilsMessengerEXT*                   pMessenger);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyDebugUtilsMessengerEXT(
+    VkInstance                                  instance,
+    VkDebugUtilsMessengerEXT                    messenger,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR void VKAPI_CALL vkSubmitDebugUtilsMessageEXT(
+    VkInstance                                  instance,
+    VkDebugUtilsMessageSeverityFlagBitsEXT      messageSeverity,
+    VkDebugUtilsMessageTypeFlagsEXT             messageTypes,
+    const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData);
+#endif
+
+
+#define VK_EXT_sampler_filter_minmax 1
+#define VK_EXT_SAMPLER_FILTER_MINMAX_SPEC_VERSION 2
+#define VK_EXT_SAMPLER_FILTER_MINMAX_EXTENSION_NAME "VK_EXT_sampler_filter_minmax"
+typedef VkSamplerReductionMode VkSamplerReductionModeEXT;
+
+typedef VkSamplerReductionModeCreateInfo VkSamplerReductionModeCreateInfoEXT;
+
+typedef VkPhysicalDeviceSamplerFilterMinmaxProperties VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT;
+
+
+
+#define VK_AMD_gpu_shader_int16 1
+#define VK_AMD_GPU_SHADER_INT16_SPEC_VERSION 2
+#define VK_AMD_GPU_SHADER_INT16_EXTENSION_NAME "VK_AMD_gpu_shader_int16"
+
+
+#define VK_AMD_mixed_attachment_samples 1
+#define VK_AMD_MIXED_ATTACHMENT_SAMPLES_SPEC_VERSION 1
+#define VK_AMD_MIXED_ATTACHMENT_SAMPLES_EXTENSION_NAME "VK_AMD_mixed_attachment_samples"
+
+
+#define VK_AMD_shader_fragment_mask 1
+#define VK_AMD_SHADER_FRAGMENT_MASK_SPEC_VERSION 1
+#define VK_AMD_SHADER_FRAGMENT_MASK_EXTENSION_NAME "VK_AMD_shader_fragment_mask"
+
+
+#define VK_EXT_inline_uniform_block 1
+#define VK_EXT_INLINE_UNIFORM_BLOCK_SPEC_VERSION 1
+#define VK_EXT_INLINE_UNIFORM_BLOCK_EXTENSION_NAME "VK_EXT_inline_uniform_block"
+typedef VkPhysicalDeviceInlineUniformBlockFeatures VkPhysicalDeviceInlineUniformBlockFeaturesEXT;
+
+typedef VkPhysicalDeviceInlineUniformBlockProperties VkPhysicalDeviceInlineUniformBlockPropertiesEXT;
+
+typedef VkWriteDescriptorSetInlineUniformBlock VkWriteDescriptorSetInlineUniformBlockEXT;
+
+typedef VkDescriptorPoolInlineUniformBlockCreateInfo VkDescriptorPoolInlineUniformBlockCreateInfoEXT;
+
+
+
+#define VK_EXT_shader_stencil_export 1
+#define VK_EXT_SHADER_STENCIL_EXPORT_SPEC_VERSION 1
+#define VK_EXT_SHADER_STENCIL_EXPORT_EXTENSION_NAME "VK_EXT_shader_stencil_export"
+
+
+#define VK_EXT_sample_locations 1
+#define VK_EXT_SAMPLE_LOCATIONS_SPEC_VERSION 1
+#define VK_EXT_SAMPLE_LOCATIONS_EXTENSION_NAME "VK_EXT_sample_locations"
+typedef struct VkSampleLocationEXT {
+    float    x;
+    float    y;
+} VkSampleLocationEXT;
+
+typedef struct VkSampleLocationsInfoEXT {
+    VkStructureType               sType;
+    const void*                   pNext;
+    VkSampleCountFlagBits         sampleLocationsPerPixel;
+    VkExtent2D                    sampleLocationGridSize;
+    uint32_t                      sampleLocationsCount;
+    const VkSampleLocationEXT*    pSampleLocations;
+} VkSampleLocationsInfoEXT;
+
+typedef struct VkAttachmentSampleLocationsEXT {
+    uint32_t                    attachmentIndex;
+    VkSampleLocationsInfoEXT    sampleLocationsInfo;
+} VkAttachmentSampleLocationsEXT;
+
+typedef struct VkSubpassSampleLocationsEXT {
+    uint32_t                    subpassIndex;
+    VkSampleLocationsInfoEXT    sampleLocationsInfo;
+} VkSubpassSampleLocationsEXT;
+
+typedef struct VkRenderPassSampleLocationsBeginInfoEXT {
+    VkStructureType                          sType;
+    const void*                              pNext;
+    uint32_t                                 attachmentInitialSampleLocationsCount;
+    const VkAttachmentSampleLocationsEXT*    pAttachmentInitialSampleLocations;
+    uint32_t                                 postSubpassSampleLocationsCount;
+    const VkSubpassSampleLocationsEXT*       pPostSubpassSampleLocations;
+} VkRenderPassSampleLocationsBeginInfoEXT;
+
+typedef struct VkPipelineSampleLocationsStateCreateInfoEXT {
+    VkStructureType             sType;
+    const void*                 pNext;
+    VkBool32                    sampleLocationsEnable;
+    VkSampleLocationsInfoEXT    sampleLocationsInfo;
+} VkPipelineSampleLocationsStateCreateInfoEXT;
+
+typedef struct VkPhysicalDeviceSampleLocationsPropertiesEXT {
+    VkStructureType       sType;
+    void*                 pNext;
+    VkSampleCountFlags    sampleLocationSampleCounts;
+    VkExtent2D            maxSampleLocationGridSize;
+    float                 sampleLocationCoordinateRange[2];
+    uint32_t              sampleLocationSubPixelBits;
+    VkBool32              variableSampleLocations;
+} VkPhysicalDeviceSampleLocationsPropertiesEXT;
+
+typedef struct VkMultisamplePropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkExtent2D         maxSampleLocationGridSize;
+} VkMultisamplePropertiesEXT;
+
+typedef void (VKAPI_PTR *PFN_vkCmdSetSampleLocationsEXT)(VkCommandBuffer commandBuffer, const VkSampleLocationsInfoEXT* pSampleLocationsInfo);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT)(VkPhysicalDevice physicalDevice, VkSampleCountFlagBits samples, VkMultisamplePropertiesEXT* pMultisampleProperties);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdSetSampleLocationsEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkSampleLocationsInfoEXT*             pSampleLocationsInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMultisamplePropertiesEXT(
+    VkPhysicalDevice                            physicalDevice,
+    VkSampleCountFlagBits                       samples,
+    VkMultisamplePropertiesEXT*                 pMultisampleProperties);
+#endif
+
+
+#define VK_EXT_blend_operation_advanced 1
+#define VK_EXT_BLEND_OPERATION_ADVANCED_SPEC_VERSION 2
+#define VK_EXT_BLEND_OPERATION_ADVANCED_EXTENSION_NAME "VK_EXT_blend_operation_advanced"
+
+typedef enum VkBlendOverlapEXT {
+    VK_BLEND_OVERLAP_UNCORRELATED_EXT = 0,
+    VK_BLEND_OVERLAP_DISJOINT_EXT = 1,
+    VK_BLEND_OVERLAP_CONJOINT_EXT = 2,
+    VK_BLEND_OVERLAP_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkBlendOverlapEXT;
+typedef struct VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           advancedBlendCoherentOperations;
+} VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT;
+
+typedef struct VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           advancedBlendMaxColorAttachments;
+    VkBool32           advancedBlendIndependentBlend;
+    VkBool32           advancedBlendNonPremultipliedSrcColor;
+    VkBool32           advancedBlendNonPremultipliedDstColor;
+    VkBool32           advancedBlendCorrelatedOverlap;
+    VkBool32           advancedBlendAllOperations;
+} VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT;
+
+typedef struct VkPipelineColorBlendAdvancedStateCreateInfoEXT {
+    VkStructureType      sType;
+    const void*          pNext;
+    VkBool32             srcPremultiplied;
+    VkBool32             dstPremultiplied;
+    VkBlendOverlapEXT    blendOverlap;
+} VkPipelineColorBlendAdvancedStateCreateInfoEXT;
+
+
+
+#define VK_NV_fragment_coverage_to_color 1
+#define VK_NV_FRAGMENT_COVERAGE_TO_COLOR_SPEC_VERSION 1
+#define VK_NV_FRAGMENT_COVERAGE_TO_COLOR_EXTENSION_NAME "VK_NV_fragment_coverage_to_color"
+typedef VkFlags VkPipelineCoverageToColorStateCreateFlagsNV;
+typedef struct VkPipelineCoverageToColorStateCreateInfoNV {
+    VkStructureType                                sType;
+    const void*                                    pNext;
+    VkPipelineCoverageToColorStateCreateFlagsNV    flags;
+    VkBool32                                       coverageToColorEnable;
+    uint32_t                                       coverageToColorLocation;
+} VkPipelineCoverageToColorStateCreateInfoNV;
+
+
+
+#define VK_NV_framebuffer_mixed_samples 1
+#define VK_NV_FRAMEBUFFER_MIXED_SAMPLES_SPEC_VERSION 1
+#define VK_NV_FRAMEBUFFER_MIXED_SAMPLES_EXTENSION_NAME "VK_NV_framebuffer_mixed_samples"
+
+typedef enum VkCoverageModulationModeNV {
+    VK_COVERAGE_MODULATION_MODE_NONE_NV = 0,
+    VK_COVERAGE_MODULATION_MODE_RGB_NV = 1,
+    VK_COVERAGE_MODULATION_MODE_ALPHA_NV = 2,
+    VK_COVERAGE_MODULATION_MODE_RGBA_NV = 3,
+    VK_COVERAGE_MODULATION_MODE_MAX_ENUM_NV = 0x7FFFFFFF
+} VkCoverageModulationModeNV;
+typedef VkFlags VkPipelineCoverageModulationStateCreateFlagsNV;
+typedef struct VkPipelineCoverageModulationStateCreateInfoNV {
+    VkStructureType                                   sType;
+    const void*                                       pNext;
+    VkPipelineCoverageModulationStateCreateFlagsNV    flags;
+    VkCoverageModulationModeNV                        coverageModulationMode;
+    VkBool32                                          coverageModulationTableEnable;
+    uint32_t                                          coverageModulationTableCount;
+    const float*                                      pCoverageModulationTable;
+} VkPipelineCoverageModulationStateCreateInfoNV;
+
+
+
+#define VK_NV_fill_rectangle 1
+#define VK_NV_FILL_RECTANGLE_SPEC_VERSION 1
+#define VK_NV_FILL_RECTANGLE_EXTENSION_NAME "VK_NV_fill_rectangle"
+
+
+#define VK_NV_shader_sm_builtins 1
+#define VK_NV_SHADER_SM_BUILTINS_SPEC_VERSION 1
+#define VK_NV_SHADER_SM_BUILTINS_EXTENSION_NAME "VK_NV_shader_sm_builtins"
+typedef struct VkPhysicalDeviceShaderSMBuiltinsPropertiesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           shaderSMCount;
+    uint32_t           shaderWarpsPerSM;
+} VkPhysicalDeviceShaderSMBuiltinsPropertiesNV;
+
+typedef struct VkPhysicalDeviceShaderSMBuiltinsFeaturesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           shaderSMBuiltins;
+} VkPhysicalDeviceShaderSMBuiltinsFeaturesNV;
+
+
+
+#define VK_EXT_post_depth_coverage 1
+#define VK_EXT_POST_DEPTH_COVERAGE_SPEC_VERSION 1
+#define VK_EXT_POST_DEPTH_COVERAGE_EXTENSION_NAME "VK_EXT_post_depth_coverage"
+
+
+#define VK_EXT_image_drm_format_modifier 1
+#define VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_SPEC_VERSION 2
+#define VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_EXTENSION_NAME "VK_EXT_image_drm_format_modifier"
+typedef struct VkDrmFormatModifierPropertiesEXT {
+    uint64_t                drmFormatModifier;
+    uint32_t                drmFormatModifierPlaneCount;
+    VkFormatFeatureFlags    drmFormatModifierTilingFeatures;
+} VkDrmFormatModifierPropertiesEXT;
+
+typedef struct VkDrmFormatModifierPropertiesListEXT {
+    VkStructureType                      sType;
+    void*                                pNext;
+    uint32_t                             drmFormatModifierCount;
+    VkDrmFormatModifierPropertiesEXT*    pDrmFormatModifierProperties;
+} VkDrmFormatModifierPropertiesListEXT;
+
+typedef struct VkPhysicalDeviceImageDrmFormatModifierInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint64_t           drmFormatModifier;
+    VkSharingMode      sharingMode;
+    uint32_t           queueFamilyIndexCount;
+    const uint32_t*    pQueueFamilyIndices;
+} VkPhysicalDeviceImageDrmFormatModifierInfoEXT;
+
+typedef struct VkImageDrmFormatModifierListCreateInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           drmFormatModifierCount;
+    const uint64_t*    pDrmFormatModifiers;
+} VkImageDrmFormatModifierListCreateInfoEXT;
+
+typedef struct VkImageDrmFormatModifierExplicitCreateInfoEXT {
+    VkStructureType               sType;
+    const void*                   pNext;
+    uint64_t                      drmFormatModifier;
+    uint32_t                      drmFormatModifierPlaneCount;
+    const VkSubresourceLayout*    pPlaneLayouts;
+} VkImageDrmFormatModifierExplicitCreateInfoEXT;
+
+typedef struct VkImageDrmFormatModifierPropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    uint64_t           drmFormatModifier;
+} VkImageDrmFormatModifierPropertiesEXT;
+
+typedef struct VkDrmFormatModifierProperties2EXT {
+    uint64_t                 drmFormatModifier;
+    uint32_t                 drmFormatModifierPlaneCount;
+    VkFormatFeatureFlags2    drmFormatModifierTilingFeatures;
+} VkDrmFormatModifierProperties2EXT;
+
+typedef struct VkDrmFormatModifierPropertiesList2EXT {
+    VkStructureType                       sType;
+    void*                                 pNext;
+    uint32_t                              drmFormatModifierCount;
+    VkDrmFormatModifierProperties2EXT*    pDrmFormatModifierProperties;
+} VkDrmFormatModifierPropertiesList2EXT;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetImageDrmFormatModifierPropertiesEXT)(VkDevice device, VkImage image, VkImageDrmFormatModifierPropertiesEXT* pProperties);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetImageDrmFormatModifierPropertiesEXT(
+    VkDevice                                    device,
+    VkImage                                     image,
+    VkImageDrmFormatModifierPropertiesEXT*      pProperties);
+#endif
+
+
+#define VK_EXT_validation_cache 1
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkValidationCacheEXT)
+#define VK_EXT_VALIDATION_CACHE_SPEC_VERSION 1
+#define VK_EXT_VALIDATION_CACHE_EXTENSION_NAME "VK_EXT_validation_cache"
+
+typedef enum VkValidationCacheHeaderVersionEXT {
+    VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT = 1,
+    VK_VALIDATION_CACHE_HEADER_VERSION_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkValidationCacheHeaderVersionEXT;
+typedef VkFlags VkValidationCacheCreateFlagsEXT;
+typedef struct VkValidationCacheCreateInfoEXT {
+    VkStructureType                    sType;
+    const void*                        pNext;
+    VkValidationCacheCreateFlagsEXT    flags;
+    size_t                             initialDataSize;
+    const void*                        pInitialData;
+} VkValidationCacheCreateInfoEXT;
+
+typedef struct VkShaderModuleValidationCacheCreateInfoEXT {
+    VkStructureType         sType;
+    const void*             pNext;
+    VkValidationCacheEXT    validationCache;
+} VkShaderModuleValidationCacheCreateInfoEXT;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateValidationCacheEXT)(VkDevice device, const VkValidationCacheCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkValidationCacheEXT* pValidationCache);
+typedef void (VKAPI_PTR *PFN_vkDestroyValidationCacheEXT)(VkDevice device, VkValidationCacheEXT validationCache, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkMergeValidationCachesEXT)(VkDevice device, VkValidationCacheEXT dstCache, uint32_t srcCacheCount, const VkValidationCacheEXT* pSrcCaches);
+typedef VkResult (VKAPI_PTR *PFN_vkGetValidationCacheDataEXT)(VkDevice device, VkValidationCacheEXT validationCache, size_t* pDataSize, void* pData);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateValidationCacheEXT(
+    VkDevice                                    device,
+    const VkValidationCacheCreateInfoEXT*       pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkValidationCacheEXT*                       pValidationCache);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyValidationCacheEXT(
+    VkDevice                                    device,
+    VkValidationCacheEXT                        validationCache,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkMergeValidationCachesEXT(
+    VkDevice                                    device,
+    VkValidationCacheEXT                        dstCache,
+    uint32_t                                    srcCacheCount,
+    const VkValidationCacheEXT*                 pSrcCaches);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetValidationCacheDataEXT(
+    VkDevice                                    device,
+    VkValidationCacheEXT                        validationCache,
+    size_t*                                     pDataSize,
+    void*                                       pData);
+#endif
+
+
+#define VK_EXT_descriptor_indexing 1
+#define VK_EXT_DESCRIPTOR_INDEXING_SPEC_VERSION 2
+#define VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME "VK_EXT_descriptor_indexing"
+typedef VkDescriptorBindingFlagBits VkDescriptorBindingFlagBitsEXT;
+
+typedef VkDescriptorBindingFlags VkDescriptorBindingFlagsEXT;
+
+typedef VkDescriptorSetLayoutBindingFlagsCreateInfo VkDescriptorSetLayoutBindingFlagsCreateInfoEXT;
+
+typedef VkPhysicalDeviceDescriptorIndexingFeatures VkPhysicalDeviceDescriptorIndexingFeaturesEXT;
+
+typedef VkPhysicalDeviceDescriptorIndexingProperties VkPhysicalDeviceDescriptorIndexingPropertiesEXT;
+
+typedef VkDescriptorSetVariableDescriptorCountAllocateInfo VkDescriptorSetVariableDescriptorCountAllocateInfoEXT;
+
+typedef VkDescriptorSetVariableDescriptorCountLayoutSupport VkDescriptorSetVariableDescriptorCountLayoutSupportEXT;
+
+
+
+#define VK_EXT_shader_viewport_index_layer 1
+#define VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_SPEC_VERSION 1
+#define VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_EXTENSION_NAME "VK_EXT_shader_viewport_index_layer"
+
+
+#define VK_NV_shading_rate_image 1
+#define VK_NV_SHADING_RATE_IMAGE_SPEC_VERSION 3
+#define VK_NV_SHADING_RATE_IMAGE_EXTENSION_NAME "VK_NV_shading_rate_image"
+
+typedef enum VkShadingRatePaletteEntryNV {
+    VK_SHADING_RATE_PALETTE_ENTRY_NO_INVOCATIONS_NV = 0,
+    VK_SHADING_RATE_PALETTE_ENTRY_16_INVOCATIONS_PER_PIXEL_NV = 1,
+    VK_SHADING_RATE_PALETTE_ENTRY_8_INVOCATIONS_PER_PIXEL_NV = 2,
+    VK_SHADING_RATE_PALETTE_ENTRY_4_INVOCATIONS_PER_PIXEL_NV = 3,
+    VK_SHADING_RATE_PALETTE_ENTRY_2_INVOCATIONS_PER_PIXEL_NV = 4,
+    VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_PIXEL_NV = 5,
+    VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X1_PIXELS_NV = 6,
+    VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_1X2_PIXELS_NV = 7,
+    VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X2_PIXELS_NV = 8,
+    VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X2_PIXELS_NV = 9,
+    VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X4_PIXELS_NV = 10,
+    VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X4_PIXELS_NV = 11,
+    VK_SHADING_RATE_PALETTE_ENTRY_MAX_ENUM_NV = 0x7FFFFFFF
+} VkShadingRatePaletteEntryNV;
+
+typedef enum VkCoarseSampleOrderTypeNV {
+    VK_COARSE_SAMPLE_ORDER_TYPE_DEFAULT_NV = 0,
+    VK_COARSE_SAMPLE_ORDER_TYPE_CUSTOM_NV = 1,
+    VK_COARSE_SAMPLE_ORDER_TYPE_PIXEL_MAJOR_NV = 2,
+    VK_COARSE_SAMPLE_ORDER_TYPE_SAMPLE_MAJOR_NV = 3,
+    VK_COARSE_SAMPLE_ORDER_TYPE_MAX_ENUM_NV = 0x7FFFFFFF
+} VkCoarseSampleOrderTypeNV;
+typedef struct VkShadingRatePaletteNV {
+    uint32_t                              shadingRatePaletteEntryCount;
+    const VkShadingRatePaletteEntryNV*    pShadingRatePaletteEntries;
+} VkShadingRatePaletteNV;
+
+typedef struct VkPipelineViewportShadingRateImageStateCreateInfoNV {
+    VkStructureType                  sType;
+    const void*                      pNext;
+    VkBool32                         shadingRateImageEnable;
+    uint32_t                         viewportCount;
+    const VkShadingRatePaletteNV*    pShadingRatePalettes;
+} VkPipelineViewportShadingRateImageStateCreateInfoNV;
+
+typedef struct VkPhysicalDeviceShadingRateImageFeaturesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           shadingRateImage;
+    VkBool32           shadingRateCoarseSampleOrder;
+} VkPhysicalDeviceShadingRateImageFeaturesNV;
+
+typedef struct VkPhysicalDeviceShadingRateImagePropertiesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkExtent2D         shadingRateTexelSize;
+    uint32_t           shadingRatePaletteSize;
+    uint32_t           shadingRateMaxCoarseSamples;
+} VkPhysicalDeviceShadingRateImagePropertiesNV;
+
+typedef struct VkCoarseSampleLocationNV {
+    uint32_t    pixelX;
+    uint32_t    pixelY;
+    uint32_t    sample;
+} VkCoarseSampleLocationNV;
+
+typedef struct VkCoarseSampleOrderCustomNV {
+    VkShadingRatePaletteEntryNV        shadingRate;
+    uint32_t                           sampleCount;
+    uint32_t                           sampleLocationCount;
+    const VkCoarseSampleLocationNV*    pSampleLocations;
+} VkCoarseSampleOrderCustomNV;
+
+typedef struct VkPipelineViewportCoarseSampleOrderStateCreateInfoNV {
+    VkStructureType                       sType;
+    const void*                           pNext;
+    VkCoarseSampleOrderTypeNV             sampleOrderType;
+    uint32_t                              customSampleOrderCount;
+    const VkCoarseSampleOrderCustomNV*    pCustomSampleOrders;
+} VkPipelineViewportCoarseSampleOrderStateCreateInfoNV;
+
+typedef void (VKAPI_PTR *PFN_vkCmdBindShadingRateImageNV)(VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout);
+typedef void (VKAPI_PTR *PFN_vkCmdSetViewportShadingRatePaletteNV)(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkShadingRatePaletteNV* pShadingRatePalettes);
+typedef void (VKAPI_PTR *PFN_vkCmdSetCoarseSampleOrderNV)(VkCommandBuffer commandBuffer, VkCoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VkCoarseSampleOrderCustomNV* pCustomSampleOrders);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdBindShadingRateImageNV(
+    VkCommandBuffer                             commandBuffer,
+    VkImageView                                 imageView,
+    VkImageLayout                               imageLayout);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportShadingRatePaletteNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstViewport,
+    uint32_t                                    viewportCount,
+    const VkShadingRatePaletteNV*               pShadingRatePalettes);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetCoarseSampleOrderNV(
+    VkCommandBuffer                             commandBuffer,
+    VkCoarseSampleOrderTypeNV                   sampleOrderType,
+    uint32_t                                    customSampleOrderCount,
+    const VkCoarseSampleOrderCustomNV*          pCustomSampleOrders);
+#endif
+
+
+#define VK_NV_ray_tracing 1
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkAccelerationStructureNV)
+#define VK_NV_RAY_TRACING_SPEC_VERSION    3
+#define VK_NV_RAY_TRACING_EXTENSION_NAME  "VK_NV_ray_tracing"
+#define VK_SHADER_UNUSED_KHR              (~0U)
+#define VK_SHADER_UNUSED_NV               VK_SHADER_UNUSED_KHR
+
+typedef enum VkRayTracingShaderGroupTypeKHR {
+    VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR = 0,
+    VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR = 1,
+    VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR = 2,
+    VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR,
+    VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_NV = VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR,
+    VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_NV = VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR,
+    VK_RAY_TRACING_SHADER_GROUP_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkRayTracingShaderGroupTypeKHR;
+typedef VkRayTracingShaderGroupTypeKHR VkRayTracingShaderGroupTypeNV;
+
+
+typedef enum VkGeometryTypeKHR {
+    VK_GEOMETRY_TYPE_TRIANGLES_KHR = 0,
+    VK_GEOMETRY_TYPE_AABBS_KHR = 1,
+    VK_GEOMETRY_TYPE_INSTANCES_KHR = 2,
+    VK_GEOMETRY_TYPE_TRIANGLES_NV = VK_GEOMETRY_TYPE_TRIANGLES_KHR,
+    VK_GEOMETRY_TYPE_AABBS_NV = VK_GEOMETRY_TYPE_AABBS_KHR,
+    VK_GEOMETRY_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkGeometryTypeKHR;
+typedef VkGeometryTypeKHR VkGeometryTypeNV;
+
+
+typedef enum VkAccelerationStructureTypeKHR {
+    VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR = 0,
+    VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR = 1,
+    VK_ACCELERATION_STRUCTURE_TYPE_GENERIC_KHR = 2,
+    VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV = VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR,
+    VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR,
+    VK_ACCELERATION_STRUCTURE_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkAccelerationStructureTypeKHR;
+typedef VkAccelerationStructureTypeKHR VkAccelerationStructureTypeNV;
+
+
+typedef enum VkCopyAccelerationStructureModeKHR {
+    VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_KHR = 0,
+    VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR = 1,
+    VK_COPY_ACCELERATION_STRUCTURE_MODE_SERIALIZE_KHR = 2,
+    VK_COPY_ACCELERATION_STRUCTURE_MODE_DESERIALIZE_KHR = 3,
+    VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_NV = VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_KHR,
+    VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV = VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR,
+    VK_COPY_ACCELERATION_STRUCTURE_MODE_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkCopyAccelerationStructureModeKHR;
+typedef VkCopyAccelerationStructureModeKHR VkCopyAccelerationStructureModeNV;
+
+
+typedef enum VkAccelerationStructureMemoryRequirementsTypeNV {
+    VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV = 0,
+    VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV = 1,
+    VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV = 2,
+    VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_MAX_ENUM_NV = 0x7FFFFFFF
+} VkAccelerationStructureMemoryRequirementsTypeNV;
+
+typedef enum VkGeometryFlagBitsKHR {
+    VK_GEOMETRY_OPAQUE_BIT_KHR = 0x00000001,
+    VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_KHR = 0x00000002,
+    VK_GEOMETRY_OPAQUE_BIT_NV = VK_GEOMETRY_OPAQUE_BIT_KHR,
+    VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_NV = VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_KHR,
+    VK_GEOMETRY_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkGeometryFlagBitsKHR;
+typedef VkFlags VkGeometryFlagsKHR;
+typedef VkGeometryFlagsKHR VkGeometryFlagsNV;
+
+typedef VkGeometryFlagBitsKHR VkGeometryFlagBitsNV;
+
+
+typedef enum VkGeometryInstanceFlagBitsKHR {
+    VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR = 0x00000001,
+    VK_GEOMETRY_INSTANCE_TRIANGLE_FLIP_FACING_BIT_KHR = 0x00000002,
+    VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_KHR = 0x00000004,
+    VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_KHR = 0x00000008,
+    VK_GEOMETRY_INSTANCE_FORCE_OPACITY_MICROMAP_2_STATE_EXT = 0x00000010,
+    VK_GEOMETRY_INSTANCE_DISABLE_OPACITY_MICROMAPS_EXT = 0x00000020,
+    VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_KHR = VK_GEOMETRY_INSTANCE_TRIANGLE_FLIP_FACING_BIT_KHR,
+    VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV = VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR,
+    VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_NV = VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_KHR,
+    VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_NV = VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_KHR,
+    VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_NV = VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_KHR,
+    VK_GEOMETRY_INSTANCE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkGeometryInstanceFlagBitsKHR;
+typedef VkFlags VkGeometryInstanceFlagsKHR;
+typedef VkGeometryInstanceFlagsKHR VkGeometryInstanceFlagsNV;
+
+typedef VkGeometryInstanceFlagBitsKHR VkGeometryInstanceFlagBitsNV;
+
+
+typedef enum VkBuildAccelerationStructureFlagBitsKHR {
+    VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR = 0x00000001,
+    VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR = 0x00000002,
+    VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR = 0x00000004,
+    VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_KHR = 0x00000008,
+    VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_KHR = 0x00000010,
+    VK_BUILD_ACCELERATION_STRUCTURE_MOTION_BIT_NV = 0x00000020,
+    VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_OPACITY_MICROMAP_UPDATE_EXT = 0x00000040,
+    VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DISABLE_OPACITY_MICROMAPS_EXT = 0x00000080,
+    VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_OPACITY_MICROMAP_DATA_UPDATE_EXT = 0x00000100,
+    VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR,
+    VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR,
+    VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR,
+    VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_KHR,
+    VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_KHR,
+    VK_BUILD_ACCELERATION_STRUCTURE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkBuildAccelerationStructureFlagBitsKHR;
+typedef VkFlags VkBuildAccelerationStructureFlagsKHR;
+typedef VkBuildAccelerationStructureFlagsKHR VkBuildAccelerationStructureFlagsNV;
+
+typedef VkBuildAccelerationStructureFlagBitsKHR VkBuildAccelerationStructureFlagBitsNV;
+
+typedef struct VkRayTracingShaderGroupCreateInfoNV {
+    VkStructureType                   sType;
+    const void*                       pNext;
+    VkRayTracingShaderGroupTypeKHR    type;
+    uint32_t                          generalShader;
+    uint32_t                          closestHitShader;
+    uint32_t                          anyHitShader;
+    uint32_t                          intersectionShader;
+} VkRayTracingShaderGroupCreateInfoNV;
+
+typedef struct VkRayTracingPipelineCreateInfoNV {
+    VkStructureType                               sType;
+    const void*                                   pNext;
+    VkPipelineCreateFlags                         flags;
+    uint32_t                                      stageCount;
+    const VkPipelineShaderStageCreateInfo*        pStages;
+    uint32_t                                      groupCount;
+    const VkRayTracingShaderGroupCreateInfoNV*    pGroups;
+    uint32_t                                      maxRecursionDepth;
+    VkPipelineLayout                              layout;
+    VkPipeline                                    basePipelineHandle;
+    int32_t                                       basePipelineIndex;
+} VkRayTracingPipelineCreateInfoNV;
+
+typedef struct VkGeometryTrianglesNV {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkBuffer           vertexData;
+    VkDeviceSize       vertexOffset;
+    uint32_t           vertexCount;
+    VkDeviceSize       vertexStride;
+    VkFormat           vertexFormat;
+    VkBuffer           indexData;
+    VkDeviceSize       indexOffset;
+    uint32_t           indexCount;
+    VkIndexType        indexType;
+    VkBuffer           transformData;
+    VkDeviceSize       transformOffset;
+} VkGeometryTrianglesNV;
+
+typedef struct VkGeometryAABBNV {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkBuffer           aabbData;
+    uint32_t           numAABBs;
+    uint32_t           stride;
+    VkDeviceSize       offset;
+} VkGeometryAABBNV;
+
+typedef struct VkGeometryDataNV {
+    VkGeometryTrianglesNV    triangles;
+    VkGeometryAABBNV         aabbs;
+} VkGeometryDataNV;
+
+typedef struct VkGeometryNV {
+    VkStructureType       sType;
+    const void*           pNext;
+    VkGeometryTypeKHR     geometryType;
+    VkGeometryDataNV      geometry;
+    VkGeometryFlagsKHR    flags;
+} VkGeometryNV;
+
+typedef struct VkAccelerationStructureInfoNV {
+    VkStructureType                        sType;
+    const void*                            pNext;
+    VkAccelerationStructureTypeNV          type;
+    VkBuildAccelerationStructureFlagsNV    flags;
+    uint32_t                               instanceCount;
+    uint32_t                               geometryCount;
+    const VkGeometryNV*                    pGeometries;
+} VkAccelerationStructureInfoNV;
+
+typedef struct VkAccelerationStructureCreateInfoNV {
+    VkStructureType                  sType;
+    const void*                      pNext;
+    VkDeviceSize                     compactedSize;
+    VkAccelerationStructureInfoNV    info;
+} VkAccelerationStructureCreateInfoNV;
+
+typedef struct VkBindAccelerationStructureMemoryInfoNV {
+    VkStructureType              sType;
+    const void*                  pNext;
+    VkAccelerationStructureNV    accelerationStructure;
+    VkDeviceMemory               memory;
+    VkDeviceSize                 memoryOffset;
+    uint32_t                     deviceIndexCount;
+    const uint32_t*              pDeviceIndices;
+} VkBindAccelerationStructureMemoryInfoNV;
+
+typedef struct VkWriteDescriptorSetAccelerationStructureNV {
+    VkStructureType                     sType;
+    const void*                         pNext;
+    uint32_t                            accelerationStructureCount;
+    const VkAccelerationStructureNV*    pAccelerationStructures;
+} VkWriteDescriptorSetAccelerationStructureNV;
+
+typedef struct VkAccelerationStructureMemoryRequirementsInfoNV {
+    VkStructureType                                    sType;
+    const void*                                        pNext;
+    VkAccelerationStructureMemoryRequirementsTypeNV    type;
+    VkAccelerationStructureNV                          accelerationStructure;
+} VkAccelerationStructureMemoryRequirementsInfoNV;
+
+typedef struct VkPhysicalDeviceRayTracingPropertiesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           shaderGroupHandleSize;
+    uint32_t           maxRecursionDepth;
+    uint32_t           maxShaderGroupStride;
+    uint32_t           shaderGroupBaseAlignment;
+    uint64_t           maxGeometryCount;
+    uint64_t           maxInstanceCount;
+    uint64_t           maxTriangleCount;
+    uint32_t           maxDescriptorSetAccelerationStructures;
+} VkPhysicalDeviceRayTracingPropertiesNV;
+
+typedef struct VkTransformMatrixKHR {
+    float    matrix[3][4];
+} VkTransformMatrixKHR;
+
+typedef VkTransformMatrixKHR VkTransformMatrixNV;
+
+typedef struct VkAabbPositionsKHR {
+    float    minX;
+    float    minY;
+    float    minZ;
+    float    maxX;
+    float    maxY;
+    float    maxZ;
+} VkAabbPositionsKHR;
+
+typedef VkAabbPositionsKHR VkAabbPositionsNV;
+
+typedef struct VkAccelerationStructureInstanceKHR {
+    VkTransformMatrixKHR          transform;
+    uint32_t                      instanceCustomIndex:24;
+    uint32_t                      mask:8;
+    uint32_t                      instanceShaderBindingTableRecordOffset:24;
+    VkGeometryInstanceFlagsKHR    flags:8;
+    uint64_t                      accelerationStructureReference;
+} VkAccelerationStructureInstanceKHR;
+
+typedef VkAccelerationStructureInstanceKHR VkAccelerationStructureInstanceNV;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateAccelerationStructureNV)(VkDevice device, const VkAccelerationStructureCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkAccelerationStructureNV* pAccelerationStructure);
+typedef void (VKAPI_PTR *PFN_vkDestroyAccelerationStructureNV)(VkDevice device, VkAccelerationStructureNV accelerationStructure, const VkAllocationCallbacks* pAllocator);
+typedef void (VKAPI_PTR *PFN_vkGetAccelerationStructureMemoryRequirementsNV)(VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements);
+typedef VkResult (VKAPI_PTR *PFN_vkBindAccelerationStructureMemoryNV)(VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV* pBindInfos);
+typedef void (VKAPI_PTR *PFN_vkCmdBuildAccelerationStructureNV)(VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV* pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset, VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset);
+typedef void (VKAPI_PTR *PFN_vkCmdCopyAccelerationStructureNV)(VkCommandBuffer commandBuffer, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkCopyAccelerationStructureModeKHR mode);
+typedef void (VKAPI_PTR *PFN_vkCmdTraceRaysNV)(VkCommandBuffer commandBuffer, VkBuffer raygenShaderBindingTableBuffer, VkDeviceSize raygenShaderBindingOffset, VkBuffer missShaderBindingTableBuffer, VkDeviceSize missShaderBindingOffset, VkDeviceSize missShaderBindingStride, VkBuffer hitShaderBindingTableBuffer, VkDeviceSize hitShaderBindingOffset, VkDeviceSize hitShaderBindingStride, VkBuffer callableShaderBindingTableBuffer, VkDeviceSize callableShaderBindingOffset, VkDeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateRayTracingPipelinesNV)(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoNV* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines);
+typedef VkResult (VKAPI_PTR *PFN_vkGetRayTracingShaderGroupHandlesKHR)(VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData);
+typedef VkResult (VKAPI_PTR *PFN_vkGetRayTracingShaderGroupHandlesNV)(VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData);
+typedef VkResult (VKAPI_PTR *PFN_vkGetAccelerationStructureHandleNV)(VkDevice device, VkAccelerationStructureNV accelerationStructure, size_t dataSize, void* pData);
+typedef void (VKAPI_PTR *PFN_vkCmdWriteAccelerationStructuresPropertiesNV)(VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureNV* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery);
+typedef VkResult (VKAPI_PTR *PFN_vkCompileDeferredNV)(VkDevice device, VkPipeline pipeline, uint32_t shader);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateAccelerationStructureNV(
+    VkDevice                                    device,
+    const VkAccelerationStructureCreateInfoNV*  pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkAccelerationStructureNV*                  pAccelerationStructure);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyAccelerationStructureNV(
+    VkDevice                                    device,
+    VkAccelerationStructureNV                   accelerationStructure,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR void VKAPI_CALL vkGetAccelerationStructureMemoryRequirementsNV(
+    VkDevice                                    device,
+    const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo,
+    VkMemoryRequirements2KHR*                   pMemoryRequirements);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkBindAccelerationStructureMemoryNV(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindAccelerationStructureMemoryInfoNV* pBindInfos);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBuildAccelerationStructureNV(
+    VkCommandBuffer                             commandBuffer,
+    const VkAccelerationStructureInfoNV*        pInfo,
+    VkBuffer                                    instanceData,
+    VkDeviceSize                                instanceOffset,
+    VkBool32                                    update,
+    VkAccelerationStructureNV                   dst,
+    VkAccelerationStructureNV                   src,
+    VkBuffer                                    scratch,
+    VkDeviceSize                                scratchOffset);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyAccelerationStructureNV(
+    VkCommandBuffer                             commandBuffer,
+    VkAccelerationStructureNV                   dst,
+    VkAccelerationStructureNV                   src,
+    VkCopyAccelerationStructureModeKHR          mode);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdTraceRaysNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    raygenShaderBindingTableBuffer,
+    VkDeviceSize                                raygenShaderBindingOffset,
+    VkBuffer                                    missShaderBindingTableBuffer,
+    VkDeviceSize                                missShaderBindingOffset,
+    VkDeviceSize                                missShaderBindingStride,
+    VkBuffer                                    hitShaderBindingTableBuffer,
+    VkDeviceSize                                hitShaderBindingOffset,
+    VkDeviceSize                                hitShaderBindingStride,
+    VkBuffer                                    callableShaderBindingTableBuffer,
+    VkDeviceSize                                callableShaderBindingOffset,
+    VkDeviceSize                                callableShaderBindingStride,
+    uint32_t                                    width,
+    uint32_t                                    height,
+    uint32_t                                    depth);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateRayTracingPipelinesNV(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    createInfoCount,
+    const VkRayTracingPipelineCreateInfoNV*     pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipeline*                                 pPipelines);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetRayTracingShaderGroupHandlesKHR(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    uint32_t                                    firstGroup,
+    uint32_t                                    groupCount,
+    size_t                                      dataSize,
+    void*                                       pData);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetRayTracingShaderGroupHandlesNV(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    uint32_t                                    firstGroup,
+    uint32_t                                    groupCount,
+    size_t                                      dataSize,
+    void*                                       pData);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetAccelerationStructureHandleNV(
+    VkDevice                                    device,
+    VkAccelerationStructureNV                   accelerationStructure,
+    size_t                                      dataSize,
+    void*                                       pData);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdWriteAccelerationStructuresPropertiesNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    accelerationStructureCount,
+    const VkAccelerationStructureNV*            pAccelerationStructures,
+    VkQueryType                                 queryType,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCompileDeferredNV(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    uint32_t                                    shader);
+#endif
+
+
+#define VK_NV_representative_fragment_test 1
+#define VK_NV_REPRESENTATIVE_FRAGMENT_TEST_SPEC_VERSION 2
+#define VK_NV_REPRESENTATIVE_FRAGMENT_TEST_EXTENSION_NAME "VK_NV_representative_fragment_test"
+typedef struct VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           representativeFragmentTest;
+} VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV;
+
+typedef struct VkPipelineRepresentativeFragmentTestStateCreateInfoNV {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkBool32           representativeFragmentTestEnable;
+} VkPipelineRepresentativeFragmentTestStateCreateInfoNV;
+
+
+
+#define VK_EXT_filter_cubic 1
+#define VK_EXT_FILTER_CUBIC_SPEC_VERSION  3
+#define VK_EXT_FILTER_CUBIC_EXTENSION_NAME "VK_EXT_filter_cubic"
+typedef struct VkPhysicalDeviceImageViewImageFormatInfoEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkImageViewType    imageViewType;
+} VkPhysicalDeviceImageViewImageFormatInfoEXT;
+
+typedef struct VkFilterCubicImageViewImageFormatPropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           filterCubic;
+    VkBool32           filterCubicMinmax;
+} VkFilterCubicImageViewImageFormatPropertiesEXT;
+
+
+
+#define VK_QCOM_render_pass_shader_resolve 1
+#define VK_QCOM_RENDER_PASS_SHADER_RESOLVE_SPEC_VERSION 4
+#define VK_QCOM_RENDER_PASS_SHADER_RESOLVE_EXTENSION_NAME "VK_QCOM_render_pass_shader_resolve"
+
+
+#define VK_EXT_global_priority 1
+#define VK_EXT_GLOBAL_PRIORITY_SPEC_VERSION 2
+#define VK_EXT_GLOBAL_PRIORITY_EXTENSION_NAME "VK_EXT_global_priority"
+typedef VkQueueGlobalPriorityKHR VkQueueGlobalPriorityEXT;
+
+typedef VkDeviceQueueGlobalPriorityCreateInfoKHR VkDeviceQueueGlobalPriorityCreateInfoEXT;
+
+
+
+#define VK_EXT_external_memory_host 1
+#define VK_EXT_EXTERNAL_MEMORY_HOST_SPEC_VERSION 1
+#define VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME "VK_EXT_external_memory_host"
+typedef struct VkImportMemoryHostPointerInfoEXT {
+    VkStructureType                       sType;
+    const void*                           pNext;
+    VkExternalMemoryHandleTypeFlagBits    handleType;
+    void*                                 pHostPointer;
+} VkImportMemoryHostPointerInfoEXT;
+
+typedef struct VkMemoryHostPointerPropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           memoryTypeBits;
+} VkMemoryHostPointerPropertiesEXT;
+
+typedef struct VkPhysicalDeviceExternalMemoryHostPropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkDeviceSize       minImportedHostPointerAlignment;
+} VkPhysicalDeviceExternalMemoryHostPropertiesEXT;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryHostPointerPropertiesEXT)(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryHostPointerPropertiesEXT(
+    VkDevice                                    device,
+    VkExternalMemoryHandleTypeFlagBits          handleType,
+    const void*                                 pHostPointer,
+    VkMemoryHostPointerPropertiesEXT*           pMemoryHostPointerProperties);
+#endif
+
+
+#define VK_AMD_buffer_marker 1
+#define VK_AMD_BUFFER_MARKER_SPEC_VERSION 1
+#define VK_AMD_BUFFER_MARKER_EXTENSION_NAME "VK_AMD_buffer_marker"
+typedef void (VKAPI_PTR *PFN_vkCmdWriteBufferMarkerAMD)(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdWriteBufferMarkerAMD(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineStageFlagBits                     pipelineStage,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    uint32_t                                    marker);
+#endif
+
+
+#define VK_AMD_pipeline_compiler_control 1
+#define VK_AMD_PIPELINE_COMPILER_CONTROL_SPEC_VERSION 1
+#define VK_AMD_PIPELINE_COMPILER_CONTROL_EXTENSION_NAME "VK_AMD_pipeline_compiler_control"
+
+typedef enum VkPipelineCompilerControlFlagBitsAMD {
+    VK_PIPELINE_COMPILER_CONTROL_FLAG_BITS_MAX_ENUM_AMD = 0x7FFFFFFF
+} VkPipelineCompilerControlFlagBitsAMD;
+typedef VkFlags VkPipelineCompilerControlFlagsAMD;
+typedef struct VkPipelineCompilerControlCreateInfoAMD {
+    VkStructureType                      sType;
+    const void*                          pNext;
+    VkPipelineCompilerControlFlagsAMD    compilerControlFlags;
+} VkPipelineCompilerControlCreateInfoAMD;
+
+
+
+#define VK_EXT_calibrated_timestamps 1
+#define VK_EXT_CALIBRATED_TIMESTAMPS_SPEC_VERSION 2
+#define VK_EXT_CALIBRATED_TIMESTAMPS_EXTENSION_NAME "VK_EXT_calibrated_timestamps"
+
+typedef enum VkTimeDomainEXT {
+    VK_TIME_DOMAIN_DEVICE_EXT = 0,
+    VK_TIME_DOMAIN_CLOCK_MONOTONIC_EXT = 1,
+    VK_TIME_DOMAIN_CLOCK_MONOTONIC_RAW_EXT = 2,
+    VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_EXT = 3,
+    VK_TIME_DOMAIN_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkTimeDomainEXT;
+typedef struct VkCalibratedTimestampInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkTimeDomainEXT    timeDomain;
+} VkCalibratedTimestampInfoEXT;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT)(VkPhysicalDevice physicalDevice, uint32_t* pTimeDomainCount, VkTimeDomainEXT* pTimeDomains);
+typedef VkResult (VKAPI_PTR *PFN_vkGetCalibratedTimestampsEXT)(VkDevice device, uint32_t timestampCount, const VkCalibratedTimestampInfoEXT* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceCalibrateableTimeDomainsEXT(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pTimeDomainCount,
+    VkTimeDomainEXT*                            pTimeDomains);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetCalibratedTimestampsEXT(
+    VkDevice                                    device,
+    uint32_t                                    timestampCount,
+    const VkCalibratedTimestampInfoEXT*         pTimestampInfos,
+    uint64_t*                                   pTimestamps,
+    uint64_t*                                   pMaxDeviation);
+#endif
+
+
+#define VK_AMD_shader_core_properties 1
+#define VK_AMD_SHADER_CORE_PROPERTIES_SPEC_VERSION 2
+#define VK_AMD_SHADER_CORE_PROPERTIES_EXTENSION_NAME "VK_AMD_shader_core_properties"
+typedef struct VkPhysicalDeviceShaderCorePropertiesAMD {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           shaderEngineCount;
+    uint32_t           shaderArraysPerEngineCount;
+    uint32_t           computeUnitsPerShaderArray;
+    uint32_t           simdPerComputeUnit;
+    uint32_t           wavefrontsPerSimd;
+    uint32_t           wavefrontSize;
+    uint32_t           sgprsPerSimd;
+    uint32_t           minSgprAllocation;
+    uint32_t           maxSgprAllocation;
+    uint32_t           sgprAllocationGranularity;
+    uint32_t           vgprsPerSimd;
+    uint32_t           minVgprAllocation;
+    uint32_t           maxVgprAllocation;
+    uint32_t           vgprAllocationGranularity;
+} VkPhysicalDeviceShaderCorePropertiesAMD;
+
+
+
+#define VK_AMD_memory_overallocation_behavior 1
+#define VK_AMD_MEMORY_OVERALLOCATION_BEHAVIOR_SPEC_VERSION 1
+#define VK_AMD_MEMORY_OVERALLOCATION_BEHAVIOR_EXTENSION_NAME "VK_AMD_memory_overallocation_behavior"
+
+typedef enum VkMemoryOverallocationBehaviorAMD {
+    VK_MEMORY_OVERALLOCATION_BEHAVIOR_DEFAULT_AMD = 0,
+    VK_MEMORY_OVERALLOCATION_BEHAVIOR_ALLOWED_AMD = 1,
+    VK_MEMORY_OVERALLOCATION_BEHAVIOR_DISALLOWED_AMD = 2,
+    VK_MEMORY_OVERALLOCATION_BEHAVIOR_MAX_ENUM_AMD = 0x7FFFFFFF
+} VkMemoryOverallocationBehaviorAMD;
+typedef struct VkDeviceMemoryOverallocationCreateInfoAMD {
+    VkStructureType                      sType;
+    const void*                          pNext;
+    VkMemoryOverallocationBehaviorAMD    overallocationBehavior;
+} VkDeviceMemoryOverallocationCreateInfoAMD;
+
+
+
+#define VK_EXT_vertex_attribute_divisor 1
+#define VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_SPEC_VERSION 3
+#define VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME "VK_EXT_vertex_attribute_divisor"
+typedef struct VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           maxVertexAttribDivisor;
+} VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT;
+
+typedef struct VkVertexInputBindingDivisorDescriptionEXT {
+    uint32_t    binding;
+    uint32_t    divisor;
+} VkVertexInputBindingDivisorDescriptionEXT;
+
+typedef struct VkPipelineVertexInputDivisorStateCreateInfoEXT {
+    VkStructureType                                     sType;
+    const void*                                         pNext;
+    uint32_t                                            vertexBindingDivisorCount;
+    const VkVertexInputBindingDivisorDescriptionEXT*    pVertexBindingDivisors;
+} VkPipelineVertexInputDivisorStateCreateInfoEXT;
+
+typedef struct VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           vertexAttributeInstanceRateDivisor;
+    VkBool32           vertexAttributeInstanceRateZeroDivisor;
+} VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT;
+
+
+
+#define VK_EXT_pipeline_creation_feedback 1
+#define VK_EXT_PIPELINE_CREATION_FEEDBACK_SPEC_VERSION 1
+#define VK_EXT_PIPELINE_CREATION_FEEDBACK_EXTENSION_NAME "VK_EXT_pipeline_creation_feedback"
+typedef VkPipelineCreationFeedbackFlagBits VkPipelineCreationFeedbackFlagBitsEXT;
+
+typedef VkPipelineCreationFeedbackFlags VkPipelineCreationFeedbackFlagsEXT;
+
+typedef VkPipelineCreationFeedbackCreateInfo VkPipelineCreationFeedbackCreateInfoEXT;
+
+typedef VkPipelineCreationFeedback VkPipelineCreationFeedbackEXT;
+
+
+
+#define VK_NV_shader_subgroup_partitioned 1
+#define VK_NV_SHADER_SUBGROUP_PARTITIONED_SPEC_VERSION 1
+#define VK_NV_SHADER_SUBGROUP_PARTITIONED_EXTENSION_NAME "VK_NV_shader_subgroup_partitioned"
+
+
+#define VK_NV_compute_shader_derivatives 1
+#define VK_NV_COMPUTE_SHADER_DERIVATIVES_SPEC_VERSION 1
+#define VK_NV_COMPUTE_SHADER_DERIVATIVES_EXTENSION_NAME "VK_NV_compute_shader_derivatives"
+typedef struct VkPhysicalDeviceComputeShaderDerivativesFeaturesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           computeDerivativeGroupQuads;
+    VkBool32           computeDerivativeGroupLinear;
+} VkPhysicalDeviceComputeShaderDerivativesFeaturesNV;
+
+
+
+#define VK_NV_mesh_shader 1
+#define VK_NV_MESH_SHADER_SPEC_VERSION    1
+#define VK_NV_MESH_SHADER_EXTENSION_NAME  "VK_NV_mesh_shader"
+typedef struct VkPhysicalDeviceMeshShaderFeaturesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           taskShader;
+    VkBool32           meshShader;
+} VkPhysicalDeviceMeshShaderFeaturesNV;
+
+typedef struct VkPhysicalDeviceMeshShaderPropertiesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           maxDrawMeshTasksCount;
+    uint32_t           maxTaskWorkGroupInvocations;
+    uint32_t           maxTaskWorkGroupSize[3];
+    uint32_t           maxTaskTotalMemorySize;
+    uint32_t           maxTaskOutputCount;
+    uint32_t           maxMeshWorkGroupInvocations;
+    uint32_t           maxMeshWorkGroupSize[3];
+    uint32_t           maxMeshTotalMemorySize;
+    uint32_t           maxMeshOutputVertices;
+    uint32_t           maxMeshOutputPrimitives;
+    uint32_t           maxMeshMultiviewViewCount;
+    uint32_t           meshOutputPerVertexGranularity;
+    uint32_t           meshOutputPerPrimitiveGranularity;
+} VkPhysicalDeviceMeshShaderPropertiesNV;
+
+typedef struct VkDrawMeshTasksIndirectCommandNV {
+    uint32_t    taskCount;
+    uint32_t    firstTask;
+} VkDrawMeshTasksIndirectCommandNV;
+
+typedef void (VKAPI_PTR *PFN_vkCmdDrawMeshTasksNV)(VkCommandBuffer commandBuffer, uint32_t taskCount, uint32_t firstTask);
+typedef void (VKAPI_PTR *PFN_vkCmdDrawMeshTasksIndirectNV)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride);
+typedef void (VKAPI_PTR *PFN_vkCmdDrawMeshTasksIndirectCountNV)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdDrawMeshTasksNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    taskCount,
+    uint32_t                                    firstTask);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDrawMeshTasksIndirectNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    uint32_t                                    drawCount,
+    uint32_t                                    stride);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDrawMeshTasksIndirectCountNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride);
+#endif
+
+
+#define VK_NV_fragment_shader_barycentric 1
+#define VK_NV_FRAGMENT_SHADER_BARYCENTRIC_SPEC_VERSION 1
+#define VK_NV_FRAGMENT_SHADER_BARYCENTRIC_EXTENSION_NAME "VK_NV_fragment_shader_barycentric"
+typedef VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV;
+
+
+
+#define VK_NV_shader_image_footprint 1
+#define VK_NV_SHADER_IMAGE_FOOTPRINT_SPEC_VERSION 2
+#define VK_NV_SHADER_IMAGE_FOOTPRINT_EXTENSION_NAME "VK_NV_shader_image_footprint"
+typedef struct VkPhysicalDeviceShaderImageFootprintFeaturesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           imageFootprint;
+} VkPhysicalDeviceShaderImageFootprintFeaturesNV;
+
+
+
+#define VK_NV_scissor_exclusive 1
+#define VK_NV_SCISSOR_EXCLUSIVE_SPEC_VERSION 1
+#define VK_NV_SCISSOR_EXCLUSIVE_EXTENSION_NAME "VK_NV_scissor_exclusive"
+typedef struct VkPipelineViewportExclusiveScissorStateCreateInfoNV {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           exclusiveScissorCount;
+    const VkRect2D*    pExclusiveScissors;
+} VkPipelineViewportExclusiveScissorStateCreateInfoNV;
+
+typedef struct VkPhysicalDeviceExclusiveScissorFeaturesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           exclusiveScissor;
+} VkPhysicalDeviceExclusiveScissorFeaturesNV;
+
+typedef void (VKAPI_PTR *PFN_vkCmdSetExclusiveScissorNV)(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VkRect2D* pExclusiveScissors);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdSetExclusiveScissorNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstExclusiveScissor,
+    uint32_t                                    exclusiveScissorCount,
+    const VkRect2D*                             pExclusiveScissors);
+#endif
+
+
+#define VK_NV_device_diagnostic_checkpoints 1
+#define VK_NV_DEVICE_DIAGNOSTIC_CHECKPOINTS_SPEC_VERSION 2
+#define VK_NV_DEVICE_DIAGNOSTIC_CHECKPOINTS_EXTENSION_NAME "VK_NV_device_diagnostic_checkpoints"
+typedef struct VkQueueFamilyCheckpointPropertiesNV {
+    VkStructureType         sType;
+    void*                   pNext;
+    VkPipelineStageFlags    checkpointExecutionStageMask;
+} VkQueueFamilyCheckpointPropertiesNV;
+
+typedef struct VkCheckpointDataNV {
+    VkStructureType            sType;
+    void*                      pNext;
+    VkPipelineStageFlagBits    stage;
+    void*                      pCheckpointMarker;
+} VkCheckpointDataNV;
+
+typedef void (VKAPI_PTR *PFN_vkCmdSetCheckpointNV)(VkCommandBuffer commandBuffer, const void* pCheckpointMarker);
+typedef void (VKAPI_PTR *PFN_vkGetQueueCheckpointDataNV)(VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointDataNV* pCheckpointData);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdSetCheckpointNV(
+    VkCommandBuffer                             commandBuffer,
+    const void*                                 pCheckpointMarker);
+
+VKAPI_ATTR void VKAPI_CALL vkGetQueueCheckpointDataNV(
+    VkQueue                                     queue,
+    uint32_t*                                   pCheckpointDataCount,
+    VkCheckpointDataNV*                         pCheckpointData);
+#endif
+
+
+#define VK_INTEL_shader_integer_functions2 1
+#define VK_INTEL_SHADER_INTEGER_FUNCTIONS_2_SPEC_VERSION 1
+#define VK_INTEL_SHADER_INTEGER_FUNCTIONS_2_EXTENSION_NAME "VK_INTEL_shader_integer_functions2"
+typedef struct VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           shaderIntegerFunctions2;
+} VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL;
+
+
+
+#define VK_INTEL_performance_query 1
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPerformanceConfigurationINTEL)
+#define VK_INTEL_PERFORMANCE_QUERY_SPEC_VERSION 2
+#define VK_INTEL_PERFORMANCE_QUERY_EXTENSION_NAME "VK_INTEL_performance_query"
+
+typedef enum VkPerformanceConfigurationTypeINTEL {
+    VK_PERFORMANCE_CONFIGURATION_TYPE_COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED_INTEL = 0,
+    VK_PERFORMANCE_CONFIGURATION_TYPE_MAX_ENUM_INTEL = 0x7FFFFFFF
+} VkPerformanceConfigurationTypeINTEL;
+
+typedef enum VkQueryPoolSamplingModeINTEL {
+    VK_QUERY_POOL_SAMPLING_MODE_MANUAL_INTEL = 0,
+    VK_QUERY_POOL_SAMPLING_MODE_MAX_ENUM_INTEL = 0x7FFFFFFF
+} VkQueryPoolSamplingModeINTEL;
+
+typedef enum VkPerformanceOverrideTypeINTEL {
+    VK_PERFORMANCE_OVERRIDE_TYPE_NULL_HARDWARE_INTEL = 0,
+    VK_PERFORMANCE_OVERRIDE_TYPE_FLUSH_GPU_CACHES_INTEL = 1,
+    VK_PERFORMANCE_OVERRIDE_TYPE_MAX_ENUM_INTEL = 0x7FFFFFFF
+} VkPerformanceOverrideTypeINTEL;
+
+typedef enum VkPerformanceParameterTypeINTEL {
+    VK_PERFORMANCE_PARAMETER_TYPE_HW_COUNTERS_SUPPORTED_INTEL = 0,
+    VK_PERFORMANCE_PARAMETER_TYPE_STREAM_MARKER_VALID_BITS_INTEL = 1,
+    VK_PERFORMANCE_PARAMETER_TYPE_MAX_ENUM_INTEL = 0x7FFFFFFF
+} VkPerformanceParameterTypeINTEL;
+
+typedef enum VkPerformanceValueTypeINTEL {
+    VK_PERFORMANCE_VALUE_TYPE_UINT32_INTEL = 0,
+    VK_PERFORMANCE_VALUE_TYPE_UINT64_INTEL = 1,
+    VK_PERFORMANCE_VALUE_TYPE_FLOAT_INTEL = 2,
+    VK_PERFORMANCE_VALUE_TYPE_BOOL_INTEL = 3,
+    VK_PERFORMANCE_VALUE_TYPE_STRING_INTEL = 4,
+    VK_PERFORMANCE_VALUE_TYPE_MAX_ENUM_INTEL = 0x7FFFFFFF
+} VkPerformanceValueTypeINTEL;
+typedef union VkPerformanceValueDataINTEL {
+    uint32_t       value32;
+    uint64_t       value64;
+    float          valueFloat;
+    VkBool32       valueBool;
+    const char*    valueString;
+} VkPerformanceValueDataINTEL;
+
+typedef struct VkPerformanceValueINTEL {
+    VkPerformanceValueTypeINTEL    type;
+    VkPerformanceValueDataINTEL    data;
+} VkPerformanceValueINTEL;
+
+typedef struct VkInitializePerformanceApiInfoINTEL {
+    VkStructureType    sType;
+    const void*        pNext;
+    void*              pUserData;
+} VkInitializePerformanceApiInfoINTEL;
+
+typedef struct VkQueryPoolPerformanceQueryCreateInfoINTEL {
+    VkStructureType                 sType;
+    const void*                     pNext;
+    VkQueryPoolSamplingModeINTEL    performanceCountersSampling;
+} VkQueryPoolPerformanceQueryCreateInfoINTEL;
+
+typedef VkQueryPoolPerformanceQueryCreateInfoINTEL VkQueryPoolCreateInfoINTEL;
+
+typedef struct VkPerformanceMarkerInfoINTEL {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint64_t           marker;
+} VkPerformanceMarkerInfoINTEL;
+
+typedef struct VkPerformanceStreamMarkerInfoINTEL {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           marker;
+} VkPerformanceStreamMarkerInfoINTEL;
+
+typedef struct VkPerformanceOverrideInfoINTEL {
+    VkStructureType                   sType;
+    const void*                       pNext;
+    VkPerformanceOverrideTypeINTEL    type;
+    VkBool32                          enable;
+    uint64_t                          parameter;
+} VkPerformanceOverrideInfoINTEL;
+
+typedef struct VkPerformanceConfigurationAcquireInfoINTEL {
+    VkStructureType                        sType;
+    const void*                            pNext;
+    VkPerformanceConfigurationTypeINTEL    type;
+} VkPerformanceConfigurationAcquireInfoINTEL;
+
+typedef VkResult (VKAPI_PTR *PFN_vkInitializePerformanceApiINTEL)(VkDevice device, const VkInitializePerformanceApiInfoINTEL* pInitializeInfo);
+typedef void (VKAPI_PTR *PFN_vkUninitializePerformanceApiINTEL)(VkDevice device);
+typedef VkResult (VKAPI_PTR *PFN_vkCmdSetPerformanceMarkerINTEL)(VkCommandBuffer commandBuffer, const VkPerformanceMarkerInfoINTEL* pMarkerInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkCmdSetPerformanceStreamMarkerINTEL)(VkCommandBuffer commandBuffer, const VkPerformanceStreamMarkerInfoINTEL* pMarkerInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkCmdSetPerformanceOverrideINTEL)(VkCommandBuffer commandBuffer, const VkPerformanceOverrideInfoINTEL* pOverrideInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkAcquirePerformanceConfigurationINTEL)(VkDevice device, const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, VkPerformanceConfigurationINTEL* pConfiguration);
+typedef VkResult (VKAPI_PTR *PFN_vkReleasePerformanceConfigurationINTEL)(VkDevice device, VkPerformanceConfigurationINTEL configuration);
+typedef VkResult (VKAPI_PTR *PFN_vkQueueSetPerformanceConfigurationINTEL)(VkQueue queue, VkPerformanceConfigurationINTEL configuration);
+typedef VkResult (VKAPI_PTR *PFN_vkGetPerformanceParameterINTEL)(VkDevice device, VkPerformanceParameterTypeINTEL parameter, VkPerformanceValueINTEL* pValue);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkInitializePerformanceApiINTEL(
+    VkDevice                                    device,
+    const VkInitializePerformanceApiInfoINTEL*  pInitializeInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkUninitializePerformanceApiINTEL(
+    VkDevice                                    device);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCmdSetPerformanceMarkerINTEL(
+    VkCommandBuffer                             commandBuffer,
+    const VkPerformanceMarkerInfoINTEL*         pMarkerInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCmdSetPerformanceStreamMarkerINTEL(
+    VkCommandBuffer                             commandBuffer,
+    const VkPerformanceStreamMarkerInfoINTEL*   pMarkerInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCmdSetPerformanceOverrideINTEL(
+    VkCommandBuffer                             commandBuffer,
+    const VkPerformanceOverrideInfoINTEL*       pOverrideInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkAcquirePerformanceConfigurationINTEL(
+    VkDevice                                    device,
+    const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo,
+    VkPerformanceConfigurationINTEL*            pConfiguration);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkReleasePerformanceConfigurationINTEL(
+    VkDevice                                    device,
+    VkPerformanceConfigurationINTEL             configuration);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkQueueSetPerformanceConfigurationINTEL(
+    VkQueue                                     queue,
+    VkPerformanceConfigurationINTEL             configuration);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPerformanceParameterINTEL(
+    VkDevice                                    device,
+    VkPerformanceParameterTypeINTEL             parameter,
+    VkPerformanceValueINTEL*                    pValue);
+#endif
+
+
+#define VK_EXT_pci_bus_info 1
+#define VK_EXT_PCI_BUS_INFO_SPEC_VERSION  2
+#define VK_EXT_PCI_BUS_INFO_EXTENSION_NAME "VK_EXT_pci_bus_info"
+typedef struct VkPhysicalDevicePCIBusInfoPropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           pciDomain;
+    uint32_t           pciBus;
+    uint32_t           pciDevice;
+    uint32_t           pciFunction;
+} VkPhysicalDevicePCIBusInfoPropertiesEXT;
+
+
+
+#define VK_AMD_display_native_hdr 1
+#define VK_AMD_DISPLAY_NATIVE_HDR_SPEC_VERSION 1
+#define VK_AMD_DISPLAY_NATIVE_HDR_EXTENSION_NAME "VK_AMD_display_native_hdr"
+typedef struct VkDisplayNativeHdrSurfaceCapabilitiesAMD {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           localDimmingSupport;
+} VkDisplayNativeHdrSurfaceCapabilitiesAMD;
+
+typedef struct VkSwapchainDisplayNativeHdrCreateInfoAMD {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkBool32           localDimmingEnable;
+} VkSwapchainDisplayNativeHdrCreateInfoAMD;
+
+typedef void (VKAPI_PTR *PFN_vkSetLocalDimmingAMD)(VkDevice device, VkSwapchainKHR swapChain, VkBool32 localDimmingEnable);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkSetLocalDimmingAMD(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapChain,
+    VkBool32                                    localDimmingEnable);
+#endif
+
+
+#define VK_EXT_fragment_density_map 1
+#define VK_EXT_FRAGMENT_DENSITY_MAP_SPEC_VERSION 2
+#define VK_EXT_FRAGMENT_DENSITY_MAP_EXTENSION_NAME "VK_EXT_fragment_density_map"
+typedef struct VkPhysicalDeviceFragmentDensityMapFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           fragmentDensityMap;
+    VkBool32           fragmentDensityMapDynamic;
+    VkBool32           fragmentDensityMapNonSubsampledImages;
+} VkPhysicalDeviceFragmentDensityMapFeaturesEXT;
+
+typedef struct VkPhysicalDeviceFragmentDensityMapPropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkExtent2D         minFragmentDensityTexelSize;
+    VkExtent2D         maxFragmentDensityTexelSize;
+    VkBool32           fragmentDensityInvocations;
+} VkPhysicalDeviceFragmentDensityMapPropertiesEXT;
+
+typedef struct VkRenderPassFragmentDensityMapCreateInfoEXT {
+    VkStructureType          sType;
+    const void*              pNext;
+    VkAttachmentReference    fragmentDensityMapAttachment;
+} VkRenderPassFragmentDensityMapCreateInfoEXT;
+
+
+
+#define VK_EXT_scalar_block_layout 1
+#define VK_EXT_SCALAR_BLOCK_LAYOUT_SPEC_VERSION 1
+#define VK_EXT_SCALAR_BLOCK_LAYOUT_EXTENSION_NAME "VK_EXT_scalar_block_layout"
+typedef VkPhysicalDeviceScalarBlockLayoutFeatures VkPhysicalDeviceScalarBlockLayoutFeaturesEXT;
+
+
+
+#define VK_GOOGLE_hlsl_functionality1 1
+#define VK_GOOGLE_HLSL_FUNCTIONALITY_1_SPEC_VERSION 1
+#define VK_GOOGLE_HLSL_FUNCTIONALITY_1_EXTENSION_NAME "VK_GOOGLE_hlsl_functionality1"
+#define VK_GOOGLE_HLSL_FUNCTIONALITY1_SPEC_VERSION VK_GOOGLE_HLSL_FUNCTIONALITY_1_SPEC_VERSION
+#define VK_GOOGLE_HLSL_FUNCTIONALITY1_EXTENSION_NAME VK_GOOGLE_HLSL_FUNCTIONALITY_1_EXTENSION_NAME
+
+
+#define VK_GOOGLE_decorate_string 1
+#define VK_GOOGLE_DECORATE_STRING_SPEC_VERSION 1
+#define VK_GOOGLE_DECORATE_STRING_EXTENSION_NAME "VK_GOOGLE_decorate_string"
+
+
+#define VK_EXT_subgroup_size_control 1
+#define VK_EXT_SUBGROUP_SIZE_CONTROL_SPEC_VERSION 2
+#define VK_EXT_SUBGROUP_SIZE_CONTROL_EXTENSION_NAME "VK_EXT_subgroup_size_control"
+typedef VkPhysicalDeviceSubgroupSizeControlFeatures VkPhysicalDeviceSubgroupSizeControlFeaturesEXT;
+
+typedef VkPhysicalDeviceSubgroupSizeControlProperties VkPhysicalDeviceSubgroupSizeControlPropertiesEXT;
+
+typedef VkPipelineShaderStageRequiredSubgroupSizeCreateInfo VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT;
+
+
+
+#define VK_AMD_shader_core_properties2 1
+#define VK_AMD_SHADER_CORE_PROPERTIES_2_SPEC_VERSION 1
+#define VK_AMD_SHADER_CORE_PROPERTIES_2_EXTENSION_NAME "VK_AMD_shader_core_properties2"
+
+typedef enum VkShaderCorePropertiesFlagBitsAMD {
+    VK_SHADER_CORE_PROPERTIES_FLAG_BITS_MAX_ENUM_AMD = 0x7FFFFFFF
+} VkShaderCorePropertiesFlagBitsAMD;
+typedef VkFlags VkShaderCorePropertiesFlagsAMD;
+typedef struct VkPhysicalDeviceShaderCoreProperties2AMD {
+    VkStructureType                   sType;
+    void*                             pNext;
+    VkShaderCorePropertiesFlagsAMD    shaderCoreFeatures;
+    uint32_t                          activeComputeUnitCount;
+} VkPhysicalDeviceShaderCoreProperties2AMD;
+
+
+
+#define VK_AMD_device_coherent_memory 1
+#define VK_AMD_DEVICE_COHERENT_MEMORY_SPEC_VERSION 1
+#define VK_AMD_DEVICE_COHERENT_MEMORY_EXTENSION_NAME "VK_AMD_device_coherent_memory"
+typedef struct VkPhysicalDeviceCoherentMemoryFeaturesAMD {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           deviceCoherentMemory;
+} VkPhysicalDeviceCoherentMemoryFeaturesAMD;
+
+
+
+#define VK_EXT_shader_image_atomic_int64 1
+#define VK_EXT_SHADER_IMAGE_ATOMIC_INT64_SPEC_VERSION 1
+#define VK_EXT_SHADER_IMAGE_ATOMIC_INT64_EXTENSION_NAME "VK_EXT_shader_image_atomic_int64"
+typedef struct VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           shaderImageInt64Atomics;
+    VkBool32           sparseImageInt64Atomics;
+} VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT;
+
+
+
+#define VK_EXT_memory_budget 1
+#define VK_EXT_MEMORY_BUDGET_SPEC_VERSION 1
+#define VK_EXT_MEMORY_BUDGET_EXTENSION_NAME "VK_EXT_memory_budget"
+typedef struct VkPhysicalDeviceMemoryBudgetPropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkDeviceSize       heapBudget[VK_MAX_MEMORY_HEAPS];
+    VkDeviceSize       heapUsage[VK_MAX_MEMORY_HEAPS];
+} VkPhysicalDeviceMemoryBudgetPropertiesEXT;
+
+
+
+#define VK_EXT_memory_priority 1
+#define VK_EXT_MEMORY_PRIORITY_SPEC_VERSION 1
+#define VK_EXT_MEMORY_PRIORITY_EXTENSION_NAME "VK_EXT_memory_priority"
+typedef struct VkPhysicalDeviceMemoryPriorityFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           memoryPriority;
+} VkPhysicalDeviceMemoryPriorityFeaturesEXT;
+
+typedef struct VkMemoryPriorityAllocateInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    float              priority;
+} VkMemoryPriorityAllocateInfoEXT;
+
+
+
+#define VK_NV_dedicated_allocation_image_aliasing 1
+#define VK_NV_DEDICATED_ALLOCATION_IMAGE_ALIASING_SPEC_VERSION 1
+#define VK_NV_DEDICATED_ALLOCATION_IMAGE_ALIASING_EXTENSION_NAME "VK_NV_dedicated_allocation_image_aliasing"
+typedef struct VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           dedicatedAllocationImageAliasing;
+} VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV;
+
+
+
+#define VK_EXT_buffer_device_address 1
+#define VK_EXT_BUFFER_DEVICE_ADDRESS_SPEC_VERSION 2
+#define VK_EXT_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME "VK_EXT_buffer_device_address"
+typedef struct VkPhysicalDeviceBufferDeviceAddressFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           bufferDeviceAddress;
+    VkBool32           bufferDeviceAddressCaptureReplay;
+    VkBool32           bufferDeviceAddressMultiDevice;
+} VkPhysicalDeviceBufferDeviceAddressFeaturesEXT;
+
+typedef VkPhysicalDeviceBufferDeviceAddressFeaturesEXT VkPhysicalDeviceBufferAddressFeaturesEXT;
+
+typedef VkBufferDeviceAddressInfo VkBufferDeviceAddressInfoEXT;
+
+typedef struct VkBufferDeviceAddressCreateInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkDeviceAddress    deviceAddress;
+} VkBufferDeviceAddressCreateInfoEXT;
+
+typedef VkDeviceAddress (VKAPI_PTR *PFN_vkGetBufferDeviceAddressEXT)(VkDevice device, const VkBufferDeviceAddressInfo* pInfo);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkDeviceAddress VKAPI_CALL vkGetBufferDeviceAddressEXT(
+    VkDevice                                    device,
+    const VkBufferDeviceAddressInfo*            pInfo);
+#endif
+
+
+#define VK_EXT_tooling_info 1
+#define VK_EXT_TOOLING_INFO_SPEC_VERSION  1
+#define VK_EXT_TOOLING_INFO_EXTENSION_NAME "VK_EXT_tooling_info"
+typedef VkToolPurposeFlagBits VkToolPurposeFlagBitsEXT;
+
+typedef VkToolPurposeFlags VkToolPurposeFlagsEXT;
+
+typedef VkPhysicalDeviceToolProperties VkPhysicalDeviceToolPropertiesEXT;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceToolPropertiesEXT)(VkPhysicalDevice physicalDevice, uint32_t* pToolCount, VkPhysicalDeviceToolProperties* pToolProperties);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceToolPropertiesEXT(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pToolCount,
+    VkPhysicalDeviceToolProperties*             pToolProperties);
+#endif
+
+
+#define VK_EXT_separate_stencil_usage 1
+#define VK_EXT_SEPARATE_STENCIL_USAGE_SPEC_VERSION 1
+#define VK_EXT_SEPARATE_STENCIL_USAGE_EXTENSION_NAME "VK_EXT_separate_stencil_usage"
+typedef VkImageStencilUsageCreateInfo VkImageStencilUsageCreateInfoEXT;
+
+
+
+#define VK_EXT_validation_features 1
+#define VK_EXT_VALIDATION_FEATURES_SPEC_VERSION 5
+#define VK_EXT_VALIDATION_FEATURES_EXTENSION_NAME "VK_EXT_validation_features"
+
+typedef enum VkValidationFeatureEnableEXT {
+    VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT = 0,
+    VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT = 1,
+    VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT = 2,
+    VK_VALIDATION_FEATURE_ENABLE_DEBUG_PRINTF_EXT = 3,
+    VK_VALIDATION_FEATURE_ENABLE_SYNCHRONIZATION_VALIDATION_EXT = 4,
+    VK_VALIDATION_FEATURE_ENABLE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkValidationFeatureEnableEXT;
+
+typedef enum VkValidationFeatureDisableEXT {
+    VK_VALIDATION_FEATURE_DISABLE_ALL_EXT = 0,
+    VK_VALIDATION_FEATURE_DISABLE_SHADERS_EXT = 1,
+    VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT = 2,
+    VK_VALIDATION_FEATURE_DISABLE_API_PARAMETERS_EXT = 3,
+    VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT = 4,
+    VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT = 5,
+    VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT = 6,
+    VK_VALIDATION_FEATURE_DISABLE_SHADER_VALIDATION_CACHE_EXT = 7,
+    VK_VALIDATION_FEATURE_DISABLE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkValidationFeatureDisableEXT;
+typedef struct VkValidationFeaturesEXT {
+    VkStructureType                         sType;
+    const void*                             pNext;
+    uint32_t                                enabledValidationFeatureCount;
+    const VkValidationFeatureEnableEXT*     pEnabledValidationFeatures;
+    uint32_t                                disabledValidationFeatureCount;
+    const VkValidationFeatureDisableEXT*    pDisabledValidationFeatures;
+} VkValidationFeaturesEXT;
+
+
+
+#define VK_NV_cooperative_matrix 1
+#define VK_NV_COOPERATIVE_MATRIX_SPEC_VERSION 1
+#define VK_NV_COOPERATIVE_MATRIX_EXTENSION_NAME "VK_NV_cooperative_matrix"
+
+typedef enum VkComponentTypeNV {
+    VK_COMPONENT_TYPE_FLOAT16_NV = 0,
+    VK_COMPONENT_TYPE_FLOAT32_NV = 1,
+    VK_COMPONENT_TYPE_FLOAT64_NV = 2,
+    VK_COMPONENT_TYPE_SINT8_NV = 3,
+    VK_COMPONENT_TYPE_SINT16_NV = 4,
+    VK_COMPONENT_TYPE_SINT32_NV = 5,
+    VK_COMPONENT_TYPE_SINT64_NV = 6,
+    VK_COMPONENT_TYPE_UINT8_NV = 7,
+    VK_COMPONENT_TYPE_UINT16_NV = 8,
+    VK_COMPONENT_TYPE_UINT32_NV = 9,
+    VK_COMPONENT_TYPE_UINT64_NV = 10,
+    VK_COMPONENT_TYPE_MAX_ENUM_NV = 0x7FFFFFFF
+} VkComponentTypeNV;
+
+typedef enum VkScopeNV {
+    VK_SCOPE_DEVICE_NV = 1,
+    VK_SCOPE_WORKGROUP_NV = 2,
+    VK_SCOPE_SUBGROUP_NV = 3,
+    VK_SCOPE_QUEUE_FAMILY_NV = 5,
+    VK_SCOPE_MAX_ENUM_NV = 0x7FFFFFFF
+} VkScopeNV;
+typedef struct VkCooperativeMatrixPropertiesNV {
+    VkStructureType      sType;
+    void*                pNext;
+    uint32_t             MSize;
+    uint32_t             NSize;
+    uint32_t             KSize;
+    VkComponentTypeNV    AType;
+    VkComponentTypeNV    BType;
+    VkComponentTypeNV    CType;
+    VkComponentTypeNV    DType;
+    VkScopeNV            scope;
+} VkCooperativeMatrixPropertiesNV;
+
+typedef struct VkPhysicalDeviceCooperativeMatrixFeaturesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           cooperativeMatrix;
+    VkBool32           cooperativeMatrixRobustBufferAccess;
+} VkPhysicalDeviceCooperativeMatrixFeaturesNV;
+
+typedef struct VkPhysicalDeviceCooperativeMatrixPropertiesNV {
+    VkStructureType       sType;
+    void*                 pNext;
+    VkShaderStageFlags    cooperativeMatrixSupportedStages;
+} VkPhysicalDeviceCooperativeMatrixPropertiesNV;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkCooperativeMatrixPropertiesNV* pProperties);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceCooperativeMatrixPropertiesNV(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkCooperativeMatrixPropertiesNV*            pProperties);
+#endif
+
+
+#define VK_NV_coverage_reduction_mode 1
+#define VK_NV_COVERAGE_REDUCTION_MODE_SPEC_VERSION 1
+#define VK_NV_COVERAGE_REDUCTION_MODE_EXTENSION_NAME "VK_NV_coverage_reduction_mode"
+
+typedef enum VkCoverageReductionModeNV {
+    VK_COVERAGE_REDUCTION_MODE_MERGE_NV = 0,
+    VK_COVERAGE_REDUCTION_MODE_TRUNCATE_NV = 1,
+    VK_COVERAGE_REDUCTION_MODE_MAX_ENUM_NV = 0x7FFFFFFF
+} VkCoverageReductionModeNV;
+typedef VkFlags VkPipelineCoverageReductionStateCreateFlagsNV;
+typedef struct VkPhysicalDeviceCoverageReductionModeFeaturesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           coverageReductionMode;
+} VkPhysicalDeviceCoverageReductionModeFeaturesNV;
+
+typedef struct VkPipelineCoverageReductionStateCreateInfoNV {
+    VkStructureType                                  sType;
+    const void*                                      pNext;
+    VkPipelineCoverageReductionStateCreateFlagsNV    flags;
+    VkCoverageReductionModeNV                        coverageReductionMode;
+} VkPipelineCoverageReductionStateCreateInfoNV;
+
+typedef struct VkFramebufferMixedSamplesCombinationNV {
+    VkStructureType              sType;
+    void*                        pNext;
+    VkCoverageReductionModeNV    coverageReductionMode;
+    VkSampleCountFlagBits        rasterizationSamples;
+    VkSampleCountFlags           depthStencilSamples;
+    VkSampleCountFlags           colorSamples;
+} VkFramebufferMixedSamplesCombinationNV;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV)(VkPhysicalDevice physicalDevice, uint32_t* pCombinationCount, VkFramebufferMixedSamplesCombinationNV* pCombinations);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pCombinationCount,
+    VkFramebufferMixedSamplesCombinationNV*     pCombinations);
+#endif
+
+
+#define VK_EXT_fragment_shader_interlock 1
+#define VK_EXT_FRAGMENT_SHADER_INTERLOCK_SPEC_VERSION 1
+#define VK_EXT_FRAGMENT_SHADER_INTERLOCK_EXTENSION_NAME "VK_EXT_fragment_shader_interlock"
+typedef struct VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           fragmentShaderSampleInterlock;
+    VkBool32           fragmentShaderPixelInterlock;
+    VkBool32           fragmentShaderShadingRateInterlock;
+} VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT;
+
+
+
+#define VK_EXT_ycbcr_image_arrays 1
+#define VK_EXT_YCBCR_IMAGE_ARRAYS_SPEC_VERSION 1
+#define VK_EXT_YCBCR_IMAGE_ARRAYS_EXTENSION_NAME "VK_EXT_ycbcr_image_arrays"
+typedef struct VkPhysicalDeviceYcbcrImageArraysFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           ycbcrImageArrays;
+} VkPhysicalDeviceYcbcrImageArraysFeaturesEXT;
+
+
+
+#define VK_EXT_provoking_vertex 1
+#define VK_EXT_PROVOKING_VERTEX_SPEC_VERSION 1
+#define VK_EXT_PROVOKING_VERTEX_EXTENSION_NAME "VK_EXT_provoking_vertex"
+
+typedef enum VkProvokingVertexModeEXT {
+    VK_PROVOKING_VERTEX_MODE_FIRST_VERTEX_EXT = 0,
+    VK_PROVOKING_VERTEX_MODE_LAST_VERTEX_EXT = 1,
+    VK_PROVOKING_VERTEX_MODE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkProvokingVertexModeEXT;
+typedef struct VkPhysicalDeviceProvokingVertexFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           provokingVertexLast;
+    VkBool32           transformFeedbackPreservesProvokingVertex;
+} VkPhysicalDeviceProvokingVertexFeaturesEXT;
+
+typedef struct VkPhysicalDeviceProvokingVertexPropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           provokingVertexModePerPipeline;
+    VkBool32           transformFeedbackPreservesTriangleFanProvokingVertex;
+} VkPhysicalDeviceProvokingVertexPropertiesEXT;
+
+typedef struct VkPipelineRasterizationProvokingVertexStateCreateInfoEXT {
+    VkStructureType             sType;
+    const void*                 pNext;
+    VkProvokingVertexModeEXT    provokingVertexMode;
+} VkPipelineRasterizationProvokingVertexStateCreateInfoEXT;
+
+
+
+#define VK_EXT_headless_surface 1
+#define VK_EXT_HEADLESS_SURFACE_SPEC_VERSION 1
+#define VK_EXT_HEADLESS_SURFACE_EXTENSION_NAME "VK_EXT_headless_surface"
+typedef VkFlags VkHeadlessSurfaceCreateFlagsEXT;
+typedef struct VkHeadlessSurfaceCreateInfoEXT {
+    VkStructureType                    sType;
+    const void*                        pNext;
+    VkHeadlessSurfaceCreateFlagsEXT    flags;
+} VkHeadlessSurfaceCreateInfoEXT;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateHeadlessSurfaceEXT)(VkInstance instance, const VkHeadlessSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateHeadlessSurfaceEXT(
+    VkInstance                                  instance,
+    const VkHeadlessSurfaceCreateInfoEXT*       pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+#endif
+
+
+#define VK_EXT_line_rasterization 1
+#define VK_EXT_LINE_RASTERIZATION_SPEC_VERSION 1
+#define VK_EXT_LINE_RASTERIZATION_EXTENSION_NAME "VK_EXT_line_rasterization"
+
+typedef enum VkLineRasterizationModeEXT {
+    VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT = 0,
+    VK_LINE_RASTERIZATION_MODE_RECTANGULAR_EXT = 1,
+    VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT = 2,
+    VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT = 3,
+    VK_LINE_RASTERIZATION_MODE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkLineRasterizationModeEXT;
+typedef struct VkPhysicalDeviceLineRasterizationFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           rectangularLines;
+    VkBool32           bresenhamLines;
+    VkBool32           smoothLines;
+    VkBool32           stippledRectangularLines;
+    VkBool32           stippledBresenhamLines;
+    VkBool32           stippledSmoothLines;
+} VkPhysicalDeviceLineRasterizationFeaturesEXT;
+
+typedef struct VkPhysicalDeviceLineRasterizationPropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           lineSubPixelPrecisionBits;
+} VkPhysicalDeviceLineRasterizationPropertiesEXT;
+
+typedef struct VkPipelineRasterizationLineStateCreateInfoEXT {
+    VkStructureType               sType;
+    const void*                   pNext;
+    VkLineRasterizationModeEXT    lineRasterizationMode;
+    VkBool32                      stippledLineEnable;
+    uint32_t                      lineStippleFactor;
+    uint16_t                      lineStipplePattern;
+} VkPipelineRasterizationLineStateCreateInfoEXT;
+
+typedef void (VKAPI_PTR *PFN_vkCmdSetLineStippleEXT)(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdSetLineStippleEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    lineStippleFactor,
+    uint16_t                                    lineStipplePattern);
+#endif
+
+
+#define VK_EXT_shader_atomic_float 1
+#define VK_EXT_SHADER_ATOMIC_FLOAT_SPEC_VERSION 1
+#define VK_EXT_SHADER_ATOMIC_FLOAT_EXTENSION_NAME "VK_EXT_shader_atomic_float"
+typedef struct VkPhysicalDeviceShaderAtomicFloatFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           shaderBufferFloat32Atomics;
+    VkBool32           shaderBufferFloat32AtomicAdd;
+    VkBool32           shaderBufferFloat64Atomics;
+    VkBool32           shaderBufferFloat64AtomicAdd;
+    VkBool32           shaderSharedFloat32Atomics;
+    VkBool32           shaderSharedFloat32AtomicAdd;
+    VkBool32           shaderSharedFloat64Atomics;
+    VkBool32           shaderSharedFloat64AtomicAdd;
+    VkBool32           shaderImageFloat32Atomics;
+    VkBool32           shaderImageFloat32AtomicAdd;
+    VkBool32           sparseImageFloat32Atomics;
+    VkBool32           sparseImageFloat32AtomicAdd;
+} VkPhysicalDeviceShaderAtomicFloatFeaturesEXT;
+
+
+
+#define VK_EXT_host_query_reset 1
+#define VK_EXT_HOST_QUERY_RESET_SPEC_VERSION 1
+#define VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME "VK_EXT_host_query_reset"
+typedef VkPhysicalDeviceHostQueryResetFeatures VkPhysicalDeviceHostQueryResetFeaturesEXT;
+
+typedef void (VKAPI_PTR *PFN_vkResetQueryPoolEXT)(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkResetQueryPoolEXT(
+    VkDevice                                    device,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount);
+#endif
+
+
+#define VK_EXT_index_type_uint8 1
+#define VK_EXT_INDEX_TYPE_UINT8_SPEC_VERSION 1
+#define VK_EXT_INDEX_TYPE_UINT8_EXTENSION_NAME "VK_EXT_index_type_uint8"
+typedef struct VkPhysicalDeviceIndexTypeUint8FeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           indexTypeUint8;
+} VkPhysicalDeviceIndexTypeUint8FeaturesEXT;
+
+
+
+#define VK_EXT_extended_dynamic_state 1
+#define VK_EXT_EXTENDED_DYNAMIC_STATE_SPEC_VERSION 1
+#define VK_EXT_EXTENDED_DYNAMIC_STATE_EXTENSION_NAME "VK_EXT_extended_dynamic_state"
+typedef struct VkPhysicalDeviceExtendedDynamicStateFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           extendedDynamicState;
+} VkPhysicalDeviceExtendedDynamicStateFeaturesEXT;
+
+typedef void (VKAPI_PTR *PFN_vkCmdSetCullModeEXT)(VkCommandBuffer commandBuffer, VkCullModeFlags cullMode);
+typedef void (VKAPI_PTR *PFN_vkCmdSetFrontFaceEXT)(VkCommandBuffer commandBuffer, VkFrontFace frontFace);
+typedef void (VKAPI_PTR *PFN_vkCmdSetPrimitiveTopologyEXT)(VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology);
+typedef void (VKAPI_PTR *PFN_vkCmdSetViewportWithCountEXT)(VkCommandBuffer commandBuffer, uint32_t viewportCount, const VkViewport* pViewports);
+typedef void (VKAPI_PTR *PFN_vkCmdSetScissorWithCountEXT)(VkCommandBuffer commandBuffer, uint32_t scissorCount, const VkRect2D* pScissors);
+typedef void (VKAPI_PTR *PFN_vkCmdBindVertexBuffers2EXT)(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes, const VkDeviceSize* pStrides);
+typedef void (VKAPI_PTR *PFN_vkCmdSetDepthTestEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 depthTestEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetDepthWriteEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetDepthCompareOpEXT)(VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp);
+typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBoundsTestEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 depthBoundsTestEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetStencilTestEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetStencilOpEXT)(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, VkStencilOp failOp, VkStencilOp passOp, VkStencilOp depthFailOp, VkCompareOp compareOp);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdSetCullModeEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkCullModeFlags                             cullMode);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetFrontFaceEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkFrontFace                                 frontFace);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetPrimitiveTopologyEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkPrimitiveTopology                         primitiveTopology);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportWithCountEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    viewportCount,
+    const VkViewport*                           pViewports);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetScissorWithCountEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    scissorCount,
+    const VkRect2D*                             pScissors);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBindVertexBuffers2EXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstBinding,
+    uint32_t                                    bindingCount,
+    const VkBuffer*                             pBuffers,
+    const VkDeviceSize*                         pOffsets,
+    const VkDeviceSize*                         pSizes,
+    const VkDeviceSize*                         pStrides);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthTestEnableEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkBool32                                    depthTestEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthWriteEnableEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkBool32                                    depthWriteEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthCompareOpEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkCompareOp                                 depthCompareOp);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBoundsTestEnableEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkBool32                                    depthBoundsTestEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilTestEnableEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkBool32                                    stencilTestEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilOpEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkStencilFaceFlags                          faceMask,
+    VkStencilOp                                 failOp,
+    VkStencilOp                                 passOp,
+    VkStencilOp                                 depthFailOp,
+    VkCompareOp                                 compareOp);
+#endif
+
+
+#define VK_EXT_shader_atomic_float2 1
+#define VK_EXT_SHADER_ATOMIC_FLOAT_2_SPEC_VERSION 1
+#define VK_EXT_SHADER_ATOMIC_FLOAT_2_EXTENSION_NAME "VK_EXT_shader_atomic_float2"
+typedef struct VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           shaderBufferFloat16Atomics;
+    VkBool32           shaderBufferFloat16AtomicAdd;
+    VkBool32           shaderBufferFloat16AtomicMinMax;
+    VkBool32           shaderBufferFloat32AtomicMinMax;
+    VkBool32           shaderBufferFloat64AtomicMinMax;
+    VkBool32           shaderSharedFloat16Atomics;
+    VkBool32           shaderSharedFloat16AtomicAdd;
+    VkBool32           shaderSharedFloat16AtomicMinMax;
+    VkBool32           shaderSharedFloat32AtomicMinMax;
+    VkBool32           shaderSharedFloat64AtomicMinMax;
+    VkBool32           shaderImageFloat32AtomicMinMax;
+    VkBool32           sparseImageFloat32AtomicMinMax;
+} VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT;
+
+
+
+#define VK_EXT_surface_maintenance1 1
+#define VK_EXT_SURFACE_MAINTENANCE_1_SPEC_VERSION 1
+#define VK_EXT_SURFACE_MAINTENANCE_1_EXTENSION_NAME "VK_EXT_surface_maintenance1"
+
+typedef enum VkPresentScalingFlagBitsEXT {
+    VK_PRESENT_SCALING_ONE_TO_ONE_BIT_EXT = 0x00000001,
+    VK_PRESENT_SCALING_ASPECT_RATIO_STRETCH_BIT_EXT = 0x00000002,
+    VK_PRESENT_SCALING_STRETCH_BIT_EXT = 0x00000004,
+    VK_PRESENT_SCALING_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkPresentScalingFlagBitsEXT;
+typedef VkFlags VkPresentScalingFlagsEXT;
+
+typedef enum VkPresentGravityFlagBitsEXT {
+    VK_PRESENT_GRAVITY_MIN_BIT_EXT = 0x00000001,
+    VK_PRESENT_GRAVITY_MAX_BIT_EXT = 0x00000002,
+    VK_PRESENT_GRAVITY_CENTERED_BIT_EXT = 0x00000004,
+    VK_PRESENT_GRAVITY_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkPresentGravityFlagBitsEXT;
+typedef VkFlags VkPresentGravityFlagsEXT;
+typedef struct VkSurfacePresentModeEXT {
+    VkStructureType     sType;
+    void*               pNext;
+    VkPresentModeKHR    presentMode;
+} VkSurfacePresentModeEXT;
+
+typedef struct VkSurfacePresentScalingCapabilitiesEXT {
+    VkStructureType             sType;
+    void*                       pNext;
+    VkPresentScalingFlagsEXT    supportedPresentScaling;
+    VkPresentGravityFlagsEXT    supportedPresentGravityX;
+    VkPresentGravityFlagsEXT    supportedPresentGravityY;
+    VkExtent2D                  minScaledImageExtent;
+    VkExtent2D                  maxScaledImageExtent;
+} VkSurfacePresentScalingCapabilitiesEXT;
+
+typedef struct VkSurfacePresentModeCompatibilityEXT {
+    VkStructureType      sType;
+    void*                pNext;
+    uint32_t             presentModeCount;
+    VkPresentModeKHR*    pPresentModes;
+} VkSurfacePresentModeCompatibilityEXT;
+
+
+
+#define VK_EXT_swapchain_maintenance1 1
+#define VK_EXT_SWAPCHAIN_MAINTENANCE_1_SPEC_VERSION 1
+#define VK_EXT_SWAPCHAIN_MAINTENANCE_1_EXTENSION_NAME "VK_EXT_swapchain_maintenance1"
+typedef struct VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           swapchainMaintenance1;
+} VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT;
+
+typedef struct VkSwapchainPresentFenceInfoEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           swapchainCount;
+    const VkFence*     pFences;
+} VkSwapchainPresentFenceInfoEXT;
+
+typedef struct VkSwapchainPresentModesCreateInfoEXT {
+    VkStructureType            sType;
+    void*                      pNext;
+    uint32_t                   presentModeCount;
+    const VkPresentModeKHR*    pPresentModes;
+} VkSwapchainPresentModesCreateInfoEXT;
+
+typedef struct VkSwapchainPresentModeInfoEXT {
+    VkStructureType            sType;
+    void*                      pNext;
+    uint32_t                   swapchainCount;
+    const VkPresentModeKHR*    pPresentModes;
+} VkSwapchainPresentModeInfoEXT;
+
+typedef struct VkSwapchainPresentScalingCreateInfoEXT {
+    VkStructureType             sType;
+    const void*                 pNext;
+    VkPresentScalingFlagsEXT    scalingBehavior;
+    VkPresentGravityFlagsEXT    presentGravityX;
+    VkPresentGravityFlagsEXT    presentGravityY;
+} VkSwapchainPresentScalingCreateInfoEXT;
+
+typedef struct VkReleaseSwapchainImagesInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkSwapchainKHR     swapchain;
+    uint32_t           imageIndexCount;
+    const uint32_t*    pImageIndices;
+} VkReleaseSwapchainImagesInfoEXT;
+
+typedef VkResult (VKAPI_PTR *PFN_vkReleaseSwapchainImagesEXT)(VkDevice device, const VkReleaseSwapchainImagesInfoEXT* pReleaseInfo);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkReleaseSwapchainImagesEXT(
+    VkDevice                                    device,
+    const VkReleaseSwapchainImagesInfoEXT*      pReleaseInfo);
+#endif
+
+
+#define VK_EXT_shader_demote_to_helper_invocation 1
+#define VK_EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_SPEC_VERSION 1
+#define VK_EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_EXTENSION_NAME "VK_EXT_shader_demote_to_helper_invocation"
+typedef VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT;
+
+
+
+#define VK_NV_device_generated_commands 1
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkIndirectCommandsLayoutNV)
+#define VK_NV_DEVICE_GENERATED_COMMANDS_SPEC_VERSION 3
+#define VK_NV_DEVICE_GENERATED_COMMANDS_EXTENSION_NAME "VK_NV_device_generated_commands"
+
+typedef enum VkIndirectCommandsTokenTypeNV {
+    VK_INDIRECT_COMMANDS_TOKEN_TYPE_SHADER_GROUP_NV = 0,
+    VK_INDIRECT_COMMANDS_TOKEN_TYPE_STATE_FLAGS_NV = 1,
+    VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NV = 2,
+    VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NV = 3,
+    VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NV = 4,
+    VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NV = 5,
+    VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NV = 6,
+    VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_TASKS_NV = 7,
+    VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_NV = 1000328000,
+    VK_INDIRECT_COMMANDS_TOKEN_TYPE_MAX_ENUM_NV = 0x7FFFFFFF
+} VkIndirectCommandsTokenTypeNV;
+
+typedef enum VkIndirectStateFlagBitsNV {
+    VK_INDIRECT_STATE_FLAG_FRONTFACE_BIT_NV = 0x00000001,
+    VK_INDIRECT_STATE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF
+} VkIndirectStateFlagBitsNV;
+typedef VkFlags VkIndirectStateFlagsNV;
+
+typedef enum VkIndirectCommandsLayoutUsageFlagBitsNV {
+    VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EXPLICIT_PREPROCESS_BIT_NV = 0x00000001,
+    VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NV = 0x00000002,
+    VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NV = 0x00000004,
+    VK_INDIRECT_COMMANDS_LAYOUT_USAGE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF
+} VkIndirectCommandsLayoutUsageFlagBitsNV;
+typedef VkFlags VkIndirectCommandsLayoutUsageFlagsNV;
+typedef struct VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           maxGraphicsShaderGroupCount;
+    uint32_t           maxIndirectSequenceCount;
+    uint32_t           maxIndirectCommandsTokenCount;
+    uint32_t           maxIndirectCommandsStreamCount;
+    uint32_t           maxIndirectCommandsTokenOffset;
+    uint32_t           maxIndirectCommandsStreamStride;
+    uint32_t           minSequencesCountBufferOffsetAlignment;
+    uint32_t           minSequencesIndexBufferOffsetAlignment;
+    uint32_t           minIndirectCommandsBufferOffsetAlignment;
+} VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV;
+
+typedef struct VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           deviceGeneratedCommands;
+} VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV;
+
+typedef struct VkGraphicsShaderGroupCreateInfoNV {
+    VkStructureType                                 sType;
+    const void*                                     pNext;
+    uint32_t                                        stageCount;
+    const VkPipelineShaderStageCreateInfo*          pStages;
+    const VkPipelineVertexInputStateCreateInfo*     pVertexInputState;
+    const VkPipelineTessellationStateCreateInfo*    pTessellationState;
+} VkGraphicsShaderGroupCreateInfoNV;
+
+typedef struct VkGraphicsPipelineShaderGroupsCreateInfoNV {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    uint32_t                                    groupCount;
+    const VkGraphicsShaderGroupCreateInfoNV*    pGroups;
+    uint32_t                                    pipelineCount;
+    const VkPipeline*                           pPipelines;
+} VkGraphicsPipelineShaderGroupsCreateInfoNV;
+
+typedef struct VkBindShaderGroupIndirectCommandNV {
+    uint32_t    groupIndex;
+} VkBindShaderGroupIndirectCommandNV;
+
+typedef struct VkBindIndexBufferIndirectCommandNV {
+    VkDeviceAddress    bufferAddress;
+    uint32_t           size;
+    VkIndexType        indexType;
+} VkBindIndexBufferIndirectCommandNV;
+
+typedef struct VkBindVertexBufferIndirectCommandNV {
+    VkDeviceAddress    bufferAddress;
+    uint32_t           size;
+    uint32_t           stride;
+} VkBindVertexBufferIndirectCommandNV;
+
+typedef struct VkSetStateFlagsIndirectCommandNV {
+    uint32_t    data;
+} VkSetStateFlagsIndirectCommandNV;
+
+typedef struct VkIndirectCommandsStreamNV {
+    VkBuffer        buffer;
+    VkDeviceSize    offset;
+} VkIndirectCommandsStreamNV;
+
+typedef struct VkIndirectCommandsLayoutTokenNV {
+    VkStructureType                  sType;
+    const void*                      pNext;
+    VkIndirectCommandsTokenTypeNV    tokenType;
+    uint32_t                         stream;
+    uint32_t                         offset;
+    uint32_t                         vertexBindingUnit;
+    VkBool32                         vertexDynamicStride;
+    VkPipelineLayout                 pushconstantPipelineLayout;
+    VkShaderStageFlags               pushconstantShaderStageFlags;
+    uint32_t                         pushconstantOffset;
+    uint32_t                         pushconstantSize;
+    VkIndirectStateFlagsNV           indirectStateFlags;
+    uint32_t                         indexTypeCount;
+    const VkIndexType*               pIndexTypes;
+    const uint32_t*                  pIndexTypeValues;
+} VkIndirectCommandsLayoutTokenNV;
+
+typedef struct VkIndirectCommandsLayoutCreateInfoNV {
+    VkStructureType                           sType;
+    const void*                               pNext;
+    VkIndirectCommandsLayoutUsageFlagsNV      flags;
+    VkPipelineBindPoint                       pipelineBindPoint;
+    uint32_t                                  tokenCount;
+    const VkIndirectCommandsLayoutTokenNV*    pTokens;
+    uint32_t                                  streamCount;
+    const uint32_t*                           pStreamStrides;
+} VkIndirectCommandsLayoutCreateInfoNV;
+
+typedef struct VkGeneratedCommandsInfoNV {
+    VkStructureType                      sType;
+    const void*                          pNext;
+    VkPipelineBindPoint                  pipelineBindPoint;
+    VkPipeline                           pipeline;
+    VkIndirectCommandsLayoutNV           indirectCommandsLayout;
+    uint32_t                             streamCount;
+    const VkIndirectCommandsStreamNV*    pStreams;
+    uint32_t                             sequencesCount;
+    VkBuffer                             preprocessBuffer;
+    VkDeviceSize                         preprocessOffset;
+    VkDeviceSize                         preprocessSize;
+    VkBuffer                             sequencesCountBuffer;
+    VkDeviceSize                         sequencesCountOffset;
+    VkBuffer                             sequencesIndexBuffer;
+    VkDeviceSize                         sequencesIndexOffset;
+} VkGeneratedCommandsInfoNV;
+
+typedef struct VkGeneratedCommandsMemoryRequirementsInfoNV {
+    VkStructureType               sType;
+    const void*                   pNext;
+    VkPipelineBindPoint           pipelineBindPoint;
+    VkPipeline                    pipeline;
+    VkIndirectCommandsLayoutNV    indirectCommandsLayout;
+    uint32_t                      maxSequencesCount;
+} VkGeneratedCommandsMemoryRequirementsInfoNV;
+
+typedef void (VKAPI_PTR *PFN_vkGetGeneratedCommandsMemoryRequirementsNV)(VkDevice device, const VkGeneratedCommandsMemoryRequirementsInfoNV* pInfo, VkMemoryRequirements2* pMemoryRequirements);
+typedef void (VKAPI_PTR *PFN_vkCmdPreprocessGeneratedCommandsNV)(VkCommandBuffer commandBuffer, const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdExecuteGeneratedCommandsNV)(VkCommandBuffer commandBuffer, VkBool32 isPreprocessed, const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdBindPipelineShaderGroupNV)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline, uint32_t groupIndex);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateIndirectCommandsLayoutNV)(VkDevice device, const VkIndirectCommandsLayoutCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectCommandsLayoutNV* pIndirectCommandsLayout);
+typedef void (VKAPI_PTR *PFN_vkDestroyIndirectCommandsLayoutNV)(VkDevice device, VkIndirectCommandsLayoutNV indirectCommandsLayout, const VkAllocationCallbacks* pAllocator);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkGetGeneratedCommandsMemoryRequirementsNV(
+    VkDevice                                    device,
+    const VkGeneratedCommandsMemoryRequirementsInfoNV* pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdPreprocessGeneratedCommandsNV(
+    VkCommandBuffer                             commandBuffer,
+    const VkGeneratedCommandsInfoNV*            pGeneratedCommandsInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdExecuteGeneratedCommandsNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBool32                                    isPreprocessed,
+    const VkGeneratedCommandsInfoNV*            pGeneratedCommandsInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBindPipelineShaderGroupNV(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineBindPoint                         pipelineBindPoint,
+    VkPipeline                                  pipeline,
+    uint32_t                                    groupIndex);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateIndirectCommandsLayoutNV(
+    VkDevice                                    device,
+    const VkIndirectCommandsLayoutCreateInfoNV* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkIndirectCommandsLayoutNV*                 pIndirectCommandsLayout);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyIndirectCommandsLayoutNV(
+    VkDevice                                    device,
+    VkIndirectCommandsLayoutNV                  indirectCommandsLayout,
+    const VkAllocationCallbacks*                pAllocator);
+#endif
+
+
+#define VK_NV_inherited_viewport_scissor 1
+#define VK_NV_INHERITED_VIEWPORT_SCISSOR_SPEC_VERSION 1
+#define VK_NV_INHERITED_VIEWPORT_SCISSOR_EXTENSION_NAME "VK_NV_inherited_viewport_scissor"
+typedef struct VkPhysicalDeviceInheritedViewportScissorFeaturesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           inheritedViewportScissor2D;
+} VkPhysicalDeviceInheritedViewportScissorFeaturesNV;
+
+typedef struct VkCommandBufferInheritanceViewportScissorInfoNV {
+    VkStructureType      sType;
+    const void*          pNext;
+    VkBool32             viewportScissor2D;
+    uint32_t             viewportDepthCount;
+    const VkViewport*    pViewportDepths;
+} VkCommandBufferInheritanceViewportScissorInfoNV;
+
+
+
+#define VK_EXT_texel_buffer_alignment 1
+#define VK_EXT_TEXEL_BUFFER_ALIGNMENT_SPEC_VERSION 1
+#define VK_EXT_TEXEL_BUFFER_ALIGNMENT_EXTENSION_NAME "VK_EXT_texel_buffer_alignment"
+typedef struct VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           texelBufferAlignment;
+} VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT;
+
+typedef VkPhysicalDeviceTexelBufferAlignmentProperties VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT;
+
+
+
+#define VK_QCOM_render_pass_transform 1
+#define VK_QCOM_RENDER_PASS_TRANSFORM_SPEC_VERSION 3
+#define VK_QCOM_RENDER_PASS_TRANSFORM_EXTENSION_NAME "VK_QCOM_render_pass_transform"
+typedef struct VkRenderPassTransformBeginInfoQCOM {
+    VkStructureType                  sType;
+    void*                            pNext;
+    VkSurfaceTransformFlagBitsKHR    transform;
+} VkRenderPassTransformBeginInfoQCOM;
+
+typedef struct VkCommandBufferInheritanceRenderPassTransformInfoQCOM {
+    VkStructureType                  sType;
+    void*                            pNext;
+    VkSurfaceTransformFlagBitsKHR    transform;
+    VkRect2D                         renderArea;
+} VkCommandBufferInheritanceRenderPassTransformInfoQCOM;
+
+
+
+#define VK_EXT_device_memory_report 1
+#define VK_EXT_DEVICE_MEMORY_REPORT_SPEC_VERSION 2
+#define VK_EXT_DEVICE_MEMORY_REPORT_EXTENSION_NAME "VK_EXT_device_memory_report"
+
+typedef enum VkDeviceMemoryReportEventTypeEXT {
+    VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_ALLOCATE_EXT = 0,
+    VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_FREE_EXT = 1,
+    VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_IMPORT_EXT = 2,
+    VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_UNIMPORT_EXT = 3,
+    VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_ALLOCATION_FAILED_EXT = 4,
+    VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkDeviceMemoryReportEventTypeEXT;
+typedef VkFlags VkDeviceMemoryReportFlagsEXT;
+typedef struct VkPhysicalDeviceDeviceMemoryReportFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           deviceMemoryReport;
+} VkPhysicalDeviceDeviceMemoryReportFeaturesEXT;
+
+typedef struct VkDeviceMemoryReportCallbackDataEXT {
+    VkStructureType                     sType;
+    void*                               pNext;
+    VkDeviceMemoryReportFlagsEXT        flags;
+    VkDeviceMemoryReportEventTypeEXT    type;
+    uint64_t                            memoryObjectId;
+    VkDeviceSize                        size;
+    VkObjectType                        objectType;
+    uint64_t                            objectHandle;
+    uint32_t                            heapIndex;
+} VkDeviceMemoryReportCallbackDataEXT;
+
+typedef void (VKAPI_PTR *PFN_vkDeviceMemoryReportCallbackEXT)(
+    const VkDeviceMemoryReportCallbackDataEXT*  pCallbackData,
+    void*                                       pUserData);
+
+typedef struct VkDeviceDeviceMemoryReportCreateInfoEXT {
+    VkStructureType                        sType;
+    const void*                            pNext;
+    VkDeviceMemoryReportFlagsEXT           flags;
+    PFN_vkDeviceMemoryReportCallbackEXT    pfnUserCallback;
+    void*                                  pUserData;
+} VkDeviceDeviceMemoryReportCreateInfoEXT;
+
+
+
+#define VK_EXT_acquire_drm_display 1
+#define VK_EXT_ACQUIRE_DRM_DISPLAY_SPEC_VERSION 1
+#define VK_EXT_ACQUIRE_DRM_DISPLAY_EXTENSION_NAME "VK_EXT_acquire_drm_display"
+typedef VkResult (VKAPI_PTR *PFN_vkAcquireDrmDisplayEXT)(VkPhysicalDevice physicalDevice, int32_t drmFd, VkDisplayKHR display);
+typedef VkResult (VKAPI_PTR *PFN_vkGetDrmDisplayEXT)(VkPhysicalDevice physicalDevice, int32_t drmFd, uint32_t connectorId, VkDisplayKHR* display);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkAcquireDrmDisplayEXT(
+    VkPhysicalDevice                            physicalDevice,
+    int32_t                                     drmFd,
+    VkDisplayKHR                                display);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetDrmDisplayEXT(
+    VkPhysicalDevice                            physicalDevice,
+    int32_t                                     drmFd,
+    uint32_t                                    connectorId,
+    VkDisplayKHR*                               display);
+#endif
+
+
+#define VK_EXT_robustness2 1
+#define VK_EXT_ROBUSTNESS_2_SPEC_VERSION  1
+#define VK_EXT_ROBUSTNESS_2_EXTENSION_NAME "VK_EXT_robustness2"
+typedef struct VkPhysicalDeviceRobustness2FeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           robustBufferAccess2;
+    VkBool32           robustImageAccess2;
+    VkBool32           nullDescriptor;
+} VkPhysicalDeviceRobustness2FeaturesEXT;
+
+typedef struct VkPhysicalDeviceRobustness2PropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkDeviceSize       robustStorageBufferAccessSizeAlignment;
+    VkDeviceSize       robustUniformBufferAccessSizeAlignment;
+} VkPhysicalDeviceRobustness2PropertiesEXT;
+
+
+
+#define VK_EXT_custom_border_color 1
+#define VK_EXT_CUSTOM_BORDER_COLOR_SPEC_VERSION 12
+#define VK_EXT_CUSTOM_BORDER_COLOR_EXTENSION_NAME "VK_EXT_custom_border_color"
+typedef struct VkSamplerCustomBorderColorCreateInfoEXT {
+    VkStructureType      sType;
+    const void*          pNext;
+    VkClearColorValue    customBorderColor;
+    VkFormat             format;
+} VkSamplerCustomBorderColorCreateInfoEXT;
+
+typedef struct VkPhysicalDeviceCustomBorderColorPropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           maxCustomBorderColorSamplers;
+} VkPhysicalDeviceCustomBorderColorPropertiesEXT;
+
+typedef struct VkPhysicalDeviceCustomBorderColorFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           customBorderColors;
+    VkBool32           customBorderColorWithoutFormat;
+} VkPhysicalDeviceCustomBorderColorFeaturesEXT;
+
+
+
+#define VK_GOOGLE_user_type 1
+#define VK_GOOGLE_USER_TYPE_SPEC_VERSION  1
+#define VK_GOOGLE_USER_TYPE_EXTENSION_NAME "VK_GOOGLE_user_type"
+
+
+#define VK_NV_present_barrier 1
+#define VK_NV_PRESENT_BARRIER_SPEC_VERSION 1
+#define VK_NV_PRESENT_BARRIER_EXTENSION_NAME "VK_NV_present_barrier"
+typedef struct VkPhysicalDevicePresentBarrierFeaturesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           presentBarrier;
+} VkPhysicalDevicePresentBarrierFeaturesNV;
+
+typedef struct VkSurfaceCapabilitiesPresentBarrierNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           presentBarrierSupported;
+} VkSurfaceCapabilitiesPresentBarrierNV;
+
+typedef struct VkSwapchainPresentBarrierCreateInfoNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           presentBarrierEnable;
+} VkSwapchainPresentBarrierCreateInfoNV;
+
+
+
+#define VK_EXT_private_data 1
+typedef VkPrivateDataSlot VkPrivateDataSlotEXT;
+
+#define VK_EXT_PRIVATE_DATA_SPEC_VERSION  1
+#define VK_EXT_PRIVATE_DATA_EXTENSION_NAME "VK_EXT_private_data"
+typedef VkPrivateDataSlotCreateFlags VkPrivateDataSlotCreateFlagsEXT;
+
+typedef VkPhysicalDevicePrivateDataFeatures VkPhysicalDevicePrivateDataFeaturesEXT;
+
+typedef VkDevicePrivateDataCreateInfo VkDevicePrivateDataCreateInfoEXT;
+
+typedef VkPrivateDataSlotCreateInfo VkPrivateDataSlotCreateInfoEXT;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreatePrivateDataSlotEXT)(VkDevice device, const VkPrivateDataSlotCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPrivateDataSlot* pPrivateDataSlot);
+typedef void (VKAPI_PTR *PFN_vkDestroyPrivateDataSlotEXT)(VkDevice device, VkPrivateDataSlot privateDataSlot, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkSetPrivateDataEXT)(VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t data);
+typedef void (VKAPI_PTR *PFN_vkGetPrivateDataEXT)(VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t* pData);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreatePrivateDataSlotEXT(
+    VkDevice                                    device,
+    const VkPrivateDataSlotCreateInfo*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPrivateDataSlot*                          pPrivateDataSlot);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyPrivateDataSlotEXT(
+    VkDevice                                    device,
+    VkPrivateDataSlot                           privateDataSlot,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkSetPrivateDataEXT(
+    VkDevice                                    device,
+    VkObjectType                                objectType,
+    uint64_t                                    objectHandle,
+    VkPrivateDataSlot                           privateDataSlot,
+    uint64_t                                    data);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPrivateDataEXT(
+    VkDevice                                    device,
+    VkObjectType                                objectType,
+    uint64_t                                    objectHandle,
+    VkPrivateDataSlot                           privateDataSlot,
+    uint64_t*                                   pData);
+#endif
+
+
+#define VK_EXT_pipeline_creation_cache_control 1
+#define VK_EXT_PIPELINE_CREATION_CACHE_CONTROL_SPEC_VERSION 3
+#define VK_EXT_PIPELINE_CREATION_CACHE_CONTROL_EXTENSION_NAME "VK_EXT_pipeline_creation_cache_control"
+typedef VkPhysicalDevicePipelineCreationCacheControlFeatures VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT;
+
+
+
+#define VK_NV_device_diagnostics_config 1
+#define VK_NV_DEVICE_DIAGNOSTICS_CONFIG_SPEC_VERSION 2
+#define VK_NV_DEVICE_DIAGNOSTICS_CONFIG_EXTENSION_NAME "VK_NV_device_diagnostics_config"
+
+typedef enum VkDeviceDiagnosticsConfigFlagBitsNV {
+    VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_SHADER_DEBUG_INFO_BIT_NV = 0x00000001,
+    VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_RESOURCE_TRACKING_BIT_NV = 0x00000002,
+    VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_AUTOMATIC_CHECKPOINTS_BIT_NV = 0x00000004,
+    VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_SHADER_ERROR_REPORTING_BIT_NV = 0x00000008,
+    VK_DEVICE_DIAGNOSTICS_CONFIG_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF
+} VkDeviceDiagnosticsConfigFlagBitsNV;
+typedef VkFlags VkDeviceDiagnosticsConfigFlagsNV;
+typedef struct VkPhysicalDeviceDiagnosticsConfigFeaturesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           diagnosticsConfig;
+} VkPhysicalDeviceDiagnosticsConfigFeaturesNV;
+
+typedef struct VkDeviceDiagnosticsConfigCreateInfoNV {
+    VkStructureType                     sType;
+    const void*                         pNext;
+    VkDeviceDiagnosticsConfigFlagsNV    flags;
+} VkDeviceDiagnosticsConfigCreateInfoNV;
+
+
+
+#define VK_QCOM_render_pass_store_ops 1
+#define VK_QCOM_RENDER_PASS_STORE_OPS_SPEC_VERSION 2
+#define VK_QCOM_RENDER_PASS_STORE_OPS_EXTENSION_NAME "VK_QCOM_render_pass_store_ops"
+
+
+#define VK_EXT_descriptor_buffer 1
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkAccelerationStructureKHR)
+#define VK_EXT_DESCRIPTOR_BUFFER_SPEC_VERSION 1
+#define VK_EXT_DESCRIPTOR_BUFFER_EXTENSION_NAME "VK_EXT_descriptor_buffer"
+typedef struct VkPhysicalDeviceDescriptorBufferPropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           combinedImageSamplerDescriptorSingleArray;
+    VkBool32           bufferlessPushDescriptors;
+    VkBool32           allowSamplerImageViewPostSubmitCreation;
+    VkDeviceSize       descriptorBufferOffsetAlignment;
+    uint32_t           maxDescriptorBufferBindings;
+    uint32_t           maxResourceDescriptorBufferBindings;
+    uint32_t           maxSamplerDescriptorBufferBindings;
+    uint32_t           maxEmbeddedImmutableSamplerBindings;
+    uint32_t           maxEmbeddedImmutableSamplers;
+    size_t             bufferCaptureReplayDescriptorDataSize;
+    size_t             imageCaptureReplayDescriptorDataSize;
+    size_t             imageViewCaptureReplayDescriptorDataSize;
+    size_t             samplerCaptureReplayDescriptorDataSize;
+    size_t             accelerationStructureCaptureReplayDescriptorDataSize;
+    size_t             samplerDescriptorSize;
+    size_t             combinedImageSamplerDescriptorSize;
+    size_t             sampledImageDescriptorSize;
+    size_t             storageImageDescriptorSize;
+    size_t             uniformTexelBufferDescriptorSize;
+    size_t             robustUniformTexelBufferDescriptorSize;
+    size_t             storageTexelBufferDescriptorSize;
+    size_t             robustStorageTexelBufferDescriptorSize;
+    size_t             uniformBufferDescriptorSize;
+    size_t             robustUniformBufferDescriptorSize;
+    size_t             storageBufferDescriptorSize;
+    size_t             robustStorageBufferDescriptorSize;
+    size_t             inputAttachmentDescriptorSize;
+    size_t             accelerationStructureDescriptorSize;
+    VkDeviceSize       maxSamplerDescriptorBufferRange;
+    VkDeviceSize       maxResourceDescriptorBufferRange;
+    VkDeviceSize       samplerDescriptorBufferAddressSpaceSize;
+    VkDeviceSize       resourceDescriptorBufferAddressSpaceSize;
+    VkDeviceSize       descriptorBufferAddressSpaceSize;
+} VkPhysicalDeviceDescriptorBufferPropertiesEXT;
+
+typedef struct VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    size_t             combinedImageSamplerDensityMapDescriptorSize;
+} VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT;
+
+typedef struct VkPhysicalDeviceDescriptorBufferFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           descriptorBuffer;
+    VkBool32           descriptorBufferCaptureReplay;
+    VkBool32           descriptorBufferImageLayoutIgnored;
+    VkBool32           descriptorBufferPushDescriptors;
+} VkPhysicalDeviceDescriptorBufferFeaturesEXT;
+
+typedef struct VkDescriptorAddressInfoEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkDeviceAddress    address;
+    VkDeviceSize       range;
+    VkFormat           format;
+} VkDescriptorAddressInfoEXT;
+
+typedef struct VkDescriptorBufferBindingInfoEXT {
+    VkStructureType       sType;
+    void*                 pNext;
+    VkDeviceAddress       address;
+    VkBufferUsageFlags    usage;
+} VkDescriptorBufferBindingInfoEXT;
+
+typedef struct VkDescriptorBufferBindingPushDescriptorBufferHandleEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBuffer           buffer;
+} VkDescriptorBufferBindingPushDescriptorBufferHandleEXT;
+
+typedef union VkDescriptorDataEXT {
+    const VkSampler*                     pSampler;
+    const VkDescriptorImageInfo*         pCombinedImageSampler;
+    const VkDescriptorImageInfo*         pInputAttachmentImage;
+    const VkDescriptorImageInfo*         pSampledImage;
+    const VkDescriptorImageInfo*         pStorageImage;
+    const VkDescriptorAddressInfoEXT*    pUniformTexelBuffer;
+    const VkDescriptorAddressInfoEXT*    pStorageTexelBuffer;
+    const VkDescriptorAddressInfoEXT*    pUniformBuffer;
+    const VkDescriptorAddressInfoEXT*    pStorageBuffer;
+    VkDeviceAddress                      accelerationStructure;
+} VkDescriptorDataEXT;
+
+typedef struct VkDescriptorGetInfoEXT {
+    VkStructureType        sType;
+    const void*            pNext;
+    VkDescriptorType       type;
+    VkDescriptorDataEXT    data;
+} VkDescriptorGetInfoEXT;
+
+typedef struct VkBufferCaptureDescriptorDataInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkBuffer           buffer;
+} VkBufferCaptureDescriptorDataInfoEXT;
+
+typedef struct VkImageCaptureDescriptorDataInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkImage            image;
+} VkImageCaptureDescriptorDataInfoEXT;
+
+typedef struct VkImageViewCaptureDescriptorDataInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkImageView        imageView;
+} VkImageViewCaptureDescriptorDataInfoEXT;
+
+typedef struct VkSamplerCaptureDescriptorDataInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkSampler          sampler;
+} VkSamplerCaptureDescriptorDataInfoEXT;
+
+typedef struct VkOpaqueCaptureDescriptorDataCreateInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    const void*        opaqueCaptureDescriptorData;
+} VkOpaqueCaptureDescriptorDataCreateInfoEXT;
+
+typedef struct VkAccelerationStructureCaptureDescriptorDataInfoEXT {
+    VkStructureType               sType;
+    const void*                   pNext;
+    VkAccelerationStructureKHR    accelerationStructure;
+    VkAccelerationStructureNV     accelerationStructureNV;
+} VkAccelerationStructureCaptureDescriptorDataInfoEXT;
+
+typedef void (VKAPI_PTR *PFN_vkGetDescriptorSetLayoutSizeEXT)(VkDevice device, VkDescriptorSetLayout layout, VkDeviceSize* pLayoutSizeInBytes);
+typedef void (VKAPI_PTR *PFN_vkGetDescriptorSetLayoutBindingOffsetEXT)(VkDevice device, VkDescriptorSetLayout layout, uint32_t binding, VkDeviceSize* pOffset);
+typedef void (VKAPI_PTR *PFN_vkGetDescriptorEXT)(VkDevice device, const VkDescriptorGetInfoEXT* pDescriptorInfo, size_t dataSize, void* pDescriptor);
+typedef void (VKAPI_PTR *PFN_vkCmdBindDescriptorBuffersEXT)(VkCommandBuffer commandBuffer, uint32_t bufferCount, const VkDescriptorBufferBindingInfoEXT* pBindingInfos);
+typedef void (VKAPI_PTR *PFN_vkCmdSetDescriptorBufferOffsetsEXT)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t setCount, const uint32_t* pBufferIndices, const VkDeviceSize* pOffsets);
+typedef void (VKAPI_PTR *PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set);
+typedef VkResult (VKAPI_PTR *PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT)(VkDevice device, const VkBufferCaptureDescriptorDataInfoEXT* pInfo, void* pData);
+typedef VkResult (VKAPI_PTR *PFN_vkGetImageOpaqueCaptureDescriptorDataEXT)(VkDevice device, const VkImageCaptureDescriptorDataInfoEXT* pInfo, void* pData);
+typedef VkResult (VKAPI_PTR *PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT)(VkDevice device, const VkImageViewCaptureDescriptorDataInfoEXT* pInfo, void* pData);
+typedef VkResult (VKAPI_PTR *PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT)(VkDevice device, const VkSamplerCaptureDescriptorDataInfoEXT* pInfo, void* pData);
+typedef VkResult (VKAPI_PTR *PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT)(VkDevice device, const VkAccelerationStructureCaptureDescriptorDataInfoEXT* pInfo, void* pData);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetLayoutSizeEXT(
+    VkDevice                                    device,
+    VkDescriptorSetLayout                       layout,
+    VkDeviceSize*                               pLayoutSizeInBytes);
+
+VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetLayoutBindingOffsetEXT(
+    VkDevice                                    device,
+    VkDescriptorSetLayout                       layout,
+    uint32_t                                    binding,
+    VkDeviceSize*                               pOffset);
+
+VKAPI_ATTR void VKAPI_CALL vkGetDescriptorEXT(
+    VkDevice                                    device,
+    const VkDescriptorGetInfoEXT*               pDescriptorInfo,
+    size_t                                      dataSize,
+    void*                                       pDescriptor);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBindDescriptorBuffersEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    bufferCount,
+    const VkDescriptorBufferBindingInfoEXT*     pBindingInfos);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetDescriptorBufferOffsetsEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineBindPoint                         pipelineBindPoint,
+    VkPipelineLayout                            layout,
+    uint32_t                                    firstSet,
+    uint32_t                                    setCount,
+    const uint32_t*                             pBufferIndices,
+    const VkDeviceSize*                         pOffsets);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBindDescriptorBufferEmbeddedSamplersEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineBindPoint                         pipelineBindPoint,
+    VkPipelineLayout                            layout,
+    uint32_t                                    set);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetBufferOpaqueCaptureDescriptorDataEXT(
+    VkDevice                                    device,
+    const VkBufferCaptureDescriptorDataInfoEXT* pInfo,
+    void*                                       pData);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetImageOpaqueCaptureDescriptorDataEXT(
+    VkDevice                                    device,
+    const VkImageCaptureDescriptorDataInfoEXT*  pInfo,
+    void*                                       pData);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetImageViewOpaqueCaptureDescriptorDataEXT(
+    VkDevice                                    device,
+    const VkImageViewCaptureDescriptorDataInfoEXT* pInfo,
+    void*                                       pData);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetSamplerOpaqueCaptureDescriptorDataEXT(
+    VkDevice                                    device,
+    const VkSamplerCaptureDescriptorDataInfoEXT* pInfo,
+    void*                                       pData);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT(
+    VkDevice                                    device,
+    const VkAccelerationStructureCaptureDescriptorDataInfoEXT* pInfo,
+    void*                                       pData);
+#endif
+
+
+#define VK_EXT_graphics_pipeline_library 1
+#define VK_EXT_GRAPHICS_PIPELINE_LIBRARY_SPEC_VERSION 1
+#define VK_EXT_GRAPHICS_PIPELINE_LIBRARY_EXTENSION_NAME "VK_EXT_graphics_pipeline_library"
+
+typedef enum VkGraphicsPipelineLibraryFlagBitsEXT {
+    VK_GRAPHICS_PIPELINE_LIBRARY_VERTEX_INPUT_INTERFACE_BIT_EXT = 0x00000001,
+    VK_GRAPHICS_PIPELINE_LIBRARY_PRE_RASTERIZATION_SHADERS_BIT_EXT = 0x00000002,
+    VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_SHADER_BIT_EXT = 0x00000004,
+    VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_OUTPUT_INTERFACE_BIT_EXT = 0x00000008,
+    VK_GRAPHICS_PIPELINE_LIBRARY_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkGraphicsPipelineLibraryFlagBitsEXT;
+typedef VkFlags VkGraphicsPipelineLibraryFlagsEXT;
+typedef struct VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           graphicsPipelineLibrary;
+} VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT;
+
+typedef struct VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           graphicsPipelineLibraryFastLinking;
+    VkBool32           graphicsPipelineLibraryIndependentInterpolationDecoration;
+} VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT;
+
+typedef struct VkGraphicsPipelineLibraryCreateInfoEXT {
+    VkStructureType                      sType;
+    void*                                pNext;
+    VkGraphicsPipelineLibraryFlagsEXT    flags;
+} VkGraphicsPipelineLibraryCreateInfoEXT;
+
+
+
+#define VK_AMD_shader_early_and_late_fragment_tests 1
+#define VK_AMD_SHADER_EARLY_AND_LATE_FRAGMENT_TESTS_SPEC_VERSION 1
+#define VK_AMD_SHADER_EARLY_AND_LATE_FRAGMENT_TESTS_EXTENSION_NAME "VK_AMD_shader_early_and_late_fragment_tests"
+typedef struct VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           shaderEarlyAndLateFragmentTests;
+} VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD;
+
+
+
+#define VK_NV_fragment_shading_rate_enums 1
+#define VK_NV_FRAGMENT_SHADING_RATE_ENUMS_SPEC_VERSION 1
+#define VK_NV_FRAGMENT_SHADING_RATE_ENUMS_EXTENSION_NAME "VK_NV_fragment_shading_rate_enums"
+
+typedef enum VkFragmentShadingRateTypeNV {
+    VK_FRAGMENT_SHADING_RATE_TYPE_FRAGMENT_SIZE_NV = 0,
+    VK_FRAGMENT_SHADING_RATE_TYPE_ENUMS_NV = 1,
+    VK_FRAGMENT_SHADING_RATE_TYPE_MAX_ENUM_NV = 0x7FFFFFFF
+} VkFragmentShadingRateTypeNV;
+
+typedef enum VkFragmentShadingRateNV {
+    VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_PIXEL_NV = 0,
+    VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_1X2_PIXELS_NV = 1,
+    VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X1_PIXELS_NV = 4,
+    VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X2_PIXELS_NV = 5,
+    VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X4_PIXELS_NV = 6,
+    VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_4X2_PIXELS_NV = 9,
+    VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_4X4_PIXELS_NV = 10,
+    VK_FRAGMENT_SHADING_RATE_2_INVOCATIONS_PER_PIXEL_NV = 11,
+    VK_FRAGMENT_SHADING_RATE_4_INVOCATIONS_PER_PIXEL_NV = 12,
+    VK_FRAGMENT_SHADING_RATE_8_INVOCATIONS_PER_PIXEL_NV = 13,
+    VK_FRAGMENT_SHADING_RATE_16_INVOCATIONS_PER_PIXEL_NV = 14,
+    VK_FRAGMENT_SHADING_RATE_NO_INVOCATIONS_NV = 15,
+    VK_FRAGMENT_SHADING_RATE_MAX_ENUM_NV = 0x7FFFFFFF
+} VkFragmentShadingRateNV;
+typedef struct VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           fragmentShadingRateEnums;
+    VkBool32           supersampleFragmentShadingRates;
+    VkBool32           noInvocationFragmentShadingRates;
+} VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV;
+
+typedef struct VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV {
+    VkStructureType          sType;
+    void*                    pNext;
+    VkSampleCountFlagBits    maxFragmentShadingRateInvocationCount;
+} VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV;
+
+typedef struct VkPipelineFragmentShadingRateEnumStateCreateInfoNV {
+    VkStructureType                       sType;
+    const void*                           pNext;
+    VkFragmentShadingRateTypeNV           shadingRateType;
+    VkFragmentShadingRateNV               shadingRate;
+    VkFragmentShadingRateCombinerOpKHR    combinerOps[2];
+} VkPipelineFragmentShadingRateEnumStateCreateInfoNV;
+
+typedef void (VKAPI_PTR *PFN_vkCmdSetFragmentShadingRateEnumNV)(VkCommandBuffer           commandBuffer, VkFragmentShadingRateNV                     shadingRate, const VkFragmentShadingRateCombinerOpKHR    combinerOps[2]);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdSetFragmentShadingRateEnumNV(
+    VkCommandBuffer                             commandBuffer,
+    VkFragmentShadingRateNV                     shadingRate,
+    const VkFragmentShadingRateCombinerOpKHR    combinerOps[2]);
+#endif
+
+
+#define VK_NV_ray_tracing_motion_blur 1
+#define VK_NV_RAY_TRACING_MOTION_BLUR_SPEC_VERSION 1
+#define VK_NV_RAY_TRACING_MOTION_BLUR_EXTENSION_NAME "VK_NV_ray_tracing_motion_blur"
+
+typedef enum VkAccelerationStructureMotionInstanceTypeNV {
+    VK_ACCELERATION_STRUCTURE_MOTION_INSTANCE_TYPE_STATIC_NV = 0,
+    VK_ACCELERATION_STRUCTURE_MOTION_INSTANCE_TYPE_MATRIX_MOTION_NV = 1,
+    VK_ACCELERATION_STRUCTURE_MOTION_INSTANCE_TYPE_SRT_MOTION_NV = 2,
+    VK_ACCELERATION_STRUCTURE_MOTION_INSTANCE_TYPE_MAX_ENUM_NV = 0x7FFFFFFF
+} VkAccelerationStructureMotionInstanceTypeNV;
+typedef VkFlags VkAccelerationStructureMotionInfoFlagsNV;
+typedef VkFlags VkAccelerationStructureMotionInstanceFlagsNV;
+typedef union VkDeviceOrHostAddressConstKHR {
+    VkDeviceAddress    deviceAddress;
+    const void*        hostAddress;
+} VkDeviceOrHostAddressConstKHR;
+
+typedef struct VkAccelerationStructureGeometryMotionTrianglesDataNV {
+    VkStructureType                  sType;
+    const void*                      pNext;
+    VkDeviceOrHostAddressConstKHR    vertexData;
+} VkAccelerationStructureGeometryMotionTrianglesDataNV;
+
+typedef struct VkAccelerationStructureMotionInfoNV {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    uint32_t                                    maxInstances;
+    VkAccelerationStructureMotionInfoFlagsNV    flags;
+} VkAccelerationStructureMotionInfoNV;
+
+typedef struct VkAccelerationStructureMatrixMotionInstanceNV {
+    VkTransformMatrixKHR          transformT0;
+    VkTransformMatrixKHR          transformT1;
+    uint32_t                      instanceCustomIndex:24;
+    uint32_t                      mask:8;
+    uint32_t                      instanceShaderBindingTableRecordOffset:24;
+    VkGeometryInstanceFlagsKHR    flags:8;
+    uint64_t                      accelerationStructureReference;
+} VkAccelerationStructureMatrixMotionInstanceNV;
+
+typedef struct VkSRTDataNV {
+    float    sx;
+    float    a;
+    float    b;
+    float    pvx;
+    float    sy;
+    float    c;
+    float    pvy;
+    float    sz;
+    float    pvz;
+    float    qx;
+    float    qy;
+    float    qz;
+    float    qw;
+    float    tx;
+    float    ty;
+    float    tz;
+} VkSRTDataNV;
+
+typedef struct VkAccelerationStructureSRTMotionInstanceNV {
+    VkSRTDataNV                   transformT0;
+    VkSRTDataNV                   transformT1;
+    uint32_t                      instanceCustomIndex:24;
+    uint32_t                      mask:8;
+    uint32_t                      instanceShaderBindingTableRecordOffset:24;
+    VkGeometryInstanceFlagsKHR    flags:8;
+    uint64_t                      accelerationStructureReference;
+} VkAccelerationStructureSRTMotionInstanceNV;
+
+typedef union VkAccelerationStructureMotionInstanceDataNV {
+    VkAccelerationStructureInstanceKHR               staticInstance;
+    VkAccelerationStructureMatrixMotionInstanceNV    matrixMotionInstance;
+    VkAccelerationStructureSRTMotionInstanceNV       srtMotionInstance;
+} VkAccelerationStructureMotionInstanceDataNV;
+
+typedef struct VkAccelerationStructureMotionInstanceNV {
+    VkAccelerationStructureMotionInstanceTypeNV     type;
+    VkAccelerationStructureMotionInstanceFlagsNV    flags;
+    VkAccelerationStructureMotionInstanceDataNV     data;
+} VkAccelerationStructureMotionInstanceNV;
+
+typedef struct VkPhysicalDeviceRayTracingMotionBlurFeaturesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           rayTracingMotionBlur;
+    VkBool32           rayTracingMotionBlurPipelineTraceRaysIndirect;
+} VkPhysicalDeviceRayTracingMotionBlurFeaturesNV;
+
+
+
+#define VK_EXT_ycbcr_2plane_444_formats 1
+#define VK_EXT_YCBCR_2PLANE_444_FORMATS_SPEC_VERSION 1
+#define VK_EXT_YCBCR_2PLANE_444_FORMATS_EXTENSION_NAME "VK_EXT_ycbcr_2plane_444_formats"
+typedef struct VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           ycbcr2plane444Formats;
+} VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT;
+
+
+
+#define VK_EXT_fragment_density_map2 1
+#define VK_EXT_FRAGMENT_DENSITY_MAP_2_SPEC_VERSION 1
+#define VK_EXT_FRAGMENT_DENSITY_MAP_2_EXTENSION_NAME "VK_EXT_fragment_density_map2"
+typedef struct VkPhysicalDeviceFragmentDensityMap2FeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           fragmentDensityMapDeferred;
+} VkPhysicalDeviceFragmentDensityMap2FeaturesEXT;
+
+typedef struct VkPhysicalDeviceFragmentDensityMap2PropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           subsampledLoads;
+    VkBool32           subsampledCoarseReconstructionEarlyAccess;
+    uint32_t           maxSubsampledArrayLayers;
+    uint32_t           maxDescriptorSetSubsampledSamplers;
+} VkPhysicalDeviceFragmentDensityMap2PropertiesEXT;
+
+
+
+#define VK_QCOM_rotated_copy_commands 1
+#define VK_QCOM_ROTATED_COPY_COMMANDS_SPEC_VERSION 1
+#define VK_QCOM_ROTATED_COPY_COMMANDS_EXTENSION_NAME "VK_QCOM_rotated_copy_commands"
+typedef struct VkCopyCommandTransformInfoQCOM {
+    VkStructureType                  sType;
+    const void*                      pNext;
+    VkSurfaceTransformFlagBitsKHR    transform;
+} VkCopyCommandTransformInfoQCOM;
+
+
+
+#define VK_EXT_image_robustness 1
+#define VK_EXT_IMAGE_ROBUSTNESS_SPEC_VERSION 1
+#define VK_EXT_IMAGE_ROBUSTNESS_EXTENSION_NAME "VK_EXT_image_robustness"
+typedef VkPhysicalDeviceImageRobustnessFeatures VkPhysicalDeviceImageRobustnessFeaturesEXT;
+
+
+
+#define VK_EXT_image_compression_control 1
+#define VK_EXT_IMAGE_COMPRESSION_CONTROL_SPEC_VERSION 1
+#define VK_EXT_IMAGE_COMPRESSION_CONTROL_EXTENSION_NAME "VK_EXT_image_compression_control"
+
+typedef enum VkImageCompressionFlagBitsEXT {
+    VK_IMAGE_COMPRESSION_DEFAULT_EXT = 0,
+    VK_IMAGE_COMPRESSION_FIXED_RATE_DEFAULT_EXT = 0x00000001,
+    VK_IMAGE_COMPRESSION_FIXED_RATE_EXPLICIT_EXT = 0x00000002,
+    VK_IMAGE_COMPRESSION_DISABLED_EXT = 0x00000004,
+    VK_IMAGE_COMPRESSION_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkImageCompressionFlagBitsEXT;
+typedef VkFlags VkImageCompressionFlagsEXT;
+
+typedef enum VkImageCompressionFixedRateFlagBitsEXT {
+    VK_IMAGE_COMPRESSION_FIXED_RATE_NONE_EXT = 0,
+    VK_IMAGE_COMPRESSION_FIXED_RATE_1BPC_BIT_EXT = 0x00000001,
+    VK_IMAGE_COMPRESSION_FIXED_RATE_2BPC_BIT_EXT = 0x00000002,
+    VK_IMAGE_COMPRESSION_FIXED_RATE_3BPC_BIT_EXT = 0x00000004,
+    VK_IMAGE_COMPRESSION_FIXED_RATE_4BPC_BIT_EXT = 0x00000008,
+    VK_IMAGE_COMPRESSION_FIXED_RATE_5BPC_BIT_EXT = 0x00000010,
+    VK_IMAGE_COMPRESSION_FIXED_RATE_6BPC_BIT_EXT = 0x00000020,
+    VK_IMAGE_COMPRESSION_FIXED_RATE_7BPC_BIT_EXT = 0x00000040,
+    VK_IMAGE_COMPRESSION_FIXED_RATE_8BPC_BIT_EXT = 0x00000080,
+    VK_IMAGE_COMPRESSION_FIXED_RATE_9BPC_BIT_EXT = 0x00000100,
+    VK_IMAGE_COMPRESSION_FIXED_RATE_10BPC_BIT_EXT = 0x00000200,
+    VK_IMAGE_COMPRESSION_FIXED_RATE_11BPC_BIT_EXT = 0x00000400,
+    VK_IMAGE_COMPRESSION_FIXED_RATE_12BPC_BIT_EXT = 0x00000800,
+    VK_IMAGE_COMPRESSION_FIXED_RATE_13BPC_BIT_EXT = 0x00001000,
+    VK_IMAGE_COMPRESSION_FIXED_RATE_14BPC_BIT_EXT = 0x00002000,
+    VK_IMAGE_COMPRESSION_FIXED_RATE_15BPC_BIT_EXT = 0x00004000,
+    VK_IMAGE_COMPRESSION_FIXED_RATE_16BPC_BIT_EXT = 0x00008000,
+    VK_IMAGE_COMPRESSION_FIXED_RATE_17BPC_BIT_EXT = 0x00010000,
+    VK_IMAGE_COMPRESSION_FIXED_RATE_18BPC_BIT_EXT = 0x00020000,
+    VK_IMAGE_COMPRESSION_FIXED_RATE_19BPC_BIT_EXT = 0x00040000,
+    VK_IMAGE_COMPRESSION_FIXED_RATE_20BPC_BIT_EXT = 0x00080000,
+    VK_IMAGE_COMPRESSION_FIXED_RATE_21BPC_BIT_EXT = 0x00100000,
+    VK_IMAGE_COMPRESSION_FIXED_RATE_22BPC_BIT_EXT = 0x00200000,
+    VK_IMAGE_COMPRESSION_FIXED_RATE_23BPC_BIT_EXT = 0x00400000,
+    VK_IMAGE_COMPRESSION_FIXED_RATE_24BPC_BIT_EXT = 0x00800000,
+    VK_IMAGE_COMPRESSION_FIXED_RATE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkImageCompressionFixedRateFlagBitsEXT;
+typedef VkFlags VkImageCompressionFixedRateFlagsEXT;
+typedef struct VkPhysicalDeviceImageCompressionControlFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           imageCompressionControl;
+} VkPhysicalDeviceImageCompressionControlFeaturesEXT;
+
+typedef struct VkImageCompressionControlEXT {
+    VkStructureType                         sType;
+    const void*                             pNext;
+    VkImageCompressionFlagsEXT              flags;
+    uint32_t                                compressionControlPlaneCount;
+    VkImageCompressionFixedRateFlagsEXT*    pFixedRateFlags;
+} VkImageCompressionControlEXT;
+
+typedef struct VkSubresourceLayout2EXT {
+    VkStructureType        sType;
+    void*                  pNext;
+    VkSubresourceLayout    subresourceLayout;
+} VkSubresourceLayout2EXT;
+
+typedef struct VkImageSubresource2EXT {
+    VkStructureType       sType;
+    void*                 pNext;
+    VkImageSubresource    imageSubresource;
+} VkImageSubresource2EXT;
+
+typedef struct VkImageCompressionPropertiesEXT {
+    VkStructureType                        sType;
+    void*                                  pNext;
+    VkImageCompressionFlagsEXT             imageCompressionFlags;
+    VkImageCompressionFixedRateFlagsEXT    imageCompressionFixedRateFlags;
+} VkImageCompressionPropertiesEXT;
+
+typedef void (VKAPI_PTR *PFN_vkGetImageSubresourceLayout2EXT)(VkDevice device, VkImage image, const VkImageSubresource2EXT* pSubresource, VkSubresourceLayout2EXT* pLayout);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkGetImageSubresourceLayout2EXT(
+    VkDevice                                    device,
+    VkImage                                     image,
+    const VkImageSubresource2EXT*               pSubresource,
+    VkSubresourceLayout2EXT*                    pLayout);
+#endif
+
+
+#define VK_EXT_attachment_feedback_loop_layout 1
+#define VK_EXT_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_SPEC_VERSION 2
+#define VK_EXT_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_EXTENSION_NAME "VK_EXT_attachment_feedback_loop_layout"
+typedef struct VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           attachmentFeedbackLoopLayout;
+} VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT;
+
+
+
+#define VK_EXT_4444_formats 1
+#define VK_EXT_4444_FORMATS_SPEC_VERSION  1
+#define VK_EXT_4444_FORMATS_EXTENSION_NAME "VK_EXT_4444_formats"
+typedef struct VkPhysicalDevice4444FormatsFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           formatA4R4G4B4;
+    VkBool32           formatA4B4G4R4;
+} VkPhysicalDevice4444FormatsFeaturesEXT;
+
+
+
+#define VK_EXT_device_fault 1
+#define VK_EXT_DEVICE_FAULT_SPEC_VERSION  1
+#define VK_EXT_DEVICE_FAULT_EXTENSION_NAME "VK_EXT_device_fault"
+
+typedef enum VkDeviceFaultAddressTypeEXT {
+    VK_DEVICE_FAULT_ADDRESS_TYPE_NONE_EXT = 0,
+    VK_DEVICE_FAULT_ADDRESS_TYPE_READ_INVALID_EXT = 1,
+    VK_DEVICE_FAULT_ADDRESS_TYPE_WRITE_INVALID_EXT = 2,
+    VK_DEVICE_FAULT_ADDRESS_TYPE_EXECUTE_INVALID_EXT = 3,
+    VK_DEVICE_FAULT_ADDRESS_TYPE_INSTRUCTION_POINTER_UNKNOWN_EXT = 4,
+    VK_DEVICE_FAULT_ADDRESS_TYPE_INSTRUCTION_POINTER_INVALID_EXT = 5,
+    VK_DEVICE_FAULT_ADDRESS_TYPE_INSTRUCTION_POINTER_FAULT_EXT = 6,
+    VK_DEVICE_FAULT_ADDRESS_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkDeviceFaultAddressTypeEXT;
+
+typedef enum VkDeviceFaultVendorBinaryHeaderVersionEXT {
+    VK_DEVICE_FAULT_VENDOR_BINARY_HEADER_VERSION_ONE_EXT = 1,
+    VK_DEVICE_FAULT_VENDOR_BINARY_HEADER_VERSION_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkDeviceFaultVendorBinaryHeaderVersionEXT;
+typedef struct VkPhysicalDeviceFaultFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           deviceFault;
+    VkBool32           deviceFaultVendorBinary;
+} VkPhysicalDeviceFaultFeaturesEXT;
+
+typedef struct VkDeviceFaultCountsEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           addressInfoCount;
+    uint32_t           vendorInfoCount;
+    VkDeviceSize       vendorBinarySize;
+} VkDeviceFaultCountsEXT;
+
+typedef struct VkDeviceFaultAddressInfoEXT {
+    VkDeviceFaultAddressTypeEXT    addressType;
+    VkDeviceAddress                reportedAddress;
+    VkDeviceSize                   addressPrecision;
+} VkDeviceFaultAddressInfoEXT;
+
+typedef struct VkDeviceFaultVendorInfoEXT {
+    char        description[VK_MAX_DESCRIPTION_SIZE];
+    uint64_t    vendorFaultCode;
+    uint64_t    vendorFaultData;
+} VkDeviceFaultVendorInfoEXT;
+
+typedef struct VkDeviceFaultInfoEXT {
+    VkStructureType                 sType;
+    void*                           pNext;
+    char                            description[VK_MAX_DESCRIPTION_SIZE];
+    VkDeviceFaultAddressInfoEXT*    pAddressInfos;
+    VkDeviceFaultVendorInfoEXT*     pVendorInfos;
+    void*                           pVendorBinaryData;
+} VkDeviceFaultInfoEXT;
+
+typedef struct VkDeviceFaultVendorBinaryHeaderVersionOneEXT {
+    uint32_t                                     headerSize;
+    VkDeviceFaultVendorBinaryHeaderVersionEXT    headerVersion;
+    uint32_t                                     vendorID;
+    uint32_t                                     deviceID;
+    uint32_t                                     driverVersion;
+    uint8_t                                      pipelineCacheUUID[VK_UUID_SIZE];
+    uint32_t                                     applicationNameOffset;
+    uint32_t                                     applicationVersion;
+    uint32_t                                     engineNameOffset;
+} VkDeviceFaultVendorBinaryHeaderVersionOneEXT;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetDeviceFaultInfoEXT)(VkDevice device, VkDeviceFaultCountsEXT* pFaultCounts, VkDeviceFaultInfoEXT* pFaultInfo);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceFaultInfoEXT(
+    VkDevice                                    device,
+    VkDeviceFaultCountsEXT*                     pFaultCounts,
+    VkDeviceFaultInfoEXT*                       pFaultInfo);
+#endif
+
+
+#define VK_ARM_rasterization_order_attachment_access 1
+#define VK_ARM_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_SPEC_VERSION 1
+#define VK_ARM_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_EXTENSION_NAME "VK_ARM_rasterization_order_attachment_access"
+typedef struct VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           rasterizationOrderColorAttachmentAccess;
+    VkBool32           rasterizationOrderDepthAttachmentAccess;
+    VkBool32           rasterizationOrderStencilAttachmentAccess;
+} VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT;
+
+typedef VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM;
+
+
+
+#define VK_EXT_rgba10x6_formats 1
+#define VK_EXT_RGBA10X6_FORMATS_SPEC_VERSION 1
+#define VK_EXT_RGBA10X6_FORMATS_EXTENSION_NAME "VK_EXT_rgba10x6_formats"
+typedef struct VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           formatRgba10x6WithoutYCbCrSampler;
+} VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT;
+
+
+
+#define VK_VALVE_mutable_descriptor_type 1
+#define VK_VALVE_MUTABLE_DESCRIPTOR_TYPE_SPEC_VERSION 1
+#define VK_VALVE_MUTABLE_DESCRIPTOR_TYPE_EXTENSION_NAME "VK_VALVE_mutable_descriptor_type"
+typedef struct VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           mutableDescriptorType;
+} VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT;
+
+typedef VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE;
+
+typedef struct VkMutableDescriptorTypeListEXT {
+    uint32_t                   descriptorTypeCount;
+    const VkDescriptorType*    pDescriptorTypes;
+} VkMutableDescriptorTypeListEXT;
+
+typedef VkMutableDescriptorTypeListEXT VkMutableDescriptorTypeListVALVE;
+
+typedef struct VkMutableDescriptorTypeCreateInfoEXT {
+    VkStructureType                          sType;
+    const void*                              pNext;
+    uint32_t                                 mutableDescriptorTypeListCount;
+    const VkMutableDescriptorTypeListEXT*    pMutableDescriptorTypeLists;
+} VkMutableDescriptorTypeCreateInfoEXT;
+
+typedef VkMutableDescriptorTypeCreateInfoEXT VkMutableDescriptorTypeCreateInfoVALVE;
+
+
+
+#define VK_EXT_vertex_input_dynamic_state 1
+#define VK_EXT_VERTEX_INPUT_DYNAMIC_STATE_SPEC_VERSION 2
+#define VK_EXT_VERTEX_INPUT_DYNAMIC_STATE_EXTENSION_NAME "VK_EXT_vertex_input_dynamic_state"
+typedef struct VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           vertexInputDynamicState;
+} VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT;
+
+typedef struct VkVertexInputBindingDescription2EXT {
+    VkStructureType      sType;
+    void*                pNext;
+    uint32_t             binding;
+    uint32_t             stride;
+    VkVertexInputRate    inputRate;
+    uint32_t             divisor;
+} VkVertexInputBindingDescription2EXT;
+
+typedef struct VkVertexInputAttributeDescription2EXT {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           location;
+    uint32_t           binding;
+    VkFormat           format;
+    uint32_t           offset;
+} VkVertexInputAttributeDescription2EXT;
+
+typedef void (VKAPI_PTR *PFN_vkCmdSetVertexInputEXT)(VkCommandBuffer commandBuffer, uint32_t vertexBindingDescriptionCount, const VkVertexInputBindingDescription2EXT* pVertexBindingDescriptions, uint32_t vertexAttributeDescriptionCount, const VkVertexInputAttributeDescription2EXT* pVertexAttributeDescriptions);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdSetVertexInputEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    vertexBindingDescriptionCount,
+    const VkVertexInputBindingDescription2EXT*  pVertexBindingDescriptions,
+    uint32_t                                    vertexAttributeDescriptionCount,
+    const VkVertexInputAttributeDescription2EXT* pVertexAttributeDescriptions);
+#endif
+
+
+#define VK_EXT_physical_device_drm 1
+#define VK_EXT_PHYSICAL_DEVICE_DRM_SPEC_VERSION 1
+#define VK_EXT_PHYSICAL_DEVICE_DRM_EXTENSION_NAME "VK_EXT_physical_device_drm"
+typedef struct VkPhysicalDeviceDrmPropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           hasPrimary;
+    VkBool32           hasRender;
+    int64_t            primaryMajor;
+    int64_t            primaryMinor;
+    int64_t            renderMajor;
+    int64_t            renderMinor;
+} VkPhysicalDeviceDrmPropertiesEXT;
+
+
+
+#define VK_EXT_device_address_binding_report 1
+#define VK_EXT_DEVICE_ADDRESS_BINDING_REPORT_SPEC_VERSION 1
+#define VK_EXT_DEVICE_ADDRESS_BINDING_REPORT_EXTENSION_NAME "VK_EXT_device_address_binding_report"
+
+typedef enum VkDeviceAddressBindingTypeEXT {
+    VK_DEVICE_ADDRESS_BINDING_TYPE_BIND_EXT = 0,
+    VK_DEVICE_ADDRESS_BINDING_TYPE_UNBIND_EXT = 1,
+    VK_DEVICE_ADDRESS_BINDING_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkDeviceAddressBindingTypeEXT;
+
+typedef enum VkDeviceAddressBindingFlagBitsEXT {
+    VK_DEVICE_ADDRESS_BINDING_INTERNAL_OBJECT_BIT_EXT = 0x00000001,
+    VK_DEVICE_ADDRESS_BINDING_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkDeviceAddressBindingFlagBitsEXT;
+typedef VkFlags VkDeviceAddressBindingFlagsEXT;
+typedef struct VkPhysicalDeviceAddressBindingReportFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           reportAddressBinding;
+} VkPhysicalDeviceAddressBindingReportFeaturesEXT;
+
+typedef struct VkDeviceAddressBindingCallbackDataEXT {
+    VkStructureType                   sType;
+    void*                             pNext;
+    VkDeviceAddressBindingFlagsEXT    flags;
+    VkDeviceAddress                   baseAddress;
+    VkDeviceSize                      size;
+    VkDeviceAddressBindingTypeEXT     bindingType;
+} VkDeviceAddressBindingCallbackDataEXT;
+
+
+
+#define VK_EXT_depth_clip_control 1
+#define VK_EXT_DEPTH_CLIP_CONTROL_SPEC_VERSION 1
+#define VK_EXT_DEPTH_CLIP_CONTROL_EXTENSION_NAME "VK_EXT_depth_clip_control"
+typedef struct VkPhysicalDeviceDepthClipControlFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           depthClipControl;
+} VkPhysicalDeviceDepthClipControlFeaturesEXT;
+
+typedef struct VkPipelineViewportDepthClipControlCreateInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkBool32           negativeOneToOne;
+} VkPipelineViewportDepthClipControlCreateInfoEXT;
+
+
+
+#define VK_EXT_primitive_topology_list_restart 1
+#define VK_EXT_PRIMITIVE_TOPOLOGY_LIST_RESTART_SPEC_VERSION 1
+#define VK_EXT_PRIMITIVE_TOPOLOGY_LIST_RESTART_EXTENSION_NAME "VK_EXT_primitive_topology_list_restart"
+typedef struct VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           primitiveTopologyListRestart;
+    VkBool32           primitiveTopologyPatchListRestart;
+} VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT;
+
+
+
+#define VK_HUAWEI_subpass_shading 1
+#define VK_HUAWEI_SUBPASS_SHADING_SPEC_VERSION 2
+#define VK_HUAWEI_SUBPASS_SHADING_EXTENSION_NAME "VK_HUAWEI_subpass_shading"
+typedef struct VkSubpassShadingPipelineCreateInfoHUAWEI {
+    VkStructureType    sType;
+    void*              pNext;
+    VkRenderPass       renderPass;
+    uint32_t           subpass;
+} VkSubpassShadingPipelineCreateInfoHUAWEI;
+
+typedef struct VkPhysicalDeviceSubpassShadingFeaturesHUAWEI {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           subpassShading;
+} VkPhysicalDeviceSubpassShadingFeaturesHUAWEI;
+
+typedef struct VkPhysicalDeviceSubpassShadingPropertiesHUAWEI {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           maxSubpassShadingWorkgroupSizeAspectRatio;
+} VkPhysicalDeviceSubpassShadingPropertiesHUAWEI;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI)(VkDevice device, VkRenderPass renderpass, VkExtent2D* pMaxWorkgroupSize);
+typedef void (VKAPI_PTR *PFN_vkCmdSubpassShadingHUAWEI)(VkCommandBuffer commandBuffer);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI(
+    VkDevice                                    device,
+    VkRenderPass                                renderpass,
+    VkExtent2D*                                 pMaxWorkgroupSize);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSubpassShadingHUAWEI(
+    VkCommandBuffer                             commandBuffer);
+#endif
+
+
+#define VK_HUAWEI_invocation_mask 1
+#define VK_HUAWEI_INVOCATION_MASK_SPEC_VERSION 1
+#define VK_HUAWEI_INVOCATION_MASK_EXTENSION_NAME "VK_HUAWEI_invocation_mask"
+typedef struct VkPhysicalDeviceInvocationMaskFeaturesHUAWEI {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           invocationMask;
+} VkPhysicalDeviceInvocationMaskFeaturesHUAWEI;
+
+typedef void (VKAPI_PTR *PFN_vkCmdBindInvocationMaskHUAWEI)(VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdBindInvocationMaskHUAWEI(
+    VkCommandBuffer                             commandBuffer,
+    VkImageView                                 imageView,
+    VkImageLayout                               imageLayout);
+#endif
+
+
+#define VK_NV_external_memory_rdma 1
+typedef void* VkRemoteAddressNV;
+#define VK_NV_EXTERNAL_MEMORY_RDMA_SPEC_VERSION 1
+#define VK_NV_EXTERNAL_MEMORY_RDMA_EXTENSION_NAME "VK_NV_external_memory_rdma"
+typedef struct VkMemoryGetRemoteAddressInfoNV {
+    VkStructureType                       sType;
+    const void*                           pNext;
+    VkDeviceMemory                        memory;
+    VkExternalMemoryHandleTypeFlagBits    handleType;
+} VkMemoryGetRemoteAddressInfoNV;
+
+typedef struct VkPhysicalDeviceExternalMemoryRDMAFeaturesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           externalMemoryRDMA;
+} VkPhysicalDeviceExternalMemoryRDMAFeaturesNV;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryRemoteAddressNV)(VkDevice device, const VkMemoryGetRemoteAddressInfoNV* pMemoryGetRemoteAddressInfo, VkRemoteAddressNV* pAddress);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryRemoteAddressNV(
+    VkDevice                                    device,
+    const VkMemoryGetRemoteAddressInfoNV*       pMemoryGetRemoteAddressInfo,
+    VkRemoteAddressNV*                          pAddress);
+#endif
+
+
+#define VK_EXT_pipeline_properties 1
+#define VK_EXT_PIPELINE_PROPERTIES_SPEC_VERSION 1
+#define VK_EXT_PIPELINE_PROPERTIES_EXTENSION_NAME "VK_EXT_pipeline_properties"
+typedef VkPipelineInfoKHR VkPipelineInfoEXT;
+
+typedef struct VkPipelinePropertiesIdentifierEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    uint8_t            pipelineIdentifier[VK_UUID_SIZE];
+} VkPipelinePropertiesIdentifierEXT;
+
+typedef struct VkPhysicalDevicePipelinePropertiesFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           pipelinePropertiesIdentifier;
+} VkPhysicalDevicePipelinePropertiesFeaturesEXT;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetPipelinePropertiesEXT)(VkDevice device, const VkPipelineInfoEXT* pPipelineInfo, VkBaseOutStructure* pPipelineProperties);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelinePropertiesEXT(
+    VkDevice                                    device,
+    const VkPipelineInfoEXT*                    pPipelineInfo,
+    VkBaseOutStructure*                         pPipelineProperties);
+#endif
+
+
+#define VK_EXT_multisampled_render_to_single_sampled 1
+#define VK_EXT_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_SPEC_VERSION 1
+#define VK_EXT_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_EXTENSION_NAME "VK_EXT_multisampled_render_to_single_sampled"
+typedef struct VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           multisampledRenderToSingleSampled;
+} VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT;
+
+typedef struct VkSubpassResolvePerformanceQueryEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           optimal;
+} VkSubpassResolvePerformanceQueryEXT;
+
+typedef struct VkMultisampledRenderToSingleSampledInfoEXT {
+    VkStructureType          sType;
+    const void*              pNext;
+    VkBool32                 multisampledRenderToSingleSampledEnable;
+    VkSampleCountFlagBits    rasterizationSamples;
+} VkMultisampledRenderToSingleSampledInfoEXT;
+
+
+
+#define VK_EXT_extended_dynamic_state2 1
+#define VK_EXT_EXTENDED_DYNAMIC_STATE_2_SPEC_VERSION 1
+#define VK_EXT_EXTENDED_DYNAMIC_STATE_2_EXTENSION_NAME "VK_EXT_extended_dynamic_state2"
+typedef struct VkPhysicalDeviceExtendedDynamicState2FeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           extendedDynamicState2;
+    VkBool32           extendedDynamicState2LogicOp;
+    VkBool32           extendedDynamicState2PatchControlPoints;
+} VkPhysicalDeviceExtendedDynamicState2FeaturesEXT;
+
+typedef void (VKAPI_PTR *PFN_vkCmdSetPatchControlPointsEXT)(VkCommandBuffer commandBuffer, uint32_t patchControlPoints);
+typedef void (VKAPI_PTR *PFN_vkCmdSetRasterizerDiscardEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 rasterizerDiscardEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBiasEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetLogicOpEXT)(VkCommandBuffer commandBuffer, VkLogicOp logicOp);
+typedef void (VKAPI_PTR *PFN_vkCmdSetPrimitiveRestartEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 primitiveRestartEnable);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdSetPatchControlPointsEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    patchControlPoints);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetRasterizerDiscardEnableEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkBool32                                    rasterizerDiscardEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBiasEnableEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkBool32                                    depthBiasEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetLogicOpEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkLogicOp                                   logicOp);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetPrimitiveRestartEnableEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkBool32                                    primitiveRestartEnable);
+#endif
+
+
+#define VK_EXT_color_write_enable 1
+#define VK_EXT_COLOR_WRITE_ENABLE_SPEC_VERSION 1
+#define VK_EXT_COLOR_WRITE_ENABLE_EXTENSION_NAME "VK_EXT_color_write_enable"
+typedef struct VkPhysicalDeviceColorWriteEnableFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           colorWriteEnable;
+} VkPhysicalDeviceColorWriteEnableFeaturesEXT;
+
+typedef struct VkPipelineColorWriteCreateInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           attachmentCount;
+    const VkBool32*    pColorWriteEnables;
+} VkPipelineColorWriteCreateInfoEXT;
+
+typedef void                                    (VKAPI_PTR *PFN_vkCmdSetColorWriteEnableEXT)(VkCommandBuffer       commandBuffer, uint32_t                                attachmentCount, const VkBool32*   pColorWriteEnables);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void                                    VKAPI_CALL vkCmdSetColorWriteEnableEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    attachmentCount,
+    const VkBool32*                             pColorWriteEnables);
+#endif
+
+
+#define VK_EXT_primitives_generated_query 1
+#define VK_EXT_PRIMITIVES_GENERATED_QUERY_SPEC_VERSION 1
+#define VK_EXT_PRIMITIVES_GENERATED_QUERY_EXTENSION_NAME "VK_EXT_primitives_generated_query"
+typedef struct VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           primitivesGeneratedQuery;
+    VkBool32           primitivesGeneratedQueryWithRasterizerDiscard;
+    VkBool32           primitivesGeneratedQueryWithNonZeroStreams;
+} VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT;
+
+
+
+#define VK_EXT_global_priority_query 1
+#define VK_EXT_GLOBAL_PRIORITY_QUERY_SPEC_VERSION 1
+#define VK_EXT_GLOBAL_PRIORITY_QUERY_EXTENSION_NAME "VK_EXT_global_priority_query"
+#define VK_MAX_GLOBAL_PRIORITY_SIZE_EXT   VK_MAX_GLOBAL_PRIORITY_SIZE_KHR
+typedef VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR VkPhysicalDeviceGlobalPriorityQueryFeaturesEXT;
+
+typedef VkQueueFamilyGlobalPriorityPropertiesKHR VkQueueFamilyGlobalPriorityPropertiesEXT;
+
+
+
+#define VK_EXT_image_view_min_lod 1
+#define VK_EXT_IMAGE_VIEW_MIN_LOD_SPEC_VERSION 1
+#define VK_EXT_IMAGE_VIEW_MIN_LOD_EXTENSION_NAME "VK_EXT_image_view_min_lod"
+typedef struct VkPhysicalDeviceImageViewMinLodFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           minLod;
+} VkPhysicalDeviceImageViewMinLodFeaturesEXT;
+
+typedef struct VkImageViewMinLodCreateInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    float              minLod;
+} VkImageViewMinLodCreateInfoEXT;
+
+
+
+#define VK_EXT_multi_draw 1
+#define VK_EXT_MULTI_DRAW_SPEC_VERSION    1
+#define VK_EXT_MULTI_DRAW_EXTENSION_NAME  "VK_EXT_multi_draw"
+typedef struct VkPhysicalDeviceMultiDrawFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           multiDraw;
+} VkPhysicalDeviceMultiDrawFeaturesEXT;
+
+typedef struct VkPhysicalDeviceMultiDrawPropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           maxMultiDrawCount;
+} VkPhysicalDeviceMultiDrawPropertiesEXT;
+
+typedef struct VkMultiDrawInfoEXT {
+    uint32_t    firstVertex;
+    uint32_t    vertexCount;
+} VkMultiDrawInfoEXT;
+
+typedef struct VkMultiDrawIndexedInfoEXT {
+    uint32_t    firstIndex;
+    uint32_t    indexCount;
+    int32_t     vertexOffset;
+} VkMultiDrawIndexedInfoEXT;
+
+typedef void (VKAPI_PTR *PFN_vkCmdDrawMultiEXT)(VkCommandBuffer commandBuffer, uint32_t drawCount, const VkMultiDrawInfoEXT* pVertexInfo, uint32_t instanceCount, uint32_t firstInstance, uint32_t stride);
+typedef void (VKAPI_PTR *PFN_vkCmdDrawMultiIndexedEXT)(VkCommandBuffer commandBuffer, uint32_t drawCount, const VkMultiDrawIndexedInfoEXT* pIndexInfo, uint32_t instanceCount, uint32_t firstInstance, uint32_t stride, const int32_t* pVertexOffset);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdDrawMultiEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    drawCount,
+    const VkMultiDrawInfoEXT*                   pVertexInfo,
+    uint32_t                                    instanceCount,
+    uint32_t                                    firstInstance,
+    uint32_t                                    stride);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDrawMultiIndexedEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    drawCount,
+    const VkMultiDrawIndexedInfoEXT*            pIndexInfo,
+    uint32_t                                    instanceCount,
+    uint32_t                                    firstInstance,
+    uint32_t                                    stride,
+    const int32_t*                              pVertexOffset);
+#endif
+
+
+#define VK_EXT_image_2d_view_of_3d 1
+#define VK_EXT_IMAGE_2D_VIEW_OF_3D_SPEC_VERSION 1
+#define VK_EXT_IMAGE_2D_VIEW_OF_3D_EXTENSION_NAME "VK_EXT_image_2d_view_of_3d"
+typedef struct VkPhysicalDeviceImage2DViewOf3DFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           image2DViewOf3D;
+    VkBool32           sampler2DViewOf3D;
+} VkPhysicalDeviceImage2DViewOf3DFeaturesEXT;
+
+
+
+#define VK_EXT_opacity_micromap 1
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkMicromapEXT)
+#define VK_EXT_OPACITY_MICROMAP_SPEC_VERSION 2
+#define VK_EXT_OPACITY_MICROMAP_EXTENSION_NAME "VK_EXT_opacity_micromap"
+
+typedef enum VkMicromapTypeEXT {
+    VK_MICROMAP_TYPE_OPACITY_MICROMAP_EXT = 0,
+    VK_MICROMAP_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkMicromapTypeEXT;
+
+typedef enum VkBuildMicromapModeEXT {
+    VK_BUILD_MICROMAP_MODE_BUILD_EXT = 0,
+    VK_BUILD_MICROMAP_MODE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkBuildMicromapModeEXT;
+
+typedef enum VkCopyMicromapModeEXT {
+    VK_COPY_MICROMAP_MODE_CLONE_EXT = 0,
+    VK_COPY_MICROMAP_MODE_SERIALIZE_EXT = 1,
+    VK_COPY_MICROMAP_MODE_DESERIALIZE_EXT = 2,
+    VK_COPY_MICROMAP_MODE_COMPACT_EXT = 3,
+    VK_COPY_MICROMAP_MODE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkCopyMicromapModeEXT;
+
+typedef enum VkOpacityMicromapFormatEXT {
+    VK_OPACITY_MICROMAP_FORMAT_2_STATE_EXT = 1,
+    VK_OPACITY_MICROMAP_FORMAT_4_STATE_EXT = 2,
+    VK_OPACITY_MICROMAP_FORMAT_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkOpacityMicromapFormatEXT;
+
+typedef enum VkOpacityMicromapSpecialIndexEXT {
+    VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_TRANSPARENT_EXT = -1,
+    VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_OPAQUE_EXT = -2,
+    VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_UNKNOWN_TRANSPARENT_EXT = -3,
+    VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_UNKNOWN_OPAQUE_EXT = -4,
+    VK_OPACITY_MICROMAP_SPECIAL_INDEX_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkOpacityMicromapSpecialIndexEXT;
+
+typedef enum VkAccelerationStructureCompatibilityKHR {
+    VK_ACCELERATION_STRUCTURE_COMPATIBILITY_COMPATIBLE_KHR = 0,
+    VK_ACCELERATION_STRUCTURE_COMPATIBILITY_INCOMPATIBLE_KHR = 1,
+    VK_ACCELERATION_STRUCTURE_COMPATIBILITY_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkAccelerationStructureCompatibilityKHR;
+
+typedef enum VkAccelerationStructureBuildTypeKHR {
+    VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_KHR = 0,
+    VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR = 1,
+    VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_OR_DEVICE_KHR = 2,
+    VK_ACCELERATION_STRUCTURE_BUILD_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkAccelerationStructureBuildTypeKHR;
+
+typedef enum VkBuildMicromapFlagBitsEXT {
+    VK_BUILD_MICROMAP_PREFER_FAST_TRACE_BIT_EXT = 0x00000001,
+    VK_BUILD_MICROMAP_PREFER_FAST_BUILD_BIT_EXT = 0x00000002,
+    VK_BUILD_MICROMAP_ALLOW_COMPACTION_BIT_EXT = 0x00000004,
+    VK_BUILD_MICROMAP_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkBuildMicromapFlagBitsEXT;
+typedef VkFlags VkBuildMicromapFlagsEXT;
+
+typedef enum VkMicromapCreateFlagBitsEXT {
+    VK_MICROMAP_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT = 0x00000001,
+    VK_MICROMAP_CREATE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkMicromapCreateFlagBitsEXT;
+typedef VkFlags VkMicromapCreateFlagsEXT;
+typedef struct VkMicromapUsageEXT {
+    uint32_t    count;
+    uint32_t    subdivisionLevel;
+    uint32_t    format;
+} VkMicromapUsageEXT;
+
+typedef union VkDeviceOrHostAddressKHR {
+    VkDeviceAddress    deviceAddress;
+    void*              hostAddress;
+} VkDeviceOrHostAddressKHR;
+
+typedef struct VkMicromapBuildInfoEXT {
+    VkStructureType                     sType;
+    const void*                         pNext;
+    VkMicromapTypeEXT                   type;
+    VkBuildMicromapFlagsEXT             flags;
+    VkBuildMicromapModeEXT              mode;
+    VkMicromapEXT                       dstMicromap;
+    uint32_t                            usageCountsCount;
+    const VkMicromapUsageEXT*           pUsageCounts;
+    const VkMicromapUsageEXT* const*    ppUsageCounts;
+    VkDeviceOrHostAddressConstKHR       data;
+    VkDeviceOrHostAddressKHR            scratchData;
+    VkDeviceOrHostAddressConstKHR       triangleArray;
+    VkDeviceSize                        triangleArrayStride;
+} VkMicromapBuildInfoEXT;
+
+typedef struct VkMicromapCreateInfoEXT {
+    VkStructureType             sType;
+    const void*                 pNext;
+    VkMicromapCreateFlagsEXT    createFlags;
+    VkBuffer                    buffer;
+    VkDeviceSize                offset;
+    VkDeviceSize                size;
+    VkMicromapTypeEXT           type;
+    VkDeviceAddress             deviceAddress;
+} VkMicromapCreateInfoEXT;
+
+typedef struct VkPhysicalDeviceOpacityMicromapFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           micromap;
+    VkBool32           micromapCaptureReplay;
+    VkBool32           micromapHostCommands;
+} VkPhysicalDeviceOpacityMicromapFeaturesEXT;
+
+typedef struct VkPhysicalDeviceOpacityMicromapPropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           maxOpacity2StateSubdivisionLevel;
+    uint32_t           maxOpacity4StateSubdivisionLevel;
+} VkPhysicalDeviceOpacityMicromapPropertiesEXT;
+
+typedef struct VkMicromapVersionInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    const uint8_t*     pVersionData;
+} VkMicromapVersionInfoEXT;
+
+typedef struct VkCopyMicromapToMemoryInfoEXT {
+    VkStructureType             sType;
+    const void*                 pNext;
+    VkMicromapEXT               src;
+    VkDeviceOrHostAddressKHR    dst;
+    VkCopyMicromapModeEXT       mode;
+} VkCopyMicromapToMemoryInfoEXT;
+
+typedef struct VkCopyMemoryToMicromapInfoEXT {
+    VkStructureType                  sType;
+    const void*                      pNext;
+    VkDeviceOrHostAddressConstKHR    src;
+    VkMicromapEXT                    dst;
+    VkCopyMicromapModeEXT            mode;
+} VkCopyMemoryToMicromapInfoEXT;
+
+typedef struct VkCopyMicromapInfoEXT {
+    VkStructureType          sType;
+    const void*              pNext;
+    VkMicromapEXT            src;
+    VkMicromapEXT            dst;
+    VkCopyMicromapModeEXT    mode;
+} VkCopyMicromapInfoEXT;
+
+typedef struct VkMicromapBuildSizesInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkDeviceSize       micromapSize;
+    VkDeviceSize       buildScratchSize;
+    VkBool32           discardable;
+} VkMicromapBuildSizesInfoEXT;
+
+typedef struct VkAccelerationStructureTrianglesOpacityMicromapEXT {
+    VkStructureType                     sType;
+    void*                               pNext;
+    VkIndexType                         indexType;
+    VkDeviceOrHostAddressConstKHR       indexBuffer;
+    VkDeviceSize                        indexStride;
+    uint32_t                            baseTriangle;
+    uint32_t                            usageCountsCount;
+    const VkMicromapUsageEXT*           pUsageCounts;
+    const VkMicromapUsageEXT* const*    ppUsageCounts;
+    VkMicromapEXT                       micromap;
+} VkAccelerationStructureTrianglesOpacityMicromapEXT;
+
+typedef struct VkMicromapTriangleEXT {
+    uint32_t    dataOffset;
+    uint16_t    subdivisionLevel;
+    uint16_t    format;
+} VkMicromapTriangleEXT;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateMicromapEXT)(VkDevice                                           device, const VkMicromapCreateInfoEXT*        pCreateInfo, const VkAllocationCallbacks*       pAllocator, VkMicromapEXT*                        pMicromap);
+typedef void (VKAPI_PTR *PFN_vkDestroyMicromapEXT)(VkDevice device, VkMicromapEXT micromap, const VkAllocationCallbacks* pAllocator);
+typedef void (VKAPI_PTR *PFN_vkCmdBuildMicromapsEXT)(VkCommandBuffer                                    commandBuffer, uint32_t infoCount, const VkMicromapBuildInfoEXT* pInfos);
+typedef VkResult (VKAPI_PTR *PFN_vkBuildMicromapsEXT)(VkDevice                                           device, VkDeferredOperationKHR deferredOperation, uint32_t infoCount, const VkMicromapBuildInfoEXT* pInfos);
+typedef VkResult (VKAPI_PTR *PFN_vkCopyMicromapEXT)(VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyMicromapInfoEXT* pInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkCopyMicromapToMemoryEXT)(VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyMicromapToMemoryInfoEXT* pInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkCopyMemoryToMicromapEXT)(VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyMemoryToMicromapInfoEXT* pInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkWriteMicromapsPropertiesEXT)(VkDevice device, uint32_t micromapCount, const VkMicromapEXT* pMicromaps, VkQueryType  queryType, size_t       dataSize, void* pData, size_t stride);
+typedef void (VKAPI_PTR *PFN_vkCmdCopyMicromapEXT)(VkCommandBuffer commandBuffer, const VkCopyMicromapInfoEXT* pInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdCopyMicromapToMemoryEXT)(VkCommandBuffer commandBuffer, const VkCopyMicromapToMemoryInfoEXT* pInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdCopyMemoryToMicromapEXT)(VkCommandBuffer commandBuffer, const VkCopyMemoryToMicromapInfoEXT* pInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdWriteMicromapsPropertiesEXT)(VkCommandBuffer commandBuffer, uint32_t micromapCount, const VkMicromapEXT* pMicromaps, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery);
+typedef void (VKAPI_PTR *PFN_vkGetDeviceMicromapCompatibilityEXT)(VkDevice device, const VkMicromapVersionInfoEXT* pVersionInfo, VkAccelerationStructureCompatibilityKHR* pCompatibility);
+typedef void (VKAPI_PTR *PFN_vkGetMicromapBuildSizesEXT)(VkDevice                                            device, VkAccelerationStructureBuildTypeKHR                 buildType, const VkMicromapBuildInfoEXT*  pBuildInfo, VkMicromapBuildSizesInfoEXT*           pSizeInfo);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateMicromapEXT(
+    VkDevice                                    device,
+    const VkMicromapCreateInfoEXT*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkMicromapEXT*                              pMicromap);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyMicromapEXT(
+    VkDevice                                    device,
+    VkMicromapEXT                               micromap,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBuildMicromapsEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    infoCount,
+    const VkMicromapBuildInfoEXT*               pInfos);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkBuildMicromapsEXT(
+    VkDevice                                    device,
+    VkDeferredOperationKHR                      deferredOperation,
+    uint32_t                                    infoCount,
+    const VkMicromapBuildInfoEXT*               pInfos);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCopyMicromapEXT(
+    VkDevice                                    device,
+    VkDeferredOperationKHR                      deferredOperation,
+    const VkCopyMicromapInfoEXT*                pInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCopyMicromapToMemoryEXT(
+    VkDevice                                    device,
+    VkDeferredOperationKHR                      deferredOperation,
+    const VkCopyMicromapToMemoryInfoEXT*        pInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCopyMemoryToMicromapEXT(
+    VkDevice                                    device,
+    VkDeferredOperationKHR                      deferredOperation,
+    const VkCopyMemoryToMicromapInfoEXT*        pInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkWriteMicromapsPropertiesEXT(
+    VkDevice                                    device,
+    uint32_t                                    micromapCount,
+    const VkMicromapEXT*                        pMicromaps,
+    VkQueryType                                 queryType,
+    size_t                                      dataSize,
+    void*                                       pData,
+    size_t                                      stride);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyMicromapEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkCopyMicromapInfoEXT*                pInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyMicromapToMemoryEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkCopyMicromapToMemoryInfoEXT*        pInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyMemoryToMicromapEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkCopyMemoryToMicromapInfoEXT*        pInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdWriteMicromapsPropertiesEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    micromapCount,
+    const VkMicromapEXT*                        pMicromaps,
+    VkQueryType                                 queryType,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery);
+
+VKAPI_ATTR void VKAPI_CALL vkGetDeviceMicromapCompatibilityEXT(
+    VkDevice                                    device,
+    const VkMicromapVersionInfoEXT*             pVersionInfo,
+    VkAccelerationStructureCompatibilityKHR*    pCompatibility);
+
+VKAPI_ATTR void VKAPI_CALL vkGetMicromapBuildSizesEXT(
+    VkDevice                                    device,
+    VkAccelerationStructureBuildTypeKHR         buildType,
+    const VkMicromapBuildInfoEXT*               pBuildInfo,
+    VkMicromapBuildSizesInfoEXT*                pSizeInfo);
+#endif
+
+
+#define VK_EXT_load_store_op_none 1
+#define VK_EXT_LOAD_STORE_OP_NONE_SPEC_VERSION 1
+#define VK_EXT_LOAD_STORE_OP_NONE_EXTENSION_NAME "VK_EXT_load_store_op_none"
+
+
+#define VK_HUAWEI_cluster_culling_shader 1
+#define VK_HUAWEI_CLUSTER_CULLING_SHADER_SPEC_VERSION 1
+#define VK_HUAWEI_CLUSTER_CULLING_SHADER_EXTENSION_NAME "VK_HUAWEI_cluster_culling_shader"
+typedef struct VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           clustercullingShader;
+    VkBool32           multiviewClusterCullingShader;
+} VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI;
+
+typedef struct VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           maxWorkGroupCount[3];
+    uint32_t           maxWorkGroupSize[3];
+    uint32_t           maxOutputClusterCount;
+} VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI;
+
+typedef void (VKAPI_PTR *PFN_vkCmdDrawClusterHUAWEI)(VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ);
+typedef void (VKAPI_PTR *PFN_vkCmdDrawClusterIndirectHUAWEI)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdDrawClusterHUAWEI(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    groupCountX,
+    uint32_t                                    groupCountY,
+    uint32_t                                    groupCountZ);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDrawClusterIndirectHUAWEI(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset);
+#endif
+
+
+#define VK_EXT_border_color_swizzle 1
+#define VK_EXT_BORDER_COLOR_SWIZZLE_SPEC_VERSION 1
+#define VK_EXT_BORDER_COLOR_SWIZZLE_EXTENSION_NAME "VK_EXT_border_color_swizzle"
+typedef struct VkPhysicalDeviceBorderColorSwizzleFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           borderColorSwizzle;
+    VkBool32           borderColorSwizzleFromImage;
+} VkPhysicalDeviceBorderColorSwizzleFeaturesEXT;
+
+typedef struct VkSamplerBorderColorComponentMappingCreateInfoEXT {
+    VkStructureType       sType;
+    const void*           pNext;
+    VkComponentMapping    components;
+    VkBool32              srgb;
+} VkSamplerBorderColorComponentMappingCreateInfoEXT;
+
+
+
+#define VK_EXT_pageable_device_local_memory 1
+#define VK_EXT_PAGEABLE_DEVICE_LOCAL_MEMORY_SPEC_VERSION 1
+#define VK_EXT_PAGEABLE_DEVICE_LOCAL_MEMORY_EXTENSION_NAME "VK_EXT_pageable_device_local_memory"
+typedef struct VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           pageableDeviceLocalMemory;
+} VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT;
+
+typedef void (VKAPI_PTR *PFN_vkSetDeviceMemoryPriorityEXT)(VkDevice       device, VkDeviceMemory memory, float          priority);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkSetDeviceMemoryPriorityEXT(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    float                                       priority);
+#endif
+
+
+#define VK_VALVE_descriptor_set_host_mapping 1
+#define VK_VALVE_DESCRIPTOR_SET_HOST_MAPPING_SPEC_VERSION 1
+#define VK_VALVE_DESCRIPTOR_SET_HOST_MAPPING_EXTENSION_NAME "VK_VALVE_descriptor_set_host_mapping"
+typedef struct VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           descriptorSetHostMapping;
+} VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE;
+
+typedef struct VkDescriptorSetBindingReferenceVALVE {
+    VkStructureType          sType;
+    const void*              pNext;
+    VkDescriptorSetLayout    descriptorSetLayout;
+    uint32_t                 binding;
+} VkDescriptorSetBindingReferenceVALVE;
+
+typedef struct VkDescriptorSetLayoutHostMappingInfoVALVE {
+    VkStructureType    sType;
+    void*              pNext;
+    size_t             descriptorOffset;
+    uint32_t           descriptorSize;
+} VkDescriptorSetLayoutHostMappingInfoVALVE;
+
+typedef void (VKAPI_PTR *PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE)(VkDevice device, const VkDescriptorSetBindingReferenceVALVE* pBindingReference, VkDescriptorSetLayoutHostMappingInfoVALVE* pHostMapping);
+typedef void (VKAPI_PTR *PFN_vkGetDescriptorSetHostMappingVALVE)(VkDevice device, VkDescriptorSet descriptorSet, void** ppData);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetLayoutHostMappingInfoVALVE(
+    VkDevice                                    device,
+    const VkDescriptorSetBindingReferenceVALVE* pBindingReference,
+    VkDescriptorSetLayoutHostMappingInfoVALVE*  pHostMapping);
+
+VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetHostMappingVALVE(
+    VkDevice                                    device,
+    VkDescriptorSet                             descriptorSet,
+    void**                                      ppData);
+#endif
+
+
+#define VK_EXT_depth_clamp_zero_one 1
+#define VK_EXT_DEPTH_CLAMP_ZERO_ONE_SPEC_VERSION 1
+#define VK_EXT_DEPTH_CLAMP_ZERO_ONE_EXTENSION_NAME "VK_EXT_depth_clamp_zero_one"
+typedef struct VkPhysicalDeviceDepthClampZeroOneFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           depthClampZeroOne;
+} VkPhysicalDeviceDepthClampZeroOneFeaturesEXT;
+
+
+
+#define VK_EXT_non_seamless_cube_map 1
+#define VK_EXT_NON_SEAMLESS_CUBE_MAP_SPEC_VERSION 1
+#define VK_EXT_NON_SEAMLESS_CUBE_MAP_EXTENSION_NAME "VK_EXT_non_seamless_cube_map"
+typedef struct VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           nonSeamlessCubeMap;
+} VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT;
+
+
+
+#define VK_QCOM_fragment_density_map_offset 1
+#define VK_QCOM_FRAGMENT_DENSITY_MAP_OFFSET_SPEC_VERSION 1
+#define VK_QCOM_FRAGMENT_DENSITY_MAP_OFFSET_EXTENSION_NAME "VK_QCOM_fragment_density_map_offset"
+typedef struct VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           fragmentDensityMapOffset;
+} VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM;
+
+typedef struct VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM {
+    VkStructureType    sType;
+    void*              pNext;
+    VkExtent2D         fragmentDensityOffsetGranularity;
+} VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM;
+
+typedef struct VkSubpassFragmentDensityMapOffsetEndInfoQCOM {
+    VkStructureType      sType;
+    const void*          pNext;
+    uint32_t             fragmentDensityOffsetCount;
+    const VkOffset2D*    pFragmentDensityOffsets;
+} VkSubpassFragmentDensityMapOffsetEndInfoQCOM;
+
+
+
+#define VK_NV_copy_memory_indirect 1
+#define VK_NV_COPY_MEMORY_INDIRECT_SPEC_VERSION 1
+#define VK_NV_COPY_MEMORY_INDIRECT_EXTENSION_NAME "VK_NV_copy_memory_indirect"
+typedef struct VkCopyMemoryIndirectCommandNV {
+    VkDeviceAddress    srcAddress;
+    VkDeviceAddress    dstAddress;
+    VkDeviceSize       size;
+} VkCopyMemoryIndirectCommandNV;
+
+typedef struct VkCopyMemoryToImageIndirectCommandNV {
+    VkDeviceAddress             srcAddress;
+    uint32_t                    bufferRowLength;
+    uint32_t                    bufferImageHeight;
+    VkImageSubresourceLayers    imageSubresource;
+    VkOffset3D                  imageOffset;
+    VkExtent3D                  imageExtent;
+} VkCopyMemoryToImageIndirectCommandNV;
+
+typedef struct VkPhysicalDeviceCopyMemoryIndirectFeaturesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           indirectCopy;
+} VkPhysicalDeviceCopyMemoryIndirectFeaturesNV;
+
+typedef struct VkPhysicalDeviceCopyMemoryIndirectPropertiesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkQueueFlags       supportedQueues;
+} VkPhysicalDeviceCopyMemoryIndirectPropertiesNV;
+
+typedef void (VKAPI_PTR *PFN_vkCmdCopyMemoryIndirectNV)(VkCommandBuffer commandBuffer, VkDeviceAddress copyBufferAddress, uint32_t copyCount, uint32_t stride);
+typedef void (VKAPI_PTR *PFN_vkCmdCopyMemoryToImageIndirectNV)(VkCommandBuffer commandBuffer, VkDeviceAddress copyBufferAddress, uint32_t copyCount, uint32_t stride, VkImage dstImage, VkImageLayout dstImageLayout, const VkImageSubresourceLayers* pImageSubresources);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyMemoryIndirectNV(
+    VkCommandBuffer                             commandBuffer,
+    VkDeviceAddress                             copyBufferAddress,
+    uint32_t                                    copyCount,
+    uint32_t                                    stride);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyMemoryToImageIndirectNV(
+    VkCommandBuffer                             commandBuffer,
+    VkDeviceAddress                             copyBufferAddress,
+    uint32_t                                    copyCount,
+    uint32_t                                    stride,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    const VkImageSubresourceLayers*             pImageSubresources);
+#endif
+
+
+#define VK_NV_memory_decompression 1
+#define VK_NV_MEMORY_DECOMPRESSION_SPEC_VERSION 1
+#define VK_NV_MEMORY_DECOMPRESSION_EXTENSION_NAME "VK_NV_memory_decompression"
+
+// Flag bits for VkMemoryDecompressionMethodFlagBitsNV
+typedef VkFlags64 VkMemoryDecompressionMethodFlagBitsNV;
+static const VkMemoryDecompressionMethodFlagBitsNV VK_MEMORY_DECOMPRESSION_METHOD_GDEFLATE_1_0_BIT_NV = 0x00000001ULL;
+
+typedef VkFlags64 VkMemoryDecompressionMethodFlagsNV;
+typedef struct VkDecompressMemoryRegionNV {
+    VkDeviceAddress                       srcAddress;
+    VkDeviceAddress                       dstAddress;
+    VkDeviceSize                          compressedSize;
+    VkDeviceSize                          decompressedSize;
+    VkMemoryDecompressionMethodFlagsNV    decompressionMethod;
+} VkDecompressMemoryRegionNV;
+
+typedef struct VkPhysicalDeviceMemoryDecompressionFeaturesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           memoryDecompression;
+} VkPhysicalDeviceMemoryDecompressionFeaturesNV;
+
+typedef struct VkPhysicalDeviceMemoryDecompressionPropertiesNV {
+    VkStructureType                       sType;
+    void*                                 pNext;
+    VkMemoryDecompressionMethodFlagsNV    decompressionMethods;
+    uint64_t                              maxDecompressionIndirectCount;
+} VkPhysicalDeviceMemoryDecompressionPropertiesNV;
+
+typedef void (VKAPI_PTR *PFN_vkCmdDecompressMemoryNV)(VkCommandBuffer commandBuffer, uint32_t decompressRegionCount, const VkDecompressMemoryRegionNV* pDecompressMemoryRegions);
+typedef void (VKAPI_PTR *PFN_vkCmdDecompressMemoryIndirectCountNV)(VkCommandBuffer commandBuffer, VkDeviceAddress indirectCommandsAddress, VkDeviceAddress indirectCommandsCountAddress, uint32_t stride);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdDecompressMemoryNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    decompressRegionCount,
+    const VkDecompressMemoryRegionNV*           pDecompressMemoryRegions);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDecompressMemoryIndirectCountNV(
+    VkCommandBuffer                             commandBuffer,
+    VkDeviceAddress                             indirectCommandsAddress,
+    VkDeviceAddress                             indirectCommandsCountAddress,
+    uint32_t                                    stride);
+#endif
+
+
+#define VK_NV_linear_color_attachment 1
+#define VK_NV_LINEAR_COLOR_ATTACHMENT_SPEC_VERSION 1
+#define VK_NV_LINEAR_COLOR_ATTACHMENT_EXTENSION_NAME "VK_NV_linear_color_attachment"
+typedef struct VkPhysicalDeviceLinearColorAttachmentFeaturesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           linearColorAttachment;
+} VkPhysicalDeviceLinearColorAttachmentFeaturesNV;
+
+
+
+#define VK_GOOGLE_surfaceless_query 1
+#define VK_GOOGLE_SURFACELESS_QUERY_SPEC_VERSION 2
+#define VK_GOOGLE_SURFACELESS_QUERY_EXTENSION_NAME "VK_GOOGLE_surfaceless_query"
+
+
+#define VK_EXT_image_compression_control_swapchain 1
+#define VK_EXT_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_SPEC_VERSION 1
+#define VK_EXT_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_EXTENSION_NAME "VK_EXT_image_compression_control_swapchain"
+typedef struct VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           imageCompressionControlSwapchain;
+} VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT;
+
+
+
+#define VK_QCOM_image_processing 1
+#define VK_QCOM_IMAGE_PROCESSING_SPEC_VERSION 1
+#define VK_QCOM_IMAGE_PROCESSING_EXTENSION_NAME "VK_QCOM_image_processing"
+typedef struct VkImageViewSampleWeightCreateInfoQCOM {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkOffset2D         filterCenter;
+    VkExtent2D         filterSize;
+    uint32_t           numPhases;
+} VkImageViewSampleWeightCreateInfoQCOM;
+
+typedef struct VkPhysicalDeviceImageProcessingFeaturesQCOM {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           textureSampleWeighted;
+    VkBool32           textureBoxFilter;
+    VkBool32           textureBlockMatch;
+} VkPhysicalDeviceImageProcessingFeaturesQCOM;
+
+typedef struct VkPhysicalDeviceImageProcessingPropertiesQCOM {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           maxWeightFilterPhases;
+    VkExtent2D         maxWeightFilterDimension;
+    VkExtent2D         maxBlockMatchRegion;
+    VkExtent2D         maxBoxFilterBlockSize;
+} VkPhysicalDeviceImageProcessingPropertiesQCOM;
+
+
+
+#define VK_EXT_extended_dynamic_state3 1
+#define VK_EXT_EXTENDED_DYNAMIC_STATE_3_SPEC_VERSION 2
+#define VK_EXT_EXTENDED_DYNAMIC_STATE_3_EXTENSION_NAME "VK_EXT_extended_dynamic_state3"
+typedef struct VkPhysicalDeviceExtendedDynamicState3FeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           extendedDynamicState3TessellationDomainOrigin;
+    VkBool32           extendedDynamicState3DepthClampEnable;
+    VkBool32           extendedDynamicState3PolygonMode;
+    VkBool32           extendedDynamicState3RasterizationSamples;
+    VkBool32           extendedDynamicState3SampleMask;
+    VkBool32           extendedDynamicState3AlphaToCoverageEnable;
+    VkBool32           extendedDynamicState3AlphaToOneEnable;
+    VkBool32           extendedDynamicState3LogicOpEnable;
+    VkBool32           extendedDynamicState3ColorBlendEnable;
+    VkBool32           extendedDynamicState3ColorBlendEquation;
+    VkBool32           extendedDynamicState3ColorWriteMask;
+    VkBool32           extendedDynamicState3RasterizationStream;
+    VkBool32           extendedDynamicState3ConservativeRasterizationMode;
+    VkBool32           extendedDynamicState3ExtraPrimitiveOverestimationSize;
+    VkBool32           extendedDynamicState3DepthClipEnable;
+    VkBool32           extendedDynamicState3SampleLocationsEnable;
+    VkBool32           extendedDynamicState3ColorBlendAdvanced;
+    VkBool32           extendedDynamicState3ProvokingVertexMode;
+    VkBool32           extendedDynamicState3LineRasterizationMode;
+    VkBool32           extendedDynamicState3LineStippleEnable;
+    VkBool32           extendedDynamicState3DepthClipNegativeOneToOne;
+    VkBool32           extendedDynamicState3ViewportWScalingEnable;
+    VkBool32           extendedDynamicState3ViewportSwizzle;
+    VkBool32           extendedDynamicState3CoverageToColorEnable;
+    VkBool32           extendedDynamicState3CoverageToColorLocation;
+    VkBool32           extendedDynamicState3CoverageModulationMode;
+    VkBool32           extendedDynamicState3CoverageModulationTableEnable;
+    VkBool32           extendedDynamicState3CoverageModulationTable;
+    VkBool32           extendedDynamicState3CoverageReductionMode;
+    VkBool32           extendedDynamicState3RepresentativeFragmentTestEnable;
+    VkBool32           extendedDynamicState3ShadingRateImageEnable;
+} VkPhysicalDeviceExtendedDynamicState3FeaturesEXT;
+
+typedef struct VkPhysicalDeviceExtendedDynamicState3PropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           dynamicPrimitiveTopologyUnrestricted;
+} VkPhysicalDeviceExtendedDynamicState3PropertiesEXT;
+
+typedef struct VkColorBlendEquationEXT {
+    VkBlendFactor    srcColorBlendFactor;
+    VkBlendFactor    dstColorBlendFactor;
+    VkBlendOp        colorBlendOp;
+    VkBlendFactor    srcAlphaBlendFactor;
+    VkBlendFactor    dstAlphaBlendFactor;
+    VkBlendOp        alphaBlendOp;
+} VkColorBlendEquationEXT;
+
+typedef struct VkColorBlendAdvancedEXT {
+    VkBlendOp            advancedBlendOp;
+    VkBool32             srcPremultiplied;
+    VkBool32             dstPremultiplied;
+    VkBlendOverlapEXT    blendOverlap;
+    VkBool32             clampResults;
+} VkColorBlendAdvancedEXT;
+
+typedef void (VKAPI_PTR *PFN_vkCmdSetTessellationDomainOriginEXT)(VkCommandBuffer commandBuffer, VkTessellationDomainOrigin domainOrigin);
+typedef void (VKAPI_PTR *PFN_vkCmdSetDepthClampEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 depthClampEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetPolygonModeEXT)(VkCommandBuffer commandBuffer, VkPolygonMode polygonMode);
+typedef void (VKAPI_PTR *PFN_vkCmdSetRasterizationSamplesEXT)(VkCommandBuffer commandBuffer, VkSampleCountFlagBits  rasterizationSamples);
+typedef void (VKAPI_PTR *PFN_vkCmdSetSampleMaskEXT)(VkCommandBuffer commandBuffer, VkSampleCountFlagBits  samples, const VkSampleMask*    pSampleMask);
+typedef void (VKAPI_PTR *PFN_vkCmdSetAlphaToCoverageEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 alphaToCoverageEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetAlphaToOneEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 alphaToOneEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetLogicOpEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 logicOpEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetColorBlendEnableEXT)(VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, const VkBool32* pColorBlendEnables);
+typedef void (VKAPI_PTR *PFN_vkCmdSetColorBlendEquationEXT)(VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, const VkColorBlendEquationEXT* pColorBlendEquations);
+typedef void (VKAPI_PTR *PFN_vkCmdSetColorWriteMaskEXT)(VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, const VkColorComponentFlags* pColorWriteMasks);
+typedef void (VKAPI_PTR *PFN_vkCmdSetRasterizationStreamEXT)(VkCommandBuffer commandBuffer, uint32_t rasterizationStream);
+typedef void (VKAPI_PTR *PFN_vkCmdSetConservativeRasterizationModeEXT)(VkCommandBuffer commandBuffer, VkConservativeRasterizationModeEXT conservativeRasterizationMode);
+typedef void (VKAPI_PTR *PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT)(VkCommandBuffer commandBuffer, float extraPrimitiveOverestimationSize);
+typedef void (VKAPI_PTR *PFN_vkCmdSetDepthClipEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 depthClipEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetSampleLocationsEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 sampleLocationsEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetColorBlendAdvancedEXT)(VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, const VkColorBlendAdvancedEXT* pColorBlendAdvanced);
+typedef void (VKAPI_PTR *PFN_vkCmdSetProvokingVertexModeEXT)(VkCommandBuffer commandBuffer, VkProvokingVertexModeEXT provokingVertexMode);
+typedef void (VKAPI_PTR *PFN_vkCmdSetLineRasterizationModeEXT)(VkCommandBuffer commandBuffer, VkLineRasterizationModeEXT lineRasterizationMode);
+typedef void (VKAPI_PTR *PFN_vkCmdSetLineStippleEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 stippledLineEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetDepthClipNegativeOneToOneEXT)(VkCommandBuffer commandBuffer, VkBool32 negativeOneToOne);
+typedef void (VKAPI_PTR *PFN_vkCmdSetViewportWScalingEnableNV)(VkCommandBuffer commandBuffer, VkBool32 viewportWScalingEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetViewportSwizzleNV)(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewportSwizzleNV* pViewportSwizzles);
+typedef void (VKAPI_PTR *PFN_vkCmdSetCoverageToColorEnableNV)(VkCommandBuffer commandBuffer, VkBool32 coverageToColorEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetCoverageToColorLocationNV)(VkCommandBuffer commandBuffer, uint32_t coverageToColorLocation);
+typedef void (VKAPI_PTR *PFN_vkCmdSetCoverageModulationModeNV)(VkCommandBuffer commandBuffer, VkCoverageModulationModeNV coverageModulationMode);
+typedef void (VKAPI_PTR *PFN_vkCmdSetCoverageModulationTableEnableNV)(VkCommandBuffer commandBuffer, VkBool32 coverageModulationTableEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetCoverageModulationTableNV)(VkCommandBuffer commandBuffer, uint32_t coverageModulationTableCount, const float* pCoverageModulationTable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetShadingRateImageEnableNV)(VkCommandBuffer commandBuffer, VkBool32 shadingRateImageEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetRepresentativeFragmentTestEnableNV)(VkCommandBuffer commandBuffer, VkBool32 representativeFragmentTestEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetCoverageReductionModeNV)(VkCommandBuffer commandBuffer, VkCoverageReductionModeNV coverageReductionMode);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdSetTessellationDomainOriginEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkTessellationDomainOrigin                  domainOrigin);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthClampEnableEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkBool32                                    depthClampEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetPolygonModeEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkPolygonMode                               polygonMode);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetRasterizationSamplesEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkSampleCountFlagBits                       rasterizationSamples);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetSampleMaskEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkSampleCountFlagBits                       samples,
+    const VkSampleMask*                         pSampleMask);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetAlphaToCoverageEnableEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkBool32                                    alphaToCoverageEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetAlphaToOneEnableEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkBool32                                    alphaToOneEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetLogicOpEnableEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkBool32                                    logicOpEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetColorBlendEnableEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstAttachment,
+    uint32_t                                    attachmentCount,
+    const VkBool32*                             pColorBlendEnables);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetColorBlendEquationEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstAttachment,
+    uint32_t                                    attachmentCount,
+    const VkColorBlendEquationEXT*              pColorBlendEquations);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetColorWriteMaskEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstAttachment,
+    uint32_t                                    attachmentCount,
+    const VkColorComponentFlags*                pColorWriteMasks);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetRasterizationStreamEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    rasterizationStream);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetConservativeRasterizationModeEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkConservativeRasterizationModeEXT          conservativeRasterizationMode);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetExtraPrimitiveOverestimationSizeEXT(
+    VkCommandBuffer                             commandBuffer,
+    float                                       extraPrimitiveOverestimationSize);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthClipEnableEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkBool32                                    depthClipEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetSampleLocationsEnableEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkBool32                                    sampleLocationsEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetColorBlendAdvancedEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstAttachment,
+    uint32_t                                    attachmentCount,
+    const VkColorBlendAdvancedEXT*              pColorBlendAdvanced);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetProvokingVertexModeEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkProvokingVertexModeEXT                    provokingVertexMode);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetLineRasterizationModeEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkLineRasterizationModeEXT                  lineRasterizationMode);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetLineStippleEnableEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkBool32                                    stippledLineEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthClipNegativeOneToOneEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkBool32                                    negativeOneToOne);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportWScalingEnableNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBool32                                    viewportWScalingEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportSwizzleNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstViewport,
+    uint32_t                                    viewportCount,
+    const VkViewportSwizzleNV*                  pViewportSwizzles);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetCoverageToColorEnableNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBool32                                    coverageToColorEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetCoverageToColorLocationNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    coverageToColorLocation);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetCoverageModulationModeNV(
+    VkCommandBuffer                             commandBuffer,
+    VkCoverageModulationModeNV                  coverageModulationMode);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetCoverageModulationTableEnableNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBool32                                    coverageModulationTableEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetCoverageModulationTableNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    coverageModulationTableCount,
+    const float*                                pCoverageModulationTable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetShadingRateImageEnableNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBool32                                    shadingRateImageEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetRepresentativeFragmentTestEnableNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBool32                                    representativeFragmentTestEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetCoverageReductionModeNV(
+    VkCommandBuffer                             commandBuffer,
+    VkCoverageReductionModeNV                   coverageReductionMode);
+#endif
+
+
+#define VK_EXT_subpass_merge_feedback 1
+#define VK_EXT_SUBPASS_MERGE_FEEDBACK_SPEC_VERSION 2
+#define VK_EXT_SUBPASS_MERGE_FEEDBACK_EXTENSION_NAME "VK_EXT_subpass_merge_feedback"
+
+typedef enum VkSubpassMergeStatusEXT {
+    VK_SUBPASS_MERGE_STATUS_MERGED_EXT = 0,
+    VK_SUBPASS_MERGE_STATUS_DISALLOWED_EXT = 1,
+    VK_SUBPASS_MERGE_STATUS_NOT_MERGED_SIDE_EFFECTS_EXT = 2,
+    VK_SUBPASS_MERGE_STATUS_NOT_MERGED_SAMPLES_MISMATCH_EXT = 3,
+    VK_SUBPASS_MERGE_STATUS_NOT_MERGED_VIEWS_MISMATCH_EXT = 4,
+    VK_SUBPASS_MERGE_STATUS_NOT_MERGED_ALIASING_EXT = 5,
+    VK_SUBPASS_MERGE_STATUS_NOT_MERGED_DEPENDENCIES_EXT = 6,
+    VK_SUBPASS_MERGE_STATUS_NOT_MERGED_INCOMPATIBLE_INPUT_ATTACHMENT_EXT = 7,
+    VK_SUBPASS_MERGE_STATUS_NOT_MERGED_TOO_MANY_ATTACHMENTS_EXT = 8,
+    VK_SUBPASS_MERGE_STATUS_NOT_MERGED_INSUFFICIENT_STORAGE_EXT = 9,
+    VK_SUBPASS_MERGE_STATUS_NOT_MERGED_DEPTH_STENCIL_COUNT_EXT = 10,
+    VK_SUBPASS_MERGE_STATUS_NOT_MERGED_RESOLVE_ATTACHMENT_REUSE_EXT = 11,
+    VK_SUBPASS_MERGE_STATUS_NOT_MERGED_SINGLE_SUBPASS_EXT = 12,
+    VK_SUBPASS_MERGE_STATUS_NOT_MERGED_UNSPECIFIED_EXT = 13,
+    VK_SUBPASS_MERGE_STATUS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkSubpassMergeStatusEXT;
+typedef struct VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           subpassMergeFeedback;
+} VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT;
+
+typedef struct VkRenderPassCreationControlEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkBool32           disallowMerging;
+} VkRenderPassCreationControlEXT;
+
+typedef struct VkRenderPassCreationFeedbackInfoEXT {
+    uint32_t    postMergeSubpassCount;
+} VkRenderPassCreationFeedbackInfoEXT;
+
+typedef struct VkRenderPassCreationFeedbackCreateInfoEXT {
+    VkStructureType                         sType;
+    const void*                             pNext;
+    VkRenderPassCreationFeedbackInfoEXT*    pRenderPassFeedback;
+} VkRenderPassCreationFeedbackCreateInfoEXT;
+
+typedef struct VkRenderPassSubpassFeedbackInfoEXT {
+    VkSubpassMergeStatusEXT    subpassMergeStatus;
+    char                       description[VK_MAX_DESCRIPTION_SIZE];
+    uint32_t                   postMergeIndex;
+} VkRenderPassSubpassFeedbackInfoEXT;
+
+typedef struct VkRenderPassSubpassFeedbackCreateInfoEXT {
+    VkStructureType                        sType;
+    const void*                            pNext;
+    VkRenderPassSubpassFeedbackInfoEXT*    pSubpassFeedback;
+} VkRenderPassSubpassFeedbackCreateInfoEXT;
+
+
+
+#define VK_LUNARG_direct_driver_loading 1
+#define VK_LUNARG_DIRECT_DRIVER_LOADING_SPEC_VERSION 1
+#define VK_LUNARG_DIRECT_DRIVER_LOADING_EXTENSION_NAME "VK_LUNARG_direct_driver_loading"
+
+typedef enum VkDirectDriverLoadingModeLUNARG {
+    VK_DIRECT_DRIVER_LOADING_MODE_EXCLUSIVE_LUNARG = 0,
+    VK_DIRECT_DRIVER_LOADING_MODE_INCLUSIVE_LUNARG = 1,
+    VK_DIRECT_DRIVER_LOADING_MODE_MAX_ENUM_LUNARG = 0x7FFFFFFF
+} VkDirectDriverLoadingModeLUNARG;
+typedef VkFlags VkDirectDriverLoadingFlagsLUNARG;
+typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_vkGetInstanceProcAddrLUNARG)(
+    VkInstance instance, const char* pName);
+
+typedef struct VkDirectDriverLoadingInfoLUNARG {
+    VkStructureType                     sType;
+    void*                               pNext;
+    VkDirectDriverLoadingFlagsLUNARG    flags;
+    PFN_vkGetInstanceProcAddrLUNARG     pfnGetInstanceProcAddr;
+} VkDirectDriverLoadingInfoLUNARG;
+
+typedef struct VkDirectDriverLoadingListLUNARG {
+    VkStructureType                           sType;
+    void*                                     pNext;
+    VkDirectDriverLoadingModeLUNARG           mode;
+    uint32_t                                  driverCount;
+    const VkDirectDriverLoadingInfoLUNARG*    pDrivers;
+} VkDirectDriverLoadingListLUNARG;
+
+
+
+#define VK_EXT_shader_module_identifier 1
+#define VK_MAX_SHADER_MODULE_IDENTIFIER_SIZE_EXT 32U
+#define VK_EXT_SHADER_MODULE_IDENTIFIER_SPEC_VERSION 1
+#define VK_EXT_SHADER_MODULE_IDENTIFIER_EXTENSION_NAME "VK_EXT_shader_module_identifier"
+typedef struct VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           shaderModuleIdentifier;
+} VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT;
+
+typedef struct VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    uint8_t            shaderModuleIdentifierAlgorithmUUID[VK_UUID_SIZE];
+} VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT;
+
+typedef struct VkPipelineShaderStageModuleIdentifierCreateInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           identifierSize;
+    const uint8_t*     pIdentifier;
+} VkPipelineShaderStageModuleIdentifierCreateInfoEXT;
+
+typedef struct VkShaderModuleIdentifierEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           identifierSize;
+    uint8_t            identifier[VK_MAX_SHADER_MODULE_IDENTIFIER_SIZE_EXT];
+} VkShaderModuleIdentifierEXT;
+
+typedef void (VKAPI_PTR *PFN_vkGetShaderModuleIdentifierEXT)(VkDevice device, VkShaderModule shaderModule, VkShaderModuleIdentifierEXT* pIdentifier);
+typedef void (VKAPI_PTR *PFN_vkGetShaderModuleCreateInfoIdentifierEXT)(VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, VkShaderModuleIdentifierEXT* pIdentifier);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkGetShaderModuleIdentifierEXT(
+    VkDevice                                    device,
+    VkShaderModule                              shaderModule,
+    VkShaderModuleIdentifierEXT*                pIdentifier);
+
+VKAPI_ATTR void VKAPI_CALL vkGetShaderModuleCreateInfoIdentifierEXT(
+    VkDevice                                    device,
+    const VkShaderModuleCreateInfo*             pCreateInfo,
+    VkShaderModuleIdentifierEXT*                pIdentifier);
+#endif
+
+
+#define VK_EXT_rasterization_order_attachment_access 1
+#define VK_EXT_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_SPEC_VERSION 1
+#define VK_EXT_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_EXTENSION_NAME "VK_EXT_rasterization_order_attachment_access"
+
+
+#define VK_NV_optical_flow 1
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkOpticalFlowSessionNV)
+#define VK_NV_OPTICAL_FLOW_SPEC_VERSION   1
+#define VK_NV_OPTICAL_FLOW_EXTENSION_NAME "VK_NV_optical_flow"
+
+typedef enum VkOpticalFlowPerformanceLevelNV {
+    VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_UNKNOWN_NV = 0,
+    VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_SLOW_NV = 1,
+    VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_MEDIUM_NV = 2,
+    VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_FAST_NV = 3,
+    VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_MAX_ENUM_NV = 0x7FFFFFFF
+} VkOpticalFlowPerformanceLevelNV;
+
+typedef enum VkOpticalFlowSessionBindingPointNV {
+    VK_OPTICAL_FLOW_SESSION_BINDING_POINT_UNKNOWN_NV = 0,
+    VK_OPTICAL_FLOW_SESSION_BINDING_POINT_INPUT_NV = 1,
+    VK_OPTICAL_FLOW_SESSION_BINDING_POINT_REFERENCE_NV = 2,
+    VK_OPTICAL_FLOW_SESSION_BINDING_POINT_HINT_NV = 3,
+    VK_OPTICAL_FLOW_SESSION_BINDING_POINT_FLOW_VECTOR_NV = 4,
+    VK_OPTICAL_FLOW_SESSION_BINDING_POINT_BACKWARD_FLOW_VECTOR_NV = 5,
+    VK_OPTICAL_FLOW_SESSION_BINDING_POINT_COST_NV = 6,
+    VK_OPTICAL_FLOW_SESSION_BINDING_POINT_BACKWARD_COST_NV = 7,
+    VK_OPTICAL_FLOW_SESSION_BINDING_POINT_GLOBAL_FLOW_NV = 8,
+    VK_OPTICAL_FLOW_SESSION_BINDING_POINT_MAX_ENUM_NV = 0x7FFFFFFF
+} VkOpticalFlowSessionBindingPointNV;
+
+typedef enum VkOpticalFlowGridSizeFlagBitsNV {
+    VK_OPTICAL_FLOW_GRID_SIZE_UNKNOWN_NV = 0,
+    VK_OPTICAL_FLOW_GRID_SIZE_1X1_BIT_NV = 0x00000001,
+    VK_OPTICAL_FLOW_GRID_SIZE_2X2_BIT_NV = 0x00000002,
+    VK_OPTICAL_FLOW_GRID_SIZE_4X4_BIT_NV = 0x00000004,
+    VK_OPTICAL_FLOW_GRID_SIZE_8X8_BIT_NV = 0x00000008,
+    VK_OPTICAL_FLOW_GRID_SIZE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF
+} VkOpticalFlowGridSizeFlagBitsNV;
+typedef VkFlags   VkOpticalFlowGridSizeFlagsNV;
+
+typedef enum VkOpticalFlowUsageFlagBitsNV {
+    VK_OPTICAL_FLOW_USAGE_UNKNOWN_NV = 0,
+    VK_OPTICAL_FLOW_USAGE_INPUT_BIT_NV = 0x00000001,
+    VK_OPTICAL_FLOW_USAGE_OUTPUT_BIT_NV = 0x00000002,
+    VK_OPTICAL_FLOW_USAGE_HINT_BIT_NV = 0x00000004,
+    VK_OPTICAL_FLOW_USAGE_COST_BIT_NV = 0x00000008,
+    VK_OPTICAL_FLOW_USAGE_GLOBAL_FLOW_BIT_NV = 0x00000010,
+    VK_OPTICAL_FLOW_USAGE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF
+} VkOpticalFlowUsageFlagBitsNV;
+typedef VkFlags   VkOpticalFlowUsageFlagsNV;
+
+typedef enum VkOpticalFlowSessionCreateFlagBitsNV {
+    VK_OPTICAL_FLOW_SESSION_CREATE_ENABLE_HINT_BIT_NV = 0x00000001,
+    VK_OPTICAL_FLOW_SESSION_CREATE_ENABLE_COST_BIT_NV = 0x00000002,
+    VK_OPTICAL_FLOW_SESSION_CREATE_ENABLE_GLOBAL_FLOW_BIT_NV = 0x00000004,
+    VK_OPTICAL_FLOW_SESSION_CREATE_ALLOW_REGIONS_BIT_NV = 0x00000008,
+    VK_OPTICAL_FLOW_SESSION_CREATE_BOTH_DIRECTIONS_BIT_NV = 0x00000010,
+    VK_OPTICAL_FLOW_SESSION_CREATE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF
+} VkOpticalFlowSessionCreateFlagBitsNV;
+typedef VkFlags   VkOpticalFlowSessionCreateFlagsNV;
+
+typedef enum VkOpticalFlowExecuteFlagBitsNV {
+    VK_OPTICAL_FLOW_EXECUTE_DISABLE_TEMPORAL_HINTS_BIT_NV = 0x00000001,
+    VK_OPTICAL_FLOW_EXECUTE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF
+} VkOpticalFlowExecuteFlagBitsNV;
+typedef VkFlags   VkOpticalFlowExecuteFlagsNV;
+typedef struct VkPhysicalDeviceOpticalFlowFeaturesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           opticalFlow;
+} VkPhysicalDeviceOpticalFlowFeaturesNV;
+
+typedef struct VkPhysicalDeviceOpticalFlowPropertiesNV {
+    VkStructureType                 sType;
+    void*                           pNext;
+    VkOpticalFlowGridSizeFlagsNV    supportedOutputGridSizes;
+    VkOpticalFlowGridSizeFlagsNV    supportedHintGridSizes;
+    VkBool32                        hintSupported;
+    VkBool32                        costSupported;
+    VkBool32                        bidirectionalFlowSupported;
+    VkBool32                        globalFlowSupported;
+    uint32_t                        minWidth;
+    uint32_t                        minHeight;
+    uint32_t                        maxWidth;
+    uint32_t                        maxHeight;
+    uint32_t                        maxNumRegionsOfInterest;
+} VkPhysicalDeviceOpticalFlowPropertiesNV;
+
+typedef struct VkOpticalFlowImageFormatInfoNV {
+    VkStructureType              sType;
+    const void*                  pNext;
+    VkOpticalFlowUsageFlagsNV    usage;
+} VkOpticalFlowImageFormatInfoNV;
+
+typedef struct VkOpticalFlowImageFormatPropertiesNV {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkFormat           format;
+} VkOpticalFlowImageFormatPropertiesNV;
+
+typedef struct VkOpticalFlowSessionCreateInfoNV {
+    VkStructureType                      sType;
+    void*                                pNext;
+    uint32_t                             width;
+    uint32_t                             height;
+    VkFormat                             imageFormat;
+    VkFormat                             flowVectorFormat;
+    VkFormat                             costFormat;
+    VkOpticalFlowGridSizeFlagsNV         outputGridSize;
+    VkOpticalFlowGridSizeFlagsNV         hintGridSize;
+    VkOpticalFlowPerformanceLevelNV      performanceLevel;
+    VkOpticalFlowSessionCreateFlagsNV    flags;
+} VkOpticalFlowSessionCreateInfoNV;
+
+typedef struct VkOpticalFlowSessionCreatePrivateDataInfoNV {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           id;
+    uint32_t           size;
+    const void*        pPrivateData;
+} VkOpticalFlowSessionCreatePrivateDataInfoNV;
+
+typedef struct VkOpticalFlowExecuteInfoNV {
+    VkStructureType                sType;
+    void*                          pNext;
+    VkOpticalFlowExecuteFlagsNV    flags;
+    uint32_t                       regionCount;
+    const VkRect2D*                pRegions;
+} VkOpticalFlowExecuteInfoNV;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV)(VkPhysicalDevice physicalDevice, const VkOpticalFlowImageFormatInfoNV* pOpticalFlowImageFormatInfo, uint32_t* pFormatCount, VkOpticalFlowImageFormatPropertiesNV* pImageFormatProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateOpticalFlowSessionNV)(VkDevice device, const VkOpticalFlowSessionCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkOpticalFlowSessionNV* pSession);
+typedef void (VKAPI_PTR *PFN_vkDestroyOpticalFlowSessionNV)(VkDevice device, VkOpticalFlowSessionNV session, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkBindOpticalFlowSessionImageNV)(VkDevice device, VkOpticalFlowSessionNV session, VkOpticalFlowSessionBindingPointNV bindingPoint, VkImageView view, VkImageLayout layout);
+typedef void (VKAPI_PTR *PFN_vkCmdOpticalFlowExecuteNV)(VkCommandBuffer commandBuffer, VkOpticalFlowSessionNV session, const VkOpticalFlowExecuteInfoNV* pExecuteInfo);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceOpticalFlowImageFormatsNV(
+    VkPhysicalDevice                            physicalDevice,
+    const VkOpticalFlowImageFormatInfoNV*       pOpticalFlowImageFormatInfo,
+    uint32_t*                                   pFormatCount,
+    VkOpticalFlowImageFormatPropertiesNV*       pImageFormatProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateOpticalFlowSessionNV(
+    VkDevice                                    device,
+    const VkOpticalFlowSessionCreateInfoNV*     pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkOpticalFlowSessionNV*                     pSession);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyOpticalFlowSessionNV(
+    VkDevice                                    device,
+    VkOpticalFlowSessionNV                      session,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkBindOpticalFlowSessionImageNV(
+    VkDevice                                    device,
+    VkOpticalFlowSessionNV                      session,
+    VkOpticalFlowSessionBindingPointNV          bindingPoint,
+    VkImageView                                 view,
+    VkImageLayout                               layout);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdOpticalFlowExecuteNV(
+    VkCommandBuffer                             commandBuffer,
+    VkOpticalFlowSessionNV                      session,
+    const VkOpticalFlowExecuteInfoNV*           pExecuteInfo);
+#endif
+
+
+#define VK_EXT_legacy_dithering 1
+#define VK_EXT_LEGACY_DITHERING_SPEC_VERSION 1
+#define VK_EXT_LEGACY_DITHERING_EXTENSION_NAME "VK_EXT_legacy_dithering"
+typedef struct VkPhysicalDeviceLegacyDitheringFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           legacyDithering;
+} VkPhysicalDeviceLegacyDitheringFeaturesEXT;
+
+
+
+#define VK_EXT_pipeline_protected_access 1
+#define VK_EXT_PIPELINE_PROTECTED_ACCESS_SPEC_VERSION 1
+#define VK_EXT_PIPELINE_PROTECTED_ACCESS_EXTENSION_NAME "VK_EXT_pipeline_protected_access"
+typedef struct VkPhysicalDevicePipelineProtectedAccessFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           pipelineProtectedAccess;
+} VkPhysicalDevicePipelineProtectedAccessFeaturesEXT;
+
+
+
+#define VK_QCOM_tile_properties 1
+#define VK_QCOM_TILE_PROPERTIES_SPEC_VERSION 1
+#define VK_QCOM_TILE_PROPERTIES_EXTENSION_NAME "VK_QCOM_tile_properties"
+typedef struct VkPhysicalDeviceTilePropertiesFeaturesQCOM {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           tileProperties;
+} VkPhysicalDeviceTilePropertiesFeaturesQCOM;
+
+typedef struct VkTilePropertiesQCOM {
+    VkStructureType    sType;
+    void*              pNext;
+    VkExtent3D         tileSize;
+    VkExtent2D         apronSize;
+    VkOffset2D         origin;
+} VkTilePropertiesQCOM;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetFramebufferTilePropertiesQCOM)(VkDevice device, VkFramebuffer framebuffer, uint32_t* pPropertiesCount, VkTilePropertiesQCOM* pProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkGetDynamicRenderingTilePropertiesQCOM)(VkDevice device, const VkRenderingInfo* pRenderingInfo, VkTilePropertiesQCOM* pProperties);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetFramebufferTilePropertiesQCOM(
+    VkDevice                                    device,
+    VkFramebuffer                               framebuffer,
+    uint32_t*                                   pPropertiesCount,
+    VkTilePropertiesQCOM*                       pProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetDynamicRenderingTilePropertiesQCOM(
+    VkDevice                                    device,
+    const VkRenderingInfo*                      pRenderingInfo,
+    VkTilePropertiesQCOM*                       pProperties);
+#endif
+
+
+#define VK_SEC_amigo_profiling 1
+#define VK_SEC_AMIGO_PROFILING_SPEC_VERSION 1
+#define VK_SEC_AMIGO_PROFILING_EXTENSION_NAME "VK_SEC_amigo_profiling"
+typedef struct VkPhysicalDeviceAmigoProfilingFeaturesSEC {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           amigoProfiling;
+} VkPhysicalDeviceAmigoProfilingFeaturesSEC;
+
+typedef struct VkAmigoProfilingSubmitInfoSEC {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint64_t           firstDrawTimestamp;
+    uint64_t           swapBufferTimestamp;
+} VkAmigoProfilingSubmitInfoSEC;
+
+
+
+#define VK_QCOM_multiview_per_view_viewports 1
+#define VK_QCOM_MULTIVIEW_PER_VIEW_VIEWPORTS_SPEC_VERSION 1
+#define VK_QCOM_MULTIVIEW_PER_VIEW_VIEWPORTS_EXTENSION_NAME "VK_QCOM_multiview_per_view_viewports"
+typedef struct VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           multiviewPerViewViewports;
+} VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM;
+
+
+
+#define VK_NV_ray_tracing_invocation_reorder 1
+#define VK_NV_RAY_TRACING_INVOCATION_REORDER_SPEC_VERSION 1
+#define VK_NV_RAY_TRACING_INVOCATION_REORDER_EXTENSION_NAME "VK_NV_ray_tracing_invocation_reorder"
+
+typedef enum VkRayTracingInvocationReorderModeNV {
+    VK_RAY_TRACING_INVOCATION_REORDER_MODE_NONE_NV = 0,
+    VK_RAY_TRACING_INVOCATION_REORDER_MODE_REORDER_NV = 1,
+    VK_RAY_TRACING_INVOCATION_REORDER_MODE_MAX_ENUM_NV = 0x7FFFFFFF
+} VkRayTracingInvocationReorderModeNV;
+typedef struct VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV {
+    VkStructureType                        sType;
+    void*                                  pNext;
+    VkRayTracingInvocationReorderModeNV    rayTracingInvocationReorderReorderingHint;
+} VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV;
+
+typedef struct VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           rayTracingInvocationReorder;
+} VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV;
+
+
+
+#define VK_EXT_mutable_descriptor_type 1
+#define VK_EXT_MUTABLE_DESCRIPTOR_TYPE_SPEC_VERSION 1
+#define VK_EXT_MUTABLE_DESCRIPTOR_TYPE_EXTENSION_NAME "VK_EXT_mutable_descriptor_type"
+
+
+#define VK_ARM_shader_core_builtins 1
+#define VK_ARM_SHADER_CORE_BUILTINS_SPEC_VERSION 2
+#define VK_ARM_SHADER_CORE_BUILTINS_EXTENSION_NAME "VK_ARM_shader_core_builtins"
+typedef struct VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           shaderCoreBuiltins;
+} VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM;
+
+typedef struct VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM {
+    VkStructureType    sType;
+    void*              pNext;
+    uint64_t           shaderCoreMask;
+    uint32_t           shaderCoreCount;
+    uint32_t           shaderWarpsPerCore;
+} VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM;
+
+
+
+#define VK_KHR_acceleration_structure 1
+#define VK_KHR_ACCELERATION_STRUCTURE_SPEC_VERSION 13
+#define VK_KHR_ACCELERATION_STRUCTURE_EXTENSION_NAME "VK_KHR_acceleration_structure"
+
+typedef enum VkBuildAccelerationStructureModeKHR {
+    VK_BUILD_ACCELERATION_STRUCTURE_MODE_BUILD_KHR = 0,
+    VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR = 1,
+    VK_BUILD_ACCELERATION_STRUCTURE_MODE_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkBuildAccelerationStructureModeKHR;
+
+typedef enum VkAccelerationStructureCreateFlagBitsKHR {
+    VK_ACCELERATION_STRUCTURE_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR = 0x00000001,
+    VK_ACCELERATION_STRUCTURE_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT = 0x00000008,
+    VK_ACCELERATION_STRUCTURE_CREATE_MOTION_BIT_NV = 0x00000004,
+    VK_ACCELERATION_STRUCTURE_CREATE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkAccelerationStructureCreateFlagBitsKHR;
+typedef VkFlags VkAccelerationStructureCreateFlagsKHR;
+typedef struct VkAccelerationStructureBuildRangeInfoKHR {
+    uint32_t    primitiveCount;
+    uint32_t    primitiveOffset;
+    uint32_t    firstVertex;
+    uint32_t    transformOffset;
+} VkAccelerationStructureBuildRangeInfoKHR;
+
+typedef struct VkAccelerationStructureGeometryTrianglesDataKHR {
+    VkStructureType                  sType;
+    const void*                      pNext;
+    VkFormat                         vertexFormat;
+    VkDeviceOrHostAddressConstKHR    vertexData;
+    VkDeviceSize                     vertexStride;
+    uint32_t                         maxVertex;
+    VkIndexType                      indexType;
+    VkDeviceOrHostAddressConstKHR    indexData;
+    VkDeviceOrHostAddressConstKHR    transformData;
+} VkAccelerationStructureGeometryTrianglesDataKHR;
+
+typedef struct VkAccelerationStructureGeometryAabbsDataKHR {
+    VkStructureType                  sType;
+    const void*                      pNext;
+    VkDeviceOrHostAddressConstKHR    data;
+    VkDeviceSize                     stride;
+} VkAccelerationStructureGeometryAabbsDataKHR;
+
+typedef struct VkAccelerationStructureGeometryInstancesDataKHR {
+    VkStructureType                  sType;
+    const void*                      pNext;
+    VkBool32                         arrayOfPointers;
+    VkDeviceOrHostAddressConstKHR    data;
+} VkAccelerationStructureGeometryInstancesDataKHR;
+
+typedef union VkAccelerationStructureGeometryDataKHR {
+    VkAccelerationStructureGeometryTrianglesDataKHR    triangles;
+    VkAccelerationStructureGeometryAabbsDataKHR        aabbs;
+    VkAccelerationStructureGeometryInstancesDataKHR    instances;
+} VkAccelerationStructureGeometryDataKHR;
+
+typedef struct VkAccelerationStructureGeometryKHR {
+    VkStructureType                           sType;
+    const void*                               pNext;
+    VkGeometryTypeKHR                         geometryType;
+    VkAccelerationStructureGeometryDataKHR    geometry;
+    VkGeometryFlagsKHR                        flags;
+} VkAccelerationStructureGeometryKHR;
+
+typedef struct VkAccelerationStructureBuildGeometryInfoKHR {
+    VkStructureType                                     sType;
+    const void*                                         pNext;
+    VkAccelerationStructureTypeKHR                      type;
+    VkBuildAccelerationStructureFlagsKHR                flags;
+    VkBuildAccelerationStructureModeKHR                 mode;
+    VkAccelerationStructureKHR                          srcAccelerationStructure;
+    VkAccelerationStructureKHR                          dstAccelerationStructure;
+    uint32_t                                            geometryCount;
+    const VkAccelerationStructureGeometryKHR*           pGeometries;
+    const VkAccelerationStructureGeometryKHR* const*    ppGeometries;
+    VkDeviceOrHostAddressKHR                            scratchData;
+} VkAccelerationStructureBuildGeometryInfoKHR;
+
+typedef struct VkAccelerationStructureCreateInfoKHR {
+    VkStructureType                          sType;
+    const void*                              pNext;
+    VkAccelerationStructureCreateFlagsKHR    createFlags;
+    VkBuffer                                 buffer;
+    VkDeviceSize                             offset;
+    VkDeviceSize                             size;
+    VkAccelerationStructureTypeKHR           type;
+    VkDeviceAddress                          deviceAddress;
+} VkAccelerationStructureCreateInfoKHR;
+
+typedef struct VkWriteDescriptorSetAccelerationStructureKHR {
+    VkStructureType                      sType;
+    const void*                          pNext;
+    uint32_t                             accelerationStructureCount;
+    const VkAccelerationStructureKHR*    pAccelerationStructures;
+} VkWriteDescriptorSetAccelerationStructureKHR;
+
+typedef struct VkPhysicalDeviceAccelerationStructureFeaturesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           accelerationStructure;
+    VkBool32           accelerationStructureCaptureReplay;
+    VkBool32           accelerationStructureIndirectBuild;
+    VkBool32           accelerationStructureHostCommands;
+    VkBool32           descriptorBindingAccelerationStructureUpdateAfterBind;
+} VkPhysicalDeviceAccelerationStructureFeaturesKHR;
+
+typedef struct VkPhysicalDeviceAccelerationStructurePropertiesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    uint64_t           maxGeometryCount;
+    uint64_t           maxInstanceCount;
+    uint64_t           maxPrimitiveCount;
+    uint32_t           maxPerStageDescriptorAccelerationStructures;
+    uint32_t           maxPerStageDescriptorUpdateAfterBindAccelerationStructures;
+    uint32_t           maxDescriptorSetAccelerationStructures;
+    uint32_t           maxDescriptorSetUpdateAfterBindAccelerationStructures;
+    uint32_t           minAccelerationStructureScratchOffsetAlignment;
+} VkPhysicalDeviceAccelerationStructurePropertiesKHR;
+
+typedef struct VkAccelerationStructureDeviceAddressInfoKHR {
+    VkStructureType               sType;
+    const void*                   pNext;
+    VkAccelerationStructureKHR    accelerationStructure;
+} VkAccelerationStructureDeviceAddressInfoKHR;
+
+typedef struct VkAccelerationStructureVersionInfoKHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    const uint8_t*     pVersionData;
+} VkAccelerationStructureVersionInfoKHR;
+
+typedef struct VkCopyAccelerationStructureToMemoryInfoKHR {
+    VkStructureType                       sType;
+    const void*                           pNext;
+    VkAccelerationStructureKHR            src;
+    VkDeviceOrHostAddressKHR              dst;
+    VkCopyAccelerationStructureModeKHR    mode;
+} VkCopyAccelerationStructureToMemoryInfoKHR;
+
+typedef struct VkCopyMemoryToAccelerationStructureInfoKHR {
+    VkStructureType                       sType;
+    const void*                           pNext;
+    VkDeviceOrHostAddressConstKHR         src;
+    VkAccelerationStructureKHR            dst;
+    VkCopyAccelerationStructureModeKHR    mode;
+} VkCopyMemoryToAccelerationStructureInfoKHR;
+
+typedef struct VkCopyAccelerationStructureInfoKHR {
+    VkStructureType                       sType;
+    const void*                           pNext;
+    VkAccelerationStructureKHR            src;
+    VkAccelerationStructureKHR            dst;
+    VkCopyAccelerationStructureModeKHR    mode;
+} VkCopyAccelerationStructureInfoKHR;
+
+typedef struct VkAccelerationStructureBuildSizesInfoKHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkDeviceSize       accelerationStructureSize;
+    VkDeviceSize       updateScratchSize;
+    VkDeviceSize       buildScratchSize;
+} VkAccelerationStructureBuildSizesInfoKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateAccelerationStructureKHR)(VkDevice                                           device, const VkAccelerationStructureCreateInfoKHR*        pCreateInfo, const VkAllocationCallbacks*       pAllocator, VkAccelerationStructureKHR*                        pAccelerationStructure);
+typedef void (VKAPI_PTR *PFN_vkDestroyAccelerationStructureKHR)(VkDevice device, VkAccelerationStructureKHR accelerationStructure, const VkAllocationCallbacks* pAllocator);
+typedef void (VKAPI_PTR *PFN_vkCmdBuildAccelerationStructuresKHR)(VkCommandBuffer                                    commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, const VkAccelerationStructureBuildRangeInfoKHR* const* ppBuildRangeInfos);
+typedef void (VKAPI_PTR *PFN_vkCmdBuildAccelerationStructuresIndirectKHR)(VkCommandBuffer                  commandBuffer, uint32_t                                           infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, const VkDeviceAddress*             pIndirectDeviceAddresses, const uint32_t*                    pIndirectStrides, const uint32_t* const*             ppMaxPrimitiveCounts);
+typedef VkResult (VKAPI_PTR *PFN_vkBuildAccelerationStructuresKHR)(VkDevice                                           device, VkDeferredOperationKHR deferredOperation, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, const VkAccelerationStructureBuildRangeInfoKHR* const* ppBuildRangeInfos);
+typedef VkResult (VKAPI_PTR *PFN_vkCopyAccelerationStructureKHR)(VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyAccelerationStructureInfoKHR* pInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkCopyAccelerationStructureToMemoryKHR)(VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkCopyMemoryToAccelerationStructureKHR)(VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkWriteAccelerationStructuresPropertiesKHR)(VkDevice device, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType  queryType, size_t       dataSize, void* pData, size_t stride);
+typedef void (VKAPI_PTR *PFN_vkCmdCopyAccelerationStructureKHR)(VkCommandBuffer commandBuffer, const VkCopyAccelerationStructureInfoKHR* pInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdCopyAccelerationStructureToMemoryKHR)(VkCommandBuffer commandBuffer, const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdCopyMemoryToAccelerationStructureKHR)(VkCommandBuffer commandBuffer, const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo);
+typedef VkDeviceAddress (VKAPI_PTR *PFN_vkGetAccelerationStructureDeviceAddressKHR)(VkDevice device, const VkAccelerationStructureDeviceAddressInfoKHR* pInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdWriteAccelerationStructuresPropertiesKHR)(VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery);
+typedef void (VKAPI_PTR *PFN_vkGetDeviceAccelerationStructureCompatibilityKHR)(VkDevice device, const VkAccelerationStructureVersionInfoKHR* pVersionInfo, VkAccelerationStructureCompatibilityKHR* pCompatibility);
+typedef void (VKAPI_PTR *PFN_vkGetAccelerationStructureBuildSizesKHR)(VkDevice                                            device, VkAccelerationStructureBuildTypeKHR                 buildType, const VkAccelerationStructureBuildGeometryInfoKHR*  pBuildInfo, const uint32_t*  pMaxPrimitiveCounts, VkAccelerationStructureBuildSizesInfoKHR*           pSizeInfo);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateAccelerationStructureKHR(
+    VkDevice                                    device,
+    const VkAccelerationStructureCreateInfoKHR* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkAccelerationStructureKHR*                 pAccelerationStructure);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyAccelerationStructureKHR(
+    VkDevice                                    device,
+    VkAccelerationStructureKHR                  accelerationStructure,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBuildAccelerationStructuresKHR(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    infoCount,
+    const VkAccelerationStructureBuildGeometryInfoKHR* pInfos,
+    const VkAccelerationStructureBuildRangeInfoKHR* const* ppBuildRangeInfos);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBuildAccelerationStructuresIndirectKHR(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    infoCount,
+    const VkAccelerationStructureBuildGeometryInfoKHR* pInfos,
+    const VkDeviceAddress*                      pIndirectDeviceAddresses,
+    const uint32_t*                             pIndirectStrides,
+    const uint32_t* const*                      ppMaxPrimitiveCounts);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkBuildAccelerationStructuresKHR(
+    VkDevice                                    device,
+    VkDeferredOperationKHR                      deferredOperation,
+    uint32_t                                    infoCount,
+    const VkAccelerationStructureBuildGeometryInfoKHR* pInfos,
+    const VkAccelerationStructureBuildRangeInfoKHR* const* ppBuildRangeInfos);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCopyAccelerationStructureKHR(
+    VkDevice                                    device,
+    VkDeferredOperationKHR                      deferredOperation,
+    const VkCopyAccelerationStructureInfoKHR*   pInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCopyAccelerationStructureToMemoryKHR(
+    VkDevice                                    device,
+    VkDeferredOperationKHR                      deferredOperation,
+    const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCopyMemoryToAccelerationStructureKHR(
+    VkDevice                                    device,
+    VkDeferredOperationKHR                      deferredOperation,
+    const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkWriteAccelerationStructuresPropertiesKHR(
+    VkDevice                                    device,
+    uint32_t                                    accelerationStructureCount,
+    const VkAccelerationStructureKHR*           pAccelerationStructures,
+    VkQueryType                                 queryType,
+    size_t                                      dataSize,
+    void*                                       pData,
+    size_t                                      stride);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyAccelerationStructureKHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkCopyAccelerationStructureInfoKHR*   pInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyAccelerationStructureToMemoryKHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyMemoryToAccelerationStructureKHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo);
+
+VKAPI_ATTR VkDeviceAddress VKAPI_CALL vkGetAccelerationStructureDeviceAddressKHR(
+    VkDevice                                    device,
+    const VkAccelerationStructureDeviceAddressInfoKHR* pInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdWriteAccelerationStructuresPropertiesKHR(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    accelerationStructureCount,
+    const VkAccelerationStructureKHR*           pAccelerationStructures,
+    VkQueryType                                 queryType,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery);
+
+VKAPI_ATTR void VKAPI_CALL vkGetDeviceAccelerationStructureCompatibilityKHR(
+    VkDevice                                    device,
+    const VkAccelerationStructureVersionInfoKHR* pVersionInfo,
+    VkAccelerationStructureCompatibilityKHR*    pCompatibility);
+
+VKAPI_ATTR void VKAPI_CALL vkGetAccelerationStructureBuildSizesKHR(
+    VkDevice                                    device,
+    VkAccelerationStructureBuildTypeKHR         buildType,
+    const VkAccelerationStructureBuildGeometryInfoKHR* pBuildInfo,
+    const uint32_t*                             pMaxPrimitiveCounts,
+    VkAccelerationStructureBuildSizesInfoKHR*   pSizeInfo);
+#endif
+
+
+#define VK_KHR_ray_tracing_pipeline 1
+#define VK_KHR_RAY_TRACING_PIPELINE_SPEC_VERSION 1
+#define VK_KHR_RAY_TRACING_PIPELINE_EXTENSION_NAME "VK_KHR_ray_tracing_pipeline"
+
+typedef enum VkShaderGroupShaderKHR {
+    VK_SHADER_GROUP_SHADER_GENERAL_KHR = 0,
+    VK_SHADER_GROUP_SHADER_CLOSEST_HIT_KHR = 1,
+    VK_SHADER_GROUP_SHADER_ANY_HIT_KHR = 2,
+    VK_SHADER_GROUP_SHADER_INTERSECTION_KHR = 3,
+    VK_SHADER_GROUP_SHADER_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkShaderGroupShaderKHR;
+typedef struct VkRayTracingShaderGroupCreateInfoKHR {
+    VkStructureType                   sType;
+    const void*                       pNext;
+    VkRayTracingShaderGroupTypeKHR    type;
+    uint32_t                          generalShader;
+    uint32_t                          closestHitShader;
+    uint32_t                          anyHitShader;
+    uint32_t                          intersectionShader;
+    const void*                       pShaderGroupCaptureReplayHandle;
+} VkRayTracingShaderGroupCreateInfoKHR;
+
+typedef struct VkRayTracingPipelineInterfaceCreateInfoKHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           maxPipelineRayPayloadSize;
+    uint32_t           maxPipelineRayHitAttributeSize;
+} VkRayTracingPipelineInterfaceCreateInfoKHR;
+
+typedef struct VkRayTracingPipelineCreateInfoKHR {
+    VkStructureType                                      sType;
+    const void*                                          pNext;
+    VkPipelineCreateFlags                                flags;
+    uint32_t                                             stageCount;
+    const VkPipelineShaderStageCreateInfo*               pStages;
+    uint32_t                                             groupCount;
+    const VkRayTracingShaderGroupCreateInfoKHR*          pGroups;
+    uint32_t                                             maxPipelineRayRecursionDepth;
+    const VkPipelineLibraryCreateInfoKHR*                pLibraryInfo;
+    const VkRayTracingPipelineInterfaceCreateInfoKHR*    pLibraryInterface;
+    const VkPipelineDynamicStateCreateInfo*              pDynamicState;
+    VkPipelineLayout                                     layout;
+    VkPipeline                                           basePipelineHandle;
+    int32_t                                              basePipelineIndex;
+} VkRayTracingPipelineCreateInfoKHR;
+
+typedef struct VkPhysicalDeviceRayTracingPipelineFeaturesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           rayTracingPipeline;
+    VkBool32           rayTracingPipelineShaderGroupHandleCaptureReplay;
+    VkBool32           rayTracingPipelineShaderGroupHandleCaptureReplayMixed;
+    VkBool32           rayTracingPipelineTraceRaysIndirect;
+    VkBool32           rayTraversalPrimitiveCulling;
+} VkPhysicalDeviceRayTracingPipelineFeaturesKHR;
+
+typedef struct VkPhysicalDeviceRayTracingPipelinePropertiesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           shaderGroupHandleSize;
+    uint32_t           maxRayRecursionDepth;
+    uint32_t           maxShaderGroupStride;
+    uint32_t           shaderGroupBaseAlignment;
+    uint32_t           shaderGroupHandleCaptureReplaySize;
+    uint32_t           maxRayDispatchInvocationCount;
+    uint32_t           shaderGroupHandleAlignment;
+    uint32_t           maxRayHitAttributeSize;
+} VkPhysicalDeviceRayTracingPipelinePropertiesKHR;
+
+typedef struct VkStridedDeviceAddressRegionKHR {
+    VkDeviceAddress    deviceAddress;
+    VkDeviceSize       stride;
+    VkDeviceSize       size;
+} VkStridedDeviceAddressRegionKHR;
+
+typedef struct VkTraceRaysIndirectCommandKHR {
+    uint32_t    width;
+    uint32_t    height;
+    uint32_t    depth;
+} VkTraceRaysIndirectCommandKHR;
+
+typedef void (VKAPI_PTR *PFN_vkCmdTraceRaysKHR)(VkCommandBuffer commandBuffer, const VkStridedDeviceAddressRegionKHR* pRaygenShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pMissShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pHitShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pCallableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateRayTracingPipelinesKHR)(VkDevice device, VkDeferredOperationKHR deferredOperation, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines);
+typedef VkResult (VKAPI_PTR *PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR)(VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData);
+typedef void (VKAPI_PTR *PFN_vkCmdTraceRaysIndirectKHR)(VkCommandBuffer commandBuffer, const VkStridedDeviceAddressRegionKHR* pRaygenShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pMissShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pHitShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pCallableShaderBindingTable, VkDeviceAddress indirectDeviceAddress);
+typedef VkDeviceSize (VKAPI_PTR *PFN_vkGetRayTracingShaderGroupStackSizeKHR)(VkDevice device, VkPipeline pipeline, uint32_t group, VkShaderGroupShaderKHR groupShader);
+typedef void (VKAPI_PTR *PFN_vkCmdSetRayTracingPipelineStackSizeKHR)(VkCommandBuffer commandBuffer, uint32_t pipelineStackSize);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdTraceRaysKHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkStridedDeviceAddressRegionKHR*      pRaygenShaderBindingTable,
+    const VkStridedDeviceAddressRegionKHR*      pMissShaderBindingTable,
+    const VkStridedDeviceAddressRegionKHR*      pHitShaderBindingTable,
+    const VkStridedDeviceAddressRegionKHR*      pCallableShaderBindingTable,
+    uint32_t                                    width,
+    uint32_t                                    height,
+    uint32_t                                    depth);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateRayTracingPipelinesKHR(
+    VkDevice                                    device,
+    VkDeferredOperationKHR                      deferredOperation,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    createInfoCount,
+    const VkRayTracingPipelineCreateInfoKHR*    pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipeline*                                 pPipelines);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetRayTracingCaptureReplayShaderGroupHandlesKHR(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    uint32_t                                    firstGroup,
+    uint32_t                                    groupCount,
+    size_t                                      dataSize,
+    void*                                       pData);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdTraceRaysIndirectKHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkStridedDeviceAddressRegionKHR*      pRaygenShaderBindingTable,
+    const VkStridedDeviceAddressRegionKHR*      pMissShaderBindingTable,
+    const VkStridedDeviceAddressRegionKHR*      pHitShaderBindingTable,
+    const VkStridedDeviceAddressRegionKHR*      pCallableShaderBindingTable,
+    VkDeviceAddress                             indirectDeviceAddress);
+
+VKAPI_ATTR VkDeviceSize VKAPI_CALL vkGetRayTracingShaderGroupStackSizeKHR(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    uint32_t                                    group,
+    VkShaderGroupShaderKHR                      groupShader);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetRayTracingPipelineStackSizeKHR(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    pipelineStackSize);
+#endif
+
+
+#define VK_KHR_ray_query 1
+#define VK_KHR_RAY_QUERY_SPEC_VERSION     1
+#define VK_KHR_RAY_QUERY_EXTENSION_NAME   "VK_KHR_ray_query"
+typedef struct VkPhysicalDeviceRayQueryFeaturesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           rayQuery;
+} VkPhysicalDeviceRayQueryFeaturesKHR;
+
+
+
+#define VK_EXT_mesh_shader 1
+#define VK_EXT_MESH_SHADER_SPEC_VERSION   1
+#define VK_EXT_MESH_SHADER_EXTENSION_NAME "VK_EXT_mesh_shader"
+typedef struct VkPhysicalDeviceMeshShaderFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           taskShader;
+    VkBool32           meshShader;
+    VkBool32           multiviewMeshShader;
+    VkBool32           primitiveFragmentShadingRateMeshShader;
+    VkBool32           meshShaderQueries;
+} VkPhysicalDeviceMeshShaderFeaturesEXT;
+
+typedef struct VkPhysicalDeviceMeshShaderPropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           maxTaskWorkGroupTotalCount;
+    uint32_t           maxTaskWorkGroupCount[3];
+    uint32_t           maxTaskWorkGroupInvocations;
+    uint32_t           maxTaskWorkGroupSize[3];
+    uint32_t           maxTaskPayloadSize;
+    uint32_t           maxTaskSharedMemorySize;
+    uint32_t           maxTaskPayloadAndSharedMemorySize;
+    uint32_t           maxMeshWorkGroupTotalCount;
+    uint32_t           maxMeshWorkGroupCount[3];
+    uint32_t           maxMeshWorkGroupInvocations;
+    uint32_t           maxMeshWorkGroupSize[3];
+    uint32_t           maxMeshSharedMemorySize;
+    uint32_t           maxMeshPayloadAndSharedMemorySize;
+    uint32_t           maxMeshOutputMemorySize;
+    uint32_t           maxMeshPayloadAndOutputMemorySize;
+    uint32_t           maxMeshOutputComponents;
+    uint32_t           maxMeshOutputVertices;
+    uint32_t           maxMeshOutputPrimitives;
+    uint32_t           maxMeshOutputLayers;
+    uint32_t           maxMeshMultiviewViewCount;
+    uint32_t           meshOutputPerVertexGranularity;
+    uint32_t           meshOutputPerPrimitiveGranularity;
+    uint32_t           maxPreferredTaskWorkGroupInvocations;
+    uint32_t           maxPreferredMeshWorkGroupInvocations;
+    VkBool32           prefersLocalInvocationVertexOutput;
+    VkBool32           prefersLocalInvocationPrimitiveOutput;
+    VkBool32           prefersCompactVertexOutput;
+    VkBool32           prefersCompactPrimitiveOutput;
+} VkPhysicalDeviceMeshShaderPropertiesEXT;
+
+typedef struct VkDrawMeshTasksIndirectCommandEXT {
+    uint32_t    groupCountX;
+    uint32_t    groupCountY;
+    uint32_t    groupCountZ;
+} VkDrawMeshTasksIndirectCommandEXT;
+
+typedef void (VKAPI_PTR *PFN_vkCmdDrawMeshTasksEXT)(VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ);
+typedef void (VKAPI_PTR *PFN_vkCmdDrawMeshTasksIndirectEXT)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride);
+typedef void (VKAPI_PTR *PFN_vkCmdDrawMeshTasksIndirectCountEXT)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdDrawMeshTasksEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    groupCountX,
+    uint32_t                                    groupCountY,
+    uint32_t                                    groupCountZ);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDrawMeshTasksIndirectEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    uint32_t                                    drawCount,
+    uint32_t                                    stride);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDrawMeshTasksIndirectCountEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride);
+#endif
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/host/libs/graphics_detector/include/vulkan/vulkan_directfb.h b/host/libs/graphics_detector/include/vulkan/vulkan_directfb.h
new file mode 100644
index 0000000..ab3504e
--- /dev/null
+++ b/host/libs/graphics_detector/include/vulkan/vulkan_directfb.h
@@ -0,0 +1,54 @@
+#ifndef VULKAN_DIRECTFB_H_
+#define VULKAN_DIRECTFB_H_ 1
+
+/*
+** Copyright 2015-2022 The Khronos Group Inc.
+**
+** SPDX-License-Identifier: Apache-2.0
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+#define VK_EXT_directfb_surface 1
+#define VK_EXT_DIRECTFB_SURFACE_SPEC_VERSION 1
+#define VK_EXT_DIRECTFB_SURFACE_EXTENSION_NAME "VK_EXT_directfb_surface"
+typedef VkFlags VkDirectFBSurfaceCreateFlagsEXT;
+typedef struct VkDirectFBSurfaceCreateInfoEXT {
+    VkStructureType                    sType;
+    const void*                        pNext;
+    VkDirectFBSurfaceCreateFlagsEXT    flags;
+    IDirectFB*                         dfb;
+    IDirectFBSurface*                  surface;
+} VkDirectFBSurfaceCreateInfoEXT;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateDirectFBSurfaceEXT)(VkInstance instance, const VkDirectFBSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface);
+typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, IDirectFB* dfb);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateDirectFBSurfaceEXT(
+    VkInstance                                  instance,
+    const VkDirectFBSurfaceCreateInfoEXT*       pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+
+VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceDirectFBPresentationSupportEXT(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    IDirectFB*                                  dfb);
+#endif
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/host/libs/graphics_detector/include/vulkan/vulkan_enums.hpp b/host/libs/graphics_detector/include/vulkan/vulkan_enums.hpp
new file mode 100644
index 0000000..eb91d24
--- /dev/null
+++ b/host/libs/graphics_detector/include/vulkan/vulkan_enums.hpp
@@ -0,0 +1,6939 @@
+// Copyright 2015-2022 The Khronos Group Inc.
+// 
+// SPDX-License-Identifier: Apache-2.0 OR MIT
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#ifndef VULKAN_ENUMS_HPP
+#  define VULKAN_ENUMS_HPP
+
+namespace VULKAN_HPP_NAMESPACE
+{
+  template <typename EnumType, EnumType value>
+  struct CppType
+  {};
+
+  //=============
+  //=== ENUMs ===
+  //=============
+
+
+  //=== VK_VERSION_1_0 ===
+
+  enum class Result
+  {
+    eSuccess = VK_SUCCESS,
+    eNotReady = VK_NOT_READY,
+    eTimeout = VK_TIMEOUT,
+    eEventSet = VK_EVENT_SET,
+    eEventReset = VK_EVENT_RESET,
+    eIncomplete = VK_INCOMPLETE,
+    eErrorOutOfHostMemory = VK_ERROR_OUT_OF_HOST_MEMORY,
+    eErrorOutOfDeviceMemory = VK_ERROR_OUT_OF_DEVICE_MEMORY,
+    eErrorInitializationFailed = VK_ERROR_INITIALIZATION_FAILED,
+    eErrorDeviceLost = VK_ERROR_DEVICE_LOST,
+    eErrorMemoryMapFailed = VK_ERROR_MEMORY_MAP_FAILED,
+    eErrorLayerNotPresent = VK_ERROR_LAYER_NOT_PRESENT,
+    eErrorExtensionNotPresent = VK_ERROR_EXTENSION_NOT_PRESENT,
+    eErrorFeatureNotPresent = VK_ERROR_FEATURE_NOT_PRESENT,
+    eErrorIncompatibleDriver = VK_ERROR_INCOMPATIBLE_DRIVER,
+    eErrorTooManyObjects = VK_ERROR_TOO_MANY_OBJECTS,
+    eErrorFormatNotSupported = VK_ERROR_FORMAT_NOT_SUPPORTED,
+    eErrorFragmentedPool = VK_ERROR_FRAGMENTED_POOL,
+    eErrorUnknown = VK_ERROR_UNKNOWN,
+    eErrorOutOfPoolMemory = VK_ERROR_OUT_OF_POOL_MEMORY,
+    eErrorInvalidExternalHandle = VK_ERROR_INVALID_EXTERNAL_HANDLE,
+    eErrorFragmentation = VK_ERROR_FRAGMENTATION,
+    eErrorInvalidOpaqueCaptureAddress = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS,
+    ePipelineCompileRequired = VK_PIPELINE_COMPILE_REQUIRED,
+    eErrorSurfaceLostKHR = VK_ERROR_SURFACE_LOST_KHR,
+    eErrorNativeWindowInUseKHR = VK_ERROR_NATIVE_WINDOW_IN_USE_KHR,
+    eSuboptimalKHR = VK_SUBOPTIMAL_KHR,
+    eErrorOutOfDateKHR = VK_ERROR_OUT_OF_DATE_KHR,
+    eErrorIncompatibleDisplayKHR = VK_ERROR_INCOMPATIBLE_DISPLAY_KHR,
+    eErrorValidationFailedEXT = VK_ERROR_VALIDATION_FAILED_EXT,
+    eErrorInvalidShaderNV = VK_ERROR_INVALID_SHADER_NV,
+    eErrorImageUsageNotSupportedKHR = VK_ERROR_IMAGE_USAGE_NOT_SUPPORTED_KHR,
+    eErrorVideoPictureLayoutNotSupportedKHR = VK_ERROR_VIDEO_PICTURE_LAYOUT_NOT_SUPPORTED_KHR,
+    eErrorVideoProfileOperationNotSupportedKHR = VK_ERROR_VIDEO_PROFILE_OPERATION_NOT_SUPPORTED_KHR,
+    eErrorVideoProfileFormatNotSupportedKHR = VK_ERROR_VIDEO_PROFILE_FORMAT_NOT_SUPPORTED_KHR,
+    eErrorVideoProfileCodecNotSupportedKHR = VK_ERROR_VIDEO_PROFILE_CODEC_NOT_SUPPORTED_KHR,
+    eErrorVideoStdVersionNotSupportedKHR = VK_ERROR_VIDEO_STD_VERSION_NOT_SUPPORTED_KHR,
+    eErrorInvalidDrmFormatModifierPlaneLayoutEXT = VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT,
+    eErrorNotPermittedKHR = VK_ERROR_NOT_PERMITTED_KHR,
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+    eErrorFullScreenExclusiveModeLostEXT = VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT,
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+    eThreadIdleKHR = VK_THREAD_IDLE_KHR,
+    eThreadDoneKHR = VK_THREAD_DONE_KHR,
+    eOperationDeferredKHR = VK_OPERATION_DEFERRED_KHR,
+    eOperationNotDeferredKHR = VK_OPERATION_NOT_DEFERRED_KHR,
+    eErrorCompressionExhaustedEXT = VK_ERROR_COMPRESSION_EXHAUSTED_EXT,
+    eErrorFragmentationEXT = VK_ERROR_FRAGMENTATION_EXT,
+    eErrorInvalidDeviceAddressEXT = VK_ERROR_INVALID_DEVICE_ADDRESS_EXT,
+    eErrorInvalidExternalHandleKHR = VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR,
+    eErrorInvalidOpaqueCaptureAddressKHR = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS_KHR,
+    eErrorNotPermittedEXT = VK_ERROR_NOT_PERMITTED_EXT,
+    eErrorOutOfPoolMemoryKHR = VK_ERROR_OUT_OF_POOL_MEMORY_KHR,
+    eErrorPipelineCompileRequiredEXT = VK_ERROR_PIPELINE_COMPILE_REQUIRED_EXT,
+    ePipelineCompileRequiredEXT = VK_PIPELINE_COMPILE_REQUIRED_EXT
+  };
+
+  enum class StructureType
+  {
+    eApplicationInfo = VK_STRUCTURE_TYPE_APPLICATION_INFO,
+    eInstanceCreateInfo = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
+    eDeviceQueueCreateInfo = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO,
+    eDeviceCreateInfo = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,
+    eSubmitInfo = VK_STRUCTURE_TYPE_SUBMIT_INFO,
+    eMemoryAllocateInfo = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
+    eMappedMemoryRange = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
+    eBindSparseInfo = VK_STRUCTURE_TYPE_BIND_SPARSE_INFO,
+    eFenceCreateInfo = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
+    eSemaphoreCreateInfo = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO,
+    eEventCreateInfo = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO,
+    eQueryPoolCreateInfo = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO,
+    eBufferCreateInfo = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
+    eBufferViewCreateInfo = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO,
+    eImageCreateInfo = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
+    eImageViewCreateInfo = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
+    eShaderModuleCreateInfo = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO,
+    ePipelineCacheCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO,
+    ePipelineShaderStageCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
+    ePipelineVertexInputStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,
+    ePipelineInputAssemblyStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,
+    ePipelineTessellationStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO,
+    ePipelineViewportStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,
+    ePipelineRasterizationStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,
+    ePipelineMultisampleStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,
+    ePipelineDepthStencilStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO,
+    ePipelineColorBlendStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
+    ePipelineDynamicStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO,
+    eGraphicsPipelineCreateInfo = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
+    eComputePipelineCreateInfo = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO,
+    ePipelineLayoutCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
+    eSamplerCreateInfo = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
+    eDescriptorSetLayoutCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
+    eDescriptorPoolCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
+    eDescriptorSetAllocateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
+    eWriteDescriptorSet = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
+    eCopyDescriptorSet = VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET,
+    eFramebufferCreateInfo = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
+    eRenderPassCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
+    eCommandPoolCreateInfo = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
+    eCommandBufferAllocateInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
+    eCommandBufferInheritanceInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO,
+    eCommandBufferBeginInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
+    eRenderPassBeginInfo = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
+    eBufferMemoryBarrier = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
+    eImageMemoryBarrier = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
+    eMemoryBarrier = VK_STRUCTURE_TYPE_MEMORY_BARRIER,
+    eLoaderInstanceCreateInfo = VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO,
+    eLoaderDeviceCreateInfo = VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO,
+    ePhysicalDeviceSubgroupProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES,
+    eBindBufferMemoryInfo = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO,
+    eBindImageMemoryInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO,
+    ePhysicalDevice16BitStorageFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES,
+    eMemoryDedicatedRequirements = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS,
+    eMemoryDedicatedAllocateInfo = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO,
+    eMemoryAllocateFlagsInfo = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO,
+    eDeviceGroupRenderPassBeginInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO,
+    eDeviceGroupCommandBufferBeginInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO,
+    eDeviceGroupSubmitInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO,
+    eDeviceGroupBindSparseInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO,
+    eBindBufferMemoryDeviceGroupInfo = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO,
+    eBindImageMemoryDeviceGroupInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO,
+    ePhysicalDeviceGroupProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES,
+    eDeviceGroupDeviceCreateInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO,
+    eBufferMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2,
+    eImageMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2,
+    eImageSparseMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2,
+    eMemoryRequirements2 = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2,
+    eSparseImageMemoryRequirements2 = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2,
+    ePhysicalDeviceFeatures2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2,
+    ePhysicalDeviceProperties2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2,
+    eFormatProperties2 = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2,
+    eImageFormatProperties2 = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2,
+    ePhysicalDeviceImageFormatInfo2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2,
+    eQueueFamilyProperties2 = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2,
+    ePhysicalDeviceMemoryProperties2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2,
+    eSparseImageFormatProperties2 = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2,
+    ePhysicalDeviceSparseImageFormatInfo2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2,
+    ePhysicalDevicePointClippingProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES,
+    eRenderPassInputAttachmentAspectCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO,
+    eImageViewUsageCreateInfo = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO,
+    ePipelineTessellationDomainOriginStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO,
+    eRenderPassMultiviewCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO,
+    ePhysicalDeviceMultiviewFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES,
+    ePhysicalDeviceMultiviewProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES,
+    ePhysicalDeviceVariablePointersFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES,
+    eProtectedSubmitInfo = VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO,
+    ePhysicalDeviceProtectedMemoryFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES,
+    ePhysicalDeviceProtectedMemoryProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES,
+    eDeviceQueueInfo2 = VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2,
+    eSamplerYcbcrConversionCreateInfo = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO,
+    eSamplerYcbcrConversionInfo = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO,
+    eBindImagePlaneMemoryInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO,
+    eImagePlaneMemoryRequirementsInfo = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO,
+    ePhysicalDeviceSamplerYcbcrConversionFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES,
+    eSamplerYcbcrConversionImageFormatProperties = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES,
+    eDescriptorUpdateTemplateCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO,
+    ePhysicalDeviceExternalImageFormatInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO,
+    eExternalImageFormatProperties = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES,
+    ePhysicalDeviceExternalBufferInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO,
+    eExternalBufferProperties = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES,
+    ePhysicalDeviceIdProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES,
+    eExternalMemoryBufferCreateInfo = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO,
+    eExternalMemoryImageCreateInfo = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO,
+    eExportMemoryAllocateInfo = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO,
+    ePhysicalDeviceExternalFenceInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO,
+    eExternalFenceProperties = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES,
+    eExportFenceCreateInfo = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO,
+    eExportSemaphoreCreateInfo = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO,
+    ePhysicalDeviceExternalSemaphoreInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO,
+    eExternalSemaphoreProperties = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES,
+    ePhysicalDeviceMaintenance3Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES,
+    eDescriptorSetLayoutSupport = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT,
+    ePhysicalDeviceShaderDrawParametersFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES,
+    ePhysicalDeviceVulkan11Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES,
+    ePhysicalDeviceVulkan11Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES,
+    ePhysicalDeviceVulkan12Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES,
+    ePhysicalDeviceVulkan12Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_PROPERTIES,
+    eImageFormatListCreateInfo = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO,
+    eAttachmentDescription2 = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2,
+    eAttachmentReference2 = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2,
+    eSubpassDescription2 = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2,
+    eSubpassDependency2 = VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2,
+    eRenderPassCreateInfo2 = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2,
+    eSubpassBeginInfo = VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO,
+    eSubpassEndInfo = VK_STRUCTURE_TYPE_SUBPASS_END_INFO,
+    ePhysicalDevice8BitStorageFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES,
+    ePhysicalDeviceDriverProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES,
+    ePhysicalDeviceShaderAtomicInt64Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES,
+    ePhysicalDeviceShaderFloat16Int8Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES,
+    ePhysicalDeviceFloatControlsProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES,
+    eDescriptorSetLayoutBindingFlagsCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO,
+    ePhysicalDeviceDescriptorIndexingFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES,
+    ePhysicalDeviceDescriptorIndexingProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES,
+    eDescriptorSetVariableDescriptorCountAllocateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO,
+    eDescriptorSetVariableDescriptorCountLayoutSupport = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT,
+    ePhysicalDeviceDepthStencilResolveProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES,
+    eSubpassDescriptionDepthStencilResolve = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE,
+    ePhysicalDeviceScalarBlockLayoutFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES,
+    eImageStencilUsageCreateInfo = VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO,
+    ePhysicalDeviceSamplerFilterMinmaxProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES,
+    eSamplerReductionModeCreateInfo = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO,
+    ePhysicalDeviceVulkanMemoryModelFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES,
+    ePhysicalDeviceImagelessFramebufferFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES,
+    eFramebufferAttachmentsCreateInfo = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO,
+    eFramebufferAttachmentImageInfo = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO,
+    eRenderPassAttachmentBeginInfo = VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO,
+    ePhysicalDeviceUniformBufferStandardLayoutFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES,
+    ePhysicalDeviceShaderSubgroupExtendedTypesFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES,
+    ePhysicalDeviceSeparateDepthStencilLayoutsFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES,
+    eAttachmentReferenceStencilLayout = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT,
+    eAttachmentDescriptionStencilLayout = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT,
+    ePhysicalDeviceHostQueryResetFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES,
+    ePhysicalDeviceTimelineSemaphoreFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES,
+    ePhysicalDeviceTimelineSemaphoreProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES,
+    eSemaphoreTypeCreateInfo = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO,
+    eTimelineSemaphoreSubmitInfo = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO,
+    eSemaphoreWaitInfo = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO,
+    eSemaphoreSignalInfo = VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO,
+    ePhysicalDeviceBufferDeviceAddressFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES,
+    eBufferDeviceAddressInfo = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO,
+    eBufferOpaqueCaptureAddressCreateInfo = VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO,
+    eMemoryOpaqueCaptureAddressAllocateInfo = VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO,
+    eDeviceMemoryOpaqueCaptureAddressInfo = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO,
+    ePhysicalDeviceVulkan13Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_FEATURES,
+    ePhysicalDeviceVulkan13Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_PROPERTIES,
+    ePipelineCreationFeedbackCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO,
+    ePhysicalDeviceShaderTerminateInvocationFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES,
+    ePhysicalDeviceToolProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES,
+    ePhysicalDeviceShaderDemoteToHelperInvocationFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES,
+    ePhysicalDevicePrivateDataFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES,
+    eDevicePrivateDataCreateInfo = VK_STRUCTURE_TYPE_DEVICE_PRIVATE_DATA_CREATE_INFO,
+    ePrivateDataSlotCreateInfo = VK_STRUCTURE_TYPE_PRIVATE_DATA_SLOT_CREATE_INFO,
+    ePhysicalDevicePipelineCreationCacheControlFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES,
+    eMemoryBarrier2 = VK_STRUCTURE_TYPE_MEMORY_BARRIER_2,
+    eBufferMemoryBarrier2 = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER_2,
+    eImageMemoryBarrier2 = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2,
+    eDependencyInfo = VK_STRUCTURE_TYPE_DEPENDENCY_INFO,
+    eSubmitInfo2 = VK_STRUCTURE_TYPE_SUBMIT_INFO_2,
+    eSemaphoreSubmitInfo = VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO,
+    eCommandBufferSubmitInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO,
+    ePhysicalDeviceSynchronization2Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES,
+    ePhysicalDeviceZeroInitializeWorkgroupMemoryFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES,
+    ePhysicalDeviceImageRobustnessFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES,
+    eCopyBufferInfo2 = VK_STRUCTURE_TYPE_COPY_BUFFER_INFO_2,
+    eCopyImageInfo2 = VK_STRUCTURE_TYPE_COPY_IMAGE_INFO_2,
+    eCopyBufferToImageInfo2 = VK_STRUCTURE_TYPE_COPY_BUFFER_TO_IMAGE_INFO_2,
+    eCopyImageToBufferInfo2 = VK_STRUCTURE_TYPE_COPY_IMAGE_TO_BUFFER_INFO_2,
+    eBlitImageInfo2 = VK_STRUCTURE_TYPE_BLIT_IMAGE_INFO_2,
+    eResolveImageInfo2 = VK_STRUCTURE_TYPE_RESOLVE_IMAGE_INFO_2,
+    eBufferCopy2 = VK_STRUCTURE_TYPE_BUFFER_COPY_2,
+    eImageCopy2 = VK_STRUCTURE_TYPE_IMAGE_COPY_2,
+    eImageBlit2 = VK_STRUCTURE_TYPE_IMAGE_BLIT_2,
+    eBufferImageCopy2 = VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2,
+    eImageResolve2 = VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2,
+    ePhysicalDeviceSubgroupSizeControlProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES,
+    ePipelineShaderStageRequiredSubgroupSizeCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO,
+    ePhysicalDeviceSubgroupSizeControlFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES,
+    ePhysicalDeviceInlineUniformBlockFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES,
+    ePhysicalDeviceInlineUniformBlockProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES,
+    eWriteDescriptorSetInlineUniformBlock = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK,
+    eDescriptorPoolInlineUniformBlockCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO,
+    ePhysicalDeviceTextureCompressionAstcHdrFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES,
+    eRenderingInfo = VK_STRUCTURE_TYPE_RENDERING_INFO,
+    eRenderingAttachmentInfo = VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_INFO,
+    ePipelineRenderingCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_RENDERING_CREATE_INFO,
+    ePhysicalDeviceDynamicRenderingFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES,
+    eCommandBufferInheritanceRenderingInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDERING_INFO,
+    ePhysicalDeviceShaderIntegerDotProductFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_FEATURES,
+    ePhysicalDeviceShaderIntegerDotProductProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES,
+    ePhysicalDeviceTexelBufferAlignmentProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES,
+    eFormatProperties3 = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_3,
+    ePhysicalDeviceMaintenance4Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES,
+    ePhysicalDeviceMaintenance4Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES,
+    eDeviceBufferMemoryRequirements = VK_STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS,
+    eDeviceImageMemoryRequirements = VK_STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS,
+    eSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR,
+    ePresentInfoKHR = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR,
+    eDeviceGroupPresentCapabilitiesKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR,
+    eImageSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR,
+    eBindImageMemorySwapchainInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR,
+    eAcquireNextImageInfoKHR = VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR,
+    eDeviceGroupPresentInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR,
+    eDeviceGroupSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR,
+    eDisplayModeCreateInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR,
+    eDisplaySurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR,
+    eDisplayPresentInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR,
+#if defined( VK_USE_PLATFORM_XLIB_KHR )
+    eXlibSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR,
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+#if defined( VK_USE_PLATFORM_XCB_KHR )
+    eXcbSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR,
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
+    eWaylandSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR,
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+    eAndroidSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR,
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+    eWin32SurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR,
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+    eDebugReportCallbackCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT,
+    ePipelineRasterizationStateRasterizationOrderAMD = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD,
+    eDebugMarkerObjectNameInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT,
+    eDebugMarkerObjectTagInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT,
+    eDebugMarkerMarkerInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT,
+    eVideoProfileInfoKHR = VK_STRUCTURE_TYPE_VIDEO_PROFILE_INFO_KHR,
+    eVideoCapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_CAPABILITIES_KHR,
+    eVideoPictureResourceInfoKHR = VK_STRUCTURE_TYPE_VIDEO_PICTURE_RESOURCE_INFO_KHR,
+    eVideoSessionMemoryRequirementsKHR = VK_STRUCTURE_TYPE_VIDEO_SESSION_MEMORY_REQUIREMENTS_KHR,
+    eBindVideoSessionMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_VIDEO_SESSION_MEMORY_INFO_KHR,
+    eVideoSessionCreateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_SESSION_CREATE_INFO_KHR,
+    eVideoSessionParametersCreateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_SESSION_PARAMETERS_CREATE_INFO_KHR,
+    eVideoSessionParametersUpdateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_SESSION_PARAMETERS_UPDATE_INFO_KHR,
+    eVideoBeginCodingInfoKHR = VK_STRUCTURE_TYPE_VIDEO_BEGIN_CODING_INFO_KHR,
+    eVideoEndCodingInfoKHR = VK_STRUCTURE_TYPE_VIDEO_END_CODING_INFO_KHR,
+    eVideoCodingControlInfoKHR = VK_STRUCTURE_TYPE_VIDEO_CODING_CONTROL_INFO_KHR,
+    eVideoReferenceSlotInfoKHR = VK_STRUCTURE_TYPE_VIDEO_REFERENCE_SLOT_INFO_KHR,
+    eQueueFamilyVideoPropertiesKHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_VIDEO_PROPERTIES_KHR,
+    eVideoProfileListInfoKHR = VK_STRUCTURE_TYPE_VIDEO_PROFILE_LIST_INFO_KHR,
+    ePhysicalDeviceVideoFormatInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_FORMAT_INFO_KHR,
+    eVideoFormatPropertiesKHR = VK_STRUCTURE_TYPE_VIDEO_FORMAT_PROPERTIES_KHR,
+    eQueueFamilyQueryResultStatusPropertiesKHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_QUERY_RESULT_STATUS_PROPERTIES_KHR,
+    eVideoDecodeInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_INFO_KHR,
+    eVideoDecodeCapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_CAPABILITIES_KHR,
+    eVideoDecodeUsageInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_USAGE_INFO_KHR,
+    eDedicatedAllocationImageCreateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV,
+    eDedicatedAllocationBufferCreateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV,
+    eDedicatedAllocationMemoryAllocateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV,
+    ePhysicalDeviceTransformFeedbackFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT,
+    ePhysicalDeviceTransformFeedbackPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT,
+    ePipelineRasterizationStateStreamCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT,
+    eCuModuleCreateInfoNVX = VK_STRUCTURE_TYPE_CU_MODULE_CREATE_INFO_NVX,
+    eCuFunctionCreateInfoNVX = VK_STRUCTURE_TYPE_CU_FUNCTION_CREATE_INFO_NVX,
+    eCuLaunchInfoNVX = VK_STRUCTURE_TYPE_CU_LAUNCH_INFO_NVX,
+    eImageViewHandleInfoNVX = VK_STRUCTURE_TYPE_IMAGE_VIEW_HANDLE_INFO_NVX,
+    eImageViewAddressPropertiesNVX = VK_STRUCTURE_TYPE_IMAGE_VIEW_ADDRESS_PROPERTIES_NVX,
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    eVideoEncodeH264CapabilitiesEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_CAPABILITIES_EXT,
+    eVideoEncodeH264SessionParametersCreateInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_CREATE_INFO_EXT,
+    eVideoEncodeH264SessionParametersAddInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_ADD_INFO_EXT,
+    eVideoEncodeH264VclFrameInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_VCL_FRAME_INFO_EXT,
+    eVideoEncodeH264DpbSlotInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_DPB_SLOT_INFO_EXT,
+    eVideoEncodeH264NaluSliceInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_NALU_SLICE_INFO_EXT,
+    eVideoEncodeH264EmitPictureParametersInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_EMIT_PICTURE_PARAMETERS_INFO_EXT,
+    eVideoEncodeH264ProfileInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_PROFILE_INFO_EXT,
+    eVideoEncodeH264RateControlInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_RATE_CONTROL_INFO_EXT,
+    eVideoEncodeH264RateControlLayerInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_RATE_CONTROL_LAYER_INFO_EXT,
+    eVideoEncodeH264ReferenceListsInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_REFERENCE_LISTS_INFO_EXT,
+    eVideoEncodeH265CapabilitiesEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_CAPABILITIES_EXT,
+    eVideoEncodeH265SessionParametersCreateInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_CREATE_INFO_EXT,
+    eVideoEncodeH265SessionParametersAddInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_ADD_INFO_EXT,
+    eVideoEncodeH265VclFrameInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_VCL_FRAME_INFO_EXT,
+    eVideoEncodeH265DpbSlotInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_DPB_SLOT_INFO_EXT,
+    eVideoEncodeH265NaluSliceSegmentInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_NALU_SLICE_SEGMENT_INFO_EXT,
+    eVideoEncodeH265EmitPictureParametersInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_EMIT_PICTURE_PARAMETERS_INFO_EXT,
+    eVideoEncodeH265ProfileInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_PROFILE_INFO_EXT,
+    eVideoEncodeH265ReferenceListsInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_REFERENCE_LISTS_INFO_EXT,
+    eVideoEncodeH265RateControlInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_RATE_CONTROL_INFO_EXT,
+    eVideoEncodeH265RateControlLayerInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_RATE_CONTROL_LAYER_INFO_EXT,
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+    eVideoDecodeH264CapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_CAPABILITIES_KHR,
+    eVideoDecodeH264PictureInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PICTURE_INFO_KHR,
+    eVideoDecodeH264ProfileInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PROFILE_INFO_KHR,
+    eVideoDecodeH264SessionParametersCreateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_PARAMETERS_CREATE_INFO_KHR,
+    eVideoDecodeH264SessionParametersAddInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_PARAMETERS_ADD_INFO_KHR,
+    eVideoDecodeH264DpbSlotInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_DPB_SLOT_INFO_KHR,
+    eTextureLodGatherFormatPropertiesAMD = VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD,
+    eRenderingFragmentShadingRateAttachmentInfoKHR = VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR,
+    eRenderingFragmentDensityMapAttachmentInfoEXT = VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_INFO_EXT,
+    eAttachmentSampleCountInfoAMD = VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_AMD,
+    eMultiviewPerViewAttributesInfoNVX = VK_STRUCTURE_TYPE_MULTIVIEW_PER_VIEW_ATTRIBUTES_INFO_NVX,
+#if defined( VK_USE_PLATFORM_GGP )
+    eStreamDescriptorSurfaceCreateInfoGGP = VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP,
+#endif /*VK_USE_PLATFORM_GGP*/
+    ePhysicalDeviceCornerSampledImageFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV,
+    eExternalMemoryImageCreateInfoNV = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV,
+    eExportMemoryAllocateInfoNV = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV,
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+    eImportMemoryWin32HandleInfoNV = VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV,
+    eExportMemoryWin32HandleInfoNV = VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV,
+    eWin32KeyedMutexAcquireReleaseInfoNV = VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV,
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+    eValidationFlagsEXT = VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT,
+#if defined( VK_USE_PLATFORM_VI_NN )
+    eViSurfaceCreateInfoNN = VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN,
+#endif /*VK_USE_PLATFORM_VI_NN*/
+    eImageViewAstcDecodeModeEXT = VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT,
+    ePhysicalDeviceAstcDecodeFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT,
+    ePipelineRobustnessCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_ROBUSTNESS_CREATE_INFO_EXT,
+    ePhysicalDevicePipelineRobustnessFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_FEATURES_EXT,
+    ePhysicalDevicePipelineRobustnessPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_PROPERTIES_EXT,
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+    eImportMemoryWin32HandleInfoKHR = VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR,
+    eExportMemoryWin32HandleInfoKHR = VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR,
+    eMemoryWin32HandlePropertiesKHR = VK_STRUCTURE_TYPE_MEMORY_WIN32_HANDLE_PROPERTIES_KHR,
+    eMemoryGetWin32HandleInfoKHR = VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR,
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+    eImportMemoryFdInfoKHR = VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR,
+    eMemoryFdPropertiesKHR = VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR,
+    eMemoryGetFdInfoKHR = VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR,
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+    eWin32KeyedMutexAcquireReleaseInfoKHR = VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR,
+    eImportSemaphoreWin32HandleInfoKHR = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR,
+    eExportSemaphoreWin32HandleInfoKHR = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR,
+    eD3D12FenceSubmitInfoKHR = VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR,
+    eSemaphoreGetWin32HandleInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR,
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+    eImportSemaphoreFdInfoKHR = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR,
+    eSemaphoreGetFdInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR,
+    ePhysicalDevicePushDescriptorPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR,
+    eCommandBufferInheritanceConditionalRenderingInfoEXT = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT,
+    ePhysicalDeviceConditionalRenderingFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT,
+    eConditionalRenderingBeginInfoEXT = VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT,
+    ePresentRegionsKHR = VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR,
+    ePipelineViewportWScalingStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV,
+    eSurfaceCapabilities2EXT = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT,
+    eDisplayPowerInfoEXT = VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT,
+    eDeviceEventInfoEXT = VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT,
+    eDisplayEventInfoEXT = VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT,
+    eSwapchainCounterCreateInfoEXT = VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT,
+    ePresentTimesInfoGOOGLE = VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE,
+    ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX,
+    ePipelineViewportSwizzleStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV,
+    ePhysicalDeviceDiscardRectanglePropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT,
+    ePipelineDiscardRectangleStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT,
+    ePhysicalDeviceConservativeRasterizationPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT,
+    ePipelineRasterizationConservativeStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT,
+    ePhysicalDeviceDepthClipEnableFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT,
+    ePipelineRasterizationDepthClipStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT,
+    eHdrMetadataEXT = VK_STRUCTURE_TYPE_HDR_METADATA_EXT,
+    eSharedPresentSurfaceCapabilitiesKHR = VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR,
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+    eImportFenceWin32HandleInfoKHR = VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR,
+    eExportFenceWin32HandleInfoKHR = VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR,
+    eFenceGetWin32HandleInfoKHR = VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR,
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+    eImportFenceFdInfoKHR = VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR,
+    eFenceGetFdInfoKHR = VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR,
+    ePhysicalDevicePerformanceQueryFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_FEATURES_KHR,
+    ePhysicalDevicePerformanceQueryPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_PROPERTIES_KHR,
+    eQueryPoolPerformanceCreateInfoKHR = VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR,
+    ePerformanceQuerySubmitInfoKHR = VK_STRUCTURE_TYPE_PERFORMANCE_QUERY_SUBMIT_INFO_KHR,
+    eAcquireProfilingLockInfoKHR = VK_STRUCTURE_TYPE_ACQUIRE_PROFILING_LOCK_INFO_KHR,
+    ePerformanceCounterKHR = VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_KHR,
+    ePerformanceCounterDescriptionKHR = VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_DESCRIPTION_KHR,
+    ePhysicalDeviceSurfaceInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR,
+    eSurfaceCapabilities2KHR = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR,
+    eSurfaceFormat2KHR = VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR,
+    eDisplayProperties2KHR = VK_STRUCTURE_TYPE_DISPLAY_PROPERTIES_2_KHR,
+    eDisplayPlaneProperties2KHR = VK_STRUCTURE_TYPE_DISPLAY_PLANE_PROPERTIES_2_KHR,
+    eDisplayModeProperties2KHR = VK_STRUCTURE_TYPE_DISPLAY_MODE_PROPERTIES_2_KHR,
+    eDisplayPlaneInfo2KHR = VK_STRUCTURE_TYPE_DISPLAY_PLANE_INFO_2_KHR,
+    eDisplayPlaneCapabilities2KHR = VK_STRUCTURE_TYPE_DISPLAY_PLANE_CAPABILITIES_2_KHR,
+#if defined( VK_USE_PLATFORM_IOS_MVK )
+    eIosSurfaceCreateInfoMVK = VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK,
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+#if defined( VK_USE_PLATFORM_MACOS_MVK )
+    eMacosSurfaceCreateInfoMVK = VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK,
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+    eDebugUtilsObjectNameInfoEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT,
+    eDebugUtilsObjectTagInfoEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_TAG_INFO_EXT,
+    eDebugUtilsLabelEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT,
+    eDebugUtilsMessengerCallbackDataEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT,
+    eDebugUtilsMessengerCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT,
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+    eAndroidHardwareBufferUsageANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID,
+    eAndroidHardwareBufferPropertiesANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID,
+    eAndroidHardwareBufferFormatPropertiesANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID,
+    eImportAndroidHardwareBufferInfoANDROID = VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID,
+    eMemoryGetAndroidHardwareBufferInfoANDROID = VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID,
+    eExternalFormatANDROID = VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID,
+    eAndroidHardwareBufferFormatProperties2ANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_2_ANDROID,
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+    eSampleLocationsInfoEXT = VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT,
+    eRenderPassSampleLocationsBeginInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT,
+    ePipelineSampleLocationsStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT,
+    ePhysicalDeviceSampleLocationsPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT,
+    eMultisamplePropertiesEXT = VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT,
+    ePhysicalDeviceBlendOperationAdvancedFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT,
+    ePhysicalDeviceBlendOperationAdvancedPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT,
+    ePipelineColorBlendAdvancedStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT,
+    ePipelineCoverageToColorStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV,
+    eWriteDescriptorSetAccelerationStructureKHR = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR,
+    eAccelerationStructureBuildGeometryInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_GEOMETRY_INFO_KHR,
+    eAccelerationStructureDeviceAddressInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_DEVICE_ADDRESS_INFO_KHR,
+    eAccelerationStructureGeometryAabbsDataKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_AABBS_DATA_KHR,
+    eAccelerationStructureGeometryInstancesDataKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_INSTANCES_DATA_KHR,
+    eAccelerationStructureGeometryTrianglesDataKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_TRIANGLES_DATA_KHR,
+    eAccelerationStructureGeometryKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_KHR,
+    eAccelerationStructureVersionInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_VERSION_INFO_KHR,
+    eCopyAccelerationStructureInfoKHR = VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_INFO_KHR,
+    eCopyAccelerationStructureToMemoryInfoKHR = VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_TO_MEMORY_INFO_KHR,
+    eCopyMemoryToAccelerationStructureInfoKHR = VK_STRUCTURE_TYPE_COPY_MEMORY_TO_ACCELERATION_STRUCTURE_INFO_KHR,
+    ePhysicalDeviceAccelerationStructureFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_FEATURES_KHR,
+    ePhysicalDeviceAccelerationStructurePropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_PROPERTIES_KHR,
+    eAccelerationStructureCreateInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_KHR,
+    eAccelerationStructureBuildSizesInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_SIZES_INFO_KHR,
+    ePhysicalDeviceRayTracingPipelineFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_FEATURES_KHR,
+    ePhysicalDeviceRayTracingPipelinePropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_PROPERTIES_KHR,
+    eRayTracingPipelineCreateInfoKHR = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_KHR,
+    eRayTracingShaderGroupCreateInfoKHR = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_KHR,
+    eRayTracingPipelineInterfaceCreateInfoKHR = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_INTERFACE_CREATE_INFO_KHR,
+    ePhysicalDeviceRayQueryFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_QUERY_FEATURES_KHR,
+    ePipelineCoverageModulationStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV,
+    ePhysicalDeviceShaderSmBuiltinsFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV,
+    ePhysicalDeviceShaderSmBuiltinsPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV,
+    eDrmFormatModifierPropertiesListEXT = VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
+    ePhysicalDeviceImageDrmFormatModifierInfoEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT,
+    eImageDrmFormatModifierListCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT,
+    eImageDrmFormatModifierExplicitCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT,
+    eImageDrmFormatModifierPropertiesEXT = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT,
+    eDrmFormatModifierPropertiesList2EXT = VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_2_EXT,
+    eValidationCacheCreateInfoEXT = VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT,
+    eShaderModuleValidationCacheCreateInfoEXT = VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT,
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    ePhysicalDevicePortabilitySubsetFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_FEATURES_KHR,
+    ePhysicalDevicePortabilitySubsetPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_PROPERTIES_KHR,
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+    ePipelineViewportShadingRateImageStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV,
+    ePhysicalDeviceShadingRateImageFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV,
+    ePhysicalDeviceShadingRateImagePropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV,
+    ePipelineViewportCoarseSampleOrderStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV,
+    eRayTracingPipelineCreateInfoNV = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV,
+    eAccelerationStructureCreateInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV,
+    eGeometryNV = VK_STRUCTURE_TYPE_GEOMETRY_NV,
+    eGeometryTrianglesNV = VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV,
+    eGeometryAabbNV = VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV,
+    eBindAccelerationStructureMemoryInfoNV = VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV,
+    eWriteDescriptorSetAccelerationStructureNV = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV,
+    eAccelerationStructureMemoryRequirementsInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV,
+    ePhysicalDeviceRayTracingPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV,
+    eRayTracingShaderGroupCreateInfoNV = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV,
+    eAccelerationStructureInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV,
+    ePhysicalDeviceRepresentativeFragmentTestFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV,
+    ePipelineRepresentativeFragmentTestStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV,
+    ePhysicalDeviceImageViewImageFormatInfoEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT,
+    eFilterCubicImageViewImageFormatPropertiesEXT = VK_STRUCTURE_TYPE_FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT,
+    eImportMemoryHostPointerInfoEXT = VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT,
+    eMemoryHostPointerPropertiesEXT = VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT,
+    ePhysicalDeviceExternalMemoryHostPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT,
+    ePhysicalDeviceShaderClockFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR,
+    ePipelineCompilerControlCreateInfoAMD = VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD,
+    eCalibratedTimestampInfoEXT = VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT,
+    ePhysicalDeviceShaderCorePropertiesAMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD,
+    eVideoDecodeH265CapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_CAPABILITIES_KHR,
+    eVideoDecodeH265SessionParametersCreateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_CREATE_INFO_KHR,
+    eVideoDecodeH265SessionParametersAddInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_ADD_INFO_KHR,
+    eVideoDecodeH265ProfileInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PROFILE_INFO_KHR,
+    eVideoDecodeH265PictureInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PICTURE_INFO_KHR,
+    eVideoDecodeH265DpbSlotInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_DPB_SLOT_INFO_KHR,
+    eDeviceQueueGlobalPriorityCreateInfoKHR = VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_KHR,
+    ePhysicalDeviceGlobalPriorityQueryFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_KHR,
+    eQueueFamilyGlobalPriorityPropertiesKHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_KHR,
+    eDeviceMemoryOverallocationCreateInfoAMD = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD,
+    ePhysicalDeviceVertexAttributeDivisorPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT,
+    ePipelineVertexInputDivisorStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT,
+    ePhysicalDeviceVertexAttributeDivisorFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT,
+#if defined( VK_USE_PLATFORM_GGP )
+    ePresentFrameTokenGGP = VK_STRUCTURE_TYPE_PRESENT_FRAME_TOKEN_GGP,
+#endif /*VK_USE_PLATFORM_GGP*/
+    ePhysicalDeviceComputeShaderDerivativesFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV,
+    ePhysicalDeviceMeshShaderFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV,
+    ePhysicalDeviceMeshShaderPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV,
+    ePhysicalDeviceShaderImageFootprintFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV,
+    ePipelineViewportExclusiveScissorStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV,
+    ePhysicalDeviceExclusiveScissorFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV,
+    eCheckpointDataNV = VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV,
+    eQueueFamilyCheckpointPropertiesNV = VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV,
+    ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL,
+    eQueryPoolPerformanceQueryCreateInfoINTEL = VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_QUERY_CREATE_INFO_INTEL,
+    eInitializePerformanceApiInfoINTEL = VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL,
+    ePerformanceMarkerInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_MARKER_INFO_INTEL,
+    ePerformanceStreamMarkerInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_STREAM_MARKER_INFO_INTEL,
+    ePerformanceOverrideInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_OVERRIDE_INFO_INTEL,
+    ePerformanceConfigurationAcquireInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL,
+    ePhysicalDevicePciBusInfoPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT,
+    eDisplayNativeHdrSurfaceCapabilitiesAMD = VK_STRUCTURE_TYPE_DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD,
+    eSwapchainDisplayNativeHdrCreateInfoAMD = VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD,
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+    eImagepipeSurfaceCreateInfoFUCHSIA = VK_STRUCTURE_TYPE_IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA,
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+    eMetalSurfaceCreateInfoEXT = VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT,
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+    ePhysicalDeviceFragmentDensityMapFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT,
+    ePhysicalDeviceFragmentDensityMapPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT,
+    eRenderPassFragmentDensityMapCreateInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT,
+    eFragmentShadingRateAttachmentInfoKHR = VK_STRUCTURE_TYPE_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR,
+    ePipelineFragmentShadingRateStateCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_STATE_CREATE_INFO_KHR,
+    ePhysicalDeviceFragmentShadingRatePropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_PROPERTIES_KHR,
+    ePhysicalDeviceFragmentShadingRateFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_FEATURES_KHR,
+    ePhysicalDeviceFragmentShadingRateKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_KHR,
+    ePhysicalDeviceShaderCoreProperties2AMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD,
+    ePhysicalDeviceCoherentMemoryFeaturesAMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD,
+    ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_ATOMIC_INT64_FEATURES_EXT,
+    ePhysicalDeviceMemoryBudgetPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT,
+    ePhysicalDeviceMemoryPriorityFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT,
+    eMemoryPriorityAllocateInfoEXT = VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT,
+    eSurfaceProtectedCapabilitiesKHR = VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR,
+    ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV,
+    ePhysicalDeviceBufferDeviceAddressFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT,
+    eBufferDeviceAddressCreateInfoEXT = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT,
+    eValidationFeaturesEXT = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT,
+    ePhysicalDevicePresentWaitFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_WAIT_FEATURES_KHR,
+    ePhysicalDeviceCooperativeMatrixFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV,
+    eCooperativeMatrixPropertiesNV = VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_NV,
+    ePhysicalDeviceCooperativeMatrixPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_NV,
+    ePhysicalDeviceCoverageReductionModeFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV,
+    ePipelineCoverageReductionStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_REDUCTION_STATE_CREATE_INFO_NV,
+    eFramebufferMixedSamplesCombinationNV = VK_STRUCTURE_TYPE_FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV,
+    ePhysicalDeviceFragmentShaderInterlockFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT,
+    ePhysicalDeviceYcbcrImageArraysFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT,
+    ePhysicalDeviceProvokingVertexFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT,
+    ePipelineRasterizationProvokingVertexStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_PROVOKING_VERTEX_STATE_CREATE_INFO_EXT,
+    ePhysicalDeviceProvokingVertexPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_PROPERTIES_EXT,
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+    eSurfaceFullScreenExclusiveInfoEXT = VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT,
+    eSurfaceCapabilitiesFullScreenExclusiveEXT = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT,
+    eSurfaceFullScreenExclusiveWin32InfoEXT = VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT,
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+    eHeadlessSurfaceCreateInfoEXT = VK_STRUCTURE_TYPE_HEADLESS_SURFACE_CREATE_INFO_EXT,
+    ePhysicalDeviceLineRasterizationFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT,
+    ePipelineRasterizationLineStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT,
+    ePhysicalDeviceLineRasterizationPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT,
+    ePhysicalDeviceShaderAtomicFloatFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_FEATURES_EXT,
+    ePhysicalDeviceIndexTypeUint8FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT,
+    ePhysicalDeviceExtendedDynamicStateFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_FEATURES_EXT,
+    ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR,
+    ePipelineInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR,
+    ePipelineExecutablePropertiesKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR,
+    ePipelineExecutableInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR,
+    ePipelineExecutableStatisticKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR,
+    ePipelineExecutableInternalRepresentationKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR,
+    ePhysicalDeviceShaderAtomicFloat2FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_2_FEATURES_EXT,
+    eSurfacePresentModeEXT = VK_STRUCTURE_TYPE_SURFACE_PRESENT_MODE_EXT,
+    eSurfacePresentScalingCapabilitiesEXT = VK_STRUCTURE_TYPE_SURFACE_PRESENT_SCALING_CAPABILITIES_EXT,
+    eSurfacePresentModeCompatibilityEXT = VK_STRUCTURE_TYPE_SURFACE_PRESENT_MODE_COMPATIBILITY_EXT,
+    ePhysicalDeviceSwapchainMaintenance1FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SWAPCHAIN_MAINTENANCE_1_FEATURES_EXT,
+    eSwapchainPresentFenceInfoEXT = VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_FENCE_INFO_EXT,
+    eSwapchainPresentModesCreateInfoEXT = VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_MODES_CREATE_INFO_EXT,
+    eSwapchainPresentModeInfoEXT = VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_MODE_INFO_EXT,
+    eSwapchainPresentScalingCreateInfoEXT = VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_SCALING_CREATE_INFO_EXT,
+    eReleaseSwapchainImagesInfoEXT = VK_STRUCTURE_TYPE_RELEASE_SWAPCHAIN_IMAGES_INFO_EXT,
+    ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_PROPERTIES_NV,
+    eGraphicsShaderGroupCreateInfoNV = VK_STRUCTURE_TYPE_GRAPHICS_SHADER_GROUP_CREATE_INFO_NV,
+    eGraphicsPipelineShaderGroupsCreateInfoNV = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_SHADER_GROUPS_CREATE_INFO_NV,
+    eIndirectCommandsLayoutTokenNV = VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_TOKEN_NV,
+    eIndirectCommandsLayoutCreateInfoNV = VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NV,
+    eGeneratedCommandsInfoNV = VK_STRUCTURE_TYPE_GENERATED_COMMANDS_INFO_NV,
+    eGeneratedCommandsMemoryRequirementsInfoNV = VK_STRUCTURE_TYPE_GENERATED_COMMANDS_MEMORY_REQUIREMENTS_INFO_NV,
+    ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_FEATURES_NV,
+    ePhysicalDeviceInheritedViewportScissorFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INHERITED_VIEWPORT_SCISSOR_FEATURES_NV,
+    eCommandBufferInheritanceViewportScissorInfoNV = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_VIEWPORT_SCISSOR_INFO_NV,
+    ePhysicalDeviceTexelBufferAlignmentFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT,
+    eCommandBufferInheritanceRenderPassTransformInfoQCOM = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDER_PASS_TRANSFORM_INFO_QCOM,
+    eRenderPassTransformBeginInfoQCOM = VK_STRUCTURE_TYPE_RENDER_PASS_TRANSFORM_BEGIN_INFO_QCOM,
+    ePhysicalDeviceDeviceMemoryReportFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_MEMORY_REPORT_FEATURES_EXT,
+    eDeviceDeviceMemoryReportCreateInfoEXT = VK_STRUCTURE_TYPE_DEVICE_DEVICE_MEMORY_REPORT_CREATE_INFO_EXT,
+    eDeviceMemoryReportCallbackDataEXT = VK_STRUCTURE_TYPE_DEVICE_MEMORY_REPORT_CALLBACK_DATA_EXT,
+    ePhysicalDeviceRobustness2FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_FEATURES_EXT,
+    ePhysicalDeviceRobustness2PropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_PROPERTIES_EXT,
+    eSamplerCustomBorderColorCreateInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_CUSTOM_BORDER_COLOR_CREATE_INFO_EXT,
+    ePhysicalDeviceCustomBorderColorPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_PROPERTIES_EXT,
+    ePhysicalDeviceCustomBorderColorFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT,
+    ePipelineLibraryCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_LIBRARY_CREATE_INFO_KHR,
+    ePhysicalDevicePresentBarrierFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_BARRIER_FEATURES_NV,
+    eSurfaceCapabilitiesPresentBarrierNV = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_PRESENT_BARRIER_NV,
+    eSwapchainPresentBarrierCreateInfoNV = VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_BARRIER_CREATE_INFO_NV,
+    ePresentIdKHR = VK_STRUCTURE_TYPE_PRESENT_ID_KHR,
+    ePhysicalDevicePresentIdFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_ID_FEATURES_KHR,
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    eVideoEncodeInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_INFO_KHR,
+    eVideoEncodeRateControlInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_RATE_CONTROL_INFO_KHR,
+    eVideoEncodeRateControlLayerInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_RATE_CONTROL_LAYER_INFO_KHR,
+    eVideoEncodeCapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_CAPABILITIES_KHR,
+    eVideoEncodeUsageInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_USAGE_INFO_KHR,
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+    ePhysicalDeviceDiagnosticsConfigFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DIAGNOSTICS_CONFIG_FEATURES_NV,
+    eDeviceDiagnosticsConfigCreateInfoNV = VK_STRUCTURE_TYPE_DEVICE_DIAGNOSTICS_CONFIG_CREATE_INFO_NV,
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+    eExportMetalObjectCreateInfoEXT = VK_STRUCTURE_TYPE_EXPORT_METAL_OBJECT_CREATE_INFO_EXT,
+    eExportMetalObjectsInfoEXT = VK_STRUCTURE_TYPE_EXPORT_METAL_OBJECTS_INFO_EXT,
+    eExportMetalDeviceInfoEXT = VK_STRUCTURE_TYPE_EXPORT_METAL_DEVICE_INFO_EXT,
+    eExportMetalCommandQueueInfoEXT = VK_STRUCTURE_TYPE_EXPORT_METAL_COMMAND_QUEUE_INFO_EXT,
+    eExportMetalBufferInfoEXT = VK_STRUCTURE_TYPE_EXPORT_METAL_BUFFER_INFO_EXT,
+    eImportMetalBufferInfoEXT = VK_STRUCTURE_TYPE_IMPORT_METAL_BUFFER_INFO_EXT,
+    eExportMetalTextureInfoEXT = VK_STRUCTURE_TYPE_EXPORT_METAL_TEXTURE_INFO_EXT,
+    eImportMetalTextureInfoEXT = VK_STRUCTURE_TYPE_IMPORT_METAL_TEXTURE_INFO_EXT,
+    eExportMetalIoSurfaceInfoEXT = VK_STRUCTURE_TYPE_EXPORT_METAL_IO_SURFACE_INFO_EXT,
+    eImportMetalIoSurfaceInfoEXT = VK_STRUCTURE_TYPE_IMPORT_METAL_IO_SURFACE_INFO_EXT,
+    eExportMetalSharedEventInfoEXT = VK_STRUCTURE_TYPE_EXPORT_METAL_SHARED_EVENT_INFO_EXT,
+    eImportMetalSharedEventInfoEXT = VK_STRUCTURE_TYPE_IMPORT_METAL_SHARED_EVENT_INFO_EXT,
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+    eQueueFamilyCheckpointProperties2NV = VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV,
+    eCheckpointData2NV = VK_STRUCTURE_TYPE_CHECKPOINT_DATA_2_NV,
+    ePhysicalDeviceDescriptorBufferPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_PROPERTIES_EXT,
+    ePhysicalDeviceDescriptorBufferDensityMapPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_DENSITY_MAP_PROPERTIES_EXT,
+    ePhysicalDeviceDescriptorBufferFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_FEATURES_EXT,
+    eDescriptorAddressInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_ADDRESS_INFO_EXT,
+    eDescriptorGetInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_GET_INFO_EXT,
+    eBufferCaptureDescriptorDataInfoEXT = VK_STRUCTURE_TYPE_BUFFER_CAPTURE_DESCRIPTOR_DATA_INFO_EXT,
+    eImageCaptureDescriptorDataInfoEXT = VK_STRUCTURE_TYPE_IMAGE_CAPTURE_DESCRIPTOR_DATA_INFO_EXT,
+    eImageViewCaptureDescriptorDataInfoEXT = VK_STRUCTURE_TYPE_IMAGE_VIEW_CAPTURE_DESCRIPTOR_DATA_INFO_EXT,
+    eSamplerCaptureDescriptorDataInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_CAPTURE_DESCRIPTOR_DATA_INFO_EXT,
+    eOpaqueCaptureDescriptorDataCreateInfoEXT = VK_STRUCTURE_TYPE_OPAQUE_CAPTURE_DESCRIPTOR_DATA_CREATE_INFO_EXT,
+    eDescriptorBufferBindingInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_BUFFER_BINDING_INFO_EXT,
+    eDescriptorBufferBindingPushDescriptorBufferHandleEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_BUFFER_BINDING_PUSH_DESCRIPTOR_BUFFER_HANDLE_EXT,
+    eAccelerationStructureCaptureDescriptorDataInfoEXT = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CAPTURE_DESCRIPTOR_DATA_INFO_EXT,
+    ePhysicalDeviceGraphicsPipelineLibraryFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_FEATURES_EXT,
+    ePhysicalDeviceGraphicsPipelineLibraryPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_PROPERTIES_EXT,
+    eGraphicsPipelineLibraryCreateInfoEXT = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_LIBRARY_CREATE_INFO_EXT,
+    ePhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_EARLY_AND_LATE_FRAGMENT_TESTS_FEATURES_AMD,
+    ePhysicalDeviceFragmentShaderBarycentricFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_KHR,
+    ePhysicalDeviceFragmentShaderBarycentricPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_PROPERTIES_KHR,
+    ePhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_FEATURES_KHR,
+    ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_PROPERTIES_NV,
+    ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_FEATURES_NV,
+    ePipelineFragmentShadingRateEnumStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_ENUM_STATE_CREATE_INFO_NV,
+    eAccelerationStructureGeometryMotionTrianglesDataNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_MOTION_TRIANGLES_DATA_NV,
+    ePhysicalDeviceRayTracingMotionBlurFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_MOTION_BLUR_FEATURES_NV,
+    eAccelerationStructureMotionInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MOTION_INFO_NV,
+    ePhysicalDeviceMeshShaderFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_EXT,
+    ePhysicalDeviceMeshShaderPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_EXT,
+    ePhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_2_PLANE_444_FORMATS_FEATURES_EXT,
+    ePhysicalDeviceFragmentDensityMap2FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_FEATURES_EXT,
+    ePhysicalDeviceFragmentDensityMap2PropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_PROPERTIES_EXT,
+    eCopyCommandTransformInfoQCOM = VK_STRUCTURE_TYPE_COPY_COMMAND_TRANSFORM_INFO_QCOM,
+    ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_FEATURES_KHR,
+    ePhysicalDeviceImageCompressionControlFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_FEATURES_EXT,
+    eImageCompressionControlEXT = VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_CONTROL_EXT,
+    eSubresourceLayout2EXT = VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2_EXT,
+    eImageSubresource2EXT = VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2_EXT,
+    eImageCompressionPropertiesEXT = VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_PROPERTIES_EXT,
+    ePhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_FEATURES_EXT,
+    ePhysicalDevice4444FormatsFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_4444_FORMATS_FEATURES_EXT,
+    ePhysicalDeviceFaultFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FAULT_FEATURES_EXT,
+    eDeviceFaultCountsEXT = VK_STRUCTURE_TYPE_DEVICE_FAULT_COUNTS_EXT,
+    eDeviceFaultInfoEXT = VK_STRUCTURE_TYPE_DEVICE_FAULT_INFO_EXT,
+    ePhysicalDeviceRgba10X6FormatsFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RGBA10X6_FORMATS_FEATURES_EXT,
+#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
+    eDirectfbSurfaceCreateInfoEXT = VK_STRUCTURE_TYPE_DIRECTFB_SURFACE_CREATE_INFO_EXT,
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+    ePhysicalDeviceVertexInputDynamicStateFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_INPUT_DYNAMIC_STATE_FEATURES_EXT,
+    eVertexInputBindingDescription2EXT = VK_STRUCTURE_TYPE_VERTEX_INPUT_BINDING_DESCRIPTION_2_EXT,
+    eVertexInputAttributeDescription2EXT = VK_STRUCTURE_TYPE_VERTEX_INPUT_ATTRIBUTE_DESCRIPTION_2_EXT,
+    ePhysicalDeviceDrmPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRM_PROPERTIES_EXT,
+    ePhysicalDeviceAddressBindingReportFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ADDRESS_BINDING_REPORT_FEATURES_EXT,
+    eDeviceAddressBindingCallbackDataEXT = VK_STRUCTURE_TYPE_DEVICE_ADDRESS_BINDING_CALLBACK_DATA_EXT,
+    ePhysicalDeviceDepthClipControlFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_CONTROL_FEATURES_EXT,
+    ePipelineViewportDepthClipControlCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_DEPTH_CLIP_CONTROL_CREATE_INFO_EXT,
+    ePhysicalDevicePrimitiveTopologyListRestartFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIMITIVE_TOPOLOGY_LIST_RESTART_FEATURES_EXT,
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+    eImportMemoryZirconHandleInfoFUCHSIA = VK_STRUCTURE_TYPE_IMPORT_MEMORY_ZIRCON_HANDLE_INFO_FUCHSIA,
+    eMemoryZirconHandlePropertiesFUCHSIA = VK_STRUCTURE_TYPE_MEMORY_ZIRCON_HANDLE_PROPERTIES_FUCHSIA,
+    eMemoryGetZirconHandleInfoFUCHSIA = VK_STRUCTURE_TYPE_MEMORY_GET_ZIRCON_HANDLE_INFO_FUCHSIA,
+    eImportSemaphoreZirconHandleInfoFUCHSIA = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_ZIRCON_HANDLE_INFO_FUCHSIA,
+    eSemaphoreGetZirconHandleInfoFUCHSIA = VK_STRUCTURE_TYPE_SEMAPHORE_GET_ZIRCON_HANDLE_INFO_FUCHSIA,
+    eBufferCollectionCreateInfoFUCHSIA = VK_STRUCTURE_TYPE_BUFFER_COLLECTION_CREATE_INFO_FUCHSIA,
+    eImportMemoryBufferCollectionFUCHSIA = VK_STRUCTURE_TYPE_IMPORT_MEMORY_BUFFER_COLLECTION_FUCHSIA,
+    eBufferCollectionImageCreateInfoFUCHSIA = VK_STRUCTURE_TYPE_BUFFER_COLLECTION_IMAGE_CREATE_INFO_FUCHSIA,
+    eBufferCollectionPropertiesFUCHSIA = VK_STRUCTURE_TYPE_BUFFER_COLLECTION_PROPERTIES_FUCHSIA,
+    eBufferConstraintsInfoFUCHSIA = VK_STRUCTURE_TYPE_BUFFER_CONSTRAINTS_INFO_FUCHSIA,
+    eBufferCollectionBufferCreateInfoFUCHSIA = VK_STRUCTURE_TYPE_BUFFER_COLLECTION_BUFFER_CREATE_INFO_FUCHSIA,
+    eImageConstraintsInfoFUCHSIA = VK_STRUCTURE_TYPE_IMAGE_CONSTRAINTS_INFO_FUCHSIA,
+    eImageFormatConstraintsInfoFUCHSIA = VK_STRUCTURE_TYPE_IMAGE_FORMAT_CONSTRAINTS_INFO_FUCHSIA,
+    eSysmemColorSpaceFUCHSIA = VK_STRUCTURE_TYPE_SYSMEM_COLOR_SPACE_FUCHSIA,
+    eBufferCollectionConstraintsInfoFUCHSIA = VK_STRUCTURE_TYPE_BUFFER_COLLECTION_CONSTRAINTS_INFO_FUCHSIA,
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+    eSubpassShadingPipelineCreateInfoHUAWEI = VK_STRUCTURE_TYPE_SUBPASS_SHADING_PIPELINE_CREATE_INFO_HUAWEI,
+    ePhysicalDeviceSubpassShadingFeaturesHUAWEI = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBPASS_SHADING_FEATURES_HUAWEI,
+    ePhysicalDeviceSubpassShadingPropertiesHUAWEI = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBPASS_SHADING_PROPERTIES_HUAWEI,
+    ePhysicalDeviceInvocationMaskFeaturesHUAWEI = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INVOCATION_MASK_FEATURES_HUAWEI,
+    eMemoryGetRemoteAddressInfoNV = VK_STRUCTURE_TYPE_MEMORY_GET_REMOTE_ADDRESS_INFO_NV,
+    ePhysicalDeviceExternalMemoryRdmaFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_RDMA_FEATURES_NV,
+    ePipelinePropertiesIdentifierEXT = VK_STRUCTURE_TYPE_PIPELINE_PROPERTIES_IDENTIFIER_EXT,
+    ePhysicalDevicePipelinePropertiesFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROPERTIES_FEATURES_EXT,
+    ePhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_FEATURES_EXT,
+    eSubpassResolvePerformanceQueryEXT = VK_STRUCTURE_TYPE_SUBPASS_RESOLVE_PERFORMANCE_QUERY_EXT,
+    eMultisampledRenderToSingleSampledInfoEXT = VK_STRUCTURE_TYPE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_INFO_EXT,
+    ePhysicalDeviceExtendedDynamicState2FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_2_FEATURES_EXT,
+#if defined( VK_USE_PLATFORM_SCREEN_QNX )
+    eScreenSurfaceCreateInfoQNX = VK_STRUCTURE_TYPE_SCREEN_SURFACE_CREATE_INFO_QNX,
+#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+    ePhysicalDeviceColorWriteEnableFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COLOR_WRITE_ENABLE_FEATURES_EXT,
+    ePipelineColorWriteCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_COLOR_WRITE_CREATE_INFO_EXT,
+    ePhysicalDevicePrimitivesGeneratedQueryFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIMITIVES_GENERATED_QUERY_FEATURES_EXT,
+    ePhysicalDeviceRayTracingMaintenance1FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_MAINTENANCE_1_FEATURES_KHR,
+    ePhysicalDeviceImageViewMinLodFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_MIN_LOD_FEATURES_EXT,
+    eImageViewMinLodCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_VIEW_MIN_LOD_CREATE_INFO_EXT,
+    ePhysicalDeviceMultiDrawFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTI_DRAW_FEATURES_EXT,
+    ePhysicalDeviceMultiDrawPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTI_DRAW_PROPERTIES_EXT,
+    ePhysicalDeviceImage2DViewOf3DFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_2D_VIEW_OF_3D_FEATURES_EXT,
+    eMicromapBuildInfoEXT = VK_STRUCTURE_TYPE_MICROMAP_BUILD_INFO_EXT,
+    eMicromapVersionInfoEXT = VK_STRUCTURE_TYPE_MICROMAP_VERSION_INFO_EXT,
+    eCopyMicromapInfoEXT = VK_STRUCTURE_TYPE_COPY_MICROMAP_INFO_EXT,
+    eCopyMicromapToMemoryInfoEXT = VK_STRUCTURE_TYPE_COPY_MICROMAP_TO_MEMORY_INFO_EXT,
+    eCopyMemoryToMicromapInfoEXT = VK_STRUCTURE_TYPE_COPY_MEMORY_TO_MICROMAP_INFO_EXT,
+    ePhysicalDeviceOpacityMicromapFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPACITY_MICROMAP_FEATURES_EXT,
+    ePhysicalDeviceOpacityMicromapPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPACITY_MICROMAP_PROPERTIES_EXT,
+    eMicromapCreateInfoEXT = VK_STRUCTURE_TYPE_MICROMAP_CREATE_INFO_EXT,
+    eMicromapBuildSizesInfoEXT = VK_STRUCTURE_TYPE_MICROMAP_BUILD_SIZES_INFO_EXT,
+    eAccelerationStructureTrianglesOpacityMicromapEXT = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_TRIANGLES_OPACITY_MICROMAP_EXT,
+    ePhysicalDeviceClusterCullingShaderFeaturesHUAWEI = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CLUSTER_CULLING_SHADER_FEATURES_HUAWEI,
+    ePhysicalDeviceClusterCullingShaderPropertiesHUAWEI = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CLUSTER_CULLING_SHADER_PROPERTIES_HUAWEI,
+    ePhysicalDeviceBorderColorSwizzleFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BORDER_COLOR_SWIZZLE_FEATURES_EXT,
+    eSamplerBorderColorComponentMappingCreateInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_BORDER_COLOR_COMPONENT_MAPPING_CREATE_INFO_EXT,
+    ePhysicalDevicePageableDeviceLocalMemoryFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PAGEABLE_DEVICE_LOCAL_MEMORY_FEATURES_EXT,
+    ePhysicalDeviceDescriptorSetHostMappingFeaturesVALVE = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_SET_HOST_MAPPING_FEATURES_VALVE,
+    eDescriptorSetBindingReferenceVALVE = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_BINDING_REFERENCE_VALVE,
+    eDescriptorSetLayoutHostMappingInfoVALVE = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_HOST_MAPPING_INFO_VALVE,
+    ePhysicalDeviceDepthClampZeroOneFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLAMP_ZERO_ONE_FEATURES_EXT,
+    ePhysicalDeviceNonSeamlessCubeMapFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_NON_SEAMLESS_CUBE_MAP_FEATURES_EXT,
+    ePhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_FEATURES_QCOM,
+    ePhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_PROPERTIES_QCOM,
+    eSubpassFragmentDensityMapOffsetEndInfoQCOM = VK_STRUCTURE_TYPE_SUBPASS_FRAGMENT_DENSITY_MAP_OFFSET_END_INFO_QCOM,
+    ePhysicalDeviceCopyMemoryIndirectFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_FEATURES_NV,
+    ePhysicalDeviceCopyMemoryIndirectPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_PROPERTIES_NV,
+    ePhysicalDeviceMemoryDecompressionFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_DECOMPRESSION_FEATURES_NV,
+    ePhysicalDeviceMemoryDecompressionPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_DECOMPRESSION_PROPERTIES_NV,
+    ePhysicalDeviceLinearColorAttachmentFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINEAR_COLOR_ATTACHMENT_FEATURES_NV,
+    ePhysicalDeviceImageCompressionControlSwapchainFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_FEATURES_EXT,
+    ePhysicalDeviceImageProcessingFeaturesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_FEATURES_QCOM,
+    ePhysicalDeviceImageProcessingPropertiesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_PROPERTIES_QCOM,
+    eImageViewSampleWeightCreateInfoQCOM = VK_STRUCTURE_TYPE_IMAGE_VIEW_SAMPLE_WEIGHT_CREATE_INFO_QCOM,
+    ePhysicalDeviceExtendedDynamicState3FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_FEATURES_EXT,
+    ePhysicalDeviceExtendedDynamicState3PropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_PROPERTIES_EXT,
+    ePhysicalDeviceSubpassMergeFeedbackFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBPASS_MERGE_FEEDBACK_FEATURES_EXT,
+    eRenderPassCreationControlEXT = VK_STRUCTURE_TYPE_RENDER_PASS_CREATION_CONTROL_EXT,
+    eRenderPassCreationFeedbackCreateInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_CREATION_FEEDBACK_CREATE_INFO_EXT,
+    eRenderPassSubpassFeedbackCreateInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_SUBPASS_FEEDBACK_CREATE_INFO_EXT,
+    eDirectDriverLoadingInfoLUNARG = VK_STRUCTURE_TYPE_DIRECT_DRIVER_LOADING_INFO_LUNARG,
+    eDirectDriverLoadingListLUNARG = VK_STRUCTURE_TYPE_DIRECT_DRIVER_LOADING_LIST_LUNARG,
+    ePhysicalDeviceShaderModuleIdentifierFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_MODULE_IDENTIFIER_FEATURES_EXT,
+    ePhysicalDeviceShaderModuleIdentifierPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_MODULE_IDENTIFIER_PROPERTIES_EXT,
+    ePipelineShaderStageModuleIdentifierCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_MODULE_IDENTIFIER_CREATE_INFO_EXT,
+    eShaderModuleIdentifierEXT = VK_STRUCTURE_TYPE_SHADER_MODULE_IDENTIFIER_EXT,
+    ePhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_EXT,
+    ePhysicalDeviceOpticalFlowFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPTICAL_FLOW_FEATURES_NV,
+    ePhysicalDeviceOpticalFlowPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPTICAL_FLOW_PROPERTIES_NV,
+    eOpticalFlowImageFormatInfoNV = VK_STRUCTURE_TYPE_OPTICAL_FLOW_IMAGE_FORMAT_INFO_NV,
+    eOpticalFlowImageFormatPropertiesNV = VK_STRUCTURE_TYPE_OPTICAL_FLOW_IMAGE_FORMAT_PROPERTIES_NV,
+    eOpticalFlowSessionCreateInfoNV = VK_STRUCTURE_TYPE_OPTICAL_FLOW_SESSION_CREATE_INFO_NV,
+    eOpticalFlowExecuteInfoNV = VK_STRUCTURE_TYPE_OPTICAL_FLOW_EXECUTE_INFO_NV,
+    eOpticalFlowSessionCreatePrivateDataInfoNV = VK_STRUCTURE_TYPE_OPTICAL_FLOW_SESSION_CREATE_PRIVATE_DATA_INFO_NV,
+    ePhysicalDeviceLegacyDitheringFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_DITHERING_FEATURES_EXT,
+    ePhysicalDevicePipelineProtectedAccessFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROTECTED_ACCESS_FEATURES_EXT,
+    ePhysicalDeviceTilePropertiesFeaturesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TILE_PROPERTIES_FEATURES_QCOM,
+    eTilePropertiesQCOM = VK_STRUCTURE_TYPE_TILE_PROPERTIES_QCOM,
+    ePhysicalDeviceAmigoProfilingFeaturesSEC = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_AMIGO_PROFILING_FEATURES_SEC,
+    eAmigoProfilingSubmitInfoSEC = VK_STRUCTURE_TYPE_AMIGO_PROFILING_SUBMIT_INFO_SEC,
+    ePhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_VIEWPORTS_FEATURES_QCOM,
+    ePhysicalDeviceRayTracingInvocationReorderFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_INVOCATION_REORDER_FEATURES_NV,
+    ePhysicalDeviceRayTracingInvocationReorderPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_INVOCATION_REORDER_PROPERTIES_NV,
+    ePhysicalDeviceMutableDescriptorTypeFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_EXT,
+    eMutableDescriptorTypeCreateInfoEXT = VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT,
+    ePhysicalDeviceShaderCoreBuiltinsFeaturesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_FEATURES_ARM,
+    ePhysicalDeviceShaderCoreBuiltinsPropertiesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_PROPERTIES_ARM,
+    eAttachmentDescription2KHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR,
+    eAttachmentDescriptionStencilLayoutKHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT_KHR,
+    eAttachmentReference2KHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR,
+    eAttachmentReferenceStencilLayoutKHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT_KHR,
+    eAttachmentSampleCountInfoNV = VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_NV,
+    eBindBufferMemoryDeviceGroupInfoKHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO_KHR,
+    eBindBufferMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR,
+    eBindImageMemoryDeviceGroupInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO_KHR,
+    eBindImageMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR,
+    eBindImagePlaneMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO_KHR,
+    eBlitImageInfo2KHR = VK_STRUCTURE_TYPE_BLIT_IMAGE_INFO_2_KHR,
+    eBufferCopy2KHR = VK_STRUCTURE_TYPE_BUFFER_COPY_2_KHR,
+    eBufferDeviceAddressInfoEXT = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_EXT,
+    eBufferDeviceAddressInfoKHR = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_KHR,
+    eBufferImageCopy2KHR = VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2_KHR,
+    eBufferMemoryBarrier2KHR = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER_2_KHR,
+    eBufferMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR,
+    eBufferOpaqueCaptureAddressCreateInfoKHR = VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO_KHR,
+    eCommandBufferInheritanceRenderingInfoKHR = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDERING_INFO_KHR,
+    eCommandBufferSubmitInfoKHR = VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO_KHR,
+    eCopyBufferInfo2KHR = VK_STRUCTURE_TYPE_COPY_BUFFER_INFO_2_KHR,
+    eCopyBufferToImageInfo2KHR = VK_STRUCTURE_TYPE_COPY_BUFFER_TO_IMAGE_INFO_2_KHR,
+    eCopyImageInfo2KHR = VK_STRUCTURE_TYPE_COPY_IMAGE_INFO_2_KHR,
+    eCopyImageToBufferInfo2KHR = VK_STRUCTURE_TYPE_COPY_IMAGE_TO_BUFFER_INFO_2_KHR,
+    eDebugReportCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT,
+    eDependencyInfoKHR = VK_STRUCTURE_TYPE_DEPENDENCY_INFO_KHR,
+    eDescriptorPoolInlineUniformBlockCreateInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT,
+    eDescriptorSetLayoutBindingFlagsCreateInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT,
+    eDescriptorSetLayoutSupportKHR = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT_KHR,
+    eDescriptorSetVariableDescriptorCountAllocateInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT,
+    eDescriptorSetVariableDescriptorCountLayoutSupportEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT,
+    eDescriptorUpdateTemplateCreateInfoKHR = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO_KHR,
+    eDeviceBufferMemoryRequirementsKHR = VK_STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS_KHR,
+    eDeviceGroupBindSparseInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO_KHR,
+    eDeviceGroupCommandBufferBeginInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO_KHR,
+    eDeviceGroupDeviceCreateInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO_KHR,
+    eDeviceGroupRenderPassBeginInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO_KHR,
+    eDeviceGroupSubmitInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO_KHR,
+    eDeviceImageMemoryRequirementsKHR = VK_STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS_KHR,
+    eDeviceMemoryOpaqueCaptureAddressInfoKHR = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO_KHR,
+    eDevicePrivateDataCreateInfoEXT = VK_STRUCTURE_TYPE_DEVICE_PRIVATE_DATA_CREATE_INFO_EXT,
+    eDeviceQueueGlobalPriorityCreateInfoEXT = VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT,
+    eExportFenceCreateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO_KHR,
+    eExportMemoryAllocateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR,
+    eExportSemaphoreCreateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO_KHR,
+    eExternalBufferPropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES_KHR,
+    eExternalFencePropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES_KHR,
+    eExternalImageFormatPropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES_KHR,
+    eExternalMemoryBufferCreateInfoKHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO_KHR,
+    eExternalMemoryImageCreateInfoKHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_KHR,
+    eExternalSemaphorePropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES_KHR,
+    eFormatProperties2KHR = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2_KHR,
+    eFormatProperties3KHR = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_3_KHR,
+    eFramebufferAttachmentsCreateInfoKHR = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR,
+    eFramebufferAttachmentImageInfoKHR = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR,
+    eImageBlit2KHR = VK_STRUCTURE_TYPE_IMAGE_BLIT_2_KHR,
+    eImageCopy2KHR = VK_STRUCTURE_TYPE_IMAGE_COPY_2_KHR,
+    eImageFormatListCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR,
+    eImageFormatProperties2KHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2_KHR,
+    eImageMemoryBarrier2KHR = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2_KHR,
+    eImageMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR,
+    eImagePlaneMemoryRequirementsInfoKHR = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO_KHR,
+    eImageResolve2KHR = VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2_KHR,
+    eImageSparseMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2_KHR,
+    eImageStencilUsageCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT,
+    eImageViewUsageCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO_KHR,
+    eMemoryAllocateFlagsInfoKHR = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR,
+    eMemoryBarrier2KHR = VK_STRUCTURE_TYPE_MEMORY_BARRIER_2_KHR,
+    eMemoryDedicatedAllocateInfoKHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR,
+    eMemoryDedicatedRequirementsKHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR,
+    eMemoryOpaqueCaptureAddressAllocateInfoKHR = VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO_KHR,
+    eMemoryRequirements2KHR = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR,
+    eMutableDescriptorTypeCreateInfoVALVE = VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_VALVE,
+    ePhysicalDevice16BitStorageFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES_KHR,
+    ePhysicalDevice8BitStorageFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR,
+    ePhysicalDeviceBufferAddressFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_ADDRESS_FEATURES_EXT,
+    ePhysicalDeviceBufferDeviceAddressFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_KHR,
+    ePhysicalDeviceDepthStencilResolvePropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR,
+    ePhysicalDeviceDescriptorIndexingFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT,
+    ePhysicalDeviceDescriptorIndexingPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT,
+    ePhysicalDeviceDriverPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR,
+    ePhysicalDeviceDynamicRenderingFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES_KHR,
+    ePhysicalDeviceExternalBufferInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO_KHR,
+    ePhysicalDeviceExternalFenceInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO_KHR,
+    ePhysicalDeviceExternalImageFormatInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO_KHR,
+    ePhysicalDeviceExternalSemaphoreInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO_KHR,
+    ePhysicalDeviceFeatures2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR,
+    ePhysicalDeviceFloat16Int8FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT16_INT8_FEATURES_KHR,
+    ePhysicalDeviceFloatControlsPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR,
+    ePhysicalDeviceFragmentShaderBarycentricFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV,
+    ePhysicalDeviceGlobalPriorityQueryFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_EXT,
+    ePhysicalDeviceGroupPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES_KHR,
+    ePhysicalDeviceHostQueryResetFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT,
+    ePhysicalDeviceIdPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES_KHR,
+    ePhysicalDeviceImagelessFramebufferFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR,
+    ePhysicalDeviceImageFormatInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2_KHR,
+    ePhysicalDeviceImageRobustnessFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES_EXT,
+    ePhysicalDeviceInlineUniformBlockFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT,
+    ePhysicalDeviceInlineUniformBlockPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT,
+    ePhysicalDeviceMaintenance3PropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES_KHR,
+    ePhysicalDeviceMaintenance4FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES_KHR,
+    ePhysicalDeviceMaintenance4PropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES_KHR,
+    ePhysicalDeviceMemoryProperties2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR,
+    ePhysicalDeviceMultiviewFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES_KHR,
+    ePhysicalDeviceMultiviewPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES_KHR,
+    ePhysicalDeviceMutableDescriptorTypeFeaturesVALVE = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_VALVE,
+    ePhysicalDevicePipelineCreationCacheControlFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES_EXT,
+    ePhysicalDevicePointClippingPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES_KHR,
+    ePhysicalDevicePrivateDataFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES_EXT,
+    ePhysicalDeviceProperties2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2_KHR,
+    ePhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_ARM,
+    ePhysicalDeviceSamplerFilterMinmaxPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT,
+    ePhysicalDeviceSamplerYcbcrConversionFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES_KHR,
+    ePhysicalDeviceScalarBlockLayoutFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT,
+    ePhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES_KHR,
+    ePhysicalDeviceShaderAtomicInt64FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR,
+    ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT,
+    ePhysicalDeviceShaderDrawParameterFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES,
+    ePhysicalDeviceShaderFloat16Int8FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR,
+    ePhysicalDeviceShaderIntegerDotProductFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_FEATURES_KHR,
+    ePhysicalDeviceShaderIntegerDotProductPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES_KHR,
+    ePhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES_KHR,
+    ePhysicalDeviceShaderTerminateInvocationFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES_KHR,
+    ePhysicalDeviceSparseImageFormatInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2_KHR,
+    ePhysicalDeviceSubgroupSizeControlFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT,
+    ePhysicalDeviceSubgroupSizeControlPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT,
+    ePhysicalDeviceSynchronization2FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES_KHR,
+    ePhysicalDeviceTexelBufferAlignmentPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT,
+    ePhysicalDeviceTextureCompressionAstcHdrFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT,
+    ePhysicalDeviceTimelineSemaphoreFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR,
+    ePhysicalDeviceTimelineSemaphorePropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES_KHR,
+    ePhysicalDeviceToolPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES_EXT,
+    ePhysicalDeviceUniformBufferStandardLayoutFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR,
+    ePhysicalDeviceVariablePointersFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES_KHR,
+    ePhysicalDeviceVariablePointerFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES,
+    ePhysicalDeviceVariablePointerFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES_KHR,
+    ePhysicalDeviceVulkanMemoryModelFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR,
+    ePhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES_KHR,
+    ePipelineCreationFeedbackCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT,
+    ePipelineInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_INFO_EXT,
+    ePipelineRenderingCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_RENDERING_CREATE_INFO_KHR,
+    ePipelineShaderStageRequiredSubgroupSizeCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT,
+    ePipelineTessellationDomainOriginStateCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO_KHR,
+    ePrivateDataSlotCreateInfoEXT = VK_STRUCTURE_TYPE_PRIVATE_DATA_SLOT_CREATE_INFO_EXT,
+    eQueryPoolCreateInfoINTEL = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL,
+    eQueueFamilyGlobalPriorityPropertiesEXT = VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_EXT,
+    eQueueFamilyProperties2KHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR,
+    eRenderingAttachmentInfoKHR = VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_INFO_KHR,
+    eRenderingInfoKHR = VK_STRUCTURE_TYPE_RENDERING_INFO_KHR,
+    eRenderPassAttachmentBeginInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR,
+    eRenderPassCreateInfo2KHR = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR,
+    eRenderPassInputAttachmentAspectCreateInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO_KHR,
+    eRenderPassMultiviewCreateInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO_KHR,
+    eResolveImageInfo2KHR = VK_STRUCTURE_TYPE_RESOLVE_IMAGE_INFO_2_KHR,
+    eSamplerReductionModeCreateInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT,
+    eSamplerYcbcrConversionCreateInfoKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO_KHR,
+    eSamplerYcbcrConversionImageFormatPropertiesKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES_KHR,
+    eSamplerYcbcrConversionInfoKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO_KHR,
+    eSemaphoreSignalInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO_KHR,
+    eSemaphoreSubmitInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO_KHR,
+    eSemaphoreTypeCreateInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR,
+    eSemaphoreWaitInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO_KHR,
+    eSparseImageFormatProperties2KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2_KHR,
+    eSparseImageMemoryRequirements2KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2_KHR,
+    eSubmitInfo2KHR = VK_STRUCTURE_TYPE_SUBMIT_INFO_2_KHR,
+    eSubpassBeginInfoKHR = VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR,
+    eSubpassDependency2KHR = VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2_KHR,
+    eSubpassDescription2KHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR,
+    eSubpassDescriptionDepthStencilResolveKHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR,
+    eSubpassEndInfoKHR = VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR,
+    eTimelineSemaphoreSubmitInfoKHR = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR,
+    eWriteDescriptorSetInlineUniformBlockEXT = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT
+  };
+
+  enum class PipelineCacheHeaderVersion
+  {
+    eOne = VK_PIPELINE_CACHE_HEADER_VERSION_ONE
+  };
+
+  enum class ObjectType
+  {
+    eUnknown = VK_OBJECT_TYPE_UNKNOWN,
+    eInstance = VK_OBJECT_TYPE_INSTANCE,
+    ePhysicalDevice = VK_OBJECT_TYPE_PHYSICAL_DEVICE,
+    eDevice = VK_OBJECT_TYPE_DEVICE,
+    eQueue = VK_OBJECT_TYPE_QUEUE,
+    eSemaphore = VK_OBJECT_TYPE_SEMAPHORE,
+    eCommandBuffer = VK_OBJECT_TYPE_COMMAND_BUFFER,
+    eFence = VK_OBJECT_TYPE_FENCE,
+    eDeviceMemory = VK_OBJECT_TYPE_DEVICE_MEMORY,
+    eBuffer = VK_OBJECT_TYPE_BUFFER,
+    eImage = VK_OBJECT_TYPE_IMAGE,
+    eEvent = VK_OBJECT_TYPE_EVENT,
+    eQueryPool = VK_OBJECT_TYPE_QUERY_POOL,
+    eBufferView = VK_OBJECT_TYPE_BUFFER_VIEW,
+    eImageView = VK_OBJECT_TYPE_IMAGE_VIEW,
+    eShaderModule = VK_OBJECT_TYPE_SHADER_MODULE,
+    ePipelineCache = VK_OBJECT_TYPE_PIPELINE_CACHE,
+    ePipelineLayout = VK_OBJECT_TYPE_PIPELINE_LAYOUT,
+    eRenderPass = VK_OBJECT_TYPE_RENDER_PASS,
+    ePipeline = VK_OBJECT_TYPE_PIPELINE,
+    eDescriptorSetLayout = VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT,
+    eSampler = VK_OBJECT_TYPE_SAMPLER,
+    eDescriptorPool = VK_OBJECT_TYPE_DESCRIPTOR_POOL,
+    eDescriptorSet = VK_OBJECT_TYPE_DESCRIPTOR_SET,
+    eFramebuffer = VK_OBJECT_TYPE_FRAMEBUFFER,
+    eCommandPool = VK_OBJECT_TYPE_COMMAND_POOL,
+    eSamplerYcbcrConversion = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION,
+    eDescriptorUpdateTemplate = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE,
+    ePrivateDataSlot = VK_OBJECT_TYPE_PRIVATE_DATA_SLOT,
+    eSurfaceKHR = VK_OBJECT_TYPE_SURFACE_KHR,
+    eSwapchainKHR = VK_OBJECT_TYPE_SWAPCHAIN_KHR,
+    eDisplayKHR = VK_OBJECT_TYPE_DISPLAY_KHR,
+    eDisplayModeKHR = VK_OBJECT_TYPE_DISPLAY_MODE_KHR,
+    eDebugReportCallbackEXT = VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT,
+    eVideoSessionKHR = VK_OBJECT_TYPE_VIDEO_SESSION_KHR,
+    eVideoSessionParametersKHR = VK_OBJECT_TYPE_VIDEO_SESSION_PARAMETERS_KHR,
+    eCuModuleNVX = VK_OBJECT_TYPE_CU_MODULE_NVX,
+    eCuFunctionNVX = VK_OBJECT_TYPE_CU_FUNCTION_NVX,
+    eDebugUtilsMessengerEXT = VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT,
+    eAccelerationStructureKHR = VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR,
+    eValidationCacheEXT = VK_OBJECT_TYPE_VALIDATION_CACHE_EXT,
+    eAccelerationStructureNV = VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV,
+    ePerformanceConfigurationINTEL = VK_OBJECT_TYPE_PERFORMANCE_CONFIGURATION_INTEL,
+    eDeferredOperationKHR = VK_OBJECT_TYPE_DEFERRED_OPERATION_KHR,
+    eIndirectCommandsLayoutNV = VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NV,
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+    eBufferCollectionFUCHSIA = VK_OBJECT_TYPE_BUFFER_COLLECTION_FUCHSIA,
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+    eMicromapEXT = VK_OBJECT_TYPE_MICROMAP_EXT,
+    eOpticalFlowSessionNV = VK_OBJECT_TYPE_OPTICAL_FLOW_SESSION_NV,
+    eDescriptorUpdateTemplateKHR = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR,
+    ePrivateDataSlotEXT = VK_OBJECT_TYPE_PRIVATE_DATA_SLOT_EXT,
+    eSamplerYcbcrConversionKHR = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR
+  };
+
+  enum class VendorId
+  {
+    eVIV = VK_VENDOR_ID_VIV,
+    eVSI = VK_VENDOR_ID_VSI,
+    eKazan = VK_VENDOR_ID_KAZAN,
+    eCodeplay = VK_VENDOR_ID_CODEPLAY,
+    eMESA = VK_VENDOR_ID_MESA,
+    ePocl = VK_VENDOR_ID_POCL
+  };
+
+  enum class Format
+  {
+    eUndefined = VK_FORMAT_UNDEFINED,
+    eR4G4UnormPack8 = VK_FORMAT_R4G4_UNORM_PACK8,
+    eR4G4B4A4UnormPack16 = VK_FORMAT_R4G4B4A4_UNORM_PACK16,
+    eB4G4R4A4UnormPack16 = VK_FORMAT_B4G4R4A4_UNORM_PACK16,
+    eR5G6B5UnormPack16 = VK_FORMAT_R5G6B5_UNORM_PACK16,
+    eB5G6R5UnormPack16 = VK_FORMAT_B5G6R5_UNORM_PACK16,
+    eR5G5B5A1UnormPack16 = VK_FORMAT_R5G5B5A1_UNORM_PACK16,
+    eB5G5R5A1UnormPack16 = VK_FORMAT_B5G5R5A1_UNORM_PACK16,
+    eA1R5G5B5UnormPack16 = VK_FORMAT_A1R5G5B5_UNORM_PACK16,
+    eR8Unorm = VK_FORMAT_R8_UNORM,
+    eR8Snorm = VK_FORMAT_R8_SNORM,
+    eR8Uscaled = VK_FORMAT_R8_USCALED,
+    eR8Sscaled = VK_FORMAT_R8_SSCALED,
+    eR8Uint = VK_FORMAT_R8_UINT,
+    eR8Sint = VK_FORMAT_R8_SINT,
+    eR8Srgb = VK_FORMAT_R8_SRGB,
+    eR8G8Unorm = VK_FORMAT_R8G8_UNORM,
+    eR8G8Snorm = VK_FORMAT_R8G8_SNORM,
+    eR8G8Uscaled = VK_FORMAT_R8G8_USCALED,
+    eR8G8Sscaled = VK_FORMAT_R8G8_SSCALED,
+    eR8G8Uint = VK_FORMAT_R8G8_UINT,
+    eR8G8Sint = VK_FORMAT_R8G8_SINT,
+    eR8G8Srgb = VK_FORMAT_R8G8_SRGB,
+    eR8G8B8Unorm = VK_FORMAT_R8G8B8_UNORM,
+    eR8G8B8Snorm = VK_FORMAT_R8G8B8_SNORM,
+    eR8G8B8Uscaled = VK_FORMAT_R8G8B8_USCALED,
+    eR8G8B8Sscaled = VK_FORMAT_R8G8B8_SSCALED,
+    eR8G8B8Uint = VK_FORMAT_R8G8B8_UINT,
+    eR8G8B8Sint = VK_FORMAT_R8G8B8_SINT,
+    eR8G8B8Srgb = VK_FORMAT_R8G8B8_SRGB,
+    eB8G8R8Unorm = VK_FORMAT_B8G8R8_UNORM,
+    eB8G8R8Snorm = VK_FORMAT_B8G8R8_SNORM,
+    eB8G8R8Uscaled = VK_FORMAT_B8G8R8_USCALED,
+    eB8G8R8Sscaled = VK_FORMAT_B8G8R8_SSCALED,
+    eB8G8R8Uint = VK_FORMAT_B8G8R8_UINT,
+    eB8G8R8Sint = VK_FORMAT_B8G8R8_SINT,
+    eB8G8R8Srgb = VK_FORMAT_B8G8R8_SRGB,
+    eR8G8B8A8Unorm = VK_FORMAT_R8G8B8A8_UNORM,
+    eR8G8B8A8Snorm = VK_FORMAT_R8G8B8A8_SNORM,
+    eR8G8B8A8Uscaled = VK_FORMAT_R8G8B8A8_USCALED,
+    eR8G8B8A8Sscaled = VK_FORMAT_R8G8B8A8_SSCALED,
+    eR8G8B8A8Uint = VK_FORMAT_R8G8B8A8_UINT,
+    eR8G8B8A8Sint = VK_FORMAT_R8G8B8A8_SINT,
+    eR8G8B8A8Srgb = VK_FORMAT_R8G8B8A8_SRGB,
+    eB8G8R8A8Unorm = VK_FORMAT_B8G8R8A8_UNORM,
+    eB8G8R8A8Snorm = VK_FORMAT_B8G8R8A8_SNORM,
+    eB8G8R8A8Uscaled = VK_FORMAT_B8G8R8A8_USCALED,
+    eB8G8R8A8Sscaled = VK_FORMAT_B8G8R8A8_SSCALED,
+    eB8G8R8A8Uint = VK_FORMAT_B8G8R8A8_UINT,
+    eB8G8R8A8Sint = VK_FORMAT_B8G8R8A8_SINT,
+    eB8G8R8A8Srgb = VK_FORMAT_B8G8R8A8_SRGB,
+    eA8B8G8R8UnormPack32 = VK_FORMAT_A8B8G8R8_UNORM_PACK32,
+    eA8B8G8R8SnormPack32 = VK_FORMAT_A8B8G8R8_SNORM_PACK32,
+    eA8B8G8R8UscaledPack32 = VK_FORMAT_A8B8G8R8_USCALED_PACK32,
+    eA8B8G8R8SscaledPack32 = VK_FORMAT_A8B8G8R8_SSCALED_PACK32,
+    eA8B8G8R8UintPack32 = VK_FORMAT_A8B8G8R8_UINT_PACK32,
+    eA8B8G8R8SintPack32 = VK_FORMAT_A8B8G8R8_SINT_PACK32,
+    eA8B8G8R8SrgbPack32 = VK_FORMAT_A8B8G8R8_SRGB_PACK32,
+    eA2R10G10B10UnormPack32 = VK_FORMAT_A2R10G10B10_UNORM_PACK32,
+    eA2R10G10B10SnormPack32 = VK_FORMAT_A2R10G10B10_SNORM_PACK32,
+    eA2R10G10B10UscaledPack32 = VK_FORMAT_A2R10G10B10_USCALED_PACK32,
+    eA2R10G10B10SscaledPack32 = VK_FORMAT_A2R10G10B10_SSCALED_PACK32,
+    eA2R10G10B10UintPack32 = VK_FORMAT_A2R10G10B10_UINT_PACK32,
+    eA2R10G10B10SintPack32 = VK_FORMAT_A2R10G10B10_SINT_PACK32,
+    eA2B10G10R10UnormPack32 = VK_FORMAT_A2B10G10R10_UNORM_PACK32,
+    eA2B10G10R10SnormPack32 = VK_FORMAT_A2B10G10R10_SNORM_PACK32,
+    eA2B10G10R10UscaledPack32 = VK_FORMAT_A2B10G10R10_USCALED_PACK32,
+    eA2B10G10R10SscaledPack32 = VK_FORMAT_A2B10G10R10_SSCALED_PACK32,
+    eA2B10G10R10UintPack32 = VK_FORMAT_A2B10G10R10_UINT_PACK32,
+    eA2B10G10R10SintPack32 = VK_FORMAT_A2B10G10R10_SINT_PACK32,
+    eR16Unorm = VK_FORMAT_R16_UNORM,
+    eR16Snorm = VK_FORMAT_R16_SNORM,
+    eR16Uscaled = VK_FORMAT_R16_USCALED,
+    eR16Sscaled = VK_FORMAT_R16_SSCALED,
+    eR16Uint = VK_FORMAT_R16_UINT,
+    eR16Sint = VK_FORMAT_R16_SINT,
+    eR16Sfloat = VK_FORMAT_R16_SFLOAT,
+    eR16G16Unorm = VK_FORMAT_R16G16_UNORM,
+    eR16G16Snorm = VK_FORMAT_R16G16_SNORM,
+    eR16G16Uscaled = VK_FORMAT_R16G16_USCALED,
+    eR16G16Sscaled = VK_FORMAT_R16G16_SSCALED,
+    eR16G16Uint = VK_FORMAT_R16G16_UINT,
+    eR16G16Sint = VK_FORMAT_R16G16_SINT,
+    eR16G16Sfloat = VK_FORMAT_R16G16_SFLOAT,
+    eR16G16B16Unorm = VK_FORMAT_R16G16B16_UNORM,
+    eR16G16B16Snorm = VK_FORMAT_R16G16B16_SNORM,
+    eR16G16B16Uscaled = VK_FORMAT_R16G16B16_USCALED,
+    eR16G16B16Sscaled = VK_FORMAT_R16G16B16_SSCALED,
+    eR16G16B16Uint = VK_FORMAT_R16G16B16_UINT,
+    eR16G16B16Sint = VK_FORMAT_R16G16B16_SINT,
+    eR16G16B16Sfloat = VK_FORMAT_R16G16B16_SFLOAT,
+    eR16G16B16A16Unorm = VK_FORMAT_R16G16B16A16_UNORM,
+    eR16G16B16A16Snorm = VK_FORMAT_R16G16B16A16_SNORM,
+    eR16G16B16A16Uscaled = VK_FORMAT_R16G16B16A16_USCALED,
+    eR16G16B16A16Sscaled = VK_FORMAT_R16G16B16A16_SSCALED,
+    eR16G16B16A16Uint = VK_FORMAT_R16G16B16A16_UINT,
+    eR16G16B16A16Sint = VK_FORMAT_R16G16B16A16_SINT,
+    eR16G16B16A16Sfloat = VK_FORMAT_R16G16B16A16_SFLOAT,
+    eR32Uint = VK_FORMAT_R32_UINT,
+    eR32Sint = VK_FORMAT_R32_SINT,
+    eR32Sfloat = VK_FORMAT_R32_SFLOAT,
+    eR32G32Uint = VK_FORMAT_R32G32_UINT,
+    eR32G32Sint = VK_FORMAT_R32G32_SINT,
+    eR32G32Sfloat = VK_FORMAT_R32G32_SFLOAT,
+    eR32G32B32Uint = VK_FORMAT_R32G32B32_UINT,
+    eR32G32B32Sint = VK_FORMAT_R32G32B32_SINT,
+    eR32G32B32Sfloat = VK_FORMAT_R32G32B32_SFLOAT,
+    eR32G32B32A32Uint = VK_FORMAT_R32G32B32A32_UINT,
+    eR32G32B32A32Sint = VK_FORMAT_R32G32B32A32_SINT,
+    eR32G32B32A32Sfloat = VK_FORMAT_R32G32B32A32_SFLOAT,
+    eR64Uint = VK_FORMAT_R64_UINT,
+    eR64Sint = VK_FORMAT_R64_SINT,
+    eR64Sfloat = VK_FORMAT_R64_SFLOAT,
+    eR64G64Uint = VK_FORMAT_R64G64_UINT,
+    eR64G64Sint = VK_FORMAT_R64G64_SINT,
+    eR64G64Sfloat = VK_FORMAT_R64G64_SFLOAT,
+    eR64G64B64Uint = VK_FORMAT_R64G64B64_UINT,
+    eR64G64B64Sint = VK_FORMAT_R64G64B64_SINT,
+    eR64G64B64Sfloat = VK_FORMAT_R64G64B64_SFLOAT,
+    eR64G64B64A64Uint = VK_FORMAT_R64G64B64A64_UINT,
+    eR64G64B64A64Sint = VK_FORMAT_R64G64B64A64_SINT,
+    eR64G64B64A64Sfloat = VK_FORMAT_R64G64B64A64_SFLOAT,
+    eB10G11R11UfloatPack32 = VK_FORMAT_B10G11R11_UFLOAT_PACK32,
+    eE5B9G9R9UfloatPack32 = VK_FORMAT_E5B9G9R9_UFLOAT_PACK32,
+    eD16Unorm = VK_FORMAT_D16_UNORM,
+    eX8D24UnormPack32 = VK_FORMAT_X8_D24_UNORM_PACK32,
+    eD32Sfloat = VK_FORMAT_D32_SFLOAT,
+    eS8Uint = VK_FORMAT_S8_UINT,
+    eD16UnormS8Uint = VK_FORMAT_D16_UNORM_S8_UINT,
+    eD24UnormS8Uint = VK_FORMAT_D24_UNORM_S8_UINT,
+    eD32SfloatS8Uint = VK_FORMAT_D32_SFLOAT_S8_UINT,
+    eBc1RgbUnormBlock = VK_FORMAT_BC1_RGB_UNORM_BLOCK,
+    eBc1RgbSrgbBlock = VK_FORMAT_BC1_RGB_SRGB_BLOCK,
+    eBc1RgbaUnormBlock = VK_FORMAT_BC1_RGBA_UNORM_BLOCK,
+    eBc1RgbaSrgbBlock = VK_FORMAT_BC1_RGBA_SRGB_BLOCK,
+    eBc2UnormBlock = VK_FORMAT_BC2_UNORM_BLOCK,
+    eBc2SrgbBlock = VK_FORMAT_BC2_SRGB_BLOCK,
+    eBc3UnormBlock = VK_FORMAT_BC3_UNORM_BLOCK,
+    eBc3SrgbBlock = VK_FORMAT_BC3_SRGB_BLOCK,
+    eBc4UnormBlock = VK_FORMAT_BC4_UNORM_BLOCK,
+    eBc4SnormBlock = VK_FORMAT_BC4_SNORM_BLOCK,
+    eBc5UnormBlock = VK_FORMAT_BC5_UNORM_BLOCK,
+    eBc5SnormBlock = VK_FORMAT_BC5_SNORM_BLOCK,
+    eBc6HUfloatBlock = VK_FORMAT_BC6H_UFLOAT_BLOCK,
+    eBc6HSfloatBlock = VK_FORMAT_BC6H_SFLOAT_BLOCK,
+    eBc7UnormBlock = VK_FORMAT_BC7_UNORM_BLOCK,
+    eBc7SrgbBlock = VK_FORMAT_BC7_SRGB_BLOCK,
+    eEtc2R8G8B8UnormBlock = VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK,
+    eEtc2R8G8B8SrgbBlock = VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK,
+    eEtc2R8G8B8A1UnormBlock = VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK,
+    eEtc2R8G8B8A1SrgbBlock = VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK,
+    eEtc2R8G8B8A8UnormBlock = VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK,
+    eEtc2R8G8B8A8SrgbBlock = VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK,
+    eEacR11UnormBlock = VK_FORMAT_EAC_R11_UNORM_BLOCK,
+    eEacR11SnormBlock = VK_FORMAT_EAC_R11_SNORM_BLOCK,
+    eEacR11G11UnormBlock = VK_FORMAT_EAC_R11G11_UNORM_BLOCK,
+    eEacR11G11SnormBlock = VK_FORMAT_EAC_R11G11_SNORM_BLOCK,
+    eAstc4x4UnormBlock = VK_FORMAT_ASTC_4x4_UNORM_BLOCK,
+    eAstc4x4SrgbBlock = VK_FORMAT_ASTC_4x4_SRGB_BLOCK,
+    eAstc5x4UnormBlock = VK_FORMAT_ASTC_5x4_UNORM_BLOCK,
+    eAstc5x4SrgbBlock = VK_FORMAT_ASTC_5x4_SRGB_BLOCK,
+    eAstc5x5UnormBlock = VK_FORMAT_ASTC_5x5_UNORM_BLOCK,
+    eAstc5x5SrgbBlock = VK_FORMAT_ASTC_5x5_SRGB_BLOCK,
+    eAstc6x5UnormBlock = VK_FORMAT_ASTC_6x5_UNORM_BLOCK,
+    eAstc6x5SrgbBlock = VK_FORMAT_ASTC_6x5_SRGB_BLOCK,
+    eAstc6x6UnormBlock = VK_FORMAT_ASTC_6x6_UNORM_BLOCK,
+    eAstc6x6SrgbBlock = VK_FORMAT_ASTC_6x6_SRGB_BLOCK,
+    eAstc8x5UnormBlock = VK_FORMAT_ASTC_8x5_UNORM_BLOCK,
+    eAstc8x5SrgbBlock = VK_FORMAT_ASTC_8x5_SRGB_BLOCK,
+    eAstc8x6UnormBlock = VK_FORMAT_ASTC_8x6_UNORM_BLOCK,
+    eAstc8x6SrgbBlock = VK_FORMAT_ASTC_8x6_SRGB_BLOCK,
+    eAstc8x8UnormBlock = VK_FORMAT_ASTC_8x8_UNORM_BLOCK,
+    eAstc8x8SrgbBlock = VK_FORMAT_ASTC_8x8_SRGB_BLOCK,
+    eAstc10x5UnormBlock = VK_FORMAT_ASTC_10x5_UNORM_BLOCK,
+    eAstc10x5SrgbBlock = VK_FORMAT_ASTC_10x5_SRGB_BLOCK,
+    eAstc10x6UnormBlock = VK_FORMAT_ASTC_10x6_UNORM_BLOCK,
+    eAstc10x6SrgbBlock = VK_FORMAT_ASTC_10x6_SRGB_BLOCK,
+    eAstc10x8UnormBlock = VK_FORMAT_ASTC_10x8_UNORM_BLOCK,
+    eAstc10x8SrgbBlock = VK_FORMAT_ASTC_10x8_SRGB_BLOCK,
+    eAstc10x10UnormBlock = VK_FORMAT_ASTC_10x10_UNORM_BLOCK,
+    eAstc10x10SrgbBlock = VK_FORMAT_ASTC_10x10_SRGB_BLOCK,
+    eAstc12x10UnormBlock = VK_FORMAT_ASTC_12x10_UNORM_BLOCK,
+    eAstc12x10SrgbBlock = VK_FORMAT_ASTC_12x10_SRGB_BLOCK,
+    eAstc12x12UnormBlock = VK_FORMAT_ASTC_12x12_UNORM_BLOCK,
+    eAstc12x12SrgbBlock = VK_FORMAT_ASTC_12x12_SRGB_BLOCK,
+    eG8B8G8R8422Unorm = VK_FORMAT_G8B8G8R8_422_UNORM,
+    eB8G8R8G8422Unorm = VK_FORMAT_B8G8R8G8_422_UNORM,
+    eG8B8R83Plane420Unorm = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM,
+    eG8B8R82Plane420Unorm = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM,
+    eG8B8R83Plane422Unorm = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM,
+    eG8B8R82Plane422Unorm = VK_FORMAT_G8_B8R8_2PLANE_422_UNORM,
+    eG8B8R83Plane444Unorm = VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM,
+    eR10X6UnormPack16 = VK_FORMAT_R10X6_UNORM_PACK16,
+    eR10X6G10X6Unorm2Pack16 = VK_FORMAT_R10X6G10X6_UNORM_2PACK16,
+    eR10X6G10X6B10X6A10X6Unorm4Pack16 = VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16,
+    eG10X6B10X6G10X6R10X6422Unorm4Pack16 = VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16,
+    eB10X6G10X6R10X6G10X6422Unorm4Pack16 = VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16,
+    eG10X6B10X6R10X63Plane420Unorm3Pack16 = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16,
+    eG10X6B10X6R10X62Plane420Unorm3Pack16 = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16,
+    eG10X6B10X6R10X63Plane422Unorm3Pack16 = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16,
+    eG10X6B10X6R10X62Plane422Unorm3Pack16 = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16,
+    eG10X6B10X6R10X63Plane444Unorm3Pack16 = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16,
+    eR12X4UnormPack16 = VK_FORMAT_R12X4_UNORM_PACK16,
+    eR12X4G12X4Unorm2Pack16 = VK_FORMAT_R12X4G12X4_UNORM_2PACK16,
+    eR12X4G12X4B12X4A12X4Unorm4Pack16 = VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16,
+    eG12X4B12X4G12X4R12X4422Unorm4Pack16 = VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16,
+    eB12X4G12X4R12X4G12X4422Unorm4Pack16 = VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16,
+    eG12X4B12X4R12X43Plane420Unorm3Pack16 = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16,
+    eG12X4B12X4R12X42Plane420Unorm3Pack16 = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16,
+    eG12X4B12X4R12X43Plane422Unorm3Pack16 = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16,
+    eG12X4B12X4R12X42Plane422Unorm3Pack16 = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16,
+    eG12X4B12X4R12X43Plane444Unorm3Pack16 = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16,
+    eG16B16G16R16422Unorm = VK_FORMAT_G16B16G16R16_422_UNORM,
+    eB16G16R16G16422Unorm = VK_FORMAT_B16G16R16G16_422_UNORM,
+    eG16B16R163Plane420Unorm = VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM,
+    eG16B16R162Plane420Unorm = VK_FORMAT_G16_B16R16_2PLANE_420_UNORM,
+    eG16B16R163Plane422Unorm = VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM,
+    eG16B16R162Plane422Unorm = VK_FORMAT_G16_B16R16_2PLANE_422_UNORM,
+    eG16B16R163Plane444Unorm = VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM,
+    eG8B8R82Plane444Unorm = VK_FORMAT_G8_B8R8_2PLANE_444_UNORM,
+    eG10X6B10X6R10X62Plane444Unorm3Pack16 = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_444_UNORM_3PACK16,
+    eG12X4B12X4R12X42Plane444Unorm3Pack16 = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_444_UNORM_3PACK16,
+    eG16B16R162Plane444Unorm = VK_FORMAT_G16_B16R16_2PLANE_444_UNORM,
+    eA4R4G4B4UnormPack16 = VK_FORMAT_A4R4G4B4_UNORM_PACK16,
+    eA4B4G4R4UnormPack16 = VK_FORMAT_A4B4G4R4_UNORM_PACK16,
+    eAstc4x4SfloatBlock = VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK,
+    eAstc5x4SfloatBlock = VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK,
+    eAstc5x5SfloatBlock = VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK,
+    eAstc6x5SfloatBlock = VK_FORMAT_ASTC_6x5_SFLOAT_BLOCK,
+    eAstc6x6SfloatBlock = VK_FORMAT_ASTC_6x6_SFLOAT_BLOCK,
+    eAstc8x5SfloatBlock = VK_FORMAT_ASTC_8x5_SFLOAT_BLOCK,
+    eAstc8x6SfloatBlock = VK_FORMAT_ASTC_8x6_SFLOAT_BLOCK,
+    eAstc8x8SfloatBlock = VK_FORMAT_ASTC_8x8_SFLOAT_BLOCK,
+    eAstc10x5SfloatBlock = VK_FORMAT_ASTC_10x5_SFLOAT_BLOCK,
+    eAstc10x6SfloatBlock = VK_FORMAT_ASTC_10x6_SFLOAT_BLOCK,
+    eAstc10x8SfloatBlock = VK_FORMAT_ASTC_10x8_SFLOAT_BLOCK,
+    eAstc10x10SfloatBlock = VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK,
+    eAstc12x10SfloatBlock = VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK,
+    eAstc12x12SfloatBlock = VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK,
+    ePvrtc12BppUnormBlockIMG = VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG,
+    ePvrtc14BppUnormBlockIMG = VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG,
+    ePvrtc22BppUnormBlockIMG = VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG,
+    ePvrtc24BppUnormBlockIMG = VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG,
+    ePvrtc12BppSrgbBlockIMG = VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG,
+    ePvrtc14BppSrgbBlockIMG = VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG,
+    ePvrtc22BppSrgbBlockIMG = VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG,
+    ePvrtc24BppSrgbBlockIMG = VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG,
+    eR16G16S105NV = VK_FORMAT_R16G16_S10_5_NV,
+    eA4B4G4R4UnormPack16EXT = VK_FORMAT_A4B4G4R4_UNORM_PACK16_EXT,
+    eA4R4G4B4UnormPack16EXT = VK_FORMAT_A4R4G4B4_UNORM_PACK16_EXT,
+    eAstc10x10SfloatBlockEXT = VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK_EXT,
+    eAstc10x5SfloatBlockEXT = VK_FORMAT_ASTC_10x5_SFLOAT_BLOCK_EXT,
+    eAstc10x6SfloatBlockEXT = VK_FORMAT_ASTC_10x6_SFLOAT_BLOCK_EXT,
+    eAstc10x8SfloatBlockEXT = VK_FORMAT_ASTC_10x8_SFLOAT_BLOCK_EXT,
+    eAstc12x10SfloatBlockEXT = VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK_EXT,
+    eAstc12x12SfloatBlockEXT = VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK_EXT,
+    eAstc4x4SfloatBlockEXT = VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK_EXT,
+    eAstc5x4SfloatBlockEXT = VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK_EXT,
+    eAstc5x5SfloatBlockEXT = VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK_EXT,
+    eAstc6x5SfloatBlockEXT = VK_FORMAT_ASTC_6x5_SFLOAT_BLOCK_EXT,
+    eAstc6x6SfloatBlockEXT = VK_FORMAT_ASTC_6x6_SFLOAT_BLOCK_EXT,
+    eAstc8x5SfloatBlockEXT = VK_FORMAT_ASTC_8x5_SFLOAT_BLOCK_EXT,
+    eAstc8x6SfloatBlockEXT = VK_FORMAT_ASTC_8x6_SFLOAT_BLOCK_EXT,
+    eAstc8x8SfloatBlockEXT = VK_FORMAT_ASTC_8x8_SFLOAT_BLOCK_EXT,
+    eB10X6G10X6R10X6G10X6422Unorm4Pack16KHR = VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16_KHR,
+    eB12X4G12X4R12X4G12X4422Unorm4Pack16KHR = VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16_KHR,
+    eB16G16R16G16422UnormKHR = VK_FORMAT_B16G16R16G16_422_UNORM_KHR,
+    eB8G8R8G8422UnormKHR = VK_FORMAT_B8G8R8G8_422_UNORM_KHR,
+    eG10X6B10X6G10X6R10X6422Unorm4Pack16KHR = VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16_KHR,
+    eG10X6B10X6R10X62Plane420Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16_KHR,
+    eG10X6B10X6R10X62Plane422Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16_KHR,
+    eG10X6B10X6R10X62Plane444Unorm3Pack16EXT = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_444_UNORM_3PACK16_EXT,
+    eG10X6B10X6R10X63Plane420Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16_KHR,
+    eG10X6B10X6R10X63Plane422Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16_KHR,
+    eG10X6B10X6R10X63Plane444Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16_KHR,
+    eG12X4B12X4G12X4R12X4422Unorm4Pack16KHR = VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16_KHR,
+    eG12X4B12X4R12X42Plane420Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16_KHR,
+    eG12X4B12X4R12X42Plane422Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16_KHR,
+    eG12X4B12X4R12X42Plane444Unorm3Pack16EXT = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_444_UNORM_3PACK16_EXT,
+    eG12X4B12X4R12X43Plane420Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16_KHR,
+    eG12X4B12X4R12X43Plane422Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16_KHR,
+    eG12X4B12X4R12X43Plane444Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16_KHR,
+    eG16B16G16R16422UnormKHR = VK_FORMAT_G16B16G16R16_422_UNORM_KHR,
+    eG16B16R162Plane420UnormKHR = VK_FORMAT_G16_B16R16_2PLANE_420_UNORM_KHR,
+    eG16B16R162Plane422UnormKHR = VK_FORMAT_G16_B16R16_2PLANE_422_UNORM_KHR,
+    eG16B16R162Plane444UnormEXT = VK_FORMAT_G16_B16R16_2PLANE_444_UNORM_EXT,
+    eG16B16R163Plane420UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM_KHR,
+    eG16B16R163Plane422UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM_KHR,
+    eG16B16R163Plane444UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM_KHR,
+    eG8B8G8R8422UnormKHR = VK_FORMAT_G8B8G8R8_422_UNORM_KHR,
+    eG8B8R82Plane420UnormKHR = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM_KHR,
+    eG8B8R82Plane422UnormKHR = VK_FORMAT_G8_B8R8_2PLANE_422_UNORM_KHR,
+    eG8B8R82Plane444UnormEXT = VK_FORMAT_G8_B8R8_2PLANE_444_UNORM_EXT,
+    eG8B8R83Plane420UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM_KHR,
+    eG8B8R83Plane422UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM_KHR,
+    eG8B8R83Plane444UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM_KHR,
+    eR10X6G10X6B10X6A10X6Unorm4Pack16KHR = VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16_KHR,
+    eR10X6G10X6Unorm2Pack16KHR = VK_FORMAT_R10X6G10X6_UNORM_2PACK16_KHR,
+    eR10X6UnormPack16KHR = VK_FORMAT_R10X6_UNORM_PACK16_KHR,
+    eR12X4G12X4B12X4A12X4Unorm4Pack16KHR = VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16_KHR,
+    eR12X4G12X4Unorm2Pack16KHR = VK_FORMAT_R12X4G12X4_UNORM_2PACK16_KHR,
+    eR12X4UnormPack16KHR = VK_FORMAT_R12X4_UNORM_PACK16_KHR
+  };
+
+  enum class FormatFeatureFlagBits : VkFormatFeatureFlags
+  {
+    eSampledImage = VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT,
+    eStorageImage = VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT,
+    eStorageImageAtomic = VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT,
+    eUniformTexelBuffer = VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT,
+    eStorageTexelBuffer = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT,
+    eStorageTexelBufferAtomic = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT,
+    eVertexBuffer = VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT,
+    eColorAttachment = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT,
+    eColorAttachmentBlend = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT,
+    eDepthStencilAttachment = VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT,
+    eBlitSrc = VK_FORMAT_FEATURE_BLIT_SRC_BIT,
+    eBlitDst = VK_FORMAT_FEATURE_BLIT_DST_BIT,
+    eSampledImageFilterLinear = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT,
+    eTransferSrc = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT,
+    eTransferDst = VK_FORMAT_FEATURE_TRANSFER_DST_BIT,
+    eMidpointChromaSamples = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT,
+    eSampledImageYcbcrConversionLinearFilter = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT,
+    eSampledImageYcbcrConversionSeparateReconstructionFilter = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT,
+    eSampledImageYcbcrConversionChromaReconstructionExplicit = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT,
+    eSampledImageYcbcrConversionChromaReconstructionExplicitForceable = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT,
+    eDisjoint = VK_FORMAT_FEATURE_DISJOINT_BIT,
+    eCositedChromaSamples = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT,
+    eSampledImageFilterMinmax = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT,
+    eVideoDecodeOutputKHR = VK_FORMAT_FEATURE_VIDEO_DECODE_OUTPUT_BIT_KHR,
+    eVideoDecodeDpbKHR = VK_FORMAT_FEATURE_VIDEO_DECODE_DPB_BIT_KHR,
+    eAccelerationStructureVertexBufferKHR = VK_FORMAT_FEATURE_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR,
+    eSampledImageFilterCubicEXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT,
+    eFragmentDensityMapEXT = VK_FORMAT_FEATURE_FRAGMENT_DENSITY_MAP_BIT_EXT,
+    eFragmentShadingRateAttachmentKHR = VK_FORMAT_FEATURE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR,
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    eVideoEncodeInputKHR = VK_FORMAT_FEATURE_VIDEO_ENCODE_INPUT_BIT_KHR,
+    eVideoEncodeDpbKHR = VK_FORMAT_FEATURE_VIDEO_ENCODE_DPB_BIT_KHR,
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+    eCositedChromaSamplesKHR = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT_KHR,
+    eDisjointKHR = VK_FORMAT_FEATURE_DISJOINT_BIT_KHR,
+    eMidpointChromaSamplesKHR = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT_KHR,
+    eSampledImageFilterCubicIMG = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG,
+    eSampledImageFilterMinmaxEXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT,
+    eSampledImageYcbcrConversionChromaReconstructionExplicitKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT_KHR,
+    eSampledImageYcbcrConversionChromaReconstructionExplicitForceableKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT_KHR,
+    eSampledImageYcbcrConversionLinearFilterKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR,
+    eSampledImageYcbcrConversionSeparateReconstructionFilterKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR,
+    eTransferDstKHR = VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR,
+    eTransferSrcKHR = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR
+  };
+
+  using FormatFeatureFlags = Flags<FormatFeatureFlagBits>;
+
+
+  template <> struct FlagTraits<FormatFeatureFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR FormatFeatureFlags allFlags = 
+          FormatFeatureFlagBits::eSampledImage
+        | FormatFeatureFlagBits::eStorageImage
+        | FormatFeatureFlagBits::eStorageImageAtomic
+        | FormatFeatureFlagBits::eUniformTexelBuffer
+        | FormatFeatureFlagBits::eStorageTexelBuffer
+        | FormatFeatureFlagBits::eStorageTexelBufferAtomic
+        | FormatFeatureFlagBits::eVertexBuffer
+        | FormatFeatureFlagBits::eColorAttachment
+        | FormatFeatureFlagBits::eColorAttachmentBlend
+        | FormatFeatureFlagBits::eDepthStencilAttachment
+        | FormatFeatureFlagBits::eBlitSrc
+        | FormatFeatureFlagBits::eBlitDst
+        | FormatFeatureFlagBits::eSampledImageFilterLinear
+        | FormatFeatureFlagBits::eTransferSrc
+        | FormatFeatureFlagBits::eTransferDst
+        | FormatFeatureFlagBits::eMidpointChromaSamples
+        | FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter
+        | FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter
+        | FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit
+        | FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable
+        | FormatFeatureFlagBits::eDisjoint
+        | FormatFeatureFlagBits::eCositedChromaSamples
+        | FormatFeatureFlagBits::eSampledImageFilterMinmax
+        | FormatFeatureFlagBits::eVideoDecodeOutputKHR
+        | FormatFeatureFlagBits::eVideoDecodeDpbKHR
+        | FormatFeatureFlagBits::eAccelerationStructureVertexBufferKHR
+        | FormatFeatureFlagBits::eSampledImageFilterCubicEXT
+        | FormatFeatureFlagBits::eFragmentDensityMapEXT
+        | FormatFeatureFlagBits::eFragmentShadingRateAttachmentKHR
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+        | FormatFeatureFlagBits::eVideoEncodeInputKHR
+        | FormatFeatureFlagBits::eVideoEncodeDpbKHR
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+;
+  };
+
+  enum class ImageCreateFlagBits : VkImageCreateFlags
+  {
+    eSparseBinding = VK_IMAGE_CREATE_SPARSE_BINDING_BIT,
+    eSparseResidency = VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT,
+    eSparseAliased = VK_IMAGE_CREATE_SPARSE_ALIASED_BIT,
+    eMutableFormat = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT,
+    eCubeCompatible = VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT,
+    eAlias = VK_IMAGE_CREATE_ALIAS_BIT,
+    eSplitInstanceBindRegions = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT,
+    e2DArrayCompatible = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT,
+    eBlockTexelViewCompatible = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT,
+    eExtendedUsage = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT,
+    eProtected = VK_IMAGE_CREATE_PROTECTED_BIT,
+    eDisjoint = VK_IMAGE_CREATE_DISJOINT_BIT,
+    eCornerSampledNV = VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV,
+    eSampleLocationsCompatibleDepthEXT = VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT,
+    eSubsampledEXT = VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT,
+    eDescriptorBufferCaptureReplayEXT = VK_IMAGE_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT,
+    eMultisampledRenderToSingleSampledEXT = VK_IMAGE_CREATE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_BIT_EXT,
+    e2DViewCompatibleEXT = VK_IMAGE_CREATE_2D_VIEW_COMPATIBLE_BIT_EXT,
+    eFragmentDensityMapOffsetQCOM = VK_IMAGE_CREATE_FRAGMENT_DENSITY_MAP_OFFSET_BIT_QCOM,
+    e2DArrayCompatibleKHR = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR,
+    eAliasKHR = VK_IMAGE_CREATE_ALIAS_BIT_KHR,
+    eBlockTexelViewCompatibleKHR = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR,
+    eDisjointKHR = VK_IMAGE_CREATE_DISJOINT_BIT_KHR,
+    eExtendedUsageKHR = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR,
+    eSplitInstanceBindRegionsKHR = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR
+  };
+
+  using ImageCreateFlags = Flags<ImageCreateFlagBits>;
+
+
+  template <> struct FlagTraits<ImageCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ImageCreateFlags allFlags = 
+          ImageCreateFlagBits::eSparseBinding
+        | ImageCreateFlagBits::eSparseResidency
+        | ImageCreateFlagBits::eSparseAliased
+        | ImageCreateFlagBits::eMutableFormat
+        | ImageCreateFlagBits::eCubeCompatible
+        | ImageCreateFlagBits::eAlias
+        | ImageCreateFlagBits::eSplitInstanceBindRegions
+        | ImageCreateFlagBits::e2DArrayCompatible
+        | ImageCreateFlagBits::eBlockTexelViewCompatible
+        | ImageCreateFlagBits::eExtendedUsage
+        | ImageCreateFlagBits::eProtected
+        | ImageCreateFlagBits::eDisjoint
+        | ImageCreateFlagBits::eCornerSampledNV
+        | ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT
+        | ImageCreateFlagBits::eSubsampledEXT
+        | ImageCreateFlagBits::eDescriptorBufferCaptureReplayEXT
+        | ImageCreateFlagBits::eMultisampledRenderToSingleSampledEXT
+        | ImageCreateFlagBits::e2DViewCompatibleEXT
+        | ImageCreateFlagBits::eFragmentDensityMapOffsetQCOM;
+  };
+
+  enum class ImageTiling
+  {
+    eOptimal = VK_IMAGE_TILING_OPTIMAL,
+    eLinear = VK_IMAGE_TILING_LINEAR,
+    eDrmFormatModifierEXT = VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT
+  };
+
+  enum class ImageType
+  {
+    e1D = VK_IMAGE_TYPE_1D,
+    e2D = VK_IMAGE_TYPE_2D,
+    e3D = VK_IMAGE_TYPE_3D
+  };
+
+  enum class ImageUsageFlagBits : VkImageUsageFlags
+  {
+    eTransferSrc = VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
+    eTransferDst = VK_IMAGE_USAGE_TRANSFER_DST_BIT,
+    eSampled = VK_IMAGE_USAGE_SAMPLED_BIT,
+    eStorage = VK_IMAGE_USAGE_STORAGE_BIT,
+    eColorAttachment = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
+    eDepthStencilAttachment = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT,
+    eTransientAttachment = VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT,
+    eInputAttachment = VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT,
+    eVideoDecodeDstKHR = VK_IMAGE_USAGE_VIDEO_DECODE_DST_BIT_KHR,
+    eVideoDecodeSrcKHR = VK_IMAGE_USAGE_VIDEO_DECODE_SRC_BIT_KHR,
+    eVideoDecodeDpbKHR = VK_IMAGE_USAGE_VIDEO_DECODE_DPB_BIT_KHR,
+    eFragmentDensityMapEXT = VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT,
+    eFragmentShadingRateAttachmentKHR = VK_IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR,
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    eVideoEncodeDstKHR = VK_IMAGE_USAGE_VIDEO_ENCODE_DST_BIT_KHR,
+    eVideoEncodeSrcKHR = VK_IMAGE_USAGE_VIDEO_ENCODE_SRC_BIT_KHR,
+    eVideoEncodeDpbKHR = VK_IMAGE_USAGE_VIDEO_ENCODE_DPB_BIT_KHR,
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+    eAttachmentFeedbackLoopEXT = VK_IMAGE_USAGE_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT,
+    eInvocationMaskHUAWEI = VK_IMAGE_USAGE_INVOCATION_MASK_BIT_HUAWEI,
+    eSampleWeightQCOM = VK_IMAGE_USAGE_SAMPLE_WEIGHT_BIT_QCOM,
+    eSampleBlockMatchQCOM = VK_IMAGE_USAGE_SAMPLE_BLOCK_MATCH_BIT_QCOM,
+    eShadingRateImageNV = VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV
+  };
+
+  using ImageUsageFlags = Flags<ImageUsageFlagBits>;
+
+
+  template <> struct FlagTraits<ImageUsageFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ImageUsageFlags allFlags = 
+          ImageUsageFlagBits::eTransferSrc
+        | ImageUsageFlagBits::eTransferDst
+        | ImageUsageFlagBits::eSampled
+        | ImageUsageFlagBits::eStorage
+        | ImageUsageFlagBits::eColorAttachment
+        | ImageUsageFlagBits::eDepthStencilAttachment
+        | ImageUsageFlagBits::eTransientAttachment
+        | ImageUsageFlagBits::eInputAttachment
+        | ImageUsageFlagBits::eVideoDecodeDstKHR
+        | ImageUsageFlagBits::eVideoDecodeSrcKHR
+        | ImageUsageFlagBits::eVideoDecodeDpbKHR
+        | ImageUsageFlagBits::eFragmentDensityMapEXT
+        | ImageUsageFlagBits::eFragmentShadingRateAttachmentKHR
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+        | ImageUsageFlagBits::eVideoEncodeDstKHR
+        | ImageUsageFlagBits::eVideoEncodeSrcKHR
+        | ImageUsageFlagBits::eVideoEncodeDpbKHR
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+        | ImageUsageFlagBits::eAttachmentFeedbackLoopEXT
+        | ImageUsageFlagBits::eInvocationMaskHUAWEI
+        | ImageUsageFlagBits::eSampleWeightQCOM
+        | ImageUsageFlagBits::eSampleBlockMatchQCOM;
+  };
+
+  enum class InstanceCreateFlagBits : VkInstanceCreateFlags
+  {
+    eEnumeratePortabilityKHR = VK_INSTANCE_CREATE_ENUMERATE_PORTABILITY_BIT_KHR
+  };
+
+  using InstanceCreateFlags = Flags<InstanceCreateFlagBits>;
+
+
+  template <> struct FlagTraits<InstanceCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR InstanceCreateFlags allFlags = 
+          InstanceCreateFlagBits::eEnumeratePortabilityKHR;
+  };
+
+  enum class InternalAllocationType
+  {
+    eExecutable = VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE
+  };
+
+  enum class MemoryHeapFlagBits : VkMemoryHeapFlags
+  {
+    eDeviceLocal = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT,
+    eMultiInstance = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT,
+    eMultiInstanceKHR = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT_KHR
+  };
+
+  using MemoryHeapFlags = Flags<MemoryHeapFlagBits>;
+
+
+  template <> struct FlagTraits<MemoryHeapFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR MemoryHeapFlags allFlags = 
+          MemoryHeapFlagBits::eDeviceLocal
+        | MemoryHeapFlagBits::eMultiInstance;
+  };
+
+  enum class MemoryPropertyFlagBits : VkMemoryPropertyFlags
+  {
+    eDeviceLocal = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
+    eHostVisible = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT,
+    eHostCoherent = VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
+    eHostCached = VK_MEMORY_PROPERTY_HOST_CACHED_BIT,
+    eLazilyAllocated = VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT,
+    eProtected = VK_MEMORY_PROPERTY_PROTECTED_BIT,
+    eDeviceCoherentAMD = VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD,
+    eDeviceUncachedAMD = VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD,
+    eRdmaCapableNV = VK_MEMORY_PROPERTY_RDMA_CAPABLE_BIT_NV
+  };
+
+  using MemoryPropertyFlags = Flags<MemoryPropertyFlagBits>;
+
+
+  template <> struct FlagTraits<MemoryPropertyFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR MemoryPropertyFlags allFlags = 
+          MemoryPropertyFlagBits::eDeviceLocal
+        | MemoryPropertyFlagBits::eHostVisible
+        | MemoryPropertyFlagBits::eHostCoherent
+        | MemoryPropertyFlagBits::eHostCached
+        | MemoryPropertyFlagBits::eLazilyAllocated
+        | MemoryPropertyFlagBits::eProtected
+        | MemoryPropertyFlagBits::eDeviceCoherentAMD
+        | MemoryPropertyFlagBits::eDeviceUncachedAMD
+        | MemoryPropertyFlagBits::eRdmaCapableNV;
+  };
+
+  enum class PhysicalDeviceType
+  {
+    eOther = VK_PHYSICAL_DEVICE_TYPE_OTHER,
+    eIntegratedGpu = VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU,
+    eDiscreteGpu = VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU,
+    eVirtualGpu = VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU,
+    eCpu = VK_PHYSICAL_DEVICE_TYPE_CPU
+  };
+
+  enum class QueueFlagBits : VkQueueFlags
+  {
+    eGraphics = VK_QUEUE_GRAPHICS_BIT,
+    eCompute = VK_QUEUE_COMPUTE_BIT,
+    eTransfer = VK_QUEUE_TRANSFER_BIT,
+    eSparseBinding = VK_QUEUE_SPARSE_BINDING_BIT,
+    eProtected = VK_QUEUE_PROTECTED_BIT,
+    eVideoDecodeKHR = VK_QUEUE_VIDEO_DECODE_BIT_KHR,
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    eVideoEncodeKHR = VK_QUEUE_VIDEO_ENCODE_BIT_KHR,
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+    eOpticalFlowNV = VK_QUEUE_OPTICAL_FLOW_BIT_NV
+  };
+
+  using QueueFlags = Flags<QueueFlagBits>;
+
+
+  template <> struct FlagTraits<QueueFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR QueueFlags allFlags = 
+          QueueFlagBits::eGraphics
+        | QueueFlagBits::eCompute
+        | QueueFlagBits::eTransfer
+        | QueueFlagBits::eSparseBinding
+        | QueueFlagBits::eProtected
+        | QueueFlagBits::eVideoDecodeKHR
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+        | QueueFlagBits::eVideoEncodeKHR
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+        | QueueFlagBits::eOpticalFlowNV;
+  };
+
+  enum class SampleCountFlagBits : VkSampleCountFlags
+  {
+    e1 = VK_SAMPLE_COUNT_1_BIT,
+    e2 = VK_SAMPLE_COUNT_2_BIT,
+    e4 = VK_SAMPLE_COUNT_4_BIT,
+    e8 = VK_SAMPLE_COUNT_8_BIT,
+    e16 = VK_SAMPLE_COUNT_16_BIT,
+    e32 = VK_SAMPLE_COUNT_32_BIT,
+    e64 = VK_SAMPLE_COUNT_64_BIT
+  };
+
+  using SampleCountFlags = Flags<SampleCountFlagBits>;
+
+
+  template <> struct FlagTraits<SampleCountFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR SampleCountFlags allFlags = 
+          SampleCountFlagBits::e1
+        | SampleCountFlagBits::e2
+        | SampleCountFlagBits::e4
+        | SampleCountFlagBits::e8
+        | SampleCountFlagBits::e16
+        | SampleCountFlagBits::e32
+        | SampleCountFlagBits::e64;
+  };
+
+  enum class SystemAllocationScope
+  {
+    eCommand = VK_SYSTEM_ALLOCATION_SCOPE_COMMAND,
+    eObject = VK_SYSTEM_ALLOCATION_SCOPE_OBJECT,
+    eCache = VK_SYSTEM_ALLOCATION_SCOPE_CACHE,
+    eDevice = VK_SYSTEM_ALLOCATION_SCOPE_DEVICE,
+    eInstance = VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE
+  };
+
+  enum class DeviceCreateFlagBits : VkDeviceCreateFlags
+  {};
+
+  using DeviceCreateFlags = Flags<DeviceCreateFlagBits>;
+
+
+  template <> struct FlagTraits<DeviceCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR DeviceCreateFlags allFlags =  {};
+  };
+
+  enum class PipelineStageFlagBits : VkPipelineStageFlags
+  {
+    eTopOfPipe = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
+    eDrawIndirect = VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT,
+    eVertexInput = VK_PIPELINE_STAGE_VERTEX_INPUT_BIT,
+    eVertexShader = VK_PIPELINE_STAGE_VERTEX_SHADER_BIT,
+    eTessellationControlShader = VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT,
+    eTessellationEvaluationShader = VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT,
+    eGeometryShader = VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT,
+    eFragmentShader = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
+    eEarlyFragmentTests = VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT,
+    eLateFragmentTests = VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT,
+    eColorAttachmentOutput = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+    eComputeShader = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
+    eTransfer = VK_PIPELINE_STAGE_TRANSFER_BIT,
+    eBottomOfPipe = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
+    eHost = VK_PIPELINE_STAGE_HOST_BIT,
+    eAllGraphics = VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT,
+    eAllCommands = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
+    eNone = VK_PIPELINE_STAGE_NONE,
+    eTransformFeedbackEXT = VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT,
+    eConditionalRenderingEXT = VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT,
+    eAccelerationStructureBuildKHR = VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_KHR,
+    eRayTracingShaderKHR = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR,
+    eFragmentDensityProcessEXT = VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT,
+    eFragmentShadingRateAttachmentKHR = VK_PIPELINE_STAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR,
+    eCommandPreprocessNV = VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_NV,
+    eTaskShaderEXT = VK_PIPELINE_STAGE_TASK_SHADER_BIT_EXT,
+    eMeshShaderEXT = VK_PIPELINE_STAGE_MESH_SHADER_BIT_EXT,
+    eAccelerationStructureBuildNV = VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV,
+    eMeshShaderNV = VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV,
+    eNoneKHR = VK_PIPELINE_STAGE_NONE_KHR,
+    eRayTracingShaderNV = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV,
+    eShadingRateImageNV = VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV,
+    eTaskShaderNV = VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV
+  };
+
+  using PipelineStageFlags = Flags<PipelineStageFlagBits>;
+
+
+  template <> struct FlagTraits<PipelineStageFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineStageFlags allFlags = 
+          PipelineStageFlagBits::eTopOfPipe
+        | PipelineStageFlagBits::eDrawIndirect
+        | PipelineStageFlagBits::eVertexInput
+        | PipelineStageFlagBits::eVertexShader
+        | PipelineStageFlagBits::eTessellationControlShader
+        | PipelineStageFlagBits::eTessellationEvaluationShader
+        | PipelineStageFlagBits::eGeometryShader
+        | PipelineStageFlagBits::eFragmentShader
+        | PipelineStageFlagBits::eEarlyFragmentTests
+        | PipelineStageFlagBits::eLateFragmentTests
+        | PipelineStageFlagBits::eColorAttachmentOutput
+        | PipelineStageFlagBits::eComputeShader
+        | PipelineStageFlagBits::eTransfer
+        | PipelineStageFlagBits::eBottomOfPipe
+        | PipelineStageFlagBits::eHost
+        | PipelineStageFlagBits::eAllGraphics
+        | PipelineStageFlagBits::eAllCommands
+        | PipelineStageFlagBits::eNone
+        | PipelineStageFlagBits::eTransformFeedbackEXT
+        | PipelineStageFlagBits::eConditionalRenderingEXT
+        | PipelineStageFlagBits::eAccelerationStructureBuildKHR
+        | PipelineStageFlagBits::eRayTracingShaderKHR
+        | PipelineStageFlagBits::eFragmentDensityProcessEXT
+        | PipelineStageFlagBits::eFragmentShadingRateAttachmentKHR
+        | PipelineStageFlagBits::eCommandPreprocessNV
+        | PipelineStageFlagBits::eTaskShaderEXT
+        | PipelineStageFlagBits::eMeshShaderEXT;
+  };
+
+  enum class MemoryMapFlagBits : VkMemoryMapFlags
+  {};
+
+  using MemoryMapFlags = Flags<MemoryMapFlagBits>;
+
+
+  template <> struct FlagTraits<MemoryMapFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR MemoryMapFlags allFlags =  {};
+  };
+
+  enum class ImageAspectFlagBits : VkImageAspectFlags
+  {
+    eColor = VK_IMAGE_ASPECT_COLOR_BIT,
+    eDepth = VK_IMAGE_ASPECT_DEPTH_BIT,
+    eStencil = VK_IMAGE_ASPECT_STENCIL_BIT,
+    eMetadata = VK_IMAGE_ASPECT_METADATA_BIT,
+    ePlane0 = VK_IMAGE_ASPECT_PLANE_0_BIT,
+    ePlane1 = VK_IMAGE_ASPECT_PLANE_1_BIT,
+    ePlane2 = VK_IMAGE_ASPECT_PLANE_2_BIT,
+    eNone = VK_IMAGE_ASPECT_NONE,
+    eMemoryPlane0EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT,
+    eMemoryPlane1EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT,
+    eMemoryPlane2EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT,
+    eMemoryPlane3EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_3_BIT_EXT,
+    eNoneKHR = VK_IMAGE_ASPECT_NONE_KHR,
+    ePlane0KHR = VK_IMAGE_ASPECT_PLANE_0_BIT_KHR,
+    ePlane1KHR = VK_IMAGE_ASPECT_PLANE_1_BIT_KHR,
+    ePlane2KHR = VK_IMAGE_ASPECT_PLANE_2_BIT_KHR
+  };
+
+  using ImageAspectFlags = Flags<ImageAspectFlagBits>;
+
+
+  template <> struct FlagTraits<ImageAspectFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ImageAspectFlags allFlags = 
+          ImageAspectFlagBits::eColor
+        | ImageAspectFlagBits::eDepth
+        | ImageAspectFlagBits::eStencil
+        | ImageAspectFlagBits::eMetadata
+        | ImageAspectFlagBits::ePlane0
+        | ImageAspectFlagBits::ePlane1
+        | ImageAspectFlagBits::ePlane2
+        | ImageAspectFlagBits::eNone
+        | ImageAspectFlagBits::eMemoryPlane0EXT
+        | ImageAspectFlagBits::eMemoryPlane1EXT
+        | ImageAspectFlagBits::eMemoryPlane2EXT
+        | ImageAspectFlagBits::eMemoryPlane3EXT;
+  };
+
+  enum class SparseImageFormatFlagBits : VkSparseImageFormatFlags
+  {
+    eSingleMiptail = VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT,
+    eAlignedMipSize = VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT,
+    eNonstandardBlockSize = VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT
+  };
+
+  using SparseImageFormatFlags = Flags<SparseImageFormatFlagBits>;
+
+
+  template <> struct FlagTraits<SparseImageFormatFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR SparseImageFormatFlags allFlags = 
+          SparseImageFormatFlagBits::eSingleMiptail
+        | SparseImageFormatFlagBits::eAlignedMipSize
+        | SparseImageFormatFlagBits::eNonstandardBlockSize;
+  };
+
+  enum class SparseMemoryBindFlagBits : VkSparseMemoryBindFlags
+  {
+    eMetadata = VK_SPARSE_MEMORY_BIND_METADATA_BIT
+  };
+
+  using SparseMemoryBindFlags = Flags<SparseMemoryBindFlagBits>;
+
+
+  template <> struct FlagTraits<SparseMemoryBindFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR SparseMemoryBindFlags allFlags = 
+          SparseMemoryBindFlagBits::eMetadata;
+  };
+
+  enum class FenceCreateFlagBits : VkFenceCreateFlags
+  {
+    eSignaled = VK_FENCE_CREATE_SIGNALED_BIT
+  };
+
+  using FenceCreateFlags = Flags<FenceCreateFlagBits>;
+
+
+  template <> struct FlagTraits<FenceCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR FenceCreateFlags allFlags = 
+          FenceCreateFlagBits::eSignaled;
+  };
+
+  enum class SemaphoreCreateFlagBits : VkSemaphoreCreateFlags
+  {};
+
+  using SemaphoreCreateFlags = Flags<SemaphoreCreateFlagBits>;
+
+
+  template <> struct FlagTraits<SemaphoreCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR SemaphoreCreateFlags allFlags =  {};
+  };
+
+  enum class EventCreateFlagBits : VkEventCreateFlags
+  {
+    eDeviceOnly = VK_EVENT_CREATE_DEVICE_ONLY_BIT,
+    eDeviceOnlyKHR = VK_EVENT_CREATE_DEVICE_ONLY_BIT_KHR
+  };
+
+  using EventCreateFlags = Flags<EventCreateFlagBits>;
+
+
+  template <> struct FlagTraits<EventCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR EventCreateFlags allFlags = 
+          EventCreateFlagBits::eDeviceOnly;
+  };
+
+  enum class QueryPipelineStatisticFlagBits : VkQueryPipelineStatisticFlags
+  {
+    eInputAssemblyVertices = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT,
+    eInputAssemblyPrimitives = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT,
+    eVertexShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT,
+    eGeometryShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT,
+    eGeometryShaderPrimitives = VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT,
+    eClippingInvocations = VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT,
+    eClippingPrimitives = VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT,
+    eFragmentShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT,
+    eTessellationControlShaderPatches = VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT,
+    eTessellationEvaluationShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT,
+    eComputeShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT,
+    eTaskShaderInvocationsEXT = VK_QUERY_PIPELINE_STATISTIC_TASK_SHADER_INVOCATIONS_BIT_EXT,
+    eMeshShaderInvocationsEXT = VK_QUERY_PIPELINE_STATISTIC_MESH_SHADER_INVOCATIONS_BIT_EXT,
+    eClusterCullingShaderInvocationsHUAWEI = VK_QUERY_PIPELINE_STATISTIC_CLUSTER_CULLING_SHADER_INVOCATIONS_BIT_HUAWEI
+  };
+
+  using QueryPipelineStatisticFlags = Flags<QueryPipelineStatisticFlagBits>;
+
+
+  template <> struct FlagTraits<QueryPipelineStatisticFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR QueryPipelineStatisticFlags allFlags = 
+          QueryPipelineStatisticFlagBits::eInputAssemblyVertices
+        | QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives
+        | QueryPipelineStatisticFlagBits::eVertexShaderInvocations
+        | QueryPipelineStatisticFlagBits::eGeometryShaderInvocations
+        | QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives
+        | QueryPipelineStatisticFlagBits::eClippingInvocations
+        | QueryPipelineStatisticFlagBits::eClippingPrimitives
+        | QueryPipelineStatisticFlagBits::eFragmentShaderInvocations
+        | QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches
+        | QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations
+        | QueryPipelineStatisticFlagBits::eComputeShaderInvocations
+        | QueryPipelineStatisticFlagBits::eTaskShaderInvocationsEXT
+        | QueryPipelineStatisticFlagBits::eMeshShaderInvocationsEXT
+        | QueryPipelineStatisticFlagBits::eClusterCullingShaderInvocationsHUAWEI;
+  };
+
+  enum class QueryResultFlagBits : VkQueryResultFlags
+  {
+    e64 = VK_QUERY_RESULT_64_BIT,
+    eWait = VK_QUERY_RESULT_WAIT_BIT,
+    eWithAvailability = VK_QUERY_RESULT_WITH_AVAILABILITY_BIT,
+    ePartial = VK_QUERY_RESULT_PARTIAL_BIT,
+    eWithStatusKHR = VK_QUERY_RESULT_WITH_STATUS_BIT_KHR
+  };
+
+  using QueryResultFlags = Flags<QueryResultFlagBits>;
+
+
+  template <> struct FlagTraits<QueryResultFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR QueryResultFlags allFlags = 
+          QueryResultFlagBits::e64
+        | QueryResultFlagBits::eWait
+        | QueryResultFlagBits::eWithAvailability
+        | QueryResultFlagBits::ePartial
+        | QueryResultFlagBits::eWithStatusKHR;
+  };
+
+  enum class QueryType
+  {
+    eOcclusion = VK_QUERY_TYPE_OCCLUSION,
+    ePipelineStatistics = VK_QUERY_TYPE_PIPELINE_STATISTICS,
+    eTimestamp = VK_QUERY_TYPE_TIMESTAMP,
+    eResultStatusOnlyKHR = VK_QUERY_TYPE_RESULT_STATUS_ONLY_KHR,
+    eTransformFeedbackStreamEXT = VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT,
+    ePerformanceQueryKHR = VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR,
+    eAccelerationStructureCompactedSizeKHR = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR,
+    eAccelerationStructureSerializationSizeKHR = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR,
+    eAccelerationStructureCompactedSizeNV = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV,
+    ePerformanceQueryINTEL = VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL,
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    eVideoEncodeBitstreamBufferRangeKHR = VK_QUERY_TYPE_VIDEO_ENCODE_BITSTREAM_BUFFER_RANGE_KHR,
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+    eMeshPrimitivesGeneratedEXT = VK_QUERY_TYPE_MESH_PRIMITIVES_GENERATED_EXT,
+    ePrimitivesGeneratedEXT = VK_QUERY_TYPE_PRIMITIVES_GENERATED_EXT,
+    eAccelerationStructureSerializationBottomLevelPointersKHR = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_BOTTOM_LEVEL_POINTERS_KHR,
+    eAccelerationStructureSizeKHR = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SIZE_KHR,
+    eMicromapSerializationSizeEXT = VK_QUERY_TYPE_MICROMAP_SERIALIZATION_SIZE_EXT,
+    eMicromapCompactedSizeEXT = VK_QUERY_TYPE_MICROMAP_COMPACTED_SIZE_EXT
+  };
+
+  enum class QueryPoolCreateFlagBits : VkQueryPoolCreateFlags
+  {};
+
+  using QueryPoolCreateFlags = Flags<QueryPoolCreateFlagBits>;
+
+
+  template <> struct FlagTraits<QueryPoolCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR QueryPoolCreateFlags allFlags =  {};
+  };
+
+  enum class BufferCreateFlagBits : VkBufferCreateFlags
+  {
+    eSparseBinding = VK_BUFFER_CREATE_SPARSE_BINDING_BIT,
+    eSparseResidency = VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT,
+    eSparseAliased = VK_BUFFER_CREATE_SPARSE_ALIASED_BIT,
+    eProtected = VK_BUFFER_CREATE_PROTECTED_BIT,
+    eDeviceAddressCaptureReplay = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT,
+    eDescriptorBufferCaptureReplayEXT = VK_BUFFER_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT,
+    eDeviceAddressCaptureReplayEXT = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT,
+    eDeviceAddressCaptureReplayKHR = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR
+  };
+
+  using BufferCreateFlags = Flags<BufferCreateFlagBits>;
+
+
+  template <> struct FlagTraits<BufferCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR BufferCreateFlags allFlags = 
+          BufferCreateFlagBits::eSparseBinding
+        | BufferCreateFlagBits::eSparseResidency
+        | BufferCreateFlagBits::eSparseAliased
+        | BufferCreateFlagBits::eProtected
+        | BufferCreateFlagBits::eDeviceAddressCaptureReplay
+        | BufferCreateFlagBits::eDescriptorBufferCaptureReplayEXT;
+  };
+
+  enum class BufferUsageFlagBits : VkBufferUsageFlags
+  {
+    eTransferSrc = VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
+    eTransferDst = VK_BUFFER_USAGE_TRANSFER_DST_BIT,
+    eUniformTexelBuffer = VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT,
+    eStorageTexelBuffer = VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT,
+    eUniformBuffer = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT,
+    eStorageBuffer = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT,
+    eIndexBuffer = VK_BUFFER_USAGE_INDEX_BUFFER_BIT,
+    eVertexBuffer = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT,
+    eIndirectBuffer = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT,
+    eShaderDeviceAddress = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT,
+    eVideoDecodeSrcKHR = VK_BUFFER_USAGE_VIDEO_DECODE_SRC_BIT_KHR,
+    eVideoDecodeDstKHR = VK_BUFFER_USAGE_VIDEO_DECODE_DST_BIT_KHR,
+    eTransformFeedbackBufferEXT = VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT,
+    eTransformFeedbackCounterBufferEXT = VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT,
+    eConditionalRenderingEXT = VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT,
+    eAccelerationStructureBuildInputReadOnlyKHR = VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_BIT_KHR,
+    eAccelerationStructureStorageKHR = VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_STORAGE_BIT_KHR,
+    eShaderBindingTableKHR = VK_BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR,
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    eVideoEncodeDstKHR = VK_BUFFER_USAGE_VIDEO_ENCODE_DST_BIT_KHR,
+    eVideoEncodeSrcKHR = VK_BUFFER_USAGE_VIDEO_ENCODE_SRC_BIT_KHR,
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+    eSamplerDescriptorBufferEXT = VK_BUFFER_USAGE_SAMPLER_DESCRIPTOR_BUFFER_BIT_EXT,
+    eResourceDescriptorBufferEXT = VK_BUFFER_USAGE_RESOURCE_DESCRIPTOR_BUFFER_BIT_EXT,
+    ePushDescriptorsDescriptorBufferEXT = VK_BUFFER_USAGE_PUSH_DESCRIPTORS_DESCRIPTOR_BUFFER_BIT_EXT,
+    eMicromapBuildInputReadOnlyEXT = VK_BUFFER_USAGE_MICROMAP_BUILD_INPUT_READ_ONLY_BIT_EXT,
+    eMicromapStorageEXT = VK_BUFFER_USAGE_MICROMAP_STORAGE_BIT_EXT,
+    eRayTracingNV = VK_BUFFER_USAGE_RAY_TRACING_BIT_NV,
+    eShaderDeviceAddressEXT = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT,
+    eShaderDeviceAddressKHR = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_KHR
+  };
+
+  using BufferUsageFlags = Flags<BufferUsageFlagBits>;
+
+
+  template <> struct FlagTraits<BufferUsageFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR BufferUsageFlags allFlags = 
+          BufferUsageFlagBits::eTransferSrc
+        | BufferUsageFlagBits::eTransferDst
+        | BufferUsageFlagBits::eUniformTexelBuffer
+        | BufferUsageFlagBits::eStorageTexelBuffer
+        | BufferUsageFlagBits::eUniformBuffer
+        | BufferUsageFlagBits::eStorageBuffer
+        | BufferUsageFlagBits::eIndexBuffer
+        | BufferUsageFlagBits::eVertexBuffer
+        | BufferUsageFlagBits::eIndirectBuffer
+        | BufferUsageFlagBits::eShaderDeviceAddress
+        | BufferUsageFlagBits::eVideoDecodeSrcKHR
+        | BufferUsageFlagBits::eVideoDecodeDstKHR
+        | BufferUsageFlagBits::eTransformFeedbackBufferEXT
+        | BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT
+        | BufferUsageFlagBits::eConditionalRenderingEXT
+        | BufferUsageFlagBits::eAccelerationStructureBuildInputReadOnlyKHR
+        | BufferUsageFlagBits::eAccelerationStructureStorageKHR
+        | BufferUsageFlagBits::eShaderBindingTableKHR
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+        | BufferUsageFlagBits::eVideoEncodeDstKHR
+        | BufferUsageFlagBits::eVideoEncodeSrcKHR
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+        | BufferUsageFlagBits::eSamplerDescriptorBufferEXT
+        | BufferUsageFlagBits::eResourceDescriptorBufferEXT
+        | BufferUsageFlagBits::ePushDescriptorsDescriptorBufferEXT
+        | BufferUsageFlagBits::eMicromapBuildInputReadOnlyEXT
+        | BufferUsageFlagBits::eMicromapStorageEXT;
+  };
+
+  enum class SharingMode
+  {
+    eExclusive = VK_SHARING_MODE_EXCLUSIVE,
+    eConcurrent = VK_SHARING_MODE_CONCURRENT
+  };
+
+  enum class BufferViewCreateFlagBits : VkBufferViewCreateFlags
+  {};
+
+  using BufferViewCreateFlags = Flags<BufferViewCreateFlagBits>;
+
+
+  template <> struct FlagTraits<BufferViewCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR BufferViewCreateFlags allFlags =  {};
+  };
+
+  // C++ scoped enum mirroring VK_IMAGE_LAYOUT_*; includes core layouts plus
+  // extension-provided ones (KHR/EXT/NV suffixes).
+  enum class ImageLayout
+  {
+    eUndefined = VK_IMAGE_LAYOUT_UNDEFINED,
+    eGeneral = VK_IMAGE_LAYOUT_GENERAL,
+    eColorAttachmentOptimal = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
+    eDepthStencilAttachmentOptimal = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
+    eDepthStencilReadOnlyOptimal = VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL,
+    eShaderReadOnlyOptimal = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
+    eTransferSrcOptimal = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
+    eTransferDstOptimal = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
+    ePreinitialized = VK_IMAGE_LAYOUT_PREINITIALIZED,
+    eDepthReadOnlyStencilAttachmentOptimal = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL,
+    eDepthAttachmentStencilReadOnlyOptimal = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL,
+    eDepthAttachmentOptimal = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL,
+    eDepthReadOnlyOptimal = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL,
+    eStencilAttachmentOptimal = VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL,
+    eStencilReadOnlyOptimal = VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL,
+    eReadOnlyOptimal = VK_IMAGE_LAYOUT_READ_ONLY_OPTIMAL,
+    eAttachmentOptimal = VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL,
+    ePresentSrcKHR = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
+    eVideoDecodeDstKHR = VK_IMAGE_LAYOUT_VIDEO_DECODE_DST_KHR,
+    eVideoDecodeSrcKHR = VK_IMAGE_LAYOUT_VIDEO_DECODE_SRC_KHR,
+    eVideoDecodeDpbKHR = VK_IMAGE_LAYOUT_VIDEO_DECODE_DPB_KHR,
+    eSharedPresentKHR = VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR,
+    eFragmentDensityMapOptimalEXT = VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT,
+    eFragmentShadingRateAttachmentOptimalKHR = VK_IMAGE_LAYOUT_FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR,
+// Video-encode layouts are only compiled in when beta (provisional) extensions are enabled.
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    eVideoEncodeDstKHR = VK_IMAGE_LAYOUT_VIDEO_ENCODE_DST_KHR,
+    eVideoEncodeSrcKHR = VK_IMAGE_LAYOUT_VIDEO_ENCODE_SRC_KHR,
+    eVideoEncodeDpbKHR = VK_IMAGE_LAYOUT_VIDEO_ENCODE_DPB_KHR,
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+    eAttachmentFeedbackLoopOptimalEXT = VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT,
+    // Extension-suffixed duplicates of entries above — presumably aliases kept
+    // for source compatibility after promotion to core (same VK constants).
+    eAttachmentOptimalKHR = VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL_KHR,
+    eDepthAttachmentOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR,
+    eDepthAttachmentStencilReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR,
+    eDepthReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR,
+    eDepthReadOnlyStencilAttachmentOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR,
+    eReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_READ_ONLY_OPTIMAL_KHR,
+    eShadingRateOptimalNV = VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV,
+    eStencilAttachmentOptimalKHR = VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR,
+    eStencilReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR
+  };
+
+  // C++ scoped enum mirroring VK_COMPONENT_SWIZZLE_* (image-view component remapping).
+  enum class ComponentSwizzle
+  {
+    eIdentity = VK_COMPONENT_SWIZZLE_IDENTITY,
+    eZero = VK_COMPONENT_SWIZZLE_ZERO,
+    eOne = VK_COMPONENT_SWIZZLE_ONE,
+    eR = VK_COMPONENT_SWIZZLE_R,
+    eG = VK_COMPONENT_SWIZZLE_G,
+    eB = VK_COMPONENT_SWIZZLE_B,
+    eA = VK_COMPONENT_SWIZZLE_A
+  };
+
+  // Bit flags for VkImageViewCreateFlags; all currently-defined bits come from EXT extensions.
+  enum class ImageViewCreateFlagBits : VkImageViewCreateFlags
+  {
+    eFragmentDensityMapDynamicEXT = VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DYNAMIC_BIT_EXT,
+    eDescriptorBufferCaptureReplayEXT = VK_IMAGE_VIEW_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT,
+    eFragmentDensityMapDeferredEXT = VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DEFERRED_BIT_EXT
+  };
+
+  using ImageViewCreateFlags = Flags<ImageViewCreateFlagBits>;
+
+
+  // FlagTraits for the Flags<> wrapper: allFlags ORs every bit defined above.
+  template <> struct FlagTraits<ImageViewCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ImageViewCreateFlags allFlags = 
+          ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT
+        | ImageViewCreateFlagBits::eDescriptorBufferCaptureReplayEXT
+        | ImageViewCreateFlagBits::eFragmentDensityMapDeferredEXT;
+  };
+
+  // C++ scoped enum mirroring VK_IMAGE_VIEW_TYPE_*.
+  enum class ImageViewType
+  {
+    e1D = VK_IMAGE_VIEW_TYPE_1D,
+    e2D = VK_IMAGE_VIEW_TYPE_2D,
+    e3D = VK_IMAGE_VIEW_TYPE_3D,
+    eCube = VK_IMAGE_VIEW_TYPE_CUBE,
+    e1DArray = VK_IMAGE_VIEW_TYPE_1D_ARRAY,
+    e2DArray = VK_IMAGE_VIEW_TYPE_2D_ARRAY,
+    eCubeArray = VK_IMAGE_VIEW_TYPE_CUBE_ARRAY
+  };
+
+  // VkShaderModuleCreateFlags currently defines no bits, so this enum is empty
+  // and allFlags below is the empty (zero) mask.
+  enum class ShaderModuleCreateFlagBits : VkShaderModuleCreateFlags
+  {};
+
+  using ShaderModuleCreateFlags = Flags<ShaderModuleCreateFlagBits>;
+
+
+  template <> struct FlagTraits<ShaderModuleCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ShaderModuleCreateFlags allFlags =  {};
+  };
+
+  // C++ scoped enum mirroring VK_BLEND_FACTOR_*.
+  enum class BlendFactor
+  {
+    eZero = VK_BLEND_FACTOR_ZERO,
+    eOne = VK_BLEND_FACTOR_ONE,
+    eSrcColor = VK_BLEND_FACTOR_SRC_COLOR,
+    eOneMinusSrcColor = VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR,
+    eDstColor = VK_BLEND_FACTOR_DST_COLOR,
+    eOneMinusDstColor = VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR,
+    eSrcAlpha = VK_BLEND_FACTOR_SRC_ALPHA,
+    eOneMinusSrcAlpha = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA,
+    eDstAlpha = VK_BLEND_FACTOR_DST_ALPHA,
+    eOneMinusDstAlpha = VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA,
+    eConstantColor = VK_BLEND_FACTOR_CONSTANT_COLOR,
+    eOneMinusConstantColor = VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR,
+    eConstantAlpha = VK_BLEND_FACTOR_CONSTANT_ALPHA,
+    eOneMinusConstantAlpha = VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA,
+    eSrcAlphaSaturate = VK_BLEND_FACTOR_SRC_ALPHA_SATURATE,
+    eSrc1Color = VK_BLEND_FACTOR_SRC1_COLOR,
+    eOneMinusSrc1Color = VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR,
+    eSrc1Alpha = VK_BLEND_FACTOR_SRC1_ALPHA,
+    eOneMinusSrc1Alpha = VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA
+  };
+
+  // C++ scoped enum mirroring VK_BLEND_OP_*; the five core ops come first,
+  // followed by the VK_BLEND_OP_*_EXT advanced-blend operations.
+  enum class BlendOp
+  {
+    eAdd = VK_BLEND_OP_ADD,
+    eSubtract = VK_BLEND_OP_SUBTRACT,
+    eReverseSubtract = VK_BLEND_OP_REVERSE_SUBTRACT,
+    eMin = VK_BLEND_OP_MIN,
+    eMax = VK_BLEND_OP_MAX,
+    eZeroEXT = VK_BLEND_OP_ZERO_EXT,
+    eSrcEXT = VK_BLEND_OP_SRC_EXT,
+    eDstEXT = VK_BLEND_OP_DST_EXT,
+    eSrcOverEXT = VK_BLEND_OP_SRC_OVER_EXT,
+    eDstOverEXT = VK_BLEND_OP_DST_OVER_EXT,
+    eSrcInEXT = VK_BLEND_OP_SRC_IN_EXT,
+    eDstInEXT = VK_BLEND_OP_DST_IN_EXT,
+    eSrcOutEXT = VK_BLEND_OP_SRC_OUT_EXT,
+    eDstOutEXT = VK_BLEND_OP_DST_OUT_EXT,
+    eSrcAtopEXT = VK_BLEND_OP_SRC_ATOP_EXT,
+    eDstAtopEXT = VK_BLEND_OP_DST_ATOP_EXT,
+    eXorEXT = VK_BLEND_OP_XOR_EXT,
+    eMultiplyEXT = VK_BLEND_OP_MULTIPLY_EXT,
+    eScreenEXT = VK_BLEND_OP_SCREEN_EXT,
+    eOverlayEXT = VK_BLEND_OP_OVERLAY_EXT,
+    eDarkenEXT = VK_BLEND_OP_DARKEN_EXT,
+    eLightenEXT = VK_BLEND_OP_LIGHTEN_EXT,
+    eColordodgeEXT = VK_BLEND_OP_COLORDODGE_EXT,
+    eColorburnEXT = VK_BLEND_OP_COLORBURN_EXT,
+    eHardlightEXT = VK_BLEND_OP_HARDLIGHT_EXT,
+    eSoftlightEXT = VK_BLEND_OP_SOFTLIGHT_EXT,
+    eDifferenceEXT = VK_BLEND_OP_DIFFERENCE_EXT,
+    eExclusionEXT = VK_BLEND_OP_EXCLUSION_EXT,
+    eInvertEXT = VK_BLEND_OP_INVERT_EXT,
+    eInvertRgbEXT = VK_BLEND_OP_INVERT_RGB_EXT,
+    eLineardodgeEXT = VK_BLEND_OP_LINEARDODGE_EXT,
+    eLinearburnEXT = VK_BLEND_OP_LINEARBURN_EXT,
+    eVividlightEXT = VK_BLEND_OP_VIVIDLIGHT_EXT,
+    eLinearlightEXT = VK_BLEND_OP_LINEARLIGHT_EXT,
+    ePinlightEXT = VK_BLEND_OP_PINLIGHT_EXT,
+    eHardmixEXT = VK_BLEND_OP_HARDMIX_EXT,
+    eHslHueEXT = VK_BLEND_OP_HSL_HUE_EXT,
+    eHslSaturationEXT = VK_BLEND_OP_HSL_SATURATION_EXT,
+    eHslColorEXT = VK_BLEND_OP_HSL_COLOR_EXT,
+    eHslLuminosityEXT = VK_BLEND_OP_HSL_LUMINOSITY_EXT,
+    ePlusEXT = VK_BLEND_OP_PLUS_EXT,
+    ePlusClampedEXT = VK_BLEND_OP_PLUS_CLAMPED_EXT,
+    ePlusClampedAlphaEXT = VK_BLEND_OP_PLUS_CLAMPED_ALPHA_EXT,
+    ePlusDarkerEXT = VK_BLEND_OP_PLUS_DARKER_EXT,
+    eMinusEXT = VK_BLEND_OP_MINUS_EXT,
+    eMinusClampedEXT = VK_BLEND_OP_MINUS_CLAMPED_EXT,
+    eContrastEXT = VK_BLEND_OP_CONTRAST_EXT,
+    eInvertOvgEXT = VK_BLEND_OP_INVERT_OVG_EXT,
+    eRedEXT = VK_BLEND_OP_RED_EXT,
+    eGreenEXT = VK_BLEND_OP_GREEN_EXT,
+    eBlueEXT = VK_BLEND_OP_BLUE_EXT
+  };
+
+  // Bit flags for VkColorComponentFlags (per-channel color write mask).
+  enum class ColorComponentFlagBits : VkColorComponentFlags
+  {
+    eR = VK_COLOR_COMPONENT_R_BIT,
+    eG = VK_COLOR_COMPONENT_G_BIT,
+    eB = VK_COLOR_COMPONENT_B_BIT,
+    eA = VK_COLOR_COMPONENT_A_BIT
+  };
+
+  using ColorComponentFlags = Flags<ColorComponentFlagBits>;
+
+
+  // FlagTraits for the Flags<> wrapper: allFlags ORs every bit defined above.
+  template <> struct FlagTraits<ColorComponentFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ColorComponentFlags allFlags = 
+          ColorComponentFlagBits::eR
+        | ColorComponentFlagBits::eG
+        | ColorComponentFlagBits::eB
+        | ColorComponentFlagBits::eA;
+  };
+
+  // C++ scoped enum mirroring VK_COMPARE_OP_*.
+  enum class CompareOp
+  {
+    eNever = VK_COMPARE_OP_NEVER,
+    eLess = VK_COMPARE_OP_LESS,
+    eEqual = VK_COMPARE_OP_EQUAL,
+    eLessOrEqual = VK_COMPARE_OP_LESS_OR_EQUAL,
+    eGreater = VK_COMPARE_OP_GREATER,
+    eNotEqual = VK_COMPARE_OP_NOT_EQUAL,
+    eGreaterOrEqual = VK_COMPARE_OP_GREATER_OR_EQUAL,
+    eAlways = VK_COMPARE_OP_ALWAYS
+  };
+
+  // Bit flags for VkCullModeFlags. Note eNone (no bits) and eFrontAndBack
+  // (combined mask) are included alongside the single-bit values.
+  enum class CullModeFlagBits : VkCullModeFlags
+  {
+    eNone = VK_CULL_MODE_NONE,
+    eFront = VK_CULL_MODE_FRONT_BIT,
+    eBack = VK_CULL_MODE_BACK_BIT,
+    eFrontAndBack = VK_CULL_MODE_FRONT_AND_BACK
+  };
+
+  using CullModeFlags = Flags<CullModeFlagBits>;
+
+
+  // FlagTraits for the Flags<> wrapper: allFlags ORs every value defined above.
+  template <> struct FlagTraits<CullModeFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR CullModeFlags allFlags = 
+          CullModeFlagBits::eNone
+        | CullModeFlagBits::eFront
+        | CullModeFlagBits::eBack
+        | CullModeFlagBits::eFrontAndBack;
+  };
+
+  // C++ scoped enum mirroring VK_DYNAMIC_STATE_*: core values first, then
+  // extension values, then extension-suffixed duplicates of promoted entries.
+  enum class DynamicState
+  {
+    eViewport = VK_DYNAMIC_STATE_VIEWPORT,
+    eScissor = VK_DYNAMIC_STATE_SCISSOR,
+    eLineWidth = VK_DYNAMIC_STATE_LINE_WIDTH,
+    eDepthBias = VK_DYNAMIC_STATE_DEPTH_BIAS,
+    eBlendConstants = VK_DYNAMIC_STATE_BLEND_CONSTANTS,
+    eDepthBounds = VK_DYNAMIC_STATE_DEPTH_BOUNDS,
+    eStencilCompareMask = VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK,
+    eStencilWriteMask = VK_DYNAMIC_STATE_STENCIL_WRITE_MASK,
+    eStencilReference = VK_DYNAMIC_STATE_STENCIL_REFERENCE,
+    eCullMode = VK_DYNAMIC_STATE_CULL_MODE,
+    eFrontFace = VK_DYNAMIC_STATE_FRONT_FACE,
+    ePrimitiveTopology = VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY,
+    eViewportWithCount = VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT,
+    eScissorWithCount = VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT,
+    eVertexInputBindingStride = VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE,
+    eDepthTestEnable = VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE,
+    eDepthWriteEnable = VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE,
+    eDepthCompareOp = VK_DYNAMIC_STATE_DEPTH_COMPARE_OP,
+    eDepthBoundsTestEnable = VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE,
+    eStencilTestEnable = VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE,
+    eStencilOp = VK_DYNAMIC_STATE_STENCIL_OP,
+    eRasterizerDiscardEnable = VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE,
+    eDepthBiasEnable = VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE,
+    ePrimitiveRestartEnable = VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE,
+    eViewportWScalingNV = VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV,
+    eDiscardRectangleEXT = VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT,
+    eSampleLocationsEXT = VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT,
+    eRayTracingPipelineStackSizeKHR = VK_DYNAMIC_STATE_RAY_TRACING_PIPELINE_STACK_SIZE_KHR,
+    eViewportShadingRatePaletteNV = VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV,
+    eViewportCoarseSampleOrderNV = VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV,
+    eExclusiveScissorNV = VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV,
+    eFragmentShadingRateKHR = VK_DYNAMIC_STATE_FRAGMENT_SHADING_RATE_KHR,
+    eLineStippleEXT = VK_DYNAMIC_STATE_LINE_STIPPLE_EXT,
+    eVertexInputEXT = VK_DYNAMIC_STATE_VERTEX_INPUT_EXT,
+    ePatchControlPointsEXT = VK_DYNAMIC_STATE_PATCH_CONTROL_POINTS_EXT,
+    eLogicOpEXT = VK_DYNAMIC_STATE_LOGIC_OP_EXT,
+    eColorWriteEnableEXT = VK_DYNAMIC_STATE_COLOR_WRITE_ENABLE_EXT,
+    eTessellationDomainOriginEXT = VK_DYNAMIC_STATE_TESSELLATION_DOMAIN_ORIGIN_EXT,
+    eDepthClampEnableEXT = VK_DYNAMIC_STATE_DEPTH_CLAMP_ENABLE_EXT,
+    ePolygonModeEXT = VK_DYNAMIC_STATE_POLYGON_MODE_EXT,
+    eRasterizationSamplesEXT = VK_DYNAMIC_STATE_RASTERIZATION_SAMPLES_EXT,
+    eSampleMaskEXT = VK_DYNAMIC_STATE_SAMPLE_MASK_EXT,
+    eAlphaToCoverageEnableEXT = VK_DYNAMIC_STATE_ALPHA_TO_COVERAGE_ENABLE_EXT,
+    eAlphaToOneEnableEXT = VK_DYNAMIC_STATE_ALPHA_TO_ONE_ENABLE_EXT,
+    eLogicOpEnableEXT = VK_DYNAMIC_STATE_LOGIC_OP_ENABLE_EXT,
+    eColorBlendEnableEXT = VK_DYNAMIC_STATE_COLOR_BLEND_ENABLE_EXT,
+    eColorBlendEquationEXT = VK_DYNAMIC_STATE_COLOR_BLEND_EQUATION_EXT,
+    eColorWriteMaskEXT = VK_DYNAMIC_STATE_COLOR_WRITE_MASK_EXT,
+    eRasterizationStreamEXT = VK_DYNAMIC_STATE_RASTERIZATION_STREAM_EXT,
+    eConservativeRasterizationModeEXT = VK_DYNAMIC_STATE_CONSERVATIVE_RASTERIZATION_MODE_EXT,
+    eExtraPrimitiveOverestimationSizeEXT = VK_DYNAMIC_STATE_EXTRA_PRIMITIVE_OVERESTIMATION_SIZE_EXT,
+    eDepthClipEnableEXT = VK_DYNAMIC_STATE_DEPTH_CLIP_ENABLE_EXT,
+    eSampleLocationsEnableEXT = VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_ENABLE_EXT,
+    eColorBlendAdvancedEXT = VK_DYNAMIC_STATE_COLOR_BLEND_ADVANCED_EXT,
+    eProvokingVertexModeEXT = VK_DYNAMIC_STATE_PROVOKING_VERTEX_MODE_EXT,
+    eLineRasterizationModeEXT = VK_DYNAMIC_STATE_LINE_RASTERIZATION_MODE_EXT,
+    eLineStippleEnableEXT = VK_DYNAMIC_STATE_LINE_STIPPLE_ENABLE_EXT,
+    eDepthClipNegativeOneToOneEXT = VK_DYNAMIC_STATE_DEPTH_CLIP_NEGATIVE_ONE_TO_ONE_EXT,
+    eViewportWScalingEnableNV = VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_ENABLE_NV,
+    eViewportSwizzleNV = VK_DYNAMIC_STATE_VIEWPORT_SWIZZLE_NV,
+    eCoverageToColorEnableNV = VK_DYNAMIC_STATE_COVERAGE_TO_COLOR_ENABLE_NV,
+    eCoverageToColorLocationNV = VK_DYNAMIC_STATE_COVERAGE_TO_COLOR_LOCATION_NV,
+    eCoverageModulationModeNV = VK_DYNAMIC_STATE_COVERAGE_MODULATION_MODE_NV,
+    eCoverageModulationTableEnableNV = VK_DYNAMIC_STATE_COVERAGE_MODULATION_TABLE_ENABLE_NV,
+    eCoverageModulationTableNV = VK_DYNAMIC_STATE_COVERAGE_MODULATION_TABLE_NV,
+    eShadingRateImageEnableNV = VK_DYNAMIC_STATE_SHADING_RATE_IMAGE_ENABLE_NV,
+    eRepresentativeFragmentTestEnableNV = VK_DYNAMIC_STATE_REPRESENTATIVE_FRAGMENT_TEST_ENABLE_NV,
+    eCoverageReductionModeNV = VK_DYNAMIC_STATE_COVERAGE_REDUCTION_MODE_NV,
+    // EXT-suffixed duplicates of core entries above — presumably aliases kept
+    // for source compatibility after promotion to core (same VK constants).
+    eCullModeEXT = VK_DYNAMIC_STATE_CULL_MODE_EXT,
+    eDepthBiasEnableEXT = VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE_EXT,
+    eDepthBoundsTestEnableEXT = VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE_EXT,
+    eDepthCompareOpEXT = VK_DYNAMIC_STATE_DEPTH_COMPARE_OP_EXT,
+    eDepthTestEnableEXT = VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE_EXT,
+    eDepthWriteEnableEXT = VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE_EXT,
+    eFrontFaceEXT = VK_DYNAMIC_STATE_FRONT_FACE_EXT,
+    ePrimitiveRestartEnableEXT = VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE_EXT,
+    ePrimitiveTopologyEXT = VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT,
+    eRasterizerDiscardEnableEXT = VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE_EXT,
+    eScissorWithCountEXT = VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT,
+    eStencilOpEXT = VK_DYNAMIC_STATE_STENCIL_OP_EXT,
+    eStencilTestEnableEXT = VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE_EXT,
+    eVertexInputBindingStrideEXT = VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT,
+    eViewportWithCountEXT = VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT
+  };
+
+  // C++ scoped enum mirroring VK_FRONT_FACE_*.
+  enum class FrontFace
+  {
+    eCounterClockwise = VK_FRONT_FACE_COUNTER_CLOCKWISE,
+    eClockwise = VK_FRONT_FACE_CLOCKWISE
+  };
+
+  // C++ scoped enum mirroring VK_LOGIC_OP_* (framebuffer logical operations).
+  enum class LogicOp
+  {
+    eClear = VK_LOGIC_OP_CLEAR,
+    eAnd = VK_LOGIC_OP_AND,
+    eAndReverse = VK_LOGIC_OP_AND_REVERSE,
+    eCopy = VK_LOGIC_OP_COPY,
+    eAndInverted = VK_LOGIC_OP_AND_INVERTED,
+    eNoOp = VK_LOGIC_OP_NO_OP,
+    eXor = VK_LOGIC_OP_XOR,
+    eOr = VK_LOGIC_OP_OR,
+    eNor = VK_LOGIC_OP_NOR,
+    eEquivalent = VK_LOGIC_OP_EQUIVALENT,
+    eInvert = VK_LOGIC_OP_INVERT,
+    eOrReverse = VK_LOGIC_OP_OR_REVERSE,
+    eCopyInverted = VK_LOGIC_OP_COPY_INVERTED,
+    eOrInverted = VK_LOGIC_OP_OR_INVERTED,
+    eNand = VK_LOGIC_OP_NAND,
+    eSet = VK_LOGIC_OP_SET
+  };
+
+  // Bit flags for VkPipelineCreateFlags: core bits, extension bits, and
+  // suffixed duplicates of promoted entries. The last two entries map
+  // VK_PIPELINE_RASTERIZATION_STATE_CREATE_* constants into this enum.
+  enum class PipelineCreateFlagBits : VkPipelineCreateFlags
+  {
+    eDisableOptimization = VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT,
+    eAllowDerivatives = VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT,
+    eDerivative = VK_PIPELINE_CREATE_DERIVATIVE_BIT,
+    eViewIndexFromDeviceIndex = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT,
+    eDispatchBase = VK_PIPELINE_CREATE_DISPATCH_BASE_BIT,
+    eFailOnPipelineCompileRequired = VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT,
+    eEarlyReturnOnFailure = VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT,
+    eRenderingFragmentShadingRateAttachmentKHR = VK_PIPELINE_CREATE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR,
+    eRenderingFragmentDensityMapAttachmentEXT = VK_PIPELINE_CREATE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT,
+    eRayTracingNoNullAnyHitShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR,
+    eRayTracingNoNullClosestHitShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR,
+    eRayTracingNoNullMissShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR,
+    eRayTracingNoNullIntersectionShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR,
+    eRayTracingSkipTrianglesKHR = VK_PIPELINE_CREATE_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR,
+    eRayTracingSkipAabbsKHR = VK_PIPELINE_CREATE_RAY_TRACING_SKIP_AABBS_BIT_KHR,
+    eRayTracingShaderGroupHandleCaptureReplayKHR = VK_PIPELINE_CREATE_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR,
+    eDeferCompileNV = VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV,
+    eCaptureStatisticsKHR = VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR,
+    eCaptureInternalRepresentationsKHR = VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR,
+    eIndirectBindableNV = VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV,
+    eLibraryKHR = VK_PIPELINE_CREATE_LIBRARY_BIT_KHR,
+    eDescriptorBufferEXT = VK_PIPELINE_CREATE_DESCRIPTOR_BUFFER_BIT_EXT,
+    eRetainLinkTimeOptimizationInfoEXT = VK_PIPELINE_CREATE_RETAIN_LINK_TIME_OPTIMIZATION_INFO_BIT_EXT,
+    eLinkTimeOptimizationEXT = VK_PIPELINE_CREATE_LINK_TIME_OPTIMIZATION_BIT_EXT,
+    eRayTracingAllowMotionNV = VK_PIPELINE_CREATE_RAY_TRACING_ALLOW_MOTION_BIT_NV,
+    eColorAttachmentFeedbackLoopEXT = VK_PIPELINE_CREATE_COLOR_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT,
+    eDepthStencilAttachmentFeedbackLoopEXT = VK_PIPELINE_CREATE_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT,
+    eRayTracingOpacityMicromapEXT = VK_PIPELINE_CREATE_RAY_TRACING_OPACITY_MICROMAP_BIT_EXT,
+    eNoProtectedAccessEXT = VK_PIPELINE_CREATE_NO_PROTECTED_ACCESS_BIT_EXT,
+    eProtectedAccessOnlyEXT = VK_PIPELINE_CREATE_PROTECTED_ACCESS_ONLY_BIT_EXT,
+    // Suffixed duplicates — presumably aliases from extension promotion.
+    eDispatchBaseKHR = VK_PIPELINE_CREATE_DISPATCH_BASE_KHR,
+    eEarlyReturnOnFailureEXT = VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT,
+    eFailOnPipelineCompileRequiredEXT = VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_EXT,
+    eViewIndexFromDeviceIndexKHR = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR,
+    eVkPipelineRasterizationStateCreateFragmentDensityMapAttachmentEXT = VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT,
+    eVkPipelineRasterizationStateCreateFragmentShadingRateAttachmentKHR = VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR
+  };
+
+  using PipelineCreateFlags = Flags<PipelineCreateFlagBits>;
+
+
+  // FlagTraits for the Flags<> wrapper: allFlags ORs each distinct bit once
+  // (suffixed duplicate enumerators are intentionally omitted).
+  template <> struct FlagTraits<PipelineCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineCreateFlags allFlags = 
+          PipelineCreateFlagBits::eDisableOptimization
+        | PipelineCreateFlagBits::eAllowDerivatives
+        | PipelineCreateFlagBits::eDerivative
+        | PipelineCreateFlagBits::eViewIndexFromDeviceIndex
+        | PipelineCreateFlagBits::eDispatchBase
+        | PipelineCreateFlagBits::eFailOnPipelineCompileRequired
+        | PipelineCreateFlagBits::eEarlyReturnOnFailure
+        | PipelineCreateFlagBits::eRenderingFragmentShadingRateAttachmentKHR
+        | PipelineCreateFlagBits::eRenderingFragmentDensityMapAttachmentEXT
+        | PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR
+        | PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR
+        | PipelineCreateFlagBits::eRayTracingNoNullMissShadersKHR
+        | PipelineCreateFlagBits::eRayTracingNoNullIntersectionShadersKHR
+        | PipelineCreateFlagBits::eRayTracingSkipTrianglesKHR
+        | PipelineCreateFlagBits::eRayTracingSkipAabbsKHR
+        | PipelineCreateFlagBits::eRayTracingShaderGroupHandleCaptureReplayKHR
+        | PipelineCreateFlagBits::eDeferCompileNV
+        | PipelineCreateFlagBits::eCaptureStatisticsKHR
+        | PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR
+        | PipelineCreateFlagBits::eIndirectBindableNV
+        | PipelineCreateFlagBits::eLibraryKHR
+        | PipelineCreateFlagBits::eDescriptorBufferEXT
+        | PipelineCreateFlagBits::eRetainLinkTimeOptimizationInfoEXT
+        | PipelineCreateFlagBits::eLinkTimeOptimizationEXT
+        | PipelineCreateFlagBits::eRayTracingAllowMotionNV
+        | PipelineCreateFlagBits::eColorAttachmentFeedbackLoopEXT
+        | PipelineCreateFlagBits::eDepthStencilAttachmentFeedbackLoopEXT
+        | PipelineCreateFlagBits::eRayTracingOpacityMicromapEXT
+        | PipelineCreateFlagBits::eNoProtectedAccessEXT
+        | PipelineCreateFlagBits::eProtectedAccessOnlyEXT;
+  };
+
+  // Bit flags for VkPipelineShaderStageCreateFlags; the EXT-suffixed entries
+  // are duplicates of the core bits (presumably promotion aliases).
+  enum class PipelineShaderStageCreateFlagBits : VkPipelineShaderStageCreateFlags
+  {
+    eAllowVaryingSubgroupSize = VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT,
+    eRequireFullSubgroups = VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT,
+    eAllowVaryingSubgroupSizeEXT = VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT,
+    eRequireFullSubgroupsEXT = VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT
+  };
+
+  using PipelineShaderStageCreateFlags = Flags<PipelineShaderStageCreateFlagBits>;
+
+
+  // FlagTraits for the Flags<> wrapper: allFlags ORs each distinct bit once.
+  template <> struct FlagTraits<PipelineShaderStageCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineShaderStageCreateFlags allFlags = 
+          PipelineShaderStageCreateFlagBits::eAllowVaryingSubgroupSize
+        | PipelineShaderStageCreateFlagBits::eRequireFullSubgroups;
+  };
+
+  // C++ scoped enum mirroring VK_POLYGON_MODE_*.
+  enum class PolygonMode
+  {
+    eFill = VK_POLYGON_MODE_FILL,
+    eLine = VK_POLYGON_MODE_LINE,
+    ePoint = VK_POLYGON_MODE_POINT,
+    eFillRectangleNV = VK_POLYGON_MODE_FILL_RECTANGLE_NV
+  };
+
+  // C++ scoped enum mirroring VK_PRIMITIVE_TOPOLOGY_*.
+  enum class PrimitiveTopology
+  {
+    ePointList = VK_PRIMITIVE_TOPOLOGY_POINT_LIST,
+    eLineList = VK_PRIMITIVE_TOPOLOGY_LINE_LIST,
+    eLineStrip = VK_PRIMITIVE_TOPOLOGY_LINE_STRIP,
+    eTriangleList = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST,
+    eTriangleStrip = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP,
+    eTriangleFan = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN,
+    eLineListWithAdjacency = VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY,
+    eLineStripWithAdjacency = VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY,
+    eTriangleListWithAdjacency = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY,
+    eTriangleStripWithAdjacency = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY,
+    ePatchList = VK_PRIMITIVE_TOPOLOGY_PATCH_LIST
+  };
+
+  // Bit flags for VkShaderStageFlags. Includes the combined masks eAllGraphics
+  // and eAll as well as NV-suffixed duplicates of the KHR ray-tracing stages
+  // (presumably aliases from the NV-to-KHR rename).
+  enum class ShaderStageFlagBits : VkShaderStageFlags
+  {
+    eVertex = VK_SHADER_STAGE_VERTEX_BIT,
+    eTessellationControl = VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT,
+    eTessellationEvaluation = VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT,
+    eGeometry = VK_SHADER_STAGE_GEOMETRY_BIT,
+    eFragment = VK_SHADER_STAGE_FRAGMENT_BIT,
+    eCompute = VK_SHADER_STAGE_COMPUTE_BIT,
+    eAllGraphics = VK_SHADER_STAGE_ALL_GRAPHICS,
+    eAll = VK_SHADER_STAGE_ALL,
+    eRaygenKHR = VK_SHADER_STAGE_RAYGEN_BIT_KHR,
+    eAnyHitKHR = VK_SHADER_STAGE_ANY_HIT_BIT_KHR,
+    eClosestHitKHR = VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR,
+    eMissKHR = VK_SHADER_STAGE_MISS_BIT_KHR,
+    eIntersectionKHR = VK_SHADER_STAGE_INTERSECTION_BIT_KHR,
+    eCallableKHR = VK_SHADER_STAGE_CALLABLE_BIT_KHR,
+    eTaskEXT = VK_SHADER_STAGE_TASK_BIT_EXT,
+    eMeshEXT = VK_SHADER_STAGE_MESH_BIT_EXT,
+    eSubpassShadingHUAWEI = VK_SHADER_STAGE_SUBPASS_SHADING_BIT_HUAWEI,
+    eClusterCullingHUAWEI = VK_SHADER_STAGE_CLUSTER_CULLING_BIT_HUAWEI,
+    eAnyHitNV = VK_SHADER_STAGE_ANY_HIT_BIT_NV,
+    eCallableNV = VK_SHADER_STAGE_CALLABLE_BIT_NV,
+    eClosestHitNV = VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV,
+    eIntersectionNV = VK_SHADER_STAGE_INTERSECTION_BIT_NV,
+    eMeshNV = VK_SHADER_STAGE_MESH_BIT_NV,
+    eMissNV = VK_SHADER_STAGE_MISS_BIT_NV,
+    eRaygenNV = VK_SHADER_STAGE_RAYGEN_BIT_NV,
+    eTaskNV = VK_SHADER_STAGE_TASK_BIT_NV
+  };
+
+  using ShaderStageFlags = Flags<ShaderStageFlagBits>;
+
+
+  // FlagTraits for the Flags<> wrapper: allFlags ORs each distinct value once
+  // (NV-suffixed duplicates are intentionally omitted).
+  template <> struct FlagTraits<ShaderStageFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ShaderStageFlags allFlags = 
+          ShaderStageFlagBits::eVertex
+        | ShaderStageFlagBits::eTessellationControl
+        | ShaderStageFlagBits::eTessellationEvaluation
+        | ShaderStageFlagBits::eGeometry
+        | ShaderStageFlagBits::eFragment
+        | ShaderStageFlagBits::eCompute
+        | ShaderStageFlagBits::eAllGraphics
+        | ShaderStageFlagBits::eAll
+        | ShaderStageFlagBits::eRaygenKHR
+        | ShaderStageFlagBits::eAnyHitKHR
+        | ShaderStageFlagBits::eClosestHitKHR
+        | ShaderStageFlagBits::eMissKHR
+        | ShaderStageFlagBits::eIntersectionKHR
+        | ShaderStageFlagBits::eCallableKHR
+        | ShaderStageFlagBits::eTaskEXT
+        | ShaderStageFlagBits::eMeshEXT
+        | ShaderStageFlagBits::eSubpassShadingHUAWEI
+        | ShaderStageFlagBits::eClusterCullingHUAWEI;
+  };
+
+  // C++ scoped enum mirroring VK_STENCIL_OP_*.
+  enum class StencilOp
+  {
+    eKeep = VK_STENCIL_OP_KEEP,
+    eZero = VK_STENCIL_OP_ZERO,
+    eReplace = VK_STENCIL_OP_REPLACE,
+    eIncrementAndClamp = VK_STENCIL_OP_INCREMENT_AND_CLAMP,
+    eDecrementAndClamp = VK_STENCIL_OP_DECREMENT_AND_CLAMP,
+    eInvert = VK_STENCIL_OP_INVERT,
+    eIncrementAndWrap = VK_STENCIL_OP_INCREMENT_AND_WRAP,
+    eDecrementAndWrap = VK_STENCIL_OP_DECREMENT_AND_WRAP
+  };
+
+  // C++ scoped enum mirroring VK_VERTEX_INPUT_RATE_*.
+  enum class VertexInputRate
+  {
+    eVertex = VK_VERTEX_INPUT_RATE_VERTEX,
+    eInstance = VK_VERTEX_INPUT_RATE_INSTANCE
+  };
+
+  // VkPipelineDynamicStateCreateFlags currently defines no bits; the enum is
+  // empty and allFlags is the empty (zero) mask.
+  enum class PipelineDynamicStateCreateFlagBits : VkPipelineDynamicStateCreateFlags
+  {};
+
+  using PipelineDynamicStateCreateFlags = Flags<PipelineDynamicStateCreateFlagBits>;
+
+
+  template <> struct FlagTraits<PipelineDynamicStateCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineDynamicStateCreateFlags allFlags =  {};
+  };
+
+  // The following Pipeline*StateCreate flag types wrap VkFlags words that
+  // currently define no bits: each enum is empty and each allFlags is the
+  // empty (zero) mask. They exist so the corresponding CreateInfo structs
+  // have a typed flags field.
+  enum class PipelineInputAssemblyStateCreateFlagBits : VkPipelineInputAssemblyStateCreateFlags
+  {};
+
+  using PipelineInputAssemblyStateCreateFlags = Flags<PipelineInputAssemblyStateCreateFlagBits>;
+
+
+  template <> struct FlagTraits<PipelineInputAssemblyStateCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineInputAssemblyStateCreateFlags allFlags =  {};
+  };
+
+  enum class PipelineMultisampleStateCreateFlagBits : VkPipelineMultisampleStateCreateFlags
+  {};
+
+  using PipelineMultisampleStateCreateFlags = Flags<PipelineMultisampleStateCreateFlagBits>;
+
+
+  template <> struct FlagTraits<PipelineMultisampleStateCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineMultisampleStateCreateFlags allFlags =  {};
+  };
+
+  enum class PipelineRasterizationStateCreateFlagBits : VkPipelineRasterizationStateCreateFlags
+  {};
+
+  using PipelineRasterizationStateCreateFlags = Flags<PipelineRasterizationStateCreateFlagBits>;
+
+
+  template <> struct FlagTraits<PipelineRasterizationStateCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineRasterizationStateCreateFlags allFlags =  {};
+  };
+
+  enum class PipelineTessellationStateCreateFlagBits : VkPipelineTessellationStateCreateFlags
+  {};
+
+  using PipelineTessellationStateCreateFlags = Flags<PipelineTessellationStateCreateFlagBits>;
+
+
+  template <> struct FlagTraits<PipelineTessellationStateCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineTessellationStateCreateFlags allFlags =  {};
+  };
+
+  enum class PipelineVertexInputStateCreateFlagBits : VkPipelineVertexInputStateCreateFlags
+  {};
+
+  using PipelineVertexInputStateCreateFlags = Flags<PipelineVertexInputStateCreateFlagBits>;
+
+
+  template <> struct FlagTraits<PipelineVertexInputStateCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineVertexInputStateCreateFlags allFlags =  {};
+  };
+
+  enum class PipelineViewportStateCreateFlagBits : VkPipelineViewportStateCreateFlags
+  {};
+
+  using PipelineViewportStateCreateFlags = Flags<PipelineViewportStateCreateFlagBits>;
+
+
+  template <> struct FlagTraits<PipelineViewportStateCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineViewportStateCreateFlags allFlags =  {};
+  };
+
+  // C++ scoped enum mirroring VK_BORDER_COLOR_*.
+  enum class BorderColor
+  {
+    eFloatTransparentBlack = VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK,
+    eIntTransparentBlack = VK_BORDER_COLOR_INT_TRANSPARENT_BLACK,
+    eFloatOpaqueBlack = VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK,
+    eIntOpaqueBlack = VK_BORDER_COLOR_INT_OPAQUE_BLACK,
+    eFloatOpaqueWhite = VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE,
+    eIntOpaqueWhite = VK_BORDER_COLOR_INT_OPAQUE_WHITE,
+    eFloatCustomEXT = VK_BORDER_COLOR_FLOAT_CUSTOM_EXT,
+    eIntCustomEXT = VK_BORDER_COLOR_INT_CUSTOM_EXT
+  };
+
+  // C++ scoped enum mirroring VK_FILTER_*.
+  enum class Filter
+  {
+    eNearest = VK_FILTER_NEAREST,
+    eLinear = VK_FILTER_LINEAR,
+    eCubicEXT = VK_FILTER_CUBIC_EXT,
+    eCubicIMG = VK_FILTER_CUBIC_IMG
+  };
+
+  // C++ scoped enum mirroring VK_SAMPLER_ADDRESS_MODE_*; the KHR entry
+  // duplicates eMirrorClampToEdge (presumably a promotion alias).
+  enum class SamplerAddressMode
+  {
+    eRepeat = VK_SAMPLER_ADDRESS_MODE_REPEAT,
+    eMirroredRepeat = VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT,
+    eClampToEdge = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
+    eClampToBorder = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER,
+    eMirrorClampToEdge = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE,
+    eMirrorClampToEdgeKHR = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE_KHR
+  };
+
+  // Bit flags for VkSamplerCreateFlags; all currently-defined bits come from
+  // EXT/QCOM extensions.
+  enum class SamplerCreateFlagBits : VkSamplerCreateFlags
+  {
+    eSubsampledEXT = VK_SAMPLER_CREATE_SUBSAMPLED_BIT_EXT,
+    eSubsampledCoarseReconstructionEXT = VK_SAMPLER_CREATE_SUBSAMPLED_COARSE_RECONSTRUCTION_BIT_EXT,
+    eDescriptorBufferCaptureReplayEXT = VK_SAMPLER_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT,
+    eNonSeamlessCubeMapEXT = VK_SAMPLER_CREATE_NON_SEAMLESS_CUBE_MAP_BIT_EXT,
+    eImageProcessingQCOM = VK_SAMPLER_CREATE_IMAGE_PROCESSING_BIT_QCOM
+  };
+
+  using SamplerCreateFlags = Flags<SamplerCreateFlagBits>;
+
+
+  // FlagTraits for the Flags<> wrapper: allFlags ORs every bit defined above.
+  template <> struct FlagTraits<SamplerCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR SamplerCreateFlags allFlags = 
+          SamplerCreateFlagBits::eSubsampledEXT
+        | SamplerCreateFlagBits::eSubsampledCoarseReconstructionEXT
+        | SamplerCreateFlagBits::eDescriptorBufferCaptureReplayEXT
+        | SamplerCreateFlagBits::eNonSeamlessCubeMapEXT
+        | SamplerCreateFlagBits::eImageProcessingQCOM;
+  };
+
+  // C++ scoped enum mirroring VK_SAMPLER_MIPMAP_MODE_*.
+  enum class SamplerMipmapMode
+  {
+    eNearest = VK_SAMPLER_MIPMAP_MODE_NEAREST,
+    eLinear = VK_SAMPLER_MIPMAP_MODE_LINEAR
+  };
+
+  // Bit flags for VkDescriptorPoolCreateFlags; the VALVE/EXT-suffixed entries
+  // duplicate other bits (presumably promotion/rename aliases).
+  enum class DescriptorPoolCreateFlagBits : VkDescriptorPoolCreateFlags
+  {
+    eFreeDescriptorSet = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT,
+    eUpdateAfterBind = VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT,
+    eHostOnlyEXT = VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_EXT,
+    eHostOnlyVALVE = VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_VALVE,
+    eUpdateAfterBindEXT = VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT
+  };
+
+  using DescriptorPoolCreateFlags = Flags<DescriptorPoolCreateFlagBits>;
+
+
+  // FlagTraits for the Flags<> wrapper: allFlags ORs each distinct bit once
+  // (suffixed duplicate enumerators are intentionally omitted).
+  template <> struct FlagTraits<DescriptorPoolCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR DescriptorPoolCreateFlags allFlags = 
+          DescriptorPoolCreateFlagBits::eFreeDescriptorSet
+        | DescriptorPoolCreateFlagBits::eUpdateAfterBind
+        | DescriptorPoolCreateFlagBits::eHostOnlyEXT;
+  };
+
+  // Bit flags for VkDescriptorSetLayoutCreateFlags; VALVE/EXT-suffixed entries
+  // duplicate other bits (presumably promotion/rename aliases).
+  enum class DescriptorSetLayoutCreateFlagBits : VkDescriptorSetLayoutCreateFlags
+  {
+    eUpdateAfterBindPool = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT,
+    ePushDescriptorKHR = VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR,
+    eDescriptorBufferEXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_DESCRIPTOR_BUFFER_BIT_EXT,
+    eEmbeddedImmutableSamplersEXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_EMBEDDED_IMMUTABLE_SAMPLERS_BIT_EXT,
+    eHostOnlyPoolEXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_EXT,
+    eHostOnlyPoolVALVE = VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_VALVE,
+    eUpdateAfterBindPoolEXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT
+  };
+
+  using DescriptorSetLayoutCreateFlags = Flags<DescriptorSetLayoutCreateFlagBits>;
+
+
+  template <> struct FlagTraits<DescriptorSetLayoutCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR DescriptorSetLayoutCreateFlags allFlags = 
+          DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPool
+        | DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR
+        | DescriptorSetLayoutCreateFlagBits::eDescriptorBufferEXT
+        | DescriptorSetLayoutCreateFlagBits::eEmbeddedImmutableSamplersEXT
+        | DescriptorSetLayoutCreateFlagBits::eHostOnlyPoolEXT;
+  };
+
+  enum class DescriptorType
+  {
+    eSampler = VK_DESCRIPTOR_TYPE_SAMPLER,
+    eCombinedImageSampler = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
+    eSampledImage = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE,
+    eStorageImage = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
+    eUniformTexelBuffer = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER,
+    eStorageTexelBuffer = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER,
+    eUniformBuffer = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
+    eStorageBuffer = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
+    eUniformBufferDynamic = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC,
+    eStorageBufferDynamic = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC,
+    eInputAttachment = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT,
+    eInlineUniformBlock = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK,
+    eAccelerationStructureKHR = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR,
+    eAccelerationStructureNV = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV,
+    eSampleWeightImageQCOM = VK_DESCRIPTOR_TYPE_SAMPLE_WEIGHT_IMAGE_QCOM,
+    eBlockMatchImageQCOM = VK_DESCRIPTOR_TYPE_BLOCK_MATCH_IMAGE_QCOM,
+    eMutableEXT = VK_DESCRIPTOR_TYPE_MUTABLE_EXT,
+    eInlineUniformBlockEXT = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT,
+    eMutableVALVE = VK_DESCRIPTOR_TYPE_MUTABLE_VALVE
+  };
+
+  enum class DescriptorPoolResetFlagBits : VkDescriptorPoolResetFlags
+  {};
+
+  using DescriptorPoolResetFlags = Flags<DescriptorPoolResetFlagBits>;
+
+
+  template <> struct FlagTraits<DescriptorPoolResetFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR DescriptorPoolResetFlags allFlags =  {};
+  };
+
+  enum class AccessFlagBits : VkAccessFlags
+  {
+    eIndirectCommandRead = VK_ACCESS_INDIRECT_COMMAND_READ_BIT,
+    eIndexRead = VK_ACCESS_INDEX_READ_BIT,
+    eVertexAttributeRead = VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT,
+    eUniformRead = VK_ACCESS_UNIFORM_READ_BIT,
+    eInputAttachmentRead = VK_ACCESS_INPUT_ATTACHMENT_READ_BIT,
+    eShaderRead = VK_ACCESS_SHADER_READ_BIT,
+    eShaderWrite = VK_ACCESS_SHADER_WRITE_BIT,
+    eColorAttachmentRead = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT,
+    eColorAttachmentWrite = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
+    eDepthStencilAttachmentRead = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT,
+    eDepthStencilAttachmentWrite = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
+    eTransferRead = VK_ACCESS_TRANSFER_READ_BIT,
+    eTransferWrite = VK_ACCESS_TRANSFER_WRITE_BIT,
+    eHostRead = VK_ACCESS_HOST_READ_BIT,
+    eHostWrite = VK_ACCESS_HOST_WRITE_BIT,
+    eMemoryRead = VK_ACCESS_MEMORY_READ_BIT,
+    eMemoryWrite = VK_ACCESS_MEMORY_WRITE_BIT,
+    eNone = VK_ACCESS_NONE,
+    eTransformFeedbackWriteEXT = VK_ACCESS_TRANSFORM_FEEDBACK_WRITE_BIT_EXT,
+    eTransformFeedbackCounterReadEXT = VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT,
+    eTransformFeedbackCounterWriteEXT = VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT,
+    eConditionalRenderingReadEXT = VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT,
+    eColorAttachmentReadNoncoherentEXT = VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT,
+    eAccelerationStructureReadKHR = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_KHR,
+    eAccelerationStructureWriteKHR = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_KHR,
+    eFragmentDensityMapReadEXT = VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT,
+    eFragmentShadingRateAttachmentReadKHR = VK_ACCESS_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR,
+    eCommandPreprocessReadNV = VK_ACCESS_COMMAND_PREPROCESS_READ_BIT_NV,
+    eCommandPreprocessWriteNV = VK_ACCESS_COMMAND_PREPROCESS_WRITE_BIT_NV,
+    eAccelerationStructureReadNV = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV,
+    eAccelerationStructureWriteNV = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV,
+    eNoneKHR = VK_ACCESS_NONE_KHR,
+    eShadingRateImageReadNV = VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV
+  };
+
+  using AccessFlags = Flags<AccessFlagBits>;
+
+
+  template <> struct FlagTraits<AccessFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR AccessFlags allFlags = 
+          AccessFlagBits::eIndirectCommandRead
+        | AccessFlagBits::eIndexRead
+        | AccessFlagBits::eVertexAttributeRead
+        | AccessFlagBits::eUniformRead
+        | AccessFlagBits::eInputAttachmentRead
+        | AccessFlagBits::eShaderRead
+        | AccessFlagBits::eShaderWrite
+        | AccessFlagBits::eColorAttachmentRead
+        | AccessFlagBits::eColorAttachmentWrite
+        | AccessFlagBits::eDepthStencilAttachmentRead
+        | AccessFlagBits::eDepthStencilAttachmentWrite
+        | AccessFlagBits::eTransferRead
+        | AccessFlagBits::eTransferWrite
+        | AccessFlagBits::eHostRead
+        | AccessFlagBits::eHostWrite
+        | AccessFlagBits::eMemoryRead
+        | AccessFlagBits::eMemoryWrite
+        | AccessFlagBits::eNone
+        | AccessFlagBits::eTransformFeedbackWriteEXT
+        | AccessFlagBits::eTransformFeedbackCounterReadEXT
+        | AccessFlagBits::eTransformFeedbackCounterWriteEXT
+        | AccessFlagBits::eConditionalRenderingReadEXT
+        | AccessFlagBits::eColorAttachmentReadNoncoherentEXT
+        | AccessFlagBits::eAccelerationStructureReadKHR
+        | AccessFlagBits::eAccelerationStructureWriteKHR
+        | AccessFlagBits::eFragmentDensityMapReadEXT
+        | AccessFlagBits::eFragmentShadingRateAttachmentReadKHR
+        | AccessFlagBits::eCommandPreprocessReadNV
+        | AccessFlagBits::eCommandPreprocessWriteNV;
+  };
+
+  enum class AttachmentDescriptionFlagBits : VkAttachmentDescriptionFlags
+  {
+    eMayAlias = VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT
+  };
+
+  using AttachmentDescriptionFlags = Flags<AttachmentDescriptionFlagBits>;
+
+
+  template <> struct FlagTraits<AttachmentDescriptionFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR AttachmentDescriptionFlags allFlags = 
+          AttachmentDescriptionFlagBits::eMayAlias;
+  };
+
+  enum class AttachmentLoadOp
+  {
+    eLoad = VK_ATTACHMENT_LOAD_OP_LOAD,
+    eClear = VK_ATTACHMENT_LOAD_OP_CLEAR,
+    eDontCare = VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+    eNoneEXT = VK_ATTACHMENT_LOAD_OP_NONE_EXT
+  };
+
+  enum class AttachmentStoreOp
+  {
+    eStore = VK_ATTACHMENT_STORE_OP_STORE,
+    eDontCare = VK_ATTACHMENT_STORE_OP_DONT_CARE,
+    eNone = VK_ATTACHMENT_STORE_OP_NONE,
+    eNoneEXT = VK_ATTACHMENT_STORE_OP_NONE_EXT,
+    eNoneKHR = VK_ATTACHMENT_STORE_OP_NONE_KHR,
+    eNoneQCOM = VK_ATTACHMENT_STORE_OP_NONE_QCOM
+  };
+
+  enum class DependencyFlagBits : VkDependencyFlags
+  {
+    eByRegion = VK_DEPENDENCY_BY_REGION_BIT,
+    eDeviceGroup = VK_DEPENDENCY_DEVICE_GROUP_BIT,
+    eViewLocal = VK_DEPENDENCY_VIEW_LOCAL_BIT,
+    eFeedbackLoopEXT = VK_DEPENDENCY_FEEDBACK_LOOP_BIT_EXT,
+    eDeviceGroupKHR = VK_DEPENDENCY_DEVICE_GROUP_BIT_KHR,
+    eViewLocalKHR = VK_DEPENDENCY_VIEW_LOCAL_BIT_KHR
+  };
+
+  using DependencyFlags = Flags<DependencyFlagBits>;
+
+
+  template <> struct FlagTraits<DependencyFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR DependencyFlags allFlags = 
+          DependencyFlagBits::eByRegion
+        | DependencyFlagBits::eDeviceGroup
+        | DependencyFlagBits::eViewLocal
+        | DependencyFlagBits::eFeedbackLoopEXT;
+  };
+
+  enum class FramebufferCreateFlagBits : VkFramebufferCreateFlags
+  {
+    eImageless = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT,
+    eImagelessKHR = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR
+  };
+
+  using FramebufferCreateFlags = Flags<FramebufferCreateFlagBits>;
+
+
+  template <> struct FlagTraits<FramebufferCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR FramebufferCreateFlags allFlags = 
+          FramebufferCreateFlagBits::eImageless;
+  };
+
+  enum class PipelineBindPoint
+  {
+    eGraphics = VK_PIPELINE_BIND_POINT_GRAPHICS,
+    eCompute = VK_PIPELINE_BIND_POINT_COMPUTE,
+    eRayTracingKHR = VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR,
+    eSubpassShadingHUAWEI = VK_PIPELINE_BIND_POINT_SUBPASS_SHADING_HUAWEI,
+    eRayTracingNV = VK_PIPELINE_BIND_POINT_RAY_TRACING_NV
+  };
+
+  enum class RenderPassCreateFlagBits : VkRenderPassCreateFlags
+  {
+    eTransformQCOM = VK_RENDER_PASS_CREATE_TRANSFORM_BIT_QCOM
+  };
+
+  using RenderPassCreateFlags = Flags<RenderPassCreateFlagBits>;
+
+
+  template <> struct FlagTraits<RenderPassCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR RenderPassCreateFlags allFlags = 
+          RenderPassCreateFlagBits::eTransformQCOM;
+  };
+
+  enum class SubpassDescriptionFlagBits : VkSubpassDescriptionFlags
+  {
+    ePerViewAttributesNVX = VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX,
+    ePerViewPositionXOnlyNVX = VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX,
+    eFragmentRegionQCOM = VK_SUBPASS_DESCRIPTION_FRAGMENT_REGION_BIT_QCOM,
+    eShaderResolveQCOM = VK_SUBPASS_DESCRIPTION_SHADER_RESOLVE_BIT_QCOM,
+    eRasterizationOrderAttachmentColorAccessEXT = VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_COLOR_ACCESS_BIT_EXT,
+    eRasterizationOrderAttachmentDepthAccessEXT = VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_EXT,
+    eRasterizationOrderAttachmentStencilAccessEXT = VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_EXT,
+    eEnableLegacyDitheringEXT = VK_SUBPASS_DESCRIPTION_ENABLE_LEGACY_DITHERING_BIT_EXT,
+    eRasterizationOrderAttachmentColorAccessARM = VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_COLOR_ACCESS_BIT_ARM,
+    eRasterizationOrderAttachmentDepthAccessARM = VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_ARM,
+    eRasterizationOrderAttachmentStencilAccessARM = VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_ARM
+  };
+
+  using SubpassDescriptionFlags = Flags<SubpassDescriptionFlagBits>;
+
+
+  template <> struct FlagTraits<SubpassDescriptionFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR SubpassDescriptionFlags allFlags = 
+          SubpassDescriptionFlagBits::ePerViewAttributesNVX
+        | SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX
+        | SubpassDescriptionFlagBits::eFragmentRegionQCOM
+        | SubpassDescriptionFlagBits::eShaderResolveQCOM
+        | SubpassDescriptionFlagBits::eRasterizationOrderAttachmentColorAccessEXT
+        | SubpassDescriptionFlagBits::eRasterizationOrderAttachmentDepthAccessEXT
+        | SubpassDescriptionFlagBits::eRasterizationOrderAttachmentStencilAccessEXT
+        | SubpassDescriptionFlagBits::eEnableLegacyDitheringEXT;
+  };
+
+  enum class CommandPoolCreateFlagBits : VkCommandPoolCreateFlags
+  {
+    eTransient = VK_COMMAND_POOL_CREATE_TRANSIENT_BIT,
+    eResetCommandBuffer = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,
+    eProtected = VK_COMMAND_POOL_CREATE_PROTECTED_BIT
+  };
+
+  using CommandPoolCreateFlags = Flags<CommandPoolCreateFlagBits>;
+
+
+  template <> struct FlagTraits<CommandPoolCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR CommandPoolCreateFlags allFlags = 
+          CommandPoolCreateFlagBits::eTransient
+        | CommandPoolCreateFlagBits::eResetCommandBuffer
+        | CommandPoolCreateFlagBits::eProtected;
+  };
+
+  enum class CommandPoolResetFlagBits : VkCommandPoolResetFlags
+  {
+    eReleaseResources = VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT
+  };
+
+  using CommandPoolResetFlags = Flags<CommandPoolResetFlagBits>;
+
+
+  template <> struct FlagTraits<CommandPoolResetFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR CommandPoolResetFlags allFlags = 
+          CommandPoolResetFlagBits::eReleaseResources;
+  };
+
+  enum class CommandBufferLevel
+  {
+    ePrimary = VK_COMMAND_BUFFER_LEVEL_PRIMARY,
+    eSecondary = VK_COMMAND_BUFFER_LEVEL_SECONDARY
+  };
+
+  enum class CommandBufferResetFlagBits : VkCommandBufferResetFlags
+  {
+    eReleaseResources = VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT
+  };
+
+  using CommandBufferResetFlags = Flags<CommandBufferResetFlagBits>;
+
+
+  template <> struct FlagTraits<CommandBufferResetFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR CommandBufferResetFlags allFlags = 
+          CommandBufferResetFlagBits::eReleaseResources;
+  };
+
+  enum class CommandBufferUsageFlagBits : VkCommandBufferUsageFlags
+  {
+    eOneTimeSubmit = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
+    eRenderPassContinue = VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT,
+    eSimultaneousUse = VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT
+  };
+
+  using CommandBufferUsageFlags = Flags<CommandBufferUsageFlagBits>;
+
+
+  template <> struct FlagTraits<CommandBufferUsageFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR CommandBufferUsageFlags allFlags = 
+          CommandBufferUsageFlagBits::eOneTimeSubmit
+        | CommandBufferUsageFlagBits::eRenderPassContinue
+        | CommandBufferUsageFlagBits::eSimultaneousUse;
+  };
+
+  enum class QueryControlFlagBits : VkQueryControlFlags
+  {
+    ePrecise = VK_QUERY_CONTROL_PRECISE_BIT
+  };
+
+  using QueryControlFlags = Flags<QueryControlFlagBits>;
+
+
+  template <> struct FlagTraits<QueryControlFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR QueryControlFlags allFlags = 
+          QueryControlFlagBits::ePrecise;
+  };
+
+  enum class IndexType
+  {
+    eUint16 = VK_INDEX_TYPE_UINT16,
+    eUint32 = VK_INDEX_TYPE_UINT32,
+    eNoneKHR = VK_INDEX_TYPE_NONE_KHR,
+    eUint8EXT = VK_INDEX_TYPE_UINT8_EXT,
+    eNoneNV = VK_INDEX_TYPE_NONE_NV
+  };
+
+  enum class StencilFaceFlagBits : VkStencilFaceFlags
+  {
+    eFront = VK_STENCIL_FACE_FRONT_BIT,
+    eBack = VK_STENCIL_FACE_BACK_BIT,
+    eFrontAndBack = VK_STENCIL_FACE_FRONT_AND_BACK,
+    eVkStencilFrontAndBack = VK_STENCIL_FRONT_AND_BACK
+  };
+
+  using StencilFaceFlags = Flags<StencilFaceFlagBits>;
+
+
+  template <> struct FlagTraits<StencilFaceFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StencilFaceFlags allFlags = 
+          StencilFaceFlagBits::eFront
+        | StencilFaceFlagBits::eBack
+        | StencilFaceFlagBits::eFrontAndBack;
+  };
+
+  enum class SubpassContents
+  {
+    eInline = VK_SUBPASS_CONTENTS_INLINE,
+    eSecondaryCommandBuffers = VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS
+  };
+
+  //=== VK_VERSION_1_1 ===
+
+  enum class SubgroupFeatureFlagBits : VkSubgroupFeatureFlags
+  {
+    eBasic = VK_SUBGROUP_FEATURE_BASIC_BIT,
+    eVote = VK_SUBGROUP_FEATURE_VOTE_BIT,
+    eArithmetic = VK_SUBGROUP_FEATURE_ARITHMETIC_BIT,
+    eBallot = VK_SUBGROUP_FEATURE_BALLOT_BIT,
+    eShuffle = VK_SUBGROUP_FEATURE_SHUFFLE_BIT,
+    eShuffleRelative = VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT,
+    eClustered = VK_SUBGROUP_FEATURE_CLUSTERED_BIT,
+    eQuad = VK_SUBGROUP_FEATURE_QUAD_BIT,
+    ePartitionedNV = VK_SUBGROUP_FEATURE_PARTITIONED_BIT_NV
+  };
+
+  using SubgroupFeatureFlags = Flags<SubgroupFeatureFlagBits>;
+
+
+  template <> struct FlagTraits<SubgroupFeatureFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR SubgroupFeatureFlags allFlags = 
+          SubgroupFeatureFlagBits::eBasic
+        | SubgroupFeatureFlagBits::eVote
+        | SubgroupFeatureFlagBits::eArithmetic
+        | SubgroupFeatureFlagBits::eBallot
+        | SubgroupFeatureFlagBits::eShuffle
+        | SubgroupFeatureFlagBits::eShuffleRelative
+        | SubgroupFeatureFlagBits::eClustered
+        | SubgroupFeatureFlagBits::eQuad
+        | SubgroupFeatureFlagBits::ePartitionedNV;
+  };
+
+  enum class PeerMemoryFeatureFlagBits : VkPeerMemoryFeatureFlags
+  {
+    eCopySrc = VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT,
+    eCopyDst = VK_PEER_MEMORY_FEATURE_COPY_DST_BIT,
+    eGenericSrc = VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT,
+    eGenericDst = VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT
+  };
+  using PeerMemoryFeatureFlagBitsKHR = PeerMemoryFeatureFlagBits;
+
+  using PeerMemoryFeatureFlags = Flags<PeerMemoryFeatureFlagBits>;
+  using PeerMemoryFeatureFlagsKHR = PeerMemoryFeatureFlags;
+
+
+  template <> struct FlagTraits<PeerMemoryFeatureFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PeerMemoryFeatureFlags allFlags = 
+          PeerMemoryFeatureFlagBits::eCopySrc
+        | PeerMemoryFeatureFlagBits::eCopyDst
+        | PeerMemoryFeatureFlagBits::eGenericSrc
+        | PeerMemoryFeatureFlagBits::eGenericDst;
+  };
+
+  enum class MemoryAllocateFlagBits : VkMemoryAllocateFlags
+  {
+    eDeviceMask = VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT,
+    eDeviceAddress = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT,
+    eDeviceAddressCaptureReplay = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT
+  };
+  using MemoryAllocateFlagBitsKHR = MemoryAllocateFlagBits;
+
+  using MemoryAllocateFlags = Flags<MemoryAllocateFlagBits>;
+  using MemoryAllocateFlagsKHR = MemoryAllocateFlags;
+
+
+  template <> struct FlagTraits<MemoryAllocateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR MemoryAllocateFlags allFlags = 
+          MemoryAllocateFlagBits::eDeviceMask
+        | MemoryAllocateFlagBits::eDeviceAddress
+        | MemoryAllocateFlagBits::eDeviceAddressCaptureReplay;
+  };
+
+  enum class CommandPoolTrimFlagBits : VkCommandPoolTrimFlags
+  {};
+
+  using CommandPoolTrimFlags = Flags<CommandPoolTrimFlagBits>;
+  using CommandPoolTrimFlagsKHR = CommandPoolTrimFlags;
+
+
+  template <> struct FlagTraits<CommandPoolTrimFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR CommandPoolTrimFlags allFlags =  {};
+  };
+
+  enum class PointClippingBehavior
+  {
+    eAllClipPlanes = VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES,
+    eUserClipPlanesOnly = VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY
+  };
+  using PointClippingBehaviorKHR = PointClippingBehavior;
+
+  enum class TessellationDomainOrigin
+  {
+    eUpperLeft = VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT,
+    eLowerLeft = VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT
+  };
+  using TessellationDomainOriginKHR = TessellationDomainOrigin;
+
+  enum class DeviceQueueCreateFlagBits : VkDeviceQueueCreateFlags
+  {
+    eProtected = VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT
+  };
+
+  using DeviceQueueCreateFlags = Flags<DeviceQueueCreateFlagBits>;
+
+
+  template <> struct FlagTraits<DeviceQueueCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR DeviceQueueCreateFlags allFlags = 
+          DeviceQueueCreateFlagBits::eProtected;
+  };
+
+  enum class SamplerYcbcrModelConversion
+  {
+    eRgbIdentity = VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY,
+    eYcbcrIdentity = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY,
+    eYcbcr709 = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709,
+    eYcbcr601 = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601,
+    eYcbcr2020 = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020
+  };
+  using SamplerYcbcrModelConversionKHR = SamplerYcbcrModelConversion;
+
+  enum class SamplerYcbcrRange
+  {
+    eItuFull = VK_SAMPLER_YCBCR_RANGE_ITU_FULL,
+    eItuNarrow = VK_SAMPLER_YCBCR_RANGE_ITU_NARROW
+  };
+  using SamplerYcbcrRangeKHR = SamplerYcbcrRange;
+
+  enum class ChromaLocation
+  {
+    eCositedEven = VK_CHROMA_LOCATION_COSITED_EVEN,
+    eMidpoint = VK_CHROMA_LOCATION_MIDPOINT
+  };
+  using ChromaLocationKHR = ChromaLocation;
+
+  enum class DescriptorUpdateTemplateType
+  {
+    eDescriptorSet = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET,
+    ePushDescriptorsKHR = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR
+  };
+  using DescriptorUpdateTemplateTypeKHR = DescriptorUpdateTemplateType;
+
+  enum class DescriptorUpdateTemplateCreateFlagBits : VkDescriptorUpdateTemplateCreateFlags
+  {};
+
+  using DescriptorUpdateTemplateCreateFlags = Flags<DescriptorUpdateTemplateCreateFlagBits>;
+  using DescriptorUpdateTemplateCreateFlagsKHR = DescriptorUpdateTemplateCreateFlags;
+
+
+  template <> struct FlagTraits<DescriptorUpdateTemplateCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR DescriptorUpdateTemplateCreateFlags allFlags =  {};
+  };
+
+  enum class ExternalMemoryHandleTypeFlagBits : VkExternalMemoryHandleTypeFlags
+  {
+    eOpaqueFd = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT,
+    eOpaqueWin32 = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT,
+    eOpaqueWin32Kmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT,
+    eD3D11Texture = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT,
+    eD3D11TextureKmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT,
+    eD3D12Heap = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT,
+    eD3D12Resource = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT,
+    eDmaBufEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT,
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+    eAndroidHardwareBufferANDROID = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID,
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+    eHostAllocationEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT,
+    eHostMappedForeignMemoryEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT,
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+    eZirconVmoFUCHSIA = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ZIRCON_VMO_BIT_FUCHSIA,
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+    eRdmaAddressNV = VK_EXTERNAL_MEMORY_HANDLE_TYPE_RDMA_ADDRESS_BIT_NV
+  };
+  using ExternalMemoryHandleTypeFlagBitsKHR = ExternalMemoryHandleTypeFlagBits;
+
+  using ExternalMemoryHandleTypeFlags = Flags<ExternalMemoryHandleTypeFlagBits>;
+  using ExternalMemoryHandleTypeFlagsKHR = ExternalMemoryHandleTypeFlags;
+
+
+  template <> struct FlagTraits<ExternalMemoryHandleTypeFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ExternalMemoryHandleTypeFlags allFlags = 
+          ExternalMemoryHandleTypeFlagBits::eOpaqueFd
+        | ExternalMemoryHandleTypeFlagBits::eOpaqueWin32
+        | ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt
+        | ExternalMemoryHandleTypeFlagBits::eD3D11Texture
+        | ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt
+        | ExternalMemoryHandleTypeFlagBits::eD3D12Heap
+        | ExternalMemoryHandleTypeFlagBits::eD3D12Resource
+        | ExternalMemoryHandleTypeFlagBits::eDmaBufEXT
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+        | ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+        | ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT
+        | ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+        | ExternalMemoryHandleTypeFlagBits::eZirconVmoFUCHSIA
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+        | ExternalMemoryHandleTypeFlagBits::eRdmaAddressNV;
+  };
+
+  enum class ExternalMemoryFeatureFlagBits : VkExternalMemoryFeatureFlags
+  {
+    eDedicatedOnly = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT,
+    eExportable = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT,
+    eImportable = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT
+  };
+  using ExternalMemoryFeatureFlagBitsKHR = ExternalMemoryFeatureFlagBits;
+
+  using ExternalMemoryFeatureFlags = Flags<ExternalMemoryFeatureFlagBits>;
+  using ExternalMemoryFeatureFlagsKHR = ExternalMemoryFeatureFlags;
+
+
+  template <> struct FlagTraits<ExternalMemoryFeatureFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ExternalMemoryFeatureFlags allFlags = 
+          ExternalMemoryFeatureFlagBits::eDedicatedOnly
+        | ExternalMemoryFeatureFlagBits::eExportable
+        | ExternalMemoryFeatureFlagBits::eImportable;
+  };
+
+  enum class ExternalFenceHandleTypeFlagBits : VkExternalFenceHandleTypeFlags
+  {
+    eOpaqueFd = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT,
+    eOpaqueWin32 = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT,
+    eOpaqueWin32Kmt = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT,
+    eSyncFd = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT
+  };
+  using ExternalFenceHandleTypeFlagBitsKHR = ExternalFenceHandleTypeFlagBits;
+
+  using ExternalFenceHandleTypeFlags = Flags<ExternalFenceHandleTypeFlagBits>;
+  using ExternalFenceHandleTypeFlagsKHR = ExternalFenceHandleTypeFlags;
+
+
+  template <> struct FlagTraits<ExternalFenceHandleTypeFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ExternalFenceHandleTypeFlags allFlags = 
+          ExternalFenceHandleTypeFlagBits::eOpaqueFd
+        | ExternalFenceHandleTypeFlagBits::eOpaqueWin32
+        | ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt
+        | ExternalFenceHandleTypeFlagBits::eSyncFd;
+  };
+
+  enum class ExternalFenceFeatureFlagBits : VkExternalFenceFeatureFlags
+  {
+    eExportable = VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT,
+    eImportable = VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT
+  };
+  using ExternalFenceFeatureFlagBitsKHR = ExternalFenceFeatureFlagBits;
+
+  using ExternalFenceFeatureFlags = Flags<ExternalFenceFeatureFlagBits>;
+  using ExternalFenceFeatureFlagsKHR = ExternalFenceFeatureFlags;
+
+
+  template <> struct FlagTraits<ExternalFenceFeatureFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ExternalFenceFeatureFlags allFlags = 
+          ExternalFenceFeatureFlagBits::eExportable
+        | ExternalFenceFeatureFlagBits::eImportable;
+  };
+
+  enum class FenceImportFlagBits : VkFenceImportFlags
+  {
+    eTemporary = VK_FENCE_IMPORT_TEMPORARY_BIT
+  };
+  using FenceImportFlagBitsKHR = FenceImportFlagBits;
+
+  using FenceImportFlags = Flags<FenceImportFlagBits>;
+  using FenceImportFlagsKHR = FenceImportFlags;
+
+
+  template <> struct FlagTraits<FenceImportFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR FenceImportFlags allFlags = 
+          FenceImportFlagBits::eTemporary;
+  };
+
+  enum class SemaphoreImportFlagBits : VkSemaphoreImportFlags
+  {
+    eTemporary = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT
+  };
+  using SemaphoreImportFlagBitsKHR = SemaphoreImportFlagBits;
+
+  using SemaphoreImportFlags = Flags<SemaphoreImportFlagBits>;
+  using SemaphoreImportFlagsKHR = SemaphoreImportFlags;
+
+
+  template <> struct FlagTraits<SemaphoreImportFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR SemaphoreImportFlags allFlags = 
+          SemaphoreImportFlagBits::eTemporary;
+  };
+
+  enum class ExternalSemaphoreHandleTypeFlagBits : VkExternalSemaphoreHandleTypeFlags
+  {
+    eOpaqueFd = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT,
+    eOpaqueWin32 = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT,
+    eOpaqueWin32Kmt = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT,
+    eD3D12Fence = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT,
+    eSyncFd = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT,
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+    eZirconEventFUCHSIA = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_ZIRCON_EVENT_BIT_FUCHSIA,
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+    eD3D11Fence = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D11_FENCE_BIT
+  };
+  using ExternalSemaphoreHandleTypeFlagBitsKHR = ExternalSemaphoreHandleTypeFlagBits;
+
+  using ExternalSemaphoreHandleTypeFlags = Flags<ExternalSemaphoreHandleTypeFlagBits>;
+  using ExternalSemaphoreHandleTypeFlagsKHR = ExternalSemaphoreHandleTypeFlags;
+
+
+  template <> struct FlagTraits<ExternalSemaphoreHandleTypeFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ExternalSemaphoreHandleTypeFlags allFlags = 
+          ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd
+        | ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32
+        | ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt
+        | ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence
+        | ExternalSemaphoreHandleTypeFlagBits::eSyncFd
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+        | ExternalSemaphoreHandleTypeFlagBits::eZirconEventFUCHSIA
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+;
+  };
+
+  enum class ExternalSemaphoreFeatureFlagBits : VkExternalSemaphoreFeatureFlags
+  {
+    eExportable = VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT,
+    eImportable = VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT
+  };
+  using ExternalSemaphoreFeatureFlagBitsKHR = ExternalSemaphoreFeatureFlagBits;
+
+  using ExternalSemaphoreFeatureFlags = Flags<ExternalSemaphoreFeatureFlagBits>;
+  using ExternalSemaphoreFeatureFlagsKHR = ExternalSemaphoreFeatureFlags;
+
+
+  template <> struct FlagTraits<ExternalSemaphoreFeatureFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ExternalSemaphoreFeatureFlags allFlags = 
+          ExternalSemaphoreFeatureFlagBits::eExportable
+        | ExternalSemaphoreFeatureFlagBits::eImportable;
+  };
+
+  //=== VK_VERSION_1_2 ===
+
+  enum class DriverId  // wraps VK_DRIVER_ID_* — identifies the vendor/driver implementation reported in VkPhysicalDeviceDriverProperties
+  {
+    eAmdProprietary = VK_DRIVER_ID_AMD_PROPRIETARY,
+    eAmdOpenSource = VK_DRIVER_ID_AMD_OPEN_SOURCE,
+    eMesaRadv = VK_DRIVER_ID_MESA_RADV,
+    eNvidiaProprietary = VK_DRIVER_ID_NVIDIA_PROPRIETARY,
+    eIntelProprietaryWindows = VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS,
+    eIntelOpenSourceMESA = VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA,
+    eImaginationProprietary = VK_DRIVER_ID_IMAGINATION_PROPRIETARY,
+    eQualcommProprietary = VK_DRIVER_ID_QUALCOMM_PROPRIETARY,
+    eArmProprietary = VK_DRIVER_ID_ARM_PROPRIETARY,
+    eGoogleSwiftshader = VK_DRIVER_ID_GOOGLE_SWIFTSHADER,
+    eGgpProprietary = VK_DRIVER_ID_GGP_PROPRIETARY,
+    eBroadcomProprietary = VK_DRIVER_ID_BROADCOM_PROPRIETARY,
+    eMesaLlvmpipe = VK_DRIVER_ID_MESA_LLVMPIPE,
+    eMoltenvk = VK_DRIVER_ID_MOLTENVK,
+    eCoreaviProprietary = VK_DRIVER_ID_COREAVI_PROPRIETARY,
+    eJuiceProprietary = VK_DRIVER_ID_JUICE_PROPRIETARY,
+    eVerisiliconProprietary = VK_DRIVER_ID_VERISILICON_PROPRIETARY,
+    eMesaTurnip = VK_DRIVER_ID_MESA_TURNIP,
+    eMesaV3Dv = VK_DRIVER_ID_MESA_V3DV,
+    eMesaPanvk = VK_DRIVER_ID_MESA_PANVK,
+    eSamsungProprietary = VK_DRIVER_ID_SAMSUNG_PROPRIETARY,
+    eMesaVenus = VK_DRIVER_ID_MESA_VENUS,
+    eMesaDozen = VK_DRIVER_ID_MESA_DOZEN,
+    eMesaNvk = VK_DRIVER_ID_MESA_NVK,
+    eImaginationOpenSourceMESA = VK_DRIVER_ID_IMAGINATION_OPEN_SOURCE_MESA
+  };
+  using DriverIdKHR = DriverId;  // back-compat alias (type was promoted from VK_KHR_driver_properties to core 1.2)
+
+  enum class ShaderFloatControlsIndependence  // how independently denorm/rounding behavior can be set per floating-point width
+  {
+    e32BitOnly = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY,
+    eAll = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL,
+    eNone = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE
+  };
+  using ShaderFloatControlsIndependenceKHR = ShaderFloatControlsIndependence;  // back-compat alias for the KHR extension spelling
+
+  enum class DescriptorBindingFlagBits : VkDescriptorBindingFlags  // per-binding behavior flags for descriptor-indexing (update-after-bind etc.)
+  {
+    eUpdateAfterBind = VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT,
+    eUpdateUnusedWhilePending = VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT,
+    ePartiallyBound = VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT,
+    eVariableDescriptorCount = VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT
+  };
+  using DescriptorBindingFlagBitsEXT = DescriptorBindingFlagBits;  // back-compat alias (promoted from VK_EXT_descriptor_indexing)
+
+  using DescriptorBindingFlags = Flags<DescriptorBindingFlagBits>;
+  using DescriptorBindingFlagsEXT = DescriptorBindingFlags;
+
+
+  template <> struct FlagTraits<DescriptorBindingFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR DescriptorBindingFlags allFlags =  // union of every defined bit above
+          DescriptorBindingFlagBits::eUpdateAfterBind
+        | DescriptorBindingFlagBits::eUpdateUnusedWhilePending
+        | DescriptorBindingFlagBits::ePartiallyBound
+        | DescriptorBindingFlagBits::eVariableDescriptorCount;
+  };
+
+  enum class ResolveModeFlagBits : VkResolveModeFlags  // multisample depth/stencil resolve operations; eNone is the zero value, not a real bit
+  {
+    eNone = VK_RESOLVE_MODE_NONE,
+    eSampleZero = VK_RESOLVE_MODE_SAMPLE_ZERO_BIT,
+    eAverage = VK_RESOLVE_MODE_AVERAGE_BIT,
+    eMin = VK_RESOLVE_MODE_MIN_BIT,
+    eMax = VK_RESOLVE_MODE_MAX_BIT
+  };
+  using ResolveModeFlagBitsKHR = ResolveModeFlagBits;  // back-compat alias for the KHR extension spelling
+
+  using ResolveModeFlags = Flags<ResolveModeFlagBits>;
+  using ResolveModeFlagsKHR = ResolveModeFlags;
+
+
+  template <> struct FlagTraits<ResolveModeFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ResolveModeFlags allFlags =  // includes eNone (0), so OR-ing it is a no-op by construction
+          ResolveModeFlagBits::eNone
+        | ResolveModeFlagBits::eSampleZero
+        | ResolveModeFlagBits::eAverage
+        | ResolveModeFlagBits::eMin
+        | ResolveModeFlagBits::eMax;
+  };
+
+  enum class SamplerReductionMode  // how a sampler combines fetched texels (weighted average vs. component-wise min/max)
+  {
+    eWeightedAverage = VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE,
+    eMin = VK_SAMPLER_REDUCTION_MODE_MIN,
+    eMax = VK_SAMPLER_REDUCTION_MODE_MAX
+  };
+  using SamplerReductionModeEXT = SamplerReductionMode;  // back-compat alias (promoted from VK_EXT_sampler_filter_minmax)
+
+  enum class SemaphoreType  // binary (signal/wait pairs) vs. timeline (monotonically increasing 64-bit payload) semaphores
+  {
+    eBinary = VK_SEMAPHORE_TYPE_BINARY,
+    eTimeline = VK_SEMAPHORE_TYPE_TIMELINE
+  };
+  using SemaphoreTypeKHR = SemaphoreType;  // back-compat alias (promoted from VK_KHR_timeline_semaphore)
+
+  enum class SemaphoreWaitFlagBits : VkSemaphoreWaitFlags  // options for vkWaitSemaphores; eAny = return when any semaphore signals
+  {
+    eAny = VK_SEMAPHORE_WAIT_ANY_BIT
+  };
+  using SemaphoreWaitFlagBitsKHR = SemaphoreWaitFlagBits;  // back-compat alias for the KHR extension spelling
+
+  using SemaphoreWaitFlags = Flags<SemaphoreWaitFlagBits>;
+  using SemaphoreWaitFlagsKHR = SemaphoreWaitFlags;
+
+
+  template <> struct FlagTraits<SemaphoreWaitFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR SemaphoreWaitFlags allFlags =  // the only defined bit
+          SemaphoreWaitFlagBits::eAny;
+  };
+
+  //=== VK_VERSION_1_3 ===
+
+  enum class PipelineCreationFeedbackFlagBits : VkPipelineCreationFeedbackFlags  // status bits written back by the driver in VkPipelineCreationFeedback
+  {
+    eValid = VK_PIPELINE_CREATION_FEEDBACK_VALID_BIT,
+    eApplicationPipelineCacheHit = VK_PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT,
+    eBasePipelineAcceleration = VK_PIPELINE_CREATION_FEEDBACK_BASE_PIPELINE_ACCELERATION_BIT
+  };
+  using PipelineCreationFeedbackFlagBitsEXT = PipelineCreationFeedbackFlagBits;  // back-compat alias (promoted from VK_EXT_pipeline_creation_feedback)
+
+  using PipelineCreationFeedbackFlags = Flags<PipelineCreationFeedbackFlagBits>;
+  using PipelineCreationFeedbackFlagsEXT = PipelineCreationFeedbackFlags;
+
+
+  template <> struct FlagTraits<PipelineCreationFeedbackFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineCreationFeedbackFlags allFlags =  // union of every defined bit above
+          PipelineCreationFeedbackFlagBits::eValid
+        | PipelineCreationFeedbackFlagBits::eApplicationPipelineCacheHit
+        | PipelineCreationFeedbackFlagBits::eBasePipelineAcceleration;
+  };
+
+  enum class ToolPurposeFlagBits : VkToolPurposeFlags  // purpose categories reported by vkGetPhysicalDeviceToolProperties
+  {
+    eValidation = VK_TOOL_PURPOSE_VALIDATION_BIT,
+    eProfiling = VK_TOOL_PURPOSE_PROFILING_BIT,
+    eTracing = VK_TOOL_PURPOSE_TRACING_BIT,
+    eAdditionalFeatures = VK_TOOL_PURPOSE_ADDITIONAL_FEATURES_BIT,
+    eModifyingFeatures = VK_TOOL_PURPOSE_MODIFYING_FEATURES_BIT,
+    eDebugReportingEXT = VK_TOOL_PURPOSE_DEBUG_REPORTING_BIT_EXT,
+    eDebugMarkersEXT = VK_TOOL_PURPOSE_DEBUG_MARKERS_BIT_EXT
+  };
+  using ToolPurposeFlagBitsEXT = ToolPurposeFlagBits;  // back-compat alias (promoted from VK_EXT_tooling_info)
+
+  using ToolPurposeFlags = Flags<ToolPurposeFlagBits>;
+  using ToolPurposeFlagsEXT = ToolPurposeFlags;
+
+
+  template <> struct FlagTraits<ToolPurposeFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ToolPurposeFlags allFlags =  // union of every defined bit above, including the EXT-only bits
+          ToolPurposeFlagBits::eValidation
+        | ToolPurposeFlagBits::eProfiling
+        | ToolPurposeFlagBits::eTracing
+        | ToolPurposeFlagBits::eAdditionalFeatures
+        | ToolPurposeFlagBits::eModifyingFeatures
+        | ToolPurposeFlagBits::eDebugReportingEXT
+        | ToolPurposeFlagBits::eDebugMarkersEXT;
+  };
+
+  enum class PrivateDataSlotCreateFlagBits : VkPrivateDataSlotCreateFlags  // reserved for future use — no bits defined yet
+  {};
+  using PrivateDataSlotCreateFlagBitsEXT = PrivateDataSlotCreateFlagBits;  // back-compat alias (promoted from VK_EXT_private_data)
+
+  using PrivateDataSlotCreateFlags = Flags<PrivateDataSlotCreateFlagBits>;
+  using PrivateDataSlotCreateFlagsEXT = PrivateDataSlotCreateFlags;
+
+
+  template <> struct FlagTraits<PrivateDataSlotCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PrivateDataSlotCreateFlags allFlags =  {};  // empty mask: the enum defines no bits
+  };
+
+  enum class PipelineStageFlagBits2 : VkPipelineStageFlags2  // 64-bit synchronization2 pipeline stages; several names below are spec aliases of the same bit value
+  {
+    eNone = VK_PIPELINE_STAGE_2_NONE,
+    eTopOfPipe = VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT,
+    eDrawIndirect = VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT,
+    eVertexInput = VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT,
+    eVertexShader = VK_PIPELINE_STAGE_2_VERTEX_SHADER_BIT,
+    eTessellationControlShader = VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT,
+    eTessellationEvaluationShader = VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT,
+    eGeometryShader = VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT,
+    eFragmentShader = VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT,
+    eEarlyFragmentTests = VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT,
+    eLateFragmentTests = VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT,
+    eColorAttachmentOutput = VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT,
+    eComputeShader = VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT,
+    eAllTransfer = VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT,
+    eBottomOfPipe = VK_PIPELINE_STAGE_2_BOTTOM_OF_PIPE_BIT,
+    eHost = VK_PIPELINE_STAGE_2_HOST_BIT,
+    eAllGraphics = VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT,
+    eAllCommands = VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT,
+    eCopy = VK_PIPELINE_STAGE_2_COPY_BIT,
+    eResolve = VK_PIPELINE_STAGE_2_RESOLVE_BIT,
+    eBlit = VK_PIPELINE_STAGE_2_BLIT_BIT,
+    eClear = VK_PIPELINE_STAGE_2_CLEAR_BIT,
+    eIndexInput = VK_PIPELINE_STAGE_2_INDEX_INPUT_BIT,
+    eVertexAttributeInput = VK_PIPELINE_STAGE_2_VERTEX_ATTRIBUTE_INPUT_BIT,
+    ePreRasterizationShaders = VK_PIPELINE_STAGE_2_PRE_RASTERIZATION_SHADERS_BIT,
+    eVideoDecodeKHR = VK_PIPELINE_STAGE_2_VIDEO_DECODE_BIT_KHR,
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    eVideoEncodeKHR = VK_PIPELINE_STAGE_2_VIDEO_ENCODE_BIT_KHR,
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+    eTransformFeedbackEXT = VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT,
+    eConditionalRenderingEXT = VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT,
+    eCommandPreprocessNV = VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_NV,
+    eFragmentShadingRateAttachmentKHR = VK_PIPELINE_STAGE_2_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR,
+    eAccelerationStructureBuildKHR = VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR,
+    eRayTracingShaderKHR = VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_KHR,
+    eFragmentDensityProcessEXT = VK_PIPELINE_STAGE_2_FRAGMENT_DENSITY_PROCESS_BIT_EXT,
+    eTaskShaderEXT = VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_EXT,
+    eMeshShaderEXT = VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_EXT,
+    eSubpassShadingHUAWEI = VK_PIPELINE_STAGE_2_SUBPASS_SHADING_BIT_HUAWEI,
+    eInvocationMaskHUAWEI = VK_PIPELINE_STAGE_2_INVOCATION_MASK_BIT_HUAWEI,
+    eAccelerationStructureCopyKHR = VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_COPY_BIT_KHR,
+    eMicromapBuildEXT = VK_PIPELINE_STAGE_2_MICROMAP_BUILD_BIT_EXT,
+    eClusterCullingShaderHUAWEI = VK_PIPELINE_STAGE_2_CLUSTER_CULLING_SHADER_BIT_HUAWEI,
+    eOpticalFlowNV = VK_PIPELINE_STAGE_2_OPTICAL_FLOW_BIT_NV,
+    eAccelerationStructureBuildNV = VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_NV,
+    eMeshShaderNV = VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_NV,
+    eRayTracingShaderNV = VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_NV,
+    eShadingRateImageNV = VK_PIPELINE_STAGE_2_SHADING_RATE_IMAGE_BIT_NV,
+    eTaskShaderNV = VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_NV,
+    eTransfer = VK_PIPELINE_STAGE_2_TRANSFER_BIT
+  };
+  using PipelineStageFlagBits2KHR = PipelineStageFlagBits2;  // back-compat alias (promoted from VK_KHR_synchronization2)
+
+  using PipelineStageFlags2 = Flags<PipelineStageFlagBits2>;
+  using PipelineStageFlags2KHR = PipelineStageFlags2;
+
+
+  template <> struct FlagTraits<PipelineStageFlagBits2>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineStageFlags2 allFlags =  // NOTE(review): eTransfer and the *NV names are absent — presumably alias bits (e.g. eTransfer == eAllTransfer) the generator omits; confirm against vulkan_core.h
+          PipelineStageFlagBits2::eNone
+        | PipelineStageFlagBits2::eTopOfPipe
+        | PipelineStageFlagBits2::eDrawIndirect
+        | PipelineStageFlagBits2::eVertexInput
+        | PipelineStageFlagBits2::eVertexShader
+        | PipelineStageFlagBits2::eTessellationControlShader
+        | PipelineStageFlagBits2::eTessellationEvaluationShader
+        | PipelineStageFlagBits2::eGeometryShader
+        | PipelineStageFlagBits2::eFragmentShader
+        | PipelineStageFlagBits2::eEarlyFragmentTests
+        | PipelineStageFlagBits2::eLateFragmentTests
+        | PipelineStageFlagBits2::eColorAttachmentOutput
+        | PipelineStageFlagBits2::eComputeShader
+        | PipelineStageFlagBits2::eAllTransfer
+        | PipelineStageFlagBits2::eBottomOfPipe
+        | PipelineStageFlagBits2::eHost
+        | PipelineStageFlagBits2::eAllGraphics
+        | PipelineStageFlagBits2::eAllCommands
+        | PipelineStageFlagBits2::eCopy
+        | PipelineStageFlagBits2::eResolve
+        | PipelineStageFlagBits2::eBlit
+        | PipelineStageFlagBits2::eClear
+        | PipelineStageFlagBits2::eIndexInput
+        | PipelineStageFlagBits2::eVertexAttributeInput
+        | PipelineStageFlagBits2::ePreRasterizationShaders
+        | PipelineStageFlagBits2::eVideoDecodeKHR
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+        | PipelineStageFlagBits2::eVideoEncodeKHR
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+        | PipelineStageFlagBits2::eTransformFeedbackEXT
+        | PipelineStageFlagBits2::eConditionalRenderingEXT
+        | PipelineStageFlagBits2::eCommandPreprocessNV
+        | PipelineStageFlagBits2::eFragmentShadingRateAttachmentKHR
+        | PipelineStageFlagBits2::eAccelerationStructureBuildKHR
+        | PipelineStageFlagBits2::eRayTracingShaderKHR
+        | PipelineStageFlagBits2::eFragmentDensityProcessEXT
+        | PipelineStageFlagBits2::eTaskShaderEXT
+        | PipelineStageFlagBits2::eMeshShaderEXT
+        | PipelineStageFlagBits2::eSubpassShadingHUAWEI
+        | PipelineStageFlagBits2::eInvocationMaskHUAWEI
+        | PipelineStageFlagBits2::eAccelerationStructureCopyKHR
+        | PipelineStageFlagBits2::eMicromapBuildEXT
+        | PipelineStageFlagBits2::eClusterCullingShaderHUAWEI
+        | PipelineStageFlagBits2::eOpticalFlowNV;
+  };
+
+  enum class AccessFlagBits2 : VkAccessFlags2  // 64-bit synchronization2 access masks; several names below are spec aliases of the same bit value
+  {
+    eNone = VK_ACCESS_2_NONE,
+    eIndirectCommandRead = VK_ACCESS_2_INDIRECT_COMMAND_READ_BIT,
+    eIndexRead = VK_ACCESS_2_INDEX_READ_BIT,
+    eVertexAttributeRead = VK_ACCESS_2_VERTEX_ATTRIBUTE_READ_BIT,
+    eUniformRead = VK_ACCESS_2_UNIFORM_READ_BIT,
+    eInputAttachmentRead = VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT,
+    eShaderRead = VK_ACCESS_2_SHADER_READ_BIT,
+    eShaderWrite = VK_ACCESS_2_SHADER_WRITE_BIT,
+    eColorAttachmentRead = VK_ACCESS_2_COLOR_ATTACHMENT_READ_BIT,
+    eColorAttachmentWrite = VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT,
+    eDepthStencilAttachmentRead = VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_READ_BIT,
+    eDepthStencilAttachmentWrite = VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
+    eTransferRead = VK_ACCESS_2_TRANSFER_READ_BIT,
+    eTransferWrite = VK_ACCESS_2_TRANSFER_WRITE_BIT,
+    eHostRead = VK_ACCESS_2_HOST_READ_BIT,
+    eHostWrite = VK_ACCESS_2_HOST_WRITE_BIT,
+    eMemoryRead = VK_ACCESS_2_MEMORY_READ_BIT,
+    eMemoryWrite = VK_ACCESS_2_MEMORY_WRITE_BIT,
+    eShaderSampledRead = VK_ACCESS_2_SHADER_SAMPLED_READ_BIT,
+    eShaderStorageRead = VK_ACCESS_2_SHADER_STORAGE_READ_BIT,
+    eShaderStorageWrite = VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT,
+    eVideoDecodeReadKHR = VK_ACCESS_2_VIDEO_DECODE_READ_BIT_KHR,
+    eVideoDecodeWriteKHR = VK_ACCESS_2_VIDEO_DECODE_WRITE_BIT_KHR,
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    eVideoEncodeReadKHR = VK_ACCESS_2_VIDEO_ENCODE_READ_BIT_KHR,
+    eVideoEncodeWriteKHR = VK_ACCESS_2_VIDEO_ENCODE_WRITE_BIT_KHR,
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+    eTransformFeedbackWriteEXT = VK_ACCESS_2_TRANSFORM_FEEDBACK_WRITE_BIT_EXT,
+    eTransformFeedbackCounterReadEXT = VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT,
+    eTransformFeedbackCounterWriteEXT = VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT,
+    eConditionalRenderingReadEXT = VK_ACCESS_2_CONDITIONAL_RENDERING_READ_BIT_EXT,
+    eCommandPreprocessReadNV = VK_ACCESS_2_COMMAND_PREPROCESS_READ_BIT_NV,
+    eCommandPreprocessWriteNV = VK_ACCESS_2_COMMAND_PREPROCESS_WRITE_BIT_NV,
+    eFragmentShadingRateAttachmentReadKHR = VK_ACCESS_2_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR,
+    eAccelerationStructureReadKHR = VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_KHR,
+    eAccelerationStructureWriteKHR = VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_KHR,
+    eFragmentDensityMapReadEXT = VK_ACCESS_2_FRAGMENT_DENSITY_MAP_READ_BIT_EXT,
+    eColorAttachmentReadNoncoherentEXT = VK_ACCESS_2_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT,
+    eDescriptorBufferReadEXT = VK_ACCESS_2_DESCRIPTOR_BUFFER_READ_BIT_EXT,
+    eInvocationMaskReadHUAWEI = VK_ACCESS_2_INVOCATION_MASK_READ_BIT_HUAWEI,
+    eShaderBindingTableReadKHR = VK_ACCESS_2_SHADER_BINDING_TABLE_READ_BIT_KHR,
+    eMicromapReadEXT = VK_ACCESS_2_MICROMAP_READ_BIT_EXT,
+    eMicromapWriteEXT = VK_ACCESS_2_MICROMAP_WRITE_BIT_EXT,
+    eOpticalFlowReadNV = VK_ACCESS_2_OPTICAL_FLOW_READ_BIT_NV,
+    eOpticalFlowWriteNV = VK_ACCESS_2_OPTICAL_FLOW_WRITE_BIT_NV,
+    eAccelerationStructureReadNV = VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_NV,
+    eAccelerationStructureWriteNV = VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_NV,
+    eShadingRateImageReadNV = VK_ACCESS_2_SHADING_RATE_IMAGE_READ_BIT_NV
+  };
+  using AccessFlagBits2KHR = AccessFlagBits2;  // back-compat alias (promoted from VK_KHR_synchronization2)
+
+  using AccessFlags2 = Flags<AccessFlagBits2>;
+  using AccessFlags2KHR = AccessFlags2;
+
+
+  template <> struct FlagTraits<AccessFlagBits2>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR AccessFlags2 allFlags =  // NOTE(review): the trailing *NV names are absent — presumably alias bits of the KHR equivalents the generator omits; confirm against vulkan_core.h
+          AccessFlagBits2::eNone
+        | AccessFlagBits2::eIndirectCommandRead
+        | AccessFlagBits2::eIndexRead
+        | AccessFlagBits2::eVertexAttributeRead
+        | AccessFlagBits2::eUniformRead
+        | AccessFlagBits2::eInputAttachmentRead
+        | AccessFlagBits2::eShaderRead
+        | AccessFlagBits2::eShaderWrite
+        | AccessFlagBits2::eColorAttachmentRead
+        | AccessFlagBits2::eColorAttachmentWrite
+        | AccessFlagBits2::eDepthStencilAttachmentRead
+        | AccessFlagBits2::eDepthStencilAttachmentWrite
+        | AccessFlagBits2::eTransferRead
+        | AccessFlagBits2::eTransferWrite
+        | AccessFlagBits2::eHostRead
+        | AccessFlagBits2::eHostWrite
+        | AccessFlagBits2::eMemoryRead
+        | AccessFlagBits2::eMemoryWrite
+        | AccessFlagBits2::eShaderSampledRead
+        | AccessFlagBits2::eShaderStorageRead
+        | AccessFlagBits2::eShaderStorageWrite
+        | AccessFlagBits2::eVideoDecodeReadKHR
+        | AccessFlagBits2::eVideoDecodeWriteKHR
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+        | AccessFlagBits2::eVideoEncodeReadKHR
+        | AccessFlagBits2::eVideoEncodeWriteKHR
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+        | AccessFlagBits2::eTransformFeedbackWriteEXT
+        | AccessFlagBits2::eTransformFeedbackCounterReadEXT
+        | AccessFlagBits2::eTransformFeedbackCounterWriteEXT
+        | AccessFlagBits2::eConditionalRenderingReadEXT
+        | AccessFlagBits2::eCommandPreprocessReadNV
+        | AccessFlagBits2::eCommandPreprocessWriteNV
+        | AccessFlagBits2::eFragmentShadingRateAttachmentReadKHR
+        | AccessFlagBits2::eAccelerationStructureReadKHR
+        | AccessFlagBits2::eAccelerationStructureWriteKHR
+        | AccessFlagBits2::eFragmentDensityMapReadEXT
+        | AccessFlagBits2::eColorAttachmentReadNoncoherentEXT
+        | AccessFlagBits2::eDescriptorBufferReadEXT
+        | AccessFlagBits2::eInvocationMaskReadHUAWEI
+        | AccessFlagBits2::eShaderBindingTableReadKHR
+        | AccessFlagBits2::eMicromapReadEXT
+        | AccessFlagBits2::eMicromapWriteEXT
+        | AccessFlagBits2::eOpticalFlowReadNV
+        | AccessFlagBits2::eOpticalFlowWriteNV;
+  };
+
+  enum class SubmitFlagBits : VkSubmitFlags  // options for vkQueueSubmit2; eProtected = protected-memory submission
+  {
+    eProtected = VK_SUBMIT_PROTECTED_BIT
+  };
+  using SubmitFlagBitsKHR = SubmitFlagBits;  // back-compat alias (promoted from VK_KHR_synchronization2)
+
+  using SubmitFlags = Flags<SubmitFlagBits>;
+  using SubmitFlagsKHR = SubmitFlags;
+
+
+  template <> struct FlagTraits<SubmitFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR SubmitFlags allFlags =  // the only defined bit
+          SubmitFlagBits::eProtected;
+  };
+
+  enum class RenderingFlagBits : VkRenderingFlags  // options for dynamic rendering (vkCmdBeginRendering)
+  {
+    eContentsSecondaryCommandBuffers = VK_RENDERING_CONTENTS_SECONDARY_COMMAND_BUFFERS_BIT,
+    eSuspending = VK_RENDERING_SUSPENDING_BIT,
+    eResuming = VK_RENDERING_RESUMING_BIT,
+    eEnableLegacyDitheringEXT = VK_RENDERING_ENABLE_LEGACY_DITHERING_BIT_EXT
+  };
+  using RenderingFlagBitsKHR = RenderingFlagBits;  // back-compat alias (promoted from VK_KHR_dynamic_rendering)
+
+  using RenderingFlags = Flags<RenderingFlagBits>;
+  using RenderingFlagsKHR = RenderingFlags;
+
+
+  template <> struct FlagTraits<RenderingFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR RenderingFlags allFlags =  // union of every defined bit above
+          RenderingFlagBits::eContentsSecondaryCommandBuffers
+        | RenderingFlagBits::eSuspending
+        | RenderingFlagBits::eResuming
+        | RenderingFlagBits::eEnableLegacyDitheringEXT;
+  };
+
+  enum class FormatFeatureFlagBits2 : VkFormatFeatureFlags2  // 64-bit per-format capability bits (vkGetPhysicalDeviceFormatProperties2)
+  {
+    eSampledImage = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_BIT,
+    eStorageImage = VK_FORMAT_FEATURE_2_STORAGE_IMAGE_BIT,
+    eStorageImageAtomic = VK_FORMAT_FEATURE_2_STORAGE_IMAGE_ATOMIC_BIT,
+    eUniformTexelBuffer = VK_FORMAT_FEATURE_2_UNIFORM_TEXEL_BUFFER_BIT,
+    eStorageTexelBuffer = VK_FORMAT_FEATURE_2_STORAGE_TEXEL_BUFFER_BIT,
+    eStorageTexelBufferAtomic = VK_FORMAT_FEATURE_2_STORAGE_TEXEL_BUFFER_ATOMIC_BIT,
+    eVertexBuffer = VK_FORMAT_FEATURE_2_VERTEX_BUFFER_BIT,
+    eColorAttachment = VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BIT,
+    eColorAttachmentBlend = VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BLEND_BIT,
+    eDepthStencilAttachment = VK_FORMAT_FEATURE_2_DEPTH_STENCIL_ATTACHMENT_BIT,
+    eBlitSrc = VK_FORMAT_FEATURE_2_BLIT_SRC_BIT,
+    eBlitDst = VK_FORMAT_FEATURE_2_BLIT_DST_BIT,
+    eSampledImageFilterLinear = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_LINEAR_BIT,
+    eSampledImageFilterCubic = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_CUBIC_BIT,
+    eTransferSrc = VK_FORMAT_FEATURE_2_TRANSFER_SRC_BIT,
+    eTransferDst = VK_FORMAT_FEATURE_2_TRANSFER_DST_BIT,
+    eSampledImageFilterMinmax = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_MINMAX_BIT,
+    eMidpointChromaSamples = VK_FORMAT_FEATURE_2_MIDPOINT_CHROMA_SAMPLES_BIT,
+    eSampledImageYcbcrConversionLinearFilter = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT,
+    eSampledImageYcbcrConversionSeparateReconstructionFilter = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT,
+    eSampledImageYcbcrConversionChromaReconstructionExplicit = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT,
+    eSampledImageYcbcrConversionChromaReconstructionExplicitForceable = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT,
+    eDisjoint = VK_FORMAT_FEATURE_2_DISJOINT_BIT,
+    eCositedChromaSamples = VK_FORMAT_FEATURE_2_COSITED_CHROMA_SAMPLES_BIT,
+    eStorageReadWithoutFormat = VK_FORMAT_FEATURE_2_STORAGE_READ_WITHOUT_FORMAT_BIT,
+    eStorageWriteWithoutFormat = VK_FORMAT_FEATURE_2_STORAGE_WRITE_WITHOUT_FORMAT_BIT,
+    eSampledImageDepthComparison = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_DEPTH_COMPARISON_BIT,
+    eVideoDecodeOutputKHR = VK_FORMAT_FEATURE_2_VIDEO_DECODE_OUTPUT_BIT_KHR,
+    eVideoDecodeDpbKHR = VK_FORMAT_FEATURE_2_VIDEO_DECODE_DPB_BIT_KHR,
+    eAccelerationStructureVertexBufferKHR = VK_FORMAT_FEATURE_2_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR,
+    eFragmentDensityMapEXT = VK_FORMAT_FEATURE_2_FRAGMENT_DENSITY_MAP_BIT_EXT,
+    eFragmentShadingRateAttachmentKHR = VK_FORMAT_FEATURE_2_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR,
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    eVideoEncodeInputKHR = VK_FORMAT_FEATURE_2_VIDEO_ENCODE_INPUT_BIT_KHR,
+    eVideoEncodeDpbKHR = VK_FORMAT_FEATURE_2_VIDEO_ENCODE_DPB_BIT_KHR,
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+    eLinearColorAttachmentNV = VK_FORMAT_FEATURE_2_LINEAR_COLOR_ATTACHMENT_BIT_NV,
+    eWeightImageQCOM = VK_FORMAT_FEATURE_2_WEIGHT_IMAGE_BIT_QCOM,
+    eWeightSampledImageQCOM = VK_FORMAT_FEATURE_2_WEIGHT_SAMPLED_IMAGE_BIT_QCOM,
+    eBlockMatchingQCOM = VK_FORMAT_FEATURE_2_BLOCK_MATCHING_BIT_QCOM,
+    eBoxFilterSampledQCOM = VK_FORMAT_FEATURE_2_BOX_FILTER_SAMPLED_BIT_QCOM,
+    eOpticalFlowImageNV = VK_FORMAT_FEATURE_2_OPTICAL_FLOW_IMAGE_BIT_NV,
+    eOpticalFlowVectorNV = VK_FORMAT_FEATURE_2_OPTICAL_FLOW_VECTOR_BIT_NV,
+    eOpticalFlowCostNV = VK_FORMAT_FEATURE_2_OPTICAL_FLOW_COST_BIT_NV,
+    eSampledImageFilterCubicEXT = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT
+  };
+  using FormatFeatureFlagBits2KHR = FormatFeatureFlagBits2;  // back-compat alias (promoted from VK_KHR_format_feature_flags2)
+
+  using FormatFeatureFlags2 = Flags<FormatFeatureFlagBits2>;
+  using FormatFeatureFlags2KHR = FormatFeatureFlags2;
+
+
+  template <> struct FlagTraits<FormatFeatureFlagBits2>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR FormatFeatureFlags2 allFlags =  // NOTE(review): eSampledImageFilterCubicEXT is absent — presumably an alias of eSampledImageFilterCubic the generator omits; confirm against vulkan_core.h
+          FormatFeatureFlagBits2::eSampledImage
+        | FormatFeatureFlagBits2::eStorageImage
+        | FormatFeatureFlagBits2::eStorageImageAtomic
+        | FormatFeatureFlagBits2::eUniformTexelBuffer
+        | FormatFeatureFlagBits2::eStorageTexelBuffer
+        | FormatFeatureFlagBits2::eStorageTexelBufferAtomic
+        | FormatFeatureFlagBits2::eVertexBuffer
+        | FormatFeatureFlagBits2::eColorAttachment
+        | FormatFeatureFlagBits2::eColorAttachmentBlend
+        | FormatFeatureFlagBits2::eDepthStencilAttachment
+        | FormatFeatureFlagBits2::eBlitSrc
+        | FormatFeatureFlagBits2::eBlitDst
+        | FormatFeatureFlagBits2::eSampledImageFilterLinear
+        | FormatFeatureFlagBits2::eSampledImageFilterCubic
+        | FormatFeatureFlagBits2::eTransferSrc
+        | FormatFeatureFlagBits2::eTransferDst
+        | FormatFeatureFlagBits2::eSampledImageFilterMinmax
+        | FormatFeatureFlagBits2::eMidpointChromaSamples
+        | FormatFeatureFlagBits2::eSampledImageYcbcrConversionLinearFilter
+        | FormatFeatureFlagBits2::eSampledImageYcbcrConversionSeparateReconstructionFilter
+        | FormatFeatureFlagBits2::eSampledImageYcbcrConversionChromaReconstructionExplicit
+        | FormatFeatureFlagBits2::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable
+        | FormatFeatureFlagBits2::eDisjoint
+        | FormatFeatureFlagBits2::eCositedChromaSamples
+        | FormatFeatureFlagBits2::eStorageReadWithoutFormat
+        | FormatFeatureFlagBits2::eStorageWriteWithoutFormat
+        | FormatFeatureFlagBits2::eSampledImageDepthComparison
+        | FormatFeatureFlagBits2::eVideoDecodeOutputKHR
+        | FormatFeatureFlagBits2::eVideoDecodeDpbKHR
+        | FormatFeatureFlagBits2::eAccelerationStructureVertexBufferKHR
+        | FormatFeatureFlagBits2::eFragmentDensityMapEXT
+        | FormatFeatureFlagBits2::eFragmentShadingRateAttachmentKHR
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+        | FormatFeatureFlagBits2::eVideoEncodeInputKHR
+        | FormatFeatureFlagBits2::eVideoEncodeDpbKHR
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+        | FormatFeatureFlagBits2::eLinearColorAttachmentNV
+        | FormatFeatureFlagBits2::eWeightImageQCOM
+        | FormatFeatureFlagBits2::eWeightSampledImageQCOM
+        | FormatFeatureFlagBits2::eBlockMatchingQCOM
+        | FormatFeatureFlagBits2::eBoxFilterSampledQCOM
+        | FormatFeatureFlagBits2::eOpticalFlowImageNV
+        | FormatFeatureFlagBits2::eOpticalFlowVectorNV
+        | FormatFeatureFlagBits2::eOpticalFlowCostNV;
+  };
+
+  //=== VK_KHR_surface ===
+
+  enum class SurfaceTransformFlagBitsKHR : VkSurfaceTransformFlagsKHR  // presentation-surface pre-transform (rotation/mirror) capabilities
+  {
+    eIdentity = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR,
+    eRotate90 = VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR,
+    eRotate180 = VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR,
+    eRotate270 = VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR,
+    eHorizontalMirror = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR,
+    eHorizontalMirrorRotate90 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR,
+    eHorizontalMirrorRotate180 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR,
+    eHorizontalMirrorRotate270 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR,
+    eInherit = VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR
+  };
+
+  using SurfaceTransformFlagsKHR = Flags<SurfaceTransformFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<SurfaceTransformFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR SurfaceTransformFlagsKHR allFlags =  // union of every defined bit above
+          SurfaceTransformFlagBitsKHR::eIdentity
+        | SurfaceTransformFlagBitsKHR::eRotate90
+        | SurfaceTransformFlagBitsKHR::eRotate180
+        | SurfaceTransformFlagBitsKHR::eRotate270
+        | SurfaceTransformFlagBitsKHR::eHorizontalMirror
+        | SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90
+        | SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180
+        | SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270
+        | SurfaceTransformFlagBitsKHR::eInherit;
+  };
+
+  enum class PresentModeKHR  // swapchain presentation scheduling modes (vsync behavior)
+  {
+    eImmediate = VK_PRESENT_MODE_IMMEDIATE_KHR,
+    eMailbox = VK_PRESENT_MODE_MAILBOX_KHR,
+    eFifo = VK_PRESENT_MODE_FIFO_KHR,
+    eFifoRelaxed = VK_PRESENT_MODE_FIFO_RELAXED_KHR,
+    eSharedDemandRefresh = VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR,
+    eSharedContinuousRefresh = VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR
+  };
+
+  enum class ColorSpaceKHR  // surface color spaces; eVkColorspaceSrgbNonlinear maps the legacy misspelled VK_COLORSPACE_* token to the same value as eSrgbNonlinear
+  {
+    eSrgbNonlinear = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR,
+    eDisplayP3NonlinearEXT = VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT,
+    eExtendedSrgbLinearEXT = VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT,
+    eDisplayP3LinearEXT = VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT,
+    eDciP3NonlinearEXT = VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT,
+    eBt709LinearEXT = VK_COLOR_SPACE_BT709_LINEAR_EXT,
+    eBt709NonlinearEXT = VK_COLOR_SPACE_BT709_NONLINEAR_EXT,
+    eBt2020LinearEXT = VK_COLOR_SPACE_BT2020_LINEAR_EXT,
+    eHdr10St2084EXT = VK_COLOR_SPACE_HDR10_ST2084_EXT,
+    eDolbyvisionEXT = VK_COLOR_SPACE_DOLBYVISION_EXT,
+    eHdr10HlgEXT = VK_COLOR_SPACE_HDR10_HLG_EXT,
+    eAdobergbLinearEXT = VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT,
+    eAdobergbNonlinearEXT = VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT,
+    ePassThroughEXT = VK_COLOR_SPACE_PASS_THROUGH_EXT,
+    eExtendedSrgbNonlinearEXT = VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT,
+    eDisplayNativeAMD = VK_COLOR_SPACE_DISPLAY_NATIVE_AMD,
+    eVkColorspaceSrgbNonlinear = VK_COLORSPACE_SRGB_NONLINEAR_KHR,
+    eDciP3LinearEXT = VK_COLOR_SPACE_DCI_P3_LINEAR_EXT
+  };
+
+  enum class CompositeAlphaFlagBitsKHR : VkCompositeAlphaFlagsKHR  // how the surface alpha channel is composited by the presentation engine
+  {
+    eOpaque = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR,
+    ePreMultiplied = VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR,
+    ePostMultiplied = VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR,
+    eInherit = VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR
+  };
+
+  using CompositeAlphaFlagsKHR = Flags<CompositeAlphaFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<CompositeAlphaFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR CompositeAlphaFlagsKHR allFlags =  // union of every defined bit above
+          CompositeAlphaFlagBitsKHR::eOpaque
+        | CompositeAlphaFlagBitsKHR::ePreMultiplied
+        | CompositeAlphaFlagBitsKHR::ePostMultiplied
+        | CompositeAlphaFlagBitsKHR::eInherit;
+  };
+
+  //=== VK_KHR_swapchain ===
+
+  enum class SwapchainCreateFlagBitsKHR : VkSwapchainCreateFlagsKHR  // swapchain creation options (VkSwapchainCreateInfoKHR::flags)
+  {
+    eSplitInstanceBindRegions = VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR,
+    eProtected = VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR,
+    eMutableFormat = VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR,
+    eDeferredMemoryAllocationEXT = VK_SWAPCHAIN_CREATE_DEFERRED_MEMORY_ALLOCATION_BIT_EXT
+  };
+
+  using SwapchainCreateFlagsKHR = Flags<SwapchainCreateFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<SwapchainCreateFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR SwapchainCreateFlagsKHR allFlags =  // union of every defined bit above
+          SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions
+        | SwapchainCreateFlagBitsKHR::eProtected
+        | SwapchainCreateFlagBitsKHR::eMutableFormat
+        | SwapchainCreateFlagBitsKHR::eDeferredMemoryAllocationEXT;
+  };
+
+  enum class DeviceGroupPresentModeFlagBitsKHR : VkDeviceGroupPresentModeFlagsKHR
+  {
+    eLocal = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR,
+    eRemote = VK_DEVICE_GROUP_PRESENT_MODE_REMOTE_BIT_KHR,
+    eSum = VK_DEVICE_GROUP_PRESENT_MODE_SUM_BIT_KHR,
+    eLocalMultiDevice = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_MULTI_DEVICE_BIT_KHR
+  };
+
+  using DeviceGroupPresentModeFlagsKHR = Flags<DeviceGroupPresentModeFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<DeviceGroupPresentModeFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR DeviceGroupPresentModeFlagsKHR allFlags = 
+          DeviceGroupPresentModeFlagBitsKHR::eLocal
+        | DeviceGroupPresentModeFlagBitsKHR::eRemote
+        | DeviceGroupPresentModeFlagBitsKHR::eSum
+        | DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice;
+  };
+
+  //=== VK_KHR_display ===
+
+  enum class DisplayPlaneAlphaFlagBitsKHR : VkDisplayPlaneAlphaFlagsKHR
+  {
+    eOpaque = VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR,
+    eGlobal = VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR,
+    ePerPixel = VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR,
+    ePerPixelPremultiplied = VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR
+  };
+
+  using DisplayPlaneAlphaFlagsKHR = Flags<DisplayPlaneAlphaFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<DisplayPlaneAlphaFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR DisplayPlaneAlphaFlagsKHR allFlags = 
+          DisplayPlaneAlphaFlagBitsKHR::eOpaque
+        | DisplayPlaneAlphaFlagBitsKHR::eGlobal
+        | DisplayPlaneAlphaFlagBitsKHR::ePerPixel
+        | DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied;
+  };
+
+  enum class DisplayModeCreateFlagBitsKHR : VkDisplayModeCreateFlagsKHR
+  {};
+
+  using DisplayModeCreateFlagsKHR = Flags<DisplayModeCreateFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<DisplayModeCreateFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR DisplayModeCreateFlagsKHR allFlags =  {};
+  };
+
+  enum class DisplaySurfaceCreateFlagBitsKHR : VkDisplaySurfaceCreateFlagsKHR
+  {};
+
+  using DisplaySurfaceCreateFlagsKHR = Flags<DisplaySurfaceCreateFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<DisplaySurfaceCreateFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR DisplaySurfaceCreateFlagsKHR allFlags =  {};
+  };
+
+#if defined( VK_USE_PLATFORM_XLIB_KHR )
+  //=== VK_KHR_xlib_surface ===
+
+  enum class XlibSurfaceCreateFlagBitsKHR : VkXlibSurfaceCreateFlagsKHR
+  {};
+
+  using XlibSurfaceCreateFlagsKHR = Flags<XlibSurfaceCreateFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<XlibSurfaceCreateFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR XlibSurfaceCreateFlagsKHR allFlags =  {};
+  };
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+#if defined( VK_USE_PLATFORM_XCB_KHR )
+  //=== VK_KHR_xcb_surface ===
+
+  enum class XcbSurfaceCreateFlagBitsKHR : VkXcbSurfaceCreateFlagsKHR
+  {};
+
+  using XcbSurfaceCreateFlagsKHR = Flags<XcbSurfaceCreateFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<XcbSurfaceCreateFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR XcbSurfaceCreateFlagsKHR allFlags =  {};
+  };
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
+  //=== VK_KHR_wayland_surface ===
+
+  enum class WaylandSurfaceCreateFlagBitsKHR : VkWaylandSurfaceCreateFlagsKHR
+  {};
+
+  using WaylandSurfaceCreateFlagsKHR = Flags<WaylandSurfaceCreateFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<WaylandSurfaceCreateFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR WaylandSurfaceCreateFlagsKHR allFlags =  {};
+  };
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  //=== VK_KHR_android_surface ===
+
+  enum class AndroidSurfaceCreateFlagBitsKHR : VkAndroidSurfaceCreateFlagsKHR
+  {};
+
+  using AndroidSurfaceCreateFlagsKHR = Flags<AndroidSurfaceCreateFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<AndroidSurfaceCreateFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR AndroidSurfaceCreateFlagsKHR allFlags =  {};
+  };
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_win32_surface ===
+
+  enum class Win32SurfaceCreateFlagBitsKHR : VkWin32SurfaceCreateFlagsKHR
+  {};
+
+  using Win32SurfaceCreateFlagsKHR = Flags<Win32SurfaceCreateFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<Win32SurfaceCreateFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR Win32SurfaceCreateFlagsKHR allFlags =  {};
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_EXT_debug_report ===
+
+  enum class DebugReportFlagBitsEXT : VkDebugReportFlagsEXT
+  {
+    eInformation = VK_DEBUG_REPORT_INFORMATION_BIT_EXT,
+    eWarning = VK_DEBUG_REPORT_WARNING_BIT_EXT,
+    ePerformanceWarning = VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
+    eError = VK_DEBUG_REPORT_ERROR_BIT_EXT,
+    eDebug = VK_DEBUG_REPORT_DEBUG_BIT_EXT
+  };
+
+  using DebugReportFlagsEXT = Flags<DebugReportFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<DebugReportFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR DebugReportFlagsEXT allFlags = 
+          DebugReportFlagBitsEXT::eInformation
+        | DebugReportFlagBitsEXT::eWarning
+        | DebugReportFlagBitsEXT::ePerformanceWarning
+        | DebugReportFlagBitsEXT::eError
+        | DebugReportFlagBitsEXT::eDebug;
+  };
+
+  enum class DebugReportObjectTypeEXT
+  {
+    eUnknown = VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT,
+    eInstance = VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT,
+    ePhysicalDevice = VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
+    eDevice = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+    eQueue = VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT,
+    eSemaphore = VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT,
+    eCommandBuffer = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+    eFence = VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT,
+    eDeviceMemory = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+    eBuffer = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
+    eImage = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+    eEvent = VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT,
+    eQueryPool = VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT,
+    eBufferView = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT,
+    eImageView = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT,
+    eShaderModule = VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
+    ePipelineCache = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT,
+    ePipelineLayout = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT,
+    eRenderPass = VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+    ePipeline = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+    eDescriptorSetLayout = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT,
+    eSampler = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT,
+    eDescriptorPool = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT,
+    eDescriptorSet = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
+    eFramebuffer = VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT,
+    eCommandPool = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT,
+    eSurfaceKHR = VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT,
+    eSwapchainKHR = VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
+    eDebugReportCallbackEXT = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT,
+    eDisplayKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT,
+    eDisplayModeKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT,
+    eValidationCacheEXT = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT,
+    eSamplerYcbcrConversion = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT,
+    eDescriptorUpdateTemplate = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT,
+    eCuModuleNVX = VK_DEBUG_REPORT_OBJECT_TYPE_CU_MODULE_NVX_EXT,
+    eCuFunctionNVX = VK_DEBUG_REPORT_OBJECT_TYPE_CU_FUNCTION_NVX_EXT,
+    eAccelerationStructureKHR = VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR_EXT,
+    eAccelerationStructureNV = VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT,
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+    eBufferCollectionFUCHSIA = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_COLLECTION_FUCHSIA_EXT,
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+    eDebugReport = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT,
+    eDescriptorUpdateTemplateKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR_EXT,
+    eSamplerYcbcrConversionKHR = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR_EXT,
+    eValidationCache = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT
+  };
+
+  //=== VK_AMD_rasterization_order ===
+
+  enum class RasterizationOrderAMD
+  {
+    eStrict = VK_RASTERIZATION_ORDER_STRICT_AMD,
+    eRelaxed = VK_RASTERIZATION_ORDER_RELAXED_AMD
+  };
+
+  //=== VK_KHR_video_queue ===
+
+  enum class VideoCodecOperationFlagBitsKHR : VkVideoCodecOperationFlagsKHR
+  {
+    eNone = VK_VIDEO_CODEC_OPERATION_NONE_KHR,
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    eEncodeH264EXT = VK_VIDEO_CODEC_OPERATION_ENCODE_H264_BIT_EXT,
+    eEncodeH265EXT = VK_VIDEO_CODEC_OPERATION_ENCODE_H265_BIT_EXT,
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+    eDecodeH264 = VK_VIDEO_CODEC_OPERATION_DECODE_H264_BIT_KHR,
+    eDecodeH265 = VK_VIDEO_CODEC_OPERATION_DECODE_H265_BIT_KHR
+  };
+
+  using VideoCodecOperationFlagsKHR = Flags<VideoCodecOperationFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<VideoCodecOperationFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoCodecOperationFlagsKHR allFlags = 
+          VideoCodecOperationFlagBitsKHR::eNone
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+        | VideoCodecOperationFlagBitsKHR::eEncodeH264EXT
+        | VideoCodecOperationFlagBitsKHR::eEncodeH265EXT
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+        | VideoCodecOperationFlagBitsKHR::eDecodeH264
+        | VideoCodecOperationFlagBitsKHR::eDecodeH265;
+  };
+
+  enum class VideoChromaSubsamplingFlagBitsKHR : VkVideoChromaSubsamplingFlagsKHR
+  {
+    eInvalid = VK_VIDEO_CHROMA_SUBSAMPLING_INVALID_KHR,
+    eMonochrome = VK_VIDEO_CHROMA_SUBSAMPLING_MONOCHROME_BIT_KHR,
+    e420 = VK_VIDEO_CHROMA_SUBSAMPLING_420_BIT_KHR,
+    e422 = VK_VIDEO_CHROMA_SUBSAMPLING_422_BIT_KHR,
+    e444 = VK_VIDEO_CHROMA_SUBSAMPLING_444_BIT_KHR
+  };
+
+  using VideoChromaSubsamplingFlagsKHR = Flags<VideoChromaSubsamplingFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<VideoChromaSubsamplingFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoChromaSubsamplingFlagsKHR allFlags = 
+          VideoChromaSubsamplingFlagBitsKHR::eInvalid
+        | VideoChromaSubsamplingFlagBitsKHR::eMonochrome
+        | VideoChromaSubsamplingFlagBitsKHR::e420
+        | VideoChromaSubsamplingFlagBitsKHR::e422
+        | VideoChromaSubsamplingFlagBitsKHR::e444;
+  };
+
+  enum class VideoComponentBitDepthFlagBitsKHR : VkVideoComponentBitDepthFlagsKHR
+  {
+    eInvalid = VK_VIDEO_COMPONENT_BIT_DEPTH_INVALID_KHR,
+    e8 = VK_VIDEO_COMPONENT_BIT_DEPTH_8_BIT_KHR,
+    e10 = VK_VIDEO_COMPONENT_BIT_DEPTH_10_BIT_KHR,
+    e12 = VK_VIDEO_COMPONENT_BIT_DEPTH_12_BIT_KHR
+  };
+
+  using VideoComponentBitDepthFlagsKHR = Flags<VideoComponentBitDepthFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<VideoComponentBitDepthFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoComponentBitDepthFlagsKHR allFlags = 
+          VideoComponentBitDepthFlagBitsKHR::eInvalid
+        | VideoComponentBitDepthFlagBitsKHR::e8
+        | VideoComponentBitDepthFlagBitsKHR::e10
+        | VideoComponentBitDepthFlagBitsKHR::e12;
+  };
+
+  enum class VideoCapabilityFlagBitsKHR : VkVideoCapabilityFlagsKHR
+  {
+    eProtectedContent = VK_VIDEO_CAPABILITY_PROTECTED_CONTENT_BIT_KHR,
+    eSeparateReferenceImages = VK_VIDEO_CAPABILITY_SEPARATE_REFERENCE_IMAGES_BIT_KHR
+  };
+
+  using VideoCapabilityFlagsKHR = Flags<VideoCapabilityFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<VideoCapabilityFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoCapabilityFlagsKHR allFlags = 
+          VideoCapabilityFlagBitsKHR::eProtectedContent
+        | VideoCapabilityFlagBitsKHR::eSeparateReferenceImages;
+  };
+
+  enum class VideoSessionCreateFlagBitsKHR : VkVideoSessionCreateFlagsKHR
+  {
+    eProtectedContent = VK_VIDEO_SESSION_CREATE_PROTECTED_CONTENT_BIT_KHR
+  };
+
+  using VideoSessionCreateFlagsKHR = Flags<VideoSessionCreateFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<VideoSessionCreateFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoSessionCreateFlagsKHR allFlags = 
+          VideoSessionCreateFlagBitsKHR::eProtectedContent;
+  };
+
+  enum class VideoCodingControlFlagBitsKHR : VkVideoCodingControlFlagsKHR
+  {
+    eReset = VK_VIDEO_CODING_CONTROL_RESET_BIT_KHR,
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    eEncodeRateControl = VK_VIDEO_CODING_CONTROL_ENCODE_RATE_CONTROL_BIT_KHR,
+    eEncodeRateControlLayer = VK_VIDEO_CODING_CONTROL_ENCODE_RATE_CONTROL_LAYER_BIT_KHR
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+  };
+
+  using VideoCodingControlFlagsKHR = Flags<VideoCodingControlFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<VideoCodingControlFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoCodingControlFlagsKHR allFlags = 
+          VideoCodingControlFlagBitsKHR::eReset
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+        | VideoCodingControlFlagBitsKHR::eEncodeRateControl
+        | VideoCodingControlFlagBitsKHR::eEncodeRateControlLayer
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+;
+  };
+
+  enum class QueryResultStatusKHR
+  {
+    eError = VK_QUERY_RESULT_STATUS_ERROR_KHR,
+    eNotReady = VK_QUERY_RESULT_STATUS_NOT_READY_KHR,
+    eComplete = VK_QUERY_RESULT_STATUS_COMPLETE_KHR
+  };
+
+  enum class VideoSessionParametersCreateFlagBitsKHR : VkVideoSessionParametersCreateFlagsKHR
+  {};
+
+  using VideoSessionParametersCreateFlagsKHR = Flags<VideoSessionParametersCreateFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<VideoSessionParametersCreateFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoSessionParametersCreateFlagsKHR allFlags =  {};
+  };
+
+  enum class VideoBeginCodingFlagBitsKHR : VkVideoBeginCodingFlagsKHR
+  {};
+
+  using VideoBeginCodingFlagsKHR = Flags<VideoBeginCodingFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<VideoBeginCodingFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoBeginCodingFlagsKHR allFlags =  {};
+  };
+
+  enum class VideoEndCodingFlagBitsKHR : VkVideoEndCodingFlagsKHR
+  {};
+
+  using VideoEndCodingFlagsKHR = Flags<VideoEndCodingFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<VideoEndCodingFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEndCodingFlagsKHR allFlags =  {};
+  };
+
+  //=== VK_KHR_video_decode_queue ===
+
+  enum class VideoDecodeCapabilityFlagBitsKHR : VkVideoDecodeCapabilityFlagsKHR
+  {
+    eDpbAndOutputCoincide = VK_VIDEO_DECODE_CAPABILITY_DPB_AND_OUTPUT_COINCIDE_BIT_KHR,
+    eDpbAndOutputDistinct = VK_VIDEO_DECODE_CAPABILITY_DPB_AND_OUTPUT_DISTINCT_BIT_KHR
+  };
+
+  using VideoDecodeCapabilityFlagsKHR = Flags<VideoDecodeCapabilityFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<VideoDecodeCapabilityFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoDecodeCapabilityFlagsKHR allFlags = 
+          VideoDecodeCapabilityFlagBitsKHR::eDpbAndOutputCoincide
+        | VideoDecodeCapabilityFlagBitsKHR::eDpbAndOutputDistinct;
+  };
+
+  enum class VideoDecodeUsageFlagBitsKHR : VkVideoDecodeUsageFlagsKHR
+  {
+    eDefault = VK_VIDEO_DECODE_USAGE_DEFAULT_KHR,
+    eTranscoding = VK_VIDEO_DECODE_USAGE_TRANSCODING_BIT_KHR,
+    eOffline = VK_VIDEO_DECODE_USAGE_OFFLINE_BIT_KHR,
+    eStreaming = VK_VIDEO_DECODE_USAGE_STREAMING_BIT_KHR
+  };
+
+  using VideoDecodeUsageFlagsKHR = Flags<VideoDecodeUsageFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<VideoDecodeUsageFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoDecodeUsageFlagsKHR allFlags = 
+          VideoDecodeUsageFlagBitsKHR::eDefault
+        | VideoDecodeUsageFlagBitsKHR::eTranscoding
+        | VideoDecodeUsageFlagBitsKHR::eOffline
+        | VideoDecodeUsageFlagBitsKHR::eStreaming;
+  };
+
+  enum class VideoDecodeFlagBitsKHR : VkVideoDecodeFlagsKHR
+  {};
+
+  using VideoDecodeFlagsKHR = Flags<VideoDecodeFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<VideoDecodeFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoDecodeFlagsKHR allFlags =  {};
+  };
+
+  //=== VK_EXT_transform_feedback ===
+
+  enum class PipelineRasterizationStateStreamCreateFlagBitsEXT : VkPipelineRasterizationStateStreamCreateFlagsEXT
+  {};
+
+  using PipelineRasterizationStateStreamCreateFlagsEXT = Flags<PipelineRasterizationStateStreamCreateFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<PipelineRasterizationStateStreamCreateFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineRasterizationStateStreamCreateFlagsEXT allFlags =  {};
+  };
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  //=== VK_EXT_video_encode_h264 ===
+
+  enum class VideoEncodeH264CapabilityFlagBitsEXT : VkVideoEncodeH264CapabilityFlagsEXT
+  {
+    eDirect8X8InferenceEnabled = VK_VIDEO_ENCODE_H264_CAPABILITY_DIRECT_8X8_INFERENCE_ENABLED_BIT_EXT,
+    eDirect8X8InferenceDisabled = VK_VIDEO_ENCODE_H264_CAPABILITY_DIRECT_8X8_INFERENCE_DISABLED_BIT_EXT,
+    eSeparateColourPlane = VK_VIDEO_ENCODE_H264_CAPABILITY_SEPARATE_COLOUR_PLANE_BIT_EXT,
+    eQpprimeYZeroTransformBypass = VK_VIDEO_ENCODE_H264_CAPABILITY_QPPRIME_Y_ZERO_TRANSFORM_BYPASS_BIT_EXT,
+    eScalingLists = VK_VIDEO_ENCODE_H264_CAPABILITY_SCALING_LISTS_BIT_EXT,
+    eHrdCompliance = VK_VIDEO_ENCODE_H264_CAPABILITY_HRD_COMPLIANCE_BIT_EXT,
+    eChromaQpOffset = VK_VIDEO_ENCODE_H264_CAPABILITY_CHROMA_QP_OFFSET_BIT_EXT,
+    eSecondChromaQpOffset = VK_VIDEO_ENCODE_H264_CAPABILITY_SECOND_CHROMA_QP_OFFSET_BIT_EXT,
+    ePicInitQpMinus26 = VK_VIDEO_ENCODE_H264_CAPABILITY_PIC_INIT_QP_MINUS26_BIT_EXT,
+    eWeightedPred = VK_VIDEO_ENCODE_H264_CAPABILITY_WEIGHTED_PRED_BIT_EXT,
+    eWeightedBipredExplicit = VK_VIDEO_ENCODE_H264_CAPABILITY_WEIGHTED_BIPRED_EXPLICIT_BIT_EXT,
+    eWeightedBipredImplicit = VK_VIDEO_ENCODE_H264_CAPABILITY_WEIGHTED_BIPRED_IMPLICIT_BIT_EXT,
+    eWeightedPredNoTable = VK_VIDEO_ENCODE_H264_CAPABILITY_WEIGHTED_PRED_NO_TABLE_BIT_EXT,
+    eTransform8X8 = VK_VIDEO_ENCODE_H264_CAPABILITY_TRANSFORM_8X8_BIT_EXT,
+    eCabac = VK_VIDEO_ENCODE_H264_CAPABILITY_CABAC_BIT_EXT,
+    eCavlc = VK_VIDEO_ENCODE_H264_CAPABILITY_CAVLC_BIT_EXT,
+    eDeblockingFilterDisabled = VK_VIDEO_ENCODE_H264_CAPABILITY_DEBLOCKING_FILTER_DISABLED_BIT_EXT,
+    eDeblockingFilterEnabled = VK_VIDEO_ENCODE_H264_CAPABILITY_DEBLOCKING_FILTER_ENABLED_BIT_EXT,
+    eDeblockingFilterPartial = VK_VIDEO_ENCODE_H264_CAPABILITY_DEBLOCKING_FILTER_PARTIAL_BIT_EXT,
+    eDisableDirectSpatialMvPred = VK_VIDEO_ENCODE_H264_CAPABILITY_DISABLE_DIRECT_SPATIAL_MV_PRED_BIT_EXT,
+    eMultipleSlicePerFrame = VK_VIDEO_ENCODE_H264_CAPABILITY_MULTIPLE_SLICE_PER_FRAME_BIT_EXT,
+    eSliceMbCount = VK_VIDEO_ENCODE_H264_CAPABILITY_SLICE_MB_COUNT_BIT_EXT,
+    eRowUnalignedSlice = VK_VIDEO_ENCODE_H264_CAPABILITY_ROW_UNALIGNED_SLICE_BIT_EXT,
+    eDifferentSliceType = VK_VIDEO_ENCODE_H264_CAPABILITY_DIFFERENT_SLICE_TYPE_BIT_EXT,
+    eBFrameInL1List = VK_VIDEO_ENCODE_H264_CAPABILITY_B_FRAME_IN_L1_LIST_BIT_EXT
+  };
+
+  using VideoEncodeH264CapabilityFlagsEXT = Flags<VideoEncodeH264CapabilityFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<VideoEncodeH264CapabilityFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeH264CapabilityFlagsEXT allFlags = 
+          VideoEncodeH264CapabilityFlagBitsEXT::eDirect8X8InferenceEnabled
+        | VideoEncodeH264CapabilityFlagBitsEXT::eDirect8X8InferenceDisabled
+        | VideoEncodeH264CapabilityFlagBitsEXT::eSeparateColourPlane
+        | VideoEncodeH264CapabilityFlagBitsEXT::eQpprimeYZeroTransformBypass
+        | VideoEncodeH264CapabilityFlagBitsEXT::eScalingLists
+        | VideoEncodeH264CapabilityFlagBitsEXT::eHrdCompliance
+        | VideoEncodeH264CapabilityFlagBitsEXT::eChromaQpOffset
+        | VideoEncodeH264CapabilityFlagBitsEXT::eSecondChromaQpOffset
+        | VideoEncodeH264CapabilityFlagBitsEXT::ePicInitQpMinus26
+        | VideoEncodeH264CapabilityFlagBitsEXT::eWeightedPred
+        | VideoEncodeH264CapabilityFlagBitsEXT::eWeightedBipredExplicit
+        | VideoEncodeH264CapabilityFlagBitsEXT::eWeightedBipredImplicit
+        | VideoEncodeH264CapabilityFlagBitsEXT::eWeightedPredNoTable
+        | VideoEncodeH264CapabilityFlagBitsEXT::eTransform8X8
+        | VideoEncodeH264CapabilityFlagBitsEXT::eCabac
+        | VideoEncodeH264CapabilityFlagBitsEXT::eCavlc
+        | VideoEncodeH264CapabilityFlagBitsEXT::eDeblockingFilterDisabled
+        | VideoEncodeH264CapabilityFlagBitsEXT::eDeblockingFilterEnabled
+        | VideoEncodeH264CapabilityFlagBitsEXT::eDeblockingFilterPartial
+        | VideoEncodeH264CapabilityFlagBitsEXT::eDisableDirectSpatialMvPred
+        | VideoEncodeH264CapabilityFlagBitsEXT::eMultipleSlicePerFrame
+        | VideoEncodeH264CapabilityFlagBitsEXT::eSliceMbCount
+        | VideoEncodeH264CapabilityFlagBitsEXT::eRowUnalignedSlice
+        | VideoEncodeH264CapabilityFlagBitsEXT::eDifferentSliceType
+        | VideoEncodeH264CapabilityFlagBitsEXT::eBFrameInL1List;
+  };
+
+  enum class VideoEncodeH264InputModeFlagBitsEXT : VkVideoEncodeH264InputModeFlagsEXT
+  {
+    eFrame = VK_VIDEO_ENCODE_H264_INPUT_MODE_FRAME_BIT_EXT,
+    eSlice = VK_VIDEO_ENCODE_H264_INPUT_MODE_SLICE_BIT_EXT,
+    eNonVcl = VK_VIDEO_ENCODE_H264_INPUT_MODE_NON_VCL_BIT_EXT
+  };
+
+  using VideoEncodeH264InputModeFlagsEXT = Flags<VideoEncodeH264InputModeFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<VideoEncodeH264InputModeFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeH264InputModeFlagsEXT allFlags = 
+          VideoEncodeH264InputModeFlagBitsEXT::eFrame
+        | VideoEncodeH264InputModeFlagBitsEXT::eSlice
+        | VideoEncodeH264InputModeFlagBitsEXT::eNonVcl;
+  };
+
+  enum class VideoEncodeH264OutputModeFlagBitsEXT : VkVideoEncodeH264OutputModeFlagsEXT
+  {
+    eFrame = VK_VIDEO_ENCODE_H264_OUTPUT_MODE_FRAME_BIT_EXT,
+    eSlice = VK_VIDEO_ENCODE_H264_OUTPUT_MODE_SLICE_BIT_EXT,
+    eNonVcl = VK_VIDEO_ENCODE_H264_OUTPUT_MODE_NON_VCL_BIT_EXT
+  };
+
+  using VideoEncodeH264OutputModeFlagsEXT = Flags<VideoEncodeH264OutputModeFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<VideoEncodeH264OutputModeFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeH264OutputModeFlagsEXT allFlags = 
+          VideoEncodeH264OutputModeFlagBitsEXT::eFrame
+        | VideoEncodeH264OutputModeFlagBitsEXT::eSlice
+        | VideoEncodeH264OutputModeFlagBitsEXT::eNonVcl;
+  };
+
+  enum class VideoEncodeH264RateControlStructureEXT
+  {
+    eUnknown = VK_VIDEO_ENCODE_H264_RATE_CONTROL_STRUCTURE_UNKNOWN_EXT,
+    eFlat = VK_VIDEO_ENCODE_H264_RATE_CONTROL_STRUCTURE_FLAT_EXT,
+    eDyadic = VK_VIDEO_ENCODE_H264_RATE_CONTROL_STRUCTURE_DYADIC_EXT
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  //=== VK_EXT_video_encode_h265 ===
+
+  enum class VideoEncodeH265CapabilityFlagBitsEXT : VkVideoEncodeH265CapabilityFlagsEXT
+  {
+    eSeparateColourPlane = VK_VIDEO_ENCODE_H265_CAPABILITY_SEPARATE_COLOUR_PLANE_BIT_EXT,
+    eScalingLists = VK_VIDEO_ENCODE_H265_CAPABILITY_SCALING_LISTS_BIT_EXT,
+    eSampleAdaptiveOffsetEnabled = VK_VIDEO_ENCODE_H265_CAPABILITY_SAMPLE_ADAPTIVE_OFFSET_ENABLED_BIT_EXT,
+    ePcmEnable = VK_VIDEO_ENCODE_H265_CAPABILITY_PCM_ENABLE_BIT_EXT,
+    eSpsTemporalMvpEnabled = VK_VIDEO_ENCODE_H265_CAPABILITY_SPS_TEMPORAL_MVP_ENABLED_BIT_EXT,
+    eHrdCompliance = VK_VIDEO_ENCODE_H265_CAPABILITY_HRD_COMPLIANCE_BIT_EXT,
+    eInitQpMinus26 = VK_VIDEO_ENCODE_H265_CAPABILITY_INIT_QP_MINUS26_BIT_EXT,
+    eLog2ParallelMergeLevelMinus2 = VK_VIDEO_ENCODE_H265_CAPABILITY_LOG2_PARALLEL_MERGE_LEVEL_MINUS2_BIT_EXT,
+    eSignDataHidingEnabled = VK_VIDEO_ENCODE_H265_CAPABILITY_SIGN_DATA_HIDING_ENABLED_BIT_EXT,
+    eTransformSkipEnabled = VK_VIDEO_ENCODE_H265_CAPABILITY_TRANSFORM_SKIP_ENABLED_BIT_EXT,
+    eTransformSkipDisabled = VK_VIDEO_ENCODE_H265_CAPABILITY_TRANSFORM_SKIP_DISABLED_BIT_EXT,
+    ePpsSliceChromaQpOffsetsPresent = VK_VIDEO_ENCODE_H265_CAPABILITY_PPS_SLICE_CHROMA_QP_OFFSETS_PRESENT_BIT_EXT,
+    eWeightedPred = VK_VIDEO_ENCODE_H265_CAPABILITY_WEIGHTED_PRED_BIT_EXT,
+    eWeightedBipred = VK_VIDEO_ENCODE_H265_CAPABILITY_WEIGHTED_BIPRED_BIT_EXT,
+    eWeightedPredNoTable = VK_VIDEO_ENCODE_H265_CAPABILITY_WEIGHTED_PRED_NO_TABLE_BIT_EXT,
+    eTransquantBypassEnabled = VK_VIDEO_ENCODE_H265_CAPABILITY_TRANSQUANT_BYPASS_ENABLED_BIT_EXT,
+    eEntropyCodingSyncEnabled = VK_VIDEO_ENCODE_H265_CAPABILITY_ENTROPY_CODING_SYNC_ENABLED_BIT_EXT,
+    eDeblockingFilterOverrideEnabled = VK_VIDEO_ENCODE_H265_CAPABILITY_DEBLOCKING_FILTER_OVERRIDE_ENABLED_BIT_EXT,
+    eMultipleTilePerFrame = VK_VIDEO_ENCODE_H265_CAPABILITY_MULTIPLE_TILE_PER_FRAME_BIT_EXT,
+    eMultipleSlicePerTile = VK_VIDEO_ENCODE_H265_CAPABILITY_MULTIPLE_SLICE_PER_TILE_BIT_EXT,
+    eMultipleTilePerSlice = VK_VIDEO_ENCODE_H265_CAPABILITY_MULTIPLE_TILE_PER_SLICE_BIT_EXT,
+    eSliceSegmentCtbCount = VK_VIDEO_ENCODE_H265_CAPABILITY_SLICE_SEGMENT_CTB_COUNT_BIT_EXT,
+    eRowUnalignedSliceSegment = VK_VIDEO_ENCODE_H265_CAPABILITY_ROW_UNALIGNED_SLICE_SEGMENT_BIT_EXT,
+    eDependentSliceSegment = VK_VIDEO_ENCODE_H265_CAPABILITY_DEPENDENT_SLICE_SEGMENT_BIT_EXT,
+    eDifferentSliceType = VK_VIDEO_ENCODE_H265_CAPABILITY_DIFFERENT_SLICE_TYPE_BIT_EXT,
+    eBFrameInL1List = VK_VIDEO_ENCODE_H265_CAPABILITY_B_FRAME_IN_L1_LIST_BIT_EXT
+  };
+
+  using VideoEncodeH265CapabilityFlagsEXT = Flags<VideoEncodeH265CapabilityFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<VideoEncodeH265CapabilityFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeH265CapabilityFlagsEXT allFlags = 
+          VideoEncodeH265CapabilityFlagBitsEXT::eSeparateColourPlane
+        | VideoEncodeH265CapabilityFlagBitsEXT::eScalingLists
+        | VideoEncodeH265CapabilityFlagBitsEXT::eSampleAdaptiveOffsetEnabled
+        | VideoEncodeH265CapabilityFlagBitsEXT::ePcmEnable
+        | VideoEncodeH265CapabilityFlagBitsEXT::eSpsTemporalMvpEnabled
+        | VideoEncodeH265CapabilityFlagBitsEXT::eHrdCompliance
+        | VideoEncodeH265CapabilityFlagBitsEXT::eInitQpMinus26
+        | VideoEncodeH265CapabilityFlagBitsEXT::eLog2ParallelMergeLevelMinus2
+        | VideoEncodeH265CapabilityFlagBitsEXT::eSignDataHidingEnabled
+        | VideoEncodeH265CapabilityFlagBitsEXT::eTransformSkipEnabled
+        | VideoEncodeH265CapabilityFlagBitsEXT::eTransformSkipDisabled
+        | VideoEncodeH265CapabilityFlagBitsEXT::ePpsSliceChromaQpOffsetsPresent
+        | VideoEncodeH265CapabilityFlagBitsEXT::eWeightedPred
+        | VideoEncodeH265CapabilityFlagBitsEXT::eWeightedBipred
+        | VideoEncodeH265CapabilityFlagBitsEXT::eWeightedPredNoTable
+        | VideoEncodeH265CapabilityFlagBitsEXT::eTransquantBypassEnabled
+        | VideoEncodeH265CapabilityFlagBitsEXT::eEntropyCodingSyncEnabled
+        | VideoEncodeH265CapabilityFlagBitsEXT::eDeblockingFilterOverrideEnabled
+        | VideoEncodeH265CapabilityFlagBitsEXT::eMultipleTilePerFrame
+        | VideoEncodeH265CapabilityFlagBitsEXT::eMultipleSlicePerTile
+        | VideoEncodeH265CapabilityFlagBitsEXT::eMultipleTilePerSlice
+        | VideoEncodeH265CapabilityFlagBitsEXT::eSliceSegmentCtbCount
+        | VideoEncodeH265CapabilityFlagBitsEXT::eRowUnalignedSliceSegment
+        | VideoEncodeH265CapabilityFlagBitsEXT::eDependentSliceSegment
+        | VideoEncodeH265CapabilityFlagBitsEXT::eDifferentSliceType
+        | VideoEncodeH265CapabilityFlagBitsEXT::eBFrameInL1List;
+  };
+
+  enum class VideoEncodeH265InputModeFlagBitsEXT : VkVideoEncodeH265InputModeFlagsEXT
+  {
+    eFrame = VK_VIDEO_ENCODE_H265_INPUT_MODE_FRAME_BIT_EXT,
+    eSliceSegment = VK_VIDEO_ENCODE_H265_INPUT_MODE_SLICE_SEGMENT_BIT_EXT,
+    eNonVcl = VK_VIDEO_ENCODE_H265_INPUT_MODE_NON_VCL_BIT_EXT
+  };
+
+  using VideoEncodeH265InputModeFlagsEXT = Flags<VideoEncodeH265InputModeFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<VideoEncodeH265InputModeFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeH265InputModeFlagsEXT allFlags = 
+          VideoEncodeH265InputModeFlagBitsEXT::eFrame
+        | VideoEncodeH265InputModeFlagBitsEXT::eSliceSegment
+        | VideoEncodeH265InputModeFlagBitsEXT::eNonVcl;
+  };
+
+  enum class VideoEncodeH265OutputModeFlagBitsEXT : VkVideoEncodeH265OutputModeFlagsEXT
+  {
+    eFrame = VK_VIDEO_ENCODE_H265_OUTPUT_MODE_FRAME_BIT_EXT,
+    eSliceSegment = VK_VIDEO_ENCODE_H265_OUTPUT_MODE_SLICE_SEGMENT_BIT_EXT,
+    eNonVcl = VK_VIDEO_ENCODE_H265_OUTPUT_MODE_NON_VCL_BIT_EXT
+  };
+
+  using VideoEncodeH265OutputModeFlagsEXT = Flags<VideoEncodeH265OutputModeFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<VideoEncodeH265OutputModeFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeH265OutputModeFlagsEXT allFlags = 
+          VideoEncodeH265OutputModeFlagBitsEXT::eFrame
+        | VideoEncodeH265OutputModeFlagBitsEXT::eSliceSegment
+        | VideoEncodeH265OutputModeFlagBitsEXT::eNonVcl;
+  };
+
+  enum class VideoEncodeH265CtbSizeFlagBitsEXT : VkVideoEncodeH265CtbSizeFlagsEXT
+  {
+    e16 = VK_VIDEO_ENCODE_H265_CTB_SIZE_16_BIT_EXT,
+    e32 = VK_VIDEO_ENCODE_H265_CTB_SIZE_32_BIT_EXT,
+    e64 = VK_VIDEO_ENCODE_H265_CTB_SIZE_64_BIT_EXT
+  };
+
+  using VideoEncodeH265CtbSizeFlagsEXT = Flags<VideoEncodeH265CtbSizeFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<VideoEncodeH265CtbSizeFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeH265CtbSizeFlagsEXT allFlags = 
+          VideoEncodeH265CtbSizeFlagBitsEXT::e16
+        | VideoEncodeH265CtbSizeFlagBitsEXT::e32
+        | VideoEncodeH265CtbSizeFlagBitsEXT::e64;
+  };
+
+  enum class VideoEncodeH265TransformBlockSizeFlagBitsEXT : VkVideoEncodeH265TransformBlockSizeFlagsEXT
+  {
+    e4 = VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_4_BIT_EXT,
+    e8 = VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_8_BIT_EXT,
+    e16 = VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_16_BIT_EXT,
+    e32 = VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_32_BIT_EXT
+  };
+
+  using VideoEncodeH265TransformBlockSizeFlagsEXT = Flags<VideoEncodeH265TransformBlockSizeFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<VideoEncodeH265TransformBlockSizeFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeH265TransformBlockSizeFlagsEXT allFlags = 
+          VideoEncodeH265TransformBlockSizeFlagBitsEXT::e4
+        | VideoEncodeH265TransformBlockSizeFlagBitsEXT::e8
+        | VideoEncodeH265TransformBlockSizeFlagBitsEXT::e16
+        | VideoEncodeH265TransformBlockSizeFlagBitsEXT::e32;
+  };
+
+  enum class VideoEncodeH265RateControlStructureEXT
+  {
+    eUnknown = VK_VIDEO_ENCODE_H265_RATE_CONTROL_STRUCTURE_UNKNOWN_EXT,
+    eFlat = VK_VIDEO_ENCODE_H265_RATE_CONTROL_STRUCTURE_FLAT_EXT,
+    eDyadic = VK_VIDEO_ENCODE_H265_RATE_CONTROL_STRUCTURE_DYADIC_EXT
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  //=== VK_KHR_video_decode_h264 ===
+
+  enum class VideoDecodeH264PictureLayoutFlagBitsKHR : VkVideoDecodeH264PictureLayoutFlagsKHR
+  {
+    eProgressive = VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_PROGRESSIVE_KHR,
+    eInterlacedInterleavedLines = VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_INTERLACED_INTERLEAVED_LINES_BIT_KHR,
+    eInterlacedSeparatePlanes = VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_INTERLACED_SEPARATE_PLANES_BIT_KHR
+  };
+
+  using VideoDecodeH264PictureLayoutFlagsKHR = Flags<VideoDecodeH264PictureLayoutFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<VideoDecodeH264PictureLayoutFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoDecodeH264PictureLayoutFlagsKHR allFlags = 
+          VideoDecodeH264PictureLayoutFlagBitsKHR::eProgressive
+        | VideoDecodeH264PictureLayoutFlagBitsKHR::eInterlacedInterleavedLines
+        | VideoDecodeH264PictureLayoutFlagBitsKHR::eInterlacedSeparatePlanes;
+  };
+
+  //=== VK_AMD_shader_info ===
+
+  enum class ShaderInfoTypeAMD
+  {
+    eStatistics = VK_SHADER_INFO_TYPE_STATISTICS_AMD,
+    eBinary = VK_SHADER_INFO_TYPE_BINARY_AMD,
+    eDisassembly = VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD
+  };
+
+#if defined( VK_USE_PLATFORM_GGP )
+  //=== VK_GGP_stream_descriptor_surface ===
+
+  enum class StreamDescriptorSurfaceCreateFlagBitsGGP : VkStreamDescriptorSurfaceCreateFlagsGGP
+  {};
+
+  using StreamDescriptorSurfaceCreateFlagsGGP = Flags<StreamDescriptorSurfaceCreateFlagBitsGGP>;
+
+
+  template <> struct FlagTraits<StreamDescriptorSurfaceCreateFlagBitsGGP>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StreamDescriptorSurfaceCreateFlagsGGP allFlags =  {};
+  };
+#endif /*VK_USE_PLATFORM_GGP*/
+
+  //=== VK_NV_external_memory_capabilities ===
+
+  enum class ExternalMemoryHandleTypeFlagBitsNV : VkExternalMemoryHandleTypeFlagsNV
+  {
+    eOpaqueWin32 = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV,
+    eOpaqueWin32Kmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV,
+    eD3D11Image = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_BIT_NV,
+    eD3D11ImageKmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV
+  };
+
+  using ExternalMemoryHandleTypeFlagsNV = Flags<ExternalMemoryHandleTypeFlagBitsNV>;
+
+
+  template <> struct FlagTraits<ExternalMemoryHandleTypeFlagBitsNV>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ExternalMemoryHandleTypeFlagsNV allFlags = 
+          ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32
+        | ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt
+        | ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image
+        | ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt;
+  };
+
+  enum class ExternalMemoryFeatureFlagBitsNV : VkExternalMemoryFeatureFlagsNV
+  {
+    eDedicatedOnly = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV,
+    eExportable = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV,
+    eImportable = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV
+  };
+
+  using ExternalMemoryFeatureFlagsNV = Flags<ExternalMemoryFeatureFlagBitsNV>;
+
+
+  template <> struct FlagTraits<ExternalMemoryFeatureFlagBitsNV>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ExternalMemoryFeatureFlagsNV allFlags = 
+          ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly
+        | ExternalMemoryFeatureFlagBitsNV::eExportable
+        | ExternalMemoryFeatureFlagBitsNV::eImportable;
+  };
+
+  //=== VK_EXT_validation_flags ===
+
+  enum class ValidationCheckEXT
+  {
+    eAll = VK_VALIDATION_CHECK_ALL_EXT,
+    eShaders = VK_VALIDATION_CHECK_SHADERS_EXT
+  };
+
+#if defined( VK_USE_PLATFORM_VI_NN )
+  //=== VK_NN_vi_surface ===
+
+  enum class ViSurfaceCreateFlagBitsNN : VkViSurfaceCreateFlagsNN
+  {};
+
+  using ViSurfaceCreateFlagsNN = Flags<ViSurfaceCreateFlagBitsNN>;
+
+
+  template <> struct FlagTraits<ViSurfaceCreateFlagBitsNN>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ViSurfaceCreateFlagsNN allFlags =  {};
+  };
+#endif /*VK_USE_PLATFORM_VI_NN*/
+
+  //=== VK_EXT_pipeline_robustness ===
+
+  enum class PipelineRobustnessBufferBehaviorEXT
+  {
+    eDeviceDefault = VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_DEVICE_DEFAULT_EXT,
+    eDisabled = VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_DISABLED_EXT,
+    eRobustBufferAccess = VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_EXT,
+    eRobustBufferAccess2 = VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_2_EXT
+  };
+
+  enum class PipelineRobustnessImageBehaviorEXT
+  {
+    eDeviceDefault = VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_DEVICE_DEFAULT_EXT,
+    eDisabled = VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_DISABLED_EXT,
+    eRobustImageAccess = VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_ROBUST_IMAGE_ACCESS_EXT,
+    eRobustImageAccess2 = VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_ROBUST_IMAGE_ACCESS_2_EXT
+  };
+
+  //=== VK_EXT_conditional_rendering ===
+
+  enum class ConditionalRenderingFlagBitsEXT : VkConditionalRenderingFlagsEXT
+  {
+    eInverted = VK_CONDITIONAL_RENDERING_INVERTED_BIT_EXT
+  };
+
+  using ConditionalRenderingFlagsEXT = Flags<ConditionalRenderingFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<ConditionalRenderingFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ConditionalRenderingFlagsEXT allFlags = 
+          ConditionalRenderingFlagBitsEXT::eInverted;
+  };
+
+  //=== VK_EXT_display_surface_counter ===
+
+  enum class SurfaceCounterFlagBitsEXT : VkSurfaceCounterFlagsEXT
+  {
+    eVblank = VK_SURFACE_COUNTER_VBLANK_BIT_EXT
+  };
+
+  using SurfaceCounterFlagsEXT = Flags<SurfaceCounterFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<SurfaceCounterFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR SurfaceCounterFlagsEXT allFlags = 
+          SurfaceCounterFlagBitsEXT::eVblank;
+  };
+
+  //=== VK_EXT_display_control ===
+
+  enum class DisplayPowerStateEXT
+  {
+    eOff = VK_DISPLAY_POWER_STATE_OFF_EXT,
+    eSuspend = VK_DISPLAY_POWER_STATE_SUSPEND_EXT,
+    eOn = VK_DISPLAY_POWER_STATE_ON_EXT
+  };
+
+  enum class DeviceEventTypeEXT
+  {
+    eDisplayHotplug = VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT
+  };
+
+  enum class DisplayEventTypeEXT
+  {
+    eFirstPixelOut = VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT
+  };
+
+  //=== VK_NV_viewport_swizzle ===
+
+  enum class ViewportCoordinateSwizzleNV
+  {
+    ePositiveX = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV,
+    eNegativeX = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_X_NV,
+    ePositiveY = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Y_NV,
+    eNegativeY = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Y_NV,
+    ePositiveZ = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Z_NV,
+    eNegativeZ = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Z_NV,
+    ePositiveW = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_W_NV,
+    eNegativeW = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV
+  };
+
+  enum class PipelineViewportSwizzleStateCreateFlagBitsNV : VkPipelineViewportSwizzleStateCreateFlagsNV
+  {};
+
+  using PipelineViewportSwizzleStateCreateFlagsNV = Flags<PipelineViewportSwizzleStateCreateFlagBitsNV>;
+
+
+  template <> struct FlagTraits<PipelineViewportSwizzleStateCreateFlagBitsNV>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineViewportSwizzleStateCreateFlagsNV allFlags =  {};
+  };
+
+  //=== VK_EXT_discard_rectangles ===
+
+  enum class DiscardRectangleModeEXT
+  {
+    eInclusive = VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT,
+    eExclusive = VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT
+  };
+
+  enum class PipelineDiscardRectangleStateCreateFlagBitsEXT : VkPipelineDiscardRectangleStateCreateFlagsEXT
+  {};
+
+  using PipelineDiscardRectangleStateCreateFlagsEXT = Flags<PipelineDiscardRectangleStateCreateFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<PipelineDiscardRectangleStateCreateFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineDiscardRectangleStateCreateFlagsEXT allFlags =  {};
+  };
+
+  //=== VK_EXT_conservative_rasterization ===
+
+  enum class ConservativeRasterizationModeEXT
+  {
+    eDisabled = VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT,
+    eOverestimate = VK_CONSERVATIVE_RASTERIZATION_MODE_OVERESTIMATE_EXT,
+    eUnderestimate = VK_CONSERVATIVE_RASTERIZATION_MODE_UNDERESTIMATE_EXT
+  };
+
+  enum class PipelineRasterizationConservativeStateCreateFlagBitsEXT : VkPipelineRasterizationConservativeStateCreateFlagsEXT
+  {};
+
+  using PipelineRasterizationConservativeStateCreateFlagsEXT = Flags<PipelineRasterizationConservativeStateCreateFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<PipelineRasterizationConservativeStateCreateFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineRasterizationConservativeStateCreateFlagsEXT allFlags =  {};
+  };
+
+  //=== VK_EXT_depth_clip_enable ===
+
+  enum class PipelineRasterizationDepthClipStateCreateFlagBitsEXT : VkPipelineRasterizationDepthClipStateCreateFlagsEXT
+  {};
+
+  using PipelineRasterizationDepthClipStateCreateFlagsEXT = Flags<PipelineRasterizationDepthClipStateCreateFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<PipelineRasterizationDepthClipStateCreateFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineRasterizationDepthClipStateCreateFlagsEXT allFlags =  {};
+  };
+
+  //=== VK_KHR_performance_query ===
+
+  enum class PerformanceCounterDescriptionFlagBitsKHR : VkPerformanceCounterDescriptionFlagsKHR
+  {
+    ePerformanceImpacting = VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_BIT_KHR,
+    eConcurrentlyImpacted = VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_BIT_KHR
+  };
+
+  using PerformanceCounterDescriptionFlagsKHR = Flags<PerformanceCounterDescriptionFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<PerformanceCounterDescriptionFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PerformanceCounterDescriptionFlagsKHR allFlags = 
+          PerformanceCounterDescriptionFlagBitsKHR::ePerformanceImpacting
+        | PerformanceCounterDescriptionFlagBitsKHR::eConcurrentlyImpacted;
+  };
+
+  enum class PerformanceCounterScopeKHR
+  {
+    eCommandBuffer = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_BUFFER_KHR,
+    eRenderPass = VK_PERFORMANCE_COUNTER_SCOPE_RENDER_PASS_KHR,
+    eCommand = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_KHR,
+    eVkQueryScopeCommandBuffer = VK_QUERY_SCOPE_COMMAND_BUFFER_KHR,
+    eVkQueryScopeCommand = VK_QUERY_SCOPE_COMMAND_KHR,
+    eVkQueryScopeRenderPass = VK_QUERY_SCOPE_RENDER_PASS_KHR
+  };
+
+  enum class PerformanceCounterStorageKHR
+  {
+    eInt32 = VK_PERFORMANCE_COUNTER_STORAGE_INT32_KHR,
+    eInt64 = VK_PERFORMANCE_COUNTER_STORAGE_INT64_KHR,
+    eUint32 = VK_PERFORMANCE_COUNTER_STORAGE_UINT32_KHR,
+    eUint64 = VK_PERFORMANCE_COUNTER_STORAGE_UINT64_KHR,
+    eFloat32 = VK_PERFORMANCE_COUNTER_STORAGE_FLOAT32_KHR,
+    eFloat64 = VK_PERFORMANCE_COUNTER_STORAGE_FLOAT64_KHR
+  };
+
+  enum class PerformanceCounterUnitKHR
+  {
+    eGeneric = VK_PERFORMANCE_COUNTER_UNIT_GENERIC_KHR,
+    ePercentage = VK_PERFORMANCE_COUNTER_UNIT_PERCENTAGE_KHR,
+    eNanoseconds = VK_PERFORMANCE_COUNTER_UNIT_NANOSECONDS_KHR,
+    eBytes = VK_PERFORMANCE_COUNTER_UNIT_BYTES_KHR,
+    eBytesPerSecond = VK_PERFORMANCE_COUNTER_UNIT_BYTES_PER_SECOND_KHR,
+    eKelvin = VK_PERFORMANCE_COUNTER_UNIT_KELVIN_KHR,
+    eWatts = VK_PERFORMANCE_COUNTER_UNIT_WATTS_KHR,
+    eVolts = VK_PERFORMANCE_COUNTER_UNIT_VOLTS_KHR,
+    eAmps = VK_PERFORMANCE_COUNTER_UNIT_AMPS_KHR,
+    eHertz = VK_PERFORMANCE_COUNTER_UNIT_HERTZ_KHR,
+    eCycles = VK_PERFORMANCE_COUNTER_UNIT_CYCLES_KHR
+  };
+
+  enum class AcquireProfilingLockFlagBitsKHR : VkAcquireProfilingLockFlagsKHR
+  {};
+
+  using AcquireProfilingLockFlagsKHR = Flags<AcquireProfilingLockFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<AcquireProfilingLockFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR AcquireProfilingLockFlagsKHR allFlags =  {};
+  };
+
+#if defined( VK_USE_PLATFORM_IOS_MVK )
+  //=== VK_MVK_ios_surface ===
+
+  enum class IOSSurfaceCreateFlagBitsMVK : VkIOSSurfaceCreateFlagsMVK
+  {};
+
+  using IOSSurfaceCreateFlagsMVK = Flags<IOSSurfaceCreateFlagBitsMVK>;
+
+
+  template <> struct FlagTraits<IOSSurfaceCreateFlagBitsMVK>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR IOSSurfaceCreateFlagsMVK allFlags =  {};
+  };
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+#if defined( VK_USE_PLATFORM_MACOS_MVK )
+  //=== VK_MVK_macos_surface ===
+
+  enum class MacOSSurfaceCreateFlagBitsMVK : VkMacOSSurfaceCreateFlagsMVK
+  {};
+
+  using MacOSSurfaceCreateFlagsMVK = Flags<MacOSSurfaceCreateFlagBitsMVK>;
+
+
+  template <> struct FlagTraits<MacOSSurfaceCreateFlagBitsMVK>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR MacOSSurfaceCreateFlagsMVK allFlags =  {};
+  };
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+
+  //=== VK_EXT_debug_utils ===
+
+  enum class DebugUtilsMessageSeverityFlagBitsEXT : VkDebugUtilsMessageSeverityFlagsEXT
+  {
+    eVerbose = VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT,
+    eInfo = VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT,
+    eWarning = VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT,
+    eError = VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT
+  };
+
+  using DebugUtilsMessageSeverityFlagsEXT = Flags<DebugUtilsMessageSeverityFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<DebugUtilsMessageSeverityFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR DebugUtilsMessageSeverityFlagsEXT allFlags = 
+          DebugUtilsMessageSeverityFlagBitsEXT::eVerbose
+        | DebugUtilsMessageSeverityFlagBitsEXT::eInfo
+        | DebugUtilsMessageSeverityFlagBitsEXT::eWarning
+        | DebugUtilsMessageSeverityFlagBitsEXT::eError;
+  };
+
+  enum class DebugUtilsMessageTypeFlagBitsEXT : VkDebugUtilsMessageTypeFlagsEXT
+  {
+    eGeneral = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT,
+    eValidation = VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT,
+    ePerformance = VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT,
+    eDeviceAddressBinding = VK_DEBUG_UTILS_MESSAGE_TYPE_DEVICE_ADDRESS_BINDING_BIT_EXT
+  };
+
+  using DebugUtilsMessageTypeFlagsEXT = Flags<DebugUtilsMessageTypeFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<DebugUtilsMessageTypeFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR DebugUtilsMessageTypeFlagsEXT allFlags = 
+          DebugUtilsMessageTypeFlagBitsEXT::eGeneral
+        | DebugUtilsMessageTypeFlagBitsEXT::eValidation
+        | DebugUtilsMessageTypeFlagBitsEXT::ePerformance
+        | DebugUtilsMessageTypeFlagBitsEXT::eDeviceAddressBinding;
+  };
+
+  enum class DebugUtilsMessengerCallbackDataFlagBitsEXT : VkDebugUtilsMessengerCallbackDataFlagsEXT
+  {};
+
+  using DebugUtilsMessengerCallbackDataFlagsEXT = Flags<DebugUtilsMessengerCallbackDataFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<DebugUtilsMessengerCallbackDataFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR DebugUtilsMessengerCallbackDataFlagsEXT allFlags =  {};
+  };
+
+  enum class DebugUtilsMessengerCreateFlagBitsEXT : VkDebugUtilsMessengerCreateFlagsEXT
+  {};
+
+  using DebugUtilsMessengerCreateFlagsEXT = Flags<DebugUtilsMessengerCreateFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<DebugUtilsMessengerCreateFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR DebugUtilsMessengerCreateFlagsEXT allFlags =  {};
+  };
+
+  //=== VK_EXT_blend_operation_advanced ===
+
+  enum class BlendOverlapEXT
+  {
+    eUncorrelated = VK_BLEND_OVERLAP_UNCORRELATED_EXT,
+    eDisjoint = VK_BLEND_OVERLAP_DISJOINT_EXT,
+    eConjoint = VK_BLEND_OVERLAP_CONJOINT_EXT
+  };
+
+  //=== VK_NV_fragment_coverage_to_color ===
+
+  enum class PipelineCoverageToColorStateCreateFlagBitsNV : VkPipelineCoverageToColorStateCreateFlagsNV
+  {};
+
+  using PipelineCoverageToColorStateCreateFlagsNV = Flags<PipelineCoverageToColorStateCreateFlagBitsNV>;
+
+
+  template <> struct FlagTraits<PipelineCoverageToColorStateCreateFlagBitsNV>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineCoverageToColorStateCreateFlagsNV allFlags =  {};
+  };
+
+  //=== VK_KHR_acceleration_structure ===
+
+  enum class AccelerationStructureTypeKHR
+  {
+    eTopLevel = VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR,
+    eBottomLevel = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR,
+    eGeneric = VK_ACCELERATION_STRUCTURE_TYPE_GENERIC_KHR
+  };
+  using AccelerationStructureTypeNV = AccelerationStructureTypeKHR;
+
+  enum class AccelerationStructureBuildTypeKHR
+  {
+    eHost = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_KHR,
+    eDevice = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR,
+    eHostOrDevice = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_OR_DEVICE_KHR
+  };
+
+  enum class GeometryFlagBitsKHR : VkGeometryFlagsKHR
+  {
+    eOpaque = VK_GEOMETRY_OPAQUE_BIT_KHR,
+    eNoDuplicateAnyHitInvocation = VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_KHR
+  };
+  using GeometryFlagBitsNV = GeometryFlagBitsKHR;
+
+  using GeometryFlagsKHR = Flags<GeometryFlagBitsKHR>;
+  using GeometryFlagsNV = GeometryFlagsKHR;
+
+
+  template <> struct FlagTraits<GeometryFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR GeometryFlagsKHR allFlags = 
+          GeometryFlagBitsKHR::eOpaque
+        | GeometryFlagBitsKHR::eNoDuplicateAnyHitInvocation;
+  };
+
+  enum class GeometryInstanceFlagBitsKHR : VkGeometryInstanceFlagsKHR
+  {
+    eTriangleFacingCullDisable = VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR,
+    eTriangleFlipFacing = VK_GEOMETRY_INSTANCE_TRIANGLE_FLIP_FACING_BIT_KHR,
+    eForceOpaque = VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_KHR,
+    eForceNoOpaque = VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_KHR,
+    eForceOpacityMicromap2StateEXT = VK_GEOMETRY_INSTANCE_FORCE_OPACITY_MICROMAP_2_STATE_EXT,
+    eDisableOpacityMicromapsEXT = VK_GEOMETRY_INSTANCE_DISABLE_OPACITY_MICROMAPS_EXT,
+    eTriangleCullDisable = VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV,
+    eTriangleFrontCounterclockwiseKHR = VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_KHR,
+    eTriangleFrontCounterclockwise = VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_NV
+  };
+  using GeometryInstanceFlagBitsNV = GeometryInstanceFlagBitsKHR;
+
+  using GeometryInstanceFlagsKHR = Flags<GeometryInstanceFlagBitsKHR>;
+  using GeometryInstanceFlagsNV = GeometryInstanceFlagsKHR;
+
+
+  template <> struct FlagTraits<GeometryInstanceFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR GeometryInstanceFlagsKHR allFlags = 
+          GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable
+        | GeometryInstanceFlagBitsKHR::eTriangleFlipFacing
+        | GeometryInstanceFlagBitsKHR::eForceOpaque
+        | GeometryInstanceFlagBitsKHR::eForceNoOpaque
+        | GeometryInstanceFlagBitsKHR::eForceOpacityMicromap2StateEXT
+        | GeometryInstanceFlagBitsKHR::eDisableOpacityMicromapsEXT;
+  };
+
+  enum class BuildAccelerationStructureFlagBitsKHR : VkBuildAccelerationStructureFlagsKHR
+  {
+    eAllowUpdate = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR,
+    eAllowCompaction = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR,
+    ePreferFastTrace = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR,
+    ePreferFastBuild = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_KHR,
+    eLowMemory = VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_KHR,
+    eMotionNV = VK_BUILD_ACCELERATION_STRUCTURE_MOTION_BIT_NV,
+    eAllowOpacityMicromapUpdateEXT = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_OPACITY_MICROMAP_UPDATE_EXT,
+    eAllowDisableOpacityMicromapsEXT = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DISABLE_OPACITY_MICROMAPS_EXT,
+    eAllowOpacityMicromapDataUpdateEXT = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_OPACITY_MICROMAP_DATA_UPDATE_EXT
+  };
+  using BuildAccelerationStructureFlagBitsNV = BuildAccelerationStructureFlagBitsKHR;
+
+  using BuildAccelerationStructureFlagsKHR = Flags<BuildAccelerationStructureFlagBitsKHR>;
+  using BuildAccelerationStructureFlagsNV = BuildAccelerationStructureFlagsKHR;
+
+
+  template <> struct FlagTraits<BuildAccelerationStructureFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR BuildAccelerationStructureFlagsKHR allFlags = 
+          BuildAccelerationStructureFlagBitsKHR::eAllowUpdate
+        | BuildAccelerationStructureFlagBitsKHR::eAllowCompaction
+        | BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace
+        | BuildAccelerationStructureFlagBitsKHR::ePreferFastBuild
+        | BuildAccelerationStructureFlagBitsKHR::eLowMemory
+        | BuildAccelerationStructureFlagBitsKHR::eMotionNV
+        | BuildAccelerationStructureFlagBitsKHR::eAllowOpacityMicromapUpdateEXT
+        | BuildAccelerationStructureFlagBitsKHR::eAllowDisableOpacityMicromapsEXT
+        | BuildAccelerationStructureFlagBitsKHR::eAllowOpacityMicromapDataUpdateEXT;
+  };
+
+  enum class CopyAccelerationStructureModeKHR
+  {
+    eClone = VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_KHR,
+    eCompact = VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR,
+    eSerialize = VK_COPY_ACCELERATION_STRUCTURE_MODE_SERIALIZE_KHR,
+    eDeserialize = VK_COPY_ACCELERATION_STRUCTURE_MODE_DESERIALIZE_KHR
+  };
+  using CopyAccelerationStructureModeNV = CopyAccelerationStructureModeKHR;
+
+  enum class GeometryTypeKHR
+  {
+    eTriangles = VK_GEOMETRY_TYPE_TRIANGLES_KHR,
+    eAabbs = VK_GEOMETRY_TYPE_AABBS_KHR,
+    eInstances = VK_GEOMETRY_TYPE_INSTANCES_KHR
+  };
+  using GeometryTypeNV = GeometryTypeKHR;
+
+  enum class AccelerationStructureCompatibilityKHR
+  {
+    eCompatible = VK_ACCELERATION_STRUCTURE_COMPATIBILITY_COMPATIBLE_KHR,
+    eIncompatible = VK_ACCELERATION_STRUCTURE_COMPATIBILITY_INCOMPATIBLE_KHR
+  };
+
+  enum class AccelerationStructureCreateFlagBitsKHR : VkAccelerationStructureCreateFlagsKHR
+  {
+    eDeviceAddressCaptureReplay = VK_ACCELERATION_STRUCTURE_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR,
+    eDescriptorBufferCaptureReplayEXT = VK_ACCELERATION_STRUCTURE_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT,
+    eMotionNV = VK_ACCELERATION_STRUCTURE_CREATE_MOTION_BIT_NV
+  };
+
+  using AccelerationStructureCreateFlagsKHR = Flags<AccelerationStructureCreateFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<AccelerationStructureCreateFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR AccelerationStructureCreateFlagsKHR allFlags = 
+          AccelerationStructureCreateFlagBitsKHR::eDeviceAddressCaptureReplay
+        | AccelerationStructureCreateFlagBitsKHR::eDescriptorBufferCaptureReplayEXT
+        | AccelerationStructureCreateFlagBitsKHR::eMotionNV;
+  };
+
+  enum class BuildAccelerationStructureModeKHR
+  {
+    eBuild = VK_BUILD_ACCELERATION_STRUCTURE_MODE_BUILD_KHR,
+    eUpdate = VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR
+  };
+
+  //=== VK_NV_framebuffer_mixed_samples ===
+
+  enum class CoverageModulationModeNV
+  {
+    eNone = VK_COVERAGE_MODULATION_MODE_NONE_NV,
+    eRgb = VK_COVERAGE_MODULATION_MODE_RGB_NV,
+    eAlpha = VK_COVERAGE_MODULATION_MODE_ALPHA_NV,
+    eRgba = VK_COVERAGE_MODULATION_MODE_RGBA_NV
+  };
+
+  enum class PipelineCoverageModulationStateCreateFlagBitsNV : VkPipelineCoverageModulationStateCreateFlagsNV
+  {};
+
+  using PipelineCoverageModulationStateCreateFlagsNV = Flags<PipelineCoverageModulationStateCreateFlagBitsNV>;
+
+
+  template <> struct FlagTraits<PipelineCoverageModulationStateCreateFlagBitsNV>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineCoverageModulationStateCreateFlagsNV allFlags =  {};
+  };
+
+  //=== VK_EXT_validation_cache ===
+
+  enum class ValidationCacheHeaderVersionEXT
+  {
+    eOne = VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT
+  };
+
+  enum class ValidationCacheCreateFlagBitsEXT : VkValidationCacheCreateFlagsEXT
+  {};
+
+  using ValidationCacheCreateFlagsEXT = Flags<ValidationCacheCreateFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<ValidationCacheCreateFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ValidationCacheCreateFlagsEXT allFlags =  {};
+  };
+
+  //=== VK_NV_shading_rate_image ===
+
+  enum class ShadingRatePaletteEntryNV
+  {
+    eNoInvocations = VK_SHADING_RATE_PALETTE_ENTRY_NO_INVOCATIONS_NV,
+    e16InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_16_INVOCATIONS_PER_PIXEL_NV,
+    e8InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_8_INVOCATIONS_PER_PIXEL_NV,
+    e4InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_4_INVOCATIONS_PER_PIXEL_NV,
+    e2InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_2_INVOCATIONS_PER_PIXEL_NV,
+    e1InvocationPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_PIXEL_NV,
+    e1InvocationPer2X1Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X1_PIXELS_NV,
+    e1InvocationPer1X2Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_1X2_PIXELS_NV,
+    e1InvocationPer2X2Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X2_PIXELS_NV,
+    e1InvocationPer4X2Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X2_PIXELS_NV,
+    e1InvocationPer2X4Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X4_PIXELS_NV,
+    e1InvocationPer4X4Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X4_PIXELS_NV
+  };
+
+  enum class CoarseSampleOrderTypeNV
+  {
+    eDefault = VK_COARSE_SAMPLE_ORDER_TYPE_DEFAULT_NV,
+    eCustom = VK_COARSE_SAMPLE_ORDER_TYPE_CUSTOM_NV,
+    ePixelMajor = VK_COARSE_SAMPLE_ORDER_TYPE_PIXEL_MAJOR_NV,
+    eSampleMajor = VK_COARSE_SAMPLE_ORDER_TYPE_SAMPLE_MAJOR_NV
+  };
+
+  //=== VK_NV_ray_tracing ===
+
+  enum class AccelerationStructureMemoryRequirementsTypeNV
+  {
+    eObject = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV,
+    eBuildScratch = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV,
+    eUpdateScratch = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV
+  };
+
+  //=== VK_AMD_pipeline_compiler_control ===
+
+  enum class PipelineCompilerControlFlagBitsAMD : VkPipelineCompilerControlFlagsAMD
+  {};
+
+  using PipelineCompilerControlFlagsAMD = Flags<PipelineCompilerControlFlagBitsAMD>;
+
+
+  template <> struct FlagTraits<PipelineCompilerControlFlagBitsAMD>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineCompilerControlFlagsAMD allFlags =  {};
+  };
+
+  //=== VK_EXT_calibrated_timestamps ===
+
+  enum class TimeDomainEXT
+  {
+    eDevice = VK_TIME_DOMAIN_DEVICE_EXT,
+    eClockMonotonic = VK_TIME_DOMAIN_CLOCK_MONOTONIC_EXT,
+    eClockMonotonicRaw = VK_TIME_DOMAIN_CLOCK_MONOTONIC_RAW_EXT,
+    eQueryPerformanceCounter = VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_EXT
+  };
+
+  //=== VK_KHR_global_priority ===
+
+  enum class QueueGlobalPriorityKHR
+  {
+    eLow = VK_QUEUE_GLOBAL_PRIORITY_LOW_KHR,
+    eMedium = VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_KHR,
+    eHigh = VK_QUEUE_GLOBAL_PRIORITY_HIGH_KHR,
+    eRealtime = VK_QUEUE_GLOBAL_PRIORITY_REALTIME_KHR
+  };
+  using QueueGlobalPriorityEXT = QueueGlobalPriorityKHR;
+
+  //=== VK_AMD_memory_overallocation_behavior ===
+
+  enum class MemoryOverallocationBehaviorAMD
+  {
+    eDefault = VK_MEMORY_OVERALLOCATION_BEHAVIOR_DEFAULT_AMD,
+    eAllowed = VK_MEMORY_OVERALLOCATION_BEHAVIOR_ALLOWED_AMD,
+    eDisallowed = VK_MEMORY_OVERALLOCATION_BEHAVIOR_DISALLOWED_AMD
+  };
+
+  //=== VK_INTEL_performance_query ===
+
+  enum class PerformanceConfigurationTypeINTEL
+  {
+    eCommandQueueMetricsDiscoveryActivated = VK_PERFORMANCE_CONFIGURATION_TYPE_COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED_INTEL
+  };
+
+  enum class QueryPoolSamplingModeINTEL
+  {
+    eManual = VK_QUERY_POOL_SAMPLING_MODE_MANUAL_INTEL
+  };
+
+  enum class PerformanceOverrideTypeINTEL
+  {
+    eNullHardware = VK_PERFORMANCE_OVERRIDE_TYPE_NULL_HARDWARE_INTEL,
+    eFlushGpuCaches = VK_PERFORMANCE_OVERRIDE_TYPE_FLUSH_GPU_CACHES_INTEL
+  };
+
+  enum class PerformanceParameterTypeINTEL
+  {
+    eHwCountersSupported = VK_PERFORMANCE_PARAMETER_TYPE_HW_COUNTERS_SUPPORTED_INTEL,
+    eStreamMarkerValidBits = VK_PERFORMANCE_PARAMETER_TYPE_STREAM_MARKER_VALID_BITS_INTEL
+  };
+
+  enum class PerformanceValueTypeINTEL
+  {
+    eUint32 = VK_PERFORMANCE_VALUE_TYPE_UINT32_INTEL,
+    eUint64 = VK_PERFORMANCE_VALUE_TYPE_UINT64_INTEL,
+    eFloat = VK_PERFORMANCE_VALUE_TYPE_FLOAT_INTEL,
+    eBool = VK_PERFORMANCE_VALUE_TYPE_BOOL_INTEL,
+    eString = VK_PERFORMANCE_VALUE_TYPE_STRING_INTEL
+  };
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_imagepipe_surface ===
+
+  enum class ImagePipeSurfaceCreateFlagBitsFUCHSIA : VkImagePipeSurfaceCreateFlagsFUCHSIA
+  {};
+
+  using ImagePipeSurfaceCreateFlagsFUCHSIA = Flags<ImagePipeSurfaceCreateFlagBitsFUCHSIA>;
+
+
+  template <> struct FlagTraits<ImagePipeSurfaceCreateFlagBitsFUCHSIA>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ImagePipeSurfaceCreateFlagsFUCHSIA allFlags =  {};
+  };
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+  //=== VK_EXT_metal_surface ===
+
+  enum class MetalSurfaceCreateFlagBitsEXT : VkMetalSurfaceCreateFlagsEXT
+  {};
+
+  using MetalSurfaceCreateFlagsEXT = Flags<MetalSurfaceCreateFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<MetalSurfaceCreateFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR MetalSurfaceCreateFlagsEXT allFlags =  {};
+  };
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  //=== VK_KHR_fragment_shading_rate ===
+
+  enum class FragmentShadingRateCombinerOpKHR
+  {
+    eKeep = VK_FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR,
+    eReplace = VK_FRAGMENT_SHADING_RATE_COMBINER_OP_REPLACE_KHR,
+    eMin = VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MIN_KHR,
+    eMax = VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MAX_KHR,
+    eMul = VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MUL_KHR
+  };
+
+  //=== VK_AMD_shader_core_properties2 ===
+
+  enum class ShaderCorePropertiesFlagBitsAMD : VkShaderCorePropertiesFlagsAMD
+  {};
+
+  using ShaderCorePropertiesFlagsAMD = Flags<ShaderCorePropertiesFlagBitsAMD>;
+
+
+  template <> struct FlagTraits<ShaderCorePropertiesFlagBitsAMD>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ShaderCorePropertiesFlagsAMD allFlags =  {};
+  };
+
+  //=== VK_EXT_validation_features ===
+
+  enum class ValidationFeatureEnableEXT
+  {
+    eGpuAssisted = VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT,
+    eGpuAssistedReserveBindingSlot = VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT,
+    eBestPractices = VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT,
+    eDebugPrintf = VK_VALIDATION_FEATURE_ENABLE_DEBUG_PRINTF_EXT,
+    eSynchronizationValidation = VK_VALIDATION_FEATURE_ENABLE_SYNCHRONIZATION_VALIDATION_EXT
+  };
+
+  enum class ValidationFeatureDisableEXT
+  {
+    eAll = VK_VALIDATION_FEATURE_DISABLE_ALL_EXT,
+    eShaders = VK_VALIDATION_FEATURE_DISABLE_SHADERS_EXT,
+    eThreadSafety = VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT,
+    eApiParameters = VK_VALIDATION_FEATURE_DISABLE_API_PARAMETERS_EXT,
+    eObjectLifetimes = VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT,
+    eCoreChecks = VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT,
+    eUniqueHandles = VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT,
+    eShaderValidationCache = VK_VALIDATION_FEATURE_DISABLE_SHADER_VALIDATION_CACHE_EXT
+  };
+
+  //=== VK_NV_cooperative_matrix ===
+
+  enum class ScopeNV
+  {
+    eDevice = VK_SCOPE_DEVICE_NV,
+    eWorkgroup = VK_SCOPE_WORKGROUP_NV,
+    eSubgroup = VK_SCOPE_SUBGROUP_NV,
+    eQueueFamily = VK_SCOPE_QUEUE_FAMILY_NV
+  };
+
+  enum class ComponentTypeNV
+  {
+    eFloat16 = VK_COMPONENT_TYPE_FLOAT16_NV,
+    eFloat32 = VK_COMPONENT_TYPE_FLOAT32_NV,
+    eFloat64 = VK_COMPONENT_TYPE_FLOAT64_NV,
+    eSint8 = VK_COMPONENT_TYPE_SINT8_NV,
+    eSint16 = VK_COMPONENT_TYPE_SINT16_NV,
+    eSint32 = VK_COMPONENT_TYPE_SINT32_NV,
+    eSint64 = VK_COMPONENT_TYPE_SINT64_NV,
+    eUint8 = VK_COMPONENT_TYPE_UINT8_NV,
+    eUint16 = VK_COMPONENT_TYPE_UINT16_NV,
+    eUint32 = VK_COMPONENT_TYPE_UINT32_NV,
+    eUint64 = VK_COMPONENT_TYPE_UINT64_NV
+  };
+
+  //=== VK_NV_coverage_reduction_mode ===
+
+  enum class CoverageReductionModeNV
+  {
+    eMerge = VK_COVERAGE_REDUCTION_MODE_MERGE_NV,
+    eTruncate = VK_COVERAGE_REDUCTION_MODE_TRUNCATE_NV
+  };
+
+  enum class PipelineCoverageReductionStateCreateFlagBitsNV : VkPipelineCoverageReductionStateCreateFlagsNV
+  {};
+
+  using PipelineCoverageReductionStateCreateFlagsNV = Flags<PipelineCoverageReductionStateCreateFlagBitsNV>;
+
+
+  template <> struct FlagTraits<PipelineCoverageReductionStateCreateFlagBitsNV>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineCoverageReductionStateCreateFlagsNV allFlags =  {};
+  };
+
+  //=== VK_EXT_provoking_vertex ===
+
+  enum class ProvokingVertexModeEXT
+  {
+    eFirstVertex = VK_PROVOKING_VERTEX_MODE_FIRST_VERTEX_EXT,
+    eLastVertex = VK_PROVOKING_VERTEX_MODE_LAST_VERTEX_EXT
+  };
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_EXT_full_screen_exclusive ===
+
+  enum class FullScreenExclusiveEXT
+  {
+    eDefault = VK_FULL_SCREEN_EXCLUSIVE_DEFAULT_EXT,
+    eAllowed = VK_FULL_SCREEN_EXCLUSIVE_ALLOWED_EXT,
+    eDisallowed = VK_FULL_SCREEN_EXCLUSIVE_DISALLOWED_EXT,
+    eApplicationControlled = VK_FULL_SCREEN_EXCLUSIVE_APPLICATION_CONTROLLED_EXT
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_EXT_headless_surface ===
+
+  enum class HeadlessSurfaceCreateFlagBitsEXT : VkHeadlessSurfaceCreateFlagsEXT
+  {};
+
+  using HeadlessSurfaceCreateFlagsEXT = Flags<HeadlessSurfaceCreateFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<HeadlessSurfaceCreateFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR HeadlessSurfaceCreateFlagsEXT allFlags =  {};
+  };
+
+  //=== VK_EXT_line_rasterization ===
+
+  enum class LineRasterizationModeEXT
+  {
+    eDefault = VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT,
+    eRectangular = VK_LINE_RASTERIZATION_MODE_RECTANGULAR_EXT,
+    eBresenham = VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT,
+    eRectangularSmooth = VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT
+  };
+
+  //=== VK_KHR_pipeline_executable_properties ===
+
+  enum class PipelineExecutableStatisticFormatKHR
+  {
+    eBool32 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_BOOL32_KHR,
+    eInt64 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_INT64_KHR,
+    eUint64 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_UINT64_KHR,
+    eFloat64 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_FLOAT64_KHR
+  };
+
+  //=== VK_EXT_surface_maintenance1 ===
+
+  enum class PresentScalingFlagBitsEXT : VkPresentScalingFlagsEXT
+  {
+    eOneToOne = VK_PRESENT_SCALING_ONE_TO_ONE_BIT_EXT,
+    eAspectRatioStretch = VK_PRESENT_SCALING_ASPECT_RATIO_STRETCH_BIT_EXT,
+    eStretch = VK_PRESENT_SCALING_STRETCH_BIT_EXT
+  };
+
+  using PresentScalingFlagsEXT = Flags<PresentScalingFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<PresentScalingFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PresentScalingFlagsEXT allFlags = 
+          PresentScalingFlagBitsEXT::eOneToOne
+        | PresentScalingFlagBitsEXT::eAspectRatioStretch
+        | PresentScalingFlagBitsEXT::eStretch;
+  };
+
+  enum class PresentGravityFlagBitsEXT : VkPresentGravityFlagsEXT
+  {
+    eMin = VK_PRESENT_GRAVITY_MIN_BIT_EXT,
+    eMax = VK_PRESENT_GRAVITY_MAX_BIT_EXT,
+    eCentered = VK_PRESENT_GRAVITY_CENTERED_BIT_EXT
+  };
+
+  using PresentGravityFlagsEXT = Flags<PresentGravityFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<PresentGravityFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PresentGravityFlagsEXT allFlags = 
+          PresentGravityFlagBitsEXT::eMin
+        | PresentGravityFlagBitsEXT::eMax
+        | PresentGravityFlagBitsEXT::eCentered;
+  };
+
+  //=== VK_NV_device_generated_commands ===
+
+  enum class IndirectStateFlagBitsNV : VkIndirectStateFlagsNV
+  {
+    eFlagFrontface = VK_INDIRECT_STATE_FLAG_FRONTFACE_BIT_NV
+  };
+
+  using IndirectStateFlagsNV = Flags<IndirectStateFlagBitsNV>;
+
+
+  template <> struct FlagTraits<IndirectStateFlagBitsNV>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR IndirectStateFlagsNV allFlags = 
+          IndirectStateFlagBitsNV::eFlagFrontface;
+  };
+
+  enum class IndirectCommandsTokenTypeNV
+  {
+    eShaderGroup = VK_INDIRECT_COMMANDS_TOKEN_TYPE_SHADER_GROUP_NV,
+    eStateFlags = VK_INDIRECT_COMMANDS_TOKEN_TYPE_STATE_FLAGS_NV,
+    eIndexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NV,
+    eVertexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NV,
+    ePushConstant = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NV,
+    eDrawIndexed = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NV,
+    eDraw = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NV,
+    eDrawTasks = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_TASKS_NV,
+    eDrawMeshTasks = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_NV
+  };
+
+  enum class IndirectCommandsLayoutUsageFlagBitsNV : VkIndirectCommandsLayoutUsageFlagsNV
+  {
+    eExplicitPreprocess = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EXPLICIT_PREPROCESS_BIT_NV,
+    eIndexedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NV,
+    eUnorderedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NV
+  };
+
+  using IndirectCommandsLayoutUsageFlagsNV = Flags<IndirectCommandsLayoutUsageFlagBitsNV>;
+
+
+  template <> struct FlagTraits<IndirectCommandsLayoutUsageFlagBitsNV>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR IndirectCommandsLayoutUsageFlagsNV allFlags = 
+          IndirectCommandsLayoutUsageFlagBitsNV::eExplicitPreprocess
+        | IndirectCommandsLayoutUsageFlagBitsNV::eIndexedSequences
+        | IndirectCommandsLayoutUsageFlagBitsNV::eUnorderedSequences;
+  };
+
+  //=== VK_EXT_device_memory_report ===
+
+  enum class DeviceMemoryReportEventTypeEXT
+  {
+    eAllocate = VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_ALLOCATE_EXT,
+    eFree = VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_FREE_EXT,
+    eImport = VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_IMPORT_EXT,
+    eUnimport = VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_UNIMPORT_EXT,
+    eAllocationFailed = VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_ALLOCATION_FAILED_EXT
+  };
+
+  enum class DeviceMemoryReportFlagBitsEXT : VkDeviceMemoryReportFlagsEXT
+  {};
+
+  using DeviceMemoryReportFlagsEXT = Flags<DeviceMemoryReportFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<DeviceMemoryReportFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR DeviceMemoryReportFlagsEXT allFlags =  {};
+  };
+
+  //=== VK_EXT_pipeline_creation_cache_control ===
+
+  enum class PipelineCacheCreateFlagBits : VkPipelineCacheCreateFlags
+  {
+    eExternallySynchronized = VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT,
+    eExternallySynchronizedEXT = VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT_EXT
+  };
+
+  using PipelineCacheCreateFlags = Flags<PipelineCacheCreateFlagBits>;
+
+
+  template <> struct FlagTraits<PipelineCacheCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineCacheCreateFlags allFlags = 
+          PipelineCacheCreateFlagBits::eExternallySynchronized;
+  };
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  //=== VK_KHR_video_encode_queue ===
+
+  enum class VideoEncodeCapabilityFlagBitsKHR : VkVideoEncodeCapabilityFlagsKHR
+  {
+    ePrecedingExternallyEncodedBytes = VK_VIDEO_ENCODE_CAPABILITY_PRECEDING_EXTERNALLY_ENCODED_BYTES_BIT_KHR
+  };
+
+  using VideoEncodeCapabilityFlagsKHR = Flags<VideoEncodeCapabilityFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<VideoEncodeCapabilityFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeCapabilityFlagsKHR allFlags = 
+          VideoEncodeCapabilityFlagBitsKHR::ePrecedingExternallyEncodedBytes;
+  };
+
+  enum class VideoEncodeUsageFlagBitsKHR : VkVideoEncodeUsageFlagsKHR
+  {
+    eDefault = VK_VIDEO_ENCODE_USAGE_DEFAULT_KHR,
+    eTranscoding = VK_VIDEO_ENCODE_USAGE_TRANSCODING_BIT_KHR,
+    eStreaming = VK_VIDEO_ENCODE_USAGE_STREAMING_BIT_KHR,
+    eRecording = VK_VIDEO_ENCODE_USAGE_RECORDING_BIT_KHR,
+    eConferencing = VK_VIDEO_ENCODE_USAGE_CONFERENCING_BIT_KHR
+  };
+
+  using VideoEncodeUsageFlagsKHR = Flags<VideoEncodeUsageFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<VideoEncodeUsageFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeUsageFlagsKHR allFlags = 
+          VideoEncodeUsageFlagBitsKHR::eDefault
+        | VideoEncodeUsageFlagBitsKHR::eTranscoding
+        | VideoEncodeUsageFlagBitsKHR::eStreaming
+        | VideoEncodeUsageFlagBitsKHR::eRecording
+        | VideoEncodeUsageFlagBitsKHR::eConferencing;
+  };
+
+  enum class VideoEncodeContentFlagBitsKHR : VkVideoEncodeContentFlagsKHR
+  {
+    eDefault = VK_VIDEO_ENCODE_CONTENT_DEFAULT_KHR,
+    eCamera = VK_VIDEO_ENCODE_CONTENT_CAMERA_BIT_KHR,
+    eDesktop = VK_VIDEO_ENCODE_CONTENT_DESKTOP_BIT_KHR,
+    eRendered = VK_VIDEO_ENCODE_CONTENT_RENDERED_BIT_KHR
+  };
+
+  using VideoEncodeContentFlagsKHR = Flags<VideoEncodeContentFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<VideoEncodeContentFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeContentFlagsKHR allFlags = 
+          VideoEncodeContentFlagBitsKHR::eDefault
+        | VideoEncodeContentFlagBitsKHR::eCamera
+        | VideoEncodeContentFlagBitsKHR::eDesktop
+        | VideoEncodeContentFlagBitsKHR::eRendered;
+  };
+
+  enum class VideoEncodeTuningModeKHR
+  {
+    eDefault = VK_VIDEO_ENCODE_TUNING_MODE_DEFAULT_KHR,
+    eHighQuality = VK_VIDEO_ENCODE_TUNING_MODE_HIGH_QUALITY_KHR,
+    eLowLatency = VK_VIDEO_ENCODE_TUNING_MODE_LOW_LATENCY_KHR,
+    eUltraLowLatency = VK_VIDEO_ENCODE_TUNING_MODE_ULTRA_LOW_LATENCY_KHR,
+    eLossless = VK_VIDEO_ENCODE_TUNING_MODE_LOSSLESS_KHR
+  };
+
+  enum class VideoEncodeRateControlModeFlagBitsKHR : VkVideoEncodeRateControlModeFlagsKHR
+  {
+    eNone = VK_VIDEO_ENCODE_RATE_CONTROL_MODE_NONE_BIT_KHR,
+    eCbr = VK_VIDEO_ENCODE_RATE_CONTROL_MODE_CBR_BIT_KHR,
+    eVbr = VK_VIDEO_ENCODE_RATE_CONTROL_MODE_VBR_BIT_KHR
+  };
+
+  using VideoEncodeRateControlModeFlagsKHR = Flags<VideoEncodeRateControlModeFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<VideoEncodeRateControlModeFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeRateControlModeFlagsKHR allFlags = 
+          VideoEncodeRateControlModeFlagBitsKHR::eNone
+        | VideoEncodeRateControlModeFlagBitsKHR::eCbr
+        | VideoEncodeRateControlModeFlagBitsKHR::eVbr;
+  };
+
+  enum class VideoEncodeFlagBitsKHR : VkVideoEncodeFlagsKHR
+  {};
+
+  using VideoEncodeFlagsKHR = Flags<VideoEncodeFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<VideoEncodeFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeFlagsKHR allFlags =  {};
+  };
+
+  enum class VideoEncodeRateControlFlagBitsKHR : VkVideoEncodeRateControlFlagsKHR
+  {};
+
+  using VideoEncodeRateControlFlagsKHR = Flags<VideoEncodeRateControlFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<VideoEncodeRateControlFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeRateControlFlagsKHR allFlags =  {};
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  //=== VK_NV_device_diagnostics_config ===
+
+  enum class DeviceDiagnosticsConfigFlagBitsNV : VkDeviceDiagnosticsConfigFlagsNV
+  {
+    eEnableShaderDebugInfo = VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_SHADER_DEBUG_INFO_BIT_NV,
+    eEnableResourceTracking = VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_RESOURCE_TRACKING_BIT_NV,
+    eEnableAutomaticCheckpoints = VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_AUTOMATIC_CHECKPOINTS_BIT_NV,
+    eEnableShaderErrorReporting = VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_SHADER_ERROR_REPORTING_BIT_NV
+  };
+
+  using DeviceDiagnosticsConfigFlagsNV = Flags<DeviceDiagnosticsConfigFlagBitsNV>;
+
+
+  template <> struct FlagTraits<DeviceDiagnosticsConfigFlagBitsNV>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR DeviceDiagnosticsConfigFlagsNV allFlags = 
+          DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderDebugInfo
+        | DeviceDiagnosticsConfigFlagBitsNV::eEnableResourceTracking
+        | DeviceDiagnosticsConfigFlagBitsNV::eEnableAutomaticCheckpoints
+        | DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderErrorReporting;
+  };
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+  //=== VK_EXT_metal_objects ===
+
+  enum class ExportMetalObjectTypeFlagBitsEXT : VkExportMetalObjectTypeFlagsEXT
+  {
+    eMetalDevice = VK_EXPORT_METAL_OBJECT_TYPE_METAL_DEVICE_BIT_EXT,
+    eMetalCommandQueue = VK_EXPORT_METAL_OBJECT_TYPE_METAL_COMMAND_QUEUE_BIT_EXT,
+    eMetalBuffer = VK_EXPORT_METAL_OBJECT_TYPE_METAL_BUFFER_BIT_EXT,
+    eMetalTexture = VK_EXPORT_METAL_OBJECT_TYPE_METAL_TEXTURE_BIT_EXT,
+    eMetalIosurface = VK_EXPORT_METAL_OBJECT_TYPE_METAL_IOSURFACE_BIT_EXT,
+    eMetalSharedEvent = VK_EXPORT_METAL_OBJECT_TYPE_METAL_SHARED_EVENT_BIT_EXT
+  };
+
+  using ExportMetalObjectTypeFlagsEXT = Flags<ExportMetalObjectTypeFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<ExportMetalObjectTypeFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ExportMetalObjectTypeFlagsEXT allFlags = 
+          ExportMetalObjectTypeFlagBitsEXT::eMetalDevice
+        | ExportMetalObjectTypeFlagBitsEXT::eMetalCommandQueue
+        | ExportMetalObjectTypeFlagBitsEXT::eMetalBuffer
+        | ExportMetalObjectTypeFlagBitsEXT::eMetalTexture
+        | ExportMetalObjectTypeFlagBitsEXT::eMetalIosurface
+        | ExportMetalObjectTypeFlagBitsEXT::eMetalSharedEvent;
+  };
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  //=== VK_EXT_graphics_pipeline_library ===
+
+  enum class GraphicsPipelineLibraryFlagBitsEXT : VkGraphicsPipelineLibraryFlagsEXT
+  {
+    eVertexInputInterface = VK_GRAPHICS_PIPELINE_LIBRARY_VERTEX_INPUT_INTERFACE_BIT_EXT,
+    ePreRasterizationShaders = VK_GRAPHICS_PIPELINE_LIBRARY_PRE_RASTERIZATION_SHADERS_BIT_EXT,
+    eFragmentShader = VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_SHADER_BIT_EXT,
+    eFragmentOutputInterface = VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_OUTPUT_INTERFACE_BIT_EXT
+  };
+
+  using GraphicsPipelineLibraryFlagsEXT = Flags<GraphicsPipelineLibraryFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<GraphicsPipelineLibraryFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR GraphicsPipelineLibraryFlagsEXT allFlags = 
+          GraphicsPipelineLibraryFlagBitsEXT::eVertexInputInterface
+        | GraphicsPipelineLibraryFlagBitsEXT::ePreRasterizationShaders
+        | GraphicsPipelineLibraryFlagBitsEXT::eFragmentShader
+        | GraphicsPipelineLibraryFlagBitsEXT::eFragmentOutputInterface;
+  };
+
+  enum class PipelineLayoutCreateFlagBits : VkPipelineLayoutCreateFlags
+  {
+    eIndependentSetsEXT = VK_PIPELINE_LAYOUT_CREATE_INDEPENDENT_SETS_BIT_EXT
+  };
+
+  using PipelineLayoutCreateFlags = Flags<PipelineLayoutCreateFlagBits>;
+
+
+  template <> struct FlagTraits<PipelineLayoutCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineLayoutCreateFlags allFlags = 
+          PipelineLayoutCreateFlagBits::eIndependentSetsEXT;
+  };
+
+  //=== VK_NV_fragment_shading_rate_enums ===
+
+  enum class FragmentShadingRateNV
+  {
+    e1InvocationPerPixel = VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_PIXEL_NV,
+    e1InvocationPer1X2Pixels = VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_1X2_PIXELS_NV,
+    e1InvocationPer2X1Pixels = VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X1_PIXELS_NV,
+    e1InvocationPer2X2Pixels = VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X2_PIXELS_NV,
+    e1InvocationPer2X4Pixels = VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X4_PIXELS_NV,
+    e1InvocationPer4X2Pixels = VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_4X2_PIXELS_NV,
+    e1InvocationPer4X4Pixels = VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_4X4_PIXELS_NV,
+    e2InvocationsPerPixel = VK_FRAGMENT_SHADING_RATE_2_INVOCATIONS_PER_PIXEL_NV,
+    e4InvocationsPerPixel = VK_FRAGMENT_SHADING_RATE_4_INVOCATIONS_PER_PIXEL_NV,
+    e8InvocationsPerPixel = VK_FRAGMENT_SHADING_RATE_8_INVOCATIONS_PER_PIXEL_NV,
+    e16InvocationsPerPixel = VK_FRAGMENT_SHADING_RATE_16_INVOCATIONS_PER_PIXEL_NV,
+    eNoInvocations = VK_FRAGMENT_SHADING_RATE_NO_INVOCATIONS_NV
+  };
+
+  enum class FragmentShadingRateTypeNV
+  {
+    eFragmentSize = VK_FRAGMENT_SHADING_RATE_TYPE_FRAGMENT_SIZE_NV,
+    eEnums = VK_FRAGMENT_SHADING_RATE_TYPE_ENUMS_NV
+  };
+
+  //=== VK_NV_ray_tracing_motion_blur ===
+
+  enum class AccelerationStructureMotionInstanceTypeNV
+  {
+    eStatic = VK_ACCELERATION_STRUCTURE_MOTION_INSTANCE_TYPE_STATIC_NV,
+    eMatrixMotion = VK_ACCELERATION_STRUCTURE_MOTION_INSTANCE_TYPE_MATRIX_MOTION_NV,
+    eSrtMotion = VK_ACCELERATION_STRUCTURE_MOTION_INSTANCE_TYPE_SRT_MOTION_NV
+  };
+
+  enum class AccelerationStructureMotionInfoFlagBitsNV : VkAccelerationStructureMotionInfoFlagsNV
+  {};
+
+  using AccelerationStructureMotionInfoFlagsNV = Flags<AccelerationStructureMotionInfoFlagBitsNV>;
+
+
+  template <> struct FlagTraits<AccelerationStructureMotionInfoFlagBitsNV>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR AccelerationStructureMotionInfoFlagsNV allFlags =  {};
+  };
+
+  enum class AccelerationStructureMotionInstanceFlagBitsNV : VkAccelerationStructureMotionInstanceFlagsNV
+  {};
+
+  using AccelerationStructureMotionInstanceFlagsNV = Flags<AccelerationStructureMotionInstanceFlagBitsNV>;
+
+
+  template <> struct FlagTraits<AccelerationStructureMotionInstanceFlagBitsNV>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR AccelerationStructureMotionInstanceFlagsNV allFlags =  {};
+  };
+
+  //=== VK_EXT_image_compression_control ===
+
+  enum class ImageCompressionFlagBitsEXT : VkImageCompressionFlagsEXT
+  {
+    eDefault = VK_IMAGE_COMPRESSION_DEFAULT_EXT,
+    eFixedRateDefault = VK_IMAGE_COMPRESSION_FIXED_RATE_DEFAULT_EXT,
+    eFixedRateExplicit = VK_IMAGE_COMPRESSION_FIXED_RATE_EXPLICIT_EXT,
+    eDisabled = VK_IMAGE_COMPRESSION_DISABLED_EXT
+  };
+
+  using ImageCompressionFlagsEXT = Flags<ImageCompressionFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<ImageCompressionFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ImageCompressionFlagsEXT allFlags = 
+          ImageCompressionFlagBitsEXT::eDefault
+        | ImageCompressionFlagBitsEXT::eFixedRateDefault
+        | ImageCompressionFlagBitsEXT::eFixedRateExplicit
+        | ImageCompressionFlagBitsEXT::eDisabled;
+  };
+
+  enum class ImageCompressionFixedRateFlagBitsEXT : VkImageCompressionFixedRateFlagsEXT
+  {
+    eNone = VK_IMAGE_COMPRESSION_FIXED_RATE_NONE_EXT,
+    e1Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_1BPC_BIT_EXT,
+    e2Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_2BPC_BIT_EXT,
+    e3Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_3BPC_BIT_EXT,
+    e4Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_4BPC_BIT_EXT,
+    e5Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_5BPC_BIT_EXT,
+    e6Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_6BPC_BIT_EXT,
+    e7Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_7BPC_BIT_EXT,
+    e8Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_8BPC_BIT_EXT,
+    e9Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_9BPC_BIT_EXT,
+    e10Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_10BPC_BIT_EXT,
+    e11Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_11BPC_BIT_EXT,
+    e12Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_12BPC_BIT_EXT,
+    e13Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_13BPC_BIT_EXT,
+    e14Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_14BPC_BIT_EXT,
+    e15Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_15BPC_BIT_EXT,
+    e16Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_16BPC_BIT_EXT,
+    e17Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_17BPC_BIT_EXT,
+    e18Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_18BPC_BIT_EXT,
+    e19Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_19BPC_BIT_EXT,
+    e20Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_20BPC_BIT_EXT,
+    e21Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_21BPC_BIT_EXT,
+    e22Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_22BPC_BIT_EXT,
+    e23Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_23BPC_BIT_EXT,
+    e24Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_24BPC_BIT_EXT
+  };
+
+  using ImageCompressionFixedRateFlagsEXT = Flags<ImageCompressionFixedRateFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<ImageCompressionFixedRateFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ImageCompressionFixedRateFlagsEXT allFlags = 
+          ImageCompressionFixedRateFlagBitsEXT::eNone
+        | ImageCompressionFixedRateFlagBitsEXT::e1Bpc
+        | ImageCompressionFixedRateFlagBitsEXT::e2Bpc
+        | ImageCompressionFixedRateFlagBitsEXT::e3Bpc
+        | ImageCompressionFixedRateFlagBitsEXT::e4Bpc
+        | ImageCompressionFixedRateFlagBitsEXT::e5Bpc
+        | ImageCompressionFixedRateFlagBitsEXT::e6Bpc
+        | ImageCompressionFixedRateFlagBitsEXT::e7Bpc
+        | ImageCompressionFixedRateFlagBitsEXT::e8Bpc
+        | ImageCompressionFixedRateFlagBitsEXT::e9Bpc
+        | ImageCompressionFixedRateFlagBitsEXT::e10Bpc
+        | ImageCompressionFixedRateFlagBitsEXT::e11Bpc
+        | ImageCompressionFixedRateFlagBitsEXT::e12Bpc
+        | ImageCompressionFixedRateFlagBitsEXT::e13Bpc
+        | ImageCompressionFixedRateFlagBitsEXT::e14Bpc
+        | ImageCompressionFixedRateFlagBitsEXT::e15Bpc
+        | ImageCompressionFixedRateFlagBitsEXT::e16Bpc
+        | ImageCompressionFixedRateFlagBitsEXT::e17Bpc
+        | ImageCompressionFixedRateFlagBitsEXT::e18Bpc
+        | ImageCompressionFixedRateFlagBitsEXT::e19Bpc
+        | ImageCompressionFixedRateFlagBitsEXT::e20Bpc
+        | ImageCompressionFixedRateFlagBitsEXT::e21Bpc
+        | ImageCompressionFixedRateFlagBitsEXT::e22Bpc
+        | ImageCompressionFixedRateFlagBitsEXT::e23Bpc
+        | ImageCompressionFixedRateFlagBitsEXT::e24Bpc;
+  };
+
+  //=== VK_EXT_device_fault ===
+
+  enum class DeviceFaultAddressTypeEXT
+  {
+    eNone = VK_DEVICE_FAULT_ADDRESS_TYPE_NONE_EXT,
+    eReadInvalid = VK_DEVICE_FAULT_ADDRESS_TYPE_READ_INVALID_EXT,
+    eWriteInvalid = VK_DEVICE_FAULT_ADDRESS_TYPE_WRITE_INVALID_EXT,
+    eExecuteInvalid = VK_DEVICE_FAULT_ADDRESS_TYPE_EXECUTE_INVALID_EXT,
+    eInstructionPointerUnknown = VK_DEVICE_FAULT_ADDRESS_TYPE_INSTRUCTION_POINTER_UNKNOWN_EXT,
+    eInstructionPointerInvalid = VK_DEVICE_FAULT_ADDRESS_TYPE_INSTRUCTION_POINTER_INVALID_EXT,
+    eInstructionPointerFault = VK_DEVICE_FAULT_ADDRESS_TYPE_INSTRUCTION_POINTER_FAULT_EXT
+  };
+
+  enum class DeviceFaultVendorBinaryHeaderVersionEXT
+  {
+    eOne = VK_DEVICE_FAULT_VENDOR_BINARY_HEADER_VERSION_ONE_EXT
+  };
+
+#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
+  //=== VK_EXT_directfb_surface ===
+
+  enum class DirectFBSurfaceCreateFlagBitsEXT : VkDirectFBSurfaceCreateFlagsEXT
+  {};
+
+  using DirectFBSurfaceCreateFlagsEXT = Flags<DirectFBSurfaceCreateFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<DirectFBSurfaceCreateFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR DirectFBSurfaceCreateFlagsEXT allFlags =  {};
+  };
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+
+  //=== VK_KHR_ray_tracing_pipeline ===
+
+  enum class RayTracingShaderGroupTypeKHR
+  {
+    eGeneral = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR,
+    eTrianglesHitGroup = VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR,
+    eProceduralHitGroup = VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR
+  };
+  using RayTracingShaderGroupTypeNV = RayTracingShaderGroupTypeKHR;
+
+  enum class ShaderGroupShaderKHR
+  {
+    eGeneral = VK_SHADER_GROUP_SHADER_GENERAL_KHR,
+    eClosestHit = VK_SHADER_GROUP_SHADER_CLOSEST_HIT_KHR,
+    eAnyHit = VK_SHADER_GROUP_SHADER_ANY_HIT_KHR,
+    eIntersection = VK_SHADER_GROUP_SHADER_INTERSECTION_KHR
+  };
+
+  //=== VK_EXT_device_address_binding_report ===
+
+  enum class DeviceAddressBindingFlagBitsEXT : VkDeviceAddressBindingFlagsEXT
+  {
+    eInternalObject = VK_DEVICE_ADDRESS_BINDING_INTERNAL_OBJECT_BIT_EXT
+  };
+
+  using DeviceAddressBindingFlagsEXT = Flags<DeviceAddressBindingFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<DeviceAddressBindingFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR DeviceAddressBindingFlagsEXT allFlags = 
+          DeviceAddressBindingFlagBitsEXT::eInternalObject;
+  };
+
+  enum class DeviceAddressBindingTypeEXT
+  {
+    eBind = VK_DEVICE_ADDRESS_BINDING_TYPE_BIND_EXT,
+    eUnbind = VK_DEVICE_ADDRESS_BINDING_TYPE_UNBIND_EXT
+  };
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_buffer_collection ===
+
+  enum class ImageConstraintsInfoFlagBitsFUCHSIA : VkImageConstraintsInfoFlagsFUCHSIA
+  {
+    eCpuReadRarely = VK_IMAGE_CONSTRAINTS_INFO_CPU_READ_RARELY_FUCHSIA,
+    eCpuReadOften = VK_IMAGE_CONSTRAINTS_INFO_CPU_READ_OFTEN_FUCHSIA,
+    eCpuWriteRarely = VK_IMAGE_CONSTRAINTS_INFO_CPU_WRITE_RARELY_FUCHSIA,
+    eCpuWriteOften = VK_IMAGE_CONSTRAINTS_INFO_CPU_WRITE_OFTEN_FUCHSIA,
+    eProtectedOptional = VK_IMAGE_CONSTRAINTS_INFO_PROTECTED_OPTIONAL_FUCHSIA
+  };
+
+  using ImageConstraintsInfoFlagsFUCHSIA = Flags<ImageConstraintsInfoFlagBitsFUCHSIA>;
+
+
+  template <> struct FlagTraits<ImageConstraintsInfoFlagBitsFUCHSIA>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ImageConstraintsInfoFlagsFUCHSIA allFlags = 
+          ImageConstraintsInfoFlagBitsFUCHSIA::eCpuReadRarely
+        | ImageConstraintsInfoFlagBitsFUCHSIA::eCpuReadOften
+        | ImageConstraintsInfoFlagBitsFUCHSIA::eCpuWriteRarely
+        | ImageConstraintsInfoFlagBitsFUCHSIA::eCpuWriteOften
+        | ImageConstraintsInfoFlagBitsFUCHSIA::eProtectedOptional;
+  };
+
+  enum class ImageFormatConstraintsFlagBitsFUCHSIA : VkImageFormatConstraintsFlagsFUCHSIA
+  {};
+
+  using ImageFormatConstraintsFlagsFUCHSIA = Flags<ImageFormatConstraintsFlagBitsFUCHSIA>;
+
+
+  template <> struct FlagTraits<ImageFormatConstraintsFlagBitsFUCHSIA>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ImageFormatConstraintsFlagsFUCHSIA allFlags =  {};
+  };
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_SCREEN_QNX )
+  //=== VK_QNX_screen_surface ===
+
+  enum class ScreenSurfaceCreateFlagBitsQNX : VkScreenSurfaceCreateFlagsQNX
+  {};
+
+  using ScreenSurfaceCreateFlagsQNX = Flags<ScreenSurfaceCreateFlagBitsQNX>;
+
+
+  template <> struct FlagTraits<ScreenSurfaceCreateFlagBitsQNX>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ScreenSurfaceCreateFlagsQNX allFlags =  {};
+  };
+#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+
+  //=== VK_EXT_opacity_micromap ===
+
+  enum class MicromapTypeEXT
+  {
+    eOpacityMicromap = VK_MICROMAP_TYPE_OPACITY_MICROMAP_EXT
+  };
+
+  enum class BuildMicromapFlagBitsEXT : VkBuildMicromapFlagsEXT
+  {
+    ePreferFastTrace = VK_BUILD_MICROMAP_PREFER_FAST_TRACE_BIT_EXT,
+    ePreferFastBuild = VK_BUILD_MICROMAP_PREFER_FAST_BUILD_BIT_EXT,
+    eAllowCompaction = VK_BUILD_MICROMAP_ALLOW_COMPACTION_BIT_EXT
+  };
+
+  using BuildMicromapFlagsEXT = Flags<BuildMicromapFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<BuildMicromapFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR BuildMicromapFlagsEXT allFlags = 
+          BuildMicromapFlagBitsEXT::ePreferFastTrace
+        | BuildMicromapFlagBitsEXT::ePreferFastBuild
+        | BuildMicromapFlagBitsEXT::eAllowCompaction;
+  };
+
+  enum class CopyMicromapModeEXT
+  {
+    eClone = VK_COPY_MICROMAP_MODE_CLONE_EXT,
+    eSerialize = VK_COPY_MICROMAP_MODE_SERIALIZE_EXT,
+    eDeserialize = VK_COPY_MICROMAP_MODE_DESERIALIZE_EXT,
+    eCompact = VK_COPY_MICROMAP_MODE_COMPACT_EXT
+  };
+
+  enum class MicromapCreateFlagBitsEXT : VkMicromapCreateFlagsEXT
+  {
+    eDeviceAddressCaptureReplay = VK_MICROMAP_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT
+  };
+
+  using MicromapCreateFlagsEXT = Flags<MicromapCreateFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<MicromapCreateFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR MicromapCreateFlagsEXT allFlags = 
+          MicromapCreateFlagBitsEXT::eDeviceAddressCaptureReplay;
+  };
+
+  enum class BuildMicromapModeEXT
+  {
+    eBuild = VK_BUILD_MICROMAP_MODE_BUILD_EXT
+  };
+
+  enum class OpacityMicromapFormatEXT
+  {
+    e2State = VK_OPACITY_MICROMAP_FORMAT_2_STATE_EXT,
+    e4State = VK_OPACITY_MICROMAP_FORMAT_4_STATE_EXT
+  };
+
+  enum class OpacityMicromapSpecialIndexEXT
+  {
+    eFullyTransparent = VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_TRANSPARENT_EXT,
+    eFullyOpaque = VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_OPAQUE_EXT,
+    eFullyUnknownTransparent = VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_UNKNOWN_TRANSPARENT_EXT,
+    eFullyUnknownOpaque = VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_UNKNOWN_OPAQUE_EXT
+  };
+
+  //=== VK_NV_memory_decompression ===
+
+  enum class MemoryDecompressionMethodFlagBitsNV : VkMemoryDecompressionMethodFlagsNV
+  {
+    eGdeflate10 = VK_MEMORY_DECOMPRESSION_METHOD_GDEFLATE_1_0_BIT_NV
+  };
+
+  using MemoryDecompressionMethodFlagsNV = Flags<MemoryDecompressionMethodFlagBitsNV>;
+
+
+  template <> struct FlagTraits<MemoryDecompressionMethodFlagBitsNV>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR MemoryDecompressionMethodFlagsNV allFlags = 
+          MemoryDecompressionMethodFlagBitsNV::eGdeflate10;
+  };
+
+  //=== VK_EXT_subpass_merge_feedback ===
+
+  enum class SubpassMergeStatusEXT
+  {
+    eMerged = VK_SUBPASS_MERGE_STATUS_MERGED_EXT,
+    eDisallowed = VK_SUBPASS_MERGE_STATUS_DISALLOWED_EXT,
+    eNotMergedSideEffects = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_SIDE_EFFECTS_EXT,
+    eNotMergedSamplesMismatch = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_SAMPLES_MISMATCH_EXT,
+    eNotMergedViewsMismatch = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_VIEWS_MISMATCH_EXT,
+    eNotMergedAliasing = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_ALIASING_EXT,
+    eNotMergedDependencies = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_DEPENDENCIES_EXT,
+    eNotMergedIncompatibleInputAttachment = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_INCOMPATIBLE_INPUT_ATTACHMENT_EXT,
+    eNotMergedTooManyAttachments = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_TOO_MANY_ATTACHMENTS_EXT,
+    eNotMergedInsufficientStorage = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_INSUFFICIENT_STORAGE_EXT,
+    eNotMergedDepthStencilCount = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_DEPTH_STENCIL_COUNT_EXT,
+    eNotMergedResolveAttachmentReuse = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_RESOLVE_ATTACHMENT_REUSE_EXT,
+    eNotMergedSingleSubpass = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_SINGLE_SUBPASS_EXT,
+    eNotMergedUnspecified = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_UNSPECIFIED_EXT
+  };
+
+  //=== VK_LUNARG_direct_driver_loading ===
+
+  enum class DirectDriverLoadingModeLUNARG
+  {
+    eExclusive = VK_DIRECT_DRIVER_LOADING_MODE_EXCLUSIVE_LUNARG,
+    eInclusive = VK_DIRECT_DRIVER_LOADING_MODE_INCLUSIVE_LUNARG
+  };
+
+  enum class DirectDriverLoadingFlagBitsLUNARG : VkDirectDriverLoadingFlagsLUNARG
+  {};
+
+  using DirectDriverLoadingFlagsLUNARG = Flags<DirectDriverLoadingFlagBitsLUNARG>;
+
+
+  template <> struct FlagTraits<DirectDriverLoadingFlagBitsLUNARG>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR DirectDriverLoadingFlagsLUNARG allFlags =  {};
+  };
+
+  //=== VK_EXT_rasterization_order_attachment_access ===
+
+  enum class PipelineColorBlendStateCreateFlagBits : VkPipelineColorBlendStateCreateFlags
+  {
+    eRasterizationOrderAttachmentAccessEXT = VK_PIPELINE_COLOR_BLEND_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_BIT_EXT,
+    eRasterizationOrderAttachmentAccessARM = VK_PIPELINE_COLOR_BLEND_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_BIT_ARM
+  };
+
+  using PipelineColorBlendStateCreateFlags = Flags<PipelineColorBlendStateCreateFlagBits>;
+
+
+  template <> struct FlagTraits<PipelineColorBlendStateCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineColorBlendStateCreateFlags allFlags = 
+          PipelineColorBlendStateCreateFlagBits::eRasterizationOrderAttachmentAccessEXT;
+  };
+
+  enum class PipelineDepthStencilStateCreateFlagBits : VkPipelineDepthStencilStateCreateFlags
+  {
+    eRasterizationOrderAttachmentDepthAccessEXT = VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_EXT,
+    eRasterizationOrderAttachmentStencilAccessEXT = VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_EXT,
+    eRasterizationOrderAttachmentDepthAccessARM = VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_ARM,
+    eRasterizationOrderAttachmentStencilAccessARM = VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_ARM
+  };
+
+  using PipelineDepthStencilStateCreateFlags = Flags<PipelineDepthStencilStateCreateFlagBits>;
+
+
+  template <> struct FlagTraits<PipelineDepthStencilStateCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineDepthStencilStateCreateFlags allFlags = 
+          PipelineDepthStencilStateCreateFlagBits::eRasterizationOrderAttachmentDepthAccessEXT
+        | PipelineDepthStencilStateCreateFlagBits::eRasterizationOrderAttachmentStencilAccessEXT;
+  };
+
+  //=== VK_NV_optical_flow ===
+
+  enum class OpticalFlowUsageFlagBitsNV : VkOpticalFlowUsageFlagsNV
+  {
+    eUnknown = VK_OPTICAL_FLOW_USAGE_UNKNOWN_NV,
+    eInput = VK_OPTICAL_FLOW_USAGE_INPUT_BIT_NV,
+    eOutput = VK_OPTICAL_FLOW_USAGE_OUTPUT_BIT_NV,
+    eHint = VK_OPTICAL_FLOW_USAGE_HINT_BIT_NV,
+    eCost = VK_OPTICAL_FLOW_USAGE_COST_BIT_NV,
+    eGlobalFlow = VK_OPTICAL_FLOW_USAGE_GLOBAL_FLOW_BIT_NV
+  };
+
+  using OpticalFlowUsageFlagsNV = Flags<OpticalFlowUsageFlagBitsNV>;
+
+
+  template <> struct FlagTraits<OpticalFlowUsageFlagBitsNV>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR OpticalFlowUsageFlagsNV allFlags = 
+          OpticalFlowUsageFlagBitsNV::eUnknown
+        | OpticalFlowUsageFlagBitsNV::eInput
+        | OpticalFlowUsageFlagBitsNV::eOutput
+        | OpticalFlowUsageFlagBitsNV::eHint
+        | OpticalFlowUsageFlagBitsNV::eCost
+        | OpticalFlowUsageFlagBitsNV::eGlobalFlow;
+  };
+
+  enum class OpticalFlowGridSizeFlagBitsNV : VkOpticalFlowGridSizeFlagsNV
+  {
+    eUnknown = VK_OPTICAL_FLOW_GRID_SIZE_UNKNOWN_NV,
+    e1X1 = VK_OPTICAL_FLOW_GRID_SIZE_1X1_BIT_NV,
+    e2X2 = VK_OPTICAL_FLOW_GRID_SIZE_2X2_BIT_NV,
+    e4X4 = VK_OPTICAL_FLOW_GRID_SIZE_4X4_BIT_NV,
+    e8X8 = VK_OPTICAL_FLOW_GRID_SIZE_8X8_BIT_NV
+  };
+
+  using OpticalFlowGridSizeFlagsNV = Flags<OpticalFlowGridSizeFlagBitsNV>;
+
+
+  template <> struct FlagTraits<OpticalFlowGridSizeFlagBitsNV>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR OpticalFlowGridSizeFlagsNV allFlags = 
+          OpticalFlowGridSizeFlagBitsNV::eUnknown
+        | OpticalFlowGridSizeFlagBitsNV::e1X1
+        | OpticalFlowGridSizeFlagBitsNV::e2X2
+        | OpticalFlowGridSizeFlagBitsNV::e4X4
+        | OpticalFlowGridSizeFlagBitsNV::e8X8;
+  };
+
+  enum class OpticalFlowPerformanceLevelNV
+  {
+    eUnknown = VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_UNKNOWN_NV,
+    eSlow = VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_SLOW_NV,
+    eMedium = VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_MEDIUM_NV,
+    eFast = VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_FAST_NV
+  };
+
+  enum class OpticalFlowSessionBindingPointNV
+  {
+    eUnknown = VK_OPTICAL_FLOW_SESSION_BINDING_POINT_UNKNOWN_NV,
+    eInput = VK_OPTICAL_FLOW_SESSION_BINDING_POINT_INPUT_NV,
+    eReference = VK_OPTICAL_FLOW_SESSION_BINDING_POINT_REFERENCE_NV,
+    eHint = VK_OPTICAL_FLOW_SESSION_BINDING_POINT_HINT_NV,
+    eFlowVector = VK_OPTICAL_FLOW_SESSION_BINDING_POINT_FLOW_VECTOR_NV,
+    eBackwardFlowVector = VK_OPTICAL_FLOW_SESSION_BINDING_POINT_BACKWARD_FLOW_VECTOR_NV,
+    eCost = VK_OPTICAL_FLOW_SESSION_BINDING_POINT_COST_NV,
+    eBackwardCost = VK_OPTICAL_FLOW_SESSION_BINDING_POINT_BACKWARD_COST_NV,
+    eGlobalFlow = VK_OPTICAL_FLOW_SESSION_BINDING_POINT_GLOBAL_FLOW_NV
+  };
+
+  enum class OpticalFlowSessionCreateFlagBitsNV : VkOpticalFlowSessionCreateFlagsNV
+  {
+    eEnableHint = VK_OPTICAL_FLOW_SESSION_CREATE_ENABLE_HINT_BIT_NV,
+    eEnableCost = VK_OPTICAL_FLOW_SESSION_CREATE_ENABLE_COST_BIT_NV,
+    eEnableGlobalFlow = VK_OPTICAL_FLOW_SESSION_CREATE_ENABLE_GLOBAL_FLOW_BIT_NV,
+    eAllowRegions = VK_OPTICAL_FLOW_SESSION_CREATE_ALLOW_REGIONS_BIT_NV,
+    eBothDirections = VK_OPTICAL_FLOW_SESSION_CREATE_BOTH_DIRECTIONS_BIT_NV
+  };
+
+  using OpticalFlowSessionCreateFlagsNV = Flags<OpticalFlowSessionCreateFlagBitsNV>;
+
+
+  template <> struct FlagTraits<OpticalFlowSessionCreateFlagBitsNV>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR OpticalFlowSessionCreateFlagsNV allFlags = 
+          OpticalFlowSessionCreateFlagBitsNV::eEnableHint
+        | OpticalFlowSessionCreateFlagBitsNV::eEnableCost
+        | OpticalFlowSessionCreateFlagBitsNV::eEnableGlobalFlow
+        | OpticalFlowSessionCreateFlagBitsNV::eAllowRegions
+        | OpticalFlowSessionCreateFlagBitsNV::eBothDirections;
+  };
+
+  enum class OpticalFlowExecuteFlagBitsNV : VkOpticalFlowExecuteFlagsNV
+  {
+    eDisableTemporalHints = VK_OPTICAL_FLOW_EXECUTE_DISABLE_TEMPORAL_HINTS_BIT_NV
+  };
+
+  using OpticalFlowExecuteFlagsNV = Flags<OpticalFlowExecuteFlagBitsNV>;
+
+
+  template <> struct FlagTraits<OpticalFlowExecuteFlagBitsNV>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR OpticalFlowExecuteFlagsNV allFlags = 
+          OpticalFlowExecuteFlagBitsNV::eDisableTemporalHints;
+  };
+
+  //=== VK_NV_ray_tracing_invocation_reorder ===
+
+  enum class RayTracingInvocationReorderModeNV
+  {
+    eNone = VK_RAY_TRACING_INVOCATION_REORDER_MODE_NONE_NV,
+    eReorder = VK_RAY_TRACING_INVOCATION_REORDER_MODE_REORDER_NV
+  };
+
+
+
+  //=========================
+  //=== Index Type Traits ===
+  //=========================
+
+  template<typename T>
+  struct IndexTypeValue
+  {};
+
+
+  template <>
+  struct IndexTypeValue<uint16_t>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR IndexType value = IndexType::eUint16;
+  };
+
+  template <>
+  struct CppType<IndexType, IndexType::eUint16>
+  {
+    using Type = uint16_t;
+  };
+
+  template <>
+  struct IndexTypeValue<uint32_t>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR IndexType value = IndexType::eUint32;
+  };
+
+  template <>
+  struct CppType<IndexType, IndexType::eUint32>
+  {
+    using Type = uint32_t;
+  };
+
+  template <>
+  struct IndexTypeValue<uint8_t>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR IndexType value = IndexType::eUint8EXT;
+  };
+
+  template <>
+  struct CppType<IndexType, IndexType::eUint8EXT>
+  {
+    using Type = uint8_t;
+  };
+
+
+}   // namespace VULKAN_HPP_NAMESPACE
+#endif
diff --git a/host/libs/graphics_detector/include/vulkan/vulkan_format_traits.hpp b/host/libs/graphics_detector/include/vulkan/vulkan_format_traits.hpp
new file mode 100644
index 0000000..07f4bd7
--- /dev/null
+++ b/host/libs/graphics_detector/include/vulkan/vulkan_format_traits.hpp
@@ -0,0 +1,7617 @@
+// Copyright 2015-2022 The Khronos Group Inc.
+// 
+// SPDX-License-Identifier: Apache-2.0 OR MIT
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#ifndef VULKAN_FORMAT_TRAITS_HPP
+#  define VULKAN_FORMAT_TRAITS_HPP
+
+#include <vulkan/vulkan.hpp>
+
+namespace VULKAN_HPP_NAMESPACE
+{
+
+  //=====================
+  //=== Format Traits ===
+  //=====================
+
+  // The three-dimensional extent of a texel block.
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 std::array<uint8_t, 3> blockExtent( VULKAN_HPP_NAMESPACE::Format format )
+  {
+    switch( format )
+    {
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock: return {{ 4, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock: return {{ 4, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock: return {{ 4, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock: return {{ 4, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock: return {{ 4, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock: return {{ 4, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock: return {{ 4, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock: return {{ 4, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock: return {{ 4, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock: return {{ 4, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock: return {{ 4, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock: return {{ 4, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock: return {{ 4, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock: return {{ 4, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock: return {{ 4, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock: return {{ 4, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock: return {{ 4, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock: return {{ 4, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock: return {{ 4, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock: return {{ 4, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock: return {{ 4, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock: return {{ 4, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11UnormBlock: return {{ 4, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11SnormBlock: return {{ 4, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11G11UnormBlock: return {{ 4, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11G11SnormBlock: return {{ 4, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock: return {{ 4, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock: return {{ 4, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock: return {{ 5, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock: return {{ 5, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock: return {{ 5, 5, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock: return {{ 5, 5, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock: return {{ 6, 5, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock: return {{ 6, 5, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock: return {{ 6, 6, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock: return {{ 6, 6, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock: return {{ 8, 5, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock: return {{ 8, 5, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock: return {{ 8, 6, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock: return {{ 8, 6, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock: return {{ 8, 8, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock: return {{ 8, 8, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock: return {{ 10, 5, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock: return {{ 10, 5, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock: return {{ 10, 6, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock: return {{ 10, 6, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock: return {{ 10, 8, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock: return {{ 10, 8, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock: return {{ 10, 10, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock: return {{ 10, 10, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock: return {{ 12, 10, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock: return {{ 12, 10, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock: return {{ 12, 12, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock: return {{ 12, 12, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8G8R8422Unorm: return {{ 2, 1, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8G8422Unorm: return {{ 2, 1, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: return {{ 2, 1, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: return {{ 2, 1, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: return {{ 2, 1, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: return {{ 2, 1, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16G16R16422Unorm: return {{ 2, 1, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eB16G16R16G16422Unorm: return {{ 2, 1, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock: return {{ 4, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock: return {{ 5, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock: return {{ 5, 5, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock: return {{ 6, 5, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock: return {{ 6, 6, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock: return {{ 8, 5, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock: return {{ 8, 6, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock: return {{ 8, 8, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock: return {{ 10, 5, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock: return {{ 10, 6, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock: return {{ 10, 8, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock: return {{ 10, 10, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock: return {{ 12, 10, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock: return {{ 12, 12, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: return {{ 8, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG: return {{ 4, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG: return {{ 8, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG: return {{ 4, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG: return {{ 8, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG: return {{ 4, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return {{ 8, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return {{ 4, 4, 1 }};
+
+      default: return {{1, 1, 1 }};
+    }
+  }
+
+  // The texel block size in bytes.
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t blockSize( VULKAN_HPP_NAMESPACE::Format format )
+  {
+    switch( format )
+    {
+      case VULKAN_HPP_NAMESPACE::Format::eR4G4UnormPack8: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR4G4B4A4UnormPack16: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eB4G4R4A4UnormPack16: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR5G6B5UnormPack16: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eB5G6R5UnormPack16: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR5G5B5A1UnormPack16: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eB5G5R5A1UnormPack16: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eA1R5G5B5UnormPack16: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR8Unorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8Snorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8Uscaled: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8Sscaled: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8Uint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8Sint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8Srgb: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Snorm: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Uscaled: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Sscaled: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Uint: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Sint: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Srgb: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Snorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uscaled: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sscaled: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uint: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sint: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Srgb: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Snorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uscaled: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sscaled: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uint: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sint: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Srgb: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Unorm: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Snorm: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uscaled: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sscaled: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uint: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sint: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Srgb: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Unorm: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Snorm: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uscaled: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sscaled: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uint: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sint: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Srgb: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UnormPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SnormPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UscaledPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SscaledPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UintPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SintPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SrgbPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UnormPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SnormPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UscaledPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SscaledPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UintPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SintPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UnormPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SnormPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UscaledPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SscaledPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UintPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SintPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR16Unorm: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR16Snorm: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR16Uscaled: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR16Sscaled: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR16Uint: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR16Sint: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR16Sfloat: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Snorm: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Uscaled: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Sscaled: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Uint: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Sint: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfloat: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Unorm: return 6;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Snorm: return 6;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uscaled: return 6;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sscaled: return 6;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uint: return 6;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sint: return 6;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sfloat: return 6;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Unorm: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Snorm: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uscaled: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sscaled: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uint: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sint: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sfloat: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eR32Uint: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR32Sint: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR32Sfloat: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32Uint: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32Sint: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32Sfloat: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Uint: return 12;
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sint: return 12;
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sfloat: return 12;
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Uint: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sint: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sfloat: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eR64Uint: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eR64Sint: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eR64Sfloat: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64Uint: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64Sint: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64Sfloat: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Uint: return 24;
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sint: return 24;
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sfloat: return 24;
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Uint: return 32;
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sint: return 32;
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sfloat: return 32;
+      case VULKAN_HPP_NAMESPACE::Format::eB10G11R11UfloatPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eE5B9G9R9UfloatPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eD16Unorm: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eX8D24UnormPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eD32Sfloat: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eS8Uint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eD16UnormS8Uint: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eD24UnormS8Uint: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eD32SfloatS8Uint: return 5;
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11UnormBlock: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11SnormBlock: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11G11UnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11G11SnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8G8R8422Unorm: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8G8422Unorm: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6B10X6A10X6Unorm4Pack16: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: return 6;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: return 6;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: return 6;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: return 6;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: return 6;
+      case VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4B12X4A12X4Unorm4Pack16: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: return 6;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: return 6;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: return 6;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: return 6;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: return 6;
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16G16R16422Unorm: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eB16G16R16G16422Unorm: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm: return 6;
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm: return 6;
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm: return 6;
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm: return 6;
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm: return 6;
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: return 6;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: return 6;
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm: return 6;
+      case VULKAN_HPP_NAMESPACE::Format::eA4R4G4B4UnormPack16: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eA4B4G4R4UnormPack16: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16S105NV: return 4;
+
+      default : VULKAN_HPP_ASSERT( false ); return 0;
+    }
+  }
+
+  // The class of the format (can't be just named "class"!)
+  // Returns a string naming the compatibility class this format belongs to;
+  // two formats returning the same string are in the same class.
+  // NOTE(review): the strings appear to mirror the Vulkan specification's
+  // format-compatibility tables — confirm against the spec/registry the
+  // generator consumed.
+  // Unknown formats assert (debug builds) and yield "".
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 char const * compatibilityClass( VULKAN_HPP_NAMESPACE::Format format )
+  {
+    switch( format )
+    {
+      case VULKAN_HPP_NAMESPACE::Format::eR4G4UnormPack8: return "8-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR4G4B4A4UnormPack16: return "16-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eB4G4R4A4UnormPack16: return "16-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR5G6B5UnormPack16: return "16-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eB5G6R5UnormPack16: return "16-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR5G5B5A1UnormPack16: return "16-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eB5G5R5A1UnormPack16: return "16-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eA1R5G5B5UnormPack16: return "16-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR8Unorm: return "8-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR8Snorm: return "8-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR8Uscaled: return "8-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR8Sscaled: return "8-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR8Uint: return "8-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR8Sint: return "8-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR8Srgb: return "8-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm: return "16-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Snorm: return "16-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Uscaled: return "16-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Sscaled: return "16-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Uint: return "16-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Sint: return "16-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Srgb: return "16-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Unorm: return "24-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Snorm: return "24-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uscaled: return "24-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sscaled: return "24-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uint: return "24-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sint: return "24-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Srgb: return "24-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Unorm: return "24-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Snorm: return "24-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uscaled: return "24-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sscaled: return "24-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uint: return "24-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sint: return "24-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Srgb: return "24-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Unorm: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Snorm: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uscaled: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sscaled: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uint: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sint: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Srgb: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Unorm: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Snorm: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uscaled: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sscaled: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uint: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sint: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Srgb: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UnormPack32: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SnormPack32: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UscaledPack32: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SscaledPack32: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UintPack32: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SintPack32: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SrgbPack32: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UnormPack32: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SnormPack32: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UscaledPack32: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SscaledPack32: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UintPack32: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SintPack32: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UnormPack32: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SnormPack32: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UscaledPack32: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SscaledPack32: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UintPack32: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SintPack32: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR16Unorm: return "16-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR16Snorm: return "16-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR16Uscaled: return "16-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR16Sscaled: return "16-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR16Uint: return "16-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR16Sint: return "16-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR16Sfloat: return "16-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Snorm: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Uscaled: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Sscaled: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Uint: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Sint: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfloat: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Unorm: return "48-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Snorm: return "48-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uscaled: return "48-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sscaled: return "48-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uint: return "48-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sint: return "48-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sfloat: return "48-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Unorm: return "64-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Snorm: return "64-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uscaled: return "64-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sscaled: return "64-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uint: return "64-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sint: return "64-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sfloat: return "64-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR32Uint: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR32Sint: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR32Sfloat: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32Uint: return "64-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32Sint: return "64-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32Sfloat: return "64-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Uint: return "96-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sint: return "96-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sfloat: return "96-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Uint: return "128-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sint: return "128-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sfloat: return "128-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR64Uint: return "64-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR64Sint: return "64-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR64Sfloat: return "64-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64Uint: return "128-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64Sint: return "128-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64Sfloat: return "128-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Uint: return "192-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sint: return "192-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sfloat: return "192-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Uint: return "256-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sint: return "256-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sfloat: return "256-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eB10G11R11UfloatPack32: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eE5B9G9R9UfloatPack32: return "32-bit";
+      // Depth/stencil formats each form their own class.
+      case VULKAN_HPP_NAMESPACE::Format::eD16Unorm: return "D16";
+      case VULKAN_HPP_NAMESPACE::Format::eX8D24UnormPack32: return "D24";
+      case VULKAN_HPP_NAMESPACE::Format::eD32Sfloat: return "D32";
+      case VULKAN_HPP_NAMESPACE::Format::eS8Uint: return "S8";
+      case VULKAN_HPP_NAMESPACE::Format::eD16UnormS8Uint: return "D16S8";
+      case VULKAN_HPP_NAMESPACE::Format::eD24UnormS8Uint: return "D24S8";
+      case VULKAN_HPP_NAMESPACE::Format::eD32SfloatS8Uint: return "D32S8";
+      // Block-compressed formats: Unorm/Srgb (or Unorm/Snorm, Ufloat/Sfloat)
+      // siblings share a class named after the compression scheme.
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock: return "BC1_RGB";
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock: return "BC1_RGB";
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock: return "BC1_RGBA";
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock: return "BC1_RGBA";
+      case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock: return "BC2";
+      case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock: return "BC2";
+      case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock: return "BC3";
+      case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock: return "BC3";
+      case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock: return "BC4";
+      case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock: return "BC4";
+      case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock: return "BC5";
+      case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock: return "BC5";
+      case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock: return "BC6H";
+      case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock: return "BC6H";
+      case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock: return "BC7";
+      case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock: return "BC7";
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock: return "ETC2_RGB";
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock: return "ETC2_RGB";
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock: return "ETC2_RGBA";
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock: return "ETC2_RGBA";
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock: return "ETC2_EAC_RGBA";
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock: return "ETC2_EAC_RGBA";
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11UnormBlock: return "EAC_R";
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11SnormBlock: return "EAC_R";
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11G11UnormBlock: return "EAC_RG";
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11G11SnormBlock: return "EAC_RG";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock: return "ASTC_4x4";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock: return "ASTC_4x4";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock: return "ASTC_5x4";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock: return "ASTC_5x4";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock: return "ASTC_5x5";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock: return "ASTC_5x5";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock: return "ASTC_6x5";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock: return "ASTC_6x5";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock: return "ASTC_6x6";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock: return "ASTC_6x6";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock: return "ASTC_8x5";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock: return "ASTC_8x5";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock: return "ASTC_8x6";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock: return "ASTC_8x6";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock: return "ASTC_8x8";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock: return "ASTC_8x8";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock: return "ASTC_10x5";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock: return "ASTC_10x5";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock: return "ASTC_10x6";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock: return "ASTC_10x6";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock: return "ASTC_10x8";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock: return "ASTC_10x8";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock: return "ASTC_10x10";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock: return "ASTC_10x10";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock: return "ASTC_12x10";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock: return "ASTC_12x10";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock: return "ASTC_12x12";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock: return "ASTC_12x12";
+      // Packed 4:2:2 and multi-planar (YCbCr-style) formats: the class string
+      // encodes component width, plane count and chroma subsampling.
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8G8R8422Unorm: return "32-bit G8B8G8R8";
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8G8422Unorm: return "32-bit B8G8R8G8";
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm: return "8-bit 3-plane 420";
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm: return "8-bit 2-plane 420";
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm: return "8-bit 3-plane 422";
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm: return "8-bit 2-plane 422";
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm: return "8-bit 3-plane 444";
+      case VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16: return "16-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6B10X6A10X6Unorm4Pack16: return "64-bit R10G10B10A10";
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: return "64-bit G10B10G10R10";
+      case VULKAN_HPP_NAMESPACE::Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: return "64-bit B10G10R10G10";
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: return "10-bit 3-plane 420";
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: return "10-bit 2-plane 420";
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: return "10-bit 3-plane 422";
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: return "10-bit 2-plane 422";
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: return "10-bit 3-plane 444";
+      case VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16: return "16-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4B12X4A12X4Unorm4Pack16: return "64-bit R12G12B12A12";
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: return "64-bit G12B12G12R12";
+      case VULKAN_HPP_NAMESPACE::Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: return "64-bit B12G12R12G12";
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: return "12-bit 3-plane 420";
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: return "12-bit 2-plane 420";
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: return "12-bit 3-plane 422";
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: return "12-bit 2-plane 422";
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: return "12-bit 3-plane 444";
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16G16R16422Unorm: return "64-bit G16B16G16R16";
+      case VULKAN_HPP_NAMESPACE::Format::eB16G16R16G16422Unorm: return "64-bit B16G16R16G16";
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm: return "16-bit 3-plane 420";
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm: return "16-bit 2-plane 420";
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm: return "16-bit 3-plane 422";
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm: return "16-bit 2-plane 422";
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm: return "16-bit 3-plane 444";
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm: return "8-bit 2-plane 444";
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: return "10-bit 2-plane 444";
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: return "12-bit 2-plane 444";
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm: return "16-bit 2-plane 444";
+      case VULKAN_HPP_NAMESPACE::Format::eA4R4G4B4UnormPack16: return "16-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eA4B4G4R4UnormPack16: return "16-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock: return "ASTC_4x4";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock: return "ASTC_5x4";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock: return "ASTC_5x5";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock: return "ASTC_6x5";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock: return "ASTC_6x6";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock: return "ASTC_8x5";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock: return "ASTC_8x6";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock: return "ASTC_8x8";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock: return "ASTC_10x5";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock: return "ASTC_10x6";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock: return "ASTC_10x8";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock: return "ASTC_10x10";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock: return "ASTC_12x10";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock: return "ASTC_12x12";
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: return "PVRTC1_2BPP";
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG: return "PVRTC1_4BPP";
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG: return "PVRTC2_2BPP";
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG: return "PVRTC2_4BPP";
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG: return "PVRTC1_2BPP";
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG: return "PVRTC1_4BPP";
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return "PVRTC2_2BPP";
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return "PVRTC2_4BPP";
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16S105NV: return "32-bit";
+
+      // Any format not enumerated above is a programming error: assert in
+      // debug builds, return an empty string otherwise.
+      default : VULKAN_HPP_ASSERT( false ); return "";
+    }
+  }
+
+  // The number of bits in this component, if not compressed, otherwise 0.
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t componentBits( VULKAN_HPP_NAMESPACE::Format format, uint8_t component )
+  {
+    switch( format )
+    {
+      case VULKAN_HPP_NAMESPACE::Format::eR4G4UnormPack8:
+        switch( component )
+        {
+          case 0: return 4;
+          case 1: return 4;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR4G4B4A4UnormPack16:
+        switch( component )
+        {
+          case 0: return 4;
+          case 1: return 4;
+          case 2: return 4;
+          case 3: return 4;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB4G4R4A4UnormPack16:
+        switch( component )
+        {
+          case 0: return 4;
+          case 1: return 4;
+          case 2: return 4;
+          case 3: return 4;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR5G6B5UnormPack16:
+        switch( component )
+        {
+          case 0: return 5;
+          case 1: return 6;
+          case 2: return 5;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB5G6R5UnormPack16:
+        switch( component )
+        {
+          case 0: return 5;
+          case 1: return 6;
+          case 2: return 5;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR5G5B5A1UnormPack16:
+        switch( component )
+        {
+          case 0: return 5;
+          case 1: return 5;
+          case 2: return 5;
+          case 3: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB5G5R5A1UnormPack16:
+        switch( component )
+        {
+          case 0: return 5;
+          case 1: return 5;
+          case 2: return 5;
+          case 3: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA1R5G5B5UnormPack16:
+        switch( component )
+        {
+          case 0: return 1;
+          case 1: return 5;
+          case 2: return 5;
+          case 3: return 5;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8Unorm:
+        switch( component )
+        {
+          case 0: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8Snorm:
+        switch( component )
+        {
+          case 0: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8Uscaled:
+        switch( component )
+        {
+          case 0: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8Sscaled:
+        switch( component )
+        {
+          case 0: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8Uint:
+        switch( component )
+        {
+          case 0: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8Sint:
+        switch( component )
+        {
+          case 0: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8Srgb:
+        switch( component )
+        {
+          case 0: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Snorm:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Uscaled:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Sscaled:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Uint:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Sint:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Srgb:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Unorm:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Snorm:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uscaled:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sscaled:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uint:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sint:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Srgb:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Unorm:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Snorm:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uscaled:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sscaled:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uint:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sint:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Srgb:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Unorm:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          case 3: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Snorm:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          case 3: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uscaled:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          case 3: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sscaled:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          case 3: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uint:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          case 3: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sint:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          case 3: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Srgb:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          case 3: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Unorm:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          case 3: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Snorm:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          case 3: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uscaled:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          case 3: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sscaled:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          case 3: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uint:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          case 3: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sint:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          case 3: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Srgb:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          case 3: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UnormPack32:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          case 3: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SnormPack32:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          case 3: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UscaledPack32:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          case 3: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SscaledPack32:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          case 3: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UintPack32:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          case 3: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SintPack32:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          case 3: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SrgbPack32:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          case 3: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UnormPack32:
+        switch( component )
+        {
+          case 0: return 2;
+          case 1: return 10;
+          case 2: return 10;
+          case 3: return 10;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SnormPack32:
+        switch( component )
+        {
+          case 0: return 2;
+          case 1: return 10;
+          case 2: return 10;
+          case 3: return 10;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UscaledPack32:
+        switch( component )
+        {
+          case 0: return 2;
+          case 1: return 10;
+          case 2: return 10;
+          case 3: return 10;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SscaledPack32:
+        switch( component )
+        {
+          case 0: return 2;
+          case 1: return 10;
+          case 2: return 10;
+          case 3: return 10;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UintPack32:
+        switch( component )
+        {
+          case 0: return 2;
+          case 1: return 10;
+          case 2: return 10;
+          case 3: return 10;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SintPack32:
+        switch( component )
+        {
+          case 0: return 2;
+          case 1: return 10;
+          case 2: return 10;
+          case 3: return 10;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UnormPack32:
+        switch( component )
+        {
+          case 0: return 2;
+          case 1: return 10;
+          case 2: return 10;
+          case 3: return 10;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SnormPack32:
+        switch( component )
+        {
+          case 0: return 2;
+          case 1: return 10;
+          case 2: return 10;
+          case 3: return 10;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UscaledPack32:
+        switch( component )
+        {
+          case 0: return 2;
+          case 1: return 10;
+          case 2: return 10;
+          case 3: return 10;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SscaledPack32:
+        switch( component )
+        {
+          case 0: return 2;
+          case 1: return 10;
+          case 2: return 10;
+          case 3: return 10;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UintPack32:
+        switch( component )
+        {
+          case 0: return 2;
+          case 1: return 10;
+          case 2: return 10;
+          case 3: return 10;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SintPack32:
+        switch( component )
+        {
+          case 0: return 2;
+          case 1: return 10;
+          case 2: return 10;
+          case 3: return 10;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16Unorm:
+        switch( component )
+        {
+          case 0: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16Snorm:
+        switch( component )
+        {
+          case 0: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16Uscaled:
+        switch( component )
+        {
+          case 0: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16Sscaled:
+        switch( component )
+        {
+          case 0: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16Uint:
+        switch( component )
+        {
+          case 0: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16Sint:
+        switch( component )
+        {
+          case 0: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16Sfloat:
+        switch( component )
+        {
+          case 0: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm:
+        switch( component )
+        {
+          case 0: return 16;
+          case 1: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Snorm:
+        switch( component )
+        {
+          case 0: return 16;
+          case 1: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Uscaled:
+        switch( component )
+        {
+          case 0: return 16;
+          case 1: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Sscaled:
+        switch( component )
+        {
+          case 0: return 16;
+          case 1: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Uint:
+        switch( component )
+        {
+          case 0: return 16;
+          case 1: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Sint:
+        switch( component )
+        {
+          case 0: return 16;
+          case 1: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfloat:
+        switch( component )
+        {
+          case 0: return 16;
+          case 1: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Unorm:
+        switch( component )
+        {
+          case 0: return 16;
+          case 1: return 16;
+          case 2: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Snorm:
+        switch( component )
+        {
+          case 0: return 16;
+          case 1: return 16;
+          case 2: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uscaled:
+        switch( component )
+        {
+          case 0: return 16;
+          case 1: return 16;
+          case 2: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sscaled:
+        switch( component )
+        {
+          case 0: return 16;
+          case 1: return 16;
+          case 2: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uint:
+        switch( component )
+        {
+          case 0: return 16;
+          case 1: return 16;
+          case 2: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sint:
+        switch( component )
+        {
+          case 0: return 16;
+          case 1: return 16;
+          case 2: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sfloat:
+        switch( component )
+        {
+          case 0: return 16;
+          case 1: return 16;
+          case 2: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Unorm:
+        switch( component )
+        {
+          case 0: return 16;
+          case 1: return 16;
+          case 2: return 16;
+          case 3: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Snorm:
+        switch( component )
+        {
+          case 0: return 16;
+          case 1: return 16;
+          case 2: return 16;
+          case 3: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uscaled:
+        switch( component )
+        {
+          case 0: return 16;
+          case 1: return 16;
+          case 2: return 16;
+          case 3: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sscaled:
+        switch( component )
+        {
+          case 0: return 16;
+          case 1: return 16;
+          case 2: return 16;
+          case 3: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uint:
+        switch( component )
+        {
+          case 0: return 16;
+          case 1: return 16;
+          case 2: return 16;
+          case 3: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sint:
+        switch( component )
+        {
+          case 0: return 16;
+          case 1: return 16;
+          case 2: return 16;
+          case 3: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sfloat:
+        switch( component )
+        {
+          case 0: return 16;
+          case 1: return 16;
+          case 2: return 16;
+          case 3: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32Uint:
+        switch( component )
+        {
+          case 0: return 32;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32Sint:
+        switch( component )
+        {
+          case 0: return 32;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32Sfloat:
+        switch( component )
+        {
+          case 0: return 32;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32Uint:
+        switch( component )
+        {
+          case 0: return 32;
+          case 1: return 32;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32Sint:
+        switch( component )
+        {
+          case 0: return 32;
+          case 1: return 32;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32Sfloat:
+        switch( component )
+        {
+          case 0: return 32;
+          case 1: return 32;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Uint:
+        switch( component )
+        {
+          case 0: return 32;
+          case 1: return 32;
+          case 2: return 32;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sint:
+        switch( component )
+        {
+          case 0: return 32;
+          case 1: return 32;
+          case 2: return 32;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sfloat:
+        switch( component )
+        {
+          case 0: return 32;
+          case 1: return 32;
+          case 2: return 32;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Uint:
+        switch( component )
+        {
+          case 0: return 32;
+          case 1: return 32;
+          case 2: return 32;
+          case 3: return 32;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sint:
+        switch( component )
+        {
+          case 0: return 32;
+          case 1: return 32;
+          case 2: return 32;
+          case 3: return 32;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sfloat:
+        switch( component )
+        {
+          case 0: return 32;
+          case 1: return 32;
+          case 2: return 32;
+          case 3: return 32;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64Uint:
+        switch( component )
+        {
+          case 0: return 64;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64Sint:
+        switch( component )
+        {
+          case 0: return 64;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64Sfloat:
+        switch( component )
+        {
+          case 0: return 64;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64Uint:
+        switch( component )
+        {
+          case 0: return 64;
+          case 1: return 64;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64Sint:
+        switch( component )
+        {
+          case 0: return 64;
+          case 1: return 64;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64Sfloat:
+        switch( component )
+        {
+          case 0: return 64;
+          case 1: return 64;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Uint:
+        switch( component )
+        {
+          case 0: return 64;
+          case 1: return 64;
+          case 2: return 64;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sint:
+        switch( component )
+        {
+          case 0: return 64;
+          case 1: return 64;
+          case 2: return 64;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sfloat:
+        switch( component )
+        {
+          case 0: return 64;
+          case 1: return 64;
+          case 2: return 64;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Uint:
+        switch( component )
+        {
+          case 0: return 64;
+          case 1: return 64;
+          case 2: return 64;
+          case 3: return 64;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sint:
+        switch( component )
+        {
+          case 0: return 64;
+          case 1: return 64;
+          case 2: return 64;
+          case 3: return 64;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sfloat:
+        switch( component )
+        {
+          case 0: return 64;
+          case 1: return 64;
+          case 2: return 64;
+          case 3: return 64;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB10G11R11UfloatPack32:
+        switch( component )
+        {
+          case 0: return 10;
+          case 1: return 11;
+          case 2: return 11;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eE5B9G9R9UfloatPack32:
+        switch( component )
+        {
+          case 0: return 9;
+          case 1: return 9;
+          case 2: return 9;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eD16Unorm:
+        switch( component )
+        {
+          case 0: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eX8D24UnormPack32:
+        switch( component )
+        {
+          case 0: return 24;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eD32Sfloat:
+        switch( component )
+        {
+          case 0: return 32;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eS8Uint:
+        switch( component )
+        {
+          case 0: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eD16UnormS8Uint:
+        switch( component )
+        {
+          case 0: return 16;
+          case 1: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eD24UnormS8Uint:
+        switch( component )
+        {
+          case 0: return 24;
+          case 1: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eD32SfloatS8Uint:
+        switch( component )
+        {
+          case 0: return 32;
+          case 1: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11UnormBlock:
+        switch( component )
+        {
+          case 0: return 11;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11SnormBlock:
+        switch( component )
+        {
+          case 0: return 11;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11G11UnormBlock:
+        switch( component )
+        {
+          case 0: return 11;
+          case 1: return 11;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11G11SnormBlock:
+        switch( component )
+        {
+          case 0: return 11;
+          case 1: return 11;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8G8R8422Unorm:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          case 3: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8G8422Unorm:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          case 3: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16:
+        switch( component )
+        {
+          case 0: return 10;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16:
+        switch( component )
+        {
+          case 0: return 10;
+          case 1: return 10;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6B10X6A10X6Unorm4Pack16:
+        switch( component )
+        {
+          case 0: return 10;
+          case 1: return 10;
+          case 2: return 10;
+          case 3: return 10;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16:
+        switch( component )
+        {
+          case 0: return 10;
+          case 1: return 10;
+          case 2: return 10;
+          case 3: return 10;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16:
+        switch( component )
+        {
+          case 0: return 10;
+          case 1: return 10;
+          case 2: return 10;
+          case 3: return 10;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return 10;
+          case 1: return 10;
+          case 2: return 10;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return 10;
+          case 1: return 10;
+          case 2: return 10;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return 10;
+          case 1: return 10;
+          case 2: return 10;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return 10;
+          case 1: return 10;
+          case 2: return 10;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return 10;
+          case 1: return 10;
+          case 2: return 10;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16:
+        switch( component )
+        {
+          case 0: return 12;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16:
+        switch( component )
+        {
+          case 0: return 12;
+          case 1: return 12;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4B12X4A12X4Unorm4Pack16:
+        switch( component )
+        {
+          case 0: return 12;
+          case 1: return 12;
+          case 2: return 12;
+          case 3: return 12;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16:
+        switch( component )
+        {
+          case 0: return 12;
+          case 1: return 12;
+          case 2: return 12;
+          case 3: return 12;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16:
+        switch( component )
+        {
+          case 0: return 12;
+          case 1: return 12;
+          case 2: return 12;
+          case 3: return 12;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return 12;
+          case 1: return 12;
+          case 2: return 12;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return 12;
+          case 1: return 12;
+          case 2: return 12;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return 12;
+          case 1: return 12;
+          case 2: return 12;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return 12;
+          case 1: return 12;
+          case 2: return 12;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return 12;
+          case 1: return 12;
+          case 2: return 12;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16G16R16422Unorm:
+        switch( component )
+        {
+          case 0: return 16;
+          case 1: return 16;
+          case 2: return 16;
+          case 3: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB16G16R16G16422Unorm:
+        switch( component )
+        {
+          case 0: return 16;
+          case 1: return 16;
+          case 2: return 16;
+          case 3: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm:
+        switch( component )
+        {
+          case 0: return 16;
+          case 1: return 16;
+          case 2: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm:
+        switch( component )
+        {
+          case 0: return 16;
+          case 1: return 16;
+          case 2: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm:
+        switch( component )
+        {
+          case 0: return 16;
+          case 1: return 16;
+          case 2: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm:
+        switch( component )
+        {
+          case 0: return 16;
+          case 1: return 16;
+          case 2: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm:
+        switch( component )
+        {
+          case 0: return 16;
+          case 1: return 16;
+          case 2: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return 10;
+          case 1: return 10;
+          case 2: return 10;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return 12;
+          case 1: return 12;
+          case 2: return 12;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm:
+        switch( component )
+        {
+          case 0: return 16;
+          case 1: return 16;
+          case 2: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA4R4G4B4UnormPack16:
+        switch( component )
+        {
+          case 0: return 4;
+          case 1: return 4;
+          case 2: return 4;
+          case 3: return 4;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA4B4G4R4UnormPack16:
+        switch( component )
+        {
+          case 0: return 4;
+          case 1: return 4;
+          case 2: return 4;
+          case 3: return 4;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16S105NV:
+        switch( component )
+        {
+          case 0: return 16;
+          case 1: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+
+      default: return 0;
+    }
+  }
+
+  // The number of components (channels) of this format; 0 for eUndefined / unrecognized formats.
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t componentCount( VULKAN_HPP_NAMESPACE::Format format )
+  {
+    switch( format )
+    {
+      case VULKAN_HPP_NAMESPACE::Format::eR4G4UnormPack8: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR4G4B4A4UnormPack16: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eB4G4R4A4UnormPack16: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR5G6B5UnormPack16: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eB5G6R5UnormPack16: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR5G5B5A1UnormPack16: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eB5G5R5A1UnormPack16: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA1R5G5B5UnormPack16: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR8Unorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8Snorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8Uscaled: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8Sscaled: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8Uint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8Sint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8Srgb: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Snorm: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Uscaled: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Sscaled: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Uint: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Sint: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Srgb: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Snorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uscaled: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sscaled: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uint: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sint: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Srgb: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Snorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uscaled: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sscaled: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uint: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sint: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Srgb: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Unorm: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Snorm: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uscaled: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sscaled: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uint: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sint: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Srgb: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Unorm: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Snorm: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uscaled: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sscaled: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uint: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sint: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Srgb: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UnormPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SnormPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UscaledPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SscaledPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UintPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SintPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SrgbPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UnormPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SnormPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UscaledPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SscaledPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UintPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SintPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UnormPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SnormPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UscaledPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SscaledPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UintPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SintPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR16Unorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16Snorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16Uscaled: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16Sscaled: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16Uint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16Sint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16Sfloat: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Snorm: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Uscaled: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Sscaled: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Uint: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Sint: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfloat: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Snorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uscaled: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sscaled: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uint: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sint: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sfloat: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Unorm: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Snorm: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uscaled: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sscaled: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uint: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sint: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sfloat: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR32Uint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR32Sint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR32Sfloat: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32Uint: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32Sint: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32Sfloat: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Uint: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sint: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sfloat: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Uint: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sint: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sfloat: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR64Uint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR64Sint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR64Sfloat: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64Uint: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64Sint: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64Sfloat: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Uint: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sint: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sfloat: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Uint: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sint: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sfloat: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eB10G11R11UfloatPack32: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eE5B9G9R9UfloatPack32: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eD16Unorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eX8D24UnormPack32: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eD32Sfloat: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eS8Uint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eD16UnormS8Uint: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eD24UnormS8Uint: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eD32SfloatS8Uint: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11UnormBlock: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11SnormBlock: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11G11UnormBlock: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11G11SnormBlock: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8G8R8422Unorm: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8G8422Unorm: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6B10X6A10X6Unorm4Pack16: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4B12X4A12X4Unorm4Pack16: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16G16R16422Unorm: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eB16G16R16G16422Unorm: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eA4R4G4B4UnormPack16: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA4B4G4R4UnormPack16: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16S105NV: return 2;
+
+      default: return 0; // unrecognized format
+    }
+  }
+
+  // The name of the component
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 char const * componentName( VULKAN_HPP_NAMESPACE::Format format, uint8_t component )
+  {
+    switch( format )
+    {
+      case VULKAN_HPP_NAMESPACE::Format::eR4G4UnormPack8:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR4G4B4A4UnormPack16:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB4G4R4A4UnormPack16:
+        switch( component )
+        {
+          case 0: return "B";
+          case 1: return "G";
+          case 2: return "R";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR5G6B5UnormPack16:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB5G6R5UnormPack16:
+        switch( component )
+        {
+          case 0: return "B";
+          case 1: return "G";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR5G5B5A1UnormPack16:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB5G5R5A1UnormPack16:
+        switch( component )
+        {
+          case 0: return "B";
+          case 1: return "R";
+          case 2: return "G";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA1R5G5B5UnormPack16:
+        switch( component )
+        {
+          case 0: return "A";
+          case 1: return "R";
+          case 2: return "G";
+          case 3: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8Unorm:
+        switch( component )
+        {
+          case 0: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8Snorm:
+        switch( component )
+        {
+          case 0: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8Uscaled:
+        switch( component )
+        {
+          case 0: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8Sscaled:
+        switch( component )
+        {
+          case 0: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8Uint:
+        switch( component )
+        {
+          case 0: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8Sint:
+        switch( component )
+        {
+          case 0: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8Srgb:
+        switch( component )
+        {
+          case 0: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Snorm:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Uscaled:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Sscaled:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Uint:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Sint:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Srgb:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Unorm:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Snorm:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uscaled:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sscaled:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uint:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sint:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Srgb:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Unorm:
+        switch( component )
+        {
+          case 0: return "B";
+          case 1: return "G";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Snorm:
+        switch( component )
+        {
+          case 0: return "B";
+          case 1: return "G";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uscaled:
+        switch( component )
+        {
+          case 0: return "B";
+          case 1: return "G";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sscaled:
+        switch( component )
+        {
+          case 0: return "B";
+          case 1: return "G";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uint:
+        switch( component )
+        {
+          case 0: return "B";
+          case 1: return "G";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sint:
+        switch( component )
+        {
+          case 0: return "B";
+          case 1: return "G";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Srgb:
+        switch( component )
+        {
+          case 0: return "B";
+          case 1: return "G";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Unorm:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Snorm:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uscaled:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sscaled:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uint:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sint:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Srgb:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Unorm:
+        switch( component )
+        {
+          case 0: return "B";
+          case 1: return "G";
+          case 2: return "R";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Snorm:
+        switch( component )
+        {
+          case 0: return "B";
+          case 1: return "G";
+          case 2: return "R";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uscaled:
+        switch( component )
+        {
+          case 0: return "B";
+          case 1: return "G";
+          case 2: return "R";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sscaled:
+        switch( component )
+        {
+          case 0: return "B";
+          case 1: return "G";
+          case 2: return "R";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uint:
+        switch( component )
+        {
+          case 0: return "B";
+          case 1: return "G";
+          case 2: return "R";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sint:
+        switch( component )
+        {
+          case 0: return "B";
+          case 1: return "G";
+          case 2: return "R";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Srgb:
+        switch( component )
+        {
+          case 0: return "B";
+          case 1: return "G";
+          case 2: return "R";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UnormPack32:
+        switch( component )
+        {
+          case 0: return "A";
+          case 1: return "B";
+          case 2: return "G";
+          case 3: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SnormPack32:
+        switch( component )
+        {
+          case 0: return "A";
+          case 1: return "B";
+          case 2: return "G";
+          case 3: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UscaledPack32:
+        switch( component )
+        {
+          case 0: return "A";
+          case 1: return "B";
+          case 2: return "G";
+          case 3: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SscaledPack32:
+        switch( component )
+        {
+          case 0: return "A";
+          case 1: return "B";
+          case 2: return "G";
+          case 3: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UintPack32:
+        switch( component )
+        {
+          case 0: return "A";
+          case 1: return "B";
+          case 2: return "G";
+          case 3: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SintPack32:
+        switch( component )
+        {
+          case 0: return "A";
+          case 1: return "B";
+          case 2: return "G";
+          case 3: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SrgbPack32:
+        switch( component )
+        {
+          case 0: return "A";
+          case 1: return "B";
+          case 2: return "G";
+          case 3: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UnormPack32:
+        switch( component )
+        {
+          case 0: return "A";
+          case 1: return "R";
+          case 2: return "G";
+          case 3: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SnormPack32:
+        switch( component )
+        {
+          case 0: return "A";
+          case 1: return "R";
+          case 2: return "G";
+          case 3: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UscaledPack32:
+        switch( component )
+        {
+          case 0: return "A";
+          case 1: return "R";
+          case 2: return "G";
+          case 3: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SscaledPack32:
+        switch( component )
+        {
+          case 0: return "A";
+          case 1: return "R";
+          case 2: return "G";
+          case 3: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UintPack32:
+        switch( component )
+        {
+          case 0: return "A";
+          case 1: return "R";
+          case 2: return "G";
+          case 3: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SintPack32:
+        switch( component )
+        {
+          case 0: return "A";
+          case 1: return "R";
+          case 2: return "G";
+          case 3: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UnormPack32:
+        switch( component )
+        {
+          case 0: return "A";
+          case 1: return "B";
+          case 2: return "G";
+          case 3: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SnormPack32:
+        switch( component )
+        {
+          case 0: return "A";
+          case 1: return "B";
+          case 2: return "G";
+          case 3: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UscaledPack32:
+        switch( component )
+        {
+          case 0: return "A";
+          case 1: return "B";
+          case 2: return "G";
+          case 3: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SscaledPack32:
+        switch( component )
+        {
+          case 0: return "A";
+          case 1: return "B";
+          case 2: return "G";
+          case 3: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UintPack32:
+        switch( component )
+        {
+          case 0: return "A";
+          case 1: return "B";
+          case 2: return "G";
+          case 3: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SintPack32:
+        switch( component )
+        {
+          case 0: return "A";
+          case 1: return "B";
+          case 2: return "G";
+          case 3: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16Unorm:
+        switch( component )
+        {
+          case 0: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16Snorm:
+        switch( component )
+        {
+          case 0: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16Uscaled:
+        switch( component )
+        {
+          case 0: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16Sscaled:
+        switch( component )
+        {
+          case 0: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16Uint:
+        switch( component )
+        {
+          case 0: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16Sint:
+        switch( component )
+        {
+          case 0: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16Sfloat:
+        switch( component )
+        {
+          case 0: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Snorm:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Uscaled:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Sscaled:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Uint:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Sint:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfloat:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Unorm:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Snorm:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uscaled:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sscaled:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uint:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sint:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sfloat:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Unorm:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Snorm:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uscaled:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sscaled:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uint:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sint:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sfloat:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32Uint:
+        switch( component )
+        {
+          case 0: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32Sint:
+        switch( component )
+        {
+          case 0: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32Sfloat:
+        switch( component )
+        {
+          case 0: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32Uint:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32Sint:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32Sfloat:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Uint:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sint:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sfloat:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Uint:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sint:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sfloat:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64Uint:
+        switch( component )
+        {
+          case 0: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64Sint:
+        switch( component )
+        {
+          case 0: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64Sfloat:
+        switch( component )
+        {
+          case 0: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64Uint:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";  // second component of R64G64 is G (was wrongly "B")
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64Sint:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";  // second component of R64G64 is G (was wrongly "B")
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64Sfloat:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";  // second component of R64G64 is G (was wrongly "B")
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Uint:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sint:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sfloat:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Uint:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sint:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sfloat:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB10G11R11UfloatPack32:
+        switch( component )
+        {
+          case 0: return "B";
+          case 1: return "G";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eE5B9G9R9UfloatPack32:
+        switch( component )
+        {
+          case 0: return "B";
+          case 1: return "G";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eD16Unorm:
+        switch( component )
+        {
+          case 0: return "D";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eX8D24UnormPack32:
+        switch( component )
+        {
+          case 0: return "D";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eD32Sfloat:
+        switch( component )
+        {
+          case 0: return "D";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eS8Uint:
+        switch( component )
+        {
+          case 0: return "S";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eD16UnormS8Uint:
+        switch( component )
+        {
+          case 0: return "D";
+          case 1: return "S";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eD24UnormS8Uint:
+        switch( component )
+        {
+          case 0: return "D";
+          case 1: return "S";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eD32SfloatS8Uint:
+        switch( component )
+        {
+          case 0: return "D";
+          case 1: return "S";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11UnormBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11SnormBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11G11UnormBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11G11SnormBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8G8R8422Unorm:
+        switch( component )
+        {
+          case 0: return "G";
+          case 1: return "B";
+          case 2: return "G";
+          case 3: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8G8422Unorm:
+        switch( component )
+        {
+          case 0: return "B";
+          case 1: return "G";
+          case 2: return "R";
+          case 3: return "G";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm:
+        switch( component )
+        {
+          case 0: return "G";
+          case 1: return "B";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm:
+        switch( component )
+        {
+          case 0: return "G";
+          case 1: return "B";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm:
+        switch( component )
+        {
+          case 0: return "G";
+          case 1: return "B";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm:
+        switch( component )
+        {
+          case 0: return "G";
+          case 1: return "B";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm:
+        switch( component )
+        {
+          case 0: return "G";
+          case 1: return "B";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16:
+        switch( component )
+        {
+          case 0: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6B10X6A10X6Unorm4Pack16:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16:
+        switch( component )
+        {
+          case 0: return "G";
+          case 1: return "B";
+          case 2: return "G";
+          case 3: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16:
+        switch( component )
+        {
+          case 0: return "B";
+          case 1: return "G";
+          case 2: return "R";
+          case 3: return "G";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return "G";
+          case 1: return "B";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return "G";
+          case 1: return "B";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return "G";
+          case 1: return "B";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return "G";
+          case 1: return "B";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return "G";
+          case 1: return "B";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16:
+        switch( component )
+        {
+          case 0: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4B12X4A12X4Unorm4Pack16:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16:
+        switch( component )
+        {
+          case 0: return "G";
+          case 1: return "B";
+          case 2: return "G";
+          case 3: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16:
+        switch( component )
+        {
+          case 0: return "B";
+          case 1: return "G";
+          case 2: return "R";
+          case 3: return "G";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return "G";
+          case 1: return "B";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return "G";
+          case 1: return "B";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return "G";
+          case 1: return "B";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return "G";
+          case 1: return "B";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return "G";
+          case 1: return "B";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16G16R16422Unorm:
+        switch( component )
+        {
+          case 0: return "G";
+          case 1: return "B";
+          case 2: return "G";
+          case 3: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB16G16R16G16422Unorm:
+        switch( component )
+        {
+          case 0: return "B";
+          case 1: return "G";
+          case 2: return "R";
+          case 3: return "G";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm:
+        switch( component )
+        {
+          case 0: return "G";
+          case 1: return "B";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm:
+        switch( component )
+        {
+          case 0: return "G";
+          case 1: return "B";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm:
+        switch( component )
+        {
+          case 0: return "G";
+          case 1: return "B";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm:
+        switch( component )
+        {
+          case 0: return "G";
+          case 1: return "B";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm:
+        switch( component )
+        {
+          case 0: return "G";
+          case 1: return "B";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm:
+        switch( component )
+        {
+          case 0: return "G";
+          case 1: return "B";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return "G";
+          case 1: return "B";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return "G";
+          case 1: return "B";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm:
+        switch( component )
+        {
+          case 0: return "G";
+          case 1: return "B";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA4R4G4B4UnormPack16:
+        switch( component )
+        {
+          case 0: return "A";
+          case 1: return "R";
+          case 2: return "G";
+          case 3: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA4B4G4R4UnormPack16:
+        switch( component )
+        {
+          case 0: return "A";
+          case 1: return "B";
+          case 2: return "G";
+          case 3: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16S105NV:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+
+      default: return "";
+    }
+  }
+
+  // The numeric format of the component
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 char const * componentNumericFormat( VULKAN_HPP_NAMESPACE::Format format, uint8_t component )
+  {
+    switch( format )
+    {
+      case VULKAN_HPP_NAMESPACE::Format::eR4G4UnormPack8:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR4G4B4A4UnormPack16:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB4G4R4A4UnormPack16:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR5G6B5UnormPack16:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB5G6R5UnormPack16:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR5G5B5A1UnormPack16:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB5G5R5A1UnormPack16:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA1R5G5B5UnormPack16:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8Unorm:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8Snorm:
+        switch( component )
+        {
+          case 0: return "SNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8Uscaled:
+        switch( component )
+        {
+          case 0: return "USCALED";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8Sscaled:
+        switch( component )
+        {
+          case 0: return "SSCALED";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8Uint:
+        switch( component )
+        {
+          case 0: return "UINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8Sint:
+        switch( component )
+        {
+          case 0: return "SINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8Srgb:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Snorm:
+        switch( component )
+        {
+          case 0: return "SNORM";
+          case 1: return "SNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Uscaled:
+        switch( component )
+        {
+          case 0: return "USCALED";
+          case 1: return "USCALED";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Sscaled:
+        switch( component )
+        {
+          case 0: return "SSCALED";
+          case 1: return "SSCALED";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Uint:
+        switch( component )
+        {
+          case 0: return "UINT";
+          case 1: return "UINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Sint:
+        switch( component )
+        {
+          case 0: return "SINT";
+          case 1: return "SINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Srgb:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Unorm:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Snorm:
+        switch( component )
+        {
+          case 0: return "SNORM";
+          case 1: return "SNORM";
+          case 2: return "SNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uscaled:
+        switch( component )
+        {
+          case 0: return "USCALED";
+          case 1: return "USCALED";
+          case 2: return "USCALED";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sscaled:
+        switch( component )
+        {
+          case 0: return "SSCALED";
+          case 1: return "SSCALED";
+          case 2: return "SSCALED";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uint:
+        switch( component )
+        {
+          case 0: return "UINT";
+          case 1: return "UINT";
+          case 2: return "UINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sint:
+        switch( component )
+        {
+          case 0: return "SINT";
+          case 1: return "SINT";
+          case 2: return "SINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Srgb:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          case 2: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Unorm:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Snorm:
+        switch( component )
+        {
+          case 0: return "SNORM";
+          case 1: return "SNORM";
+          case 2: return "SNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uscaled:
+        switch( component )
+        {
+          case 0: return "USCALED";
+          case 1: return "USCALED";
+          case 2: return "USCALED";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sscaled:
+        switch( component )
+        {
+          case 0: return "SSCALED";
+          case 1: return "SSCALED";
+          case 2: return "SSCALED";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uint:
+        switch( component )
+        {
+          case 0: return "UINT";
+          case 1: return "UINT";
+          case 2: return "UINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sint:
+        switch( component )
+        {
+          case 0: return "SINT";
+          case 1: return "SINT";
+          case 2: return "SINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Srgb:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          case 2: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Unorm:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Snorm:
+        switch( component )
+        {
+          case 0: return "SNORM";
+          case 1: return "SNORM";
+          case 2: return "SNORM";
+          case 3: return "SNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uscaled:
+        switch( component )
+        {
+          case 0: return "USCALED";
+          case 1: return "USCALED";
+          case 2: return "USCALED";
+          case 3: return "USCALED";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sscaled:
+        switch( component )
+        {
+          case 0: return "SSCALED";
+          case 1: return "SSCALED";
+          case 2: return "SSCALED";
+          case 3: return "SSCALED";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uint:
+        switch( component )
+        {
+          case 0: return "UINT";
+          case 1: return "UINT";
+          case 2: return "UINT";
+          case 3: return "UINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sint:
+        switch( component )
+        {
+          case 0: return "SINT";
+          case 1: return "SINT";
+          case 2: return "SINT";
+          case 3: return "SINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Srgb:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          case 2: return "SRGB";
+          case 3: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Unorm:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Snorm:
+        switch( component )
+        {
+          case 0: return "SNORM";
+          case 1: return "SNORM";
+          case 2: return "SNORM";
+          case 3: return "SNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uscaled:
+        switch( component )
+        {
+          case 0: return "USCALED";
+          case 1: return "USCALED";
+          case 2: return "USCALED";
+          case 3: return "USCALED";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sscaled:
+        switch( component )
+        {
+          case 0: return "SSCALED";
+          case 1: return "SSCALED";
+          case 2: return "SSCALED";
+          case 3: return "SSCALED";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uint:
+        switch( component )
+        {
+          case 0: return "UINT";
+          case 1: return "UINT";
+          case 2: return "UINT";
+          case 3: return "UINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sint:
+        switch( component )
+        {
+          case 0: return "SINT";
+          case 1: return "SINT";
+          case 2: return "SINT";
+          case 3: return "SINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Srgb:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          case 2: return "SRGB";
+          case 3: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UnormPack32:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SnormPack32:
+        switch( component )
+        {
+          case 0: return "SNORM";
+          case 1: return "SNORM";
+          case 2: return "SNORM";
+          case 3: return "SNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UscaledPack32:
+        switch( component )
+        {
+          case 0: return "USCALED";
+          case 1: return "USCALED";
+          case 2: return "USCALED";
+          case 3: return "USCALED";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SscaledPack32:
+        switch( component )
+        {
+          case 0: return "SSCALED";
+          case 1: return "SSCALED";
+          case 2: return "SSCALED";
+          case 3: return "SSCALED";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UintPack32:
+        switch( component )
+        {
+          case 0: return "UINT";
+          case 1: return "UINT";
+          case 2: return "UINT";
+          case 3: return "UINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SintPack32:
+        switch( component )
+        {
+          case 0: return "SINT";
+          case 1: return "SINT";
+          case 2: return "SINT";
+          case 3: return "SINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SrgbPack32:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          case 2: return "SRGB";
+          case 3: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UnormPack32:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SnormPack32:
+        switch( component )
+        {
+          case 0: return "SNORM";
+          case 1: return "SNORM";
+          case 2: return "SNORM";
+          case 3: return "SNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UscaledPack32:
+        switch( component )
+        {
+          case 0: return "USCALED";
+          case 1: return "USCALED";
+          case 2: return "USCALED";
+          case 3: return "USCALED";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SscaledPack32:
+        switch( component )
+        {
+          case 0: return "SSCALED";
+          case 1: return "SSCALED";
+          case 2: return "SSCALED";
+          case 3: return "SSCALED";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UintPack32:
+        switch( component )
+        {
+          case 0: return "UINT";
+          case 1: return "UINT";
+          case 2: return "UINT";
+          case 3: return "UINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SintPack32:
+        switch( component )
+        {
+          case 0: return "SINT";
+          case 1: return "SINT";
+          case 2: return "SINT";
+          case 3: return "SINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UnormPack32:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SnormPack32:
+        switch( component )
+        {
+          case 0: return "SNORM";
+          case 1: return "SNORM";
+          case 2: return "SNORM";
+          case 3: return "SNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UscaledPack32:
+        switch( component )
+        {
+          case 0: return "USCALED";
+          case 1: return "USCALED";
+          case 2: return "USCALED";
+          case 3: return "USCALED";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SscaledPack32:
+        switch( component )
+        {
+          case 0: return "SSCALED";
+          case 1: return "SSCALED";
+          case 2: return "SSCALED";
+          case 3: return "SSCALED";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UintPack32:
+        switch( component )
+        {
+          case 0: return "UINT";
+          case 1: return "UINT";
+          case 2: return "UINT";
+          case 3: return "UINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SintPack32:
+        switch( component )
+        {
+          case 0: return "SINT";
+          case 1: return "SINT";
+          case 2: return "SINT";
+          case 3: return "SINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16Unorm:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16Snorm:
+        switch( component )
+        {
+          case 0: return "SNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16Uscaled:
+        switch( component )
+        {
+          case 0: return "USCALED";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16Sscaled:
+        switch( component )
+        {
+          case 0: return "SSCALED";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16Uint:
+        switch( component )
+        {
+          case 0: return "UINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16Sint:
+        switch( component )
+        {
+          case 0: return "SINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16Sfloat:
+        switch( component )
+        {
+          case 0: return "SFLOAT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Snorm:
+        switch( component )
+        {
+          case 0: return "SNORM";
+          case 1: return "SNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Uscaled:
+        switch( component )
+        {
+          case 0: return "USCALED";
+          case 1: return "USCALED";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Sscaled:
+        switch( component )
+        {
+          case 0: return "SSCALED";
+          case 1: return "SSCALED";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Uint:
+        switch( component )
+        {
+          case 0: return "UINT";
+          case 1: return "UINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Sint:
+        switch( component )
+        {
+          case 0: return "SINT";
+          case 1: return "SINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfloat:
+        switch( component )
+        {
+          case 0: return "SFLOAT";
+          case 1: return "SFLOAT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Unorm:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Snorm:
+        switch( component )
+        {
+          case 0: return "SNORM";
+          case 1: return "SNORM";
+          case 2: return "SNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uscaled:
+        switch( component )
+        {
+          case 0: return "USCALED";
+          case 1: return "USCALED";
+          case 2: return "USCALED";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sscaled:
+        switch( component )
+        {
+          case 0: return "SSCALED";
+          case 1: return "SSCALED";
+          case 2: return "SSCALED";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uint:
+        switch( component )
+        {
+          case 0: return "UINT";
+          case 1: return "UINT";
+          case 2: return "UINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sint:
+        switch( component )
+        {
+          case 0: return "SINT";
+          case 1: return "SINT";
+          case 2: return "SINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sfloat:
+        switch( component )
+        {
+          case 0: return "SFLOAT";
+          case 1: return "SFLOAT";
+          case 2: return "SFLOAT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Unorm:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Snorm:
+        switch( component )
+        {
+          case 0: return "SNORM";
+          case 1: return "SNORM";
+          case 2: return "SNORM";
+          case 3: return "SNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uscaled:
+        switch( component )
+        {
+          case 0: return "USCALED";
+          case 1: return "USCALED";
+          case 2: return "USCALED";
+          case 3: return "USCALED";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sscaled:
+        switch( component )
+        {
+          case 0: return "SSCALED";
+          case 1: return "SSCALED";
+          case 2: return "SSCALED";
+          case 3: return "SSCALED";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uint:
+        switch( component )
+        {
+          case 0: return "UINT";
+          case 1: return "UINT";
+          case 2: return "UINT";
+          case 3: return "UINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sint:
+        switch( component )
+        {
+          case 0: return "SINT";
+          case 1: return "SINT";
+          case 2: return "SINT";
+          case 3: return "SINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sfloat:
+        switch( component )
+        {
+          case 0: return "SFLOAT";
+          case 1: return "SFLOAT";
+          case 2: return "SFLOAT";
+          case 3: return "SFLOAT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32Uint:
+        switch( component )
+        {
+          case 0: return "UINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32Sint:
+        switch( component )
+        {
+          case 0: return "SINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32Sfloat:
+        switch( component )
+        {
+          case 0: return "SFLOAT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32Uint:
+        switch( component )
+        {
+          case 0: return "UINT";
+          case 1: return "UINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32Sint:
+        switch( component )
+        {
+          case 0: return "SINT";
+          case 1: return "SINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32Sfloat:
+        switch( component )
+        {
+          case 0: return "SFLOAT";
+          case 1: return "SFLOAT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Uint:
+        switch( component )
+        {
+          case 0: return "UINT";
+          case 1: return "UINT";
+          case 2: return "UINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sint:
+        switch( component )
+        {
+          case 0: return "SINT";
+          case 1: return "SINT";
+          case 2: return "SINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sfloat:
+        switch( component )
+        {
+          case 0: return "SFLOAT";
+          case 1: return "SFLOAT";
+          case 2: return "SFLOAT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Uint:
+        switch( component )
+        {
+          case 0: return "UINT";
+          case 1: return "UINT";
+          case 2: return "UINT";
+          case 3: return "UINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sint:
+        switch( component )
+        {
+          case 0: return "SINT";
+          case 1: return "SINT";
+          case 2: return "SINT";
+          case 3: return "SINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sfloat:
+        switch( component )
+        {
+          case 0: return "SFLOAT";
+          case 1: return "SFLOAT";
+          case 2: return "SFLOAT";
+          case 3: return "SFLOAT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64Uint:
+        switch( component )
+        {
+          case 0: return "UINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64Sint:
+        switch( component )
+        {
+          case 0: return "SINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64Sfloat:
+        switch( component )
+        {
+          case 0: return "SFLOAT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64Uint:
+        switch( component )
+        {
+          case 0: return "UINT";
+          case 1: return "UINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64Sint:
+        switch( component )
+        {
+          case 0: return "SINT";
+          case 1: return "SINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64Sfloat:
+        switch( component )
+        {
+          case 0: return "SFLOAT";
+          case 1: return "SFLOAT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Uint:
+        switch( component )
+        {
+          case 0: return "UINT";
+          case 1: return "UINT";
+          case 2: return "UINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sint:
+        switch( component )
+        {
+          case 0: return "SINT";
+          case 1: return "SINT";
+          case 2: return "SINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sfloat:
+        switch( component )
+        {
+          case 0: return "SFLOAT";
+          case 1: return "SFLOAT";
+          case 2: return "SFLOAT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Uint:
+        switch( component )
+        {
+          case 0: return "UINT";
+          case 1: return "UINT";
+          case 2: return "UINT";
+          case 3: return "UINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sint:
+        switch( component )
+        {
+          case 0: return "SINT";
+          case 1: return "SINT";
+          case 2: return "SINT";
+          case 3: return "SINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sfloat:
+        switch( component )
+        {
+          case 0: return "SFLOAT";
+          case 1: return "SFLOAT";
+          case 2: return "SFLOAT";
+          case 3: return "SFLOAT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB10G11R11UfloatPack32:
+        switch( component )
+        {
+          case 0: return "UFLOAT";
+          case 1: return "UFLOAT";
+          case 2: return "UFLOAT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eE5B9G9R9UfloatPack32:
+        switch( component )
+        {
+          case 0: return "UFLOAT";
+          case 1: return "UFLOAT";
+          case 2: return "UFLOAT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eD16Unorm:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eX8D24UnormPack32:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eD32Sfloat:
+        switch( component )
+        {
+          case 0: return "SFLOAT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eS8Uint:
+        switch( component )
+        {
+          case 0: return "UINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eD16UnormS8Uint:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eD24UnormS8Uint:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eD32SfloatS8Uint:
+        switch( component )
+        {
+          case 0: return "SFLOAT";
+          case 1: return "UINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          case 2: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          case 2: return "SRGB";
+          case 3: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          case 2: return "SRGB";
+          case 3: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          case 2: return "SRGB";
+          case 3: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock:
+        switch( component )
+        {
+          // BC4_SNORM is a signed-normalized single-channel block format, so its
+          // numeric format is "SNORM" (was incorrectly "SRGB"; cf. eEacR11SnormBlock).
+          case 0: return "SNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock:
+        switch( component )
+        {
+          // BC5_SNORM is a signed-normalized two-channel block format, so both
+          // components are "SNORM" (was incorrectly "SRGB"; cf. eEacR11G11SnormBlock).
+          case 0: return "SNORM";
+          case 1: return "SNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock:
+        switch( component )
+        {
+          case 0: return "UFLOAT";
+          case 1: return "UFLOAT";
+          case 2: return "UFLOAT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock:
+        switch( component )
+        {
+          case 0: return "SFLOAT";
+          case 1: return "SFLOAT";
+          case 2: return "SFLOAT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          case 2: return "SRGB";
+          case 3: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          case 2: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          case 2: return "SRGB";
+          case 3: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          case 2: return "SRGB";
+          case 3: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11UnormBlock:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11SnormBlock:
+        switch( component )
+        {
+          case 0: return "SNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11G11UnormBlock:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11G11SnormBlock:
+        switch( component )
+        {
+          case 0: return "SNORM";
+          case 1: return "SNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          case 2: return "SRGB";
+          case 3: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          case 2: return "SRGB";
+          case 3: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          case 2: return "SRGB";
+          case 3: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          case 2: return "SRGB";
+          case 3: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          case 2: return "SRGB";
+          case 3: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          case 2: return "SRGB";
+          case 3: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          case 2: return "SRGB";
+          case 3: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          case 2: return "SRGB";
+          case 3: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          case 2: return "SRGB";
+          case 3: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          case 2: return "SRGB";
+          case 3: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          case 2: return "SRGB";
+          case 3: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          case 2: return "SRGB";
+          case 3: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          case 2: return "SRGB";
+          case 3: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          case 2: return "SRGB";
+          case 3: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8G8R8422Unorm:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8G8422Unorm:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6B10X6A10X6Unorm4Pack16:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4B12X4A12X4Unorm4Pack16:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16G16R16422Unorm:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB16G16R16G16422Unorm:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA4R4G4B4UnormPack16:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA4B4G4R4UnormPack16:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock:
+        switch( component )
+        {
+          case 0: return "SFLOAT";
+          case 1: return "SFLOAT";
+          case 2: return "SFLOAT";
+          case 3: return "SFLOAT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock:
+        switch( component )
+        {
+          case 0: return "SFLOAT";
+          case 1: return "SFLOAT";
+          case 2: return "SFLOAT";
+          case 3: return "SFLOAT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock:
+        switch( component )
+        {
+          case 0: return "SFLOAT";
+          case 1: return "SFLOAT";
+          case 2: return "SFLOAT";
+          case 3: return "SFLOAT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock:
+        switch( component )
+        {
+          case 0: return "SFLOAT";
+          case 1: return "SFLOAT";
+          case 2: return "SFLOAT";
+          case 3: return "SFLOAT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock:
+        switch( component )
+        {
+          case 0: return "SFLOAT";
+          case 1: return "SFLOAT";
+          case 2: return "SFLOAT";
+          case 3: return "SFLOAT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock:
+        switch( component )
+        {
+          case 0: return "SFLOAT";
+          case 1: return "SFLOAT";
+          case 2: return "SFLOAT";
+          case 3: return "SFLOAT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock:
+        switch( component )
+        {
+          case 0: return "SFLOAT";
+          case 1: return "SFLOAT";
+          case 2: return "SFLOAT";
+          case 3: return "SFLOAT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock:
+        switch( component )
+        {
+          case 0: return "SFLOAT";
+          case 1: return "SFLOAT";
+          case 2: return "SFLOAT";
+          case 3: return "SFLOAT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock:
+        switch( component )
+        {
+          case 0: return "SFLOAT";
+          case 1: return "SFLOAT";
+          case 2: return "SFLOAT";
+          case 3: return "SFLOAT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock:
+        switch( component )
+        {
+          case 0: return "SFLOAT";
+          case 1: return "SFLOAT";
+          case 2: return "SFLOAT";
+          case 3: return "SFLOAT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock:
+        switch( component )
+        {
+          case 0: return "SFLOAT";
+          case 1: return "SFLOAT";
+          case 2: return "SFLOAT";
+          case 3: return "SFLOAT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock:
+        switch( component )
+        {
+          case 0: return "SFLOAT";
+          case 1: return "SFLOAT";
+          case 2: return "SFLOAT";
+          case 3: return "SFLOAT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock:
+        switch( component )
+        {
+          case 0: return "SFLOAT";
+          case 1: return "SFLOAT";
+          case 2: return "SFLOAT";
+          case 3: return "SFLOAT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock:
+        switch( component )
+        {
+          case 0: return "SFLOAT";
+          case 1: return "SFLOAT";
+          case 2: return "SFLOAT";
+          case 3: return "SFLOAT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          case 2: return "SRGB";
+          case 3: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          case 2: return "SRGB";
+          case 3: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          case 2: return "SRGB";
+          case 3: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          case 2: return "SRGB";
+          case 3: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16S105NV:
+        switch( component )
+        {
+          case 0: return "SINT";
+          case 1: return "SINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+
+      default: return "";
+    }
+  }
+
+  // The plane this component lies in.
+  // For the multi-planar YCbCr formats below, maps a component index to the image
+  // plane that stores it: 3-plane formats map components 0/1/2 to planes 0/1/2,
+  // while 2-plane formats store components 1 and 2 together in plane 1.
+  // NOTE(review): component order presumably follows the letters in the format
+  // name (G, B, R) — confirm against the componentName() table above.
+  // Returns 0 for any format not listed (single-plane formats, default case);
+  // asserts (and returns 0) on an out-of-range component for the listed formats.
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t componentPlaneIndex( VULKAN_HPP_NAMESPACE::Format format, uint8_t component )
+  {
+    switch( format )
+    {
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm:
+        switch( component )
+        {
+          case 0: return 0;
+          case 1: return 1;
+          case 2: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm:
+        switch( component )
+        {
+          case 0: return 0;
+          case 1: return 1;
+          case 2: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm:
+        switch( component )
+        {
+          case 0: return 0;
+          case 1: return 1;
+          case 2: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm:
+        switch( component )
+        {
+          case 0: return 0;
+          case 1: return 1;
+          case 2: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm:
+        switch( component )
+        {
+          case 0: return 0;
+          case 1: return 1;
+          case 2: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return 0;
+          case 1: return 1;
+          case 2: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return 0;
+          case 1: return 1;
+          case 2: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return 0;
+          case 1: return 1;
+          case 2: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return 0;
+          case 1: return 1;
+          case 2: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return 0;
+          case 1: return 1;
+          case 2: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return 0;
+          case 1: return 1;
+          case 2: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return 0;
+          case 1: return 1;
+          case 2: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return 0;
+          case 1: return 1;
+          case 2: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return 0;
+          case 1: return 1;
+          case 2: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return 0;
+          case 1: return 1;
+          case 2: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm:
+        switch( component )
+        {
+          case 0: return 0;
+          case 1: return 1;
+          case 2: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm:
+        switch( component )
+        {
+          case 0: return 0;
+          case 1: return 1;
+          case 2: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm:
+        switch( component )
+        {
+          case 0: return 0;
+          case 1: return 1;
+          case 2: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm:
+        switch( component )
+        {
+          case 0: return 0;
+          case 1: return 1;
+          case 2: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm:
+        switch( component )
+        {
+          case 0: return 0;
+          case 1: return 1;
+          case 2: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm:
+        switch( component )
+        {
+          case 0: return 0;
+          case 1: return 1;
+          case 2: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return 0;
+          case 1: return 1;
+          case 2: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return 0;
+          case 1: return 1;
+          case 2: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm:
+        switch( component )
+        {
+          case 0: return 0;
+          case 1: return 1;
+          case 2: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+
+      default: return 0;
+    }
+  }
+
+  // True, if the components of this format are compressed, otherwise false.
+  // Lists the block-compressed families: BC1-BC7, ETC2, ASTC (both the
+  // UNORM/SRGB LDR blocks and the SFLOAT HDR blocks) and the IMG PVRTC formats.
+  // Every other format (default case) returns false.
+  // NOTE(review): the EAC formats that compressionScheme() below reports as
+  // "EAC" are absent from this list — presumably because their components have
+  // defined bit widths; confirm against the Vulkan-Hpp generator output.
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 bool componentsAreCompressed( VULKAN_HPP_NAMESPACE::Format format )
+  {
+    switch( format )
+    {
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock:
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG:
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG:
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG:
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG:
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG:
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG:
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG:
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG:
+
+        return true;
+      default: return false;
+    }
+  }
+
+  // A textual description of the compression scheme, or an empty string if it is not compressed
+  // Possible return values visible below: "BC", "ETC2", "EAC", "ASTC LDR",
+  // "ASTC HDR", "PVRTC", or "" for any format not listed (default case).
+  // isCompressed() below is defined in terms of this function returning a
+  // non-empty string, so keep the two tables in sync.
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 char const * compressionScheme( VULKAN_HPP_NAMESPACE::Format format )
+  {
+    switch( format )
+    {
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock: return "BC";
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock: return "BC";
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock: return "BC";
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock: return "BC";
+      case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock: return "BC";
+      case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock: return "BC";
+      case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock: return "BC";
+      case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock: return "BC";
+      case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock: return "BC";
+      case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock: return "BC";
+      case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock: return "BC";
+      case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock: return "BC";
+      case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock: return "BC";
+      case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock: return "BC";
+      case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock: return "BC";
+      case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock: return "BC";
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock: return "ETC2";
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock: return "ETC2";
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock: return "ETC2";
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock: return "ETC2";
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock: return "ETC2";
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock: return "ETC2";
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11UnormBlock: return "EAC";
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11SnormBlock: return "EAC";
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11G11UnormBlock: return "EAC";
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11G11SnormBlock: return "EAC";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock: return "ASTC LDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock: return "ASTC LDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock: return "ASTC LDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock: return "ASTC LDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock: return "ASTC LDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock: return "ASTC LDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock: return "ASTC LDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock: return "ASTC LDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock: return "ASTC LDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock: return "ASTC LDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock: return "ASTC LDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock: return "ASTC LDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock: return "ASTC LDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock: return "ASTC LDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock: return "ASTC LDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock: return "ASTC LDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock: return "ASTC LDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock: return "ASTC LDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock: return "ASTC LDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock: return "ASTC LDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock: return "ASTC LDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock: return "ASTC LDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock: return "ASTC LDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock: return "ASTC LDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock: return "ASTC LDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock: return "ASTC LDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock: return "ASTC LDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock: return "ASTC LDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock: return "ASTC HDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock: return "ASTC HDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock: return "ASTC HDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock: return "ASTC HDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock: return "ASTC HDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock: return "ASTC HDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock: return "ASTC HDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock: return "ASTC HDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock: return "ASTC HDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock: return "ASTC HDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock: return "ASTC HDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock: return "ASTC HDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock: return "ASTC HDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock: return "ASTC HDR";
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: return "PVRTC";
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG: return "PVRTC";
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG: return "PVRTC";
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG: return "PVRTC";
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG: return "PVRTC";
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG: return "PVRTC";
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return "PVRTC";
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return "PVRTC";
+
+      default: return "";
+    }
+  }
+
+  // True, if this format is a compressed one.
+  // Implemented as "compressionScheme( format ) returns a non-empty string":
+  // the dereference checks the first character of the scheme name against NUL.
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 bool isCompressed( VULKAN_HPP_NAMESPACE::Format format )
+  {
+    return ( *VULKAN_HPP_NAMESPACE::compressionScheme( format ) != 0 );
+  }
+
+  // The number of bits into which the format is packed. A single image element in this format
+  // can be stored in the same space as a scalar type of this bit width.
+  // Returns 8, 16 or 32, matching the Pack8/Pack16/Pack32 suffix of the listed
+  // formats, and 0 for every non-packed format (default case).
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t packed( VULKAN_HPP_NAMESPACE::Format format )
+  {
+    switch( format )
+    {
+      case VULKAN_HPP_NAMESPACE::Format::eR4G4UnormPack8: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eR4G4B4A4UnormPack16: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eB4G4R4A4UnormPack16: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eR5G6B5UnormPack16: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eB5G6R5UnormPack16: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eR5G5B5A1UnormPack16: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eB5G5R5A1UnormPack16: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eA1R5G5B5UnormPack16: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UnormPack32: return 32;
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SnormPack32: return 32;
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UscaledPack32: return 32;
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SscaledPack32: return 32;
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UintPack32: return 32;
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SintPack32: return 32;
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SrgbPack32: return 32;
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UnormPack32: return 32;
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SnormPack32: return 32;
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UscaledPack32: return 32;
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SscaledPack32: return 32;
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UintPack32: return 32;
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SintPack32: return 32;
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UnormPack32: return 32;
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SnormPack32: return 32;
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UscaledPack32: return 32;
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SscaledPack32: return 32;
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UintPack32: return 32;
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SintPack32: return 32;
+      case VULKAN_HPP_NAMESPACE::Format::eB10G11R11UfloatPack32: return 32;
+      case VULKAN_HPP_NAMESPACE::Format::eE5B9G9R9UfloatPack32: return 32;
+      case VULKAN_HPP_NAMESPACE::Format::eX8D24UnormPack32: return 32;
+      case VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6B10X6A10X6Unorm4Pack16: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4B12X4A12X4Unorm4Pack16: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eA4R4G4B4UnormPack16: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eA4B4G4R4UnormPack16: return 16;
+
+      default: return 0;
+    }
+  }
+
+  // The single-plane format that this plane is compatible with.
+  // For the multi-planar formats listed, each plane aliases a single-plane
+  // format of matching component width: a one-component format (eR8Unorm,
+  // eR10X6UnormPack16, eR12X4UnormPack16, eR16Unorm) for planes holding one
+  // component, or the corresponding two-component format for the interleaved
+  // chroma plane of the 2-plane formats. Asserts (and returns eUndefined) on an
+  // out-of-range plane index. For any unlisted (single-plane) format, the
+  // default case asserts plane == 0 and returns the format itself.
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 VULKAN_HPP_NAMESPACE::Format planeCompatibleFormat( VULKAN_HPP_NAMESPACE::Format format, uint8_t plane )
+  {
+    switch( format )
+    {
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm:
+        switch( plane )
+        {
+          case 0: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm;
+          case 1: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm;
+          case 2: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm;
+          default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm:
+        switch( plane )
+        {
+          case 0: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm;
+          case 1: return VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm;
+          default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm:
+        switch( plane )
+        {
+          case 0: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm;
+          case 1: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm;
+          case 2: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm;
+          default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm:
+        switch( plane )
+        {
+          case 0: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm;
+          case 1: return VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm;
+          default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm:
+        switch( plane )
+        {
+          case 0: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm;
+          case 1: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm;
+          case 2: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm;
+          default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16;
+          case 1: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16;
+          case 2: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16;
+          default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16;
+          case 1: return VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16;
+          default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16;
+          case 1: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16;
+          case 2: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16;
+          default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16;
+          case 1: return VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16;
+          default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16;
+          case 1: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16;
+          case 2: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16;
+          default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16;
+          case 1: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16;
+          case 2: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16;
+          default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16;
+          case 1: return VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16;
+          default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16;
+          case 1: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16;
+          case 2: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16;
+          default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16;
+          case 1: return VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16;
+          default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16;
+          case 1: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16;
+          case 2: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16;
+          default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm:
+        switch( plane )
+        {
+          case 0: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm;
+          case 1: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm;
+          case 2: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm;
+          default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm:
+        switch( plane )
+        {
+          case 0: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm;
+          case 1: return VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm;
+          default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm:
+        switch( plane )
+        {
+          case 0: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm;
+          case 1: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm;
+          case 2: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm;
+          default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm:
+        switch( plane )
+        {
+          case 0: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm;
+          case 1: return VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm;
+          default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm:
+        switch( plane )
+        {
+          case 0: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm;
+          case 1: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm;
+          case 2: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm;
+          default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm:
+        switch( plane )
+        {
+          case 0: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm;
+          case 1: return VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm;
+          default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16;
+          case 1: return VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16;
+          default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16;
+          case 1: return VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16;
+          default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm:
+        switch( plane )
+        {
+          case 0: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm;
+          case 1: return VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm;
+          default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+        }
+
+      default: VULKAN_HPP_ASSERT( plane == 0 ); return format;
+    }
+  }
+
+  // The number of image planes of this format.
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t planeCount( VULKAN_HPP_NAMESPACE::Format format )
+  {
+    switch( format )
+    {
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm: return 2;
+
+      default: return 1;
+    }
+  }
+
+  // The relative height of this plane. A value of k means that this plane is 1/k the height of the overall format.
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t planeHeightDivisor( VULKAN_HPP_NAMESPACE::Format format, uint8_t plane )
+  {
+    switch( format )
+    {
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 2;
+          case 2: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 1;
+          case 2: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 1;
+          case 2: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 2;
+          case 2: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 1;
+          case 2: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 1;
+          case 2: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 2;
+          case 2: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 1;
+          case 2: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 1;
+          case 2: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 2;
+          case 2: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 1;
+          case 2: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 1;
+          case 2: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+
+      default: VULKAN_HPP_ASSERT( plane == 0 ); return 1;
+    }
+  }
+
+  // The relative width of this plane. A value of k means that this plane is 1/k the width of the overall format.
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t planeWidthDivisor( VULKAN_HPP_NAMESPACE::Format format, uint8_t plane )
+  {
+    switch( format )
+    {
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 2;
+          case 2: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 2;
+          case 2: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 1;
+          case 2: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 2;
+          case 2: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 2;
+          case 2: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 1;
+          case 2: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 2;
+          case 2: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 2;
+          case 2: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 1;
+          case 2: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 2;
+          case 2: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 2;
+          case 2: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 1;
+          case 2: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+
+      default: VULKAN_HPP_ASSERT( plane == 0 ); return 1;
+    }
+  }
+
+  // The number of texels in a texel block.
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t texelsPerBlock( VULKAN_HPP_NAMESPACE::Format format )
+  {
+    switch( format )
+    {
+      case VULKAN_HPP_NAMESPACE::Format::eR4G4UnormPack8: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR4G4B4A4UnormPack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eB4G4R4A4UnormPack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR5G6B5UnormPack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eB5G6R5UnormPack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR5G5B5A1UnormPack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eB5G5R5A1UnormPack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eA1R5G5B5UnormPack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8Unorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8Snorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8Uscaled: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8Sscaled: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8Uint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8Sint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8Srgb: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Snorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Uscaled: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Sscaled: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Uint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Sint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Srgb: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Unorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Snorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uscaled: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sscaled: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Srgb: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Unorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Snorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uscaled: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sscaled: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Srgb: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Unorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Snorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uscaled: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sscaled: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Srgb: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Unorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Snorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uscaled: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sscaled: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Srgb: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UnormPack32: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SnormPack32: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UscaledPack32: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SscaledPack32: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UintPack32: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SintPack32: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SrgbPack32: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UnormPack32: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SnormPack32: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UscaledPack32: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SscaledPack32: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UintPack32: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SintPack32: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UnormPack32: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SnormPack32: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UscaledPack32: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SscaledPack32: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UintPack32: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SintPack32: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16Unorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16Snorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16Uscaled: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16Sscaled: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16Uint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16Sint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16Sfloat: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Snorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Uscaled: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Sscaled: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Uint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Sint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfloat: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Unorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Snorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uscaled: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sscaled: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sfloat: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Unorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Snorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uscaled: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sscaled: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sfloat: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR32Uint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR32Sint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR32Sfloat: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32Uint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32Sint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32Sfloat: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Uint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sfloat: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Uint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sfloat: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR64Uint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR64Sint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR64Sfloat: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64Uint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64Sint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64Sfloat: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Uint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sfloat: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Uint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sfloat: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eB10G11R11UfloatPack32: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eE5B9G9R9UfloatPack32: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eD16Unorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eX8D24UnormPack32: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eD32Sfloat: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eS8Uint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eD16UnormS8Uint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eD24UnormS8Uint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eD32SfloatS8Uint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11UnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11SnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11G11UnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11G11SnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock: return 20;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock: return 20;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock: return 25;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock: return 25;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock: return 30;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock: return 30;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock: return 36;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock: return 36;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock: return 40;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock: return 40;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock: return 48;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock: return 48;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock: return 64;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock: return 64;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock: return 50;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock: return 50;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock: return 60;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock: return 60;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock: return 80;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock: return 80;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock: return 100;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock: return 100;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock: return 120;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock: return 120;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock: return 144;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock: return 144;
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8G8R8422Unorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8G8422Unorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6B10X6A10X6Unorm4Pack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4B12X4A12X4Unorm4Pack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16G16R16422Unorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eB16G16R16G16422Unorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eA4R4G4B4UnormPack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eA4B4G4R4UnormPack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock: return 20;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock: return 25;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock: return 30;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock: return 36;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock: return 40;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock: return 48;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock: return 64;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock: return 50;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock: return 60;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock: return 80;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock: return 100;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock: return 120;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock: return 144;
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16S105NV: return 1;
+
+      default: VULKAN_HPP_ASSERT( false ); return 0;
+    }
+  }
+
+}   // namespace VULKAN_HPP_NAMESPACE
+#endif
diff --git a/host/libs/graphics_detector/include/vulkan/vulkan_fuchsia.h b/host/libs/graphics_detector/include/vulkan/vulkan_fuchsia.h
new file mode 100644
index 0000000..61774ff
--- /dev/null
+++ b/host/libs/graphics_detector/include/vulkan/vulkan_fuchsia.h
@@ -0,0 +1,258 @@
+#ifndef VULKAN_FUCHSIA_H_
+#define VULKAN_FUCHSIA_H_ 1
+
+/*
+** Copyright 2015-2022 The Khronos Group Inc.
+**
+** SPDX-License-Identifier: Apache-2.0
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+#define VK_FUCHSIA_imagepipe_surface 1
+#define VK_FUCHSIA_IMAGEPIPE_SURFACE_SPEC_VERSION 1
+#define VK_FUCHSIA_IMAGEPIPE_SURFACE_EXTENSION_NAME "VK_FUCHSIA_imagepipe_surface"
+typedef VkFlags VkImagePipeSurfaceCreateFlagsFUCHSIA;
+typedef struct VkImagePipeSurfaceCreateInfoFUCHSIA {
+    VkStructureType                         sType;
+    const void*                             pNext;
+    VkImagePipeSurfaceCreateFlagsFUCHSIA    flags;
+    zx_handle_t                             imagePipeHandle;
+} VkImagePipeSurfaceCreateInfoFUCHSIA;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateImagePipeSurfaceFUCHSIA)(VkInstance instance, const VkImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateImagePipeSurfaceFUCHSIA(
+    VkInstance                                  instance,
+    const VkImagePipeSurfaceCreateInfoFUCHSIA*  pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+#endif
+
+
+#define VK_FUCHSIA_external_memory 1
+#define VK_FUCHSIA_EXTERNAL_MEMORY_SPEC_VERSION 1
+#define VK_FUCHSIA_EXTERNAL_MEMORY_EXTENSION_NAME "VK_FUCHSIA_external_memory"
+typedef struct VkImportMemoryZirconHandleInfoFUCHSIA {
+    VkStructureType                       sType;
+    const void*                           pNext;
+    VkExternalMemoryHandleTypeFlagBits    handleType;
+    zx_handle_t                           handle;
+} VkImportMemoryZirconHandleInfoFUCHSIA;
+
+typedef struct VkMemoryZirconHandlePropertiesFUCHSIA {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           memoryTypeBits;
+} VkMemoryZirconHandlePropertiesFUCHSIA;
+
+typedef struct VkMemoryGetZirconHandleInfoFUCHSIA {
+    VkStructureType                       sType;
+    const void*                           pNext;
+    VkDeviceMemory                        memory;
+    VkExternalMemoryHandleTypeFlagBits    handleType;
+} VkMemoryGetZirconHandleInfoFUCHSIA;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryZirconHandleFUCHSIA)(VkDevice device, const VkMemoryGetZirconHandleInfoFUCHSIA* pGetZirconHandleInfo, zx_handle_t* pZirconHandle);
+typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA)(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, zx_handle_t zirconHandle, VkMemoryZirconHandlePropertiesFUCHSIA* pMemoryZirconHandleProperties);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryZirconHandleFUCHSIA(
+    VkDevice                                    device,
+    const VkMemoryGetZirconHandleInfoFUCHSIA*   pGetZirconHandleInfo,
+    zx_handle_t*                                pZirconHandle);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryZirconHandlePropertiesFUCHSIA(
+    VkDevice                                    device,
+    VkExternalMemoryHandleTypeFlagBits          handleType,
+    zx_handle_t                                 zirconHandle,
+    VkMemoryZirconHandlePropertiesFUCHSIA*      pMemoryZirconHandleProperties);
+#endif
+
+
+#define VK_FUCHSIA_external_semaphore 1
+#define VK_FUCHSIA_EXTERNAL_SEMAPHORE_SPEC_VERSION 1
+#define VK_FUCHSIA_EXTERNAL_SEMAPHORE_EXTENSION_NAME "VK_FUCHSIA_external_semaphore"
+typedef struct VkImportSemaphoreZirconHandleInfoFUCHSIA {
+    VkStructureType                          sType;
+    const void*                              pNext;
+    VkSemaphore                              semaphore;
+    VkSemaphoreImportFlags                   flags;
+    VkExternalSemaphoreHandleTypeFlagBits    handleType;
+    zx_handle_t                              zirconHandle;
+} VkImportSemaphoreZirconHandleInfoFUCHSIA;
+
+typedef struct VkSemaphoreGetZirconHandleInfoFUCHSIA {
+    VkStructureType                          sType;
+    const void*                              pNext;
+    VkSemaphore                              semaphore;
+    VkExternalSemaphoreHandleTypeFlagBits    handleType;
+} VkSemaphoreGetZirconHandleInfoFUCHSIA;
+
+typedef VkResult (VKAPI_PTR *PFN_vkImportSemaphoreZirconHandleFUCHSIA)(VkDevice device, const VkImportSemaphoreZirconHandleInfoFUCHSIA* pImportSemaphoreZirconHandleInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkGetSemaphoreZirconHandleFUCHSIA)(VkDevice device, const VkSemaphoreGetZirconHandleInfoFUCHSIA* pGetZirconHandleInfo, zx_handle_t* pZirconHandle);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkImportSemaphoreZirconHandleFUCHSIA(
+    VkDevice                                    device,
+    const VkImportSemaphoreZirconHandleInfoFUCHSIA* pImportSemaphoreZirconHandleInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreZirconHandleFUCHSIA(
+    VkDevice                                    device,
+    const VkSemaphoreGetZirconHandleInfoFUCHSIA* pGetZirconHandleInfo,
+    zx_handle_t*                                pZirconHandle);
+#endif
+
+
+#define VK_FUCHSIA_buffer_collection 1
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkBufferCollectionFUCHSIA)
+#define VK_FUCHSIA_BUFFER_COLLECTION_SPEC_VERSION 2
+#define VK_FUCHSIA_BUFFER_COLLECTION_EXTENSION_NAME "VK_FUCHSIA_buffer_collection"
+typedef VkFlags VkImageFormatConstraintsFlagsFUCHSIA;
+
+typedef enum VkImageConstraintsInfoFlagBitsFUCHSIA {
+    VK_IMAGE_CONSTRAINTS_INFO_CPU_READ_RARELY_FUCHSIA = 0x00000001,
+    VK_IMAGE_CONSTRAINTS_INFO_CPU_READ_OFTEN_FUCHSIA = 0x00000002,
+    VK_IMAGE_CONSTRAINTS_INFO_CPU_WRITE_RARELY_FUCHSIA = 0x00000004,
+    VK_IMAGE_CONSTRAINTS_INFO_CPU_WRITE_OFTEN_FUCHSIA = 0x00000008,
+    VK_IMAGE_CONSTRAINTS_INFO_PROTECTED_OPTIONAL_FUCHSIA = 0x00000010,
+    VK_IMAGE_CONSTRAINTS_INFO_FLAG_BITS_MAX_ENUM_FUCHSIA = 0x7FFFFFFF
+} VkImageConstraintsInfoFlagBitsFUCHSIA;
+typedef VkFlags VkImageConstraintsInfoFlagsFUCHSIA;
+typedef struct VkBufferCollectionCreateInfoFUCHSIA {
+    VkStructureType    sType;
+    const void*        pNext;
+    zx_handle_t        collectionToken;
+} VkBufferCollectionCreateInfoFUCHSIA;
+
+typedef struct VkImportMemoryBufferCollectionFUCHSIA {
+    VkStructureType              sType;
+    const void*                  pNext;
+    VkBufferCollectionFUCHSIA    collection;
+    uint32_t                     index;
+} VkImportMemoryBufferCollectionFUCHSIA;
+
+typedef struct VkBufferCollectionImageCreateInfoFUCHSIA {
+    VkStructureType              sType;
+    const void*                  pNext;
+    VkBufferCollectionFUCHSIA    collection;
+    uint32_t                     index;
+} VkBufferCollectionImageCreateInfoFUCHSIA;
+
+typedef struct VkBufferCollectionConstraintsInfoFUCHSIA {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           minBufferCount;
+    uint32_t           maxBufferCount;
+    uint32_t           minBufferCountForCamping;
+    uint32_t           minBufferCountForDedicatedSlack;
+    uint32_t           minBufferCountForSharedSlack;
+} VkBufferCollectionConstraintsInfoFUCHSIA;
+
+typedef struct VkBufferConstraintsInfoFUCHSIA {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    VkBufferCreateInfo                          createInfo;
+    VkFormatFeatureFlags                        requiredFormatFeatures;
+    VkBufferCollectionConstraintsInfoFUCHSIA    bufferCollectionConstraints;
+} VkBufferConstraintsInfoFUCHSIA;
+
+typedef struct VkBufferCollectionBufferCreateInfoFUCHSIA {
+    VkStructureType              sType;
+    const void*                  pNext;
+    VkBufferCollectionFUCHSIA    collection;
+    uint32_t                     index;
+} VkBufferCollectionBufferCreateInfoFUCHSIA;
+
+typedef struct VkSysmemColorSpaceFUCHSIA {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           colorSpace;
+} VkSysmemColorSpaceFUCHSIA;
+
+typedef struct VkBufferCollectionPropertiesFUCHSIA {
+    VkStructureType                  sType;
+    void*                            pNext;
+    uint32_t                         memoryTypeBits;
+    uint32_t                         bufferCount;
+    uint32_t                         createInfoIndex;
+    uint64_t                         sysmemPixelFormat;
+    VkFormatFeatureFlags             formatFeatures;
+    VkSysmemColorSpaceFUCHSIA        sysmemColorSpaceIndex;
+    VkComponentMapping               samplerYcbcrConversionComponents;
+    VkSamplerYcbcrModelConversion    suggestedYcbcrModel;
+    VkSamplerYcbcrRange              suggestedYcbcrRange;
+    VkChromaLocation                 suggestedXChromaOffset;
+    VkChromaLocation                 suggestedYChromaOffset;
+} VkBufferCollectionPropertiesFUCHSIA;
+
+typedef struct VkImageFormatConstraintsInfoFUCHSIA {
+    VkStructureType                         sType;
+    const void*                             pNext;
+    VkImageCreateInfo                       imageCreateInfo;
+    VkFormatFeatureFlags                    requiredFormatFeatures;
+    VkImageFormatConstraintsFlagsFUCHSIA    flags;
+    uint64_t                                sysmemPixelFormat;
+    uint32_t                                colorSpaceCount;
+    const VkSysmemColorSpaceFUCHSIA*        pColorSpaces;
+} VkImageFormatConstraintsInfoFUCHSIA;
+
+typedef struct VkImageConstraintsInfoFUCHSIA {
+    VkStructureType                               sType;
+    const void*                                   pNext;
+    uint32_t                                      formatConstraintsCount;
+    const VkImageFormatConstraintsInfoFUCHSIA*    pFormatConstraints;
+    VkBufferCollectionConstraintsInfoFUCHSIA      bufferCollectionConstraints;
+    VkImageConstraintsInfoFlagsFUCHSIA            flags;
+} VkImageConstraintsInfoFUCHSIA;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateBufferCollectionFUCHSIA)(VkDevice device, const VkBufferCollectionCreateInfoFUCHSIA* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBufferCollectionFUCHSIA* pCollection);
+typedef VkResult (VKAPI_PTR *PFN_vkSetBufferCollectionImageConstraintsFUCHSIA)(VkDevice device, VkBufferCollectionFUCHSIA collection, const VkImageConstraintsInfoFUCHSIA* pImageConstraintsInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA)(VkDevice device, VkBufferCollectionFUCHSIA collection, const VkBufferConstraintsInfoFUCHSIA* pBufferConstraintsInfo);
+typedef void (VKAPI_PTR *PFN_vkDestroyBufferCollectionFUCHSIA)(VkDevice device, VkBufferCollectionFUCHSIA collection, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkGetBufferCollectionPropertiesFUCHSIA)(VkDevice device, VkBufferCollectionFUCHSIA collection, VkBufferCollectionPropertiesFUCHSIA* pProperties);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateBufferCollectionFUCHSIA(
+    VkDevice                                    device,
+    const VkBufferCollectionCreateInfoFUCHSIA*  pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkBufferCollectionFUCHSIA*                  pCollection);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkSetBufferCollectionImageConstraintsFUCHSIA(
+    VkDevice                                    device,
+    VkBufferCollectionFUCHSIA                   collection,
+    const VkImageConstraintsInfoFUCHSIA*        pImageConstraintsInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkSetBufferCollectionBufferConstraintsFUCHSIA(
+    VkDevice                                    device,
+    VkBufferCollectionFUCHSIA                   collection,
+    const VkBufferConstraintsInfoFUCHSIA*       pBufferConstraintsInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyBufferCollectionFUCHSIA(
+    VkDevice                                    device,
+    VkBufferCollectionFUCHSIA                   collection,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetBufferCollectionPropertiesFUCHSIA(
+    VkDevice                                    device,
+    VkBufferCollectionFUCHSIA                   collection,
+    VkBufferCollectionPropertiesFUCHSIA*        pProperties);
+#endif
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/host/libs/graphics_detector/include/vulkan/vulkan_funcs.hpp b/host/libs/graphics_detector/include/vulkan/vulkan_funcs.hpp
new file mode 100644
index 0000000..ea100f4
--- /dev/null
+++ b/host/libs/graphics_detector/include/vulkan/vulkan_funcs.hpp
@@ -0,0 +1,17891 @@
+// Copyright 2015-2022 The Khronos Group Inc.
+// 
+// SPDX-License-Identifier: Apache-2.0 OR MIT
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#ifndef VULKAN_FUNCS_HPP
+#  define VULKAN_FUNCS_HPP
+
+namespace VULKAN_HPP_NAMESPACE
+{
+
+  //===========================
+  //=== COMMAND Definitions ===
+  //===========================
+
+
+  //=== VK_VERSION_1_0 ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createInstance( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Instance * pInstance, Dispatch const & d  ) VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkInstance *>( pInstance ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Instance>::type createInstance( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d )
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::Instance instance;
+    VkResult result = d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkInstance *>( &instance ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::createInstance" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), instance );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Instance, Dispatch>>::type createInstanceUnique( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d )
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::Instance instance;
+    VkResult result = d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkInstance *>( &instance ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::createInstanceUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::Instance, Dispatch>( instance, ObjectDestroy<NoParent, Dispatch>( allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyInstance( m_instance, reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::destroy( Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyInstance( m_instance, reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDevices( uint32_t * pPhysicalDeviceCount, VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkEnumeratePhysicalDevices( m_instance, pPhysicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( pPhysicalDevices ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename PhysicalDeviceAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDevice, PhysicalDeviceAllocator>>::type Instance::enumeratePhysicalDevices( Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::PhysicalDevice, PhysicalDeviceAllocator> physicalDevices;
+    uint32_t physicalDeviceCount;
+    VkResult result;
+    do
+    {
+      result = d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && physicalDeviceCount )
+      {
+        physicalDevices.resize( physicalDeviceCount );
+        result = d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( physicalDevices.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDevices" );
+    VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() );
+    if ( physicalDeviceCount < physicalDevices.size() )
+    {
+      physicalDevices.resize( physicalDeviceCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), physicalDevices );
+  }
+
+  template <typename PhysicalDeviceAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, PhysicalDevice>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDevice, PhysicalDeviceAllocator>>::type Instance::enumeratePhysicalDevices( PhysicalDeviceAllocator & physicalDeviceAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::PhysicalDevice, PhysicalDeviceAllocator> physicalDevices( physicalDeviceAllocator );
+    uint32_t physicalDeviceCount;
+    VkResult result;
+    do
+    {
+      result = d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && physicalDeviceCount )
+      {
+        physicalDevices.resize( physicalDeviceCount );
+        result = d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( physicalDevices.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDevices" );
+    VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() );
+    if ( physicalDeviceCount < physicalDevices.size() )
+    {
+      physicalDevices.resize( physicalDeviceCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), physicalDevices );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pFeatures, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures *>( pFeatures ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures PhysicalDevice::getFeatures( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features;
+    d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures *>( &features ) );
+    
+    
+    return features;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties * pFormatProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties *>( pFormatProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::FormatProperties formatProperties;
+    d.vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties *>( &formatProperties ) );
+    
+    
+    return formatProperties;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ImageFormatProperties * pImageFormatProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageCreateFlags>( flags ), reinterpret_cast<VkImageFormatProperties *>( pImageFormatProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties>::type PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties;
+    VkResult result = d.vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageCreateFlags>( flags ), reinterpret_cast<VkImageFormatProperties *>( &imageFormatProperties ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), imageFormatProperties );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties * pProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties *>( pProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties PhysicalDevice::getProperties( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties;
+    d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties *>( &properties ) );
+    
+    
+    return properties;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties( uint32_t * pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties * pQueueFamilyProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties *>( pQueueFamilyProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename QueueFamilyPropertiesAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties, QueueFamilyPropertiesAllocator> PhysicalDevice::getQueueFamilyProperties( Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties, QueueFamilyPropertiesAllocator> queueFamilyProperties;
+    uint32_t queueFamilyPropertyCount;
+    d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties *>( queueFamilyProperties.data() ) );
+    
+    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
+    if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
+    {
+      queueFamilyProperties.resize( queueFamilyPropertyCount );
+    }
+    return queueFamilyProperties;
+  }
+
+  template <typename QueueFamilyPropertiesAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, QueueFamilyProperties>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties, QueueFamilyPropertiesAllocator> PhysicalDevice::getQueueFamilyProperties( QueueFamilyPropertiesAllocator & queueFamilyPropertiesAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties, QueueFamilyPropertiesAllocator> queueFamilyProperties( queueFamilyPropertiesAllocator );
+    uint32_t queueFamilyPropertyCount;
+    d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties *>( queueFamilyProperties.data() ) );
+    
+    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
+    if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
+    {
+      queueFamilyProperties.resize( queueFamilyPropertyCount );
+    }
+    return queueFamilyProperties;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties * pMemoryProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties *>( pMemoryProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties PhysicalDevice::getMemoryProperties( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties;
+    d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties *>( &memoryProperties ) );
+    
+    
+    return memoryProperties;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const char * pName, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return d.vkGetInstanceProcAddr( m_instance, pName );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const std::string & name, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    PFN_vkVoidFunction result = d.vkGetInstanceProcAddr( m_instance, name.c_str() );
+    
+    
+    return result;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const char * pName, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return d.vkGetDeviceProcAddr( m_device, pName );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const std::string & name, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    PFN_vkVoidFunction result = d.vkGetDeviceProcAddr( m_device, name.c_str() );
+    
+    
+    return result;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Device * pDevice, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateDevice( m_physicalDevice, reinterpret_cast<const VkDeviceCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkDevice *>( pDevice ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Device>::type PhysicalDevice::createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::Device device;
+    VkResult result = d.vkCreateDevice( m_physicalDevice, reinterpret_cast<const VkDeviceCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDevice *>( &device ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDevice" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), device );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Device, Dispatch>>::type PhysicalDevice::createDeviceUnique( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::Device device;
+    VkResult result = d.vkCreateDevice( m_physicalDevice, reinterpret_cast<const VkDeviceCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDevice *>( &device ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDeviceUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::Device, Dispatch>( device, ObjectDestroy<NoParent, Dispatch>( allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyDevice( m_device, reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyDevice( m_device, reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceExtensionProperties( const char * pLayerName, uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties, Dispatch const & d  ) VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties *>( pProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename ExtensionPropertiesAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator>>::type enumerateInstanceExtensionProperties( Optional<const std::string> layerName, Dispatch const & d )
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator> properties;
+    uint32_t propertyCount;
+    VkResult result;
+    do
+    {
+      result = d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceExtensionProperties" );
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    if ( propertyCount < properties.size() )
+    {
+      properties.resize( propertyCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
+  }
+
+  template <typename ExtensionPropertiesAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, ExtensionProperties>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator>>::type enumerateInstanceExtensionProperties( Optional<const std::string> layerName, ExtensionPropertiesAllocator & extensionPropertiesAllocator, Dispatch const & d )
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator> properties( extensionPropertiesAllocator );
+    uint32_t propertyCount;
+    VkResult result;
+    do
+    {
+      result = d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceExtensionProperties" );
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    if ( propertyCount < properties.size() )
+    {
+      properties.resize( propertyCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceExtensionProperties( const char * pLayerName, uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties *>( pProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename ExtensionPropertiesAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator>>::type PhysicalDevice::enumerateDeviceExtensionProperties( Optional<const std::string> layerName, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator> properties;
+    uint32_t propertyCount;
+    VkResult result;
+    do
+    {
+      result = d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" );
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    if ( propertyCount < properties.size() )
+    {
+      properties.resize( propertyCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
+  }
+
+  template <typename ExtensionPropertiesAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, ExtensionProperties>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator>>::type PhysicalDevice::enumerateDeviceExtensionProperties( Optional<const std::string> layerName, ExtensionPropertiesAllocator & extensionPropertiesAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator> properties( extensionPropertiesAllocator );
+    uint32_t propertyCount;
+    VkResult result;
+    do
+    {
+      result = d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" );
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    if ( propertyCount < properties.size() )
+    {
+      properties.resize( propertyCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceLayerProperties( uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::LayerProperties * pProperties, Dispatch const & d  ) VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkEnumerateInstanceLayerProperties( pPropertyCount, reinterpret_cast<VkLayerProperties *>( pProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename LayerPropertiesAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator>>::type enumerateInstanceLayerProperties( Dispatch const & d )
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator> properties;
+    uint32_t propertyCount;
+    VkResult result;
+    do
+    {
+      result = d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = d.vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceLayerProperties" );
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    if ( propertyCount < properties.size() )
+    {
+      properties.resize( propertyCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
+  }
+
+  template <typename LayerPropertiesAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, LayerProperties>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator>>::type enumerateInstanceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator, Dispatch const & d )
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator> properties( layerPropertiesAllocator );
+    uint32_t propertyCount;
+    VkResult result;
+    do
+    {
+      result = d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = d.vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceLayerProperties" );
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    if ( propertyCount < properties.size() )
+    {
+      properties.resize( propertyCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceLayerProperties( uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::LayerProperties * pProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, pPropertyCount, reinterpret_cast<VkLayerProperties *>( pProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename LayerPropertiesAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator>>::type PhysicalDevice::enumerateDeviceLayerProperties( Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator> properties;
+    uint32_t propertyCount;
+    VkResult result;
+    do
+    {
+      result = d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" );
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    if ( propertyCount < properties.size() )
+    {
+      properties.resize( propertyCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
+  }
+
+  template <typename LayerPropertiesAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, LayerProperties>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator>>::type PhysicalDevice::enumerateDeviceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator> properties( layerPropertiesAllocator );
+    uint32_t propertyCount;
+    VkResult result;
+    do
+    {
+      result = d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" );
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    if ( propertyCount < properties.size() )
+    {
+      properties.resize( propertyCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, VULKAN_HPP_NAMESPACE::Queue * pQueue, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue *>( pQueue ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Queue Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::Queue queue;
+    d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue *>( &queue ) );
+    
+    
+    return queue;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::submit( uint32_t submitCount, const VULKAN_HPP_NAMESPACE::SubmitInfo * pSubmits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkQueueSubmit( m_queue, submitCount, reinterpret_cast<const VkSubmitInfo *>( pSubmits ), static_cast<VkFence>( fence ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::submit( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo> const & submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkQueueSubmit( m_queue, submits.size(), reinterpret_cast<const VkSubmitInfo *>( submits.data() ), static_cast<VkFence>( fence ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::submit" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::waitIdle( Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkQueueWaitIdle( m_queue ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::waitIdle( Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkQueueWaitIdle( m_queue );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::waitIdle" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitIdle( Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkDeviceWaitIdle( m_device ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::waitIdle( Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkDeviceWaitIdle( m_device );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::waitIdle" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo * pAllocateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::DeviceMemory * pMemory, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkAllocateMemory( m_device, reinterpret_cast<const VkMemoryAllocateInfo *>( pAllocateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkDeviceMemory *>( pMemory ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceMemory>::type Device::allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo & allocateInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::DeviceMemory memory;
+    VkResult result = d.vkAllocateMemory( m_device, reinterpret_cast<const VkMemoryAllocateInfo *>( &allocateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDeviceMemory *>( &memory ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemory" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), memory );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DeviceMemory, Dispatch>>::type Device::allocateMemoryUnique( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo & allocateInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::DeviceMemory memory;
+    VkResult result = d.vkAllocateMemory( m_device, reinterpret_cast<const VkMemoryAllocateInfo *>( &allocateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDeviceMemory *>( &memory ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemoryUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::DeviceMemory, Dispatch>( memory, ObjectFree<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void ( Device::free )( VULKAN_HPP_NAMESPACE::DeviceMemory memory, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void ( Device::free )( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::DeviceSize size, VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, void ** ppData, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkMapMemory( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( offset ), static_cast<VkDeviceSize>( size ), static_cast<VkMemoryMapFlags>( flags ), ppData ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<void *>::type Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::DeviceSize size, VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    void * pData;
+    VkResult result = d.vkMapMemory( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( offset ), static_cast<VkDeviceSize>( size ), static_cast<VkMemoryMapFlags>( flags ), &pData );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::mapMemory" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pData );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkUnmapMemory( m_device, static_cast<VkDeviceMemory>( memory ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::flushMappedMemoryRanges( uint32_t memoryRangeCount, const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkFlushMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange *>( pMemoryRanges ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::flushMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkFlushMappedMemoryRanges( m_device, memoryRanges.size(), reinterpret_cast<const VkMappedMemoryRange *>( memoryRanges.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::flushMappedMemoryRanges" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::invalidateMappedMemoryRanges( uint32_t memoryRangeCount, const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkInvalidateMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange *>( pMemoryRanges ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::invalidateMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkInvalidateMappedMemoryRanges( m_device, memoryRanges.size(), reinterpret_cast<const VkMappedMemoryRange *>( memoryRanges.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::invalidateMappedMemoryRanges" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // vkGetDeviceMemoryCommitment, C-style overload: writes the committed byte
+  // count for `memory` through the caller-supplied out-pointer.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize * pCommittedMemoryInBytes, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetDeviceMemoryCommitment( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<VkDeviceSize *>( pCommittedMemoryInBytes ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode overload: returns the committed size by value instead of
+  // through an out-parameter.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize Device::getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::DeviceSize committedMemoryInBytes;
+    d.vkGetDeviceMemoryCommitment( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<VkDeviceSize *>( &committedMemoryInBytes ) );
+    
+    
+    return committedMemoryInBytes;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // vkBindBufferMemory, C-style form (enhanced mode disabled): returns the
+  // bare Result without any checking.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkBindBufferMemory( m_device, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) );
+  }
+#else
+  // Enhanced-mode form: same parameters, but the result is routed through
+  // resultCheck / createResultValueType.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkBindBufferMemory( m_device, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // vkBindImageMemory, C-style form — mirrors bindBufferMemory above.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkBindImageMemory( m_device, static_cast<VkImage>( image ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) );
+  }
+#else
+  // Enhanced-mode form of vkBindImageMemory.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkBindImageMemory( m_device, static_cast<VkImage>( image ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  // vkGetBufferMemoryRequirements, C-style overload: fills the caller's
+  // MemoryRequirements out-pointer.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetBufferMemoryRequirements( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements *>( pMemoryRequirements ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode overload: returns the MemoryRequirements struct by value.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements Device::getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements;
+    d.vkGetBufferMemoryRequirements( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements *>( &memoryRequirements ) );
+    
+    
+    return memoryRequirements;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // vkGetImageMemoryRequirements, C-style overload — mirrors the buffer
+  // variant above for images.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetImageMemoryRequirements( m_device, static_cast<VkImage>( image ), reinterpret_cast<VkMemoryRequirements *>( pMemoryRequirements ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode overload: returns the image's MemoryRequirements by value.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements Device::getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements;
+    d.vkGetImageMemoryRequirements( m_device, static_cast<VkImage>( image ), reinterpret_cast<VkMemoryRequirements *>( &memoryRequirements ) );
+    
+    
+    return memoryRequirements;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // vkGetImageSparseMemoryRequirements, C-style overload: raw count/pointer
+  // enumeration — caller drives the two-call query itself.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, uint32_t * pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements * pSparseMemoryRequirements, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), pSparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements *>( pSparseMemoryRequirements ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode overload: performs the two-call enumeration (first call with
+  // nullptr to get the count, resize, second call to fill) and returns a
+  // vector. The trailing shrink handles the entry count dropping between calls.
+  template <typename SparseImageMemoryRequirementsAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> sparseMemoryRequirements;
+    uint32_t sparseMemoryRequirementCount;
+    d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, nullptr );
+    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements *>( sparseMemoryRequirements.data() ) );
+    
+    VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
+    if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
+    {
+      sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    }
+    return sparseMemoryRequirements;
+  }
+
+  // Allocator-taking variant: identical enumeration, but the result vector is
+  // constructed from the caller-supplied allocator.
+  template <typename SparseImageMemoryRequirementsAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, SparseImageMemoryRequirements>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, SparseImageMemoryRequirementsAllocator & sparseImageMemoryRequirementsAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> sparseMemoryRequirements( sparseImageMemoryRequirementsAllocator );
+    uint32_t sparseMemoryRequirementCount;
+    d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, nullptr );
+    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements *>( sparseMemoryRequirements.data() ) );
+    
+    VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
+    if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
+    {
+      sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    }
+    return sparseMemoryRequirements;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // vkGetPhysicalDeviceSparseImageFormatProperties, C-style overload: raw
+  // count/pointer enumeration for the given format/type/samples/usage/tiling.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties * pProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ), pPropertyCount, reinterpret_cast<VkSparseImageFormatProperties *>( pProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode overload: two-call enumeration (count, resize, fill) that
+  // returns the properties as a vector; shrinks if the count decreased.
+  template <typename SparseImageFormatPropertiesAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> properties;
+    uint32_t propertyCount;
+    d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ), &propertyCount, nullptr );
+    properties.resize( propertyCount );
+    d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties *>( properties.data() ) );
+    
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    if ( propertyCount < properties.size() )
+    {
+      properties.resize( propertyCount );
+    }
+    return properties;
+  }
+
+  // Allocator-taking variant: identical enumeration with a caller-supplied
+  // allocator for the result vector.
+  template <typename SparseImageFormatPropertiesAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, SparseImageFormatProperties>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, SparseImageFormatPropertiesAllocator & sparseImageFormatPropertiesAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> properties( sparseImageFormatPropertiesAllocator );
+    uint32_t propertyCount;
+    d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ), &propertyCount, nullptr );
+    properties.resize( propertyCount );
+    d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties *>( properties.data() ) );
+    
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    if ( propertyCount < properties.size() )
+    {
+      properties.resize( propertyCount );
+    }
+    return properties;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // vkQueueBindSparse, C-style overload: submits raw bind-info array with an
+  // optional fence and returns the bare Result.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::bindSparse( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindSparseInfo * pBindInfo, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkQueueBindSparse( m_queue, bindInfoCount, reinterpret_cast<const VkBindSparseInfo *>( pBindInfo ), static_cast<VkFence>( fence ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode overload: ArrayProxy of BindSparseInfo; result is validated
+  // by resultCheck for this call site.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::bindSparse( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindSparseInfo> const & bindInfo, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkQueueBindSparse( m_queue, bindInfo.size(), reinterpret_cast<const VkBindSparseInfo *>( bindInfo.data() ), static_cast<VkFence>( fence ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::bindSparse" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // vkCreateFence, C-style overload: writes the new handle via out-pointer and
+  // returns the bare Result.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Fence * pFence, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateFence( m_device, reinterpret_cast<const VkFenceCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkFence *>( pFence ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode overload: reference parameters, optional allocator; returns
+  // the created Fence wrapped by createResultValueType.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type Device::createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::Fence fence;
+    VkResult result = d.vkCreateFence( m_device, reinterpret_cast<const VkFenceCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkFence *>( &fence ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createFence" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), fence );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  // Unique-handle variant: same call, but the Fence is returned inside a
+  // UniqueHandle whose deleter destroys it on this device with `allocator`.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type Device::createFenceUnique( const VULKAN_HPP_NAMESPACE::FenceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::Fence fence;
+    VkResult result = d.vkCreateFence( m_device, reinterpret_cast<const VkFenceCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkFence *>( &fence ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createFenceUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>( fence, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // vkDestroyFence, C-style overload (named form).
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode form: Optional allocator converted to a raw pointer.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // Overloaded destroy() taking a Fence — same behavior as destroyFence; part
+  // of the uniform destroy() overload set used by ObjectDestroy deleters.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Fence fence, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode destroy(Fence) with Optional allocator.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Fence fence, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // vkResetFences, C-style overload: resets `fenceCount` fences, returning the
+  // bare Result.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetFences( uint32_t fenceCount, const VULKAN_HPP_NAMESPACE::Fence * pFences, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkResetFences( m_device, fenceCount, reinterpret_cast<const VkFence *>( pFences ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode overload: ArrayProxy of fences; result validated by
+  // resultCheck for this call site.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::resetFences( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkResetFences( m_device, fences.size(), reinterpret_cast<const VkFence *>( fences.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::resetFences" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // vkGetFenceStatus, C-style form: returns the bare Result unchecked.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) ) );
+  }
+#else
+  // Enhanced-mode form: both eSuccess and eNotReady are accepted by
+  // resultCheck (eNotReady is an informative status, not an error), and the
+  // Result itself is returned so callers can distinguish them.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceStatus", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
+    
+    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  // vkWaitForFences, C-style overload: raw count/pointer array, waitAll flag
+  // and timeout in nanoseconds; returns the bare Result.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitForFences( uint32_t fenceCount, const VULKAN_HPP_NAMESPACE::Fence * pFences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkWaitForFences( m_device, fenceCount, reinterpret_cast<const VkFence *>( pFences ), static_cast<VkBool32>( waitAll ), timeout ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode overload: eSuccess and eTimeout are both accepted by
+  // resultCheck (timeout is a normal outcome), and the Result is returned so
+  // callers can tell which occurred.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::waitForFences( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkWaitForFences( m_device, fences.size(), reinterpret_cast<const VkFence *>( fences.data() ), static_cast<VkBool32>( waitAll ), timeout );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::waitForFences", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } );
+    
+    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // vkCreateSemaphore, C-style overload: writes the new handle via out-pointer
+  // and returns the bare Result.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Semaphore * pSemaphore, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateSemaphore( m_device, reinterpret_cast<const VkSemaphoreCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkSemaphore *>( pSemaphore ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode overload: returns the created Semaphore wrapped by
+  // createResultValueType after resultCheck.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Semaphore>::type Device::createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::Semaphore semaphore;
+    VkResult result = d.vkCreateSemaphore( m_device, reinterpret_cast<const VkSemaphoreCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSemaphore *>( &semaphore ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphore" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), semaphore );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  // Unique-handle variant: the Semaphore is owned by a UniqueHandle whose
+  // deleter destroys it on this device with `allocator`.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Semaphore, Dispatch>>::type Device::createSemaphoreUnique( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::Semaphore semaphore;
+    VkResult result = d.vkCreateSemaphore( m_device, reinterpret_cast<const VkSemaphoreCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSemaphore *>( &semaphore ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphoreUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::Semaphore, Dispatch>( semaphore, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // vkDestroySemaphore, C-style overload (named form).
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode form: Optional allocator converted to a raw pointer.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // Overloaded destroy() taking a Semaphore — same behavior as
+  // destroySemaphore; part of the uniform destroy() overload set.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode destroy(Semaphore) with Optional allocator.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // vkCreateEvent, C-style overload: writes the new handle via out-pointer and
+  // returns the bare Result.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Event * pEvent, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateEvent( m_device, reinterpret_cast<const VkEventCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkEvent *>( pEvent ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode overload: returns the created Event wrapped by
+  // createResultValueType after resultCheck.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Event>::type Device::createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::Event event;
+    VkResult result = d.vkCreateEvent( m_device, reinterpret_cast<const VkEventCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkEvent *>( &event ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createEvent" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), event );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  // Unique-handle variant: the Event is owned by a UniqueHandle whose deleter
+  // destroys it on this device with `allocator`.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Event, Dispatch>>::type Device::createEventUnique( const VULKAN_HPP_NAMESPACE::EventCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::Event event;
+    VkResult result = d.vkCreateEvent( m_device, reinterpret_cast<const VkEventCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkEvent *>( &event ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createEventUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::Event, Dispatch>( event, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyEvent( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyEvent( VULKAN_HPP_NAMESPACE::Event event, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Event event, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetEventStatus( m_device, static_cast<VkEvent>( event ) ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkGetEventStatus( m_device, static_cast<VkEvent>( event ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getEventStatus", { VULKAN_HPP_NAMESPACE::Result::eEventSet, VULKAN_HPP_NAMESPACE::Result::eEventReset } );
+    
+    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkSetEvent( m_device, static_cast<VkEvent>( event ) ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkSetEvent( m_device, static_cast<VkEvent>( event ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setEvent" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkResetEvent( m_device, static_cast<VkEvent>( event ) ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkResetEvent( m_device, static_cast<VkEvent>( event ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::resetEvent" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::QueryPool * pQueryPool, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateQueryPool( m_device, reinterpret_cast<const VkQueryPoolCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkQueryPool *>( pQueryPool ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::QueryPool>::type Device::createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::QueryPool queryPool;
+    VkResult result = d.vkCreateQueryPool( m_device, reinterpret_cast<const VkQueryPoolCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkQueryPool *>( &queryPool ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPool" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), queryPool );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::QueryPool, Dispatch>>::type Device::createQueryPoolUnique( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::QueryPool queryPool;
+    VkResult result = d.vkCreateQueryPool( m_device, reinterpret_cast<const VkQueryPoolCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkQueryPool *>( &queryPool ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPoolUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::QueryPool, Dispatch>( queryPool, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void * pData, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetQueryPoolResults( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, dataSize, pData, static_cast<VkDeviceSize>( stride ), static_cast<VkQueryResultFlags>( flags ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename DataType, typename DataTypeAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<DataType, DataTypeAllocator>> Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+        VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
+    std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) );
+    VkResult result = d.vkGetQueryPoolResults( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, data.size() * sizeof( DataType ), reinterpret_cast<void *>( data.data() ), static_cast<VkDeviceSize>( stride ), static_cast<VkQueryResultFlags>( flags ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResults", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
+    
+    return ResultValue<std::vector<DataType, DataTypeAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
+  }
+
+  template <typename DataType, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<DataType> Device::getQueryPoolResult( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    DataType data;
+    VkResult result = d.vkGetQueryPoolResults( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, sizeof( DataType ), reinterpret_cast<void *>( &data ), static_cast<VkDeviceSize>( stride ), static_cast<VkQueryResultFlags>( flags ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResult", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
+    
+    return ResultValue<DataType>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Buffer * pBuffer, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateBuffer( m_device, reinterpret_cast<const VkBufferCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkBuffer *>( pBuffer ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Buffer>::type Device::createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::Buffer buffer;
+    VkResult result = d.vkCreateBuffer( m_device, reinterpret_cast<const VkBufferCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkBuffer *>( &buffer ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createBuffer" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), buffer );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Buffer, Dispatch>>::type Device::createBufferUnique( const VULKAN_HPP_NAMESPACE::BufferCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::Buffer buffer;
+    VkResult result = d.vkCreateBuffer( m_device, reinterpret_cast<const VkBufferCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkBuffer *>( &buffer ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::Buffer, Dispatch>( buffer, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::BufferView * pView, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateBufferView( m_device, reinterpret_cast<const VkBufferViewCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkBufferView *>( pView ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::BufferView>::type Device::createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::BufferView view;
+    VkResult result = d.vkCreateBufferView( m_device, reinterpret_cast<const VkBufferViewCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkBufferView *>( &view ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferView" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), view );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::BufferView, Dispatch>>::type Device::createBufferViewUnique( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::BufferView view;
+    VkResult result = d.vkCreateBufferView( m_device, reinterpret_cast<const VkBufferViewCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkBufferView *>( &view ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferViewUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::BufferView, Dispatch>( view, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Image * pImage, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateImage( m_device, reinterpret_cast<const VkImageCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkImage *>( pImage ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Image>::type Device::createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::Image image;
+    VkResult result = d.vkCreateImage( m_device, reinterpret_cast<const VkImageCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkImage *>( &image ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createImage" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), image );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Image, Dispatch>>::type Device::createImageUnique( const VULKAN_HPP_NAMESPACE::ImageCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::Image image;
+    VkResult result = d.vkCreateImage( m_device, reinterpret_cast<const VkImageCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkImage *>( &image ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createImageUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::Image, Dispatch>( image, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyImage( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyImage( VULKAN_HPP_NAMESPACE::Image image, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Image image, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource * pSubresource, VULKAN_HPP_NAMESPACE::SubresourceLayout * pLayout, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetImageSubresourceLayout( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkImageSubresource *>( pSubresource ), reinterpret_cast<VkSubresourceLayout *>( pLayout ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout Device::getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::SubresourceLayout layout;
+    d.vkGetImageSubresourceLayout( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkImageSubresource *>( &subresource ), reinterpret_cast<VkSubresourceLayout *>( &layout ) );
+    
+    
+    return layout;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::ImageView * pView, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateImageView( m_device, reinterpret_cast<const VkImageViewCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkImageView *>( pView ) ) );
+  }
+
+// NOTE(review): uniform generated-style dispatch wrappers; this hunk appears machine-generated —
+// confirm against the Vulkan-Hpp generator before hand-editing.
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode createImageView: asserts the dispatcher matches VK_HEADER_VERSION,
+  // forwards to vkCreateImageView, routes the VkResult through resultCheck, and returns
+  // the new ImageView via createResultValueType.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageView>::type Device::createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::ImageView view;
+    VkResult result = d.vkCreateImageView( m_device, reinterpret_cast<const VkImageViewCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkImageView *>( &view ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createImageView" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), view );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  // createImageViewUnique: same call, but the handle is owned by a UniqueHandle whose
+  // ObjectDestroy deleter captures this device, the allocator and the dispatcher.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ImageView, Dispatch>>::type Device::createImageViewUnique( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::ImageView view;
+    VkResult result = d.vkCreateImageView( m_device, reinterpret_cast<const VkImageViewCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkImageView *>( &view ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createImageViewUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::ImageView, Dispatch>( view, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // destroyImageView: raw-pointer allocator overload; direct pass-through to vkDestroyImageView.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Optional<AllocationCallbacks> overload: the optional decays to a possibly-null pointer.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // destroy( ImageView ): overload alias of destroyImageView — identical body; enables the
+  // generic Device::destroy( handle ) idiom used by UniqueHandle deleters.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // ShaderModule wrappers — same generated pattern throughout: assert the dispatcher's
+  // header version, then forward to the vkCreateShaderModule / vkDestroyShaderModule
+  // entries of the dispatch table.
+  // Raw-pointer form: returns the VkResult unchanged; noexcept.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::ShaderModule * pShaderModule, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateShaderModule( m_device, reinterpret_cast<const VkShaderModuleCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkShaderModule *>( pShaderModule ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode form: result is validated via resultCheck and the handle returned by value.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ShaderModule>::type Device::createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::ShaderModule shaderModule;
+    VkResult result = d.vkCreateShaderModule( m_device, reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkShaderModule *>( &shaderModule ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModule" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), shaderModule );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  // Unique form: wraps the created module in a UniqueHandle with an ObjectDestroy deleter.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderModule, Dispatch>>::type Device::createShaderModuleUnique( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::ShaderModule shaderModule;
+    VkResult result = d.vkCreateShaderModule( m_device, reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkShaderModule *>( &shaderModule ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModuleUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderModule, Dispatch>( shaderModule, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // destroyShaderModule: pass-through to vkDestroyShaderModule (allocator may be null).
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // destroy( ShaderModule ): overload alias of destroyShaderModule — identical body.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // PipelineCache create/destroy wrappers — same generated pattern: header-version assert,
+  // dispatch-table call, resultCheck on the enhanced overloads.
+  // Raw-pointer form: returns the VkResult unchanged; noexcept.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::PipelineCache * pPipelineCache, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreatePipelineCache( m_device, reinterpret_cast<const VkPipelineCacheCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkPipelineCache *>( pPipelineCache ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode form: checks the result via resultCheck and returns the cache by value.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineCache>::type Device::createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache;
+    VkResult result = d.vkCreatePipelineCache( m_device, reinterpret_cast<const VkPipelineCacheCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipelineCache *>( &pipelineCache ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCache" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelineCache );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  // Unique form: wraps the cache in a UniqueHandle with an ObjectDestroy deleter.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineCache, Dispatch>>::type Device::createPipelineCacheUnique( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache;
+    VkResult result = d.vkCreatePipelineCache( m_device, reinterpret_cast<const VkPipelineCacheCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipelineCache *>( &pipelineCache ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCacheUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineCache, Dispatch>( pipelineCache, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // destroyPipelineCache: pass-through to vkDestroyPipelineCache (allocator may be null).
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // destroy( PipelineCache ): overload alias of destroyPipelineCache — identical body.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // getPipelineCacheData (pointer form): direct pass-through; *pDataSize is the usual
+  // Vulkan in/out size parameter and pData may be null to query the required size.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, size_t * pDataSize, void * pData, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), pDataSize, pData ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced form: classic two-call enumerate loop — first call (pData == nullptr) fetches
+  // the size, second fills the vector; repeats while VK_INCOMPLETE (the cache can grow
+  // between calls), then shrinks the vector if the final size came back smaller.
+  // NOTE(review): if the *first* call fails outright, dataSize is read uninitialized by the
+  // assert below — harmless when resultCheck throws first, but confirm for
+  // VULKAN_HPP_NO_EXCEPTIONS builds.
+  template <typename Uint8_tAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<uint8_t, Uint8_tAllocator> data;
+    size_t dataSize;
+    VkResult result;
+    do
+    {
+      result = d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, nullptr );
+      if ( ( result == VK_SUCCESS ) && dataSize )
+      {
+        data.resize( dataSize );
+        result = d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, reinterpret_cast<void *>( data.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineCacheData" );
+    VULKAN_HPP_ASSERT( dataSize <= data.size() );
+    if ( dataSize < data.size() )
+    {
+      data.resize( dataSize );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
+  }
+
+  // Same retry loop, but the result vector is constructed with a caller-supplied allocator.
+  template <typename Uint8_tAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, uint8_t>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<uint8_t, Uint8_tAllocator> data( uint8_tAllocator );
+    size_t dataSize;
+    VkResult result;
+    do
+    {
+      result = d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, nullptr );
+      if ( ( result == VK_SUCCESS ) && dataSize )
+      {
+        data.resize( dataSize );
+        result = d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, reinterpret_cast<void *>( data.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineCacheData" );
+    VULKAN_HPP_ASSERT( dataSize <= data.size() );
+    if ( dataSize < data.size() )
+    {
+      data.resize( dataSize );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // mergePipelineCaches (pointer form): merges srcCacheCount caches into dstCache.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, uint32_t srcCacheCount, const VULKAN_HPP_NAMESPACE::PipelineCache * pSrcCaches, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkMergePipelineCaches( m_device, static_cast<VkPipelineCache>( dstCache ), srcCacheCount, reinterpret_cast<const VkPipelineCache *>( pSrcCaches ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // ArrayProxy form: count/pointer are taken from the proxy; void-typed success result.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::PipelineCache> const & srcCaches, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkMergePipelineCaches( m_device, static_cast<VkPipelineCache>( dstCache ), srcCaches.size(), reinterpret_cast<const VkPipelineCache *>( srcCaches.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::mergePipelineCaches" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // Graphics-pipeline creation wrappers. The enhanced overloads return ResultValue (not
+  // ResultValueType) because resultCheck accepts ePipelineCompileRequiredEXT as a success
+  // code alongside eSuccess — callers must inspect the returned Result.
+  // Raw-pointer form: direct pass-through; noexcept.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo * pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfoCount, reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( pCreateInfos ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkPipeline *>( pPipelines ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Batch form: one Pipeline per element of createInfos, returned as a vector.
+  template <typename PipelineAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size() );
+    VkResult result = d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipeline *>( pipelines.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+    
+    return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelines );
+  }
+
+  // Batch form with a caller-supplied allocator for the result vector.
+  template <typename PipelineAllocator, typename Dispatch, typename B0, typename std::enable_if<std::is_same<typename B0::value_type, Pipeline>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator );
+    VkResult result = d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipeline *>( pipelines.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+    
+    return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelines );
+  }
+
+  // Single-pipeline convenience: calls vkCreateGraphicsPipelines with count 1.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<VULKAN_HPP_NAMESPACE::Pipeline> Device::createGraphicsPipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::Pipeline pipeline;
+    VkResult result = d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1, reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipeline *>( &pipeline ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipeline", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+    
+    return ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipeline );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  // Unique batch form: creates raw handles first, then moves each into a UniqueHandle;
+  // all UniqueHandles share one ObjectDestroy deleter (device + allocator + dispatcher).
+  template <typename Dispatch, typename PipelineAllocator>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> Device::createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
+    VkResult result = d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipeline *>( pipelines.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
+    uniquePipelines.reserve( createInfos.size() );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    for ( auto const & pipeline : pipelines )
+    {
+      uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
+    }
+    return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
+  }
+
+  // Unique batch form with caller-supplied allocator for the UniqueHandle vector.
+  template <typename Dispatch, typename PipelineAllocator, typename B0, typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> Device::createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
+    VkResult result = d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipeline *>( pipelines.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
+    uniquePipelines.reserve( createInfos.size() );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    for ( auto const & pipeline : pipelines )
+    {
+      uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
+    }
+    return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
+  }
+
+  // Single-pipeline Unique convenience: count-1 call wrapped in a UniqueHandle.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>> Device::createGraphicsPipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::Pipeline pipeline;
+    VkResult result = d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1, reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipeline *>( &pipeline ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelineUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+    
+    return ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>( pipeline, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // Compute-pipeline creation wrappers — mirror the graphics-pipeline family above in
+  // structure; ePipelineCompileRequiredEXT is likewise accepted as a success code, so the
+  // enhanced overloads return ResultValue.
+  // Raw-pointer form: direct pass-through; noexcept.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo * pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfoCount, reinterpret_cast<const VkComputePipelineCreateInfo *>( pCreateInfos ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkPipeline *>( pPipelines ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Batch form: one Pipeline per element of createInfos, returned as a vector.
+  template <typename PipelineAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size() );
+    VkResult result = d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipeline *>( pipelines.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelines", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+    
+    return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelines );
+  }
+
+  // Batch form with a caller-supplied allocator for the result vector.
+  template <typename PipelineAllocator, typename Dispatch, typename B0, typename std::enable_if<std::is_same<typename B0::value_type, Pipeline>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator );
+    VkResult result = d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipeline *>( pipelines.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelines", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+    
+    return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelines );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<VULKAN_HPP_NAMESPACE::Pipeline> Device::createComputePipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::Pipeline pipeline;
+    VkResult result = d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1, reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipeline *>( &pipeline ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipeline", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+    
+    return ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipeline );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  // Smart-handle variant: creates the pipelines into a plain vector, then wraps each in a
+  // UniqueHandle sharing one ObjectDestroy deleter (same device / allocator / dispatcher).
+  template <typename Dispatch, typename PipelineAllocator>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> Device::createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
+    VkResult result = d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipeline *>( pipelines.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
+    uniquePipelines.reserve( createInfos.size() );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    for ( auto const & pipeline : pipelines )
+    {
+      uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
+    }
+    return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
+  }
+
+  // As above, but with a caller-supplied allocator for the UniqueHandle vector
+  // (SFINAE on B0::value_type == UniqueHandle<Pipeline, Dispatch> disambiguates).
+  template <typename Dispatch, typename PipelineAllocator, typename B0, typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> Device::createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
+    VkResult result = d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipeline *>( pipelines.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
+    uniquePipelines.reserve( createInfos.size() );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    for ( auto const & pipeline : pipelines )
+    {
+      uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
+    }
+    return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
+  }
+
+  // Single unique compute pipeline (count 1 into vkCreateComputePipelines).
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>> Device::createComputePipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::Pipeline pipeline;
+    VkResult result = d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1, reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipeline *>( &pipeline ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelineUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+    
+    return ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>( pipeline, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // C-style passthrough: destroys `pipeline` via the dispatcher; pAllocator may be null.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode variant taking Optional<AllocationCallbacks> (defaults to nullptr).
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // Overload of the generic Device::destroy for Pipeline handles (same as destroyPipeline).
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // C-style passthrough: writes the new layout through pPipelineLayout, returns the raw Result.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::PipelineLayout * pPipelineLayout, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreatePipelineLayout( m_device, reinterpret_cast<const VkPipelineLayoutCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkPipelineLayout *>( pPipelineLayout ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode variant: resultCheck throws (or records) on failure, then the handle is
+  // returned by value via createResultValueType.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineLayout>::type Device::createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout;
+    VkResult result = d.vkCreatePipelineLayout( m_device, reinterpret_cast<const VkPipelineLayoutCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipelineLayout *>( &pipelineLayout ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayout" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelineLayout );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  // Smart-handle variant: wraps the created layout in a UniqueHandle with an ObjectDestroy deleter.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineLayout, Dispatch>>::type Device::createPipelineLayoutUnique( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout;
+    VkResult result = d.vkCreatePipelineLayout( m_device, reinterpret_cast<const VkPipelineLayoutCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipelineLayout *>( &pipelineLayout ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayoutUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineLayout, Dispatch>( pipelineLayout, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // C-style passthrough: destroys `pipelineLayout`; pAllocator may be null.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode variant taking Optional<AllocationCallbacks>.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // Generic Device::destroy overload for PipelineLayout (same as destroyPipelineLayout).
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // C-style passthrough: writes the new sampler through pSampler, returns the raw Result.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Sampler * pSampler, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateSampler( m_device, reinterpret_cast<const VkSamplerCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkSampler *>( pSampler ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode variant: resultCheck on failure, sampler returned by value.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Sampler>::type Device::createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::Sampler sampler;
+    VkResult result = d.vkCreateSampler( m_device, reinterpret_cast<const VkSamplerCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSampler *>( &sampler ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSampler" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), sampler );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  // Smart-handle variant: sampler wrapped in a UniqueHandle with an ObjectDestroy deleter.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Sampler, Dispatch>>::type Device::createSamplerUnique( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::Sampler sampler;
+    VkResult result = d.vkCreateSampler( m_device, reinterpret_cast<const VkSamplerCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSampler *>( &sampler ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::Sampler, Dispatch>( sampler, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // C-style passthrough: destroys `sampler`; pAllocator may be null.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode variant taking Optional<AllocationCallbacks>.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // Generic Device::destroy overload for Sampler (same as destroySampler).
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // C-style passthrough: writes the new layout through pSetLayout, returns the raw Result.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayout, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateDescriptorSetLayout( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkDescriptorSetLayout *>( pSetLayout ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode variant: resultCheck on failure, layout returned by value.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorSetLayout>::type Device::createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::DescriptorSetLayout setLayout;
+    VkResult result = d.vkCreateDescriptorSetLayout( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDescriptorSetLayout *>( &setLayout ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayout" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), setLayout );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  // Smart-handle variant: layout wrapped in a UniqueHandle with an ObjectDestroy deleter.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSetLayout, Dispatch>>::type Device::createDescriptorSetLayoutUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::DescriptorSetLayout setLayout;
+    VkResult result = d.vkCreateDescriptorSetLayout( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDescriptorSetLayout *>( &setLayout ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayoutUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSetLayout, Dispatch>( setLayout, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // C-style passthrough: destroys `descriptorSetLayout`; pAllocator may be null.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyDescriptorSetLayout( m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode variant taking Optional<AllocationCallbacks>.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyDescriptorSetLayout( m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // Generic Device::destroy overload for DescriptorSetLayout (same as destroyDescriptorSetLayout).
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyDescriptorSetLayout( m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyDescriptorSetLayout( m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // C-style passthrough: writes the new pool through pDescriptorPool, returns the raw Result.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::DescriptorPool * pDescriptorPool, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateDescriptorPool( m_device, reinterpret_cast<const VkDescriptorPoolCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkDescriptorPool *>( pDescriptorPool ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode variant: resultCheck on failure, pool returned by value.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorPool>::type Device::createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool;
+    VkResult result = d.vkCreateDescriptorPool( m_device, reinterpret_cast<const VkDescriptorPoolCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDescriptorPool *>( &descriptorPool ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPool" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), descriptorPool );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  // Smart-handle variant: pool wrapped in a UniqueHandle with an ObjectDestroy deleter.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorPool, Dispatch>>::type Device::createDescriptorPoolUnique( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool;
+    VkResult result = d.vkCreateDescriptorPool( m_device, reinterpret_cast<const VkDescriptorPoolCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDescriptorPool *>( &descriptorPool ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPoolUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorPool, Dispatch>( descriptorPool, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // C-style passthrough: destroys `descriptorPool`; pAllocator may be null.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode variant taking Optional<AllocationCallbacks>.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // Generic Device::destroy overload for DescriptorPool (same as destroyDescriptorPool).
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Non-enhanced build: returns the raw Result from vkResetDescriptorPool.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkResetDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) ) );
+  }
+#else
+  // Enhanced build: result is discarded — NOTE(review): vkResetDescriptorPool's only
+  // documented return here is success, hence the void return in this mode.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkResetDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) );
+    
+    
+    
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo * pAllocateInfo, VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( pAllocateInfo ), reinterpret_cast<VkDescriptorSet *>( pDescriptorSets ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename DescriptorSetAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator>>::type Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator> descriptorSets( allocateInfo.descriptorSetCount );
+    VkResult result = d.vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSets" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), descriptorSets );
+  }
+
+  template <typename DescriptorSetAllocator, typename Dispatch, typename B0, typename std::enable_if<std::is_same<typename B0::value_type, DescriptorSet>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator>>::type Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, DescriptorSetAllocator & descriptorSetAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator> descriptorSets( allocateInfo.descriptorSetCount, descriptorSetAllocator );
+    VkResult result = d.vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSets" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), descriptorSets );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch, typename DescriptorSetAllocator>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>, DescriptorSetAllocator>>::type Device::allocateDescriptorSetsUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet> descriptorSets( allocateInfo.descriptorSetCount );
+    VkResult result = d.vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" );
+    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>, DescriptorSetAllocator> uniqueDescriptorSets;
+    uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount );
+    PoolFree<Device, DescriptorPool, Dispatch> deleter( *this, allocateInfo.descriptorPool, d );
+    for ( auto const & descriptorSet : descriptorSets )
+    {
+      uniqueDescriptorSets.push_back( UniqueHandle<DescriptorSet, Dispatch>( descriptorSet, deleter ) );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniqueDescriptorSets ) );
+  }
+
+  template <typename Dispatch, typename DescriptorSetAllocator, typename B0, typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<DescriptorSet, Dispatch>>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>, DescriptorSetAllocator>>::type Device::allocateDescriptorSetsUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, DescriptorSetAllocator & descriptorSetAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet> descriptorSets( allocateInfo.descriptorSetCount );
+    VkResult result = d.vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" );
+    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>, DescriptorSetAllocator> uniqueDescriptorSets( descriptorSetAllocator );
+    uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount );
+    PoolFree<Device, DescriptorPool, Dispatch> deleter( *this, allocateInfo.descriptorPool, d );
+    for ( auto const & descriptorSet : descriptorSets )
+    {
+      uniqueDescriptorSets.push_back( UniqueHandle<DescriptorSet, Dispatch>( descriptorSet, deleter ) );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniqueDescriptorSets ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkFreeDescriptorSets( m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSetCount, reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkFreeDescriptorSets( m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSets.size(), reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Result ( Device::free )( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkFreeDescriptorSets( m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSetCount, reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void ( Device::free )( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkFreeDescriptorSets( m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSets.size(), reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::updateDescriptorSets( uint32_t descriptorWriteCount, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites, uint32_t descriptorCopyCount, const VULKAN_HPP_NAMESPACE::CopyDescriptorSet * pDescriptorCopies, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkUpdateDescriptorSets( m_device, descriptorWriteCount, reinterpret_cast<const VkWriteDescriptorSet *>( pDescriptorWrites ), descriptorCopyCount, reinterpret_cast<const VkCopyDescriptorSet *>( pDescriptorCopies ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::updateDescriptorSets( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CopyDescriptorSet> const & descriptorCopies, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkUpdateDescriptorSets( m_device, descriptorWrites.size(), reinterpret_cast<const VkWriteDescriptorSet *>( descriptorWrites.data() ), descriptorCopies.size(), reinterpret_cast<const VkCopyDescriptorSet *>( descriptorCopies.data() ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Framebuffer * pFramebuffer, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateFramebuffer( m_device, reinterpret_cast<const VkFramebufferCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkFramebuffer *>( pFramebuffer ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Framebuffer>::type Device::createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::Framebuffer framebuffer;
+    VkResult result = d.vkCreateFramebuffer( m_device, reinterpret_cast<const VkFramebufferCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkFramebuffer *>( &framebuffer ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createFramebuffer" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), framebuffer );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Framebuffer, Dispatch>>::type Device::createFramebufferUnique( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::Framebuffer framebuffer;
+    VkResult result = d.vkCreateFramebuffer( m_device, reinterpret_cast<const VkFramebufferCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkFramebuffer *>( &framebuffer ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createFramebufferUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::Framebuffer, Dispatch>( framebuffer, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateRenderPass( m_device, reinterpret_cast<const VkRenderPassCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkRenderPass *>( pRenderPass ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type Device::createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::RenderPass renderPass;
+    VkResult result = d.vkCreateRenderPass( m_device, reinterpret_cast<const VkRenderPassCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkRenderPass *>( &renderPass ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), renderPass );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type Device::createRenderPassUnique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::RenderPass renderPass;
+    VkResult result = d.vkCreateRenderPass( m_device, reinterpret_cast<const VkRenderPassCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkRenderPass *>( &renderPass ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPassUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>( renderPass, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, VULKAN_HPP_NAMESPACE::Extent2D * pGranularity, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetRenderAreaGranularity( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<VkExtent2D *>( pGranularity ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D Device::getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::Extent2D granularity;
+    d.vkGetRenderAreaGranularity( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<VkExtent2D *>( &granularity ) );
+    
+    
+    return granularity;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::CommandPool * pCommandPool, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateCommandPool( m_device, reinterpret_cast<const VkCommandPoolCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkCommandPool *>( pCommandPool ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::CommandPool>::type Device::createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::CommandPool commandPool;
+    VkResult result = d.vkCreateCommandPool( m_device, reinterpret_cast<const VkCommandPoolCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkCommandPool *>( &commandPool ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPool" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), commandPool );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandPool, Dispatch>>::type Device::createCommandPoolUnique( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::CommandPool commandPool;
+    VkResult result = d.vkCreateCommandPool( m_device, reinterpret_cast<const VkCommandPoolCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkCommandPool *>( &commandPool ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPoolUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::CommandPool, Dispatch>( commandPool, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkResetCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkResetCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::resetCommandPool" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo * pAllocateInfo, VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( pAllocateInfo ), reinterpret_cast<VkCommandBuffer *>( pCommandBuffers ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename CommandBufferAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer, CommandBufferAllocator>>::type Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer, CommandBufferAllocator> commandBuffers( allocateInfo.commandBufferCount );
+    VkResult result = d.vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), commandBuffers );
+  }
+
+  template <typename CommandBufferAllocator, typename Dispatch, typename B0, typename std::enable_if<std::is_same<typename B0::value_type, CommandBuffer>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer, CommandBufferAllocator>>::type Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, CommandBufferAllocator & commandBufferAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer, CommandBufferAllocator> commandBuffers( allocateInfo.commandBufferCount, commandBufferAllocator );
+    VkResult result = d.vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), commandBuffers );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch, typename CommandBufferAllocator>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>, CommandBufferAllocator>>::type Device::allocateCommandBuffersUnique( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer> commandBuffers( allocateInfo.commandBufferCount );
+    VkResult result = d.vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" );
+    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>, CommandBufferAllocator> uniqueCommandBuffers;
+    uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount );
+    PoolFree<Device, CommandPool, Dispatch> deleter( *this, allocateInfo.commandPool, d );
+    for ( auto const & commandBuffer : commandBuffers )
+    {
+      uniqueCommandBuffers.push_back( UniqueHandle<CommandBuffer, Dispatch>( commandBuffer, deleter ) );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniqueCommandBuffers ) );
+  }
+
+  template <typename Dispatch, typename CommandBufferAllocator, typename B0, typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<CommandBuffer, Dispatch>>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>, CommandBufferAllocator>>::type Device::allocateCommandBuffersUnique( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, CommandBufferAllocator & commandBufferAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer> commandBuffers( allocateInfo.commandBufferCount );
+    VkResult result = d.vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" );
+    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>, CommandBufferAllocator> uniqueCommandBuffers( commandBufferAllocator );
+    uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount );
+    PoolFree<Device, CommandPool, Dispatch> deleter( *this, allocateInfo.commandPool, d );
+    for ( auto const & commandBuffer : commandBuffers )
+    {
+      uniqueCommandBuffers.push_back( UniqueHandle<CommandBuffer, Dispatch>( commandBuffer, deleter ) );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniqueCommandBuffers ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBufferCount, reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBuffers.size(), reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void ( Device::free )( VULKAN_HPP_NAMESPACE::CommandPool commandPool, uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBufferCount, reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void ( Device::free )( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBuffers.size(), reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo * pBeginInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo *>( pBeginInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo & beginInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo *>( &beginInfo ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::begin" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::end( Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkEndCommandBuffer( m_commandBuffer ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::end( Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkEndCommandBuffer( m_commandBuffer );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::end" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::reset" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdBindPipeline( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport * pViewports, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor, uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setLineWidth( float lineWidth, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetLineWidth( m_commandBuffer, lineWidth );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetDepthBias( m_commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4], Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetBlendConstants( m_commandBuffer, blendConstants );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetDepthBounds( m_commandBuffer, minDepthBounds, maxDepthBounds );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetStencilCompareMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), compareMask );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetStencilWriteMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), writeMask );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetStencilReference( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), reference );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t * pDynamicOffsets, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdBindDescriptorSets( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), firstSet, descriptorSetCount, reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ), dynamicOffsetCount, pDynamicOffsets );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets, VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & dynamicOffsets, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdBindDescriptorSets( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), firstSet, descriptorSets.size(), reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ), dynamicOffsets.size(), dynamicOffsets.data() );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::IndexType indexType, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdBindIndexBuffer( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkIndexType>( indexType ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdBindVertexBuffers( m_commandBuffer, firstBinding, bindingCount, reinterpret_cast<const VkBuffer *>( pBuffers ), reinterpret_cast<const VkDeviceSize *>( pOffsets ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
+#else
+    if ( buffers.size() != offsets.size() )
+  {
+    throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers: buffers.size() != offsets.size()" );
+  }
+#endif  /*VULKAN_HPP_NO_EXCEPTIONS*/
+    
+    
+    d.vkCmdBindVertexBuffers( m_commandBuffer, firstBinding, buffers.size(), reinterpret_cast<const VkBuffer *>( buffers.data() ), reinterpret_cast<const VkDeviceSize *>( offsets.data() ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDraw( m_commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDrawIndexed( m_commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDrawIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDrawIndexedIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDispatch( m_commandBuffer, groupCountX, groupCountY, groupCountZ );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDispatchIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferCopy * pRegions, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdCopyBuffer( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkBuffer>( dstBuffer ), regionCount, reinterpret_cast<const VkBufferCopy *>( pRegions ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferCopy> const & regions, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdCopyBuffer( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkBuffer>( dstBuffer ), regions.size(), reinterpret_cast<const VkBufferCopy *>( regions.data() ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageCopy * pRegions, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdCopyImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkImageCopy *>( pRegions ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageCopy> const & regions, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdCopyImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size(), reinterpret_cast<const VkImageCopy *>( regions.data() ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageBlit * pRegions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdBlitImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkImageBlit *>( pRegions ), static_cast<VkFilter>( filter ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageBlit> const & regions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdBlitImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size(), reinterpret_cast<const VkImageBlit *>( regions.data() ), static_cast<VkFilter>( filter ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdCopyBufferToImage( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkBufferImageCopy *>( pRegions ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdCopyBufferToImage( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size(), reinterpret_cast<const VkBufferImageCopy *>( regions.data() ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdCopyImageToBuffer( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkBuffer>( dstBuffer ), regionCount, reinterpret_cast<const VkBufferImageCopy *>( pRegions ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdCopyImageToBuffer( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkBuffer>( dstBuffer ), regions.size(), reinterpret_cast<const VkBufferImageCopy *>( regions.data() ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize dataSize, const void * pData, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdUpdateBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>( dataSize ), pData );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename DataType, typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::ArrayProxy<const DataType> const & data, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdUpdateBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), data.size() * sizeof( DataType ), reinterpret_cast<const void *>( data.data() ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize size, uint32_t data, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdFillBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>( size ), data );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearColorValue * pColor, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdClearColorImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearColorValue *>( pColor ), rangeCount, reinterpret_cast<const VkImageSubresourceRange *>( pRanges ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearColorValue & color, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdClearColorImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearColorValue *>( &color ), ranges.size(), reinterpret_cast<const VkImageSubresourceRange *>( ranges.data() ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue * pDepthStencil, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdClearDepthStencilImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearDepthStencilValue *>( pDepthStencil ), rangeCount, reinterpret_cast<const VkImageSubresourceRange *>( pRanges ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue & depthStencil, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdClearDepthStencilImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearDepthStencilValue *>( &depthStencil ), ranges.size(), reinterpret_cast<const VkImageSubresourceRange *>( ranges.data() ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( uint32_t attachmentCount, const VULKAN_HPP_NAMESPACE::ClearAttachment * pAttachments, uint32_t rectCount, const VULKAN_HPP_NAMESPACE::ClearRect * pRects, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdClearAttachments( m_commandBuffer, attachmentCount, reinterpret_cast<const VkClearAttachment *>( pAttachments ), rectCount, reinterpret_cast<const VkClearRect *>( pRects ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearAttachment> const & attachments, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearRect> const & rects, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdClearAttachments( m_commandBuffer, attachments.size(), reinterpret_cast<const VkClearAttachment *>( attachments.data() ), rects.size(), reinterpret_cast<const VkClearRect *>( rects.data() ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageResolve * pRegions, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdResolveImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkImageResolve *>( pRegions ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageResolve> const & regions, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdResolveImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size(), reinterpret_cast<const VkImageResolve *>( regions.data() ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::resetEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdResetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::waitEvents( uint32_t eventCount, const VULKAN_HPP_NAMESPACE::Event * pEvents, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdWaitEvents( m_commandBuffer, eventCount, reinterpret_cast<const VkEvent *>( pEvents ), static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), memoryBarrierCount, reinterpret_cast<const VkMemoryBarrier *>( pMemoryBarriers ), bufferMemoryBarrierCount, reinterpret_cast<const VkBufferMemoryBarrier *>( pBufferMemoryBarriers ), imageMemoryBarrierCount, reinterpret_cast<const VkImageMemoryBarrier *>( pImageMemoryBarriers ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::waitEvents( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdWaitEvents( m_commandBuffer, events.size(), reinterpret_cast<const VkEvent *>( events.data() ), static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), memoryBarriers.size(), reinterpret_cast<const VkMemoryBarrier *>( memoryBarriers.data() ), bufferMemoryBarriers.size(), reinterpret_cast<const VkBufferMemoryBarrier *>( bufferMemoryBarriers.data() ), imageMemoryBarriers.size(), reinterpret_cast<const VkImageMemoryBarrier *>( imageMemoryBarriers.data() ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdPipelineBarrier( m_commandBuffer, static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), static_cast<VkDependencyFlags>( dependencyFlags ), memoryBarrierCount, reinterpret_cast<const VkMemoryBarrier *>( pMemoryBarriers ), bufferMemoryBarrierCount, reinterpret_cast<const VkBufferMemoryBarrier *>( pBufferMemoryBarriers ), imageMemoryBarrierCount, reinterpret_cast<const VkImageMemoryBarrier *>( pImageMemoryBarriers ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdPipelineBarrier( m_commandBuffer, static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), static_cast<VkDependencyFlags>( dependencyFlags ), memoryBarriers.size(), reinterpret_cast<const VkMemoryBarrier *>( memoryBarriers.data() ), bufferMemoryBarriers.size(), reinterpret_cast<const VkBufferMemoryBarrier *>( bufferMemoryBarriers.data() ), imageMemoryBarriers.size(), reinterpret_cast<const VkImageMemoryBarrier *>( imageMemoryBarriers.data() ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdBeginQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdEndQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdResetQueryPool( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdWriteTimestamp( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkQueryPool>( queryPool ), query );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdCopyQueryPoolResults( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>( stride ), static_cast<VkQueryResultFlags>( flags ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void * pValues, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdPushConstants( m_commandBuffer, static_cast<VkPipelineLayout>( layout ), static_cast<VkShaderStageFlags>( stageFlags ), offset, size, pValues );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename ValuesType, typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, VULKAN_HPP_NAMESPACE::ArrayProxy<const ValuesType> const & values, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdPushConstants( m_commandBuffer, static_cast<VkPipelineLayout>( layout ), static_cast<VkShaderStageFlags>( stageFlags ), offset, values.size() * sizeof( ValuesType ), reinterpret_cast<const void *>( values.data() ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ), static_cast<VkSubpassContents>( contents ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ), static_cast<VkSubpassContents>( contents ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdNextSubpass( m_commandBuffer, static_cast<VkSubpassContents>( contents ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endRenderPass( Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdEndRenderPass( m_commandBuffer );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::executeCommands( uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdExecuteCommands( m_commandBuffer, commandBufferCount, reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::executeCommands( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdExecuteCommands( m_commandBuffer, commandBuffers.size(), reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_VERSION_1_1 ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceVersion( uint32_t * pApiVersion, Dispatch const & d  ) VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkEnumerateInstanceVersion( pApiVersion ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<uint32_t>::type enumerateInstanceVersion( Dispatch const & d )
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    uint32_t apiVersion;
+    VkResult result = d.vkEnumerateInstanceVersion( &apiVersion );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceVersion" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), apiVersion );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory2( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkBindBufferMemory2( m_device, bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo *>( pBindInfos ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindBufferMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkBindBufferMemory2( m_device, bindInfos.size(), reinterpret_cast<const VkBindBufferMemoryInfo *>( bindInfos.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory2( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkBindImageMemory2( m_device, bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo *>( pBindInfos ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindImageMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkBindImageMemory2( m_device, bindInfos.size(), reinterpret_cast<const VkBindImageMemoryInfo *>( bindInfos.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetDeviceGroupPeerMemoryFeatures( m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( pPeerMemoryFeatures ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags Device::getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures;
+    d.vkGetDeviceGroupPeerMemoryFeatures( m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( &peerMemoryFeatures ) );
+    
+    
+    return peerMemoryFeatures;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t deviceMask, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetDeviceMask( m_commandBuffer, deviceMask );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::dispatchBase( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDispatchBase( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDeviceGroups( uint32_t * pPhysicalDeviceGroupCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups( m_instance, pPhysicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( pPhysicalDeviceGroupProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename PhysicalDeviceGroupPropertiesAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type Instance::enumeratePhysicalDeviceGroups( Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties;
+    uint32_t physicalDeviceGroupCount;
+    VkResult result;
+    do
+    {
+      result = d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && physicalDeviceGroupCount )
+      {
+        physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
+        result = d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" );
+    VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
+    if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() )
+    {
+      physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), physicalDeviceGroupProperties );
+  }
+
+  template <typename PhysicalDeviceGroupPropertiesAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, PhysicalDeviceGroupProperties>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type Instance::enumeratePhysicalDeviceGroups( PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties( physicalDeviceGroupPropertiesAllocator );
+    uint32_t physicalDeviceGroupCount;
+    VkResult result;
+    do
+    {
+      result = d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && physicalDeviceGroupCount )
+      {
+        physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
+        result = d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" );
+    VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
+    if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() )
+    {
+      physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), physicalDeviceGroupProperties );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetImageMemoryRequirements2( m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
+    d.vkGetImageMemoryRequirements2( m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+    
+    
+    return memoryRequirements;
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
+    d.vkGetImageMemoryRequirements2( m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+    
+    
+    return structureChain;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetBufferMemoryRequirements2( m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
+    d.vkGetBufferMemoryRequirements2( m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+    
+    
+    return memoryRequirements;
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
+    d.vkGetBufferMemoryRequirements2( m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+    
+    
+    return structureChain;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo, uint32_t * pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( pInfo ), pSparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode overload: standard two-call enumeration — first call with nullptr to
+  // get the element count, resize, second call to fill. Shrinks the vector if the fill
+  // call reported fewer elements than the resize allotted. Returns the vector by value.
+  template <typename SparseImageMemoryRequirements2Allocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> Device::getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements;
+    uint32_t sparseMemoryRequirementCount;
+    d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr );
+    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
+    
+    VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
+    if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
+    {
+      sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    }
+    return sparseMemoryRequirements;
+  }
+
+  // Same two-call enumeration, but the result vector is constructed with a
+  // caller-supplied allocator instance. The B1/enable_if parameter restricts this
+  // overload to allocators whose value_type is SparseImageMemoryRequirements2.
+  template <typename SparseImageMemoryRequirements2Allocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, SparseImageMemoryRequirements2>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> Device::getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements( sparseImageMemoryRequirements2Allocator );
+    uint32_t sparseMemoryRequirementCount;
+    d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr );
+    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
+    
+    VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
+    if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
+    {
+      sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    }
+    return sparseMemoryRequirements;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // C-API-style overload: forwards the raw output pointer straight to
+  // vkGetPhysicalDeviceFeatures2 via the dispatcher.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( pFeatures ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode overload: returns the queried PhysicalDeviceFeatures2 by value.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 PhysicalDevice::getFeatures2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features;
+    d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
+    
+    
+    return features;
+  }
+
+  // StructureChain overload: fills the PhysicalDeviceFeatures2 link of a caller-chosen
+  // structure chain, letting extension feature structs be queried through pNext.
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getFeatures2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>();
+    d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
+    
+    
+    return structureChain;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // C-API-style overload: forwards the raw output pointer straight to
+  // vkGetPhysicalDeviceProperties2 via the dispatcher.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( pProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode overload: returns the queried PhysicalDeviceProperties2 by value.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 PhysicalDevice::getProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties;
+    d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
+    
+    
+    return properties;
+  }
+
+  // StructureChain overload: fills the PhysicalDeviceProperties2 link of a
+  // caller-chosen structure chain so extension property structs come back via pNext.
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>();
+    d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
+    
+    
+    return structureChain;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // C-API-style overload: forwards the format and raw output pointer to
+  // vkGetPhysicalDeviceFormatProperties2 via the dispatcher.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( pFormatProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode overload: returns the queried FormatProperties2 by value.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2 PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties;
+    d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
+    
+    
+    return formatProperties;
+  }
+
+  // StructureChain overload: fills the FormatProperties2 link of a caller-chosen
+  // structure chain so extension format-property structs come back via pNext.
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::FormatProperties2>();
+    d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
+    
+    
+    return structureChain;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // C-API-style overload: forwards raw pointers to vkGetPhysicalDeviceImageFormatProperties2
+  // and returns the VkResult cast to vk::Result (no exception translation here).
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo, VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( pImageFormatInfo ), reinterpret_cast<VkImageFormatProperties2 *>( pImageFormatProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode overload: queries into a local ImageFormatProperties2, routes the
+  // VkResult through resultCheck (throws / reports on failure depending on the
+  // exception configuration), then wraps the value via createResultValueType.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties;
+    VkResult result = d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ), reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), imageFormatProperties );
+  }
+
+  // StructureChain overload: fills the ImageFormatProperties2 link of a caller-chosen
+  // structure chain so extension structs come back via pNext; same result handling.
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>();
+    VkResult result = d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ), reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), structureChain );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // C-API-style overload: forwards the count pointer and raw output array to
+  // vkGetPhysicalDeviceQueueFamilyProperties2 via the dispatcher.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties2( uint32_t * pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( pQueueFamilyProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode overload: two-call enumeration (count, resize, fill) returning a
+  // vector of QueueFamilyProperties2; shrinks if the fill reported fewer elements.
+  template <typename QueueFamilyProperties2Allocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> PhysicalDevice::getQueueFamilyProperties2( Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties;
+    uint32_t queueFamilyPropertyCount;
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
+    
+    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
+    if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
+    {
+      queueFamilyProperties.resize( queueFamilyPropertyCount );
+    }
+    return queueFamilyProperties;
+  }
+
+  // As above, but the result vector is constructed with a caller-supplied allocator;
+  // the B1/enable_if parameter restricts the overload to matching allocator types.
+  template <typename QueueFamilyProperties2Allocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, QueueFamilyProperties2>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> PhysicalDevice::getQueueFamilyProperties2( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties( queueFamilyProperties2Allocator );
+    uint32_t queueFamilyPropertyCount;
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
+    
+    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
+    if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
+    {
+      queueFamilyProperties.resize( queueFamilyPropertyCount );
+    }
+    return queueFamilyProperties;
+  }
+
+  // StructureChain overload: returns one structure chain per queue family. A scratch
+  // vector of QueueFamilyProperties2 is filled by the API; before the fill call each
+  // element's pNext is wired to the corresponding chain's pNext so extension structs
+  // land inside the chains, then the base structs are copied back into the chains.
+  template <typename StructureChain, typename StructureChainAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator> PhysicalDevice::getQueueFamilyProperties2( Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<StructureChain, StructureChainAllocator> structureChains;
+    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties;
+    uint32_t queueFamilyPropertyCount;
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+    structureChains.resize( queueFamilyPropertyCount );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
+    {
+      queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
+    }
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
+    
+    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
+      if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
+      {
+        structureChains.resize( queueFamilyPropertyCount );
+      }
+      for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
+      {
+        structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
+      }
+    return structureChains;
+  }
+
+  // StructureChain variant with a caller-supplied allocator for the chain vector;
+  // otherwise identical pNext-wiring and copy-back logic as the overload above.
+  template <typename StructureChain, typename StructureChainAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, StructureChain>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator> PhysicalDevice::getQueueFamilyProperties2( StructureChainAllocator & structureChainAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<StructureChain, StructureChainAllocator> structureChains( structureChainAllocator );
+    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties;
+    uint32_t queueFamilyPropertyCount;
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+    structureChains.resize( queueFamilyPropertyCount );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
+    {
+      queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
+    }
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
+    
+    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
+      if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
+      {
+        structureChains.resize( queueFamilyPropertyCount );
+      }
+      for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
+      {
+        structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
+      }
+    return structureChains;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // C-API-style overload: forwards the raw output pointer straight to
+  // vkGetPhysicalDeviceMemoryProperties2 via the dispatcher.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( pMemoryProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode overload: returns the queried PhysicalDeviceMemoryProperties2 by value.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 PhysicalDevice::getMemoryProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties;
+    d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
+    
+    
+    return memoryProperties;
+  }
+
+  // StructureChain overload: fills the PhysicalDeviceMemoryProperties2 link of a
+  // caller-chosen structure chain so extension structs come back via pNext.
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getMemoryProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>();
+    d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
+    
+    
+    return structureChain;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // C-API-style overload: forwards raw pointers to
+  // vkGetPhysicalDeviceSparseImageFormatProperties2 via the dispatcher.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( pFormatInfo ), pPropertyCount, reinterpret_cast<VkSparseImageFormatProperties2 *>( pProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode overload: two-call enumeration (count, resize, fill) returning a
+  // vector of SparseImageFormatProperties2; shrinks if the fill reported fewer elements.
+  template <typename SparseImageFormatProperties2Allocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> PhysicalDevice::getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties;
+    uint32_t propertyCount;
+    d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, nullptr );
+    properties.resize( propertyCount );
+    d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
+    
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    if ( propertyCount < properties.size() )
+    {
+      properties.resize( propertyCount );
+    }
+    return properties;
+  }
+
+  // As above, but the result vector is constructed with a caller-supplied allocator;
+  // the B1/enable_if parameter restricts the overload to matching allocator types.
+  template <typename SparseImageFormatProperties2Allocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, SparseImageFormatProperties2>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> PhysicalDevice::getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties( sparseImageFormatProperties2Allocator );
+    uint32_t propertyCount;
+    d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, nullptr );
+    properties.resize( propertyCount );
+    d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
+    
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    if ( propertyCount < properties.size() )
+    {
+      properties.resize( propertyCount );
+    }
+    return properties;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // Direct wrapper for vkTrimCommandPool: converts the handle and flags to their
+  // C types and dispatches; no enhanced-mode variant is needed (void, no pointers).
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkTrimCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
+  }
+
+
+  // C-API-style overload: forwards raw pointers to vkGetDeviceQueue2 via the dispatcher.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 * pQueueInfo, VULKAN_HPP_NAMESPACE::Queue * pQueue, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetDeviceQueue2( m_device, reinterpret_cast<const VkDeviceQueueInfo2 *>( pQueueInfo ), reinterpret_cast<VkQueue *>( pQueue ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode overload: returns the retrieved Queue handle by value.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Queue Device::getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 & queueInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::Queue queue;
+    d.vkGetDeviceQueue2( m_device, reinterpret_cast<const VkDeviceQueueInfo2 *>( &queueInfo ), reinterpret_cast<VkQueue *>( &queue ) );
+    
+    
+    return queue;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // C-API-style overload: forwards raw pointers to vkCreateSamplerYcbcrConversion and
+  // returns the VkResult cast to vk::Result (caller handles errors).
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateSamplerYcbcrConversion( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkSamplerYcbcrConversion *>( pYcbcrConversion ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode overload: takes the create info by reference and an Optional
+  // allocator, routes the VkResult through resultCheck, and returns the new handle
+  // wrapped via createResultValueType.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type Device::createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
+    VkResult result = d.vkCreateSamplerYcbcrConversion( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversion" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), ycbcrConversion );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  // Unique-handle variant: same creation path, but wraps the handle in a UniqueHandle
+  // whose ObjectDestroy deleter destroys it with this device and allocator on release.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>>::type Device::createSamplerYcbcrConversionUnique( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
+    VkResult result = d.vkCreateSamplerYcbcrConversion( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>( ycbcrConversion, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // C-API-style overload: forwards the handle and raw allocator pointer to
+  // vkDestroySamplerYcbcrConversion via the dispatcher.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroySamplerYcbcrConversion( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode overload: accepts an Optional allocator and converts it to the raw
+  // VkAllocationCallbacks pointer (nullptr when absent) before dispatching.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroySamplerYcbcrConversion( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // Generic destroy() overload for SamplerYcbcrConversion handles; same dispatch as
+  // destroySamplerYcbcrConversion, raw allocator pointer form.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroySamplerYcbcrConversion( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode destroy() overload: Optional allocator converted to the raw
+  // VkAllocationCallbacks pointer (nullptr when absent) before dispatching.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroySamplerYcbcrConversion( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // C-API-style overload: forwards raw pointers to vkCreateDescriptorUpdateTemplate
+  // and returns the VkResult cast to vk::Result (caller handles errors).
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorUpdateTemplate( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateDescriptorUpdateTemplate( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkDescriptorUpdateTemplate *>( pDescriptorUpdateTemplate ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode overload: takes the create info by reference and an Optional
+  // allocator, routes the VkResult through resultCheck, and returns the new handle
+  // wrapped via createResultValueType.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type Device::createDescriptorUpdateTemplate( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
+    VkResult result = d.vkCreateDescriptorUpdateTemplate( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplate" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), descriptorUpdateTemplate );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  // Unique-handle variant: same creation path, but wraps the handle in a UniqueHandle
+  // whose ObjectDestroy deleter destroys it with this device and allocator on release.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>>::type Device::createDescriptorUpdateTemplateUnique( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
+    VkResult result = d.vkCreateDescriptorUpdateTemplate( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>( descriptorUpdateTemplate, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void * pData, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkUpdateDescriptorSetWithTemplate( m_device, static_cast<VkDescriptorSet>( descriptorSet ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), pData );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename DataType, typename Dispatch>
+  VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, DataType const & data, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkUpdateDescriptorSetWithTemplate( m_device, static_cast<VkDescriptorSet>( descriptorSet ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const void *>( &data ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo, VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPhysicalDeviceExternalBufferProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( pExternalBufferInfo ), reinterpret_cast<VkExternalBufferProperties *>( pExternalBufferProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties PhysicalDevice::getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties;
+    d.vkGetPhysicalDeviceExternalBufferProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( &externalBufferInfo ), reinterpret_cast<VkExternalBufferProperties *>( &externalBufferProperties ) );
+    
+    
+    return externalBufferProperties;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo, VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPhysicalDeviceExternalFenceProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( pExternalFenceInfo ), reinterpret_cast<VkExternalFenceProperties *>( pExternalFenceProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties PhysicalDevice::getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties;
+    d.vkGetPhysicalDeviceExternalFenceProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( &externalFenceInfo ), reinterpret_cast<VkExternalFenceProperties *>( &externalFenceProperties ) );
+    
+    
+    return externalFenceProperties;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPhysicalDeviceExternalSemaphoreProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( pExternalSemaphoreInfo ), reinterpret_cast<VkExternalSemaphoreProperties *>( pExternalSemaphoreProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties PhysicalDevice::getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties;
+    d.vkGetPhysicalDeviceExternalSemaphoreProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( &externalSemaphoreInfo ), reinterpret_cast<VkExternalSemaphoreProperties *>( &externalSemaphoreProperties ) );
+    
+    
+    return externalSemaphoreProperties;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetDescriptorSetLayoutSupport( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( pSupport ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support;
+    d.vkGetDescriptorSetLayoutSupport( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
+    
+    
+    return support;
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support = structureChain.template get<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>();
+    d.vkGetDescriptorSetLayoutSupport( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
+    
+    
+    return structureChain;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_VERSION_1_2 ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDrawIndirectCount( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDrawIndexedIndirectCount( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateRenderPass2( m_device, reinterpret_cast<const VkRenderPassCreateInfo2 *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkRenderPass *>( pRenderPass ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type Device::createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::RenderPass renderPass;
+    VkResult result = d.vkCreateRenderPass2( m_device, reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkRenderPass *>( &renderPass ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), renderPass );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type Device::createRenderPass2Unique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::RenderPass renderPass;
+    VkResult result = d.vkCreateRenderPass2( m_device, reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkRenderPass *>( &renderPass ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2Unique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>( renderPass, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdBeginRenderPass2( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdBeginRenderPass2( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdNextSubpass2( m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdNextSubpass2( m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkResetQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t * pValue, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetSemaphoreCounterValue( m_device, static_cast<VkSemaphore>( semaphore ), pValue ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type Device::getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    uint64_t value;
+    VkResult result = d.vkGetSemaphoreCounterValue( m_device, static_cast<VkSemaphore>( semaphore ), &value );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreCounterValue" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), value );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo, uint64_t timeout, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkWaitSemaphores( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( pWaitInfo ), timeout ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkWaitSemaphores( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( &waitInfo ), timeout );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphores", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } );
+    
+    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkSignalSemaphore( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( pSignalInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkSignalSemaphore( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( &signalInfo ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphore" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<DeviceAddress>( d.vkGetBufferDeviceAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress Device::getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkDeviceAddress result = d.vkGetBufferDeviceAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
+    
+    
+    return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return d.vkGetBufferOpaqueCaptureAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    uint64_t result = d.vkGetBufferOpaqueCaptureAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
+    
+    
+    return result;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return d.vkGetDeviceMemoryOpaqueCaptureAddress( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( pInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    uint64_t result = d.vkGetDeviceMemoryOpaqueCaptureAddress( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( &info ) );
+    
+    
+    return result;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_VERSION_1_3 ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getToolProperties( uint32_t * pToolCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties * pToolProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, pToolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( pToolProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename PhysicalDeviceToolPropertiesAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator>>::type PhysicalDevice::getToolProperties( Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator> toolProperties;
+    uint32_t toolCount;
+    VkResult result;
+    do
+    {
+      result = d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && toolCount )
+      {
+        toolProperties.resize( toolCount );
+        result = d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolProperties" );
+    VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() );
+    if ( toolCount < toolProperties.size() )
+    {
+      toolProperties.resize( toolCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), toolProperties );
+  }
+
+  template <typename PhysicalDeviceToolPropertiesAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, PhysicalDeviceToolProperties>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator>>::type PhysicalDevice::getToolProperties( PhysicalDeviceToolPropertiesAllocator & physicalDeviceToolPropertiesAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator> toolProperties( physicalDeviceToolPropertiesAllocator );
+    uint32_t toolCount;
+    VkResult result;
+    do
+    {
+      result = d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && toolCount )
+      {
+        toolProperties.resize( toolCount );
+        result = d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolProperties" );
+    VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() );
+    if ( toolCount < toolProperties.size() )
+    {
+      toolProperties.resize( toolCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), toolProperties );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPrivateDataSlot( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::PrivateDataSlot * pPrivateDataSlot, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreatePrivateDataSlot( m_device, reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkPrivateDataSlot *>( pPrivateDataSlot ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PrivateDataSlot>::type Device::createPrivateDataSlot( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot;
+    VkResult result = d.vkCreatePrivateDataSlot( m_device, reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPrivateDataSlot *>( &privateDataSlot ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlot" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), privateDataSlot );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PrivateDataSlot, Dispatch>>::type Device::createPrivateDataSlotUnique( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot;
+    VkResult result = d.vkCreatePrivateDataSlot( m_device, reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPrivateDataSlot *>( &privateDataSlot ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::PrivateDataSlot, Dispatch>( privateDataSlot, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyPrivateDataSlot( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyPrivateDataSlot( m_device, static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyPrivateDataSlot( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyPrivateDataSlot( m_device, static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyPrivateDataSlot( m_device, static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyPrivateDataSlot( m_device, static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, uint64_t data, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkSetPrivateData( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, uint64_t data, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkSetPrivateData( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateData" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, uint64_t * pData, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPrivateData( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), pData );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Device::getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    uint64_t data;
+    d.vkGetPrivateData( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), &data );
+    
+    
+    return data;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setEvent2( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetEvent2( m_commandBuffer, static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setEvent2( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdSetEvent2( m_commandBuffer, static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::resetEvent2( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdResetEvent2( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags2>( stageMask ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::waitEvents2( uint32_t eventCount, const VULKAN_HPP_NAMESPACE::Event * pEvents, const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfos, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdWaitEvents2( m_commandBuffer, eventCount, reinterpret_cast<const VkEvent *>( pEvents ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfos ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::waitEvents2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfo> const & dependencyInfos, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    VULKAN_HPP_ASSERT( events.size() == dependencyInfos.size() );
+#else
+    if ( events.size() != dependencyInfos.size() )
+  {
+    throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::waitEvents2: events.size() != dependencyInfos.size()" );
+  }
+#endif  /*VULKAN_HPP_NO_EXCEPTIONS*/
+    
+    
+    d.vkCmdWaitEvents2( m_commandBuffer, events.size(), reinterpret_cast<const VkEvent *>( events.data() ), reinterpret_cast<const VkDependencyInfo *>( dependencyInfos.data() ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2( const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdPipelineBarrier2( m_commandBuffer, reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdPipelineBarrier2( m_commandBuffer, reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp2( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdWriteTimestamp2( m_commandBuffer, static_cast<VkPipelineStageFlags2>( stage ), static_cast<VkQueryPool>( queryPool ), query );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::submit2( uint32_t submitCount, const VULKAN_HPP_NAMESPACE::SubmitInfo2 * pSubmits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkQueueSubmit2( m_queue, submitCount, reinterpret_cast<const VkSubmitInfo2 *>( pSubmits ), static_cast<VkFence>( fence ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::submit2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2> const & submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkQueueSubmit2( m_queue, submits.size(), reinterpret_cast<const VkSubmitInfo2 *>( submits.data() ), static_cast<VkFence>( fence ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 * pCopyBufferInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdCopyBuffer2( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2 *>( pCopyBufferInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdCopyBuffer2( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2 *>( &copyBufferInfo ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 * pCopyImageInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdCopyImage2( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2 *>( pCopyImageInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdCopyImage2( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2 *>( &copyImageInfo ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 * pCopyBufferToImageInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdCopyBufferToImage2( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2 *>( pCopyBufferToImageInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdCopyBufferToImage2( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2 *>( &copyBufferToImageInfo ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 * pCopyImageToBufferInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdCopyImageToBuffer2( m_commandBuffer, reinterpret_cast<const VkCopyImageToBufferInfo2 *>( pCopyImageToBufferInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdCopyImageToBuffer2( m_commandBuffer, reinterpret_cast<const VkCopyImageToBufferInfo2 *>( &copyImageToBufferInfo ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::blitImage2( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 * pBlitImageInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdBlitImage2( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2 *>( pBlitImageInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::blitImage2( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdBlitImage2( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2 *>( &blitImageInfo ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::resolveImage2( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 * pResolveImageInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdResolveImage2( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2 *>( pResolveImageInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::resolveImage2( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdResolveImage2( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2 *>( &resolveImageInfo ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginRendering( const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdBeginRendering( m_commandBuffer, reinterpret_cast<const VkRenderingInfo *>( pRenderingInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginRendering( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdBeginRendering( m_commandBuffer, reinterpret_cast<const VkRenderingInfo *>( &renderingInfo ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endRendering( Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdEndRendering( m_commandBuffer );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetCullMode( m_commandBuffer, static_cast<VkCullModeFlags>( cullMode ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetFrontFace( m_commandBuffer, static_cast<VkFrontFace>( frontFace ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetPrimitiveTopology( m_commandBuffer, static_cast<VkPrimitiveTopology>( primitiveTopology ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCount( uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport * pViewports, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetViewportWithCount( m_commandBuffer, viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdSetViewportWithCount( m_commandBuffer, viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCount( uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetScissorWithCount( m_commandBuffer, scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdSetScissorWithCount( m_commandBuffer, scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes, const VULKAN_HPP_NAMESPACE::DeviceSize * pStrides, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdBindVertexBuffers2( m_commandBuffer, firstBinding, bindingCount, reinterpret_cast<const VkBuffer *>( pBuffers ), reinterpret_cast<const VkDeviceSize *>( pOffsets ), reinterpret_cast<const VkDeviceSize *>( pSizes ), reinterpret_cast<const VkDeviceSize *>( pStrides ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2( uint32_t firstBinding, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
+    VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() );
+    VULKAN_HPP_ASSERT( strides.empty() || buffers.size() == strides.size() );
+#else
+    if ( buffers.size() != offsets.size() )
+  {
+    throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != offsets.size()" );
+  }
+    if ( !sizes.empty() && buffers.size() != sizes.size() )
+  {
+    throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != sizes.size()" );
+  }
+    if ( !strides.empty() && buffers.size() != strides.size() )
+  {
+    throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != strides.size()" );
+  }
+#endif  /*VULKAN_HPP_NO_EXCEPTIONS*/
+    
+    
+    d.vkCmdBindVertexBuffers2( m_commandBuffer, firstBinding, buffers.size(), reinterpret_cast<const VkBuffer *>( buffers.data() ), reinterpret_cast<const VkDeviceSize *>( offsets.data() ), reinterpret_cast<const VkDeviceSize *>( sizes.data() ), reinterpret_cast<const VkDeviceSize *>( strides.data() ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetDepthTestEnable( m_commandBuffer, static_cast<VkBool32>( depthTestEnable ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetDepthWriteEnable( m_commandBuffer, static_cast<VkBool32>( depthWriteEnable ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetDepthCompareOp( m_commandBuffer, static_cast<VkCompareOp>( depthCompareOp ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetDepthBoundsTestEnable( m_commandBuffer, static_cast<VkBool32>( depthBoundsTestEnable ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetStencilTestEnable( m_commandBuffer, static_cast<VkBool32>( stencilTestEnable ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setStencilOp( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, VULKAN_HPP_NAMESPACE::StencilOp failOp, VULKAN_HPP_NAMESPACE::StencilOp passOp, VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, VULKAN_HPP_NAMESPACE::CompareOp compareOp, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetStencilOp( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), static_cast<VkStencilOp>( failOp ), static_cast<VkStencilOp>( passOp ), static_cast<VkStencilOp>( depthFailOp ), static_cast<VkCompareOp>( compareOp ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetRasterizerDiscardEnable( m_commandBuffer, static_cast<VkBool32>( rasterizerDiscardEnable ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetDepthBiasEnable( m_commandBuffer, static_cast<VkBool32>( depthBiasEnable ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetPrimitiveRestartEnable( m_commandBuffer, static_cast<VkBool32>( primitiveRestartEnable ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements * pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetDeviceBufferMemoryRequirements( m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
+    d.vkGetDeviceBufferMemoryRequirements( m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+    
+    
+    return memoryRequirements;
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
+    d.vkGetDeviceBufferMemoryRequirements( m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+    
+    
+    return structureChain;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetDeviceImageMemoryRequirements( m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
+    d.vkGetDeviceImageMemoryRequirements( m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+    
+    
+    return memoryRequirements;
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT  // StructureChain overload: query memory requirements plus any extension structs chained behind MemoryRequirements2
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );  // dispatcher must have been initialized against the same Vulkan header this code was generated from
+
+    
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();  // the C call fills the chain's MemoryRequirements2 link (and its pNext chain) in place
+    d.vkGetDeviceImageMemoryRequirements( m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+    
+    
+    return structureChain;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo, uint32_t * pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT  // raw-pointer variant: thin pass-through to the C entry point, caller owns the count/array protocol
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetDeviceImageSparseMemoryRequirements( m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ), pSparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename SparseImageMemoryRequirements2Allocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> Device::getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const  // enhanced variant: returns the requirements as a std::vector, hiding the count/array protocol
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements;
+    uint32_t sparseMemoryRequirementCount;
+    d.vkGetDeviceImageSparseMemoryRequirements( m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, nullptr );  // first call: nullptr array queries only the element count
+    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    d.vkGetDeviceImageSparseMemoryRequirements( m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );  // second call fills the sized vector; no retry loop needed as this entry point returns void
+    
+    VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
+    if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )  // shrink if the implementation wrote fewer elements than first reported
+    {
+      sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    }
+    return sparseMemoryRequirements;
+  }
+
+  template <typename SparseImageMemoryRequirements2Allocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, SparseImageMemoryRequirements2>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> Device::getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, Dispatch const & d ) const  // same as above, but constructs the result vector with a caller-supplied allocator
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements( sparseImageMemoryRequirements2Allocator );
+    uint32_t sparseMemoryRequirementCount;
+    d.vkGetDeviceImageSparseMemoryRequirements( m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, nullptr );  // count query
+    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    d.vkGetDeviceImageSparseMemoryRequirements( m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );  // fill
+    
+    VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
+    if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
+    {
+      sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    }
+    return sparseMemoryRequirements;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_surface ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT  // raw-pointer variant: thin pass-through to vkDestroySurfaceKHR
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT  // enhanced variant: Optional allocator converts to nullptr when absent
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT  // overloaded destroy(): same behavior as destroySurfaceKHR, used by UniqueHandle deleters
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT  // enhanced destroy() with Optional allocator
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::Bool32 * pSupported, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT  // raw-pointer variant: returns Result, writes support flag through pSupported
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceSupportKHR( m_physicalDevice, queueFamilyIndex, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkBool32 *>( pSupported ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Bool32>::type PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const  // enhanced variant: resultCheck throws (or asserts, per config) on failure, then the value is returned directly
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::Bool32 supported;
+    VkResult result = d.vkGetPhysicalDeviceSurfaceSupportKHR( m_physicalDevice, queueFamilyIndex, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkBool32 *>( &supported ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceSupportKHR" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), supported );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR * pSurfaceCapabilities, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT  // raw-pointer variant
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilitiesKHR *>( pSurfaceCapabilities ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR>::type PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const  // enhanced variant: returns the capabilities struct by value after resultCheck
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities;
+    VkResult result = d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilitiesKHR *>( &surfaceCapabilities ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilitiesKHR" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surfaceCapabilities );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t * pSurfaceFormatCount, VULKAN_HPP_NAMESPACE::SurfaceFormatKHR * pSurfaceFormats, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT  // raw-pointer variant: caller drives the count/array enumeration protocol
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pSurfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR *>( pSurfaceFormats ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename SurfaceFormatKHRAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR, SurfaceFormatKHRAllocator>>::type PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const  // enhanced variant: returns all surface formats as a vector
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR, SurfaceFormatKHRAllocator> surfaceFormats;
+    uint32_t surfaceFormatCount;
+    VkResult result;
+    do  // standard Vulkan enumeration loop: query count, fill; retry on VK_INCOMPLETE (count may grow between the two calls)
+    {
+      result = d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && surfaceFormatCount )
+      {
+        surfaceFormats.resize( surfaceFormatCount );
+        result = d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR *>( surfaceFormats.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" );
+    VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
+    if ( surfaceFormatCount < surfaceFormats.size() )  // shrink if fewer elements were written than reserved
+    {
+      surfaceFormats.resize( surfaceFormatCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surfaceFormats );
+  }
+
+  template <typename SurfaceFormatKHRAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, SurfaceFormatKHR>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR, SurfaceFormatKHRAllocator>>::type PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, SurfaceFormatKHRAllocator & surfaceFormatKHRAllocator, Dispatch const & d ) const  // same as above, with caller-supplied allocator for the result vector
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR, SurfaceFormatKHRAllocator> surfaceFormats( surfaceFormatKHRAllocator );
+    uint32_t surfaceFormatCount;
+    VkResult result;
+    do
+    {
+      result = d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && surfaceFormatCount )
+      {
+        surfaceFormats.resize( surfaceFormatCount );
+        result = d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR *>( surfaceFormats.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" );
+    VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
+    if ( surfaceFormatCount < surfaceFormats.size() )
+    {
+      surfaceFormats.resize( surfaceFormatCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surfaceFormats );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t * pPresentModeCount, VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT  // raw-pointer variant
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pPresentModeCount, reinterpret_cast<VkPresentModeKHR *>( pPresentModes ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename PresentModeKHRAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator>>::type PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const  // enhanced variant: returns available present modes as a vector
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator> presentModes;
+    uint32_t presentModeCount;
+    VkResult result;
+    do  // count/fill loop, retried while VK_INCOMPLETE
+    {
+      result = d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && presentModeCount )
+      {
+        presentModes.resize( presentModeCount );
+        result = d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" );
+    VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
+    if ( presentModeCount < presentModes.size() )
+    {
+      presentModes.resize( presentModeCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), presentModes );
+  }
+
+  template <typename PresentModeKHRAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, PresentModeKHR>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator>>::type PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, PresentModeKHRAllocator & presentModeKHRAllocator, Dispatch const & d ) const  // same as above, with caller-supplied allocator
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator> presentModes( presentModeKHRAllocator );
+    uint32_t presentModeCount;
+    VkResult result;
+    do
+    {
+      result = d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && presentModeCount )
+      {
+        presentModes.resize( presentModeCount );
+        result = d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" );
+    VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
+    if ( presentModeCount < presentModes.size() )
+    {
+      presentModes.resize( presentModeCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), presentModes );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_swapchain ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchain, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT  // raw-pointer variant: thin pass-through to vkCreateSwapchainKHR
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateSwapchainKHR( m_device, reinterpret_cast<const VkSwapchainCreateInfoKHR *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkSwapchainKHR *>( pSwapchain ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SwapchainKHR>::type Device::createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const  // enhanced variant: returns the created handle after resultCheck
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain;
+    VkResult result = d.vkCreateSwapchainKHR( m_device, reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSwapchainKHR *>( &swapchain ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHR" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), swapchain );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>>::type Device::createSwapchainKHRUnique( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const  // Unique variant: wraps the handle so it is destroyed automatically (with the same allocator) on scope exit
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain;
+    VkResult result = d.vkCreateSwapchainKHR( m_device, reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSwapchainKHR *>( &swapchain ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHRUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>( swapchain, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT  // raw-pointer variant
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT  // enhanced variant with Optional allocator
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT  // overloaded destroy(): same behavior as destroySwapchainKHR, used by UniqueHandle deleters
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT  // enhanced destroy() with Optional allocator
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint32_t * pSwapchainImageCount, VULKAN_HPP_NAMESPACE::Image * pSwapchainImages, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT  // raw-pointer variant: caller drives the count/array enumeration protocol
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), pSwapchainImageCount, reinterpret_cast<VkImage *>( pSwapchainImages ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename ImageAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::Image, ImageAllocator>>::type Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const  // enhanced variant: returns the swapchain images as a vector
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::Image, ImageAllocator> swapchainImages;
+    uint32_t swapchainImageCount;
+    VkResult result;
+    do  // count/fill loop, retried while VK_INCOMPLETE
+    {
+      result = d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && swapchainImageCount )
+      {
+        swapchainImages.resize( swapchainImageCount );
+        result = d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, reinterpret_cast<VkImage *>( swapchainImages.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainImagesKHR" );
+    VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() );
+    if ( swapchainImageCount < swapchainImages.size() )
+    {
+      swapchainImages.resize( swapchainImageCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), swapchainImages );
+  }
+
+  template <typename ImageAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, Image>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::Image, ImageAllocator>>::type Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, ImageAllocator & imageAllocator, Dispatch const & d ) const  // same as above, with caller-supplied allocator for the result vector
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::Image, ImageAllocator> swapchainImages( imageAllocator );
+    uint32_t swapchainImageCount;
+    VkResult result;
+    do
+    {
+      result = d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && swapchainImageCount )
+      {
+        swapchainImages.resize( swapchainImageCount );
+        result = d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, reinterpret_cast<VkImage *>( swapchainImages.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainImagesKHR" );
+    VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() );
+    if ( swapchainImageCount < swapchainImages.size() )
+    {
+      swapchainImages.resize( swapchainImageCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), swapchainImages );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t timeout, VULKAN_HPP_NAMESPACE::Semaphore semaphore, VULKAN_HPP_NAMESPACE::Fence fence, uint32_t * pImageIndex, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT  // raw-pointer variant
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkAcquireNextImageKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), timeout, static_cast<VkSemaphore>( semaphore ), static_cast<VkFence>( fence ), pImageIndex ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<uint32_t> Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t timeout, VULKAN_HPP_NAMESPACE::Semaphore semaphore, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const  // enhanced variant: returns ResultValue (not ResultValueType) because several non-error codes are meaningful to callers
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    uint32_t imageIndex;
+    VkResult result = d.vkAcquireNextImageKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), timeout, static_cast<VkSemaphore>( semaphore ), static_cast<VkFence>( fence ), &imageIndex );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImageKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout, VULKAN_HPP_NAMESPACE::Result::eNotReady, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );  // these four codes are treated as success and passed through to the caller
+    
+    return ResultValue<uint32_t>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), imageIndex );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR * pPresentInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT  // raw-pointer variant
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR *>( pPresentInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Queue::presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR & presentInfo, Dispatch const & d ) const  // enhanced variant: returns the raw Result so callers can observe eSuboptimalKHR
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR *>( &presentInfo ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::presentKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );  // eSuboptimalKHR is success-like: present happened, swapchain merely no longer matches the surface
+    
+    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getGroupPresentCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT  // raw-pointer variant
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetDeviceGroupPresentCapabilitiesKHR( m_device, reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR *>( pDeviceGroupPresentCapabilities ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR>::type Device::getGroupPresentCapabilitiesKHR( Dispatch const & d ) const  // enhanced variant: returns the capabilities struct by value after resultCheck
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR deviceGroupPresentCapabilities;
+    VkResult result = d.vkGetDeviceGroupPresentCapabilitiesKHR( m_device, reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR *>( &deviceGroupPresentCapabilities ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupPresentCapabilitiesKHR" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), deviceGroupPresentCapabilities );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT  // raw-pointer variant
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModesKHR( m_device, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( pModes ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const  // enhanced variant: returns the mode flags by value after resultCheck
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes;
+    VkResult result = d.vkGetDeviceGroupSurfacePresentModesKHR( m_device, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( &modes ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModesKHR" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), modes );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t * pRectCount, VULKAN_HPP_NAMESPACE::Rect2D * pRects, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT  // raw-pointer variant: caller drives the count/array enumeration protocol
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pRectCount, reinterpret_cast<VkRect2D *>( pRects ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Rect2DAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::Rect2D, Rect2DAllocator>>::type PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const  // enhanced variant: returns present rectangles as a vector
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::Rect2D, Rect2DAllocator> rects;
+    uint32_t rectCount;
+    VkResult result;
+    do  // count/fill loop, retried while VK_INCOMPLETE
+    {
+      result = d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && rectCount )
+      {
+        rects.resize( rectCount );
+        result = d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, reinterpret_cast<VkRect2D *>( rects.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" );
+    VULKAN_HPP_ASSERT( rectCount <= rects.size() );
+    if ( rectCount < rects.size() )
+    {
+      rects.resize( rectCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), rects );
+  }
+
+
+  template <typename Rect2DAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, Rect2D>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::Rect2D, Rect2DAllocator>>::type PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Rect2DAllocator & rect2DAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::Rect2D, Rect2DAllocator> rects( rect2DAllocator );
+    uint32_t rectCount;
+    VkResult result;
+    do
+    {
+      result = d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && rectCount )
+      {
+        rects.resize( rectCount );
+        result = d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, reinterpret_cast<VkRect2D *>( rects.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" );
+    VULKAN_HPP_ASSERT( rectCount <= rects.size() );
+    if ( rectCount < rects.size() )
+    {
+      rects.resize( rectCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), rects );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR * pAcquireInfo, uint32_t * pImageIndex, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkAcquireNextImage2KHR( m_device, reinterpret_cast<const VkAcquireNextImageInfoKHR *>( pAcquireInfo ), pImageIndex ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<uint32_t> Device::acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR & acquireInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    uint32_t imageIndex;
+    VkResult result = d.vkAcquireNextImage2KHR( m_device, reinterpret_cast<const VkAcquireNextImageInfoKHR *>( &acquireInfo ), &imageIndex );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImage2KHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout, VULKAN_HPP_NAMESPACE::Result::eNotReady, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
+    
+    return ResultValue<uint32_t>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), imageIndex );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_display ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPropertiesKHR( uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR * pProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( pProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename DisplayPropertiesKHRAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR, DisplayPropertiesKHRAllocator>>::type PhysicalDevice::getDisplayPropertiesKHR( Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR, DisplayPropertiesKHRAllocator> properties;
+    uint32_t propertyCount;
+    VkResult result;
+    do
+    {
+      result = d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( properties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" );
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    if ( propertyCount < properties.size() )
+    {
+      properties.resize( propertyCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
+  }
+
+  template <typename DisplayPropertiesKHRAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, DisplayPropertiesKHR>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR, DisplayPropertiesKHRAllocator>>::type PhysicalDevice::getDisplayPropertiesKHR( DisplayPropertiesKHRAllocator & displayPropertiesKHRAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR, DisplayPropertiesKHRAllocator> properties( displayPropertiesKHRAllocator );
+    uint32_t propertyCount;
+    VkResult result;
+    do
+    {
+      result = d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( properties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" );
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    if ( propertyCount < properties.size() )
+    {
+      properties.resize( propertyCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlanePropertiesKHR( uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR * pProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( pProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename DisplayPlanePropertiesKHRAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator>>::type PhysicalDevice::getDisplayPlanePropertiesKHR( Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator> properties;
+    uint32_t propertyCount;
+    VkResult result;
+    do
+    {
+      result = d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( properties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" );
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    if ( propertyCount < properties.size() )
+    {
+      properties.resize( propertyCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
+  }
+
+  template <typename DisplayPlanePropertiesKHRAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, DisplayPlanePropertiesKHR>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator>>::type PhysicalDevice::getDisplayPlanePropertiesKHR( DisplayPlanePropertiesKHRAllocator & displayPlanePropertiesKHRAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator> properties( displayPlanePropertiesKHRAllocator );
+    uint32_t propertyCount;
+    VkResult result;
+    do
+    {
+      result = d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( properties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" );
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    if ( propertyCount < properties.size() )
+    {
+      properties.resize( propertyCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, uint32_t * pDisplayCount, VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplays, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, pDisplayCount, reinterpret_cast<VkDisplayKHR *>( pDisplays ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename DisplayKHRAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayKHR, DisplayKHRAllocator>>::type PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::DisplayKHR, DisplayKHRAllocator> displays;
+    uint32_t displayCount;
+    VkResult result;
+    do
+    {
+      result = d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && displayCount )
+      {
+        displays.resize( displayCount );
+        result = d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, reinterpret_cast<VkDisplayKHR *>( displays.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" );
+    VULKAN_HPP_ASSERT( displayCount <= displays.size() );
+    if ( displayCount < displays.size() )
+    {
+      displays.resize( displayCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), displays );
+  }
+
+  template <typename DisplayKHRAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, DisplayKHR>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayKHR, DisplayKHRAllocator>>::type PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, DisplayKHRAllocator & displayKHRAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::DisplayKHR, DisplayKHRAllocator> displays( displayKHRAllocator );
+    uint32_t displayCount;
+    VkResult result;
+    do
+    {
+      result = d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && displayCount )
+      {
+        displays.resize( displayCount );
+        result = d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, reinterpret_cast<VkDisplayKHR *>( displays.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" );
+    VULKAN_HPP_ASSERT( displayCount <= displays.size() );
+    if ( displayCount < displays.size() )
+    {
+      displays.resize( displayCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), displays );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR * pProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), pPropertyCount, reinterpret_cast<VkDisplayModePropertiesKHR *>( pProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename DisplayModePropertiesKHRAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator>>::type PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator> properties;
+    uint32_t propertyCount;
+    VkResult result;
+    do
+    {
+      result = d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModePropertiesKHR *>( properties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModePropertiesKHR" );
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    if ( propertyCount < properties.size() )
+    {
+      properties.resize( propertyCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
+  }
+
+  template <typename DisplayModePropertiesKHRAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, DisplayModePropertiesKHR>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator>>::type PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, DisplayModePropertiesKHRAllocator & displayModePropertiesKHRAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator> properties( displayModePropertiesKHRAllocator );
+    uint32_t propertyCount;
+    VkResult result;
+    do
+    {
+      result = d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModePropertiesKHR *>( properties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModePropertiesKHR" );
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    if ( propertyCount < properties.size() )
+    {
+      properties.resize( propertyCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::DisplayModeKHR * pMode, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateDisplayModeKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkDisplayModeKHR *>( pMode ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayModeKHR>::type PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::DisplayModeKHR mode;
+    VkResult result = d.vkCreateDisplayModeKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDisplayModeKHR *>( &mode ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDisplayModeKHR" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), mode );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayModeKHR, Dispatch>>::type PhysicalDevice::createDisplayModeKHRUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::DisplayModeKHR mode;
+    VkResult result = d.vkCreateDisplayModeKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDisplayModeKHR *>( &mode ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDisplayModeKHRUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayModeKHR, Dispatch>( mode, ObjectDestroy<PhysicalDevice, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR * pCapabilities, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetDisplayPlaneCapabilitiesKHR( m_physicalDevice, static_cast<VkDisplayModeKHR>( mode ), planeIndex, reinterpret_cast<VkDisplayPlaneCapabilitiesKHR *>( pCapabilities ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR>::type PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities;
+    VkResult result = d.vkGetDisplayPlaneCapabilitiesKHR( m_physicalDevice, static_cast<VkDisplayModeKHR>( mode ), planeIndex, reinterpret_cast<VkDisplayPlaneCapabilitiesKHR *>( &capabilities ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilitiesKHR" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), capabilities );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateDisplayPlaneSurfaceKHR( m_instance, reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    VkResult result = d.vkCreateDisplayPlaneSurfaceKHR( m_instance, reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createDisplayPlaneSurfaceKHR" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type Instance::createDisplayPlaneSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    VkResult result = d.vkCreateDisplayPlaneSurfaceKHR( m_instance, reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createDisplayPlaneSurfaceKHRUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_display_swapchain ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSharedSwapchainsKHR( uint32_t swapchainCount, const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device, swapchainCount, reinterpret_cast<const VkSwapchainCreateInfoKHR *>( pCreateInfos ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkSwapchainKHR *>( pSwapchains ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename SwapchainKHRAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR, SwapchainKHRAllocator>>::type Device::createSharedSwapchainsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR, SwapchainKHRAllocator> swapchains( createInfos.size() );
+    VkResult result = d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size(), reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHR" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), swapchains );
+  }
+
+  template <typename SwapchainKHRAllocator, typename Dispatch, typename B0, typename std::enable_if<std::is_same<typename B0::value_type, SwapchainKHR>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR, SwapchainKHRAllocator>>::type Device::createSharedSwapchainsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, SwapchainKHRAllocator & swapchainKHRAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR, SwapchainKHRAllocator> swapchains( createInfos.size(), swapchainKHRAllocator );
+    VkResult result = d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size(), reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHR" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), swapchains );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SwapchainKHR>::type Device::createSharedSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain;
+    VkResult result = d.vkCreateSharedSwapchainsKHR( m_device, 1, reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSwapchainKHR *>( &swapchain ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainKHR" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), swapchain );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch, typename SwapchainKHRAllocator>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>, SwapchainKHRAllocator>>::type Device::createSharedSwapchainsKHRUnique( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR> swapchains( createInfos.size() );
+    VkResult result = d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size(), reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" );
+    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>, SwapchainKHRAllocator> uniqueSwapchains;
+    uniqueSwapchains.reserve( createInfos.size() );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    for ( auto const & swapchain : swapchains )
+    {
+      uniqueSwapchains.push_back( UniqueHandle<SwapchainKHR, Dispatch>( swapchain, deleter ) );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniqueSwapchains ) );
+  }
+
+  template <typename Dispatch, typename SwapchainKHRAllocator, typename B0, typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<SwapchainKHR, Dispatch>>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>, SwapchainKHRAllocator>>::type Device::createSharedSwapchainsKHRUnique( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, SwapchainKHRAllocator & swapchainKHRAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR> swapchains( createInfos.size() );
+    VkResult result = d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size(), reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" );
+    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>, SwapchainKHRAllocator> uniqueSwapchains( swapchainKHRAllocator );
+    uniqueSwapchains.reserve( createInfos.size() );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    for ( auto const & swapchain : swapchains )
+    {
+      uniqueSwapchains.push_back( UniqueHandle<SwapchainKHR, Dispatch>( swapchain, deleter ) );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniqueSwapchains ) );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>>::type Device::createSharedSwapchainKHRUnique( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain;
+    VkResult result = d.vkCreateSharedSwapchainsKHR( m_device, 1, reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSwapchainKHR *>( &swapchain ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainKHRUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>( swapchain, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+#if defined( VK_USE_PLATFORM_XLIB_KHR )
+  //=== VK_KHR_xlib_surface ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateXlibSurfaceKHR( m_instance, reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    VkResult result = d.vkCreateXlibSurfaceKHR( m_instance, reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createXlibSurfaceKHR" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type Instance::createXlibSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    VkResult result = d.vkCreateXlibSurfaceKHR( m_instance, reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createXlibSurfaceKHRUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display * dpy, VisualID visualID, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Bool32>( d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, dpy, visualID ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display & dpy, VisualID visualID, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkBool32 result = d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &dpy, visualID );
+    
+    
+    return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+#if defined( VK_USE_PLATFORM_XCB_KHR )
+  //=== VK_KHR_xcb_surface ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateXcbSurfaceKHR( m_instance, reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    VkResult result = d.vkCreateXcbSurfaceKHR( m_instance, reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createXcbSurfaceKHR" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type Instance::createXcbSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    VkResult result = d.vkCreateXcbSurfaceKHR( m_instance, reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createXcbSurfaceKHRUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t * connection, xcb_visualid_t visual_id, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Bool32>( d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, connection, visual_id ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t & connection, xcb_visualid_t visual_id, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkBool32 result = d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &connection, visual_id );
+    
+    
+    return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
+  //=== VK_KHR_wayland_surface ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateWaylandSurfaceKHR( m_instance, reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    VkResult result = d.vkCreateWaylandSurfaceKHR( m_instance, reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createWaylandSurfaceKHR" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type Instance::createWaylandSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    VkResult result = d.vkCreateWaylandSurfaceKHR( m_instance, reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createWaylandSurfaceKHRUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display * display, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Bool32>( d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, display ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display & display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkBool32 result = d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &display );
+    
+    
+    return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  //=== VK_KHR_android_surface ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateAndroidSurfaceKHR( m_instance, reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    VkResult result = d.vkCreateAndroidSurfaceKHR( m_instance, reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createAndroidSurfaceKHR" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type Instance::createAndroidSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    VkResult result = d.vkCreateAndroidSurfaceKHR( m_instance, reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createAndroidSurfaceKHRUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_win32_surface ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateWin32SurfaceKHR( m_instance, reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    VkResult result = d.vkCreateWin32SurfaceKHR( m_instance, reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createWin32SurfaceKHR" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type Instance::createWin32SurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    VkResult result = d.vkCreateWin32SurfaceKHR( m_instance, reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createWin32SurfaceKHRUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Bool32>( d.vkGetPhysicalDeviceWin32PresentationSupportKHR( m_physicalDevice, queueFamilyIndex ) );
+  }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_EXT_debug_report ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT * pCallback, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateDebugReportCallbackEXT( m_instance, reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkDebugReportCallbackEXT *>( pCallback ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT>::type Instance::createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback;
+    VkResult result = d.vkCreateDebugReportCallbackEXT( m_instance, reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDebugReportCallbackEXT *>( &callback ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugReportCallbackEXT" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), callback );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT, Dispatch>>::type Instance::createDebugReportCallbackEXTUnique( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback;
+    VkResult result = d.vkCreateDebugReportCallbackEXT( m_instance, reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDebugReportCallbackEXT *>( &callback ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugReportCallbackEXTUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT, Dispatch>( callback, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyDebugReportCallbackEXT( m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyDebugReportCallbackEXT( m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyDebugReportCallbackEXT( m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyDebugReportCallbackEXT( m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_, uint64_t object, size_t location, int32_t messageCode, const char * pLayerPrefix, const char * pMessage, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDebugReportMessageEXT( m_instance, static_cast<VkDebugReportFlagsEXT>( flags ), static_cast<VkDebugReportObjectTypeEXT>( objectType_ ), object, location, messageCode, pLayerPrefix, pMessage );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_, uint64_t object, size_t location, int32_t messageCode, const std::string & layerPrefix, const std::string & message, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDebugReportMessageEXT( m_instance, static_cast<VkDebugReportFlagsEXT>( flags ), static_cast<VkDebugReportObjectTypeEXT>( objectType_ ), object, location, messageCode, layerPrefix.c_str(), message.c_str() );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_EXT_debug_marker ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT * pTagInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT *>( pTagInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT & tagInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT *>( &tagInfo ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectTagEXT" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT * pNameInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT *>( pNameInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT & nameInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT *>( &nameInfo ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectNameEXT" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( pMarkerInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( &markerInfo ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::debugMarkerEndEXT( Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDebugMarkerEndEXT( m_commandBuffer );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( pMarkerInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( &markerInfo ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_video_queue ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pVideoProfile, VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR * pCapabilities, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetPhysicalDeviceVideoCapabilitiesKHR( m_physicalDevice, reinterpret_cast<const VkVideoProfileInfoKHR *>( pVideoProfile ), reinterpret_cast<VkVideoCapabilitiesKHR *>( pCapabilities ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR>::type PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR capabilities;
+    VkResult result = d.vkGetPhysicalDeviceVideoCapabilitiesKHR( m_physicalDevice, reinterpret_cast<const VkVideoProfileInfoKHR *>( &videoProfile ), reinterpret_cast<VkVideoCapabilitiesKHR *>( &capabilities ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), capabilities );
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR & capabilities = structureChain.template get<VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR>();
+    VkResult result = d.vkGetPhysicalDeviceVideoCapabilitiesKHR( m_physicalDevice, reinterpret_cast<const VkVideoProfileInfoKHR *>( &videoProfile ), reinterpret_cast<VkVideoCapabilitiesKHR *>( &capabilities ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), structureChain );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR * pVideoFormatInfo, uint32_t * pVideoFormatPropertyCount, VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR * pVideoFormatProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( pVideoFormatInfo ), pVideoFormatPropertyCount, reinterpret_cast<VkVideoFormatPropertiesKHR *>( pVideoFormatProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename VideoFormatPropertiesKHRAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator>>::type PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator> videoFormatProperties;
+    uint32_t videoFormatPropertyCount;
+    VkResult result;
+    do
+    {
+      result = d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ), &videoFormatPropertyCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && videoFormatPropertyCount )
+      {
+        videoFormatProperties.resize( videoFormatPropertyCount );
+        result = d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ), &videoFormatPropertyCount, reinterpret_cast<VkVideoFormatPropertiesKHR *>( videoFormatProperties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" );
+    VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() );
+    if ( videoFormatPropertyCount < videoFormatProperties.size() )
+    {
+      videoFormatProperties.resize( videoFormatPropertyCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), videoFormatProperties );
+  }
+
+  template <typename VideoFormatPropertiesKHRAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, VideoFormatPropertiesKHR>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator>>::type PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, VideoFormatPropertiesKHRAllocator & videoFormatPropertiesKHRAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator> videoFormatProperties( videoFormatPropertiesKHRAllocator );
+    uint32_t videoFormatPropertyCount;
+    VkResult result;
+    do
+    {
+      result = d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ), &videoFormatPropertyCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && videoFormatPropertyCount )
+      {
+        videoFormatProperties.resize( videoFormatPropertyCount );
+        result = d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ), &videoFormatPropertyCount, reinterpret_cast<VkVideoFormatPropertiesKHR *>( videoFormatProperties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" );
+    VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() );
+    if ( videoFormatPropertyCount < videoFormatProperties.size() )
+    {
+      videoFormatProperties.resize( videoFormatPropertyCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), videoFormatProperties );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createVideoSessionKHR( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::VideoSessionKHR * pVideoSession, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateVideoSessionKHR( m_device, reinterpret_cast<const VkVideoSessionCreateInfoKHR *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkVideoSessionKHR *>( pVideoSession ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoSessionKHR>::type Device::createVideoSessionKHR( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession;
+    VkResult result = d.vkCreateVideoSessionKHR( m_device, reinterpret_cast<const VkVideoSessionCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkVideoSessionKHR *>( &videoSession ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionKHR" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), videoSession );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::VideoSessionKHR, Dispatch>>::type Device::createVideoSessionKHRUnique( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession;
+    VkResult result = d.vkCreateVideoSessionKHR( m_device, reinterpret_cast<const VkVideoSessionCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkVideoSessionKHR *>( &videoSession ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionKHRUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::VideoSessionKHR, Dispatch>( videoSession, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyVideoSessionKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyVideoSessionKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyVideoSessionKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyVideoSessionKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, uint32_t * pMemoryRequirementsCount, VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR * pMemoryRequirements, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetVideoSessionMemoryRequirementsKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), pMemoryRequirementsCount, reinterpret_cast<VkVideoSessionMemoryRequirementsKHR *>( pMemoryRequirements ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename VideoSessionMemoryRequirementsKHRAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR, VideoSessionMemoryRequirementsKHRAllocator>>::type Device::getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR, VideoSessionMemoryRequirementsKHRAllocator> memoryRequirements;
+    uint32_t memoryRequirementsCount;
+    VkResult result;
+    do
+    {
+      result = d.vkGetVideoSessionMemoryRequirementsKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), &memoryRequirementsCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && memoryRequirementsCount )
+      {
+        memoryRequirements.resize( memoryRequirementsCount );
+        result = d.vkGetVideoSessionMemoryRequirementsKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), &memoryRequirementsCount, reinterpret_cast<VkVideoSessionMemoryRequirementsKHR *>( memoryRequirements.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    
+    VULKAN_HPP_ASSERT( memoryRequirementsCount <= memoryRequirements.size() );
+    if ( memoryRequirementsCount < memoryRequirements.size() )
+    {
+      memoryRequirements.resize( memoryRequirementsCount );
+    }
+    return memoryRequirements;
+  }
+
+  template <typename VideoSessionMemoryRequirementsKHRAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, VideoSessionMemoryRequirementsKHR>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR, VideoSessionMemoryRequirementsKHRAllocator>>::type Device::getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, VideoSessionMemoryRequirementsKHRAllocator & videoSessionMemoryRequirementsKHRAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR, VideoSessionMemoryRequirementsKHRAllocator> memoryRequirements( videoSessionMemoryRequirementsKHRAllocator );
+    uint32_t memoryRequirementsCount;
+    VkResult result;
+    do
+    {
+      result = d.vkGetVideoSessionMemoryRequirementsKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), &memoryRequirementsCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && memoryRequirementsCount )
+      {
+        memoryRequirements.resize( memoryRequirementsCount );
+        result = d.vkGetVideoSessionMemoryRequirementsKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), &memoryRequirementsCount, reinterpret_cast<VkVideoSessionMemoryRequirementsKHR *>( memoryRequirements.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    
+    VULKAN_HPP_ASSERT( memoryRequirementsCount <= memoryRequirements.size() );
+    if ( memoryRequirementsCount < memoryRequirements.size() )
+    {
+      memoryRequirements.resize( memoryRequirementsCount );
+    }
+    return memoryRequirements;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindVideoSessionMemoryKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, uint32_t bindSessionMemoryInfoCount, const VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR * pBindSessionMemoryInfos, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkBindVideoSessionMemoryKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), bindSessionMemoryInfoCount, reinterpret_cast<const VkBindVideoSessionMemoryInfoKHR *>( pBindSessionMemoryInfos ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindVideoSessionMemoryKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR> const & bindSessionMemoryInfos, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkBindVideoSessionMemoryKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), bindSessionMemoryInfos.size(), reinterpret_cast<const VkBindVideoSessionMemoryInfoKHR *>( bindSessionMemoryInfos.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindVideoSessionMemoryKHR" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR * pVideoSessionParameters, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateVideoSessionParametersKHR( m_device, reinterpret_cast<const VkVideoSessionParametersCreateInfoKHR *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkVideoSessionParametersKHR *>( pVideoSessionParameters ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR>::type Device::createVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters;
+    VkResult result = d.vkCreateVideoSessionParametersKHR( m_device, reinterpret_cast<const VkVideoSessionParametersCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkVideoSessionParametersKHR *>( &videoSessionParameters ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionParametersKHR" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), videoSessionParameters );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR, Dispatch>>::type Device::createVideoSessionParametersKHRUnique( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters;
+    VkResult result = d.vkCreateVideoSessionParametersKHR( m_device, reinterpret_cast<const VkVideoSessionParametersCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkVideoSessionParametersKHR *>( &videoSessionParameters ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionParametersKHRUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR, Dispatch>( videoSessionParameters, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::updateVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR * pUpdateInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkUpdateVideoSessionParametersKHR( m_device, static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ), reinterpret_cast<const VkVideoSessionParametersUpdateInfoKHR *>( pUpdateInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::updateVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR & updateInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkUpdateVideoSessionParametersKHR( m_device, static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ), reinterpret_cast<const VkVideoSessionParametersUpdateInfoKHR *>( &updateInfo ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::updateVideoSessionParametersKHR" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyVideoSessionParametersKHR( m_device, static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyVideoSessionParametersKHR( m_device, static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyVideoSessionParametersKHR( m_device, static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyVideoSessionParametersKHR( m_device, static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR * pBeginInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdBeginVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoBeginCodingInfoKHR *>( pBeginInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR & beginInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdBeginVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoBeginCodingInfoKHR *>( &beginInfo ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // Pointer overload: forwards pEndCodingInfo unchanged to the C entry point.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR * pEndCodingInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdEndVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoEndCodingInfoKHR *>( pEndCodingInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode overload: reference parameter, same call as the pointer form.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR & endCodingInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdEndVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoEndCodingInfoKHR *>( &endCodingInfo ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // Pointer overload for vkCmdControlVideoCodingKHR (rate control etc. per the Vulkan spec).
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR * pCodingControlInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdControlVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoCodingControlInfoKHR *>( pCodingControlInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode overload: reference parameter, same call as the pointer form.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR & codingControlInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdControlVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoCodingControlInfoKHR *>( &codingControlInfo ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_video_decode_queue ===
+
+
+  // Pointer overload: forwards pDecodeInfo unchanged to vkCmdDecodeVideoKHR.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR * pDecodeInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDecodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoDecodeInfoKHR *>( pDecodeInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode overload: reference parameter, same call as the pointer form.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR & decodeInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdDecodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoDecodeInfoKHR *>( &decodeInfo ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_EXT_transform_feedback ===
+
+
+  // Pointer/count overload: caller supplies parallel arrays and an explicit bindingCount.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer, firstBinding, bindingCount, reinterpret_cast<const VkBuffer *>( pBuffers ), reinterpret_cast<const VkDeviceSize *>( pOffsets ), reinterpret_cast<const VkDeviceSize *>( pSizes ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // ArrayProxy overload: the count is taken from buffers.size(). The parallel
+  // arrays are validated first — assert under VULKAN_HPP_NO_EXCEPTIONS, throw
+  // LogicError otherwise. `sizes` may be empty (nullptr is then passed through).
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
+    VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() );
+#else
+    if ( buffers.size() != offsets.size() )
+  {
+    throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != offsets.size()" );
+  }
+    if ( !sizes.empty() && buffers.size() != sizes.size() )
+  {
+    throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != sizes.size()" );
+  }
+#endif  /*VULKAN_HPP_NO_EXCEPTIONS*/
+    
+    
+    d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer, firstBinding, buffers.size(), reinterpret_cast<const VkBuffer *>( buffers.data() ), reinterpret_cast<const VkDeviceSize *>( offsets.data() ), reinterpret_cast<const VkDeviceSize *>( sizes.data() ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // Pointer/count overload for vkCmdBeginTransformFeedbackEXT.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer, firstCounterBuffer, counterBufferCount, reinterpret_cast<const VkBuffer *>( pCounterBuffers ), reinterpret_cast<const VkDeviceSize *>( pCounterBufferOffsets ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // ArrayProxy overload: counterBufferOffsets may be empty; otherwise its size
+  // must match counterBuffers (assert or LogicError, as above).
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    VULKAN_HPP_ASSERT( counterBufferOffsets.empty() || counterBuffers.size() == counterBufferOffsets.size() );
+#else
+    if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() )
+  {
+    throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::beginTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" );
+  }
+#endif  /*VULKAN_HPP_NO_EXCEPTIONS*/
+    
+    
+    d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer, firstCounterBuffer, counterBuffers.size(), reinterpret_cast<const VkBuffer *>( counterBuffers.data() ), reinterpret_cast<const VkDeviceSize *>( counterBufferOffsets.data() ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // Pointer/count overload for vkCmdEndTransformFeedbackEXT.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdEndTransformFeedbackEXT( m_commandBuffer, firstCounterBuffer, counterBufferCount, reinterpret_cast<const VkBuffer *>( pCounterBuffers ), reinterpret_cast<const VkDeviceSize *>( pCounterBufferOffsets ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // ArrayProxy overload: same size validation as beginTransformFeedbackEXT.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    VULKAN_HPP_ASSERT( counterBufferOffsets.empty() || counterBuffers.size() == counterBufferOffsets.size() );
+#else
+    if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() )
+  {
+    throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::endTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" );
+  }
+#endif  /*VULKAN_HPP_NO_EXCEPTIONS*/
+    
+    
+    d.vkCmdEndTransformFeedbackEXT( m_commandBuffer, firstCounterBuffer, counterBuffers.size(), reinterpret_cast<const VkBuffer *>( counterBuffers.data() ), reinterpret_cast<const VkDeviceSize *>( counterBufferOffsets.data() ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // Thin wrapper over vkCmdBeginQueryIndexedEXT (no enhanced overload needed — all scalar params).
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, uint32_t index, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdBeginQueryIndexedEXT( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ), index );
+  }
+
+
+  // Thin wrapper over vkCmdEndQueryIndexedEXT.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdEndQueryIndexedEXT( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, index );
+  }
+
+
+  // Thin wrapper over vkCmdDrawIndirectByteCountEXT.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndirectByteCountEXT( uint32_t instanceCount, uint32_t firstInstance, VULKAN_HPP_NAMESPACE::Buffer counterBuffer, VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDrawIndirectByteCountEXT( m_commandBuffer, instanceCount, firstInstance, static_cast<VkBuffer>( counterBuffer ), static_cast<VkDeviceSize>( counterBufferOffset ), counterOffset, vertexStride );
+  }
+
+  //=== VK_NVX_binary_import ===
+
+
+  // C-style overload: returns the raw Result; the created handle is written to *pModule.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCuModuleNVX( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::CuModuleNVX * pModule, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateCuModuleNVX( m_device, reinterpret_cast<const VkCuModuleCreateInfoNVX *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkCuModuleNVX *>( pModule ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced overload: creates the module, runs the result through resultCheck,
+  // and returns it wrapped in ResultValueType.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::CuModuleNVX>::type Device::createCuModuleNVX( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::CuModuleNVX module;
+    VkResult result = d.vkCreateCuModuleNVX( m_device, reinterpret_cast<const VkCuModuleCreateInfoNVX *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkCuModuleNVX *>( &module ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createCuModuleNVX" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), module );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  // As above, but returns a UniqueHandle that destroys the module via ObjectDestroy.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CuModuleNVX, Dispatch>>::type Device::createCuModuleNVXUnique( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::CuModuleNVX module;
+    VkResult result = d.vkCreateCuModuleNVX( m_device, reinterpret_cast<const VkCuModuleCreateInfoNVX *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkCuModuleNVX *>( &module ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createCuModuleNVXUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::CuModuleNVX, Dispatch>( module, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // C-style overload: returns the raw Result; the created handle is written to *pFunction.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCuFunctionNVX( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::CuFunctionNVX * pFunction, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateCuFunctionNVX( m_device, reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkCuFunctionNVX *>( pFunction ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced overload: creates the function, checks the result, returns the handle.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::CuFunctionNVX>::type Device::createCuFunctionNVX( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::CuFunctionNVX function;
+    VkResult result = d.vkCreateCuFunctionNVX( m_device, reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkCuFunctionNVX *>( &function ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createCuFunctionNVX" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), function );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  // As above, but returns a UniqueHandle that destroys the function via ObjectDestroy.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CuFunctionNVX, Dispatch>>::type Device::createCuFunctionNVXUnique( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::CuFunctionNVX function;
+    VkResult result = d.vkCreateCuFunctionNVX( m_device, reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkCuFunctionNVX *>( &function ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createCuFunctionNVXUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::CuFunctionNVX, Dispatch>( function, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // Named destroy, pointer-allocator overload for vkDestroyCuModuleNVX.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyCuModuleNVX( m_device, static_cast<VkCuModuleNVX>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced overload: Optional<> allocator is converted to a raw pointer (nullptr when absent).
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyCuModuleNVX( m_device, static_cast<VkCuModuleNVX>( module ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // Generic destroy(CuModuleNVX) overload — identical body to destroyCuModuleNVX above.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuModuleNVX module, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyCuModuleNVX( m_device, static_cast<VkCuModuleNVX>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced generic destroy(CuModuleNVX) overload.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuModuleNVX module, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyCuModuleNVX( m_device, static_cast<VkCuModuleNVX>( module ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // Named destroy, pointer-allocator overload for vkDestroyCuFunctionNVX.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyCuFunctionNVX( m_device, static_cast<VkCuFunctionNVX>( function ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced overload of destroyCuFunctionNVX.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyCuFunctionNVX( m_device, static_cast<VkCuFunctionNVX>( function ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // Generic destroy(CuFunctionNVX) overload — identical body to destroyCuFunctionNVX above.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyCuFunctionNVX( m_device, static_cast<VkCuFunctionNVX>( function ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced generic destroy(CuFunctionNVX) overload.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyCuFunctionNVX( m_device, static_cast<VkCuFunctionNVX>( function ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // Pointer overload: forwards pLaunchInfo unchanged to vkCmdCuLaunchKernelNVX.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX * pLaunchInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdCuLaunchKernelNVX( m_commandBuffer, reinterpret_cast<const VkCuLaunchInfoNVX *>( pLaunchInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode overload: reference parameter, same call as the pointer form.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX & launchInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdCuLaunchKernelNVX( m_commandBuffer, reinterpret_cast<const VkCuLaunchInfoNVX *>( &launchInfo ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_NVX_image_view_handle ===
+
+
+  // Pointer overload: returns the raw uint32_t handle from vkGetImageViewHandleNVX.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX * pInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return d.vkGetImageViewHandleNVX( m_device, reinterpret_cast<const VkImageViewHandleInfoNVX *>( pInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode overload: reference parameter, same return value.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    uint32_t result = d.vkGetImageViewHandleNVX( m_device, reinterpret_cast<const VkImageViewHandleInfoNVX *>( &info ) );
+    
+    
+    return result;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // C-style overload: returns the raw Result; properties are written to *pProperties.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX * pProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetImageViewAddressNVX( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<VkImageViewAddressPropertiesNVX *>( pProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced overload: fetches the properties struct, checks the result, returns it.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX>::type Device::getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX properties;
+    VkResult result = d.vkGetImageViewAddressNVX( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<VkImageViewAddressPropertiesNVX *>( &properties ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getImageViewAddressNVX" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_AMD_draw_indirect_count ===
+
+
+  // Thin wrapper over vkCmdDrawIndirectCountAMD (all scalar/handle params, no enhanced overload).
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDrawIndirectCountAMD( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
+  }
+
+
+  // Thin wrapper over vkCmdDrawIndexedIndirectCountAMD.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDrawIndexedIndirectCountAMD( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
+  }
+
+  //=== VK_AMD_shader_info ===
+
+
+  // C-style overload: two-call query; *pInfoSize is the in/out byte count, pInfo may be null.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, size_t * pInfoSize, void * pInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetShaderInfoAMD( m_device, static_cast<VkPipeline>( pipeline ), static_cast<VkShaderStageFlagBits>( shaderStage ), static_cast<VkShaderInfoTypeAMD>( infoType ), pInfoSize, pInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced overload: standard two-call enumeration loop — query the size, resize,
+  // fetch; repeat while the driver reports VK_INCOMPLETE, then shrink to the final size.
+  template <typename Uint8_tAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<uint8_t, Uint8_tAllocator> info;
+    size_t infoSize;
+    VkResult result;
+    do
+    {
+      result = d.vkGetShaderInfoAMD( m_device, static_cast<VkPipeline>( pipeline ), static_cast<VkShaderStageFlagBits>( shaderStage ), static_cast<VkShaderInfoTypeAMD>( infoType ), &infoSize, nullptr );
+      if ( ( result == VK_SUCCESS ) && infoSize )
+      {
+        info.resize( infoSize );
+        result = d.vkGetShaderInfoAMD( m_device, static_cast<VkPipeline>( pipeline ), static_cast<VkShaderStageFlagBits>( shaderStage ), static_cast<VkShaderInfoTypeAMD>( infoType ), &infoSize, reinterpret_cast<void *>( info.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderInfoAMD" );
+    VULKAN_HPP_ASSERT( infoSize <= info.size() );
+    if ( infoSize < info.size() )
+    {
+      info.resize( infoSize );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), info );
+  }
+
+  // Same enumeration loop, but the result vector is constructed with a caller-supplied allocator.
+  template <typename Uint8_tAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, uint8_t>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<uint8_t, Uint8_tAllocator> info( uint8_tAllocator );
+    size_t infoSize;
+    VkResult result;
+    do
+    {
+      result = d.vkGetShaderInfoAMD( m_device, static_cast<VkPipeline>( pipeline ), static_cast<VkShaderStageFlagBits>( shaderStage ), static_cast<VkShaderInfoTypeAMD>( infoType ), &infoSize, nullptr );
+      if ( ( result == VK_SUCCESS ) && infoSize )
+      {
+        info.resize( infoSize );
+        result = d.vkGetShaderInfoAMD( m_device, static_cast<VkPipeline>( pipeline ), static_cast<VkShaderStageFlagBits>( shaderStage ), static_cast<VkShaderInfoTypeAMD>( infoType ), &infoSize, reinterpret_cast<void *>( info.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderInfoAMD" );
+    VULKAN_HPP_ASSERT( infoSize <= info.size() );
+    if ( infoSize < info.size() )
+    {
+      info.resize( infoSize );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), info );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_dynamic_rendering ===
+
+
+  // Pointer overload: forwards pRenderingInfo unchanged to vkCmdBeginRenderingKHR.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginRenderingKHR( const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdBeginRenderingKHR( m_commandBuffer, reinterpret_cast<const VkRenderingInfo *>( pRenderingInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode overload: reference parameter, same call as the pointer form.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginRenderingKHR( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdBeginRenderingKHR( m_commandBuffer, reinterpret_cast<const VkRenderingInfo *>( &renderingInfo ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // No-argument wrapper over vkCmdEndRenderingKHR.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endRenderingKHR( Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdEndRenderingKHR( m_commandBuffer );
+  }
+
+#if defined( VK_USE_PLATFORM_GGP )
+  //=== VK_GGP_stream_descriptor_surface ===
+
+
+  // C-style overload: returns the raw Result; the created surface is written to *pSurface.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createStreamDescriptorSurfaceGGP( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateStreamDescriptorSurfaceGGP( m_instance, reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced overload: creates the surface, checks the result, returns the handle.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createStreamDescriptorSurfaceGGP( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    VkResult result = d.vkCreateStreamDescriptorSurfaceGGP( m_instance, reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createStreamDescriptorSurfaceGGP" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  // As above, but returns a UniqueHandle that destroys the surface via ObjectDestroy.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type Instance::createStreamDescriptorSurfaceGGPUnique( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    VkResult result = d.vkCreateStreamDescriptorSurfaceGGP( m_instance, reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createStreamDescriptorSurfaceGGPUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_GGP*/
+
+  //=== VK_NV_external_memory_capabilities ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV * pExternalImageFormatProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageCreateFlags>( flags ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ), reinterpret_cast<VkExternalImageFormatPropertiesNV *>( pExternalImageFormatProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV>::type PhysicalDevice::getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV externalImageFormatProperties;
+    VkResult result = d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageCreateFlags>( flags ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ), reinterpret_cast<VkExternalImageFormatPropertiesNV *>( &externalImageFormatProperties ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getExternalImageFormatPropertiesNV" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), externalImageFormatProperties );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_NV_external_memory_win32 ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, HANDLE * pHandle, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetMemoryWin32HandleNV( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ), pHandle ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type Device::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    HANDLE handle;
+    VkResult result = d.vkGetMemoryWin32HandleNV( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ), &handle );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleNV" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), handle );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_get_physical_device_properties2 ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( pFeatures ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 PhysicalDevice::getFeatures2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features;
+    d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
+    
+    
+    return features;
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getFeatures2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>();
+    d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
+    
+    
+    return structureChain;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( pProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 PhysicalDevice::getProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties;
+    d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
+    
+    
+    return properties;
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>();
+    d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
+    
+    
+    return structureChain;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPhysicalDeviceFormatProperties2KHR( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( pFormatProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2 PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties;
+    d.vkGetPhysicalDeviceFormatProperties2KHR( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
+    
+    
+    return formatProperties;
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::FormatProperties2>();
+    d.vkGetPhysicalDeviceFormatProperties2KHR( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
+    
+    
+    return structureChain;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo, VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( pImageFormatInfo ), reinterpret_cast<VkImageFormatProperties2 *>( pImageFormatProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties;
+    VkResult result = d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ), reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), imageFormatProperties );
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>();
+    VkResult result = d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ), reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), structureChain );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties2KHR( uint32_t * pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( pQueueFamilyProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename QueueFamilyProperties2Allocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> PhysicalDevice::getQueueFamilyProperties2KHR( Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties;
+    uint32_t queueFamilyPropertyCount;
+    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
+    
+    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
+    if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
+    {
+      queueFamilyProperties.resize( queueFamilyPropertyCount );
+    }
+    return queueFamilyProperties;
+  }
+
+  template <typename QueueFamilyProperties2Allocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, QueueFamilyProperties2>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> PhysicalDevice::getQueueFamilyProperties2KHR( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties( queueFamilyProperties2Allocator );
+    uint32_t queueFamilyPropertyCount;
+    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
+    
+    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
+    if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
+    {
+      queueFamilyProperties.resize( queueFamilyPropertyCount );
+    }
+    return queueFamilyProperties;
+  }
+
+  template <typename StructureChain, typename StructureChainAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator> PhysicalDevice::getQueueFamilyProperties2KHR( Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<StructureChain, StructureChainAllocator> structureChains;
+    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties;
+    uint32_t queueFamilyPropertyCount;
+    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+    structureChains.resize( queueFamilyPropertyCount );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
+    {
+      queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
+    }
+    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
+    
+    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
+      if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
+      {
+        structureChains.resize( queueFamilyPropertyCount );
+      }
+      for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
+      {
+        structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
+      }
+    return structureChains;
+  }
+
+  template <typename StructureChain, typename StructureChainAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, StructureChain>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator> PhysicalDevice::getQueueFamilyProperties2KHR( StructureChainAllocator & structureChainAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<StructureChain, StructureChainAllocator> structureChains( structureChainAllocator );
+    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties;
+    uint32_t queueFamilyPropertyCount;
+    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+    structureChains.resize( queueFamilyPropertyCount );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
+    {
+      queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
+    }
+    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
+    
+    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
+      if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
+      {
+        structureChains.resize( queueFamilyPropertyCount );
+      }
+      for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
+      {
+        structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
+      }
+    return structureChains;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( pMemoryProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 PhysicalDevice::getMemoryProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties;
+    d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
+    
+    
+    return memoryProperties;
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getMemoryProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>();
+    d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
+    
+    
+    return structureChain;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( pFormatInfo ), pPropertyCount, reinterpret_cast<VkSparseImageFormatProperties2 *>( pProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename SparseImageFormatProperties2Allocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> PhysicalDevice::getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties;
+    uint32_t propertyCount;
+    d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, nullptr );
+    properties.resize( propertyCount );
+    d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
+    
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    if ( propertyCount < properties.size() )
+    {
+      properties.resize( propertyCount );
+    }
+    return properties;
+  }
+
+  template <typename SparseImageFormatProperties2Allocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, SparseImageFormatProperties2>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> PhysicalDevice::getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties( sparseImageFormatProperties2Allocator );
+    uint32_t propertyCount;
+    d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, nullptr );
+    properties.resize( propertyCount );
+    d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
+    
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    if ( propertyCount < properties.size() )
+    {
+      properties.resize( propertyCount );
+    }
+    return properties;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_device_group ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetDeviceGroupPeerMemoryFeaturesKHR( m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( pPeerMemoryFeatures ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures;
+    d.vkGetDeviceGroupPeerMemoryFeaturesKHR( m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( &peerMemoryFeatures ) );
+    
+    
+    return peerMemoryFeatures;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setDeviceMaskKHR( uint32_t deviceMask, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetDeviceMaskKHR( m_commandBuffer, deviceMask );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::dispatchBaseKHR( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDispatchBaseKHR( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
+  }
+
+#if defined( VK_USE_PLATFORM_VI_NN )
+  //=== VK_NN_vi_surface ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateViSurfaceNN( m_instance, reinterpret_cast<const VkViSurfaceCreateInfoNN *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    VkResult result = d.vkCreateViSurfaceNN( m_instance, reinterpret_cast<const VkViSurfaceCreateInfoNN *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createViSurfaceNN" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type Instance::createViSurfaceNNUnique( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    VkResult result = d.vkCreateViSurfaceNN( m_instance, reinterpret_cast<const VkViSurfaceCreateInfoNN *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createViSurfaceNNUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_VI_NN*/
+
+  //=== VK_KHR_maintenance1 ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkTrimCommandPoolKHR( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
+  }
+
+  //=== VK_KHR_device_group_creation ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDeviceGroupsKHR( uint32_t * pPhysicalDeviceGroupCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, pPhysicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( pPhysicalDeviceGroupProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename PhysicalDeviceGroupPropertiesAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type Instance::enumeratePhysicalDeviceGroupsKHR( Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties;
+    uint32_t physicalDeviceGroupCount;
+    VkResult result;
+    do
+    {
+      result = d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && physicalDeviceGroupCount )
+      {
+        physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
+        result = d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" );
+    VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
+    if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() )
+    {
+      physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), physicalDeviceGroupProperties );
+  }
+
+  template <typename PhysicalDeviceGroupPropertiesAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, PhysicalDeviceGroupProperties>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type Instance::enumeratePhysicalDeviceGroupsKHR( PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties( physicalDeviceGroupPropertiesAllocator );
+    uint32_t physicalDeviceGroupCount;
+    VkResult result;
+    do
+    {
+      result = d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && physicalDeviceGroupCount )
+      {
+        physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
+        result = d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" );
+    VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
+    if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() )
+    {
+      physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), physicalDeviceGroupProperties );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_external_memory_capabilities ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo, VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( pExternalBufferInfo ), reinterpret_cast<VkExternalBufferProperties *>( pExternalBufferProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties PhysicalDevice::getExternalBufferPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties;
+    d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( &externalBufferInfo ), reinterpret_cast<VkExternalBufferProperties *>( &externalBufferProperties ) );
+    
+    
+    return externalBufferProperties;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_external_memory_win32 ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetMemoryWin32HandleKHR( m_device, reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type Device::getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    HANDLE handle;
+    VkResult result = d.vkGetMemoryWin32HandleKHR( m_device, reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleKHR" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), handle );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR * pMemoryWin32HandleProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetMemoryWin32HandlePropertiesKHR( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), handle, reinterpret_cast<VkMemoryWin32HandlePropertiesKHR *>( pMemoryWin32HandleProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR>::type Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR memoryWin32HandleProperties;
+    VkResult result = d.vkGetMemoryWin32HandlePropertiesKHR( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), handle, reinterpret_cast<VkMemoryWin32HandlePropertiesKHR *>( &memoryWin32HandleProperties ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandlePropertiesKHR" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), memoryWin32HandleProperties );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_external_memory_fd ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR * pGetFdInfo, int * pFd, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetMemoryFdKHR( m_device, reinterpret_cast<const VkMemoryGetFdInfoKHR *>( pGetFdInfo ), pFd ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<int>::type Device::getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR & getFdInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    int fd;
+    VkResult result = d.vkGetMemoryFdKHR( m_device, reinterpret_cast<const VkMemoryGetFdInfoKHR *>( &getFdInfo ), &fd );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdKHR" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), fd );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR * pMemoryFdProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetMemoryFdPropertiesKHR( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), fd, reinterpret_cast<VkMemoryFdPropertiesKHR *>( pMemoryFdProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR>::type Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR memoryFdProperties;
+    VkResult result = d.vkGetMemoryFdPropertiesKHR( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), fd, reinterpret_cast<VkMemoryFdPropertiesKHR *>( &memoryFdProperties ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdPropertiesKHR" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), memoryFdProperties );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_external_semaphore_capabilities ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getExternalSemaphorePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( pExternalSemaphoreInfo ), reinterpret_cast<VkExternalSemaphoreProperties *>( pExternalSemaphoreProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties PhysicalDevice::getExternalSemaphorePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties;
+    d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( &externalSemaphoreInfo ), reinterpret_cast<VkExternalSemaphoreProperties *>( &externalSemaphoreProperties ) );
+    
+    
+    return externalSemaphoreProperties;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_external_semaphore_win32 ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR *>( pImportSemaphoreWin32HandleInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR *>( &importSemaphoreWin32HandleInfo ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreWin32HandleKHR" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type Device::getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    HANDLE handle;
+    VkResult result = d.vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreWin32HandleKHR" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), handle );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_external_semaphore_fd ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast<const VkImportSemaphoreFdInfoKHR *>( pImportSemaphoreFdInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast<const VkImportSemaphoreFdInfoKHR *>( &importSemaphoreFdInfo ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreFdKHR" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR * pGetFdInfo, int * pFd, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast<const VkSemaphoreGetFdInfoKHR *>( pGetFdInfo ), pFd ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<int>::type Device::getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR & getFdInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    int fd;
+    VkResult result = d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast<const VkSemaphoreGetFdInfoKHR *>( &getFdInfo ), &fd );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreFdKHR" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), fd );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_push_descriptor ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdPushDescriptorSetKHR( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), set, descriptorWriteCount, reinterpret_cast<const VkWriteDescriptorSet *>( pDescriptorWrites ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdPushDescriptorSetKHR( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), set, descriptorWrites.size(), reinterpret_cast<const VkWriteDescriptorSet *>( descriptorWrites.data() ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, const void * pData, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdPushDescriptorSetWithTemplateKHR( m_commandBuffer, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), static_cast<VkPipelineLayout>( layout ), set, pData );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename DataType, typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, DataType const & data, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdPushDescriptorSetWithTemplateKHR( m_commandBuffer, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), static_cast<VkPipelineLayout>( layout ), set, reinterpret_cast<const void *>( &data ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_EXT_conditional_rendering ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT * pConditionalRenderingBegin, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdBeginConditionalRenderingEXT( m_commandBuffer, reinterpret_cast<const VkConditionalRenderingBeginInfoEXT *>( pConditionalRenderingBegin ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdBeginConditionalRenderingEXT( m_commandBuffer, reinterpret_cast<const VkConditionalRenderingBeginInfoEXT *>( &conditionalRenderingBegin ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endConditionalRenderingEXT( Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdEndConditionalRenderingEXT( m_commandBuffer );
+  }
+
+  //=== VK_KHR_descriptor_update_template ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorUpdateTemplateKHR( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateDescriptorUpdateTemplateKHR( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkDescriptorUpdateTemplate *>( pDescriptorUpdateTemplate ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type Device::createDescriptorUpdateTemplateKHR( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
+    VkResult result = d.vkCreateDescriptorUpdateTemplateKHR( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateKHR" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), descriptorUpdateTemplate );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>>::type Device::createDescriptorUpdateTemplateKHRUnique( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
+    VkResult result = d.vkCreateDescriptorUpdateTemplateKHR( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateKHRUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>( descriptorUpdateTemplate, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyDescriptorUpdateTemplateKHR( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyDescriptorUpdateTemplateKHR( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void * pData, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkUpdateDescriptorSetWithTemplateKHR( m_device, static_cast<VkDescriptorSet>( descriptorSet ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), pData );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename DataType, typename Dispatch>
+  VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, DataType const & data, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkUpdateDescriptorSetWithTemplateKHR( m_device, static_cast<VkDescriptorSet>( descriptorSet ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const void *>( &data ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_NV_clip_space_w_scaling ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetViewportWScalingNV( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewportWScalingNV *>( pViewportWScalings ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const & viewportWScalings, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdSetViewportWScalingNV( m_commandBuffer, firstViewport, viewportWScalings.size(), reinterpret_cast<const VkViewportWScalingNV *>( viewportWScalings.data() ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_EXT_direct_mode_display ===
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Result PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkReleaseDisplayEXT( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkReleaseDisplayEXT( m_physicalDevice, static_cast<VkDisplayKHR>( display ) );
+    
+    
+    
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT )
+  //=== VK_EXT_acquire_xlib_display ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireXlibDisplayEXT( Display * dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkAcquireXlibDisplayEXT( m_physicalDevice, dpy, static_cast<VkDisplayKHR>( display ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type PhysicalDevice::acquireXlibDisplayEXT( Display & dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkAcquireXlibDisplayEXT( m_physicalDevice, &dpy, static_cast<VkDisplayKHR>( display ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireXlibDisplayEXT" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getRandROutputDisplayEXT( Display * dpy, RROutput rrOutput, VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetRandROutputDisplayEXT( m_physicalDevice, dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( pDisplay ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type PhysicalDevice::getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::DisplayKHR display;
+    VkResult result = d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( &display ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getRandROutputDisplayEXT" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), display );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>>::type PhysicalDevice::getRandROutputDisplayEXTUnique( Display & dpy, RROutput rrOutput, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::DisplayKHR display;
+    VkResult result = d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( &display ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getRandROutputDisplayEXTUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>( display, ObjectRelease<PhysicalDevice, Dispatch>( *this, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+
+  //=== VK_EXT_display_surface_counter ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT * pSurfaceCapabilities, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilities2EXT *>( pSurfaceCapabilities ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT>::type PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT surfaceCapabilities;
+    VkResult result = d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilities2EXT *>( &surfaceCapabilities ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2EXT" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surfaceCapabilities );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_EXT_display_control ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT * pDisplayPowerInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkDisplayPowerControlEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT *>( pDisplayPowerInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT & displayPowerInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkDisplayPowerControlEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT *>( &displayPowerInfo ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::displayPowerControlEXT" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT * pDeviceEventInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Fence * pFence, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkRegisterDeviceEventEXT( m_device, reinterpret_cast<const VkDeviceEventInfoEXT *>( pDeviceEventInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkFence *>( pFence ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type Device::registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT & deviceEventInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::Fence fence;
+    VkResult result = d.vkRegisterDeviceEventEXT( m_device, reinterpret_cast<const VkDeviceEventInfoEXT *>( &deviceEventInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkFence *>( &fence ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXT" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), fence );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type Device::registerEventEXTUnique( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT & deviceEventInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::Fence fence;
+    VkResult result = d.vkRegisterDeviceEventEXT( m_device, reinterpret_cast<const VkDeviceEventInfoEXT *>( &deviceEventInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkFence *>( &fence ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXTUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>( fence, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT * pDisplayEventInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Fence * pFence, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkRegisterDisplayEventEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayEventInfoEXT *>( pDisplayEventInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkFence *>( pFence ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT & displayEventInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::Fence fence;
+    VkResult result = d.vkRegisterDisplayEventEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayEventInfoEXT *>( &displayEventInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkFence *>( &fence ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXT" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), fence );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type Device::registerDisplayEventEXTUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT & displayEventInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::Fence fence;
+    VkResult result = d.vkRegisterDisplayEventEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayEventInfoEXT *>( &displayEventInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkFence *>( &fence ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXTUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>( fence, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, uint64_t * pCounterValue, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetSwapchainCounterEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ), static_cast<VkSurfaceCounterFlagBitsEXT>( counter ), pCounterValue ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type Device::getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    uint64_t counterValue;
+    VkResult result = d.vkGetSwapchainCounterEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ), static_cast<VkSurfaceCounterFlagBitsEXT>( counter ), &counterValue );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainCounterEXT" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), counterValue );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_GOOGLE_display_timing ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE * pDisplayTimingProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetRefreshCycleDurationGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkRefreshCycleDurationGOOGLE *>( pDisplayTimingProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE>::type Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE displayTimingProperties;
+    VkResult result = d.vkGetRefreshCycleDurationGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkRefreshCycleDurationGOOGLE *>( &displayTimingProperties ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getRefreshCycleDurationGOOGLE" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), displayTimingProperties );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint32_t * pPresentationTimingCount, VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE * pPresentationTimings, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), pPresentationTimingCount, reinterpret_cast<VkPastPresentationTimingGOOGLE *>( pPresentationTimings ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename PastPresentationTimingGOOGLEAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator>>::type Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator> presentationTimings;
+    uint32_t presentationTimingCount;
+    VkResult result;
+    do
+    {
+      result = d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), &presentationTimingCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && presentationTimingCount )
+      {
+        presentationTimings.resize( presentationTimingCount );
+        result = d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), &presentationTimingCount, reinterpret_cast<VkPastPresentationTimingGOOGLE *>( presentationTimings.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPastPresentationTimingGOOGLE" );
+    VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() );
+    if ( presentationTimingCount < presentationTimings.size() )
+    {
+      presentationTimings.resize( presentationTimingCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), presentationTimings );
+  }
+
+  template <typename PastPresentationTimingGOOGLEAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, PastPresentationTimingGOOGLE>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator>>::type Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, PastPresentationTimingGOOGLEAllocator & pastPresentationTimingGOOGLEAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator> presentationTimings( pastPresentationTimingGOOGLEAllocator );
+    uint32_t presentationTimingCount;
+    VkResult result;
+    do
+    {
+      result = d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), &presentationTimingCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && presentationTimingCount )
+      {
+        presentationTimings.resize( presentationTimingCount );
+        result = d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), &presentationTimingCount, reinterpret_cast<VkPastPresentationTimingGOOGLE *>( presentationTimings.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPastPresentationTimingGOOGLE" );
+    VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() );
+    if ( presentationTimingCount < presentationTimings.size() )
+    {
+      presentationTimings.resize( presentationTimingCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), presentationTimings );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_EXT_discard_rectangles ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetDiscardRectangleEXT( m_commandBuffer, firstDiscardRectangle, discardRectangleCount, reinterpret_cast<const VkRect2D *>( pDiscardRectangles ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdSetDiscardRectangleEXT( m_commandBuffer, firstDiscardRectangle, discardRectangles.size(), reinterpret_cast<const VkRect2D *>( discardRectangles.data() ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_EXT_hdr_metadata ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( uint32_t swapchainCount, const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains, const VULKAN_HPP_NAMESPACE::HdrMetadataEXT * pMetadata, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkSetHdrMetadataEXT( m_device, swapchainCount, reinterpret_cast<const VkSwapchainKHR *>( pSwapchains ), reinterpret_cast<const VkHdrMetadataEXT *>( pMetadata ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const & swapchains, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::HdrMetadataEXT> const & metadata, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    VULKAN_HPP_ASSERT( swapchains.size() == metadata.size() );
+#else
+    if ( swapchains.size() != metadata.size() )
+  {
+    throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::setHdrMetadataEXT: swapchains.size() != metadata.size()" );
+  }
+#endif  /*VULKAN_HPP_NO_EXCEPTIONS*/
+    
+    
+    d.vkSetHdrMetadataEXT( m_device, swapchains.size(), reinterpret_cast<const VkSwapchainKHR *>( swapchains.data() ), reinterpret_cast<const VkHdrMetadataEXT *>( metadata.data() ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_create_renderpass2 ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateRenderPass2KHR( m_device, reinterpret_cast<const VkRenderPassCreateInfo2 *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkRenderPass *>( pRenderPass ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type Device::createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::RenderPass renderPass;
+    VkResult result = d.vkCreateRenderPass2KHR( m_device, reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkRenderPass *>( &renderPass ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2KHR" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), renderPass );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type Device::createRenderPass2KHRUnique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::RenderPass renderPass;
+    VkResult result = d.vkCreateRenderPass2KHR( m_device, reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkRenderPass *>( &renderPass ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2KHRUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>( renderPass, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdBeginRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdBeginRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdNextSubpass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdNextSubpass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_shared_presentable_image ===
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetSwapchainStatusKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkGetSwapchainStatusKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainStatusKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
+    
+    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  //=== VK_KHR_external_fence_capabilities ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo, VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPhysicalDeviceExternalFencePropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( pExternalFenceInfo ), reinterpret_cast<VkExternalFenceProperties *>( pExternalFenceProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties PhysicalDevice::getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties;
+    d.vkGetPhysicalDeviceExternalFencePropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( &externalFenceInfo ), reinterpret_cast<VkExternalFenceProperties *>( &externalFenceProperties ) );
+    
+    
+    return externalFenceProperties;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_external_fence_win32 ===
+
+  // NOTE(review): generated code (Vulkan-Hpp); edits here will be lost on regeneration.
+
+  // Pointer-parameter variant: forwards straight to the dispatch table and
+  // returns the raw Result without throwing; the caller checks it.
+  // The assert guards against a dispatcher built for a different Vulkan header.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast<const VkImportFenceWin32HandleInfoKHR *>( pImportFenceWin32HandleInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced variant: takes the info struct by reference; resultCheck converts
+  // a failure Result into an exception when exceptions are enabled.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast<const VkImportFenceWin32HandleInfoKHR *>( &importFenceWin32HandleInfo ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceWin32HandleKHR" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // Pointer-parameter variant: writes the exported HANDLE through pHandle;
+  // non-throwing, returns the raw Result.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetFenceWin32HandleKHR( m_device, reinterpret_cast<const VkFenceGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced variant: returns the HANDLE by value; throws (via resultCheck)
+  // on a failure Result instead of surfacing it to the caller.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type Device::getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    HANDLE handle;
+    VkResult result = d.vkGetFenceWin32HandleKHR( m_device, reinterpret_cast<const VkFenceGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceWin32HandleKHR" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), handle );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_external_fence_fd ===
+
+
+  // Pointer-parameter variant: imports a fence payload from a POSIX fd;
+  // non-throwing, returns the raw Result for the caller to check.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR * pImportFenceFdInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkImportFenceFdKHR( m_device, reinterpret_cast<const VkImportFenceFdInfoKHR *>( pImportFenceFdInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced variant: reference parameter; resultCheck raises on failure when
+  // exceptions are enabled, otherwise the Result is wrapped for the caller.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkImportFenceFdKHR( m_device, reinterpret_cast<const VkImportFenceFdInfoKHR *>( &importFenceFdInfo ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceFdKHR" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // Pointer-parameter variant: exports a fence payload as a POSIX fd written
+  // through pFd; non-throwing.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR * pGetFdInfo, int * pFd, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR *>( pGetFdInfo ), pFd ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced variant: returns the fd by value, throwing on a failure Result.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<int>::type Device::getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR & getFdInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    int fd;
+    VkResult result = d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR *>( &getFdInfo ), &fd );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceFdKHR" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), fd );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_performance_query ===
+
+
+  // Pointer-parameter variant of the counter enumeration: standard Vulkan
+  // two-call usage (null arrays to query the count, non-null to fill).
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, uint32_t * pCounterCount, VULKAN_HPP_NAMESPACE::PerformanceCounterKHR * pCounters, VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR * pCounterDescriptions, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, pCounterCount, reinterpret_cast<VkPerformanceCounterKHR *>( pCounters ), reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( pCounterDescriptions ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced variant: returns {counters, descriptions} as a pair of vectors.
+  // The do/while retries on VK_INCOMPLETE in case the counter set grows
+  // between the size query and the fill call; the trailing resize shrinks the
+  // vectors if it shrank instead.
+  template <typename PerformanceCounterKHRAllocator, typename PerformanceCounterDescriptionKHRAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator>, std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>>::type PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator>, std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>> data;
+    std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator> & counters = data.first;
+    std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator> & counterDescriptions = data.second;
+    uint32_t counterCount;
+    VkResult result;
+    do
+    {
+      result = d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, nullptr, nullptr );
+      if ( ( result == VK_SUCCESS ) && counterCount )
+      {
+        counters.resize( counterCount );
+counterDescriptions.resize( counterCount );
+        result = d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, reinterpret_cast<VkPerformanceCounterKHR *>( counters.data() ), reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( counterDescriptions.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" );
+    VULKAN_HPP_ASSERT( counterCount <= counters.size() );
+    if ( counterCount < counters.size() )
+    {
+      counters.resize( counterCount );
+counterDescriptions.resize( counterCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
+  }
+
+  // Same as above, but constructs both vectors with caller-supplied allocators
+  // (piecewise pair construction); enabled only when B1/B2 value_types match.
+  template <typename PerformanceCounterKHRAllocator, typename PerformanceCounterDescriptionKHRAllocator, typename Dispatch, typename B1, typename B2, typename std::enable_if<std::is_same<typename B1::value_type, PerformanceCounterKHR>::value && std::is_same<typename B2::value_type, PerformanceCounterDescriptionKHR>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator>, std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>>::type PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, PerformanceCounterKHRAllocator & performanceCounterKHRAllocator, PerformanceCounterDescriptionKHRAllocator & performanceCounterDescriptionKHRAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator>, std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>> data( std::piecewise_construct, std::forward_as_tuple( performanceCounterKHRAllocator ), std::forward_as_tuple( performanceCounterDescriptionKHRAllocator ) );
+    std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator> & counters = data.first;
+    std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator> & counterDescriptions = data.second;
+    uint32_t counterCount;
+    VkResult result;
+    do
+    {
+      result = d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, nullptr, nullptr );
+      if ( ( result == VK_SUCCESS ) && counterCount )
+      {
+        counters.resize( counterCount );
+counterDescriptions.resize( counterCount );
+        result = d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, reinterpret_cast<VkPerformanceCounterKHR *>( counters.data() ), reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( counterDescriptions.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" );
+    VULKAN_HPP_ASSERT( counterCount <= counters.size() );
+    if ( counterCount < counters.size() )
+    {
+      counters.resize( counterCount );
+counterDescriptions.resize( counterCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // Pointer-parameter variant: writes the number of passes through pNumPasses.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR( const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR * pPerformanceQueryCreateInfo, uint32_t * pNumPasses, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( m_physicalDevice, reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR *>( pPerformanceQueryCreateInfo ), pNumPasses );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced variant: returns the pass count by value (no VkResult to check).
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR( const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    uint32_t numPasses;
+    d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( m_physicalDevice, reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR *>( &performanceQueryCreateInfo ), &numPasses );
+    
+    
+    return numPasses;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // Pointer-parameter variant: acquires the profiling lock; non-throwing.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR * pInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast<const VkAcquireProfilingLockInfoKHR *>( pInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced variant: throws via resultCheck on failure.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR & info, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast<const VkAcquireProfilingLockInfoKHR *>( &info ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::acquireProfilingLockKHR" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // Releases the profiling lock; no parameters beyond the dispatcher, no result.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::releaseProfilingLockKHR( Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkReleaseProfilingLockKHR( m_device );
+  }
+
+  //=== VK_KHR_get_surface_capabilities2 ===
+
+
+  // Pointer-parameter variant: fills *pSurfaceCapabilities; non-throwing.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR * pSurfaceCapabilities, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ), reinterpret_cast<VkSurfaceCapabilities2KHR *>( pSurfaceCapabilities ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced variant: returns the capabilities struct by value, throwing on failure.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>::type PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR surfaceCapabilities;
+    VkResult result = d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), reinterpret_cast<VkSurfaceCapabilities2KHR *>( &surfaceCapabilities ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surfaceCapabilities );
+  }
+
+  // StructureChain variant: the SurfaceCapabilities2KHR link inside the
+  // caller-chosen chain is filled, so pNext extension structs are queried too.
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR & surfaceCapabilities = structureChain.template get<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>();
+    VkResult result = d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), reinterpret_cast<VkSurfaceCapabilities2KHR *>( &surfaceCapabilities ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), structureChain );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // Pointer-parameter variant of the format enumeration (two-call usage).
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, uint32_t * pSurfaceFormatCount, VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR * pSurfaceFormats, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ), pSurfaceFormatCount, reinterpret_cast<VkSurfaceFormat2KHR *>( pSurfaceFormats ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced variant: returns a vector of formats; retries on VK_INCOMPLETE
+  // and shrinks the vector if the count decreased between the two calls.
+  template <typename SurfaceFormat2KHRAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR, SurfaceFormat2KHRAllocator>>::type PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR, SurfaceFormat2KHRAllocator> surfaceFormats;
+    uint32_t surfaceFormatCount;
+    VkResult result;
+    do
+    {
+      result = d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && surfaceFormatCount )
+      {
+        surfaceFormats.resize( surfaceFormatCount );
+        result = d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" );
+    VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
+    if ( surfaceFormatCount < surfaceFormats.size() )
+    {
+      surfaceFormats.resize( surfaceFormatCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surfaceFormats );
+  }
+
+  // Same as above with a caller-supplied vector allocator.
+  template <typename SurfaceFormat2KHRAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, SurfaceFormat2KHR>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR, SurfaceFormat2KHRAllocator>>::type PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, SurfaceFormat2KHRAllocator & surfaceFormat2KHRAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR, SurfaceFormat2KHRAllocator> surfaceFormats( surfaceFormat2KHRAllocator );
+    uint32_t surfaceFormatCount;
+    VkResult result;
+    do
+    {
+      result = d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && surfaceFormatCount )
+      {
+        surfaceFormats.resize( surfaceFormatCount );
+        result = d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" );
+    VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
+    if ( surfaceFormatCount < surfaceFormats.size() )
+    {
+      surfaceFormats.resize( surfaceFormatCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surfaceFormats );
+  }
+
+  // StructureChain variant: wires each SurfaceFormat2KHR's pNext into its
+  // chain before the fill call, then copies the results back into the chains.
+  template <typename StructureChain, typename StructureChainAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<StructureChain, StructureChainAllocator>>::type PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<StructureChain, StructureChainAllocator> structureChains;
+    std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR> surfaceFormats;
+    uint32_t surfaceFormatCount;
+    VkResult result;
+    do
+    {
+      result = d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && surfaceFormatCount )
+      {
+        structureChains.resize( surfaceFormatCount );
+        surfaceFormats.resize( surfaceFormatCount );
+        for ( uint32_t i = 0; i < surfaceFormatCount; i++ )
+        {
+          surfaceFormats[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>().pNext;
+        }
+        result = d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" );
+    VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
+      if ( surfaceFormatCount < surfaceFormats.size() )
+      {
+        structureChains.resize( surfaceFormatCount );
+      }
+      for ( uint32_t i = 0; i < surfaceFormatCount; i++ )
+      {
+        structureChains[i].template get<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>() = surfaceFormats[i];
+      }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), structureChains );
+  }
+
+  // StructureChain variant with a caller-supplied allocator for the chain vector.
+  template <typename StructureChain, typename StructureChainAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, StructureChain>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<StructureChain, StructureChainAllocator>>::type PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, StructureChainAllocator & structureChainAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<StructureChain, StructureChainAllocator> structureChains( structureChainAllocator );
+    std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR> surfaceFormats;
+    uint32_t surfaceFormatCount;
+    VkResult result;
+    do
+    {
+      result = d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && surfaceFormatCount )
+      {
+        structureChains.resize( surfaceFormatCount );
+        surfaceFormats.resize( surfaceFormatCount );
+        for ( uint32_t i = 0; i < surfaceFormatCount; i++ )
+        {
+          surfaceFormats[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>().pNext;
+        }
+        result = d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" );
+    VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
+      if ( surfaceFormatCount < surfaceFormats.size() )
+      {
+        structureChains.resize( surfaceFormatCount );
+      }
+      for ( uint32_t i = 0; i < surfaceFormatCount; i++ )
+      {
+        structureChains[i].template get<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>() = surfaceFormats[i];
+      }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), structureChains );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_get_display_properties2 ===
+
+
+  // Pointer-parameter variant of the display-property enumeration.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayProperties2KHR( uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayProperties2KHR * pProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( pProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced variant: vector-returning two-call loop, retrying on VK_INCOMPLETE.
+  template <typename DisplayProperties2KHRAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR, DisplayProperties2KHRAllocator>>::type PhysicalDevice::getDisplayProperties2KHR( Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR, DisplayProperties2KHRAllocator> properties;
+    uint32_t propertyCount;
+    VkResult result;
+    do
+    {
+      result = d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( properties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" );
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    if ( propertyCount < properties.size() )
+    {
+      properties.resize( propertyCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
+  }
+
+  // Same as above with a caller-supplied vector allocator.
+  template <typename DisplayProperties2KHRAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, DisplayProperties2KHR>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR, DisplayProperties2KHRAllocator>>::type PhysicalDevice::getDisplayProperties2KHR( DisplayProperties2KHRAllocator & displayProperties2KHRAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR, DisplayProperties2KHRAllocator> properties( displayProperties2KHRAllocator );
+    uint32_t propertyCount;
+    VkResult result;
+    do
+    {
+      result = d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( properties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" );
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    if ( propertyCount < properties.size() )
+    {
+      properties.resize( propertyCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // Pointer-parameter variant of the display-plane-property enumeration.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneProperties2KHR( uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR * pProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( pProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced variant: vector-returning two-call loop, retrying on VK_INCOMPLETE.
+  template <typename DisplayPlaneProperties2KHRAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator>>::type PhysicalDevice::getDisplayPlaneProperties2KHR( Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator> properties;
+    uint32_t propertyCount;
+    VkResult result;
+    do
+    {
+      result = d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( properties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" );
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    if ( propertyCount < properties.size() )
+    {
+      properties.resize( propertyCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
+  }
+
+  // Same as above with a caller-supplied vector allocator.
+  template <typename DisplayPlaneProperties2KHRAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, DisplayPlaneProperties2KHR>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator>>::type PhysicalDevice::getDisplayPlaneProperties2KHR( DisplayPlaneProperties2KHRAllocator & displayPlaneProperties2KHRAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator> properties( displayPlaneProperties2KHRAllocator );
+    uint32_t propertyCount;
+    VkResult result;
+    do
+    {
+      result = d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( properties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" );
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    if ( propertyCount < properties.size() )
+    {
+      properties.resize( propertyCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // Pointer-parameter variant of the display-mode-property enumeration.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR * pProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), pPropertyCount, reinterpret_cast<VkDisplayModeProperties2KHR *>( pProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced variant: vector-returning two-call loop, retrying on VK_INCOMPLETE.
+  template <typename DisplayModeProperties2KHRAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator>>::type PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator> properties;
+    uint32_t propertyCount;
+    VkResult result;
+    do
+    {
+      result = d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModeProperties2KHR *>( properties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" );
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    if ( propertyCount < properties.size() )
+    {
+      properties.resize( propertyCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
+  }
+
+  template <typename DisplayModeProperties2KHRAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, DisplayModeProperties2KHR>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator>>::type PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, DisplayModeProperties2KHRAllocator & displayModeProperties2KHRAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator> properties( displayModeProperties2KHRAllocator );
+    uint32_t propertyCount;
+    VkResult result;
+    do
+    {
+      result = d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModeProperties2KHR *>( properties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" );
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    if ( propertyCount < properties.size() )
+    {
+      properties.resize( propertyCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR * pDisplayPlaneInfo, VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR * pCapabilities, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice, reinterpret_cast<const VkDisplayPlaneInfo2KHR *>( pDisplayPlaneInfo ), reinterpret_cast<VkDisplayPlaneCapabilities2KHR *>( pCapabilities ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR>::type PhysicalDevice::getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR & displayPlaneInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR capabilities;
+    VkResult result = d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice, reinterpret_cast<const VkDisplayPlaneInfo2KHR *>( &displayPlaneInfo ), reinterpret_cast<VkDisplayPlaneCapabilities2KHR *>( &capabilities ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilities2KHR" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), capabilities );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+#if defined( VK_USE_PLATFORM_IOS_MVK )
+  //=== VK_MVK_ios_surface ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateIOSSurfaceMVK( m_instance, reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    VkResult result = d.vkCreateIOSSurfaceMVK( m_instance, reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createIOSSurfaceMVK" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type Instance::createIOSSurfaceMVKUnique( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    VkResult result = d.vkCreateIOSSurfaceMVK( m_instance, reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createIOSSurfaceMVKUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+#if defined( VK_USE_PLATFORM_MACOS_MVK )
+  //=== VK_MVK_macos_surface ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateMacOSSurfaceMVK( m_instance, reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    VkResult result = d.vkCreateMacOSSurfaceMVK( m_instance, reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createMacOSSurfaceMVK" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type Instance::createMacOSSurfaceMVKUnique( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    VkResult result = d.vkCreateMacOSSurfaceMVK( m_instance, reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createMacOSSurfaceMVKUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+
+  //=== VK_EXT_debug_utils ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pNameInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>( pNameInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT & nameInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>( &nameInfo ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectNameEXT" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT * pTagInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT *>( pTagInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT & tagInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT *>( &tagInfo ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectTagEXT" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Queue::endDebugUtilsLabelEXT( Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkQueueEndDebugUtilsLabelEXT( m_queue );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endDebugUtilsLabelEXT( Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdEndDebugUtilsLabelEXT( m_commandBuffer );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT * pMessenger, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateDebugUtilsMessengerEXT( m_instance, reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkDebugUtilsMessengerEXT *>( pMessenger ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT>::type Instance::createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger;
+    VkResult result = d.vkCreateDebugUtilsMessengerEXT( m_instance, reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDebugUtilsMessengerEXT *>( &messenger ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugUtilsMessengerEXT" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), messenger );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT, Dispatch>>::type Instance::createDebugUtilsMessengerEXTUnique( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger;
+    VkResult result = d.vkCreateDebugUtilsMessengerEXT( m_instance, reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDebugUtilsMessengerEXT *>( &messenger ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugUtilsMessengerEXTUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT, Dispatch>( messenger, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyDebugUtilsMessengerEXT( m_instance, static_cast<VkDebugUtilsMessengerEXT>( messenger ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyDebugUtilsMessengerEXT( m_instance, static_cast<VkDebugUtilsMessengerEXT>( messenger ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyDebugUtilsMessengerEXT( m_instance, static_cast<VkDebugUtilsMessengerEXT>( messenger ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyDebugUtilsMessengerEXT( m_instance, static_cast<VkDebugUtilsMessengerEXT>( messenger ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT * pCallbackData, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkSubmitDebugUtilsMessageEXT( m_instance, static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>( messageSeverity ), static_cast<VkDebugUtilsMessageTypeFlagsEXT>( messageTypes ), reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT *>( pCallbackData ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT & callbackData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkSubmitDebugUtilsMessageEXT( m_instance, static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>( messageSeverity ), static_cast<VkDebugUtilsMessageTypeFlagsEXT>( messageTypes ), reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT *>( &callbackData ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  //=== VK_ANDROID_external_memory_android_hardware_buffer ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer * buffer, VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID * pProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( pProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>::type Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID properties;
+    VkResult result = d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, &buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( &properties ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID & properties = structureChain.template get<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>();
+    VkResult result = d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, &buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( &properties ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), structureChain );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID * pInfo, struct AHardwareBuffer ** pBuffer, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetMemoryAndroidHardwareBufferANDROID( m_device, reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID *>( pInfo ), pBuffer ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<struct AHardwareBuffer *>::type Device::getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID & info, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    struct AHardwareBuffer * buffer;
+    VkResult result = d.vkGetMemoryAndroidHardwareBufferANDROID( m_device, reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID *>( &info ), &buffer );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryAndroidHardwareBufferANDROID" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), buffer );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+  //=== VK_EXT_sample_locations ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT * pSampleLocationsInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetSampleLocationsEXT( m_commandBuffer, reinterpret_cast<const VkSampleLocationsInfoEXT *>( pSampleLocationsInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT & sampleLocationsInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdSetSampleLocationsEXT( m_commandBuffer, reinterpret_cast<const VkSampleLocationsInfoEXT *>( &sampleLocationsInfo ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT * pMultisampleProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPhysicalDeviceMultisamplePropertiesEXT( m_physicalDevice, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<VkMultisamplePropertiesEXT *>( pMultisampleProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode overload: queries multisample properties for the given sample
+  // count and returns the filled struct by value instead of via out-pointer.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT PhysicalDevice::getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT multisampleProperties;
+    d.vkGetPhysicalDeviceMultisamplePropertiesEXT( m_physicalDevice, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<VkMultisamplePropertiesEXT *>( &multisampleProperties ) );
+    
+    
+    return multisampleProperties;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_get_memory_requirements2 ===
+
+
+  // C-style overload: forwards the raw in/out pointers unchanged to
+  // vkGetImageMemoryRequirements2KHR; caller owns all storage.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetImageMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode overload: returns the MemoryRequirements2 struct by value.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
+    d.vkGetImageMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+    
+    
+    return memoryRequirements;
+  }
+
+  // StructureChain overload: fills the MemoryRequirements2 link of a caller-chosen
+  // pNext chain, so extension structs (e.g. dedicated-allocation info) are queried too.
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
+    d.vkGetImageMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+    
+    
+    return structureChain;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // C-style overload: forwards the raw in/out pointers unchanged to
+  // vkGetBufferMemoryRequirements2KHR; caller owns all storage.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetBufferMemoryRequirements2KHR( m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode overload: returns the MemoryRequirements2 struct by value.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
+    d.vkGetBufferMemoryRequirements2KHR( m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+    
+    
+    return memoryRequirements;
+  }
+
+  // StructureChain overload: fills the MemoryRequirements2 link of a caller-chosen
+  // pNext chain so extension structs are queried in the same call.
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
+    d.vkGetBufferMemoryRequirements2KHR( m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+    
+    
+    return structureChain;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // C-style overload: forwards the count/array pointers unchanged to
+  // vkGetImageSparseMemoryRequirements2KHR (standard Vulkan two-call enumeration).
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo, uint32_t * pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetImageSparseMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( pInfo ), pSparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode overload: performs the two-call enumeration (count query, then
+  // fill) and returns the results as a std::vector.
+  template <typename SparseImageMemoryRequirements2Allocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> Device::getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements;
+    uint32_t sparseMemoryRequirementCount;
+    d.vkGetImageSparseMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr );
+    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    d.vkGetImageSparseMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
+    
+    VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
+    // Shrink in case the implementation reported fewer entries on the second call.
+    if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
+    {
+      sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    }
+    return sparseMemoryRequirements;
+  }
+
+  // Same as above, but the result vector is constructed with a caller-supplied allocator.
+  template <typename SparseImageMemoryRequirements2Allocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, SparseImageMemoryRequirements2>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> Device::getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements( sparseImageMemoryRequirements2Allocator );
+    uint32_t sparseMemoryRequirementCount;
+    d.vkGetImageSparseMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr );
+    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    d.vkGetImageSparseMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
+    
+    VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
+    if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
+    {
+      sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    }
+    return sparseMemoryRequirements;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_acceleration_structure ===
+
+
+  // C-style overload: forwards the raw pointers unchanged to
+  // vkCreateAccelerationStructureKHR and returns its VkResult.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructure, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateAccelerationStructureKHR( m_device, reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkAccelerationStructureKHR *>( pAccelerationStructure ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode overload: creates the acceleration structure, checks the result
+  // (throws / asserts per resultCheck), and returns the handle.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR>::type Device::createAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure;
+    VkResult result = d.vkCreateAccelerationStructureKHR( m_device, reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkAccelerationStructureKHR *>( &accelerationStructure ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureKHR" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), accelerationStructure );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  // Unique-handle variant: wraps the created handle in a UniqueHandle that
+  // destroys it (with the same allocator and dispatcher) when it goes out of scope.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR, Dispatch>>::type Device::createAccelerationStructureKHRUnique( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure;
+    VkResult result = d.vkCreateAccelerationStructureKHR( m_device, reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkAccelerationStructureKHR *>( &accelerationStructure ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureKHRUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR, Dispatch>( accelerationStructure, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // C-style overload: forwards the handle and raw allocator pointer unchanged to
+  // vkDestroyAccelerationStructureKHR.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyAccelerationStructureKHR( m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode overload: Optional<> allocator converts to nullptr when absent.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyAccelerationStructureKHR( m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // Generic destroy() overload for AccelerationStructureKHR handles; identical in
+  // effect to destroyAccelerationStructureKHR (used by UniqueHandle deleters).
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyAccelerationStructureKHR( m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode generic destroy() overload for AccelerationStructureKHR handles.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyAccelerationStructureKHR( m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // C-style overload: records an acceleration-structure build; forwards the raw
+  // count/array pointers unchanged to vkCmdBuildAccelerationStructuresKHR.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresKHR( uint32_t infoCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdBuildAccelerationStructuresKHR( m_commandBuffer, infoCount, reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pInfos ), reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( ppBuildRangeInfos ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode overload: validates that the two ArrayProxy arguments have equal
+  // length (assert without exceptions, LogicError with) before recording the build.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    VULKAN_HPP_ASSERT( infos.size() == pBuildRangeInfos.size() );
+#else
+    if ( infos.size() != pBuildRangeInfos.size() )
+  {
+    throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" );
+  }
+#endif  /*VULKAN_HPP_NO_EXCEPTIONS*/
+    
+    
+    d.vkCmdBuildAccelerationStructuresKHR( m_commandBuffer, infos.size(), reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ), reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( pBuildRangeInfos.data() ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // C-style overload: records an indirect acceleration-structure build; forwards
+  // the raw pointers unchanged to vkCmdBuildAccelerationStructuresIndirectKHR.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresIndirectKHR( uint32_t infoCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos, const VULKAN_HPP_NAMESPACE::DeviceAddress * pIndirectDeviceAddresses, const uint32_t * pIndirectStrides, const uint32_t * const * ppMaxPrimitiveCounts, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdBuildAccelerationStructuresIndirectKHR( m_commandBuffer, infoCount, reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pInfos ), reinterpret_cast<const VkDeviceAddress *>( pIndirectDeviceAddresses ), pIndirectStrides, ppMaxPrimitiveCounts );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode overload: all four ArrayProxy arguments must be the same length;
+  // mismatches assert (no-exceptions builds) or throw LogicError.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresIndirectKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceAddress> const & indirectDeviceAddresses, VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & indirectStrides, VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t * const> const & pMaxPrimitiveCounts, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    VULKAN_HPP_ASSERT( infos.size() == indirectDeviceAddresses.size() );
+    VULKAN_HPP_ASSERT( infos.size() == indirectStrides.size() );
+    VULKAN_HPP_ASSERT( infos.size() == pMaxPrimitiveCounts.size() );
+#else
+    if ( infos.size() != indirectDeviceAddresses.size() )
+  {
+    throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectDeviceAddresses.size()" );
+  }
+    if ( infos.size() != indirectStrides.size() )
+  {
+    throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectStrides.size()" );
+  }
+    if ( infos.size() != pMaxPrimitiveCounts.size() )
+  {
+    throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != pMaxPrimitiveCounts.size()" );
+  }
+#endif  /*VULKAN_HPP_NO_EXCEPTIONS*/
+    
+    
+    d.vkCmdBuildAccelerationStructuresIndirectKHR( m_commandBuffer, infos.size(), reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ), reinterpret_cast<const VkDeviceAddress *>( indirectDeviceAddresses.data() ), indirectStrides.data(), pMaxPrimitiveCounts.data() );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // C-style overload: host-side build via vkBuildAccelerationStructuresKHR; the
+  // raw pointers and deferred-operation handle are forwarded unchanged.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::buildAccelerationStructuresKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, uint32_t infoCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkBuildAccelerationStructuresKHR( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), infoCount, reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pInfos ), reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( ppBuildRangeInfos ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode overload: checks array lengths match, then treats eSuccess,
+  // eOperationDeferredKHR and eOperationNotDeferredKHR as valid results.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::buildAccelerationStructuresKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    VULKAN_HPP_ASSERT( infos.size() == pBuildRangeInfos.size() );
+#else
+    if ( infos.size() != pBuildRangeInfos.size() )
+  {
+    throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" );
+  }
+#endif  /*VULKAN_HPP_NO_EXCEPTIONS*/
+    
+    
+    VkResult result = d.vkBuildAccelerationStructuresKHR( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), infos.size(), reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ), reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( pBuildRangeInfos.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::buildAccelerationStructuresKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
+    
+    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // C-style overload: forwards the raw info pointer unchanged to
+  // vkCopyAccelerationStructureKHR and returns its VkResult.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCopyAccelerationStructureKHR( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( pInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode overload: deferred-operation success codes are accepted by
+  // resultCheck; the raw Result is returned so callers can detect deferral.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkCopyAccelerationStructureKHR( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( &info ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
+    
+    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // C-style overload: forwards the raw info pointer unchanged to
+  // vkCopyAccelerationStructureToMemoryKHR and returns its VkResult.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCopyAccelerationStructureToMemoryKHR( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( pInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode overload: serializes an acceleration structure to memory;
+  // deferred-operation success codes are accepted by resultCheck.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkCopyAccelerationStructureToMemoryKHR( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( &info ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureToMemoryKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
+    
+    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // C-style overload: forwards the raw info pointer unchanged to
+  // vkCopyMemoryToAccelerationStructureKHR and returns its VkResult.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCopyMemoryToAccelerationStructureKHR( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( pInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode overload: deserializes memory into an acceleration structure;
+  // deferred-operation success codes are accepted by resultCheck.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkCopyMemoryToAccelerationStructureKHR( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( &info ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToAccelerationStructureKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
+    
+    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // C-style overload: forwards the raw buffer pointer and sizes unchanged to
+  // vkWriteAccelerationStructuresPropertiesKHR and returns its VkResult.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, size_t dataSize, void * pData, size_t stride, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkWriteAccelerationStructuresPropertiesKHR( m_device, accelerationStructureCount, reinterpret_cast<const VkAccelerationStructureKHR *>( pAccelerationStructures ), static_cast<VkQueryType>( queryType ), dataSize, pData, stride ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode overload: writes query results into a typed vector sized from
+  // dataSize; dataSize must be a whole multiple of sizeof(DataType).
+  template <typename DataType, typename DataTypeAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type Device::writeAccelerationStructuresPropertiesKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, size_t dataSize, size_t stride, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+        VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
+    std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) );
+    VkResult result = d.vkWriteAccelerationStructuresPropertiesKHR( m_device, accelerationStructures.size(), reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ), static_cast<VkQueryType>( queryType ), data.size() * sizeof( DataType ), reinterpret_cast<void *>( data.data() ), stride );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertiesKHR" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
+  }
+
+  // Single-value variant: queries exactly one DataType worth of property data.
+  template <typename DataType, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type Device::writeAccelerationStructuresPropertyKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, size_t stride, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    DataType data;
+    VkResult result = d.vkWriteAccelerationStructuresPropertiesKHR( m_device, accelerationStructures.size(), reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ), static_cast<VkQueryType>( queryType ), sizeof( DataType ), reinterpret_cast<void *>( &data ), stride );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertyKHR" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // C-style overload: records the copy; forwards the raw info pointer unchanged
+  // to vkCmdCopyAccelerationStructureKHR.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( pInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode overload: takes the copy-info struct by reference.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( &info ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // C-style overload: records the serialize-to-memory copy; forwards the raw info
+  // pointer unchanged to vkCmdCopyAccelerationStructureToMemoryKHR.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdCopyAccelerationStructureToMemoryKHR( m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( pInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode overload: takes the copy-info struct by reference.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdCopyAccelerationStructureToMemoryKHR( m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( &info ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // C-style overload: records the deserialize-from-memory copy; forwards the raw
+  // info pointer unchanged to vkCmdCopyMemoryToAccelerationStructureKHR.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdCopyMemoryToAccelerationStructureKHR( m_commandBuffer, reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( pInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdCopyMemoryToAccelerationStructureKHR( m_commandBuffer, reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( &info ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE DeviceAddress Device::getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR * pInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<DeviceAddress>( d.vkGetAccelerationStructureDeviceAddressKHR( m_device, reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR *>( pInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress Device::getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkDeviceAddress result = d.vkGetAccelerationStructureDeviceAddressKHR( m_device, reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR *>( &info ) );
+    
+    
+    return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdWriteAccelerationStructuresPropertiesKHR( m_commandBuffer, accelerationStructureCount, reinterpret_cast<const VkAccelerationStructureKHR *>( pAccelerationStructures ), static_cast<VkQueryType>( queryType ), static_cast<VkQueryPool>( queryPool ), firstQuery );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdWriteAccelerationStructuresPropertiesKHR( m_commandBuffer, accelerationStructures.size(), reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ), static_cast<VkQueryType>( queryType ), static_cast<VkQueryPool>( queryPool ), firstQuery );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR * pVersionInfo, VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR * pCompatibility, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetDeviceAccelerationStructureCompatibilityKHR( m_device, reinterpret_cast<const VkAccelerationStructureVersionInfoKHR *>( pVersionInfo ), reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( pCompatibility ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR Device::getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR & versionInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR compatibility;
+    d.vkGetDeviceAccelerationStructureCompatibilityKHR( m_device, reinterpret_cast<const VkAccelerationStructureVersionInfoKHR *>( &versionInfo ), reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( &compatibility ) );
+    
+    
+    return compatibility;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getAccelerationStructureBuildSizesKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pBuildInfo, const uint32_t * pMaxPrimitiveCounts, VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR * pSizeInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetAccelerationStructureBuildSizesKHR( m_device, static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ), reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pBuildInfo ), pMaxPrimitiveCounts, reinterpret_cast<VkAccelerationStructureBuildSizesInfoKHR *>( pSizeInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR Device::getAccelerationStructureBuildSizesKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR & buildInfo, VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & maxPrimitiveCounts, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    VULKAN_HPP_ASSERT( maxPrimitiveCounts.size() == buildInfo.geometryCount );
+#else
+    if ( maxPrimitiveCounts.size() != buildInfo.geometryCount )
+    {
+      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureBuildSizesKHR: maxPrimitiveCounts.size() != buildInfo.geometryCount" );
+    }
+#endif  /*VULKAN_HPP_NO_EXCEPTIONS*/
+    
+    VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR sizeInfo;
+    d.vkGetAccelerationStructureBuildSizesKHR( m_device, static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ), reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( &buildInfo ), maxPrimitiveCounts.data(), reinterpret_cast<VkAccelerationStructureBuildSizesInfoKHR *>( &sizeInfo ) );
+    
+    
+    return sizeInfo;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_sampler_ycbcr_conversion ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateSamplerYcbcrConversionKHR( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkSamplerYcbcrConversion *>( pYcbcrConversion ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type Device::createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
+    VkResult result = d.vkCreateSamplerYcbcrConversionKHR( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionKHR" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), ycbcrConversion );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>>::type Device::createSamplerYcbcrConversionKHRUnique( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
+    VkResult result = d.vkCreateSamplerYcbcrConversionKHR( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionKHRUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>( ycbcrConversion, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroySamplerYcbcrConversionKHR( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroySamplerYcbcrConversionKHR( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_bind_memory2 ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory2KHR( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkBindBufferMemory2KHR( m_device, bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo *>( pBindInfos ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindBufferMemory2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkBindBufferMemory2KHR( m_device, bindInfos.size(), reinterpret_cast<const VkBindBufferMemoryInfo *>( bindInfos.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2KHR" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory2KHR( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkBindImageMemory2KHR( m_device, bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo *>( pBindInfos ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindImageMemory2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkBindImageMemory2KHR( m_device, bindInfos.size(), reinterpret_cast<const VkBindImageMemoryInfo *>( bindInfos.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2KHR" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_EXT_image_drm_format_modifier ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT * pProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetImageDrmFormatModifierPropertiesEXT( m_device, static_cast<VkImage>( image ), reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT *>( pProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT>::type Device::getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT properties;
+    VkResult result = d.vkGetImageDrmFormatModifierPropertiesEXT( m_device, static_cast<VkImage>( image ), reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT *>( &properties ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getImageDrmFormatModifierPropertiesEXT" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_EXT_validation_cache ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pValidationCache, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateValidationCacheEXT( m_device, reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkValidationCacheEXT *>( pValidationCache ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ValidationCacheEXT>::type Device::createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache;
+    VkResult result = d.vkCreateValidationCacheEXT( m_device, reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkValidationCacheEXT *>( &validationCache ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createValidationCacheEXT" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), validationCache );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ValidationCacheEXT, Dispatch>>::type Device::createValidationCacheEXTUnique( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache;
+    VkResult result = d.vkCreateValidationCacheEXT( m_device, reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkValidationCacheEXT *>( &validationCache ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createValidationCacheEXTUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::ValidationCacheEXT, Dispatch>( validationCache, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyValidationCacheEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyValidationCacheEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyValidationCacheEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyValidationCacheEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, uint32_t srcCacheCount, const VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pSrcCaches, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkMergeValidationCachesEXT( m_device, static_cast<VkValidationCacheEXT>( dstCache ), srcCacheCount, reinterpret_cast<const VkValidationCacheEXT *>( pSrcCaches ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ValidationCacheEXT> const & srcCaches, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkMergeValidationCachesEXT( m_device, static_cast<VkValidationCacheEXT>( dstCache ), srcCaches.size(), reinterpret_cast<const VkValidationCacheEXT *>( srcCaches.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::mergeValidationCachesEXT" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, size_t * pDataSize, void * pData, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), pDataSize, pData ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Uint8_tAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<uint8_t, Uint8_tAllocator> data;
+    size_t dataSize;
+    VkResult result;
+    do
+    {
+      result = d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, nullptr );
+      if ( ( result == VK_SUCCESS ) && dataSize )
+      {
+        data.resize( dataSize );
+        result = d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, reinterpret_cast<void *>( data.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getValidationCacheDataEXT" );
+    VULKAN_HPP_ASSERT( dataSize <= data.size() );
+    if ( dataSize < data.size() )
+    {
+      data.resize( dataSize );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
+  }
+
+  template <typename Uint8_tAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, uint8_t>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<uint8_t, Uint8_tAllocator> data( uint8_tAllocator );
+    size_t dataSize;
+    VkResult result;
+    do
+    {
+      result = d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, nullptr );
+      if ( ( result == VK_SUCCESS ) && dataSize )
+      {
+        data.resize( dataSize );
+        result = d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, reinterpret_cast<void *>( data.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getValidationCacheDataEXT" );
+    VULKAN_HPP_ASSERT( dataSize <= data.size() );
+    if ( dataSize < data.size() )
+    {
+      data.resize( dataSize );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_NV_shading_rate_image ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdBindShadingRateImageNV( m_commandBuffer, static_cast<VkImageView>( imageView ), static_cast<VkImageLayout>( imageLayout ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetViewportShadingRatePaletteNV( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkShadingRatePaletteNV *>( pShadingRatePalettes ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( uint32_t firstViewport, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const & shadingRatePalettes, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdSetViewportShadingRatePaletteNV( m_commandBuffer, firstViewport, shadingRatePalettes.size(), reinterpret_cast<const VkShadingRatePaletteNV *>( shadingRatePalettes.data() ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer, static_cast<VkCoarseSampleOrderTypeNV>( sampleOrderType ), customSampleOrderCount, reinterpret_cast<const VkCoarseSampleOrderCustomNV *>( pCustomSampleOrders ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> const & customSampleOrders, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer, static_cast<VkCoarseSampleOrderTypeNV>( sampleOrderType ), customSampleOrders.size(), reinterpret_cast<const VkCoarseSampleOrderCustomNV *>( customSampleOrders.data() ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_NV_ray_tracing ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructure, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateAccelerationStructureNV( m_device, reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkAccelerationStructureNV *>( pAccelerationStructure ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::AccelerationStructureNV>::type Device::createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure;
+    VkResult result = d.vkCreateAccelerationStructureNV( m_device, reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkAccelerationStructureNV *>( &accelerationStructure ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureNV" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), accelerationStructure );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::AccelerationStructureNV, Dispatch>>::type Device::createAccelerationStructureNVUnique( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure;
+    VkResult result = d.vkCreateAccelerationStructureNV( m_device, reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkAccelerationStructureNV *>( &accelerationStructure ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureNVUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::AccelerationStructureNV, Dispatch>( accelerationStructure, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyAccelerationStructureNV( m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyAccelerationStructureNV( m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyAccelerationStructureNV( m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyAccelerationStructureNV( m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV * pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR * pMemoryRequirements, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetAccelerationStructureMemoryRequirementsNV( m_device, reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( pInfo ), reinterpret_cast<VkMemoryRequirements2KHR *>( pMemoryRequirements ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR Device::getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR memoryRequirements;
+    d.vkGetAccelerationStructureMemoryRequirementsNV( m_device, reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( &info ), reinterpret_cast<VkMemoryRequirements2KHR *>( &memoryRequirements ) );
+    
+    
+    return memoryRequirements;
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR>();
+    d.vkGetAccelerationStructureMemoryRequirementsNV( m_device, reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( &info ), reinterpret_cast<VkMemoryRequirements2KHR *>( &memoryRequirements ) );
+    
+    
+    return structureChain;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindAccelerationStructureMemoryNV( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV * pBindInfos, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkBindAccelerationStructureMemoryNV( m_device, bindInfoCount, reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV *>( pBindInfos ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindAccelerationStructureMemoryNV( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV> const & bindInfos, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkBindAccelerationStructureMemoryNV( m_device, bindInfos.size(), reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV *>( bindInfos.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindAccelerationStructureMemoryNV" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV * pInfo, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdBuildAccelerationStructureNV( m_commandBuffer, reinterpret_cast<const VkAccelerationStructureInfoNV *>( pInfo ), static_cast<VkBuffer>( instanceData ), static_cast<VkDeviceSize>( instanceOffset ), static_cast<VkBool32>( update ), static_cast<VkAccelerationStructureNV>( dst ), static_cast<VkAccelerationStructureNV>( src ), static_cast<VkBuffer>( scratch ), static_cast<VkDeviceSize>( scratchOffset ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV & info, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdBuildAccelerationStructureNV( m_commandBuffer, reinterpret_cast<const VkAccelerationStructureInfoNV *>( &info ), static_cast<VkBuffer>( instanceData ), static_cast<VkDeviceSize>( instanceOffset ), static_cast<VkBool32>( update ), static_cast<VkAccelerationStructureNV>( dst ), static_cast<VkAccelerationStructureNV>( src ), static_cast<VkBuffer>( scratch ), static_cast<VkDeviceSize>( scratchOffset ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdCopyAccelerationStructureNV( m_commandBuffer, static_cast<VkAccelerationStructureNV>( dst ), static_cast<VkAccelerationStructureNV>( src ), static_cast<VkCopyAccelerationStructureModeKHR>( mode ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdTraceRaysNV( m_commandBuffer, static_cast<VkBuffer>( raygenShaderBindingTableBuffer ), static_cast<VkDeviceSize>( raygenShaderBindingOffset ), static_cast<VkBuffer>( missShaderBindingTableBuffer ), static_cast<VkDeviceSize>( missShaderBindingOffset ), static_cast<VkDeviceSize>( missShaderBindingStride ), static_cast<VkBuffer>( hitShaderBindingTableBuffer ), static_cast<VkDeviceSize>( hitShaderBindingOffset ), static_cast<VkDeviceSize>( hitShaderBindingStride ), static_cast<VkBuffer>( callableShaderBindingTableBuffer ), static_cast<VkDeviceSize>( callableShaderBindingOffset ), static_cast<VkDeviceSize>( callableShaderBindingStride ), width, height, depth );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV * pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateRayTracingPipelinesNV( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfoCount, reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( pCreateInfos ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkPipeline *>( pPipelines ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename PipelineAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size() );
+    VkResult result = d.vkCreateRayTracingPipelinesNV( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipeline *>( pipelines.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNV", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+    
+    return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelines );
+  }
+
+  template <typename PipelineAllocator, typename Dispatch, typename B0, typename std::enable_if<std::is_same<typename B0::value_type, Pipeline>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator );
+    VkResult result = d.vkCreateRayTracingPipelinesNV( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipeline *>( pipelines.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNV", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+    
+    return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelines );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<VULKAN_HPP_NAMESPACE::Pipeline> Device::createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::Pipeline pipeline;
+    VkResult result = d.vkCreateRayTracingPipelinesNV( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1, reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipeline *>( &pipeline ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineNV", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+    
+    return ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipeline );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch, typename PipelineAllocator>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> Device::createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
+    VkResult result = d.vkCreateRayTracingPipelinesNV( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipeline *>( pipelines.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
+    uniquePipelines.reserve( createInfos.size() );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    for ( auto const & pipeline : pipelines )
+    {
+      uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
+    }
+    return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
+  }
+
+  template <typename Dispatch, typename PipelineAllocator, typename B0, typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> Device::createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
+    VkResult result = d.vkCreateRayTracingPipelinesNV( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipeline *>( pipelines.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
+    uniquePipelines.reserve( createInfos.size() );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    for ( auto const & pipeline : pipelines )
+    {
+      uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
+    }
+    return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>> Device::createRayTracingPipelineNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::Pipeline pipeline;
+    VkResult result = d.vkCreateRayTracingPipelinesNV( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1, reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipeline *>( &pipeline ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineNVUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+    
+    return ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>( pipeline, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void * pData, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesNV( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename DataType, typename DataTypeAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+        VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
+    std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) );
+    VkResult result = d.vkGetRayTracingShaderGroupHandlesNV( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() * sizeof( DataType ), reinterpret_cast<void *>( data.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesNV" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
+  }
+
+  template <typename DataType, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type Device::getRayTracingShaderGroupHandleNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    DataType data;
+    VkResult result = d.vkGetRayTracingShaderGroupHandlesNV( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, sizeof( DataType ), reinterpret_cast<void *>( &data ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandleNV" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, size_t dataSize, void * pData, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetAccelerationStructureHandleNV( m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), dataSize, pData ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename DataType, typename DataTypeAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, size_t dataSize, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+        VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
+    std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) );
+    VkResult result = d.vkGetAccelerationStructureHandleNV( m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), data.size() * sizeof( DataType ), reinterpret_cast<void *>( data.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
+  }
+
+  template <typename DataType, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    DataType data;
+    VkResult result = d.vkGetAccelerationStructureHandleNV( m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), sizeof( DataType ), reinterpret_cast<void *>( &data ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer, accelerationStructureCount, reinterpret_cast<const VkAccelerationStructureNV *>( pAccelerationStructures ), static_cast<VkQueryType>( queryType ), static_cast<VkQueryPool>( queryPool ), firstQuery );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> const & accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer, accelerationStructures.size(), reinterpret_cast<const VkAccelerationStructureNV *>( accelerationStructures.data() ), static_cast<VkQueryType>( queryType ), static_cast<VkQueryPool>( queryPool ), firstQuery );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCompileDeferredNV( m_device, static_cast<VkPipeline>( pipeline ), shader ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkCompileDeferredNV( m_device, static_cast<VkPipeline>( pipeline ), shader );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::compileDeferredNV" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  //=== VK_KHR_maintenance3 ===
+
+
+  // C-style overload: forwards the caller-supplied pointers straight to
+  // vkGetDescriptorSetLayoutSupportKHR; only reinterpret_casts, no allocation or error handling.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetDescriptorSetLayoutSupportKHR( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( pSupport ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced overload: queries into a local DescriptorSetLayoutSupport and returns it by value.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support;
+    d.vkGetDescriptorSetLayoutSupportKHR( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
+    
+    
+    return support;
+  }
+
+  // StructureChain overload: fills the DescriptorSetLayoutSupport link of a caller-chosen
+  // structure chain so chained extension structs are populated by the same query.
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support = structureChain.template get<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>();
+    d.vkGetDescriptorSetLayoutSupportKHR( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
+    
+    
+    return structureChain;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_draw_indirect_count ===
+
+
+  // Thin wrapper: unwraps the handle/size types and forwards to vkCmdDrawIndirectCountKHR.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDrawIndirectCountKHR( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
+  }
+
+
+  // Thin wrapper: same shape as above for the indexed variant, vkCmdDrawIndexedIndirectCountKHR.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDrawIndexedIndirectCountKHR( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
+  }
+
+  //=== VK_EXT_external_memory_host ===
+
+
+  // C-style overload: returns the raw Result, writes through the caller's out-pointer.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void * pHostPointer, VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT * pMemoryHostPointerProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetMemoryHostPointerPropertiesEXT( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), pHostPointer, reinterpret_cast<VkMemoryHostPointerPropertiesEXT *>( pMemoryHostPointerProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced overload: returns the properties by value; error handling is delegated to
+  // resultCheck (defined elsewhere in this header).
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT>::type Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void * pHostPointer, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT memoryHostPointerProperties;
+    VkResult result = d.vkGetMemoryHostPointerPropertiesEXT( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), pHostPointer, reinterpret_cast<VkMemoryHostPointerPropertiesEXT *>( &memoryHostPointerProperties ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryHostPointerPropertiesEXT" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), memoryHostPointerProperties );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_AMD_buffer_marker ===
+
+
+  // Thin wrapper: unwraps enum/handle/size types and forwards to vkCmdWriteBufferMarkerAMD.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, uint32_t marker, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdWriteBufferMarkerAMD( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), marker );
+  }
+
+  //=== VK_EXT_calibrated_timestamps ===
+
+
+  // C-style overload of the count/array query; caller drives the two-call protocol.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getCalibrateableTimeDomainsEXT( uint32_t * pTimeDomainCount, VULKAN_HPP_NAMESPACE::TimeDomainEXT * pTimeDomains, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, pTimeDomainCount, reinterpret_cast<VkTimeDomainEXT *>( pTimeDomains ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced overload: standard Vulkan two-call enumeration — query the count, size the
+  // vector, fetch; loop while VK_INCOMPLETE since the count can change between calls.
+  template <typename TimeDomainEXTAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::TimeDomainEXT, TimeDomainEXTAllocator>>::type PhysicalDevice::getCalibrateableTimeDomainsEXT( Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::TimeDomainEXT, TimeDomainEXTAllocator> timeDomains;
+    uint32_t timeDomainCount;
+    VkResult result;
+    do
+    {
+      result = d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && timeDomainCount )
+      {
+        timeDomains.resize( timeDomainCount );
+        result = d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, reinterpret_cast<VkTimeDomainEXT *>( timeDomains.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsEXT" );
+    VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() );
+    // Shrink if the driver reported fewer domains on the final call than we allocated.
+    if ( timeDomainCount < timeDomains.size() )
+    {
+      timeDomains.resize( timeDomainCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), timeDomains );
+  }
+
+  // Allocator-taking variant of the overload above; B1's enable_if constraint lives at the
+  // declaration site. Same two-call enumeration loop.
+  template <typename TimeDomainEXTAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, TimeDomainEXT>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::TimeDomainEXT, TimeDomainEXTAllocator>>::type PhysicalDevice::getCalibrateableTimeDomainsEXT( TimeDomainEXTAllocator & timeDomainEXTAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::TimeDomainEXT, TimeDomainEXTAllocator> timeDomains( timeDomainEXTAllocator );
+    uint32_t timeDomainCount;
+    VkResult result;
+    do
+    {
+      result = d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && timeDomainCount )
+      {
+        timeDomains.resize( timeDomainCount );
+        result = d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, reinterpret_cast<VkTimeDomainEXT *>( timeDomains.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsEXT" );
+    VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() );
+    if ( timeDomainCount < timeDomains.size() )
+    {
+      timeDomains.resize( timeDomainCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), timeDomains );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // C-style overload: caller supplies the timestamp/deviation out-buffers directly.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getCalibratedTimestampsEXT( uint32_t timestampCount, const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT * pTimestampInfos, uint64_t * pTimestamps, uint64_t * pMaxDeviation, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetCalibratedTimestampsEXT( m_device, timestampCount, reinterpret_cast<const VkCalibratedTimestampInfoEXT *>( pTimestampInfos ), pTimestamps, pMaxDeviation ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced overload: returns (timestamps vector, maxDeviation) as a pair; the vector is
+  // pre-sized to one slot per entry in timestampInfos.
+  template <typename Uint64_tAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t>>::type Device::getCalibratedTimestampsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> const & timestampInfos, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::pair<std::vector<uint64_t, Uint64_tAllocator>,uint64_t> data( std::piecewise_construct, std::forward_as_tuple( timestampInfos.size() ), std::forward_as_tuple( 0 ) );
+    std::vector<uint64_t, Uint64_tAllocator> & timestamps = data.first;
+    uint64_t & maxDeviation = data.second;
+    VkResult result = d.vkGetCalibratedTimestampsEXT( m_device, timestampInfos.size(), reinterpret_cast<const VkCalibratedTimestampInfoEXT *>( timestampInfos.data() ), timestamps.data(), &maxDeviation );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
+  }
+
+  // Allocator-taking variant of the overload above (B0 constraint at declaration site).
+  template <typename Uint64_tAllocator, typename Dispatch, typename B0, typename std::enable_if<std::is_same<typename B0::value_type, uint64_t>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t>>::type Device::getCalibratedTimestampsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> const & timestampInfos, Uint64_tAllocator & uint64_tAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::pair<std::vector<uint64_t, Uint64_tAllocator>,uint64_t> data( std::piecewise_construct, std::forward_as_tuple( timestampInfos.size(), uint64_tAllocator ), std::forward_as_tuple( 0 ) );
+    std::vector<uint64_t, Uint64_tAllocator> & timestamps = data.first;
+    uint64_t & maxDeviation = data.second;
+    VkResult result = d.vkGetCalibratedTimestampsEXT( m_device, timestampInfos.size(), reinterpret_cast<const VkCalibratedTimestampInfoEXT *>( timestampInfos.data() ), timestamps.data(), &maxDeviation );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
+  }
+
+  // Single-info convenience: queries exactly one timestamp and returns
+  // (timestamp, maxDeviation) without a vector.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<uint64_t, uint64_t>>::type Device::getCalibratedTimestampEXT( const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT & timestampInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::pair<uint64_t,uint64_t> data;
+    uint64_t & timestamp = data.first;
+    uint64_t & maxDeviation = data.second;
+    VkResult result = d.vkGetCalibratedTimestampsEXT( m_device, 1, reinterpret_cast<const VkCalibratedTimestampInfoEXT *>( &timestampInfo ), &timestamp, &maxDeviation );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampEXT" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_NV_mesh_shader ===
+
+
+  // Thin wrapper: forwards the two counts unchanged to vkCmdDrawMeshTasksNV.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDrawMeshTasksNV( m_commandBuffer, taskCount, firstTask );
+  }
+
+
+  // Thin wrapper: unwraps buffer/offset and forwards to vkCmdDrawMeshTasksIndirectNV.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDrawMeshTasksIndirectNV( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
+  }
+
+
+  // Thin wrapper: count-buffer variant, forwards to vkCmdDrawMeshTasksIndirectCountNV.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDrawMeshTasksIndirectCountNV( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
+  }
+
+  //=== VK_NV_scissor_exclusive ===
+
+
+  // C-style overload: caller passes an explicit count + pointer pair.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetExclusiveScissorNV( m_commandBuffer, firstExclusiveScissor, exclusiveScissorCount, reinterpret_cast<const VkRect2D *>( pExclusiveScissors ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced overload: derives count/pointer from an ArrayProxy of Rect2D.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdSetExclusiveScissorNV( m_commandBuffer, firstExclusiveScissor, exclusiveScissors.size(), reinterpret_cast<const VkRect2D *>( exclusiveScissors.data() ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_NV_device_diagnostic_checkpoints ===
+
+
+  // C-style overload: forwards the opaque marker pointer unchanged.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( const void * pCheckpointMarker, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetCheckpointNV( m_commandBuffer, pCheckpointMarker );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced overload: passes the address of the caller's marker object as the opaque
+  // pointer. NOTE(review): the marker is captured by address, so the referenced object
+  // must outlive the checkpoint's use — verify caller lifetimes.
+  template <typename CheckpointMarkerType, typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( CheckpointMarkerType const & checkpointMarker, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdSetCheckpointNV( m_commandBuffer, reinterpret_cast<const void *>( &checkpointMarker ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // C-style overload of the count/array query; void return (no VkResult for this entry point).
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Queue::getCheckpointDataNV( uint32_t * pCheckpointDataCount, VULKAN_HPP_NAMESPACE::CheckpointDataNV * pCheckpointData, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetQueueCheckpointDataNV( m_queue, pCheckpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( pCheckpointData ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced overload: two-call pattern without a VK_INCOMPLETE retry loop (the underlying
+  // command returns void), then shrinks if fewer entries were written than allocated.
+  template <typename CheckpointDataNVAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV, CheckpointDataNVAllocator> Queue::getCheckpointDataNV( Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV, CheckpointDataNVAllocator> checkpointData;
+    uint32_t checkpointDataCount;
+    d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, nullptr );
+    checkpointData.resize( checkpointDataCount );
+    d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( checkpointData.data() ) );
+    
+    VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
+    if ( checkpointDataCount < checkpointData.size() )
+    {
+      checkpointData.resize( checkpointDataCount );
+    }
+    return checkpointData;
+  }
+
+  // Allocator-taking variant of the overload above (B1 constraint at declaration site).
+  template <typename CheckpointDataNVAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, CheckpointDataNV>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV, CheckpointDataNVAllocator> Queue::getCheckpointDataNV( CheckpointDataNVAllocator & checkpointDataNVAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV, CheckpointDataNVAllocator> checkpointData( checkpointDataNVAllocator );
+    uint32_t checkpointDataCount;
+    d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, nullptr );
+    checkpointData.resize( checkpointDataCount );
+    d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( checkpointData.data() ) );
+    
+    VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
+    if ( checkpointDataCount < checkpointData.size() )
+    {
+      checkpointData.resize( checkpointDataCount );
+    }
+    return checkpointData;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_timeline_semaphore ===
+
+
+  // C-style overload: writes the counter through pValue, returns the raw Result.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t * pValue, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetSemaphoreCounterValueKHR( m_device, static_cast<VkSemaphore>( semaphore ), pValue ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced overload: returns the counter by value; errors go through resultCheck.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type Device::getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    uint64_t value;
+    VkResult result = d.vkGetSemaphoreCounterValueKHR( m_device, static_cast<VkSemaphore>( semaphore ), &value );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreCounterValueKHR" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), value );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // C-style overload: forwards the wait-info pointer and timeout, returns the raw Result.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo, uint64_t timeout, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkWaitSemaphoresKHR( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( pWaitInfo ), timeout ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced overload: resultCheck treats both eSuccess and eTimeout as non-error, so the
+  // caller receives the Result and must distinguish success from timeout itself.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkWaitSemaphoresKHR( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( &waitInfo ), timeout );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphoresKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } );
+    
+    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // C-style overload: forwards the signal-info pointer, returns the raw Result.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkSignalSemaphoreKHR( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( pSignalInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced overload: no data out-parameter, so the mapped return is ResultValueType<void>;
+  // errors go through resultCheck.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkSignalSemaphoreKHR( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( &signalInfo ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphoreKHR" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_INTEL_performance_query ===
+
+
+  // C-style overload: forwards the init-info pointer, returns the raw Result.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::initializePerformanceApiINTEL( const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL * pInitializeInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkInitializePerformanceApiINTEL( m_device, reinterpret_cast<const VkInitializePerformanceApiInfoINTEL *>( pInitializeInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced overload: reference-taking variant; errors go through resultCheck.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::initializePerformanceApiINTEL( const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL & initializeInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkInitializePerformanceApiINTEL( m_device, reinterpret_cast<const VkInitializePerformanceApiInfoINTEL *>( &initializeInfo ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::initializePerformanceApiINTEL" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // Thin wrapper: no parameters or return; forwards to vkUninitializePerformanceApiINTEL.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::uninitializePerformanceApiINTEL( Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkUninitializePerformanceApiINTEL( m_device );
+  }
+
+
+  // C-style overload: forwards the marker-info pointer, returns the raw Result.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL * pMarkerInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCmdSetPerformanceMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceMarkerInfoINTEL *>( pMarkerInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced overload: reference-taking variant; errors go through resultCheck.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL & markerInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkCmdSetPerformanceMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceMarkerInfoINTEL *>( &markerInfo ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceMarkerINTEL" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // C-style overload: forwards the stream-marker-info pointer, returns the raw Result.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL * pMarkerInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCmdSetPerformanceStreamMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL *>( pMarkerInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced overload: reference-taking variant; errors go through resultCheck.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL & markerInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkCmdSetPerformanceStreamMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL *>( &markerInfo ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceStreamMarkerINTEL" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // C-style overload: forwards the override-info pointer, returns the raw Result.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL * pOverrideInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCmdSetPerformanceOverrideINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceOverrideInfoINTEL *>( pOverrideInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced overload: reference-taking variant; errors go through resultCheck.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL & overrideInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkCmdSetPerformanceOverrideINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceOverrideInfoINTEL *>( &overrideInfo ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceOverrideINTEL" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // C-style overload: writes the acquired configuration handle through pConfiguration.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquirePerformanceConfigurationINTEL( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL * pAcquireInfo, VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL * pConfiguration, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkAcquirePerformanceConfigurationINTEL( m_device, reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( pAcquireInfo ), reinterpret_cast<VkPerformanceConfigurationINTEL *>( pConfiguration ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced overload: returns the configuration handle by value; the caller owns it and
+  // must release it (see Device::release / releasePerformanceConfigurationINTEL below).
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL>::type Device::acquirePerformanceConfigurationINTEL( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL & acquireInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration;
+    VkResult result = d.vkAcquirePerformanceConfigurationINTEL( m_device, reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( &acquireInfo ), reinterpret_cast<VkPerformanceConfigurationINTEL *>( &configuration ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::acquirePerformanceConfigurationINTEL" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), configuration );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  // Unique-handle variant: wraps the handle in a UniqueHandle whose deleter is
+  // ObjectRelease<Device, Dispatch>, releasing it automatically on destruction.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL, Dispatch>>::type Device::acquirePerformanceConfigurationINTELUnique( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL & acquireInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration;
+    VkResult result = d.vkAcquirePerformanceConfigurationINTEL( m_device, reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( &acquireInfo ), reinterpret_cast<VkPerformanceConfigurationINTEL *>( &configuration ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::acquirePerformanceConfigurationINTELUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL, Dispatch>( configuration, ObjectRelease<Device, Dispatch>( *this, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Plain build: returns the raw Result from vkReleasePerformanceConfigurationINTEL.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
+  }
+#else
+  // Enhanced build: same call, but errors go through resultCheck and the return is
+  // ResultValueType<void>.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::releasePerformanceConfigurationINTEL" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Destructor-style overload of release() for PerformanceConfigurationINTEL handles;
+  // forwards to the same vkReleasePerformanceConfigurationINTEL entry point.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
+  }
+#else
+  // Enhanced-mode variant: same call, VkResult checked via resultCheck()
+  // (note the error-context string uses the generic "::Device::release" name).
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::release" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Binds a performance configuration to this queue via
+  // vkQueueSetPerformanceConfigurationINTEL; raw Result returned untranslated.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkQueueSetPerformanceConfigurationINTEL( m_queue, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
+  }
+#else
+  // Enhanced-mode variant: VkResult is fed to resultCheck() and returned as
+  // ResultValueType<void>::type.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkQueueSetPerformanceConfigurationINTEL( m_queue, static_cast<VkPerformanceConfigurationINTEL>( configuration ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::setPerformanceConfigurationINTEL" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  // Pointer-parameter variant: caller supplies the output PerformanceValueINTEL;
+  // the raw Result from vkGetPerformanceParameterINTEL is returned untranslated.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, VULKAN_HPP_NAMESPACE::PerformanceValueINTEL * pValue, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetPerformanceParameterINTEL( m_device, static_cast<VkPerformanceParameterTypeINTEL>( parameter ), reinterpret_cast<VkPerformanceValueINTEL *>( pValue ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced variant: allocates the output value locally, checks the VkResult via
+  // resultCheck(), and returns the value wrapped in ResultValueType.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceValueINTEL>::type Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::PerformanceValueINTEL value;
+    VkResult result = d.vkGetPerformanceParameterINTEL( m_device, static_cast<VkPerformanceParameterTypeINTEL>( parameter ), reinterpret_cast<VkPerformanceValueINTEL *>( &value ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPerformanceParameterINTEL" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), value );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_AMD_display_native_hdr ===
+
+
+  // VK_AMD_display_native_hdr: toggles local dimming on the given swapchain.
+  // Void call — no result to check.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain, VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkSetLocalDimmingAMD( m_device, static_cast<VkSwapchainKHR>( swapChain ), static_cast<VkBool32>( localDimmingEnable ) );
+  }
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_imagepipe_surface ===
+
+
+  // Pointer-parameter variant: raw passthrough to vkCreateImagePipeSurfaceFUCHSIA.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createImagePipeSurfaceFUCHSIA( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateImagePipeSurfaceFUCHSIA( m_instance, reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced variant: takes the create info by reference plus an Optional allocator,
+  // checks the VkResult, and returns the created SurfaceKHR handle by value.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createImagePipeSurfaceFUCHSIA( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    VkResult result = d.vkCreateImagePipeSurfaceFUCHSIA( m_instance, reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createImagePipeSurfaceFUCHSIA" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  // Unique-handle variant: as above, but the surface is wrapped in a UniqueHandle
+  // whose deleter (ObjectDestroy) captures this instance, the allocator and the dispatcher.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type Instance::createImagePipeSurfaceFUCHSIAUnique( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    VkResult result = d.vkCreateImagePipeSurfaceFUCHSIA( m_instance, reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createImagePipeSurfaceFUCHSIAUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+  //=== VK_EXT_metal_surface ===
+
+
+  // Pointer-parameter variant: raw passthrough to vkCreateMetalSurfaceEXT.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateMetalSurfaceEXT( m_instance, reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced variant: reference create info plus Optional allocator; VkResult is
+  // checked and the new SurfaceKHR is returned by value.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    VkResult result = d.vkCreateMetalSurfaceEXT( m_instance, reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createMetalSurfaceEXT" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  // Unique-handle variant: surface ownership is transferred to a UniqueHandle with
+  // an ObjectDestroy deleter bound to this instance, the allocator and the dispatcher.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type Instance::createMetalSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    VkResult result = d.vkCreateMetalSurfaceEXT( m_instance, reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createMetalSurfaceEXTUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  //=== VK_KHR_fragment_shading_rate ===
+
+
+  // Pointer-parameter variant of the VK_KHR_fragment_shading_rate query:
+  // raw passthrough to vkGetPhysicalDeviceFragmentShadingRatesKHR.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getFragmentShadingRatesKHR( uint32_t * pFragmentShadingRateCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR * pFragmentShadingRates, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, pFragmentShadingRateCount, reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( pFragmentShadingRates ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced variant: two-call enumeration — query the count, resize the vector,
+  // fetch the data; the loop repeats while the implementation returns VK_INCOMPLETE.
+  template <typename PhysicalDeviceFragmentShadingRateKHRAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator>>::type PhysicalDevice::getFragmentShadingRatesKHR( Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator> fragmentShadingRates;
+    uint32_t fragmentShadingRateCount;
+    VkResult result;
+    do
+    {
+      result = d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, &fragmentShadingRateCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && fragmentShadingRateCount )
+      {
+        fragmentShadingRates.resize( fragmentShadingRateCount );
+        result = d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, &fragmentShadingRateCount, reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( fragmentShadingRates.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getFragmentShadingRatesKHR" );
+    VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() );
+    // Shrink if the final fetch reported fewer elements than the vector holds.
+    if ( fragmentShadingRateCount < fragmentShadingRates.size() )
+    {
+      fragmentShadingRates.resize( fragmentShadingRateCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), fragmentShadingRates );
+  }
+
+  // Allocator-taking overload: identical enumeration loop, but the result vector is
+  // constructed with the caller-supplied allocator (SFINAE-restricted via B1).
+  template <typename PhysicalDeviceFragmentShadingRateKHRAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, PhysicalDeviceFragmentShadingRateKHR>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator>>::type PhysicalDevice::getFragmentShadingRatesKHR( PhysicalDeviceFragmentShadingRateKHRAllocator & physicalDeviceFragmentShadingRateKHRAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator> fragmentShadingRates( physicalDeviceFragmentShadingRateKHRAllocator );
+    uint32_t fragmentShadingRateCount;
+    VkResult result;
+    do
+    {
+      result = d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, &fragmentShadingRateCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && fragmentShadingRateCount )
+      {
+        fragmentShadingRates.resize( fragmentShadingRateCount );
+        result = d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, &fragmentShadingRateCount, reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( fragmentShadingRates.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getFragmentShadingRatesKHR" );
+    VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() );
+    if ( fragmentShadingRateCount < fragmentShadingRates.size() )
+    {
+      fragmentShadingRates.resize( fragmentShadingRateCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), fragmentShadingRates );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // Records the fragment shading rate state into this command buffer.
+  // combinerOps is a fixed two-element array as required by the command signature.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D * pFragmentSize, const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetFragmentShadingRateKHR( m_commandBuffer, reinterpret_cast<const VkExtent2D *>( pFragmentSize ), reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Reference-parameter variant: identical call, fragment size taken by reference.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D & fragmentSize, const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdSetFragmentShadingRateKHR( m_commandBuffer, reinterpret_cast<const VkExtent2D *>( &fragmentSize ), reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_EXT_buffer_device_address ===
+
+
+  // VK_EXT_buffer_device_address: returns the device address for the buffer described
+  // by pInfo, via vkGetBufferDeviceAddressEXT. Pointer-parameter variant.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<DeviceAddress>( d.vkGetBufferDeviceAddressEXT( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Reference-parameter variant: same call; no VkResult is produced, so there is
+  // no resultCheck step — the raw VkDeviceAddress is cast and returned.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress Device::getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkDeviceAddress result = d.vkGetBufferDeviceAddressEXT( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
+    
+    
+    return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_EXT_tooling_info ===
+
+
+  // VK_EXT_tooling_info: pointer-parameter variant, raw passthrough to
+  // vkGetPhysicalDeviceToolPropertiesEXT.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getToolPropertiesEXT( uint32_t * pToolCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties * pToolProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, pToolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( pToolProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced variant: standard two-call enumeration (count, resize, fetch),
+  // retried while the implementation returns VK_INCOMPLETE.
+  template <typename PhysicalDeviceToolPropertiesAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator>>::type PhysicalDevice::getToolPropertiesEXT( Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator> toolProperties;
+    uint32_t toolCount;
+    VkResult result;
+    do
+    {
+      result = d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && toolCount )
+      {
+        toolProperties.resize( toolCount );
+        result = d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" );
+    VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() );
+    // Shrink if the final fetch reported fewer elements than allocated.
+    if ( toolCount < toolProperties.size() )
+    {
+      toolProperties.resize( toolCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), toolProperties );
+  }
+
+  // Allocator-taking overload: identical loop; the vector is constructed with the
+  // caller-supplied allocator (SFINAE-restricted via B1).
+  template <typename PhysicalDeviceToolPropertiesAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, PhysicalDeviceToolProperties>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator>>::type PhysicalDevice::getToolPropertiesEXT( PhysicalDeviceToolPropertiesAllocator & physicalDeviceToolPropertiesAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator> toolProperties( physicalDeviceToolPropertiesAllocator );
+    uint32_t toolCount;
+    VkResult result;
+    do
+    {
+      result = d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && toolCount )
+      {
+        toolProperties.resize( toolCount );
+        result = d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" );
+    VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() );
+    if ( toolCount < toolProperties.size() )
+    {
+      toolProperties.resize( toolCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), toolProperties );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_present_wait ===
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // VK_KHR_present_wait: raw wrapper over vkWaitForPresentKHR (presentId/timeout
+  // passed through unchanged).
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitForPresentKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t presentId, uint64_t timeout, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkWaitForPresentKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), presentId, timeout ) );
+  }
+#else
+  // Enhanced variant: eSuccess, eTimeout and eSuboptimalKHR are all accepted by
+  // resultCheck() (non-error outcomes), so the Result itself is returned to the caller.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::waitForPresentKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t presentId, uint64_t timeout, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkWaitForPresentKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), presentId, timeout );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::waitForPresentKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
+    
+    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  //=== VK_NV_cooperative_matrix ===
+
+
+  // VK_NV_cooperative_matrix: pointer-parameter variant, raw passthrough to
+  // vkGetPhysicalDeviceCooperativeMatrixPropertiesNV.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getCooperativeMatrixPropertiesNV( uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV * pProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, pPropertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( pProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced variant: two-call enumeration (count, resize, fetch), retried while
+  // the implementation returns VK_INCOMPLETE.
+  template <typename CooperativeMatrixPropertiesNVAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator>>::type PhysicalDevice::getCooperativeMatrixPropertiesNV( Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator> properties;
+    uint32_t propertyCount;
+    VkResult result;
+    do
+    {
+      result = d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( properties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesNV" );
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    // Shrink if the final fetch reported fewer elements than allocated.
+    if ( propertyCount < properties.size() )
+    {
+      properties.resize( propertyCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
+  }
+
+  // Allocator-taking overload: identical loop; vector constructed with the
+  // caller-supplied allocator (SFINAE-restricted via B1).
+  template <typename CooperativeMatrixPropertiesNVAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, CooperativeMatrixPropertiesNV>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator>>::type PhysicalDevice::getCooperativeMatrixPropertiesNV( CooperativeMatrixPropertiesNVAllocator & cooperativeMatrixPropertiesNVAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator> properties( cooperativeMatrixPropertiesNVAllocator );
+    uint32_t propertyCount;
+    VkResult result;
+    do
+    {
+      result = d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( properties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesNV" );
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    if ( propertyCount < properties.size() )
+    {
+      properties.resize( propertyCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_NV_coverage_reduction_mode ===
+
+
+  // VK_NV_coverage_reduction_mode: pointer-parameter variant, raw passthrough to
+  // vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV( uint32_t * pCombinationCount, VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV * pCombinations, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, pCombinationCount, reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( pCombinations ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced variant: two-call enumeration (count, resize, fetch), retried while
+  // the implementation returns VK_INCOMPLETE.
+  template <typename FramebufferMixedSamplesCombinationNVAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator>>::type PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV( Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator> combinations;
+    uint32_t combinationCount;
+    VkResult result;
+    do
+    {
+      result = d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, &combinationCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && combinationCount )
+      {
+        combinations.resize( combinationCount );
+        result = d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, &combinationCount, reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( combinations.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" );
+    VULKAN_HPP_ASSERT( combinationCount <= combinations.size() );
+    // Shrink if the final fetch reported fewer elements than allocated.
+    if ( combinationCount < combinations.size() )
+    {
+      combinations.resize( combinationCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), combinations );
+  }
+
+  // Allocator-taking overload: identical loop; vector constructed with the
+  // caller-supplied allocator (SFINAE-restricted via B1).
+  template <typename FramebufferMixedSamplesCombinationNVAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, FramebufferMixedSamplesCombinationNV>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator>>::type PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV( FramebufferMixedSamplesCombinationNVAllocator & framebufferMixedSamplesCombinationNVAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator> combinations( framebufferMixedSamplesCombinationNVAllocator );
+    uint32_t combinationCount;
+    VkResult result;
+    do
+    {
+      result = d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, &combinationCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && combinationCount )
+      {
+        combinations.resize( combinationCount );
+        result = d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, &combinationCount, reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( combinations.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" );
+    VULKAN_HPP_ASSERT( combinationCount <= combinations.size() );
+    if ( combinationCount < combinations.size() )
+    {
+      combinations.resize( combinationCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), combinations );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_EXT_full_screen_exclusive ===
+
+
+  // VK_EXT_full_screen_exclusive (Win32 only): pointer-parameter variant, raw
+  // passthrough to vkGetPhysicalDeviceSurfacePresentModes2EXT.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, uint32_t * pPresentModeCount, VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ), pPresentModeCount, reinterpret_cast<VkPresentModeKHR *>( pPresentModes ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced variant: two-call enumeration of present modes for the given surface
+  // info (count, resize, fetch), retried while the implementation returns VK_INCOMPLETE.
+  template <typename PresentModeKHRAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator>>::type PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator> presentModes;
+    uint32_t presentModeCount;
+    VkResult result;
+    do
+    {
+      result = d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &presentModeCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && presentModeCount )
+      {
+        presentModes.resize( presentModeCount );
+        result = d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &presentModeCount, reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" );
+    VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
+    // Shrink if the final fetch reported fewer elements than allocated.
+    if ( presentModeCount < presentModes.size() )
+    {
+      presentModes.resize( presentModeCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), presentModes );
+  }
+
+  template <typename PresentModeKHRAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, PresentModeKHR>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator>>::type PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, PresentModeKHRAllocator & presentModeKHRAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator> presentModes( presentModeKHRAllocator );
+    uint32_t presentModeCount;
+    VkResult result;
+    do
+    {
+      result = d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &presentModeCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && presentModeCount )
+      {
+        presentModes.resize( presentModeCount );
+        result = d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &presentModeCount, reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" );
+    VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
+    if ( presentModeCount < presentModes.size() )
+    {
+      presentModes.resize( presentModeCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), presentModes );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::acquireFullScreenExclusiveModeEXT" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::releaseFullScreenExclusiveModeEXT" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModes2EXT( m_device, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( pModes ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type Device::getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes;
+    VkResult result = d.vkGetDeviceGroupSurfacePresentModes2EXT( m_device, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( &modes ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModes2EXT" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), modes );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_EXT_headless_surface ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateHeadlessSurfaceEXT( m_instance, reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    VkResult result = d.vkCreateHeadlessSurfaceEXT( m_instance, reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createHeadlessSurfaceEXT" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type Instance::createHeadlessSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    VkResult result = d.vkCreateHeadlessSurfaceEXT( m_instance, reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createHeadlessSurfaceEXTUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_buffer_device_address ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<DeviceAddress>( d.vkGetBufferDeviceAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress Device::getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkDeviceAddress result = d.vkGetBufferDeviceAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
+    
+    
+    return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return d.vkGetBufferOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    uint64_t result = d.vkGetBufferOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
+    
+    
+    return result;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return d.vkGetDeviceMemoryOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( pInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    uint64_t result = d.vkGetDeviceMemoryOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( &info ) );
+    
+    
+    return result;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_EXT_line_rasterization ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetLineStippleEXT( m_commandBuffer, lineStippleFactor, lineStipplePattern );
+  }
+
+  //=== VK_EXT_host_query_reset ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkResetQueryPoolEXT( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
+  }
+
+  //=== VK_EXT_extended_dynamic_state ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetCullModeEXT( m_commandBuffer, static_cast<VkCullModeFlags>( cullMode ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetFrontFaceEXT( m_commandBuffer, static_cast<VkFrontFace>( frontFace ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetPrimitiveTopologyEXT( m_commandBuffer, static_cast<VkPrimitiveTopology>( primitiveTopology ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCountEXT( uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport * pViewports, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetViewportWithCountEXT( m_commandBuffer, viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCountEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdSetViewportWithCountEXT( m_commandBuffer, viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCountEXT( uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetScissorWithCountEXT( m_commandBuffer, scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCountEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdSetScissorWithCountEXT( m_commandBuffer, scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2EXT( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes, const VULKAN_HPP_NAMESPACE::DeviceSize * pStrides, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdBindVertexBuffers2EXT( m_commandBuffer, firstBinding, bindingCount, reinterpret_cast<const VkBuffer *>( pBuffers ), reinterpret_cast<const VkDeviceSize *>( pOffsets ), reinterpret_cast<const VkDeviceSize *>( pSizes ), reinterpret_cast<const VkDeviceSize *>( pStrides ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2EXT( uint32_t firstBinding, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
+    VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() );
+    VULKAN_HPP_ASSERT( strides.empty() || buffers.size() == strides.size() );
+#else
+    if ( buffers.size() != offsets.size() )
+  {
+    throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != offsets.size()" );
+  }
+    if ( !sizes.empty() && buffers.size() != sizes.size() )
+  {
+    throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != sizes.size()" );
+  }
+    if ( !strides.empty() && buffers.size() != strides.size() )
+  {
+    throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != strides.size()" );
+  }
+#endif  /*VULKAN_HPP_NO_EXCEPTIONS*/
+    
+    
+    d.vkCmdBindVertexBuffers2EXT( m_commandBuffer, firstBinding, buffers.size(), reinterpret_cast<const VkBuffer *>( buffers.data() ), reinterpret_cast<const VkDeviceSize *>( offsets.data() ), reinterpret_cast<const VkDeviceSize *>( sizes.data() ), reinterpret_cast<const VkDeviceSize *>( strides.data() ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetDepthTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthTestEnable ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetDepthWriteEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthWriteEnable ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetDepthCompareOpEXT( m_commandBuffer, static_cast<VkCompareOp>( depthCompareOp ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetDepthBoundsTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthBoundsTestEnable ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetStencilTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( stencilTestEnable ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, VULKAN_HPP_NAMESPACE::StencilOp failOp, VULKAN_HPP_NAMESPACE::StencilOp passOp, VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, VULKAN_HPP_NAMESPACE::CompareOp compareOp, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetStencilOpEXT( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), static_cast<VkStencilOp>( failOp ), static_cast<VkStencilOp>( passOp ), static_cast<VkStencilOp>( depthFailOp ), static_cast<VkCompareOp>( compareOp ) );
+  }
+
+  //=== VK_KHR_deferred_host_operations ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDeferredOperationKHR( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::DeferredOperationKHR * pDeferredOperation, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateDeferredOperationKHR( m_device, reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkDeferredOperationKHR *>( pDeferredOperation ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeferredOperationKHR>::type Device::createDeferredOperationKHR( Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation;
+    VkResult result = d.vkCreateDeferredOperationKHR( m_device, reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDeferredOperationKHR *>( &deferredOperation ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDeferredOperationKHR" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), deferredOperation );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DeferredOperationKHR, Dispatch>>::type Device::createDeferredOperationKHRUnique( Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation;
+    VkResult result = d.vkCreateDeferredOperationKHR( m_device, reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDeferredOperationKHR *>( &deferredOperation ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDeferredOperationKHRUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::DeferredOperationKHR, Dispatch>( deferredOperation, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyDeferredOperationKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyDeferredOperationKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyDeferredOperationKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyDeferredOperationKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE uint32_t Device::getDeferredOperationMaxConcurrencyKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return d.vkGetDeferredOperationMaxConcurrencyKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) );
+  }
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetDeferredOperationResultKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkGetDeferredOperationResultKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) );
+    
+    
+    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkDeferredOperationJoinKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkDeferredOperationJoinKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::deferredOperationJoinKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eThreadDoneKHR, VULKAN_HPP_NAMESPACE::Result::eThreadIdleKHR } );
+    
+    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  //=== VK_KHR_pipeline_executable_properties ===
+
+
+  // Raw-pointer overload: forwards straight to the C entry point; the caller
+  // drives the count/array enumeration protocol via pExecutableCount.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR * pPipelineInfo, uint32_t * pExecutableCount, VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR * pProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast<const VkPipelineInfoKHR *>( pPipelineInfo ), pExecutableCount, reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( pProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced overload: two-call enumeration (query count, resize, fetch into a
+  // vector), retried while the driver returns VK_INCOMPLETE in case the count
+  // changed between the two calls.
+  template <typename PipelineExecutablePropertiesKHRAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator>>::type Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator> properties;
+    uint32_t executableCount;
+    VkResult result;
+    do
+    {
+      result = d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ), &executableCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && executableCount )
+      {
+        properties.resize( executableCount );
+        result = d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ), &executableCount, reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( properties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutablePropertiesKHR" );
+    VULKAN_HPP_ASSERT( executableCount <= properties.size() );
+    // Shrink if the final call produced fewer elements than were reserved.
+    if ( executableCount < properties.size() )
+    {
+      properties.resize( executableCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
+  }
+
+  // Same as above, but the result vector is constructed from a caller-supplied
+  // allocator (enabled only when B1::value_type matches the element type).
+  template <typename PipelineExecutablePropertiesKHRAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, PipelineExecutablePropertiesKHR>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator>>::type Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo, PipelineExecutablePropertiesKHRAllocator & pipelineExecutablePropertiesKHRAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator> properties( pipelineExecutablePropertiesKHRAllocator );
+    uint32_t executableCount;
+    VkResult result;
+    do
+    {
+      result = d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ), &executableCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && executableCount )
+      {
+        properties.resize( executableCount );
+        result = d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ), &executableCount, reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( properties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutablePropertiesKHR" );
+    VULKAN_HPP_ASSERT( executableCount <= properties.size() );
+    if ( executableCount < properties.size() )
+    {
+      properties.resize( executableCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // Raw-pointer overload: direct pass-through to vkGetPipelineExecutableStatisticsKHR;
+  // caller manages the count/array enumeration protocol.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo, uint32_t * pStatisticCount, VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR * pStatistics, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( pExecutableInfo ), pStatisticCount, reinterpret_cast<VkPipelineExecutableStatisticKHR *>( pStatistics ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced overload: two-call enumeration into a vector, looping while the
+  // driver reports VK_INCOMPLETE (statistic count changed between calls).
+  template <typename PipelineExecutableStatisticKHRAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator>>::type Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator> statistics;
+    uint32_t statisticCount;
+    VkResult result;
+    do
+    {
+      result = d.vkGetPipelineExecutableStatisticsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &statisticCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && statisticCount )
+      {
+        statistics.resize( statisticCount );
+        result = d.vkGetPipelineExecutableStatisticsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &statisticCount, reinterpret_cast<VkPipelineExecutableStatisticKHR *>( statistics.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableStatisticsKHR" );
+    VULKAN_HPP_ASSERT( statisticCount <= statistics.size() );
+    // Shrink if the final call produced fewer elements than were reserved.
+    if ( statisticCount < statistics.size() )
+    {
+      statistics.resize( statisticCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), statistics );
+  }
+
+  // Same as above, with a caller-supplied allocator for the result vector.
+  template <typename PipelineExecutableStatisticKHRAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, PipelineExecutableStatisticKHR>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator>>::type Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, PipelineExecutableStatisticKHRAllocator & pipelineExecutableStatisticKHRAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator> statistics( pipelineExecutableStatisticKHRAllocator );
+    uint32_t statisticCount;
+    VkResult result;
+    do
+    {
+      result = d.vkGetPipelineExecutableStatisticsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &statisticCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && statisticCount )
+      {
+        statistics.resize( statisticCount );
+        result = d.vkGetPipelineExecutableStatisticsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &statisticCount, reinterpret_cast<VkPipelineExecutableStatisticKHR *>( statistics.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableStatisticsKHR" );
+    VULKAN_HPP_ASSERT( statisticCount <= statistics.size() );
+    if ( statisticCount < statistics.size() )
+    {
+      statistics.resize( statisticCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), statistics );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // Raw-pointer overload: direct pass-through to the C entry point.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo, uint32_t * pInternalRepresentationCount, VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR * pInternalRepresentations, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( pExecutableInfo ), pInternalRepresentationCount, reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( pInternalRepresentations ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced overload: two-call enumeration into a vector, retried while
+  // VK_INCOMPLETE is returned (count changed between the two calls).
+  template <typename PipelineExecutableInternalRepresentationKHRAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>>::type Device::getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator> internalRepresentations;
+    uint32_t internalRepresentationCount;
+    VkResult result;
+    do
+    {
+      result = d.vkGetPipelineExecutableInternalRepresentationsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &internalRepresentationCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && internalRepresentationCount )
+      {
+        internalRepresentations.resize( internalRepresentationCount );
+        result = d.vkGetPipelineExecutableInternalRepresentationsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &internalRepresentationCount, reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( internalRepresentations.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableInternalRepresentationsKHR" );
+    VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() );
+    // Shrink if the final call produced fewer elements than were reserved.
+    if ( internalRepresentationCount < internalRepresentations.size() )
+    {
+      internalRepresentations.resize( internalRepresentationCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), internalRepresentations );
+  }
+
+  // Same as above, with a caller-supplied allocator for the result vector.
+  template <typename PipelineExecutableInternalRepresentationKHRAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, PipelineExecutableInternalRepresentationKHR>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>>::type Device::getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, PipelineExecutableInternalRepresentationKHRAllocator & pipelineExecutableInternalRepresentationKHRAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator> internalRepresentations( pipelineExecutableInternalRepresentationKHRAllocator );
+    uint32_t internalRepresentationCount;
+    VkResult result;
+    do
+    {
+      result = d.vkGetPipelineExecutableInternalRepresentationsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &internalRepresentationCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && internalRepresentationCount )
+      {
+        internalRepresentations.resize( internalRepresentationCount );
+        result = d.vkGetPipelineExecutableInternalRepresentationsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &internalRepresentationCount, reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( internalRepresentations.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableInternalRepresentationsKHR" );
+    VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() );
+    if ( internalRepresentationCount < internalRepresentations.size() )
+    {
+      internalRepresentations.resize( internalRepresentationCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), internalRepresentations );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_EXT_swapchain_maintenance1 ===
+
+
+  // Raw-pointer overload: direct pass-through; result is returned, not checked.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releaseSwapchainImagesEXT( const VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT * pReleaseInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkReleaseSwapchainImagesEXT( m_device, reinterpret_cast<const VkReleaseSwapchainImagesInfoEXT *>( pReleaseInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced overload: takes the info struct by reference and routes the
+  // VkResult through resultCheck (throws/asserts on failure per configuration).
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::releaseSwapchainImagesEXT( const VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT & releaseInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkReleaseSwapchainImagesEXT( m_device, reinterpret_cast<const VkReleaseSwapchainImagesInfoEXT *>( &releaseInfo ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::releaseSwapchainImagesEXT" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_NV_device_generated_commands ===
+
+
+  // Raw-pointer overload: writes the requirements through pMemoryRequirements.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV * pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device, reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced overload: returns the MemoryRequirements2 struct by value.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
+    d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device, reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+    
+    
+    return memoryRequirements;
+  }
+
+  // StructureChain overload: fills the MemoryRequirements2 element of a
+  // caller-specified pNext chain, so extension structs can be queried too.
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
+    d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device, reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+    
+    
+    return structureChain;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // Raw-pointer overload: records the preprocess command into this command buffer.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdPreprocessGeneratedCommandsNV( m_commandBuffer, reinterpret_cast<const VkGeneratedCommandsInfoNV *>( pGeneratedCommandsInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced overload: identical call, but takes the info struct by reference.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdPreprocessGeneratedCommandsNV( m_commandBuffer, reinterpret_cast<const VkGeneratedCommandsInfoNV *>( &generatedCommandsInfo ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // Raw-pointer overload: records execution of generated commands;
+  // isPreprocessed tells the driver whether preprocessing already ran.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdExecuteGeneratedCommandsNV( m_commandBuffer, static_cast<VkBool32>( isPreprocessed ), reinterpret_cast<const VkGeneratedCommandsInfoNV *>( pGeneratedCommandsInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced overload: identical call, but takes the info struct by reference.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdExecuteGeneratedCommandsNV( m_commandBuffer, static_cast<VkBool32>( isPreprocessed ), reinterpret_cast<const VkGeneratedCommandsInfoNV *>( &generatedCommandsInfo ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // Binds one shader group of a generated-commands pipeline; single form only
+  // (all parameters are value types, so no enhanced overload is generated).
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t groupIndex, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdBindPipelineShaderGroupNV( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ), groupIndex );
+  }
+
+
+  // Raw-pointer overload: writes the new handle through pIndirectCommandsLayout.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createIndirectCommandsLayoutNV( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV * pIndirectCommandsLayout, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateIndirectCommandsLayoutNV( m_device, reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkIndirectCommandsLayoutNV *>( pIndirectCommandsLayout ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced overload: returns the created handle; allocator is optional.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV>::type Device::createIndirectCommandsLayoutNV( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout;
+    VkResult result = d.vkCreateIndirectCommandsLayoutNV( m_device, reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkIndirectCommandsLayoutNV *>( &indirectCommandsLayout ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutNV" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), indirectCommandsLayout );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  // Unique-handle variant: wraps the handle in a UniqueHandle that destroys it
+  // via ObjectDestroy on this Device when the handle goes out of scope.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV, Dispatch>>::type Device::createIndirectCommandsLayoutNVUnique( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout;
+    VkResult result = d.vkCreateIndirectCommandsLayoutNV( m_device, reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkIndirectCommandsLayoutNV *>( &indirectCommandsLayout ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutNVUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV, Dispatch>( indirectCommandsLayout, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // Raw-pointer overload: destroys the layout with optional allocation callbacks.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyIndirectCommandsLayoutNV( m_device, static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced overload: same destruction, with Optional<> allocation callbacks.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyIndirectCommandsLayoutNV( m_device, static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // Generic destroy() overload resolved by handle type; forwards to the same
+  // vkDestroyIndirectCommandsLayoutNV entry point as the named destroyer above.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyIndirectCommandsLayoutNV( m_device, static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced variant of the generic destroy() with Optional<> callbacks.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyIndirectCommandsLayoutNV( m_device, static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_EXT_acquire_drm_display ===
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Non-enhanced build: return the raw Result for the caller to inspect.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireDrmDisplayEXT( int32_t drmFd, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkAcquireDrmDisplayEXT( m_physicalDevice, drmFd, static_cast<VkDisplayKHR>( display ) ) );
+  }
+#else
+  // Enhanced build: route the VkResult through resultCheck instead.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type PhysicalDevice::acquireDrmDisplayEXT( int32_t drmFd, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkAcquireDrmDisplayEXT( m_physicalDevice, drmFd, static_cast<VkDisplayKHR>( display ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireDrmDisplayEXT" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  // Raw-pointer overload: writes the display handle through the out-parameter.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDrmDisplayEXT( int32_t drmFd, uint32_t connectorId, VULKAN_HPP_NAMESPACE::DisplayKHR * display, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetDrmDisplayEXT( m_physicalDevice, drmFd, connectorId, reinterpret_cast<VkDisplayKHR *>( display ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced overload: returns the DisplayKHR handle by value.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type PhysicalDevice::getDrmDisplayEXT( int32_t drmFd, uint32_t connectorId, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::DisplayKHR display;
+    VkResult result = d.vkGetDrmDisplayEXT( m_physicalDevice, drmFd, connectorId, reinterpret_cast<VkDisplayKHR *>( &display ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDrmDisplayEXT" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), display );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  // Unique-handle variant: releases the display via ObjectRelease on scope exit.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>>::type PhysicalDevice::getDrmDisplayEXTUnique( int32_t drmFd, uint32_t connectorId, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::DisplayKHR display;
+    VkResult result = d.vkGetDrmDisplayEXT( m_physicalDevice, drmFd, connectorId, reinterpret_cast<VkDisplayKHR *>( &display ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDrmDisplayEXTUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>( display, ObjectRelease<PhysicalDevice, Dispatch>( *this, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_EXT_private_data ===
+
+
+  // Raw-pointer overload: writes the new slot handle through pPrivateDataSlot.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPrivateDataSlotEXT( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::PrivateDataSlot * pPrivateDataSlot, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreatePrivateDataSlotEXT( m_device, reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkPrivateDataSlot *>( pPrivateDataSlot ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced overload: returns the created slot handle; allocator is optional.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PrivateDataSlot>::type Device::createPrivateDataSlotEXT( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot;
+    VkResult result = d.vkCreatePrivateDataSlotEXT( m_device, reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPrivateDataSlot *>( &privateDataSlot ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotEXT" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), privateDataSlot );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  // Unique-handle variant: destroys the slot via ObjectDestroy on scope exit.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PrivateDataSlot, Dispatch>>::type Device::createPrivateDataSlotEXTUnique( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot;
+    VkResult result = d.vkCreatePrivateDataSlotEXT( m_device, reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPrivateDataSlot *>( &privateDataSlot ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotEXTUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::PrivateDataSlot, Dispatch>( privateDataSlot, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // Raw-pointer overload: destroys the slot with optional allocation callbacks.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyPrivateDataSlotEXT( m_device, static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced overload: same destruction, with Optional<> allocation callbacks.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyPrivateDataSlotEXT( m_device, static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, uint64_t data, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkSetPrivateDataEXT( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, uint64_t data, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkSetPrivateDataEXT( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateDataEXT" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, uint64_t * pData, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPrivateDataEXT( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), pData );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Device::getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    uint64_t data;
+    d.vkGetPrivateDataEXT( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), &data );
+    
+    
+    return data;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  //=== VK_KHR_video_encode_queue ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR * pEncodeInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdEncodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoEncodeInfoKHR *>( pEncodeInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR & encodeInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdEncodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoEncodeInfoKHR *>( &encodeInfo ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+  //=== VK_EXT_metal_objects ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::exportMetalObjectsEXT( VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT * pMetalObjectsInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkExportMetalObjectsEXT( m_device, reinterpret_cast<VkExportMetalObjectsInfoEXT *>( pMetalObjectsInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT Device::exportMetalObjectsEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT metalObjectsInfo;
+    d.vkExportMetalObjectsEXT( m_device, reinterpret_cast<VkExportMetalObjectsInfoEXT *>( &metalObjectsInfo ) );
+    
+    
+    return metalObjectsInfo;
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::exportMetalObjectsEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT & metalObjectsInfo = structureChain.template get<VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT>();
+    d.vkExportMetalObjectsEXT( m_device, reinterpret_cast<VkExportMetalObjectsInfoEXT *>( &metalObjectsInfo ) );
+    
+    
+    return structureChain;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  //=== VK_KHR_synchronization2 ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetEvent2KHR( m_commandBuffer, static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdSetEvent2KHR( m_commandBuffer, static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::resetEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdResetEvent2KHR( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags2>( stageMask ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::waitEvents2KHR( uint32_t eventCount, const VULKAN_HPP_NAMESPACE::Event * pEvents, const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfos, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdWaitEvents2KHR( m_commandBuffer, eventCount, reinterpret_cast<const VkEvent *>( pEvents ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfos ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::waitEvents2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfo> const & dependencyInfos, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    VULKAN_HPP_ASSERT( events.size() == dependencyInfos.size() );
+#else
+    if ( events.size() != dependencyInfos.size() )
+  {
+    throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::waitEvents2KHR: events.size() != dependencyInfos.size()" );
+  }
+#endif  /*VULKAN_HPP_NO_EXCEPTIONS*/
+    
+    
+    d.vkCmdWaitEvents2KHR( m_commandBuffer, events.size(), reinterpret_cast<const VkEvent *>( events.data() ), reinterpret_cast<const VkDependencyInfo *>( dependencyInfos.data() ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdPipelineBarrier2KHR( m_commandBuffer, reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdPipelineBarrier2KHR( m_commandBuffer, reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp2KHR( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdWriteTimestamp2KHR( m_commandBuffer, static_cast<VkPipelineStageFlags2>( stage ), static_cast<VkQueryPool>( queryPool ), query );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::submit2KHR( uint32_t submitCount, const VULKAN_HPP_NAMESPACE::SubmitInfo2 * pSubmits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkQueueSubmit2KHR( m_queue, submitCount, reinterpret_cast<const VkSubmitInfo2 *>( pSubmits ), static_cast<VkFence>( fence ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::submit2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2> const & submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkQueueSubmit2KHR( m_queue, submits.size(), reinterpret_cast<const VkSubmitInfo2 *>( submits.data() ), static_cast<VkFence>( fence ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2KHR" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, uint32_t marker, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdWriteBufferMarker2AMD( m_commandBuffer, static_cast<VkPipelineStageFlags2>( stage ), static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), marker );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Queue::getCheckpointData2NV( uint32_t * pCheckpointDataCount, VULKAN_HPP_NAMESPACE::CheckpointData2NV * pCheckpointData, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetQueueCheckpointData2NV( m_queue, pCheckpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( pCheckpointData ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename CheckpointData2NVAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV, CheckpointData2NVAllocator> Queue::getCheckpointData2NV( Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV, CheckpointData2NVAllocator> checkpointData;
+    uint32_t checkpointDataCount;
+    d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, nullptr );
+    checkpointData.resize( checkpointDataCount );
+    d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( checkpointData.data() ) );
+    
+    VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
+    if ( checkpointDataCount < checkpointData.size() )
+    {
+      checkpointData.resize( checkpointDataCount );
+    }
+    return checkpointData;
+  }
+
+  template <typename CheckpointData2NVAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, CheckpointData2NV>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV, CheckpointData2NVAllocator> Queue::getCheckpointData2NV( CheckpointData2NVAllocator & checkpointData2NVAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV, CheckpointData2NVAllocator> checkpointData( checkpointData2NVAllocator );
+    uint32_t checkpointDataCount;
+    d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, nullptr );
+    checkpointData.resize( checkpointDataCount );
+    d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( checkpointData.data() ) );
+    
+    VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
+    if ( checkpointDataCount < checkpointData.size() )
+    {
+      checkpointData.resize( checkpointDataCount );
+    }
+    return checkpointData;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_EXT_descriptor_buffer ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSizeEXT( VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout, VULKAN_HPP_NAMESPACE::DeviceSize * pLayoutSizeInBytes, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetDescriptorSetLayoutSizeEXT( m_device, static_cast<VkDescriptorSetLayout>( layout ), reinterpret_cast<VkDeviceSize *>( pLayoutSizeInBytes ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize Device::getDescriptorSetLayoutSizeEXT( VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::DeviceSize layoutSizeInBytes;
+    d.vkGetDescriptorSetLayoutSizeEXT( m_device, static_cast<VkDescriptorSetLayout>( layout ), reinterpret_cast<VkDeviceSize *>( &layoutSizeInBytes ) );
+    
+    
+    return layoutSizeInBytes;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutBindingOffsetEXT( VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout, uint32_t binding, VULKAN_HPP_NAMESPACE::DeviceSize * pOffset, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetDescriptorSetLayoutBindingOffsetEXT( m_device, static_cast<VkDescriptorSetLayout>( layout ), binding, reinterpret_cast<VkDeviceSize *>( pOffset ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize Device::getDescriptorSetLayoutBindingOffsetEXT( VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout, uint32_t binding, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::DeviceSize offset;
+    d.vkGetDescriptorSetLayoutBindingOffsetEXT( m_device, static_cast<VkDescriptorSetLayout>( layout ), binding, reinterpret_cast<VkDeviceSize *>( &offset ) );
+    
+    
+    return offset;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getDescriptorEXT( const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT * pDescriptorInfo, size_t dataSize, void * pDescriptor, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetDescriptorEXT( m_device, reinterpret_cast<const VkDescriptorGetInfoEXT *>( pDescriptorInfo ), dataSize, pDescriptor );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename DescriptorType, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DescriptorType Device::getDescriptorEXT( const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT & descriptorInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    DescriptorType descriptor;
+    d.vkGetDescriptorEXT( m_device, reinterpret_cast<const VkDescriptorGetInfoEXT *>( &descriptorInfo ), sizeof( DescriptorType ), reinterpret_cast<void *>( &descriptor ) );
+    
+    
+    return descriptor;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorBuffersEXT( uint32_t bufferCount, const VULKAN_HPP_NAMESPACE::DescriptorBufferBindingInfoEXT * pBindingInfos, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdBindDescriptorBuffersEXT( m_commandBuffer, bufferCount, reinterpret_cast<const VkDescriptorBufferBindingInfoEXT *>( pBindingInfos ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorBuffersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorBufferBindingInfoEXT> const & bindingInfos, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdBindDescriptorBuffersEXT( m_commandBuffer, bindingInfos.size(), reinterpret_cast<const VkDescriptorBufferBindingInfoEXT *>( bindingInfos.data() ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setDescriptorBufferOffsetsEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, uint32_t setCount, const uint32_t * pBufferIndices, const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetDescriptorBufferOffsetsEXT( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), firstSet, setCount, pBufferIndices, reinterpret_cast<const VkDeviceSize *>( pOffsets ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setDescriptorBufferOffsetsEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & bufferIndices, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    VULKAN_HPP_ASSERT( bufferIndices.size() == offsets.size() );
+#else
+    if ( bufferIndices.size() != offsets.size() )
+  {
+    throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setDescriptorBufferOffsetsEXT: bufferIndices.size() != offsets.size()" );
+  }
+#endif  /*VULKAN_HPP_NO_EXCEPTIONS*/
+    
+    
+    d.vkCmdSetDescriptorBufferOffsetsEXT( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), firstSet, bufferIndices.size(), bufferIndices.data(), reinterpret_cast<const VkDeviceSize *>( offsets.data() ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorBufferEmbeddedSamplersEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdBindDescriptorBufferEmbeddedSamplersEXT( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), set );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getBufferOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetBufferOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkBufferCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename DataType, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type Device::getBufferOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT & info, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    DataType data;
+    VkResult result = d.vkGetBufferOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkBufferCaptureDescriptorDataInfoEXT *>( &info ), &data );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getBufferOpaqueCaptureDescriptorDataEXT" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetImageOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkImageCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename DataType, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type Device::getImageOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT & info, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    DataType data;
+    VkResult result = d.vkGetImageOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkImageCaptureDescriptorDataInfoEXT *>( &info ), &data );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getImageOpaqueCaptureDescriptorDataEXT" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageViewOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetImageViewOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkImageViewCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename DataType, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type Device::getImageViewOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT & info, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    DataType data;
+    VkResult result = d.vkGetImageViewOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkImageViewCaptureDescriptorDataInfoEXT *>( &info ), &data );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getImageViewOpaqueCaptureDescriptorDataEXT" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSamplerOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetSamplerOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkSamplerCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename DataType, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type Device::getSamplerOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT & info, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    DataType data;
+    VkResult result = d.vkGetSamplerOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkSamplerCaptureDescriptorDataInfoEXT *>( &info ), &data );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSamplerOpaqueCaptureDescriptorDataEXT" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getAccelerationStructureOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkAccelerationStructureCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename DataType, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type Device::getAccelerationStructureOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT & info, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    DataType data;
+    VkResult result = d.vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkAccelerationStructureCaptureDescriptorDataInfoEXT *>( &info ), &data );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureOpaqueCaptureDescriptorDataEXT" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_NV_fragment_shading_rate_enums ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateEnumNV( VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate, const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetFragmentShadingRateEnumNV( m_commandBuffer, static_cast<VkFragmentShadingRateNV>( shadingRate ), reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) );
+  }
+
+  //=== VK_EXT_mesh_shader ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksEXT( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDrawMeshTasksEXT( m_commandBuffer, groupCountX, groupCountY, groupCountZ );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectEXT( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDrawMeshTasksIndirectEXT( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountEXT( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDrawMeshTasksIndirectCountEXT( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
+  }
+
+  //=== VK_KHR_copy_commands2 ===
+
+
+  // Pointer-style overload: records a buffer-to-buffer copy (VK_KHR_copy_commands2)
+  // by reinterpreting the C++ info struct as its layout-compatible C counterpart.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 * pCopyBufferInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdCopyBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2 *>( pCopyBufferInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode overload: takes the info struct by reference instead of pointer.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdCopyBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2 *>( &copyBufferInfo ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // Pointer-style overload: records an image-to-image copy (VK_KHR_copy_commands2).
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 * pCopyImageInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdCopyImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2 *>( pCopyImageInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode overload: same call, info struct passed by reference.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdCopyImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2 *>( &copyImageInfo ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // Pointer-style overload: records a buffer-to-image copy (VK_KHR_copy_commands2).
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 * pCopyBufferToImageInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdCopyBufferToImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2 *>( pCopyBufferToImageInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode overload: same call, info struct passed by reference.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdCopyBufferToImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2 *>( &copyBufferToImageInfo ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // Pointer-style overload: records an image-to-buffer copy (VK_KHR_copy_commands2).
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 * pCopyImageToBufferInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdCopyImageToBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageToBufferInfo2 *>( pCopyImageToBufferInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode overload: same call, info struct passed by reference.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdCopyImageToBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageToBufferInfo2 *>( &copyImageToBufferInfo ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // Pointer-style overload: records an image blit (VK_KHR_copy_commands2).
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 * pBlitImageInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdBlitImage2KHR( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2 *>( pBlitImageInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode overload: same call, info struct passed by reference.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdBlitImage2KHR( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2 *>( &blitImageInfo ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // Pointer-style overload: records a multisample resolve (VK_KHR_copy_commands2).
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 * pResolveImageInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdResolveImage2KHR( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2 *>( pResolveImageInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode overload: same call, info struct passed by reference.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdResolveImage2KHR( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2 *>( &resolveImageInfo ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_EXT_image_compression_control ===
+
+
+  // Pointer-style overload (VK_EXT_image_compression_control): writes the subresource
+  // layout for `image` into *pLayout via vkGetImageSubresourceLayout2EXT.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT * pSubresource, VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT * pLayout, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetImageSubresourceLayout2EXT( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkImageSubresource2EXT *>( pSubresource ), reinterpret_cast<VkSubresourceLayout2EXT *>( pLayout ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode overload: returns the layout by value.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT Device::getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT layout;
+    d.vkGetImageSubresourceLayout2EXT( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkImageSubresource2EXT *>( &subresource ), reinterpret_cast<VkSubresourceLayout2EXT *>( &layout ) );
+    
+    
+    return layout;
+  }
+
+  // StructureChain overload: fills the SubresourceLayout2EXT link of a caller-chosen
+  // pNext chain, so extension structs chained to the layout are populated too.
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT & layout = structureChain.template get<VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT>();
+    d.vkGetImageSubresourceLayout2EXT( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkImageSubresource2EXT *>( &subresource ), reinterpret_cast<VkSubresourceLayout2EXT *>( &layout ) );
+    
+    
+    return structureChain;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_EXT_device_fault ===
+
+
+  // Pointer-style overload (VK_EXT_device_fault): queries device fault counts/info
+  // via vkGetDeviceFaultInfoEXT and returns the raw Result.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFaultInfoEXT( VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT * pFaultCounts, VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT * pFaultInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetDeviceFaultInfoEXT( m_device, reinterpret_cast<VkDeviceFaultCountsEXT *>( pFaultCounts ), reinterpret_cast<VkDeviceFaultInfoEXT *>( pFaultInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode overload: returns counts+info as a pair wrapped in ResultValue.
+  // eIncomplete is treated as a non-throwing success code (partial data is valid).
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::pair<VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT, VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT>> Device::getFaultInfoEXT( Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::pair<VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT,VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT> data;
+    VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT & faultCounts = data.first;
+    VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT & faultInfo = data.second;
+    VkResult result = d.vkGetDeviceFaultInfoEXT( m_device, reinterpret_cast<VkDeviceFaultCountsEXT *>( &faultCounts ), reinterpret_cast<VkDeviceFaultInfoEXT *>( &faultInfo ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getFaultInfoEXT", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete } );
+    
+    return ResultValue<std::pair<VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT, VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_NV_acquire_winrt_display ===
+
+
+// VK_NV_acquire_winrt_display: takes exclusive control of a WinRT display.
+// Note the inverted guard: #ifdef selects the raw-Result variant when enhanced
+// mode is disabled; #else provides the throwing/ResultValueType variant.
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireWinrtDisplayNV( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkAcquireWinrtDisplayNV( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type PhysicalDevice::acquireWinrtDisplayNV( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkAcquireWinrtDisplayNV( m_physicalDevice, static_cast<VkDisplayKHR>( display ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireWinrtDisplayNV" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  // Pointer-style overload: retrieves the DisplayKHR handle for a WinRT display id.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getWinrtDisplayNV( uint32_t deviceRelativeId, VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( pDisplay ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode overload: returns the display handle wrapped in ResultValueType.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type PhysicalDevice::getWinrtDisplayNV( uint32_t deviceRelativeId, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::DisplayKHR display;
+    VkResult result = d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( &display ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getWinrtDisplayNV" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), display );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  // Unique-handle variant: wraps the display in a UniqueHandle whose deleter
+  // releases it via ObjectRelease<PhysicalDevice, Dispatch>.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>>::type PhysicalDevice::getWinrtDisplayNVUnique( uint32_t deviceRelativeId, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::DisplayKHR display;
+    VkResult result = d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( &display ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getWinrtDisplayNVUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>( display, ObjectRelease<PhysicalDevice, Dispatch>( *this, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
+  //=== VK_EXT_directfb_surface ===
+
+
+  // Pointer-style overload (VK_EXT_directfb_surface): creates a SurfaceKHR backed
+  // by a DirectFB surface; allocator may be null for default allocation.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createDirectFBSurfaceEXT( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateDirectFBSurfaceEXT( m_instance, reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode overload: Optional<AllocationCallbacks> allocator, surface by value.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createDirectFBSurfaceEXT( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    VkResult result = d.vkCreateDirectFBSurfaceEXT( m_instance, reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createDirectFBSurfaceEXT" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  // Unique-handle variant: the surface is destroyed via ObjectDestroy<Instance>
+  // (using the same allocator) when the UniqueHandle goes out of scope.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type Instance::createDirectFBSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    VkResult result = d.vkCreateDirectFBSurfaceEXT( m_instance, reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createDirectFBSurfaceEXTUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // Pointer-style overload: queries whether the queue family can present to DirectFB.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex, IDirectFB * dfb, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Bool32>( d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT( m_physicalDevice, queueFamilyIndex, dfb ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode overload: same query, IDirectFB passed by reference.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 PhysicalDevice::getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex, IDirectFB & dfb, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkBool32 result = d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT( m_physicalDevice, queueFamilyIndex, &dfb );
+    
+    
+    return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+
+  //=== VK_KHR_ray_tracing_pipeline ===
+
+
+  // Pointer-style overload (VK_KHR_ray_tracing_pipeline): records a ray-trace dispatch
+  // of width x height x depth, with the four shader-binding-table regions.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdTraceRaysKHR( m_commandBuffer, reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pRaygenShaderBindingTable ), reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pMissShaderBindingTable ), reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pHitShaderBindingTable ), reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pCallableShaderBindingTable ), width, height, depth );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode overload: binding-table regions passed by reference.
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdTraceRaysKHR( m_commandBuffer, reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &raygenShaderBindingTable ), reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &missShaderBindingTable ), reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &hitShaderBindingTable ), reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &callableShaderBindingTable ), width, height, depth );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // Pointer-style overload: creates `createInfoCount` ray-tracing pipelines, possibly
+  // deferred via `deferredOperation`, returning the raw Result.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR * pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateRayTracingPipelinesKHR( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), static_cast<VkPipelineCache>( pipelineCache ), createInfoCount, reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( pCreateInfos ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkPipeline *>( pPipelines ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // Enhanced-mode overload: builds one pipeline per create-info and returns the vector.
+  // Deferred / not-deferred / pipeline-compile-required results are treated as success
+  // codes (no throw) and surfaced through the ResultValue.
+  template <typename PipelineAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size() );
+    VkResult result = d.vkCreateRayTracingPipelinesKHR( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipeline *>( pipelines.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+    
+    return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelines );
+  }
+
+  // Same as above, but the caller supplies the vector's allocator instance.
+  template <typename PipelineAllocator, typename Dispatch, typename B0, typename std::enable_if<std::is_same<typename B0::value_type, Pipeline>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator );
+    VkResult result = d.vkCreateRayTracingPipelinesKHR( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipeline *>( pipelines.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+    
+    return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelines );
+  }
+
+  // Single-pipeline convenience: passes a count of 1 to the same entry point.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<VULKAN_HPP_NAMESPACE::Pipeline> Device::createRayTracingPipelineKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::Pipeline pipeline;
+    VkResult result = d.vkCreateRayTracingPipelinesKHR( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), static_cast<VkPipelineCache>( pipelineCache ), 1, reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipeline *>( &pipeline ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+    
+    return ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipeline );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  // Unique-handle variant: wraps each created pipeline in a UniqueHandle sharing one
+  // ObjectDestroy<Device> deleter (same allocator used for creation).
+  template <typename Dispatch, typename PipelineAllocator>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> Device::createRayTracingPipelinesKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
+    VkResult result = d.vkCreateRayTracingPipelinesKHR( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipeline *>( pipelines.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHRUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
+    uniquePipelines.reserve( createInfos.size() );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    for ( auto const & pipeline : pipelines )
+    {
+      uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
+    }
+    return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
+  }
+
+  // Unique-handle variant with a caller-supplied vector allocator.
+  template <typename Dispatch, typename PipelineAllocator, typename B0, typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> Device::createRayTracingPipelinesKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
+    VkResult result = d.vkCreateRayTracingPipelinesKHR( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipeline *>( pipelines.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHRUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
+    uniquePipelines.reserve( createInfos.size() );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    for ( auto const & pipeline : pipelines )
+    {
+      uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
+    }
+    return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
+  }
+
+  // Single-pipeline unique-handle convenience.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>> Device::createRayTracingPipelineKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::Pipeline pipeline;
+    VkResult result = d.vkCreateRayTracingPipelinesKHR( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), static_cast<VkPipelineCache>( pipelineCache ), 1, reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipeline *>( &pipeline ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineKHRUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+    
+    return ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>( pipeline, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  // Pointer-style overload: copies `dataSize` bytes of shader-group handles for
+  // groups [firstGroup, firstGroup+groupCount) of `pipeline` into pData.
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void * pData, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesKHR( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename DataType, typename DataTypeAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type Device::getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+        VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
+    std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) );
+    VkResult result = d.vkGetRayTracingShaderGroupHandlesKHR( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() * sizeof( DataType ), reinterpret_cast<void *>( data.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesKHR" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
+  }
+
+  template <typename DataType, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type Device::getRayTracingShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    DataType data;
+    VkResult result = d.vkGetRayTracingShaderGroupHandlesKHR( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, sizeof( DataType ), reinterpret_cast<void *>( &data ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandleKHR" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void * pData, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT  // raw overload: caller supplies a pData buffer of dataSize bytes
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename DataType, typename DataTypeAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d ) const  // enhanced overload: returns the capture/replay handle data as a std::vector<DataType>
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+        VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );  // dataSize must be a whole number of DataType elements
+    std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) );
+    VkResult result = d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() * sizeof( DataType ), reinterpret_cast<void *>( data.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandlesKHR" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
+  }
+
+  template <typename DataType, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type Device::getRayTracingCaptureReplayShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d ) const  // convenience overload: fetches exactly sizeof(DataType) bytes into a single value
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    DataType data;
+    VkResult result = d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, sizeof( DataType ), reinterpret_cast<void *>( &data ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandleKHR" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable, VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT  // pointer overload: forwards the four binding-table regions to vkCmdTraceRaysIndirectKHR
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdTraceRaysIndirectKHR( m_commandBuffer, reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pRaygenShaderBindingTable ), reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pMissShaderBindingTable ), reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pHitShaderBindingTable ), reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pCallableShaderBindingTable ), static_cast<VkDeviceAddress>( indirectDeviceAddress ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable, VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT  // enhanced overload: same call with by-reference binding tables
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdTraceRaysIndirectKHR( m_commandBuffer, reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &raygenShaderBindingTable ), reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &missShaderBindingTable ), reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &hitShaderBindingTable ), reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &callableShaderBindingTable ), static_cast<VkDeviceAddress>( indirectDeviceAddress ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE DeviceSize Device::getRayTracingShaderGroupStackSizeKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t group, VULKAN_HPP_NAMESPACE::ShaderGroupShaderKHR groupShader, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT  // queries the stack size of one shader within a shader group
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<DeviceSize>( d.vkGetRayTracingShaderGroupStackSizeKHR( m_device, static_cast<VkPipeline>( pipeline ), group, static_cast<VkShaderGroupShaderKHR>( groupShader ) ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setRayTracingPipelineStackSizeKHR( uint32_t pipelineStackSize, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT  // records a dynamic ray-tracing pipeline stack size into the command buffer
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetRayTracingPipelineStackSizeKHR( m_commandBuffer, pipelineStackSize );
+  }
+
+  //=== VK_EXT_vertex_input_dynamic_state ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setVertexInputEXT( uint32_t vertexBindingDescriptionCount, const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT * pVertexBindingDescriptions, uint32_t vertexAttributeDescriptionCount, const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT * pVertexAttributeDescriptions, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT  // pointer overload: counts and arrays are passed through verbatim
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetVertexInputEXT( m_commandBuffer, vertexBindingDescriptionCount, reinterpret_cast<const VkVertexInputBindingDescription2EXT *>( pVertexBindingDescriptions ), vertexAttributeDescriptionCount, reinterpret_cast<const VkVertexInputAttributeDescription2EXT *>( pVertexAttributeDescriptions ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setVertexInputEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT> const & vertexBindingDescriptions, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT> const & vertexAttributeDescriptions, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT  // enhanced overload: sizes/pointers derived from the ArrayProxy arguments
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdSetVertexInputEXT( m_commandBuffer, vertexBindingDescriptions.size(), reinterpret_cast<const VkVertexInputBindingDescription2EXT *>( vertexBindingDescriptions.data() ), vertexAttributeDescriptions.size(), reinterpret_cast<const VkVertexInputAttributeDescription2EXT *>( vertexAttributeDescriptions.data() ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_external_memory ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, zx_handle_t * pZirconHandle, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT  // raw overload: exports device memory as a Zircon handle via out-pointer
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetMemoryZirconHandleFUCHSIA( m_device, reinterpret_cast<const VkMemoryGetZirconHandleInfoFUCHSIA *>( pGetZirconHandleInfo ), pZirconHandle ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<zx_handle_t>::type Device::getMemoryZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA & getZirconHandleInfo, Dispatch const & d ) const  // enhanced overload: returns the Zircon handle by value
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    zx_handle_t zirconHandle;
+    VkResult result = d.vkGetMemoryZirconHandleFUCHSIA( m_device, reinterpret_cast<const VkMemoryGetZirconHandleInfoFUCHSIA *>( &getZirconHandleInfo ), &zirconHandle );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandleFUCHSIA" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), zirconHandle );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, zx_handle_t zirconHandle, VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA * pMemoryZirconHandleProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT  // raw overload: queries importability properties of an external Zircon handle
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetMemoryZirconHandlePropertiesFUCHSIA( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), zirconHandle, reinterpret_cast<VkMemoryZirconHandlePropertiesFUCHSIA *>( pMemoryZirconHandleProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA>::type Device::getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, zx_handle_t zirconHandle, Dispatch const & d ) const  // enhanced overload: returns the properties struct by value
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA memoryZirconHandleProperties;
+    VkResult result = d.vkGetMemoryZirconHandlePropertiesFUCHSIA( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), zirconHandle, reinterpret_cast<VkMemoryZirconHandlePropertiesFUCHSIA *>( &memoryZirconHandleProperties ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandlePropertiesFUCHSIA" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), memoryZirconHandleProperties );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_external_semaphore ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA * pImportSemaphoreZirconHandleInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT  // raw overload: imports a Zircon handle into a semaphore
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkImportSemaphoreZirconHandleFUCHSIA( m_device, reinterpret_cast<const VkImportSemaphoreZirconHandleInfoFUCHSIA *>( pImportSemaphoreZirconHandleInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::importSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA & importSemaphoreZirconHandleInfo, Dispatch const & d ) const  // enhanced overload: void result, errors reported through resultCheck
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkImportSemaphoreZirconHandleFUCHSIA( m_device, reinterpret_cast<const VkImportSemaphoreZirconHandleInfoFUCHSIA *>( &importSemaphoreZirconHandleInfo ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreZirconHandleFUCHSIA" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, zx_handle_t * pZirconHandle, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT  // raw overload: exports a semaphore as a Zircon handle via out-pointer
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetSemaphoreZirconHandleFUCHSIA( m_device, reinterpret_cast<const VkSemaphoreGetZirconHandleInfoFUCHSIA *>( pGetZirconHandleInfo ), pZirconHandle ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<zx_handle_t>::type Device::getSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA & getZirconHandleInfo, Dispatch const & d ) const  // enhanced overload: returns the Zircon handle by value
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    zx_handle_t zirconHandle;
+    VkResult result = d.vkGetSemaphoreZirconHandleFUCHSIA( m_device, reinterpret_cast<const VkSemaphoreGetZirconHandleInfoFUCHSIA *>( &getZirconHandleInfo ), &zirconHandle );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreZirconHandleFUCHSIA" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), zirconHandle );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_buffer_collection ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createBufferCollectionFUCHSIA( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA * pCollection, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT  // raw overload: out-parameter plus raw Result
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateBufferCollectionFUCHSIA( m_device, reinterpret_cast<const VkBufferCollectionCreateInfoFUCHSIA *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkBufferCollectionFUCHSIA *>( pCollection ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA>::type Device::createBufferCollectionFUCHSIA( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const  // enhanced overload: returns the new collection handle by value
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection;
+    VkResult result = d.vkCreateBufferCollectionFUCHSIA( m_device, reinterpret_cast<const VkBufferCollectionCreateInfoFUCHSIA *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkBufferCollectionFUCHSIA *>( &collection ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferCollectionFUCHSIA" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), collection );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA, Dispatch>>::type Device::createBufferCollectionFUCHSIAUnique( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const  // smart-handle overload: wraps the collection in a UniqueHandle owning its destruction
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection;
+    VkResult result = d.vkCreateBufferCollectionFUCHSIA( m_device, reinterpret_cast<const VkBufferCollectionCreateInfoFUCHSIA *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkBufferCollectionFUCHSIA *>( &collection ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferCollectionFUCHSIAUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA, Dispatch>( collection, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setBufferCollectionImageConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA * pImageConstraintsInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT  // raw overload: applies image constraints to the collection
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkSetBufferCollectionImageConstraintsFUCHSIA( m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkImageConstraintsInfoFUCHSIA *>( pImageConstraintsInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setBufferCollectionImageConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA & imageConstraintsInfo, Dispatch const & d ) const  // enhanced overload: void result, errors reported through resultCheck
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkSetBufferCollectionImageConstraintsFUCHSIA( m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkImageConstraintsInfoFUCHSIA *>( &imageConstraintsInfo ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setBufferCollectionImageConstraintsFUCHSIA" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setBufferCollectionBufferConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA * pBufferConstraintsInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT  // raw overload: applies buffer constraints to the collection
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkSetBufferCollectionBufferConstraintsFUCHSIA( m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkBufferConstraintsInfoFUCHSIA *>( pBufferConstraintsInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setBufferCollectionBufferConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA & bufferConstraintsInfo, Dispatch const & d ) const  // enhanced overload: void result, errors reported through resultCheck
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkSetBufferCollectionBufferConstraintsFUCHSIA( m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkBufferConstraintsInfoFUCHSIA *>( &bufferConstraintsInfo ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setBufferCollectionBufferConstraintsFUCHSIA" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT  // raw overload: destroys the collection with optional allocator callbacks
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyBufferCollectionFUCHSIA( m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT  // enhanced overload: Optional allocator converted to a raw pointer
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyBufferCollectionFUCHSIA( m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT  // generic destroy overload; delegates to the same vkDestroyBufferCollectionFUCHSIA entry point
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyBufferCollectionFUCHSIA( m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT  // generic destroy, enhanced overload
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyBufferCollectionFUCHSIA( m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getBufferCollectionPropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA * pProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT  // raw overload: fills caller-provided properties struct
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetBufferCollectionPropertiesFUCHSIA( m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<VkBufferCollectionPropertiesFUCHSIA *>( pProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA>::type Device::getBufferCollectionPropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, Dispatch const & d ) const  // enhanced overload: returns the properties struct by value
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA properties;
+    VkResult result = d.vkGetBufferCollectionPropertiesFUCHSIA( m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<VkBufferCollectionPropertiesFUCHSIA *>( &properties ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getBufferCollectionPropertiesFUCHSIA" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  //=== VK_HUAWEI_subpass_shading ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSubpassShadingMaxWorkgroupSizeHUAWEI( VULKAN_HPP_NAMESPACE::RenderPass renderpass, VULKAN_HPP_NAMESPACE::Extent2D * pMaxWorkgroupSize, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT  // raw overload: writes the max workgroup size through the out-pointer
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( m_device, static_cast<VkRenderPass>( renderpass ), reinterpret_cast<VkExtent2D *>( pMaxWorkgroupSize ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<VULKAN_HPP_NAMESPACE::Extent2D> Device::getSubpassShadingMaxWorkgroupSizeHUAWEI( VULKAN_HPP_NAMESPACE::RenderPass renderpass, Dispatch const & d ) const  // enhanced overload: eSuccess and eIncomplete are both accepted results
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::Extent2D maxWorkgroupSize;
+    VkResult result = d.vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( m_device, static_cast<VkRenderPass>( renderpass ), reinterpret_cast<VkExtent2D *>( &maxWorkgroupSize ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSubpassShadingMaxWorkgroupSizeHUAWEI", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete } );
+    
+    return ResultValue<VULKAN_HPP_NAMESPACE::Extent2D>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), maxWorkgroupSize );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::subpassShadingHUAWEI( Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT  // records a subpass shading dispatch into the command buffer
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSubpassShadingHUAWEI( m_commandBuffer );
+  }
+
+  //=== VK_HUAWEI_invocation_mask ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::bindInvocationMaskHUAWEI( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT  // binds an invocation-mask image view with its expected layout
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdBindInvocationMaskHUAWEI( m_commandBuffer, static_cast<VkImageView>( imageView ), static_cast<VkImageLayout>( imageLayout ) );
+  }
+
+  //=== VK_NV_external_memory_rdma ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryRemoteAddressNV( const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV * pMemoryGetRemoteAddressInfo, VULKAN_HPP_NAMESPACE::RemoteAddressNV * pAddress, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT  // raw overload: writes the remote address through the out-pointer
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetMemoryRemoteAddressNV( m_device, reinterpret_cast<const VkMemoryGetRemoteAddressInfoNV *>( pMemoryGetRemoteAddressInfo ), reinterpret_cast<VkRemoteAddressNV *>( pAddress ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RemoteAddressNV>::type Device::getMemoryRemoteAddressNV( const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV & memoryGetRemoteAddressInfo, Dispatch const & d ) const  // enhanced overload: returns the remote address by value
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::RemoteAddressNV address;
+    VkResult result = d.vkGetMemoryRemoteAddressNV( m_device, reinterpret_cast<const VkMemoryGetRemoteAddressInfoNV *>( &memoryGetRemoteAddressInfo ), reinterpret_cast<VkRemoteAddressNV *>( &address ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryRemoteAddressNV" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), address );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_EXT_pipeline_properties ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelinePropertiesEXT( const VULKAN_HPP_NAMESPACE::PipelineInfoEXT * pPipelineInfo, VULKAN_HPP_NAMESPACE::BaseOutStructure * pPipelineProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT  // raw overload: properties are written into the caller's BaseOutStructure chain
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetPipelinePropertiesEXT( m_device, reinterpret_cast<const VkPipelineInfoEXT *>( pPipelineInfo ), reinterpret_cast<VkBaseOutStructure *>( pPipelineProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::BaseOutStructure>::type Device::getPipelinePropertiesEXT( const VULKAN_HPP_NAMESPACE::PipelineInfoEXT & pipelineInfo, Dispatch const & d ) const  // enhanced overload: returns a BaseOutStructure by value
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::BaseOutStructure pipelineProperties;
+    VkResult result = d.vkGetPipelinePropertiesEXT( m_device, reinterpret_cast<const VkPipelineInfoEXT *>( &pipelineInfo ), reinterpret_cast<VkBaseOutStructure *>( &pipelineProperties ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelinePropertiesEXT" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelineProperties );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_EXT_extended_dynamic_state2 ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setPatchControlPointsEXT( uint32_t patchControlPoints, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetPatchControlPointsEXT( m_commandBuffer, patchControlPoints );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setRasterizerDiscardEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetRasterizerDiscardEnableEXT( m_commandBuffer, static_cast<VkBool32>( rasterizerDiscardEnable ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetDepthBiasEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthBiasEnable ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEXT( VULKAN_HPP_NAMESPACE::LogicOp logicOp, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetLogicOpEXT( m_commandBuffer, static_cast<VkLogicOp>( logicOp ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveRestartEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetPrimitiveRestartEnableEXT( m_commandBuffer, static_cast<VkBool32>( primitiveRestartEnable ) );
+  }
+
+#if defined( VK_USE_PLATFORM_SCREEN_QNX )
+  //=== VK_QNX_screen_surface ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createScreenSurfaceQNX( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateScreenSurfaceQNX( m_instance, reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createScreenSurfaceQNX( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    VkResult result = d.vkCreateScreenSurfaceQNX( m_instance, reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createScreenSurfaceQNX" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type Instance::createScreenSurfaceQNXUnique( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    VkResult result = d.vkCreateScreenSurfaceQNX( m_instance, reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createScreenSurfaceQNXUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getScreenPresentationSupportQNX( uint32_t queueFamilyIndex, struct _screen_window * window, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Bool32>( d.vkGetPhysicalDeviceScreenPresentationSupportQNX( m_physicalDevice, queueFamilyIndex, window ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 PhysicalDevice::getScreenPresentationSupportQNX( uint32_t queueFamilyIndex, struct _screen_window & window, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkBool32 result = d.vkGetPhysicalDeviceScreenPresentationSupportQNX( m_physicalDevice, queueFamilyIndex, &window );
+    
+    
+    return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+
+  //=== VK_EXT_color_write_enable ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setColorWriteEnableEXT( uint32_t attachmentCount, const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetColorWriteEnableEXT( m_commandBuffer, attachmentCount, reinterpret_cast<const VkBool32 *>( pColorWriteEnables ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setColorWriteEnableEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorWriteEnables, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdSetColorWriteEnableEXT( m_commandBuffer, colorWriteEnables.size(), reinterpret_cast<const VkBool32 *>( colorWriteEnables.data() ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_ray_tracing_maintenance1 ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirect2KHR( VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdTraceRaysIndirect2KHR( m_commandBuffer, static_cast<VkDeviceAddress>( indirectDeviceAddress ) );
+  }
+
+  //=== VK_EXT_multi_draw ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawMultiEXT( uint32_t drawCount, const VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT * pVertexInfo, uint32_t instanceCount, uint32_t firstInstance, uint32_t stride, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDrawMultiEXT( m_commandBuffer, drawCount, reinterpret_cast<const VkMultiDrawInfoEXT *>( pVertexInfo ), instanceCount, firstInstance, stride );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawMultiEXT( VULKAN_HPP_NAMESPACE::StridedArrayProxy<const VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT> const & vertexInfo, uint32_t instanceCount, uint32_t firstInstance, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdDrawMultiEXT( m_commandBuffer, vertexInfo.size(), reinterpret_cast<const VkMultiDrawInfoEXT *>( vertexInfo.data() ), instanceCount, firstInstance, vertexInfo.stride() );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawMultiIndexedEXT( uint32_t drawCount, const VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT * pIndexInfo, uint32_t instanceCount, uint32_t firstInstance, uint32_t stride, const int32_t * pVertexOffset, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDrawMultiIndexedEXT( m_commandBuffer, drawCount, reinterpret_cast<const VkMultiDrawIndexedInfoEXT *>( pIndexInfo ), instanceCount, firstInstance, stride, pVertexOffset );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawMultiIndexedEXT( VULKAN_HPP_NAMESPACE::StridedArrayProxy<const VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT> const & indexInfo, uint32_t instanceCount, uint32_t firstInstance, Optional<const int32_t> vertexOffset, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdDrawMultiIndexedEXT( m_commandBuffer, indexInfo.size(), reinterpret_cast<const VkMultiDrawIndexedInfoEXT *>( indexInfo.data() ), instanceCount, firstInstance, indexInfo.stride(), static_cast<const int32_t *>( vertexOffset ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_EXT_opacity_micromap ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createMicromapEXT( const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromap, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateMicromapEXT( m_device, reinterpret_cast<const VkMicromapCreateInfoEXT *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkMicromapEXT *>( pMicromap ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MicromapEXT>::type Device::createMicromapEXT( const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::MicromapEXT micromap;
+    VkResult result = d.vkCreateMicromapEXT( m_device, reinterpret_cast<const VkMicromapCreateInfoEXT *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkMicromapEXT *>( &micromap ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createMicromapEXT" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), micromap );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::MicromapEXT, Dispatch>>::type Device::createMicromapEXTUnique( const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::MicromapEXT micromap;
+    VkResult result = d.vkCreateMicromapEXT( m_device, reinterpret_cast<const VkMicromapCreateInfoEXT *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkMicromapEXT *>( &micromap ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createMicromapEXTUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::MicromapEXT, Dispatch>( micromap, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyMicromapEXT( VULKAN_HPP_NAMESPACE::MicromapEXT micromap, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyMicromapEXT( m_device, static_cast<VkMicromapEXT>( micromap ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyMicromapEXT( VULKAN_HPP_NAMESPACE::MicromapEXT micromap, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyMicromapEXT( m_device, static_cast<VkMicromapEXT>( micromap ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::MicromapEXT micromap, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyMicromapEXT( m_device, static_cast<VkMicromapEXT>( micromap ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::MicromapEXT micromap, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyMicromapEXT( m_device, static_cast<VkMicromapEXT>( micromap ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::buildMicromapsEXT( uint32_t infoCount, const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pInfos, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdBuildMicromapsEXT( m_commandBuffer, infoCount, reinterpret_cast<const VkMicromapBuildInfoEXT *>( pInfos ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::buildMicromapsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT> const & infos, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdBuildMicromapsEXT( m_commandBuffer, infos.size(), reinterpret_cast<const VkMicromapBuildInfoEXT *>( infos.data() ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::buildMicromapsEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, uint32_t infoCount, const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pInfos, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkBuildMicromapsEXT( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), infoCount, reinterpret_cast<const VkMicromapBuildInfoEXT *>( pInfos ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::buildMicromapsEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT> const & infos, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkBuildMicromapsEXT( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), infos.size(), reinterpret_cast<const VkMicromapBuildInfoEXT *>( infos.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::buildMicromapsEXT", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
+    
+    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT * pInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCopyMicromapEXT( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMicromapInfoEXT *>( pInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::copyMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkCopyMicromapEXT( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMicromapInfoEXT *>( &info ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::copyMicromapEXT", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
+    
+    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMicromapToMemoryEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT * pInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCopyMicromapToMemoryEXT( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( pInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::copyMicromapToMemoryEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkCopyMicromapToMemoryEXT( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( &info ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::copyMicromapToMemoryEXT", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
+    
+    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMemoryToMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT * pInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCopyMemoryToMicromapEXT( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( pInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::copyMemoryToMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkCopyMemoryToMicromapEXT( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( &info ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToMicromapEXT", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
+    
+    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::writeMicromapsPropertiesEXT( uint32_t micromapCount, const VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromaps, VULKAN_HPP_NAMESPACE::QueryType queryType, size_t dataSize, void * pData, size_t stride, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkWriteMicromapsPropertiesEXT( m_device, micromapCount, reinterpret_cast<const VkMicromapEXT *>( pMicromaps ), static_cast<VkQueryType>( queryType ), dataSize, pData, stride ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename DataType, typename DataTypeAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type Device::writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps, VULKAN_HPP_NAMESPACE::QueryType queryType, size_t dataSize, size_t stride, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+        VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
+    std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) );
+    VkResult result = d.vkWriteMicromapsPropertiesEXT( m_device, micromaps.size(), reinterpret_cast<const VkMicromapEXT *>( micromaps.data() ), static_cast<VkQueryType>( queryType ), data.size() * sizeof( DataType ), reinterpret_cast<void *>( data.data() ), stride );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::writeMicromapsPropertiesEXT" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
+  }
+
+  template <typename DataType, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type Device::writeMicromapsPropertyEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps, VULKAN_HPP_NAMESPACE::QueryType queryType, size_t stride, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    DataType data;
+    VkResult result = d.vkWriteMicromapsPropertiesEXT( m_device, micromaps.size(), reinterpret_cast<const VkMicromapEXT *>( micromaps.data() ), static_cast<VkQueryType>( queryType ), sizeof( DataType ), reinterpret_cast<void *>( &data ), stride );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::writeMicromapsPropertyEXT" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT * pInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdCopyMicromapEXT( m_commandBuffer, reinterpret_cast<const VkCopyMicromapInfoEXT *>( pInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdCopyMicromapEXT( m_commandBuffer, reinterpret_cast<const VkCopyMicromapInfoEXT *>( &info ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyMicromapToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT * pInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdCopyMicromapToMemoryEXT( m_commandBuffer, reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( pInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyMicromapToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdCopyMicromapToMemoryEXT( m_commandBuffer, reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( &info ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT * pInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdCopyMemoryToMicromapEXT( m_commandBuffer, reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( pInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdCopyMemoryToMicromapEXT( m_commandBuffer, reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( &info ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::writeMicromapsPropertiesEXT( uint32_t micromapCount, const VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromaps, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdWriteMicromapsPropertiesEXT( m_commandBuffer, micromapCount, reinterpret_cast<const VkMicromapEXT *>( pMicromaps ), static_cast<VkQueryType>( queryType ), static_cast<VkQueryPool>( queryPool ), firstQuery );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdWriteMicromapsPropertiesEXT( m_commandBuffer, micromaps.size(), reinterpret_cast<const VkMicromapEXT *>( micromaps.data() ), static_cast<VkQueryType>( queryType ), static_cast<VkQueryPool>( queryPool ), firstQuery );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getMicromapCompatibilityEXT( const VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT * pVersionInfo, VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR * pCompatibility, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetDeviceMicromapCompatibilityEXT( m_device, reinterpret_cast<const VkMicromapVersionInfoEXT *>( pVersionInfo ), reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( pCompatibility ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR Device::getMicromapCompatibilityEXT( const VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT & versionInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR compatibility;
+    d.vkGetDeviceMicromapCompatibilityEXT( m_device, reinterpret_cast<const VkMicromapVersionInfoEXT *>( &versionInfo ), reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( &compatibility ) );
+    
+    
+    return compatibility;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getMicromapBuildSizesEXT( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pBuildInfo, VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT * pSizeInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetMicromapBuildSizesEXT( m_device, static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ), reinterpret_cast<const VkMicromapBuildInfoEXT *>( pBuildInfo ), reinterpret_cast<VkMicromapBuildSizesInfoEXT *>( pSizeInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT Device::getMicromapBuildSizesEXT( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT & buildInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT sizeInfo;
+    d.vkGetMicromapBuildSizesEXT( m_device, static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ), reinterpret_cast<const VkMicromapBuildInfoEXT *>( &buildInfo ), reinterpret_cast<VkMicromapBuildSizesInfoEXT *>( &sizeInfo ) );
+    
+    
+    return sizeInfo;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_HUAWEI_cluster_culling_shader ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawClusterHUAWEI( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDrawClusterHUAWEI( m_commandBuffer, groupCountX, groupCountY, groupCountZ );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawClusterIndirectHUAWEI( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDrawClusterIndirectHUAWEI( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ) );
+  }
+
+  //=== VK_EXT_pageable_device_local_memory ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::setMemoryPriorityEXT( VULKAN_HPP_NAMESPACE::DeviceMemory memory, float priority, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkSetDeviceMemoryPriorityEXT( m_device, static_cast<VkDeviceMemory>( memory ), priority );
+  }
+
+  //=== VK_KHR_maintenance4 ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements * pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetDeviceBufferMemoryRequirementsKHR( m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
+    d.vkGetDeviceBufferMemoryRequirementsKHR( m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+    
+    
+    return memoryRequirements;
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
+    d.vkGetDeviceBufferMemoryRequirementsKHR( m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+    
+    
+    return structureChain;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetDeviceImageMemoryRequirementsKHR( m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
+    d.vkGetDeviceImageMemoryRequirementsKHR( m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+    
+    
+    return memoryRequirements;
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
+    d.vkGetDeviceImageMemoryRequirementsKHR( m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+    
+    
+    return structureChain;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo, uint32_t * pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetDeviceImageSparseMemoryRequirementsKHR( m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ), pSparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename SparseImageMemoryRequirements2Allocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> Device::getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements;
+    uint32_t sparseMemoryRequirementCount;
+    d.vkGetDeviceImageSparseMemoryRequirementsKHR( m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, nullptr );
+    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    d.vkGetDeviceImageSparseMemoryRequirementsKHR( m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
+    
+    VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
+    if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
+    {
+      sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    }
+    return sparseMemoryRequirements;
+  }
+
+  template <typename SparseImageMemoryRequirements2Allocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, SparseImageMemoryRequirements2>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> Device::getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements( sparseImageMemoryRequirements2Allocator );
+    uint32_t sparseMemoryRequirementCount;
+    d.vkGetDeviceImageSparseMemoryRequirementsKHR( m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, nullptr );
+    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    d.vkGetDeviceImageSparseMemoryRequirementsKHR( m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
+    
+    VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
+    if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
+    {
+      sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    }
+    return sparseMemoryRequirements;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_VALVE_descriptor_set_host_mapping ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutHostMappingInfoVALVE( const VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE * pBindingReference, VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE * pHostMapping, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetDescriptorSetLayoutHostMappingInfoVALVE( m_device, reinterpret_cast<const VkDescriptorSetBindingReferenceVALVE *>( pBindingReference ), reinterpret_cast<VkDescriptorSetLayoutHostMappingInfoVALVE *>( pHostMapping ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE Device::getDescriptorSetLayoutHostMappingInfoVALVE( const VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE & bindingReference, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE hostMapping;
+    d.vkGetDescriptorSetLayoutHostMappingInfoVALVE( m_device, reinterpret_cast<const VkDescriptorSetBindingReferenceVALVE *>( &bindingReference ), reinterpret_cast<VkDescriptorSetLayoutHostMappingInfoVALVE *>( &hostMapping ) );
+    
+    
+    return hostMapping;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getDescriptorSetHostMappingVALVE( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, void ** ppData, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetDescriptorSetHostMappingVALVE( m_device, static_cast<VkDescriptorSet>( descriptorSet ), ppData );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE void * Device::getDescriptorSetHostMappingVALVE( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    void * pData;
+    d.vkGetDescriptorSetHostMappingVALVE( m_device, static_cast<VkDescriptorSet>( descriptorSet ), &pData );
+    
+    
+    return pData;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_NV_copy_memory_indirect ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyMemoryIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, uint32_t copyCount, uint32_t stride, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdCopyMemoryIndirectNV( m_commandBuffer, static_cast<VkDeviceAddress>( copyBufferAddress ), copyCount, stride );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToImageIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, uint32_t copyCount, uint32_t stride, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, const VULKAN_HPP_NAMESPACE::ImageSubresourceLayers * pImageSubresources, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdCopyMemoryToImageIndirectNV( m_commandBuffer, static_cast<VkDeviceAddress>( copyBufferAddress ), copyCount, stride, static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), reinterpret_cast<const VkImageSubresourceLayers *>( pImageSubresources ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToImageIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, uint32_t stride, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceLayers> const & imageSubresources, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdCopyMemoryToImageIndirectNV( m_commandBuffer, static_cast<VkDeviceAddress>( copyBufferAddress ), imageSubresources.size(), stride, static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), reinterpret_cast<const VkImageSubresourceLayers *>( imageSubresources.data() ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_NV_memory_decompression ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::decompressMemoryNV( uint32_t decompressRegionCount, const VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV * pDecompressMemoryRegions, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDecompressMemoryNV( m_commandBuffer, decompressRegionCount, reinterpret_cast<const VkDecompressMemoryRegionNV *>( pDecompressMemoryRegions ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::decompressMemoryNV( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV> const & decompressMemoryRegions, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdDecompressMemoryNV( m_commandBuffer, decompressMemoryRegions.size(), reinterpret_cast<const VkDecompressMemoryRegionNV *>( decompressMemoryRegions.data() ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::decompressMemoryIndirectCountNV( VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsAddress, VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsCountAddress, uint32_t stride, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDecompressMemoryIndirectCountNV( m_commandBuffer, static_cast<VkDeviceAddress>( indirectCommandsAddress ), static_cast<VkDeviceAddress>( indirectCommandsCountAddress ), stride );
+  }
+
+  //=== VK_EXT_extended_dynamic_state3 ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setTessellationDomainOriginEXT( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetTessellationDomainOriginEXT( m_commandBuffer, static_cast<VkTessellationDomainOrigin>( domainOrigin ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setDepthClampEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetDepthClampEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthClampEnable ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setPolygonModeEXT( VULKAN_HPP_NAMESPACE::PolygonMode polygonMode, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetPolygonModeEXT( m_commandBuffer, static_cast<VkPolygonMode>( polygonMode ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setRasterizationSamplesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetRasterizationSamplesEXT( m_commandBuffer, static_cast<VkSampleCountFlagBits>( rasterizationSamples ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setSampleMaskEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetSampleMaskEXT( m_commandBuffer, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<const VkSampleMask *>( pSampleMask ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setSampleMaskEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SampleMask> const & sampleMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdSetSampleMaskEXT( m_commandBuffer, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<const VkSampleMask *>( sampleMask.data() ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setAlphaToCoverageEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetAlphaToCoverageEnableEXT( m_commandBuffer, static_cast<VkBool32>( alphaToCoverageEnable ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setAlphaToOneEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetAlphaToOneEnableEXT( m_commandBuffer, static_cast<VkBool32>( alphaToOneEnable ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetLogicOpEnableEXT( m_commandBuffer, static_cast<VkBool32>( logicOpEnable ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setColorBlendEnableEXT( uint32_t firstAttachment, uint32_t attachmentCount, const VULKAN_HPP_NAMESPACE::Bool32 * pColorBlendEnables, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetColorBlendEnableEXT( m_commandBuffer, firstAttachment, attachmentCount, reinterpret_cast<const VkBool32 *>( pColorBlendEnables ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setColorBlendEnableEXT( uint32_t firstAttachment, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorBlendEnables, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdSetColorBlendEnableEXT( m_commandBuffer, firstAttachment, colorBlendEnables.size(), reinterpret_cast<const VkBool32 *>( colorBlendEnables.data() ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setColorBlendEquationEXT( uint32_t firstAttachment, uint32_t attachmentCount, const VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT * pColorBlendEquations, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetColorBlendEquationEXT( m_commandBuffer, firstAttachment, attachmentCount, reinterpret_cast<const VkColorBlendEquationEXT *>( pColorBlendEquations ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setColorBlendEquationEXT( uint32_t firstAttachment, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT> const & colorBlendEquations, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdSetColorBlendEquationEXT( m_commandBuffer, firstAttachment, colorBlendEquations.size(), reinterpret_cast<const VkColorBlendEquationEXT *>( colorBlendEquations.data() ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setColorWriteMaskEXT( uint32_t firstAttachment, uint32_t attachmentCount, const VULKAN_HPP_NAMESPACE::ColorComponentFlags * pColorWriteMasks, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetColorWriteMaskEXT( m_commandBuffer, firstAttachment, attachmentCount, reinterpret_cast<const VkColorComponentFlags *>( pColorWriteMasks ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setColorWriteMaskEXT( uint32_t firstAttachment, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorComponentFlags> const & colorWriteMasks, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdSetColorWriteMaskEXT( m_commandBuffer, firstAttachment, colorWriteMasks.size(), reinterpret_cast<const VkColorComponentFlags *>( colorWriteMasks.data() ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setRasterizationStreamEXT( uint32_t rasterizationStream, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetRasterizationStreamEXT( m_commandBuffer, rasterizationStream );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setConservativeRasterizationModeEXT( VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetConservativeRasterizationModeEXT( m_commandBuffer, static_cast<VkConservativeRasterizationModeEXT>( conservativeRasterizationMode ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setExtraPrimitiveOverestimationSizeEXT( float extraPrimitiveOverestimationSize, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetExtraPrimitiveOverestimationSizeEXT( m_commandBuffer, extraPrimitiveOverestimationSize );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setDepthClipEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetDepthClipEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthClipEnable ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetSampleLocationsEnableEXT( m_commandBuffer, static_cast<VkBool32>( sampleLocationsEnable ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setColorBlendAdvancedEXT( uint32_t firstAttachment, uint32_t attachmentCount, const VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT * pColorBlendAdvanced, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetColorBlendAdvancedEXT( m_commandBuffer, firstAttachment, attachmentCount, reinterpret_cast<const VkColorBlendAdvancedEXT *>( pColorBlendAdvanced ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setColorBlendAdvancedEXT( uint32_t firstAttachment, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT> const & colorBlendAdvanced, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdSetColorBlendAdvancedEXT( m_commandBuffer, firstAttachment, colorBlendAdvanced.size(), reinterpret_cast<const VkColorBlendAdvancedEXT *>( colorBlendAdvanced.data() ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setProvokingVertexModeEXT( VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetProvokingVertexModeEXT( m_commandBuffer, static_cast<VkProvokingVertexModeEXT>( provokingVertexMode ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setLineRasterizationModeEXT( VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetLineRasterizationModeEXT( m_commandBuffer, static_cast<VkLineRasterizationModeEXT>( lineRasterizationMode ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetLineStippleEnableEXT( m_commandBuffer, static_cast<VkBool32>( stippledLineEnable ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setDepthClipNegativeOneToOneEXT( VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetDepthClipNegativeOneToOneEXT( m_commandBuffer, static_cast<VkBool32>( negativeOneToOne ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingEnableNV( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetViewportWScalingEnableNV( m_commandBuffer, static_cast<VkBool32>( viewportWScalingEnable ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setViewportSwizzleNV( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * pViewportSwizzles, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetViewportSwizzleNV( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewportSwizzleNV *>( pViewportSwizzles ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setViewportSwizzleNV( uint32_t firstViewport, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV> const & viewportSwizzles, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdSetViewportSwizzleNV( m_commandBuffer, firstViewport, viewportSwizzles.size(), reinterpret_cast<const VkViewportSwizzleNV *>( viewportSwizzles.data() ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setCoverageToColorEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetCoverageToColorEnableNV( m_commandBuffer, static_cast<VkBool32>( coverageToColorEnable ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setCoverageToColorLocationNV( uint32_t coverageToColorLocation, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetCoverageToColorLocationNV( m_commandBuffer, coverageToColorLocation );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setCoverageModulationModeNV( VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetCoverageModulationModeNV( m_commandBuffer, static_cast<VkCoverageModulationModeNV>( coverageModulationMode ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setCoverageModulationTableEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetCoverageModulationTableEnableNV( m_commandBuffer, static_cast<VkBool32>( coverageModulationTableEnable ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setCoverageModulationTableNV( uint32_t coverageModulationTableCount, const float * pCoverageModulationTable, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetCoverageModulationTableNV( m_commandBuffer, coverageModulationTableCount, pCoverageModulationTable );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setCoverageModulationTableNV( VULKAN_HPP_NAMESPACE::ArrayProxy<const float> const & coverageModulationTable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdSetCoverageModulationTableNV( m_commandBuffer, coverageModulationTable.size(), coverageModulationTable.data() );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setShadingRateImageEnableNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetShadingRateImageEnableNV( m_commandBuffer, static_cast<VkBool32>( shadingRateImageEnable ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setRepresentativeFragmentTestEnableNV( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetRepresentativeFragmentTestEnableNV( m_commandBuffer, static_cast<VkBool32>( representativeFragmentTestEnable ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setCoverageReductionModeNV( VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetCoverageReductionModeNV( m_commandBuffer, static_cast<VkCoverageReductionModeNV>( coverageReductionMode ) );
+  }
+
+  //=== VK_EXT_shader_module_identifier ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getShaderModuleIdentifierEXT( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT * pIdentifier, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetShaderModuleIdentifierEXT( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<VkShaderModuleIdentifierEXT *>( pIdentifier ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT Device::getShaderModuleIdentifierEXT( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT identifier;
+    d.vkGetShaderModuleIdentifierEXT( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<VkShaderModuleIdentifierEXT *>( &identifier ) );
+    
+    
+    return identifier;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getShaderModuleCreateInfoIdentifierEXT( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo * pCreateInfo, VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT * pIdentifier, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetShaderModuleCreateInfoIdentifierEXT( m_device, reinterpret_cast<const VkShaderModuleCreateInfo *>( pCreateInfo ), reinterpret_cast<VkShaderModuleIdentifierEXT *>( pIdentifier ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT Device::getShaderModuleCreateInfoIdentifierEXT( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT identifier;
+    d.vkGetShaderModuleCreateInfoIdentifierEXT( m_device, reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ), reinterpret_cast<VkShaderModuleIdentifierEXT *>( &identifier ) );
+    
+    
+    return identifier;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_NV_optical_flow ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV * pOpticalFlowImageFormatInfo, uint32_t * pFormatCount, VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV * pImageFormatProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( m_physicalDevice, reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( pOpticalFlowImageFormatInfo ), pFormatCount, reinterpret_cast<VkOpticalFlowImageFormatPropertiesNV *>( pImageFormatProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename OpticalFlowImageFormatPropertiesNVAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV, OpticalFlowImageFormatPropertiesNVAllocator>>::type PhysicalDevice::getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV, OpticalFlowImageFormatPropertiesNVAllocator> imageFormatProperties;
+    uint32_t formatCount;
+    VkResult result;
+    do
+    {
+      result = d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( m_physicalDevice, reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( &opticalFlowImageFormatInfo ), &formatCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && formatCount )
+      {
+        imageFormatProperties.resize( formatCount );
+        result = d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( m_physicalDevice, reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( &opticalFlowImageFormatInfo ), &formatCount, reinterpret_cast<VkOpticalFlowImageFormatPropertiesNV *>( imageFormatProperties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getOpticalFlowImageFormatsNV" );
+    VULKAN_HPP_ASSERT( formatCount <= imageFormatProperties.size() );
+    if ( formatCount < imageFormatProperties.size() )
+    {
+      imageFormatProperties.resize( formatCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), imageFormatProperties );
+  }
+
+  template <typename OpticalFlowImageFormatPropertiesNVAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, OpticalFlowImageFormatPropertiesNV>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV, OpticalFlowImageFormatPropertiesNVAllocator>>::type PhysicalDevice::getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo, OpticalFlowImageFormatPropertiesNVAllocator & opticalFlowImageFormatPropertiesNVAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV, OpticalFlowImageFormatPropertiesNVAllocator> imageFormatProperties( opticalFlowImageFormatPropertiesNVAllocator );
+    uint32_t formatCount;
+    VkResult result;
+    do
+    {
+      result = d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( m_physicalDevice, reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( &opticalFlowImageFormatInfo ), &formatCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && formatCount )
+      {
+        imageFormatProperties.resize( formatCount );
+        result = d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( m_physicalDevice, reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( &opticalFlowImageFormatInfo ), &formatCount, reinterpret_cast<VkOpticalFlowImageFormatPropertiesNV *>( imageFormatProperties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getOpticalFlowImageFormatsNV" );
+    VULKAN_HPP_ASSERT( formatCount <= imageFormatProperties.size() );
+    if ( formatCount < imageFormatProperties.size() )
+    {
+      imageFormatProperties.resize( formatCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), imageFormatProperties );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createOpticalFlowSessionNV( const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV * pSession, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateOpticalFlowSessionNV( m_device, reinterpret_cast<const VkOpticalFlowSessionCreateInfoNV *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkOpticalFlowSessionNV *>( pSession ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV>::type Device::createOpticalFlowSessionNV( const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session;
+    VkResult result = d.vkCreateOpticalFlowSessionNV( m_device, reinterpret_cast<const VkOpticalFlowSessionCreateInfoNV *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkOpticalFlowSessionNV *>( &session ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createOpticalFlowSessionNV" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), session );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV, Dispatch>>::type Device::createOpticalFlowSessionNVUnique( const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session;
+    VkResult result = d.vkCreateOpticalFlowSessionNV( m_device, reinterpret_cast<const VkOpticalFlowSessionCreateInfoNV *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkOpticalFlowSessionNV *>( &session ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createOpticalFlowSessionNVUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV, Dispatch>( session, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyOpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyOpticalFlowSessionNV( m_device, static_cast<VkOpticalFlowSessionNV>( session ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyOpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyOpticalFlowSessionNV( m_device, static_cast<VkOpticalFlowSessionNV>( session ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyOpticalFlowSessionNV( m_device, static_cast<VkOpticalFlowSessionNV>( session ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyOpticalFlowSessionNV( m_device, static_cast<VkOpticalFlowSessionNV>( session ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindOpticalFlowSessionImageNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV bindingPoint, VULKAN_HPP_NAMESPACE::ImageView view, VULKAN_HPP_NAMESPACE::ImageLayout layout, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkBindOpticalFlowSessionImageNV( m_device, static_cast<VkOpticalFlowSessionNV>( session ), static_cast<VkOpticalFlowSessionBindingPointNV>( bindingPoint ), static_cast<VkImageView>( view ), static_cast<VkImageLayout>( layout ) ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindOpticalFlowSessionImageNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV bindingPoint, VULKAN_HPP_NAMESPACE::ImageView view, VULKAN_HPP_NAMESPACE::ImageLayout layout, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkBindOpticalFlowSessionImageNV( m_device, static_cast<VkOpticalFlowSessionNV>( session ), static_cast<VkOpticalFlowSessionBindingPointNV>( bindingPoint ), static_cast<VkImageView>( view ), static_cast<VkImageLayout>( layout ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindOpticalFlowSessionImageNV" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::opticalFlowExecuteNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV * pExecuteInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdOpticalFlowExecuteNV( m_commandBuffer, static_cast<VkOpticalFlowSessionNV>( session ), reinterpret_cast<const VkOpticalFlowExecuteInfoNV *>( pExecuteInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::opticalFlowExecuteNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV & executeInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdOpticalFlowExecuteNV( m_commandBuffer, static_cast<VkOpticalFlowSessionNV>( session ), reinterpret_cast<const VkOpticalFlowExecuteInfoNV *>( &executeInfo ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_QCOM_tile_properties ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFramebufferTilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, uint32_t * pPropertiesCount, VULKAN_HPP_NAMESPACE::TilePropertiesQCOM * pProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetFramebufferTilePropertiesQCOM( m_device, static_cast<VkFramebuffer>( framebuffer ), pPropertiesCount, reinterpret_cast<VkTilePropertiesQCOM *>( pProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename TilePropertiesQCOMAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM, TilePropertiesQCOMAllocator>>::type Device::getFramebufferTilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM, TilePropertiesQCOMAllocator> properties;
+    uint32_t propertiesCount;
+    VkResult result;
+    do
+    {
+      result = d.vkGetFramebufferTilePropertiesQCOM( m_device, static_cast<VkFramebuffer>( framebuffer ), &propertiesCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && propertiesCount )
+      {
+        properties.resize( propertiesCount );
+        result = d.vkGetFramebufferTilePropertiesQCOM( m_device, static_cast<VkFramebuffer>( framebuffer ), &propertiesCount, reinterpret_cast<VkTilePropertiesQCOM *>( properties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    
+    VULKAN_HPP_ASSERT( propertiesCount <= properties.size() );
+    if ( propertiesCount < properties.size() )
+    {
+      properties.resize( propertiesCount );
+    }
+    return properties;
+  }
+
+  template <typename TilePropertiesQCOMAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, TilePropertiesQCOM>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM, TilePropertiesQCOMAllocator>>::type Device::getFramebufferTilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, TilePropertiesQCOMAllocator & tilePropertiesQCOMAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM, TilePropertiesQCOMAllocator> properties( tilePropertiesQCOMAllocator );
+    uint32_t propertiesCount;
+    VkResult result;
+    do
+    {
+      result = d.vkGetFramebufferTilePropertiesQCOM( m_device, static_cast<VkFramebuffer>( framebuffer ), &propertiesCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && propertiesCount )
+      {
+        properties.resize( propertiesCount );
+        result = d.vkGetFramebufferTilePropertiesQCOM( m_device, static_cast<VkFramebuffer>( framebuffer ), &propertiesCount, reinterpret_cast<VkTilePropertiesQCOM *>( properties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    
+    VULKAN_HPP_ASSERT( propertiesCount <= properties.size() );
+    if ( propertiesCount < properties.size() )
+    {
+      properties.resize( propertiesCount );
+    }
+    return properties;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::getDynamicRenderingTilePropertiesQCOM( const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo, VULKAN_HPP_NAMESPACE::TilePropertiesQCOM * pProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetDynamicRenderingTilePropertiesQCOM( m_device, reinterpret_cast<const VkRenderingInfo *>( pRenderingInfo ), reinterpret_cast<VkTilePropertiesQCOM *>( pProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::TilePropertiesQCOM Device::getDynamicRenderingTilePropertiesQCOM( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::TilePropertiesQCOM properties;
+    d.vkGetDynamicRenderingTilePropertiesQCOM( m_device, reinterpret_cast<const VkRenderingInfo *>( &renderingInfo ), reinterpret_cast<VkTilePropertiesQCOM *>( &properties ) );
+    
+    
+    return properties;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+}   // namespace VULKAN_HPP_NAMESPACE
+#endif
diff --git a/host/libs/graphics_detector/include/vulkan/vulkan_ggp.h b/host/libs/graphics_detector/include/vulkan/vulkan_ggp.h
new file mode 100644
index 0000000..19dfd22
--- /dev/null
+++ b/host/libs/graphics_detector/include/vulkan/vulkan_ggp.h
@@ -0,0 +1,58 @@
+#ifndef VULKAN_GGP_H_
+#define VULKAN_GGP_H_ 1
+
+/*
+** Copyright 2015-2022 The Khronos Group Inc.
+**
+** SPDX-License-Identifier: Apache-2.0
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+#define VK_GGP_stream_descriptor_surface 1
+#define VK_GGP_STREAM_DESCRIPTOR_SURFACE_SPEC_VERSION 1
+#define VK_GGP_STREAM_DESCRIPTOR_SURFACE_EXTENSION_NAME "VK_GGP_stream_descriptor_surface"
+typedef VkFlags VkStreamDescriptorSurfaceCreateFlagsGGP;
+typedef struct VkStreamDescriptorSurfaceCreateInfoGGP {
+    VkStructureType                            sType;
+    const void*                                pNext;
+    VkStreamDescriptorSurfaceCreateFlagsGGP    flags;
+    GgpStreamDescriptor                        streamDescriptor;
+} VkStreamDescriptorSurfaceCreateInfoGGP;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateStreamDescriptorSurfaceGGP)(VkInstance instance, const VkStreamDescriptorSurfaceCreateInfoGGP* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateStreamDescriptorSurfaceGGP(
+    VkInstance                                  instance,
+    const VkStreamDescriptorSurfaceCreateInfoGGP* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+#endif
+
+
+#define VK_GGP_frame_token 1
+#define VK_GGP_FRAME_TOKEN_SPEC_VERSION   1
+#define VK_GGP_FRAME_TOKEN_EXTENSION_NAME "VK_GGP_frame_token"
+typedef struct VkPresentFrameTokenGGP {
+    VkStructureType    sType;
+    const void*        pNext;
+    GgpFrameToken      frameToken;
+} VkPresentFrameTokenGGP;
+
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/host/libs/graphics_detector/include/vulkan/vulkan_handles.hpp b/host/libs/graphics_detector/include/vulkan/vulkan_handles.hpp
new file mode 100644
index 0000000..0f15f0c
--- /dev/null
+++ b/host/libs/graphics_detector/include/vulkan/vulkan_handles.hpp
@@ -0,0 +1,11800 @@
+// Copyright 2015-2022 The Khronos Group Inc.
+// 
+// SPDX-License-Identifier: Apache-2.0 OR MIT
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#ifndef VULKAN_HANDLES_HPP
+#  define VULKAN_HANDLES_HPP
+
+namespace VULKAN_HPP_NAMESPACE
+{
+
+  //===================================
+  //=== STRUCT forward declarations ===
+  //===================================
+
+
+  //=== VK_VERSION_1_0 ===
+  struct Extent2D;
+  struct Extent3D;
+  struct Offset2D;
+  struct Offset3D;
+  struct Rect2D;
+  struct BaseInStructure;
+  struct BaseOutStructure;
+  struct BufferMemoryBarrier;
+  struct DispatchIndirectCommand;
+  struct DrawIndexedIndirectCommand;
+  struct DrawIndirectCommand;
+  struct ImageMemoryBarrier;
+  struct MemoryBarrier;
+  struct PipelineCacheHeaderVersionOne;
+  struct AllocationCallbacks;
+  struct ApplicationInfo;
+  struct FormatProperties;
+  struct ImageFormatProperties;
+  struct InstanceCreateInfo;
+  struct MemoryHeap;
+  struct MemoryType;
+  struct PhysicalDeviceFeatures;
+  struct PhysicalDeviceLimits;
+  struct PhysicalDeviceMemoryProperties;
+  struct PhysicalDeviceProperties;
+  struct PhysicalDeviceSparseProperties;
+  struct QueueFamilyProperties;
+  struct DeviceCreateInfo;
+  struct DeviceQueueCreateInfo;
+  struct ExtensionProperties;
+  struct LayerProperties;
+  struct SubmitInfo;
+  struct MappedMemoryRange;
+  struct MemoryAllocateInfo;
+  struct MemoryRequirements;
+  struct BindSparseInfo;
+  struct ImageSubresource;
+  struct SparseBufferMemoryBindInfo;
+  struct SparseImageFormatProperties;
+  struct SparseImageMemoryBind;
+  struct SparseImageMemoryBindInfo;
+  struct SparseImageMemoryRequirements;
+  struct SparseImageOpaqueMemoryBindInfo;
+  struct SparseMemoryBind;
+  struct FenceCreateInfo;
+  struct SemaphoreCreateInfo;
+  struct EventCreateInfo;
+  struct QueryPoolCreateInfo;
+  struct BufferCreateInfo;
+  struct BufferViewCreateInfo;
+  struct ImageCreateInfo;
+  struct SubresourceLayout;
+  struct ComponentMapping;
+  struct ImageSubresourceRange;
+  struct ImageViewCreateInfo;
+  struct ShaderModuleCreateInfo;
+  struct PipelineCacheCreateInfo;
+  struct ComputePipelineCreateInfo;
+  struct GraphicsPipelineCreateInfo;
+  struct PipelineColorBlendAttachmentState;
+  struct PipelineColorBlendStateCreateInfo;
+  struct PipelineDepthStencilStateCreateInfo;
+  struct PipelineDynamicStateCreateInfo;
+  struct PipelineInputAssemblyStateCreateInfo;
+  struct PipelineMultisampleStateCreateInfo;
+  struct PipelineRasterizationStateCreateInfo;
+  struct PipelineShaderStageCreateInfo;
+  struct PipelineTessellationStateCreateInfo;
+  struct PipelineVertexInputStateCreateInfo;
+  struct PipelineViewportStateCreateInfo;
+  struct SpecializationInfo;
+  struct SpecializationMapEntry;
+  struct StencilOpState;
+  struct VertexInputAttributeDescription;
+  struct VertexInputBindingDescription;
+  struct Viewport;
+  struct PipelineLayoutCreateInfo;
+  struct PushConstantRange;
+  struct SamplerCreateInfo;
+  struct CopyDescriptorSet;
+  struct DescriptorBufferInfo;
+  struct DescriptorImageInfo;
+  struct DescriptorPoolCreateInfo;
+  struct DescriptorPoolSize;
+  struct DescriptorSetAllocateInfo;
+  struct DescriptorSetLayoutBinding;
+  struct DescriptorSetLayoutCreateInfo;
+  struct WriteDescriptorSet;
+  struct AttachmentDescription;
+  struct AttachmentReference;
+  struct FramebufferCreateInfo;
+  struct RenderPassCreateInfo;
+  struct SubpassDependency;
+  struct SubpassDescription;
+  struct CommandPoolCreateInfo;
+  struct CommandBufferAllocateInfo;
+  struct CommandBufferBeginInfo;
+  struct CommandBufferInheritanceInfo;
+  struct BufferCopy;
+  struct BufferImageCopy;
+  struct ClearAttachment;
+  union ClearColorValue;
+  struct ClearDepthStencilValue;
+  struct ClearRect;
+  union ClearValue;
+  struct ImageBlit;
+  struct ImageCopy;
+  struct ImageResolve;
+  struct ImageSubresourceLayers;
+  struct RenderPassBeginInfo;
+
+  //=== VK_VERSION_1_1 ===
+  struct PhysicalDeviceSubgroupProperties;
+  struct BindBufferMemoryInfo;
+  using BindBufferMemoryInfoKHR = BindBufferMemoryInfo;
+  struct BindImageMemoryInfo;
+  using BindImageMemoryInfoKHR = BindImageMemoryInfo;
+  struct PhysicalDevice16BitStorageFeatures;
+  using PhysicalDevice16BitStorageFeaturesKHR = PhysicalDevice16BitStorageFeatures;
+  struct MemoryDedicatedRequirements;
+  using MemoryDedicatedRequirementsKHR = MemoryDedicatedRequirements;
+  struct MemoryDedicatedAllocateInfo;
+  using MemoryDedicatedAllocateInfoKHR = MemoryDedicatedAllocateInfo;
+  struct MemoryAllocateFlagsInfo;
+  using MemoryAllocateFlagsInfoKHR = MemoryAllocateFlagsInfo;
+  struct DeviceGroupRenderPassBeginInfo;
+  using DeviceGroupRenderPassBeginInfoKHR = DeviceGroupRenderPassBeginInfo;
+  struct DeviceGroupCommandBufferBeginInfo;
+  using DeviceGroupCommandBufferBeginInfoKHR = DeviceGroupCommandBufferBeginInfo;
+  struct DeviceGroupSubmitInfo;
+  using DeviceGroupSubmitInfoKHR = DeviceGroupSubmitInfo;
+  struct DeviceGroupBindSparseInfo;
+  using DeviceGroupBindSparseInfoKHR = DeviceGroupBindSparseInfo;
+  struct BindBufferMemoryDeviceGroupInfo;
+  using BindBufferMemoryDeviceGroupInfoKHR = BindBufferMemoryDeviceGroupInfo;
+  struct BindImageMemoryDeviceGroupInfo;
+  using BindImageMemoryDeviceGroupInfoKHR = BindImageMemoryDeviceGroupInfo;
+  struct PhysicalDeviceGroupProperties;
+  using PhysicalDeviceGroupPropertiesKHR = PhysicalDeviceGroupProperties;
+  struct DeviceGroupDeviceCreateInfo;
+  using DeviceGroupDeviceCreateInfoKHR = DeviceGroupDeviceCreateInfo;
+  struct BufferMemoryRequirementsInfo2;
+  using BufferMemoryRequirementsInfo2KHR = BufferMemoryRequirementsInfo2;
+  struct ImageMemoryRequirementsInfo2;
+  using ImageMemoryRequirementsInfo2KHR = ImageMemoryRequirementsInfo2;
+  struct ImageSparseMemoryRequirementsInfo2;
+  using ImageSparseMemoryRequirementsInfo2KHR = ImageSparseMemoryRequirementsInfo2;
+  struct MemoryRequirements2;
+  using MemoryRequirements2KHR = MemoryRequirements2;
+  struct SparseImageMemoryRequirements2;
+  using SparseImageMemoryRequirements2KHR = SparseImageMemoryRequirements2;
+  struct PhysicalDeviceFeatures2;
+  using PhysicalDeviceFeatures2KHR = PhysicalDeviceFeatures2;
+  struct PhysicalDeviceProperties2;
+  using PhysicalDeviceProperties2KHR = PhysicalDeviceProperties2;
+  struct FormatProperties2;
+  using FormatProperties2KHR = FormatProperties2;
+  struct ImageFormatProperties2;
+  using ImageFormatProperties2KHR = ImageFormatProperties2;
+  struct PhysicalDeviceImageFormatInfo2;
+  using PhysicalDeviceImageFormatInfo2KHR = PhysicalDeviceImageFormatInfo2;
+  struct QueueFamilyProperties2;
+  using QueueFamilyProperties2KHR = QueueFamilyProperties2;
+  struct PhysicalDeviceMemoryProperties2;
+  using PhysicalDeviceMemoryProperties2KHR = PhysicalDeviceMemoryProperties2;
+  struct SparseImageFormatProperties2;
+  using SparseImageFormatProperties2KHR = SparseImageFormatProperties2;
+  struct PhysicalDeviceSparseImageFormatInfo2;
+  using PhysicalDeviceSparseImageFormatInfo2KHR = PhysicalDeviceSparseImageFormatInfo2;
+  struct PhysicalDevicePointClippingProperties;
+  using PhysicalDevicePointClippingPropertiesKHR = PhysicalDevicePointClippingProperties;
+  struct RenderPassInputAttachmentAspectCreateInfo;
+  using RenderPassInputAttachmentAspectCreateInfoKHR = RenderPassInputAttachmentAspectCreateInfo;
+  struct InputAttachmentAspectReference;
+  using InputAttachmentAspectReferenceKHR = InputAttachmentAspectReference;
+  struct ImageViewUsageCreateInfo;
+  using ImageViewUsageCreateInfoKHR = ImageViewUsageCreateInfo;
+  struct PipelineTessellationDomainOriginStateCreateInfo;
+  using PipelineTessellationDomainOriginStateCreateInfoKHR = PipelineTessellationDomainOriginStateCreateInfo;
+  struct RenderPassMultiviewCreateInfo;
+  using RenderPassMultiviewCreateInfoKHR = RenderPassMultiviewCreateInfo;
+  struct PhysicalDeviceMultiviewFeatures;
+  using PhysicalDeviceMultiviewFeaturesKHR = PhysicalDeviceMultiviewFeatures;
+  struct PhysicalDeviceMultiviewProperties;
+  using PhysicalDeviceMultiviewPropertiesKHR = PhysicalDeviceMultiviewProperties;
+  struct PhysicalDeviceVariablePointersFeatures;
+  using PhysicalDeviceVariablePointerFeatures = PhysicalDeviceVariablePointersFeatures;
+  using PhysicalDeviceVariablePointerFeaturesKHR = PhysicalDeviceVariablePointersFeatures;
+  using PhysicalDeviceVariablePointersFeaturesKHR = PhysicalDeviceVariablePointersFeatures;
+  struct PhysicalDeviceProtectedMemoryFeatures;
+  struct PhysicalDeviceProtectedMemoryProperties;
+  struct DeviceQueueInfo2;
+  struct ProtectedSubmitInfo;
+  struct SamplerYcbcrConversionCreateInfo;
+  using SamplerYcbcrConversionCreateInfoKHR = SamplerYcbcrConversionCreateInfo;
+  struct SamplerYcbcrConversionInfo;
+  using SamplerYcbcrConversionInfoKHR = SamplerYcbcrConversionInfo;
+  struct BindImagePlaneMemoryInfo;
+  using BindImagePlaneMemoryInfoKHR = BindImagePlaneMemoryInfo;
+  struct ImagePlaneMemoryRequirementsInfo;
+  using ImagePlaneMemoryRequirementsInfoKHR = ImagePlaneMemoryRequirementsInfo;
+  struct PhysicalDeviceSamplerYcbcrConversionFeatures;
+  using PhysicalDeviceSamplerYcbcrConversionFeaturesKHR = PhysicalDeviceSamplerYcbcrConversionFeatures;
+  struct SamplerYcbcrConversionImageFormatProperties;
+  using SamplerYcbcrConversionImageFormatPropertiesKHR = SamplerYcbcrConversionImageFormatProperties;
+  struct DescriptorUpdateTemplateEntry;
+  using DescriptorUpdateTemplateEntryKHR = DescriptorUpdateTemplateEntry;
+  struct DescriptorUpdateTemplateCreateInfo;
+  using DescriptorUpdateTemplateCreateInfoKHR = DescriptorUpdateTemplateCreateInfo;
+  struct ExternalMemoryProperties;
+  using ExternalMemoryPropertiesKHR = ExternalMemoryProperties;
+  struct PhysicalDeviceExternalImageFormatInfo;
+  using PhysicalDeviceExternalImageFormatInfoKHR = PhysicalDeviceExternalImageFormatInfo;
+  struct ExternalImageFormatProperties;
+  using ExternalImageFormatPropertiesKHR = ExternalImageFormatProperties;
+  struct PhysicalDeviceExternalBufferInfo;
+  using PhysicalDeviceExternalBufferInfoKHR = PhysicalDeviceExternalBufferInfo;
+  struct ExternalBufferProperties;
+  using ExternalBufferPropertiesKHR = ExternalBufferProperties;
+  struct PhysicalDeviceIDProperties;
+  using PhysicalDeviceIDPropertiesKHR = PhysicalDeviceIDProperties;
+  struct ExternalMemoryImageCreateInfo;
+  using ExternalMemoryImageCreateInfoKHR = ExternalMemoryImageCreateInfo;
+  struct ExternalMemoryBufferCreateInfo;
+  using ExternalMemoryBufferCreateInfoKHR = ExternalMemoryBufferCreateInfo;
+  struct ExportMemoryAllocateInfo;
+  using ExportMemoryAllocateInfoKHR = ExportMemoryAllocateInfo;
+  struct PhysicalDeviceExternalFenceInfo;
+  using PhysicalDeviceExternalFenceInfoKHR = PhysicalDeviceExternalFenceInfo;
+  struct ExternalFenceProperties;
+  using ExternalFencePropertiesKHR = ExternalFenceProperties;
+  struct ExportFenceCreateInfo;
+  using ExportFenceCreateInfoKHR = ExportFenceCreateInfo;
+  struct ExportSemaphoreCreateInfo;
+  using ExportSemaphoreCreateInfoKHR = ExportSemaphoreCreateInfo;
+  struct PhysicalDeviceExternalSemaphoreInfo;
+  using PhysicalDeviceExternalSemaphoreInfoKHR = PhysicalDeviceExternalSemaphoreInfo;
+  struct ExternalSemaphoreProperties;
+  using ExternalSemaphorePropertiesKHR = ExternalSemaphoreProperties;
+  struct PhysicalDeviceMaintenance3Properties;
+  using PhysicalDeviceMaintenance3PropertiesKHR = PhysicalDeviceMaintenance3Properties;
+  struct DescriptorSetLayoutSupport;
+  using DescriptorSetLayoutSupportKHR = DescriptorSetLayoutSupport;
+  struct PhysicalDeviceShaderDrawParametersFeatures;
+  using PhysicalDeviceShaderDrawParameterFeatures = PhysicalDeviceShaderDrawParametersFeatures;
+
+  //=== VK_VERSION_1_2 ===
+  struct PhysicalDeviceVulkan11Features;
+  struct PhysicalDeviceVulkan11Properties;
+  struct PhysicalDeviceVulkan12Features;
+  struct PhysicalDeviceVulkan12Properties;
+  struct ImageFormatListCreateInfo;
+  using ImageFormatListCreateInfoKHR = ImageFormatListCreateInfo;
+  struct RenderPassCreateInfo2;
+  using RenderPassCreateInfo2KHR = RenderPassCreateInfo2;
+  struct AttachmentDescription2;
+  using AttachmentDescription2KHR = AttachmentDescription2;
+  struct AttachmentReference2;
+  using AttachmentReference2KHR = AttachmentReference2;
+  struct SubpassDescription2;
+  using SubpassDescription2KHR = SubpassDescription2;
+  struct SubpassDependency2;
+  using SubpassDependency2KHR = SubpassDependency2;
+  struct SubpassBeginInfo;
+  using SubpassBeginInfoKHR = SubpassBeginInfo;
+  struct SubpassEndInfo;
+  using SubpassEndInfoKHR = SubpassEndInfo;
+  struct PhysicalDevice8BitStorageFeatures;
+  using PhysicalDevice8BitStorageFeaturesKHR = PhysicalDevice8BitStorageFeatures;
+  struct ConformanceVersion;
+  using ConformanceVersionKHR = ConformanceVersion;
+  struct PhysicalDeviceDriverProperties;
+  using PhysicalDeviceDriverPropertiesKHR = PhysicalDeviceDriverProperties;
+  struct PhysicalDeviceShaderAtomicInt64Features;
+  using PhysicalDeviceShaderAtomicInt64FeaturesKHR = PhysicalDeviceShaderAtomicInt64Features;
+  struct PhysicalDeviceShaderFloat16Int8Features;
+  using PhysicalDeviceFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8Features;
+  using PhysicalDeviceShaderFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8Features;
+  struct PhysicalDeviceFloatControlsProperties;
+  using PhysicalDeviceFloatControlsPropertiesKHR = PhysicalDeviceFloatControlsProperties;
+  struct DescriptorSetLayoutBindingFlagsCreateInfo;
+  using DescriptorSetLayoutBindingFlagsCreateInfoEXT = DescriptorSetLayoutBindingFlagsCreateInfo;
+  struct PhysicalDeviceDescriptorIndexingFeatures;
+  using PhysicalDeviceDescriptorIndexingFeaturesEXT = PhysicalDeviceDescriptorIndexingFeatures;
+  struct PhysicalDeviceDescriptorIndexingProperties;
+  using PhysicalDeviceDescriptorIndexingPropertiesEXT = PhysicalDeviceDescriptorIndexingProperties;
+  struct DescriptorSetVariableDescriptorCountAllocateInfo;
+  using DescriptorSetVariableDescriptorCountAllocateInfoEXT = DescriptorSetVariableDescriptorCountAllocateInfo;
+  struct DescriptorSetVariableDescriptorCountLayoutSupport;
+  using DescriptorSetVariableDescriptorCountLayoutSupportEXT = DescriptorSetVariableDescriptorCountLayoutSupport;
+  struct SubpassDescriptionDepthStencilResolve;
+  using SubpassDescriptionDepthStencilResolveKHR = SubpassDescriptionDepthStencilResolve;
+  struct PhysicalDeviceDepthStencilResolveProperties;
+  using PhysicalDeviceDepthStencilResolvePropertiesKHR = PhysicalDeviceDepthStencilResolveProperties;
+  struct PhysicalDeviceScalarBlockLayoutFeatures;
+  using PhysicalDeviceScalarBlockLayoutFeaturesEXT = PhysicalDeviceScalarBlockLayoutFeatures;
+  struct ImageStencilUsageCreateInfo;
+  using ImageStencilUsageCreateInfoEXT = ImageStencilUsageCreateInfo;
+  struct SamplerReductionModeCreateInfo;
+  using SamplerReductionModeCreateInfoEXT = SamplerReductionModeCreateInfo;
+  struct PhysicalDeviceSamplerFilterMinmaxProperties;
+  using PhysicalDeviceSamplerFilterMinmaxPropertiesEXT = PhysicalDeviceSamplerFilterMinmaxProperties;
+  struct PhysicalDeviceVulkanMemoryModelFeatures;
+  using PhysicalDeviceVulkanMemoryModelFeaturesKHR = PhysicalDeviceVulkanMemoryModelFeatures;
+  struct PhysicalDeviceImagelessFramebufferFeatures;
+  using PhysicalDeviceImagelessFramebufferFeaturesKHR = PhysicalDeviceImagelessFramebufferFeatures;
+  struct FramebufferAttachmentsCreateInfo;
+  using FramebufferAttachmentsCreateInfoKHR = FramebufferAttachmentsCreateInfo;
+  struct FramebufferAttachmentImageInfo;
+  using FramebufferAttachmentImageInfoKHR = FramebufferAttachmentImageInfo;
+  struct RenderPassAttachmentBeginInfo;
+  using RenderPassAttachmentBeginInfoKHR = RenderPassAttachmentBeginInfo;
+  struct PhysicalDeviceUniformBufferStandardLayoutFeatures;
+  using PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR = PhysicalDeviceUniformBufferStandardLayoutFeatures;
+  struct PhysicalDeviceShaderSubgroupExtendedTypesFeatures;
+  using PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR = PhysicalDeviceShaderSubgroupExtendedTypesFeatures;
+  struct PhysicalDeviceSeparateDepthStencilLayoutsFeatures;
+  using PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR = PhysicalDeviceSeparateDepthStencilLayoutsFeatures;
+  struct AttachmentReferenceStencilLayout;
+  using AttachmentReferenceStencilLayoutKHR = AttachmentReferenceStencilLayout;
+  struct AttachmentDescriptionStencilLayout;
+  using AttachmentDescriptionStencilLayoutKHR = AttachmentDescriptionStencilLayout;
+  struct PhysicalDeviceHostQueryResetFeatures;
+  using PhysicalDeviceHostQueryResetFeaturesEXT = PhysicalDeviceHostQueryResetFeatures;
+  struct PhysicalDeviceTimelineSemaphoreFeatures;
+  using PhysicalDeviceTimelineSemaphoreFeaturesKHR = PhysicalDeviceTimelineSemaphoreFeatures;
+  struct PhysicalDeviceTimelineSemaphoreProperties;
+  using PhysicalDeviceTimelineSemaphorePropertiesKHR = PhysicalDeviceTimelineSemaphoreProperties;
+  struct SemaphoreTypeCreateInfo;
+  using SemaphoreTypeCreateInfoKHR = SemaphoreTypeCreateInfo;
+  struct TimelineSemaphoreSubmitInfo;
+  using TimelineSemaphoreSubmitInfoKHR = TimelineSemaphoreSubmitInfo;
+  struct SemaphoreWaitInfo;
+  using SemaphoreWaitInfoKHR = SemaphoreWaitInfo;
+  struct SemaphoreSignalInfo;
+  using SemaphoreSignalInfoKHR = SemaphoreSignalInfo;
+  struct PhysicalDeviceBufferDeviceAddressFeatures;
+  using PhysicalDeviceBufferDeviceAddressFeaturesKHR = PhysicalDeviceBufferDeviceAddressFeatures;
+  struct BufferDeviceAddressInfo;
+  using BufferDeviceAddressInfoEXT = BufferDeviceAddressInfo;
+  using BufferDeviceAddressInfoKHR = BufferDeviceAddressInfo;
+  struct BufferOpaqueCaptureAddressCreateInfo;
+  using BufferOpaqueCaptureAddressCreateInfoKHR = BufferOpaqueCaptureAddressCreateInfo;
+  struct MemoryOpaqueCaptureAddressAllocateInfo;
+  using MemoryOpaqueCaptureAddressAllocateInfoKHR = MemoryOpaqueCaptureAddressAllocateInfo;
+  struct DeviceMemoryOpaqueCaptureAddressInfo;
+  using DeviceMemoryOpaqueCaptureAddressInfoKHR = DeviceMemoryOpaqueCaptureAddressInfo;
+
+  //=== VK_VERSION_1_3 ===
+  struct PhysicalDeviceVulkan13Features;
+  struct PhysicalDeviceVulkan13Properties;
+  struct PipelineCreationFeedbackCreateInfo;
+  using PipelineCreationFeedbackCreateInfoEXT = PipelineCreationFeedbackCreateInfo;
+  struct PipelineCreationFeedback;
+  using PipelineCreationFeedbackEXT = PipelineCreationFeedback;
+  struct PhysicalDeviceShaderTerminateInvocationFeatures;
+  using PhysicalDeviceShaderTerminateInvocationFeaturesKHR = PhysicalDeviceShaderTerminateInvocationFeatures;
+  struct PhysicalDeviceToolProperties;
+  using PhysicalDeviceToolPropertiesEXT = PhysicalDeviceToolProperties;
+  struct PhysicalDeviceShaderDemoteToHelperInvocationFeatures;
+  using PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT = PhysicalDeviceShaderDemoteToHelperInvocationFeatures;
+  struct PhysicalDevicePrivateDataFeatures;
+  using PhysicalDevicePrivateDataFeaturesEXT = PhysicalDevicePrivateDataFeatures;
+  struct DevicePrivateDataCreateInfo;
+  using DevicePrivateDataCreateInfoEXT = DevicePrivateDataCreateInfo;
+  struct PrivateDataSlotCreateInfo;
+  using PrivateDataSlotCreateInfoEXT = PrivateDataSlotCreateInfo;
+  struct PhysicalDevicePipelineCreationCacheControlFeatures;
+  using PhysicalDevicePipelineCreationCacheControlFeaturesEXT = PhysicalDevicePipelineCreationCacheControlFeatures;
+  struct MemoryBarrier2;
+  using MemoryBarrier2KHR = MemoryBarrier2;
+  struct BufferMemoryBarrier2;
+  using BufferMemoryBarrier2KHR = BufferMemoryBarrier2;
+  struct ImageMemoryBarrier2;
+  using ImageMemoryBarrier2KHR = ImageMemoryBarrier2;
+  struct DependencyInfo;
+  using DependencyInfoKHR = DependencyInfo;
+  struct SubmitInfo2;
+  using SubmitInfo2KHR = SubmitInfo2;
+  struct SemaphoreSubmitInfo;
+  using SemaphoreSubmitInfoKHR = SemaphoreSubmitInfo;
+  struct CommandBufferSubmitInfo;
+  using CommandBufferSubmitInfoKHR = CommandBufferSubmitInfo;
+  struct PhysicalDeviceSynchronization2Features;
+  using PhysicalDeviceSynchronization2FeaturesKHR = PhysicalDeviceSynchronization2Features;
+  struct PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures;
+  using PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR = PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures;
+  struct PhysicalDeviceImageRobustnessFeatures;
+  using PhysicalDeviceImageRobustnessFeaturesEXT = PhysicalDeviceImageRobustnessFeatures;
+  struct CopyBufferInfo2;
+  using CopyBufferInfo2KHR = CopyBufferInfo2;
+  struct CopyImageInfo2;
+  using CopyImageInfo2KHR = CopyImageInfo2;
+  struct CopyBufferToImageInfo2;
+  using CopyBufferToImageInfo2KHR = CopyBufferToImageInfo2;
+  struct CopyImageToBufferInfo2;
+  using CopyImageToBufferInfo2KHR = CopyImageToBufferInfo2;
+  struct BlitImageInfo2;
+  using BlitImageInfo2KHR = BlitImageInfo2;
+  struct ResolveImageInfo2;
+  using ResolveImageInfo2KHR = ResolveImageInfo2;
+  struct BufferCopy2;
+  using BufferCopy2KHR = BufferCopy2;
+  struct ImageCopy2;
+  using ImageCopy2KHR = ImageCopy2;
+  struct ImageBlit2;
+  using ImageBlit2KHR = ImageBlit2;
+  struct BufferImageCopy2;
+  using BufferImageCopy2KHR = BufferImageCopy2;
+  struct ImageResolve2;
+  using ImageResolve2KHR = ImageResolve2;
+  struct PhysicalDeviceSubgroupSizeControlFeatures;
+  using PhysicalDeviceSubgroupSizeControlFeaturesEXT = PhysicalDeviceSubgroupSizeControlFeatures;
+  struct PhysicalDeviceSubgroupSizeControlProperties;
+  using PhysicalDeviceSubgroupSizeControlPropertiesEXT = PhysicalDeviceSubgroupSizeControlProperties;
+  struct PipelineShaderStageRequiredSubgroupSizeCreateInfo;
+  using PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT = PipelineShaderStageRequiredSubgroupSizeCreateInfo;
+  struct PhysicalDeviceInlineUniformBlockFeatures;
+  using PhysicalDeviceInlineUniformBlockFeaturesEXT = PhysicalDeviceInlineUniformBlockFeatures;
+  struct PhysicalDeviceInlineUniformBlockProperties;
+  using PhysicalDeviceInlineUniformBlockPropertiesEXT = PhysicalDeviceInlineUniformBlockProperties;
+  struct WriteDescriptorSetInlineUniformBlock;
+  using WriteDescriptorSetInlineUniformBlockEXT = WriteDescriptorSetInlineUniformBlock;
+  struct DescriptorPoolInlineUniformBlockCreateInfo;
+  using DescriptorPoolInlineUniformBlockCreateInfoEXT = DescriptorPoolInlineUniformBlockCreateInfo;
+  struct PhysicalDeviceTextureCompressionASTCHDRFeatures;
+  using PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT = PhysicalDeviceTextureCompressionASTCHDRFeatures;
+  struct RenderingInfo;
+  using RenderingInfoKHR = RenderingInfo;
+  struct RenderingAttachmentInfo;
+  using RenderingAttachmentInfoKHR = RenderingAttachmentInfo;
+  struct PipelineRenderingCreateInfo;
+  using PipelineRenderingCreateInfoKHR = PipelineRenderingCreateInfo;
+  struct PhysicalDeviceDynamicRenderingFeatures;
+  using PhysicalDeviceDynamicRenderingFeaturesKHR = PhysicalDeviceDynamicRenderingFeatures;
+  struct CommandBufferInheritanceRenderingInfo;
+  using CommandBufferInheritanceRenderingInfoKHR = CommandBufferInheritanceRenderingInfo;
+  struct PhysicalDeviceShaderIntegerDotProductFeatures;
+  using PhysicalDeviceShaderIntegerDotProductFeaturesKHR = PhysicalDeviceShaderIntegerDotProductFeatures;
+  struct PhysicalDeviceShaderIntegerDotProductProperties;
+  using PhysicalDeviceShaderIntegerDotProductPropertiesKHR = PhysicalDeviceShaderIntegerDotProductProperties;
+  struct PhysicalDeviceTexelBufferAlignmentProperties;
+  using PhysicalDeviceTexelBufferAlignmentPropertiesEXT = PhysicalDeviceTexelBufferAlignmentProperties;
+  struct FormatProperties3;
+  using FormatProperties3KHR = FormatProperties3;
+  struct PhysicalDeviceMaintenance4Features;
+  using PhysicalDeviceMaintenance4FeaturesKHR = PhysicalDeviceMaintenance4Features;
+  struct PhysicalDeviceMaintenance4Properties;
+  using PhysicalDeviceMaintenance4PropertiesKHR = PhysicalDeviceMaintenance4Properties;
+  struct DeviceBufferMemoryRequirements;
+  using DeviceBufferMemoryRequirementsKHR = DeviceBufferMemoryRequirements;
+  struct DeviceImageMemoryRequirements;
+  using DeviceImageMemoryRequirementsKHR = DeviceImageMemoryRequirements;
+
+  //=== VK_KHR_surface ===
+  struct SurfaceCapabilitiesKHR;
+  struct SurfaceFormatKHR;
+
+  //=== VK_KHR_swapchain ===
+  struct SwapchainCreateInfoKHR;
+  struct PresentInfoKHR;
+  struct ImageSwapchainCreateInfoKHR;
+  struct BindImageMemorySwapchainInfoKHR;
+  struct AcquireNextImageInfoKHR;
+  struct DeviceGroupPresentCapabilitiesKHR;
+  struct DeviceGroupPresentInfoKHR;
+  struct DeviceGroupSwapchainCreateInfoKHR;
+
+  //=== VK_KHR_display ===
+  struct DisplayModeCreateInfoKHR;
+  struct DisplayModeParametersKHR;
+  struct DisplayModePropertiesKHR;
+  struct DisplayPlaneCapabilitiesKHR;
+  struct DisplayPlanePropertiesKHR;
+  struct DisplayPropertiesKHR;
+  struct DisplaySurfaceCreateInfoKHR;
+
+  //=== VK_KHR_display_swapchain ===
+  struct DisplayPresentInfoKHR;
+
+#if defined( VK_USE_PLATFORM_XLIB_KHR )
+  //=== VK_KHR_xlib_surface ===
+  struct XlibSurfaceCreateInfoKHR;
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+#if defined( VK_USE_PLATFORM_XCB_KHR )
+  //=== VK_KHR_xcb_surface ===
+  struct XcbSurfaceCreateInfoKHR;
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
+  //=== VK_KHR_wayland_surface ===
+  struct WaylandSurfaceCreateInfoKHR;
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  //=== VK_KHR_android_surface ===
+  struct AndroidSurfaceCreateInfoKHR;
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_win32_surface ===
+  struct Win32SurfaceCreateInfoKHR;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_EXT_debug_report ===
+  struct DebugReportCallbackCreateInfoEXT;
+
+  //=== VK_AMD_rasterization_order ===
+  struct PipelineRasterizationStateRasterizationOrderAMD;
+
+  //=== VK_EXT_debug_marker ===
+  struct DebugMarkerObjectNameInfoEXT;
+  struct DebugMarkerObjectTagInfoEXT;
+  struct DebugMarkerMarkerInfoEXT;
+
+  //=== VK_KHR_video_queue ===
+  struct QueueFamilyQueryResultStatusPropertiesKHR;
+  struct QueueFamilyVideoPropertiesKHR;
+  struct VideoProfileInfoKHR;
+  struct VideoProfileListInfoKHR;
+  struct VideoCapabilitiesKHR;
+  struct PhysicalDeviceVideoFormatInfoKHR;
+  struct VideoFormatPropertiesKHR;
+  struct VideoPictureResourceInfoKHR;
+  struct VideoReferenceSlotInfoKHR;
+  struct VideoSessionMemoryRequirementsKHR;
+  struct BindVideoSessionMemoryInfoKHR;
+  struct VideoSessionCreateInfoKHR;
+  struct VideoSessionParametersCreateInfoKHR;
+  struct VideoSessionParametersUpdateInfoKHR;
+  struct VideoBeginCodingInfoKHR;
+  struct VideoEndCodingInfoKHR;
+  struct VideoCodingControlInfoKHR;
+
+  //=== VK_KHR_video_decode_queue ===
+  struct VideoDecodeCapabilitiesKHR;
+  struct VideoDecodeUsageInfoKHR;
+  struct VideoDecodeInfoKHR;
+
+  //=== VK_NV_dedicated_allocation ===
+  struct DedicatedAllocationImageCreateInfoNV;
+  struct DedicatedAllocationBufferCreateInfoNV;
+  struct DedicatedAllocationMemoryAllocateInfoNV;
+
+  //=== VK_EXT_transform_feedback ===
+  struct PhysicalDeviceTransformFeedbackFeaturesEXT;
+  struct PhysicalDeviceTransformFeedbackPropertiesEXT;
+  struct PipelineRasterizationStateStreamCreateInfoEXT;
+
+  //=== VK_NVX_binary_import ===
+  struct CuModuleCreateInfoNVX;
+  struct CuFunctionCreateInfoNVX;
+  struct CuLaunchInfoNVX;
+
+  //=== VK_NVX_image_view_handle ===
+  struct ImageViewHandleInfoNVX;
+  struct ImageViewAddressPropertiesNVX;
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  //=== VK_EXT_video_encode_h264 ===
+  struct VideoEncodeH264CapabilitiesEXT;
+  struct VideoEncodeH264SessionParametersCreateInfoEXT;
+  struct VideoEncodeH264SessionParametersAddInfoEXT;
+  struct VideoEncodeH264VclFrameInfoEXT;
+  struct VideoEncodeH264ReferenceListsInfoEXT;
+  struct VideoEncodeH264EmitPictureParametersInfoEXT;
+  struct VideoEncodeH264DpbSlotInfoEXT;
+  struct VideoEncodeH264NaluSliceInfoEXT;
+  struct VideoEncodeH264ProfileInfoEXT;
+  struct VideoEncodeH264RateControlInfoEXT;
+  struct VideoEncodeH264RateControlLayerInfoEXT;
+  struct VideoEncodeH264QpEXT;
+  struct VideoEncodeH264FrameSizeEXT;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  //=== VK_EXT_video_encode_h265 ===
+  struct VideoEncodeH265CapabilitiesEXT;
+  struct VideoEncodeH265SessionParametersCreateInfoEXT;
+  struct VideoEncodeH265SessionParametersAddInfoEXT;
+  struct VideoEncodeH265VclFrameInfoEXT;
+  struct VideoEncodeH265EmitPictureParametersInfoEXT;
+  struct VideoEncodeH265DpbSlotInfoEXT;
+  struct VideoEncodeH265NaluSliceSegmentInfoEXT;
+  struct VideoEncodeH265ProfileInfoEXT;
+  struct VideoEncodeH265ReferenceListsInfoEXT;
+  struct VideoEncodeH265RateControlInfoEXT;
+  struct VideoEncodeH265RateControlLayerInfoEXT;
+  struct VideoEncodeH265QpEXT;
+  struct VideoEncodeH265FrameSizeEXT;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  //=== VK_KHR_video_decode_h264 ===
+  struct VideoDecodeH264ProfileInfoKHR;
+  struct VideoDecodeH264CapabilitiesKHR;
+  struct VideoDecodeH264SessionParametersCreateInfoKHR;
+  struct VideoDecodeH264SessionParametersAddInfoKHR;
+  struct VideoDecodeH264PictureInfoKHR;
+  struct VideoDecodeH264DpbSlotInfoKHR;
+
+  //=== VK_AMD_texture_gather_bias_lod ===
+  struct TextureLODGatherFormatPropertiesAMD;
+
+  //=== VK_AMD_shader_info ===
+  struct ShaderResourceUsageAMD;
+  struct ShaderStatisticsInfoAMD;
+
+  //=== VK_KHR_dynamic_rendering ===
+  struct RenderingFragmentShadingRateAttachmentInfoKHR;
+  struct RenderingFragmentDensityMapAttachmentInfoEXT;
+  struct AttachmentSampleCountInfoAMD;
+  using AttachmentSampleCountInfoNV = AttachmentSampleCountInfoAMD;
+  struct MultiviewPerViewAttributesInfoNVX;
+
+#if defined( VK_USE_PLATFORM_GGP )
+  //=== VK_GGP_stream_descriptor_surface ===
+  struct StreamDescriptorSurfaceCreateInfoGGP;
+#endif /*VK_USE_PLATFORM_GGP*/
+
+  //=== VK_NV_corner_sampled_image ===
+  struct PhysicalDeviceCornerSampledImageFeaturesNV;
+
+  //=== VK_NV_external_memory_capabilities ===
+  struct ExternalImageFormatPropertiesNV;
+
+  //=== VK_NV_external_memory ===
+  struct ExternalMemoryImageCreateInfoNV;
+  struct ExportMemoryAllocateInfoNV;
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_NV_external_memory_win32 ===
+  struct ImportMemoryWin32HandleInfoNV;
+  struct ExportMemoryWin32HandleInfoNV;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_NV_win32_keyed_mutex ===
+  struct Win32KeyedMutexAcquireReleaseInfoNV;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_EXT_validation_flags ===
+  struct ValidationFlagsEXT;
+
+#if defined( VK_USE_PLATFORM_VI_NN )
+  //=== VK_NN_vi_surface ===
+  struct ViSurfaceCreateInfoNN;
+#endif /*VK_USE_PLATFORM_VI_NN*/
+
+  //=== VK_EXT_astc_decode_mode ===
+  struct ImageViewASTCDecodeModeEXT;
+  struct PhysicalDeviceASTCDecodeFeaturesEXT;
+
+  //=== VK_EXT_pipeline_robustness ===
+  struct PhysicalDevicePipelineRobustnessFeaturesEXT;
+  struct PhysicalDevicePipelineRobustnessPropertiesEXT;
+  struct PipelineRobustnessCreateInfoEXT;
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_external_memory_win32 ===
+  struct ImportMemoryWin32HandleInfoKHR;
+  struct ExportMemoryWin32HandleInfoKHR;
+  struct MemoryWin32HandlePropertiesKHR;
+  struct MemoryGetWin32HandleInfoKHR;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_external_memory_fd ===
+  struct ImportMemoryFdInfoKHR;
+  struct MemoryFdPropertiesKHR;
+  struct MemoryGetFdInfoKHR;
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_win32_keyed_mutex ===
+  struct Win32KeyedMutexAcquireReleaseInfoKHR;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_external_semaphore_win32 ===
+  struct ImportSemaphoreWin32HandleInfoKHR;
+  struct ExportSemaphoreWin32HandleInfoKHR;
+  struct D3D12FenceSubmitInfoKHR;
+  struct SemaphoreGetWin32HandleInfoKHR;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_external_semaphore_fd ===
+  struct ImportSemaphoreFdInfoKHR;
+  struct SemaphoreGetFdInfoKHR;
+
+  //=== VK_KHR_push_descriptor ===
+  struct PhysicalDevicePushDescriptorPropertiesKHR;
+
+  //=== VK_EXT_conditional_rendering ===
+  struct ConditionalRenderingBeginInfoEXT;
+  struct PhysicalDeviceConditionalRenderingFeaturesEXT;
+  struct CommandBufferInheritanceConditionalRenderingInfoEXT;
+
+  //=== VK_KHR_incremental_present ===
+  struct PresentRegionsKHR;
+  struct PresentRegionKHR;
+  struct RectLayerKHR;
+
+  //=== VK_NV_clip_space_w_scaling ===
+  struct ViewportWScalingNV;
+  struct PipelineViewportWScalingStateCreateInfoNV;
+
+  //=== VK_EXT_display_surface_counter ===
+  struct SurfaceCapabilities2EXT;
+
+  //=== VK_EXT_display_control ===
+  struct DisplayPowerInfoEXT;
+  struct DeviceEventInfoEXT;
+  struct DisplayEventInfoEXT;
+  struct SwapchainCounterCreateInfoEXT;
+
+  //=== VK_GOOGLE_display_timing ===
+  struct RefreshCycleDurationGOOGLE;
+  struct PastPresentationTimingGOOGLE;
+  struct PresentTimesInfoGOOGLE;
+  struct PresentTimeGOOGLE;
+
+  //=== VK_NVX_multiview_per_view_attributes ===
+  struct PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX;
+
+  //=== VK_NV_viewport_swizzle ===
+  struct ViewportSwizzleNV;
+  struct PipelineViewportSwizzleStateCreateInfoNV;
+
+  //=== VK_EXT_discard_rectangles ===
+  struct PhysicalDeviceDiscardRectanglePropertiesEXT;
+  struct PipelineDiscardRectangleStateCreateInfoEXT;
+
+  //=== VK_EXT_conservative_rasterization ===
+  struct PhysicalDeviceConservativeRasterizationPropertiesEXT;
+  struct PipelineRasterizationConservativeStateCreateInfoEXT;
+
+  //=== VK_EXT_depth_clip_enable ===
+  struct PhysicalDeviceDepthClipEnableFeaturesEXT;
+  struct PipelineRasterizationDepthClipStateCreateInfoEXT;
+
+  //=== VK_EXT_hdr_metadata ===
+  struct HdrMetadataEXT;
+  struct XYColorEXT;
+
+  //=== VK_KHR_shared_presentable_image ===
+  struct SharedPresentSurfaceCapabilitiesKHR;
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_external_fence_win32 ===
+  struct ImportFenceWin32HandleInfoKHR;
+  struct ExportFenceWin32HandleInfoKHR;
+  struct FenceGetWin32HandleInfoKHR;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_external_fence_fd ===
+  struct ImportFenceFdInfoKHR;
+  struct FenceGetFdInfoKHR;
+
+  //=== VK_KHR_performance_query ===
+  struct PhysicalDevicePerformanceQueryFeaturesKHR;
+  struct PhysicalDevicePerformanceQueryPropertiesKHR;
+  struct PerformanceCounterKHR;
+  struct PerformanceCounterDescriptionKHR;
+  struct QueryPoolPerformanceCreateInfoKHR;
+  union PerformanceCounterResultKHR;
+  struct AcquireProfilingLockInfoKHR;
+  struct PerformanceQuerySubmitInfoKHR;
+
+  //=== VK_KHR_get_surface_capabilities2 ===
+  struct PhysicalDeviceSurfaceInfo2KHR;
+  struct SurfaceCapabilities2KHR;
+  struct SurfaceFormat2KHR;
+
+  //=== VK_KHR_get_display_properties2 ===
+  struct DisplayProperties2KHR;
+  struct DisplayPlaneProperties2KHR;
+  struct DisplayModeProperties2KHR;
+  struct DisplayPlaneInfo2KHR;
+  struct DisplayPlaneCapabilities2KHR;
+
+#if defined( VK_USE_PLATFORM_IOS_MVK )
+  //=== VK_MVK_ios_surface ===
+  struct IOSSurfaceCreateInfoMVK;
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+#if defined( VK_USE_PLATFORM_MACOS_MVK )
+  //=== VK_MVK_macos_surface ===
+  struct MacOSSurfaceCreateInfoMVK;
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+
+  //=== VK_EXT_debug_utils ===
+  struct DebugUtilsLabelEXT;
+  struct DebugUtilsMessengerCallbackDataEXT;
+  struct DebugUtilsMessengerCreateInfoEXT;
+  struct DebugUtilsObjectNameInfoEXT;
+  struct DebugUtilsObjectTagInfoEXT;
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  //=== VK_ANDROID_external_memory_android_hardware_buffer ===
+  struct AndroidHardwareBufferUsageANDROID;
+  struct AndroidHardwareBufferPropertiesANDROID;
+  struct AndroidHardwareBufferFormatPropertiesANDROID;
+  struct ImportAndroidHardwareBufferInfoANDROID;
+  struct MemoryGetAndroidHardwareBufferInfoANDROID;
+  struct ExternalFormatANDROID;
+  struct AndroidHardwareBufferFormatProperties2ANDROID;
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+  //=== VK_EXT_sample_locations ===
+  struct SampleLocationEXT;
+  struct SampleLocationsInfoEXT;
+  struct AttachmentSampleLocationsEXT;
+  struct SubpassSampleLocationsEXT;
+  struct RenderPassSampleLocationsBeginInfoEXT;
+  struct PipelineSampleLocationsStateCreateInfoEXT;
+  struct PhysicalDeviceSampleLocationsPropertiesEXT;
+  struct MultisamplePropertiesEXT;
+
+  //=== VK_EXT_blend_operation_advanced ===
+  struct PhysicalDeviceBlendOperationAdvancedFeaturesEXT;
+  struct PhysicalDeviceBlendOperationAdvancedPropertiesEXT;
+  struct PipelineColorBlendAdvancedStateCreateInfoEXT;
+
+  //=== VK_NV_fragment_coverage_to_color ===
+  struct PipelineCoverageToColorStateCreateInfoNV;
+
+  //=== VK_KHR_acceleration_structure ===
+  union DeviceOrHostAddressKHR;
+  union DeviceOrHostAddressConstKHR;
+  struct AccelerationStructureBuildRangeInfoKHR;
+  struct AabbPositionsKHR;
+  using AabbPositionsNV = AabbPositionsKHR;
+  struct AccelerationStructureGeometryTrianglesDataKHR;
+  struct TransformMatrixKHR;
+  using TransformMatrixNV = TransformMatrixKHR;
+  struct AccelerationStructureBuildGeometryInfoKHR;
+  struct AccelerationStructureGeometryAabbsDataKHR;
+  struct AccelerationStructureInstanceKHR;
+  using AccelerationStructureInstanceNV = AccelerationStructureInstanceKHR;
+  struct AccelerationStructureGeometryInstancesDataKHR;
+  union AccelerationStructureGeometryDataKHR;
+  struct AccelerationStructureGeometryKHR;
+  struct AccelerationStructureCreateInfoKHR;
+  struct WriteDescriptorSetAccelerationStructureKHR;
+  struct PhysicalDeviceAccelerationStructureFeaturesKHR;
+  struct PhysicalDeviceAccelerationStructurePropertiesKHR;
+  struct AccelerationStructureDeviceAddressInfoKHR;
+  struct AccelerationStructureVersionInfoKHR;
+  struct CopyAccelerationStructureToMemoryInfoKHR;
+  struct CopyMemoryToAccelerationStructureInfoKHR;
+  struct CopyAccelerationStructureInfoKHR;
+  struct AccelerationStructureBuildSizesInfoKHR;
+
+  //=== VK_NV_framebuffer_mixed_samples ===
+  struct PipelineCoverageModulationStateCreateInfoNV;
+
+  //=== VK_NV_shader_sm_builtins ===
+  struct PhysicalDeviceShaderSMBuiltinsPropertiesNV;
+  struct PhysicalDeviceShaderSMBuiltinsFeaturesNV;
+
+  //=== VK_EXT_image_drm_format_modifier ===
+  struct DrmFormatModifierPropertiesListEXT;
+  struct DrmFormatModifierPropertiesEXT;
+  struct PhysicalDeviceImageDrmFormatModifierInfoEXT;
+  struct ImageDrmFormatModifierListCreateInfoEXT;
+  struct ImageDrmFormatModifierExplicitCreateInfoEXT;
+  struct ImageDrmFormatModifierPropertiesEXT;
+  struct DrmFormatModifierPropertiesList2EXT;
+  struct DrmFormatModifierProperties2EXT;
+
+  //=== VK_EXT_validation_cache ===
+  struct ValidationCacheCreateInfoEXT;
+  struct ShaderModuleValidationCacheCreateInfoEXT;
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  //=== VK_KHR_portability_subset ===
+  struct PhysicalDevicePortabilitySubsetFeaturesKHR;
+  struct PhysicalDevicePortabilitySubsetPropertiesKHR;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  //=== VK_NV_shading_rate_image ===
+  struct ShadingRatePaletteNV;
+  struct PipelineViewportShadingRateImageStateCreateInfoNV;
+  struct PhysicalDeviceShadingRateImageFeaturesNV;
+  struct PhysicalDeviceShadingRateImagePropertiesNV;
+  struct CoarseSampleLocationNV;
+  struct CoarseSampleOrderCustomNV;
+  struct PipelineViewportCoarseSampleOrderStateCreateInfoNV;
+
+  //=== VK_NV_ray_tracing ===
+  struct RayTracingShaderGroupCreateInfoNV;
+  struct RayTracingPipelineCreateInfoNV;
+  struct GeometryTrianglesNV;
+  struct GeometryAABBNV;
+  struct GeometryDataNV;
+  struct GeometryNV;
+  struct AccelerationStructureInfoNV;
+  struct AccelerationStructureCreateInfoNV;
+  struct BindAccelerationStructureMemoryInfoNV;
+  struct WriteDescriptorSetAccelerationStructureNV;
+  struct AccelerationStructureMemoryRequirementsInfoNV;
+  struct PhysicalDeviceRayTracingPropertiesNV;
+
+  //=== VK_NV_representative_fragment_test ===
+  struct PhysicalDeviceRepresentativeFragmentTestFeaturesNV;
+  struct PipelineRepresentativeFragmentTestStateCreateInfoNV;
+
+  //=== VK_EXT_filter_cubic ===
+  struct PhysicalDeviceImageViewImageFormatInfoEXT;
+  struct FilterCubicImageViewImageFormatPropertiesEXT;
+
+  //=== VK_EXT_external_memory_host ===
+  struct ImportMemoryHostPointerInfoEXT;
+  struct MemoryHostPointerPropertiesEXT;
+  struct PhysicalDeviceExternalMemoryHostPropertiesEXT;
+
+  //=== VK_KHR_shader_clock ===
+  struct PhysicalDeviceShaderClockFeaturesKHR;
+
+  //=== VK_AMD_pipeline_compiler_control ===
+  struct PipelineCompilerControlCreateInfoAMD;
+
+  //=== VK_EXT_calibrated_timestamps ===
+  struct CalibratedTimestampInfoEXT;
+
+  //=== VK_AMD_shader_core_properties ===
+  struct PhysicalDeviceShaderCorePropertiesAMD;
+
+  //=== VK_KHR_video_decode_h265 ===
+  struct VideoDecodeH265ProfileInfoKHR;
+  struct VideoDecodeH265CapabilitiesKHR;
+  struct VideoDecodeH265SessionParametersCreateInfoKHR;
+  struct VideoDecodeH265SessionParametersAddInfoKHR;
+  struct VideoDecodeH265PictureInfoKHR;
+  struct VideoDecodeH265DpbSlotInfoKHR;
+
+  //=== VK_KHR_global_priority ===
+  struct DeviceQueueGlobalPriorityCreateInfoKHR;
+  using DeviceQueueGlobalPriorityCreateInfoEXT = DeviceQueueGlobalPriorityCreateInfoKHR;
+  struct PhysicalDeviceGlobalPriorityQueryFeaturesKHR;
+  using PhysicalDeviceGlobalPriorityQueryFeaturesEXT = PhysicalDeviceGlobalPriorityQueryFeaturesKHR;
+  struct QueueFamilyGlobalPriorityPropertiesKHR;
+  using QueueFamilyGlobalPriorityPropertiesEXT = QueueFamilyGlobalPriorityPropertiesKHR;
+
+  //=== VK_AMD_memory_overallocation_behavior ===
+  struct DeviceMemoryOverallocationCreateInfoAMD;
+
+  //=== VK_EXT_vertex_attribute_divisor ===
+  struct PhysicalDeviceVertexAttributeDivisorPropertiesEXT;
+  struct VertexInputBindingDivisorDescriptionEXT;
+  struct PipelineVertexInputDivisorStateCreateInfoEXT;
+  struct PhysicalDeviceVertexAttributeDivisorFeaturesEXT;
+
+#if defined( VK_USE_PLATFORM_GGP )
+  //=== VK_GGP_frame_token ===
+  struct PresentFrameTokenGGP;
+#endif /*VK_USE_PLATFORM_GGP*/
+
+  //=== VK_NV_compute_shader_derivatives ===
+  struct PhysicalDeviceComputeShaderDerivativesFeaturesNV;
+
+  //=== VK_NV_mesh_shader ===
+  struct PhysicalDeviceMeshShaderFeaturesNV;
+  struct PhysicalDeviceMeshShaderPropertiesNV;
+  struct DrawMeshTasksIndirectCommandNV;
+
+  //=== VK_NV_shader_image_footprint ===
+  struct PhysicalDeviceShaderImageFootprintFeaturesNV;
+
+  //=== VK_NV_scissor_exclusive ===
+  struct PipelineViewportExclusiveScissorStateCreateInfoNV;
+  struct PhysicalDeviceExclusiveScissorFeaturesNV;
+
+  //=== VK_NV_device_diagnostic_checkpoints ===
+  struct QueueFamilyCheckpointPropertiesNV;
+  struct CheckpointDataNV;
+
+  //=== VK_INTEL_shader_integer_functions2 ===
+  struct PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL;
+
+  //=== VK_INTEL_performance_query ===
+  union PerformanceValueDataINTEL;
+  struct PerformanceValueINTEL;
+  struct InitializePerformanceApiInfoINTEL;
+  struct QueryPoolPerformanceQueryCreateInfoINTEL;
+  using QueryPoolCreateInfoINTEL = QueryPoolPerformanceQueryCreateInfoINTEL;
+  struct PerformanceMarkerInfoINTEL;
+  struct PerformanceStreamMarkerInfoINTEL;
+  struct PerformanceOverrideInfoINTEL;
+  struct PerformanceConfigurationAcquireInfoINTEL;
+
+  //=== VK_EXT_pci_bus_info ===
+  struct PhysicalDevicePCIBusInfoPropertiesEXT;
+
+  //=== VK_AMD_display_native_hdr ===
+  struct DisplayNativeHdrSurfaceCapabilitiesAMD;
+  struct SwapchainDisplayNativeHdrCreateInfoAMD;
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_imagepipe_surface ===
+  struct ImagePipeSurfaceCreateInfoFUCHSIA;
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+  //=== VK_EXT_metal_surface ===
+  struct MetalSurfaceCreateInfoEXT;
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  //=== VK_EXT_fragment_density_map ===
+  struct PhysicalDeviceFragmentDensityMapFeaturesEXT;
+  struct PhysicalDeviceFragmentDensityMapPropertiesEXT;
+  struct RenderPassFragmentDensityMapCreateInfoEXT;
+
+  //=== VK_KHR_fragment_shading_rate ===
+  struct FragmentShadingRateAttachmentInfoKHR;
+  struct PipelineFragmentShadingRateStateCreateInfoKHR;
+  struct PhysicalDeviceFragmentShadingRateFeaturesKHR;
+  struct PhysicalDeviceFragmentShadingRatePropertiesKHR;
+  struct PhysicalDeviceFragmentShadingRateKHR;
+
+  //=== VK_AMD_shader_core_properties2 ===
+  struct PhysicalDeviceShaderCoreProperties2AMD;
+
+  //=== VK_AMD_device_coherent_memory ===
+  struct PhysicalDeviceCoherentMemoryFeaturesAMD;
+
+  //=== VK_EXT_shader_image_atomic_int64 ===
+  struct PhysicalDeviceShaderImageAtomicInt64FeaturesEXT;
+
+  //=== VK_EXT_memory_budget ===
+  struct PhysicalDeviceMemoryBudgetPropertiesEXT;
+
+  //=== VK_EXT_memory_priority ===
+  struct PhysicalDeviceMemoryPriorityFeaturesEXT;
+  struct MemoryPriorityAllocateInfoEXT;
+
+  //=== VK_KHR_surface_protected_capabilities ===
+  struct SurfaceProtectedCapabilitiesKHR;
+
+  //=== VK_NV_dedicated_allocation_image_aliasing ===
+  struct PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV;
+
+  //=== VK_EXT_buffer_device_address ===
+  struct PhysicalDeviceBufferDeviceAddressFeaturesEXT;
+  using PhysicalDeviceBufferAddressFeaturesEXT = PhysicalDeviceBufferDeviceAddressFeaturesEXT;
+  struct BufferDeviceAddressCreateInfoEXT;
+
+  //=== VK_EXT_validation_features ===
+  struct ValidationFeaturesEXT;
+
+  //=== VK_KHR_present_wait ===
+  struct PhysicalDevicePresentWaitFeaturesKHR;
+
+  //=== VK_NV_cooperative_matrix ===
+  struct CooperativeMatrixPropertiesNV;
+  struct PhysicalDeviceCooperativeMatrixFeaturesNV;
+  struct PhysicalDeviceCooperativeMatrixPropertiesNV;
+
+  //=== VK_NV_coverage_reduction_mode ===
+  struct PhysicalDeviceCoverageReductionModeFeaturesNV;
+  struct PipelineCoverageReductionStateCreateInfoNV;
+  struct FramebufferMixedSamplesCombinationNV;
+
+  //=== VK_EXT_fragment_shader_interlock ===
+  struct PhysicalDeviceFragmentShaderInterlockFeaturesEXT;
+
+  //=== VK_EXT_ycbcr_image_arrays ===
+  struct PhysicalDeviceYcbcrImageArraysFeaturesEXT;
+
+  //=== VK_EXT_provoking_vertex ===
+  struct PhysicalDeviceProvokingVertexFeaturesEXT;
+  struct PhysicalDeviceProvokingVertexPropertiesEXT;
+  struct PipelineRasterizationProvokingVertexStateCreateInfoEXT;
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_EXT_full_screen_exclusive ===
+  struct SurfaceFullScreenExclusiveInfoEXT;
+  struct SurfaceCapabilitiesFullScreenExclusiveEXT;
+  struct SurfaceFullScreenExclusiveWin32InfoEXT;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_EXT_headless_surface ===
+  struct HeadlessSurfaceCreateInfoEXT;
+
+  //=== VK_EXT_line_rasterization ===
+  struct PhysicalDeviceLineRasterizationFeaturesEXT;
+  struct PhysicalDeviceLineRasterizationPropertiesEXT;
+  struct PipelineRasterizationLineStateCreateInfoEXT;
+
+  //=== VK_EXT_shader_atomic_float ===
+  struct PhysicalDeviceShaderAtomicFloatFeaturesEXT;
+
+  //=== VK_EXT_index_type_uint8 ===
+  struct PhysicalDeviceIndexTypeUint8FeaturesEXT;
+
+  //=== VK_EXT_extended_dynamic_state ===
+  struct PhysicalDeviceExtendedDynamicStateFeaturesEXT;
+
+  //=== VK_KHR_pipeline_executable_properties ===
+  struct PhysicalDevicePipelineExecutablePropertiesFeaturesKHR;
+  struct PipelineInfoKHR;
+  using PipelineInfoEXT = PipelineInfoKHR;
+  struct PipelineExecutablePropertiesKHR;
+  struct PipelineExecutableInfoKHR;
+  union PipelineExecutableStatisticValueKHR;
+  struct PipelineExecutableStatisticKHR;
+  struct PipelineExecutableInternalRepresentationKHR;
+
+  //=== VK_EXT_shader_atomic_float2 ===
+  struct PhysicalDeviceShaderAtomicFloat2FeaturesEXT;
+
+  //=== VK_EXT_surface_maintenance1 ===
+  struct SurfacePresentModeEXT;
+  struct SurfacePresentScalingCapabilitiesEXT;
+  struct SurfacePresentModeCompatibilityEXT;
+
+  //=== VK_EXT_swapchain_maintenance1 ===
+  struct PhysicalDeviceSwapchainMaintenance1FeaturesEXT;
+  struct SwapchainPresentFenceInfoEXT;
+  struct SwapchainPresentModesCreateInfoEXT;
+  struct SwapchainPresentModeInfoEXT;
+  struct SwapchainPresentScalingCreateInfoEXT;
+  struct ReleaseSwapchainImagesInfoEXT;
+
+  //=== VK_NV_device_generated_commands ===
+  struct PhysicalDeviceDeviceGeneratedCommandsPropertiesNV;
+  struct PhysicalDeviceDeviceGeneratedCommandsFeaturesNV;
+  struct GraphicsShaderGroupCreateInfoNV;
+  struct GraphicsPipelineShaderGroupsCreateInfoNV;
+  struct BindShaderGroupIndirectCommandNV;
+  struct BindIndexBufferIndirectCommandNV;
+  struct BindVertexBufferIndirectCommandNV;
+  struct SetStateFlagsIndirectCommandNV;
+  struct IndirectCommandsStreamNV;
+  struct IndirectCommandsLayoutTokenNV;
+  struct IndirectCommandsLayoutCreateInfoNV;
+  struct GeneratedCommandsInfoNV;
+  struct GeneratedCommandsMemoryRequirementsInfoNV;
+
+  //=== VK_NV_inherited_viewport_scissor ===
+  struct PhysicalDeviceInheritedViewportScissorFeaturesNV;
+  struct CommandBufferInheritanceViewportScissorInfoNV;
+
+  //=== VK_EXT_texel_buffer_alignment ===
+  struct PhysicalDeviceTexelBufferAlignmentFeaturesEXT;
+
+  //=== VK_QCOM_render_pass_transform ===
+  struct RenderPassTransformBeginInfoQCOM;
+  struct CommandBufferInheritanceRenderPassTransformInfoQCOM;
+
+  //=== VK_EXT_device_memory_report ===
+  struct PhysicalDeviceDeviceMemoryReportFeaturesEXT;
+  struct DeviceDeviceMemoryReportCreateInfoEXT;
+  struct DeviceMemoryReportCallbackDataEXT;
+
+  //=== VK_EXT_robustness2 ===
+  struct PhysicalDeviceRobustness2FeaturesEXT;
+  struct PhysicalDeviceRobustness2PropertiesEXT;
+
+  //=== VK_EXT_custom_border_color ===
+  struct SamplerCustomBorderColorCreateInfoEXT;
+  struct PhysicalDeviceCustomBorderColorPropertiesEXT;
+  struct PhysicalDeviceCustomBorderColorFeaturesEXT;
+
+  //=== VK_KHR_pipeline_library ===
+  struct PipelineLibraryCreateInfoKHR;
+
+  //=== VK_NV_present_barrier ===
+  struct PhysicalDevicePresentBarrierFeaturesNV;
+  struct SurfaceCapabilitiesPresentBarrierNV;
+  struct SwapchainPresentBarrierCreateInfoNV;
+
+  //=== VK_KHR_present_id ===
+  struct PresentIdKHR;
+  struct PhysicalDevicePresentIdFeaturesKHR;
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  //=== VK_KHR_video_encode_queue ===
+  struct VideoEncodeInfoKHR;
+  struct VideoEncodeCapabilitiesKHR;
+  struct VideoEncodeUsageInfoKHR;
+  struct VideoEncodeRateControlInfoKHR;
+  struct VideoEncodeRateControlLayerInfoKHR;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  //=== VK_NV_device_diagnostics_config ===
+  struct PhysicalDeviceDiagnosticsConfigFeaturesNV;
+  struct DeviceDiagnosticsConfigCreateInfoNV;
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+  //=== VK_EXT_metal_objects ===
+  struct ExportMetalObjectCreateInfoEXT;
+  struct ExportMetalObjectsInfoEXT;
+  struct ExportMetalDeviceInfoEXT;
+  struct ExportMetalCommandQueueInfoEXT;
+  struct ExportMetalBufferInfoEXT;
+  struct ImportMetalBufferInfoEXT;
+  struct ExportMetalTextureInfoEXT;
+  struct ImportMetalTextureInfoEXT;
+  struct ExportMetalIOSurfaceInfoEXT;
+  struct ImportMetalIOSurfaceInfoEXT;
+  struct ExportMetalSharedEventInfoEXT;
+  struct ImportMetalSharedEventInfoEXT;
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  //=== VK_KHR_synchronization2 ===
+  struct QueueFamilyCheckpointProperties2NV;
+  struct CheckpointData2NV;
+
+  //=== VK_EXT_descriptor_buffer ===
+  struct PhysicalDeviceDescriptorBufferPropertiesEXT;
+  struct PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT;
+  struct PhysicalDeviceDescriptorBufferFeaturesEXT;
+  struct DescriptorAddressInfoEXT;
+  struct DescriptorBufferBindingInfoEXT;
+  struct DescriptorBufferBindingPushDescriptorBufferHandleEXT;
+  union DescriptorDataEXT;
+  struct DescriptorGetInfoEXT;
+  struct BufferCaptureDescriptorDataInfoEXT;
+  struct ImageCaptureDescriptorDataInfoEXT;
+  struct ImageViewCaptureDescriptorDataInfoEXT;
+  struct SamplerCaptureDescriptorDataInfoEXT;
+  struct OpaqueCaptureDescriptorDataCreateInfoEXT;
+  struct AccelerationStructureCaptureDescriptorDataInfoEXT;
+
+  //=== VK_EXT_graphics_pipeline_library ===
+  struct PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT;
+  struct PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT;
+  struct GraphicsPipelineLibraryCreateInfoEXT;
+
+  //=== VK_AMD_shader_early_and_late_fragment_tests ===
+  struct PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD;
+
+  //=== VK_KHR_fragment_shader_barycentric ===
+  struct PhysicalDeviceFragmentShaderBarycentricFeaturesKHR;
+  using PhysicalDeviceFragmentShaderBarycentricFeaturesNV = PhysicalDeviceFragmentShaderBarycentricFeaturesKHR;
+  struct PhysicalDeviceFragmentShaderBarycentricPropertiesKHR;
+
+  //=== VK_KHR_shader_subgroup_uniform_control_flow ===
+  struct PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR;
+
+  //=== VK_NV_fragment_shading_rate_enums ===
+  struct PhysicalDeviceFragmentShadingRateEnumsFeaturesNV;
+  struct PhysicalDeviceFragmentShadingRateEnumsPropertiesNV;
+  struct PipelineFragmentShadingRateEnumStateCreateInfoNV;
+
+  //=== VK_NV_ray_tracing_motion_blur ===
+  struct AccelerationStructureGeometryMotionTrianglesDataNV;
+  struct AccelerationStructureMotionInfoNV;
+  struct AccelerationStructureMotionInstanceNV;
+  union AccelerationStructureMotionInstanceDataNV;
+  struct AccelerationStructureMatrixMotionInstanceNV;
+  struct AccelerationStructureSRTMotionInstanceNV;
+  struct SRTDataNV;
+  struct PhysicalDeviceRayTracingMotionBlurFeaturesNV;
+
+  //=== VK_EXT_mesh_shader ===
+  struct PhysicalDeviceMeshShaderFeaturesEXT;
+  struct PhysicalDeviceMeshShaderPropertiesEXT;
+  struct DrawMeshTasksIndirectCommandEXT;
+
+  //=== VK_EXT_ycbcr_2plane_444_formats ===
+  struct PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT;
+
+  //=== VK_EXT_fragment_density_map2 ===
+  struct PhysicalDeviceFragmentDensityMap2FeaturesEXT;
+  struct PhysicalDeviceFragmentDensityMap2PropertiesEXT;
+
+  //=== VK_QCOM_rotated_copy_commands ===
+  struct CopyCommandTransformInfoQCOM;
+
+  //=== VK_KHR_workgroup_memory_explicit_layout ===
+  struct PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR;
+
+  //=== VK_EXT_image_compression_control ===
+  struct PhysicalDeviceImageCompressionControlFeaturesEXT;
+  struct ImageCompressionControlEXT;
+  struct SubresourceLayout2EXT;
+  struct ImageSubresource2EXT;
+  struct ImageCompressionPropertiesEXT;
+
+  //=== VK_EXT_attachment_feedback_loop_layout ===
+  struct PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT;
+
+  //=== VK_EXT_4444_formats ===
+  struct PhysicalDevice4444FormatsFeaturesEXT;
+
+  //=== VK_EXT_device_fault ===
+  struct PhysicalDeviceFaultFeaturesEXT;
+  struct DeviceFaultCountsEXT;
+  struct DeviceFaultInfoEXT;
+  struct DeviceFaultAddressInfoEXT;
+  struct DeviceFaultVendorInfoEXT;
+  struct DeviceFaultVendorBinaryHeaderVersionOneEXT;
+
+  //=== VK_EXT_rgba10x6_formats ===
+  struct PhysicalDeviceRGBA10X6FormatsFeaturesEXT;
+
+#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
+  //=== VK_EXT_directfb_surface ===
+  struct DirectFBSurfaceCreateInfoEXT;
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+
+  //=== VK_KHR_ray_tracing_pipeline ===
+  struct RayTracingShaderGroupCreateInfoKHR;
+  struct RayTracingPipelineCreateInfoKHR;
+  struct PhysicalDeviceRayTracingPipelineFeaturesKHR;
+  struct PhysicalDeviceRayTracingPipelinePropertiesKHR;
+  struct StridedDeviceAddressRegionKHR;
+  struct TraceRaysIndirectCommandKHR;
+  struct RayTracingPipelineInterfaceCreateInfoKHR;
+
+  //=== VK_KHR_ray_query ===
+  struct PhysicalDeviceRayQueryFeaturesKHR;
+
+  //=== VK_EXT_vertex_input_dynamic_state ===
+  struct PhysicalDeviceVertexInputDynamicStateFeaturesEXT;
+  struct VertexInputBindingDescription2EXT;
+  struct VertexInputAttributeDescription2EXT;
+
+  //=== VK_EXT_physical_device_drm ===
+  struct PhysicalDeviceDrmPropertiesEXT;
+
+  //=== VK_EXT_device_address_binding_report ===
+  struct PhysicalDeviceAddressBindingReportFeaturesEXT;
+  struct DeviceAddressBindingCallbackDataEXT;
+
+  //=== VK_EXT_depth_clip_control ===
+  struct PhysicalDeviceDepthClipControlFeaturesEXT;
+  struct PipelineViewportDepthClipControlCreateInfoEXT;
+
+  //=== VK_EXT_primitive_topology_list_restart ===
+  struct PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT;
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_external_memory ===
+  struct ImportMemoryZirconHandleInfoFUCHSIA;
+  struct MemoryZirconHandlePropertiesFUCHSIA;
+  struct MemoryGetZirconHandleInfoFUCHSIA;
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_external_semaphore ===
+  struct ImportSemaphoreZirconHandleInfoFUCHSIA;
+  struct SemaphoreGetZirconHandleInfoFUCHSIA;
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_buffer_collection ===
+  struct BufferCollectionCreateInfoFUCHSIA;
+  struct ImportMemoryBufferCollectionFUCHSIA;
+  struct BufferCollectionImageCreateInfoFUCHSIA;
+  struct BufferConstraintsInfoFUCHSIA;
+  struct BufferCollectionBufferCreateInfoFUCHSIA;
+  struct BufferCollectionPropertiesFUCHSIA;
+  struct SysmemColorSpaceFUCHSIA;
+  struct ImageConstraintsInfoFUCHSIA;
+  struct ImageFormatConstraintsInfoFUCHSIA;
+  struct BufferCollectionConstraintsInfoFUCHSIA;
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  //=== VK_HUAWEI_subpass_shading ===
+  struct SubpassShadingPipelineCreateInfoHUAWEI;
+  struct PhysicalDeviceSubpassShadingFeaturesHUAWEI;
+  struct PhysicalDeviceSubpassShadingPropertiesHUAWEI;
+
+  //=== VK_HUAWEI_invocation_mask ===
+  struct PhysicalDeviceInvocationMaskFeaturesHUAWEI;
+
+  //=== VK_NV_external_memory_rdma ===
+  struct MemoryGetRemoteAddressInfoNV;
+  struct PhysicalDeviceExternalMemoryRDMAFeaturesNV;
+
+  //=== VK_EXT_pipeline_properties ===
+  struct PipelinePropertiesIdentifierEXT;
+  struct PhysicalDevicePipelinePropertiesFeaturesEXT;
+
+  //=== VK_EXT_multisampled_render_to_single_sampled ===
+  struct PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT;
+  struct SubpassResolvePerformanceQueryEXT;
+  struct MultisampledRenderToSingleSampledInfoEXT;
+
+  //=== VK_EXT_extended_dynamic_state2 ===
+  struct PhysicalDeviceExtendedDynamicState2FeaturesEXT;
+
+#if defined( VK_USE_PLATFORM_SCREEN_QNX )
+  //=== VK_QNX_screen_surface ===
+  struct ScreenSurfaceCreateInfoQNX;
+#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+
+  //=== VK_EXT_color_write_enable ===
+  struct PhysicalDeviceColorWriteEnableFeaturesEXT;
+  struct PipelineColorWriteCreateInfoEXT;
+
+  //=== VK_EXT_primitives_generated_query ===
+  struct PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT;
+
+  //=== VK_KHR_ray_tracing_maintenance1 ===
+  struct PhysicalDeviceRayTracingMaintenance1FeaturesKHR;
+  struct TraceRaysIndirectCommand2KHR;
+
+  //=== VK_EXT_image_view_min_lod ===
+  struct PhysicalDeviceImageViewMinLodFeaturesEXT;
+  struct ImageViewMinLodCreateInfoEXT;
+
+  //=== VK_EXT_multi_draw ===
+  struct PhysicalDeviceMultiDrawFeaturesEXT;
+  struct PhysicalDeviceMultiDrawPropertiesEXT;
+  struct MultiDrawInfoEXT;
+  struct MultiDrawIndexedInfoEXT;
+
+  //=== VK_EXT_image_2d_view_of_3d ===
+  struct PhysicalDeviceImage2DViewOf3DFeaturesEXT;
+
+  //=== VK_EXT_opacity_micromap ===
+  struct MicromapBuildInfoEXT;
+  struct MicromapUsageEXT;
+  struct MicromapCreateInfoEXT;
+  struct PhysicalDeviceOpacityMicromapFeaturesEXT;
+  struct PhysicalDeviceOpacityMicromapPropertiesEXT;
+  struct MicromapVersionInfoEXT;
+  struct CopyMicromapToMemoryInfoEXT;
+  struct CopyMemoryToMicromapInfoEXT;
+  struct CopyMicromapInfoEXT;
+  struct MicromapBuildSizesInfoEXT;
+  struct AccelerationStructureTrianglesOpacityMicromapEXT;
+  struct MicromapTriangleEXT;
+
+  //=== VK_HUAWEI_cluster_culling_shader ===
+  struct PhysicalDeviceClusterCullingShaderFeaturesHUAWEI;
+  struct PhysicalDeviceClusterCullingShaderPropertiesHUAWEI;
+
+  //=== VK_EXT_border_color_swizzle ===
+  struct PhysicalDeviceBorderColorSwizzleFeaturesEXT;
+  struct SamplerBorderColorComponentMappingCreateInfoEXT;
+
+  //=== VK_EXT_pageable_device_local_memory ===
+  struct PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT;
+
+  //=== VK_VALVE_descriptor_set_host_mapping ===
+  struct PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE;
+  struct DescriptorSetBindingReferenceVALVE;
+  struct DescriptorSetLayoutHostMappingInfoVALVE;
+
+  //=== VK_EXT_depth_clamp_zero_one ===
+  struct PhysicalDeviceDepthClampZeroOneFeaturesEXT;
+
+  //=== VK_EXT_non_seamless_cube_map ===
+  struct PhysicalDeviceNonSeamlessCubeMapFeaturesEXT;
+
+  //=== VK_QCOM_fragment_density_map_offset ===
+  struct PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM;
+  struct PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM;
+  struct SubpassFragmentDensityMapOffsetEndInfoQCOM;
+
+  //=== VK_NV_copy_memory_indirect ===
+  struct CopyMemoryIndirectCommandNV;
+  struct CopyMemoryToImageIndirectCommandNV;
+  struct PhysicalDeviceCopyMemoryIndirectFeaturesNV;
+  struct PhysicalDeviceCopyMemoryIndirectPropertiesNV;
+
+  //=== VK_NV_memory_decompression ===
+  struct DecompressMemoryRegionNV;
+  struct PhysicalDeviceMemoryDecompressionFeaturesNV;
+  struct PhysicalDeviceMemoryDecompressionPropertiesNV;
+
+  //=== VK_NV_linear_color_attachment ===
+  struct PhysicalDeviceLinearColorAttachmentFeaturesNV;
+
+  //=== VK_EXT_image_compression_control_swapchain ===
+  struct PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT;
+
+  //=== VK_QCOM_image_processing ===
+  struct ImageViewSampleWeightCreateInfoQCOM;
+  struct PhysicalDeviceImageProcessingFeaturesQCOM;
+  struct PhysicalDeviceImageProcessingPropertiesQCOM;
+
+  //=== VK_EXT_extended_dynamic_state3 ===
+  struct PhysicalDeviceExtendedDynamicState3FeaturesEXT;
+  struct PhysicalDeviceExtendedDynamicState3PropertiesEXT;
+  struct ColorBlendEquationEXT;
+  struct ColorBlendAdvancedEXT;
+
+  //=== VK_EXT_subpass_merge_feedback ===
+  struct PhysicalDeviceSubpassMergeFeedbackFeaturesEXT;
+  struct RenderPassCreationControlEXT;
+  struct RenderPassCreationFeedbackInfoEXT;
+  struct RenderPassCreationFeedbackCreateInfoEXT;
+  struct RenderPassSubpassFeedbackInfoEXT;
+  struct RenderPassSubpassFeedbackCreateInfoEXT;
+
+  //=== VK_LUNARG_direct_driver_loading ===
+  struct DirectDriverLoadingInfoLUNARG;
+  struct DirectDriverLoadingListLUNARG;
+
+  //=== VK_EXT_shader_module_identifier ===
+  struct PhysicalDeviceShaderModuleIdentifierFeaturesEXT;
+  struct PhysicalDeviceShaderModuleIdentifierPropertiesEXT;
+  struct PipelineShaderStageModuleIdentifierCreateInfoEXT;
+  struct ShaderModuleIdentifierEXT;
+
+  //=== VK_EXT_rasterization_order_attachment_access ===
+  struct PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT;
+  using PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM = PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT;
+
+  //=== VK_NV_optical_flow ===
+  struct PhysicalDeviceOpticalFlowFeaturesNV;
+  struct PhysicalDeviceOpticalFlowPropertiesNV;
+  struct OpticalFlowImageFormatInfoNV;
+  struct OpticalFlowImageFormatPropertiesNV;
+  struct OpticalFlowSessionCreateInfoNV;
+  struct OpticalFlowSessionCreatePrivateDataInfoNV;
+  struct OpticalFlowExecuteInfoNV;
+
+  //=== VK_EXT_legacy_dithering ===
+  struct PhysicalDeviceLegacyDitheringFeaturesEXT;
+
+  //=== VK_EXT_pipeline_protected_access ===
+  struct PhysicalDevicePipelineProtectedAccessFeaturesEXT;
+
+  //=== VK_QCOM_tile_properties ===
+  struct PhysicalDeviceTilePropertiesFeaturesQCOM;
+  struct TilePropertiesQCOM;
+
+  //=== VK_SEC_amigo_profiling ===
+  struct PhysicalDeviceAmigoProfilingFeaturesSEC;
+  struct AmigoProfilingSubmitInfoSEC;
+
+  //=== VK_QCOM_multiview_per_view_viewports ===
+  struct PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM;
+
+  //=== VK_NV_ray_tracing_invocation_reorder ===
+  struct PhysicalDeviceRayTracingInvocationReorderPropertiesNV;
+  struct PhysicalDeviceRayTracingInvocationReorderFeaturesNV;
+
+  //=== VK_EXT_mutable_descriptor_type ===
+  struct PhysicalDeviceMutableDescriptorTypeFeaturesEXT;
+  using PhysicalDeviceMutableDescriptorTypeFeaturesVALVE = PhysicalDeviceMutableDescriptorTypeFeaturesEXT;
+  struct MutableDescriptorTypeListEXT;
+  using MutableDescriptorTypeListVALVE = MutableDescriptorTypeListEXT;
+  struct MutableDescriptorTypeCreateInfoEXT;
+  using MutableDescriptorTypeCreateInfoVALVE = MutableDescriptorTypeCreateInfoEXT;
+
+  //=== VK_ARM_shader_core_builtins ===
+  struct PhysicalDeviceShaderCoreBuiltinsFeaturesARM;
+  struct PhysicalDeviceShaderCoreBuiltinsPropertiesARM;
+
+
+
+  //===============
+  //=== HANDLEs ===
+  //===============
+
+  // Compile-time trait: true iff Type is one of the Vulkan-Hpp handle wrapper
+  // classes. The primary template defaults to false; each handle class defined
+  // in this header provides a specialization with value == true.
+  template <typename Type>
+  struct isVulkanHandleType
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = false;
+  };
+
+  // Thin, trivially copyable C++ wrapper around the raw VkSurfaceKHR handle
+  // (VK_KHR_surface). It stores only the handle value and performs no
+  // ownership/lifetime management.
+  class SurfaceKHR
+  {
+  public:
+    using CType = VkSurfaceKHR;
+    using NativeType = VkSurfaceKHR;
+
+    // Object-type tags used by the debug-utils / debug-report machinery.
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSurfaceKHR;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSurfaceKHR;
+
+  public:
+    // Default- and nullptr-construction both yield a null handle.
+    VULKAN_HPP_CONSTEXPR SurfaceKHR() = default;
+    VULKAN_HPP_CONSTEXPR SurfaceKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {}
+    // Wraps a raw handle; explicitness is controlled by the
+    // VULKAN_HPP_TYPESAFE_EXPLICIT macro (defined outside this chunk).
+    VULKAN_HPP_TYPESAFE_EXPLICIT SurfaceKHR( VkSurfaceKHR surfaceKHR ) VULKAN_HPP_NOEXCEPT
+      : m_surfaceKHR( surfaceKHR )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    // Direct assignment from the raw C handle (typesafe-conversion mode only).
+    SurfaceKHR & operator=(VkSurfaceKHR surfaceKHR) VULKAN_HPP_NOEXCEPT
+    {
+      m_surfaceKHR = surfaceKHR;
+      return *this;
+    }
+#endif
+
+    // Assigning nullptr resets the wrapper to a null handle.
+    SurfaceKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_surfaceKHR = {};
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SurfaceKHR const & ) const = default;
+#else
+    // Pre-C++20 fallback: compare by raw handle value.
+    bool operator==( SurfaceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_surfaceKHR == rhs.m_surfaceKHR;
+    }
+
+    bool operator!=(SurfaceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_surfaceKHR != rhs.m_surfaceKHR;
+    }
+
+    bool operator<(SurfaceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_surfaceKHR < rhs.m_surfaceKHR;
+    }
+#endif
+
+    // Conversion back to the raw C handle for use with the C API.
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSurfaceKHR() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_surfaceKHR;
+    }
+
+    // True iff the wrapped handle is non-null.
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_surfaceKHR != VK_NULL_HANDLE;
+    }
+
+    // True iff the wrapped handle is null.
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_surfaceKHR == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkSurfaceKHR m_surfaceKHR = {};
+  };
+
+  // Maps ObjectType::eSurfaceKHR back to the SurfaceKHR wrapper type.
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eSurfaceKHR>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::SurfaceKHR;
+  };
+
+
+  // Maps DebugReportObjectTypeEXT::eSurfaceKHR back to SurfaceKHR.
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSurfaceKHR>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::SurfaceKHR;
+  };
+
+
+  // Marks SurfaceKHR as a Vulkan handle type (primary template defaults to false).
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::SurfaceKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  // Thin, trivially copyable C++ wrapper around the raw VkDebugReportCallbackEXT
+  // handle (VK_EXT_debug_report). It stores only the handle value and performs
+  // no ownership/lifetime management.
+  class DebugReportCallbackEXT
+  {
+  public:
+    using CType = VkDebugReportCallbackEXT;
+    using NativeType = VkDebugReportCallbackEXT;
+
+    // Object-type tags used by the debug-utils / debug-report machinery.
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDebugReportCallbackEXT;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDebugReportCallbackEXT;
+
+  public:
+    // Default- and nullptr-construction both yield a null handle.
+    VULKAN_HPP_CONSTEXPR DebugReportCallbackEXT() = default;
+    VULKAN_HPP_CONSTEXPR DebugReportCallbackEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {}
+    // Wraps a raw handle; explicitness is controlled by the
+    // VULKAN_HPP_TYPESAFE_EXPLICIT macro (defined outside this chunk).
+    VULKAN_HPP_TYPESAFE_EXPLICIT DebugReportCallbackEXT( VkDebugReportCallbackEXT debugReportCallbackEXT ) VULKAN_HPP_NOEXCEPT
+      : m_debugReportCallbackEXT( debugReportCallbackEXT )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    // Direct assignment from the raw C handle (typesafe-conversion mode only).
+    DebugReportCallbackEXT & operator=(VkDebugReportCallbackEXT debugReportCallbackEXT) VULKAN_HPP_NOEXCEPT
+    {
+      m_debugReportCallbackEXT = debugReportCallbackEXT;
+      return *this;
+    }
+#endif
+
+    // Assigning nullptr resets the wrapper to a null handle.
+    DebugReportCallbackEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_debugReportCallbackEXT = {};
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DebugReportCallbackEXT const & ) const = default;
+#else
+    // Pre-C++20 fallback: compare by raw handle value.
+    bool operator==( DebugReportCallbackEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_debugReportCallbackEXT == rhs.m_debugReportCallbackEXT;
+    }
+
+    bool operator!=(DebugReportCallbackEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_debugReportCallbackEXT != rhs.m_debugReportCallbackEXT;
+    }
+
+    bool operator<(DebugReportCallbackEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_debugReportCallbackEXT < rhs.m_debugReportCallbackEXT;
+    }
+#endif
+
+    // Conversion back to the raw C handle for use with the C API.
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDebugReportCallbackEXT() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_debugReportCallbackEXT;
+    }
+
+    // True iff the wrapped handle is non-null.
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_debugReportCallbackEXT != VK_NULL_HANDLE;
+    }
+
+    // True iff the wrapped handle is null.
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_debugReportCallbackEXT == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkDebugReportCallbackEXT m_debugReportCallbackEXT = {};
+  };
+
+  // Maps ObjectType::eDebugReportCallbackEXT back to the DebugReportCallbackEXT wrapper type.
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDebugReportCallbackEXT>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT;
+  };
+
+
+  // Maps DebugReportObjectTypeEXT::eDebugReportCallbackEXT back to DebugReportCallbackEXT.
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDebugReportCallbackEXT>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT;
+  };
+
+
+  // Marks DebugReportCallbackEXT as a Vulkan handle type (primary template defaults to false).
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  class DebugUtilsMessengerEXT
+  {
+  public:
+    using CType = VkDebugUtilsMessengerEXT;
+    using NativeType = VkDebugUtilsMessengerEXT;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDebugUtilsMessengerEXT;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+
+  public:
+    VULKAN_HPP_CONSTEXPR DebugUtilsMessengerEXT() = default;
+    VULKAN_HPP_CONSTEXPR DebugUtilsMessengerEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {}
+    VULKAN_HPP_TYPESAFE_EXPLICIT DebugUtilsMessengerEXT( VkDebugUtilsMessengerEXT debugUtilsMessengerEXT ) VULKAN_HPP_NOEXCEPT
+      : m_debugUtilsMessengerEXT( debugUtilsMessengerEXT )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    DebugUtilsMessengerEXT & operator=(VkDebugUtilsMessengerEXT debugUtilsMessengerEXT) VULKAN_HPP_NOEXCEPT
+    {
+      m_debugUtilsMessengerEXT = debugUtilsMessengerEXT;
+      return *this;
+    }
+#endif
+
+    DebugUtilsMessengerEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_debugUtilsMessengerEXT = {};
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DebugUtilsMessengerEXT const & ) const = default;
+#else
+    bool operator==( DebugUtilsMessengerEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_debugUtilsMessengerEXT == rhs.m_debugUtilsMessengerEXT;
+    }
+
+    bool operator!=(DebugUtilsMessengerEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_debugUtilsMessengerEXT != rhs.m_debugUtilsMessengerEXT;
+    }
+
+    bool operator<(DebugUtilsMessengerEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_debugUtilsMessengerEXT < rhs.m_debugUtilsMessengerEXT;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDebugUtilsMessengerEXT() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_debugUtilsMessengerEXT;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_debugUtilsMessengerEXT != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_debugUtilsMessengerEXT == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkDebugUtilsMessengerEXT m_debugUtilsMessengerEXT = {};
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDebugUtilsMessengerEXT>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT;
+  };
+
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  class DisplayKHR
+  {
+  public:
+    using CType = VkDisplayKHR;
+    using NativeType = VkDisplayKHR;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDisplayKHR;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayKHR;
+
+  public:
+    VULKAN_HPP_CONSTEXPR DisplayKHR() = default;
+    VULKAN_HPP_CONSTEXPR DisplayKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {}
+    VULKAN_HPP_TYPESAFE_EXPLICIT DisplayKHR( VkDisplayKHR displayKHR ) VULKAN_HPP_NOEXCEPT
+      : m_displayKHR( displayKHR )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    DisplayKHR & operator=(VkDisplayKHR displayKHR) VULKAN_HPP_NOEXCEPT
+    {
+      m_displayKHR = displayKHR;
+      return *this;
+    }
+#endif
+
+    DisplayKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_displayKHR = {};
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DisplayKHR const & ) const = default;
+#else
+    bool operator==( DisplayKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_displayKHR == rhs.m_displayKHR;
+    }
+
+    bool operator!=(DisplayKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_displayKHR != rhs.m_displayKHR;
+    }
+
+    bool operator<(DisplayKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_displayKHR < rhs.m_displayKHR;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDisplayKHR() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_displayKHR;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_displayKHR != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_displayKHR == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkDisplayKHR m_displayKHR = {};
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDisplayKHR>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::DisplayKHR;
+  };
+
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayKHR>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::DisplayKHR;
+  };
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DisplayKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  class SwapchainKHR
+  {
+  public:
+    using CType = VkSwapchainKHR;
+    using NativeType = VkSwapchainKHR;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSwapchainKHR;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSwapchainKHR;
+
+  public:
+    VULKAN_HPP_CONSTEXPR SwapchainKHR() = default;
+    VULKAN_HPP_CONSTEXPR SwapchainKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {}
+    VULKAN_HPP_TYPESAFE_EXPLICIT SwapchainKHR( VkSwapchainKHR swapchainKHR ) VULKAN_HPP_NOEXCEPT
+      : m_swapchainKHR( swapchainKHR )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    SwapchainKHR & operator=(VkSwapchainKHR swapchainKHR) VULKAN_HPP_NOEXCEPT
+    {
+      m_swapchainKHR = swapchainKHR;
+      return *this;
+    }
+#endif
+
+    SwapchainKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_swapchainKHR = {};
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SwapchainKHR const & ) const = default;
+#else
+    bool operator==( SwapchainKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_swapchainKHR == rhs.m_swapchainKHR;
+    }
+
+    bool operator!=(SwapchainKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_swapchainKHR != rhs.m_swapchainKHR;
+    }
+
+    bool operator<(SwapchainKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_swapchainKHR < rhs.m_swapchainKHR;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSwapchainKHR() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_swapchainKHR;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_swapchainKHR != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_swapchainKHR == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkSwapchainKHR m_swapchainKHR = {};
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eSwapchainKHR>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::SwapchainKHR;
+  };
+
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSwapchainKHR>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::SwapchainKHR;
+  };
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::SwapchainKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  class Semaphore
+  {
+  public:
+    using CType = VkSemaphore;
+    using NativeType = VkSemaphore;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSemaphore;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSemaphore;
+
+  public:
+    VULKAN_HPP_CONSTEXPR Semaphore() = default;
+    VULKAN_HPP_CONSTEXPR Semaphore( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {}
+    VULKAN_HPP_TYPESAFE_EXPLICIT Semaphore( VkSemaphore semaphore ) VULKAN_HPP_NOEXCEPT
+      : m_semaphore( semaphore )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    Semaphore & operator=(VkSemaphore semaphore) VULKAN_HPP_NOEXCEPT
+    {
+      m_semaphore = semaphore;
+      return *this;
+    }
+#endif
+
+    Semaphore & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_semaphore = {};
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( Semaphore const & ) const = default;
+#else
+    bool operator==( Semaphore const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_semaphore == rhs.m_semaphore;
+    }
+
+    bool operator!=(Semaphore const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_semaphore != rhs.m_semaphore;
+    }
+
+    bool operator<(Semaphore const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_semaphore < rhs.m_semaphore;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSemaphore() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_semaphore;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_semaphore != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_semaphore == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkSemaphore m_semaphore = {};
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eSemaphore>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Semaphore;
+  };
+
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSemaphore>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Semaphore;
+  };
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Semaphore>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  class Fence
+  {
+  public:
+    using CType = VkFence;
+    using NativeType = VkFence;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eFence;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFence;
+
+  public:
+    VULKAN_HPP_CONSTEXPR Fence() = default;
+    VULKAN_HPP_CONSTEXPR Fence( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {}
+    VULKAN_HPP_TYPESAFE_EXPLICIT Fence( VkFence fence ) VULKAN_HPP_NOEXCEPT
+      : m_fence( fence )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    Fence & operator=(VkFence fence) VULKAN_HPP_NOEXCEPT
+    {
+      m_fence = fence;
+      return *this;
+    }
+#endif
+
+    Fence & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_fence = {};
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( Fence const & ) const = default;
+#else
+    bool operator==( Fence const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_fence == rhs.m_fence;
+    }
+
+    bool operator!=(Fence const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_fence != rhs.m_fence;
+    }
+
+    bool operator<(Fence const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_fence < rhs.m_fence;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkFence() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_fence;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_fence != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_fence == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkFence m_fence = {};
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eFence>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Fence;
+  };
+
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFence>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Fence;
+  };
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Fence>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  class PerformanceConfigurationINTEL
+  {
+  public:
+    using CType = VkPerformanceConfigurationINTEL;
+    using NativeType = VkPerformanceConfigurationINTEL;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePerformanceConfigurationINTEL;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+
+  public:
+    VULKAN_HPP_CONSTEXPR PerformanceConfigurationINTEL() = default;
+    VULKAN_HPP_CONSTEXPR PerformanceConfigurationINTEL( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {}
+    VULKAN_HPP_TYPESAFE_EXPLICIT PerformanceConfigurationINTEL( VkPerformanceConfigurationINTEL performanceConfigurationINTEL ) VULKAN_HPP_NOEXCEPT
+      : m_performanceConfigurationINTEL( performanceConfigurationINTEL )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    PerformanceConfigurationINTEL & operator=(VkPerformanceConfigurationINTEL performanceConfigurationINTEL) VULKAN_HPP_NOEXCEPT
+    {
+      m_performanceConfigurationINTEL = performanceConfigurationINTEL;
+      return *this;
+    }
+#endif
+
+    PerformanceConfigurationINTEL & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_performanceConfigurationINTEL = {};
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PerformanceConfigurationINTEL const & ) const = default;
+#else
+    bool operator==( PerformanceConfigurationINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_performanceConfigurationINTEL == rhs.m_performanceConfigurationINTEL;
+    }
+
+    bool operator!=(PerformanceConfigurationINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_performanceConfigurationINTEL != rhs.m_performanceConfigurationINTEL;
+    }
+
+    bool operator<(PerformanceConfigurationINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_performanceConfigurationINTEL < rhs.m_performanceConfigurationINTEL;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPerformanceConfigurationINTEL() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_performanceConfigurationINTEL;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_performanceConfigurationINTEL != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_performanceConfigurationINTEL == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkPerformanceConfigurationINTEL m_performanceConfigurationINTEL = {};
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::ePerformanceConfigurationINTEL>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL;
+  };
+
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  class QueryPool
+  {
+  public:
+    using CType = VkQueryPool;
+    using NativeType = VkQueryPool;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eQueryPool;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueryPool;
+
+  public:
+    VULKAN_HPP_CONSTEXPR QueryPool() = default;
+    VULKAN_HPP_CONSTEXPR QueryPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {}
+    VULKAN_HPP_TYPESAFE_EXPLICIT QueryPool( VkQueryPool queryPool ) VULKAN_HPP_NOEXCEPT
+      : m_queryPool( queryPool )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    QueryPool & operator=(VkQueryPool queryPool) VULKAN_HPP_NOEXCEPT
+    {
+      m_queryPool = queryPool;
+      return *this;
+    }
+#endif
+
+    QueryPool & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_queryPool = {};
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( QueryPool const & ) const = default;
+#else
+    bool operator==( QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_queryPool == rhs.m_queryPool;
+    }
+
+    bool operator!=(QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_queryPool != rhs.m_queryPool;
+    }
+
+    bool operator<(QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_queryPool < rhs.m_queryPool;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkQueryPool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_queryPool;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_queryPool != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_queryPool == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkQueryPool m_queryPool = {};
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eQueryPool>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::QueryPool;
+  };
+
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueryPool>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::QueryPool;
+  };
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::QueryPool>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  class Buffer
+  {
+  public:
+    using CType = VkBuffer;
+    using NativeType = VkBuffer;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eBuffer;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBuffer;
+
+  public:
+    VULKAN_HPP_CONSTEXPR Buffer() = default;
+    VULKAN_HPP_CONSTEXPR Buffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {}
+    VULKAN_HPP_TYPESAFE_EXPLICIT Buffer( VkBuffer buffer ) VULKAN_HPP_NOEXCEPT
+      : m_buffer( buffer )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    Buffer & operator=(VkBuffer buffer) VULKAN_HPP_NOEXCEPT
+    {
+      m_buffer = buffer;
+      return *this;
+    }
+#endif
+
+    Buffer & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_buffer = {};
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( Buffer const & ) const = default;
+#else
+    bool operator==( Buffer const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_buffer == rhs.m_buffer;
+    }
+
+    bool operator!=(Buffer const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_buffer != rhs.m_buffer;
+    }
+
+    bool operator<(Buffer const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_buffer < rhs.m_buffer;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkBuffer() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_buffer;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_buffer != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_buffer == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkBuffer m_buffer = {};
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eBuffer>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Buffer;
+  };
+
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBuffer>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Buffer;
+  };
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Buffer>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  class PipelineLayout
+  {
+  public:
+    using CType = VkPipelineLayout;
+    using NativeType = VkPipelineLayout;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipelineLayout;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineLayout;
+
+  public:
+    VULKAN_HPP_CONSTEXPR PipelineLayout() = default;
+    VULKAN_HPP_CONSTEXPR PipelineLayout( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {}
+    VULKAN_HPP_TYPESAFE_EXPLICIT PipelineLayout( VkPipelineLayout pipelineLayout ) VULKAN_HPP_NOEXCEPT
+      : m_pipelineLayout( pipelineLayout )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    PipelineLayout & operator=(VkPipelineLayout pipelineLayout) VULKAN_HPP_NOEXCEPT
+    {
+      m_pipelineLayout = pipelineLayout;
+      return *this;
+    }
+#endif
+
+    PipelineLayout & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_pipelineLayout = {};
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineLayout const & ) const = default;
+#else
+    bool operator==( PipelineLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipelineLayout == rhs.m_pipelineLayout;
+    }
+
+    bool operator!=(PipelineLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipelineLayout != rhs.m_pipelineLayout;
+    }
+
+    bool operator<(PipelineLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipelineLayout < rhs.m_pipelineLayout;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipelineLayout() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipelineLayout;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipelineLayout != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipelineLayout == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkPipelineLayout m_pipelineLayout = {};
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::ePipelineLayout>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::PipelineLayout;
+  };
+
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineLayout>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::PipelineLayout;
+  };
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::PipelineLayout>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  class DescriptorSet
+  {
+  public:
+    using CType = VkDescriptorSet;
+    using NativeType = VkDescriptorSet;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSet;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSet;
+
+  public:
+    VULKAN_HPP_CONSTEXPR DescriptorSet() = default;
+    VULKAN_HPP_CONSTEXPR DescriptorSet( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {}
+    VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorSet( VkDescriptorSet descriptorSet ) VULKAN_HPP_NOEXCEPT
+      : m_descriptorSet( descriptorSet )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    DescriptorSet & operator=(VkDescriptorSet descriptorSet) VULKAN_HPP_NOEXCEPT
+    {
+      m_descriptorSet = descriptorSet;
+      return *this;
+    }
+#endif
+
+    DescriptorSet & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_descriptorSet = {};
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DescriptorSet const & ) const = default;
+#else
+    bool operator==( DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorSet == rhs.m_descriptorSet;
+    }
+
+    bool operator!=(DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorSet != rhs.m_descriptorSet;
+    }
+
+    bool operator<(DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorSet < rhs.m_descriptorSet;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorSet() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorSet;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorSet != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorSet == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkDescriptorSet m_descriptorSet = {};
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSet>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::DescriptorSet;
+  };
+
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSet>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::DescriptorSet;
+  };
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DescriptorSet>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  class ImageView
+  {
+  public:
+    using CType = VkImageView;
+    using NativeType = VkImageView;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eImageView;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImageView;
+
+  public:
+    VULKAN_HPP_CONSTEXPR ImageView() = default;
+    VULKAN_HPP_CONSTEXPR ImageView( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {}
+    VULKAN_HPP_TYPESAFE_EXPLICIT ImageView( VkImageView imageView ) VULKAN_HPP_NOEXCEPT
+      : m_imageView( imageView )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    ImageView & operator=(VkImageView imageView) VULKAN_HPP_NOEXCEPT
+    {
+      m_imageView = imageView;
+      return *this;
+    }
+#endif
+
+    ImageView & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_imageView = {};
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ImageView const & ) const = default;
+#else
+    bool operator==( ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_imageView == rhs.m_imageView;
+    }
+
+    bool operator!=(ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_imageView != rhs.m_imageView;
+    }
+
+    bool operator<(ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_imageView < rhs.m_imageView;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkImageView() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_imageView;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_imageView != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_imageView == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkImageView m_imageView = {};
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eImageView>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::ImageView;
+  };
+
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImageView>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::ImageView;
+  };
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::ImageView>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  class Pipeline
+  {
+  public:
+    using CType = VkPipeline;
+    using NativeType = VkPipeline;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipeline;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipeline;
+
+  public:
+    VULKAN_HPP_CONSTEXPR Pipeline() = default;
+    VULKAN_HPP_CONSTEXPR Pipeline( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {}
+    VULKAN_HPP_TYPESAFE_EXPLICIT Pipeline( VkPipeline pipeline ) VULKAN_HPP_NOEXCEPT
+      : m_pipeline( pipeline )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    Pipeline & operator=(VkPipeline pipeline) VULKAN_HPP_NOEXCEPT
+    {
+      m_pipeline = pipeline;
+      return *this;
+    }
+#endif
+
+    Pipeline & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_pipeline = {};
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( Pipeline const & ) const = default;
+#else
+    bool operator==( Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipeline == rhs.m_pipeline;
+    }
+
+    bool operator!=(Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipeline != rhs.m_pipeline;
+    }
+
+    bool operator<(Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipeline < rhs.m_pipeline;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipeline() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipeline;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipeline != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipeline == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkPipeline m_pipeline = {};
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::ePipeline>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Pipeline;
+  };
+
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipeline>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Pipeline;
+  };
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Pipeline>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  // Type-safe C++ wrapper around the C handle VkImage: stores only the raw
+  // handle, giving it the same size as VkImage and plain value semantics.
+  class Image
+  {
+  public:
+    using CType = VkImage;
+    using NativeType = VkImage;
+
+    // Enum values consumed by the generic object-type / debug-report machinery.
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eImage;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImage;
+
+  public:
+    VULKAN_HPP_CONSTEXPR Image() = default;
+    VULKAN_HPP_CONSTEXPR Image( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {}
+    // Explicit when VULKAN_HPP_TYPESAFE_CONVERSION is defined, implicit otherwise.
+    VULKAN_HPP_TYPESAFE_EXPLICIT Image( VkImage image ) VULKAN_HPP_NOEXCEPT
+      : m_image( image )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    // Assignment from the raw C handle is only offered in type-safe mode.
+    Image & operator=(VkImage image) VULKAN_HPP_NOEXCEPT
+    {
+      m_image = image;
+      return *this;
+    }
+#endif
+
+    Image & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_image = {};
+      return *this;
+    }
+
+    // Comparisons operate on the raw handle value only.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( Image const & ) const = default;
+#else
+    bool operator==( Image const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_image == rhs.m_image;
+    }
+
+    bool operator!=(Image const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_image != rhs.m_image;
+    }
+
+    bool operator<(Image const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_image < rhs.m_image;
+    }
+#endif
+
+    // Conversion back to the raw C handle.
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkImage() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_image;
+    }
+
+    // True iff the wrapped handle is not VK_NULL_HANDLE.
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_image != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_image == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkImage m_image = {};  // the wrapped C handle; zero-initialized (null handle)
+  };
+
+  // Maps ObjectType::eImage back to the C++ handle type for generic code.
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eImage>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Image;
+  };
+
+
+  // Same mapping for the debug-report object-type enum.
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImage>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Image;
+  };
+
+
+  // Marks Image as a Vulkan handle type for template metaprogramming.
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Image>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  // Type-safe C++ wrapper around the C handle VkAccelerationStructureNV:
+  // stores only the raw handle, giving it plain value semantics.
+  class AccelerationStructureNV
+  {
+  public:
+    using CType = VkAccelerationStructureNV;
+    using NativeType = VkAccelerationStructureNV;
+
+    // Enum values consumed by the generic object-type / debug-report machinery.
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureNV;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureNV;
+
+  public:
+    VULKAN_HPP_CONSTEXPR AccelerationStructureNV() = default;
+    VULKAN_HPP_CONSTEXPR AccelerationStructureNV( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {}
+    // Explicit when VULKAN_HPP_TYPESAFE_CONVERSION is defined, implicit otherwise.
+    VULKAN_HPP_TYPESAFE_EXPLICIT AccelerationStructureNV( VkAccelerationStructureNV accelerationStructureNV ) VULKAN_HPP_NOEXCEPT
+      : m_accelerationStructureNV( accelerationStructureNV )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    // Assignment from the raw C handle is only offered in type-safe mode.
+    AccelerationStructureNV & operator=(VkAccelerationStructureNV accelerationStructureNV) VULKAN_HPP_NOEXCEPT
+    {
+      m_accelerationStructureNV = accelerationStructureNV;
+      return *this;
+    }
+#endif
+
+    AccelerationStructureNV & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_accelerationStructureNV = {};
+      return *this;
+    }
+
+    // Comparisons operate on the raw handle value only.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( AccelerationStructureNV const & ) const = default;
+#else
+    bool operator==( AccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_accelerationStructureNV == rhs.m_accelerationStructureNV;
+    }
+
+    bool operator!=(AccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_accelerationStructureNV != rhs.m_accelerationStructureNV;
+    }
+
+    bool operator<(AccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_accelerationStructureNV < rhs.m_accelerationStructureNV;
+    }
+#endif
+
+    // Conversion back to the raw C handle.
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkAccelerationStructureNV() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_accelerationStructureNV;
+    }
+
+    // True iff the wrapped handle is not VK_NULL_HANDLE.
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_accelerationStructureNV != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_accelerationStructureNV == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkAccelerationStructureNV m_accelerationStructureNV = {};  // the wrapped C handle
+  };
+
+  // Maps ObjectType::eAccelerationStructureNV back to the C++ handle type.
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureNV>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::AccelerationStructureNV;
+  };
+
+
+  // Same mapping for the debug-report object-type enum.
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureNV>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::AccelerationStructureNV;
+  };
+
+
+  // Marks AccelerationStructureNV as a Vulkan handle type.
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::AccelerationStructureNV>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  // Type-safe C++ wrapper around the C handle VkOpticalFlowSessionNV:
+  // stores only the raw handle, giving it plain value semantics.
+  class OpticalFlowSessionNV
+  {
+  public:
+    using CType = VkOpticalFlowSessionNV;
+    using NativeType = VkOpticalFlowSessionNV;
+
+    // No dedicated debug-report enum value exists for this handle, so it is
+    // reported as eUnknown.
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eOpticalFlowSessionNV;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+
+  public:
+    VULKAN_HPP_CONSTEXPR OpticalFlowSessionNV() = default;
+    VULKAN_HPP_CONSTEXPR OpticalFlowSessionNV( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {}
+    // Explicit when VULKAN_HPP_TYPESAFE_CONVERSION is defined, implicit otherwise.
+    VULKAN_HPP_TYPESAFE_EXPLICIT OpticalFlowSessionNV( VkOpticalFlowSessionNV opticalFlowSessionNV ) VULKAN_HPP_NOEXCEPT
+      : m_opticalFlowSessionNV( opticalFlowSessionNV )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    // Assignment from the raw C handle is only offered in type-safe mode.
+    OpticalFlowSessionNV & operator=(VkOpticalFlowSessionNV opticalFlowSessionNV) VULKAN_HPP_NOEXCEPT
+    {
+      m_opticalFlowSessionNV = opticalFlowSessionNV;
+      return *this;
+    }
+#endif
+
+    OpticalFlowSessionNV & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_opticalFlowSessionNV = {};
+      return *this;
+    }
+
+    // Comparisons operate on the raw handle value only.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( OpticalFlowSessionNV const & ) const = default;
+#else
+    bool operator==( OpticalFlowSessionNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_opticalFlowSessionNV == rhs.m_opticalFlowSessionNV;
+    }
+
+    bool operator!=(OpticalFlowSessionNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_opticalFlowSessionNV != rhs.m_opticalFlowSessionNV;
+    }
+
+    bool operator<(OpticalFlowSessionNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_opticalFlowSessionNV < rhs.m_opticalFlowSessionNV;
+    }
+#endif
+
+    // Conversion back to the raw C handle.
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkOpticalFlowSessionNV() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_opticalFlowSessionNV;
+    }
+
+    // True iff the wrapped handle is not VK_NULL_HANDLE.
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_opticalFlowSessionNV != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_opticalFlowSessionNV == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkOpticalFlowSessionNV m_opticalFlowSessionNV = {};  // the wrapped C handle
+  };
+
+  // Maps ObjectType::eOpticalFlowSessionNV back to the C++ handle type.
+  // Note: no DebugReportObjectTypeEXT mapping is emitted for this handle
+  // (its debugReportObjectType is eUnknown).
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eOpticalFlowSessionNV>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV;
+  };
+
+
+
+  // Marks OpticalFlowSessionNV as a Vulkan handle type.
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  // Type-safe C++ wrapper around the C handle VkDescriptorUpdateTemplate:
+  // stores only the raw handle, giving it plain value semantics.
+  class DescriptorUpdateTemplate
+  {
+  public:
+    using CType = VkDescriptorUpdateTemplate;
+    using NativeType = VkDescriptorUpdateTemplate;
+
+    // Enum values consumed by the generic object-type / debug-report machinery.
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorUpdateTemplate;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorUpdateTemplate;
+
+  public:
+    VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplate() = default;
+    VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplate( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {}
+    // Explicit when VULKAN_HPP_TYPESAFE_CONVERSION is defined, implicit otherwise.
+    VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorUpdateTemplate( VkDescriptorUpdateTemplate descriptorUpdateTemplate ) VULKAN_HPP_NOEXCEPT
+      : m_descriptorUpdateTemplate( descriptorUpdateTemplate )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    // Assignment from the raw C handle is only offered in type-safe mode.
+    DescriptorUpdateTemplate & operator=(VkDescriptorUpdateTemplate descriptorUpdateTemplate) VULKAN_HPP_NOEXCEPT
+    {
+      m_descriptorUpdateTemplate = descriptorUpdateTemplate;
+      return *this;
+    }
+#endif
+
+    DescriptorUpdateTemplate & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_descriptorUpdateTemplate = {};
+      return *this;
+    }
+
+    // Comparisons operate on the raw handle value only.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DescriptorUpdateTemplate const & ) const = default;
+#else
+    bool operator==( DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorUpdateTemplate == rhs.m_descriptorUpdateTemplate;
+    }
+
+    bool operator!=(DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorUpdateTemplate != rhs.m_descriptorUpdateTemplate;
+    }
+
+    bool operator<(DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorUpdateTemplate < rhs.m_descriptorUpdateTemplate;
+    }
+#endif
+
+    // Conversion back to the raw C handle.
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorUpdateTemplate() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorUpdateTemplate;
+    }
+
+    // True iff the wrapped handle is not VK_NULL_HANDLE.
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorUpdateTemplate != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorUpdateTemplate == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkDescriptorUpdateTemplate m_descriptorUpdateTemplate = {};  // the wrapped C handle
+  };
+
+  // Maps ObjectType::eDescriptorUpdateTemplate back to the C++ handle type.
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorUpdateTemplate>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate;
+  };
+
+
+  // Same mapping for the debug-report object-type enum.
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorUpdateTemplate>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate;
+  };
+
+
+  // Marks DescriptorUpdateTemplate as a Vulkan handle type.
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+  // The KHR-suffixed name is an alias of the core handle type.
+  using DescriptorUpdateTemplateKHR = DescriptorUpdateTemplate;
+
+  // Type-safe C++ wrapper around the C handle VkEvent: stores only the raw
+  // handle, giving it the same size as VkEvent and plain value semantics.
+  class Event
+  {
+  public:
+    using CType = VkEvent;
+    using NativeType = VkEvent;
+
+    // Enum values consumed by the generic object-type / debug-report machinery.
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eEvent;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eEvent;
+
+  public:
+    VULKAN_HPP_CONSTEXPR Event() = default;
+    VULKAN_HPP_CONSTEXPR Event( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {}
+    // Explicit when VULKAN_HPP_TYPESAFE_CONVERSION is defined, implicit otherwise.
+    VULKAN_HPP_TYPESAFE_EXPLICIT Event( VkEvent event ) VULKAN_HPP_NOEXCEPT
+      : m_event( event )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    // Assignment from the raw C handle is only offered in type-safe mode.
+    Event & operator=(VkEvent event) VULKAN_HPP_NOEXCEPT
+    {
+      m_event = event;
+      return *this;
+    }
+#endif
+
+    Event & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_event = {};
+      return *this;
+    }
+
+    // Comparisons operate on the raw handle value only.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( Event const & ) const = default;
+#else
+    bool operator==( Event const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_event == rhs.m_event;
+    }
+
+    bool operator!=(Event const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_event != rhs.m_event;
+    }
+
+    bool operator<(Event const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_event < rhs.m_event;
+    }
+#endif
+
+    // Conversion back to the raw C handle.
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkEvent() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_event;
+    }
+
+    // True iff the wrapped handle is not VK_NULL_HANDLE.
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_event != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_event == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkEvent m_event = {};  // the wrapped C handle; zero-initialized (null handle)
+  };
+
+  // Maps ObjectType::eEvent back to the C++ handle type for generic code.
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eEvent>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Event;
+  };
+
+
+  // Same mapping for the debug-report object-type enum.
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eEvent>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Event;
+  };
+
+
+  // Marks Event as a Vulkan handle type for template metaprogramming.
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Event>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  // Type-safe C++ wrapper around the C handle VkAccelerationStructureKHR:
+  // stores only the raw handle, giving it plain value semantics.
+  class AccelerationStructureKHR
+  {
+  public:
+    using CType = VkAccelerationStructureKHR;
+    using NativeType = VkAccelerationStructureKHR;
+
+    // Enum values consumed by the generic object-type / debug-report machinery.
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureKHR;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureKHR;
+
+  public:
+    VULKAN_HPP_CONSTEXPR AccelerationStructureKHR() = default;
+    VULKAN_HPP_CONSTEXPR AccelerationStructureKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {}
+    // Explicit when VULKAN_HPP_TYPESAFE_CONVERSION is defined, implicit otherwise.
+    VULKAN_HPP_TYPESAFE_EXPLICIT AccelerationStructureKHR( VkAccelerationStructureKHR accelerationStructureKHR ) VULKAN_HPP_NOEXCEPT
+      : m_accelerationStructureKHR( accelerationStructureKHR )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    // Assignment from the raw C handle is only offered in type-safe mode.
+    AccelerationStructureKHR & operator=(VkAccelerationStructureKHR accelerationStructureKHR) VULKAN_HPP_NOEXCEPT
+    {
+      m_accelerationStructureKHR = accelerationStructureKHR;
+      return *this;
+    }
+#endif
+
+    AccelerationStructureKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_accelerationStructureKHR = {};
+      return *this;
+    }
+
+    // Comparisons operate on the raw handle value only.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( AccelerationStructureKHR const & ) const = default;
+#else
+    bool operator==( AccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_accelerationStructureKHR == rhs.m_accelerationStructureKHR;
+    }
+
+    bool operator!=(AccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_accelerationStructureKHR != rhs.m_accelerationStructureKHR;
+    }
+
+    bool operator<(AccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_accelerationStructureKHR < rhs.m_accelerationStructureKHR;
+    }
+#endif
+
+    // Conversion back to the raw C handle.
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkAccelerationStructureKHR() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_accelerationStructureKHR;
+    }
+
+    // True iff the wrapped handle is not VK_NULL_HANDLE.
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_accelerationStructureKHR != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_accelerationStructureKHR == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkAccelerationStructureKHR m_accelerationStructureKHR = {};  // the wrapped C handle
+  };
+
+  // Maps ObjectType::eAccelerationStructureKHR back to the C++ handle type.
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureKHR>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::AccelerationStructureKHR;
+  };
+
+
+  // Same mapping for the debug-report object-type enum.
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureKHR>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::AccelerationStructureKHR;
+  };
+
+
+  // Marks AccelerationStructureKHR as a Vulkan handle type.
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  // Type-safe C++ wrapper around the C handle VkMicromapEXT: stores only the
+  // raw handle, giving it plain value semantics.
+  class MicromapEXT
+  {
+  public:
+    using CType = VkMicromapEXT;
+    using NativeType = VkMicromapEXT;
+
+    // No dedicated debug-report enum value exists for this handle, so it is
+    // reported as eUnknown.
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eMicromapEXT;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+
+  public:
+    VULKAN_HPP_CONSTEXPR MicromapEXT() = default;
+    VULKAN_HPP_CONSTEXPR MicromapEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {}
+    // Explicit when VULKAN_HPP_TYPESAFE_CONVERSION is defined, implicit otherwise.
+    VULKAN_HPP_TYPESAFE_EXPLICIT MicromapEXT( VkMicromapEXT micromapEXT ) VULKAN_HPP_NOEXCEPT
+      : m_micromapEXT( micromapEXT )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    // Assignment from the raw C handle is only offered in type-safe mode.
+    MicromapEXT & operator=(VkMicromapEXT micromapEXT) VULKAN_HPP_NOEXCEPT
+    {
+      m_micromapEXT = micromapEXT;
+      return *this;
+    }
+#endif
+
+    MicromapEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_micromapEXT = {};
+      return *this;
+    }
+
+    // Comparisons operate on the raw handle value only.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( MicromapEXT const & ) const = default;
+#else
+    bool operator==( MicromapEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_micromapEXT == rhs.m_micromapEXT;
+    }
+
+    bool operator!=(MicromapEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_micromapEXT != rhs.m_micromapEXT;
+    }
+
+    bool operator<(MicromapEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_micromapEXT < rhs.m_micromapEXT;
+    }
+#endif
+
+    // Conversion back to the raw C handle.
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkMicromapEXT() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_micromapEXT;
+    }
+
+    // True iff the wrapped handle is not VK_NULL_HANDLE.
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_micromapEXT != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_micromapEXT == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkMicromapEXT m_micromapEXT = {};  // the wrapped C handle
+  };
+
+  // Maps ObjectType::eMicromapEXT back to the C++ handle type.
+  // Note: no DebugReportObjectTypeEXT mapping is emitted for this handle
+  // (its debugReportObjectType is eUnknown).
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eMicromapEXT>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::MicromapEXT;
+  };
+
+
+
+  // Marks MicromapEXT as a Vulkan handle type.
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::MicromapEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  class CommandBuffer
+  {
+  public:
+    using CType = VkCommandBuffer;
+    using NativeType = VkCommandBuffer;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCommandBuffer;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandBuffer;
+
+  public:
+    VULKAN_HPP_CONSTEXPR CommandBuffer() = default;
+    VULKAN_HPP_CONSTEXPR CommandBuffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {}
+    CommandBuffer( VkCommandBuffer commandBuffer ) VULKAN_HPP_NOEXCEPT
+      : m_commandBuffer( commandBuffer )
+    {}
+
+    CommandBuffer & operator=(VkCommandBuffer commandBuffer) VULKAN_HPP_NOEXCEPT
+    {
+      m_commandBuffer = commandBuffer;
+      return *this;
+    }
+
+    CommandBuffer & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_commandBuffer = {};
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( CommandBuffer const & ) const = default;
+#else
+    bool operator==( CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_commandBuffer == rhs.m_commandBuffer;
+    }
+
+    bool operator!=(CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_commandBuffer != rhs.m_commandBuffer;
+    }
+
+    bool operator<(CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_commandBuffer < rhs.m_commandBuffer;
+    }
+#endif
+
+  //=== VK_VERSION_1_0 ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo * pBeginInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo & beginInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result end( Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type end( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setViewport( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport * pViewports, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setViewport( uint32_t firstViewport, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setScissor( uint32_t firstScissor, uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setScissor( uint32_t firstScissor, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setLineWidth( float lineWidth, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setBlendConstants( const float blendConstants[4], Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t * pDynamicOffsets, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets, VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & dynamicOffsets, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::IndexType indexType, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindVertexBuffers( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindVertexBuffers( uint32_t firstBinding, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferCopy * pRegions, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferCopy> const & regions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageCopy * pRegions, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageCopy> const & regions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageBlit * pRegions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageBlit> const & regions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize dataSize, const void * pData, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename DataType, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::ArrayProxy<const DataType> const & data, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize size, uint32_t data, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearColorValue * pColor, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearColorValue & color, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue * pDepthStencil, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue & depthStencil, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void clearAttachments( uint32_t attachmentCount, const VULKAN_HPP_NAMESPACE::ClearAttachment * pAttachments, uint32_t rectCount, const VULKAN_HPP_NAMESPACE::ClearRect * pRects, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void clearAttachments( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearAttachment> const & attachments, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearRect> const & rects, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageResolve * pRegions, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageResolve> const & regions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void resetEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void waitEvents( uint32_t eventCount, const VULKAN_HPP_NAMESPACE::Event * pEvents, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void waitEvents( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void * pValues, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename ValuesType, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, VULKAN_HPP_NAMESPACE::ArrayProxy<const ValuesType> const & values, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endRenderPass( Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void executeCommands( uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void executeCommands( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_VERSION_1_1 ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDeviceMask( uint32_t deviceMask, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void dispatchBase( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_VERSION_1_2 ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_VERSION_1_3 ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setEvent2( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setEvent2( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void resetEvent2( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void waitEvents2( uint32_t eventCount, const VULKAN_HPP_NAMESPACE::Event * pEvents, const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfos, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void waitEvents2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfo> const & dependencyInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void pipelineBarrier2( const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void pipelineBarrier2( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void writeTimestamp2( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyBuffer2( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 * pCopyBufferInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyBuffer2( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 * pCopyImageInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyBufferToImage2( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 * pCopyBufferToImageInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyBufferToImage2( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyImageToBuffer2( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 * pCopyImageToBufferInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyImageToBuffer2( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void blitImage2( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 * pBlitImageInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void blitImage2( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void resolveImage2( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 * pResolveImageInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void resolveImage2( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginRendering( const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginRendering( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endRendering( Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setPrimitiveTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setViewportWithCount( uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport * pViewports, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setViewportWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setScissorWithCount( uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setScissorWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindVertexBuffers2( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes, const VULKAN_HPP_NAMESPACE::DeviceSize * pStrides, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindVertexBuffers2( uint32_t firstBinding, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setStencilOp( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, VULKAN_HPP_NAMESPACE::StencilOp failOp, VULKAN_HPP_NAMESPACE::StencilOp passOp, VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, VULKAN_HPP_NAMESPACE::CompareOp compareOp, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_EXT_debug_marker ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void debugMarkerEndEXT( Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_video_queue ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR * pBeginInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR & beginInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR * pEndCodingInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR & endCodingInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR * pCodingControlInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR & codingControlInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_video_decode_queue ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR * pDecodeInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR & decodeInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_EXT_transform_feedback ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindTransformFeedbackBuffersEXT( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindTransformFeedbackBuffersEXT( uint32_t firstBinding, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginTransformFeedbackEXT( uint32_t firstCounterBuffer, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endTransformFeedbackEXT( uint32_t firstCounterBuffer, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, uint32_t index, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawIndirectByteCountEXT( uint32_t instanceCount, uint32_t firstInstance, VULKAN_HPP_NAMESPACE::Buffer counterBuffer, VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_NVX_binary_import ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX * pLaunchInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX & launchInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_AMD_draw_indirect_count ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_KHR_dynamic_rendering ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginRenderingKHR( const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginRenderingKHR( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endRenderingKHR( Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_KHR_device_group ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDeviceMaskKHR( uint32_t deviceMask, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void dispatchBaseKHR( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_KHR_push_descriptor ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, const void * pData, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename DataType, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, DataType const & data, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_EXT_conditional_rendering ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT * pConditionalRenderingBegin, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endConditionalRenderingEXT( Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_NV_clip_space_w_scaling ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setViewportWScalingNV( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setViewportWScalingNV( uint32_t firstViewport, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const & viewportWScalings, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_EXT_discard_rectangles ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDiscardRectangleEXT( uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDiscardRectangleEXT( uint32_t firstDiscardRectangle, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_create_renderpass2 ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_EXT_debug_utils ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endDebugUtilsLabelEXT( Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_EXT_sample_locations ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT * pSampleLocationsInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT & sampleLocationsInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_acceleration_structure ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void buildAccelerationStructuresKHR( uint32_t infoCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void buildAccelerationStructuresKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void buildAccelerationStructuresIndirectKHR( uint32_t infoCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos, const VULKAN_HPP_NAMESPACE::DeviceAddress * pIndirectDeviceAddresses, const uint32_t * pIndirectStrides, const uint32_t * const * ppMaxPrimitiveCounts, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void buildAccelerationStructuresIndirectKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceAddress> const & indirectDeviceAddresses, VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & indirectStrides, VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t * const> const & pMaxPrimitiveCounts, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void writeAccelerationStructuresPropertiesKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_NV_shading_rate_image ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setViewportShadingRatePaletteNV( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setViewportShadingRatePaletteNV( uint32_t firstViewport, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const & shadingRatePalettes, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> const & customSampleOrders, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_NV_ray_tracing ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV * pInfo, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV & info, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void writeAccelerationStructuresPropertiesNV( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void writeAccelerationStructuresPropertiesNV( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> const & accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_draw_indirect_count ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_AMD_buffer_marker ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, uint32_t marker, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_NV_mesh_shader ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_NV_scissor_exclusive ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setExclusiveScissorNV( uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setExclusiveScissorNV( uint32_t firstExclusiveScissor, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_NV_device_diagnostic_checkpoints ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setCheckpointNV( const void * pCheckpointMarker, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename CheckpointMarkerType, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setCheckpointNV( CheckpointMarkerType const & checkpointMarker, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_INTEL_performance_query ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL * pMarkerInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL & markerInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL * pMarkerInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL & markerInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL * pOverrideInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL & overrideInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_fragment_shading_rate ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D * pFragmentSize, const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D & fragmentSize, const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_EXT_line_rasterization ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_EXT_extended_dynamic_state ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setViewportWithCountEXT( uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport * pViewports, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setViewportWithCountEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setScissorWithCountEXT( uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setScissorWithCountEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindVertexBuffers2EXT( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes, const VULKAN_HPP_NAMESPACE::DeviceSize * pStrides, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindVertexBuffers2EXT( uint32_t firstBinding, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, VULKAN_HPP_NAMESPACE::StencilOp failOp, VULKAN_HPP_NAMESPACE::StencilOp passOp, VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, VULKAN_HPP_NAMESPACE::CompareOp compareOp, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_NV_device_generated_commands ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t groupIndex, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  //=== VK_KHR_video_encode_queue ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR * pEncodeInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR & encodeInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  //=== VK_KHR_synchronization2 ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void resetEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void waitEvents2KHR( uint32_t eventCount, const VULKAN_HPP_NAMESPACE::Event * pEvents, const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfos, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void waitEvents2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfo> const & dependencyInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void writeTimestamp2KHR( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, uint32_t marker, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_EXT_descriptor_buffer ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindDescriptorBuffersEXT( uint32_t bufferCount, const VULKAN_HPP_NAMESPACE::DescriptorBufferBindingInfoEXT * pBindingInfos, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindDescriptorBuffersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorBufferBindingInfoEXT> const & bindingInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDescriptorBufferOffsetsEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, uint32_t setCount, const uint32_t * pBufferIndices, const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDescriptorBufferOffsetsEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & bufferIndices, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindDescriptorBufferEmbeddedSamplersEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_NV_fragment_shading_rate_enums ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setFragmentShadingRateEnumNV( VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate, const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_EXT_mesh_shader ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawMeshTasksEXT( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawMeshTasksIndirectEXT( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawMeshTasksIndirectCountEXT( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_KHR_copy_commands2 ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 * pCopyBufferInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 * pCopyImageInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 * pCopyBufferToImageInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 * pCopyImageToBufferInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 * pBlitImageInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 * pResolveImageInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_ray_tracing_pipeline ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable, VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable, VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setRayTracingPipelineStackSizeKHR( uint32_t pipelineStackSize, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_EXT_vertex_input_dynamic_state ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setVertexInputEXT( uint32_t vertexBindingDescriptionCount, const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT * pVertexBindingDescriptions, uint32_t vertexAttributeDescriptionCount, const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT * pVertexAttributeDescriptions, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setVertexInputEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT> const & vertexBindingDescriptions, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT> const & vertexAttributeDescriptions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_HUAWEI_subpass_shading ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void subpassShadingHUAWEI( Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_HUAWEI_invocation_mask ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindInvocationMaskHUAWEI( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_EXT_extended_dynamic_state2 ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setPatchControlPointsEXT( uint32_t patchControlPoints, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setRasterizerDiscardEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDepthBiasEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setLogicOpEXT( VULKAN_HPP_NAMESPACE::LogicOp logicOp, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setPrimitiveRestartEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_EXT_color_write_enable ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setColorWriteEnableEXT( uint32_t attachmentCount, const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setColorWriteEnableEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorWriteEnables, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_ray_tracing_maintenance1 ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void traceRaysIndirect2KHR( VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_EXT_multi_draw ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawMultiEXT( uint32_t drawCount, const VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT * pVertexInfo, uint32_t instanceCount, uint32_t firstInstance, uint32_t stride, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawMultiEXT( VULKAN_HPP_NAMESPACE::StridedArrayProxy<const VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT> const & vertexInfo, uint32_t instanceCount, uint32_t firstInstance, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawMultiIndexedEXT( uint32_t drawCount, const VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT * pIndexInfo, uint32_t instanceCount, uint32_t firstInstance, uint32_t stride, const int32_t * pVertexOffset, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawMultiIndexedEXT( VULKAN_HPP_NAMESPACE::StridedArrayProxy<const VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT> const & indexInfo, uint32_t instanceCount, uint32_t firstInstance, Optional<const int32_t> vertexOffset VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_EXT_opacity_micromap ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void buildMicromapsEXT( uint32_t infoCount, const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pInfos, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void buildMicromapsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT> const & infos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT * pInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyMicromapToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT * pInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyMicromapToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyMemoryToMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT * pInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyMemoryToMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void writeMicromapsPropertiesEXT( uint32_t micromapCount, const VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromaps, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_HUAWEI_cluster_culling_shader ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawClusterHUAWEI( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawClusterIndirectHUAWEI( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_NV_copy_memory_indirect ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyMemoryIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, uint32_t copyCount, uint32_t stride, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyMemoryToImageIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, uint32_t copyCount, uint32_t stride, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, const VULKAN_HPP_NAMESPACE::ImageSubresourceLayers * pImageSubresources, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyMemoryToImageIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, uint32_t stride, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceLayers> const & imageSubresources, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_NV_memory_decompression ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void decompressMemoryNV( uint32_t decompressRegionCount, const VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV * pDecompressMemoryRegions, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void decompressMemoryNV( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV> const & decompressMemoryRegions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void decompressMemoryIndirectCountNV( VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsAddress, VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsCountAddress, uint32_t stride, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_EXT_extended_dynamic_state3 ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setTessellationDomainOriginEXT( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDepthClampEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setPolygonModeEXT( VULKAN_HPP_NAMESPACE::PolygonMode polygonMode, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setRasterizationSamplesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setSampleMaskEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setSampleMaskEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SampleMask> const & sampleMask, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setAlphaToCoverageEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setAlphaToOneEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setLogicOpEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setColorBlendEnableEXT( uint32_t firstAttachment, uint32_t attachmentCount, const VULKAN_HPP_NAMESPACE::Bool32 * pColorBlendEnables, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setColorBlendEnableEXT( uint32_t firstAttachment, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorBlendEnables, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setColorBlendEquationEXT( uint32_t firstAttachment, uint32_t attachmentCount, const VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT * pColorBlendEquations, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setColorBlendEquationEXT( uint32_t firstAttachment, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT> const & colorBlendEquations, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setColorWriteMaskEXT( uint32_t firstAttachment, uint32_t attachmentCount, const VULKAN_HPP_NAMESPACE::ColorComponentFlags * pColorWriteMasks, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setColorWriteMaskEXT( uint32_t firstAttachment, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorComponentFlags> const & colorWriteMasks, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setRasterizationStreamEXT( uint32_t rasterizationStream, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setConservativeRasterizationModeEXT( VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setExtraPrimitiveOverestimationSizeEXT( float extraPrimitiveOverestimationSize, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDepthClipEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setSampleLocationsEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setColorBlendAdvancedEXT( uint32_t firstAttachment, uint32_t attachmentCount, const VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT * pColorBlendAdvanced, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setColorBlendAdvancedEXT( uint32_t firstAttachment, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT> const & colorBlendAdvanced, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setProvokingVertexModeEXT( VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setLineRasterizationModeEXT( VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setLineStippleEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDepthClipNegativeOneToOneEXT( VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setViewportWScalingEnableNV( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setViewportSwizzleNV( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * pViewportSwizzles, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setViewportSwizzleNV( uint32_t firstViewport, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV> const & viewportSwizzles, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setCoverageToColorEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setCoverageToColorLocationNV( uint32_t coverageToColorLocation, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setCoverageModulationModeNV( VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setCoverageModulationTableEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setCoverageModulationTableNV( uint32_t coverageModulationTableCount, const float * pCoverageModulationTable, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setCoverageModulationTableNV( VULKAN_HPP_NAMESPACE::ArrayProxy<const float> const & coverageModulationTable, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setShadingRateImageEnableNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setRepresentativeFragmentTestEnableNV( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setCoverageReductionModeNV( VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_NV_optical_flow ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void opticalFlowExecuteNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV * pExecuteInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void opticalFlowExecuteNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV & executeInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    operator VkCommandBuffer() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_commandBuffer;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_commandBuffer != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_commandBuffer == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkCommandBuffer m_commandBuffer = {};
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eCommandBuffer>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::CommandBuffer;
+  };
+
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandBuffer>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::CommandBuffer;
+  };
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::CommandBuffer>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  class DeviceMemory
+  {
+  public:
+    using CType = VkDeviceMemory;
+    using NativeType = VkDeviceMemory;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDeviceMemory;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDeviceMemory;
+
+  public:
+    VULKAN_HPP_CONSTEXPR DeviceMemory() = default;
+    VULKAN_HPP_CONSTEXPR DeviceMemory( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {}
+    VULKAN_HPP_TYPESAFE_EXPLICIT DeviceMemory( VkDeviceMemory deviceMemory ) VULKAN_HPP_NOEXCEPT
+      : m_deviceMemory( deviceMemory )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    DeviceMemory & operator=(VkDeviceMemory deviceMemory) VULKAN_HPP_NOEXCEPT
+    {
+      m_deviceMemory = deviceMemory;
+      return *this;
+    }
+#endif
+
+    DeviceMemory & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_deviceMemory = {};
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DeviceMemory const & ) const = default;
+#else
+    bool operator==( DeviceMemory const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_deviceMemory == rhs.m_deviceMemory;
+    }
+
+    bool operator!=(DeviceMemory const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_deviceMemory != rhs.m_deviceMemory;
+    }
+
+    bool operator<(DeviceMemory const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_deviceMemory < rhs.m_deviceMemory;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDeviceMemory() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_deviceMemory;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_deviceMemory != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_deviceMemory == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkDeviceMemory m_deviceMemory = {};
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDeviceMemory>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::DeviceMemory;
+  };
+
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDeviceMemory>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::DeviceMemory;
+  };
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DeviceMemory>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  class VideoSessionKHR
+  {
+  public:
+    using CType = VkVideoSessionKHR;
+    using NativeType = VkVideoSessionKHR;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eVideoSessionKHR;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+
+  public:
+    VULKAN_HPP_CONSTEXPR VideoSessionKHR() = default;
+    VULKAN_HPP_CONSTEXPR VideoSessionKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {}
+    VULKAN_HPP_TYPESAFE_EXPLICIT VideoSessionKHR( VkVideoSessionKHR videoSessionKHR ) VULKAN_HPP_NOEXCEPT
+      : m_videoSessionKHR( videoSessionKHR )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    VideoSessionKHR & operator=(VkVideoSessionKHR videoSessionKHR) VULKAN_HPP_NOEXCEPT
+    {
+      m_videoSessionKHR = videoSessionKHR;
+      return *this;
+    }
+#endif
+
+    VideoSessionKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_videoSessionKHR = {};
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( VideoSessionKHR const & ) const = default;
+#else
+    bool operator==( VideoSessionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_videoSessionKHR == rhs.m_videoSessionKHR;
+    }
+
+    bool operator!=(VideoSessionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_videoSessionKHR != rhs.m_videoSessionKHR;
+    }
+
+    bool operator<(VideoSessionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_videoSessionKHR < rhs.m_videoSessionKHR;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkVideoSessionKHR() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_videoSessionKHR;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_videoSessionKHR != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_videoSessionKHR == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkVideoSessionKHR m_videoSessionKHR = {};
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eVideoSessionKHR>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::VideoSessionKHR;
+  };
+
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::VideoSessionKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  class DeferredOperationKHR
+  {
+  public:
+    using CType = VkDeferredOperationKHR;
+    using NativeType = VkDeferredOperationKHR;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDeferredOperationKHR;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+
+  public:
+    VULKAN_HPP_CONSTEXPR DeferredOperationKHR() = default;
+    VULKAN_HPP_CONSTEXPR DeferredOperationKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {}
+    VULKAN_HPP_TYPESAFE_EXPLICIT DeferredOperationKHR( VkDeferredOperationKHR deferredOperationKHR ) VULKAN_HPP_NOEXCEPT
+      : m_deferredOperationKHR( deferredOperationKHR )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    DeferredOperationKHR & operator=(VkDeferredOperationKHR deferredOperationKHR) VULKAN_HPP_NOEXCEPT
+    {
+      m_deferredOperationKHR = deferredOperationKHR;
+      return *this;
+    }
+#endif
+
+    DeferredOperationKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_deferredOperationKHR = {};
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DeferredOperationKHR const & ) const = default;
+#else
+    bool operator==( DeferredOperationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_deferredOperationKHR == rhs.m_deferredOperationKHR;
+    }
+
+    bool operator!=(DeferredOperationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_deferredOperationKHR != rhs.m_deferredOperationKHR;
+    }
+
+    bool operator<(DeferredOperationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_deferredOperationKHR < rhs.m_deferredOperationKHR;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDeferredOperationKHR() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_deferredOperationKHR;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_deferredOperationKHR != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_deferredOperationKHR == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkDeferredOperationKHR m_deferredOperationKHR = {};
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDeferredOperationKHR>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::DeferredOperationKHR;
+  };
+
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DeferredOperationKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  class BufferCollectionFUCHSIA
+  {
+  public:
+    using CType = VkBufferCollectionFUCHSIA;
+    using NativeType = VkBufferCollectionFUCHSIA;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eBufferCollectionFUCHSIA;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferCollectionFUCHSIA;
+
+  public:
+    VULKAN_HPP_CONSTEXPR BufferCollectionFUCHSIA() = default;
+    VULKAN_HPP_CONSTEXPR BufferCollectionFUCHSIA( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {}
+    VULKAN_HPP_TYPESAFE_EXPLICIT BufferCollectionFUCHSIA( VkBufferCollectionFUCHSIA bufferCollectionFUCHSIA ) VULKAN_HPP_NOEXCEPT
+      : m_bufferCollectionFUCHSIA( bufferCollectionFUCHSIA )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    BufferCollectionFUCHSIA & operator=(VkBufferCollectionFUCHSIA bufferCollectionFUCHSIA) VULKAN_HPP_NOEXCEPT
+    {
+      m_bufferCollectionFUCHSIA = bufferCollectionFUCHSIA;
+      return *this;
+    }
+#endif
+
+    BufferCollectionFUCHSIA & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_bufferCollectionFUCHSIA = {};
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( BufferCollectionFUCHSIA const & ) const = default;
+#else
+    bool operator==( BufferCollectionFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_bufferCollectionFUCHSIA == rhs.m_bufferCollectionFUCHSIA;
+    }
+
+    bool operator!=(BufferCollectionFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_bufferCollectionFUCHSIA != rhs.m_bufferCollectionFUCHSIA;
+    }
+
+    bool operator<(BufferCollectionFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_bufferCollectionFUCHSIA < rhs.m_bufferCollectionFUCHSIA;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkBufferCollectionFUCHSIA() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_bufferCollectionFUCHSIA;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_bufferCollectionFUCHSIA != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_bufferCollectionFUCHSIA == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkBufferCollectionFUCHSIA m_bufferCollectionFUCHSIA = {};
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eBufferCollectionFUCHSIA>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA;
+  };
+
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferCollectionFUCHSIA>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA;
+  };
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  class BufferView
+  {
+  public:
+    using CType = VkBufferView;
+    using NativeType = VkBufferView;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eBufferView;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferView;
+
+  public:
+    VULKAN_HPP_CONSTEXPR BufferView() = default;
+    VULKAN_HPP_CONSTEXPR BufferView( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {}
+    VULKAN_HPP_TYPESAFE_EXPLICIT BufferView( VkBufferView bufferView ) VULKAN_HPP_NOEXCEPT
+      : m_bufferView( bufferView )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    BufferView & operator=(VkBufferView bufferView) VULKAN_HPP_NOEXCEPT
+    {
+      m_bufferView = bufferView;
+      return *this;
+    }
+#endif
+
+    BufferView & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_bufferView = {};
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( BufferView const & ) const = default;
+#else
+    bool operator==( BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_bufferView == rhs.m_bufferView;
+    }
+
+    bool operator!=(BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_bufferView != rhs.m_bufferView;
+    }
+
+    bool operator<(BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_bufferView < rhs.m_bufferView;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkBufferView() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_bufferView;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_bufferView != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_bufferView == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkBufferView m_bufferView = {};
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eBufferView>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::BufferView;
+  };
+
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferView>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::BufferView;
+  };
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::BufferView>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  class CommandPool
+  {
+  public:
+    using CType = VkCommandPool;
+    using NativeType = VkCommandPool;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCommandPool;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandPool;
+
+  public:
+    VULKAN_HPP_CONSTEXPR CommandPool() = default;
+    VULKAN_HPP_CONSTEXPR CommandPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {}
+    VULKAN_HPP_TYPESAFE_EXPLICIT CommandPool( VkCommandPool commandPool ) VULKAN_HPP_NOEXCEPT
+      : m_commandPool( commandPool )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    CommandPool & operator=(VkCommandPool commandPool) VULKAN_HPP_NOEXCEPT
+    {
+      m_commandPool = commandPool;
+      return *this;
+    }
+#endif
+
+    CommandPool & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_commandPool = {};
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( CommandPool const & ) const = default;
+#else
+    bool operator==( CommandPool const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_commandPool == rhs.m_commandPool;
+    }
+
+    bool operator!=(CommandPool const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_commandPool != rhs.m_commandPool;
+    }
+
+    bool operator<(CommandPool const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_commandPool < rhs.m_commandPool;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCommandPool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_commandPool;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_commandPool != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_commandPool == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkCommandPool m_commandPool = {};
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eCommandPool>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::CommandPool;
+  };
+
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandPool>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::CommandPool;
+  };
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::CommandPool>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  // Value wrapper around a raw VkPipelineCache handle: nullptr construction/reset,
+  // handle equality/ordering, explicit bool = "handle is non-null". No destructor is
+  // defined, so the wrapper does not own or destroy the handle.
+  // (Generated Vulkan-Hpp code — do not hand-edit.)
+  class PipelineCache
+  {
+  public:
+    using CType = VkPipelineCache;
+    using NativeType = VkPipelineCache;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipelineCache;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineCache;
+
+  public:
+    VULKAN_HPP_CONSTEXPR PipelineCache() = default;
+    VULKAN_HPP_CONSTEXPR PipelineCache( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {}
+    VULKAN_HPP_TYPESAFE_EXPLICIT PipelineCache( VkPipelineCache pipelineCache ) VULKAN_HPP_NOEXCEPT
+      : m_pipelineCache( pipelineCache )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    // Assignment from the raw C handle, only when typesafe conversion is enabled.
+    PipelineCache & operator=(VkPipelineCache pipelineCache) VULKAN_HPP_NOEXCEPT
+    {
+      m_pipelineCache = pipelineCache;
+      return *this;
+    }
+#endif
+
+    // Resets the wrapper to the null handle.
+    PipelineCache & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_pipelineCache = {};
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineCache const & ) const = default;
+#else
+    bool operator==( PipelineCache const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipelineCache == rhs.m_pipelineCache;
+    }
+
+    bool operator!=(PipelineCache const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipelineCache != rhs.m_pipelineCache;
+    }
+
+    bool operator<(PipelineCache const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipelineCache < rhs.m_pipelineCache;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipelineCache() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipelineCache;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipelineCache != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipelineCache == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkPipelineCache m_pipelineCache = {};
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::ePipelineCache>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::PipelineCache;
+  };
+
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineCache>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::PipelineCache;
+  };
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::PipelineCache>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  // Value wrapper around a raw VkCuFunctionNVX handle: nullptr construction/reset,
+  // handle equality/ordering, explicit bool = "handle is non-null". No destructor is
+  // defined, so the wrapper does not own or destroy the handle.
+  // (Generated Vulkan-Hpp code — do not hand-edit.)
+  class CuFunctionNVX
+  {
+  public:
+    using CType = VkCuFunctionNVX;
+    using NativeType = VkCuFunctionNVX;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCuFunctionNVX;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCuFunctionNVX;
+
+  public:
+    VULKAN_HPP_CONSTEXPR CuFunctionNVX() = default;
+    VULKAN_HPP_CONSTEXPR CuFunctionNVX( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {}
+    VULKAN_HPP_TYPESAFE_EXPLICIT CuFunctionNVX( VkCuFunctionNVX cuFunctionNVX ) VULKAN_HPP_NOEXCEPT
+      : m_cuFunctionNVX( cuFunctionNVX )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    // Assignment from the raw C handle, only when typesafe conversion is enabled.
+    CuFunctionNVX & operator=(VkCuFunctionNVX cuFunctionNVX) VULKAN_HPP_NOEXCEPT
+    {
+      m_cuFunctionNVX = cuFunctionNVX;
+      return *this;
+    }
+#endif
+
+    // Resets the wrapper to the null handle.
+    CuFunctionNVX & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_cuFunctionNVX = {};
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( CuFunctionNVX const & ) const = default;
+#else
+    bool operator==( CuFunctionNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_cuFunctionNVX == rhs.m_cuFunctionNVX;
+    }
+
+    bool operator!=(CuFunctionNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_cuFunctionNVX != rhs.m_cuFunctionNVX;
+    }
+
+    bool operator<(CuFunctionNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_cuFunctionNVX < rhs.m_cuFunctionNVX;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCuFunctionNVX() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_cuFunctionNVX;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_cuFunctionNVX != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_cuFunctionNVX == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkCuFunctionNVX m_cuFunctionNVX = {};
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eCuFunctionNVX>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::CuFunctionNVX;
+  };
+
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCuFunctionNVX>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::CuFunctionNVX;
+  };
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::CuFunctionNVX>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  // Value wrapper around a raw VkCuModuleNVX handle: nullptr construction/reset,
+  // handle equality/ordering, explicit bool = "handle is non-null". No destructor is
+  // defined, so the wrapper does not own or destroy the handle.
+  // (Generated Vulkan-Hpp code — do not hand-edit.)
+  class CuModuleNVX
+  {
+  public:
+    using CType = VkCuModuleNVX;
+    using NativeType = VkCuModuleNVX;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCuModuleNVX;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCuModuleNVX;
+
+  public:
+    VULKAN_HPP_CONSTEXPR CuModuleNVX() = default;
+    VULKAN_HPP_CONSTEXPR CuModuleNVX( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {}
+    VULKAN_HPP_TYPESAFE_EXPLICIT CuModuleNVX( VkCuModuleNVX cuModuleNVX ) VULKAN_HPP_NOEXCEPT
+      : m_cuModuleNVX( cuModuleNVX )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    // Assignment from the raw C handle, only when typesafe conversion is enabled.
+    CuModuleNVX & operator=(VkCuModuleNVX cuModuleNVX) VULKAN_HPP_NOEXCEPT
+    {
+      m_cuModuleNVX = cuModuleNVX;
+      return *this;
+    }
+#endif
+
+    // Resets the wrapper to the null handle.
+    CuModuleNVX & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_cuModuleNVX = {};
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( CuModuleNVX const & ) const = default;
+#else
+    bool operator==( CuModuleNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_cuModuleNVX == rhs.m_cuModuleNVX;
+    }
+
+    bool operator!=(CuModuleNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_cuModuleNVX != rhs.m_cuModuleNVX;
+    }
+
+    bool operator<(CuModuleNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_cuModuleNVX < rhs.m_cuModuleNVX;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCuModuleNVX() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_cuModuleNVX;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_cuModuleNVX != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_cuModuleNVX == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkCuModuleNVX m_cuModuleNVX = {};
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eCuModuleNVX>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::CuModuleNVX;
+  };
+
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCuModuleNVX>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::CuModuleNVX;
+  };
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::CuModuleNVX>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  // Value wrapper around a raw VkDescriptorPool handle: nullptr construction/reset,
+  // handle equality/ordering, explicit bool = "handle is non-null". No destructor is
+  // defined, so the wrapper does not own or destroy the handle.
+  // (Generated Vulkan-Hpp code — do not hand-edit.)
+  class DescriptorPool
+  {
+  public:
+    using CType = VkDescriptorPool;
+    using NativeType = VkDescriptorPool;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorPool;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorPool;
+
+  public:
+    VULKAN_HPP_CONSTEXPR DescriptorPool() = default;
+    VULKAN_HPP_CONSTEXPR DescriptorPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {}
+    VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorPool( VkDescriptorPool descriptorPool ) VULKAN_HPP_NOEXCEPT
+      : m_descriptorPool( descriptorPool )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    // Assignment from the raw C handle, only when typesafe conversion is enabled.
+    DescriptorPool & operator=(VkDescriptorPool descriptorPool) VULKAN_HPP_NOEXCEPT
+    {
+      m_descriptorPool = descriptorPool;
+      return *this;
+    }
+#endif
+
+    // Resets the wrapper to the null handle.
+    DescriptorPool & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_descriptorPool = {};
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DescriptorPool const & ) const = default;
+#else
+    bool operator==( DescriptorPool const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorPool == rhs.m_descriptorPool;
+    }
+
+    bool operator!=(DescriptorPool const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorPool != rhs.m_descriptorPool;
+    }
+
+    bool operator<(DescriptorPool const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorPool < rhs.m_descriptorPool;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorPool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorPool;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorPool != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorPool == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkDescriptorPool m_descriptorPool = {};
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorPool>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::DescriptorPool;
+  };
+
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorPool>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::DescriptorPool;
+  };
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DescriptorPool>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  // Value wrapper around a raw VkDescriptorSetLayout handle: nullptr construction/reset,
+  // handle equality/ordering, explicit bool = "handle is non-null". No destructor is
+  // defined, so the wrapper does not own or destroy the handle.
+  // (Generated Vulkan-Hpp code — do not hand-edit.)
+  class DescriptorSetLayout
+  {
+  public:
+    using CType = VkDescriptorSetLayout;
+    using NativeType = VkDescriptorSetLayout;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSetLayout;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSetLayout;
+
+  public:
+    VULKAN_HPP_CONSTEXPR DescriptorSetLayout() = default;
+    VULKAN_HPP_CONSTEXPR DescriptorSetLayout( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {}
+    VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorSetLayout( VkDescriptorSetLayout descriptorSetLayout ) VULKAN_HPP_NOEXCEPT
+      : m_descriptorSetLayout( descriptorSetLayout )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    // Assignment from the raw C handle, only when typesafe conversion is enabled.
+    DescriptorSetLayout & operator=(VkDescriptorSetLayout descriptorSetLayout) VULKAN_HPP_NOEXCEPT
+    {
+      m_descriptorSetLayout = descriptorSetLayout;
+      return *this;
+    }
+#endif
+
+    // Resets the wrapper to the null handle.
+    DescriptorSetLayout & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_descriptorSetLayout = {};
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DescriptorSetLayout const & ) const = default;
+#else
+    bool operator==( DescriptorSetLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorSetLayout == rhs.m_descriptorSetLayout;
+    }
+
+    bool operator!=(DescriptorSetLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorSetLayout != rhs.m_descriptorSetLayout;
+    }
+
+    bool operator<(DescriptorSetLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorSetLayout < rhs.m_descriptorSetLayout;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorSetLayout() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorSetLayout;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorSetLayout != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorSetLayout == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkDescriptorSetLayout m_descriptorSetLayout = {};
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSetLayout>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::DescriptorSetLayout;
+  };
+
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSetLayout>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::DescriptorSetLayout;
+  };
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DescriptorSetLayout>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  // Value wrapper around a raw VkFramebuffer handle: nullptr construction/reset,
+  // handle equality/ordering, explicit bool = "handle is non-null". No destructor is
+  // defined, so the wrapper does not own or destroy the handle.
+  // (Generated Vulkan-Hpp code — do not hand-edit.)
+  class Framebuffer
+  {
+  public:
+    using CType = VkFramebuffer;
+    using NativeType = VkFramebuffer;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eFramebuffer;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFramebuffer;
+
+  public:
+    VULKAN_HPP_CONSTEXPR Framebuffer() = default;
+    VULKAN_HPP_CONSTEXPR Framebuffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {}
+    VULKAN_HPP_TYPESAFE_EXPLICIT Framebuffer( VkFramebuffer framebuffer ) VULKAN_HPP_NOEXCEPT
+      : m_framebuffer( framebuffer )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    // Assignment from the raw C handle, only when typesafe conversion is enabled.
+    Framebuffer & operator=(VkFramebuffer framebuffer) VULKAN_HPP_NOEXCEPT
+    {
+      m_framebuffer = framebuffer;
+      return *this;
+    }
+#endif
+
+    // Resets the wrapper to the null handle.
+    Framebuffer & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_framebuffer = {};
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( Framebuffer const & ) const = default;
+#else
+    bool operator==( Framebuffer const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_framebuffer == rhs.m_framebuffer;
+    }
+
+    bool operator!=(Framebuffer const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_framebuffer != rhs.m_framebuffer;
+    }
+
+    bool operator<(Framebuffer const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_framebuffer < rhs.m_framebuffer;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkFramebuffer() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_framebuffer;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_framebuffer != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_framebuffer == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkFramebuffer m_framebuffer = {};
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eFramebuffer>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Framebuffer;
+  };
+
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFramebuffer>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Framebuffer;
+  };
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Framebuffer>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  // Value wrapper around a raw VkIndirectCommandsLayoutNV handle: nullptr construction/reset,
+  // handle equality/ordering, explicit bool = "handle is non-null". No destructor is
+  // defined, so the wrapper does not own or destroy the handle.
+  // Note: debugReportObjectType is eUnknown (the legacy debug-report enum has no entry
+  // for this type), so no DebugReportObjectTypeEXT -> CppType specialization is emitted.
+  // (Generated Vulkan-Hpp code — do not hand-edit.)
+  class IndirectCommandsLayoutNV
+  {
+  public:
+    using CType = VkIndirectCommandsLayoutNV;
+    using NativeType = VkIndirectCommandsLayoutNV;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eIndirectCommandsLayoutNV;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+
+  public:
+    VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNV() = default;
+    VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNV( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {}
+    VULKAN_HPP_TYPESAFE_EXPLICIT IndirectCommandsLayoutNV( VkIndirectCommandsLayoutNV indirectCommandsLayoutNV ) VULKAN_HPP_NOEXCEPT
+      : m_indirectCommandsLayoutNV( indirectCommandsLayoutNV )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    // Assignment from the raw C handle, only when typesafe conversion is enabled.
+    IndirectCommandsLayoutNV & operator=(VkIndirectCommandsLayoutNV indirectCommandsLayoutNV) VULKAN_HPP_NOEXCEPT
+    {
+      m_indirectCommandsLayoutNV = indirectCommandsLayoutNV;
+      return *this;
+    }
+#endif
+
+    // Resets the wrapper to the null handle.
+    IndirectCommandsLayoutNV & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_indirectCommandsLayoutNV = {};
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( IndirectCommandsLayoutNV const & ) const = default;
+#else
+    bool operator==( IndirectCommandsLayoutNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_indirectCommandsLayoutNV == rhs.m_indirectCommandsLayoutNV;
+    }
+
+    bool operator!=(IndirectCommandsLayoutNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_indirectCommandsLayoutNV != rhs.m_indirectCommandsLayoutNV;
+    }
+
+    bool operator<(IndirectCommandsLayoutNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_indirectCommandsLayoutNV < rhs.m_indirectCommandsLayoutNV;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkIndirectCommandsLayoutNV() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_indirectCommandsLayoutNV;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_indirectCommandsLayoutNV != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_indirectCommandsLayoutNV == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkIndirectCommandsLayoutNV m_indirectCommandsLayoutNV = {};
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eIndirectCommandsLayoutNV>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV;
+  };
+
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  // Value wrapper around a raw VkPrivateDataSlot handle: nullptr construction/reset,
+  // handle equality/ordering, explicit bool = "handle is non-null". No destructor is
+  // defined, so the wrapper does not own or destroy the handle.
+  // Note: debugReportObjectType is eUnknown, so no DebugReportObjectTypeEXT -> CppType
+  // specialization is emitted. PrivateDataSlotEXT is kept as an alias for the promoted type.
+  // (Generated Vulkan-Hpp code — do not hand-edit.)
+  class PrivateDataSlot
+  {
+  public:
+    using CType = VkPrivateDataSlot;
+    using NativeType = VkPrivateDataSlot;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePrivateDataSlot;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+
+  public:
+    VULKAN_HPP_CONSTEXPR PrivateDataSlot() = default;
+    VULKAN_HPP_CONSTEXPR PrivateDataSlot( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {}
+    VULKAN_HPP_TYPESAFE_EXPLICIT PrivateDataSlot( VkPrivateDataSlot privateDataSlot ) VULKAN_HPP_NOEXCEPT
+      : m_privateDataSlot( privateDataSlot )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    // Assignment from the raw C handle, only when typesafe conversion is enabled.
+    PrivateDataSlot & operator=(VkPrivateDataSlot privateDataSlot) VULKAN_HPP_NOEXCEPT
+    {
+      m_privateDataSlot = privateDataSlot;
+      return *this;
+    }
+#endif
+
+    // Resets the wrapper to the null handle.
+    PrivateDataSlot & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_privateDataSlot = {};
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PrivateDataSlot const & ) const = default;
+#else
+    bool operator==( PrivateDataSlot const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_privateDataSlot == rhs.m_privateDataSlot;
+    }
+
+    bool operator!=(PrivateDataSlot const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_privateDataSlot != rhs.m_privateDataSlot;
+    }
+
+    bool operator<(PrivateDataSlot const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_privateDataSlot < rhs.m_privateDataSlot;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPrivateDataSlot() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_privateDataSlot;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_privateDataSlot != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_privateDataSlot == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkPrivateDataSlot m_privateDataSlot = {};
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::ePrivateDataSlot>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::PrivateDataSlot;
+  };
+
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::PrivateDataSlot>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+  using PrivateDataSlotEXT = PrivateDataSlot;
+
+  // Value wrapper around a raw VkRenderPass handle: nullptr construction/reset,
+  // handle equality/ordering, explicit bool = "handle is non-null". No destructor is
+  // defined, so the wrapper does not own or destroy the handle.
+  // (Generated Vulkan-Hpp code — do not hand-edit.)
+  class RenderPass
+  {
+  public:
+    using CType = VkRenderPass;
+    using NativeType = VkRenderPass;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eRenderPass;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eRenderPass;
+
+  public:
+    VULKAN_HPP_CONSTEXPR RenderPass() = default;
+    VULKAN_HPP_CONSTEXPR RenderPass( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {}
+    VULKAN_HPP_TYPESAFE_EXPLICIT RenderPass( VkRenderPass renderPass ) VULKAN_HPP_NOEXCEPT
+      : m_renderPass( renderPass )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    // Assignment from the raw C handle, only when typesafe conversion is enabled.
+    RenderPass & operator=(VkRenderPass renderPass) VULKAN_HPP_NOEXCEPT
+    {
+      m_renderPass = renderPass;
+      return *this;
+    }
+#endif
+
+    // Resets the wrapper to the null handle.
+    RenderPass & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_renderPass = {};
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( RenderPass const & ) const = default;
+#else
+    bool operator==( RenderPass const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_renderPass == rhs.m_renderPass;
+    }
+
+    bool operator!=(RenderPass const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_renderPass != rhs.m_renderPass;
+    }
+
+    bool operator<(RenderPass const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_renderPass < rhs.m_renderPass;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkRenderPass() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_renderPass;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_renderPass != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_renderPass == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkRenderPass m_renderPass = {};
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eRenderPass>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::RenderPass;
+  };
+
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eRenderPass>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::RenderPass;
+  };
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::RenderPass>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  // Value wrapper around a raw VkSampler handle: nullptr construction/reset,
+  // handle equality/ordering, explicit bool = "handle is non-null". No destructor is
+  // defined, so the wrapper does not own or destroy the handle.
+  // (Generated Vulkan-Hpp code — do not hand-edit.)
+  class Sampler
+  {
+  public:
+    using CType = VkSampler;
+    using NativeType = VkSampler;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSampler;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSampler;
+
+  public:
+    VULKAN_HPP_CONSTEXPR Sampler() = default;
+    VULKAN_HPP_CONSTEXPR Sampler( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {}
+    VULKAN_HPP_TYPESAFE_EXPLICIT Sampler( VkSampler sampler ) VULKAN_HPP_NOEXCEPT
+      : m_sampler( sampler )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    // Assignment from the raw C handle, only when typesafe conversion is enabled.
+    Sampler & operator=(VkSampler sampler) VULKAN_HPP_NOEXCEPT
+    {
+      m_sampler = sampler;
+      return *this;
+    }
+#endif
+
+    // Resets the wrapper to the null handle.
+    Sampler & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_sampler = {};
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( Sampler const & ) const = default;
+#else
+    bool operator==( Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_sampler == rhs.m_sampler;
+    }
+
+    bool operator!=(Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_sampler != rhs.m_sampler;
+    }
+
+    bool operator<(Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_sampler < rhs.m_sampler;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSampler() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_sampler;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_sampler != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_sampler == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkSampler m_sampler = {};
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eSampler>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Sampler;
+  };
+
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSampler>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Sampler;
+  };
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Sampler>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  // Value wrapper around a raw VkSamplerYcbcrConversion handle: nullptr construction/reset,
+  // handle equality/ordering, explicit bool = "handle is non-null". No destructor is
+  // defined, so the wrapper does not own or destroy the handle.
+  // SamplerYcbcrConversionKHR is kept as an alias for the promoted type.
+  // (Generated Vulkan-Hpp code — do not hand-edit.)
+  class SamplerYcbcrConversion
+  {
+  public:
+    using CType = VkSamplerYcbcrConversion;
+    using NativeType = VkSamplerYcbcrConversion;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSamplerYcbcrConversion;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSamplerYcbcrConversion;
+
+  public:
+    VULKAN_HPP_CONSTEXPR SamplerYcbcrConversion() = default;
+    VULKAN_HPP_CONSTEXPR SamplerYcbcrConversion( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {}
+    VULKAN_HPP_TYPESAFE_EXPLICIT SamplerYcbcrConversion( VkSamplerYcbcrConversion samplerYcbcrConversion ) VULKAN_HPP_NOEXCEPT
+      : m_samplerYcbcrConversion( samplerYcbcrConversion )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    // Assignment from the raw C handle, only when typesafe conversion is enabled.
+    SamplerYcbcrConversion & operator=(VkSamplerYcbcrConversion samplerYcbcrConversion) VULKAN_HPP_NOEXCEPT
+    {
+      m_samplerYcbcrConversion = samplerYcbcrConversion;
+      return *this;
+    }
+#endif
+
+    // Resets the wrapper to the null handle.
+    SamplerYcbcrConversion & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_samplerYcbcrConversion = {};
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SamplerYcbcrConversion const & ) const = default;
+#else
+    bool operator==( SamplerYcbcrConversion const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_samplerYcbcrConversion == rhs.m_samplerYcbcrConversion;
+    }
+
+    bool operator!=(SamplerYcbcrConversion const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_samplerYcbcrConversion != rhs.m_samplerYcbcrConversion;
+    }
+
+    bool operator<(SamplerYcbcrConversion const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_samplerYcbcrConversion < rhs.m_samplerYcbcrConversion;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSamplerYcbcrConversion() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_samplerYcbcrConversion;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_samplerYcbcrConversion != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_samplerYcbcrConversion == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkSamplerYcbcrConversion m_samplerYcbcrConversion = {};
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eSamplerYcbcrConversion>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion;
+  };
+
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSamplerYcbcrConversion>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion;
+  };
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+  using SamplerYcbcrConversionKHR = SamplerYcbcrConversion;
+
+  // Value wrapper around a raw VkShaderModule handle: nullptr construction/reset,
+  // handle equality/ordering, explicit bool = "handle is non-null". No destructor is
+  // defined, so the wrapper does not own or destroy the handle.
+  // (Generated Vulkan-Hpp code — do not hand-edit.)
+  class ShaderModule
+  {
+  public:
+    using CType = VkShaderModule;
+    using NativeType = VkShaderModule;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eShaderModule;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eShaderModule;
+
+  public:
+    VULKAN_HPP_CONSTEXPR ShaderModule() = default;
+    VULKAN_HPP_CONSTEXPR ShaderModule( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {}
+    VULKAN_HPP_TYPESAFE_EXPLICIT ShaderModule( VkShaderModule shaderModule ) VULKAN_HPP_NOEXCEPT
+      : m_shaderModule( shaderModule )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    // Assignment from the raw C handle, only when typesafe conversion is enabled.
+    ShaderModule & operator=(VkShaderModule shaderModule) VULKAN_HPP_NOEXCEPT
+    {
+      m_shaderModule = shaderModule;
+      return *this;
+    }
+#endif
+
+    // Resets the wrapper to the null handle.
+    ShaderModule & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_shaderModule = {};
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ShaderModule const & ) const = default;
+#else
+    bool operator==( ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_shaderModule == rhs.m_shaderModule;
+    }
+
+    bool operator!=(ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_shaderModule != rhs.m_shaderModule;
+    }
+
+    bool operator<(ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_shaderModule < rhs.m_shaderModule;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkShaderModule() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_shaderModule;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_shaderModule != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_shaderModule == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkShaderModule m_shaderModule = {};
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eShaderModule>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::ShaderModule;
+  };
+
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eShaderModule>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::ShaderModule;
+  };
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::ShaderModule>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  // Value wrapper around a raw VkValidationCacheEXT handle: nullptr construction/reset,
+  // handle equality/ordering, explicit bool = "handle is non-null". No destructor is
+  // defined, so the wrapper does not own or destroy the handle.
+  // (Generated Vulkan-Hpp code — do not hand-edit.)
+  class ValidationCacheEXT
+  {
+  public:
+    using CType = VkValidationCacheEXT;
+    using NativeType = VkValidationCacheEXT;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eValidationCacheEXT;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eValidationCacheEXT;
+
+  public:
+    VULKAN_HPP_CONSTEXPR ValidationCacheEXT() = default;
+    VULKAN_HPP_CONSTEXPR ValidationCacheEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {}
+    VULKAN_HPP_TYPESAFE_EXPLICIT ValidationCacheEXT( VkValidationCacheEXT validationCacheEXT ) VULKAN_HPP_NOEXCEPT
+      : m_validationCacheEXT( validationCacheEXT )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    // Assignment from the raw C handle, only when typesafe conversion is enabled.
+    ValidationCacheEXT & operator=(VkValidationCacheEXT validationCacheEXT) VULKAN_HPP_NOEXCEPT
+    {
+      m_validationCacheEXT = validationCacheEXT;
+      return *this;
+    }
+#endif
+
+    // Resets the wrapper to the null handle.
+    ValidationCacheEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_validationCacheEXT = {};
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ValidationCacheEXT const & ) const = default;
+#else
+    bool operator==( ValidationCacheEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_validationCacheEXT == rhs.m_validationCacheEXT;
+    }
+
+    bool operator!=(ValidationCacheEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_validationCacheEXT != rhs.m_validationCacheEXT;
+    }
+
+    bool operator<(ValidationCacheEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_validationCacheEXT < rhs.m_validationCacheEXT;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkValidationCacheEXT() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_validationCacheEXT;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_validationCacheEXT != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_validationCacheEXT == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkValidationCacheEXT m_validationCacheEXT = {};
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eValidationCacheEXT>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::ValidationCacheEXT;
+  };
+
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eValidationCacheEXT>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::ValidationCacheEXT;
+  };
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::ValidationCacheEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  class VideoSessionParametersKHR
+  {
+  public:
+    using CType = VkVideoSessionParametersKHR;
+    using NativeType = VkVideoSessionParametersKHR;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eVideoSessionParametersKHR;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+
+  public:
+    VULKAN_HPP_CONSTEXPR VideoSessionParametersKHR() = default;
+    VULKAN_HPP_CONSTEXPR VideoSessionParametersKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {}
+    VULKAN_HPP_TYPESAFE_EXPLICIT VideoSessionParametersKHR( VkVideoSessionParametersKHR videoSessionParametersKHR ) VULKAN_HPP_NOEXCEPT
+      : m_videoSessionParametersKHR( videoSessionParametersKHR )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    VideoSessionParametersKHR & operator=(VkVideoSessionParametersKHR videoSessionParametersKHR) VULKAN_HPP_NOEXCEPT
+    {
+      m_videoSessionParametersKHR = videoSessionParametersKHR;
+      return *this;
+    }
+#endif
+
+    VideoSessionParametersKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_videoSessionParametersKHR = {};
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( VideoSessionParametersKHR const & ) const = default;
+#else
+    bool operator==( VideoSessionParametersKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_videoSessionParametersKHR == rhs.m_videoSessionParametersKHR;
+    }
+
+    bool operator!=(VideoSessionParametersKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_videoSessionParametersKHR != rhs.m_videoSessionParametersKHR;
+    }
+
+    bool operator<(VideoSessionParametersKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_videoSessionParametersKHR < rhs.m_videoSessionParametersKHR;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkVideoSessionParametersKHR() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_videoSessionParametersKHR;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_videoSessionParametersKHR != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_videoSessionParametersKHR == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkVideoSessionParametersKHR m_videoSessionParametersKHR = {};
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eVideoSessionParametersKHR>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR;
+  };
+
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  class Queue
+  {
+  public:
+    using CType = VkQueue;
+    using NativeType = VkQueue;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eQueue;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueue;
+
+  public:
+    VULKAN_HPP_CONSTEXPR Queue() = default;
+    VULKAN_HPP_CONSTEXPR Queue( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {}
+    Queue( VkQueue queue ) VULKAN_HPP_NOEXCEPT
+      : m_queue( queue )
+    {}
+
+    Queue & operator=(VkQueue queue) VULKAN_HPP_NOEXCEPT
+    {
+      m_queue = queue;
+      return *this;
+    }
+
+    Queue & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_queue = {};
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( Queue const & ) const = default;
+#else
+    bool operator==( Queue const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_queue == rhs.m_queue;
+    }
+
+    bool operator!=(Queue const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_queue != rhs.m_queue;
+    }
+
+    bool operator<(Queue const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_queue < rhs.m_queue;
+    }
+#endif
+
+  //=== VK_VERSION_1_0 ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result submit( uint32_t submitCount, const VULKAN_HPP_NAMESPACE::SubmitInfo * pSubmits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type submit( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo> const & submits, VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result waitIdle( Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type waitIdle( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result bindSparse( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindSparseInfo * pBindInfo, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type bindSparse( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindSparseInfo> const & bindInfo, VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_VERSION_1_3 ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result submit2( uint32_t submitCount, const VULKAN_HPP_NAMESPACE::SubmitInfo2 * pSubmits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type submit2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2> const & submits, VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_swapchain ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR * pPresentInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR & presentInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_EXT_debug_utils ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endDebugUtilsLabelEXT( Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_NV_device_diagnostic_checkpoints ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getCheckpointDataNV( uint32_t * pCheckpointDataCount, VULKAN_HPP_NAMESPACE::CheckpointDataNV * pCheckpointData, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename CheckpointDataNVAllocator = std::allocator<VULKAN_HPP_NAMESPACE::CheckpointDataNV>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV, CheckpointDataNVAllocator> getCheckpointDataNV( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename CheckpointDataNVAllocator = std::allocator<VULKAN_HPP_NAMESPACE::CheckpointDataNV>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = CheckpointDataNVAllocator, typename std::enable_if<std::is_same<typename B1::value_type, CheckpointDataNV>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV, CheckpointDataNVAllocator> getCheckpointDataNV( CheckpointDataNVAllocator & checkpointDataNVAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_INTEL_performance_query ===
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  //=== VK_KHR_synchronization2 ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result submit2KHR( uint32_t submitCount, const VULKAN_HPP_NAMESPACE::SubmitInfo2 * pSubmits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type submit2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2> const & submits, VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getCheckpointData2NV( uint32_t * pCheckpointDataCount, VULKAN_HPP_NAMESPACE::CheckpointData2NV * pCheckpointData, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename CheckpointData2NVAllocator = std::allocator<VULKAN_HPP_NAMESPACE::CheckpointData2NV>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV, CheckpointData2NVAllocator> getCheckpointData2NV( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename CheckpointData2NVAllocator = std::allocator<VULKAN_HPP_NAMESPACE::CheckpointData2NV>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = CheckpointData2NVAllocator, typename std::enable_if<std::is_same<typename B1::value_type, CheckpointData2NV>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV, CheckpointData2NVAllocator> getCheckpointData2NV( CheckpointData2NVAllocator & checkpointData2NVAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    operator VkQueue() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_queue;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_queue != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_queue == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkQueue m_queue = {};
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eQueue>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Queue;
+  };
+
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueue>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Queue;
+  };
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Queue>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  class Device;
+  template <typename Dispatch>
+  class UniqueHandleTraits<AccelerationStructureKHR, Dispatch>
+  {
+  public:
+    using deleter = ObjectDestroy<Device, Dispatch>;
+  };
+  using UniqueAccelerationStructureKHR = UniqueHandle<AccelerationStructureKHR, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch>
+  class UniqueHandleTraits<AccelerationStructureNV, Dispatch>
+  {
+  public:
+    using deleter = ObjectDestroy<Device, Dispatch>;
+  };
+  using UniqueAccelerationStructureNV = UniqueHandle<AccelerationStructureNV, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch>
+  class UniqueHandleTraits<Buffer, Dispatch>
+  {
+  public:
+    using deleter = ObjectDestroy<Device, Dispatch>;
+  };
+  using UniqueBuffer = UniqueHandle<Buffer, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  template <typename Dispatch>
+  class UniqueHandleTraits<BufferCollectionFUCHSIA, Dispatch>
+  {
+  public:
+    using deleter = ObjectDestroy<Device, Dispatch>;
+  };
+  using UniqueBufferCollectionFUCHSIA = UniqueHandle<BufferCollectionFUCHSIA, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+  template <typename Dispatch>
+  class UniqueHandleTraits<BufferView, Dispatch>
+  {
+  public:
+    using deleter = ObjectDestroy<Device, Dispatch>;
+  };
+  using UniqueBufferView = UniqueHandle<BufferView, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch>
+  class UniqueHandleTraits<CommandBuffer, Dispatch>
+  {
+  public:
+    using deleter = PoolFree<Device, CommandPool, Dispatch>;
+  };
+  using UniqueCommandBuffer = UniqueHandle<CommandBuffer, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch>
+  class UniqueHandleTraits<CommandPool, Dispatch>
+  {
+  public:
+    using deleter = ObjectDestroy<Device, Dispatch>;
+  };
+  using UniqueCommandPool = UniqueHandle<CommandPool, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch>
+  class UniqueHandleTraits<CuFunctionNVX, Dispatch>
+  {
+  public:
+    using deleter = ObjectDestroy<Device, Dispatch>;
+  };
+  using UniqueCuFunctionNVX = UniqueHandle<CuFunctionNVX, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch>
+  class UniqueHandleTraits<CuModuleNVX, Dispatch>
+  {
+  public:
+    using deleter = ObjectDestroy<Device, Dispatch>;
+  };
+  using UniqueCuModuleNVX = UniqueHandle<CuModuleNVX, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch>
+  class UniqueHandleTraits<DeferredOperationKHR, Dispatch>
+  {
+  public:
+    using deleter = ObjectDestroy<Device, Dispatch>;
+  };
+  using UniqueDeferredOperationKHR = UniqueHandle<DeferredOperationKHR, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch>
+  class UniqueHandleTraits<DescriptorPool, Dispatch>
+  {
+  public:
+    using deleter = ObjectDestroy<Device, Dispatch>;
+  };
+  using UniqueDescriptorPool = UniqueHandle<DescriptorPool, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch>
+  class UniqueHandleTraits<DescriptorSet, Dispatch>
+  {
+  public:
+    using deleter = PoolFree<Device, DescriptorPool, Dispatch>;
+  };
+  using UniqueDescriptorSet = UniqueHandle<DescriptorSet, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch>
+  class UniqueHandleTraits<DescriptorSetLayout, Dispatch>
+  {
+  public:
+    using deleter = ObjectDestroy<Device, Dispatch>;
+  };
+  using UniqueDescriptorSetLayout = UniqueHandle<DescriptorSetLayout, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch>
+  class UniqueHandleTraits<DescriptorUpdateTemplate, Dispatch>
+  {
+  public:
+    using deleter = ObjectDestroy<Device, Dispatch>;
+  };
+  using UniqueDescriptorUpdateTemplate = UniqueHandle<DescriptorUpdateTemplate, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  using UniqueDescriptorUpdateTemplateKHR = UniqueHandle<DescriptorUpdateTemplate, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch>
+  class UniqueHandleTraits<DeviceMemory, Dispatch>
+  {
+  public:
+    using deleter = ObjectFree<Device, Dispatch>;
+  };
+  using UniqueDeviceMemory = UniqueHandle<DeviceMemory, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch>
+  class UniqueHandleTraits<Event, Dispatch>
+  {
+  public:
+    using deleter = ObjectDestroy<Device, Dispatch>;
+  };
+  using UniqueEvent = UniqueHandle<Event, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch>
+  class UniqueHandleTraits<Fence, Dispatch>
+  {
+  public:
+    using deleter = ObjectDestroy<Device, Dispatch>;
+  };
+  using UniqueFence = UniqueHandle<Fence, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch>
+  class UniqueHandleTraits<Framebuffer, Dispatch>
+  {
+  public:
+    using deleter = ObjectDestroy<Device, Dispatch>;
+  };
+  using UniqueFramebuffer = UniqueHandle<Framebuffer, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch>
+  class UniqueHandleTraits<Image, Dispatch>
+  {
+  public:
+    using deleter = ObjectDestroy<Device, Dispatch>;
+  };
+  using UniqueImage = UniqueHandle<Image, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch>
+  class UniqueHandleTraits<ImageView, Dispatch>
+  {
+  public:
+    using deleter = ObjectDestroy<Device, Dispatch>;
+  };
+  using UniqueImageView = UniqueHandle<ImageView, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch>
+  class UniqueHandleTraits<IndirectCommandsLayoutNV, Dispatch>
+  {
+  public:
+    using deleter = ObjectDestroy<Device, Dispatch>;
+  };
+  using UniqueIndirectCommandsLayoutNV = UniqueHandle<IndirectCommandsLayoutNV, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch>
+  class UniqueHandleTraits<MicromapEXT, Dispatch>
+  {
+  public:
+    using deleter = ObjectDestroy<Device, Dispatch>;
+  };
+  using UniqueMicromapEXT = UniqueHandle<MicromapEXT, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch>
+  class UniqueHandleTraits<OpticalFlowSessionNV, Dispatch>
+  {
+  public:
+    using deleter = ObjectDestroy<Device, Dispatch>;
+  };
+  using UniqueOpticalFlowSessionNV = UniqueHandle<OpticalFlowSessionNV, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch>
+  class UniqueHandleTraits<Pipeline, Dispatch>
+  {
+  public:
+    using deleter = ObjectDestroy<Device, Dispatch>;
+  };
+  using UniquePipeline = UniqueHandle<Pipeline, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch>
+  class UniqueHandleTraits<PipelineCache, Dispatch>
+  {
+  public:
+    using deleter = ObjectDestroy<Device, Dispatch>;
+  };
+  using UniquePipelineCache = UniqueHandle<PipelineCache, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch>
+  class UniqueHandleTraits<PipelineLayout, Dispatch>
+  {
+  public:
+    using deleter = ObjectDestroy<Device, Dispatch>;
+  };
+  using UniquePipelineLayout = UniqueHandle<PipelineLayout, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch>
+  class UniqueHandleTraits<PrivateDataSlot, Dispatch>
+  {
+  public:
+    using deleter = ObjectDestroy<Device, Dispatch>;
+  };
+  using UniquePrivateDataSlot = UniqueHandle<PrivateDataSlot, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  using UniquePrivateDataSlotEXT = UniqueHandle<PrivateDataSlot, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch>
+  class UniqueHandleTraits<QueryPool, Dispatch>
+  {
+  public:
+    using deleter = ObjectDestroy<Device, Dispatch>;
+  };
+  using UniqueQueryPool = UniqueHandle<QueryPool, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch>
+  class UniqueHandleTraits<RenderPass, Dispatch>
+  {
+  public:
+    using deleter = ObjectDestroy<Device, Dispatch>;
+  };
+  using UniqueRenderPass = UniqueHandle<RenderPass, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch>
+  class UniqueHandleTraits<Sampler, Dispatch>
+  {
+  public:
+    using deleter = ObjectDestroy<Device, Dispatch>;
+  };
+  using UniqueSampler = UniqueHandle<Sampler, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch>
+  class UniqueHandleTraits<SamplerYcbcrConversion, Dispatch>
+  {
+  public:
+    using deleter = ObjectDestroy<Device, Dispatch>;
+  };
+  using UniqueSamplerYcbcrConversion = UniqueHandle<SamplerYcbcrConversion, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  using UniqueSamplerYcbcrConversionKHR = UniqueHandle<SamplerYcbcrConversion, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch>
+  class UniqueHandleTraits<Semaphore, Dispatch>
+  {
+  public:
+    using deleter = ObjectDestroy<Device, Dispatch>;
+  };
+  using UniqueSemaphore = UniqueHandle<Semaphore, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch>
+  class UniqueHandleTraits<ShaderModule, Dispatch>
+  {
+  public:
+    using deleter = ObjectDestroy<Device, Dispatch>;
+  };
+  using UniqueShaderModule = UniqueHandle<ShaderModule, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch>
+  class UniqueHandleTraits<SwapchainKHR, Dispatch>
+  {
+  public:
+    using deleter = ObjectDestroy<Device, Dispatch>;
+  };
+  using UniqueSwapchainKHR = UniqueHandle<SwapchainKHR, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch>
+  class UniqueHandleTraits<ValidationCacheEXT, Dispatch>
+  {
+  public:
+    using deleter = ObjectDestroy<Device, Dispatch>;
+  };
+  using UniqueValidationCacheEXT = UniqueHandle<ValidationCacheEXT, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch>
+  class UniqueHandleTraits<VideoSessionKHR, Dispatch>
+  {
+  public:
+    using deleter = ObjectDestroy<Device, Dispatch>;
+  };
+  using UniqueVideoSessionKHR = UniqueHandle<VideoSessionKHR, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch>
+  class UniqueHandleTraits<VideoSessionParametersKHR, Dispatch>
+  {
+  public:
+    using deleter = ObjectDestroy<Device, Dispatch>;
+  };
+  using UniqueVideoSessionParametersKHR = UniqueHandle<VideoSessionParametersKHR, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+
+  class Device
+  {
+  public:
+    using CType = VkDevice;
+    using NativeType = VkDevice;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDevice;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDevice;
+
+  public:
+    VULKAN_HPP_CONSTEXPR Device() = default;
+    VULKAN_HPP_CONSTEXPR Device( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {}
+    Device( VkDevice device ) VULKAN_HPP_NOEXCEPT
+      : m_device( device )
+    {}
+
+    Device & operator=(VkDevice device) VULKAN_HPP_NOEXCEPT
+    {
+      m_device = device;
+      return *this;
+    }
+
+    Device & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_device = {};
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( Device const & ) const = default;
+#else
+    bool operator==( Device const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_device == rhs.m_device;
+    }
+
+    bool operator!=(Device const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_device != rhs.m_device;
+    }
+
+    bool operator<(Device const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_device < rhs.m_device;
+    }
+#endif
+
+  //=== VK_VERSION_1_0 ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    PFN_vkVoidFunction getProcAddr( const char * pName, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    PFN_vkVoidFunction getProcAddr( const std::string & name, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, VULKAN_HPP_NAMESPACE::Queue * pQueue, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Queue getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result waitIdle( Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type waitIdle( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo * pAllocateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::DeviceMemory * pMemory, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceMemory>::type allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo & allocateInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DeviceMemory, Dispatch>>::type allocateMemoryUnique( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo & allocateInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void ( free )( VULKAN_HPP_NAMESPACE::DeviceMemory memory, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void ( free )( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::DeviceSize size, VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, void ** ppData, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<void *>::type mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::DeviceSize size, VULKAN_HPP_NAMESPACE::MemoryMapFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result flushMappedMemoryRanges( uint32_t memoryRangeCount, const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type flushMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result invalidateMappedMemoryRanges( uint32_t memoryRangeCount, const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type invalidateMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize * pCommittedMemoryInBytes, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceSize getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, uint32_t * pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements * pSparseMemoryRequirements, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename SparseImageMemoryRequirementsAllocator = std::allocator<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename SparseImageMemoryRequirementsAllocator = std::allocator<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = SparseImageMemoryRequirementsAllocator, typename std::enable_if<std::is_same<typename B1::value_type, SparseImageMemoryRequirements>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, SparseImageMemoryRequirementsAllocator & sparseImageMemoryRequirementsAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Fence * pFence, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type createFenceUnique( const VULKAN_HPP_NAMESPACE::FenceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyFence( VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::Fence fence, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::Fence fence, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result resetFences( uint32_t fenceCount, const VULKAN_HPP_NAMESPACE::Fence * pFences, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type resetFences( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result waitForFences( uint32_t fenceCount, const VULKAN_HPP_NAMESPACE::Fence * pFences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result waitForFences( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Semaphore * pSemaphore, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::Semaphore>::type createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Semaphore, Dispatch>>::type createSemaphoreUnique( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Event * pEvent, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::Event>::type createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Event, Dispatch>>::type createEventUnique( const VULKAN_HPP_NAMESPACE::EventCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyEvent( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyEvent( VULKAN_HPP_NAMESPACE::Event event VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::Event event, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::QueryPool * pQueryPool, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::QueryPool>::type createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::QueryPool, Dispatch>>::type createQueryPoolUnique( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void * pData, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename DataType, typename DataTypeAllocator = std::allocator<DataType>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD ResultValue<std::vector<DataType, DataTypeAllocator>> getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename DataType, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD ResultValue<DataType> getQueryPoolResult( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Buffer * pBuffer, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::Buffer>::type createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Buffer, Dispatch>>::type createBufferUnique( const VULKAN_HPP_NAMESPACE::BufferCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::BufferView * pView, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::BufferView>::type createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::BufferView, Dispatch>>::type createBufferViewUnique( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Image * pImage, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::Image>::type createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Image, Dispatch>>::type createImageUnique( const VULKAN_HPP_NAMESPACE::ImageCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyImage( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyImage( VULKAN_HPP_NAMESPACE::Image image VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::Image image, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource * pSubresource, VULKAN_HPP_NAMESPACE::SubresourceLayout * pLayout, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource & subresource, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::ImageView * pView, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageView>::type createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ImageView, Dispatch>>::type createImageViewUnique( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::ShaderModule * pShaderModule, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::ShaderModule>::type createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderModule, Dispatch>>::type createShaderModuleUnique( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::PipelineCache * pPipelineCache, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineCache>::type createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineCache, Dispatch>>::type createPipelineCacheUnique( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, size_t * pDataSize, void * pData, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Uint8_tAllocator = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Uint8_tAllocator = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = Uint8_tAllocator, typename std::enable_if<std::is_same<typename B1::value_type, uint8_t>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, uint32_t srcCacheCount, const VULKAN_HPP_NAMESPACE::PipelineCache * pSrcCaches, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::PipelineCache> const & srcCaches, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo * pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename PipelineAllocator = std::allocator<VULKAN_HPP_NAMESPACE::Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename PipelineAllocator = std::allocator<VULKAN_HPP_NAMESPACE::Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B0 = PipelineAllocator, typename std::enable_if<std::is_same<typename B0::value_type, Pipeline>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD ResultValue<VULKAN_HPP_NAMESPACE::Pipeline> createGraphicsPipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename PipelineAllocator = std::allocator<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>>
+    VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename PipelineAllocator = std::allocator<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>, typename B0 = PipelineAllocator, typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>> createGraphicsPipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo * pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename PipelineAllocator = std::allocator<VULKAN_HPP_NAMESPACE::Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename PipelineAllocator = std::allocator<VULKAN_HPP_NAMESPACE::Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B0 = PipelineAllocator, typename std::enable_if<std::is_same<typename B0::value_type, Pipeline>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD ResultValue<VULKAN_HPP_NAMESPACE::Pipeline> createComputePipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename PipelineAllocator = std::allocator<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>>
+    VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename PipelineAllocator = std::allocator<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>, typename B0 = PipelineAllocator, typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>> createComputePipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::PipelineLayout * pPipelineLayout, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineLayout>::type createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineLayout, Dispatch>>::type createPipelineLayoutUnique( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Sampler * pSampler, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::Sampler>::type createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Sampler, Dispatch>>::type createSamplerUnique( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayout, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorSetLayout>::type createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSetLayout, Dispatch>>::type createDescriptorSetLayoutUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::DescriptorPool * pDescriptorPool, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorPool>::type createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorPool, Dispatch>>::type createDescriptorPoolUnique( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo * pAllocateInfo, VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename DescriptorSetAllocator = std::allocator<VULKAN_HPP_NAMESPACE::DescriptorSet>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator>>::type allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename DescriptorSetAllocator = std::allocator<VULKAN_HPP_NAMESPACE::DescriptorSet>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B0 = DescriptorSetAllocator, typename std::enable_if<std::is_same<typename B0::value_type, DescriptorSet>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator>>::type allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, DescriptorSetAllocator & descriptorSetAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename DescriptorSetAllocator = std::allocator<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>>>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>, DescriptorSetAllocator>>::type allocateDescriptorSetsUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename DescriptorSetAllocator = std::allocator<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>>, typename B0 = DescriptorSetAllocator, typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<DescriptorSet, Dispatch>>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>, DescriptorSetAllocator>>::type allocateDescriptorSetsUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, DescriptorSetAllocator & descriptorSetAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result ( free )( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void ( free )( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void updateDescriptorSets( uint32_t descriptorWriteCount, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites, uint32_t descriptorCopyCount, const VULKAN_HPP_NAMESPACE::CopyDescriptorSet * pDescriptorCopies, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void updateDescriptorSets( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CopyDescriptorSet> const & descriptorCopies, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Framebuffer * pFramebuffer, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::Framebuffer>::type createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Framebuffer, Dispatch>>::type createFramebufferUnique( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type createRenderPassUnique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, VULKAN_HPP_NAMESPACE::Extent2D * pGranularity, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Extent2D getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::CommandPool * pCommandPool, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::CommandPool>::type createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandPool, Dispatch>>::type createCommandPoolUnique( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo * pAllocateInfo, VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename CommandBufferAllocator = std::allocator<VULKAN_HPP_NAMESPACE::CommandBuffer>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer, CommandBufferAllocator>>::type allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename CommandBufferAllocator = std::allocator<VULKAN_HPP_NAMESPACE::CommandBuffer>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B0 = CommandBufferAllocator, typename std::enable_if<std::is_same<typename B0::value_type, CommandBuffer>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer, CommandBufferAllocator>>::type allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, CommandBufferAllocator & commandBufferAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename CommandBufferAllocator = std::allocator<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>>>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>, CommandBufferAllocator>>::type allocateCommandBuffersUnique( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename CommandBufferAllocator = std::allocator<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>>, typename B0 = CommandBufferAllocator, typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<CommandBuffer, Dispatch>>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>, CommandBufferAllocator>>::type allocateCommandBuffersUnique( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, CommandBufferAllocator & commandBufferAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void ( free )( VULKAN_HPP_NAMESPACE::CommandPool commandPool, uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void ( free )( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_VERSION_1_1 ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result bindBufferMemory2( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type bindBufferMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result bindImageMemory2( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type bindImageMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo, uint32_t * pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename SparseImageMemoryRequirements2Allocator = std::allocator<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename SparseImageMemoryRequirements2Allocator = std::allocator<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = SparseImageMemoryRequirements2Allocator, typename std::enable_if<std::is_same<typename B1::value_type, SparseImageMemoryRequirements2>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 * pQueueInfo, VULKAN_HPP_NAMESPACE::Queue * pQueue, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Queue getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 & queueInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>>::type createSamplerYcbcrConversionUnique( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createDescriptorUpdateTemplate( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type createDescriptorUpdateTemplate( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>>::type createDescriptorUpdateTemplateUnique( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void * pData, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename DataType, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, DataType const & data, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_VERSION_1_2 ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type createRenderPass2Unique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t * pValue, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<uint64_t>::type getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo, uint64_t timeout, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    DeviceAddress getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::DeviceAddress getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    uint64_t getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    uint64_t getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    uint64_t getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    uint64_t getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_VERSION_1_3 ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createPrivateDataSlot( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::PrivateDataSlot * pPrivateDataSlot, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::PrivateDataSlot>::type createPrivateDataSlot( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PrivateDataSlot, Dispatch>>::type createPrivateDataSlotUnique( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyPrivateDataSlot( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyPrivateDataSlot( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, uint64_t data, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, uint64_t data, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, uint64_t * pData, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD uint64_t getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements * pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo, uint32_t * pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename SparseImageMemoryRequirements2Allocator = std::allocator<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename SparseImageMemoryRequirements2Allocator = std::allocator<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = SparseImageMemoryRequirements2Allocator, typename std::enable_if<std::is_same<typename B1::value_type, SparseImageMemoryRequirements2>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_swapchain ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchain, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::SwapchainKHR>::type createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>>::type createSwapchainKHRUnique( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint32_t * pSwapchainImageCount, VULKAN_HPP_NAMESPACE::Image * pSwapchainImages, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename ImageAllocator = std::allocator<VULKAN_HPP_NAMESPACE::Image>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::Image, ImageAllocator>>::type getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename ImageAllocator = std::allocator<VULKAN_HPP_NAMESPACE::Image>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = ImageAllocator, typename std::enable_if<std::is_same<typename B1::value_type, Image>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::Image, ImageAllocator>>::type getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, ImageAllocator & imageAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t timeout, VULKAN_HPP_NAMESPACE::Semaphore semaphore, VULKAN_HPP_NAMESPACE::Fence fence, uint32_t * pImageIndex, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD ResultValue<uint32_t> acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t timeout, VULKAN_HPP_NAMESPACE::Semaphore semaphore VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getGroupPresentCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR>::type getGroupPresentCapabilitiesKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR * pAcquireInfo, uint32_t * pImageIndex, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD ResultValue<uint32_t> acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR & acquireInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_display_swapchain ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createSharedSwapchainsKHR( uint32_t swapchainCount, const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename SwapchainKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::SwapchainKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR, SwapchainKHRAllocator>>::type createSharedSwapchainsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename SwapchainKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::SwapchainKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B0 = SwapchainKHRAllocator, typename std::enable_if<std::is_same<typename B0::value_type, SwapchainKHR>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR, SwapchainKHRAllocator>>::type createSharedSwapchainsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, SwapchainKHRAllocator & swapchainKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::SwapchainKHR>::type createSharedSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename SwapchainKHRAllocator = std::allocator<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>>>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>, SwapchainKHRAllocator>>::type createSharedSwapchainsKHRUnique( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename SwapchainKHRAllocator = std::allocator<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>>, typename B0 = SwapchainKHRAllocator, typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<SwapchainKHR, Dispatch>>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>, SwapchainKHRAllocator>>::type createSharedSwapchainsKHRUnique( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, SwapchainKHRAllocator & swapchainKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>>::type createSharedSwapchainKHRUnique( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_EXT_debug_marker ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT * pTagInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT & tagInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT * pNameInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT & nameInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_video_queue ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createVideoSessionKHR( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::VideoSessionKHR * pVideoSession, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoSessionKHR>::type createVideoSessionKHR( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::VideoSessionKHR, Dispatch>>::type createVideoSessionKHRUnique( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, uint32_t * pMemoryRequirementsCount, VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR * pMemoryRequirements, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename VideoSessionMemoryRequirementsKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR, VideoSessionMemoryRequirementsKHRAllocator>>::type getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename VideoSessionMemoryRequirementsKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = VideoSessionMemoryRequirementsKHRAllocator, typename std::enable_if<std::is_same<typename B1::value_type, VideoSessionMemoryRequirementsKHR>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR, VideoSessionMemoryRequirementsKHRAllocator>>::type getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, VideoSessionMemoryRequirementsKHRAllocator & videoSessionMemoryRequirementsKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result bindVideoSessionMemoryKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, uint32_t bindSessionMemoryInfoCount, const VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR * pBindSessionMemoryInfos, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type bindVideoSessionMemoryKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR> const & bindSessionMemoryInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR * pVideoSessionParameters, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR>::type createVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR, Dispatch>>::type createVideoSessionParametersKHRUnique( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result updateVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR * pUpdateInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type updateVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR & updateInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_NVX_binary_import ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createCuModuleNVX( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::CuModuleNVX * pModule, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::CuModuleNVX>::type createCuModuleNVX( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CuModuleNVX, Dispatch>>::type createCuModuleNVXUnique( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createCuFunctionNVX( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::CuFunctionNVX * pFunction, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::CuFunctionNVX>::type createCuFunctionNVX( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CuFunctionNVX, Dispatch>>::type createCuFunctionNVXUnique( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::CuModuleNVX module, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::CuModuleNVX module, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_NVX_image_view_handle ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    uint32_t getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX * pInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    uint32_t getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX * pProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX>::type getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_AMD_shader_info ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, size_t * pInfoSize, void * pInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Uint8_tAllocator = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Uint8_tAllocator = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = Uint8_tAllocator, typename std::enable_if<std::is_same<typename B1::value_type, uint8_t>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_NV_external_memory_win32 ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, HANDLE * pHandle, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<HANDLE>::type getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_device_group ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_maintenance1 ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_external_memory_win32 ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<HANDLE>::type getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR * pMemoryWin32HandleProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR>::type getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_external_memory_fd ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR * pGetFdInfo, int * pFd, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<int>::type getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR & getFdInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR * pMemoryFdProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR>::type getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_external_semaphore_win32 ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<HANDLE>::type getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_external_semaphore_fd ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR * pGetFdInfo, int * pFd, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<int>::type getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR & getFdInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_descriptor_update_template ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createDescriptorUpdateTemplateKHR( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type createDescriptorUpdateTemplateKHR( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>>::type createDescriptorUpdateTemplateKHRUnique( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void * pData, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename DataType, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, DataType const & data, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_EXT_display_control ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT * pDisplayPowerInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT & displayPowerInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT * pDeviceEventInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Fence * pFence, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT & deviceEventInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type registerEventEXTUnique( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT & deviceEventInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT * pDisplayEventInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Fence * pFence, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT & displayEventInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type registerDisplayEventEXTUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT & displayEventInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, uint64_t * pCounterValue, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<uint64_t>::type getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_GOOGLE_display_timing ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE * pDisplayTimingProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE>::type getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint32_t * pPresentationTimingCount, VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE * pPresentationTimings, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename PastPresentationTimingGOOGLEAllocator = std::allocator<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator>>::type getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename PastPresentationTimingGOOGLEAllocator = std::allocator<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = PastPresentationTimingGOOGLEAllocator, typename std::enable_if<std::is_same<typename B1::value_type, PastPresentationTimingGOOGLE>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator>>::type getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, PastPresentationTimingGOOGLEAllocator & pastPresentationTimingGOOGLEAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_EXT_hdr_metadata ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setHdrMetadataEXT( uint32_t swapchainCount, const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains, const VULKAN_HPP_NAMESPACE::HdrMetadataEXT * pMetadata, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setHdrMetadataEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const & swapchains, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::HdrMetadataEXT> const & metadata, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_create_renderpass2 ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type createRenderPass2KHRUnique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_shared_presentable_image ===
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_external_fence_win32 ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<HANDLE>::type getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_external_fence_fd ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR * pImportFenceFdInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR * pGetFdInfo, int * pFd, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<int>::type getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR & getFdInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_performance_query ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR * pInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void releaseProfilingLockKHR( Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_EXT_debug_utils ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pNameInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT & nameInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT * pTagInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT & tagInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  //=== VK_ANDROID_external_memory_android_hardware_buffer ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer * buffer, VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID * pProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>::type getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<StructureChain<X, Y, Z...>>::type getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID * pInfo, struct AHardwareBuffer ** pBuffer, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<struct AHardwareBuffer *>::type getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+  //=== VK_KHR_get_memory_requirements2 ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo, uint32_t * pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename SparseImageMemoryRequirements2Allocator = std::allocator<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename SparseImageMemoryRequirements2Allocator = std::allocator<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = SparseImageMemoryRequirements2Allocator, typename std::enable_if<std::is_same<typename B1::value_type, SparseImageMemoryRequirements2>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_acceleration_structure ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructure, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR>::type createAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR, Dispatch>>::type createAccelerationStructureKHRUnique( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result buildAccelerationStructuresKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, uint32_t infoCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result buildAccelerationStructuresKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, size_t dataSize, void * pData, size_t stride, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename DataType, typename DataTypeAllocator = std::allocator<DataType>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type writeAccelerationStructuresPropertiesKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, size_t dataSize, size_t stride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename DataType, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<DataType>::type writeAccelerationStructuresPropertyKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, size_t stride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    DeviceAddress getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR * pInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::DeviceAddress getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR * pVersionInfo, VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR * pCompatibility, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR & versionInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getAccelerationStructureBuildSizesKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pBuildInfo, const uint32_t * pMaxPrimitiveCounts, VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR * pSizeInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR getAccelerationStructureBuildSizesKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR & buildInfo, VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & maxPrimitiveCounts VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_sampler_ycbcr_conversion ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>>::type createSamplerYcbcrConversionKHRUnique( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_bind_memory2 ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result bindBufferMemory2KHR( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type bindBufferMemory2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result bindImageMemory2KHR( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type bindImageMemory2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_EXT_image_drm_format_modifier ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT * pProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT>::type getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_EXT_validation_cache ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pValidationCache, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::ValidationCacheEXT>::type createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ValidationCacheEXT, Dispatch>>::type createValidationCacheEXTUnique( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, uint32_t srcCacheCount, const VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pSrcCaches, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ValidationCacheEXT> const & srcCaches, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, size_t * pDataSize, void * pData, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Uint8_tAllocator = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Uint8_tAllocator = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = Uint8_tAllocator, typename std::enable_if<std::is_same<typename B1::value_type, uint8_t>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_NV_ray_tracing ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructure, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::AccelerationStructureNV>::type createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::AccelerationStructureNV, Dispatch>>::type createAccelerationStructureNVUnique( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV * pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR * pMemoryRequirements, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result bindAccelerationStructureMemoryNV( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV * pBindInfos, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type bindAccelerationStructureMemoryNV( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV> const & bindInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV * pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename PipelineAllocator = std::allocator<VULKAN_HPP_NAMESPACE::Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename PipelineAllocator = std::allocator<VULKAN_HPP_NAMESPACE::Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B0 = PipelineAllocator, typename std::enable_if<std::is_same<typename B0::value_type, Pipeline>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD ResultValue<VULKAN_HPP_NAMESPACE::Pipeline> createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename PipelineAllocator = std::allocator<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>>
+    VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename PipelineAllocator = std::allocator<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>, typename B0 = PipelineAllocator, typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>> createRayTracingPipelineNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void * pData, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename DataType, typename DataTypeAllocator = std::allocator<DataType>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename DataType, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<DataType>::type getRayTracingShaderGroupHandleNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, size_t dataSize, void * pData, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename DataType, typename DataTypeAllocator = std::allocator<DataType>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, size_t dataSize, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename DataType, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<DataType>::type getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  //=== VK_KHR_maintenance3 ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_EXT_external_memory_host ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void * pHostPointer, VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT * pMemoryHostPointerProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT>::type getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void * pHostPointer, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_EXT_calibrated_timestamps ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getCalibratedTimestampsEXT( uint32_t timestampCount, const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT * pTimestampInfos, uint64_t * pTimestamps, uint64_t * pMaxDeviation, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Uint64_tAllocator = std::allocator<uint64_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t>>::type getCalibratedTimestampsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> const & timestampInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Uint64_tAllocator = std::allocator<uint64_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B0 = Uint64_tAllocator, typename std::enable_if<std::is_same<typename B0::value_type, uint64_t>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t>>::type getCalibratedTimestampsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> const & timestampInfos, Uint64_tAllocator & uint64_tAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::pair<uint64_t, uint64_t>>::type getCalibratedTimestampEXT( const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT & timestampInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_timeline_semaphore ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t * pValue, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<uint64_t>::type getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo, uint64_t timeout, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_INTEL_performance_query ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result initializePerformanceApiINTEL( const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL * pInitializeInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type initializePerformanceApiINTEL( const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL & initializeInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void uninitializePerformanceApiINTEL( Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result acquirePerformanceConfigurationINTEL( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL * pAcquireInfo, VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL * pConfiguration, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL>::type acquirePerformanceConfigurationINTEL( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL & acquireInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL, Dispatch>>::type acquirePerformanceConfigurationINTELUnique( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL & acquireInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, VULKAN_HPP_NAMESPACE::PerformanceValueINTEL * pValue, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceValueINTEL>::type getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_AMD_display_native_hdr ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain, VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_EXT_buffer_device_address ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    DeviceAddress getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::DeviceAddress getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_present_wait ===
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result waitForPresentKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t presentId, uint64_t timeout, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result waitForPresentKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t presentId, uint64_t timeout, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_EXT_full_screen_exclusive ===
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_buffer_device_address ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    DeviceAddress getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::DeviceAddress getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    uint64_t getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    uint64_t getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    uint64_t getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    uint64_t getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_EXT_host_query_reset ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_KHR_deferred_host_operations ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createDeferredOperationKHR( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::DeferredOperationKHR * pDeferredOperation, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::DeferredOperationKHR>::type createDeferredOperationKHR( Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DeferredOperationKHR, Dispatch>>::type createDeferredOperationKHRUnique( Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    uint32_t getDeferredOperationMaxConcurrencyKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  //=== VK_KHR_pipeline_executable_properties ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR * pPipelineInfo, uint32_t * pExecutableCount, VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR * pProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename PipelineExecutablePropertiesKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator>>::type getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename PipelineExecutablePropertiesKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = PipelineExecutablePropertiesKHRAllocator, typename std::enable_if<std::is_same<typename B1::value_type, PipelineExecutablePropertiesKHR>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator>>::type getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo, PipelineExecutablePropertiesKHRAllocator & pipelineExecutablePropertiesKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo, uint32_t * pStatisticCount, VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR * pStatistics, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename PipelineExecutableStatisticKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator>>::type getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename PipelineExecutableStatisticKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = PipelineExecutableStatisticKHRAllocator, typename std::enable_if<std::is_same<typename B1::value_type, PipelineExecutableStatisticKHR>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator>>::type getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, PipelineExecutableStatisticKHRAllocator & pipelineExecutableStatisticKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo, uint32_t * pInternalRepresentationCount, VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR * pInternalRepresentations, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename PipelineExecutableInternalRepresentationKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>>::type getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename PipelineExecutableInternalRepresentationKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = PipelineExecutableInternalRepresentationKHRAllocator, typename std::enable_if<std::is_same<typename B1::value_type, PipelineExecutableInternalRepresentationKHR>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>>::type getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, PipelineExecutableInternalRepresentationKHRAllocator & pipelineExecutableInternalRepresentationKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_EXT_swapchain_maintenance1 ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result releaseSwapchainImagesEXT( const VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT * pReleaseInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type releaseSwapchainImagesEXT( const VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT & releaseInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_NV_device_generated_commands ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV * pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createIndirectCommandsLayoutNV( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV * pIndirectCommandsLayout, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV>::type createIndirectCommandsLayoutNV( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV, Dispatch>>::type createIndirectCommandsLayoutNVUnique( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_EXT_private_data ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createPrivateDataSlotEXT( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::PrivateDataSlot * pPrivateDataSlot, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::PrivateDataSlot>::type createPrivateDataSlotEXT( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PrivateDataSlot, Dispatch>>::type createPrivateDataSlotEXTUnique( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, uint64_t data, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, uint64_t data, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, uint64_t * pData, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD uint64_t getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+  //=== VK_EXT_metal_objects ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void exportMetalObjectsEXT( VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT * pMetalObjectsInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT exportMetalObjectsEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> exportMetalObjectsEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  //=== VK_EXT_descriptor_buffer ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getDescriptorSetLayoutSizeEXT( VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout, VULKAN_HPP_NAMESPACE::DeviceSize * pLayoutSizeInBytes, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceSize getDescriptorSetLayoutSizeEXT( VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getDescriptorSetLayoutBindingOffsetEXT( VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout, uint32_t binding, VULKAN_HPP_NAMESPACE::DeviceSize * pOffset, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceSize getDescriptorSetLayoutBindingOffsetEXT( VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout, uint32_t binding, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getDescriptorEXT( const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT * pDescriptorInfo, size_t dataSize, void * pDescriptor, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename DescriptorType, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD DescriptorType getDescriptorEXT( const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT & descriptorInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getBufferOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename DataType, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<DataType>::type getBufferOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getImageOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename DataType, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<DataType>::type getImageOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getImageViewOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename DataType, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<DataType>::type getImageViewOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getSamplerOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename DataType, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<DataType>::type getSamplerOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getAccelerationStructureOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename DataType, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<DataType>::type getAccelerationStructureOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_EXT_image_compression_control ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT * pSubresource, VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT * pLayout, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_EXT_device_fault ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getFaultInfoEXT( VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT * pFaultCounts, VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT * pFaultInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD ResultValue<std::pair<VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT, VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT>> getFaultInfoEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_ray_tracing_pipeline ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR * pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename PipelineAllocator = std::allocator<VULKAN_HPP_NAMESPACE::Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename PipelineAllocator = std::allocator<VULKAN_HPP_NAMESPACE::Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B0 = PipelineAllocator, typename std::enable_if<std::is_same<typename B0::value_type, Pipeline>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD ResultValue<VULKAN_HPP_NAMESPACE::Pipeline> createRayTracingPipelineKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename PipelineAllocator = std::allocator<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>>
+    VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> createRayTracingPipelinesKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename PipelineAllocator = std::allocator<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>, typename B0 = PipelineAllocator, typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> createRayTracingPipelinesKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>> createRayTracingPipelineKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void * pData, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename DataType, typename DataTypeAllocator = std::allocator<DataType>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename DataType, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<DataType>::type getRayTracingShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void * pData, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename DataType, typename DataTypeAllocator = std::allocator<DataType>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename DataType, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<DataType>::type getRayTracingCaptureReplayShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    DeviceSize getRayTracingShaderGroupStackSizeKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t group, VULKAN_HPP_NAMESPACE::ShaderGroupShaderKHR groupShader, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_external_memory ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getMemoryZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, zx_handle_t * pZirconHandle, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<zx_handle_t>::type getMemoryZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA & getZirconHandleInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, zx_handle_t zirconHandle, VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA * pMemoryZirconHandleProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA>::type getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, zx_handle_t zirconHandle, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_external_semaphore ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result importSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA * pImportSemaphoreZirconHandleInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type importSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA & importSemaphoreZirconHandleInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, zx_handle_t * pZirconHandle, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<zx_handle_t>::type getSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA & getZirconHandleInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_buffer_collection ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createBufferCollectionFUCHSIA( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA * pCollection, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA>::type createBufferCollectionFUCHSIA( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA, Dispatch>>::type createBufferCollectionFUCHSIAUnique( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result setBufferCollectionImageConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA * pImageConstraintsInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type setBufferCollectionImageConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA & imageConstraintsInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result setBufferCollectionBufferConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA * pBufferConstraintsInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type setBufferCollectionBufferConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA & bufferConstraintsInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getBufferCollectionPropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA * pProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA>::type getBufferCollectionPropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  //=== VK_HUAWEI_subpass_shading ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getSubpassShadingMaxWorkgroupSizeHUAWEI( VULKAN_HPP_NAMESPACE::RenderPass renderpass, VULKAN_HPP_NAMESPACE::Extent2D * pMaxWorkgroupSize, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD ResultValue<VULKAN_HPP_NAMESPACE::Extent2D> getSubpassShadingMaxWorkgroupSizeHUAWEI( VULKAN_HPP_NAMESPACE::RenderPass renderpass, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_NV_external_memory_rdma ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getMemoryRemoteAddressNV( const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV * pMemoryGetRemoteAddressInfo, VULKAN_HPP_NAMESPACE::RemoteAddressNV * pAddress, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::RemoteAddressNV>::type getMemoryRemoteAddressNV( const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV & memoryGetRemoteAddressInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_EXT_pipeline_properties ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getPipelinePropertiesEXT( const VULKAN_HPP_NAMESPACE::PipelineInfoEXT * pPipelineInfo, VULKAN_HPP_NAMESPACE::BaseOutStructure * pPipelineProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::BaseOutStructure>::type getPipelinePropertiesEXT( const VULKAN_HPP_NAMESPACE::PipelineInfoEXT & pipelineInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_EXT_opacity_micromap ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createMicromapEXT( const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromap, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::MicromapEXT>::type createMicromapEXT( const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::MicromapEXT, Dispatch>>::type createMicromapEXTUnique( const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyMicromapEXT( VULKAN_HPP_NAMESPACE::MicromapEXT micromap, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyMicromapEXT( VULKAN_HPP_NAMESPACE::MicromapEXT micromap VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::MicromapEXT micromap, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::MicromapEXT micromap, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result buildMicromapsEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, uint32_t infoCount, const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pInfos, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result buildMicromapsEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT> const & infos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result copyMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT * pInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result copyMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result copyMicromapToMemoryEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT * pInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result copyMicromapToMemoryEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result copyMemoryToMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT * pInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result copyMemoryToMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result writeMicromapsPropertiesEXT( uint32_t micromapCount, const VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromaps, VULKAN_HPP_NAMESPACE::QueryType queryType, size_t dataSize, void * pData, size_t stride, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename DataType, typename DataTypeAllocator = std::allocator<DataType>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps, VULKAN_HPP_NAMESPACE::QueryType queryType, size_t dataSize, size_t stride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename DataType, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<DataType>::type writeMicromapsPropertyEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps, VULKAN_HPP_NAMESPACE::QueryType queryType, size_t stride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getMicromapCompatibilityEXT( const VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT * pVersionInfo, VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR * pCompatibility, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR getMicromapCompatibilityEXT( const VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT & versionInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getMicromapBuildSizesEXT( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pBuildInfo, VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT * pSizeInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT getMicromapBuildSizesEXT( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT & buildInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_EXT_pageable_device_local_memory ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setMemoryPriorityEXT( VULKAN_HPP_NAMESPACE::DeviceMemory memory, float priority, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_KHR_maintenance4 ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements * pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo, uint32_t * pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename SparseImageMemoryRequirements2Allocator = std::allocator<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename SparseImageMemoryRequirements2Allocator = std::allocator<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = SparseImageMemoryRequirements2Allocator, typename std::enable_if<std::is_same<typename B1::value_type, SparseImageMemoryRequirements2>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_VALVE_descriptor_set_host_mapping ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getDescriptorSetLayoutHostMappingInfoVALVE( const VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE * pBindingReference, VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE * pHostMapping, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE getDescriptorSetLayoutHostMappingInfoVALVE( const VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE & bindingReference, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getDescriptorSetHostMappingVALVE( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, void ** ppData, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD void * getDescriptorSetHostMappingVALVE( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_EXT_shader_module_identifier ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getShaderModuleIdentifierEXT( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT * pIdentifier, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT getShaderModuleIdentifierEXT( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getShaderModuleCreateInfoIdentifierEXT( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo * pCreateInfo, VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT * pIdentifier, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT getShaderModuleCreateInfoIdentifierEXT( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_NV_optical_flow ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createOpticalFlowSessionNV( const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV * pSession, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV>::type createOpticalFlowSessionNV( const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV, Dispatch>>::type createOpticalFlowSessionNVUnique( const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyOpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyOpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result bindOpticalFlowSessionImageNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV bindingPoint, VULKAN_HPP_NAMESPACE::ImageView view, VULKAN_HPP_NAMESPACE::ImageLayout layout, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type bindOpticalFlowSessionImageNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV bindingPoint, VULKAN_HPP_NAMESPACE::ImageView view, VULKAN_HPP_NAMESPACE::ImageLayout layout, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  //=== VK_QCOM_tile_properties ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getFramebufferTilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, uint32_t * pPropertiesCount, VULKAN_HPP_NAMESPACE::TilePropertiesQCOM * pProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename TilePropertiesQCOMAllocator = std::allocator<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM, TilePropertiesQCOMAllocator>>::type getFramebufferTilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename TilePropertiesQCOMAllocator = std::allocator<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = TilePropertiesQCOMAllocator, typename std::enable_if<std::is_same<typename B1::value_type, TilePropertiesQCOM>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM, TilePropertiesQCOMAllocator>>::type getFramebufferTilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, TilePropertiesQCOMAllocator & tilePropertiesQCOMAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getDynamicRenderingTilePropertiesQCOM( const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo, VULKAN_HPP_NAMESPACE::TilePropertiesQCOM * pProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::TilePropertiesQCOM getDynamicRenderingTilePropertiesQCOM( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    // Implicit conversion to the underlying C handle, for passing to the raw Vulkan C API.
+    operator VkDevice() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_device;
+    }
+
+    // True when the wrapped handle is non-null.
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_device != VK_NULL_HANDLE;
+    }
+
+    // True when the wrapped handle is null (complement of operator bool).
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_device == VK_NULL_HANDLE;
+    }
+
+  private:
+    // Raw Vulkan C handle; value-initialized to a null handle.
+    VkDevice m_device = {};
+  };
+
+  // Maps the generic ObjectType enum value eDevice back to the Device handle class,
+  // letting templated utilities recover the C++ wrapper type from the enum tag.
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDevice>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Device;
+  };
+
+
+  // Same mapping for the VK_EXT_debug_report object-type enum.
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDevice>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Device;
+  };
+
+
+  // Compile-time trait marking Device as a Vulkan handle wrapper type.
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Device>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  // Handle wrapper for VkDisplayModeKHR. A thin value type: it stores only the
+  // raw C handle (no ownership, no destructor) and is freely copyable.
+  class DisplayModeKHR
+  {
+  public:
+    using CType = VkDisplayModeKHR;
+    using NativeType = VkDisplayModeKHR;
+
+    // Object-type tags used by debug/reflection utilities.
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDisplayModeKHR;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayModeKHR;
+
+  public:
+    // Default and nullptr construction leave the handle value-initialized (null).
+    VULKAN_HPP_CONSTEXPR DisplayModeKHR() = default;
+    VULKAN_HPP_CONSTEXPR DisplayModeKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {}
+    // Wrap an existing C handle; explicit only when VULKAN_HPP_TYPESAFE_CONVERSION is in effect.
+    VULKAN_HPP_TYPESAFE_EXPLICIT DisplayModeKHR( VkDisplayModeKHR displayModeKHR ) VULKAN_HPP_NOEXCEPT
+      : m_displayModeKHR( displayModeKHR )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    // Assignment from the raw C handle (only available when handle types are distinct).
+    DisplayModeKHR & operator=(VkDisplayModeKHR displayModeKHR) VULKAN_HPP_NOEXCEPT
+    {
+      m_displayModeKHR = displayModeKHR;
+      return *this;
+    }
+#endif
+
+    // Reset to the null handle.
+    DisplayModeKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_displayModeKHR = {};
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    // C++20: all comparisons derived from the wrapped handle value.
+    auto operator<=>( DisplayModeKHR const & ) const = default;
+#else
+    // Pre-C++20 fallback: compare by wrapped handle value.
+    bool operator==( DisplayModeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_displayModeKHR == rhs.m_displayModeKHR;
+    }
+
+    bool operator!=(DisplayModeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_displayModeKHR != rhs.m_displayModeKHR;
+    }
+
+    bool operator<(DisplayModeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_displayModeKHR < rhs.m_displayModeKHR;
+    }
+#endif
+
+    // Conversion to the underlying C handle for use with the raw Vulkan C API.
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDisplayModeKHR() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_displayModeKHR;
+    }
+
+    // True when the wrapped handle is non-null.
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_displayModeKHR != VK_NULL_HANDLE;
+    }
+
+    // True when the wrapped handle is null (complement of operator bool).
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_displayModeKHR == VK_NULL_HANDLE;
+    }
+
+  private:
+    // Raw Vulkan C handle; value-initialized to a null handle.
+    VkDisplayModeKHR m_displayModeKHR = {};
+  };
+
+  // Maps the generic ObjectType enum value eDisplayModeKHR back to the
+  // DisplayModeKHR handle class for templated utilities.
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDisplayModeKHR>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::DisplayModeKHR;
+  };
+
+
+  // Same mapping for the VK_EXT_debug_report object-type enum.
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayModeKHR>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::DisplayModeKHR;
+  };
+
+
+  // Compile-time trait marking DisplayModeKHR as a Vulkan handle wrapper type.
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DisplayModeKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  // RAII support: a UniqueHandle<Device> destroys its handle through
+  // ObjectDestroy<NoParent, ...> — NoParent because a device is destroyed
+  // without reference to an owning parent handle.
+  template <typename Dispatch>
+  class UniqueHandleTraits<Device, Dispatch>
+  {
+  public:
+    using deleter = ObjectDestroy<NoParent, Dispatch>;
+  };
+  // Convenience alias using the library's default dispatcher.
+  using UniqueDevice = UniqueHandle<Device, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+
+  class PhysicalDevice
+  {
+  public:
+    using CType = VkPhysicalDevice;
+    using NativeType = VkPhysicalDevice;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePhysicalDevice;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePhysicalDevice;
+
+  public:
+    VULKAN_HPP_CONSTEXPR PhysicalDevice() = default;
+    VULKAN_HPP_CONSTEXPR PhysicalDevice( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {}
+    PhysicalDevice( VkPhysicalDevice physicalDevice ) VULKAN_HPP_NOEXCEPT
+      : m_physicalDevice( physicalDevice )
+    {}
+
+    PhysicalDevice & operator=(VkPhysicalDevice physicalDevice) VULKAN_HPP_NOEXCEPT
+    {
+      m_physicalDevice = physicalDevice;
+      return *this;
+    }
+
+    PhysicalDevice & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_physicalDevice = {};
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDevice const & ) const = default;
+#else
+    bool operator==( PhysicalDevice const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_physicalDevice == rhs.m_physicalDevice;
+    }
+
+    bool operator!=(PhysicalDevice const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_physicalDevice != rhs.m_physicalDevice;
+    }
+
+    bool operator<(PhysicalDevice const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_physicalDevice < rhs.m_physicalDevice;
+    }
+#endif
+
+  //=== VK_VERSION_1_0 ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pFeatures, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures getFeatures( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties * pFormatProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::FormatProperties getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ImageFormatProperties * pImageFormatProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties>::type getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties * pProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties getProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getQueueFamilyProperties( uint32_t * pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties * pQueueFamilyProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename QueueFamilyPropertiesAllocator = std::allocator<VULKAN_HPP_NAMESPACE::QueueFamilyProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties, QueueFamilyPropertiesAllocator> getQueueFamilyProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename QueueFamilyPropertiesAllocator = std::allocator<VULKAN_HPP_NAMESPACE::QueueFamilyProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = QueueFamilyPropertiesAllocator, typename std::enable_if<std::is_same<typename B1::value_type, QueueFamilyProperties>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties, QueueFamilyPropertiesAllocator> getQueueFamilyProperties( QueueFamilyPropertiesAllocator & queueFamilyPropertiesAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getMemoryProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties * pMemoryProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties getMemoryProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Device * pDevice, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::Device>::type createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Device, Dispatch>>::type createDeviceUnique( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result enumerateDeviceExtensionProperties( const char * pLayerName, uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename ExtensionPropertiesAllocator = std::allocator<VULKAN_HPP_NAMESPACE::ExtensionProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator>>::type enumerateDeviceExtensionProperties( Optional<const std::string> layerName VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename ExtensionPropertiesAllocator = std::allocator<VULKAN_HPP_NAMESPACE::ExtensionProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = ExtensionPropertiesAllocator, typename std::enable_if<std::is_same<typename B1::value_type, ExtensionProperties>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator>>::type enumerateDeviceExtensionProperties( Optional<const std::string> layerName, ExtensionPropertiesAllocator & extensionPropertiesAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result enumerateDeviceLayerProperties( uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::LayerProperties * pProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename LayerPropertiesAllocator = std::allocator<VULKAN_HPP_NAMESPACE::LayerProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator>>::type enumerateDeviceLayerProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename LayerPropertiesAllocator = std::allocator<VULKAN_HPP_NAMESPACE::LayerProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = LayerPropertiesAllocator, typename std::enable_if<std::is_same<typename B1::value_type, LayerProperties>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator>>::type enumerateDeviceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties * pProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename SparseImageFormatPropertiesAllocator = std::allocator<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename SparseImageFormatPropertiesAllocator = std::allocator<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = SparseImageFormatPropertiesAllocator, typename std::enable_if<std::is_same<typename B1::value_type, SparseImageFormatProperties>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, SparseImageFormatPropertiesAllocator & sparseImageFormatPropertiesAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_VERSION_1_1 ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 getFeatures2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getFeatures2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 getProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::FormatProperties2 getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo, VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<StructureChain<X, Y, Z...>>::type getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getQueueFamilyProperties2( uint32_t * pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename QueueFamilyProperties2Allocator = std::allocator<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> getQueueFamilyProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename QueueFamilyProperties2Allocator = std::allocator<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = QueueFamilyProperties2Allocator, typename std::enable_if<std::is_same<typename B1::value_type, QueueFamilyProperties2>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> getQueueFamilyProperties2( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename StructureChain, typename StructureChainAllocator = std::allocator<StructureChain>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD std::vector<StructureChain, StructureChainAllocator> getQueueFamilyProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename StructureChain, typename StructureChainAllocator = std::allocator<StructureChain>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = StructureChainAllocator, typename std::enable_if<std::is_same<typename B1::value_type, StructureChain>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD std::vector<StructureChain, StructureChainAllocator> getQueueFamilyProperties2( StructureChainAllocator & structureChainAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 getMemoryProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getMemoryProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename SparseImageFormatProperties2Allocator = std::allocator<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename SparseImageFormatProperties2Allocator = std::allocator<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = SparseImageFormatProperties2Allocator, typename std::enable_if<std::is_same<typename B1::value_type, SparseImageFormatProperties2>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo, VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalBufferProperties getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo, VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalFenceProperties getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_VERSION_1_3 ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getToolProperties( uint32_t * pToolCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties * pToolProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename PhysicalDeviceToolPropertiesAllocator = std::allocator<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator>>::type getToolProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename PhysicalDeviceToolPropertiesAllocator = std::allocator<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = PhysicalDeviceToolPropertiesAllocator, typename std::enable_if<std::is_same<typename B1::value_type, PhysicalDeviceToolProperties>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator>>::type getToolProperties( PhysicalDeviceToolPropertiesAllocator & physicalDeviceToolPropertiesAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_surface ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::Bool32 * pSupported, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::Bool32>::type getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR * pSurfaceCapabilities, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR>::type getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t * pSurfaceFormatCount, VULKAN_HPP_NAMESPACE::SurfaceFormatKHR * pSurfaceFormats, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename SurfaceFormatKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR, SurfaceFormatKHRAllocator>>::type getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename SurfaceFormatKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = SurfaceFormatKHRAllocator, typename std::enable_if<std::is_same<typename B1::value_type, SurfaceFormatKHR>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR, SurfaceFormatKHRAllocator>>::type getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, SurfaceFormatKHRAllocator & surfaceFormatKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t * pPresentModeCount, VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename PresentModeKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::PresentModeKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator>>::type getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename PresentModeKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::PresentModeKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = PresentModeKHRAllocator, typename std::enable_if<std::is_same<typename B1::value_type, PresentModeKHR>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator>>::type getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, PresentModeKHRAllocator & presentModeKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_swapchain ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t * pRectCount, VULKAN_HPP_NAMESPACE::Rect2D * pRects, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Rect2DAllocator = std::allocator<VULKAN_HPP_NAMESPACE::Rect2D>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::Rect2D, Rect2DAllocator>>::type getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Rect2DAllocator = std::allocator<VULKAN_HPP_NAMESPACE::Rect2D>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = Rect2DAllocator, typename std::enable_if<std::is_same<typename B1::value_type, Rect2D>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::Rect2D, Rect2DAllocator>>::type getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Rect2DAllocator & rect2DAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_display ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getDisplayPropertiesKHR( uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR * pProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename DisplayPropertiesKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR, DisplayPropertiesKHRAllocator>>::type getDisplayPropertiesKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename DisplayPropertiesKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = DisplayPropertiesKHRAllocator, typename std::enable_if<std::is_same<typename B1::value_type, DisplayPropertiesKHR>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR, DisplayPropertiesKHRAllocator>>::type getDisplayPropertiesKHR( DisplayPropertiesKHRAllocator & displayPropertiesKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getDisplayPlanePropertiesKHR( uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR * pProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename DisplayPlanePropertiesKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator>>::type getDisplayPlanePropertiesKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename DisplayPlanePropertiesKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = DisplayPlanePropertiesKHRAllocator, typename std::enable_if<std::is_same<typename B1::value_type, DisplayPlanePropertiesKHR>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator>>::type getDisplayPlanePropertiesKHR( DisplayPlanePropertiesKHRAllocator & displayPlanePropertiesKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, uint32_t * pDisplayCount, VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplays, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename DisplayKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::DisplayKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayKHR, DisplayKHRAllocator>>::type getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename DisplayKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::DisplayKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = DisplayKHRAllocator, typename std::enable_if<std::is_same<typename B1::value_type, DisplayKHR>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayKHR, DisplayKHRAllocator>>::type getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, DisplayKHRAllocator & displayKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR * pProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename DisplayModePropertiesKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator>>::type getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename DisplayModePropertiesKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = DisplayModePropertiesKHRAllocator, typename std::enable_if<std::is_same<typename B1::value_type, DisplayModePropertiesKHR>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator>>::type getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, DisplayModePropertiesKHRAllocator & displayModePropertiesKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::DisplayModeKHR * pMode, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayModeKHR>::type createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayModeKHR, Dispatch>>::type createDisplayModeKHRUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR * pCapabilities, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR>::type getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+#if defined( VK_USE_PLATFORM_XLIB_KHR )
+  //=== VK_KHR_xlib_surface ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Bool32 getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display * dpy, VisualID visualID, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::Bool32 getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display & dpy, VisualID visualID, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+#if defined( VK_USE_PLATFORM_XCB_KHR )
+  //=== VK_KHR_xcb_surface ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Bool32 getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t * connection, xcb_visualid_t visual_id, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::Bool32 getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t & connection, xcb_visualid_t visual_id, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
+  //=== VK_KHR_wayland_surface ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Bool32 getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display * display, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::Bool32 getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display & display, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_win32_surface ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Bool32 getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_video_queue ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pVideoProfile, VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR * pCapabilities, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR>::type getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<StructureChain<X, Y, Z...>>::type getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR * pVideoFormatInfo, uint32_t * pVideoFormatPropertyCount, VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR * pVideoFormatProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename VideoFormatPropertiesKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator>>::type getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename VideoFormatPropertiesKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = VideoFormatPropertiesKHRAllocator, typename std::enable_if<std::is_same<typename B1::value_type, VideoFormatPropertiesKHR>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator>>::type getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, VideoFormatPropertiesKHRAllocator & videoFormatPropertiesKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_NV_external_memory_capabilities ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV * pExternalImageFormatProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV>::type getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_get_physical_device_properties2 ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getFeatures2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 getFeatures2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getFeatures2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 getProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::FormatProperties2 getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo, VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<StructureChain<X, Y, Z...>>::type getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getQueueFamilyProperties2KHR( uint32_t * pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename QueueFamilyProperties2Allocator = std::allocator<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> getQueueFamilyProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename QueueFamilyProperties2Allocator = std::allocator<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = QueueFamilyProperties2Allocator, typename std::enable_if<std::is_same<typename B1::value_type, QueueFamilyProperties2>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> getQueueFamilyProperties2KHR( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename StructureChain, typename StructureChainAllocator = std::allocator<StructureChain>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD std::vector<StructureChain, StructureChainAllocator> getQueueFamilyProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename StructureChain, typename StructureChainAllocator = std::allocator<StructureChain>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = StructureChainAllocator, typename std::enable_if<std::is_same<typename B1::value_type, StructureChain>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD std::vector<StructureChain, StructureChainAllocator> getQueueFamilyProperties2KHR( StructureChainAllocator & structureChainAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getMemoryProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 getMemoryProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getMemoryProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename SparseImageFormatProperties2Allocator = std::allocator<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename SparseImageFormatProperties2Allocator = std::allocator<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = SparseImageFormatProperties2Allocator, typename std::enable_if<std::is_same<typename B1::value_type, SparseImageFormatProperties2>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_external_memory_capabilities ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getExternalBufferPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo, VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalBufferProperties getExternalBufferPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_external_semaphore_capabilities ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getExternalSemaphorePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties getExternalSemaphorePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_EXT_direct_mode_display ===
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT )
+  //=== VK_EXT_acquire_xlib_display ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result acquireXlibDisplayEXT( Display * dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type acquireXlibDisplayEXT( Display & dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getRandROutputDisplayEXT( Display * dpy, RROutput rrOutput, VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>>::type getRandROutputDisplayEXTUnique( Display & dpy, RROutput rrOutput, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+
+  //=== VK_EXT_display_surface_counter ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT * pSurfaceCapabilities, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT>::type getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_external_fence_capabilities ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo, VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalFenceProperties getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_performance_query ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, uint32_t * pCounterCount, VULKAN_HPP_NAMESPACE::PerformanceCounterKHR * pCounters, VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR * pCounterDescriptions, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename PerformanceCounterKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR>, typename PerformanceCounterDescriptionKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator>, std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>>::type enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename PerformanceCounterKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR>, typename PerformanceCounterDescriptionKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = PerformanceCounterKHRAllocator, typename B2 = PerformanceCounterDescriptionKHRAllocator, typename std::enable_if<std::is_same<typename B1::value_type, PerformanceCounterKHR>::value && std::is_same<typename B2::value_type, PerformanceCounterDescriptionKHR>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator>, std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>>::type enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, PerformanceCounterKHRAllocator & performanceCounterKHRAllocator, PerformanceCounterDescriptionKHRAllocator & performanceCounterDescriptionKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getQueueFamilyPerformanceQueryPassesKHR( const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR * pPerformanceQueryCreateInfo, uint32_t * pNumPasses, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD uint32_t getQueueFamilyPerformanceQueryPassesKHR( const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_get_surface_capabilities2 ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR * pSurfaceCapabilities, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>::type getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<StructureChain<X, Y, Z...>>::type getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, uint32_t * pSurfaceFormatCount, VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR * pSurfaceFormats, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename SurfaceFormat2KHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR, SurfaceFormat2KHRAllocator>>::type getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename SurfaceFormat2KHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = SurfaceFormat2KHRAllocator, typename std::enable_if<std::is_same<typename B1::value_type, SurfaceFormat2KHR>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR, SurfaceFormat2KHRAllocator>>::type getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, SurfaceFormat2KHRAllocator & surfaceFormat2KHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename StructureChain, typename StructureChainAllocator = std::allocator<StructureChain>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<StructureChain, StructureChainAllocator>>::type getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename StructureChain, typename StructureChainAllocator = std::allocator<StructureChain>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = StructureChainAllocator, typename std::enable_if<std::is_same<typename B1::value_type, StructureChain>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<StructureChain, StructureChainAllocator>>::type getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, StructureChainAllocator & structureChainAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_get_display_properties2 ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getDisplayProperties2KHR( uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayProperties2KHR * pProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename DisplayProperties2KHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR, DisplayProperties2KHRAllocator>>::type getDisplayProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename DisplayProperties2KHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = DisplayProperties2KHRAllocator, typename std::enable_if<std::is_same<typename B1::value_type, DisplayProperties2KHR>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR, DisplayProperties2KHRAllocator>>::type getDisplayProperties2KHR( DisplayProperties2KHRAllocator & displayProperties2KHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getDisplayPlaneProperties2KHR( uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR * pProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename DisplayPlaneProperties2KHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator>>::type getDisplayPlaneProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename DisplayPlaneProperties2KHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = DisplayPlaneProperties2KHRAllocator, typename std::enable_if<std::is_same<typename B1::value_type, DisplayPlaneProperties2KHR>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator>>::type getDisplayPlaneProperties2KHR( DisplayPlaneProperties2KHRAllocator & displayPlaneProperties2KHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR * pProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename DisplayModeProperties2KHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator>>::type getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename DisplayModeProperties2KHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = DisplayModeProperties2KHRAllocator, typename std::enable_if<std::is_same<typename B1::value_type, DisplayModeProperties2KHR>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator>>::type getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, DisplayModeProperties2KHRAllocator & displayModeProperties2KHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR * pDisplayPlaneInfo, VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR * pCapabilities, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR>::type getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR & displayPlaneInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_EXT_sample_locations ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT * pMultisampleProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_EXT_calibrated_timestamps ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getCalibrateableTimeDomainsEXT( uint32_t * pTimeDomainCount, VULKAN_HPP_NAMESPACE::TimeDomainEXT * pTimeDomains, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename TimeDomainEXTAllocator = std::allocator<VULKAN_HPP_NAMESPACE::TimeDomainEXT>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::TimeDomainEXT, TimeDomainEXTAllocator>>::type getCalibrateableTimeDomainsEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename TimeDomainEXTAllocator = std::allocator<VULKAN_HPP_NAMESPACE::TimeDomainEXT>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = TimeDomainEXTAllocator, typename std::enable_if<std::is_same<typename B1::value_type, TimeDomainEXT>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::TimeDomainEXT, TimeDomainEXTAllocator>>::type getCalibrateableTimeDomainsEXT( TimeDomainEXTAllocator & timeDomainEXTAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_fragment_shading_rate ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getFragmentShadingRatesKHR( uint32_t * pFragmentShadingRateCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR * pFragmentShadingRates, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename PhysicalDeviceFragmentShadingRateKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator>>::type getFragmentShadingRatesKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename PhysicalDeviceFragmentShadingRateKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = PhysicalDeviceFragmentShadingRateKHRAllocator, typename std::enable_if<std::is_same<typename B1::value_type, PhysicalDeviceFragmentShadingRateKHR>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator>>::type getFragmentShadingRatesKHR( PhysicalDeviceFragmentShadingRateKHRAllocator & physicalDeviceFragmentShadingRateKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_EXT_tooling_info ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getToolPropertiesEXT( uint32_t * pToolCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties * pToolProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename PhysicalDeviceToolPropertiesAllocator = std::allocator<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator>>::type getToolPropertiesEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename PhysicalDeviceToolPropertiesAllocator = std::allocator<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = PhysicalDeviceToolPropertiesAllocator, typename std::enable_if<std::is_same<typename B1::value_type, PhysicalDeviceToolProperties>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator>>::type getToolPropertiesEXT( PhysicalDeviceToolPropertiesAllocator & physicalDeviceToolPropertiesAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_NV_cooperative_matrix ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getCooperativeMatrixPropertiesNV( uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV * pProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename CooperativeMatrixPropertiesNVAllocator = std::allocator<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator>>::type getCooperativeMatrixPropertiesNV( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename CooperativeMatrixPropertiesNVAllocator = std::allocator<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = CooperativeMatrixPropertiesNVAllocator, typename std::enable_if<std::is_same<typename B1::value_type, CooperativeMatrixPropertiesNV>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator>>::type getCooperativeMatrixPropertiesNV( CooperativeMatrixPropertiesNVAllocator & cooperativeMatrixPropertiesNVAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_NV_coverage_reduction_mode ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getSupportedFramebufferMixedSamplesCombinationsNV( uint32_t * pCombinationCount, VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV * pCombinations, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename FramebufferMixedSamplesCombinationNVAllocator = std::allocator<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator>>::type getSupportedFramebufferMixedSamplesCombinationsNV( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename FramebufferMixedSamplesCombinationNVAllocator = std::allocator<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = FramebufferMixedSamplesCombinationNVAllocator, typename std::enable_if<std::is_same<typename B1::value_type, FramebufferMixedSamplesCombinationNV>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator>>::type getSupportedFramebufferMixedSamplesCombinationsNV( FramebufferMixedSamplesCombinationNVAllocator & framebufferMixedSamplesCombinationNVAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_EXT_full_screen_exclusive ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, uint32_t * pPresentModeCount, VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename PresentModeKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::PresentModeKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator>>::type getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename PresentModeKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::PresentModeKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = PresentModeKHRAllocator, typename std::enable_if<std::is_same<typename B1::value_type, PresentModeKHR>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator>>::type getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, PresentModeKHRAllocator & presentModeKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_EXT_acquire_drm_display ===
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result acquireDrmDisplayEXT( int32_t drmFd, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type acquireDrmDisplayEXT( int32_t drmFd, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getDrmDisplayEXT( int32_t drmFd, uint32_t connectorId, VULKAN_HPP_NAMESPACE::DisplayKHR * display, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type getDrmDisplayEXT( int32_t drmFd, uint32_t connectorId, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>>::type getDrmDisplayEXTUnique( int32_t drmFd, uint32_t connectorId, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_NV_acquire_winrt_display ===
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result acquireWinrtDisplayNV( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type acquireWinrtDisplayNV( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getWinrtDisplayNV( uint32_t deviceRelativeId, VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type getWinrtDisplayNV( uint32_t deviceRelativeId, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>>::type getWinrtDisplayNVUnique( uint32_t deviceRelativeId, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
+  //=== VK_EXT_directfb_surface ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Bool32 getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex, IDirectFB * dfb, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::Bool32 getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex, IDirectFB & dfb, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+
+#if defined( VK_USE_PLATFORM_SCREEN_QNX )
+  //=== VK_QNX_screen_surface ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Bool32 getScreenPresentationSupportQNX( uint32_t queueFamilyIndex, struct _screen_window * window, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::Bool32 getScreenPresentationSupportQNX( uint32_t queueFamilyIndex, struct _screen_window & window, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+
+  //=== VK_NV_optical_flow ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV * pOpticalFlowImageFormatInfo, uint32_t * pFormatCount, VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV * pImageFormatProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename OpticalFlowImageFormatPropertiesNVAllocator = std::allocator<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV, OpticalFlowImageFormatPropertiesNVAllocator>>::type getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename OpticalFlowImageFormatPropertiesNVAllocator = std::allocator<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = OpticalFlowImageFormatPropertiesNVAllocator, typename std::enable_if<std::is_same<typename B1::value_type, OpticalFlowImageFormatPropertiesNV>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV, OpticalFlowImageFormatPropertiesNVAllocator>>::type getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo, OpticalFlowImageFormatPropertiesNVAllocator & opticalFlowImageFormatPropertiesNVAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    // Implicit conversion to the underlying C handle (VkPhysicalDevice).
+    operator VkPhysicalDevice() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_physicalDevice;
+    }
+
+    // True when the wrapped handle is non-null.
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_physicalDevice != VK_NULL_HANDLE;
+    }
+
+    // True when the wrapped handle is null (logical negation of operator bool).
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_physicalDevice == VK_NULL_HANDLE;
+    }
+
+  private:
+    // Raw Vulkan handle; value-initialized, i.e. starts out as a null handle.
+    VkPhysicalDevice m_physicalDevice = {};
+  };
+
+  // Maps the runtime enum value ObjectType::ePhysicalDevice back to its
+  // corresponding C++ handle wrapper type, PhysicalDevice.
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::ePhysicalDevice>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::PhysicalDevice;
+  };
+
+
+  // Same mapping for the VK_EXT_debug_report object-type enum:
+  // DebugReportObjectTypeEXT::ePhysicalDevice -> PhysicalDevice.
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePhysicalDevice>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::PhysicalDevice;
+  };
+
+
+  // Type trait marking PhysicalDevice as a Vulkan handle type; consumed by
+  // generic code elsewhere in this header (not visible in this hunk).
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::PhysicalDevice>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  // Forward declaration: Instance is the owner type referenced by the
+  // ObjectDestroy deleters below (its definition follows later in the file).
+  class Instance;
+  // Deleter traits for handles created from an Instance: each unique handle is
+  // destroyed via ObjectDestroy<Instance, Dispatch>.
+  template <typename Dispatch>
+  class UniqueHandleTraits<DebugReportCallbackEXT, Dispatch>
+  {
+  public:
+    using deleter = ObjectDestroy<Instance, Dispatch>;
+  };
+  using UniqueDebugReportCallbackEXT = UniqueHandle<DebugReportCallbackEXT, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch>
+  class UniqueHandleTraits<DebugUtilsMessengerEXT, Dispatch>
+  {
+  public:
+    using deleter = ObjectDestroy<Instance, Dispatch>;
+  };
+  using UniqueDebugUtilsMessengerEXT = UniqueHandle<DebugUtilsMessengerEXT, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch>
+  class UniqueHandleTraits<SurfaceKHR, Dispatch>
+  {
+  public:
+    using deleter = ObjectDestroy<Instance, Dispatch>;
+  };
+  using UniqueSurfaceKHR = UniqueHandle<SurfaceKHR, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+
+  class Instance
+  {
+  public:
+    using CType = VkInstance;
+    using NativeType = VkInstance;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eInstance;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eInstance;
+
+  public:
+    // Default-constructed wrapper holds a null VkInstance.
+    VULKAN_HPP_CONSTEXPR Instance() = default;
+    // nullptr_t constructor: allows `Instance i = nullptr;` (null handle).
+    VULKAN_HPP_CONSTEXPR Instance( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {}
+    // Implicit wrap of a raw C handle.
+    Instance( VkInstance instance ) VULKAN_HPP_NOEXCEPT
+      : m_instance( instance )
+    {}
+
+    // Re-seat the wrapper on a raw handle; no ownership is taken.
+    Instance & operator=(VkInstance instance) VULKAN_HPP_NOEXCEPT
+    {
+      m_instance = instance;
+      return *this;
+    }
+
+    // Reset to the null handle.
+    Instance & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_instance = {};
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    // C++20: one defaulted three-way comparison covers ==, !=, <, etc.
+    auto operator<=>( Instance const & ) const = default;
+#else
+    // Pre-C++20 fallback: hand-written comparisons on the raw handle value.
+    bool operator==( Instance const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_instance == rhs.m_instance;
+    }
+
+    bool operator!=(Instance const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_instance != rhs.m_instance;
+    }
+
+    // Ordering by handle value, e.g. for use as a key in ordered containers.
+    bool operator<(Instance const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_instance < rhs.m_instance;
+    }
+#endif
+
+  //=== VK_VERSION_1_0 ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result enumeratePhysicalDevices( uint32_t * pPhysicalDeviceCount, VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename PhysicalDeviceAllocator = std::allocator<VULKAN_HPP_NAMESPACE::PhysicalDevice>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDevice, PhysicalDeviceAllocator>>::type enumeratePhysicalDevices( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename PhysicalDeviceAllocator = std::allocator<VULKAN_HPP_NAMESPACE::PhysicalDevice>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = PhysicalDeviceAllocator, typename std::enable_if<std::is_same<typename B1::value_type, PhysicalDevice>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDevice, PhysicalDeviceAllocator>>::type enumeratePhysicalDevices( PhysicalDeviceAllocator & physicalDeviceAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    PFN_vkVoidFunction getProcAddr( const char * pName, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    PFN_vkVoidFunction getProcAddr( const std::string & name, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_VERSION_1_1 ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result enumeratePhysicalDeviceGroups( uint32_t * pPhysicalDeviceGroupCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename PhysicalDeviceGroupPropertiesAllocator = std::allocator<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type enumeratePhysicalDeviceGroups( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename PhysicalDeviceGroupPropertiesAllocator = std::allocator<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = PhysicalDeviceGroupPropertiesAllocator, typename std::enable_if<std::is_same<typename B1::value_type, PhysicalDeviceGroupProperties>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type enumeratePhysicalDeviceGroups( PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_surface ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_display ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createDisplayPlaneSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+#if defined( VK_USE_PLATFORM_XLIB_KHR )
+  //=== VK_KHR_xlib_surface ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createXlibSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+#if defined( VK_USE_PLATFORM_XCB_KHR )
+  //=== VK_KHR_xcb_surface ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createXcbSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
+  //=== VK_KHR_wayland_surface ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createWaylandSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  //=== VK_KHR_android_surface ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createAndroidSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_win32_surface ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createWin32SurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_EXT_debug_report ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT * pCallback, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT>::type createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT, Dispatch>>::type createDebugReportCallbackEXTUnique( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_, uint64_t object, size_t location, int32_t messageCode, const char * pLayerPrefix, const char * pMessage, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_, uint64_t object, size_t location, int32_t messageCode, const std::string & layerPrefix, const std::string & message, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+#if defined( VK_USE_PLATFORM_GGP )
+  //=== VK_GGP_stream_descriptor_surface ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createStreamDescriptorSurfaceGGP( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createStreamDescriptorSurfaceGGP( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createStreamDescriptorSurfaceGGPUnique( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_GGP*/
+
+#if defined( VK_USE_PLATFORM_VI_NN )
+  //=== VK_NN_vi_surface ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createViSurfaceNNUnique( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_VI_NN*/
+
+  //=== VK_KHR_device_group_creation ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result enumeratePhysicalDeviceGroupsKHR( uint32_t * pPhysicalDeviceGroupCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename PhysicalDeviceGroupPropertiesAllocator = std::allocator<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type enumeratePhysicalDeviceGroupsKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename PhysicalDeviceGroupPropertiesAllocator = std::allocator<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = PhysicalDeviceGroupPropertiesAllocator, typename std::enable_if<std::is_same<typename B1::value_type, PhysicalDeviceGroupProperties>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type enumeratePhysicalDeviceGroupsKHR( PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+#if defined( VK_USE_PLATFORM_IOS_MVK )
+  //=== VK_MVK_ios_surface ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createIOSSurfaceMVKUnique( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+#if defined( VK_USE_PLATFORM_MACOS_MVK )
+  //=== VK_MVK_macos_surface ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createMacOSSurfaceMVKUnique( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+
+  //=== VK_EXT_debug_utils ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT * pMessenger, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT>::type createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT, Dispatch>>::type createDebugUtilsMessengerEXTUnique( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT * pCallbackData, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT & callbackData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_imagepipe_surface ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createImagePipeSurfaceFUCHSIA( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createImagePipeSurfaceFUCHSIA( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createImagePipeSurfaceFUCHSIAUnique( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+  //=== VK_EXT_metal_surface ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createMetalSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  //=== VK_EXT_headless_surface ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createHeadlessSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
+  //=== VK_EXT_directfb_surface ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createDirectFBSurfaceEXT( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createDirectFBSurfaceEXT( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createDirectFBSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+
+#if defined( VK_USE_PLATFORM_SCREEN_QNX )
+  //=== VK_QNX_screen_surface ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createScreenSurfaceQNX( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createScreenSurfaceQNX( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createScreenSurfaceQNXUnique( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+
+    operator VkInstance() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_instance;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_instance != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_instance == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkInstance m_instance = {};
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eInstance>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Instance;
+  };
+
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eInstance>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Instance;
+  };
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Instance>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  //=== VK_VERSION_1_0 ===
+
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  class UniqueHandleTraits<Instance, Dispatch>
+  {
+  public:
+    using deleter = ObjectDestroy<NoParent, Dispatch>;
+  };
+  using UniqueInstance = UniqueHandle<Instance, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createInstance( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Instance * pInstance, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::Instance>::type createInstance( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT );
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Instance, Dispatch>>::type createInstanceUnique( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT );
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result enumerateInstanceExtensionProperties( const char * pLayerName, uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename ExtensionPropertiesAllocator = std::allocator<VULKAN_HPP_NAMESPACE::ExtensionProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator>>::type enumerateInstanceExtensionProperties( Optional<const std::string> layerName VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT );
+    template <typename ExtensionPropertiesAllocator = std::allocator<VULKAN_HPP_NAMESPACE::ExtensionProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = ExtensionPropertiesAllocator, typename std::enable_if<std::is_same<typename B1::value_type, ExtensionProperties>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator>>::type enumerateInstanceExtensionProperties( Optional<const std::string> layerName, ExtensionPropertiesAllocator & extensionPropertiesAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT );
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result enumerateInstanceLayerProperties( uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::LayerProperties * pProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename LayerPropertiesAllocator = std::allocator<VULKAN_HPP_NAMESPACE::LayerProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator>>::type enumerateInstanceLayerProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT );
+    template <typename LayerPropertiesAllocator = std::allocator<VULKAN_HPP_NAMESPACE::LayerProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = LayerPropertiesAllocator, typename std::enable_if<std::is_same<typename B1::value_type, LayerProperties>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator>>::type enumerateInstanceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT );
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_VERSION_1_1 ===
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result enumerateInstanceVersion( uint32_t * pApiVersion, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<uint32_t>::type enumerateInstanceVersion( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT );
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+}   // namespace VULKAN_HPP_NAMESPACE
+#endif
diff --git a/host/libs/graphics_detector/include/vulkan/vulkan_hash.hpp b/host/libs/graphics_detector/include/vulkan/vulkan_hash.hpp
new file mode 100644
index 0000000..3047672
--- /dev/null
+++ b/host/libs/graphics_detector/include/vulkan/vulkan_hash.hpp
@@ -0,0 +1,13298 @@
+// Copyright 2015-2022 The Khronos Group Inc.
+// 
+// SPDX-License-Identifier: Apache-2.0 OR MIT
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#ifndef VULKAN_HASH_HPP
+#  define VULKAN_HASH_HPP
+
+#include <vulkan/vulkan.hpp>
+
+namespace std
+{
+  //=======================================
+  //=== HASH structures for Flags types ===
+  //=======================================
+
+  template <typename BitType>
+  struct hash<VULKAN_HPP_NAMESPACE::Flags<BitType>>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::Flags<BitType> const & flags ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<typename std::underlying_type<BitType>::type>{}(
+        static_cast<typename std::underlying_type<BitType>::type>( flags ) );
+    }
+  };
+
+
+  //===================================
+  //=== HASH structures for handles ===
+  //===================================
+
+
+  //=== VK_VERSION_1_0 ===
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::Instance>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::Instance const & instance) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkInstance>{}(static_cast<VkInstance>(instance));
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevice>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDevice const & physicalDevice) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkPhysicalDevice>{}(static_cast<VkPhysicalDevice>(physicalDevice));
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::Device>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::Device const & device) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkDevice>{}(static_cast<VkDevice>(device));
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::Queue>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::Queue const & queue) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkQueue>{}(static_cast<VkQueue>(queue));
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DeviceMemory>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DeviceMemory const & deviceMemory) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkDeviceMemory>{}(static_cast<VkDeviceMemory>(deviceMemory));
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::Fence>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::Fence const & fence) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkFence>{}(static_cast<VkFence>(fence));
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::Semaphore>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::Semaphore const & semaphore) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkSemaphore>{}(static_cast<VkSemaphore>(semaphore));
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::Event>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::Event const & event) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkEvent>{}(static_cast<VkEvent>(event));
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::QueryPool>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::QueryPool const & queryPool) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkQueryPool>{}(static_cast<VkQueryPool>(queryPool));
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::Buffer>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::Buffer const & buffer) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkBuffer>{}(static_cast<VkBuffer>(buffer));
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::BufferView>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::BufferView const & bufferView) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkBufferView>{}(static_cast<VkBufferView>(bufferView));
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::Image>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::Image const & image) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkImage>{}(static_cast<VkImage>(image));
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ImageView>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ImageView const & imageView) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkImageView>{}(static_cast<VkImageView>(imageView));
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ShaderModule>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ShaderModule const & shaderModule) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkShaderModule>{}(static_cast<VkShaderModule>(shaderModule));
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PipelineCache>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PipelineCache const & pipelineCache) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkPipelineCache>{}(static_cast<VkPipelineCache>(pipelineCache));
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::Pipeline>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::Pipeline const & pipeline) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkPipeline>{}(static_cast<VkPipeline>(pipeline));
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PipelineLayout>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PipelineLayout const & pipelineLayout) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkPipelineLayout>{}(static_cast<VkPipelineLayout>(pipelineLayout));
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::Sampler>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::Sampler const & sampler) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkSampler>{}(static_cast<VkSampler>(sampler));
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DescriptorPool>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DescriptorPool const & descriptorPool) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkDescriptorPool>{}(static_cast<VkDescriptorPool>(descriptorPool));
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DescriptorSet>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DescriptorSet const & descriptorSet) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkDescriptorSet>{}(static_cast<VkDescriptorSet>(descriptorSet));
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DescriptorSetLayout>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DescriptorSetLayout const & descriptorSetLayout) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkDescriptorSetLayout>{}(static_cast<VkDescriptorSetLayout>(descriptorSetLayout));
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::Framebuffer>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::Framebuffer const & framebuffer) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkFramebuffer>{}(static_cast<VkFramebuffer>(framebuffer));
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::RenderPass>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::RenderPass const & renderPass) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkRenderPass>{}(static_cast<VkRenderPass>(renderPass));
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::CommandPool>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::CommandPool const & commandPool) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkCommandPool>{}(static_cast<VkCommandPool>(commandPool));
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::CommandBuffer>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::CommandBuffer const & commandBuffer) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkCommandBuffer>{}(static_cast<VkCommandBuffer>(commandBuffer));
+    }
+  };
+
+  //=== VK_VERSION_1_1 ===
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion const & samplerYcbcrConversion) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkSamplerYcbcrConversion>{}(static_cast<VkSamplerYcbcrConversion>(samplerYcbcrConversion));
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate const & descriptorUpdateTemplate) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkDescriptorUpdateTemplate>{}(static_cast<VkDescriptorUpdateTemplate>(descriptorUpdateTemplate));
+    }
+  };
+
+  //=== VK_VERSION_1_3 ===
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PrivateDataSlot>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PrivateDataSlot const & privateDataSlot) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkPrivateDataSlot>{}(static_cast<VkPrivateDataSlot>(privateDataSlot));
+    }
+  };
+
+  //=== VK_KHR_surface ===
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SurfaceKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SurfaceKHR const & surfaceKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkSurfaceKHR>{}(static_cast<VkSurfaceKHR>(surfaceKHR));
+    }
+  };
+
+  //=== VK_KHR_swapchain ===
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SwapchainKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SwapchainKHR const & swapchainKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkSwapchainKHR>{}(static_cast<VkSwapchainKHR>(swapchainKHR));
+    }
+  };
+
+  //=== VK_KHR_display ===
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DisplayKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DisplayKHR const & displayKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkDisplayKHR>{}(static_cast<VkDisplayKHR>(displayKHR));
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DisplayModeKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DisplayModeKHR const & displayModeKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkDisplayModeKHR>{}(static_cast<VkDisplayModeKHR>(displayModeKHR));
+    }
+  };
+
+  //=== VK_EXT_debug_report ===
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT const & debugReportCallbackEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkDebugReportCallbackEXT>{}(static_cast<VkDebugReportCallbackEXT>(debugReportCallbackEXT));
+    }
+  };
+
+  //=== VK_KHR_video_queue ===
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::VideoSessionKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VideoSessionKHR const & videoSessionKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkVideoSessionKHR>{}(static_cast<VkVideoSessionKHR>(videoSessionKHR));
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR const & videoSessionParametersKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkVideoSessionParametersKHR>{}(static_cast<VkVideoSessionParametersKHR>(videoSessionParametersKHR));
+    }
+  };
+
+  //=== VK_NVX_binary_import ===
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::CuModuleNVX>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::CuModuleNVX const & cuModuleNVX) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkCuModuleNVX>{}(static_cast<VkCuModuleNVX>(cuModuleNVX));
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::CuFunctionNVX>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::CuFunctionNVX const & cuFunctionNVX) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkCuFunctionNVX>{}(static_cast<VkCuFunctionNVX>(cuFunctionNVX));
+    }
+  };
+
+  //=== VK_EXT_debug_utils ===
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT const & debugUtilsMessengerEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkDebugUtilsMessengerEXT>{}(static_cast<VkDebugUtilsMessengerEXT>(debugUtilsMessengerEXT));
+    }
+  };
+
+  //=== VK_KHR_acceleration_structure ===
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::AccelerationStructureKHR const & accelerationStructureKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkAccelerationStructureKHR>{}(static_cast<VkAccelerationStructureKHR>(accelerationStructureKHR));
+    }
+  };
+
+  //=== VK_EXT_validation_cache ===
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ValidationCacheEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ValidationCacheEXT const & validationCacheEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkValidationCacheEXT>{}(static_cast<VkValidationCacheEXT>(validationCacheEXT));
+    }
+  };
+
+  //=== VK_NV_ray_tracing ===
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::AccelerationStructureNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::AccelerationStructureNV const & accelerationStructureNV) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkAccelerationStructureNV>{}(static_cast<VkAccelerationStructureNV>(accelerationStructureNV));
+    }
+  };
+
+  //=== VK_INTEL_performance_query ===
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL const & performanceConfigurationINTEL) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkPerformanceConfigurationINTEL>{}(static_cast<VkPerformanceConfigurationINTEL>(performanceConfigurationINTEL));
+    }
+  };
+
+  //=== VK_KHR_deferred_host_operations ===
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DeferredOperationKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DeferredOperationKHR const & deferredOperationKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkDeferredOperationKHR>{}(static_cast<VkDeferredOperationKHR>(deferredOperationKHR));
+    }
+  };
+
+  //=== VK_NV_device_generated_commands ===
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV const & indirectCommandsLayoutNV) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkIndirectCommandsLayoutNV>{}(static_cast<VkIndirectCommandsLayoutNV>(indirectCommandsLayoutNV));
+    }
+  };
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_buffer_collection ===
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA const & bufferCollectionFUCHSIA) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkBufferCollectionFUCHSIA>{}(static_cast<VkBufferCollectionFUCHSIA>(bufferCollectionFUCHSIA));
+    }
+  };
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  //=== VK_EXT_opacity_micromap ===
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::MicromapEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::MicromapEXT const & micromapEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkMicromapEXT>{}(static_cast<VkMicromapEXT>(micromapEXT));
+    }
+  };
+
+  //=== VK_NV_optical_flow ===
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV const & opticalFlowSessionNV) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkOpticalFlowSessionNV>{}(static_cast<VkOpticalFlowSessionNV>(opticalFlowSessionNV));
+    }
+  };
+
+
+
+#if 14 <= VULKAN_HPP_CPP_VERSION
+  //======================================
+  //=== HASH structures for structures ===
+  //======================================
+
+#  if !defined( VULKAN_HPP_HASH_COMBINE )
+#    define VULKAN_HPP_HASH_COMBINE( seed, value ) \
+      seed ^= std::hash<std::decay<decltype( value )>::type>{}( value ) + 0x9e3779b9 + ( seed << 6 ) + ( seed >> 2 )
+#  endif
+
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::AabbPositionsKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::AabbPositionsKHR const & aabbPositionsKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, aabbPositionsKHR.minX );
+    VULKAN_HPP_HASH_COMBINE( seed, aabbPositionsKHR.minY );
+    VULKAN_HPP_HASH_COMBINE( seed, aabbPositionsKHR.minZ );
+    VULKAN_HPP_HASH_COMBINE( seed, aabbPositionsKHR.maxX );
+    VULKAN_HPP_HASH_COMBINE( seed, aabbPositionsKHR.maxY );
+    VULKAN_HPP_HASH_COMBINE( seed, aabbPositionsKHR.maxZ );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR const & accelerationStructureBuildRangeInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureBuildRangeInfoKHR.primitiveCount );
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureBuildRangeInfoKHR.primitiveOffset );
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureBuildRangeInfoKHR.firstVertex );
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureBuildRangeInfoKHR.transformOffset );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR const & accelerationStructureBuildSizesInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureBuildSizesInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureBuildSizesInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureBuildSizesInfoKHR.accelerationStructureSize );
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureBuildSizesInfoKHR.updateScratchSize );
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureBuildSizesInfoKHR.buildScratchSize );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT const & accelerationStructureCaptureDescriptorDataInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCaptureDescriptorDataInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCaptureDescriptorDataInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCaptureDescriptorDataInfoEXT.accelerationStructure );
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCaptureDescriptorDataInfoEXT.accelerationStructureNV );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR const & accelerationStructureCreateInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCreateInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCreateInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCreateInfoKHR.createFlags );
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCreateInfoKHR.buffer );
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCreateInfoKHR.offset );
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCreateInfoKHR.size );
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCreateInfoKHR.type );
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCreateInfoKHR.deviceAddress );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::GeometryTrianglesNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::GeometryTrianglesNV const & geometryTrianglesNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, geometryTrianglesNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, geometryTrianglesNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, geometryTrianglesNV.vertexData );
+    VULKAN_HPP_HASH_COMBINE( seed, geometryTrianglesNV.vertexOffset );
+    VULKAN_HPP_HASH_COMBINE( seed, geometryTrianglesNV.vertexCount );
+    VULKAN_HPP_HASH_COMBINE( seed, geometryTrianglesNV.vertexStride );
+    VULKAN_HPP_HASH_COMBINE( seed, geometryTrianglesNV.vertexFormat );
+    VULKAN_HPP_HASH_COMBINE( seed, geometryTrianglesNV.indexData );
+    VULKAN_HPP_HASH_COMBINE( seed, geometryTrianglesNV.indexOffset );
+    VULKAN_HPP_HASH_COMBINE( seed, geometryTrianglesNV.indexCount );
+    VULKAN_HPP_HASH_COMBINE( seed, geometryTrianglesNV.indexType );
+    VULKAN_HPP_HASH_COMBINE( seed, geometryTrianglesNV.transformData );
+    VULKAN_HPP_HASH_COMBINE( seed, geometryTrianglesNV.transformOffset );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::GeometryAABBNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::GeometryAABBNV const & geometryAABBNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, geometryAABBNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, geometryAABBNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, geometryAABBNV.aabbData );
+    VULKAN_HPP_HASH_COMBINE( seed, geometryAABBNV.numAABBs );
+    VULKAN_HPP_HASH_COMBINE( seed, geometryAABBNV.stride );
+    VULKAN_HPP_HASH_COMBINE( seed, geometryAABBNV.offset );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::GeometryDataNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::GeometryDataNV const & geometryDataNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, geometryDataNV.triangles );
+    VULKAN_HPP_HASH_COMBINE( seed, geometryDataNV.aabbs );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::GeometryNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::GeometryNV const & geometryNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, geometryNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, geometryNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, geometryNV.geometryType );
+    VULKAN_HPP_HASH_COMBINE( seed, geometryNV.geometry );
+    VULKAN_HPP_HASH_COMBINE( seed, geometryNV.flags );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV const & accelerationStructureInfoNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureInfoNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureInfoNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureInfoNV.type );
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureInfoNV.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureInfoNV.instanceCount );
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureInfoNV.geometryCount );
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureInfoNV.pGeometries );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV const & accelerationStructureCreateInfoNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCreateInfoNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCreateInfoNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCreateInfoNV.compactedSize );
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCreateInfoNV.info );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR const & accelerationStructureDeviceAddressInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureDeviceAddressInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureDeviceAddressInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureDeviceAddressInfoKHR.accelerationStructure );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::TransformMatrixKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::TransformMatrixKHR const & transformMatrixKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    for ( size_t i = 0; i < 3; ++i )
+    {
+      for ( size_t j=0; j < 4; ++j )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, transformMatrixKHR.matrix[i][j] );
+      }
+    }
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR const & accelerationStructureInstanceKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureInstanceKHR.transform );
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureInstanceKHR.instanceCustomIndex );
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureInstanceKHR.mask );
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureInstanceKHR.instanceShaderBindingTableRecordOffset );
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureInstanceKHR.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureInstanceKHR.accelerationStructureReference );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::AccelerationStructureMatrixMotionInstanceNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::AccelerationStructureMatrixMotionInstanceNV const & accelerationStructureMatrixMotionInstanceNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMatrixMotionInstanceNV.transformT0 );
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMatrixMotionInstanceNV.transformT1 );
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMatrixMotionInstanceNV.instanceCustomIndex );
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMatrixMotionInstanceNV.mask );
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMatrixMotionInstanceNV.instanceShaderBindingTableRecordOffset );
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMatrixMotionInstanceNV.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMatrixMotionInstanceNV.accelerationStructureReference );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV const & accelerationStructureMemoryRequirementsInfoNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMemoryRequirementsInfoNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMemoryRequirementsInfoNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMemoryRequirementsInfoNV.type );
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMemoryRequirementsInfoNV.accelerationStructure );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoNV const & accelerationStructureMotionInfoNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMotionInfoNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMotionInfoNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMotionInfoNV.maxInstances );
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMotionInfoNV.flags );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SRTDataNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SRTDataNV const & sRTDataNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, sRTDataNV.sx );
+    VULKAN_HPP_HASH_COMBINE( seed, sRTDataNV.a );
+    VULKAN_HPP_HASH_COMBINE( seed, sRTDataNV.b );
+    VULKAN_HPP_HASH_COMBINE( seed, sRTDataNV.pvx );
+    VULKAN_HPP_HASH_COMBINE( seed, sRTDataNV.sy );
+    VULKAN_HPP_HASH_COMBINE( seed, sRTDataNV.c );
+    VULKAN_HPP_HASH_COMBINE( seed, sRTDataNV.pvy );
+    VULKAN_HPP_HASH_COMBINE( seed, sRTDataNV.sz );
+    VULKAN_HPP_HASH_COMBINE( seed, sRTDataNV.pvz );
+    VULKAN_HPP_HASH_COMBINE( seed, sRTDataNV.qx );
+    VULKAN_HPP_HASH_COMBINE( seed, sRTDataNV.qy );
+    VULKAN_HPP_HASH_COMBINE( seed, sRTDataNV.qz );
+    VULKAN_HPP_HASH_COMBINE( seed, sRTDataNV.qw );
+    VULKAN_HPP_HASH_COMBINE( seed, sRTDataNV.tx );
+    VULKAN_HPP_HASH_COMBINE( seed, sRTDataNV.ty );
+    VULKAN_HPP_HASH_COMBINE( seed, sRTDataNV.tz );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::AccelerationStructureSRTMotionInstanceNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::AccelerationStructureSRTMotionInstanceNV const & accelerationStructureSRTMotionInstanceNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureSRTMotionInstanceNV.transformT0 );
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureSRTMotionInstanceNV.transformT1 );
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureSRTMotionInstanceNV.instanceCustomIndex );
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureSRTMotionInstanceNV.mask );
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureSRTMotionInstanceNV.instanceShaderBindingTableRecordOffset );
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureSRTMotionInstanceNV.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureSRTMotionInstanceNV.accelerationStructureReference );
+      return seed;
+    }
+  };
+
+  // NOTE(review): auto-generated Vulkan-Hpp std::hash specializations.  Each
+  // operator() folds every struct field into `seed` via VULKAN_HPP_HASH_COMBINE.
+  // Pointer members (pNext, pVersionData, the pfn* callbacks) are combined by
+  // address, not by pointed-to contents.  Do not hand-edit; regenerate instead.
+  template <> struct hash<VULKAN_HPP_NAMESPACE::MicromapUsageEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::MicromapUsageEXT const & micromapUsageEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, micromapUsageEXT.count );
+    VULKAN_HPP_HASH_COMBINE( seed, micromapUsageEXT.subdivisionLevel );
+    VULKAN_HPP_HASH_COMBINE( seed, micromapUsageEXT.format );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR const & accelerationStructureVersionInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureVersionInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureVersionInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureVersionInfoKHR.pVersionData );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR const & acquireNextImageInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, acquireNextImageInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, acquireNextImageInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, acquireNextImageInfoKHR.swapchain );
+    VULKAN_HPP_HASH_COMBINE( seed, acquireNextImageInfoKHR.timeout );
+    VULKAN_HPP_HASH_COMBINE( seed, acquireNextImageInfoKHR.semaphore );
+    VULKAN_HPP_HASH_COMBINE( seed, acquireNextImageInfoKHR.fence );
+    VULKAN_HPP_HASH_COMBINE( seed, acquireNextImageInfoKHR.deviceMask );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR const & acquireProfilingLockInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, acquireProfilingLockInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, acquireProfilingLockInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, acquireProfilingLockInfoKHR.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, acquireProfilingLockInfoKHR.timeout );
+      return seed;
+    }
+  };
+
+  // AllocationCallbacks has no sType/pNext; the hash is over the user-data
+  // pointer and the five callback function pointers (by address).
+  template <> struct hash<VULKAN_HPP_NAMESPACE::AllocationCallbacks>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::AllocationCallbacks const & allocationCallbacks) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, allocationCallbacks.pUserData );
+    VULKAN_HPP_HASH_COMBINE( seed, allocationCallbacks.pfnAllocation );
+    VULKAN_HPP_HASH_COMBINE( seed, allocationCallbacks.pfnReallocation );
+    VULKAN_HPP_HASH_COMBINE( seed, allocationCallbacks.pfnFree );
+    VULKAN_HPP_HASH_COMBINE( seed, allocationCallbacks.pfnInternalAllocation );
+    VULKAN_HPP_HASH_COMBINE( seed, allocationCallbacks.pfnInternalFree );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::AmigoProfilingSubmitInfoSEC>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::AmigoProfilingSubmitInfoSEC const & amigoProfilingSubmitInfoSEC) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, amigoProfilingSubmitInfoSEC.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, amigoProfilingSubmitInfoSEC.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, amigoProfilingSubmitInfoSEC.firstDrawTimestamp );
+    VULKAN_HPP_HASH_COMBINE( seed, amigoProfilingSubmitInfoSEC.swapBufferTimestamp );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ComponentMapping>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ComponentMapping const & componentMapping) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, componentMapping.r );
+    VULKAN_HPP_HASH_COMBINE( seed, componentMapping.g );
+    VULKAN_HPP_HASH_COMBINE( seed, componentMapping.b );
+    VULKAN_HPP_HASH_COMBINE( seed, componentMapping.a );
+      return seed;
+    }
+  };
+
+  // NOTE(review): Android-only (VK_USE_PLATFORM_ANDROID_KHR) specializations for
+  // the AHardwareBuffer interop structs and AndroidSurfaceCreateInfoKHR.  Each
+  // one is a field-wise VULKAN_HPP_HASH_COMBINE fold, like the unguarded ones.
+  #if defined( VK_USE_PLATFORM_ANDROID_KHR )
+template <> struct hash<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatProperties2ANDROID>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatProperties2ANDROID const & androidHardwareBufferFormatProperties2ANDROID) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatProperties2ANDROID.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatProperties2ANDROID.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatProperties2ANDROID.format );
+    VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatProperties2ANDROID.externalFormat );
+    VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatProperties2ANDROID.formatFeatures );
+    VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatProperties2ANDROID.samplerYcbcrConversionComponents );
+    VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatProperties2ANDROID.suggestedYcbcrModel );
+    VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatProperties2ANDROID.suggestedYcbcrRange );
+    VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatProperties2ANDROID.suggestedXChromaOffset );
+    VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatProperties2ANDROID.suggestedYChromaOffset );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+  #if defined( VK_USE_PLATFORM_ANDROID_KHR )
+template <> struct hash<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID const & androidHardwareBufferFormatPropertiesANDROID) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatPropertiesANDROID.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatPropertiesANDROID.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatPropertiesANDROID.format );
+    VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatPropertiesANDROID.externalFormat );
+    VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatPropertiesANDROID.formatFeatures );
+    VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatPropertiesANDROID.samplerYcbcrConversionComponents );
+    VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatPropertiesANDROID.suggestedYcbcrModel );
+    VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatPropertiesANDROID.suggestedYcbcrRange );
+    VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatPropertiesANDROID.suggestedXChromaOffset );
+    VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatPropertiesANDROID.suggestedYChromaOffset );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+  #if defined( VK_USE_PLATFORM_ANDROID_KHR )
+template <> struct hash<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID const & androidHardwareBufferPropertiesANDROID) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferPropertiesANDROID.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferPropertiesANDROID.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferPropertiesANDROID.allocationSize );
+    VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferPropertiesANDROID.memoryTypeBits );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+  #if defined( VK_USE_PLATFORM_ANDROID_KHR )
+template <> struct hash<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferUsageANDROID>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::AndroidHardwareBufferUsageANDROID const & androidHardwareBufferUsageANDROID) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferUsageANDROID.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferUsageANDROID.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferUsageANDROID.androidHardwareBufferUsage );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+  #if defined( VK_USE_PLATFORM_ANDROID_KHR )
+template <> struct hash<VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR const & androidSurfaceCreateInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, androidSurfaceCreateInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, androidSurfaceCreateInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, androidSurfaceCreateInfoKHR.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, androidSurfaceCreateInfoKHR.window );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+  // Hashes VkApplicationInfo.  Unlike the other specializations, the two name
+  // members are C strings that are folded in character-by-character (so the
+  // hash depends on string *contents*, not the pointer value).  Per the Vulkan
+  // spec, pApplicationName and pEngineName are optional and may be NULL, so
+  // each loop is guarded: a NULL name simply contributes nothing to the seed
+  // (previously this dereferenced NULL).  Hash values for non-NULL names are
+  // unchanged.
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ApplicationInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ApplicationInfo const & applicationInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, applicationInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, applicationInfo.pNext );
+    if ( applicationInfo.pApplicationName )
+    {
+      for ( const char* p = applicationInfo.pApplicationName; *p != '\0'; ++p )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, *p );
+      }
+    }
+    VULKAN_HPP_HASH_COMBINE( seed, applicationInfo.applicationVersion );
+    if ( applicationInfo.pEngineName )
+    {
+      for ( const char* p = applicationInfo.pEngineName; *p != '\0'; ++p )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, *p );
+      }
+    }
+    VULKAN_HPP_HASH_COMBINE( seed, applicationInfo.engineVersion );
+    VULKAN_HPP_HASH_COMBINE( seed, applicationInfo.apiVersion );
+      return seed;
+    }
+  };
+
+  // NOTE(review): field-wise hash specializations for the render-pass
+  // attachment structs and the small value types they reference (Extent2D,
+  // sample locations, BaseIn/OutStructure).  pSampleLocations and pNext are
+  // hashed by pointer value, not by pointed-to contents.
+  template <> struct hash<VULKAN_HPP_NAMESPACE::AttachmentDescription>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::AttachmentDescription const & attachmentDescription) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription.format );
+    VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription.samples );
+    VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription.loadOp );
+    VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription.storeOp );
+    VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription.stencilLoadOp );
+    VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription.stencilStoreOp );
+    VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription.initialLayout );
+    VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription.finalLayout );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::AttachmentDescription2>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::AttachmentDescription2 const & attachmentDescription2) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription2.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription2.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription2.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription2.format );
+    VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription2.samples );
+    VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription2.loadOp );
+    VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription2.storeOp );
+    VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription2.stencilLoadOp );
+    VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription2.stencilStoreOp );
+    VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription2.initialLayout );
+    VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription2.finalLayout );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayout>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayout const & attachmentDescriptionStencilLayout) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, attachmentDescriptionStencilLayout.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, attachmentDescriptionStencilLayout.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, attachmentDescriptionStencilLayout.stencilInitialLayout );
+    VULKAN_HPP_HASH_COMBINE( seed, attachmentDescriptionStencilLayout.stencilFinalLayout );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::AttachmentReference>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::AttachmentReference const & attachmentReference) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, attachmentReference.attachment );
+    VULKAN_HPP_HASH_COMBINE( seed, attachmentReference.layout );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::AttachmentReference2>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::AttachmentReference2 const & attachmentReference2) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, attachmentReference2.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, attachmentReference2.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, attachmentReference2.attachment );
+    VULKAN_HPP_HASH_COMBINE( seed, attachmentReference2.layout );
+    VULKAN_HPP_HASH_COMBINE( seed, attachmentReference2.aspectMask );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::AttachmentReferenceStencilLayout>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::AttachmentReferenceStencilLayout const & attachmentReferenceStencilLayout) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, attachmentReferenceStencilLayout.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, attachmentReferenceStencilLayout.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, attachmentReferenceStencilLayout.stencilLayout );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::AttachmentSampleCountInfoAMD>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::AttachmentSampleCountInfoAMD const & attachmentSampleCountInfoAMD) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, attachmentSampleCountInfoAMD.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, attachmentSampleCountInfoAMD.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, attachmentSampleCountInfoAMD.colorAttachmentCount );
+    VULKAN_HPP_HASH_COMBINE( seed, attachmentSampleCountInfoAMD.pColorAttachmentSamples );
+    VULKAN_HPP_HASH_COMBINE( seed, attachmentSampleCountInfoAMD.depthStencilAttachmentSamples );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::Extent2D>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::Extent2D const & extent2D) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, extent2D.width );
+    VULKAN_HPP_HASH_COMBINE( seed, extent2D.height );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SampleLocationEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SampleLocationEXT const & sampleLocationEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, sampleLocationEXT.x );
+    VULKAN_HPP_HASH_COMBINE( seed, sampleLocationEXT.y );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, sampleLocationsInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, sampleLocationsInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, sampleLocationsInfoEXT.sampleLocationsPerPixel );
+    VULKAN_HPP_HASH_COMBINE( seed, sampleLocationsInfoEXT.sampleLocationGridSize );
+    VULKAN_HPP_HASH_COMBINE( seed, sampleLocationsInfoEXT.sampleLocationsCount );
+    VULKAN_HPP_HASH_COMBINE( seed, sampleLocationsInfoEXT.pSampleLocations );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT const & attachmentSampleLocationsEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, attachmentSampleLocationsEXT.attachmentIndex );
+    VULKAN_HPP_HASH_COMBINE( seed, attachmentSampleLocationsEXT.sampleLocationsInfo );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::BaseInStructure>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::BaseInStructure const & baseInStructure) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, baseInStructure.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, baseInStructure.pNext );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::BaseOutStructure>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::BaseOutStructure const & baseOutStructure) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, baseOutStructure.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, baseOutStructure.pNext );
+      return seed;
+    }
+  };
+
+  // NOTE(review): field-wise hash specializations for the vkBind*Memory info
+  // structs, the sparse-binding structs, and the geometry helpers they embed
+  // (Offset2D/3D, Extent3D, Rect2D, ImageSubresource).  Array members such as
+  // pDeviceIndices / pBinds are hashed by pointer value only.
+  template <> struct hash<VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV const & bindAccelerationStructureMemoryInfoNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, bindAccelerationStructureMemoryInfoNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, bindAccelerationStructureMemoryInfoNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, bindAccelerationStructureMemoryInfoNV.accelerationStructure );
+    VULKAN_HPP_HASH_COMBINE( seed, bindAccelerationStructureMemoryInfoNV.memory );
+    VULKAN_HPP_HASH_COMBINE( seed, bindAccelerationStructureMemoryInfoNV.memoryOffset );
+    VULKAN_HPP_HASH_COMBINE( seed, bindAccelerationStructureMemoryInfoNV.deviceIndexCount );
+    VULKAN_HPP_HASH_COMBINE( seed, bindAccelerationStructureMemoryInfoNV.pDeviceIndices );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::BindBufferMemoryDeviceGroupInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::BindBufferMemoryDeviceGroupInfo const & bindBufferMemoryDeviceGroupInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, bindBufferMemoryDeviceGroupInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, bindBufferMemoryDeviceGroupInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, bindBufferMemoryDeviceGroupInfo.deviceIndexCount );
+    VULKAN_HPP_HASH_COMBINE( seed, bindBufferMemoryDeviceGroupInfo.pDeviceIndices );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo const & bindBufferMemoryInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, bindBufferMemoryInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, bindBufferMemoryInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, bindBufferMemoryInfo.buffer );
+    VULKAN_HPP_HASH_COMBINE( seed, bindBufferMemoryInfo.memory );
+    VULKAN_HPP_HASH_COMBINE( seed, bindBufferMemoryInfo.memoryOffset );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::Offset2D>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::Offset2D const & offset2D) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, offset2D.x );
+    VULKAN_HPP_HASH_COMBINE( seed, offset2D.y );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::Rect2D>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::Rect2D const & rect2D) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, rect2D.offset );
+    VULKAN_HPP_HASH_COMBINE( seed, rect2D.extent );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::BindImageMemoryDeviceGroupInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::BindImageMemoryDeviceGroupInfo const & bindImageMemoryDeviceGroupInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, bindImageMemoryDeviceGroupInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, bindImageMemoryDeviceGroupInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, bindImageMemoryDeviceGroupInfo.deviceIndexCount );
+    VULKAN_HPP_HASH_COMBINE( seed, bindImageMemoryDeviceGroupInfo.pDeviceIndices );
+    VULKAN_HPP_HASH_COMBINE( seed, bindImageMemoryDeviceGroupInfo.splitInstanceBindRegionCount );
+    VULKAN_HPP_HASH_COMBINE( seed, bindImageMemoryDeviceGroupInfo.pSplitInstanceBindRegions );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::BindImageMemoryInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::BindImageMemoryInfo const & bindImageMemoryInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, bindImageMemoryInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, bindImageMemoryInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, bindImageMemoryInfo.image );
+    VULKAN_HPP_HASH_COMBINE( seed, bindImageMemoryInfo.memory );
+    VULKAN_HPP_HASH_COMBINE( seed, bindImageMemoryInfo.memoryOffset );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::BindImageMemorySwapchainInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::BindImageMemorySwapchainInfoKHR const & bindImageMemorySwapchainInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, bindImageMemorySwapchainInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, bindImageMemorySwapchainInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, bindImageMemorySwapchainInfoKHR.swapchain );
+    VULKAN_HPP_HASH_COMBINE( seed, bindImageMemorySwapchainInfoKHR.imageIndex );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfo const & bindImagePlaneMemoryInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, bindImagePlaneMemoryInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, bindImagePlaneMemoryInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, bindImagePlaneMemoryInfo.planeAspect );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::BindIndexBufferIndirectCommandNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::BindIndexBufferIndirectCommandNV const & bindIndexBufferIndirectCommandNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, bindIndexBufferIndirectCommandNV.bufferAddress );
+    VULKAN_HPP_HASH_COMBINE( seed, bindIndexBufferIndirectCommandNV.size );
+    VULKAN_HPP_HASH_COMBINE( seed, bindIndexBufferIndirectCommandNV.indexType );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::BindShaderGroupIndirectCommandNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::BindShaderGroupIndirectCommandNV const & bindShaderGroupIndirectCommandNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, bindShaderGroupIndirectCommandNV.groupIndex );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SparseMemoryBind>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SparseMemoryBind const & sparseMemoryBind) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, sparseMemoryBind.resourceOffset );
+    VULKAN_HPP_HASH_COMBINE( seed, sparseMemoryBind.size );
+    VULKAN_HPP_HASH_COMBINE( seed, sparseMemoryBind.memory );
+    VULKAN_HPP_HASH_COMBINE( seed, sparseMemoryBind.memoryOffset );
+    VULKAN_HPP_HASH_COMBINE( seed, sparseMemoryBind.flags );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo const & sparseBufferMemoryBindInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, sparseBufferMemoryBindInfo.buffer );
+    VULKAN_HPP_HASH_COMBINE( seed, sparseBufferMemoryBindInfo.bindCount );
+    VULKAN_HPP_HASH_COMBINE( seed, sparseBufferMemoryBindInfo.pBinds );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo const & sparseImageOpaqueMemoryBindInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, sparseImageOpaqueMemoryBindInfo.image );
+    VULKAN_HPP_HASH_COMBINE( seed, sparseImageOpaqueMemoryBindInfo.bindCount );
+    VULKAN_HPP_HASH_COMBINE( seed, sparseImageOpaqueMemoryBindInfo.pBinds );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ImageSubresource>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ImageSubresource const & imageSubresource) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, imageSubresource.aspectMask );
+    VULKAN_HPP_HASH_COMBINE( seed, imageSubresource.mipLevel );
+    VULKAN_HPP_HASH_COMBINE( seed, imageSubresource.arrayLayer );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::Offset3D>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::Offset3D const & offset3D) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, offset3D.x );
+    VULKAN_HPP_HASH_COMBINE( seed, offset3D.y );
+    VULKAN_HPP_HASH_COMBINE( seed, offset3D.z );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::Extent3D>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::Extent3D const & extent3D) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, extent3D.width );
+    VULKAN_HPP_HASH_COMBINE( seed, extent3D.height );
+    VULKAN_HPP_HASH_COMBINE( seed, extent3D.depth );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SparseImageMemoryBind>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SparseImageMemoryBind const & sparseImageMemoryBind) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryBind.subresource );
+    VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryBind.offset );
+    VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryBind.extent );
+    VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryBind.memory );
+    VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryBind.memoryOffset );
+    VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryBind.flags );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo const & sparseImageMemoryBindInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryBindInfo.image );
+    VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryBindInfo.bindCount );
+    VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryBindInfo.pBinds );
+      return seed;
+    }
+  };
+
+  // NOTE(review): hash specializations for BindSparseInfo, the vertex/video
+  // bind structs, and the vkCmdBlitImage2 structs.  ImageBlit2 is the only one
+  // with a value-wise array fold: it iterates its fixed srcOffsets[2] /
+  // dstOffsets[2] arrays element-by-element.
+  template <> struct hash<VULKAN_HPP_NAMESPACE::BindSparseInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::BindSparseInfo const & bindSparseInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, bindSparseInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, bindSparseInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, bindSparseInfo.waitSemaphoreCount );
+    VULKAN_HPP_HASH_COMBINE( seed, bindSparseInfo.pWaitSemaphores );
+    VULKAN_HPP_HASH_COMBINE( seed, bindSparseInfo.bufferBindCount );
+    VULKAN_HPP_HASH_COMBINE( seed, bindSparseInfo.pBufferBinds );
+    VULKAN_HPP_HASH_COMBINE( seed, bindSparseInfo.imageOpaqueBindCount );
+    VULKAN_HPP_HASH_COMBINE( seed, bindSparseInfo.pImageOpaqueBinds );
+    VULKAN_HPP_HASH_COMBINE( seed, bindSparseInfo.imageBindCount );
+    VULKAN_HPP_HASH_COMBINE( seed, bindSparseInfo.pImageBinds );
+    VULKAN_HPP_HASH_COMBINE( seed, bindSparseInfo.signalSemaphoreCount );
+    VULKAN_HPP_HASH_COMBINE( seed, bindSparseInfo.pSignalSemaphores );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::BindVertexBufferIndirectCommandNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::BindVertexBufferIndirectCommandNV const & bindVertexBufferIndirectCommandNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, bindVertexBufferIndirectCommandNV.bufferAddress );
+    VULKAN_HPP_HASH_COMBINE( seed, bindVertexBufferIndirectCommandNV.size );
+    VULKAN_HPP_HASH_COMBINE( seed, bindVertexBufferIndirectCommandNV.stride );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR const & bindVideoSessionMemoryInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, bindVideoSessionMemoryInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, bindVideoSessionMemoryInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, bindVideoSessionMemoryInfoKHR.memoryBindIndex );
+    VULKAN_HPP_HASH_COMBINE( seed, bindVideoSessionMemoryInfoKHR.memory );
+    VULKAN_HPP_HASH_COMBINE( seed, bindVideoSessionMemoryInfoKHR.memoryOffset );
+    VULKAN_HPP_HASH_COMBINE( seed, bindVideoSessionMemoryInfoKHR.memorySize );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ImageSubresourceLayers>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & imageSubresourceLayers) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, imageSubresourceLayers.aspectMask );
+    VULKAN_HPP_HASH_COMBINE( seed, imageSubresourceLayers.mipLevel );
+    VULKAN_HPP_HASH_COMBINE( seed, imageSubresourceLayers.baseArrayLayer );
+    VULKAN_HPP_HASH_COMBINE( seed, imageSubresourceLayers.layerCount );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ImageBlit2>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ImageBlit2 const & imageBlit2) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, imageBlit2.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, imageBlit2.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, imageBlit2.srcSubresource );
+    for ( size_t i = 0; i < 2; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, imageBlit2.srcOffsets[i] );
+    }
+    VULKAN_HPP_HASH_COMBINE( seed, imageBlit2.dstSubresource );
+    for ( size_t i = 0; i < 2; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, imageBlit2.dstOffsets[i] );
+    }
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::BlitImageInfo2>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::BlitImageInfo2 const & blitImageInfo2) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, blitImageInfo2.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, blitImageInfo2.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, blitImageInfo2.srcImage );
+    VULKAN_HPP_HASH_COMBINE( seed, blitImageInfo2.srcImageLayout );
+    VULKAN_HPP_HASH_COMBINE( seed, blitImageInfo2.dstImage );
+    VULKAN_HPP_HASH_COMBINE( seed, blitImageInfo2.dstImageLayout );
+    VULKAN_HPP_HASH_COMBINE( seed, blitImageInfo2.regionCount );
+    VULKAN_HPP_HASH_COMBINE( seed, blitImageInfo2.pRegions );
+    VULKAN_HPP_HASH_COMBINE( seed, blitImageInfo2.filter );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT const & bufferCaptureDescriptorDataInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, bufferCaptureDescriptorDataInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferCaptureDescriptorDataInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferCaptureDescriptorDataInfoEXT.buffer );
+      return seed;
+    }
+  };
+
+  // Fuchsia-only: std::hash for vk::BufferCollectionBufferCreateInfoFUCHSIA,
+  // combining each field in declaration order.
+  #if defined( VK_USE_PLATFORM_FUCHSIA )
+template <> struct hash<VULKAN_HPP_NAMESPACE::BufferCollectionBufferCreateInfoFUCHSIA>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::BufferCollectionBufferCreateInfoFUCHSIA const & bufferCollectionBufferCreateInfoFUCHSIA) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionBufferCreateInfoFUCHSIA.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionBufferCreateInfoFUCHSIA.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionBufferCreateInfoFUCHSIA.collection );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionBufferCreateInfoFUCHSIA.index );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  // Fuchsia-only: std::hash for vk::BufferCollectionConstraintsInfoFUCHSIA,
+  // combining the buffer-count constraint fields in declaration order.
+  #if defined( VK_USE_PLATFORM_FUCHSIA )
+template <> struct hash<VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA const & bufferCollectionConstraintsInfoFUCHSIA) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionConstraintsInfoFUCHSIA.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionConstraintsInfoFUCHSIA.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionConstraintsInfoFUCHSIA.minBufferCount );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionConstraintsInfoFUCHSIA.maxBufferCount );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionConstraintsInfoFUCHSIA.minBufferCountForCamping );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionConstraintsInfoFUCHSIA.minBufferCountForDedicatedSlack );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionConstraintsInfoFUCHSIA.minBufferCountForSharedSlack );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  // Fuchsia-only: std::hash for vk::BufferCollectionCreateInfoFUCHSIA
+  // (sType, pNext and the sysmem collection token handle).
+  #if defined( VK_USE_PLATFORM_FUCHSIA )
+template <> struct hash<VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA const & bufferCollectionCreateInfoFUCHSIA) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionCreateInfoFUCHSIA.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionCreateInfoFUCHSIA.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionCreateInfoFUCHSIA.collectionToken );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  // Fuchsia-only: std::hash for vk::BufferCollectionImageCreateInfoFUCHSIA,
+  // combining each field in declaration order.
+  #if defined( VK_USE_PLATFORM_FUCHSIA )
+template <> struct hash<VULKAN_HPP_NAMESPACE::BufferCollectionImageCreateInfoFUCHSIA>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::BufferCollectionImageCreateInfoFUCHSIA const & bufferCollectionImageCreateInfoFUCHSIA) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionImageCreateInfoFUCHSIA.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionImageCreateInfoFUCHSIA.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionImageCreateInfoFUCHSIA.collection );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionImageCreateInfoFUCHSIA.index );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  // Fuchsia-only: std::hash for vk::SysmemColorSpaceFUCHSIA
+  // (sType, pNext and the sysmem color-space value).
+  #if defined( VK_USE_PLATFORM_FUCHSIA )
+template <> struct hash<VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA const & sysmemColorSpaceFUCHSIA) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, sysmemColorSpaceFUCHSIA.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, sysmemColorSpaceFUCHSIA.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, sysmemColorSpaceFUCHSIA.colorSpace );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  // Fuchsia-only: std::hash for vk::BufferCollectionPropertiesFUCHSIA,
+  // combining every property field in declaration order.
+  #if defined( VK_USE_PLATFORM_FUCHSIA )
+template <> struct hash<VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA const & bufferCollectionPropertiesFUCHSIA) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionPropertiesFUCHSIA.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionPropertiesFUCHSIA.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionPropertiesFUCHSIA.memoryTypeBits );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionPropertiesFUCHSIA.bufferCount );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionPropertiesFUCHSIA.createInfoIndex );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionPropertiesFUCHSIA.sysmemPixelFormat );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionPropertiesFUCHSIA.formatFeatures );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionPropertiesFUCHSIA.sysmemColorSpaceIndex );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionPropertiesFUCHSIA.samplerYcbcrConversionComponents );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionPropertiesFUCHSIA.suggestedYcbcrModel );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionPropertiesFUCHSIA.suggestedYcbcrRange );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionPropertiesFUCHSIA.suggestedXChromaOffset );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionPropertiesFUCHSIA.suggestedYChromaOffset );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  // std::hash for vk::BufferCreateInfo. NOTE(review): pQueueFamilyIndices is
+  // hashed by pointer value, so equal index arrays at different addresses
+  // hash differently.
+  template <> struct hash<VULKAN_HPP_NAMESPACE::BufferCreateInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::BufferCreateInfo const & bufferCreateInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, bufferCreateInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferCreateInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferCreateInfo.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferCreateInfo.size );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferCreateInfo.usage );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferCreateInfo.sharingMode );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferCreateInfo.queueFamilyIndexCount );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferCreateInfo.pQueueFamilyIndices );
+      return seed;
+    }
+  };
+
+  // Fuchsia-only: std::hash for vk::BufferConstraintsInfoFUCHSIA; the nested
+  // createInfo and bufferCollectionConstraints structs are hashed via their
+  // own hash specializations.
+  #if defined( VK_USE_PLATFORM_FUCHSIA )
+template <> struct hash<VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA const & bufferConstraintsInfoFUCHSIA) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, bufferConstraintsInfoFUCHSIA.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferConstraintsInfoFUCHSIA.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferConstraintsInfoFUCHSIA.createInfo );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferConstraintsInfoFUCHSIA.requiredFormatFeatures );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferConstraintsInfoFUCHSIA.bufferCollectionConstraints );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  // std::hash for vk::BufferCopy (no sType/pNext — plain struct):
+  // srcOffset, dstOffset and size.
+  template <> struct hash<VULKAN_HPP_NAMESPACE::BufferCopy>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::BufferCopy const & bufferCopy) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, bufferCopy.srcOffset );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferCopy.dstOffset );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferCopy.size );
+      return seed;
+    }
+  };
+
+  // std::hash for vk::BufferCopy2: the chained variant of BufferCopy,
+  // additionally covering sType and pNext (by pointer value).
+  template <> struct hash<VULKAN_HPP_NAMESPACE::BufferCopy2>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::BufferCopy2 const & bufferCopy2) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, bufferCopy2.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferCopy2.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferCopy2.srcOffset );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferCopy2.dstOffset );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferCopy2.size );
+      return seed;
+    }
+  };
+
+  // std::hash for vk::BufferDeviceAddressCreateInfoEXT
+  // (sType, pNext and the requested deviceAddress).
+  template <> struct hash<VULKAN_HPP_NAMESPACE::BufferDeviceAddressCreateInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::BufferDeviceAddressCreateInfoEXT const & bufferDeviceAddressCreateInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, bufferDeviceAddressCreateInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferDeviceAddressCreateInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferDeviceAddressCreateInfoEXT.deviceAddress );
+      return seed;
+    }
+  };
+
+  // std::hash for vk::BufferDeviceAddressInfo
+  // (sType, pNext and the buffer handle).
+  template <> struct hash<VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo const & bufferDeviceAddressInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, bufferDeviceAddressInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferDeviceAddressInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferDeviceAddressInfo.buffer );
+      return seed;
+    }
+  };
+
+  // std::hash for vk::BufferImageCopy (plain struct, no sType/pNext):
+  // buffer layout fields plus the image subresource/offset/extent.
+  template <> struct hash<VULKAN_HPP_NAMESPACE::BufferImageCopy>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::BufferImageCopy const & bufferImageCopy) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, bufferImageCopy.bufferOffset );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferImageCopy.bufferRowLength );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferImageCopy.bufferImageHeight );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferImageCopy.imageSubresource );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferImageCopy.imageOffset );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferImageCopy.imageExtent );
+      return seed;
+    }
+  };
+
+  // std::hash for vk::BufferImageCopy2: the chained variant of
+  // BufferImageCopy, additionally covering sType and pNext.
+  template <> struct hash<VULKAN_HPP_NAMESPACE::BufferImageCopy2>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::BufferImageCopy2 const & bufferImageCopy2) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, bufferImageCopy2.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferImageCopy2.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferImageCopy2.bufferOffset );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferImageCopy2.bufferRowLength );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferImageCopy2.bufferImageHeight );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferImageCopy2.imageSubresource );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferImageCopy2.imageOffset );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferImageCopy2.imageExtent );
+      return seed;
+    }
+  };
+
+  // std::hash for vk::BufferMemoryBarrier: access masks, queue-family
+  // transfer indices, buffer handle and byte range.
+  template <> struct hash<VULKAN_HPP_NAMESPACE::BufferMemoryBarrier>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::BufferMemoryBarrier const & bufferMemoryBarrier) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier.srcAccessMask );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier.dstAccessMask );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier.srcQueueFamilyIndex );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier.dstQueueFamilyIndex );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier.buffer );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier.offset );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier.size );
+      return seed;
+    }
+  };
+
+  // std::hash for vk::BufferMemoryBarrier2: as BufferMemoryBarrier but with
+  // the synchronization2 srcStageMask/dstStageMask fields included.
+  template <> struct hash<VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2 const & bufferMemoryBarrier2) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier2.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier2.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier2.srcStageMask );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier2.srcAccessMask );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier2.dstStageMask );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier2.dstAccessMask );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier2.srcQueueFamilyIndex );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier2.dstQueueFamilyIndex );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier2.buffer );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier2.offset );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier2.size );
+      return seed;
+    }
+  };
+
+  // std::hash for vk::BufferMemoryRequirementsInfo2
+  // (sType, pNext and the buffer handle).
+  template <> struct hash<VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 const & bufferMemoryRequirementsInfo2) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryRequirementsInfo2.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryRequirementsInfo2.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryRequirementsInfo2.buffer );
+      return seed;
+    }
+  };
+
+  // std::hash for vk::BufferOpaqueCaptureAddressCreateInfo
+  // (sType, pNext and the opaque capture address).
+  template <> struct hash<VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfo const & bufferOpaqueCaptureAddressCreateInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, bufferOpaqueCaptureAddressCreateInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferOpaqueCaptureAddressCreateInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferOpaqueCaptureAddressCreateInfo.opaqueCaptureAddress );
+      return seed;
+    }
+  };
+
+  // std::hash for vk::BufferViewCreateInfo: flags, buffer handle, format
+  // and the viewed byte range (offset, range).
+  template <> struct hash<VULKAN_HPP_NAMESPACE::BufferViewCreateInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::BufferViewCreateInfo const & bufferViewCreateInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, bufferViewCreateInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferViewCreateInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferViewCreateInfo.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferViewCreateInfo.buffer );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferViewCreateInfo.format );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferViewCreateInfo.offset );
+    VULKAN_HPP_HASH_COMBINE( seed, bufferViewCreateInfo.range );
+      return seed;
+    }
+  };
+
+  // std::hash for vk::CalibratedTimestampInfoEXT
+  // (sType, pNext and the time domain).
+  template <> struct hash<VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT const & calibratedTimestampInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, calibratedTimestampInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, calibratedTimestampInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, calibratedTimestampInfoEXT.timeDomain );
+      return seed;
+    }
+  };
+
+  // std::hash for vk::CheckpointData2NV; pCheckpointMarker is combined as a
+  // raw pointer value.
+  template <> struct hash<VULKAN_HPP_NAMESPACE::CheckpointData2NV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::CheckpointData2NV const & checkpointData2NV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, checkpointData2NV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, checkpointData2NV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, checkpointData2NV.stage );
+    VULKAN_HPP_HASH_COMBINE( seed, checkpointData2NV.pCheckpointMarker );
+      return seed;
+    }
+  };
+
+  // std::hash for vk::CheckpointDataNV; same field set as CheckpointData2NV
+  // with the original (non-synchronization2) stage type.
+  template <> struct hash<VULKAN_HPP_NAMESPACE::CheckpointDataNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::CheckpointDataNV const & checkpointDataNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, checkpointDataNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, checkpointDataNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, checkpointDataNV.stage );
+    VULKAN_HPP_HASH_COMBINE( seed, checkpointDataNV.pCheckpointMarker );
+      return seed;
+    }
+  };
+
+  // std::hash for vk::ClearDepthStencilValue (depth float + stencil value).
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ClearDepthStencilValue>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ClearDepthStencilValue const & clearDepthStencilValue) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, clearDepthStencilValue.depth );
+    VULKAN_HPP_HASH_COMBINE( seed, clearDepthStencilValue.stencil );
+      return seed;
+    }
+  };
+
+  // std::hash for vk::ClearRect (2D rect plus the layer range it applies to).
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ClearRect>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ClearRect const & clearRect) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, clearRect.rect );
+    VULKAN_HPP_HASH_COMBINE( seed, clearRect.baseArrayLayer );
+    VULKAN_HPP_HASH_COMBINE( seed, clearRect.layerCount );
+      return seed;
+    }
+  };
+
+  // std::hash for vk::CoarseSampleLocationNV (pixel coordinates + sample index).
+  template <> struct hash<VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV const & coarseSampleLocationNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, coarseSampleLocationNV.pixelX );
+    VULKAN_HPP_HASH_COMBINE( seed, coarseSampleLocationNV.pixelY );
+    VULKAN_HPP_HASH_COMBINE( seed, coarseSampleLocationNV.sample );
+      return seed;
+    }
+  };
+
+  // std::hash for vk::CoarseSampleOrderCustomNV; pSampleLocations is combined
+  // as a raw pointer value, not the pointed-to array.
+  template <> struct hash<VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV const & coarseSampleOrderCustomNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, coarseSampleOrderCustomNV.shadingRate );
+    VULKAN_HPP_HASH_COMBINE( seed, coarseSampleOrderCustomNV.sampleCount );
+    VULKAN_HPP_HASH_COMBINE( seed, coarseSampleOrderCustomNV.sampleLocationCount );
+    VULKAN_HPP_HASH_COMBINE( seed, coarseSampleOrderCustomNV.pSampleLocations );
+      return seed;
+    }
+  };
+
+  // std::hash for vk::ColorBlendAdvancedEXT: advanced blend op, premultiply
+  // flags, overlap mode and clamp setting.
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT const & colorBlendAdvancedEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, colorBlendAdvancedEXT.advancedBlendOp );
+    VULKAN_HPP_HASH_COMBINE( seed, colorBlendAdvancedEXT.srcPremultiplied );
+    VULKAN_HPP_HASH_COMBINE( seed, colorBlendAdvancedEXT.dstPremultiplied );
+    VULKAN_HPP_HASH_COMBINE( seed, colorBlendAdvancedEXT.blendOverlap );
+    VULKAN_HPP_HASH_COMBINE( seed, colorBlendAdvancedEXT.clampResults );
+      return seed;
+    }
+  };
+
+  // std::hash for vk::ColorBlendEquationEXT: the six blend-equation fields
+  // (color and alpha factors plus their ops).
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT const & colorBlendEquationEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, colorBlendEquationEXT.srcColorBlendFactor );
+    VULKAN_HPP_HASH_COMBINE( seed, colorBlendEquationEXT.dstColorBlendFactor );
+    VULKAN_HPP_HASH_COMBINE( seed, colorBlendEquationEXT.colorBlendOp );
+    VULKAN_HPP_HASH_COMBINE( seed, colorBlendEquationEXT.srcAlphaBlendFactor );
+    VULKAN_HPP_HASH_COMBINE( seed, colorBlendEquationEXT.dstAlphaBlendFactor );
+    VULKAN_HPP_HASH_COMBINE( seed, colorBlendEquationEXT.alphaBlendOp );
+      return seed;
+    }
+  };
+
+  // std::hash for vk::CommandBufferAllocateInfo: pool handle, level and count.
+  template <> struct hash<VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo const & commandBufferAllocateInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, commandBufferAllocateInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, commandBufferAllocateInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, commandBufferAllocateInfo.commandPool );
+    VULKAN_HPP_HASH_COMBINE( seed, commandBufferAllocateInfo.level );
+    VULKAN_HPP_HASH_COMBINE( seed, commandBufferAllocateInfo.commandBufferCount );
+      return seed;
+    }
+  };
+
+  // std::hash for vk::CommandBufferInheritanceInfo: render pass, subpass,
+  // framebuffer and the occlusion/pipeline-statistics query settings.
+  template <> struct hash<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo const & commandBufferInheritanceInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceInfo.renderPass );
+    VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceInfo.subpass );
+    VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceInfo.framebuffer );
+    VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceInfo.occlusionQueryEnable );
+    VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceInfo.queryFlags );
+    VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceInfo.pipelineStatistics );
+      return seed;
+    }
+  };
+
+  // std::hash for vk::CommandBufferBeginInfo; pInheritanceInfo is combined
+  // as a raw pointer value, not by hashing the pointed-to struct.
+  template <> struct hash<VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo const & commandBufferBeginInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, commandBufferBeginInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, commandBufferBeginInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, commandBufferBeginInfo.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, commandBufferBeginInfo.pInheritanceInfo );
+      return seed;
+    }
+  };
+
+  // std::hash for vk::CommandBufferInheritanceConditionalRenderingInfoEXT
+  // (sType, pNext and the conditionalRenderingEnable flag).
+  template <> struct hash<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT const & commandBufferInheritanceConditionalRenderingInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceConditionalRenderingInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceConditionalRenderingInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceConditionalRenderingInfoEXT.conditionalRenderingEnable );
+      return seed;
+    }
+  };
+
+  // std::hash for vk::CommandBufferInheritanceRenderPassTransformInfoQCOM
+  // (surface transform and the render area).
+  template <> struct hash<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderPassTransformInfoQCOM>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderPassTransformInfoQCOM const & commandBufferInheritanceRenderPassTransformInfoQCOM) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceRenderPassTransformInfoQCOM.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceRenderPassTransformInfoQCOM.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceRenderPassTransformInfoQCOM.transform );
+    VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceRenderPassTransformInfoQCOM.renderArea );
+      return seed;
+    }
+  };
+
+  // std::hash for vk::CommandBufferInheritanceRenderingInfo. NOTE(review):
+  // pColorAttachmentFormats is hashed by pointer value, not by contents.
+  template <> struct hash<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderingInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderingInfo const & commandBufferInheritanceRenderingInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceRenderingInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceRenderingInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceRenderingInfo.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceRenderingInfo.viewMask );
+    VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceRenderingInfo.colorAttachmentCount );
+    VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceRenderingInfo.pColorAttachmentFormats );
+    VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceRenderingInfo.depthAttachmentFormat );
+    VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceRenderingInfo.stencilAttachmentFormat );
+    VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceRenderingInfo.rasterizationSamples );
+      return seed;
+    }
+  };
+
+  // std::hash for vk::Viewport: the six float fields (origin, size, depth
+  // range). Defined here so the ViewportScissorInfoNV hash below can use it.
+  template <> struct hash<VULKAN_HPP_NAMESPACE::Viewport>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::Viewport const & viewport) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, viewport.x );
+    VULKAN_HPP_HASH_COMBINE( seed, viewport.y );
+    VULKAN_HPP_HASH_COMBINE( seed, viewport.width );
+    VULKAN_HPP_HASH_COMBINE( seed, viewport.height );
+    VULKAN_HPP_HASH_COMBINE( seed, viewport.minDepth );
+    VULKAN_HPP_HASH_COMBINE( seed, viewport.maxDepth );
+      return seed;
+    }
+  };
+
+  // std::hash for vk::CommandBufferInheritanceViewportScissorInfoNV;
+  // pViewportDepths is combined as a raw pointer value.
+  template <> struct hash<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceViewportScissorInfoNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::CommandBufferInheritanceViewportScissorInfoNV const & commandBufferInheritanceViewportScissorInfoNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceViewportScissorInfoNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceViewportScissorInfoNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceViewportScissorInfoNV.viewportScissor2D );
+    VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceViewportScissorInfoNV.viewportDepthCount );
+    VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceViewportScissorInfoNV.pViewportDepths );
+      return seed;
+    }
+  };
+
+  // std::hash for vk::CommandBufferSubmitInfo
+  // (command buffer handle and device mask).
+  template <> struct hash<VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo const & commandBufferSubmitInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, commandBufferSubmitInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, commandBufferSubmitInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, commandBufferSubmitInfo.commandBuffer );
+    VULKAN_HPP_HASH_COMBINE( seed, commandBufferSubmitInfo.deviceMask );
+      return seed;
+    }
+  };
+
+  // std::hash for vk::CommandPoolCreateInfo (flags and queue family index).
+  template <> struct hash<VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo const & commandPoolCreateInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, commandPoolCreateInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, commandPoolCreateInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, commandPoolCreateInfo.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, commandPoolCreateInfo.queueFamilyIndex );
+      return seed;
+    }
+  };
+
+  // std::hash for vk::SpecializationMapEntry (constant ID, offset, size).
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SpecializationMapEntry>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SpecializationMapEntry const & specializationMapEntry) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, specializationMapEntry.constantID );
+    VULKAN_HPP_HASH_COMBINE( seed, specializationMapEntry.offset );
+    VULKAN_HPP_HASH_COMBINE( seed, specializationMapEntry.size );
+      return seed;
+    }
+  };
+
+  // std::hash for vk::SpecializationInfo. NOTE(review): pMapEntries and pData
+  // are hashed by pointer value, so identical specialization payloads at
+  // different addresses hash differently.
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SpecializationInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SpecializationInfo const & specializationInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, specializationInfo.mapEntryCount );
+    VULKAN_HPP_HASH_COMBINE( seed, specializationInfo.pMapEntries );
+    VULKAN_HPP_HASH_COMBINE( seed, specializationInfo.dataSize );
+    VULKAN_HPP_HASH_COMBINE( seed, specializationInfo.pData );
+      return seed;
+    }
+  };
+
+  // std::hash for vk::PipelineShaderStageCreateInfo. Unlike most
+  // specializations here, the entry-point name is hashed character by
+  // character, so two structs whose pName strings are equal but live at
+  // different addresses hash identically; pSpecializationInfo is still
+  // combined as a raw pointer value.
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo const & pipelineShaderStageCreateInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageCreateInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageCreateInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageCreateInfo.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageCreateInfo.stage );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageCreateInfo.module );
+    // Guard against a null pName before walking the string: the Vulkan spec
+    // requires pName to be a valid null-terminated string for pipeline
+    // creation, but this operator() is noexcept and may be handed a
+    // zero-initialized struct; the original loop dereferenced pName
+    // unconditionally, which is undefined behavior in that case.
+    if ( pipelineShaderStageCreateInfo.pName != nullptr )
+    {
+      for ( const char* p = pipelineShaderStageCreateInfo.pName; *p != '\0'; ++p )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, *p );
+      }
+    }
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageCreateInfo.pSpecializationInfo );
+      return seed;
+    }
+  };
+
+  // std::hash for vk::ComputePipelineCreateInfo; the nested stage struct is
+  // hashed via hash<PipelineShaderStageCreateInfo> above.
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo const & computePipelineCreateInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, computePipelineCreateInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, computePipelineCreateInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, computePipelineCreateInfo.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, computePipelineCreateInfo.stage );
+    VULKAN_HPP_HASH_COMBINE( seed, computePipelineCreateInfo.layout );
+    VULKAN_HPP_HASH_COMBINE( seed, computePipelineCreateInfo.basePipelineHandle );
+    VULKAN_HPP_HASH_COMBINE( seed, computePipelineCreateInfo.basePipelineIndex );
+      return seed;
+    }
+  };
+
+  // std::hash for vk::ConditionalRenderingBeginInfoEXT
+  // (buffer handle, offset and flags).
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT const & conditionalRenderingBeginInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, conditionalRenderingBeginInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, conditionalRenderingBeginInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, conditionalRenderingBeginInfoEXT.buffer );
+    VULKAN_HPP_HASH_COMBINE( seed, conditionalRenderingBeginInfoEXT.offset );
+    VULKAN_HPP_HASH_COMBINE( seed, conditionalRenderingBeginInfoEXT.flags );
+      return seed;
+    }
+  };
+
+  // std::hash for vk::ConformanceVersion (major/minor/subminor/patch).
+  // NOTE(review): `major`/`minor` are macros in glibc's <sys/sysmacros.h>;
+  // vulkan.hpp is expected to handle that upstream — verify if this header
+  // is compiled standalone on Linux.
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ConformanceVersion>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ConformanceVersion const & conformanceVersion) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, conformanceVersion.major );
+    VULKAN_HPP_HASH_COMBINE( seed, conformanceVersion.minor );
+    VULKAN_HPP_HASH_COMBINE( seed, conformanceVersion.subminor );
+    VULKAN_HPP_HASH_COMBINE( seed, conformanceVersion.patch );
+      return seed;
+    }
+  };
+
+  // std::hash for vk::CooperativeMatrixPropertiesNV: matrix dimensions
+  // (M/N/K), component types (A/B/C/D) and scope.
+  template <> struct hash<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV const & cooperativeMatrixPropertiesNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesNV.MSize );
+    VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesNV.NSize );
+    VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesNV.KSize );
+    VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesNV.AType );
+    VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesNV.BType );
+    VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesNV.CType );
+    VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesNV.DType );
+    VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesNV.scope );
+      return seed;
+    }
+  };
+
+  // std::hash for vk::CopyAccelerationStructureInfoKHR
+  // (src/dst acceleration-structure handles and copy mode).
+  template <> struct hash<VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR const & copyAccelerationStructureInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, copyAccelerationStructureInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, copyAccelerationStructureInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, copyAccelerationStructureInfoKHR.src );
+    VULKAN_HPP_HASH_COMBINE( seed, copyAccelerationStructureInfoKHR.dst );
+    VULKAN_HPP_HASH_COMBINE( seed, copyAccelerationStructureInfoKHR.mode );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::CopyBufferInfo2>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::CopyBufferInfo2 const & copyBufferInfo2) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, copyBufferInfo2.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, copyBufferInfo2.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, copyBufferInfo2.srcBuffer );
+    VULKAN_HPP_HASH_COMBINE( seed, copyBufferInfo2.dstBuffer );
+    VULKAN_HPP_HASH_COMBINE( seed, copyBufferInfo2.regionCount );
+    VULKAN_HPP_HASH_COMBINE( seed, copyBufferInfo2.pRegions );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 const & copyBufferToImageInfo2) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, copyBufferToImageInfo2.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, copyBufferToImageInfo2.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, copyBufferToImageInfo2.srcBuffer );
+    VULKAN_HPP_HASH_COMBINE( seed, copyBufferToImageInfo2.dstImage );
+    VULKAN_HPP_HASH_COMBINE( seed, copyBufferToImageInfo2.dstImageLayout );
+    VULKAN_HPP_HASH_COMBINE( seed, copyBufferToImageInfo2.regionCount );
+    VULKAN_HPP_HASH_COMBINE( seed, copyBufferToImageInfo2.pRegions );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::CopyCommandTransformInfoQCOM>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::CopyCommandTransformInfoQCOM const & copyCommandTransformInfoQCOM) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, copyCommandTransformInfoQCOM.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, copyCommandTransformInfoQCOM.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, copyCommandTransformInfoQCOM.transform );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::CopyDescriptorSet>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::CopyDescriptorSet const & copyDescriptorSet) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, copyDescriptorSet.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, copyDescriptorSet.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, copyDescriptorSet.srcSet );
+    VULKAN_HPP_HASH_COMBINE( seed, copyDescriptorSet.srcBinding );
+    VULKAN_HPP_HASH_COMBINE( seed, copyDescriptorSet.srcArrayElement );
+    VULKAN_HPP_HASH_COMBINE( seed, copyDescriptorSet.dstSet );
+    VULKAN_HPP_HASH_COMBINE( seed, copyDescriptorSet.dstBinding );
+    VULKAN_HPP_HASH_COMBINE( seed, copyDescriptorSet.dstArrayElement );
+    VULKAN_HPP_HASH_COMBINE( seed, copyDescriptorSet.descriptorCount );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ImageCopy2>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ImageCopy2 const & imageCopy2) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, imageCopy2.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, imageCopy2.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, imageCopy2.srcSubresource );
+    VULKAN_HPP_HASH_COMBINE( seed, imageCopy2.srcOffset );
+    VULKAN_HPP_HASH_COMBINE( seed, imageCopy2.dstSubresource );
+    VULKAN_HPP_HASH_COMBINE( seed, imageCopy2.dstOffset );
+    VULKAN_HPP_HASH_COMBINE( seed, imageCopy2.extent );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::CopyImageInfo2>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::CopyImageInfo2 const & copyImageInfo2) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, copyImageInfo2.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, copyImageInfo2.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, copyImageInfo2.srcImage );
+    VULKAN_HPP_HASH_COMBINE( seed, copyImageInfo2.srcImageLayout );
+    VULKAN_HPP_HASH_COMBINE( seed, copyImageInfo2.dstImage );
+    VULKAN_HPP_HASH_COMBINE( seed, copyImageInfo2.dstImageLayout );
+    VULKAN_HPP_HASH_COMBINE( seed, copyImageInfo2.regionCount );
+    VULKAN_HPP_HASH_COMBINE( seed, copyImageInfo2.pRegions );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 const & copyImageToBufferInfo2) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, copyImageToBufferInfo2.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, copyImageToBufferInfo2.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, copyImageToBufferInfo2.srcImage );
+    VULKAN_HPP_HASH_COMBINE( seed, copyImageToBufferInfo2.srcImageLayout );
+    VULKAN_HPP_HASH_COMBINE( seed, copyImageToBufferInfo2.dstBuffer );
+    VULKAN_HPP_HASH_COMBINE( seed, copyImageToBufferInfo2.regionCount );
+    VULKAN_HPP_HASH_COMBINE( seed, copyImageToBufferInfo2.pRegions );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::CopyMemoryIndirectCommandNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::CopyMemoryIndirectCommandNV const & copyMemoryIndirectCommandNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, copyMemoryIndirectCommandNV.srcAddress );
+    VULKAN_HPP_HASH_COMBINE( seed, copyMemoryIndirectCommandNV.dstAddress );
+    VULKAN_HPP_HASH_COMBINE( seed, copyMemoryIndirectCommandNV.size );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::CopyMemoryToImageIndirectCommandNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::CopyMemoryToImageIndirectCommandNV const & copyMemoryToImageIndirectCommandNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageIndirectCommandNV.srcAddress );
+    VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageIndirectCommandNV.bufferRowLength );
+    VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageIndirectCommandNV.bufferImageHeight );
+    VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageIndirectCommandNV.imageSubresource );
+    VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageIndirectCommandNV.imageOffset );
+    VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageIndirectCommandNV.imageExtent );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT const & copyMicromapInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, copyMicromapInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, copyMicromapInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, copyMicromapInfoEXT.src );
+    VULKAN_HPP_HASH_COMBINE( seed, copyMicromapInfoEXT.dst );
+    VULKAN_HPP_HASH_COMBINE( seed, copyMicromapInfoEXT.mode );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX const & cuFunctionCreateInfoNVX) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, cuFunctionCreateInfoNVX.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, cuFunctionCreateInfoNVX.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, cuFunctionCreateInfoNVX.module );
+    for ( const char* p = cuFunctionCreateInfoNVX.pName; p && ( *p != '\0' ); ++p )  // null-guard: pName is nullptr on a default-constructed struct
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, *p );
+    }
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX const & cuLaunchInfoNVX) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, cuLaunchInfoNVX.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, cuLaunchInfoNVX.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, cuLaunchInfoNVX.function );
+    VULKAN_HPP_HASH_COMBINE( seed, cuLaunchInfoNVX.gridDimX );
+    VULKAN_HPP_HASH_COMBINE( seed, cuLaunchInfoNVX.gridDimY );
+    VULKAN_HPP_HASH_COMBINE( seed, cuLaunchInfoNVX.gridDimZ );
+    VULKAN_HPP_HASH_COMBINE( seed, cuLaunchInfoNVX.blockDimX );
+    VULKAN_HPP_HASH_COMBINE( seed, cuLaunchInfoNVX.blockDimY );
+    VULKAN_HPP_HASH_COMBINE( seed, cuLaunchInfoNVX.blockDimZ );
+    VULKAN_HPP_HASH_COMBINE( seed, cuLaunchInfoNVX.sharedMemBytes );
+    VULKAN_HPP_HASH_COMBINE( seed, cuLaunchInfoNVX.paramCount );
+    VULKAN_HPP_HASH_COMBINE( seed, cuLaunchInfoNVX.pParams );
+    VULKAN_HPP_HASH_COMBINE( seed, cuLaunchInfoNVX.extraCount );
+    VULKAN_HPP_HASH_COMBINE( seed, cuLaunchInfoNVX.pExtras );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX const & cuModuleCreateInfoNVX) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, cuModuleCreateInfoNVX.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, cuModuleCreateInfoNVX.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, cuModuleCreateInfoNVX.dataSize );
+    VULKAN_HPP_HASH_COMBINE( seed, cuModuleCreateInfoNVX.pData );
+      return seed;
+    }
+  };
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  template <> struct hash<VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR const & d3D12FenceSubmitInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, d3D12FenceSubmitInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, d3D12FenceSubmitInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, d3D12FenceSubmitInfoKHR.waitSemaphoreValuesCount );
+    VULKAN_HPP_HASH_COMBINE( seed, d3D12FenceSubmitInfoKHR.pWaitSemaphoreValues );
+    VULKAN_HPP_HASH_COMBINE( seed, d3D12FenceSubmitInfoKHR.signalSemaphoreValuesCount );
+    VULKAN_HPP_HASH_COMBINE( seed, d3D12FenceSubmitInfoKHR.pSignalSemaphoreValues );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT const & debugMarkerMarkerInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, debugMarkerMarkerInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, debugMarkerMarkerInfoEXT.pNext );
+    for ( const char* p = debugMarkerMarkerInfoEXT.pMarkerName; p && ( *p != '\0' ); ++p )  // null-guard: pMarkerName is nullptr on a default-constructed struct
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, *p );
+    }
+    for ( size_t i = 0; i < 4; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, debugMarkerMarkerInfoEXT.color[i] );
+    }
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT const & debugMarkerObjectNameInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, debugMarkerObjectNameInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, debugMarkerObjectNameInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, debugMarkerObjectNameInfoEXT.objectType );
+    VULKAN_HPP_HASH_COMBINE( seed, debugMarkerObjectNameInfoEXT.object );
+    for ( const char* p = debugMarkerObjectNameInfoEXT.pObjectName; p && ( *p != '\0' ); ++p )  // null-guard: pObjectName is nullptr on a default-constructed struct
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, *p );
+    }
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT const & debugMarkerObjectTagInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, debugMarkerObjectTagInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, debugMarkerObjectTagInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, debugMarkerObjectTagInfoEXT.objectType );
+    VULKAN_HPP_HASH_COMBINE( seed, debugMarkerObjectTagInfoEXT.object );
+    VULKAN_HPP_HASH_COMBINE( seed, debugMarkerObjectTagInfoEXT.tagName );
+    VULKAN_HPP_HASH_COMBINE( seed, debugMarkerObjectTagInfoEXT.tagSize );
+    VULKAN_HPP_HASH_COMBINE( seed, debugMarkerObjectTagInfoEXT.pTag );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT const & debugReportCallbackCreateInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, debugReportCallbackCreateInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, debugReportCallbackCreateInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, debugReportCallbackCreateInfoEXT.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, debugReportCallbackCreateInfoEXT.pfnCallback );
+    VULKAN_HPP_HASH_COMBINE( seed, debugReportCallbackCreateInfoEXT.pUserData );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT const & debugUtilsLabelEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, debugUtilsLabelEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, debugUtilsLabelEXT.pNext );
+    for ( const char* p = debugUtilsLabelEXT.pLabelName; p && ( *p != '\0' ); ++p )  // null-guard: pLabelName is nullptr on a default-constructed struct
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, *p );
+    }
+    for ( size_t i = 0; i < 4; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, debugUtilsLabelEXT.color[i] );
+    }
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT const & debugUtilsObjectNameInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, debugUtilsObjectNameInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, debugUtilsObjectNameInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, debugUtilsObjectNameInfoEXT.objectType );
+    VULKAN_HPP_HASH_COMBINE( seed, debugUtilsObjectNameInfoEXT.objectHandle );
+    for ( const char* p = debugUtilsObjectNameInfoEXT.pObjectName; p && ( *p != '\0' ); ++p )  // pObjectName may legally be NULL per VK_EXT_debug_utils; unguarded deref was UB
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, *p );
+    }
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT const & debugUtilsMessengerCallbackDataEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCallbackDataEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCallbackDataEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCallbackDataEXT.flags );
+    for ( const char* p = debugUtilsMessengerCallbackDataEXT.pMessageIdName; p && ( *p != '\0' ); ++p )  // pMessageIdName may legally be NULL per VK_EXT_debug_utils; unguarded deref was UB
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, *p );
+    }
+    VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCallbackDataEXT.messageIdNumber );
+    for ( const char* p = debugUtilsMessengerCallbackDataEXT.pMessage; p && ( *p != '\0' ); ++p )  // null-guard: pMessage is nullptr on a default-constructed struct
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, *p );
+    }
+    VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCallbackDataEXT.queueLabelCount );
+    VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCallbackDataEXT.pQueueLabels );
+    VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCallbackDataEXT.cmdBufLabelCount );
+    VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCallbackDataEXT.pCmdBufLabels );
+    VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCallbackDataEXT.objectCount );
+    VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCallbackDataEXT.pObjects );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT const & debugUtilsMessengerCreateInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCreateInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCreateInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCreateInfoEXT.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCreateInfoEXT.messageSeverity );
+    VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCreateInfoEXT.messageType );
+    VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCreateInfoEXT.pfnUserCallback );
+    VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCreateInfoEXT.pUserData );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT const & debugUtilsObjectTagInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, debugUtilsObjectTagInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, debugUtilsObjectTagInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, debugUtilsObjectTagInfoEXT.objectType );
+    VULKAN_HPP_HASH_COMBINE( seed, debugUtilsObjectTagInfoEXT.objectHandle );
+    VULKAN_HPP_HASH_COMBINE( seed, debugUtilsObjectTagInfoEXT.tagName );
+    VULKAN_HPP_HASH_COMBINE( seed, debugUtilsObjectTagInfoEXT.tagSize );
+    VULKAN_HPP_HASH_COMBINE( seed, debugUtilsObjectTagInfoEXT.pTag );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV const & decompressMemoryRegionNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, decompressMemoryRegionNV.srcAddress );
+    VULKAN_HPP_HASH_COMBINE( seed, decompressMemoryRegionNV.dstAddress );
+    VULKAN_HPP_HASH_COMBINE( seed, decompressMemoryRegionNV.compressedSize );
+    VULKAN_HPP_HASH_COMBINE( seed, decompressMemoryRegionNV.decompressedSize );
+    VULKAN_HPP_HASH_COMBINE( seed, decompressMemoryRegionNV.decompressionMethod );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV const & dedicatedAllocationBufferCreateInfoNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, dedicatedAllocationBufferCreateInfoNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, dedicatedAllocationBufferCreateInfoNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, dedicatedAllocationBufferCreateInfoNV.dedicatedAllocation );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DedicatedAllocationImageCreateInfoNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DedicatedAllocationImageCreateInfoNV const & dedicatedAllocationImageCreateInfoNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, dedicatedAllocationImageCreateInfoNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, dedicatedAllocationImageCreateInfoNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, dedicatedAllocationImageCreateInfoNV.dedicatedAllocation );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV const & dedicatedAllocationMemoryAllocateInfoNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, dedicatedAllocationMemoryAllocateInfoNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, dedicatedAllocationMemoryAllocateInfoNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, dedicatedAllocationMemoryAllocateInfoNV.image );
+    VULKAN_HPP_HASH_COMBINE( seed, dedicatedAllocationMemoryAllocateInfoNV.buffer );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::MemoryBarrier2>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::MemoryBarrier2 const & memoryBarrier2) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, memoryBarrier2.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, memoryBarrier2.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, memoryBarrier2.srcStageMask );
+    VULKAN_HPP_HASH_COMBINE( seed, memoryBarrier2.srcAccessMask );
+    VULKAN_HPP_HASH_COMBINE( seed, memoryBarrier2.dstStageMask );
+    VULKAN_HPP_HASH_COMBINE( seed, memoryBarrier2.dstAccessMask );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ImageSubresourceRange>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & imageSubresourceRange) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, imageSubresourceRange.aspectMask );
+    VULKAN_HPP_HASH_COMBINE( seed, imageSubresourceRange.baseMipLevel );
+    VULKAN_HPP_HASH_COMBINE( seed, imageSubresourceRange.levelCount );
+    VULKAN_HPP_HASH_COMBINE( seed, imageSubresourceRange.baseArrayLayer );
+    VULKAN_HPP_HASH_COMBINE( seed, imageSubresourceRange.layerCount );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2 const & imageMemoryBarrier2) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier2.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier2.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier2.srcStageMask );
+    VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier2.srcAccessMask );
+    VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier2.dstStageMask );
+    VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier2.dstAccessMask );
+    VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier2.oldLayout );
+    VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier2.newLayout );
+    VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier2.srcQueueFamilyIndex );
+    VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier2.dstQueueFamilyIndex );
+    VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier2.image );
+    VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier2.subresourceRange );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DependencyInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DependencyInfo const & dependencyInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, dependencyInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, dependencyInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, dependencyInfo.dependencyFlags );
+    VULKAN_HPP_HASH_COMBINE( seed, dependencyInfo.memoryBarrierCount );
+    VULKAN_HPP_HASH_COMBINE( seed, dependencyInfo.pMemoryBarriers );
+    VULKAN_HPP_HASH_COMBINE( seed, dependencyInfo.bufferMemoryBarrierCount );
+    VULKAN_HPP_HASH_COMBINE( seed, dependencyInfo.pBufferMemoryBarriers );
+    VULKAN_HPP_HASH_COMBINE( seed, dependencyInfo.imageMemoryBarrierCount );
+    VULKAN_HPP_HASH_COMBINE( seed, dependencyInfo.pImageMemoryBarriers );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT const & descriptorAddressInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorAddressInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorAddressInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorAddressInfoEXT.address );
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorAddressInfoEXT.range );
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorAddressInfoEXT.format );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DescriptorBufferBindingInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DescriptorBufferBindingInfoEXT const & descriptorBufferBindingInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorBufferBindingInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorBufferBindingInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorBufferBindingInfoEXT.address );
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorBufferBindingInfoEXT.usage );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DescriptorBufferBindingPushDescriptorBufferHandleEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DescriptorBufferBindingPushDescriptorBufferHandleEXT const & descriptorBufferBindingPushDescriptorBufferHandleEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorBufferBindingPushDescriptorBufferHandleEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorBufferBindingPushDescriptorBufferHandleEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorBufferBindingPushDescriptorBufferHandleEXT.buffer );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DescriptorBufferInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DescriptorBufferInfo const & descriptorBufferInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorBufferInfo.buffer );
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorBufferInfo.offset );
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorBufferInfo.range );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DescriptorImageInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DescriptorImageInfo const & descriptorImageInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorImageInfo.sampler );
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorImageInfo.imageView );
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorImageInfo.imageLayout );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DescriptorPoolSize>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DescriptorPoolSize const & descriptorPoolSize) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorPoolSize.type );
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorPoolSize.descriptorCount );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo const & descriptorPoolCreateInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorPoolCreateInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorPoolCreateInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorPoolCreateInfo.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorPoolCreateInfo.maxSets );
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorPoolCreateInfo.poolSizeCount );
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorPoolCreateInfo.pPoolSizes );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfo const & descriptorPoolInlineUniformBlockCreateInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorPoolInlineUniformBlockCreateInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorPoolInlineUniformBlockCreateInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorPoolInlineUniformBlockCreateInfo.maxInlineUniformBlockBindings );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo const & descriptorSetAllocateInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorSetAllocateInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorSetAllocateInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorSetAllocateInfo.descriptorPool );
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorSetAllocateInfo.descriptorSetCount );
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorSetAllocateInfo.pSetLayouts );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE const & descriptorSetBindingReferenceVALVE) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorSetBindingReferenceVALVE.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorSetBindingReferenceVALVE.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorSetBindingReferenceVALVE.descriptorSetLayout );
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorSetBindingReferenceVALVE.binding );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding const & descriptorSetLayoutBinding) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutBinding.binding );
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutBinding.descriptorType );
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutBinding.descriptorCount );
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutBinding.stageFlags );
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutBinding.pImmutableSamplers );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfo const & descriptorSetLayoutBindingFlagsCreateInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutBindingFlagsCreateInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutBindingFlagsCreateInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutBindingFlagsCreateInfo.bindingCount );
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutBindingFlagsCreateInfo.pBindingFlags );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo const & descriptorSetLayoutCreateInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutCreateInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutCreateInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutCreateInfo.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutCreateInfo.bindingCount );
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutCreateInfo.pBindings );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE const & descriptorSetLayoutHostMappingInfoVALVE) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutHostMappingInfoVALVE.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutHostMappingInfoVALVE.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutHostMappingInfoVALVE.descriptorOffset );
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutHostMappingInfoVALVE.descriptorSize );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport const & descriptorSetLayoutSupport) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutSupport.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutSupport.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutSupport.supported );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfo const & descriptorSetVariableDescriptorCountAllocateInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorSetVariableDescriptorCountAllocateInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorSetVariableDescriptorCountAllocateInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorSetVariableDescriptorCountAllocateInfo.descriptorSetCount );
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorSetVariableDescriptorCountAllocateInfo.pDescriptorCounts );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupport>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupport const & descriptorSetVariableDescriptorCountLayoutSupport) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorSetVariableDescriptorCountLayoutSupport.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorSetVariableDescriptorCountLayoutSupport.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorSetVariableDescriptorCountLayoutSupport.maxVariableDescriptorCount );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry const & descriptorUpdateTemplateEntry) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateEntry.dstBinding );
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateEntry.dstArrayElement );
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateEntry.descriptorCount );
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateEntry.descriptorType );
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateEntry.offset );
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateEntry.stride );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & descriptorUpdateTemplateCreateInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateCreateInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateCreateInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateCreateInfo.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateCreateInfo.descriptorUpdateEntryCount );
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateCreateInfo.pDescriptorUpdateEntries );
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateCreateInfo.templateType );
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateCreateInfo.descriptorSetLayout );
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateCreateInfo.pipelineBindPoint );
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateCreateInfo.pipelineLayout );
+    VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateCreateInfo.set );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DeviceAddressBindingCallbackDataEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DeviceAddressBindingCallbackDataEXT const & deviceAddressBindingCallbackDataEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, deviceAddressBindingCallbackDataEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceAddressBindingCallbackDataEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceAddressBindingCallbackDataEXT.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceAddressBindingCallbackDataEXT.baseAddress );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceAddressBindingCallbackDataEXT.size );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceAddressBindingCallbackDataEXT.bindingType );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements const & deviceBufferMemoryRequirements) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, deviceBufferMemoryRequirements.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceBufferMemoryRequirements.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceBufferMemoryRequirements.pCreateInfo );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo const & deviceQueueCreateInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, deviceQueueCreateInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceQueueCreateInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceQueueCreateInfo.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceQueueCreateInfo.queueFamilyIndex );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceQueueCreateInfo.queueCount );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceQueueCreateInfo.pQueuePriorities );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures const & physicalDeviceFeatures) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.robustBufferAccess );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.fullDrawIndexUint32 );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.imageCubeArray );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.independentBlend );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.geometryShader );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.tessellationShader );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.sampleRateShading );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.dualSrcBlend );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.logicOp );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.multiDrawIndirect );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.drawIndirectFirstInstance );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.depthClamp );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.depthBiasClamp );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.fillModeNonSolid );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.depthBounds );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.wideLines );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.largePoints );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.alphaToOne );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.multiViewport );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.samplerAnisotropy );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.textureCompressionETC2 );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.textureCompressionASTC_LDR );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.textureCompressionBC );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.occlusionQueryPrecise );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.pipelineStatisticsQuery );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.vertexPipelineStoresAndAtomics );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.fragmentStoresAndAtomics );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderTessellationAndGeometryPointSize );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderImageGatherExtended );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderStorageImageExtendedFormats );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderStorageImageMultisample );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderStorageImageReadWithoutFormat );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderStorageImageWriteWithoutFormat );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderUniformBufferArrayDynamicIndexing );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderSampledImageArrayDynamicIndexing );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderStorageBufferArrayDynamicIndexing );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderStorageImageArrayDynamicIndexing );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderClipDistance );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderCullDistance );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderFloat64 );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderInt64 );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderInt16 );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderResourceResidency );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderResourceMinLod );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.sparseBinding );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.sparseResidencyBuffer );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.sparseResidencyImage2D );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.sparseResidencyImage3D );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.sparseResidency2Samples );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.sparseResidency4Samples );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.sparseResidency8Samples );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.sparseResidency16Samples );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.sparseResidencyAliased );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.variableMultisampleRate );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.inheritedQueries );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DeviceCreateInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DeviceCreateInfo const & deviceCreateInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, deviceCreateInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceCreateInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceCreateInfo.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceCreateInfo.queueCreateInfoCount );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceCreateInfo.pQueueCreateInfos );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceCreateInfo.enabledLayerCount );
+    for ( size_t i = 0; i < deviceCreateInfo.enabledLayerCount; ++i )
+    {
+        for ( const char* p = deviceCreateInfo.ppEnabledLayerNames[i]; *p != '\0'; ++p )
+        {
+          VULKAN_HPP_HASH_COMBINE( seed, *p );
+        }
+    }
+    VULKAN_HPP_HASH_COMBINE( seed, deviceCreateInfo.enabledExtensionCount );
+    for ( size_t i = 0; i < deviceCreateInfo.enabledExtensionCount; ++i )
+    {
+        for ( const char* p = deviceCreateInfo.ppEnabledExtensionNames[i]; *p != '\0'; ++p )
+        {
+          VULKAN_HPP_HASH_COMBINE( seed, *p );
+        }
+    }
+    VULKAN_HPP_HASH_COMBINE( seed, deviceCreateInfo.pEnabledFeatures );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DeviceDeviceMemoryReportCreateInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DeviceDeviceMemoryReportCreateInfoEXT const & deviceDeviceMemoryReportCreateInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, deviceDeviceMemoryReportCreateInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceDeviceMemoryReportCreateInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceDeviceMemoryReportCreateInfoEXT.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceDeviceMemoryReportCreateInfoEXT.pfnUserCallback );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceDeviceMemoryReportCreateInfoEXT.pUserData );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigCreateInfoNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigCreateInfoNV const & deviceDiagnosticsConfigCreateInfoNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, deviceDiagnosticsConfigCreateInfoNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceDiagnosticsConfigCreateInfoNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceDiagnosticsConfigCreateInfoNV.flags );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const & deviceEventInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, deviceEventInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceEventInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceEventInfoEXT.deviceEvent );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT const & deviceFaultAddressInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, deviceFaultAddressInfoEXT.addressType );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceFaultAddressInfoEXT.reportedAddress );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceFaultAddressInfoEXT.addressPrecision );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT const & deviceFaultCountsEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, deviceFaultCountsEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceFaultCountsEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceFaultCountsEXT.addressInfoCount );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceFaultCountsEXT.vendorInfoCount );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceFaultCountsEXT.vendorBinarySize );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT const & deviceFaultVendorInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    for ( size_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorInfoEXT.description[i] );
+    }
+    VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorInfoEXT.vendorFaultCode );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorInfoEXT.vendorFaultData );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT const & deviceFaultInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, deviceFaultInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceFaultInfoEXT.pNext );
+    for ( size_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, deviceFaultInfoEXT.description[i] );
+    }
+    VULKAN_HPP_HASH_COMBINE( seed, deviceFaultInfoEXT.pAddressInfos );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceFaultInfoEXT.pVendorInfos );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceFaultInfoEXT.pVendorBinaryData );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionOneEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionOneEXT const & deviceFaultVendorBinaryHeaderVersionOneEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorBinaryHeaderVersionOneEXT.headerSize );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorBinaryHeaderVersionOneEXT.headerVersion );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorBinaryHeaderVersionOneEXT.vendorID );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorBinaryHeaderVersionOneEXT.deviceID );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorBinaryHeaderVersionOneEXT.driverVersion );
+    for ( size_t i = 0; i < VK_UUID_SIZE; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorBinaryHeaderVersionOneEXT.pipelineCacheUUID[i] );
+    }
+    VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorBinaryHeaderVersionOneEXT.applicationNameOffset );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorBinaryHeaderVersionOneEXT.applicationVersion );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorBinaryHeaderVersionOneEXT.engineNameOffset );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfo const & deviceGroupBindSparseInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, deviceGroupBindSparseInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceGroupBindSparseInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceGroupBindSparseInfo.resourceDeviceIndex );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceGroupBindSparseInfo.memoryDeviceIndex );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfo const & deviceGroupCommandBufferBeginInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, deviceGroupCommandBufferBeginInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceGroupCommandBufferBeginInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceGroupCommandBufferBeginInfo.deviceMask );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfo const & deviceGroupDeviceCreateInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, deviceGroupDeviceCreateInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceGroupDeviceCreateInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceGroupDeviceCreateInfo.physicalDeviceCount );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceGroupDeviceCreateInfo.pPhysicalDevices );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR const & deviceGroupPresentCapabilitiesKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, deviceGroupPresentCapabilitiesKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceGroupPresentCapabilitiesKHR.pNext );
+    for ( size_t i = 0; i < VK_MAX_DEVICE_GROUP_SIZE; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, deviceGroupPresentCapabilitiesKHR.presentMask[i] );
+    }
+    VULKAN_HPP_HASH_COMBINE( seed, deviceGroupPresentCapabilitiesKHR.modes );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DeviceGroupPresentInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DeviceGroupPresentInfoKHR const & deviceGroupPresentInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, deviceGroupPresentInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceGroupPresentInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceGroupPresentInfoKHR.swapchainCount );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceGroupPresentInfoKHR.pDeviceMasks );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceGroupPresentInfoKHR.mode );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DeviceGroupRenderPassBeginInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DeviceGroupRenderPassBeginInfo const & deviceGroupRenderPassBeginInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, deviceGroupRenderPassBeginInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceGroupRenderPassBeginInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceGroupRenderPassBeginInfo.deviceMask );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceGroupRenderPassBeginInfo.deviceRenderAreaCount );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceGroupRenderPassBeginInfo.pDeviceRenderAreas );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DeviceGroupSubmitInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DeviceGroupSubmitInfo const & deviceGroupSubmitInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, deviceGroupSubmitInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceGroupSubmitInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceGroupSubmitInfo.waitSemaphoreCount );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceGroupSubmitInfo.pWaitSemaphoreDeviceIndices );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceGroupSubmitInfo.commandBufferCount );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceGroupSubmitInfo.pCommandBufferDeviceMasks );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceGroupSubmitInfo.signalSemaphoreCount );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceGroupSubmitInfo.pSignalSemaphoreDeviceIndices );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DeviceGroupSwapchainCreateInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DeviceGroupSwapchainCreateInfoKHR const & deviceGroupSwapchainCreateInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, deviceGroupSwapchainCreateInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceGroupSwapchainCreateInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceGroupSwapchainCreateInfoKHR.modes );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ImageCreateInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ImageCreateInfo const & imageCreateInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, imageCreateInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, imageCreateInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, imageCreateInfo.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, imageCreateInfo.imageType );
+    VULKAN_HPP_HASH_COMBINE( seed, imageCreateInfo.format );
+    VULKAN_HPP_HASH_COMBINE( seed, imageCreateInfo.extent );
+    VULKAN_HPP_HASH_COMBINE( seed, imageCreateInfo.mipLevels );
+    VULKAN_HPP_HASH_COMBINE( seed, imageCreateInfo.arrayLayers );
+    VULKAN_HPP_HASH_COMBINE( seed, imageCreateInfo.samples );
+    VULKAN_HPP_HASH_COMBINE( seed, imageCreateInfo.tiling );
+    VULKAN_HPP_HASH_COMBINE( seed, imageCreateInfo.usage );
+    VULKAN_HPP_HASH_COMBINE( seed, imageCreateInfo.sharingMode );
+    VULKAN_HPP_HASH_COMBINE( seed, imageCreateInfo.queueFamilyIndexCount );
+    VULKAN_HPP_HASH_COMBINE( seed, imageCreateInfo.pQueueFamilyIndices );
+    VULKAN_HPP_HASH_COMBINE( seed, imageCreateInfo.initialLayout );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements const & deviceImageMemoryRequirements) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, deviceImageMemoryRequirements.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceImageMemoryRequirements.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceImageMemoryRequirements.pCreateInfo );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceImageMemoryRequirements.planeAspect );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo const & deviceMemoryOpaqueCaptureAddressInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, deviceMemoryOpaqueCaptureAddressInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceMemoryOpaqueCaptureAddressInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceMemoryOpaqueCaptureAddressInfo.memory );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DeviceMemoryOverallocationCreateInfoAMD>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DeviceMemoryOverallocationCreateInfoAMD const & deviceMemoryOverallocationCreateInfoAMD) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, deviceMemoryOverallocationCreateInfoAMD.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceMemoryOverallocationCreateInfoAMD.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceMemoryOverallocationCreateInfoAMD.overallocationBehavior );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DeviceMemoryReportCallbackDataEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DeviceMemoryReportCallbackDataEXT const & deviceMemoryReportCallbackDataEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, deviceMemoryReportCallbackDataEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceMemoryReportCallbackDataEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceMemoryReportCallbackDataEXT.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceMemoryReportCallbackDataEXT.type );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceMemoryReportCallbackDataEXT.memoryObjectId );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceMemoryReportCallbackDataEXT.size );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceMemoryReportCallbackDataEXT.objectType );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceMemoryReportCallbackDataEXT.objectHandle );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceMemoryReportCallbackDataEXT.heapIndex );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DevicePrivateDataCreateInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DevicePrivateDataCreateInfo const & devicePrivateDataCreateInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, devicePrivateDataCreateInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, devicePrivateDataCreateInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, devicePrivateDataCreateInfo.privateDataSlotRequestCount );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoKHR const & deviceQueueGlobalPriorityCreateInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, deviceQueueGlobalPriorityCreateInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceQueueGlobalPriorityCreateInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceQueueGlobalPriorityCreateInfoKHR.globalPriority );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DeviceQueueInfo2>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 const & deviceQueueInfo2) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, deviceQueueInfo2.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceQueueInfo2.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceQueueInfo2.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceQueueInfo2.queueFamilyIndex );
+    VULKAN_HPP_HASH_COMBINE( seed, deviceQueueInfo2.queueIndex );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DirectDriverLoadingInfoLUNARG>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DirectDriverLoadingInfoLUNARG const & directDriverLoadingInfoLUNARG) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, directDriverLoadingInfoLUNARG.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, directDriverLoadingInfoLUNARG.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, directDriverLoadingInfoLUNARG.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, directDriverLoadingInfoLUNARG.pfnGetInstanceProcAddr );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DirectDriverLoadingListLUNARG>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DirectDriverLoadingListLUNARG const & directDriverLoadingListLUNARG) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, directDriverLoadingListLUNARG.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, directDriverLoadingListLUNARG.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, directDriverLoadingListLUNARG.mode );
+    VULKAN_HPP_HASH_COMBINE( seed, directDriverLoadingListLUNARG.driverCount );
+    VULKAN_HPP_HASH_COMBINE( seed, directDriverLoadingListLUNARG.pDrivers );
+      return seed;
+    }
+  };
+
+  #if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
+template <> struct hash<VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT const & directFBSurfaceCreateInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, directFBSurfaceCreateInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, directFBSurfaceCreateInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, directFBSurfaceCreateInfoEXT.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, directFBSurfaceCreateInfoEXT.dfb );
+    VULKAN_HPP_HASH_COMBINE( seed, directFBSurfaceCreateInfoEXT.surface );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DispatchIndirectCommand>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DispatchIndirectCommand const & dispatchIndirectCommand) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, dispatchIndirectCommand.x );
+    VULKAN_HPP_HASH_COMBINE( seed, dispatchIndirectCommand.y );
+    VULKAN_HPP_HASH_COMBINE( seed, dispatchIndirectCommand.z );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT const & displayEventInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, displayEventInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, displayEventInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, displayEventInfoEXT.displayEvent );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR const & displayModeParametersKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, displayModeParametersKHR.visibleRegion );
+    VULKAN_HPP_HASH_COMBINE( seed, displayModeParametersKHR.refreshRate );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR const & displayModeCreateInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, displayModeCreateInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, displayModeCreateInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, displayModeCreateInfoKHR.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, displayModeCreateInfoKHR.parameters );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR const & displayModePropertiesKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, displayModePropertiesKHR.displayMode );
+    VULKAN_HPP_HASH_COMBINE( seed, displayModePropertiesKHR.parameters );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR const & displayModeProperties2KHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, displayModeProperties2KHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, displayModeProperties2KHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, displayModeProperties2KHR.displayModeProperties );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DisplayNativeHdrSurfaceCapabilitiesAMD>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DisplayNativeHdrSurfaceCapabilitiesAMD const & displayNativeHdrSurfaceCapabilitiesAMD) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, displayNativeHdrSurfaceCapabilitiesAMD.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, displayNativeHdrSurfaceCapabilitiesAMD.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, displayNativeHdrSurfaceCapabilitiesAMD.localDimmingSupport );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR const & displayPlaneCapabilitiesKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, displayPlaneCapabilitiesKHR.supportedAlpha );
+    VULKAN_HPP_HASH_COMBINE( seed, displayPlaneCapabilitiesKHR.minSrcPosition );
+    VULKAN_HPP_HASH_COMBINE( seed, displayPlaneCapabilitiesKHR.maxSrcPosition );
+    VULKAN_HPP_HASH_COMBINE( seed, displayPlaneCapabilitiesKHR.minSrcExtent );
+    VULKAN_HPP_HASH_COMBINE( seed, displayPlaneCapabilitiesKHR.maxSrcExtent );
+    VULKAN_HPP_HASH_COMBINE( seed, displayPlaneCapabilitiesKHR.minDstPosition );
+    VULKAN_HPP_HASH_COMBINE( seed, displayPlaneCapabilitiesKHR.maxDstPosition );
+    VULKAN_HPP_HASH_COMBINE( seed, displayPlaneCapabilitiesKHR.minDstExtent );
+    VULKAN_HPP_HASH_COMBINE( seed, displayPlaneCapabilitiesKHR.maxDstExtent );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR const & displayPlaneCapabilities2KHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, displayPlaneCapabilities2KHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, displayPlaneCapabilities2KHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, displayPlaneCapabilities2KHR.capabilities );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR const & displayPlaneInfo2KHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, displayPlaneInfo2KHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, displayPlaneInfo2KHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, displayPlaneInfo2KHR.mode );
+    VULKAN_HPP_HASH_COMBINE( seed, displayPlaneInfo2KHR.planeIndex );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR const & displayPlanePropertiesKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, displayPlanePropertiesKHR.currentDisplay );
+    VULKAN_HPP_HASH_COMBINE( seed, displayPlanePropertiesKHR.currentStackIndex );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR const & displayPlaneProperties2KHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, displayPlaneProperties2KHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, displayPlaneProperties2KHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, displayPlaneProperties2KHR.displayPlaneProperties );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT const & displayPowerInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, displayPowerInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, displayPowerInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, displayPowerInfoEXT.powerState );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DisplayPresentInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DisplayPresentInfoKHR const & displayPresentInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, displayPresentInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, displayPresentInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, displayPresentInfoKHR.srcRect );
+    VULKAN_HPP_HASH_COMBINE( seed, displayPresentInfoKHR.dstRect );
+    VULKAN_HPP_HASH_COMBINE( seed, displayPresentInfoKHR.persistent );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR const & displayPropertiesKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, displayPropertiesKHR.display );
+    for ( const char* p = displayPropertiesKHR.displayName; *p != '\0'; ++p )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, *p );
+    }
+    VULKAN_HPP_HASH_COMBINE( seed, displayPropertiesKHR.physicalDimensions );
+    VULKAN_HPP_HASH_COMBINE( seed, displayPropertiesKHR.physicalResolution );
+    VULKAN_HPP_HASH_COMBINE( seed, displayPropertiesKHR.supportedTransforms );
+    VULKAN_HPP_HASH_COMBINE( seed, displayPropertiesKHR.planeReorderPossible );
+    VULKAN_HPP_HASH_COMBINE( seed, displayPropertiesKHR.persistentContent );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DisplayProperties2KHR const & displayProperties2KHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, displayProperties2KHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, displayProperties2KHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, displayProperties2KHR.displayProperties );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR const & displaySurfaceCreateInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, displaySurfaceCreateInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, displaySurfaceCreateInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, displaySurfaceCreateInfoKHR.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, displaySurfaceCreateInfoKHR.displayMode );
+    VULKAN_HPP_HASH_COMBINE( seed, displaySurfaceCreateInfoKHR.planeIndex );
+    VULKAN_HPP_HASH_COMBINE( seed, displaySurfaceCreateInfoKHR.planeStackIndex );
+    VULKAN_HPP_HASH_COMBINE( seed, displaySurfaceCreateInfoKHR.transform );
+    VULKAN_HPP_HASH_COMBINE( seed, displaySurfaceCreateInfoKHR.globalAlpha );
+    VULKAN_HPP_HASH_COMBINE( seed, displaySurfaceCreateInfoKHR.alphaMode );
+    VULKAN_HPP_HASH_COMBINE( seed, displaySurfaceCreateInfoKHR.imageExtent );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DrawIndexedIndirectCommand>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DrawIndexedIndirectCommand const & drawIndexedIndirectCommand) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, drawIndexedIndirectCommand.indexCount );
+    VULKAN_HPP_HASH_COMBINE( seed, drawIndexedIndirectCommand.instanceCount );
+    VULKAN_HPP_HASH_COMBINE( seed, drawIndexedIndirectCommand.firstIndex );
+    VULKAN_HPP_HASH_COMBINE( seed, drawIndexedIndirectCommand.vertexOffset );
+    VULKAN_HPP_HASH_COMBINE( seed, drawIndexedIndirectCommand.firstInstance );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DrawIndirectCommand>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DrawIndirectCommand const & drawIndirectCommand) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, drawIndirectCommand.vertexCount );
+    VULKAN_HPP_HASH_COMBINE( seed, drawIndirectCommand.instanceCount );
+    VULKAN_HPP_HASH_COMBINE( seed, drawIndirectCommand.firstVertex );
+    VULKAN_HPP_HASH_COMBINE( seed, drawIndirectCommand.firstInstance );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandEXT const & drawMeshTasksIndirectCommandEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, drawMeshTasksIndirectCommandEXT.groupCountX );
+    VULKAN_HPP_HASH_COMBINE( seed, drawMeshTasksIndirectCommandEXT.groupCountY );
+    VULKAN_HPP_HASH_COMBINE( seed, drawMeshTasksIndirectCommandEXT.groupCountZ );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandNV const & drawMeshTasksIndirectCommandNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, drawMeshTasksIndirectCommandNV.taskCount );
+    VULKAN_HPP_HASH_COMBINE( seed, drawMeshTasksIndirectCommandNV.firstTask );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT const & drmFormatModifierProperties2EXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, drmFormatModifierProperties2EXT.drmFormatModifier );
+    VULKAN_HPP_HASH_COMBINE( seed, drmFormatModifierProperties2EXT.drmFormatModifierPlaneCount );
+    VULKAN_HPP_HASH_COMBINE( seed, drmFormatModifierProperties2EXT.drmFormatModifierTilingFeatures );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT const & drmFormatModifierPropertiesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, drmFormatModifierPropertiesEXT.drmFormatModifier );
+    VULKAN_HPP_HASH_COMBINE( seed, drmFormatModifierPropertiesEXT.drmFormatModifierPlaneCount );
+    VULKAN_HPP_HASH_COMBINE( seed, drmFormatModifierPropertiesEXT.drmFormatModifierTilingFeatures );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesList2EXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesList2EXT const & drmFormatModifierPropertiesList2EXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, drmFormatModifierPropertiesList2EXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, drmFormatModifierPropertiesList2EXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, drmFormatModifierPropertiesList2EXT.drmFormatModifierCount );
+    VULKAN_HPP_HASH_COMBINE( seed, drmFormatModifierPropertiesList2EXT.pDrmFormatModifierProperties );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT const & drmFormatModifierPropertiesListEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, drmFormatModifierPropertiesListEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, drmFormatModifierPropertiesListEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, drmFormatModifierPropertiesListEXT.drmFormatModifierCount );
+    VULKAN_HPP_HASH_COMBINE( seed, drmFormatModifierPropertiesListEXT.pDrmFormatModifierProperties );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::EventCreateInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::EventCreateInfo const & eventCreateInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, eventCreateInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, eventCreateInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, eventCreateInfo.flags );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo const & exportFenceCreateInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, exportFenceCreateInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, exportFenceCreateInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, exportFenceCreateInfo.handleTypes );
+      return seed;
+    }
+  };
+
+  #if defined( VK_USE_PLATFORM_WIN32_KHR )
+template <> struct hash<VULKAN_HPP_NAMESPACE::ExportFenceWin32HandleInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ExportFenceWin32HandleInfoKHR const & exportFenceWin32HandleInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, exportFenceWin32HandleInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, exportFenceWin32HandleInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, exportFenceWin32HandleInfoKHR.pAttributes );
+    VULKAN_HPP_HASH_COMBINE( seed, exportFenceWin32HandleInfoKHR.dwAccess );
+    VULKAN_HPP_HASH_COMBINE( seed, exportFenceWin32HandleInfoKHR.name );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfo const & exportMemoryAllocateInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, exportMemoryAllocateInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, exportMemoryAllocateInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, exportMemoryAllocateInfo.handleTypes );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfoNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfoNV const & exportMemoryAllocateInfoNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, exportMemoryAllocateInfoNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, exportMemoryAllocateInfoNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, exportMemoryAllocateInfoNV.handleTypes );
+      return seed;
+    }
+  };
+
+  #if defined( VK_USE_PLATFORM_WIN32_KHR )
+template <> struct hash<VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoKHR const & exportMemoryWin32HandleInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, exportMemoryWin32HandleInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, exportMemoryWin32HandleInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, exportMemoryWin32HandleInfoKHR.pAttributes );
+    VULKAN_HPP_HASH_COMBINE( seed, exportMemoryWin32HandleInfoKHR.dwAccess );
+    VULKAN_HPP_HASH_COMBINE( seed, exportMemoryWin32HandleInfoKHR.name );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  #if defined( VK_USE_PLATFORM_WIN32_KHR )
+template <> struct hash<VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoNV const & exportMemoryWin32HandleInfoNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, exportMemoryWin32HandleInfoNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, exportMemoryWin32HandleInfoNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, exportMemoryWin32HandleInfoNV.pAttributes );
+    VULKAN_HPP_HASH_COMBINE( seed, exportMemoryWin32HandleInfoNV.dwAccess );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  #if defined( VK_USE_PLATFORM_METAL_EXT )
+template <> struct hash<VULKAN_HPP_NAMESPACE::ExportMetalBufferInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ExportMetalBufferInfoEXT const & exportMetalBufferInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, exportMetalBufferInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, exportMetalBufferInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, exportMetalBufferInfoEXT.memory );
+    VULKAN_HPP_HASH_COMBINE( seed, exportMetalBufferInfoEXT.mtlBuffer );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  #if defined( VK_USE_PLATFORM_METAL_EXT )
+template <> struct hash<VULKAN_HPP_NAMESPACE::ExportMetalCommandQueueInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ExportMetalCommandQueueInfoEXT const & exportMetalCommandQueueInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, exportMetalCommandQueueInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, exportMetalCommandQueueInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, exportMetalCommandQueueInfoEXT.queue );
+    VULKAN_HPP_HASH_COMBINE( seed, exportMetalCommandQueueInfoEXT.mtlCommandQueue );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  #if defined( VK_USE_PLATFORM_METAL_EXT )
+template <> struct hash<VULKAN_HPP_NAMESPACE::ExportMetalDeviceInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ExportMetalDeviceInfoEXT const & exportMetalDeviceInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, exportMetalDeviceInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, exportMetalDeviceInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, exportMetalDeviceInfoEXT.mtlDevice );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  #if defined( VK_USE_PLATFORM_METAL_EXT )
+template <> struct hash<VULKAN_HPP_NAMESPACE::ExportMetalIOSurfaceInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ExportMetalIOSurfaceInfoEXT const & exportMetalIOSurfaceInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, exportMetalIOSurfaceInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, exportMetalIOSurfaceInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, exportMetalIOSurfaceInfoEXT.image );
+    VULKAN_HPP_HASH_COMBINE( seed, exportMetalIOSurfaceInfoEXT.ioSurface );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  #if defined( VK_USE_PLATFORM_METAL_EXT )
+template <> struct hash<VULKAN_HPP_NAMESPACE::ExportMetalObjectCreateInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ExportMetalObjectCreateInfoEXT const & exportMetalObjectCreateInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, exportMetalObjectCreateInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, exportMetalObjectCreateInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, exportMetalObjectCreateInfoEXT.exportObjectType );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  #if defined( VK_USE_PLATFORM_METAL_EXT )
+template <> struct hash<VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT const & exportMetalObjectsInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, exportMetalObjectsInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, exportMetalObjectsInfoEXT.pNext );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  #if defined( VK_USE_PLATFORM_METAL_EXT )
+template <> struct hash<VULKAN_HPP_NAMESPACE::ExportMetalSharedEventInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ExportMetalSharedEventInfoEXT const & exportMetalSharedEventInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, exportMetalSharedEventInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, exportMetalSharedEventInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, exportMetalSharedEventInfoEXT.semaphore );
+    VULKAN_HPP_HASH_COMBINE( seed, exportMetalSharedEventInfoEXT.event );
+    VULKAN_HPP_HASH_COMBINE( seed, exportMetalSharedEventInfoEXT.mtlSharedEvent );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  #if defined( VK_USE_PLATFORM_METAL_EXT )
+template <> struct hash<VULKAN_HPP_NAMESPACE::ExportMetalTextureInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ExportMetalTextureInfoEXT const & exportMetalTextureInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, exportMetalTextureInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, exportMetalTextureInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, exportMetalTextureInfoEXT.image );
+    VULKAN_HPP_HASH_COMBINE( seed, exportMetalTextureInfoEXT.imageView );
+    VULKAN_HPP_HASH_COMBINE( seed, exportMetalTextureInfoEXT.bufferView );
+    VULKAN_HPP_HASH_COMBINE( seed, exportMetalTextureInfoEXT.plane );
+    VULKAN_HPP_HASH_COMBINE( seed, exportMetalTextureInfoEXT.mtlTexture );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ExportSemaphoreCreateInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ExportSemaphoreCreateInfo const & exportSemaphoreCreateInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, exportSemaphoreCreateInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, exportSemaphoreCreateInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, exportSemaphoreCreateInfo.handleTypes );
+      return seed;
+    }
+  };
+
+  #if defined( VK_USE_PLATFORM_WIN32_KHR )
+template <> struct hash<VULKAN_HPP_NAMESPACE::ExportSemaphoreWin32HandleInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ExportSemaphoreWin32HandleInfoKHR const & exportSemaphoreWin32HandleInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, exportSemaphoreWin32HandleInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, exportSemaphoreWin32HandleInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, exportSemaphoreWin32HandleInfoKHR.pAttributes );
+    VULKAN_HPP_HASH_COMBINE( seed, exportSemaphoreWin32HandleInfoKHR.dwAccess );
+    VULKAN_HPP_HASH_COMBINE( seed, exportSemaphoreWin32HandleInfoKHR.name );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ExtensionProperties>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ExtensionProperties const & extensionProperties) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    for ( size_t i = 0; i < VK_MAX_EXTENSION_NAME_SIZE; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, extensionProperties.extensionName[i] );
+    }
+    VULKAN_HPP_HASH_COMBINE( seed, extensionProperties.specVersion );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ExternalMemoryProperties>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ExternalMemoryProperties const & externalMemoryProperties) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, externalMemoryProperties.externalMemoryFeatures );
+    VULKAN_HPP_HASH_COMBINE( seed, externalMemoryProperties.exportFromImportedHandleTypes );
+    VULKAN_HPP_HASH_COMBINE( seed, externalMemoryProperties.compatibleHandleTypes );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ExternalBufferProperties>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ExternalBufferProperties const & externalBufferProperties) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, externalBufferProperties.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, externalBufferProperties.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, externalBufferProperties.externalMemoryProperties );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ExternalFenceProperties>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ExternalFenceProperties const & externalFenceProperties) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, externalFenceProperties.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, externalFenceProperties.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, externalFenceProperties.exportFromImportedHandleTypes );
+    VULKAN_HPP_HASH_COMBINE( seed, externalFenceProperties.compatibleHandleTypes );
+    VULKAN_HPP_HASH_COMBINE( seed, externalFenceProperties.externalFenceFeatures );
+      return seed;
+    }
+  };
+
+  #if defined( VK_USE_PLATFORM_ANDROID_KHR )
+template <> struct hash<VULKAN_HPP_NAMESPACE::ExternalFormatANDROID>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ExternalFormatANDROID const & externalFormatANDROID) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, externalFormatANDROID.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, externalFormatANDROID.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, externalFormatANDROID.externalFormat );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ExternalImageFormatProperties>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ExternalImageFormatProperties const & externalImageFormatProperties) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, externalImageFormatProperties.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, externalImageFormatProperties.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, externalImageFormatProperties.externalMemoryProperties );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ImageFormatProperties>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ImageFormatProperties const & imageFormatProperties) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, imageFormatProperties.maxExtent );
+    VULKAN_HPP_HASH_COMBINE( seed, imageFormatProperties.maxMipLevels );
+    VULKAN_HPP_HASH_COMBINE( seed, imageFormatProperties.maxArrayLayers );
+    VULKAN_HPP_HASH_COMBINE( seed, imageFormatProperties.sampleCounts );
+    VULKAN_HPP_HASH_COMBINE( seed, imageFormatProperties.maxResourceSize );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV const & externalImageFormatPropertiesNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, externalImageFormatPropertiesNV.imageFormatProperties );
+    VULKAN_HPP_HASH_COMBINE( seed, externalImageFormatPropertiesNV.externalMemoryFeatures );
+    VULKAN_HPP_HASH_COMBINE( seed, externalImageFormatPropertiesNV.exportFromImportedHandleTypes );
+    VULKAN_HPP_HASH_COMBINE( seed, externalImageFormatPropertiesNV.compatibleHandleTypes );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfo const & externalMemoryBufferCreateInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, externalMemoryBufferCreateInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, externalMemoryBufferCreateInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, externalMemoryBufferCreateInfo.handleTypes );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfo const & externalMemoryImageCreateInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, externalMemoryImageCreateInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, externalMemoryImageCreateInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, externalMemoryImageCreateInfo.handleTypes );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfoNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfoNV const & externalMemoryImageCreateInfoNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, externalMemoryImageCreateInfoNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, externalMemoryImageCreateInfoNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, externalMemoryImageCreateInfoNV.handleTypes );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties const & externalSemaphoreProperties) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, externalSemaphoreProperties.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, externalSemaphoreProperties.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, externalSemaphoreProperties.exportFromImportedHandleTypes );
+    VULKAN_HPP_HASH_COMBINE( seed, externalSemaphoreProperties.compatibleHandleTypes );
+    VULKAN_HPP_HASH_COMBINE( seed, externalSemaphoreProperties.externalSemaphoreFeatures );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::FenceCreateInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::FenceCreateInfo const & fenceCreateInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, fenceCreateInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, fenceCreateInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, fenceCreateInfo.flags );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR const & fenceGetFdInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, fenceGetFdInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, fenceGetFdInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, fenceGetFdInfoKHR.fence );
+    VULKAN_HPP_HASH_COMBINE( seed, fenceGetFdInfoKHR.handleType );
+      return seed;
+    }
+  };
+
+  #if defined( VK_USE_PLATFORM_WIN32_KHR )
+template <> struct hash<VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR const & fenceGetWin32HandleInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, fenceGetWin32HandleInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, fenceGetWin32HandleInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, fenceGetWin32HandleInfoKHR.fence );
+    VULKAN_HPP_HASH_COMBINE( seed, fenceGetWin32HandleInfoKHR.handleType );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::FilterCubicImageViewImageFormatPropertiesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::FilterCubicImageViewImageFormatPropertiesEXT const & filterCubicImageViewImageFormatPropertiesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, filterCubicImageViewImageFormatPropertiesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, filterCubicImageViewImageFormatPropertiesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, filterCubicImageViewImageFormatPropertiesEXT.filterCubic );
+    VULKAN_HPP_HASH_COMBINE( seed, filterCubicImageViewImageFormatPropertiesEXT.filterCubicMinmax );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::FormatProperties>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::FormatProperties const & formatProperties) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, formatProperties.linearTilingFeatures );
+    VULKAN_HPP_HASH_COMBINE( seed, formatProperties.optimalTilingFeatures );
+    VULKAN_HPP_HASH_COMBINE( seed, formatProperties.bufferFeatures );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::FormatProperties2>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::FormatProperties2 const & formatProperties2) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, formatProperties2.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, formatProperties2.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, formatProperties2.formatProperties );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::FormatProperties3>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::FormatProperties3 const & formatProperties3) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, formatProperties3.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, formatProperties3.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, formatProperties3.linearTilingFeatures );
+    VULKAN_HPP_HASH_COMBINE( seed, formatProperties3.optimalTilingFeatures );
+    VULKAN_HPP_HASH_COMBINE( seed, formatProperties3.bufferFeatures );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::FragmentShadingRateAttachmentInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::FragmentShadingRateAttachmentInfoKHR const & fragmentShadingRateAttachmentInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, fragmentShadingRateAttachmentInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, fragmentShadingRateAttachmentInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, fragmentShadingRateAttachmentInfoKHR.pFragmentShadingRateAttachment );
+    VULKAN_HPP_HASH_COMBINE( seed, fragmentShadingRateAttachmentInfoKHR.shadingRateAttachmentTexelSize );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo const & framebufferAttachmentImageInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, framebufferAttachmentImageInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, framebufferAttachmentImageInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, framebufferAttachmentImageInfo.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, framebufferAttachmentImageInfo.usage );
+    VULKAN_HPP_HASH_COMBINE( seed, framebufferAttachmentImageInfo.width );
+    VULKAN_HPP_HASH_COMBINE( seed, framebufferAttachmentImageInfo.height );
+    VULKAN_HPP_HASH_COMBINE( seed, framebufferAttachmentImageInfo.layerCount );
+    VULKAN_HPP_HASH_COMBINE( seed, framebufferAttachmentImageInfo.viewFormatCount );
+    VULKAN_HPP_HASH_COMBINE( seed, framebufferAttachmentImageInfo.pViewFormats );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfo const & framebufferAttachmentsCreateInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, framebufferAttachmentsCreateInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, framebufferAttachmentsCreateInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, framebufferAttachmentsCreateInfo.attachmentImageInfoCount );
+    VULKAN_HPP_HASH_COMBINE( seed, framebufferAttachmentsCreateInfo.pAttachmentImageInfos );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::FramebufferCreateInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::FramebufferCreateInfo const & framebufferCreateInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, framebufferCreateInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, framebufferCreateInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, framebufferCreateInfo.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, framebufferCreateInfo.renderPass );
+    VULKAN_HPP_HASH_COMBINE( seed, framebufferCreateInfo.attachmentCount );
+    VULKAN_HPP_HASH_COMBINE( seed, framebufferCreateInfo.pAttachments );
+    VULKAN_HPP_HASH_COMBINE( seed, framebufferCreateInfo.width );
+    VULKAN_HPP_HASH_COMBINE( seed, framebufferCreateInfo.height );
+    VULKAN_HPP_HASH_COMBINE( seed, framebufferCreateInfo.layers );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV const & framebufferMixedSamplesCombinationNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, framebufferMixedSamplesCombinationNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, framebufferMixedSamplesCombinationNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, framebufferMixedSamplesCombinationNV.coverageReductionMode );
+    VULKAN_HPP_HASH_COMBINE( seed, framebufferMixedSamplesCombinationNV.rasterizationSamples );
+    VULKAN_HPP_HASH_COMBINE( seed, framebufferMixedSamplesCombinationNV.depthStencilSamples );
+    VULKAN_HPP_HASH_COMBINE( seed, framebufferMixedSamplesCombinationNV.colorSamples );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV const & indirectCommandsStreamNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsStreamNV.buffer );
+    VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsStreamNV.offset );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV const & generatedCommandsInfoNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoNV.pipelineBindPoint );
+    VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoNV.pipeline );
+    VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoNV.indirectCommandsLayout );
+    VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoNV.streamCount );
+    VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoNV.pStreams );
+    VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoNV.sequencesCount );
+    VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoNV.preprocessBuffer );
+    VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoNV.preprocessOffset );
+    VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoNV.preprocessSize );
+    VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoNV.sequencesCountBuffer );
+    VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoNV.sequencesCountOffset );
+    VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoNV.sequencesIndexBuffer );
+    VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoNV.sequencesIndexOffset );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV const & generatedCommandsMemoryRequirementsInfoNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsMemoryRequirementsInfoNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsMemoryRequirementsInfoNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsMemoryRequirementsInfoNV.pipelineBindPoint );
+    VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsMemoryRequirementsInfoNV.pipeline );
+    VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsMemoryRequirementsInfoNV.indirectCommandsLayout );
+    VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsMemoryRequirementsInfoNV.maxSequencesCount );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::VertexInputBindingDescription>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VertexInputBindingDescription const & vertexInputBindingDescription) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDescription.binding );
+    VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDescription.stride );
+    VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDescription.inputRate );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription const & vertexInputAttributeDescription) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, vertexInputAttributeDescription.location );
+    VULKAN_HPP_HASH_COMBINE( seed, vertexInputAttributeDescription.binding );
+    VULKAN_HPP_HASH_COMBINE( seed, vertexInputAttributeDescription.format );
+    VULKAN_HPP_HASH_COMBINE( seed, vertexInputAttributeDescription.offset );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo const & pipelineVertexInputStateCreateInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineVertexInputStateCreateInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineVertexInputStateCreateInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineVertexInputStateCreateInfo.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineVertexInputStateCreateInfo.vertexBindingDescriptionCount );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineVertexInputStateCreateInfo.pVertexBindingDescriptions );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineVertexInputStateCreateInfo.vertexAttributeDescriptionCount );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineVertexInputStateCreateInfo.pVertexAttributeDescriptions );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo const & pipelineInputAssemblyStateCreateInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineInputAssemblyStateCreateInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineInputAssemblyStateCreateInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineInputAssemblyStateCreateInfo.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineInputAssemblyStateCreateInfo.topology );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineInputAssemblyStateCreateInfo.primitiveRestartEnable );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo const & pipelineTessellationStateCreateInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineTessellationStateCreateInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineTessellationStateCreateInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineTessellationStateCreateInfo.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineTessellationStateCreateInfo.patchControlPoints );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo const & pipelineViewportStateCreateInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportStateCreateInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportStateCreateInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportStateCreateInfo.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportStateCreateInfo.viewportCount );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportStateCreateInfo.pViewports );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportStateCreateInfo.scissorCount );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportStateCreateInfo.pScissors );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo const & pipelineRasterizationStateCreateInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateCreateInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateCreateInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateCreateInfo.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateCreateInfo.depthClampEnable );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateCreateInfo.rasterizerDiscardEnable );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateCreateInfo.polygonMode );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateCreateInfo.cullMode );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateCreateInfo.frontFace );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateCreateInfo.depthBiasEnable );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateCreateInfo.depthBiasConstantFactor );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateCreateInfo.depthBiasClamp );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateCreateInfo.depthBiasSlopeFactor );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateCreateInfo.lineWidth );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo const & pipelineMultisampleStateCreateInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineMultisampleStateCreateInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineMultisampleStateCreateInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineMultisampleStateCreateInfo.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineMultisampleStateCreateInfo.rasterizationSamples );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineMultisampleStateCreateInfo.sampleShadingEnable );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineMultisampleStateCreateInfo.minSampleShading );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineMultisampleStateCreateInfo.pSampleMask );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineMultisampleStateCreateInfo.alphaToCoverageEnable );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineMultisampleStateCreateInfo.alphaToOneEnable );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::StencilOpState>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::StencilOpState const & stencilOpState) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, stencilOpState.failOp );
+    VULKAN_HPP_HASH_COMBINE( seed, stencilOpState.passOp );
+    VULKAN_HPP_HASH_COMBINE( seed, stencilOpState.depthFailOp );
+    VULKAN_HPP_HASH_COMBINE( seed, stencilOpState.compareOp );
+    VULKAN_HPP_HASH_COMBINE( seed, stencilOpState.compareMask );
+    VULKAN_HPP_HASH_COMBINE( seed, stencilOpState.writeMask );
+    VULKAN_HPP_HASH_COMBINE( seed, stencilOpState.reference );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo const & pipelineDepthStencilStateCreateInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineDepthStencilStateCreateInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineDepthStencilStateCreateInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineDepthStencilStateCreateInfo.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineDepthStencilStateCreateInfo.depthTestEnable );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineDepthStencilStateCreateInfo.depthWriteEnable );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineDepthStencilStateCreateInfo.depthCompareOp );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineDepthStencilStateCreateInfo.depthBoundsTestEnable );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineDepthStencilStateCreateInfo.stencilTestEnable );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineDepthStencilStateCreateInfo.front );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineDepthStencilStateCreateInfo.back );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineDepthStencilStateCreateInfo.minDepthBounds );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineDepthStencilStateCreateInfo.maxDepthBounds );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState const & pipelineColorBlendAttachmentState) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendAttachmentState.blendEnable );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendAttachmentState.srcColorBlendFactor );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendAttachmentState.dstColorBlendFactor );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendAttachmentState.colorBlendOp );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendAttachmentState.srcAlphaBlendFactor );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendAttachmentState.dstAlphaBlendFactor );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendAttachmentState.alphaBlendOp );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendAttachmentState.colorWriteMask );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo const & pipelineColorBlendStateCreateInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendStateCreateInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendStateCreateInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendStateCreateInfo.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendStateCreateInfo.logicOpEnable );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendStateCreateInfo.logicOp );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendStateCreateInfo.attachmentCount );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendStateCreateInfo.pAttachments );
+    for ( size_t i = 0; i < 4; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendStateCreateInfo.blendConstants[i] );
+    }
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo const & pipelineDynamicStateCreateInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineDynamicStateCreateInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineDynamicStateCreateInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineDynamicStateCreateInfo.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineDynamicStateCreateInfo.dynamicStateCount );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineDynamicStateCreateInfo.pDynamicStates );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo const & graphicsPipelineCreateInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.stageCount );
+    VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.pStages );
+    VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.pVertexInputState );
+    VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.pInputAssemblyState );
+    VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.pTessellationState );
+    VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.pViewportState );
+    VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.pRasterizationState );
+    VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.pMultisampleState );
+    VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.pDepthStencilState );
+    VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.pColorBlendState );
+    VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.pDynamicState );
+    VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.layout );
+    VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.renderPass );
+    VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.subpass );
+    VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.basePipelineHandle );
+    VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.basePipelineIndex );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryCreateInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryCreateInfoEXT const & graphicsPipelineLibraryCreateInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineLibraryCreateInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineLibraryCreateInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineLibraryCreateInfoEXT.flags );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV const & graphicsShaderGroupCreateInfoNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, graphicsShaderGroupCreateInfoNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, graphicsShaderGroupCreateInfoNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, graphicsShaderGroupCreateInfoNV.stageCount );
+    VULKAN_HPP_HASH_COMBINE( seed, graphicsShaderGroupCreateInfoNV.pStages );
+    VULKAN_HPP_HASH_COMBINE( seed, graphicsShaderGroupCreateInfoNV.pVertexInputState );
+    VULKAN_HPP_HASH_COMBINE( seed, graphicsShaderGroupCreateInfoNV.pTessellationState );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::GraphicsPipelineShaderGroupsCreateInfoNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::GraphicsPipelineShaderGroupsCreateInfoNV const & graphicsPipelineShaderGroupsCreateInfoNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineShaderGroupsCreateInfoNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineShaderGroupsCreateInfoNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineShaderGroupsCreateInfoNV.groupCount );
+    VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineShaderGroupsCreateInfoNV.pGroups );
+    VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineShaderGroupsCreateInfoNV.pipelineCount );
+    VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineShaderGroupsCreateInfoNV.pPipelines );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::XYColorEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::XYColorEXT const & xYColorEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, xYColorEXT.x );
+    VULKAN_HPP_HASH_COMBINE( seed, xYColorEXT.y );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::HdrMetadataEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::HdrMetadataEXT const & hdrMetadataEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, hdrMetadataEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, hdrMetadataEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, hdrMetadataEXT.displayPrimaryRed );
+    VULKAN_HPP_HASH_COMBINE( seed, hdrMetadataEXT.displayPrimaryGreen );
+    VULKAN_HPP_HASH_COMBINE( seed, hdrMetadataEXT.displayPrimaryBlue );
+    VULKAN_HPP_HASH_COMBINE( seed, hdrMetadataEXT.whitePoint );
+    VULKAN_HPP_HASH_COMBINE( seed, hdrMetadataEXT.maxLuminance );
+    VULKAN_HPP_HASH_COMBINE( seed, hdrMetadataEXT.minLuminance );
+    VULKAN_HPP_HASH_COMBINE( seed, hdrMetadataEXT.maxContentLightLevel );
+    VULKAN_HPP_HASH_COMBINE( seed, hdrMetadataEXT.maxFrameAverageLightLevel );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT const & headlessSurfaceCreateInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, headlessSurfaceCreateInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, headlessSurfaceCreateInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, headlessSurfaceCreateInfoEXT.flags );
+      return seed;
+    }
+  };
+
+  #if defined( VK_USE_PLATFORM_IOS_MVK )
+template <> struct hash<VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK const & iOSSurfaceCreateInfoMVK) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, iOSSurfaceCreateInfoMVK.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, iOSSurfaceCreateInfoMVK.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, iOSSurfaceCreateInfoMVK.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, iOSSurfaceCreateInfoMVK.pView );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ImageBlit>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ImageBlit const & imageBlit) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, imageBlit.srcSubresource );
+    for ( size_t i = 0; i < 2; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, imageBlit.srcOffsets[i] );
+    }
+    VULKAN_HPP_HASH_COMBINE( seed, imageBlit.dstSubresource );
+    for ( size_t i = 0; i < 2; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, imageBlit.dstOffsets[i] );
+    }
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT const & imageCaptureDescriptorDataInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, imageCaptureDescriptorDataInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, imageCaptureDescriptorDataInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, imageCaptureDescriptorDataInfoEXT.image );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ImageCompressionControlEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ImageCompressionControlEXT const & imageCompressionControlEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, imageCompressionControlEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, imageCompressionControlEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, imageCompressionControlEXT.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, imageCompressionControlEXT.compressionControlPlaneCount );
+    VULKAN_HPP_HASH_COMBINE( seed, imageCompressionControlEXT.pFixedRateFlags );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ImageCompressionPropertiesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ImageCompressionPropertiesEXT const & imageCompressionPropertiesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, imageCompressionPropertiesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, imageCompressionPropertiesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, imageCompressionPropertiesEXT.imageCompressionFlags );
+    VULKAN_HPP_HASH_COMBINE( seed, imageCompressionPropertiesEXT.imageCompressionFixedRateFlags );
+      return seed;
+    }
+  };
+
+  #if defined( VK_USE_PLATFORM_FUCHSIA )
+template <> struct hash<VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA const & imageFormatConstraintsInfoFUCHSIA) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, imageFormatConstraintsInfoFUCHSIA.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, imageFormatConstraintsInfoFUCHSIA.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, imageFormatConstraintsInfoFUCHSIA.imageCreateInfo );
+    VULKAN_HPP_HASH_COMBINE( seed, imageFormatConstraintsInfoFUCHSIA.requiredFormatFeatures );
+    VULKAN_HPP_HASH_COMBINE( seed, imageFormatConstraintsInfoFUCHSIA.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, imageFormatConstraintsInfoFUCHSIA.sysmemPixelFormat );
+    VULKAN_HPP_HASH_COMBINE( seed, imageFormatConstraintsInfoFUCHSIA.colorSpaceCount );
+    VULKAN_HPP_HASH_COMBINE( seed, imageFormatConstraintsInfoFUCHSIA.pColorSpaces );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  #if defined( VK_USE_PLATFORM_FUCHSIA )
+template <> struct hash<VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA const & imageConstraintsInfoFUCHSIA) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, imageConstraintsInfoFUCHSIA.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, imageConstraintsInfoFUCHSIA.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, imageConstraintsInfoFUCHSIA.formatConstraintsCount );
+    VULKAN_HPP_HASH_COMBINE( seed, imageConstraintsInfoFUCHSIA.pFormatConstraints );
+    VULKAN_HPP_HASH_COMBINE( seed, imageConstraintsInfoFUCHSIA.bufferCollectionConstraints );
+    VULKAN_HPP_HASH_COMBINE( seed, imageConstraintsInfoFUCHSIA.flags );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ImageCopy>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ImageCopy const & imageCopy) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, imageCopy.srcSubresource );
+    VULKAN_HPP_HASH_COMBINE( seed, imageCopy.srcOffset );
+    VULKAN_HPP_HASH_COMBINE( seed, imageCopy.dstSubresource );
+    VULKAN_HPP_HASH_COMBINE( seed, imageCopy.dstOffset );
+    VULKAN_HPP_HASH_COMBINE( seed, imageCopy.extent );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SubresourceLayout>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SubresourceLayout const & subresourceLayout) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, subresourceLayout.offset );
+    VULKAN_HPP_HASH_COMBINE( seed, subresourceLayout.size );
+    VULKAN_HPP_HASH_COMBINE( seed, subresourceLayout.rowPitch );
+    VULKAN_HPP_HASH_COMBINE( seed, subresourceLayout.arrayPitch );
+    VULKAN_HPP_HASH_COMBINE( seed, subresourceLayout.depthPitch );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierExplicitCreateInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierExplicitCreateInfoEXT const & imageDrmFormatModifierExplicitCreateInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, imageDrmFormatModifierExplicitCreateInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, imageDrmFormatModifierExplicitCreateInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, imageDrmFormatModifierExplicitCreateInfoEXT.drmFormatModifier );
+    VULKAN_HPP_HASH_COMBINE( seed, imageDrmFormatModifierExplicitCreateInfoEXT.drmFormatModifierPlaneCount );
+    VULKAN_HPP_HASH_COMBINE( seed, imageDrmFormatModifierExplicitCreateInfoEXT.pPlaneLayouts );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierListCreateInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierListCreateInfoEXT const & imageDrmFormatModifierListCreateInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, imageDrmFormatModifierListCreateInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, imageDrmFormatModifierListCreateInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, imageDrmFormatModifierListCreateInfoEXT.drmFormatModifierCount );
+    VULKAN_HPP_HASH_COMBINE( seed, imageDrmFormatModifierListCreateInfoEXT.pDrmFormatModifiers );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT const & imageDrmFormatModifierPropertiesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, imageDrmFormatModifierPropertiesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, imageDrmFormatModifierPropertiesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, imageDrmFormatModifierPropertiesEXT.drmFormatModifier );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ImageFormatListCreateInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ImageFormatListCreateInfo const & imageFormatListCreateInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, imageFormatListCreateInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, imageFormatListCreateInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, imageFormatListCreateInfo.viewFormatCount );
+    VULKAN_HPP_HASH_COMBINE( seed, imageFormatListCreateInfo.pViewFormats );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ImageFormatProperties2 const & imageFormatProperties2) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, imageFormatProperties2.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, imageFormatProperties2.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, imageFormatProperties2.imageFormatProperties );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ImageMemoryBarrier>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ImageMemoryBarrier const & imageMemoryBarrier) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier.srcAccessMask );
+    VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier.dstAccessMask );
+    VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier.oldLayout );
+    VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier.newLayout );
+    VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier.srcQueueFamilyIndex );
+    VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier.dstQueueFamilyIndex );
+    VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier.image );
+    VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier.subresourceRange );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 const & imageMemoryRequirementsInfo2) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, imageMemoryRequirementsInfo2.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, imageMemoryRequirementsInfo2.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, imageMemoryRequirementsInfo2.image );
+      return seed;
+    }
+  };
+
+  #if defined( VK_USE_PLATFORM_FUCHSIA )
+template <> struct hash<VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA const & imagePipeSurfaceCreateInfoFUCHSIA) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, imagePipeSurfaceCreateInfoFUCHSIA.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, imagePipeSurfaceCreateInfoFUCHSIA.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, imagePipeSurfaceCreateInfoFUCHSIA.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, imagePipeSurfaceCreateInfoFUCHSIA.imagePipeHandle );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfo const & imagePlaneMemoryRequirementsInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, imagePlaneMemoryRequirementsInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, imagePlaneMemoryRequirementsInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, imagePlaneMemoryRequirementsInfo.planeAspect );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ImageResolve>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ImageResolve const & imageResolve) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, imageResolve.srcSubresource );
+    VULKAN_HPP_HASH_COMBINE( seed, imageResolve.srcOffset );
+    VULKAN_HPP_HASH_COMBINE( seed, imageResolve.dstSubresource );
+    VULKAN_HPP_HASH_COMBINE( seed, imageResolve.dstOffset );
+    VULKAN_HPP_HASH_COMBINE( seed, imageResolve.extent );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ImageResolve2>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ImageResolve2 const & imageResolve2) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, imageResolve2.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, imageResolve2.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, imageResolve2.srcSubresource );
+    VULKAN_HPP_HASH_COMBINE( seed, imageResolve2.srcOffset );
+    VULKAN_HPP_HASH_COMBINE( seed, imageResolve2.dstSubresource );
+    VULKAN_HPP_HASH_COMBINE( seed, imageResolve2.dstOffset );
+    VULKAN_HPP_HASH_COMBINE( seed, imageResolve2.extent );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 const & imageSparseMemoryRequirementsInfo2) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, imageSparseMemoryRequirementsInfo2.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, imageSparseMemoryRequirementsInfo2.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, imageSparseMemoryRequirementsInfo2.image );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfo const & imageStencilUsageCreateInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, imageStencilUsageCreateInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, imageStencilUsageCreateInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, imageStencilUsageCreateInfo.stencilUsage );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ImageSubresource2EXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ImageSubresource2EXT const & imageSubresource2EXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, imageSubresource2EXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, imageSubresource2EXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, imageSubresource2EXT.imageSubresource );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR const & imageSwapchainCreateInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, imageSwapchainCreateInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, imageSwapchainCreateInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, imageSwapchainCreateInfoKHR.swapchain );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT const & imageViewASTCDecodeModeEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, imageViewASTCDecodeModeEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, imageViewASTCDecodeModeEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, imageViewASTCDecodeModeEXT.decodeMode );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX const & imageViewAddressPropertiesNVX) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, imageViewAddressPropertiesNVX.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, imageViewAddressPropertiesNVX.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, imageViewAddressPropertiesNVX.deviceAddress );
+    VULKAN_HPP_HASH_COMBINE( seed, imageViewAddressPropertiesNVX.size );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT const & imageViewCaptureDescriptorDataInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, imageViewCaptureDescriptorDataInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, imageViewCaptureDescriptorDataInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, imageViewCaptureDescriptorDataInfoEXT.imageView );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ImageViewCreateInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ImageViewCreateInfo const & imageViewCreateInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, imageViewCreateInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, imageViewCreateInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, imageViewCreateInfo.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, imageViewCreateInfo.image );
+    VULKAN_HPP_HASH_COMBINE( seed, imageViewCreateInfo.viewType );
+    VULKAN_HPP_HASH_COMBINE( seed, imageViewCreateInfo.format );
+    VULKAN_HPP_HASH_COMBINE( seed, imageViewCreateInfo.components );
+    VULKAN_HPP_HASH_COMBINE( seed, imageViewCreateInfo.subresourceRange );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX const & imageViewHandleInfoNVX) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, imageViewHandleInfoNVX.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, imageViewHandleInfoNVX.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, imageViewHandleInfoNVX.imageView );
+    VULKAN_HPP_HASH_COMBINE( seed, imageViewHandleInfoNVX.descriptorType );
+    VULKAN_HPP_HASH_COMBINE( seed, imageViewHandleInfoNVX.sampler );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ImageViewMinLodCreateInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ImageViewMinLodCreateInfoEXT const & imageViewMinLodCreateInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, imageViewMinLodCreateInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, imageViewMinLodCreateInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, imageViewMinLodCreateInfoEXT.minLod );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ImageViewSampleWeightCreateInfoQCOM>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ImageViewSampleWeightCreateInfoQCOM const & imageViewSampleWeightCreateInfoQCOM) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, imageViewSampleWeightCreateInfoQCOM.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, imageViewSampleWeightCreateInfoQCOM.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, imageViewSampleWeightCreateInfoQCOM.filterCenter );
+    VULKAN_HPP_HASH_COMBINE( seed, imageViewSampleWeightCreateInfoQCOM.filterSize );
+    VULKAN_HPP_HASH_COMBINE( seed, imageViewSampleWeightCreateInfoQCOM.numPhases );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfo const & imageViewUsageCreateInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, imageViewUsageCreateInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, imageViewUsageCreateInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, imageViewUsageCreateInfo.usage );
+      return seed;
+    }
+  };
+
+  #if defined( VK_USE_PLATFORM_ANDROID_KHR )
+template <> struct hash<VULKAN_HPP_NAMESPACE::ImportAndroidHardwareBufferInfoANDROID>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ImportAndroidHardwareBufferInfoANDROID const & importAndroidHardwareBufferInfoANDROID) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, importAndroidHardwareBufferInfoANDROID.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, importAndroidHardwareBufferInfoANDROID.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, importAndroidHardwareBufferInfoANDROID.buffer );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR const & importFenceFdInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, importFenceFdInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, importFenceFdInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, importFenceFdInfoKHR.fence );
+    VULKAN_HPP_HASH_COMBINE( seed, importFenceFdInfoKHR.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, importFenceFdInfoKHR.handleType );
+    VULKAN_HPP_HASH_COMBINE( seed, importFenceFdInfoKHR.fd );
+      return seed;
+    }
+  };
+
+  #if defined( VK_USE_PLATFORM_WIN32_KHR )
+template <> struct hash<VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR const & importFenceWin32HandleInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, importFenceWin32HandleInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, importFenceWin32HandleInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, importFenceWin32HandleInfoKHR.fence );
+    VULKAN_HPP_HASH_COMBINE( seed, importFenceWin32HandleInfoKHR.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, importFenceWin32HandleInfoKHR.handleType );
+    VULKAN_HPP_HASH_COMBINE( seed, importFenceWin32HandleInfoKHR.handle );
+    VULKAN_HPP_HASH_COMBINE( seed, importFenceWin32HandleInfoKHR.name );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  #if defined( VK_USE_PLATFORM_FUCHSIA )
+template <> struct hash<VULKAN_HPP_NAMESPACE::ImportMemoryBufferCollectionFUCHSIA>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ImportMemoryBufferCollectionFUCHSIA const & importMemoryBufferCollectionFUCHSIA) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, importMemoryBufferCollectionFUCHSIA.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, importMemoryBufferCollectionFUCHSIA.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, importMemoryBufferCollectionFUCHSIA.collection );
+    VULKAN_HPP_HASH_COMBINE( seed, importMemoryBufferCollectionFUCHSIA.index );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ImportMemoryFdInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ImportMemoryFdInfoKHR const & importMemoryFdInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, importMemoryFdInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, importMemoryFdInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, importMemoryFdInfoKHR.handleType );
+    VULKAN_HPP_HASH_COMBINE( seed, importMemoryFdInfoKHR.fd );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ImportMemoryHostPointerInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ImportMemoryHostPointerInfoEXT const & importMemoryHostPointerInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, importMemoryHostPointerInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, importMemoryHostPointerInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, importMemoryHostPointerInfoEXT.handleType );
+    VULKAN_HPP_HASH_COMBINE( seed, importMemoryHostPointerInfoEXT.pHostPointer );
+      return seed;
+    }
+  };
+
+  #if defined( VK_USE_PLATFORM_WIN32_KHR )
+template <> struct hash<VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoKHR const & importMemoryWin32HandleInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, importMemoryWin32HandleInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, importMemoryWin32HandleInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, importMemoryWin32HandleInfoKHR.handleType );
+    VULKAN_HPP_HASH_COMBINE( seed, importMemoryWin32HandleInfoKHR.handle );
+    VULKAN_HPP_HASH_COMBINE( seed, importMemoryWin32HandleInfoKHR.name );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  #if defined( VK_USE_PLATFORM_WIN32_KHR )
+template <> struct hash<VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoNV const & importMemoryWin32HandleInfoNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, importMemoryWin32HandleInfoNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, importMemoryWin32HandleInfoNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, importMemoryWin32HandleInfoNV.handleType );
+    VULKAN_HPP_HASH_COMBINE( seed, importMemoryWin32HandleInfoNV.handle );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  #if defined( VK_USE_PLATFORM_FUCHSIA )
+template <> struct hash<VULKAN_HPP_NAMESPACE::ImportMemoryZirconHandleInfoFUCHSIA>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ImportMemoryZirconHandleInfoFUCHSIA const & importMemoryZirconHandleInfoFUCHSIA) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, importMemoryZirconHandleInfoFUCHSIA.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, importMemoryZirconHandleInfoFUCHSIA.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, importMemoryZirconHandleInfoFUCHSIA.handleType );
+    VULKAN_HPP_HASH_COMBINE( seed, importMemoryZirconHandleInfoFUCHSIA.handle );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  #if defined( VK_USE_PLATFORM_METAL_EXT )
+template <> struct hash<VULKAN_HPP_NAMESPACE::ImportMetalBufferInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ImportMetalBufferInfoEXT const & importMetalBufferInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, importMetalBufferInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, importMetalBufferInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, importMetalBufferInfoEXT.mtlBuffer );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  #if defined( VK_USE_PLATFORM_METAL_EXT )
+template <> struct hash<VULKAN_HPP_NAMESPACE::ImportMetalIOSurfaceInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ImportMetalIOSurfaceInfoEXT const & importMetalIOSurfaceInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, importMetalIOSurfaceInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, importMetalIOSurfaceInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, importMetalIOSurfaceInfoEXT.ioSurface );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  #if defined( VK_USE_PLATFORM_METAL_EXT )
+template <> struct hash<VULKAN_HPP_NAMESPACE::ImportMetalSharedEventInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ImportMetalSharedEventInfoEXT const & importMetalSharedEventInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, importMetalSharedEventInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, importMetalSharedEventInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, importMetalSharedEventInfoEXT.mtlSharedEvent );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  #if defined( VK_USE_PLATFORM_METAL_EXT )
+template <> struct hash<VULKAN_HPP_NAMESPACE::ImportMetalTextureInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ImportMetalTextureInfoEXT const & importMetalTextureInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, importMetalTextureInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, importMetalTextureInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, importMetalTextureInfoEXT.plane );
+    VULKAN_HPP_HASH_COMBINE( seed, importMetalTextureInfoEXT.mtlTexture );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR const & importSemaphoreFdInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreFdInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreFdInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreFdInfoKHR.semaphore );
+    VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreFdInfoKHR.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreFdInfoKHR.handleType );
+    VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreFdInfoKHR.fd );
+      return seed;
+    }
+  };
+
+  #if defined( VK_USE_PLATFORM_WIN32_KHR )
+template <> struct hash<VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR const & importSemaphoreWin32HandleInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreWin32HandleInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreWin32HandleInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreWin32HandleInfoKHR.semaphore );
+    VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreWin32HandleInfoKHR.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreWin32HandleInfoKHR.handleType );
+    VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreWin32HandleInfoKHR.handle );
+    VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreWin32HandleInfoKHR.name );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  #if defined( VK_USE_PLATFORM_FUCHSIA )
+template <> struct hash<VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA const & importSemaphoreZirconHandleInfoFUCHSIA) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreZirconHandleInfoFUCHSIA.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreZirconHandleInfoFUCHSIA.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreZirconHandleInfoFUCHSIA.semaphore );
+    VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreZirconHandleInfoFUCHSIA.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreZirconHandleInfoFUCHSIA.handleType );
+    VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreZirconHandleInfoFUCHSIA.zirconHandle );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV const & indirectCommandsLayoutTokenNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutTokenNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutTokenNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutTokenNV.tokenType );
+    VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutTokenNV.stream );
+    VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutTokenNV.offset );
+    VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutTokenNV.vertexBindingUnit );
+    VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutTokenNV.vertexDynamicStride );
+    VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutTokenNV.pushconstantPipelineLayout );
+    VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutTokenNV.pushconstantShaderStageFlags );
+    VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutTokenNV.pushconstantOffset );
+    VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutTokenNV.pushconstantSize );
+    VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutTokenNV.indirectStateFlags );
+    VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutTokenNV.indexTypeCount );
+    VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutTokenNV.pIndexTypes );
+    VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutTokenNV.pIndexTypeValues );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV const & indirectCommandsLayoutCreateInfoNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutCreateInfoNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutCreateInfoNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutCreateInfoNV.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutCreateInfoNV.pipelineBindPoint );
+    VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutCreateInfoNV.tokenCount );
+    VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutCreateInfoNV.pTokens );
+    VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutCreateInfoNV.streamCount );
+    VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutCreateInfoNV.pStreamStrides );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL const & initializePerformanceApiInfoINTEL) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, initializePerformanceApiInfoINTEL.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, initializePerformanceApiInfoINTEL.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, initializePerformanceApiInfoINTEL.pUserData );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference const & inputAttachmentAspectReference) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, inputAttachmentAspectReference.subpass );
+    VULKAN_HPP_HASH_COMBINE( seed, inputAttachmentAspectReference.inputAttachmentIndex );
+    VULKAN_HPP_HASH_COMBINE( seed, inputAttachmentAspectReference.aspectMask );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::InstanceCreateInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::InstanceCreateInfo const & instanceCreateInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, instanceCreateInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, instanceCreateInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, instanceCreateInfo.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, instanceCreateInfo.pApplicationInfo );
+    VULKAN_HPP_HASH_COMBINE( seed, instanceCreateInfo.enabledLayerCount );
+    for ( size_t i = 0; i < instanceCreateInfo.enabledLayerCount; ++i )
+    {
+        for ( const char* p = instanceCreateInfo.ppEnabledLayerNames[i]; *p != '\0'; ++p )
+        {
+          VULKAN_HPP_HASH_COMBINE( seed, *p );
+        }
+    }
+    VULKAN_HPP_HASH_COMBINE( seed, instanceCreateInfo.enabledExtensionCount );
+    for ( size_t i = 0; i < instanceCreateInfo.enabledExtensionCount; ++i )
+    {
+        for ( const char* p = instanceCreateInfo.ppEnabledExtensionNames[i]; *p != '\0'; ++p )
+        {
+          VULKAN_HPP_HASH_COMBINE( seed, *p );
+        }
+    }
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::LayerProperties>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::LayerProperties const & layerProperties) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    for ( size_t i = 0; i < VK_MAX_EXTENSION_NAME_SIZE; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, layerProperties.layerName[i] );
+    }
+    VULKAN_HPP_HASH_COMBINE( seed, layerProperties.specVersion );
+    VULKAN_HPP_HASH_COMBINE( seed, layerProperties.implementationVersion );
+    for ( size_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, layerProperties.description[i] );
+    }
+      return seed;
+    }
+  };
+
+  #if defined( VK_USE_PLATFORM_MACOS_MVK )
+template <> struct hash<VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK const & macOSSurfaceCreateInfoMVK) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, macOSSurfaceCreateInfoMVK.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, macOSSurfaceCreateInfoMVK.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, macOSSurfaceCreateInfoMVK.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, macOSSurfaceCreateInfoMVK.pView );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::MappedMemoryRange>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::MappedMemoryRange const & mappedMemoryRange) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, mappedMemoryRange.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, mappedMemoryRange.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, mappedMemoryRange.memory );
+    VULKAN_HPP_HASH_COMBINE( seed, mappedMemoryRange.offset );
+    VULKAN_HPP_HASH_COMBINE( seed, mappedMemoryRange.size );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsInfo const & memoryAllocateFlagsInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, memoryAllocateFlagsInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, memoryAllocateFlagsInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, memoryAllocateFlagsInfo.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, memoryAllocateFlagsInfo.deviceMask );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::MemoryAllocateInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::MemoryAllocateInfo const & memoryAllocateInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, memoryAllocateInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, memoryAllocateInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, memoryAllocateInfo.allocationSize );
+    VULKAN_HPP_HASH_COMBINE( seed, memoryAllocateInfo.memoryTypeIndex );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::MemoryBarrier>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::MemoryBarrier const & memoryBarrier) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, memoryBarrier.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, memoryBarrier.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, memoryBarrier.srcAccessMask );
+    VULKAN_HPP_HASH_COMBINE( seed, memoryBarrier.dstAccessMask );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfo const & memoryDedicatedAllocateInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, memoryDedicatedAllocateInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, memoryDedicatedAllocateInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, memoryDedicatedAllocateInfo.image );
+    VULKAN_HPP_HASH_COMBINE( seed, memoryDedicatedAllocateInfo.buffer );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements const & memoryDedicatedRequirements) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, memoryDedicatedRequirements.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, memoryDedicatedRequirements.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, memoryDedicatedRequirements.prefersDedicatedAllocation );
+    VULKAN_HPP_HASH_COMBINE( seed, memoryDedicatedRequirements.requiresDedicatedAllocation );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR const & memoryFdPropertiesKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, memoryFdPropertiesKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, memoryFdPropertiesKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, memoryFdPropertiesKHR.memoryTypeBits );
+      return seed;
+    }
+  };
+
+  #if defined( VK_USE_PLATFORM_ANDROID_KHR )
+template <> struct hash<VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID const & memoryGetAndroidHardwareBufferInfoANDROID) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, memoryGetAndroidHardwareBufferInfoANDROID.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, memoryGetAndroidHardwareBufferInfoANDROID.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, memoryGetAndroidHardwareBufferInfoANDROID.memory );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR const & memoryGetFdInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, memoryGetFdInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, memoryGetFdInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, memoryGetFdInfoKHR.memory );
+    VULKAN_HPP_HASH_COMBINE( seed, memoryGetFdInfoKHR.handleType );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV const & memoryGetRemoteAddressInfoNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, memoryGetRemoteAddressInfoNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, memoryGetRemoteAddressInfoNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, memoryGetRemoteAddressInfoNV.memory );
+    VULKAN_HPP_HASH_COMBINE( seed, memoryGetRemoteAddressInfoNV.handleType );
+      return seed;
+    }
+  };
+
+  #if defined( VK_USE_PLATFORM_WIN32_KHR )
+template <> struct hash<VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR const & memoryGetWin32HandleInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, memoryGetWin32HandleInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, memoryGetWin32HandleInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, memoryGetWin32HandleInfoKHR.memory );
+    VULKAN_HPP_HASH_COMBINE( seed, memoryGetWin32HandleInfoKHR.handleType );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  #if defined( VK_USE_PLATFORM_FUCHSIA )
+template <> struct hash<VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA const & memoryGetZirconHandleInfoFUCHSIA) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, memoryGetZirconHandleInfoFUCHSIA.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, memoryGetZirconHandleInfoFUCHSIA.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, memoryGetZirconHandleInfoFUCHSIA.memory );
+    VULKAN_HPP_HASH_COMBINE( seed, memoryGetZirconHandleInfoFUCHSIA.handleType );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::MemoryHeap>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::MemoryHeap const & memoryHeap) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, memoryHeap.size );
+    VULKAN_HPP_HASH_COMBINE( seed, memoryHeap.flags );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT const & memoryHostPointerPropertiesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, memoryHostPointerPropertiesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, memoryHostPointerPropertiesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, memoryHostPointerPropertiesEXT.memoryTypeBits );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfo const & memoryOpaqueCaptureAddressAllocateInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, memoryOpaqueCaptureAddressAllocateInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, memoryOpaqueCaptureAddressAllocateInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, memoryOpaqueCaptureAddressAllocateInfo.opaqueCaptureAddress );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::MemoryPriorityAllocateInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::MemoryPriorityAllocateInfoEXT const & memoryPriorityAllocateInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, memoryPriorityAllocateInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, memoryPriorityAllocateInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, memoryPriorityAllocateInfoEXT.priority );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::MemoryRequirements>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::MemoryRequirements const & memoryRequirements) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, memoryRequirements.size );
+    VULKAN_HPP_HASH_COMBINE( seed, memoryRequirements.alignment );
+    VULKAN_HPP_HASH_COMBINE( seed, memoryRequirements.memoryTypeBits );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::MemoryRequirements2>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::MemoryRequirements2 const & memoryRequirements2) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, memoryRequirements2.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, memoryRequirements2.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, memoryRequirements2.memoryRequirements );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::MemoryType>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::MemoryType const & memoryType) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, memoryType.propertyFlags );
+    VULKAN_HPP_HASH_COMBINE( seed, memoryType.heapIndex );
+      return seed;
+    }
+  };
+
+  #if defined( VK_USE_PLATFORM_WIN32_KHR )
+template <> struct hash<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR const & memoryWin32HandlePropertiesKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, memoryWin32HandlePropertiesKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, memoryWin32HandlePropertiesKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, memoryWin32HandlePropertiesKHR.memoryTypeBits );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  #if defined( VK_USE_PLATFORM_FUCHSIA )
+template <> struct hash<VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA const & memoryZirconHandlePropertiesFUCHSIA) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, memoryZirconHandlePropertiesFUCHSIA.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, memoryZirconHandlePropertiesFUCHSIA.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, memoryZirconHandlePropertiesFUCHSIA.memoryTypeBits );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  #if defined( VK_USE_PLATFORM_METAL_EXT )
+template <> struct hash<VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT const & metalSurfaceCreateInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, metalSurfaceCreateInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, metalSurfaceCreateInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, metalSurfaceCreateInfoEXT.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, metalSurfaceCreateInfoEXT.pLayer );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT const & micromapBuildSizesInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, micromapBuildSizesInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, micromapBuildSizesInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, micromapBuildSizesInfoEXT.micromapSize );
+    VULKAN_HPP_HASH_COMBINE( seed, micromapBuildSizesInfoEXT.buildScratchSize );
+    VULKAN_HPP_HASH_COMBINE( seed, micromapBuildSizesInfoEXT.discardable );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT const & micromapCreateInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, micromapCreateInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, micromapCreateInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, micromapCreateInfoEXT.createFlags );
+    VULKAN_HPP_HASH_COMBINE( seed, micromapCreateInfoEXT.buffer );
+    VULKAN_HPP_HASH_COMBINE( seed, micromapCreateInfoEXT.offset );
+    VULKAN_HPP_HASH_COMBINE( seed, micromapCreateInfoEXT.size );
+    VULKAN_HPP_HASH_COMBINE( seed, micromapCreateInfoEXT.type );
+    VULKAN_HPP_HASH_COMBINE( seed, micromapCreateInfoEXT.deviceAddress );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::MicromapTriangleEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::MicromapTriangleEXT const & micromapTriangleEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, micromapTriangleEXT.dataOffset );
+    VULKAN_HPP_HASH_COMBINE( seed, micromapTriangleEXT.subdivisionLevel );
+    VULKAN_HPP_HASH_COMBINE( seed, micromapTriangleEXT.format );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT const & micromapVersionInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, micromapVersionInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, micromapVersionInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, micromapVersionInfoEXT.pVersionData );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT const & multiDrawIndexedInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, multiDrawIndexedInfoEXT.firstIndex );
+    VULKAN_HPP_HASH_COMBINE( seed, multiDrawIndexedInfoEXT.indexCount );
+    VULKAN_HPP_HASH_COMBINE( seed, multiDrawIndexedInfoEXT.vertexOffset );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT const & multiDrawInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, multiDrawInfoEXT.firstVertex );
+    VULKAN_HPP_HASH_COMBINE( seed, multiDrawInfoEXT.vertexCount );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT const & multisamplePropertiesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, multisamplePropertiesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, multisamplePropertiesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, multisamplePropertiesEXT.maxSampleLocationGridSize );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::MultisampledRenderToSingleSampledInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::MultisampledRenderToSingleSampledInfoEXT const & multisampledRenderToSingleSampledInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, multisampledRenderToSingleSampledInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, multisampledRenderToSingleSampledInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, multisampledRenderToSingleSampledInfoEXT.multisampledRenderToSingleSampledEnable );
+    VULKAN_HPP_HASH_COMBINE( seed, multisampledRenderToSingleSampledInfoEXT.rasterizationSamples );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::MultiviewPerViewAttributesInfoNVX>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::MultiviewPerViewAttributesInfoNVX const & multiviewPerViewAttributesInfoNVX) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, multiviewPerViewAttributesInfoNVX.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, multiviewPerViewAttributesInfoNVX.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, multiviewPerViewAttributesInfoNVX.perViewAttributes );
+    VULKAN_HPP_HASH_COMBINE( seed, multiviewPerViewAttributesInfoNVX.perViewAttributesPositionXOnly );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListEXT const & mutableDescriptorTypeListEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, mutableDescriptorTypeListEXT.descriptorTypeCount );
+    VULKAN_HPP_HASH_COMBINE( seed, mutableDescriptorTypeListEXT.pDescriptorTypes );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::MutableDescriptorTypeCreateInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::MutableDescriptorTypeCreateInfoEXT const & mutableDescriptorTypeCreateInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, mutableDescriptorTypeCreateInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, mutableDescriptorTypeCreateInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, mutableDescriptorTypeCreateInfoEXT.mutableDescriptorTypeListCount );
+    VULKAN_HPP_HASH_COMBINE( seed, mutableDescriptorTypeCreateInfoEXT.pMutableDescriptorTypeLists );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::OpaqueCaptureDescriptorDataCreateInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::OpaqueCaptureDescriptorDataCreateInfoEXT const & opaqueCaptureDescriptorDataCreateInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, opaqueCaptureDescriptorDataCreateInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, opaqueCaptureDescriptorDataCreateInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, opaqueCaptureDescriptorDataCreateInfoEXT.opaqueCaptureDescriptorData );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV const & opticalFlowExecuteInfoNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, opticalFlowExecuteInfoNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, opticalFlowExecuteInfoNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, opticalFlowExecuteInfoNV.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, opticalFlowExecuteInfoNV.regionCount );
+    VULKAN_HPP_HASH_COMBINE( seed, opticalFlowExecuteInfoNV.pRegions );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV const & opticalFlowImageFormatInfoNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, opticalFlowImageFormatInfoNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, opticalFlowImageFormatInfoNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, opticalFlowImageFormatInfoNV.usage );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV const & opticalFlowImageFormatPropertiesNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, opticalFlowImageFormatPropertiesNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, opticalFlowImageFormatPropertiesNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, opticalFlowImageFormatPropertiesNV.format );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV const & opticalFlowSessionCreateInfoNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.width );
+    VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.height );
+    VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.imageFormat );
+    VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.flowVectorFormat );
+    VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.costFormat );
+    VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.outputGridSize );
+    VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.hintGridSize );
+    VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.performanceLevel );
+    VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.flags );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreatePrivateDataInfoNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreatePrivateDataInfoNV const & opticalFlowSessionCreatePrivateDataInfoNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreatePrivateDataInfoNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreatePrivateDataInfoNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreatePrivateDataInfoNV.id );
+    VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreatePrivateDataInfoNV.size );
+    VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreatePrivateDataInfoNV.pPrivateData );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE const & pastPresentationTimingGOOGLE) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, pastPresentationTimingGOOGLE.presentID );
+    VULKAN_HPP_HASH_COMBINE( seed, pastPresentationTimingGOOGLE.desiredPresentTime );
+    VULKAN_HPP_HASH_COMBINE( seed, pastPresentationTimingGOOGLE.actualPresentTime );
+    VULKAN_HPP_HASH_COMBINE( seed, pastPresentationTimingGOOGLE.earliestPresentTime );
+    VULKAN_HPP_HASH_COMBINE( seed, pastPresentationTimingGOOGLE.presentMargin );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const & performanceConfigurationAcquireInfoINTEL) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, performanceConfigurationAcquireInfoINTEL.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, performanceConfigurationAcquireInfoINTEL.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, performanceConfigurationAcquireInfoINTEL.type );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR const & performanceCounterDescriptionKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, performanceCounterDescriptionKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, performanceCounterDescriptionKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, performanceCounterDescriptionKHR.flags );
+    for ( size_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, performanceCounterDescriptionKHR.name[i] );
+    }
+    for ( size_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, performanceCounterDescriptionKHR.category[i] );
+    }
+    for ( size_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, performanceCounterDescriptionKHR.description[i] );
+    }
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PerformanceCounterKHR const & performanceCounterKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, performanceCounterKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, performanceCounterKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, performanceCounterKHR.unit );
+    VULKAN_HPP_HASH_COMBINE( seed, performanceCounterKHR.scope );
+    VULKAN_HPP_HASH_COMBINE( seed, performanceCounterKHR.storage );
+    for ( size_t i = 0; i < VK_UUID_SIZE; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, performanceCounterKHR.uuid[i] );
+    }
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL const & performanceMarkerInfoINTEL) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, performanceMarkerInfoINTEL.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, performanceMarkerInfoINTEL.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, performanceMarkerInfoINTEL.marker );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL const & performanceOverrideInfoINTEL) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, performanceOverrideInfoINTEL.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, performanceOverrideInfoINTEL.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, performanceOverrideInfoINTEL.type );
+    VULKAN_HPP_HASH_COMBINE( seed, performanceOverrideInfoINTEL.enable );
+    VULKAN_HPP_HASH_COMBINE( seed, performanceOverrideInfoINTEL.parameter );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR const & performanceQuerySubmitInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, performanceQuerySubmitInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, performanceQuerySubmitInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, performanceQuerySubmitInfoKHR.counterPassIndex );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL const & performanceStreamMarkerInfoINTEL) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, performanceStreamMarkerInfoINTEL.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, performanceStreamMarkerInfoINTEL.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, performanceStreamMarkerInfoINTEL.marker );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures const & physicalDevice16BitStorageFeatures) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevice16BitStorageFeatures.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevice16BitStorageFeatures.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevice16BitStorageFeatures.storageBuffer16BitAccess );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevice16BitStorageFeatures.uniformAndStorageBuffer16BitAccess );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevice16BitStorageFeatures.storagePushConstant16 );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevice16BitStorageFeatures.storageInputOutput16 );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevice4444FormatsFeaturesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDevice4444FormatsFeaturesEXT const & physicalDevice4444FormatsFeaturesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevice4444FormatsFeaturesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevice4444FormatsFeaturesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevice4444FormatsFeaturesEXT.formatA4R4G4B4 );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevice4444FormatsFeaturesEXT.formatA4B4G4R4 );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeatures>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeatures const & physicalDevice8BitStorageFeatures) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevice8BitStorageFeatures.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevice8BitStorageFeatures.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevice8BitStorageFeatures.storageBuffer8BitAccess );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevice8BitStorageFeatures.uniformAndStorageBuffer8BitAccess );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevice8BitStorageFeatures.storagePushConstant8 );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceASTCDecodeFeaturesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceASTCDecodeFeaturesEXT const & physicalDeviceASTCDecodeFeaturesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceASTCDecodeFeaturesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceASTCDecodeFeaturesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceASTCDecodeFeaturesEXT.decodeModeSharedExponent );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructureFeaturesKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructureFeaturesKHR const & physicalDeviceAccelerationStructureFeaturesKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructureFeaturesKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructureFeaturesKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructureFeaturesKHR.accelerationStructure );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructureFeaturesKHR.accelerationStructureCaptureReplay );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructureFeaturesKHR.accelerationStructureIndirectBuild );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructureFeaturesKHR.accelerationStructureHostCommands );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructureFeaturesKHR.descriptorBindingAccelerationStructureUpdateAfterBind );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructurePropertiesKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructurePropertiesKHR const & physicalDeviceAccelerationStructurePropertiesKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructurePropertiesKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructurePropertiesKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructurePropertiesKHR.maxGeometryCount );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructurePropertiesKHR.maxInstanceCount );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructurePropertiesKHR.maxPrimitiveCount );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructurePropertiesKHR.maxPerStageDescriptorAccelerationStructures );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructurePropertiesKHR.maxPerStageDescriptorUpdateAfterBindAccelerationStructures );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructurePropertiesKHR.maxDescriptorSetAccelerationStructures );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructurePropertiesKHR.maxDescriptorSetUpdateAfterBindAccelerationStructures );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructurePropertiesKHR.minAccelerationStructureScratchOffsetAlignment );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceAddressBindingReportFeaturesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceAddressBindingReportFeaturesEXT const & physicalDeviceAddressBindingReportFeaturesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAddressBindingReportFeaturesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAddressBindingReportFeaturesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAddressBindingReportFeaturesEXT.reportAddressBinding );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceAmigoProfilingFeaturesSEC>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceAmigoProfilingFeaturesSEC const & physicalDeviceAmigoProfilingFeaturesSEC) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAmigoProfilingFeaturesSEC.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAmigoProfilingFeaturesSEC.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAmigoProfilingFeaturesSEC.amigoProfiling );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & physicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT.attachmentFeedbackLoopLayout );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedFeaturesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & physicalDeviceBlendOperationAdvancedFeaturesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBlendOperationAdvancedFeaturesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBlendOperationAdvancedFeaturesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBlendOperationAdvancedFeaturesEXT.advancedBlendCoherentOperations );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedPropertiesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & physicalDeviceBlendOperationAdvancedPropertiesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBlendOperationAdvancedPropertiesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBlendOperationAdvancedPropertiesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBlendOperationAdvancedPropertiesEXT.advancedBlendMaxColorAttachments );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBlendOperationAdvancedPropertiesEXT.advancedBlendIndependentBlend );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBlendOperationAdvancedPropertiesEXT.advancedBlendNonPremultipliedSrcColor );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBlendOperationAdvancedPropertiesEXT.advancedBlendNonPremultipliedDstColor );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBlendOperationAdvancedPropertiesEXT.advancedBlendCorrelatedOverlap );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBlendOperationAdvancedPropertiesEXT.advancedBlendAllOperations );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceBorderColorSwizzleFeaturesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceBorderColorSwizzleFeaturesEXT const & physicalDeviceBorderColorSwizzleFeaturesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBorderColorSwizzleFeaturesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBorderColorSwizzleFeaturesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBorderColorSwizzleFeaturesEXT.borderColorSwizzle );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBorderColorSwizzleFeaturesEXT.borderColorSwizzleFromImage );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeatures>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeatures const & physicalDeviceBufferDeviceAddressFeatures) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBufferDeviceAddressFeatures.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBufferDeviceAddressFeatures.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBufferDeviceAddressFeatures.bufferDeviceAddress );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBufferDeviceAddressFeatures.bufferDeviceAddressCaptureReplay );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBufferDeviceAddressFeatures.bufferDeviceAddressMultiDevice );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesEXT const & physicalDeviceBufferDeviceAddressFeaturesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBufferDeviceAddressFeaturesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBufferDeviceAddressFeaturesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBufferDeviceAddressFeaturesEXT.bufferDeviceAddress );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBufferDeviceAddressFeaturesEXT.bufferDeviceAddressCaptureReplay );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBufferDeviceAddressFeaturesEXT.bufferDeviceAddressMultiDevice );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterCullingShaderFeaturesHUAWEI>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterCullingShaderFeaturesHUAWEI const & physicalDeviceClusterCullingShaderFeaturesHUAWEI) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterCullingShaderFeaturesHUAWEI.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterCullingShaderFeaturesHUAWEI.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterCullingShaderFeaturesHUAWEI.clustercullingShader );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterCullingShaderFeaturesHUAWEI.multiviewClusterCullingShader );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterCullingShaderPropertiesHUAWEI>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterCullingShaderPropertiesHUAWEI const & physicalDeviceClusterCullingShaderPropertiesHUAWEI) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterCullingShaderPropertiesHUAWEI.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterCullingShaderPropertiesHUAWEI.pNext );
+    for ( size_t i = 0; i < 3; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterCullingShaderPropertiesHUAWEI.maxWorkGroupCount[i] );
+    }
+    for ( size_t i = 0; i < 3; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterCullingShaderPropertiesHUAWEI.maxWorkGroupSize[i] );
+    }
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterCullingShaderPropertiesHUAWEI.maxOutputClusterCount );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD const & physicalDeviceCoherentMemoryFeaturesAMD) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCoherentMemoryFeaturesAMD.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCoherentMemoryFeaturesAMD.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCoherentMemoryFeaturesAMD.deviceCoherentMemory );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceColorWriteEnableFeaturesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceColorWriteEnableFeaturesEXT const & physicalDeviceColorWriteEnableFeaturesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceColorWriteEnableFeaturesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceColorWriteEnableFeaturesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceColorWriteEnableFeaturesEXT.colorWriteEnable );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesNV const & physicalDeviceComputeShaderDerivativesFeaturesNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceComputeShaderDerivativesFeaturesNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceComputeShaderDerivativesFeaturesNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceComputeShaderDerivativesFeaturesNV.computeDerivativeGroupQuads );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceComputeShaderDerivativesFeaturesNV.computeDerivativeGroupLinear );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceConditionalRenderingFeaturesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceConditionalRenderingFeaturesEXT const & physicalDeviceConditionalRenderingFeaturesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConditionalRenderingFeaturesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConditionalRenderingFeaturesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConditionalRenderingFeaturesEXT.conditionalRendering );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConditionalRenderingFeaturesEXT.inheritedConditionalRendering );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceConservativeRasterizationPropertiesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceConservativeRasterizationPropertiesEXT const & physicalDeviceConservativeRasterizationPropertiesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.primitiveOverestimationSize );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.maxExtraPrimitiveOverestimationSize );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.extraPrimitiveOverestimationSizeGranularity );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.primitiveUnderestimation );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.conservativePointAndLineRasterization );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.degenerateTrianglesRasterized );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.degenerateLinesRasterized );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.fullyCoveredFragmentShaderInputVariable );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.conservativeRasterizationPostDepthCoverage );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesNV const & physicalDeviceCooperativeMatrixFeaturesNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixFeaturesNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixFeaturesNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixFeaturesNV.cooperativeMatrix );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixFeaturesNV.cooperativeMatrixRobustBufferAccess );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesNV const & physicalDeviceCooperativeMatrixPropertiesNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixPropertiesNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixPropertiesNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixPropertiesNV.cooperativeMatrixSupportedStages );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceCopyMemoryIndirectFeaturesNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceCopyMemoryIndirectFeaturesNV const & physicalDeviceCopyMemoryIndirectFeaturesNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCopyMemoryIndirectFeaturesNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCopyMemoryIndirectFeaturesNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCopyMemoryIndirectFeaturesNV.indirectCopy );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceCopyMemoryIndirectPropertiesNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceCopyMemoryIndirectPropertiesNV const & physicalDeviceCopyMemoryIndirectPropertiesNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCopyMemoryIndirectPropertiesNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCopyMemoryIndirectPropertiesNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCopyMemoryIndirectPropertiesNV.supportedQueues );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceCornerSampledImageFeaturesNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceCornerSampledImageFeaturesNV const & physicalDeviceCornerSampledImageFeaturesNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCornerSampledImageFeaturesNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCornerSampledImageFeaturesNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCornerSampledImageFeaturesNV.cornerSampledImage );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceCoverageReductionModeFeaturesNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceCoverageReductionModeFeaturesNV const & physicalDeviceCoverageReductionModeFeaturesNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCoverageReductionModeFeaturesNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCoverageReductionModeFeaturesNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCoverageReductionModeFeaturesNV.coverageReductionMode );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorFeaturesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorFeaturesEXT const & physicalDeviceCustomBorderColorFeaturesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCustomBorderColorFeaturesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCustomBorderColorFeaturesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCustomBorderColorFeaturesEXT.customBorderColors );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCustomBorderColorFeaturesEXT.customBorderColorWithoutFormat );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorPropertiesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorPropertiesEXT const & physicalDeviceCustomBorderColorPropertiesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCustomBorderColorPropertiesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCustomBorderColorPropertiesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCustomBorderColorPropertiesEXT.maxCustomBorderColorSamplers );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & physicalDeviceDedicatedAllocationImageAliasingFeaturesNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDedicatedAllocationImageAliasingFeaturesNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDedicatedAllocationImageAliasingFeaturesNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDedicatedAllocationImageAliasingFeaturesNV.dedicatedAllocationImageAliasing );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClampZeroOneFeaturesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClampZeroOneFeaturesEXT const & physicalDeviceDepthClampZeroOneFeaturesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthClampZeroOneFeaturesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthClampZeroOneFeaturesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthClampZeroOneFeaturesEXT.depthClampZeroOne );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipControlFeaturesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipControlFeaturesEXT const & physicalDeviceDepthClipControlFeaturesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthClipControlFeaturesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthClipControlFeaturesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthClipControlFeaturesEXT.depthClipControl );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipEnableFeaturesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipEnableFeaturesEXT const & physicalDeviceDepthClipEnableFeaturesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthClipEnableFeaturesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthClipEnableFeaturesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthClipEnableFeaturesEXT.depthClipEnable );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthStencilResolveProperties>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthStencilResolveProperties const & physicalDeviceDepthStencilResolveProperties) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthStencilResolveProperties.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthStencilResolveProperties.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthStencilResolveProperties.supportedDepthResolveModes );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthStencilResolveProperties.supportedStencilResolveModes );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthStencilResolveProperties.independentResolveNone );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthStencilResolveProperties.independentResolve );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const & physicalDeviceDescriptorBufferDensityMapPropertiesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferDensityMapPropertiesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferDensityMapPropertiesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferDensityMapPropertiesEXT.combinedImageSamplerDensityMapDescriptorSize );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferFeaturesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferFeaturesEXT const & physicalDeviceDescriptorBufferFeaturesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferFeaturesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferFeaturesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferFeaturesEXT.descriptorBuffer );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferFeaturesEXT.descriptorBufferCaptureReplay );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferFeaturesEXT.descriptorBufferImageLayoutIgnored );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferFeaturesEXT.descriptorBufferPushDescriptors );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferPropertiesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferPropertiesEXT const & physicalDeviceDescriptorBufferPropertiesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.combinedImageSamplerDescriptorSingleArray );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.bufferlessPushDescriptors );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.allowSamplerImageViewPostSubmitCreation );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.descriptorBufferOffsetAlignment );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.maxDescriptorBufferBindings );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.maxResourceDescriptorBufferBindings );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.maxSamplerDescriptorBufferBindings );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.maxEmbeddedImmutableSamplerBindings );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.maxEmbeddedImmutableSamplers );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.bufferCaptureReplayDescriptorDataSize );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.imageCaptureReplayDescriptorDataSize );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.imageViewCaptureReplayDescriptorDataSize );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.samplerCaptureReplayDescriptorDataSize );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.accelerationStructureCaptureReplayDescriptorDataSize );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.samplerDescriptorSize );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.combinedImageSamplerDescriptorSize );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.sampledImageDescriptorSize );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.storageImageDescriptorSize );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.uniformTexelBufferDescriptorSize );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.robustUniformTexelBufferDescriptorSize );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.storageTexelBufferDescriptorSize );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.robustStorageTexelBufferDescriptorSize );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.uniformBufferDescriptorSize );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.robustUniformBufferDescriptorSize );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.storageBufferDescriptorSize );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.robustStorageBufferDescriptorSize );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.inputAttachmentDescriptorSize );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.accelerationStructureDescriptorSize );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.maxSamplerDescriptorBufferRange );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.maxResourceDescriptorBufferRange );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.samplerDescriptorBufferAddressSpaceSize );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.resourceDescriptorBufferAddressSpaceSize );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.descriptorBufferAddressSpaceSize );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeatures>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeatures const & physicalDeviceDescriptorIndexingFeatures) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.shaderInputAttachmentArrayDynamicIndexing );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.shaderUniformTexelBufferArrayDynamicIndexing );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.shaderStorageTexelBufferArrayDynamicIndexing );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.shaderUniformBufferArrayNonUniformIndexing );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.shaderSampledImageArrayNonUniformIndexing );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.shaderStorageBufferArrayNonUniformIndexing );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.shaderStorageImageArrayNonUniformIndexing );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.shaderInputAttachmentArrayNonUniformIndexing );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.shaderUniformTexelBufferArrayNonUniformIndexing );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.shaderStorageTexelBufferArrayNonUniformIndexing );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.descriptorBindingUniformBufferUpdateAfterBind );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.descriptorBindingSampledImageUpdateAfterBind );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.descriptorBindingStorageImageUpdateAfterBind );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.descriptorBindingStorageBufferUpdateAfterBind );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.descriptorBindingUniformTexelBufferUpdateAfterBind );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.descriptorBindingStorageTexelBufferUpdateAfterBind );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.descriptorBindingUpdateUnusedWhilePending );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.descriptorBindingPartiallyBound );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.descriptorBindingVariableDescriptorCount );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.runtimeDescriptorArray );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingProperties>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingProperties const & physicalDeviceDescriptorIndexingProperties) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.maxUpdateAfterBindDescriptorsInAllPools );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.shaderUniformBufferArrayNonUniformIndexingNative );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.shaderSampledImageArrayNonUniformIndexingNative );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.shaderStorageBufferArrayNonUniformIndexingNative );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.shaderStorageImageArrayNonUniformIndexingNative );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.shaderInputAttachmentArrayNonUniformIndexingNative );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.robustBufferAccessUpdateAfterBind );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.quadDivergentImplicitLod );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.maxPerStageDescriptorUpdateAfterBindSamplers );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.maxPerStageDescriptorUpdateAfterBindUniformBuffers );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.maxPerStageDescriptorUpdateAfterBindStorageBuffers );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.maxPerStageDescriptorUpdateAfterBindSampledImages );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.maxPerStageDescriptorUpdateAfterBindStorageImages );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.maxPerStageDescriptorUpdateAfterBindInputAttachments );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.maxPerStageUpdateAfterBindResources );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.maxDescriptorSetUpdateAfterBindSamplers );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.maxDescriptorSetUpdateAfterBindUniformBuffers );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.maxDescriptorSetUpdateAfterBindStorageBuffers );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.maxDescriptorSetUpdateAfterBindSampledImages );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.maxDescriptorSetUpdateAfterBindStorageImages );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.maxDescriptorSetUpdateAfterBindInputAttachments );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & physicalDeviceDescriptorSetHostMappingFeaturesVALVE) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorSetHostMappingFeaturesVALVE.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorSetHostMappingFeaturesVALVE.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorSetHostMappingFeaturesVALVE.descriptorSetHostMapping );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsFeaturesNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & physicalDeviceDeviceGeneratedCommandsFeaturesNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsFeaturesNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsFeaturesNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsFeaturesNV.deviceGeneratedCommands );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsPropertiesNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & physicalDeviceDeviceGeneratedCommandsPropertiesNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesNV.maxGraphicsShaderGroupCount );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesNV.maxIndirectSequenceCount );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesNV.maxIndirectCommandsTokenCount );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesNV.maxIndirectCommandsStreamCount );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesNV.maxIndirectCommandsTokenOffset );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesNV.maxIndirectCommandsStreamStride );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesNV.minSequencesCountBufferOffsetAlignment );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesNV.minSequencesIndexBufferOffsetAlignment );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesNV.minIndirectCommandsBufferOffsetAlignment );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceMemoryReportFeaturesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceMemoryReportFeaturesEXT const & physicalDeviceDeviceMemoryReportFeaturesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceMemoryReportFeaturesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceMemoryReportFeaturesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceMemoryReportFeaturesEXT.deviceMemoryReport );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDiagnosticsConfigFeaturesNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceDiagnosticsConfigFeaturesNV const & physicalDeviceDiagnosticsConfigFeaturesNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDiagnosticsConfigFeaturesNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDiagnosticsConfigFeaturesNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDiagnosticsConfigFeaturesNV.diagnosticsConfig );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDiscardRectanglePropertiesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceDiscardRectanglePropertiesEXT const & physicalDeviceDiscardRectanglePropertiesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDiscardRectanglePropertiesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDiscardRectanglePropertiesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDiscardRectanglePropertiesEXT.maxDiscardRectangles );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverProperties>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverProperties const & physicalDeviceDriverProperties) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDriverProperties.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDriverProperties.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDriverProperties.driverID );
+    for ( size_t i = 0; i < VK_MAX_DRIVER_NAME_SIZE; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDriverProperties.driverName[i] );
+    }
+    for ( size_t i = 0; i < VK_MAX_DRIVER_INFO_SIZE; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDriverProperties.driverInfo[i] );
+    }
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDriverProperties.conformanceVersion );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDrmPropertiesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceDrmPropertiesEXT const & physicalDeviceDrmPropertiesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDrmPropertiesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDrmPropertiesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDrmPropertiesEXT.hasPrimary );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDrmPropertiesEXT.hasRender );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDrmPropertiesEXT.primaryMajor );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDrmPropertiesEXT.primaryMinor );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDrmPropertiesEXT.renderMajor );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDrmPropertiesEXT.renderMinor );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingFeatures>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingFeatures const & physicalDeviceDynamicRenderingFeatures) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDynamicRenderingFeatures.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDynamicRenderingFeatures.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDynamicRenderingFeatures.dynamicRendering );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceExclusiveScissorFeaturesNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceExclusiveScissorFeaturesNV const & physicalDeviceExclusiveScissorFeaturesNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExclusiveScissorFeaturesNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExclusiveScissorFeaturesNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExclusiveScissorFeaturesNV.exclusiveScissor );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState2FeaturesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState2FeaturesEXT const & physicalDeviceExtendedDynamicState2FeaturesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState2FeaturesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState2FeaturesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState2FeaturesEXT.extendedDynamicState2 );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState2FeaturesEXT.extendedDynamicState2LogicOp );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState2FeaturesEXT.extendedDynamicState2PatchControlPoints );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3FeaturesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3FeaturesEXT const & physicalDeviceExtendedDynamicState3FeaturesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3TessellationDomainOrigin );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3DepthClampEnable );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3PolygonMode );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3RasterizationSamples );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3SampleMask );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3AlphaToCoverageEnable );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3AlphaToOneEnable );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3LogicOpEnable );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3ColorBlendEnable );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3ColorBlendEquation );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3ColorWriteMask );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3RasterizationStream );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3ConservativeRasterizationMode );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3ExtraPrimitiveOverestimationSize );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3DepthClipEnable );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3SampleLocationsEnable );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3ColorBlendAdvanced );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3ProvokingVertexMode );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3LineRasterizationMode );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3LineStippleEnable );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3DepthClipNegativeOneToOne );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3ViewportWScalingEnable );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3ViewportSwizzle );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3CoverageToColorEnable );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3CoverageToColorLocation );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3CoverageModulationMode );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3CoverageModulationTableEnable );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3CoverageModulationTable );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3CoverageReductionMode );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3RepresentativeFragmentTestEnable );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3ShadingRateImageEnable );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3PropertiesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3PropertiesEXT const & physicalDeviceExtendedDynamicState3PropertiesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3PropertiesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3PropertiesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3PropertiesEXT.dynamicPrimitiveTopologyUnrestricted );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicStateFeaturesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicStateFeaturesEXT const & physicalDeviceExtendedDynamicStateFeaturesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicStateFeaturesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicStateFeaturesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicStateFeaturesEXT.extendedDynamicState );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo const & physicalDeviceExternalBufferInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalBufferInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalBufferInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalBufferInfo.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalBufferInfo.usage );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalBufferInfo.handleType );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo const & physicalDeviceExternalFenceInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalFenceInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalFenceInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalFenceInfo.handleType );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfo const & physicalDeviceExternalImageFormatInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalImageFormatInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalImageFormatInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalImageFormatInfo.handleType );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryHostPropertiesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryHostPropertiesEXT const & physicalDeviceExternalMemoryHostPropertiesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalMemoryHostPropertiesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalMemoryHostPropertiesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalMemoryHostPropertiesEXT.minImportedHostPointerAlignment );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryRDMAFeaturesNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryRDMAFeaturesNV const & physicalDeviceExternalMemoryRDMAFeaturesNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalMemoryRDMAFeaturesNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalMemoryRDMAFeaturesNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalMemoryRDMAFeaturesNV.externalMemoryRDMA );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo const & physicalDeviceExternalSemaphoreInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalSemaphoreInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalSemaphoreInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalSemaphoreInfo.handleType );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceFaultFeaturesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceFaultFeaturesEXT const & physicalDeviceFaultFeaturesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFaultFeaturesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFaultFeaturesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFaultFeaturesEXT.deviceFault );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFaultFeaturesEXT.deviceFaultVendorBinary );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 const & physicalDeviceFeatures2) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures2.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures2.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures2.features );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsProperties>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsProperties const & physicalDeviceFloatControlsProperties) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.denormBehaviorIndependence );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.roundingModeIndependence );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.shaderSignedZeroInfNanPreserveFloat16 );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.shaderSignedZeroInfNanPreserveFloat32 );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.shaderSignedZeroInfNanPreserveFloat64 );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.shaderDenormPreserveFloat16 );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.shaderDenormPreserveFloat32 );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.shaderDenormPreserveFloat64 );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.shaderDenormFlushToZeroFloat16 );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.shaderDenormFlushToZeroFloat32 );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.shaderDenormFlushToZeroFloat64 );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.shaderRoundingModeRTEFloat16 );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.shaderRoundingModeRTEFloat32 );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.shaderRoundingModeRTEFloat64 );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.shaderRoundingModeRTZFloat16 );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.shaderRoundingModeRTZFloat32 );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.shaderRoundingModeRTZFloat64 );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2FeaturesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2FeaturesEXT const & physicalDeviceFragmentDensityMap2FeaturesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMap2FeaturesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMap2FeaturesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMap2FeaturesEXT.fragmentDensityMapDeferred );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2PropertiesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2PropertiesEXT const & physicalDeviceFragmentDensityMap2PropertiesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMap2PropertiesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMap2PropertiesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMap2PropertiesEXT.subsampledLoads );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMap2PropertiesEXT.subsampledCoarseReconstructionEarlyAccess );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMap2PropertiesEXT.maxSubsampledArrayLayers );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMap2PropertiesEXT.maxDescriptorSetSubsampledSamplers );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapFeaturesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapFeaturesEXT const & physicalDeviceFragmentDensityMapFeaturesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapFeaturesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapFeaturesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapFeaturesEXT.fragmentDensityMap );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapFeaturesEXT.fragmentDensityMapDynamic );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapFeaturesEXT.fragmentDensityMapNonSubsampledImages );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const & physicalDeviceFragmentDensityMapOffsetFeaturesQCOM) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapOffsetFeaturesQCOM.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapOffsetFeaturesQCOM.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapOffsetFeaturesQCOM.fragmentDensityMapOffset );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const & physicalDeviceFragmentDensityMapOffsetPropertiesQCOM) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapOffsetPropertiesQCOM.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapOffsetPropertiesQCOM.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapOffsetPropertiesQCOM.fragmentDensityOffsetGranularity );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapPropertiesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapPropertiesEXT const & physicalDeviceFragmentDensityMapPropertiesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapPropertiesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapPropertiesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapPropertiesEXT.minFragmentDensityTexelSize );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapPropertiesEXT.maxFragmentDensityTexelSize );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapPropertiesEXT.fragmentDensityInvocations );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & physicalDeviceFragmentShaderBarycentricFeaturesKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShaderBarycentricFeaturesKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShaderBarycentricFeaturesKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShaderBarycentricFeaturesKHR.fragmentShaderBarycentric );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricPropertiesKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & physicalDeviceFragmentShaderBarycentricPropertiesKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShaderBarycentricPropertiesKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShaderBarycentricPropertiesKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShaderBarycentricPropertiesKHR.triStripVertexOrderIndependentOfProvokingVertex );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderInterlockFeaturesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & physicalDeviceFragmentShaderInterlockFeaturesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShaderInterlockFeaturesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShaderInterlockFeaturesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShaderInterlockFeaturesEXT.fragmentShaderSampleInterlock );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShaderInterlockFeaturesEXT.fragmentShaderPixelInterlock );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShaderInterlockFeaturesEXT.fragmentShaderShadingRateInterlock );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsFeaturesNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & physicalDeviceFragmentShadingRateEnumsFeaturesNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateEnumsFeaturesNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateEnumsFeaturesNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateEnumsFeaturesNV.fragmentShadingRateEnums );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateEnumsFeaturesNV.supersampleFragmentShadingRates );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateEnumsFeaturesNV.noInvocationFragmentShadingRates );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsPropertiesNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & physicalDeviceFragmentShadingRateEnumsPropertiesNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateEnumsPropertiesNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateEnumsPropertiesNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateEnumsPropertiesNV.maxFragmentShadingRateInvocationCount );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateFeaturesKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateFeaturesKHR const & physicalDeviceFragmentShadingRateFeaturesKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateFeaturesKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateFeaturesKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateFeaturesKHR.pipelineFragmentShadingRate );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateFeaturesKHR.primitiveFragmentShadingRate );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateFeaturesKHR.attachmentFragmentShadingRate );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR const & physicalDeviceFragmentShadingRateKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateKHR.sampleCounts );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateKHR.fragmentSize );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRatePropertiesKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRatePropertiesKHR const & physicalDeviceFragmentShadingRatePropertiesKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.minFragmentShadingRateAttachmentTexelSize );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.maxFragmentShadingRateAttachmentTexelSize );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.maxFragmentShadingRateAttachmentTexelSizeAspectRatio );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.primitiveFragmentShadingRateWithMultipleViewports );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.layeredShadingRateAttachments );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.fragmentShadingRateNonTrivialCombinerOps );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.maxFragmentSize );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.maxFragmentSizeAspectRatio );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.maxFragmentShadingRateCoverageSamples );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.maxFragmentShadingRateRasterizationSamples );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.fragmentShadingRateWithShaderDepthStencilWrites );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.fragmentShadingRateWithSampleMask );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.fragmentShadingRateWithShaderSampleMask );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.fragmentShadingRateWithConservativeRasterization );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.fragmentShadingRateWithFragmentShaderInterlock );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.fragmentShadingRateWithCustomSampleLocations );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.fragmentShadingRateStrictMultiplyCombiner );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceGlobalPriorityQueryFeaturesKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceGlobalPriorityQueryFeaturesKHR const & physicalDeviceGlobalPriorityQueryFeaturesKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGlobalPriorityQueryFeaturesKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGlobalPriorityQueryFeaturesKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGlobalPriorityQueryFeaturesKHR.globalPriorityQuery );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & physicalDeviceGraphicsPipelineLibraryFeaturesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGraphicsPipelineLibraryFeaturesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGraphicsPipelineLibraryFeaturesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGraphicsPipelineLibraryFeaturesEXT.graphicsPipelineLibrary );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & physicalDeviceGraphicsPipelineLibraryPropertiesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGraphicsPipelineLibraryPropertiesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGraphicsPipelineLibraryPropertiesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGraphicsPipelineLibraryPropertiesEXT.graphicsPipelineLibraryFastLinking );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGraphicsPipelineLibraryPropertiesEXT.graphicsPipelineLibraryIndependentInterpolationDecoration );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties const & physicalDeviceGroupProperties) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGroupProperties.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGroupProperties.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGroupProperties.physicalDeviceCount );
+    for ( size_t i = 0; i < VK_MAX_DEVICE_GROUP_SIZE; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGroupProperties.physicalDevices[i] );
+    }
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGroupProperties.subsetAllocation );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeatures>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeatures const & physicalDeviceHostQueryResetFeatures) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostQueryResetFeatures.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostQueryResetFeatures.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostQueryResetFeatures.hostQueryReset );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceIDProperties>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceIDProperties const & physicalDeviceIDProperties) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceIDProperties.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceIDProperties.pNext );
+    for ( size_t i = 0; i < VK_UUID_SIZE; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceIDProperties.deviceUUID[i] );
+    }
+    for ( size_t i = 0; i < VK_UUID_SIZE; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceIDProperties.driverUUID[i] );
+    }
+    for ( size_t i = 0; i < VK_LUID_SIZE; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceIDProperties.deviceLUID[i] );
+    }
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceIDProperties.deviceNodeMask );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceIDProperties.deviceLUIDValid );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceImage2DViewOf3DFeaturesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceImage2DViewOf3DFeaturesEXT const & physicalDeviceImage2DViewOf3DFeaturesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImage2DViewOf3DFeaturesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImage2DViewOf3DFeaturesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImage2DViewOf3DFeaturesEXT.image2DViewOf3D );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImage2DViewOf3DFeaturesEXT.sampler2DViewOf3D );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageCompressionControlFeaturesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceImageCompressionControlFeaturesEXT const & physicalDeviceImageCompressionControlFeaturesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageCompressionControlFeaturesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageCompressionControlFeaturesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageCompressionControlFeaturesEXT.imageCompressionControl );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & physicalDeviceImageCompressionControlSwapchainFeaturesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageCompressionControlSwapchainFeaturesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageCompressionControlSwapchainFeaturesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageCompressionControlSwapchainFeaturesEXT.imageCompressionControlSwapchain );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageDrmFormatModifierInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceImageDrmFormatModifierInfoEXT const & physicalDeviceImageDrmFormatModifierInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageDrmFormatModifierInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageDrmFormatModifierInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageDrmFormatModifierInfoEXT.drmFormatModifier );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageDrmFormatModifierInfoEXT.sharingMode );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageDrmFormatModifierInfoEXT.queueFamilyIndexCount );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageDrmFormatModifierInfoEXT.pQueueFamilyIndices );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 const & physicalDeviceImageFormatInfo2) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageFormatInfo2.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageFormatInfo2.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageFormatInfo2.format );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageFormatInfo2.type );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageFormatInfo2.tiling );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageFormatInfo2.usage );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageFormatInfo2.flags );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessingFeaturesQCOM>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessingFeaturesQCOM const & physicalDeviceImageProcessingFeaturesQCOM) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageProcessingFeaturesQCOM.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageProcessingFeaturesQCOM.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageProcessingFeaturesQCOM.textureSampleWeighted );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageProcessingFeaturesQCOM.textureBoxFilter );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageProcessingFeaturesQCOM.textureBlockMatch );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessingPropertiesQCOM>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessingPropertiesQCOM const & physicalDeviceImageProcessingPropertiesQCOM) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageProcessingPropertiesQCOM.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageProcessingPropertiesQCOM.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageProcessingPropertiesQCOM.maxWeightFilterPhases );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageProcessingPropertiesQCOM.maxWeightFilterDimension );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageProcessingPropertiesQCOM.maxBlockMatchRegion );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageProcessingPropertiesQCOM.maxBoxFilterBlockSize );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageRobustnessFeatures>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceImageRobustnessFeatures const & physicalDeviceImageRobustnessFeatures) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageRobustnessFeatures.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageRobustnessFeatures.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageRobustnessFeatures.robustImageAccess );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewImageFormatInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewImageFormatInfoEXT const & physicalDeviceImageViewImageFormatInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageViewImageFormatInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageViewImageFormatInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageViewImageFormatInfoEXT.imageViewType );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewMinLodFeaturesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewMinLodFeaturesEXT const & physicalDeviceImageViewMinLodFeaturesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageViewMinLodFeaturesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageViewMinLodFeaturesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageViewMinLodFeaturesEXT.minLod );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeatures>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeatures const & physicalDeviceImagelessFramebufferFeatures) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImagelessFramebufferFeatures.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImagelessFramebufferFeatures.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImagelessFramebufferFeatures.imagelessFramebuffer );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT const & physicalDeviceIndexTypeUint8FeaturesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceIndexTypeUint8FeaturesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceIndexTypeUint8FeaturesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceIndexTypeUint8FeaturesEXT.indexTypeUint8 );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceInheritedViewportScissorFeaturesNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceInheritedViewportScissorFeaturesNV const & physicalDeviceInheritedViewportScissorFeaturesNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInheritedViewportScissorFeaturesNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInheritedViewportScissorFeaturesNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInheritedViewportScissorFeaturesNV.inheritedViewportScissor2D );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeatures>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeatures const & physicalDeviceInlineUniformBlockFeatures) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInlineUniformBlockFeatures.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInlineUniformBlockFeatures.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInlineUniformBlockFeatures.inlineUniformBlock );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInlineUniformBlockFeatures.descriptorBindingInlineUniformBlockUpdateAfterBind );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockProperties>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockProperties const & physicalDeviceInlineUniformBlockProperties) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInlineUniformBlockProperties.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInlineUniformBlockProperties.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInlineUniformBlockProperties.maxInlineUniformBlockSize );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInlineUniformBlockProperties.maxPerStageDescriptorInlineUniformBlocks );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInlineUniformBlockProperties.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInlineUniformBlockProperties.maxDescriptorSetInlineUniformBlocks );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInlineUniformBlockProperties.maxDescriptorSetUpdateAfterBindInlineUniformBlocks );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceInvocationMaskFeaturesHUAWEI>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceInvocationMaskFeaturesHUAWEI const & physicalDeviceInvocationMaskFeaturesHUAWEI) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInvocationMaskFeaturesHUAWEI.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInvocationMaskFeaturesHUAWEI.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInvocationMaskFeaturesHUAWEI.invocationMask );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceLegacyDitheringFeaturesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceLegacyDitheringFeaturesEXT const & physicalDeviceLegacyDitheringFeaturesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLegacyDitheringFeaturesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLegacyDitheringFeaturesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLegacyDitheringFeaturesEXT.legacyDithering );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits const & physicalDeviceLimits) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxImageDimension1D );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxImageDimension2D );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxImageDimension3D );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxImageDimensionCube );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxImageArrayLayers );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxTexelBufferElements );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxUniformBufferRange );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxStorageBufferRange );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxPushConstantsSize );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxMemoryAllocationCount );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxSamplerAllocationCount );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.bufferImageGranularity );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.sparseAddressSpaceSize );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxBoundDescriptorSets );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxPerStageDescriptorSamplers );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxPerStageDescriptorUniformBuffers );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxPerStageDescriptorStorageBuffers );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxPerStageDescriptorSampledImages );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxPerStageDescriptorStorageImages );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxPerStageDescriptorInputAttachments );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxPerStageResources );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxDescriptorSetSamplers );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxDescriptorSetUniformBuffers );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxDescriptorSetUniformBuffersDynamic );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxDescriptorSetStorageBuffers );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxDescriptorSetStorageBuffersDynamic );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxDescriptorSetSampledImages );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxDescriptorSetStorageImages );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxDescriptorSetInputAttachments );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxVertexInputAttributes );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxVertexInputBindings );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxVertexInputAttributeOffset );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxVertexInputBindingStride );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxVertexOutputComponents );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxTessellationGenerationLevel );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxTessellationPatchSize );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxTessellationControlPerVertexInputComponents );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxTessellationControlPerVertexOutputComponents );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxTessellationControlPerPatchOutputComponents );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxTessellationControlTotalOutputComponents );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxTessellationEvaluationInputComponents );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxTessellationEvaluationOutputComponents );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxGeometryShaderInvocations );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxGeometryInputComponents );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxGeometryOutputComponents );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxGeometryOutputVertices );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxGeometryTotalOutputComponents );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxFragmentInputComponents );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxFragmentOutputAttachments );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxFragmentDualSrcAttachments );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxFragmentCombinedOutputResources );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxComputeSharedMemorySize );
+    for ( size_t i = 0; i < 3; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxComputeWorkGroupCount[i] );
+    }
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxComputeWorkGroupInvocations );
+    for ( size_t i = 0; i < 3; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxComputeWorkGroupSize[i] );
+    }
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.subPixelPrecisionBits );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.subTexelPrecisionBits );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.mipmapPrecisionBits );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxDrawIndexedIndexValue );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxDrawIndirectCount );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxSamplerLodBias );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxSamplerAnisotropy );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxViewports );
+    for ( size_t i = 0; i < 2; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxViewportDimensions[i] );
+    }
+    for ( size_t i = 0; i < 2; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.viewportBoundsRange[i] );
+    }
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.viewportSubPixelBits );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.minMemoryMapAlignment );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.minTexelBufferOffsetAlignment );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.minUniformBufferOffsetAlignment );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.minStorageBufferOffsetAlignment );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.minTexelOffset );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxTexelOffset );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.minTexelGatherOffset );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxTexelGatherOffset );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.minInterpolationOffset );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxInterpolationOffset );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.subPixelInterpolationOffsetBits );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxFramebufferWidth );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxFramebufferHeight );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxFramebufferLayers );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.framebufferColorSampleCounts );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.framebufferDepthSampleCounts );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.framebufferStencilSampleCounts );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.framebufferNoAttachmentsSampleCounts );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxColorAttachments );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.sampledImageColorSampleCounts );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.sampledImageIntegerSampleCounts );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.sampledImageDepthSampleCounts );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.sampledImageStencilSampleCounts );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.storageImageSampleCounts );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxSampleMaskWords );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.timestampComputeAndGraphics );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.timestampPeriod );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxClipDistances );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxCullDistances );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxCombinedClipAndCullDistances );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.discreteQueuePriorities );
+    for ( size_t i = 0; i < 2; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.pointSizeRange[i] );
+    }
+    for ( size_t i = 0; i < 2; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.lineWidthRange[i] );
+    }
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.pointSizeGranularity );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.lineWidthGranularity );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.strictLines );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.standardSampleLocations );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.optimalBufferCopyOffsetAlignment );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.optimalBufferCopyRowPitchAlignment );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.nonCoherentAtomSize );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesEXT>  // NOTE(review): generated by VulkanHppGenerator (vulkan_hash.hpp) — do not hand-edit; regenerate instead
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesEXT const & physicalDeviceLineRasterizationFeaturesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;  // standard seed/combine pattern: fold every member (incl. sType/pNext) into the hash
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationFeaturesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationFeaturesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationFeaturesEXT.rectangularLines );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationFeaturesEXT.bresenhamLines );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationFeaturesEXT.smoothLines );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationFeaturesEXT.stippledRectangularLines );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationFeaturesEXT.stippledBresenhamLines );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationFeaturesEXT.stippledSmoothLines );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT>  // generated std::hash specialization
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT const & physicalDeviceLineRasterizationPropertiesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationPropertiesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationPropertiesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationPropertiesEXT.lineSubPixelPrecisionBits );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceLinearColorAttachmentFeaturesNV>  // generated std::hash specialization
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceLinearColorAttachmentFeaturesNV const & physicalDeviceLinearColorAttachmentFeaturesNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLinearColorAttachmentFeaturesNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLinearColorAttachmentFeaturesNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLinearColorAttachmentFeaturesNV.linearColorAttachment );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties>  // generated std::hash specialization
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties const & physicalDeviceMaintenance3Properties) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance3Properties.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance3Properties.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance3Properties.maxPerSetDescriptors );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance3Properties.maxMemoryAllocationSize );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Features>  // generated std::hash specialization
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Features const & physicalDeviceMaintenance4Features) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance4Features.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance4Features.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance4Features.maintenance4 );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Properties>  // NOTE(review): generated by VulkanHppGenerator — do not hand-edit; regenerate instead
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Properties const & physicalDeviceMaintenance4Properties) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance4Properties.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance4Properties.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance4Properties.maxBufferSize );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT>  // generated std::hash specialization
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT const & physicalDeviceMemoryBudgetPropertiesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryBudgetPropertiesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryBudgetPropertiesEXT.pNext );
+    for ( size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i )  // fixed-size C array member: hash each element
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryBudgetPropertiesEXT.heapBudget[i] );
+    }
+    for ( size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryBudgetPropertiesEXT.heapUsage[i] );
+    }
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryDecompressionFeaturesNV>  // generated std::hash specialization
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryDecompressionFeaturesNV const & physicalDeviceMemoryDecompressionFeaturesNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryDecompressionFeaturesNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryDecompressionFeaturesNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryDecompressionFeaturesNV.memoryDecompression );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryDecompressionPropertiesNV>  // generated std::hash specialization
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryDecompressionPropertiesNV const & physicalDeviceMemoryDecompressionPropertiesNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryDecompressionPropertiesNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryDecompressionPropertiesNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryDecompressionPropertiesNV.decompressionMethods );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryDecompressionPropertiesNV.maxDecompressionIndirectCount );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryPriorityFeaturesEXT>  // generated std::hash specialization
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryPriorityFeaturesEXT const & physicalDeviceMemoryPriorityFeaturesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryPriorityFeaturesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryPriorityFeaturesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryPriorityFeaturesEXT.memoryPriority );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties>  // generated; core (non-extension) struct, hence no sType/pNext members
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties const & physicalDeviceMemoryProperties) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryProperties.memoryTypeCount );
+    for ( size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryProperties.memoryTypes[i] );
+    }
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryProperties.memoryHeapCount );
+    for ( size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryProperties.memoryHeaps[i] );
+    }
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>  // generated; delegates to hash<PhysicalDeviceMemoryProperties> via memoryProperties member
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 const & physicalDeviceMemoryProperties2) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryProperties2.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryProperties2.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryProperties2.memoryProperties );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesEXT>  // NOTE(review): generated by VulkanHppGenerator — do not hand-edit; regenerate instead
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesEXT const & physicalDeviceMeshShaderFeaturesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderFeaturesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderFeaturesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderFeaturesEXT.taskShader );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderFeaturesEXT.meshShader );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderFeaturesEXT.multiviewMeshShader );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderFeaturesEXT.primitiveFragmentShadingRateMeshShader );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderFeaturesEXT.meshShaderQueries );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV>  // generated std::hash specialization
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV const & physicalDeviceMeshShaderFeaturesNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderFeaturesNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderFeaturesNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderFeaturesNV.taskShader );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderFeaturesNV.meshShader );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesEXT>  // generated std::hash specialization
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesEXT const & physicalDeviceMeshShaderPropertiesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxTaskWorkGroupTotalCount );
+    for ( size_t i = 0; i < 3; ++i )  // uint32_t[3] members: hash each component
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxTaskWorkGroupCount[i] );
+    }
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxTaskWorkGroupInvocations );
+    for ( size_t i = 0; i < 3; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxTaskWorkGroupSize[i] );
+    }
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxTaskPayloadSize );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxTaskSharedMemorySize );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxTaskPayloadAndSharedMemorySize );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxMeshWorkGroupTotalCount );
+    for ( size_t i = 0; i < 3; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxMeshWorkGroupCount[i] );
+    }
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxMeshWorkGroupInvocations );
+    for ( size_t i = 0; i < 3; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxMeshWorkGroupSize[i] );
+    }
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxMeshSharedMemorySize );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxMeshPayloadAndSharedMemorySize );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxMeshOutputMemorySize );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxMeshPayloadAndOutputMemorySize );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxMeshOutputComponents );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxMeshOutputVertices );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxMeshOutputPrimitives );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxMeshOutputLayers );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxMeshMultiviewViewCount );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.meshOutputPerVertexGranularity );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.meshOutputPerPrimitiveGranularity );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxPreferredTaskWorkGroupInvocations );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxPreferredMeshWorkGroupInvocations );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.prefersLocalInvocationVertexOutput );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.prefersLocalInvocationPrimitiveOutput );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.prefersCompactVertexOutput );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.prefersCompactPrimitiveOutput );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV>  // generated std::hash specialization
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV const & physicalDeviceMeshShaderPropertiesNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesNV.maxDrawMeshTasksCount );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesNV.maxTaskWorkGroupInvocations );
+    for ( size_t i = 0; i < 3; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesNV.maxTaskWorkGroupSize[i] );
+    }
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesNV.maxTaskTotalMemorySize );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesNV.maxTaskOutputCount );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesNV.maxMeshWorkGroupInvocations );
+    for ( size_t i = 0; i < 3; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesNV.maxMeshWorkGroupSize[i] );
+    }
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesNV.maxMeshTotalMemorySize );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesNV.maxMeshOutputVertices );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesNV.maxMeshOutputPrimitives );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesNV.maxMeshMultiviewViewCount );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesNV.meshOutputPerVertexGranularity );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesNV.meshOutputPerPrimitiveGranularity );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawFeaturesEXT>  // NOTE(review): generated by VulkanHppGenerator — do not hand-edit; regenerate instead
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawFeaturesEXT const & physicalDeviceMultiDrawFeaturesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiDrawFeaturesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiDrawFeaturesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiDrawFeaturesEXT.multiDraw );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawPropertiesEXT>  // generated std::hash specialization
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawPropertiesEXT const & physicalDeviceMultiDrawPropertiesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiDrawPropertiesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiDrawPropertiesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiDrawPropertiesEXT.maxMultiDrawCount );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT>  // generated std::hash specialization
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & physicalDeviceMultisampledRenderToSingleSampledFeaturesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultisampledRenderToSingleSampledFeaturesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultisampledRenderToSingleSampledFeaturesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultisampledRenderToSingleSampledFeaturesEXT.multisampledRenderToSingleSampled );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures>  // generated std::hash specialization
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures const & physicalDeviceMultiviewFeatures) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewFeatures.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewFeatures.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewFeatures.multiview );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewFeatures.multiviewGeometryShader );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewFeatures.multiviewTessellationShader );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX>  // generated std::hash specialization
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & physicalDeviceMultiviewPerViewAttributesPropertiesNVX) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewPerViewAttributesPropertiesNVX.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewPerViewAttributesPropertiesNVX.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewPerViewAttributesPropertiesNVX.perViewPositionAllComponents );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM>  // generated std::hash specialization
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const & physicalDeviceMultiviewPerViewViewportsFeaturesQCOM) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewPerViewViewportsFeaturesQCOM.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewPerViewViewportsFeaturesQCOM.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewPerViewViewportsFeaturesQCOM.multiviewPerViewViewports );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewProperties>  // generated std::hash specialization
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewProperties const & physicalDeviceMultiviewProperties) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewProperties.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewProperties.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewProperties.maxMultiviewViewCount );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewProperties.maxMultiviewInstanceIndex );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMutableDescriptorTypeFeaturesEXT>  // NOTE(review): generated by VulkanHppGenerator — do not hand-edit; regenerate instead
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceMutableDescriptorTypeFeaturesEXT const & physicalDeviceMutableDescriptorTypeFeaturesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMutableDescriptorTypeFeaturesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMutableDescriptorTypeFeaturesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMutableDescriptorTypeFeaturesEXT.mutableDescriptorType );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceNonSeamlessCubeMapFeaturesEXT>  // generated std::hash specialization
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & physicalDeviceNonSeamlessCubeMapFeaturesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceNonSeamlessCubeMapFeaturesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceNonSeamlessCubeMapFeaturesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceNonSeamlessCubeMapFeaturesEXT.nonSeamlessCubeMap );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapFeaturesEXT>  // generated std::hash specialization
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapFeaturesEXT const & physicalDeviceOpacityMicromapFeaturesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpacityMicromapFeaturesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpacityMicromapFeaturesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpacityMicromapFeaturesEXT.micromap );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpacityMicromapFeaturesEXT.micromapCaptureReplay );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpacityMicromapFeaturesEXT.micromapHostCommands );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapPropertiesEXT>  // generated std::hash specialization
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapPropertiesEXT const & physicalDeviceOpacityMicromapPropertiesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpacityMicromapPropertiesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpacityMicromapPropertiesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpacityMicromapPropertiesEXT.maxOpacity2StateSubdivisionLevel );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpacityMicromapPropertiesEXT.maxOpacity4StateSubdivisionLevel );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowFeaturesNV>  // generated std::hash specialization
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowFeaturesNV const & physicalDeviceOpticalFlowFeaturesNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowFeaturesNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowFeaturesNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowFeaturesNV.opticalFlow );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowPropertiesNV>  // generated std::hash specialization
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowPropertiesNV const & physicalDeviceOpticalFlowPropertiesNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.supportedOutputGridSizes );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.supportedHintGridSizes );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.hintSupported );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.costSupported );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.bidirectionalFlowSupported );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.globalFlowSupported );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.minWidth );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.minHeight );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.maxWidth );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.maxHeight );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.maxNumRegionsOfInterest );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT>  // generated std::hash specialization
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT const & physicalDevicePCIBusInfoPropertiesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePCIBusInfoPropertiesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePCIBusInfoPropertiesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePCIBusInfoPropertiesEXT.pciDomain );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePCIBusInfoPropertiesEXT.pciBus );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePCIBusInfoPropertiesEXT.pciDevice );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePCIBusInfoPropertiesEXT.pciFunction );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT>  // generated std::hash specialization
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & physicalDevicePageableDeviceLocalMemoryFeaturesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePageableDeviceLocalMemoryFeaturesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePageableDeviceLocalMemoryFeaturesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePageableDeviceLocalMemoryFeaturesEXT.pageableDeviceLocalMemory );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR>  // generated std::hash specialization
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR const & physicalDevicePerformanceQueryFeaturesKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePerformanceQueryFeaturesKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePerformanceQueryFeaturesKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePerformanceQueryFeaturesKHR.performanceCounterQueryPools );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePerformanceQueryFeaturesKHR.performanceCounterMultipleQueryPools );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryPropertiesKHR>  // generated std::hash specialization
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryPropertiesKHR const & physicalDevicePerformanceQueryPropertiesKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePerformanceQueryPropertiesKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePerformanceQueryPropertiesKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePerformanceQueryPropertiesKHR.allowCommandBufferQueryCopies );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineCreationCacheControlFeatures>  // generated std::hash specialization
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineCreationCacheControlFeatures const & physicalDevicePipelineCreationCacheControlFeatures) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineCreationCacheControlFeatures.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineCreationCacheControlFeatures.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineCreationCacheControlFeatures.pipelineCreationCacheControl );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR>  // generated std::hash specialization
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & physicalDevicePipelineExecutablePropertiesFeaturesKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineExecutablePropertiesFeaturesKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineExecutablePropertiesFeaturesKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineExecutablePropertiesFeaturesKHR.pipelineExecutableInfo );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelinePropertiesFeaturesEXT>  // generated std::hash specialization
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDevicePipelinePropertiesFeaturesEXT const & physicalDevicePipelinePropertiesFeaturesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelinePropertiesFeaturesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelinePropertiesFeaturesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelinePropertiesFeaturesEXT.pipelinePropertiesIdentifier );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineProtectedAccessFeaturesEXT>  // generated std::hash specialization
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineProtectedAccessFeaturesEXT const & physicalDevicePipelineProtectedAccessFeaturesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineProtectedAccessFeaturesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineProtectedAccessFeaturesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineProtectedAccessFeaturesEXT.pipelineProtectedAccess );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessFeaturesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessFeaturesEXT const & physicalDevicePipelineRobustnessFeaturesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessFeaturesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessFeaturesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessFeaturesEXT.pipelineRobustness );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessPropertiesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessPropertiesEXT const & physicalDevicePipelineRobustnessPropertiesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessPropertiesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessPropertiesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessPropertiesEXT.defaultRobustnessStorageBuffers );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessPropertiesEXT.defaultRobustnessUniformBuffers );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessPropertiesEXT.defaultRobustnessVertexInputs );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessPropertiesEXT.defaultRobustnessImages );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingProperties>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingProperties const & physicalDevicePointClippingProperties) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePointClippingProperties.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePointClippingProperties.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePointClippingProperties.pointClippingBehavior );
+      return seed;
+    }
+  };
+
+  // Portability-subset hashes are compiled only when the beta extensions are enabled
+  // (VK_KHR_portability_subset is a provisional extension gated by VK_ENABLE_BETA_EXTENSIONS).
+  #if defined( VK_ENABLE_BETA_EXTENSIONS )
+template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetFeaturesKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetFeaturesKHR const & physicalDevicePortabilitySubsetFeaturesKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.constantAlphaColorBlendFactors );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.events );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.imageViewFormatReinterpretation );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.imageViewFormatSwizzle );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.imageView2DOn3DImage );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.multisampleArrayImage );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.mutableComparisonSamplers );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.pointPolygons );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.samplerMipLodBias );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.separateStencilMaskRef );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.shaderSampleRateInterpolationFunctions );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.tessellationIsolines );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.tessellationPointMode );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.triangleFans );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.vertexAttributeAccessBeyondStride );
+      return seed;
+    }
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  #if defined( VK_ENABLE_BETA_EXTENSIONS )
+template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetPropertiesKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetPropertiesKHR const & physicalDevicePortabilitySubsetPropertiesKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetPropertiesKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetPropertiesKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetPropertiesKHR.minVertexInputBindingStrideAlignment );
+      return seed;
+    }
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  // Present/primitive/private-data feature-struct hashes: each combines sType, pNext and
+  // every feature flag into `seed` (generated code — kept byte-identical).
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentBarrierFeaturesNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDevicePresentBarrierFeaturesNV const & physicalDevicePresentBarrierFeaturesNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePresentBarrierFeaturesNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePresentBarrierFeaturesNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePresentBarrierFeaturesNV.presentBarrier );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentIdFeaturesKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDevicePresentIdFeaturesKHR const & physicalDevicePresentIdFeaturesKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePresentIdFeaturesKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePresentIdFeaturesKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePresentIdFeaturesKHR.presentId );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentWaitFeaturesKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDevicePresentWaitFeaturesKHR const & physicalDevicePresentWaitFeaturesKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePresentWaitFeaturesKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePresentWaitFeaturesKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePresentWaitFeaturesKHR.presentWait );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & physicalDevicePrimitiveTopologyListRestartFeaturesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePrimitiveTopologyListRestartFeaturesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePrimitiveTopologyListRestartFeaturesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePrimitiveTopologyListRestartFeaturesEXT.primitiveTopologyListRestart );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePrimitiveTopologyListRestartFeaturesEXT.primitiveTopologyPatchListRestart );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & physicalDevicePrimitivesGeneratedQueryFeaturesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePrimitivesGeneratedQueryFeaturesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePrimitivesGeneratedQueryFeaturesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePrimitivesGeneratedQueryFeaturesEXT.primitivesGeneratedQuery );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePrimitivesGeneratedQueryFeaturesEXT.primitivesGeneratedQueryWithRasterizerDiscard );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePrimitivesGeneratedQueryFeaturesEXT.primitivesGeneratedQueryWithNonZeroStreams );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePrivateDataFeatures>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDevicePrivateDataFeatures const & physicalDevicePrivateDataFeatures) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePrivateDataFeatures.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePrivateDataFeatures.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePrivateDataFeatures.privateData );
+      return seed;
+    }
+  };
+
+  // SparseProperties and Properties combine members directly with no sType/pNext — these
+  // are plain (non-extensible) structs, presumably because the C structs carry no chain
+  // header; fixed-size char/uint8_t arrays are hashed element-wise in explicit loops.
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties const & physicalDeviceSparseProperties) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseProperties.residencyStandard2DBlockShape );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseProperties.residencyStandard2DMultisampleBlockShape );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseProperties.residencyStandard3DBlockShape );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseProperties.residencyAlignedMipSize );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseProperties.residencyNonResidentStrict );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties const & physicalDeviceProperties) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.apiVersion );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.driverVersion );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.vendorID );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.deviceID );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.deviceType );
+    // deviceName is hashed over the full VK_MAX_PHYSICAL_DEVICE_NAME_SIZE buffer,
+    // including bytes past the NUL terminator.
+    for ( size_t i = 0; i < VK_MAX_PHYSICAL_DEVICE_NAME_SIZE; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.deviceName[i] );
+    }
+    for ( size_t i = 0; i < VK_UUID_SIZE; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.pipelineCacheUUID[i] );
+    }
+    // Nested structs rely on their own hash specializations (limits, sparseProperties).
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.limits );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.sparseProperties );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 const & physicalDeviceProperties2) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties2.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties2.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties2.properties );
+      return seed;
+    }
+  };
+
+  // Protected-memory, provoking-vertex and push-descriptor structs: sType + pNext + each
+  // member folded into `seed` (generated code — kept byte-identical).
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryFeatures>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryFeatures const & physicalDeviceProtectedMemoryFeatures) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProtectedMemoryFeatures.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProtectedMemoryFeatures.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProtectedMemoryFeatures.protectedMemory );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties const & physicalDeviceProtectedMemoryProperties) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProtectedMemoryProperties.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProtectedMemoryProperties.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProtectedMemoryProperties.protectedNoFault );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexFeaturesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexFeaturesEXT const & physicalDeviceProvokingVertexFeaturesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProvokingVertexFeaturesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProvokingVertexFeaturesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProvokingVertexFeaturesEXT.provokingVertexLast );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProvokingVertexFeaturesEXT.transformFeedbackPreservesProvokingVertex );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexPropertiesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexPropertiesEXT const & physicalDeviceProvokingVertexPropertiesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProvokingVertexPropertiesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProvokingVertexPropertiesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProvokingVertexPropertiesEXT.provokingVertexModePerPipeline );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProvokingVertexPropertiesEXT.transformFeedbackPreservesTriangleFanProvokingVertex );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR const & physicalDevicePushDescriptorPropertiesKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePushDescriptorPropertiesKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePushDescriptorPropertiesKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePushDescriptorPropertiesKHR.maxPushDescriptors );
+      return seed;
+    }
+  };
+
+  // RGBA10X6 / rasterization-order / ray-query / ray-tracing feature structs: sType +
+  // pNext + each feature flag folded into `seed` (generated code — kept byte-identical).
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceRGBA10X6FormatsFeaturesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceRGBA10X6FormatsFeaturesEXT const & physicalDeviceRGBA10X6FormatsFeaturesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRGBA10X6FormatsFeaturesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRGBA10X6FormatsFeaturesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRGBA10X6FormatsFeaturesEXT.formatRgba10x6WithoutYCbCrSampler );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & physicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT.rasterizationOrderColorAttachmentAccess );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT.rasterizationOrderDepthAttachmentAccess );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT.rasterizationOrderStencilAttachmentAccess );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayQueryFeaturesKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceRayQueryFeaturesKHR const & physicalDeviceRayQueryFeaturesKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayQueryFeaturesKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayQueryFeaturesKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayQueryFeaturesKHR.rayQuery );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingInvocationReorderFeaturesNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingInvocationReorderFeaturesNV const & physicalDeviceRayTracingInvocationReorderFeaturesNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingInvocationReorderFeaturesNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingInvocationReorderFeaturesNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingInvocationReorderFeaturesNV.rayTracingInvocationReorder );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingInvocationReorderPropertiesNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingInvocationReorderPropertiesNV const & physicalDeviceRayTracingInvocationReorderPropertiesNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingInvocationReorderPropertiesNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingInvocationReorderPropertiesNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingInvocationReorderPropertiesNV.rayTracingInvocationReorderReorderingHint );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMaintenance1FeaturesKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMaintenance1FeaturesKHR const & physicalDeviceRayTracingMaintenance1FeaturesKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingMaintenance1FeaturesKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingMaintenance1FeaturesKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingMaintenance1FeaturesKHR.rayTracingMaintenance1 );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingMaintenance1FeaturesKHR.rayTracingPipelineTraceRaysIndirect2 );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMotionBlurFeaturesNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMotionBlurFeaturesNV const & physicalDeviceRayTracingMotionBlurFeaturesNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingMotionBlurFeaturesNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingMotionBlurFeaturesNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingMotionBlurFeaturesNV.rayTracingMotionBlur );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingMotionBlurFeaturesNV.rayTracingMotionBlurPipelineTraceRaysIndirect );
+      return seed;
+    }
+  };
+
+  // KHR ray-tracing-pipeline feature/property structs and the legacy NV ray-tracing
+  // properties: sType + pNext + every member folded into `seed`.
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelineFeaturesKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelineFeaturesKHR const & physicalDeviceRayTracingPipelineFeaturesKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelineFeaturesKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelineFeaturesKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelineFeaturesKHR.rayTracingPipeline );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelineFeaturesKHR.rayTracingPipelineShaderGroupHandleCaptureReplay );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelineFeaturesKHR.rayTracingPipelineShaderGroupHandleCaptureReplayMixed );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelineFeaturesKHR.rayTracingPipelineTraceRaysIndirect );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelineFeaturesKHR.rayTraversalPrimitiveCulling );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelinePropertiesKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelinePropertiesKHR const & physicalDeviceRayTracingPipelinePropertiesKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelinePropertiesKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelinePropertiesKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelinePropertiesKHR.shaderGroupHandleSize );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelinePropertiesKHR.maxRayRecursionDepth );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelinePropertiesKHR.maxShaderGroupStride );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelinePropertiesKHR.shaderGroupBaseAlignment );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelinePropertiesKHR.shaderGroupHandleCaptureReplaySize );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelinePropertiesKHR.maxRayDispatchInvocationCount );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelinePropertiesKHR.shaderGroupHandleAlignment );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelinePropertiesKHR.maxRayHitAttributeSize );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPropertiesNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPropertiesNV const & physicalDeviceRayTracingPropertiesNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPropertiesNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPropertiesNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPropertiesNV.shaderGroupHandleSize );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPropertiesNV.maxRecursionDepth );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPropertiesNV.maxShaderGroupStride );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPropertiesNV.shaderGroupBaseAlignment );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPropertiesNV.maxGeometryCount );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPropertiesNV.maxInstanceCount );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPropertiesNV.maxTriangleCount );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPropertiesNV.maxDescriptorSetAccelerationStructures );
+      return seed;
+    }
+  };
+
+  // Representative-fragment-test, robustness2 and sample-location structs; the 2-element
+  // sampleLocationCoordinateRange array is hashed element-wise.
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceRepresentativeFragmentTestFeaturesNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & physicalDeviceRepresentativeFragmentTestFeaturesNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRepresentativeFragmentTestFeaturesNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRepresentativeFragmentTestFeaturesNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRepresentativeFragmentTestFeaturesNV.representativeFragmentTest );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2FeaturesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2FeaturesEXT const & physicalDeviceRobustness2FeaturesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRobustness2FeaturesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRobustness2FeaturesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRobustness2FeaturesEXT.robustBufferAccess2 );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRobustness2FeaturesEXT.robustImageAccess2 );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRobustness2FeaturesEXT.nullDescriptor );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2PropertiesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2PropertiesEXT const & physicalDeviceRobustness2PropertiesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRobustness2PropertiesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRobustness2PropertiesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRobustness2PropertiesEXT.robustStorageBufferAccessSizeAlignment );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRobustness2PropertiesEXT.robustUniformBufferAccessSizeAlignment );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT const & physicalDeviceSampleLocationsPropertiesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSampleLocationsPropertiesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSampleLocationsPropertiesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSampleLocationsPropertiesEXT.sampleLocationSampleCounts );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSampleLocationsPropertiesEXT.maxSampleLocationGridSize );
+    for ( size_t i = 0; i < 2; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSampleLocationsPropertiesEXT.sampleLocationCoordinateRange[i] );
+    }
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSampleLocationsPropertiesEXT.sampleLocationSubPixelBits );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSampleLocationsPropertiesEXT.variableSampleLocations );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxProperties>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxProperties const & physicalDeviceSamplerFilterMinmaxProperties) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSamplerFilterMinmaxProperties.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSamplerFilterMinmaxProperties.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSamplerFilterMinmaxProperties.filterMinmaxSingleComponentFormats );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSamplerFilterMinmaxProperties.filterMinmaxImageComponentMapping );
+      return seed;
+    }
+  };
+
+  // Sampler-Ycbcr, scalar-block-layout, separate-depth-stencil and shader-atomic-float2
+  // feature structs: sType + pNext + each feature flag folded into `seed`.
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeatures>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeatures const & physicalDeviceSamplerYcbcrConversionFeatures) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSamplerYcbcrConversionFeatures.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSamplerYcbcrConversionFeatures.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSamplerYcbcrConversionFeatures.samplerYcbcrConversion );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeatures>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeatures const & physicalDeviceScalarBlockLayoutFeatures) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceScalarBlockLayoutFeatures.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceScalarBlockLayoutFeatures.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceScalarBlockLayoutFeatures.scalarBlockLayout );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeatures>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & physicalDeviceSeparateDepthStencilLayoutsFeatures) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSeparateDepthStencilLayoutsFeatures.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSeparateDepthStencilLayoutsFeatures.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSeparateDepthStencilLayoutsFeatures.separateDepthStencilLayouts );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloat2FeaturesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloat2FeaturesEXT const & physicalDeviceShaderAtomicFloat2FeaturesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat2FeaturesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat2FeaturesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat2FeaturesEXT.shaderBufferFloat16Atomics );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat2FeaturesEXT.shaderBufferFloat16AtomicAdd );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat2FeaturesEXT.shaderBufferFloat16AtomicMinMax );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat2FeaturesEXT.shaderBufferFloat32AtomicMinMax );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat2FeaturesEXT.shaderBufferFloat64AtomicMinMax );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat2FeaturesEXT.shaderSharedFloat16Atomics );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat2FeaturesEXT.shaderSharedFloat16AtomicAdd );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat2FeaturesEXT.shaderSharedFloat16AtomicMinMax );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat2FeaturesEXT.shaderSharedFloat32AtomicMinMax );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat2FeaturesEXT.shaderSharedFloat64AtomicMinMax );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat2FeaturesEXT.shaderImageFloat32AtomicMinMax );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat2FeaturesEXT.sparseImageFloat32AtomicMinMax );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloatFeaturesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloatFeaturesEXT const & physicalDeviceShaderAtomicFloatFeaturesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloatFeaturesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloatFeaturesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloatFeaturesEXT.shaderBufferFloat32Atomics );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloatFeaturesEXT.shaderBufferFloat32AtomicAdd );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloatFeaturesEXT.shaderBufferFloat64Atomics );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloatFeaturesEXT.shaderBufferFloat64AtomicAdd );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloatFeaturesEXT.shaderSharedFloat32Atomics );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloatFeaturesEXT.shaderSharedFloat32AtomicAdd );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloatFeaturesEXT.shaderSharedFloat64Atomics );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloatFeaturesEXT.shaderSharedFloat64AtomicAdd );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloatFeaturesEXT.shaderImageFloat32Atomics );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloatFeaturesEXT.shaderImageFloat32AtomicAdd );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloatFeaturesEXT.sparseImageFloat32Atomics );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloatFeaturesEXT.sparseImageFloat32AtomicAdd );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64Features>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64Features const & physicalDeviceShaderAtomicInt64Features) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicInt64Features.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicInt64Features.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicInt64Features.shaderBufferInt64Atomics );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicInt64Features.shaderSharedInt64Atomics );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderClockFeaturesKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderClockFeaturesKHR const & physicalDeviceShaderClockFeaturesKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderClockFeaturesKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderClockFeaturesKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderClockFeaturesKHR.shaderSubgroupClock );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderClockFeaturesKHR.shaderDeviceClock );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreBuiltinsFeaturesARM>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreBuiltinsFeaturesARM const & physicalDeviceShaderCoreBuiltinsFeaturesARM) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCoreBuiltinsFeaturesARM.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCoreBuiltinsFeaturesARM.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCoreBuiltinsFeaturesARM.shaderCoreBuiltins );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreBuiltinsPropertiesARM>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreBuiltinsPropertiesARM const & physicalDeviceShaderCoreBuiltinsPropertiesARM) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCoreBuiltinsPropertiesARM.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCoreBuiltinsPropertiesARM.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCoreBuiltinsPropertiesARM.shaderCoreMask );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCoreBuiltinsPropertiesARM.shaderCoreCount );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCoreBuiltinsPropertiesARM.shaderWarpsPerCore );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD const & physicalDeviceShaderCoreProperties2AMD) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCoreProperties2AMD.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCoreProperties2AMD.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCoreProperties2AMD.shaderCoreFeatures );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCoreProperties2AMD.activeComputeUnitCount );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesAMD>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesAMD const & physicalDeviceShaderCorePropertiesAMD) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesAMD.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesAMD.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesAMD.shaderEngineCount );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesAMD.shaderArraysPerEngineCount );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesAMD.computeUnitsPerShaderArray );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesAMD.simdPerComputeUnit );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesAMD.wavefrontsPerSimd );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesAMD.wavefrontSize );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesAMD.sgprsPerSimd );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesAMD.minSgprAllocation );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesAMD.maxSgprAllocation );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesAMD.sgprAllocationGranularity );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesAMD.vgprsPerSimd );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesAMD.minVgprAllocation );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesAMD.maxVgprAllocation );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesAMD.vgprAllocationGranularity );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDemoteToHelperInvocationFeatures>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDemoteToHelperInvocationFeatures const & physicalDeviceShaderDemoteToHelperInvocationFeatures) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderDemoteToHelperInvocationFeatures.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderDemoteToHelperInvocationFeatures.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderDemoteToHelperInvocationFeatures.shaderDemoteToHelperInvocation );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures const & physicalDeviceShaderDrawParametersFeatures) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderDrawParametersFeatures.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderDrawParametersFeatures.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderDrawParametersFeatures.shaderDrawParameters );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const & physicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD.shaderEarlyAndLateFragmentTests );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8Features>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8Features const & physicalDeviceShaderFloat16Int8Features) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderFloat16Int8Features.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderFloat16Int8Features.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderFloat16Int8Features.shaderFloat16 );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderFloat16Int8Features.shaderInt8 );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageAtomicInt64FeaturesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & physicalDeviceShaderImageAtomicInt64FeaturesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderImageAtomicInt64FeaturesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderImageAtomicInt64FeaturesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderImageAtomicInt64FeaturesEXT.shaderImageInt64Atomics );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderImageAtomicInt64FeaturesEXT.sparseImageInt64Atomics );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageFootprintFeaturesNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageFootprintFeaturesNV const & physicalDeviceShaderImageFootprintFeaturesNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderImageFootprintFeaturesNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderImageFootprintFeaturesNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderImageFootprintFeaturesNV.imageFootprint );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductFeatures>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductFeatures const & physicalDeviceShaderIntegerDotProductFeatures) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductFeatures.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductFeatures.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductFeatures.shaderIntegerDotProduct );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductProperties>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductProperties const & physicalDeviceShaderIntegerDotProductProperties) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct8BitUnsignedAccelerated );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct8BitSignedAccelerated );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct8BitMixedSignednessAccelerated );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct4x8BitPackedUnsignedAccelerated );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct4x8BitPackedSignedAccelerated );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct4x8BitPackedMixedSignednessAccelerated );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct16BitUnsignedAccelerated );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct16BitSignedAccelerated );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct16BitMixedSignednessAccelerated );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct32BitUnsignedAccelerated );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct32BitSignedAccelerated );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct32BitMixedSignednessAccelerated );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct64BitUnsignedAccelerated );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct64BitSignedAccelerated );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct64BitMixedSignednessAccelerated );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating8BitUnsignedAccelerated );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating8BitSignedAccelerated );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating16BitUnsignedAccelerated );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating16BitSignedAccelerated );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating32BitUnsignedAccelerated );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating32BitSignedAccelerated );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating64BitUnsignedAccelerated );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating64BitSignedAccelerated );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & physicalDeviceShaderIntegerFunctions2FeaturesINTEL) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerFunctions2FeaturesINTEL.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerFunctions2FeaturesINTEL.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerFunctions2FeaturesINTEL.shaderIntegerFunctions2 );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderModuleIdentifierFeaturesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderModuleIdentifierFeaturesEXT const & physicalDeviceShaderModuleIdentifierFeaturesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderModuleIdentifierFeaturesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderModuleIdentifierFeaturesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderModuleIdentifierFeaturesEXT.shaderModuleIdentifier );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderModuleIdentifierPropertiesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderModuleIdentifierPropertiesEXT const & physicalDeviceShaderModuleIdentifierPropertiesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderModuleIdentifierPropertiesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderModuleIdentifierPropertiesEXT.pNext );
+    for ( size_t i = 0; i < VK_UUID_SIZE; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderModuleIdentifierPropertiesEXT.shaderModuleIdentifierAlgorithmUUID[i] );
+    }
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsFeaturesNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsFeaturesNV const & physicalDeviceShaderSMBuiltinsFeaturesNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSMBuiltinsFeaturesNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSMBuiltinsFeaturesNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSMBuiltinsFeaturesNV.shaderSMBuiltins );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV const & physicalDeviceShaderSMBuiltinsPropertiesNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSMBuiltinsPropertiesNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSMBuiltinsPropertiesNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSMBuiltinsPropertiesNV.shaderSMCount );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSMBuiltinsPropertiesNV.shaderWarpsPerSM );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeatures>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & physicalDeviceShaderSubgroupExtendedTypesFeatures) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSubgroupExtendedTypesFeatures.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSubgroupExtendedTypesFeatures.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSubgroupExtendedTypesFeatures.shaderSubgroupExtendedTypes );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & physicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR.shaderSubgroupUniformControlFlow );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTerminateInvocationFeatures>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTerminateInvocationFeatures const & physicalDeviceShaderTerminateInvocationFeatures) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTerminateInvocationFeatures.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTerminateInvocationFeatures.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTerminateInvocationFeatures.shaderTerminateInvocation );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImageFeaturesNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImageFeaturesNV const & physicalDeviceShadingRateImageFeaturesNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShadingRateImageFeaturesNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShadingRateImageFeaturesNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShadingRateImageFeaturesNV.shadingRateImage );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShadingRateImageFeaturesNV.shadingRateCoarseSampleOrder );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImagePropertiesNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImagePropertiesNV const & physicalDeviceShadingRateImagePropertiesNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShadingRateImagePropertiesNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShadingRateImagePropertiesNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShadingRateImagePropertiesNV.shadingRateTexelSize );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShadingRateImagePropertiesNV.shadingRatePaletteSize );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShadingRateImagePropertiesNV.shadingRateMaxCoarseSamples );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 const & physicalDeviceSparseImageFormatInfo2) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseImageFormatInfo2.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseImageFormatInfo2.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseImageFormatInfo2.format );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseImageFormatInfo2.type );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseImageFormatInfo2.samples );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseImageFormatInfo2.usage );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseImageFormatInfo2.tiling );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupProperties>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupProperties const & physicalDeviceSubgroupProperties) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupProperties.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupProperties.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupProperties.subgroupSize );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupProperties.supportedStages );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupProperties.supportedOperations );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupProperties.quadOperationsInAllStages );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeatures>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeatures const & physicalDeviceSubgroupSizeControlFeatures) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupSizeControlFeatures.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupSizeControlFeatures.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupSizeControlFeatures.subgroupSizeControl );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupSizeControlFeatures.computeFullSubgroups );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlProperties>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlProperties const & physicalDeviceSubgroupSizeControlProperties) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupSizeControlProperties.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupSizeControlProperties.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupSizeControlProperties.minSubgroupSize );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupSizeControlProperties.maxSubgroupSize );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupSizeControlProperties.maxComputeWorkgroupSubgroups );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupSizeControlProperties.requiredSubgroupSizeStages );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassMergeFeedbackFeaturesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassMergeFeedbackFeaturesEXT const & physicalDeviceSubpassMergeFeedbackFeaturesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubpassMergeFeedbackFeaturesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubpassMergeFeedbackFeaturesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubpassMergeFeedbackFeaturesEXT.subpassMergeFeedback );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingFeaturesHUAWEI>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingFeaturesHUAWEI const & physicalDeviceSubpassShadingFeaturesHUAWEI) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubpassShadingFeaturesHUAWEI.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubpassShadingFeaturesHUAWEI.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubpassShadingFeaturesHUAWEI.subpassShading );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingPropertiesHUAWEI>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingPropertiesHUAWEI const & physicalDeviceSubpassShadingPropertiesHUAWEI) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubpassShadingPropertiesHUAWEI.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubpassShadingPropertiesHUAWEI.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubpassShadingPropertiesHUAWEI.maxSubpassShadingWorkgroupSizeAspectRatio );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR const & physicalDeviceSurfaceInfo2KHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSurfaceInfo2KHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSurfaceInfo2KHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSurfaceInfo2KHR.surface );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceSwapchainMaintenance1FeaturesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceSwapchainMaintenance1FeaturesEXT const & physicalDeviceSwapchainMaintenance1FeaturesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSwapchainMaintenance1FeaturesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSwapchainMaintenance1FeaturesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSwapchainMaintenance1FeaturesEXT.swapchainMaintenance1 );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceSynchronization2Features>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceSynchronization2Features const & physicalDeviceSynchronization2Features) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSynchronization2Features.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSynchronization2Features.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSynchronization2Features.synchronization2 );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & physicalDeviceTexelBufferAlignmentFeaturesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTexelBufferAlignmentFeaturesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTexelBufferAlignmentFeaturesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTexelBufferAlignmentFeaturesEXT.texelBufferAlignment );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentProperties>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentProperties const & physicalDeviceTexelBufferAlignmentProperties) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTexelBufferAlignmentProperties.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTexelBufferAlignmentProperties.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTexelBufferAlignmentProperties.storageTexelBufferOffsetAlignmentBytes );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTexelBufferAlignmentProperties.storageTexelBufferOffsetSingleTexelAlignment );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTexelBufferAlignmentProperties.uniformTexelBufferOffsetAlignmentBytes );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTexelBufferAlignmentProperties.uniformTexelBufferOffsetSingleTexelAlignment );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeatures>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeatures const & physicalDeviceTextureCompressionASTCHDRFeatures) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTextureCompressionASTCHDRFeatures.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTextureCompressionASTCHDRFeatures.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTextureCompressionASTCHDRFeatures.textureCompressionASTC_HDR );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceTilePropertiesFeaturesQCOM>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceTilePropertiesFeaturesQCOM const & physicalDeviceTilePropertiesFeaturesQCOM) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTilePropertiesFeaturesQCOM.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTilePropertiesFeaturesQCOM.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTilePropertiesFeaturesQCOM.tileProperties );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeatures>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeatures const & physicalDeviceTimelineSemaphoreFeatures) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTimelineSemaphoreFeatures.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTimelineSemaphoreFeatures.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTimelineSemaphoreFeatures.timelineSemaphore );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreProperties>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreProperties const & physicalDeviceTimelineSemaphoreProperties) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTimelineSemaphoreProperties.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTimelineSemaphoreProperties.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTimelineSemaphoreProperties.maxTimelineSemaphoreValueDifference );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties const & physicalDeviceToolProperties) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceToolProperties.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceToolProperties.pNext );
+    for ( size_t i = 0; i < VK_MAX_EXTENSION_NAME_SIZE; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceToolProperties.name[i] );
+    }
+    for ( size_t i = 0; i < VK_MAX_EXTENSION_NAME_SIZE; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceToolProperties.version[i] );
+    }
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceToolProperties.purposes );
+    for ( size_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceToolProperties.description[i] );
+    }
+    for ( size_t i = 0; i < VK_MAX_EXTENSION_NAME_SIZE; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceToolProperties.layer[i] );
+    }
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackFeaturesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackFeaturesEXT const & physicalDeviceTransformFeedbackFeaturesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackFeaturesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackFeaturesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackFeaturesEXT.transformFeedback );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackFeaturesEXT.geometryStreams );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackPropertiesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackPropertiesEXT const & physicalDeviceTransformFeedbackPropertiesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackPropertiesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackPropertiesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackPropertiesEXT.maxTransformFeedbackStreams );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackPropertiesEXT.maxTransformFeedbackBuffers );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackPropertiesEXT.maxTransformFeedbackBufferSize );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackPropertiesEXT.maxTransformFeedbackStreamDataSize );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackPropertiesEXT.maxTransformFeedbackBufferDataSize );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackPropertiesEXT.maxTransformFeedbackBufferDataStride );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackPropertiesEXT.transformFeedbackQueries );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackPropertiesEXT.transformFeedbackStreamsLinesTriangles );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackPropertiesEXT.transformFeedbackRasterizationStreamSelect );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackPropertiesEXT.transformFeedbackDraw );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeatures>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeatures const & physicalDeviceUniformBufferStandardLayoutFeatures) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceUniformBufferStandardLayoutFeatures.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceUniformBufferStandardLayoutFeatures.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceUniformBufferStandardLayoutFeatures.uniformBufferStandardLayout );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointersFeatures>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointersFeatures const & physicalDeviceVariablePointersFeatures) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVariablePointersFeatures.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVariablePointersFeatures.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVariablePointersFeatures.variablePointersStorageBuffer );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVariablePointersFeatures.variablePointers );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & physicalDeviceVertexAttributeDivisorFeaturesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorFeaturesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorFeaturesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorFeaturesEXT.vertexAttributeInstanceRateDivisor );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorFeaturesEXT.vertexAttributeInstanceRateZeroDivisor );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & physicalDeviceVertexAttributeDivisorPropertiesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorPropertiesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorPropertiesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorPropertiesEXT.maxVertexAttribDivisor );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexInputDynamicStateFeaturesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & physicalDeviceVertexInputDynamicStateFeaturesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexInputDynamicStateFeaturesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexInputDynamicStateFeaturesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexInputDynamicStateFeaturesEXT.vertexInputDynamicState );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR const & physicalDeviceVideoFormatInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVideoFormatInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVideoFormatInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVideoFormatInfoKHR.imageUsage );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Features>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Features const & physicalDeviceVulkan11Features) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Features.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Features.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Features.storageBuffer16BitAccess );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Features.uniformAndStorageBuffer16BitAccess );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Features.storagePushConstant16 );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Features.storageInputOutput16 );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Features.multiview );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Features.multiviewGeometryShader );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Features.multiviewTessellationShader );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Features.variablePointersStorageBuffer );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Features.variablePointers );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Features.protectedMemory );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Features.samplerYcbcrConversion );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Features.shaderDrawParameters );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Properties>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Properties const & physicalDeviceVulkan11Properties) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.pNext );
+    for ( size_t i = 0; i < VK_UUID_SIZE; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.deviceUUID[i] );
+    }
+    for ( size_t i = 0; i < VK_UUID_SIZE; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.driverUUID[i] );
+    }
+    for ( size_t i = 0; i < VK_LUID_SIZE; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.deviceLUID[i] );
+    }
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.deviceNodeMask );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.deviceLUIDValid );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.subgroupSize );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.subgroupSupportedStages );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.subgroupSupportedOperations );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.subgroupQuadOperationsInAllStages );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.pointClippingBehavior );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.maxMultiviewViewCount );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.maxMultiviewInstanceIndex );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.protectedNoFault );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.maxPerSetDescriptors );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.maxMemoryAllocationSize );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Features>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Features const & physicalDeviceVulkan12Features) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.samplerMirrorClampToEdge );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.drawIndirectCount );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.storageBuffer8BitAccess );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.uniformAndStorageBuffer8BitAccess );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.storagePushConstant8 );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderBufferInt64Atomics );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderSharedInt64Atomics );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderFloat16 );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderInt8 );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.descriptorIndexing );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderInputAttachmentArrayDynamicIndexing );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderUniformTexelBufferArrayDynamicIndexing );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderStorageTexelBufferArrayDynamicIndexing );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderUniformBufferArrayNonUniformIndexing );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderSampledImageArrayNonUniformIndexing );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderStorageBufferArrayNonUniformIndexing );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderStorageImageArrayNonUniformIndexing );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderInputAttachmentArrayNonUniformIndexing );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderUniformTexelBufferArrayNonUniformIndexing );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderStorageTexelBufferArrayNonUniformIndexing );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.descriptorBindingUniformBufferUpdateAfterBind );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.descriptorBindingSampledImageUpdateAfterBind );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.descriptorBindingStorageImageUpdateAfterBind );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.descriptorBindingStorageBufferUpdateAfterBind );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.descriptorBindingUniformTexelBufferUpdateAfterBind );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.descriptorBindingStorageTexelBufferUpdateAfterBind );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.descriptorBindingUpdateUnusedWhilePending );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.descriptorBindingPartiallyBound );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.descriptorBindingVariableDescriptorCount );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.runtimeDescriptorArray );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.samplerFilterMinmax );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.scalarBlockLayout );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.imagelessFramebuffer );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.uniformBufferStandardLayout );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderSubgroupExtendedTypes );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.separateDepthStencilLayouts );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.hostQueryReset );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.timelineSemaphore );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.bufferDeviceAddress );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.bufferDeviceAddressCaptureReplay );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.bufferDeviceAddressMultiDevice );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.vulkanMemoryModel );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.vulkanMemoryModelDeviceScope );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.vulkanMemoryModelAvailabilityVisibilityChains );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderOutputViewportIndex );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderOutputLayer );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.subgroupBroadcastDynamicId );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Properties>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Properties const & physicalDeviceVulkan12Properties) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.driverID );
+    for ( size_t i = 0; i < VK_MAX_DRIVER_NAME_SIZE; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.driverName[i] );
+    }
+    for ( size_t i = 0; i < VK_MAX_DRIVER_INFO_SIZE; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.driverInfo[i] );
+    }
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.conformanceVersion );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.denormBehaviorIndependence );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.roundingModeIndependence );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderSignedZeroInfNanPreserveFloat16 );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderSignedZeroInfNanPreserveFloat32 );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderSignedZeroInfNanPreserveFloat64 );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderDenormPreserveFloat16 );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderDenormPreserveFloat32 );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderDenormPreserveFloat64 );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderDenormFlushToZeroFloat16 );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderDenormFlushToZeroFloat32 );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderDenormFlushToZeroFloat64 );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderRoundingModeRTEFloat16 );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderRoundingModeRTEFloat32 );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderRoundingModeRTEFloat64 );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderRoundingModeRTZFloat16 );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderRoundingModeRTZFloat32 );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderRoundingModeRTZFloat64 );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxUpdateAfterBindDescriptorsInAllPools );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderUniformBufferArrayNonUniformIndexingNative );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderSampledImageArrayNonUniformIndexingNative );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderStorageBufferArrayNonUniformIndexingNative );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderStorageImageArrayNonUniformIndexingNative );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderInputAttachmentArrayNonUniformIndexingNative );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.robustBufferAccessUpdateAfterBind );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.quadDivergentImplicitLod );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxPerStageDescriptorUpdateAfterBindSamplers );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxPerStageDescriptorUpdateAfterBindUniformBuffers );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxPerStageDescriptorUpdateAfterBindStorageBuffers );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxPerStageDescriptorUpdateAfterBindSampledImages );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxPerStageDescriptorUpdateAfterBindStorageImages );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxPerStageDescriptorUpdateAfterBindInputAttachments );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxPerStageUpdateAfterBindResources );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxDescriptorSetUpdateAfterBindSamplers );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxDescriptorSetUpdateAfterBindUniformBuffers );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxDescriptorSetUpdateAfterBindStorageBuffers );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxDescriptorSetUpdateAfterBindSampledImages );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxDescriptorSetUpdateAfterBindStorageImages );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxDescriptorSetUpdateAfterBindInputAttachments );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.supportedDepthResolveModes );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.supportedStencilResolveModes );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.independentResolveNone );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.independentResolve );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.filterMinmaxSingleComponentFormats );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.filterMinmaxImageComponentMapping );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxTimelineSemaphoreValueDifference );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.framebufferIntegerColorSampleCounts );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Features>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Features const & physicalDeviceVulkan13Features) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.robustImageAccess );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.inlineUniformBlock );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.descriptorBindingInlineUniformBlockUpdateAfterBind );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.pipelineCreationCacheControl );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.privateData );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.shaderDemoteToHelperInvocation );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.shaderTerminateInvocation );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.subgroupSizeControl );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.computeFullSubgroups );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.synchronization2 );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.textureCompressionASTC_HDR );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.shaderZeroInitializeWorkgroupMemory );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.dynamicRendering );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.shaderIntegerDotProduct );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.maintenance4 );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Properties>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Properties const & physicalDeviceVulkan13Properties) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.minSubgroupSize );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.maxSubgroupSize );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.maxComputeWorkgroupSubgroups );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.requiredSubgroupSizeStages );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.maxInlineUniformBlockSize );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.maxPerStageDescriptorInlineUniformBlocks );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.maxDescriptorSetInlineUniformBlocks );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.maxDescriptorSetUpdateAfterBindInlineUniformBlocks );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.maxInlineUniformTotalSize );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct8BitUnsignedAccelerated );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct8BitSignedAccelerated );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct8BitMixedSignednessAccelerated );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct4x8BitPackedUnsignedAccelerated );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct4x8BitPackedSignedAccelerated );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct4x8BitPackedMixedSignednessAccelerated );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct16BitUnsignedAccelerated );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct16BitSignedAccelerated );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct16BitMixedSignednessAccelerated );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct32BitUnsignedAccelerated );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct32BitSignedAccelerated );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct32BitMixedSignednessAccelerated );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct64BitUnsignedAccelerated );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct64BitSignedAccelerated );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct64BitMixedSignednessAccelerated );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating8BitUnsignedAccelerated );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating8BitSignedAccelerated );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating16BitUnsignedAccelerated );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating16BitSignedAccelerated );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating32BitUnsignedAccelerated );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating32BitSignedAccelerated );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating64BitUnsignedAccelerated );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating64BitSignedAccelerated );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.storageTexelBufferOffsetAlignmentBytes );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.storageTexelBufferOffsetSingleTexelAlignment );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.uniformTexelBufferOffsetAlignmentBytes );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.uniformTexelBufferOffsetSingleTexelAlignment );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.maxBufferSize );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeatures>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeatures const & physicalDeviceVulkanMemoryModelFeatures) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkanMemoryModelFeatures.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkanMemoryModelFeatures.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkanMemoryModelFeatures.vulkanMemoryModel );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkanMemoryModelFeatures.vulkanMemoryModelDeviceScope );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkanMemoryModelFeatures.vulkanMemoryModelAvailabilityVisibilityChains );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & physicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR.workgroupMemoryExplicitLayout );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR.workgroupMemoryExplicitLayoutScalarBlockLayout );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR.workgroupMemoryExplicitLayout8BitAccess );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR.workgroupMemoryExplicitLayout16BitAccess );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & physicalDeviceYcbcr2Plane444FormatsFeaturesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceYcbcr2Plane444FormatsFeaturesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceYcbcr2Plane444FormatsFeaturesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceYcbcr2Plane444FormatsFeaturesEXT.ycbcr2plane444Formats );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT const & physicalDeviceYcbcrImageArraysFeaturesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceYcbcrImageArraysFeaturesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceYcbcrImageArraysFeaturesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceYcbcrImageArraysFeaturesEXT.ycbcrImageArrays );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & physicalDeviceZeroInitializeWorkgroupMemoryFeatures) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceZeroInitializeWorkgroupMemoryFeatures.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceZeroInitializeWorkgroupMemoryFeatures.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceZeroInitializeWorkgroupMemoryFeatures.shaderZeroInitializeWorkgroupMemory );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo const & pipelineCacheCreateInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineCacheCreateInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineCacheCreateInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineCacheCreateInfo.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineCacheCreateInfo.initialDataSize );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineCacheCreateInfo.pInitialData );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersionOne>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersionOne const & pipelineCacheHeaderVersionOne) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineCacheHeaderVersionOne.headerSize );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineCacheHeaderVersionOne.headerVersion );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineCacheHeaderVersionOne.vendorID );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineCacheHeaderVersionOne.deviceID );
+    for ( size_t i = 0; i < VK_UUID_SIZE; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineCacheHeaderVersionOne.pipelineCacheUUID[i] );
+    }
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PipelineColorBlendAdvancedStateCreateInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PipelineColorBlendAdvancedStateCreateInfoEXT const & pipelineColorBlendAdvancedStateCreateInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendAdvancedStateCreateInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendAdvancedStateCreateInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendAdvancedStateCreateInfoEXT.srcPremultiplied );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendAdvancedStateCreateInfoEXT.dstPremultiplied );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendAdvancedStateCreateInfoEXT.blendOverlap );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PipelineColorWriteCreateInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PipelineColorWriteCreateInfoEXT const & pipelineColorWriteCreateInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineColorWriteCreateInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineColorWriteCreateInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineColorWriteCreateInfoEXT.attachmentCount );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineColorWriteCreateInfoEXT.pColorWriteEnables );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PipelineCompilerControlCreateInfoAMD>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PipelineCompilerControlCreateInfoAMD const & pipelineCompilerControlCreateInfoAMD) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineCompilerControlCreateInfoAMD.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineCompilerControlCreateInfoAMD.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineCompilerControlCreateInfoAMD.compilerControlFlags );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateInfoNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateInfoNV const & pipelineCoverageModulationStateCreateInfoNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageModulationStateCreateInfoNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageModulationStateCreateInfoNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageModulationStateCreateInfoNV.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageModulationStateCreateInfoNV.coverageModulationMode );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageModulationStateCreateInfoNV.coverageModulationTableEnable );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageModulationStateCreateInfoNV.coverageModulationTableCount );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageModulationStateCreateInfoNV.pCoverageModulationTable );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateInfoNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateInfoNV const & pipelineCoverageReductionStateCreateInfoNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageReductionStateCreateInfoNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageReductionStateCreateInfoNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageReductionStateCreateInfoNV.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageReductionStateCreateInfoNV.coverageReductionMode );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateInfoNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateInfoNV const & pipelineCoverageToColorStateCreateInfoNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageToColorStateCreateInfoNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageToColorStateCreateInfoNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageToColorStateCreateInfoNV.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageToColorStateCreateInfoNV.coverageToColorEnable );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageToColorStateCreateInfoNV.coverageToColorLocation );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PipelineCreationFeedback>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PipelineCreationFeedback const & pipelineCreationFeedback) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineCreationFeedback.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineCreationFeedback.duration );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfo const & pipelineCreationFeedbackCreateInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineCreationFeedbackCreateInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineCreationFeedbackCreateInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineCreationFeedbackCreateInfo.pPipelineCreationFeedback );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineCreationFeedbackCreateInfo.pipelineStageCreationFeedbackCount );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineCreationFeedbackCreateInfo.pPipelineStageCreationFeedbacks );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateInfoEXT const & pipelineDiscardRectangleStateCreateInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineDiscardRectangleStateCreateInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineDiscardRectangleStateCreateInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineDiscardRectangleStateCreateInfoEXT.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineDiscardRectangleStateCreateInfoEXT.discardRectangleMode );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineDiscardRectangleStateCreateInfoEXT.discardRectangleCount );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineDiscardRectangleStateCreateInfoEXT.pDiscardRectangles );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR const & pipelineExecutableInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutableInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutableInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutableInfoKHR.pipeline );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutableInfoKHR.executableIndex );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR const & pipelineExecutableInternalRepresentationKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutableInternalRepresentationKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutableInternalRepresentationKHR.pNext );
+    for ( size_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutableInternalRepresentationKHR.name[i] );
+    }
+    for ( size_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutableInternalRepresentationKHR.description[i] );
+    }
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutableInternalRepresentationKHR.isText );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutableInternalRepresentationKHR.dataSize );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutableInternalRepresentationKHR.pData );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR const & pipelineExecutablePropertiesKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutablePropertiesKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutablePropertiesKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutablePropertiesKHR.stages );
+    for ( size_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutablePropertiesKHR.name[i] );
+    }
+    for ( size_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutablePropertiesKHR.description[i] );
+    }
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutablePropertiesKHR.subgroupSize );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateEnumStateCreateInfoNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateEnumStateCreateInfoNV const & pipelineFragmentShadingRateEnumStateCreateInfoNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineFragmentShadingRateEnumStateCreateInfoNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineFragmentShadingRateEnumStateCreateInfoNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineFragmentShadingRateEnumStateCreateInfoNV.shadingRateType );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineFragmentShadingRateEnumStateCreateInfoNV.shadingRate );
+    for ( size_t i = 0; i < 2; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineFragmentShadingRateEnumStateCreateInfoNV.combinerOps[i] );
+    }
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateStateCreateInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateStateCreateInfoKHR const & pipelineFragmentShadingRateStateCreateInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineFragmentShadingRateStateCreateInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineFragmentShadingRateStateCreateInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineFragmentShadingRateStateCreateInfoKHR.fragmentSize );
+    for ( size_t i = 0; i < 2; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineFragmentShadingRateStateCreateInfoKHR.combinerOps[i] );
+    }
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PipelineInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PipelineInfoKHR const & pipelineInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineInfoKHR.pipeline );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PushConstantRange>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PushConstantRange const & pushConstantRange) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, pushConstantRange.stageFlags );
+    VULKAN_HPP_HASH_COMBINE( seed, pushConstantRange.offset );
+    VULKAN_HPP_HASH_COMBINE( seed, pushConstantRange.size );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo const & pipelineLayoutCreateInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineLayoutCreateInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineLayoutCreateInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineLayoutCreateInfo.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineLayoutCreateInfo.setLayoutCount );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineLayoutCreateInfo.pSetLayouts );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineLayoutCreateInfo.pushConstantRangeCount );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineLayoutCreateInfo.pPushConstantRanges );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR const & pipelineLibraryCreateInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineLibraryCreateInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineLibraryCreateInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineLibraryCreateInfoKHR.libraryCount );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineLibraryCreateInfoKHR.pLibraries );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PipelinePropertiesIdentifierEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PipelinePropertiesIdentifierEXT const & pipelinePropertiesIdentifierEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, pipelinePropertiesIdentifierEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelinePropertiesIdentifierEXT.pNext );
+    for ( size_t i = 0; i < VK_UUID_SIZE; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, pipelinePropertiesIdentifierEXT.pipelineIdentifier[i] );
+    }
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateInfoEXT const & pipelineRasterizationConservativeStateCreateInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationConservativeStateCreateInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationConservativeStateCreateInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationConservativeStateCreateInfoEXT.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationConservativeStateCreateInfoEXT.conservativeRasterizationMode );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationConservativeStateCreateInfoEXT.extraPrimitiveOverestimationSize );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateInfoEXT const & pipelineRasterizationDepthClipStateCreateInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationDepthClipStateCreateInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationDepthClipStateCreateInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationDepthClipStateCreateInfoEXT.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationDepthClipStateCreateInfoEXT.depthClipEnable );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfoEXT const & pipelineRasterizationLineStateCreateInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationLineStateCreateInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationLineStateCreateInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationLineStateCreateInfoEXT.lineRasterizationMode );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationLineStateCreateInfoEXT.stippledLineEnable );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationLineStateCreateInfoEXT.lineStippleFactor );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationLineStateCreateInfoEXT.lineStipplePattern );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PipelineRasterizationProvokingVertexStateCreateInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PipelineRasterizationProvokingVertexStateCreateInfoEXT const & pipelineRasterizationProvokingVertexStateCreateInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationProvokingVertexStateCreateInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationProvokingVertexStateCreateInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationProvokingVertexStateCreateInfoEXT.provokingVertexMode );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateRasterizationOrderAMD>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PipelineRasterizationStateRasterizationOrderAMD const & pipelineRasterizationStateRasterizationOrderAMD) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateRasterizationOrderAMD.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateRasterizationOrderAMD.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateRasterizationOrderAMD.rasterizationOrder );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateInfoEXT const & pipelineRasterizationStateStreamCreateInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateStreamCreateInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateStreamCreateInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateStreamCreateInfoEXT.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateStreamCreateInfoEXT.rasterizationStream );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PipelineRenderingCreateInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PipelineRenderingCreateInfo const & pipelineRenderingCreateInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineRenderingCreateInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineRenderingCreateInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineRenderingCreateInfo.viewMask );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineRenderingCreateInfo.colorAttachmentCount );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineRenderingCreateInfo.pColorAttachmentFormats );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineRenderingCreateInfo.depthAttachmentFormat );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineRenderingCreateInfo.stencilAttachmentFormat );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PipelineRepresentativeFragmentTestStateCreateInfoNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PipelineRepresentativeFragmentTestStateCreateInfoNV const & pipelineRepresentativeFragmentTestStateCreateInfoNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineRepresentativeFragmentTestStateCreateInfoNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineRepresentativeFragmentTestStateCreateInfoNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineRepresentativeFragmentTestStateCreateInfoNV.representativeFragmentTestEnable );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PipelineRobustnessCreateInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PipelineRobustnessCreateInfoEXT const & pipelineRobustnessCreateInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineRobustnessCreateInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineRobustnessCreateInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineRobustnessCreateInfoEXT.storageBuffers );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineRobustnessCreateInfoEXT.uniformBuffers );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineRobustnessCreateInfoEXT.vertexInputs );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineRobustnessCreateInfoEXT.images );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PipelineSampleLocationsStateCreateInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PipelineSampleLocationsStateCreateInfoEXT const & pipelineSampleLocationsStateCreateInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineSampleLocationsStateCreateInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineSampleLocationsStateCreateInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineSampleLocationsStateCreateInfoEXT.sampleLocationsEnable );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineSampleLocationsStateCreateInfoEXT.sampleLocationsInfo );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PipelineShaderStageModuleIdentifierCreateInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PipelineShaderStageModuleIdentifierCreateInfoEXT const & pipelineShaderStageModuleIdentifierCreateInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageModuleIdentifierCreateInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageModuleIdentifierCreateInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageModuleIdentifierCreateInfoEXT.identifierSize );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageModuleIdentifierCreateInfoEXT.pIdentifier );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfo const & pipelineShaderStageRequiredSubgroupSizeCreateInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageRequiredSubgroupSizeCreateInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageRequiredSubgroupSizeCreateInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageRequiredSubgroupSizeCreateInfo.requiredSubgroupSize );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfo const & pipelineTessellationDomainOriginStateCreateInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineTessellationDomainOriginStateCreateInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineTessellationDomainOriginStateCreateInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineTessellationDomainOriginStateCreateInfo.domainOrigin );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT const & vertexInputBindingDivisorDescriptionEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDivisorDescriptionEXT.binding );
+    VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDivisorDescriptionEXT.divisor );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoEXT const & pipelineVertexInputDivisorStateCreateInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineVertexInputDivisorStateCreateInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineVertexInputDivisorStateCreateInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineVertexInputDivisorStateCreateInfoEXT.vertexBindingDivisorCount );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineVertexInputDivisorStateCreateInfoEXT.pVertexBindingDivisors );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PipelineViewportCoarseSampleOrderStateCreateInfoNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PipelineViewportCoarseSampleOrderStateCreateInfoNV const & pipelineViewportCoarseSampleOrderStateCreateInfoNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportCoarseSampleOrderStateCreateInfoNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportCoarseSampleOrderStateCreateInfoNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportCoarseSampleOrderStateCreateInfoNV.sampleOrderType );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportCoarseSampleOrderStateCreateInfoNV.customSampleOrderCount );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportCoarseSampleOrderStateCreateInfoNV.pCustomSampleOrders );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PipelineViewportDepthClipControlCreateInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PipelineViewportDepthClipControlCreateInfoEXT const & pipelineViewportDepthClipControlCreateInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportDepthClipControlCreateInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportDepthClipControlCreateInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportDepthClipControlCreateInfoEXT.negativeOneToOne );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PipelineViewportExclusiveScissorStateCreateInfoNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PipelineViewportExclusiveScissorStateCreateInfoNV const & pipelineViewportExclusiveScissorStateCreateInfoNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportExclusiveScissorStateCreateInfoNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportExclusiveScissorStateCreateInfoNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportExclusiveScissorStateCreateInfoNV.exclusiveScissorCount );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportExclusiveScissorStateCreateInfoNV.pExclusiveScissors );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV const & shadingRatePaletteNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, shadingRatePaletteNV.shadingRatePaletteEntryCount );
+    VULKAN_HPP_HASH_COMBINE( seed, shadingRatePaletteNV.pShadingRatePaletteEntries );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PipelineViewportShadingRateImageStateCreateInfoNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PipelineViewportShadingRateImageStateCreateInfoNV const & pipelineViewportShadingRateImageStateCreateInfoNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportShadingRateImageStateCreateInfoNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportShadingRateImageStateCreateInfoNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportShadingRateImageStateCreateInfoNV.shadingRateImageEnable );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportShadingRateImageStateCreateInfoNV.viewportCount );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportShadingRateImageStateCreateInfoNV.pShadingRatePalettes );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ViewportSwizzleNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ViewportSwizzleNV const & viewportSwizzleNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, viewportSwizzleNV.x );
+    VULKAN_HPP_HASH_COMBINE( seed, viewportSwizzleNV.y );
+    VULKAN_HPP_HASH_COMBINE( seed, viewportSwizzleNV.z );
+    VULKAN_HPP_HASH_COMBINE( seed, viewportSwizzleNV.w );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateInfoNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateInfoNV const & pipelineViewportSwizzleStateCreateInfoNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportSwizzleStateCreateInfoNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportSwizzleStateCreateInfoNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportSwizzleStateCreateInfoNV.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportSwizzleStateCreateInfoNV.viewportCount );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportSwizzleStateCreateInfoNV.pViewportSwizzles );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ViewportWScalingNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ViewportWScalingNV const & viewportWScalingNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, viewportWScalingNV.xcoeff );
+    VULKAN_HPP_HASH_COMBINE( seed, viewportWScalingNV.ycoeff );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PipelineViewportWScalingStateCreateInfoNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PipelineViewportWScalingStateCreateInfoNV const & pipelineViewportWScalingStateCreateInfoNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportWScalingStateCreateInfoNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportWScalingStateCreateInfoNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportWScalingStateCreateInfoNV.viewportWScalingEnable );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportWScalingStateCreateInfoNV.viewportCount );
+    VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportWScalingStateCreateInfoNV.pViewportWScalings );
+      return seed;
+    }
+  };
+
+  #if defined( VK_USE_PLATFORM_GGP )
+template <> struct hash<VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP const & presentFrameTokenGGP) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, presentFrameTokenGGP.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, presentFrameTokenGGP.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, presentFrameTokenGGP.frameToken );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_GGP*/
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PresentIdKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PresentIdKHR const & presentIdKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, presentIdKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, presentIdKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, presentIdKHR.swapchainCount );
+    VULKAN_HPP_HASH_COMBINE( seed, presentIdKHR.pPresentIds );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PresentInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PresentInfoKHR const & presentInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, presentInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, presentInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, presentInfoKHR.waitSemaphoreCount );
+    VULKAN_HPP_HASH_COMBINE( seed, presentInfoKHR.pWaitSemaphores );
+    VULKAN_HPP_HASH_COMBINE( seed, presentInfoKHR.swapchainCount );
+    VULKAN_HPP_HASH_COMBINE( seed, presentInfoKHR.pSwapchains );
+    VULKAN_HPP_HASH_COMBINE( seed, presentInfoKHR.pImageIndices );
+    VULKAN_HPP_HASH_COMBINE( seed, presentInfoKHR.pResults );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::RectLayerKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::RectLayerKHR const & rectLayerKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, rectLayerKHR.offset );
+    VULKAN_HPP_HASH_COMBINE( seed, rectLayerKHR.extent );
+    VULKAN_HPP_HASH_COMBINE( seed, rectLayerKHR.layer );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PresentRegionKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PresentRegionKHR const & presentRegionKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, presentRegionKHR.rectangleCount );
+    VULKAN_HPP_HASH_COMBINE( seed, presentRegionKHR.pRectangles );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PresentRegionsKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PresentRegionsKHR const & presentRegionsKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, presentRegionsKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, presentRegionsKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, presentRegionsKHR.swapchainCount );
+    VULKAN_HPP_HASH_COMBINE( seed, presentRegionsKHR.pRegions );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE const & presentTimeGOOGLE) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, presentTimeGOOGLE.presentID );
+    VULKAN_HPP_HASH_COMBINE( seed, presentTimeGOOGLE.desiredPresentTime );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PresentTimesInfoGOOGLE>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PresentTimesInfoGOOGLE const & presentTimesInfoGOOGLE) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, presentTimesInfoGOOGLE.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, presentTimesInfoGOOGLE.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, presentTimesInfoGOOGLE.swapchainCount );
+    VULKAN_HPP_HASH_COMBINE( seed, presentTimesInfoGOOGLE.pTimes );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & privateDataSlotCreateInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, privateDataSlotCreateInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, privateDataSlotCreateInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, privateDataSlotCreateInfo.flags );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo const & protectedSubmitInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, protectedSubmitInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, protectedSubmitInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, protectedSubmitInfo.protectedSubmit );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo const & queryPoolCreateInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, queryPoolCreateInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, queryPoolCreateInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, queryPoolCreateInfo.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, queryPoolCreateInfo.queryType );
+    VULKAN_HPP_HASH_COMBINE( seed, queryPoolCreateInfo.queryCount );
+    VULKAN_HPP_HASH_COMBINE( seed, queryPoolCreateInfo.pipelineStatistics );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR const & queryPoolPerformanceCreateInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, queryPoolPerformanceCreateInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, queryPoolPerformanceCreateInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, queryPoolPerformanceCreateInfoKHR.queueFamilyIndex );
+    VULKAN_HPP_HASH_COMBINE( seed, queryPoolPerformanceCreateInfoKHR.counterIndexCount );
+    VULKAN_HPP_HASH_COMBINE( seed, queryPoolPerformanceCreateInfoKHR.pCounterIndices );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::QueryPoolPerformanceQueryCreateInfoINTEL>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::QueryPoolPerformanceQueryCreateInfoINTEL const & queryPoolPerformanceQueryCreateInfoINTEL) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, queryPoolPerformanceQueryCreateInfoINTEL.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, queryPoolPerformanceQueryCreateInfoINTEL.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, queryPoolPerformanceQueryCreateInfoINTEL.performanceCountersSampling );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointProperties2NV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointProperties2NV const & queueFamilyCheckpointProperties2NV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, queueFamilyCheckpointProperties2NV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, queueFamilyCheckpointProperties2NV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, queueFamilyCheckpointProperties2NV.checkpointExecutionStageMask );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV const & queueFamilyCheckpointPropertiesNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, queueFamilyCheckpointPropertiesNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, queueFamilyCheckpointPropertiesNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, queueFamilyCheckpointPropertiesNV.checkpointExecutionStageMask );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::QueueFamilyGlobalPriorityPropertiesKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::QueueFamilyGlobalPriorityPropertiesKHR const & queueFamilyGlobalPriorityPropertiesKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, queueFamilyGlobalPriorityPropertiesKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, queueFamilyGlobalPriorityPropertiesKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, queueFamilyGlobalPriorityPropertiesKHR.priorityCount );
+    for ( size_t i = 0; i < VK_MAX_GLOBAL_PRIORITY_SIZE_KHR; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, queueFamilyGlobalPriorityPropertiesKHR.priorities[i] );
+    }
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::QueueFamilyProperties>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::QueueFamilyProperties const & queueFamilyProperties) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, queueFamilyProperties.queueFlags );
+    VULKAN_HPP_HASH_COMBINE( seed, queueFamilyProperties.queueCount );
+    VULKAN_HPP_HASH_COMBINE( seed, queueFamilyProperties.timestampValidBits );
+    VULKAN_HPP_HASH_COMBINE( seed, queueFamilyProperties.minImageTransferGranularity );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 const & queueFamilyProperties2) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, queueFamilyProperties2.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, queueFamilyProperties2.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, queueFamilyProperties2.queueFamilyProperties );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::QueueFamilyQueryResultStatusPropertiesKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::QueueFamilyQueryResultStatusPropertiesKHR const & queueFamilyQueryResultStatusPropertiesKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, queueFamilyQueryResultStatusPropertiesKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, queueFamilyQueryResultStatusPropertiesKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, queueFamilyQueryResultStatusPropertiesKHR.queryResultStatusSupport );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::QueueFamilyVideoPropertiesKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::QueueFamilyVideoPropertiesKHR const & queueFamilyVideoPropertiesKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, queueFamilyVideoPropertiesKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, queueFamilyVideoPropertiesKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, queueFamilyVideoPropertiesKHR.videoCodecOperations );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR const & rayTracingShaderGroupCreateInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, rayTracingShaderGroupCreateInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, rayTracingShaderGroupCreateInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, rayTracingShaderGroupCreateInfoKHR.type );
+    VULKAN_HPP_HASH_COMBINE( seed, rayTracingShaderGroupCreateInfoKHR.generalShader );
+    VULKAN_HPP_HASH_COMBINE( seed, rayTracingShaderGroupCreateInfoKHR.closestHitShader );
+    VULKAN_HPP_HASH_COMBINE( seed, rayTracingShaderGroupCreateInfoKHR.anyHitShader );
+    VULKAN_HPP_HASH_COMBINE( seed, rayTracingShaderGroupCreateInfoKHR.intersectionShader );
+    VULKAN_HPP_HASH_COMBINE( seed, rayTracingShaderGroupCreateInfoKHR.pShaderGroupCaptureReplayHandle );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR const & rayTracingPipelineInterfaceCreateInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineInterfaceCreateInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineInterfaceCreateInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineInterfaceCreateInfoKHR.maxPipelineRayPayloadSize );
+    VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineInterfaceCreateInfoKHR.maxPipelineRayHitAttributeSize );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR const & rayTracingPipelineCreateInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoKHR.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoKHR.stageCount );
+    VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoKHR.pStages );
+    VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoKHR.groupCount );
+    VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoKHR.pGroups );
+    VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoKHR.maxPipelineRayRecursionDepth );
+    VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoKHR.pLibraryInfo );
+    VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoKHR.pLibraryInterface );
+    VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoKHR.pDynamicState );
+    VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoKHR.layout );
+    VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoKHR.basePipelineHandle );
+    VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoKHR.basePipelineIndex );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV const & rayTracingShaderGroupCreateInfoNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, rayTracingShaderGroupCreateInfoNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, rayTracingShaderGroupCreateInfoNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, rayTracingShaderGroupCreateInfoNV.type );
+    VULKAN_HPP_HASH_COMBINE( seed, rayTracingShaderGroupCreateInfoNV.generalShader );
+    VULKAN_HPP_HASH_COMBINE( seed, rayTracingShaderGroupCreateInfoNV.closestHitShader );
+    VULKAN_HPP_HASH_COMBINE( seed, rayTracingShaderGroupCreateInfoNV.anyHitShader );
+    VULKAN_HPP_HASH_COMBINE( seed, rayTracingShaderGroupCreateInfoNV.intersectionShader );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV const & rayTracingPipelineCreateInfoNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoNV.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoNV.stageCount );
+    VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoNV.pStages );
+    VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoNV.groupCount );
+    VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoNV.pGroups );
+    VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoNV.maxRecursionDepth );
+    VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoNV.layout );
+    VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoNV.basePipelineHandle );
+    VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoNV.basePipelineIndex );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE const & refreshCycleDurationGOOGLE) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, refreshCycleDurationGOOGLE.refreshDuration );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT const & releaseSwapchainImagesInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, releaseSwapchainImagesInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, releaseSwapchainImagesInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, releaseSwapchainImagesInfoEXT.swapchain );
+    VULKAN_HPP_HASH_COMBINE( seed, releaseSwapchainImagesInfoEXT.imageIndexCount );
+    VULKAN_HPP_HASH_COMBINE( seed, releaseSwapchainImagesInfoEXT.pImageIndices );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::RenderPassAttachmentBeginInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::RenderPassAttachmentBeginInfo const & renderPassAttachmentBeginInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassAttachmentBeginInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassAttachmentBeginInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassAttachmentBeginInfo.attachmentCount );
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassAttachmentBeginInfo.pAttachments );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::RenderPassBeginInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::RenderPassBeginInfo const & renderPassBeginInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassBeginInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassBeginInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassBeginInfo.renderPass );
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassBeginInfo.framebuffer );
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassBeginInfo.renderArea );
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassBeginInfo.clearValueCount );
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassBeginInfo.pClearValues );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SubpassDescription>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SubpassDescription const & subpassDescription) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, subpassDescription.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, subpassDescription.pipelineBindPoint );
+    VULKAN_HPP_HASH_COMBINE( seed, subpassDescription.inputAttachmentCount );
+    VULKAN_HPP_HASH_COMBINE( seed, subpassDescription.pInputAttachments );
+    VULKAN_HPP_HASH_COMBINE( seed, subpassDescription.colorAttachmentCount );
+    VULKAN_HPP_HASH_COMBINE( seed, subpassDescription.pColorAttachments );
+    VULKAN_HPP_HASH_COMBINE( seed, subpassDescription.pResolveAttachments );
+    VULKAN_HPP_HASH_COMBINE( seed, subpassDescription.pDepthStencilAttachment );
+    VULKAN_HPP_HASH_COMBINE( seed, subpassDescription.preserveAttachmentCount );
+    VULKAN_HPP_HASH_COMBINE( seed, subpassDescription.pPreserveAttachments );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SubpassDependency>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SubpassDependency const & subpassDependency) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, subpassDependency.srcSubpass );
+    VULKAN_HPP_HASH_COMBINE( seed, subpassDependency.dstSubpass );
+    VULKAN_HPP_HASH_COMBINE( seed, subpassDependency.srcStageMask );
+    VULKAN_HPP_HASH_COMBINE( seed, subpassDependency.dstStageMask );
+    VULKAN_HPP_HASH_COMBINE( seed, subpassDependency.srcAccessMask );
+    VULKAN_HPP_HASH_COMBINE( seed, subpassDependency.dstAccessMask );
+    VULKAN_HPP_HASH_COMBINE( seed, subpassDependency.dependencyFlags );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::RenderPassCreateInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::RenderPassCreateInfo const & renderPassCreateInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo.attachmentCount );
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo.pAttachments );
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo.subpassCount );
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo.pSubpasses );
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo.dependencyCount );
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo.pDependencies );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SubpassDescription2>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SubpassDescription2 const & subpassDescription2) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, subpassDescription2.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, subpassDescription2.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, subpassDescription2.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, subpassDescription2.pipelineBindPoint );
+    VULKAN_HPP_HASH_COMBINE( seed, subpassDescription2.viewMask );
+    VULKAN_HPP_HASH_COMBINE( seed, subpassDescription2.inputAttachmentCount );
+    VULKAN_HPP_HASH_COMBINE( seed, subpassDescription2.pInputAttachments );
+    VULKAN_HPP_HASH_COMBINE( seed, subpassDescription2.colorAttachmentCount );
+    VULKAN_HPP_HASH_COMBINE( seed, subpassDescription2.pColorAttachments );
+    VULKAN_HPP_HASH_COMBINE( seed, subpassDescription2.pResolveAttachments );
+    VULKAN_HPP_HASH_COMBINE( seed, subpassDescription2.pDepthStencilAttachment );
+    VULKAN_HPP_HASH_COMBINE( seed, subpassDescription2.preserveAttachmentCount );
+    VULKAN_HPP_HASH_COMBINE( seed, subpassDescription2.pPreserveAttachments );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SubpassDependency2>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SubpassDependency2 const & subpassDependency2) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, subpassDependency2.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, subpassDependency2.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, subpassDependency2.srcSubpass );
+    VULKAN_HPP_HASH_COMBINE( seed, subpassDependency2.dstSubpass );
+    VULKAN_HPP_HASH_COMBINE( seed, subpassDependency2.srcStageMask );
+    VULKAN_HPP_HASH_COMBINE( seed, subpassDependency2.dstStageMask );
+    VULKAN_HPP_HASH_COMBINE( seed, subpassDependency2.srcAccessMask );
+    VULKAN_HPP_HASH_COMBINE( seed, subpassDependency2.dstAccessMask );
+    VULKAN_HPP_HASH_COMBINE( seed, subpassDependency2.dependencyFlags );
+    VULKAN_HPP_HASH_COMBINE( seed, subpassDependency2.viewOffset );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & renderPassCreateInfo2) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo2.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo2.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo2.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo2.attachmentCount );
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo2.pAttachments );
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo2.subpassCount );
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo2.pSubpasses );
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo2.dependencyCount );
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo2.pDependencies );
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo2.correlatedViewMaskCount );
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo2.pCorrelatedViewMasks );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::RenderPassCreationControlEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::RenderPassCreationControlEXT const & renderPassCreationControlEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassCreationControlEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassCreationControlEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassCreationControlEXT.disallowMerging );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackInfoEXT const & renderPassCreationFeedbackInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassCreationFeedbackInfoEXT.postMergeSubpassCount );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackCreateInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackCreateInfoEXT const & renderPassCreationFeedbackCreateInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassCreationFeedbackCreateInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassCreationFeedbackCreateInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassCreationFeedbackCreateInfoEXT.pRenderPassFeedback );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapCreateInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapCreateInfoEXT const & renderPassFragmentDensityMapCreateInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassFragmentDensityMapCreateInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassFragmentDensityMapCreateInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassFragmentDensityMapCreateInfoEXT.fragmentDensityMapAttachment );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::RenderPassInputAttachmentAspectCreateInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::RenderPassInputAttachmentAspectCreateInfo const & renderPassInputAttachmentAspectCreateInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassInputAttachmentAspectCreateInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassInputAttachmentAspectCreateInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassInputAttachmentAspectCreateInfo.aspectReferenceCount );
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassInputAttachmentAspectCreateInfo.pAspectReferences );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::RenderPassMultiviewCreateInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::RenderPassMultiviewCreateInfo const & renderPassMultiviewCreateInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassMultiviewCreateInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassMultiviewCreateInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassMultiviewCreateInfo.subpassCount );
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassMultiviewCreateInfo.pViewMasks );
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassMultiviewCreateInfo.dependencyCount );
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassMultiviewCreateInfo.pViewOffsets );
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassMultiviewCreateInfo.correlationMaskCount );
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassMultiviewCreateInfo.pCorrelationMasks );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT const & subpassSampleLocationsEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, subpassSampleLocationsEXT.subpassIndex );
+    VULKAN_HPP_HASH_COMBINE( seed, subpassSampleLocationsEXT.sampleLocationsInfo );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::RenderPassSampleLocationsBeginInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::RenderPassSampleLocationsBeginInfoEXT const & renderPassSampleLocationsBeginInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassSampleLocationsBeginInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassSampleLocationsBeginInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassSampleLocationsBeginInfoEXT.attachmentInitialSampleLocationsCount );
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassSampleLocationsBeginInfoEXT.pAttachmentInitialSampleLocations );
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassSampleLocationsBeginInfoEXT.postSubpassSampleLocationsCount );
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassSampleLocationsBeginInfoEXT.pPostSubpassSampleLocations );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackInfoEXT const & renderPassSubpassFeedbackInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassSubpassFeedbackInfoEXT.subpassMergeStatus );
+    for ( size_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, renderPassSubpassFeedbackInfoEXT.description[i] );
+    }
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassSubpassFeedbackInfoEXT.postMergeIndex );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackCreateInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackCreateInfoEXT const & renderPassSubpassFeedbackCreateInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassSubpassFeedbackCreateInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassSubpassFeedbackCreateInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassSubpassFeedbackCreateInfoEXT.pSubpassFeedback );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::RenderPassTransformBeginInfoQCOM>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::RenderPassTransformBeginInfoQCOM const & renderPassTransformBeginInfoQCOM) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassTransformBeginInfoQCOM.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassTransformBeginInfoQCOM.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, renderPassTransformBeginInfoQCOM.transform );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::RenderingFragmentDensityMapAttachmentInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::RenderingFragmentDensityMapAttachmentInfoEXT const & renderingFragmentDensityMapAttachmentInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, renderingFragmentDensityMapAttachmentInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, renderingFragmentDensityMapAttachmentInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, renderingFragmentDensityMapAttachmentInfoEXT.imageView );
+    VULKAN_HPP_HASH_COMBINE( seed, renderingFragmentDensityMapAttachmentInfoEXT.imageLayout );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::RenderingFragmentShadingRateAttachmentInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::RenderingFragmentShadingRateAttachmentInfoKHR const & renderingFragmentShadingRateAttachmentInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, renderingFragmentShadingRateAttachmentInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, renderingFragmentShadingRateAttachmentInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, renderingFragmentShadingRateAttachmentInfoKHR.imageView );
+    VULKAN_HPP_HASH_COMBINE( seed, renderingFragmentShadingRateAttachmentInfoKHR.imageLayout );
+    VULKAN_HPP_HASH_COMBINE( seed, renderingFragmentShadingRateAttachmentInfoKHR.shadingRateAttachmentTexelSize );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::RenderingInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::RenderingInfo const & renderingInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, renderingInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, renderingInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, renderingInfo.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, renderingInfo.renderArea );
+    VULKAN_HPP_HASH_COMBINE( seed, renderingInfo.layerCount );
+    VULKAN_HPP_HASH_COMBINE( seed, renderingInfo.viewMask );
+    VULKAN_HPP_HASH_COMBINE( seed, renderingInfo.colorAttachmentCount );
+    VULKAN_HPP_HASH_COMBINE( seed, renderingInfo.pColorAttachments );
+    VULKAN_HPP_HASH_COMBINE( seed, renderingInfo.pDepthAttachment );
+    VULKAN_HPP_HASH_COMBINE( seed, renderingInfo.pStencilAttachment );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ResolveImageInfo2>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ResolveImageInfo2 const & resolveImageInfo2) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, resolveImageInfo2.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, resolveImageInfo2.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, resolveImageInfo2.srcImage );
+    VULKAN_HPP_HASH_COMBINE( seed, resolveImageInfo2.srcImageLayout );
+    VULKAN_HPP_HASH_COMBINE( seed, resolveImageInfo2.dstImage );
+    VULKAN_HPP_HASH_COMBINE( seed, resolveImageInfo2.dstImageLayout );
+    VULKAN_HPP_HASH_COMBINE( seed, resolveImageInfo2.regionCount );
+    VULKAN_HPP_HASH_COMBINE( seed, resolveImageInfo2.pRegions );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SamplerBorderColorComponentMappingCreateInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SamplerBorderColorComponentMappingCreateInfoEXT const & samplerBorderColorComponentMappingCreateInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, samplerBorderColorComponentMappingCreateInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, samplerBorderColorComponentMappingCreateInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, samplerBorderColorComponentMappingCreateInfoEXT.components );
+    VULKAN_HPP_HASH_COMBINE( seed, samplerBorderColorComponentMappingCreateInfoEXT.srgb );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT const & samplerCaptureDescriptorDataInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, samplerCaptureDescriptorDataInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, samplerCaptureDescriptorDataInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, samplerCaptureDescriptorDataInfoEXT.sampler );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SamplerCreateInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SamplerCreateInfo const & samplerCreateInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.magFilter );
+    VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.minFilter );
+    VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.mipmapMode );
+    VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.addressModeU );
+    VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.addressModeV );
+    VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.addressModeW );
+    VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.mipLodBias );
+    VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.anisotropyEnable );
+    VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.maxAnisotropy );
+    VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.compareEnable );
+    VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.compareOp );
+    VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.minLod );
+    VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.maxLod );
+    VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.borderColor );
+    VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.unnormalizedCoordinates );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SamplerReductionModeCreateInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SamplerReductionModeCreateInfo const & samplerReductionModeCreateInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, samplerReductionModeCreateInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, samplerReductionModeCreateInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, samplerReductionModeCreateInfo.reductionMode );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & samplerYcbcrConversionCreateInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionCreateInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionCreateInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionCreateInfo.format );
+    VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionCreateInfo.ycbcrModel );
+    VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionCreateInfo.ycbcrRange );
+    VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionCreateInfo.components );
+    VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionCreateInfo.xChromaOffset );
+    VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionCreateInfo.yChromaOffset );
+    VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionCreateInfo.chromaFilter );
+    VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionCreateInfo.forceExplicitReconstruction );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionImageFormatProperties>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionImageFormatProperties const & samplerYcbcrConversionImageFormatProperties) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionImageFormatProperties.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionImageFormatProperties.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionImageFormatProperties.combinedImageSamplerDescriptorCount );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfo const & samplerYcbcrConversionInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionInfo.conversion );
+      return seed;
+    }
+  };
+
+  #if defined( VK_USE_PLATFORM_SCREEN_QNX )
+template <> struct hash<VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX const & screenSurfaceCreateInfoQNX) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, screenSurfaceCreateInfoQNX.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, screenSurfaceCreateInfoQNX.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, screenSurfaceCreateInfoQNX.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, screenSurfaceCreateInfoQNX.context );
+    VULKAN_HPP_HASH_COMBINE( seed, screenSurfaceCreateInfoQNX.window );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo const & semaphoreCreateInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, semaphoreCreateInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, semaphoreCreateInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, semaphoreCreateInfo.flags );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR const & semaphoreGetFdInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, semaphoreGetFdInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, semaphoreGetFdInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, semaphoreGetFdInfoKHR.semaphore );
+    VULKAN_HPP_HASH_COMBINE( seed, semaphoreGetFdInfoKHR.handleType );
+      return seed;
+    }
+  };
+
+  #if defined( VK_USE_PLATFORM_WIN32_KHR )
+template <> struct hash<VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR const & semaphoreGetWin32HandleInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, semaphoreGetWin32HandleInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, semaphoreGetWin32HandleInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, semaphoreGetWin32HandleInfoKHR.semaphore );
+    VULKAN_HPP_HASH_COMBINE( seed, semaphoreGetWin32HandleInfoKHR.handleType );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  #if defined( VK_USE_PLATFORM_FUCHSIA )
+template <> struct hash<VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA const & semaphoreGetZirconHandleInfoFUCHSIA) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, semaphoreGetZirconHandleInfoFUCHSIA.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, semaphoreGetZirconHandleInfoFUCHSIA.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, semaphoreGetZirconHandleInfoFUCHSIA.semaphore );
+    VULKAN_HPP_HASH_COMBINE( seed, semaphoreGetZirconHandleInfoFUCHSIA.handleType );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo const & semaphoreSignalInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, semaphoreSignalInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, semaphoreSignalInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, semaphoreSignalInfo.semaphore );
+    VULKAN_HPP_HASH_COMBINE( seed, semaphoreSignalInfo.value );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo const & semaphoreSubmitInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, semaphoreSubmitInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, semaphoreSubmitInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, semaphoreSubmitInfo.semaphore );
+    VULKAN_HPP_HASH_COMBINE( seed, semaphoreSubmitInfo.value );
+    VULKAN_HPP_HASH_COMBINE( seed, semaphoreSubmitInfo.stageMask );
+    VULKAN_HPP_HASH_COMBINE( seed, semaphoreSubmitInfo.deviceIndex );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SemaphoreTypeCreateInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SemaphoreTypeCreateInfo const & semaphoreTypeCreateInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, semaphoreTypeCreateInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, semaphoreTypeCreateInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, semaphoreTypeCreateInfo.semaphoreType );
+    VULKAN_HPP_HASH_COMBINE( seed, semaphoreTypeCreateInfo.initialValue );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo const & semaphoreWaitInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, semaphoreWaitInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, semaphoreWaitInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, semaphoreWaitInfo.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, semaphoreWaitInfo.semaphoreCount );
+    VULKAN_HPP_HASH_COMBINE( seed, semaphoreWaitInfo.pSemaphores );
+    VULKAN_HPP_HASH_COMBINE( seed, semaphoreWaitInfo.pValues );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SetStateFlagsIndirectCommandNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SetStateFlagsIndirectCommandNV const & setStateFlagsIndirectCommandNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, setStateFlagsIndirectCommandNV.data );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo const & shaderModuleCreateInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, shaderModuleCreateInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, shaderModuleCreateInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, shaderModuleCreateInfo.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, shaderModuleCreateInfo.codeSize );
+    VULKAN_HPP_HASH_COMBINE( seed, shaderModuleCreateInfo.pCode );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT const & shaderModuleIdentifierEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, shaderModuleIdentifierEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, shaderModuleIdentifierEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, shaderModuleIdentifierEXT.identifierSize );
+    for ( size_t i = 0; i < VK_MAX_SHADER_MODULE_IDENTIFIER_SIZE_EXT; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, shaderModuleIdentifierEXT.identifier[i] );
+    }
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ShaderModuleValidationCacheCreateInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ShaderModuleValidationCacheCreateInfoEXT const & shaderModuleValidationCacheCreateInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, shaderModuleValidationCacheCreateInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, shaderModuleValidationCacheCreateInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, shaderModuleValidationCacheCreateInfoEXT.validationCache );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD const & shaderResourceUsageAMD) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, shaderResourceUsageAMD.numUsedVgprs );
+    VULKAN_HPP_HASH_COMBINE( seed, shaderResourceUsageAMD.numUsedSgprs );
+    VULKAN_HPP_HASH_COMBINE( seed, shaderResourceUsageAMD.ldsSizePerLocalWorkGroup );
+    VULKAN_HPP_HASH_COMBINE( seed, shaderResourceUsageAMD.ldsUsageSizeInBytes );
+    VULKAN_HPP_HASH_COMBINE( seed, shaderResourceUsageAMD.scratchMemUsageInBytes );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ShaderStatisticsInfoAMD>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ShaderStatisticsInfoAMD const & shaderStatisticsInfoAMD) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, shaderStatisticsInfoAMD.shaderStageMask );
+    VULKAN_HPP_HASH_COMBINE( seed, shaderStatisticsInfoAMD.resourceUsage );
+    VULKAN_HPP_HASH_COMBINE( seed, shaderStatisticsInfoAMD.numPhysicalVgprs );
+    VULKAN_HPP_HASH_COMBINE( seed, shaderStatisticsInfoAMD.numPhysicalSgprs );
+    VULKAN_HPP_HASH_COMBINE( seed, shaderStatisticsInfoAMD.numAvailableVgprs );
+    VULKAN_HPP_HASH_COMBINE( seed, shaderStatisticsInfoAMD.numAvailableSgprs );
+    for ( size_t i = 0; i < 3; ++i )
+    {
+      VULKAN_HPP_HASH_COMBINE( seed, shaderStatisticsInfoAMD.computeWorkGroupSize[i] );
+    }
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SharedPresentSurfaceCapabilitiesKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SharedPresentSurfaceCapabilitiesKHR const & sharedPresentSurfaceCapabilitiesKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, sharedPresentSurfaceCapabilitiesKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, sharedPresentSurfaceCapabilitiesKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, sharedPresentSurfaceCapabilitiesKHR.sharedPresentSupportedUsageFlags );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SparseImageFormatProperties const & sparseImageFormatProperties) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, sparseImageFormatProperties.aspectMask );
+    VULKAN_HPP_HASH_COMBINE( seed, sparseImageFormatProperties.imageGranularity );
+    VULKAN_HPP_HASH_COMBINE( seed, sparseImageFormatProperties.flags );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 const & sparseImageFormatProperties2) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, sparseImageFormatProperties2.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, sparseImageFormatProperties2.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, sparseImageFormatProperties2.properties );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements const & sparseImageMemoryRequirements) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryRequirements.formatProperties );
+    VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryRequirements.imageMipTailFirstLod );
+    VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryRequirements.imageMipTailSize );
+    VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryRequirements.imageMipTailOffset );
+    VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryRequirements.imageMipTailStride );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 const & sparseImageMemoryRequirements2) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryRequirements2.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryRequirements2.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryRequirements2.memoryRequirements );
+      return seed;
+    }
+  };
+
+  #if defined( VK_USE_PLATFORM_GGP )
+template <> struct hash<VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP const & streamDescriptorSurfaceCreateInfoGGP) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, streamDescriptorSurfaceCreateInfoGGP.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, streamDescriptorSurfaceCreateInfoGGP.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, streamDescriptorSurfaceCreateInfoGGP.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, streamDescriptorSurfaceCreateInfoGGP.streamDescriptor );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_GGP*/
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR const & stridedDeviceAddressRegionKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, stridedDeviceAddressRegionKHR.deviceAddress );
+    VULKAN_HPP_HASH_COMBINE( seed, stridedDeviceAddressRegionKHR.stride );
+    VULKAN_HPP_HASH_COMBINE( seed, stridedDeviceAddressRegionKHR.size );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SubmitInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SubmitInfo const & submitInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, submitInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, submitInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, submitInfo.waitSemaphoreCount );
+    VULKAN_HPP_HASH_COMBINE( seed, submitInfo.pWaitSemaphores );
+    VULKAN_HPP_HASH_COMBINE( seed, submitInfo.pWaitDstStageMask );
+    VULKAN_HPP_HASH_COMBINE( seed, submitInfo.commandBufferCount );
+    VULKAN_HPP_HASH_COMBINE( seed, submitInfo.pCommandBuffers );
+    VULKAN_HPP_HASH_COMBINE( seed, submitInfo.signalSemaphoreCount );
+    VULKAN_HPP_HASH_COMBINE( seed, submitInfo.pSignalSemaphores );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SubmitInfo2>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SubmitInfo2 const & submitInfo2) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, submitInfo2.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, submitInfo2.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, submitInfo2.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, submitInfo2.waitSemaphoreInfoCount );
+    VULKAN_HPP_HASH_COMBINE( seed, submitInfo2.pWaitSemaphoreInfos );
+    VULKAN_HPP_HASH_COMBINE( seed, submitInfo2.commandBufferInfoCount );
+    VULKAN_HPP_HASH_COMBINE( seed, submitInfo2.pCommandBufferInfos );
+    VULKAN_HPP_HASH_COMBINE( seed, submitInfo2.signalSemaphoreInfoCount );
+    VULKAN_HPP_HASH_COMBINE( seed, submitInfo2.pSignalSemaphoreInfos );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SubpassBeginInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SubpassBeginInfo const & subpassBeginInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, subpassBeginInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, subpassBeginInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, subpassBeginInfo.contents );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SubpassDescriptionDepthStencilResolve>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SubpassDescriptionDepthStencilResolve const & subpassDescriptionDepthStencilResolve) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, subpassDescriptionDepthStencilResolve.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, subpassDescriptionDepthStencilResolve.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, subpassDescriptionDepthStencilResolve.depthResolveMode );
+    VULKAN_HPP_HASH_COMBINE( seed, subpassDescriptionDepthStencilResolve.stencilResolveMode );
+    VULKAN_HPP_HASH_COMBINE( seed, subpassDescriptionDepthStencilResolve.pDepthStencilResolveAttachment );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SubpassEndInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SubpassEndInfo const & subpassEndInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, subpassEndInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, subpassEndInfo.pNext );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SubpassFragmentDensityMapOffsetEndInfoQCOM>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SubpassFragmentDensityMapOffsetEndInfoQCOM const & subpassFragmentDensityMapOffsetEndInfoQCOM) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, subpassFragmentDensityMapOffsetEndInfoQCOM.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, subpassFragmentDensityMapOffsetEndInfoQCOM.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, subpassFragmentDensityMapOffsetEndInfoQCOM.fragmentDensityOffsetCount );
+    VULKAN_HPP_HASH_COMBINE( seed, subpassFragmentDensityMapOffsetEndInfoQCOM.pFragmentDensityOffsets );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SubpassResolvePerformanceQueryEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SubpassResolvePerformanceQueryEXT const & subpassResolvePerformanceQueryEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, subpassResolvePerformanceQueryEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, subpassResolvePerformanceQueryEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, subpassResolvePerformanceQueryEXT.optimal );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SubpassShadingPipelineCreateInfoHUAWEI>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SubpassShadingPipelineCreateInfoHUAWEI const & subpassShadingPipelineCreateInfoHUAWEI) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, subpassShadingPipelineCreateInfoHUAWEI.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, subpassShadingPipelineCreateInfoHUAWEI.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, subpassShadingPipelineCreateInfoHUAWEI.renderPass );
+    VULKAN_HPP_HASH_COMBINE( seed, subpassShadingPipelineCreateInfoHUAWEI.subpass );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT const & subresourceLayout2EXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, subresourceLayout2EXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, subresourceLayout2EXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, subresourceLayout2EXT.subresourceLayout );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT const & surfaceCapabilities2EXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilities2EXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilities2EXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilities2EXT.minImageCount );
+    VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilities2EXT.maxImageCount );
+    VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilities2EXT.currentExtent );
+    VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilities2EXT.minImageExtent );
+    VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilities2EXT.maxImageExtent );
+    VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilities2EXT.maxImageArrayLayers );
+    VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilities2EXT.supportedTransforms );
+    VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilities2EXT.currentTransform );
+    VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilities2EXT.supportedCompositeAlpha );
+    VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilities2EXT.supportedUsageFlags );
+    VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilities2EXT.supportedSurfaceCounters );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR const & surfaceCapabilitiesKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesKHR.minImageCount );
+    VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesKHR.maxImageCount );
+    VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesKHR.currentExtent );
+    VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesKHR.minImageExtent );
+    VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesKHR.maxImageExtent );
+    VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesKHR.maxImageArrayLayers );
+    VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesKHR.supportedTransforms );
+    VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesKHR.currentTransform );
+    VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesKHR.supportedCompositeAlpha );
+    VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesKHR.supportedUsageFlags );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR const & surfaceCapabilities2KHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilities2KHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilities2KHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilities2KHR.surfaceCapabilities );
+      return seed;
+    }
+  };
+
+  #if defined( VK_USE_PLATFORM_WIN32_KHR )
+template <> struct hash<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesFullScreenExclusiveEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesFullScreenExclusiveEXT const & surfaceCapabilitiesFullScreenExclusiveEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesFullScreenExclusiveEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesFullScreenExclusiveEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesFullScreenExclusiveEXT.fullScreenExclusiveSupported );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesPresentBarrierNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesPresentBarrierNV const & surfaceCapabilitiesPresentBarrierNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesPresentBarrierNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesPresentBarrierNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesPresentBarrierNV.presentBarrierSupported );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SurfaceFormatKHR const & surfaceFormatKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, surfaceFormatKHR.format );
+    VULKAN_HPP_HASH_COMBINE( seed, surfaceFormatKHR.colorSpace );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR const & surfaceFormat2KHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, surfaceFormat2KHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, surfaceFormat2KHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, surfaceFormat2KHR.surfaceFormat );
+      return seed;
+    }
+  };
+
+  #if defined( VK_USE_PLATFORM_WIN32_KHR )
+template <> struct hash<VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveInfoEXT const & surfaceFullScreenExclusiveInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, surfaceFullScreenExclusiveInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, surfaceFullScreenExclusiveInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, surfaceFullScreenExclusiveInfoEXT.fullScreenExclusive );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  #if defined( VK_USE_PLATFORM_WIN32_KHR )
+template <> struct hash<VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveWin32InfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveWin32InfoEXT const & surfaceFullScreenExclusiveWin32InfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, surfaceFullScreenExclusiveWin32InfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, surfaceFullScreenExclusiveWin32InfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, surfaceFullScreenExclusiveWin32InfoEXT.hmonitor );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SurfacePresentModeCompatibilityEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SurfacePresentModeCompatibilityEXT const & surfacePresentModeCompatibilityEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, surfacePresentModeCompatibilityEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, surfacePresentModeCompatibilityEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, surfacePresentModeCompatibilityEXT.presentModeCount );
+    VULKAN_HPP_HASH_COMBINE( seed, surfacePresentModeCompatibilityEXT.pPresentModes );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SurfacePresentModeEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SurfacePresentModeEXT const & surfacePresentModeEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, surfacePresentModeEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, surfacePresentModeEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, surfacePresentModeEXT.presentMode );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SurfacePresentScalingCapabilitiesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SurfacePresentScalingCapabilitiesEXT const & surfacePresentScalingCapabilitiesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, surfacePresentScalingCapabilitiesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, surfacePresentScalingCapabilitiesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, surfacePresentScalingCapabilitiesEXT.supportedPresentScaling );
+    VULKAN_HPP_HASH_COMBINE( seed, surfacePresentScalingCapabilitiesEXT.supportedPresentGravityX );
+    VULKAN_HPP_HASH_COMBINE( seed, surfacePresentScalingCapabilitiesEXT.supportedPresentGravityY );
+    VULKAN_HPP_HASH_COMBINE( seed, surfacePresentScalingCapabilitiesEXT.minScaledImageExtent );
+    VULKAN_HPP_HASH_COMBINE( seed, surfacePresentScalingCapabilitiesEXT.maxScaledImageExtent );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SurfaceProtectedCapabilitiesKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SurfaceProtectedCapabilitiesKHR const & surfaceProtectedCapabilitiesKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, surfaceProtectedCapabilitiesKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, surfaceProtectedCapabilitiesKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, surfaceProtectedCapabilitiesKHR.supportsProtected );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SwapchainCounterCreateInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SwapchainCounterCreateInfoEXT const & swapchainCounterCreateInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, swapchainCounterCreateInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, swapchainCounterCreateInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, swapchainCounterCreateInfoEXT.surfaceCounters );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & swapchainCreateInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.surface );
+    VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.minImageCount );
+    VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.imageFormat );
+    VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.imageColorSpace );
+    VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.imageExtent );
+    VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.imageArrayLayers );
+    VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.imageUsage );
+    VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.imageSharingMode );
+    VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.queueFamilyIndexCount );
+    VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.pQueueFamilyIndices );
+    VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.preTransform );
+    VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.compositeAlpha );
+    VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.presentMode );
+    VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.clipped );
+    VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.oldSwapchain );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SwapchainDisplayNativeHdrCreateInfoAMD>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SwapchainDisplayNativeHdrCreateInfoAMD const & swapchainDisplayNativeHdrCreateInfoAMD) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, swapchainDisplayNativeHdrCreateInfoAMD.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, swapchainDisplayNativeHdrCreateInfoAMD.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, swapchainDisplayNativeHdrCreateInfoAMD.localDimmingEnable );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SwapchainPresentBarrierCreateInfoNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SwapchainPresentBarrierCreateInfoNV const & swapchainPresentBarrierCreateInfoNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentBarrierCreateInfoNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentBarrierCreateInfoNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentBarrierCreateInfoNV.presentBarrierEnable );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SwapchainPresentFenceInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SwapchainPresentFenceInfoEXT const & swapchainPresentFenceInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentFenceInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentFenceInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentFenceInfoEXT.swapchainCount );
+    VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentFenceInfoEXT.pFences );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SwapchainPresentModeInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SwapchainPresentModeInfoEXT const & swapchainPresentModeInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentModeInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentModeInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentModeInfoEXT.swapchainCount );
+    VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentModeInfoEXT.pPresentModes );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SwapchainPresentModesCreateInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SwapchainPresentModesCreateInfoEXT const & swapchainPresentModesCreateInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentModesCreateInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentModesCreateInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentModesCreateInfoEXT.presentModeCount );
+    VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentModesCreateInfoEXT.pPresentModes );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SwapchainPresentScalingCreateInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SwapchainPresentScalingCreateInfoEXT const & swapchainPresentScalingCreateInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentScalingCreateInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentScalingCreateInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentScalingCreateInfoEXT.scalingBehavior );
+    VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentScalingCreateInfoEXT.presentGravityX );
+    VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentScalingCreateInfoEXT.presentGravityY );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::TextureLODGatherFormatPropertiesAMD>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::TextureLODGatherFormatPropertiesAMD const & textureLODGatherFormatPropertiesAMD) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, textureLODGatherFormatPropertiesAMD.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, textureLODGatherFormatPropertiesAMD.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, textureLODGatherFormatPropertiesAMD.supportsTextureGatherLODBiasAMD );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::TilePropertiesQCOM const & tilePropertiesQCOM) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, tilePropertiesQCOM.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, tilePropertiesQCOM.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, tilePropertiesQCOM.tileSize );
+    VULKAN_HPP_HASH_COMBINE( seed, tilePropertiesQCOM.apronSize );
+    VULKAN_HPP_HASH_COMBINE( seed, tilePropertiesQCOM.origin );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfo>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfo const & timelineSemaphoreSubmitInfo) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, timelineSemaphoreSubmitInfo.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, timelineSemaphoreSubmitInfo.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, timelineSemaphoreSubmitInfo.waitSemaphoreValueCount );
+    VULKAN_HPP_HASH_COMBINE( seed, timelineSemaphoreSubmitInfo.pWaitSemaphoreValues );
+    VULKAN_HPP_HASH_COMBINE( seed, timelineSemaphoreSubmitInfo.signalSemaphoreValueCount );
+    VULKAN_HPP_HASH_COMBINE( seed, timelineSemaphoreSubmitInfo.pSignalSemaphoreValues );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommand2KHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommand2KHR const & traceRaysIndirectCommand2KHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.raygenShaderRecordAddress );
+    VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.raygenShaderRecordSize );
+    VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.missShaderBindingTableAddress );
+    VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.missShaderBindingTableSize );
+    VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.missShaderBindingTableStride );
+    VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.hitShaderBindingTableAddress );
+    VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.hitShaderBindingTableSize );
+    VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.hitShaderBindingTableStride );
+    VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.callableShaderBindingTableAddress );
+    VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.callableShaderBindingTableSize );
+    VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.callableShaderBindingTableStride );
+    VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.width );
+    VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.height );
+    VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.depth );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommandKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommandKHR const & traceRaysIndirectCommandKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommandKHR.width );
+    VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommandKHR.height );
+    VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommandKHR.depth );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT const & validationCacheCreateInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, validationCacheCreateInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, validationCacheCreateInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, validationCacheCreateInfoEXT.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, validationCacheCreateInfoEXT.initialDataSize );
+    VULKAN_HPP_HASH_COMBINE( seed, validationCacheCreateInfoEXT.pInitialData );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ValidationFeaturesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ValidationFeaturesEXT const & validationFeaturesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, validationFeaturesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, validationFeaturesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, validationFeaturesEXT.enabledValidationFeatureCount );
+    VULKAN_HPP_HASH_COMBINE( seed, validationFeaturesEXT.pEnabledValidationFeatures );
+    VULKAN_HPP_HASH_COMBINE( seed, validationFeaturesEXT.disabledValidationFeatureCount );
+    VULKAN_HPP_HASH_COMBINE( seed, validationFeaturesEXT.pDisabledValidationFeatures );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ValidationFlagsEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ValidationFlagsEXT const & validationFlagsEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, validationFlagsEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, validationFlagsEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, validationFlagsEXT.disabledValidationCheckCount );
+    VULKAN_HPP_HASH_COMBINE( seed, validationFlagsEXT.pDisabledValidationChecks );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT const & vertexInputAttributeDescription2EXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, vertexInputAttributeDescription2EXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, vertexInputAttributeDescription2EXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, vertexInputAttributeDescription2EXT.location );
+    VULKAN_HPP_HASH_COMBINE( seed, vertexInputAttributeDescription2EXT.binding );
+    VULKAN_HPP_HASH_COMBINE( seed, vertexInputAttributeDescription2EXT.format );
+    VULKAN_HPP_HASH_COMBINE( seed, vertexInputAttributeDescription2EXT.offset );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT const & vertexInputBindingDescription2EXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDescription2EXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDescription2EXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDescription2EXT.binding );
+    VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDescription2EXT.stride );
+    VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDescription2EXT.inputRate );
+    VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDescription2EXT.divisor );
+      return seed;
+    }
+  };
+
+  #if defined( VK_USE_PLATFORM_VI_NN )
+template <> struct hash<VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN const & viSurfaceCreateInfoNN) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, viSurfaceCreateInfoNN.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, viSurfaceCreateInfoNN.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, viSurfaceCreateInfoNN.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, viSurfaceCreateInfoNN.window );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_VI_NN*/
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR const & videoPictureResourceInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, videoPictureResourceInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, videoPictureResourceInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, videoPictureResourceInfoKHR.codedOffset );
+    VULKAN_HPP_HASH_COMBINE( seed, videoPictureResourceInfoKHR.codedExtent );
+    VULKAN_HPP_HASH_COMBINE( seed, videoPictureResourceInfoKHR.baseArrayLayer );
+    VULKAN_HPP_HASH_COMBINE( seed, videoPictureResourceInfoKHR.imageViewBinding );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR const & videoReferenceSlotInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, videoReferenceSlotInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, videoReferenceSlotInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, videoReferenceSlotInfoKHR.slotIndex );
+    VULKAN_HPP_HASH_COMBINE( seed, videoReferenceSlotInfoKHR.pPictureResource );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR const & videoBeginCodingInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, videoBeginCodingInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, videoBeginCodingInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, videoBeginCodingInfoKHR.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, videoBeginCodingInfoKHR.videoSession );
+    VULKAN_HPP_HASH_COMBINE( seed, videoBeginCodingInfoKHR.videoSessionParameters );
+    VULKAN_HPP_HASH_COMBINE( seed, videoBeginCodingInfoKHR.referenceSlotCount );
+    VULKAN_HPP_HASH_COMBINE( seed, videoBeginCodingInfoKHR.pReferenceSlots );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR const & videoCapabilitiesKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.minBitstreamBufferOffsetAlignment );
+    VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.minBitstreamBufferSizeAlignment );
+    VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.pictureAccessGranularity );
+    VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.minCodedExtent );
+    VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.maxCodedExtent );
+    VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.maxDpbSlots );
+    VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.maxActiveReferencePictures );
+    VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.stdHeaderVersion );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR const & videoCodingControlInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, videoCodingControlInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, videoCodingControlInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, videoCodingControlInfoKHR.flags );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::VideoDecodeCapabilitiesKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VideoDecodeCapabilitiesKHR const & videoDecodeCapabilitiesKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeCapabilitiesKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeCapabilitiesKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeCapabilitiesKHR.flags );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::VideoDecodeH264CapabilitiesKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VideoDecodeH264CapabilitiesKHR const & videoDecodeH264CapabilitiesKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264CapabilitiesKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264CapabilitiesKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264CapabilitiesKHR.maxLevelIdc );
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264CapabilitiesKHR.fieldOffsetGranularity );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::VideoDecodeH264DpbSlotInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VideoDecodeH264DpbSlotInfoKHR const & videoDecodeH264DpbSlotInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264DpbSlotInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264DpbSlotInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264DpbSlotInfoKHR.pStdReferenceInfo );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureInfoKHR const & videoDecodeH264PictureInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264PictureInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264PictureInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264PictureInfoKHR.pStdPictureInfo );
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264PictureInfoKHR.sliceCount );
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264PictureInfoKHR.pSliceOffsets );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::VideoDecodeH264ProfileInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VideoDecodeH264ProfileInfoKHR const & videoDecodeH264ProfileInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264ProfileInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264ProfileInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264ProfileInfoKHR.stdProfileIdc );
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264ProfileInfoKHR.pictureLayout );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoKHR const & videoDecodeH264SessionParametersAddInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersAddInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersAddInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersAddInfoKHR.stdSPSCount );
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersAddInfoKHR.pStdSPSs );
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersAddInfoKHR.stdPPSCount );
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersAddInfoKHR.pStdPPSs );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersCreateInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersCreateInfoKHR const & videoDecodeH264SessionParametersCreateInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersCreateInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersCreateInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersCreateInfoKHR.maxStdSPSCount );
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersCreateInfoKHR.maxStdPPSCount );
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersCreateInfoKHR.pParametersAddInfo );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::VideoDecodeH265CapabilitiesKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VideoDecodeH265CapabilitiesKHR const & videoDecodeH265CapabilitiesKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265CapabilitiesKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265CapabilitiesKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265CapabilitiesKHR.maxLevelIdc );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::VideoDecodeH265DpbSlotInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VideoDecodeH265DpbSlotInfoKHR const & videoDecodeH265DpbSlotInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265DpbSlotInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265DpbSlotInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265DpbSlotInfoKHR.pStdReferenceInfo );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::VideoDecodeH265PictureInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VideoDecodeH265PictureInfoKHR const & videoDecodeH265PictureInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265PictureInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265PictureInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265PictureInfoKHR.pStdPictureInfo );
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265PictureInfoKHR.sliceSegmentCount );
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265PictureInfoKHR.pSliceSegmentOffsets );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::VideoDecodeH265ProfileInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VideoDecodeH265ProfileInfoKHR const & videoDecodeH265ProfileInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265ProfileInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265ProfileInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265ProfileInfoKHR.stdProfileIdc );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoKHR const & videoDecodeH265SessionParametersAddInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersAddInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersAddInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersAddInfoKHR.stdVPSCount );
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersAddInfoKHR.pStdVPSs );
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersAddInfoKHR.stdSPSCount );
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersAddInfoKHR.pStdSPSs );
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersAddInfoKHR.stdPPSCount );
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersAddInfoKHR.pStdPPSs );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersCreateInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersCreateInfoKHR const & videoDecodeH265SessionParametersCreateInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersCreateInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersCreateInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersCreateInfoKHR.maxStdVPSCount );
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersCreateInfoKHR.maxStdSPSCount );
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersCreateInfoKHR.maxStdPPSCount );
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersCreateInfoKHR.pParametersAddInfo );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR const & videoDecodeInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.srcBuffer );
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.srcBufferOffset );
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.srcBufferRange );
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.dstPictureResource );
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.pSetupReferenceSlot );
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.referenceSlotCount );
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.pReferenceSlots );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::VideoDecodeUsageInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VideoDecodeUsageInfoKHR const & videoDecodeUsageInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeUsageInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeUsageInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, videoDecodeUsageInfoKHR.videoUsageHints );
+      return seed;
+    }
+  };
+
+  #if defined( VK_ENABLE_BETA_EXTENSIONS )
+template <> struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeCapabilitiesKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VideoEncodeCapabilitiesKHR const & videoEncodeCapabilitiesKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeCapabilitiesKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeCapabilitiesKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeCapabilitiesKHR.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeCapabilitiesKHR.rateControlModes );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeCapabilitiesKHR.rateControlLayerCount );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeCapabilitiesKHR.qualityLevelCount );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeCapabilitiesKHR.inputImageDataFillAlignment );
+      return seed;
+    }
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  #if defined( VK_ENABLE_BETA_EXTENSIONS )
+template <> struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilitiesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilitiesEXT const & videoEncodeH264CapabilitiesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesEXT.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesEXT.inputModeFlags );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesEXT.outputModeFlags );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesEXT.maxPPictureL0ReferenceCount );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesEXT.maxBPictureL0ReferenceCount );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesEXT.maxL1ReferenceCount );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesEXT.motionVectorsOverPicBoundariesFlag );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesEXT.maxBytesPerPicDenom );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesEXT.maxBitsPerMbDenom );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesEXT.log2MaxMvLengthHorizontal );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesEXT.log2MaxMvLengthVertical );
+      return seed;
+    }
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  #if defined( VK_ENABLE_BETA_EXTENSIONS )
+template <> struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT const & videoEncodeH264DpbSlotInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264DpbSlotInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264DpbSlotInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264DpbSlotInfoEXT.slotIndex );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264DpbSlotInfoEXT.pStdReferenceInfo );
+      return seed;
+    }
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  #if defined( VK_ENABLE_BETA_EXTENSIONS )
+template <> struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeH264EmitPictureParametersInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VideoEncodeH264EmitPictureParametersInfoEXT const & videoEncodeH264EmitPictureParametersInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264EmitPictureParametersInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264EmitPictureParametersInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264EmitPictureParametersInfoEXT.spsId );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264EmitPictureParametersInfoEXT.emitSpsEnable );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264EmitPictureParametersInfoEXT.ppsIdEntryCount );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264EmitPictureParametersInfoEXT.ppsIdEntries );
+      return seed;
+    }
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  #if defined( VK_ENABLE_BETA_EXTENSIONS )
+template <> struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeEXT const & videoEncodeH264FrameSizeEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264FrameSizeEXT.frameISize );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264FrameSizeEXT.framePSize );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264FrameSizeEXT.frameBSize );
+      return seed;
+    }
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  #if defined( VK_ENABLE_BETA_EXTENSIONS )
+template <> struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeH264ReferenceListsInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VideoEncodeH264ReferenceListsInfoEXT const & videoEncodeH264ReferenceListsInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264ReferenceListsInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264ReferenceListsInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264ReferenceListsInfoEXT.referenceList0EntryCount );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264ReferenceListsInfoEXT.pReferenceList0Entries );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264ReferenceListsInfoEXT.referenceList1EntryCount );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264ReferenceListsInfoEXT.pReferenceList1Entries );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264ReferenceListsInfoEXT.pMemMgmtCtrlOperations );
+      return seed;
+    }
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  #if defined( VK_ENABLE_BETA_EXTENSIONS )
+template <> struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceInfoEXT const & videoEncodeH264NaluSliceInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264NaluSliceInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264NaluSliceInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264NaluSliceInfoEXT.mbCount );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264NaluSliceInfoEXT.pReferenceFinalLists );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264NaluSliceInfoEXT.pSliceHeaderStd );
+      return seed;
+    }
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  #if defined( VK_ENABLE_BETA_EXTENSIONS )
+template <> struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeH264ProfileInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VideoEncodeH264ProfileInfoEXT const & videoEncodeH264ProfileInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264ProfileInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264ProfileInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264ProfileInfoEXT.stdProfileIdc );
+      return seed;
+    }
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  #if defined( VK_ENABLE_BETA_EXTENSIONS )
+template <> struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT const & videoEncodeH264QpEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264QpEXT.qpI );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264QpEXT.qpP );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264QpEXT.qpB );
+      return seed;
+    }
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  #if defined( VK_ENABLE_BETA_EXTENSIONS )
+template <> struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlInfoEXT const & videoEncodeH264RateControlInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlInfoEXT.gopFrameCount );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlInfoEXT.idrPeriod );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlInfoEXT.consecutiveBFrameCount );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlInfoEXT.rateControlStructure );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlInfoEXT.temporalLayerCount );
+      return seed;
+    }
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  #if defined( VK_ENABLE_BETA_EXTENSIONS )
+template <> struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlLayerInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlLayerInfoEXT const & videoEncodeH264RateControlLayerInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlLayerInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlLayerInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlLayerInfoEXT.temporalLayerId );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlLayerInfoEXT.useInitialRcQp );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlLayerInfoEXT.initialRcQp );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlLayerInfoEXT.useMinQp );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlLayerInfoEXT.minQp );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlLayerInfoEXT.useMaxQp );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlLayerInfoEXT.maxQp );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlLayerInfoEXT.useMaxFrameSize );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlLayerInfoEXT.maxFrameSize );
+      return seed;
+    }
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  #if defined( VK_ENABLE_BETA_EXTENSIONS )
+template <> struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT const & videoEncodeH264SessionParametersAddInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersAddInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersAddInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersAddInfoEXT.stdSPSCount );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersAddInfoEXT.pStdSPSs );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersAddInfoEXT.stdPPSCount );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersAddInfoEXT.pStdPPSs );
+      return seed;
+    }
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  #if defined( VK_ENABLE_BETA_EXTENSIONS )
+template <> struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersCreateInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersCreateInfoEXT const & videoEncodeH264SessionParametersCreateInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersCreateInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersCreateInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersCreateInfoEXT.maxStdSPSCount );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersCreateInfoEXT.maxStdPPSCount );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersCreateInfoEXT.pParametersAddInfo );
+      return seed;
+    }
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  #if defined( VK_ENABLE_BETA_EXTENSIONS )
+template <> struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeH264VclFrameInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VideoEncodeH264VclFrameInfoEXT const & videoEncodeH264VclFrameInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264VclFrameInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264VclFrameInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264VclFrameInfoEXT.pReferenceFinalLists );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264VclFrameInfoEXT.naluSliceEntryCount );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264VclFrameInfoEXT.pNaluSliceEntries );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264VclFrameInfoEXT.pCurrentPictureInfo );
+      return seed;
+    }
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  #if defined( VK_ENABLE_BETA_EXTENSIONS )
+template <> struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilitiesEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilitiesEXT const & videoEncodeH265CapabilitiesEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.inputModeFlags );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.outputModeFlags );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.ctbSizes );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.transformBlockSizes );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.maxPPictureL0ReferenceCount );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.maxBPictureL0ReferenceCount );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.maxL1ReferenceCount );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.maxSubLayersCount );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.minLog2MinLumaCodingBlockSizeMinus3 );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.maxLog2MinLumaCodingBlockSizeMinus3 );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.minLog2MinLumaTransformBlockSizeMinus2 );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.maxLog2MinLumaTransformBlockSizeMinus2 );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.minMaxTransformHierarchyDepthInter );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.maxMaxTransformHierarchyDepthInter );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.minMaxTransformHierarchyDepthIntra );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.maxMaxTransformHierarchyDepthIntra );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.maxDiffCuQpDeltaDepth );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.minMaxNumMergeCand );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.maxMaxNumMergeCand );
+      return seed;
+    }
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  #if defined( VK_ENABLE_BETA_EXTENSIONS )
+template <> struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT const & videoEncodeH265DpbSlotInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265DpbSlotInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265DpbSlotInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265DpbSlotInfoEXT.slotIndex );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265DpbSlotInfoEXT.pStdReferenceInfo );
+      return seed;
+    }
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  #if defined( VK_ENABLE_BETA_EXTENSIONS )
+template <> struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeH265EmitPictureParametersInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VideoEncodeH265EmitPictureParametersInfoEXT const & videoEncodeH265EmitPictureParametersInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265EmitPictureParametersInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265EmitPictureParametersInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265EmitPictureParametersInfoEXT.vpsId );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265EmitPictureParametersInfoEXT.spsId );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265EmitPictureParametersInfoEXT.emitVpsEnable );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265EmitPictureParametersInfoEXT.emitSpsEnable );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265EmitPictureParametersInfoEXT.ppsIdEntryCount );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265EmitPictureParametersInfoEXT.ppsIdEntries );
+      return seed;
+    }
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  #if defined( VK_ENABLE_BETA_EXTENSIONS )
+template <> struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeEXT const & videoEncodeH265FrameSizeEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265FrameSizeEXT.frameISize );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265FrameSizeEXT.framePSize );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265FrameSizeEXT.frameBSize );
+      return seed;
+    }
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  #if defined( VK_ENABLE_BETA_EXTENSIONS )
+template <> struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsInfoEXT const & videoEncodeH265ReferenceListsInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265ReferenceListsInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265ReferenceListsInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265ReferenceListsInfoEXT.referenceList0EntryCount );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265ReferenceListsInfoEXT.pReferenceList0Entries );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265ReferenceListsInfoEXT.referenceList1EntryCount );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265ReferenceListsInfoEXT.pReferenceList1Entries );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265ReferenceListsInfoEXT.pReferenceModifications );
+      return seed;
+    }
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  #if defined( VK_ENABLE_BETA_EXTENSIONS )
+template <> struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentInfoEXT const & videoEncodeH265NaluSliceSegmentInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265NaluSliceSegmentInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265NaluSliceSegmentInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265NaluSliceSegmentInfoEXT.ctbCount );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265NaluSliceSegmentInfoEXT.pReferenceFinalLists );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265NaluSliceSegmentInfoEXT.pSliceSegmentHeaderStd );
+      return seed;
+    }
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  #if defined( VK_ENABLE_BETA_EXTENSIONS )
+template <> struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeH265ProfileInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VideoEncodeH265ProfileInfoEXT const & videoEncodeH265ProfileInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265ProfileInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265ProfileInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265ProfileInfoEXT.stdProfileIdc );
+      return seed;
+    }
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  #if defined( VK_ENABLE_BETA_EXTENSIONS )
+template <> struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT const & videoEncodeH265QpEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265QpEXT.qpI );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265QpEXT.qpP );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265QpEXT.qpB );
+      return seed;
+    }
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  #if defined( VK_ENABLE_BETA_EXTENSIONS )
+template <> struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlInfoEXT const & videoEncodeH265RateControlInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlInfoEXT.gopFrameCount );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlInfoEXT.idrPeriod );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlInfoEXT.consecutiveBFrameCount );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlInfoEXT.rateControlStructure );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlInfoEXT.subLayerCount );
+      return seed;
+    }
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  #if defined( VK_ENABLE_BETA_EXTENSIONS )
+template <> struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlLayerInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlLayerInfoEXT const & videoEncodeH265RateControlLayerInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlLayerInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlLayerInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlLayerInfoEXT.temporalId );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlLayerInfoEXT.useInitialRcQp );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlLayerInfoEXT.initialRcQp );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlLayerInfoEXT.useMinQp );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlLayerInfoEXT.minQp );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlLayerInfoEXT.useMaxQp );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlLayerInfoEXT.maxQp );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlLayerInfoEXT.useMaxFrameSize );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlLayerInfoEXT.maxFrameSize );
+      return seed;
+    }
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  #if defined( VK_ENABLE_BETA_EXTENSIONS )
+template <> struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoEXT const & videoEncodeH265SessionParametersAddInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersAddInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersAddInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersAddInfoEXT.stdVPSCount );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersAddInfoEXT.pStdVPSs );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersAddInfoEXT.stdSPSCount );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersAddInfoEXT.pStdSPSs );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersAddInfoEXT.stdPPSCount );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersAddInfoEXT.pStdPPSs );
+      return seed;
+    }
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  #if defined( VK_ENABLE_BETA_EXTENSIONS )
+template <> struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersCreateInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersCreateInfoEXT const & videoEncodeH265SessionParametersCreateInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersCreateInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersCreateInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersCreateInfoEXT.maxStdVPSCount );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersCreateInfoEXT.maxStdSPSCount );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersCreateInfoEXT.maxStdPPSCount );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersCreateInfoEXT.pParametersAddInfo );
+      return seed;
+    }
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  #if defined( VK_ENABLE_BETA_EXTENSIONS )
+template <> struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeH265VclFrameInfoEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VideoEncodeH265VclFrameInfoEXT const & videoEncodeH265VclFrameInfoEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265VclFrameInfoEXT.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265VclFrameInfoEXT.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265VclFrameInfoEXT.pReferenceFinalLists );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265VclFrameInfoEXT.naluSliceSegmentEntryCount );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265VclFrameInfoEXT.pNaluSliceSegmentEntries );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265VclFrameInfoEXT.pCurrentPictureInfo );
+      return seed;
+    }
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  #if defined( VK_ENABLE_BETA_EXTENSIONS )
+template <> struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR const & videoEncodeInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.qualityLevel );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.dstBitstreamBuffer );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.dstBitstreamBufferOffset );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.dstBitstreamBufferMaxRange );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.srcPictureResource );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.pSetupReferenceSlot );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.referenceSlotCount );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.pReferenceSlots );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.precedingExternallyEncodedBytes );
+      return seed;
+    }
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  #if defined( VK_ENABLE_BETA_EXTENSIONS )
+template <> struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR const & videoEncodeRateControlLayerInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlLayerInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlLayerInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlLayerInfoKHR.averageBitrate );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlLayerInfoKHR.maxBitrate );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlLayerInfoKHR.frameRateNumerator );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlLayerInfoKHR.frameRateDenominator );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlLayerInfoKHR.virtualBufferSizeInMs );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlLayerInfoKHR.initialVirtualBufferSizeInMs );
+      return seed;
+    }
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  #if defined( VK_ENABLE_BETA_EXTENSIONS )
+template <> struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeRateControlInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VideoEncodeRateControlInfoKHR const & videoEncodeRateControlInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlInfoKHR.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlInfoKHR.rateControlMode );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlInfoKHR.layerCount );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlInfoKHR.pLayerConfigs );
+      return seed;
+    }
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  #if defined( VK_ENABLE_BETA_EXTENSIONS )
+template <> struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeUsageInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VideoEncodeUsageInfoKHR const & videoEncodeUsageInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeUsageInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeUsageInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeUsageInfoKHR.videoUsageHints );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeUsageInfoKHR.videoContentHints );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEncodeUsageInfoKHR.tuningMode );
+      return seed;
+    }
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR const & videoEndCodingInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, videoEndCodingInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEndCodingInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, videoEndCodingInfoKHR.flags );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR const & videoFormatPropertiesKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, videoFormatPropertiesKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, videoFormatPropertiesKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, videoFormatPropertiesKHR.format );
+    VULKAN_HPP_HASH_COMBINE( seed, videoFormatPropertiesKHR.componentMapping );
+    VULKAN_HPP_HASH_COMBINE( seed, videoFormatPropertiesKHR.imageCreateFlags );
+    VULKAN_HPP_HASH_COMBINE( seed, videoFormatPropertiesKHR.imageType );
+    VULKAN_HPP_HASH_COMBINE( seed, videoFormatPropertiesKHR.imageTiling );
+    VULKAN_HPP_HASH_COMBINE( seed, videoFormatPropertiesKHR.imageUsageFlags );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR const & videoProfileInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, videoProfileInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, videoProfileInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, videoProfileInfoKHR.videoCodecOperation );
+    VULKAN_HPP_HASH_COMBINE( seed, videoProfileInfoKHR.chromaSubsampling );
+    VULKAN_HPP_HASH_COMBINE( seed, videoProfileInfoKHR.lumaBitDepth );
+    VULKAN_HPP_HASH_COMBINE( seed, videoProfileInfoKHR.chromaBitDepth );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::VideoProfileListInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VideoProfileListInfoKHR const & videoProfileListInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, videoProfileListInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, videoProfileListInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, videoProfileListInfoKHR.profileCount );
+    VULKAN_HPP_HASH_COMBINE( seed, videoProfileListInfoKHR.pProfiles );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR const & videoSessionCreateInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, videoSessionCreateInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, videoSessionCreateInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, videoSessionCreateInfoKHR.queueFamilyIndex );
+    VULKAN_HPP_HASH_COMBINE( seed, videoSessionCreateInfoKHR.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, videoSessionCreateInfoKHR.pVideoProfile );
+    VULKAN_HPP_HASH_COMBINE( seed, videoSessionCreateInfoKHR.pictureFormat );
+    VULKAN_HPP_HASH_COMBINE( seed, videoSessionCreateInfoKHR.maxCodedExtent );
+    VULKAN_HPP_HASH_COMBINE( seed, videoSessionCreateInfoKHR.referencePictureFormat );
+    VULKAN_HPP_HASH_COMBINE( seed, videoSessionCreateInfoKHR.maxDpbSlots );
+    VULKAN_HPP_HASH_COMBINE( seed, videoSessionCreateInfoKHR.maxActiveReferencePictures );
+    VULKAN_HPP_HASH_COMBINE( seed, videoSessionCreateInfoKHR.pStdHeaderVersion );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR const & videoSessionMemoryRequirementsKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, videoSessionMemoryRequirementsKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, videoSessionMemoryRequirementsKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, videoSessionMemoryRequirementsKHR.memoryBindIndex );
+    VULKAN_HPP_HASH_COMBINE( seed, videoSessionMemoryRequirementsKHR.memoryRequirements );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR const & videoSessionParametersCreateInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, videoSessionParametersCreateInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, videoSessionParametersCreateInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, videoSessionParametersCreateInfoKHR.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, videoSessionParametersCreateInfoKHR.videoSessionParametersTemplate );
+    VULKAN_HPP_HASH_COMBINE( seed, videoSessionParametersCreateInfoKHR.videoSession );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR const & videoSessionParametersUpdateInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, videoSessionParametersUpdateInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, videoSessionParametersUpdateInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, videoSessionParametersUpdateInfoKHR.updateSequenceCount );
+      return seed;
+    }
+  };
+
+  #if defined( VK_USE_PLATFORM_WAYLAND_KHR )
+template <> struct hash<VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR const & waylandSurfaceCreateInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, waylandSurfaceCreateInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, waylandSurfaceCreateInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, waylandSurfaceCreateInfoKHR.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, waylandSurfaceCreateInfoKHR.display );
+    VULKAN_HPP_HASH_COMBINE( seed, waylandSurfaceCreateInfoKHR.surface );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+  #if defined( VK_USE_PLATFORM_WIN32_KHR )
+template <> struct hash<VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoKHR const & win32KeyedMutexAcquireReleaseInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoKHR.acquireCount );
+    VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoKHR.pAcquireSyncs );
+    VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoKHR.pAcquireKeys );
+    VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoKHR.pAcquireTimeouts );
+    VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoKHR.releaseCount );
+    VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoKHR.pReleaseSyncs );
+    VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoKHR.pReleaseKeys );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  #if defined( VK_USE_PLATFORM_WIN32_KHR )
+template <> struct hash<VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoNV const & win32KeyedMutexAcquireReleaseInfoNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoNV.acquireCount );
+    VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoNV.pAcquireSyncs );
+    VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoNV.pAcquireKeys );
+    VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoNV.pAcquireTimeoutMilliseconds );
+    VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoNV.releaseCount );
+    VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoNV.pReleaseSyncs );
+    VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoNV.pReleaseKeys );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  #if defined( VK_USE_PLATFORM_WIN32_KHR )
+template <> struct hash<VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR const & win32SurfaceCreateInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, win32SurfaceCreateInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, win32SurfaceCreateInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, win32SurfaceCreateInfoKHR.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, win32SurfaceCreateInfoKHR.hinstance );
+    VULKAN_HPP_HASH_COMBINE( seed, win32SurfaceCreateInfoKHR.hwnd );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::WriteDescriptorSet>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::WriteDescriptorSet const & writeDescriptorSet) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSet.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSet.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSet.dstSet );
+    VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSet.dstBinding );
+    VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSet.dstArrayElement );
+    VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSet.descriptorCount );
+    VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSet.descriptorType );
+    VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSet.pImageInfo );
+    VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSet.pBufferInfo );
+    VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSet.pTexelBufferView );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureKHR const & writeDescriptorSetAccelerationStructureKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSetAccelerationStructureKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSetAccelerationStructureKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSetAccelerationStructureKHR.accelerationStructureCount );
+    VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSetAccelerationStructureKHR.pAccelerationStructures );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureNV const & writeDescriptorSetAccelerationStructureNV) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSetAccelerationStructureNV.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSetAccelerationStructureNV.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSetAccelerationStructureNV.accelerationStructureCount );
+    VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSetAccelerationStructureNV.pAccelerationStructures );
+      return seed;
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlock>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlock const & writeDescriptorSetInlineUniformBlock) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSetInlineUniformBlock.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSetInlineUniformBlock.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSetInlineUniformBlock.dataSize );
+    VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSetInlineUniformBlock.pData );
+      return seed;
+    }
+  };
+
+  #if defined( VK_USE_PLATFORM_XCB_KHR )
+template <> struct hash<VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR const & xcbSurfaceCreateInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, xcbSurfaceCreateInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, xcbSurfaceCreateInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, xcbSurfaceCreateInfoKHR.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, xcbSurfaceCreateInfoKHR.connection );
+    VULKAN_HPP_HASH_COMBINE( seed, xcbSurfaceCreateInfoKHR.window );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+  #if defined( VK_USE_PLATFORM_XLIB_KHR )
+template <> struct hash<VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR const & xlibSurfaceCreateInfoKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+    VULKAN_HPP_HASH_COMBINE( seed, xlibSurfaceCreateInfoKHR.sType );
+    VULKAN_HPP_HASH_COMBINE( seed, xlibSurfaceCreateInfoKHR.pNext );
+    VULKAN_HPP_HASH_COMBINE( seed, xlibSurfaceCreateInfoKHR.flags );
+    VULKAN_HPP_HASH_COMBINE( seed, xlibSurfaceCreateInfoKHR.dpy );
+    VULKAN_HPP_HASH_COMBINE( seed, xlibSurfaceCreateInfoKHR.window );
+      return seed;
+    }
+  };
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+#endif    // 14 <= VULKAN_HPP_CPP_VERSION
+
+} // namespace std
+#endif
diff --git a/host/libs/graphics_detector/include/vulkan/vulkan_ios.h b/host/libs/graphics_detector/include/vulkan/vulkan_ios.h
new file mode 100644
index 0000000..5792205
--- /dev/null
+++ b/host/libs/graphics_detector/include/vulkan/vulkan_ios.h
@@ -0,0 +1,47 @@
+#ifndef VULKAN_IOS_H_
+#define VULKAN_IOS_H_ 1
+
+/*
+** Copyright 2015-2022 The Khronos Group Inc.
+**
+** SPDX-License-Identifier: Apache-2.0
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+#define VK_MVK_ios_surface 1
+#define VK_MVK_IOS_SURFACE_SPEC_VERSION   3
+#define VK_MVK_IOS_SURFACE_EXTENSION_NAME "VK_MVK_ios_surface"
+typedef VkFlags VkIOSSurfaceCreateFlagsMVK;
+typedef struct VkIOSSurfaceCreateInfoMVK {
+    VkStructureType               sType;
+    const void*                   pNext;
+    VkIOSSurfaceCreateFlagsMVK    flags;
+    const void*                   pView;
+} VkIOSSurfaceCreateInfoMVK;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateIOSSurfaceMVK)(VkInstance instance, const VkIOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateIOSSurfaceMVK(
+    VkInstance                                  instance,
+    const VkIOSSurfaceCreateInfoMVK*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+#endif
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/host/libs/graphics_detector/include/vulkan/vulkan_macos.h b/host/libs/graphics_detector/include/vulkan/vulkan_macos.h
new file mode 100644
index 0000000..8e197c7
--- /dev/null
+++ b/host/libs/graphics_detector/include/vulkan/vulkan_macos.h
@@ -0,0 +1,47 @@
+#ifndef VULKAN_MACOS_H_
+#define VULKAN_MACOS_H_ 1
+
+/*
+** Copyright 2015-2022 The Khronos Group Inc.
+**
+** SPDX-License-Identifier: Apache-2.0
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+#define VK_MVK_macos_surface 1
+#define VK_MVK_MACOS_SURFACE_SPEC_VERSION 3
+#define VK_MVK_MACOS_SURFACE_EXTENSION_NAME "VK_MVK_macos_surface"
+typedef VkFlags VkMacOSSurfaceCreateFlagsMVK;
+typedef struct VkMacOSSurfaceCreateInfoMVK {
+    VkStructureType                 sType;
+    const void*                     pNext;
+    VkMacOSSurfaceCreateFlagsMVK    flags;
+    const void*                     pView;
+} VkMacOSSurfaceCreateInfoMVK;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateMacOSSurfaceMVK)(VkInstance instance, const VkMacOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateMacOSSurfaceMVK(
+    VkInstance                                  instance,
+    const VkMacOSSurfaceCreateInfoMVK*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+#endif
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/host/libs/graphics_detector/include/vulkan/vulkan_metal.h b/host/libs/graphics_detector/include/vulkan/vulkan_metal.h
new file mode 100644
index 0000000..11b9640
--- /dev/null
+++ b/host/libs/graphics_detector/include/vulkan/vulkan_metal.h
@@ -0,0 +1,193 @@
+#ifndef VULKAN_METAL_H_
+#define VULKAN_METAL_H_ 1
+
+/*
+** Copyright 2015-2022 The Khronos Group Inc.
+**
+** SPDX-License-Identifier: Apache-2.0
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+#define VK_EXT_metal_surface 1
+#ifdef __OBJC__
+@class CAMetalLayer;
+#else
+typedef void CAMetalLayer;
+#endif
+
+#define VK_EXT_METAL_SURFACE_SPEC_VERSION 1
+#define VK_EXT_METAL_SURFACE_EXTENSION_NAME "VK_EXT_metal_surface"
+typedef VkFlags VkMetalSurfaceCreateFlagsEXT;
+typedef struct VkMetalSurfaceCreateInfoEXT {
+    VkStructureType                 sType;
+    const void*                     pNext;
+    VkMetalSurfaceCreateFlagsEXT    flags;
+    const CAMetalLayer*             pLayer;
+} VkMetalSurfaceCreateInfoEXT;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateMetalSurfaceEXT)(VkInstance instance, const VkMetalSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateMetalSurfaceEXT(
+    VkInstance                                  instance,
+    const VkMetalSurfaceCreateInfoEXT*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+#endif
+
+
+#define VK_EXT_metal_objects 1
+#ifdef __OBJC__
+@protocol MTLDevice;
+typedef id<MTLDevice> MTLDevice_id;
+#else
+typedef void* MTLDevice_id;
+#endif
+
+#ifdef __OBJC__
+@protocol MTLCommandQueue;
+typedef id<MTLCommandQueue> MTLCommandQueue_id;
+#else
+typedef void* MTLCommandQueue_id;
+#endif
+
+#ifdef __OBJC__
+@protocol MTLBuffer;
+typedef id<MTLBuffer> MTLBuffer_id;
+#else
+typedef void* MTLBuffer_id;
+#endif
+
+#ifdef __OBJC__
+@protocol MTLTexture;
+typedef id<MTLTexture> MTLTexture_id;
+#else
+typedef void* MTLTexture_id;
+#endif
+
+typedef struct __IOSurface* IOSurfaceRef;
+#ifdef __OBJC__
+@protocol MTLSharedEvent;
+typedef id<MTLSharedEvent> MTLSharedEvent_id;
+#else
+typedef void* MTLSharedEvent_id;
+#endif
+
+#define VK_EXT_METAL_OBJECTS_SPEC_VERSION 1
+#define VK_EXT_METAL_OBJECTS_EXTENSION_NAME "VK_EXT_metal_objects"
+
+typedef enum VkExportMetalObjectTypeFlagBitsEXT {
+    VK_EXPORT_METAL_OBJECT_TYPE_METAL_DEVICE_BIT_EXT = 0x00000001,
+    VK_EXPORT_METAL_OBJECT_TYPE_METAL_COMMAND_QUEUE_BIT_EXT = 0x00000002,
+    VK_EXPORT_METAL_OBJECT_TYPE_METAL_BUFFER_BIT_EXT = 0x00000004,
+    VK_EXPORT_METAL_OBJECT_TYPE_METAL_TEXTURE_BIT_EXT = 0x00000008,
+    VK_EXPORT_METAL_OBJECT_TYPE_METAL_IOSURFACE_BIT_EXT = 0x00000010,
+    VK_EXPORT_METAL_OBJECT_TYPE_METAL_SHARED_EVENT_BIT_EXT = 0x00000020,
+    VK_EXPORT_METAL_OBJECT_TYPE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkExportMetalObjectTypeFlagBitsEXT;
+typedef VkFlags VkExportMetalObjectTypeFlagsEXT;
+typedef struct VkExportMetalObjectCreateInfoEXT {
+    VkStructureType                       sType;
+    const void*                           pNext;
+    VkExportMetalObjectTypeFlagBitsEXT    exportObjectType;
+} VkExportMetalObjectCreateInfoEXT;
+
+typedef struct VkExportMetalObjectsInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+} VkExportMetalObjectsInfoEXT;
+
+typedef struct VkExportMetalDeviceInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    MTLDevice_id       mtlDevice;
+} VkExportMetalDeviceInfoEXT;
+
+typedef struct VkExportMetalCommandQueueInfoEXT {
+    VkStructureType       sType;
+    const void*           pNext;
+    VkQueue               queue;
+    MTLCommandQueue_id    mtlCommandQueue;
+} VkExportMetalCommandQueueInfoEXT;
+
+typedef struct VkExportMetalBufferInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkDeviceMemory     memory;
+    MTLBuffer_id       mtlBuffer;
+} VkExportMetalBufferInfoEXT;
+
+typedef struct VkImportMetalBufferInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    MTLBuffer_id       mtlBuffer;
+} VkImportMetalBufferInfoEXT;
+
+typedef struct VkExportMetalTextureInfoEXT {
+    VkStructureType          sType;
+    const void*              pNext;
+    VkImage                  image;
+    VkImageView              imageView;
+    VkBufferView             bufferView;
+    VkImageAspectFlagBits    plane;
+    MTLTexture_id            mtlTexture;
+} VkExportMetalTextureInfoEXT;
+
+typedef struct VkImportMetalTextureInfoEXT {
+    VkStructureType          sType;
+    const void*              pNext;
+    VkImageAspectFlagBits    plane;
+    MTLTexture_id            mtlTexture;
+} VkImportMetalTextureInfoEXT;
+
+typedef struct VkExportMetalIOSurfaceInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkImage            image;
+    IOSurfaceRef       ioSurface;
+} VkExportMetalIOSurfaceInfoEXT;
+
+typedef struct VkImportMetalIOSurfaceInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    IOSurfaceRef       ioSurface;
+} VkImportMetalIOSurfaceInfoEXT;
+
+typedef struct VkExportMetalSharedEventInfoEXT {
+    VkStructureType      sType;
+    const void*          pNext;
+    VkSemaphore          semaphore;
+    VkEvent              event;
+    MTLSharedEvent_id    mtlSharedEvent;
+} VkExportMetalSharedEventInfoEXT;
+
+typedef struct VkImportMetalSharedEventInfoEXT {
+    VkStructureType      sType;
+    const void*          pNext;
+    MTLSharedEvent_id    mtlSharedEvent;
+} VkImportMetalSharedEventInfoEXT;
+
+typedef void (VKAPI_PTR *PFN_vkExportMetalObjectsEXT)(VkDevice device, VkExportMetalObjectsInfoEXT* pMetalObjectsInfo);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkExportMetalObjectsEXT(
+    VkDevice                                    device,
+    VkExportMetalObjectsInfoEXT*                pMetalObjectsInfo);
+#endif
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/host/libs/graphics_detector/include/vulkan/vulkan_raii.hpp b/host/libs/graphics_detector/include/vulkan/vulkan_raii.hpp
new file mode 100644
index 0000000..fef9fa4
--- /dev/null
+++ b/host/libs/graphics_detector/include/vulkan/vulkan_raii.hpp
@@ -0,0 +1,20566 @@
+// Copyright 2015-2022 The Khronos Group Inc.
+// 
+// SPDX-License-Identifier: Apache-2.0 OR MIT
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#ifndef VULKAN_RAII_HPP
+#define VULKAN_RAII_HPP
+
+#include <memory>
+#include <utility>  // std::exchange, std::forward
+#include <vulkan/vulkan.hpp>
+
+#include <android-base/expected.h>
+#include <android-base/logging.h>
+
+#if !defined( VULKAN_HPP_RAII_NAMESPACE )
+#  define VULKAN_HPP_RAII_NAMESPACE raii
+#endif
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+namespace VULKAN_HPP_NAMESPACE
+{
+  namespace VULKAN_HPP_RAII_NAMESPACE
+  {
+    // Local equivalent of std::exchange: stores newValue into obj and
+    // returns obj's previous value. Delegates to std::exchange when the
+    // translation unit is compiled as C++14 or newer; otherwise falls back
+    // to a hand-rolled move-out / assign-in implementation so the RAII
+    // wrappers can still implement move semantics under C++11.
+    template <class T, class U = T>
+    VULKAN_HPP_CONSTEXPR_14 VULKAN_HPP_INLINE T exchange( T & obj, U && newValue )
+    {
+#  if ( 14 <= VULKAN_HPP_CPP_VERSION )
+      return std::exchange<T>( obj, std::forward<U>( newValue ) );
+#  else
+      T oldValue = std::move( obj );
+      obj        = std::forward<U>( newValue );
+      return oldValue;
+#  endif
+    }
+
+
+    // Dispatch table for the Vulkan commands that can be resolved without a
+    // VkInstance. Every pointer is looked up exactly once, in the
+    // constructor, by calling the supplied getProcAddr with a NULL instance
+    // — the loader contract for the global commands (vkCreateInstance,
+    // vkEnumerateInstance*...).
+    // NOTE(review): vkEnumerateInstanceVersion is a 1.1 command and may
+    // resolve to NULL on a Vulkan 1.0 loader; callers should check before
+    // invoking — confirm against the loader documentation.
+    class ContextDispatcher : public DispatchLoaderBase
+    {
+    public:
+      ContextDispatcher( PFN_vkGetInstanceProcAddr getProcAddr )
+        : vkGetInstanceProcAddr( getProcAddr )
+  //=== VK_VERSION_1_0 ===
+, vkCreateInstance( PFN_vkCreateInstance( getProcAddr( NULL, "vkCreateInstance" ) ) ), vkEnumerateInstanceExtensionProperties( PFN_vkEnumerateInstanceExtensionProperties( getProcAddr( NULL, "vkEnumerateInstanceExtensionProperties" ) ) ), vkEnumerateInstanceLayerProperties( PFN_vkEnumerateInstanceLayerProperties( getProcAddr( NULL, "vkEnumerateInstanceLayerProperties" ) ) )
+  //=== VK_VERSION_1_1 ===
+, vkEnumerateInstanceVersion( PFN_vkEnumerateInstanceVersion( getProcAddr( NULL, "vkEnumerateInstanceVersion" ) ) )
+      {}
+
+    public:
+      // Kept so instance- and device-level tables can be built later.
+      PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = 0;
+
+  //=== VK_VERSION_1_0 ===
+      PFN_vkCreateInstance vkCreateInstance = 0;
+      PFN_vkEnumerateInstanceExtensionProperties vkEnumerateInstanceExtensionProperties = 0;
+      PFN_vkEnumerateInstanceLayerProperties vkEnumerateInstanceLayerProperties = 0;
+
+  //=== VK_VERSION_1_1 ===
+      PFN_vkEnumerateInstanceVersion vkEnumerateInstanceVersion = 0;
+
+    };
+
+    class InstanceDispatcher : public DispatchLoaderBase
+    {
+    public:
+      InstanceDispatcher( PFN_vkGetInstanceProcAddr getProcAddr, VkInstance instance )
+        : vkGetInstanceProcAddr( getProcAddr )
+      {
+
+  //=== VK_VERSION_1_0 ===
+        vkDestroyInstance = PFN_vkDestroyInstance( vkGetInstanceProcAddr( instance, "vkDestroyInstance" ) );
+        vkEnumeratePhysicalDevices = PFN_vkEnumeratePhysicalDevices( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDevices" ) );
+        vkGetPhysicalDeviceFeatures = PFN_vkGetPhysicalDeviceFeatures( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures" ) );
+        vkGetPhysicalDeviceFormatProperties = PFN_vkGetPhysicalDeviceFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties" ) );
+        vkGetPhysicalDeviceImageFormatProperties = PFN_vkGetPhysicalDeviceImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties" ) );
+        vkGetPhysicalDeviceProperties = PFN_vkGetPhysicalDeviceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties" ) );
+        vkGetPhysicalDeviceQueueFamilyProperties = PFN_vkGetPhysicalDeviceQueueFamilyProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties" ) );
+        vkGetPhysicalDeviceMemoryProperties = PFN_vkGetPhysicalDeviceMemoryProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties" ) );
+        vkCreateDevice = PFN_vkCreateDevice( vkGetInstanceProcAddr( instance, "vkCreateDevice" ) );
+        vkEnumerateDeviceExtensionProperties = PFN_vkEnumerateDeviceExtensionProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceExtensionProperties" ) );
+        vkEnumerateDeviceLayerProperties = PFN_vkEnumerateDeviceLayerProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceLayerProperties" ) );
+        vkGetPhysicalDeviceSparseImageFormatProperties = PFN_vkGetPhysicalDeviceSparseImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties" ) );
+
+  //=== VK_VERSION_1_1 ===
+        vkEnumeratePhysicalDeviceGroups = PFN_vkEnumeratePhysicalDeviceGroups( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroups" ) );
+        vkGetPhysicalDeviceFeatures2 = PFN_vkGetPhysicalDeviceFeatures2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2" ) );
+        vkGetPhysicalDeviceProperties2 = PFN_vkGetPhysicalDeviceProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2" ) );
+        vkGetPhysicalDeviceFormatProperties2 = PFN_vkGetPhysicalDeviceFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2" ) );
+        vkGetPhysicalDeviceImageFormatProperties2 = PFN_vkGetPhysicalDeviceImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2" ) );
+        vkGetPhysicalDeviceQueueFamilyProperties2 = PFN_vkGetPhysicalDeviceQueueFamilyProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2" ) );
+        vkGetPhysicalDeviceMemoryProperties2 = PFN_vkGetPhysicalDeviceMemoryProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2" ) );
+        vkGetPhysicalDeviceSparseImageFormatProperties2 = PFN_vkGetPhysicalDeviceSparseImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2" ) );
+        vkGetPhysicalDeviceExternalBufferProperties = PFN_vkGetPhysicalDeviceExternalBufferProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferProperties" ) );
+        vkGetPhysicalDeviceExternalFenceProperties = PFN_vkGetPhysicalDeviceExternalFenceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFenceProperties" ) );
+        vkGetPhysicalDeviceExternalSemaphoreProperties = PFN_vkGetPhysicalDeviceExternalSemaphoreProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphoreProperties" ) );
+
+  //=== VK_VERSION_1_3 ===
+        vkGetPhysicalDeviceToolProperties = PFN_vkGetPhysicalDeviceToolProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolProperties" ) );
+
+  //=== VK_EXT_acquire_drm_display ===
+        vkAcquireDrmDisplayEXT = PFN_vkAcquireDrmDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireDrmDisplayEXT" ) );
+        vkGetDrmDisplayEXT = PFN_vkGetDrmDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetDrmDisplayEXT" ) );
+
+#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT )
+  //=== VK_EXT_acquire_xlib_display ===
+        vkAcquireXlibDisplayEXT = PFN_vkAcquireXlibDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireXlibDisplayEXT" ) );
+        vkGetRandROutputDisplayEXT = PFN_vkGetRandROutputDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetRandROutputDisplayEXT" ) );
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+
+  //=== VK_EXT_calibrated_timestamps ===
+        vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsEXT" ) );
+
+  //=== VK_EXT_debug_report ===
+        vkCreateDebugReportCallbackEXT = PFN_vkCreateDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugReportCallbackEXT" ) );
+        vkDestroyDebugReportCallbackEXT = PFN_vkDestroyDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugReportCallbackEXT" ) );
+        vkDebugReportMessageEXT = PFN_vkDebugReportMessageEXT( vkGetInstanceProcAddr( instance, "vkDebugReportMessageEXT" ) );
+
+  //=== VK_EXT_debug_utils ===
+        vkCreateDebugUtilsMessengerEXT = PFN_vkCreateDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugUtilsMessengerEXT" ) );
+        vkDestroyDebugUtilsMessengerEXT = PFN_vkDestroyDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugUtilsMessengerEXT" ) );
+        vkSubmitDebugUtilsMessageEXT = PFN_vkSubmitDebugUtilsMessageEXT( vkGetInstanceProcAddr( instance, "vkSubmitDebugUtilsMessageEXT" ) );
+
+  //=== VK_EXT_direct_mode_display ===
+        vkReleaseDisplayEXT = PFN_vkReleaseDisplayEXT( vkGetInstanceProcAddr( instance, "vkReleaseDisplayEXT" ) );
+
+#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
+  //=== VK_EXT_directfb_surface ===
+        vkCreateDirectFBSurfaceEXT = PFN_vkCreateDirectFBSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateDirectFBSurfaceEXT" ) );
+        vkGetPhysicalDeviceDirectFBPresentationSupportEXT = PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDirectFBPresentationSupportEXT" ) );
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+
+  //=== VK_EXT_display_surface_counter ===
+        vkGetPhysicalDeviceSurfaceCapabilities2EXT = PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2EXT" ) );
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_EXT_full_screen_exclusive ===
+        vkGetPhysicalDeviceSurfacePresentModes2EXT = PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModes2EXT" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_EXT_headless_surface ===
+        vkCreateHeadlessSurfaceEXT = PFN_vkCreateHeadlessSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateHeadlessSurfaceEXT" ) );
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+  //=== VK_EXT_metal_surface ===
+        vkCreateMetalSurfaceEXT = PFN_vkCreateMetalSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateMetalSurfaceEXT" ) );
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  //=== VK_EXT_sample_locations ===
+        vkGetPhysicalDeviceMultisamplePropertiesEXT = PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMultisamplePropertiesEXT" ) );
+
+  //=== VK_EXT_tooling_info ===
+        vkGetPhysicalDeviceToolPropertiesEXT = PFN_vkGetPhysicalDeviceToolPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolPropertiesEXT" ) );
+        if ( !vkGetPhysicalDeviceToolProperties ) vkGetPhysicalDeviceToolProperties = vkGetPhysicalDeviceToolPropertiesEXT;
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_imagepipe_surface ===
+        vkCreateImagePipeSurfaceFUCHSIA = PFN_vkCreateImagePipeSurfaceFUCHSIA( vkGetInstanceProcAddr( instance, "vkCreateImagePipeSurfaceFUCHSIA" ) );
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_GGP )
+  //=== VK_GGP_stream_descriptor_surface ===
+        vkCreateStreamDescriptorSurfaceGGP = PFN_vkCreateStreamDescriptorSurfaceGGP( vkGetInstanceProcAddr( instance, "vkCreateStreamDescriptorSurfaceGGP" ) );
+#endif /*VK_USE_PLATFORM_GGP*/
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  //=== VK_KHR_android_surface ===
+        vkCreateAndroidSurfaceKHR = PFN_vkCreateAndroidSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateAndroidSurfaceKHR" ) );
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+  //=== VK_KHR_device_group ===
+        vkGetPhysicalDevicePresentRectanglesKHR = PFN_vkGetPhysicalDevicePresentRectanglesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDevicePresentRectanglesKHR" ) );
+
+  //=== VK_KHR_device_group_creation ===
+        vkEnumeratePhysicalDeviceGroupsKHR = PFN_vkEnumeratePhysicalDeviceGroupsKHR( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroupsKHR" ) );
+        if ( !vkEnumeratePhysicalDeviceGroups ) vkEnumeratePhysicalDeviceGroups = vkEnumeratePhysicalDeviceGroupsKHR;
+
+  //=== VK_KHR_display ===
+        vkGetPhysicalDeviceDisplayPropertiesKHR = PFN_vkGetPhysicalDeviceDisplayPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPropertiesKHR" ) );
+        vkGetPhysicalDeviceDisplayPlanePropertiesKHR = PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlanePropertiesKHR" ) );
+        vkGetDisplayPlaneSupportedDisplaysKHR = PFN_vkGetDisplayPlaneSupportedDisplaysKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneSupportedDisplaysKHR" ) );
+        vkGetDisplayModePropertiesKHR = PFN_vkGetDisplayModePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModePropertiesKHR" ) );
+        vkCreateDisplayModeKHR = PFN_vkCreateDisplayModeKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayModeKHR" ) );
+        vkGetDisplayPlaneCapabilitiesKHR = PFN_vkGetDisplayPlaneCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilitiesKHR" ) );
+        vkCreateDisplayPlaneSurfaceKHR = PFN_vkCreateDisplayPlaneSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayPlaneSurfaceKHR" ) );
+
+  //=== VK_KHR_external_fence_capabilities ===
+        vkGetPhysicalDeviceExternalFencePropertiesKHR = PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFencePropertiesKHR" ) );
+        if ( !vkGetPhysicalDeviceExternalFenceProperties ) vkGetPhysicalDeviceExternalFenceProperties = vkGetPhysicalDeviceExternalFencePropertiesKHR;
+
+  //=== VK_KHR_external_memory_capabilities ===
+        vkGetPhysicalDeviceExternalBufferPropertiesKHR = PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferPropertiesKHR" ) );
+        if ( !vkGetPhysicalDeviceExternalBufferProperties ) vkGetPhysicalDeviceExternalBufferProperties = vkGetPhysicalDeviceExternalBufferPropertiesKHR;
+
+  //=== VK_KHR_external_semaphore_capabilities ===
+        vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR" ) );
+        if ( !vkGetPhysicalDeviceExternalSemaphoreProperties ) vkGetPhysicalDeviceExternalSemaphoreProperties = vkGetPhysicalDeviceExternalSemaphorePropertiesKHR;
+
+  //=== VK_KHR_fragment_shading_rate ===
+        vkGetPhysicalDeviceFragmentShadingRatesKHR = PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFragmentShadingRatesKHR" ) );
+
+  //=== VK_KHR_get_display_properties2 ===
+        vkGetPhysicalDeviceDisplayProperties2KHR = PFN_vkGetPhysicalDeviceDisplayProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayProperties2KHR" ) );
+        vkGetPhysicalDeviceDisplayPlaneProperties2KHR = PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlaneProperties2KHR" ) );
+        vkGetDisplayModeProperties2KHR = PFN_vkGetDisplayModeProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModeProperties2KHR" ) );
+        vkGetDisplayPlaneCapabilities2KHR = PFN_vkGetDisplayPlaneCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilities2KHR" ) );
+
+  //=== VK_KHR_get_physical_device_properties2 ===
+        vkGetPhysicalDeviceFeatures2KHR = PFN_vkGetPhysicalDeviceFeatures2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2KHR" ) );
+        if ( !vkGetPhysicalDeviceFeatures2 ) vkGetPhysicalDeviceFeatures2 = vkGetPhysicalDeviceFeatures2KHR;
+        vkGetPhysicalDeviceProperties2KHR = PFN_vkGetPhysicalDeviceProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2KHR" ) );
+        if ( !vkGetPhysicalDeviceProperties2 ) vkGetPhysicalDeviceProperties2 = vkGetPhysicalDeviceProperties2KHR;
+        vkGetPhysicalDeviceFormatProperties2KHR = PFN_vkGetPhysicalDeviceFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2KHR" ) );
+        if ( !vkGetPhysicalDeviceFormatProperties2 ) vkGetPhysicalDeviceFormatProperties2 = vkGetPhysicalDeviceFormatProperties2KHR;
+        vkGetPhysicalDeviceImageFormatProperties2KHR = PFN_vkGetPhysicalDeviceImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2KHR" ) );
+        if ( !vkGetPhysicalDeviceImageFormatProperties2 ) vkGetPhysicalDeviceImageFormatProperties2 = vkGetPhysicalDeviceImageFormatProperties2KHR;
+        vkGetPhysicalDeviceQueueFamilyProperties2KHR = PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2KHR" ) );
+        if ( !vkGetPhysicalDeviceQueueFamilyProperties2 ) vkGetPhysicalDeviceQueueFamilyProperties2 = vkGetPhysicalDeviceQueueFamilyProperties2KHR;
+        vkGetPhysicalDeviceMemoryProperties2KHR = PFN_vkGetPhysicalDeviceMemoryProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2KHR" ) );
+        if ( !vkGetPhysicalDeviceMemoryProperties2 ) vkGetPhysicalDeviceMemoryProperties2 = vkGetPhysicalDeviceMemoryProperties2KHR;
+        vkGetPhysicalDeviceSparseImageFormatProperties2KHR = PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2KHR" ) );
+        if ( !vkGetPhysicalDeviceSparseImageFormatProperties2 ) vkGetPhysicalDeviceSparseImageFormatProperties2 = vkGetPhysicalDeviceSparseImageFormatProperties2KHR;
+
+  //=== VK_KHR_get_surface_capabilities2 ===
+        vkGetPhysicalDeviceSurfaceCapabilities2KHR = PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2KHR" ) );
+        vkGetPhysicalDeviceSurfaceFormats2KHR = PFN_vkGetPhysicalDeviceSurfaceFormats2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormats2KHR" ) );
+
+  //=== VK_KHR_performance_query ===
+        vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR" ) );
+        vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR" ) );
+
+  //=== VK_KHR_surface ===
+        vkDestroySurfaceKHR = PFN_vkDestroySurfaceKHR( vkGetInstanceProcAddr( instance, "vkDestroySurfaceKHR" ) );
+        vkGetPhysicalDeviceSurfaceSupportKHR = PFN_vkGetPhysicalDeviceSurfaceSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceSupportKHR" ) );
+        vkGetPhysicalDeviceSurfaceCapabilitiesKHR = PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR" ) );
+        vkGetPhysicalDeviceSurfaceFormatsKHR = PFN_vkGetPhysicalDeviceSurfaceFormatsKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormatsKHR" ) );
+        vkGetPhysicalDeviceSurfacePresentModesKHR = PFN_vkGetPhysicalDeviceSurfacePresentModesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModesKHR" ) );
+
+  //=== VK_KHR_video_queue ===
+        vkGetPhysicalDeviceVideoCapabilitiesKHR = PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoCapabilitiesKHR" ) );
+        vkGetPhysicalDeviceVideoFormatPropertiesKHR = PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoFormatPropertiesKHR" ) );
+
+#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
+  //=== VK_KHR_wayland_surface ===
+        vkCreateWaylandSurfaceKHR = PFN_vkCreateWaylandSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWaylandSurfaceKHR" ) );
+        vkGetPhysicalDeviceWaylandPresentationSupportKHR = PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWaylandPresentationSupportKHR" ) );
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_win32_surface ===
+        vkCreateWin32SurfaceKHR = PFN_vkCreateWin32SurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWin32SurfaceKHR" ) );
+        vkGetPhysicalDeviceWin32PresentationSupportKHR = PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWin32PresentationSupportKHR" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#if defined( VK_USE_PLATFORM_XCB_KHR )
+  //=== VK_KHR_xcb_surface ===
+        vkCreateXcbSurfaceKHR = PFN_vkCreateXcbSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXcbSurfaceKHR" ) );
+        vkGetPhysicalDeviceXcbPresentationSupportKHR = PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXcbPresentationSupportKHR" ) );
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#if defined( VK_USE_PLATFORM_XLIB_KHR )
+  //=== VK_KHR_xlib_surface ===
+        vkCreateXlibSurfaceKHR = PFN_vkCreateXlibSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXlibSurfaceKHR" ) );
+        vkGetPhysicalDeviceXlibPresentationSupportKHR = PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXlibPresentationSupportKHR" ) );
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+#if defined( VK_USE_PLATFORM_IOS_MVK )
+  //=== VK_MVK_ios_surface ===
+        vkCreateIOSSurfaceMVK = PFN_vkCreateIOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateIOSSurfaceMVK" ) );
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+#if defined( VK_USE_PLATFORM_MACOS_MVK )
+  //=== VK_MVK_macos_surface ===
+        vkCreateMacOSSurfaceMVK = PFN_vkCreateMacOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateMacOSSurfaceMVK" ) );
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+
+#if defined( VK_USE_PLATFORM_VI_NN )
+  //=== VK_NN_vi_surface ===
+        vkCreateViSurfaceNN = PFN_vkCreateViSurfaceNN( vkGetInstanceProcAddr( instance, "vkCreateViSurfaceNN" ) );
+#endif /*VK_USE_PLATFORM_VI_NN*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_NV_acquire_winrt_display ===
+        vkAcquireWinrtDisplayNV = PFN_vkAcquireWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkAcquireWinrtDisplayNV" ) );
+        vkGetWinrtDisplayNV = PFN_vkGetWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkGetWinrtDisplayNV" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_NV_cooperative_matrix ===
+        vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesNV" ) );
+
+  //=== VK_NV_coverage_reduction_mode ===
+        vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV" ) );
+
+  //=== VK_NV_external_memory_capabilities ===
+        vkGetPhysicalDeviceExternalImageFormatPropertiesNV = PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalImageFormatPropertiesNV" ) );
+
+  //=== VK_NV_optical_flow ===
+        vkGetPhysicalDeviceOpticalFlowImageFormatsNV = PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceOpticalFlowImageFormatsNV" ) );
+
+#if defined( VK_USE_PLATFORM_SCREEN_QNX )
+  //=== VK_QNX_screen_surface ===
+        vkCreateScreenSurfaceQNX = PFN_vkCreateScreenSurfaceQNX( vkGetInstanceProcAddr( instance, "vkCreateScreenSurfaceQNX" ) );
+        vkGetPhysicalDeviceScreenPresentationSupportQNX = PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceScreenPresentationSupportQNX" ) );
+#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+
+        vkGetDeviceProcAddr =
+          PFN_vkGetDeviceProcAddr( vkGetInstanceProcAddr( instance, "vkGetDeviceProcAddr" ) );
+      }
+
+    public:
+
+  //=== VK_VERSION_1_0 ===
+      PFN_vkDestroyInstance vkDestroyInstance = 0;
+      PFN_vkEnumeratePhysicalDevices vkEnumeratePhysicalDevices = 0;
+      PFN_vkGetPhysicalDeviceFeatures vkGetPhysicalDeviceFeatures = 0;
+      PFN_vkGetPhysicalDeviceFormatProperties vkGetPhysicalDeviceFormatProperties = 0;
+      PFN_vkGetPhysicalDeviceImageFormatProperties vkGetPhysicalDeviceImageFormatProperties = 0;
+      PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties = 0;
+      PFN_vkGetPhysicalDeviceQueueFamilyProperties vkGetPhysicalDeviceQueueFamilyProperties = 0;
+      PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties = 0;
+      PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = 0;
+      PFN_vkCreateDevice vkCreateDevice = 0;
+      PFN_vkEnumerateDeviceExtensionProperties vkEnumerateDeviceExtensionProperties = 0;
+      PFN_vkEnumerateDeviceLayerProperties vkEnumerateDeviceLayerProperties = 0;
+      PFN_vkGetPhysicalDeviceSparseImageFormatProperties vkGetPhysicalDeviceSparseImageFormatProperties = 0;
+
+  //=== VK_VERSION_1_1 ===
+      PFN_vkEnumeratePhysicalDeviceGroups vkEnumeratePhysicalDeviceGroups = 0;
+      PFN_vkGetPhysicalDeviceFeatures2 vkGetPhysicalDeviceFeatures2 = 0;
+      PFN_vkGetPhysicalDeviceProperties2 vkGetPhysicalDeviceProperties2 = 0;
+      PFN_vkGetPhysicalDeviceFormatProperties2 vkGetPhysicalDeviceFormatProperties2 = 0;
+      PFN_vkGetPhysicalDeviceImageFormatProperties2 vkGetPhysicalDeviceImageFormatProperties2 = 0;
+      PFN_vkGetPhysicalDeviceQueueFamilyProperties2 vkGetPhysicalDeviceQueueFamilyProperties2 = 0;
+      PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2 = 0;
+      PFN_vkGetPhysicalDeviceSparseImageFormatProperties2 vkGetPhysicalDeviceSparseImageFormatProperties2 = 0;
+      PFN_vkGetPhysicalDeviceExternalBufferProperties vkGetPhysicalDeviceExternalBufferProperties = 0;
+      PFN_vkGetPhysicalDeviceExternalFenceProperties vkGetPhysicalDeviceExternalFenceProperties = 0;
+      PFN_vkGetPhysicalDeviceExternalSemaphoreProperties vkGetPhysicalDeviceExternalSemaphoreProperties = 0;
+
+  //=== VK_VERSION_1_3 ===
+      PFN_vkGetPhysicalDeviceToolProperties vkGetPhysicalDeviceToolProperties = 0;
+
+  //=== VK_EXT_acquire_drm_display ===
+      PFN_vkAcquireDrmDisplayEXT vkAcquireDrmDisplayEXT = 0;
+      PFN_vkGetDrmDisplayEXT vkGetDrmDisplayEXT = 0;
+
+#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT )
+  //=== VK_EXT_acquire_xlib_display ===
+      PFN_vkAcquireXlibDisplayEXT vkAcquireXlibDisplayEXT = 0;
+      PFN_vkGetRandROutputDisplayEXT vkGetRandROutputDisplayEXT = 0;
+#else 
+      PFN_dummy vkAcquireXlibDisplayEXT_placeholder = 0;
+      PFN_dummy vkGetRandROutputDisplayEXT_placeholder = 0;
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+
+  //=== VK_EXT_calibrated_timestamps ===
+      PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = 0;
+
+  //=== VK_EXT_debug_report ===
+      PFN_vkCreateDebugReportCallbackEXT vkCreateDebugReportCallbackEXT = 0;
+      PFN_vkDestroyDebugReportCallbackEXT vkDestroyDebugReportCallbackEXT = 0;
+      PFN_vkDebugReportMessageEXT vkDebugReportMessageEXT = 0;
+
+  //=== VK_EXT_debug_utils ===
+      PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT = 0;
+      PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT = 0;
+      PFN_vkSubmitDebugUtilsMessageEXT vkSubmitDebugUtilsMessageEXT = 0;
+
+  //=== VK_EXT_direct_mode_display ===
+      PFN_vkReleaseDisplayEXT vkReleaseDisplayEXT = 0;
+
+#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
+  //=== VK_EXT_directfb_surface ===
+      PFN_vkCreateDirectFBSurfaceEXT vkCreateDirectFBSurfaceEXT = 0;
+      PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT vkGetPhysicalDeviceDirectFBPresentationSupportEXT = 0;
+#else 
+      PFN_dummy vkCreateDirectFBSurfaceEXT_placeholder = 0;
+      PFN_dummy vkGetPhysicalDeviceDirectFBPresentationSupportEXT_placeholder = 0;
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+
+  //=== VK_EXT_display_surface_counter ===
+      PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT vkGetPhysicalDeviceSurfaceCapabilities2EXT = 0;
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_EXT_full_screen_exclusive ===
+      PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT vkGetPhysicalDeviceSurfacePresentModes2EXT = 0;
+#else 
+      PFN_dummy vkGetPhysicalDeviceSurfacePresentModes2EXT_placeholder = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_EXT_headless_surface ===
+      PFN_vkCreateHeadlessSurfaceEXT vkCreateHeadlessSurfaceEXT = 0;
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+  //=== VK_EXT_metal_surface ===
+      PFN_vkCreateMetalSurfaceEXT vkCreateMetalSurfaceEXT = 0;
+#else 
+      PFN_dummy vkCreateMetalSurfaceEXT_placeholder = 0;
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  //=== VK_EXT_sample_locations ===
+      PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT vkGetPhysicalDeviceMultisamplePropertiesEXT = 0;
+
+  //=== VK_EXT_tooling_info ===
+      PFN_vkGetPhysicalDeviceToolPropertiesEXT vkGetPhysicalDeviceToolPropertiesEXT = 0;
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_imagepipe_surface ===
+      PFN_vkCreateImagePipeSurfaceFUCHSIA vkCreateImagePipeSurfaceFUCHSIA = 0;
+#else 
+      PFN_dummy vkCreateImagePipeSurfaceFUCHSIA_placeholder = 0;
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_GGP )
+  //=== VK_GGP_stream_descriptor_surface ===
+      PFN_vkCreateStreamDescriptorSurfaceGGP vkCreateStreamDescriptorSurfaceGGP = 0;
+#else 
+      PFN_dummy vkCreateStreamDescriptorSurfaceGGP_placeholder = 0;
+#endif /*VK_USE_PLATFORM_GGP*/
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  //=== VK_KHR_android_surface ===
+      PFN_vkCreateAndroidSurfaceKHR vkCreateAndroidSurfaceKHR = 0;
+#else 
+      PFN_dummy vkCreateAndroidSurfaceKHR_placeholder = 0;
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+  //=== VK_KHR_device_group ===
+      PFN_vkGetPhysicalDevicePresentRectanglesKHR vkGetPhysicalDevicePresentRectanglesKHR = 0;
+
+  //=== VK_KHR_device_group_creation ===
+      PFN_vkEnumeratePhysicalDeviceGroupsKHR vkEnumeratePhysicalDeviceGroupsKHR = 0;
+
+  //=== VK_KHR_display ===
+      PFN_vkGetPhysicalDeviceDisplayPropertiesKHR vkGetPhysicalDeviceDisplayPropertiesKHR = 0;
+      PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR vkGetPhysicalDeviceDisplayPlanePropertiesKHR = 0;
+      PFN_vkGetDisplayPlaneSupportedDisplaysKHR vkGetDisplayPlaneSupportedDisplaysKHR = 0;
+      PFN_vkGetDisplayModePropertiesKHR vkGetDisplayModePropertiesKHR = 0;
+      PFN_vkCreateDisplayModeKHR vkCreateDisplayModeKHR = 0;
+      PFN_vkGetDisplayPlaneCapabilitiesKHR vkGetDisplayPlaneCapabilitiesKHR = 0;
+      PFN_vkCreateDisplayPlaneSurfaceKHR vkCreateDisplayPlaneSurfaceKHR = 0;
+
+  //=== VK_KHR_external_fence_capabilities ===
+      PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR vkGetPhysicalDeviceExternalFencePropertiesKHR = 0;
+
+  //=== VK_KHR_external_memory_capabilities ===
+      PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR vkGetPhysicalDeviceExternalBufferPropertiesKHR = 0;
+
+  //=== VK_KHR_external_semaphore_capabilities ===
+      PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = 0;
+
+  //=== VK_KHR_fragment_shading_rate ===
+      PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR vkGetPhysicalDeviceFragmentShadingRatesKHR = 0;
+
+  //=== VK_KHR_get_display_properties2 ===
+      PFN_vkGetPhysicalDeviceDisplayProperties2KHR vkGetPhysicalDeviceDisplayProperties2KHR = 0;
+      PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR vkGetPhysicalDeviceDisplayPlaneProperties2KHR = 0;
+      PFN_vkGetDisplayModeProperties2KHR vkGetDisplayModeProperties2KHR = 0;
+      PFN_vkGetDisplayPlaneCapabilities2KHR vkGetDisplayPlaneCapabilities2KHR = 0;
+
+  //=== VK_KHR_get_physical_device_properties2 ===
+      PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR = 0;
+      PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR = 0;
+      PFN_vkGetPhysicalDeviceFormatProperties2KHR vkGetPhysicalDeviceFormatProperties2KHR = 0;
+      PFN_vkGetPhysicalDeviceImageFormatProperties2KHR vkGetPhysicalDeviceImageFormatProperties2KHR = 0;
+      PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR vkGetPhysicalDeviceQueueFamilyProperties2KHR = 0;
+      PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR = 0;
+      PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR vkGetPhysicalDeviceSparseImageFormatProperties2KHR = 0;
+
+  //=== VK_KHR_get_surface_capabilities2 ===
+      PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR vkGetPhysicalDeviceSurfaceCapabilities2KHR = 0;
+      PFN_vkGetPhysicalDeviceSurfaceFormats2KHR vkGetPhysicalDeviceSurfaceFormats2KHR = 0;
+
+  //=== VK_KHR_performance_query ===
+      PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = 0;
+      PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = 0;
+
+  //=== VK_KHR_surface ===
+      PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR = 0;
+      PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR = 0;
+      PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR vkGetPhysicalDeviceSurfaceCapabilitiesKHR = 0;
+      PFN_vkGetPhysicalDeviceSurfaceFormatsKHR vkGetPhysicalDeviceSurfaceFormatsKHR = 0;
+      PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR = 0;
+
+  //=== VK_KHR_video_queue ===
+      PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR vkGetPhysicalDeviceVideoCapabilitiesKHR = 0;
+      PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR vkGetPhysicalDeviceVideoFormatPropertiesKHR = 0;
+
+#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
+  //=== VK_KHR_wayland_surface ===
+      PFN_vkCreateWaylandSurfaceKHR vkCreateWaylandSurfaceKHR = 0;
+      PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR vkGetPhysicalDeviceWaylandPresentationSupportKHR = 0;
+#else 
+      PFN_dummy vkCreateWaylandSurfaceKHR_placeholder = 0;
+      PFN_dummy vkGetPhysicalDeviceWaylandPresentationSupportKHR_placeholder = 0;
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_win32_surface ===
+      PFN_vkCreateWin32SurfaceKHR vkCreateWin32SurfaceKHR = 0;
+      PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR vkGetPhysicalDeviceWin32PresentationSupportKHR = 0;
+#else 
+      PFN_dummy vkCreateWin32SurfaceKHR_placeholder = 0;
+      PFN_dummy vkGetPhysicalDeviceWin32PresentationSupportKHR_placeholder = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#if defined( VK_USE_PLATFORM_XCB_KHR )
+  //=== VK_KHR_xcb_surface ===
+      PFN_vkCreateXcbSurfaceKHR vkCreateXcbSurfaceKHR = 0;
+      PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR vkGetPhysicalDeviceXcbPresentationSupportKHR = 0;
+#else 
+      PFN_dummy vkCreateXcbSurfaceKHR_placeholder = 0;
+      PFN_dummy vkGetPhysicalDeviceXcbPresentationSupportKHR_placeholder = 0;
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#if defined( VK_USE_PLATFORM_XLIB_KHR )
+  //=== VK_KHR_xlib_surface ===
+      PFN_vkCreateXlibSurfaceKHR vkCreateXlibSurfaceKHR = 0;
+      PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR vkGetPhysicalDeviceXlibPresentationSupportKHR = 0;
+#else 
+      PFN_dummy vkCreateXlibSurfaceKHR_placeholder = 0;
+      PFN_dummy vkGetPhysicalDeviceXlibPresentationSupportKHR_placeholder = 0;
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+#if defined( VK_USE_PLATFORM_IOS_MVK )
+  //=== VK_MVK_ios_surface ===
+      PFN_vkCreateIOSSurfaceMVK vkCreateIOSSurfaceMVK = 0;
+#else 
+      PFN_dummy vkCreateIOSSurfaceMVK_placeholder = 0;
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+#if defined( VK_USE_PLATFORM_MACOS_MVK )
+  //=== VK_MVK_macos_surface ===
+      PFN_vkCreateMacOSSurfaceMVK vkCreateMacOSSurfaceMVK = 0;
+#else 
+      PFN_dummy vkCreateMacOSSurfaceMVK_placeholder = 0;
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+
+#if defined( VK_USE_PLATFORM_VI_NN )
+  //=== VK_NN_vi_surface ===
+      PFN_vkCreateViSurfaceNN vkCreateViSurfaceNN = 0;
+#else 
+      PFN_dummy vkCreateViSurfaceNN_placeholder = 0;
+#endif /*VK_USE_PLATFORM_VI_NN*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_NV_acquire_winrt_display ===
+      PFN_vkAcquireWinrtDisplayNV vkAcquireWinrtDisplayNV = 0;
+      PFN_vkGetWinrtDisplayNV vkGetWinrtDisplayNV = 0;
+#else 
+      PFN_dummy vkAcquireWinrtDisplayNV_placeholder = 0;
+      PFN_dummy vkGetWinrtDisplayNV_placeholder = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_NV_cooperative_matrix ===
+      PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = 0;
+
+  //=== VK_NV_coverage_reduction_mode ===
+      PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = 0;
+
+  //=== VK_NV_external_memory_capabilities ===
+      PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV vkGetPhysicalDeviceExternalImageFormatPropertiesNV = 0;
+
+  //=== VK_NV_optical_flow ===
+      PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV vkGetPhysicalDeviceOpticalFlowImageFormatsNV = 0;
+
+#if defined( VK_USE_PLATFORM_SCREEN_QNX )
+  //=== VK_QNX_screen_surface ===
+      PFN_vkCreateScreenSurfaceQNX vkCreateScreenSurfaceQNX = 0;
+      PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX vkGetPhysicalDeviceScreenPresentationSupportQNX = 0;
+#else 
+      PFN_dummy vkCreateScreenSurfaceQNX_placeholder = 0;
+      PFN_dummy vkGetPhysicalDeviceScreenPresentationSupportQNX_placeholder = 0;
+#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+
+      PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr = 0;
+    };
+
+    class DeviceDispatcher : public DispatchLoaderBase
+    {
+    public:
+      DeviceDispatcher( PFN_vkGetDeviceProcAddr getProcAddr, VkDevice device ) : vkGetDeviceProcAddr( getProcAddr )
+      {
+
+  //=== VK_VERSION_1_0 ===
+        vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetDeviceProcAddr( device, "vkGetDeviceProcAddr" ) );
+        vkDestroyDevice = PFN_vkDestroyDevice( vkGetDeviceProcAddr( device, "vkDestroyDevice" ) );
+        vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetDeviceProcAddr( device, "vkGetDeviceQueue" ) );
+        vkQueueSubmit = PFN_vkQueueSubmit( vkGetDeviceProcAddr( device, "vkQueueSubmit" ) );
+        vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetDeviceProcAddr( device, "vkQueueWaitIdle" ) );
+        vkDeviceWaitIdle = PFN_vkDeviceWaitIdle( vkGetDeviceProcAddr( device, "vkDeviceWaitIdle" ) );
+        vkAllocateMemory = PFN_vkAllocateMemory( vkGetDeviceProcAddr( device, "vkAllocateMemory" ) );
+        vkFreeMemory = PFN_vkFreeMemory( vkGetDeviceProcAddr( device, "vkFreeMemory" ) );
+        vkMapMemory = PFN_vkMapMemory( vkGetDeviceProcAddr( device, "vkMapMemory" ) );
+        vkUnmapMemory = PFN_vkUnmapMemory( vkGetDeviceProcAddr( device, "vkUnmapMemory" ) );
+        vkFlushMappedMemoryRanges = PFN_vkFlushMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkFlushMappedMemoryRanges" ) );
+        vkInvalidateMappedMemoryRanges = PFN_vkInvalidateMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkInvalidateMappedMemoryRanges" ) );
+        vkGetDeviceMemoryCommitment = PFN_vkGetDeviceMemoryCommitment( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryCommitment" ) );
+        vkBindBufferMemory = PFN_vkBindBufferMemory( vkGetDeviceProcAddr( device, "vkBindBufferMemory" ) );
+        vkBindImageMemory = PFN_vkBindImageMemory( vkGetDeviceProcAddr( device, "vkBindImageMemory" ) );
+        vkGetBufferMemoryRequirements = PFN_vkGetBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements" ) );
+        vkGetImageMemoryRequirements = PFN_vkGetImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements" ) );
+        vkGetImageSparseMemoryRequirements = PFN_vkGetImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements" ) );
+        vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetDeviceProcAddr( device, "vkQueueBindSparse" ) );
+        vkCreateFence = PFN_vkCreateFence( vkGetDeviceProcAddr( device, "vkCreateFence" ) );
+        vkDestroyFence = PFN_vkDestroyFence( vkGetDeviceProcAddr( device, "vkDestroyFence" ) );
+        vkResetFences = PFN_vkResetFences( vkGetDeviceProcAddr( device, "vkResetFences" ) );
+        vkGetFenceStatus = PFN_vkGetFenceStatus( vkGetDeviceProcAddr( device, "vkGetFenceStatus" ) );
+        vkWaitForFences = PFN_vkWaitForFences( vkGetDeviceProcAddr( device, "vkWaitForFences" ) );
+        vkCreateSemaphore = PFN_vkCreateSemaphore( vkGetDeviceProcAddr( device, "vkCreateSemaphore" ) );
+        vkDestroySemaphore = PFN_vkDestroySemaphore( vkGetDeviceProcAddr( device, "vkDestroySemaphore" ) );
+        vkCreateEvent = PFN_vkCreateEvent( vkGetDeviceProcAddr( device, "vkCreateEvent" ) );
+        vkDestroyEvent = PFN_vkDestroyEvent( vkGetDeviceProcAddr( device, "vkDestroyEvent" ) );
+        vkGetEventStatus = PFN_vkGetEventStatus( vkGetDeviceProcAddr( device, "vkGetEventStatus" ) );
+        vkSetEvent = PFN_vkSetEvent( vkGetDeviceProcAddr( device, "vkSetEvent" ) );
+        vkResetEvent = PFN_vkResetEvent( vkGetDeviceProcAddr( device, "vkResetEvent" ) );
+        vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetDeviceProcAddr( device, "vkCreateQueryPool" ) );
+        vkDestroyQueryPool = PFN_vkDestroyQueryPool( vkGetDeviceProcAddr( device, "vkDestroyQueryPool" ) );
+        vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetDeviceProcAddr( device, "vkGetQueryPoolResults" ) );
+        vkCreateBuffer = PFN_vkCreateBuffer( vkGetDeviceProcAddr( device, "vkCreateBuffer" ) );
+        vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetDeviceProcAddr( device, "vkDestroyBuffer" ) );
+        vkCreateBufferView = PFN_vkCreateBufferView( vkGetDeviceProcAddr( device, "vkCreateBufferView" ) );
+        vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetDeviceProcAddr( device, "vkDestroyBufferView" ) );
+        vkCreateImage = PFN_vkCreateImage( vkGetDeviceProcAddr( device, "vkCreateImage" ) );
+        vkDestroyImage = PFN_vkDestroyImage( vkGetDeviceProcAddr( device, "vkDestroyImage" ) );
+        vkGetImageSubresourceLayout = PFN_vkGetImageSubresourceLayout( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout" ) );
+        vkCreateImageView = PFN_vkCreateImageView( vkGetDeviceProcAddr( device, "vkCreateImageView" ) );
+        vkDestroyImageView = PFN_vkDestroyImageView( vkGetDeviceProcAddr( device, "vkDestroyImageView" ) );
+        vkCreateShaderModule = PFN_vkCreateShaderModule( vkGetDeviceProcAddr( device, "vkCreateShaderModule" ) );
+        vkDestroyShaderModule = PFN_vkDestroyShaderModule( vkGetDeviceProcAddr( device, "vkDestroyShaderModule" ) );
+        vkCreatePipelineCache = PFN_vkCreatePipelineCache( vkGetDeviceProcAddr( device, "vkCreatePipelineCache" ) );
+        vkDestroyPipelineCache = PFN_vkDestroyPipelineCache( vkGetDeviceProcAddr( device, "vkDestroyPipelineCache" ) );
+        vkGetPipelineCacheData = PFN_vkGetPipelineCacheData( vkGetDeviceProcAddr( device, "vkGetPipelineCacheData" ) );
+        vkMergePipelineCaches = PFN_vkMergePipelineCaches( vkGetDeviceProcAddr( device, "vkMergePipelineCaches" ) );
+        vkCreateGraphicsPipelines = PFN_vkCreateGraphicsPipelines( vkGetDeviceProcAddr( device, "vkCreateGraphicsPipelines" ) );
+        vkCreateComputePipelines = PFN_vkCreateComputePipelines( vkGetDeviceProcAddr( device, "vkCreateComputePipelines" ) );
+        vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetDeviceProcAddr( device, "vkDestroyPipeline" ) );
+        vkCreatePipelineLayout = PFN_vkCreatePipelineLayout( vkGetDeviceProcAddr( device, "vkCreatePipelineLayout" ) );
+        vkDestroyPipelineLayout = PFN_vkDestroyPipelineLayout( vkGetDeviceProcAddr( device, "vkDestroyPipelineLayout" ) );
+        vkCreateSampler = PFN_vkCreateSampler( vkGetDeviceProcAddr( device, "vkCreateSampler" ) );
+        vkDestroySampler = PFN_vkDestroySampler( vkGetDeviceProcAddr( device, "vkDestroySampler" ) );
+        vkCreateDescriptorSetLayout = PFN_vkCreateDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkCreateDescriptorSetLayout" ) );
+        vkDestroyDescriptorSetLayout = PFN_vkDestroyDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkDestroyDescriptorSetLayout" ) );
+        vkCreateDescriptorPool = PFN_vkCreateDescriptorPool( vkGetDeviceProcAddr( device, "vkCreateDescriptorPool" ) );
+        vkDestroyDescriptorPool = PFN_vkDestroyDescriptorPool( vkGetDeviceProcAddr( device, "vkDestroyDescriptorPool" ) );
+        vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetDeviceProcAddr( device, "vkResetDescriptorPool" ) );
+        vkAllocateDescriptorSets = PFN_vkAllocateDescriptorSets( vkGetDeviceProcAddr( device, "vkAllocateDescriptorSets" ) );
+        vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( vkGetDeviceProcAddr( device, "vkFreeDescriptorSets" ) );
+        vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSets" ) );
+        vkCreateFramebuffer = PFN_vkCreateFramebuffer( vkGetDeviceProcAddr( device, "vkCreateFramebuffer" ) );
+        vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( vkGetDeviceProcAddr( device, "vkDestroyFramebuffer" ) );
+        vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetDeviceProcAddr( device, "vkCreateRenderPass" ) );
+        vkDestroyRenderPass = PFN_vkDestroyRenderPass( vkGetDeviceProcAddr( device, "vkDestroyRenderPass" ) );
+        vkGetRenderAreaGranularity = PFN_vkGetRenderAreaGranularity( vkGetDeviceProcAddr( device, "vkGetRenderAreaGranularity" ) );
+        vkCreateCommandPool = PFN_vkCreateCommandPool( vkGetDeviceProcAddr( device, "vkCreateCommandPool" ) );
+        vkDestroyCommandPool = PFN_vkDestroyCommandPool( vkGetDeviceProcAddr( device, "vkDestroyCommandPool" ) );
+        vkResetCommandPool = PFN_vkResetCommandPool( vkGetDeviceProcAddr( device, "vkResetCommandPool" ) );
+        vkAllocateCommandBuffers = PFN_vkAllocateCommandBuffers( vkGetDeviceProcAddr( device, "vkAllocateCommandBuffers" ) );
+        vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( vkGetDeviceProcAddr( device, "vkFreeCommandBuffers" ) );
+        vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetDeviceProcAddr( device, "vkBeginCommandBuffer" ) );
+        vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetDeviceProcAddr( device, "vkEndCommandBuffer" ) );
+        vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetDeviceProcAddr( device, "vkResetCommandBuffer" ) );
+        vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetDeviceProcAddr( device, "vkCmdBindPipeline" ) );
+        vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetDeviceProcAddr( device, "vkCmdSetViewport" ) );
+        vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetDeviceProcAddr( device, "vkCmdSetScissor" ) );
+        vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetDeviceProcAddr( device, "vkCmdSetLineWidth" ) );
+        vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetDeviceProcAddr( device, "vkCmdSetDepthBias" ) );
+        vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants( vkGetDeviceProcAddr( device, "vkCmdSetBlendConstants" ) );
+        vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetDeviceProcAddr( device, "vkCmdSetDepthBounds" ) );
+        vkCmdSetStencilCompareMask = PFN_vkCmdSetStencilCompareMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilCompareMask" ) );
+        vkCmdSetStencilWriteMask = PFN_vkCmdSetStencilWriteMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilWriteMask" ) );
+        vkCmdSetStencilReference = PFN_vkCmdSetStencilReference( vkGetDeviceProcAddr( device, "vkCmdSetStencilReference" ) );
+        vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets" ) );
+        vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer" ) );
+        vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers" ) );
+        vkCmdDraw = PFN_vkCmdDraw( vkGetDeviceProcAddr( device, "vkCmdDraw" ) );
+        vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetDeviceProcAddr( device, "vkCmdDrawIndexed" ) );
+        vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndirect" ) );
+        vkCmdDrawIndexedIndirect = PFN_vkCmdDrawIndexedIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirect" ) );
+        vkCmdDispatch = PFN_vkCmdDispatch( vkGetDeviceProcAddr( device, "vkCmdDispatch" ) );
+        vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( vkGetDeviceProcAddr( device, "vkCmdDispatchIndirect" ) );
+        vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer" ) );
+        vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetDeviceProcAddr( device, "vkCmdCopyImage" ) );
+        vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetDeviceProcAddr( device, "vkCmdBlitImage" ) );
+        vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage" ) );
+        vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer" ) );
+        vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetDeviceProcAddr( device, "vkCmdUpdateBuffer" ) );
+        vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetDeviceProcAddr( device, "vkCmdFillBuffer" ) );
+        vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetDeviceProcAddr( device, "vkCmdClearColorImage" ) );
+        vkCmdClearDepthStencilImage = PFN_vkCmdClearDepthStencilImage( vkGetDeviceProcAddr( device, "vkCmdClearDepthStencilImage" ) );
+        vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetDeviceProcAddr( device, "vkCmdClearAttachments" ) );
+        vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetDeviceProcAddr( device, "vkCmdResolveImage" ) );
+        vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetDeviceProcAddr( device, "vkCmdSetEvent" ) );
+        vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetDeviceProcAddr( device, "vkCmdResetEvent" ) );
+        vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetDeviceProcAddr( device, "vkCmdWaitEvents" ) );
+        vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier" ) );
+        vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetDeviceProcAddr( device, "vkCmdBeginQuery" ) );
+        vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetDeviceProcAddr( device, "vkCmdEndQuery" ) );
+        vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetDeviceProcAddr( device, "vkCmdResetQueryPool" ) );
+        vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp" ) );
+        vkCmdCopyQueryPoolResults = PFN_vkCmdCopyQueryPoolResults( vkGetDeviceProcAddr( device, "vkCmdCopyQueryPoolResults" ) );
+        vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetDeviceProcAddr( device, "vkCmdPushConstants" ) );
+        vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass" ) );
+        vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetDeviceProcAddr( device, "vkCmdNextSubpass" ) );
+        vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass" ) );
+        vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetDeviceProcAddr( device, "vkCmdExecuteCommands" ) );
+
+  //=== VK_VERSION_1_1 ===
+        vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetDeviceProcAddr( device, "vkBindBufferMemory2" ) );
+        vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetDeviceProcAddr( device, "vkBindImageMemory2" ) );
+        vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeatures" ) );
+        vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMask" ) );
+        vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetDeviceProcAddr( device, "vkCmdDispatchBase" ) );
+        vkGetImageMemoryRequirements2 = PFN_vkGetImageMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2" ) );
+        vkGetBufferMemoryRequirements2 = PFN_vkGetBufferMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2" ) );
+        vkGetImageSparseMemoryRequirements2 = PFN_vkGetImageSparseMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2" ) );
+        vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetDeviceProcAddr( device, "vkTrimCommandPool" ) );
+        vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetDeviceProcAddr( device, "vkGetDeviceQueue2" ) );
+        vkCreateSamplerYcbcrConversion = PFN_vkCreateSamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversion" ) );
+        vkDestroySamplerYcbcrConversion = PFN_vkDestroySamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversion" ) );
+        vkCreateDescriptorUpdateTemplate = PFN_vkCreateDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplate" ) );
+        vkDestroyDescriptorUpdateTemplate = PFN_vkDestroyDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplate" ) );
+        vkUpdateDescriptorSetWithTemplate = PFN_vkUpdateDescriptorSetWithTemplate( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplate" ) );
+        vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupport" ) );
+
+  //=== VK_VERSION_1_2 ===
+        vkCmdDrawIndirectCount = PFN_vkCmdDrawIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCount" ) );
+        vkCmdDrawIndexedIndirectCount = PFN_vkCmdDrawIndexedIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCount" ) );
+        vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetDeviceProcAddr( device, "vkCreateRenderPass2" ) );
+        vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2" ) );
+        vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2" ) );
+        vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2" ) );
+        vkResetQueryPool = PFN_vkResetQueryPool( vkGetDeviceProcAddr( device, "vkResetQueryPool" ) );
+        vkGetSemaphoreCounterValue = PFN_vkGetSemaphoreCounterValue( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValue" ) );
+        vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetDeviceProcAddr( device, "vkWaitSemaphores" ) );
+        vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetDeviceProcAddr( device, "vkSignalSemaphore" ) );
+        vkGetBufferDeviceAddress = PFN_vkGetBufferDeviceAddress( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddress" ) );
+        vkGetBufferOpaqueCaptureAddress = PFN_vkGetBufferOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddress" ) );
+        vkGetDeviceMemoryOpaqueCaptureAddress = PFN_vkGetDeviceMemoryOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddress" ) );
+
+  //=== VK_VERSION_1_3 ===
+        vkCreatePrivateDataSlot = PFN_vkCreatePrivateDataSlot( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlot" ) );
+        vkDestroyPrivateDataSlot = PFN_vkDestroyPrivateDataSlot( vkGetDeviceProcAddr( device, "vkDestroyPrivateDataSlot" ) );
+        vkSetPrivateData = PFN_vkSetPrivateData( vkGetDeviceProcAddr( device, "vkSetPrivateData" ) );
+        vkGetPrivateData = PFN_vkGetPrivateData( vkGetDeviceProcAddr( device, "vkGetPrivateData" ) );
+        vkCmdSetEvent2 = PFN_vkCmdSetEvent2( vkGetDeviceProcAddr( device, "vkCmdSetEvent2" ) );
+        vkCmdResetEvent2 = PFN_vkCmdResetEvent2( vkGetDeviceProcAddr( device, "vkCmdResetEvent2" ) );
+        vkCmdWaitEvents2 = PFN_vkCmdWaitEvents2( vkGetDeviceProcAddr( device, "vkCmdWaitEvents2" ) );
+        vkCmdPipelineBarrier2 = PFN_vkCmdPipelineBarrier2( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier2" ) );
+        vkCmdWriteTimestamp2 = PFN_vkCmdWriteTimestamp2( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp2" ) );
+        vkQueueSubmit2 = PFN_vkQueueSubmit2( vkGetDeviceProcAddr( device, "vkQueueSubmit2" ) );
+        vkCmdCopyBuffer2 = PFN_vkCmdCopyBuffer2( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer2" ) );
+        vkCmdCopyImage2 = PFN_vkCmdCopyImage2( vkGetDeviceProcAddr( device, "vkCmdCopyImage2" ) );
+        vkCmdCopyBufferToImage2 = PFN_vkCmdCopyBufferToImage2( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage2" ) );
+        vkCmdCopyImageToBuffer2 = PFN_vkCmdCopyImageToBuffer2( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer2" ) );
+        vkCmdBlitImage2 = PFN_vkCmdBlitImage2( vkGetDeviceProcAddr( device, "vkCmdBlitImage2" ) );
+        vkCmdResolveImage2 = PFN_vkCmdResolveImage2( vkGetDeviceProcAddr( device, "vkCmdResolveImage2" ) );
+        vkCmdBeginRendering = PFN_vkCmdBeginRendering( vkGetDeviceProcAddr( device, "vkCmdBeginRendering" ) );
+        vkCmdEndRendering = PFN_vkCmdEndRendering( vkGetDeviceProcAddr( device, "vkCmdEndRendering" ) );
+        vkCmdSetCullMode = PFN_vkCmdSetCullMode( vkGetDeviceProcAddr( device, "vkCmdSetCullMode" ) );
+        vkCmdSetFrontFace = PFN_vkCmdSetFrontFace( vkGetDeviceProcAddr( device, "vkCmdSetFrontFace" ) );
+        vkCmdSetPrimitiveTopology = PFN_vkCmdSetPrimitiveTopology( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveTopology" ) );
+        vkCmdSetViewportWithCount = PFN_vkCmdSetViewportWithCount( vkGetDeviceProcAddr( device, "vkCmdSetViewportWithCount" ) );
+        vkCmdSetScissorWithCount = PFN_vkCmdSetScissorWithCount( vkGetDeviceProcAddr( device, "vkCmdSetScissorWithCount" ) );
+        vkCmdBindVertexBuffers2 = PFN_vkCmdBindVertexBuffers2( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers2" ) );
+        vkCmdSetDepthTestEnable = PFN_vkCmdSetDepthTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthTestEnable" ) );
+        vkCmdSetDepthWriteEnable = PFN_vkCmdSetDepthWriteEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthWriteEnable" ) );
+        vkCmdSetDepthCompareOp = PFN_vkCmdSetDepthCompareOp( vkGetDeviceProcAddr( device, "vkCmdSetDepthCompareOp" ) );
+        vkCmdSetDepthBoundsTestEnable = PFN_vkCmdSetDepthBoundsTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthBoundsTestEnable" ) );
+        vkCmdSetStencilTestEnable = PFN_vkCmdSetStencilTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetStencilTestEnable" ) );
+        vkCmdSetStencilOp = PFN_vkCmdSetStencilOp( vkGetDeviceProcAddr( device, "vkCmdSetStencilOp" ) );
+        vkCmdSetRasterizerDiscardEnable = PFN_vkCmdSetRasterizerDiscardEnable( vkGetDeviceProcAddr( device, "vkCmdSetRasterizerDiscardEnable" ) );
+        vkCmdSetDepthBiasEnable = PFN_vkCmdSetDepthBiasEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthBiasEnable" ) );
+        vkCmdSetPrimitiveRestartEnable = PFN_vkCmdSetPrimitiveRestartEnable( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveRestartEnable" ) );
+        vkGetDeviceBufferMemoryRequirements = PFN_vkGetDeviceBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceBufferMemoryRequirements" ) );
+        vkGetDeviceImageMemoryRequirements = PFN_vkGetDeviceImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceImageMemoryRequirements" ) );
+        vkGetDeviceImageSparseMemoryRequirements = PFN_vkGetDeviceImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceImageSparseMemoryRequirements" ) );
+
+  //=== VK_AMD_buffer_marker ===
+        vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarkerAMD" ) );
+
+  //=== VK_AMD_display_native_hdr ===
+        vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetDeviceProcAddr( device, "vkSetLocalDimmingAMD" ) );
+
+  //=== VK_AMD_draw_indirect_count ===
+        vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountAMD" ) );
+        if ( !vkCmdDrawIndirectCount ) vkCmdDrawIndirectCount = vkCmdDrawIndirectCountAMD;
+        vkCmdDrawIndexedIndirectCountAMD = PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountAMD" ) );
+        if ( !vkCmdDrawIndexedIndirectCount ) vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountAMD;
+
+  //=== VK_AMD_shader_info ===
+        vkGetShaderInfoAMD = PFN_vkGetShaderInfoAMD( vkGetDeviceProcAddr( device, "vkGetShaderInfoAMD" ) );
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  //=== VK_ANDROID_external_memory_android_hardware_buffer ===
+        vkGetAndroidHardwareBufferPropertiesANDROID = PFN_vkGetAndroidHardwareBufferPropertiesANDROID( vkGetDeviceProcAddr( device, "vkGetAndroidHardwareBufferPropertiesANDROID" ) );
+        vkGetMemoryAndroidHardwareBufferANDROID = PFN_vkGetMemoryAndroidHardwareBufferANDROID( vkGetDeviceProcAddr( device, "vkGetMemoryAndroidHardwareBufferANDROID" ) );
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+  //=== VK_EXT_buffer_device_address ===
+        vkGetBufferDeviceAddressEXT = PFN_vkGetBufferDeviceAddressEXT( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressEXT" ) );
+        if ( !vkGetBufferDeviceAddress ) vkGetBufferDeviceAddress = vkGetBufferDeviceAddressEXT;
+
+  //=== VK_EXT_calibrated_timestamps ===
+        vkGetCalibratedTimestampsEXT = PFN_vkGetCalibratedTimestampsEXT( vkGetDeviceProcAddr( device, "vkGetCalibratedTimestampsEXT" ) );
+
+  //=== VK_EXT_color_write_enable ===
+        vkCmdSetColorWriteEnableEXT = PFN_vkCmdSetColorWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorWriteEnableEXT" ) );
+
+  //=== VK_EXT_conditional_rendering ===
+        vkCmdBeginConditionalRenderingEXT = PFN_vkCmdBeginConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdBeginConditionalRenderingEXT" ) );
+        vkCmdEndConditionalRenderingEXT = PFN_vkCmdEndConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdEndConditionalRenderingEXT" ) );
+
+  //=== VK_EXT_debug_marker ===
+        vkDebugMarkerSetObjectTagEXT = PFN_vkDebugMarkerSetObjectTagEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectTagEXT" ) );
+        vkDebugMarkerSetObjectNameEXT = PFN_vkDebugMarkerSetObjectNameEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectNameEXT" ) );
+        vkCmdDebugMarkerBeginEXT = PFN_vkCmdDebugMarkerBeginEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerBeginEXT" ) );
+        vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerEndEXT" ) );
+        vkCmdDebugMarkerInsertEXT = PFN_vkCmdDebugMarkerInsertEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerInsertEXT" ) );
+
+  //=== VK_EXT_debug_utils ===
+        vkSetDebugUtilsObjectNameEXT = PFN_vkSetDebugUtilsObjectNameEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectNameEXT" ) );
+        vkSetDebugUtilsObjectTagEXT = PFN_vkSetDebugUtilsObjectTagEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectTagEXT" ) );
+        vkQueueBeginDebugUtilsLabelEXT = PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueBeginDebugUtilsLabelEXT" ) );
+        vkQueueEndDebugUtilsLabelEXT = PFN_vkQueueEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueEndDebugUtilsLabelEXT" ) );
+        vkQueueInsertDebugUtilsLabelEXT = PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueInsertDebugUtilsLabelEXT" ) );
+        vkCmdBeginDebugUtilsLabelEXT = PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdBeginDebugUtilsLabelEXT" ) );
+        vkCmdEndDebugUtilsLabelEXT = PFN_vkCmdEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdEndDebugUtilsLabelEXT" ) );
+        vkCmdInsertDebugUtilsLabelEXT = PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdInsertDebugUtilsLabelEXT" ) );
+
+  //=== VK_EXT_descriptor_buffer ===
+        vkGetDescriptorSetLayoutSizeEXT = PFN_vkGetDescriptorSetLayoutSizeEXT( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSizeEXT" ) );
+        vkGetDescriptorSetLayoutBindingOffsetEXT = PFN_vkGetDescriptorSetLayoutBindingOffsetEXT( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutBindingOffsetEXT" ) );
+        vkGetDescriptorEXT = PFN_vkGetDescriptorEXT( vkGetDeviceProcAddr( device, "vkGetDescriptorEXT" ) );
+        vkCmdBindDescriptorBuffersEXT = PFN_vkCmdBindDescriptorBuffersEXT( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorBuffersEXT" ) );
+        vkCmdSetDescriptorBufferOffsetsEXT = PFN_vkCmdSetDescriptorBufferOffsetsEXT( vkGetDeviceProcAddr( device, "vkCmdSetDescriptorBufferOffsetsEXT" ) );
+        vkCmdBindDescriptorBufferEmbeddedSamplersEXT = PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorBufferEmbeddedSamplersEXT" ) );
+        vkGetBufferOpaqueCaptureDescriptorDataEXT = PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureDescriptorDataEXT" ) );
+        vkGetImageOpaqueCaptureDescriptorDataEXT = PFN_vkGetImageOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetImageOpaqueCaptureDescriptorDataEXT" ) );
+        vkGetImageViewOpaqueCaptureDescriptorDataEXT = PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetImageViewOpaqueCaptureDescriptorDataEXT" ) );
+        vkGetSamplerOpaqueCaptureDescriptorDataEXT = PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetSamplerOpaqueCaptureDescriptorDataEXT" ) );
+        vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT = PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT" ) );
+
+  //=== VK_EXT_device_fault ===
+        vkGetDeviceFaultInfoEXT = PFN_vkGetDeviceFaultInfoEXT( vkGetDeviceProcAddr( device, "vkGetDeviceFaultInfoEXT" ) );
+
+  //=== VK_EXT_discard_rectangles ===
+        vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleEXT" ) );
+
+  //=== VK_EXT_display_control ===
+        vkDisplayPowerControlEXT = PFN_vkDisplayPowerControlEXT( vkGetDeviceProcAddr( device, "vkDisplayPowerControlEXT" ) );
+        vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDeviceEventEXT" ) );
+        vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDisplayEventEXT" ) );
+        vkGetSwapchainCounterEXT = PFN_vkGetSwapchainCounterEXT( vkGetDeviceProcAddr( device, "vkGetSwapchainCounterEXT" ) );
+
+  //=== VK_EXT_extended_dynamic_state ===
+        vkCmdSetCullModeEXT = PFN_vkCmdSetCullModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetCullModeEXT" ) );
+        if ( !vkCmdSetCullMode ) vkCmdSetCullMode = vkCmdSetCullModeEXT;
+        vkCmdSetFrontFaceEXT = PFN_vkCmdSetFrontFaceEXT( vkGetDeviceProcAddr( device, "vkCmdSetFrontFaceEXT" ) );
+        if ( !vkCmdSetFrontFace ) vkCmdSetFrontFace = vkCmdSetFrontFaceEXT;
+        vkCmdSetPrimitiveTopologyEXT = PFN_vkCmdSetPrimitiveTopologyEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveTopologyEXT" ) );
+        if ( !vkCmdSetPrimitiveTopology ) vkCmdSetPrimitiveTopology = vkCmdSetPrimitiveTopologyEXT;
+        vkCmdSetViewportWithCountEXT = PFN_vkCmdSetViewportWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetViewportWithCountEXT" ) );
+        if ( !vkCmdSetViewportWithCount ) vkCmdSetViewportWithCount = vkCmdSetViewportWithCountEXT;
+        vkCmdSetScissorWithCountEXT = PFN_vkCmdSetScissorWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetScissorWithCountEXT" ) );
+        if ( !vkCmdSetScissorWithCount ) vkCmdSetScissorWithCount = vkCmdSetScissorWithCountEXT;
+        vkCmdBindVertexBuffers2EXT = PFN_vkCmdBindVertexBuffers2EXT( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers2EXT" ) );
+        if ( !vkCmdBindVertexBuffers2 ) vkCmdBindVertexBuffers2 = vkCmdBindVertexBuffers2EXT;
+        vkCmdSetDepthTestEnableEXT = PFN_vkCmdSetDepthTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthTestEnableEXT" ) );
+        if ( !vkCmdSetDepthTestEnable ) vkCmdSetDepthTestEnable = vkCmdSetDepthTestEnableEXT;
+        vkCmdSetDepthWriteEnableEXT = PFN_vkCmdSetDepthWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthWriteEnableEXT" ) );
+        if ( !vkCmdSetDepthWriteEnable ) vkCmdSetDepthWriteEnable = vkCmdSetDepthWriteEnableEXT;
+        vkCmdSetDepthCompareOpEXT = PFN_vkCmdSetDepthCompareOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthCompareOpEXT" ) );
+        if ( !vkCmdSetDepthCompareOp ) vkCmdSetDepthCompareOp = vkCmdSetDepthCompareOpEXT;
+        vkCmdSetDepthBoundsTestEnableEXT = PFN_vkCmdSetDepthBoundsTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBoundsTestEnableEXT" ) );
+        if ( !vkCmdSetDepthBoundsTestEnable ) vkCmdSetDepthBoundsTestEnable = vkCmdSetDepthBoundsTestEnableEXT;
+        vkCmdSetStencilTestEnableEXT = PFN_vkCmdSetStencilTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilTestEnableEXT" ) );
+        if ( !vkCmdSetStencilTestEnable ) vkCmdSetStencilTestEnable = vkCmdSetStencilTestEnableEXT;
+        vkCmdSetStencilOpEXT = PFN_vkCmdSetStencilOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilOpEXT" ) );
+        if ( !vkCmdSetStencilOp ) vkCmdSetStencilOp = vkCmdSetStencilOpEXT;
+
+  //=== VK_EXT_extended_dynamic_state2 ===
+        vkCmdSetPatchControlPointsEXT = PFN_vkCmdSetPatchControlPointsEXT( vkGetDeviceProcAddr( device, "vkCmdSetPatchControlPointsEXT" ) );
+        vkCmdSetRasterizerDiscardEnableEXT = PFN_vkCmdSetRasterizerDiscardEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizerDiscardEnableEXT" ) );
+        if ( !vkCmdSetRasterizerDiscardEnable ) vkCmdSetRasterizerDiscardEnable = vkCmdSetRasterizerDiscardEnableEXT;
+        vkCmdSetDepthBiasEnableEXT = PFN_vkCmdSetDepthBiasEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBiasEnableEXT" ) );
+        if ( !vkCmdSetDepthBiasEnable ) vkCmdSetDepthBiasEnable = vkCmdSetDepthBiasEnableEXT;
+        vkCmdSetLogicOpEXT = PFN_vkCmdSetLogicOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetLogicOpEXT" ) );
+        vkCmdSetPrimitiveRestartEnableEXT = PFN_vkCmdSetPrimitiveRestartEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveRestartEnableEXT" ) );
+        if ( !vkCmdSetPrimitiveRestartEnable ) vkCmdSetPrimitiveRestartEnable = vkCmdSetPrimitiveRestartEnableEXT;
+
+  //=== VK_EXT_extended_dynamic_state3 ===
+        vkCmdSetTessellationDomainOriginEXT = PFN_vkCmdSetTessellationDomainOriginEXT( vkGetDeviceProcAddr( device, "vkCmdSetTessellationDomainOriginEXT" ) );
+        vkCmdSetDepthClampEnableEXT = PFN_vkCmdSetDepthClampEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClampEnableEXT" ) );
+        vkCmdSetPolygonModeEXT = PFN_vkCmdSetPolygonModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetPolygonModeEXT" ) );
+        vkCmdSetRasterizationSamplesEXT = PFN_vkCmdSetRasterizationSamplesEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizationSamplesEXT" ) );
+        vkCmdSetSampleMaskEXT = PFN_vkCmdSetSampleMaskEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleMaskEXT" ) );
+        vkCmdSetAlphaToCoverageEnableEXT = PFN_vkCmdSetAlphaToCoverageEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAlphaToCoverageEnableEXT" ) );
+        vkCmdSetAlphaToOneEnableEXT = PFN_vkCmdSetAlphaToOneEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAlphaToOneEnableEXT" ) );
+        vkCmdSetLogicOpEnableEXT = PFN_vkCmdSetLogicOpEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetLogicOpEnableEXT" ) );
+        vkCmdSetColorBlendEnableEXT = PFN_vkCmdSetColorBlendEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendEnableEXT" ) );
+        vkCmdSetColorBlendEquationEXT = PFN_vkCmdSetColorBlendEquationEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendEquationEXT" ) );
+        vkCmdSetColorWriteMaskEXT = PFN_vkCmdSetColorWriteMaskEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorWriteMaskEXT" ) );
+        vkCmdSetRasterizationStreamEXT = PFN_vkCmdSetRasterizationStreamEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizationStreamEXT" ) );
+        vkCmdSetConservativeRasterizationModeEXT = PFN_vkCmdSetConservativeRasterizationModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetConservativeRasterizationModeEXT" ) );
+        vkCmdSetExtraPrimitiveOverestimationSizeEXT = PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT( vkGetDeviceProcAddr( device, "vkCmdSetExtraPrimitiveOverestimationSizeEXT" ) );
+        vkCmdSetDepthClipEnableEXT = PFN_vkCmdSetDepthClipEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClipEnableEXT" ) );
+        vkCmdSetSampleLocationsEnableEXT = PFN_vkCmdSetSampleLocationsEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEnableEXT" ) );
+        vkCmdSetColorBlendAdvancedEXT = PFN_vkCmdSetColorBlendAdvancedEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendAdvancedEXT" ) );
+        vkCmdSetProvokingVertexModeEXT = PFN_vkCmdSetProvokingVertexModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetProvokingVertexModeEXT" ) );
+        vkCmdSetLineRasterizationModeEXT = PFN_vkCmdSetLineRasterizationModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineRasterizationModeEXT" ) );
+        vkCmdSetLineStippleEnableEXT = PFN_vkCmdSetLineStippleEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleEnableEXT" ) );
+        vkCmdSetDepthClipNegativeOneToOneEXT = PFN_vkCmdSetDepthClipNegativeOneToOneEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClipNegativeOneToOneEXT" ) );
+        vkCmdSetViewportWScalingEnableNV = PFN_vkCmdSetViewportWScalingEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingEnableNV" ) );
+        vkCmdSetViewportSwizzleNV = PFN_vkCmdSetViewportSwizzleNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportSwizzleNV" ) );
+        vkCmdSetCoverageToColorEnableNV = PFN_vkCmdSetCoverageToColorEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageToColorEnableNV" ) );
+        vkCmdSetCoverageToColorLocationNV = PFN_vkCmdSetCoverageToColorLocationNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageToColorLocationNV" ) );
+        vkCmdSetCoverageModulationModeNV = PFN_vkCmdSetCoverageModulationModeNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationModeNV" ) );
+        vkCmdSetCoverageModulationTableEnableNV = PFN_vkCmdSetCoverageModulationTableEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationTableEnableNV" ) );
+        vkCmdSetCoverageModulationTableNV = PFN_vkCmdSetCoverageModulationTableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationTableNV" ) );
+        vkCmdSetShadingRateImageEnableNV = PFN_vkCmdSetShadingRateImageEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetShadingRateImageEnableNV" ) );
+        vkCmdSetRepresentativeFragmentTestEnableNV = PFN_vkCmdSetRepresentativeFragmentTestEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetRepresentativeFragmentTestEnableNV" ) );
+        vkCmdSetCoverageReductionModeNV = PFN_vkCmdSetCoverageReductionModeNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageReductionModeNV" ) );
+
+  //=== VK_EXT_external_memory_host ===
+        vkGetMemoryHostPointerPropertiesEXT = PFN_vkGetMemoryHostPointerPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetMemoryHostPointerPropertiesEXT" ) );
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_EXT_full_screen_exclusive ===
+        vkAcquireFullScreenExclusiveModeEXT = PFN_vkAcquireFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkAcquireFullScreenExclusiveModeEXT" ) );
+        vkReleaseFullScreenExclusiveModeEXT = PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkReleaseFullScreenExclusiveModeEXT" ) );
+        vkGetDeviceGroupSurfacePresentModes2EXT = PFN_vkGetDeviceGroupSurfacePresentModes2EXT( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModes2EXT" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_EXT_hdr_metadata ===
+        vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetDeviceProcAddr( device, "vkSetHdrMetadataEXT" ) );
+
+  //=== VK_EXT_host_query_reset ===
+        vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetDeviceProcAddr( device, "vkResetQueryPoolEXT" ) );
+        if ( !vkResetQueryPool ) vkResetQueryPool = vkResetQueryPoolEXT;
+
+  //=== VK_EXT_image_compression_control ===
+        vkGetImageSubresourceLayout2EXT = PFN_vkGetImageSubresourceLayout2EXT( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2EXT" ) );
+
+  //=== VK_EXT_image_drm_format_modifier ===
+        vkGetImageDrmFormatModifierPropertiesEXT = PFN_vkGetImageDrmFormatModifierPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetImageDrmFormatModifierPropertiesEXT" ) );
+
+  //=== VK_EXT_line_rasterization ===
+        vkCmdSetLineStippleEXT = PFN_vkCmdSetLineStippleEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleEXT" ) );
+
+  //=== VK_EXT_mesh_shader ===
+        vkCmdDrawMeshTasksEXT = PFN_vkCmdDrawMeshTasksEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksEXT" ) );
+        vkCmdDrawMeshTasksIndirectEXT = PFN_vkCmdDrawMeshTasksIndirectEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectEXT" ) );
+        vkCmdDrawMeshTasksIndirectCountEXT = PFN_vkCmdDrawMeshTasksIndirectCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountEXT" ) );
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+  //=== VK_EXT_metal_objects ===
+        vkExportMetalObjectsEXT = PFN_vkExportMetalObjectsEXT( vkGetDeviceProcAddr( device, "vkExportMetalObjectsEXT" ) );
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  //=== VK_EXT_multi_draw ===
+        vkCmdDrawMultiEXT = PFN_vkCmdDrawMultiEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMultiEXT" ) );
+        vkCmdDrawMultiIndexedEXT = PFN_vkCmdDrawMultiIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMultiIndexedEXT" ) );
+
+  //=== VK_EXT_opacity_micromap ===
+        vkCreateMicromapEXT = PFN_vkCreateMicromapEXT( vkGetDeviceProcAddr( device, "vkCreateMicromapEXT" ) );
+        vkDestroyMicromapEXT = PFN_vkDestroyMicromapEXT( vkGetDeviceProcAddr( device, "vkDestroyMicromapEXT" ) );
+        vkCmdBuildMicromapsEXT = PFN_vkCmdBuildMicromapsEXT( vkGetDeviceProcAddr( device, "vkCmdBuildMicromapsEXT" ) );
+        vkBuildMicromapsEXT = PFN_vkBuildMicromapsEXT( vkGetDeviceProcAddr( device, "vkBuildMicromapsEXT" ) );
+        vkCopyMicromapEXT = PFN_vkCopyMicromapEXT( vkGetDeviceProcAddr( device, "vkCopyMicromapEXT" ) );
+        vkCopyMicromapToMemoryEXT = PFN_vkCopyMicromapToMemoryEXT( vkGetDeviceProcAddr( device, "vkCopyMicromapToMemoryEXT" ) );
+        vkCopyMemoryToMicromapEXT = PFN_vkCopyMemoryToMicromapEXT( vkGetDeviceProcAddr( device, "vkCopyMemoryToMicromapEXT" ) );
+        vkWriteMicromapsPropertiesEXT = PFN_vkWriteMicromapsPropertiesEXT( vkGetDeviceProcAddr( device, "vkWriteMicromapsPropertiesEXT" ) );
+        vkCmdCopyMicromapEXT = PFN_vkCmdCopyMicromapEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMicromapEXT" ) );
+        vkCmdCopyMicromapToMemoryEXT = PFN_vkCmdCopyMicromapToMemoryEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMicromapToMemoryEXT" ) );
+        vkCmdCopyMemoryToMicromapEXT = PFN_vkCmdCopyMemoryToMicromapEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToMicromapEXT" ) );
+        vkCmdWriteMicromapsPropertiesEXT = PFN_vkCmdWriteMicromapsPropertiesEXT( vkGetDeviceProcAddr( device, "vkCmdWriteMicromapsPropertiesEXT" ) );
+        vkGetDeviceMicromapCompatibilityEXT = PFN_vkGetDeviceMicromapCompatibilityEXT( vkGetDeviceProcAddr( device, "vkGetDeviceMicromapCompatibilityEXT" ) );
+        vkGetMicromapBuildSizesEXT = PFN_vkGetMicromapBuildSizesEXT( vkGetDeviceProcAddr( device, "vkGetMicromapBuildSizesEXT" ) );
+
+  //=== VK_EXT_pageable_device_local_memory ===
+        vkSetDeviceMemoryPriorityEXT = PFN_vkSetDeviceMemoryPriorityEXT( vkGetDeviceProcAddr( device, "vkSetDeviceMemoryPriorityEXT" ) );
+
+  //=== VK_EXT_pipeline_properties ===
+        vkGetPipelinePropertiesEXT = PFN_vkGetPipelinePropertiesEXT( vkGetDeviceProcAddr( device, "vkGetPipelinePropertiesEXT" ) );
+
+  //=== VK_EXT_private_data ===
+        vkCreatePrivateDataSlotEXT = PFN_vkCreatePrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlotEXT" ) );
+        if ( !vkCreatePrivateDataSlot ) vkCreatePrivateDataSlot = vkCreatePrivateDataSlotEXT;
+        vkDestroyPrivateDataSlotEXT = PFN_vkDestroyPrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkDestroyPrivateDataSlotEXT" ) );
+        if ( !vkDestroyPrivateDataSlot ) vkDestroyPrivateDataSlot = vkDestroyPrivateDataSlotEXT;
+        vkSetPrivateDataEXT = PFN_vkSetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkSetPrivateDataEXT" ) );
+        if ( !vkSetPrivateData ) vkSetPrivateData = vkSetPrivateDataEXT;
+        vkGetPrivateDataEXT = PFN_vkGetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkGetPrivateDataEXT" ) );
+        if ( !vkGetPrivateData ) vkGetPrivateData = vkGetPrivateDataEXT;
+
+  //=== VK_EXT_sample_locations ===
+        vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEXT" ) );
+
+  //=== VK_EXT_shader_module_identifier ===
+        vkGetShaderModuleIdentifierEXT = PFN_vkGetShaderModuleIdentifierEXT( vkGetDeviceProcAddr( device, "vkGetShaderModuleIdentifierEXT" ) );
+        vkGetShaderModuleCreateInfoIdentifierEXT = PFN_vkGetShaderModuleCreateInfoIdentifierEXT( vkGetDeviceProcAddr( device, "vkGetShaderModuleCreateInfoIdentifierEXT" ) );
+
+  //=== VK_EXT_swapchain_maintenance1 ===
+        vkReleaseSwapchainImagesEXT = PFN_vkReleaseSwapchainImagesEXT( vkGetDeviceProcAddr( device, "vkReleaseSwapchainImagesEXT" ) );
+
+  //=== VK_EXT_transform_feedback ===
+        vkCmdBindTransformFeedbackBuffersEXT = PFN_vkCmdBindTransformFeedbackBuffersEXT( vkGetDeviceProcAddr( device, "vkCmdBindTransformFeedbackBuffersEXT" ) );
+        vkCmdBeginTransformFeedbackEXT = PFN_vkCmdBeginTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdBeginTransformFeedbackEXT" ) );
+        vkCmdEndTransformFeedbackEXT = PFN_vkCmdEndTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdEndTransformFeedbackEXT" ) );
+        vkCmdBeginQueryIndexedEXT = PFN_vkCmdBeginQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdBeginQueryIndexedEXT" ) );
+        vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdEndQueryIndexedEXT" ) );
+        vkCmdDrawIndirectByteCountEXT = PFN_vkCmdDrawIndirectByteCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectByteCountEXT" ) );
+
+  //=== VK_EXT_validation_cache ===
+        vkCreateValidationCacheEXT = PFN_vkCreateValidationCacheEXT( vkGetDeviceProcAddr( device, "vkCreateValidationCacheEXT" ) );
+        vkDestroyValidationCacheEXT = PFN_vkDestroyValidationCacheEXT( vkGetDeviceProcAddr( device, "vkDestroyValidationCacheEXT" ) );
+        vkMergeValidationCachesEXT = PFN_vkMergeValidationCachesEXT( vkGetDeviceProcAddr( device, "vkMergeValidationCachesEXT" ) );
+        vkGetValidationCacheDataEXT = PFN_vkGetValidationCacheDataEXT( vkGetDeviceProcAddr( device, "vkGetValidationCacheDataEXT" ) );
+
+  //=== VK_EXT_vertex_input_dynamic_state ===
+        vkCmdSetVertexInputEXT = PFN_vkCmdSetVertexInputEXT( vkGetDeviceProcAddr( device, "vkCmdSetVertexInputEXT" ) );
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_buffer_collection ===
+        vkCreateBufferCollectionFUCHSIA = PFN_vkCreateBufferCollectionFUCHSIA( vkGetDeviceProcAddr( device, "vkCreateBufferCollectionFUCHSIA" ) );
+        vkSetBufferCollectionImageConstraintsFUCHSIA = PFN_vkSetBufferCollectionImageConstraintsFUCHSIA( vkGetDeviceProcAddr( device, "vkSetBufferCollectionImageConstraintsFUCHSIA" ) );
+        vkSetBufferCollectionBufferConstraintsFUCHSIA = PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA( vkGetDeviceProcAddr( device, "vkSetBufferCollectionBufferConstraintsFUCHSIA" ) );
+        vkDestroyBufferCollectionFUCHSIA = PFN_vkDestroyBufferCollectionFUCHSIA( vkGetDeviceProcAddr( device, "vkDestroyBufferCollectionFUCHSIA" ) );
+        vkGetBufferCollectionPropertiesFUCHSIA = PFN_vkGetBufferCollectionPropertiesFUCHSIA( vkGetDeviceProcAddr( device, "vkGetBufferCollectionPropertiesFUCHSIA" ) );
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_external_memory ===
+        vkGetMemoryZirconHandleFUCHSIA = PFN_vkGetMemoryZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkGetMemoryZirconHandleFUCHSIA" ) );
+        vkGetMemoryZirconHandlePropertiesFUCHSIA = PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA( vkGetDeviceProcAddr( device, "vkGetMemoryZirconHandlePropertiesFUCHSIA" ) );
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_external_semaphore ===
+        vkImportSemaphoreZirconHandleFUCHSIA = PFN_vkImportSemaphoreZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkImportSemaphoreZirconHandleFUCHSIA" ) );
+        vkGetSemaphoreZirconHandleFUCHSIA = PFN_vkGetSemaphoreZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkGetSemaphoreZirconHandleFUCHSIA" ) );
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  //=== VK_GOOGLE_display_timing ===
+        vkGetRefreshCycleDurationGOOGLE = PFN_vkGetRefreshCycleDurationGOOGLE( vkGetDeviceProcAddr( device, "vkGetRefreshCycleDurationGOOGLE" ) );
+        vkGetPastPresentationTimingGOOGLE = PFN_vkGetPastPresentationTimingGOOGLE( vkGetDeviceProcAddr( device, "vkGetPastPresentationTimingGOOGLE" ) );
+
+  //=== VK_HUAWEI_cluster_culling_shader ===
+        vkCmdDrawClusterHUAWEI = PFN_vkCmdDrawClusterHUAWEI( vkGetDeviceProcAddr( device, "vkCmdDrawClusterHUAWEI" ) );
+        vkCmdDrawClusterIndirectHUAWEI = PFN_vkCmdDrawClusterIndirectHUAWEI( vkGetDeviceProcAddr( device, "vkCmdDrawClusterIndirectHUAWEI" ) );
+
+  //=== VK_HUAWEI_invocation_mask ===
+        vkCmdBindInvocationMaskHUAWEI = PFN_vkCmdBindInvocationMaskHUAWEI( vkGetDeviceProcAddr( device, "vkCmdBindInvocationMaskHUAWEI" ) );
+
+  //=== VK_HUAWEI_subpass_shading ===
+        vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( vkGetDeviceProcAddr( device, "vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI" ) );
+        vkCmdSubpassShadingHUAWEI = PFN_vkCmdSubpassShadingHUAWEI( vkGetDeviceProcAddr( device, "vkCmdSubpassShadingHUAWEI" ) );
+
+  //=== VK_INTEL_performance_query ===
+        vkInitializePerformanceApiINTEL = PFN_vkInitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkInitializePerformanceApiINTEL" ) );
+        vkUninitializePerformanceApiINTEL = PFN_vkUninitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkUninitializePerformanceApiINTEL" ) );
+        vkCmdSetPerformanceMarkerINTEL = PFN_vkCmdSetPerformanceMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceMarkerINTEL" ) );
+        vkCmdSetPerformanceStreamMarkerINTEL = PFN_vkCmdSetPerformanceStreamMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceStreamMarkerINTEL" ) );
+        vkCmdSetPerformanceOverrideINTEL = PFN_vkCmdSetPerformanceOverrideINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceOverrideINTEL" ) );
+        vkAcquirePerformanceConfigurationINTEL = PFN_vkAcquirePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkAcquirePerformanceConfigurationINTEL" ) );
+        vkReleasePerformanceConfigurationINTEL = PFN_vkReleasePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkReleasePerformanceConfigurationINTEL" ) );
+        vkQueueSetPerformanceConfigurationINTEL = PFN_vkQueueSetPerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkQueueSetPerformanceConfigurationINTEL" ) );
+        vkGetPerformanceParameterINTEL = PFN_vkGetPerformanceParameterINTEL( vkGetDeviceProcAddr( device, "vkGetPerformanceParameterINTEL" ) );
+
+  //=== VK_KHR_acceleration_structure ===
+        vkCreateAccelerationStructureKHR = PFN_vkCreateAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureKHR" ) );
+        vkDestroyAccelerationStructureKHR = PFN_vkDestroyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureKHR" ) );
+        vkCmdBuildAccelerationStructuresKHR = PFN_vkCmdBuildAccelerationStructuresKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructuresKHR" ) );
+        vkCmdBuildAccelerationStructuresIndirectKHR = PFN_vkCmdBuildAccelerationStructuresIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructuresIndirectKHR" ) );
+        vkBuildAccelerationStructuresKHR = PFN_vkBuildAccelerationStructuresKHR( vkGetDeviceProcAddr( device, "vkBuildAccelerationStructuresKHR" ) );
+        vkCopyAccelerationStructureKHR = PFN_vkCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureKHR" ) );
+        vkCopyAccelerationStructureToMemoryKHR = PFN_vkCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureToMemoryKHR" ) );
+        vkCopyMemoryToAccelerationStructureKHR = PFN_vkCopyMemoryToAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyMemoryToAccelerationStructureKHR" ) );
+        vkWriteAccelerationStructuresPropertiesKHR = PFN_vkWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, "vkWriteAccelerationStructuresPropertiesKHR" ) );
+        vkCmdCopyAccelerationStructureKHR = PFN_vkCmdCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureKHR" ) );
+        vkCmdCopyAccelerationStructureToMemoryKHR = PFN_vkCmdCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureToMemoryKHR" ) );
+        vkCmdCopyMemoryToAccelerationStructureKHR = PFN_vkCmdCopyMemoryToAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToAccelerationStructureKHR" ) );
+        vkGetAccelerationStructureDeviceAddressKHR = PFN_vkGetAccelerationStructureDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureDeviceAddressKHR" ) );
+        vkCmdWriteAccelerationStructuresPropertiesKHR = PFN_vkCmdWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) );
+        vkGetDeviceAccelerationStructureCompatibilityKHR = PFN_vkGetDeviceAccelerationStructureCompatibilityKHR( vkGetDeviceProcAddr( device, "vkGetDeviceAccelerationStructureCompatibilityKHR" ) );
+        vkGetAccelerationStructureBuildSizesKHR = PFN_vkGetAccelerationStructureBuildSizesKHR( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureBuildSizesKHR" ) );
+
+  //=== VK_KHR_bind_memory2 ===
+        vkBindBufferMemory2KHR = PFN_vkBindBufferMemory2KHR( vkGetDeviceProcAddr( device, "vkBindBufferMemory2KHR" ) );
+        if ( !vkBindBufferMemory2 ) vkBindBufferMemory2 = vkBindBufferMemory2KHR;
+        vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR( vkGetDeviceProcAddr( device, "vkBindImageMemory2KHR" ) );
+        if ( !vkBindImageMemory2 ) vkBindImageMemory2 = vkBindImageMemory2KHR;
+
+  //=== VK_KHR_buffer_device_address ===
+        vkGetBufferDeviceAddressKHR = PFN_vkGetBufferDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressKHR" ) );
+        if ( !vkGetBufferDeviceAddress ) vkGetBufferDeviceAddress = vkGetBufferDeviceAddressKHR;
+        vkGetBufferOpaqueCaptureAddressKHR = PFN_vkGetBufferOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddressKHR" ) );
+        if ( !vkGetBufferOpaqueCaptureAddress ) vkGetBufferOpaqueCaptureAddress = vkGetBufferOpaqueCaptureAddressKHR;
+        vkGetDeviceMemoryOpaqueCaptureAddressKHR = PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) );
+        if ( !vkGetDeviceMemoryOpaqueCaptureAddress ) vkGetDeviceMemoryOpaqueCaptureAddress = vkGetDeviceMemoryOpaqueCaptureAddressKHR;
+
+  //=== VK_KHR_copy_commands2 ===
+        vkCmdCopyBuffer2KHR = PFN_vkCmdCopyBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer2KHR" ) );
+        if ( !vkCmdCopyBuffer2 ) vkCmdCopyBuffer2 = vkCmdCopyBuffer2KHR;
+        vkCmdCopyImage2KHR = PFN_vkCmdCopyImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImage2KHR" ) );
+        if ( !vkCmdCopyImage2 ) vkCmdCopyImage2 = vkCmdCopyImage2KHR;
+        vkCmdCopyBufferToImage2KHR = PFN_vkCmdCopyBufferToImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage2KHR" ) );
+        if ( !vkCmdCopyBufferToImage2 ) vkCmdCopyBufferToImage2 = vkCmdCopyBufferToImage2KHR;
+        vkCmdCopyImageToBuffer2KHR = PFN_vkCmdCopyImageToBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer2KHR" ) );
+        if ( !vkCmdCopyImageToBuffer2 ) vkCmdCopyImageToBuffer2 = vkCmdCopyImageToBuffer2KHR;
+        vkCmdBlitImage2KHR = PFN_vkCmdBlitImage2KHR( vkGetDeviceProcAddr( device, "vkCmdBlitImage2KHR" ) );
+        if ( !vkCmdBlitImage2 ) vkCmdBlitImage2 = vkCmdBlitImage2KHR;
+        vkCmdResolveImage2KHR = PFN_vkCmdResolveImage2KHR( vkGetDeviceProcAddr( device, "vkCmdResolveImage2KHR" ) );
+        if ( !vkCmdResolveImage2 ) vkCmdResolveImage2 = vkCmdResolveImage2KHR;
+
+  //=== VK_KHR_create_renderpass2 ===
+        vkCreateRenderPass2KHR = PFN_vkCreateRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCreateRenderPass2KHR" ) );
+        if ( !vkCreateRenderPass2 ) vkCreateRenderPass2 = vkCreateRenderPass2KHR;
+        vkCmdBeginRenderPass2KHR = PFN_vkCmdBeginRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2KHR" ) );
+        if ( !vkCmdBeginRenderPass2 ) vkCmdBeginRenderPass2 = vkCmdBeginRenderPass2KHR;
+        vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2KHR" ) );
+        if ( !vkCmdNextSubpass2 ) vkCmdNextSubpass2 = vkCmdNextSubpass2KHR;
+        vkCmdEndRenderPass2KHR = PFN_vkCmdEndRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2KHR" ) );
+        if ( !vkCmdEndRenderPass2 ) vkCmdEndRenderPass2 = vkCmdEndRenderPass2KHR;
+
+  //=== VK_KHR_deferred_host_operations ===
+        vkCreateDeferredOperationKHR = PFN_vkCreateDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkCreateDeferredOperationKHR" ) );
+        vkDestroyDeferredOperationKHR = PFN_vkDestroyDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkDestroyDeferredOperationKHR" ) );
+        vkGetDeferredOperationMaxConcurrencyKHR = PFN_vkGetDeferredOperationMaxConcurrencyKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationMaxConcurrencyKHR" ) );
+        vkGetDeferredOperationResultKHR = PFN_vkGetDeferredOperationResultKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationResultKHR" ) );
+        vkDeferredOperationJoinKHR = PFN_vkDeferredOperationJoinKHR( vkGetDeviceProcAddr( device, "vkDeferredOperationJoinKHR" ) );
+
+  //=== VK_KHR_descriptor_update_template ===
+        vkCreateDescriptorUpdateTemplateKHR = PFN_vkCreateDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplateKHR" ) );
+        if ( !vkCreateDescriptorUpdateTemplate ) vkCreateDescriptorUpdateTemplate = vkCreateDescriptorUpdateTemplateKHR;
+        vkDestroyDescriptorUpdateTemplateKHR = PFN_vkDestroyDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplateKHR" ) );
+        if ( !vkDestroyDescriptorUpdateTemplate ) vkDestroyDescriptorUpdateTemplate = vkDestroyDescriptorUpdateTemplateKHR;
+        vkUpdateDescriptorSetWithTemplateKHR = PFN_vkUpdateDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplateKHR" ) );
+        if ( !vkUpdateDescriptorSetWithTemplate ) vkUpdateDescriptorSetWithTemplate = vkUpdateDescriptorSetWithTemplateKHR;
+        vkCmdPushDescriptorSetWithTemplateKHR = PFN_vkCmdPushDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplateKHR" ) );
+
+  //=== VK_KHR_device_group ===
+        vkGetDeviceGroupPeerMemoryFeaturesKHR = PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) );
+        if ( !vkGetDeviceGroupPeerMemoryFeatures ) vkGetDeviceGroupPeerMemoryFeatures = vkGetDeviceGroupPeerMemoryFeaturesKHR;
+        vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMaskKHR" ) );
+        if ( !vkCmdSetDeviceMask ) vkCmdSetDeviceMask = vkCmdSetDeviceMaskKHR;
+        vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( vkGetDeviceProcAddr( device, "vkCmdDispatchBaseKHR" ) );
+        if ( !vkCmdDispatchBase ) vkCmdDispatchBase = vkCmdDispatchBaseKHR;
+        vkGetDeviceGroupPresentCapabilitiesKHR = PFN_vkGetDeviceGroupPresentCapabilitiesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPresentCapabilitiesKHR" ) );
+        vkGetDeviceGroupSurfacePresentModesKHR = PFN_vkGetDeviceGroupSurfacePresentModesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModesKHR" ) );
+        vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR( vkGetDeviceProcAddr( device, "vkAcquireNextImage2KHR" ) );
+
+  //=== VK_KHR_display_swapchain ===
+        vkCreateSharedSwapchainsKHR = PFN_vkCreateSharedSwapchainsKHR( vkGetDeviceProcAddr( device, "vkCreateSharedSwapchainsKHR" ) );
+
+  //=== VK_KHR_draw_indirect_count ===
+        vkCmdDrawIndirectCountKHR = PFN_vkCmdDrawIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountKHR" ) );
+        if ( !vkCmdDrawIndirectCount ) vkCmdDrawIndirectCount = vkCmdDrawIndirectCountKHR;
+        vkCmdDrawIndexedIndirectCountKHR = PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountKHR" ) );
+        if ( !vkCmdDrawIndexedIndirectCount ) vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountKHR;
+
+  //=== VK_KHR_dynamic_rendering ===
+        vkCmdBeginRenderingKHR = PFN_vkCmdBeginRenderingKHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderingKHR" ) );
+        if ( !vkCmdBeginRendering ) vkCmdBeginRendering = vkCmdBeginRenderingKHR;
+        vkCmdEndRenderingKHR = PFN_vkCmdEndRenderingKHR( vkGetDeviceProcAddr( device, "vkCmdEndRenderingKHR" ) );
+        if ( !vkCmdEndRendering ) vkCmdEndRendering = vkCmdEndRenderingKHR;
+
+  //=== VK_KHR_external_fence_fd ===
+        vkImportFenceFdKHR = PFN_vkImportFenceFdKHR( vkGetDeviceProcAddr( device, "vkImportFenceFdKHR" ) );
+        vkGetFenceFdKHR = PFN_vkGetFenceFdKHR( vkGetDeviceProcAddr( device, "vkGetFenceFdKHR" ) );
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_external_fence_win32 ===
+        vkImportFenceWin32HandleKHR = PFN_vkImportFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportFenceWin32HandleKHR" ) );
+        vkGetFenceWin32HandleKHR = PFN_vkGetFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetFenceWin32HandleKHR" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_external_memory_fd ===
+        vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdKHR" ) );
+        vkGetMemoryFdPropertiesKHR = PFN_vkGetMemoryFdPropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdPropertiesKHR" ) );
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_external_memory_win32 ===
+        vkGetMemoryWin32HandleKHR = PFN_vkGetMemoryWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleKHR" ) );
+        vkGetMemoryWin32HandlePropertiesKHR = PFN_vkGetMemoryWin32HandlePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandlePropertiesKHR" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_external_semaphore_fd ===
+        vkImportSemaphoreFdKHR = PFN_vkImportSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreFdKHR" ) );
+        vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreFdKHR" ) );
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_external_semaphore_win32 ===
+        vkImportSemaphoreWin32HandleKHR = PFN_vkImportSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreWin32HandleKHR" ) );
+        vkGetSemaphoreWin32HandleKHR = PFN_vkGetSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreWin32HandleKHR" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_fragment_shading_rate ===
+        vkCmdSetFragmentShadingRateKHR = PFN_vkCmdSetFragmentShadingRateKHR( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateKHR" ) );
+
+  //=== VK_KHR_get_memory_requirements2 ===
+        vkGetImageMemoryRequirements2KHR = PFN_vkGetImageMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2KHR" ) );
+        if ( !vkGetImageMemoryRequirements2 ) vkGetImageMemoryRequirements2 = vkGetImageMemoryRequirements2KHR;
+        vkGetBufferMemoryRequirements2KHR = PFN_vkGetBufferMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2KHR" ) );
+        if ( !vkGetBufferMemoryRequirements2 ) vkGetBufferMemoryRequirements2 = vkGetBufferMemoryRequirements2KHR;
+        vkGetImageSparseMemoryRequirements2KHR = PFN_vkGetImageSparseMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2KHR" ) );
+        if ( !vkGetImageSparseMemoryRequirements2 ) vkGetImageSparseMemoryRequirements2 = vkGetImageSparseMemoryRequirements2KHR;
+
+  //=== VK_KHR_maintenance1 ===
+        vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetDeviceProcAddr( device, "vkTrimCommandPoolKHR" ) );
+        if ( !vkTrimCommandPool ) vkTrimCommandPool = vkTrimCommandPoolKHR;
+
+  //=== VK_KHR_maintenance3 ===
+        vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupportKHR" ) );
+        if ( !vkGetDescriptorSetLayoutSupport ) vkGetDescriptorSetLayoutSupport = vkGetDescriptorSetLayoutSupportKHR;
+
+  //=== VK_KHR_maintenance4 ===
+        vkGetDeviceBufferMemoryRequirementsKHR = PFN_vkGetDeviceBufferMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceBufferMemoryRequirementsKHR" ) );
+        if ( !vkGetDeviceBufferMemoryRequirements ) vkGetDeviceBufferMemoryRequirements = vkGetDeviceBufferMemoryRequirementsKHR;
+        vkGetDeviceImageMemoryRequirementsKHR = PFN_vkGetDeviceImageMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageMemoryRequirementsKHR" ) );
+        if ( !vkGetDeviceImageMemoryRequirements ) vkGetDeviceImageMemoryRequirements = vkGetDeviceImageMemoryRequirementsKHR;
+        vkGetDeviceImageSparseMemoryRequirementsKHR = PFN_vkGetDeviceImageSparseMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageSparseMemoryRequirementsKHR" ) );
+        if ( !vkGetDeviceImageSparseMemoryRequirements ) vkGetDeviceImageSparseMemoryRequirements = vkGetDeviceImageSparseMemoryRequirementsKHR;
+
+  //=== VK_KHR_performance_query ===
+        vkAcquireProfilingLockKHR = PFN_vkAcquireProfilingLockKHR( vkGetDeviceProcAddr( device, "vkAcquireProfilingLockKHR" ) );
+        vkReleaseProfilingLockKHR = PFN_vkReleaseProfilingLockKHR( vkGetDeviceProcAddr( device, "vkReleaseProfilingLockKHR" ) );
+
+  //=== VK_KHR_pipeline_executable_properties ===
+        vkGetPipelineExecutablePropertiesKHR = PFN_vkGetPipelineExecutablePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutablePropertiesKHR" ) );
+        vkGetPipelineExecutableStatisticsKHR = PFN_vkGetPipelineExecutableStatisticsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableStatisticsKHR" ) );
+        vkGetPipelineExecutableInternalRepresentationsKHR = PFN_vkGetPipelineExecutableInternalRepresentationsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableInternalRepresentationsKHR" ) );
+
+  //=== VK_KHR_present_wait ===
+        vkWaitForPresentKHR = PFN_vkWaitForPresentKHR( vkGetDeviceProcAddr( device, "vkWaitForPresentKHR" ) );
+
+  //=== VK_KHR_push_descriptor ===
+        vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetKHR" ) );
+
+  //=== VK_KHR_ray_tracing_maintenance1 ===
+        vkCmdTraceRaysIndirect2KHR = PFN_vkCmdTraceRaysIndirect2KHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysIndirect2KHR" ) );
+
+  //=== VK_KHR_ray_tracing_pipeline ===
+        vkCmdTraceRaysKHR = PFN_vkCmdTraceRaysKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysKHR" ) );
+        vkCreateRayTracingPipelinesKHR = PFN_vkCreateRayTracingPipelinesKHR( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesKHR" ) );
+        vkGetRayTracingShaderGroupHandlesKHR = PFN_vkGetRayTracingShaderGroupHandlesKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesKHR" ) );
+        vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) );
+        vkCmdTraceRaysIndirectKHR = PFN_vkCmdTraceRaysIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysIndirectKHR" ) );
+        vkGetRayTracingShaderGroupStackSizeKHR = PFN_vkGetRayTracingShaderGroupStackSizeKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupStackSizeKHR" ) );
+        vkCmdSetRayTracingPipelineStackSizeKHR = PFN_vkCmdSetRayTracingPipelineStackSizeKHR( vkGetDeviceProcAddr( device, "vkCmdSetRayTracingPipelineStackSizeKHR" ) );
+
+  //=== VK_KHR_sampler_ycbcr_conversion ===
+        vkCreateSamplerYcbcrConversionKHR = PFN_vkCreateSamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversionKHR" ) );
+        if ( !vkCreateSamplerYcbcrConversion ) vkCreateSamplerYcbcrConversion = vkCreateSamplerYcbcrConversionKHR;
+        vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversionKHR" ) );
+        if ( !vkDestroySamplerYcbcrConversion ) vkDestroySamplerYcbcrConversion = vkDestroySamplerYcbcrConversionKHR;
+
+  //=== VK_KHR_shared_presentable_image ===
+        vkGetSwapchainStatusKHR = PFN_vkGetSwapchainStatusKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainStatusKHR" ) );
+
+  //=== VK_KHR_swapchain ===
+        vkCreateSwapchainKHR = PFN_vkCreateSwapchainKHR( vkGetDeviceProcAddr( device, "vkCreateSwapchainKHR" ) );
+        vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR( vkGetDeviceProcAddr( device, "vkDestroySwapchainKHR" ) );
+        vkGetSwapchainImagesKHR = PFN_vkGetSwapchainImagesKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainImagesKHR" ) );
+        vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( vkGetDeviceProcAddr( device, "vkAcquireNextImageKHR" ) );
+        vkQueuePresentKHR = PFN_vkQueuePresentKHR( vkGetDeviceProcAddr( device, "vkQueuePresentKHR" ) );
+
+  //=== VK_KHR_synchronization2 ===
+        vkCmdSetEvent2KHR = PFN_vkCmdSetEvent2KHR( vkGetDeviceProcAddr( device, "vkCmdSetEvent2KHR" ) );
+        if ( !vkCmdSetEvent2 ) vkCmdSetEvent2 = vkCmdSetEvent2KHR;
+        vkCmdResetEvent2KHR = PFN_vkCmdResetEvent2KHR( vkGetDeviceProcAddr( device, "vkCmdResetEvent2KHR" ) );
+        if ( !vkCmdResetEvent2 ) vkCmdResetEvent2 = vkCmdResetEvent2KHR;
+        vkCmdWaitEvents2KHR = PFN_vkCmdWaitEvents2KHR( vkGetDeviceProcAddr( device, "vkCmdWaitEvents2KHR" ) );
+        if ( !vkCmdWaitEvents2 ) vkCmdWaitEvents2 = vkCmdWaitEvents2KHR;
+        vkCmdPipelineBarrier2KHR = PFN_vkCmdPipelineBarrier2KHR( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier2KHR" ) );
+        if ( !vkCmdPipelineBarrier2 ) vkCmdPipelineBarrier2 = vkCmdPipelineBarrier2KHR;
+        vkCmdWriteTimestamp2KHR = PFN_vkCmdWriteTimestamp2KHR( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp2KHR" ) );
+        if ( !vkCmdWriteTimestamp2 ) vkCmdWriteTimestamp2 = vkCmdWriteTimestamp2KHR;
+        vkQueueSubmit2KHR = PFN_vkQueueSubmit2KHR( vkGetDeviceProcAddr( device, "vkQueueSubmit2KHR" ) );
+        if ( !vkQueueSubmit2 ) vkQueueSubmit2 = vkQueueSubmit2KHR;
+        vkCmdWriteBufferMarker2AMD = PFN_vkCmdWriteBufferMarker2AMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarker2AMD" ) );
+        vkGetQueueCheckpointData2NV = PFN_vkGetQueueCheckpointData2NV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointData2NV" ) );
+
+  //=== VK_KHR_timeline_semaphore ===
+        vkGetSemaphoreCounterValueKHR = PFN_vkGetSemaphoreCounterValueKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValueKHR" ) );
+        if ( !vkGetSemaphoreCounterValue ) vkGetSemaphoreCounterValue = vkGetSemaphoreCounterValueKHR;
+        vkWaitSemaphoresKHR = PFN_vkWaitSemaphoresKHR( vkGetDeviceProcAddr( device, "vkWaitSemaphoresKHR" ) );
+        if ( !vkWaitSemaphores ) vkWaitSemaphores = vkWaitSemaphoresKHR;
+        vkSignalSemaphoreKHR = PFN_vkSignalSemaphoreKHR( vkGetDeviceProcAddr( device, "vkSignalSemaphoreKHR" ) );
+        if ( !vkSignalSemaphore ) vkSignalSemaphore = vkSignalSemaphoreKHR;
+
+  //=== VK_KHR_video_decode_queue ===
+        vkCmdDecodeVideoKHR = PFN_vkCmdDecodeVideoKHR( vkGetDeviceProcAddr( device, "vkCmdDecodeVideoKHR" ) );
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  //=== VK_KHR_video_encode_queue ===
+        vkCmdEncodeVideoKHR = PFN_vkCmdEncodeVideoKHR( vkGetDeviceProcAddr( device, "vkCmdEncodeVideoKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  //=== VK_KHR_video_queue ===
+        vkCreateVideoSessionKHR = PFN_vkCreateVideoSessionKHR( vkGetDeviceProcAddr( device, "vkCreateVideoSessionKHR" ) );
+        vkDestroyVideoSessionKHR = PFN_vkDestroyVideoSessionKHR( vkGetDeviceProcAddr( device, "vkDestroyVideoSessionKHR" ) );
+        vkGetVideoSessionMemoryRequirementsKHR = PFN_vkGetVideoSessionMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetVideoSessionMemoryRequirementsKHR" ) );
+        vkBindVideoSessionMemoryKHR = PFN_vkBindVideoSessionMemoryKHR( vkGetDeviceProcAddr( device, "vkBindVideoSessionMemoryKHR" ) );
+        vkCreateVideoSessionParametersKHR = PFN_vkCreateVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkCreateVideoSessionParametersKHR" ) );
+        vkUpdateVideoSessionParametersKHR = PFN_vkUpdateVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkUpdateVideoSessionParametersKHR" ) );
+        vkDestroyVideoSessionParametersKHR = PFN_vkDestroyVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkDestroyVideoSessionParametersKHR" ) );
+        vkCmdBeginVideoCodingKHR = PFN_vkCmdBeginVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdBeginVideoCodingKHR" ) );
+        vkCmdEndVideoCodingKHR = PFN_vkCmdEndVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdEndVideoCodingKHR" ) );
+        vkCmdControlVideoCodingKHR = PFN_vkCmdControlVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdControlVideoCodingKHR" ) );
+
+  //=== VK_NVX_binary_import ===
+        vkCreateCuModuleNVX = PFN_vkCreateCuModuleNVX( vkGetDeviceProcAddr( device, "vkCreateCuModuleNVX" ) );
+        vkCreateCuFunctionNVX = PFN_vkCreateCuFunctionNVX( vkGetDeviceProcAddr( device, "vkCreateCuFunctionNVX" ) );
+        vkDestroyCuModuleNVX = PFN_vkDestroyCuModuleNVX( vkGetDeviceProcAddr( device, "vkDestroyCuModuleNVX" ) );
+        vkDestroyCuFunctionNVX = PFN_vkDestroyCuFunctionNVX( vkGetDeviceProcAddr( device, "vkDestroyCuFunctionNVX" ) );
+        vkCmdCuLaunchKernelNVX = PFN_vkCmdCuLaunchKernelNVX( vkGetDeviceProcAddr( device, "vkCmdCuLaunchKernelNVX" ) );
+
+  //=== VK_NVX_image_view_handle ===
+        vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetDeviceProcAddr( device, "vkGetImageViewHandleNVX" ) );
+        vkGetImageViewAddressNVX = PFN_vkGetImageViewAddressNVX( vkGetDeviceProcAddr( device, "vkGetImageViewAddressNVX" ) );
+
+  //=== VK_NV_clip_space_w_scaling ===
+        vkCmdSetViewportWScalingNV = PFN_vkCmdSetViewportWScalingNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingNV" ) );
+
+  //=== VK_NV_copy_memory_indirect ===
+        vkCmdCopyMemoryIndirectNV = PFN_vkCmdCopyMemoryIndirectNV( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryIndirectNV" ) );
+        vkCmdCopyMemoryToImageIndirectNV = PFN_vkCmdCopyMemoryToImageIndirectNV( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToImageIndirectNV" ) );
+
+  //=== VK_NV_device_diagnostic_checkpoints ===
+        vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetDeviceProcAddr( device, "vkCmdSetCheckpointNV" ) );
+        vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointDataNV" ) );
+
+  //=== VK_NV_device_generated_commands ===
+        vkGetGeneratedCommandsMemoryRequirementsNV = PFN_vkGetGeneratedCommandsMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetGeneratedCommandsMemoryRequirementsNV" ) );
+        vkCmdPreprocessGeneratedCommandsNV = PFN_vkCmdPreprocessGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdPreprocessGeneratedCommandsNV" ) );
+        vkCmdExecuteGeneratedCommandsNV = PFN_vkCmdExecuteGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdExecuteGeneratedCommandsNV" ) );
+        vkCmdBindPipelineShaderGroupNV = PFN_vkCmdBindPipelineShaderGroupNV( vkGetDeviceProcAddr( device, "vkCmdBindPipelineShaderGroupNV" ) );
+        vkCreateIndirectCommandsLayoutNV = PFN_vkCreateIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkCreateIndirectCommandsLayoutNV" ) );
+        vkDestroyIndirectCommandsLayoutNV = PFN_vkDestroyIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkDestroyIndirectCommandsLayoutNV" ) );
+
+  //=== VK_NV_external_memory_rdma ===
+        vkGetMemoryRemoteAddressNV = PFN_vkGetMemoryRemoteAddressNV( vkGetDeviceProcAddr( device, "vkGetMemoryRemoteAddressNV" ) );
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_NV_external_memory_win32 ===
+        vkGetMemoryWin32HandleNV = PFN_vkGetMemoryWin32HandleNV( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleNV" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_NV_fragment_shading_rate_enums ===
+        vkCmdSetFragmentShadingRateEnumNV = PFN_vkCmdSetFragmentShadingRateEnumNV( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateEnumNV" ) );
+
+  //=== VK_NV_memory_decompression ===
+        vkCmdDecompressMemoryNV = PFN_vkCmdDecompressMemoryNV( vkGetDeviceProcAddr( device, "vkCmdDecompressMemoryNV" ) );
+        vkCmdDecompressMemoryIndirectCountNV = PFN_vkCmdDecompressMemoryIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDecompressMemoryIndirectCountNV" ) );
+
+  //=== VK_NV_mesh_shader ===
+        vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksNV" ) );
+        vkCmdDrawMeshTasksIndirectNV = PFN_vkCmdDrawMeshTasksIndirectNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectNV" ) );
+        vkCmdDrawMeshTasksIndirectCountNV = PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountNV" ) );
+
+  //=== VK_NV_optical_flow ===
+        vkCreateOpticalFlowSessionNV = PFN_vkCreateOpticalFlowSessionNV( vkGetDeviceProcAddr( device, "vkCreateOpticalFlowSessionNV" ) );
+        vkDestroyOpticalFlowSessionNV = PFN_vkDestroyOpticalFlowSessionNV( vkGetDeviceProcAddr( device, "vkDestroyOpticalFlowSessionNV" ) );
+        vkBindOpticalFlowSessionImageNV = PFN_vkBindOpticalFlowSessionImageNV( vkGetDeviceProcAddr( device, "vkBindOpticalFlowSessionImageNV" ) );
+        vkCmdOpticalFlowExecuteNV = PFN_vkCmdOpticalFlowExecuteNV( vkGetDeviceProcAddr( device, "vkCmdOpticalFlowExecuteNV" ) );
+
+  //=== VK_NV_ray_tracing ===
+        vkCreateAccelerationStructureNV = PFN_vkCreateAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureNV" ) );
+        vkDestroyAccelerationStructureNV = PFN_vkDestroyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureNV" ) );
+        vkGetAccelerationStructureMemoryRequirementsNV = PFN_vkGetAccelerationStructureMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureMemoryRequirementsNV" ) );
+        vkBindAccelerationStructureMemoryNV = PFN_vkBindAccelerationStructureMemoryNV( vkGetDeviceProcAddr( device, "vkBindAccelerationStructureMemoryNV" ) );
+        vkCmdBuildAccelerationStructureNV = PFN_vkCmdBuildAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructureNV" ) );
+        vkCmdCopyAccelerationStructureNV = PFN_vkCmdCopyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureNV" ) );
+        vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetDeviceProcAddr( device, "vkCmdTraceRaysNV" ) );
+        vkCreateRayTracingPipelinesNV = PFN_vkCreateRayTracingPipelinesNV( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesNV" ) );
+        vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesNV" ) );
+        if ( !vkGetRayTracingShaderGroupHandlesKHR ) vkGetRayTracingShaderGroupHandlesKHR = vkGetRayTracingShaderGroupHandlesNV;
+        vkGetAccelerationStructureHandleNV = PFN_vkGetAccelerationStructureHandleNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureHandleNV" ) );
+        vkCmdWriteAccelerationStructuresPropertiesNV = PFN_vkCmdWriteAccelerationStructuresPropertiesNV( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesNV" ) );
+        vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetDeviceProcAddr( device, "vkCompileDeferredNV" ) );
+
+  //=== VK_NV_scissor_exclusive ===
+        vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV( vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorNV" ) );
+
+  //=== VK_NV_shading_rate_image ===
+        vkCmdBindShadingRateImageNV = PFN_vkCmdBindShadingRateImageNV( vkGetDeviceProcAddr( device, "vkCmdBindShadingRateImageNV" ) );
+        vkCmdSetViewportShadingRatePaletteNV = PFN_vkCmdSetViewportShadingRatePaletteNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportShadingRatePaletteNV" ) );
+        vkCmdSetCoarseSampleOrderNV = PFN_vkCmdSetCoarseSampleOrderNV( vkGetDeviceProcAddr( device, "vkCmdSetCoarseSampleOrderNV" ) );
+
+  //=== VK_QCOM_tile_properties ===
+        vkGetFramebufferTilePropertiesQCOM = PFN_vkGetFramebufferTilePropertiesQCOM( vkGetDeviceProcAddr( device, "vkGetFramebufferTilePropertiesQCOM" ) );
+        vkGetDynamicRenderingTilePropertiesQCOM = PFN_vkGetDynamicRenderingTilePropertiesQCOM( vkGetDeviceProcAddr( device, "vkGetDynamicRenderingTilePropertiesQCOM" ) );
+
+  //=== VK_VALVE_descriptor_set_host_mapping ===
+        vkGetDescriptorSetLayoutHostMappingInfoVALVE = PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutHostMappingInfoVALVE" ) );
+        vkGetDescriptorSetHostMappingVALVE = PFN_vkGetDescriptorSetHostMappingVALVE( vkGetDeviceProcAddr( device, "vkGetDescriptorSetHostMappingVALVE" ) );
+
+      }
+
+    public:
+
+  //=== VK_VERSION_1_0 ===
+      PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr = 0;
+      PFN_vkDestroyDevice vkDestroyDevice = 0;
+      PFN_vkGetDeviceQueue vkGetDeviceQueue = 0;
+      PFN_vkQueueSubmit vkQueueSubmit = 0;
+      PFN_vkQueueWaitIdle vkQueueWaitIdle = 0;
+      PFN_vkDeviceWaitIdle vkDeviceWaitIdle = 0;
+      PFN_vkAllocateMemory vkAllocateMemory = 0;
+      PFN_vkFreeMemory vkFreeMemory = 0;
+      PFN_vkMapMemory vkMapMemory = 0;
+      PFN_vkUnmapMemory vkUnmapMemory = 0;
+      PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges = 0;
+      PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges = 0;
+      PFN_vkGetDeviceMemoryCommitment vkGetDeviceMemoryCommitment = 0;
+      PFN_vkBindBufferMemory vkBindBufferMemory = 0;
+      PFN_vkBindImageMemory vkBindImageMemory = 0;
+      PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements = 0;
+      PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements = 0;
+      PFN_vkGetImageSparseMemoryRequirements vkGetImageSparseMemoryRequirements = 0;
+      PFN_vkQueueBindSparse vkQueueBindSparse = 0;
+      PFN_vkCreateFence vkCreateFence = 0;
+      PFN_vkDestroyFence vkDestroyFence = 0;
+      PFN_vkResetFences vkResetFences = 0;
+      PFN_vkGetFenceStatus vkGetFenceStatus = 0;
+      PFN_vkWaitForFences vkWaitForFences = 0;
+      PFN_vkCreateSemaphore vkCreateSemaphore = 0;
+      PFN_vkDestroySemaphore vkDestroySemaphore = 0;
+      PFN_vkCreateEvent vkCreateEvent = 0;
+      PFN_vkDestroyEvent vkDestroyEvent = 0;
+      PFN_vkGetEventStatus vkGetEventStatus = 0;
+      PFN_vkSetEvent vkSetEvent = 0;
+      PFN_vkResetEvent vkResetEvent = 0;
+      PFN_vkCreateQueryPool vkCreateQueryPool = 0;
+      PFN_vkDestroyQueryPool vkDestroyQueryPool = 0;
+      PFN_vkGetQueryPoolResults vkGetQueryPoolResults = 0;
+      PFN_vkCreateBuffer vkCreateBuffer = 0;
+      PFN_vkDestroyBuffer vkDestroyBuffer = 0;
+      PFN_vkCreateBufferView vkCreateBufferView = 0;
+      PFN_vkDestroyBufferView vkDestroyBufferView = 0;
+      PFN_vkCreateImage vkCreateImage = 0;
+      PFN_vkDestroyImage vkDestroyImage = 0;
+      PFN_vkGetImageSubresourceLayout vkGetImageSubresourceLayout = 0;
+      PFN_vkCreateImageView vkCreateImageView = 0;
+      PFN_vkDestroyImageView vkDestroyImageView = 0;
+      PFN_vkCreateShaderModule vkCreateShaderModule = 0;
+      PFN_vkDestroyShaderModule vkDestroyShaderModule = 0;
+      PFN_vkCreatePipelineCache vkCreatePipelineCache = 0;
+      PFN_vkDestroyPipelineCache vkDestroyPipelineCache = 0;
+      PFN_vkGetPipelineCacheData vkGetPipelineCacheData = 0;
+      PFN_vkMergePipelineCaches vkMergePipelineCaches = 0;
+      PFN_vkCreateGraphicsPipelines vkCreateGraphicsPipelines = 0;
+      PFN_vkCreateComputePipelines vkCreateComputePipelines = 0;
+      PFN_vkDestroyPipeline vkDestroyPipeline = 0;
+      PFN_vkCreatePipelineLayout vkCreatePipelineLayout = 0;
+      PFN_vkDestroyPipelineLayout vkDestroyPipelineLayout = 0;
+      PFN_vkCreateSampler vkCreateSampler = 0;
+      PFN_vkDestroySampler vkDestroySampler = 0;
+      PFN_vkCreateDescriptorSetLayout vkCreateDescriptorSetLayout = 0;
+      PFN_vkDestroyDescriptorSetLayout vkDestroyDescriptorSetLayout = 0;
+      PFN_vkCreateDescriptorPool vkCreateDescriptorPool = 0;
+      PFN_vkDestroyDescriptorPool vkDestroyDescriptorPool = 0;
+      PFN_vkResetDescriptorPool vkResetDescriptorPool = 0;
+      PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets = 0;
+      PFN_vkFreeDescriptorSets vkFreeDescriptorSets = 0;
+      PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets = 0;
+      PFN_vkCreateFramebuffer vkCreateFramebuffer = 0;
+      PFN_vkDestroyFramebuffer vkDestroyFramebuffer = 0;
+      PFN_vkCreateRenderPass vkCreateRenderPass = 0;
+      PFN_vkDestroyRenderPass vkDestroyRenderPass = 0;
+      PFN_vkGetRenderAreaGranularity vkGetRenderAreaGranularity = 0;
+      PFN_vkCreateCommandPool vkCreateCommandPool = 0;
+      PFN_vkDestroyCommandPool vkDestroyCommandPool = 0;
+      PFN_vkResetCommandPool vkResetCommandPool = 0;
+      PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers = 0;
+      PFN_vkFreeCommandBuffers vkFreeCommandBuffers = 0;
+      PFN_vkBeginCommandBuffer vkBeginCommandBuffer = 0;
+      PFN_vkEndCommandBuffer vkEndCommandBuffer = 0;
+      PFN_vkResetCommandBuffer vkResetCommandBuffer = 0;
+      PFN_vkCmdBindPipeline vkCmdBindPipeline = 0;
+      PFN_vkCmdSetViewport vkCmdSetViewport = 0;
+      PFN_vkCmdSetScissor vkCmdSetScissor = 0;
+      PFN_vkCmdSetLineWidth vkCmdSetLineWidth = 0;
+      PFN_vkCmdSetDepthBias vkCmdSetDepthBias = 0;
+      PFN_vkCmdSetBlendConstants vkCmdSetBlendConstants = 0;
+      PFN_vkCmdSetDepthBounds vkCmdSetDepthBounds = 0;
+      PFN_vkCmdSetStencilCompareMask vkCmdSetStencilCompareMask = 0;
+      PFN_vkCmdSetStencilWriteMask vkCmdSetStencilWriteMask = 0;
+      PFN_vkCmdSetStencilReference vkCmdSetStencilReference = 0;
+      PFN_vkCmdBindDescriptorSets vkCmdBindDescriptorSets = 0;
+      PFN_vkCmdBindIndexBuffer vkCmdBindIndexBuffer = 0;
+      PFN_vkCmdBindVertexBuffers vkCmdBindVertexBuffers = 0;
+      PFN_vkCmdDraw vkCmdDraw = 0;
+      PFN_vkCmdDrawIndexed vkCmdDrawIndexed = 0;
+      PFN_vkCmdDrawIndirect vkCmdDrawIndirect = 0;
+      PFN_vkCmdDrawIndexedIndirect vkCmdDrawIndexedIndirect = 0;
+      PFN_vkCmdDispatch vkCmdDispatch = 0;
+      PFN_vkCmdDispatchIndirect vkCmdDispatchIndirect = 0;
+      PFN_vkCmdCopyBuffer vkCmdCopyBuffer = 0;
+      PFN_vkCmdCopyImage vkCmdCopyImage = 0;
+      PFN_vkCmdBlitImage vkCmdBlitImage = 0;
+      PFN_vkCmdCopyBufferToImage vkCmdCopyBufferToImage = 0;
+      PFN_vkCmdCopyImageToBuffer vkCmdCopyImageToBuffer = 0;
+      PFN_vkCmdUpdateBuffer vkCmdUpdateBuffer = 0;
+      PFN_vkCmdFillBuffer vkCmdFillBuffer = 0;
+      PFN_vkCmdClearColorImage vkCmdClearColorImage = 0;
+      PFN_vkCmdClearDepthStencilImage vkCmdClearDepthStencilImage = 0;
+      PFN_vkCmdClearAttachments vkCmdClearAttachments = 0;
+      PFN_vkCmdResolveImage vkCmdResolveImage = 0;
+      PFN_vkCmdSetEvent vkCmdSetEvent = 0;
+      PFN_vkCmdResetEvent vkCmdResetEvent = 0;
+      PFN_vkCmdWaitEvents vkCmdWaitEvents = 0;
+      PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier = 0;
+      PFN_vkCmdBeginQuery vkCmdBeginQuery = 0;
+      PFN_vkCmdEndQuery vkCmdEndQuery = 0;
+      PFN_vkCmdResetQueryPool vkCmdResetQueryPool = 0;
+      PFN_vkCmdWriteTimestamp vkCmdWriteTimestamp = 0;
+      PFN_vkCmdCopyQueryPoolResults vkCmdCopyQueryPoolResults = 0;
+      PFN_vkCmdPushConstants vkCmdPushConstants = 0;
+      PFN_vkCmdBeginRenderPass vkCmdBeginRenderPass = 0;
+      PFN_vkCmdNextSubpass vkCmdNextSubpass = 0;
+      PFN_vkCmdEndRenderPass vkCmdEndRenderPass = 0;
+      PFN_vkCmdExecuteCommands vkCmdExecuteCommands = 0;
+
+  //=== VK_VERSION_1_1 ===
+      PFN_vkBindBufferMemory2 vkBindBufferMemory2 = 0;
+      PFN_vkBindImageMemory2 vkBindImageMemory2 = 0;
+      PFN_vkGetDeviceGroupPeerMemoryFeatures vkGetDeviceGroupPeerMemoryFeatures = 0;
+      PFN_vkCmdSetDeviceMask vkCmdSetDeviceMask = 0;
+      PFN_vkCmdDispatchBase vkCmdDispatchBase = 0;
+      PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2 = 0;
+      PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2 = 0;
+      PFN_vkGetImageSparseMemoryRequirements2 vkGetImageSparseMemoryRequirements2 = 0;
+      PFN_vkTrimCommandPool vkTrimCommandPool = 0;
+      PFN_vkGetDeviceQueue2 vkGetDeviceQueue2 = 0;
+      PFN_vkCreateSamplerYcbcrConversion vkCreateSamplerYcbcrConversion = 0;
+      PFN_vkDestroySamplerYcbcrConversion vkDestroySamplerYcbcrConversion = 0;
+      PFN_vkCreateDescriptorUpdateTemplate vkCreateDescriptorUpdateTemplate = 0;
+      PFN_vkDestroyDescriptorUpdateTemplate vkDestroyDescriptorUpdateTemplate = 0;
+      PFN_vkUpdateDescriptorSetWithTemplate vkUpdateDescriptorSetWithTemplate = 0;
+      PFN_vkGetDescriptorSetLayoutSupport vkGetDescriptorSetLayoutSupport = 0;
+
+  //=== VK_VERSION_1_2 ===
+      PFN_vkCmdDrawIndirectCount vkCmdDrawIndirectCount = 0;
+      PFN_vkCmdDrawIndexedIndirectCount vkCmdDrawIndexedIndirectCount = 0;
+      PFN_vkCreateRenderPass2 vkCreateRenderPass2 = 0;
+      PFN_vkCmdBeginRenderPass2 vkCmdBeginRenderPass2 = 0;
+      PFN_vkCmdNextSubpass2 vkCmdNextSubpass2 = 0;
+      PFN_vkCmdEndRenderPass2 vkCmdEndRenderPass2 = 0;
+      PFN_vkResetQueryPool vkResetQueryPool = 0;
+      PFN_vkGetSemaphoreCounterValue vkGetSemaphoreCounterValue = 0;
+      PFN_vkWaitSemaphores vkWaitSemaphores = 0;
+      PFN_vkSignalSemaphore vkSignalSemaphore = 0;
+      PFN_vkGetBufferDeviceAddress vkGetBufferDeviceAddress = 0;
+      PFN_vkGetBufferOpaqueCaptureAddress vkGetBufferOpaqueCaptureAddress = 0;
+      PFN_vkGetDeviceMemoryOpaqueCaptureAddress vkGetDeviceMemoryOpaqueCaptureAddress = 0;
+
+  //=== VK_VERSION_1_3 ===
+      PFN_vkCreatePrivateDataSlot vkCreatePrivateDataSlot = 0;
+      PFN_vkDestroyPrivateDataSlot vkDestroyPrivateDataSlot = 0;
+      PFN_vkSetPrivateData vkSetPrivateData = 0;
+      PFN_vkGetPrivateData vkGetPrivateData = 0;
+      PFN_vkCmdSetEvent2 vkCmdSetEvent2 = 0;
+      PFN_vkCmdResetEvent2 vkCmdResetEvent2 = 0;
+      PFN_vkCmdWaitEvents2 vkCmdWaitEvents2 = 0;
+      PFN_vkCmdPipelineBarrier2 vkCmdPipelineBarrier2 = 0;
+      PFN_vkCmdWriteTimestamp2 vkCmdWriteTimestamp2 = 0;
+      PFN_vkQueueSubmit2 vkQueueSubmit2 = 0;
+      PFN_vkCmdCopyBuffer2 vkCmdCopyBuffer2 = 0;
+      PFN_vkCmdCopyImage2 vkCmdCopyImage2 = 0;
+      PFN_vkCmdCopyBufferToImage2 vkCmdCopyBufferToImage2 = 0;
+      PFN_vkCmdCopyImageToBuffer2 vkCmdCopyImageToBuffer2 = 0;
+      PFN_vkCmdBlitImage2 vkCmdBlitImage2 = 0;
+      PFN_vkCmdResolveImage2 vkCmdResolveImage2 = 0;
+      PFN_vkCmdBeginRendering vkCmdBeginRendering = 0;
+      PFN_vkCmdEndRendering vkCmdEndRendering = 0;
+      PFN_vkCmdSetCullMode vkCmdSetCullMode = 0;
+      PFN_vkCmdSetFrontFace vkCmdSetFrontFace = 0;
+      PFN_vkCmdSetPrimitiveTopology vkCmdSetPrimitiveTopology = 0;
+      PFN_vkCmdSetViewportWithCount vkCmdSetViewportWithCount = 0;
+      PFN_vkCmdSetScissorWithCount vkCmdSetScissorWithCount = 0;
+      PFN_vkCmdBindVertexBuffers2 vkCmdBindVertexBuffers2 = 0;
+      PFN_vkCmdSetDepthTestEnable vkCmdSetDepthTestEnable = 0;
+      PFN_vkCmdSetDepthWriteEnable vkCmdSetDepthWriteEnable = 0;
+      PFN_vkCmdSetDepthCompareOp vkCmdSetDepthCompareOp = 0;
+      PFN_vkCmdSetDepthBoundsTestEnable vkCmdSetDepthBoundsTestEnable = 0;
+      PFN_vkCmdSetStencilTestEnable vkCmdSetStencilTestEnable = 0;
+      PFN_vkCmdSetStencilOp vkCmdSetStencilOp = 0;
+      PFN_vkCmdSetRasterizerDiscardEnable vkCmdSetRasterizerDiscardEnable = 0;
+      PFN_vkCmdSetDepthBiasEnable vkCmdSetDepthBiasEnable = 0;
+      PFN_vkCmdSetPrimitiveRestartEnable vkCmdSetPrimitiveRestartEnable = 0;
+      PFN_vkGetDeviceBufferMemoryRequirements vkGetDeviceBufferMemoryRequirements = 0;
+      PFN_vkGetDeviceImageMemoryRequirements vkGetDeviceImageMemoryRequirements = 0;
+      PFN_vkGetDeviceImageSparseMemoryRequirements vkGetDeviceImageSparseMemoryRequirements = 0;
+
+  //=== VK_AMD_buffer_marker ===
+      PFN_vkCmdWriteBufferMarkerAMD vkCmdWriteBufferMarkerAMD = 0;
+
+  //=== VK_AMD_display_native_hdr ===
+      PFN_vkSetLocalDimmingAMD vkSetLocalDimmingAMD = 0;
+
+  //=== VK_AMD_draw_indirect_count ===
+      PFN_vkCmdDrawIndirectCountAMD vkCmdDrawIndirectCountAMD = 0;
+      PFN_vkCmdDrawIndexedIndirectCountAMD vkCmdDrawIndexedIndirectCountAMD = 0;
+
+  //=== VK_AMD_shader_info ===
+      PFN_vkGetShaderInfoAMD vkGetShaderInfoAMD = 0;
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  //=== VK_ANDROID_external_memory_android_hardware_buffer ===
+      PFN_vkGetAndroidHardwareBufferPropertiesANDROID vkGetAndroidHardwareBufferPropertiesANDROID = 0;
+      PFN_vkGetMemoryAndroidHardwareBufferANDROID vkGetMemoryAndroidHardwareBufferANDROID = 0;
+#else 
+      PFN_dummy vkGetAndroidHardwareBufferPropertiesANDROID_placeholder = 0;
+      PFN_dummy vkGetMemoryAndroidHardwareBufferANDROID_placeholder = 0;
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+  //=== VK_EXT_buffer_device_address ===
+      PFN_vkGetBufferDeviceAddressEXT vkGetBufferDeviceAddressEXT = 0;
+
+  //=== VK_EXT_calibrated_timestamps ===
+      PFN_vkGetCalibratedTimestampsEXT vkGetCalibratedTimestampsEXT = 0;
+
+  //=== VK_EXT_color_write_enable ===
+      PFN_vkCmdSetColorWriteEnableEXT vkCmdSetColorWriteEnableEXT = 0;
+
+  //=== VK_EXT_conditional_rendering ===
+      PFN_vkCmdBeginConditionalRenderingEXT vkCmdBeginConditionalRenderingEXT = 0;
+      PFN_vkCmdEndConditionalRenderingEXT vkCmdEndConditionalRenderingEXT = 0;
+
+  //=== VK_EXT_debug_marker ===
+      PFN_vkDebugMarkerSetObjectTagEXT vkDebugMarkerSetObjectTagEXT = 0;
+      PFN_vkDebugMarkerSetObjectNameEXT vkDebugMarkerSetObjectNameEXT = 0;
+      PFN_vkCmdDebugMarkerBeginEXT vkCmdDebugMarkerBeginEXT = 0;
+      PFN_vkCmdDebugMarkerEndEXT vkCmdDebugMarkerEndEXT = 0;
+      PFN_vkCmdDebugMarkerInsertEXT vkCmdDebugMarkerInsertEXT = 0;
+
+  //=== VK_EXT_debug_utils ===
+      PFN_vkSetDebugUtilsObjectNameEXT vkSetDebugUtilsObjectNameEXT = 0;
+      PFN_vkSetDebugUtilsObjectTagEXT vkSetDebugUtilsObjectTagEXT = 0;
+      PFN_vkQueueBeginDebugUtilsLabelEXT vkQueueBeginDebugUtilsLabelEXT = 0;
+      PFN_vkQueueEndDebugUtilsLabelEXT vkQueueEndDebugUtilsLabelEXT = 0;
+      PFN_vkQueueInsertDebugUtilsLabelEXT vkQueueInsertDebugUtilsLabelEXT = 0;
+      PFN_vkCmdBeginDebugUtilsLabelEXT vkCmdBeginDebugUtilsLabelEXT = 0;
+      PFN_vkCmdEndDebugUtilsLabelEXT vkCmdEndDebugUtilsLabelEXT = 0;
+      PFN_vkCmdInsertDebugUtilsLabelEXT vkCmdInsertDebugUtilsLabelEXT = 0;
+
+  //=== VK_EXT_descriptor_buffer ===
+      PFN_vkGetDescriptorSetLayoutSizeEXT vkGetDescriptorSetLayoutSizeEXT = 0;
+      PFN_vkGetDescriptorSetLayoutBindingOffsetEXT vkGetDescriptorSetLayoutBindingOffsetEXT = 0;
+      PFN_vkGetDescriptorEXT vkGetDescriptorEXT = 0;
+      PFN_vkCmdBindDescriptorBuffersEXT vkCmdBindDescriptorBuffersEXT = 0;
+      PFN_vkCmdSetDescriptorBufferOffsetsEXT vkCmdSetDescriptorBufferOffsetsEXT = 0;
+      PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT vkCmdBindDescriptorBufferEmbeddedSamplersEXT = 0;
+      PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT vkGetBufferOpaqueCaptureDescriptorDataEXT = 0;
+      PFN_vkGetImageOpaqueCaptureDescriptorDataEXT vkGetImageOpaqueCaptureDescriptorDataEXT = 0;
+      PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT vkGetImageViewOpaqueCaptureDescriptorDataEXT = 0;
+      PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT vkGetSamplerOpaqueCaptureDescriptorDataEXT = 0;
+      PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT = 0;
+
+  //=== VK_EXT_device_fault ===
+      PFN_vkGetDeviceFaultInfoEXT vkGetDeviceFaultInfoEXT = 0;
+
+  //=== VK_EXT_discard_rectangles ===
+      PFN_vkCmdSetDiscardRectangleEXT vkCmdSetDiscardRectangleEXT = 0;
+
+  //=== VK_EXT_display_control ===
+      PFN_vkDisplayPowerControlEXT vkDisplayPowerControlEXT = 0;
+      PFN_vkRegisterDeviceEventEXT vkRegisterDeviceEventEXT = 0;
+      PFN_vkRegisterDisplayEventEXT vkRegisterDisplayEventEXT = 0;
+      PFN_vkGetSwapchainCounterEXT vkGetSwapchainCounterEXT = 0;
+
+  //=== VK_EXT_extended_dynamic_state ===
+      PFN_vkCmdSetCullModeEXT vkCmdSetCullModeEXT = 0;
+      PFN_vkCmdSetFrontFaceEXT vkCmdSetFrontFaceEXT = 0;
+      PFN_vkCmdSetPrimitiveTopologyEXT vkCmdSetPrimitiveTopologyEXT = 0;
+      PFN_vkCmdSetViewportWithCountEXT vkCmdSetViewportWithCountEXT = 0;
+      PFN_vkCmdSetScissorWithCountEXT vkCmdSetScissorWithCountEXT = 0;
+      PFN_vkCmdBindVertexBuffers2EXT vkCmdBindVertexBuffers2EXT = 0;
+      PFN_vkCmdSetDepthTestEnableEXT vkCmdSetDepthTestEnableEXT = 0;
+      PFN_vkCmdSetDepthWriteEnableEXT vkCmdSetDepthWriteEnableEXT = 0;
+      PFN_vkCmdSetDepthCompareOpEXT vkCmdSetDepthCompareOpEXT = 0;
+      PFN_vkCmdSetDepthBoundsTestEnableEXT vkCmdSetDepthBoundsTestEnableEXT = 0;
+      PFN_vkCmdSetStencilTestEnableEXT vkCmdSetStencilTestEnableEXT = 0;
+      PFN_vkCmdSetStencilOpEXT vkCmdSetStencilOpEXT = 0;
+
+  //=== VK_EXT_extended_dynamic_state2 ===
+      PFN_vkCmdSetPatchControlPointsEXT vkCmdSetPatchControlPointsEXT = 0;
+      PFN_vkCmdSetRasterizerDiscardEnableEXT vkCmdSetRasterizerDiscardEnableEXT = 0;
+      PFN_vkCmdSetDepthBiasEnableEXT vkCmdSetDepthBiasEnableEXT = 0;
+      PFN_vkCmdSetLogicOpEXT vkCmdSetLogicOpEXT = 0;
+      PFN_vkCmdSetPrimitiveRestartEnableEXT vkCmdSetPrimitiveRestartEnableEXT = 0;
+
+  //=== VK_EXT_extended_dynamic_state3 ===
+      PFN_vkCmdSetTessellationDomainOriginEXT vkCmdSetTessellationDomainOriginEXT = 0;
+      PFN_vkCmdSetDepthClampEnableEXT vkCmdSetDepthClampEnableEXT = 0;
+      PFN_vkCmdSetPolygonModeEXT vkCmdSetPolygonModeEXT = 0;
+      PFN_vkCmdSetRasterizationSamplesEXT vkCmdSetRasterizationSamplesEXT = 0;
+      PFN_vkCmdSetSampleMaskEXT vkCmdSetSampleMaskEXT = 0;
+      PFN_vkCmdSetAlphaToCoverageEnableEXT vkCmdSetAlphaToCoverageEnableEXT = 0;
+      PFN_vkCmdSetAlphaToOneEnableEXT vkCmdSetAlphaToOneEnableEXT = 0;
+      PFN_vkCmdSetLogicOpEnableEXT vkCmdSetLogicOpEnableEXT = 0;
+      PFN_vkCmdSetColorBlendEnableEXT vkCmdSetColorBlendEnableEXT = 0;
+      PFN_vkCmdSetColorBlendEquationEXT vkCmdSetColorBlendEquationEXT = 0;
+      PFN_vkCmdSetColorWriteMaskEXT vkCmdSetColorWriteMaskEXT = 0;
+      PFN_vkCmdSetRasterizationStreamEXT vkCmdSetRasterizationStreamEXT = 0;
+      PFN_vkCmdSetConservativeRasterizationModeEXT vkCmdSetConservativeRasterizationModeEXT = 0;
+      PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT vkCmdSetExtraPrimitiveOverestimationSizeEXT = 0;
+      PFN_vkCmdSetDepthClipEnableEXT vkCmdSetDepthClipEnableEXT = 0;
+      PFN_vkCmdSetSampleLocationsEnableEXT vkCmdSetSampleLocationsEnableEXT = 0;
+      PFN_vkCmdSetColorBlendAdvancedEXT vkCmdSetColorBlendAdvancedEXT = 0;
+      PFN_vkCmdSetProvokingVertexModeEXT vkCmdSetProvokingVertexModeEXT = 0;
+      PFN_vkCmdSetLineRasterizationModeEXT vkCmdSetLineRasterizationModeEXT = 0;
+      PFN_vkCmdSetLineStippleEnableEXT vkCmdSetLineStippleEnableEXT = 0;
+      PFN_vkCmdSetDepthClipNegativeOneToOneEXT vkCmdSetDepthClipNegativeOneToOneEXT = 0;
+      PFN_vkCmdSetViewportWScalingEnableNV vkCmdSetViewportWScalingEnableNV = 0;
+      PFN_vkCmdSetViewportSwizzleNV vkCmdSetViewportSwizzleNV = 0;
+      PFN_vkCmdSetCoverageToColorEnableNV vkCmdSetCoverageToColorEnableNV = 0;
+      PFN_vkCmdSetCoverageToColorLocationNV vkCmdSetCoverageToColorLocationNV = 0;
+      PFN_vkCmdSetCoverageModulationModeNV vkCmdSetCoverageModulationModeNV = 0;
+      PFN_vkCmdSetCoverageModulationTableEnableNV vkCmdSetCoverageModulationTableEnableNV = 0;
+      PFN_vkCmdSetCoverageModulationTableNV vkCmdSetCoverageModulationTableNV = 0;
+      PFN_vkCmdSetShadingRateImageEnableNV vkCmdSetShadingRateImageEnableNV = 0;
+      PFN_vkCmdSetRepresentativeFragmentTestEnableNV vkCmdSetRepresentativeFragmentTestEnableNV = 0;
+      PFN_vkCmdSetCoverageReductionModeNV vkCmdSetCoverageReductionModeNV = 0;
+
+  //=== VK_EXT_external_memory_host ===
+      PFN_vkGetMemoryHostPointerPropertiesEXT vkGetMemoryHostPointerPropertiesEXT = 0;
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_EXT_full_screen_exclusive ===
+      PFN_vkAcquireFullScreenExclusiveModeEXT vkAcquireFullScreenExclusiveModeEXT = 0;
+      PFN_vkReleaseFullScreenExclusiveModeEXT vkReleaseFullScreenExclusiveModeEXT = 0;
+      PFN_vkGetDeviceGroupSurfacePresentModes2EXT vkGetDeviceGroupSurfacePresentModes2EXT = 0;
+#else 
+      PFN_dummy vkAcquireFullScreenExclusiveModeEXT_placeholder = 0;
+      PFN_dummy vkReleaseFullScreenExclusiveModeEXT_placeholder = 0;
+      PFN_dummy vkGetDeviceGroupSurfacePresentModes2EXT_placeholder = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_EXT_hdr_metadata ===
+      PFN_vkSetHdrMetadataEXT vkSetHdrMetadataEXT = 0;
+
+  //=== VK_EXT_host_query_reset ===
+      PFN_vkResetQueryPoolEXT vkResetQueryPoolEXT = 0;
+
+  //=== VK_EXT_image_compression_control ===
+      PFN_vkGetImageSubresourceLayout2EXT vkGetImageSubresourceLayout2EXT = 0;
+
+  //=== VK_EXT_image_drm_format_modifier ===
+      PFN_vkGetImageDrmFormatModifierPropertiesEXT vkGetImageDrmFormatModifierPropertiesEXT = 0;
+
+  //=== VK_EXT_line_rasterization ===
+      PFN_vkCmdSetLineStippleEXT vkCmdSetLineStippleEXT = 0;
+
+  //=== VK_EXT_mesh_shader ===
+      PFN_vkCmdDrawMeshTasksEXT vkCmdDrawMeshTasksEXT = 0;
+      PFN_vkCmdDrawMeshTasksIndirectEXT vkCmdDrawMeshTasksIndirectEXT = 0;
+      PFN_vkCmdDrawMeshTasksIndirectCountEXT vkCmdDrawMeshTasksIndirectCountEXT = 0;
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+  //=== VK_EXT_metal_objects ===
+      PFN_vkExportMetalObjectsEXT vkExportMetalObjectsEXT = 0;
+#else 
+      PFN_dummy vkExportMetalObjectsEXT_placeholder = 0;
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  //=== VK_EXT_multi_draw ===
+      PFN_vkCmdDrawMultiEXT vkCmdDrawMultiEXT = 0;
+      PFN_vkCmdDrawMultiIndexedEXT vkCmdDrawMultiIndexedEXT = 0;
+
+  //=== VK_EXT_opacity_micromap ===
+      PFN_vkCreateMicromapEXT vkCreateMicromapEXT = 0;
+      PFN_vkDestroyMicromapEXT vkDestroyMicromapEXT = 0;
+      PFN_vkCmdBuildMicromapsEXT vkCmdBuildMicromapsEXT = 0;
+      PFN_vkBuildMicromapsEXT vkBuildMicromapsEXT = 0;
+      PFN_vkCopyMicromapEXT vkCopyMicromapEXT = 0;
+      PFN_vkCopyMicromapToMemoryEXT vkCopyMicromapToMemoryEXT = 0;
+      PFN_vkCopyMemoryToMicromapEXT vkCopyMemoryToMicromapEXT = 0;
+      PFN_vkWriteMicromapsPropertiesEXT vkWriteMicromapsPropertiesEXT = 0;
+      PFN_vkCmdCopyMicromapEXT vkCmdCopyMicromapEXT = 0;
+      PFN_vkCmdCopyMicromapToMemoryEXT vkCmdCopyMicromapToMemoryEXT = 0;
+      PFN_vkCmdCopyMemoryToMicromapEXT vkCmdCopyMemoryToMicromapEXT = 0;
+      PFN_vkCmdWriteMicromapsPropertiesEXT vkCmdWriteMicromapsPropertiesEXT = 0;
+      PFN_vkGetDeviceMicromapCompatibilityEXT vkGetDeviceMicromapCompatibilityEXT = 0;
+      PFN_vkGetMicromapBuildSizesEXT vkGetMicromapBuildSizesEXT = 0;
+
+  //=== VK_EXT_pageable_device_local_memory ===
+      PFN_vkSetDeviceMemoryPriorityEXT vkSetDeviceMemoryPriorityEXT = 0;
+
+  //=== VK_EXT_pipeline_properties ===
+      PFN_vkGetPipelinePropertiesEXT vkGetPipelinePropertiesEXT = 0;
+
+  //=== VK_EXT_private_data ===
+      PFN_vkCreatePrivateDataSlotEXT vkCreatePrivateDataSlotEXT = 0;
+      PFN_vkDestroyPrivateDataSlotEXT vkDestroyPrivateDataSlotEXT = 0;
+      PFN_vkSetPrivateDataEXT vkSetPrivateDataEXT = 0;
+      PFN_vkGetPrivateDataEXT vkGetPrivateDataEXT = 0;
+
+  //=== VK_EXT_sample_locations ===
+      PFN_vkCmdSetSampleLocationsEXT vkCmdSetSampleLocationsEXT = 0;
+
+  //=== VK_EXT_shader_module_identifier ===
+      PFN_vkGetShaderModuleIdentifierEXT vkGetShaderModuleIdentifierEXT = 0;
+      PFN_vkGetShaderModuleCreateInfoIdentifierEXT vkGetShaderModuleCreateInfoIdentifierEXT = 0;
+
+  //=== VK_EXT_swapchain_maintenance1 ===
+      PFN_vkReleaseSwapchainImagesEXT vkReleaseSwapchainImagesEXT = 0;
+
+  //=== VK_EXT_transform_feedback ===
+      PFN_vkCmdBindTransformFeedbackBuffersEXT vkCmdBindTransformFeedbackBuffersEXT = 0;
+      PFN_vkCmdBeginTransformFeedbackEXT vkCmdBeginTransformFeedbackEXT = 0;
+      PFN_vkCmdEndTransformFeedbackEXT vkCmdEndTransformFeedbackEXT = 0;
+      PFN_vkCmdBeginQueryIndexedEXT vkCmdBeginQueryIndexedEXT = 0;
+      PFN_vkCmdEndQueryIndexedEXT vkCmdEndQueryIndexedEXT = 0;
+      PFN_vkCmdDrawIndirectByteCountEXT vkCmdDrawIndirectByteCountEXT = 0;
+
+  //=== VK_EXT_validation_cache ===
+      PFN_vkCreateValidationCacheEXT vkCreateValidationCacheEXT = 0;
+      PFN_vkDestroyValidationCacheEXT vkDestroyValidationCacheEXT = 0;
+      PFN_vkMergeValidationCachesEXT vkMergeValidationCachesEXT = 0;
+      PFN_vkGetValidationCacheDataEXT vkGetValidationCacheDataEXT = 0;
+
+  //=== VK_EXT_vertex_input_dynamic_state ===
+      PFN_vkCmdSetVertexInputEXT vkCmdSetVertexInputEXT = 0;
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_buffer_collection ===
+      PFN_vkCreateBufferCollectionFUCHSIA vkCreateBufferCollectionFUCHSIA = 0;
+      PFN_vkSetBufferCollectionImageConstraintsFUCHSIA vkSetBufferCollectionImageConstraintsFUCHSIA = 0;
+      PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA vkSetBufferCollectionBufferConstraintsFUCHSIA = 0;
+      PFN_vkDestroyBufferCollectionFUCHSIA vkDestroyBufferCollectionFUCHSIA = 0;
+      PFN_vkGetBufferCollectionPropertiesFUCHSIA vkGetBufferCollectionPropertiesFUCHSIA = 0;
+#else 
+      PFN_dummy vkCreateBufferCollectionFUCHSIA_placeholder = 0;
+      PFN_dummy vkSetBufferCollectionImageConstraintsFUCHSIA_placeholder = 0;
+      PFN_dummy vkSetBufferCollectionBufferConstraintsFUCHSIA_placeholder = 0;
+      PFN_dummy vkDestroyBufferCollectionFUCHSIA_placeholder = 0;
+      PFN_dummy vkGetBufferCollectionPropertiesFUCHSIA_placeholder = 0;
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_external_memory ===
+      PFN_vkGetMemoryZirconHandleFUCHSIA vkGetMemoryZirconHandleFUCHSIA = 0;
+      PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA vkGetMemoryZirconHandlePropertiesFUCHSIA = 0;
+#else 
+      PFN_dummy vkGetMemoryZirconHandleFUCHSIA_placeholder = 0;
+      PFN_dummy vkGetMemoryZirconHandlePropertiesFUCHSIA_placeholder = 0;
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_external_semaphore ===
+      PFN_vkImportSemaphoreZirconHandleFUCHSIA vkImportSemaphoreZirconHandleFUCHSIA = 0;
+      PFN_vkGetSemaphoreZirconHandleFUCHSIA vkGetSemaphoreZirconHandleFUCHSIA = 0;
+#else 
+      PFN_dummy vkImportSemaphoreZirconHandleFUCHSIA_placeholder = 0;
+      PFN_dummy vkGetSemaphoreZirconHandleFUCHSIA_placeholder = 0;
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  //=== VK_GOOGLE_display_timing ===
+      PFN_vkGetRefreshCycleDurationGOOGLE vkGetRefreshCycleDurationGOOGLE = 0;
+      PFN_vkGetPastPresentationTimingGOOGLE vkGetPastPresentationTimingGOOGLE = 0;
+
+  //=== VK_HUAWEI_cluster_culling_shader ===
+      PFN_vkCmdDrawClusterHUAWEI vkCmdDrawClusterHUAWEI = 0;
+      PFN_vkCmdDrawClusterIndirectHUAWEI vkCmdDrawClusterIndirectHUAWEI = 0;
+
+  //=== VK_HUAWEI_invocation_mask ===
+      PFN_vkCmdBindInvocationMaskHUAWEI vkCmdBindInvocationMaskHUAWEI = 0;
+
+  //=== VK_HUAWEI_subpass_shading ===
+      PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = 0;
+      PFN_vkCmdSubpassShadingHUAWEI vkCmdSubpassShadingHUAWEI = 0;
+
+  //=== VK_INTEL_performance_query ===
+      PFN_vkInitializePerformanceApiINTEL vkInitializePerformanceApiINTEL = 0;
+      PFN_vkUninitializePerformanceApiINTEL vkUninitializePerformanceApiINTEL = 0;
+      PFN_vkCmdSetPerformanceMarkerINTEL vkCmdSetPerformanceMarkerINTEL = 0;
+      PFN_vkCmdSetPerformanceStreamMarkerINTEL vkCmdSetPerformanceStreamMarkerINTEL = 0;
+      PFN_vkCmdSetPerformanceOverrideINTEL vkCmdSetPerformanceOverrideINTEL = 0;
+      PFN_vkAcquirePerformanceConfigurationINTEL vkAcquirePerformanceConfigurationINTEL = 0;
+      PFN_vkReleasePerformanceConfigurationINTEL vkReleasePerformanceConfigurationINTEL = 0;
+      PFN_vkQueueSetPerformanceConfigurationINTEL vkQueueSetPerformanceConfigurationINTEL = 0;
+      PFN_vkGetPerformanceParameterINTEL vkGetPerformanceParameterINTEL = 0;
+
+  //=== VK_KHR_acceleration_structure ===
+      PFN_vkCreateAccelerationStructureKHR vkCreateAccelerationStructureKHR = 0;
+      PFN_vkDestroyAccelerationStructureKHR vkDestroyAccelerationStructureKHR = 0;
+      PFN_vkCmdBuildAccelerationStructuresKHR vkCmdBuildAccelerationStructuresKHR = 0;
+      PFN_vkCmdBuildAccelerationStructuresIndirectKHR vkCmdBuildAccelerationStructuresIndirectKHR = 0;
+      PFN_vkBuildAccelerationStructuresKHR vkBuildAccelerationStructuresKHR = 0;
+      PFN_vkCopyAccelerationStructureKHR vkCopyAccelerationStructureKHR = 0;
+      PFN_vkCopyAccelerationStructureToMemoryKHR vkCopyAccelerationStructureToMemoryKHR = 0;
+      PFN_vkCopyMemoryToAccelerationStructureKHR vkCopyMemoryToAccelerationStructureKHR = 0;
+      PFN_vkWriteAccelerationStructuresPropertiesKHR vkWriteAccelerationStructuresPropertiesKHR = 0;
+      PFN_vkCmdCopyAccelerationStructureKHR vkCmdCopyAccelerationStructureKHR = 0;
+      PFN_vkCmdCopyAccelerationStructureToMemoryKHR vkCmdCopyAccelerationStructureToMemoryKHR = 0;
+      PFN_vkCmdCopyMemoryToAccelerationStructureKHR vkCmdCopyMemoryToAccelerationStructureKHR = 0;
+      PFN_vkGetAccelerationStructureDeviceAddressKHR vkGetAccelerationStructureDeviceAddressKHR = 0;
+      PFN_vkCmdWriteAccelerationStructuresPropertiesKHR vkCmdWriteAccelerationStructuresPropertiesKHR = 0;
+      PFN_vkGetDeviceAccelerationStructureCompatibilityKHR vkGetDeviceAccelerationStructureCompatibilityKHR = 0;
+      PFN_vkGetAccelerationStructureBuildSizesKHR vkGetAccelerationStructureBuildSizesKHR = 0;
+
+  //=== VK_KHR_bind_memory2 ===
+      PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR = 0;
+      PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR = 0;
+
+  //=== VK_KHR_buffer_device_address ===
+      PFN_vkGetBufferDeviceAddressKHR vkGetBufferDeviceAddressKHR = 0;
+      PFN_vkGetBufferOpaqueCaptureAddressKHR vkGetBufferOpaqueCaptureAddressKHR = 0;
+      PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR vkGetDeviceMemoryOpaqueCaptureAddressKHR = 0;
+
+  //=== VK_KHR_copy_commands2 ===
+      PFN_vkCmdCopyBuffer2KHR vkCmdCopyBuffer2KHR = 0;
+      PFN_vkCmdCopyImage2KHR vkCmdCopyImage2KHR = 0;
+      PFN_vkCmdCopyBufferToImage2KHR vkCmdCopyBufferToImage2KHR = 0;
+      PFN_vkCmdCopyImageToBuffer2KHR vkCmdCopyImageToBuffer2KHR = 0;
+      PFN_vkCmdBlitImage2KHR vkCmdBlitImage2KHR = 0;
+      PFN_vkCmdResolveImage2KHR vkCmdResolveImage2KHR = 0;
+
+  //=== VK_KHR_create_renderpass2 ===
+      PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR = 0;
+      PFN_vkCmdBeginRenderPass2KHR vkCmdBeginRenderPass2KHR = 0;
+      PFN_vkCmdNextSubpass2KHR vkCmdNextSubpass2KHR = 0;
+      PFN_vkCmdEndRenderPass2KHR vkCmdEndRenderPass2KHR = 0;
+
+  //=== VK_KHR_deferred_host_operations ===
+      PFN_vkCreateDeferredOperationKHR vkCreateDeferredOperationKHR = 0;
+      PFN_vkDestroyDeferredOperationKHR vkDestroyDeferredOperationKHR = 0;
+      PFN_vkGetDeferredOperationMaxConcurrencyKHR vkGetDeferredOperationMaxConcurrencyKHR = 0;
+      PFN_vkGetDeferredOperationResultKHR vkGetDeferredOperationResultKHR = 0;
+      PFN_vkDeferredOperationJoinKHR vkDeferredOperationJoinKHR = 0;
+
+  //=== VK_KHR_descriptor_update_template ===
+      PFN_vkCreateDescriptorUpdateTemplateKHR vkCreateDescriptorUpdateTemplateKHR = 0;
+      PFN_vkDestroyDescriptorUpdateTemplateKHR vkDestroyDescriptorUpdateTemplateKHR = 0;
+      PFN_vkUpdateDescriptorSetWithTemplateKHR vkUpdateDescriptorSetWithTemplateKHR = 0;
+      PFN_vkCmdPushDescriptorSetWithTemplateKHR vkCmdPushDescriptorSetWithTemplateKHR = 0;
+
+  //=== VK_KHR_device_group ===
+      PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR vkGetDeviceGroupPeerMemoryFeaturesKHR = 0;
+      PFN_vkCmdSetDeviceMaskKHR vkCmdSetDeviceMaskKHR = 0;
+      PFN_vkCmdDispatchBaseKHR vkCmdDispatchBaseKHR = 0;
+      PFN_vkGetDeviceGroupPresentCapabilitiesKHR vkGetDeviceGroupPresentCapabilitiesKHR = 0;
+      PFN_vkGetDeviceGroupSurfacePresentModesKHR vkGetDeviceGroupSurfacePresentModesKHR = 0;
+      PFN_vkAcquireNextImage2KHR vkAcquireNextImage2KHR = 0;
+
+  //=== VK_KHR_display_swapchain ===
+      PFN_vkCreateSharedSwapchainsKHR vkCreateSharedSwapchainsKHR = 0;
+
+  //=== VK_KHR_draw_indirect_count ===
+      PFN_vkCmdDrawIndirectCountKHR vkCmdDrawIndirectCountKHR = 0;
+      PFN_vkCmdDrawIndexedIndirectCountKHR vkCmdDrawIndexedIndirectCountKHR = 0;
+
+  //=== VK_KHR_dynamic_rendering ===
+      PFN_vkCmdBeginRenderingKHR vkCmdBeginRenderingKHR = 0;
+      PFN_vkCmdEndRenderingKHR vkCmdEndRenderingKHR = 0;
+
+  //=== VK_KHR_external_fence_fd ===
+      PFN_vkImportFenceFdKHR vkImportFenceFdKHR = 0;
+      PFN_vkGetFenceFdKHR vkGetFenceFdKHR = 0;
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_external_fence_win32 ===
+      PFN_vkImportFenceWin32HandleKHR vkImportFenceWin32HandleKHR = 0;
+      PFN_vkGetFenceWin32HandleKHR vkGetFenceWin32HandleKHR = 0;
+#else 
+      PFN_dummy vkImportFenceWin32HandleKHR_placeholder = 0;
+      PFN_dummy vkGetFenceWin32HandleKHR_placeholder = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_external_memory_fd ===
+      PFN_vkGetMemoryFdKHR vkGetMemoryFdKHR = 0;
+      PFN_vkGetMemoryFdPropertiesKHR vkGetMemoryFdPropertiesKHR = 0;
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_external_memory_win32 ===
+      PFN_vkGetMemoryWin32HandleKHR vkGetMemoryWin32HandleKHR = 0;
+      PFN_vkGetMemoryWin32HandlePropertiesKHR vkGetMemoryWin32HandlePropertiesKHR = 0;
+#else 
+      PFN_dummy vkGetMemoryWin32HandleKHR_placeholder = 0;
+      PFN_dummy vkGetMemoryWin32HandlePropertiesKHR_placeholder = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_external_semaphore_fd ===
+      PFN_vkImportSemaphoreFdKHR vkImportSemaphoreFdKHR = 0;
+      PFN_vkGetSemaphoreFdKHR vkGetSemaphoreFdKHR = 0;
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_external_semaphore_win32 ===
+      PFN_vkImportSemaphoreWin32HandleKHR vkImportSemaphoreWin32HandleKHR = 0;
+      PFN_vkGetSemaphoreWin32HandleKHR vkGetSemaphoreWin32HandleKHR = 0;
+#else 
+      PFN_dummy vkImportSemaphoreWin32HandleKHR_placeholder = 0;
+      PFN_dummy vkGetSemaphoreWin32HandleKHR_placeholder = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_fragment_shading_rate ===
+      PFN_vkCmdSetFragmentShadingRateKHR vkCmdSetFragmentShadingRateKHR = 0;
+
+  //=== VK_KHR_get_memory_requirements2 ===
+      PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR = 0;
+      PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR = 0;
+      PFN_vkGetImageSparseMemoryRequirements2KHR vkGetImageSparseMemoryRequirements2KHR = 0;
+
+  //=== VK_KHR_maintenance1 ===
+      PFN_vkTrimCommandPoolKHR vkTrimCommandPoolKHR = 0;
+
+  //=== VK_KHR_maintenance3 ===
+      PFN_vkGetDescriptorSetLayoutSupportKHR vkGetDescriptorSetLayoutSupportKHR = 0;
+
+  //=== VK_KHR_maintenance4 ===
+      PFN_vkGetDeviceBufferMemoryRequirementsKHR vkGetDeviceBufferMemoryRequirementsKHR = 0;
+      PFN_vkGetDeviceImageMemoryRequirementsKHR vkGetDeviceImageMemoryRequirementsKHR = 0;
+      PFN_vkGetDeviceImageSparseMemoryRequirementsKHR vkGetDeviceImageSparseMemoryRequirementsKHR = 0;
+
+  //=== VK_KHR_performance_query ===
+      PFN_vkAcquireProfilingLockKHR vkAcquireProfilingLockKHR = 0;
+      PFN_vkReleaseProfilingLockKHR vkReleaseProfilingLockKHR = 0;
+
+  //=== VK_KHR_pipeline_executable_properties ===
+      PFN_vkGetPipelineExecutablePropertiesKHR vkGetPipelineExecutablePropertiesKHR = 0;
+      PFN_vkGetPipelineExecutableStatisticsKHR vkGetPipelineExecutableStatisticsKHR = 0;
+      PFN_vkGetPipelineExecutableInternalRepresentationsKHR vkGetPipelineExecutableInternalRepresentationsKHR = 0;
+
+  //=== VK_KHR_present_wait ===
+      PFN_vkWaitForPresentKHR vkWaitForPresentKHR = 0;
+
+  //=== VK_KHR_push_descriptor ===
+      PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR = 0;
+
+  //=== VK_KHR_ray_tracing_maintenance1 ===
+      PFN_vkCmdTraceRaysIndirect2KHR vkCmdTraceRaysIndirect2KHR = 0;
+
+  //=== VK_KHR_ray_tracing_pipeline ===
+      PFN_vkCmdTraceRaysKHR vkCmdTraceRaysKHR = 0;
+      PFN_vkCreateRayTracingPipelinesKHR vkCreateRayTracingPipelinesKHR = 0;
+      PFN_vkGetRayTracingShaderGroupHandlesKHR vkGetRayTracingShaderGroupHandlesKHR = 0;
+      PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = 0;
+      PFN_vkCmdTraceRaysIndirectKHR vkCmdTraceRaysIndirectKHR = 0;
+      PFN_vkGetRayTracingShaderGroupStackSizeKHR vkGetRayTracingShaderGroupStackSizeKHR = 0;
+      PFN_vkCmdSetRayTracingPipelineStackSizeKHR vkCmdSetRayTracingPipelineStackSizeKHR = 0;
+
+  //=== VK_KHR_sampler_ycbcr_conversion ===
+      PFN_vkCreateSamplerYcbcrConversionKHR vkCreateSamplerYcbcrConversionKHR = 0;
+      PFN_vkDestroySamplerYcbcrConversionKHR vkDestroySamplerYcbcrConversionKHR = 0;
+
+  //=== VK_KHR_shared_presentable_image ===
+      PFN_vkGetSwapchainStatusKHR vkGetSwapchainStatusKHR = 0;
+
+  //=== VK_KHR_swapchain ===
+      PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR = 0;
+      PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR = 0;
+      PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR = 0;
+      PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR = 0;
+      PFN_vkQueuePresentKHR vkQueuePresentKHR = 0;
+
+  //=== VK_KHR_synchronization2 ===
+      PFN_vkCmdSetEvent2KHR vkCmdSetEvent2KHR = 0;
+      PFN_vkCmdResetEvent2KHR vkCmdResetEvent2KHR = 0;
+      PFN_vkCmdWaitEvents2KHR vkCmdWaitEvents2KHR = 0;
+      PFN_vkCmdPipelineBarrier2KHR vkCmdPipelineBarrier2KHR = 0;
+      PFN_vkCmdWriteTimestamp2KHR vkCmdWriteTimestamp2KHR = 0;
+      PFN_vkQueueSubmit2KHR vkQueueSubmit2KHR = 0;
+      PFN_vkCmdWriteBufferMarker2AMD vkCmdWriteBufferMarker2AMD = 0;
+      PFN_vkGetQueueCheckpointData2NV vkGetQueueCheckpointData2NV = 0;
+
+  //=== VK_KHR_timeline_semaphore ===
+      PFN_vkGetSemaphoreCounterValueKHR vkGetSemaphoreCounterValueKHR = 0;
+      PFN_vkWaitSemaphoresKHR vkWaitSemaphoresKHR = 0;
+      PFN_vkSignalSemaphoreKHR vkSignalSemaphoreKHR = 0;
+
+  //=== VK_KHR_video_decode_queue ===
+      PFN_vkCmdDecodeVideoKHR vkCmdDecodeVideoKHR = 0;
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  //=== VK_KHR_video_encode_queue ===
+      PFN_vkCmdEncodeVideoKHR vkCmdEncodeVideoKHR = 0;
+#else 
+      PFN_dummy vkCmdEncodeVideoKHR_placeholder = 0;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  //=== VK_KHR_video_queue ===
+      PFN_vkCreateVideoSessionKHR vkCreateVideoSessionKHR = 0;
+      PFN_vkDestroyVideoSessionKHR vkDestroyVideoSessionKHR = 0;
+      PFN_vkGetVideoSessionMemoryRequirementsKHR vkGetVideoSessionMemoryRequirementsKHR = 0;
+      PFN_vkBindVideoSessionMemoryKHR vkBindVideoSessionMemoryKHR = 0;
+      PFN_vkCreateVideoSessionParametersKHR vkCreateVideoSessionParametersKHR = 0;
+      PFN_vkUpdateVideoSessionParametersKHR vkUpdateVideoSessionParametersKHR = 0;
+      PFN_vkDestroyVideoSessionParametersKHR vkDestroyVideoSessionParametersKHR = 0;
+      PFN_vkCmdBeginVideoCodingKHR vkCmdBeginVideoCodingKHR = 0;
+      PFN_vkCmdEndVideoCodingKHR vkCmdEndVideoCodingKHR = 0;
+      PFN_vkCmdControlVideoCodingKHR vkCmdControlVideoCodingKHR = 0;
+
+  //=== VK_NVX_binary_import ===
+      PFN_vkCreateCuModuleNVX vkCreateCuModuleNVX = 0;
+      PFN_vkCreateCuFunctionNVX vkCreateCuFunctionNVX = 0;
+      PFN_vkDestroyCuModuleNVX vkDestroyCuModuleNVX = 0;
+      PFN_vkDestroyCuFunctionNVX vkDestroyCuFunctionNVX = 0;
+      PFN_vkCmdCuLaunchKernelNVX vkCmdCuLaunchKernelNVX = 0;
+
+  //=== VK_NVX_image_view_handle ===
+      PFN_vkGetImageViewHandleNVX vkGetImageViewHandleNVX = 0;
+      PFN_vkGetImageViewAddressNVX vkGetImageViewAddressNVX = 0;
+
+  //=== VK_NV_clip_space_w_scaling ===
+      PFN_vkCmdSetViewportWScalingNV vkCmdSetViewportWScalingNV = 0;
+
+  //=== VK_NV_copy_memory_indirect ===
+      PFN_vkCmdCopyMemoryIndirectNV vkCmdCopyMemoryIndirectNV = 0;
+      PFN_vkCmdCopyMemoryToImageIndirectNV vkCmdCopyMemoryToImageIndirectNV = 0;
+
+  //=== VK_NV_device_diagnostic_checkpoints ===
+      PFN_vkCmdSetCheckpointNV vkCmdSetCheckpointNV = 0;
+      PFN_vkGetQueueCheckpointDataNV vkGetQueueCheckpointDataNV = 0;
+
+  //=== VK_NV_device_generated_commands ===
+      PFN_vkGetGeneratedCommandsMemoryRequirementsNV vkGetGeneratedCommandsMemoryRequirementsNV = 0;
+      PFN_vkCmdPreprocessGeneratedCommandsNV vkCmdPreprocessGeneratedCommandsNV = 0;
+      PFN_vkCmdExecuteGeneratedCommandsNV vkCmdExecuteGeneratedCommandsNV = 0;
+      PFN_vkCmdBindPipelineShaderGroupNV vkCmdBindPipelineShaderGroupNV = 0;
+      PFN_vkCreateIndirectCommandsLayoutNV vkCreateIndirectCommandsLayoutNV = 0;
+      PFN_vkDestroyIndirectCommandsLayoutNV vkDestroyIndirectCommandsLayoutNV = 0;
+
+  //=== VK_NV_external_memory_rdma ===
+      PFN_vkGetMemoryRemoteAddressNV vkGetMemoryRemoteAddressNV = 0;
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_NV_external_memory_win32 ===
+      PFN_vkGetMemoryWin32HandleNV vkGetMemoryWin32HandleNV = 0;
+#else 
+      PFN_dummy vkGetMemoryWin32HandleNV_placeholder = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_NV_fragment_shading_rate_enums ===
+      PFN_vkCmdSetFragmentShadingRateEnumNV vkCmdSetFragmentShadingRateEnumNV = 0;
+
+  //=== VK_NV_memory_decompression ===
+      PFN_vkCmdDecompressMemoryNV vkCmdDecompressMemoryNV = 0;
+      PFN_vkCmdDecompressMemoryIndirectCountNV vkCmdDecompressMemoryIndirectCountNV = 0;
+
+  //=== VK_NV_mesh_shader ===
+      PFN_vkCmdDrawMeshTasksNV vkCmdDrawMeshTasksNV = 0;
+      PFN_vkCmdDrawMeshTasksIndirectNV vkCmdDrawMeshTasksIndirectNV = 0;
+      PFN_vkCmdDrawMeshTasksIndirectCountNV vkCmdDrawMeshTasksIndirectCountNV = 0;
+
+  //=== VK_NV_optical_flow ===
+      PFN_vkCreateOpticalFlowSessionNV vkCreateOpticalFlowSessionNV = 0;
+      PFN_vkDestroyOpticalFlowSessionNV vkDestroyOpticalFlowSessionNV = 0;
+      PFN_vkBindOpticalFlowSessionImageNV vkBindOpticalFlowSessionImageNV = 0;
+      PFN_vkCmdOpticalFlowExecuteNV vkCmdOpticalFlowExecuteNV = 0;
+
+  //=== VK_NV_ray_tracing ===
+      PFN_vkCreateAccelerationStructureNV vkCreateAccelerationStructureNV = 0;
+      PFN_vkDestroyAccelerationStructureNV vkDestroyAccelerationStructureNV = 0;
+      PFN_vkGetAccelerationStructureMemoryRequirementsNV vkGetAccelerationStructureMemoryRequirementsNV = 0;
+      PFN_vkBindAccelerationStructureMemoryNV vkBindAccelerationStructureMemoryNV = 0;
+      PFN_vkCmdBuildAccelerationStructureNV vkCmdBuildAccelerationStructureNV = 0;
+      PFN_vkCmdCopyAccelerationStructureNV vkCmdCopyAccelerationStructureNV = 0;
+      PFN_vkCmdTraceRaysNV vkCmdTraceRaysNV = 0;
+      PFN_vkCreateRayTracingPipelinesNV vkCreateRayTracingPipelinesNV = 0;
+      PFN_vkGetRayTracingShaderGroupHandlesNV vkGetRayTracingShaderGroupHandlesNV = 0;
+      PFN_vkGetAccelerationStructureHandleNV vkGetAccelerationStructureHandleNV = 0;
+      PFN_vkCmdWriteAccelerationStructuresPropertiesNV vkCmdWriteAccelerationStructuresPropertiesNV = 0;
+      PFN_vkCompileDeferredNV vkCompileDeferredNV = 0;
+
+  //=== VK_NV_scissor_exclusive ===
+      PFN_vkCmdSetExclusiveScissorNV vkCmdSetExclusiveScissorNV = 0;
+
+  //=== VK_NV_shading_rate_image ===
+      PFN_vkCmdBindShadingRateImageNV vkCmdBindShadingRateImageNV = 0;
+      PFN_vkCmdSetViewportShadingRatePaletteNV vkCmdSetViewportShadingRatePaletteNV = 0;
+      PFN_vkCmdSetCoarseSampleOrderNV vkCmdSetCoarseSampleOrderNV = 0;
+
+  //=== VK_QCOM_tile_properties ===
+      PFN_vkGetFramebufferTilePropertiesQCOM vkGetFramebufferTilePropertiesQCOM = 0;
+      PFN_vkGetDynamicRenderingTilePropertiesQCOM vkGetDynamicRenderingTilePropertiesQCOM = 0;
+
+  //=== VK_VALVE_descriptor_set_host_mapping ===
+      PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE vkGetDescriptorSetLayoutHostMappingInfoVALVE = 0;
+      PFN_vkGetDescriptorSetHostMappingVALVE vkGetDescriptorSetHostMappingVALVE = 0;
+
+    };
+
+
+  //========================================
+  //=== RAII HANDLE forward declarations ===
+  //========================================
+
+  // Forward declarations for every RAII wrapper class defined later in this
+  // header, grouped by the core Vulkan version or extension that introduces
+  // the underlying handle type. Platform-specific handles are guarded by the
+  // corresponding VK_USE_PLATFORM_* macro, mirroring the C headers.
+
+  //=== VK_VERSION_1_0 ===
+  class Instance;
+  class PhysicalDevice;
+  class Device;
+  class Queue;
+  class DeviceMemory;
+  class Fence;
+  class Semaphore;
+  class Event;
+  class QueryPool;
+  class Buffer;
+  class BufferView;
+  class Image;
+  class ImageView;
+  class ShaderModule;
+  class PipelineCache;
+  class Pipeline;
+  class PipelineLayout;
+  class Sampler;
+  class DescriptorPool;
+  class DescriptorSet;
+  class DescriptorSetLayout;
+  class Framebuffer;
+  class RenderPass;
+  class CommandPool;
+  class CommandBuffer;
+
+  //=== VK_VERSION_1_1 ===
+  class SamplerYcbcrConversion;
+  class DescriptorUpdateTemplate;
+
+  //=== VK_VERSION_1_3 ===
+  class PrivateDataSlot;
+
+  //=== VK_KHR_surface ===
+  class SurfaceKHR;
+
+  //=== VK_KHR_swapchain ===
+  class SwapchainKHR;
+
+  //=== VK_KHR_display ===
+  class DisplayKHR;
+  class DisplayModeKHR;
+
+  //=== VK_EXT_debug_report ===
+  class DebugReportCallbackEXT;
+
+  //=== VK_KHR_video_queue ===
+  class VideoSessionKHR;
+  class VideoSessionParametersKHR;
+
+  //=== VK_NVX_binary_import ===
+  class CuModuleNVX;
+  class CuFunctionNVX;
+
+  //=== VK_EXT_debug_utils ===
+  class DebugUtilsMessengerEXT;
+
+  //=== VK_KHR_acceleration_structure ===
+  class AccelerationStructureKHR;
+
+  //=== VK_EXT_validation_cache ===
+  class ValidationCacheEXT;
+
+  //=== VK_NV_ray_tracing ===
+  class AccelerationStructureNV;
+
+  //=== VK_INTEL_performance_query ===
+  class PerformanceConfigurationINTEL;
+
+  //=== VK_KHR_deferred_host_operations ===
+  class DeferredOperationKHR;
+
+  //=== VK_NV_device_generated_commands ===
+  class IndirectCommandsLayoutNV;
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_buffer_collection ===
+  class BufferCollectionFUCHSIA;
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  //=== VK_EXT_opacity_micromap ===
+  class MicromapEXT;
+
+  //=== VK_NV_optical_flow ===
+  class OpticalFlowSessionNV;
+
+
+  //====================
+  //=== RAII HANDLES ===
+  //====================
+
+
+    // Root object of the vk::raii hierarchy. Owns the ContextDispatcher that
+    // holds the global (pre-instance) entry points, and — when
+    // VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL is set — the DynamicLoader used to
+    // resolve vkGetInstanceProcAddr from the Vulkan library. Move-only.
+    class Context
+    {
+    public:
+#if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL
+      // Loads the Vulkan library via DynamicLoader and bootstraps the
+      // dispatcher from the resolved vkGetInstanceProcAddr.
+      Context()
+        : m_dispatcher( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ContextDispatcher(
+            m_dynamicLoader.getProcAddress<PFN_vkGetInstanceProcAddr>( "vkGetInstanceProcAddr" ) ) )
+#else
+      // Without the dynamic-loader tool the caller must supply
+      // vkGetInstanceProcAddr explicitly.
+      Context( PFN_vkGetInstanceProcAddr getInstanceProcAddr )
+        : m_dispatcher( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ContextDispatcher( getInstanceProcAddr ) )
+#endif
+      {}
+
+      ~Context() = default;
+
+      // Non-copyable: copying would duplicate ownership of the dispatcher.
+      Context( Context const & ) = delete;
+      // Move transfers the dispatcher (and, if present, the loader) from rhs.
+      Context( Context && rhs ) VULKAN_HPP_NOEXCEPT
+#if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL
+        : m_dynamicLoader( std::move( rhs.m_dynamicLoader ) )
+        , m_dispatcher( rhs.m_dispatcher.release() )
+#else
+        : m_dispatcher( rhs.m_dispatcher.release() )
+#endif
+      {}
+      Context & operator=( Context const & ) = delete;
+      Context & operator=( Context && rhs ) VULKAN_HPP_NOEXCEPT
+      {
+        if ( this != &rhs )
+        {
+#if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL
+          m_dynamicLoader = std::move( rhs.m_dynamicLoader );
+#endif
+          m_dispatcher.reset( rhs.m_dispatcher.release() );
+        }
+        return *this;
+      }
+
+      // Accessor for the global function-pointer table; asserts that the
+      // dispatcher was generated against the same VK_HEADER_VERSION.
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ContextDispatcher const * getDispatcher() const
+      {
+        VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+        return &*m_dispatcher;
+      }
+
+      void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Context & rhs )
+      {
+#if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL
+        std::swap( m_dynamicLoader, rhs.m_dynamicLoader );
+#endif
+        m_dispatcher.swap( rhs.m_dispatcher );
+      }
+
+
+  //=== VK_VERSION_1_0 ===
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Instance createInstance( VULKAN_HPP_NAMESPACE::InstanceCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+
+    
+    VULKAN_HPP_NODISCARD  std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties> enumerateInstanceExtensionProperties( Optional<const std::string> layerName VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const ;
+
+    
+    VULKAN_HPP_NODISCARD  std::vector<VULKAN_HPP_NAMESPACE::LayerProperties> enumerateInstanceLayerProperties(  ) const ;
+
+  //=== VK_VERSION_1_1 ===
+
+    
+    VULKAN_HPP_NODISCARD  uint32_t enumerateInstanceVersion(  ) const ;
+
+
+    private:
+#if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL
+      VULKAN_HPP_NAMESPACE::DynamicLoader                                                 m_dynamicLoader;
+#endif
+      std::unique_ptr<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ContextDispatcher> m_dispatcher;
+    };
+
+
+  // RAII wrapper around VkInstance. Owns the instance handle, the allocation
+  // callbacks it was created with, and an InstanceDispatcher carrying all
+  // instance-level function pointers. The instance is destroyed in clear()
+  // (and thus in the destructor) unless ownership is relinquished via
+  // release(). Move-only apart from the exception-free create() factory.
+  class Instance
+  {
+  public:
+    using CType = VkInstance;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eInstance;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eInstance;
+
+  public:
+
+    // Exception-free factory: returns the Instance on success, or the failing
+    // VULKAN_HPP_NAMESPACE::Result via android::base::unexpected.
+    static android::base::expected<Instance, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Context const & context, VULKAN_HPP_NAMESPACE::InstanceCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) 
+    {
+      VkInstance instance;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( context.getDispatcher()->vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkInstance*>( &instance ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        return android::base::unexpected(result);
+      }
+
+      // The handle-taking constructor below adopts ownership and builds the
+      // dispatcher; allocator is forwarded so clear() uses matching callbacks.
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance(context, instance, allocator);
+    }
+
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    // Throwing constructor: creates the instance and calls
+    // throwResultException() on failure.
+    Instance( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Context const & context, VULKAN_HPP_NAMESPACE::InstanceCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
+    {
+      // FIX: write the created handle into the m_instance member. The previous
+      // code referenced a non-existent local 'instance' here, which fails to
+      // compile whenever VULKAN_HPP_NO_EXCEPTIONS is not defined (upstream
+      // Vulkan-Hpp also writes into &m_instance).
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( context.getDispatcher()->vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ), reinterpret_cast<VkInstance*>( &m_instance ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, "vkCreateInstance" );
+      }
+      m_dispatcher.reset( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher( context.getDispatcher()->vkGetInstanceProcAddr, static_cast<VkInstance>( m_instance ) ) );
+    }
+#endif
+
+    // Adopts an already-created VkInstance and builds its dispatcher.
+    Instance( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Context const & context, VkInstance instance, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_instance( instance ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
+    {
+      m_dispatcher.reset( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher( context.getDispatcher()->vkGetInstanceProcAddr, static_cast<VkInstance>( m_instance ) ) );
+    }
+
+    // Empty (null) instance; destroys nothing.
+    Instance( std::nullptr_t ) {}
+
+    ~Instance()
+    {
+      clear();
+    }
+
+    Instance() = delete;
+    // Non-copyable, movable: moving transfers handle, allocator and dispatcher.
+    Instance( Instance const & ) = delete;
+    Instance( Instance && rhs ) VULKAN_HPP_NOEXCEPT
+      : m_instance( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_instance, {} ) ), m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ), m_dispatcher( rhs.m_dispatcher.release() )
+    {}
+    Instance & operator=( Instance const & ) = delete;
+    Instance & operator=( Instance && rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      if ( this != &rhs )
+      {
+        // Destroy any instance we currently own before taking over rhs's.
+        clear();
+        m_instance = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_instance, {} );
+        m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
+        m_dispatcher.reset( rhs.m_dispatcher.release() );
+      }
+      return *this;
+    }
+
+    VULKAN_HPP_NAMESPACE::Instance const & operator*() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_instance;
+    }
+
+    // Destroys the owned instance (if any) with the stored allocator and
+    // resets this wrapper to the null state.
+    void clear() VULKAN_HPP_NOEXCEPT
+    {
+      if ( m_instance )
+      {
+        getDispatcher()->vkDestroyInstance( static_cast<VkInstance>( m_instance ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+      }
+      m_instance = nullptr;
+      m_allocator = nullptr;
+      m_dispatcher = nullptr;
+    }
+
+    // Relinquishes ownership: returns the handle without destroying it and
+    // leaves this wrapper in the null state.
+    VULKAN_HPP_NAMESPACE::Instance release()
+    {
+      m_allocator = nullptr;
+      m_dispatcher = nullptr;
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_instance, nullptr );
+    }
+
+    // Accessor for the instance-level function-pointer table; asserts the
+    // dispatcher matches this header's VK_HEADER_VERSION.
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * getDispatcher() const
+    {
+      VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+      return &*m_dispatcher;
+    }
+
+    void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      std::swap( m_instance, rhs.m_instance );
+      std::swap( m_allocator, rhs.m_allocator );
+      std::swap( m_dispatcher, rhs.m_dispatcher );
+    }
+
+
+  //=== VK_VERSION_1_0 ===
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice> enumeratePhysicalDevices(  ) const; 
+#endif
+
+    
+    VULKAN_HPP_NODISCARD  PFN_vkVoidFunction getProcAddr( const std::string & name ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_VERSION_1_1 ===
+
+    
+    VULKAN_HPP_NODISCARD  std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties> enumeratePhysicalDeviceGroups(  ) const ;
+
+  //=== VK_KHR_display ===
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR createDisplayPlaneSurfaceKHR( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+
+#if defined( VK_USE_PLATFORM_XLIB_KHR )
+  //=== VK_KHR_xlib_surface ===
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR createXlibSurfaceKHR( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+#if defined( VK_USE_PLATFORM_XCB_KHR )
+  //=== VK_KHR_xcb_surface ===
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR createXcbSurfaceKHR( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
+  //=== VK_KHR_wayland_surface ===
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR createWaylandSurfaceKHR( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  //=== VK_KHR_android_surface ===
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR createAndroidSurfaceKHR( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_win32_surface ===
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR createWin32SurfaceKHR( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_EXT_debug_report ===
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DebugReportCallbackEXT createDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+
+    
+     void debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_, uint64_t object, size_t location, int32_t messageCode, const std::string & layerPrefix, const std::string & message ) const  VULKAN_HPP_NOEXCEPT;
+
+#if defined( VK_USE_PLATFORM_GGP )
+  //=== VK_GGP_stream_descriptor_surface ===
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR createStreamDescriptorSurfaceGGP( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+#endif /*VK_USE_PLATFORM_GGP*/
+
+#if defined( VK_USE_PLATFORM_VI_NN )
+  //=== VK_NN_vi_surface ===
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR createViSurfaceNN( VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+#endif /*VK_USE_PLATFORM_VI_NN*/
+
+  //=== VK_KHR_device_group_creation ===
+
+    
+    VULKAN_HPP_NODISCARD  std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties> enumeratePhysicalDeviceGroupsKHR(  ) const ;
+
+#if defined( VK_USE_PLATFORM_IOS_MVK )
+  //=== VK_MVK_ios_surface ===
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR createIOSSurfaceMVK( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+#if defined( VK_USE_PLATFORM_MACOS_MVK )
+  //=== VK_MVK_macos_surface ===
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR createMacOSSurfaceMVK( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+
+  //=== VK_EXT_debug_utils ===
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DebugUtilsMessengerEXT createDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+
+    
+     void submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT & callbackData ) const  VULKAN_HPP_NOEXCEPT;
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_imagepipe_surface ===
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR createImagePipeSurfaceFUCHSIA( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+  //=== VK_EXT_metal_surface ===
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR createMetalSurfaceEXT( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  //=== VK_EXT_headless_surface ===
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR createHeadlessSurfaceEXT( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+
+#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
+  //=== VK_EXT_directfb_surface ===
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR createDirectFBSurfaceEXT( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+
+#if defined( VK_USE_PLATFORM_SCREEN_QNX )
+  //=== VK_QNX_screen_surface ===
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR createScreenSurfaceQNX( VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+
+
+  private:
+    // Owned VkInstance handle (null when empty / after release()).
+    VULKAN_HPP_NAMESPACE::Instance m_instance = {};
+    // Allocation callbacks passed to vkCreateInstance; reused for destruction.
+    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
+    std::unique_ptr<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher> m_dispatcher;
+  };
+
+  class PhysicalDevice
+  {
+  public:
+    using CType = VkPhysicalDevice;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePhysicalDevice;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePhysicalDevice;
+
+  public:
+
+
+    PhysicalDevice( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, VkPhysicalDevice physicalDevice )
+      : m_physicalDevice( physicalDevice ), m_dispatcher( instance.getDispatcher() )
+    {}
+
+
+    PhysicalDevice( std::nullptr_t ) {}
+
+    ~PhysicalDevice()
+    {
+      clear();
+    }
+
+    PhysicalDevice() = delete;
+      PhysicalDevice( PhysicalDevice const & rhs ) : m_physicalDevice( rhs.m_physicalDevice ), m_dispatcher( rhs.m_dispatcher ) {}
+    PhysicalDevice( PhysicalDevice && rhs ) VULKAN_HPP_NOEXCEPT
+      : m_physicalDevice( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_physicalDevice, {} ) ), m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
+    {}
+      PhysicalDevice & operator=( PhysicalDevice const & rhs )
+      {
+        m_physicalDevice = rhs.m_physicalDevice;
+        m_dispatcher    = rhs.m_dispatcher;
+        return *this;
+      }
+    PhysicalDevice & operator=( PhysicalDevice && rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      if ( this != &rhs )
+      {
+
+          m_physicalDevice = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_physicalDevice, {} );
+        m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+      }
+      return *this;
+    }
+
+    VULKAN_HPP_NAMESPACE::PhysicalDevice const & operator*() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_physicalDevice;
+    }
+
+    void clear() VULKAN_HPP_NOEXCEPT
+    {
+
+        m_physicalDevice = nullptr;
+        m_dispatcher = nullptr;
+    }
+
+    VULKAN_HPP_NAMESPACE::PhysicalDevice release()
+    {
+
+        m_dispatcher = nullptr;
+        return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_physicalDevice, nullptr );
+    }
+
+
+
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * getDispatcher() const
+    {
+      VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+      return m_dispatcher;
+    }
+
+    void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+
+      std::swap( m_physicalDevice, rhs.m_physicalDevice );
+      std::swap( m_dispatcher, rhs.m_dispatcher );
+    }
+
+
+  //=== VK_VERSION_1_0 ===
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures getFeatures(  ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::FormatProperties getFormatProperties( VULKAN_HPP_NAMESPACE::Format format ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::ImageFormatProperties getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const ;
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties getProperties(  ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+    VULKAN_HPP_NODISCARD  std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties> getQueueFamilyProperties(  ) const ;
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties getMemoryProperties(  ) const  VULKAN_HPP_NOEXCEPT;
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Device createDevice( VULKAN_HPP_NAMESPACE::DeviceCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+
+    
+    VULKAN_HPP_NODISCARD  std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties> enumerateDeviceExtensionProperties( Optional<const std::string> layerName VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const ;
+
+    
+    VULKAN_HPP_NODISCARD  std::vector<VULKAN_HPP_NAMESPACE::LayerProperties> enumerateDeviceLayerProperties(  ) const ;
+
+    
+    VULKAN_HPP_NODISCARD  std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties> getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling ) const ;
+
+  //=== VK_VERSION_1_1 ===
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 getFeatures2(  ) const  VULKAN_HPP_NOEXCEPT;
+
+    template <typename X, typename Y, typename... Z>
+    VULKAN_HPP_NODISCARD  StructureChain<X, Y, Z...> getFeatures2(  ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 getProperties2(  ) const  VULKAN_HPP_NOEXCEPT;
+
+    template <typename X, typename Y, typename... Z>
+    VULKAN_HPP_NODISCARD  StructureChain<X, Y, Z...> getProperties2(  ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::FormatProperties2 getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format ) const  VULKAN_HPP_NOEXCEPT;
+
+    template <typename X, typename Y, typename... Z>
+    VULKAN_HPP_NODISCARD  StructureChain<X, Y, Z...> getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::ImageFormatProperties2 getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const ;
+
+    template <typename X, typename Y, typename... Z>
+    VULKAN_HPP_NODISCARD  StructureChain<X, Y, Z...> getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const ;
+
+    
+    VULKAN_HPP_NODISCARD  std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> getQueueFamilyProperties2(  ) const ;
+
+    template <typename StructureChain>
+    VULKAN_HPP_NODISCARD  std::vector<StructureChain> getQueueFamilyProperties2(  ) const ;
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 getMemoryProperties2(  ) const  VULKAN_HPP_NOEXCEPT;
+
+    template <typename X, typename Y, typename... Z>
+    VULKAN_HPP_NODISCARD  StructureChain<X, Y, Z...> getMemoryProperties2(  ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+    VULKAN_HPP_NODISCARD  std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2> getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo ) const ;
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::ExternalBufferProperties getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::ExternalFenceProperties getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_VERSION_1_3 ===
+
+    
+    VULKAN_HPP_NODISCARD  std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties> getToolProperties(  ) const ;
+
+  //=== VK_KHR_surface ===
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::Bool32 getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const ;
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const ;
+
+    
+    VULKAN_HPP_NODISCARD  std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR> getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const ;
+
+    
+    VULKAN_HPP_NODISCARD  std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR> getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const ;
+
+  //=== VK_KHR_swapchain ===
+
+    
+    VULKAN_HPP_NODISCARD  std::vector<VULKAN_HPP_NAMESPACE::Rect2D> getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const ;
+
+  //=== VK_KHR_display ===
+
+    
+    VULKAN_HPP_NODISCARD  std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR> getDisplayPropertiesKHR(  ) const ;
+
+    
+    VULKAN_HPP_NODISCARD  std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR> getDisplayPlanePropertiesKHR(  ) const ;
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_RAII_NAMESPACE::DisplayKHR> getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex ) const; 
+#endif
+
+#if defined( VK_USE_PLATFORM_XLIB_KHR )
+  //=== VK_KHR_xlib_surface ===
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::Bool32 getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display & dpy, VisualID visualID ) const  VULKAN_HPP_NOEXCEPT;
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+#if defined( VK_USE_PLATFORM_XCB_KHR )
+  //=== VK_KHR_xcb_surface ===
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::Bool32 getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t & connection, xcb_visualid_t visual_id ) const  VULKAN_HPP_NOEXCEPT;
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
+  //=== VK_KHR_wayland_surface ===
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::Bool32 getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display & display ) const  VULKAN_HPP_NOEXCEPT;
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_win32_surface ===
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::Bool32 getWin32PresentationSupportKHR( uint32_t queueFamilyIndex ) const  VULKAN_HPP_NOEXCEPT;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_video_queue ===
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile ) const ;
+
+    template <typename X, typename Y, typename... Z>
+    VULKAN_HPP_NODISCARD  StructureChain<X, Y, Z...> getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile ) const ;
+
+    
+    VULKAN_HPP_NODISCARD  std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR> getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo ) const ;
+
+  //=== VK_NV_external_memory_capabilities ===
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const ;
+
+  //=== VK_KHR_get_physical_device_properties2 ===
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 getFeatures2KHR(  ) const  VULKAN_HPP_NOEXCEPT;
+
+    template <typename X, typename Y, typename... Z>
+    VULKAN_HPP_NODISCARD  StructureChain<X, Y, Z...> getFeatures2KHR(  ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 getProperties2KHR(  ) const  VULKAN_HPP_NOEXCEPT;
+
+    template <typename X, typename Y, typename... Z>
+    VULKAN_HPP_NODISCARD  StructureChain<X, Y, Z...> getProperties2KHR(  ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::FormatProperties2 getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format ) const  VULKAN_HPP_NOEXCEPT;
+
+    template <typename X, typename Y, typename... Z>
+    VULKAN_HPP_NODISCARD  StructureChain<X, Y, Z...> getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::ImageFormatProperties2 getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const ;
+
+    template <typename X, typename Y, typename... Z>
+    VULKAN_HPP_NODISCARD  StructureChain<X, Y, Z...> getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const ;
+
+    
+    VULKAN_HPP_NODISCARD  std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> getQueueFamilyProperties2KHR(  ) const ;
+
+    template <typename StructureChain>
+    VULKAN_HPP_NODISCARD  std::vector<StructureChain> getQueueFamilyProperties2KHR(  ) const ;
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 getMemoryProperties2KHR(  ) const  VULKAN_HPP_NOEXCEPT;
+
+    template <typename X, typename Y, typename... Z>
+    VULKAN_HPP_NODISCARD  StructureChain<X, Y, Z...> getMemoryProperties2KHR(  ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+    VULKAN_HPP_NODISCARD  std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2> getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo ) const ;
+
+  //=== VK_KHR_external_memory_capabilities ===
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::ExternalBufferProperties getExternalBufferPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_KHR_external_semaphore_capabilities ===
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties getExternalSemaphorePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo ) const  VULKAN_HPP_NOEXCEPT;
+
+#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT )
+  //=== VK_EXT_acquire_xlib_display ===
+
+    
+     void acquireXlibDisplayEXT( Display & dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display ) const ;
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DisplayKHR getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput ) const; 
+#endif
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+
+  //=== VK_EXT_display_surface_counter ===
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const ;
+
+  //=== VK_KHR_external_fence_capabilities ===
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::ExternalFenceProperties getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_KHR_performance_query ===
+
+    
+    VULKAN_HPP_NODISCARD  std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR>, std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR>> enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex ) const ;
+
+    
+    VULKAN_HPP_NODISCARD  uint32_t getQueueFamilyPerformanceQueryPassesKHR( const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_KHR_get_surface_capabilities2 ===
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const ;
+
+    template <typename X, typename Y, typename... Z>
+    VULKAN_HPP_NODISCARD  StructureChain<X, Y, Z...> getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const ;
+
+    
+    VULKAN_HPP_NODISCARD  std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR> getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const ;
+
+    template <typename StructureChain>
+    VULKAN_HPP_NODISCARD  std::vector<StructureChain> getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const ;
+
+  //=== VK_KHR_get_display_properties2 ===
+
+    
+    VULKAN_HPP_NODISCARD  std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR> getDisplayProperties2KHR(  ) const ;
+
+    
+    VULKAN_HPP_NODISCARD  std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR> getDisplayPlaneProperties2KHR(  ) const ;
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR & displayPlaneInfo ) const ;
+
+  //=== VK_EXT_sample_locations ===
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_EXT_calibrated_timestamps ===
+
+    
+    VULKAN_HPP_NODISCARD  std::vector<VULKAN_HPP_NAMESPACE::TimeDomainEXT> getCalibrateableTimeDomainsEXT(  ) const ;
+
+  //=== VK_KHR_fragment_shading_rate ===
+
+    
+    VULKAN_HPP_NODISCARD  std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR> getFragmentShadingRatesKHR(  ) const ;
+
+  //=== VK_EXT_tooling_info ===
+
+    
+    VULKAN_HPP_NODISCARD  std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties> getToolPropertiesEXT(  ) const ;
+
+  //=== VK_NV_cooperative_matrix ===
+
+    
+    VULKAN_HPP_NODISCARD  std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV> getCooperativeMatrixPropertiesNV(  ) const ;
+
+  //=== VK_NV_coverage_reduction_mode ===
+
+    
+    VULKAN_HPP_NODISCARD  std::vector<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV> getSupportedFramebufferMixedSamplesCombinationsNV(  ) const ;
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_EXT_full_screen_exclusive ===
+
+    
+    VULKAN_HPP_NODISCARD  std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR> getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const ;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_EXT_acquire_drm_display ===
+
+    
+     void acquireDrmDisplayEXT( int32_t drmFd, VULKAN_HPP_NAMESPACE::DisplayKHR display ) const ;
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DisplayKHR getDrmDisplayEXT( int32_t drmFd, uint32_t connectorId ) const; 
+#endif
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_NV_acquire_winrt_display ===
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DisplayKHR getWinrtDisplayNV( uint32_t deviceRelativeId ) const; 
+#endif
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
+  //=== VK_EXT_directfb_surface ===
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::Bool32 getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex, IDirectFB & dfb ) const  VULKAN_HPP_NOEXCEPT;
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+
+#if defined( VK_USE_PLATFORM_SCREEN_QNX )
+  //=== VK_QNX_screen_surface ===
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::Bool32 getScreenPresentationSupportQNX( uint32_t queueFamilyIndex, struct _screen_window & window ) const  VULKAN_HPP_NOEXCEPT;
+#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+
+  //=== VK_NV_optical_flow ===
+
+    
+    VULKAN_HPP_NODISCARD  std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV> getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo ) const ;
+
+
+  private:
+
+    
+    VULKAN_HPP_NAMESPACE::PhysicalDevice m_physicalDevice = {};
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * m_dispatcher = nullptr;
+  };
+
+  // RAII-style container for the physical devices of an Instance: a vector of
+  // VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice wrappers produced by
+  // vkEnumeratePhysicalDevices. Move-only, like the handles it owns.
+  class PhysicalDevices : public std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice>
+  {
+  public:
+    
+
+    // Exception-free factory (local addition to the generated code): enumerates
+    // the physical devices of `instance` and returns the failing
+    // VULKAN_HPP_NAMESPACE::Result via android::base::expected instead of throwing.
+    static android::base::expected<PhysicalDevices, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance ) 
+    {
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * dispatcher = instance.getDispatcher();
+      std::vector<VkPhysicalDevice> physicalDevices;
+      uint32_t physicalDeviceCount;
+      VULKAN_HPP_NAMESPACE::Result result;
+      // Standard Vulkan two-call enumeration: first query the count, then fetch
+      // the handles. Repeats while eIncomplete in case the device count changed
+      // between the two calls.
+      do
+      {
+        result = static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->vkEnumeratePhysicalDevices( static_cast<VkInstance>( *instance ), &physicalDeviceCount, nullptr ) );
+        if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && physicalDeviceCount )
+        {
+          physicalDevices.resize( physicalDeviceCount );
+          result = static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->vkEnumeratePhysicalDevices( static_cast<VkInstance>( *instance ), &physicalDeviceCount, physicalDevices.data() ) );
+        }
+      } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        return android::base::unexpected(result);
+      }
+      // Start from the empty (nullptr) state, then wrap each raw handle.
+      PhysicalDevices ret(nullptr);
+      ret.reserve( physicalDeviceCount );
+      for ( auto const & physicalDevice : physicalDevices )
+      {
+        ret.emplace_back( instance, physicalDevice );
+      }
+      // std::move is required here: the return type differs from PhysicalDevices,
+      // and the copy constructor is deleted.
+      return std::move(ret);
+    }
+
+    
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    // Throwing constructor (the original generated path): same two-call
+    // enumeration as create(), but reports failure via throwResultException.
+    PhysicalDevices( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance )
+    {
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * dispatcher = instance.getDispatcher();
+      std::vector<VkPhysicalDevice> physicalDevices;
+      uint32_t physicalDeviceCount;
+      VULKAN_HPP_NAMESPACE::Result result;
+      do
+      {
+        result = static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->vkEnumeratePhysicalDevices( static_cast<VkInstance>( *instance ), &physicalDeviceCount, nullptr ) );
+        if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && physicalDeviceCount )
+        {
+          physicalDevices.resize( physicalDeviceCount );
+          result = static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->vkEnumeratePhysicalDevices( static_cast<VkInstance>( *instance ), &physicalDeviceCount, physicalDevices.data() ) );
+        }
+      } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
+      if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() );
+        this->reserve( physicalDeviceCount );
+        for ( auto const & physicalDevice : physicalDevices )
+        {
+          this->emplace_back( instance, physicalDevice );
+        }
+      }
+      else
+      {
+        throwResultException( result, "vkEnumeratePhysicalDevices" );
+      }
+    }
+#endif
+
+    // Empty-state constructor; used by create() before filling in the handles.
+    PhysicalDevices( std::nullptr_t ) {}
+
+    // Move-only semantics: copying would duplicate ownership of the wrappers.
+    PhysicalDevices() = delete;
+    PhysicalDevices( PhysicalDevices const & ) = delete;
+    PhysicalDevices( PhysicalDevices && rhs ) = default;
+    PhysicalDevices & operator=( PhysicalDevices const & ) = delete;
+    PhysicalDevices & operator=( PhysicalDevices && rhs ) = default;
+  };
+
+
+  class Device
+  {
+  public:
+    using CType = VkDevice;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDevice;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDevice;
+
+  public:
+
+
+    // Exception-free factory (local addition to the generated code): wraps
+    // vkCreateDevice and returns the failing VULKAN_HPP_NAMESPACE::Result via
+    // android::base::expected instead of throwing.
+    static android::base::expected<Device, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice, VULKAN_HPP_NAMESPACE::DeviceCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) 
+    {
+      VkDevice handle;
+      // Unwrap the optional allocator once, up front.
+      const VkAllocationCallbacks * vkAllocator = reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) );
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( physicalDevice.getDispatcher()->vkCreateDevice( static_cast<VkPhysicalDevice>( *physicalDevice ), reinterpret_cast<const VkDeviceCreateInfo *>( &createInfo ), vkAllocator, &handle ) );
+      if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        // Hand the raw handle to the adopting constructor, which also builds
+        // the device-level dispatch table.
+        return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device(physicalDevice, handle, allocator);
+      }
+      return android::base::unexpected(result);
+    }
+
+
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    // Throwing constructor (the original generated path): creates a VkDevice
+    // from `createInfo` and builds the device-level dispatch table; reports
+    // failure via throwResultException.
+    Device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice, VULKAN_HPP_NAMESPACE::DeviceCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
+    {
+      // Fix: vkCreateDevice must write into the m_device member (there is no
+      // local `device` in this constructor, unlike in create()); this matches
+      // the upstream vulkan_raii.hpp generator output.
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( physicalDevice.getDispatcher()->vkCreateDevice( static_cast<VkPhysicalDevice>( *physicalDevice ), reinterpret_cast<const VkDeviceCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ), reinterpret_cast<VkDevice*>( &m_device ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, "vkCreateDevice" );
+      }
+        m_dispatcher.reset( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher( physicalDevice.getDispatcher()->vkGetDeviceProcAddr, static_cast<VkDevice>( m_device ) ) );
+    }
+#endif
+
+    // Adopting constructor: takes ownership of an already-created VkDevice
+    // handle (used by the exception-free create() factory) and builds the
+    // device-level dispatch table for it.
+    Device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice, VkDevice device, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( device ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
+    {
+      m_dispatcher.reset( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher( physicalDevice.getDispatcher()->vkGetDeviceProcAddr, static_cast<VkDevice>( m_device ) ) );
+    }
+
+
+    // Empty-state constructor: no handle owned, no dispatcher.
+    Device( std::nullptr_t ) {}
+
+    // Destroys the owned VkDevice, if any (see clear()).
+    ~Device()
+    {
+      clear();
+    }
+
+    // Move-only: copying would duplicate ownership of the VkDevice.
+    Device() = delete;
+      Device( Device const & ) = delete;
+    // Move constructor: steals handle and allocator via exchange(), and takes
+    // over the dispatcher by release()-ing it from rhs, leaving rhs empty.
+    Device( Device && rhs ) VULKAN_HPP_NOEXCEPT
+      : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ), m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ), m_dispatcher( rhs.m_dispatcher.release() )
+    {}
+      Device & operator=( Device const & ) = delete;
+    // Move assignment: destroys the currently owned device first, then adopts
+    // rhs's state exactly as the move constructor does. Self-move safe.
+    Device & operator=( Device && rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      if ( this != &rhs )
+      {
+          clear();
+          m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
+          m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
+        m_dispatcher.reset( rhs.m_dispatcher.release() );
+      }
+      return *this;
+    }
+
+    // Access the underlying handle wrapper without transferring ownership.
+    VULKAN_HPP_NAMESPACE::Device const & operator*() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_device;
+    }
+
+    // Destroys the owned VkDevice (if any) through the device dispatcher,
+    // honoring the stored allocation callbacks, then resets this wrapper to
+    // the empty state. Safe to call repeatedly.
+    void clear() VULKAN_HPP_NOEXCEPT
+    {
+      if ( m_device )
+      {
+        getDispatcher()->vkDestroyDevice( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+      }
+      m_device = nullptr;
+      m_allocator = nullptr;
+      m_dispatcher = nullptr;
+    }
+
+    // Relinquishes ownership: returns the handle WITHOUT destroying it and
+    // leaves this wrapper empty. Note the dispatcher and allocator are dropped,
+    // so the caller becomes responsible for vkDestroyDevice.
+    VULKAN_HPP_NAMESPACE::Device release()
+    {
+
+      m_allocator = nullptr;
+        m_dispatcher = nullptr;
+        return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_device, nullptr );
+    }
+
+
+
+    // Returns the device-level dispatch table. Asserts (debug builds) that the
+    // table was built against the same Vulkan header version as this code.
+    // Precondition: a dispatcher exists (i.e. this object is not empty).
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
+    {
+      VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+      return &*m_dispatcher;
+    }
+
+    // Exchanges all owned state (handle, allocator, dispatcher) with rhs.
+    void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+
+      std::swap( m_device, rhs.m_device );
+      std::swap( m_allocator, rhs.m_allocator );
+      std::swap( m_dispatcher, rhs.m_dispatcher );
+    }
+
+
+  //=== VK_VERSION_1_0 ===
+
+    
+    VULKAN_HPP_NODISCARD  PFN_vkVoidFunction getProcAddr( const std::string & name ) const  VULKAN_HPP_NOEXCEPT;
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Queue getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex ) const; 
+#endif
+
+    
+     void waitIdle(  ) const ;
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DeviceMemory allocateMemory( VULKAN_HPP_NAMESPACE::MemoryAllocateInfo const & allocateInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+
+    
+     void flushMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges ) const ;
+
+    
+     void invalidateMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges ) const ;
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Fence createFence( VULKAN_HPP_NAMESPACE::FenceCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+
+    
+     void resetFences( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences ) const ;
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::Result waitForFences( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout ) const ;
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Semaphore createSemaphore( VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Event createEvent( VULKAN_HPP_NAMESPACE::EventCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::QueryPool createQueryPool( VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Buffer createBuffer( VULKAN_HPP_NAMESPACE::BufferCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::BufferView createBufferView( VULKAN_HPP_NAMESPACE::BufferViewCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Image createImage( VULKAN_HPP_NAMESPACE::ImageCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::ImageView createImageView( VULKAN_HPP_NAMESPACE::ImageViewCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::ShaderModule createShaderModule( VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::PipelineCache createPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_RAII_NAMESPACE::Pipeline> createGraphicsPipelines( VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Pipeline createGraphicsPipeline( VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache, VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_RAII_NAMESPACE::Pipeline> createComputePipelines( VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Pipeline createComputePipeline( VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache, VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::PipelineLayout createPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Sampler createSampler( VULKAN_HPP_NAMESPACE::SamplerCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DescriptorSetLayout createDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DescriptorPool createDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_RAII_NAMESPACE::DescriptorSet> allocateDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo const & allocateInfo ) const; 
+#endif
+
+    
+     void updateDescriptorSets( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CopyDescriptorSet> const & descriptorCopies ) const  VULKAN_HPP_NOEXCEPT;
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Framebuffer createFramebuffer( VULKAN_HPP_NAMESPACE::FramebufferCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::RenderPass createRenderPass( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::CommandPool createCommandPool( VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_RAII_NAMESPACE::CommandBuffer> allocateCommandBuffers( VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo const & allocateInfo ) const; 
+#endif
+
+  //=== VK_VERSION_1_1 ===
+
+    
+     void bindBufferMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos ) const ;
+
+    
+     void bindImageMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos ) const ;
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const  VULKAN_HPP_NOEXCEPT;
+
+    template <typename X, typename Y, typename... Z>
+    VULKAN_HPP_NODISCARD  StructureChain<X, Y, Z...> getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const  VULKAN_HPP_NOEXCEPT;
+
+    template <typename X, typename Y, typename... Z>
+    VULKAN_HPP_NODISCARD  StructureChain<X, Y, Z...> getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+    VULKAN_HPP_NODISCARD  std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2> getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info ) const ;
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Queue getQueue2( VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 const & queueInfo ) const; 
+#endif
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SamplerYcbcrConversion createSamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate createDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const  VULKAN_HPP_NOEXCEPT;
+
+    template <typename X, typename Y, typename... Z>
+    VULKAN_HPP_NODISCARD  StructureChain<X, Y, Z...> getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_VERSION_1_2 ===
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::RenderPass createRenderPass2( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::Result waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, uint64_t timeout ) const ;
+
+    
+     void signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo ) const ;
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::DeviceAddress getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+    VULKAN_HPP_NODISCARD  uint64_t getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+    VULKAN_HPP_NODISCARD  uint64_t getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_VERSION_1_3 ===
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::PrivateDataSlot createPrivateDataSlot( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+
+    
+     void setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, uint64_t data ) const ;
+
+    
+    VULKAN_HPP_NODISCARD  uint64_t getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const  VULKAN_HPP_NOEXCEPT;
+
+    template <typename X, typename Y, typename... Z>
+    VULKAN_HPP_NODISCARD  StructureChain<X, Y, Z...> getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const  VULKAN_HPP_NOEXCEPT;
+
+    template <typename X, typename Y, typename... Z>
+    VULKAN_HPP_NODISCARD  StructureChain<X, Y, Z...> getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+    VULKAN_HPP_NODISCARD  std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2> getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const ;
+
+  //=== VK_KHR_swapchain ===
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR createSwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR getGroupPresentCapabilitiesKHR(  ) const ;
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const ;
+
+    
+    VULKAN_HPP_NODISCARD  std::pair<VULKAN_HPP_NAMESPACE::Result, uint32_t> acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR & acquireInfo ) const ;
+
+  //=== VK_KHR_display_swapchain ===
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR> createSharedSwapchainsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR createSharedSwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+
+  //=== VK_EXT_debug_marker ===
+
+    
+     void debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT & tagInfo ) const ;
+
+    
+     void debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT & nameInfo ) const ;
+
+  //=== VK_KHR_video_queue ===
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::VideoSessionKHR createVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::VideoSessionParametersKHR createVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+
+  //=== VK_NVX_binary_import ===
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::CuModuleNVX createCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::CuFunctionNVX createCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+
+  //=== VK_NVX_image_view_handle ===
+
+    
+    VULKAN_HPP_NODISCARD  uint32_t getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & info ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_KHR_device_group ===
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex ) const  VULKAN_HPP_NOEXCEPT;
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_external_memory_win32 ===
+
+    
+    VULKAN_HPP_NODISCARD  HANDLE getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR & getWin32HandleInfo ) const ;
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle ) const ;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_external_memory_fd ===
+
+    
+    VULKAN_HPP_NODISCARD  int getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR & getFdInfo ) const ;
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd ) const ;
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_external_semaphore_win32 ===
+
+    
+     void importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo ) const ;
+
+    
+    VULKAN_HPP_NODISCARD  HANDLE getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo ) const ;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_external_semaphore_fd ===
+
+    
+     void importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo ) const ;
+
+    
+    VULKAN_HPP_NODISCARD  int getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR & getFdInfo ) const ;
+
+  //=== VK_KHR_descriptor_update_template ===
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate createDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+
+    
+     void destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_EXT_display_control ===
+
+    
+     void displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT & displayPowerInfo ) const ;
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Fence registerEventEXT( VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const & deviceEventInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Fence registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR const & display, VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT const & displayEventInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+
+  //=== VK_EXT_hdr_metadata ===
+
+    
+     void setHdrMetadataEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const & swapchains, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::HdrMetadataEXT> const & metadata ) const ;
+
+  //=== VK_KHR_create_renderpass2 ===
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::RenderPass createRenderPass2KHR( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_external_fence_win32 ===
+
+    
+     void importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo ) const ;
+
+    
+    VULKAN_HPP_NODISCARD  HANDLE getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR & getWin32HandleInfo ) const ;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_external_fence_fd ===
+
+    
+     void importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR & importFenceFdInfo ) const ;
+
+    
+    VULKAN_HPP_NODISCARD  int getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR & getFdInfo ) const ;
+
+  //=== VK_KHR_performance_query ===
+
+    
+     void acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR & info ) const ;
+
+    
+     void releaseProfilingLockKHR(  ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_EXT_debug_utils ===
+
+    
+     void setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT & nameInfo ) const ;
+
+    
+     void setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT & tagInfo ) const ;
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  //=== VK_ANDROID_external_memory_android_hardware_buffer ===
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer ) const ;
+
+    template <typename X, typename Y, typename... Z>
+    VULKAN_HPP_NODISCARD  StructureChain<X, Y, Z...> getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer ) const ;
+
+    
+    VULKAN_HPP_NODISCARD  struct AHardwareBuffer * getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID & info ) const ;
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+  //=== VK_KHR_get_memory_requirements2 ===
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const  VULKAN_HPP_NOEXCEPT;
+
+    template <typename X, typename Y, typename... Z>
+    VULKAN_HPP_NODISCARD  StructureChain<X, Y, Z...> getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const  VULKAN_HPP_NOEXCEPT;
+
+    template <typename X, typename Y, typename... Z>
+    VULKAN_HPP_NODISCARD  StructureChain<X, Y, Z...> getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+    VULKAN_HPP_NODISCARD  std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2> getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info ) const ;
+
+  //=== VK_KHR_acceleration_structure ===
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureKHR createAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::Result buildAccelerationStructuresKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos ) const ;
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::Result copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info ) const ;
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::Result copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info ) const ;
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::Result copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info ) const ;
+
+    template <typename DataType>
+    VULKAN_HPP_NODISCARD  std::vector<DataType> writeAccelerationStructuresPropertiesKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, size_t dataSize, size_t stride ) const ;
+
+    template <typename DataType>
+    VULKAN_HPP_NODISCARD  DataType writeAccelerationStructuresPropertyKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, size_t stride ) const ;
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::DeviceAddress getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR & info ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR & versionInfo ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR getAccelerationStructureBuildSizesKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR & buildInfo, VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & maxPrimitiveCounts VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const ;
+
+  //=== VK_KHR_sampler_ycbcr_conversion ===
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SamplerYcbcrConversion createSamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+
+    
+     void destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_KHR_bind_memory2 ===
+
+    
+     void bindBufferMemory2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos ) const ;
+
+    
+     void bindImageMemory2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos ) const ;
+
+  //=== VK_EXT_validation_cache ===
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::ValidationCacheEXT createValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+
+  //=== VK_NV_ray_tracing ===
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureNV createAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info ) const  VULKAN_HPP_NOEXCEPT;
+
+    template <typename X, typename Y, typename... Z>
+    VULKAN_HPP_NODISCARD  StructureChain<X, Y, Z...> getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void bindAccelerationStructureMemoryNV( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV> const & bindInfos ) const ;
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_RAII_NAMESPACE::Pipeline> createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Pipeline createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache, VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+
+  //=== VK_KHR_maintenance3 ===
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const  VULKAN_HPP_NOEXCEPT;
+
+    template <typename X, typename Y, typename... Z>
+    VULKAN_HPP_NODISCARD  StructureChain<X, Y, Z...> getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_EXT_external_memory_host ===
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void * pHostPointer ) const ;
+
+  //=== VK_EXT_calibrated_timestamps ===
+
+    
+    VULKAN_HPP_NODISCARD  std::pair<std::vector<uint64_t>, uint64_t> getCalibratedTimestampsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> const & timestampInfos ) const ;
+
+    
+    VULKAN_HPP_NODISCARD  std::pair<uint64_t, uint64_t> getCalibratedTimestampEXT( const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT & timestampInfo ) const ;
+
+  //=== VK_KHR_timeline_semaphore ===
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::Result waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, uint64_t timeout ) const ;
+
+    
+     void signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo ) const ;
+
+  //=== VK_INTEL_performance_query ===
+
+    
+     void initializePerformanceApiINTEL( const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL & initializeInfo ) const ;
+
+    
+     void uninitializePerformanceApiINTEL(  ) const  VULKAN_HPP_NOEXCEPT;
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::PerformanceConfigurationINTEL acquirePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const & acquireInfo ) const; 
+#endif
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::PerformanceValueINTEL getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter ) const ;
+
+  //=== VK_EXT_buffer_device_address ===
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::DeviceAddress getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const  VULKAN_HPP_NOEXCEPT;
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_EXT_full_screen_exclusive ===
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const ;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_buffer_device_address ===
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::DeviceAddress getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+    VULKAN_HPP_NODISCARD  uint64_t getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+    VULKAN_HPP_NODISCARD  uint64_t getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_KHR_deferred_host_operations ===
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR createDeferredOperationKHR( VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+
+  //=== VK_KHR_pipeline_executable_properties ===
+
+    
+    VULKAN_HPP_NODISCARD  std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR> getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo ) const ;
+
+    
+    VULKAN_HPP_NODISCARD  std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR> getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo ) const ;
+
+    
+    VULKAN_HPP_NODISCARD  std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR> getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo ) const ;
+
+  //=== VK_EXT_swapchain_maintenance1 ===
+
+    
+     void releaseSwapchainImagesEXT( const VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT & releaseInfo ) const ;
+
+  //=== VK_NV_device_generated_commands ===
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::MemoryRequirements2 getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info ) const  VULKAN_HPP_NOEXCEPT;
+
+    template <typename X, typename Y, typename... Z>
+    VULKAN_HPP_NODISCARD  StructureChain<X, Y, Z...> getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info ) const  VULKAN_HPP_NOEXCEPT;
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::IndirectCommandsLayoutNV createIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+
+  //=== VK_EXT_private_data ===
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::PrivateDataSlot createPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+
+    
+     void destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, uint64_t data ) const ;
+
+    
+    VULKAN_HPP_NODISCARD  uint64_t getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot ) const  VULKAN_HPP_NOEXCEPT;
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+  //=== VK_EXT_metal_objects ===
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT exportMetalObjectsEXT(  ) const  VULKAN_HPP_NOEXCEPT;
+
+    template <typename X, typename Y, typename... Z>
+    VULKAN_HPP_NODISCARD  StructureChain<X, Y, Z...> exportMetalObjectsEXT(  ) const  VULKAN_HPP_NOEXCEPT;
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  //=== VK_EXT_descriptor_buffer ===
+
+    template <typename DescriptorType>
+    VULKAN_HPP_NODISCARD  DescriptorType getDescriptorEXT( const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT & descriptorInfo ) const  VULKAN_HPP_NOEXCEPT;
+
+    template <typename DataType>
+    VULKAN_HPP_NODISCARD  DataType getBufferOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT & info ) const ;
+
+    template <typename DataType>
+    VULKAN_HPP_NODISCARD  DataType getImageOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT & info ) const ;
+
+    template <typename DataType>
+    VULKAN_HPP_NODISCARD  DataType getImageViewOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT & info ) const ;
+
+    template <typename DataType>
+    VULKAN_HPP_NODISCARD  DataType getSamplerOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT & info ) const ;
+
+    template <typename DataType>
+    VULKAN_HPP_NODISCARD  DataType getAccelerationStructureOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT & info ) const ;
+
+  //=== VK_EXT_device_fault ===
+
+    
+    VULKAN_HPP_NODISCARD  std::pair<VULKAN_HPP_NAMESPACE::Result, std::pair<VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT, VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT>> getFaultInfoEXT(  ) const ;
+
+  //=== VK_KHR_ray_tracing_pipeline ===
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_RAII_NAMESPACE::Pipeline> createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR> const & deferredOperation, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Pipeline createRayTracingPipelineKHR( VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR> const & deferredOperation, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache, VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_external_memory ===
+
+    
+    VULKAN_HPP_NODISCARD  zx_handle_t getMemoryZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA & getZirconHandleInfo ) const ;
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, zx_handle_t zirconHandle ) const ;
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_external_semaphore ===
+
+    
+     void importSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA & importSemaphoreZirconHandleInfo ) const ;
+
+    
+    VULKAN_HPP_NODISCARD  zx_handle_t getSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA & getZirconHandleInfo ) const ;
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_buffer_collection ===
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::BufferCollectionFUCHSIA createBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  //=== VK_NV_external_memory_rdma ===
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::RemoteAddressNV getMemoryRemoteAddressNV( const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV & memoryGetRemoteAddressInfo ) const ;
+
+  //=== VK_EXT_pipeline_properties ===
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::BaseOutStructure getPipelinePropertiesEXT( const VULKAN_HPP_NAMESPACE::PipelineInfoEXT & pipelineInfo ) const ;
+
+  //=== VK_EXT_opacity_micromap ===
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::MicromapEXT createMicromapEXT( VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::Result buildMicromapsEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT> const & infos ) const ;
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::Result copyMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info ) const ;
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::Result copyMicromapToMemoryEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info ) const ;
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::Result copyMemoryToMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info ) const ;
+
+    template <typename DataType>
+    VULKAN_HPP_NODISCARD  std::vector<DataType> writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps, VULKAN_HPP_NAMESPACE::QueryType queryType, size_t dataSize, size_t stride ) const ;
+
+    template <typename DataType>
+    VULKAN_HPP_NODISCARD  DataType writeMicromapsPropertyEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps, VULKAN_HPP_NAMESPACE::QueryType queryType, size_t stride ) const ;
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR getMicromapCompatibilityEXT( const VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT & versionInfo ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT getMicromapBuildSizesEXT( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT & buildInfo ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_KHR_maintenance4 ===
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const  VULKAN_HPP_NOEXCEPT;
+
+    template <typename X, typename Y, typename... Z>
+    VULKAN_HPP_NODISCARD  StructureChain<X, Y, Z...> getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const  VULKAN_HPP_NOEXCEPT;
+
+    template <typename X, typename Y, typename... Z>
+    VULKAN_HPP_NODISCARD  StructureChain<X, Y, Z...> getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+    VULKAN_HPP_NODISCARD  std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2> getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const ;
+
+  //=== VK_VALVE_descriptor_set_host_mapping ===
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE getDescriptorSetLayoutHostMappingInfoVALVE( const VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE & bindingReference ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_EXT_shader_module_identifier ===
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT getShaderModuleCreateInfoIdentifierEXT( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_NV_optical_flow ===
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::OpticalFlowSessionNV createOpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+
+  //=== VK_QCOM_tile_properties ===
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::TilePropertiesQCOM getDynamicRenderingTilePropertiesQCOM( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo ) const  VULKAN_HPP_NOEXCEPT;
+
+
+  private:
+
+    
+    VULKAN_HPP_NAMESPACE::Device m_device = {};
+    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
+      std::unique_ptr<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher> m_dispatcher;
+  };
+
+  class AccelerationStructureKHR
+  {
+  public:
+    using CType = VkAccelerationStructureKHR;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureKHR;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureKHR;
+
+  public:
+
+
+    static android::base::expected<AccelerationStructureKHR, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) 
+    {
+      VkAccelerationStructureKHR accelerationStructure;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateAccelerationStructureKHR( static_cast<VkDevice>( *device ), reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkAccelerationStructureKHR*>( &accelerationStructure ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        return android::base::unexpected(result);
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureKHR(device, accelerationStructure, allocator);
+    }
+
+
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    AccelerationStructureKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( *device ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateAccelerationStructureKHR( static_cast<VkDevice>( *device ), reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ), reinterpret_cast<VkAccelerationStructureKHR*>( &m_accelerationStructure ) ) );  // fixed: was '&accelerationStructure', which is not declared in this constructor; the created handle must go into the member
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, "vkCreateAccelerationStructureKHR" );
+      }
+    }
+#endif
+
+    AccelerationStructureKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkAccelerationStructureKHR accelerationStructure, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( *device ), m_accelerationStructure( accelerationStructure ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {}
+
+
+    AccelerationStructureKHR( std::nullptr_t ) {}
+
+    ~AccelerationStructureKHR()
+    {
+      clear();
+    }
+
+    AccelerationStructureKHR() = delete;
+      AccelerationStructureKHR( AccelerationStructureKHR const & ) = delete;
+    AccelerationStructureKHR( AccelerationStructureKHR && rhs ) VULKAN_HPP_NOEXCEPT
+      : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ), m_accelerationStructure( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_accelerationStructure, {} ) ), m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ), m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
+    {}
+      AccelerationStructureKHR & operator=( AccelerationStructureKHR const & ) = delete;
+    AccelerationStructureKHR & operator=( AccelerationStructureKHR && rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      if ( this != &rhs )
+      {
+          clear();
+          m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
+          m_accelerationStructure = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_accelerationStructure, {} );
+          m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
+        m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+      }
+      return *this;
+    }
+
+    VULKAN_HPP_NAMESPACE::AccelerationStructureKHR const & operator*() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_accelerationStructure;
+    }
+
+    void clear() VULKAN_HPP_NOEXCEPT
+    {
+        if ( m_accelerationStructure )
+        {
+          getDispatcher()->vkDestroyAccelerationStructureKHR( static_cast<VkDevice>( m_device ), static_cast<VkAccelerationStructureKHR>( m_accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+        }
+      m_device = nullptr;
+      m_accelerationStructure = nullptr;
+      m_allocator = nullptr;
+        m_dispatcher = nullptr;
+    }
+
+    VULKAN_HPP_NAMESPACE::AccelerationStructureKHR release()
+    {
+
+      m_device = nullptr;
+      m_allocator = nullptr;
+        m_dispatcher = nullptr;
+        return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_accelerationStructure, nullptr );
+    }
+
+
+    VULKAN_HPP_NAMESPACE::Device getDevice() const
+    {
+      return m_device;
+    }
+
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
+    {
+      VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+      return m_dispatcher;
+    }
+
+    void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureKHR & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+
+      std::swap( m_device, rhs.m_device );
+      std::swap( m_accelerationStructure, rhs.m_accelerationStructure );
+      std::swap( m_allocator, rhs.m_allocator );
+      std::swap( m_dispatcher, rhs.m_dispatcher );
+    }
+
+
+
+  private:
+
+    
+    VULKAN_HPP_NAMESPACE::Device m_device = {};
+    VULKAN_HPP_NAMESPACE::AccelerationStructureKHR m_accelerationStructure = {};
+    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
+  };
+
+  class AccelerationStructureNV
+  {
+  public:
+    using CType = VkAccelerationStructureNV;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureNV;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureNV;
+
+  public:
+
+
+    static android::base::expected<AccelerationStructureNV, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) 
+    {
+      VkAccelerationStructureNV accelerationStructure;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateAccelerationStructureNV( static_cast<VkDevice>( *device ), reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkAccelerationStructureNV*>( &accelerationStructure ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        return android::base::unexpected(result);
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureNV(device, accelerationStructure, allocator);
+    }
+
+
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    AccelerationStructureNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( *device ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateAccelerationStructureNV( static_cast<VkDevice>( *device ), reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ), reinterpret_cast<VkAccelerationStructureNV*>( &m_accelerationStructure ) ) );  // fixed: was '&accelerationStructure', which is not declared in this constructor; the created handle must go into the member
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, "vkCreateAccelerationStructureNV" );
+      }
+    }
+#endif
+
+    AccelerationStructureNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkAccelerationStructureNV accelerationStructure, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( *device ), m_accelerationStructure( accelerationStructure ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {}
+
+
+    AccelerationStructureNV( std::nullptr_t ) {}
+
+    ~AccelerationStructureNV()
+    {
+      clear();
+    }
+
+    AccelerationStructureNV() = delete;
+      AccelerationStructureNV( AccelerationStructureNV const & ) = delete;
+    AccelerationStructureNV( AccelerationStructureNV && rhs ) VULKAN_HPP_NOEXCEPT
+      : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ), m_accelerationStructure( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_accelerationStructure, {} ) ), m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ), m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
+    {}
+      AccelerationStructureNV & operator=( AccelerationStructureNV const & ) = delete;
+    AccelerationStructureNV & operator=( AccelerationStructureNV && rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      if ( this != &rhs )
+      {
+          clear();
+          m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
+          m_accelerationStructure = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_accelerationStructure, {} );
+          m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
+        m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+      }
+      return *this;
+    }
+
+    VULKAN_HPP_NAMESPACE::AccelerationStructureNV const & operator*() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_accelerationStructure;
+    }
+
+    void clear() VULKAN_HPP_NOEXCEPT
+    {
+        if ( m_accelerationStructure )
+        {
+          getDispatcher()->vkDestroyAccelerationStructureNV( static_cast<VkDevice>( m_device ), static_cast<VkAccelerationStructureNV>( m_accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+        }
+      m_device = nullptr;
+      m_accelerationStructure = nullptr;
+      m_allocator = nullptr;
+        m_dispatcher = nullptr;
+    }
+
+    VULKAN_HPP_NAMESPACE::AccelerationStructureNV release()
+    {
+
+      m_device = nullptr;
+      m_allocator = nullptr;
+        m_dispatcher = nullptr;
+        return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_accelerationStructure, nullptr );
+    }
+
+
+    VULKAN_HPP_NAMESPACE::Device getDevice() const
+    {
+      return m_device;
+    }
+
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
+    {
+      VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+      return m_dispatcher;
+    }
+
+    void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureNV & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+
+      std::swap( m_device, rhs.m_device );
+      std::swap( m_accelerationStructure, rhs.m_accelerationStructure );
+      std::swap( m_allocator, rhs.m_allocator );
+      std::swap( m_dispatcher, rhs.m_dispatcher );
+    }
+
+
+  //=== VK_NV_ray_tracing ===
+
+    template <typename DataType>
+    VULKAN_HPP_NODISCARD  std::vector<DataType> getHandle( size_t dataSize ) const ;
+
+    template <typename DataType>
+    VULKAN_HPP_NODISCARD  DataType getHandle(  ) const ;
+
+
+  private:
+
+    
+    VULKAN_HPP_NAMESPACE::Device m_device = {};
+    VULKAN_HPP_NAMESPACE::AccelerationStructureNV m_accelerationStructure = {};
+    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
+  };
+
+  class Buffer
+  {
+  public:
+    using CType = VkBuffer;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eBuffer;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBuffer;
+
+  public:
+
+
+    static android::base::expected<Buffer, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::BufferCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) 
+    {
+      VkBuffer buffer;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateBuffer( static_cast<VkDevice>( *device ), reinterpret_cast<const VkBufferCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkBuffer*>( &buffer ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        return android::base::unexpected(result);
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Buffer(device, buffer, allocator);
+    }
+
+
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    Buffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::BufferCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( *device ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateBuffer( static_cast<VkDevice>( *device ), reinterpret_cast<const VkBufferCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ), reinterpret_cast<VkBuffer*>( &m_buffer ) ) );  // fixed: was '&buffer', which is not declared in this constructor; the created handle must go into the member
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, "vkCreateBuffer" );
+      }
+    }
+#endif
+
+    Buffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkBuffer buffer, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( *device ), m_buffer( buffer ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {}
+
+
+    Buffer( std::nullptr_t ) {}
+
+    ~Buffer()
+    {
+      clear();
+    }
+
+    Buffer() = delete;
+      Buffer( Buffer const & ) = delete;
+    Buffer( Buffer && rhs ) VULKAN_HPP_NOEXCEPT
+      : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ), m_buffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_buffer, {} ) ), m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ), m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
+    {}
+      Buffer & operator=( Buffer const & ) = delete;
+    Buffer & operator=( Buffer && rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      if ( this != &rhs )
+      {
+          clear();
+          m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
+          m_buffer = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_buffer, {} );
+          m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
+        m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+      }
+      return *this;
+    }
+
+    VULKAN_HPP_NAMESPACE::Buffer const & operator*() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_buffer;
+    }
+
+    void clear() VULKAN_HPP_NOEXCEPT
+    {
+        if ( m_buffer )
+        {
+          getDispatcher()->vkDestroyBuffer( static_cast<VkDevice>( m_device ), static_cast<VkBuffer>( m_buffer ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+        }
+      m_device = nullptr;
+      m_buffer = nullptr;
+      m_allocator = nullptr;
+        m_dispatcher = nullptr;
+    }
+
+    VULKAN_HPP_NAMESPACE::Buffer release()
+    {
+
+      m_device = nullptr;
+      m_allocator = nullptr;
+        m_dispatcher = nullptr;
+        return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_buffer, nullptr );
+    }
+
+
+    VULKAN_HPP_NAMESPACE::Device getDevice() const
+    {
+      return m_device;
+    }
+
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
+    {
+      VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+      return m_dispatcher;
+    }
+
+    void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Buffer & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+
+      std::swap( m_device, rhs.m_device );
+      std::swap( m_buffer, rhs.m_buffer );
+      std::swap( m_allocator, rhs.m_allocator );
+      std::swap( m_dispatcher, rhs.m_dispatcher );
+    }
+
+
+  //=== VK_VERSION_1_0 ===
+
+    
+     void bindMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset ) const ;
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::MemoryRequirements getMemoryRequirements(  ) const  VULKAN_HPP_NOEXCEPT;
+
+
+  private:
+
+    
+    VULKAN_HPP_NAMESPACE::Device m_device = {};
+    VULKAN_HPP_NAMESPACE::Buffer m_buffer = {};
+    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
+  };
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  class BufferCollectionFUCHSIA
+  {
+  public:
+    using CType = VkBufferCollectionFUCHSIA;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eBufferCollectionFUCHSIA;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferCollectionFUCHSIA;
+
+  public:
+
+
+    static android::base::expected<BufferCollectionFUCHSIA, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) 
+    {
+      VkBufferCollectionFUCHSIA collection;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateBufferCollectionFUCHSIA( static_cast<VkDevice>( *device ), reinterpret_cast<const VkBufferCollectionCreateInfoFUCHSIA *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkBufferCollectionFUCHSIA*>( &collection ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        return android::base::unexpected(result);
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::BufferCollectionFUCHSIA(device, collection, allocator);
+    }
+
+
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    BufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( *device ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateBufferCollectionFUCHSIA( static_cast<VkDevice>( *device ), reinterpret_cast<const VkBufferCollectionCreateInfoFUCHSIA *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ), reinterpret_cast<VkBufferCollectionFUCHSIA*>( &m_collection ) ) );  // fixed: was '&collection', which is not declared in this constructor; the created handle must go into the member
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, "vkCreateBufferCollectionFUCHSIA" );
+      }
+    }
+#endif
+
+    BufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkBufferCollectionFUCHSIA collection, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( *device ), m_collection( collection ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {}
+
+
+    BufferCollectionFUCHSIA( std::nullptr_t ) {}
+
+    ~BufferCollectionFUCHSIA()
+    {
+      clear();
+    }
+
+    BufferCollectionFUCHSIA() = delete;
+      BufferCollectionFUCHSIA( BufferCollectionFUCHSIA const & ) = delete;
+    BufferCollectionFUCHSIA( BufferCollectionFUCHSIA && rhs ) VULKAN_HPP_NOEXCEPT
+      : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ), m_collection( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_collection, {} ) ), m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ), m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
+    {}
+      BufferCollectionFUCHSIA & operator=( BufferCollectionFUCHSIA const & ) = delete;
+    BufferCollectionFUCHSIA & operator=( BufferCollectionFUCHSIA && rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      if ( this != &rhs )
+      {
+          clear();
+          m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
+          m_collection = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_collection, {} );
+          m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
+        m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+      }
+      return *this;
+    }
+
+    VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA const & operator*() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_collection;
+    }
+
+    void clear() VULKAN_HPP_NOEXCEPT
+    {
+        if ( m_collection )
+        {
+          getDispatcher()->vkDestroyBufferCollectionFUCHSIA( static_cast<VkDevice>( m_device ), static_cast<VkBufferCollectionFUCHSIA>( m_collection ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+        }
+      m_device = nullptr;
+      m_collection = nullptr;
+      m_allocator = nullptr;
+        m_dispatcher = nullptr;
+    }
+
+    VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA release()
+    {
+
+      m_device = nullptr;
+      m_allocator = nullptr;
+        m_dispatcher = nullptr;
+        return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_collection, nullptr );
+    }
+
+
+    VULKAN_HPP_NAMESPACE::Device getDevice() const
+    {
+      return m_device;
+    }
+
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
+    {
+      VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+      return m_dispatcher;
+    }
+
+    void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::BufferCollectionFUCHSIA & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+
+      std::swap( m_device, rhs.m_device );
+      std::swap( m_collection, rhs.m_collection );
+      std::swap( m_allocator, rhs.m_allocator );
+      std::swap( m_dispatcher, rhs.m_dispatcher );
+    }
+
+
+  //=== VK_FUCHSIA_buffer_collection ===
+
+    
+     void setImageConstraints( const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA & imageConstraintsInfo ) const ;
+
+    
+     void setBufferConstraints( const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA & bufferConstraintsInfo ) const ;
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA getProperties(  ) const ;
+
+
+  private:
+
+    
+    VULKAN_HPP_NAMESPACE::Device m_device = {};
+    VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA m_collection = {};
+    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
+  };
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  class BufferView
+  {
+  public:
+    using CType = VkBufferView;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eBufferView;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferView;
+
+  public:
+
+
+    static android::base::expected<BufferView, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::BufferViewCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) 
+    {
+      VkBufferView bufferView;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateBufferView( static_cast<VkDevice>( *device ), reinterpret_cast<const VkBufferViewCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkBufferView*>( &bufferView ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        return android::base::unexpected(result);
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::BufferView(device, bufferView, allocator);
+    }
+
+
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    BufferView( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::BufferViewCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( *device ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateBufferView( static_cast<VkDevice>( *device ), reinterpret_cast<const VkBufferViewCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ), reinterpret_cast<VkBufferView*>( &m_bufferView ) ) );  // fixed: was '&bufferView', which is not declared in this constructor; the created handle must go into the member
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, "vkCreateBufferView" );
+      }
+    }
+#endif
+
+    BufferView( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkBufferView bufferView, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( *device ), m_bufferView( bufferView ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {}
+
+
+    BufferView( std::nullptr_t ) {}
+
+    ~BufferView()
+    {
+      clear();
+    }
+
+    BufferView() = delete;
+      BufferView( BufferView const & ) = delete;
+    BufferView( BufferView && rhs ) VULKAN_HPP_NOEXCEPT
+      : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ), m_bufferView( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_bufferView, {} ) ), m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ), m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
+    {}
+      BufferView & operator=( BufferView const & ) = delete;
+    BufferView & operator=( BufferView && rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      if ( this != &rhs )
+      {
+          clear();
+          m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
+          m_bufferView = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_bufferView, {} );
+          m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
+        m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+      }
+      return *this;
+    }
+
+    VULKAN_HPP_NAMESPACE::BufferView const & operator*() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_bufferView;
+    }
+
+    void clear() VULKAN_HPP_NOEXCEPT
+    {
+        if ( m_bufferView )
+        {
+          getDispatcher()->vkDestroyBufferView( static_cast<VkDevice>( m_device ), static_cast<VkBufferView>( m_bufferView ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+        }
+      m_device = nullptr;
+      m_bufferView = nullptr;
+      m_allocator = nullptr;
+        m_dispatcher = nullptr;
+    }
+
+    VULKAN_HPP_NAMESPACE::BufferView release()
+    {
+
+      m_device = nullptr;
+      m_allocator = nullptr;
+        m_dispatcher = nullptr;
+        return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_bufferView, nullptr );
+    }
+
+
+    VULKAN_HPP_NAMESPACE::Device getDevice() const
+    {
+      return m_device;
+    }
+
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
+    {
+      VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+      return m_dispatcher;
+    }
+
+    void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::BufferView & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+
+      std::swap( m_device, rhs.m_device );
+      std::swap( m_bufferView, rhs.m_bufferView );
+      std::swap( m_allocator, rhs.m_allocator );
+      std::swap( m_dispatcher, rhs.m_dispatcher );
+    }
+
+
+
+  private:
+
+    
+    VULKAN_HPP_NAMESPACE::Device m_device = {};
+    VULKAN_HPP_NAMESPACE::BufferView m_bufferView = {};
+    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
+  };
+
+  // RAII owner of a VkCommandPool.  Stores the creating device handle, the
+  // optional allocation callbacks and the device dispatch table so that
+  // clear() / the destructor can invoke vkDestroyCommandPool correctly.
+  class CommandPool
+  {
+  public:
+    using CType = VkCommandPool;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCommandPool;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandPool;
+
+  public:
+
+
+    // Exception-free factory: creates the pool and returns the wrapper on
+    // success, or the failing Result wrapped in android::base::unexpected.
+    static android::base::expected<CommandPool, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) 
+    {
+      VkCommandPool commandPool;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateCommandPool( static_cast<VkDevice>( *device ), reinterpret_cast<const VkCommandPoolCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkCommandPool*>( &commandPool ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        return android::base::unexpected(result);
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CommandPool(device, commandPool, allocator);
+    }
+
+
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    // Throwing constructor: creates the pool, storing the handle in
+    // m_commandPool, and calls throwResultException on failure.
+    CommandPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( *device ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {
+      // FIX: write the created handle into the m_commandPool member.  The
+      // previous code passed '&commandPool', a name not declared in this
+      // constructor's scope (only create() has such a local), which fails to
+      // compile whenever VULKAN_HPP_NO_EXCEPTIONS is not defined.
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateCommandPool( static_cast<VkDevice>( *device ), reinterpret_cast<const VkCommandPoolCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ), reinterpret_cast<VkCommandPool*>( &m_commandPool ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, "vkCreateCommandPool" );
+      }
+    }
+#endif
+
+    // Adopts ownership of an already-created VkCommandPool handle.
+    CommandPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkCommandPool commandPool, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( *device ), m_commandPool( commandPool ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {}
+
+
+    // Empty wrapper that owns nothing; clear() is then a no-op destroy.
+    CommandPool( std::nullptr_t ) {}
+
+    ~CommandPool()
+    {
+      clear();
+    }
+
+    // Move-only: copying a pool owner would double-destroy the handle.
+    CommandPool() = delete;
+      CommandPool( CommandPool const & ) = delete;
+    CommandPool( CommandPool && rhs ) VULKAN_HPP_NOEXCEPT
+      : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ), m_commandPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_commandPool, {} ) ), m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ), m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
+    {}
+      CommandPool & operator=( CommandPool const & ) = delete;
+    // Move assignment destroys any currently owned pool, then takes rhs's.
+    CommandPool & operator=( CommandPool && rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      if ( this != &rhs )
+      {
+          clear();
+          m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
+          m_commandPool = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_commandPool, {} );
+          m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
+        m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+      }
+      return *this;
+    }
+
+    // Access the underlying handle without transferring ownership.
+    VULKAN_HPP_NAMESPACE::CommandPool const & operator*() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_commandPool;
+    }
+
+    // Destroys the owned pool (if any) and resets all members to empty.
+    void clear() VULKAN_HPP_NOEXCEPT
+    {
+        if ( m_commandPool )
+        {
+          getDispatcher()->vkDestroyCommandPool( static_cast<VkDevice>( m_device ), static_cast<VkCommandPool>( m_commandPool ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+        }
+      m_device = nullptr;
+      m_commandPool = nullptr;
+      m_allocator = nullptr;
+        m_dispatcher = nullptr;
+    }
+
+    // Relinquishes ownership: returns the handle without destroying it and
+    // leaves this wrapper empty.
+    VULKAN_HPP_NAMESPACE::CommandPool release()
+    {
+
+      m_device = nullptr;
+      m_allocator = nullptr;
+        m_dispatcher = nullptr;
+        return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_commandPool, nullptr );
+    }
+
+
+    VULKAN_HPP_NAMESPACE::Device getDevice() const
+    {
+      return m_device;
+    }
+
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
+    {
+      VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+      return m_dispatcher;
+    }
+
+    void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CommandPool & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+
+      std::swap( m_device, rhs.m_device );
+      std::swap( m_commandPool, rhs.m_commandPool );
+      std::swap( m_allocator, rhs.m_allocator );
+      std::swap( m_dispatcher, rhs.m_dispatcher );
+    }
+
+
+  //=== VK_VERSION_1_0 ===
+
+    
+     void reset( VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const ;
+
+  //=== VK_VERSION_1_1 ===
+
+    
+     void trim( VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_KHR_maintenance1 ===
+
+    
+     void trimKHR( VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const  VULKAN_HPP_NOEXCEPT;
+
+
+  private:
+
+    
+    VULKAN_HPP_NAMESPACE::Device m_device = {};
+    VULKAN_HPP_NAMESPACE::CommandPool m_commandPool = {};
+    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
+  };
+
+  class CommandBuffer
+  {
+  public:
+    using CType = VkCommandBuffer;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCommandBuffer;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandBuffer;
+
+  public:
+
+
+    CommandBuffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkCommandBuffer commandBuffer, VkCommandPool commandPool )
+      : m_device( *device ), m_commandPool( commandPool ), m_commandBuffer( commandBuffer ), m_dispatcher( device.getDispatcher() )
+    {}
+
+
+    CommandBuffer( std::nullptr_t ) {}
+
+    ~CommandBuffer()
+    {
+      clear();
+    }
+
+    CommandBuffer() = delete;
+      CommandBuffer( CommandBuffer const & ) = delete;
+    CommandBuffer( CommandBuffer && rhs ) VULKAN_HPP_NOEXCEPT
+      : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ), m_commandPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_commandPool, {} ) ), m_commandBuffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_commandBuffer, {} ) ), m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
+    {}
+      CommandBuffer & operator=( CommandBuffer const & ) = delete;
+    CommandBuffer & operator=( CommandBuffer && rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      if ( this != &rhs )
+      {
+          clear();
+          m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
+          m_commandPool = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_commandPool, {} );
+          m_commandBuffer = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_commandBuffer, {} );
+        m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+      }
+      return *this;
+    }
+
+    VULKAN_HPP_NAMESPACE::CommandBuffer const & operator*() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_commandBuffer;
+    }
+
+    void clear() VULKAN_HPP_NOEXCEPT
+    {
+        if ( m_commandBuffer )
+        {
+          getDispatcher()->vkFreeCommandBuffers( static_cast<VkDevice>( m_device ), static_cast<VkCommandPool>( m_commandPool ), 1, reinterpret_cast<VkCommandBuffer const *>( &m_commandBuffer ) );
+        }
+      m_device = nullptr;
+      m_commandPool = nullptr;
+      m_commandBuffer = nullptr;
+        m_dispatcher = nullptr;
+    }
+
+    VULKAN_HPP_NAMESPACE::CommandBuffer release()
+    {
+
+      m_device = nullptr;
+      m_commandPool = nullptr;
+        m_dispatcher = nullptr;
+        return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_commandBuffer, nullptr );
+    }
+
+
+    VULKAN_HPP_NAMESPACE::Device getDevice() const
+    {
+      return m_device;
+    }
+
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
+    {
+      VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+      return m_dispatcher;
+    }
+
+    void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CommandBuffer & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+
+      std::swap( m_device, rhs.m_device );
+      std::swap( m_commandPool, rhs.m_commandPool );
+      std::swap( m_commandBuffer, rhs.m_commandBuffer );
+      std::swap( m_dispatcher, rhs.m_dispatcher );
+    }
+
+
+  //=== VK_VERSION_1_0 ===
+
+    
+     void begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo & beginInfo ) const ;
+
+    
+     void end(  ) const ;
+
+    
+     void reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const ;
+
+    
+     void bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setViewport( uint32_t firstViewport, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setScissor( uint32_t firstScissor, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setLineWidth( float lineWidth ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setBlendConstants( const float blendConstants[4] ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setDepthBounds( float minDepthBounds, float maxDepthBounds ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets, VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & dynamicOffsets ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::IndexType indexType ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void bindVertexBuffers( uint32_t firstBinding, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets ) const ;
+
+    
+     void draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferCopy> const & regions ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageCopy> const & regions ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageBlit> const & regions, VULKAN_HPP_NAMESPACE::Filter filter ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions ) const  VULKAN_HPP_NOEXCEPT;
+
+    template <typename DataType>
+     void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::ArrayProxy<const DataType> const & data ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize size, uint32_t data ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearColorValue & color, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue & depthStencil, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void clearAttachments( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearAttachment> const & attachments, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearRect> const & rects ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageResolve> const & regions ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void resetEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void waitEvents( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const  VULKAN_HPP_NOEXCEPT;
+
+    template <typename ValuesType>
+     void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, VULKAN_HPP_NAMESPACE::ArrayProxy<const ValuesType> const & values ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void endRenderPass(  ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void executeCommands( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_VERSION_1_1 ===
+
+    
+     void setDeviceMask( uint32_t deviceMask ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void dispatchBase( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_VERSION_1_2 ===
+
+    
+     void drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_VERSION_1_3 ===
+
+    
+     void setEvent2( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void resetEvent2( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void waitEvents2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfo> const & dependencyInfos ) const ;
+
+    
+     void pipelineBarrier2( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void writeTimestamp2( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void copyBuffer2( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void copyBufferToImage2( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void copyImageToBuffer2( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void blitImage2( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void resolveImage2( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void beginRendering( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void endRendering(  ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setPrimitiveTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setViewportWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setScissorWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void bindVertexBuffers2( uint32_t firstBinding, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const ;
+
+    
+     void setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setStencilOp( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, VULKAN_HPP_NAMESPACE::StencilOp failOp, VULKAN_HPP_NAMESPACE::StencilOp passOp, VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, VULKAN_HPP_NAMESPACE::CompareOp compareOp ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_EXT_debug_marker ===
+
+    
+     void debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void debugMarkerEndEXT(  ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_KHR_video_queue ===
+
+    
+     void beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR & beginInfo ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR & endCodingInfo ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR & codingControlInfo ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_KHR_video_decode_queue ===
+
+    
+     void decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR & decodeInfo ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_EXT_transform_feedback ===
+
+    
+     void bindTransformFeedbackBuffersEXT( uint32_t firstBinding, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const ;
+
+    
+     void beginTransformFeedbackEXT( uint32_t firstCounterBuffer, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const ;
+
+    
+     void endTransformFeedbackEXT( uint32_t firstCounterBuffer, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const ;
+
+    
+     void beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, uint32_t index ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void drawIndirectByteCountEXT( uint32_t instanceCount, uint32_t firstInstance, VULKAN_HPP_NAMESPACE::Buffer counterBuffer, VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_NVX_binary_import ===
+
+    
+     void cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX & launchInfo ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_AMD_draw_indirect_count ===
+
+    
+     void drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_KHR_dynamic_rendering ===
+
+    
+     void beginRenderingKHR( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void endRenderingKHR(  ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_KHR_device_group ===
+
+    
+     void setDeviceMaskKHR( uint32_t deviceMask ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void dispatchBaseKHR( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_KHR_push_descriptor ===
+
+    
+     void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites ) const  VULKAN_HPP_NOEXCEPT;
+
+    template <typename DataType>
+     void pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, DataType const & data ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_EXT_conditional_rendering ===
+
+    
+     void beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void endConditionalRenderingEXT(  ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_NV_clip_space_w_scaling ===
+
+    
+     void setViewportWScalingNV( uint32_t firstViewport, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const & viewportWScalings ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_EXT_discard_rectangles ===
+
+    
+     void setDiscardRectangleEXT( uint32_t firstDiscardRectangle, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_KHR_create_renderpass2 ===
+
+    
+     void beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_EXT_debug_utils ===
+
+    
+     void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void endDebugUtilsLabelEXT(  ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_EXT_sample_locations ===
+
+    
+     void setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT & sampleLocationsInfo ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_KHR_acceleration_structure ===
+
+    
+     void buildAccelerationStructuresKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos ) const ;
+
+    
+     void buildAccelerationStructuresIndirectKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceAddress> const & indirectDeviceAddresses, VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & indirectStrides, VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t * const> const & pMaxPrimitiveCounts ) const ;
+
+    
+     void copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void writeAccelerationStructuresPropertiesKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_NV_shading_rate_image ===
+
+    
+     void bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setViewportShadingRatePaletteNV( uint32_t firstViewport, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const & shadingRatePalettes ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> const & customSampleOrders ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_NV_ray_tracing ===
+
+    
+     void buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV & info, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void writeAccelerationStructuresPropertiesNV( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> const & accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_KHR_draw_indirect_count ===
+
+    
+     void drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_AMD_buffer_marker ===
+
+    
+     void writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, uint32_t marker ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_NV_mesh_shader ===
+
+    
+     void drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_NV_scissor_exclusive ===
+
+    
+     void setExclusiveScissorNV( uint32_t firstExclusiveScissor, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_NV_device_diagnostic_checkpoints ===
+
+    template <typename CheckpointMarkerType>
+     void setCheckpointNV( CheckpointMarkerType const & checkpointMarker ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_INTEL_performance_query ===
+
+    
+     void setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL & markerInfo ) const ;
+
+    
+     void setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL & markerInfo ) const ;
+
+    
+     void setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL & overrideInfo ) const ;
+
+  //=== VK_KHR_fragment_shading_rate ===
+
+    
+     void setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D & fragmentSize, const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2] ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_EXT_line_rasterization ===
+
+    
+     void setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_EXT_extended_dynamic_state ===
+
+    
+     void setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setViewportWithCountEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setScissorWithCountEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void bindVertexBuffers2EXT( uint32_t firstBinding, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const ;
+
+    
+     void setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, VULKAN_HPP_NAMESPACE::StencilOp failOp, VULKAN_HPP_NAMESPACE::StencilOp passOp, VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, VULKAN_HPP_NAMESPACE::CompareOp compareOp ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_NV_device_generated_commands ===
+
+    
+     void preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t groupIndex ) const  VULKAN_HPP_NOEXCEPT;
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  //=== VK_KHR_video_encode_queue ===
+
+    
+     void encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR & encodeInfo ) const  VULKAN_HPP_NOEXCEPT;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  //=== VK_KHR_synchronization2 ===
+
+    
+     void setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void resetEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void waitEvents2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfo> const & dependencyInfos ) const ;
+
+    
+     void pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void writeTimestamp2KHR( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, uint32_t marker ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_EXT_descriptor_buffer ===
+
+    
+     void bindDescriptorBuffersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorBufferBindingInfoEXT> const & bindingInfos ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setDescriptorBufferOffsetsEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & bufferIndices, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets ) const ;
+
+    
+     void bindDescriptorBufferEmbeddedSamplersEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_NV_fragment_shading_rate_enums ===
+
+    
+     void setFragmentShadingRateEnumNV( VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate, const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2] ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_EXT_mesh_shader ===
+
+    
+     void drawMeshTasksEXT( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void drawMeshTasksIndirectEXT( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void drawMeshTasksIndirectCountEXT( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_KHR_copy_commands2 ===
+
+    
+     void copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_KHR_ray_tracing_pipeline ===
+
+    
+     void traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable, VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setRayTracingPipelineStackSizeKHR( uint32_t pipelineStackSize ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_EXT_vertex_input_dynamic_state ===
+
+    
+     void setVertexInputEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT> const & vertexBindingDescriptions, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT> const & vertexAttributeDescriptions ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_HUAWEI_subpass_shading ===
+
+    
+     void subpassShadingHUAWEI(  ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_HUAWEI_invocation_mask ===
+
+    
+     void bindInvocationMaskHUAWEI( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_EXT_extended_dynamic_state2 ===
+
+    
+     void setPatchControlPointsEXT( uint32_t patchControlPoints ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setRasterizerDiscardEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setDepthBiasEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setLogicOpEXT( VULKAN_HPP_NAMESPACE::LogicOp logicOp ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setPrimitiveRestartEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_EXT_color_write_enable ===
+
+    
+     void setColorWriteEnableEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorWriteEnables ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_KHR_ray_tracing_maintenance1 ===
+
+    
+     void traceRaysIndirect2KHR( VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_EXT_multi_draw ===
+
+    
+     void drawMultiEXT( VULKAN_HPP_NAMESPACE::StridedArrayProxy<const VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT> const & vertexInfo, uint32_t instanceCount, uint32_t firstInstance ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void drawMultiIndexedEXT( VULKAN_HPP_NAMESPACE::StridedArrayProxy<const VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT> const & indexInfo, uint32_t instanceCount, uint32_t firstInstance, Optional<const int32_t> vertexOffset VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_EXT_opacity_micromap ===
+
+    
+     void buildMicromapsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT> const & infos ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void copyMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void copyMicromapToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void copyMemoryToMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_HUAWEI_cluster_culling_shader ===
+
+    
+     void drawClusterHUAWEI( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void drawClusterIndirectHUAWEI( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_NV_copy_memory_indirect ===
+
+    
+     void copyMemoryIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, uint32_t copyCount, uint32_t stride ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void copyMemoryToImageIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, uint32_t stride, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceLayers> const & imageSubresources ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_NV_memory_decompression ===
+
+    
+     void decompressMemoryNV( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV> const & decompressMemoryRegions ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void decompressMemoryIndirectCountNV( VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsAddress, VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsCountAddress, uint32_t stride ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_EXT_extended_dynamic_state3 ===
+
+    
+     void setTessellationDomainOriginEXT( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setDepthClampEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setPolygonModeEXT( VULKAN_HPP_NAMESPACE::PolygonMode polygonMode ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setRasterizationSamplesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setSampleMaskEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SampleMask> const & sampleMask ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setAlphaToCoverageEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setAlphaToOneEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setLogicOpEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setColorBlendEnableEXT( uint32_t firstAttachment, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorBlendEnables ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setColorBlendEquationEXT( uint32_t firstAttachment, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT> const & colorBlendEquations ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setColorWriteMaskEXT( uint32_t firstAttachment, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorComponentFlags> const & colorWriteMasks ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setRasterizationStreamEXT( uint32_t rasterizationStream ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setConservativeRasterizationModeEXT( VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setExtraPrimitiveOverestimationSizeEXT( float extraPrimitiveOverestimationSize ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setDepthClipEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setSampleLocationsEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setColorBlendAdvancedEXT( uint32_t firstAttachment, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT> const & colorBlendAdvanced ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setProvokingVertexModeEXT( VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setLineRasterizationModeEXT( VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setLineStippleEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setDepthClipNegativeOneToOneEXT( VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setViewportWScalingEnableNV( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setViewportSwizzleNV( uint32_t firstViewport, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV> const & viewportSwizzles ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setCoverageToColorEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setCoverageToColorLocationNV( uint32_t coverageToColorLocation ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setCoverageModulationModeNV( VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setCoverageModulationTableEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setCoverageModulationTableNV( VULKAN_HPP_NAMESPACE::ArrayProxy<const float> const & coverageModulationTable ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setShadingRateImageEnableNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setRepresentativeFragmentTestEnableNV( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+     void setCoverageReductionModeNV( VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_NV_optical_flow ===
+
+    
+     void opticalFlowExecuteNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV & executeInfo ) const  VULKAN_HPP_NOEXCEPT;
+
+
+  private:
+
+    
+    VULKAN_HPP_NAMESPACE::Device m_device = {};
+    VULKAN_HPP_NAMESPACE::CommandPool m_commandPool = {};
+    VULKAN_HPP_NAMESPACE::CommandBuffer m_commandBuffer = {};
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
+  };
+
+  // RAII container for a batch of command buffers allocated in a single
+  // vkAllocateCommandBuffers call.  Inherits std::vector so callers can index
+  // and iterate the per-buffer RAII wrappers directly; each element is
+  // constructed against allocateInfo.commandPool (per vulkan.hpp RAII
+  // convention the element frees its buffer on destruction — see the
+  // CommandBuffer class above).
+  class CommandBuffers : public std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CommandBuffer>
+  {
+  public:
+    
+
+    // Non-throwing factory variant: allocates allocateInfo.commandBufferCount
+    // raw handles, adopts each into a RAII CommandBuffer, and reports failure
+    // through android::base::expected instead of a C++ exception.
+    static android::base::expected<CommandBuffers, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo const & allocateInfo ) 
+    {
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * dispatcher = device.getDispatcher();
+      // Raw handles are filled here first, then wrapped one by one below.
+      std::vector<VkCommandBuffer> commandBuffers( allocateInfo.commandBufferCount );
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->vkAllocateCommandBuffers( static_cast<VkDevice>( *device ), reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), commandBuffers.data() ) );
+      if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        // Start from an empty (nullptr-constructed) container and reserve up
+        // front so emplace_back never reallocates while adopting handles.
+        VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CommandBuffers ret(nullptr);
+        ret.reserve( allocateInfo.commandBufferCount );
+        for ( auto const & commandBuffer : commandBuffers )
+        {
+          ret.emplace_back( device, commandBuffer, static_cast<VkCommandPool>( allocateInfo.commandPool ) );
+        }
+        // std::move is required: the copy constructor is deleted, and the
+        // implicit conversion to the expected<> return type would otherwise
+        // try to copy the lvalue.
+        return std::move(ret);
+      }
+      else
+      {
+        return android::base::unexpected(result);
+      }
+    }
+
+    
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    // Throwing constructor variant: same allocation path as create(), but a
+    // failed vkAllocateCommandBuffers raises the vulkan.hpp result exception.
+    CommandBuffers( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo const & allocateInfo ) 
+    {
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * dispatcher = device.getDispatcher();
+      std::vector<VkCommandBuffer> commandBuffers( allocateInfo.commandBufferCount );
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->vkAllocateCommandBuffers( static_cast<VkDevice>( *device ), reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), commandBuffers.data() ) );
+      if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        this->reserve( allocateInfo.commandBufferCount );
+        for ( auto const & commandBuffer : commandBuffers )
+        {
+          this->emplace_back( device, commandBuffer, static_cast<VkCommandPool>( allocateInfo.commandPool ) );
+        }
+      }
+      else
+      {
+        throwResultException( result, "vkAllocateCommandBuffers" );
+      }
+    }
+#endif
+
+    // Empty container; owns no Vulkan objects.
+    CommandBuffers( std::nullptr_t ) {}
+
+    // Move-only: copying would let two containers manage the same buffers.
+    CommandBuffers() = delete;
+    CommandBuffers( CommandBuffers const & ) = delete;
+    CommandBuffers( CommandBuffers && rhs ) = default;
+    CommandBuffers & operator=( CommandBuffers const & ) = delete;
+    CommandBuffers & operator=( CommandBuffers && rhs ) = default;
+  };
+
+
+  class CuFunctionNVX
+  {
+  public:
+    using CType = VkCuFunctionNVX;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCuFunctionNVX;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCuFunctionNVX;
+
+  public:
+
+
+    static android::base::expected<CuFunctionNVX, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) 
+    {
+      VkCuFunctionNVX function;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateCuFunctionNVX( static_cast<VkDevice>( *device ), reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkCuFunctionNVX*>( &function ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        return android::base::unexpected(result);
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CuFunctionNVX(device, function, allocator);
+    }
+
+
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    CuFunctionNVX( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( *device ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateCuFunctionNVX( static_cast<VkDevice>( *device ), reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ), reinterpret_cast<VkCuFunctionNVX*>( &function ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, "vkCreateCuFunctionNVX" );
+      }
+    }
+#endif
+
+    CuFunctionNVX( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkCuFunctionNVX function, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( *device ), m_function( function ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {}
+
+
+    CuFunctionNVX( std::nullptr_t ) {}
+
+    ~CuFunctionNVX()
+    {
+      clear();
+    }
+
+    CuFunctionNVX() = delete;
+      CuFunctionNVX( CuFunctionNVX const & ) = delete;
+    CuFunctionNVX( CuFunctionNVX && rhs ) VULKAN_HPP_NOEXCEPT
+      : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ), m_function( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_function, {} ) ), m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ), m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
+    {}
+      CuFunctionNVX & operator=( CuFunctionNVX const & ) = delete;
+    CuFunctionNVX & operator=( CuFunctionNVX && rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      if ( this != &rhs )
+      {
+          clear();
+          m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
+          m_function = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_function, {} );
+          m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
+        m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+      }
+      return *this;
+    }
+
+    VULKAN_HPP_NAMESPACE::CuFunctionNVX const & operator*() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_function;
+    }
+
+    // Destroys the owned VkCuFunctionNVX (if any) via the device dispatcher,
+    // then resets all members so the object is empty but reusable.
+    void clear() VULKAN_HPP_NOEXCEPT
+    {
+        if ( m_function )
+        {
+          getDispatcher()->vkDestroyCuFunctionNVX( static_cast<VkDevice>( m_device ), static_cast<VkCuFunctionNVX>( m_function ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+        }
+      m_device = nullptr;
+      m_function = nullptr;
+      m_allocator = nullptr;
+        m_dispatcher = nullptr;
+    }
+
+    // Relinquishes ownership: returns the raw handle WITHOUT destroying it and
+    // empties this wrapper. The caller becomes responsible for destruction.
+    VULKAN_HPP_NAMESPACE::CuFunctionNVX release()
+    {
+
+      m_device = nullptr;
+      m_allocator = nullptr;
+        m_dispatcher = nullptr;
+        return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_function, nullptr );
+    }
+
+
+    VULKAN_HPP_NAMESPACE::Device getDevice() const
+    {
+      return m_device;
+    }
+
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
+    {
+      VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+      return m_dispatcher;
+    }
+
+    void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CuFunctionNVX & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+
+      std::swap( m_device, rhs.m_device );
+      std::swap( m_function, rhs.m_function );
+      std::swap( m_allocator, rhs.m_allocator );
+      std::swap( m_dispatcher, rhs.m_dispatcher );
+    }
+
+
+
+  private:
+
+    
+    VULKAN_HPP_NAMESPACE::Device m_device = {};
+    VULKAN_HPP_NAMESPACE::CuFunctionNVX m_function = {};
+    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
+  };
+
+  // RAII owner of a VkCuModuleNVX handle (VK_NVX_binary_import). The module is
+  // destroyed with vkDestroyCuModuleNVX on clear()/destruction. Move-only.
+  class CuModuleNVX
+  {
+  public:
+    using CType = VkCuModuleNVX;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCuModuleNVX;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCuModuleNVX;
+
+  public:
+
+    // Exception-free factory: creates the module and returns either the RAII
+    // wrapper or the failing VULKAN_HPP_NAMESPACE::Result.
+    static android::base::expected<CuModuleNVX, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+    {
+      VkCuModuleNVX module;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateCuModuleNVX( static_cast<VkDevice>( *device ), reinterpret_cast<const VkCuModuleCreateInfoNVX *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkCuModuleNVX*>( &module ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        return android::base::unexpected(result);
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CuModuleNVX(device, module, allocator);
+    }
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    // Throwing constructor: creates the module directly into m_module and
+    // calls throwResultException on failure.
+    CuModuleNVX( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( *device ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {
+      // FIX: was `&module`, an undeclared identifier — the member is m_module.
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateCuModuleNVX( static_cast<VkDevice>( *device ), reinterpret_cast<const VkCuModuleCreateInfoNVX *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ), reinterpret_cast<VkCuModuleNVX*>( &m_module ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, "vkCreateCuModuleNVX" );
+      }
+    }
+#endif
+
+    // Adopts an already-created handle; takes ownership.
+    CuModuleNVX( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkCuModuleNVX module, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( *device ), m_module( module ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {}
+
+    // Empty wrapper that owns nothing.
+    CuModuleNVX( std::nullptr_t ) {}
+
+    ~CuModuleNVX()
+    {
+      clear();
+    }
+
+    CuModuleNVX() = delete;
+    CuModuleNVX( CuModuleNVX const & ) = delete;
+    CuModuleNVX( CuModuleNVX && rhs ) VULKAN_HPP_NOEXCEPT
+      : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ), m_module( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_module, {} ) ), m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ), m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
+    {}
+    CuModuleNVX & operator=( CuModuleNVX const & ) = delete;
+    // Move assignment: destroys the currently-owned module, then steals rhs's
+    // state, leaving rhs empty.
+    CuModuleNVX & operator=( CuModuleNVX && rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      if ( this != &rhs )
+      {
+        clear();
+        m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
+        m_module = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_module, {} );
+        m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
+        m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+      }
+      return *this;
+    }
+
+    VULKAN_HPP_NAMESPACE::CuModuleNVX const & operator*() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_module;
+    }
+
+    // Destroys the owned module (if any) and resets all members.
+    void clear() VULKAN_HPP_NOEXCEPT
+    {
+      if ( m_module )
+      {
+        getDispatcher()->vkDestroyCuModuleNVX( static_cast<VkDevice>( m_device ), static_cast<VkCuModuleNVX>( m_module ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+      }
+      m_device = nullptr;
+      m_module = nullptr;
+      m_allocator = nullptr;
+      m_dispatcher = nullptr;
+    }
+
+    // Relinquishes ownership without destroying the handle; caller must
+    // destroy it.
+    VULKAN_HPP_NAMESPACE::CuModuleNVX release()
+    {
+      m_device = nullptr;
+      m_allocator = nullptr;
+      m_dispatcher = nullptr;
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_module, nullptr );
+    }
+
+    VULKAN_HPP_NAMESPACE::Device getDevice() const
+    {
+      return m_device;
+    }
+
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
+    {
+      VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+      return m_dispatcher;
+    }
+
+    void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CuModuleNVX & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      std::swap( m_device, rhs.m_device );
+      std::swap( m_module, rhs.m_module );
+      std::swap( m_allocator, rhs.m_allocator );
+      std::swap( m_dispatcher, rhs.m_dispatcher );
+    }
+
+  private:
+    VULKAN_HPP_NAMESPACE::Device m_device = {};
+    VULKAN_HPP_NAMESPACE::CuModuleNVX m_module = {};
+    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
+  };
+
+  // RAII owner of a VkDebugReportCallbackEXT (VK_EXT_debug_report). The
+  // callback is destroyed with vkDestroyDebugReportCallbackEXT. Move-only.
+  class DebugReportCallbackEXT
+  {
+  public:
+    using CType = VkDebugReportCallbackEXT;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDebugReportCallbackEXT;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDebugReportCallbackEXT;
+
+  public:
+
+    // Exception-free factory: returns the wrapper or the failing Result.
+    static android::base::expected<DebugReportCallbackEXT, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+    {
+      VkDebugReportCallbackEXT callback;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( instance.getDispatcher()->vkCreateDebugReportCallbackEXT( static_cast<VkInstance>( *instance ), reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDebugReportCallbackEXT*>( &callback ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        return android::base::unexpected(result);
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DebugReportCallbackEXT(instance, callback, allocator);
+    }
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    // Throwing constructor: creates the callback directly into m_callback and
+    // calls throwResultException on failure.
+    DebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_instance( *instance ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( instance.getDispatcher() )
+    {
+      // FIX: was `&callback`, an undeclared identifier — the member is m_callback.
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( instance.getDispatcher()->vkCreateDebugReportCallbackEXT( static_cast<VkInstance>( *instance ), reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ), reinterpret_cast<VkDebugReportCallbackEXT*>( &m_callback ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, "vkCreateDebugReportCallbackEXT" );
+      }
+    }
+#endif
+
+    // Adopts an already-created handle; takes ownership.
+    DebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, VkDebugReportCallbackEXT callback, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_instance( *instance ), m_callback( callback ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( instance.getDispatcher() )
+    {}
+
+    // Empty wrapper that owns nothing.
+    DebugReportCallbackEXT( std::nullptr_t ) {}
+
+    ~DebugReportCallbackEXT()
+    {
+      clear();
+    }
+
+    DebugReportCallbackEXT() = delete;
+    DebugReportCallbackEXT( DebugReportCallbackEXT const & ) = delete;
+    DebugReportCallbackEXT( DebugReportCallbackEXT && rhs ) VULKAN_HPP_NOEXCEPT
+      : m_instance( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_instance, {} ) ), m_callback( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_callback, {} ) ), m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ), m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
+    {}
+    DebugReportCallbackEXT & operator=( DebugReportCallbackEXT const & ) = delete;
+    // Move assignment: destroys the currently-owned callback, then steals
+    // rhs's state, leaving rhs empty.
+    DebugReportCallbackEXT & operator=( DebugReportCallbackEXT && rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      if ( this != &rhs )
+      {
+        clear();
+        m_instance = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_instance, {} );
+        m_callback = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_callback, {} );
+        m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
+        m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+      }
+      return *this;
+    }
+
+    VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT const & operator*() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_callback;
+    }
+
+    // Destroys the owned callback (if any) and resets all members.
+    void clear() VULKAN_HPP_NOEXCEPT
+    {
+      if ( m_callback )
+      {
+        getDispatcher()->vkDestroyDebugReportCallbackEXT( static_cast<VkInstance>( m_instance ), static_cast<VkDebugReportCallbackEXT>( m_callback ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+      }
+      m_instance = nullptr;
+      m_callback = nullptr;
+      m_allocator = nullptr;
+      m_dispatcher = nullptr;
+    }
+
+    // Relinquishes ownership without destroying the handle; caller must
+    // destroy it.
+    VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT release()
+    {
+      m_instance = nullptr;
+      m_allocator = nullptr;
+      m_dispatcher = nullptr;
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_callback, nullptr );
+    }
+
+    VULKAN_HPP_NAMESPACE::Instance getInstance() const
+    {
+      return m_instance;
+    }
+
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * getDispatcher() const
+    {
+      VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+      return m_dispatcher;
+    }
+
+    void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DebugReportCallbackEXT & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      std::swap( m_instance, rhs.m_instance );
+      std::swap( m_callback, rhs.m_callback );
+      std::swap( m_allocator, rhs.m_allocator );
+      std::swap( m_dispatcher, rhs.m_dispatcher );
+    }
+
+  private:
+    VULKAN_HPP_NAMESPACE::Instance m_instance = {};
+    VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT m_callback = {};
+    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * m_dispatcher = nullptr;
+  };
+
+  // RAII owner of a VkDebugUtilsMessengerEXT (VK_EXT_debug_utils). The
+  // messenger is destroyed with vkDestroyDebugUtilsMessengerEXT. Move-only.
+  class DebugUtilsMessengerEXT
+  {
+  public:
+    using CType = VkDebugUtilsMessengerEXT;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDebugUtilsMessengerEXT;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+
+  public:
+
+    // Exception-free factory: returns the wrapper or the failing Result.
+    static android::base::expected<DebugUtilsMessengerEXT, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+    {
+      VkDebugUtilsMessengerEXT messenger;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( instance.getDispatcher()->vkCreateDebugUtilsMessengerEXT( static_cast<VkInstance>( *instance ), reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDebugUtilsMessengerEXT*>( &messenger ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        return android::base::unexpected(result);
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DebugUtilsMessengerEXT(instance, messenger, allocator);
+    }
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    // Throwing constructor: creates the messenger directly into m_messenger
+    // and calls throwResultException on failure.
+    DebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_instance( *instance ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( instance.getDispatcher() )
+    {
+      // FIX: was `&messenger`, an undeclared identifier — the member is m_messenger.
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( instance.getDispatcher()->vkCreateDebugUtilsMessengerEXT( static_cast<VkInstance>( *instance ), reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ), reinterpret_cast<VkDebugUtilsMessengerEXT*>( &m_messenger ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, "vkCreateDebugUtilsMessengerEXT" );
+      }
+    }
+#endif
+
+    // Adopts an already-created handle; takes ownership.
+    DebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, VkDebugUtilsMessengerEXT messenger, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_instance( *instance ), m_messenger( messenger ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( instance.getDispatcher() )
+    {}
+
+    // Empty wrapper that owns nothing.
+    DebugUtilsMessengerEXT( std::nullptr_t ) {}
+
+    ~DebugUtilsMessengerEXT()
+    {
+      clear();
+    }
+
+    DebugUtilsMessengerEXT() = delete;
+    DebugUtilsMessengerEXT( DebugUtilsMessengerEXT const & ) = delete;
+    DebugUtilsMessengerEXT( DebugUtilsMessengerEXT && rhs ) VULKAN_HPP_NOEXCEPT
+      : m_instance( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_instance, {} ) ), m_messenger( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_messenger, {} ) ), m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ), m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
+    {}
+    DebugUtilsMessengerEXT & operator=( DebugUtilsMessengerEXT const & ) = delete;
+    // Move assignment: destroys the currently-owned messenger, then steals
+    // rhs's state, leaving rhs empty.
+    DebugUtilsMessengerEXT & operator=( DebugUtilsMessengerEXT && rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      if ( this != &rhs )
+      {
+        clear();
+        m_instance = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_instance, {} );
+        m_messenger = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_messenger, {} );
+        m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
+        m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+      }
+      return *this;
+    }
+
+    VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT const & operator*() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_messenger;
+    }
+
+    // Destroys the owned messenger (if any) and resets all members.
+    void clear() VULKAN_HPP_NOEXCEPT
+    {
+      if ( m_messenger )
+      {
+        getDispatcher()->vkDestroyDebugUtilsMessengerEXT( static_cast<VkInstance>( m_instance ), static_cast<VkDebugUtilsMessengerEXT>( m_messenger ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+      }
+      m_instance = nullptr;
+      m_messenger = nullptr;
+      m_allocator = nullptr;
+      m_dispatcher = nullptr;
+    }
+
+    // Relinquishes ownership without destroying the handle; caller must
+    // destroy it.
+    VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT release()
+    {
+      m_instance = nullptr;
+      m_allocator = nullptr;
+      m_dispatcher = nullptr;
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_messenger, nullptr );
+    }
+
+    VULKAN_HPP_NAMESPACE::Instance getInstance() const
+    {
+      return m_instance;
+    }
+
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * getDispatcher() const
+    {
+      VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+      return m_dispatcher;
+    }
+
+    void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DebugUtilsMessengerEXT & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      std::swap( m_instance, rhs.m_instance );
+      std::swap( m_messenger, rhs.m_messenger );
+      std::swap( m_allocator, rhs.m_allocator );
+      std::swap( m_dispatcher, rhs.m_dispatcher );
+    }
+
+  private:
+    VULKAN_HPP_NAMESPACE::Instance m_instance = {};
+    VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT m_messenger = {};
+    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * m_dispatcher = nullptr;
+  };
+
+  // RAII owner of a VkDeferredOperationKHR (VK_KHR_deferred_host_operations).
+  // The operation is destroyed with vkDestroyDeferredOperationKHR. Move-only.
+  class DeferredOperationKHR
+  {
+  public:
+    using CType = VkDeferredOperationKHR;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDeferredOperationKHR;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+
+  public:
+
+    // Exception-free factory: returns the wrapper or the failing Result.
+    // Note: vkCreateDeferredOperationKHR takes no create-info structure.
+    static android::base::expected<DeferredOperationKHR, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+    {
+      VkDeferredOperationKHR operation;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateDeferredOperationKHR( static_cast<VkDevice>( *device ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDeferredOperationKHR*>( &operation ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        return android::base::unexpected(result);
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR(device, operation, allocator);
+    }
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    // Throwing constructor: creates the operation directly into m_operation
+    // and calls throwResultException on failure.
+    DeferredOperationKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( *device ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {
+      // FIX: was `&operation`, an undeclared identifier — the member is m_operation.
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateDeferredOperationKHR( static_cast<VkDevice>( *device ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ), reinterpret_cast<VkDeferredOperationKHR*>( &m_operation ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, "vkCreateDeferredOperationKHR" );
+      }
+    }
+#endif
+
+    // Adopts an already-created handle; takes ownership.
+    DeferredOperationKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkDeferredOperationKHR operation, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( *device ), m_operation( operation ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {}
+
+    // Empty wrapper that owns nothing.
+    DeferredOperationKHR( std::nullptr_t ) {}
+
+    ~DeferredOperationKHR()
+    {
+      clear();
+    }
+
+    DeferredOperationKHR() = delete;
+    DeferredOperationKHR( DeferredOperationKHR const & ) = delete;
+    DeferredOperationKHR( DeferredOperationKHR && rhs ) VULKAN_HPP_NOEXCEPT
+      : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ), m_operation( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_operation, {} ) ), m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ), m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
+    {}
+    DeferredOperationKHR & operator=( DeferredOperationKHR const & ) = delete;
+    // Move assignment: destroys the currently-owned operation, then steals
+    // rhs's state, leaving rhs empty.
+    DeferredOperationKHR & operator=( DeferredOperationKHR && rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      if ( this != &rhs )
+      {
+        clear();
+        m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
+        m_operation = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_operation, {} );
+        m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
+        m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+      }
+      return *this;
+    }
+
+    VULKAN_HPP_NAMESPACE::DeferredOperationKHR const & operator*() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_operation;
+    }
+
+    // Destroys the owned operation (if any) and resets all members.
+    void clear() VULKAN_HPP_NOEXCEPT
+    {
+      if ( m_operation )
+      {
+        getDispatcher()->vkDestroyDeferredOperationKHR( static_cast<VkDevice>( m_device ), static_cast<VkDeferredOperationKHR>( m_operation ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+      }
+      m_device = nullptr;
+      m_operation = nullptr;
+      m_allocator = nullptr;
+      m_dispatcher = nullptr;
+    }
+
+    // Relinquishes ownership without destroying the handle; caller must
+    // destroy it.
+    VULKAN_HPP_NAMESPACE::DeferredOperationKHR release()
+    {
+      m_device = nullptr;
+      m_allocator = nullptr;
+      m_dispatcher = nullptr;
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_operation, nullptr );
+    }
+
+    VULKAN_HPP_NAMESPACE::Device getDevice() const
+    {
+      return m_device;
+    }
+
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
+    {
+      VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+      return m_dispatcher;
+    }
+
+    void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      std::swap( m_device, rhs.m_device );
+      std::swap( m_operation, rhs.m_operation );
+      std::swap( m_allocator, rhs.m_allocator );
+      std::swap( m_dispatcher, rhs.m_dispatcher );
+    }
+
+  //=== VK_KHR_deferred_host_operations ===
+
+    VULKAN_HPP_NODISCARD  uint32_t getMaxConcurrency(  ) const  VULKAN_HPP_NOEXCEPT;
+
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::Result getResult(  ) const  VULKAN_HPP_NOEXCEPT;
+
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::Result join(  ) const ;
+
+  private:
+    VULKAN_HPP_NAMESPACE::Device m_device = {};
+    VULKAN_HPP_NAMESPACE::DeferredOperationKHR m_operation = {};
+    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
+  };
+
+  // RAII owner of a VkDescriptorPool. The pool is destroyed with
+  // vkDestroyDescriptorPool on clear()/destruction. Move-only.
+  class DescriptorPool
+  {
+  public:
+    using CType = VkDescriptorPool;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorPool;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorPool;
+
+  public:
+
+    // Exception-free factory: returns the wrapper or the failing Result.
+    static android::base::expected<DescriptorPool, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+    {
+      VkDescriptorPool descriptorPool;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateDescriptorPool( static_cast<VkDevice>( *device ), reinterpret_cast<const VkDescriptorPoolCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDescriptorPool*>( &descriptorPool ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        return android::base::unexpected(result);
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorPool(device, descriptorPool, allocator);
+    }
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    // Throwing constructor: creates the pool directly into m_descriptorPool
+    // and calls throwResultException on failure.
+    DescriptorPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( *device ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {
+      // FIX: was `&descriptorPool`, an undeclared identifier — the member is m_descriptorPool.
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateDescriptorPool( static_cast<VkDevice>( *device ), reinterpret_cast<const VkDescriptorPoolCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ), reinterpret_cast<VkDescriptorPool*>( &m_descriptorPool ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, "vkCreateDescriptorPool" );
+      }
+    }
+#endif
+
+    // Adopts an already-created handle; takes ownership.
+    DescriptorPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkDescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( *device ), m_descriptorPool( descriptorPool ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {}
+
+    // Empty wrapper that owns nothing.
+    DescriptorPool( std::nullptr_t ) {}
+
+    ~DescriptorPool()
+    {
+      clear();
+    }
+
+    DescriptorPool() = delete;
+    DescriptorPool( DescriptorPool const & ) = delete;
+    DescriptorPool( DescriptorPool && rhs ) VULKAN_HPP_NOEXCEPT
+      : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ), m_descriptorPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_descriptorPool, {} ) ), m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ), m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
+    {}
+    DescriptorPool & operator=( DescriptorPool const & ) = delete;
+    // Move assignment: destroys the currently-owned pool, then steals rhs's
+    // state, leaving rhs empty.
+    DescriptorPool & operator=( DescriptorPool && rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      if ( this != &rhs )
+      {
+        clear();
+        m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
+        m_descriptorPool = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_descriptorPool, {} );
+        m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
+        m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+      }
+      return *this;
+    }
+
+    VULKAN_HPP_NAMESPACE::DescriptorPool const & operator*() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorPool;
+    }
+
+    // Destroys the owned pool (if any) and resets all members.
+    void clear() VULKAN_HPP_NOEXCEPT
+    {
+      if ( m_descriptorPool )
+      {
+        getDispatcher()->vkDestroyDescriptorPool( static_cast<VkDevice>( m_device ), static_cast<VkDescriptorPool>( m_descriptorPool ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+      }
+      m_device = nullptr;
+      m_descriptorPool = nullptr;
+      m_allocator = nullptr;
+      m_dispatcher = nullptr;
+    }
+
+    // Relinquishes ownership without destroying the handle; caller must
+    // destroy it.
+    VULKAN_HPP_NAMESPACE::DescriptorPool release()
+    {
+      m_device = nullptr;
+      m_allocator = nullptr;
+      m_dispatcher = nullptr;
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_descriptorPool, nullptr );
+    }
+
+    VULKAN_HPP_NAMESPACE::Device getDevice() const
+    {
+      return m_device;
+    }
+
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
+    {
+      VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+      return m_dispatcher;
+    }
+
+    void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorPool & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      std::swap( m_device, rhs.m_device );
+      std::swap( m_descriptorPool, rhs.m_descriptorPool );
+      std::swap( m_allocator, rhs.m_allocator );
+      std::swap( m_dispatcher, rhs.m_dispatcher );
+    }
+
+  //=== VK_VERSION_1_0 ===
+
+     void reset( VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const  VULKAN_HPP_NOEXCEPT;
+
+  private:
+    VULKAN_HPP_NAMESPACE::Device m_device = {};
+    VULKAN_HPP_NAMESPACE::DescriptorPool m_descriptorPool = {};
+    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
+  };
+
+  // RAII wrapper for a single VkDescriptorSet. It remembers the pool the set was
+  // allocated from so the destructor/clear() can return the set to that pool.
+  class DescriptorSet
+  {
+  public:
+    using CType = VkDescriptorSet;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSet;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSet;
+
+  public:
+
+
+    // Adopts an already-allocated set; 'descriptorPool' is kept so clear() knows
+    // which pool to free the set back into.
+    DescriptorSet( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkDescriptorSet descriptorSet, VkDescriptorPool descriptorPool )
+      : m_device( *device ), m_descriptorPool( descriptorPool ), m_descriptorSet( descriptorSet ), m_dispatcher( device.getDispatcher() )
+    {}
+
+
+    // Empty, non-owning wrapper.
+    DescriptorSet( std::nullptr_t ) {}
+
+    ~DescriptorSet()
+    {
+      clear();
+    }
+
+    DescriptorSet() = delete;
+      DescriptorSet( DescriptorSet const & ) = delete;
+    // Move construction transfers ownership and leaves rhs empty.
+    DescriptorSet( DescriptorSet && rhs ) VULKAN_HPP_NOEXCEPT
+      : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ), m_descriptorPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_descriptorPool, {} ) ), m_descriptorSet( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_descriptorSet, {} ) ), m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
+    {}
+      DescriptorSet & operator=( DescriptorSet const & ) = delete;
+    // Move assignment frees the currently held set first, then steals rhs's state.
+    DescriptorSet & operator=( DescriptorSet && rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      if ( this != &rhs )
+      {
+          clear();
+          m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
+          m_descriptorPool = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_descriptorPool, {} );
+          m_descriptorSet = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_descriptorSet, {} );
+        m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+      }
+      return *this;
+    }
+
+    // Access the wrapped Vulkan handle without transferring ownership.
+    VULKAN_HPP_NAMESPACE::DescriptorSet const & operator*() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorSet;
+    }
+
+    // Returns the set to its pool via vkFreeDescriptorSets (if owned), then
+    // resets all members.
+    // NOTE(review): vkFreeDescriptorSets is only valid when the pool was created
+    // with VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT — confirm callers.
+    void clear() VULKAN_HPP_NOEXCEPT
+    {
+        if ( m_descriptorSet )
+        {
+          getDispatcher()->vkFreeDescriptorSets( static_cast<VkDevice>( m_device ), static_cast<VkDescriptorPool>( m_descriptorPool ), 1, reinterpret_cast<VkDescriptorSet const *>( &m_descriptorSet ) );
+        }
+      m_device = nullptr;
+      m_descriptorPool = nullptr;
+      m_descriptorSet = nullptr;
+        m_dispatcher = nullptr;
+    }
+
+    // Relinquishes ownership: returns the handle without freeing it and resets
+    // all members to the empty state.
+    VULKAN_HPP_NAMESPACE::DescriptorSet release()
+    {
+
+      m_device = nullptr;
+      m_descriptorPool = nullptr;
+        m_dispatcher = nullptr;
+        return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_descriptorSet, nullptr );
+    }
+
+
+    // Device this set's pool belongs to (null after clear()/release()).
+    VULKAN_HPP_NAMESPACE::Device getDevice() const
+    {
+      return m_device;
+    }
+
+    // Dispatcher used for all calls; asserts header-version consistency.
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
+    {
+      VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+      return m_dispatcher;
+    }
+
+    // Member-wise exchange of ownership with rhs; no Vulkan calls are made.
+    void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorSet & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+
+      std::swap( m_device, rhs.m_device );
+      std::swap( m_descriptorPool, rhs.m_descriptorPool );
+      std::swap( m_descriptorSet, rhs.m_descriptorSet );
+      std::swap( m_dispatcher, rhs.m_dispatcher );
+    }
+
+
+  //=== VK_VERSION_1_1 ===
+
+    template <typename DataType>
+     void updateWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, DataType const & data ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_KHR_descriptor_update_template ===
+
+    template <typename DataType>
+     void updateWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, DataType const & data ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_VALVE_descriptor_set_host_mapping ===
+
+    
+    VULKAN_HPP_NODISCARD  void * getHostMappingVALVE(  ) const  VULKAN_HPP_NOEXCEPT;
+
+
+  private:
+
+    
+    VULKAN_HPP_NAMESPACE::Device m_device = {};
+    VULKAN_HPP_NAMESPACE::DescriptorPool m_descriptorPool = {};
+    VULKAN_HPP_NAMESPACE::DescriptorSet m_descriptorSet = {};
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
+  };
+
+  // Batch of RAII DescriptorSet wrappers allocated with one
+  // vkAllocateDescriptorSets call; behaves as a std::vector of them.
+  class DescriptorSets : public std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorSet>
+  {
+  public:
+    
+
+    // Non-throwing factory: allocates allocateInfo.descriptorSetCount sets and
+    // wraps each one; on failure returns the VkResult instead of throwing.
+    static android::base::expected<DescriptorSets, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo const & allocateInfo ) 
+    {
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * dispatcher = device.getDispatcher();
+      std::vector<VkDescriptorSet> descriptorSets( allocateInfo.descriptorSetCount );
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->vkAllocateDescriptorSets( static_cast<VkDevice>( *device ), reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), descriptorSets.data() ) );
+      if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorSets ret(nullptr);
+        ret.reserve( allocateInfo.descriptorSetCount );
+        for ( auto const & descriptorSet : descriptorSets )
+        {
+          // Each wrapper remembers the pool so it can free itself on destruction.
+          ret.emplace_back( device, descriptorSet, static_cast<VkDescriptorPool>( allocateInfo.descriptorPool ) );
+        }
+        return std::move(ret);
+      }
+      else
+      {
+        return android::base::unexpected(result);
+      }
+    }
+
+    
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    // Throwing constructor: same allocation path as create(), but throws via
+    // throwResultException on failure.
+    DescriptorSets( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo const & allocateInfo ) 
+    {
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * dispatcher = device.getDispatcher();
+      std::vector<VkDescriptorSet> descriptorSets( allocateInfo.descriptorSetCount );
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->vkAllocateDescriptorSets( static_cast<VkDevice>( *device ), reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), descriptorSets.data() ) );
+      if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        this->reserve( allocateInfo.descriptorSetCount );
+        for ( auto const & descriptorSet : descriptorSets )
+        {
+          this->emplace_back( device, descriptorSet, static_cast<VkDescriptorPool>( allocateInfo.descriptorPool ) );
+        }
+      }
+      else
+      {
+        throwResultException( result, "vkAllocateDescriptorSets" );
+      }
+    }
+#endif
+
+    // Empty container (used by create() before filling in the wrappers).
+    DescriptorSets( std::nullptr_t ) {}
+
+    DescriptorSets() = delete;
+    DescriptorSets( DescriptorSets const & ) = delete;
+    DescriptorSets( DescriptorSets && rhs ) = default;
+    DescriptorSets & operator=( DescriptorSets const & ) = delete;
+    DescriptorSets & operator=( DescriptorSets && rhs ) = default;
+  };
+
+
+  // RAII wrapper for VkDescriptorSetLayout: owns the layout and destroys it via
+  // vkDestroyDescriptorSetLayout on clear()/destruction.
+  class DescriptorSetLayout
+  {
+  public:
+    using CType = VkDescriptorSetLayout;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSetLayout;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSetLayout;
+
+  public:
+
+
+    // Non-throwing factory: creates the layout or returns the failing VkResult.
+    static android::base::expected<DescriptorSetLayout, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) 
+    {
+      VkDescriptorSetLayout descriptorSetLayout;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateDescriptorSetLayout( static_cast<VkDevice>( *device ), reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDescriptorSetLayout*>( &descriptorSetLayout ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        return android::base::unexpected(result);
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorSetLayout(device, descriptorSetLayout, allocator);
+    }
+
+
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    // Throwing constructor: creates the layout, throws on failure.
+    DescriptorSetLayout( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( *device ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {
+      // BUGFIX: there is no local 'descriptorSetLayout' in this constructor; the
+      // new handle must be written directly into the m_descriptorSetLayout member.
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateDescriptorSetLayout( static_cast<VkDevice>( *device ), reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ), reinterpret_cast<VkDescriptorSetLayout*>( &m_descriptorSetLayout ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, "vkCreateDescriptorSetLayout" );
+      }
+    }
+#endif
+
+    // Adopts an already-created layout handle.
+    DescriptorSetLayout( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkDescriptorSetLayout descriptorSetLayout, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( *device ), m_descriptorSetLayout( descriptorSetLayout ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {}
+
+
+    // Empty, non-owning wrapper.
+    DescriptorSetLayout( std::nullptr_t ) {}
+
+    ~DescriptorSetLayout()
+    {
+      clear();
+    }
+
+    DescriptorSetLayout() = delete;
+      DescriptorSetLayout( DescriptorSetLayout const & ) = delete;
+    DescriptorSetLayout( DescriptorSetLayout && rhs ) VULKAN_HPP_NOEXCEPT
+      : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ), m_descriptorSetLayout( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_descriptorSetLayout, {} ) ), m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ), m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
+    {}
+      DescriptorSetLayout & operator=( DescriptorSetLayout const & ) = delete;
+    // Move assignment destroys the currently held layout, then steals rhs's state.
+    DescriptorSetLayout & operator=( DescriptorSetLayout && rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      if ( this != &rhs )
+      {
+          clear();
+          m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
+          m_descriptorSetLayout = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_descriptorSetLayout, {} );
+          m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
+        m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+      }
+      return *this;
+    }
+
+    // Access the wrapped Vulkan handle without transferring ownership.
+    VULKAN_HPP_NAMESPACE::DescriptorSetLayout const & operator*() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorSetLayout;
+    }
+
+    // Destroys the owned layout (if any), then resets all members.
+    void clear() VULKAN_HPP_NOEXCEPT
+    {
+        if ( m_descriptorSetLayout )
+        {
+          getDispatcher()->vkDestroyDescriptorSetLayout( static_cast<VkDevice>( m_device ), static_cast<VkDescriptorSetLayout>( m_descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+        }
+      m_device = nullptr;
+      m_descriptorSetLayout = nullptr;
+      m_allocator = nullptr;
+        m_dispatcher = nullptr;
+    }
+
+    // Relinquishes ownership: returns the handle without destroying it.
+    VULKAN_HPP_NAMESPACE::DescriptorSetLayout release()
+    {
+
+      m_device = nullptr;
+      m_allocator = nullptr;
+        m_dispatcher = nullptr;
+        return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_descriptorSetLayout, nullptr );
+    }
+
+
+    VULKAN_HPP_NAMESPACE::Device getDevice() const
+    {
+      return m_device;
+    }
+
+    // Dispatcher used for all calls; asserts header-version consistency.
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
+    {
+      VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+      return m_dispatcher;
+    }
+
+    // Member-wise exchange of ownership with rhs; no Vulkan calls are made.
+    void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorSetLayout & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+
+      std::swap( m_device, rhs.m_device );
+      std::swap( m_descriptorSetLayout, rhs.m_descriptorSetLayout );
+      std::swap( m_allocator, rhs.m_allocator );
+      std::swap( m_dispatcher, rhs.m_dispatcher );
+    }
+
+
+  //=== VK_EXT_descriptor_buffer ===
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::DeviceSize getSizeEXT(  ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::DeviceSize getBindingOffsetEXT( uint32_t binding ) const  VULKAN_HPP_NOEXCEPT;
+
+
+  private:
+
+    
+    VULKAN_HPP_NAMESPACE::Device m_device = {};
+    VULKAN_HPP_NAMESPACE::DescriptorSetLayout m_descriptorSetLayout = {};
+    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
+  };
+
+  // RAII wrapper for VkDescriptorUpdateTemplate: owns the template and destroys
+  // it via vkDestroyDescriptorUpdateTemplate on clear()/destruction.
+  class DescriptorUpdateTemplate
+  {
+  public:
+    using CType = VkDescriptorUpdateTemplate;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorUpdateTemplate;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorUpdateTemplate;
+
+  public:
+
+
+    // Non-throwing factory: creates the template or returns the failing VkResult.
+    static android::base::expected<DescriptorUpdateTemplate, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) 
+    {
+      VkDescriptorUpdateTemplate descriptorUpdateTemplate;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateDescriptorUpdateTemplate( static_cast<VkDevice>( *device ), reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDescriptorUpdateTemplate*>( &descriptorUpdateTemplate ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        return android::base::unexpected(result);
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate(device, descriptorUpdateTemplate, allocator);
+    }
+
+
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    // Throwing constructor: creates the template, throws on failure.
+    DescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( *device ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {
+      // BUGFIX: there is no local 'descriptorUpdateTemplate' in this constructor;
+      // write the new handle directly into the m_descriptorUpdateTemplate member.
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateDescriptorUpdateTemplate( static_cast<VkDevice>( *device ), reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ), reinterpret_cast<VkDescriptorUpdateTemplate*>( &m_descriptorUpdateTemplate ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, "vkCreateDescriptorUpdateTemplate" );
+      }
+    }
+#endif
+
+    // Adopts an already-created template handle.
+    DescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( *device ), m_descriptorUpdateTemplate( descriptorUpdateTemplate ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {}
+
+
+    // Empty, non-owning wrapper.
+    DescriptorUpdateTemplate( std::nullptr_t ) {}
+
+    ~DescriptorUpdateTemplate()
+    {
+      clear();
+    }
+
+    DescriptorUpdateTemplate() = delete;
+      DescriptorUpdateTemplate( DescriptorUpdateTemplate const & ) = delete;
+    DescriptorUpdateTemplate( DescriptorUpdateTemplate && rhs ) VULKAN_HPP_NOEXCEPT
+      : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ), m_descriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_descriptorUpdateTemplate, {} ) ), m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ), m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
+    {}
+      DescriptorUpdateTemplate & operator=( DescriptorUpdateTemplate const & ) = delete;
+    // Move assignment destroys the currently held template, then steals rhs's state.
+    DescriptorUpdateTemplate & operator=( DescriptorUpdateTemplate && rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      if ( this != &rhs )
+      {
+          clear();
+          m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
+          m_descriptorUpdateTemplate = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_descriptorUpdateTemplate, {} );
+          m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
+        m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+      }
+      return *this;
+    }
+
+    // Access the wrapped Vulkan handle without transferring ownership.
+    VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate const & operator*() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorUpdateTemplate;
+    }
+
+    // Destroys the owned template (if any), then resets all members.
+    void clear() VULKAN_HPP_NOEXCEPT
+    {
+        if ( m_descriptorUpdateTemplate )
+        {
+          getDispatcher()->vkDestroyDescriptorUpdateTemplate( static_cast<VkDevice>( m_device ), static_cast<VkDescriptorUpdateTemplate>( m_descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+        }
+      m_device = nullptr;
+      m_descriptorUpdateTemplate = nullptr;
+      m_allocator = nullptr;
+        m_dispatcher = nullptr;
+    }
+
+    // Relinquishes ownership: returns the handle without destroying it.
+    VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate release()
+    {
+
+      m_device = nullptr;
+      m_allocator = nullptr;
+        m_dispatcher = nullptr;
+        return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_descriptorUpdateTemplate, nullptr );
+    }
+
+
+    VULKAN_HPP_NAMESPACE::Device getDevice() const
+    {
+      return m_device;
+    }
+
+    // Dispatcher used for all calls; asserts header-version consistency.
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
+    {
+      VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+      return m_dispatcher;
+    }
+
+    // Member-wise exchange of ownership with rhs; no Vulkan calls are made.
+    void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+
+      std::swap( m_device, rhs.m_device );
+      std::swap( m_descriptorUpdateTemplate, rhs.m_descriptorUpdateTemplate );
+      std::swap( m_allocator, rhs.m_allocator );
+      std::swap( m_dispatcher, rhs.m_dispatcher );
+    }
+
+
+
+  private:
+
+    
+    VULKAN_HPP_NAMESPACE::Device m_device = {};
+    VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate m_descriptorUpdateTemplate = {};
+    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
+  };
+
+  // RAII wrapper for VkDeviceMemory: owns the allocation and frees it via
+  // vkFreeMemory on clear()/destruction.
+  class DeviceMemory
+  {
+  public:
+    using CType = VkDeviceMemory;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDeviceMemory;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDeviceMemory;
+
+  public:
+
+
+    // Non-throwing factory: allocates the memory or returns the failing VkResult.
+    static android::base::expected<DeviceMemory, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::MemoryAllocateInfo const & allocateInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) 
+    {
+      VkDeviceMemory memory;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkAllocateMemory( static_cast<VkDevice>( *device ), reinterpret_cast<const VkMemoryAllocateInfo *>( &allocateInfo ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDeviceMemory*>( &memory ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        return android::base::unexpected(result);
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceMemory(device, memory, allocator);
+    }
+
+
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    // Throwing constructor: allocates the memory, throws on failure.
+    DeviceMemory( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::MemoryAllocateInfo const & allocateInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( *device ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {
+      // BUGFIX: there is no local 'memory' in this constructor; the new handle
+      // must be written directly into the m_memory member.
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkAllocateMemory( static_cast<VkDevice>( *device ), reinterpret_cast<const VkMemoryAllocateInfo *>( &allocateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ), reinterpret_cast<VkDeviceMemory*>( &m_memory ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, "vkAllocateMemory" );
+      }
+    }
+#endif
+
+    // Adopts an already-allocated memory handle.
+    DeviceMemory( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkDeviceMemory memory, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( *device ), m_memory( memory ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {}
+
+
+    // Empty, non-owning wrapper.
+    DeviceMemory( std::nullptr_t ) {}
+
+    ~DeviceMemory()
+    {
+      clear();
+    }
+
+    DeviceMemory() = delete;
+      DeviceMemory( DeviceMemory const & ) = delete;
+    DeviceMemory( DeviceMemory && rhs ) VULKAN_HPP_NOEXCEPT
+      : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ), m_memory( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_memory, {} ) ), m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ), m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
+    {}
+      DeviceMemory & operator=( DeviceMemory const & ) = delete;
+    // Move assignment frees the currently held allocation, then steals rhs's state.
+    DeviceMemory & operator=( DeviceMemory && rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      if ( this != &rhs )
+      {
+          clear();
+          m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
+          m_memory = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_memory, {} );
+          m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
+        m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+      }
+      return *this;
+    }
+
+    // Access the wrapped Vulkan handle without transferring ownership.
+    VULKAN_HPP_NAMESPACE::DeviceMemory const & operator*() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_memory;
+    }
+
+    // Frees the owned allocation (if any) via vkFreeMemory, then resets members.
+    void clear() VULKAN_HPP_NOEXCEPT
+    {
+        if ( m_memory )
+        {
+          getDispatcher()->vkFreeMemory( static_cast<VkDevice>( m_device ), static_cast<VkDeviceMemory>( m_memory ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+        }
+      m_device = nullptr;
+      m_memory = nullptr;
+      m_allocator = nullptr;
+        m_dispatcher = nullptr;
+    }
+
+    // Relinquishes ownership: returns the handle without freeing it.
+    VULKAN_HPP_NAMESPACE::DeviceMemory release()
+    {
+
+      m_device = nullptr;
+      m_allocator = nullptr;
+        m_dispatcher = nullptr;
+        return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_memory, nullptr );
+    }
+
+
+    VULKAN_HPP_NAMESPACE::Device getDevice() const
+    {
+      return m_device;
+    }
+
+    // Dispatcher used for all calls; asserts header-version consistency.
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
+    {
+      VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+      return m_dispatcher;
+    }
+
+    // Member-wise exchange of ownership with rhs; no Vulkan calls are made.
+    void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceMemory & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+
+      std::swap( m_device, rhs.m_device );
+      std::swap( m_memory, rhs.m_memory );
+      std::swap( m_allocator, rhs.m_allocator );
+      std::swap( m_dispatcher, rhs.m_dispatcher );
+    }
+
+
+  //=== VK_VERSION_1_0 ===
+
+    
+    VULKAN_HPP_NODISCARD  void * mapMemory( VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::DeviceSize size, VULKAN_HPP_NAMESPACE::MemoryMapFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const ;
+
+    
+     void unmapMemory(  ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::DeviceSize getCommitment(  ) const  VULKAN_HPP_NOEXCEPT;
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_NV_external_memory_win32 ===
+
+    
+    VULKAN_HPP_NODISCARD  HANDLE getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType ) const ;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_EXT_pageable_device_local_memory ===
+
+    
+     void setPriorityEXT( float priority ) const  VULKAN_HPP_NOEXCEPT;
+
+
+  private:
+
+    
+    VULKAN_HPP_NAMESPACE::Device m_device = {};
+    VULKAN_HPP_NAMESPACE::DeviceMemory m_memory = {};
+    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
+  };
+
+  class DisplayKHR
+  {
+  public:
+    using CType = VkDisplayKHR;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDisplayKHR;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayKHR;
+
+  public:
+
+
+    // Non-throwing factory: obtains the display attached to the given DRM fd and
+    // connector via vkGetDrmDisplayEXT; returns the failing VkResult on error.
+    static android::base::expected<DisplayKHR, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice, int32_t drmFd, uint32_t connectorId ) 
+    {
+      VkDisplayKHR display;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( physicalDevice.getDispatcher()->vkGetDrmDisplayEXT( static_cast<VkPhysicalDevice>( *physicalDevice ), drmFd, connectorId, reinterpret_cast<VkDisplayKHR*>( &display ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        return android::base::unexpected(result);
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR(physicalDevice, display);
+    }
+
+#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT )
+
+    // Non-throwing factory: obtains the display for an X11 RandR output via
+    // vkGetRandROutputDisplayEXT; returns the failing VkResult on error.
+    static android::base::expected<DisplayKHR, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice, Display & dpy, RROutput rrOutput ) 
+    {
+      VkDisplayKHR display;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( physicalDevice.getDispatcher()->vkGetRandROutputDisplayEXT( static_cast<VkPhysicalDevice>( *physicalDevice ), &dpy, rrOutput, reinterpret_cast<VkDisplayKHR*>( &display ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        return android::base::unexpected(result);
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR(physicalDevice, display);
+    }
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+
+    // Non-throwing factory: obtains a WinRT display via vkGetWinrtDisplayNV;
+    // returns the failing VkResult on error.
+    static android::base::expected<DisplayKHR, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice, uint32_t deviceRelativeId ) 
+    {
+      VkDisplayKHR display;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( physicalDevice.getDispatcher()->vkGetWinrtDisplayNV( static_cast<VkPhysicalDevice>( *physicalDevice ), deviceRelativeId, reinterpret_cast<VkDisplayKHR*>( &display ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        return android::base::unexpected(result);
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR(physicalDevice, display);
+    }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    // Throwing constructor: acquires the DRM display; throws on failure.
+    DisplayKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice, int32_t drmFd, uint32_t connectorId )
+      : m_physicalDevice( *physicalDevice ), m_dispatcher( physicalDevice.getDispatcher() )
+    {
+      // BUGFIX: no local 'display' exists here; write the handle into m_display.
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( physicalDevice.getDispatcher()->vkGetDrmDisplayEXT( static_cast<VkPhysicalDevice>( *physicalDevice ), drmFd, connectorId, reinterpret_cast<VkDisplayKHR*>( &m_display ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, "vkGetDrmDisplayEXT" );
+      }
+    }
+#endif
+
+#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT )
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    // Throwing constructor: acquires the RandR-output display; throws on failure.
+    DisplayKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice, Display & dpy, RROutput rrOutput )
+      : m_physicalDevice( *physicalDevice ), m_dispatcher( physicalDevice.getDispatcher() )
+    {
+      // BUGFIX: no local 'display' exists here; write the handle into m_display.
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( physicalDevice.getDispatcher()->vkGetRandROutputDisplayEXT( static_cast<VkPhysicalDevice>( *physicalDevice ), &dpy, rrOutput, reinterpret_cast<VkDisplayKHR*>( &m_display ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, "vkGetRandROutputDisplayEXT" );
+      }
+    }
+#endif
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    // Throwing counterpart of the WinRT create() factory above
+    // (vkGetWinrtDisplayNV).  Only compiled when exceptions are enabled.
+    // Fix: write the handle into the member m_display -- the previous code
+    // referenced a non-existent local 'display' and did not compile when
+    // exceptions were enabled.
+    DisplayKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice, uint32_t deviceRelativeId )
+      : m_physicalDevice( *physicalDevice ), m_dispatcher( physicalDevice.getDispatcher() )
+    {
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( physicalDevice.getDispatcher()->vkGetWinrtDisplayNV( static_cast<VkPhysicalDevice>( *physicalDevice ), deviceRelativeId, reinterpret_cast<VkDisplayKHR*>( &m_display ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, "vkGetWinrtDisplayNV" );
+      }
+    }
+#endif
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+    // Adopt an already-acquired VkDisplayKHR handle (e.g. one produced by the
+    // exception-free create() factory); this wrapper takes over release
+    // responsibility for it.
+    DisplayKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice, VkDisplayKHR display )
+      : m_physicalDevice( *physicalDevice ), m_display( display ), m_dispatcher( physicalDevice.getDispatcher() )
+    {}
+
+
+    // Empty wrapper that owns no display; all members stay value-initialized.
+    DisplayKHR( std::nullptr_t ) {}
+
+    // Releases the owned display (see clear()).
+    ~DisplayKHR()
+    {
+      clear();
+    }
+
+    // Move-only RAII type: copying is deleted; a move transfers ownership and
+    // leaves the source empty (all members exchanged with null values).
+    DisplayKHR() = delete;
+      DisplayKHR( DisplayKHR const & ) = delete;
+    DisplayKHR( DisplayKHR && rhs ) VULKAN_HPP_NOEXCEPT
+      : m_physicalDevice( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_physicalDevice, {} ) ), m_display( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_display, {} ) ), m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
+    {}
+      DisplayKHR & operator=( DisplayKHR const & ) = delete;
+    DisplayKHR & operator=( DisplayKHR && rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      if ( this != &rhs )
+      {
+          // Release our current display before stealing rhs's state.
+          clear();
+          m_physicalDevice = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_physicalDevice, {} );
+          m_display = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_display, {} );
+        m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+      }
+      return *this;
+    }
+
+    // Access the underlying Vulkan handle without giving up ownership.
+    VULKAN_HPP_NAMESPACE::DisplayKHR const & operator*() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_display;
+    }
+
+    // If a display is held, hand it back to the implementation via
+    // vkReleaseDisplayEXT, then reset all members to the empty state.
+    void clear() VULKAN_HPP_NOEXCEPT
+    {
+        if ( m_display )
+        {
+          getDispatcher()->vkReleaseDisplayEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkDisplayKHR>( m_display ) );
+        }
+      m_physicalDevice = nullptr;
+      m_display = nullptr;
+        m_dispatcher = nullptr;
+    }
+
+    // Relinquish ownership WITHOUT calling vkReleaseDisplayEXT; the caller
+    // becomes responsible for the returned handle.
+    VULKAN_HPP_NAMESPACE::DisplayKHR release()
+    {
+
+      m_physicalDevice = nullptr;
+        m_dispatcher = nullptr;
+        return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_display, nullptr );
+    }
+
+
+    // Physical device this display was acquired from (null for an empty wrapper).
+    VULKAN_HPP_NAMESPACE::PhysicalDevice getPhysicalDevice() const
+    {
+      return m_physicalDevice;
+    }
+
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * getDispatcher() const
+    {
+      VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+      return m_dispatcher;
+    }
+
+    void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+
+      std::swap( m_physicalDevice, rhs.m_physicalDevice );
+      std::swap( m_display, rhs.m_display );
+      std::swap( m_dispatcher, rhs.m_dispatcher );
+    }
+
+
+  //=== VK_KHR_display ===
+
+    
+    VULKAN_HPP_NODISCARD  std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR> getModeProperties(  ) const ;
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DisplayModeKHR createMode( VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const; 
+#endif
+
+  //=== VK_KHR_get_display_properties2 ===
+
+    
+    VULKAN_HPP_NODISCARD  std::vector<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR> getModeProperties2(  ) const ;
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_NV_acquire_winrt_display ===
+
+    
+     void acquireWinrtNV(  ) const ;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+
+  private:
+
+    
+    VULKAN_HPP_NAMESPACE::PhysicalDevice m_physicalDevice = {};
+    VULKAN_HPP_NAMESPACE::DisplayKHR m_display = {};
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * m_dispatcher = nullptr;
+  };
+
+  // RAII container for the set of displays that can be placed on a given
+  // display plane (vkGetDisplayPlaneSupportedDisplaysKHR).
+  class DisplayKHRs : public std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR>
+  {
+  public:
+    
+
+    // Exception-free factory: enumerates the displays supported by planeIndex
+    // and returns them wrapped in RAII DisplayKHR objects; the failing
+    // VkResult is returned through android::base::expected.
+    static android::base::expected<DisplayKHRs, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice, uint32_t planeIndex ) 
+    {
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * dispatcher = physicalDevice.getDispatcher();
+      std::vector<VkDisplayKHR> displays;
+      uint32_t displayCount;
+      VULKAN_HPP_NAMESPACE::Result result;
+      // Standard Vulkan two-call enumeration: query the count, then fetch the
+      // handles; retry while the implementation reports eIncomplete.
+      do
+      {
+        result = static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->vkGetDisplayPlaneSupportedDisplaysKHR( static_cast<VkPhysicalDevice>( *physicalDevice ), planeIndex, &displayCount, nullptr ) );
+        if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && displayCount )
+        {
+          displays.resize( displayCount );
+          result = static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->vkGetDisplayPlaneSupportedDisplaysKHR( static_cast<VkPhysicalDevice>( *physicalDevice ), planeIndex, &displayCount, displays.data() ) );
+        }
+      } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        return android::base::unexpected(result);
+      }
+      // Fix: the final call may report fewer displays than were allocated (a
+      // display can disappear between the two calls); trim the
+      // value-initialized (null) tail so no empty wrappers are emitted.
+      VULKAN_HPP_ASSERT( displayCount <= displays.size() );
+      displays.resize( displayCount );
+      DisplayKHRs ret(nullptr);
+      ret.reserve( displayCount );
+      for ( auto const & displayKHR : displays )
+      {
+        ret.emplace_back( physicalDevice, displayKHR );
+      }
+      return std::move(ret);
+    }
+
+    
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    // Throwing counterpart of create(); only compiled when exceptions are on.
+    DisplayKHRs( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice, uint32_t planeIndex )
+    {
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * dispatcher = physicalDevice.getDispatcher();
+      std::vector<VkDisplayKHR> displays;
+      uint32_t displayCount;
+      VULKAN_HPP_NAMESPACE::Result result;
+      do
+      {
+        result = static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->vkGetDisplayPlaneSupportedDisplaysKHR( static_cast<VkPhysicalDevice>( *physicalDevice ), planeIndex, &displayCount, nullptr ) );
+        if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && displayCount )
+        {
+          displays.resize( displayCount );
+          result = static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->vkGetDisplayPlaneSupportedDisplaysKHR( static_cast<VkPhysicalDevice>( *physicalDevice ), planeIndex, &displayCount, displays.data() ) );
+        }
+      } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
+      if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        VULKAN_HPP_ASSERT( displayCount <= displays.size() );
+        // Fix: trim stale tail entries, mirroring create() above.
+        displays.resize( displayCount );
+        this->reserve( displayCount );
+        for ( auto const & displayKHR : displays )
+        {
+          this->emplace_back( physicalDevice, displayKHR );
+        }
+      }
+      else
+      {
+        throwResultException( result, "vkGetDisplayPlaneSupportedDisplaysKHR" );
+      }
+    }
+#endif
+
+    // Empty collection.
+    DisplayKHRs( std::nullptr_t ) {}
+
+    // Move-only, like the element type.
+    DisplayKHRs() = delete;
+    DisplayKHRs( DisplayKHRs const & ) = delete;
+    DisplayKHRs( DisplayKHRs && rhs ) = default;
+    DisplayKHRs & operator=( DisplayKHRs const & ) = delete;
+    DisplayKHRs & operator=( DisplayKHRs && rhs ) = default;
+  };
+
+
+  // RAII wrapper for VkDisplayModeKHR.  Display modes are never destroyed in
+  // Vulkan, so clear() only resets the members; that is also why this type is
+  // copyable, unlike the other RAII wrappers here.
+  class DisplayModeKHR
+  {
+  public:
+    using CType = VkDisplayModeKHR;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDisplayModeKHR;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayModeKHR;
+
+  public:
+
+
+    // Exception-free factory: creates a display mode via vkCreateDisplayModeKHR;
+    // the failing VkResult is returned through android::base::expected.
+    static android::base::expected<DisplayModeKHR, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR const & display, VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) 
+    {
+      VkDisplayModeKHR displayModeKHR;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( display.getDispatcher()->vkCreateDisplayModeKHR( static_cast<VkPhysicalDevice>( display.getPhysicalDevice() ), static_cast<VkDisplayKHR>( *display ), reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDisplayModeKHR*>( &displayModeKHR ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        return android::base::unexpected(result);
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayModeKHR(display, displayModeKHR);
+    }
+
+
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    // Throwing counterpart of create(); only compiled when exceptions are on.
+    // Fix: write into the member m_displayModeKHR -- the previous code
+    // referenced a non-existent local 'displayModeKHR' and did not compile
+    // when exceptions were enabled.
+    DisplayModeKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR const & display, VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_physicalDevice( display.getPhysicalDevice() ), m_dispatcher( display.getDispatcher() )
+    {
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( display.getDispatcher()->vkCreateDisplayModeKHR( static_cast<VkPhysicalDevice>( display.getPhysicalDevice() ), static_cast<VkDisplayKHR>( *display ), reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDisplayModeKHR*>( &m_displayModeKHR ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, "vkCreateDisplayModeKHR" );
+      }
+    }
+#endif
+
+    // Adopt an existing VkDisplayModeKHR handle.
+    DisplayModeKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR const & display, VkDisplayModeKHR displayModeKHR )
+      : m_physicalDevice( display.getPhysicalDevice() ), m_displayModeKHR( displayModeKHR ), m_dispatcher( display.getDispatcher() )
+    {}
+
+
+    // Empty wrapper holding no display mode.
+    DisplayModeKHR( std::nullptr_t ) {}
+
+    ~DisplayModeKHR()
+    {
+      clear();
+    }
+
+    DisplayModeKHR() = delete;
+    // Fix: the copy operations must also copy m_physicalDevice; previously it
+    // was left null in the copy, so getDisplayPlaneCapabilities() on a copied
+    // object dispatched with a VK_NULL_HANDLE physical device.
+    DisplayModeKHR( DisplayModeKHR const & rhs )
+      : m_physicalDevice( rhs.m_physicalDevice ), m_displayModeKHR( rhs.m_displayModeKHR ), m_dispatcher( rhs.m_dispatcher )
+    {}
+    DisplayModeKHR( DisplayModeKHR && rhs ) VULKAN_HPP_NOEXCEPT
+      : m_physicalDevice( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_physicalDevice, {} ) ), m_displayModeKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_displayModeKHR, {} ) ), m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
+    {}
+    DisplayModeKHR & operator=( DisplayModeKHR const & rhs )
+    {
+      m_physicalDevice = rhs.m_physicalDevice;
+      m_displayModeKHR = rhs.m_displayModeKHR;
+      m_dispatcher     = rhs.m_dispatcher;
+      return *this;
+    }
+    DisplayModeKHR & operator=( DisplayModeKHR && rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      if ( this != &rhs )
+      {
+
+          m_physicalDevice = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_physicalDevice, {} );
+          m_displayModeKHR = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_displayModeKHR, {} );
+        m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+      }
+      return *this;
+    }
+
+    // Access the underlying Vulkan handle.
+    VULKAN_HPP_NAMESPACE::DisplayModeKHR const & operator*() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_displayModeKHR;
+    }
+
+    // Display modes are not destroyed in Vulkan; just reset the members.
+    void clear() VULKAN_HPP_NOEXCEPT
+    {
+
+        m_physicalDevice = nullptr;
+        m_displayModeKHR = nullptr;
+        m_dispatcher = nullptr;
+    }
+
+    // Relinquish the handle to the caller and reset the members.
+    VULKAN_HPP_NAMESPACE::DisplayModeKHR release()
+    {
+
+        m_physicalDevice = nullptr;
+        m_dispatcher = nullptr;
+        return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_displayModeKHR, nullptr );
+    }
+
+
+
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * getDispatcher() const
+    {
+      VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+      return m_dispatcher;
+    }
+
+    void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayModeKHR & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+
+      std::swap( m_physicalDevice, rhs.m_physicalDevice );
+      std::swap( m_displayModeKHR, rhs.m_displayModeKHR );
+      std::swap( m_dispatcher, rhs.m_dispatcher );
+    }
+
+
+  //=== VK_KHR_display ===
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR getDisplayPlaneCapabilities( uint32_t planeIndex ) const ;
+
+
+  private:
+
+    
+    VULKAN_HPP_NAMESPACE::PhysicalDevice m_physicalDevice = {};
+    VULKAN_HPP_NAMESPACE::DisplayModeKHR m_displayModeKHR = {};
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * m_dispatcher = nullptr;
+  };
+
+  // Move-only RAII wrapper for VkEvent; destroys the event via vkDestroyEvent
+  // on clear()/destruction.
+  class Event
+  {
+  public:
+    using CType = VkEvent;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eEvent;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eEvent;
+
+  public:
+
+
+    // Exception-free factory: creates a VkEvent and wraps it; the failing
+    // VkResult is returned through android::base::expected.
+    static android::base::expected<Event, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::EventCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) 
+    {
+      VkEvent event;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateEvent( static_cast<VkDevice>( *device ), reinterpret_cast<const VkEventCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkEvent*>( &event ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        return android::base::unexpected(result);
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Event(device, event, allocator);
+    }
+
+
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    // Throwing counterpart of create(); only compiled when exceptions are on.
+    // Fix: write into the member m_event -- the previous code referenced a
+    // non-existent local 'event' and did not compile when exceptions were
+    // enabled.
+    Event( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::EventCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( *device ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateEvent( static_cast<VkDevice>( *device ), reinterpret_cast<const VkEventCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ), reinterpret_cast<VkEvent*>( &m_event ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, "vkCreateEvent" );
+      }
+    }
+#endif
+
+    // Adopt an already-created VkEvent; takes over destruction responsibility.
+    Event( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkEvent event, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( *device ), m_event( event ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {}
+
+
+    // Empty wrapper holding no event.
+    Event( std::nullptr_t ) {}
+
+    ~Event()
+    {
+      clear();
+    }
+
+    // Move-only: copying is deleted; moves transfer ownership and empty the source.
+    Event() = delete;
+      Event( Event const & ) = delete;
+    Event( Event && rhs ) VULKAN_HPP_NOEXCEPT
+      : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ), m_event( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_event, {} ) ), m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ), m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
+    {}
+      Event & operator=( Event const & ) = delete;
+    Event & operator=( Event && rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      if ( this != &rhs )
+      {
+          clear();
+          m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
+          m_event = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_event, {} );
+          m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
+        m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+      }
+      return *this;
+    }
+
+    // Access the underlying Vulkan handle.
+    VULKAN_HPP_NAMESPACE::Event const & operator*() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_event;
+    }
+
+    // Destroy the event (if held) with the stored allocator, then reset members.
+    void clear() VULKAN_HPP_NOEXCEPT
+    {
+        if ( m_event )
+        {
+          getDispatcher()->vkDestroyEvent( static_cast<VkDevice>( m_device ), static_cast<VkEvent>( m_event ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+        }
+      m_device = nullptr;
+      m_event = nullptr;
+      m_allocator = nullptr;
+        m_dispatcher = nullptr;
+    }
+
+    // Relinquish ownership WITHOUT destroying the event.
+    VULKAN_HPP_NAMESPACE::Event release()
+    {
+
+      m_device = nullptr;
+      m_allocator = nullptr;
+        m_dispatcher = nullptr;
+        return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_event, nullptr );
+    }
+
+
+    VULKAN_HPP_NAMESPACE::Device getDevice() const
+    {
+      return m_device;
+    }
+
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
+    {
+      VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+      return m_dispatcher;
+    }
+
+    void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Event & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+
+      std::swap( m_device, rhs.m_device );
+      std::swap( m_event, rhs.m_event );
+      std::swap( m_allocator, rhs.m_allocator );
+      std::swap( m_dispatcher, rhs.m_dispatcher );
+    }
+
+
+  //=== VK_VERSION_1_0 ===
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::Result getStatus(  ) const ;
+
+    
+     void set(  ) const ;
+
+    
+     void reset(  ) const ;
+
+
+  private:
+
+    
+    VULKAN_HPP_NAMESPACE::Device m_device = {};
+    VULKAN_HPP_NAMESPACE::Event m_event = {};
+    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
+  };
+
+  // Move-only RAII wrapper for VkFence; destroys the fence via vkDestroyFence
+  // on clear()/destruction.
+  class Fence
+  {
+  public:
+    using CType = VkFence;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eFence;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFence;
+
+  public:
+
+
+    // Exception-free factory: vkCreateFence; the failing VkResult is returned
+    // through android::base::expected.
+    static android::base::expected<Fence, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::FenceCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) 
+    {
+      VkFence fence;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateFence( static_cast<VkDevice>( *device ), reinterpret_cast<const VkFenceCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkFence*>( &fence ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        return android::base::unexpected(result);
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Fence(device, fence, allocator);
+    }
+
+
+    // Exception-free factory: fence signaled by a device event
+    // (vkRegisterDeviceEventEXT, VK_EXT_display_control).
+    static android::base::expected<Fence, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const & deviceEventInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) 
+    {
+      VkFence fence;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkRegisterDeviceEventEXT( static_cast<VkDevice>( *device ), reinterpret_cast<const VkDeviceEventInfoEXT *>( &deviceEventInfo ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkFence*>( &fence ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        return android::base::unexpected(result);
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Fence(device, fence, allocator);
+    }
+
+
+    // Exception-free factory: fence signaled by a display event
+    // (vkRegisterDisplayEventEXT, VK_EXT_display_control).
+    static android::base::expected<Fence, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR const & display, VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT const & displayEventInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) 
+    {
+      VkFence fence;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkRegisterDisplayEventEXT( static_cast<VkDevice>( *device ), static_cast<VkDisplayKHR>( *display ), reinterpret_cast<const VkDisplayEventInfoEXT *>( &displayEventInfo ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkFence*>( &fence ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        return android::base::unexpected(result);
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Fence(device, fence, allocator);
+    }
+
+
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    // Throwing counterpart of the vkCreateFence factory.
+    // Fix: write into the member m_fence -- the previous code referenced a
+    // non-existent local 'fence' and did not compile when exceptions were
+    // enabled.  Same fix applies to the two constructors below.
+    Fence( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::FenceCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( *device ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateFence( static_cast<VkDevice>( *device ), reinterpret_cast<const VkFenceCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ), reinterpret_cast<VkFence*>( &m_fence ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, "vkCreateFence" );
+      }
+    }
+#endif
+
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    // Throwing counterpart of the vkRegisterDeviceEventEXT factory.
+    Fence( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const & deviceEventInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( *device ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkRegisterDeviceEventEXT( static_cast<VkDevice>( *device ), reinterpret_cast<const VkDeviceEventInfoEXT *>( &deviceEventInfo ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ), reinterpret_cast<VkFence*>( &m_fence ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, "vkRegisterDeviceEventEXT" );
+      }
+    }
+#endif
+
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    // Throwing counterpart of the vkRegisterDisplayEventEXT factory.
+    Fence( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR const & display, VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT const & displayEventInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( *device ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkRegisterDisplayEventEXT( static_cast<VkDevice>( *device ), static_cast<VkDisplayKHR>( *display ), reinterpret_cast<const VkDisplayEventInfoEXT *>( &displayEventInfo ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ), reinterpret_cast<VkFence*>( &m_fence ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, "vkRegisterDisplayEventEXT" );
+      }
+    }
+#endif
+
+    // Adopt an already-created VkFence; takes over destruction responsibility.
+    Fence( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkFence fence, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( *device ), m_fence( fence ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {}
+
+
+    // Empty wrapper holding no fence.
+    Fence( std::nullptr_t ) {}
+
+    ~Fence()
+    {
+      clear();
+    }
+
+    // Move-only: copying is deleted; moves transfer ownership and empty the source.
+    Fence() = delete;
+      Fence( Fence const & ) = delete;
+    Fence( Fence && rhs ) VULKAN_HPP_NOEXCEPT
+      : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ), m_fence( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_fence, {} ) ), m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ), m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
+    {}
+      Fence & operator=( Fence const & ) = delete;
+    Fence & operator=( Fence && rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      if ( this != &rhs )
+      {
+          clear();
+          m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
+          m_fence = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_fence, {} );
+          m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
+        m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+      }
+      return *this;
+    }
+
+    // Access the underlying Vulkan handle.
+    VULKAN_HPP_NAMESPACE::Fence const & operator*() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_fence;
+    }
+
+    // Destroy the fence (if held) with the stored allocator, then reset members.
+    void clear() VULKAN_HPP_NOEXCEPT
+    {
+        if ( m_fence )
+        {
+          getDispatcher()->vkDestroyFence( static_cast<VkDevice>( m_device ), static_cast<VkFence>( m_fence ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+        }
+      m_device = nullptr;
+      m_fence = nullptr;
+      m_allocator = nullptr;
+        m_dispatcher = nullptr;
+    }
+
+    // Relinquish ownership WITHOUT destroying the fence.
+    VULKAN_HPP_NAMESPACE::Fence release()
+    {
+
+      m_device = nullptr;
+      m_allocator = nullptr;
+        m_dispatcher = nullptr;
+        return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_fence, nullptr );
+    }
+
+
+    VULKAN_HPP_NAMESPACE::Device getDevice() const
+    {
+      return m_device;
+    }
+
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
+    {
+      VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+      return m_dispatcher;
+    }
+
+    void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Fence & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+
+      std::swap( m_device, rhs.m_device );
+      std::swap( m_fence, rhs.m_fence );
+      std::swap( m_allocator, rhs.m_allocator );
+      std::swap( m_dispatcher, rhs.m_dispatcher );
+    }
+
+
+  //=== VK_VERSION_1_0 ===
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::Result getStatus(  ) const ;
+
+
+  private:
+
+    
+    VULKAN_HPP_NAMESPACE::Device m_device = {};
+    VULKAN_HPP_NAMESPACE::Fence m_fence = {};
+    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
+  };
+
+  class Framebuffer
+  {
+  public:
+    using CType = VkFramebuffer;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eFramebuffer;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFramebuffer;
+
+  public:
+
+
+    // Exception-free factory: creates a VkFramebuffer from createInfo and
+    // wraps it in a RAII Framebuffer; the failing VkResult is returned
+    // through android::base::expected instead of a thrown exception.
+    static android::base::expected<Framebuffer, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::FramebufferCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) 
+    {
+      VkFramebuffer framebuffer;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateFramebuffer( static_cast<VkDevice>( *device ), reinterpret_cast<const VkFramebufferCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkFramebuffer*>( &framebuffer ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        return android::base::unexpected(result);
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Framebuffer(device, framebuffer, allocator);
+    }
+
+
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    // Throwing counterpart of create(); only compiled when exceptions are on.
+    // Fix: write into the member m_framebuffer -- the previous code
+    // referenced a non-existent local 'framebuffer' and did not compile when
+    // exceptions were enabled.
+    Framebuffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::FramebufferCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( *device ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateFramebuffer( static_cast<VkDevice>( *device ), reinterpret_cast<const VkFramebufferCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ), reinterpret_cast<VkFramebuffer*>( &m_framebuffer ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, "vkCreateFramebuffer" );
+      }
+    }
+#endif
+
+    Framebuffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkFramebuffer framebuffer, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )  // adopts an already-created handle; this wrapper becomes responsible for destroying it
+      : m_device( *device ), m_framebuffer( framebuffer ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {}
+
+
+    Framebuffer( std::nullptr_t ) {}  // empty wrapper owning nothing
+
+    ~Framebuffer()
+    {
+      clear();  // destroys the framebuffer if still owned
+    }
+
+    Framebuffer() = delete;
+      Framebuffer( Framebuffer const & ) = delete;  // move-only: copying would double-destroy the handle
+    Framebuffer( Framebuffer && rhs ) VULKAN_HPP_NOEXCEPT
+      : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ), m_framebuffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_framebuffer, {} ) ), m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ), m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
+    {}
+      Framebuffer & operator=( Framebuffer const & ) = delete;
+    Framebuffer & operator=( Framebuffer && rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      if ( this != &rhs )  // self-move guard
+      {
+          clear();  // destroy the currently owned handle before taking over rhs's
+          m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
+          m_framebuffer = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_framebuffer, {} );
+          m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
+        m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+      }
+      return *this;
+    }
+
+    VULKAN_HPP_NAMESPACE::Framebuffer const & operator*() const VULKAN_HPP_NOEXCEPT  // access the underlying handle without transferring ownership
+    {
+      return m_framebuffer;
+    }
+
+    void clear() VULKAN_HPP_NOEXCEPT  // destroy the owned framebuffer (if any) and reset to the empty state
+    {
+        if ( m_framebuffer )
+        {
+          getDispatcher()->vkDestroyFramebuffer( static_cast<VkDevice>( m_device ), static_cast<VkFramebuffer>( m_framebuffer ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+        }
+      m_device = nullptr;
+      m_framebuffer = nullptr;
+      m_allocator = nullptr;
+        m_dispatcher = nullptr;
+    }
+
+    VULKAN_HPP_NAMESPACE::Framebuffer release()  // relinquish ownership: returns the handle without destroying it
+    {
+
+      m_device = nullptr;
+      m_allocator = nullptr;
+        m_dispatcher = nullptr;
+        return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_framebuffer, nullptr );
+    }
+
+
+    VULKAN_HPP_NAMESPACE::Device getDevice() const  // device this framebuffer was created on (non-owning)
+    {
+      return m_device;
+    }
+
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
+    {
+      VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );  // catch dispatcher/header version mismatch in debug builds
+      return m_dispatcher;
+    }
+
+    void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Framebuffer & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+
+      std::swap( m_device, rhs.m_device );
+      std::swap( m_framebuffer, rhs.m_framebuffer );
+      std::swap( m_allocator, rhs.m_allocator );
+      std::swap( m_dispatcher, rhs.m_dispatcher );
+    }
+
+
+  //=== VK_QCOM_tile_properties ===
+
+    
+    VULKAN_HPP_NODISCARD  std::vector<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM> getTilePropertiesQCOM(  ) const ;  // defined out of line elsewhere in this header
+
+
+  private:
+
+    
+    VULKAN_HPP_NAMESPACE::Device m_device = {};  // non-owning; needed to call vkDestroyFramebuffer
+    VULKAN_HPP_NAMESPACE::Framebuffer m_framebuffer = {};  // owned handle (null when empty)
+    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
+  };
+
+  class Image  // RAII owner of a VkImage; destroys the handle in clear()/~Image()
+  {
+  public:
+    using CType = VkImage;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eImage;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImage;
+
+  public:
+
+
+    static android::base::expected<Image, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::ImageCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )  // exception-free factory: RAII wrapper on success, failing VkResult otherwise
+    {
+      VkImage image;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateImage( static_cast<VkDevice>( *device ), reinterpret_cast<const VkImageCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkImage*>( &image ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        return android::base::unexpected(result);  // propagate the raw VkResult instead of throwing
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Image(device, image, allocator);  // ownership moves into the returned wrapper
+    }
+
+
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    Image( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::ImageCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )  // throwing counterpart of create()
+      : m_device( *device ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateImage( static_cast<VkDevice>( *device ), reinterpret_cast<const VkImageCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ), reinterpret_cast<VkImage*>( &m_image ) ) );  // fix: write into the m_image member; local 'image' is not declared in this scope
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, "vkCreateImage" );
+      }
+    }
+#endif
+
+    Image( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkImage image, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )  // adopts an already-created handle
+      : m_device( *device ), m_image( image ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {}
+
+
+    Image( std::nullptr_t ) {}  // empty wrapper owning nothing
+
+    ~Image()
+    {
+      clear();
+    }
+
+    Image() = delete;
+      Image( Image const & ) = delete;  // move-only
+    Image( Image && rhs ) VULKAN_HPP_NOEXCEPT
+      : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ), m_image( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_image, {} ) ), m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ), m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
+    {}
+      Image & operator=( Image const & ) = delete;
+    Image & operator=( Image && rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      if ( this != &rhs )  // self-move guard
+      {
+          clear();  // destroy the currently owned handle before taking over rhs's
+          m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
+          m_image = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_image, {} );
+          m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
+        m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+      }
+      return *this;
+    }
+
+    VULKAN_HPP_NAMESPACE::Image const & operator*() const VULKAN_HPP_NOEXCEPT  // access the handle without transferring ownership
+    {
+      return m_image;
+    }
+
+    void clear() VULKAN_HPP_NOEXCEPT  // destroy the owned image (if any) and reset to the empty state
+    {
+        if ( m_image )
+        {
+          getDispatcher()->vkDestroyImage( static_cast<VkDevice>( m_device ), static_cast<VkImage>( m_image ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+        }
+      m_device = nullptr;
+      m_image = nullptr;
+      m_allocator = nullptr;
+        m_dispatcher = nullptr;
+    }
+
+    VULKAN_HPP_NAMESPACE::Image release()  // relinquish ownership: returns the handle without destroying it
+    {
+
+      m_device = nullptr;
+      m_allocator = nullptr;
+        m_dispatcher = nullptr;
+        return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_image, nullptr );
+    }
+
+
+    VULKAN_HPP_NAMESPACE::Device getDevice() const
+    {
+      return m_device;
+    }
+
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
+    {
+      VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );  // catch dispatcher/header version mismatch in debug builds
+      return m_dispatcher;
+    }
+
+    void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Image & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+
+      std::swap( m_device, rhs.m_device );
+      std::swap( m_image, rhs.m_image );
+      std::swap( m_allocator, rhs.m_allocator );
+      std::swap( m_dispatcher, rhs.m_dispatcher );
+    }
+
+
+  //=== VK_VERSION_1_0 ===
+
+    
+     void bindMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset ) const ;
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::MemoryRequirements getMemoryRequirements(  ) const  VULKAN_HPP_NOEXCEPT;
+
+    
+    VULKAN_HPP_NODISCARD  std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements> getSparseMemoryRequirements(  ) const ;
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::SubresourceLayout getSubresourceLayout( const VULKAN_HPP_NAMESPACE::ImageSubresource & subresource ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_EXT_image_drm_format_modifier ===
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT getDrmFormatModifierPropertiesEXT(  ) const ;
+
+  //=== VK_EXT_image_compression_control ===
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource ) const  VULKAN_HPP_NOEXCEPT;
+
+    template <typename X, typename Y, typename... Z>
+    VULKAN_HPP_NODISCARD  StructureChain<X, Y, Z...> getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource ) const  VULKAN_HPP_NOEXCEPT;
+
+
+  private:
+
+    
+    VULKAN_HPP_NAMESPACE::Device m_device = {};  // non-owning; needed to call vkDestroyImage
+    VULKAN_HPP_NAMESPACE::Image m_image = {};  // owned handle (null when empty)
+    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
+  };
+
+  class ImageView  // RAII owner of a VkImageView; destroys the handle in clear()/~ImageView()
+  {
+  public:
+    using CType = VkImageView;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eImageView;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImageView;
+
+  public:
+
+
+    static android::base::expected<ImageView, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::ImageViewCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )  // exception-free factory: RAII wrapper on success, failing VkResult otherwise
+    {
+      VkImageView imageView;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateImageView( static_cast<VkDevice>( *device ), reinterpret_cast<const VkImageViewCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkImageView*>( &imageView ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        return android::base::unexpected(result);  // propagate the raw VkResult instead of throwing
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ImageView(device, imageView, allocator);  // ownership moves into the returned wrapper
+    }
+
+
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    ImageView( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::ImageViewCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )  // throwing counterpart of create()
+      : m_device( *device ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateImageView( static_cast<VkDevice>( *device ), reinterpret_cast<const VkImageViewCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ), reinterpret_cast<VkImageView*>( &m_imageView ) ) );  // fix: write into the m_imageView member; local 'imageView' is not declared in this scope
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, "vkCreateImageView" );
+      }
+    }
+#endif
+
+    ImageView( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkImageView imageView, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )  // adopts an already-created handle
+      : m_device( *device ), m_imageView( imageView ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {}
+
+
+    ImageView( std::nullptr_t ) {}  // empty wrapper owning nothing
+
+    ~ImageView()
+    {
+      clear();
+    }
+
+    ImageView() = delete;
+      ImageView( ImageView const & ) = delete;  // move-only
+    ImageView( ImageView && rhs ) VULKAN_HPP_NOEXCEPT
+      : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ), m_imageView( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_imageView, {} ) ), m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ), m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
+    {}
+      ImageView & operator=( ImageView const & ) = delete;
+    ImageView & operator=( ImageView && rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      if ( this != &rhs )  // self-move guard
+      {
+          clear();  // destroy the currently owned handle before taking over rhs's
+          m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
+          m_imageView = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_imageView, {} );
+          m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
+        m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+      }
+      return *this;
+    }
+
+    VULKAN_HPP_NAMESPACE::ImageView const & operator*() const VULKAN_HPP_NOEXCEPT  // access the handle without transferring ownership
+    {
+      return m_imageView;
+    }
+
+    void clear() VULKAN_HPP_NOEXCEPT  // destroy the owned image view (if any) and reset to the empty state
+    {
+        if ( m_imageView )
+        {
+          getDispatcher()->vkDestroyImageView( static_cast<VkDevice>( m_device ), static_cast<VkImageView>( m_imageView ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+        }
+      m_device = nullptr;
+      m_imageView = nullptr;
+      m_allocator = nullptr;
+        m_dispatcher = nullptr;
+    }
+
+    VULKAN_HPP_NAMESPACE::ImageView release()  // relinquish ownership: returns the handle without destroying it
+    {
+
+      m_device = nullptr;
+      m_allocator = nullptr;
+        m_dispatcher = nullptr;
+        return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_imageView, nullptr );
+    }
+
+
+    VULKAN_HPP_NAMESPACE::Device getDevice() const
+    {
+      return m_device;
+    }
+
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
+    {
+      VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );  // catch dispatcher/header version mismatch in debug builds
+      return m_dispatcher;
+    }
+
+    void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ImageView & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+
+      std::swap( m_device, rhs.m_device );
+      std::swap( m_imageView, rhs.m_imageView );
+      std::swap( m_allocator, rhs.m_allocator );
+      std::swap( m_dispatcher, rhs.m_dispatcher );
+    }
+
+
+  //=== VK_NVX_image_view_handle ===
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX getAddressNVX(  ) const ;
+
+
+  private:
+
+    
+    VULKAN_HPP_NAMESPACE::Device m_device = {};  // non-owning; needed to call vkDestroyImageView
+    VULKAN_HPP_NAMESPACE::ImageView m_imageView = {};  // owned handle (null when empty)
+    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
+  };
+
+  class IndirectCommandsLayoutNV  // RAII owner of a VkIndirectCommandsLayoutNV; destroys the handle in clear()/~IndirectCommandsLayoutNV()
+  {
+  public:
+    using CType = VkIndirectCommandsLayoutNV;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eIndirectCommandsLayoutNV;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+
+  public:
+
+
+    static android::base::expected<IndirectCommandsLayoutNV, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )  // exception-free factory: RAII wrapper on success, failing VkResult otherwise
+    {
+      VkIndirectCommandsLayoutNV indirectCommandsLayout;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateIndirectCommandsLayoutNV( static_cast<VkDevice>( *device ), reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkIndirectCommandsLayoutNV*>( &indirectCommandsLayout ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        return android::base::unexpected(result);  // propagate the raw VkResult instead of throwing
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::IndirectCommandsLayoutNV(device, indirectCommandsLayout, allocator);  // ownership moves into the returned wrapper
+    }
+
+
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    IndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )  // throwing counterpart of create()
+      : m_device( *device ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateIndirectCommandsLayoutNV( static_cast<VkDevice>( *device ), reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ), reinterpret_cast<VkIndirectCommandsLayoutNV*>( &m_indirectCommandsLayout ) ) );  // fix: write into the m_indirectCommandsLayout member; local 'indirectCommandsLayout' is not declared in this scope
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, "vkCreateIndirectCommandsLayoutNV" );
+      }
+    }
+#endif
+
+    IndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkIndirectCommandsLayoutNV indirectCommandsLayout, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )  // adopts an already-created handle
+      : m_device( *device ), m_indirectCommandsLayout( indirectCommandsLayout ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {}
+
+
+    IndirectCommandsLayoutNV( std::nullptr_t ) {}  // empty wrapper owning nothing
+
+    ~IndirectCommandsLayoutNV()
+    {
+      clear();
+    }
+
+    IndirectCommandsLayoutNV() = delete;
+      IndirectCommandsLayoutNV( IndirectCommandsLayoutNV const & ) = delete;  // move-only
+    IndirectCommandsLayoutNV( IndirectCommandsLayoutNV && rhs ) VULKAN_HPP_NOEXCEPT
+      : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ), m_indirectCommandsLayout( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_indirectCommandsLayout, {} ) ), m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ), m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
+    {}
+      IndirectCommandsLayoutNV & operator=( IndirectCommandsLayoutNV const & ) = delete;
+    IndirectCommandsLayoutNV & operator=( IndirectCommandsLayoutNV && rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      if ( this != &rhs )  // self-move guard
+      {
+          clear();  // destroy the currently owned handle before taking over rhs's
+          m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
+          m_indirectCommandsLayout = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_indirectCommandsLayout, {} );
+          m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
+        m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+      }
+      return *this;
+    }
+
+    VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV const & operator*() const VULKAN_HPP_NOEXCEPT  // access the handle without transferring ownership
+    {
+      return m_indirectCommandsLayout;
+    }
+
+    void clear() VULKAN_HPP_NOEXCEPT  // destroy the owned layout (if any) and reset to the empty state
+    {
+        if ( m_indirectCommandsLayout )
+        {
+          getDispatcher()->vkDestroyIndirectCommandsLayoutNV( static_cast<VkDevice>( m_device ), static_cast<VkIndirectCommandsLayoutNV>( m_indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+        }
+      m_device = nullptr;
+      m_indirectCommandsLayout = nullptr;
+      m_allocator = nullptr;
+        m_dispatcher = nullptr;
+    }
+
+    VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV release()  // relinquish ownership: returns the handle without destroying it
+    {
+
+      m_device = nullptr;
+      m_allocator = nullptr;
+        m_dispatcher = nullptr;
+        return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_indirectCommandsLayout, nullptr );
+    }
+
+
+    VULKAN_HPP_NAMESPACE::Device getDevice() const
+    {
+      return m_device;
+    }
+
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
+    {
+      VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );  // catch dispatcher/header version mismatch in debug builds
+      return m_dispatcher;
+    }
+
+    void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::IndirectCommandsLayoutNV & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+
+      std::swap( m_device, rhs.m_device );
+      std::swap( m_indirectCommandsLayout, rhs.m_indirectCommandsLayout );
+      std::swap( m_allocator, rhs.m_allocator );
+      std::swap( m_dispatcher, rhs.m_dispatcher );
+    }
+
+
+
+  private:
+
+    
+    VULKAN_HPP_NAMESPACE::Device m_device = {};  // non-owning; needed to call vkDestroyIndirectCommandsLayoutNV
+    VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV m_indirectCommandsLayout = {};  // owned handle (null when empty)
+    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
+  };
+
+  class MicromapEXT  // RAII owner of a VkMicromapEXT; destroys the handle in clear()/~MicromapEXT()
+  {
+  public:
+    using CType = VkMicromapEXT;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eMicromapEXT;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+
+  public:
+
+
+    static android::base::expected<MicromapEXT, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )  // exception-free factory: RAII wrapper on success, failing VkResult otherwise
+    {
+      VkMicromapEXT micromap;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateMicromapEXT( static_cast<VkDevice>( *device ), reinterpret_cast<const VkMicromapCreateInfoEXT *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkMicromapEXT*>( &micromap ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        return android::base::unexpected(result);  // propagate the raw VkResult instead of throwing
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::MicromapEXT(device, micromap, allocator);  // ownership moves into the returned wrapper
+    }
+
+
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    MicromapEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )  // throwing counterpart of create()
+      : m_device( *device ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateMicromapEXT( static_cast<VkDevice>( *device ), reinterpret_cast<const VkMicromapCreateInfoEXT *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ), reinterpret_cast<VkMicromapEXT*>( &m_micromap ) ) );  // fix: write into the m_micromap member; local 'micromap' is not declared in this scope
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, "vkCreateMicromapEXT" );
+      }
+    }
+#endif
+
+    MicromapEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkMicromapEXT micromap, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )  // adopts an already-created handle
+      : m_device( *device ), m_micromap( micromap ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {}
+
+
+    MicromapEXT( std::nullptr_t ) {}  // empty wrapper owning nothing
+
+    ~MicromapEXT()
+    {
+      clear();
+    }
+
+    MicromapEXT() = delete;
+      MicromapEXT( MicromapEXT const & ) = delete;  // move-only
+    MicromapEXT( MicromapEXT && rhs ) VULKAN_HPP_NOEXCEPT
+      : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ), m_micromap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_micromap, {} ) ), m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ), m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
+    {}
+      MicromapEXT & operator=( MicromapEXT const & ) = delete;
+    MicromapEXT & operator=( MicromapEXT && rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      if ( this != &rhs )  // self-move guard
+      {
+          clear();  // destroy the currently owned handle before taking over rhs's
+          m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
+          m_micromap = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_micromap, {} );
+          m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
+        m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+      }
+      return *this;
+    }
+
+    VULKAN_HPP_NAMESPACE::MicromapEXT const & operator*() const VULKAN_HPP_NOEXCEPT  // access the handle without transferring ownership
+    {
+      return m_micromap;
+    }
+
+    void clear() VULKAN_HPP_NOEXCEPT  // destroy the owned micromap (if any) and reset to the empty state
+    {
+        if ( m_micromap )
+        {
+          getDispatcher()->vkDestroyMicromapEXT( static_cast<VkDevice>( m_device ), static_cast<VkMicromapEXT>( m_micromap ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+        }
+      m_device = nullptr;
+      m_micromap = nullptr;
+      m_allocator = nullptr;
+        m_dispatcher = nullptr;
+    }
+
+    VULKAN_HPP_NAMESPACE::MicromapEXT release()  // relinquish ownership: returns the handle without destroying it
+    {
+
+      m_device = nullptr;
+      m_allocator = nullptr;
+        m_dispatcher = nullptr;
+        return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_micromap, nullptr );
+    }
+
+
+    VULKAN_HPP_NAMESPACE::Device getDevice() const
+    {
+      return m_device;
+    }
+
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
+    {
+      VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );  // catch dispatcher/header version mismatch in debug builds
+      return m_dispatcher;
+    }
+
+    void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::MicromapEXT & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+
+      std::swap( m_device, rhs.m_device );
+      std::swap( m_micromap, rhs.m_micromap );
+      std::swap( m_allocator, rhs.m_allocator );
+      std::swap( m_dispatcher, rhs.m_dispatcher );
+    }
+
+
+
+  private:
+
+    
+    VULKAN_HPP_NAMESPACE::Device m_device = {};  // non-owning; needed to call vkDestroyMicromapEXT
+    VULKAN_HPP_NAMESPACE::MicromapEXT m_micromap = {};  // owned handle (null when empty)
+    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
+  };
+
+  class OpticalFlowSessionNV  // RAII owner of a VkOpticalFlowSessionNV (class continues below this chunk)
+  {
+  public:
+    using CType = VkOpticalFlowSessionNV;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eOpticalFlowSessionNV;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+
+  public:
+
+
+    static android::base::expected<OpticalFlowSessionNV, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )  // exception-free factory: RAII wrapper on success, failing VkResult otherwise
+    {
+      VkOpticalFlowSessionNV session;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateOpticalFlowSessionNV( static_cast<VkDevice>( *device ), reinterpret_cast<const VkOpticalFlowSessionCreateInfoNV *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkOpticalFlowSessionNV*>( &session ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        return android::base::unexpected(result);  // propagate the raw VkResult instead of throwing
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::OpticalFlowSessionNV(device, session, allocator);  // ownership moves into the returned wrapper
+    }
+
+
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    OpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )  // throwing counterpart of create(); compiled only when exceptions are enabled
+      : m_device( *device ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateOpticalFlowSessionNV( static_cast<VkDevice>( *device ), reinterpret_cast<const VkOpticalFlowSessionCreateInfoNV *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ), reinterpret_cast<VkOpticalFlowSessionNV*>( &m_session ) ) );  // fix: write into the m_session member; local 'session' is not declared in this scope
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, "vkCreateOpticalFlowSessionNV" );
+      }
+    }
+#endif
+
+    OpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkOpticalFlowSessionNV session, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )  // adopts an already-created handle; this wrapper becomes responsible for destroying it
+      : m_device( *device ), m_session( session ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {}
+
+
+    OpticalFlowSessionNV( std::nullptr_t ) {}  // empty wrapper owning nothing
+
+    ~OpticalFlowSessionNV()
+    {
+      clear();  // destroys the session if still owned
+    }
+
+    OpticalFlowSessionNV() = delete;
+      OpticalFlowSessionNV( OpticalFlowSessionNV const & ) = delete;  // move-only: copying would double-destroy the handle
+    OpticalFlowSessionNV( OpticalFlowSessionNV && rhs ) VULKAN_HPP_NOEXCEPT
+      : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ), m_session( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_session, {} ) ), m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ), m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
+    {}
+      OpticalFlowSessionNV & operator=( OpticalFlowSessionNV const & ) = delete;
+    OpticalFlowSessionNV & operator=( OpticalFlowSessionNV && rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      if ( this != &rhs )  // self-move guard
+      {
+          clear();  // destroy the currently owned handle before taking over rhs's
+          m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
+          m_session = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_session, {} );
+          m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
+        m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+      }
+      return *this;
+    }
+
+    // Access the wrapped Vulkan handle.
+    VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV const & operator*() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_session;
+    }
+
+    // Destroys the session via vkDestroyOpticalFlowSessionNV when non-null, then
+    // resets every member so the wrapper becomes empty.
+    void clear() VULKAN_HPP_NOEXCEPT
+    {
+        if ( m_session )
+        {
+          getDispatcher()->vkDestroyOpticalFlowSessionNV( static_cast<VkDevice>( m_device ), static_cast<VkOpticalFlowSessionNV>( m_session ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+        }
+      m_device = nullptr;
+      m_session = nullptr;
+      m_allocator = nullptr;
+        m_dispatcher = nullptr;
+    }
+
+    // Relinquishes ownership: returns the handle and empties the wrapper without
+    // destroying the session. The caller becomes responsible for destruction.
+    VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV release()
+    {
+
+      m_device = nullptr;
+      m_allocator = nullptr;
+        m_dispatcher = nullptr;
+        return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_session, nullptr );
+    }
+
+
+    // Device this session was created from.
+    VULKAN_HPP_NAMESPACE::Device getDevice() const
+    {
+      return m_device;
+    }
+
+    // Device-level dispatch table; asserts its header version matches this header.
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
+    {
+      VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+      return m_dispatcher;
+    }
+
+    // Exchanges ownership with rhs member-by-member.
+    void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::OpticalFlowSessionNV & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+
+      std::swap( m_device, rhs.m_device );
+      std::swap( m_session, rhs.m_session );
+      std::swap( m_allocator, rhs.m_allocator );
+      std::swap( m_dispatcher, rhs.m_dispatcher );
+    }
+
+
+  //=== VK_NV_optical_flow ===
+
+    
+    // Binds an image view to one of the session's binding points (implementation out-of-line).
+     void bindImage( VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV bindingPoint, VULKAN_HPP_NAMESPACE::ImageView view, VULKAN_HPP_NAMESPACE::ImageLayout layout ) const ;
+
+
+  private:
+
+    
+    VULKAN_HPP_NAMESPACE::Device m_device = {};        // non-owning device handle
+    VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV m_session = {};  // owned session handle
+    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};  // optional allocator used at create/destroy
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;  // device dispatch table
+  };
+
+  // RAII wrapper owning a VkPerformanceConfigurationINTEL. The configuration is
+  // released via vkReleasePerformanceConfigurationINTEL on destruction/clear().
+  // Move-only: copying is deleted; moving transfers ownership.
+  class PerformanceConfigurationINTEL
+  {
+  public:
+    using CType = VkPerformanceConfigurationINTEL;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePerformanceConfigurationINTEL;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+
+  public:
+
+
+    // Exception-free factory: acquires a configuration and returns either the RAII
+    // wrapper or the failing VkResult.
+    static android::base::expected<PerformanceConfigurationINTEL, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const & acquireInfo ) 
+    {
+      VkPerformanceConfigurationINTEL configuration;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkAcquirePerformanceConfigurationINTEL( static_cast<VkDevice>( *device ), reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( &acquireInfo ), reinterpret_cast<VkPerformanceConfigurationINTEL*>( &configuration ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        return android::base::unexpected(result);
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PerformanceConfigurationINTEL(device, configuration);
+    }
+
+
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    // Throwing constructor: acquires a configuration or throws via throwResultException.
+    PerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const & acquireInfo )
+      : m_device( *device ), m_dispatcher( device.getDispatcher() )
+    {
+      // BUG FIX: was `&configuration`, which names no local or member here and does
+      // not compile when exceptions are enabled; the acquired handle must be stored
+      // in the m_configuration member (matching upstream vulkan_raii.hpp).
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkAcquirePerformanceConfigurationINTEL( static_cast<VkDevice>( *device ), reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( &acquireInfo ), reinterpret_cast<VkPerformanceConfigurationINTEL*>( &m_configuration ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, "vkAcquirePerformanceConfigurationINTEL" );
+      }
+    }
+#endif
+
+    // Adopts an already-acquired handle; no driver call is made.
+    PerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkPerformanceConfigurationINTEL configuration )
+      : m_device( *device ), m_configuration( configuration ), m_dispatcher( device.getDispatcher() )
+    {}
+
+
+    // Constructs an empty (null) wrapper.
+    PerformanceConfigurationINTEL( std::nullptr_t ) {}
+
+    // Releases the owned configuration, if any (see clear()).
+    ~PerformanceConfigurationINTEL()
+    {
+      clear();
+    }
+
+    // Move-only type.
+    PerformanceConfigurationINTEL() = delete;
+      PerformanceConfigurationINTEL( PerformanceConfigurationINTEL const & ) = delete;
+    PerformanceConfigurationINTEL( PerformanceConfigurationINTEL && rhs ) VULKAN_HPP_NOEXCEPT
+      : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ), m_configuration( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_configuration, {} ) ), m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
+    {}
+      PerformanceConfigurationINTEL & operator=( PerformanceConfigurationINTEL const & ) = delete;
+    PerformanceConfigurationINTEL & operator=( PerformanceConfigurationINTEL && rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      if ( this != &rhs )
+      {
+          clear();
+          m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
+          m_configuration = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_configuration, {} );
+        m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+      }
+      return *this;
+    }
+
+    // Access the wrapped Vulkan handle.
+    VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL const & operator*() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_configuration;
+    }
+
+    // Releases the owned configuration back to the driver (when non-null) and
+    // resets all members so the wrapper becomes empty.
+    void clear() VULKAN_HPP_NOEXCEPT
+    {
+        if ( m_configuration )
+        {
+          getDispatcher()->vkReleasePerformanceConfigurationINTEL( static_cast<VkDevice>( m_device ), static_cast<VkPerformanceConfigurationINTEL>( m_configuration ) );
+        }
+      m_device = nullptr;
+      m_configuration = nullptr;
+        m_dispatcher = nullptr;
+    }
+
+    // Relinquishes ownership: returns the handle and empties the wrapper without
+    // releasing the configuration.
+    VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL release()
+    {
+
+      m_device = nullptr;
+        m_dispatcher = nullptr;
+        return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_configuration, nullptr );
+    }
+
+
+    // Device this configuration was acquired from.
+    VULKAN_HPP_NAMESPACE::Device getDevice() const
+    {
+      return m_device;
+    }
+
+    // Device-level dispatch table; asserts its header version matches this header.
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
+    {
+      VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+      return m_dispatcher;
+    }
+
+    // Exchanges ownership with rhs member-by-member.
+    void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PerformanceConfigurationINTEL & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+
+      std::swap( m_device, rhs.m_device );
+      std::swap( m_configuration, rhs.m_configuration );
+      std::swap( m_dispatcher, rhs.m_dispatcher );
+    }
+
+
+
+  private:
+
+    
+    VULKAN_HPP_NAMESPACE::Device m_device = {};                       // non-owning device handle
+    VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL m_configuration = {};  // owned configuration handle
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;  // device dispatch table
+  };
+
+  // RAII wrapper owning a VkPipelineCache, destroyed via vkDestroyPipelineCache on
+  // destruction/clear(). Move-only: copying is deleted; moving transfers ownership.
+  class PipelineCache
+  {
+  public:
+    using CType = VkPipelineCache;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipelineCache;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineCache;
+
+  public:
+
+
+    // Exception-free factory: creates a pipeline cache and returns either the RAII
+    // wrapper or the failing VkResult.
+    static android::base::expected<PipelineCache, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) 
+    {
+      VkPipelineCache pipelineCache;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreatePipelineCache( static_cast<VkDevice>( *device ), reinterpret_cast<const VkPipelineCacheCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipelineCache*>( &pipelineCache ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        return android::base::unexpected(result);
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache(device, pipelineCache, allocator);
+    }
+
+
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    // Throwing constructor: creates a pipeline cache or throws via throwResultException.
+    PipelineCache( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( *device ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {
+      // BUG FIX: was `&pipelineCache`, which names no local or member here and does
+      // not compile when exceptions are enabled; the created handle must be stored
+      // in the m_pipelineCache member (matching upstream vulkan_raii.hpp).
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreatePipelineCache( static_cast<VkDevice>( *device ), reinterpret_cast<const VkPipelineCacheCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ), reinterpret_cast<VkPipelineCache*>( &m_pipelineCache ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, "vkCreatePipelineCache" );
+      }
+    }
+#endif
+
+    // Adopts an already-created handle; no driver call is made.
+    PipelineCache( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkPipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( *device ), m_pipelineCache( pipelineCache ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {}
+
+
+    // Constructs an empty (null) wrapper.
+    PipelineCache( std::nullptr_t ) {}
+
+    // Destroys the owned cache, if any (see clear()).
+    ~PipelineCache()
+    {
+      clear();
+    }
+
+    // Move-only type.
+    PipelineCache() = delete;
+      PipelineCache( PipelineCache const & ) = delete;
+    PipelineCache( PipelineCache && rhs ) VULKAN_HPP_NOEXCEPT
+      : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ), m_pipelineCache( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_pipelineCache, {} ) ), m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ), m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
+    {}
+      PipelineCache & operator=( PipelineCache const & ) = delete;
+    PipelineCache & operator=( PipelineCache && rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      if ( this != &rhs )
+      {
+          clear();
+          m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
+          m_pipelineCache = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_pipelineCache, {} );
+          m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
+        m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+      }
+      return *this;
+    }
+
+    // Access the wrapped Vulkan handle.
+    VULKAN_HPP_NAMESPACE::PipelineCache const & operator*() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipelineCache;
+    }
+
+    // Destroys the cache via vkDestroyPipelineCache when non-null, then resets all
+    // members so the wrapper becomes empty.
+    void clear() VULKAN_HPP_NOEXCEPT
+    {
+        if ( m_pipelineCache )
+        {
+          getDispatcher()->vkDestroyPipelineCache( static_cast<VkDevice>( m_device ), static_cast<VkPipelineCache>( m_pipelineCache ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+        }
+      m_device = nullptr;
+      m_pipelineCache = nullptr;
+      m_allocator = nullptr;
+        m_dispatcher = nullptr;
+    }
+
+    // Relinquishes ownership: returns the handle and empties the wrapper without
+    // destroying the cache.
+    VULKAN_HPP_NAMESPACE::PipelineCache release()
+    {
+
+      m_device = nullptr;
+      m_allocator = nullptr;
+        m_dispatcher = nullptr;
+        return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_pipelineCache, nullptr );
+    }
+
+
+    // Device this cache was created from.
+    VULKAN_HPP_NAMESPACE::Device getDevice() const
+    {
+      return m_device;
+    }
+
+    // Device-level dispatch table; asserts its header version matches this header.
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
+    {
+      VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+      return m_dispatcher;
+    }
+
+    // Exchanges ownership with rhs member-by-member.
+    void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+
+      std::swap( m_device, rhs.m_device );
+      std::swap( m_pipelineCache, rhs.m_pipelineCache );
+      std::swap( m_allocator, rhs.m_allocator );
+      std::swap( m_dispatcher, rhs.m_dispatcher );
+    }
+
+
+  //=== VK_VERSION_1_0 ===
+
+    
+    // Retrieves the cache's serialized data blob (implementation out-of-line).
+    VULKAN_HPP_NODISCARD  std::vector<uint8_t> getData(  ) const ;
+
+    
+    // Merges the given source caches into this cache (implementation out-of-line).
+     void merge( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::PipelineCache> const & srcCaches ) const ;
+
+
+  private:
+
+    
+    VULKAN_HPP_NAMESPACE::Device m_device = {};                   // non-owning device handle
+    VULKAN_HPP_NAMESPACE::PipelineCache m_pipelineCache = {};     // owned cache handle
+    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};  // optional allocator used at create/destroy
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;  // device dispatch table
+  };
+
+  // RAII wrapper owning a VkPipeline (compute, graphics, or ray-tracing), destroyed
+  // via vkDestroyPipeline on destruction/clear(). Also records the VkResult the
+  // creating call returned (e.g. ePipelineCompileRequiredEXT), retrievable via
+  // getConstructorSuccessCode(). Move-only.
+  class Pipeline
+  {
+  public:
+    using CType = VkPipeline;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipeline;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipeline;
+
+  public:
+
+
+    // Exception-free factory (compute): creates a single compute pipeline; success
+    // also covers ePipelineCompileRequiredEXT.
+    static android::base::expected<Pipeline, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache, VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) 
+    {
+      VkPipeline pipeline;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateComputePipelines( static_cast<VkDevice>( *device ), pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0, 1, reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipeline*>( &pipeline ) ) );
+      if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )&& ( result != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
+      {
+        return android::base::unexpected(result);
+      }
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline(device, pipeline, allocator);
+    }
+
+
+    // Exception-free factory (graphics).
+    static android::base::expected<Pipeline, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache, VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) 
+    {
+      VkPipeline pipeline;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateGraphicsPipelines( static_cast<VkDevice>( *device ), pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0, 1, reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipeline*>( &pipeline ) ) );
+      if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )&& ( result != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
+      {
+        return android::base::unexpected(result);
+      }
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline(device, pipeline, allocator);
+    }
+
+
+    // Exception-free factory (KHR ray tracing); deferred-operation results also count
+    // as success.
+    static android::base::expected<Pipeline, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR> const & deferredOperation, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache, VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) 
+    {
+      VkPipeline pipeline;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateRayTracingPipelinesKHR( static_cast<VkDevice>( *device ), deferredOperation ? static_cast<VkDeferredOperationKHR>( **deferredOperation ) : 0, pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0, 1, reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipeline*>( &pipeline ) ) );
+      if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )&& ( result != VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR )&& ( result != VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR )&& ( result != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
+      {
+        return android::base::unexpected(result);
+      }
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline(device, pipeline, allocator);
+    }
+
+
+    // Exception-free factory (NV ray tracing).
+    static android::base::expected<Pipeline, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache, VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) 
+    {
+      VkPipeline pipeline;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateRayTracingPipelinesNV( static_cast<VkDevice>( *device ), pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0, 1, reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipeline*>( &pipeline ) ) );
+      if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )&& ( result != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
+      {
+        return android::base::unexpected(result);
+      }
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline(device, pipeline, allocator);
+    }
+
+
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    // Throwing constructor (compute); records the create call's VkResult in
+    // m_constructorSuccessCode and throws on any result that is not a success code.
+    Pipeline( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache, VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) 
+      : m_device( *device ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {
+      m_constructorSuccessCode = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateComputePipelines( static_cast<VkDevice>( *device ), pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0, 1, reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ), reinterpret_cast<VkPipeline*>( &m_pipeline ) ) );
+      if ( ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::eSuccess )&& ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
+      {
+        throwResultException( m_constructorSuccessCode, "vkCreateComputePipelines" );
+      }
+    }
+#endif
+
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    // Throwing constructor (graphics).
+    Pipeline( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache, VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) 
+      : m_device( *device ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {
+      m_constructorSuccessCode = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateGraphicsPipelines( static_cast<VkDevice>( *device ), pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0, 1, reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ), reinterpret_cast<VkPipeline*>( &m_pipeline ) ) );
+      if ( ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::eSuccess )&& ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
+      {
+        throwResultException( m_constructorSuccessCode, "vkCreateGraphicsPipelines" );
+      }
+    }
+#endif
+
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    // Throwing constructor (KHR ray tracing); deferred-operation results count as success.
+    Pipeline( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR> const & deferredOperation, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache, VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) 
+      : m_device( *device ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {
+      m_constructorSuccessCode = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateRayTracingPipelinesKHR( static_cast<VkDevice>( *device ), deferredOperation ? static_cast<VkDeferredOperationKHR>( **deferredOperation ) : 0, pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0, 1, reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ), reinterpret_cast<VkPipeline*>( &m_pipeline ) ) );
+      if ( ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::eSuccess )&& ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR )&& ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR )&& ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
+      {
+        throwResultException( m_constructorSuccessCode, "vkCreateRayTracingPipelinesKHR" );
+      }
+    }
+#endif
+
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    // Throwing constructor (NV ray tracing).
+    Pipeline( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache, VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) 
+      : m_device( *device ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {
+      m_constructorSuccessCode = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateRayTracingPipelinesNV( static_cast<VkDevice>( *device ), pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0, 1, reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ), reinterpret_cast<VkPipeline*>( &m_pipeline ) ) );
+      if ( ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::eSuccess )&& ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
+      {
+        throwResultException( m_constructorSuccessCode, "vkCreateRayTracingPipelinesNV" );
+      }
+    }
+#endif
+
+    // Adopts an already-created handle; successCode records the result of the
+    // original creating call (defaults to eSuccess).
+    Pipeline( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkPipeline pipeline, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr, VULKAN_HPP_NAMESPACE::Result successCode = VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      : m_device( *device ), m_pipeline( pipeline ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_constructorSuccessCode( successCode ), m_dispatcher( device.getDispatcher() )
+    {}
+
+
+    // Constructs an empty (null) wrapper.
+    Pipeline( std::nullptr_t ) {}
+
+    // Destroys the owned pipeline, if any (see clear()).
+    ~Pipeline()
+    {
+      clear();
+    }
+
+    // Move-only type.
+    Pipeline() = delete;
+      Pipeline( Pipeline const & ) = delete;
+    Pipeline( Pipeline && rhs ) VULKAN_HPP_NOEXCEPT
+      : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ), m_pipeline( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_pipeline, {} ) ), m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ), m_constructorSuccessCode( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_constructorSuccessCode, {} ) ), m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
+    {}
+      Pipeline & operator=( Pipeline const & ) = delete;
+    Pipeline & operator=( Pipeline && rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      if ( this != &rhs )
+      {
+          clear();
+          m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
+          m_pipeline = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_pipeline, {} );
+          m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
+          m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_constructorSuccessCode, {} );
+        m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+      }
+      return *this;
+    }
+
+    // Access the wrapped Vulkan handle.
+    VULKAN_HPP_NAMESPACE::Pipeline const & operator*() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipeline;
+    }
+
+    // Destroys the pipeline via vkDestroyPipeline when non-null, then resets all
+    // members so the wrapper becomes empty.
+    void clear() VULKAN_HPP_NOEXCEPT
+    {
+        if ( m_pipeline )
+        {
+          getDispatcher()->vkDestroyPipeline( static_cast<VkDevice>( m_device ), static_cast<VkPipeline>( m_pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+        }
+      m_device = nullptr;
+      m_pipeline = nullptr;
+      m_allocator = nullptr;
+        m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown;
+        m_dispatcher = nullptr;
+    }
+
+    // Relinquishes ownership: returns the handle and empties the wrapper without
+    // destroying the pipeline.
+    VULKAN_HPP_NAMESPACE::Pipeline release()
+    {
+
+      m_device = nullptr;
+      m_allocator = nullptr;
+        m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown;
+        m_dispatcher = nullptr;
+        return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_pipeline, nullptr );
+    }
+
+
+    // VkResult recorded when the pipeline was created (e.g. ePipelineCompileRequiredEXT).
+    VULKAN_HPP_NAMESPACE::Result getConstructorSuccessCode() const
+    {
+      return m_constructorSuccessCode;
+    }
+
+    // Device this pipeline was created from.
+    VULKAN_HPP_NAMESPACE::Device getDevice() const
+    {
+      return m_device;
+    }
+
+    // Device-level dispatch table; asserts its header version matches this header.
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
+    {
+      VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+      return m_dispatcher;
+    }
+
+    // Exchanges ownership with rhs member-by-member.
+    void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+
+      std::swap( m_device, rhs.m_device );
+      std::swap( m_pipeline, rhs.m_pipeline );
+      std::swap( m_allocator, rhs.m_allocator );
+      std::swap( m_constructorSuccessCode, rhs.m_constructorSuccessCode );
+      std::swap( m_dispatcher, rhs.m_dispatcher );
+    }
+
+
+  //=== VK_AMD_shader_info ===
+
+    
+    // Out-of-line query implementations follow; declarations only here.
+    VULKAN_HPP_NODISCARD  std::vector<uint8_t> getShaderInfoAMD( VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType ) const ;
+
+  //=== VK_NV_ray_tracing ===
+
+    template <typename DataType>
+    VULKAN_HPP_NODISCARD  std::vector<DataType> getRayTracingShaderGroupHandlesNV( uint32_t firstGroup, uint32_t groupCount, size_t dataSize ) const ;
+
+    template <typename DataType>
+    VULKAN_HPP_NODISCARD  DataType getRayTracingShaderGroupHandleNV( uint32_t firstGroup, uint32_t groupCount ) const ;
+
+    
+     void compileDeferredNV( uint32_t shader ) const ;
+
+  //=== VK_KHR_ray_tracing_pipeline ===
+
+    template <typename DataType>
+    VULKAN_HPP_NODISCARD  std::vector<DataType> getRayTracingShaderGroupHandlesKHR( uint32_t firstGroup, uint32_t groupCount, size_t dataSize ) const ;
+
+    template <typename DataType>
+    VULKAN_HPP_NODISCARD  DataType getRayTracingShaderGroupHandleKHR( uint32_t firstGroup, uint32_t groupCount ) const ;
+
+    template <typename DataType>
+    VULKAN_HPP_NODISCARD  std::vector<DataType> getRayTracingCaptureReplayShaderGroupHandlesKHR( uint32_t firstGroup, uint32_t groupCount, size_t dataSize ) const ;
+
+    template <typename DataType>
+    VULKAN_HPP_NODISCARD  DataType getRayTracingCaptureReplayShaderGroupHandleKHR( uint32_t firstGroup, uint32_t groupCount ) const ;
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::DeviceSize getRayTracingShaderGroupStackSizeKHR( uint32_t group, VULKAN_HPP_NAMESPACE::ShaderGroupShaderKHR groupShader ) const  VULKAN_HPP_NOEXCEPT;
+
+
+  private:
+
+    
+    VULKAN_HPP_NAMESPACE::Device m_device = {};                    // non-owning device handle
+    VULKAN_HPP_NAMESPACE::Pipeline m_pipeline = {};                // owned pipeline handle
+    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};  // optional allocator used at create/destroy
+    VULKAN_HPP_NAMESPACE::Result m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown;  // result of the creating call
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;   // device dispatch table
+  };
+
+  // RAII owner of a batch of pipelines produced by one vkCreate*Pipelines call.
+  // Inherits std::vector, so the usual vector accessors apply; each element is a
+  // RAII Pipeline that carries the shared batch result code.
+  class Pipelines : public std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline>
+  {
+  public:
+    
+
+    // Non-throwing factory wrapping vkCreateComputePipelines. eSuccess and
+    // ePipelineCompileRequiredEXT both count as success; any other result is
+    // returned as the error of the expected<>.
+    static android::base::expected<Pipelines, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) 
+    {
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * dispatcher = device.getDispatcher();
+      std::vector<VkPipeline> pipelines( createInfos.size() );
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->vkCreateComputePipelines( static_cast<VkDevice>( *device ), pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0, createInfos.size(), reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), pipelines.data() ) );
+      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )|| ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
+      {
+        VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipelines ret(nullptr);
+        ret.reserve( createInfos.size() );
+        // Each raw handle is wrapped in a RAII Pipeline that remembers the batch result.
+        for ( auto const & pipeline : pipelines )
+        {
+          ret.emplace_back( device, pipeline, allocator, result );
+        }
+        return std::move(ret);
+      }
+      else
+      {
+        return android::base::unexpected(result);
+      }
+    }
+
+
+    // Non-throwing factory wrapping vkCreateGraphicsPipelines (same success set as above).
+    static android::base::expected<Pipelines, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) 
+    {
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * dispatcher = device.getDispatcher();
+      std::vector<VkPipeline> pipelines( createInfos.size() );
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->vkCreateGraphicsPipelines( static_cast<VkDevice>( *device ), pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0, createInfos.size(), reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), pipelines.data() ) );
+      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )|| ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
+      {
+        VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipelines ret(nullptr);
+        ret.reserve( createInfos.size() );
+        for ( auto const & pipeline : pipelines )
+        {
+          ret.emplace_back( device, pipeline, allocator, result );
+        }
+        return std::move(ret);
+      }
+      else
+      {
+        return android::base::unexpected(result);
+      }
+    }
+
+
+    // Non-throwing factory wrapping vkCreateRayTracingPipelinesKHR. Additionally accepts
+    // eOperationDeferredKHR / eOperationNotDeferredKHR as success (deferred-host-operation
+    // completion states for the optional deferredOperation argument).
+    static android::base::expected<Pipelines, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR> const & deferredOperation, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) 
+    {
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * dispatcher = device.getDispatcher();
+      std::vector<VkPipeline> pipelines( createInfos.size() );
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->vkCreateRayTracingPipelinesKHR( static_cast<VkDevice>( *device ), deferredOperation ? static_cast<VkDeferredOperationKHR>( **deferredOperation ) : 0, pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0, createInfos.size(), reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), pipelines.data() ) );
+      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )|| ( result == VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR )|| ( result == VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR )|| ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
+      {
+        VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipelines ret(nullptr);
+        ret.reserve( createInfos.size() );
+        for ( auto const & pipeline : pipelines )
+        {
+          ret.emplace_back( device, pipeline, allocator, result );
+        }
+        return std::move(ret);
+      }
+      else
+      {
+        return android::base::unexpected(result);
+      }
+    }
+
+
+    // Non-throwing factory wrapping vkCreateRayTracingPipelinesNV (same success set as
+    // the compute/graphics overloads).
+    static android::base::expected<Pipelines, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) 
+    {
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * dispatcher = device.getDispatcher();
+      std::vector<VkPipeline> pipelines( createInfos.size() );
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->vkCreateRayTracingPipelinesNV( static_cast<VkDevice>( *device ), pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0, createInfos.size(), reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), pipelines.data() ) );
+      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )|| ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
+      {
+        VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipelines ret(nullptr);
+        ret.reserve( createInfos.size() );
+        for ( auto const & pipeline : pipelines )
+        {
+          ret.emplace_back( device, pipeline, allocator, result );
+        }
+        return std::move(ret);
+      }
+      else
+      {
+        return android::base::unexpected(result);
+      }
+    }
+
+    
+
+    // Throwing constructors: same call sequences as the create() factories above,
+    // but populate *this directly and throw via throwResultException on failure.
+    // Only compiled when exceptions are enabled.
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    Pipelines( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) 
+    {
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * dispatcher = device.getDispatcher();
+      std::vector<VkPipeline> pipelines( createInfos.size() );
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->vkCreateComputePipelines( static_cast<VkDevice>( *device ), pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0, createInfos.size(), reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), pipelines.data() ) );
+      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )|| ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
+      {
+        this->reserve( createInfos.size() );
+        for ( auto const & pipeline : pipelines )
+        {
+          this->emplace_back( device, pipeline, allocator, result );
+        }
+      }
+      else
+      {
+        throwResultException( result, "vkCreateComputePipelines" );
+      }
+    }
+#endif
+
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    Pipelines( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) 
+    {
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * dispatcher = device.getDispatcher();
+      std::vector<VkPipeline> pipelines( createInfos.size() );
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->vkCreateGraphicsPipelines( static_cast<VkDevice>( *device ), pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0, createInfos.size(), reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), pipelines.data() ) );
+      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )|| ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
+      {
+        this->reserve( createInfos.size() );
+        for ( auto const & pipeline : pipelines )
+        {
+          this->emplace_back( device, pipeline, allocator, result );
+        }
+      }
+      else
+      {
+        throwResultException( result, "vkCreateGraphicsPipelines" );
+      }
+    }
+#endif
+
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    Pipelines( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR> const & deferredOperation, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) 
+    {
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * dispatcher = device.getDispatcher();
+      std::vector<VkPipeline> pipelines( createInfos.size() );
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->vkCreateRayTracingPipelinesKHR( static_cast<VkDevice>( *device ), deferredOperation ? static_cast<VkDeferredOperationKHR>( **deferredOperation ) : 0, pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0, createInfos.size(), reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), pipelines.data() ) );
+      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )|| ( result == VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR )|| ( result == VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR )|| ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
+      {
+        this->reserve( createInfos.size() );
+        for ( auto const & pipeline : pipelines )
+        {
+          this->emplace_back( device, pipeline, allocator, result );
+        }
+      }
+      else
+      {
+        throwResultException( result, "vkCreateRayTracingPipelinesKHR" );
+      }
+    }
+#endif
+
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    Pipelines( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) 
+    {
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * dispatcher = device.getDispatcher();
+      std::vector<VkPipeline> pipelines( createInfos.size() );
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->vkCreateRayTracingPipelinesNV( static_cast<VkDevice>( *device ), pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0, createInfos.size(), reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), pipelines.data() ) );
+      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )|| ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
+      {
+        this->reserve( createInfos.size() );
+        for ( auto const & pipeline : pipelines )
+        {
+          this->emplace_back( device, pipeline, allocator, result );
+        }
+      }
+      else
+      {
+        throwResultException( result, "vkCreateRayTracingPipelinesNV" );
+      }
+    }
+#endif
+
+    // Empty, non-owning container (used as the seed object inside create()).
+    Pipelines( std::nullptr_t ) {}
+
+    // Move-only: copying would double-destroy the wrapped pipelines.
+    Pipelines() = delete;
+    Pipelines( Pipelines const & ) = delete;
+    Pipelines( Pipelines && rhs ) = default;
+    Pipelines & operator=( Pipelines const & ) = delete;
+    Pipelines & operator=( Pipelines && rhs ) = default;
+  };
+
+
+  // RAII owner of a VkPipelineLayout. Destroys the handle with the stored allocation
+  // callbacks on destruction / clear(), unless ownership was relinquished via release().
+  // Move-only.
+  class PipelineLayout
+  {
+  public:
+    using CType = VkPipelineLayout;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipelineLayout;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineLayout;
+
+  public:
+
+
+    // Non-throwing factory wrapping vkCreatePipelineLayout: returns the wrapper on
+    // eSuccess, otherwise the failing VULKAN_HPP_NAMESPACE::Result as the error.
+    static android::base::expected<PipelineLayout, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) 
+    {
+      VkPipelineLayout pipelineLayout;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreatePipelineLayout( static_cast<VkDevice>( *device ), reinterpret_cast<const VkPipelineLayoutCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipelineLayout*>( &pipelineLayout ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        return android::base::unexpected(result);
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineLayout(device, pipelineLayout, allocator);
+    }
+
+
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    // Throwing constructor wrapping vkCreatePipelineLayout; throws via
+    // throwResultException on any result other than eSuccess.
+    PipelineLayout( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( *device ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {
+      // FIX: write the created handle into the member m_pipelineLayout; the previous
+      // code referenced an undeclared local `pipelineLayout` (compile error).
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreatePipelineLayout( static_cast<VkDevice>( *device ), reinterpret_cast<const VkPipelineLayoutCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ), reinterpret_cast<VkPipelineLayout*>( &m_pipelineLayout ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, "vkCreatePipelineLayout" );
+      }
+    }
+#endif
+
+    // Adopts an already-created handle; this wrapper becomes its owner.
+    PipelineLayout( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkPipelineLayout pipelineLayout, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( *device ), m_pipelineLayout( pipelineLayout ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {}
+
+
+    // Empty (non-owning) wrapper.
+    PipelineLayout( std::nullptr_t ) {}
+
+    ~PipelineLayout()
+    {
+      clear();
+    }
+
+    PipelineLayout() = delete;
+      PipelineLayout( PipelineLayout const & ) = delete;
+    PipelineLayout( PipelineLayout && rhs ) VULKAN_HPP_NOEXCEPT
+      : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ), m_pipelineLayout( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_pipelineLayout, {} ) ), m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ), m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
+    {}
+      PipelineLayout & operator=( PipelineLayout const & ) = delete;
+    PipelineLayout & operator=( PipelineLayout && rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      if ( this != &rhs )
+      {
+          // Destroy any currently-owned handle before stealing rhs's state.
+          clear();
+          m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
+          m_pipelineLayout = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_pipelineLayout, {} );
+          m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
+        m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+      }
+      return *this;
+    }
+
+    VULKAN_HPP_NAMESPACE::PipelineLayout const & operator*() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipelineLayout;
+    }
+
+    // Destroys the owned handle (if any) via vkDestroyPipelineLayout and resets all members.
+    void clear() VULKAN_HPP_NOEXCEPT
+    {
+        if ( m_pipelineLayout )
+        {
+          getDispatcher()->vkDestroyPipelineLayout( static_cast<VkDevice>( m_device ), static_cast<VkPipelineLayout>( m_pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+        }
+      m_device = nullptr;
+      m_pipelineLayout = nullptr;
+      m_allocator = nullptr;
+        m_dispatcher = nullptr;
+    }
+
+    // Relinquishes ownership: returns the handle WITHOUT destroying it and resets members.
+    VULKAN_HPP_NAMESPACE::PipelineLayout release()
+    {
+
+      m_device = nullptr;
+      m_allocator = nullptr;
+        m_dispatcher = nullptr;
+        return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_pipelineLayout, nullptr );
+    }
+
+
+    VULKAN_HPP_NAMESPACE::Device getDevice() const
+    {
+      return m_device;
+    }
+
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
+    {
+      VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+      return m_dispatcher;
+    }
+
+    void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineLayout & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+
+      std::swap( m_device, rhs.m_device );
+      std::swap( m_pipelineLayout, rhs.m_pipelineLayout );
+      std::swap( m_allocator, rhs.m_allocator );
+      std::swap( m_dispatcher, rhs.m_dispatcher );
+    }
+
+
+
+  private:
+
+    
+    VULKAN_HPP_NAMESPACE::Device m_device = {};
+    VULKAN_HPP_NAMESPACE::PipelineLayout m_pipelineLayout = {};
+    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
+  };
+
+  // RAII owner of a VkPrivateDataSlot. Destroys the handle with the stored allocation
+  // callbacks on destruction / clear(), unless ownership was relinquished via release().
+  // Move-only.
+  class PrivateDataSlot
+  {
+  public:
+    using CType = VkPrivateDataSlot;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePrivateDataSlot;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+
+  public:
+
+
+    // Non-throwing factory wrapping vkCreatePrivateDataSlot: returns the wrapper on
+    // eSuccess, otherwise the failing VULKAN_HPP_NAMESPACE::Result as the error.
+    static android::base::expected<PrivateDataSlot, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) 
+    {
+      VkPrivateDataSlot privateDataSlot;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreatePrivateDataSlot( static_cast<VkDevice>( *device ), reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPrivateDataSlot*>( &privateDataSlot ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        return android::base::unexpected(result);
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PrivateDataSlot(device, privateDataSlot, allocator);
+    }
+
+
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    // Throwing constructor wrapping vkCreatePrivateDataSlot; throws via
+    // throwResultException on any result other than eSuccess.
+    PrivateDataSlot( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( *device ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {
+      // FIX: write the created handle into the member m_privateDataSlot; the previous
+      // code referenced an undeclared local `privateDataSlot` (compile error).
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreatePrivateDataSlot( static_cast<VkDevice>( *device ), reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ), reinterpret_cast<VkPrivateDataSlot*>( &m_privateDataSlot ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, "vkCreatePrivateDataSlot" );
+      }
+    }
+#endif
+
+    // Adopts an already-created handle; this wrapper becomes its owner.
+    PrivateDataSlot( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkPrivateDataSlot privateDataSlot, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( *device ), m_privateDataSlot( privateDataSlot ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {}
+
+
+    // Empty (non-owning) wrapper.
+    PrivateDataSlot( std::nullptr_t ) {}
+
+    ~PrivateDataSlot()
+    {
+      clear();
+    }
+
+    PrivateDataSlot() = delete;
+      PrivateDataSlot( PrivateDataSlot const & ) = delete;
+    PrivateDataSlot( PrivateDataSlot && rhs ) VULKAN_HPP_NOEXCEPT
+      : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ), m_privateDataSlot( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_privateDataSlot, {} ) ), m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ), m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
+    {}
+      PrivateDataSlot & operator=( PrivateDataSlot const & ) = delete;
+    PrivateDataSlot & operator=( PrivateDataSlot && rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      if ( this != &rhs )
+      {
+          // Destroy any currently-owned handle before stealing rhs's state.
+          clear();
+          m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
+          m_privateDataSlot = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_privateDataSlot, {} );
+          m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
+        m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+      }
+      return *this;
+    }
+
+    VULKAN_HPP_NAMESPACE::PrivateDataSlot const & operator*() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_privateDataSlot;
+    }
+
+    // Destroys the owned handle (if any) via vkDestroyPrivateDataSlot and resets all members.
+    void clear() VULKAN_HPP_NOEXCEPT
+    {
+        if ( m_privateDataSlot )
+        {
+          getDispatcher()->vkDestroyPrivateDataSlot( static_cast<VkDevice>( m_device ), static_cast<VkPrivateDataSlot>( m_privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+        }
+      m_device = nullptr;
+      m_privateDataSlot = nullptr;
+      m_allocator = nullptr;
+        m_dispatcher = nullptr;
+    }
+
+    // Relinquishes ownership: returns the handle WITHOUT destroying it and resets members.
+    VULKAN_HPP_NAMESPACE::PrivateDataSlot release()
+    {
+
+      m_device = nullptr;
+      m_allocator = nullptr;
+        m_dispatcher = nullptr;
+        return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_privateDataSlot, nullptr );
+    }
+
+
+    VULKAN_HPP_NAMESPACE::Device getDevice() const
+    {
+      return m_device;
+    }
+
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
+    {
+      VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+      return m_dispatcher;
+    }
+
+    void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PrivateDataSlot & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+
+      std::swap( m_device, rhs.m_device );
+      std::swap( m_privateDataSlot, rhs.m_privateDataSlot );
+      std::swap( m_allocator, rhs.m_allocator );
+      std::swap( m_dispatcher, rhs.m_dispatcher );
+    }
+
+
+
+  private:
+
+    
+    VULKAN_HPP_NAMESPACE::Device m_device = {};
+    VULKAN_HPP_NAMESPACE::PrivateDataSlot m_privateDataSlot = {};
+    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
+  };
+
+  // RAII owner of a VkQueryPool. Destroys the handle with the stored allocation
+  // callbacks on destruction / clear(), unless ownership was relinquished via release().
+  // Move-only. Also exposes query readback/reset helpers (declared below, defined elsewhere).
+  class QueryPool
+  {
+  public:
+    using CType = VkQueryPool;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eQueryPool;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueryPool;
+
+  public:
+
+
+    // Non-throwing factory wrapping vkCreateQueryPool: returns the wrapper on
+    // eSuccess, otherwise the failing VULKAN_HPP_NAMESPACE::Result as the error.
+    static android::base::expected<QueryPool, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) 
+    {
+      VkQueryPool queryPool;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateQueryPool( static_cast<VkDevice>( *device ), reinterpret_cast<const VkQueryPoolCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkQueryPool*>( &queryPool ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        return android::base::unexpected(result);
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::QueryPool(device, queryPool, allocator);
+    }
+
+
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    // Throwing constructor wrapping vkCreateQueryPool; throws via
+    // throwResultException on any result other than eSuccess.
+    QueryPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( *device ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {
+      // FIX: write the created handle into the member m_queryPool; the previous
+      // code referenced an undeclared local `queryPool` (compile error).
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateQueryPool( static_cast<VkDevice>( *device ), reinterpret_cast<const VkQueryPoolCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ), reinterpret_cast<VkQueryPool*>( &m_queryPool ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, "vkCreateQueryPool" );
+      }
+    }
+#endif
+
+    // Adopts an already-created handle; this wrapper becomes its owner.
+    QueryPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkQueryPool queryPool, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( *device ), m_queryPool( queryPool ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {}
+
+
+    // Empty (non-owning) wrapper.
+    QueryPool( std::nullptr_t ) {}
+
+    ~QueryPool()
+    {
+      clear();
+    }
+
+    QueryPool() = delete;
+      QueryPool( QueryPool const & ) = delete;
+    QueryPool( QueryPool && rhs ) VULKAN_HPP_NOEXCEPT
+      : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ), m_queryPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_queryPool, {} ) ), m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ), m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
+    {}
+      QueryPool & operator=( QueryPool const & ) = delete;
+    QueryPool & operator=( QueryPool && rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      if ( this != &rhs )
+      {
+          // Destroy any currently-owned handle before stealing rhs's state.
+          clear();
+          m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
+          m_queryPool = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_queryPool, {} );
+          m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
+        m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+      }
+      return *this;
+    }
+
+    VULKAN_HPP_NAMESPACE::QueryPool const & operator*() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_queryPool;
+    }
+
+    // Destroys the owned handle (if any) via vkDestroyQueryPool and resets all members.
+    void clear() VULKAN_HPP_NOEXCEPT
+    {
+        if ( m_queryPool )
+        {
+          getDispatcher()->vkDestroyQueryPool( static_cast<VkDevice>( m_device ), static_cast<VkQueryPool>( m_queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+        }
+      m_device = nullptr;
+      m_queryPool = nullptr;
+      m_allocator = nullptr;
+        m_dispatcher = nullptr;
+    }
+
+    // Relinquishes ownership: returns the handle WITHOUT destroying it and resets members.
+    VULKAN_HPP_NAMESPACE::QueryPool release()
+    {
+
+      m_device = nullptr;
+      m_allocator = nullptr;
+        m_dispatcher = nullptr;
+        return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_queryPool, nullptr );
+    }
+
+
+    VULKAN_HPP_NAMESPACE::Device getDevice() const
+    {
+      return m_device;
+    }
+
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
+    {
+      VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+      return m_dispatcher;
+    }
+
+    void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::QueryPool & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+
+      std::swap( m_device, rhs.m_device );
+      std::swap( m_queryPool, rhs.m_queryPool );
+      std::swap( m_allocator, rhs.m_allocator );
+      std::swap( m_dispatcher, rhs.m_dispatcher );
+    }
+
+
+  //=== VK_VERSION_1_0 ===
+
+    template <typename DataType>
+    VULKAN_HPP_NODISCARD  std::pair<VULKAN_HPP_NAMESPACE::Result, std::vector<DataType>> getResults( uint32_t firstQuery, uint32_t queryCount, size_t dataSize, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const ;
+
+    template <typename DataType>
+    VULKAN_HPP_NODISCARD  std::pair<VULKAN_HPP_NAMESPACE::Result, DataType> getResult( uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const ;
+
+  //=== VK_VERSION_1_2 ===
+
+    
+     void reset( uint32_t firstQuery, uint32_t queryCount ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_EXT_host_query_reset ===
+
+    
+     void resetEXT( uint32_t firstQuery, uint32_t queryCount ) const  VULKAN_HPP_NOEXCEPT;
+
+
+  private:
+
+    
+    VULKAN_HPP_NAMESPACE::Device m_device = {};
+    VULKAN_HPP_NAMESPACE::QueryPool m_queryPool = {};
+    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
+  };
+
+  // Wrapper around a VkQueue.  Queues are owned by their VkDevice and are
+  // never destroyed explicitly, so - unlike the other RAII wrappers in this
+  // file - Queue is copyable and clear() only nulls the stored state.
+  class Queue
+  {
+  public:
+    using CType = VkQueue;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eQueue;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueue;
+
+  public:
+    // Fetches the queue at (queueFamilyIndex, queueIndex) via vkGetDeviceQueue.
+    Queue( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, uint32_t queueFamilyIndex, uint32_t queueIndex )
+      : m_dispatcher( device.getDispatcher() )
+    {
+      getDispatcher()->vkGetDeviceQueue( static_cast<VkDevice>( *device ), queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue *>( &m_queue ) );
+    }
+
+    // Fetches the queue described by queueInfo via vkGetDeviceQueue2.
+    Queue( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 const & queueInfo )
+      : m_dispatcher( device.getDispatcher() )
+    {
+      getDispatcher()->vkGetDeviceQueue2( static_cast<VkDevice>( *device ), reinterpret_cast<const VkDeviceQueueInfo2 *>( &queueInfo ), reinterpret_cast<VkQueue *>( &m_queue ) );
+    }
+
+    // Wraps an externally obtained raw VkQueue.
+    Queue( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkQueue queue )
+      : m_queue( queue ), m_dispatcher( device.getDispatcher() )
+    {
+    }
+
+    Queue( std::nullptr_t ) {}
+
+    ~Queue()
+    {
+      clear();
+    }
+
+    Queue() = delete;
+
+    // Copyable: the handle is not owned, so a shallow copy is safe.
+    Queue( Queue const & rhs ) : m_queue( rhs.m_queue ), m_dispatcher( rhs.m_dispatcher ) {}
+
+    Queue( Queue && rhs ) VULKAN_HPP_NOEXCEPT
+      : m_queue( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_queue, {} ) )
+      , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
+    {
+    }
+
+    Queue & operator=( Queue const & rhs )
+    {
+      m_queue      = rhs.m_queue;
+      m_dispatcher = rhs.m_dispatcher;
+      return *this;
+    }
+
+    Queue & operator=( Queue && rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      if ( this != &rhs )
+      {
+        m_queue      = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_queue, {} );
+        m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+      }
+      return *this;
+    }
+
+    // Access the wrapped handle (may be null for a cleared/moved-from wrapper).
+    VULKAN_HPP_NAMESPACE::Queue const & operator*() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_queue;
+    }
+
+    // Drops the stored handle and dispatcher.  Nothing is destroyed: queues
+    // live and die with their device.
+    void clear() VULKAN_HPP_NOEXCEPT
+    {
+      m_queue      = nullptr;
+      m_dispatcher = nullptr;
+    }
+
+    // Returns the handle and leaves the wrapper empty.
+    VULKAN_HPP_NAMESPACE::Queue release()
+    {
+      m_dispatcher = nullptr;
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_queue, nullptr );
+    }
+
+    // Device dispatch table; asserts it matches this header's version.
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
+    {
+      VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+      return m_dispatcher;
+    }
+
+    // Exchanges handle and dispatcher with rhs.
+    void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Queue & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      std::swap( m_queue, rhs.m_queue );
+      std::swap( m_dispatcher, rhs.m_dispatcher );
+    }
+
+    //=== VK_VERSION_1_0 ===
+
+    void submit( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo> const & submits, VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const;
+
+    void waitIdle() const;
+
+    void bindSparse( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindSparseInfo> const & bindInfo, VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const;
+
+    //=== VK_VERSION_1_3 ===
+
+    void submit2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2> const & submits, VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const;
+
+    //=== VK_KHR_swapchain ===
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR & presentInfo ) const;
+
+    //=== VK_EXT_debug_utils ===
+
+    void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT;
+
+    void endDebugUtilsLabelEXT() const VULKAN_HPP_NOEXCEPT;
+
+    void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT;
+
+    //=== VK_NV_device_diagnostic_checkpoints ===
+
+    VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV> getCheckpointDataNV() const;
+
+    //=== VK_INTEL_performance_query ===
+
+    void setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration ) const;
+
+    //=== VK_KHR_synchronization2 ===
+
+    void submit2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2> const & submits, VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const;
+
+    VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV> getCheckpointData2NV() const;
+
+  private:
+    VULKAN_HPP_NAMESPACE::Queue m_queue = {};
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
+  };
+
+  // RAII owner of a VkRenderPass: the handle is destroyed via
+  // vkDestroyRenderPass (with the stored allocation callbacks) on clear() or
+  // destruction.  Move-only.
+  class RenderPass
+  {
+  public:
+    using CType = VkRenderPass;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eRenderPass;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eRenderPass;
+
+  public:
+
+    // Exception-free factory: creates a render pass via vkCreateRenderPass and
+    // returns the wrapper, or the failing Result as the unexpected value.
+    static android::base::expected<RenderPass, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::RenderPassCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+    {
+      VkRenderPass renderPass;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateRenderPass( static_cast<VkDevice>( *device ), reinterpret_cast<const VkRenderPassCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkRenderPass*>( &renderPass ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        return android::base::unexpected(result);
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::RenderPass(device, renderPass, allocator);
+    }
+
+    // Exception-free factory using the extended vkCreateRenderPass2 entry point.
+    static android::base::expected<RenderPass, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+    {
+      VkRenderPass renderPass;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateRenderPass2( static_cast<VkDevice>( *device ), reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkRenderPass*>( &renderPass ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        return android::base::unexpected(result);
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::RenderPass(device, renderPass, allocator);
+    }
+
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    // Throwing constructor (compiled only when exceptions are enabled).
+    RenderPass( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::RenderPassCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( *device ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {
+      // FIX: write into the member m_renderPass; the previous code referenced
+      // an undeclared local 'renderPass' and did not compile when
+      // VULKAN_HPP_NO_EXCEPTIONS was undefined.
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateRenderPass( static_cast<VkDevice>( *device ), reinterpret_cast<const VkRenderPassCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ), reinterpret_cast<VkRenderPass*>( &m_renderPass ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, "vkCreateRenderPass" );
+      }
+    }
+#endif
+
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    // Throwing constructor for the vkCreateRenderPass2 path.
+    RenderPass( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( *device ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {
+      // FIX: same undeclared-local defect as above; write into m_renderPass.
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateRenderPass2( static_cast<VkDevice>( *device ), reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ), reinterpret_cast<VkRenderPass*>( &m_renderPass ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, "vkCreateRenderPass2" );
+      }
+    }
+#endif
+
+    // Adopts an already-created raw handle; destruction is taken over by
+    // this wrapper.
+    RenderPass( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkRenderPass renderPass, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( *device ), m_renderPass( renderPass ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {}
+
+
+    RenderPass( std::nullptr_t ) {}
+
+    ~RenderPass()
+    {
+      clear();
+    }
+
+    RenderPass() = delete;
+    RenderPass( RenderPass const & ) = delete;
+    RenderPass( RenderPass && rhs ) VULKAN_HPP_NOEXCEPT
+      : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ), m_renderPass( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_renderPass, {} ) ), m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ), m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
+    {}
+    RenderPass & operator=( RenderPass const & ) = delete;
+    RenderPass & operator=( RenderPass && rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      if ( this != &rhs )
+      {
+        // Destroy our current render pass before stealing rhs's state.
+        clear();
+        m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
+        m_renderPass = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_renderPass, {} );
+        m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
+        m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+      }
+      return *this;
+    }
+
+    // Access the wrapped handle (null when cleared/released/moved-from).
+    VULKAN_HPP_NAMESPACE::RenderPass const & operator*() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_renderPass;
+    }
+
+    // Destroys the owned render pass (if any) and nulls all members.
+    void clear() VULKAN_HPP_NOEXCEPT
+    {
+      if ( m_renderPass )
+      {
+        getDispatcher()->vkDestroyRenderPass( static_cast<VkDevice>( m_device ), static_cast<VkRenderPass>( m_renderPass ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+      }
+      m_device = nullptr;
+      m_renderPass = nullptr;
+      m_allocator = nullptr;
+      m_dispatcher = nullptr;
+    }
+
+    // Relinquishes ownership: returns the handle without destroying it.
+    VULKAN_HPP_NAMESPACE::RenderPass release()
+    {
+      m_device = nullptr;
+      m_allocator = nullptr;
+      m_dispatcher = nullptr;
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_renderPass, nullptr );
+    }
+
+    // Returns the (non-owning) device handle this render pass was created on.
+    VULKAN_HPP_NAMESPACE::Device getDevice() const
+    {
+      return m_device;
+    }
+
+    // Device dispatch table; asserts it matches this header's version.
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
+    {
+      VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+      return m_dispatcher;
+    }
+
+    // Exchanges all state with rhs.
+    void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::RenderPass & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      std::swap( m_device, rhs.m_device );
+      std::swap( m_renderPass, rhs.m_renderPass );
+      std::swap( m_allocator, rhs.m_allocator );
+      std::swap( m_dispatcher, rhs.m_dispatcher );
+    }
+
+
+  //=== VK_VERSION_1_0 ===
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Extent2D getRenderAreaGranularity() const VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_HUAWEI_subpass_shading ===
+
+    VULKAN_HPP_NODISCARD std::pair<VULKAN_HPP_NAMESPACE::Result, VULKAN_HPP_NAMESPACE::Extent2D> getSubpassShadingMaxWorkgroupSizeHUAWEI() const;
+
+
+  private:
+    VULKAN_HPP_NAMESPACE::Device m_device = {};
+    VULKAN_HPP_NAMESPACE::RenderPass m_renderPass = {};
+    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
+  };
+
+  // RAII owner of a VkSampler: destroyed via vkDestroySampler (with the
+  // stored allocation callbacks) on clear() or destruction.  Move-only.
+  class Sampler
+  {
+  public:
+    using CType = VkSampler;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSampler;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSampler;
+
+  public:
+
+    // Exception-free factory: creates a sampler via vkCreateSampler and
+    // returns the wrapper, or the failing Result as the unexpected value.
+    static android::base::expected<Sampler, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::SamplerCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+    {
+      VkSampler sampler;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateSampler( static_cast<VkDevice>( *device ), reinterpret_cast<const VkSamplerCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSampler*>( &sampler ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        return android::base::unexpected(result);
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Sampler(device, sampler, allocator);
+    }
+
+
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    // Throwing constructor (compiled only when exceptions are enabled).
+    Sampler( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::SamplerCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( *device ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {
+      // FIX: write into the member m_sampler; the previous code referenced an
+      // undeclared local 'sampler' and did not compile when
+      // VULKAN_HPP_NO_EXCEPTIONS was undefined.
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateSampler( static_cast<VkDevice>( *device ), reinterpret_cast<const VkSamplerCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ), reinterpret_cast<VkSampler*>( &m_sampler ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, "vkCreateSampler" );
+      }
+    }
+#endif
+
+    // Adopts an already-created raw handle; destruction is taken over by
+    // this wrapper.
+    Sampler( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkSampler sampler, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( *device ), m_sampler( sampler ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {}
+
+
+    Sampler( std::nullptr_t ) {}
+
+    ~Sampler()
+    {
+      clear();
+    }
+
+    Sampler() = delete;
+    Sampler( Sampler const & ) = delete;
+    Sampler( Sampler && rhs ) VULKAN_HPP_NOEXCEPT
+      : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ), m_sampler( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_sampler, {} ) ), m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ), m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
+    {}
+    Sampler & operator=( Sampler const & ) = delete;
+    Sampler & operator=( Sampler && rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      if ( this != &rhs )
+      {
+        // Destroy our current sampler before stealing rhs's state.
+        clear();
+        m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
+        m_sampler = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_sampler, {} );
+        m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
+        m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+      }
+      return *this;
+    }
+
+    // Access the wrapped handle (null when cleared/released/moved-from).
+    VULKAN_HPP_NAMESPACE::Sampler const & operator*() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_sampler;
+    }
+
+    // Destroys the owned sampler (if any) and nulls all members.
+    void clear() VULKAN_HPP_NOEXCEPT
+    {
+      if ( m_sampler )
+      {
+        getDispatcher()->vkDestroySampler( static_cast<VkDevice>( m_device ), static_cast<VkSampler>( m_sampler ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+      }
+      m_device = nullptr;
+      m_sampler = nullptr;
+      m_allocator = nullptr;
+      m_dispatcher = nullptr;
+    }
+
+    // Relinquishes ownership: returns the handle without destroying it.
+    VULKAN_HPP_NAMESPACE::Sampler release()
+    {
+      m_device = nullptr;
+      m_allocator = nullptr;
+      m_dispatcher = nullptr;
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_sampler, nullptr );
+    }
+
+    // Returns the (non-owning) device handle this sampler was created on.
+    VULKAN_HPP_NAMESPACE::Device getDevice() const
+    {
+      return m_device;
+    }
+
+    // Device dispatch table; asserts it matches this header's version.
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
+    {
+      VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+      return m_dispatcher;
+    }
+
+    // Exchanges all state with rhs.
+    void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Sampler & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      std::swap( m_device, rhs.m_device );
+      std::swap( m_sampler, rhs.m_sampler );
+      std::swap( m_allocator, rhs.m_allocator );
+      std::swap( m_dispatcher, rhs.m_dispatcher );
+    }
+
+
+
+  private:
+    VULKAN_HPP_NAMESPACE::Device m_device = {};
+    VULKAN_HPP_NAMESPACE::Sampler m_sampler = {};
+    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
+  };
+
+  // RAII owner of a VkSamplerYcbcrConversion: destroyed via
+  // vkDestroySamplerYcbcrConversion (with the stored allocation callbacks)
+  // on clear() or destruction.  Move-only.
+  class SamplerYcbcrConversion
+  {
+  public:
+    using CType = VkSamplerYcbcrConversion;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSamplerYcbcrConversion;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSamplerYcbcrConversion;
+
+  public:
+
+    // Exception-free factory: creates a conversion via
+    // vkCreateSamplerYcbcrConversion and returns the wrapper, or the failing
+    // Result as the unexpected value.
+    static android::base::expected<SamplerYcbcrConversion, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+    {
+      VkSamplerYcbcrConversion ycbcrConversion;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateSamplerYcbcrConversion( static_cast<VkDevice>( *device ), reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSamplerYcbcrConversion*>( &ycbcrConversion ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        return android::base::unexpected(result);
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SamplerYcbcrConversion(device, ycbcrConversion, allocator);
+    }
+
+
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    // Throwing constructor (compiled only when exceptions are enabled).
+    SamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( *device ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {
+      // FIX: write into the member m_ycbcrConversion; the previous code
+      // referenced an undeclared local 'ycbcrConversion' and did not compile
+      // when VULKAN_HPP_NO_EXCEPTIONS was undefined.
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateSamplerYcbcrConversion( static_cast<VkDevice>( *device ), reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ), reinterpret_cast<VkSamplerYcbcrConversion*>( &m_ycbcrConversion ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, "vkCreateSamplerYcbcrConversion" );
+      }
+    }
+#endif
+
+    // Adopts an already-created raw handle; destruction is taken over by
+    // this wrapper.
+    SamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkSamplerYcbcrConversion ycbcrConversion, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( *device ), m_ycbcrConversion( ycbcrConversion ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {}
+
+
+    SamplerYcbcrConversion( std::nullptr_t ) {}
+
+    ~SamplerYcbcrConversion()
+    {
+      clear();
+    }
+
+    SamplerYcbcrConversion() = delete;
+    SamplerYcbcrConversion( SamplerYcbcrConversion const & ) = delete;
+    SamplerYcbcrConversion( SamplerYcbcrConversion && rhs ) VULKAN_HPP_NOEXCEPT
+      : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ), m_ycbcrConversion( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_ycbcrConversion, {} ) ), m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ), m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
+    {}
+    SamplerYcbcrConversion & operator=( SamplerYcbcrConversion const & ) = delete;
+    SamplerYcbcrConversion & operator=( SamplerYcbcrConversion && rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      if ( this != &rhs )
+      {
+        // Destroy our current conversion before stealing rhs's state.
+        clear();
+        m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
+        m_ycbcrConversion = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_ycbcrConversion, {} );
+        m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
+        m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+      }
+      return *this;
+    }
+
+    // Access the wrapped handle (null when cleared/released/moved-from).
+    VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion const & operator*() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_ycbcrConversion;
+    }
+
+    // Destroys the owned conversion (if any) and nulls all members.
+    void clear() VULKAN_HPP_NOEXCEPT
+    {
+      if ( m_ycbcrConversion )
+      {
+        getDispatcher()->vkDestroySamplerYcbcrConversion( static_cast<VkDevice>( m_device ), static_cast<VkSamplerYcbcrConversion>( m_ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+      }
+      m_device = nullptr;
+      m_ycbcrConversion = nullptr;
+      m_allocator = nullptr;
+      m_dispatcher = nullptr;
+    }
+
+    // Relinquishes ownership: returns the handle without destroying it.
+    VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion release()
+    {
+      m_device = nullptr;
+      m_allocator = nullptr;
+      m_dispatcher = nullptr;
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_ycbcrConversion, nullptr );
+    }
+
+    // Returns the (non-owning) device handle this conversion was created on.
+    VULKAN_HPP_NAMESPACE::Device getDevice() const
+    {
+      return m_device;
+    }
+
+    // Device dispatch table; asserts it matches this header's version.
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
+    {
+      VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+      return m_dispatcher;
+    }
+
+    // Exchanges all state with rhs.
+    void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SamplerYcbcrConversion & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      std::swap( m_device, rhs.m_device );
+      std::swap( m_ycbcrConversion, rhs.m_ycbcrConversion );
+      std::swap( m_allocator, rhs.m_allocator );
+      std::swap( m_dispatcher, rhs.m_dispatcher );
+    }
+
+
+
+  private:
+    VULKAN_HPP_NAMESPACE::Device m_device = {};
+    VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion m_ycbcrConversion = {};
+    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
+  };
+
+  class Semaphore
+  {
+  public:
+    using CType = VkSemaphore;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSemaphore;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSemaphore;
+
+  public:
+
+
+    static android::base::expected<Semaphore, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) 
+    {
+      VkSemaphore semaphore;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateSemaphore( static_cast<VkDevice>( *device ), reinterpret_cast<const VkSemaphoreCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSemaphore*>( &semaphore ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        return android::base::unexpected(result);
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Semaphore(device, semaphore, allocator);
+    }
+
+
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    Semaphore( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( *device ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateSemaphore( static_cast<VkDevice>( *device ), reinterpret_cast<const VkSemaphoreCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ), reinterpret_cast<VkSemaphore*>( &semaphore ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, "vkCreateSemaphore" );
+      }
+    }
+#endif
+
+    Semaphore( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkSemaphore semaphore, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( *device ), m_semaphore( semaphore ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {}
+
+
+    Semaphore( std::nullptr_t ) {}
+
+    ~Semaphore()
+    {
+      clear();
+    }
+
+    Semaphore() = delete;
+      Semaphore( Semaphore const & ) = delete;
+    Semaphore( Semaphore && rhs ) VULKAN_HPP_NOEXCEPT
+      : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ), m_semaphore( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_semaphore, {} ) ), m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ), m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
+    {}
+      Semaphore & operator=( Semaphore const & ) = delete;
+    Semaphore & operator=( Semaphore && rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      if ( this != &rhs )
+      {
+          clear();
+          m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
+          m_semaphore = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_semaphore, {} );
+          m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
+        m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+      }
+      return *this;
+    }
+
+    // Access the underlying VULKAN_HPP_NAMESPACE::Semaphore handle (non-owning view).
+    VULKAN_HPP_NAMESPACE::Semaphore const & operator*() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_semaphore;
+    }
+
+    // Destroy the owned semaphore (if any) via the device dispatcher, then
+    // reset all members to the empty state. Safe on an already-empty object.
+    void clear() VULKAN_HPP_NOEXCEPT
+    {
+        if ( m_semaphore )
+        {
+          getDispatcher()->vkDestroySemaphore( static_cast<VkDevice>( m_device ), static_cast<VkSemaphore>( m_semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+        }
+      m_device = nullptr;
+      m_semaphore = nullptr;
+      m_allocator = nullptr;
+        m_dispatcher = nullptr;
+    }
+
+    // Relinquish ownership: returns the semaphore handle WITHOUT destroying it
+    // and leaves this wrapper empty; the caller owns the returned handle.
+    VULKAN_HPP_NAMESPACE::Semaphore release()
+    {
+
+      m_device = nullptr;
+      m_allocator = nullptr;
+        m_dispatcher = nullptr;
+        return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_semaphore, nullptr );
+    }
+
+
+    // Device this wrapper was constructed against (empty after clear/release/move-from).
+    VULKAN_HPP_NAMESPACE::Device getDevice() const
+    {
+      return m_device;
+    }
+
+    // Returns the device dispatch table; asserts it matches the compiled-in
+    // Vulkan header version.
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
+    {
+      VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+      return m_dispatcher;
+    }
+
+    // Member-wise swap with another RAII Semaphore wrapper.
+    void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Semaphore & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+
+      std::swap( m_device, rhs.m_device );
+      std::swap( m_semaphore, rhs.m_semaphore );
+      std::swap( m_allocator, rhs.m_allocator );
+      std::swap( m_dispatcher, rhs.m_dispatcher );
+    }
+
+
+  //=== VK_VERSION_1_2 ===
+
+    
+    // Current counter value of this (timeline) semaphore. Declaration only —
+    // presumably wraps vkGetSemaphoreCounterValue; body not visible in this chunk.
+    VULKAN_HPP_NODISCARD  uint64_t getCounterValue(  ) const ;
+
+  //=== VK_KHR_timeline_semaphore ===
+
+    
+    // KHR-suffixed variant for the VK_KHR_timeline_semaphore extension.
+    VULKAN_HPP_NODISCARD  uint64_t getCounterValueKHR(  ) const ;
+
+
+  private:
+
+    
+    // Owning state: creating device, owned handle, optional allocation
+    // callbacks used at destruction, and the device dispatch table.
+    VULKAN_HPP_NAMESPACE::Device m_device = {};
+    VULKAN_HPP_NAMESPACE::Semaphore m_semaphore = {};
+    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
+  };
+
+  // RAII owner of a VkShaderModule created on a given device; the module is
+  // destroyed with vkDestroyShaderModule when the wrapper is cleared/destroyed.
+  class ShaderModule
+  {
+  public:
+    using CType = VkShaderModule;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eShaderModule;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eShaderModule;
+
+  public:
+
+
+    // Exception-free factory: calls vkCreateShaderModule and returns either the
+    // owning wrapper or the failing Result as the unexpected value.
+    static android::base::expected<ShaderModule, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) 
+    {
+      VkShaderModule shaderModule;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateShaderModule( static_cast<VkDevice>( *device ), reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkShaderModule*>( &shaderModule ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        return android::base::unexpected(result);
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ShaderModule(device, shaderModule, allocator);
+    }
+
+
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    // Throwing constructor: creates the shader module, writing the new handle
+    // directly into the m_shaderModule member; throws on failure.
+    ShaderModule( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( *device ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {
+      // BUGFIX: previously passed `&shaderModule`, a name that does not exist in
+      // this constructor's scope (the local only exists in the static create());
+      // the created handle must land in m_shaderModule, as upstream
+      // vulkan_raii.hpp emits. This path only compiled when guarded out by
+      // VULKAN_HPP_NO_EXCEPTIONS.
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateShaderModule( static_cast<VkDevice>( *device ), reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ), reinterpret_cast<VkShaderModule*>( &m_shaderModule ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, "vkCreateShaderModule" );
+      }
+    }
+#endif
+
+    // Adopt an already-created VkShaderModule handle (takes ownership).
+    ShaderModule( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkShaderModule shaderModule, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( *device ), m_shaderModule( shaderModule ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {}
+
+
+    // Empty wrapper owning nothing.
+    ShaderModule( std::nullptr_t ) {}
+
+    ~ShaderModule()
+    {
+      clear();
+    }
+
+    ShaderModule() = delete;
+      ShaderModule( ShaderModule const & ) = delete;
+    // Move construction: steal all state from rhs, leaving rhs empty.
+    ShaderModule( ShaderModule && rhs ) VULKAN_HPP_NOEXCEPT
+      : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ), m_shaderModule( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_shaderModule, {} ) ), m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ), m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
+    {}
+      ShaderModule & operator=( ShaderModule const & ) = delete;
+    // Move assignment: destroy the currently held module, then steal rhs's state.
+    ShaderModule & operator=( ShaderModule && rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      if ( this != &rhs )
+      {
+          clear();
+          m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
+          m_shaderModule = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_shaderModule, {} );
+          m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
+        m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+      }
+      return *this;
+    }
+
+    // Access the underlying handle (non-owning view).
+    VULKAN_HPP_NAMESPACE::ShaderModule const & operator*() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_shaderModule;
+    }
+
+    // Destroy the owned module (if any) and reset all members to empty.
+    void clear() VULKAN_HPP_NOEXCEPT
+    {
+        if ( m_shaderModule )
+        {
+          getDispatcher()->vkDestroyShaderModule( static_cast<VkDevice>( m_device ), static_cast<VkShaderModule>( m_shaderModule ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+        }
+      m_device = nullptr;
+      m_shaderModule = nullptr;
+      m_allocator = nullptr;
+        m_dispatcher = nullptr;
+    }
+
+    // Relinquish ownership of the handle without destroying it; the wrapper
+    // becomes empty and the caller is responsible for the returned handle.
+    VULKAN_HPP_NAMESPACE::ShaderModule release()
+    {
+
+      m_device = nullptr;
+      m_allocator = nullptr;
+        m_dispatcher = nullptr;
+        return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_shaderModule, nullptr );
+    }
+
+
+    // Device this module was created on (empty after clear/release/move-from).
+    VULKAN_HPP_NAMESPACE::Device getDevice() const
+    {
+      return m_device;
+    }
+
+    // Returns the device dispatch table; asserts it matches the compiled-in
+    // Vulkan header version.
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
+    {
+      VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+      return m_dispatcher;
+    }
+
+    // Member-wise swap with another RAII ShaderModule wrapper.
+    void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ShaderModule & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+
+      std::swap( m_device, rhs.m_device );
+      std::swap( m_shaderModule, rhs.m_shaderModule );
+      std::swap( m_allocator, rhs.m_allocator );
+      std::swap( m_dispatcher, rhs.m_dispatcher );
+    }
+
+
+  //=== VK_EXT_shader_module_identifier ===
+
+    
+    // Declaration only — presumably wraps vkGetShaderModuleIdentifierEXT; body
+    // not visible in this chunk.
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT getIdentifierEXT(  ) const  VULKAN_HPP_NOEXCEPT;
+
+
+  private:
+
+    
+    // Owning state: creating device, owned handle, optional allocation
+    // callbacks used at destruction, and the device dispatch table.
+    VULKAN_HPP_NAMESPACE::Device m_device = {};
+    VULKAN_HPP_NAMESPACE::ShaderModule m_shaderModule = {};
+    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
+  };
+
+  // RAII owner of a VkSurfaceKHR. Class continues past this chunk; the members
+  // below are its platform-specific, exception-free factory functions: each
+  // wraps the corresponding vkCreate*Surface* entry point and returns either
+  // the owning wrapper or the failing Result as the unexpected value.
+  class SurfaceKHR
+  {
+  public:
+    using CType = VkSurfaceKHR;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSurfaceKHR;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSurfaceKHR;
+
+  public:
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+
+    // Factory via vkCreateAndroidSurfaceKHR.
+    static android::base::expected<SurfaceKHR, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) 
+    {
+      VkSurfaceKHR surface;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( instance.getDispatcher()->vkCreateAndroidSurfaceKHR( static_cast<VkInstance>( *instance ), reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        return android::base::unexpected(result);
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR(instance, surface, allocator);
+    }
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
+
+    // Factory via vkCreateDirectFBSurfaceEXT.
+    static android::base::expected<SurfaceKHR, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) 
+    {
+      VkSurfaceKHR surface;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( instance.getDispatcher()->vkCreateDirectFBSurfaceEXT( static_cast<VkInstance>( *instance ), reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        return android::base::unexpected(result);
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR(instance, surface, allocator);
+    }
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+
+
+    // Factory via vkCreateDisplayPlaneSurfaceKHR (no platform guard).
+    static android::base::expected<SurfaceKHR, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) 
+    {
+      VkSurfaceKHR surface;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( instance.getDispatcher()->vkCreateDisplayPlaneSurfaceKHR( static_cast<VkInstance>( *instance ), reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        return android::base::unexpected(result);
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR(instance, surface, allocator);
+    }
+
+
+    // Factory via vkCreateHeadlessSurfaceEXT (no platform guard).
+    static android::base::expected<SurfaceKHR, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) 
+    {
+      VkSurfaceKHR surface;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( instance.getDispatcher()->vkCreateHeadlessSurfaceEXT( static_cast<VkInstance>( *instance ), reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        return android::base::unexpected(result);
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR(instance, surface, allocator);
+    }
+
+#if defined( VK_USE_PLATFORM_IOS_MVK )
+
+    // Factory via vkCreateIOSSurfaceMVK.
+    static android::base::expected<SurfaceKHR, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) 
+    {
+      VkSurfaceKHR surface;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( instance.getDispatcher()->vkCreateIOSSurfaceMVK( static_cast<VkInstance>( *instance ), reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        return android::base::unexpected(result);
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR(instance, surface, allocator);
+    }
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+
+    // Factory via vkCreateImagePipeSurfaceFUCHSIA.
+    static android::base::expected<SurfaceKHR, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) 
+    {
+      VkSurfaceKHR surface;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( instance.getDispatcher()->vkCreateImagePipeSurfaceFUCHSIA( static_cast<VkInstance>( *instance ), reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        return android::base::unexpected(result);
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR(instance, surface, allocator);
+    }
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_MACOS_MVK )
+
+    // Factory via vkCreateMacOSSurfaceMVK.
+    static android::base::expected<SurfaceKHR, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) 
+    {
+      VkSurfaceKHR surface;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( instance.getDispatcher()->vkCreateMacOSSurfaceMVK( static_cast<VkInstance>( *instance ), reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        return android::base::unexpected(result);
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR(instance, surface, allocator);
+    }
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+
+    // Factory via vkCreateMetalSurfaceEXT.
+    static android::base::expected<SurfaceKHR, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) 
+    {
+      VkSurfaceKHR surface;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( instance.getDispatcher()->vkCreateMetalSurfaceEXT( static_cast<VkInstance>( *instance ), reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        return android::base::unexpected(result);
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR(instance, surface, allocator);
+    }
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+#if defined( VK_USE_PLATFORM_SCREEN_QNX )
+
+    // Factory via vkCreateScreenSurfaceQNX.
+    static android::base::expected<SurfaceKHR, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) 
+    {
+      VkSurfaceKHR surface;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( instance.getDispatcher()->vkCreateScreenSurfaceQNX( static_cast<VkInstance>( *instance ), reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        return android::base::unexpected(result);
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR(instance, surface, allocator);
+    }
+#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+
+#if defined( VK_USE_PLATFORM_GGP )
+
+    // Factory via vkCreateStreamDescriptorSurfaceGGP.
+    static android::base::expected<SurfaceKHR, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) 
+    {
+      VkSurfaceKHR surface;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( instance.getDispatcher()->vkCreateStreamDescriptorSurfaceGGP( static_cast<VkInstance>( *instance ), reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        return android::base::unexpected(result);
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR(instance, surface, allocator);
+    }
+#endif /*VK_USE_PLATFORM_GGP*/
+
+#if defined( VK_USE_PLATFORM_VI_NN )
+
+    // Factory via vkCreateViSurfaceNN.
+    static android::base::expected<SurfaceKHR, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) 
+    {
+      VkSurfaceKHR surface;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( instance.getDispatcher()->vkCreateViSurfaceNN( static_cast<VkInstance>( *instance ), reinterpret_cast<const VkViSurfaceCreateInfoNN *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        return android::base::unexpected(result);
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR(instance, surface, allocator);
+    }
+#endif /*VK_USE_PLATFORM_VI_NN*/
+
+#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
+
+    // Factory via vkCreateWaylandSurfaceKHR.
+    static android::base::expected<SurfaceKHR, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) 
+    {
+      VkSurfaceKHR surface;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( instance.getDispatcher()->vkCreateWaylandSurfaceKHR( static_cast<VkInstance>( *instance ), reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        return android::base::unexpected(result);
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR(instance, surface, allocator);
+    }
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+
+    // Factory via vkCreateWin32SurfaceKHR.
+    static android::base::expected<SurfaceKHR, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) 
+    {
+      VkSurfaceKHR surface;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( instance.getDispatcher()->vkCreateWin32SurfaceKHR( static_cast<VkInstance>( *instance ), reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        return android::base::unexpected(result);
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR(instance, surface, allocator);
+    }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#if defined( VK_USE_PLATFORM_XCB_KHR )
+
+    // Factory via vkCreateXcbSurfaceKHR.
+    static android::base::expected<SurfaceKHR, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) 
+    {
+      VkSurfaceKHR surface;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( instance.getDispatcher()->vkCreateXcbSurfaceKHR( static_cast<VkInstance>( *instance ), reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        return android::base::unexpected(result);
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR(instance, surface, allocator);
+    }
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#if defined( VK_USE_PLATFORM_XLIB_KHR )
+
+    // Factory via vkCreateXlibSurfaceKHR.
+    static android::base::expected<SurfaceKHR, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) 
+    {
+      VkSurfaceKHR surface;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( instance.getDispatcher()->vkCreateXlibSurfaceKHR( static_cast<VkInstance>( *instance ), reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        return android::base::unexpected(result);
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR(instance, surface, allocator);
+    }
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+
+    // BUGFIX (all throwing constructors below): each previously passed
+    // `&surface` to its vkCreate*Surface* call, but no `surface` variable exists
+    // in constructor scope (the local only exists in the static create()
+    // factories above). The created handle must be written into the m_surface
+    // member, as upstream vulkan_raii.hpp emits. These paths only compiled when
+    // guarded out by VULKAN_HPP_NO_EXCEPTIONS.
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    // Throwing constructor via vkCreateAndroidSurfaceKHR.
+    SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_instance( *instance ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( instance.getDispatcher() )
+    {
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( instance.getDispatcher()->vkCreateAndroidSurfaceKHR( static_cast<VkInstance>( *instance ), reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ), reinterpret_cast<VkSurfaceKHR*>( &m_surface ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, "vkCreateAndroidSurfaceKHR" );
+      }
+    }
+#endif
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    // Throwing constructor via vkCreateDirectFBSurfaceEXT.
+    SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_instance( *instance ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( instance.getDispatcher() )
+    {
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( instance.getDispatcher()->vkCreateDirectFBSurfaceEXT( static_cast<VkInstance>( *instance ), reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ), reinterpret_cast<VkSurfaceKHR*>( &m_surface ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, "vkCreateDirectFBSurfaceEXT" );
+      }
+    }
+#endif
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    // Throwing constructor via vkCreateDisplayPlaneSurfaceKHR.
+    SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_instance( *instance ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( instance.getDispatcher() )
+    {
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( instance.getDispatcher()->vkCreateDisplayPlaneSurfaceKHR( static_cast<VkInstance>( *instance ), reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ), reinterpret_cast<VkSurfaceKHR*>( &m_surface ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, "vkCreateDisplayPlaneSurfaceKHR" );
+      }
+    }
+#endif
+
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    // Throwing constructor via vkCreateHeadlessSurfaceEXT.
+    SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_instance( *instance ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( instance.getDispatcher() )
+    {
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( instance.getDispatcher()->vkCreateHeadlessSurfaceEXT( static_cast<VkInstance>( *instance ), reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ), reinterpret_cast<VkSurfaceKHR*>( &m_surface ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, "vkCreateHeadlessSurfaceEXT" );
+      }
+    }
+#endif
+
+#if defined( VK_USE_PLATFORM_IOS_MVK )
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    // Throwing constructor via vkCreateIOSSurfaceMVK.
+    SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_instance( *instance ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( instance.getDispatcher() )
+    {
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( instance.getDispatcher()->vkCreateIOSSurfaceMVK( static_cast<VkInstance>( *instance ), reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ), reinterpret_cast<VkSurfaceKHR*>( &m_surface ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, "vkCreateIOSSurfaceMVK" );
+      }
+    }
+#endif
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    // Throwing constructor via vkCreateImagePipeSurfaceFUCHSIA.
+    SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_instance( *instance ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( instance.getDispatcher() )
+    {
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( instance.getDispatcher()->vkCreateImagePipeSurfaceFUCHSIA( static_cast<VkInstance>( *instance ), reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ), reinterpret_cast<VkSurfaceKHR*>( &m_surface ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, "vkCreateImagePipeSurfaceFUCHSIA" );
+      }
+    }
+#endif
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_MACOS_MVK )
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    // Throwing constructor via vkCreateMacOSSurfaceMVK.
+    SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_instance( *instance ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( instance.getDispatcher() )
+    {
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( instance.getDispatcher()->vkCreateMacOSSurfaceMVK( static_cast<VkInstance>( *instance ), reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ), reinterpret_cast<VkSurfaceKHR*>( &m_surface ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, "vkCreateMacOSSurfaceMVK" );
+      }
+    }
+#endif
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    // Throwing constructor via vkCreateMetalSurfaceEXT.
+    SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_instance( *instance ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( instance.getDispatcher() )
+    {
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( instance.getDispatcher()->vkCreateMetalSurfaceEXT( static_cast<VkInstance>( *instance ), reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ), reinterpret_cast<VkSurfaceKHR*>( &m_surface ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, "vkCreateMetalSurfaceEXT" );
+      }
+    }
+#endif
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+#if defined( VK_USE_PLATFORM_SCREEN_QNX )
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    // Throwing constructor via vkCreateScreenSurfaceQNX.
+    SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_instance( *instance ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( instance.getDispatcher() )
+    {
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( instance.getDispatcher()->vkCreateScreenSurfaceQNX( static_cast<VkInstance>( *instance ), reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ), reinterpret_cast<VkSurfaceKHR*>( &m_surface ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, "vkCreateScreenSurfaceQNX" );
+      }
+    }
+#endif
+#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+
+#if defined( VK_USE_PLATFORM_GGP )
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_instance( *instance ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( instance.getDispatcher() )
+    {
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( instance.getDispatcher()->vkCreateStreamDescriptorSurfaceGGP( static_cast<VkInstance>( *instance ), reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, "vkCreateStreamDescriptorSurfaceGGP" );
+      }
+    }
+#endif
+#endif /*VK_USE_PLATFORM_GGP*/
+
+#if defined( VK_USE_PLATFORM_VI_NN )
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_instance( *instance ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( instance.getDispatcher() )
+    {
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( instance.getDispatcher()->vkCreateViSurfaceNN( static_cast<VkInstance>( *instance ), reinterpret_cast<const VkViSurfaceCreateInfoNN *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, "vkCreateViSurfaceNN" );
+      }
+    }
+#endif
+#endif /*VK_USE_PLATFORM_VI_NN*/
+
+#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_instance( *instance ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( instance.getDispatcher() )
+    {
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( instance.getDispatcher()->vkCreateWaylandSurfaceKHR( static_cast<VkInstance>( *instance ), reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, "vkCreateWaylandSurfaceKHR" );
+      }
+    }
+#endif
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_instance( *instance ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( instance.getDispatcher() )
+    {
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( instance.getDispatcher()->vkCreateWin32SurfaceKHR( static_cast<VkInstance>( *instance ), reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, "vkCreateWin32SurfaceKHR" );
+      }
+    }
+#endif
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#if defined( VK_USE_PLATFORM_XCB_KHR )
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_instance( *instance ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( instance.getDispatcher() )
+    {
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( instance.getDispatcher()->vkCreateXcbSurfaceKHR( static_cast<VkInstance>( *instance ), reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, "vkCreateXcbSurfaceKHR" );
+      }
+    }
+#endif
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#if defined( VK_USE_PLATFORM_XLIB_KHR )
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_instance( *instance ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( instance.getDispatcher() )
+    {
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( instance.getDispatcher()->vkCreateXlibSurfaceKHR( static_cast<VkInstance>( *instance ), reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, "vkCreateXlibSurfaceKHR" );
+      }
+    }
+#endif
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+    // Adopting constructor: wraps an already-created VkSurfaceKHR handle without calling
+    // any vkCreate* entry point. The wrapper takes ownership and will destroy it in clear().
+    SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, VkSurfaceKHR surface, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_instance( *instance ), m_surface( surface ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( instance.getDispatcher() )
+    {}
+
+
+    // Empty wrapper holding no surface; all members keep their null defaults.
+    SurfaceKHR( std::nullptr_t ) {}
+
+    // Destroys the owned surface (if any) via clear().
+    ~SurfaceKHR()
+    {
+      clear();
+    }
+
+    // Move-only RAII type: copying would double-destroy the handle.
+    SurfaceKHR() = delete;
+      SurfaceKHR( SurfaceKHR const & ) = delete;
+    // Move constructor: steals all state from rhs, leaving rhs empty (safe to destroy).
+    SurfaceKHR( SurfaceKHR && rhs ) VULKAN_HPP_NOEXCEPT
+      : m_instance( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_instance, {} ) ), m_surface( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_surface, {} ) ), m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ), m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
+    {}
+      SurfaceKHR & operator=( SurfaceKHR const & ) = delete;
+    // Move assignment: destroys the currently owned surface first, then steals rhs's state.
+    SurfaceKHR & operator=( SurfaceKHR && rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      if ( this != &rhs )
+      {
+          clear();
+          m_instance = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_instance, {} );
+          m_surface = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_surface, {} );
+          m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
+        m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+      }
+      return *this;
+    }
+
+    // Access the wrapped Vulkan-Hpp surface handle (non-owning view).
+    VULKAN_HPP_NAMESPACE::SurfaceKHR const & operator*() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_surface;
+    }
+
+    // Destroys the owned surface via vkDestroySurfaceKHR (if non-null) and resets all
+    // members to their empty state. Idempotent.
+    void clear() VULKAN_HPP_NOEXCEPT
+    {
+        if ( m_surface )
+        {
+          getDispatcher()->vkDestroySurfaceKHR( static_cast<VkInstance>( m_instance ), static_cast<VkSurfaceKHR>( m_surface ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+        }
+      m_instance = nullptr;
+      m_surface = nullptr;
+      m_allocator = nullptr;
+        m_dispatcher = nullptr;
+    }
+
+    // Relinquishes ownership: returns the handle WITHOUT destroying it and empties the
+    // wrapper. Caller becomes responsible for destroying the surface.
+    VULKAN_HPP_NAMESPACE::SurfaceKHR release()
+    {
+
+      m_instance = nullptr;
+      m_allocator = nullptr;
+        m_dispatcher = nullptr;
+        return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_surface, nullptr );
+    }
+
+
+    // The instance this surface was created against (null after move/clear/release).
+    VULKAN_HPP_NAMESPACE::Instance getInstance() const
+    {
+      return m_instance;
+    }
+
+    // Instance-level dispatch table; asserts the loader header version matches.
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * getDispatcher() const
+    {
+      VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+      return m_dispatcher;
+    }
+
+    // Member-wise swap with another wrapper (no Vulkan calls).
+    void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+
+      std::swap( m_instance, rhs.m_instance );
+      std::swap( m_surface, rhs.m_surface );
+      std::swap( m_allocator, rhs.m_allocator );
+      std::swap( m_dispatcher, rhs.m_dispatcher );
+    }
+
+
+
+  private:
+
+    
+    VULKAN_HPP_NAMESPACE::Instance m_instance = {};
+    VULKAN_HPP_NAMESPACE::SurfaceKHR m_surface = {};
+    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * m_dispatcher = nullptr;
+  };
+
+  // RAII owner of a VkSwapchainKHR: creates (or adopts) the handle and destroys it via
+  // vkDestroySwapchainKHR when cleared or destructed. Move-only.
+  class SwapchainKHR
+  {
+  public:
+    using CType = VkSwapchainKHR;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSwapchainKHR;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSwapchainKHR;
+
+  public:
+
+    // Exception-free factory: returns the wrapper on success, or the failing
+    // VULKAN_HPP_NAMESPACE::Result on error (Android builds disable exceptions).
+    static android::base::expected<SwapchainKHR, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+    {
+      VkSwapchainKHR swapchain;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateSwapchainKHR( static_cast<VkDevice>( *device ), reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSwapchainKHR*>( &swapchain ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        return android::base::unexpected(result);
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR(device, swapchain, allocator);
+    }
+
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    // Throwing constructor: creates a swapchain and stores the handle in m_swapchain.
+    // (Was `&swapchain`, an undeclared name — only create() has a local of that name;
+    // never compiled because Android builds define VULKAN_HPP_NO_EXCEPTIONS.)
+    SwapchainKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( *device ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateSwapchainKHR( static_cast<VkDevice>( *device ), reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ), reinterpret_cast<VkSwapchainKHR*>( &m_swapchain ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, "vkCreateSwapchainKHR" );
+      }
+    }
+#endif
+
+    // Adopting constructor: takes ownership of an already-created VkSwapchainKHR.
+    SwapchainKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkSwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( *device ), m_swapchain( swapchain ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {}
+
+    // Empty wrapper holding no swapchain.
+    SwapchainKHR( std::nullptr_t ) {}
+
+    // Destroys the owned swapchain (if any) via clear().
+    ~SwapchainKHR()
+    {
+      clear();
+    }
+
+    // Move-only RAII type: copying would double-destroy the handle.
+    SwapchainKHR() = delete;
+      SwapchainKHR( SwapchainKHR const & ) = delete;
+    // Move constructor: steals all state from rhs, leaving rhs empty.
+    SwapchainKHR( SwapchainKHR && rhs ) VULKAN_HPP_NOEXCEPT
+      : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ), m_swapchain( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_swapchain, {} ) ), m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ), m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
+    {}
+      SwapchainKHR & operator=( SwapchainKHR const & ) = delete;
+    // Move assignment: destroys the currently owned swapchain first, then steals rhs's state.
+    SwapchainKHR & operator=( SwapchainKHR && rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      if ( this != &rhs )
+      {
+          clear();
+          m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
+          m_swapchain = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_swapchain, {} );
+          m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
+        m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+      }
+      return *this;
+    }
+
+    // Access the wrapped Vulkan-Hpp swapchain handle (non-owning view).
+    VULKAN_HPP_NAMESPACE::SwapchainKHR const & operator*() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_swapchain;
+    }
+
+    // Destroys the owned swapchain via vkDestroySwapchainKHR (if non-null) and resets all
+    // members. Idempotent.
+    void clear() VULKAN_HPP_NOEXCEPT
+    {
+        if ( m_swapchain )
+        {
+          getDispatcher()->vkDestroySwapchainKHR( static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( m_swapchain ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+        }
+      m_device = nullptr;
+      m_swapchain = nullptr;
+      m_allocator = nullptr;
+        m_dispatcher = nullptr;
+    }
+
+    // Relinquishes ownership: returns the handle without destroying it; wrapper becomes empty.
+    VULKAN_HPP_NAMESPACE::SwapchainKHR release()
+    {
+
+      m_device = nullptr;
+      m_allocator = nullptr;
+        m_dispatcher = nullptr;
+        return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_swapchain, nullptr );
+    }
+
+    // The device this swapchain belongs to (null after move/clear/release).
+    VULKAN_HPP_NAMESPACE::Device getDevice() const
+    {
+      return m_device;
+    }
+
+    // Device-level dispatch table; asserts the loader header version matches.
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
+    {
+      VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+      return m_dispatcher;
+    }
+
+    // Member-wise swap with another wrapper (no Vulkan calls).
+    void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+
+      std::swap( m_device, rhs.m_device );
+      std::swap( m_swapchain, rhs.m_swapchain );
+      std::swap( m_allocator, rhs.m_allocator );
+      std::swap( m_dispatcher, rhs.m_dispatcher );
+    }
+
+
+  //=== VK_KHR_swapchain ===
+
+    
+    VULKAN_HPP_NODISCARD  std::vector<VULKAN_HPP_NAMESPACE::Image> getImages(  ) const ;
+
+    
+    VULKAN_HPP_NODISCARD  std::pair<VULKAN_HPP_NAMESPACE::Result, uint32_t> acquireNextImage( uint64_t timeout, VULKAN_HPP_NAMESPACE::Semaphore semaphore VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const ;
+
+  //=== VK_EXT_display_control ===
+
+    
+    VULKAN_HPP_NODISCARD  uint64_t getCounterEXT( VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter ) const ;
+
+  //=== VK_GOOGLE_display_timing ===
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE getRefreshCycleDurationGOOGLE(  ) const ;
+
+    
+    VULKAN_HPP_NODISCARD  std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE> getPastPresentationTimingGOOGLE(  ) const ;
+
+  //=== VK_KHR_shared_presentable_image ===
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::Result getStatus(  ) const ;
+
+  //=== VK_AMD_display_native_hdr ===
+
+    
+     void setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable ) const  VULKAN_HPP_NOEXCEPT;
+
+  //=== VK_KHR_present_wait ===
+
+    
+    VULKAN_HPP_NODISCARD  VULKAN_HPP_NAMESPACE::Result waitForPresent( uint64_t presentId, uint64_t timeout ) const ;
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_EXT_full_screen_exclusive ===
+
+    
+     void acquireFullScreenExclusiveModeEXT(  ) const ;
+
+    
+     void releaseFullScreenExclusiveModeEXT(  ) const ;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+
+  private:
+
+    
+    VULKAN_HPP_NAMESPACE::Device m_device = {};
+    VULKAN_HPP_NAMESPACE::SwapchainKHR m_swapchain = {};
+    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
+  };
+
+  // Owning collection of SwapchainKHR wrappers produced by a single
+  // vkCreateSharedSwapchainsKHR call (VK_KHR_display_swapchain). Each element owns and
+  // destroys its handle independently.
+  class SwapchainKHRs : public std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR>
+  {
+  public:
+    
+
+    // Exception-free factory: creates createInfos.size() shared swapchains in one call and
+    // wraps each returned handle; returns the failing Result on error.
+    static android::base::expected<SwapchainKHRs, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) 
+    {
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * dispatcher = device.getDispatcher();
+      std::vector<VkSwapchainKHR> swapchains( createInfos.size() );
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->vkCreateSharedSwapchainsKHR( static_cast<VkDevice>( *device ), createInfos.size(), reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), swapchains.data() ) );
+      if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SwapchainKHRs ret(nullptr);
+        ret.reserve( createInfos.size() );
+        for ( auto const & swapchainKHR : swapchains )
+        {
+          // Each raw handle is adopted by an owning SwapchainKHR wrapper.
+          ret.emplace_back( device, swapchainKHR, allocator );
+        }
+        // std::move needed here: converting a local to the expected<> return type.
+        return std::move(ret);
+      }
+      else
+      {
+        return android::base::unexpected(result);
+      }
+    }
+
+    
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    // Throwing equivalent of create(): fills *this on success, throws on failure.
+    SwapchainKHRs( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) 
+    {
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * dispatcher = device.getDispatcher();
+      std::vector<VkSwapchainKHR> swapchains( createInfos.size() );
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->vkCreateSharedSwapchainsKHR( static_cast<VkDevice>( *device ), createInfos.size(), reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), swapchains.data() ) );
+      if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        this->reserve( createInfos.size() );
+        for ( auto const & swapchainKHR : swapchains )
+        {
+          this->emplace_back( device, swapchainKHR, allocator );
+        }
+      }
+      else
+      {
+        throwResultException( result, "vkCreateSharedSwapchainsKHR" );
+      }
+    }
+#endif
+
+    // Empty collection.
+    SwapchainKHRs( std::nullptr_t ) {}
+
+    // Move-only, like its elements.
+    SwapchainKHRs() = delete;
+    SwapchainKHRs( SwapchainKHRs const & ) = delete;
+    SwapchainKHRs( SwapchainKHRs && rhs ) = default;
+    SwapchainKHRs & operator=( SwapchainKHRs const & ) = delete;
+    SwapchainKHRs & operator=( SwapchainKHRs && rhs ) = default;
+  };
+
+
+  // RAII owner of a VkValidationCacheEXT (VK_EXT_validation_cache): creates (or adopts)
+  // the handle and destroys it via vkDestroyValidationCacheEXT. Move-only.
+  class ValidationCacheEXT
+  {
+  public:
+    using CType = VkValidationCacheEXT;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eValidationCacheEXT;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eValidationCacheEXT;
+
+  public:
+
+    // Exception-free factory: returns the wrapper on success, or the failing Result.
+    static android::base::expected<ValidationCacheEXT, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+    {
+      VkValidationCacheEXT validationCache;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateValidationCacheEXT( static_cast<VkDevice>( *device ), reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkValidationCacheEXT*>( &validationCache ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        return android::base::unexpected(result);
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ValidationCacheEXT(device, validationCache, allocator);
+    }
+
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    // Throwing constructor: creates a validation cache and stores the handle in
+    // m_validationCache. (Was `&validationCache`, an undeclared name — only create() has a
+    // local of that name; never compiled because Android defines VULKAN_HPP_NO_EXCEPTIONS.)
+    ValidationCacheEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( *device ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateValidationCacheEXT( static_cast<VkDevice>( *device ), reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ), reinterpret_cast<VkValidationCacheEXT*>( &m_validationCache ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, "vkCreateValidationCacheEXT" );
+      }
+    }
+#endif
+
+    // Adopting constructor: takes ownership of an already-created VkValidationCacheEXT.
+    ValidationCacheEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkValidationCacheEXT validationCache, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( *device ), m_validationCache( validationCache ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {}
+
+    // Empty wrapper holding no cache.
+    ValidationCacheEXT( std::nullptr_t ) {}
+
+    // Destroys the owned cache (if any) via clear().
+    ~ValidationCacheEXT()
+    {
+      clear();
+    }
+
+    // Move-only RAII type: copying would double-destroy the handle.
+    ValidationCacheEXT() = delete;
+      ValidationCacheEXT( ValidationCacheEXT const & ) = delete;
+    // Move constructor: steals all state from rhs, leaving rhs empty.
+    ValidationCacheEXT( ValidationCacheEXT && rhs ) VULKAN_HPP_NOEXCEPT
+      : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ), m_validationCache( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_validationCache, {} ) ), m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ), m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
+    {}
+      ValidationCacheEXT & operator=( ValidationCacheEXT const & ) = delete;
+    // Move assignment: destroys the currently owned cache first, then steals rhs's state.
+    ValidationCacheEXT & operator=( ValidationCacheEXT && rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      if ( this != &rhs )
+      {
+          clear();
+          m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
+          m_validationCache = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_validationCache, {} );
+          m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
+        m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+      }
+      return *this;
+    }
+
+    // Access the wrapped Vulkan-Hpp handle (non-owning view).
+    VULKAN_HPP_NAMESPACE::ValidationCacheEXT const & operator*() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_validationCache;
+    }
+
+    // Destroys the owned cache via vkDestroyValidationCacheEXT (if non-null) and resets
+    // all members. Idempotent.
+    void clear() VULKAN_HPP_NOEXCEPT
+    {
+        if ( m_validationCache )
+        {
+          getDispatcher()->vkDestroyValidationCacheEXT( static_cast<VkDevice>( m_device ), static_cast<VkValidationCacheEXT>( m_validationCache ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+        }
+      m_device = nullptr;
+      m_validationCache = nullptr;
+      m_allocator = nullptr;
+        m_dispatcher = nullptr;
+    }
+
+    // Relinquishes ownership: returns the handle without destroying it; wrapper becomes empty.
+    VULKAN_HPP_NAMESPACE::ValidationCacheEXT release()
+    {
+
+      m_device = nullptr;
+      m_allocator = nullptr;
+        m_dispatcher = nullptr;
+        return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_validationCache, nullptr );
+    }
+
+    // The device this cache belongs to (null after move/clear/release).
+    VULKAN_HPP_NAMESPACE::Device getDevice() const
+    {
+      return m_device;
+    }
+
+    // Device-level dispatch table; asserts the loader header version matches.
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
+    {
+      VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+      return m_dispatcher;
+    }
+
+    // Member-wise swap with another wrapper (no Vulkan calls).
+    void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ValidationCacheEXT & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+
+      std::swap( m_device, rhs.m_device );
+      std::swap( m_validationCache, rhs.m_validationCache );
+      std::swap( m_allocator, rhs.m_allocator );
+      std::swap( m_dispatcher, rhs.m_dispatcher );
+    }
+
+
+  //=== VK_EXT_validation_cache ===
+
+    
+     void merge( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ValidationCacheEXT> const & srcCaches ) const ;
+
+    
+    VULKAN_HPP_NODISCARD  std::vector<uint8_t> getData(  ) const ;
+
+
+  private:
+
+    
+    VULKAN_HPP_NAMESPACE::Device m_device = {};
+    VULKAN_HPP_NAMESPACE::ValidationCacheEXT m_validationCache = {};
+    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
+  };
+
+  class VideoSessionKHR
+  {
+  public:
+    using CType = VkVideoSessionKHR;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eVideoSessionKHR;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+
+  public:
+
+
+    // Exception-free factory: creates a VkVideoSessionKHR and wraps it; returns the
+    // failing VULKAN_HPP_NAMESPACE::Result on error (Android builds disable exceptions).
+    static android::base::expected<VideoSessionKHR, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) 
+    {
+      VkVideoSessionKHR videoSession;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateVideoSessionKHR( static_cast<VkDevice>( *device ), reinterpret_cast<const VkVideoSessionCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkVideoSessionKHR*>( &videoSession ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        return android::base::unexpected(result);
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::VideoSessionKHR(device, videoSession, allocator);
+    }
+
+
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    // Throwing constructor: creates a video session and stores the handle in
+    // m_videoSession. (Was `&videoSession`, an undeclared name — only create() has a local
+    // of that name; never compiled because Android defines VULKAN_HPP_NO_EXCEPTIONS.)
+    VideoSessionKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( *device ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateVideoSessionKHR( static_cast<VkDevice>( *device ), reinterpret_cast<const VkVideoSessionCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ), reinterpret_cast<VkVideoSessionKHR*>( &m_videoSession ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, "vkCreateVideoSessionKHR" );
+      }
+    }
+#endif
+
+    VideoSessionKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkVideoSessionKHR videoSession, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( *device ), m_videoSession( videoSession ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {}
+
+
+    VideoSessionKHR( std::nullptr_t ) {}
+
+    ~VideoSessionKHR()
+    {
+      clear();
+    }
+
+    // Move-only RAII type: default construction and copying are disabled.
+    VideoSessionKHR() = delete;
+      VideoSessionKHR( VideoSessionKHR const & ) = delete;
+    // Move construction transfers ownership; the source is left in the empty state.
+    VideoSessionKHR( VideoSessionKHR && rhs ) VULKAN_HPP_NOEXCEPT
+      : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ), m_videoSession( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_videoSession, {} ) ), m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ), m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
+    {}
+      VideoSessionKHR & operator=( VideoSessionKHR const & ) = delete;
+    // Move assignment destroys any currently owned handle first, then takes over rhs's state.
+    VideoSessionKHR & operator=( VideoSessionKHR && rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      if ( this != &rhs )
+      {
+          clear();
+          m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
+          m_videoSession = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_videoSession, {} );
+          m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
+        m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+      }
+      return *this;
+    }
+
+    // Access the wrapped Vulkan-Hpp handle without giving up ownership.
+    VULKAN_HPP_NAMESPACE::VideoSessionKHR const & operator*() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_videoSession;
+    }
+
+    // Destroys the owned handle (if non-null) and resets all members to the empty state.
+    void clear() VULKAN_HPP_NOEXCEPT
+    {
+        if ( m_videoSession )
+        {
+          getDispatcher()->vkDestroyVideoSessionKHR( static_cast<VkDevice>( m_device ), static_cast<VkVideoSessionKHR>( m_videoSession ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+        }
+      m_device = nullptr;
+      m_videoSession = nullptr;
+      m_allocator = nullptr;
+        m_dispatcher = nullptr;
+    }
+
+    // Relinquishes ownership: returns the handle WITHOUT destroying it and empties the wrapper.
+    VULKAN_HPP_NAMESPACE::VideoSessionKHR release()
+    {
+
+      m_device = nullptr;
+      m_allocator = nullptr;
+        m_dispatcher = nullptr;
+        return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_videoSession, nullptr );
+    }
+
+
+    // Returns the (non-owning) device handle this session was created against.
+    VULKAN_HPP_NAMESPACE::Device getDevice() const
+    {
+      return m_device;
+    }
+
+    // Returns the device-level dispatch table; asserts it matches this header's version.
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
+    {
+      VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+      return m_dispatcher;
+    }
+
+    // Exchanges all state (handle, device, allocator, dispatcher) with rhs.
+    void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::VideoSessionKHR & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+
+      std::swap( m_device, rhs.m_device );
+      std::swap( m_videoSession, rhs.m_videoSession );
+      std::swap( m_allocator, rhs.m_allocator );
+      std::swap( m_dispatcher, rhs.m_dispatcher );
+    }
+
+
+  //=== VK_KHR_video_queue ===
+
+    
+    VULKAN_HPP_NODISCARD  std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR> getMemoryRequirements(  ) const ;
+
+    
+     void bindMemory( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR> const & bindSessionMemoryInfos ) const ;
+
+
+  private:
+
+    
+    VULKAN_HPP_NAMESPACE::Device m_device = {};
+    VULKAN_HPP_NAMESPACE::VideoSessionKHR m_videoSession = {};
+    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
+  };
+
+  // RAII wrapper owning a VkVideoSessionParametersKHR (VK_KHR_video_queue).
+  // Move-only; destroys the handle via vkDestroyVideoSessionParametersKHR in clear()/~.
+  class VideoSessionParametersKHR
+  {
+  public:
+    using CType = VkVideoSessionParametersKHR;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eVideoSessionParametersKHR;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+
+  public:
+
+
+    // Non-throwing factory: returns the wrapper on success, or the failing Result.
+    static android::base::expected<VideoSessionParametersKHR, VULKAN_HPP_NAMESPACE::Result> create( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) 
+    {
+      VkVideoSessionParametersKHR videoSessionParameters;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateVideoSessionParametersKHR( static_cast<VkDevice>( *device ), reinterpret_cast<const VkVideoSessionParametersCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>(static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkVideoSessionParametersKHR*>( &videoSessionParameters ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        return android::base::unexpected(result);
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::VideoSessionParametersKHR(device, videoSessionParameters, allocator);
+    }
+
+
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+    // Throwing constructor: creates the handle and assumes ownership of it.
+    VideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( *device ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {
+      // BUGFIX: write the created handle into the m_videoSessionParameters member. The previous
+      // code referenced an undeclared local "videoSessionParameters" (that local only exists in
+      // the static create() factory above), which fails to compile whenever this constructor is
+      // enabled (i.e. when VULKAN_HPP_NO_EXCEPTIONS is not defined) and left the member unset.
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateVideoSessionParametersKHR( static_cast<VkDevice>( *device ), reinterpret_cast<const VkVideoSessionParametersCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ), reinterpret_cast<VkVideoSessionParametersKHR*>( &m_videoSessionParameters ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, "vkCreateVideoSessionParametersKHR" );
+      }
+    }
+#endif
+
+    // Adopting constructor: takes ownership of an already-created handle; no Vulkan call.
+    VideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkVideoSessionParametersKHR videoSessionParameters, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( *device ), m_videoSessionParameters( videoSessionParameters ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
+    {}
+
+
+    // Constructs an empty (non-owning) wrapper.
+    VideoSessionParametersKHR( std::nullptr_t ) {}
+
+    ~VideoSessionParametersKHR()
+    {
+      clear();
+    }
+
+    // Move-only RAII type: default construction and copying are disabled.
+    VideoSessionParametersKHR() = delete;
+      VideoSessionParametersKHR( VideoSessionParametersKHR const & ) = delete;
+    VideoSessionParametersKHR( VideoSessionParametersKHR && rhs ) VULKAN_HPP_NOEXCEPT
+      : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ), m_videoSessionParameters( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_videoSessionParameters, {} ) ), m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ), m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
+    {}
+      VideoSessionParametersKHR & operator=( VideoSessionParametersKHR const & ) = delete;
+    VideoSessionParametersKHR & operator=( VideoSessionParametersKHR && rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      if ( this != &rhs )
+      {
+          clear();
+          m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
+          m_videoSessionParameters = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_videoSessionParameters, {} );
+          m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
+        m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+      }
+      return *this;
+    }
+
+    VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR const & operator*() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_videoSessionParameters;
+    }
+
+    // Destroys the owned handle (if non-null) and resets all members to the empty state.
+    void clear() VULKAN_HPP_NOEXCEPT
+    {
+        if ( m_videoSessionParameters )
+        {
+          getDispatcher()->vkDestroyVideoSessionParametersKHR( static_cast<VkDevice>( m_device ), static_cast<VkVideoSessionParametersKHR>( m_videoSessionParameters ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+        }
+      m_device = nullptr;
+      m_videoSessionParameters = nullptr;
+      m_allocator = nullptr;
+        m_dispatcher = nullptr;
+    }
+
+    // Relinquishes ownership: returns the handle WITHOUT destroying it and empties the wrapper.
+    VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR release()
+    {
+
+      m_device = nullptr;
+      m_allocator = nullptr;
+        m_dispatcher = nullptr;
+        return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_videoSessionParameters, nullptr );
+    }
+
+
+    VULKAN_HPP_NAMESPACE::Device getDevice() const
+    {
+      return m_device;
+    }
+
+    // Returns the device-level dispatch table; asserts it matches this header's version.
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
+    {
+      VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+      return m_dispatcher;
+    }
+
+    void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::VideoSessionParametersKHR & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+
+      std::swap( m_device, rhs.m_device );
+      std::swap( m_videoSessionParameters, rhs.m_videoSessionParameters );
+      std::swap( m_allocator, rhs.m_allocator );
+      std::swap( m_dispatcher, rhs.m_dispatcher );
+    }
+
+
+  //=== VK_KHR_video_queue ===
+
+    
+     void update( const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR & updateInfo ) const ;
+
+
+  private:
+
+    
+    VULKAN_HPP_NAMESPACE::Device m_device = {};
+    VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR m_videoSessionParameters = {};
+    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
+  };
+
+
+
+  //===========================
+  //=== COMMAND Definitions ===
+  //===========================
+
+
+  //=== VK_VERSION_1_0 ===
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  // Creates an instance; the RAII Instance constructor performs the vkCreateInstance call.
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Instance Context::createInstance( VULKAN_HPP_NAMESPACE::InstanceCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::Instance( *this, createInfo, allocator );
+  }
+#endif
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  // Enumerates physical devices; the PhysicalDevices helper does the actual enumeration.
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice> Instance::enumeratePhysicalDevices(  ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::PhysicalDevices( *this );
+  }
+#endif
+
+  
+  // Wraps vkGetPhysicalDeviceFeatures; returns the features struct by value.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures PhysicalDevice::getFeatures(  ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features;
+    getDispatcher()->vkGetPhysicalDeviceFeatures( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<VkPhysicalDeviceFeatures *>( &features ) );
+    
+    
+    return features;
+  }
+
+  
+  // Wraps vkGetPhysicalDeviceFormatProperties for a single format.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    VULKAN_HPP_NAMESPACE::FormatProperties formatProperties;
+    getDispatcher()->vkGetPhysicalDeviceFormatProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties *>( &formatProperties ) );
+    
+    
+    return formatProperties;
+  }
+
+  
+  // Wraps vkGetPhysicalDeviceImageFormatProperties; resultCheck reports a non-success result.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ImageFormatProperties PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags ) const  
+  {
+
+
+    
+    VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties;
+    VkResult result = getDispatcher()->vkGetPhysicalDeviceImageFormatProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageCreateFlags>( flags ), reinterpret_cast<VkImageFormatProperties *>( &imageFormatProperties ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties" );
+    
+    return imageFormatProperties;
+  }
+
+  
+  // Wraps vkGetPhysicalDeviceProperties; returns the properties struct by value.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties PhysicalDevice::getProperties(  ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties;
+    getDispatcher()->vkGetPhysicalDeviceProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<VkPhysicalDeviceProperties *>( &properties ) );
+    
+    
+    return properties;
+  }
+
+  
+  // Wraps vkGetPhysicalDeviceQueueFamilyProperties using the two-call (count, then fill) idiom.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties> PhysicalDevice::getQueueFamilyProperties(  ) const  
+  {
+
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties> queueFamilyProperties;
+    uint32_t queueFamilyPropertyCount;
+    getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ), &queueFamilyPropertyCount, nullptr );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ), &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties *>( queueFamilyProperties.data() ) );
+    
+    // Shrink in case the driver returned fewer entries on the second call.
+    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
+    if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
+    {
+      queueFamilyProperties.resize( queueFamilyPropertyCount );
+    }
+    return queueFamilyProperties;
+  }
+
+  
+  // Wraps vkGetPhysicalDeviceMemoryProperties; returns the struct by value.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties PhysicalDevice::getMemoryProperties(  ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties;
+    getDispatcher()->vkGetPhysicalDeviceMemoryProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<VkPhysicalDeviceMemoryProperties *>( &memoryProperties ) );
+    
+    
+    return memoryProperties;
+  }
+
+  
+  // Wraps vkGetInstanceProcAddr; may return nullptr for unknown names.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const std::string & name ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    PFN_vkVoidFunction result = getDispatcher()->vkGetInstanceProcAddr( static_cast<VkInstance>( m_instance ), name.c_str() );
+    
+    
+    return result;
+  }
+
+  
+  // Wraps vkGetDeviceProcAddr; may return nullptr for unknown names.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const std::string & name ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    PFN_vkVoidFunction result = getDispatcher()->vkGetDeviceProcAddr( static_cast<VkDevice>( m_device ), name.c_str() );
+    
+    
+    return result;
+  }
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  // Creates a logical device; the RAII Device constructor performs the vkCreateDevice call.
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Device PhysicalDevice::createDevice( VULKAN_HPP_NAMESPACE::DeviceCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::Device( *this, createInfo, allocator );
+  }
+#endif
+
+  
+  // Wraps vkEnumerateInstanceExtensionProperties; layerName==nullptr queries implicit extensions.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties> Context::enumerateInstanceExtensionProperties( Optional<const std::string> layerName ) const  
+  {
+
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties> properties;
+    uint32_t propertyCount;
+    VkResult result;
+    // Two-call idiom, retried while VK_INCOMPLETE in case the count grows between calls.
+    do
+    {
+      result = getDispatcher()->vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = getDispatcher()->vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Context::enumerateInstanceExtensionProperties" );
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    if ( propertyCount < properties.size() )
+    {
+      properties.resize( propertyCount );
+    }
+    return properties;
+  }
+
+  
+  // Wraps vkEnumerateDeviceExtensionProperties (same two-call + VK_INCOMPLETE retry pattern).
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties> PhysicalDevice::enumerateDeviceExtensionProperties( Optional<const std::string> layerName ) const  
+  {
+
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties> properties;
+    uint32_t propertyCount;
+    VkResult result;
+    do
+    {
+      result = getDispatcher()->vkEnumerateDeviceExtensionProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ), layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = getDispatcher()->vkEnumerateDeviceExtensionProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ), layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" );
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    if ( propertyCount < properties.size() )
+    {
+      properties.resize( propertyCount );
+    }
+    return properties;
+  }
+
+  
+  // Wraps vkEnumerateInstanceLayerProperties (same two-call + VK_INCOMPLETE retry pattern).
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::LayerProperties> Context::enumerateInstanceLayerProperties(  ) const  
+  {
+
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::LayerProperties> properties;
+    uint32_t propertyCount;
+    VkResult result;
+    do
+    {
+      result = getDispatcher()->vkEnumerateInstanceLayerProperties( &propertyCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = getDispatcher()->vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Context::enumerateInstanceLayerProperties" );
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    if ( propertyCount < properties.size() )
+    {
+      properties.resize( propertyCount );
+    }
+    return properties;
+  }
+
+  
+  // Wraps vkEnumerateDeviceLayerProperties (same two-call + VK_INCOMPLETE retry pattern).
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::LayerProperties> PhysicalDevice::enumerateDeviceLayerProperties(  ) const  
+  {
+
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::LayerProperties> properties;
+    uint32_t propertyCount;
+    VkResult result;
+    do
+    {
+      result = getDispatcher()->vkEnumerateDeviceLayerProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = getDispatcher()->vkEnumerateDeviceLayerProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" );
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    if ( propertyCount < properties.size() )
+    {
+      properties.resize( propertyCount );
+    }
+    return properties;
+  }
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  // Retrieves a queue; the RAII Queue constructor performs the vkGetDeviceQueue call.
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Queue Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::Queue( *this, queueFamilyIndex, queueIndex );
+  }
+#endif
+
+  
+  // Wraps vkQueueSubmit; result is validated by resultCheck.
+   VULKAN_HPP_INLINE void Queue::submit( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo> const & submits, VULKAN_HPP_NAMESPACE::Fence fence ) const  
+  {
+
+
+    
+    
+    VkResult result = getDispatcher()->vkQueueSubmit( static_cast<VkQueue>( m_queue ), submits.size(), reinterpret_cast<const VkSubmitInfo *>( submits.data() ), static_cast<VkFence>( fence ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::submit" );
+    
+    
+  }
+
+  
+  // Wraps vkQueueWaitIdle.
+   VULKAN_HPP_INLINE void Queue::waitIdle(  ) const  
+  {
+
+
+    
+    
+    VkResult result = getDispatcher()->vkQueueWaitIdle( static_cast<VkQueue>( m_queue ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::waitIdle" );
+    
+    
+  }
+
+  
+  // Wraps vkDeviceWaitIdle.
+   VULKAN_HPP_INLINE void Device::waitIdle(  ) const  
+  {
+
+
+    
+    
+    VkResult result = getDispatcher()->vkDeviceWaitIdle( static_cast<VkDevice>( m_device ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::waitIdle" );
+    
+    
+  }
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  // Allocates device memory; the RAII DeviceMemory constructor performs vkAllocateMemory.
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DeviceMemory Device::allocateMemory( VULKAN_HPP_NAMESPACE::MemoryAllocateInfo const & allocateInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::DeviceMemory( *this, allocateInfo, allocator );
+  }
+#endif
+
+  
+  // Wraps vkMapMemory; returns the host pointer to the mapped range.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE void * DeviceMemory::mapMemory( VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::DeviceSize size, VULKAN_HPP_NAMESPACE::MemoryMapFlags flags ) const  
+  {
+
+
+    
+    void * pData;
+    VkResult result = getDispatcher()->vkMapMemory( static_cast<VkDevice>( m_device ), static_cast<VkDeviceMemory>( m_memory ), static_cast<VkDeviceSize>( offset ), static_cast<VkDeviceSize>( size ), static_cast<VkMemoryMapFlags>( flags ), &pData );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::DeviceMemory::mapMemory" );
+    
+    return pData;
+  }
+
+  
+  // Wraps vkUnmapMemory (no result to check).
+   VULKAN_HPP_INLINE void DeviceMemory::unmapMemory(  ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkUnmapMemory( static_cast<VkDevice>( m_device ), static_cast<VkDeviceMemory>( m_memory ) );
+    
+    
+    
+  }
+
+  
+  // Wraps vkFlushMappedMemoryRanges over an ArrayProxy of ranges.
+   VULKAN_HPP_INLINE void Device::flushMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges ) const  
+  {
+
+
+    
+    
+    VkResult result = getDispatcher()->vkFlushMappedMemoryRanges( static_cast<VkDevice>( m_device ), memoryRanges.size(), reinterpret_cast<const VkMappedMemoryRange *>( memoryRanges.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::flushMappedMemoryRanges" );
+    
+    
+  }
+
+  
+  // Wraps vkInvalidateMappedMemoryRanges over an ArrayProxy of ranges.
+   VULKAN_HPP_INLINE void Device::invalidateMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges ) const  
+  {
+
+
+    
+    
+    VkResult result = getDispatcher()->vkInvalidateMappedMemoryRanges( static_cast<VkDevice>( m_device ), memoryRanges.size(), reinterpret_cast<const VkMappedMemoryRange *>( memoryRanges.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::invalidateMappedMemoryRanges" );
+    
+    
+  }
+
+  
+  // Wraps vkGetDeviceMemoryCommitment; returns the committed size in bytes.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize DeviceMemory::getCommitment(  ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    VULKAN_HPP_NAMESPACE::DeviceSize committedMemoryInBytes;
+    getDispatcher()->vkGetDeviceMemoryCommitment( static_cast<VkDevice>( m_device ), static_cast<VkDeviceMemory>( m_memory ), reinterpret_cast<VkDeviceSize *>( &committedMemoryInBytes ) );
+    
+    
+    return committedMemoryInBytes;
+  }
+
+  
+  // Wraps vkBindBufferMemory for this buffer.
+   VULKAN_HPP_INLINE void Buffer::bindMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset ) const  
+  {
+
+
+    
+    
+    VkResult result = getDispatcher()->vkBindBufferMemory( static_cast<VkDevice>( m_device ), static_cast<VkBuffer>( m_buffer ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Buffer::bindMemory" );
+    
+    
+  }
+
+  
+  // Wraps vkBindImageMemory for this image.
+   VULKAN_HPP_INLINE void Image::bindMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset ) const  
+  {
+
+
+    
+    
+    VkResult result = getDispatcher()->vkBindImageMemory( static_cast<VkDevice>( m_device ), static_cast<VkImage>( m_image ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Image::bindMemory" );
+    
+    
+  }
+
+  
+  // Wraps vkGetBufferMemoryRequirements; returns the requirements struct by value.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements Buffer::getMemoryRequirements(  ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements;
+    getDispatcher()->vkGetBufferMemoryRequirements( static_cast<VkDevice>( m_device ), static_cast<VkBuffer>( m_buffer ), reinterpret_cast<VkMemoryRequirements *>( &memoryRequirements ) );
+    
+    
+    return memoryRequirements;
+  }
+
+  
+  // Wraps vkGetImageMemoryRequirements; returns the requirements struct by value.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements Image::getMemoryRequirements(  ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements;
+    getDispatcher()->vkGetImageMemoryRequirements( static_cast<VkDevice>( m_device ), static_cast<VkImage>( m_image ), reinterpret_cast<VkMemoryRequirements *>( &memoryRequirements ) );
+    
+    
+    return memoryRequirements;
+  }
+
+  
+  // Wraps vkGetImageSparseMemoryRequirements using the two-call (count, then fill) idiom.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements> Image::getSparseMemoryRequirements(  ) const  
+  {
+
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements> sparseMemoryRequirements;
+    uint32_t sparseMemoryRequirementCount;
+    getDispatcher()->vkGetImageSparseMemoryRequirements( static_cast<VkDevice>( m_device ), static_cast<VkImage>( m_image ), &sparseMemoryRequirementCount, nullptr );
+    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    getDispatcher()->vkGetImageSparseMemoryRequirements( static_cast<VkDevice>( m_device ), static_cast<VkImage>( m_image ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements *>( sparseMemoryRequirements.data() ) );
+    
+    VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
+    if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
+    {
+      sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    }
+    return sparseMemoryRequirements;
+  }
+
+  
+  // Wraps vkGetPhysicalDeviceSparseImageFormatProperties (two-call idiom).
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties> PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling ) const  
+  {
+
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties> properties;
+    uint32_t propertyCount;
+    getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ), &propertyCount, nullptr );
+    properties.resize( propertyCount );
+    getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties *>( properties.data() ) );
+    
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    if ( propertyCount < properties.size() )
+    {
+      properties.resize( propertyCount );
+    }
+    return properties;
+  }
+
+  
+  // Wraps vkQueueBindSparse; result is validated by resultCheck.
+   VULKAN_HPP_INLINE void Queue::bindSparse( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindSparseInfo> const & bindInfo, VULKAN_HPP_NAMESPACE::Fence fence ) const  
+  {
+
+
+    
+    
+    VkResult result = getDispatcher()->vkQueueBindSparse( static_cast<VkQueue>( m_queue ), bindInfo.size(), reinterpret_cast<const VkBindSparseInfo *>( bindInfo.data() ), static_cast<VkFence>( fence ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::bindSparse" );
+    
+    
+  }
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  // Creates a fence; the RAII Fence constructor performs the vkCreateFence call.
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Fence Device::createFence( VULKAN_HPP_NAMESPACE::FenceCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::Fence( *this, createInfo, allocator );
+  }
+#endif
+
+  
+  // Resets the given fences to the unsignaled state. Throws (via resultCheck)
+  // on any VkResult other than eSuccess.
+   VULKAN_HPP_INLINE void Device::resetFences( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences ) const  
+  {
+
+
+    
+    
+    VkResult result = getDispatcher()->vkResetFences( static_cast<VkDevice>( m_device ), fences.size(), reinterpret_cast<const VkFence *>( fences.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::resetFences" );
+    
+    
+  }
+
+  
+  // Polls this fence's state. eSuccess and eNotReady are both non-error
+  // outcomes (passed to resultCheck as allowed results) and are returned to the caller.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Fence::getStatus(  ) const  
+  {
+
+
+    
+    
+    VkResult result = getDispatcher()->vkGetFenceStatus( static_cast<VkDevice>( m_device ), static_cast<VkFence>( m_fence ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Fence::getStatus", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
+    
+    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+  }
+
+  
+  // Blocks until the fences signal (all or any, per waitAll) or the timeout
+  // (nanoseconds) elapses. eSuccess and eTimeout are returned; other results throw.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::waitForFences( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout ) const  
+  {
+
+
+    
+    
+    VkResult result = getDispatcher()->vkWaitForFences( static_cast<VkDevice>( m_device ), fences.size(), reinterpret_cast<const VkFence *>( fences.data() ), static_cast<VkBool32>( waitAll ), timeout );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::waitForFences", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } );
+    
+    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+  }
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  // RAII factory: constructs a Semaphore owned by the returned wrapper.
+  // Only available when exceptions are enabled (construction reports errors by throwing).
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Semaphore Device::createSemaphore( VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::Semaphore( *this, createInfo, allocator );
+  }
+#endif
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  // RAII factory: constructs an Event owned by the returned wrapper.
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Event Device::createEvent( VULKAN_HPP_NAMESPACE::EventCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::Event( *this, createInfo, allocator );
+  }
+#endif
+
+  
+  // Polls this event's state. eEventSet / eEventReset are the two non-error
+  // outcomes and are returned to the caller; anything else throws via resultCheck.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Event::getStatus(  ) const  
+  {
+
+
+    
+    
+    VkResult result = getDispatcher()->vkGetEventStatus( static_cast<VkDevice>( m_device ), static_cast<VkEvent>( m_event ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Event::getStatus", { VULKAN_HPP_NAMESPACE::Result::eEventSet, VULKAN_HPP_NAMESPACE::Result::eEventReset } );
+    
+    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+  }
+
+  
+  // Sets this event to the signaled state (host-side vkSetEvent); throws on failure.
+   VULKAN_HPP_INLINE void Event::set(  ) const  
+  {
+
+
+    
+    
+    VkResult result = getDispatcher()->vkSetEvent( static_cast<VkDevice>( m_device ), static_cast<VkEvent>( m_event ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Event::set" );
+    
+    
+  }
+
+  
+  // Resets this event to the unsignaled state (host-side vkResetEvent); throws on failure.
+   VULKAN_HPP_INLINE void Event::reset(  ) const  
+  {
+
+
+    
+    
+    VkResult result = getDispatcher()->vkResetEvent( static_cast<VkDevice>( m_device ), static_cast<VkEvent>( m_event ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Event::reset" );
+    
+    
+  }
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  // RAII factory: constructs a QueryPool owned by the returned wrapper.
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::QueryPool Device::createQueryPool( VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::QueryPool( *this, createInfo, allocator );
+  }
+#endif
+
+  // Fetches query results for [firstQuery, firstQuery+queryCount) into a vector of
+  // DataType. dataSize must be a multiple of sizeof(DataType) (asserted); the
+  // caller-supplied stride/flags are forwarded unchanged. Returns the VkResult
+  // (eSuccess or eNotReady) alongside the data, since partial availability is
+  // not an error.
+  template <typename DataType>
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::pair<VULKAN_HPP_NAMESPACE::Result, std::vector<DataType>> QueryPool::getResults( uint32_t firstQuery, uint32_t queryCount, size_t dataSize, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags ) const  
+  {
+
+
+        VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
+    std::vector<DataType> data( dataSize / sizeof( DataType ) );
+    VkResult result = getDispatcher()->vkGetQueryPoolResults( static_cast<VkDevice>( m_device ), static_cast<VkQueryPool>( m_queryPool ), firstQuery, queryCount, data.size() * sizeof( DataType ), reinterpret_cast<void *>( data.data() ), static_cast<VkDeviceSize>( stride ), static_cast<VkQueryResultFlags>( flags ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::QueryPool::getResults", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
+    
+    return std::make_pair( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
+  }
+
+  // Single-value convenience overload of getResults: fetches exactly
+  // sizeof(DataType) bytes of query results into one DataType.
+  template <typename DataType>
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::pair<VULKAN_HPP_NAMESPACE::Result, DataType> QueryPool::getResult( uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags ) const  
+  {
+
+
+    
+    DataType data;
+    VkResult result = getDispatcher()->vkGetQueryPoolResults( static_cast<VkDevice>( m_device ), static_cast<VkQueryPool>( m_queryPool ), firstQuery, queryCount, sizeof( DataType ), reinterpret_cast<void *>( &data ), static_cast<VkDeviceSize>( stride ), static_cast<VkQueryResultFlags>( flags ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::QueryPool::getResult", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
+    
+    return std::make_pair( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
+  }
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  // RAII factory: constructs a Buffer owned by the returned wrapper.
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Buffer Device::createBuffer( VULKAN_HPP_NAMESPACE::BufferCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::Buffer( *this, createInfo, allocator );
+  }
+#endif
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  // RAII factory: constructs a BufferView owned by the returned wrapper.
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::BufferView Device::createBufferView( VULKAN_HPP_NAMESPACE::BufferViewCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::BufferView( *this, createInfo, allocator );
+  }
+#endif
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  // RAII factory: constructs an Image owned by the returned wrapper.
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Image Device::createImage( VULKAN_HPP_NAMESPACE::ImageCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::Image( *this, createInfo, allocator );
+  }
+#endif
+
+  
+  // Returns the memory layout of one subresource of this (linearly tiled) image.
+  // Pure query — no VkResult, hence noexcept.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout Image::getSubresourceLayout( const VULKAN_HPP_NAMESPACE::ImageSubresource & subresource ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    VULKAN_HPP_NAMESPACE::SubresourceLayout layout;
+    getDispatcher()->vkGetImageSubresourceLayout( static_cast<VkDevice>( m_device ), static_cast<VkImage>( m_image ), reinterpret_cast<const VkImageSubresource *>( &subresource ), reinterpret_cast<VkSubresourceLayout *>( &layout ) );
+    
+    
+    return layout;
+  }
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  // RAII factory: constructs an ImageView owned by the returned wrapper.
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::ImageView Device::createImageView( VULKAN_HPP_NAMESPACE::ImageViewCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::ImageView( *this, createInfo, allocator );
+  }
+#endif
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  // RAII factory: constructs a ShaderModule owned by the returned wrapper.
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::ShaderModule Device::createShaderModule( VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::ShaderModule( *this, createInfo, allocator );
+  }
+#endif
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  // RAII factory: constructs a PipelineCache owned by the returned wrapper.
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::PipelineCache Device::createPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::PipelineCache( *this, createInfo, allocator );
+  }
+#endif
+
+  
+  // Retrieves the serialized contents of this pipeline cache using the standard
+  // two-call pattern: first query the size, then fetch into a buffer of that size,
+  // looping while VK_INCOMPLETE indicates the size changed between the two calls.
+  // The final resize trims the vector if less data was written than allocated.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::vector<uint8_t> PipelineCache::getData(  ) const  
+  {
+
+
+    
+    std::vector<uint8_t> data;
+    size_t dataSize;
+    VkResult result;
+    do
+    {
+      result = getDispatcher()->vkGetPipelineCacheData( static_cast<VkDevice>( m_device ), static_cast<VkPipelineCache>( m_pipelineCache ), &dataSize, nullptr );
+      if ( ( result == VK_SUCCESS ) && dataSize )
+      {
+        data.resize( dataSize );
+        result = getDispatcher()->vkGetPipelineCacheData( static_cast<VkDevice>( m_device ), static_cast<VkPipelineCache>( m_pipelineCache ), &dataSize, reinterpret_cast<void *>( data.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PipelineCache::getData" );
+    VULKAN_HPP_ASSERT( dataSize <= data.size() );
+    if ( dataSize < data.size() )
+    {
+      data.resize( dataSize );
+    }
+    return data;
+  }
+
+  
+  // Merges the given source caches into this pipeline cache; throws on failure.
+   VULKAN_HPP_INLINE void PipelineCache::merge( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::PipelineCache> const & srcCaches ) const  
+  {
+
+
+    
+    
+    VkResult result = getDispatcher()->vkMergePipelineCaches( static_cast<VkDevice>( m_device ), static_cast<VkPipelineCache>( m_pipelineCache ), srcCaches.size(), reinterpret_cast<const VkPipelineCache *>( srcCaches.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PipelineCache::merge" );
+    
+    
+  }
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  // RAII factory: builds one graphics pipeline per create-info, optionally using
+  // the given pipeline cache. Returns a vector of owning Pipeline wrappers.
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_RAII_NAMESPACE::Pipeline> Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::Pipelines( *this, pipelineCache, createInfos, allocator );
+  }
+#endif
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  // RAII factory: single-pipeline convenience form of createGraphicsPipelines.
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Pipeline Device::createGraphicsPipeline( VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache, VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::Pipeline( *this, pipelineCache, createInfo, allocator );
+  }
+#endif
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  // RAII factory: builds one compute pipeline per create-info.
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_RAII_NAMESPACE::Pipeline> Device::createComputePipelines( VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::Pipelines( *this, pipelineCache, createInfos, allocator );
+  }
+#endif
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  // RAII factory: single-pipeline convenience form of createComputePipelines.
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Pipeline Device::createComputePipeline( VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache, VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::Pipeline( *this, pipelineCache, createInfo, allocator );
+  }
+#endif
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  // RAII factory: constructs a PipelineLayout owned by the returned wrapper.
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::PipelineLayout Device::createPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::PipelineLayout( *this, createInfo, allocator );
+  }
+#endif
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  // RAII factory: constructs a Sampler owned by the returned wrapper.
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Sampler Device::createSampler( VULKAN_HPP_NAMESPACE::SamplerCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::Sampler( *this, createInfo, allocator );
+  }
+#endif
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  // RAII factory: constructs a DescriptorSetLayout owned by the returned wrapper.
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DescriptorSetLayout Device::createDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::DescriptorSetLayout( *this, createInfo, allocator );
+  }
+#endif
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  // RAII factory: constructs a DescriptorPool owned by the returned wrapper.
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DescriptorPool Device::createDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::DescriptorPool( *this, createInfo, allocator );
+  }
+#endif
+
+  
+  // Returns all descriptor sets allocated from this pool back to it.
+  // vkResetDescriptorPool always succeeds, hence noexcept and no resultCheck.
+   VULKAN_HPP_INLINE void DescriptorPool::reset( VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkResetDescriptorPool( static_cast<VkDevice>( m_device ), static_cast<VkDescriptorPool>( m_descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) );
+    
+    
+    
+  }
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  // RAII factory: allocates one DescriptorSet wrapper per layout in allocateInfo.
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_RAII_NAMESPACE::DescriptorSet> Device::allocateDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo const & allocateInfo ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::DescriptorSets( *this, allocateInfo );
+  }
+#endif
+
+  
+  // Applies the given write and copy operations to descriptor sets in one call.
+   VULKAN_HPP_INLINE void Device::updateDescriptorSets( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CopyDescriptorSet> const & descriptorCopies ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkUpdateDescriptorSets( static_cast<VkDevice>( m_device ), descriptorWrites.size(), reinterpret_cast<const VkWriteDescriptorSet *>( descriptorWrites.data() ), descriptorCopies.size(), reinterpret_cast<const VkCopyDescriptorSet *>( descriptorCopies.data() ) );
+    
+    
+    
+  }
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  // RAII factory: constructs a Framebuffer owned by the returned wrapper.
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Framebuffer Device::createFramebuffer( VULKAN_HPP_NAMESPACE::FramebufferCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::Framebuffer( *this, createInfo, allocator );
+  }
+#endif
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  // RAII factory: constructs a RenderPass owned by the returned wrapper.
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::RenderPass Device::createRenderPass( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::RenderPass( *this, createInfo, allocator );
+  }
+#endif
+
+  
+  // Returns the optimal render-area granularity for this render pass.
+  // Pure query — no VkResult, hence noexcept.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D RenderPass::getRenderAreaGranularity(  ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    VULKAN_HPP_NAMESPACE::Extent2D granularity;
+    getDispatcher()->vkGetRenderAreaGranularity( static_cast<VkDevice>( m_device ), static_cast<VkRenderPass>( m_renderPass ), reinterpret_cast<VkExtent2D *>( &granularity ) );
+    
+    
+    return granularity;
+  }
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  // RAII factory: constructs a CommandPool owned by the returned wrapper.
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::CommandPool Device::createCommandPool( VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::CommandPool( *this, createInfo, allocator );
+  }
+#endif
+
+  
+  // Resets this command pool (recycling all command buffers allocated from it);
+  // throws on failure via resultCheck.
+   VULKAN_HPP_INLINE void CommandPool::reset( VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags ) const  
+  {
+
+
+    
+    
+    VkResult result = getDispatcher()->vkResetCommandPool( static_cast<VkDevice>( m_device ), static_cast<VkCommandPool>( m_commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandPool::reset" );
+    
+    
+  }
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  // RAII factory: allocates commandBufferCount CommandBuffer wrappers from the pool
+  // named in allocateInfo.
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_RAII_NAMESPACE::CommandBuffer> Device::allocateCommandBuffers( VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo const & allocateInfo ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::CommandBuffers( *this, allocateInfo );
+  }
+#endif
+
+  
+  // Begins recording into this command buffer; throws on failure.
+   VULKAN_HPP_INLINE void CommandBuffer::begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo & beginInfo ) const  
+  {
+
+
+    
+    
+    VkResult result = getDispatcher()->vkBeginCommandBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCommandBufferBeginInfo *>( &beginInfo ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::begin" );
+    
+    
+  }
+
+  
+  // Finishes recording into this command buffer; throws on failure.
+   VULKAN_HPP_INLINE void CommandBuffer::end(  ) const  
+  {
+
+
+    
+    
+    VkResult result = getDispatcher()->vkEndCommandBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::end" );
+    
+    
+  }
+
+  
+  // Resets this command buffer to the initial state; throws on failure.
+   VULKAN_HPP_INLINE void CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags ) const  
+  {
+
+
+    
+    
+    VkResult result = getDispatcher()->vkResetCommandBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCommandBufferResetFlags>( flags ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::reset" );
+    
+    
+  }
+
+  
+  // Records a pipeline bind at the given bind point (graphics/compute).
+   VULKAN_HPP_INLINE void CommandBuffer::bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdBindPipeline( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) );
+    
+    
+    
+  }
+
+  
+  // Records setting viewports [firstViewport, firstViewport + viewports.size()).
+   VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdSetViewport( static_cast<VkCommandBuffer>( m_commandBuffer ), firstViewport, viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) );
+    
+    
+    
+  }
+
+  
+  // Records setting scissor rectangles [firstScissor, firstScissor + scissors.size()).
+   VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdSetScissor( static_cast<VkCommandBuffer>( m_commandBuffer ), firstScissor, scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) );
+    
+    
+    
+  }
+
+  
+  // -- Dynamic-state setters: each records one vkCmdSet* command into this
+  // command buffer; no result to check, hence all noexcept. --
+
+  // Records the dynamic line width.
+   VULKAN_HPP_INLINE void CommandBuffer::setLineWidth( float lineWidth ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdSetLineWidth( static_cast<VkCommandBuffer>( m_commandBuffer ), lineWidth );
+    
+    
+    
+  }
+
+  
+  // Records the dynamic depth-bias parameters.
+   VULKAN_HPP_INLINE void CommandBuffer::setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdSetDepthBias( static_cast<VkCommandBuffer>( m_commandBuffer ), depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
+    
+    
+    
+  }
+
+  
+  // Records the dynamic blend constants (RGBA, 4 floats).
+   VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4] ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdSetBlendConstants( static_cast<VkCommandBuffer>( m_commandBuffer ), blendConstants );
+    
+    
+    
+  }
+
+  
+  // Records the dynamic depth-bounds test range.
+   VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds, float maxDepthBounds ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdSetDepthBounds( static_cast<VkCommandBuffer>( m_commandBuffer ), minDepthBounds, maxDepthBounds );
+    
+    
+    
+  }
+
+  
+  // Records the dynamic stencil compare mask for the given face(s).
+   VULKAN_HPP_INLINE void CommandBuffer::setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdSetStencilCompareMask( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkStencilFaceFlags>( faceMask ), compareMask );
+    
+    
+    
+  }
+
+  
+  // Records the dynamic stencil write mask for the given face(s).
+   VULKAN_HPP_INLINE void CommandBuffer::setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdSetStencilWriteMask( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkStencilFaceFlags>( faceMask ), writeMask );
+    
+    
+    
+  }
+
+  
+  // Records the dynamic stencil reference value for the given face(s).
+   VULKAN_HPP_INLINE void CommandBuffer::setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdSetStencilReference( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkStencilFaceFlags>( faceMask ), reference );
+    
+    
+    
+  }
+
+  
+  // Records binding descriptor sets to the given pipeline layout/bind point,
+  // with dynamic offsets forwarded verbatim.
+   VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets, VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & dynamicOffsets ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdBindDescriptorSets( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), firstSet, descriptorSets.size(), reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ), dynamicOffsets.size(), dynamicOffsets.data() );
+    
+    
+    
+  }
+
+  
+  // Records binding an index buffer at the given offset with the given index type.
+   VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::IndexType indexType ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdBindIndexBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkIndexType>( indexType ) );
+    
+    
+    
+  }
+
+  
+  // Records binding vertex buffers starting at firstBinding. Not noexcept:
+  // a buffers/offsets length mismatch is reported — by throwing LogicError when
+  // exceptions are enabled, otherwise by LOG(FATAL) (Android logging in this
+  // cuttlefish-vendored copy).
+   VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets ) const  
+  {
+
+    if ( buffers.size() != offsets.size() )
+  {
+    #ifndef VULKAN_HPP_NO_EXCEPTIONS
+    throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers: buffers.size() != offsets.size()" );
+    #else
+    LOG(FATAL) << VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers: buffers.size() != offsets.size()";
+    #endif
+  }
+
+    
+    
+    getDispatcher()->vkCmdBindVertexBuffers( static_cast<VkCommandBuffer>( m_commandBuffer ), firstBinding, buffers.size(), reinterpret_cast<const VkBuffer *>( buffers.data() ), reinterpret_cast<const VkDeviceSize *>( offsets.data() ) );
+    
+    
+    
+  }
+
+  
+  // -- Draw and dispatch commands: thin noexcept forwards to vkCmd* --
+
+  // Records a non-indexed draw.
+   VULKAN_HPP_INLINE void CommandBuffer::draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdDraw( static_cast<VkCommandBuffer>( m_commandBuffer ), vertexCount, instanceCount, firstVertex, firstInstance );
+    
+    
+    
+  }
+
+  
+  // Records an indexed draw.
+   VULKAN_HPP_INLINE void CommandBuffer::drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdDrawIndexed( static_cast<VkCommandBuffer>( m_commandBuffer ), indexCount, instanceCount, firstIndex, vertexOffset, firstInstance );
+    
+    
+    
+  }
+
+  
+  // Records an indirect draw whose parameters live in `buffer` at `offset`.
+   VULKAN_HPP_INLINE void CommandBuffer::drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdDrawIndirect( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
+    
+    
+    
+  }
+
+  
+  // Records an indirect indexed draw whose parameters live in `buffer` at `offset`.
+   VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdDrawIndexedIndirect( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
+    
+    
+    
+  }
+
+  
+  // Records a compute dispatch of groupCountX * groupCountY * groupCountZ workgroups.
+   VULKAN_HPP_INLINE void CommandBuffer::dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdDispatch( static_cast<VkCommandBuffer>( m_commandBuffer ), groupCountX, groupCountY, groupCountZ );
+    
+    
+    
+  }
+
+  
+  // Records an indirect compute dispatch whose parameters live in `buffer` at `offset`.
+   VULKAN_HPP_INLINE void CommandBuffer::dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdDispatchIndirect( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ) );
+    
+    
+    
+  }
+
+  
+  // -- Transfer commands: thin noexcept forwards to vkCmd* copy/blit/fill/update --
+
+  // Records a buffer-to-buffer copy over the given regions.
+   VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferCopy> const & regions ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdCopyBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( srcBuffer ), static_cast<VkBuffer>( dstBuffer ), regions.size(), reinterpret_cast<const VkBufferCopy *>( regions.data() ) );
+    
+    
+    
+  }
+
+  
+  // Records an image-to-image copy (no format conversion/scaling).
+   VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageCopy> const & regions ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdCopyImage( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size(), reinterpret_cast<const VkImageCopy *>( regions.data() ) );
+    
+    
+    
+  }
+
+  
+  // Records an image blit (scaling/format-converting copy) with the given filter.
+   VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageBlit> const & regions, VULKAN_HPP_NAMESPACE::Filter filter ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdBlitImage( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size(), reinterpret_cast<const VkImageBlit *>( regions.data() ), static_cast<VkFilter>( filter ) );
+    
+    
+    
+  }
+
+  
+  // Records a buffer-to-image copy over the given regions.
+   VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdCopyBufferToImage( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( srcBuffer ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size(), reinterpret_cast<const VkBufferImageCopy *>( regions.data() ) );
+    
+    
+    
+  }
+
+  
+  // Records an image-to-buffer copy over the given regions.
+   VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdCopyImageToBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkBuffer>( dstBuffer ), regions.size(), reinterpret_cast<const VkBufferImageCopy *>( regions.data() ) );
+    
+    
+    
+  }
+
+  // Records an inline buffer update of data.size() * sizeof(DataType) bytes at dstOffset.
+  template <typename DataType>
+   VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::ArrayProxy<const DataType> const & data ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdUpdateBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), data.size() * sizeof( DataType ), reinterpret_cast<const void *>( data.data() ) );
+    
+    
+    
+  }
+
+  
+  // Records filling `size` bytes of the buffer at dstOffset with the 32-bit value `data`.
+   VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize size, uint32_t data ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdFillBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>( size ), data );
+    
+    
+    
+  }
+
+  
+  // Records clearing the given subresource ranges of a color image to `color`.
+   VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearColorValue & color, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdClearColorImage( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearColorValue *>( &color ), ranges.size(), reinterpret_cast<const VkImageSubresourceRange *>( ranges.data() ) );
+    
+    
+    
+  }
+
+  
+  // Records clearing the given subresource ranges of a depth/stencil image.
+   VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue & depthStencil, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdClearDepthStencilImage( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearDepthStencilValue *>( &depthStencil ), ranges.size(), reinterpret_cast<const VkImageSubresourceRange *>( ranges.data() ) );
+    
+    
+    
+  }
+
+  
+  // Records clearing regions (rects) of the given attachments inside a render pass.
+   VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearAttachment> const & attachments, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearRect> const & rects ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdClearAttachments( static_cast<VkCommandBuffer>( m_commandBuffer ), attachments.size(), reinterpret_cast<const VkClearAttachment *>( attachments.data() ), rects.size(), reinterpret_cast<const VkClearRect *>( rects.data() ) );
+    
+    
+    
+  }
+
+  
+  // Records resolving a multisampled image into a single-sampled one over the given regions.
+   VULKAN_HPP_INLINE void CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageResolve> const & regions ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdResolveImage( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size(), reinterpret_cast<const VkImageResolve *>( regions.data() ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdSetEvent( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::resetEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdResetEvent( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::waitEvents( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdWaitEvents( static_cast<VkCommandBuffer>( m_commandBuffer ), events.size(), reinterpret_cast<const VkEvent *>( events.data() ), static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), memoryBarriers.size(), reinterpret_cast<const VkMemoryBarrier *>( memoryBarriers.data() ), bufferMemoryBarriers.size(), reinterpret_cast<const VkBufferMemoryBarrier *>( bufferMemoryBarriers.data() ), imageMemoryBarriers.size(), reinterpret_cast<const VkImageMemoryBarrier *>( imageMemoryBarriers.data() ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdPipelineBarrier( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), static_cast<VkDependencyFlags>( dependencyFlags ), memoryBarriers.size(), reinterpret_cast<const VkMemoryBarrier *>( memoryBarriers.data() ), bufferMemoryBarriers.size(), reinterpret_cast<const VkBufferMemoryBarrier *>( bufferMemoryBarriers.data() ), imageMemoryBarriers.size(), reinterpret_cast<const VkImageMemoryBarrier *>( imageMemoryBarriers.data() ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdBeginQuery( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdEndQuery( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkQueryPool>( queryPool ), query );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdResetQueryPool( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdWriteTimestamp( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkQueryPool>( queryPool ), query );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdCopyQueryPoolResults( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>( stride ), static_cast<VkQueryResultFlags>( flags ) );
+    
+    
+    
+  }
+
+  template <typename ValuesType>
+   VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, VULKAN_HPP_NAMESPACE::ArrayProxy<const ValuesType> const & values ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdPushConstants( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineLayout>( layout ), static_cast<VkShaderStageFlags>( stageFlags ), offset, values.size() * sizeof( ValuesType ), reinterpret_cast<const void *>( values.data() ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdBeginRenderPass( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ), static_cast<VkSubpassContents>( contents ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdNextSubpass( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkSubpassContents>( contents ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::endRenderPass(  ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdEndRenderPass( static_cast<VkCommandBuffer>( m_commandBuffer ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::executeCommands( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdExecuteCommands( static_cast<VkCommandBuffer>( m_commandBuffer ), commandBuffers.size(), reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) );
+    
+    
+    
+  }
+
+  //=== VK_VERSION_1_1 ===
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE uint32_t Context::enumerateInstanceVersion(  ) const  
+  {
+
+
+    
+    uint32_t apiVersion;
+    VkResult result = getDispatcher()->vkEnumerateInstanceVersion( &apiVersion );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Context::enumerateInstanceVersion" );
+    
+    return apiVersion;
+  }
+
+  
+   VULKAN_HPP_INLINE void Device::bindBufferMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos ) const  
+  {
+
+
+    
+    
+    VkResult result = getDispatcher()->vkBindBufferMemory2( static_cast<VkDevice>( m_device ), bindInfos.size(), reinterpret_cast<const VkBindBufferMemoryInfo *>( bindInfos.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2" );
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void Device::bindImageMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos ) const  
+  {
+
+
+    
+    
+    VkResult result = getDispatcher()->vkBindImageMemory2( static_cast<VkDevice>( m_device ), bindInfos.size(), reinterpret_cast<const VkBindImageMemoryInfo *>( bindInfos.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2" );
+    
+    
+  }
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags Device::getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures;
+    getDispatcher()->vkGetDeviceGroupPeerMemoryFeatures( static_cast<VkDevice>( m_device ), heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( &peerMemoryFeatures ) );
+    
+    
+    return peerMemoryFeatures;
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t deviceMask ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdSetDeviceMask( static_cast<VkCommandBuffer>( m_commandBuffer ), deviceMask );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::dispatchBase( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdDispatchBase( static_cast<VkCommandBuffer>( m_commandBuffer ), baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
+    
+    
+    
+  }
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties> Instance::enumeratePhysicalDeviceGroups(  ) const  
+  {
+
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties> physicalDeviceGroupProperties;
+    uint32_t physicalDeviceGroupCount;
+    VkResult result;
+    do
+    {
+      result = getDispatcher()->vkEnumeratePhysicalDeviceGroups( static_cast<VkInstance>( m_instance ), &physicalDeviceGroupCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && physicalDeviceGroupCount )
+      {
+        physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
+        result = getDispatcher()->vkEnumeratePhysicalDeviceGroups( static_cast<VkInstance>( m_instance ), &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" );
+    VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
+    if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() )
+    {
+      physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
+    }
+    return physicalDeviceGroupProperties;
+  }
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
+    getDispatcher()->vkGetImageMemoryRequirements2( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+    
+    
+    return memoryRequirements;
+  }
+
+  template <typename X, typename Y, typename... Z>
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
+    getDispatcher()->vkGetImageMemoryRequirements2( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+    
+    
+    return structureChain;
+  }
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
+    getDispatcher()->vkGetBufferMemoryRequirements2( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+    
+    
+    return memoryRequirements;
+  }
+
+  template <typename X, typename Y, typename... Z>
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
+    getDispatcher()->vkGetBufferMemoryRequirements2( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+    
+    
+    return structureChain;
+  }
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2> Device::getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info ) const  
+  {
+
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2> sparseMemoryRequirements;
+    uint32_t sparseMemoryRequirementCount;
+    getDispatcher()->vkGetImageSparseMemoryRequirements2( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr );
+    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    getDispatcher()->vkGetImageSparseMemoryRequirements2( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
+    
+    VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
+    if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
+    {
+      sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    }
+    return sparseMemoryRequirements;
+  }
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 PhysicalDevice::getFeatures2(  ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features;
+    getDispatcher()->vkGetPhysicalDeviceFeatures2( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
+    
+    
+    return features;
+  }
+
+  template <typename X, typename Y, typename... Z>
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getFeatures2(  ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>();
+    getDispatcher()->vkGetPhysicalDeviceFeatures2( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
+    
+    
+    return structureChain;
+  }
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 PhysicalDevice::getProperties2(  ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties;
+    getDispatcher()->vkGetPhysicalDeviceProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
+    
+    
+    return properties;
+  }
+
+  template <typename X, typename Y, typename... Z>
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getProperties2(  ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>();
+    getDispatcher()->vkGetPhysicalDeviceProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
+    
+    
+    return structureChain;
+  }
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2 PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties;
+    getDispatcher()->vkGetPhysicalDeviceFormatProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
+    
+    
+    return formatProperties;
+  }
+
+  template <typename X, typename Y, typename... Z>
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::FormatProperties2>();
+    getDispatcher()->vkGetPhysicalDeviceFormatProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
+    
+    
+    return structureChain;
+  }
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ImageFormatProperties2 PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const  
+  {
+
+
+    
+    VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties;
+    VkResult result = getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ), reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" );
+    
+    return imageFormatProperties;
+  }
+
+  template <typename X, typename Y, typename... Z>
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const  
+  {
+
+
+    
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>();
+    VkResult result = getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ), reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" );
+    
+    return structureChain;
+  }
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> PhysicalDevice::getQueueFamilyProperties2(  ) const  
+  {
+
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties;
+    uint32_t queueFamilyPropertyCount;
+    getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ), &queueFamilyPropertyCount, nullptr );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ), &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
+    
+    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
+    if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
+    {
+      queueFamilyProperties.resize( queueFamilyPropertyCount );
+    }
+    return queueFamilyProperties;
+  }
+
+  template <typename StructureChain>
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::vector<StructureChain> PhysicalDevice::getQueueFamilyProperties2(  ) const  
+  {
+
+
+    
+    std::vector<StructureChain> structureChains;
+    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties;
+    uint32_t queueFamilyPropertyCount;
+    getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ), &queueFamilyPropertyCount, nullptr );
+    structureChains.resize( queueFamilyPropertyCount );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
+    {
+      queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
+    }
+    getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ), &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
+    
+    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
+      if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
+      {
+        structureChains.resize( queueFamilyPropertyCount );
+      }
+      for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
+      {
+        structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
+      }
+    return structureChains;
+  }
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 PhysicalDevice::getMemoryProperties2(  ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties;
+    getDispatcher()->vkGetPhysicalDeviceMemoryProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
+    
+    
+    return memoryProperties;
+  }
+
+  template <typename X, typename Y, typename... Z>
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getMemoryProperties2(  ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>();
+    getDispatcher()->vkGetPhysicalDeviceMemoryProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
+    
+    
+    return structureChain;
+  }
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2> PhysicalDevice::getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo ) const  
+  {
+
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2> properties;
+    uint32_t propertyCount;
+    getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, nullptr );
+    properties.resize( propertyCount );
+    getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
+    
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    if ( propertyCount < properties.size() )
+    {
+      properties.resize( propertyCount );
+    }
+    return properties;
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandPool::trim( VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkTrimCommandPool( static_cast<VkDevice>( m_device ), static_cast<VkCommandPool>( m_commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
+    
+    
+    
+  }
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Queue Device::getQueue2( VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 const & queueInfo ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::Queue( *this, queueInfo );
+  }
+#endif
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SamplerYcbcrConversion Device::createSamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::SamplerYcbcrConversion( *this, createInfo, allocator );
+  }
+#endif
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate Device::createDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate( *this, createInfo, allocator );
+  }
+#endif
+
+  template <typename DataType>
+   VULKAN_HPP_INLINE void DescriptorSet::updateWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, DataType const & data ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkUpdateDescriptorSetWithTemplate( static_cast<VkDevice>( m_device ), static_cast<VkDescriptorSet>( m_descriptorSet ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const void *>( &data ) );
+    
+    
+    
+  }
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties PhysicalDevice::getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties;
+    getDispatcher()->vkGetPhysicalDeviceExternalBufferProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( &externalBufferInfo ), reinterpret_cast<VkExternalBufferProperties *>( &externalBufferProperties ) );
+    
+    
+    return externalBufferProperties;
+  }
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties PhysicalDevice::getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties;
+    getDispatcher()->vkGetPhysicalDeviceExternalFenceProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( &externalFenceInfo ), reinterpret_cast<VkExternalFenceProperties *>( &externalFenceProperties ) );
+    
+    
+    return externalFenceProperties;
+  }
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties PhysicalDevice::getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties;
+    getDispatcher()->vkGetPhysicalDeviceExternalSemaphoreProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( &externalSemaphoreInfo ), reinterpret_cast<VkExternalSemaphoreProperties *>( &externalSemaphoreProperties ) );
+    
+    
+    return externalSemaphoreProperties;
+  }
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support;
+    getDispatcher()->vkGetDescriptorSetLayoutSupport( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
+    
+    
+    return support;
+  }
+
+  template <typename X, typename Y, typename... Z>
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support = structureChain.template get<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>();
+    getDispatcher()->vkGetDescriptorSetLayoutSupport( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
+    
+    
+    return structureChain;
+  }
+
+  //=== VK_VERSION_1_2 ===
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdDrawIndirectCount( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdDrawIndexedIndirectCount( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
+    
+    
+    
+  }
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::RenderPass Device::createRenderPass2( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::RenderPass( *this, createInfo, allocator );
+  }
+#endif
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdBeginRenderPass2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdNextSubpass2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdEndRenderPass2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void QueryPool::reset( uint32_t firstQuery, uint32_t queryCount ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkResetQueryPool( static_cast<VkDevice>( m_device ), static_cast<VkQueryPool>( m_queryPool ), firstQuery, queryCount );
+    
+    
+    
+  }
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE uint64_t Semaphore::getCounterValue(  ) const  
+  {
+
+
+    
+    uint64_t value;
+    VkResult result = getDispatcher()->vkGetSemaphoreCounterValue( static_cast<VkDevice>( m_device ), static_cast<VkSemaphore>( m_semaphore ), &value );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Semaphore::getCounterValue" );
+    
+    return value;
+  }
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, uint64_t timeout ) const  
+  {
+
+
+    
+    
+    VkResult result = getDispatcher()->vkWaitSemaphores( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreWaitInfo *>( &waitInfo ), timeout );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphores", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } );
+    
+    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+  }
+
+  
+   VULKAN_HPP_INLINE void Device::signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo ) const  
+  {
+
+
+    
+    
+    VkResult result = getDispatcher()->vkSignalSemaphore( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreSignalInfo *>( &signalInfo ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphore" );
+    
+    
+  }
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress Device::getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    VkDeviceAddress result = getDispatcher()->vkGetBufferDeviceAddress( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
+    
+    
+    return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result );
+  }
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    uint64_t result = getDispatcher()->vkGetBufferOpaqueCaptureAddress( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
+    
+    
+    return result;
+  }
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    uint64_t result = getDispatcher()->vkGetDeviceMemoryOpaqueCaptureAddress( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( &info ) );
+    
+    
+    return result;
+  }
+
+  //=== VK_VERSION_1_3 ===
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties> PhysicalDevice::getToolProperties(  ) const  
+  {
+
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties> toolProperties;
+    uint32_t toolCount;
+    VkResult result;
+    do
+    {
+      result = getDispatcher()->vkGetPhysicalDeviceToolProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ), &toolCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && toolCount )
+      {
+        toolProperties.resize( toolCount );
+        result = getDispatcher()->vkGetPhysicalDeviceToolProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ), &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolProperties" );
+    VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() );
+    if ( toolCount < toolProperties.size() )
+    {
+      toolProperties.resize( toolCount );
+    }
+    return toolProperties;
+  }
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::PrivateDataSlot Device::createPrivateDataSlot( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::PrivateDataSlot( *this, createInfo, allocator );
+  }
+#endif
+
+  
+   VULKAN_HPP_INLINE void Device::setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, uint64_t data ) const  
+  {
+
+
+    
+    
+    VkResult result = getDispatcher()->vkSetPrivateData( static_cast<VkDevice>( m_device ), static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateData" );
+    
+    
+  }
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE uint64_t Device::getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    uint64_t data;
+    getDispatcher()->vkGetPrivateData( static_cast<VkDevice>( m_device ), static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), &data );
+    
+    
+    return data;
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setEvent2( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdSetEvent2( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::resetEvent2( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdResetEvent2( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags2>( stageMask ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::waitEvents2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfo> const & dependencyInfos ) const  
+  {
+
+    if ( events.size() != dependencyInfos.size() )
+  {
+    #ifndef VULKAN_HPP_NO_EXCEPTIONS
+    throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::waitEvents2: events.size() != dependencyInfos.size()" );
+    #else
+    LOG(FATAL) << VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::waitEvents2: events.size() != dependencyInfos.size()";
+    #endif
+  }
+
+    
+    
+    getDispatcher()->vkCmdWaitEvents2( static_cast<VkCommandBuffer>( m_commandBuffer ), events.size(), reinterpret_cast<const VkEvent *>( events.data() ), reinterpret_cast<const VkDependencyInfo *>( dependencyInfos.data() ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdPipelineBarrier2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp2( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdWriteTimestamp2( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineStageFlags2>( stage ), static_cast<VkQueryPool>( queryPool ), query );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void Queue::submit2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2> const & submits, VULKAN_HPP_NAMESPACE::Fence fence ) const  
+  {
+
+
+    
+    
+    VkResult result = getDispatcher()->vkQueueSubmit2( static_cast<VkQueue>( m_queue ), submits.size(), reinterpret_cast<const VkSubmitInfo2 *>( submits.data() ), static_cast<VkFence>( fence ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2" );
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdCopyBuffer2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyBufferInfo2 *>( &copyBufferInfo ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdCopyImage2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyImageInfo2 *>( &copyImageInfo ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdCopyBufferToImage2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyBufferToImageInfo2 *>( &copyBufferToImageInfo ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdCopyImageToBuffer2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyImageToBufferInfo2 *>( &copyImageToBufferInfo ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::blitImage2( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdBlitImage2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkBlitImageInfo2 *>( &blitImageInfo ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::resolveImage2( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdResolveImage2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkResolveImageInfo2 *>( &resolveImageInfo ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::beginRendering( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdBeginRendering( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkRenderingInfo *>( &renderingInfo ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::endRendering(  ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdEndRendering( static_cast<VkCommandBuffer>( m_commandBuffer ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdSetCullMode( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCullModeFlags>( cullMode ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdSetFrontFace( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkFrontFace>( frontFace ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdSetPrimitiveTopology( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPrimitiveTopology>( primitiveTopology ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdSetViewportWithCount( static_cast<VkCommandBuffer>( m_commandBuffer ), viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdSetScissorWithCount( static_cast<VkCommandBuffer>( m_commandBuffer ), scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2( uint32_t firstBinding, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides ) const  
+  {
+
+    if ( buffers.size() != offsets.size() )
+  {
+    #ifndef VULKAN_HPP_NO_EXCEPTIONS
+    throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != offsets.size()" );
+    #else
+    LOG(FATAL) << VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != offsets.size()";
+    #endif
+  }
+    if ( !sizes.empty() && buffers.size() != sizes.size() )
+  {
+    #ifndef VULKAN_HPP_NO_EXCEPTIONS
+    throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != sizes.size()" );
+    #else
+    LOG(FATAL) << VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != sizes.size()";
+    #endif
+  }
+    if ( !strides.empty() && buffers.size() != strides.size() )
+  {
+    #ifndef VULKAN_HPP_NO_EXCEPTIONS
+    throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != strides.size()" );
+    #else
+    LOG(FATAL) << VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != strides.size()";
+    #endif
+  }
+
+    
+    
+    getDispatcher()->vkCmdBindVertexBuffers2( static_cast<VkCommandBuffer>( m_commandBuffer ), firstBinding, buffers.size(), reinterpret_cast<const VkBuffer *>( buffers.data() ), reinterpret_cast<const VkDeviceSize *>( offsets.data() ), reinterpret_cast<const VkDeviceSize *>( sizes.data() ), reinterpret_cast<const VkDeviceSize *>( strides.data() ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdSetDepthTestEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthTestEnable ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdSetDepthWriteEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthWriteEnable ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdSetDepthCompareOp( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCompareOp>( depthCompareOp ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdSetDepthBoundsTestEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthBoundsTestEnable ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdSetStencilTestEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( stencilTestEnable ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setStencilOp( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, VULKAN_HPP_NAMESPACE::StencilOp failOp, VULKAN_HPP_NAMESPACE::StencilOp passOp, VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, VULKAN_HPP_NAMESPACE::CompareOp compareOp ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdSetStencilOp( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkStencilFaceFlags>( faceMask ), static_cast<VkStencilOp>( failOp ), static_cast<VkStencilOp>( passOp ), static_cast<VkStencilOp>( depthFailOp ), static_cast<VkCompareOp>( compareOp ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdSetRasterizerDiscardEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( rasterizerDiscardEnable ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdSetDepthBiasEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthBiasEnable ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    
+    getDispatcher()->vkCmdSetPrimitiveRestartEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( primitiveRestartEnable ) );
+    
+    
+    
+  }
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
+    getDispatcher()->vkGetDeviceBufferMemoryRequirements( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+    
+    
+    return memoryRequirements;
+  }
+
+  template <typename X, typename Y, typename... Z>
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
+    getDispatcher()->vkGetDeviceBufferMemoryRequirements( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+    
+    
+    return structureChain;
+  }
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
+    getDispatcher()->vkGetDeviceImageMemoryRequirements( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+    
+    
+    return memoryRequirements;
+  }
+
+  template <typename X, typename Y, typename... Z>
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const  VULKAN_HPP_NOEXCEPT 
+  {
+
+
+    
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
+    getDispatcher()->vkGetDeviceImageMemoryRequirements( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+    
+    
+    return structureChain;
+  }
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2> Device::getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const  
+  {
+
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2> sparseMemoryRequirements;
+    uint32_t sparseMemoryRequirementCount;
+    getDispatcher()->vkGetDeviceImageSparseMemoryRequirements( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, nullptr );
+    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    getDispatcher()->vkGetDeviceImageSparseMemoryRequirements( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
+    
+    VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
+    if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
+    {
+      sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    }
+    return sparseMemoryRequirements;
+  }
+
+  //=== VK_KHR_surface ===
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceSupportKHR && "Function <vkGetPhysicalDeviceSurfaceSupportKHR> needs extension <VK_KHR_surface> enabled!" );
+
+    
+    VULKAN_HPP_NAMESPACE::Bool32 supported;
+    VkResult result = getDispatcher()->vkGetPhysicalDeviceSurfaceSupportKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkBool32 *>( &supported ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceSupportKHR" );
+    
+    return supported;
+  }
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilitiesKHR && "Function <vkGetPhysicalDeviceSurfaceCapabilitiesKHR> needs extension <VK_KHR_surface> enabled!" );
+
+    
+    VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities;
+    VkResult result = getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilitiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilitiesKHR *>( &surfaceCapabilities ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilitiesKHR" );
+    
+    return surfaceCapabilities;
+  }
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR> PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceFormatsKHR && "Function <vkGetPhysicalDeviceSurfaceFormatsKHR> needs extension <VK_KHR_surface> enabled!" );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR> surfaceFormats;
+    uint32_t surfaceFormatCount;
+    VkResult result;
+    do
+    {
+      result = getDispatcher()->vkGetPhysicalDeviceSurfaceFormatsKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && surfaceFormatCount )
+      {
+        surfaceFormats.resize( surfaceFormatCount );
+        result = getDispatcher()->vkGetPhysicalDeviceSurfaceFormatsKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR *>( surfaceFormats.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" );
+    VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
+    if ( surfaceFormatCount < surfaceFormats.size() )
+    {
+      surfaceFormats.resize( surfaceFormatCount );
+    }
+    return surfaceFormats;
+  }
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR> PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfacePresentModesKHR && "Function <vkGetPhysicalDeviceSurfacePresentModesKHR> needs extension <VK_KHR_surface> enabled!" );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR> presentModes;
+    uint32_t presentModeCount;
+    VkResult result;
+    do
+    {
+      result = getDispatcher()->vkGetPhysicalDeviceSurfacePresentModesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkSurfaceKHR>( surface ), &presentModeCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && presentModeCount )
+      {
+        presentModes.resize( presentModeCount );
+        result = getDispatcher()->vkGetPhysicalDeviceSurfacePresentModesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkSurfaceKHR>( surface ), &presentModeCount, reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" );
+    VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
+    if ( presentModeCount < presentModes.size() )
+    {
+      presentModes.resize( presentModeCount );
+    }
+    return presentModes;
+  }
+
+  //=== VK_KHR_swapchain ===
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR Device::createSwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const  // RAII factory: failure is reported by the SwapchainKHR constructor (exceptions-enabled builds only)
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR( *this, createInfo, allocator );
+  }
+#endif
+
+  // Enumerates this swapchain's presentable images via vkGetSwapchainImagesKHR (two-call idiom).
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::Image> SwapchainKHR::getImages(  ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetSwapchainImagesKHR && "Function <vkGetSwapchainImagesKHR> needs extension <VK_KHR_swapchain> enabled!" );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::Image> swapchainImages;
+    uint32_t swapchainImageCount;
+    VkResult result;
+    do  // repeat while VK_INCOMPLETE: the image count may change between the two calls
+    {
+      result = getDispatcher()->vkGetSwapchainImagesKHR( static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( m_swapchain ), &swapchainImageCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && swapchainImageCount )
+      {
+        swapchainImages.resize( swapchainImageCount );
+        result = getDispatcher()->vkGetSwapchainImagesKHR( static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( m_swapchain ), &swapchainImageCount, reinterpret_cast<VkImage *>( swapchainImages.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::getImages" );
+    VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() );
+    if ( swapchainImageCount < swapchainImages.size() )  // shrink if the driver reported fewer than allocated
+    {
+      swapchainImages.resize( swapchainImageCount );
+    }
+    return swapchainImages;
+  }
+
+  // Acquires the next presentable image; eTimeout/eNotReady/eSuboptimalKHR are returned to the caller, not treated as errors.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::pair<VULKAN_HPP_NAMESPACE::Result, uint32_t> SwapchainKHR::acquireNextImage( uint64_t timeout, VULKAN_HPP_NAMESPACE::Semaphore semaphore, VULKAN_HPP_NAMESPACE::Fence fence ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireNextImageKHR && "Function <vkAcquireNextImageKHR> needs extension <VK_KHR_swapchain> enabled!" );
+
+    
+    uint32_t imageIndex;
+    VkResult result = getDispatcher()->vkAcquireNextImageKHR( static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( m_swapchain ), timeout, static_cast<VkSemaphore>( semaphore ), static_cast<VkFence>( fence ), &imageIndex );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::acquireNextImage", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout, VULKAN_HPP_NAMESPACE::Result::eNotReady, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
+    
+    return std::make_pair( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), imageIndex );
+  }
+
+  // Submits a present request for this queue; eSuboptimalKHR is accepted and passed through as a success code.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Queue::presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR & presentInfo ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkQueuePresentKHR && "Function <vkQueuePresentKHR> needs extension <VK_KHR_swapchain> enabled!" );
+
+    
+    
+    VkResult result = getDispatcher()->vkQueuePresentKHR( static_cast<VkQueue>( m_queue ), reinterpret_cast<const VkPresentInfoKHR *>( &presentInfo ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::presentKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
+    
+    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+  }
+
+  // Queries this device's device-group present capabilities (vkGetDeviceGroupPresentCapabilitiesKHR).
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR Device::getGroupPresentCapabilitiesKHR(  ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceGroupPresentCapabilitiesKHR && "Function <vkGetDeviceGroupPresentCapabilitiesKHR> needs extension <VK_KHR_swapchain> enabled!" );
+
+    
+    VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR deviceGroupPresentCapabilities;
+    VkResult result = getDispatcher()->vkGetDeviceGroupPresentCapabilitiesKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR *>( &deviceGroupPresentCapabilities ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupPresentCapabilitiesKHR" );
+    
+    return deviceGroupPresentCapabilities;
+  }
+
+  // Queries the device-group present modes supported for the given surface.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceGroupSurfacePresentModesKHR && "Function <vkGetDeviceGroupSurfacePresentModesKHR> needs extension <VK_KHR_swapchain> enabled!" );
+
+    
+    VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes;
+    VkResult result = getDispatcher()->vkGetDeviceGroupSurfacePresentModesKHR( static_cast<VkDevice>( m_device ), static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( &modes ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModesKHR" );
+    
+    return modes;
+  }
+
+  // Enumerates the presentation rectangles for a surface on this physical device (two-call idiom).
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::Rect2D> PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDevicePresentRectanglesKHR && "Function <vkGetPhysicalDevicePresentRectanglesKHR> needs extension <VK_KHR_swapchain> enabled!" );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::Rect2D> rects;
+    uint32_t rectCount;
+    VkResult result;
+    do  // repeat while VK_INCOMPLETE: the rectangle count may change between the two calls
+    {
+      result = getDispatcher()->vkGetPhysicalDevicePresentRectanglesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkSurfaceKHR>( surface ), &rectCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && rectCount )
+      {
+        rects.resize( rectCount );
+        result = getDispatcher()->vkGetPhysicalDevicePresentRectanglesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkSurfaceKHR>( surface ), &rectCount, reinterpret_cast<VkRect2D *>( rects.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" );
+    VULKAN_HPP_ASSERT( rectCount <= rects.size() );
+    if ( rectCount < rects.size() )
+    {
+      rects.resize( rectCount );
+    }
+    return rects;
+  }
+
+  // Extended acquire: timeout, semaphore, fence and deviceMask travel inside AcquireNextImageInfoKHR.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::pair<VULKAN_HPP_NAMESPACE::Result, uint32_t> Device::acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR & acquireInfo ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireNextImage2KHR && "Function <vkAcquireNextImage2KHR> needs extension <VK_KHR_swapchain> enabled!" );
+
+    
+    uint32_t imageIndex;
+    VkResult result = getDispatcher()->vkAcquireNextImage2KHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkAcquireNextImageInfoKHR *>( &acquireInfo ), &imageIndex );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImage2KHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout, VULKAN_HPP_NAMESPACE::Result::eNotReady, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
+    
+    return std::make_pair( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), imageIndex );
+  }
+
+  //=== VK_KHR_display ===
+
+  // Enumerates the display devices attached to this physical device (two-call idiom).
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR> PhysicalDevice::getDisplayPropertiesKHR(  ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceDisplayPropertiesKHR && "Function <vkGetPhysicalDeviceDisplayPropertiesKHR> needs extension <VK_KHR_display> enabled!" );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR> properties;
+    uint32_t propertyCount;
+    VkResult result;
+    do  // repeat while VK_INCOMPLETE: the property count may change between the two calls
+    {
+      result = getDispatcher()->vkGetPhysicalDeviceDisplayPropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = getDispatcher()->vkGetPhysicalDeviceDisplayPropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( properties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" );
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    if ( propertyCount < properties.size() )
+    {
+      properties.resize( propertyCount );
+    }
+    return properties;
+  }
+
+  // Enumerates the display planes of this physical device (two-call idiom).
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR> PhysicalDevice::getDisplayPlanePropertiesKHR(  ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceDisplayPlanePropertiesKHR && "Function <vkGetPhysicalDeviceDisplayPlanePropertiesKHR> needs extension <VK_KHR_display> enabled!" );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR> properties;
+    uint32_t propertyCount;
+    VkResult result;
+    do
+    {
+      result = getDispatcher()->vkGetPhysicalDeviceDisplayPlanePropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = getDispatcher()->vkGetPhysicalDeviceDisplayPlanePropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( properties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" );
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    if ( propertyCount < properties.size() )
+    {
+      properties.resize( propertyCount );
+    }
+    return properties;
+  }
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_RAII_NAMESPACE::DisplayKHR> PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex ) const  // RAII: enumerates the displays usable by the given plane
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::DisplayKHRs( *this, planeIndex );
+  }
+#endif
+
+  // Enumerates the display modes of this display (two-call idiom).
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR> DisplayKHR::getModeProperties(  ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetDisplayModePropertiesKHR && "Function <vkGetDisplayModePropertiesKHR> needs extension <VK_KHR_display> enabled!" );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR> properties;
+    uint32_t propertyCount;
+    VkResult result;
+    do
+    {
+      result = getDispatcher()->vkGetDisplayModePropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkDisplayKHR>( m_display ), &propertyCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = getDispatcher()->vkGetDisplayModePropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkDisplayKHR>( m_display ), &propertyCount, reinterpret_cast<VkDisplayModePropertiesKHR *>( properties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::DisplayKHR::getModeProperties" );
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    if ( propertyCount < properties.size() )
+    {
+      properties.resize( propertyCount );
+    }
+    return properties;
+  }
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DisplayModeKHR DisplayKHR::createMode( VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const  // RAII factory for a custom display mode
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::DisplayModeKHR( *this, createInfo, allocator );
+  }
+#endif
+
+  // Queries the capabilities of a display plane when it uses this display mode.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR DisplayModeKHR::getDisplayPlaneCapabilities( uint32_t planeIndex ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetDisplayPlaneCapabilitiesKHR && "Function <vkGetDisplayPlaneCapabilitiesKHR> needs extension <VK_KHR_display> enabled!" );
+
+    
+    VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities;
+    VkResult result = getDispatcher()->vkGetDisplayPlaneCapabilitiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkDisplayModeKHR>( m_displayModeKHR ), planeIndex, reinterpret_cast<VkDisplayPlaneCapabilitiesKHR *>( &capabilities ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::DisplayModeKHR::getDisplayPlaneCapabilities" );
+    
+    return capabilities;
+  }
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR Instance::createDisplayPlaneSurfaceKHR( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const  // RAII factory for a surface backed by a display plane
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator );
+  }
+#endif
+
+  //=== VK_KHR_display_swapchain ===
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR> Device::createSharedSwapchainsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const  // RAII: one swapchain per element of createInfos, created in a single call
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::SwapchainKHRs( *this, createInfos, allocator );
+  }
+#endif
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR Device::createSharedSwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const  // single-swapchain convenience overload
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR( *this, createInfo, allocator );
+  }
+#endif
+
+#if defined( VK_USE_PLATFORM_XLIB_KHR )
+  //=== VK_KHR_xlib_surface ===
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR Instance::createXlibSurfaceKHR( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const  // RAII factory for an Xlib surface
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator );
+  }
+#endif
+
+  // Asks whether the queue family can present to Xlib windows using the given visual.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display & dpy, VisualID visualID ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceXlibPresentationSupportKHR && "Function <vkGetPhysicalDeviceXlibPresentationSupportKHR> needs extension <VK_KHR_xlib_surface> enabled!" );
+
+    
+    
+    VkBool32 result = getDispatcher()->vkGetPhysicalDeviceXlibPresentationSupportKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, &dpy, visualID );
+    
+    
+    return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
+  }
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+#if defined( VK_USE_PLATFORM_XCB_KHR )
+  //=== VK_KHR_xcb_surface ===
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR Instance::createXcbSurfaceKHR( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const  // RAII factory for an XCB surface
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator );
+  }
+#endif
+
+  // Asks whether the queue family can present to XCB windows using the given visual.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t & connection, xcb_visualid_t visual_id ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceXcbPresentationSupportKHR && "Function <vkGetPhysicalDeviceXcbPresentationSupportKHR> needs extension <VK_KHR_xcb_surface> enabled!" );
+
+    
+    
+    VkBool32 result = getDispatcher()->vkGetPhysicalDeviceXcbPresentationSupportKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, &connection, visual_id );
+    
+    
+    return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
+  }
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
+  //=== VK_KHR_wayland_surface ===
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR Instance::createWaylandSurfaceKHR( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const  // RAII factory for a Wayland surface
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator );
+  }
+#endif
+
+  // Asks whether the queue family can present to the given Wayland display.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display & display ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceWaylandPresentationSupportKHR && "Function <vkGetPhysicalDeviceWaylandPresentationSupportKHR> needs extension <VK_KHR_wayland_surface> enabled!" );
+
+    
+    
+    VkBool32 result = getDispatcher()->vkGetPhysicalDeviceWaylandPresentationSupportKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, &display );
+    
+    
+    return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
+  }
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  //=== VK_KHR_android_surface ===
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR Instance::createAndroidSurfaceKHR( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const  // RAII factory for an Android (ANativeWindow) surface
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator );
+  }
+#endif
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_win32_surface ===
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR Instance::createWin32SurfaceKHR( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const  // RAII factory for a Win32 surface
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator );
+  }
+#endif
+
+  // Asks whether the queue family supports presentation on Win32 (no surface argument needed).
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 PhysicalDevice::getWin32PresentationSupportKHR( uint32_t queueFamilyIndex ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceWin32PresentationSupportKHR && "Function <vkGetPhysicalDeviceWin32PresentationSupportKHR> needs extension <VK_KHR_win32_surface> enabled!" );
+
+    
+    
+    VkBool32 result = getDispatcher()->vkGetPhysicalDeviceWin32PresentationSupportKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex );
+    
+    
+    return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
+  }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_EXT_debug_report ===
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DebugReportCallbackEXT Instance::createDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const  // RAII factory for a debug-report callback
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::DebugReportCallbackEXT( *this, createInfo, allocator );
+  }
+#endif
+
+  // Injects a message into this instance's debug-report callbacks (strings passed as C strings).
+   VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_, uint64_t object, size_t location, int32_t messageCode, const std::string & layerPrefix, const std::string & message ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkDebugReportMessageEXT && "Function <vkDebugReportMessageEXT> needs extension <VK_EXT_debug_report> enabled!" );
+
+    
+    
+    getDispatcher()->vkDebugReportMessageEXT( static_cast<VkInstance>( m_instance ), static_cast<VkDebugReportFlagsEXT>( flags ), static_cast<VkDebugReportObjectTypeEXT>( objectType_ ), object, location, messageCode, layerPrefix.c_str(), message.c_str() );
+    
+    
+    
+  }
+
+  //=== VK_EXT_debug_marker ===
+
+  // Attaches an arbitrary tag blob to a Vulkan object for debugging tools.
+   VULKAN_HPP_INLINE void Device::debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT & tagInfo ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkDebugMarkerSetObjectTagEXT && "Function <vkDebugMarkerSetObjectTagEXT> needs extension <VK_EXT_debug_marker> enabled!" );
+
+    
+    
+    VkResult result = getDispatcher()->vkDebugMarkerSetObjectTagEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT *>( &tagInfo ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectTagEXT" );
+    
+    
+  }
+
+  // Gives a Vulkan object a human-readable name for debugging tools.
+   VULKAN_HPP_INLINE void Device::debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT & nameInfo ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkDebugMarkerSetObjectNameEXT && "Function <vkDebugMarkerSetObjectNameEXT> needs extension <VK_EXT_debug_marker> enabled!" );
+
+    
+    
+    VkResult result = getDispatcher()->vkDebugMarkerSetObjectNameEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT *>( &nameInfo ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectNameEXT" );
+    
+    
+  }
+
+  // Opens a debug marker region in this command buffer.
+   VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDebugMarkerBeginEXT && "Function <vkCmdDebugMarkerBeginEXT> needs extension <VK_EXT_debug_marker> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdDebugMarkerBeginEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( &markerInfo ) );
+    
+    
+    
+  }
+
+  // Closes the most recently opened debug marker region.
+   VULKAN_HPP_INLINE void CommandBuffer::debugMarkerEndEXT(  ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDebugMarkerEndEXT && "Function <vkCmdDebugMarkerEndEXT> needs extension <VK_EXT_debug_marker> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdDebugMarkerEndEXT( static_cast<VkCommandBuffer>( m_commandBuffer ) );
+    
+    
+    
+  }
+
+  // Inserts a standalone debug marker label into the command stream.
+   VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDebugMarkerInsertEXT && "Function <vkCmdDebugMarkerInsertEXT> needs extension <VK_EXT_debug_marker> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdDebugMarkerInsertEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( &markerInfo ) );
+    
+    
+    
+  }
+
+  //=== VK_KHR_video_queue ===
+
+  // Queries the video coding capabilities for a video profile.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceVideoCapabilitiesKHR && "Function <vkGetPhysicalDeviceVideoCapabilitiesKHR> needs extension <VK_KHR_video_queue> enabled!" );
+
+    
+    VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR capabilities;
+    VkResult result = getDispatcher()->vkGetPhysicalDeviceVideoCapabilitiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<const VkVideoProfileInfoKHR *>( &videoProfile ), reinterpret_cast<VkVideoCapabilitiesKHR *>( &capabilities ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" );
+    
+    return capabilities;
+  }
+
+  template <typename X, typename Y, typename... Z>  // StructureChain overload: fills pNext-extended capability structures as well
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceVideoCapabilitiesKHR && "Function <vkGetPhysicalDeviceVideoCapabilitiesKHR> needs extension <VK_KHR_video_queue> enabled!" );
+
+    
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR & capabilities = structureChain.template get<VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR>();
+    VkResult result = getDispatcher()->vkGetPhysicalDeviceVideoCapabilitiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<const VkVideoProfileInfoKHR *>( &videoProfile ), reinterpret_cast<VkVideoCapabilitiesKHR *>( &capabilities ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" );
+    
+    return structureChain;
+  }
+
+  // Enumerates the image formats usable with the given video profile (two-call idiom).
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR> PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceVideoFormatPropertiesKHR && "Function <vkGetPhysicalDeviceVideoFormatPropertiesKHR> needs extension <VK_KHR_video_queue> enabled!" );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR> videoFormatProperties;
+    uint32_t videoFormatPropertyCount;
+    VkResult result;
+    do
+    {
+      result = getDispatcher()->vkGetPhysicalDeviceVideoFormatPropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ), &videoFormatPropertyCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && videoFormatPropertyCount )
+      {
+        videoFormatProperties.resize( videoFormatPropertyCount );
+        result = getDispatcher()->vkGetPhysicalDeviceVideoFormatPropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ), &videoFormatPropertyCount, reinterpret_cast<VkVideoFormatPropertiesKHR *>( videoFormatProperties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" );
+    VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() );
+    if ( videoFormatPropertyCount < videoFormatProperties.size() )
+    {
+      videoFormatProperties.resize( videoFormatPropertyCount );
+    }
+    return videoFormatProperties;
+  }
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::VideoSessionKHR Device::createVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const  // RAII factory for a video session
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::VideoSessionKHR( *this, createInfo, allocator );
+  }
+#endif
+
+  // Queries the memory bindings required by this video session (two-call idiom).
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR> VideoSessionKHR::getMemoryRequirements(  ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetVideoSessionMemoryRequirementsKHR && "Function <vkGetVideoSessionMemoryRequirementsKHR> needs extension <VK_KHR_video_queue> enabled!" );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR> memoryRequirements;
+    uint32_t memoryRequirementsCount;
+    VkResult result;
+    do
+    {
+      result = getDispatcher()->vkGetVideoSessionMemoryRequirementsKHR( static_cast<VkDevice>( m_device ), static_cast<VkVideoSessionKHR>( m_videoSession ), &memoryRequirementsCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && memoryRequirementsCount )
+      {
+        memoryRequirements.resize( memoryRequirementsCount );
+        result = getDispatcher()->vkGetVideoSessionMemoryRequirementsKHR( static_cast<VkDevice>( m_device ), static_cast<VkVideoSessionKHR>( m_videoSession ), &memoryRequirementsCount, reinterpret_cast<VkVideoSessionMemoryRequirementsKHR *>( memoryRequirements.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    // NOTE(review): unlike the sibling enumerators there is no resultCheck here; presumably the generator omits it because this command defines no failure codes — confirm against the spec before "fixing".
+    VULKAN_HPP_ASSERT( memoryRequirementsCount <= memoryRequirements.size() );
+    if ( memoryRequirementsCount < memoryRequirements.size() )
+    {
+      memoryRequirements.resize( memoryRequirementsCount );
+    }
+    return memoryRequirements;
+  }
+
+  // Binds device memory to this video session's memory-requirement slots.
+   VULKAN_HPP_INLINE void VideoSessionKHR::bindMemory( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR> const & bindSessionMemoryInfos ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkBindVideoSessionMemoryKHR && "Function <vkBindVideoSessionMemoryKHR> needs extension <VK_KHR_video_queue> enabled!" );
+
+    
+    
+    VkResult result = getDispatcher()->vkBindVideoSessionMemoryKHR( static_cast<VkDevice>( m_device ), static_cast<VkVideoSessionKHR>( m_videoSession ), bindSessionMemoryInfos.size(), reinterpret_cast<const VkBindVideoSessionMemoryInfoKHR *>( bindSessionMemoryInfos.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::VideoSessionKHR::bindMemory" );
+    
+    
+  }
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::VideoSessionParametersKHR Device::createVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const  // RAII factory for a video session parameter set
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::VideoSessionParametersKHR( *this, createInfo, allocator );
+  }
+#endif
+
+  // Applies an incremental update to this parameter set.
+   VULKAN_HPP_INLINE void VideoSessionParametersKHR::update( const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR & updateInfo ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkUpdateVideoSessionParametersKHR && "Function <vkUpdateVideoSessionParametersKHR> needs extension <VK_KHR_video_queue> enabled!" );
+
+    
+    
+    VkResult result = getDispatcher()->vkUpdateVideoSessionParametersKHR( static_cast<VkDevice>( m_device ), static_cast<VkVideoSessionParametersKHR>( m_videoSessionParameters ), reinterpret_cast<const VkVideoSessionParametersUpdateInfoKHR *>( &updateInfo ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::VideoSessionParametersKHR::update" );
+    
+    
+  }
+
+  // Begins a video coding scope in this command buffer.
+   VULKAN_HPP_INLINE void CommandBuffer::beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR & beginInfo ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginVideoCodingKHR && "Function <vkCmdBeginVideoCodingKHR> needs extension <VK_KHR_video_queue> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdBeginVideoCodingKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkVideoBeginCodingInfoKHR *>( &beginInfo ) );
+    
+    
+    
+  }
+
+  // Ends the current video coding scope.
+   VULKAN_HPP_INLINE void CommandBuffer::endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR & endCodingInfo ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndVideoCodingKHR && "Function <vkCmdEndVideoCodingKHR> needs extension <VK_KHR_video_queue> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdEndVideoCodingKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkVideoEndCodingInfoKHR *>( &endCodingInfo ) );
+    
+    
+    
+  }
+
+  // Issues a video coding control operation for the bound session.
+   VULKAN_HPP_INLINE void CommandBuffer::controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR & codingControlInfo ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdControlVideoCodingKHR && "Function <vkCmdControlVideoCodingKHR> needs extension <VK_KHR_video_queue> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdControlVideoCodingKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkVideoCodingControlInfoKHR *>( &codingControlInfo ) );
+    
+    
+    
+  }
+
+  //=== VK_KHR_video_decode_queue ===
+
+  // Records a video decode operation into this command buffer.
+   VULKAN_HPP_INLINE void CommandBuffer::decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR & decodeInfo ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDecodeVideoKHR && "Function <vkCmdDecodeVideoKHR> needs extension <VK_KHR_video_decode_queue> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdDecodeVideoKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkVideoDecodeInfoKHR *>( &decodeInfo ) );
+    
+    
+    
+  }
+
+  //=== VK_EXT_transform_feedback ===
+
+  // Binds transform-feedback buffers; offsets must parallel buffers, sizes may be empty.
+   VULKAN_HPP_INLINE void CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindTransformFeedbackBuffersEXT && "Function <vkCmdBindTransformFeedbackBuffersEXT> needs extension <VK_EXT_transform_feedback> enabled!" );
+    if ( buffers.size() != offsets.size() )
+  {
+    #ifndef VULKAN_HPP_NO_EXCEPTIONS
+    throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != offsets.size()" );
+    #else
+    LOG(FATAL) << VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != offsets.size()";
+    #endif
+  }
+    if ( !sizes.empty() && buffers.size() != sizes.size() )  // sizes is optional; when present it must parallel buffers
+  {
+    #ifndef VULKAN_HPP_NO_EXCEPTIONS
+    throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != sizes.size()" );
+    #else
+    LOG(FATAL) << VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != sizes.size()";
+    #endif
+  }
+
+    
+    
+    getDispatcher()->vkCmdBindTransformFeedbackBuffersEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), firstBinding, buffers.size(), reinterpret_cast<const VkBuffer *>( buffers.data() ), reinterpret_cast<const VkDeviceSize *>( offsets.data() ), reinterpret_cast<const VkDeviceSize *>( sizes.data() ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginTransformFeedbackEXT && "Function <vkCmdBeginTransformFeedbackEXT> needs extension <VK_EXT_transform_feedback> enabled!" );
+    if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() )
+  {
+    #ifndef VULKAN_HPP_NO_EXCEPTIONS
+    throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::beginTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" );
+    #else
+    LOG(FATAL) << VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::beginTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()";
+    #endif
+  }
+
+    
+    
+    getDispatcher()->vkCmdBeginTransformFeedbackEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), firstCounterBuffer, counterBuffers.size(), reinterpret_cast<const VkBuffer *>( counterBuffers.data() ), reinterpret_cast<const VkDeviceSize *>( counterBufferOffsets.data() ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndTransformFeedbackEXT && "Function <vkCmdEndTransformFeedbackEXT> needs extension <VK_EXT_transform_feedback> enabled!" );
+    if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() )
+  {
+    #ifndef VULKAN_HPP_NO_EXCEPTIONS
+    throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::endTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" );
+    #else
+    LOG(FATAL) << VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::endTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()";
+    #endif
+  }
+
+    
+    
+    getDispatcher()->vkCmdEndTransformFeedbackEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), firstCounterBuffer, counterBuffers.size(), reinterpret_cast<const VkBuffer *>( counterBuffers.data() ), reinterpret_cast<const VkDeviceSize *>( counterBufferOffsets.data() ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, uint32_t index ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginQueryIndexedEXT && "Function <vkCmdBeginQueryIndexedEXT> needs extension <VK_EXT_transform_feedback> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdBeginQueryIndexedEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ), index );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndQueryIndexedEXT && "Function <vkCmdEndQueryIndexedEXT> needs extension <VK_EXT_transform_feedback> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdEndQueryIndexedEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkQueryPool>( queryPool ), query, index );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::drawIndirectByteCountEXT( uint32_t instanceCount, uint32_t firstInstance, VULKAN_HPP_NAMESPACE::Buffer counterBuffer, VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawIndirectByteCountEXT && "Function <vkCmdDrawIndirectByteCountEXT> needs extension <VK_EXT_transform_feedback> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdDrawIndirectByteCountEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), instanceCount, firstInstance, static_cast<VkBuffer>( counterBuffer ), static_cast<VkDeviceSize>( counterBufferOffset ), counterOffset, vertexStride );
+    
+    
+    
+  }
+
+  //=== VK_NVX_binary_import ===
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  // RAII factory: constructs a CuModuleNVX owned by this Device (exceptions-only build).
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::CuModuleNVX Device::createCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::CuModuleNVX( *this, createInfo, allocator );
+  }
+#endif
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  // RAII factory: constructs a CuFunctionNVX owned by this Device (exceptions-only build).
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::CuFunctionNVX Device::createCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::CuFunctionNVX( *this, createInfo, allocator );
+  }
+#endif
+
+  
+  // Launches a CUDA kernel described by launchInfo (vkCmdCuLaunchKernelNVX).
+   VULKAN_HPP_INLINE void CommandBuffer::cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX & launchInfo ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCuLaunchKernelNVX && "Function <vkCmdCuLaunchKernelNVX> needs extension <VK_NVX_binary_import> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdCuLaunchKernelNVX( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCuLaunchInfoNVX *>( &launchInfo ) );
+    
+    
+    
+  }
+
+  //=== VK_NVX_image_view_handle ===
+
+  
+  // Returns the opaque 32-bit handle of an image view (vkGetImageViewHandleNVX).
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & info ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageViewHandleNVX && "Function <vkGetImageViewHandleNVX> needs extension <VK_NVX_image_view_handle> enabled!" );
+
+    
+    
+    uint32_t result = getDispatcher()->vkGetImageViewHandleNVX( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImageViewHandleInfoNVX *>( &info ) );
+    
+    
+    return result;
+  }
+
+  
+  // Queries address properties of this image view; throws (via resultCheck) on failure
+  // (vkGetImageViewAddressNVX).
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX ImageView::getAddressNVX(  ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageViewAddressNVX && "Function <vkGetImageViewAddressNVX> needs extension <VK_NVX_image_view_handle> enabled!" );
+
+    
+    VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX properties;
+    VkResult result = getDispatcher()->vkGetImageViewAddressNVX( static_cast<VkDevice>( m_device ), static_cast<VkImageView>( m_imageView ), reinterpret_cast<VkImageViewAddressPropertiesNVX *>( &properties ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::ImageView::getAddressNVX" );
+    
+    return properties;
+  }
+
+  //=== VK_AMD_draw_indirect_count ===
+
+  
+  // Indirect draw with draw count read from countBuffer (vkCmdDrawIndirectCountAMD).
+   VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawIndirectCountAMD && "Function <vkCmdDrawIndirectCountAMD> needs extension <VK_AMD_draw_indirect_count> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdDrawIndirectCountAMD( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
+    
+    
+    
+  }
+
+  
+  // Indexed variant of the indirect-count draw (vkCmdDrawIndexedIndirectCountAMD).
+   VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawIndexedIndirectCountAMD && "Function <vkCmdDrawIndexedIndirectCountAMD> needs extension <VK_AMD_draw_indirect_count> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdDrawIndexedIndirectCountAMD( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
+    
+    
+    
+  }
+
+  //=== VK_AMD_shader_info ===
+
+  
+  // Fetches shader info bytes for one pipeline stage using the standard Vulkan two-call
+  // size-query/fill loop; retries while VK_INCOMPLETE, then trims the vector to the
+  // final size reported by the driver (vkGetShaderInfoAMD).
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::vector<uint8_t> Pipeline::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetShaderInfoAMD && "Function <vkGetShaderInfoAMD> needs extension <VK_AMD_shader_info> enabled!" );
+
+    
+    std::vector<uint8_t> info;
+    size_t infoSize;
+    VkResult result;
+    do
+    {
+      result = getDispatcher()->vkGetShaderInfoAMD( static_cast<VkDevice>( m_device ), static_cast<VkPipeline>( m_pipeline ), static_cast<VkShaderStageFlagBits>( shaderStage ), static_cast<VkShaderInfoTypeAMD>( infoType ), &infoSize, nullptr );
+      if ( ( result == VK_SUCCESS ) && infoSize )
+      {
+        info.resize( infoSize );
+        result = getDispatcher()->vkGetShaderInfoAMD( static_cast<VkDevice>( m_device ), static_cast<VkPipeline>( m_pipeline ), static_cast<VkShaderStageFlagBits>( shaderStage ), static_cast<VkShaderInfoTypeAMD>( infoType ), &infoSize, reinterpret_cast<void *>( info.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getShaderInfoAMD" );
+    VULKAN_HPP_ASSERT( infoSize <= info.size() );
+    if ( infoSize < info.size() )
+    {
+      info.resize( infoSize );
+    }
+    return info;
+  }
+
+  //=== VK_KHR_dynamic_rendering ===
+
+  
+  // Begins a dynamic rendering pass (vkCmdBeginRenderingKHR).
+   VULKAN_HPP_INLINE void CommandBuffer::beginRenderingKHR( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginRenderingKHR && "Function <vkCmdBeginRenderingKHR> needs extension <VK_KHR_dynamic_rendering> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdBeginRenderingKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkRenderingInfo *>( &renderingInfo ) );
+    
+    
+    
+  }
+
+  
+  // Ends the current dynamic rendering pass (vkCmdEndRenderingKHR).
+   VULKAN_HPP_INLINE void CommandBuffer::endRenderingKHR(  ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndRenderingKHR && "Function <vkCmdEndRenderingKHR> needs extension <VK_KHR_dynamic_rendering> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdEndRenderingKHR( static_cast<VkCommandBuffer>( m_commandBuffer ) );
+    
+    
+    
+  }
+
+#if defined( VK_USE_PLATFORM_GGP )
+  //=== VK_GGP_stream_descriptor_surface ===
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  // RAII factory: creates a Stadia (GGP) stream-descriptor surface (exceptions-only build).
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR Instance::createStreamDescriptorSurfaceGGP( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator );
+  }
+#endif
+#endif /*VK_USE_PLATFORM_GGP*/
+
+  //=== VK_NV_external_memory_capabilities ===
+
+  
+  // Queries external-memory image format properties; throws (via resultCheck) on failure
+  // (vkGetPhysicalDeviceExternalImageFormatPropertiesNV).
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV PhysicalDevice::getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceExternalImageFormatPropertiesNV && "Function <vkGetPhysicalDeviceExternalImageFormatPropertiesNV> needs extension <VK_NV_external_memory_capabilities> enabled!" );
+
+    
+    VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV externalImageFormatProperties;
+    VkResult result = getDispatcher()->vkGetPhysicalDeviceExternalImageFormatPropertiesNV( static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageCreateFlags>( flags ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ), reinterpret_cast<VkExternalImageFormatPropertiesNV *>( &externalImageFormatProperties ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getExternalImageFormatPropertiesNV" );
+    
+    return externalImageFormatProperties;
+  }
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_NV_external_memory_win32 ===
+
+  
+  // Exports this device memory as a Win32 HANDLE; throws (via resultCheck) on failure
+  // (vkGetMemoryWin32HandleNV).
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE HANDLE DeviceMemory::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryWin32HandleNV && "Function <vkGetMemoryWin32HandleNV> needs extension <VK_NV_external_memory_win32> enabled!" );
+
+    
+    HANDLE handle;
+    VkResult result = getDispatcher()->vkGetMemoryWin32HandleNV( static_cast<VkDevice>( m_device ), static_cast<VkDeviceMemory>( m_memory ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ), &handle );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::DeviceMemory::getMemoryWin32HandleNV" );
+    
+    return handle;
+  }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_get_physical_device_properties2 ===
+
+  
+  // Returns the device's feature set (vkGetPhysicalDeviceFeatures2KHR).
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 PhysicalDevice::getFeatures2KHR(  ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFeatures2KHR && "Function <vkGetPhysicalDeviceFeatures2KHR> needs extension <VK_KHR_get_physical_device_properties2> enabled!" );
+
+    
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features;
+    getDispatcher()->vkGetPhysicalDeviceFeatures2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
+    
+    
+    return features;
+  }
+
+  // StructureChain variant: fills the chain's PhysicalDeviceFeatures2 head (and any
+  // pNext-linked extension structs) in place, then returns the whole chain.
+  template <typename X, typename Y, typename... Z>
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getFeatures2KHR(  ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFeatures2KHR && "Function <vkGetPhysicalDeviceFeatures2KHR> needs extension <VK_KHR_get_physical_device_properties2> enabled!" );
+
+    
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>();
+    getDispatcher()->vkGetPhysicalDeviceFeatures2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
+    
+    
+    return structureChain;
+  }
+
+  
+  // Returns the device's properties (vkGetPhysicalDeviceProperties2KHR).
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 PhysicalDevice::getProperties2KHR(  ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceProperties2KHR && "Function <vkGetPhysicalDeviceProperties2KHR> needs extension <VK_KHR_get_physical_device_properties2> enabled!" );
+
+    
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties;
+    getDispatcher()->vkGetPhysicalDeviceProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
+    
+    
+    return properties;
+  }
+
+  // StructureChain variant of getProperties2KHR: fills the chain head in place.
+  template <typename X, typename Y, typename... Z>
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getProperties2KHR(  ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceProperties2KHR && "Function <vkGetPhysicalDeviceProperties2KHR> needs extension <VK_KHR_get_physical_device_properties2> enabled!" );
+
+    
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>();
+    getDispatcher()->vkGetPhysicalDeviceProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
+    
+    
+    return structureChain;
+  }
+
+  
+  // Returns format properties for the given format (vkGetPhysicalDeviceFormatProperties2KHR).
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2 PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFormatProperties2KHR && "Function <vkGetPhysicalDeviceFormatProperties2KHR> needs extension <VK_KHR_get_physical_device_properties2> enabled!" );
+
+    
+    VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties;
+    getDispatcher()->vkGetPhysicalDeviceFormatProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
+    
+    
+    return formatProperties;
+  }
+
+  // StructureChain variant of getFormatProperties2KHR: fills the chain head in place.
+  template <typename X, typename Y, typename... Z>
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFormatProperties2KHR && "Function <vkGetPhysicalDeviceFormatProperties2KHR> needs extension <VK_KHR_get_physical_device_properties2> enabled!" );
+
+    
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::FormatProperties2>();
+    getDispatcher()->vkGetPhysicalDeviceFormatProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
+    
+    
+    return structureChain;
+  }
+
+  
+  // Queries image format properties; throws (via resultCheck) on failure, e.g. when the
+  // format/usage combination is unsupported (vkGetPhysicalDeviceImageFormatProperties2KHR).
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ImageFormatProperties2 PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2KHR && "Function <vkGetPhysicalDeviceImageFormatProperties2KHR> needs extension <VK_KHR_get_physical_device_properties2> enabled!" );
+
+    
+    VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties;
+    VkResult result = getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ), reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" );
+    
+    return imageFormatProperties;
+  }
+
+  // StructureChain variant of getImageFormatProperties2KHR: fills the chain head in place.
+  template <typename X, typename Y, typename... Z>
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2KHR && "Function <vkGetPhysicalDeviceImageFormatProperties2KHR> needs extension <VK_KHR_get_physical_device_properties2> enabled!" );
+
+    
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>();
+    VkResult result = getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ), reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" );
+    
+    return structureChain;
+  }
+
+  
+  // Enumerates queue family properties with the two-call count/fill idiom, trimming
+  // the vector if the second call reports fewer entries
+  // (vkGetPhysicalDeviceQueueFamilyProperties2KHR).
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> PhysicalDevice::getQueueFamilyProperties2KHR(  ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2KHR && "Function <vkGetPhysicalDeviceQueueFamilyProperties2KHR> needs extension <VK_KHR_get_physical_device_properties2> enabled!" );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties;
+    uint32_t queueFamilyPropertyCount;
+    getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), &queueFamilyPropertyCount, nullptr );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
+    
+    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
+    if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
+    {
+      queueFamilyProperties.resize( queueFamilyPropertyCount );
+    }
+    return queueFamilyProperties;
+  }
+
+  // StructureChain variant: one chain per queue family. Before the fill call, each scratch
+  // QueueFamilyProperties2's pNext is wired to the corresponding chain's extension structs
+  // so the driver populates them; afterwards the head structs are copied back into the chains.
+  template <typename StructureChain>
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::vector<StructureChain> PhysicalDevice::getQueueFamilyProperties2KHR(  ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2KHR && "Function <vkGetPhysicalDeviceQueueFamilyProperties2KHR> needs extension <VK_KHR_get_physical_device_properties2> enabled!" );
+
+    
+    std::vector<StructureChain> structureChains;
+    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties;
+    uint32_t queueFamilyPropertyCount;
+    getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), &queueFamilyPropertyCount, nullptr );
+    structureChains.resize( queueFamilyPropertyCount );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
+    {
+      queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
+    }
+    getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
+    
+    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
+      if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
+      {
+        structureChains.resize( queueFamilyPropertyCount );
+      }
+      for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
+      {
+        structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
+      }
+    return structureChains;
+  }
+
+  
+  // Returns the device's memory heap/type layout (vkGetPhysicalDeviceMemoryProperties2KHR).
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 PhysicalDevice::getMemoryProperties2KHR(  ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceMemoryProperties2KHR && "Function <vkGetPhysicalDeviceMemoryProperties2KHR> needs extension <VK_KHR_get_physical_device_properties2> enabled!" );
+
+    
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties;
+    getDispatcher()->vkGetPhysicalDeviceMemoryProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
+    
+    
+    return memoryProperties;
+  }
+
+  // StructureChain variant of getMemoryProperties2KHR: fills the chain head in place.
+  template <typename X, typename Y, typename... Z>
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getMemoryProperties2KHR(  ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceMemoryProperties2KHR && "Function <vkGetPhysicalDeviceMemoryProperties2KHR> needs extension <VK_KHR_get_physical_device_properties2> enabled!" );
+
+    
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>();
+    getDispatcher()->vkGetPhysicalDeviceMemoryProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
+    
+    
+    return structureChain;
+  }
+
+  
+  // Enumerates sparse image format properties with the two-call count/fill idiom,
+  // trimming the vector if the driver returned fewer entries
+  // (vkGetPhysicalDeviceSparseImageFormatProperties2KHR).
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2> PhysicalDevice::getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties2KHR && "Function <vkGetPhysicalDeviceSparseImageFormatProperties2KHR> needs extension <VK_KHR_get_physical_device_properties2> enabled!" );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2> properties;
+    uint32_t propertyCount;
+    getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, nullptr );
+    properties.resize( propertyCount );
+    getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
+    
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    if ( propertyCount < properties.size() )
+    {
+      properties.resize( propertyCount );
+    }
+    return properties;
+  }
+
+  //=== VK_KHR_device_group ===
+
+  
+  // Queries peer memory feature flags between two devices in the group
+  // (vkGetDeviceGroupPeerMemoryFeaturesKHR).
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceGroupPeerMemoryFeaturesKHR && "Function <vkGetDeviceGroupPeerMemoryFeaturesKHR> needs extension <VK_KHR_device_group> enabled!" );
+
+    
+    VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures;
+    getDispatcher()->vkGetDeviceGroupPeerMemoryFeaturesKHR( static_cast<VkDevice>( m_device ), heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( &peerMemoryFeatures ) );
+    
+    
+    return peerMemoryFeatures;
+  }
+
+  
+  // Sets the device mask for subsequent commands (vkCmdSetDeviceMaskKHR).
+   VULKAN_HPP_INLINE void CommandBuffer::setDeviceMaskKHR( uint32_t deviceMask ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDeviceMaskKHR && "Function <vkCmdSetDeviceMaskKHR> needs extension <VK_KHR_device_group> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdSetDeviceMaskKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), deviceMask );
+    
+    
+    
+  }
+
+  
+  // Dispatches compute work with a non-zero base workgroup (vkCmdDispatchBaseKHR).
+   VULKAN_HPP_INLINE void CommandBuffer::dispatchBaseKHR( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDispatchBaseKHR && "Function <vkCmdDispatchBaseKHR> needs extension <VK_KHR_device_group> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdDispatchBaseKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
+    
+    
+    
+  }
+
+#if defined( VK_USE_PLATFORM_VI_NN )
+  //=== VK_NN_vi_surface ===
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  // RAII factory: creates a Nintendo Vi surface (exceptions-only build).
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR Instance::createViSurfaceNN( VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator );
+  }
+#endif
+#endif /*VK_USE_PLATFORM_VI_NN*/
+
+  //=== VK_KHR_maintenance1 ===
+
+  
+  // Trims unused memory from this command pool (vkTrimCommandPoolKHR).
+   VULKAN_HPP_INLINE void CommandPool::trimKHR( VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkTrimCommandPoolKHR && "Function <vkTrimCommandPoolKHR> needs extension <VK_KHR_maintenance1> enabled!" );
+
+    
+    
+    getDispatcher()->vkTrimCommandPoolKHR( static_cast<VkDevice>( m_device ), static_cast<VkCommandPool>( m_commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
+    
+    
+    
+  }
+
+  //=== VK_KHR_device_group_creation ===
+
+  
+  // Enumerates physical-device groups via the two-call count/fill loop, retrying while
+  // VK_INCOMPLETE; throws (via resultCheck) on failure and trims the vector to the final
+  // count (vkEnumeratePhysicalDeviceGroupsKHR).
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties> Instance::enumeratePhysicalDeviceGroupsKHR(  ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkEnumeratePhysicalDeviceGroupsKHR && "Function <vkEnumeratePhysicalDeviceGroupsKHR> needs extension <VK_KHR_device_group_creation> enabled!" );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties> physicalDeviceGroupProperties;
+    uint32_t physicalDeviceGroupCount;
+    VkResult result;
+    do
+    {
+      result = getDispatcher()->vkEnumeratePhysicalDeviceGroupsKHR( static_cast<VkInstance>( m_instance ), &physicalDeviceGroupCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && physicalDeviceGroupCount )
+      {
+        physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
+        result = getDispatcher()->vkEnumeratePhysicalDeviceGroupsKHR( static_cast<VkInstance>( m_instance ), &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" );
+    VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
+    if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() )
+    {
+      physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
+    }
+    return physicalDeviceGroupProperties;
+  }
+
+  //=== VK_KHR_external_memory_capabilities ===
+
+  
+  // Returns the external-buffer properties for the given query info. The
+  // underlying call has no VkResult, so no resultCheck is needed here.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties PhysicalDevice::getExternalBufferPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceExternalBufferPropertiesKHR && "Function <vkGetPhysicalDeviceExternalBufferPropertiesKHR> needs extension <VK_KHR_external_memory_capabilities> enabled!" );
+
+    
+    VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties;
+    getDispatcher()->vkGetPhysicalDeviceExternalBufferPropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( &externalBufferInfo ), reinterpret_cast<VkExternalBufferProperties *>( &externalBufferProperties ) );
+    
+    
+    return externalBufferProperties;
+  }
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_external_memory_win32 ===
+
+  
+  // Exports device memory as a Win32 HANDLE; failures surface via resultCheck.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE HANDLE Device::getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR & getWin32HandleInfo ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryWin32HandleKHR && "Function <vkGetMemoryWin32HandleKHR> needs extension <VK_KHR_external_memory_win32> enabled!" );
+
+    
+    HANDLE handle;
+    VkResult result = getDispatcher()->vkGetMemoryWin32HandleKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleKHR" );
+    
+    return handle;
+  }
+
+  
+  // Queries the memory properties of an external Win32 handle of the given type.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryWin32HandlePropertiesKHR && "Function <vkGetMemoryWin32HandlePropertiesKHR> needs extension <VK_KHR_external_memory_win32> enabled!" );
+
+    
+    VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR memoryWin32HandleProperties;
+    VkResult result = getDispatcher()->vkGetMemoryWin32HandlePropertiesKHR( static_cast<VkDevice>( m_device ), static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), handle, reinterpret_cast<VkMemoryWin32HandlePropertiesKHR *>( &memoryWin32HandleProperties ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandlePropertiesKHR" );
+    
+    return memoryWin32HandleProperties;
+  }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_external_memory_fd ===
+
+  
+  // Exports device memory as a POSIX file descriptor; failures surface via resultCheck.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE int Device::getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR & getFdInfo ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryFdKHR && "Function <vkGetMemoryFdKHR> needs extension <VK_KHR_external_memory_fd> enabled!" );
+
+    
+    int fd;
+    VkResult result = getDispatcher()->vkGetMemoryFdKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkMemoryGetFdInfoKHR *>( &getFdInfo ), &fd );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdKHR" );
+    
+    return fd;
+  }
+
+  
+  // Queries the memory properties of an external fd of the given handle type.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryFdPropertiesKHR && "Function <vkGetMemoryFdPropertiesKHR> needs extension <VK_KHR_external_memory_fd> enabled!" );
+
+    
+    VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR memoryFdProperties;
+    VkResult result = getDispatcher()->vkGetMemoryFdPropertiesKHR( static_cast<VkDevice>( m_device ), static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), fd, reinterpret_cast<VkMemoryFdPropertiesKHR *>( &memoryFdProperties ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdPropertiesKHR" );
+    
+    return memoryFdProperties;
+  }
+
+  //=== VK_KHR_external_semaphore_capabilities ===
+
+  
+  // Returns external-semaphore properties; the underlying call has no VkResult.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties PhysicalDevice::getExternalSemaphorePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceExternalSemaphorePropertiesKHR && "Function <vkGetPhysicalDeviceExternalSemaphorePropertiesKHR> needs extension <VK_KHR_external_semaphore_capabilities> enabled!" );
+
+    
+    VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties;
+    getDispatcher()->vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( &externalSemaphoreInfo ), reinterpret_cast<VkExternalSemaphoreProperties *>( &externalSemaphoreProperties ) );
+    
+    
+    return externalSemaphoreProperties;
+  }
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_external_semaphore_win32 ===
+
+  
+  // Imports a Win32 handle into a semaphore; failures surface via resultCheck.
+   VULKAN_HPP_INLINE void Device::importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkImportSemaphoreWin32HandleKHR && "Function <vkImportSemaphoreWin32HandleKHR> needs extension <VK_KHR_external_semaphore_win32> enabled!" );
+
+    
+    
+    VkResult result = getDispatcher()->vkImportSemaphoreWin32HandleKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR *>( &importSemaphoreWin32HandleInfo ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreWin32HandleKHR" );
+    
+    
+  }
+
+  
+  // Exports a semaphore as a Win32 HANDLE; failures surface via resultCheck.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE HANDLE Device::getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetSemaphoreWin32HandleKHR && "Function <vkGetSemaphoreWin32HandleKHR> needs extension <VK_KHR_external_semaphore_win32> enabled!" );
+
+    
+    HANDLE handle;
+    VkResult result = getDispatcher()->vkGetSemaphoreWin32HandleKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreWin32HandleKHR" );
+    
+    return handle;
+  }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_external_semaphore_fd ===
+
+  
+  // Imports a POSIX fd into a semaphore; failures surface via resultCheck.
+   VULKAN_HPP_INLINE void Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkImportSemaphoreFdKHR && "Function <vkImportSemaphoreFdKHR> needs extension <VK_KHR_external_semaphore_fd> enabled!" );
+
+    
+    
+    VkResult result = getDispatcher()->vkImportSemaphoreFdKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImportSemaphoreFdInfoKHR *>( &importSemaphoreFdInfo ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreFdKHR" );
+    
+    
+  }
+
+  
+  // Exports a semaphore as a POSIX file descriptor; failures surface via resultCheck.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE int Device::getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR & getFdInfo ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetSemaphoreFdKHR && "Function <vkGetSemaphoreFdKHR> needs extension <VK_KHR_external_semaphore_fd> enabled!" );
+
+    
+    int fd;
+    VkResult result = getDispatcher()->vkGetSemaphoreFdKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreGetFdInfoKHR *>( &getFdInfo ), &fd );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreFdKHR" );
+    
+    return fd;
+  }
+
+  //=== VK_KHR_push_descriptor ===
+
+  
+  // Records a push of descriptorWrites.size() descriptor writes into the given
+  // set of the bound pipeline layout; no result code is returned.
+   VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPushDescriptorSetKHR && "Function <vkCmdPushDescriptorSetKHR> needs extension <VK_KHR_push_descriptor> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdPushDescriptorSetKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), set, descriptorWrites.size(), reinterpret_cast<const VkWriteDescriptorSet *>( descriptorWrites.data() ) );
+    
+    
+    
+  }
+
+  // Template variant: `data` is forwarded as an opaque const void*, so its layout
+  // must match what the descriptor update template expects (caller's contract).
+  template <typename DataType>
+   VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, DataType const & data ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPushDescriptorSetWithTemplateKHR && "Function <vkCmdPushDescriptorSetWithTemplateKHR> needs extension <VK_KHR_push_descriptor> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdPushDescriptorSetWithTemplateKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), static_cast<VkPipelineLayout>( layout ), set, reinterpret_cast<const void *>( &data ) );
+    
+    
+    
+  }
+
+  //=== VK_EXT_conditional_rendering ===
+
+  
+  // Records the start of a conditional-rendering block in this command buffer.
+   VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginConditionalRenderingEXT && "Function <vkCmdBeginConditionalRenderingEXT> needs extension <VK_EXT_conditional_rendering> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdBeginConditionalRenderingEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkConditionalRenderingBeginInfoEXT *>( &conditionalRenderingBegin ) );
+    
+    
+    
+  }
+
+  
+  // Records the end of the current conditional-rendering block.
+   VULKAN_HPP_INLINE void CommandBuffer::endConditionalRenderingEXT(  ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndConditionalRenderingEXT && "Function <vkCmdEndConditionalRenderingEXT> needs extension <VK_EXT_conditional_rendering> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdEndConditionalRenderingEXT( static_cast<VkCommandBuffer>( m_commandBuffer ) );
+    
+    
+    
+  }
+
+  //=== VK_KHR_descriptor_update_template ===
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  // Factory: delegates construction (and error handling) to the RAII wrapper's
+  // constructor, hence only available when exceptions are enabled.
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate Device::createDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate( *this, createInfo, allocator );
+  }
+#endif
+
+  
+  // Destroys a raw (non-RAII) descriptor update template handle.
+   VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkDestroyDescriptorUpdateTemplateKHR && "Function <vkDestroyDescriptorUpdateTemplateKHR> needs extension <VK_KHR_descriptor_update_template> enabled!" );
+
+    
+    
+    getDispatcher()->vkDestroyDescriptorUpdateTemplateKHR( static_cast<VkDevice>( m_device ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+
+  // Template variant: `data` is forwarded as an opaque const void*, so its layout
+  // must match what the update template expects (caller's contract).
+  template <typename DataType>
+   VULKAN_HPP_INLINE void DescriptorSet::updateWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, DataType const & data ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkUpdateDescriptorSetWithTemplateKHR && "Function <vkUpdateDescriptorSetWithTemplateKHR> needs extension <VK_KHR_descriptor_update_template> enabled!" );
+
+    
+    
+    getDispatcher()->vkUpdateDescriptorSetWithTemplateKHR( static_cast<VkDevice>( m_device ), static_cast<VkDescriptorSet>( m_descriptorSet ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const void *>( &data ) );
+    
+    
+    
+  }
+
+  //=== VK_NV_clip_space_w_scaling ===
+
+  
+  // Records viewport W-scaling factors for viewportWScalings.size() viewports
+  // starting at firstViewport.
+   VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const & viewportWScalings ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewportWScalingNV && "Function <vkCmdSetViewportWScalingNV> needs extension <VK_NV_clip_space_w_scaling> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdSetViewportWScalingNV( static_cast<VkCommandBuffer>( m_commandBuffer ), firstViewport, viewportWScalings.size(), reinterpret_cast<const VkViewportWScalingNV *>( viewportWScalings.data() ) );
+    
+    
+    
+  }
+
+#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT )
+  //=== VK_EXT_acquire_xlib_display ===
+
+  
+  // Acquires exclusive control of an Xlib display; failures surface via resultCheck.
+   VULKAN_HPP_INLINE void PhysicalDevice::acquireXlibDisplayEXT( Display & dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireXlibDisplayEXT && "Function <vkAcquireXlibDisplayEXT> needs extension <VK_EXT_acquire_xlib_display> enabled!" );
+
+    
+    
+    VkResult result = getDispatcher()->vkAcquireXlibDisplayEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), &dpy, static_cast<VkDisplayKHR>( display ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireXlibDisplayEXT" );
+    
+    
+  }
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  // Factory: delegates to the RAII DisplayKHR constructor, hence only available
+  // when exceptions are enabled.
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DisplayKHR PhysicalDevice::getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::DisplayKHR( *this, dpy, rrOutput );
+  }
+#endif
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+
+  //=== VK_EXT_display_surface_counter ===
+
+  
+  // Queries extended surface capabilities (incl. surface counters) for `surface`.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilities2EXT && "Function <vkGetPhysicalDeviceSurfaceCapabilities2EXT> needs extension <VK_EXT_display_surface_counter> enabled!" );
+
+    
+    VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT surfaceCapabilities;
+    VkResult result = getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilities2EXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilities2EXT *>( &surfaceCapabilities ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2EXT" );
+    
+    return surfaceCapabilities;
+  }
+
+  //=== VK_EXT_display_control ===
+
+  
+  // Sets the power state of a display; failures surface via resultCheck.
+   VULKAN_HPP_INLINE void Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT & displayPowerInfo ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkDisplayPowerControlEXT && "Function <vkDisplayPowerControlEXT> needs extension <VK_EXT_display_control> enabled!" );
+
+    
+    
+    VkResult result = getDispatcher()->vkDisplayPowerControlEXT( static_cast<VkDevice>( m_device ), static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT *>( &displayPowerInfo ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::displayPowerControlEXT" );
+    
+    
+  }
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  // Factory: returns a Fence signaled by a device event; delegates to the RAII
+  // Fence constructor, hence only available when exceptions are enabled.
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Fence Device::registerEventEXT( VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const & deviceEventInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::Fence( *this, deviceEventInfo, allocator );
+  }
+#endif
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  // Factory: returns a Fence signaled by a display event; delegates to the RAII
+  // Fence constructor, hence only available when exceptions are enabled.
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Fence Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR const & display, VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT const & displayEventInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::Fence( *this, display, displayEventInfo, allocator );
+  }
+#endif
+
+  
+  // Reads the current value of a surface counter for this swapchain.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE uint64_t SwapchainKHR::getCounterEXT( VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetSwapchainCounterEXT && "Function <vkGetSwapchainCounterEXT> needs extension <VK_EXT_display_control> enabled!" );
+
+    
+    uint64_t counterValue;
+    VkResult result = getDispatcher()->vkGetSwapchainCounterEXT( static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( m_swapchain ), static_cast<VkSurfaceCounterFlagBitsEXT>( counter ), &counterValue );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::getCounterEXT" );
+    
+    return counterValue;
+  }
+
+  //=== VK_GOOGLE_display_timing ===
+
+  
+  // Queries the display refresh-cycle duration for this swapchain.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE SwapchainKHR::getRefreshCycleDurationGOOGLE(  ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetRefreshCycleDurationGOOGLE && "Function <vkGetRefreshCycleDurationGOOGLE> needs extension <VK_GOOGLE_display_timing> enabled!" );
+
+    
+    VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE displayTimingProperties;
+    VkResult result = getDispatcher()->vkGetRefreshCycleDurationGOOGLE( static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( m_swapchain ), reinterpret_cast<VkRefreshCycleDurationGOOGLE *>( &displayTimingProperties ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::getRefreshCycleDurationGOOGLE" );
+    
+    return displayTimingProperties;
+  }
+
+  
+  // Fetches past presentation timings with the two-call enumeration idiom:
+  // query count, resize, fetch; retry on VK_INCOMPLETE, then trim the vector.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE> SwapchainKHR::getPastPresentationTimingGOOGLE(  ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetPastPresentationTimingGOOGLE && "Function <vkGetPastPresentationTimingGOOGLE> needs extension <VK_GOOGLE_display_timing> enabled!" );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE> presentationTimings;
+    uint32_t presentationTimingCount;
+    VkResult result;
+    do
+    {
+      result = getDispatcher()->vkGetPastPresentationTimingGOOGLE( static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( m_swapchain ), &presentationTimingCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && presentationTimingCount )
+      {
+        presentationTimings.resize( presentationTimingCount );
+        result = getDispatcher()->vkGetPastPresentationTimingGOOGLE( static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( m_swapchain ), &presentationTimingCount, reinterpret_cast<VkPastPresentationTimingGOOGLE *>( presentationTimings.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::getPastPresentationTimingGOOGLE" );
+    VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() );
+    if ( presentationTimingCount < presentationTimings.size() )
+    {
+      presentationTimings.resize( presentationTimingCount );
+    }
+    return presentationTimings;
+  }
+
+  //=== VK_EXT_discard_rectangles ===
+
+  
+  // Records discardRectangles.size() discard rectangles starting at
+  // firstDiscardRectangle into this command buffer.
+   VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDiscardRectangleEXT && "Function <vkCmdSetDiscardRectangleEXT> needs extension <VK_EXT_discard_rectangles> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdSetDiscardRectangleEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), firstDiscardRectangle, discardRectangles.size(), reinterpret_cast<const VkRect2D *>( discardRectangles.data() ) );
+    
+    
+    
+  }
+
+  //=== VK_EXT_hdr_metadata ===
+
+  
+  // Applies HDR metadata to each swapchain (parallel arrays). The two proxies
+  // must be the same length; a mismatch throws LogicError, or aborts via
+  // LOG(FATAL) when exceptions are disabled (Android-specific patch).
+   VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const & swapchains, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::HdrMetadataEXT> const & metadata ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkSetHdrMetadataEXT && "Function <vkSetHdrMetadataEXT> needs extension <VK_EXT_hdr_metadata> enabled!" );
+    if ( swapchains.size() != metadata.size() )
+  {
+    #ifndef VULKAN_HPP_NO_EXCEPTIONS
+    throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::setHdrMetadataEXT: swapchains.size() != metadata.size()" );
+    #else
+    LOG(FATAL) << VULKAN_HPP_NAMESPACE_STRING "::Device::setHdrMetadataEXT: swapchains.size() != metadata.size()";
+    #endif
+  }
+
+    
+    
+    getDispatcher()->vkSetHdrMetadataEXT( static_cast<VkDevice>( m_device ), swapchains.size(), reinterpret_cast<const VkSwapchainKHR *>( swapchains.data() ), reinterpret_cast<const VkHdrMetadataEXT *>( metadata.data() ) );
+    
+    
+    
+  }
+
+  //=== VK_KHR_create_renderpass2 ===
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  // Factory: delegates to the RAII RenderPass constructor, hence only available
+  // when exceptions are enabled.
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::RenderPass Device::createRenderPass2KHR( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::RenderPass( *this, createInfo, allocator );
+  }
+#endif
+
+  
+  // Records the start of a render pass (renderpass2 variant).
+   VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginRenderPass2KHR && "Function <vkCmdBeginRenderPass2KHR> needs extension <VK_KHR_create_renderpass2> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdBeginRenderPass2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ) );
+    
+    
+    
+  }
+
+  
+  // Records a transition to the next subpass (renderpass2 variant).
+   VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdNextSubpass2KHR && "Function <vkCmdNextSubpass2KHR> needs extension <VK_KHR_create_renderpass2> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdNextSubpass2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
+    
+    
+    
+  }
+
+  
+  // Records the end of the current render pass (renderpass2 variant).
+   VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndRenderPass2KHR && "Function <vkCmdEndRenderPass2KHR> needs extension <VK_KHR_create_renderpass2> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdEndRenderPass2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
+    
+    
+    
+  }
+
+  //=== VK_KHR_shared_presentable_image ===
+
+  
+  // Polls the swapchain status. Both eSuccess and eSuboptimalKHR are treated as
+  // success (passed to resultCheck's allowed-results list) and returned to the caller.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result SwapchainKHR::getStatus(  ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetSwapchainStatusKHR && "Function <vkGetSwapchainStatusKHR> needs extension <VK_KHR_shared_presentable_image> enabled!" );
+
+    
+    
+    VkResult result = getDispatcher()->vkGetSwapchainStatusKHR( static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( m_swapchain ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::getStatus", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
+    
+    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+  }
+
+  //=== VK_KHR_external_fence_capabilities ===
+
+  
+  // Returns external-fence properties; the underlying call has no VkResult.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties PhysicalDevice::getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceExternalFencePropertiesKHR && "Function <vkGetPhysicalDeviceExternalFencePropertiesKHR> needs extension <VK_KHR_external_fence_capabilities> enabled!" );
+
+    
+    VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties;
+    getDispatcher()->vkGetPhysicalDeviceExternalFencePropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( &externalFenceInfo ), reinterpret_cast<VkExternalFenceProperties *>( &externalFenceProperties ) );
+    
+    
+    return externalFenceProperties;
+  }
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_external_fence_win32 ===
+
+  
+  // Imports a Win32 handle into a fence; failures surface via resultCheck.
+   VULKAN_HPP_INLINE void Device::importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkImportFenceWin32HandleKHR && "Function <vkImportFenceWin32HandleKHR> needs extension <VK_KHR_external_fence_win32> enabled!" );
+
+    
+    
+    VkResult result = getDispatcher()->vkImportFenceWin32HandleKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImportFenceWin32HandleInfoKHR *>( &importFenceWin32HandleInfo ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceWin32HandleKHR" );
+    
+    
+  }
+
+  
+  // Exports a fence as a Win32 HANDLE; failures surface via resultCheck.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE HANDLE Device::getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR & getWin32HandleInfo ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetFenceWin32HandleKHR && "Function <vkGetFenceWin32HandleKHR> needs extension <VK_KHR_external_fence_win32> enabled!" );
+
+    
+    HANDLE handle;
+    VkResult result = getDispatcher()->vkGetFenceWin32HandleKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkFenceGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceWin32HandleKHR" );
+    
+    return handle;
+  }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_external_fence_fd ===
+
+  
+  // Imports a POSIX fd into a fence; failures surface via resultCheck.
+   VULKAN_HPP_INLINE void Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR & importFenceFdInfo ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkImportFenceFdKHR && "Function <vkImportFenceFdKHR> needs extension <VK_KHR_external_fence_fd> enabled!" );
+
+    
+    
+    VkResult result = getDispatcher()->vkImportFenceFdKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImportFenceFdInfoKHR *>( &importFenceFdInfo ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceFdKHR" );
+    
+    
+  }
+
+  
+  // Exports a fence as a POSIX file descriptor; failures surface via resultCheck.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE int Device::getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR & getFdInfo ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetFenceFdKHR && "Function <vkGetFenceFdKHR> needs extension <VK_KHR_external_fence_fd> enabled!" );
+
+    
+    int fd;
+    VkResult result = getDispatcher()->vkGetFenceFdKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkFenceGetFdInfoKHR *>( &getFdInfo ), &fd );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceFdKHR" );
+    
+    return fd;
+  }
+
+  //=== VK_KHR_performance_query ===
+
+  
+  // Enumerates performance counters and their descriptions for a queue family.
+  // Two-call idiom over TWO parallel vectors (counters + descriptions), both
+  // sized by the same count; retries on VK_INCOMPLETE and trims afterwards.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR>, std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR>> PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR && "Function <vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR> needs extension <VK_KHR_performance_query> enabled!" );
+
+    
+    std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR>, std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR>> data;
+    std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR> & counters = data.first;
+    std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR> & counterDescriptions = data.second;
+    uint32_t counterCount;
+    VkResult result;
+    do
+    {
+      result = getDispatcher()->vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, &counterCount, nullptr, nullptr );
+      if ( ( result == VK_SUCCESS ) && counterCount )
+      {
+        counters.resize( counterCount );
+counterDescriptions.resize( counterCount );
+        result = getDispatcher()->vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, &counterCount, reinterpret_cast<VkPerformanceCounterKHR *>( counters.data() ), reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( counterDescriptions.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" );
+    VULKAN_HPP_ASSERT( counterCount <= counters.size() );
+    if ( counterCount < counters.size() )
+    {
+      counters.resize( counterCount );
+counterDescriptions.resize( counterCount );
+    }
+    return data;
+  }
+
+  
+  // Returns how many passes are needed to run the given performance query.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE uint32_t PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR( const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR && "Function <vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR> needs extension <VK_KHR_performance_query> enabled!" );
+
+    
+    uint32_t numPasses;
+    getDispatcher()->vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR *>( &performanceQueryCreateInfo ), &numPasses );
+    
+    
+    return numPasses;
+  }
+
+  
+  // Acquires the device-wide profiling lock; failures surface via resultCheck.
+   VULKAN_HPP_INLINE void Device::acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR & info ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireProfilingLockKHR && "Function <vkAcquireProfilingLockKHR> needs extension <VK_KHR_performance_query> enabled!" );
+
+    
+    
+    VkResult result = getDispatcher()->vkAcquireProfilingLockKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkAcquireProfilingLockInfoKHR *>( &info ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::acquireProfilingLockKHR" );
+    
+    
+  }
+
+  
+  // Releases the device-wide profiling lock; no result code is returned.
+   VULKAN_HPP_INLINE void Device::releaseProfilingLockKHR(  ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkReleaseProfilingLockKHR && "Function <vkReleaseProfilingLockKHR> needs extension <VK_KHR_performance_query> enabled!" );
+
+    
+    
+    getDispatcher()->vkReleaseProfilingLockKHR( static_cast<VkDevice>( m_device ) );
+    
+    
+    
+  }
+
+  //=== VK_KHR_get_surface_capabilities2 ===
+
+  // Queries surface capabilities for the surface described by surfaceInfo (plain-struct overload).
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilities2KHR && "Function <vkGetPhysicalDeviceSurfaceCapabilities2KHR> needs extension <VK_KHR_get_surface_capabilities2> enabled!" );
+
+    
+    VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR surfaceCapabilities;
+    VkResult result = getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilities2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), reinterpret_cast<VkSurfaceCapabilities2KHR *>( &surfaceCapabilities ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" );
+    
+    return surfaceCapabilities;
+  }
+
+  // StructureChain overload: lets callers receive extension structs chained off SurfaceCapabilities2KHR.
+  template <typename X, typename Y, typename... Z>
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilities2KHR && "Function <vkGetPhysicalDeviceSurfaceCapabilities2KHR> needs extension <VK_KHR_get_surface_capabilities2> enabled!" );
+
+    // The driver writes through the chain head; the chain's pNext links expose the extra structs.
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR & surfaceCapabilities = structureChain.template get<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>();
+    VkResult result = getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilities2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), reinterpret_cast<VkSurfaceCapabilities2KHR *>( &surfaceCapabilities ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" );
+    
+    return structureChain;
+  }
+
+  // Enumerates surface formats with the two-call pattern, retrying while the driver reports VK_INCOMPLETE.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR> PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceFormats2KHR && "Function <vkGetPhysicalDeviceSurfaceFormats2KHR> needs extension <VK_KHR_get_surface_capabilities2> enabled!" );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR> surfaceFormats;
+    uint32_t surfaceFormatCount;
+    VkResult result;
+    do
+    {
+      result = getDispatcher()->vkGetPhysicalDeviceSurfaceFormats2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && surfaceFormatCount )
+      {
+        surfaceFormats.resize( surfaceFormatCount );
+        result = getDispatcher()->vkGetPhysicalDeviceSurfaceFormats2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" );
+    VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
+    if ( surfaceFormatCount < surfaceFormats.size() )  // shrink to the count the final call actually filled
+    {
+      surfaceFormats.resize( surfaceFormatCount );
+    }
+    return surfaceFormats;
+  }
+
+  // StructureChain overload: returns one chain per format so callers get chained extension structs per entry.
+  template <typename StructureChain>
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::vector<StructureChain> PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceFormats2KHR && "Function <vkGetPhysicalDeviceSurfaceFormats2KHR> needs extension <VK_KHR_get_surface_capabilities2> enabled!" );
+
+    // surfaceFormats is a contiguous scratch array for the C call; each element borrows the pNext
+    // chain of the corresponding StructureChain so the driver can fill the chained structs too.
+    std::vector<StructureChain> structureChains;
+    std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR> surfaceFormats;
+    uint32_t surfaceFormatCount;
+    VkResult result;
+    do
+    {
+      result = getDispatcher()->vkGetPhysicalDeviceSurfaceFormats2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && surfaceFormatCount )
+      {
+        structureChains.resize( surfaceFormatCount );
+        surfaceFormats.resize( surfaceFormatCount );
+        for ( uint32_t i = 0; i < surfaceFormatCount; i++ )
+        {
+          surfaceFormats[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>().pNext;
+        }
+        result = getDispatcher()->vkGetPhysicalDeviceSurfaceFormats2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" );
+    VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
+      if ( surfaceFormatCount < surfaceFormats.size() )  // shrink the result to the actual count
+      {
+        structureChains.resize( surfaceFormatCount );
+      }
+      for ( uint32_t i = 0; i < surfaceFormatCount; i++ )  // copy head structs back into the chains
+      {
+        structureChains[i].template get<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>() = surfaceFormats[i];
+      }
+    return structureChains;
+  }
+
+  //=== VK_KHR_get_display_properties2 ===
+
+  // Enumerates display properties (two-call pattern, retried while VK_INCOMPLETE).
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR> PhysicalDevice::getDisplayProperties2KHR(  ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceDisplayProperties2KHR && "Function <vkGetPhysicalDeviceDisplayProperties2KHR> needs extension <VK_KHR_get_display_properties2> enabled!" );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR> properties;
+    uint32_t propertyCount;
+    VkResult result;
+    do
+    {
+      result = getDispatcher()->vkGetPhysicalDeviceDisplayProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = getDispatcher()->vkGetPhysicalDeviceDisplayProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( properties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" );
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    if ( propertyCount < properties.size() )  // shrink to the count the final call actually filled
+    {
+      properties.resize( propertyCount );
+    }
+    return properties;
+  }
+
+  // Enumerates display-plane properties (same two-call pattern as above).
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR> PhysicalDevice::getDisplayPlaneProperties2KHR(  ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceDisplayPlaneProperties2KHR && "Function <vkGetPhysicalDeviceDisplayPlaneProperties2KHR> needs extension <VK_KHR_get_display_properties2> enabled!" );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR> properties;
+    uint32_t propertyCount;
+    VkResult result;
+    do
+    {
+      result = getDispatcher()->vkGetPhysicalDeviceDisplayPlaneProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = getDispatcher()->vkGetPhysicalDeviceDisplayPlaneProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( properties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" );
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    if ( propertyCount < properties.size() )
+    {
+      properties.resize( propertyCount );
+    }
+    return properties;
+  }
+
+  // Enumerates mode properties for this display; uses both the stored m_physicalDevice and m_display handles.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR> DisplayKHR::getModeProperties2(  ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetDisplayModeProperties2KHR && "Function <vkGetDisplayModeProperties2KHR> needs extension <VK_KHR_get_display_properties2> enabled!" );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR> properties;
+    uint32_t propertyCount;
+    VkResult result;
+    do
+    {
+      result = getDispatcher()->vkGetDisplayModeProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkDisplayKHR>( m_display ), &propertyCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = getDispatcher()->vkGetDisplayModeProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkDisplayKHR>( m_display ), &propertyCount, reinterpret_cast<VkDisplayModeProperties2KHR *>( properties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::DisplayKHR::getModeProperties2" );
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    if ( propertyCount < properties.size() )
+    {
+      properties.resize( propertyCount );
+    }
+    return properties;
+  }
+
+  // Queries the capabilities of the display plane described by displayPlaneInfo.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR PhysicalDevice::getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR & displayPlaneInfo ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetDisplayPlaneCapabilities2KHR && "Function <vkGetDisplayPlaneCapabilities2KHR> needs extension <VK_KHR_get_display_properties2> enabled!" );
+
+    
+    VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR capabilities;
+    VkResult result = getDispatcher()->vkGetDisplayPlaneCapabilities2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<const VkDisplayPlaneInfo2KHR *>( &displayPlaneInfo ), reinterpret_cast<VkDisplayPlaneCapabilities2KHR *>( &capabilities ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilities2KHR" );
+    
+    return capabilities;
+  }
+
+#if defined( VK_USE_PLATFORM_IOS_MVK )
+  //=== VK_MVK_ios_surface ===
+
+  // RAII factory: wraps surface creation in a VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR (exception builds only).
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR Instance::createIOSSurfaceMVK( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator );
+  }
+#endif
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+#if defined( VK_USE_PLATFORM_MACOS_MVK )
+  //=== VK_MVK_macos_surface ===
+
+  // RAII factory for a macOS (MoltenVK) surface; same shape as the iOS variant above.
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR Instance::createMacOSSurfaceMVK( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator );
+  }
+#endif
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+
+  //=== VK_EXT_debug_utils ===
+
+  // Attaches a human-readable name to a Vulkan object for debugging tools.
+   VULKAN_HPP_INLINE void Device::setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT & nameInfo ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkSetDebugUtilsObjectNameEXT && "Function <vkSetDebugUtilsObjectNameEXT> needs extension <VK_EXT_debug_utils> enabled!" );
+
+    
+    
+    VkResult result = getDispatcher()->vkSetDebugUtilsObjectNameEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>( &nameInfo ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectNameEXT" );
+    
+    
+  }
+
+  // Attaches an arbitrary binary tag to a Vulkan object for debugging tools.
+   VULKAN_HPP_INLINE void Device::setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT & tagInfo ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkSetDebugUtilsObjectTagEXT && "Function <vkSetDebugUtilsObjectTagEXT> needs extension <VK_EXT_debug_utils> enabled!" );
+
+    
+    
+    VkResult result = getDispatcher()->vkSetDebugUtilsObjectTagEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT *>( &tagInfo ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectTagEXT" );
+    
+    
+  }
+
+  // Opens a labelled region on this queue (paired with endDebugUtilsLabelEXT).
+   VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkQueueBeginDebugUtilsLabelEXT && "Function <vkQueueBeginDebugUtilsLabelEXT> needs extension <VK_EXT_debug_utils> enabled!" );
+
+    
+    
+    getDispatcher()->vkQueueBeginDebugUtilsLabelEXT( static_cast<VkQueue>( m_queue ), reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
+    
+    
+    
+  }
+
+  // Closes the most recently opened label region on this queue.
+   VULKAN_HPP_INLINE void Queue::endDebugUtilsLabelEXT(  ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkQueueEndDebugUtilsLabelEXT && "Function <vkQueueEndDebugUtilsLabelEXT> needs extension <VK_EXT_debug_utils> enabled!" );
+
+    
+    
+    getDispatcher()->vkQueueEndDebugUtilsLabelEXT( static_cast<VkQueue>( m_queue ) );
+    
+    
+    
+  }
+
+  // Inserts a single (non-region) label into this queue's stream.
+   VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkQueueInsertDebugUtilsLabelEXT && "Function <vkQueueInsertDebugUtilsLabelEXT> needs extension <VK_EXT_debug_utils> enabled!" );
+
+    
+    
+    getDispatcher()->vkQueueInsertDebugUtilsLabelEXT( static_cast<VkQueue>( m_queue ), reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
+    
+    
+    
+  }
+
+  // Opens a labelled region in this command buffer (paired with endDebugUtilsLabelEXT).
+   VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginDebugUtilsLabelEXT && "Function <vkCmdBeginDebugUtilsLabelEXT> needs extension <VK_EXT_debug_utils> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdBeginDebugUtilsLabelEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
+    
+    
+    
+  }
+
+  // Closes the most recently opened label region in this command buffer.
+   VULKAN_HPP_INLINE void CommandBuffer::endDebugUtilsLabelEXT(  ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndDebugUtilsLabelEXT && "Function <vkCmdEndDebugUtilsLabelEXT> needs extension <VK_EXT_debug_utils> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdEndDebugUtilsLabelEXT( static_cast<VkCommandBuffer>( m_commandBuffer ) );
+    
+    
+    
+  }
+
+  // Inserts a single (non-region) label into this command buffer.
+   VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdInsertDebugUtilsLabelEXT && "Function <vkCmdInsertDebugUtilsLabelEXT> needs extension <VK_EXT_debug_utils> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdInsertDebugUtilsLabelEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
+    
+    
+    
+  }
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DebugUtilsMessengerEXT Instance::createDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::DebugUtilsMessengerEXT( *this, createInfo, allocator );
+  }
+#endif
+
+  // Injects a message into the debug-utils messenger(s), as if emitted by the validation layers.
+   VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT & callbackData ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkSubmitDebugUtilsMessageEXT && "Function <vkSubmitDebugUtilsMessageEXT> needs extension <VK_EXT_debug_utils> enabled!" );
+
+    
+    
+    getDispatcher()->vkSubmitDebugUtilsMessageEXT( static_cast<VkInstance>( m_instance ), static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>( messageSeverity ), static_cast<VkDebugUtilsMessageTypeFlagsEXT>( messageTypes ), reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT *>( &callbackData ) );
+    
+    
+    
+  }
+
<br/>
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  //=== VK_ANDROID_external_memory_android_hardware_buffer ===
+
+  // Queries Vulkan-relevant properties (e.g. for import) of an AHardwareBuffer.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetAndroidHardwareBufferPropertiesANDROID && "Function <vkGetAndroidHardwareBufferPropertiesANDROID> needs extension <VK_ANDROID_external_memory_android_hardware_buffer> enabled!" );
+
+    
+    VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID properties;
+    VkResult result = getDispatcher()->vkGetAndroidHardwareBufferPropertiesANDROID( static_cast<VkDevice>( m_device ), &buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( &properties ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" );
+    
+    return properties;
+  }
+
+  // StructureChain overload of the above, for chained extension structs (e.g. format properties).
+  template <typename X, typename Y, typename... Z>
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetAndroidHardwareBufferPropertiesANDROID && "Function <vkGetAndroidHardwareBufferPropertiesANDROID> needs extension <VK_ANDROID_external_memory_android_hardware_buffer> enabled!" );
+
+    
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID & properties = structureChain.template get<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>();
+    VkResult result = getDispatcher()->vkGetAndroidHardwareBufferPropertiesANDROID( static_cast<VkDevice>( m_device ), &buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( &properties ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" );
+    
+    return structureChain;
+  }
+
+  // Exports device memory as an AHardwareBuffer. NOTE(review): ownership/refcount of the returned
+  // buffer is defined by the extension spec, not visible here — callers should consult it.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE struct AHardwareBuffer * Device::getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID & info ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryAndroidHardwareBufferANDROID && "Function <vkGetMemoryAndroidHardwareBufferANDROID> needs extension <VK_ANDROID_external_memory_android_hardware_buffer> enabled!" );
+
+    
+    struct AHardwareBuffer * buffer;
+    VkResult result = getDispatcher()->vkGetMemoryAndroidHardwareBufferANDROID( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID *>( &info ), &buffer );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryAndroidHardwareBufferANDROID" );
+    
+    return buffer;
+  }
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+  //=== VK_EXT_sample_locations ===
+
+  // Sets custom sample locations for subsequent draws in this command buffer.
+   VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT & sampleLocationsInfo ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetSampleLocationsEXT && "Function <vkCmdSetSampleLocationsEXT> needs extension <VK_EXT_sample_locations> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdSetSampleLocationsEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkSampleLocationsInfoEXT *>( &sampleLocationsInfo ) );
+    
+    
+    
+  }
+
+  // Queries multisample properties for the given sample count; no VkResult, hence noexcept.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT PhysicalDevice::getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceMultisamplePropertiesEXT && "Function <vkGetPhysicalDeviceMultisamplePropertiesEXT> needs extension <VK_EXT_sample_locations> enabled!" );
+
+    
+    VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT multisampleProperties;
+    getDispatcher()->vkGetPhysicalDeviceMultisamplePropertiesEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<VkMultisamplePropertiesEXT *>( &multisampleProperties ) );
+    
+    
+    return multisampleProperties;
+  }
+
+  //=== VK_KHR_get_memory_requirements2 ===
+
+  // Image memory requirements via the extension entry point (pre-1.1 equivalent of the core call).
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageMemoryRequirements2KHR && "Function <vkGetImageMemoryRequirements2KHR> needs extension <VK_KHR_get_memory_requirements2> enabled!" );
+
+    
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
+    getDispatcher()->vkGetImageMemoryRequirements2KHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+    
+    
+    return memoryRequirements;
+  }
+
+  // StructureChain overload: returns MemoryRequirements2 plus chained extension structs.
+  template <typename X, typename Y, typename... Z>
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageMemoryRequirements2KHR && "Function <vkGetImageMemoryRequirements2KHR> needs extension <VK_KHR_get_memory_requirements2> enabled!" );
+
+    
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
+    getDispatcher()->vkGetImageMemoryRequirements2KHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+    
+    
+    return structureChain;
+  }
+
+  // Buffer memory requirements via the extension entry point.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferMemoryRequirements2KHR && "Function <vkGetBufferMemoryRequirements2KHR> needs extension <VK_KHR_get_memory_requirements2> enabled!" );
+
+    
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
+    getDispatcher()->vkGetBufferMemoryRequirements2KHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+    
+    
+    return memoryRequirements;
+  }
+
+  // StructureChain overload of the buffer variant.
+  template <typename X, typename Y, typename... Z>
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferMemoryRequirements2KHR && "Function <vkGetBufferMemoryRequirements2KHR> needs extension <VK_KHR_get_memory_requirements2> enabled!" );
+
+    
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
+    getDispatcher()->vkGetBufferMemoryRequirements2KHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+    
+    
+    return structureChain;
+  }
+
+  // Sparse image memory requirements: single count-then-fetch (no VkResult, so no VK_INCOMPLETE loop).
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2> Device::getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageSparseMemoryRequirements2KHR && "Function <vkGetImageSparseMemoryRequirements2KHR> needs extension <VK_KHR_get_memory_requirements2> enabled!" );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2> sparseMemoryRequirements;
+    uint32_t sparseMemoryRequirementCount;
+    getDispatcher()->vkGetImageSparseMemoryRequirements2KHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr );
+    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    getDispatcher()->vkGetImageSparseMemoryRequirements2KHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
+    
+    VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
+    if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
+    {
+      sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    }
+    return sparseMemoryRequirements;
+  }
+
+  //=== VK_KHR_acceleration_structure ===
+
+  // RAII factory for an acceleration structure (exception builds only).
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureKHR Device::createAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureKHR( *this, createInfo, allocator );
+  }
+#endif
+
+  // Records acceleration-structure builds; infos and pBuildRangeInfos must be the same length
+  // (throws LogicError, or LOG(FATAL) when exceptions are disabled, on mismatch).
+   VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBuildAccelerationStructuresKHR && "Function <vkCmdBuildAccelerationStructuresKHR> needs extension <VK_KHR_acceleration_structure> enabled!" );
+    if ( infos.size() != pBuildRangeInfos.size() )
+  {
+    #ifndef VULKAN_HPP_NO_EXCEPTIONS
+    throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" );
+    #else
+    LOG(FATAL) << VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()";
+    #endif
+  }
+
+    
+    
+    getDispatcher()->vkCmdBuildAccelerationStructuresKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), infos.size(), reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ), reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( pBuildRangeInfos.data() ) );
+    
+    
+    
+  }
+
+  // Indirect variant: all four proxies must have matching lengths; each mismatch is checked separately.
+   VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresIndirectKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceAddress> const & indirectDeviceAddresses, VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & indirectStrides, VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t * const> const & pMaxPrimitiveCounts ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBuildAccelerationStructuresIndirectKHR && "Function <vkCmdBuildAccelerationStructuresIndirectKHR> needs extension <VK_KHR_acceleration_structure> enabled!" );
+    if ( infos.size() != indirectDeviceAddresses.size() )
+  {
+    #ifndef VULKAN_HPP_NO_EXCEPTIONS
+    throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectDeviceAddresses.size()" );
+    #else
+    LOG(FATAL) << VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectDeviceAddresses.size()";
+    #endif
+  }
+    if ( infos.size() != indirectStrides.size() )
+  {
+    #ifndef VULKAN_HPP_NO_EXCEPTIONS
+    throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectStrides.size()" );
+    #else
+    LOG(FATAL) << VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectStrides.size()";
+    #endif
+  }
+    if ( infos.size() != pMaxPrimitiveCounts.size() )
+  {
+    #ifndef VULKAN_HPP_NO_EXCEPTIONS
+    throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != pMaxPrimitiveCounts.size()" );
+    #else
+    LOG(FATAL) << VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != pMaxPrimitiveCounts.size()";
+    #endif
+  }
+
+    
+    
+    getDispatcher()->vkCmdBuildAccelerationStructuresIndirectKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), infos.size(), reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ), reinterpret_cast<const VkDeviceAddress *>( indirectDeviceAddresses.data() ), indirectStrides.data(), pMaxPrimitiveCounts.data() );
+    
+    
+    
+  }
+
+  
+  // Host-side acceleration-structure build (vkBuildAccelerationStructuresKHR), optionally
+  // deferred. Requires one build-range-info pointer per build info. Returns the raw Result
+  // because eOperationDeferredKHR / eOperationNotDeferredKHR are accepted success codes.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::buildAccelerationStructuresKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkBuildAccelerationStructuresKHR && "Function <vkBuildAccelerationStructuresKHR> needs extension <VK_KHR_acceleration_structure> enabled!" );
+    if ( infos.size() != pBuildRangeInfos.size() )
+  {
+    #ifndef VULKAN_HPP_NO_EXCEPTIONS
+    throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" );
+    #else
+    LOG(FATAL) << VULKAN_HPP_NAMESPACE_STRING "::Device::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()";
+    #endif
+  }
+
+    
+    
+    VkResult result = getDispatcher()->vkBuildAccelerationStructuresKHR( static_cast<VkDevice>( m_device ), static_cast<VkDeferredOperationKHR>( deferredOperation ), infos.size(), reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ), reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( pBuildRangeInfos.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::buildAccelerationStructuresKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
+    
+    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+  }
+
+  
+  // Host-side acceleration-structure copy (vkCopyAccelerationStructureKHR); deferred-operation
+  // success codes are accepted, so the Result is returned to the caller.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCopyAccelerationStructureKHR && "Function <vkCopyAccelerationStructureKHR> needs extension <VK_KHR_acceleration_structure> enabled!" );
+
+    
+    
+    VkResult result = getDispatcher()->vkCopyAccelerationStructureKHR( static_cast<VkDevice>( m_device ), static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( &info ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
+    
+    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+  }
+
+  
+  // Host-side serialize-to-memory copy (vkCopyAccelerationStructureToMemoryKHR); returns the
+  // Result since deferred-operation outcomes count as success.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCopyAccelerationStructureToMemoryKHR && "Function <vkCopyAccelerationStructureToMemoryKHR> needs extension <VK_KHR_acceleration_structure> enabled!" );
+
+    
+    
+    VkResult result = getDispatcher()->vkCopyAccelerationStructureToMemoryKHR( static_cast<VkDevice>( m_device ), static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( &info ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureToMemoryKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
+    
+    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+  }
+
+  
+  // Host-side deserialize-from-memory copy (vkCopyMemoryToAccelerationStructureKHR); returns
+  // the Result since deferred-operation outcomes count as success.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCopyMemoryToAccelerationStructureKHR && "Function <vkCopyMemoryToAccelerationStructureKHR> needs extension <VK_KHR_acceleration_structure> enabled!" );
+
+    
+    
+    VkResult result = getDispatcher()->vkCopyMemoryToAccelerationStructureKHR( static_cast<VkDevice>( m_device ), static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( &info ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToAccelerationStructureKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
+    
+    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+  }
+
+  // Queries properties of the given acceleration structures into a vector of DataType.
+  // dataSize must be an exact multiple of sizeof(DataType) (asserted); the vector holds
+  // dataSize / sizeof(DataType) elements.
+  template <typename DataType>
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::vector<DataType> Device::writeAccelerationStructuresPropertiesKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, size_t dataSize, size_t stride ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkWriteAccelerationStructuresPropertiesKHR && "Function <vkWriteAccelerationStructuresPropertiesKHR> needs extension <VK_KHR_acceleration_structure> enabled!" );
+
+        VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
+    std::vector<DataType> data( dataSize / sizeof( DataType ) );
+    VkResult result = getDispatcher()->vkWriteAccelerationStructuresPropertiesKHR( static_cast<VkDevice>( m_device ), accelerationStructures.size(), reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ), static_cast<VkQueryType>( queryType ), data.size() * sizeof( DataType ), reinterpret_cast<void *>( data.data() ), stride );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertiesKHR" );
+    
+    return data;
+  }
+
+  // Single-value convenience overload of writeAccelerationStructuresPropertiesKHR: writes
+  // exactly sizeof(DataType) bytes into one DataType and returns it by value.
+  template <typename DataType>
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE DataType Device::writeAccelerationStructuresPropertyKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, size_t stride ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkWriteAccelerationStructuresPropertiesKHR && "Function <vkWriteAccelerationStructuresPropertiesKHR> needs extension <VK_KHR_acceleration_structure> enabled!" );
+
+    
+    DataType data;
+    VkResult result = getDispatcher()->vkWriteAccelerationStructuresPropertiesKHR( static_cast<VkDevice>( m_device ), accelerationStructures.size(), reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ), static_cast<VkQueryType>( queryType ), sizeof( DataType ), reinterpret_cast<void *>( &data ), stride );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertyKHR" );
+    
+    return data;
+  }
+
+  
+  // Records vkCmdCopyAccelerationStructureKHR into this command buffer.
+   VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyAccelerationStructureKHR && "Function <vkCmdCopyAccelerationStructureKHR> needs extension <VK_KHR_acceleration_structure> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdCopyAccelerationStructureKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( &info ) );
+    
+    
+    
+  }
+
+  
+  // Records vkCmdCopyAccelerationStructureToMemoryKHR into this command buffer.
+   VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyAccelerationStructureToMemoryKHR && "Function <vkCmdCopyAccelerationStructureToMemoryKHR> needs extension <VK_KHR_acceleration_structure> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdCopyAccelerationStructureToMemoryKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( &info ) );
+    
+    
+    
+  }
+
+  
+  // Records vkCmdCopyMemoryToAccelerationStructureKHR into this command buffer.
+   VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyMemoryToAccelerationStructureKHR && "Function <vkCmdCopyMemoryToAccelerationStructureKHR> needs extension <VK_KHR_acceleration_structure> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdCopyMemoryToAccelerationStructureKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( &info ) );
+    
+    
+    
+  }
+
+  
+  // Returns the device address of the acceleration structure named in info
+  // (vkGetAccelerationStructureDeviceAddressKHR); no result code to check.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress Device::getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR & info ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetAccelerationStructureDeviceAddressKHR && "Function <vkGetAccelerationStructureDeviceAddressKHR> needs extension <VK_KHR_acceleration_structure> enabled!" );
+
+    
+    
+    VkDeviceAddress result = getDispatcher()->vkGetAccelerationStructureDeviceAddressKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR *>( &info ) );
+    
+    
+    return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result );
+  }
+
+  
+  // Records vkCmdWriteAccelerationStructuresPropertiesKHR: writes one query result per
+  // structure into queryPool starting at firstQuery.
+   VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteAccelerationStructuresPropertiesKHR && "Function <vkCmdWriteAccelerationStructuresPropertiesKHR> needs extension <VK_KHR_acceleration_structure> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdWriteAccelerationStructuresPropertiesKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), accelerationStructures.size(), reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ), static_cast<VkQueryType>( queryType ), static_cast<VkQueryPool>( queryPool ), firstQuery );
+    
+    
+    
+  }
+
+  
+  // Asks the device whether a serialized acceleration structure version is compatible
+  // (vkGetDeviceAccelerationStructureCompatibilityKHR); result returned by value.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR Device::getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR & versionInfo ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceAccelerationStructureCompatibilityKHR && "Function <vkGetDeviceAccelerationStructureCompatibilityKHR> needs extension <VK_KHR_acceleration_structure> enabled!" );
+
+    
+    VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR compatibility;
+    getDispatcher()->vkGetDeviceAccelerationStructureCompatibilityKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkAccelerationStructureVersionInfoKHR *>( &versionInfo ), reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( &compatibility ) );
+    
+    
+    return compatibility;
+  }
+
+  
+  // Computes required sizes for building an acceleration structure
+  // (vkGetAccelerationStructureBuildSizesKHR). maxPrimitiveCounts must provide one count
+  // per geometry in buildInfo; a mismatch throws LogicError (or logs fatally without exceptions).
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR Device::getAccelerationStructureBuildSizesKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR & buildInfo, VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & maxPrimitiveCounts ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetAccelerationStructureBuildSizesKHR && "Function <vkGetAccelerationStructureBuildSizesKHR> needs extension <VK_KHR_acceleration_structure> enabled!" );
+    if ( maxPrimitiveCounts.size() != buildInfo.geometryCount )
+    {
+      #ifndef VULKAN_HPP_NO_EXCEPTIONS
+      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureBuildSizesKHR: maxPrimitiveCounts.size() != buildInfo.geometryCount" );
+      #else
+      LOG(FATAL) << VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureBuildSizesKHR: maxPrimitiveCounts.size() != buildInfo.geometryCount";
+      #endif
+    }
+
+    
+    VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR sizeInfo;
+    getDispatcher()->vkGetAccelerationStructureBuildSizesKHR( static_cast<VkDevice>( m_device ), static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ), reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( &buildInfo ), maxPrimitiveCounts.data(), reinterpret_cast<VkAccelerationStructureBuildSizesInfoKHR *>( &sizeInfo ) );
+    
+    
+    return sizeInfo;
+  }
+
+  //=== VK_KHR_sampler_ycbcr_conversion ===
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  // RAII factory: constructs a SamplerYcbcrConversion owning wrapper; creation errors
+  // surface from the wrapper's constructor, hence only available with exceptions enabled.
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SamplerYcbcrConversion Device::createSamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::SamplerYcbcrConversion( *this, createInfo, allocator );
+  }
+#endif
+
+  
+  // Destroys a sampler Y'CbCr conversion via vkDestroySamplerYcbcrConversionKHR, forwarding
+  // the optional allocation callbacks (nullptr when not supplied).
+   VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkDestroySamplerYcbcrConversionKHR && "Function <vkDestroySamplerYcbcrConversionKHR> needs extension <VK_KHR_sampler_ycbcr_conversion> enabled!" );
+
+    
+    
+    getDispatcher()->vkDestroySamplerYcbcrConversionKHR( static_cast<VkDevice>( m_device ), static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+
+  //=== VK_KHR_bind_memory2 ===
+
+  
+  // Binds memory to multiple buffers in one call (vkBindBufferMemory2KHR); non-success
+  // results are raised through resultCheck.
+   VULKAN_HPP_INLINE void Device::bindBufferMemory2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkBindBufferMemory2KHR && "Function <vkBindBufferMemory2KHR> needs extension <VK_KHR_bind_memory2> enabled!" );
+
+    
+    
+    VkResult result = getDispatcher()->vkBindBufferMemory2KHR( static_cast<VkDevice>( m_device ), bindInfos.size(), reinterpret_cast<const VkBindBufferMemoryInfo *>( bindInfos.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2KHR" );
+    
+    
+  }
+
+  
+  // Binds memory to multiple images in one call (vkBindImageMemory2KHR); non-success
+  // results are raised through resultCheck.
+   VULKAN_HPP_INLINE void Device::bindImageMemory2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkBindImageMemory2KHR && "Function <vkBindImageMemory2KHR> needs extension <VK_KHR_bind_memory2> enabled!" );
+
+    
+    
+    VkResult result = getDispatcher()->vkBindImageMemory2KHR( static_cast<VkDevice>( m_device ), bindInfos.size(), reinterpret_cast<const VkBindImageMemoryInfo *>( bindInfos.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2KHR" );
+    
+    
+  }
+
+  //=== VK_EXT_image_drm_format_modifier ===
+
+  
+  // Queries the DRM format modifier of this image (vkGetImageDrmFormatModifierPropertiesEXT)
+  // and returns the filled properties struct.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT Image::getDrmFormatModifierPropertiesEXT(  ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageDrmFormatModifierPropertiesEXT && "Function <vkGetImageDrmFormatModifierPropertiesEXT> needs extension <VK_EXT_image_drm_format_modifier> enabled!" );
+
+    
+    VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT properties;
+    VkResult result = getDispatcher()->vkGetImageDrmFormatModifierPropertiesEXT( static_cast<VkDevice>( m_device ), static_cast<VkImage>( m_image ), reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT *>( &properties ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Image::getDrmFormatModifierPropertiesEXT" );
+    
+    return properties;
+  }
+
+  //=== VK_EXT_validation_cache ===
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  // RAII factory: constructs a ValidationCacheEXT owning wrapper; creation errors surface
+  // from the wrapper's constructor, hence only available with exceptions enabled.
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::ValidationCacheEXT Device::createValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::ValidationCacheEXT( *this, createInfo, allocator );
+  }
+#endif
+
+  
+  // Merges the given source caches into this validation cache (vkMergeValidationCachesEXT);
+  // non-success results are raised through resultCheck.
+   VULKAN_HPP_INLINE void ValidationCacheEXT::merge( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ValidationCacheEXT> const & srcCaches ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkMergeValidationCachesEXT && "Function <vkMergeValidationCachesEXT> needs extension <VK_EXT_validation_cache> enabled!" );
+
+    
+    
+    VkResult result = getDispatcher()->vkMergeValidationCachesEXT( static_cast<VkDevice>( m_device ), static_cast<VkValidationCacheEXT>( m_validationCache ), srcCaches.size(), reinterpret_cast<const VkValidationCacheEXT *>( srcCaches.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::ValidationCacheEXT::merge" );
+    
+    
+  }
+
+  
+  // Retrieves this validation cache's data blob via the standard Vulkan two-call pattern:
+  // first query the size, then fetch into the resized buffer, looping while the driver
+  // reports VK_INCOMPLETE (the size can shrink between calls); finally trims the vector
+  // down to the byte count actually written.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::vector<uint8_t> ValidationCacheEXT::getData(  ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetValidationCacheDataEXT && "Function <vkGetValidationCacheDataEXT> needs extension <VK_EXT_validation_cache> enabled!" );
+
+    
+    std::vector<uint8_t> data;
+    size_t dataSize;
+    VkResult result;
+    do
+    {
+      // Size query: null data pointer makes the driver report the required byte count.
+      result = getDispatcher()->vkGetValidationCacheDataEXT( static_cast<VkDevice>( m_device ), static_cast<VkValidationCacheEXT>( m_validationCache ), &dataSize, nullptr );
+      if ( ( result == VK_SUCCESS ) && dataSize )
+      {
+        data.resize( dataSize );
+        result = getDispatcher()->vkGetValidationCacheDataEXT( static_cast<VkDevice>( m_device ), static_cast<VkValidationCacheEXT>( m_validationCache ), &dataSize, reinterpret_cast<void *>( data.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::ValidationCacheEXT::getData" );
+    VULKAN_HPP_ASSERT( dataSize <= data.size() );
+    if ( dataSize < data.size() )
+    {
+      data.resize( dataSize );
+    }
+    return data;
+  }
+
+  //=== VK_NV_shading_rate_image ===
+
+  
+  // Records vkCmdBindShadingRateImageNV, binding imageView (in imageLayout) as the
+  // shading-rate image for subsequent draws.
+   VULKAN_HPP_INLINE void CommandBuffer::bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindShadingRateImageNV && "Function <vkCmdBindShadingRateImageNV> needs extension <VK_NV_shading_rate_image> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdBindShadingRateImageNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkImageView>( imageView ), static_cast<VkImageLayout>( imageLayout ) );
+    
+    
+    
+  }
+
+  
+  // Records vkCmdSetViewportShadingRatePaletteNV for shadingRatePalettes.size() viewports
+  // starting at firstViewport.
+   VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( uint32_t firstViewport, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const & shadingRatePalettes ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewportShadingRatePaletteNV && "Function <vkCmdSetViewportShadingRatePaletteNV> needs extension <VK_NV_shading_rate_image> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdSetViewportShadingRatePaletteNV( static_cast<VkCommandBuffer>( m_commandBuffer ), firstViewport, shadingRatePalettes.size(), reinterpret_cast<const VkShadingRatePaletteNV *>( shadingRatePalettes.data() ) );
+    
+    
+    
+  }
+
+  
+  // Records vkCmdSetCoarseSampleOrderNV, setting the sample ordering type plus any custom
+  // sample orderings.
+   VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> const & customSampleOrders ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoarseSampleOrderNV && "Function <vkCmdSetCoarseSampleOrderNV> needs extension <VK_NV_shading_rate_image> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdSetCoarseSampleOrderNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCoarseSampleOrderTypeNV>( sampleOrderType ), customSampleOrders.size(), reinterpret_cast<const VkCoarseSampleOrderCustomNV *>( customSampleOrders.data() ) );
+    
+    
+    
+  }
+
+  //=== VK_NV_ray_tracing ===
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  // RAII factory: constructs an AccelerationStructureNV owning wrapper; creation errors
+  // surface from the wrapper's constructor, hence only available with exceptions enabled.
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureNV Device::createAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureNV( *this, createInfo, allocator );
+  }
+#endif
+
+  
+  // Queries memory requirements for an NV acceleration structure
+  // (vkGetAccelerationStructureMemoryRequirementsNV); plain-struct overload.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR Device::getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetAccelerationStructureMemoryRequirementsNV && "Function <vkGetAccelerationStructureMemoryRequirementsNV> needs extension <VK_NV_ray_tracing> enabled!" );
+
+    
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR memoryRequirements;
+    getDispatcher()->vkGetAccelerationStructureMemoryRequirementsNV( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( &info ), reinterpret_cast<VkMemoryRequirements2KHR *>( &memoryRequirements ) );
+    
+    
+    return memoryRequirements;
+  }
+
+  // StructureChain overload of the query above: the MemoryRequirements2KHR element of the
+  // caller-specified chain receives the result, allowing pNext-extended queries.
+  template <typename X, typename Y, typename... Z>
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetAccelerationStructureMemoryRequirementsNV && "Function <vkGetAccelerationStructureMemoryRequirementsNV> needs extension <VK_NV_ray_tracing> enabled!" );
+
+    
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR>();
+    getDispatcher()->vkGetAccelerationStructureMemoryRequirementsNV( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( &info ), reinterpret_cast<VkMemoryRequirements2KHR *>( &memoryRequirements ) );
+    
+    
+    return structureChain;
+  }
+
+  
+  // Binds device memory to NV acceleration structures (vkBindAccelerationStructureMemoryNV);
+  // non-success results are raised through resultCheck.
+   VULKAN_HPP_INLINE void Device::bindAccelerationStructureMemoryNV( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV> const & bindInfos ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkBindAccelerationStructureMemoryNV && "Function <vkBindAccelerationStructureMemoryNV> needs extension <VK_NV_ray_tracing> enabled!" );
+
+    
+    
+    VkResult result = getDispatcher()->vkBindAccelerationStructureMemoryNV( static_cast<VkDevice>( m_device ), bindInfos.size(), reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV *>( bindInfos.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindAccelerationStructureMemoryNV" );
+    
+    
+  }
+
+  
+  // Records vkCmdBuildAccelerationStructureNV: builds (or, when update is true, refits)
+  // dst from src using the given instance and scratch buffers/offsets.
+   VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV & info, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBuildAccelerationStructureNV && "Function <vkCmdBuildAccelerationStructureNV> needs extension <VK_NV_ray_tracing> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdBuildAccelerationStructureNV( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkAccelerationStructureInfoNV *>( &info ), static_cast<VkBuffer>( instanceData ), static_cast<VkDeviceSize>( instanceOffset ), static_cast<VkBool32>( update ), static_cast<VkAccelerationStructureNV>( dst ), static_cast<VkAccelerationStructureNV>( src ), static_cast<VkBuffer>( scratch ), static_cast<VkDeviceSize>( scratchOffset ) );
+    
+    
+    
+  }
+
+  
+  // Records vkCmdCopyAccelerationStructureNV, copying src into dst with the given copy mode.
+   VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyAccelerationStructureNV && "Function <vkCmdCopyAccelerationStructureNV> needs extension <VK_NV_ray_tracing> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdCopyAccelerationStructureNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkAccelerationStructureNV>( dst ), static_cast<VkAccelerationStructureNV>( src ), static_cast<VkCopyAccelerationStructureModeKHR>( mode ) );
+    
+    
+    
+  }
+
+  
+  // Records vkCmdTraceRaysNV with the raygen/miss/hit/callable shader-binding-table
+  // buffers, offsets and strides, launching a width x height x depth ray grid.
+   VULKAN_HPP_INLINE void CommandBuffer::traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdTraceRaysNV && "Function <vkCmdTraceRaysNV> needs extension <VK_NV_ray_tracing> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdTraceRaysNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( raygenShaderBindingTableBuffer ), static_cast<VkDeviceSize>( raygenShaderBindingOffset ), static_cast<VkBuffer>( missShaderBindingTableBuffer ), static_cast<VkDeviceSize>( missShaderBindingOffset ), static_cast<VkDeviceSize>( missShaderBindingStride ), static_cast<VkBuffer>( hitShaderBindingTableBuffer ), static_cast<VkDeviceSize>( hitShaderBindingOffset ), static_cast<VkDeviceSize>( hitShaderBindingStride ), static_cast<VkBuffer>( callableShaderBindingTableBuffer ), static_cast<VkDeviceSize>( callableShaderBindingOffset ), static_cast<VkDeviceSize>( callableShaderBindingStride ), width, height, depth );
+    
+    
+    
+  }
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  // RAII factory: creates one pipeline per element of createInfos via the Pipelines
+  // owning-collection constructor; exceptions-only because errors throw from there.
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_RAII_NAMESPACE::Pipeline> Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::Pipelines( *this, pipelineCache, createInfos, allocator );
+  }
+#endif
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  // RAII factory: single-pipeline variant of createRayTracingPipelinesNV; exceptions-only
+  // because errors throw from the Pipeline constructor.
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Pipeline Device::createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache, VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::Pipeline( *this, pipelineCache, createInfo, allocator );
+  }
+#endif
+
+  // Fetches groupCount shader-group handles starting at firstGroup into a vector of
+  // DataType. dataSize must be an exact multiple of sizeof(DataType) (asserted).
+  template <typename DataType>
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::vector<DataType> Pipeline::getRayTracingShaderGroupHandlesNV( uint32_t firstGroup, uint32_t groupCount, size_t dataSize ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetRayTracingShaderGroupHandlesNV && "Function <vkGetRayTracingShaderGroupHandlesNV> needs extension <VK_NV_ray_tracing> enabled!" );
+
+        VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
+    std::vector<DataType> data( dataSize / sizeof( DataType ) );
+    VkResult result = getDispatcher()->vkGetRayTracingShaderGroupHandlesNV( static_cast<VkDevice>( m_device ), static_cast<VkPipeline>( m_pipeline ), firstGroup, groupCount, data.size() * sizeof( DataType ), reinterpret_cast<void *>( data.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingShaderGroupHandlesNV" );
+    
+    return data;
+  }
+
+  // Single-value convenience overload: retrieves sizeof(DataType) bytes of shader-group
+  // handle data into one DataType and returns it by value.
+  template <typename DataType>
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE DataType Pipeline::getRayTracingShaderGroupHandleNV( uint32_t firstGroup, uint32_t groupCount ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetRayTracingShaderGroupHandlesNV && "Function <vkGetRayTracingShaderGroupHandlesNV> needs extension <VK_NV_ray_tracing> enabled!" );
+
+    
+    DataType data;
+    VkResult result = getDispatcher()->vkGetRayTracingShaderGroupHandlesNV( static_cast<VkDevice>( m_device ), static_cast<VkPipeline>( m_pipeline ), firstGroup, groupCount, sizeof( DataType ), reinterpret_cast<void *>( &data ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingShaderGroupHandleNV" );
+    
+    return data;
+  }
+
+  // Fetches this acceleration structure's opaque handle (vkGetAccelerationStructureHandleNV)
+  // as a vector of DataType; dataSize must be an exact multiple of sizeof(DataType) (asserted).
+  template <typename DataType>
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::vector<DataType> AccelerationStructureNV::getHandle( size_t dataSize ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetAccelerationStructureHandleNV && "Function <vkGetAccelerationStructureHandleNV> needs extension <VK_NV_ray_tracing> enabled!" );
+
+        VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
+    std::vector<DataType> data( dataSize / sizeof( DataType ) );
+    VkResult result = getDispatcher()->vkGetAccelerationStructureHandleNV( static_cast<VkDevice>( m_device ), static_cast<VkAccelerationStructureNV>( m_accelerationStructure ), data.size() * sizeof( DataType ), reinterpret_cast<void *>( data.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::AccelerationStructureNV::getHandle" );
+    
+    return data;
+  }
+
+  template <typename DataType>
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE DataType AccelerationStructureNV::getHandle(  ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetAccelerationStructureHandleNV && "Function <vkGetAccelerationStructureHandleNV> needs extension <VK_NV_ray_tracing> enabled!" );
+
+    
+    DataType data;
+    VkResult result = getDispatcher()->vkGetAccelerationStructureHandleNV( static_cast<VkDevice>( m_device ), static_cast<VkAccelerationStructureNV>( m_accelerationStructure ), sizeof( DataType ), reinterpret_cast<void *>( &data ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::AccelerationStructureNV::getHandle" );
+    
+    return data;
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> const & accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteAccelerationStructuresPropertiesNV && "Function <vkCmdWriteAccelerationStructuresPropertiesNV> needs extension <VK_NV_ray_tracing> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdWriteAccelerationStructuresPropertiesNV( static_cast<VkCommandBuffer>( m_commandBuffer ), accelerationStructures.size(), reinterpret_cast<const VkAccelerationStructureNV *>( accelerationStructures.data() ), static_cast<VkQueryType>( queryType ), static_cast<VkQueryPool>( queryPool ), firstQuery );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void Pipeline::compileDeferredNV( uint32_t shader ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCompileDeferredNV && "Function <vkCompileDeferredNV> needs extension <VK_NV_ray_tracing> enabled!" );
+
+    
+    
+    VkResult result = getDispatcher()->vkCompileDeferredNV( static_cast<VkDevice>( m_device ), static_cast<VkPipeline>( m_pipeline ), shader );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Pipeline::compileDeferredNV" );
+    
+    
+  }
+
+  //=== VK_KHR_maintenance3 ===
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorSetLayoutSupportKHR && "Function <vkGetDescriptorSetLayoutSupportKHR> needs extension <VK_KHR_maintenance3> enabled!" );
+
+    
+    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support;
+    getDispatcher()->vkGetDescriptorSetLayoutSupportKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
+    
+    
+    return support;
+  }
+
+  template <typename X, typename Y, typename... Z>
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorSetLayoutSupportKHR && "Function <vkGetDescriptorSetLayoutSupportKHR> needs extension <VK_KHR_maintenance3> enabled!" );
+
+    
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support = structureChain.template get<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>();
+    getDispatcher()->vkGetDescriptorSetLayoutSupportKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
+    
+    
+    return structureChain;
+  }
+
+  //=== VK_KHR_draw_indirect_count ===
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawIndirectCountKHR && "Function <vkCmdDrawIndirectCountKHR> needs extension <VK_KHR_draw_indirect_count> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdDrawIndirectCountKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawIndexedIndirectCountKHR && "Function <vkCmdDrawIndexedIndirectCountKHR> needs extension <VK_KHR_draw_indirect_count> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdDrawIndexedIndirectCountKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
+    
+    
+    
+  }
+
+  //=== VK_EXT_external_memory_host ===
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void * pHostPointer ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryHostPointerPropertiesEXT && "Function <vkGetMemoryHostPointerPropertiesEXT> needs extension <VK_EXT_external_memory_host> enabled!" );
+
+    
+    VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT memoryHostPointerProperties;
+    VkResult result = getDispatcher()->vkGetMemoryHostPointerPropertiesEXT( static_cast<VkDevice>( m_device ), static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), pHostPointer, reinterpret_cast<VkMemoryHostPointerPropertiesEXT *>( &memoryHostPointerProperties ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryHostPointerPropertiesEXT" );
+    
+    return memoryHostPointerProperties;
+  }
+
+  //=== VK_AMD_buffer_marker ===
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, uint32_t marker ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteBufferMarkerAMD && "Function <vkCmdWriteBufferMarkerAMD> needs extension <VK_AMD_buffer_marker> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdWriteBufferMarkerAMD( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), marker );
+    
+    
+    
+  }
+
+  //=== VK_EXT_calibrated_timestamps ===
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::TimeDomainEXT> PhysicalDevice::getCalibrateableTimeDomainsEXT(  ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceCalibrateableTimeDomainsEXT && "Function <vkGetPhysicalDeviceCalibrateableTimeDomainsEXT> needs extension <VK_EXT_calibrated_timestamps> enabled!" );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::TimeDomainEXT> timeDomains;
+    uint32_t timeDomainCount;
+    VkResult result;
+    do
+    {
+      result = getDispatcher()->vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), &timeDomainCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && timeDomainCount )
+      {
+        timeDomains.resize( timeDomainCount );
+        result = getDispatcher()->vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), &timeDomainCount, reinterpret_cast<VkTimeDomainEXT *>( timeDomains.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsEXT" );
+    VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() );
+    if ( timeDomainCount < timeDomains.size() )
+    {
+      timeDomains.resize( timeDomainCount );
+    }
+    return timeDomains;
+  }
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::pair<std::vector<uint64_t>, uint64_t> Device::getCalibratedTimestampsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> const & timestampInfos ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetCalibratedTimestampsEXT && "Function <vkGetCalibratedTimestampsEXT> needs extension <VK_EXT_calibrated_timestamps> enabled!" );
+
+    
+    std::pair<std::vector<uint64_t>,uint64_t> data( std::piecewise_construct, std::forward_as_tuple( timestampInfos.size() ), std::forward_as_tuple( 0 ) );
+    std::vector<uint64_t> & timestamps = data.first;
+    uint64_t & maxDeviation = data.second;
+    VkResult result = getDispatcher()->vkGetCalibratedTimestampsEXT( static_cast<VkDevice>( m_device ), timestampInfos.size(), reinterpret_cast<const VkCalibratedTimestampInfoEXT *>( timestampInfos.data() ), timestamps.data(), &maxDeviation );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" );
+    
+    return data;
+  }
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::pair<uint64_t, uint64_t> Device::getCalibratedTimestampEXT( const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT & timestampInfo ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetCalibratedTimestampsEXT && "Function <vkGetCalibratedTimestampsEXT> needs extension <VK_EXT_calibrated_timestamps> enabled!" );
+
+    
+    std::pair<uint64_t,uint64_t> data;
+    uint64_t & timestamp = data.first;
+    uint64_t & maxDeviation = data.second;
+    VkResult result = getDispatcher()->vkGetCalibratedTimestampsEXT( static_cast<VkDevice>( m_device ), 1, reinterpret_cast<const VkCalibratedTimestampInfoEXT *>( &timestampInfo ), &timestamp, &maxDeviation );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampEXT" );
+    
+    return data;
+  }
+
+  //=== VK_NV_mesh_shader ===
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMeshTasksNV && "Function <vkCmdDrawMeshTasksNV> needs extension <VK_NV_mesh_shader> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdDrawMeshTasksNV( static_cast<VkCommandBuffer>( m_commandBuffer ), taskCount, firstTask );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMeshTasksIndirectNV && "Function <vkCmdDrawMeshTasksIndirectNV> needs extension <VK_NV_mesh_shader> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdDrawMeshTasksIndirectNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMeshTasksIndirectCountNV && "Function <vkCmdDrawMeshTasksIndirectCountNV> needs extension <VK_NV_mesh_shader> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdDrawMeshTasksIndirectCountNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
+    
+    
+    
+  }
+
+  //=== VK_NV_scissor_exclusive ===
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetExclusiveScissorNV && "Function <vkCmdSetExclusiveScissorNV> needs extension <VK_NV_scissor_exclusive> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdSetExclusiveScissorNV( static_cast<VkCommandBuffer>( m_commandBuffer ), firstExclusiveScissor, exclusiveScissors.size(), reinterpret_cast<const VkRect2D *>( exclusiveScissors.data() ) );
+    
+    
+    
+  }
+
+  //=== VK_NV_device_diagnostic_checkpoints ===
+
+  template <typename CheckpointMarkerType>
+   VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( CheckpointMarkerType const & checkpointMarker ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCheckpointNV && "Function <vkCmdSetCheckpointNV> needs extension <VK_NV_device_diagnostic_checkpoints> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdSetCheckpointNV( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const void *>( &checkpointMarker ) );
+    
+    
+    
+  }
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV> Queue::getCheckpointDataNV(  ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetQueueCheckpointDataNV && "Function <vkGetQueueCheckpointDataNV> needs extension <VK_NV_device_diagnostic_checkpoints> enabled!" );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV> checkpointData;
+    uint32_t checkpointDataCount;
+    getDispatcher()->vkGetQueueCheckpointDataNV( static_cast<VkQueue>( m_queue ), &checkpointDataCount, nullptr );
+    checkpointData.resize( checkpointDataCount );
+    getDispatcher()->vkGetQueueCheckpointDataNV( static_cast<VkQueue>( m_queue ), &checkpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( checkpointData.data() ) );
+    
+    VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
+    if ( checkpointDataCount < checkpointData.size() )
+    {
+      checkpointData.resize( checkpointDataCount );
+    }
+    return checkpointData;
+  }
+
+  //=== VK_KHR_timeline_semaphore ===
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE uint64_t Semaphore::getCounterValueKHR(  ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetSemaphoreCounterValueKHR && "Function <vkGetSemaphoreCounterValueKHR> needs extension <VK_KHR_timeline_semaphore> enabled!" );
+
+    
+    uint64_t value;
+    VkResult result = getDispatcher()->vkGetSemaphoreCounterValueKHR( static_cast<VkDevice>( m_device ), static_cast<VkSemaphore>( m_semaphore ), &value );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Semaphore::getCounterValueKHR" );
+    
+    return value;
+  }
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, uint64_t timeout ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkWaitSemaphoresKHR && "Function <vkWaitSemaphoresKHR> needs extension <VK_KHR_timeline_semaphore> enabled!" );
+
+    
+    
+    VkResult result = getDispatcher()->vkWaitSemaphoresKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreWaitInfo *>( &waitInfo ), timeout );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphoresKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } );
+    
+    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+  }
+
+  
+   VULKAN_HPP_INLINE void Device::signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkSignalSemaphoreKHR && "Function <vkSignalSemaphoreKHR> needs extension <VK_KHR_timeline_semaphore> enabled!" );
+
+    
+    
+    VkResult result = getDispatcher()->vkSignalSemaphoreKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreSignalInfo *>( &signalInfo ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphoreKHR" );
+    
+    
+  }
+
+  //=== VK_INTEL_performance_query ===
+
+  
+   VULKAN_HPP_INLINE void Device::initializePerformanceApiINTEL( const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL & initializeInfo ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkInitializePerformanceApiINTEL && "Function <vkInitializePerformanceApiINTEL> needs extension <VK_INTEL_performance_query> enabled!" );
+
+    
+    
+    VkResult result = getDispatcher()->vkInitializePerformanceApiINTEL( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkInitializePerformanceApiInfoINTEL *>( &initializeInfo ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::initializePerformanceApiINTEL" );
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void Device::uninitializePerformanceApiINTEL(  ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkUninitializePerformanceApiINTEL && "Function <vkUninitializePerformanceApiINTEL> needs extension <VK_INTEL_performance_query> enabled!" );
+
+    
+    
+    getDispatcher()->vkUninitializePerformanceApiINTEL( static_cast<VkDevice>( m_device ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL & markerInfo ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPerformanceMarkerINTEL && "Function <vkCmdSetPerformanceMarkerINTEL> needs extension <VK_INTEL_performance_query> enabled!" );
+
+    
+    
+    VkResult result = getDispatcher()->vkCmdSetPerformanceMarkerINTEL( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkPerformanceMarkerInfoINTEL *>( &markerInfo ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceMarkerINTEL" );
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL & markerInfo ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPerformanceStreamMarkerINTEL && "Function <vkCmdSetPerformanceStreamMarkerINTEL> needs extension <VK_INTEL_performance_query> enabled!" );
+
+    
+    
+    VkResult result = getDispatcher()->vkCmdSetPerformanceStreamMarkerINTEL( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL *>( &markerInfo ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceStreamMarkerINTEL" );
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL & overrideInfo ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPerformanceOverrideINTEL && "Function <vkCmdSetPerformanceOverrideINTEL> needs extension <VK_INTEL_performance_query> enabled!" );
+
+    
+    
+    VkResult result = getDispatcher()->vkCmdSetPerformanceOverrideINTEL( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkPerformanceOverrideInfoINTEL *>( &overrideInfo ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceOverrideINTEL" );
+    
+    
+  }
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::PerformanceConfigurationINTEL Device::acquirePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const & acquireInfo ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::PerformanceConfigurationINTEL( *this, acquireInfo );
+  }
+#endif
+
+  
+   VULKAN_HPP_INLINE void Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkQueueSetPerformanceConfigurationINTEL && "Function <vkQueueSetPerformanceConfigurationINTEL> needs extension <VK_INTEL_performance_query> enabled!" );
+
+    
+    
+    VkResult result = getDispatcher()->vkQueueSetPerformanceConfigurationINTEL( static_cast<VkQueue>( m_queue ), static_cast<VkPerformanceConfigurationINTEL>( configuration ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::setPerformanceConfigurationINTEL" );
+    
+    
+  }
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PerformanceValueINTEL Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetPerformanceParameterINTEL && "Function <vkGetPerformanceParameterINTEL> needs extension <VK_INTEL_performance_query> enabled!" );
+
+    
+    VULKAN_HPP_NAMESPACE::PerformanceValueINTEL value;
+    VkResult result = getDispatcher()->vkGetPerformanceParameterINTEL( static_cast<VkDevice>( m_device ), static_cast<VkPerformanceParameterTypeINTEL>( parameter ), reinterpret_cast<VkPerformanceValueINTEL *>( &value ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPerformanceParameterINTEL" );
+    
+    return value;
+  }
+
+  //=== VK_AMD_display_native_hdr ===
+
+  
+   VULKAN_HPP_INLINE void SwapchainKHR::setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkSetLocalDimmingAMD && "Function <vkSetLocalDimmingAMD> needs extension <VK_AMD_display_native_hdr> enabled!" );
+
+    
+    
+    getDispatcher()->vkSetLocalDimmingAMD( static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( m_swapchain ), static_cast<VkBool32>( localDimmingEnable ) );
+    
+    
+    
+  }
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_imagepipe_surface ===
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR Instance::createImagePipeSurfaceFUCHSIA( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator );
+  }
+#endif
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+  //=== VK_EXT_metal_surface ===
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR Instance::createMetalSurfaceEXT( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator );
+  }
+#endif
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  //=== VK_KHR_fragment_shading_rate ===
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR> PhysicalDevice::getFragmentShadingRatesKHR(  ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFragmentShadingRatesKHR && "Function <vkGetPhysicalDeviceFragmentShadingRatesKHR> needs extension <VK_KHR_fragment_shading_rate> enabled!" );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR> fragmentShadingRates;
+    uint32_t fragmentShadingRateCount;
+    VkResult result;
+    do
+    {
+      result = getDispatcher()->vkGetPhysicalDeviceFragmentShadingRatesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), &fragmentShadingRateCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && fragmentShadingRateCount )
+      {
+        fragmentShadingRates.resize( fragmentShadingRateCount );
+        result = getDispatcher()->vkGetPhysicalDeviceFragmentShadingRatesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), &fragmentShadingRateCount, reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( fragmentShadingRates.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getFragmentShadingRatesKHR" );
+    VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() );
+    if ( fragmentShadingRateCount < fragmentShadingRates.size() )
+    {
+      fragmentShadingRates.resize( fragmentShadingRateCount );
+    }
+    return fragmentShadingRates;
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D & fragmentSize, const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2] ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetFragmentShadingRateKHR && "Function <vkCmdSetFragmentShadingRateKHR> needs extension <VK_KHR_fragment_shading_rate> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdSetFragmentShadingRateKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkExtent2D *>( &fragmentSize ), reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) );
+    
+    
+    
+  }
+
+  //=== VK_EXT_buffer_device_address ===
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress Device::getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferDeviceAddressEXT && "Function <vkGetBufferDeviceAddressEXT> needs extension <VK_EXT_buffer_device_address> enabled!" );
+
+    
+    
+    VkDeviceAddress result = getDispatcher()->vkGetBufferDeviceAddressEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
+    
+    
+    return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result );
+  }
+
+  //=== VK_EXT_tooling_info ===
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties> PhysicalDevice::getToolPropertiesEXT(  ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceToolPropertiesEXT && "Function <vkGetPhysicalDeviceToolPropertiesEXT> needs extension <VK_EXT_tooling_info> enabled!" );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties> toolProperties;
+    uint32_t toolCount;
+    VkResult result;
+    do
+    {
+      result = getDispatcher()->vkGetPhysicalDeviceToolPropertiesEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), &toolCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && toolCount )
+      {
+        toolProperties.resize( toolCount );
+        result = getDispatcher()->vkGetPhysicalDeviceToolPropertiesEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" );
+    VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() );
+    if ( toolCount < toolProperties.size() )
+    {
+      toolProperties.resize( toolCount );
+    }
+    return toolProperties;
+  }
+
+  //=== VK_KHR_present_wait ===
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result SwapchainKHR::waitForPresent( uint64_t presentId, uint64_t timeout ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkWaitForPresentKHR && "Function <vkWaitForPresentKHR> needs extension <VK_KHR_present_wait> enabled!" );
+
+    
+    
+    VkResult result = getDispatcher()->vkWaitForPresentKHR( static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( m_swapchain ), presentId, timeout );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::waitForPresent", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
+    
+    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+  }
+
+  //=== VK_NV_cooperative_matrix ===
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV> PhysicalDevice::getCooperativeMatrixPropertiesNV(  ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceCooperativeMatrixPropertiesNV && "Function <vkGetPhysicalDeviceCooperativeMatrixPropertiesNV> needs extension <VK_NV_cooperative_matrix> enabled!" );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV> properties;
+    uint32_t propertyCount;
+    VkResult result;
+    do
+    {
+      result = getDispatcher()->vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = getDispatcher()->vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( properties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesNV" );
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    if ( propertyCount < properties.size() )
+    {
+      properties.resize( propertyCount );
+    }
+    return properties;
+  }
+
+  //=== VK_NV_coverage_reduction_mode ===
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV> PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV(  ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV && "Function <vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV> needs extension <VK_NV_coverage_reduction_mode> enabled!" );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV> combinations;
+    uint32_t combinationCount;
+    VkResult result;
+    do
+    {
+      result = getDispatcher()->vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( static_cast<VkPhysicalDevice>( m_physicalDevice ), &combinationCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && combinationCount )
+      {
+        combinations.resize( combinationCount );
+        result = getDispatcher()->vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( static_cast<VkPhysicalDevice>( m_physicalDevice ), &combinationCount, reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( combinations.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" );
+    VULKAN_HPP_ASSERT( combinationCount <= combinations.size() );
+    if ( combinationCount < combinations.size() )
+    {
+      combinations.resize( combinationCount );
+    }
+    return combinations;
+  }
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_EXT_full_screen_exclusive ===
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR> PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfacePresentModes2EXT && "Function <vkGetPhysicalDeviceSurfacePresentModes2EXT> needs extension <VK_EXT_full_screen_exclusive> enabled!" );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR> presentModes;
+    uint32_t presentModeCount;
+    VkResult result;
+    do
+    {
+      result = getDispatcher()->vkGetPhysicalDeviceSurfacePresentModes2EXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &presentModeCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && presentModeCount )
+      {
+        presentModes.resize( presentModeCount );
+        result = getDispatcher()->vkGetPhysicalDeviceSurfacePresentModes2EXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &presentModeCount, reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" );
+    VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
+    if ( presentModeCount < presentModes.size() )
+    {
+      presentModes.resize( presentModeCount );
+    }
+    return presentModes;
+  }
+
+  
+   VULKAN_HPP_INLINE void SwapchainKHR::acquireFullScreenExclusiveModeEXT(  ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireFullScreenExclusiveModeEXT && "Function <vkAcquireFullScreenExclusiveModeEXT> needs extension <VK_EXT_full_screen_exclusive> enabled!" );
+
+    
+    
+    VkResult result = getDispatcher()->vkAcquireFullScreenExclusiveModeEXT( static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( m_swapchain ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::acquireFullScreenExclusiveModeEXT" );
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void SwapchainKHR::releaseFullScreenExclusiveModeEXT(  ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkReleaseFullScreenExclusiveModeEXT && "Function <vkReleaseFullScreenExclusiveModeEXT> needs extension <VK_EXT_full_screen_exclusive> enabled!" );
+
+    
+    
+    VkResult result = getDispatcher()->vkReleaseFullScreenExclusiveModeEXT( static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( m_swapchain ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::releaseFullScreenExclusiveModeEXT" );
+    
+    
+  }
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR Device::getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceGroupSurfacePresentModes2EXT && "Function <vkGetDeviceGroupSurfacePresentModes2EXT> needs extension <VK_EXT_full_screen_exclusive> enabled!" );
+
+    
+    VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes;
+    VkResult result = getDispatcher()->vkGetDeviceGroupSurfacePresentModes2EXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( &modes ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModes2EXT" );
+    
+    return modes;
+  }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_EXT_headless_surface ===
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR Instance::createHeadlessSurfaceEXT( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator );
+  }
+#endif
+
+  //=== VK_KHR_buffer_device_address ===
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress Device::getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferDeviceAddressKHR && "Function <vkGetBufferDeviceAddressKHR> needs extension <VK_KHR_buffer_device_address> enabled!" );
+
+    
+    
+    VkDeviceAddress result = getDispatcher()->vkGetBufferDeviceAddressKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
+    
+    
+    return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result );
+  }
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferOpaqueCaptureAddressKHR && "Function <vkGetBufferOpaqueCaptureAddressKHR> needs extension <VK_KHR_buffer_device_address> enabled!" );
+
+    
+    
+    uint64_t result = getDispatcher()->vkGetBufferOpaqueCaptureAddressKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
+    
+    
+    return result;
+  }
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceMemoryOpaqueCaptureAddressKHR && "Function <vkGetDeviceMemoryOpaqueCaptureAddressKHR> needs extension <VK_KHR_buffer_device_address> enabled!" );
+
+    
+    
+    uint64_t result = getDispatcher()->vkGetDeviceMemoryOpaqueCaptureAddressKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( &info ) );
+    
+    
+    return result;
+  }
+
+  //=== VK_EXT_line_rasterization ===
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLineStippleEXT && "Function <vkCmdSetLineStippleEXT> needs extension <VK_EXT_line_rasterization> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdSetLineStippleEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), lineStippleFactor, lineStipplePattern );
+    
+    
+    
+  }
+
+  //=== VK_EXT_host_query_reset ===
+
+  
+   VULKAN_HPP_INLINE void QueryPool::resetEXT( uint32_t firstQuery, uint32_t queryCount ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkResetQueryPoolEXT && "Function <vkResetQueryPoolEXT> needs extension <VK_EXT_host_query_reset> enabled!" );
+
+    
+    
+    getDispatcher()->vkResetQueryPoolEXT( static_cast<VkDevice>( m_device ), static_cast<VkQueryPool>( m_queryPool ), firstQuery, queryCount );
+    
+    
+    
+  }
+
+  //=== VK_EXT_extended_dynamic_state ===
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCullModeEXT && "Function <vkCmdSetCullModeEXT> needs extension <VK_EXT_extended_dynamic_state> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdSetCullModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCullModeFlags>( cullMode ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetFrontFaceEXT && "Function <vkCmdSetFrontFaceEXT> needs extension <VK_EXT_extended_dynamic_state> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdSetFrontFaceEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkFrontFace>( frontFace ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPrimitiveTopologyEXT && "Function <vkCmdSetPrimitiveTopologyEXT> needs extension <VK_EXT_extended_dynamic_state> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdSetPrimitiveTopologyEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPrimitiveTopology>( primitiveTopology ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCountEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewportWithCountEXT && "Function <vkCmdSetViewportWithCountEXT> needs extension <VK_EXT_extended_dynamic_state> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdSetViewportWithCountEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCountEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetScissorWithCountEXT && "Function <vkCmdSetScissorWithCountEXT> needs extension <VK_EXT_extended_dynamic_state> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdSetScissorWithCountEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2EXT( uint32_t firstBinding, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindVertexBuffers2EXT && "Function <vkCmdBindVertexBuffers2EXT> needs extension <VK_EXT_extended_dynamic_state> enabled!" );
+    if ( buffers.size() != offsets.size() )
+  {
+    #ifndef VULKAN_HPP_NO_EXCEPTIONS
+    throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != offsets.size()" );
+    #else
+    LOG(FATAL) << VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != offsets.size()";
+    #endif
+  }
+    if ( !sizes.empty() && buffers.size() != sizes.size() )
+  {
+    #ifndef VULKAN_HPP_NO_EXCEPTIONS
+    throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != sizes.size()" );
+    #else
+    LOG(FATAL) << VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != sizes.size()";
+    #endif
+  }
+    if ( !strides.empty() && buffers.size() != strides.size() )
+  {
+    #ifndef VULKAN_HPP_NO_EXCEPTIONS
+    throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != strides.size()" );
+    #else
+    LOG(FATAL) << VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != strides.size()";
+    #endif
+  }
+
+    
+    
+    getDispatcher()->vkCmdBindVertexBuffers2EXT( static_cast<VkCommandBuffer>( m_commandBuffer ), firstBinding, buffers.size(), reinterpret_cast<const VkBuffer *>( buffers.data() ), reinterpret_cast<const VkDeviceSize *>( offsets.data() ), reinterpret_cast<const VkDeviceSize *>( sizes.data() ), reinterpret_cast<const VkDeviceSize *>( strides.data() ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthTestEnableEXT && "Function <vkCmdSetDepthTestEnableEXT> needs extension <VK_EXT_extended_dynamic_state> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdSetDepthTestEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthTestEnable ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthWriteEnableEXT && "Function <vkCmdSetDepthWriteEnableEXT> needs extension <VK_EXT_extended_dynamic_state> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdSetDepthWriteEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthWriteEnable ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthCompareOpEXT && "Function <vkCmdSetDepthCompareOpEXT> needs extension <VK_EXT_extended_dynamic_state> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdSetDepthCompareOpEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCompareOp>( depthCompareOp ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthBoundsTestEnableEXT && "Function <vkCmdSetDepthBoundsTestEnableEXT> needs extension <VK_EXT_extended_dynamic_state> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdSetDepthBoundsTestEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthBoundsTestEnable ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilTestEnableEXT && "Function <vkCmdSetStencilTestEnableEXT> needs extension <VK_EXT_extended_dynamic_state> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdSetStencilTestEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( stencilTestEnable ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, VULKAN_HPP_NAMESPACE::StencilOp failOp, VULKAN_HPP_NAMESPACE::StencilOp passOp, VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, VULKAN_HPP_NAMESPACE::CompareOp compareOp ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilOpEXT && "Function <vkCmdSetStencilOpEXT> needs extension <VK_EXT_extended_dynamic_state> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdSetStencilOpEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkStencilFaceFlags>( faceMask ), static_cast<VkStencilOp>( failOp ), static_cast<VkStencilOp>( passOp ), static_cast<VkStencilOp>( depthFailOp ), static_cast<VkCompareOp>( compareOp ) );
+    
+    
+    
+  }
+
+  //=== VK_KHR_deferred_host_operations ===
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR Device::createDeferredOperationKHR( VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR( *this, allocator );
+  }
+#endif
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE uint32_t DeferredOperationKHR::getMaxConcurrency(  ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeferredOperationMaxConcurrencyKHR && "Function <vkGetDeferredOperationMaxConcurrencyKHR> needs extension <VK_KHR_deferred_host_operations> enabled!" );
+
+    
+    
+    uint32_t result = getDispatcher()->vkGetDeferredOperationMaxConcurrencyKHR( static_cast<VkDevice>( m_device ), static_cast<VkDeferredOperationKHR>( m_operation ) );
+    
+    
+    return result;
+  }
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result DeferredOperationKHR::getResult(  ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeferredOperationResultKHR && "Function <vkGetDeferredOperationResultKHR> needs extension <VK_KHR_deferred_host_operations> enabled!" );
+
+    
+    
+    VkResult result = getDispatcher()->vkGetDeferredOperationResultKHR( static_cast<VkDevice>( m_device ), static_cast<VkDeferredOperationKHR>( m_operation ) );
+    
+    
+    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+  }
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result DeferredOperationKHR::join(  ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkDeferredOperationJoinKHR && "Function <vkDeferredOperationJoinKHR> needs extension <VK_KHR_deferred_host_operations> enabled!" );
+
+    
+    
+    VkResult result = getDispatcher()->vkDeferredOperationJoinKHR( static_cast<VkDevice>( m_device ), static_cast<VkDeferredOperationKHR>( m_operation ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::DeferredOperationKHR::join", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eThreadDoneKHR, VULKAN_HPP_NAMESPACE::Result::eThreadIdleKHR } );
+    
+    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+  }
+
+  //=== VK_KHR_pipeline_executable_properties ===
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR> Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelineExecutablePropertiesKHR && "Function <vkGetPipelineExecutablePropertiesKHR> needs extension <VK_KHR_pipeline_executable_properties> enabled!" );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR> properties;
+    uint32_t executableCount;
+    VkResult result;
+    do
+    {
+      result = getDispatcher()->vkGetPipelineExecutablePropertiesKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ), &executableCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && executableCount )
+      {
+        properties.resize( executableCount );
+        result = getDispatcher()->vkGetPipelineExecutablePropertiesKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ), &executableCount, reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( properties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutablePropertiesKHR" );
+    VULKAN_HPP_ASSERT( executableCount <= properties.size() );
+    if ( executableCount < properties.size() )
+    {
+      properties.resize( executableCount );
+    }
+    return properties;
+  }
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR> Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelineExecutableStatisticsKHR && "Function <vkGetPipelineExecutableStatisticsKHR> needs extension <VK_KHR_pipeline_executable_properties> enabled!" );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR> statistics;
+    uint32_t statisticCount;
+    VkResult result;
+    do
+    {
+      result = getDispatcher()->vkGetPipelineExecutableStatisticsKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &statisticCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && statisticCount )
+      {
+        statistics.resize( statisticCount );
+        result = getDispatcher()->vkGetPipelineExecutableStatisticsKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &statisticCount, reinterpret_cast<VkPipelineExecutableStatisticKHR *>( statistics.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableStatisticsKHR" );
+    VULKAN_HPP_ASSERT( statisticCount <= statistics.size() );
+    if ( statisticCount < statistics.size() )
+    {
+      statistics.resize( statisticCount );
+    }
+    return statistics;
+  }
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR> Device::getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelineExecutableInternalRepresentationsKHR && "Function <vkGetPipelineExecutableInternalRepresentationsKHR> needs extension <VK_KHR_pipeline_executable_properties> enabled!" );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR> internalRepresentations;
+    uint32_t internalRepresentationCount;
+    VkResult result;
+    do
+    {
+      result = getDispatcher()->vkGetPipelineExecutableInternalRepresentationsKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &internalRepresentationCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && internalRepresentationCount )
+      {
+        internalRepresentations.resize( internalRepresentationCount );
+        result = getDispatcher()->vkGetPipelineExecutableInternalRepresentationsKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &internalRepresentationCount, reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( internalRepresentations.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableInternalRepresentationsKHR" );
+    VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() );
+    if ( internalRepresentationCount < internalRepresentations.size() )
+    {
+      internalRepresentations.resize( internalRepresentationCount );
+    }
+    return internalRepresentations;
+  }
+
+  //=== VK_EXT_swapchain_maintenance1 ===
+
+  
+   VULKAN_HPP_INLINE void Device::releaseSwapchainImagesEXT( const VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT & releaseInfo ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkReleaseSwapchainImagesEXT && "Function <vkReleaseSwapchainImagesEXT> needs extension <VK_EXT_swapchain_maintenance1> enabled!" );
+
+    
+    
+    VkResult result = getDispatcher()->vkReleaseSwapchainImagesEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkReleaseSwapchainImagesInfoEXT *>( &releaseInfo ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::releaseSwapchainImagesEXT" );
+    
+    
+  }
+
+  //=== VK_NV_device_generated_commands ===
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetGeneratedCommandsMemoryRequirementsNV && "Function <vkGetGeneratedCommandsMemoryRequirementsNV> needs extension <VK_NV_device_generated_commands> enabled!" );
+
+    
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
+    getDispatcher()->vkGetGeneratedCommandsMemoryRequirementsNV( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+    
+    
+    return memoryRequirements;
+  }
+
+  template <typename X, typename Y, typename... Z>
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetGeneratedCommandsMemoryRequirementsNV && "Function <vkGetGeneratedCommandsMemoryRequirementsNV> needs extension <VK_NV_device_generated_commands> enabled!" );
+
+    
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
+    getDispatcher()->vkGetGeneratedCommandsMemoryRequirementsNV( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+    
+    
+    return structureChain;
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPreprocessGeneratedCommandsNV && "Function <vkCmdPreprocessGeneratedCommandsNV> needs extension <VK_NV_device_generated_commands> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdPreprocessGeneratedCommandsNV( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkGeneratedCommandsInfoNV *>( &generatedCommandsInfo ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdExecuteGeneratedCommandsNV && "Function <vkCmdExecuteGeneratedCommandsNV> needs extension <VK_NV_device_generated_commands> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdExecuteGeneratedCommandsNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( isPreprocessed ), reinterpret_cast<const VkGeneratedCommandsInfoNV *>( &generatedCommandsInfo ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t groupIndex ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindPipelineShaderGroupNV && "Function <vkCmdBindPipelineShaderGroupNV> needs extension <VK_NV_device_generated_commands> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdBindPipelineShaderGroupNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ), groupIndex );
+    
+    
+    
+  }
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::IndirectCommandsLayoutNV Device::createIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::IndirectCommandsLayoutNV( *this, createInfo, allocator );
+  }
+#endif
+
+  //=== VK_EXT_acquire_drm_display ===
+
+  
+   VULKAN_HPP_INLINE void PhysicalDevice::acquireDrmDisplayEXT( int32_t drmFd, VULKAN_HPP_NAMESPACE::DisplayKHR display ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireDrmDisplayEXT && "Function <vkAcquireDrmDisplayEXT> needs extension <VK_EXT_acquire_drm_display> enabled!" );
+
+    
+    
+    VkResult result = getDispatcher()->vkAcquireDrmDisplayEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), drmFd, static_cast<VkDisplayKHR>( display ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireDrmDisplayEXT" );
+    
+    
+  }
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DisplayKHR PhysicalDevice::getDrmDisplayEXT( int32_t drmFd, uint32_t connectorId ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::DisplayKHR( *this, drmFd, connectorId );
+  }
+#endif
+
+  //=== VK_EXT_private_data ===
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::PrivateDataSlot Device::createPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::PrivateDataSlot( *this, createInfo, allocator );
+  }
+#endif
+
+  // Destroys a private data slot (VK_EXT_private_data). noexcept: a missing extension is only caught by the assert.
+   VULKAN_HPP_INLINE void Device::destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkDestroyPrivateDataSlotEXT && "Function <vkDestroyPrivateDataSlotEXT> needs extension <VK_EXT_private_data> enabled!" );
+
+    
+    
+    getDispatcher()->vkDestroyPrivateDataSlotEXT( static_cast<VkDevice>( m_device ), static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+
+  // Stores 64 bits of caller data for (objectType_, objectHandle) in the slot; non-success results go to resultCheck.
+   VULKAN_HPP_INLINE void Device::setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, uint64_t data ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkSetPrivateDataEXT && "Function <vkSetPrivateDataEXT> needs extension <VK_EXT_private_data> enabled!" );
+
+    
+    
+    VkResult result = getDispatcher()->vkSetPrivateDataEXT( static_cast<VkDevice>( m_device ), static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateDataEXT" );
+    
+    
+  }
+
+  // Reads back the 64-bit value previously stored for (objectType_, objectHandle) in the slot.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE uint64_t Device::getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetPrivateDataEXT && "Function <vkGetPrivateDataEXT> needs extension <VK_EXT_private_data> enabled!" );
+
+    
+    uint64_t data;
+    getDispatcher()->vkGetPrivateDataEXT( static_cast<VkDevice>( m_device ), static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), &data );
+    
+    
+    return data;
+  }
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  //=== VK_KHR_video_encode_queue ===
+
+  // Records a video-encode operation; only compiled when VK_ENABLE_BETA_EXTENSIONS is defined.
+   VULKAN_HPP_INLINE void CommandBuffer::encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR & encodeInfo ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEncodeVideoKHR && "Function <vkCmdEncodeVideoKHR> needs extension <VK_KHR_video_encode_queue> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdEncodeVideoKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkVideoEncodeInfoKHR *>( &encodeInfo ) );
+    
+    
+    
+  }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+  //=== VK_EXT_metal_objects ===
+
+  // Fills and returns an ExportMetalObjectsInfoEXT for this device (Metal platforms only).
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT Device::exportMetalObjectsEXT(  ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkExportMetalObjectsEXT && "Function <vkExportMetalObjectsEXT> needs extension <VK_EXT_metal_objects> enabled!" );
+
+    
+    VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT metalObjectsInfo;
+    getDispatcher()->vkExportMetalObjectsEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<VkExportMetalObjectsInfoEXT *>( &metalObjectsInfo ) );
+    
+    
+    return metalObjectsInfo;
+  }
+
+  // StructureChain overload: fills the chain's ExportMetalObjectsInfoEXT element (and chained pNext structs) in place.
+  template <typename X, typename Y, typename... Z>
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::exportMetalObjectsEXT(  ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkExportMetalObjectsEXT && "Function <vkExportMetalObjectsEXT> needs extension <VK_EXT_metal_objects> enabled!" );
+
+    
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT & metalObjectsInfo = structureChain.template get<VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT>();
+    getDispatcher()->vkExportMetalObjectsEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<VkExportMetalObjectsInfoEXT *>( &metalObjectsInfo ) );
+    
+    
+    return structureChain;
+  }
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  //=== VK_KHR_synchronization2 ===
+
+  // Records vkCmdSetEvent2KHR with the extended DependencyInfo.
+   VULKAN_HPP_INLINE void CommandBuffer::setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetEvent2KHR && "Function <vkCmdSetEvent2KHR> needs extension <VK_KHR_synchronization2> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdSetEvent2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) );
+    
+    
+    
+  }
+
+  // Records vkCmdResetEvent2KHR for the given pipeline stage mask.
+   VULKAN_HPP_INLINE void CommandBuffer::resetEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdResetEvent2KHR && "Function <vkCmdResetEvent2KHR> needs extension <VK_KHR_synchronization2> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdResetEvent2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags2>( stageMask ) );
+    
+    
+    
+  }
+
+  // Waits on events: requires one DependencyInfo per event; mismatched sizes throw LogicError (or LOG(FATAL) when exceptions are off).
+   VULKAN_HPP_INLINE void CommandBuffer::waitEvents2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfo> const & dependencyInfos ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWaitEvents2KHR && "Function <vkCmdWaitEvents2KHR> needs extension <VK_KHR_synchronization2> enabled!" );
+    if ( events.size() != dependencyInfos.size() )
+  {
+    #ifndef VULKAN_HPP_NO_EXCEPTIONS
+    throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::waitEvents2KHR: events.size() != dependencyInfos.size()" );
+    #else
+    LOG(FATAL) << VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::waitEvents2KHR: events.size() != dependencyInfos.size()";
+    #endif
+  }
+
+    
+    
+    getDispatcher()->vkCmdWaitEvents2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), events.size(), reinterpret_cast<const VkEvent *>( events.data() ), reinterpret_cast<const VkDependencyInfo *>( dependencyInfos.data() ) );
+    
+    
+    
+  }
+
+  // Records a pipeline barrier described by a single DependencyInfo.
+   VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPipelineBarrier2KHR && "Function <vkCmdPipelineBarrier2KHR> needs extension <VK_KHR_synchronization2> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdPipelineBarrier2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) );
+    
+    
+    
+  }
+
+  // Writes a timestamp for the given stage into queryPool[query].
+   VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp2KHR( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteTimestamp2KHR && "Function <vkCmdWriteTimestamp2KHR> needs extension <VK_KHR_synchronization2> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdWriteTimestamp2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineStageFlags2>( stage ), static_cast<VkQueryPool>( queryPool ), query );
+    
+    
+    
+  }
+
+  // Submits SubmitInfo2 batches with an optional fence; non-success results go to resultCheck.
+   VULKAN_HPP_INLINE void Queue::submit2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2> const & submits, VULKAN_HPP_NAMESPACE::Fence fence ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkQueueSubmit2KHR && "Function <vkQueueSubmit2KHR> needs extension <VK_KHR_synchronization2> enabled!" );
+
+    
+    
+    VkResult result = getDispatcher()->vkQueueSubmit2KHR( static_cast<VkQueue>( m_queue ), submits.size(), reinterpret_cast<const VkSubmitInfo2 *>( submits.data() ), static_cast<VkFence>( fence ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2KHR" );
+    
+    
+  }
+
+  // Writes a 32-bit marker into dstBuffer at dstOffset after the given stage (AMD marker variant exposed by synchronization2).
+   VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, uint32_t marker ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteBufferMarker2AMD && "Function <vkCmdWriteBufferMarker2AMD> needs extension <VK_KHR_synchronization2> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdWriteBufferMarker2AMD( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineStageFlags2>( stage ), static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), marker );
+    
+    
+    
+  }
+
+  // Two-call enumeration: query the checkpoint count, fill the vector, then shrink if fewer entries were written.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV> Queue::getCheckpointData2NV(  ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetQueueCheckpointData2NV && "Function <vkGetQueueCheckpointData2NV> needs extension <VK_KHR_synchronization2> enabled!" );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV> checkpointData;
+    uint32_t checkpointDataCount;
+    getDispatcher()->vkGetQueueCheckpointData2NV( static_cast<VkQueue>( m_queue ), &checkpointDataCount, nullptr );
+    checkpointData.resize( checkpointDataCount );
+    getDispatcher()->vkGetQueueCheckpointData2NV( static_cast<VkQueue>( m_queue ), &checkpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( checkpointData.data() ) );
+    
+    VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
+    if ( checkpointDataCount < checkpointData.size() )
+    {
+      checkpointData.resize( checkpointDataCount );
+    }
+    return checkpointData;
+  }
+
+  //=== VK_EXT_descriptor_buffer ===
+
+  // Returns the size in bytes this descriptor set layout occupies in a descriptor buffer.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize DescriptorSetLayout::getSizeEXT(  ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorSetLayoutSizeEXT && "Function <vkGetDescriptorSetLayoutSizeEXT> needs extension <VK_EXT_descriptor_buffer> enabled!" );
+
+    
+    VULKAN_HPP_NAMESPACE::DeviceSize layoutSizeInBytes;
+    getDispatcher()->vkGetDescriptorSetLayoutSizeEXT( static_cast<VkDevice>( m_device ), static_cast<VkDescriptorSetLayout>( m_descriptorSetLayout ), reinterpret_cast<VkDeviceSize *>( &layoutSizeInBytes ) );
+    
+    
+    return layoutSizeInBytes;
+  }
+
+  // Returns the byte offset of a binding within this layout's descriptor-buffer representation.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize DescriptorSetLayout::getBindingOffsetEXT( uint32_t binding ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorSetLayoutBindingOffsetEXT && "Function <vkGetDescriptorSetLayoutBindingOffsetEXT> needs extension <VK_EXT_descriptor_buffer> enabled!" );
+
+    
+    VULKAN_HPP_NAMESPACE::DeviceSize offset;
+    getDispatcher()->vkGetDescriptorSetLayoutBindingOffsetEXT( static_cast<VkDevice>( m_device ), static_cast<VkDescriptorSetLayout>( m_descriptorSetLayout ), binding, reinterpret_cast<VkDeviceSize *>( &offset ) );
+    
+    
+    return offset;
+  }
+
+  // Writes a descriptor into a caller-chosen POD type; sizeof(DescriptorType) is passed as the data size, so the caller must pick a large-enough type.
+  template <typename DescriptorType>
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE DescriptorType Device::getDescriptorEXT( const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT & descriptorInfo ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorEXT && "Function <vkGetDescriptorEXT> needs extension <VK_EXT_descriptor_buffer> enabled!" );
+
+    
+    DescriptorType descriptor;
+    getDispatcher()->vkGetDescriptorEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDescriptorGetInfoEXT *>( &descriptorInfo ), sizeof( DescriptorType ), reinterpret_cast<void *>( &descriptor ) );
+    
+    
+    return descriptor;
+  }
+
+  // Binds the given descriptor buffers to this command buffer.
+   VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorBuffersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorBufferBindingInfoEXT> const & bindingInfos ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindDescriptorBuffersEXT && "Function <vkCmdBindDescriptorBuffersEXT> needs extension <VK_EXT_descriptor_buffer> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdBindDescriptorBuffersEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), bindingInfos.size(), reinterpret_cast<const VkDescriptorBufferBindingInfoEXT *>( bindingInfos.data() ) );
+    
+    
+    
+  }
+
+  // Sets per-set buffer indices/offsets; bufferIndices and offsets must be the same length (LogicError / LOG(FATAL) otherwise).
+   VULKAN_HPP_INLINE void CommandBuffer::setDescriptorBufferOffsetsEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & bufferIndices, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDescriptorBufferOffsetsEXT && "Function <vkCmdSetDescriptorBufferOffsetsEXT> needs extension <VK_EXT_descriptor_buffer> enabled!" );
+    if ( bufferIndices.size() != offsets.size() )
+  {
+    #ifndef VULKAN_HPP_NO_EXCEPTIONS
+    throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setDescriptorBufferOffsetsEXT: bufferIndices.size() != offsets.size()" );
+    #else
+    LOG(FATAL) << VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setDescriptorBufferOffsetsEXT: bufferIndices.size() != offsets.size()";
+    #endif
+  }
+
+    
+    
+    getDispatcher()->vkCmdSetDescriptorBufferOffsetsEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), firstSet, bufferIndices.size(), bufferIndices.data(), reinterpret_cast<const VkDeviceSize *>( offsets.data() ) );
+    
+    
+    
+  }
+
+  // Binds embedded immutable samplers for one set of the given pipeline layout.
+   VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorBufferEmbeddedSamplersEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindDescriptorBufferEmbeddedSamplersEXT && "Function <vkCmdBindDescriptorBufferEmbeddedSamplersEXT> needs extension <VK_EXT_descriptor_buffer> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdBindDescriptorBufferEmbeddedSamplersEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), set );
+    
+    
+    
+  }
+
+  // Opaque capture data for a buffer, returned in a caller-chosen POD DataType (passed by raw pointer to the C entry point).
+  template <typename DataType>
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE DataType Device::getBufferOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT & info ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferOpaqueCaptureDescriptorDataEXT && "Function <vkGetBufferOpaqueCaptureDescriptorDataEXT> needs extension <VK_EXT_descriptor_buffer> enabled!" );
+
+    
+    DataType data;
+    VkResult result = getDispatcher()->vkGetBufferOpaqueCaptureDescriptorDataEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkBufferCaptureDescriptorDataInfoEXT *>( &info ), &data );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getBufferOpaqueCaptureDescriptorDataEXT" );
+    
+    return data;
+  }
+
+  // Opaque capture data for an image; same pattern as the buffer variant above.
+  template <typename DataType>
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE DataType Device::getImageOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT & info ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageOpaqueCaptureDescriptorDataEXT && "Function <vkGetImageOpaqueCaptureDescriptorDataEXT> needs extension <VK_EXT_descriptor_buffer> enabled!" );
+
+    
+    DataType data;
+    VkResult result = getDispatcher()->vkGetImageOpaqueCaptureDescriptorDataEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImageCaptureDescriptorDataInfoEXT *>( &info ), &data );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getImageOpaqueCaptureDescriptorDataEXT" );
+    
+    return data;
+  }
+
+  // Opaque capture data for an image view.
+  template <typename DataType>
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE DataType Device::getImageViewOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT & info ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageViewOpaqueCaptureDescriptorDataEXT && "Function <vkGetImageViewOpaqueCaptureDescriptorDataEXT> needs extension <VK_EXT_descriptor_buffer> enabled!" );
+
+    
+    DataType data;
+    VkResult result = getDispatcher()->vkGetImageViewOpaqueCaptureDescriptorDataEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImageViewCaptureDescriptorDataInfoEXT *>( &info ), &data );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getImageViewOpaqueCaptureDescriptorDataEXT" );
+    
+    return data;
+  }
+
+  // Opaque capture data for a sampler.
+  template <typename DataType>
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE DataType Device::getSamplerOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT & info ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetSamplerOpaqueCaptureDescriptorDataEXT && "Function <vkGetSamplerOpaqueCaptureDescriptorDataEXT> needs extension <VK_EXT_descriptor_buffer> enabled!" );
+
+    
+    DataType data;
+    VkResult result = getDispatcher()->vkGetSamplerOpaqueCaptureDescriptorDataEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSamplerCaptureDescriptorDataInfoEXT *>( &info ), &data );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSamplerOpaqueCaptureDescriptorDataEXT" );
+    
+    return data;
+  }
+
+  // Opaque capture data for an acceleration structure.
+  template <typename DataType>
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE DataType Device::getAccelerationStructureOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT & info ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT && "Function <vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT> needs extension <VK_EXT_descriptor_buffer> enabled!" );
+
+    
+    DataType data;
+    VkResult result = getDispatcher()->vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkAccelerationStructureCaptureDescriptorDataInfoEXT *>( &info ), &data );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureOpaqueCaptureDescriptorDataEXT" );
+    
+    return data;
+  }
+
+  //=== VK_NV_fragment_shading_rate_enums ===
+
+  // Sets the fragment shading rate enum; combinerOps is a fixed-size array of exactly 2 combiner ops.
+   VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateEnumNV( VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate, const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2] ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetFragmentShadingRateEnumNV && "Function <vkCmdSetFragmentShadingRateEnumNV> needs extension <VK_NV_fragment_shading_rate_enums> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdSetFragmentShadingRateEnumNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkFragmentShadingRateNV>( shadingRate ), reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) );
+    
+    
+    
+  }
+
+  //=== VK_EXT_mesh_shader ===
+
+  // Dispatches mesh task workgroups directly from the given counts.
+   VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksEXT( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMeshTasksEXT && "Function <vkCmdDrawMeshTasksEXT> needs extension <VK_EXT_mesh_shader> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdDrawMeshTasksEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), groupCountX, groupCountY, groupCountZ );
+    
+    
+    
+  }
+
+  // Indirect variant: draw parameters are read from `buffer` at `offset`, drawCount records with the given stride.
+   VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectEXT( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMeshTasksIndirectEXT && "Function <vkCmdDrawMeshTasksIndirectEXT> needs extension <VK_EXT_mesh_shader> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdDrawMeshTasksIndirectEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
+    
+    
+    
+  }
+
+  // Indirect-count variant: the actual draw count is read from countBuffer at countBufferOffset, capped at maxDrawCount.
+   VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountEXT( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMeshTasksIndirectCountEXT && "Function <vkCmdDrawMeshTasksIndirectCountEXT> needs extension <VK_EXT_mesh_shader> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdDrawMeshTasksIndirectCountEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
+    
+    
+    
+  }
+
+  //=== VK_KHR_copy_commands2 ===
+
+  // Buffer-to-buffer copy using the extended *2 info struct.
+   VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyBuffer2KHR && "Function <vkCmdCopyBuffer2KHR> needs extension <VK_KHR_copy_commands2> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdCopyBuffer2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyBufferInfo2 *>( &copyBufferInfo ) );
+    
+    
+    
+  }
+
+  // Image-to-image copy (extended info struct).
+   VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyImage2KHR && "Function <vkCmdCopyImage2KHR> needs extension <VK_KHR_copy_commands2> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdCopyImage2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyImageInfo2 *>( &copyImageInfo ) );
+    
+    
+    
+  }
+
+  // Buffer-to-image copy (extended info struct).
+   VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyBufferToImage2KHR && "Function <vkCmdCopyBufferToImage2KHR> needs extension <VK_KHR_copy_commands2> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdCopyBufferToImage2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyBufferToImageInfo2 *>( &copyBufferToImageInfo ) );
+    
+    
+    
+  }
+
+  // Image-to-buffer copy (extended info struct).
+   VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyImageToBuffer2KHR && "Function <vkCmdCopyImageToBuffer2KHR> needs extension <VK_KHR_copy_commands2> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdCopyImageToBuffer2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyImageToBufferInfo2 *>( &copyImageToBufferInfo ) );
+    
+    
+    
+  }
+
+  // Scaled/blitting image copy (extended info struct).
+   VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBlitImage2KHR && "Function <vkCmdBlitImage2KHR> needs extension <VK_KHR_copy_commands2> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdBlitImage2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkBlitImageInfo2 *>( &blitImageInfo ) );
+    
+    
+    
+  }
+
+  // Multisample resolve copy (extended info struct).
+   VULKAN_HPP_INLINE void CommandBuffer::resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdResolveImage2KHR && "Function <vkCmdResolveImage2KHR> needs extension <VK_KHR_copy_commands2> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdResolveImage2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkResolveImageInfo2 *>( &resolveImageInfo ) );
+    
+    
+    
+  }
+
+  //=== VK_EXT_image_compression_control ===
+
+  // Queries the extended subresource layout of this image.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT Image::getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageSubresourceLayout2EXT && "Function <vkGetImageSubresourceLayout2EXT> needs extension <VK_EXT_image_compression_control> enabled!" );
+
+    
+    VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT layout;
+    getDispatcher()->vkGetImageSubresourceLayout2EXT( static_cast<VkDevice>( m_device ), static_cast<VkImage>( m_image ), reinterpret_cast<const VkImageSubresource2EXT *>( &subresource ), reinterpret_cast<VkSubresourceLayout2EXT *>( &layout ) );
+    
+    
+    return layout;
+  }
+
+  // StructureChain overload: fills the chain's SubresourceLayout2EXT element so chained extension structs are populated too.
+  template <typename X, typename Y, typename... Z>
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Image::getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageSubresourceLayout2EXT && "Function <vkGetImageSubresourceLayout2EXT> needs extension <VK_EXT_image_compression_control> enabled!" );
+
+    
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT & layout = structureChain.template get<VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT>();
+    getDispatcher()->vkGetImageSubresourceLayout2EXT( static_cast<VkDevice>( m_device ), static_cast<VkImage>( m_image ), reinterpret_cast<const VkImageSubresource2EXT *>( &subresource ), reinterpret_cast<VkSubresourceLayout2EXT *>( &layout ) );
+    
+    
+    return structureChain;
+  }
+
+  //=== VK_EXT_device_fault ===
+
+  // Returns (Result, (faultCounts, faultInfo)); resultCheck accepts both eSuccess and eIncomplete, and the raw Result is handed back to the caller.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::pair<VULKAN_HPP_NAMESPACE::Result, std::pair<VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT, VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT>> Device::getFaultInfoEXT(  ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceFaultInfoEXT && "Function <vkGetDeviceFaultInfoEXT> needs extension <VK_EXT_device_fault> enabled!" );
+
+    
+    std::pair<VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT,VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT> data;
+    VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT & faultCounts = data.first;
+    VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT & faultInfo = data.second;
+    VkResult result = getDispatcher()->vkGetDeviceFaultInfoEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<VkDeviceFaultCountsEXT *>( &faultCounts ), reinterpret_cast<VkDeviceFaultInfoEXT *>( &faultInfo ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getFaultInfoEXT", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete } );
+    
+    return std::make_pair( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
+  }
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_NV_acquire_winrt_display ===
+
+  // Acquires this WinRT display for the owning physical device; non-success results go to resultCheck.
+   VULKAN_HPP_INLINE void DisplayKHR::acquireWinrtNV(  ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireWinrtDisplayNV && "Function <vkAcquireWinrtDisplayNV> needs extension <VK_NV_acquire_winrt_display> enabled!" );
+
+    
+    
+    VkResult result = getDispatcher()->vkAcquireWinrtDisplayNV( static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkDisplayKHR>( m_display ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::DisplayKHR::acquireWinrtNV" );
+    
+    
+  }
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DisplayKHR PhysicalDevice::getWinrtDisplayNV( uint32_t deviceRelativeId ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::DisplayKHR( *this, deviceRelativeId );
+  }
+#endif
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
+  //=== VK_EXT_directfb_surface ===
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR Instance::createDirectFBSurfaceEXT( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator );
+  }
+#endif
+
+  // Whether the given queue family can present to the passed DirectFB instance.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 PhysicalDevice::getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex, IDirectFB & dfb ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceDirectFBPresentationSupportEXT && "Function <vkGetPhysicalDeviceDirectFBPresentationSupportEXT> needs extension <VK_EXT_directfb_surface> enabled!" );
+
+    
+    
+    VkBool32 result = getDispatcher()->vkGetPhysicalDeviceDirectFBPresentationSupportEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, &dfb );
+    
+    
+    return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
+  }
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+
+  //=== VK_KHR_ray_tracing_pipeline ===
+
+  // Launches a width x height x depth ray-trace using the four shader binding table regions.
+   VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdTraceRaysKHR && "Function <vkCmdTraceRaysKHR> needs extension <VK_KHR_ray_tracing_pipeline> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdTraceRaysKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &raygenShaderBindingTable ), reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &missShaderBindingTable ), reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &hitShaderBindingTable ), reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &callableShaderBindingTable ), width, height, depth );
+    
+    
+    
+  }
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_RAII_NAMESPACE::Pipeline> Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR> const & deferredOperation, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::Pipelines( *this, deferredOperation, pipelineCache, createInfos, allocator );
+  }
+#endif
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Pipeline Device::createRayTracingPipelineKHR( VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR> const & deferredOperation, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache, VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::Pipeline( *this, deferredOperation, pipelineCache, createInfo, allocator );
+  }
+#endif
+
+  // Returns dataSize bytes of shader-group handles as a vector of DataType; dataSize must be a multiple of sizeof(DataType) (asserted).
+  template <typename DataType>
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::vector<DataType> Pipeline::getRayTracingShaderGroupHandlesKHR( uint32_t firstGroup, uint32_t groupCount, size_t dataSize ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetRayTracingShaderGroupHandlesKHR && "Function <vkGetRayTracingShaderGroupHandlesKHR> needs extension <VK_KHR_ray_tracing_pipeline> enabled!" );
+
+        VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
+    std::vector<DataType> data( dataSize / sizeof( DataType ) );
+    VkResult result = getDispatcher()->vkGetRayTracingShaderGroupHandlesKHR( static_cast<VkDevice>( m_device ), static_cast<VkPipeline>( m_pipeline ), firstGroup, groupCount, data.size() * sizeof( DataType ), reinterpret_cast<void *>( data.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingShaderGroupHandlesKHR" );
+    
+    return data;
+  }
+
+  // Single-value convenience overload: fetches handles into one DataType of sizeof(DataType) bytes.
+  template <typename DataType>
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE DataType Pipeline::getRayTracingShaderGroupHandleKHR( uint32_t firstGroup, uint32_t groupCount ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetRayTracingShaderGroupHandlesKHR && "Function <vkGetRayTracingShaderGroupHandlesKHR> needs extension <VK_KHR_ray_tracing_pipeline> enabled!" );
+
+    
+    DataType data;
+    VkResult result = getDispatcher()->vkGetRayTracingShaderGroupHandlesKHR( static_cast<VkDevice>( m_device ), static_cast<VkPipeline>( m_pipeline ), firstGroup, groupCount, sizeof( DataType ), reinterpret_cast<void *>( &data ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingShaderGroupHandleKHR" );
+    
+    return data;
+  }
+
+  // Capture-replay variant of the vector getter; same size-multiple assertion applies.
+  template <typename DataType>
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::vector<DataType> Pipeline::getRayTracingCaptureReplayShaderGroupHandlesKHR( uint32_t firstGroup, uint32_t groupCount, size_t dataSize ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetRayTracingCaptureReplayShaderGroupHandlesKHR && "Function <vkGetRayTracingCaptureReplayShaderGroupHandlesKHR> needs extension <VK_KHR_ray_tracing_pipeline> enabled!" );
+
+        VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
+    std::vector<DataType> data( dataSize / sizeof( DataType ) );
+    VkResult result = getDispatcher()->vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( static_cast<VkDevice>( m_device ), static_cast<VkPipeline>( m_pipeline ), firstGroup, groupCount, data.size() * sizeof( DataType ), reinterpret_cast<void *>( data.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingCaptureReplayShaderGroupHandlesKHR" );
+    
+    return data;
+  }
+
+  template <typename DataType>
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE DataType Pipeline::getRayTracingCaptureReplayShaderGroupHandleKHR( uint32_t firstGroup, uint32_t groupCount ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetRayTracingCaptureReplayShaderGroupHandlesKHR && "Function <vkGetRayTracingCaptureReplayShaderGroupHandlesKHR> needs extension <VK_KHR_ray_tracing_pipeline> enabled!" );
+
+    
+    DataType data;
+    VkResult result = getDispatcher()->vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( static_cast<VkDevice>( m_device ), static_cast<VkPipeline>( m_pipeline ), firstGroup, groupCount, sizeof( DataType ), reinterpret_cast<void *>( &data ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingCaptureReplayShaderGroupHandleKHR" );
+    
+    return data;
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable, VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdTraceRaysIndirectKHR && "Function <vkCmdTraceRaysIndirectKHR> needs extension <VK_KHR_ray_tracing_pipeline> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdTraceRaysIndirectKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &raygenShaderBindingTable ), reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &missShaderBindingTable ), reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &hitShaderBindingTable ), reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &callableShaderBindingTable ), static_cast<VkDeviceAddress>( indirectDeviceAddress ) );
+    
+    
+    
+  }
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize Pipeline::getRayTracingShaderGroupStackSizeKHR( uint32_t group, VULKAN_HPP_NAMESPACE::ShaderGroupShaderKHR groupShader ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetRayTracingShaderGroupStackSizeKHR && "Function <vkGetRayTracingShaderGroupStackSizeKHR> needs extension <VK_KHR_ray_tracing_pipeline> enabled!" );
+
+    
+    
+    VkDeviceSize result = getDispatcher()->vkGetRayTracingShaderGroupStackSizeKHR( static_cast<VkDevice>( m_device ), static_cast<VkPipeline>( m_pipeline ), group, static_cast<VkShaderGroupShaderKHR>( groupShader ) );
+    
+    
+    return static_cast<VULKAN_HPP_NAMESPACE::DeviceSize>( result );
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setRayTracingPipelineStackSizeKHR( uint32_t pipelineStackSize ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRayTracingPipelineStackSizeKHR && "Function <vkCmdSetRayTracingPipelineStackSizeKHR> needs extension <VK_KHR_ray_tracing_pipeline> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdSetRayTracingPipelineStackSizeKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), pipelineStackSize );
+    
+    
+    
+  }
+
+  //=== VK_EXT_vertex_input_dynamic_state ===
+
+  // Records vkCmdSetVertexInputEXT with the given vertex binding and attribute description arrays.
+   VULKAN_HPP_INLINE void CommandBuffer::setVertexInputEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT> const & vertexBindingDescriptions, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT> const & vertexAttributeDescriptions ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetVertexInputEXT && "Function <vkCmdSetVertexInputEXT> needs extension <VK_EXT_vertex_input_dynamic_state> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdSetVertexInputEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), vertexBindingDescriptions.size(), reinterpret_cast<const VkVertexInputBindingDescription2EXT *>( vertexBindingDescriptions.data() ), vertexAttributeDescriptions.size(), reinterpret_cast<const VkVertexInputAttributeDescription2EXT *>( vertexAttributeDescriptions.data() ) );
+    
+    
+    
+  }
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_external_memory ===
+
+  // Exports device memory as a Zircon handle; failures are routed through resultCheck.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE zx_handle_t Device::getMemoryZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA & getZirconHandleInfo ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryZirconHandleFUCHSIA && "Function <vkGetMemoryZirconHandleFUCHSIA> needs extension <VK_FUCHSIA_external_memory> enabled!" );
+
+    
+    zx_handle_t zirconHandle;
+    VkResult result = getDispatcher()->vkGetMemoryZirconHandleFUCHSIA( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkMemoryGetZirconHandleInfoFUCHSIA *>( &getZirconHandleInfo ), &zirconHandle );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandleFUCHSIA" );
+    
+    return zirconHandle;
+  }
+
+  // Queries the memory properties of an externally supplied Zircon handle of the given handle type.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA Device::getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, zx_handle_t zirconHandle ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryZirconHandlePropertiesFUCHSIA && "Function <vkGetMemoryZirconHandlePropertiesFUCHSIA> needs extension <VK_FUCHSIA_external_memory> enabled!" );
+
+    
+    VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA memoryZirconHandleProperties;
+    VkResult result = getDispatcher()->vkGetMemoryZirconHandlePropertiesFUCHSIA( static_cast<VkDevice>( m_device ), static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), zirconHandle, reinterpret_cast<VkMemoryZirconHandlePropertiesFUCHSIA *>( &memoryZirconHandleProperties ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandlePropertiesFUCHSIA" );
+    
+    return memoryZirconHandleProperties;
+  }
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_external_semaphore ===
+
+  // Imports a Zircon handle into a semaphore; failures are routed through resultCheck.
+   VULKAN_HPP_INLINE void Device::importSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA & importSemaphoreZirconHandleInfo ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkImportSemaphoreZirconHandleFUCHSIA && "Function <vkImportSemaphoreZirconHandleFUCHSIA> needs extension <VK_FUCHSIA_external_semaphore> enabled!" );
+
+    
+    
+    VkResult result = getDispatcher()->vkImportSemaphoreZirconHandleFUCHSIA( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImportSemaphoreZirconHandleInfoFUCHSIA *>( &importSemaphoreZirconHandleInfo ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreZirconHandleFUCHSIA" );
+    
+    
+  }
+
+  // Exports a semaphore as a Zircon handle; failures are routed through resultCheck.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE zx_handle_t Device::getSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA & getZirconHandleInfo ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetSemaphoreZirconHandleFUCHSIA && "Function <vkGetSemaphoreZirconHandleFUCHSIA> needs extension <VK_FUCHSIA_external_semaphore> enabled!" );
+
+    
+    zx_handle_t zirconHandle;
+    VkResult result = getDispatcher()->vkGetSemaphoreZirconHandleFUCHSIA( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreGetZirconHandleInfoFUCHSIA *>( &getZirconHandleInfo ), &zirconHandle );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreZirconHandleFUCHSIA" );
+    
+    return zirconHandle;
+  }
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_buffer_collection ===
+  // RAII factory: constructing the BufferCollectionFUCHSIA performs the create call.
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::BufferCollectionFUCHSIA Device::createBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::BufferCollectionFUCHSIA( *this, createInfo, allocator );
+  }
+#endif
+
+  // Applies image constraints to this buffer collection via vkSetBufferCollectionImageConstraintsFUCHSIA.
+   VULKAN_HPP_INLINE void BufferCollectionFUCHSIA::setImageConstraints( const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA & imageConstraintsInfo ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkSetBufferCollectionImageConstraintsFUCHSIA && "Function <vkSetBufferCollectionImageConstraintsFUCHSIA> needs extension <VK_FUCHSIA_buffer_collection> enabled!" );
+
+    
+    
+    VkResult result = getDispatcher()->vkSetBufferCollectionImageConstraintsFUCHSIA( static_cast<VkDevice>( m_device ), static_cast<VkBufferCollectionFUCHSIA>( m_collection ), reinterpret_cast<const VkImageConstraintsInfoFUCHSIA *>( &imageConstraintsInfo ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::BufferCollectionFUCHSIA::setImageConstraints" );
+    
+    
+  }
+
+  // Applies buffer constraints to this buffer collection via vkSetBufferCollectionBufferConstraintsFUCHSIA.
+   VULKAN_HPP_INLINE void BufferCollectionFUCHSIA::setBufferConstraints( const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA & bufferConstraintsInfo ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkSetBufferCollectionBufferConstraintsFUCHSIA && "Function <vkSetBufferCollectionBufferConstraintsFUCHSIA> needs extension <VK_FUCHSIA_buffer_collection> enabled!" );
+
+    
+    
+    VkResult result = getDispatcher()->vkSetBufferCollectionBufferConstraintsFUCHSIA( static_cast<VkDevice>( m_device ), static_cast<VkBufferCollectionFUCHSIA>( m_collection ), reinterpret_cast<const VkBufferConstraintsInfoFUCHSIA *>( &bufferConstraintsInfo ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::BufferCollectionFUCHSIA::setBufferConstraints" );
+    
+    
+  }
+
+  // Retrieves the properties of this buffer collection via vkGetBufferCollectionPropertiesFUCHSIA.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA BufferCollectionFUCHSIA::getProperties(  ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferCollectionPropertiesFUCHSIA && "Function <vkGetBufferCollectionPropertiesFUCHSIA> needs extension <VK_FUCHSIA_buffer_collection> enabled!" );
+
+    
+    VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA properties;
+    VkResult result = getDispatcher()->vkGetBufferCollectionPropertiesFUCHSIA( static_cast<VkDevice>( m_device ), static_cast<VkBufferCollectionFUCHSIA>( m_collection ), reinterpret_cast<VkBufferCollectionPropertiesFUCHSIA *>( &properties ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::BufferCollectionFUCHSIA::getProperties" );
+    
+    return properties;
+  }
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  //=== VK_HUAWEI_subpass_shading ===
+
+  // Returns { result, max workgroup size }; both eSuccess and eIncomplete pass resultCheck, so the caller also receives the result code.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::pair<VULKAN_HPP_NAMESPACE::Result, VULKAN_HPP_NAMESPACE::Extent2D> RenderPass::getSubpassShadingMaxWorkgroupSizeHUAWEI(  ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI && "Function <vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI> needs extension <VK_HUAWEI_subpass_shading> enabled!" );
+
+    
+    VULKAN_HPP_NAMESPACE::Extent2D maxWorkgroupSize;
+    VkResult result = getDispatcher()->vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( static_cast<VkDevice>( m_device ), static_cast<VkRenderPass>( m_renderPass ), reinterpret_cast<VkExtent2D *>( &maxWorkgroupSize ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::RenderPass::getSubpassShadingMaxWorkgroupSizeHUAWEI", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete } );
+    
+    return std::make_pair( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), maxWorkgroupSize );
+  }
+
+  // Records vkCmdSubpassShadingHUAWEI (takes no parameters besides the command buffer).
+   VULKAN_HPP_INLINE void CommandBuffer::subpassShadingHUAWEI(  ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSubpassShadingHUAWEI && "Function <vkCmdSubpassShadingHUAWEI> needs extension <VK_HUAWEI_subpass_shading> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdSubpassShadingHUAWEI( static_cast<VkCommandBuffer>( m_commandBuffer ) );
+    
+    
+    
+  }
+
+  //=== VK_HUAWEI_invocation_mask ===
+
+  // Binds an invocation-mask image view in the given layout via vkCmdBindInvocationMaskHUAWEI.
+   VULKAN_HPP_INLINE void CommandBuffer::bindInvocationMaskHUAWEI( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindInvocationMaskHUAWEI && "Function <vkCmdBindInvocationMaskHUAWEI> needs extension <VK_HUAWEI_invocation_mask> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdBindInvocationMaskHUAWEI( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkImageView>( imageView ), static_cast<VkImageLayout>( imageLayout ) );
+    
+    
+    
+  }
+
+  //=== VK_NV_external_memory_rdma ===
+
+  // Obtains a remote address for the memory described by memoryGetRemoteAddressInfo; failures are routed through resultCheck.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::RemoteAddressNV Device::getMemoryRemoteAddressNV( const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV & memoryGetRemoteAddressInfo ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryRemoteAddressNV && "Function <vkGetMemoryRemoteAddressNV> needs extension <VK_NV_external_memory_rdma> enabled!" );
+
+    
+    VULKAN_HPP_NAMESPACE::RemoteAddressNV address;
+    VkResult result = getDispatcher()->vkGetMemoryRemoteAddressNV( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkMemoryGetRemoteAddressInfoNV *>( &memoryGetRemoteAddressInfo ), reinterpret_cast<VkRemoteAddressNV *>( &address ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryRemoteAddressNV" );
+    
+    return address;
+  }
+
+  //=== VK_EXT_pipeline_properties ===
+
+  // Queries pipeline properties; the driver writes through a VkBaseOutStructure, which is returned by value.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::BaseOutStructure Device::getPipelinePropertiesEXT( const VULKAN_HPP_NAMESPACE::PipelineInfoEXT & pipelineInfo ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelinePropertiesEXT && "Function <vkGetPipelinePropertiesEXT> needs extension <VK_EXT_pipeline_properties> enabled!" );
+
+    
+    VULKAN_HPP_NAMESPACE::BaseOutStructure pipelineProperties;
+    VkResult result = getDispatcher()->vkGetPipelinePropertiesEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkPipelineInfoEXT *>( &pipelineInfo ), reinterpret_cast<VkBaseOutStructure *>( &pipelineProperties ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelinePropertiesEXT" );
+    
+    return pipelineProperties;
+  }
+
+  //=== VK_EXT_extended_dynamic_state2 ===
+
+  // Dynamic state: sets the tessellation patch control point count.
+   VULKAN_HPP_INLINE void CommandBuffer::setPatchControlPointsEXT( uint32_t patchControlPoints ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPatchControlPointsEXT && "Function <vkCmdSetPatchControlPointsEXT> needs extension <VK_EXT_extended_dynamic_state2> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdSetPatchControlPointsEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), patchControlPoints );
+    
+    
+    
+  }
+
+  // Dynamic state: enables/disables rasterizer discard.
+   VULKAN_HPP_INLINE void CommandBuffer::setRasterizerDiscardEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRasterizerDiscardEnableEXT && "Function <vkCmdSetRasterizerDiscardEnableEXT> needs extension <VK_EXT_extended_dynamic_state2> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdSetRasterizerDiscardEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( rasterizerDiscardEnable ) );
+    
+    
+    
+  }
+
+  // Dynamic state: enables/disables depth bias.
+   VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthBiasEnableEXT && "Function <vkCmdSetDepthBiasEnableEXT> needs extension <VK_EXT_extended_dynamic_state2> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdSetDepthBiasEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthBiasEnable ) );
+    
+    
+    
+  }
+
+  // Dynamic state: selects the framebuffer logic op.
+   VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEXT( VULKAN_HPP_NAMESPACE::LogicOp logicOp ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLogicOpEXT && "Function <vkCmdSetLogicOpEXT> needs extension <VK_EXT_extended_dynamic_state2> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdSetLogicOpEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkLogicOp>( logicOp ) );
+    
+    
+    
+  }
+
+  // Dynamic state: enables/disables primitive restart.
+   VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveRestartEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPrimitiveRestartEnableEXT && "Function <vkCmdSetPrimitiveRestartEnableEXT> needs extension <VK_EXT_extended_dynamic_state2> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdSetPrimitiveRestartEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( primitiveRestartEnable ) );
+    
+    
+    
+  }
+
+#if defined( VK_USE_PLATFORM_SCREEN_QNX )
+  //=== VK_QNX_screen_surface ===
+  // RAII factory: constructing the SurfaceKHR performs the create call.
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR Instance::createScreenSurfaceQNX( VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator );
+  }
+#endif
+
+  // Asks whether the queue family can present to the given QNX screen window; the call returns a VkBool32 directly, so there is no VkResult to check.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 PhysicalDevice::getScreenPresentationSupportQNX( uint32_t queueFamilyIndex, struct _screen_window & window ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceScreenPresentationSupportQNX && "Function <vkGetPhysicalDeviceScreenPresentationSupportQNX> needs extension <VK_QNX_screen_surface> enabled!" );
+
+    
+    
+    VkBool32 result = getDispatcher()->vkGetPhysicalDeviceScreenPresentationSupportQNX( static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, &window );
+    
+    
+    return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
+  }
+#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+
+  //=== VK_EXT_color_write_enable ===
+
+  // Records vkCmdSetColorWriteEnableEXT with the given VkBool32 enable array.
+   VULKAN_HPP_INLINE void CommandBuffer::setColorWriteEnableEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorWriteEnables ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetColorWriteEnableEXT && "Function <vkCmdSetColorWriteEnableEXT> needs extension <VK_EXT_color_write_enable> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdSetColorWriteEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), colorWriteEnables.size(), reinterpret_cast<const VkBool32 *>( colorWriteEnables.data() ) );
+    
+    
+    
+  }
+
+  //=== VK_KHR_ray_tracing_maintenance1 ===
+
+  // Records vkCmdTraceRaysIndirect2KHR with the indirect device address.
+   VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirect2KHR( VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdTraceRaysIndirect2KHR && "Function <vkCmdTraceRaysIndirect2KHR> needs extension <VK_KHR_ray_tracing_maintenance1> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdTraceRaysIndirect2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkDeviceAddress>( indirectDeviceAddress ) );
+    
+    
+    
+  }
+
+  //=== VK_EXT_multi_draw ===
+
+  // Records vkCmdDrawMultiEXT; vertexInfo supplies the record count, data pointer and stride.
+   VULKAN_HPP_INLINE void CommandBuffer::drawMultiEXT( VULKAN_HPP_NAMESPACE::StridedArrayProxy<const VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT> const & vertexInfo, uint32_t instanceCount, uint32_t firstInstance ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMultiEXT && "Function <vkCmdDrawMultiEXT> needs extension <VK_EXT_multi_draw> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdDrawMultiEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), vertexInfo.size(), reinterpret_cast<const VkMultiDrawInfoEXT *>( vertexInfo.data() ), instanceCount, firstInstance, vertexInfo.stride() );
+    
+    
+    
+  }
+
+  // Indexed multi-draw; the optional vertexOffset is forwarded as a possibly-null const int32_t pointer.
+   VULKAN_HPP_INLINE void CommandBuffer::drawMultiIndexedEXT( VULKAN_HPP_NAMESPACE::StridedArrayProxy<const VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT> const & indexInfo, uint32_t instanceCount, uint32_t firstInstance, Optional<const int32_t> vertexOffset ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMultiIndexedEXT && "Function <vkCmdDrawMultiIndexedEXT> needs extension <VK_EXT_multi_draw> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdDrawMultiIndexedEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), indexInfo.size(), reinterpret_cast<const VkMultiDrawIndexedInfoEXT *>( indexInfo.data() ), instanceCount, firstInstance, indexInfo.stride(), static_cast<const int32_t *>( vertexOffset ) );
+    
+    
+    
+  }
+
+  //=== VK_EXT_opacity_micromap ===
+  // RAII factory: constructing the MicromapEXT performs the create call.
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::MicromapEXT Device::createMicromapEXT( VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::MicromapEXT( *this, createInfo, allocator );
+  }
+#endif
+
+  // Records vkCmdBuildMicromapsEXT for the given build infos (device-side build).
+   VULKAN_HPP_INLINE void CommandBuffer::buildMicromapsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT> const & infos ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBuildMicromapsEXT && "Function <vkCmdBuildMicromapsEXT> needs extension <VK_EXT_opacity_micromap> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdBuildMicromapsEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), infos.size(), reinterpret_cast<const VkMicromapBuildInfoEXT *>( infos.data() ) );
+    
+    
+    
+  }
+
+  // Host build of micromaps; eSuccess, eOperationDeferredKHR and eOperationNotDeferredKHR all pass resultCheck, and the result code is returned to the caller.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::buildMicromapsEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT> const & infos ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkBuildMicromapsEXT && "Function <vkBuildMicromapsEXT> needs extension <VK_EXT_opacity_micromap> enabled!" );
+
+    
+    
+    VkResult result = getDispatcher()->vkBuildMicromapsEXT( static_cast<VkDevice>( m_device ), static_cast<VkDeferredOperationKHR>( deferredOperation ), infos.size(), reinterpret_cast<const VkMicromapBuildInfoEXT *>( infos.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::buildMicromapsEXT", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
+    
+    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+  }
+
+  // Host micromap-to-micromap copy; deferred-operation result codes are accepted and returned.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::copyMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCopyMicromapEXT && "Function <vkCopyMicromapEXT> needs extension <VK_EXT_opacity_micromap> enabled!" );
+
+    
+    
+    VkResult result = getDispatcher()->vkCopyMicromapEXT( static_cast<VkDevice>( m_device ), static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMicromapInfoEXT *>( &info ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::copyMicromapEXT", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
+    
+    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+  }
+
+  // Host copy of a micromap into caller memory; deferred-operation result codes are accepted and returned.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::copyMicromapToMemoryEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCopyMicromapToMemoryEXT && "Function <vkCopyMicromapToMemoryEXT> needs extension <VK_EXT_opacity_micromap> enabled!" );
+
+    
+    
+    VkResult result = getDispatcher()->vkCopyMicromapToMemoryEXT( static_cast<VkDevice>( m_device ), static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( &info ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::copyMicromapToMemoryEXT", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
+    
+    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+  }
+
+  // Host copy of caller memory into a micromap; deferred-operation result codes are accepted and returned.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::copyMemoryToMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCopyMemoryToMicromapEXT && "Function <vkCopyMemoryToMicromapEXT> needs extension <VK_EXT_opacity_micromap> enabled!" );
+
+    
+    
+    VkResult result = getDispatcher()->vkCopyMemoryToMicromapEXT( static_cast<VkDevice>( m_device ), static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( &info ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToMicromapEXT", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
+    
+    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+  }
+
+  template <typename DataType>
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::vector<DataType> Device::writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps, VULKAN_HPP_NAMESPACE::QueryType queryType, size_t dataSize, size_t stride ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkWriteMicromapsPropertiesEXT && "Function <vkWriteMicromapsPropertiesEXT> needs extension <VK_EXT_opacity_micromap> enabled!" );
+    // Writes properties of the given micromaps into a vector of DataType; dataSize must be a multiple of sizeof( DataType ).
+        VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
+    std::vector<DataType> data( dataSize / sizeof( DataType ) );
+    VkResult result = getDispatcher()->vkWriteMicromapsPropertiesEXT( static_cast<VkDevice>( m_device ), micromaps.size(), reinterpret_cast<const VkMicromapEXT *>( micromaps.data() ), static_cast<VkQueryType>( queryType ), data.size() * sizeof( DataType ), reinterpret_cast<void *>( data.data() ), stride );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::writeMicromapsPropertiesEXT" );
+    
+    return data;
+  }
+  // Single-value convenience variant of writeMicromapsPropertiesEXT.
+  template <typename DataType>
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE DataType Device::writeMicromapsPropertyEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps, VULKAN_HPP_NAMESPACE::QueryType queryType, size_t stride ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkWriteMicromapsPropertiesEXT && "Function <vkWriteMicromapsPropertiesEXT> needs extension <VK_EXT_opacity_micromap> enabled!" );
+
+    
+    DataType data;
+    VkResult result = getDispatcher()->vkWriteMicromapsPropertiesEXT( static_cast<VkDevice>( m_device ), micromaps.size(), reinterpret_cast<const VkMicromapEXT *>( micromaps.data() ), static_cast<VkQueryType>( queryType ), sizeof( DataType ), reinterpret_cast<void *>( &data ), stride );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::writeMicromapsPropertyEXT" );
+    
+    return data;
+  }
+
+  // Records vkCmdCopyMicromapEXT with the given copy info.
+   VULKAN_HPP_INLINE void CommandBuffer::copyMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyMicromapEXT && "Function <vkCmdCopyMicromapEXT> needs extension <VK_EXT_opacity_micromap> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdCopyMicromapEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyMicromapInfoEXT *>( &info ) );
+    
+    
+    
+  }
+
+  // Records vkCmdCopyMicromapToMemoryEXT with the given copy info.
+   VULKAN_HPP_INLINE void CommandBuffer::copyMicromapToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyMicromapToMemoryEXT && "Function <vkCmdCopyMicromapToMemoryEXT> needs extension <VK_EXT_opacity_micromap> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdCopyMicromapToMemoryEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( &info ) );
+    
+    
+    
+  }
+
+  // Records vkCmdCopyMemoryToMicromapEXT with the given copy info.
+   VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyMemoryToMicromapEXT && "Function <vkCmdCopyMemoryToMicromapEXT> needs extension <VK_EXT_opacity_micromap> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdCopyMemoryToMicromapEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( &info ) );
+    
+    
+    
+  }
+
+  // Records vkCmdWriteMicromapsPropertiesEXT, writing query results into queryPool starting at firstQuery.
+   VULKAN_HPP_INLINE void CommandBuffer::writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteMicromapsPropertiesEXT && "Function <vkCmdWriteMicromapsPropertiesEXT> needs extension <VK_EXT_opacity_micromap> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdWriteMicromapsPropertiesEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), micromaps.size(), reinterpret_cast<const VkMicromapEXT *>( micromaps.data() ), static_cast<VkQueryType>( queryType ), static_cast<VkQueryPool>( queryPool ), firstQuery );
+    
+    
+    
+  }
+
+  // Asks the device for compatibility of micromaps with the given version info; the call produces no result code.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR Device::getMicromapCompatibilityEXT( const VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT & versionInfo ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceMicromapCompatibilityEXT && "Function <vkGetDeviceMicromapCompatibilityEXT> needs extension <VK_EXT_opacity_micromap> enabled!" );
+
+    
+    VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR compatibility;
+    getDispatcher()->vkGetDeviceMicromapCompatibilityEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkMicromapVersionInfoEXT *>( &versionInfo ), reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( &compatibility ) );
+    
+    
+    return compatibility;
+  }
+
+  // Computes the build sizes for a micromap build of the given type; the call produces no result code.
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT Device::getMicromapBuildSizesEXT( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT & buildInfo ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetMicromapBuildSizesEXT && "Function <vkGetMicromapBuildSizesEXT> needs extension <VK_EXT_opacity_micromap> enabled!" );
+
+    
+    VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT sizeInfo;
+    getDispatcher()->vkGetMicromapBuildSizesEXT( static_cast<VkDevice>( m_device ), static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ), reinterpret_cast<const VkMicromapBuildInfoEXT *>( &buildInfo ), reinterpret_cast<VkMicromapBuildSizesInfoEXT *>( &sizeInfo ) );
+    
+    
+    return sizeInfo;
+  }
+
+  //=== VK_HUAWEI_cluster_culling_shader ===
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::drawClusterHUAWEI( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawClusterHUAWEI && "Function <vkCmdDrawClusterHUAWEI> needs extension <VK_HUAWEI_cluster_culling_shader> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdDrawClusterHUAWEI( static_cast<VkCommandBuffer>( m_commandBuffer ), groupCountX, groupCountY, groupCountZ );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::drawClusterIndirectHUAWEI( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawClusterIndirectHUAWEI && "Function <vkCmdDrawClusterIndirectHUAWEI> needs extension <VK_HUAWEI_cluster_culling_shader> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdDrawClusterIndirectHUAWEI( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ) );
+    
+    
+    
+  }
+
+  //=== VK_EXT_pageable_device_local_memory ===
+
+  
+   VULKAN_HPP_INLINE void DeviceMemory::setPriorityEXT( float priority ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkSetDeviceMemoryPriorityEXT && "Function <vkSetDeviceMemoryPriorityEXT> needs extension <VK_EXT_pageable_device_local_memory> enabled!" );
+
+    
+    
+    getDispatcher()->vkSetDeviceMemoryPriorityEXT( static_cast<VkDevice>( m_device ), static_cast<VkDeviceMemory>( m_memory ), priority );
+    
+    
+    
+  }
+
+  //=== VK_KHR_maintenance4 ===
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceBufferMemoryRequirementsKHR && "Function <vkGetDeviceBufferMemoryRequirementsKHR> needs extension <VK_KHR_maintenance4> enabled!" );
+
+    
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
+    getDispatcher()->vkGetDeviceBufferMemoryRequirementsKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+    
+    
+    return memoryRequirements;
+  }
+
+  template <typename X, typename Y, typename... Z>
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceBufferMemoryRequirementsKHR && "Function <vkGetDeviceBufferMemoryRequirementsKHR> needs extension <VK_KHR_maintenance4> enabled!" );
+
+    
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
+    getDispatcher()->vkGetDeviceBufferMemoryRequirementsKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+    
+    
+    return structureChain;
+  }
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceImageMemoryRequirementsKHR && "Function <vkGetDeviceImageMemoryRequirementsKHR> needs extension <VK_KHR_maintenance4> enabled!" );
+
+    
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
+    getDispatcher()->vkGetDeviceImageMemoryRequirementsKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+    
+    
+    return memoryRequirements;
+  }
+
+  template <typename X, typename Y, typename... Z>
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceImageMemoryRequirementsKHR && "Function <vkGetDeviceImageMemoryRequirementsKHR> needs extension <VK_KHR_maintenance4> enabled!" );
+
+    
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
+    getDispatcher()->vkGetDeviceImageMemoryRequirementsKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+    
+    
+    return structureChain;
+  }
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2> Device::getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceImageSparseMemoryRequirementsKHR && "Function <vkGetDeviceImageSparseMemoryRequirementsKHR> needs extension <VK_KHR_maintenance4> enabled!" );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2> sparseMemoryRequirements;
+    uint32_t sparseMemoryRequirementCount;
+    getDispatcher()->vkGetDeviceImageSparseMemoryRequirementsKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, nullptr );
+    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    getDispatcher()->vkGetDeviceImageSparseMemoryRequirementsKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
+    
+    VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
+    if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
+    {
+      sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    }
+    return sparseMemoryRequirements;
+  }
+
+  //=== VK_VALVE_descriptor_set_host_mapping ===
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE Device::getDescriptorSetLayoutHostMappingInfoVALVE( const VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE & bindingReference ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorSetLayoutHostMappingInfoVALVE && "Function <vkGetDescriptorSetLayoutHostMappingInfoVALVE> needs extension <VK_VALVE_descriptor_set_host_mapping> enabled!" );
+
+    
+    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE hostMapping;
+    getDispatcher()->vkGetDescriptorSetLayoutHostMappingInfoVALVE( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDescriptorSetBindingReferenceVALVE *>( &bindingReference ), reinterpret_cast<VkDescriptorSetLayoutHostMappingInfoVALVE *>( &hostMapping ) );
+    
+    
+    return hostMapping;
+  }
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE void * DescriptorSet::getHostMappingVALVE(  ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorSetHostMappingVALVE && "Function <vkGetDescriptorSetHostMappingVALVE> needs extension <VK_VALVE_descriptor_set_host_mapping> enabled!" );
+
+    
+    void * pData;
+    getDispatcher()->vkGetDescriptorSetHostMappingVALVE( static_cast<VkDevice>( m_device ), static_cast<VkDescriptorSet>( m_descriptorSet ), &pData );
+    
+    
+    return pData;
+  }
+
+  //=== VK_NV_copy_memory_indirect ===
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::copyMemoryIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, uint32_t copyCount, uint32_t stride ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyMemoryIndirectNV && "Function <vkCmdCopyMemoryIndirectNV> needs extension <VK_NV_copy_memory_indirect> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdCopyMemoryIndirectNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkDeviceAddress>( copyBufferAddress ), copyCount, stride );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToImageIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, uint32_t stride, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceLayers> const & imageSubresources ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyMemoryToImageIndirectNV && "Function <vkCmdCopyMemoryToImageIndirectNV> needs extension <VK_NV_copy_memory_indirect> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdCopyMemoryToImageIndirectNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkDeviceAddress>( copyBufferAddress ), imageSubresources.size(), stride, static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), reinterpret_cast<const VkImageSubresourceLayers *>( imageSubresources.data() ) );
+    
+    
+    
+  }
+
+  //=== VK_NV_memory_decompression ===
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::decompressMemoryNV( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV> const & decompressMemoryRegions ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDecompressMemoryNV && "Function <vkCmdDecompressMemoryNV> needs extension <VK_NV_memory_decompression> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdDecompressMemoryNV( static_cast<VkCommandBuffer>( m_commandBuffer ), decompressMemoryRegions.size(), reinterpret_cast<const VkDecompressMemoryRegionNV *>( decompressMemoryRegions.data() ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::decompressMemoryIndirectCountNV( VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsAddress, VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsCountAddress, uint32_t stride ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDecompressMemoryIndirectCountNV && "Function <vkCmdDecompressMemoryIndirectCountNV> needs extension <VK_NV_memory_decompression> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdDecompressMemoryIndirectCountNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkDeviceAddress>( indirectCommandsAddress ), static_cast<VkDeviceAddress>( indirectCommandsCountAddress ), stride );
+    
+    
+    
+  }
+
+  //=== VK_EXT_extended_dynamic_state3 ===
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setTessellationDomainOriginEXT( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetTessellationDomainOriginEXT && "Function <vkCmdSetTessellationDomainOriginEXT> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdSetTessellationDomainOriginEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkTessellationDomainOrigin>( domainOrigin ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setDepthClampEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthClampEnableEXT && "Function <vkCmdSetDepthClampEnableEXT> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdSetDepthClampEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthClampEnable ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setPolygonModeEXT( VULKAN_HPP_NAMESPACE::PolygonMode polygonMode ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPolygonModeEXT && "Function <vkCmdSetPolygonModeEXT> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdSetPolygonModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPolygonMode>( polygonMode ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setRasterizationSamplesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRasterizationSamplesEXT && "Function <vkCmdSetRasterizationSamplesEXT> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdSetRasterizationSamplesEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkSampleCountFlagBits>( rasterizationSamples ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setSampleMaskEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SampleMask> const & sampleMask ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetSampleMaskEXT && "Function <vkCmdSetSampleMaskEXT> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdSetSampleMaskEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<const VkSampleMask *>( sampleMask.data() ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setAlphaToCoverageEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetAlphaToCoverageEnableEXT && "Function <vkCmdSetAlphaToCoverageEnableEXT> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdSetAlphaToCoverageEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( alphaToCoverageEnable ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setAlphaToOneEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetAlphaToOneEnableEXT && "Function <vkCmdSetAlphaToOneEnableEXT> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdSetAlphaToOneEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( alphaToOneEnable ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLogicOpEnableEXT && "Function <vkCmdSetLogicOpEnableEXT> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdSetLogicOpEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( logicOpEnable ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setColorBlendEnableEXT( uint32_t firstAttachment, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorBlendEnables ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetColorBlendEnableEXT && "Function <vkCmdSetColorBlendEnableEXT> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdSetColorBlendEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), firstAttachment, colorBlendEnables.size(), reinterpret_cast<const VkBool32 *>( colorBlendEnables.data() ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setColorBlendEquationEXT( uint32_t firstAttachment, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT> const & colorBlendEquations ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetColorBlendEquationEXT && "Function <vkCmdSetColorBlendEquationEXT> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdSetColorBlendEquationEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), firstAttachment, colorBlendEquations.size(), reinterpret_cast<const VkColorBlendEquationEXT *>( colorBlendEquations.data() ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setColorWriteMaskEXT( uint32_t firstAttachment, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorComponentFlags> const & colorWriteMasks ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetColorWriteMaskEXT && "Function <vkCmdSetColorWriteMaskEXT> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdSetColorWriteMaskEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), firstAttachment, colorWriteMasks.size(), reinterpret_cast<const VkColorComponentFlags *>( colorWriteMasks.data() ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setRasterizationStreamEXT( uint32_t rasterizationStream ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRasterizationStreamEXT && "Function <vkCmdSetRasterizationStreamEXT> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdSetRasterizationStreamEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), rasterizationStream );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setConservativeRasterizationModeEXT( VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetConservativeRasterizationModeEXT && "Function <vkCmdSetConservativeRasterizationModeEXT> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdSetConservativeRasterizationModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkConservativeRasterizationModeEXT>( conservativeRasterizationMode ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setExtraPrimitiveOverestimationSizeEXT( float extraPrimitiveOverestimationSize ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetExtraPrimitiveOverestimationSizeEXT && "Function <vkCmdSetExtraPrimitiveOverestimationSizeEXT> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdSetExtraPrimitiveOverestimationSizeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), extraPrimitiveOverestimationSize );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setDepthClipEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthClipEnableEXT && "Function <vkCmdSetDepthClipEnableEXT> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdSetDepthClipEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthClipEnable ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetSampleLocationsEnableEXT && "Function <vkCmdSetSampleLocationsEnableEXT> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdSetSampleLocationsEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( sampleLocationsEnable ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setColorBlendAdvancedEXT( uint32_t firstAttachment, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT> const & colorBlendAdvanced ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetColorBlendAdvancedEXT && "Function <vkCmdSetColorBlendAdvancedEXT> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdSetColorBlendAdvancedEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), firstAttachment, colorBlendAdvanced.size(), reinterpret_cast<const VkColorBlendAdvancedEXT *>( colorBlendAdvanced.data() ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setProvokingVertexModeEXT( VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetProvokingVertexModeEXT && "Function <vkCmdSetProvokingVertexModeEXT> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdSetProvokingVertexModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkProvokingVertexModeEXT>( provokingVertexMode ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setLineRasterizationModeEXT( VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLineRasterizationModeEXT && "Function <vkCmdSetLineRasterizationModeEXT> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdSetLineRasterizationModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkLineRasterizationModeEXT>( lineRasterizationMode ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLineStippleEnableEXT && "Function <vkCmdSetLineStippleEnableEXT> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdSetLineStippleEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( stippledLineEnable ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setDepthClipNegativeOneToOneEXT( VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthClipNegativeOneToOneEXT && "Function <vkCmdSetDepthClipNegativeOneToOneEXT> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdSetDepthClipNegativeOneToOneEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( negativeOneToOne ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingEnableNV( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewportWScalingEnableNV && "Function <vkCmdSetViewportWScalingEnableNV> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdSetViewportWScalingEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( viewportWScalingEnable ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setViewportSwizzleNV( uint32_t firstViewport, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV> const & viewportSwizzles ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewportSwizzleNV && "Function <vkCmdSetViewportSwizzleNV> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdSetViewportSwizzleNV( static_cast<VkCommandBuffer>( m_commandBuffer ), firstViewport, viewportSwizzles.size(), reinterpret_cast<const VkViewportSwizzleNV *>( viewportSwizzles.data() ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setCoverageToColorEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageToColorEnableNV && "Function <vkCmdSetCoverageToColorEnableNV> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdSetCoverageToColorEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( coverageToColorEnable ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setCoverageToColorLocationNV( uint32_t coverageToColorLocation ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageToColorLocationNV && "Function <vkCmdSetCoverageToColorLocationNV> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdSetCoverageToColorLocationNV( static_cast<VkCommandBuffer>( m_commandBuffer ), coverageToColorLocation );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setCoverageModulationModeNV( VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageModulationModeNV && "Function <vkCmdSetCoverageModulationModeNV> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdSetCoverageModulationModeNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCoverageModulationModeNV>( coverageModulationMode ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setCoverageModulationTableEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageModulationTableEnableNV && "Function <vkCmdSetCoverageModulationTableEnableNV> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdSetCoverageModulationTableEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( coverageModulationTableEnable ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setCoverageModulationTableNV( VULKAN_HPP_NAMESPACE::ArrayProxy<const float> const & coverageModulationTable ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageModulationTableNV && "Function <vkCmdSetCoverageModulationTableNV> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdSetCoverageModulationTableNV( static_cast<VkCommandBuffer>( m_commandBuffer ), coverageModulationTable.size(), coverageModulationTable.data() );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setShadingRateImageEnableNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetShadingRateImageEnableNV && "Function <vkCmdSetShadingRateImageEnableNV> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdSetShadingRateImageEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( shadingRateImageEnable ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setRepresentativeFragmentTestEnableNV( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRepresentativeFragmentTestEnableNV && "Function <vkCmdSetRepresentativeFragmentTestEnableNV> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdSetRepresentativeFragmentTestEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( representativeFragmentTestEnable ) );
+    
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::setCoverageReductionModeNV( VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageReductionModeNV && "Function <vkCmdSetCoverageReductionModeNV> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdSetCoverageReductionModeNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCoverageReductionModeNV>( coverageReductionMode ) );
+    
+    
+    
+  }
+
+  //=== VK_EXT_shader_module_identifier ===
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT ShaderModule::getIdentifierEXT(  ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetShaderModuleIdentifierEXT && "Function <vkGetShaderModuleIdentifierEXT> needs extension <VK_EXT_shader_module_identifier> enabled!" );
+
+    
+    VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT identifier;
+    getDispatcher()->vkGetShaderModuleIdentifierEXT( static_cast<VkDevice>( m_device ), static_cast<VkShaderModule>( m_shaderModule ), reinterpret_cast<VkShaderModuleIdentifierEXT *>( &identifier ) );
+    
+    
+    return identifier;
+  }
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT Device::getShaderModuleCreateInfoIdentifierEXT( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetShaderModuleCreateInfoIdentifierEXT && "Function <vkGetShaderModuleCreateInfoIdentifierEXT> needs extension <VK_EXT_shader_module_identifier> enabled!" );
+
+    
+    VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT identifier;
+    getDispatcher()->vkGetShaderModuleCreateInfoIdentifierEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ), reinterpret_cast<VkShaderModuleIdentifierEXT *>( &identifier ) );
+    
+    
+    return identifier;
+  }
+
+  //=== VK_NV_optical_flow ===
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV> PhysicalDevice::getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceOpticalFlowImageFormatsNV && "Function <vkGetPhysicalDeviceOpticalFlowImageFormatsNV> needs extension <VK_NV_optical_flow> enabled!" );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV> imageFormatProperties;
+    uint32_t formatCount;
+    VkResult result;
+    do
+    {
+      result = getDispatcher()->vkGetPhysicalDeviceOpticalFlowImageFormatsNV( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( &opticalFlowImageFormatInfo ), &formatCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && formatCount )
+      {
+        imageFormatProperties.resize( formatCount );
+        result = getDispatcher()->vkGetPhysicalDeviceOpticalFlowImageFormatsNV( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( &opticalFlowImageFormatInfo ), &formatCount, reinterpret_cast<VkOpticalFlowImageFormatPropertiesNV *>( imageFormatProperties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getOpticalFlowImageFormatsNV" );
+    VULKAN_HPP_ASSERT( formatCount <= imageFormatProperties.size() );
+    if ( formatCount < imageFormatProperties.size() )
+    {
+      imageFormatProperties.resize( formatCount );
+    }
+    return imageFormatProperties;
+  }
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::OpticalFlowSessionNV Device::createOpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const 
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::OpticalFlowSessionNV( *this, createInfo, allocator );
+  }
+#endif
+
+  
+   VULKAN_HPP_INLINE void OpticalFlowSessionNV::bindImage( VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV bindingPoint, VULKAN_HPP_NAMESPACE::ImageView view, VULKAN_HPP_NAMESPACE::ImageLayout layout ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkBindOpticalFlowSessionImageNV && "Function <vkBindOpticalFlowSessionImageNV> needs extension <VK_NV_optical_flow> enabled!" );
+
+    
+    
+    VkResult result = getDispatcher()->vkBindOpticalFlowSessionImageNV( static_cast<VkDevice>( m_device ), static_cast<VkOpticalFlowSessionNV>( m_session ), static_cast<VkOpticalFlowSessionBindingPointNV>( bindingPoint ), static_cast<VkImageView>( view ), static_cast<VkImageLayout>( layout ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::OpticalFlowSessionNV::bindImage" );
+    
+    
+  }
+
+  
+   VULKAN_HPP_INLINE void CommandBuffer::opticalFlowExecuteNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV & executeInfo ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkCmdOpticalFlowExecuteNV && "Function <vkCmdOpticalFlowExecuteNV> needs extension <VK_NV_optical_flow> enabled!" );
+
+    
+    
+    getDispatcher()->vkCmdOpticalFlowExecuteNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkOpticalFlowSessionNV>( session ), reinterpret_cast<const VkOpticalFlowExecuteInfoNV *>( &executeInfo ) );
+    
+    
+    
+  }
+
+  //=== VK_QCOM_tile_properties ===
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM> Framebuffer::getTilePropertiesQCOM(  ) const  
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetFramebufferTilePropertiesQCOM && "Function <vkGetFramebufferTilePropertiesQCOM> needs extension <VK_QCOM_tile_properties> enabled!" );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM> properties;
+    uint32_t propertiesCount;
+    VkResult result;
+    do
+    {
+      result = getDispatcher()->vkGetFramebufferTilePropertiesQCOM( static_cast<VkDevice>( m_device ), static_cast<VkFramebuffer>( m_framebuffer ), &propertiesCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && propertiesCount )
+      {
+        properties.resize( propertiesCount );
+        result = getDispatcher()->vkGetFramebufferTilePropertiesQCOM( static_cast<VkDevice>( m_device ), static_cast<VkFramebuffer>( m_framebuffer ), &propertiesCount, reinterpret_cast<VkTilePropertiesQCOM *>( properties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    
+    VULKAN_HPP_ASSERT( propertiesCount <= properties.size() );
+    if ( propertiesCount < properties.size() )
+    {
+      properties.resize( propertiesCount );
+    }
+    return properties;
+  }
+
+  
+  VULKAN_HPP_NODISCARD  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::TilePropertiesQCOM Device::getDynamicRenderingTilePropertiesQCOM( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo ) const  VULKAN_HPP_NOEXCEPT 
+  {
+VULKAN_HPP_ASSERT( getDispatcher()->vkGetDynamicRenderingTilePropertiesQCOM && "Function <vkGetDynamicRenderingTilePropertiesQCOM> needs extension <VK_QCOM_tile_properties> enabled!" );
+
+    
+    VULKAN_HPP_NAMESPACE::TilePropertiesQCOM properties;
+    getDispatcher()->vkGetDynamicRenderingTilePropertiesQCOM( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkRenderingInfo *>( &renderingInfo ), reinterpret_cast<VkTilePropertiesQCOM *>( &properties ) );
+    
+    
+    return properties;
+  }
+
+
+  } // namespace VULKAN_HPP_RAII_NAMESPACE
+}   // namespace VULKAN_HPP_NAMESPACE
+#endif
+#endif
diff --git a/host/libs/graphics_detector/include/vulkan/vulkan_screen.h b/host/libs/graphics_detector/include/vulkan/vulkan_screen.h
new file mode 100644
index 0000000..f0ef40a
--- /dev/null
+++ b/host/libs/graphics_detector/include/vulkan/vulkan_screen.h
@@ -0,0 +1,54 @@
+#ifndef VULKAN_SCREEN_H_
+#define VULKAN_SCREEN_H_ 1
+
+/*
+** Copyright 2015-2022 The Khronos Group Inc.
+**
+** SPDX-License-Identifier: Apache-2.0
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+#define VK_QNX_screen_surface 1
+#define VK_QNX_SCREEN_SURFACE_SPEC_VERSION 1
+#define VK_QNX_SCREEN_SURFACE_EXTENSION_NAME "VK_QNX_screen_surface"
+typedef VkFlags VkScreenSurfaceCreateFlagsQNX;
+typedef struct VkScreenSurfaceCreateInfoQNX {
+    VkStructureType                  sType;
+    const void*                      pNext;
+    VkScreenSurfaceCreateFlagsQNX    flags;
+    struct _screen_context*          context;
+    struct _screen_window*           window;
+} VkScreenSurfaceCreateInfoQNX;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateScreenSurfaceQNX)(VkInstance instance, const VkScreenSurfaceCreateInfoQNX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface);
+typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, struct _screen_window* window);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateScreenSurfaceQNX(
+    VkInstance                                  instance,
+    const VkScreenSurfaceCreateInfoQNX*         pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+
+VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceScreenPresentationSupportQNX(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    struct _screen_window*                      window);
+#endif
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/host/libs/graphics_detector/include/vulkan/vulkan_static_assertions.hpp b/host/libs/graphics_detector/include/vulkan/vulkan_static_assertions.hpp
new file mode 100644
index 0000000..3ed04db
--- /dev/null
+++ b/host/libs/graphics_detector/include/vulkan/vulkan_static_assertions.hpp
@@ -0,0 +1,4195 @@
+// Copyright 2015-2022 The Khronos Group Inc.
+// 
+// SPDX-License-Identifier: Apache-2.0 OR MIT
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#ifndef VULKAN_STATIC_ASSERTIONS_HPP
+#  define VULKAN_STATIC_ASSERTIONS_HPP
+
+#include <vulkan/vulkan.hpp>
+
+//=========================
+//=== static_assertions ===
+//=========================
+
+
+  //=== VK_VERSION_1_0 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Extent2D ) == sizeof( VkExtent2D ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Extent2D>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Extent2D>::value, "Extent2D is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Extent3D ) == sizeof( VkExtent3D ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Extent3D>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Extent3D>::value, "Extent3D is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Offset2D ) == sizeof( VkOffset2D ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Offset2D>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Offset2D>::value, "Offset2D is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Offset3D ) == sizeof( VkOffset3D ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Offset3D>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Offset3D>::value, "Offset3D is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Rect2D ) == sizeof( VkRect2D ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Rect2D>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Rect2D>::value, "Rect2D is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BaseInStructure ) == sizeof( VkBaseInStructure ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BaseInStructure>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BaseInStructure>::value, "BaseInStructure is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BaseOutStructure ) == sizeof( VkBaseOutStructure ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BaseOutStructure>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BaseOutStructure>::value, "BaseOutStructure is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferMemoryBarrier ) == sizeof( VkBufferMemoryBarrier ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferMemoryBarrier>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferMemoryBarrier>::value, "BufferMemoryBarrier is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DispatchIndirectCommand ) == sizeof( VkDispatchIndirectCommand ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DispatchIndirectCommand>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DispatchIndirectCommand>::value, "DispatchIndirectCommand is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DrawIndexedIndirectCommand ) == sizeof( VkDrawIndexedIndirectCommand ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DrawIndexedIndirectCommand>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DrawIndexedIndirectCommand>::value, "DrawIndexedIndirectCommand is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DrawIndirectCommand ) == sizeof( VkDrawIndirectCommand ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DrawIndirectCommand>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DrawIndirectCommand>::value, "DrawIndirectCommand is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageMemoryBarrier ) == sizeof( VkImageMemoryBarrier ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageMemoryBarrier>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageMemoryBarrier>::value, "ImageMemoryBarrier is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryBarrier ) == sizeof( VkMemoryBarrier ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryBarrier>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryBarrier>::value, "MemoryBarrier is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersionOne ) == sizeof( VkPipelineCacheHeaderVersionOne ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersionOne>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersionOne>::value, "PipelineCacheHeaderVersionOne is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AllocationCallbacks ) == sizeof( VkAllocationCallbacks ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AllocationCallbacks>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AllocationCallbacks>::value, "AllocationCallbacks is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ApplicationInfo ) == sizeof( VkApplicationInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ApplicationInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ApplicationInfo>::value, "ApplicationInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FormatProperties ) == sizeof( VkFormatProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FormatProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FormatProperties>::value, "FormatProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageFormatProperties ) == sizeof( VkImageFormatProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageFormatProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageFormatProperties>::value, "ImageFormatProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Instance ) == sizeof( VkInstance ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Instance>::value, "Instance is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::InstanceCreateInfo ) == sizeof( VkInstanceCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::InstanceCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::InstanceCreateInfo>::value, "InstanceCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryHeap ) == sizeof( VkMemoryHeap ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryHeap>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryHeap>::value, "MemoryHeap is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryType ) == sizeof( VkMemoryType ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryType>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryType>::value, "MemoryType is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice ) == sizeof( VkPhysicalDevice ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevice>::value, "PhysicalDevice is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures ) == sizeof( VkPhysicalDeviceFeatures ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures>::value, "PhysicalDeviceFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits ) == sizeof( VkPhysicalDeviceLimits ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits>::value, "PhysicalDeviceLimits is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties ) == sizeof( VkPhysicalDeviceMemoryProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties>::value, "PhysicalDeviceMemoryProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties ) == sizeof( VkPhysicalDeviceProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties>::value, "PhysicalDeviceProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties ) == sizeof( VkPhysicalDeviceSparseProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties>::value, "PhysicalDeviceSparseProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyProperties ) == sizeof( VkQueueFamilyProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueueFamilyProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueueFamilyProperties>::value, "QueueFamilyProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Device ) == sizeof( VkDevice ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Device>::value, "Device is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceCreateInfo ) == sizeof( VkDeviceCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceCreateInfo>::value, "DeviceCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo ) == sizeof( VkDeviceQueueCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo>::value, "DeviceQueueCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExtensionProperties ) == sizeof( VkExtensionProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExtensionProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExtensionProperties>::value, "ExtensionProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::LayerProperties ) == sizeof( VkLayerProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::LayerProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::LayerProperties>::value, "LayerProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Queue ) == sizeof( VkQueue ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Queue>::value, "Queue is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubmitInfo ) == sizeof( VkSubmitInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubmitInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubmitInfo>::value, "SubmitInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MappedMemoryRange ) == sizeof( VkMappedMemoryRange ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MappedMemoryRange>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MappedMemoryRange>::value, "MappedMemoryRange is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryAllocateInfo ) == sizeof( VkMemoryAllocateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryAllocateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryAllocateInfo>::value, "MemoryAllocateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceMemory ) == sizeof( VkDeviceMemory ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceMemory>::value, "DeviceMemory is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryRequirements ) == sizeof( VkMemoryRequirements ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryRequirements>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryRequirements>::value, "MemoryRequirements is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindSparseInfo ) == sizeof( VkBindSparseInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindSparseInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindSparseInfo>::value, "BindSparseInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageSubresource ) == sizeof( VkImageSubresource ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageSubresource>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageSubresource>::value, "ImageSubresource is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo ) == sizeof( VkSparseBufferMemoryBindInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo>::value, "SparseBufferMemoryBindInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties ) == sizeof( VkSparseImageFormatProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties>::value, "SparseImageFormatProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseImageMemoryBind ) == sizeof( VkSparseImageMemoryBind ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SparseImageMemoryBind>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SparseImageMemoryBind>::value, "SparseImageMemoryBind is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo ) == sizeof( VkSparseImageMemoryBindInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo>::value, "SparseImageMemoryBindInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements ) == sizeof( VkSparseImageMemoryRequirements ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements>::value, "SparseImageMemoryRequirements is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo ) == sizeof( VkSparseImageOpaqueMemoryBindInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo>::value, "SparseImageOpaqueMemoryBindInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseMemoryBind ) == sizeof( VkSparseMemoryBind ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SparseMemoryBind>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SparseMemoryBind>::value, "SparseMemoryBind is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Fence ) == sizeof( VkFence ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Fence>::value, "Fence is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FenceCreateInfo ) == sizeof( VkFenceCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FenceCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FenceCreateInfo>::value, "FenceCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Semaphore ) == sizeof( VkSemaphore ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Semaphore>::value, "Semaphore is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo ) == sizeof( VkSemaphoreCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo>::value, "SemaphoreCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Event ) == sizeof( VkEvent ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Event>::value, "Event is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::EventCreateInfo ) == sizeof( VkEventCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::EventCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::EventCreateInfo>::value, "EventCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueryPool ) == sizeof( VkQueryPool ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueryPool>::value, "QueryPool is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo ) == sizeof( VkQueryPoolCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo>::value, "QueryPoolCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Buffer ) == sizeof( VkBuffer ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Buffer>::value, "Buffer is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCreateInfo ) == sizeof( VkBufferCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferCreateInfo>::value, "BufferCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferView ) == sizeof( VkBufferView ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferView>::value, "BufferView is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferViewCreateInfo ) == sizeof( VkBufferViewCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferViewCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferViewCreateInfo>::value, "BufferViewCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Image ) == sizeof( VkImage ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Image>::value, "Image is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageCreateInfo ) == sizeof( VkImageCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageCreateInfo>::value, "ImageCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubresourceLayout ) == sizeof( VkSubresourceLayout ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubresourceLayout>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubresourceLayout>::value, "SubresourceLayout is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ComponentMapping ) == sizeof( VkComponentMapping ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ComponentMapping>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ComponentMapping>::value, "ComponentMapping is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageSubresourceRange ) == sizeof( VkImageSubresourceRange ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageSubresourceRange>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageSubresourceRange>::value, "ImageSubresourceRange is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageView ) == sizeof( VkImageView ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageView>::value, "ImageView is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewCreateInfo ) == sizeof( VkImageViewCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageViewCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageViewCreateInfo>::value, "ImageViewCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShaderModule ) == sizeof( VkShaderModule ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ShaderModule>::value, "ShaderModule is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo ) == sizeof( VkShaderModuleCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo>::value, "ShaderModuleCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCache ) == sizeof( VkPipelineCache ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineCache>::value, "PipelineCache is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo ) == sizeof( VkPipelineCacheCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo>::value, "PipelineCacheCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo ) == sizeof( VkComputePipelineCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo>::value, "ComputePipelineCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo ) == sizeof( VkGraphicsPipelineCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo>::value, "GraphicsPipelineCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Pipeline ) == sizeof( VkPipeline ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Pipeline>::value, "Pipeline is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState ) == sizeof( VkPipelineColorBlendAttachmentState ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState>::value, "PipelineColorBlendAttachmentState is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo ) == sizeof( VkPipelineColorBlendStateCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo>::value, "PipelineColorBlendStateCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo ) == sizeof( VkPipelineDepthStencilStateCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo>::value, "PipelineDepthStencilStateCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo ) == sizeof( VkPipelineDynamicStateCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo>::value, "PipelineDynamicStateCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo ) == sizeof( VkPipelineInputAssemblyStateCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo>::value, "PipelineInputAssemblyStateCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo ) == sizeof( VkPipelineMultisampleStateCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo>::value, "PipelineMultisampleStateCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo ) == sizeof( VkPipelineRasterizationStateCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo>::value, "PipelineRasterizationStateCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo ) == sizeof( VkPipelineShaderStageCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo>::value, "PipelineShaderStageCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo ) == sizeof( VkPipelineTessellationStateCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo>::value, "PipelineTessellationStateCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo ) == sizeof( VkPipelineVertexInputStateCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo>::value, "PipelineVertexInputStateCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo ) == sizeof( VkPipelineViewportStateCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo>::value, "PipelineViewportStateCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SpecializationInfo ) == sizeof( VkSpecializationInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SpecializationInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SpecializationInfo>::value, "SpecializationInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SpecializationMapEntry ) == sizeof( VkSpecializationMapEntry ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SpecializationMapEntry>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SpecializationMapEntry>::value, "SpecializationMapEntry is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::StencilOpState ) == sizeof( VkStencilOpState ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::StencilOpState>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::StencilOpState>::value, "StencilOpState is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription ) == sizeof( VkVertexInputAttributeDescription ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription>::value, "VertexInputAttributeDescription is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VertexInputBindingDescription ) == sizeof( VkVertexInputBindingDescription ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VertexInputBindingDescription>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VertexInputBindingDescription>::value, "VertexInputBindingDescription is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Viewport ) == sizeof( VkViewport ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Viewport>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Viewport>::value, "Viewport is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineLayout ) == sizeof( VkPipelineLayout ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineLayout>::value, "PipelineLayout is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo ) == sizeof( VkPipelineLayoutCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo>::value, "PipelineLayoutCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PushConstantRange ) == sizeof( VkPushConstantRange ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PushConstantRange>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PushConstantRange>::value, "PushConstantRange is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Sampler ) == sizeof( VkSampler ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Sampler>::value, "Sampler is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerCreateInfo ) == sizeof( VkSamplerCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SamplerCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SamplerCreateInfo>::value, "SamplerCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyDescriptorSet ) == sizeof( VkCopyDescriptorSet ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyDescriptorSet>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyDescriptorSet>::value, "CopyDescriptorSet is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorBufferInfo ) == sizeof( VkDescriptorBufferInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorBufferInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorBufferInfo>::value, "DescriptorBufferInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorImageInfo ) == sizeof( VkDescriptorImageInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorImageInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorImageInfo>::value, "DescriptorImageInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorPool ) == sizeof( VkDescriptorPool ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorPool>::value, "DescriptorPool is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo ) == sizeof( VkDescriptorPoolCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo>::value, "DescriptorPoolCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorPoolSize ) == sizeof( VkDescriptorPoolSize ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorPoolSize>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorPoolSize>::value, "DescriptorPoolSize is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSet ) == sizeof( VkDescriptorSet ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSet>::value, "DescriptorSet is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo ) == sizeof( VkDescriptorSetAllocateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo>::value, "DescriptorSetAllocateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayout ) == sizeof( VkDescriptorSetLayout ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSetLayout>::value, "DescriptorSetLayout is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding ) == sizeof( VkDescriptorSetLayoutBinding ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding>::value, "DescriptorSetLayoutBinding is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo ) == sizeof( VkDescriptorSetLayoutCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo>::value, "DescriptorSetLayoutCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::WriteDescriptorSet ) == sizeof( VkWriteDescriptorSet ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::WriteDescriptorSet>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::WriteDescriptorSet>::value, "WriteDescriptorSet is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentDescription ) == sizeof( VkAttachmentDescription ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AttachmentDescription>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AttachmentDescription>::value, "AttachmentDescription is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentReference ) == sizeof( VkAttachmentReference ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AttachmentReference>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AttachmentReference>::value, "AttachmentReference is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Framebuffer ) == sizeof( VkFramebuffer ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Framebuffer>::value, "Framebuffer is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FramebufferCreateInfo ) == sizeof( VkFramebufferCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FramebufferCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FramebufferCreateInfo>::value, "FramebufferCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPass ) == sizeof( VkRenderPass ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPass>::value, "RenderPass is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo ) == sizeof( VkRenderPassCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassCreateInfo>::value, "RenderPassCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassDependency ) == sizeof( VkSubpassDependency ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassDependency>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassDependency>::value, "SubpassDependency is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassDescription ) == sizeof( VkSubpassDescription ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassDescription>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassDescription>::value, "SubpassDescription is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandPool ) == sizeof( VkCommandPool ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandPool>::value, "CommandPool is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo ) == sizeof( VkCommandPoolCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo>::value, "CommandPoolCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBuffer ) == sizeof( VkCommandBuffer ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandBuffer>::value, "CommandBuffer is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo ) == sizeof( VkCommandBufferAllocateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo>::value, "CommandBufferAllocateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo ) == sizeof( VkCommandBufferBeginInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo>::value, "CommandBufferBeginInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo ) == sizeof( VkCommandBufferInheritanceInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo>::value, "CommandBufferInheritanceInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCopy ) == sizeof( VkBufferCopy ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferCopy>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferCopy>::value, "BufferCopy is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferImageCopy ) == sizeof( VkBufferImageCopy ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferImageCopy>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferImageCopy>::value, "BufferImageCopy is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ClearAttachment ) == sizeof( VkClearAttachment ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ClearAttachment>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ClearAttachment>::value, "ClearAttachment is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ClearColorValue ) == sizeof( VkClearColorValue ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ClearColorValue>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ClearColorValue>::value, "ClearColorValue is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ClearDepthStencilValue ) == sizeof( VkClearDepthStencilValue ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ClearDepthStencilValue>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ClearDepthStencilValue>::value, "ClearDepthStencilValue is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ClearRect ) == sizeof( VkClearRect ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ClearRect>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ClearRect>::value, "ClearRect is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ClearValue ) == sizeof( VkClearValue ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ClearValue>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ClearValue>::value, "ClearValue is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageBlit ) == sizeof( VkImageBlit ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageBlit>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageBlit>::value, "ImageBlit is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageCopy ) == sizeof( VkImageCopy ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageCopy>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageCopy>::value, "ImageCopy is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageResolve ) == sizeof( VkImageResolve ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageResolve>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageResolve>::value, "ImageResolve is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers ) == sizeof( VkImageSubresourceLayers ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageSubresourceLayers>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageSubresourceLayers>::value, "ImageSubresourceLayers is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassBeginInfo ) == sizeof( VkRenderPassBeginInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassBeginInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassBeginInfo>::value, "RenderPassBeginInfo is not nothrow_move_constructible!" );
+
+  //=== VK_VERSION_1_1 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupProperties ) == sizeof( VkPhysicalDeviceSubgroupProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupProperties>::value, "PhysicalDeviceSubgroupProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo ) == sizeof( VkBindBufferMemoryInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo>::value, "BindBufferMemoryInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindImageMemoryInfo ) == sizeof( VkBindImageMemoryInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindImageMemoryInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindImageMemoryInfo>::value, "BindImageMemoryInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures ) == sizeof( VkPhysicalDevice16BitStorageFeatures ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures>::value, "PhysicalDevice16BitStorageFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements ) == sizeof( VkMemoryDedicatedRequirements ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements>::value, "MemoryDedicatedRequirements is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfo ) == sizeof( VkMemoryDedicatedAllocateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfo>::value, "MemoryDedicatedAllocateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsInfo ) == sizeof( VkMemoryAllocateFlagsInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsInfo>::value, "MemoryAllocateFlagsInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupRenderPassBeginInfo ) == sizeof( VkDeviceGroupRenderPassBeginInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceGroupRenderPassBeginInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceGroupRenderPassBeginInfo>::value, "DeviceGroupRenderPassBeginInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfo ) == sizeof( VkDeviceGroupCommandBufferBeginInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfo>::value, "DeviceGroupCommandBufferBeginInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupSubmitInfo ) == sizeof( VkDeviceGroupSubmitInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceGroupSubmitInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceGroupSubmitInfo>::value, "DeviceGroupSubmitInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfo ) == sizeof( VkDeviceGroupBindSparseInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfo>::value, "DeviceGroupBindSparseInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindBufferMemoryDeviceGroupInfo ) == sizeof( VkBindBufferMemoryDeviceGroupInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindBufferMemoryDeviceGroupInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindBufferMemoryDeviceGroupInfo>::value, "BindBufferMemoryDeviceGroupInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindImageMemoryDeviceGroupInfo ) == sizeof( VkBindImageMemoryDeviceGroupInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindImageMemoryDeviceGroupInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindImageMemoryDeviceGroupInfo>::value, "BindImageMemoryDeviceGroupInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties ) == sizeof( VkPhysicalDeviceGroupProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties>::value, "PhysicalDeviceGroupProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfo ) == sizeof( VkDeviceGroupDeviceCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfo>::value, "DeviceGroupDeviceCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 ) == sizeof( VkBufferMemoryRequirementsInfo2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2>::value, "BufferMemoryRequirementsInfo2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 ) == sizeof( VkImageMemoryRequirementsInfo2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2>::value, "ImageMemoryRequirementsInfo2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 ) == sizeof( VkImageSparseMemoryRequirementsInfo2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2>::value, "ImageSparseMemoryRequirementsInfo2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryRequirements2 ) == sizeof( VkMemoryRequirements2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryRequirements2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryRequirements2>::value, "MemoryRequirements2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 ) == sizeof( VkSparseImageMemoryRequirements2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>::value, "SparseImageMemoryRequirements2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 ) == sizeof( VkPhysicalDeviceFeatures2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>::value, "PhysicalDeviceFeatures2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 ) == sizeof( VkPhysicalDeviceProperties2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>::value, "PhysicalDeviceProperties2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FormatProperties2 ) == sizeof( VkFormatProperties2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FormatProperties2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FormatProperties2>::value, "FormatProperties2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageFormatProperties2 ) == sizeof( VkImageFormatProperties2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::value, "ImageFormatProperties2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 ) == sizeof( VkPhysicalDeviceImageFormatInfo2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2>::value, "PhysicalDeviceImageFormatInfo2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 ) == sizeof( VkQueueFamilyProperties2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>::value, "QueueFamilyProperties2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 ) == sizeof( VkPhysicalDeviceMemoryProperties2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>::value, "PhysicalDeviceMemoryProperties2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 ) == sizeof( VkSparseImageFormatProperties2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2>::value, "SparseImageFormatProperties2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 ) == sizeof( VkPhysicalDeviceSparseImageFormatInfo2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2>::value, "PhysicalDeviceSparseImageFormatInfo2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingProperties ) == sizeof( VkPhysicalDevicePointClippingProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingProperties>::value, "PhysicalDevicePointClippingProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassInputAttachmentAspectCreateInfo ) == sizeof( VkRenderPassInputAttachmentAspectCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassInputAttachmentAspectCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassInputAttachmentAspectCreateInfo>::value, "RenderPassInputAttachmentAspectCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference ) == sizeof( VkInputAttachmentAspectReference ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference>::value, "InputAttachmentAspectReference is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfo ) == sizeof( VkImageViewUsageCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfo>::value, "ImageViewUsageCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfo ) == sizeof( VkPipelineTessellationDomainOriginStateCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfo>::value, "PipelineTessellationDomainOriginStateCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassMultiviewCreateInfo ) == sizeof( VkRenderPassMultiviewCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassMultiviewCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassMultiviewCreateInfo>::value, "RenderPassMultiviewCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures ) == sizeof( VkPhysicalDeviceMultiviewFeatures ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures>::value, "PhysicalDeviceMultiviewFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewProperties ) == sizeof( VkPhysicalDeviceMultiviewProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewProperties>::value, "PhysicalDeviceMultiviewProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointersFeatures ) == sizeof( VkPhysicalDeviceVariablePointersFeatures ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointersFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointersFeatures>::value, "PhysicalDeviceVariablePointersFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryFeatures ) == sizeof( VkPhysicalDeviceProtectedMemoryFeatures ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryFeatures>::value, "PhysicalDeviceProtectedMemoryFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties ) == sizeof( VkPhysicalDeviceProtectedMemoryProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties>::value, "PhysicalDeviceProtectedMemoryProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 ) == sizeof( VkDeviceQueueInfo2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceQueueInfo2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceQueueInfo2>::value, "DeviceQueueInfo2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo ) == sizeof( VkProtectedSubmitInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo>::value, "ProtectedSubmitInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo ) == sizeof( VkSamplerYcbcrConversionCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo>::value, "SamplerYcbcrConversionCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfo ) == sizeof( VkSamplerYcbcrConversionInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfo>::value, "SamplerYcbcrConversionInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfo ) == sizeof( VkBindImagePlaneMemoryInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfo>::value, "BindImagePlaneMemoryInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfo ) == sizeof( VkImagePlaneMemoryRequirementsInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfo>::value, "ImagePlaneMemoryRequirementsInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeatures ) == sizeof( VkPhysicalDeviceSamplerYcbcrConversionFeatures ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeatures>::value, "PhysicalDeviceSamplerYcbcrConversionFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionImageFormatProperties ) == sizeof( VkSamplerYcbcrConversionImageFormatProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionImageFormatProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionImageFormatProperties>::value, "SamplerYcbcrConversionImageFormatProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ) == sizeof( VkSamplerYcbcrConversion ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::value, "SamplerYcbcrConversion is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate ) == sizeof( VkDescriptorUpdateTemplate ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::value, "DescriptorUpdateTemplate is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry ) == sizeof( VkDescriptorUpdateTemplateEntry ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry>::value, "DescriptorUpdateTemplateEntry is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo ) == sizeof( VkDescriptorUpdateTemplateCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo>::value, "DescriptorUpdateTemplateCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalMemoryProperties ) == sizeof( VkExternalMemoryProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalMemoryProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalMemoryProperties>::value, "ExternalMemoryProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfo ) == sizeof( VkPhysicalDeviceExternalImageFormatInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfo>::value, "PhysicalDeviceExternalImageFormatInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalImageFormatProperties ) == sizeof( VkExternalImageFormatProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalImageFormatProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalImageFormatProperties>::value, "ExternalImageFormatProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo ) == sizeof( VkPhysicalDeviceExternalBufferInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo>::value, "PhysicalDeviceExternalBufferInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalBufferProperties ) == sizeof( VkExternalBufferProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalBufferProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalBufferProperties>::value, "ExternalBufferProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceIDProperties ) == sizeof( VkPhysicalDeviceIDProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceIDProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceIDProperties>::value, "PhysicalDeviceIDProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfo ) == sizeof( VkExternalMemoryImageCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfo>::value, "ExternalMemoryImageCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfo ) == sizeof( VkExternalMemoryBufferCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfo>::value, "ExternalMemoryBufferCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfo ) == sizeof( VkExportMemoryAllocateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfo>::value, "ExportMemoryAllocateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo ) == sizeof( VkPhysicalDeviceExternalFenceInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo>::value, "PhysicalDeviceExternalFenceInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalFenceProperties ) == sizeof( VkExternalFenceProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalFenceProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalFenceProperties>::value, "ExternalFenceProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo ) == sizeof( VkExportFenceCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo>::value, "ExportFenceCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportSemaphoreCreateInfo ) == sizeof( VkExportSemaphoreCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportSemaphoreCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportSemaphoreCreateInfo>::value, "ExportSemaphoreCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo ) == sizeof( VkPhysicalDeviceExternalSemaphoreInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo>::value, "PhysicalDeviceExternalSemaphoreInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties ) == sizeof( VkExternalSemaphoreProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties>::value, "ExternalSemaphoreProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties ) == sizeof( VkPhysicalDeviceMaintenance3Properties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties>::value, "PhysicalDeviceMaintenance3Properties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport ) == sizeof( VkDescriptorSetLayoutSupport ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>::value, "DescriptorSetLayoutSupport is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures ) == sizeof( VkPhysicalDeviceShaderDrawParametersFeatures ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures>::value, "PhysicalDeviceShaderDrawParametersFeatures is not nothrow_move_constructible!" );
+
+  //=== VK_VERSION_1_2 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Features ) == sizeof( VkPhysicalDeviceVulkan11Features ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Features>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Features>::value, "PhysicalDeviceVulkan11Features is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Properties ) == sizeof( VkPhysicalDeviceVulkan11Properties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Properties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Properties>::value, "PhysicalDeviceVulkan11Properties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Features ) == sizeof( VkPhysicalDeviceVulkan12Features ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Features>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Features>::value, "PhysicalDeviceVulkan12Features is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Properties ) == sizeof( VkPhysicalDeviceVulkan12Properties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Properties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Properties>::value, "PhysicalDeviceVulkan12Properties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageFormatListCreateInfo ) == sizeof( VkImageFormatListCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageFormatListCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageFormatListCreateInfo>::value, "ImageFormatListCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 ) == sizeof( VkRenderPassCreateInfo2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2>::value, "RenderPassCreateInfo2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentDescription2 ) == sizeof( VkAttachmentDescription2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AttachmentDescription2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AttachmentDescription2>::value, "AttachmentDescription2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentReference2 ) == sizeof( VkAttachmentReference2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AttachmentReference2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AttachmentReference2>::value, "AttachmentReference2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassDescription2 ) == sizeof( VkSubpassDescription2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassDescription2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassDescription2>::value, "SubpassDescription2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassDependency2 ) == sizeof( VkSubpassDependency2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassDependency2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassDependency2>::value, "SubpassDependency2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassBeginInfo ) == sizeof( VkSubpassBeginInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassBeginInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassBeginInfo>::value, "SubpassBeginInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassEndInfo ) == sizeof( VkSubpassEndInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassEndInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassEndInfo>::value, "SubpassEndInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeatures ) == sizeof( VkPhysicalDevice8BitStorageFeatures ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeatures>::value, "PhysicalDevice8BitStorageFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ConformanceVersion ) == sizeof( VkConformanceVersion ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ConformanceVersion>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ConformanceVersion>::value, "ConformanceVersion is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverProperties ) == sizeof( VkPhysicalDeviceDriverProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverProperties>::value, "PhysicalDeviceDriverProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64Features ) == sizeof( VkPhysicalDeviceShaderAtomicInt64Features ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64Features>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64Features>::value, "PhysicalDeviceShaderAtomicInt64Features is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8Features ) == sizeof( VkPhysicalDeviceShaderFloat16Int8Features ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8Features>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8Features>::value, "PhysicalDeviceShaderFloat16Int8Features is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsProperties ) == sizeof( VkPhysicalDeviceFloatControlsProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsProperties>::value, "PhysicalDeviceFloatControlsProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfo ) == sizeof( VkDescriptorSetLayoutBindingFlagsCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfo>::value, "DescriptorSetLayoutBindingFlagsCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeatures ) == sizeof( VkPhysicalDeviceDescriptorIndexingFeatures ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeatures>::value, "PhysicalDeviceDescriptorIndexingFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingProperties ) == sizeof( VkPhysicalDeviceDescriptorIndexingProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingProperties>::value, "PhysicalDeviceDescriptorIndexingProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfo ) == sizeof( VkDescriptorSetVariableDescriptorCountAllocateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfo>::value, "DescriptorSetVariableDescriptorCountAllocateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupport ) == sizeof( VkDescriptorSetVariableDescriptorCountLayoutSupport ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupport>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupport>::value, "DescriptorSetVariableDescriptorCountLayoutSupport is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassDescriptionDepthStencilResolve ) == sizeof( VkSubpassDescriptionDepthStencilResolve ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassDescriptionDepthStencilResolve>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassDescriptionDepthStencilResolve>::value, "SubpassDescriptionDepthStencilResolve is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthStencilResolveProperties ) == sizeof( VkPhysicalDeviceDepthStencilResolveProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthStencilResolveProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthStencilResolveProperties>::value, "PhysicalDeviceDepthStencilResolveProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeatures ) == sizeof( VkPhysicalDeviceScalarBlockLayoutFeatures ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeatures>::value, "PhysicalDeviceScalarBlockLayoutFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfo ) == sizeof( VkImageStencilUsageCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfo>::value, "ImageStencilUsageCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerReductionModeCreateInfo ) == sizeof( VkSamplerReductionModeCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SamplerReductionModeCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SamplerReductionModeCreateInfo>::value, "SamplerReductionModeCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxProperties ) == sizeof( VkPhysicalDeviceSamplerFilterMinmaxProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxProperties>::value, "PhysicalDeviceSamplerFilterMinmaxProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeatures ) == sizeof( VkPhysicalDeviceVulkanMemoryModelFeatures ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeatures>::value, "PhysicalDeviceVulkanMemoryModelFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeatures ) == sizeof( VkPhysicalDeviceImagelessFramebufferFeatures ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeatures>::value, "PhysicalDeviceImagelessFramebufferFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfo ) == sizeof( VkFramebufferAttachmentsCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfo>::value, "FramebufferAttachmentsCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo ) == sizeof( VkFramebufferAttachmentImageInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo>::value, "FramebufferAttachmentImageInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassAttachmentBeginInfo ) == sizeof( VkRenderPassAttachmentBeginInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassAttachmentBeginInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassAttachmentBeginInfo>::value, "RenderPassAttachmentBeginInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeatures ) == sizeof( VkPhysicalDeviceUniformBufferStandardLayoutFeatures ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeatures>::value, "PhysicalDeviceUniformBufferStandardLayoutFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeatures ) == sizeof( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeatures>::value, "PhysicalDeviceShaderSubgroupExtendedTypesFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeatures ) == sizeof( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeatures>::value, "PhysicalDeviceSeparateDepthStencilLayoutsFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentReferenceStencilLayout ) == sizeof( VkAttachmentReferenceStencilLayout ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AttachmentReferenceStencilLayout>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AttachmentReferenceStencilLayout>::value, "AttachmentReferenceStencilLayout is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayout ) == sizeof( VkAttachmentDescriptionStencilLayout ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayout>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayout>::value, "AttachmentDescriptionStencilLayout is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeatures ) == sizeof( VkPhysicalDeviceHostQueryResetFeatures ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeatures>::value, "PhysicalDeviceHostQueryResetFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeatures ) == sizeof( VkPhysicalDeviceTimelineSemaphoreFeatures ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeatures>::value, "PhysicalDeviceTimelineSemaphoreFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreProperties ) == sizeof( VkPhysicalDeviceTimelineSemaphoreProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreProperties>::value, "PhysicalDeviceTimelineSemaphoreProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreTypeCreateInfo ) == sizeof( VkSemaphoreTypeCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SemaphoreTypeCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SemaphoreTypeCreateInfo>::value, "SemaphoreTypeCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfo ) == sizeof( VkTimelineSemaphoreSubmitInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfo>::value, "TimelineSemaphoreSubmitInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo ) == sizeof( VkSemaphoreWaitInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo>::value, "SemaphoreWaitInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo ) == sizeof( VkSemaphoreSignalInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo>::value, "SemaphoreSignalInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeatures ) == sizeof( VkPhysicalDeviceBufferDeviceAddressFeatures ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeatures>::value, "PhysicalDeviceBufferDeviceAddressFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo ) == sizeof( VkBufferDeviceAddressInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo>::value, "BufferDeviceAddressInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfo ) == sizeof( VkBufferOpaqueCaptureAddressCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfo>::value, "BufferOpaqueCaptureAddressCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfo ) == sizeof( VkMemoryOpaqueCaptureAddressAllocateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfo>::value, "MemoryOpaqueCaptureAddressAllocateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo ) == sizeof( VkDeviceMemoryOpaqueCaptureAddressInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo>::value, "DeviceMemoryOpaqueCaptureAddressInfo is not nothrow_move_constructible!" );
+
+  //=== VK_VERSION_1_3 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Features ) == sizeof( VkPhysicalDeviceVulkan13Features ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Features>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Features>::value, "PhysicalDeviceVulkan13Features is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Properties ) == sizeof( VkPhysicalDeviceVulkan13Properties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Properties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Properties>::value, "PhysicalDeviceVulkan13Properties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfo ) == sizeof( VkPipelineCreationFeedbackCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfo>::value, "PipelineCreationFeedbackCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCreationFeedback ) == sizeof( VkPipelineCreationFeedback ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineCreationFeedback>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineCreationFeedback>::value, "PipelineCreationFeedback is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTerminateInvocationFeatures ) == sizeof( VkPhysicalDeviceShaderTerminateInvocationFeatures ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTerminateInvocationFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTerminateInvocationFeatures>::value, "PhysicalDeviceShaderTerminateInvocationFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties ) == sizeof( VkPhysicalDeviceToolProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties>::value, "PhysicalDeviceToolProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDemoteToHelperInvocationFeatures ) == sizeof( VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDemoteToHelperInvocationFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDemoteToHelperInvocationFeatures>::value, "PhysicalDeviceShaderDemoteToHelperInvocationFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePrivateDataFeatures ) == sizeof( VkPhysicalDevicePrivateDataFeatures ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePrivateDataFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePrivateDataFeatures>::value, "PhysicalDevicePrivateDataFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DevicePrivateDataCreateInfo ) == sizeof( VkDevicePrivateDataCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DevicePrivateDataCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DevicePrivateDataCreateInfo>::value, "DevicePrivateDataCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo ) == sizeof( VkPrivateDataSlotCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo>::value, "PrivateDataSlotCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PrivateDataSlot ) == sizeof( VkPrivateDataSlot ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PrivateDataSlot>::value, "PrivateDataSlot is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineCreationCacheControlFeatures ) == sizeof( VkPhysicalDevicePipelineCreationCacheControlFeatures ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineCreationCacheControlFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineCreationCacheControlFeatures>::value, "PhysicalDevicePipelineCreationCacheControlFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryBarrier2 ) == sizeof( VkMemoryBarrier2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryBarrier2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryBarrier2>::value, "MemoryBarrier2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2 ) == sizeof( VkBufferMemoryBarrier2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2>::value, "BufferMemoryBarrier2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2 ) == sizeof( VkImageMemoryBarrier2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2>::value, "ImageMemoryBarrier2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DependencyInfo ) == sizeof( VkDependencyInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DependencyInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DependencyInfo>::value, "DependencyInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubmitInfo2 ) == sizeof( VkSubmitInfo2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubmitInfo2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubmitInfo2>::value, "SubmitInfo2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo ) == sizeof( VkSemaphoreSubmitInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo>::value, "SemaphoreSubmitInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo ) == sizeof( VkCommandBufferSubmitInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo>::value, "CommandBufferSubmitInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSynchronization2Features ) == sizeof( VkPhysicalDeviceSynchronization2Features ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSynchronization2Features>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSynchronization2Features>::value, "PhysicalDeviceSynchronization2Features is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures ) == sizeof( VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures>::value, "PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageRobustnessFeatures ) == sizeof( VkPhysicalDeviceImageRobustnessFeatures ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageRobustnessFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageRobustnessFeatures>::value, "PhysicalDeviceImageRobustnessFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyBufferInfo2 ) == sizeof( VkCopyBufferInfo2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyBufferInfo2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyBufferInfo2>::value, "CopyBufferInfo2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyImageInfo2 ) == sizeof( VkCopyImageInfo2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyImageInfo2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyImageInfo2>::value, "CopyImageInfo2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 ) == sizeof( VkCopyBufferToImageInfo2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2>::value, "CopyBufferToImageInfo2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 ) == sizeof( VkCopyImageToBufferInfo2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2>::value, "CopyImageToBufferInfo2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BlitImageInfo2 ) == sizeof( VkBlitImageInfo2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BlitImageInfo2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BlitImageInfo2>::value, "BlitImageInfo2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ResolveImageInfo2 ) == sizeof( VkResolveImageInfo2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ResolveImageInfo2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ResolveImageInfo2>::value, "ResolveImageInfo2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCopy2 ) == sizeof( VkBufferCopy2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferCopy2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferCopy2>::value, "BufferCopy2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageCopy2 ) == sizeof( VkImageCopy2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageCopy2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageCopy2>::value, "ImageCopy2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageBlit2 ) == sizeof( VkImageBlit2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageBlit2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageBlit2>::value, "ImageBlit2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferImageCopy2 ) == sizeof( VkBufferImageCopy2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferImageCopy2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferImageCopy2>::value, "BufferImageCopy2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageResolve2 ) == sizeof( VkImageResolve2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageResolve2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageResolve2>::value, "ImageResolve2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeatures ) == sizeof( VkPhysicalDeviceSubgroupSizeControlFeatures ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeatures>::value, "PhysicalDeviceSubgroupSizeControlFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlProperties ) == sizeof( VkPhysicalDeviceSubgroupSizeControlProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlProperties>::value, "PhysicalDeviceSubgroupSizeControlProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfo ) == sizeof( VkPipelineShaderStageRequiredSubgroupSizeCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfo>::value, "PipelineShaderStageRequiredSubgroupSizeCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeatures ) == sizeof( VkPhysicalDeviceInlineUniformBlockFeatures ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeatures>::value, "PhysicalDeviceInlineUniformBlockFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockProperties ) == sizeof( VkPhysicalDeviceInlineUniformBlockProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockProperties>::value, "PhysicalDeviceInlineUniformBlockProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlock ) == sizeof( VkWriteDescriptorSetInlineUniformBlock ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlock>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlock>::value, "WriteDescriptorSetInlineUniformBlock is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfo ) == sizeof( VkDescriptorPoolInlineUniformBlockCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfo>::value, "DescriptorPoolInlineUniformBlockCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeatures ) == sizeof( VkPhysicalDeviceTextureCompressionASTCHDRFeatures ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeatures>::value, "PhysicalDeviceTextureCompressionASTCHDRFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderingInfo ) == sizeof( VkRenderingInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderingInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderingInfo>::value, "RenderingInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo ) == sizeof( VkRenderingAttachmentInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo>::value, "RenderingAttachmentInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRenderingCreateInfo ) == sizeof( VkPipelineRenderingCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineRenderingCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineRenderingCreateInfo>::value, "PipelineRenderingCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingFeatures ) == sizeof( VkPhysicalDeviceDynamicRenderingFeatures ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingFeatures>::value, "PhysicalDeviceDynamicRenderingFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderingInfo ) == sizeof( VkCommandBufferInheritanceRenderingInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderingInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderingInfo>::value, "CommandBufferInheritanceRenderingInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductFeatures ) == sizeof( VkPhysicalDeviceShaderIntegerDotProductFeatures ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductFeatures>::value, "PhysicalDeviceShaderIntegerDotProductFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductProperties ) == sizeof( VkPhysicalDeviceShaderIntegerDotProductProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductProperties>::value, "PhysicalDeviceShaderIntegerDotProductProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentProperties ) == sizeof( VkPhysicalDeviceTexelBufferAlignmentProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentProperties>::value, "PhysicalDeviceTexelBufferAlignmentProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FormatProperties3 ) == sizeof( VkFormatProperties3 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FormatProperties3>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FormatProperties3>::value, "FormatProperties3 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Features ) == sizeof( VkPhysicalDeviceMaintenance4Features ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Features>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Features>::value, "PhysicalDeviceMaintenance4Features is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Properties ) == sizeof( VkPhysicalDeviceMaintenance4Properties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Properties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Properties>::value, "PhysicalDeviceMaintenance4Properties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements ) == sizeof( VkDeviceBufferMemoryRequirements ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements>::value, "DeviceBufferMemoryRequirements is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements ) == sizeof( VkDeviceImageMemoryRequirements ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements>::value, "DeviceImageMemoryRequirements is not nothrow_move_constructible!" );
+
+  //=== VK_KHR_surface ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceKHR ) == sizeof( VkSurfaceKHR ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceKHR>::value, "SurfaceKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR ) == sizeof( VkSurfaceCapabilitiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR>::value, "SurfaceCapabilitiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceFormatKHR ) == sizeof( VkSurfaceFormatKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR>::value, "SurfaceFormatKHR is not nothrow_move_constructible!" );
+
+  //=== VK_KHR_swapchain ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR ) == sizeof( VkSwapchainCreateInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR>::value, "SwapchainCreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SwapchainKHR ) == sizeof( VkSwapchainKHR ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SwapchainKHR>::value, "SwapchainKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PresentInfoKHR ) == sizeof( VkPresentInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PresentInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PresentInfoKHR>::value, "PresentInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR ) == sizeof( VkImageSwapchainCreateInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR>::value, "ImageSwapchainCreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindImageMemorySwapchainInfoKHR ) == sizeof( VkBindImageMemorySwapchainInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindImageMemorySwapchainInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindImageMemorySwapchainInfoKHR>::value, "BindImageMemorySwapchainInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR ) == sizeof( VkAcquireNextImageInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR>::value, "AcquireNextImageInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR ) == sizeof( VkDeviceGroupPresentCapabilitiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR>::value, "DeviceGroupPresentCapabilitiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupPresentInfoKHR ) == sizeof( VkDeviceGroupPresentInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceGroupPresentInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceGroupPresentInfoKHR>::value, "DeviceGroupPresentInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupSwapchainCreateInfoKHR ) == sizeof( VkDeviceGroupSwapchainCreateInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceGroupSwapchainCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceGroupSwapchainCreateInfoKHR>::value, "DeviceGroupSwapchainCreateInfoKHR is not nothrow_move_constructible!" );
+
+  //=== VK_KHR_display ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayKHR ) == sizeof( VkDisplayKHR ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayKHR>::value, "DisplayKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR ) == sizeof( VkDisplayModeCreateInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR>::value, "DisplayModeCreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayModeKHR ) == sizeof( VkDisplayModeKHR ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayModeKHR>::value, "DisplayModeKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR ) == sizeof( VkDisplayModeParametersKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR>::value, "DisplayModeParametersKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR ) == sizeof( VkDisplayModePropertiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR>::value, "DisplayModePropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR ) == sizeof( VkDisplayPlaneCapabilitiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR>::value, "DisplayPlaneCapabilitiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR ) == sizeof( VkDisplayPlanePropertiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR>::value, "DisplayPlanePropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR ) == sizeof( VkDisplayPropertiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR>::value, "DisplayPropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR ) == sizeof( VkDisplaySurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR>::value, "DisplaySurfaceCreateInfoKHR is not nothrow_move_constructible!" );
+
+  //=== VK_KHR_display_swapchain ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayPresentInfoKHR ) == sizeof( VkDisplayPresentInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayPresentInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayPresentInfoKHR>::value, "DisplayPresentInfoKHR is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_XLIB_KHR )
+  //=== VK_KHR_xlib_surface ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR ) == sizeof( VkXlibSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR>::value, "XlibSurfaceCreateInfoKHR is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+#if defined( VK_USE_PLATFORM_XCB_KHR )
+  //=== VK_KHR_xcb_surface ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR ) == sizeof( VkXcbSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR>::value, "XcbSurfaceCreateInfoKHR is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
+  //=== VK_KHR_wayland_surface ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR ) == sizeof( VkWaylandSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR>::value, "WaylandSurfaceCreateInfoKHR is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  //=== VK_KHR_android_surface ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR ) == sizeof( VkAndroidSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR>::value, "AndroidSurfaceCreateInfoKHR is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_win32_surface ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR ) == sizeof( VkWin32SurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR>::value, "Win32SurfaceCreateInfoKHR is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_EXT_debug_report ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT ) == sizeof( VkDebugReportCallbackEXT ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT>::value, "DebugReportCallbackEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT ) == sizeof( VkDebugReportCallbackCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT>::value, "DebugReportCallbackCreateInfoEXT is not nothrow_move_constructible!" );
+
+  //=== VK_AMD_rasterization_order ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateRasterizationOrderAMD ) == sizeof( VkPipelineRasterizationStateRasterizationOrderAMD ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateRasterizationOrderAMD>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateRasterizationOrderAMD>::value, "PipelineRasterizationStateRasterizationOrderAMD is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_debug_marker ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT ) == sizeof( VkDebugMarkerObjectNameInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT>::value, "DebugMarkerObjectNameInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT ) == sizeof( VkDebugMarkerObjectTagInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT>::value, "DebugMarkerObjectTagInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT ) == sizeof( VkDebugMarkerMarkerInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT>::value, "DebugMarkerMarkerInfoEXT is not nothrow_move_constructible!" );
+
+  //=== VK_KHR_video_queue ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoSessionKHR ) == sizeof( VkVideoSessionKHR ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoSessionKHR>::value, "VideoSessionKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR ) == sizeof( VkVideoSessionParametersKHR ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR>::value, "VideoSessionParametersKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyQueryResultStatusPropertiesKHR ) == sizeof( VkQueueFamilyQueryResultStatusPropertiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueueFamilyQueryResultStatusPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueueFamilyQueryResultStatusPropertiesKHR>::value, "QueueFamilyQueryResultStatusPropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyVideoPropertiesKHR ) == sizeof( VkQueueFamilyVideoPropertiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueueFamilyVideoPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueueFamilyVideoPropertiesKHR>::value, "QueueFamilyVideoPropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR ) == sizeof( VkVideoProfileInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR>::value, "VideoProfileInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoProfileListInfoKHR ) == sizeof( VkVideoProfileListInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoProfileListInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoProfileListInfoKHR>::value, "VideoProfileListInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR ) == sizeof( VkVideoCapabilitiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR>::value, "VideoCapabilitiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR ) == sizeof( VkPhysicalDeviceVideoFormatInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR>::value, "PhysicalDeviceVideoFormatInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR ) == sizeof( VkVideoFormatPropertiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR>::value, "VideoFormatPropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR ) == sizeof( VkVideoPictureResourceInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR>::value, "VideoPictureResourceInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR ) == sizeof( VkVideoReferenceSlotInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR>::value, "VideoReferenceSlotInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR ) == sizeof( VkVideoSessionMemoryRequirementsKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR>::value, "VideoSessionMemoryRequirementsKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR ) == sizeof( VkBindVideoSessionMemoryInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR>::value, "BindVideoSessionMemoryInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR ) == sizeof( VkVideoSessionCreateInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR>::value, "VideoSessionCreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR ) == sizeof( VkVideoSessionParametersCreateInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR>::value, "VideoSessionParametersCreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR ) == sizeof( VkVideoSessionParametersUpdateInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR>::value, "VideoSessionParametersUpdateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR ) == sizeof( VkVideoBeginCodingInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR>::value, "VideoBeginCodingInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR ) == sizeof( VkVideoEndCodingInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR>::value, "VideoEndCodingInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR ) == sizeof( VkVideoCodingControlInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR>::value, "VideoCodingControlInfoKHR is not nothrow_move_constructible!" );
+
+  //=== VK_KHR_video_decode_queue ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeCapabilitiesKHR ) == sizeof( VkVideoDecodeCapabilitiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeCapabilitiesKHR>::value, "VideoDecodeCapabilitiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeUsageInfoKHR ) == sizeof( VkVideoDecodeUsageInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeUsageInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeUsageInfoKHR>::value, "VideoDecodeUsageInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR ) == sizeof( VkVideoDecodeInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR>::value, "VideoDecodeInfoKHR is not nothrow_move_constructible!" );
+
+  //=== VK_NV_dedicated_allocation ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DedicatedAllocationImageCreateInfoNV ) == sizeof( VkDedicatedAllocationImageCreateInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DedicatedAllocationImageCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DedicatedAllocationImageCreateInfoNV>::value, "DedicatedAllocationImageCreateInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV ) == sizeof( VkDedicatedAllocationBufferCreateInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV>::value, "DedicatedAllocationBufferCreateInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV ) == sizeof( VkDedicatedAllocationMemoryAllocateInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV>::value, "DedicatedAllocationMemoryAllocateInfoNV is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_transform_feedback ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackFeaturesEXT ) == sizeof( VkPhysicalDeviceTransformFeedbackFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackFeaturesEXT>::value, "PhysicalDeviceTransformFeedbackFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackPropertiesEXT ) == sizeof( VkPhysicalDeviceTransformFeedbackPropertiesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackPropertiesEXT>::value, "PhysicalDeviceTransformFeedbackPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateInfoEXT ) == sizeof( VkPipelineRasterizationStateStreamCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateInfoEXT>::value, "PipelineRasterizationStateStreamCreateInfoEXT is not nothrow_move_constructible!" );
+
+  //=== VK_NVX_binary_import ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CuModuleNVX ) == sizeof( VkCuModuleNVX ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CuModuleNVX>::value, "CuModuleNVX is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CuFunctionNVX ) == sizeof( VkCuFunctionNVX ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CuFunctionNVX>::value, "CuFunctionNVX is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX ) == sizeof( VkCuModuleCreateInfoNVX ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX>::value, "CuModuleCreateInfoNVX is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX ) == sizeof( VkCuFunctionCreateInfoNVX ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX>::value, "CuFunctionCreateInfoNVX is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX ) == sizeof( VkCuLaunchInfoNVX ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX>::value, "CuLaunchInfoNVX is not nothrow_move_constructible!" );
+
+  //=== VK_NVX_image_view_handle ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX ) == sizeof( VkImageViewHandleInfoNVX ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX>::value, "ImageViewHandleInfoNVX is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX ) == sizeof( VkImageViewAddressPropertiesNVX ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX>::value, "ImageViewAddressPropertiesNVX is not nothrow_move_constructible!" );
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  //=== VK_EXT_video_encode_h264 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilitiesEXT ) == sizeof( VkVideoEncodeH264CapabilitiesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilitiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilitiesEXT>::value, "VideoEncodeH264CapabilitiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersCreateInfoEXT ) == sizeof( VkVideoEncodeH264SessionParametersCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersCreateInfoEXT>::value, "VideoEncodeH264SessionParametersCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT ) == sizeof( VkVideoEncodeH264SessionParametersAddInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT>::value, "VideoEncodeH264SessionParametersAddInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264VclFrameInfoEXT ) == sizeof( VkVideoEncodeH264VclFrameInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264VclFrameInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264VclFrameInfoEXT>::value, "VideoEncodeH264VclFrameInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264ReferenceListsInfoEXT ) == sizeof( VkVideoEncodeH264ReferenceListsInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264ReferenceListsInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264ReferenceListsInfoEXT>::value, "VideoEncodeH264ReferenceListsInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264EmitPictureParametersInfoEXT ) == sizeof( VkVideoEncodeH264EmitPictureParametersInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264EmitPictureParametersInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264EmitPictureParametersInfoEXT>::value, "VideoEncodeH264EmitPictureParametersInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT ) == sizeof( VkVideoEncodeH264DpbSlotInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT>::value, "VideoEncodeH264DpbSlotInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceInfoEXT ) == sizeof( VkVideoEncodeH264NaluSliceInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceInfoEXT>::value, "VideoEncodeH264NaluSliceInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264ProfileInfoEXT ) == sizeof( VkVideoEncodeH264ProfileInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264ProfileInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264ProfileInfoEXT>::value, "VideoEncodeH264ProfileInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlInfoEXT ) == sizeof( VkVideoEncodeH264RateControlInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlInfoEXT>::value, "VideoEncodeH264RateControlInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlLayerInfoEXT ) == sizeof( VkVideoEncodeH264RateControlLayerInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlLayerInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlLayerInfoEXT>::value, "VideoEncodeH264RateControlLayerInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT ) == sizeof( VkVideoEncodeH264QpEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT>::value, "VideoEncodeH264QpEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeEXT ) == sizeof( VkVideoEncodeH264FrameSizeEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeEXT>::value, "VideoEncodeH264FrameSizeEXT is not nothrow_move_constructible!" );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  //=== VK_EXT_video_encode_h265 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilitiesEXT ) == sizeof( VkVideoEncodeH265CapabilitiesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilitiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilitiesEXT>::value, "VideoEncodeH265CapabilitiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersCreateInfoEXT ) == sizeof( VkVideoEncodeH265SessionParametersCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersCreateInfoEXT>::value, "VideoEncodeH265SessionParametersCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoEXT ) == sizeof( VkVideoEncodeH265SessionParametersAddInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoEXT>::value, "VideoEncodeH265SessionParametersAddInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265VclFrameInfoEXT ) == sizeof( VkVideoEncodeH265VclFrameInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265VclFrameInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265VclFrameInfoEXT>::value, "VideoEncodeH265VclFrameInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265EmitPictureParametersInfoEXT ) == sizeof( VkVideoEncodeH265EmitPictureParametersInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265EmitPictureParametersInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265EmitPictureParametersInfoEXT>::value, "VideoEncodeH265EmitPictureParametersInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT ) == sizeof( VkVideoEncodeH265DpbSlotInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT>::value, "VideoEncodeH265DpbSlotInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentInfoEXT ) == sizeof( VkVideoEncodeH265NaluSliceSegmentInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentInfoEXT>::value, "VideoEncodeH265NaluSliceSegmentInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265ProfileInfoEXT ) == sizeof( VkVideoEncodeH265ProfileInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265ProfileInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265ProfileInfoEXT>::value, "VideoEncodeH265ProfileInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsInfoEXT ) == sizeof( VkVideoEncodeH265ReferenceListsInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsInfoEXT>::value, "VideoEncodeH265ReferenceListsInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlInfoEXT ) == sizeof( VkVideoEncodeH265RateControlInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlInfoEXT>::value, "VideoEncodeH265RateControlInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlLayerInfoEXT ) == sizeof( VkVideoEncodeH265RateControlLayerInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlLayerInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlLayerInfoEXT>::value, "VideoEncodeH265RateControlLayerInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT ) == sizeof( VkVideoEncodeH265QpEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT>::value, "VideoEncodeH265QpEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeEXT ) == sizeof( VkVideoEncodeH265FrameSizeEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeEXT>::value, "VideoEncodeH265FrameSizeEXT is not nothrow_move_constructible!" );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  //=== VK_KHR_video_decode_h264 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264ProfileInfoKHR ) == sizeof( VkVideoDecodeH264ProfileInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH264ProfileInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH264ProfileInfoKHR>::value, "VideoDecodeH264ProfileInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264CapabilitiesKHR ) == sizeof( VkVideoDecodeH264CapabilitiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH264CapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH264CapabilitiesKHR>::value, "VideoDecodeH264CapabilitiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersCreateInfoKHR ) == sizeof( VkVideoDecodeH264SessionParametersCreateInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersCreateInfoKHR>::value, "VideoDecodeH264SessionParametersCreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoKHR ) == sizeof( VkVideoDecodeH264SessionParametersAddInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoKHR>::value, "VideoDecodeH264SessionParametersAddInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureInfoKHR ) == sizeof( VkVideoDecodeH264PictureInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureInfoKHR>::value, "VideoDecodeH264PictureInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264DpbSlotInfoKHR ) == sizeof( VkVideoDecodeH264DpbSlotInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH264DpbSlotInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH264DpbSlotInfoKHR>::value, "VideoDecodeH264DpbSlotInfoKHR is not nothrow_move_constructible!" );
+
+  //=== VK_AMD_texture_gather_bias_lod ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::TextureLODGatherFormatPropertiesAMD ) == sizeof( VkTextureLODGatherFormatPropertiesAMD ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::TextureLODGatherFormatPropertiesAMD>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::TextureLODGatherFormatPropertiesAMD>::value, "TextureLODGatherFormatPropertiesAMD is not nothrow_move_constructible!" );
+
+  //=== VK_AMD_shader_info ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD ) == sizeof( VkShaderResourceUsageAMD ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD>::value, "ShaderResourceUsageAMD is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShaderStatisticsInfoAMD ) == sizeof( VkShaderStatisticsInfoAMD ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ShaderStatisticsInfoAMD>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ShaderStatisticsInfoAMD>::value, "ShaderStatisticsInfoAMD is not nothrow_move_constructible!" );
+
+  //=== VK_KHR_dynamic_rendering ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderingFragmentShadingRateAttachmentInfoKHR ) == sizeof( VkRenderingFragmentShadingRateAttachmentInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderingFragmentShadingRateAttachmentInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderingFragmentShadingRateAttachmentInfoKHR>::value, "RenderingFragmentShadingRateAttachmentInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderingFragmentDensityMapAttachmentInfoEXT ) == sizeof( VkRenderingFragmentDensityMapAttachmentInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderingFragmentDensityMapAttachmentInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderingFragmentDensityMapAttachmentInfoEXT>::value, "RenderingFragmentDensityMapAttachmentInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentSampleCountInfoAMD ) == sizeof( VkAttachmentSampleCountInfoAMD ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AttachmentSampleCountInfoAMD>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AttachmentSampleCountInfoAMD>::value, "AttachmentSampleCountInfoAMD is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MultiviewPerViewAttributesInfoNVX ) == sizeof( VkMultiviewPerViewAttributesInfoNVX ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MultiviewPerViewAttributesInfoNVX>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MultiviewPerViewAttributesInfoNVX>::value, "MultiviewPerViewAttributesInfoNVX is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_GGP )
+  //=== VK_GGP_stream_descriptor_surface ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP ) == sizeof( VkStreamDescriptorSurfaceCreateInfoGGP ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP>::value, "StreamDescriptorSurfaceCreateInfoGGP is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_GGP*/
+
+  //=== VK_NV_corner_sampled_image ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCornerSampledImageFeaturesNV ) == sizeof( VkPhysicalDeviceCornerSampledImageFeaturesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceCornerSampledImageFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceCornerSampledImageFeaturesNV>::value, "PhysicalDeviceCornerSampledImageFeaturesNV is not nothrow_move_constructible!" );
+
+  //=== VK_NV_external_memory_capabilities ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV ) == sizeof( VkExternalImageFormatPropertiesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV>::value, "ExternalImageFormatPropertiesNV is not nothrow_move_constructible!" );
+
+  //=== VK_NV_external_memory ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfoNV ) == sizeof( VkExternalMemoryImageCreateInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfoNV>::value, "ExternalMemoryImageCreateInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfoNV ) == sizeof( VkExportMemoryAllocateInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfoNV>::value, "ExportMemoryAllocateInfoNV is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_NV_external_memory_win32 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoNV ) == sizeof( VkImportMemoryWin32HandleInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoNV>::value, "ImportMemoryWin32HandleInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoNV ) == sizeof( VkExportMemoryWin32HandleInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoNV>::value, "ExportMemoryWin32HandleInfoNV is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_NV_win32_keyed_mutex ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoNV ) == sizeof( VkWin32KeyedMutexAcquireReleaseInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoNV>::value, "Win32KeyedMutexAcquireReleaseInfoNV is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_EXT_validation_flags ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ValidationFlagsEXT ) == sizeof( VkValidationFlagsEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ValidationFlagsEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ValidationFlagsEXT>::value, "ValidationFlagsEXT is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_VI_NN )
+  //=== VK_NN_vi_surface ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN ) == sizeof( VkViSurfaceCreateInfoNN ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN>::value, "ViSurfaceCreateInfoNN is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_VI_NN*/
+
+  //=== VK_EXT_astc_decode_mode ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT ) == sizeof( VkImageViewASTCDecodeModeEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT>::value, "ImageViewASTCDecodeModeEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceASTCDecodeFeaturesEXT ) == sizeof( VkPhysicalDeviceASTCDecodeFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceASTCDecodeFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceASTCDecodeFeaturesEXT>::value, "PhysicalDeviceASTCDecodeFeaturesEXT is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_pipeline_robustness ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessFeaturesEXT ) == sizeof( VkPhysicalDevicePipelineRobustnessFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessFeaturesEXT>::value, "PhysicalDevicePipelineRobustnessFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessPropertiesEXT ) == sizeof( VkPhysicalDevicePipelineRobustnessPropertiesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessPropertiesEXT>::value, "PhysicalDevicePipelineRobustnessPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRobustnessCreateInfoEXT ) == sizeof( VkPipelineRobustnessCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineRobustnessCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineRobustnessCreateInfoEXT>::value, "PipelineRobustnessCreateInfoEXT is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_external_memory_win32 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoKHR ) == sizeof( VkImportMemoryWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoKHR>::value, "ImportMemoryWin32HandleInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoKHR ) == sizeof( VkExportMemoryWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoKHR>::value, "ExportMemoryWin32HandleInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR ) == sizeof( VkMemoryWin32HandlePropertiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR>::value, "MemoryWin32HandlePropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR ) == sizeof( VkMemoryGetWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR>::value, "MemoryGetWin32HandleInfoKHR is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_external_memory_fd ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryFdInfoKHR ) == sizeof( VkImportMemoryFdInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportMemoryFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportMemoryFdInfoKHR>::value, "ImportMemoryFdInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR ) == sizeof( VkMemoryFdPropertiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR>::value, "MemoryFdPropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR ) == sizeof( VkMemoryGetFdInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR>::value, "MemoryGetFdInfoKHR is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_win32_keyed_mutex ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoKHR ) == sizeof( VkWin32KeyedMutexAcquireReleaseInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoKHR>::value, "Win32KeyedMutexAcquireReleaseInfoKHR is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_external_semaphore_win32 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR ) == sizeof( VkImportSemaphoreWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR>::value, "ImportSemaphoreWin32HandleInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportSemaphoreWin32HandleInfoKHR ) == sizeof( VkExportSemaphoreWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportSemaphoreWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportSemaphoreWin32HandleInfoKHR>::value, "ExportSemaphoreWin32HandleInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR ) == sizeof( VkD3D12FenceSubmitInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR>::value, "D3D12FenceSubmitInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR ) == sizeof( VkSemaphoreGetWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR>::value, "SemaphoreGetWin32HandleInfoKHR is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_external_semaphore_fd ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR ) == sizeof( VkImportSemaphoreFdInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR>::value, "ImportSemaphoreFdInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR ) == sizeof( VkSemaphoreGetFdInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR>::value, "SemaphoreGetFdInfoKHR is not nothrow_move_constructible!" );
+
+  //=== VK_KHR_push_descriptor ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR ) == sizeof( VkPhysicalDevicePushDescriptorPropertiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR>::value, "PhysicalDevicePushDescriptorPropertiesKHR is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_conditional_rendering ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT ) == sizeof( VkConditionalRenderingBeginInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT>::value, "ConditionalRenderingBeginInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceConditionalRenderingFeaturesEXT ) == sizeof( VkPhysicalDeviceConditionalRenderingFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceConditionalRenderingFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceConditionalRenderingFeaturesEXT>::value, "PhysicalDeviceConditionalRenderingFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT ) == sizeof( VkCommandBufferInheritanceConditionalRenderingInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT>::value, "CommandBufferInheritanceConditionalRenderingInfoEXT is not nothrow_move_constructible!" );
+
+  //=== VK_KHR_incremental_present ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PresentRegionsKHR ) == sizeof( VkPresentRegionsKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PresentRegionsKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PresentRegionsKHR>::value, "PresentRegionsKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PresentRegionKHR ) == sizeof( VkPresentRegionKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PresentRegionKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PresentRegionKHR>::value, "PresentRegionKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RectLayerKHR ) == sizeof( VkRectLayerKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RectLayerKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RectLayerKHR>::value, "RectLayerKHR is not nothrow_move_constructible!" );
+
+  //=== VK_NV_clip_space_w_scaling ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ViewportWScalingNV ) == sizeof( VkViewportWScalingNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ViewportWScalingNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ViewportWScalingNV>::value, "ViewportWScalingNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportWScalingStateCreateInfoNV ) == sizeof( VkPipelineViewportWScalingStateCreateInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineViewportWScalingStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineViewportWScalingStateCreateInfoNV>::value, "PipelineViewportWScalingStateCreateInfoNV is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_display_surface_counter ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT ) == sizeof( VkSurfaceCapabilities2EXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT>::value, "SurfaceCapabilities2EXT is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_display_control ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT ) == sizeof( VkDisplayPowerInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT>::value, "DisplayPowerInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT ) == sizeof( VkDeviceEventInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT>::value, "DeviceEventInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT ) == sizeof( VkDisplayEventInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT>::value, "DisplayEventInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SwapchainCounterCreateInfoEXT ) == sizeof( VkSwapchainCounterCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SwapchainCounterCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SwapchainCounterCreateInfoEXT>::value, "SwapchainCounterCreateInfoEXT is not nothrow_move_constructible!" );
+
+  //=== VK_GOOGLE_display_timing ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE ) == sizeof( VkRefreshCycleDurationGOOGLE ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE>::value, "RefreshCycleDurationGOOGLE is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE ) == sizeof( VkPastPresentationTimingGOOGLE ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE>::value, "PastPresentationTimingGOOGLE is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PresentTimesInfoGOOGLE ) == sizeof( VkPresentTimesInfoGOOGLE ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PresentTimesInfoGOOGLE>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PresentTimesInfoGOOGLE>::value, "PresentTimesInfoGOOGLE is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE ) == sizeof( VkPresentTimeGOOGLE ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE>::value, "PresentTimeGOOGLE is not nothrow_move_constructible!" );
+
+  //=== VK_NVX_multiview_per_view_attributes ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ) == sizeof( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX>::value, "PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX is not nothrow_move_constructible!" );
+
+  //=== VK_NV_viewport_swizzle ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ViewportSwizzleNV ) == sizeof( VkViewportSwizzleNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ViewportSwizzleNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ViewportSwizzleNV>::value, "ViewportSwizzleNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateInfoNV ) == sizeof( VkPipelineViewportSwizzleStateCreateInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateInfoNV>::value, "PipelineViewportSwizzleStateCreateInfoNV is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_discard_rectangles ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDiscardRectanglePropertiesEXT ) == sizeof( VkPhysicalDeviceDiscardRectanglePropertiesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDiscardRectanglePropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDiscardRectanglePropertiesEXT>::value, "PhysicalDeviceDiscardRectanglePropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateInfoEXT ) == sizeof( VkPipelineDiscardRectangleStateCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateInfoEXT>::value, "PipelineDiscardRectangleStateCreateInfoEXT is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_conservative_rasterization ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceConservativeRasterizationPropertiesEXT ) == sizeof( VkPhysicalDeviceConservativeRasterizationPropertiesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceConservativeRasterizationPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceConservativeRasterizationPropertiesEXT>::value, "PhysicalDeviceConservativeRasterizationPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateInfoEXT ) == sizeof( VkPipelineRasterizationConservativeStateCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateInfoEXT>::value, "PipelineRasterizationConservativeStateCreateInfoEXT is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_depth_clip_enable ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipEnableFeaturesEXT ) == sizeof( VkPhysicalDeviceDepthClipEnableFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipEnableFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipEnableFeaturesEXT>::value, "PhysicalDeviceDepthClipEnableFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateInfoEXT ) == sizeof( VkPipelineRasterizationDepthClipStateCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateInfoEXT>::value, "PipelineRasterizationDepthClipStateCreateInfoEXT is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_hdr_metadata ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::HdrMetadataEXT ) == sizeof( VkHdrMetadataEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::HdrMetadataEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::HdrMetadataEXT>::value, "HdrMetadataEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::XYColorEXT ) == sizeof( VkXYColorEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::XYColorEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::XYColorEXT>::value, "XYColorEXT is not nothrow_move_constructible!" );
+
+  //=== VK_KHR_shared_presentable_image ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SharedPresentSurfaceCapabilitiesKHR ) == sizeof( VkSharedPresentSurfaceCapabilitiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SharedPresentSurfaceCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SharedPresentSurfaceCapabilitiesKHR>::value, "SharedPresentSurfaceCapabilitiesKHR is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_external_fence_win32 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR ) == sizeof( VkImportFenceWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR>::value, "ImportFenceWin32HandleInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportFenceWin32HandleInfoKHR ) == sizeof( VkExportFenceWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportFenceWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportFenceWin32HandleInfoKHR>::value, "ExportFenceWin32HandleInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR ) == sizeof( VkFenceGetWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR>::value, "FenceGetWin32HandleInfoKHR is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_external_fence_fd ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR ) == sizeof( VkImportFenceFdInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR>::value, "ImportFenceFdInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR ) == sizeof( VkFenceGetFdInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR>::value, "FenceGetFdInfoKHR is not nothrow_move_constructible!" );
+
+  //=== VK_KHR_performance_query ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR ) == sizeof( VkPhysicalDevicePerformanceQueryFeaturesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR>::value, "PhysicalDevicePerformanceQueryFeaturesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryPropertiesKHR ) == sizeof( VkPhysicalDevicePerformanceQueryPropertiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryPropertiesKHR>::value, "PhysicalDevicePerformanceQueryPropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceCounterKHR ) == sizeof( VkPerformanceCounterKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR>::value, "PerformanceCounterKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR ) == sizeof( VkPerformanceCounterDescriptionKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR>::value, "PerformanceCounterDescriptionKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR ) == sizeof( VkQueryPoolPerformanceCreateInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR>::value, "QueryPoolPerformanceCreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR ) == sizeof( VkPerformanceCounterResultKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR>::value, "PerformanceCounterResultKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR ) == sizeof( VkAcquireProfilingLockInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR>::value, "AcquireProfilingLockInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR ) == sizeof( VkPerformanceQuerySubmitInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR>::value, "PerformanceQuerySubmitInfoKHR is not nothrow_move_constructible!" );
+
+  //=== VK_KHR_get_surface_capabilities2 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR ) == sizeof( VkPhysicalDeviceSurfaceInfo2KHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR>::value, "PhysicalDeviceSurfaceInfo2KHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR ) == sizeof( VkSurfaceCapabilities2KHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>::value, "SurfaceCapabilities2KHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR ) == sizeof( VkSurfaceFormat2KHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>::value, "SurfaceFormat2KHR is not nothrow_move_constructible!" );
+
+  //=== VK_KHR_get_display_properties2 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayProperties2KHR ) == sizeof( VkDisplayProperties2KHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR>::value, "DisplayProperties2KHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR ) == sizeof( VkDisplayPlaneProperties2KHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR>::value, "DisplayPlaneProperties2KHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR ) == sizeof( VkDisplayModeProperties2KHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR>::value, "DisplayModeProperties2KHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR ) == sizeof( VkDisplayPlaneInfo2KHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR>::value, "DisplayPlaneInfo2KHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR ) == sizeof( VkDisplayPlaneCapabilities2KHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR>::value, "DisplayPlaneCapabilities2KHR is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_IOS_MVK )
+  //=== VK_MVK_ios_surface ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK ) == sizeof( VkIOSSurfaceCreateInfoMVK ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK>::value, "IOSSurfaceCreateInfoMVK is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+#if defined( VK_USE_PLATFORM_MACOS_MVK )
+  //=== VK_MVK_macos_surface ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK ) == sizeof( VkMacOSSurfaceCreateInfoMVK ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK>::value, "MacOSSurfaceCreateInfoMVK is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+
+  //=== VK_EXT_debug_utils ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT ) == sizeof( VkDebugUtilsLabelEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT>::value, "DebugUtilsLabelEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT ) == sizeof( VkDebugUtilsMessengerCallbackDataEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT>::value, "DebugUtilsMessengerCallbackDataEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT ) == sizeof( VkDebugUtilsMessengerCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT>::value, "DebugUtilsMessengerCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT ) == sizeof( VkDebugUtilsMessengerEXT ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT>::value, "DebugUtilsMessengerEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT ) == sizeof( VkDebugUtilsObjectNameInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT>::value, "DebugUtilsObjectNameInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT ) == sizeof( VkDebugUtilsObjectTagInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT>::value, "DebugUtilsObjectTagInfoEXT is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  //=== VK_ANDROID_external_memory_android_hardware_buffer ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferUsageANDROID ) == sizeof( VkAndroidHardwareBufferUsageANDROID ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferUsageANDROID>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferUsageANDROID>::value, "AndroidHardwareBufferUsageANDROID is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID ) == sizeof( VkAndroidHardwareBufferPropertiesANDROID ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>::value, "AndroidHardwareBufferPropertiesANDROID is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID ) == sizeof( VkAndroidHardwareBufferFormatPropertiesANDROID ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID>::value, "AndroidHardwareBufferFormatPropertiesANDROID is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportAndroidHardwareBufferInfoANDROID ) == sizeof( VkImportAndroidHardwareBufferInfoANDROID ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportAndroidHardwareBufferInfoANDROID>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportAndroidHardwareBufferInfoANDROID>::value, "ImportAndroidHardwareBufferInfoANDROID is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID ) == sizeof( VkMemoryGetAndroidHardwareBufferInfoANDROID ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID>::value, "MemoryGetAndroidHardwareBufferInfoANDROID is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalFormatANDROID ) == sizeof( VkExternalFormatANDROID ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalFormatANDROID>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalFormatANDROID>::value, "ExternalFormatANDROID is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatProperties2ANDROID ) == sizeof( VkAndroidHardwareBufferFormatProperties2ANDROID ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatProperties2ANDROID>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatProperties2ANDROID>::value, "AndroidHardwareBufferFormatProperties2ANDROID is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+  //=== VK_EXT_sample_locations ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SampleLocationEXT ) == sizeof( VkSampleLocationEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SampleLocationEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SampleLocationEXT>::value, "SampleLocationEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT ) == sizeof( VkSampleLocationsInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT>::value, "SampleLocationsInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT ) == sizeof( VkAttachmentSampleLocationsEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT>::value, "AttachmentSampleLocationsEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT ) == sizeof( VkSubpassSampleLocationsEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT>::value, "SubpassSampleLocationsEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassSampleLocationsBeginInfoEXT ) == sizeof( VkRenderPassSampleLocationsBeginInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassSampleLocationsBeginInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassSampleLocationsBeginInfoEXT>::value, "RenderPassSampleLocationsBeginInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineSampleLocationsStateCreateInfoEXT ) == sizeof( VkPipelineSampleLocationsStateCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineSampleLocationsStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineSampleLocationsStateCreateInfoEXT>::value, "PipelineSampleLocationsStateCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT ) == sizeof( VkPhysicalDeviceSampleLocationsPropertiesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT>::value, "PhysicalDeviceSampleLocationsPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT ) == sizeof( VkMultisamplePropertiesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT>::value, "MultisamplePropertiesEXT is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_blend_operation_advanced ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedFeaturesEXT ) == sizeof( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedFeaturesEXT>::value, "PhysicalDeviceBlendOperationAdvancedFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedPropertiesEXT ) == sizeof( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedPropertiesEXT>::value, "PhysicalDeviceBlendOperationAdvancedPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineColorBlendAdvancedStateCreateInfoEXT ) == sizeof( VkPipelineColorBlendAdvancedStateCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineColorBlendAdvancedStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineColorBlendAdvancedStateCreateInfoEXT>::value, "PipelineColorBlendAdvancedStateCreateInfoEXT is not nothrow_move_constructible!" );
+
+  //=== VK_NV_fragment_coverage_to_color ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateInfoNV ) == sizeof( VkPipelineCoverageToColorStateCreateInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateInfoNV>::value, "PipelineCoverageToColorStateCreateInfoNV is not nothrow_move_constructible!" );
+
+  //=== VK_KHR_acceleration_structure ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR ) == sizeof( VkDeviceOrHostAddressKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR>::value, "DeviceOrHostAddressKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR ) == sizeof( VkDeviceOrHostAddressConstKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR>::value, "DeviceOrHostAddressConstKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR ) == sizeof( VkAccelerationStructureBuildRangeInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR>::value, "AccelerationStructureBuildRangeInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AabbPositionsKHR ) == sizeof( VkAabbPositionsKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AabbPositionsKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AabbPositionsKHR>::value, "AabbPositionsKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR ) == sizeof( VkAccelerationStructureGeometryTrianglesDataKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR>::value, "AccelerationStructureGeometryTrianglesDataKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::TransformMatrixKHR ) == sizeof( VkTransformMatrixKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::TransformMatrixKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::TransformMatrixKHR>::value, "TransformMatrixKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR ) == sizeof( VkAccelerationStructureBuildGeometryInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR>::value, "AccelerationStructureBuildGeometryInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR ) == sizeof( VkAccelerationStructureGeometryAabbsDataKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR>::value, "AccelerationStructureGeometryAabbsDataKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR ) == sizeof( VkAccelerationStructureInstanceKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR>::value, "AccelerationStructureInstanceKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR ) == sizeof( VkAccelerationStructureGeometryInstancesDataKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR>::value, "AccelerationStructureGeometryInstancesDataKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR ) == sizeof( VkAccelerationStructureGeometryDataKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR>::value, "AccelerationStructureGeometryDataKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR ) == sizeof( VkAccelerationStructureGeometryKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR>::value, "AccelerationStructureGeometryKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR ) == sizeof( VkAccelerationStructureCreateInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR>::value, "AccelerationStructureCreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR ) == sizeof( VkAccelerationStructureKHR ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR>::value, "AccelerationStructureKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureKHR ) == sizeof( VkWriteDescriptorSetAccelerationStructureKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureKHR>::value, "WriteDescriptorSetAccelerationStructureKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructureFeaturesKHR ) == sizeof( VkPhysicalDeviceAccelerationStructureFeaturesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructureFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructureFeaturesKHR>::value, "PhysicalDeviceAccelerationStructureFeaturesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructurePropertiesKHR ) == sizeof( VkPhysicalDeviceAccelerationStructurePropertiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructurePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructurePropertiesKHR>::value, "PhysicalDeviceAccelerationStructurePropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR ) == sizeof( VkAccelerationStructureDeviceAddressInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR>::value, "AccelerationStructureDeviceAddressInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR ) == sizeof( VkAccelerationStructureVersionInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR>::value, "AccelerationStructureVersionInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR ) == sizeof( VkCopyAccelerationStructureToMemoryInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR>::value, "CopyAccelerationStructureToMemoryInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR ) == sizeof( VkCopyMemoryToAccelerationStructureInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR>::value, "CopyMemoryToAccelerationStructureInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR ) == sizeof( VkCopyAccelerationStructureInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR>::value, "CopyAccelerationStructureInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR ) == sizeof( VkAccelerationStructureBuildSizesInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR>::value, "AccelerationStructureBuildSizesInfoKHR is not nothrow_move_constructible!" );
+
+  //=== VK_NV_framebuffer_mixed_samples ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateInfoNV ) == sizeof( VkPipelineCoverageModulationStateCreateInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateInfoNV>::value, "PipelineCoverageModulationStateCreateInfoNV is not nothrow_move_constructible!" );
+
+  //=== VK_NV_shader_sm_builtins ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV ) == sizeof( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV>::value, "PhysicalDeviceShaderSMBuiltinsPropertiesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsFeaturesNV ) == sizeof( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsFeaturesNV>::value, "PhysicalDeviceShaderSMBuiltinsFeaturesNV is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_image_drm_format_modifier ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT ) == sizeof( VkDrmFormatModifierPropertiesListEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT>::value, "DrmFormatModifierPropertiesListEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT ) == sizeof( VkDrmFormatModifierPropertiesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT>::value, "DrmFormatModifierPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageDrmFormatModifierInfoEXT ) == sizeof( VkPhysicalDeviceImageDrmFormatModifierInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageDrmFormatModifierInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageDrmFormatModifierInfoEXT>::value, "PhysicalDeviceImageDrmFormatModifierInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierListCreateInfoEXT ) == sizeof( VkImageDrmFormatModifierListCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierListCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierListCreateInfoEXT>::value, "ImageDrmFormatModifierListCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierExplicitCreateInfoEXT ) == sizeof( VkImageDrmFormatModifierExplicitCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierExplicitCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierExplicitCreateInfoEXT>::value, "ImageDrmFormatModifierExplicitCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT ) == sizeof( VkImageDrmFormatModifierPropertiesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT>::value, "ImageDrmFormatModifierPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesList2EXT ) == sizeof( VkDrmFormatModifierPropertiesList2EXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesList2EXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesList2EXT>::value, "DrmFormatModifierPropertiesList2EXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT ) == sizeof( VkDrmFormatModifierProperties2EXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT>::value, "DrmFormatModifierProperties2EXT is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_validation_cache ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ValidationCacheEXT ) == sizeof( VkValidationCacheEXT ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ValidationCacheEXT>::value, "ValidationCacheEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT ) == sizeof( VkValidationCacheCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT>::value, "ValidationCacheCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShaderModuleValidationCacheCreateInfoEXT ) == sizeof( VkShaderModuleValidationCacheCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ShaderModuleValidationCacheCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ShaderModuleValidationCacheCreateInfoEXT>::value, "ShaderModuleValidationCacheCreateInfoEXT is not nothrow_move_constructible!" );
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  //=== VK_KHR_portability_subset ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetFeaturesKHR ) == sizeof( VkPhysicalDevicePortabilitySubsetFeaturesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetFeaturesKHR>::value, "PhysicalDevicePortabilitySubsetFeaturesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetPropertiesKHR ) == sizeof( VkPhysicalDevicePortabilitySubsetPropertiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetPropertiesKHR>::value, "PhysicalDevicePortabilitySubsetPropertiesKHR is not nothrow_move_constructible!" );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  //=== VK_NV_shading_rate_image ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV ) == sizeof( VkShadingRatePaletteNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV>::value, "ShadingRatePaletteNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportShadingRateImageStateCreateInfoNV ) == sizeof( VkPipelineViewportShadingRateImageStateCreateInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineViewportShadingRateImageStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineViewportShadingRateImageStateCreateInfoNV>::value, "PipelineViewportShadingRateImageStateCreateInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImageFeaturesNV ) == sizeof( VkPhysicalDeviceShadingRateImageFeaturesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImageFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImageFeaturesNV>::value, "PhysicalDeviceShadingRateImageFeaturesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImagePropertiesNV ) == sizeof( VkPhysicalDeviceShadingRateImagePropertiesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImagePropertiesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImagePropertiesNV>::value, "PhysicalDeviceShadingRateImagePropertiesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV ) == sizeof( VkCoarseSampleLocationNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV>::value, "CoarseSampleLocationNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV ) == sizeof( VkCoarseSampleOrderCustomNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV>::value, "CoarseSampleOrderCustomNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportCoarseSampleOrderStateCreateInfoNV ) == sizeof( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineViewportCoarseSampleOrderStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineViewportCoarseSampleOrderStateCreateInfoNV>::value, "PipelineViewportCoarseSampleOrderStateCreateInfoNV is not nothrow_move_constructible!" );
+
+  //=== VK_NV_ray_tracing ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV ) == sizeof( VkRayTracingShaderGroupCreateInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV>::value, "RayTracingShaderGroupCreateInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV ) == sizeof( VkRayTracingPipelineCreateInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV>::value, "RayTracingPipelineCreateInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GeometryTrianglesNV ) == sizeof( VkGeometryTrianglesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::GeometryTrianglesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::GeometryTrianglesNV>::value, "GeometryTrianglesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GeometryAABBNV ) == sizeof( VkGeometryAABBNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::GeometryAABBNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::GeometryAABBNV>::value, "GeometryAABBNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GeometryDataNV ) == sizeof( VkGeometryDataNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::GeometryDataNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::GeometryDataNV>::value, "GeometryDataNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GeometryNV ) == sizeof( VkGeometryNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::GeometryNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::GeometryNV>::value, "GeometryNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV ) == sizeof( VkAccelerationStructureInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV>::value, "AccelerationStructureInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV ) == sizeof( VkAccelerationStructureCreateInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV>::value, "AccelerationStructureCreateInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureNV ) == sizeof( VkAccelerationStructureNV ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureNV>::value, "AccelerationStructureNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV ) == sizeof( VkBindAccelerationStructureMemoryInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV>::value, "BindAccelerationStructureMemoryInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureNV ) == sizeof( VkWriteDescriptorSetAccelerationStructureNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureNV>::value, "WriteDescriptorSetAccelerationStructureNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV ) == sizeof( VkAccelerationStructureMemoryRequirementsInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV>::value, "AccelerationStructureMemoryRequirementsInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPropertiesNV ) == sizeof( VkPhysicalDeviceRayTracingPropertiesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPropertiesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPropertiesNV>::value, "PhysicalDeviceRayTracingPropertiesNV is not nothrow_move_constructible!" );
+
+  //=== VK_NV_representative_fragment_test ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRepresentativeFragmentTestFeaturesNV ) == sizeof( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRepresentativeFragmentTestFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRepresentativeFragmentTestFeaturesNV>::value, "PhysicalDeviceRepresentativeFragmentTestFeaturesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRepresentativeFragmentTestStateCreateInfoNV ) == sizeof( VkPipelineRepresentativeFragmentTestStateCreateInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineRepresentativeFragmentTestStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineRepresentativeFragmentTestStateCreateInfoNV>::value, "PipelineRepresentativeFragmentTestStateCreateInfoNV is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_filter_cubic ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewImageFormatInfoEXT ) == sizeof( VkPhysicalDeviceImageViewImageFormatInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewImageFormatInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewImageFormatInfoEXT>::value, "PhysicalDeviceImageViewImageFormatInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FilterCubicImageViewImageFormatPropertiesEXT ) == sizeof( VkFilterCubicImageViewImageFormatPropertiesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FilterCubicImageViewImageFormatPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FilterCubicImageViewImageFormatPropertiesEXT>::value, "FilterCubicImageViewImageFormatPropertiesEXT is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_external_memory_host ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryHostPointerInfoEXT ) == sizeof( VkImportMemoryHostPointerInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportMemoryHostPointerInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportMemoryHostPointerInfoEXT>::value, "ImportMemoryHostPointerInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT ) == sizeof( VkMemoryHostPointerPropertiesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT>::value, "MemoryHostPointerPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryHostPropertiesEXT ) == sizeof( VkPhysicalDeviceExternalMemoryHostPropertiesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryHostPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryHostPropertiesEXT>::value, "PhysicalDeviceExternalMemoryHostPropertiesEXT is not nothrow_move_constructible!" );
+
+  //=== VK_KHR_shader_clock ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderClockFeaturesKHR ) == sizeof( VkPhysicalDeviceShaderClockFeaturesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderClockFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderClockFeaturesKHR>::value, "PhysicalDeviceShaderClockFeaturesKHR is not nothrow_move_constructible!" );
+
+  //=== VK_AMD_pipeline_compiler_control ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCompilerControlCreateInfoAMD ) == sizeof( VkPipelineCompilerControlCreateInfoAMD ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineCompilerControlCreateInfoAMD>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineCompilerControlCreateInfoAMD>::value, "PipelineCompilerControlCreateInfoAMD is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_calibrated_timestamps ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT ) == sizeof( VkCalibratedTimestampInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT>::value, "CalibratedTimestampInfoEXT is not nothrow_move_constructible!" );
+
+  //=== VK_AMD_shader_core_properties ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesAMD ) == sizeof( VkPhysicalDeviceShaderCorePropertiesAMD ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesAMD>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesAMD>::value, "PhysicalDeviceShaderCorePropertiesAMD is not nothrow_move_constructible!" );
+
+  //=== VK_KHR_video_decode_h265 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265ProfileInfoKHR ) == sizeof( VkVideoDecodeH265ProfileInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH265ProfileInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH265ProfileInfoKHR>::value, "VideoDecodeH265ProfileInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265CapabilitiesKHR ) == sizeof( VkVideoDecodeH265CapabilitiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH265CapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH265CapabilitiesKHR>::value, "VideoDecodeH265CapabilitiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersCreateInfoKHR ) == sizeof( VkVideoDecodeH265SessionParametersCreateInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersCreateInfoKHR>::value, "VideoDecodeH265SessionParametersCreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoKHR ) == sizeof( VkVideoDecodeH265SessionParametersAddInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoKHR>::value, "VideoDecodeH265SessionParametersAddInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265PictureInfoKHR ) == sizeof( VkVideoDecodeH265PictureInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH265PictureInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH265PictureInfoKHR>::value, "VideoDecodeH265PictureInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265DpbSlotInfoKHR ) == sizeof( VkVideoDecodeH265DpbSlotInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH265DpbSlotInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH265DpbSlotInfoKHR>::value, "VideoDecodeH265DpbSlotInfoKHR is not nothrow_move_constructible!" );
+
+  //=== VK_KHR_global_priority ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoKHR ) == sizeof( VkDeviceQueueGlobalPriorityCreateInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoKHR>::value, "DeviceQueueGlobalPriorityCreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceGlobalPriorityQueryFeaturesKHR ) == sizeof( VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceGlobalPriorityQueryFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceGlobalPriorityQueryFeaturesKHR>::value, "PhysicalDeviceGlobalPriorityQueryFeaturesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyGlobalPriorityPropertiesKHR ) == sizeof( VkQueueFamilyGlobalPriorityPropertiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueueFamilyGlobalPriorityPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueueFamilyGlobalPriorityPropertiesKHR>::value, "QueueFamilyGlobalPriorityPropertiesKHR is not nothrow_move_constructible!" );
+
+  //=== VK_AMD_memory_overallocation_behavior ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceMemoryOverallocationCreateInfoAMD ) == sizeof( VkDeviceMemoryOverallocationCreateInfoAMD ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceMemoryOverallocationCreateInfoAMD>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceMemoryOverallocationCreateInfoAMD>::value, "DeviceMemoryOverallocationCreateInfoAMD is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_vertex_attribute_divisor ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesEXT ) == sizeof( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesEXT>::value, "PhysicalDeviceVertexAttributeDivisorPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT ) == sizeof( VkVertexInputBindingDivisorDescriptionEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT>::value, "VertexInputBindingDivisorDescriptionEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoEXT ) == sizeof( VkPipelineVertexInputDivisorStateCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoEXT>::value, "PipelineVertexInputDivisorStateCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesEXT ) == sizeof( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesEXT>::value, "PhysicalDeviceVertexAttributeDivisorFeaturesEXT is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_GGP )
+  //=== VK_GGP_frame_token ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP ) == sizeof( VkPresentFrameTokenGGP ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP>::value, "PresentFrameTokenGGP is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_GGP*/
+
+  //=== VK_NV_compute_shader_derivatives ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesNV ) == sizeof( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesNV>::value, "PhysicalDeviceComputeShaderDerivativesFeaturesNV is not nothrow_move_constructible!" );
+
+  //=== VK_NV_mesh_shader ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV ) == sizeof( VkPhysicalDeviceMeshShaderFeaturesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV>::value, "PhysicalDeviceMeshShaderFeaturesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV ) == sizeof( VkPhysicalDeviceMeshShaderPropertiesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV>::value, "PhysicalDeviceMeshShaderPropertiesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandNV ) == sizeof( VkDrawMeshTasksIndirectCommandNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandNV>::value, "DrawMeshTasksIndirectCommandNV is not nothrow_move_constructible!" );
+
+  //=== VK_NV_shader_image_footprint ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageFootprintFeaturesNV ) == sizeof( VkPhysicalDeviceShaderImageFootprintFeaturesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageFootprintFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageFootprintFeaturesNV>::value, "PhysicalDeviceShaderImageFootprintFeaturesNV is not nothrow_move_constructible!" );
+
+  //=== VK_NV_scissor_exclusive ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportExclusiveScissorStateCreateInfoNV ) == sizeof( VkPipelineViewportExclusiveScissorStateCreateInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineViewportExclusiveScissorStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineViewportExclusiveScissorStateCreateInfoNV>::value, "PipelineViewportExclusiveScissorStateCreateInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExclusiveScissorFeaturesNV ) == sizeof( VkPhysicalDeviceExclusiveScissorFeaturesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExclusiveScissorFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExclusiveScissorFeaturesNV>::value, "PhysicalDeviceExclusiveScissorFeaturesNV is not nothrow_move_constructible!" );
+
+  //=== VK_NV_device_diagnostic_checkpoints ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV ) == sizeof( VkQueueFamilyCheckpointPropertiesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV>::value, "QueueFamilyCheckpointPropertiesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CheckpointDataNV ) == sizeof( VkCheckpointDataNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CheckpointDataNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CheckpointDataNV>::value, "CheckpointDataNV is not nothrow_move_constructible!" );
+
+  //=== VK_INTEL_shader_integer_functions2 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL ) == sizeof( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL>::value, "PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL is not nothrow_move_constructible!" );
+
+  //=== VK_INTEL_performance_query ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL ) == sizeof( VkPerformanceValueDataINTEL ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL>::value, "PerformanceValueDataINTEL is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceValueINTEL ) == sizeof( VkPerformanceValueINTEL ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PerformanceValueINTEL>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceValueINTEL>::value, "PerformanceValueINTEL is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL ) == sizeof( VkInitializePerformanceApiInfoINTEL ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL>::value, "InitializePerformanceApiInfoINTEL is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueryPoolPerformanceQueryCreateInfoINTEL ) == sizeof( VkQueryPoolPerformanceQueryCreateInfoINTEL ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueryPoolPerformanceQueryCreateInfoINTEL>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueryPoolPerformanceQueryCreateInfoINTEL>::value, "QueryPoolPerformanceQueryCreateInfoINTEL is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL ) == sizeof( VkPerformanceMarkerInfoINTEL ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL>::value, "PerformanceMarkerInfoINTEL is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL ) == sizeof( VkPerformanceStreamMarkerInfoINTEL ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL>::value, "PerformanceStreamMarkerInfoINTEL is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL ) == sizeof( VkPerformanceOverrideInfoINTEL ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL>::value, "PerformanceOverrideInfoINTEL is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL ) == sizeof( VkPerformanceConfigurationAcquireInfoINTEL ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL>::value, "PerformanceConfigurationAcquireInfoINTEL is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL ) == sizeof( VkPerformanceConfigurationINTEL ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL>::value, "PerformanceConfigurationINTEL is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_pci_bus_info ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT ) == sizeof( VkPhysicalDevicePCIBusInfoPropertiesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT>::value, "PhysicalDevicePCIBusInfoPropertiesEXT is not nothrow_move_constructible!" );
+
+  //=== VK_AMD_display_native_hdr ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayNativeHdrSurfaceCapabilitiesAMD ) == sizeof( VkDisplayNativeHdrSurfaceCapabilitiesAMD ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayNativeHdrSurfaceCapabilitiesAMD>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayNativeHdrSurfaceCapabilitiesAMD>::value, "DisplayNativeHdrSurfaceCapabilitiesAMD is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SwapchainDisplayNativeHdrCreateInfoAMD ) == sizeof( VkSwapchainDisplayNativeHdrCreateInfoAMD ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SwapchainDisplayNativeHdrCreateInfoAMD>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SwapchainDisplayNativeHdrCreateInfoAMD>::value, "SwapchainDisplayNativeHdrCreateInfoAMD is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_imagepipe_surface ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA ) == sizeof( VkImagePipeSurfaceCreateInfoFUCHSIA ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA>::value, "ImagePipeSurfaceCreateInfoFUCHSIA is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+  //=== VK_EXT_metal_surface ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT ) == sizeof( VkMetalSurfaceCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT>::value, "MetalSurfaceCreateInfoEXT is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  //=== VK_EXT_fragment_density_map ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapFeaturesEXT ) == sizeof( VkPhysicalDeviceFragmentDensityMapFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapFeaturesEXT>::value, "PhysicalDeviceFragmentDensityMapFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapPropertiesEXT ) == sizeof( VkPhysicalDeviceFragmentDensityMapPropertiesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapPropertiesEXT>::value, "PhysicalDeviceFragmentDensityMapPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapCreateInfoEXT ) == sizeof( VkRenderPassFragmentDensityMapCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapCreateInfoEXT>::value, "RenderPassFragmentDensityMapCreateInfoEXT is not nothrow_move_constructible!" );
+
+  //=== VK_KHR_fragment_shading_rate ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FragmentShadingRateAttachmentInfoKHR ) == sizeof( VkFragmentShadingRateAttachmentInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FragmentShadingRateAttachmentInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FragmentShadingRateAttachmentInfoKHR>::value, "FragmentShadingRateAttachmentInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateStateCreateInfoKHR ) == sizeof( VkPipelineFragmentShadingRateStateCreateInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateStateCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateStateCreateInfoKHR>::value, "PipelineFragmentShadingRateStateCreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateFeaturesKHR ) == sizeof( VkPhysicalDeviceFragmentShadingRateFeaturesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateFeaturesKHR>::value, "PhysicalDeviceFragmentShadingRateFeaturesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRatePropertiesKHR ) == sizeof( VkPhysicalDeviceFragmentShadingRatePropertiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRatePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRatePropertiesKHR>::value, "PhysicalDeviceFragmentShadingRatePropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR ) == sizeof( VkPhysicalDeviceFragmentShadingRateKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR>::value, "PhysicalDeviceFragmentShadingRateKHR is not nothrow_move_constructible!" );
+
+  //=== VK_AMD_shader_core_properties2 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD ) == sizeof( VkPhysicalDeviceShaderCoreProperties2AMD ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD>::value, "PhysicalDeviceShaderCoreProperties2AMD is not nothrow_move_constructible!" );
+
+  //=== VK_AMD_device_coherent_memory ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD ) == sizeof( VkPhysicalDeviceCoherentMemoryFeaturesAMD ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD>::value, "PhysicalDeviceCoherentMemoryFeaturesAMD is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_shader_image_atomic_int64 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageAtomicInt64FeaturesEXT ) == sizeof( VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageAtomicInt64FeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageAtomicInt64FeaturesEXT>::value, "PhysicalDeviceShaderImageAtomicInt64FeaturesEXT is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_memory_budget ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT ) == sizeof( VkPhysicalDeviceMemoryBudgetPropertiesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT>::value, "PhysicalDeviceMemoryBudgetPropertiesEXT is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_memory_priority ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryPriorityFeaturesEXT ) == sizeof( VkPhysicalDeviceMemoryPriorityFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryPriorityFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryPriorityFeaturesEXT>::value, "PhysicalDeviceMemoryPriorityFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryPriorityAllocateInfoEXT ) == sizeof( VkMemoryPriorityAllocateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryPriorityAllocateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryPriorityAllocateInfoEXT>::value, "MemoryPriorityAllocateInfoEXT is not nothrow_move_constructible!" );
+
+  //=== VK_KHR_surface_protected_capabilities ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceProtectedCapabilitiesKHR ) == sizeof( VkSurfaceProtectedCapabilitiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceProtectedCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceProtectedCapabilitiesKHR>::value, "SurfaceProtectedCapabilitiesKHR is not nothrow_move_constructible!" );
+
+  //=== VK_NV_dedicated_allocation_image_aliasing ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV ) == sizeof( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>::value, "PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_buffer_device_address ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesEXT ) == sizeof( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesEXT>::value, "PhysicalDeviceBufferDeviceAddressFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferDeviceAddressCreateInfoEXT ) == sizeof( VkBufferDeviceAddressCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferDeviceAddressCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferDeviceAddressCreateInfoEXT>::value, "BufferDeviceAddressCreateInfoEXT is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_validation_features ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ValidationFeaturesEXT ) == sizeof( VkValidationFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ValidationFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ValidationFeaturesEXT>::value, "ValidationFeaturesEXT is not nothrow_move_constructible!" );
+
+  //=== VK_KHR_present_wait ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePresentWaitFeaturesKHR ) == sizeof( VkPhysicalDevicePresentWaitFeaturesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentWaitFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentWaitFeaturesKHR>::value, "PhysicalDevicePresentWaitFeaturesKHR is not nothrow_move_constructible!" );
+
+  //=== VK_NV_cooperative_matrix ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV ) == sizeof( VkCooperativeMatrixPropertiesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV>::value, "CooperativeMatrixPropertiesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesNV ) == sizeof( VkPhysicalDeviceCooperativeMatrixFeaturesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesNV>::value, "PhysicalDeviceCooperativeMatrixFeaturesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesNV ) == sizeof( VkPhysicalDeviceCooperativeMatrixPropertiesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesNV>::value, "PhysicalDeviceCooperativeMatrixPropertiesNV is not nothrow_move_constructible!" );
+
+  //=== VK_NV_coverage_reduction_mode ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCoverageReductionModeFeaturesNV ) == sizeof( VkPhysicalDeviceCoverageReductionModeFeaturesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceCoverageReductionModeFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceCoverageReductionModeFeaturesNV>::value, "PhysicalDeviceCoverageReductionModeFeaturesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateInfoNV ) == sizeof( VkPipelineCoverageReductionStateCreateInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateInfoNV>::value, "PipelineCoverageReductionStateCreateInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV ) == sizeof( VkFramebufferMixedSamplesCombinationNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV>::value, "FramebufferMixedSamplesCombinationNV is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_fragment_shader_interlock ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderInterlockFeaturesEXT ) == sizeof( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderInterlockFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderInterlockFeaturesEXT>::value, "PhysicalDeviceFragmentShaderInterlockFeaturesEXT is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_ycbcr_image_arrays ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT ) == sizeof( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT>::value, "PhysicalDeviceYcbcrImageArraysFeaturesEXT is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_provoking_vertex ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexFeaturesEXT ) == sizeof( VkPhysicalDeviceProvokingVertexFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexFeaturesEXT>::value, "PhysicalDeviceProvokingVertexFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexPropertiesEXT ) == sizeof( VkPhysicalDeviceProvokingVertexPropertiesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexPropertiesEXT>::value, "PhysicalDeviceProvokingVertexPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationProvokingVertexStateCreateInfoEXT ) == sizeof( VkPipelineRasterizationProvokingVertexStateCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineRasterizationProvokingVertexStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineRasterizationProvokingVertexStateCreateInfoEXT>::value, "PipelineRasterizationProvokingVertexStateCreateInfoEXT is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_EXT_full_screen_exclusive ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveInfoEXT ) == sizeof( VkSurfaceFullScreenExclusiveInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveInfoEXT>::value, "SurfaceFullScreenExclusiveInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesFullScreenExclusiveEXT ) == sizeof( VkSurfaceCapabilitiesFullScreenExclusiveEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesFullScreenExclusiveEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesFullScreenExclusiveEXT>::value, "SurfaceCapabilitiesFullScreenExclusiveEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveWin32InfoEXT ) == sizeof( VkSurfaceFullScreenExclusiveWin32InfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveWin32InfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveWin32InfoEXT>::value, "SurfaceFullScreenExclusiveWin32InfoEXT is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_EXT_headless_surface ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT ) == sizeof( VkHeadlessSurfaceCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT>::value, "HeadlessSurfaceCreateInfoEXT is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_line_rasterization ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesEXT ) == sizeof( VkPhysicalDeviceLineRasterizationFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesEXT>::value, "PhysicalDeviceLineRasterizationFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT ) == sizeof( VkPhysicalDeviceLineRasterizationPropertiesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT>::value, "PhysicalDeviceLineRasterizationPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfoEXT ) == sizeof( VkPipelineRasterizationLineStateCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfoEXT>::value, "PipelineRasterizationLineStateCreateInfoEXT is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_shader_atomic_float ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloatFeaturesEXT ) == sizeof( VkPhysicalDeviceShaderAtomicFloatFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloatFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloatFeaturesEXT>::value, "PhysicalDeviceShaderAtomicFloatFeaturesEXT is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_index_type_uint8 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT ) == sizeof( VkPhysicalDeviceIndexTypeUint8FeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT>::value, "PhysicalDeviceIndexTypeUint8FeaturesEXT is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_extended_dynamic_state ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicStateFeaturesEXT ) == sizeof( VkPhysicalDeviceExtendedDynamicStateFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicStateFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicStateFeaturesEXT>::value, "PhysicalDeviceExtendedDynamicStateFeaturesEXT is not nothrow_move_constructible!" );
+
+  //=== VK_KHR_deferred_host_operations ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeferredOperationKHR ) == sizeof( VkDeferredOperationKHR ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeferredOperationKHR>::value, "DeferredOperationKHR is not nothrow_move_constructible!" );
+
+  //=== VK_KHR_pipeline_executable_properties ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR ) == sizeof( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR>::value, "PhysicalDevicePipelineExecutablePropertiesFeaturesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineInfoKHR ) == sizeof( VkPipelineInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineInfoKHR>::value, "PipelineInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR ) == sizeof( VkPipelineExecutablePropertiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR>::value, "PipelineExecutablePropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR ) == sizeof( VkPipelineExecutableInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR>::value, "PipelineExecutableInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR ) == sizeof( VkPipelineExecutableStatisticValueKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR>::value, "PipelineExecutableStatisticValueKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR ) == sizeof( VkPipelineExecutableStatisticKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR>::value, "PipelineExecutableStatisticKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR ) == sizeof( VkPipelineExecutableInternalRepresentationKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR>::value, "PipelineExecutableInternalRepresentationKHR is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_shader_atomic_float2 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloat2FeaturesEXT ) == sizeof( VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloat2FeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloat2FeaturesEXT>::value, "PhysicalDeviceShaderAtomicFloat2FeaturesEXT is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_surface_maintenance1 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfacePresentModeEXT ) == sizeof( VkSurfacePresentModeEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfacePresentModeEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfacePresentModeEXT>::value, "SurfacePresentModeEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfacePresentScalingCapabilitiesEXT ) == sizeof( VkSurfacePresentScalingCapabilitiesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfacePresentScalingCapabilitiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfacePresentScalingCapabilitiesEXT>::value, "SurfacePresentScalingCapabilitiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfacePresentModeCompatibilityEXT ) == sizeof( VkSurfacePresentModeCompatibilityEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfacePresentModeCompatibilityEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfacePresentModeCompatibilityEXT>::value, "SurfacePresentModeCompatibilityEXT is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_swapchain_maintenance1 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSwapchainMaintenance1FeaturesEXT ) == sizeof( VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSwapchainMaintenance1FeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSwapchainMaintenance1FeaturesEXT>::value, "PhysicalDeviceSwapchainMaintenance1FeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SwapchainPresentFenceInfoEXT ) == sizeof( VkSwapchainPresentFenceInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SwapchainPresentFenceInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SwapchainPresentFenceInfoEXT>::value, "SwapchainPresentFenceInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SwapchainPresentModesCreateInfoEXT ) == sizeof( VkSwapchainPresentModesCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SwapchainPresentModesCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SwapchainPresentModesCreateInfoEXT>::value, "SwapchainPresentModesCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SwapchainPresentModeInfoEXT ) == sizeof( VkSwapchainPresentModeInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SwapchainPresentModeInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SwapchainPresentModeInfoEXT>::value, "SwapchainPresentModeInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SwapchainPresentScalingCreateInfoEXT ) == sizeof( VkSwapchainPresentScalingCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SwapchainPresentScalingCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SwapchainPresentScalingCreateInfoEXT>::value, "SwapchainPresentScalingCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT ) == sizeof( VkReleaseSwapchainImagesInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT>::value, "ReleaseSwapchainImagesInfoEXT is not nothrow_move_constructible!" );
+
+  //=== VK_NV_device_generated_commands ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsPropertiesNV ) == sizeof( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsPropertiesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsPropertiesNV>::value, "PhysicalDeviceDeviceGeneratedCommandsPropertiesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsFeaturesNV ) == sizeof( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsFeaturesNV>::value, "PhysicalDeviceDeviceGeneratedCommandsFeaturesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV ) == sizeof( VkGraphicsShaderGroupCreateInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV>::value, "GraphicsShaderGroupCreateInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GraphicsPipelineShaderGroupsCreateInfoNV ) == sizeof( VkGraphicsPipelineShaderGroupsCreateInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::GraphicsPipelineShaderGroupsCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::GraphicsPipelineShaderGroupsCreateInfoNV>::value, "GraphicsPipelineShaderGroupsCreateInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindShaderGroupIndirectCommandNV ) == sizeof( VkBindShaderGroupIndirectCommandNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindShaderGroupIndirectCommandNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindShaderGroupIndirectCommandNV>::value, "BindShaderGroupIndirectCommandNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindIndexBufferIndirectCommandNV ) == sizeof( VkBindIndexBufferIndirectCommandNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindIndexBufferIndirectCommandNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindIndexBufferIndirectCommandNV>::value, "BindIndexBufferIndirectCommandNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindVertexBufferIndirectCommandNV ) == sizeof( VkBindVertexBufferIndirectCommandNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindVertexBufferIndirectCommandNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindVertexBufferIndirectCommandNV>::value, "BindVertexBufferIndirectCommandNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SetStateFlagsIndirectCommandNV ) == sizeof( VkSetStateFlagsIndirectCommandNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SetStateFlagsIndirectCommandNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SetStateFlagsIndirectCommandNV>::value, "SetStateFlagsIndirectCommandNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV ) == sizeof( VkIndirectCommandsLayoutNV ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV>::value, "IndirectCommandsLayoutNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV ) == sizeof( VkIndirectCommandsStreamNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV>::value, "IndirectCommandsStreamNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV ) == sizeof( VkIndirectCommandsLayoutTokenNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV>::value, "IndirectCommandsLayoutTokenNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV ) == sizeof( VkIndirectCommandsLayoutCreateInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV>::value, "IndirectCommandsLayoutCreateInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV ) == sizeof( VkGeneratedCommandsInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV>::value, "GeneratedCommandsInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV ) == sizeof( VkGeneratedCommandsMemoryRequirementsInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV>::value, "GeneratedCommandsMemoryRequirementsInfoNV is not nothrow_move_constructible!" );
+
+  //=== VK_NV_inherited_viewport_scissor ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceInheritedViewportScissorFeaturesNV ) == sizeof( VkPhysicalDeviceInheritedViewportScissorFeaturesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceInheritedViewportScissorFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceInheritedViewportScissorFeaturesNV>::value, "PhysicalDeviceInheritedViewportScissorFeaturesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceViewportScissorInfoNV ) == sizeof( VkCommandBufferInheritanceViewportScissorInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceViewportScissorInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceViewportScissorInfoNV>::value, "CommandBufferInheritanceViewportScissorInfoNV is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_texel_buffer_alignment ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT ) == sizeof( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT>::value, "PhysicalDeviceTexelBufferAlignmentFeaturesEXT is not nothrow_move_constructible!" );
+
+  //=== VK_QCOM_render_pass_transform ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassTransformBeginInfoQCOM ) == sizeof( VkRenderPassTransformBeginInfoQCOM ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassTransformBeginInfoQCOM>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassTransformBeginInfoQCOM>::value, "RenderPassTransformBeginInfoQCOM is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderPassTransformInfoQCOM ) == sizeof( VkCommandBufferInheritanceRenderPassTransformInfoQCOM ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderPassTransformInfoQCOM>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderPassTransformInfoQCOM>::value, "CommandBufferInheritanceRenderPassTransformInfoQCOM is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_device_memory_report ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceMemoryReportFeaturesEXT ) == sizeof( VkPhysicalDeviceDeviceMemoryReportFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceMemoryReportFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceMemoryReportFeaturesEXT>::value, "PhysicalDeviceDeviceMemoryReportFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceDeviceMemoryReportCreateInfoEXT ) == sizeof( VkDeviceDeviceMemoryReportCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceDeviceMemoryReportCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceDeviceMemoryReportCreateInfoEXT>::value, "DeviceDeviceMemoryReportCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceMemoryReportCallbackDataEXT ) == sizeof( VkDeviceMemoryReportCallbackDataEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceMemoryReportCallbackDataEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceMemoryReportCallbackDataEXT>::value, "DeviceMemoryReportCallbackDataEXT is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_robustness2 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2FeaturesEXT ) == sizeof( VkPhysicalDeviceRobustness2FeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2FeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2FeaturesEXT>::value, "PhysicalDeviceRobustness2FeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2PropertiesEXT ) == sizeof( VkPhysicalDeviceRobustness2PropertiesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2PropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2PropertiesEXT>::value, "PhysicalDeviceRobustness2PropertiesEXT is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_custom_border_color ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerCustomBorderColorCreateInfoEXT ) == sizeof( VkSamplerCustomBorderColorCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SamplerCustomBorderColorCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SamplerCustomBorderColorCreateInfoEXT>::value, "SamplerCustomBorderColorCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorPropertiesEXT ) == sizeof( VkPhysicalDeviceCustomBorderColorPropertiesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorPropertiesEXT>::value, "PhysicalDeviceCustomBorderColorPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorFeaturesEXT ) == sizeof( VkPhysicalDeviceCustomBorderColorFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorFeaturesEXT>::value, "PhysicalDeviceCustomBorderColorFeaturesEXT is not nothrow_move_constructible!" );
+
+  //=== VK_KHR_pipeline_library ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR ) == sizeof( VkPipelineLibraryCreateInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR>::value, "PipelineLibraryCreateInfoKHR is not nothrow_move_constructible!" );
+
+  //=== VK_NV_present_barrier ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePresentBarrierFeaturesNV ) == sizeof( VkPhysicalDevicePresentBarrierFeaturesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentBarrierFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentBarrierFeaturesNV>::value, "PhysicalDevicePresentBarrierFeaturesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesPresentBarrierNV ) == sizeof( VkSurfaceCapabilitiesPresentBarrierNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesPresentBarrierNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesPresentBarrierNV>::value, "SurfaceCapabilitiesPresentBarrierNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SwapchainPresentBarrierCreateInfoNV ) == sizeof( VkSwapchainPresentBarrierCreateInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SwapchainPresentBarrierCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SwapchainPresentBarrierCreateInfoNV>::value, "SwapchainPresentBarrierCreateInfoNV is not nothrow_move_constructible!" );
+
+  //=== VK_KHR_present_id ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PresentIdKHR ) == sizeof( VkPresentIdKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PresentIdKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PresentIdKHR>::value, "PresentIdKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePresentIdFeaturesKHR ) == sizeof( VkPhysicalDevicePresentIdFeaturesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentIdFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentIdFeaturesKHR>::value, "PhysicalDevicePresentIdFeaturesKHR is not nothrow_move_constructible!" );
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  //=== VK_KHR_video_encode_queue ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR ) == sizeof( VkVideoEncodeInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR>::value, "VideoEncodeInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeCapabilitiesKHR ) == sizeof( VkVideoEncodeCapabilitiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeCapabilitiesKHR>::value, "VideoEncodeCapabilitiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeUsageInfoKHR ) == sizeof( VkVideoEncodeUsageInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeUsageInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeUsageInfoKHR>::value, "VideoEncodeUsageInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeRateControlInfoKHR ) == sizeof( VkVideoEncodeRateControlInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeRateControlInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeRateControlInfoKHR>::value, "VideoEncodeRateControlInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR ) == sizeof( VkVideoEncodeRateControlLayerInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR>::value, "VideoEncodeRateControlLayerInfoKHR is not nothrow_move_constructible!" );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  //=== VK_NV_device_diagnostics_config ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDiagnosticsConfigFeaturesNV ) == sizeof( VkPhysicalDeviceDiagnosticsConfigFeaturesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDiagnosticsConfigFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDiagnosticsConfigFeaturesNV>::value, "PhysicalDeviceDiagnosticsConfigFeaturesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigCreateInfoNV ) == sizeof( VkDeviceDiagnosticsConfigCreateInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigCreateInfoNV>::value, "DeviceDiagnosticsConfigCreateInfoNV is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+  //=== VK_EXT_metal_objects ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMetalObjectCreateInfoEXT ) == sizeof( VkExportMetalObjectCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportMetalObjectCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportMetalObjectCreateInfoEXT>::value, "ExportMetalObjectCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT ) == sizeof( VkExportMetalObjectsInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT>::value, "ExportMetalObjectsInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMetalDeviceInfoEXT ) == sizeof( VkExportMetalDeviceInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportMetalDeviceInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportMetalDeviceInfoEXT>::value, "ExportMetalDeviceInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMetalCommandQueueInfoEXT ) == sizeof( VkExportMetalCommandQueueInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportMetalCommandQueueInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportMetalCommandQueueInfoEXT>::value, "ExportMetalCommandQueueInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMetalBufferInfoEXT ) == sizeof( VkExportMetalBufferInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportMetalBufferInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportMetalBufferInfoEXT>::value, "ExportMetalBufferInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMetalBufferInfoEXT ) == sizeof( VkImportMetalBufferInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportMetalBufferInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportMetalBufferInfoEXT>::value, "ImportMetalBufferInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMetalTextureInfoEXT ) == sizeof( VkExportMetalTextureInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportMetalTextureInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportMetalTextureInfoEXT>::value, "ExportMetalTextureInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMetalTextureInfoEXT ) == sizeof( VkImportMetalTextureInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportMetalTextureInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportMetalTextureInfoEXT>::value, "ImportMetalTextureInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMetalIOSurfaceInfoEXT ) == sizeof( VkExportMetalIOSurfaceInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportMetalIOSurfaceInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportMetalIOSurfaceInfoEXT>::value, "ExportMetalIOSurfaceInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMetalIOSurfaceInfoEXT ) == sizeof( VkImportMetalIOSurfaceInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportMetalIOSurfaceInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportMetalIOSurfaceInfoEXT>::value, "ImportMetalIOSurfaceInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMetalSharedEventInfoEXT ) == sizeof( VkExportMetalSharedEventInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportMetalSharedEventInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportMetalSharedEventInfoEXT>::value, "ExportMetalSharedEventInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMetalSharedEventInfoEXT ) == sizeof( VkImportMetalSharedEventInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportMetalSharedEventInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportMetalSharedEventInfoEXT>::value, "ImportMetalSharedEventInfoEXT is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  //=== VK_KHR_synchronization2 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointProperties2NV ) == sizeof( VkQueueFamilyCheckpointProperties2NV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointProperties2NV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointProperties2NV>::value, "QueueFamilyCheckpointProperties2NV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CheckpointData2NV ) == sizeof( VkCheckpointData2NV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CheckpointData2NV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CheckpointData2NV>::value, "CheckpointData2NV is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_descriptor_buffer ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferPropertiesEXT ) == sizeof( VkPhysicalDeviceDescriptorBufferPropertiesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferPropertiesEXT>::value, "PhysicalDeviceDescriptorBufferPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT ) == sizeof( VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT>::value, "PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferFeaturesEXT ) == sizeof( VkPhysicalDeviceDescriptorBufferFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferFeaturesEXT>::value, "PhysicalDeviceDescriptorBufferFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT ) == sizeof( VkDescriptorAddressInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT>::value, "DescriptorAddressInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorBufferBindingInfoEXT ) == sizeof( VkDescriptorBufferBindingInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorBufferBindingInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorBufferBindingInfoEXT>::value, "DescriptorBufferBindingInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorBufferBindingPushDescriptorBufferHandleEXT ) == sizeof( VkDescriptorBufferBindingPushDescriptorBufferHandleEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorBufferBindingPushDescriptorBufferHandleEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorBufferBindingPushDescriptorBufferHandleEXT>::value, "DescriptorBufferBindingPushDescriptorBufferHandleEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorDataEXT ) == sizeof( VkDescriptorDataEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorDataEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorDataEXT>::value, "DescriptorDataEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT ) == sizeof( VkDescriptorGetInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT>::value, "DescriptorGetInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT ) == sizeof( VkBufferCaptureDescriptorDataInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT>::value, "BufferCaptureDescriptorDataInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT ) == sizeof( VkImageCaptureDescriptorDataInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT>::value, "ImageCaptureDescriptorDataInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT ) == sizeof( VkImageViewCaptureDescriptorDataInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT>::value, "ImageViewCaptureDescriptorDataInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT ) == sizeof( VkSamplerCaptureDescriptorDataInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT>::value, "SamplerCaptureDescriptorDataInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::OpaqueCaptureDescriptorDataCreateInfoEXT ) == sizeof( VkOpaqueCaptureDescriptorDataCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::OpaqueCaptureDescriptorDataCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::OpaqueCaptureDescriptorDataCreateInfoEXT>::value, "OpaqueCaptureDescriptorDataCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT ) == sizeof( VkAccelerationStructureCaptureDescriptorDataInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT>::value, "AccelerationStructureCaptureDescriptorDataInfoEXT is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_graphics_pipeline_library ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT ) == sizeof( VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT>::value, "PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT ) == sizeof( VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT>::value, "PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryCreateInfoEXT ) == sizeof( VkGraphicsPipelineLibraryCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryCreateInfoEXT>::value, "GraphicsPipelineLibraryCreateInfoEXT is not nothrow_move_constructible!" );
+
+  //=== VK_AMD_shader_early_and_late_fragment_tests ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD ) == sizeof( VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD>::value, "PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD is not nothrow_move_constructible!" );
+
+  //=== VK_KHR_fragment_shader_barycentric ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesKHR ) == sizeof( VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesKHR>::value, "PhysicalDeviceFragmentShaderBarycentricFeaturesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricPropertiesKHR ) == sizeof( VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricPropertiesKHR>::value, "PhysicalDeviceFragmentShaderBarycentricPropertiesKHR is not nothrow_move_constructible!" );
+
+  //=== VK_KHR_shader_subgroup_uniform_control_flow ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR ) == sizeof( VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR>::value, "PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR is not nothrow_move_constructible!" );
+
+  //=== VK_NV_fragment_shading_rate_enums ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsFeaturesNV ) == sizeof( VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsFeaturesNV>::value, "PhysicalDeviceFragmentShadingRateEnumsFeaturesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsPropertiesNV ) == sizeof( VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsPropertiesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsPropertiesNV>::value, "PhysicalDeviceFragmentShadingRateEnumsPropertiesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateEnumStateCreateInfoNV ) == sizeof( VkPipelineFragmentShadingRateEnumStateCreateInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateEnumStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateEnumStateCreateInfoNV>::value, "PipelineFragmentShadingRateEnumStateCreateInfoNV is not nothrow_move_constructible!" );
+
+  //=== VK_NV_ray_tracing_motion_blur ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryMotionTrianglesDataNV ) == sizeof( VkAccelerationStructureGeometryMotionTrianglesDataNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryMotionTrianglesDataNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryMotionTrianglesDataNV>::value, "AccelerationStructureGeometryMotionTrianglesDataNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoNV ) == sizeof( VkAccelerationStructureMotionInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoNV>::value, "AccelerationStructureMotionInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceNV ) == sizeof( VkAccelerationStructureMotionInstanceNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceNV>::value, "AccelerationStructureMotionInstanceNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceDataNV ) == sizeof( VkAccelerationStructureMotionInstanceDataNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceDataNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceDataNV>::value, "AccelerationStructureMotionInstanceDataNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureMatrixMotionInstanceNV ) == sizeof( VkAccelerationStructureMatrixMotionInstanceNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureMatrixMotionInstanceNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureMatrixMotionInstanceNV>::value, "AccelerationStructureMatrixMotionInstanceNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureSRTMotionInstanceNV ) == sizeof( VkAccelerationStructureSRTMotionInstanceNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureSRTMotionInstanceNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureSRTMotionInstanceNV>::value, "AccelerationStructureSRTMotionInstanceNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SRTDataNV ) == sizeof( VkSRTDataNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SRTDataNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SRTDataNV>::value, "SRTDataNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMotionBlurFeaturesNV ) == sizeof( VkPhysicalDeviceRayTracingMotionBlurFeaturesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMotionBlurFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMotionBlurFeaturesNV>::value, "PhysicalDeviceRayTracingMotionBlurFeaturesNV is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_mesh_shader ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesEXT ) == sizeof( VkPhysicalDeviceMeshShaderFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesEXT>::value, "PhysicalDeviceMeshShaderFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesEXT ) == sizeof( VkPhysicalDeviceMeshShaderPropertiesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesEXT>::value, "PhysicalDeviceMeshShaderPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandEXT ) == sizeof( VkDrawMeshTasksIndirectCommandEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandEXT>::value, "DrawMeshTasksIndirectCommandEXT is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_ycbcr_2plane_444_formats ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT ) == sizeof( VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT>::value, "PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_fragment_density_map2 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2FeaturesEXT ) == sizeof( VkPhysicalDeviceFragmentDensityMap2FeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2FeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2FeaturesEXT>::value, "PhysicalDeviceFragmentDensityMap2FeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2PropertiesEXT ) == sizeof( VkPhysicalDeviceFragmentDensityMap2PropertiesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2PropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2PropertiesEXT>::value, "PhysicalDeviceFragmentDensityMap2PropertiesEXT is not nothrow_move_constructible!" );
+
+  //=== VK_QCOM_rotated_copy_commands ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyCommandTransformInfoQCOM ) == sizeof( VkCopyCommandTransformInfoQCOM ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyCommandTransformInfoQCOM>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyCommandTransformInfoQCOM>::value, "CopyCommandTransformInfoQCOM is not nothrow_move_constructible!" );
+
+  //=== VK_KHR_workgroup_memory_explicit_layout ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR ) == sizeof( VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR>::value, "PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_image_compression_control ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageCompressionControlFeaturesEXT ) == sizeof( VkPhysicalDeviceImageCompressionControlFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageCompressionControlFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageCompressionControlFeaturesEXT>::value, "PhysicalDeviceImageCompressionControlFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageCompressionControlEXT ) == sizeof( VkImageCompressionControlEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageCompressionControlEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageCompressionControlEXT>::value, "ImageCompressionControlEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT ) == sizeof( VkSubresourceLayout2EXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT>::value, "SubresourceLayout2EXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageSubresource2EXT ) == sizeof( VkImageSubresource2EXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageSubresource2EXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageSubresource2EXT>::value, "ImageSubresource2EXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageCompressionPropertiesEXT ) == sizeof( VkImageCompressionPropertiesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageCompressionPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageCompressionPropertiesEXT>::value, "ImageCompressionPropertiesEXT is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_attachment_feedback_loop_layout ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT ) == sizeof( VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT>::value, "PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_4444_formats ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice4444FormatsFeaturesEXT ) == sizeof( VkPhysicalDevice4444FormatsFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevice4444FormatsFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevice4444FormatsFeaturesEXT>::value, "PhysicalDevice4444FormatsFeaturesEXT is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_device_fault ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFaultFeaturesEXT ) == sizeof( VkPhysicalDeviceFaultFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFaultFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFaultFeaturesEXT>::value, "PhysicalDeviceFaultFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT ) == sizeof( VkDeviceFaultCountsEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT>::value, "DeviceFaultCountsEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT ) == sizeof( VkDeviceFaultInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT>::value, "DeviceFaultInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT ) == sizeof( VkDeviceFaultAddressInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT>::value, "DeviceFaultAddressInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT ) == sizeof( VkDeviceFaultVendorInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT>::value, "DeviceFaultVendorInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionOneEXT ) == sizeof( VkDeviceFaultVendorBinaryHeaderVersionOneEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionOneEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionOneEXT>::value, "DeviceFaultVendorBinaryHeaderVersionOneEXT is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_rgba10x6_formats ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRGBA10X6FormatsFeaturesEXT ) == sizeof( VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRGBA10X6FormatsFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRGBA10X6FormatsFeaturesEXT>::value, "PhysicalDeviceRGBA10X6FormatsFeaturesEXT is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
+  //=== VK_EXT_directfb_surface ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT ) == sizeof( VkDirectFBSurfaceCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT>::value, "DirectFBSurfaceCreateInfoEXT is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+
+  //=== VK_KHR_ray_tracing_pipeline ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR ) == sizeof( VkRayTracingShaderGroupCreateInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR>::value, "RayTracingShaderGroupCreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR ) == sizeof( VkRayTracingPipelineCreateInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR>::value, "RayTracingPipelineCreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelineFeaturesKHR ) == sizeof( VkPhysicalDeviceRayTracingPipelineFeaturesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelineFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelineFeaturesKHR>::value, "PhysicalDeviceRayTracingPipelineFeaturesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelinePropertiesKHR ) == sizeof( VkPhysicalDeviceRayTracingPipelinePropertiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelinePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelinePropertiesKHR>::value, "PhysicalDeviceRayTracingPipelinePropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR ) == sizeof( VkStridedDeviceAddressRegionKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR>::value, "StridedDeviceAddressRegionKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommandKHR ) == sizeof( VkTraceRaysIndirectCommandKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommandKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommandKHR>::value, "TraceRaysIndirectCommandKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR ) == sizeof( VkRayTracingPipelineInterfaceCreateInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR>::value, "RayTracingPipelineInterfaceCreateInfoKHR is not nothrow_move_constructible!" );
+
+  //=== VK_KHR_ray_query ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayQueryFeaturesKHR ) == sizeof( VkPhysicalDeviceRayQueryFeaturesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayQueryFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayQueryFeaturesKHR>::value, "PhysicalDeviceRayQueryFeaturesKHR is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_vertex_input_dynamic_state ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexInputDynamicStateFeaturesEXT ) == sizeof( VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexInputDynamicStateFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexInputDynamicStateFeaturesEXT>::value, "PhysicalDeviceVertexInputDynamicStateFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT ) == sizeof( VkVertexInputBindingDescription2EXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT>::value, "VertexInputBindingDescription2EXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT ) == sizeof( VkVertexInputAttributeDescription2EXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT>::value, "VertexInputAttributeDescription2EXT is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_physical_device_drm ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDrmPropertiesEXT ) == sizeof( VkPhysicalDeviceDrmPropertiesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDrmPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDrmPropertiesEXT>::value, "PhysicalDeviceDrmPropertiesEXT is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_device_address_binding_report ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceAddressBindingReportFeaturesEXT ) == sizeof( VkPhysicalDeviceAddressBindingReportFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceAddressBindingReportFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceAddressBindingReportFeaturesEXT>::value, "PhysicalDeviceAddressBindingReportFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceAddressBindingCallbackDataEXT ) == sizeof( VkDeviceAddressBindingCallbackDataEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceAddressBindingCallbackDataEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceAddressBindingCallbackDataEXT>::value, "DeviceAddressBindingCallbackDataEXT is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_depth_clip_control ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipControlFeaturesEXT ) == sizeof( VkPhysicalDeviceDepthClipControlFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipControlFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipControlFeaturesEXT>::value, "PhysicalDeviceDepthClipControlFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportDepthClipControlCreateInfoEXT ) == sizeof( VkPipelineViewportDepthClipControlCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineViewportDepthClipControlCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineViewportDepthClipControlCreateInfoEXT>::value, "PipelineViewportDepthClipControlCreateInfoEXT is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_primitive_topology_list_restart ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT ) == sizeof( VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT>::value, "PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_external_memory ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryZirconHandleInfoFUCHSIA ) == sizeof( VkImportMemoryZirconHandleInfoFUCHSIA ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportMemoryZirconHandleInfoFUCHSIA>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportMemoryZirconHandleInfoFUCHSIA>::value, "ImportMemoryZirconHandleInfoFUCHSIA is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA ) == sizeof( VkMemoryZirconHandlePropertiesFUCHSIA ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA>::value, "MemoryZirconHandlePropertiesFUCHSIA is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA ) == sizeof( VkMemoryGetZirconHandleInfoFUCHSIA ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA>::value, "MemoryGetZirconHandleInfoFUCHSIA is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_external_semaphore ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA ) == sizeof( VkImportSemaphoreZirconHandleInfoFUCHSIA ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA>::value, "ImportSemaphoreZirconHandleInfoFUCHSIA is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA ) == sizeof( VkSemaphoreGetZirconHandleInfoFUCHSIA ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA>::value, "SemaphoreGetZirconHandleInfoFUCHSIA is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_buffer_collection ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA ) == sizeof( VkBufferCollectionFUCHSIA ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA>::value, "BufferCollectionFUCHSIA is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA ) == sizeof( VkBufferCollectionCreateInfoFUCHSIA ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA>::value, "BufferCollectionCreateInfoFUCHSIA is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryBufferCollectionFUCHSIA ) == sizeof( VkImportMemoryBufferCollectionFUCHSIA ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportMemoryBufferCollectionFUCHSIA>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportMemoryBufferCollectionFUCHSIA>::value, "ImportMemoryBufferCollectionFUCHSIA is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCollectionImageCreateInfoFUCHSIA ) == sizeof( VkBufferCollectionImageCreateInfoFUCHSIA ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferCollectionImageCreateInfoFUCHSIA>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferCollectionImageCreateInfoFUCHSIA>::value, "BufferCollectionImageCreateInfoFUCHSIA is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA ) == sizeof( VkBufferConstraintsInfoFUCHSIA ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA>::value, "BufferConstraintsInfoFUCHSIA is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCollectionBufferCreateInfoFUCHSIA ) == sizeof( VkBufferCollectionBufferCreateInfoFUCHSIA ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferCollectionBufferCreateInfoFUCHSIA>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferCollectionBufferCreateInfoFUCHSIA>::value, "BufferCollectionBufferCreateInfoFUCHSIA is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA ) == sizeof( VkBufferCollectionPropertiesFUCHSIA ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA>::value, "BufferCollectionPropertiesFUCHSIA is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA ) == sizeof( VkSysmemColorSpaceFUCHSIA ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA>::value, "SysmemColorSpaceFUCHSIA is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA ) == sizeof( VkImageConstraintsInfoFUCHSIA ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA>::value, "ImageConstraintsInfoFUCHSIA is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA ) == sizeof( VkImageFormatConstraintsInfoFUCHSIA ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA>::value, "ImageFormatConstraintsInfoFUCHSIA is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA ) == sizeof( VkBufferCollectionConstraintsInfoFUCHSIA ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA>::value, "BufferCollectionConstraintsInfoFUCHSIA is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  //=== VK_HUAWEI_subpass_shading ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassShadingPipelineCreateInfoHUAWEI ) == sizeof( VkSubpassShadingPipelineCreateInfoHUAWEI ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassShadingPipelineCreateInfoHUAWEI>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassShadingPipelineCreateInfoHUAWEI>::value, "SubpassShadingPipelineCreateInfoHUAWEI is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingFeaturesHUAWEI ) == sizeof( VkPhysicalDeviceSubpassShadingFeaturesHUAWEI ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingFeaturesHUAWEI>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingFeaturesHUAWEI>::value, "PhysicalDeviceSubpassShadingFeaturesHUAWEI is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingPropertiesHUAWEI ) == sizeof( VkPhysicalDeviceSubpassShadingPropertiesHUAWEI ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingPropertiesHUAWEI>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingPropertiesHUAWEI>::value, "PhysicalDeviceSubpassShadingPropertiesHUAWEI is not nothrow_move_constructible!" );
+
+  //=== VK_HUAWEI_invocation_mask ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceInvocationMaskFeaturesHUAWEI ) == sizeof( VkPhysicalDeviceInvocationMaskFeaturesHUAWEI ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceInvocationMaskFeaturesHUAWEI>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceInvocationMaskFeaturesHUAWEI>::value, "PhysicalDeviceInvocationMaskFeaturesHUAWEI is not nothrow_move_constructible!" );
+
+  //=== VK_NV_external_memory_rdma ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV ) == sizeof( VkMemoryGetRemoteAddressInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV>::value, "MemoryGetRemoteAddressInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryRDMAFeaturesNV ) == sizeof( VkPhysicalDeviceExternalMemoryRDMAFeaturesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryRDMAFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryRDMAFeaturesNV>::value, "PhysicalDeviceExternalMemoryRDMAFeaturesNV is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_pipeline_properties ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelinePropertiesIdentifierEXT ) == sizeof( VkPipelinePropertiesIdentifierEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelinePropertiesIdentifierEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelinePropertiesIdentifierEXT>::value, "PipelinePropertiesIdentifierEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelinePropertiesFeaturesEXT ) == sizeof( VkPhysicalDevicePipelinePropertiesFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelinePropertiesFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelinePropertiesFeaturesEXT>::value, "PhysicalDevicePipelinePropertiesFeaturesEXT is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_multisampled_render_to_single_sampled ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT ) == sizeof( VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT>::value, "PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassResolvePerformanceQueryEXT ) == sizeof( VkSubpassResolvePerformanceQueryEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassResolvePerformanceQueryEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassResolvePerformanceQueryEXT>::value, "SubpassResolvePerformanceQueryEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MultisampledRenderToSingleSampledInfoEXT ) == sizeof( VkMultisampledRenderToSingleSampledInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MultisampledRenderToSingleSampledInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MultisampledRenderToSingleSampledInfoEXT>::value, "MultisampledRenderToSingleSampledInfoEXT is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_extended_dynamic_state2 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState2FeaturesEXT ) == sizeof( VkPhysicalDeviceExtendedDynamicState2FeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState2FeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState2FeaturesEXT>::value, "PhysicalDeviceExtendedDynamicState2FeaturesEXT is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_SCREEN_QNX )
+  //=== VK_QNX_screen_surface ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX ) == sizeof( VkScreenSurfaceCreateInfoQNX ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX>::value, "ScreenSurfaceCreateInfoQNX is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+
+  //=== VK_EXT_color_write_enable ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceColorWriteEnableFeaturesEXT ) == sizeof( VkPhysicalDeviceColorWriteEnableFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceColorWriteEnableFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceColorWriteEnableFeaturesEXT>::value, "PhysicalDeviceColorWriteEnableFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineColorWriteCreateInfoEXT ) == sizeof( VkPipelineColorWriteCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineColorWriteCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineColorWriteCreateInfoEXT>::value, "PipelineColorWriteCreateInfoEXT is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_primitives_generated_query ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT ) == sizeof( VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT>::value, "PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT is not nothrow_move_constructible!" );
+
+  //=== VK_KHR_ray_tracing_maintenance1 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMaintenance1FeaturesKHR ) == sizeof( VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMaintenance1FeaturesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMaintenance1FeaturesKHR>::value, "PhysicalDeviceRayTracingMaintenance1FeaturesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommand2KHR ) == sizeof( VkTraceRaysIndirectCommand2KHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommand2KHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommand2KHR>::value, "TraceRaysIndirectCommand2KHR is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_image_view_min_lod ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewMinLodFeaturesEXT ) == sizeof( VkPhysicalDeviceImageViewMinLodFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewMinLodFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewMinLodFeaturesEXT>::value, "PhysicalDeviceImageViewMinLodFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewMinLodCreateInfoEXT ) == sizeof( VkImageViewMinLodCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageViewMinLodCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageViewMinLodCreateInfoEXT>::value, "ImageViewMinLodCreateInfoEXT is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_multi_draw ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawFeaturesEXT ) == sizeof( VkPhysicalDeviceMultiDrawFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawFeaturesEXT>::value, "PhysicalDeviceMultiDrawFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawPropertiesEXT ) == sizeof( VkPhysicalDeviceMultiDrawPropertiesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawPropertiesEXT>::value, "PhysicalDeviceMultiDrawPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT ) == sizeof( VkMultiDrawInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT>::value, "MultiDrawInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT ) == sizeof( VkMultiDrawIndexedInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT>::value, "MultiDrawIndexedInfoEXT is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_image_2d_view_of_3d ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImage2DViewOf3DFeaturesEXT ) == sizeof( VkPhysicalDeviceImage2DViewOf3DFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceImage2DViewOf3DFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceImage2DViewOf3DFeaturesEXT>::value, "PhysicalDeviceImage2DViewOf3DFeaturesEXT is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_opacity_micromap ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT ) == sizeof( VkMicromapBuildInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT>::value, "MicromapBuildInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MicromapUsageEXT ) == sizeof( VkMicromapUsageEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MicromapUsageEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MicromapUsageEXT>::value, "MicromapUsageEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT ) == sizeof( VkMicromapCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT>::value, "MicromapCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MicromapEXT ) == sizeof( VkMicromapEXT ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MicromapEXT>::value, "MicromapEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapFeaturesEXT ) == sizeof( VkPhysicalDeviceOpacityMicromapFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapFeaturesEXT>::value, "PhysicalDeviceOpacityMicromapFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapPropertiesEXT ) == sizeof( VkPhysicalDeviceOpacityMicromapPropertiesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapPropertiesEXT>::value, "PhysicalDeviceOpacityMicromapPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT ) == sizeof( VkMicromapVersionInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT>::value, "MicromapVersionInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT ) == sizeof( VkCopyMicromapToMemoryInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT>::value, "CopyMicromapToMemoryInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT ) == sizeof( VkCopyMemoryToMicromapInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT>::value, "CopyMemoryToMicromapInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT ) == sizeof( VkCopyMicromapInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT>::value, "CopyMicromapInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT ) == sizeof( VkMicromapBuildSizesInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT>::value, "MicromapBuildSizesInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureTrianglesOpacityMicromapEXT ) == sizeof( VkAccelerationStructureTrianglesOpacityMicromapEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureTrianglesOpacityMicromapEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureTrianglesOpacityMicromapEXT>::value, "AccelerationStructureTrianglesOpacityMicromapEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MicromapTriangleEXT ) == sizeof( VkMicromapTriangleEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MicromapTriangleEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MicromapTriangleEXT>::value, "MicromapTriangleEXT is not nothrow_move_constructible!" );
+
+  //=== VK_HUAWEI_cluster_culling_shader ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterCullingShaderFeaturesHUAWEI ) == sizeof( VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterCullingShaderFeaturesHUAWEI>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterCullingShaderFeaturesHUAWEI>::value, "PhysicalDeviceClusterCullingShaderFeaturesHUAWEI is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterCullingShaderPropertiesHUAWEI ) == sizeof( VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterCullingShaderPropertiesHUAWEI>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterCullingShaderPropertiesHUAWEI>::value, "PhysicalDeviceClusterCullingShaderPropertiesHUAWEI is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_border_color_swizzle ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceBorderColorSwizzleFeaturesEXT ) == sizeof( VkPhysicalDeviceBorderColorSwizzleFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceBorderColorSwizzleFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceBorderColorSwizzleFeaturesEXT>::value, "PhysicalDeviceBorderColorSwizzleFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerBorderColorComponentMappingCreateInfoEXT ) == sizeof( VkSamplerBorderColorComponentMappingCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SamplerBorderColorComponentMappingCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SamplerBorderColorComponentMappingCreateInfoEXT>::value, "SamplerBorderColorComponentMappingCreateInfoEXT is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_pageable_device_local_memory ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT ) == sizeof( VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT>::value, "PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT is not nothrow_move_constructible!" );
+
+  //=== VK_VALVE_descriptor_set_host_mapping ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE ) == sizeof( VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE>::value, "PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE ) == sizeof( VkDescriptorSetBindingReferenceVALVE ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE>::value, "DescriptorSetBindingReferenceVALVE is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE ) == sizeof( VkDescriptorSetLayoutHostMappingInfoVALVE ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE>::value, "DescriptorSetLayoutHostMappingInfoVALVE is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_depth_clamp_zero_one ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClampZeroOneFeaturesEXT ) == sizeof( VkPhysicalDeviceDepthClampZeroOneFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClampZeroOneFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClampZeroOneFeaturesEXT>::value, "PhysicalDeviceDepthClampZeroOneFeaturesEXT is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_non_seamless_cube_map ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceNonSeamlessCubeMapFeaturesEXT ) == sizeof( VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceNonSeamlessCubeMapFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceNonSeamlessCubeMapFeaturesEXT>::value, "PhysicalDeviceNonSeamlessCubeMapFeaturesEXT is not nothrow_move_constructible!" );
+
+  //=== VK_QCOM_fragment_density_map_offset ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM ) == sizeof( VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM>::value, "PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM ) == sizeof( VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM>::value, "PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassFragmentDensityMapOffsetEndInfoQCOM ) == sizeof( VkSubpassFragmentDensityMapOffsetEndInfoQCOM ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassFragmentDensityMapOffsetEndInfoQCOM>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassFragmentDensityMapOffsetEndInfoQCOM>::value, "SubpassFragmentDensityMapOffsetEndInfoQCOM is not nothrow_move_constructible!" );
+
+  //=== VK_NV_copy_memory_indirect ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyMemoryIndirectCommandNV ) == sizeof( VkCopyMemoryIndirectCommandNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyMemoryIndirectCommandNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyMemoryIndirectCommandNV>::value, "CopyMemoryIndirectCommandNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyMemoryToImageIndirectCommandNV ) == sizeof( VkCopyMemoryToImageIndirectCommandNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyMemoryToImageIndirectCommandNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyMemoryToImageIndirectCommandNV>::value, "CopyMemoryToImageIndirectCommandNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCopyMemoryIndirectFeaturesNV ) == sizeof( VkPhysicalDeviceCopyMemoryIndirectFeaturesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceCopyMemoryIndirectFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceCopyMemoryIndirectFeaturesNV>::value, "PhysicalDeviceCopyMemoryIndirectFeaturesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCopyMemoryIndirectPropertiesNV ) == sizeof( VkPhysicalDeviceCopyMemoryIndirectPropertiesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceCopyMemoryIndirectPropertiesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceCopyMemoryIndirectPropertiesNV>::value, "PhysicalDeviceCopyMemoryIndirectPropertiesNV is not nothrow_move_constructible!" );
+
+  //=== VK_NV_memory_decompression ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV ) == sizeof( VkDecompressMemoryRegionNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV>::value, "DecompressMemoryRegionNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryDecompressionFeaturesNV ) == sizeof( VkPhysicalDeviceMemoryDecompressionFeaturesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryDecompressionFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryDecompressionFeaturesNV>::value, "PhysicalDeviceMemoryDecompressionFeaturesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryDecompressionPropertiesNV ) == sizeof( VkPhysicalDeviceMemoryDecompressionPropertiesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryDecompressionPropertiesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryDecompressionPropertiesNV>::value, "PhysicalDeviceMemoryDecompressionPropertiesNV is not nothrow_move_constructible!" );
+
+  //=== VK_NV_linear_color_attachment ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLinearColorAttachmentFeaturesNV ) == sizeof( VkPhysicalDeviceLinearColorAttachmentFeaturesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceLinearColorAttachmentFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceLinearColorAttachmentFeaturesNV>::value, "PhysicalDeviceLinearColorAttachmentFeaturesNV is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_image_compression_control_swapchain ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT ) == sizeof( VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT>::value, "PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT is not nothrow_move_constructible!" );
+
+  //=== VK_QCOM_image_processing ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewSampleWeightCreateInfoQCOM ) == sizeof( VkImageViewSampleWeightCreateInfoQCOM ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageViewSampleWeightCreateInfoQCOM>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageViewSampleWeightCreateInfoQCOM>::value, "ImageViewSampleWeightCreateInfoQCOM is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessingFeaturesQCOM ) == sizeof( VkPhysicalDeviceImageProcessingFeaturesQCOM ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessingFeaturesQCOM>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessingFeaturesQCOM>::value, "PhysicalDeviceImageProcessingFeaturesQCOM is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessingPropertiesQCOM ) == sizeof( VkPhysicalDeviceImageProcessingPropertiesQCOM ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessingPropertiesQCOM>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessingPropertiesQCOM>::value, "PhysicalDeviceImageProcessingPropertiesQCOM is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_extended_dynamic_state3 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3FeaturesEXT ) == sizeof( VkPhysicalDeviceExtendedDynamicState3FeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3FeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3FeaturesEXT>::value, "PhysicalDeviceExtendedDynamicState3FeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3PropertiesEXT ) == sizeof( VkPhysicalDeviceExtendedDynamicState3PropertiesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3PropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3PropertiesEXT>::value, "PhysicalDeviceExtendedDynamicState3PropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT ) == sizeof( VkColorBlendEquationEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT>::value, "ColorBlendEquationEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT ) == sizeof( VkColorBlendAdvancedEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT>::value, "ColorBlendAdvancedEXT is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_subpass_merge_feedback ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassMergeFeedbackFeaturesEXT ) == sizeof( VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassMergeFeedbackFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassMergeFeedbackFeaturesEXT>::value, "PhysicalDeviceSubpassMergeFeedbackFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassCreationControlEXT ) == sizeof( VkRenderPassCreationControlEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassCreationControlEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassCreationControlEXT>::value, "RenderPassCreationControlEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackInfoEXT ) == sizeof( VkRenderPassCreationFeedbackInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackInfoEXT>::value, "RenderPassCreationFeedbackInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackCreateInfoEXT ) == sizeof( VkRenderPassCreationFeedbackCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackCreateInfoEXT>::value, "RenderPassCreationFeedbackCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackInfoEXT ) == sizeof( VkRenderPassSubpassFeedbackInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackInfoEXT>::value, "RenderPassSubpassFeedbackInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackCreateInfoEXT ) == sizeof( VkRenderPassSubpassFeedbackCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackCreateInfoEXT>::value, "RenderPassSubpassFeedbackCreateInfoEXT is not nothrow_move_constructible!" );
+
+  //=== VK_LUNARG_direct_driver_loading ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DirectDriverLoadingInfoLUNARG ) == sizeof( VkDirectDriverLoadingInfoLUNARG ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DirectDriverLoadingInfoLUNARG>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DirectDriverLoadingInfoLUNARG>::value, "DirectDriverLoadingInfoLUNARG is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DirectDriverLoadingListLUNARG ) == sizeof( VkDirectDriverLoadingListLUNARG ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DirectDriverLoadingListLUNARG>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DirectDriverLoadingListLUNARG>::value, "DirectDriverLoadingListLUNARG is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_shader_module_identifier ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderModuleIdentifierFeaturesEXT ) == sizeof( VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderModuleIdentifierFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderModuleIdentifierFeaturesEXT>::value, "PhysicalDeviceShaderModuleIdentifierFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderModuleIdentifierPropertiesEXT ) == sizeof( VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderModuleIdentifierPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderModuleIdentifierPropertiesEXT>::value, "PhysicalDeviceShaderModuleIdentifierPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineShaderStageModuleIdentifierCreateInfoEXT ) == sizeof( VkPipelineShaderStageModuleIdentifierCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineShaderStageModuleIdentifierCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineShaderStageModuleIdentifierCreateInfoEXT>::value, "PipelineShaderStageModuleIdentifierCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT ) == sizeof( VkShaderModuleIdentifierEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT>::value, "ShaderModuleIdentifierEXT is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_rasterization_order_attachment_access ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT ) == sizeof( VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT>::value, "PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT is not nothrow_move_constructible!" );
+
+  //=== VK_NV_optical_flow ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowFeaturesNV ) == sizeof( VkPhysicalDeviceOpticalFlowFeaturesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowFeaturesNV>::value, "PhysicalDeviceOpticalFlowFeaturesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowPropertiesNV ) == sizeof( VkPhysicalDeviceOpticalFlowPropertiesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowPropertiesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowPropertiesNV>::value, "PhysicalDeviceOpticalFlowPropertiesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV ) == sizeof( VkOpticalFlowImageFormatInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV>::value, "OpticalFlowImageFormatInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV ) == sizeof( VkOpticalFlowImageFormatPropertiesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV>::value, "OpticalFlowImageFormatPropertiesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV ) == sizeof( VkOpticalFlowSessionNV ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV>::value, "OpticalFlowSessionNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV ) == sizeof( VkOpticalFlowSessionCreateInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV>::value, "OpticalFlowSessionCreateInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreatePrivateDataInfoNV ) == sizeof( VkOpticalFlowSessionCreatePrivateDataInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreatePrivateDataInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreatePrivateDataInfoNV>::value, "OpticalFlowSessionCreatePrivateDataInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV ) == sizeof( VkOpticalFlowExecuteInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV>::value, "OpticalFlowExecuteInfoNV is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_legacy_dithering ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLegacyDitheringFeaturesEXT ) == sizeof( VkPhysicalDeviceLegacyDitheringFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceLegacyDitheringFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceLegacyDitheringFeaturesEXT>::value, "PhysicalDeviceLegacyDitheringFeaturesEXT is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_pipeline_protected_access ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineProtectedAccessFeaturesEXT ) == sizeof( VkPhysicalDevicePipelineProtectedAccessFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineProtectedAccessFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineProtectedAccessFeaturesEXT>::value, "PhysicalDevicePipelineProtectedAccessFeaturesEXT is not nothrow_move_constructible!" );
+
+  //=== VK_QCOM_tile_properties ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTilePropertiesFeaturesQCOM ) == sizeof( VkPhysicalDeviceTilePropertiesFeaturesQCOM ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceTilePropertiesFeaturesQCOM>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceTilePropertiesFeaturesQCOM>::value, "PhysicalDeviceTilePropertiesFeaturesQCOM is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::TilePropertiesQCOM ) == sizeof( VkTilePropertiesQCOM ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM>::value, "TilePropertiesQCOM is not nothrow_move_constructible!" );
+
+  //=== VK_SEC_amigo_profiling ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceAmigoProfilingFeaturesSEC ) == sizeof( VkPhysicalDeviceAmigoProfilingFeaturesSEC ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceAmigoProfilingFeaturesSEC>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceAmigoProfilingFeaturesSEC>::value, "PhysicalDeviceAmigoProfilingFeaturesSEC is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AmigoProfilingSubmitInfoSEC ) == sizeof( VkAmigoProfilingSubmitInfoSEC ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AmigoProfilingSubmitInfoSEC>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AmigoProfilingSubmitInfoSEC>::value, "AmigoProfilingSubmitInfoSEC is not nothrow_move_constructible!" );
+
+  //=== VK_QCOM_multiview_per_view_viewports ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM ) == sizeof( VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM>::value, "PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM is not nothrow_move_constructible!" );
+
+  //=== VK_NV_ray_tracing_invocation_reorder ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingInvocationReorderPropertiesNV ) == sizeof( VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingInvocationReorderPropertiesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingInvocationReorderPropertiesNV>::value, "PhysicalDeviceRayTracingInvocationReorderPropertiesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingInvocationReorderFeaturesNV ) == sizeof( VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingInvocationReorderFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingInvocationReorderFeaturesNV>::value, "PhysicalDeviceRayTracingInvocationReorderFeaturesNV is not nothrow_move_constructible!" );
+
+  //=== VK_EXT_mutable_descriptor_type ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMutableDescriptorTypeFeaturesEXT ) == sizeof( VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMutableDescriptorTypeFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMutableDescriptorTypeFeaturesEXT>::value, "PhysicalDeviceMutableDescriptorTypeFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListEXT ) == sizeof( VkMutableDescriptorTypeListEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListEXT>::value, "MutableDescriptorTypeListEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MutableDescriptorTypeCreateInfoEXT ) == sizeof( VkMutableDescriptorTypeCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MutableDescriptorTypeCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MutableDescriptorTypeCreateInfoEXT>::value, "MutableDescriptorTypeCreateInfoEXT is not nothrow_move_constructible!" );
+
+  //=== VK_ARM_shader_core_builtins ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreBuiltinsFeaturesARM ) == sizeof( VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreBuiltinsFeaturesARM>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreBuiltinsFeaturesARM>::value, "PhysicalDeviceShaderCoreBuiltinsFeaturesARM is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreBuiltinsPropertiesARM ) == sizeof( VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreBuiltinsPropertiesARM>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreBuiltinsPropertiesARM>::value, "PhysicalDeviceShaderCoreBuiltinsPropertiesARM is not nothrow_move_constructible!" );
+
+#endif
diff --git a/host/libs/graphics_detector/include/vulkan/vulkan_structs.hpp b/host/libs/graphics_detector/include/vulkan/vulkan_structs.hpp
new file mode 100644
index 0000000..31e5d1b
--- /dev/null
+++ b/host/libs/graphics_detector/include/vulkan/vulkan_structs.hpp
@@ -0,0 +1,101862 @@
+// Copyright 2015-2022 The Khronos Group Inc.
+// 
+// SPDX-License-Identifier: Apache-2.0 OR MIT
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#ifndef VULKAN_STRUCTS_HPP
+#  define VULKAN_STRUCTS_HPP
+
+#include <cstring>  // strcmp
+
+namespace VULKAN_HPP_NAMESPACE
+{
+
+  //===============
+  //=== STRUCTS ===
+  //===============
+
+
+  struct AabbPositionsKHR
+  {
+    using NativeType = VkAabbPositionsKHR;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR AabbPositionsKHR(float minX_ = {}, float minY_ = {}, float minZ_ = {}, float maxX_ = {}, float maxY_ = {}, float maxZ_ = {}) VULKAN_HPP_NOEXCEPT
+    : minX( minX_ ), minY( minY_ ), minZ( minZ_ ), maxX( maxX_ ), maxY( maxY_ ), maxZ( maxZ_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR AabbPositionsKHR( AabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AabbPositionsKHR( VkAabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : AabbPositionsKHR( *reinterpret_cast<AabbPositionsKHR const *>( &rhs ) )
+    {}
+
+
+    AabbPositionsKHR & operator=( AabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    AabbPositionsKHR & operator=( VkAabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AabbPositionsKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & setMinX( float minX_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minX = minX_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & setMinY( float minY_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minY = minY_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & setMinZ( float minZ_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minZ = minZ_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & setMaxX( float maxX_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxX = maxX_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & setMaxY( float maxY_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxY = maxY_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & setMaxZ( float maxZ_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxZ = maxZ_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkAabbPositionsKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAabbPositionsKHR*>( this );
+    }
+
+    operator VkAabbPositionsKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAabbPositionsKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<float const &, float const &, float const &, float const &, float const &, float const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( minX, minY, minZ, maxX, maxY, maxZ );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( AabbPositionsKHR const & ) const = default;
+#else
+    bool operator==( AabbPositionsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( minX == rhs.minX )
+          && ( minY == rhs.minY )
+          && ( minZ == rhs.minZ )
+          && ( maxX == rhs.maxX )
+          && ( maxY == rhs.maxY )
+          && ( maxZ == rhs.maxZ );
+#endif
+    }
+
+    bool operator!=( AabbPositionsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    float minX = {};
+    float minY = {};
+    float minZ = {};
+    float maxX = {};
+    float maxY = {};
+    float maxZ = {};
+
+  };
+  using AabbPositionsNV = AabbPositionsKHR;
+
+  union DeviceOrHostAddressConstKHR
+  {
+    using NativeType = VkDeviceOrHostAddressConstKHR;
+#if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstKHR( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} )
+      : deviceAddress( deviceAddress_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstKHR( const void * hostAddress_ )
+      : hostAddress( hostAddress_ )
+    {}
+#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/
+
+#if !defined( VULKAN_HPP_NO_UNION_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstKHR & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceAddress = deviceAddress_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstKHR & setHostAddress( const void * hostAddress_ ) VULKAN_HPP_NOEXCEPT
+    {
+      hostAddress = hostAddress_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_UNION_SETTERS*/
+
+    operator VkDeviceOrHostAddressConstKHR const &() const
+    {
+      return *reinterpret_cast<const VkDeviceOrHostAddressConstKHR*>( this );
+    }
+
+    operator VkDeviceOrHostAddressConstKHR &()
+    {
+      return *reinterpret_cast<VkDeviceOrHostAddressConstKHR*>( this );
+    }
+
+#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
+    VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress;
+    const void * hostAddress;
+#else
+    VkDeviceAddress deviceAddress;
+    const void * hostAddress;
+#endif  /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
+
+  };
+
+  struct AccelerationStructureGeometryTrianglesDataKHR
+  {
+    using NativeType = VkAccelerationStructureGeometryTrianglesDataKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryTrianglesDataKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR(VULKAN_HPP_NAMESPACE::Format vertexFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ = {}, uint32_t maxVertex_ = {}, VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexData_ = {}, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR transformData_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), vertexFormat( vertexFormat_ ), vertexData( vertexData_ ), vertexStride( vertexStride_ ), maxVertex( maxVertex_ ), indexType( indexType_ ), indexData( indexData_ ), transformData( transformData_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR( AccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AccelerationStructureGeometryTrianglesDataKHR( VkAccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : AccelerationStructureGeometryTrianglesDataKHR( *reinterpret_cast<AccelerationStructureGeometryTrianglesDataKHR const *>( &rhs ) )
+    {}
+
+
+    AccelerationStructureGeometryTrianglesDataKHR & operator=( AccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    AccelerationStructureGeometryTrianglesDataKHR & operator=( VkAccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & setVertexFormat( VULKAN_HPP_NAMESPACE::Format vertexFormat_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexFormat = vertexFormat_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & setVertexData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & vertexData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexData = vertexData_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & setVertexStride( VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexStride = vertexStride_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & setMaxVertex( uint32_t maxVertex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxVertex = maxVertex_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      indexType = indexType_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & setIndexData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & indexData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      indexData = indexData_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & setTransformData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & transformData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      transformData = transformData_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkAccelerationStructureGeometryTrianglesDataKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAccelerationStructureGeometryTrianglesDataKHR*>( this );
+    }
+
+    operator VkAccelerationStructureGeometryTrianglesDataKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAccelerationStructureGeometryTrianglesDataKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, uint32_t const &, VULKAN_HPP_NAMESPACE::IndexType const &, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const &, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, vertexFormat, vertexData, vertexStride, maxVertex, indexType, indexData, transformData );
+    }
+#endif
+
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryTrianglesDataKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Format vertexFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize vertexStride = {};
+    uint32_t maxVertex = {};
+    VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16;
+    VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexData = {};
+    VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR transformData = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eAccelerationStructureGeometryTrianglesDataKHR>
+  {
+    using Type = AccelerationStructureGeometryTrianglesDataKHR;
+  };
+
+  struct AccelerationStructureGeometryAabbsDataKHR
+  {
+    using NativeType = VkAccelerationStructureGeometryAabbsDataKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryAabbsDataKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryAabbsDataKHR(VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize stride_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), data( data_ ), stride( stride_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryAabbsDataKHR( AccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AccelerationStructureGeometryAabbsDataKHR( VkAccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : AccelerationStructureGeometryAabbsDataKHR( *reinterpret_cast<AccelerationStructureGeometryAabbsDataKHR const *>( &rhs ) )
+    {}
+
+
+    AccelerationStructureGeometryAabbsDataKHR & operator=( AccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    AccelerationStructureGeometryAabbsDataKHR & operator=( VkAccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryAabbsDataKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryAabbsDataKHR & setData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & data_ ) VULKAN_HPP_NOEXCEPT
+    {
+      data = data_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryAabbsDataKHR & setStride( VULKAN_HPP_NAMESPACE::DeviceSize stride_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stride = stride_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkAccelerationStructureGeometryAabbsDataKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAccelerationStructureGeometryAabbsDataKHR*>( this );
+    }
+
+    operator VkAccelerationStructureGeometryAabbsDataKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAccelerationStructureGeometryAabbsDataKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, data, stride );
+    }
+#endif
+
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryAabbsDataKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize stride = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eAccelerationStructureGeometryAabbsDataKHR>
+  {
+    using Type = AccelerationStructureGeometryAabbsDataKHR;
+  };
+
+  struct AccelerationStructureGeometryInstancesDataKHR
+  {
+    using NativeType = VkAccelerationStructureGeometryInstancesDataKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryInstancesDataKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryInstancesDataKHR(VULKAN_HPP_NAMESPACE::Bool32 arrayOfPointers_ = {}, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), arrayOfPointers( arrayOfPointers_ ), data( data_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryInstancesDataKHR( AccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AccelerationStructureGeometryInstancesDataKHR( VkAccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : AccelerationStructureGeometryInstancesDataKHR( *reinterpret_cast<AccelerationStructureGeometryInstancesDataKHR const *>( &rhs ) )
+    {}
+
+
+    AccelerationStructureGeometryInstancesDataKHR & operator=( AccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    AccelerationStructureGeometryInstancesDataKHR & operator=( VkAccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryInstancesDataKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryInstancesDataKHR & setArrayOfPointers( VULKAN_HPP_NAMESPACE::Bool32 arrayOfPointers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      arrayOfPointers = arrayOfPointers_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryInstancesDataKHR & setData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & data_ ) VULKAN_HPP_NOEXCEPT
+    {
+      data = data_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkAccelerationStructureGeometryInstancesDataKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAccelerationStructureGeometryInstancesDataKHR*>( this );
+    }
+
+    operator VkAccelerationStructureGeometryInstancesDataKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAccelerationStructureGeometryInstancesDataKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, arrayOfPointers, data );
+    }
+#endif
+
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryInstancesDataKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 arrayOfPointers = {};
+    VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eAccelerationStructureGeometryInstancesDataKHR>
+  {
+    using Type = AccelerationStructureGeometryInstancesDataKHR;
+  };
+
+  union AccelerationStructureGeometryDataKHR
+  {
+    using NativeType = VkAccelerationStructureGeometryDataKHR;
+#if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR triangles_ = {} )
+      : triangles( triangles_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR aabbs_ )
+      : aabbs( aabbs_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR instances_ )
+      : instances( instances_ )
+    {}
+#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/
+
+#if !defined( VULKAN_HPP_NO_UNION_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR & setTriangles( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR const & triangles_ ) VULKAN_HPP_NOEXCEPT
+    {
+      triangles = triangles_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR & setAabbs( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR const & aabbs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      aabbs = aabbs_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR & setInstances( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR const & instances_ ) VULKAN_HPP_NOEXCEPT
+    {
+      instances = instances_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_UNION_SETTERS*/
+
+    operator VkAccelerationStructureGeometryDataKHR const &() const
+    {
+      return *reinterpret_cast<const VkAccelerationStructureGeometryDataKHR*>( this );
+    }
+
+    operator VkAccelerationStructureGeometryDataKHR &()
+    {
+      return *reinterpret_cast<VkAccelerationStructureGeometryDataKHR*>( this );
+    }
+
+#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
+    VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR triangles;
+    VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR aabbs;
+    VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR instances;
+#else
+    VkAccelerationStructureGeometryTrianglesDataKHR triangles;
+    VkAccelerationStructureGeometryAabbsDataKHR aabbs;
+    VkAccelerationStructureGeometryInstancesDataKHR instances;
+#endif  /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
+
+  };
+
+  struct AccelerationStructureGeometryKHR
+  {
+    using NativeType = VkAccelerationStructureGeometryKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryKHR(VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles, VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR geometry_ = {}, VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), geometryType( geometryType_ ), geometry( geometry_ ), flags( flags_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryKHR( AccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AccelerationStructureGeometryKHR( VkAccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : AccelerationStructureGeometryKHR( *reinterpret_cast<AccelerationStructureGeometryKHR const *>( &rhs ) )
+    {}
+
+
+    AccelerationStructureGeometryKHR & operator=( AccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    AccelerationStructureGeometryKHR & operator=( VkAccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryKHR & setGeometryType( VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      geometryType = geometryType_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryKHR & setGeometry( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR const & geometry_ ) VULKAN_HPP_NOEXCEPT
+    {
+      geometry = geometry_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryKHR & setFlags( VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkAccelerationStructureGeometryKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAccelerationStructureGeometryKHR*>( this );
+    }
+
+    operator VkAccelerationStructureGeometryKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAccelerationStructureGeometryKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::GeometryTypeKHR const &, VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR const &, VULKAN_HPP_NAMESPACE::GeometryFlagsKHR const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, geometryType, geometry, flags );
+    }
+#endif
+
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles;
+    VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR geometry = {};
+    VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eAccelerationStructureGeometryKHR>
+  {
+    using Type = AccelerationStructureGeometryKHR;
+  };
+
+  // C++ wrapper for the VkDeviceOrHostAddressKHR C union: holds either a device address
+  // or a host pointer. Layout-compatible with the C union — the conversion operators
+  // below reinterpret_cast *this — so it can be handed straight to the C API.
+  union DeviceOrHostAddressKHR
+  {
+    using NativeType = VkDeviceOrHostAddressKHR;
+#if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )
+
+    // Default-constructs as a (zero) device address.
+    VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressKHR( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} )
+      : deviceAddress( deviceAddress_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressKHR( void * hostAddress_ )
+      : hostAddress( hostAddress_ )
+    {}
+#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/
+
+#if !defined( VULKAN_HPP_NO_UNION_SETTERS )
+    // Chainable setters; assigning a member makes it the active member of the union.
+    VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressKHR & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceAddress = deviceAddress_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressKHR & setHostAddress( void * hostAddress_ ) VULKAN_HPP_NOEXCEPT
+    {
+      hostAddress = hostAddress_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_UNION_SETTERS*/
+
+    // Reinterpret-cast conversions to the native C union type.
+    operator VkDeviceOrHostAddressKHR const &() const
+    {
+      return *reinterpret_cast<const VkDeviceOrHostAddressKHR*>( this );
+    }
+
+    operator VkDeviceOrHostAddressKHR &()
+    {
+      return *reinterpret_cast<VkDeviceOrHostAddressKHR*>( this );
+    }
+
+#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
+    VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress;
+    void * hostAddress;
+#else
+    // Without unrestricted-union support, fall back to the plain C member types.
+    VkDeviceAddress deviceAddress;
+    void * hostAddress;
+#endif  /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
+
+  };
+
+  // C++ wrapper for VkAccelerationStructureBuildGeometryInfoKHR: describes one build/update
+  // of an acceleration structure (type, mode, src/dst handles, the geometry list via either
+  // pGeometries or ppGeometries, and scratch memory). Layout-compatible with the C struct.
+  struct AccelerationStructureBuildGeometryInfoKHR
+  {
+    using NativeType = VkAccelerationStructureBuildGeometryInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureBuildGeometryInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR(VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel, VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ = {}, VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR mode_ = VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR::eBuild, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure_ = {}, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure_ = {}, uint32_t geometryCount_ = {}, const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * pGeometries_ = {}, const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * const * ppGeometries_ = {}, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), type( type_ ), flags( flags_ ), mode( mode_ ), srcAccelerationStructure( srcAccelerationStructure_ ), dstAccelerationStructure( dstAccelerationStructure_ ), geometryCount( geometryCount_ ), pGeometries( pGeometries_ ), ppGeometries( ppGeometries_ ), scratchData( scratchData_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR( AccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AccelerationStructureBuildGeometryInfoKHR( VkAccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : AccelerationStructureBuildGeometryInfoKHR( *reinterpret_cast<AccelerationStructureBuildGeometryInfoKHR const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Enhanced-mode constructor taking ArrayProxies. At most one of geometries_ /
+    // pGeometries_ may be non-empty (asserted, or LogicError thrown, below);
+    // geometryCount is derived from whichever proxy is used.
+    AccelerationStructureBuildGeometryInfoKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_, VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_, VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR mode_, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure_, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR> const & geometries_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * const> const & pGeometries_ = {}, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData_ = {}, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), type( type_ ), flags( flags_ ), mode( mode_ ), srcAccelerationStructure( srcAccelerationStructure_ ), dstAccelerationStructure( dstAccelerationStructure_ ), geometryCount( static_cast<uint32_t>( !geometries_.empty() ? geometries_.size() : pGeometries_.size() ) ), pGeometries( geometries_.data() ), ppGeometries( pGeometries_.data() ), scratchData( scratchData_ )
+    {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+      VULKAN_HPP_ASSERT( ( !geometries_.empty() + !pGeometries_.empty() ) <= 1);
+#else
+      if ( 1 < ( !geometries_.empty() + !pGeometries_.empty() ) )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING"::AccelerationStructureBuildGeometryInfoKHR::AccelerationStructureBuildGeometryInfoKHR: 1 < ( !geometries_.empty() + !pGeometries_.empty() )" );
+      }
+#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    AccelerationStructureBuildGeometryInfoKHR & operator=( AccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    AccelerationStructureBuildGeometryInfoKHR & operator=( VkAccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable member setters.
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setType( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ ) VULKAN_HPP_NOEXCEPT
+    {
+      type = type_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setMode( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mode = mode_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setSrcAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcAccelerationStructure = srcAccelerationStructure_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setDstAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstAccelerationStructure = dstAccelerationStructure_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setGeometryCount( uint32_t geometryCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      geometryCount = geometryCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setPGeometries( const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * pGeometries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pGeometries = pGeometries_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Array-proxy setter: fills geometryCount and pGeometries together.
+    AccelerationStructureBuildGeometryInfoKHR & setGeometries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR> const & geometries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      geometryCount = static_cast<uint32_t>( geometries_.size() );
+      pGeometries = geometries_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setPpGeometries( const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * const * ppGeometries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      ppGeometries = ppGeometries_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // NOTE: unlike the raw-pointer setPGeometries overload above, this proxy-of-pointers
+    // overload fills ppGeometries (and geometryCount), not pGeometries.
+    AccelerationStructureBuildGeometryInfoKHR & setPGeometries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * const> const & pGeometries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      geometryCount = static_cast<uint32_t>( pGeometries_.size() );
+      ppGeometries = pGeometries_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setScratchData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & scratchData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      scratchData = scratchData_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Reinterpret-cast conversions to the native C struct type.
+    operator VkAccelerationStructureBuildGeometryInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR*>( this );
+    }
+
+    operator VkAccelerationStructureBuildGeometryInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAccelerationStructureBuildGeometryInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR const &, VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR const &, VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR const &, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR const &, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * const &, const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * const * const &, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, type, flags, mode, srcAccelerationStructure, dstAccelerationStructure, geometryCount, pGeometries, ppGeometries, scratchData );
+    }
+#endif
+
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureBuildGeometryInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel;
+    VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags = {};
+    VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR mode = VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR::eBuild;
+    VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure = {};
+    VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure = {};
+    uint32_t geometryCount = {};
+    const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * pGeometries = {};
+    const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * const * ppGeometries = {};
+    VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData = {};
+
+  };
+
+  // Maps the eAccelerationStructureBuildGeometryInfoKHR StructureType value back to its C++ struct type.
+  template <>
+  struct CppType<StructureType, StructureType::eAccelerationStructureBuildGeometryInfoKHR>
+  {
+    using Type = AccelerationStructureBuildGeometryInfoKHR;
+  };
+
+  // C++ wrapper for VkAccelerationStructureBuildRangeInfoKHR: primitive count and
+  // data offsets for one geometry of a build. Plain struct — no sType/pNext chain,
+  // so it also provides ==/!= (or <=>) directly over its four uint32_t members.
+  struct AccelerationStructureBuildRangeInfoKHR
+  {
+    using NativeType = VkAccelerationStructureBuildRangeInfoKHR;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR AccelerationStructureBuildRangeInfoKHR(uint32_t primitiveCount_ = {}, uint32_t primitiveOffset_ = {}, uint32_t firstVertex_ = {}, uint32_t transformOffset_ = {}) VULKAN_HPP_NOEXCEPT
+    : primitiveCount( primitiveCount_ ), primitiveOffset( primitiveOffset_ ), firstVertex( firstVertex_ ), transformOffset( transformOffset_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR AccelerationStructureBuildRangeInfoKHR( AccelerationStructureBuildRangeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AccelerationStructureBuildRangeInfoKHR( VkAccelerationStructureBuildRangeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : AccelerationStructureBuildRangeInfoKHR( *reinterpret_cast<AccelerationStructureBuildRangeInfoKHR const *>( &rhs ) )
+    {}
+
+
+    AccelerationStructureBuildRangeInfoKHR & operator=( AccelerationStructureBuildRangeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    AccelerationStructureBuildRangeInfoKHR & operator=( VkAccelerationStructureBuildRangeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable member setters.
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildRangeInfoKHR & setPrimitiveCount( uint32_t primitiveCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      primitiveCount = primitiveCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildRangeInfoKHR & setPrimitiveOffset( uint32_t primitiveOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      primitiveOffset = primitiveOffset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildRangeInfoKHR & setFirstVertex( uint32_t firstVertex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      firstVertex = firstVertex_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildRangeInfoKHR & setTransformOffset( uint32_t transformOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      transformOffset = transformOffset_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Reinterpret-cast conversions to the native C struct type.
+    operator VkAccelerationStructureBuildRangeInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR*>( this );
+    }
+
+    operator VkAccelerationStructureBuildRangeInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAccelerationStructureBuildRangeInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( primitiveCount, primitiveOffset, firstVertex, transformOffset );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( AccelerationStructureBuildRangeInfoKHR const & ) const = default;
+#else
+    bool operator==( AccelerationStructureBuildRangeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( primitiveCount == rhs.primitiveCount )
+          && ( primitiveOffset == rhs.primitiveOffset )
+          && ( firstVertex == rhs.firstVertex )
+          && ( transformOffset == rhs.transformOffset );
+#endif
+    }
+
+    bool operator!=( AccelerationStructureBuildRangeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    uint32_t primitiveCount = {};
+    uint32_t primitiveOffset = {};
+    uint32_t firstVertex = {};
+    uint32_t transformOffset = {};
+
+  };
+
+  // C++ wrapper for VkAccelerationStructureBuildSizesInfoKHR: holds the acceleration-structure,
+  // update-scratch and build-scratch sizes for a build. Layout-compatible with the C struct.
+  struct AccelerationStructureBuildSizesInfoKHR
+  {
+    using NativeType = VkAccelerationStructureBuildSizesInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureBuildSizesInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR AccelerationStructureBuildSizesInfoKHR(VULKAN_HPP_NAMESPACE::DeviceSize accelerationStructureSize_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize updateScratchSize_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize buildScratchSize_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), accelerationStructureSize( accelerationStructureSize_ ), updateScratchSize( updateScratchSize_ ), buildScratchSize( buildScratchSize_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR AccelerationStructureBuildSizesInfoKHR( AccelerationStructureBuildSizesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AccelerationStructureBuildSizesInfoKHR( VkAccelerationStructureBuildSizesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : AccelerationStructureBuildSizesInfoKHR( *reinterpret_cast<AccelerationStructureBuildSizesInfoKHR const *>( &rhs ) )
+    {}
+
+
+    AccelerationStructureBuildSizesInfoKHR & operator=( AccelerationStructureBuildSizesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    AccelerationStructureBuildSizesInfoKHR & operator=( VkAccelerationStructureBuildSizesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable member setters.
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildSizesInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildSizesInfoKHR & setAccelerationStructureSize( VULKAN_HPP_NAMESPACE::DeviceSize accelerationStructureSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      accelerationStructureSize = accelerationStructureSize_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildSizesInfoKHR & setUpdateScratchSize( VULKAN_HPP_NAMESPACE::DeviceSize updateScratchSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      updateScratchSize = updateScratchSize_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildSizesInfoKHR & setBuildScratchSize( VULKAN_HPP_NAMESPACE::DeviceSize buildScratchSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buildScratchSize = buildScratchSize_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Reinterpret-cast conversions to the native C struct type.
+    operator VkAccelerationStructureBuildSizesInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAccelerationStructureBuildSizesInfoKHR*>( this );
+    }
+
+    operator VkAccelerationStructureBuildSizesInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAccelerationStructureBuildSizesInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, accelerationStructureSize, updateScratchSize, buildScratchSize );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( AccelerationStructureBuildSizesInfoKHR const & ) const = default;
+#else
+    bool operator==( AccelerationStructureBuildSizesInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( accelerationStructureSize == rhs.accelerationStructureSize )
+          && ( updateScratchSize == rhs.updateScratchSize )
+          && ( buildScratchSize == rhs.buildScratchSize );
+#endif
+    }
+
+    bool operator!=( AccelerationStructureBuildSizesInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureBuildSizesInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize accelerationStructureSize = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize updateScratchSize = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize buildScratchSize = {};
+
+  };
+
+  // Maps the eAccelerationStructureBuildSizesInfoKHR StructureType value back to its C++ struct type.
+  template <>
+  struct CppType<StructureType, StructureType::eAccelerationStructureBuildSizesInfoKHR>
+  {
+    using Type = AccelerationStructureBuildSizesInfoKHR;
+  };
+
+  // C++ wrapper for VkAccelerationStructureCaptureDescriptorDataInfoEXT: identifies an
+  // acceleration structure by either a KHR or an NV handle for descriptor-data capture.
+  // Layout-compatible with the C struct.
+  struct AccelerationStructureCaptureDescriptorDataInfoEXT
+  {
+    using NativeType = VkAccelerationStructureCaptureDescriptorDataInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureCaptureDescriptorDataInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR AccelerationStructureCaptureDescriptorDataInfoEXT(VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ = {}, VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructureNV_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), accelerationStructure( accelerationStructure_ ), accelerationStructureNV( accelerationStructureNV_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR AccelerationStructureCaptureDescriptorDataInfoEXT( AccelerationStructureCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AccelerationStructureCaptureDescriptorDataInfoEXT( VkAccelerationStructureCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : AccelerationStructureCaptureDescriptorDataInfoEXT( *reinterpret_cast<AccelerationStructureCaptureDescriptorDataInfoEXT const *>( &rhs ) )
+    {}
+
+
+    AccelerationStructureCaptureDescriptorDataInfoEXT & operator=( AccelerationStructureCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    AccelerationStructureCaptureDescriptorDataInfoEXT & operator=( VkAccelerationStructureCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable member setters.
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCaptureDescriptorDataInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCaptureDescriptorDataInfoEXT & setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ ) VULKAN_HPP_NOEXCEPT
+    {
+      accelerationStructure = accelerationStructure_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCaptureDescriptorDataInfoEXT & setAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructureNV_ ) VULKAN_HPP_NOEXCEPT
+    {
+      accelerationStructureNV = accelerationStructureNV_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Reinterpret-cast conversions to the native C struct type.
+    operator VkAccelerationStructureCaptureDescriptorDataInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAccelerationStructureCaptureDescriptorDataInfoEXT*>( this );
+    }
+
+    operator VkAccelerationStructureCaptureDescriptorDataInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAccelerationStructureCaptureDescriptorDataInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR const &, VULKAN_HPP_NAMESPACE::AccelerationStructureNV const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, accelerationStructure, accelerationStructureNV );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( AccelerationStructureCaptureDescriptorDataInfoEXT const & ) const = default;
+#else
+    bool operator==( AccelerationStructureCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( accelerationStructure == rhs.accelerationStructure )
+          && ( accelerationStructureNV == rhs.accelerationStructureNV );
+#endif
+    }
+
+    bool operator!=( AccelerationStructureCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureCaptureDescriptorDataInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure = {};
+    VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructureNV = {};
+
+  };
+
+  // Maps the eAccelerationStructureCaptureDescriptorDataInfoEXT StructureType value back to its C++ struct type.
+  template <>
+  struct CppType<StructureType, StructureType::eAccelerationStructureCaptureDescriptorDataInfoEXT>
+  {
+    using Type = AccelerationStructureCaptureDescriptorDataInfoEXT;
+  };
+
+  // C++ wrapper for VkAccelerationStructureCreateInfoKHR: creation parameters for an
+  // acceleration structure (backing buffer plus offset/size, type, create flags, and an
+  // optional deviceAddress). Layout-compatible with the C struct.
+  struct AccelerationStructureCreateInfoKHR
+  {
+    using NativeType = VkAccelerationStructureCreateInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureCreateInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoKHR(VULKAN_HPP_NAMESPACE::AccelerationStructureCreateFlagsKHR createFlags_ = {}, VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel, VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), createFlags( createFlags_ ), buffer( buffer_ ), offset( offset_ ), size( size_ ), type( type_ ), deviceAddress( deviceAddress_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoKHR( AccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AccelerationStructureCreateInfoKHR( VkAccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : AccelerationStructureCreateInfoKHR( *reinterpret_cast<AccelerationStructureCreateInfoKHR const *>( &rhs ) )
+    {}
+
+
+    AccelerationStructureCreateInfoKHR & operator=( AccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    AccelerationStructureCreateInfoKHR & operator=( VkAccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable member setters.
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & setCreateFlags( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateFlagsKHR createFlags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      createFlags = createFlags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
+    {
+      size = size_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & setType( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ ) VULKAN_HPP_NOEXCEPT
+    {
+      type = type_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceAddress = deviceAddress_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Reinterpret-cast conversions to the native C struct type.
+    operator VkAccelerationStructureCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAccelerationStructureCreateInfoKHR*>( this );
+    }
+
+    operator VkAccelerationStructureCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAccelerationStructureCreateInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::AccelerationStructureCreateFlagsKHR const &, VULKAN_HPP_NAMESPACE::Buffer const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, createFlags, buffer, offset, size, type, deviceAddress );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( AccelerationStructureCreateInfoKHR const & ) const = default;
+#else
+    bool operator==( AccelerationStructureCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( createFlags == rhs.createFlags )
+          && ( buffer == rhs.buffer )
+          && ( offset == rhs.offset )
+          && ( size == rhs.size )
+          && ( type == rhs.type )
+          && ( deviceAddress == rhs.deviceAddress );
+#endif
+    }
+
+    bool operator!=( AccelerationStructureCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureCreateInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::AccelerationStructureCreateFlagsKHR createFlags = {};
+    VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+    VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel;
+    VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {};
+
+  };
+
+  // Maps the eAccelerationStructureCreateInfoKHR StructureType value back to its C++ struct type.
+  template <>
+  struct CppType<StructureType, StructureType::eAccelerationStructureCreateInfoKHR>
+  {
+    using Type = AccelerationStructureCreateInfoKHR;
+  };
+
+  // C++ wrapper for VkGeometryTrianglesNV: triangle geometry described by
+  // vertex/index/transform buffers plus their offsets, counts and formats.
+  // Layout-compatible with the C struct — the conversion operators below
+  // reinterpret_cast between the two representations.
+  struct GeometryTrianglesNV
+  {
+    using NativeType = VkGeometryTrianglesNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeometryTrianglesNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR GeometryTrianglesNV(VULKAN_HPP_NAMESPACE::Buffer vertexData_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize vertexOffset_ = {}, uint32_t vertexCount_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ = {}, VULKAN_HPP_NAMESPACE::Format vertexFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::Buffer indexData_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize indexOffset_ = {}, uint32_t indexCount_ = {}, VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16, VULKAN_HPP_NAMESPACE::Buffer transformData_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize transformOffset_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), vertexData( vertexData_ ), vertexOffset( vertexOffset_ ), vertexCount( vertexCount_ ), vertexStride( vertexStride_ ), vertexFormat( vertexFormat_ ), indexData( indexData_ ), indexOffset( indexOffset_ ), indexCount( indexCount_ ), indexType( indexType_ ), transformData( transformData_ ), transformOffset( transformOffset_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR GeometryTrianglesNV( GeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the layout-identical wrapper.
+    GeometryTrianglesNV( VkGeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : GeometryTrianglesNV( *reinterpret_cast<GeometryTrianglesNV const *>( &rhs ) )
+    {}
+
+
+    GeometryTrianglesNV & operator=( GeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    GeometryTrianglesNV & operator=( VkGeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeometryTrianglesNV const *>( &rhs );
+      return *this;
+    }
+
+    // Fluent setters: each assigns one member and returns *this for chaining.
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setVertexData( VULKAN_HPP_NAMESPACE::Buffer vertexData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexData = vertexData_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setVertexOffset( VULKAN_HPP_NAMESPACE::DeviceSize vertexOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexOffset = vertexOffset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setVertexCount( uint32_t vertexCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexCount = vertexCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setVertexStride( VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexStride = vertexStride_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setVertexFormat( VULKAN_HPP_NAMESPACE::Format vertexFormat_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexFormat = vertexFormat_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setIndexData( VULKAN_HPP_NAMESPACE::Buffer indexData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      indexData = indexData_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setIndexOffset( VULKAN_HPP_NAMESPACE::DeviceSize indexOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      indexOffset = indexOffset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setIndexCount( uint32_t indexCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      indexCount = indexCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      indexType = indexType_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setTransformData( VULKAN_HPP_NAMESPACE::Buffer transformData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      transformData = transformData_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setTransformOffset( VULKAN_HPP_NAMESPACE::DeviceSize transformOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      transformOffset = transformOffset_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversion to the C struct (valid because the layouts match).
+    operator VkGeometryTrianglesNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkGeometryTrianglesNV*>( this );
+    }
+
+    operator VkGeometryTrianglesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkGeometryTrianglesNV*>( this );
+    }
+
+    // Optional reflection: expose all members as a tuple of const references.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Buffer const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::Buffer const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, uint32_t const &, VULKAN_HPP_NAMESPACE::IndexType const &, VULKAN_HPP_NAMESPACE::Buffer const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, vertexData, vertexOffset, vertexCount, vertexStride, vertexFormat, indexData, indexOffset, indexCount, indexType, transformData, transformOffset );
+    }
+#endif
+
+
+    // Memberwise comparison; pNext is compared as a raw pointer, not deeply.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( GeometryTrianglesNV const & ) const = default;
+#else
+    bool operator==( GeometryTrianglesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( vertexData == rhs.vertexData )
+          && ( vertexOffset == rhs.vertexOffset )
+          && ( vertexCount == rhs.vertexCount )
+          && ( vertexStride == rhs.vertexStride )
+          && ( vertexFormat == rhs.vertexFormat )
+          && ( indexData == rhs.indexData )
+          && ( indexOffset == rhs.indexOffset )
+          && ( indexCount == rhs.indexCount )
+          && ( indexType == rhs.indexType )
+          && ( transformData == rhs.transformData )
+          && ( transformOffset == rhs.transformOffset );
+#endif
+    }
+
+    bool operator!=( GeometryTrianglesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeometryTrianglesNV;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Buffer vertexData = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize vertexOffset = {};
+    uint32_t vertexCount = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize vertexStride = {};
+    VULKAN_HPP_NAMESPACE::Format vertexFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    VULKAN_HPP_NAMESPACE::Buffer indexData = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize indexOffset = {};
+    uint32_t indexCount = {};
+    VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16;
+    VULKAN_HPP_NAMESPACE::Buffer transformData = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize transformOffset = {};
+
+  };
+
+  // Trait specialization associating StructureType::eGeometryTrianglesNV with its C++ struct type.
+  template <>
+  struct CppType<StructureType, StructureType::eGeometryTrianglesNV>
+  {
+    using Type = GeometryTrianglesNV;
+  };
+
+  // C++ wrapper for VkGeometryAABBNV: axis-aligned bounding-box geometry
+  // stored in a buffer (aabbData) with a count, per-element stride and a
+  // starting offset. Layout-compatible with the C struct.
+  struct GeometryAABBNV
+  {
+    using NativeType = VkGeometryAABBNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeometryAabbNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR GeometryAABBNV(VULKAN_HPP_NAMESPACE::Buffer aabbData_ = {}, uint32_t numAABBs_ = {}, uint32_t stride_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), aabbData( aabbData_ ), numAABBs( numAABBs_ ), stride( stride_ ), offset( offset_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR GeometryAABBNV( GeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the layout-identical wrapper.
+    GeometryAABBNV( VkGeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : GeometryAABBNV( *reinterpret_cast<GeometryAABBNV const *>( &rhs ) )
+    {}
+
+
+    GeometryAABBNV & operator=( GeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    GeometryAABBNV & operator=( VkGeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeometryAABBNV const *>( &rhs );
+      return *this;
+    }
+
+    // Fluent setters: each assigns one member and returns *this for chaining.
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 GeometryAABBNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GeometryAABBNV & setAabbData( VULKAN_HPP_NAMESPACE::Buffer aabbData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      aabbData = aabbData_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GeometryAABBNV & setNumAABBs( uint32_t numAABBs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      numAABBs = numAABBs_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GeometryAABBNV & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stride = stride_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GeometryAABBNV & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversion to the C struct (valid because the layouts match).
+    operator VkGeometryAABBNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkGeometryAABBNV*>( this );
+    }
+
+    operator VkGeometryAABBNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkGeometryAABBNV*>( this );
+    }
+
+    // Optional reflection: expose all members as a tuple of const references.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Buffer const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, aabbData, numAABBs, stride, offset );
+    }
+#endif
+
+
+    // Memberwise comparison; pNext is compared as a raw pointer, not deeply.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( GeometryAABBNV const & ) const = default;
+#else
+    bool operator==( GeometryAABBNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( aabbData == rhs.aabbData )
+          && ( numAABBs == rhs.numAABBs )
+          && ( stride == rhs.stride )
+          && ( offset == rhs.offset );
+#endif
+    }
+
+    bool operator!=( GeometryAABBNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeometryAabbNV;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Buffer aabbData = {};
+    uint32_t numAABBs = {};
+    uint32_t stride = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
+
+  };
+
+  // Trait specialization associating StructureType::eGeometryAabbNV with its C++ struct type.
+  template <>
+  struct CppType<StructureType, StructureType::eGeometryAabbNV>
+  {
+    using Type = GeometryAABBNV;
+  };
+
+  // C++ wrapper for VkGeometryDataNV: carries both a triangle description and
+  // an AABB description. Note this struct has no sType/pNext members (it is
+  // embedded inside GeometryNV rather than chained), hence no structureType
+  // constant or CppType specialization.
+  struct GeometryDataNV
+  {
+    using NativeType = VkGeometryDataNV;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR GeometryDataNV(VULKAN_HPP_NAMESPACE::GeometryTrianglesNV triangles_ = {}, VULKAN_HPP_NAMESPACE::GeometryAABBNV aabbs_ = {}) VULKAN_HPP_NOEXCEPT
+    : triangles( triangles_ ), aabbs( aabbs_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR GeometryDataNV( GeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the layout-identical wrapper.
+    GeometryDataNV( VkGeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : GeometryDataNV( *reinterpret_cast<GeometryDataNV const *>( &rhs ) )
+    {}
+
+
+    GeometryDataNV & operator=( GeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    GeometryDataNV & operator=( VkGeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeometryDataNV const *>( &rhs );
+      return *this;
+    }
+
+    // Fluent setters: each assigns one member and returns *this for chaining.
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 GeometryDataNV & setTriangles( VULKAN_HPP_NAMESPACE::GeometryTrianglesNV const & triangles_ ) VULKAN_HPP_NOEXCEPT
+    {
+      triangles = triangles_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GeometryDataNV & setAabbs( VULKAN_HPP_NAMESPACE::GeometryAABBNV const & aabbs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      aabbs = aabbs_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversion to the C struct (valid because the layouts match).
+    operator VkGeometryDataNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkGeometryDataNV*>( this );
+    }
+
+    operator VkGeometryDataNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkGeometryDataNV*>( this );
+    }
+
+    // Optional reflection: expose all members as a tuple of const references.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::GeometryTrianglesNV const &, VULKAN_HPP_NAMESPACE::GeometryAABBNV const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( triangles, aabbs );
+    }
+#endif
+
+
+    // Memberwise comparison of the two embedded geometry descriptions.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( GeometryDataNV const & ) const = default;
+#else
+    bool operator==( GeometryDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( triangles == rhs.triangles )
+          && ( aabbs == rhs.aabbs );
+#endif
+    }
+
+    bool operator!=( GeometryDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::GeometryTrianglesNV triangles = {};
+    VULKAN_HPP_NAMESPACE::GeometryAABBNV aabbs = {};
+
+  };
+
+  // C++ wrapper for VkGeometryNV: one geometry entry selecting between
+  // triangles and AABBs (geometryType) and embedding both payloads via
+  // GeometryDataNV, plus per-geometry flags. Layout-compatible with the
+  // C struct.
+  struct GeometryNV
+  {
+    using NativeType = VkGeometryNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeometryNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR GeometryNV(VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles, VULKAN_HPP_NAMESPACE::GeometryDataNV geometry_ = {}, VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), geometryType( geometryType_ ), geometry( geometry_ ), flags( flags_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR GeometryNV( GeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the layout-identical wrapper.
+    GeometryNV( VkGeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : GeometryNV( *reinterpret_cast<GeometryNV const *>( &rhs ) )
+    {}
+
+
+    GeometryNV & operator=( GeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    GeometryNV & operator=( VkGeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeometryNV const *>( &rhs );
+      return *this;
+    }
+
+    // Fluent setters: each assigns one member and returns *this for chaining.
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 GeometryNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GeometryNV & setGeometryType( VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      geometryType = geometryType_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GeometryNV & setGeometry( VULKAN_HPP_NAMESPACE::GeometryDataNV const & geometry_ ) VULKAN_HPP_NOEXCEPT
+    {
+      geometry = geometry_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GeometryNV & setFlags( VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversion to the C struct (valid because the layouts match).
+    operator VkGeometryNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkGeometryNV*>( this );
+    }
+
+    operator VkGeometryNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkGeometryNV*>( this );
+    }
+
+    // Optional reflection: expose all members as a tuple of const references.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::GeometryTypeKHR const &, VULKAN_HPP_NAMESPACE::GeometryDataNV const &, VULKAN_HPP_NAMESPACE::GeometryFlagsKHR const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, geometryType, geometry, flags );
+    }
+#endif
+
+
+    // Memberwise comparison; pNext is compared as a raw pointer, not deeply.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( GeometryNV const & ) const = default;
+#else
+    bool operator==( GeometryNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( geometryType == rhs.geometryType )
+          && ( geometry == rhs.geometry )
+          && ( flags == rhs.flags );
+#endif
+    }
+
+    bool operator!=( GeometryNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeometryNV;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles;
+    VULKAN_HPP_NAMESPACE::GeometryDataNV geometry = {};
+    VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags = {};
+
+  };
+
+  // Trait specialization associating StructureType::eGeometryNV with its C++ struct type.
+  template <>
+  struct CppType<StructureType, StructureType::eGeometryNV>
+  {
+    using Type = GeometryNV;
+  };
+
+  // C++ wrapper for VkAccelerationStructureInfoNV: describes an acceleration
+  // structure build (type, flags, instance count, and an external array of
+  // GeometryNV referenced by geometryCount/pGeometries — the pointed-to
+  // geometries are NOT owned or copied by this struct).
+  struct AccelerationStructureInfoNV
+  {
+    using NativeType = VkAccelerationStructureInfoNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureInfoNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR AccelerationStructureInfoNV(VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type_ = {}, VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags_ = {}, uint32_t instanceCount_ = {}, uint32_t geometryCount_ = {}, const VULKAN_HPP_NAMESPACE::GeometryNV * pGeometries_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), type( type_ ), flags( flags_ ), instanceCount( instanceCount_ ), geometryCount( geometryCount_ ), pGeometries( pGeometries_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR AccelerationStructureInfoNV( AccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the layout-identical wrapper.
+    AccelerationStructureInfoNV( VkAccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : AccelerationStructureInfoNV( *reinterpret_cast<AccelerationStructureInfoNV const *>( &rhs ) )
+    {}
+
+    // Enhanced-mode convenience constructor: derives geometryCount/pGeometries
+    // from an ArrayProxy; the proxy must outlive any use of pGeometries.
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    AccelerationStructureInfoNV( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type_, VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags_, uint32_t instanceCount_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::GeometryNV> const & geometries_, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), type( type_ ), flags( flags_ ), instanceCount( instanceCount_ ), geometryCount( static_cast<uint32_t>( geometries_.size() ) ), pGeometries( geometries_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    AccelerationStructureInfoNV & operator=( AccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    AccelerationStructureInfoNV & operator=( VkAccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV const *>( &rhs );
+      return *this;
+    }
+
+    // Fluent setters: each assigns one member and returns *this for chaining.
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV & setType( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type_ ) VULKAN_HPP_NOEXCEPT
+    {
+      type = type_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV & setFlags( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV & setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      instanceCount = instanceCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV & setGeometryCount( uint32_t geometryCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      geometryCount = geometryCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV & setPGeometries( const VULKAN_HPP_NAMESPACE::GeometryNV * pGeometries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pGeometries = pGeometries_;
+      return *this;
+    }
+
+    // Sets both geometryCount and pGeometries from one ArrayProxy.
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    AccelerationStructureInfoNV & setGeometries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::GeometryNV> const & geometries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      geometryCount = static_cast<uint32_t>( geometries_.size() );
+      pGeometries = geometries_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversion to the C struct (valid because the layouts match).
+    operator VkAccelerationStructureInfoNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAccelerationStructureInfoNV*>( this );
+    }
+
+    operator VkAccelerationStructureInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAccelerationStructureInfoNV*>( this );
+    }
+
+    // Optional reflection: expose all members as a tuple of const references.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV const &, VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV const &, uint32_t const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::GeometryNV * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, type, flags, instanceCount, geometryCount, pGeometries );
+    }
+#endif
+
+
+    // Memberwise comparison; pNext and pGeometries are compared as raw
+    // pointers — the pointed-to geometry array contents are not compared.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( AccelerationStructureInfoNV const & ) const = default;
+#else
+    bool operator==( AccelerationStructureInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( type == rhs.type )
+          && ( flags == rhs.flags )
+          && ( instanceCount == rhs.instanceCount )
+          && ( geometryCount == rhs.geometryCount )
+          && ( pGeometries == rhs.pGeometries );
+#endif
+    }
+
+    bool operator!=( AccelerationStructureInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureInfoNV;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type = {};
+    VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags = {};
+    uint32_t instanceCount = {};
+    uint32_t geometryCount = {};
+    const VULKAN_HPP_NAMESPACE::GeometryNV * pGeometries = {};
+
+  };
+
+  // Trait specialization associating StructureType::eAccelerationStructureInfoNV with its C++ struct type.
+  template <>
+  struct CppType<StructureType, StructureType::eAccelerationStructureInfoNV>
+  {
+    using Type = AccelerationStructureInfoNV;
+  };
+
+  // C++ wrapper for VkAccelerationStructureCreateInfoNV: creation parameters
+  // for an NV acceleration structure (compactedSize plus an embedded
+  // AccelerationStructureInfoNV). Layout-compatible with the C struct.
+  struct AccelerationStructureCreateInfoNV
+  {
+    using NativeType = VkAccelerationStructureCreateInfoNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureCreateInfoNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoNV(VULKAN_HPP_NAMESPACE::DeviceSize compactedSize_ = {}, VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV info_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), compactedSize( compactedSize_ ), info( info_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoNV( AccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the layout-identical wrapper.
+    AccelerationStructureCreateInfoNV( VkAccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : AccelerationStructureCreateInfoNV( *reinterpret_cast<AccelerationStructureCreateInfoNV const *>( &rhs ) )
+    {}
+
+
+    AccelerationStructureCreateInfoNV & operator=( AccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    AccelerationStructureCreateInfoNV & operator=( VkAccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV const *>( &rhs );
+      return *this;
+    }
+
+    // Fluent setters: each assigns one member and returns *this for chaining.
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoNV & setCompactedSize( VULKAN_HPP_NAMESPACE::DeviceSize compactedSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      compactedSize = compactedSize_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoNV & setInfo( VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV const & info_ ) VULKAN_HPP_NOEXCEPT
+    {
+      info = info_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversion to the C struct (valid because the layouts match).
+    operator VkAccelerationStructureCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAccelerationStructureCreateInfoNV*>( this );
+    }
+
+    operator VkAccelerationStructureCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAccelerationStructureCreateInfoNV*>( this );
+    }
+
+    // Optional reflection: expose all members as a tuple of const references.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, compactedSize, info );
+    }
+#endif
+
+
+    // Memberwise comparison; pNext is compared as a raw pointer, not deeply.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( AccelerationStructureCreateInfoNV const & ) const = default;
+#else
+    bool operator==( AccelerationStructureCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( compactedSize == rhs.compactedSize )
+          && ( info == rhs.info );
+#endif
+    }
+
+    bool operator!=( AccelerationStructureCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureCreateInfoNV;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize compactedSize = {};
+    VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV info = {};
+
+  };
+
+  // Trait specialization associating StructureType::eAccelerationStructureCreateInfoNV with its C++ struct type.
+  template <>
+  struct CppType<StructureType, StructureType::eAccelerationStructureCreateInfoNV>
+  {
+    using Type = AccelerationStructureCreateInfoNV;
+  };
+
+  // C++ wrapper for VkAccelerationStructureDeviceAddressInfoKHR: identifies
+  // the acceleration structure whose device address is being queried.
+  // Layout-compatible with the C struct.
+  struct AccelerationStructureDeviceAddressInfoKHR
+  {
+    using NativeType = VkAccelerationStructureDeviceAddressInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureDeviceAddressInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR AccelerationStructureDeviceAddressInfoKHR(VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), accelerationStructure( accelerationStructure_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR AccelerationStructureDeviceAddressInfoKHR( AccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the layout-identical wrapper.
+    AccelerationStructureDeviceAddressInfoKHR( VkAccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : AccelerationStructureDeviceAddressInfoKHR( *reinterpret_cast<AccelerationStructureDeviceAddressInfoKHR const *>( &rhs ) )
+    {}
+
+
+    AccelerationStructureDeviceAddressInfoKHR & operator=( AccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    AccelerationStructureDeviceAddressInfoKHR & operator=( VkAccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    // Fluent setters: each assigns one member and returns *this for chaining.
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureDeviceAddressInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureDeviceAddressInfoKHR & setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ ) VULKAN_HPP_NOEXCEPT
+    {
+      accelerationStructure = accelerationStructure_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversion to the C struct (valid because the layouts match).
+    operator VkAccelerationStructureDeviceAddressInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR*>( this );
+    }
+
+    operator VkAccelerationStructureDeviceAddressInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAccelerationStructureDeviceAddressInfoKHR*>( this );
+    }
+
+    // Optional reflection: expose all members as a tuple of const references.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, accelerationStructure );
+    }
+#endif
+
+
+    // Memberwise comparison; pNext is compared as a raw pointer, not deeply.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( AccelerationStructureDeviceAddressInfoKHR const & ) const = default;
+#else
+    bool operator==( AccelerationStructureDeviceAddressInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( accelerationStructure == rhs.accelerationStructure );
+#endif
+    }
+
+    bool operator!=( AccelerationStructureDeviceAddressInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureDeviceAddressInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure = {};
+
+  };
+
+  // Trait specialization associating StructureType::eAccelerationStructureDeviceAddressInfoKHR with its C++ struct type.
+  template <>
+  struct CppType<StructureType, StructureType::eAccelerationStructureDeviceAddressInfoKHR>
+  {
+    using Type = AccelerationStructureDeviceAddressInfoKHR;
+  };
+
+  // C++ wrapper for VkAccelerationStructureGeometryMotionTrianglesDataNV.
+  // Holds sType/pNext plus a vertexData device-or-host address. The conversion
+  // operators below reinterpret_cast this object to/from the C struct, so the
+  // member layout must stay exactly that of the C struct.
+  struct AccelerationStructureGeometryMotionTrianglesDataNV
+  {
+    using NativeType = VkAccelerationStructureGeometryMotionTrianglesDataNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryMotionTrianglesDataNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Element-wise constructor; sType is fixed by its member initializer below.
+VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryMotionTrianglesDataNV(VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), vertexData( vertexData_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryMotionTrianglesDataNV( AccelerationStructureGeometryMotionTrianglesDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid because the layouts are identical.
+    AccelerationStructureGeometryMotionTrianglesDataNV( VkAccelerationStructureGeometryMotionTrianglesDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : AccelerationStructureGeometryMotionTrianglesDataNV( *reinterpret_cast<AccelerationStructureGeometryMotionTrianglesDataNV const *>( &rhs ) )
+    {}
+
+
+    AccelerationStructureGeometryMotionTrianglesDataNV & operator=( AccelerationStructureGeometryMotionTrianglesDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assignment from the C struct, again relying on identical layout.
+    AccelerationStructureGeometryMotionTrianglesDataNV & operator=( VkAccelerationStructureGeometryMotionTrianglesDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryMotionTrianglesDataNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters (builder style); each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryMotionTrianglesDataNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryMotionTrianglesDataNV & setVertexData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & vertexData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexData = vertexData_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views of this object as the underlying C struct.
+    operator VkAccelerationStructureGeometryMotionTrianglesDataNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAccelerationStructureGeometryMotionTrianglesDataNV*>( this );
+    }
+
+    operator VkAccelerationStructureGeometryMotionTrianglesDataNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAccelerationStructureGeometryMotionTrianglesDataNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Tuple-of-references view of all members, used by generic comparison code.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, vertexData );
+    }
+#endif
+
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryMotionTrianglesDataNV;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData = {};
+
+  };
+
+  // StructureType -> wrapper-type mapping for the struct defined above.
+  template <>
+  struct CppType<StructureType, StructureType::eAccelerationStructureGeometryMotionTrianglesDataNV>
+  {
+    using Type = AccelerationStructureGeometryMotionTrianglesDataNV;
+  };
+
+  // C++ wrapper for VkTransformMatrixKHR: a 3x4 row-major float matrix stored in
+  // an ArrayWrapper2D<float, 3, 4>. No sType/pNext — this is a plain data struct.
+  // The conversion operators reinterpret_cast to/from the C struct, so the layout
+  // must remain bit-identical to VkTransformMatrixKHR.
+  struct TransformMatrixKHR
+  {
+    using NativeType = VkTransformMatrixKHR;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Constructs from a nested std::array (3 rows of 4 floats).
+VULKAN_HPP_CONSTEXPR_14 TransformMatrixKHR(std::array<std::array<float,4>,3> const & matrix_ = {}) VULKAN_HPP_NOEXCEPT
+    : matrix( matrix_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 TransformMatrixKHR( TransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid because the layouts are identical.
+    TransformMatrixKHR( VkTransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : TransformMatrixKHR( *reinterpret_cast<TransformMatrixKHR const *>( &rhs ) )
+    {}
+
+
+    TransformMatrixKHR & operator=( TransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    TransformMatrixKHR & operator=( VkTransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TransformMatrixKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setter; returns *this.
+    VULKAN_HPP_CONSTEXPR_14 TransformMatrixKHR & setMatrix( std::array<std::array<float,4>,3> matrix_ ) VULKAN_HPP_NOEXCEPT
+    {
+      matrix = matrix_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views of this object as the underlying C struct.
+    operator VkTransformMatrixKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkTransformMatrixKHR*>( this );
+    }
+
+    operator VkTransformMatrixKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkTransformMatrixKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Tuple-of-references view of the single member, used by generic comparison code.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::ArrayWrapper2D<float, 3, 4> const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( matrix );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( TransformMatrixKHR const & ) const = default;
+#else
+    bool operator==( TransformMatrixKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( matrix == rhs.matrix );
+#endif
+    }
+
+    bool operator!=( TransformMatrixKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::ArrayWrapper2D<float, 3, 4> matrix = {};
+
+  };
+  // The NV and KHR transform-matrix types are identical.
+  using TransformMatrixNV = TransformMatrixKHR;
+
+  // C++ wrapper for VkAccelerationStructureInstanceKHR. Mirrors the C struct's
+  // packed bit-field layout (24/8/24/8 bits in two 32-bit words), so the
+  // reinterpret_cast conversions below stay valid. Note there is no sType/pNext:
+  // this is raw instance data consumed by the device.
+  struct AccelerationStructureInstanceKHR
+  {
+    using NativeType = VkAccelerationStructureInstanceKHR;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Element-wise constructor covering all members, including the bit-fields.
+VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR(VULKAN_HPP_NAMESPACE::TransformMatrixKHR transform_ = {}, uint32_t instanceCustomIndex_ = {}, uint32_t mask_ = {}, uint32_t instanceShaderBindingTableRecordOffset_ = {}, VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ = {}, uint64_t accelerationStructureReference_ = {}) VULKAN_HPP_NOEXCEPT
+    : transform( transform_ ), instanceCustomIndex( instanceCustomIndex_ ), mask( mask_ ), instanceShaderBindingTableRecordOffset( instanceShaderBindingTableRecordOffset_ ), flags( flags_ ), accelerationStructureReference( accelerationStructureReference_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR( AccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid because the layouts are identical.
+    AccelerationStructureInstanceKHR( VkAccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : AccelerationStructureInstanceKHR( *reinterpret_cast<AccelerationStructureInstanceKHR const *>( &rhs ) )
+    {}
+
+
+    AccelerationStructureInstanceKHR & operator=( AccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    AccelerationStructureInstanceKHR & operator=( VkAccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters (builder style); each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR & setTransform( VULKAN_HPP_NAMESPACE::TransformMatrixKHR const & transform_ ) VULKAN_HPP_NOEXCEPT
+    {
+      transform = transform_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR & setInstanceCustomIndex( uint32_t instanceCustomIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      instanceCustomIndex = instanceCustomIndex_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR & setMask( uint32_t mask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mask = mask_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR & setInstanceShaderBindingTableRecordOffset( uint32_t instanceShaderBindingTableRecordOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      instanceShaderBindingTableRecordOffset = instanceShaderBindingTableRecordOffset_;
+      return *this;
+    }
+
+    // Not constexpr: the flags bits are converted via reinterpret_cast because
+    // the member is the raw C bit-field type, not the C++ flags wrapper.
+    AccelerationStructureInstanceKHR & setFlags( VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = *reinterpret_cast<VkGeometryInstanceFlagsKHR*>(&flags_);
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR & setAccelerationStructureReference( uint64_t accelerationStructureReference_ ) VULKAN_HPP_NOEXCEPT
+    {
+      accelerationStructureReference = accelerationStructureReference_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views of this object as the underlying C struct.
+    operator VkAccelerationStructureInstanceKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAccelerationStructureInstanceKHR*>( this );
+    }
+
+    operator VkAccelerationStructureInstanceKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAccelerationStructureInstanceKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Tuple-of-references view of all members, used by generic comparison code.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::TransformMatrixKHR const &, uint32_t const &, uint32_t const &, uint32_t const &, VkGeometryInstanceFlagsKHR const &, uint64_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( transform, instanceCustomIndex, mask, instanceShaderBindingTableRecordOffset, flags, accelerationStructureReference );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( AccelerationStructureInstanceKHR const & ) const = default;
+#else
+    bool operator==( AccelerationStructureInstanceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( transform == rhs.transform )
+          && ( instanceCustomIndex == rhs.instanceCustomIndex )
+          && ( mask == rhs.mask )
+          && ( instanceShaderBindingTableRecordOffset == rhs.instanceShaderBindingTableRecordOffset )
+          && ( flags == rhs.flags )
+          && ( accelerationStructureReference == rhs.accelerationStructureReference );
+#endif
+    }
+
+    bool operator!=( AccelerationStructureInstanceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::TransformMatrixKHR transform = {};
+    // Bit-fields mirror the C struct's packing; unlike the other members they
+    // carry no default member initializers (set via the constructor instead).
+    uint32_t instanceCustomIndex : 24;
+    uint32_t mask : 8;
+    uint32_t instanceShaderBindingTableRecordOffset : 24;
+    VkGeometryInstanceFlagsKHR flags : 8;
+    uint64_t accelerationStructureReference = {};
+
+  };
+  // The NV and KHR instance types are identical.
+  using AccelerationStructureInstanceNV = AccelerationStructureInstanceKHR;
+
+  // C++ wrapper for VkAccelerationStructureMatrixMotionInstanceNV: like
+  // AccelerationStructureInstanceKHR but with two transforms (transformT0 and
+  // transformT1) instead of one. Bit-field packing mirrors the C struct so the
+  // reinterpret_cast conversions below stay valid.
+  struct AccelerationStructureMatrixMotionInstanceNV
+  {
+    using NativeType = VkAccelerationStructureMatrixMotionInstanceNV;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Element-wise constructor covering all members, including the bit-fields.
+VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV(VULKAN_HPP_NAMESPACE::TransformMatrixKHR transformT0_ = {}, VULKAN_HPP_NAMESPACE::TransformMatrixKHR transformT1_ = {}, uint32_t instanceCustomIndex_ = {}, uint32_t mask_ = {}, uint32_t instanceShaderBindingTableRecordOffset_ = {}, VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ = {}, uint64_t accelerationStructureReference_ = {}) VULKAN_HPP_NOEXCEPT
+    : transformT0( transformT0_ ), transformT1( transformT1_ ), instanceCustomIndex( instanceCustomIndex_ ), mask( mask_ ), instanceShaderBindingTableRecordOffset( instanceShaderBindingTableRecordOffset_ ), flags( flags_ ), accelerationStructureReference( accelerationStructureReference_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV( AccelerationStructureMatrixMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid because the layouts are identical.
+    AccelerationStructureMatrixMotionInstanceNV( VkAccelerationStructureMatrixMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : AccelerationStructureMatrixMotionInstanceNV( *reinterpret_cast<AccelerationStructureMatrixMotionInstanceNV const *>( &rhs ) )
+    {}
+
+
+    AccelerationStructureMatrixMotionInstanceNV & operator=( AccelerationStructureMatrixMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    AccelerationStructureMatrixMotionInstanceNV & operator=( VkAccelerationStructureMatrixMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureMatrixMotionInstanceNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters (builder style); each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV & setTransformT0( VULKAN_HPP_NAMESPACE::TransformMatrixKHR const & transformT0_ ) VULKAN_HPP_NOEXCEPT
+    {
+      transformT0 = transformT0_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV & setTransformT1( VULKAN_HPP_NAMESPACE::TransformMatrixKHR const & transformT1_ ) VULKAN_HPP_NOEXCEPT
+    {
+      transformT1 = transformT1_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV & setInstanceCustomIndex( uint32_t instanceCustomIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      instanceCustomIndex = instanceCustomIndex_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV & setMask( uint32_t mask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mask = mask_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV & setInstanceShaderBindingTableRecordOffset( uint32_t instanceShaderBindingTableRecordOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      instanceShaderBindingTableRecordOffset = instanceShaderBindingTableRecordOffset_;
+      return *this;
+    }
+
+    // Not constexpr: the flags bits are converted via reinterpret_cast because
+    // the member is the raw C bit-field type, not the C++ flags wrapper.
+    AccelerationStructureMatrixMotionInstanceNV & setFlags( VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = *reinterpret_cast<VkGeometryInstanceFlagsKHR*>(&flags_);
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV & setAccelerationStructureReference( uint64_t accelerationStructureReference_ ) VULKAN_HPP_NOEXCEPT
+    {
+      accelerationStructureReference = accelerationStructureReference_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views of this object as the underlying C struct.
+    operator VkAccelerationStructureMatrixMotionInstanceNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAccelerationStructureMatrixMotionInstanceNV*>( this );
+    }
+
+    operator VkAccelerationStructureMatrixMotionInstanceNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAccelerationStructureMatrixMotionInstanceNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Tuple-of-references view of all members, used by generic comparison code.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::TransformMatrixKHR const &, VULKAN_HPP_NAMESPACE::TransformMatrixKHR const &, uint32_t const &, uint32_t const &, uint32_t const &, VkGeometryInstanceFlagsKHR const &, uint64_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( transformT0, transformT1, instanceCustomIndex, mask, instanceShaderBindingTableRecordOffset, flags, accelerationStructureReference );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( AccelerationStructureMatrixMotionInstanceNV const & ) const = default;
+#else
+    bool operator==( AccelerationStructureMatrixMotionInstanceNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( transformT0 == rhs.transformT0 )
+          && ( transformT1 == rhs.transformT1 )
+          && ( instanceCustomIndex == rhs.instanceCustomIndex )
+          && ( mask == rhs.mask )
+          && ( instanceShaderBindingTableRecordOffset == rhs.instanceShaderBindingTableRecordOffset )
+          && ( flags == rhs.flags )
+          && ( accelerationStructureReference == rhs.accelerationStructureReference );
+#endif
+    }
+
+    bool operator!=( AccelerationStructureMatrixMotionInstanceNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::TransformMatrixKHR transformT0 = {};
+    VULKAN_HPP_NAMESPACE::TransformMatrixKHR transformT1 = {};
+    // Bit-fields mirror the C struct's packing; unlike the other members they
+    // carry no default member initializers (set via the constructor instead).
+    uint32_t instanceCustomIndex : 24;
+    uint32_t mask : 8;
+    uint32_t instanceShaderBindingTableRecordOffset : 24;
+    VkGeometryInstanceFlagsKHR flags : 8;
+    uint64_t accelerationStructureReference = {};
+
+  };
+
+  // C++ wrapper for VkAccelerationStructureMemoryRequirementsInfoNV.
+  // Holds sType/pNext, a memory-requirements type enum, and the acceleration
+  // structure handle being queried. Layout mirrors the C struct so the
+  // reinterpret_cast conversions below stay valid.
+  struct AccelerationStructureMemoryRequirementsInfoNV
+  {
+    using NativeType = VkAccelerationStructureMemoryRequirementsInfoNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureMemoryRequirementsInfoNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Element-wise constructor; sType is fixed by its member initializer below.
+VULKAN_HPP_CONSTEXPR AccelerationStructureMemoryRequirementsInfoNV(VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV::eObject, VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), type( type_ ), accelerationStructure( accelerationStructure_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR AccelerationStructureMemoryRequirementsInfoNV( AccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid because the layouts are identical.
+    AccelerationStructureMemoryRequirementsInfoNV( VkAccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : AccelerationStructureMemoryRequirementsInfoNV( *reinterpret_cast<AccelerationStructureMemoryRequirementsInfoNV const *>( &rhs ) )
+    {}
+
+
+    AccelerationStructureMemoryRequirementsInfoNV & operator=( AccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    AccelerationStructureMemoryRequirementsInfoNV & operator=( VkAccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters (builder style); each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMemoryRequirementsInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMemoryRequirementsInfoNV & setType( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type_ ) VULKAN_HPP_NOEXCEPT
+    {
+      type = type_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMemoryRequirementsInfoNV & setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ ) VULKAN_HPP_NOEXCEPT
+    {
+      accelerationStructure = accelerationStructure_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views of this object as the underlying C struct.
+    operator VkAccelerationStructureMemoryRequirementsInfoNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV*>( this );
+    }
+
+    operator VkAccelerationStructureMemoryRequirementsInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAccelerationStructureMemoryRequirementsInfoNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Tuple-of-references view of all members, used by generic comparison code.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV const &, VULKAN_HPP_NAMESPACE::AccelerationStructureNV const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, type, accelerationStructure );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( AccelerationStructureMemoryRequirementsInfoNV const & ) const = default;
+#else
+    bool operator==( AccelerationStructureMemoryRequirementsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( type == rhs.type )
+          && ( accelerationStructure == rhs.accelerationStructure );
+#endif
+    }
+
+    bool operator!=( AccelerationStructureMemoryRequirementsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureMemoryRequirementsInfoNV;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type = VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV::eObject;
+    VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure = {};
+
+  };
+
+  // StructureType -> wrapper-type mapping for the struct defined above.
+  template <>
+  struct CppType<StructureType, StructureType::eAccelerationStructureMemoryRequirementsInfoNV>
+  {
+    using Type = AccelerationStructureMemoryRequirementsInfoNV;
+  };
+
+  // C++ wrapper for VkAccelerationStructureMotionInfoNV. Holds sType/pNext,
+  // a maxInstances count, and motion-info flags. Layout mirrors the C struct
+  // so the reinterpret_cast conversions below stay valid.
+  struct AccelerationStructureMotionInfoNV
+  {
+    using NativeType = VkAccelerationStructureMotionInfoNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureMotionInfoNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Element-wise constructor; sType is fixed by its member initializer below.
+VULKAN_HPP_CONSTEXPR AccelerationStructureMotionInfoNV(uint32_t maxInstances_ = {}, VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoFlagsNV flags_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), maxInstances( maxInstances_ ), flags( flags_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR AccelerationStructureMotionInfoNV( AccelerationStructureMotionInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid because the layouts are identical.
+    AccelerationStructureMotionInfoNV( VkAccelerationStructureMotionInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : AccelerationStructureMotionInfoNV( *reinterpret_cast<AccelerationStructureMotionInfoNV const *>( &rhs ) )
+    {}
+
+
+    AccelerationStructureMotionInfoNV & operator=( AccelerationStructureMotionInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    AccelerationStructureMotionInfoNV & operator=( VkAccelerationStructureMotionInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters (builder style); each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInfoNV & setMaxInstances( uint32_t maxInstances_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxInstances = maxInstances_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInfoNV & setFlags( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views of this object as the underlying C struct.
+    operator VkAccelerationStructureMotionInfoNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAccelerationStructureMotionInfoNV*>( this );
+    }
+
+    operator VkAccelerationStructureMotionInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAccelerationStructureMotionInfoNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Tuple-of-references view of all members, used by generic comparison code.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoFlagsNV const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, maxInstances, flags );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( AccelerationStructureMotionInfoNV const & ) const = default;
+#else
+    bool operator==( AccelerationStructureMotionInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxInstances == rhs.maxInstances )
+          && ( flags == rhs.flags );
+#endif
+    }
+
+    bool operator!=( AccelerationStructureMotionInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureMotionInfoNV;
+    const void * pNext = {};
+    uint32_t maxInstances = {};
+    VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoFlagsNV flags = {};
+
+  };
+
+  // StructureType -> wrapper-type mapping for the struct defined above.
+  template <>
+  struct CppType<StructureType, StructureType::eAccelerationStructureMotionInfoNV>
+  {
+    using Type = AccelerationStructureMotionInfoNV;
+  };
+
+  // C++ wrapper for VkSRTDataNV: sixteen plain float components of a
+  // scale/rotation/translation transform (scale sx/sy/sz, shear a/b/c,
+  // pivot pvx/pvy/pvz, quaternion qx/qy/qz/qw, translation tx/ty/tz —
+  // names taken verbatim from the C API). No sType/pNext. Layout mirrors
+  // the C struct so the reinterpret_cast conversions below stay valid.
+  struct SRTDataNV
+  {
+    using NativeType = VkSRTDataNV;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Element-wise constructor over all sixteen components.
+VULKAN_HPP_CONSTEXPR SRTDataNV(float sx_ = {}, float a_ = {}, float b_ = {}, float pvx_ = {}, float sy_ = {}, float c_ = {}, float pvy_ = {}, float sz_ = {}, float pvz_ = {}, float qx_ = {}, float qy_ = {}, float qz_ = {}, float qw_ = {}, float tx_ = {}, float ty_ = {}, float tz_ = {}) VULKAN_HPP_NOEXCEPT
+    : sx( sx_ ), a( a_ ), b( b_ ), pvx( pvx_ ), sy( sy_ ), c( c_ ), pvy( pvy_ ), sz( sz_ ), pvz( pvz_ ), qx( qx_ ), qy( qy_ ), qz( qz_ ), qw( qw_ ), tx( tx_ ), ty( ty_ ), tz( tz_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SRTDataNV( SRTDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid because the layouts are identical.
+    SRTDataNV( VkSRTDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SRTDataNV( *reinterpret_cast<SRTDataNV const *>( &rhs ) )
+    {}
+
+
+    SRTDataNV & operator=( SRTDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SRTDataNV & operator=( VkSRTDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SRTDataNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters (builder style), one per component; each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setSx( float sx_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sx = sx_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setA( float a_ ) VULKAN_HPP_NOEXCEPT
+    {
+      a = a_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setB( float b_ ) VULKAN_HPP_NOEXCEPT
+    {
+      b = b_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setPvx( float pvx_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pvx = pvx_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setSy( float sy_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sy = sy_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setC( float c_ ) VULKAN_HPP_NOEXCEPT
+    {
+      c = c_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setPvy( float pvy_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pvy = pvy_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setSz( float sz_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sz = sz_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setPvz( float pvz_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pvz = pvz_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setQx( float qx_ ) VULKAN_HPP_NOEXCEPT
+    {
+      qx = qx_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setQy( float qy_ ) VULKAN_HPP_NOEXCEPT
+    {
+      qy = qy_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setQz( float qz_ ) VULKAN_HPP_NOEXCEPT
+    {
+      qz = qz_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setQw( float qw_ ) VULKAN_HPP_NOEXCEPT
+    {
+      qw = qw_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setTx( float tx_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tx = tx_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setTy( float ty_ ) VULKAN_HPP_NOEXCEPT
+    {
+      ty = ty_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setTz( float tz_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tz = tz_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views of this object as the underlying C struct.
+    operator VkSRTDataNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSRTDataNV*>( this );
+    }
+
+    operator VkSRTDataNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSRTDataNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Tuple-of-references view of all members, used by generic comparison code.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<float const &, float const &, float const &, float const &, float const &, float const &, float const &, float const &, float const &, float const &, float const &, float const &, float const &, float const &, float const &, float const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sx, a, b, pvx, sy, c, pvy, sz, pvz, qx, qy, qz, qw, tx, ty, tz );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SRTDataNV const & ) const = default;
+#else
+    // Note: exact float equality, as generated for all Vulkan-Hpp structs.
+    bool operator==( SRTDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sx == rhs.sx )
+          && ( a == rhs.a )
+          && ( b == rhs.b )
+          && ( pvx == rhs.pvx )
+          && ( sy == rhs.sy )
+          && ( c == rhs.c )
+          && ( pvy == rhs.pvy )
+          && ( sz == rhs.sz )
+          && ( pvz == rhs.pvz )
+          && ( qx == rhs.qx )
+          && ( qy == rhs.qy )
+          && ( qz == rhs.qz )
+          && ( qw == rhs.qw )
+          && ( tx == rhs.tx )
+          && ( ty == rhs.ty )
+          && ( tz == rhs.tz );
+#endif
+    }
+
+    bool operator!=( SRTDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    float sx = {};
+    float a = {};
+    float b = {};
+    float pvx = {};
+    float sy = {};
+    float c = {};
+    float pvy = {};
+    float sz = {};
+    float pvz = {};
+    float qx = {};
+    float qy = {};
+    float qz = {};
+    float qw = {};
+    float tx = {};
+    float ty = {};
+    float tz = {};
+
+  };
+
+  struct AccelerationStructureSRTMotionInstanceNV
+  {
+    using NativeType = VkAccelerationStructureSRTMotionInstanceNV;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR AccelerationStructureSRTMotionInstanceNV(VULKAN_HPP_NAMESPACE::SRTDataNV transformT0_ = {}, VULKAN_HPP_NAMESPACE::SRTDataNV transformT1_ = {}, uint32_t instanceCustomIndex_ = {}, uint32_t mask_ = {}, uint32_t instanceShaderBindingTableRecordOffset_ = {}, VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ = {}, uint64_t accelerationStructureReference_ = {}) VULKAN_HPP_NOEXCEPT
+    : transformT0( transformT0_ ), transformT1( transformT1_ ), instanceCustomIndex( instanceCustomIndex_ ), mask( mask_ ), instanceShaderBindingTableRecordOffset( instanceShaderBindingTableRecordOffset_ ), flags( flags_ ), accelerationStructureReference( accelerationStructureReference_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR AccelerationStructureSRTMotionInstanceNV( AccelerationStructureSRTMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AccelerationStructureSRTMotionInstanceNV( VkAccelerationStructureSRTMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : AccelerationStructureSRTMotionInstanceNV( *reinterpret_cast<AccelerationStructureSRTMotionInstanceNV const *>( &rhs ) )
+    {}
+
+
+    AccelerationStructureSRTMotionInstanceNV & operator=( AccelerationStructureSRTMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    AccelerationStructureSRTMotionInstanceNV & operator=( VkAccelerationStructureSRTMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureSRTMotionInstanceNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV & setTransformT0( VULKAN_HPP_NAMESPACE::SRTDataNV const & transformT0_ ) VULKAN_HPP_NOEXCEPT
+    {
+      transformT0 = transformT0_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV & setTransformT1( VULKAN_HPP_NAMESPACE::SRTDataNV const & transformT1_ ) VULKAN_HPP_NOEXCEPT
+    {
+      transformT1 = transformT1_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV & setInstanceCustomIndex( uint32_t instanceCustomIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      instanceCustomIndex = instanceCustomIndex_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV & setMask( uint32_t mask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mask = mask_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV & setInstanceShaderBindingTableRecordOffset( uint32_t instanceShaderBindingTableRecordOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      instanceShaderBindingTableRecordOffset = instanceShaderBindingTableRecordOffset_;
+      return *this;
+    }
+
+    AccelerationStructureSRTMotionInstanceNV & setFlags( VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = *reinterpret_cast<VkGeometryInstanceFlagsKHR*>(&flags_);
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV & setAccelerationStructureReference( uint64_t accelerationStructureReference_ ) VULKAN_HPP_NOEXCEPT
+    {
+      accelerationStructureReference = accelerationStructureReference_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkAccelerationStructureSRTMotionInstanceNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAccelerationStructureSRTMotionInstanceNV*>( this );
+    }
+
+    operator VkAccelerationStructureSRTMotionInstanceNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAccelerationStructureSRTMotionInstanceNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::SRTDataNV const &, VULKAN_HPP_NAMESPACE::SRTDataNV const &, uint32_t const &, uint32_t const &, uint32_t const &, VkGeometryInstanceFlagsKHR const &, uint64_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( transformT0, transformT1, instanceCustomIndex, mask, instanceShaderBindingTableRecordOffset, flags, accelerationStructureReference );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( AccelerationStructureSRTMotionInstanceNV const & ) const = default;
+#else
+    bool operator==( AccelerationStructureSRTMotionInstanceNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( transformT0 == rhs.transformT0 )
+          && ( transformT1 == rhs.transformT1 )
+          && ( instanceCustomIndex == rhs.instanceCustomIndex )
+          && ( mask == rhs.mask )
+          && ( instanceShaderBindingTableRecordOffset == rhs.instanceShaderBindingTableRecordOffset )
+          && ( flags == rhs.flags )
+          && ( accelerationStructureReference == rhs.accelerationStructureReference );
+#endif
+    }
+
+    bool operator!=( AccelerationStructureSRTMotionInstanceNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::SRTDataNV transformT0 = {};
+    VULKAN_HPP_NAMESPACE::SRTDataNV transformT1 = {};
+    uint32_t instanceCustomIndex : 24;
+    uint32_t mask : 8;
+    uint32_t instanceShaderBindingTableRecordOffset : 24;
+    VkGeometryInstanceFlagsKHR flags : 8;
+    uint64_t accelerationStructureReference = {};
+
+  };
+
+  union AccelerationStructureMotionInstanceDataNV
+  {
+    using NativeType = VkAccelerationStructureMotionInstanceDataNV;
+#if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV( VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR staticInstance_ = {} )
+      : staticInstance( staticInstance_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV( VULKAN_HPP_NAMESPACE::AccelerationStructureMatrixMotionInstanceNV matrixMotionInstance_ )
+      : matrixMotionInstance( matrixMotionInstance_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV( VULKAN_HPP_NAMESPACE::AccelerationStructureSRTMotionInstanceNV srtMotionInstance_ )
+      : srtMotionInstance( srtMotionInstance_ )
+    {}
+#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/
+
+#if !defined( VULKAN_HPP_NO_UNION_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV & setStaticInstance( VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR const & staticInstance_ ) VULKAN_HPP_NOEXCEPT
+    {
+      staticInstance = staticInstance_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV & setMatrixMotionInstance( VULKAN_HPP_NAMESPACE::AccelerationStructureMatrixMotionInstanceNV const & matrixMotionInstance_ ) VULKAN_HPP_NOEXCEPT
+    {
+      matrixMotionInstance = matrixMotionInstance_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV & setSrtMotionInstance( VULKAN_HPP_NAMESPACE::AccelerationStructureSRTMotionInstanceNV const & srtMotionInstance_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srtMotionInstance = srtMotionInstance_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_UNION_SETTERS*/
+
+    operator VkAccelerationStructureMotionInstanceDataNV const &() const
+    {
+      return *reinterpret_cast<const VkAccelerationStructureMotionInstanceDataNV*>( this );
+    }
+
+    operator VkAccelerationStructureMotionInstanceDataNV &()
+    {
+      return *reinterpret_cast<VkAccelerationStructureMotionInstanceDataNV*>( this );
+    }
+
+#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
+    VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR staticInstance;
+    VULKAN_HPP_NAMESPACE::AccelerationStructureMatrixMotionInstanceNV matrixMotionInstance;
+    VULKAN_HPP_NAMESPACE::AccelerationStructureSRTMotionInstanceNV srtMotionInstance;
+#else
+    VkAccelerationStructureInstanceKHR staticInstance;
+    VkAccelerationStructureMatrixMotionInstanceNV matrixMotionInstance;
+    VkAccelerationStructureSRTMotionInstanceNV srtMotionInstance;
+#endif  /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
+
+  };
+
+  struct AccelerationStructureMotionInstanceNV
+  {
+    using NativeType = VkAccelerationStructureMotionInstanceNV;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceNV(VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceTypeNV type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceTypeNV::eStatic, VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceFlagsNV flags_ = {}, VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceDataNV data_ = {}) VULKAN_HPP_NOEXCEPT
+    : type( type_ ), flags( flags_ ), data( data_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceNV( AccelerationStructureMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AccelerationStructureMotionInstanceNV( VkAccelerationStructureMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : AccelerationStructureMotionInstanceNV( *reinterpret_cast<AccelerationStructureMotionInstanceNV const *>( &rhs ) )
+    {}
+
+
+    AccelerationStructureMotionInstanceNV & operator=( AccelerationStructureMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    AccelerationStructureMotionInstanceNV & operator=( VkAccelerationStructureMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceNV & setType( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceTypeNV type_ ) VULKAN_HPP_NOEXCEPT
+    {
+      type = type_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceNV & setFlags( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceNV & setData( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceDataNV const & data_ ) VULKAN_HPP_NOEXCEPT
+    {
+      data = data_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkAccelerationStructureMotionInstanceNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAccelerationStructureMotionInstanceNV*>( this );
+    }
+
+    operator VkAccelerationStructureMotionInstanceNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAccelerationStructureMotionInstanceNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceTypeNV const &, VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceFlagsNV const &, VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceDataNV const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( type, flags, data );
+    }
+#endif
+
+
+    public:
+    VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceTypeNV type = VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceTypeNV::eStatic;
+    VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceFlagsNV flags = {};
+    VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceDataNV data = {};
+
+  };
+
+  struct MicromapUsageEXT
+  {
+    using NativeType = VkMicromapUsageEXT;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR MicromapUsageEXT(uint32_t count_ = {}, uint32_t subdivisionLevel_ = {}, uint32_t format_ = {}) VULKAN_HPP_NOEXCEPT
+    : count( count_ ), subdivisionLevel( subdivisionLevel_ ), format( format_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MicromapUsageEXT( MicromapUsageEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MicromapUsageEXT( VkMicromapUsageEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MicromapUsageEXT( *reinterpret_cast<MicromapUsageEXT const *>( &rhs ) )
+    {}
+
+
+    MicromapUsageEXT & operator=( MicromapUsageEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    MicromapUsageEXT & operator=( VkMicromapUsageEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MicromapUsageEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 MicromapUsageEXT & setCount( uint32_t count_ ) VULKAN_HPP_NOEXCEPT
+    {
+      count = count_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MicromapUsageEXT & setSubdivisionLevel( uint32_t subdivisionLevel_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subdivisionLevel = subdivisionLevel_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MicromapUsageEXT & setFormat( uint32_t format_ ) VULKAN_HPP_NOEXCEPT
+    {
+      format = format_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkMicromapUsageEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMicromapUsageEXT*>( this );
+    }
+
+    operator VkMicromapUsageEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMicromapUsageEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<uint32_t const &, uint32_t const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( count, subdivisionLevel, format );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( MicromapUsageEXT const & ) const = default;
+#else
+    bool operator==( MicromapUsageEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( count == rhs.count )
+          && ( subdivisionLevel == rhs.subdivisionLevel )
+          && ( format == rhs.format );
+#endif
+    }
+
+    bool operator!=( MicromapUsageEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    uint32_t count = {};
+    uint32_t subdivisionLevel = {};
+    uint32_t format = {};
+
+  };
+
+  struct AccelerationStructureTrianglesOpacityMicromapEXT
+  {
+    using NativeType = VkAccelerationStructureTrianglesOpacityMicromapEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureTrianglesOpacityMicromapEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT(VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize indexStride_ = {}, uint32_t baseTriangle_ = {}, uint32_t usageCountsCount_ = {}, const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * pUsageCounts_ = {}, const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * ppUsageCounts_ = {}, VULKAN_HPP_NAMESPACE::MicromapEXT micromap_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), indexType( indexType_ ), indexBuffer( indexBuffer_ ), indexStride( indexStride_ ), baseTriangle( baseTriangle_ ), usageCountsCount( usageCountsCount_ ), pUsageCounts( pUsageCounts_ ), ppUsageCounts( ppUsageCounts_ ), micromap( micromap_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT( AccelerationStructureTrianglesOpacityMicromapEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AccelerationStructureTrianglesOpacityMicromapEXT( VkAccelerationStructureTrianglesOpacityMicromapEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : AccelerationStructureTrianglesOpacityMicromapEXT( *reinterpret_cast<AccelerationStructureTrianglesOpacityMicromapEXT const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    AccelerationStructureTrianglesOpacityMicromapEXT( VULKAN_HPP_NAMESPACE::IndexType indexType_, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexBuffer_, VULKAN_HPP_NAMESPACE::DeviceSize indexStride_, uint32_t baseTriangle_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MicromapUsageEXT> const & usageCounts_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const> const & pUsageCounts_ = {}, VULKAN_HPP_NAMESPACE::MicromapEXT micromap_ = {}, void * pNext_ = nullptr )
+    : pNext( pNext_ ), indexType( indexType_ ), indexBuffer( indexBuffer_ ), indexStride( indexStride_ ), baseTriangle( baseTriangle_ ), usageCountsCount( static_cast<uint32_t>( !usageCounts_.empty() ? usageCounts_.size() : pUsageCounts_.size() ) ), pUsageCounts( usageCounts_.data() ), ppUsageCounts( pUsageCounts_.data() ), micromap( micromap_ )
+    {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+      VULKAN_HPP_ASSERT( ( !usageCounts_.empty() + !pUsageCounts_.empty() ) <= 1);
+#else
+      if ( 1 < ( !usageCounts_.empty() + !pUsageCounts_.empty() ) )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING"::AccelerationStructureTrianglesOpacityMicromapEXT::AccelerationStructureTrianglesOpacityMicromapEXT: 1 < ( !usageCounts_.empty() + !pUsageCounts_.empty() )" );
+      }
+#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    AccelerationStructureTrianglesOpacityMicromapEXT & operator=( AccelerationStructureTrianglesOpacityMicromapEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    AccelerationStructureTrianglesOpacityMicromapEXT & operator=( VkAccelerationStructureTrianglesOpacityMicromapEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureTrianglesOpacityMicromapEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      indexType = indexType_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & setIndexBuffer( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & indexBuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      indexBuffer = indexBuffer_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & setIndexStride( VULKAN_HPP_NAMESPACE::DeviceSize indexStride_ ) VULKAN_HPP_NOEXCEPT
+    {
+      indexStride = indexStride_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & setBaseTriangle( uint32_t baseTriangle_ ) VULKAN_HPP_NOEXCEPT
+    {
+      baseTriangle = baseTriangle_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & setUsageCountsCount( uint32_t usageCountsCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      usageCountsCount = usageCountsCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & setPUsageCounts( const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * pUsageCounts_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pUsageCounts = pUsageCounts_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    AccelerationStructureTrianglesOpacityMicromapEXT & setUsageCounts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MicromapUsageEXT> const & usageCounts_ ) VULKAN_HPP_NOEXCEPT
+    {
+      usageCountsCount = static_cast<uint32_t>( usageCounts_.size() );
+      pUsageCounts = usageCounts_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & setPpUsageCounts( const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * ppUsageCounts_ ) VULKAN_HPP_NOEXCEPT
+    {
+      ppUsageCounts = ppUsageCounts_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    AccelerationStructureTrianglesOpacityMicromapEXT & setPUsageCounts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const> const & pUsageCounts_ ) VULKAN_HPP_NOEXCEPT
+    {
+      usageCountsCount = static_cast<uint32_t>( pUsageCounts_.size() );
+      ppUsageCounts = pUsageCounts_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & setMicromap( VULKAN_HPP_NAMESPACE::MicromapEXT micromap_ ) VULKAN_HPP_NOEXCEPT
+    {
+      micromap = micromap_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkAccelerationStructureTrianglesOpacityMicromapEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAccelerationStructureTrianglesOpacityMicromapEXT*>( this );
+    }
+
+    operator VkAccelerationStructureTrianglesOpacityMicromapEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAccelerationStructureTrianglesOpacityMicromapEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::IndexType const &, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, uint32_t const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const &, const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * const &, VULKAN_HPP_NAMESPACE::MicromapEXT const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, indexType, indexBuffer, indexStride, baseTriangle, usageCountsCount, pUsageCounts, ppUsageCounts, micromap );
+    }
+#endif
+
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureTrianglesOpacityMicromapEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16;
+    VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexBuffer = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize indexStride = {};
+    uint32_t baseTriangle = {};
+    uint32_t usageCountsCount = {};
+    const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * pUsageCounts = {};
+    const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * ppUsageCounts = {};
+    VULKAN_HPP_NAMESPACE::MicromapEXT micromap = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eAccelerationStructureTrianglesOpacityMicromapEXT>
+  {
+    using Type = AccelerationStructureTrianglesOpacityMicromapEXT;
+  };
+
+  struct AccelerationStructureVersionInfoKHR
+  {
+    using NativeType = VkAccelerationStructureVersionInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureVersionInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR AccelerationStructureVersionInfoKHR(const uint8_t * pVersionData_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), pVersionData( pVersionData_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR AccelerationStructureVersionInfoKHR( AccelerationStructureVersionInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AccelerationStructureVersionInfoKHR( VkAccelerationStructureVersionInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : AccelerationStructureVersionInfoKHR( *reinterpret_cast<AccelerationStructureVersionInfoKHR const *>( &rhs ) )
+    {}
+
+
+    AccelerationStructureVersionInfoKHR & operator=( AccelerationStructureVersionInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    AccelerationStructureVersionInfoKHR & operator=( VkAccelerationStructureVersionInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureVersionInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureVersionInfoKHR & setPVersionData( const uint8_t * pVersionData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pVersionData = pVersionData_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkAccelerationStructureVersionInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAccelerationStructureVersionInfoKHR*>( this );
+    }
+
+    operator VkAccelerationStructureVersionInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAccelerationStructureVersionInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const uint8_t * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, pVersionData );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( AccelerationStructureVersionInfoKHR const & ) const = default;
+#else
+    bool operator==( AccelerationStructureVersionInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pVersionData == rhs.pVersionData );
+#endif
+    }
+
+    bool operator!=( AccelerationStructureVersionInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureVersionInfoKHR;
+    const void * pNext = {};
+    const uint8_t * pVersionData = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eAccelerationStructureVersionInfoKHR>
+  {
+    using Type = AccelerationStructureVersionInfoKHR;
+  };
+
+  struct AcquireNextImageInfoKHR
+  {
+    using NativeType = VkAcquireNextImageInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAcquireNextImageInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR AcquireNextImageInfoKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {}, uint64_t timeout_ = {}, VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, VULKAN_HPP_NAMESPACE::Fence fence_ = {}, uint32_t deviceMask_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), swapchain( swapchain_ ), timeout( timeout_ ), semaphore( semaphore_ ), fence( fence_ ), deviceMask( deviceMask_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR AcquireNextImageInfoKHR( AcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AcquireNextImageInfoKHR( VkAcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : AcquireNextImageInfoKHR( *reinterpret_cast<AcquireNextImageInfoKHR const *>( &rhs ) )
+    {}
+
+
+    AcquireNextImageInfoKHR & operator=( AcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    AcquireNextImageInfoKHR & operator=( VkAcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR & setSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT
+    {
+      swapchain = swapchain_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR & setTimeout( uint64_t timeout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      timeout = timeout_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
+    {
+      semaphore = semaphore_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fence = fence_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceMask = deviceMask_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkAcquireNextImageInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAcquireNextImageInfoKHR*>( this );
+    }
+
+    operator VkAcquireNextImageInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAcquireNextImageInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SwapchainKHR const &, uint64_t const &, VULKAN_HPP_NAMESPACE::Semaphore const &, VULKAN_HPP_NAMESPACE::Fence const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, swapchain, timeout, semaphore, fence, deviceMask );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( AcquireNextImageInfoKHR const & ) const = default;
+#else
+    bool operator==( AcquireNextImageInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( swapchain == rhs.swapchain )
+          && ( timeout == rhs.timeout )
+          && ( semaphore == rhs.semaphore )
+          && ( fence == rhs.fence )
+          && ( deviceMask == rhs.deviceMask );
+#endif
+    }
+
+    bool operator!=( AcquireNextImageInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAcquireNextImageInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {};
+    uint64_t timeout = {};
+    VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
+    VULKAN_HPP_NAMESPACE::Fence fence = {};
+    uint32_t deviceMask = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eAcquireNextImageInfoKHR>
+  {
+    using Type = AcquireNextImageInfoKHR;
+  };
+
+  // C++ wrapper for VkAcquireProfilingLockInfoKHR (presumably the parameter
+  // struct of vkAcquireProfilingLockKHR / VK_KHR_performance_query -- confirm
+  // against the Vulkan spec).  Layout-compatible with the C struct: the
+  // conversion operators below reinterpret_cast *this, so member order and
+  // types must exactly match the C definition.
+  struct AcquireProfilingLockInfoKHR
+  {
+    using NativeType = VkAcquireProfilingLockInfoKHR;
+
+    // allowDuplicate presumably marks whether this struct may appear more
+    // than once in a pNext chain -- confirm against the Vulkan-Hpp generator.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAcquireProfilingLockInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR AcquireProfilingLockInfoKHR(VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags_ = {}, uint64_t timeout_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), timeout( timeout_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR AcquireProfilingLockInfoKHR( AcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid only because the two types share an
+    // identical layout.
+    AcquireProfilingLockInfoKHR( VkAcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : AcquireProfilingLockInfoKHR( *reinterpret_cast<AcquireProfilingLockInfoKHR const *>( &rhs ) )
+    {}
+
+
+    AcquireProfilingLockInfoKHR & operator=( AcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    AcquireProfilingLockInfoKHR & operator=( VkAcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable fluent setters: each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 AcquireProfilingLockInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AcquireProfilingLockInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AcquireProfilingLockInfoKHR & setTimeout( uint64_t timeout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      timeout = timeout_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the C struct (zero-cost; same layout).
+    operator VkAcquireProfilingLockInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAcquireProfilingLockInfoKHR*>( this );
+    }
+
+    operator VkAcquireProfilingLockInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAcquireProfilingLockInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR const &, uint64_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, timeout );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( AcquireProfilingLockInfoKHR const & ) const = default;
+#else
+    // Memberwise equality (pNext is compared as a raw pointer, not deeply).
+    bool operator==( AcquireProfilingLockInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( timeout == rhs.timeout );
+#endif
+    }
+
+    bool operator!=( AcquireProfilingLockInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAcquireProfilingLockInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags = {};
+    uint64_t timeout = {};
+
+  };
+
+  // Maps the sType enumerant back to the C++ wrapper type (used by generic
+  // pNext-chain utilities -- presumably; confirm against the generator).
+  template <>
+  struct CppType<StructureType, StructureType::eAcquireProfilingLockInfoKHR>
+  {
+    using Type = AcquireProfilingLockInfoKHR;
+  };
+
+  // C++ wrapper for VkAllocationCallbacks: user-supplied host-memory
+  // allocation hooks (an opaque pUserData plus six PFN_vk* callbacks).
+  // Unlike the sType-bearing structs in this file it has no sType/pNext and
+  // no allowDuplicate/structureType members; it is a plain mirror of the C
+  // struct, and the conversion operators rely on identical layout via
+  // reinterpret_cast.
+  // NOTE(review): no operator<=> branch is generated here, presumably
+  // because function-pointer members do not support ordering -- confirm
+  // against the Vulkan-Hpp generator.
+  struct AllocationCallbacks
+  {
+    using NativeType = VkAllocationCallbacks;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR AllocationCallbacks(void * pUserData_ = {}, PFN_vkAllocationFunction pfnAllocation_ = {}, PFN_vkReallocationFunction pfnReallocation_ = {}, PFN_vkFreeFunction pfnFree_ = {}, PFN_vkInternalAllocationNotification pfnInternalAllocation_ = {}, PFN_vkInternalFreeNotification pfnInternalFree_ = {}) VULKAN_HPP_NOEXCEPT
+    : pUserData( pUserData_ ), pfnAllocation( pfnAllocation_ ), pfnReallocation( pfnReallocation_ ), pfnFree( pfnFree_ ), pfnInternalAllocation( pfnInternalAllocation_ ), pfnInternalFree( pfnInternalFree_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR AllocationCallbacks( AllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid only because the layouts match.
+    AllocationCallbacks( VkAllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT
+      : AllocationCallbacks( *reinterpret_cast<AllocationCallbacks const *>( &rhs ) )
+    {}
+
+
+    AllocationCallbacks & operator=( AllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    AllocationCallbacks & operator=( VkAllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AllocationCallbacks const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable fluent setters: each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pUserData = pUserData_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPfnAllocation( PFN_vkAllocationFunction pfnAllocation_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pfnAllocation = pfnAllocation_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPfnReallocation( PFN_vkReallocationFunction pfnReallocation_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pfnReallocation = pfnReallocation_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPfnFree( PFN_vkFreeFunction pfnFree_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pfnFree = pfnFree_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPfnInternalAllocation( PFN_vkInternalAllocationNotification pfnInternalAllocation_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pfnInternalAllocation = pfnInternalAllocation_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPfnInternalFree( PFN_vkInternalFreeNotification pfnInternalFree_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pfnInternalFree = pfnInternalFree_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the C struct (zero-cost; same layout).
+    operator VkAllocationCallbacks const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAllocationCallbacks*>( this );
+    }
+
+    operator VkAllocationCallbacks &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAllocationCallbacks*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<void * const &, PFN_vkAllocationFunction const &, PFN_vkReallocationFunction const &, PFN_vkFreeFunction const &, PFN_vkInternalAllocationNotification const &, PFN_vkInternalFreeNotification const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( pUserData, pfnAllocation, pfnReallocation, pfnFree, pfnInternalAllocation, pfnInternalFree );
+    }
+#endif
+
+
+
+
+
+    // Memberwise equality; pointers and function pointers compared by value.
+    bool operator==( AllocationCallbacks const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( pUserData == rhs.pUserData )
+          && ( pfnAllocation == rhs.pfnAllocation )
+          && ( pfnReallocation == rhs.pfnReallocation )
+          && ( pfnFree == rhs.pfnFree )
+          && ( pfnInternalAllocation == rhs.pfnInternalAllocation )
+          && ( pfnInternalFree == rhs.pfnInternalFree );
+#endif
+    }
+
+    bool operator!=( AllocationCallbacks const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+    public:
+    void * pUserData = {};
+    PFN_vkAllocationFunction pfnAllocation = {};
+    PFN_vkReallocationFunction pfnReallocation = {};
+    PFN_vkFreeFunction pfnFree = {};
+    PFN_vkInternalAllocationNotification pfnInternalAllocation = {};
+    PFN_vkInternalFreeNotification pfnInternalFree = {};
+
+  };
+
+  // C++ wrapper for VkAmigoProfilingSubmitInfoSEC (SEC vendor extension --
+  // presumably VK_SEC_amigo_profiling; confirm against the Vulkan registry).
+  // Carries two uint64_t timestamps.  Layout-compatible with the C struct:
+  // the conversion operators below reinterpret_cast *this.
+  struct AmigoProfilingSubmitInfoSEC
+  {
+    using NativeType = VkAmigoProfilingSubmitInfoSEC;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAmigoProfilingSubmitInfoSEC;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR AmigoProfilingSubmitInfoSEC(uint64_t firstDrawTimestamp_ = {}, uint64_t swapBufferTimestamp_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), firstDrawTimestamp( firstDrawTimestamp_ ), swapBufferTimestamp( swapBufferTimestamp_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR AmigoProfilingSubmitInfoSEC( AmigoProfilingSubmitInfoSEC const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid only because the layouts match.
+    AmigoProfilingSubmitInfoSEC( VkAmigoProfilingSubmitInfoSEC const & rhs ) VULKAN_HPP_NOEXCEPT
+      : AmigoProfilingSubmitInfoSEC( *reinterpret_cast<AmigoProfilingSubmitInfoSEC const *>( &rhs ) )
+    {}
+
+
+    AmigoProfilingSubmitInfoSEC & operator=( AmigoProfilingSubmitInfoSEC const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    AmigoProfilingSubmitInfoSEC & operator=( VkAmigoProfilingSubmitInfoSEC const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AmigoProfilingSubmitInfoSEC const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable fluent setters: each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 AmigoProfilingSubmitInfoSEC & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AmigoProfilingSubmitInfoSEC & setFirstDrawTimestamp( uint64_t firstDrawTimestamp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      firstDrawTimestamp = firstDrawTimestamp_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AmigoProfilingSubmitInfoSEC & setSwapBufferTimestamp( uint64_t swapBufferTimestamp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      swapBufferTimestamp = swapBufferTimestamp_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the C struct (zero-cost; same layout).
+    operator VkAmigoProfilingSubmitInfoSEC const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAmigoProfilingSubmitInfoSEC*>( this );
+    }
+
+    operator VkAmigoProfilingSubmitInfoSEC &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAmigoProfilingSubmitInfoSEC*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint64_t const &, uint64_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, firstDrawTimestamp, swapBufferTimestamp );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( AmigoProfilingSubmitInfoSEC const & ) const = default;
+#else
+    // Memberwise equality (pNext compared as a raw pointer, not deeply).
+    bool operator==( AmigoProfilingSubmitInfoSEC const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( firstDrawTimestamp == rhs.firstDrawTimestamp )
+          && ( swapBufferTimestamp == rhs.swapBufferTimestamp );
+#endif
+    }
+
+    bool operator!=( AmigoProfilingSubmitInfoSEC const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAmigoProfilingSubmitInfoSEC;
+    const void * pNext = {};
+    uint64_t firstDrawTimestamp = {};
+    uint64_t swapBufferTimestamp = {};
+
+  };
+
+  // Maps the sType enumerant back to the C++ wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eAmigoProfilingSubmitInfoSEC>
+  {
+    using Type = AmigoProfilingSubmitInfoSEC;
+  };
+
+  // C++ wrapper for VkComponentMapping: per-channel swizzles (r/g/b/a), each
+  // defaulting to ComponentSwizzle::eIdentity.  No sType/pNext -- a plain
+  // mirror of the C struct, layout-compatible so the conversion operators
+  // below may reinterpret_cast *this.
+  struct ComponentMapping
+  {
+    using NativeType = VkComponentMapping;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ComponentMapping(VULKAN_HPP_NAMESPACE::ComponentSwizzle r_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity, VULKAN_HPP_NAMESPACE::ComponentSwizzle g_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity, VULKAN_HPP_NAMESPACE::ComponentSwizzle b_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity, VULKAN_HPP_NAMESPACE::ComponentSwizzle a_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity) VULKAN_HPP_NOEXCEPT
+    : r( r_ ), g( g_ ), b( b_ ), a( a_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ComponentMapping( ComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid only because the layouts match.
+    ComponentMapping( VkComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ComponentMapping( *reinterpret_cast<ComponentMapping const *>( &rhs ) )
+    {}
+
+
+    ComponentMapping & operator=( ComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ComponentMapping & operator=( VkComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ComponentMapping const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable fluent setters: each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 ComponentMapping & setR( VULKAN_HPP_NAMESPACE::ComponentSwizzle r_ ) VULKAN_HPP_NOEXCEPT
+    {
+      r = r_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ComponentMapping & setG( VULKAN_HPP_NAMESPACE::ComponentSwizzle g_ ) VULKAN_HPP_NOEXCEPT
+    {
+      g = g_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ComponentMapping & setB( VULKAN_HPP_NAMESPACE::ComponentSwizzle b_ ) VULKAN_HPP_NOEXCEPT
+    {
+      b = b_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ComponentMapping & setA( VULKAN_HPP_NAMESPACE::ComponentSwizzle a_ ) VULKAN_HPP_NOEXCEPT
+    {
+      a = a_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the C struct (zero-cost; same layout).
+    operator VkComponentMapping const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkComponentMapping*>( this );
+    }
+
+    operator VkComponentMapping &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkComponentMapping*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::ComponentSwizzle const &, VULKAN_HPP_NAMESPACE::ComponentSwizzle const &, VULKAN_HPP_NAMESPACE::ComponentSwizzle const &, VULKAN_HPP_NAMESPACE::ComponentSwizzle const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( r, g, b, a );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ComponentMapping const & ) const = default;
+#else
+    // Memberwise equality over the four swizzle channels.
+    bool operator==( ComponentMapping const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( r == rhs.r )
+          && ( g == rhs.g )
+          && ( b == rhs.b )
+          && ( a == rhs.a );
+#endif
+    }
+
+    bool operator!=( ComponentMapping const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::ComponentSwizzle r = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity;
+    VULKAN_HPP_NAMESPACE::ComponentSwizzle g = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity;
+    VULKAN_HPP_NAMESPACE::ComponentSwizzle b = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity;
+    VULKAN_HPP_NAMESPACE::ComponentSwizzle a = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity;
+
+  };
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  // C++ wrapper for VkAndroidHardwareBufferFormatProperties2ANDROID
+  // (Android-only; compiled only under VK_USE_PLATFORM_ANDROID_KHR).
+  // Carries format/swizzle/YCbCr suggestions with 64-bit FormatFeatureFlags2.
+  // NOTE(review): pNext is non-const void* and no setters are generated --
+  // presumably an output-only struct filled by the implementation; confirm
+  // against the Vulkan spec.  Layout-compatible with the C struct.
+  struct AndroidHardwareBufferFormatProperties2ANDROID
+  {
+    using NativeType = VkAndroidHardwareBufferFormatProperties2ANDROID;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidHardwareBufferFormatProperties2ANDROID;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatProperties2ANDROID(VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, uint64_t externalFormat_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 formatFeatures_ = {}, VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents_ = {}, VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity, VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull, VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), format( format_ ), externalFormat( externalFormat_ ), formatFeatures( formatFeatures_ ), samplerYcbcrConversionComponents( samplerYcbcrConversionComponents_ ), suggestedYcbcrModel( suggestedYcbcrModel_ ), suggestedYcbcrRange( suggestedYcbcrRange_ ), suggestedXChromaOffset( suggestedXChromaOffset_ ), suggestedYChromaOffset( suggestedYChromaOffset_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatProperties2ANDROID( AndroidHardwareBufferFormatProperties2ANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid only because the layouts match.
+    AndroidHardwareBufferFormatProperties2ANDROID( VkAndroidHardwareBufferFormatProperties2ANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+      : AndroidHardwareBufferFormatProperties2ANDROID( *reinterpret_cast<AndroidHardwareBufferFormatProperties2ANDROID const *>( &rhs ) )
+    {}
+
+
+    AndroidHardwareBufferFormatProperties2ANDROID & operator=( AndroidHardwareBufferFormatProperties2ANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    AndroidHardwareBufferFormatProperties2ANDROID & operator=( VkAndroidHardwareBufferFormatProperties2ANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatProperties2ANDROID const *>( &rhs );
+      return *this;
+    }
+
+
+    // Implicit conversions to the C struct (zero-cost; same layout).
+    operator VkAndroidHardwareBufferFormatProperties2ANDROID const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAndroidHardwareBufferFormatProperties2ANDROID*>( this );
+    }
+
+    operator VkAndroidHardwareBufferFormatProperties2ANDROID &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAndroidHardwareBufferFormatProperties2ANDROID*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Format const &, uint64_t const &, VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 const &, VULKAN_HPP_NAMESPACE::ComponentMapping const &, VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion const &, VULKAN_HPP_NAMESPACE::SamplerYcbcrRange const &, VULKAN_HPP_NAMESPACE::ChromaLocation const &, VULKAN_HPP_NAMESPACE::ChromaLocation const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, format, externalFormat, formatFeatures, samplerYcbcrConversionComponents, suggestedYcbcrModel, suggestedYcbcrRange, suggestedXChromaOffset, suggestedYChromaOffset );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( AndroidHardwareBufferFormatProperties2ANDROID const & ) const = default;
+#else
+    // Memberwise equality (pNext compared as a raw pointer, not deeply).
+    bool operator==( AndroidHardwareBufferFormatProperties2ANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( format == rhs.format )
+          && ( externalFormat == rhs.externalFormat )
+          && ( formatFeatures == rhs.formatFeatures )
+          && ( samplerYcbcrConversionComponents == rhs.samplerYcbcrConversionComponents )
+          && ( suggestedYcbcrModel == rhs.suggestedYcbcrModel )
+          && ( suggestedYcbcrRange == rhs.suggestedYcbcrRange )
+          && ( suggestedXChromaOffset == rhs.suggestedXChromaOffset )
+          && ( suggestedYChromaOffset == rhs.suggestedYChromaOffset );
+#endif
+    }
+
+    bool operator!=( AndroidHardwareBufferFormatProperties2ANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferFormatProperties2ANDROID;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    uint64_t externalFormat = {};
+    VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 formatFeatures = {};
+    VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents = {};
+    VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity;
+    VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull;
+    VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
+    VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
+
+  };
+
+  // Maps the sType enumerant back to the C++ wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eAndroidHardwareBufferFormatProperties2ANDROID>
+  {
+    using Type = AndroidHardwareBufferFormatProperties2ANDROID;
+  };
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  // C++ wrapper for VkAndroidHardwareBufferFormatPropertiesANDROID
+  // (Android-only; compiled only under VK_USE_PLATFORM_ANDROID_KHR).
+  // Same shape as the ...FormatProperties2ANDROID variant but with the
+  // original 32-bit FormatFeatureFlags instead of FormatFeatureFlags2.
+  // NOTE(review): non-const void* pNext and no setters generated --
+  // presumably output-only; confirm against the Vulkan spec.
+  struct AndroidHardwareBufferFormatPropertiesANDROID
+  {
+    using NativeType = VkAndroidHardwareBufferFormatPropertiesANDROID;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidHardwareBufferFormatPropertiesANDROID;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatPropertiesANDROID(VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, uint64_t externalFormat_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures_ = {}, VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents_ = {}, VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity, VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull, VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), format( format_ ), externalFormat( externalFormat_ ), formatFeatures( formatFeatures_ ), samplerYcbcrConversionComponents( samplerYcbcrConversionComponents_ ), suggestedYcbcrModel( suggestedYcbcrModel_ ), suggestedYcbcrRange( suggestedYcbcrRange_ ), suggestedXChromaOffset( suggestedXChromaOffset_ ), suggestedYChromaOffset( suggestedYChromaOffset_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatPropertiesANDROID( AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid only because the layouts match.
+    AndroidHardwareBufferFormatPropertiesANDROID( VkAndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+      : AndroidHardwareBufferFormatPropertiesANDROID( *reinterpret_cast<AndroidHardwareBufferFormatPropertiesANDROID const *>( &rhs ) )
+    {}
+
+
+    AndroidHardwareBufferFormatPropertiesANDROID & operator=( AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    AndroidHardwareBufferFormatPropertiesANDROID & operator=( VkAndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID const *>( &rhs );
+      return *this;
+    }
+
+
+    // Implicit conversions to the C struct (zero-cost; same layout).
+    operator VkAndroidHardwareBufferFormatPropertiesANDROID const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAndroidHardwareBufferFormatPropertiesANDROID*>( this );
+    }
+
+    operator VkAndroidHardwareBufferFormatPropertiesANDROID &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAndroidHardwareBufferFormatPropertiesANDROID*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Format const &, uint64_t const &, VULKAN_HPP_NAMESPACE::FormatFeatureFlags const &, VULKAN_HPP_NAMESPACE::ComponentMapping const &, VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion const &, VULKAN_HPP_NAMESPACE::SamplerYcbcrRange const &, VULKAN_HPP_NAMESPACE::ChromaLocation const &, VULKAN_HPP_NAMESPACE::ChromaLocation const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, format, externalFormat, formatFeatures, samplerYcbcrConversionComponents, suggestedYcbcrModel, suggestedYcbcrRange, suggestedXChromaOffset, suggestedYChromaOffset );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( AndroidHardwareBufferFormatPropertiesANDROID const & ) const = default;
+#else
+    // Memberwise equality (pNext compared as a raw pointer, not deeply).
+    bool operator==( AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( format == rhs.format )
+          && ( externalFormat == rhs.externalFormat )
+          && ( formatFeatures == rhs.formatFeatures )
+          && ( samplerYcbcrConversionComponents == rhs.samplerYcbcrConversionComponents )
+          && ( suggestedYcbcrModel == rhs.suggestedYcbcrModel )
+          && ( suggestedYcbcrRange == rhs.suggestedYcbcrRange )
+          && ( suggestedXChromaOffset == rhs.suggestedXChromaOffset )
+          && ( suggestedYChromaOffset == rhs.suggestedYChromaOffset );
+#endif
+    }
+
+    bool operator!=( AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferFormatPropertiesANDROID;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    uint64_t externalFormat = {};
+    VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures = {};
+    VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents = {};
+    VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity;
+    VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull;
+    VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
+    VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
+
+  };
+
+  // Maps the sType enumerant back to the C++ wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eAndroidHardwareBufferFormatPropertiesANDROID>
+  {
+    using Type = AndroidHardwareBufferFormatPropertiesANDROID;
+  };
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  // C++ wrapper for VkAndroidHardwareBufferPropertiesANDROID (Android-only;
+  // compiled only under VK_USE_PLATFORM_ANDROID_KHR).  Carries an
+  // allocationSize and memoryTypeBits mask.
+  // NOTE(review): non-const void* pNext and no setters generated --
+  // presumably output-only; confirm against the Vulkan spec.
+  struct AndroidHardwareBufferPropertiesANDROID
+  {
+    using NativeType = VkAndroidHardwareBufferPropertiesANDROID;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidHardwareBufferPropertiesANDROID;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR AndroidHardwareBufferPropertiesANDROID(VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ = {}, uint32_t memoryTypeBits_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), allocationSize( allocationSize_ ), memoryTypeBits( memoryTypeBits_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR AndroidHardwareBufferPropertiesANDROID( AndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid only because the layouts match.
+    AndroidHardwareBufferPropertiesANDROID( VkAndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+      : AndroidHardwareBufferPropertiesANDROID( *reinterpret_cast<AndroidHardwareBufferPropertiesANDROID const *>( &rhs ) )
+    {}
+
+
+    AndroidHardwareBufferPropertiesANDROID & operator=( AndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    AndroidHardwareBufferPropertiesANDROID & operator=( VkAndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID const *>( &rhs );
+      return *this;
+    }
+
+
+    // Implicit conversions to the C struct (zero-cost; same layout).
+    operator VkAndroidHardwareBufferPropertiesANDROID const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAndroidHardwareBufferPropertiesANDROID*>( this );
+    }
+
+    operator VkAndroidHardwareBufferPropertiesANDROID &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, allocationSize, memoryTypeBits );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( AndroidHardwareBufferPropertiesANDROID const & ) const = default;
+#else
+    // Memberwise equality (pNext compared as a raw pointer, not deeply).
+    bool operator==( AndroidHardwareBufferPropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( allocationSize == rhs.allocationSize )
+          && ( memoryTypeBits == rhs.memoryTypeBits );
+#endif
+    }
+
+    bool operator!=( AndroidHardwareBufferPropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferPropertiesANDROID;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize allocationSize = {};
+    uint32_t memoryTypeBits = {};
+
+  };
+
+  // Maps the sType enumerant back to the C++ wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eAndroidHardwareBufferPropertiesANDROID>
+  {
+    using Type = AndroidHardwareBufferPropertiesANDROID;
+  };
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  struct AndroidHardwareBufferUsageANDROID
+  {
+    using NativeType = VkAndroidHardwareBufferUsageANDROID;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidHardwareBufferUsageANDROID;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR AndroidHardwareBufferUsageANDROID(uint64_t androidHardwareBufferUsage_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), androidHardwareBufferUsage( androidHardwareBufferUsage_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR AndroidHardwareBufferUsageANDROID( AndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AndroidHardwareBufferUsageANDROID( VkAndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+      : AndroidHardwareBufferUsageANDROID( *reinterpret_cast<AndroidHardwareBufferUsageANDROID const *>( &rhs ) )
+    {}
+
+
+    AndroidHardwareBufferUsageANDROID & operator=( AndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    AndroidHardwareBufferUsageANDROID & operator=( VkAndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferUsageANDROID const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkAndroidHardwareBufferUsageANDROID const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAndroidHardwareBufferUsageANDROID*>( this );
+    }
+
+    operator VkAndroidHardwareBufferUsageANDROID &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAndroidHardwareBufferUsageANDROID*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint64_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, androidHardwareBufferUsage );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+// Defaulted three-way comparison: memberwise, includes sType and pNext pointer identity.
+auto operator<=>( AndroidHardwareBufferUsageANDROID const & ) const = default;
+#else
+    bool operator==( AndroidHardwareBufferUsageANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      // reflect() ties all members, so tuple equality is memberwise equality.
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( androidHardwareBufferUsage == rhs.androidHardwareBufferUsage );
+#endif
+    }
+
+    bool operator!=( AndroidHardwareBufferUsageANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // sType is fixed at construction; pNext is mutable to allow structure chaining.
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferUsageANDROID;
+    void * pNext = {};
+    uint64_t androidHardwareBufferUsage = {};
+
+  };
+
+  // Maps the sType enumerant back to the wrapper type (used by structure-chain machinery).
+  template <>
+  struct CppType<StructureType, StructureType::eAndroidHardwareBufferUsageANDROID>
+  {
+    using Type = AndroidHardwareBufferUsageANDROID;
+  };
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  // C++ wrapper for VkAndroidSurfaceCreateInfoKHR (VK_KHR_android_surface):
+  // carries the ANativeWindow used to create an Android surface.
+  struct AndroidSurfaceCreateInfoKHR
+  {
+    using NativeType = VkAndroidSurfaceCreateInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidSurfaceCreateInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR AndroidSurfaceCreateInfoKHR(VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags_ = {}, struct ANativeWindow * window_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), window( window_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR AndroidSurfaceCreateInfoKHR( AndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; assumes the wrapper is layout-compatible with VkAndroidSurfaceCreateInfoKHR.
+    AndroidSurfaceCreateInfoKHR( VkAndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : AndroidSurfaceCreateInfoKHR( *reinterpret_cast<AndroidSurfaceCreateInfoKHR const *>( &rhs ) )
+    {}
+
+
+    AndroidSurfaceCreateInfoKHR & operator=( AndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    AndroidSurfaceCreateInfoKHR & operator=( VkAndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters (builder style): each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 AndroidSurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AndroidSurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AndroidSurfaceCreateInfoKHR & setWindow( struct ANativeWindow * window_ ) VULKAN_HPP_NOEXCEPT
+    {
+      window = window_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the C struct for direct use with the C API.
+    operator VkAndroidSurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR*>( this );
+    }
+
+    operator VkAndroidSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAndroidSurfaceCreateInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR const &, struct ANativeWindow * const &>
+#endif
+      // Tuple of references to all members, in declaration order.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, window );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+// Defaulted three-way comparison: memberwise, compares the window pointer by identity.
+auto operator<=>( AndroidSurfaceCreateInfoKHR const & ) const = default;
+#else
+    bool operator==( AndroidSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( window == rhs.window );
+#endif
+    }
+
+    bool operator!=( AndroidSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidSurfaceCreateInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags = {};
+    struct ANativeWindow * window = {};
+
+  };
+
+  // Maps the sType enumerant back to the wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eAndroidSurfaceCreateInfoKHR>
+  {
+    using Type = AndroidSurfaceCreateInfoKHR;
+  };
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+  // C++ wrapper for VkApplicationInfo: application/engine name and version info passed
+  // at instance creation. The name members are C strings and default to nullptr, so the
+  // comparison operators below must not feed them to strcmp unguarded (UB on null).
+  struct ApplicationInfo
+  {
+    using NativeType = VkApplicationInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eApplicationInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ApplicationInfo(const char * pApplicationName_ = {}, uint32_t applicationVersion_ = {}, const char * pEngineName_ = {}, uint32_t engineVersion_ = {}, uint32_t apiVersion_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), pApplicationName( pApplicationName_ ), applicationVersion( applicationVersion_ ), pEngineName( pEngineName_ ), engineVersion( engineVersion_ ), apiVersion( apiVersion_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ApplicationInfo( ApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; assumes the wrapper is layout-compatible with VkApplicationInfo.
+    ApplicationInfo( VkApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ApplicationInfo( *reinterpret_cast<ApplicationInfo const *>( &rhs ) )
+    {}
+
+
+    ApplicationInfo & operator=( ApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ApplicationInfo & operator=( VkApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ApplicationInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters (builder style): each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 ApplicationInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ApplicationInfo & setPApplicationName( const char * pApplicationName_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pApplicationName = pApplicationName_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ApplicationInfo & setApplicationVersion( uint32_t applicationVersion_ ) VULKAN_HPP_NOEXCEPT
+    {
+      applicationVersion = applicationVersion_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ApplicationInfo & setPEngineName( const char * pEngineName_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pEngineName = pEngineName_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ApplicationInfo & setEngineVersion( uint32_t engineVersion_ ) VULKAN_HPP_NOEXCEPT
+    {
+      engineVersion = engineVersion_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ApplicationInfo & setApiVersion( uint32_t apiVersion_ ) VULKAN_HPP_NOEXCEPT
+    {
+      apiVersion = apiVersion_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the C struct for direct use with the C API.
+    operator VkApplicationInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkApplicationInfo*>( this );
+    }
+
+    operator VkApplicationInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkApplicationInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const char * const &, uint32_t const &, const char * const &, uint32_t const &, uint32_t const &>
+#endif
+      // Tuple of references to all members, in declaration order.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, pApplicationName, applicationVersion, pEngineName, engineVersion, apiVersion );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    // Hand-written three-way comparison: the name members are compared by string content,
+    // not pointer identity, so this cannot be defaulted. A null name orders before any
+    // non-null name; strcmp is only reached when both pointers are non-null (UB otherwise).
+    std::strong_ordering operator<=>( ApplicationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
+      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
+      if ( pApplicationName != rhs.pApplicationName )
+      {
+        if ( !pApplicationName || !rhs.pApplicationName )
+          return pApplicationName ? std::strong_ordering::greater : std::strong_ordering::less;
+        if ( auto cmp = strcmp( pApplicationName, rhs.pApplicationName ); cmp != 0 )
+          return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
+      }
+      if ( auto cmp = applicationVersion <=> rhs.applicationVersion; cmp != 0 ) return cmp;
+      if ( pEngineName != rhs.pEngineName )
+      {
+        if ( !pEngineName || !rhs.pEngineName )
+          return pEngineName ? std::strong_ordering::greater : std::strong_ordering::less;
+        if ( auto cmp = strcmp( pEngineName, rhs.pEngineName ); cmp != 0 )
+          return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
+      }
+      if ( auto cmp = engineVersion <=> rhs.engineVersion; cmp != 0 ) return cmp;
+      if ( auto cmp = apiVersion <=> rhs.apiVersion; cmp != 0 ) return cmp;
+
+      return std::strong_ordering::equivalent;
+    }
+#endif
+
+    // Equality compares names by content; strcmp is guarded so a null name never reaches it.
+    bool operator==( ApplicationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( ( pApplicationName == rhs.pApplicationName ) || ( pApplicationName && rhs.pApplicationName && ( strcmp( pApplicationName, rhs.pApplicationName ) == 0 ) ) )
+          && ( applicationVersion == rhs.applicationVersion )
+          && ( ( pEngineName == rhs.pEngineName ) || ( pEngineName && rhs.pEngineName && ( strcmp( pEngineName, rhs.pEngineName ) == 0 ) ) )
+          && ( engineVersion == rhs.engineVersion )
+          && ( apiVersion == rhs.apiVersion );
+    }
+
+    bool operator!=( ApplicationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eApplicationInfo;
+    const void * pNext = {};
+    const char * pApplicationName = {};
+    uint32_t applicationVersion = {};
+    const char * pEngineName = {};
+    uint32_t engineVersion = {};
+    uint32_t apiVersion = {};
+
+  };
+
+  // Maps the sType enumerant back to the wrapper type (used by structure-chain machinery).
+  template <>
+  struct CppType<StructureType, StructureType::eApplicationInfo>
+  {
+    using Type = ApplicationInfo;
+  };
+
+  // C++ wrapper for VkAttachmentDescription. Note: unlike the *2 variant below, this
+  // struct has no sType/pNext members (the C struct is not an extensible structure).
+  struct AttachmentDescription
+  {
+    using NativeType = VkAttachmentDescription;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR AttachmentDescription(VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), format( format_ ), samples( samples_ ), loadOp( loadOp_ ), storeOp( storeOp_ ), stencilLoadOp( stencilLoadOp_ ), stencilStoreOp( stencilStoreOp_ ), initialLayout( initialLayout_ ), finalLayout( finalLayout_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR AttachmentDescription( AttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; assumes the wrapper is layout-compatible with VkAttachmentDescription.
+    AttachmentDescription( VkAttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT
+      : AttachmentDescription( *reinterpret_cast<AttachmentDescription const *>( &rhs ) )
+    {}
+
+
+    AttachmentDescription & operator=( AttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    AttachmentDescription & operator=( VkAttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentDescription const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters (builder style): each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setFlags( VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
+    {
+      format = format_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT
+    {
+      samples = samples_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      loadOp = loadOp_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      storeOp = storeOp_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setStencilLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stencilLoadOp = stencilLoadOp_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setStencilStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stencilStoreOp = stencilStoreOp_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      initialLayout = initialLayout_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setFinalLayout( VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      finalLayout = finalLayout_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the C struct for direct use with the C API.
+    operator VkAttachmentDescription const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAttachmentDescription*>( this );
+    }
+
+    operator VkAttachmentDescription &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAttachmentDescription*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags const &, VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::SampleCountFlagBits const &, VULKAN_HPP_NAMESPACE::AttachmentLoadOp const &, VULKAN_HPP_NAMESPACE::AttachmentStoreOp const &, VULKAN_HPP_NAMESPACE::AttachmentLoadOp const &, VULKAN_HPP_NAMESPACE::AttachmentStoreOp const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, VULKAN_HPP_NAMESPACE::ImageLayout const &>
+#endif
+      // Tuple of references to all members, in declaration order.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( flags, format, samples, loadOp, storeOp, stencilLoadOp, stencilStoreOp, initialLayout, finalLayout );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+// Defaulted three-way comparison: memberwise over all enum/flag fields.
+auto operator<=>( AttachmentDescription const & ) const = default;
+#else
+    bool operator==( AttachmentDescription const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( flags == rhs.flags )
+          && ( format == rhs.format )
+          && ( samples == rhs.samples )
+          && ( loadOp == rhs.loadOp )
+          && ( storeOp == rhs.storeOp )
+          && ( stencilLoadOp == rhs.stencilLoadOp )
+          && ( stencilStoreOp == rhs.stencilStoreOp )
+          && ( initialLayout == rhs.initialLayout )
+          && ( finalLayout == rhs.finalLayout );
+#endif
+    }
+
+    bool operator!=( AttachmentDescription const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags = {};
+    VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
+    VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad;
+    VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore;
+    VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad;
+    VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore;
+    VULKAN_HPP_NAMESPACE::ImageLayout initialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+    VULKAN_HPP_NAMESPACE::ImageLayout finalLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+
+  };
+
+  // C++ wrapper for VkAttachmentDescription2 (Vulkan 1.2 / VK_KHR_create_renderpass2):
+  // extensible (sType/pNext) variant of AttachmentDescription.
+  struct AttachmentDescription2
+  {
+    using NativeType = VkAttachmentDescription2;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentDescription2;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR AttachmentDescription2(VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), format( format_ ), samples( samples_ ), loadOp( loadOp_ ), storeOp( storeOp_ ), stencilLoadOp( stencilLoadOp_ ), stencilStoreOp( stencilStoreOp_ ), initialLayout( initialLayout_ ), finalLayout( finalLayout_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR AttachmentDescription2( AttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; assumes the wrapper is layout-compatible with VkAttachmentDescription2.
+    AttachmentDescription2( VkAttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT
+      : AttachmentDescription2( *reinterpret_cast<AttachmentDescription2 const *>( &rhs ) )
+    {}
+
+
+    AttachmentDescription2 & operator=( AttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    AttachmentDescription2 & operator=( VkAttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentDescription2 const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters (builder style): each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setFlags( VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
+    {
+      format = format_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT
+    {
+      samples = samples_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      loadOp = loadOp_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      storeOp = storeOp_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setStencilLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stencilLoadOp = stencilLoadOp_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setStencilStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stencilStoreOp = stencilStoreOp_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      initialLayout = initialLayout_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setFinalLayout( VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      finalLayout = finalLayout_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the C struct for direct use with the C API.
+    operator VkAttachmentDescription2 const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAttachmentDescription2*>( this );
+    }
+
+    operator VkAttachmentDescription2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAttachmentDescription2*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags const &, VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::SampleCountFlagBits const &, VULKAN_HPP_NAMESPACE::AttachmentLoadOp const &, VULKAN_HPP_NAMESPACE::AttachmentStoreOp const &, VULKAN_HPP_NAMESPACE::AttachmentLoadOp const &, VULKAN_HPP_NAMESPACE::AttachmentStoreOp const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, VULKAN_HPP_NAMESPACE::ImageLayout const &>
+#endif
+      // Tuple of references to all members, in declaration order.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, format, samples, loadOp, storeOp, stencilLoadOp, stencilStoreOp, initialLayout, finalLayout );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+// Defaulted three-way comparison: memberwise, compares pNext by pointer identity.
+auto operator<=>( AttachmentDescription2 const & ) const = default;
+#else
+    bool operator==( AttachmentDescription2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( format == rhs.format )
+          && ( samples == rhs.samples )
+          && ( loadOp == rhs.loadOp )
+          && ( storeOp == rhs.storeOp )
+          && ( stencilLoadOp == rhs.stencilLoadOp )
+          && ( stencilStoreOp == rhs.stencilStoreOp )
+          && ( initialLayout == rhs.initialLayout )
+          && ( finalLayout == rhs.finalLayout );
+#endif
+    }
+
+    bool operator!=( AttachmentDescription2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentDescription2;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags = {};
+    VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
+    VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad;
+    VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore;
+    VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad;
+    VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore;
+    VULKAN_HPP_NAMESPACE::ImageLayout initialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+    VULKAN_HPP_NAMESPACE::ImageLayout finalLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+
+  };
+
+  // Maps the sType enumerant back to the wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eAttachmentDescription2>
+  {
+    using Type = AttachmentDescription2;
+  };
+  // Alias for code written against the KHR extension name.
+  using AttachmentDescription2KHR = AttachmentDescription2;
+
+  // C++ wrapper for VkAttachmentDescriptionStencilLayout (Vulkan 1.2 /
+  // VK_KHR_separate_depth_stencil_layouts): chained onto AttachmentDescription2
+  // via pNext to give the stencil aspect its own initial/final layouts.
+  struct AttachmentDescriptionStencilLayout
+  {
+    using NativeType = VkAttachmentDescriptionStencilLayout;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentDescriptionStencilLayout;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR AttachmentDescriptionStencilLayout(VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), stencilInitialLayout( stencilInitialLayout_ ), stencilFinalLayout( stencilFinalLayout_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR AttachmentDescriptionStencilLayout( AttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; assumes the wrapper is layout-compatible with VkAttachmentDescriptionStencilLayout.
+    AttachmentDescriptionStencilLayout( VkAttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT
+      : AttachmentDescriptionStencilLayout( *reinterpret_cast<AttachmentDescriptionStencilLayout const *>( &rhs ) )
+    {}
+
+
+    AttachmentDescriptionStencilLayout & operator=( AttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    AttachmentDescriptionStencilLayout & operator=( VkAttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayout const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters (builder style): each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 AttachmentDescriptionStencilLayout & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AttachmentDescriptionStencilLayout & setStencilInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stencilInitialLayout = stencilInitialLayout_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AttachmentDescriptionStencilLayout & setStencilFinalLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stencilFinalLayout = stencilFinalLayout_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the C struct for direct use with the C API.
+    operator VkAttachmentDescriptionStencilLayout const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAttachmentDescriptionStencilLayout*>( this );
+    }
+
+    operator VkAttachmentDescriptionStencilLayout &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAttachmentDescriptionStencilLayout*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, VULKAN_HPP_NAMESPACE::ImageLayout const &>
+#endif
+      // Tuple of references to all members, in declaration order.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, stencilInitialLayout, stencilFinalLayout );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+// Defaulted three-way comparison: memberwise, compares pNext by pointer identity.
+auto operator<=>( AttachmentDescriptionStencilLayout const & ) const = default;
+#else
+    bool operator==( AttachmentDescriptionStencilLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( stencilInitialLayout == rhs.stencilInitialLayout )
+          && ( stencilFinalLayout == rhs.stencilFinalLayout );
+#endif
+    }
+
+    bool operator!=( AttachmentDescriptionStencilLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentDescriptionStencilLayout;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+    VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+
+  };
+
+  // Maps the sType enumerant back to the wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eAttachmentDescriptionStencilLayout>
+  {
+    using Type = AttachmentDescriptionStencilLayout;
+  };
+  // Alias for code written against the KHR extension name.
+  using AttachmentDescriptionStencilLayoutKHR = AttachmentDescriptionStencilLayout;
+
+  // C++ wrapper for VkAttachmentReference: (attachment index, layout) pair used by
+  // subpass descriptions. Like AttachmentDescription, it has no sType/pNext members.
+  struct AttachmentReference
+  {
+    using NativeType = VkAttachmentReference;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR AttachmentReference(uint32_t attachment_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout layout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined) VULKAN_HPP_NOEXCEPT
+    : attachment( attachment_ ), layout( layout_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR AttachmentReference( AttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; assumes the wrapper is layout-compatible with VkAttachmentReference.
+    AttachmentReference( VkAttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT
+      : AttachmentReference( *reinterpret_cast<AttachmentReference const *>( &rhs ) )
+    {}
+
+
+    AttachmentReference & operator=( AttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    AttachmentReference & operator=( VkAttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentReference const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters (builder style): each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 AttachmentReference & setAttachment( uint32_t attachment_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachment = attachment_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AttachmentReference & setLayout( VULKAN_HPP_NAMESPACE::ImageLayout layout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      layout = layout_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the C struct for direct use with the C API.
+    operator VkAttachmentReference const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAttachmentReference*>( this );
+    }
+
+    operator VkAttachmentReference &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAttachmentReference*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<uint32_t const &, VULKAN_HPP_NAMESPACE::ImageLayout const &>
+#endif
+      // Tuple of references to both members, in declaration order.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( attachment, layout );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+// Defaulted three-way comparison: memberwise.
+auto operator<=>( AttachmentReference const & ) const = default;
+#else
+    bool operator==( AttachmentReference const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( attachment == rhs.attachment )
+          && ( layout == rhs.layout );
+#endif
+    }
+
+    bool operator!=( AttachmentReference const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    uint32_t attachment = {};
+    VULKAN_HPP_NAMESPACE::ImageLayout layout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+
+  };
+
+  // C++ wrapper for VkAttachmentReference2. Member layout mirrors the native C
+  // struct exactly, so the conversion operators below may reinterpret_cast
+  // between the two representations without copying.
+  struct AttachmentReference2
+  {
+    using NativeType = VkAttachmentReference2;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentReference2;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR AttachmentReference2(uint32_t attachment_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout layout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), attachment( attachment_ ), layout( layout_ ), aspectMask( aspectMask_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR AttachmentReference2( AttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the native C struct by reinterpreting it as this type
+    // (valid because the layouts are identical).
+    AttachmentReference2( VkAttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT
+      : AttachmentReference2( *reinterpret_cast<AttachmentReference2 const *>( &rhs ) )
+    {}
+
+
+    AttachmentReference2 & operator=( AttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    AttachmentReference2 & operator=( VkAttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentReference2 const *>( &rhs );
+      return *this;
+    }
+
+    // Chainable member setters; each returns *this for fluent initialization.
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 AttachmentReference2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AttachmentReference2 & setAttachment( uint32_t attachment_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachment = attachment_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AttachmentReference2 & setLayout( VULKAN_HPP_NAMESPACE::ImageLayout layout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      layout = layout_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AttachmentReference2 & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      aspectMask = aspectMask_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy views of the equivalent native C struct (relies on identical layout).
+    operator VkAttachmentReference2 const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAttachmentReference2*>( this );
+    }
+
+    operator VkAttachmentReference2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAttachmentReference2*>( this );
+    }
+
+    // reflect(): tuple of references to every member, used by the comparison
+    // operators below when VULKAN_HPP_USE_REFLECT is enabled.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, VULKAN_HPP_NAMESPACE::ImageAspectFlags const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, attachment, layout, aspectMask );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( AttachmentReference2 const & ) const = default;
+#else
+    bool operator==( AttachmentReference2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( attachment == rhs.attachment )
+          && ( layout == rhs.layout )
+          && ( aspectMask == rhs.aspectMask );
+#endif
+    }
+
+    bool operator!=( AttachmentReference2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // Member order and types must match VkAttachmentReference2 exactly
+    // (see the reinterpret_casts above).
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentReference2;
+    const void * pNext = {};
+    uint32_t attachment = {};
+    VULKAN_HPP_NAMESPACE::ImageLayout layout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+    VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
+
+  };
+
+  // Compile-time mapping from StructureType::eAttachmentReference2 to its C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::eAttachmentReference2>
+  {
+    using Type = AttachmentReference2;
+  };
+  // The KHR-suffixed name is an alias of the same type.
+  using AttachmentReference2KHR = AttachmentReference2;
+
+  // C++ wrapper for VkAttachmentReferenceStencilLayout; layout-identical to the
+  // native C struct so the conversion operators below can reinterpret_cast it.
+  // Note: pNext here is non-const (this struct can appear in mutable pNext chains).
+  struct AttachmentReferenceStencilLayout
+  {
+    using NativeType = VkAttachmentReferenceStencilLayout;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentReferenceStencilLayout;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR AttachmentReferenceStencilLayout(VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), stencilLayout( stencilLayout_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR AttachmentReferenceStencilLayout( AttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the native C struct without copying field-by-field.
+    AttachmentReferenceStencilLayout( VkAttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT
+      : AttachmentReferenceStencilLayout( *reinterpret_cast<AttachmentReferenceStencilLayout const *>( &rhs ) )
+    {}
+
+
+    AttachmentReferenceStencilLayout & operator=( AttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    AttachmentReferenceStencilLayout & operator=( VkAttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentReferenceStencilLayout const *>( &rhs );
+      return *this;
+    }
+
+    // Chainable member setters; each returns *this for fluent initialization.
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 AttachmentReferenceStencilLayout & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AttachmentReferenceStencilLayout & setStencilLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stencilLayout = stencilLayout_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy views of the equivalent native C struct (relies on identical layout).
+    operator VkAttachmentReferenceStencilLayout const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAttachmentReferenceStencilLayout*>( this );
+    }
+
+    operator VkAttachmentReferenceStencilLayout &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAttachmentReferenceStencilLayout*>( this );
+    }
+
+    // reflect(): tuple of references to every member, used by the comparison
+    // operators below when VULKAN_HPP_USE_REFLECT is enabled.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ImageLayout const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, stencilLayout );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( AttachmentReferenceStencilLayout const & ) const = default;
+#else
+    bool operator==( AttachmentReferenceStencilLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( stencilLayout == rhs.stencilLayout );
+#endif
+    }
+
+    bool operator!=( AttachmentReferenceStencilLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // Member order and types must match the native C struct exactly.
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentReferenceStencilLayout;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+
+  };
+
+  // Compile-time mapping from the StructureType enumerator to this C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::eAttachmentReferenceStencilLayout>
+  {
+    using Type = AttachmentReferenceStencilLayout;
+  };
+  // The KHR-suffixed name is an alias of the same type.
+  using AttachmentReferenceStencilLayoutKHR = AttachmentReferenceStencilLayout;
+
+  // C++ wrapper for VkAttachmentSampleCountInfoAMD; layout-identical to the
+  // native C struct. Carries a count + pointer pair (colorAttachmentCount /
+  // pColorAttachmentSamples); the ArrayProxy overloads keep the two in sync.
+  struct AttachmentSampleCountInfoAMD
+  {
+    using NativeType = VkAttachmentSampleCountInfoAMD;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentSampleCountInfoAMD;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR AttachmentSampleCountInfoAMD(uint32_t colorAttachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::SampleCountFlagBits * pColorAttachmentSamples_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlagBits depthStencilAttachmentSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), colorAttachmentCount( colorAttachmentCount_ ), pColorAttachmentSamples( pColorAttachmentSamples_ ), depthStencilAttachmentSamples( depthStencilAttachmentSamples_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR AttachmentSampleCountInfoAMD( AttachmentSampleCountInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AttachmentSampleCountInfoAMD( VkAttachmentSampleCountInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+      : AttachmentSampleCountInfoAMD( *reinterpret_cast<AttachmentSampleCountInfoAMD const *>( &rhs ) )
+    {}
+
+    // Enhanced-mode convenience: derive count and pointer from an array proxy.
+    // NOTE(review): the proxy does not own the data — the caller must keep the
+    // backing array alive while this struct is in use.
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    AttachmentSampleCountInfoAMD( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SampleCountFlagBits> const & colorAttachmentSamples_, VULKAN_HPP_NAMESPACE::SampleCountFlagBits depthStencilAttachmentSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), colorAttachmentCount( static_cast<uint32_t>( colorAttachmentSamples_.size() ) ), pColorAttachmentSamples( colorAttachmentSamples_.data() ), depthStencilAttachmentSamples( depthStencilAttachmentSamples_ )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    AttachmentSampleCountInfoAMD & operator=( AttachmentSampleCountInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    AttachmentSampleCountInfoAMD & operator=( VkAttachmentSampleCountInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentSampleCountInfoAMD const *>( &rhs );
+      return *this;
+    }
+
+    // Chainable member setters; each returns *this for fluent initialization.
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 AttachmentSampleCountInfoAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AttachmentSampleCountInfoAMD & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      colorAttachmentCount = colorAttachmentCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AttachmentSampleCountInfoAMD & setPColorAttachmentSamples( const VULKAN_HPP_NAMESPACE::SampleCountFlagBits * pColorAttachmentSamples_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pColorAttachmentSamples = pColorAttachmentSamples_;
+      return *this;
+    }
+
+    // Sets both the count and the pointer from one array proxy, keeping them consistent.
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    AttachmentSampleCountInfoAMD & setColorAttachmentSamples( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SampleCountFlagBits> const & colorAttachmentSamples_ ) VULKAN_HPP_NOEXCEPT
+    {
+      colorAttachmentCount = static_cast<uint32_t>( colorAttachmentSamples_.size() );
+      pColorAttachmentSamples = colorAttachmentSamples_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 AttachmentSampleCountInfoAMD & setDepthStencilAttachmentSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits depthStencilAttachmentSamples_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthStencilAttachmentSamples = depthStencilAttachmentSamples_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy views of the equivalent native C struct (relies on identical layout).
+    operator VkAttachmentSampleCountInfoAMD const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAttachmentSampleCountInfoAMD*>( this );
+    }
+
+    operator VkAttachmentSampleCountInfoAMD &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAttachmentSampleCountInfoAMD*>( this );
+    }
+
+    // reflect(): tuple of references to every member, used by the comparison
+    // operators below when VULKAN_HPP_USE_REFLECT is enabled.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::SampleCountFlagBits * const &, VULKAN_HPP_NAMESPACE::SampleCountFlagBits const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, colorAttachmentCount, pColorAttachmentSamples, depthStencilAttachmentSamples );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( AttachmentSampleCountInfoAMD const & ) const = default;
+#else
+    // Compares pColorAttachmentSamples by pointer identity, not pointed-to contents.
+    bool operator==( AttachmentSampleCountInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( colorAttachmentCount == rhs.colorAttachmentCount )
+          && ( pColorAttachmentSamples == rhs.pColorAttachmentSamples )
+          && ( depthStencilAttachmentSamples == rhs.depthStencilAttachmentSamples );
+#endif
+    }
+
+    bool operator!=( AttachmentSampleCountInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // Member order and types must match the native C struct exactly.
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentSampleCountInfoAMD;
+    const void * pNext = {};
+    uint32_t colorAttachmentCount = {};
+    const VULKAN_HPP_NAMESPACE::SampleCountFlagBits * pColorAttachmentSamples = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlagBits depthStencilAttachmentSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
+
+  };
+
+  // Compile-time mapping from the StructureType enumerator to this C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::eAttachmentSampleCountInfoAMD>
+  {
+    using Type = AttachmentSampleCountInfoAMD;
+  };
+  // The NV-suffixed name is an alias of the same type.
+  using AttachmentSampleCountInfoNV = AttachmentSampleCountInfoAMD;
+
+  // C++ wrapper for VkExtent2D (width/height pair). No sType/pNext: this is a
+  // plain data struct, layout-identical to the native C struct.
+  struct Extent2D
+  {
+    using NativeType = VkExtent2D;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR Extent2D(uint32_t width_ = {}, uint32_t height_ = {}) VULKAN_HPP_NOEXCEPT
+    : width( width_ ), height( height_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR Extent2D( Extent2D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the native C struct without copying field-by-field.
+    Extent2D( VkExtent2D const & rhs ) VULKAN_HPP_NOEXCEPT
+      : Extent2D( *reinterpret_cast<Extent2D const *>( &rhs ) )
+    {}
+
+
+    Extent2D & operator=( Extent2D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    Extent2D & operator=( VkExtent2D const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Extent2D const *>( &rhs );
+      return *this;
+    }
+
+    // Chainable member setters; each returns *this for fluent initialization.
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 Extent2D & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
+    {
+      width = width_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 Extent2D & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
+    {
+      height = height_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy views of the equivalent native C struct (relies on identical layout).
+    operator VkExtent2D const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExtent2D*>( this );
+    }
+
+    operator VkExtent2D &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExtent2D*>( this );
+    }
+
+    // reflect(): tuple of references to both members, used by the comparison
+    // operators below when VULKAN_HPP_USE_REFLECT is enabled.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<uint32_t const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( width, height );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( Extent2D const & ) const = default;
+#else
+    bool operator==( Extent2D const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( width == rhs.width )
+          && ( height == rhs.height );
+#endif
+    }
+
+    bool operator!=( Extent2D const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    uint32_t width = {};
+    uint32_t height = {};
+
+  };
+
+  // C++ wrapper for VkSampleLocationEXT (an x/y float pair). No sType/pNext:
+  // plain data struct, layout-identical to the native C struct.
+  struct SampleLocationEXT
+  {
+    using NativeType = VkSampleLocationEXT;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR SampleLocationEXT(float x_ = {}, float y_ = {}) VULKAN_HPP_NOEXCEPT
+    : x( x_ ), y( y_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SampleLocationEXT( SampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the native C struct without copying field-by-field.
+    SampleLocationEXT( VkSampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SampleLocationEXT( *reinterpret_cast<SampleLocationEXT const *>( &rhs ) )
+    {}
+
+
+    SampleLocationEXT & operator=( SampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SampleLocationEXT & operator=( VkSampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SampleLocationEXT const *>( &rhs );
+      return *this;
+    }
+
+    // Chainable member setters; each returns *this for fluent initialization.
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SampleLocationEXT & setX( float x_ ) VULKAN_HPP_NOEXCEPT
+    {
+      x = x_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SampleLocationEXT & setY( float y_ ) VULKAN_HPP_NOEXCEPT
+    {
+      y = y_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy views of the equivalent native C struct (relies on identical layout).
+    operator VkSampleLocationEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSampleLocationEXT*>( this );
+    }
+
+    operator VkSampleLocationEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSampleLocationEXT*>( this );
+    }
+
+    // reflect(): tuple of references to both members, used by the comparison
+    // operators below when VULKAN_HPP_USE_REFLECT is enabled.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<float const &, float const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( x, y );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SampleLocationEXT const & ) const = default;
+#else
+    // Exact float comparison, mirroring the native struct's value semantics.
+    bool operator==( SampleLocationEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( x == rhs.x )
+          && ( y == rhs.y );
+#endif
+    }
+
+    bool operator!=( SampleLocationEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    float x = {};
+    float y = {};
+
+  };
+
+  // C++ wrapper for VkSampleLocationsInfoEXT; layout-identical to the native C
+  // struct. Carries a count + pointer pair (sampleLocationsCount /
+  // pSampleLocations); the ArrayProxy overloads keep the two in sync.
+  struct SampleLocationsInfoEXT
+  {
+    using NativeType = VkSampleLocationsInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSampleLocationsInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR SampleLocationsInfoEXT(VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize_ = {}, uint32_t sampleLocationsCount_ = {}, const VULKAN_HPP_NAMESPACE::SampleLocationEXT * pSampleLocations_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), sampleLocationsPerPixel( sampleLocationsPerPixel_ ), sampleLocationGridSize( sampleLocationGridSize_ ), sampleLocationsCount( sampleLocationsCount_ ), pSampleLocations( pSampleLocations_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SampleLocationsInfoEXT( SampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SampleLocationsInfoEXT( VkSampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SampleLocationsInfoEXT( *reinterpret_cast<SampleLocationsInfoEXT const *>( &rhs ) )
+    {}
+
+    // Enhanced-mode convenience: derive count and pointer from an array proxy.
+    // NOTE(review): the proxy does not own the data — the caller must keep the
+    // backing array alive while this struct is in use.
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    SampleLocationsInfoEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_, VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SampleLocationEXT> const & sampleLocations_, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), sampleLocationsPerPixel( sampleLocationsPerPixel_ ), sampleLocationGridSize( sampleLocationGridSize_ ), sampleLocationsCount( static_cast<uint32_t>( sampleLocations_.size() ) ), pSampleLocations( sampleLocations_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    SampleLocationsInfoEXT & operator=( SampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SampleLocationsInfoEXT & operator=( VkSampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    // Chainable member setters; each returns *this for fluent initialization.
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT & setSampleLocationsPerPixel( VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sampleLocationsPerPixel = sampleLocationsPerPixel_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT & setSampleLocationGridSize( VULKAN_HPP_NAMESPACE::Extent2D const & sampleLocationGridSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sampleLocationGridSize = sampleLocationGridSize_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT & setSampleLocationsCount( uint32_t sampleLocationsCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sampleLocationsCount = sampleLocationsCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT & setPSampleLocations( const VULKAN_HPP_NAMESPACE::SampleLocationEXT * pSampleLocations_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSampleLocations = pSampleLocations_;
+      return *this;
+    }
+
+    // Sets both the count and the pointer from one array proxy, keeping them consistent.
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    SampleLocationsInfoEXT & setSampleLocations( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SampleLocationEXT> const & sampleLocations_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sampleLocationsCount = static_cast<uint32_t>( sampleLocations_.size() );
+      pSampleLocations = sampleLocations_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy views of the equivalent native C struct (relies on identical layout).
+    operator VkSampleLocationsInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSampleLocationsInfoEXT*>( this );
+    }
+
+    operator VkSampleLocationsInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSampleLocationsInfoEXT*>( this );
+    }
+
+    // reflect(): tuple of references to every member, used by the comparison
+    // operators below when VULKAN_HPP_USE_REFLECT is enabled.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SampleCountFlagBits const &, VULKAN_HPP_NAMESPACE::Extent2D const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::SampleLocationEXT * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, sampleLocationsPerPixel, sampleLocationGridSize, sampleLocationsCount, pSampleLocations );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SampleLocationsInfoEXT const & ) const = default;
+#else
+    // Compares pSampleLocations by pointer identity, not pointed-to contents.
+    bool operator==( SampleLocationsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( sampleLocationsPerPixel == rhs.sampleLocationsPerPixel )
+          && ( sampleLocationGridSize == rhs.sampleLocationGridSize )
+          && ( sampleLocationsCount == rhs.sampleLocationsCount )
+          && ( pSampleLocations == rhs.pSampleLocations );
+#endif
+    }
+
+    bool operator!=( SampleLocationsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // Member order and types must match the native C struct exactly.
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSampleLocationsInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
+    VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize = {};
+    uint32_t sampleLocationsCount = {};
+    const VULKAN_HPP_NAMESPACE::SampleLocationEXT * pSampleLocations = {};
+
+  };
+
+  // Compile-time mapping from the StructureType enumerator to this C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::eSampleLocationsInfoEXT>
+  {
+    using Type = SampleLocationsInfoEXT;
+  };
+
+  // C++ wrapper for VkAttachmentSampleLocationsEXT (attachment index plus its
+  // sample-locations info). No sType/pNext; layout-identical to the C struct.
+  struct AttachmentSampleLocationsEXT
+  {
+    using NativeType = VkAttachmentSampleLocationsEXT;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR AttachmentSampleLocationsEXT(uint32_t attachmentIndex_ = {}, VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {}) VULKAN_HPP_NOEXCEPT
+    : attachmentIndex( attachmentIndex_ ), sampleLocationsInfo( sampleLocationsInfo_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR AttachmentSampleLocationsEXT( AttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the native C struct without copying field-by-field.
+    AttachmentSampleLocationsEXT( VkAttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : AttachmentSampleLocationsEXT( *reinterpret_cast<AttachmentSampleLocationsEXT const *>( &rhs ) )
+    {}
+
+
+    AttachmentSampleLocationsEXT & operator=( AttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    AttachmentSampleLocationsEXT & operator=( VkAttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT const *>( &rhs );
+      return *this;
+    }
+
+    // Chainable member setters; each returns *this for fluent initialization.
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 AttachmentSampleLocationsEXT & setAttachmentIndex( uint32_t attachmentIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachmentIndex = attachmentIndex_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AttachmentSampleLocationsEXT & setSampleLocationsInfo( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sampleLocationsInfo = sampleLocationsInfo_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy views of the equivalent native C struct (relies on identical layout).
+    operator VkAttachmentSampleLocationsEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAttachmentSampleLocationsEXT*>( this );
+    }
+
+    operator VkAttachmentSampleLocationsEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAttachmentSampleLocationsEXT*>( this );
+    }
+
+    // reflect(): tuple of references to both members, used by the comparison
+    // operators below when VULKAN_HPP_USE_REFLECT is enabled.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<uint32_t const &, VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( attachmentIndex, sampleLocationsInfo );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( AttachmentSampleLocationsEXT const & ) const = default;
+#else
+    bool operator==( AttachmentSampleLocationsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( attachmentIndex == rhs.attachmentIndex )
+          && ( sampleLocationsInfo == rhs.sampleLocationsInfo );
+#endif
+    }
+
+    bool operator!=( AttachmentSampleLocationsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    uint32_t attachmentIndex = {};
+    VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo = {};
+
+  };
+
+  // C++ wrapper for VkBaseInStructure: the generic read-only head (sType +
+  // const pNext) shared by all chainable input structures. Unlike most
+  // wrappers, the constructor takes sType explicitly and is not constexpr.
+  struct BaseInStructure
+  {
+    using NativeType = VkBaseInStructure;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+BaseInStructure(VULKAN_HPP_NAMESPACE::StructureType sType_ = VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo, const struct VULKAN_HPP_NAMESPACE::BaseInStructure * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : sType( sType_ ), pNext( pNext_ )
+    {}
+
+    BaseInStructure( BaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the native C struct without copying field-by-field.
+    BaseInStructure( VkBaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT
+      : BaseInStructure( *reinterpret_cast<BaseInStructure const *>( &rhs ) )
+    {}
+
+
+    BaseInStructure & operator=( BaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    BaseInStructure & operator=( VkBaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BaseInStructure const *>( &rhs );
+      return *this;
+    }
+
+    // Only pNext has a setter; sType is fixed at construction time.
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 BaseInStructure & setPNext( const struct VULKAN_HPP_NAMESPACE::BaseInStructure * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy views of the equivalent native C struct (relies on identical layout).
+    operator VkBaseInStructure const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBaseInStructure*>( this );
+    }
+
+    operator VkBaseInStructure &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBaseInStructure*>( this );
+    }
+
+    // reflect(): tuple of references to both members, used by the comparison
+    // operators below when VULKAN_HPP_USE_REFLECT is enabled.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const struct VULKAN_HPP_NAMESPACE::BaseInStructure * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( BaseInStructure const & ) const = default;
+#else
+    bool operator==( BaseInStructure const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext );
+#endif
+    }
+
+    bool operator!=( BaseInStructure const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo;
+    const struct VULKAN_HPP_NAMESPACE::BaseInStructure * pNext = {};
+
+  };
+
+  // C++ wrapper for VkBaseOutStructure: the generic mutable head (sType +
+  // non-const pNext) shared by all chainable output structures. As with
+  // BaseInStructure, sType is a constructor argument and there is no constexpr.
+  struct BaseOutStructure
+  {
+    using NativeType = VkBaseOutStructure;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+BaseOutStructure(VULKAN_HPP_NAMESPACE::StructureType sType_ = VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo, struct VULKAN_HPP_NAMESPACE::BaseOutStructure * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : sType( sType_ ), pNext( pNext_ )
+    {}
+
+    BaseOutStructure( BaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the native C struct without copying field-by-field.
+    BaseOutStructure( VkBaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT
+      : BaseOutStructure( *reinterpret_cast<BaseOutStructure const *>( &rhs ) )
+    {}
+
+
+    BaseOutStructure & operator=( BaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    BaseOutStructure & operator=( VkBaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BaseOutStructure const *>( &rhs );
+      return *this;
+    }
+
+    // Only pNext has a setter; sType is fixed at construction time.
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 BaseOutStructure & setPNext( struct VULKAN_HPP_NAMESPACE::BaseOutStructure * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy views of the equivalent native C struct (relies on identical layout).
+    operator VkBaseOutStructure const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBaseOutStructure*>( this );
+    }
+
+    operator VkBaseOutStructure &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBaseOutStructure*>( this );
+    }
+
+    // reflect(): tuple of references to both members, used by the comparison
+    // operators below when VULKAN_HPP_USE_REFLECT is enabled.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, struct VULKAN_HPP_NAMESPACE::BaseOutStructure * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( BaseOutStructure const & ) const = default;
+#else
+    bool operator==( BaseOutStructure const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext );
+#endif
+    }
+
+    bool operator!=( BaseOutStructure const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo;
+    struct VULKAN_HPP_NAMESPACE::BaseOutStructure * pNext = {};
+
+  };
+
+  // C++ wrapper for VkBindAccelerationStructureMemoryInfoNV: parameters for
+  // binding device memory to an NV acceleration structure (handle, memory,
+  // offset, and an optional device-index array).
+  struct BindAccelerationStructureMemoryInfoNV
+  {
+    using NativeType = VkBindAccelerationStructureMemoryInfoNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindAccelerationStructureMemoryInfoNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR BindAccelerationStructureMemoryInfoNV(VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ = {}, VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, uint32_t deviceIndexCount_ = {}, const uint32_t * pDeviceIndices_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), accelerationStructure( accelerationStructure_ ), memory( memory_ ), memoryOffset( memoryOffset_ ), deviceIndexCount( deviceIndexCount_ ), pDeviceIndices( pDeviceIndices_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BindAccelerationStructureMemoryInfoNV( BindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    BindAccelerationStructureMemoryInfoNV( VkBindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : BindAccelerationStructureMemoryInfoNV( *reinterpret_cast<BindAccelerationStructureMemoryInfoNV const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Convenience constructor: derives deviceIndexCount/pDeviceIndices from the
+    // ArrayProxy; the proxy's backing storage must outlive this struct.
+    BindAccelerationStructureMemoryInfoNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_, VULKAN_HPP_NAMESPACE::DeviceMemory memory_, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), accelerationStructure( accelerationStructure_ ), memory( memory_ ), memoryOffset( memoryOffset_ ), deviceIndexCount( static_cast<uint32_t>( deviceIndices_.size() ) ), pDeviceIndices( deviceIndices_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    BindAccelerationStructureMemoryInfoNV & operator=( BindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    BindAccelerationStructureMemoryInfoNV & operator=( VkBindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters (each returns *this for fluent use).
+    VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV & setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ ) VULKAN_HPP_NOEXCEPT
+    {
+      accelerationStructure = accelerationStructure_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memory = memory_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memoryOffset = memoryOffset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV & setDeviceIndexCount( uint32_t deviceIndexCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceIndexCount = deviceIndexCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV & setPDeviceIndices( const uint32_t * pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDeviceIndices = pDeviceIndices_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets both count and pointer from the proxy in one call.
+    BindAccelerationStructureMemoryInfoNV & setDeviceIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceIndexCount = static_cast<uint32_t>( deviceIndices_.size() );
+      pDeviceIndices = deviceIndices_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views of this object as the C API type.
+    operator VkBindAccelerationStructureMemoryInfoNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV*>( this );
+    }
+
+    operator VkBindAccelerationStructureMemoryInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBindAccelerationStructureMemoryInfoNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::AccelerationStructureNV const &, VULKAN_HPP_NAMESPACE::DeviceMemory const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, uint32_t const &, const uint32_t * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, accelerationStructure, memory, memoryOffset, deviceIndexCount, pDeviceIndices );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( BindAccelerationStructureMemoryInfoNV const & ) const = default;
+#else
+    // Member-wise equality; pointer members compare by address, not contents.
+    bool operator==( BindAccelerationStructureMemoryInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( accelerationStructure == rhs.accelerationStructure )
+          && ( memory == rhs.memory )
+          && ( memoryOffset == rhs.memoryOffset )
+          && ( deviceIndexCount == rhs.deviceIndexCount )
+          && ( pDeviceIndices == rhs.pDeviceIndices );
+#endif
+    }
+
+    bool operator!=( BindAccelerationStructureMemoryInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindAccelerationStructureMemoryInfoNV;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure = {};
+    VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {};
+    uint32_t deviceIndexCount = {};
+    const uint32_t * pDeviceIndices = {};
+
+  };
+
+  // Maps the StructureType enum value back to this C++ type (sType -> type lookup).
+  template <>
+  struct CppType<StructureType, StructureType::eBindAccelerationStructureMemoryInfoNV>
+  {
+    using Type = BindAccelerationStructureMemoryInfoNV;
+  };
+
+  // C++ wrapper for VkBindBufferMemoryDeviceGroupInfo: pNext-chain extension to
+  // a buffer-memory bind carrying per-physical-device instance indices.
+  struct BindBufferMemoryDeviceGroupInfo
+  {
+    using NativeType = VkBindBufferMemoryDeviceGroupInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindBufferMemoryDeviceGroupInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR BindBufferMemoryDeviceGroupInfo(uint32_t deviceIndexCount_ = {}, const uint32_t * pDeviceIndices_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), deviceIndexCount( deviceIndexCount_ ), pDeviceIndices( pDeviceIndices_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BindBufferMemoryDeviceGroupInfo( BindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    BindBufferMemoryDeviceGroupInfo( VkBindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : BindBufferMemoryDeviceGroupInfo( *reinterpret_cast<BindBufferMemoryDeviceGroupInfo const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Convenience constructor: count/pointer derived from the ArrayProxy; the
+    // proxy's backing storage must outlive this struct.
+    BindBufferMemoryDeviceGroupInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), deviceIndexCount( static_cast<uint32_t>( deviceIndices_.size() ) ), pDeviceIndices( deviceIndices_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    BindBufferMemoryDeviceGroupInfo & operator=( BindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    BindBufferMemoryDeviceGroupInfo & operator=( VkBindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindBufferMemoryDeviceGroupInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters (each returns *this for fluent use).
+    VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryDeviceGroupInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryDeviceGroupInfo & setDeviceIndexCount( uint32_t deviceIndexCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceIndexCount = deviceIndexCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryDeviceGroupInfo & setPDeviceIndices( const uint32_t * pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDeviceIndices = pDeviceIndices_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets both count and pointer from the proxy in one call.
+    BindBufferMemoryDeviceGroupInfo & setDeviceIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceIndexCount = static_cast<uint32_t>( deviceIndices_.size() );
+      pDeviceIndices = deviceIndices_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views of this object as the C API type.
+    operator VkBindBufferMemoryDeviceGroupInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBindBufferMemoryDeviceGroupInfo*>( this );
+    }
+
+    operator VkBindBufferMemoryDeviceGroupInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBindBufferMemoryDeviceGroupInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const uint32_t * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, deviceIndexCount, pDeviceIndices );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( BindBufferMemoryDeviceGroupInfo const & ) const = default;
+#else
+    // Member-wise equality; pDeviceIndices compares by address, not contents.
+    bool operator==( BindBufferMemoryDeviceGroupInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( deviceIndexCount == rhs.deviceIndexCount )
+          && ( pDeviceIndices == rhs.pDeviceIndices );
+#endif
+    }
+
+    bool operator!=( BindBufferMemoryDeviceGroupInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindBufferMemoryDeviceGroupInfo;
+    const void * pNext = {};
+    uint32_t deviceIndexCount = {};
+    const uint32_t * pDeviceIndices = {};
+
+  };
+
+  // Maps the StructureType enum value back to this C++ type (sType -> type lookup).
+  template <>
+  struct CppType<StructureType, StructureType::eBindBufferMemoryDeviceGroupInfo>
+  {
+    using Type = BindBufferMemoryDeviceGroupInfo;
+  };
+  // KHR alias kept for source compatibility with the promoted-from extension name.
+  using BindBufferMemoryDeviceGroupInfoKHR = BindBufferMemoryDeviceGroupInfo;
+
+  // C++ wrapper for VkBindBufferMemoryInfo: parameters for binding device
+  // memory to a buffer (buffer handle, memory handle, byte offset).
+  struct BindBufferMemoryInfo
+  {
+    using NativeType = VkBindBufferMemoryInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindBufferMemoryInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR BindBufferMemoryInfo(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), buffer( buffer_ ), memory( memory_ ), memoryOffset( memoryOffset_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BindBufferMemoryInfo( BindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpretation (layout-compatible).
+    BindBufferMemoryInfo( VkBindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : BindBufferMemoryInfo( *reinterpret_cast<BindBufferMemoryInfo const *>( &rhs ) )
+    {}
+
+
+    BindBufferMemoryInfo & operator=( BindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    BindBufferMemoryInfo & operator=( VkBindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters (each returns *this for fluent use).
+    VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryInfo & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memory = memory_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryInfo & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memoryOffset = memoryOffset_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views of this object as the C API type.
+    operator VkBindBufferMemoryInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBindBufferMemoryInfo*>( this );
+    }
+
+    operator VkBindBufferMemoryInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBindBufferMemoryInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Buffer const &, VULKAN_HPP_NAMESPACE::DeviceMemory const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, buffer, memory, memoryOffset );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( BindBufferMemoryInfo const & ) const = default;
+#else
+    // Member-wise equality; pNext compares by address, not chain contents.
+    bool operator==( BindBufferMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( buffer == rhs.buffer )
+          && ( memory == rhs.memory )
+          && ( memoryOffset == rhs.memoryOffset );
+#endif
+    }
+
+    bool operator!=( BindBufferMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindBufferMemoryInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+    VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {};
+
+  };
+
+  // Maps the StructureType enum value back to this C++ type (sType -> type lookup).
+  template <>
+  struct CppType<StructureType, StructureType::eBindBufferMemoryInfo>
+  {
+    using Type = BindBufferMemoryInfo;
+  };
+  // KHR alias kept for source compatibility with the promoted-from extension name.
+  using BindBufferMemoryInfoKHR = BindBufferMemoryInfo;
+
+  // C++ wrapper for VkOffset2D: a signed 2D coordinate (x, y). Plain data
+  // struct with no sType/pNext header.
+  struct Offset2D
+  {
+    using NativeType = VkOffset2D;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR Offset2D(int32_t x_ = {}, int32_t y_ = {}) VULKAN_HPP_NOEXCEPT
+    : x( x_ ), y( y_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR Offset2D( Offset2D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpretation (layout-compatible).
+    Offset2D( VkOffset2D const & rhs ) VULKAN_HPP_NOEXCEPT
+      : Offset2D( *reinterpret_cast<Offset2D const *>( &rhs ) )
+    {}
+
+
+    Offset2D & operator=( Offset2D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    Offset2D & operator=( VkOffset2D const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Offset2D const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters (each returns *this for fluent use).
+    VULKAN_HPP_CONSTEXPR_14 Offset2D & setX( int32_t x_ ) VULKAN_HPP_NOEXCEPT
+    {
+      x = x_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 Offset2D & setY( int32_t y_ ) VULKAN_HPP_NOEXCEPT
+    {
+      y = y_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views of this object as the C API type.
+    operator VkOffset2D const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkOffset2D*>( this );
+    }
+
+    operator VkOffset2D &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkOffset2D*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<int32_t const &, int32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( x, y );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( Offset2D const & ) const = default;
+#else
+    bool operator==( Offset2D const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( x == rhs.x )
+          && ( y == rhs.y );
+#endif
+    }
+
+    bool operator!=( Offset2D const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    int32_t x = {};
+    int32_t y = {};
+
+  };
+
+  // C++ wrapper for VkRect2D: an axis-aligned 2D rectangle expressed as a
+  // signed offset plus an extent. Plain data struct with no sType/pNext header.
+  struct Rect2D
+  {
+    using NativeType = VkRect2D;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR Rect2D(VULKAN_HPP_NAMESPACE::Offset2D offset_ = {}, VULKAN_HPP_NAMESPACE::Extent2D extent_ = {}) VULKAN_HPP_NOEXCEPT
+    : offset( offset_ ), extent( extent_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR Rect2D( Rect2D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpretation (layout-compatible).
+    Rect2D( VkRect2D const & rhs ) VULKAN_HPP_NOEXCEPT
+      : Rect2D( *reinterpret_cast<Rect2D const *>( &rhs ) )
+    {}
+
+
+    Rect2D & operator=( Rect2D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    Rect2D & operator=( VkRect2D const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Rect2D const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters (each returns *this for fluent use).
+    VULKAN_HPP_CONSTEXPR_14 Rect2D & setOffset( VULKAN_HPP_NAMESPACE::Offset2D const & offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 Rect2D & setExtent( VULKAN_HPP_NAMESPACE::Extent2D const & extent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extent = extent_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views of this object as the C API type.
+    operator VkRect2D const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRect2D*>( this );
+    }
+
+    operator VkRect2D &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRect2D*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::Offset2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( offset, extent );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( Rect2D const & ) const = default;
+#else
+    bool operator==( Rect2D const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( offset == rhs.offset )
+          && ( extent == rhs.extent );
+#endif
+    }
+
+    bool operator!=( Rect2D const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::Offset2D offset = {};
+    VULKAN_HPP_NAMESPACE::Extent2D extent = {};
+
+  };
+
+  // C++ wrapper for VkBindImageMemoryDeviceGroupInfo: pNext-chain extension to
+  // an image-memory bind carrying per-physical-device indices and (optionally)
+  // split-instance bind regions.
+  struct BindImageMemoryDeviceGroupInfo
+  {
+    using NativeType = VkBindImageMemoryDeviceGroupInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImageMemoryDeviceGroupInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR BindImageMemoryDeviceGroupInfo(uint32_t deviceIndexCount_ = {}, const uint32_t * pDeviceIndices_ = {}, uint32_t splitInstanceBindRegionCount_ = {}, const VULKAN_HPP_NAMESPACE::Rect2D * pSplitInstanceBindRegions_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), deviceIndexCount( deviceIndexCount_ ), pDeviceIndices( pDeviceIndices_ ), splitInstanceBindRegionCount( splitInstanceBindRegionCount_ ), pSplitInstanceBindRegions( pSplitInstanceBindRegions_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BindImageMemoryDeviceGroupInfo( BindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    BindImageMemoryDeviceGroupInfo( VkBindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : BindImageMemoryDeviceGroupInfo( *reinterpret_cast<BindImageMemoryDeviceGroupInfo const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Convenience constructor: both count/pointer pairs derived from the
+    // ArrayProxies; the proxies' backing storage must outlive this struct.
+    BindImageMemoryDeviceGroupInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & splitInstanceBindRegions_ = {}, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), deviceIndexCount( static_cast<uint32_t>( deviceIndices_.size() ) ), pDeviceIndices( deviceIndices_.data() ), splitInstanceBindRegionCount( static_cast<uint32_t>( splitInstanceBindRegions_.size() ) ), pSplitInstanceBindRegions( splitInstanceBindRegions_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    BindImageMemoryDeviceGroupInfo & operator=( BindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    BindImageMemoryDeviceGroupInfo & operator=( VkBindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindImageMemoryDeviceGroupInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters (each returns *this for fluent use).
+    VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo & setDeviceIndexCount( uint32_t deviceIndexCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceIndexCount = deviceIndexCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo & setPDeviceIndices( const uint32_t * pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDeviceIndices = pDeviceIndices_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets count and pointer from the proxy in one call.
+    BindImageMemoryDeviceGroupInfo & setDeviceIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceIndexCount = static_cast<uint32_t>( deviceIndices_.size() );
+      pDeviceIndices = deviceIndices_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo & setSplitInstanceBindRegionCount( uint32_t splitInstanceBindRegionCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      splitInstanceBindRegionCount = splitInstanceBindRegionCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo & setPSplitInstanceBindRegions( const VULKAN_HPP_NAMESPACE::Rect2D * pSplitInstanceBindRegions_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSplitInstanceBindRegions = pSplitInstanceBindRegions_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets count and pointer from the proxy in one call.
+    BindImageMemoryDeviceGroupInfo & setSplitInstanceBindRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & splitInstanceBindRegions_ ) VULKAN_HPP_NOEXCEPT
+    {
+      splitInstanceBindRegionCount = static_cast<uint32_t>( splitInstanceBindRegions_.size() );
+      pSplitInstanceBindRegions = splitInstanceBindRegions_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views of this object as the C API type.
+    operator VkBindImageMemoryDeviceGroupInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBindImageMemoryDeviceGroupInfo*>( this );
+    }
+
+    operator VkBindImageMemoryDeviceGroupInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBindImageMemoryDeviceGroupInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const uint32_t * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Rect2D * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, deviceIndexCount, pDeviceIndices, splitInstanceBindRegionCount, pSplitInstanceBindRegions );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( BindImageMemoryDeviceGroupInfo const & ) const = default;
+#else
+    // Member-wise equality; array pointers compare by address, not contents.
+    bool operator==( BindImageMemoryDeviceGroupInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( deviceIndexCount == rhs.deviceIndexCount )
+          && ( pDeviceIndices == rhs.pDeviceIndices )
+          && ( splitInstanceBindRegionCount == rhs.splitInstanceBindRegionCount )
+          && ( pSplitInstanceBindRegions == rhs.pSplitInstanceBindRegions );
+#endif
+    }
+
+    bool operator!=( BindImageMemoryDeviceGroupInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImageMemoryDeviceGroupInfo;
+    const void * pNext = {};
+    uint32_t deviceIndexCount = {};
+    const uint32_t * pDeviceIndices = {};
+    uint32_t splitInstanceBindRegionCount = {};
+    const VULKAN_HPP_NAMESPACE::Rect2D * pSplitInstanceBindRegions = {};
+
+  };
+
+  // Maps the StructureType enum value back to this C++ type (sType -> type lookup).
+  template <>
+  struct CppType<StructureType, StructureType::eBindImageMemoryDeviceGroupInfo>
+  {
+    using Type = BindImageMemoryDeviceGroupInfo;
+  };
+  // KHR alias kept for source compatibility with the promoted-from extension name.
+  using BindImageMemoryDeviceGroupInfoKHR = BindImageMemoryDeviceGroupInfo;
+
+  // C++ wrapper for VkBindImageMemoryInfo: parameters for binding device
+  // memory to an image (image handle, memory handle, byte offset).
+  struct BindImageMemoryInfo
+  {
+    using NativeType = VkBindImageMemoryInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImageMemoryInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR BindImageMemoryInfo(VULKAN_HPP_NAMESPACE::Image image_ = {}, VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), image( image_ ), memory( memory_ ), memoryOffset( memoryOffset_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BindImageMemoryInfo( BindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpretation (layout-compatible).
+    BindImageMemoryInfo( VkBindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : BindImageMemoryInfo( *reinterpret_cast<BindImageMemoryInfo const *>( &rhs ) )
+    {}
+
+
+    BindImageMemoryInfo & operator=( BindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    BindImageMemoryInfo & operator=( VkBindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindImageMemoryInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters (each returns *this for fluent use).
+    VULKAN_HPP_CONSTEXPR_14 BindImageMemoryInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BindImageMemoryInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
+    {
+      image = image_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BindImageMemoryInfo & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memory = memory_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BindImageMemoryInfo & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memoryOffset = memoryOffset_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views of this object as the C API type.
+    operator VkBindImageMemoryInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBindImageMemoryInfo*>( this );
+    }
+
+    operator VkBindImageMemoryInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBindImageMemoryInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Image const &, VULKAN_HPP_NAMESPACE::DeviceMemory const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, image, memory, memoryOffset );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( BindImageMemoryInfo const & ) const = default;
+#else
+    // Member-wise equality; pNext compares by address, not chain contents.
+    bool operator==( BindImageMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( image == rhs.image )
+          && ( memory == rhs.memory )
+          && ( memoryOffset == rhs.memoryOffset );
+#endif
+    }
+
+    bool operator!=( BindImageMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImageMemoryInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Image image = {};
+    VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {};
+
+  };
+
+  // Maps the StructureType enum value back to this C++ type (sType -> type lookup).
+  template <>
+  struct CppType<StructureType, StructureType::eBindImageMemoryInfo>
+  {
+    using Type = BindImageMemoryInfo;
+  };
+  // KHR alias kept for source compatibility with the promoted-from extension name.
+  using BindImageMemoryInfoKHR = BindImageMemoryInfo;
+
+  struct BindImageMemorySwapchainInfoKHR
+  {
+    using NativeType = VkBindImageMemorySwapchainInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImageMemorySwapchainInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR BindImageMemorySwapchainInfoKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {}, uint32_t imageIndex_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), swapchain( swapchain_ ), imageIndex( imageIndex_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BindImageMemorySwapchainInfoKHR( BindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    BindImageMemorySwapchainInfoKHR( VkBindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : BindImageMemorySwapchainInfoKHR( *reinterpret_cast<BindImageMemorySwapchainInfoKHR const *>( &rhs ) )
+    {}
+
+
+    BindImageMemorySwapchainInfoKHR & operator=( BindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    BindImageMemorySwapchainInfoKHR & operator=( VkBindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindImageMemorySwapchainInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 BindImageMemorySwapchainInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BindImageMemorySwapchainInfoKHR & setSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT
+    {
+      swapchain = swapchain_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BindImageMemorySwapchainInfoKHR & setImageIndex( uint32_t imageIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageIndex = imageIndex_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkBindImageMemorySwapchainInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBindImageMemorySwapchainInfoKHR*>( this );
+    }
+
+    operator VkBindImageMemorySwapchainInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBindImageMemorySwapchainInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SwapchainKHR const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, swapchain, imageIndex );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( BindImageMemorySwapchainInfoKHR const & ) const = default;
+#else
+    bool operator==( BindImageMemorySwapchainInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( swapchain == rhs.swapchain )
+          && ( imageIndex == rhs.imageIndex );
+#endif
+    }
+
+    bool operator!=( BindImageMemorySwapchainInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImageMemorySwapchainInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {};
+    uint32_t imageIndex = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eBindImageMemorySwapchainInfoKHR>
+  {
+    using Type = BindImageMemorySwapchainInfoKHR;
+  };
+
+  struct BindImagePlaneMemoryInfo
+  {
+    using NativeType = VkBindImagePlaneMemoryInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImagePlaneMemoryInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR BindImagePlaneMemoryInfo(VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), planeAspect( planeAspect_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BindImagePlaneMemoryInfo( BindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    BindImagePlaneMemoryInfo( VkBindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : BindImagePlaneMemoryInfo( *reinterpret_cast<BindImagePlaneMemoryInfo const *>( &rhs ) )
+    {}
+
+
+    BindImagePlaneMemoryInfo & operator=( BindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    BindImagePlaneMemoryInfo & operator=( VkBindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 BindImagePlaneMemoryInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BindImagePlaneMemoryInfo & setPlaneAspect( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ ) VULKAN_HPP_NOEXCEPT
+    {
+      planeAspect = planeAspect_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkBindImagePlaneMemoryInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBindImagePlaneMemoryInfo*>( this );
+    }
+
+    operator VkBindImagePlaneMemoryInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBindImagePlaneMemoryInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageAspectFlagBits const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, planeAspect );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( BindImagePlaneMemoryInfo const & ) const = default;
+#else
+    bool operator==( BindImagePlaneMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( planeAspect == rhs.planeAspect );
+#endif
+    }
+
+    bool operator!=( BindImagePlaneMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImagePlaneMemoryInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor;
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eBindImagePlaneMemoryInfo>
+  {
+    using Type = BindImagePlaneMemoryInfo;
+  };
+  using BindImagePlaneMemoryInfoKHR = BindImagePlaneMemoryInfo;
+
+  struct BindIndexBufferIndirectCommandNV
+  {
+    using NativeType = VkBindIndexBufferIndirectCommandNV;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR BindIndexBufferIndirectCommandNV(VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ = {}, uint32_t size_ = {}, VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16) VULKAN_HPP_NOEXCEPT
+    : bufferAddress( bufferAddress_ ), size( size_ ), indexType( indexType_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BindIndexBufferIndirectCommandNV( BindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    BindIndexBufferIndirectCommandNV( VkBindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : BindIndexBufferIndirectCommandNV( *reinterpret_cast<BindIndexBufferIndirectCommandNV const *>( &rhs ) )
+    {}
+
+
+    BindIndexBufferIndirectCommandNV & operator=( BindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    BindIndexBufferIndirectCommandNV & operator=( VkBindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindIndexBufferIndirectCommandNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 BindIndexBufferIndirectCommandNV & setBufferAddress( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferAddress = bufferAddress_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BindIndexBufferIndirectCommandNV & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT
+    {
+      size = size_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BindIndexBufferIndirectCommandNV & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      indexType = indexType_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkBindIndexBufferIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBindIndexBufferIndirectCommandNV*>( this );
+    }
+
+    operator VkBindIndexBufferIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBindIndexBufferIndirectCommandNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::DeviceAddress const &, uint32_t const &, VULKAN_HPP_NAMESPACE::IndexType const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( bufferAddress, size, indexType );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( BindIndexBufferIndirectCommandNV const & ) const = default;
+#else
+    bool operator==( BindIndexBufferIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( bufferAddress == rhs.bufferAddress )
+          && ( size == rhs.size )
+          && ( indexType == rhs.indexType );
+#endif
+    }
+
+    bool operator!=( BindIndexBufferIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress = {};
+    uint32_t size = {};
+    VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16;
+
+  };
+
+  struct BindShaderGroupIndirectCommandNV
+  {
+    using NativeType = VkBindShaderGroupIndirectCommandNV;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR BindShaderGroupIndirectCommandNV(uint32_t groupIndex_ = {}) VULKAN_HPP_NOEXCEPT
+    : groupIndex( groupIndex_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BindShaderGroupIndirectCommandNV( BindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    BindShaderGroupIndirectCommandNV( VkBindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : BindShaderGroupIndirectCommandNV( *reinterpret_cast<BindShaderGroupIndirectCommandNV const *>( &rhs ) )
+    {}
+
+
+    BindShaderGroupIndirectCommandNV & operator=( BindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    BindShaderGroupIndirectCommandNV & operator=( VkBindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindShaderGroupIndirectCommandNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 BindShaderGroupIndirectCommandNV & setGroupIndex( uint32_t groupIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      groupIndex = groupIndex_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkBindShaderGroupIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBindShaderGroupIndirectCommandNV*>( this );
+    }
+
+    operator VkBindShaderGroupIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBindShaderGroupIndirectCommandNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( groupIndex );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( BindShaderGroupIndirectCommandNV const & ) const = default;
+#else
+    bool operator==( BindShaderGroupIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( groupIndex == rhs.groupIndex );
+#endif
+    }
+
+    bool operator!=( BindShaderGroupIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    uint32_t groupIndex = {};
+
+  };
+
+  struct SparseMemoryBind
+  {
+    using NativeType = VkSparseMemoryBind;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR SparseMemoryBind(VULKAN_HPP_NAMESPACE::DeviceSize resourceOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ = {}) VULKAN_HPP_NOEXCEPT
+    : resourceOffset( resourceOffset_ ), size( size_ ), memory( memory_ ), memoryOffset( memoryOffset_ ), flags( flags_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SparseMemoryBind( SparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SparseMemoryBind( VkSparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SparseMemoryBind( *reinterpret_cast<SparseMemoryBind const *>( &rhs ) )
+    {}
+
+
+    SparseMemoryBind & operator=( SparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SparseMemoryBind & operator=( VkSparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseMemoryBind const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SparseMemoryBind & setResourceOffset( VULKAN_HPP_NAMESPACE::DeviceSize resourceOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      resourceOffset = resourceOffset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SparseMemoryBind & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
+    {
+      size = size_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SparseMemoryBind & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memory = memory_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SparseMemoryBind & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memoryOffset = memoryOffset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SparseMemoryBind & setFlags( VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkSparseMemoryBind const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSparseMemoryBind*>( this );
+    }
+
+    operator VkSparseMemoryBind &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSparseMemoryBind*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceMemory const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( resourceOffset, size, memory, memoryOffset, flags );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SparseMemoryBind const & ) const = default;
+#else
+    bool operator==( SparseMemoryBind const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( resourceOffset == rhs.resourceOffset )
+          && ( size == rhs.size )
+          && ( memory == rhs.memory )
+          && ( memoryOffset == rhs.memoryOffset )
+          && ( flags == rhs.flags );
+#endif
+    }
+
+    bool operator!=( SparseMemoryBind const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::DeviceSize resourceOffset = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+    VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {};
+    VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags = {};
+
+  };
+
+  struct SparseBufferMemoryBindInfo
+  {
+    using NativeType = VkSparseBufferMemoryBindInfo;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR SparseBufferMemoryBindInfo(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, uint32_t bindCount_ = {}, const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds_ = {}) VULKAN_HPP_NOEXCEPT
+    : buffer( buffer_ ), bindCount( bindCount_ ), pBinds( pBinds_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SparseBufferMemoryBindInfo( SparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SparseBufferMemoryBindInfo( VkSparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SparseBufferMemoryBindInfo( *reinterpret_cast<SparseBufferMemoryBindInfo const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    SparseBufferMemoryBindInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseMemoryBind> const & binds_ )
+    : buffer( buffer_ ), bindCount( static_cast<uint32_t>( binds_.size() ) ), pBinds( binds_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    SparseBufferMemoryBindInfo & operator=( SparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SparseBufferMemoryBindInfo & operator=( VkSparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SparseBufferMemoryBindInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SparseBufferMemoryBindInfo & setBindCount( uint32_t bindCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bindCount = bindCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SparseBufferMemoryBindInfo & setPBinds( const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pBinds = pBinds_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    SparseBufferMemoryBindInfo & setBinds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseMemoryBind> const & binds_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bindCount = static_cast<uint32_t>( binds_.size() );
+      pBinds = binds_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkSparseBufferMemoryBindInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSparseBufferMemoryBindInfo*>( this );
+    }
+
+    operator VkSparseBufferMemoryBindInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSparseBufferMemoryBindInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::Buffer const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::SparseMemoryBind * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( buffer, bindCount, pBinds );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SparseBufferMemoryBindInfo const & ) const = default;
+#else
+    bool operator==( SparseBufferMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( buffer == rhs.buffer )
+          && ( bindCount == rhs.bindCount )
+          && ( pBinds == rhs.pBinds );
+#endif
+    }
+
+    bool operator!=( SparseBufferMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+    uint32_t bindCount = {};
+    const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds = {};
+
+  };
+
+  struct SparseImageOpaqueMemoryBindInfo
+  {
+    using NativeType = VkSparseImageOpaqueMemoryBindInfo;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR SparseImageOpaqueMemoryBindInfo(VULKAN_HPP_NAMESPACE::Image image_ = {}, uint32_t bindCount_ = {}, const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds_ = {}) VULKAN_HPP_NOEXCEPT
+    : image( image_ ), bindCount( bindCount_ ), pBinds( pBinds_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SparseImageOpaqueMemoryBindInfo( SparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SparseImageOpaqueMemoryBindInfo( VkSparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SparseImageOpaqueMemoryBindInfo( *reinterpret_cast<SparseImageOpaqueMemoryBindInfo const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    SparseImageOpaqueMemoryBindInfo( VULKAN_HPP_NAMESPACE::Image image_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseMemoryBind> const & binds_ )
+    : image( image_ ), bindCount( static_cast<uint32_t>( binds_.size() ) ), pBinds( binds_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    SparseImageOpaqueMemoryBindInfo & operator=( SparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SparseImageOpaqueMemoryBindInfo & operator=( VkSparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SparseImageOpaqueMemoryBindInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
+    {
+      image = image_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SparseImageOpaqueMemoryBindInfo & setBindCount( uint32_t bindCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bindCount = bindCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SparseImageOpaqueMemoryBindInfo & setPBinds( const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pBinds = pBinds_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    SparseImageOpaqueMemoryBindInfo & setBinds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseMemoryBind> const & binds_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bindCount = static_cast<uint32_t>( binds_.size() );
+      pBinds = binds_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkSparseImageOpaqueMemoryBindInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSparseImageOpaqueMemoryBindInfo*>( this );
+    }
+
+    operator VkSparseImageOpaqueMemoryBindInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSparseImageOpaqueMemoryBindInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::Image const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::SparseMemoryBind * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( image, bindCount, pBinds );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SparseImageOpaqueMemoryBindInfo const & ) const = default;
+#else
+    bool operator==( SparseImageOpaqueMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( image == rhs.image )
+          && ( bindCount == rhs.bindCount )
+          && ( pBinds == rhs.pBinds );
+#endif
+    }
+
+    bool operator!=( SparseImageOpaqueMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::Image image = {};
+    uint32_t bindCount = {};
+    const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds = {};
+
+  };
+
+  struct ImageSubresource
+  {
+    using NativeType = VkImageSubresource;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ImageSubresource(VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, uint32_t mipLevel_ = {}, uint32_t arrayLayer_ = {}) VULKAN_HPP_NOEXCEPT
+    : aspectMask( aspectMask_ ), mipLevel( mipLevel_ ), arrayLayer( arrayLayer_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageSubresource( ImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageSubresource( VkImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImageSubresource( *reinterpret_cast<ImageSubresource const *>( &rhs ) )
+    {}
+
+
+    ImageSubresource & operator=( ImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ImageSubresource & operator=( VkImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSubresource const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ImageSubresource & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      aspectMask = aspectMask_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageSubresource & setMipLevel( uint32_t mipLevel_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mipLevel = mipLevel_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageSubresource & setArrayLayer( uint32_t arrayLayer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      arrayLayer = arrayLayer_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkImageSubresource const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageSubresource*>( this );
+    }
+
+    operator VkImageSubresource &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageSubresource*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::ImageAspectFlags const &, uint32_t const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( aspectMask, mipLevel, arrayLayer );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ImageSubresource const & ) const = default;
+#else
+    bool operator==( ImageSubresource const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( aspectMask == rhs.aspectMask )
+          && ( mipLevel == rhs.mipLevel )
+          && ( arrayLayer == rhs.arrayLayer );
+#endif
+    }
+
+    bool operator!=( ImageSubresource const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
+    uint32_t mipLevel = {};
+    uint32_t arrayLayer = {};
+
+  };
+
+  struct Offset3D
+  {
+    using NativeType = VkOffset3D;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR Offset3D(int32_t x_ = {}, int32_t y_ = {}, int32_t z_ = {}) VULKAN_HPP_NOEXCEPT
+    : x( x_ ), y( y_ ), z( z_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR Offset3D( Offset3D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    Offset3D( VkOffset3D const & rhs ) VULKAN_HPP_NOEXCEPT
+      : Offset3D( *reinterpret_cast<Offset3D const *>( &rhs ) )
+    {}
+
+
+    explicit Offset3D( Offset2D const & offset2D, int32_t z_ = {} )
+      : x( offset2D.x )
+      , y( offset2D.y )
+    , z( z_ )
+    {}
+
+    Offset3D & operator=( Offset3D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    Offset3D & operator=( VkOffset3D const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Offset3D const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 Offset3D & setX( int32_t x_ ) VULKAN_HPP_NOEXCEPT
+    {
+      x = x_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 Offset3D & setY( int32_t y_ ) VULKAN_HPP_NOEXCEPT
+    {
+      y = y_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 Offset3D & setZ( int32_t z_ ) VULKAN_HPP_NOEXCEPT
+    {
+      z = z_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkOffset3D const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkOffset3D*>( this );
+    }
+
+    operator VkOffset3D &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkOffset3D*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<int32_t const &, int32_t const &, int32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( x, y, z );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( Offset3D const & ) const = default;
+#else
+    bool operator==( Offset3D const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( x == rhs.x )
+          && ( y == rhs.y )
+          && ( z == rhs.z );
+#endif
+    }
+
+    bool operator!=( Offset3D const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    int32_t x = {};
+    int32_t y = {};
+    int32_t z = {};
+
+  };
+
+  // C++ wrapper for VkExtent3D (a width/height/depth triple). Generated code:
+  // the member layout must stay identical to the C struct, since all of the
+  // conversions below reinterpret_cast between the two representations.
+  struct Extent3D
+  {
+    using NativeType = VkExtent3D;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR Extent3D(uint32_t width_ = {}, uint32_t height_ = {}, uint32_t depth_ = {}) VULKAN_HPP_NOEXCEPT
+    : width( width_ ), height( height_ ), depth( depth_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR Extent3D( Extent3D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Conversion from the native C struct; relies on identical member layout.
+    Extent3D( VkExtent3D const & rhs ) VULKAN_HPP_NOEXCEPT
+      : Extent3D( *reinterpret_cast<Extent3D const *>( &rhs ) )
+    {}
+
+
+    // Widens a 2D extent into 3D; depth defaults to zero-initialized.
+    explicit Extent3D( Extent2D const & extent2D, uint32_t depth_ = {} )
+      : width( extent2D.width )
+      , height( extent2D.height )
+    , depth( depth_ )
+    {}
+
+    Extent3D & operator=( Extent3D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    Extent3D & operator=( VkExtent3D const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Extent3D const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 Extent3D & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
+    {
+      width = width_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 Extent3D & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
+    {
+      height = height_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 Extent3D & setDepth( uint32_t depth_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depth = depth_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy views as the native C struct.
+    operator VkExtent3D const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExtent3D*>( this );
+    }
+
+    operator VkExtent3D &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExtent3D*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<uint32_t const &, uint32_t const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      // Member-wise tuple of references, used for comparison below.
+      return std::tie( width, height, depth );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( Extent3D const & ) const = default;
+#else
+    bool operator==( Extent3D const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( width == rhs.width )
+          && ( height == rhs.height )
+          && ( depth == rhs.depth );
+#endif
+    }
+
+    bool operator!=( Extent3D const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    uint32_t width = {};
+    uint32_t height = {};
+    uint32_t depth = {};
+
+  };
+
+  // C++ wrapper for VkSparseImageMemoryBind: binds a memory range to a
+  // (subresource, offset, extent) region of a sparse image. Layout-identical
+  // to the C struct (see reinterpret_cast conversions below).
+  struct SparseImageMemoryBind
+  {
+    using NativeType = VkSparseImageMemoryBind;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR SparseImageMemoryBind(VULKAN_HPP_NAMESPACE::ImageSubresource subresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D offset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}, VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ = {}) VULKAN_HPP_NOEXCEPT
+    : subresource( subresource_ ), offset( offset_ ), extent( extent_ ), memory( memory_ ), memoryOffset( memoryOffset_ ), flags( flags_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SparseImageMemoryBind( SparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Conversion from the native C struct; relies on identical member layout.
+    SparseImageMemoryBind( VkSparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SparseImageMemoryBind( *reinterpret_cast<SparseImageMemoryBind const *>( &rhs ) )
+    {}
+
+
+    SparseImageMemoryBind & operator=( SparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SparseImageMemoryBind & operator=( VkSparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageMemoryBind const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable member setters (builder style); each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind & setSubresource( VULKAN_HPP_NAMESPACE::ImageSubresource const & subresource_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subresource = subresource_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind & setOffset( VULKAN_HPP_NAMESPACE::Offset3D const & offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extent = extent_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memory = memory_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memoryOffset = memoryOffset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind & setFlags( VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy views as the native C struct.
+    operator VkSparseImageMemoryBind const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSparseImageMemoryBind*>( this );
+    }
+
+    operator VkSparseImageMemoryBind &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSparseImageMemoryBind*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::ImageSubresource const &, VULKAN_HPP_NAMESPACE::Offset3D const &, VULKAN_HPP_NAMESPACE::Extent3D const &, VULKAN_HPP_NAMESPACE::DeviceMemory const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      // Member-wise tuple of references, used for comparison below.
+      return std::tie( subresource, offset, extent, memory, memoryOffset, flags );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SparseImageMemoryBind const & ) const = default;
+#else
+    bool operator==( SparseImageMemoryBind const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( subresource == rhs.subresource )
+          && ( offset == rhs.offset )
+          && ( extent == rhs.extent )
+          && ( memory == rhs.memory )
+          && ( memoryOffset == rhs.memoryOffset )
+          && ( flags == rhs.flags );
+#endif
+    }
+
+    bool operator!=( SparseImageMemoryBind const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::ImageSubresource subresource = {};
+    VULKAN_HPP_NAMESPACE::Offset3D offset = {};
+    VULKAN_HPP_NAMESPACE::Extent3D extent = {};
+    VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {};
+    VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags = {};
+
+  };
+
+  // C++ wrapper for VkSparseImageMemoryBindInfo: an (image, bind array) pair.
+  // pBinds is a non-owning pointer; the caller keeps the array alive.
+  // Layout-identical to the C struct (see reinterpret_cast conversions).
+  struct SparseImageMemoryBindInfo
+  {
+    using NativeType = VkSparseImageMemoryBindInfo;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR SparseImageMemoryBindInfo(VULKAN_HPP_NAMESPACE::Image image_ = {}, uint32_t bindCount_ = {}, const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind * pBinds_ = {}) VULKAN_HPP_NOEXCEPT
+    : image( image_ ), bindCount( bindCount_ ), pBinds( pBinds_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SparseImageMemoryBindInfo( SparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Conversion from the native C struct; relies on identical member layout.
+    SparseImageMemoryBindInfo( VkSparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SparseImageMemoryBindInfo( *reinterpret_cast<SparseImageMemoryBindInfo const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Enhanced-mode constructor: derives bindCount/pBinds from an array proxy.
+    SparseImageMemoryBindInfo( VULKAN_HPP_NAMESPACE::Image image_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind> const & binds_ )
+    : image( image_ ), bindCount( static_cast<uint32_t>( binds_.size() ) ), pBinds( binds_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    SparseImageMemoryBindInfo & operator=( SparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SparseImageMemoryBindInfo & operator=( VkSparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable member setters (builder style); each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBindInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
+    {
+      image = image_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBindInfo & setBindCount( uint32_t bindCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bindCount = bindCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBindInfo & setPBinds( const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind * pBinds_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pBinds = pBinds_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets both bindCount and pBinds from a single array proxy.
+    SparseImageMemoryBindInfo & setBinds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind> const & binds_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bindCount = static_cast<uint32_t>( binds_.size() );
+      pBinds = binds_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy views as the native C struct.
+    operator VkSparseImageMemoryBindInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSparseImageMemoryBindInfo*>( this );
+    }
+
+    operator VkSparseImageMemoryBindInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSparseImageMemoryBindInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::Image const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      // Member-wise tuple of references, used for comparison below.
+      return std::tie( image, bindCount, pBinds );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SparseImageMemoryBindInfo const & ) const = default;
+#else
+    // NOTE: compares pBinds by pointer identity, not by pointed-to contents.
+    bool operator==( SparseImageMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( image == rhs.image )
+          && ( bindCount == rhs.bindCount )
+          && ( pBinds == rhs.pBinds );
+#endif
+    }
+
+    bool operator!=( SparseImageMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::Image image = {};
+    uint32_t bindCount = {};
+    const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind * pBinds = {};
+
+  };
+
+  // C++ wrapper for VkBindSparseInfo (sType = eBindSparseInfo): parameters for
+  // a sparse binding submission (wait/signal semaphores plus buffer, opaque
+  // image, and image bind arrays). All p* members are non-owning pointers.
+  // Layout-identical to the C struct (see reinterpret_cast conversions).
+  struct BindSparseInfo
+  {
+    using NativeType = VkBindSparseInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindSparseInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR BindSparseInfo(uint32_t waitSemaphoreCount_ = {}, const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ = {}, uint32_t bufferBindCount_ = {}, const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo * pBufferBinds_ = {}, uint32_t imageOpaqueBindCount_ = {}, const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo * pImageOpaqueBinds_ = {}, uint32_t imageBindCount_ = {}, const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo * pImageBinds_ = {}, uint32_t signalSemaphoreCount_ = {}, const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), waitSemaphoreCount( waitSemaphoreCount_ ), pWaitSemaphores( pWaitSemaphores_ ), bufferBindCount( bufferBindCount_ ), pBufferBinds( pBufferBinds_ ), imageOpaqueBindCount( imageOpaqueBindCount_ ), pImageOpaqueBinds( pImageOpaqueBinds_ ), imageBindCount( imageBindCount_ ), pImageBinds( pImageBinds_ ), signalSemaphoreCount( signalSemaphoreCount_ ), pSignalSemaphores( pSignalSemaphores_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BindSparseInfo( BindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Conversion from the native C struct; relies on identical member layout.
+    BindSparseInfo( VkBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : BindSparseInfo( *reinterpret_cast<BindSparseInfo const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Enhanced-mode constructor: derives each count/pointer pair from an
+    // array proxy, so the counts can never disagree with the arrays.
+    BindSparseInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & waitSemaphores_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo> const & bufferBinds_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo> const & imageOpaqueBinds_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo> const & imageBinds_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & signalSemaphores_ = {}, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), waitSemaphoreCount( static_cast<uint32_t>( waitSemaphores_.size() ) ), pWaitSemaphores( waitSemaphores_.data() ), bufferBindCount( static_cast<uint32_t>( bufferBinds_.size() ) ), pBufferBinds( bufferBinds_.data() ), imageOpaqueBindCount( static_cast<uint32_t>( imageOpaqueBinds_.size() ) ), pImageOpaqueBinds( imageOpaqueBinds_.data() ), imageBindCount( static_cast<uint32_t>( imageBinds_.size() ) ), pImageBinds( imageBinds_.data() ), signalSemaphoreCount( static_cast<uint32_t>( signalSemaphores_.size() ) ), pSignalSemaphores( signalSemaphores_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    BindSparseInfo & operator=( BindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    BindSparseInfo & operator=( VkBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindSparseInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable member setters (builder style); each returns *this. The
+    // enhanced-mode set*s() variants update count and pointer together.
+    VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      waitSemaphoreCount = waitSemaphoreCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pWaitSemaphores = pWaitSemaphores_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    BindSparseInfo & setWaitSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & waitSemaphores_ ) VULKAN_HPP_NOEXCEPT
+    {
+      waitSemaphoreCount = static_cast<uint32_t>( waitSemaphores_.size() );
+      pWaitSemaphores = waitSemaphores_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setBufferBindCount( uint32_t bufferBindCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferBindCount = bufferBindCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setPBufferBinds( const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo * pBufferBinds_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pBufferBinds = pBufferBinds_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    BindSparseInfo & setBufferBinds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo> const & bufferBinds_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferBindCount = static_cast<uint32_t>( bufferBinds_.size() );
+      pBufferBinds = bufferBinds_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setImageOpaqueBindCount( uint32_t imageOpaqueBindCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageOpaqueBindCount = imageOpaqueBindCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setPImageOpaqueBinds( const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo * pImageOpaqueBinds_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pImageOpaqueBinds = pImageOpaqueBinds_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    BindSparseInfo & setImageOpaqueBinds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo> const & imageOpaqueBinds_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageOpaqueBindCount = static_cast<uint32_t>( imageOpaqueBinds_.size() );
+      pImageOpaqueBinds = imageOpaqueBinds_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setImageBindCount( uint32_t imageBindCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageBindCount = imageBindCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setPImageBinds( const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo * pImageBinds_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pImageBinds = pImageBinds_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    BindSparseInfo & setImageBinds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo> const & imageBinds_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageBindCount = static_cast<uint32_t>( imageBinds_.size() );
+      pImageBinds = imageBinds_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      signalSemaphoreCount = signalSemaphoreCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setPSignalSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSignalSemaphores = pSignalSemaphores_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    BindSparseInfo & setSignalSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & signalSemaphores_ ) VULKAN_HPP_NOEXCEPT
+    {
+      signalSemaphoreCount = static_cast<uint32_t>( signalSemaphores_.size() );
+      pSignalSemaphores = signalSemaphores_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy views as the native C struct.
+    operator VkBindSparseInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBindSparseInfo*>( this );
+    }
+
+    operator VkBindSparseInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBindSparseInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Semaphore * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Semaphore * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      // Member-wise tuple of references, used for comparison below.
+      return std::tie( sType, pNext, waitSemaphoreCount, pWaitSemaphores, bufferBindCount, pBufferBinds, imageOpaqueBindCount, pImageOpaqueBinds, imageBindCount, pImageBinds, signalSemaphoreCount, pSignalSemaphores );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( BindSparseInfo const & ) const = default;
+#else
+    // NOTE: pointer members are compared by identity, not pointed-to contents.
+    bool operator==( BindSparseInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( waitSemaphoreCount == rhs.waitSemaphoreCount )
+          && ( pWaitSemaphores == rhs.pWaitSemaphores )
+          && ( bufferBindCount == rhs.bufferBindCount )
+          && ( pBufferBinds == rhs.pBufferBinds )
+          && ( imageOpaqueBindCount == rhs.imageOpaqueBindCount )
+          && ( pImageOpaqueBinds == rhs.pImageOpaqueBinds )
+          && ( imageBindCount == rhs.imageBindCount )
+          && ( pImageBinds == rhs.pImageBinds )
+          && ( signalSemaphoreCount == rhs.signalSemaphoreCount )
+          && ( pSignalSemaphores == rhs.pSignalSemaphores );
+#endif
+    }
+
+    bool operator!=( BindSparseInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindSparseInfo;
+    const void * pNext = {};
+    uint32_t waitSemaphoreCount = {};
+    const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores = {};
+    uint32_t bufferBindCount = {};
+    const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo * pBufferBinds = {};
+    uint32_t imageOpaqueBindCount = {};
+    const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo * pImageOpaqueBinds = {};
+    uint32_t imageBindCount = {};
+    const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo * pImageBinds = {};
+    uint32_t signalSemaphoreCount = {};
+    const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores = {};
+
+  };
+
+  // Maps StructureType::eBindSparseInfo back to its C++ struct type,
+  // enabling sType -> type lookups in generic code.
+  template <>
+  struct CppType<StructureType, StructureType::eBindSparseInfo>
+  {
+    using Type = BindSparseInfo;
+  };
+
+  // C++ wrapper for VkBindVertexBufferIndirectCommandNV (device-generated
+  // commands, NV extension). Layout-identical to the C struct (see the
+  // reinterpret_cast conversions below).
+  struct BindVertexBufferIndirectCommandNV
+  {
+    using NativeType = VkBindVertexBufferIndirectCommandNV;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR BindVertexBufferIndirectCommandNV(VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ = {}, uint32_t size_ = {}, uint32_t stride_ = {}) VULKAN_HPP_NOEXCEPT
+    : bufferAddress( bufferAddress_ ), size( size_ ), stride( stride_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BindVertexBufferIndirectCommandNV( BindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Conversion from the native C struct; relies on identical member layout.
+    BindVertexBufferIndirectCommandNV( VkBindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : BindVertexBufferIndirectCommandNV( *reinterpret_cast<BindVertexBufferIndirectCommandNV const *>( &rhs ) )
+    {}
+
+
+    BindVertexBufferIndirectCommandNV & operator=( BindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    BindVertexBufferIndirectCommandNV & operator=( VkBindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindVertexBufferIndirectCommandNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable member setters (builder style); each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 BindVertexBufferIndirectCommandNV & setBufferAddress( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferAddress = bufferAddress_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BindVertexBufferIndirectCommandNV & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT
+    {
+      size = size_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BindVertexBufferIndirectCommandNV & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stride = stride_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy views as the native C struct.
+    operator VkBindVertexBufferIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBindVertexBufferIndirectCommandNV*>( this );
+    }
+
+    operator VkBindVertexBufferIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBindVertexBufferIndirectCommandNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::DeviceAddress const &, uint32_t const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      // Member-wise tuple of references, used for comparison below.
+      return std::tie( bufferAddress, size, stride );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( BindVertexBufferIndirectCommandNV const & ) const = default;
+#else
+    bool operator==( BindVertexBufferIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( bufferAddress == rhs.bufferAddress )
+          && ( size == rhs.size )
+          && ( stride == rhs.stride );
+#endif
+    }
+
+    bool operator!=( BindVertexBufferIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress = {};
+    uint32_t size = {};
+    uint32_t stride = {};
+
+  };
+
+  // C++ wrapper for VkBindVideoSessionMemoryInfoKHR (sType =
+  // eBindVideoSessionMemoryInfoKHR, KHR video extension): binds a device
+  // memory range to a video session memory-bind index. Layout-identical to
+  // the C struct (see reinterpret_cast conversions below).
+  struct BindVideoSessionMemoryInfoKHR
+  {
+    using NativeType = VkBindVideoSessionMemoryInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindVideoSessionMemoryInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR BindVideoSessionMemoryInfoKHR(uint32_t memoryBindIndex_ = {}, VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize memorySize_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), memoryBindIndex( memoryBindIndex_ ), memory( memory_ ), memoryOffset( memoryOffset_ ), memorySize( memorySize_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BindVideoSessionMemoryInfoKHR( BindVideoSessionMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Conversion from the native C struct; relies on identical member layout.
+    BindVideoSessionMemoryInfoKHR( VkBindVideoSessionMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : BindVideoSessionMemoryInfoKHR( *reinterpret_cast<BindVideoSessionMemoryInfoKHR const *>( &rhs ) )
+    {}
+
+
+    BindVideoSessionMemoryInfoKHR & operator=( BindVideoSessionMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    BindVideoSessionMemoryInfoKHR & operator=( VkBindVideoSessionMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable member setters (builder style); each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 BindVideoSessionMemoryInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BindVideoSessionMemoryInfoKHR & setMemoryBindIndex( uint32_t memoryBindIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memoryBindIndex = memoryBindIndex_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BindVideoSessionMemoryInfoKHR & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memory = memory_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BindVideoSessionMemoryInfoKHR & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memoryOffset = memoryOffset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BindVideoSessionMemoryInfoKHR & setMemorySize( VULKAN_HPP_NAMESPACE::DeviceSize memorySize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memorySize = memorySize_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy views as the native C struct.
+    operator VkBindVideoSessionMemoryInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBindVideoSessionMemoryInfoKHR*>( this );
+    }
+
+    operator VkBindVideoSessionMemoryInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBindVideoSessionMemoryInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DeviceMemory const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      // Member-wise tuple of references, used for comparison below.
+      return std::tie( sType, pNext, memoryBindIndex, memory, memoryOffset, memorySize );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( BindVideoSessionMemoryInfoKHR const & ) const = default;
+#else
+    bool operator==( BindVideoSessionMemoryInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memoryBindIndex == rhs.memoryBindIndex )
+          && ( memory == rhs.memory )
+          && ( memoryOffset == rhs.memoryOffset )
+          && ( memorySize == rhs.memorySize );
+#endif
+    }
+
+    bool operator!=( BindVideoSessionMemoryInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindVideoSessionMemoryInfoKHR;
+    const void * pNext = {};
+    uint32_t memoryBindIndex = {};
+    VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize memorySize = {};
+
+  };
+
+  // Maps StructureType::eBindVideoSessionMemoryInfoKHR back to its C++
+  // struct type, enabling sType -> type lookups in generic code.
+  template <>
+  struct CppType<StructureType, StructureType::eBindVideoSessionMemoryInfoKHR>
+  {
+    using Type = BindVideoSessionMemoryInfoKHR;
+  };
+
+  // C++ wrapper for VkImageSubresourceLayers: selects one mip level and a
+  // contiguous range of array layers of an image, filtered by aspect mask.
+  // Layout-identical to the C struct (see reinterpret_cast conversions).
+  struct ImageSubresourceLayers
+  {
+    using NativeType = VkImageSubresourceLayers;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ImageSubresourceLayers(VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, uint32_t mipLevel_ = {}, uint32_t baseArrayLayer_ = {}, uint32_t layerCount_ = {}) VULKAN_HPP_NOEXCEPT
+    : aspectMask( aspectMask_ ), mipLevel( mipLevel_ ), baseArrayLayer( baseArrayLayer_ ), layerCount( layerCount_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageSubresourceLayers( ImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Conversion from the native C struct; relies on identical member layout.
+    ImageSubresourceLayers( VkImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImageSubresourceLayers( *reinterpret_cast<ImageSubresourceLayers const *>( &rhs ) )
+    {}
+
+
+    ImageSubresourceLayers & operator=( ImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ImageSubresourceLayers & operator=( VkImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable member setters (builder style); each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 ImageSubresourceLayers & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      aspectMask = aspectMask_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageSubresourceLayers & setMipLevel( uint32_t mipLevel_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mipLevel = mipLevel_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageSubresourceLayers & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      baseArrayLayer = baseArrayLayer_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageSubresourceLayers & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      layerCount = layerCount_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy views as the native C struct.
+    operator VkImageSubresourceLayers const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageSubresourceLayers*>( this );
+    }
+
+    operator VkImageSubresourceLayers &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageSubresourceLayers*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::ImageAspectFlags const &, uint32_t const &, uint32_t const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      // Member-wise tuple of references, used for comparison below.
+      return std::tie( aspectMask, mipLevel, baseArrayLayer, layerCount );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ImageSubresourceLayers const & ) const = default;
+#else
+    bool operator==( ImageSubresourceLayers const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( aspectMask == rhs.aspectMask )
+          && ( mipLevel == rhs.mipLevel )
+          && ( baseArrayLayer == rhs.baseArrayLayer )
+          && ( layerCount == rhs.layerCount );
+#endif
+    }
+
+    bool operator!=( ImageSubresourceLayers const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
+    uint32_t mipLevel = {};
+    uint32_t baseArrayLayer = {};
+    uint32_t layerCount = {};
+
+  };
+
+  // C++ wrapper for VkImageBlit2: one region of a vkCmdBlitImage2 call
+  // (source/destination subresource plus two corner offsets each).
+  // Layout-compatible with the C struct — the conversion operators below
+  // reinterpret_cast `this`, so members must not be added or reordered.
+  struct ImageBlit2
+  {
+    using NativeType = VkImageBlit2;
+
+    static const bool allowDuplicate = false;
+    // sType is fixed to eImageBlit2 (set in the member initializer; no setter).
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageBlit2;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR_14 ImageBlit2(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const & srcOffsets_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const & dstOffsets_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), srcSubresource( srcSubresource_ ), srcOffsets( srcOffsets_ ), dstSubresource( dstSubresource_ ), dstOffsets( dstOffsets_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 ImageBlit2( ImageBlit2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct via the layout-compatibility cast.
+    ImageBlit2( VkImageBlit2 const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImageBlit2( *reinterpret_cast<ImageBlit2 const *>( &rhs ) )
+    {}
+
+
+    ImageBlit2 & operator=( ImageBlit2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ImageBlit2 & operator=( VkImageBlit2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageBlit2 const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable builder-style setters; each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 ImageBlit2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageBlit2 & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcSubresource = srcSubresource_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageBlit2 & setSrcOffsets( std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const & srcOffsets_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcOffsets = srcOffsets_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageBlit2 & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstSubresource = dstSubresource_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageBlit2 & setDstOffsets( std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const & dstOffsets_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstOffsets = dstOffsets_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the C struct, relying on identical layout.
+    operator VkImageBlit2 const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageBlit2*>( this );
+    }
+
+    operator VkImageBlit2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageBlit2*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Tuple of references over all members, used below for memberwise ==.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::Offset3D, 2> const &, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::Offset3D, 2> const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, srcSubresource, srcOffsets, dstSubresource, dstOffsets );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ImageBlit2 const & ) const = default;
+#else
+    bool operator==( ImageBlit2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( srcSubresource == rhs.srcSubresource )
+          && ( srcOffsets == rhs.srcOffsets )
+          && ( dstSubresource == rhs.dstSubresource )
+          && ( dstOffsets == rhs.dstOffsets );
+#endif
+    }
+
+    bool operator!=( ImageBlit2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members mirror VkImageBlit2 one-for-one.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageBlit2;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::Offset3D, 2> srcOffsets = {};
+    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::Offset3D, 2> dstOffsets = {};
+
+  };
+
+  // Maps the sType enumerator back to this struct (used by structure-chain code).
+  template <>
+  struct CppType<StructureType, StructureType::eImageBlit2>
+  {
+    using Type = ImageBlit2;
+  };
+  // Promoted from VK_KHR_copy_commands2; keep the KHR spelling as an alias.
+  using ImageBlit2KHR = ImageBlit2;
+
+  // C++ wrapper for VkBlitImageInfo2: full parameter set of vkCmdBlitImage2
+  // (source/destination image and layout, the region array, and the filter).
+  // Layout-compatible with the C struct — the conversion operators below
+  // reinterpret_cast `this`, so members must not be added or reordered.
+  struct BlitImageInfo2
+  {
+    using NativeType = VkBlitImageInfo2;
+
+    static const bool allowDuplicate = false;
+    // sType is fixed to eBlitImageInfo2 (set in the member initializer; no setter).
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBlitImageInfo2;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2(VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::ImageBlit2 * pRegions_ = {}, VULKAN_HPP_NAMESPACE::Filter filter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstImage( dstImage_ ), dstImageLayout( dstImageLayout_ ), regionCount( regionCount_ ), pRegions( pRegions_ ), filter( filter_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2( BlitImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct via the layout-compatibility cast.
+    BlitImageInfo2( VkBlitImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+      : BlitImageInfo2( *reinterpret_cast<BlitImageInfo2 const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Enhanced-mode convenience constructor: derives regionCount/pRegions from
+    // an ArrayProxy. NOTE: only stores regions_.data() — the caller must keep
+    // the region storage alive while this struct is in use.
+    BlitImageInfo2( VULKAN_HPP_NAMESPACE::Image srcImage_, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, VULKAN_HPP_NAMESPACE::Image dstImage_, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageBlit2> const & regions_, VULKAN_HPP_NAMESPACE::Filter filter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstImage( dstImage_ ), dstImageLayout( dstImageLayout_ ), regionCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() ), filter( filter_ )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    BlitImageInfo2 & operator=( BlitImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    BlitImageInfo2 & operator=( VkBlitImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BlitImageInfo2 const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable builder-style setters; each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcImage = srcImage_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcImageLayout = srcImageLayout_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstImage = dstImage_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstImageLayout = dstImageLayout_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      regionCount = regionCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setPRegions( const VULKAN_HPP_NAMESPACE::ImageBlit2 * pRegions_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pRegions = pRegions_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets regionCount and pRegions together from one ArrayProxy (caller
+    // keeps the underlying storage alive).
+    BlitImageInfo2 & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageBlit2> const & regions_ ) VULKAN_HPP_NOEXCEPT
+    {
+      regionCount = static_cast<uint32_t>( regions_.size() );
+      pRegions = regions_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setFilter( VULKAN_HPP_NAMESPACE::Filter filter_ ) VULKAN_HPP_NOEXCEPT
+    {
+      filter = filter_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the C struct, relying on identical layout.
+    operator VkBlitImageInfo2 const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBlitImageInfo2*>( this );
+    }
+
+    operator VkBlitImageInfo2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBlitImageInfo2*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Tuple of references over all members, used below for memberwise ==.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Image const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, VULKAN_HPP_NAMESPACE::Image const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::ImageBlit2 * const &, VULKAN_HPP_NAMESPACE::Filter const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( BlitImageInfo2 const & ) const = default;
+#else
+    // Note: pRegions is compared by pointer value, not by pointed-to contents.
+    bool operator==( BlitImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( srcImage == rhs.srcImage )
+          && ( srcImageLayout == rhs.srcImageLayout )
+          && ( dstImage == rhs.dstImage )
+          && ( dstImageLayout == rhs.dstImageLayout )
+          && ( regionCount == rhs.regionCount )
+          && ( pRegions == rhs.pRegions )
+          && ( filter == rhs.filter );
+#endif
+    }
+
+    bool operator!=( BlitImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members mirror VkBlitImageInfo2 one-for-one.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBlitImageInfo2;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Image srcImage = {};
+    VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+    VULKAN_HPP_NAMESPACE::Image dstImage = {};
+    VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+    uint32_t regionCount = {};
+    const VULKAN_HPP_NAMESPACE::ImageBlit2 * pRegions = {};
+    VULKAN_HPP_NAMESPACE::Filter filter = VULKAN_HPP_NAMESPACE::Filter::eNearest;
+
+  };
+
+  // Maps the sType enumerator back to this struct (used by structure-chain code).
+  template <>
+  struct CppType<StructureType, StructureType::eBlitImageInfo2>
+  {
+    using Type = BlitImageInfo2;
+  };
+  // Promoted from VK_KHR_copy_commands2; keep the KHR spelling as an alias.
+  using BlitImageInfo2KHR = BlitImageInfo2;
+
+  // C++ wrapper for VkBufferCaptureDescriptorDataInfoEXT
+  // (VK_EXT_descriptor_buffer): identifies the buffer whose opaque capture
+  // descriptor data is queried. Layout-compatible with the C struct — the
+  // conversion operators below reinterpret_cast `this`.
+  struct BufferCaptureDescriptorDataInfoEXT
+  {
+    using NativeType = VkBufferCaptureDescriptorDataInfoEXT;
+
+    static const bool allowDuplicate = false;
+    // sType is fixed (set in the member initializer; no setter).
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCaptureDescriptorDataInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR BufferCaptureDescriptorDataInfoEXT(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), buffer( buffer_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BufferCaptureDescriptorDataInfoEXT( BufferCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct via the layout-compatibility cast.
+    BufferCaptureDescriptorDataInfoEXT( VkBufferCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : BufferCaptureDescriptorDataInfoEXT( *reinterpret_cast<BufferCaptureDescriptorDataInfoEXT const *>( &rhs ) )
+    {}
+
+
+    BufferCaptureDescriptorDataInfoEXT & operator=( BufferCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    BufferCaptureDescriptorDataInfoEXT & operator=( VkBufferCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable builder-style setters; each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 BufferCaptureDescriptorDataInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferCaptureDescriptorDataInfoEXT & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the C struct, relying on identical layout.
+    operator VkBufferCaptureDescriptorDataInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBufferCaptureDescriptorDataInfoEXT*>( this );
+    }
+
+    operator VkBufferCaptureDescriptorDataInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBufferCaptureDescriptorDataInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Tuple of references over all members, used below for memberwise ==.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Buffer const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, buffer );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( BufferCaptureDescriptorDataInfoEXT const & ) const = default;
+#else
+    bool operator==( BufferCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( buffer == rhs.buffer );
+#endif
+    }
+
+    bool operator!=( BufferCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members mirror VkBufferCaptureDescriptorDataInfoEXT one-for-one.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCaptureDescriptorDataInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+
+  };
+
+  // Maps the sType enumerator back to this struct (used by structure-chain code).
+  template <>
+  struct CppType<StructureType, StructureType::eBufferCaptureDescriptorDataInfoEXT>
+  {
+    using Type = BufferCaptureDescriptorDataInfoEXT;
+  };
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  // C++ wrapper for VkBufferCollectionBufferCreateInfoFUCHSIA
+  // (VK_FUCHSIA_buffer_collection): selects buffer `index` of `collection`
+  // when creating a VkBuffer. Fuchsia-only (guarded by VK_USE_PLATFORM_FUCHSIA).
+  // Layout-compatible with the C struct — see the reinterpret_cast operators.
+  struct BufferCollectionBufferCreateInfoFUCHSIA
+  {
+    using NativeType = VkBufferCollectionBufferCreateInfoFUCHSIA;
+
+    static const bool allowDuplicate = false;
+    // sType is fixed (set in the member initializer; no setter).
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCollectionBufferCreateInfoFUCHSIA;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR BufferCollectionBufferCreateInfoFUCHSIA(VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ = {}, uint32_t index_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), collection( collection_ ), index( index_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BufferCollectionBufferCreateInfoFUCHSIA( BufferCollectionBufferCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct via the layout-compatibility cast.
+    BufferCollectionBufferCreateInfoFUCHSIA( VkBufferCollectionBufferCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
+      : BufferCollectionBufferCreateInfoFUCHSIA( *reinterpret_cast<BufferCollectionBufferCreateInfoFUCHSIA const *>( &rhs ) )
+    {}
+
+
+    BufferCollectionBufferCreateInfoFUCHSIA & operator=( BufferCollectionBufferCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    BufferCollectionBufferCreateInfoFUCHSIA & operator=( VkBufferCollectionBufferCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCollectionBufferCreateInfoFUCHSIA const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable builder-style setters; each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 BufferCollectionBufferCreateInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferCollectionBufferCreateInfoFUCHSIA & setCollection( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ ) VULKAN_HPP_NOEXCEPT
+    {
+      collection = collection_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferCollectionBufferCreateInfoFUCHSIA & setIndex( uint32_t index_ ) VULKAN_HPP_NOEXCEPT
+    {
+      index = index_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the C struct, relying on identical layout.
+    operator VkBufferCollectionBufferCreateInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBufferCollectionBufferCreateInfoFUCHSIA*>( this );
+    }
+
+    operator VkBufferCollectionBufferCreateInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBufferCollectionBufferCreateInfoFUCHSIA*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Tuple of references over all members, used below for memberwise ==.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, collection, index );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( BufferCollectionBufferCreateInfoFUCHSIA const & ) const = default;
+#else
+    bool operator==( BufferCollectionBufferCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( collection == rhs.collection )
+          && ( index == rhs.index );
+#endif
+    }
+
+    bool operator!=( BufferCollectionBufferCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members mirror VkBufferCollectionBufferCreateInfoFUCHSIA one-for-one.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCollectionBufferCreateInfoFUCHSIA;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection = {};
+    uint32_t index = {};
+
+  };
+
+  // Maps the sType enumerator back to this struct (used by structure-chain code).
+  template <>
+  struct CppType<StructureType, StructureType::eBufferCollectionBufferCreateInfoFUCHSIA>
+  {
+    using Type = BufferCollectionBufferCreateInfoFUCHSIA;
+  };
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  // C++ wrapper for VkBufferCollectionConstraintsInfoFUCHSIA
+  // (VK_FUCHSIA_buffer_collection): buffer-count constraints handed to sysmem.
+  // Fuchsia-only. Layout-compatible with the C struct — see the
+  // reinterpret_cast conversion operators below.
+  struct BufferCollectionConstraintsInfoFUCHSIA
+  {
+    using NativeType = VkBufferCollectionConstraintsInfoFUCHSIA;
+
+    static const bool allowDuplicate = false;
+    // sType is fixed (set in the member initializer; no setter).
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCollectionConstraintsInfoFUCHSIA;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR BufferCollectionConstraintsInfoFUCHSIA(uint32_t minBufferCount_ = {}, uint32_t maxBufferCount_ = {}, uint32_t minBufferCountForCamping_ = {}, uint32_t minBufferCountForDedicatedSlack_ = {}, uint32_t minBufferCountForSharedSlack_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), minBufferCount( minBufferCount_ ), maxBufferCount( maxBufferCount_ ), minBufferCountForCamping( minBufferCountForCamping_ ), minBufferCountForDedicatedSlack( minBufferCountForDedicatedSlack_ ), minBufferCountForSharedSlack( minBufferCountForSharedSlack_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BufferCollectionConstraintsInfoFUCHSIA( BufferCollectionConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct via the layout-compatibility cast.
+    BufferCollectionConstraintsInfoFUCHSIA( VkBufferCollectionConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
+      : BufferCollectionConstraintsInfoFUCHSIA( *reinterpret_cast<BufferCollectionConstraintsInfoFUCHSIA const *>( &rhs ) )
+    {}
+
+
+    BufferCollectionConstraintsInfoFUCHSIA & operator=( BufferCollectionConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    BufferCollectionConstraintsInfoFUCHSIA & operator=( VkBufferCollectionConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable builder-style setters; each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA & setMinBufferCount( uint32_t minBufferCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minBufferCount = minBufferCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA & setMaxBufferCount( uint32_t maxBufferCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxBufferCount = maxBufferCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA & setMinBufferCountForCamping( uint32_t minBufferCountForCamping_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minBufferCountForCamping = minBufferCountForCamping_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA & setMinBufferCountForDedicatedSlack( uint32_t minBufferCountForDedicatedSlack_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minBufferCountForDedicatedSlack = minBufferCountForDedicatedSlack_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA & setMinBufferCountForSharedSlack( uint32_t minBufferCountForSharedSlack_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minBufferCountForSharedSlack = minBufferCountForSharedSlack_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the C struct, relying on identical layout.
+    operator VkBufferCollectionConstraintsInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBufferCollectionConstraintsInfoFUCHSIA*>( this );
+    }
+
+    operator VkBufferCollectionConstraintsInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBufferCollectionConstraintsInfoFUCHSIA*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Tuple of references over all members, used below for memberwise ==.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, minBufferCount, maxBufferCount, minBufferCountForCamping, minBufferCountForDedicatedSlack, minBufferCountForSharedSlack );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( BufferCollectionConstraintsInfoFUCHSIA const & ) const = default;
+#else
+    bool operator==( BufferCollectionConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( minBufferCount == rhs.minBufferCount )
+          && ( maxBufferCount == rhs.maxBufferCount )
+          && ( minBufferCountForCamping == rhs.minBufferCountForCamping )
+          && ( minBufferCountForDedicatedSlack == rhs.minBufferCountForDedicatedSlack )
+          && ( minBufferCountForSharedSlack == rhs.minBufferCountForSharedSlack );
+#endif
+    }
+
+    bool operator!=( BufferCollectionConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members mirror VkBufferCollectionConstraintsInfoFUCHSIA one-for-one.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCollectionConstraintsInfoFUCHSIA;
+    const void * pNext = {};
+    uint32_t minBufferCount = {};
+    uint32_t maxBufferCount = {};
+    uint32_t minBufferCountForCamping = {};
+    uint32_t minBufferCountForDedicatedSlack = {};
+    uint32_t minBufferCountForSharedSlack = {};
+
+  };
+
+  // Maps the sType enumerator back to this struct (used by structure-chain code).
+  template <>
+  struct CppType<StructureType, StructureType::eBufferCollectionConstraintsInfoFUCHSIA>
+  {
+    using Type = BufferCollectionConstraintsInfoFUCHSIA;
+  };
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  // C++ wrapper for VkBufferCollectionCreateInfoFUCHSIA
+  // (VK_FUCHSIA_buffer_collection): carries the zx_handle_t sysmem collection
+  // token used to create a buffer collection. Fuchsia-only.
+  // Layout-compatible with the C struct — see the reinterpret_cast operators.
+  struct BufferCollectionCreateInfoFUCHSIA
+  {
+    using NativeType = VkBufferCollectionCreateInfoFUCHSIA;
+
+    static const bool allowDuplicate = false;
+    // sType is fixed (set in the member initializer; no setter).
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCollectionCreateInfoFUCHSIA;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR BufferCollectionCreateInfoFUCHSIA(zx_handle_t collectionToken_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), collectionToken( collectionToken_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BufferCollectionCreateInfoFUCHSIA( BufferCollectionCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct via the layout-compatibility cast.
+    BufferCollectionCreateInfoFUCHSIA( VkBufferCollectionCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
+      : BufferCollectionCreateInfoFUCHSIA( *reinterpret_cast<BufferCollectionCreateInfoFUCHSIA const *>( &rhs ) )
+    {}
+
+
+    BufferCollectionCreateInfoFUCHSIA & operator=( BufferCollectionCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    BufferCollectionCreateInfoFUCHSIA & operator=( VkBufferCollectionCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable builder-style setters; each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 BufferCollectionCreateInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferCollectionCreateInfoFUCHSIA & setCollectionToken( zx_handle_t collectionToken_ ) VULKAN_HPP_NOEXCEPT
+    {
+      collectionToken = collectionToken_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the C struct, relying on identical layout.
+    operator VkBufferCollectionCreateInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBufferCollectionCreateInfoFUCHSIA*>( this );
+    }
+
+    operator VkBufferCollectionCreateInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBufferCollectionCreateInfoFUCHSIA*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Tuple of references over all members, used below for memberwise ==.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, zx_handle_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, collectionToken );
+    }
+#endif
+
+
+    // NOTE(review): replaced the hand-written memcmp-based operator<=>/operator==
+    // with the defaulted/memberwise pattern used by every sibling struct in this
+    // header: zx_handle_t is an integral Zircon handle, so direct == is
+    // well-defined; this also drops the hidden <cstring> dependency and the
+    // unspecified ordering of comparing unrelated pNext pointers with <=>.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( BufferCollectionCreateInfoFUCHSIA const & ) const = default;
+#else
+    bool operator==( BufferCollectionCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( collectionToken == rhs.collectionToken );
+#endif
+    }
+
+    bool operator!=( BufferCollectionCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members mirror VkBufferCollectionCreateInfoFUCHSIA one-for-one.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCollectionCreateInfoFUCHSIA;
+    const void * pNext = {};
+    zx_handle_t collectionToken = {};
+
+  };
+
+  // Maps the sType enumerator back to this struct (used by structure-chain code).
+  template <>
+  struct CppType<StructureType, StructureType::eBufferCollectionCreateInfoFUCHSIA>
+  {
+    using Type = BufferCollectionCreateInfoFUCHSIA;
+  };
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  struct BufferCollectionImageCreateInfoFUCHSIA
+  {
+    using NativeType = VkBufferCollectionImageCreateInfoFUCHSIA;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCollectionImageCreateInfoFUCHSIA;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR BufferCollectionImageCreateInfoFUCHSIA(VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ = {}, uint32_t index_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), collection( collection_ ), index( index_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BufferCollectionImageCreateInfoFUCHSIA( BufferCollectionImageCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    BufferCollectionImageCreateInfoFUCHSIA( VkBufferCollectionImageCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
+      : BufferCollectionImageCreateInfoFUCHSIA( *reinterpret_cast<BufferCollectionImageCreateInfoFUCHSIA const *>( &rhs ) )
+    {}
+
+
+    BufferCollectionImageCreateInfoFUCHSIA & operator=( BufferCollectionImageCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    BufferCollectionImageCreateInfoFUCHSIA & operator=( VkBufferCollectionImageCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCollectionImageCreateInfoFUCHSIA const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 BufferCollectionImageCreateInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferCollectionImageCreateInfoFUCHSIA & setCollection( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ ) VULKAN_HPP_NOEXCEPT
+    {
+      collection = collection_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferCollectionImageCreateInfoFUCHSIA & setIndex( uint32_t index_ ) VULKAN_HPP_NOEXCEPT
+    {
+      index = index_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkBufferCollectionImageCreateInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBufferCollectionImageCreateInfoFUCHSIA*>( this );
+    }
+
+    operator VkBufferCollectionImageCreateInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBufferCollectionImageCreateInfoFUCHSIA*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, collection, index );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( BufferCollectionImageCreateInfoFUCHSIA const & ) const = default;
+#else
+    bool operator==( BufferCollectionImageCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( collection == rhs.collection )
+          && ( index == rhs.index );
+#endif
+    }
+
+    bool operator!=( BufferCollectionImageCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCollectionImageCreateInfoFUCHSIA;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection = {};
+    uint32_t index = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eBufferCollectionImageCreateInfoFUCHSIA>
+  {
+    using Type = BufferCollectionImageCreateInfoFUCHSIA;
+  };
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  struct SysmemColorSpaceFUCHSIA
+  {
+    using NativeType = VkSysmemColorSpaceFUCHSIA;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSysmemColorSpaceFUCHSIA;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR SysmemColorSpaceFUCHSIA(uint32_t colorSpace_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), colorSpace( colorSpace_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SysmemColorSpaceFUCHSIA( SysmemColorSpaceFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SysmemColorSpaceFUCHSIA( VkSysmemColorSpaceFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SysmemColorSpaceFUCHSIA( *reinterpret_cast<SysmemColorSpaceFUCHSIA const *>( &rhs ) )
+    {}
+
+
+    SysmemColorSpaceFUCHSIA & operator=( SysmemColorSpaceFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SysmemColorSpaceFUCHSIA & operator=( VkSysmemColorSpaceFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SysmemColorSpaceFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SysmemColorSpaceFUCHSIA & setColorSpace( uint32_t colorSpace_ ) VULKAN_HPP_NOEXCEPT
+    {
+      colorSpace = colorSpace_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkSysmemColorSpaceFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSysmemColorSpaceFUCHSIA*>( this );
+    }
+
+    operator VkSysmemColorSpaceFUCHSIA &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSysmemColorSpaceFUCHSIA*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, colorSpace );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SysmemColorSpaceFUCHSIA const & ) const = default;
+#else
+    bool operator==( SysmemColorSpaceFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( colorSpace == rhs.colorSpace );
+#endif
+    }
+
+    bool operator!=( SysmemColorSpaceFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSysmemColorSpaceFUCHSIA;
+    const void * pNext = {};
+    uint32_t colorSpace = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eSysmemColorSpaceFUCHSIA>
+  {
+    using Type = SysmemColorSpaceFUCHSIA;
+  };
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  struct BufferCollectionPropertiesFUCHSIA
+  {
+    using NativeType = VkBufferCollectionPropertiesFUCHSIA;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCollectionPropertiesFUCHSIA;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR BufferCollectionPropertiesFUCHSIA(uint32_t memoryTypeBits_ = {}, uint32_t bufferCount_ = {}, uint32_t createInfoIndex_ = {}, uint64_t sysmemPixelFormat_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures_ = {}, VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA sysmemColorSpaceIndex_ = {}, VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents_ = {}, VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity, VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull, VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), memoryTypeBits( memoryTypeBits_ ), bufferCount( bufferCount_ ), createInfoIndex( createInfoIndex_ ), sysmemPixelFormat( sysmemPixelFormat_ ), formatFeatures( formatFeatures_ ), sysmemColorSpaceIndex( sysmemColorSpaceIndex_ ), samplerYcbcrConversionComponents( samplerYcbcrConversionComponents_ ), suggestedYcbcrModel( suggestedYcbcrModel_ ), suggestedYcbcrRange( suggestedYcbcrRange_ ), suggestedXChromaOffset( suggestedXChromaOffset_ ), suggestedYChromaOffset( suggestedYChromaOffset_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BufferCollectionPropertiesFUCHSIA( BufferCollectionPropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    BufferCollectionPropertiesFUCHSIA( VkBufferCollectionPropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
+      : BufferCollectionPropertiesFUCHSIA( *reinterpret_cast<BufferCollectionPropertiesFUCHSIA const *>( &rhs ) )
+    {}
+
+
+    BufferCollectionPropertiesFUCHSIA & operator=( BufferCollectionPropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    BufferCollectionPropertiesFUCHSIA & operator=( VkBufferCollectionPropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & setMemoryTypeBits( uint32_t memoryTypeBits_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memoryTypeBits = memoryTypeBits_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & setBufferCount( uint32_t bufferCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferCount = bufferCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & setCreateInfoIndex( uint32_t createInfoIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      createInfoIndex = createInfoIndex_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & setSysmemPixelFormat( uint64_t sysmemPixelFormat_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sysmemPixelFormat = sysmemPixelFormat_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & setFormatFeatures( VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures_ ) VULKAN_HPP_NOEXCEPT
+    {
+      formatFeatures = formatFeatures_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & setSysmemColorSpaceIndex( VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA const & sysmemColorSpaceIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sysmemColorSpaceIndex = sysmemColorSpaceIndex_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & setSamplerYcbcrConversionComponents( VULKAN_HPP_NAMESPACE::ComponentMapping const & samplerYcbcrConversionComponents_ ) VULKAN_HPP_NOEXCEPT
+    {
+      samplerYcbcrConversionComponents = samplerYcbcrConversionComponents_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & setSuggestedYcbcrModel( VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel_ ) VULKAN_HPP_NOEXCEPT
+    {
+      suggestedYcbcrModel = suggestedYcbcrModel_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & setSuggestedYcbcrRange( VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange_ ) VULKAN_HPP_NOEXCEPT
+    {
+      suggestedYcbcrRange = suggestedYcbcrRange_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & setSuggestedXChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      suggestedXChromaOffset = suggestedXChromaOffset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & setSuggestedYChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      suggestedYChromaOffset = suggestedYChromaOffset_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkBufferCollectionPropertiesFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBufferCollectionPropertiesFUCHSIA*>( this );
+    }
+
+    operator VkBufferCollectionPropertiesFUCHSIA &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBufferCollectionPropertiesFUCHSIA*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, uint32_t const &, uint64_t const &, VULKAN_HPP_NAMESPACE::FormatFeatureFlags const &, VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA const &, VULKAN_HPP_NAMESPACE::ComponentMapping const &, VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion const &, VULKAN_HPP_NAMESPACE::SamplerYcbcrRange const &, VULKAN_HPP_NAMESPACE::ChromaLocation const &, VULKAN_HPP_NAMESPACE::ChromaLocation const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, memoryTypeBits, bufferCount, createInfoIndex, sysmemPixelFormat, formatFeatures, sysmemColorSpaceIndex, samplerYcbcrConversionComponents, suggestedYcbcrModel, suggestedYcbcrRange, suggestedXChromaOffset, suggestedYChromaOffset );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( BufferCollectionPropertiesFUCHSIA const & ) const = default;
+#else
+    bool operator==( BufferCollectionPropertiesFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memoryTypeBits == rhs.memoryTypeBits )
+          && ( bufferCount == rhs.bufferCount )
+          && ( createInfoIndex == rhs.createInfoIndex )
+          && ( sysmemPixelFormat == rhs.sysmemPixelFormat )
+          && ( formatFeatures == rhs.formatFeatures )
+          && ( sysmemColorSpaceIndex == rhs.sysmemColorSpaceIndex )
+          && ( samplerYcbcrConversionComponents == rhs.samplerYcbcrConversionComponents )
+          && ( suggestedYcbcrModel == rhs.suggestedYcbcrModel )
+          && ( suggestedYcbcrRange == rhs.suggestedYcbcrRange )
+          && ( suggestedXChromaOffset == rhs.suggestedXChromaOffset )
+          && ( suggestedYChromaOffset == rhs.suggestedYChromaOffset );
+#endif
+    }
+
+    bool operator!=( BufferCollectionPropertiesFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCollectionPropertiesFUCHSIA;
+    void * pNext = {};
+    uint32_t memoryTypeBits = {};
+    uint32_t bufferCount = {};
+    uint32_t createInfoIndex = {};
+    uint64_t sysmemPixelFormat = {};
+    VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures = {};
+    VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA sysmemColorSpaceIndex = {};
+    VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents = {};
+    VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity;
+    VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull;
+    VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
+    VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eBufferCollectionPropertiesFUCHSIA>
+  {
+    using Type = BufferCollectionPropertiesFUCHSIA;
+  };
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  struct BufferCreateInfo
+  {
+    using NativeType = VkBufferCreateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCreateInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR BufferCreateInfo(VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ = {}, VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, uint32_t queueFamilyIndexCount_ = {}, const uint32_t * pQueueFamilyIndices_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), size( size_ ), usage( usage_ ), sharingMode( sharingMode_ ), queueFamilyIndexCount( queueFamilyIndexCount_ ), pQueueFamilyIndices( pQueueFamilyIndices_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BufferCreateInfo( BufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    BufferCreateInfo( VkBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : BufferCreateInfo( *reinterpret_cast<BufferCreateInfo const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    BufferCreateInfo( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_, VULKAN_HPP_NAMESPACE::DeviceSize size_, VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_, VULKAN_HPP_NAMESPACE::SharingMode sharingMode_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), flags( flags_ ), size( size_ ), usage( usage_ ), sharingMode( sharingMode_ ), queueFamilyIndexCount( static_cast<uint32_t>( queueFamilyIndices_.size() ) ), pQueueFamilyIndices( queueFamilyIndices_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    BufferCreateInfo & operator=( BufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    BufferCreateInfo & operator=( VkBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
+    {
+      size = size_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & setUsage( VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      usage = usage_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sharingMode = sharingMode_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueFamilyIndexCount = queueFamilyIndexCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pQueueFamilyIndices = pQueueFamilyIndices_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    BufferCreateInfo & setQueueFamilyIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueFamilyIndexCount = static_cast<uint32_t>( queueFamilyIndices_.size() );
+      pQueueFamilyIndices = queueFamilyIndices_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkBufferCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBufferCreateInfo*>( this );
+    }
+
+    operator VkBufferCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBufferCreateInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::BufferCreateFlags const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::BufferUsageFlags const &, VULKAN_HPP_NAMESPACE::SharingMode const &, uint32_t const &, const uint32_t * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, size, usage, sharingMode, queueFamilyIndexCount, pQueueFamilyIndices );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( BufferCreateInfo const & ) const = default;
+#else
+    bool operator==( BufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( size == rhs.size )
+          && ( usage == rhs.usage )
+          && ( sharingMode == rhs.sharingMode )
+          && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount )
+          && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices );
+#endif
+    }
+
+    bool operator!=( BufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::BufferCreateFlags flags = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+    VULKAN_HPP_NAMESPACE::BufferUsageFlags usage = {};
+    VULKAN_HPP_NAMESPACE::SharingMode sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive;
+    uint32_t queueFamilyIndexCount = {};
+    const uint32_t * pQueueFamilyIndices = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eBufferCreateInfo>
+  {
+    using Type = BufferCreateInfo;
+  };
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  struct BufferConstraintsInfoFUCHSIA
+  {
+    using NativeType = VkBufferConstraintsInfoFUCHSIA;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferConstraintsInfoFUCHSIA;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR BufferConstraintsInfoFUCHSIA(VULKAN_HPP_NAMESPACE::BufferCreateInfo createInfo_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures_ = {}, VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), createInfo( createInfo_ ), requiredFormatFeatures( requiredFormatFeatures_ ), bufferCollectionConstraints( bufferCollectionConstraints_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BufferConstraintsInfoFUCHSIA( BufferConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    BufferConstraintsInfoFUCHSIA( VkBufferConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
+      : BufferConstraintsInfoFUCHSIA( *reinterpret_cast<BufferConstraintsInfoFUCHSIA const *>( &rhs ) )
+    {}
+
+
+    BufferConstraintsInfoFUCHSIA & operator=( BufferConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    BufferConstraintsInfoFUCHSIA & operator=( VkBufferConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 BufferConstraintsInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferConstraintsInfoFUCHSIA & setCreateInfo( VULKAN_HPP_NAMESPACE::BufferCreateInfo const & createInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      createInfo = createInfo_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferConstraintsInfoFUCHSIA & setRequiredFormatFeatures( VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures_ ) VULKAN_HPP_NOEXCEPT
+    {
+      requiredFormatFeatures = requiredFormatFeatures_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferConstraintsInfoFUCHSIA & setBufferCollectionConstraints( VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA const & bufferCollectionConstraints_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferCollectionConstraints = bufferCollectionConstraints_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkBufferConstraintsInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBufferConstraintsInfoFUCHSIA*>( this );
+    }
+
+    operator VkBufferConstraintsInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBufferConstraintsInfoFUCHSIA*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::BufferCreateInfo const &, VULKAN_HPP_NAMESPACE::FormatFeatureFlags const &, VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, createInfo, requiredFormatFeatures, bufferCollectionConstraints );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( BufferConstraintsInfoFUCHSIA const & ) const = default;
+#else
+    bool operator==( BufferConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( createInfo == rhs.createInfo )
+          && ( requiredFormatFeatures == rhs.requiredFormatFeatures )
+          && ( bufferCollectionConstraints == rhs.bufferCollectionConstraints );
+#endif
+    }
+
+    bool operator!=( BufferConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferConstraintsInfoFUCHSIA;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::BufferCreateInfo createInfo = {};
+    VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures = {};
+    VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eBufferConstraintsInfoFUCHSIA>
+  {
+    using Type = BufferConstraintsInfoFUCHSIA;
+  };
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  struct BufferCopy
+  {
+    using NativeType = VkBufferCopy;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR BufferCopy(VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}) VULKAN_HPP_NOEXCEPT
+    : srcOffset( srcOffset_ ), dstOffset( dstOffset_ ), size( size_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BufferCopy( BufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    BufferCopy( VkBufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT
+      : BufferCopy( *reinterpret_cast<BufferCopy const *>( &rhs ) )
+    {}
+
+
+    BufferCopy & operator=( BufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    BufferCopy & operator=( VkBufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCopy const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 BufferCopy & setSrcOffset( VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcOffset = srcOffset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferCopy & setDstOffset( VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstOffset = dstOffset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferCopy & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
+    {
+      size = size_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkBufferCopy const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBufferCopy*>( this );
+    }
+
+    operator VkBufferCopy &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBufferCopy*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( srcOffset, dstOffset, size );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( BufferCopy const & ) const = default;
+#else
+    bool operator==( BufferCopy const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( srcOffset == rhs.srcOffset )
+          && ( dstOffset == rhs.dstOffset )
+          && ( size == rhs.size );
+#endif
+    }
+
+    bool operator!=( BufferCopy const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::DeviceSize srcOffset = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize dstOffset = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+
+  };
+
+  struct BufferCopy2
+  {
+    using NativeType = VkBufferCopy2;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCopy2;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR BufferCopy2(VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), srcOffset( srcOffset_ ), dstOffset( dstOffset_ ), size( size_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BufferCopy2( BufferCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    BufferCopy2( VkBufferCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT
+      : BufferCopy2( *reinterpret_cast<BufferCopy2 const *>( &rhs ) )
+    {}
+
+
+    BufferCopy2 & operator=( BufferCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    BufferCopy2 & operator=( VkBufferCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCopy2 const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 BufferCopy2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferCopy2 & setSrcOffset( VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcOffset = srcOffset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferCopy2 & setDstOffset( VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstOffset = dstOffset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferCopy2 & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
+    {
+      size = size_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkBufferCopy2 const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBufferCopy2*>( this );
+    }
+
+    operator VkBufferCopy2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBufferCopy2*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, srcOffset, dstOffset, size );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( BufferCopy2 const & ) const = default;
+#else
+    bool operator==( BufferCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( srcOffset == rhs.srcOffset )
+          && ( dstOffset == rhs.dstOffset )
+          && ( size == rhs.size );
+#endif
+    }
+
+    bool operator!=( BufferCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCopy2;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize srcOffset = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize dstOffset = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+
+  };
+
+  // Maps StructureType::eBufferCopy2 back to its C++ wrapper type (trait used
+  // by the structure-chain / type-mapping machinery).
+  template <>
+  struct CppType<StructureType, StructureType::eBufferCopy2>
+  {
+    using Type = BufferCopy2;
+  };
+  // Compatibility alias for the KHR-suffixed name of the promoted type.
+  using BufferCopy2KHR = BufferCopy2;
+
+  // C++ wrapper for VkBufferDeviceAddressCreateInfoEXT. Intended to be
+  // layout-compatible with the C struct: all conversions below are plain
+  // reinterpret_casts of this object.
+  struct BufferDeviceAddressCreateInfoEXT
+  {
+    using NativeType = VkBufferDeviceAddressCreateInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferDeviceAddressCreateInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR BufferDeviceAddressCreateInfoEXT(VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), deviceAddress( deviceAddress_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BufferDeviceAddressCreateInfoEXT( BufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct (bitwise copy via reinterpret_cast).
+    BufferDeviceAddressCreateInfoEXT( VkBufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : BufferDeviceAddressCreateInfoEXT( *reinterpret_cast<BufferDeviceAddressCreateInfoEXT const *>( &rhs ) )
+    {}
+
+
+    BufferDeviceAddressCreateInfoEXT & operator=( BufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (bitwise copy via reinterpret_cast).
+    BufferDeviceAddressCreateInfoEXT & operator=( VkBufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferDeviceAddressCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters (each returns *this).
+    VULKAN_HPP_CONSTEXPR_14 BufferDeviceAddressCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferDeviceAddressCreateInfoEXT & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceAddress = deviceAddress_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views as the native C struct.
+    operator VkBufferDeviceAddressCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBufferDeviceAddressCreateInfoEXT*>( this );
+    }
+
+    operator VkBufferDeviceAddressCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBufferDeviceAddressCreateInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // reflect(): tuple of references over all members, used by the
+    // reflection-based operator== below.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, deviceAddress );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( BufferDeviceAddressCreateInfoEXT const & ) const = default;
+#else
+    // Member-wise equality when <=> is unavailable.
+    bool operator==( BufferDeviceAddressCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( deviceAddress == rhs.deviceAddress );
+#endif
+    }
+
+    bool operator!=( BufferDeviceAddressCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferDeviceAddressCreateInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {};
+
+  };
+
+  // Maps the StructureType enumerant back to this wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eBufferDeviceAddressCreateInfoEXT>
+  {
+    using Type = BufferDeviceAddressCreateInfoEXT;
+  };
+
+  // C++ wrapper for VkBufferDeviceAddressInfo. Intended to be
+  // layout-compatible with the C struct: all conversions below are plain
+  // reinterpret_casts of this object.
+  struct BufferDeviceAddressInfo
+  {
+    using NativeType = VkBufferDeviceAddressInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferDeviceAddressInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR BufferDeviceAddressInfo(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), buffer( buffer_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BufferDeviceAddressInfo( BufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct (bitwise copy via reinterpret_cast).
+    BufferDeviceAddressInfo( VkBufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : BufferDeviceAddressInfo( *reinterpret_cast<BufferDeviceAddressInfo const *>( &rhs ) )
+    {}
+
+
+    BufferDeviceAddressInfo & operator=( BufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (bitwise copy via reinterpret_cast).
+    BufferDeviceAddressInfo & operator=( VkBufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters (each returns *this).
+    VULKAN_HPP_CONSTEXPR_14 BufferDeviceAddressInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferDeviceAddressInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views as the native C struct.
+    operator VkBufferDeviceAddressInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBufferDeviceAddressInfo*>( this );
+    }
+
+    operator VkBufferDeviceAddressInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBufferDeviceAddressInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // reflect(): tuple of references over all members, used by the
+    // reflection-based operator== below.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Buffer const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, buffer );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( BufferDeviceAddressInfo const & ) const = default;
+#else
+    // Member-wise equality when <=> is unavailable.
+    bool operator==( BufferDeviceAddressInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( buffer == rhs.buffer );
+#endif
+    }
+
+    bool operator!=( BufferDeviceAddressInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferDeviceAddressInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+
+  };
+
+  // Maps the StructureType enumerant back to this wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eBufferDeviceAddressInfo>
+  {
+    using Type = BufferDeviceAddressInfo;
+  };
+  // Compatibility aliases for the pre-promotion EXT/KHR names.
+  using BufferDeviceAddressInfoEXT = BufferDeviceAddressInfo;
+  using BufferDeviceAddressInfoKHR = BufferDeviceAddressInfo;
+
+  // C++ wrapper for VkBufferImageCopy. This is a plain (non-extensible)
+  // struct: it carries no sType/pNext. Intended to be layout-compatible with
+  // the C struct: all conversions below are plain reinterpret_casts.
+  struct BufferImageCopy
+  {
+    using NativeType = VkBufferImageCopy;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR BufferImageCopy(VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ = {}, uint32_t bufferRowLength_ = {}, uint32_t bufferImageHeight_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {}) VULKAN_HPP_NOEXCEPT
+    : bufferOffset( bufferOffset_ ), bufferRowLength( bufferRowLength_ ), bufferImageHeight( bufferImageHeight_ ), imageSubresource( imageSubresource_ ), imageOffset( imageOffset_ ), imageExtent( imageExtent_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BufferImageCopy( BufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct (bitwise copy via reinterpret_cast).
+    BufferImageCopy( VkBufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
+      : BufferImageCopy( *reinterpret_cast<BufferImageCopy const *>( &rhs ) )
+    {}
+
+
+    BufferImageCopy & operator=( BufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (bitwise copy via reinterpret_cast).
+    BufferImageCopy & operator=( VkBufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferImageCopy const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters (each returns *this).
+    VULKAN_HPP_CONSTEXPR_14 BufferImageCopy & setBufferOffset( VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferOffset = bufferOffset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferImageCopy & setBufferRowLength( uint32_t bufferRowLength_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferRowLength = bufferRowLength_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferImageCopy & setBufferImageHeight( uint32_t bufferImageHeight_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferImageHeight = bufferImageHeight_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferImageCopy & setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageSubresource = imageSubresource_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferImageCopy & setImageOffset( VULKAN_HPP_NAMESPACE::Offset3D const & imageOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageOffset = imageOffset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferImageCopy & setImageExtent( VULKAN_HPP_NAMESPACE::Extent3D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageExtent = imageExtent_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views as the native C struct.
+    operator VkBufferImageCopy const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBufferImageCopy*>( this );
+    }
+
+    operator VkBufferImageCopy &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBufferImageCopy*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // reflect(): tuple of references over all members, used by the
+    // reflection-based operator== below.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::DeviceSize const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &, VULKAN_HPP_NAMESPACE::Offset3D const &, VULKAN_HPP_NAMESPACE::Extent3D const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( bufferOffset, bufferRowLength, bufferImageHeight, imageSubresource, imageOffset, imageExtent );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( BufferImageCopy const & ) const = default;
+#else
+    // Member-wise equality when <=> is unavailable.
+    bool operator==( BufferImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( bufferOffset == rhs.bufferOffset )
+          && ( bufferRowLength == rhs.bufferRowLength )
+          && ( bufferImageHeight == rhs.bufferImageHeight )
+          && ( imageSubresource == rhs.imageSubresource )
+          && ( imageOffset == rhs.imageOffset )
+          && ( imageExtent == rhs.imageExtent );
+#endif
+    }
+
+    bool operator!=( BufferImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset = {};
+    uint32_t bufferRowLength = {};
+    uint32_t bufferImageHeight = {};
+    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource = {};
+    VULKAN_HPP_NAMESPACE::Offset3D imageOffset = {};
+    VULKAN_HPP_NAMESPACE::Extent3D imageExtent = {};
+
+  };
+
+  // C++ wrapper for VkBufferImageCopy2 (extensible variant with sType/pNext).
+  // Intended to be layout-compatible with the C struct: all conversions below
+  // are plain reinterpret_casts of this object.
+  struct BufferImageCopy2
+  {
+    using NativeType = VkBufferImageCopy2;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferImageCopy2;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR BufferImageCopy2(VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ = {}, uint32_t bufferRowLength_ = {}, uint32_t bufferImageHeight_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), bufferOffset( bufferOffset_ ), bufferRowLength( bufferRowLength_ ), bufferImageHeight( bufferImageHeight_ ), imageSubresource( imageSubresource_ ), imageOffset( imageOffset_ ), imageExtent( imageExtent_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BufferImageCopy2( BufferImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct (bitwise copy via reinterpret_cast).
+    BufferImageCopy2( VkBufferImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT
+      : BufferImageCopy2( *reinterpret_cast<BufferImageCopy2 const *>( &rhs ) )
+    {}
+
+
+    BufferImageCopy2 & operator=( BufferImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (bitwise copy via reinterpret_cast).
+    BufferImageCopy2 & operator=( VkBufferImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferImageCopy2 const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters (each returns *this).
+    VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & setBufferOffset( VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferOffset = bufferOffset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & setBufferRowLength( uint32_t bufferRowLength_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferRowLength = bufferRowLength_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & setBufferImageHeight( uint32_t bufferImageHeight_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferImageHeight = bufferImageHeight_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageSubresource = imageSubresource_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & setImageOffset( VULKAN_HPP_NAMESPACE::Offset3D const & imageOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageOffset = imageOffset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & setImageExtent( VULKAN_HPP_NAMESPACE::Extent3D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageExtent = imageExtent_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views as the native C struct.
+    operator VkBufferImageCopy2 const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBufferImageCopy2*>( this );
+    }
+
+    operator VkBufferImageCopy2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBufferImageCopy2*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // reflect(): tuple of references over all members, used by the
+    // reflection-based operator== below.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &, VULKAN_HPP_NAMESPACE::Offset3D const &, VULKAN_HPP_NAMESPACE::Extent3D const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, bufferOffset, bufferRowLength, bufferImageHeight, imageSubresource, imageOffset, imageExtent );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( BufferImageCopy2 const & ) const = default;
+#else
+    // Member-wise equality when <=> is unavailable.
+    bool operator==( BufferImageCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( bufferOffset == rhs.bufferOffset )
+          && ( bufferRowLength == rhs.bufferRowLength )
+          && ( bufferImageHeight == rhs.bufferImageHeight )
+          && ( imageSubresource == rhs.imageSubresource )
+          && ( imageOffset == rhs.imageOffset )
+          && ( imageExtent == rhs.imageExtent );
+#endif
+    }
+
+    bool operator!=( BufferImageCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferImageCopy2;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset = {};
+    uint32_t bufferRowLength = {};
+    uint32_t bufferImageHeight = {};
+    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource = {};
+    VULKAN_HPP_NAMESPACE::Offset3D imageOffset = {};
+    VULKAN_HPP_NAMESPACE::Extent3D imageExtent = {};
+
+  };
+
+  // Maps the StructureType enumerant back to this wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eBufferImageCopy2>
+  {
+    using Type = BufferImageCopy2;
+  };
+  // Compatibility alias for the KHR-suffixed name of the promoted type.
+  using BufferImageCopy2KHR = BufferImageCopy2;
+
+  // C++ wrapper for VkBufferMemoryBarrier. Intended to be layout-compatible
+  // with the C struct: all conversions below are plain reinterpret_casts of
+  // this object.
+  struct BufferMemoryBarrier
+  {
+    using NativeType = VkBufferMemoryBarrier;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferMemoryBarrier;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR BufferMemoryBarrier(VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, uint32_t srcQueueFamilyIndex_ = {}, uint32_t dstQueueFamilyIndex_ = {}, VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), srcAccessMask( srcAccessMask_ ), dstAccessMask( dstAccessMask_ ), srcQueueFamilyIndex( srcQueueFamilyIndex_ ), dstQueueFamilyIndex( dstQueueFamilyIndex_ ), buffer( buffer_ ), offset( offset_ ), size( size_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BufferMemoryBarrier( BufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct (bitwise copy via reinterpret_cast).
+    BufferMemoryBarrier( VkBufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
+      : BufferMemoryBarrier( *reinterpret_cast<BufferMemoryBarrier const *>( &rhs ) )
+    {}
+
+
+    BufferMemoryBarrier & operator=( BufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (bitwise copy via reinterpret_cast).
+    BufferMemoryBarrier & operator=( VkBufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferMemoryBarrier const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters (each returns *this).
+    VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcAccessMask = srcAccessMask_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstAccessMask = dstAccessMask_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcQueueFamilyIndex = srcQueueFamilyIndex_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstQueueFamilyIndex = dstQueueFamilyIndex_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
+    {
+      size = size_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views as the native C struct.
+    operator VkBufferMemoryBarrier const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBufferMemoryBarrier*>( this );
+    }
+
+    operator VkBufferMemoryBarrier &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBufferMemoryBarrier*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // reflect(): tuple of references over all members, used by the
+    // reflection-based operator== below.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::AccessFlags const &, VULKAN_HPP_NAMESPACE::AccessFlags const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Buffer const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, srcAccessMask, dstAccessMask, srcQueueFamilyIndex, dstQueueFamilyIndex, buffer, offset, size );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( BufferMemoryBarrier const & ) const = default;
+#else
+    // Member-wise equality when <=> is unavailable.
+    bool operator==( BufferMemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( srcAccessMask == rhs.srcAccessMask )
+          && ( dstAccessMask == rhs.dstAccessMask )
+          && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex )
+          && ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex )
+          && ( buffer == rhs.buffer )
+          && ( offset == rhs.offset )
+          && ( size == rhs.size );
+#endif
+    }
+
+    bool operator!=( BufferMemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferMemoryBarrier;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {};
+    VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {};
+    uint32_t srcQueueFamilyIndex = {};
+    uint32_t dstQueueFamilyIndex = {};
+    VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+
+  };
+
+  // Maps the StructureType enumerant back to this wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eBufferMemoryBarrier>
+  {
+    using Type = BufferMemoryBarrier;
+  };
+
+  // C++ wrapper for VkBufferMemoryBarrier2 (synchronization2 variant with
+  // explicit src/dst stage masks). Intended to be layout-compatible with the
+  // C struct: all conversions below are plain reinterpret_casts.
+  struct BufferMemoryBarrier2
+  {
+    using NativeType = VkBufferMemoryBarrier2;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferMemoryBarrier2;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR BufferMemoryBarrier2(VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ = {}, uint32_t srcQueueFamilyIndex_ = {}, uint32_t dstQueueFamilyIndex_ = {}, VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), srcStageMask( srcStageMask_ ), srcAccessMask( srcAccessMask_ ), dstStageMask( dstStageMask_ ), dstAccessMask( dstAccessMask_ ), srcQueueFamilyIndex( srcQueueFamilyIndex_ ), dstQueueFamilyIndex( dstQueueFamilyIndex_ ), buffer( buffer_ ), offset( offset_ ), size( size_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BufferMemoryBarrier2( BufferMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct (bitwise copy via reinterpret_cast).
+    BufferMemoryBarrier2( VkBufferMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT
+      : BufferMemoryBarrier2( *reinterpret_cast<BufferMemoryBarrier2 const *>( &rhs ) )
+    {}
+
+
+    BufferMemoryBarrier2 & operator=( BufferMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (bitwise copy via reinterpret_cast).
+    BufferMemoryBarrier2 & operator=( VkBufferMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2 const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters (each returns *this).
+    VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcStageMask = srcStageMask_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcAccessMask = srcAccessMask_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstStageMask = dstStageMask_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstAccessMask = dstAccessMask_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcQueueFamilyIndex = srcQueueFamilyIndex_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstQueueFamilyIndex = dstQueueFamilyIndex_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
+    {
+      size = size_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views as the native C struct.
+    operator VkBufferMemoryBarrier2 const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBufferMemoryBarrier2*>( this );
+    }
+
+    operator VkBufferMemoryBarrier2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBufferMemoryBarrier2*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // reflect(): tuple of references over all members, used by the
+    // reflection-based operator== below.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 const &, VULKAN_HPP_NAMESPACE::AccessFlags2 const &, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 const &, VULKAN_HPP_NAMESPACE::AccessFlags2 const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Buffer const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, srcStageMask, srcAccessMask, dstStageMask, dstAccessMask, srcQueueFamilyIndex, dstQueueFamilyIndex, buffer, offset, size );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( BufferMemoryBarrier2 const & ) const = default;
+#else
+    // Member-wise equality when <=> is unavailable.
+    bool operator==( BufferMemoryBarrier2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( srcStageMask == rhs.srcStageMask )
+          && ( srcAccessMask == rhs.srcAccessMask )
+          && ( dstStageMask == rhs.dstStageMask )
+          && ( dstAccessMask == rhs.dstAccessMask )
+          && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex )
+          && ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex )
+          && ( buffer == rhs.buffer )
+          && ( offset == rhs.offset )
+          && ( size == rhs.size );
+#endif
+    }
+
+    bool operator!=( BufferMemoryBarrier2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferMemoryBarrier2;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask = {};
+    VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask = {};
+    VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask = {};
+    VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask = {};
+    uint32_t srcQueueFamilyIndex = {};
+    uint32_t dstQueueFamilyIndex = {};
+    VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+
+  };
+
+  // Maps the StructureType enumerant back to this wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eBufferMemoryBarrier2>
+  {
+    using Type = BufferMemoryBarrier2;
+  };
+  // Compatibility alias for the KHR-suffixed name of the promoted type.
+  using BufferMemoryBarrier2KHR = BufferMemoryBarrier2;
+
+  // C++ wrapper for VkBufferMemoryRequirementsInfo2. Intended to be
+  // layout-compatible with the C struct: all conversions below are plain
+  // reinterpret_casts of this object.
+  struct BufferMemoryRequirementsInfo2
+  {
+    using NativeType = VkBufferMemoryRequirementsInfo2;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferMemoryRequirementsInfo2;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR BufferMemoryRequirementsInfo2(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), buffer( buffer_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BufferMemoryRequirementsInfo2( BufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct (bitwise copy via reinterpret_cast).
+    BufferMemoryRequirementsInfo2( VkBufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+      : BufferMemoryRequirementsInfo2( *reinterpret_cast<BufferMemoryRequirementsInfo2 const *>( &rhs ) )
+    {}
+
+
+    BufferMemoryRequirementsInfo2 & operator=( BufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (bitwise copy via reinterpret_cast).
+    BufferMemoryRequirementsInfo2 & operator=( VkBufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters (each returns *this).
+    VULKAN_HPP_CONSTEXPR_14 BufferMemoryRequirementsInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferMemoryRequirementsInfo2 & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views as the native C struct.
+    operator VkBufferMemoryRequirementsInfo2 const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBufferMemoryRequirementsInfo2*>( this );
+    }
+
+    operator VkBufferMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBufferMemoryRequirementsInfo2*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // reflect(): tuple of references over all members, used by the
+    // reflection-based operator== below.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Buffer const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, buffer );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( BufferMemoryRequirementsInfo2 const & ) const = default;
+#else
+    // Member-wise equality when <=> is unavailable.
+    bool operator==( BufferMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( buffer == rhs.buffer );
+#endif
+    }
+
+    bool operator!=( BufferMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferMemoryRequirementsInfo2;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eBufferMemoryRequirementsInfo2>
+  {
+    using Type = BufferMemoryRequirementsInfo2;
+  };
+  using BufferMemoryRequirementsInfo2KHR = BufferMemoryRequirementsInfo2;
+
+  // C++ wrapper for VkBufferOpaqueCaptureAddressCreateInfo (pNext-chained into
+  // VkBufferCreateInfo to request a specific opaque capture address).
+  // Layout-identical to the C struct; member order sType, pNext,
+  // opaqueCaptureAddress must not change.
+  struct BufferOpaqueCaptureAddressCreateInfo
+  {
+    using NativeType = VkBufferOpaqueCaptureAddressCreateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferOpaqueCaptureAddressCreateInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR BufferOpaqueCaptureAddressCreateInfo(uint64_t opaqueCaptureAddress_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), opaqueCaptureAddress( opaqueCaptureAddress_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BufferOpaqueCaptureAddressCreateInfo( BufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid because of the layout guarantee above.
+    BufferOpaqueCaptureAddressCreateInfo( VkBufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : BufferOpaqueCaptureAddressCreateInfo( *reinterpret_cast<BufferOpaqueCaptureAddressCreateInfo const *>( &rhs ) )
+    {}
+
+
+    BufferOpaqueCaptureAddressCreateInfo & operator=( BufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    BufferOpaqueCaptureAddressCreateInfo & operator=( VkBufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 BufferOpaqueCaptureAddressCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferOpaqueCaptureAddressCreateInfo & setOpaqueCaptureAddress( uint64_t opaqueCaptureAddress_ ) VULKAN_HPP_NOEXCEPT
+    {
+      opaqueCaptureAddress = opaqueCaptureAddress_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views of this object as the C struct (no copy made).
+    operator VkBufferOpaqueCaptureAddressCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBufferOpaqueCaptureAddressCreateInfo*>( this );
+    }
+
+    operator VkBufferOpaqueCaptureAddressCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBufferOpaqueCaptureAddressCreateInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint64_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, opaqueCaptureAddress );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( BufferOpaqueCaptureAddressCreateInfo const & ) const = default;
+#else
+    // Member-wise equality (falls back to reflect() when reflection is enabled).
+    bool operator==( BufferOpaqueCaptureAddressCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( opaqueCaptureAddress == rhs.opaqueCaptureAddress );
+#endif
+    }
+
+    bool operator!=( BufferOpaqueCaptureAddressCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferOpaqueCaptureAddressCreateInfo;
+    const void * pNext = {};
+    uint64_t opaqueCaptureAddress = {};
+
+  };
+
+  // Maps the sType enumerant back to this wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eBufferOpaqueCaptureAddressCreateInfo>
+  {
+    using Type = BufferOpaqueCaptureAddressCreateInfo;
+  };
+  // VK_KHR_buffer_device_address name: identical to the promoted core type.
+  using BufferOpaqueCaptureAddressCreateInfoKHR = BufferOpaqueCaptureAddressCreateInfo;
+
+  // C++ wrapper for VkBufferViewCreateInfo (input to vkCreateBufferView).
+  // Layout-identical to the C struct; member order sType, pNext, flags,
+  // buffer, format, offset, range must not change.
+  struct BufferViewCreateInfo
+  {
+    using NativeType = VkBufferViewCreateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferViewCreateInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR BufferViewCreateInfo(VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize range_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), buffer( buffer_ ), format( format_ ), offset( offset_ ), range( range_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BufferViewCreateInfo( BufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid because of the layout guarantee above.
+    BufferViewCreateInfo( VkBufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : BufferViewCreateInfo( *reinterpret_cast<BufferViewCreateInfo const *>( &rhs ) )
+    {}
+
+
+    BufferViewCreateInfo & operator=( BufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    BufferViewCreateInfo & operator=( VkBufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferViewCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters for builder-style initialization.
+    VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
+    {
+      format = format_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo & setRange( VULKAN_HPP_NAMESPACE::DeviceSize range_ ) VULKAN_HPP_NOEXCEPT
+    {
+      range = range_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views of this object as the C struct (no copy made).
+    operator VkBufferViewCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBufferViewCreateInfo*>( this );
+    }
+
+    operator VkBufferViewCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBufferViewCreateInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::BufferViewCreateFlags const &, VULKAN_HPP_NAMESPACE::Buffer const &, VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, buffer, format, offset, range );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( BufferViewCreateInfo const & ) const = default;
+#else
+    // Member-wise equality (falls back to reflect() when reflection is enabled).
+    bool operator==( BufferViewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( buffer == rhs.buffer )
+          && ( format == rhs.format )
+          && ( offset == rhs.offset )
+          && ( range == rhs.range );
+#endif
+    }
+
+    bool operator!=( BufferViewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferViewCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags = {};
+    VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+    VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize range = {};
+
+  };
+
+  // Maps the sType enumerant back to this wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eBufferViewCreateInfo>
+  {
+    using Type = BufferViewCreateInfo;
+  };
+
+  // C++ wrapper for VkCalibratedTimestampInfoEXT (VK_EXT_calibrated_timestamps;
+  // selects the time domain for vkGetCalibratedTimestampsEXT). Layout-identical
+  // to the C struct; member order sType, pNext, timeDomain must not change.
+  struct CalibratedTimestampInfoEXT
+  {
+    using NativeType = VkCalibratedTimestampInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCalibratedTimestampInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR CalibratedTimestampInfoEXT(VULKAN_HPP_NAMESPACE::TimeDomainEXT timeDomain_ = VULKAN_HPP_NAMESPACE::TimeDomainEXT::eDevice, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), timeDomain( timeDomain_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CalibratedTimestampInfoEXT( CalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid because of the layout guarantee above.
+    CalibratedTimestampInfoEXT( VkCalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : CalibratedTimestampInfoEXT( *reinterpret_cast<CalibratedTimestampInfoEXT const *>( &rhs ) )
+    {}
+
+
+    CalibratedTimestampInfoEXT & operator=( CalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    CalibratedTimestampInfoEXT & operator=( VkCalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 CalibratedTimestampInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CalibratedTimestampInfoEXT & setTimeDomain( VULKAN_HPP_NAMESPACE::TimeDomainEXT timeDomain_ ) VULKAN_HPP_NOEXCEPT
+    {
+      timeDomain = timeDomain_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views of this object as the C struct (no copy made).
+    operator VkCalibratedTimestampInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCalibratedTimestampInfoEXT*>( this );
+    }
+
+    operator VkCalibratedTimestampInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCalibratedTimestampInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::TimeDomainEXT const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, timeDomain );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( CalibratedTimestampInfoEXT const & ) const = default;
+#else
+    // Member-wise equality (falls back to reflect() when reflection is enabled).
+    bool operator==( CalibratedTimestampInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( timeDomain == rhs.timeDomain );
+#endif
+    }
+
+    bool operator!=( CalibratedTimestampInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCalibratedTimestampInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::TimeDomainEXT timeDomain = VULKAN_HPP_NAMESPACE::TimeDomainEXT::eDevice;
+
+  };
+
+  // Maps the sType enumerant back to this wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eCalibratedTimestampInfoEXT>
+  {
+    using Type = CalibratedTimestampInfoEXT;
+  };
+
+  // C++ wrapper for VkCheckpointData2NV (output of
+  // vkGetQueueCheckpointData2NV). Output-only struct, so no setters are
+  // generated. Layout-identical to the C struct; member order sType, pNext,
+  // stage, pCheckpointMarker must not change.
+  struct CheckpointData2NV
+  {
+    using NativeType = VkCheckpointData2NV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCheckpointData2NV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR CheckpointData2NV(VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage_ = {}, void * pCheckpointMarker_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), stage( stage_ ), pCheckpointMarker( pCheckpointMarker_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CheckpointData2NV( CheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid because of the layout guarantee above.
+    CheckpointData2NV( VkCheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : CheckpointData2NV( *reinterpret_cast<CheckpointData2NV const *>( &rhs ) )
+    {}
+
+
+    CheckpointData2NV & operator=( CheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    CheckpointData2NV & operator=( VkCheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CheckpointData2NV const *>( &rhs );
+      return *this;
+    }
+
+
+    // Zero-cost views of this object as the C struct (no copy made).
+    operator VkCheckpointData2NV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCheckpointData2NV*>( this );
+    }
+
+    operator VkCheckpointData2NV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCheckpointData2NV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 const &, void * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, stage, pCheckpointMarker );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( CheckpointData2NV const & ) const = default;
+#else
+    // Member-wise equality (falls back to reflect() when reflection is enabled).
+    bool operator==( CheckpointData2NV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( stage == rhs.stage )
+          && ( pCheckpointMarker == rhs.pCheckpointMarker );
+#endif
+    }
+
+    bool operator!=( CheckpointData2NV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCheckpointData2NV;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage = {};
+    void * pCheckpointMarker = {};
+
+  };
+
+  // Maps the sType enumerant back to this wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eCheckpointData2NV>
+  {
+    using Type = CheckpointData2NV;
+  };
+
+  // C++ wrapper for VkCheckpointDataNV (output of vkGetQueueCheckpointDataNV).
+  // Output-only struct, so no setters are generated. Layout-identical to the
+  // C struct; member order sType, pNext, stage, pCheckpointMarker must not
+  // change.
+  struct CheckpointDataNV
+  {
+    using NativeType = VkCheckpointDataNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCheckpointDataNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR CheckpointDataNV(VULKAN_HPP_NAMESPACE::PipelineStageFlagBits stage_ = VULKAN_HPP_NAMESPACE::PipelineStageFlagBits::eTopOfPipe, void * pCheckpointMarker_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), stage( stage_ ), pCheckpointMarker( pCheckpointMarker_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CheckpointDataNV( CheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid because of the layout guarantee above.
+    CheckpointDataNV( VkCheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : CheckpointDataNV( *reinterpret_cast<CheckpointDataNV const *>( &rhs ) )
+    {}
+
+
+    CheckpointDataNV & operator=( CheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    CheckpointDataNV & operator=( VkCheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CheckpointDataNV const *>( &rhs );
+      return *this;
+    }
+
+
+    // Zero-cost views of this object as the C struct (no copy made).
+    operator VkCheckpointDataNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCheckpointDataNV*>( this );
+    }
+
+    operator VkCheckpointDataNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCheckpointDataNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PipelineStageFlagBits const &, void * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, stage, pCheckpointMarker );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( CheckpointDataNV const & ) const = default;
+#else
+    // Member-wise equality (falls back to reflect() when reflection is enabled).
+    bool operator==( CheckpointDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( stage == rhs.stage )
+          && ( pCheckpointMarker == rhs.pCheckpointMarker );
+#endif
+    }
+
+    bool operator!=( CheckpointDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCheckpointDataNV;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineStageFlagBits stage = VULKAN_HPP_NAMESPACE::PipelineStageFlagBits::eTopOfPipe;
+    void * pCheckpointMarker = {};
+
+  };
+
+  // Maps the sType enumerant back to this wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eCheckpointDataNV>
+  {
+    using Type = CheckpointDataNV;
+  };
+
+  // C++ wrapper for the VkClearColorValue union: one clear color expressed as
+  // four floats, four int32s, or four uint32s. Which member is valid depends
+  // on the image format it is used with; the union itself does not track it.
+  union ClearColorValue
+  {
+    using NativeType = VkClearColorValue;
+#if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )
+
+    VULKAN_HPP_CONSTEXPR_14 ClearColorValue( const std::array<float,4>& float32_ = {} )
+      : float32( float32_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ClearColorValue( float float32_0, float float32_1, float float32_2, float float32_3 )
+      : float32{ { { float32_0, float32_1, float32_2, float32_3 } } }
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 ClearColorValue( const std::array<int32_t,4>& int32_ )
+      : int32( int32_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ClearColorValue( int32_t int32_0, int32_t int32_1, int32_t int32_2, int32_t int32_3 )
+      : int32{ { { int32_0, int32_1, int32_2, int32_3 } } }
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 ClearColorValue( const std::array<uint32_t,4>& uint32_ )
+      : uint32( uint32_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ClearColorValue( uint32_t uint32_0, uint32_t uint32_1, uint32_t uint32_2, uint32_t uint32_3 )
+      : uint32{ { { uint32_0, uint32_1, uint32_2, uint32_3 } } }
+    {}
+#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/
+
+#if !defined( VULKAN_HPP_NO_UNION_SETTERS )
+    // Each setter activates the corresponding union member.
+    VULKAN_HPP_CONSTEXPR_14 ClearColorValue & setFloat32( std::array<float,4> float32_ ) VULKAN_HPP_NOEXCEPT
+    {
+      float32 = float32_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ClearColorValue & setInt32( std::array<int32_t,4> int32_ ) VULKAN_HPP_NOEXCEPT
+    {
+      int32 = int32_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ClearColorValue & setUint32( std::array<uint32_t,4> uint32_ ) VULKAN_HPP_NOEXCEPT
+    {
+      uint32 = uint32_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_UNION_SETTERS*/
+
+    // Zero-cost views of this object as the C union (no copy made).
+    operator VkClearColorValue const &() const
+    {
+      return *reinterpret_cast<const VkClearColorValue*>( this );
+    }
+
+    operator VkClearColorValue &()
+    {
+      return *reinterpret_cast<VkClearColorValue*>( this );
+    }
+
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> float32;
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<int32_t, 4> int32;
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 4> uint32;
+
+  };
+
+  // C++ wrapper for VkClearDepthStencilValue (depth/stencil clear values).
+  // Plain data struct with no sType/pNext; layout-identical to the C struct
+  // (member order depth, stencil).
+  struct ClearDepthStencilValue
+  {
+    using NativeType = VkClearDepthStencilValue;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ClearDepthStencilValue(float depth_ = {}, uint32_t stencil_ = {}) VULKAN_HPP_NOEXCEPT
+    : depth( depth_ ), stencil( stencil_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ClearDepthStencilValue( ClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid because of the layout guarantee above.
+    ClearDepthStencilValue( VkClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ClearDepthStencilValue( *reinterpret_cast<ClearDepthStencilValue const *>( &rhs ) )
+    {}
+
+
+    ClearDepthStencilValue & operator=( ClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ClearDepthStencilValue & operator=( VkClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ClearDepthStencilValue const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ClearDepthStencilValue & setDepth( float depth_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depth = depth_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ClearDepthStencilValue & setStencil( uint32_t stencil_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stencil = stencil_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views of this object as the C struct (no copy made).
+    operator VkClearDepthStencilValue const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkClearDepthStencilValue*>( this );
+    }
+
+    operator VkClearDepthStencilValue &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkClearDepthStencilValue*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<float const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( depth, stencil );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ClearDepthStencilValue const & ) const = default;
+#else
+    // Member-wise equality (note: depth is a float, compared exactly).
+    bool operator==( ClearDepthStencilValue const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( depth == rhs.depth )
+          && ( stencil == rhs.stencil );
+#endif
+    }
+
+    bool operator!=( ClearDepthStencilValue const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    float depth = {};
+    uint32_t stencil = {};
+
+  };
+
+  // C++ wrapper for the VkClearValue union: either a color clear value or a
+  // depth/stencil clear value, depending on the attachment being cleared.
+  union ClearValue
+  {
+    using NativeType = VkClearValue;
+#if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )
+
+    VULKAN_HPP_CONSTEXPR_14 ClearValue( VULKAN_HPP_NAMESPACE::ClearColorValue color_ = {} )
+      : color( color_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 ClearValue( VULKAN_HPP_NAMESPACE::ClearDepthStencilValue depthStencil_ )
+      : depthStencil( depthStencil_ )
+    {}
+#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/
+
+#if !defined( VULKAN_HPP_NO_UNION_SETTERS )
+    // Each setter activates the corresponding union member.
+    VULKAN_HPP_CONSTEXPR_14 ClearValue & setColor( VULKAN_HPP_NAMESPACE::ClearColorValue const & color_ ) VULKAN_HPP_NOEXCEPT
+    {
+      color = color_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ClearValue & setDepthStencil( VULKAN_HPP_NAMESPACE::ClearDepthStencilValue const & depthStencil_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthStencil = depthStencil_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_UNION_SETTERS*/
+
+    // Zero-cost views of this object as the C union (no copy made).
+    operator VkClearValue const &() const
+    {
+      return *reinterpret_cast<const VkClearValue*>( this );
+    }
+
+    operator VkClearValue &()
+    {
+      return *reinterpret_cast<VkClearValue*>( this );
+    }
+
+    // Without unrestricted unions (pre-C++11 semantics) the wrapper types
+    // cannot be union members, so the raw C types are used instead.
+#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
+    VULKAN_HPP_NAMESPACE::ClearColorValue color;
+    VULKAN_HPP_NAMESPACE::ClearDepthStencilValue depthStencil;
+#else
+    VkClearColorValue color;
+    VkClearDepthStencilValue depthStencil;
+#endif  /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
+
+  };
+
+  // C++ wrapper for VkClearAttachment (input to vkCmdClearAttachments).
+  // Contains a union member (clearValue), so no equality operators are
+  // generated. Layout-identical to the C struct; member order aspectMask,
+  // colorAttachment, clearValue must not change.
+  struct ClearAttachment
+  {
+    using NativeType = VkClearAttachment;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR_14 ClearAttachment(VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, uint32_t colorAttachment_ = {}, VULKAN_HPP_NAMESPACE::ClearValue clearValue_ = {}) VULKAN_HPP_NOEXCEPT
+    : aspectMask( aspectMask_ ), colorAttachment( colorAttachment_ ), clearValue( clearValue_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 ClearAttachment( ClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid because of the layout guarantee above.
+    ClearAttachment( VkClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ClearAttachment( *reinterpret_cast<ClearAttachment const *>( &rhs ) )
+    {}
+
+
+    ClearAttachment & operator=( ClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ClearAttachment & operator=( VkClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ClearAttachment const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ClearAttachment & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      aspectMask = aspectMask_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ClearAttachment & setColorAttachment( uint32_t colorAttachment_ ) VULKAN_HPP_NOEXCEPT
+    {
+      colorAttachment = colorAttachment_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ClearAttachment & setClearValue( VULKAN_HPP_NAMESPACE::ClearValue const & clearValue_ ) VULKAN_HPP_NOEXCEPT
+    {
+      clearValue = clearValue_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views of this object as the C struct (no copy made).
+    operator VkClearAttachment const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkClearAttachment*>( this );
+    }
+
+    operator VkClearAttachment &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkClearAttachment*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::ImageAspectFlags const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ClearValue const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( aspectMask, colorAttachment, clearValue );
+    }
+#endif
+
+
+    public:
+    VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
+    uint32_t colorAttachment = {};
+    VULKAN_HPP_NAMESPACE::ClearValue clearValue = {};
+
+  };
+
+  // C++ wrapper for VkClearRect (region cleared by vkCmdClearAttachments:
+  // a 2D rectangle plus a layer range). Layout-identical to the C struct;
+  // member order rect, baseArrayLayer, layerCount must not change.
+  struct ClearRect
+  {
+    using NativeType = VkClearRect;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ClearRect(VULKAN_HPP_NAMESPACE::Rect2D rect_ = {}, uint32_t baseArrayLayer_ = {}, uint32_t layerCount_ = {}) VULKAN_HPP_NOEXCEPT
+    : rect( rect_ ), baseArrayLayer( baseArrayLayer_ ), layerCount( layerCount_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ClearRect( ClearRect const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid because of the layout guarantee above.
+    ClearRect( VkClearRect const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ClearRect( *reinterpret_cast<ClearRect const *>( &rhs ) )
+    {}
+
+
+    ClearRect & operator=( ClearRect const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ClearRect & operator=( VkClearRect const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ClearRect const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ClearRect & setRect( VULKAN_HPP_NAMESPACE::Rect2D const & rect_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rect = rect_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ClearRect & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      baseArrayLayer = baseArrayLayer_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ClearRect & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      layerCount = layerCount_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views of this object as the C struct (no copy made).
+    operator VkClearRect const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkClearRect*>( this );
+    }
+
+    operator VkClearRect &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkClearRect*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::Rect2D const &, uint32_t const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( rect, baseArrayLayer, layerCount );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ClearRect const & ) const = default;
+#else
+    // Member-wise equality (falls back to reflect() when reflection is enabled).
+    bool operator==( ClearRect const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( rect == rhs.rect )
+          && ( baseArrayLayer == rhs.baseArrayLayer )
+          && ( layerCount == rhs.layerCount );
+#endif
+    }
+
+    bool operator!=( ClearRect const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::Rect2D rect = {};
+    uint32_t baseArrayLayer = {};
+    uint32_t layerCount = {};
+
+  };
+
+  // Value-semantic C++ wrapper for VkCoarseSampleLocationNV.
+  // NOTE(review): generated code — layout must stay bit-identical to the C struct,
+  // since the conversions below rely on reinterpret_cast.
+  struct CoarseSampleLocationNV
+  {
+    using NativeType = VkCoarseSampleLocationNV;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR CoarseSampleLocationNV(uint32_t pixelX_ = {}, uint32_t pixelY_ = {}, uint32_t sample_ = {}) VULKAN_HPP_NOEXCEPT
+    : pixelX( pixelX_ ), pixelY( pixelY_ ), sample( sample_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CoarseSampleLocationNV( CoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the (layout-identical) C++ type.
+    CoarseSampleLocationNV( VkCoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : CoarseSampleLocationNV( *reinterpret_cast<CoarseSampleLocationNV const *>( &rhs ) )
+    {}
+
+
+    CoarseSampleLocationNV & operator=( CoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    CoarseSampleLocationNV & operator=( VkCoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each assigns one member and returns *this for chaining.
+    VULKAN_HPP_CONSTEXPR_14 CoarseSampleLocationNV & setPixelX( uint32_t pixelX_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pixelX = pixelX_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CoarseSampleLocationNV & setPixelY( uint32_t pixelY_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pixelY = pixelY_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CoarseSampleLocationNV & setSample( uint32_t sample_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sample = sample_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C struct (valid only because layouts are identical).
+    operator VkCoarseSampleLocationNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCoarseSampleLocationNV*>( this );
+    }
+
+    operator VkCoarseSampleLocationNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCoarseSampleLocationNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // reflect(): tuple of references to all members; reused by operator== below.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<uint32_t const &, uint32_t const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( pixelX, pixelY, sample );
+    }
+#endif
+
+
+    // Memberwise equality; defaulted three-way comparison when the compiler supports it.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( CoarseSampleLocationNV const & ) const = default;
+#else
+    bool operator==( CoarseSampleLocationNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( pixelX == rhs.pixelX )
+          && ( pixelY == rhs.pixelY )
+          && ( sample == rhs.sample );
+#endif
+    }
+
+    bool operator!=( CoarseSampleLocationNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    uint32_t pixelX = {};
+    uint32_t pixelY = {};
+    uint32_t sample = {};
+
+  };
+
+  // Value-semantic C++ wrapper for VkCoarseSampleOrderCustomNV.
+  // Holds a count + pointer pair (sampleLocationCount / pSampleLocations); the caller
+  // owns the pointed-to array and must keep it alive while this struct is in use.
+  struct CoarseSampleOrderCustomNV
+  {
+    using NativeType = VkCoarseSampleOrderCustomNV;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR CoarseSampleOrderCustomNV(VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_ = VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV::eNoInvocations, uint32_t sampleCount_ = {}, uint32_t sampleLocationCount_ = {}, const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV * pSampleLocations_ = {}) VULKAN_HPP_NOEXCEPT
+    : shadingRate( shadingRate_ ), sampleCount( sampleCount_ ), sampleLocationCount( sampleLocationCount_ ), pSampleLocations( pSampleLocations_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CoarseSampleOrderCustomNV( CoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    CoarseSampleOrderCustomNV( VkCoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : CoarseSampleOrderCustomNV( *reinterpret_cast<CoarseSampleOrderCustomNV const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Enhanced-mode convenience: derives count and pointer from an ArrayProxy in one step.
+    CoarseSampleOrderCustomNV( VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_, uint32_t sampleCount_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV> const & sampleLocations_ )
+    : shadingRate( shadingRate_ ), sampleCount( sampleCount_ ), sampleLocationCount( static_cast<uint32_t>( sampleLocations_.size() ) ), pSampleLocations( sampleLocations_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    CoarseSampleOrderCustomNV & operator=( CoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    CoarseSampleOrderCustomNV & operator=( VkCoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each assigns one member and returns *this for chaining.
+    VULKAN_HPP_CONSTEXPR_14 CoarseSampleOrderCustomNV & setShadingRate( VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shadingRate = shadingRate_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CoarseSampleOrderCustomNV & setSampleCount( uint32_t sampleCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sampleCount = sampleCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CoarseSampleOrderCustomNV & setSampleLocationCount( uint32_t sampleLocationCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sampleLocationCount = sampleLocationCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CoarseSampleOrderCustomNV & setPSampleLocations( const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV * pSampleLocations_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSampleLocations = pSampleLocations_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets sampleLocationCount and pSampleLocations together from one ArrayProxy.
+    CoarseSampleOrderCustomNV & setSampleLocations( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV> const & sampleLocations_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sampleLocationCount = static_cast<uint32_t>( sampleLocations_.size() );
+      pSampleLocations = sampleLocations_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C struct (valid only because layouts are identical).
+    operator VkCoarseSampleOrderCustomNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCoarseSampleOrderCustomNV*>( this );
+    }
+
+    operator VkCoarseSampleOrderCustomNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCoarseSampleOrderCustomNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // reflect(): tuple of references to all members; reused by operator== below.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV const &, uint32_t const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( shadingRate, sampleCount, sampleLocationCount, pSampleLocations );
+    }
+#endif
+
+
+    // Memberwise equality (compares the pSampleLocations pointer, not the pointed-to data).
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( CoarseSampleOrderCustomNV const & ) const = default;
+#else
+    bool operator==( CoarseSampleOrderCustomNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( shadingRate == rhs.shadingRate )
+          && ( sampleCount == rhs.sampleCount )
+          && ( sampleLocationCount == rhs.sampleLocationCount )
+          && ( pSampleLocations == rhs.pSampleLocations );
+#endif
+    }
+
+    bool operator!=( CoarseSampleOrderCustomNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate = VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV::eNoInvocations;
+    uint32_t sampleCount = {};
+    uint32_t sampleLocationCount = {};
+    const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV * pSampleLocations = {};
+
+  };
+
+  // Value-semantic C++ wrapper for VkColorBlendAdvancedEXT.
+  struct ColorBlendAdvancedEXT
+  {
+    using NativeType = VkColorBlendAdvancedEXT;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ColorBlendAdvancedEXT(VULKAN_HPP_NAMESPACE::BlendOp advancedBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd, VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied_ = {}, VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied_ = {}, VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap_ = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated, VULKAN_HPP_NAMESPACE::Bool32 clampResults_ = {}) VULKAN_HPP_NOEXCEPT
+    : advancedBlendOp( advancedBlendOp_ ), srcPremultiplied( srcPremultiplied_ ), dstPremultiplied( dstPremultiplied_ ), blendOverlap( blendOverlap_ ), clampResults( clampResults_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ColorBlendAdvancedEXT( ColorBlendAdvancedEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the (layout-identical) C++ type.
+    ColorBlendAdvancedEXT( VkColorBlendAdvancedEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ColorBlendAdvancedEXT( *reinterpret_cast<ColorBlendAdvancedEXT const *>( &rhs ) )
+    {}
+
+
+    ColorBlendAdvancedEXT & operator=( ColorBlendAdvancedEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ColorBlendAdvancedEXT & operator=( VkColorBlendAdvancedEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each assigns one member and returns *this for chaining.
+    VULKAN_HPP_CONSTEXPR_14 ColorBlendAdvancedEXT & setAdvancedBlendOp( VULKAN_HPP_NAMESPACE::BlendOp advancedBlendOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      advancedBlendOp = advancedBlendOp_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ColorBlendAdvancedEXT & setSrcPremultiplied( VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcPremultiplied = srcPremultiplied_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ColorBlendAdvancedEXT & setDstPremultiplied( VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstPremultiplied = dstPremultiplied_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ColorBlendAdvancedEXT & setBlendOverlap( VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap_ ) VULKAN_HPP_NOEXCEPT
+    {
+      blendOverlap = blendOverlap_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ColorBlendAdvancedEXT & setClampResults( VULKAN_HPP_NAMESPACE::Bool32 clampResults_ ) VULKAN_HPP_NOEXCEPT
+    {
+      clampResults = clampResults_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C struct (valid only because layouts are identical).
+    operator VkColorBlendAdvancedEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkColorBlendAdvancedEXT*>( this );
+    }
+
+    operator VkColorBlendAdvancedEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkColorBlendAdvancedEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // reflect(): tuple of references to all members; reused by operator== below.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::BlendOp const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::BlendOverlapEXT const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( advancedBlendOp, srcPremultiplied, dstPremultiplied, blendOverlap, clampResults );
+    }
+#endif
+
+
+    // Memberwise equality; defaulted three-way comparison when the compiler supports it.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ColorBlendAdvancedEXT const & ) const = default;
+#else
+    bool operator==( ColorBlendAdvancedEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( advancedBlendOp == rhs.advancedBlendOp )
+          && ( srcPremultiplied == rhs.srcPremultiplied )
+          && ( dstPremultiplied == rhs.dstPremultiplied )
+          && ( blendOverlap == rhs.blendOverlap )
+          && ( clampResults == rhs.clampResults );
+#endif
+    }
+
+    bool operator!=( ColorBlendAdvancedEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::BlendOp advancedBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd;
+    VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied = {};
+    VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied = {};
+    VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated;
+    VULKAN_HPP_NAMESPACE::Bool32 clampResults = {};
+
+  };
+
+  // Value-semantic C++ wrapper for VkColorBlendEquationEXT.
+  struct ColorBlendEquationEXT
+  {
+    using NativeType = VkColorBlendEquationEXT;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ColorBlendEquationEXT(VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd, VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd) VULKAN_HPP_NOEXCEPT
+    : srcColorBlendFactor( srcColorBlendFactor_ ), dstColorBlendFactor( dstColorBlendFactor_ ), colorBlendOp( colorBlendOp_ ), srcAlphaBlendFactor( srcAlphaBlendFactor_ ), dstAlphaBlendFactor( dstAlphaBlendFactor_ ), alphaBlendOp( alphaBlendOp_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ColorBlendEquationEXT( ColorBlendEquationEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the (layout-identical) C++ type.
+    ColorBlendEquationEXT( VkColorBlendEquationEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ColorBlendEquationEXT( *reinterpret_cast<ColorBlendEquationEXT const *>( &rhs ) )
+    {}
+
+
+    ColorBlendEquationEXT & operator=( ColorBlendEquationEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ColorBlendEquationEXT & operator=( VkColorBlendEquationEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each assigns one member and returns *this for chaining.
+    VULKAN_HPP_CONSTEXPR_14 ColorBlendEquationEXT & setSrcColorBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcColorBlendFactor = srcColorBlendFactor_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ColorBlendEquationEXT & setDstColorBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstColorBlendFactor = dstColorBlendFactor_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ColorBlendEquationEXT & setColorBlendOp( VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      colorBlendOp = colorBlendOp_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ColorBlendEquationEXT & setSrcAlphaBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcAlphaBlendFactor = srcAlphaBlendFactor_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ColorBlendEquationEXT & setDstAlphaBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstAlphaBlendFactor = dstAlphaBlendFactor_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ColorBlendEquationEXT & setAlphaBlendOp( VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      alphaBlendOp = alphaBlendOp_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C struct (valid only because layouts are identical).
+    operator VkColorBlendEquationEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkColorBlendEquationEXT*>( this );
+    }
+
+    operator VkColorBlendEquationEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkColorBlendEquationEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // reflect(): tuple of references to all members; reused by operator== below.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::BlendFactor const &, VULKAN_HPP_NAMESPACE::BlendFactor const &, VULKAN_HPP_NAMESPACE::BlendOp const &, VULKAN_HPP_NAMESPACE::BlendFactor const &, VULKAN_HPP_NAMESPACE::BlendFactor const &, VULKAN_HPP_NAMESPACE::BlendOp const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( srcColorBlendFactor, dstColorBlendFactor, colorBlendOp, srcAlphaBlendFactor, dstAlphaBlendFactor, alphaBlendOp );
+    }
+#endif
+
+
+    // Memberwise equality; defaulted three-way comparison when the compiler supports it.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ColorBlendEquationEXT const & ) const = default;
+#else
+    bool operator==( ColorBlendEquationEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( srcColorBlendFactor == rhs.srcColorBlendFactor )
+          && ( dstColorBlendFactor == rhs.dstColorBlendFactor )
+          && ( colorBlendOp == rhs.colorBlendOp )
+          && ( srcAlphaBlendFactor == rhs.srcAlphaBlendFactor )
+          && ( dstAlphaBlendFactor == rhs.dstAlphaBlendFactor )
+          && ( alphaBlendOp == rhs.alphaBlendOp );
+#endif
+    }
+
+    bool operator!=( ColorBlendEquationEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
+    VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
+    VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd;
+    VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
+    VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
+    VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd;
+
+  };
+
+  // Value-semantic C++ wrapper for VkCommandBufferAllocateInfo.
+  // sType is fixed to eCommandBufferAllocateInfo; pNext defaults to nullptr.
+  struct CommandBufferAllocateInfo
+  {
+    using NativeType = VkCommandBufferAllocateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferAllocateInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR CommandBufferAllocateInfo(VULKAN_HPP_NAMESPACE::CommandPool commandPool_ = {}, VULKAN_HPP_NAMESPACE::CommandBufferLevel level_ = VULKAN_HPP_NAMESPACE::CommandBufferLevel::ePrimary, uint32_t commandBufferCount_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), commandPool( commandPool_ ), level( level_ ), commandBufferCount( commandBufferCount_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CommandBufferAllocateInfo( CommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the (layout-identical) C++ type.
+    CommandBufferAllocateInfo( VkCommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : CommandBufferAllocateInfo( *reinterpret_cast<CommandBufferAllocateInfo const *>( &rhs ) )
+    {}
+
+
+    CommandBufferAllocateInfo & operator=( CommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    CommandBufferAllocateInfo & operator=( VkCommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each assigns one member and returns *this for chaining.
+    VULKAN_HPP_CONSTEXPR_14 CommandBufferAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CommandBufferAllocateInfo & setCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool_ ) VULKAN_HPP_NOEXCEPT
+    {
+      commandPool = commandPool_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CommandBufferAllocateInfo & setLevel( VULKAN_HPP_NAMESPACE::CommandBufferLevel level_ ) VULKAN_HPP_NOEXCEPT
+    {
+      level = level_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CommandBufferAllocateInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      commandBufferCount = commandBufferCount_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C struct (valid only because layouts are identical).
+    operator VkCommandBufferAllocateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCommandBufferAllocateInfo*>( this );
+    }
+
+    operator VkCommandBufferAllocateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCommandBufferAllocateInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // reflect(): tuple of references to all members (including sType/pNext); reused by operator==.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::CommandPool const &, VULKAN_HPP_NAMESPACE::CommandBufferLevel const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, commandPool, level, commandBufferCount );
+    }
+#endif
+
+
+    // Memberwise equality (pNext is compared as a raw pointer, not deep-compared).
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( CommandBufferAllocateInfo const & ) const = default;
+#else
+    bool operator==( CommandBufferAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( commandPool == rhs.commandPool )
+          && ( level == rhs.level )
+          && ( commandBufferCount == rhs.commandBufferCount );
+#endif
+    }
+
+    bool operator!=( CommandBufferAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferAllocateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::CommandPool commandPool = {};
+    VULKAN_HPP_NAMESPACE::CommandBufferLevel level = VULKAN_HPP_NAMESPACE::CommandBufferLevel::ePrimary;
+    uint32_t commandBufferCount = {};
+
+  };
+
+  // Maps StructureType::eCommandBufferAllocateInfo back to its C++ struct type.
+  template <>
+  struct CppType<StructureType, StructureType::eCommandBufferAllocateInfo>
+  {
+    using Type = CommandBufferAllocateInfo;
+  };
+
+  // Value-semantic C++ wrapper for VkCommandBufferInheritanceInfo.
+  // sType is fixed to eCommandBufferInheritanceInfo; pNext defaults to nullptr.
+  struct CommandBufferInheritanceInfo
+  {
+    using NativeType = VkCommandBufferInheritanceInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR CommandBufferInheritanceInfo(VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, uint32_t subpass_ = {}, VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ = {}, VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable_ = {}, VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags_ = {}, VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), renderPass( renderPass_ ), subpass( subpass_ ), framebuffer( framebuffer_ ), occlusionQueryEnable( occlusionQueryEnable_ ), queryFlags( queryFlags_ ), pipelineStatistics( pipelineStatistics_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CommandBufferInheritanceInfo( CommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the (layout-identical) C++ type.
+    CommandBufferInheritanceInfo( VkCommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : CommandBufferInheritanceInfo( *reinterpret_cast<CommandBufferInheritanceInfo const *>( &rhs ) )
+    {}
+
+
+    CommandBufferInheritanceInfo & operator=( CommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    CommandBufferInheritanceInfo & operator=( VkCommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each assigns one member and returns *this for chaining.
+    VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
+    {
+      renderPass = renderPass_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setSubpass( uint32_t subpass_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subpass = subpass_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      framebuffer = framebuffer_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setOcclusionQueryEnable( VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      occlusionQueryEnable = occlusionQueryEnable_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setQueryFlags( VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queryFlags = queryFlags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setPipelineStatistics( VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineStatistics = pipelineStatistics_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C struct (valid only because layouts are identical).
+    operator VkCommandBufferInheritanceInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCommandBufferInheritanceInfo*>( this );
+    }
+
+    operator VkCommandBufferInheritanceInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCommandBufferInheritanceInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // reflect(): tuple of references to all members (including sType/pNext); reused by operator==.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::RenderPass const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Framebuffer const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::QueryControlFlags const &, VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, renderPass, subpass, framebuffer, occlusionQueryEnable, queryFlags, pipelineStatistics );
+    }
+#endif
+
+
+    // Memberwise equality (pNext is compared as a raw pointer, not deep-compared).
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( CommandBufferInheritanceInfo const & ) const = default;
+#else
+    bool operator==( CommandBufferInheritanceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( renderPass == rhs.renderPass )
+          && ( subpass == rhs.subpass )
+          && ( framebuffer == rhs.framebuffer )
+          && ( occlusionQueryEnable == rhs.occlusionQueryEnable )
+          && ( queryFlags == rhs.queryFlags )
+          && ( pipelineStatistics == rhs.pipelineStatistics );
+#endif
+    }
+
+    bool operator!=( CommandBufferInheritanceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::RenderPass renderPass = {};
+    uint32_t subpass = {};
+    VULKAN_HPP_NAMESPACE::Framebuffer framebuffer = {};
+    VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable = {};
+    VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags = {};
+    VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics = {};
+
+  };
+
+  // Maps StructureType::eCommandBufferInheritanceInfo back to its C++ struct type.
+  template <>
+  struct CppType<StructureType, StructureType::eCommandBufferInheritanceInfo>
+  {
+    using Type = CommandBufferInheritanceInfo;
+  };
+
+  // Value-semantic C++ wrapper for VkCommandBufferBeginInfo.
+  // sType is fixed to eCommandBufferBeginInfo; pNext defaults to nullptr.
+  // pInheritanceInfo is a non-owning pointer; the caller keeps the pointee alive.
+  struct CommandBufferBeginInfo
+  {
+    using NativeType = VkCommandBufferBeginInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferBeginInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR CommandBufferBeginInfo(VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags_ = {}, const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo * pInheritanceInfo_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), pInheritanceInfo( pInheritanceInfo_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CommandBufferBeginInfo( CommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the (layout-identical) C++ type.
+    CommandBufferBeginInfo( VkCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : CommandBufferBeginInfo( *reinterpret_cast<CommandBufferBeginInfo const *>( &rhs ) )
+    {}
+
+
+    CommandBufferBeginInfo & operator=( CommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    CommandBufferBeginInfo & operator=( VkCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each assigns one member and returns *this for chaining.
+    VULKAN_HPP_CONSTEXPR_14 CommandBufferBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CommandBufferBeginInfo & setFlags( VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CommandBufferBeginInfo & setPInheritanceInfo( const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo * pInheritanceInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pInheritanceInfo = pInheritanceInfo_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C struct (valid only because layouts are identical).
+    operator VkCommandBufferBeginInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCommandBufferBeginInfo*>( this );
+    }
+
+    operator VkCommandBufferBeginInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCommandBufferBeginInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // reflect(): tuple of references to all members (including sType/pNext); reused by operator==.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags const &, const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, pInheritanceInfo );
+    }
+#endif
+
+
+    // Memberwise equality (pNext/pInheritanceInfo are compared as raw pointers).
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( CommandBufferBeginInfo const & ) const = default;
+#else
+    bool operator==( CommandBufferBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( pInheritanceInfo == rhs.pInheritanceInfo );
+#endif
+    }
+
+    bool operator!=( CommandBufferBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferBeginInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags = {};
+    const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo * pInheritanceInfo = {};
+
+  };
+
+  // Maps StructureType::eCommandBufferBeginInfo back to its C++ struct type.
+  template <>
+  struct CppType<StructureType, StructureType::eCommandBufferBeginInfo>
+  {
+    using Type = CommandBufferBeginInfo;
+  };
+
+  struct CommandBufferInheritanceConditionalRenderingInfoEXT
+  {
+    using NativeType = VkCommandBufferInheritanceConditionalRenderingInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR CommandBufferInheritanceConditionalRenderingInfoEXT(VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), conditionalRenderingEnable( conditionalRenderingEnable_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CommandBufferInheritanceConditionalRenderingInfoEXT( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    CommandBufferInheritanceConditionalRenderingInfoEXT( VkCommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : CommandBufferInheritanceConditionalRenderingInfoEXT( *reinterpret_cast<CommandBufferInheritanceConditionalRenderingInfoEXT const *>( &rhs ) )
+    {}
+
+
+    CommandBufferInheritanceConditionalRenderingInfoEXT & operator=( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    CommandBufferInheritanceConditionalRenderingInfoEXT & operator=( VkCommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceConditionalRenderingInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceConditionalRenderingInfoEXT & setConditionalRenderingEnable( VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      conditionalRenderingEnable = conditionalRenderingEnable_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkCommandBufferInheritanceConditionalRenderingInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCommandBufferInheritanceConditionalRenderingInfoEXT*>( this );
+    }
+
+    operator VkCommandBufferInheritanceConditionalRenderingInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCommandBufferInheritanceConditionalRenderingInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, conditionalRenderingEnable );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( CommandBufferInheritanceConditionalRenderingInfoEXT const & ) const = default;
+#else
+    bool operator==( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( conditionalRenderingEnable == rhs.conditionalRenderingEnable );
+#endif
+    }
+
+    bool operator!=( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT>
+  {
+    using Type = CommandBufferInheritanceConditionalRenderingInfoEXT;
+  };
+
+  struct CommandBufferInheritanceRenderPassTransformInfoQCOM
+  {
+    using NativeType = VkCommandBufferInheritanceRenderPassTransformInfoQCOM;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR CommandBufferInheritanceRenderPassTransformInfoQCOM(VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, VULKAN_HPP_NAMESPACE::Rect2D renderArea_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), transform( transform_ ), renderArea( renderArea_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CommandBufferInheritanceRenderPassTransformInfoQCOM( CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    CommandBufferInheritanceRenderPassTransformInfoQCOM( VkCommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+      : CommandBufferInheritanceRenderPassTransformInfoQCOM( *reinterpret_cast<CommandBufferInheritanceRenderPassTransformInfoQCOM const *>( &rhs ) )
+    {}
+
+
+    CommandBufferInheritanceRenderPassTransformInfoQCOM & operator=( CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    CommandBufferInheritanceRenderPassTransformInfoQCOM & operator=( VkCommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderPassTransformInfoQCOM const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderPassTransformInfoQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderPassTransformInfoQCOM & setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT
+    {
+      transform = transform_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderPassTransformInfoQCOM & setRenderArea( VULKAN_HPP_NAMESPACE::Rect2D const & renderArea_ ) VULKAN_HPP_NOEXCEPT
+    {
+      renderArea = renderArea_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkCommandBufferInheritanceRenderPassTransformInfoQCOM const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCommandBufferInheritanceRenderPassTransformInfoQCOM*>( this );
+    }
+
+    operator VkCommandBufferInheritanceRenderPassTransformInfoQCOM &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCommandBufferInheritanceRenderPassTransformInfoQCOM*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR const &, VULKAN_HPP_NAMESPACE::Rect2D const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, transform, renderArea );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( CommandBufferInheritanceRenderPassTransformInfoQCOM const & ) const = default;
+#else
+    bool operator==( CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( transform == rhs.transform )
+          && ( renderArea == rhs.renderArea );
+#endif
+    }
+
+    bool operator!=( CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
+    VULKAN_HPP_NAMESPACE::Rect2D renderArea = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM>
+  {
+    using Type = CommandBufferInheritanceRenderPassTransformInfoQCOM;
+  };
+
+  struct CommandBufferInheritanceRenderingInfo
+  {
+    using NativeType = VkCommandBufferInheritanceRenderingInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceRenderingInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR CommandBufferInheritanceRenderingInfo(VULKAN_HPP_NAMESPACE::RenderingFlags flags_ = {}, uint32_t viewMask_ = {}, uint32_t colorAttachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats_ = {}, VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), viewMask( viewMask_ ), colorAttachmentCount( colorAttachmentCount_ ), pColorAttachmentFormats( pColorAttachmentFormats_ ), depthAttachmentFormat( depthAttachmentFormat_ ), stencilAttachmentFormat( stencilAttachmentFormat_ ), rasterizationSamples( rasterizationSamples_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CommandBufferInheritanceRenderingInfo( CommandBufferInheritanceRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    CommandBufferInheritanceRenderingInfo( VkCommandBufferInheritanceRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : CommandBufferInheritanceRenderingInfo( *reinterpret_cast<CommandBufferInheritanceRenderingInfo const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    CommandBufferInheritanceRenderingInfo( VULKAN_HPP_NAMESPACE::RenderingFlags flags_, uint32_t viewMask_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & colorAttachmentFormats_, VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), flags( flags_ ), viewMask( viewMask_ ), colorAttachmentCount( static_cast<uint32_t>( colorAttachmentFormats_.size() ) ), pColorAttachmentFormats( colorAttachmentFormats_.data() ), depthAttachmentFormat( depthAttachmentFormat_ ), stencilAttachmentFormat( stencilAttachmentFormat_ ), rasterizationSamples( rasterizationSamples_ )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    CommandBufferInheritanceRenderingInfo & operator=( CommandBufferInheritanceRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    CommandBufferInheritanceRenderingInfo & operator=( VkCommandBufferInheritanceRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderingInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setFlags( VULKAN_HPP_NAMESPACE::RenderingFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      viewMask = viewMask_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      colorAttachmentCount = colorAttachmentCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setPColorAttachmentFormats( const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pColorAttachmentFormats = pColorAttachmentFormats_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    CommandBufferInheritanceRenderingInfo & setColorAttachmentFormats( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & colorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT
+    {
+      colorAttachmentCount = static_cast<uint32_t>( colorAttachmentFormats_.size() );
+      pColorAttachmentFormats = colorAttachmentFormats_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setDepthAttachmentFormat( VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthAttachmentFormat = depthAttachmentFormat_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setStencilAttachmentFormat( VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stencilAttachmentFormat = stencilAttachmentFormat_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setRasterizationSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rasterizationSamples = rasterizationSamples_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkCommandBufferInheritanceRenderingInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCommandBufferInheritanceRenderingInfo*>( this );
+    }
+
+    operator VkCommandBufferInheritanceRenderingInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCommandBufferInheritanceRenderingInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::RenderingFlags const &, uint32_t const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Format * const &, VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::SampleCountFlagBits const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, viewMask, colorAttachmentCount, pColorAttachmentFormats, depthAttachmentFormat, stencilAttachmentFormat, rasterizationSamples );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( CommandBufferInheritanceRenderingInfo const & ) const = default;
+#else
+    bool operator==( CommandBufferInheritanceRenderingInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( viewMask == rhs.viewMask )
+          && ( colorAttachmentCount == rhs.colorAttachmentCount )
+          && ( pColorAttachmentFormats == rhs.pColorAttachmentFormats )
+          && ( depthAttachmentFormat == rhs.depthAttachmentFormat )
+          && ( stencilAttachmentFormat == rhs.stencilAttachmentFormat )
+          && ( rasterizationSamples == rhs.rasterizationSamples );
+#endif
+    }
+
+    bool operator!=( CommandBufferInheritanceRenderingInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceRenderingInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::RenderingFlags flags = {};
+    uint32_t viewMask = {};
+    uint32_t colorAttachmentCount = {};
+    const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats = {};
+    VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eCommandBufferInheritanceRenderingInfo>
+  {
+    using Type = CommandBufferInheritanceRenderingInfo;
+  };
+  using CommandBufferInheritanceRenderingInfoKHR = CommandBufferInheritanceRenderingInfo;
+
+  struct Viewport
+  {
+    using NativeType = VkViewport;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR Viewport(float x_ = {}, float y_ = {}, float width_ = {}, float height_ = {}, float minDepth_ = {}, float maxDepth_ = {}) VULKAN_HPP_NOEXCEPT
+    : x( x_ ), y( y_ ), width( width_ ), height( height_ ), minDepth( minDepth_ ), maxDepth( maxDepth_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR Viewport( Viewport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    Viewport( VkViewport const & rhs ) VULKAN_HPP_NOEXCEPT
+      : Viewport( *reinterpret_cast<Viewport const *>( &rhs ) )
+    {}
+
+
+    Viewport & operator=( Viewport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    Viewport & operator=( VkViewport const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Viewport const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 Viewport & setX( float x_ ) VULKAN_HPP_NOEXCEPT
+    {
+      x = x_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 Viewport & setY( float y_ ) VULKAN_HPP_NOEXCEPT
+    {
+      y = y_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 Viewport & setWidth( float width_ ) VULKAN_HPP_NOEXCEPT
+    {
+      width = width_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 Viewport & setHeight( float height_ ) VULKAN_HPP_NOEXCEPT
+    {
+      height = height_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 Viewport & setMinDepth( float minDepth_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minDepth = minDepth_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 Viewport & setMaxDepth( float maxDepth_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxDepth = maxDepth_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkViewport const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkViewport*>( this );
+    }
+
+    operator VkViewport &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkViewport*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<float const &, float const &, float const &, float const &, float const &, float const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( x, y, width, height, minDepth, maxDepth );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( Viewport const & ) const = default;
+#else
+    bool operator==( Viewport const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( x == rhs.x )
+          && ( y == rhs.y )
+          && ( width == rhs.width )
+          && ( height == rhs.height )
+          && ( minDepth == rhs.minDepth )
+          && ( maxDepth == rhs.maxDepth );
+#endif
+    }
+
+    bool operator!=( Viewport const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    float x = {};
+    float y = {};
+    float width = {};
+    float height = {};
+    float minDepth = {};
+    float maxDepth = {};
+
+  };
+
+  struct CommandBufferInheritanceViewportScissorInfoNV
+  {
+    using NativeType = VkCommandBufferInheritanceViewportScissorInfoNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceViewportScissorInfoNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR CommandBufferInheritanceViewportScissorInfoNV(VULKAN_HPP_NAMESPACE::Bool32 viewportScissor2D_ = {}, uint32_t viewportDepthCount_ = {}, const VULKAN_HPP_NAMESPACE::Viewport * pViewportDepths_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), viewportScissor2D( viewportScissor2D_ ), viewportDepthCount( viewportDepthCount_ ), pViewportDepths( pViewportDepths_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CommandBufferInheritanceViewportScissorInfoNV( CommandBufferInheritanceViewportScissorInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    CommandBufferInheritanceViewportScissorInfoNV( VkCommandBufferInheritanceViewportScissorInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : CommandBufferInheritanceViewportScissorInfoNV( *reinterpret_cast<CommandBufferInheritanceViewportScissorInfoNV const *>( &rhs ) )
+    {}
+
+
+    CommandBufferInheritanceViewportScissorInfoNV & operator=( CommandBufferInheritanceViewportScissorInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    CommandBufferInheritanceViewportScissorInfoNV & operator=( VkCommandBufferInheritanceViewportScissorInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceViewportScissorInfoNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceViewportScissorInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceViewportScissorInfoNV & setViewportScissor2D( VULKAN_HPP_NAMESPACE::Bool32 viewportScissor2D_ ) VULKAN_HPP_NOEXCEPT
+    {
+      viewportScissor2D = viewportScissor2D_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceViewportScissorInfoNV & setViewportDepthCount( uint32_t viewportDepthCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      viewportDepthCount = viewportDepthCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceViewportScissorInfoNV & setPViewportDepths( const VULKAN_HPP_NAMESPACE::Viewport * pViewportDepths_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pViewportDepths = pViewportDepths_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkCommandBufferInheritanceViewportScissorInfoNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCommandBufferInheritanceViewportScissorInfoNV*>( this );
+    }
+
+    operator VkCommandBufferInheritanceViewportScissorInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCommandBufferInheritanceViewportScissorInfoNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Viewport * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, viewportScissor2D, viewportDepthCount, pViewportDepths );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( CommandBufferInheritanceViewportScissorInfoNV const & ) const = default;
+#else
+    bool operator==( CommandBufferInheritanceViewportScissorInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( viewportScissor2D == rhs.viewportScissor2D )
+          && ( viewportDepthCount == rhs.viewportDepthCount )
+          && ( pViewportDepths == rhs.pViewportDepths );
+#endif
+    }
+
+    bool operator!=( CommandBufferInheritanceViewportScissorInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceViewportScissorInfoNV;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 viewportScissor2D = {};
+    uint32_t viewportDepthCount = {};
+    const VULKAN_HPP_NAMESPACE::Viewport * pViewportDepths = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eCommandBufferInheritanceViewportScissorInfoNV>
+  {
+    using Type = CommandBufferInheritanceViewportScissorInfoNV;
+  };
+
+  struct CommandBufferSubmitInfo
+  {
+    using NativeType = VkCommandBufferSubmitInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferSubmitInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR CommandBufferSubmitInfo(VULKAN_HPP_NAMESPACE::CommandBuffer commandBuffer_ = {}, uint32_t deviceMask_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), commandBuffer( commandBuffer_ ), deviceMask( deviceMask_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CommandBufferSubmitInfo( CommandBufferSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    CommandBufferSubmitInfo( VkCommandBufferSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : CommandBufferSubmitInfo( *reinterpret_cast<CommandBufferSubmitInfo const *>( &rhs ) )
+    {}
+
+
+    CommandBufferSubmitInfo & operator=( CommandBufferSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    CommandBufferSubmitInfo & operator=( VkCommandBufferSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 CommandBufferSubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CommandBufferSubmitInfo & setCommandBuffer( VULKAN_HPP_NAMESPACE::CommandBuffer commandBuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      commandBuffer = commandBuffer_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CommandBufferSubmitInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceMask = deviceMask_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkCommandBufferSubmitInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCommandBufferSubmitInfo*>( this );
+    }
+
+    operator VkCommandBufferSubmitInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCommandBufferSubmitInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::CommandBuffer const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, commandBuffer, deviceMask );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( CommandBufferSubmitInfo const & ) const = default;
+#else
+    bool operator==( CommandBufferSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( commandBuffer == rhs.commandBuffer )
+          && ( deviceMask == rhs.deviceMask );
+#endif
+    }
+
+    bool operator!=( CommandBufferSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferSubmitInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::CommandBuffer commandBuffer = {};
+    uint32_t deviceMask = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eCommandBufferSubmitInfo>
+  {
+    using Type = CommandBufferSubmitInfo;
+  };
+  using CommandBufferSubmitInfoKHR = CommandBufferSubmitInfo;
+
+  struct CommandPoolCreateInfo
+  {
+    using NativeType = VkCommandPoolCreateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandPoolCreateInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR CommandPoolCreateInfo(VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags_ = {}, uint32_t queueFamilyIndex_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), queueFamilyIndex( queueFamilyIndex_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CommandPoolCreateInfo( CommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    CommandPoolCreateInfo( VkCommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : CommandPoolCreateInfo( *reinterpret_cast<CommandPoolCreateInfo const *>( &rhs ) )
+    {}
+
+
+    CommandPoolCreateInfo & operator=( CommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    CommandPoolCreateInfo & operator=( VkCommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 CommandPoolCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CommandPoolCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CommandPoolCreateInfo & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueFamilyIndex = queueFamilyIndex_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkCommandPoolCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCommandPoolCreateInfo*>( this );
+    }
+
+    operator VkCommandPoolCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCommandPoolCreateInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, queueFamilyIndex );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( CommandPoolCreateInfo const & ) const = default;
+#else
+    bool operator==( CommandPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( queueFamilyIndex == rhs.queueFamilyIndex );
+#endif
+    }
+
+    bool operator!=( CommandPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandPoolCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags = {};
+    uint32_t queueFamilyIndex = {};
+
+  };
+
+  // Maps StructureType::eCommandPoolCreateInfo back to its C++ wrapper type;
+  // used by the structure-chain / type-deduction machinery of vulkan.hpp.
+  template <>
+  struct CppType<StructureType, StructureType::eCommandPoolCreateInfo>
+  {
+    using Type = CommandPoolCreateInfo;
+  };
+
+  // Generated Vulkan-Hpp wrapper for VkSpecializationMapEntry: identifies one
+  // specialization constant (constantID) and the byte range (offset, size) of
+  // its value inside SpecializationInfo::pData. Layout-compatible with the C
+  // struct, which is what makes the reinterpret_cast conversions below valid.
+  struct SpecializationMapEntry
+  {
+    using NativeType = VkSpecializationMapEntry;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR SpecializationMapEntry(uint32_t constantID_ = {}, uint32_t offset_ = {}, size_t size_ = {}) VULKAN_HPP_NOEXCEPT
+    : constantID( constantID_ ), offset( offset_ ), size( size_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SpecializationMapEntry( SpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construction from the C struct relies on identical memory layout.
+    SpecializationMapEntry( VkSpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SpecializationMapEntry( *reinterpret_cast<SpecializationMapEntry const *>( &rhs ) )
+    {}
+
+
+    SpecializationMapEntry & operator=( SpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SpecializationMapEntry & operator=( VkSpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SpecializationMapEntry const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each returns *this so calls can be chained.
+    VULKAN_HPP_CONSTEXPR_14 SpecializationMapEntry & setConstantID( uint32_t constantID_ ) VULKAN_HPP_NOEXCEPT
+    {
+      constantID = constantID_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SpecializationMapEntry & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SpecializationMapEntry & setSize( size_t size_ ) VULKAN_HPP_NOEXCEPT
+    {
+      size = size_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views as the underlying C type.
+    operator VkSpecializationMapEntry const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSpecializationMapEntry*>( this );
+    }
+
+    operator VkSpecializationMapEntry &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSpecializationMapEntry*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Exposes all members as a tuple of references (C++14+: deduced type).
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<uint32_t const &, uint32_t const &, size_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( constantID, offset, size );
+    }
+#endif
+
+
+    // Member-wise comparison; defaulted <=> when the compiler supports it.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SpecializationMapEntry const & ) const = default;
+#else
+    bool operator==( SpecializationMapEntry const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( constantID == rhs.constantID )
+          && ( offset == rhs.offset )
+          && ( size == rhs.size );
+#endif
+    }
+
+    bool operator!=( SpecializationMapEntry const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    uint32_t constantID = {};
+    uint32_t offset = {};
+    size_t size = {};
+
+  };
+
+  // Generated Vulkan-Hpp wrapper for VkSpecializationInfo: a list of
+  // specialization map entries plus the raw data blob (pData/dataSize) the
+  // entries index into. Layout-compatible with the C struct, which is what
+  // makes the reinterpret_cast conversions below valid.
+  struct SpecializationInfo
+  {
+    using NativeType = VkSpecializationInfo;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR SpecializationInfo(uint32_t mapEntryCount_ = {}, const VULKAN_HPP_NAMESPACE::SpecializationMapEntry * pMapEntries_ = {}, size_t dataSize_ = {}, const void * pData_ = {}) VULKAN_HPP_NOEXCEPT
+    : mapEntryCount( mapEntryCount_ ), pMapEntries( pMapEntries_ ), dataSize( dataSize_ ), pData( pData_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SpecializationInfo( SpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SpecializationInfo( VkSpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SpecializationInfo( *reinterpret_cast<SpecializationInfo const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Enhanced-mode constructor: derives mapEntryCount/pMapEntries and
+    // dataSize/pData from array proxies; dataSize is in bytes (count * sizeof(T)).
+    // ArrayProxyNoTemporaries means the caller must keep the arrays alive.
+    template <typename T>
+    SpecializationInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SpecializationMapEntry> const & mapEntries_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & data_ = {} )
+    : mapEntryCount( static_cast<uint32_t>( mapEntries_.size() ) ), pMapEntries( mapEntries_.data() ), dataSize( data_.size() * sizeof(T) ), pData( data_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    SpecializationInfo & operator=( SpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SpecializationInfo & operator=( VkSpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SpecializationInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each returns *this so calls can be chained.
+    VULKAN_HPP_CONSTEXPR_14 SpecializationInfo & setMapEntryCount( uint32_t mapEntryCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mapEntryCount = mapEntryCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SpecializationInfo & setPMapEntries( const VULKAN_HPP_NAMESPACE::SpecializationMapEntry * pMapEntries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pMapEntries = pMapEntries_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets both count and pointer from a single array proxy.
+    SpecializationInfo & setMapEntries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SpecializationMapEntry> const & mapEntries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mapEntryCount = static_cast<uint32_t>( mapEntries_.size() );
+      pMapEntries = mapEntries_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 SpecializationInfo & setDataSize( size_t dataSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dataSize = dataSize_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SpecializationInfo & setPData( const void * pData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pData = pData_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets both dataSize (in bytes) and pData from a single array proxy.
+    template <typename T>
+    SpecializationInfo & setData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & data_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dataSize = data_.size() * sizeof(T);
+      pData = data_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views as the underlying C type.
+    operator VkSpecializationInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSpecializationInfo*>( this );
+    }
+
+    operator VkSpecializationInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSpecializationInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<uint32_t const &, const VULKAN_HPP_NAMESPACE::SpecializationMapEntry * const &, size_t const &, const void * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( mapEntryCount, pMapEntries, dataSize, pData );
+    }
+#endif
+
+
+    // Member-wise comparison; note pMapEntries/pData compare as pointers,
+    // not by the data they point at.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SpecializationInfo const & ) const = default;
+#else
+    bool operator==( SpecializationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( mapEntryCount == rhs.mapEntryCount )
+          && ( pMapEntries == rhs.pMapEntries )
+          && ( dataSize == rhs.dataSize )
+          && ( pData == rhs.pData );
+#endif
+    }
+
+    bool operator!=( SpecializationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    uint32_t mapEntryCount = {};
+    const VULKAN_HPP_NAMESPACE::SpecializationMapEntry * pMapEntries = {};
+    size_t dataSize = {};
+    const void * pData = {};
+
+  };
+
+  // Generated Vulkan-Hpp wrapper for VkPipelineShaderStageCreateInfo: describes
+  // one shader stage of a pipeline (stage flag, shader module, entry-point name
+  // pName, optional specialization constants). Layout-compatible with the C
+  // struct, which is what makes the reinterpret_cast conversions below valid.
+  struct PipelineShaderStageCreateInfo
+  {
+    using NativeType = VkPipelineShaderStageCreateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineShaderStageCreateInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateInfo(VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex, VULKAN_HPP_NAMESPACE::ShaderModule module_ = {}, const char * pName_ = {}, const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), stage( stage_ ), module( module_ ), pName( pName_ ), pSpecializationInfo( pSpecializationInfo_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateInfo( PipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineShaderStageCreateInfo( VkPipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineShaderStageCreateInfo( *reinterpret_cast<PipelineShaderStageCreateInfo const *>( &rhs ) )
+    {}
+
+
+    PipelineShaderStageCreateInfo & operator=( PipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PipelineShaderStageCreateInfo & operator=( VkPipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each returns *this so calls can be chained.
+    VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & setStage( VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stage = stage_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & setModule( VULKAN_HPP_NAMESPACE::ShaderModule module_ ) VULKAN_HPP_NOEXCEPT
+    {
+      module = module_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & setPName( const char * pName_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pName = pName_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & setPSpecializationInfo( const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSpecializationInfo = pSpecializationInfo_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views as the underlying C type.
+    operator VkPipelineShaderStageCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineShaderStageCreateInfo*>( this );
+    }
+
+    operator VkPipelineShaderStageCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineShaderStageCreateInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags const &, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits const &, VULKAN_HPP_NAMESPACE::ShaderModule const &, const char * const &, const VULKAN_HPP_NAMESPACE::SpecializationInfo * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, stage, module, pName, pSpecializationInfo );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    // Hand-written (not defaulted) because pName is a C string: its characters
+    // are compared, not the pointer value. pName may be null (it defaults to
+    // {}), and strcmp on a null pointer is undefined behavior, so null is
+    // handled explicitly: a null name orders before any non-null name.
+    std::strong_ordering operator<=>( PipelineShaderStageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
+      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
+      if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) return cmp;
+      if ( auto cmp = stage <=> rhs.stage; cmp != 0 ) return cmp;
+      if ( auto cmp = module <=> rhs.module; cmp != 0 ) return cmp;
+      if ( pName != rhs.pName )
+      {
+        if ( !pName || !rhs.pName )
+          return pName ? std::strong_ordering::greater : std::strong_ordering::less;
+        if ( auto cmp = strcmp( pName, rhs.pName ); cmp != 0 )
+          return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
+      }
+      if ( auto cmp = pSpecializationInfo <=> rhs.pSpecializationInfo; cmp != 0 ) return cmp;
+
+      return std::strong_ordering::equivalent;
+    }
+#endif
+
+    bool operator==( PipelineShaderStageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( stage == rhs.stage )
+          && ( module == rhs.module )
+          // equal pointers (including both null) or equal non-null strings;
+          // both-non-null guard keeps strcmp away from a null pointer
+          && ( ( pName == rhs.pName ) || ( pName && rhs.pName && ( strcmp( pName, rhs.pName ) == 0 ) ) )
+          && ( pSpecializationInfo == rhs.pSpecializationInfo );
+    }
+
+    bool operator!=( PipelineShaderStageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineShaderStageCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags = {};
+    VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex;
+    VULKAN_HPP_NAMESPACE::ShaderModule module = {};
+    const char * pName = {};
+    const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo = {};
+
+  };
+
+  // Maps StructureType::ePipelineShaderStageCreateInfo back to its C++ wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineShaderStageCreateInfo>
+  {
+    using Type = PipelineShaderStageCreateInfo;
+  };
+
+  // Generated Vulkan-Hpp wrapper for VkComputePipelineCreateInfo: parameters
+  // for creating a compute pipeline (single shader stage, pipeline layout,
+  // optional base pipeline for derivation). Layout-compatible with the C
+  // struct, which is what makes the reinterpret_cast conversions below valid.
+  struct ComputePipelineCreateInfo
+  {
+    using NativeType = VkComputePipelineCreateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eComputePipelineCreateInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ComputePipelineCreateInfo(VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo stage_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), stage( stage_ ), layout( layout_ ), basePipelineHandle( basePipelineHandle_ ), basePipelineIndex( basePipelineIndex_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ComputePipelineCreateInfo( ComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ComputePipelineCreateInfo( VkComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ComputePipelineCreateInfo( *reinterpret_cast<ComputePipelineCreateInfo const *>( &rhs ) )
+    {}
+
+
+    ComputePipelineCreateInfo & operator=( ComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ComputePipelineCreateInfo & operator=( VkComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each returns *this so calls can be chained.
+    VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setStage( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo const & stage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stage = stage_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      layout = layout_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT
+    {
+      basePipelineHandle = basePipelineHandle_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      basePipelineIndex = basePipelineIndex_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views as the underlying C type.
+    operator VkComputePipelineCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkComputePipelineCreateInfo*>( this );
+    }
+
+    operator VkComputePipelineCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkComputePipelineCreateInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineCreateFlags const &, VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo const &, VULKAN_HPP_NAMESPACE::PipelineLayout const &, VULKAN_HPP_NAMESPACE::Pipeline const &, int32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, stage, layout, basePipelineHandle, basePipelineIndex );
+    }
+#endif
+
+
+    // Member-wise comparison; defaulted <=> when the compiler supports it.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ComputePipelineCreateInfo const & ) const = default;
+#else
+    bool operator==( ComputePipelineCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( stage == rhs.stage )
+          && ( layout == rhs.layout )
+          && ( basePipelineHandle == rhs.basePipelineHandle )
+          && ( basePipelineIndex == rhs.basePipelineIndex );
+#endif
+    }
+
+    bool operator!=( ComputePipelineCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eComputePipelineCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {};
+    VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo stage = {};
+    VULKAN_HPP_NAMESPACE::PipelineLayout layout = {};
+    VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {};
+    int32_t basePipelineIndex = {};
+
+  };
+
+  // Maps StructureType::eComputePipelineCreateInfo back to its C++ wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eComputePipelineCreateInfo>
+  {
+    using Type = ComputePipelineCreateInfo;
+  };
+
+  // Generated Vulkan-Hpp wrapper for VkConditionalRenderingBeginInfoEXT
+  // (VK_EXT_conditional_rendering): names the buffer/offset holding the
+  // 32-bit predicate that gates conditional rendering. Layout-compatible
+  // with the C struct, which is what makes the reinterpret_casts below valid.
+  struct ConditionalRenderingBeginInfoEXT
+  {
+    using NativeType = VkConditionalRenderingBeginInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eConditionalRenderingBeginInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ConditionalRenderingBeginInfoEXT(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), buffer( buffer_ ), offset( offset_ ), flags( flags_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ConditionalRenderingBeginInfoEXT( ConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ConditionalRenderingBeginInfoEXT( VkConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ConditionalRenderingBeginInfoEXT( *reinterpret_cast<ConditionalRenderingBeginInfoEXT const *>( &rhs ) )
+    {}
+
+
+    ConditionalRenderingBeginInfoEXT & operator=( ConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ConditionalRenderingBeginInfoEXT & operator=( VkConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each returns *this so calls can be chained.
+    VULKAN_HPP_CONSTEXPR_14 ConditionalRenderingBeginInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ConditionalRenderingBeginInfoEXT & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ConditionalRenderingBeginInfoEXT & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ConditionalRenderingBeginInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views as the underlying C type.
+    operator VkConditionalRenderingBeginInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkConditionalRenderingBeginInfoEXT*>( this );
+    }
+
+    operator VkConditionalRenderingBeginInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkConditionalRenderingBeginInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Buffer const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, buffer, offset, flags );
+    }
+#endif
+
+
+    // Member-wise comparison; defaulted <=> when the compiler supports it.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ConditionalRenderingBeginInfoEXT const & ) const = default;
+#else
+    bool operator==( ConditionalRenderingBeginInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( buffer == rhs.buffer )
+          && ( offset == rhs.offset )
+          && ( flags == rhs.flags );
+#endif
+    }
+
+    bool operator!=( ConditionalRenderingBeginInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eConditionalRenderingBeginInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
+    VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags = {};
+
+  };
+
+  // Maps StructureType::eConditionalRenderingBeginInfoEXT back to its C++ wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eConditionalRenderingBeginInfoEXT>
+  {
+    using Type = ConditionalRenderingBeginInfoEXT;
+  };
+
+  // Generated Vulkan-Hpp wrapper for VkConformanceVersion: the four-component
+  // conformance-test-suite version reported in driver properties. Layout-
+  // compatible with the C struct, so the reinterpret_casts below are valid.
+  // NOTE(review): the member names `major`/`minor` can collide with the glibc
+  // macros of the same name from <sys/sysmacros.h> — confirm callers compile
+  // cleanly on such platforms (or #undef the macros before including).
+  struct ConformanceVersion
+  {
+    using NativeType = VkConformanceVersion;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ConformanceVersion(uint8_t major_ = {}, uint8_t minor_ = {}, uint8_t subminor_ = {}, uint8_t patch_ = {}) VULKAN_HPP_NOEXCEPT
+    : major( major_ ), minor( minor_ ), subminor( subminor_ ), patch( patch_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ConformanceVersion( ConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ConformanceVersion( VkConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ConformanceVersion( *reinterpret_cast<ConformanceVersion const *>( &rhs ) )
+    {}
+
+
+    ConformanceVersion & operator=( ConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ConformanceVersion & operator=( VkConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ConformanceVersion const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each returns *this so calls can be chained.
+    VULKAN_HPP_CONSTEXPR_14 ConformanceVersion & setMajor( uint8_t major_ ) VULKAN_HPP_NOEXCEPT
+    {
+      major = major_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ConformanceVersion & setMinor( uint8_t minor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minor = minor_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ConformanceVersion & setSubminor( uint8_t subminor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subminor = subminor_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ConformanceVersion & setPatch( uint8_t patch_ ) VULKAN_HPP_NOEXCEPT
+    {
+      patch = patch_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views as the underlying C type.
+    operator VkConformanceVersion const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkConformanceVersion*>( this );
+    }
+
+    operator VkConformanceVersion &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkConformanceVersion*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<uint8_t const &, uint8_t const &, uint8_t const &, uint8_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( major, minor, subminor, patch );
+    }
+#endif
+
+
+    // Member-wise comparison; defaulted <=> when the compiler supports it.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ConformanceVersion const & ) const = default;
+#else
+    bool operator==( ConformanceVersion const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( major == rhs.major )
+          && ( minor == rhs.minor )
+          && ( subminor == rhs.subminor )
+          && ( patch == rhs.patch );
+#endif
+    }
+
+    bool operator!=( ConformanceVersion const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    uint8_t major = {};
+    uint8_t minor = {};
+    uint8_t subminor = {};
+    uint8_t patch = {};
+
+  };
+  // The KHR-suffixed name (from the pre-1.2 extension) aliases the core type.
+  using ConformanceVersionKHR = ConformanceVersion;
+
+  struct CooperativeMatrixPropertiesNV
+  {
+    using NativeType = VkCooperativeMatrixPropertiesNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCooperativeMatrixPropertiesNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesNV(uint32_t MSize_ = {}, uint32_t NSize_ = {}, uint32_t KSize_ = {}, VULKAN_HPP_NAMESPACE::ComponentTypeNV AType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, VULKAN_HPP_NAMESPACE::ComponentTypeNV BType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, VULKAN_HPP_NAMESPACE::ComponentTypeNV CType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, VULKAN_HPP_NAMESPACE::ComponentTypeNV DType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, VULKAN_HPP_NAMESPACE::ScopeNV scope_ = VULKAN_HPP_NAMESPACE::ScopeNV::eDevice, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), MSize( MSize_ ), NSize( NSize_ ), KSize( KSize_ ), AType( AType_ ), BType( BType_ ), CType( CType_ ), DType( DType_ ), scope( scope_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesNV( CooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    CooperativeMatrixPropertiesNV( VkCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : CooperativeMatrixPropertiesNV( *reinterpret_cast<CooperativeMatrixPropertiesNV const *>( &rhs ) )
+    {}
+
+
+    CooperativeMatrixPropertiesNV & operator=( CooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    CooperativeMatrixPropertiesNV & operator=( VkCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & setMSize( uint32_t MSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      MSize = MSize_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & setNSize( uint32_t NSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      NSize = NSize_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & setKSize( uint32_t KSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      KSize = KSize_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & setAType( VULKAN_HPP_NAMESPACE::ComponentTypeNV AType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      AType = AType_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & setBType( VULKAN_HPP_NAMESPACE::ComponentTypeNV BType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      BType = BType_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & setCType( VULKAN_HPP_NAMESPACE::ComponentTypeNV CType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      CType = CType_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & setDType( VULKAN_HPP_NAMESPACE::ComponentTypeNV DType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      DType = DType_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & setScope( VULKAN_HPP_NAMESPACE::ScopeNV scope_ ) VULKAN_HPP_NOEXCEPT
+    {
+      scope = scope_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkCooperativeMatrixPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCooperativeMatrixPropertiesNV*>( this );
+    }
+
+    operator VkCooperativeMatrixPropertiesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCooperativeMatrixPropertiesNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ComponentTypeNV const &, VULKAN_HPP_NAMESPACE::ComponentTypeNV const &, VULKAN_HPP_NAMESPACE::ComponentTypeNV const &, VULKAN_HPP_NAMESPACE::ComponentTypeNV const &, VULKAN_HPP_NAMESPACE::ScopeNV const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, MSize, NSize, KSize, AType, BType, CType, DType, scope );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( CooperativeMatrixPropertiesNV const & ) const = default;
+#else
+    bool operator==( CooperativeMatrixPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( MSize == rhs.MSize )
+          && ( NSize == rhs.NSize )
+          && ( KSize == rhs.KSize )
+          && ( AType == rhs.AType )
+          && ( BType == rhs.BType )
+          && ( CType == rhs.CType )
+          && ( DType == rhs.DType )
+          && ( scope == rhs.scope );
+#endif
+    }
+
+    bool operator!=( CooperativeMatrixPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCooperativeMatrixPropertiesNV;
+    void * pNext = {};
+    uint32_t MSize = {};
+    uint32_t NSize = {};
+    uint32_t KSize = {};
+    VULKAN_HPP_NAMESPACE::ComponentTypeNV AType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16;
+    VULKAN_HPP_NAMESPACE::ComponentTypeNV BType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16;
+    VULKAN_HPP_NAMESPACE::ComponentTypeNV CType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16;
+    VULKAN_HPP_NAMESPACE::ComponentTypeNV DType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16;
+    VULKAN_HPP_NAMESPACE::ScopeNV scope = VULKAN_HPP_NAMESPACE::ScopeNV::eDevice;
+
+  };
+
+  // Compile-time mapping from the StructureType enum value back to the C++
+  // wrapper type, used by generic chain/lookup helpers in this header.
+  template <>
+  struct CppType<StructureType, StructureType::eCooperativeMatrixPropertiesNV>
+  {
+    using Type = CooperativeMatrixPropertiesNV;
+  };
+
+  // C++ wrapper for VkCopyAccelerationStructureInfoKHR: parameters for copying
+  // one acceleration structure (src) into another (dst) with a given mode.
+  // Layout-compatible with the native C struct — the reinterpret_cast
+  // conversion operators below depend on member order staying exactly as-is.
+  struct CopyAccelerationStructureInfoKHR
+  {
+    using NativeType = VkCopyAccelerationStructureInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyAccelerationStructureInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; sType is fixed by the member initializer and never set here.
+VULKAN_HPP_CONSTEXPR CopyAccelerationStructureInfoKHR(VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ = {}, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ = {}, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), src( src_ ), dst( dst_ ), mode( mode_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CopyAccelerationStructureInfoKHR( CopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the native C struct (relies on identical layout).
+    CopyAccelerationStructureInfoKHR( VkCopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : CopyAccelerationStructureInfoKHR( *reinterpret_cast<CopyAccelerationStructureInfoKHR const *>( &rhs ) )
+    {}
+
+
+    CopyAccelerationStructureInfoKHR & operator=( CopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the native C struct (relies on identical layout).
+    CopyAccelerationStructureInfoKHR & operator=( VkCopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable fluent setters, one per mutable member.
+    VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureInfoKHR & setSrc( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ ) VULKAN_HPP_NOEXCEPT
+    {
+      src = src_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureInfoKHR & setDst( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dst = dst_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureInfoKHR & setMode( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mode = mode_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Bit-for-bit views of this object as the native C struct.
+    operator VkCopyAccelerationStructureInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCopyAccelerationStructureInfoKHR*>( this );
+    }
+
+    operator VkCopyAccelerationStructureInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCopyAccelerationStructureInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR const &, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR const &, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR const &>
+#endif
+      // Ties every member into a tuple of references, for generic comparison/iteration.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, src, dst, mode );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( CopyAccelerationStructureInfoKHR const & ) const = default;
+#else
+    // Member-wise equality (via reflect() when reflection is enabled).
+    bool operator==( CopyAccelerationStructureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( src == rhs.src )
+          && ( dst == rhs.dst )
+          && ( mode == rhs.mode );
+#endif
+    }
+
+    bool operator!=( CopyAccelerationStructureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members mirror VkCopyAccelerationStructureInfoKHR; order and types must match the C layout.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyAccelerationStructureInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src = {};
+    VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst = {};
+    VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone;
+
+  };
+
+  // StructureType -> wrapper-type mapping for generic chain/lookup helpers.
+  template <>
+  struct CppType<StructureType, StructureType::eCopyAccelerationStructureInfoKHR>
+  {
+    using Type = CopyAccelerationStructureInfoKHR;
+  };
+
+  // C++ wrapper for VkCopyAccelerationStructureToMemoryInfoKHR: parameters for
+  // serializing an acceleration structure (src) out to a device-or-host address (dst).
+  // Layout-compatible with the native C struct — the reinterpret_cast conversion
+  // operators below depend on member order staying exactly as-is.
+  // NOTE(review): unlike sibling structs there are no ==/!= operators here,
+  // presumably because dst (DeviceOrHostAddressKHR) is a union and cannot be
+  // compared member-wise — confirm against the generator.
+  struct CopyAccelerationStructureToMemoryInfoKHR
+  {
+    using NativeType = VkCopyAccelerationStructureToMemoryInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyAccelerationStructureToMemoryInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor (CONSTEXPR_14, not CONSTEXPR, unlike sibling structs);
+    // sType is fixed by the member initializer and never set here.
+VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR(VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ = {}, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dst_ = {}, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), src( src_ ), dst( dst_ ), mode( mode_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR( CopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the native C struct (relies on identical layout).
+    CopyAccelerationStructureToMemoryInfoKHR( VkCopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : CopyAccelerationStructureToMemoryInfoKHR( *reinterpret_cast<CopyAccelerationStructureToMemoryInfoKHR const *>( &rhs ) )
+    {}
+
+
+    CopyAccelerationStructureToMemoryInfoKHR & operator=( CopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the native C struct (relies on identical layout).
+    CopyAccelerationStructureToMemoryInfoKHR & operator=( VkCopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable fluent setters, one per mutable member.
+    VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR & setSrc( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ ) VULKAN_HPP_NOEXCEPT
+    {
+      src = src_;
+      return *this;
+    }
+
+    // dst is taken by const & (passed as a composite, unlike the handle members).
+    VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR & setDst( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & dst_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dst = dst_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR & setMode( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mode = mode_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Bit-for-bit views of this object as the native C struct.
+    operator VkCopyAccelerationStructureToMemoryInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR*>( this );
+    }
+
+    operator VkCopyAccelerationStructureToMemoryInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCopyAccelerationStructureToMemoryInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR const &, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const &, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR const &>
+#endif
+      // Ties every member into a tuple of references, for generic comparison/iteration.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, src, dst, mode );
+    }
+#endif
+
+
+    // Members mirror VkCopyAccelerationStructureToMemoryInfoKHR; order and types must match the C layout.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyAccelerationStructureToMemoryInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src = {};
+    VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dst = {};
+    VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone;
+
+  };
+
+  // StructureType -> wrapper-type mapping for generic chain/lookup helpers.
+  template <>
+  struct CppType<StructureType, StructureType::eCopyAccelerationStructureToMemoryInfoKHR>
+  {
+    using Type = CopyAccelerationStructureToMemoryInfoKHR;
+  };
+
+  // C++ wrapper for VkCopyBufferInfo2: parameters for a buffer-to-buffer copy
+  // (srcBuffer -> dstBuffer over regionCount regions pointed to by pRegions).
+  // Layout-compatible with the native C struct — the reinterpret_cast conversion
+  // operators below depend on member order staying exactly as-is.
+  struct CopyBufferInfo2
+  {
+    using NativeType = VkCopyBufferInfo2;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyBufferInfo2;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; sType is fixed by the member initializer and never set here.
+VULKAN_HPP_CONSTEXPR CopyBufferInfo2(VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ = {}, VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ = {}, uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::BufferCopy2 * pRegions_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), srcBuffer( srcBuffer_ ), dstBuffer( dstBuffer_ ), regionCount( regionCount_ ), pRegions( pRegions_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CopyBufferInfo2( CopyBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the native C struct (relies on identical layout).
+    CopyBufferInfo2( VkCopyBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+      : CopyBufferInfo2( *reinterpret_cast<CopyBufferInfo2 const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Enhanced-mode constructor: derives regionCount/pRegions from an array proxy.
+    // Caller must keep the proxied array alive while this struct is in use.
+    CopyBufferInfo2( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_, VULKAN_HPP_NAMESPACE::Buffer dstBuffer_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferCopy2> const & regions_, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), srcBuffer( srcBuffer_ ), dstBuffer( dstBuffer_ ), regionCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    CopyBufferInfo2 & operator=( CopyBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the native C struct (relies on identical layout).
+    CopyBufferInfo2 & operator=( VkCopyBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyBufferInfo2 const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable fluent setters, one per mutable member.
+    VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 & setSrcBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcBuffer = srcBuffer_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 & setDstBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstBuffer = dstBuffer_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      regionCount = regionCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 & setPRegions( const VULKAN_HPP_NAMESPACE::BufferCopy2 * pRegions_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pRegions = pRegions_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Enhanced-mode setter: sets both regionCount and pRegions from one proxy.
+    CopyBufferInfo2 & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferCopy2> const & regions_ ) VULKAN_HPP_NOEXCEPT
+    {
+      regionCount = static_cast<uint32_t>( regions_.size() );
+      pRegions = regions_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Bit-for-bit views of this object as the native C struct.
+    operator VkCopyBufferInfo2 const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCopyBufferInfo2*>( this );
+    }
+
+    operator VkCopyBufferInfo2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCopyBufferInfo2*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Buffer const &, VULKAN_HPP_NAMESPACE::Buffer const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::BufferCopy2 * const &>
+#endif
+      // Ties every member into a tuple of references, for generic comparison/iteration.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, srcBuffer, dstBuffer, regionCount, pRegions );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( CopyBufferInfo2 const & ) const = default;
+#else
+    // Member-wise equality; pRegions compares the pointer, not the pointed-to regions.
+    bool operator==( CopyBufferInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( srcBuffer == rhs.srcBuffer )
+          && ( dstBuffer == rhs.dstBuffer )
+          && ( regionCount == rhs.regionCount )
+          && ( pRegions == rhs.pRegions );
+#endif
+    }
+
+    bool operator!=( CopyBufferInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members mirror VkCopyBufferInfo2; order and types must match the C layout.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyBufferInfo2;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Buffer srcBuffer = {};
+    VULKAN_HPP_NAMESPACE::Buffer dstBuffer = {};
+    uint32_t regionCount = {};
+    const VULKAN_HPP_NAMESPACE::BufferCopy2 * pRegions = {};
+
+  };
+
+  // StructureType -> wrapper-type mapping for generic chain/lookup helpers.
+  template <>
+  struct CppType<StructureType, StructureType::eCopyBufferInfo2>
+  {
+    using Type = CopyBufferInfo2;
+  };
+  // Alias for the original VK_KHR_copy_commands2 extension name (promoted to core).
+  using CopyBufferInfo2KHR = CopyBufferInfo2;
+
+  // C++ wrapper for VkCopyBufferToImageInfo2: parameters for a buffer-to-image
+  // copy (srcBuffer -> dstImage in dstImageLayout, over pRegions).
+  // Layout-compatible with the native C struct — the reinterpret_cast conversion
+  // operators below depend on member order staying exactly as-is.
+  struct CopyBufferToImageInfo2
+  {
+    using NativeType = VkCopyBufferToImageInfo2;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyBufferToImageInfo2;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; sType is fixed by the member initializer and never set here.
+VULKAN_HPP_CONSTEXPR CopyBufferToImageInfo2(VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ = {}, VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), srcBuffer( srcBuffer_ ), dstImage( dstImage_ ), dstImageLayout( dstImageLayout_ ), regionCount( regionCount_ ), pRegions( pRegions_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CopyBufferToImageInfo2( CopyBufferToImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the native C struct (relies on identical layout).
+    CopyBufferToImageInfo2( VkCopyBufferToImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+      : CopyBufferToImageInfo2( *reinterpret_cast<CopyBufferToImageInfo2 const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Enhanced-mode constructor: derives regionCount/pRegions from an array proxy.
+    // Caller must keep the proxied array alive while this struct is in use.
+    CopyBufferToImageInfo2( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_, VULKAN_HPP_NAMESPACE::Image dstImage_, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferImageCopy2> const & regions_, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), srcBuffer( srcBuffer_ ), dstImage( dstImage_ ), dstImageLayout( dstImageLayout_ ), regionCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    CopyBufferToImageInfo2 & operator=( CopyBufferToImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the native C struct (relies on identical layout).
+    CopyBufferToImageInfo2 & operator=( VkCopyBufferToImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable fluent setters, one per mutable member.
+    VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setSrcBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcBuffer = srcBuffer_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstImage = dstImage_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstImageLayout = dstImageLayout_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      regionCount = regionCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setPRegions( const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pRegions = pRegions_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Enhanced-mode setter: sets both regionCount and pRegions from one proxy.
+    CopyBufferToImageInfo2 & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferImageCopy2> const & regions_ ) VULKAN_HPP_NOEXCEPT
+    {
+      regionCount = static_cast<uint32_t>( regions_.size() );
+      pRegions = regions_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Bit-for-bit views of this object as the native C struct.
+    operator VkCopyBufferToImageInfo2 const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCopyBufferToImageInfo2*>( this );
+    }
+
+    operator VkCopyBufferToImageInfo2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCopyBufferToImageInfo2*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Buffer const &, VULKAN_HPP_NAMESPACE::Image const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * const &>
+#endif
+      // Ties every member into a tuple of references, for generic comparison/iteration.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( CopyBufferToImageInfo2 const & ) const = default;
+#else
+    // Member-wise equality; pRegions compares the pointer, not the pointed-to regions.
+    bool operator==( CopyBufferToImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( srcBuffer == rhs.srcBuffer )
+          && ( dstImage == rhs.dstImage )
+          && ( dstImageLayout == rhs.dstImageLayout )
+          && ( regionCount == rhs.regionCount )
+          && ( pRegions == rhs.pRegions );
+#endif
+    }
+
+    bool operator!=( CopyBufferToImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members mirror VkCopyBufferToImageInfo2; order and types must match the C layout.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyBufferToImageInfo2;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Buffer srcBuffer = {};
+    VULKAN_HPP_NAMESPACE::Image dstImage = {};
+    VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+    uint32_t regionCount = {};
+    const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions = {};
+
+  };
+
+  // StructureType -> wrapper-type mapping for generic chain/lookup helpers.
+  template <>
+  struct CppType<StructureType, StructureType::eCopyBufferToImageInfo2>
+  {
+    using Type = CopyBufferToImageInfo2;
+  };
+  // Alias for the original VK_KHR_copy_commands2 extension name (promoted to core).
+  using CopyBufferToImageInfo2KHR = CopyBufferToImageInfo2;
+
+  // C++ wrapper for VkCopyCommandTransformInfoQCOM: carries a single surface
+  // transform to apply to a copy command (VK_QCOM_rotated_copy_commands).
+  // Layout-compatible with the native C struct — the reinterpret_cast conversion
+  // operators below depend on member order staying exactly as-is.
+  struct CopyCommandTransformInfoQCOM
+  {
+    using NativeType = VkCopyCommandTransformInfoQCOM;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyCommandTransformInfoQCOM;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; sType is fixed by the member initializer and never set here.
+VULKAN_HPP_CONSTEXPR CopyCommandTransformInfoQCOM(VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), transform( transform_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CopyCommandTransformInfoQCOM( CopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the native C struct (relies on identical layout).
+    CopyCommandTransformInfoQCOM( VkCopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+      : CopyCommandTransformInfoQCOM( *reinterpret_cast<CopyCommandTransformInfoQCOM const *>( &rhs ) )
+    {}
+
+
+    CopyCommandTransformInfoQCOM & operator=( CopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the native C struct (relies on identical layout).
+    CopyCommandTransformInfoQCOM & operator=( VkCopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyCommandTransformInfoQCOM const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable fluent setters, one per mutable member.
+    VULKAN_HPP_CONSTEXPR_14 CopyCommandTransformInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CopyCommandTransformInfoQCOM & setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT
+    {
+      transform = transform_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Bit-for-bit views of this object as the native C struct.
+    operator VkCopyCommandTransformInfoQCOM const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCopyCommandTransformInfoQCOM*>( this );
+    }
+
+    operator VkCopyCommandTransformInfoQCOM &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCopyCommandTransformInfoQCOM*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR const &>
+#endif
+      // Ties every member into a tuple of references, for generic comparison/iteration.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, transform );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( CopyCommandTransformInfoQCOM const & ) const = default;
+#else
+    // Member-wise equality (via reflect() when reflection is enabled).
+    bool operator==( CopyCommandTransformInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( transform == rhs.transform );
+#endif
+    }
+
+    bool operator!=( CopyCommandTransformInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members mirror VkCopyCommandTransformInfoQCOM; order and types must match the C layout.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyCommandTransformInfoQCOM;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
+
+  };
+
+  // StructureType -> wrapper-type mapping for generic chain/lookup helpers.
+  template <>
+  struct CppType<StructureType, StructureType::eCopyCommandTransformInfoQCOM>
+  {
+    using Type = CopyCommandTransformInfoQCOM;
+  };
+
+  // C++ wrapper for VkCopyDescriptorSet: describes a copy of descriptorCount
+  // descriptors from (srcSet, srcBinding, srcArrayElement) to
+  // (dstSet, dstBinding, dstArrayElement).
+  // Layout-compatible with the native C struct — the reinterpret_cast conversion
+  // operators below depend on member order staying exactly as-is.
+  struct CopyDescriptorSet
+  {
+    using NativeType = VkCopyDescriptorSet;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyDescriptorSet;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; sType is fixed by the member initializer and never set here.
+VULKAN_HPP_CONSTEXPR CopyDescriptorSet(VULKAN_HPP_NAMESPACE::DescriptorSet srcSet_ = {}, uint32_t srcBinding_ = {}, uint32_t srcArrayElement_ = {}, VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ = {}, uint32_t dstBinding_ = {}, uint32_t dstArrayElement_ = {}, uint32_t descriptorCount_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), srcSet( srcSet_ ), srcBinding( srcBinding_ ), srcArrayElement( srcArrayElement_ ), dstSet( dstSet_ ), dstBinding( dstBinding_ ), dstArrayElement( dstArrayElement_ ), descriptorCount( descriptorCount_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CopyDescriptorSet( CopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the native C struct (relies on identical layout).
+    CopyDescriptorSet( VkCopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
+      : CopyDescriptorSet( *reinterpret_cast<CopyDescriptorSet const *>( &rhs ) )
+    {}
+
+
+    CopyDescriptorSet & operator=( CopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the native C struct (relies on identical layout).
+    CopyDescriptorSet & operator=( VkCopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyDescriptorSet const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable fluent setters, one per mutable member.
+    VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setSrcSet( VULKAN_HPP_NAMESPACE::DescriptorSet srcSet_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcSet = srcSet_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setSrcBinding( uint32_t srcBinding_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcBinding = srcBinding_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setSrcArrayElement( uint32_t srcArrayElement_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcArrayElement = srcArrayElement_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setDstSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstSet = dstSet_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setDstBinding( uint32_t dstBinding_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstBinding = dstBinding_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstArrayElement = dstArrayElement_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorCount = descriptorCount_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Bit-for-bit views of this object as the native C struct.
+    operator VkCopyDescriptorSet const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCopyDescriptorSet*>( this );
+    }
+
+    operator VkCopyDescriptorSet &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCopyDescriptorSet*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DescriptorSet const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DescriptorSet const &, uint32_t const &, uint32_t const &, uint32_t const &>
+#endif
+      // Ties every member into a tuple of references, for generic comparison/iteration.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, srcSet, srcBinding, srcArrayElement, dstSet, dstBinding, dstArrayElement, descriptorCount );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( CopyDescriptorSet const & ) const = default;
+#else
+    // Member-wise equality (via reflect() when reflection is enabled).
+    bool operator==( CopyDescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( srcSet == rhs.srcSet )
+          && ( srcBinding == rhs.srcBinding )
+          && ( srcArrayElement == rhs.srcArrayElement )
+          && ( dstSet == rhs.dstSet )
+          && ( dstBinding == rhs.dstBinding )
+          && ( dstArrayElement == rhs.dstArrayElement )
+          && ( descriptorCount == rhs.descriptorCount );
+#endif
+    }
+
+    bool operator!=( CopyDescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members mirror VkCopyDescriptorSet; order and types must match the C layout.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyDescriptorSet;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DescriptorSet srcSet = {};
+    uint32_t srcBinding = {};
+    uint32_t srcArrayElement = {};
+    VULKAN_HPP_NAMESPACE::DescriptorSet dstSet = {};
+    uint32_t dstBinding = {};
+    uint32_t dstArrayElement = {};
+    uint32_t descriptorCount = {};
+
+  };
+
+  // StructureType -> wrapper-type mapping for generic chain/lookup helpers.
+  template <>
+  struct CppType<StructureType, StructureType::eCopyDescriptorSet>
+  {
+    using Type = CopyDescriptorSet;
+  };
+
+  struct ImageCopy2
+  {
+    using NativeType = VkImageCopy2;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageCopy2;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ImageCopy2(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), srcSubresource( srcSubresource_ ), srcOffset( srcOffset_ ), dstSubresource( dstSubresource_ ), dstOffset( dstOffset_ ), extent( extent_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageCopy2( ImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageCopy2( VkImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImageCopy2( *reinterpret_cast<ImageCopy2 const *>( &rhs ) )
+    {}
+
+
+    ImageCopy2 & operator=( ImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ImageCopy2 & operator=( VkImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageCopy2 const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcSubresource = srcSubresource_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcOffset = srcOffset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstSubresource = dstSubresource_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstOffset = dstOffset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extent = extent_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkImageCopy2 const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageCopy2*>( this );
+    }
+
+    operator VkImageCopy2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageCopy2*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &, VULKAN_HPP_NAMESPACE::Offset3D const &, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &, VULKAN_HPP_NAMESPACE::Offset3D const &, VULKAN_HPP_NAMESPACE::Extent3D const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, srcSubresource, srcOffset, dstSubresource, dstOffset, extent );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ImageCopy2 const & ) const = default;
+#else
+    bool operator==( ImageCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( srcSubresource == rhs.srcSubresource )
+          && ( srcOffset == rhs.srcOffset )
+          && ( dstSubresource == rhs.dstSubresource )
+          && ( dstOffset == rhs.dstOffset )
+          && ( extent == rhs.extent );
+#endif
+    }
+
+    bool operator!=( ImageCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageCopy2;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
+    VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {};
+    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
+    VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {};
+    VULKAN_HPP_NAMESPACE::Extent3D extent = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eImageCopy2>
+  {
+    using Type = ImageCopy2;
+  };
+  using ImageCopy2KHR = ImageCopy2;
+
+  struct CopyImageInfo2
+  {
+    using NativeType = VkCopyImageInfo2;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyImageInfo2;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR CopyImageInfo2(VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::ImageCopy2 * pRegions_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstImage( dstImage_ ), dstImageLayout( dstImageLayout_ ), regionCount( regionCount_ ), pRegions( pRegions_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CopyImageInfo2( CopyImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    CopyImageInfo2( VkCopyImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+      : CopyImageInfo2( *reinterpret_cast<CopyImageInfo2 const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    CopyImageInfo2( VULKAN_HPP_NAMESPACE::Image srcImage_, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, VULKAN_HPP_NAMESPACE::Image dstImage_, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageCopy2> const & regions_, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstImage( dstImage_ ), dstImageLayout( dstImageLayout_ ), regionCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    CopyImageInfo2 & operator=( CopyImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    CopyImageInfo2 & operator=( VkCopyImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyImageInfo2 const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcImage = srcImage_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcImageLayout = srcImageLayout_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstImage = dstImage_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstImageLayout = dstImageLayout_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      regionCount = regionCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setPRegions( const VULKAN_HPP_NAMESPACE::ImageCopy2 * pRegions_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pRegions = pRegions_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    CopyImageInfo2 & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageCopy2> const & regions_ ) VULKAN_HPP_NOEXCEPT
+    {
+      regionCount = static_cast<uint32_t>( regions_.size() );
+      pRegions = regions_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkCopyImageInfo2 const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCopyImageInfo2*>( this );
+    }
+
+    operator VkCopyImageInfo2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCopyImageInfo2*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Image const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, VULKAN_HPP_NAMESPACE::Image const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::ImageCopy2 * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( CopyImageInfo2 const & ) const = default;
+#else
+    bool operator==( CopyImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( srcImage == rhs.srcImage )
+          && ( srcImageLayout == rhs.srcImageLayout )
+          && ( dstImage == rhs.dstImage )
+          && ( dstImageLayout == rhs.dstImageLayout )
+          && ( regionCount == rhs.regionCount )
+          && ( pRegions == rhs.pRegions );
+#endif
+    }
+
+    bool operator!=( CopyImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyImageInfo2;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Image srcImage = {};
+    VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+    VULKAN_HPP_NAMESPACE::Image dstImage = {};
+    VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+    uint32_t regionCount = {};
+    const VULKAN_HPP_NAMESPACE::ImageCopy2 * pRegions = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eCopyImageInfo2>
+  {
+    using Type = CopyImageInfo2;
+  };
+  using CopyImageInfo2KHR = CopyImageInfo2;
+
+  struct CopyImageToBufferInfo2
+  {
+    using NativeType = VkCopyImageToBufferInfo2;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyImageToBufferInfo2;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR CopyImageToBufferInfo2(VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ = {}, uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstBuffer( dstBuffer_ ), regionCount( regionCount_ ), pRegions( pRegions_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CopyImageToBufferInfo2( CopyImageToBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    CopyImageToBufferInfo2( VkCopyImageToBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+      : CopyImageToBufferInfo2( *reinterpret_cast<CopyImageToBufferInfo2 const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    CopyImageToBufferInfo2( VULKAN_HPP_NAMESPACE::Image srcImage_, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, VULKAN_HPP_NAMESPACE::Buffer dstBuffer_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferImageCopy2> const & regions_, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstBuffer( dstBuffer_ ), regionCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    CopyImageToBufferInfo2 & operator=( CopyImageToBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    CopyImageToBufferInfo2 & operator=( VkCopyImageToBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcImage = srcImage_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcImageLayout = srcImageLayout_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setDstBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstBuffer = dstBuffer_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      regionCount = regionCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setPRegions( const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pRegions = pRegions_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    CopyImageToBufferInfo2 & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferImageCopy2> const & regions_ ) VULKAN_HPP_NOEXCEPT
+    {
+      regionCount = static_cast<uint32_t>( regions_.size() );
+      pRegions = regions_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkCopyImageToBufferInfo2 const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCopyImageToBufferInfo2*>( this );
+    }
+
+    operator VkCopyImageToBufferInfo2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCopyImageToBufferInfo2*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Image const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, VULKAN_HPP_NAMESPACE::Buffer const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( CopyImageToBufferInfo2 const & ) const = default;
+#else
+    bool operator==( CopyImageToBufferInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( srcImage == rhs.srcImage )
+          && ( srcImageLayout == rhs.srcImageLayout )
+          && ( dstBuffer == rhs.dstBuffer )
+          && ( regionCount == rhs.regionCount )
+          && ( pRegions == rhs.pRegions );
+#endif
+    }
+
+    bool operator!=( CopyImageToBufferInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyImageToBufferInfo2;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Image srcImage = {};
+    VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+    VULKAN_HPP_NAMESPACE::Buffer dstBuffer = {};
+    uint32_t regionCount = {};
+    const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eCopyImageToBufferInfo2>
+  {
+    using Type = CopyImageToBufferInfo2;
+  };
+  using CopyImageToBufferInfo2KHR = CopyImageToBufferInfo2;
+
+  struct CopyMemoryIndirectCommandNV
+  {
+    using NativeType = VkCopyMemoryIndirectCommandNV;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR CopyMemoryIndirectCommandNV(VULKAN_HPP_NAMESPACE::DeviceAddress srcAddress_ = {}, VULKAN_HPP_NAMESPACE::DeviceAddress dstAddress_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}) VULKAN_HPP_NOEXCEPT
+    : srcAddress( srcAddress_ ), dstAddress( dstAddress_ ), size( size_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CopyMemoryIndirectCommandNV( CopyMemoryIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    CopyMemoryIndirectCommandNV( VkCopyMemoryIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : CopyMemoryIndirectCommandNV( *reinterpret_cast<CopyMemoryIndirectCommandNV const *>( &rhs ) )
+    {}
+
+
+    CopyMemoryIndirectCommandNV & operator=( CopyMemoryIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    CopyMemoryIndirectCommandNV & operator=( VkCopyMemoryIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyMemoryIndirectCommandNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 CopyMemoryIndirectCommandNV & setSrcAddress( VULKAN_HPP_NAMESPACE::DeviceAddress srcAddress_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcAddress = srcAddress_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CopyMemoryIndirectCommandNV & setDstAddress( VULKAN_HPP_NAMESPACE::DeviceAddress dstAddress_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstAddress = dstAddress_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CopyMemoryIndirectCommandNV & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
+    {
+      size = size_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkCopyMemoryIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCopyMemoryIndirectCommandNV*>( this );
+    }
+
+    operator VkCopyMemoryIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCopyMemoryIndirectCommandNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::DeviceAddress const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( srcAddress, dstAddress, size );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( CopyMemoryIndirectCommandNV const & ) const = default;
+#else
+    bool operator==( CopyMemoryIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( srcAddress == rhs.srcAddress )
+          && ( dstAddress == rhs.dstAddress )
+          && ( size == rhs.size );
+#endif
+    }
+
+    bool operator!=( CopyMemoryIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::DeviceAddress srcAddress = {};
+    VULKAN_HPP_NAMESPACE::DeviceAddress dstAddress = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+
+  };
+
+  struct CopyMemoryToAccelerationStructureInfoKHR
+  {
+    using NativeType = VkCopyMemoryToAccelerationStructureInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyMemoryToAccelerationStructureInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR(VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR src_ = {}, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ = {}, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), src( src_ ), dst( dst_ ), mode( mode_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR( CopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    CopyMemoryToAccelerationStructureInfoKHR( VkCopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : CopyMemoryToAccelerationStructureInfoKHR( *reinterpret_cast<CopyMemoryToAccelerationStructureInfoKHR const *>( &rhs ) )
+    {}
+
+
+    CopyMemoryToAccelerationStructureInfoKHR & operator=( CopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    CopyMemoryToAccelerationStructureInfoKHR & operator=( VkCopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR & setSrc( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & src_ ) VULKAN_HPP_NOEXCEPT
+    {
+      src = src_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR & setDst( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dst = dst_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR & setMode( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mode = mode_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkCopyMemoryToAccelerationStructureInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR*>( this );
+    }
+
+    operator VkCopyMemoryToAccelerationStructureInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCopyMemoryToAccelerationStructureInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const &, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR const &, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, src, dst, mode );
+    }
+#endif
+
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyMemoryToAccelerationStructureInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR src = {};
+    VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst = {};
+    VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone;
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eCopyMemoryToAccelerationStructureInfoKHR>
+  {
+    using Type = CopyMemoryToAccelerationStructureInfoKHR;
+  };
+
+  struct CopyMemoryToImageIndirectCommandNV
+  {
+    using NativeType = VkCopyMemoryToImageIndirectCommandNV;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR CopyMemoryToImageIndirectCommandNV(VULKAN_HPP_NAMESPACE::DeviceAddress srcAddress_ = {}, uint32_t bufferRowLength_ = {}, uint32_t bufferImageHeight_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {}) VULKAN_HPP_NOEXCEPT
+    : srcAddress( srcAddress_ ), bufferRowLength( bufferRowLength_ ), bufferImageHeight( bufferImageHeight_ ), imageSubresource( imageSubresource_ ), imageOffset( imageOffset_ ), imageExtent( imageExtent_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CopyMemoryToImageIndirectCommandNV( CopyMemoryToImageIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    CopyMemoryToImageIndirectCommandNV( VkCopyMemoryToImageIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : CopyMemoryToImageIndirectCommandNV( *reinterpret_cast<CopyMemoryToImageIndirectCommandNV const *>( &rhs ) )
+    {}
+
+
+    CopyMemoryToImageIndirectCommandNV & operator=( CopyMemoryToImageIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    CopyMemoryToImageIndirectCommandNV & operator=( VkCopyMemoryToImageIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyMemoryToImageIndirectCommandNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectCommandNV & setSrcAddress( VULKAN_HPP_NAMESPACE::DeviceAddress srcAddress_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcAddress = srcAddress_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectCommandNV & setBufferRowLength( uint32_t bufferRowLength_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferRowLength = bufferRowLength_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectCommandNV & setBufferImageHeight( uint32_t bufferImageHeight_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferImageHeight = bufferImageHeight_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectCommandNV & setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageSubresource = imageSubresource_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectCommandNV & setImageOffset( VULKAN_HPP_NAMESPACE::Offset3D const & imageOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageOffset = imageOffset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectCommandNV & setImageExtent( VULKAN_HPP_NAMESPACE::Extent3D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageExtent = imageExtent_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkCopyMemoryToImageIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCopyMemoryToImageIndirectCommandNV*>( this );
+    }
+
+    operator VkCopyMemoryToImageIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCopyMemoryToImageIndirectCommandNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::DeviceAddress const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &, VULKAN_HPP_NAMESPACE::Offset3D const &, VULKAN_HPP_NAMESPACE::Extent3D const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( srcAddress, bufferRowLength, bufferImageHeight, imageSubresource, imageOffset, imageExtent );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( CopyMemoryToImageIndirectCommandNV const & ) const = default;
+#else
+    bool operator==( CopyMemoryToImageIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( srcAddress == rhs.srcAddress )
+          && ( bufferRowLength == rhs.bufferRowLength )
+          && ( bufferImageHeight == rhs.bufferImageHeight )
+          && ( imageSubresource == rhs.imageSubresource )
+          && ( imageOffset == rhs.imageOffset )
+          && ( imageExtent == rhs.imageExtent );
+#endif
+    }
+
+    bool operator!=( CopyMemoryToImageIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::DeviceAddress srcAddress = {};
+    uint32_t bufferRowLength = {};
+    uint32_t bufferImageHeight = {};
+    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource = {};
+    VULKAN_HPP_NAMESPACE::Offset3D imageOffset = {};
+    VULKAN_HPP_NAMESPACE::Extent3D imageExtent = {};
+
+  };
+
+  struct CopyMemoryToMicromapInfoEXT
+  {
+    using NativeType = VkCopyMemoryToMicromapInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyMemoryToMicromapInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR_14 CopyMemoryToMicromapInfoEXT(VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR src_ = {}, VULKAN_HPP_NAMESPACE::MicromapEXT dst_ = {}, VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode_ = VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT::eClone, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), src( src_ ), dst( dst_ ), mode( mode_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 CopyMemoryToMicromapInfoEXT( CopyMemoryToMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    CopyMemoryToMicromapInfoEXT( VkCopyMemoryToMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : CopyMemoryToMicromapInfoEXT( *reinterpret_cast<CopyMemoryToMicromapInfoEXT const *>( &rhs ) )
+    {}
+
+
+    CopyMemoryToMicromapInfoEXT & operator=( CopyMemoryToMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    CopyMemoryToMicromapInfoEXT & operator=( VkCopyMemoryToMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 CopyMemoryToMicromapInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CopyMemoryToMicromapInfoEXT & setSrc( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & src_ ) VULKAN_HPP_NOEXCEPT
+    {
+      src = src_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CopyMemoryToMicromapInfoEXT & setDst( VULKAN_HPP_NAMESPACE::MicromapEXT dst_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dst = dst_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CopyMemoryToMicromapInfoEXT & setMode( VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mode = mode_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkCopyMemoryToMicromapInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT*>( this );
+    }
+
+    operator VkCopyMemoryToMicromapInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCopyMemoryToMicromapInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const &, VULKAN_HPP_NAMESPACE::MicromapEXT const &, VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, src, dst, mode );
+    }
+#endif
+
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyMemoryToMicromapInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR src = {};
+    VULKAN_HPP_NAMESPACE::MicromapEXT dst = {};
+    VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode = VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT::eClone;
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eCopyMemoryToMicromapInfoEXT>
+  {
+    using Type = CopyMemoryToMicromapInfoEXT;
+  };
+
+  // C++ wrapper for VkCopyMicromapInfoEXT (VK_EXT_opacity_micromap): parameters for a
+  // micromap-to-micromap copy. Must stay layout-identical to the C struct — the
+  // conversion operators below are reinterpret_casts of *this.
+  struct CopyMicromapInfoEXT
+  {
+    using NativeType = VkCopyMicromapInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyMicromapInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR CopyMicromapInfoEXT(VULKAN_HPP_NAMESPACE::MicromapEXT src_ = {}, VULKAN_HPP_NAMESPACE::MicromapEXT dst_ = {}, VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode_ = VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT::eClone, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), src( src_ ), dst( dst_ ), mode( mode_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CopyMicromapInfoEXT( CopyMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; relies on identical layout (see class comment).
+    CopyMicromapInfoEXT( VkCopyMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : CopyMicromapInfoEXT( *reinterpret_cast<CopyMicromapInfoEXT const *>( &rhs ) )
+    {}
+
+
+    CopyMicromapInfoEXT & operator=( CopyMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (layout pun, same as the converting constructor).
+    CopyMicromapInfoEXT & operator=( VkCopyMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters (builder pattern); each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 CopyMicromapInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CopyMicromapInfoEXT & setSrc( VULKAN_HPP_NAMESPACE::MicromapEXT src_ ) VULKAN_HPP_NOEXCEPT
+    {
+      src = src_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CopyMicromapInfoEXT & setDst( VULKAN_HPP_NAMESPACE::MicromapEXT dst_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dst = dst_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CopyMicromapInfoEXT & setMode( VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mode = mode_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C API type (layout pun).
+    operator VkCopyMicromapInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCopyMicromapInfoEXT*>( this );
+    }
+
+    operator VkCopyMicromapInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCopyMicromapInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::MicromapEXT const &, VULKAN_HPP_NAMESPACE::MicromapEXT const &, VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      // Tuple of references over all members, in declaration order.
+      return std::tie( sType, pNext, src, dst, mode );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( CopyMicromapInfoEXT const & ) const = default;
+#else
+    bool operator==( CopyMicromapInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      // Memberwise comparison; pNext is compared by pointer value only.
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( src == rhs.src )
+          && ( dst == rhs.dst )
+          && ( mode == rhs.mode );
+#endif
+    }
+
+    bool operator!=( CopyMicromapInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyMicromapInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::MicromapEXT src = {};
+    VULKAN_HPP_NAMESPACE::MicromapEXT dst = {};
+    VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode = VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT::eClone;
+
+  };
+
+  // StructureType -> wrapper-type mapping used by vulkan.hpp metaprogramming.
+  template <>
+  struct CppType<StructureType, StructureType::eCopyMicromapInfoEXT>
+  {
+    using Type = CopyMicromapInfoEXT;
+  };
+
+  // C++ wrapper for VkCopyMicromapToMemoryInfoEXT (VK_EXT_opacity_micromap):
+  // parameters for serializing a micromap into caller-provided memory. Layout-identical
+  // to the C struct (conversion operators are reinterpret_casts). No ==/<=> operators
+  // are generated because `dst` is a union (DeviceOrHostAddressKHR), which cannot be
+  // compared meaningfully.
+  struct CopyMicromapToMemoryInfoEXT
+  {
+    using NativeType = VkCopyMicromapToMemoryInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyMicromapToMemoryInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR_14 CopyMicromapToMemoryInfoEXT(VULKAN_HPP_NAMESPACE::MicromapEXT src_ = {}, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dst_ = {}, VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode_ = VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT::eClone, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), src( src_ ), dst( dst_ ), mode( mode_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 CopyMicromapToMemoryInfoEXT( CopyMicromapToMemoryInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; relies on identical layout (see class comment).
+    CopyMicromapToMemoryInfoEXT( VkCopyMicromapToMemoryInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : CopyMicromapToMemoryInfoEXT( *reinterpret_cast<CopyMicromapToMemoryInfoEXT const *>( &rhs ) )
+    {}
+
+
+    CopyMicromapToMemoryInfoEXT & operator=( CopyMicromapToMemoryInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (layout pun, same as the converting constructor).
+    CopyMicromapToMemoryInfoEXT & operator=( VkCopyMicromapToMemoryInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters (builder pattern); each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 CopyMicromapToMemoryInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CopyMicromapToMemoryInfoEXT & setSrc( VULKAN_HPP_NAMESPACE::MicromapEXT src_ ) VULKAN_HPP_NOEXCEPT
+    {
+      src = src_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CopyMicromapToMemoryInfoEXT & setDst( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & dst_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dst = dst_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CopyMicromapToMemoryInfoEXT & setMode( VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mode = mode_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C API type (layout pun).
+    operator VkCopyMicromapToMemoryInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT*>( this );
+    }
+
+    operator VkCopyMicromapToMemoryInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCopyMicromapToMemoryInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::MicromapEXT const &, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const &, VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      // Tuple of references over all members, in declaration order.
+      return std::tie( sType, pNext, src, dst, mode );
+    }
+#endif
+
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyMicromapToMemoryInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::MicromapEXT src = {};
+    VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dst = {};
+    VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode = VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT::eClone;
+
+  };
+
+  // StructureType -> wrapper-type mapping used by vulkan.hpp metaprogramming.
+  template <>
+  struct CppType<StructureType, StructureType::eCopyMicromapToMemoryInfoEXT>
+  {
+    using Type = CopyMicromapToMemoryInfoEXT;
+  };
+
+  // C++ wrapper for VkCuFunctionCreateInfoNVX (VK_NVX_binary_import): identifies a CUDA
+  // kernel by name within a previously imported CuModuleNVX. Layout-identical to the C
+  // struct (conversion operators are reinterpret_casts).
+  struct CuFunctionCreateInfoNVX
+  {
+    using NativeType = VkCuFunctionCreateInfoNVX;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCuFunctionCreateInfoNVX;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR CuFunctionCreateInfoNVX(VULKAN_HPP_NAMESPACE::CuModuleNVX module_ = {}, const char * pName_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), module( module_ ), pName( pName_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CuFunctionCreateInfoNVX( CuFunctionCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; relies on identical layout (see class comment).
+    CuFunctionCreateInfoNVX( VkCuFunctionCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+      : CuFunctionCreateInfoNVX( *reinterpret_cast<CuFunctionCreateInfoNVX const *>( &rhs ) )
+    {}
+
+
+    CuFunctionCreateInfoNVX & operator=( CuFunctionCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (layout pun, same as the converting constructor).
+    CuFunctionCreateInfoNVX & operator=( VkCuFunctionCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters (builder pattern); each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 CuFunctionCreateInfoNVX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CuFunctionCreateInfoNVX & setModule( VULKAN_HPP_NAMESPACE::CuModuleNVX module_ ) VULKAN_HPP_NOEXCEPT
+    {
+      module = module_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CuFunctionCreateInfoNVX & setPName( const char * pName_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pName = pName_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C API type (layout pun).
+    operator VkCuFunctionCreateInfoNVX const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCuFunctionCreateInfoNVX*>( this );
+    }
+
+    operator VkCuFunctionCreateInfoNVX &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCuFunctionCreateInfoNVX*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::CuModuleNVX const &, const char * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      // Tuple of references over all members, in declaration order.
+      return std::tie( sType, pNext, module, pName );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    // Hand-rolled (non-defaulted) <=> so pName is compared by string content, not pointer.
+    std::strong_ordering operator<=>( CuFunctionCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
+      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
+      if ( auto cmp = module <=> rhs.module; cmp != 0 ) return cmp;
+      // NOTE(review): strcmp is reached whenever the pointers differ, including when
+      // exactly one side is null — strcmp on a null pointer is UB. Presumably callers
+      // always set pName for a valid create-info; confirm against the upstream contract.
+     if ( pName != rhs.pName )
+        if ( auto cmp = strcmp( pName, rhs.pName ); cmp != 0 )
+          return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
+
+      return std::strong_ordering::equivalent;
+    }
+#endif
+
+    bool operator==( CuFunctionCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      // pName is compared by content; the pointer-equality short-circuit also covers
+      // the both-null case (same UB caveat as <=> when exactly one side is null).
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( module == rhs.module )
+          && ( ( pName == rhs.pName ) || ( strcmp( pName, rhs.pName ) == 0 ) );
+    }
+
+    bool operator!=( CuFunctionCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCuFunctionCreateInfoNVX;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::CuModuleNVX module = {};
+    const char * pName = {};
+
+  };
+
+  // StructureType -> wrapper-type mapping used by vulkan.hpp metaprogramming.
+  template <>
+  struct CppType<StructureType, StructureType::eCuFunctionCreateInfoNVX>
+  {
+    using Type = CuFunctionCreateInfoNVX;
+  };
+
+  // C++ wrapper for VkCuLaunchInfoNVX (VK_NVX_binary_import): grid/block dimensions and
+  // parameter arrays for launching an imported CUDA kernel. Layout-identical to the C
+  // struct (conversion operators are reinterpret_casts). The ArrayProxy constructor and
+  // set{Params,Extras} helpers fill the count/pointer pairs from a single range; the
+  // struct does NOT own the pointed-to arrays — callers must keep them alive.
+  struct CuLaunchInfoNVX
+  {
+    using NativeType = VkCuLaunchInfoNVX;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCuLaunchInfoNVX;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR CuLaunchInfoNVX(VULKAN_HPP_NAMESPACE::CuFunctionNVX function_ = {}, uint32_t gridDimX_ = {}, uint32_t gridDimY_ = {}, uint32_t gridDimZ_ = {}, uint32_t blockDimX_ = {}, uint32_t blockDimY_ = {}, uint32_t blockDimZ_ = {}, uint32_t sharedMemBytes_ = {}, size_t paramCount_ = {}, const void * const * pParams_ = {}, size_t extraCount_ = {}, const void * const * pExtras_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), function( function_ ), gridDimX( gridDimX_ ), gridDimY( gridDimY_ ), gridDimZ( gridDimZ_ ), blockDimX( blockDimX_ ), blockDimY( blockDimY_ ), blockDimZ( blockDimZ_ ), sharedMemBytes( sharedMemBytes_ ), paramCount( paramCount_ ), pParams( pParams_ ), extraCount( extraCount_ ), pExtras( pExtras_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CuLaunchInfoNVX( CuLaunchInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; relies on identical layout (see class comment).
+    CuLaunchInfoNVX( VkCuLaunchInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+      : CuLaunchInfoNVX( *reinterpret_cast<CuLaunchInfoNVX const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Convenience constructor: derives paramCount/pParams and extraCount/pExtras from
+    // the given ranges (no copy — the proxies must outlive this struct's use).
+    CuLaunchInfoNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function_, uint32_t gridDimX_, uint32_t gridDimY_, uint32_t gridDimZ_, uint32_t blockDimX_, uint32_t blockDimY_, uint32_t blockDimZ_, uint32_t sharedMemBytes_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const void * const> const & params_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const void * const> const & extras_ = {}, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), function( function_ ), gridDimX( gridDimX_ ), gridDimY( gridDimY_ ), gridDimZ( gridDimZ_ ), blockDimX( blockDimX_ ), blockDimY( blockDimY_ ), blockDimZ( blockDimZ_ ), sharedMemBytes( sharedMemBytes_ ), paramCount( params_.size() ), pParams( params_.data() ), extraCount( extras_.size() ), pExtras( extras_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    CuLaunchInfoNVX & operator=( CuLaunchInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (layout pun, same as the converting constructor).
+    CuLaunchInfoNVX & operator=( VkCuLaunchInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters (builder pattern); each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setFunction( VULKAN_HPP_NAMESPACE::CuFunctionNVX function_ ) VULKAN_HPP_NOEXCEPT
+    {
+      function = function_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setGridDimX( uint32_t gridDimX_ ) VULKAN_HPP_NOEXCEPT
+    {
+      gridDimX = gridDimX_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setGridDimY( uint32_t gridDimY_ ) VULKAN_HPP_NOEXCEPT
+    {
+      gridDimY = gridDimY_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setGridDimZ( uint32_t gridDimZ_ ) VULKAN_HPP_NOEXCEPT
+    {
+      gridDimZ = gridDimZ_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setBlockDimX( uint32_t blockDimX_ ) VULKAN_HPP_NOEXCEPT
+    {
+      blockDimX = blockDimX_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setBlockDimY( uint32_t blockDimY_ ) VULKAN_HPP_NOEXCEPT
+    {
+      blockDimY = blockDimY_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setBlockDimZ( uint32_t blockDimZ_ ) VULKAN_HPP_NOEXCEPT
+    {
+      blockDimZ = blockDimZ_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setSharedMemBytes( uint32_t sharedMemBytes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sharedMemBytes = sharedMemBytes_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setParamCount( size_t paramCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      paramCount = paramCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setPParams( const void * const * pParams_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pParams = pParams_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets paramCount and pParams together from one range (keeps them consistent).
+    CuLaunchInfoNVX & setParams( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const void * const> const & params_ ) VULKAN_HPP_NOEXCEPT
+    {
+      paramCount = params_.size();
+      pParams = params_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setExtraCount( size_t extraCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extraCount = extraCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setPExtras( const void * const * pExtras_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pExtras = pExtras_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets extraCount and pExtras together from one range (keeps them consistent).
+    CuLaunchInfoNVX & setExtras( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const void * const> const & extras_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extraCount = extras_.size();
+      pExtras = extras_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C API type (layout pun).
+    operator VkCuLaunchInfoNVX const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCuLaunchInfoNVX*>( this );
+    }
+
+    operator VkCuLaunchInfoNVX &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCuLaunchInfoNVX*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::CuFunctionNVX const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, size_t const &, const void * const * const &, size_t const &, const void * const * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      // Tuple of references over all members, in declaration order.
+      return std::tie( sType, pNext, function, gridDimX, gridDimY, gridDimZ, blockDimX, blockDimY, blockDimZ, sharedMemBytes, paramCount, pParams, extraCount, pExtras );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( CuLaunchInfoNVX const & ) const = default;
+#else
+    bool operator==( CuLaunchInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      // Memberwise comparison; pParams/pExtras compare by pointer, not array content.
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( function == rhs.function )
+          && ( gridDimX == rhs.gridDimX )
+          && ( gridDimY == rhs.gridDimY )
+          && ( gridDimZ == rhs.gridDimZ )
+          && ( blockDimX == rhs.blockDimX )
+          && ( blockDimY == rhs.blockDimY )
+          && ( blockDimZ == rhs.blockDimZ )
+          && ( sharedMemBytes == rhs.sharedMemBytes )
+          && ( paramCount == rhs.paramCount )
+          && ( pParams == rhs.pParams )
+          && ( extraCount == rhs.extraCount )
+          && ( pExtras == rhs.pExtras );
+#endif
+    }
+
+    bool operator!=( CuLaunchInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCuLaunchInfoNVX;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::CuFunctionNVX function = {};
+    uint32_t gridDimX = {};
+    uint32_t gridDimY = {};
+    uint32_t gridDimZ = {};
+    uint32_t blockDimX = {};
+    uint32_t blockDimY = {};
+    uint32_t blockDimZ = {};
+    uint32_t sharedMemBytes = {};
+    size_t paramCount = {};
+    const void * const * pParams = {};
+    size_t extraCount = {};
+    const void * const * pExtras = {};
+
+  };
+
+  // StructureType -> wrapper-type mapping used by vulkan.hpp metaprogramming.
+  template <>
+  struct CppType<StructureType, StructureType::eCuLaunchInfoNVX>
+  {
+    using Type = CuLaunchInfoNVX;
+  };
+
+  // C++ wrapper for VkCuModuleCreateInfoNVX (VK_NVX_binary_import): a blob of CUDA module
+  // binary data (dataSize bytes at pData) to import. Layout-identical to the C struct
+  // (conversion operators are reinterpret_casts). The data is not owned or copied.
+  struct CuModuleCreateInfoNVX
+  {
+    using NativeType = VkCuModuleCreateInfoNVX;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCuModuleCreateInfoNVX;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR CuModuleCreateInfoNVX(size_t dataSize_ = {}, const void * pData_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), dataSize( dataSize_ ), pData( pData_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CuModuleCreateInfoNVX( CuModuleCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; relies on identical layout (see class comment).
+    CuModuleCreateInfoNVX( VkCuModuleCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+      : CuModuleCreateInfoNVX( *reinterpret_cast<CuModuleCreateInfoNVX const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Convenience constructor: derives dataSize (in bytes, scaled by sizeof(T)) and
+    // pData from a typed range; the range must outlive this struct's use.
+    template <typename T>
+    CuModuleCreateInfoNVX( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & data_, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), dataSize( data_.size() * sizeof(T) ), pData( data_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    CuModuleCreateInfoNVX & operator=( CuModuleCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (layout pun, same as the converting constructor).
+    CuModuleCreateInfoNVX & operator=( VkCuModuleCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters (builder pattern); each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 CuModuleCreateInfoNVX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CuModuleCreateInfoNVX & setDataSize( size_t dataSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dataSize = dataSize_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CuModuleCreateInfoNVX & setPData( const void * pData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pData = pData_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets dataSize (bytes) and pData together from one typed range.
+    template <typename T>
+    CuModuleCreateInfoNVX & setData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & data_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dataSize = data_.size() * sizeof(T);
+      pData = data_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C API type (layout pun).
+    operator VkCuModuleCreateInfoNVX const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCuModuleCreateInfoNVX*>( this );
+    }
+
+    operator VkCuModuleCreateInfoNVX &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCuModuleCreateInfoNVX*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, size_t const &, const void * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      // Tuple of references over all members, in declaration order.
+      return std::tie( sType, pNext, dataSize, pData );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( CuModuleCreateInfoNVX const & ) const = default;
+#else
+    bool operator==( CuModuleCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      // Memberwise comparison; pData compares by pointer, not blob content.
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( dataSize == rhs.dataSize )
+          && ( pData == rhs.pData );
+#endif
+    }
+
+    bool operator!=( CuModuleCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCuModuleCreateInfoNVX;
+    const void * pNext = {};
+    size_t dataSize = {};
+    const void * pData = {};
+
+  };
+
+  // StructureType -> wrapper-type mapping used by vulkan.hpp metaprogramming.
+  template <>
+  struct CppType<StructureType, StructureType::eCuModuleCreateInfoNVX>
+  {
+    using Type = CuModuleCreateInfoNVX;
+  };
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  // C++ wrapper for VkD3D12FenceSubmitInfoKHR (VK_KHR_external_semaphore_win32, Win32 only):
+  // per-submit D3D12 fence values for imported timeline-style semaphores. Layout-identical
+  // to the C struct (conversion operators are reinterpret_casts). The value arrays are
+  // referenced, not copied.
+  struct D3D12FenceSubmitInfoKHR
+  {
+    using NativeType = VkD3D12FenceSubmitInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eD3D12FenceSubmitInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR D3D12FenceSubmitInfoKHR(uint32_t waitSemaphoreValuesCount_ = {}, const uint64_t * pWaitSemaphoreValues_ = {}, uint32_t signalSemaphoreValuesCount_ = {}, const uint64_t * pSignalSemaphoreValues_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), waitSemaphoreValuesCount( waitSemaphoreValuesCount_ ), pWaitSemaphoreValues( pWaitSemaphoreValues_ ), signalSemaphoreValuesCount( signalSemaphoreValuesCount_ ), pSignalSemaphoreValues( pSignalSemaphoreValues_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR D3D12FenceSubmitInfoKHR( D3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; relies on identical layout (see class comment).
+    D3D12FenceSubmitInfoKHR( VkD3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : D3D12FenceSubmitInfoKHR( *reinterpret_cast<D3D12FenceSubmitInfoKHR const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Convenience constructor: derives each count/pointer pair from a range
+    // (no copy — the proxies must outlive this struct's use).
+    D3D12FenceSubmitInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & waitSemaphoreValues_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & signalSemaphoreValues_ = {}, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), waitSemaphoreValuesCount( static_cast<uint32_t>( waitSemaphoreValues_.size() ) ), pWaitSemaphoreValues( waitSemaphoreValues_.data() ), signalSemaphoreValuesCount( static_cast<uint32_t>( signalSemaphoreValues_.size() ) ), pSignalSemaphoreValues( signalSemaphoreValues_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    D3D12FenceSubmitInfoKHR & operator=( D3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (layout pun, same as the converting constructor).
+    D3D12FenceSubmitInfoKHR & operator=( VkD3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters (builder pattern); each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & setWaitSemaphoreValuesCount( uint32_t waitSemaphoreValuesCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      waitSemaphoreValuesCount = waitSemaphoreValuesCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & setPWaitSemaphoreValues( const uint64_t * pWaitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pWaitSemaphoreValues = pWaitSemaphoreValues_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets waitSemaphoreValuesCount and pWaitSemaphoreValues together from one range.
+    D3D12FenceSubmitInfoKHR & setWaitSemaphoreValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & waitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
+    {
+      waitSemaphoreValuesCount = static_cast<uint32_t>( waitSemaphoreValues_.size() );
+      pWaitSemaphoreValues = waitSemaphoreValues_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & setSignalSemaphoreValuesCount( uint32_t signalSemaphoreValuesCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      signalSemaphoreValuesCount = signalSemaphoreValuesCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & setPSignalSemaphoreValues( const uint64_t * pSignalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSignalSemaphoreValues = pSignalSemaphoreValues_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets signalSemaphoreValuesCount and pSignalSemaphoreValues together from one range.
+    D3D12FenceSubmitInfoKHR & setSignalSemaphoreValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & signalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
+    {
+      signalSemaphoreValuesCount = static_cast<uint32_t>( signalSemaphoreValues_.size() );
+      pSignalSemaphoreValues = signalSemaphoreValues_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C API type (layout pun).
+    operator VkD3D12FenceSubmitInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkD3D12FenceSubmitInfoKHR*>( this );
+    }
+
+    operator VkD3D12FenceSubmitInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkD3D12FenceSubmitInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const uint64_t * const &, uint32_t const &, const uint64_t * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      // Tuple of references over all members, in declaration order.
+      return std::tie( sType, pNext, waitSemaphoreValuesCount, pWaitSemaphoreValues, signalSemaphoreValuesCount, pSignalSemaphoreValues );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( D3D12FenceSubmitInfoKHR const & ) const = default;
+#else
+    bool operator==( D3D12FenceSubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      // Memberwise comparison; value arrays compare by pointer, not content.
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( waitSemaphoreValuesCount == rhs.waitSemaphoreValuesCount )
+          && ( pWaitSemaphoreValues == rhs.pWaitSemaphoreValues )
+          && ( signalSemaphoreValuesCount == rhs.signalSemaphoreValuesCount )
+          && ( pSignalSemaphoreValues == rhs.pSignalSemaphoreValues );
+#endif
+    }
+
+    bool operator!=( D3D12FenceSubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eD3D12FenceSubmitInfoKHR;
+    const void * pNext = {};
+    uint32_t waitSemaphoreValuesCount = {};
+    const uint64_t * pWaitSemaphoreValues = {};
+    uint32_t signalSemaphoreValuesCount = {};
+    const uint64_t * pSignalSemaphoreValues = {};
+
+  };
+
+  // StructureType -> wrapper-type mapping used by vulkan.hpp metaprogramming.
+  template <>
+  struct CppType<StructureType, StructureType::eD3D12FenceSubmitInfoKHR>
+  {
+    using Type = D3D12FenceSubmitInfoKHR;
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  // C++ wrapper for VkDebugMarkerMarkerInfoEXT (VK_EXT_debug_marker): a marker
+  // name plus an optional float[4] color.  Layout-compatible with the C struct,
+  // so the Vk* conversion operators below are plain reinterpret_casts of *this.
+  struct DebugMarkerMarkerInfoEXT
+  {
+    using NativeType = VkDebugMarkerMarkerInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugMarkerMarkerInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT(const char * pMarkerName_ = {}, std::array<float,4> const & color_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), pMarkerName( pMarkerName_ ), color( color_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT( DebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid only because both types share one layout.
+    DebugMarkerMarkerInfoEXT( VkDebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DebugMarkerMarkerInfoEXT( *reinterpret_cast<DebugMarkerMarkerInfoEXT const *>( &rhs ) )
+    {}
+
+
+    DebugMarkerMarkerInfoEXT & operator=( DebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DebugMarkerMarkerInfoEXT & operator=( VkDebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable builder-style setters; each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT & setPMarkerName( const char * pMarkerName_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pMarkerName = pMarkerName_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT & setColor( std::array<float,4> color_ ) VULKAN_HPP_NOEXCEPT
+    {
+      color = color_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkDebugMarkerMarkerInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDebugMarkerMarkerInfoEXT*>( this );
+    }
+
+    operator VkDebugMarkerMarkerInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDebugMarkerMarkerInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const char * const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, pMarkerName, color );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    // Partial ordering because the float color components can be NaN.
+    // pMarkerName is ordered by string content (strcmp), not pointer value.
+    // NOTE(review): strcmp requires non-null pointers; behaviour is undefined if
+    // exactly one side's pMarkerName is null — confirm callers never mix these.
+    std::partial_ordering operator<=>( DebugMarkerMarkerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
+      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
+     if ( pMarkerName != rhs.pMarkerName )
+        if ( auto cmp = strcmp( pMarkerName, rhs.pMarkerName ); cmp != 0 )
+          return ( cmp < 0 ) ? std::partial_ordering::less : std::partial_ordering::greater;
+      if ( auto cmp = color <=> rhs.color; cmp != 0 ) return cmp;
+
+      return std::partial_ordering::equivalent;
+    }
+#endif
+
+    // Deep-compares pMarkerName by content (strcmp) when the pointers differ.
+    bool operator==( DebugMarkerMarkerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( ( pMarkerName == rhs.pMarkerName ) || ( strcmp( pMarkerName, rhs.pMarkerName ) == 0 ) )
+          && ( color == rhs.color );
+    }
+
+    bool operator!=( DebugMarkerMarkerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+    // Member order/types must exactly mirror VkDebugMarkerMarkerInfoEXT.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerMarkerInfoEXT;
+    const void * pNext = {};
+    const char * pMarkerName = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> color = {};
+
+  };
+
+  // Maps the StructureType enum value back to this wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eDebugMarkerMarkerInfoEXT>
+  {
+    using Type = DebugMarkerMarkerInfoEXT;
+  };
+
+  // C++ wrapper for VkDebugMarkerObjectNameInfoEXT (VK_EXT_debug_marker):
+  // attaches a human-readable name to a Vulkan object handle.
+  // Layout-compatible with the C struct (conversions are reinterpret_casts).
+  struct DebugMarkerObjectNameInfoEXT
+  {
+    using NativeType = VkDebugMarkerObjectNameInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugMarkerObjectNameInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DebugMarkerObjectNameInfoEXT(VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown, uint64_t object_ = {}, const char * pObjectName_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), objectType( objectType_ ), object( object_ ), pObjectName( pObjectName_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DebugMarkerObjectNameInfoEXT( DebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid only because both types share one layout.
+    DebugMarkerObjectNameInfoEXT( VkDebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DebugMarkerObjectNameInfoEXT( *reinterpret_cast<DebugMarkerObjectNameInfoEXT const *>( &rhs ) )
+    {}
+
+
+    DebugMarkerObjectNameInfoEXT & operator=( DebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DebugMarkerObjectNameInfoEXT & operator=( VkDebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable builder-style setters; each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectNameInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectNameInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      objectType = objectType_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectNameInfoEXT & setObject( uint64_t object_ ) VULKAN_HPP_NOEXCEPT
+    {
+      object = object_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectNameInfoEXT & setPObjectName( const char * pObjectName_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pObjectName = pObjectName_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkDebugMarkerObjectNameInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT*>( this );
+    }
+
+    operator VkDebugMarkerObjectNameInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDebugMarkerObjectNameInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT const &, uint64_t const &, const char * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, objectType, object, pObjectName );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    // Strong ordering (no floats here); pObjectName is ordered by string
+    // content via strcmp, not by pointer value.
+    // NOTE(review): strcmp is UB if exactly one side's pObjectName is null —
+    // confirm callers never compare a null against a non-null name.
+    std::strong_ordering operator<=>( DebugMarkerObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
+      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
+      if ( auto cmp = objectType <=> rhs.objectType; cmp != 0 ) return cmp;
+      if ( auto cmp = object <=> rhs.object; cmp != 0 ) return cmp;
+     if ( pObjectName != rhs.pObjectName )
+        if ( auto cmp = strcmp( pObjectName, rhs.pObjectName ); cmp != 0 )
+          return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
+
+      return std::strong_ordering::equivalent;
+    }
+#endif
+
+    // Deep-compares pObjectName by content (strcmp) when the pointers differ.
+    bool operator==( DebugMarkerObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( objectType == rhs.objectType )
+          && ( object == rhs.object )
+          && ( ( pObjectName == rhs.pObjectName ) || ( strcmp( pObjectName, rhs.pObjectName ) == 0 ) );
+    }
+
+    bool operator!=( DebugMarkerObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+    // Member order/types must exactly mirror VkDebugMarkerObjectNameInfoEXT.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerObjectNameInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+    uint64_t object = {};
+    const char * pObjectName = {};
+
+  };
+
+  // Maps the StructureType enum value back to this wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eDebugMarkerObjectNameInfoEXT>
+  {
+    using Type = DebugMarkerObjectNameInfoEXT;
+  };
+
+  // C++ wrapper for VkDebugMarkerObjectTagInfoEXT (VK_EXT_debug_marker):
+  // attaches an arbitrary binary tag (tagName + tagSize bytes at pTag) to a
+  // Vulkan object handle.  Layout-compatible with the C struct.
+  struct DebugMarkerObjectTagInfoEXT
+  {
+    using NativeType = VkDebugMarkerObjectTagInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugMarkerObjectTagInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DebugMarkerObjectTagInfoEXT(VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown, uint64_t object_ = {}, uint64_t tagName_ = {}, size_t tagSize_ = {}, const void * pTag_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), objectType( objectType_ ), object( object_ ), tagName( tagName_ ), tagSize( tagSize_ ), pTag( pTag_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DebugMarkerObjectTagInfoEXT( DebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid only because both types share one layout.
+    DebugMarkerObjectTagInfoEXT( VkDebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DebugMarkerObjectTagInfoEXT( *reinterpret_cast<DebugMarkerObjectTagInfoEXT const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Enhanced-mode convenience: derives tagSize (bytes) and pTag from an
+    // ArrayProxy of T; the proxy must outlive this struct (no copy is made).
+    template <typename T>
+    DebugMarkerObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_, uint64_t object_, uint64_t tagName_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & tag_, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), objectType( objectType_ ), object( object_ ), tagName( tagName_ ), tagSize( tag_.size() * sizeof(T) ), pTag( tag_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    DebugMarkerObjectTagInfoEXT & operator=( DebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DebugMarkerObjectTagInfoEXT & operator=( VkDebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable builder-style setters; each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      objectType = objectType_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & setObject( uint64_t object_ ) VULKAN_HPP_NOEXCEPT
+    {
+      object = object_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & setTagName( uint64_t tagName_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tagName = tagName_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & setTagSize( size_t tagSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tagSize = tagSize_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & setPTag( const void * pTag_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pTag = pTag_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets tagSize (in bytes) and pTag together from one ArrayProxy of T.
+    template <typename T>
+    DebugMarkerObjectTagInfoEXT & setTag( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & tag_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tagSize = tag_.size() * sizeof(T);
+      pTag = tag_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkDebugMarkerObjectTagInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT*>( this );
+    }
+
+    operator VkDebugMarkerObjectTagInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDebugMarkerObjectTagInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT const &, uint64_t const &, uint64_t const &, size_t const &, const void * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, objectType, object, tagName, tagSize, pTag );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+// Defaulted: member-wise comparison.  pTag compares by pointer identity, not
+// by the pointed-to tag bytes.
+auto operator<=>( DebugMarkerObjectTagInfoEXT const & ) const = default;
+#else
+    bool operator==( DebugMarkerObjectTagInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( objectType == rhs.objectType )
+          && ( object == rhs.object )
+          && ( tagName == rhs.tagName )
+          && ( tagSize == rhs.tagSize )
+          && ( pTag == rhs.pTag );
+#endif
+    }
+
+    bool operator!=( DebugMarkerObjectTagInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Member order/types must exactly mirror VkDebugMarkerObjectTagInfoEXT.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerObjectTagInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+    uint64_t object = {};
+    uint64_t tagName = {};
+    size_t tagSize = {};
+    const void * pTag = {};
+
+  };
+
+  // Maps the StructureType enum value back to this wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eDebugMarkerObjectTagInfoEXT>
+  {
+    using Type = DebugMarkerObjectTagInfoEXT;
+  };
+
+  // C++ wrapper for VkDebugReportCallbackCreateInfoEXT (VK_EXT_debug_report):
+  // creation info for a debug-report callback (severity flags, callback
+  // function pointer, opaque user data).  Layout-compatible with the C struct.
+  struct DebugReportCallbackCreateInfoEXT
+  {
+    using NativeType = VkDebugReportCallbackCreateInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugReportCallbackCreateInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DebugReportCallbackCreateInfoEXT(VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags_ = {}, PFN_vkDebugReportCallbackEXT pfnCallback_ = {}, void * pUserData_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), pfnCallback( pfnCallback_ ), pUserData( pUserData_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DebugReportCallbackCreateInfoEXT( DebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid only because both types share one layout.
+    DebugReportCallbackCreateInfoEXT( VkDebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DebugReportCallbackCreateInfoEXT( *reinterpret_cast<DebugReportCallbackCreateInfoEXT const *>( &rhs ) )
+    {}
+
+
+    DebugReportCallbackCreateInfoEXT & operator=( DebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DebugReportCallbackCreateInfoEXT & operator=( VkDebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable builder-style setters; each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 DebugReportCallbackCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugReportCallbackCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugReportCallbackCreateInfoEXT & setPfnCallback( PFN_vkDebugReportCallbackEXT pfnCallback_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pfnCallback = pfnCallback_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugReportCallbackCreateInfoEXT & setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pUserData = pUserData_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkDebugReportCallbackCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT*>( this );
+    }
+
+    operator VkDebugReportCallbackCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDebugReportCallbackCreateInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT const &, PFN_vkDebugReportCallbackEXT const &, void * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, pfnCallback, pUserData );
+    }
+#endif
+
+
+
+
+
+    // Member-wise equality; pfnCallback and pUserData compare by pointer value.
+    bool operator==( DebugReportCallbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( pfnCallback == rhs.pfnCallback )
+          && ( pUserData == rhs.pUserData );
+#endif
+    }
+
+    bool operator!=( DebugReportCallbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+    // Member order/types must exactly mirror VkDebugReportCallbackCreateInfoEXT.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugReportCallbackCreateInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags = {};
+    PFN_vkDebugReportCallbackEXT pfnCallback = {};
+    void * pUserData = {};
+
+  };
+
+  // Maps the StructureType enum value back to this wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eDebugReportCallbackCreateInfoEXT>
+  {
+    using Type = DebugReportCallbackCreateInfoEXT;
+  };
+
+  // C++ wrapper for VkDebugUtilsLabelEXT (VK_EXT_debug_utils): a label name
+  // plus an optional float[4] color for queue/command-buffer annotation.
+  // Layout-compatible with the C struct (conversions are reinterpret_casts).
+  struct DebugUtilsLabelEXT
+  {
+    using NativeType = VkDebugUtilsLabelEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsLabelEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT(const char * pLabelName_ = {}, std::array<float,4> const & color_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), pLabelName( pLabelName_ ), color( color_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT( DebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid only because both types share one layout.
+    DebugUtilsLabelEXT( VkDebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DebugUtilsLabelEXT( *reinterpret_cast<DebugUtilsLabelEXT const *>( &rhs ) )
+    {}
+
+
+    DebugUtilsLabelEXT & operator=( DebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DebugUtilsLabelEXT & operator=( VkDebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable builder-style setters; each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT & setPLabelName( const char * pLabelName_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pLabelName = pLabelName_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT & setColor( std::array<float,4> color_ ) VULKAN_HPP_NOEXCEPT
+    {
+      color = color_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkDebugUtilsLabelEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDebugUtilsLabelEXT*>( this );
+    }
+
+    operator VkDebugUtilsLabelEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDebugUtilsLabelEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const char * const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, pLabelName, color );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    // Partial ordering because the float color components can be NaN.
+    // pLabelName is ordered by string content (strcmp), not pointer value.
+    // NOTE(review): strcmp is UB if exactly one side's pLabelName is null —
+    // confirm callers never compare a null against a non-null name.
+    std::partial_ordering operator<=>( DebugUtilsLabelEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
+      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
+     if ( pLabelName != rhs.pLabelName )
+        if ( auto cmp = strcmp( pLabelName, rhs.pLabelName ); cmp != 0 )
+          return ( cmp < 0 ) ? std::partial_ordering::less : std::partial_ordering::greater;
+      if ( auto cmp = color <=> rhs.color; cmp != 0 ) return cmp;
+
+      return std::partial_ordering::equivalent;
+    }
+#endif
+
+    // Deep-compares pLabelName by content (strcmp) when the pointers differ.
+    bool operator==( DebugUtilsLabelEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( ( pLabelName == rhs.pLabelName ) || ( strcmp( pLabelName, rhs.pLabelName ) == 0 ) )
+          && ( color == rhs.color );
+    }
+
+    bool operator!=( DebugUtilsLabelEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+    // Member order/types must exactly mirror VkDebugUtilsLabelEXT.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsLabelEXT;
+    const void * pNext = {};
+    const char * pLabelName = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> color = {};
+
+  };
+
+  // Maps the StructureType enum value back to this wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eDebugUtilsLabelEXT>
+  {
+    using Type = DebugUtilsLabelEXT;
+  };
+
+  // C++ wrapper for VkDebugUtilsObjectNameInfoEXT (VK_EXT_debug_utils):
+  // attaches a human-readable name to a Vulkan object handle (by ObjectType,
+  // unlike the DebugReportObjectTypeEXT used by VK_EXT_debug_marker above).
+  // Layout-compatible with the C struct (conversions are reinterpret_casts).
+  struct DebugUtilsObjectNameInfoEXT
+  {
+    using NativeType = VkDebugUtilsObjectNameInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsObjectNameInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DebugUtilsObjectNameInfoEXT(VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown, uint64_t objectHandle_ = {}, const char * pObjectName_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), objectType( objectType_ ), objectHandle( objectHandle_ ), pObjectName( pObjectName_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DebugUtilsObjectNameInfoEXT( DebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid only because both types share one layout.
+    DebugUtilsObjectNameInfoEXT( VkDebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DebugUtilsObjectNameInfoEXT( *reinterpret_cast<DebugUtilsObjectNameInfoEXT const *>( &rhs ) )
+    {}
+
+
+    DebugUtilsObjectNameInfoEXT & operator=( DebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DebugUtilsObjectNameInfoEXT & operator=( VkDebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable builder-style setters; each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectNameInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectNameInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::ObjectType objectType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      objectType = objectType_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectNameInfoEXT & setObjectHandle( uint64_t objectHandle_ ) VULKAN_HPP_NOEXCEPT
+    {
+      objectHandle = objectHandle_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectNameInfoEXT & setPObjectName( const char * pObjectName_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pObjectName = pObjectName_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkDebugUtilsObjectNameInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT*>( this );
+    }
+
+    operator VkDebugUtilsObjectNameInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDebugUtilsObjectNameInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ObjectType const &, uint64_t const &, const char * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, objectType, objectHandle, pObjectName );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    // Strong ordering (no floats here); pObjectName is ordered by string
+    // content via strcmp, not by pointer value.
+    // NOTE(review): strcmp is UB if exactly one side's pObjectName is null —
+    // confirm callers never compare a null against a non-null name.
+    std::strong_ordering operator<=>( DebugUtilsObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
+      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
+      if ( auto cmp = objectType <=> rhs.objectType; cmp != 0 ) return cmp;
+      if ( auto cmp = objectHandle <=> rhs.objectHandle; cmp != 0 ) return cmp;
+     if ( pObjectName != rhs.pObjectName )
+        if ( auto cmp = strcmp( pObjectName, rhs.pObjectName ); cmp != 0 )
+          return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
+
+      return std::strong_ordering::equivalent;
+    }
+#endif
+
+    // Deep-compares pObjectName by content (strcmp) when the pointers differ.
+    bool operator==( DebugUtilsObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( objectType == rhs.objectType )
+          && ( objectHandle == rhs.objectHandle )
+          && ( ( pObjectName == rhs.pObjectName ) || ( strcmp( pObjectName, rhs.pObjectName ) == 0 ) );
+    }
+
+    bool operator!=( DebugUtilsObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+    // Member order/types must exactly mirror VkDebugUtilsObjectNameInfoEXT.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsObjectNameInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown;
+    uint64_t objectHandle = {};
+    const char * pObjectName = {};
+
+  };
+
+  // Maps the StructureType enum value back to this wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eDebugUtilsObjectNameInfoEXT>
+  {
+    using Type = DebugUtilsObjectNameInfoEXT;
+  };
+
+  struct DebugUtilsMessengerCallbackDataEXT
+  {
+    using NativeType = VkDebugUtilsMessengerCallbackDataEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsMessengerCallbackDataEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT(VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_ = {}, const char * pMessageIdName_ = {}, int32_t messageIdNumber_ = {}, const char * pMessage_ = {}, uint32_t queueLabelCount_ = {}, const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pQueueLabels_ = {}, uint32_t cmdBufLabelCount_ = {}, const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pCmdBufLabels_ = {}, uint32_t objectCount_ = {}, const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pObjects_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), pMessageIdName( pMessageIdName_ ), messageIdNumber( messageIdNumber_ ), pMessage( pMessage_ ), queueLabelCount( queueLabelCount_ ), pQueueLabels( pQueueLabels_ ), cmdBufLabelCount( cmdBufLabelCount_ ), pCmdBufLabels( pCmdBufLabels_ ), objectCount( objectCount_ ), pObjects( pObjects_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT( DebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DebugUtilsMessengerCallbackDataEXT( VkDebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DebugUtilsMessengerCallbackDataEXT( *reinterpret_cast<DebugUtilsMessengerCallbackDataEXT const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    DebugUtilsMessengerCallbackDataEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_, const char * pMessageIdName_, int32_t messageIdNumber_, const char * pMessage_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT> const & queueLabels_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT> const & cmdBufLabels_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT> const & objects_ = {}, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), flags( flags_ ), pMessageIdName( pMessageIdName_ ), messageIdNumber( messageIdNumber_ ), pMessage( pMessage_ ), queueLabelCount( static_cast<uint32_t>( queueLabels_.size() ) ), pQueueLabels( queueLabels_.data() ), cmdBufLabelCount( static_cast<uint32_t>( cmdBufLabels_.size() ) ), pCmdBufLabels( cmdBufLabels_.data() ), objectCount( static_cast<uint32_t>( objects_.size() ) ), pObjects( objects_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    DebugUtilsMessengerCallbackDataEXT & operator=( DebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DebugUtilsMessengerCallbackDataEXT & operator=( VkDebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setFlags( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setPMessageIdName( const char * pMessageIdName_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pMessageIdName = pMessageIdName_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setMessageIdNumber( int32_t messageIdNumber_ ) VULKAN_HPP_NOEXCEPT
+    {
+      messageIdNumber = messageIdNumber_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setPMessage( const char * pMessage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pMessage = pMessage_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setQueueLabelCount( uint32_t queueLabelCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueLabelCount = queueLabelCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setPQueueLabels( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pQueueLabels_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pQueueLabels = pQueueLabels_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    DebugUtilsMessengerCallbackDataEXT & setQueueLabels( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT> const & queueLabels_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueLabelCount = static_cast<uint32_t>( queueLabels_.size() );
+      pQueueLabels = queueLabels_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setCmdBufLabelCount( uint32_t cmdBufLabelCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      cmdBufLabelCount = cmdBufLabelCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setPCmdBufLabels( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pCmdBufLabels_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pCmdBufLabels = pCmdBufLabels_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    DebugUtilsMessengerCallbackDataEXT & setCmdBufLabels( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT> const & cmdBufLabels_ ) VULKAN_HPP_NOEXCEPT
+    {
+      cmdBufLabelCount = static_cast<uint32_t>( cmdBufLabels_.size() );
+      pCmdBufLabels = cmdBufLabels_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setObjectCount( uint32_t objectCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      objectCount = objectCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setPObjects( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pObjects_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pObjects = pObjects_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    DebugUtilsMessengerCallbackDataEXT & setObjects( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT> const & objects_ ) VULKAN_HPP_NOEXCEPT
+    {
+      objectCount = static_cast<uint32_t>( objects_.size() );
+      pObjects = objects_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkDebugUtilsMessengerCallbackDataEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT*>( this );
+    }
+
+    operator VkDebugUtilsMessengerCallbackDataEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDebugUtilsMessengerCallbackDataEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT const &, const char * const &, int32_t const &, const char * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, pMessageIdName, messageIdNumber, pMessage, queueLabelCount, pQueueLabels, cmdBufLabelCount, pCmdBufLabels, objectCount, pObjects );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    std::strong_ordering operator<=>( DebugUtilsMessengerCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
+      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
+      if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) return cmp;
+     if ( pMessageIdName != rhs.pMessageIdName )
+        if ( auto cmp = strcmp( pMessageIdName, rhs.pMessageIdName ); cmp != 0 )
+          return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
+      if ( auto cmp = messageIdNumber <=> rhs.messageIdNumber; cmp != 0 ) return cmp;
+     if ( pMessage != rhs.pMessage )
+        if ( auto cmp = strcmp( pMessage, rhs.pMessage ); cmp != 0 )
+          return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
+      if ( auto cmp = queueLabelCount <=> rhs.queueLabelCount; cmp != 0 ) return cmp;
+      if ( auto cmp = pQueueLabels <=> rhs.pQueueLabels; cmp != 0 ) return cmp;
+      if ( auto cmp = cmdBufLabelCount <=> rhs.cmdBufLabelCount; cmp != 0 ) return cmp;
+      if ( auto cmp = pCmdBufLabels <=> rhs.pCmdBufLabels; cmp != 0 ) return cmp;
+      if ( auto cmp = objectCount <=> rhs.objectCount; cmp != 0 ) return cmp;
+      if ( auto cmp = pObjects <=> rhs.pObjects; cmp != 0 ) return cmp;
+
+      return std::strong_ordering::equivalent;
+    }
+#endif
+
+    bool operator==( DebugUtilsMessengerCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( ( pMessageIdName == rhs.pMessageIdName ) || ( strcmp( pMessageIdName, rhs.pMessageIdName ) == 0 ) )
+          && ( messageIdNumber == rhs.messageIdNumber )
+          && ( ( pMessage == rhs.pMessage ) || ( strcmp( pMessage, rhs.pMessage ) == 0 ) )
+          && ( queueLabelCount == rhs.queueLabelCount )
+          && ( pQueueLabels == rhs.pQueueLabels )
+          && ( cmdBufLabelCount == rhs.cmdBufLabelCount )
+          && ( pCmdBufLabels == rhs.pCmdBufLabels )
+          && ( objectCount == rhs.objectCount )
+          && ( pObjects == rhs.pObjects );
+    }
+
+    bool operator!=( DebugUtilsMessengerCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsMessengerCallbackDataEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags = {};
+    const char * pMessageIdName = {};
+    int32_t messageIdNumber = {};
+    const char * pMessage = {};
+    uint32_t queueLabelCount = {};
+    const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pQueueLabels = {};
+    uint32_t cmdBufLabelCount = {};
+    const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pCmdBufLabels = {};
+    uint32_t objectCount = {};
+    const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pObjects = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eDebugUtilsMessengerCallbackDataEXT>
+  {
+    using Type = DebugUtilsMessengerCallbackDataEXT;
+  };
+
+  struct DebugUtilsMessengerCreateInfoEXT
+  {
+    using NativeType = VkDebugUtilsMessengerCreateInfoEXT;
+
+    static const bool allowDuplicate = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsMessengerCreateInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DebugUtilsMessengerCreateInfoEXT(VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags_ = {}, VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity_ = {}, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType_ = {}, PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ = {}, void * pUserData_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), messageSeverity( messageSeverity_ ), messageType( messageType_ ), pfnUserCallback( pfnUserCallback_ ), pUserData( pUserData_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DebugUtilsMessengerCreateInfoEXT( DebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DebugUtilsMessengerCreateInfoEXT( VkDebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DebugUtilsMessengerCreateInfoEXT( *reinterpret_cast<DebugUtilsMessengerCreateInfoEXT const *>( &rhs ) )
+    {}
+
+
+    DebugUtilsMessengerCreateInfoEXT & operator=( DebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DebugUtilsMessengerCreateInfoEXT & operator=( VkDebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & setMessageSeverity( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity_ ) VULKAN_HPP_NOEXCEPT
+    {
+      messageSeverity = messageSeverity_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & setMessageType( VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      messageType = messageType_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & setPfnUserCallback( PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pfnUserCallback = pfnUserCallback_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pUserData = pUserData_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkDebugUtilsMessengerCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT*>( this );
+    }
+
+    operator VkDebugUtilsMessengerCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDebugUtilsMessengerCreateInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT const &, VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT const &, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT const &, PFN_vkDebugUtilsMessengerCallbackEXT const &, void * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, messageSeverity, messageType, pfnUserCallback, pUserData );
+    }
+#endif
+
+
+
+
+
+    bool operator==( DebugUtilsMessengerCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( messageSeverity == rhs.messageSeverity )
+          && ( messageType == rhs.messageType )
+          && ( pfnUserCallback == rhs.pfnUserCallback )
+          && ( pUserData == rhs.pUserData );
+#endif
+    }
+
+    bool operator!=( DebugUtilsMessengerCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsMessengerCreateInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags = {};
+    VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity = {};
+    VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType = {};
+    PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback = {};
+    void * pUserData = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eDebugUtilsMessengerCreateInfoEXT>
+  {
+    using Type = DebugUtilsMessengerCreateInfoEXT;
+  };
+
+  struct DebugUtilsObjectTagInfoEXT
+  {
+    using NativeType = VkDebugUtilsObjectTagInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsObjectTagInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DebugUtilsObjectTagInfoEXT(VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown, uint64_t objectHandle_ = {}, uint64_t tagName_ = {}, size_t tagSize_ = {}, const void * pTag_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), objectType( objectType_ ), objectHandle( objectHandle_ ), tagName( tagName_ ), tagSize( tagSize_ ), pTag( pTag_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DebugUtilsObjectTagInfoEXT( DebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DebugUtilsObjectTagInfoEXT( VkDebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DebugUtilsObjectTagInfoEXT( *reinterpret_cast<DebugUtilsObjectTagInfoEXT const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    template <typename T>
+    DebugUtilsObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_, uint64_t objectHandle_, uint64_t tagName_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & tag_, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), objectType( objectType_ ), objectHandle( objectHandle_ ), tagName( tagName_ ), tagSize( tag_.size() * sizeof(T) ), pTag( tag_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    DebugUtilsObjectTagInfoEXT & operator=( DebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DebugUtilsObjectTagInfoEXT & operator=( VkDebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::ObjectType objectType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      objectType = objectType_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setObjectHandle( uint64_t objectHandle_ ) VULKAN_HPP_NOEXCEPT
+    {
+      objectHandle = objectHandle_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setTagName( uint64_t tagName_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tagName = tagName_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setTagSize( size_t tagSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tagSize = tagSize_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setPTag( const void * pTag_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pTag = pTag_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    template <typename T>
+    DebugUtilsObjectTagInfoEXT & setTag( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & tag_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tagSize = tag_.size() * sizeof(T);
+      pTag = tag_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkDebugUtilsObjectTagInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT*>( this );
+    }
+
+    operator VkDebugUtilsObjectTagInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDebugUtilsObjectTagInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ObjectType const &, uint64_t const &, uint64_t const &, size_t const &, const void * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, objectType, objectHandle, tagName, tagSize, pTag );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DebugUtilsObjectTagInfoEXT const & ) const = default;
+#else
+    bool operator==( DebugUtilsObjectTagInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( objectType == rhs.objectType )
+          && ( objectHandle == rhs.objectHandle )
+          && ( tagName == rhs.tagName )
+          && ( tagSize == rhs.tagSize )
+          && ( pTag == rhs.pTag );
+#endif
+    }
+
+    bool operator!=( DebugUtilsObjectTagInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsObjectTagInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown;
+    uint64_t objectHandle = {};
+    uint64_t tagName = {};
+    size_t tagSize = {};
+    const void * pTag = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eDebugUtilsObjectTagInfoEXT>
+  {
+    using Type = DebugUtilsObjectTagInfoEXT;
+  };
+
+  struct DecompressMemoryRegionNV
+  {
+    using NativeType = VkDecompressMemoryRegionNV;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DecompressMemoryRegionNV(VULKAN_HPP_NAMESPACE::DeviceAddress srcAddress_ = {}, VULKAN_HPP_NAMESPACE::DeviceAddress dstAddress_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize compressedSize_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize decompressedSize_ = {}, VULKAN_HPP_NAMESPACE::MemoryDecompressionMethodFlagsNV decompressionMethod_ = {}) VULKAN_HPP_NOEXCEPT
+    : srcAddress( srcAddress_ ), dstAddress( dstAddress_ ), compressedSize( compressedSize_ ), decompressedSize( decompressedSize_ ), decompressionMethod( decompressionMethod_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DecompressMemoryRegionNV( DecompressMemoryRegionNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DecompressMemoryRegionNV( VkDecompressMemoryRegionNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DecompressMemoryRegionNV( *reinterpret_cast<DecompressMemoryRegionNV const *>( &rhs ) )
+    {}
+
+
+    DecompressMemoryRegionNV & operator=( DecompressMemoryRegionNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DecompressMemoryRegionNV & operator=( VkDecompressMemoryRegionNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DecompressMemoryRegionNV & setSrcAddress( VULKAN_HPP_NAMESPACE::DeviceAddress srcAddress_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcAddress = srcAddress_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DecompressMemoryRegionNV & setDstAddress( VULKAN_HPP_NAMESPACE::DeviceAddress dstAddress_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstAddress = dstAddress_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DecompressMemoryRegionNV & setCompressedSize( VULKAN_HPP_NAMESPACE::DeviceSize compressedSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      compressedSize = compressedSize_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DecompressMemoryRegionNV & setDecompressedSize( VULKAN_HPP_NAMESPACE::DeviceSize decompressedSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      decompressedSize = decompressedSize_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DecompressMemoryRegionNV & setDecompressionMethod( VULKAN_HPP_NAMESPACE::MemoryDecompressionMethodFlagsNV decompressionMethod_ ) VULKAN_HPP_NOEXCEPT
+    {
+      decompressionMethod = decompressionMethod_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkDecompressMemoryRegionNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDecompressMemoryRegionNV*>( this );
+    }
+
+    operator VkDecompressMemoryRegionNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDecompressMemoryRegionNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::DeviceAddress const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::MemoryDecompressionMethodFlagsNV const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( srcAddress, dstAddress, compressedSize, decompressedSize, decompressionMethod );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DecompressMemoryRegionNV const & ) const = default;
+#else
+    bool operator==( DecompressMemoryRegionNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( srcAddress == rhs.srcAddress )
+          && ( dstAddress == rhs.dstAddress )
+          && ( compressedSize == rhs.compressedSize )
+          && ( decompressedSize == rhs.decompressedSize )
+          && ( decompressionMethod == rhs.decompressionMethod );
+#endif
+    }
+
+    bool operator!=( DecompressMemoryRegionNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::DeviceAddress srcAddress = {};
+    VULKAN_HPP_NAMESPACE::DeviceAddress dstAddress = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize compressedSize = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize decompressedSize = {};
+    VULKAN_HPP_NAMESPACE::MemoryDecompressionMethodFlagsNV decompressionMethod = {};
+
+  };
+
+  struct DedicatedAllocationBufferCreateInfoNV
+  {
+    using NativeType = VkDedicatedAllocationBufferCreateInfoNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDedicatedAllocationBufferCreateInfoNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DedicatedAllocationBufferCreateInfoNV(VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), dedicatedAllocation( dedicatedAllocation_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DedicatedAllocationBufferCreateInfoNV( DedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DedicatedAllocationBufferCreateInfoNV( VkDedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DedicatedAllocationBufferCreateInfoNV( *reinterpret_cast<DedicatedAllocationBufferCreateInfoNV const *>( &rhs ) )
+    {}
+
+
+    DedicatedAllocationBufferCreateInfoNV & operator=( DedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DedicatedAllocationBufferCreateInfoNV & operator=( VkDedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationBufferCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationBufferCreateInfoNV & setDedicatedAllocation( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dedicatedAllocation = dedicatedAllocation_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkDedicatedAllocationBufferCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDedicatedAllocationBufferCreateInfoNV*>( this );
+    }
+
+    operator VkDedicatedAllocationBufferCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDedicatedAllocationBufferCreateInfoNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, dedicatedAllocation );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DedicatedAllocationBufferCreateInfoNV const & ) const = default;
+#else
+    bool operator==( DedicatedAllocationBufferCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( dedicatedAllocation == rhs.dedicatedAllocation );
+#endif
+    }
+
+    bool operator!=( DedicatedAllocationBufferCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationBufferCreateInfoNV;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eDedicatedAllocationBufferCreateInfoNV>
+  {
+    using Type = DedicatedAllocationBufferCreateInfoNV;
+  };
+
+  struct DedicatedAllocationImageCreateInfoNV
+  {
+    using NativeType = VkDedicatedAllocationImageCreateInfoNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDedicatedAllocationImageCreateInfoNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DedicatedAllocationImageCreateInfoNV(VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), dedicatedAllocation( dedicatedAllocation_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DedicatedAllocationImageCreateInfoNV( DedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DedicatedAllocationImageCreateInfoNV( VkDedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DedicatedAllocationImageCreateInfoNV( *reinterpret_cast<DedicatedAllocationImageCreateInfoNV const *>( &rhs ) )
+    {}
+
+
+    DedicatedAllocationImageCreateInfoNV & operator=( DedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DedicatedAllocationImageCreateInfoNV & operator=( VkDedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DedicatedAllocationImageCreateInfoNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationImageCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationImageCreateInfoNV & setDedicatedAllocation( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dedicatedAllocation = dedicatedAllocation_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkDedicatedAllocationImageCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDedicatedAllocationImageCreateInfoNV*>( this );
+    }
+
+    operator VkDedicatedAllocationImageCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDedicatedAllocationImageCreateInfoNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, dedicatedAllocation );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DedicatedAllocationImageCreateInfoNV const & ) const = default;
+#else
+    bool operator==( DedicatedAllocationImageCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( dedicatedAllocation == rhs.dedicatedAllocation );
+#endif
+    }
+
+    bool operator!=( DedicatedAllocationImageCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationImageCreateInfoNV;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eDedicatedAllocationImageCreateInfoNV>
+  {
+    using Type = DedicatedAllocationImageCreateInfoNV;
+  };
+
+  struct DedicatedAllocationMemoryAllocateInfoNV
+  {
+    using NativeType = VkDedicatedAllocationMemoryAllocateInfoNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDedicatedAllocationMemoryAllocateInfoNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DedicatedAllocationMemoryAllocateInfoNV(VULKAN_HPP_NAMESPACE::Image image_ = {}, VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), image( image_ ), buffer( buffer_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DedicatedAllocationMemoryAllocateInfoNV( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DedicatedAllocationMemoryAllocateInfoNV( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DedicatedAllocationMemoryAllocateInfoNV( *reinterpret_cast<DedicatedAllocationMemoryAllocateInfoNV const *>( &rhs ) )
+    {}
+
+
+    DedicatedAllocationMemoryAllocateInfoNV & operator=( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DedicatedAllocationMemoryAllocateInfoNV & operator=( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationMemoryAllocateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationMemoryAllocateInfoNV & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
+    {
+      image = image_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationMemoryAllocateInfoNV & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkDedicatedAllocationMemoryAllocateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDedicatedAllocationMemoryAllocateInfoNV*>( this );
+    }
+
+    operator VkDedicatedAllocationMemoryAllocateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDedicatedAllocationMemoryAllocateInfoNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Image const &, VULKAN_HPP_NAMESPACE::Buffer const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, image, buffer );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DedicatedAllocationMemoryAllocateInfoNV const & ) const = default;
+#else
+    bool operator==( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( image == rhs.image )
+          && ( buffer == rhs.buffer );
+#endif
+    }
+
+    bool operator!=( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationMemoryAllocateInfoNV;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Image image = {};
+    VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eDedicatedAllocationMemoryAllocateInfoNV> // maps the sType enum value back to its C++ struct type
+  {
+    using Type = DedicatedAllocationMemoryAllocateInfoNV;
+  };
+
+  struct MemoryBarrier2 // C++ wrapper around VkMemoryBarrier2; the cast operators below rely on identical layout with the C struct
+  {
+    using NativeType = VkMemoryBarrier2;
+
+    static const bool allowDuplicate = false; // NOTE(review): presumably disallows duplicate entries of this sType in a structure chain — confirm
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryBarrier2;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR MemoryBarrier2(VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), srcStageMask( srcStageMask_ ), srcAccessMask( srcAccessMask_ ), dstStageMask( dstStageMask_ ), dstAccessMask( dstAccessMask_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MemoryBarrier2( MemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryBarrier2( VkMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT // construct from the C struct by bitwise reinterpretation
+      : MemoryBarrier2( *reinterpret_cast<MemoryBarrier2 const *>( &rhs ) )
+    {}
+
+
+    MemoryBarrier2 & operator=( MemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    MemoryBarrier2 & operator=( VkMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT // assign from the C struct by bitwise reinterpretation
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryBarrier2 const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT // chainable setters: each returns *this
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcStageMask = srcStageMask_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcAccessMask = srcAccessMask_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstStageMask = dstStageMask_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstAccessMask = dstAccessMask_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkMemoryBarrier2 const &() const VULKAN_HPP_NOEXCEPT // bitwise view as the C struct
+    {
+      return *reinterpret_cast<const VkMemoryBarrier2*>( this );
+    }
+
+    operator VkMemoryBarrier2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryBarrier2*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 const &, VULKAN_HPP_NAMESPACE::AccessFlags2 const &, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 const &, VULKAN_HPP_NAMESPACE::AccessFlags2 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT // tuple of references over all members, in declaration order
+    {
+      return std::tie( sType, pNext, srcStageMask, srcAccessMask, dstStageMask, dstAccessMask );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( MemoryBarrier2 const & ) const = default;
+#else
+    bool operator==( MemoryBarrier2 const & rhs ) const VULKAN_HPP_NOEXCEPT // memberwise equality (tuple-based when reflection is enabled)
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( srcStageMask == rhs.srcStageMask )
+          && ( srcAccessMask == rhs.srcAccessMask )
+          && ( dstStageMask == rhs.dstStageMask )
+          && ( dstAccessMask == rhs.dstAccessMask );
+#endif
+    }
+
+    bool operator!=( MemoryBarrier2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryBarrier2;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask = {};
+    VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask = {};
+    VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask = {};
+    VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eMemoryBarrier2> // maps the sType enum value back to its C++ struct type
+  {
+    using Type = MemoryBarrier2;
+  };
+  using MemoryBarrier2KHR = MemoryBarrier2; // KHR-suffixed alias for the same structure
+
+  struct ImageSubresourceRange // C++ wrapper around VkImageSubresourceRange; plain struct (no sType/pNext), layout-compatible with the C struct
+  {
+    using NativeType = VkImageSubresourceRange;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ImageSubresourceRange(VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, uint32_t baseMipLevel_ = {}, uint32_t levelCount_ = {}, uint32_t baseArrayLayer_ = {}, uint32_t layerCount_ = {}) VULKAN_HPP_NOEXCEPT
+    : aspectMask( aspectMask_ ), baseMipLevel( baseMipLevel_ ), levelCount( levelCount_ ), baseArrayLayer( baseArrayLayer_ ), layerCount( layerCount_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageSubresourceRange( ImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageSubresourceRange( VkImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT // construct from the C struct by bitwise reinterpretation
+      : ImageSubresourceRange( *reinterpret_cast<ImageSubresourceRange const *>( &rhs ) )
+    {}
+
+
+    ImageSubresourceRange & operator=( ImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ImageSubresourceRange & operator=( VkImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT // assign from the C struct by bitwise reinterpretation
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSubresourceRange const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT // chainable setters: each returns *this
+    {
+      aspectMask = aspectMask_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange & setBaseMipLevel( uint32_t baseMipLevel_ ) VULKAN_HPP_NOEXCEPT
+    {
+      baseMipLevel = baseMipLevel_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange & setLevelCount( uint32_t levelCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      levelCount = levelCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      baseArrayLayer = baseArrayLayer_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      layerCount = layerCount_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkImageSubresourceRange const &() const VULKAN_HPP_NOEXCEPT // bitwise view as the C struct
+    {
+      return *reinterpret_cast<const VkImageSubresourceRange*>( this );
+    }
+
+    operator VkImageSubresourceRange &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageSubresourceRange*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::ImageAspectFlags const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT // tuple of references over all members, in declaration order
+    {
+      return std::tie( aspectMask, baseMipLevel, levelCount, baseArrayLayer, layerCount );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ImageSubresourceRange const & ) const = default;
+#else
+    bool operator==( ImageSubresourceRange const & rhs ) const VULKAN_HPP_NOEXCEPT // memberwise equality (tuple-based when reflection is enabled)
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( aspectMask == rhs.aspectMask )
+          && ( baseMipLevel == rhs.baseMipLevel )
+          && ( levelCount == rhs.levelCount )
+          && ( baseArrayLayer == rhs.baseArrayLayer )
+          && ( layerCount == rhs.layerCount );
+#endif
+    }
+
+    bool operator!=( ImageSubresourceRange const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
+    uint32_t baseMipLevel = {};
+    uint32_t levelCount = {};
+    uint32_t baseArrayLayer = {};
+    uint32_t layerCount = {};
+
+  };
+
+  struct ImageMemoryBarrier2 // C++ wrapper around VkImageMemoryBarrier2; the cast operators below rely on identical layout with the C struct
+  {
+    using NativeType = VkImageMemoryBarrier2;
+
+    static const bool allowDuplicate = false; // NOTE(review): presumably disallows duplicate entries of this sType in a structure chain — confirm
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageMemoryBarrier2;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ImageMemoryBarrier2(VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, uint32_t srcQueueFamilyIndex_ = {}, uint32_t dstQueueFamilyIndex_ = {}, VULKAN_HPP_NAMESPACE::Image image_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), srcStageMask( srcStageMask_ ), srcAccessMask( srcAccessMask_ ), dstStageMask( dstStageMask_ ), dstAccessMask( dstAccessMask_ ), oldLayout( oldLayout_ ), newLayout( newLayout_ ), srcQueueFamilyIndex( srcQueueFamilyIndex_ ), dstQueueFamilyIndex( dstQueueFamilyIndex_ ), image( image_ ), subresourceRange( subresourceRange_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageMemoryBarrier2( ImageMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageMemoryBarrier2( VkImageMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT // construct from the C struct by bitwise reinterpretation
+      : ImageMemoryBarrier2( *reinterpret_cast<ImageMemoryBarrier2 const *>( &rhs ) )
+    {}
+
+
+    ImageMemoryBarrier2 & operator=( ImageMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ImageMemoryBarrier2 & operator=( VkImageMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT // assign from the C struct by bitwise reinterpretation
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2 const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT // chainable setters: each returns *this
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcStageMask = srcStageMask_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcAccessMask = srcAccessMask_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstStageMask = dstStageMask_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstAccessMask = dstAccessMask_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setOldLayout( VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      oldLayout = oldLayout_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setNewLayout( VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      newLayout = newLayout_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcQueueFamilyIndex = srcQueueFamilyIndex_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstQueueFamilyIndex = dstQueueFamilyIndex_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
+    {
+      image = image_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subresourceRange = subresourceRange_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkImageMemoryBarrier2 const &() const VULKAN_HPP_NOEXCEPT // bitwise view as the C struct
+    {
+      return *reinterpret_cast<const VkImageMemoryBarrier2*>( this );
+    }
+
+    operator VkImageMemoryBarrier2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageMemoryBarrier2*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 const &, VULKAN_HPP_NAMESPACE::AccessFlags2 const &, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 const &, VULKAN_HPP_NAMESPACE::AccessFlags2 const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Image const &, VULKAN_HPP_NAMESPACE::ImageSubresourceRange const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT // tuple of references over all members, in declaration order
+    {
+      return std::tie( sType, pNext, srcStageMask, srcAccessMask, dstStageMask, dstAccessMask, oldLayout, newLayout, srcQueueFamilyIndex, dstQueueFamilyIndex, image, subresourceRange );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ImageMemoryBarrier2 const & ) const = default;
+#else
+    bool operator==( ImageMemoryBarrier2 const & rhs ) const VULKAN_HPP_NOEXCEPT // memberwise equality (tuple-based when reflection is enabled)
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( srcStageMask == rhs.srcStageMask )
+          && ( srcAccessMask == rhs.srcAccessMask )
+          && ( dstStageMask == rhs.dstStageMask )
+          && ( dstAccessMask == rhs.dstAccessMask )
+          && ( oldLayout == rhs.oldLayout )
+          && ( newLayout == rhs.newLayout )
+          && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex )
+          && ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex )
+          && ( image == rhs.image )
+          && ( subresourceRange == rhs.subresourceRange );
+#endif
+    }
+
+    bool operator!=( ImageMemoryBarrier2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryBarrier2;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask = {};
+    VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask = {};
+    VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask = {};
+    VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask = {};
+    VULKAN_HPP_NAMESPACE::ImageLayout oldLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+    VULKAN_HPP_NAMESPACE::ImageLayout newLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+    uint32_t srcQueueFamilyIndex = {};
+    uint32_t dstQueueFamilyIndex = {};
+    VULKAN_HPP_NAMESPACE::Image image = {};
+    VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eImageMemoryBarrier2> // maps the sType enum value back to its C++ struct type
+  {
+    using Type = ImageMemoryBarrier2;
+  };
+  using ImageMemoryBarrier2KHR = ImageMemoryBarrier2; // KHR-suffixed alias for the same structure
+
+  struct DependencyInfo // C++ wrapper around VkDependencyInfo; bundles memory/buffer/image barrier arrays as count + pointer pairs
+  {
+    using NativeType = VkDependencyInfo;
+
+    static const bool allowDuplicate = false; // NOTE(review): presumably disallows duplicate entries of this sType in a structure chain — confirm
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDependencyInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DependencyInfo(VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = {}, uint32_t memoryBarrierCount_ = {}, const VULKAN_HPP_NAMESPACE::MemoryBarrier2 * pMemoryBarriers_ = {}, uint32_t bufferMemoryBarrierCount_ = {}, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2 * pBufferMemoryBarriers_ = {}, uint32_t imageMemoryBarrierCount_ = {}, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2 * pImageMemoryBarriers_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), dependencyFlags( dependencyFlags_ ), memoryBarrierCount( memoryBarrierCount_ ), pMemoryBarriers( pMemoryBarriers_ ), bufferMemoryBarrierCount( bufferMemoryBarrierCount_ ), pBufferMemoryBarriers( pBufferMemoryBarriers_ ), imageMemoryBarrierCount( imageMemoryBarrierCount_ ), pImageMemoryBarriers( pImageMemoryBarriers_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DependencyInfo( DependencyInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DependencyInfo( VkDependencyInfo const & rhs ) VULKAN_HPP_NOEXCEPT // construct from the C struct by bitwise reinterpretation
+      : DependencyInfo( *reinterpret_cast<DependencyInfo const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    DependencyInfo( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MemoryBarrier2> const & memoryBarriers_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2> const & bufferMemoryBarriers_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2> const & imageMemoryBarriers_ = {}, const void * pNext_ = nullptr ) // array-proxy overload: derives each count/pointer pair from one range argument
+    : pNext( pNext_ ), dependencyFlags( dependencyFlags_ ), memoryBarrierCount( static_cast<uint32_t>( memoryBarriers_.size() ) ), pMemoryBarriers( memoryBarriers_.data() ), bufferMemoryBarrierCount( static_cast<uint32_t>( bufferMemoryBarriers_.size() ) ), pBufferMemoryBarriers( bufferMemoryBarriers_.data() ), imageMemoryBarrierCount( static_cast<uint32_t>( imageMemoryBarriers_.size() ) ), pImageMemoryBarriers( imageMemoryBarriers_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    DependencyInfo & operator=( DependencyInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DependencyInfo & operator=( VkDependencyInfo const & rhs ) VULKAN_HPP_NOEXCEPT // assign from the C struct by bitwise reinterpretation
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DependencyInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT // chainable setters: each returns *this
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dependencyFlags = dependencyFlags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setMemoryBarrierCount( uint32_t memoryBarrierCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memoryBarrierCount = memoryBarrierCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setPMemoryBarriers( const VULKAN_HPP_NAMESPACE::MemoryBarrier2 * pMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pMemoryBarriers = pMemoryBarriers_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    DependencyInfo & setMemoryBarriers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MemoryBarrier2> const & memoryBarriers_ ) VULKAN_HPP_NOEXCEPT // sets count and pointer together from one range
+    {
+      memoryBarrierCount = static_cast<uint32_t>( memoryBarriers_.size() );
+      pMemoryBarriers = memoryBarriers_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setBufferMemoryBarrierCount( uint32_t bufferMemoryBarrierCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferMemoryBarrierCount = bufferMemoryBarrierCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setPBufferMemoryBarriers( const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2 * pBufferMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pBufferMemoryBarriers = pBufferMemoryBarriers_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    DependencyInfo & setBufferMemoryBarriers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2> const & bufferMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT // sets count and pointer together from one range
+    {
+      bufferMemoryBarrierCount = static_cast<uint32_t>( bufferMemoryBarriers_.size() );
+      pBufferMemoryBarriers = bufferMemoryBarriers_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setImageMemoryBarrierCount( uint32_t imageMemoryBarrierCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageMemoryBarrierCount = imageMemoryBarrierCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setPImageMemoryBarriers( const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2 * pImageMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pImageMemoryBarriers = pImageMemoryBarriers_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    DependencyInfo & setImageMemoryBarriers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2> const & imageMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT // sets count and pointer together from one range
+    {
+      imageMemoryBarrierCount = static_cast<uint32_t>( imageMemoryBarriers_.size() );
+      pImageMemoryBarriers = imageMemoryBarriers_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkDependencyInfo const &() const VULKAN_HPP_NOEXCEPT // bitwise view as the C struct
+    {
+      return *reinterpret_cast<const VkDependencyInfo*>( this );
+    }
+
+    operator VkDependencyInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDependencyInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DependencyFlags const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::MemoryBarrier2 * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2 * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2 * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT // tuple of references over all members, in declaration order
+    {
+      return std::tie( sType, pNext, dependencyFlags, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DependencyInfo const & ) const = default;
+#else
+    bool operator==( DependencyInfo const & rhs ) const VULKAN_HPP_NOEXCEPT // memberwise equality; compares barrier pointers, not pointed-to arrays
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( dependencyFlags == rhs.dependencyFlags )
+          && ( memoryBarrierCount == rhs.memoryBarrierCount )
+          && ( pMemoryBarriers == rhs.pMemoryBarriers )
+          && ( bufferMemoryBarrierCount == rhs.bufferMemoryBarrierCount )
+          && ( pBufferMemoryBarriers == rhs.pBufferMemoryBarriers )
+          && ( imageMemoryBarrierCount == rhs.imageMemoryBarrierCount )
+          && ( pImageMemoryBarriers == rhs.pImageMemoryBarriers );
+#endif
+    }
+
+    bool operator!=( DependencyInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDependencyInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags = {};
+    uint32_t memoryBarrierCount = {};
+    const VULKAN_HPP_NAMESPACE::MemoryBarrier2 * pMemoryBarriers = {};
+    uint32_t bufferMemoryBarrierCount = {};
+    const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2 * pBufferMemoryBarriers = {};
+    uint32_t imageMemoryBarrierCount = {};
+    const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2 * pImageMemoryBarriers = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eDependencyInfo> // maps the sType enum value back to its C++ struct type
+  {
+    using Type = DependencyInfo;
+  };
+  using DependencyInfoKHR = DependencyInfo; // KHR-suffixed alias for the same structure
+
+  struct DescriptorAddressInfoEXT // C++ wrapper around VkDescriptorAddressInfoEXT; note pNext here is a mutable void *
+  {
+    using NativeType = VkDescriptorAddressInfoEXT;
+
+    static const bool allowDuplicate = false; // NOTE(review): presumably disallows duplicate entries of this sType in a structure chain — confirm
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorAddressInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DescriptorAddressInfoEXT(VULKAN_HPP_NAMESPACE::DeviceAddress address_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize range_ = {}, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), address( address_ ), range( range_ ), format( format_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DescriptorAddressInfoEXT( DescriptorAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DescriptorAddressInfoEXT( VkDescriptorAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT // construct from the C struct by bitwise reinterpretation
+      : DescriptorAddressInfoEXT( *reinterpret_cast<DescriptorAddressInfoEXT const *>( &rhs ) )
+    {}
+
+
+    DescriptorAddressInfoEXT & operator=( DescriptorAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DescriptorAddressInfoEXT & operator=( VkDescriptorAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT // assign from the C struct by bitwise reinterpretation
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DescriptorAddressInfoEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT // chainable setters: each returns *this
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorAddressInfoEXT & setAddress( VULKAN_HPP_NAMESPACE::DeviceAddress address_ ) VULKAN_HPP_NOEXCEPT
+    {
+      address = address_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorAddressInfoEXT & setRange( VULKAN_HPP_NAMESPACE::DeviceSize range_ ) VULKAN_HPP_NOEXCEPT
+    {
+      range = range_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorAddressInfoEXT & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
+    {
+      format = format_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkDescriptorAddressInfoEXT const &() const VULKAN_HPP_NOEXCEPT // bitwise view as the C struct
+    {
+      return *reinterpret_cast<const VkDescriptorAddressInfoEXT*>( this );
+    }
+
+    operator VkDescriptorAddressInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorAddressInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::Format const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT // tuple of references over all members, in declaration order
+    {
+      return std::tie( sType, pNext, address, range, format );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DescriptorAddressInfoEXT const & ) const = default;
+#else
+    bool operator==( DescriptorAddressInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT // memberwise equality (tuple-based when reflection is enabled)
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( address == rhs.address )
+          && ( range == rhs.range )
+          && ( format == rhs.format );
+#endif
+    }
+
+    bool operator!=( DescriptorAddressInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorAddressInfoEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceAddress address = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize range = {};
+    VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eDescriptorAddressInfoEXT> // maps the sType enum value back to its C++ struct type
+  {
+    using Type = DescriptorAddressInfoEXT;
+  };
+
+  struct DescriptorBufferBindingInfoEXT
+  {
+    using NativeType = VkDescriptorBufferBindingInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorBufferBindingInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DescriptorBufferBindingInfoEXT(VULKAN_HPP_NAMESPACE::DeviceAddress address_ = {}, VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), address( address_ ), usage( usage_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DescriptorBufferBindingInfoEXT( DescriptorBufferBindingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DescriptorBufferBindingInfoEXT( VkDescriptorBufferBindingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DescriptorBufferBindingInfoEXT( *reinterpret_cast<DescriptorBufferBindingInfoEXT const *>( &rhs ) )
+    {}
+
+
+    DescriptorBufferBindingInfoEXT & operator=( DescriptorBufferBindingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DescriptorBufferBindingInfoEXT & operator=( VkDescriptorBufferBindingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorBufferBindingInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DescriptorBufferBindingInfoEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorBufferBindingInfoEXT & setAddress( VULKAN_HPP_NAMESPACE::DeviceAddress address_ ) VULKAN_HPP_NOEXCEPT
+    {
+      address = address_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorBufferBindingInfoEXT & setUsage( VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      usage = usage_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkDescriptorBufferBindingInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorBufferBindingInfoEXT*>( this );
+    }
+
+    operator VkDescriptorBufferBindingInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorBufferBindingInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &, VULKAN_HPP_NAMESPACE::BufferUsageFlags const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, address, usage );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DescriptorBufferBindingInfoEXT const & ) const = default;
+#else
+    bool operator==( DescriptorBufferBindingInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( address == rhs.address )
+          && ( usage == rhs.usage );
+#endif
+    }
+
+    bool operator!=( DescriptorBufferBindingInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorBufferBindingInfoEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceAddress address = {};
+    VULKAN_HPP_NAMESPACE::BufferUsageFlags usage = {};
+
+  };
+
+  // Trait specialization: maps the StructureType enumerant
+  // eDescriptorBufferBindingInfoEXT to its C++ wrapper struct.
+  template <>
+  struct CppType<StructureType, StructureType::eDescriptorBufferBindingInfoEXT>
+  {
+    using Type = DescriptorBufferBindingInfoEXT;
+  };
+
+  // C++ wrapper for VkDescriptorBufferBindingPushDescriptorBufferHandleEXT:
+  // a chained struct (sType/pNext) carrying a single Buffer handle.
+  // NOTE(review): generated code — the member order below must keep mirroring
+  // the C struct, because the conversion operators reinterpret_cast between
+  // the two representations.
+  struct DescriptorBufferBindingPushDescriptorBufferHandleEXT
+  {
+    using NativeType = VkDescriptorBufferBindingPushDescriptorBufferHandleEXT;
+
+    // This sType must appear at most once in a structure chain.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorBufferBindingPushDescriptorBufferHandleEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; sType is fixed by the member initializer below.
+VULKAN_HPP_CONSTEXPR DescriptorBufferBindingPushDescriptorBufferHandleEXT(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), buffer( buffer_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DescriptorBufferBindingPushDescriptorBufferHandleEXT( DescriptorBufferBindingPushDescriptorBufferHandleEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; assumes identical layout (see note above).
+    DescriptorBufferBindingPushDescriptorBufferHandleEXT( VkDescriptorBufferBindingPushDescriptorBufferHandleEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DescriptorBufferBindingPushDescriptorBufferHandleEXT( *reinterpret_cast<DescriptorBufferBindingPushDescriptorBufferHandleEXT const *>( &rhs ) )
+    {}
+
+
+    DescriptorBufferBindingPushDescriptorBufferHandleEXT & operator=( DescriptorBufferBindingPushDescriptorBufferHandleEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assignment from the C struct, again via layout-compatible reinterpretation.
+    DescriptorBufferBindingPushDescriptorBufferHandleEXT & operator=( VkDescriptorBufferBindingPushDescriptorBufferHandleEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorBufferBindingPushDescriptorBufferHandleEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each returns *this so calls can be chained.
+    VULKAN_HPP_CONSTEXPR_14 DescriptorBufferBindingPushDescriptorBufferHandleEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorBufferBindingPushDescriptorBufferHandleEXT & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy views of this object as the native C struct.
+    operator VkDescriptorBufferBindingPushDescriptorBufferHandleEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorBufferBindingPushDescriptorBufferHandleEXT*>( this );
+    }
+
+    operator VkDescriptorBufferBindingPushDescriptorBufferHandleEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorBufferBindingPushDescriptorBufferHandleEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Tuple of references to all members, in declaration order; used by the
+    // reflection-based operator== below.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Buffer const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, buffer );
+    }
+#endif
+
+
+    // Memberwise comparison: defaulted <=> when available, otherwise explicit
+    // operator== / operator!= (via reflect() when reflection is enabled).
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DescriptorBufferBindingPushDescriptorBufferHandleEXT const & ) const = default;
+#else
+    bool operator==( DescriptorBufferBindingPushDescriptorBufferHandleEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( buffer == rhs.buffer );
+#endif
+    }
+
+    bool operator!=( DescriptorBufferBindingPushDescriptorBufferHandleEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Public members mirror the C struct, in declaration order.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorBufferBindingPushDescriptorBufferHandleEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+
+  };
+
+  // Trait specialization: maps the StructureType enumerant to this wrapper.
+  template <>
+  struct CppType<StructureType, StructureType::eDescriptorBufferBindingPushDescriptorBufferHandleEXT>
+  {
+    using Type = DescriptorBufferBindingPushDescriptorBufferHandleEXT;
+  };
+
+  // C++ wrapper for VkDescriptorBufferInfo: identifies a buffer region
+  // (buffer handle, byte offset, byte range) referenced by a descriptor.
+  // Plain struct — no sType/pNext, so it is not part of a structure chain.
+  // NOTE(review): generated code — member order must keep mirroring the C
+  // struct; the conversion operators reinterpret_cast between the two.
+  struct DescriptorBufferInfo
+  {
+    using NativeType = VkDescriptorBufferInfo;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; all members default to zero-initialized handles/sizes.
+VULKAN_HPP_CONSTEXPR DescriptorBufferInfo(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize range_ = {}) VULKAN_HPP_NOEXCEPT
+    : buffer( buffer_ ), offset( offset_ ), range( range_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DescriptorBufferInfo( DescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; assumes identical layout (see note above).
+    DescriptorBufferInfo( VkDescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DescriptorBufferInfo( *reinterpret_cast<DescriptorBufferInfo const *>( &rhs ) )
+    {}
+
+
+    DescriptorBufferInfo & operator=( DescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assignment from the C struct via layout-compatible reinterpretation.
+    DescriptorBufferInfo & operator=( VkDescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorBufferInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each returns *this so calls can be chained.
+    VULKAN_HPP_CONSTEXPR_14 DescriptorBufferInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorBufferInfo & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorBufferInfo & setRange( VULKAN_HPP_NAMESPACE::DeviceSize range_ ) VULKAN_HPP_NOEXCEPT
+    {
+      range = range_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy views of this object as the native C struct.
+    operator VkDescriptorBufferInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorBufferInfo*>( this );
+    }
+
+    operator VkDescriptorBufferInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorBufferInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Tuple of references to all members, in declaration order.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::Buffer const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( buffer, offset, range );
+    }
+#endif
+
+
+    // Memberwise comparison: defaulted <=> when available, otherwise explicit.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DescriptorBufferInfo const & ) const = default;
+#else
+    bool operator==( DescriptorBufferInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( buffer == rhs.buffer )
+          && ( offset == rhs.offset )
+          && ( range == rhs.range );
+#endif
+    }
+
+    bool operator!=( DescriptorBufferInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Public members mirror the C struct, in declaration order.
+    public:
+    VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize range = {};
+
+  };
+
+  // C++ wrapper for VkDescriptorImageInfo: identifies an image resource for a
+  // descriptor (sampler, image view, and the layout the image will be in).
+  // Plain struct — no sType/pNext, so it is not part of a structure chain.
+  // NOTE(review): generated code — member order must keep mirroring the C
+  // struct; the conversion operators reinterpret_cast between the two.
+  struct DescriptorImageInfo
+  {
+    using NativeType = VkDescriptorImageInfo;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; imageLayout defaults to eUndefined.
+VULKAN_HPP_CONSTEXPR DescriptorImageInfo(VULKAN_HPP_NAMESPACE::Sampler sampler_ = {}, VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined) VULKAN_HPP_NOEXCEPT
+    : sampler( sampler_ ), imageView( imageView_ ), imageLayout( imageLayout_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DescriptorImageInfo( DescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; assumes identical layout (see note above).
+    DescriptorImageInfo( VkDescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DescriptorImageInfo( *reinterpret_cast<DescriptorImageInfo const *>( &rhs ) )
+    {}
+
+
+    DescriptorImageInfo & operator=( DescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assignment from the C struct via layout-compatible reinterpretation.
+    DescriptorImageInfo & operator=( VkDescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorImageInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each returns *this so calls can be chained.
+    VULKAN_HPP_CONSTEXPR_14 DescriptorImageInfo & setSampler( VULKAN_HPP_NAMESPACE::Sampler sampler_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sampler = sampler_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorImageInfo & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageView = imageView_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorImageInfo & setImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageLayout = imageLayout_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy views of this object as the native C struct.
+    operator VkDescriptorImageInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorImageInfo*>( this );
+    }
+
+    operator VkDescriptorImageInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorImageInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Tuple of references to all members, in declaration order.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::Sampler const &, VULKAN_HPP_NAMESPACE::ImageView const &, VULKAN_HPP_NAMESPACE::ImageLayout const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sampler, imageView, imageLayout );
+    }
+#endif
+
+
+    // Memberwise comparison: defaulted <=> when available, otherwise explicit.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DescriptorImageInfo const & ) const = default;
+#else
+    bool operator==( DescriptorImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sampler == rhs.sampler )
+          && ( imageView == rhs.imageView )
+          && ( imageLayout == rhs.imageLayout );
+#endif
+    }
+
+    bool operator!=( DescriptorImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Public members mirror the C struct, in declaration order.
+    public:
+    VULKAN_HPP_NAMESPACE::Sampler sampler = {};
+    VULKAN_HPP_NAMESPACE::ImageView imageView = {};
+    VULKAN_HPP_NAMESPACE::ImageLayout imageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+
+  };
+
+  // C++ wrapper for the VkDescriptorDataEXT union: exactly one member is
+  // active at a time; which one is determined externally (by the descriptor
+  // type in the containing DescriptorGetInfoEXT — TODO confirm against spec).
+  union DescriptorDataEXT
+  {
+    using NativeType = VkDescriptorDataEXT;
+#if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )
+
+    // One constructor per *distinct* member type; members sharing a pointer
+    // type (e.g. all DescriptorImageInfo* members) share a constructor that
+    // initializes the first such member.
+    VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT( const VULKAN_HPP_NAMESPACE::Sampler * pSampler_ = {} )
+      : pSampler( pSampler_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT( const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pDescriptorImageInfo_ )
+      : pCombinedImageSampler( pDescriptorImageInfo_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT( const VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT * pDescriptorAddressInfoEXT_ )
+      : pUniformTexelBuffer( pDescriptorAddressInfoEXT_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT( VULKAN_HPP_NAMESPACE::DeviceAddress accelerationStructure_ )
+      : accelerationStructure( accelerationStructure_ )
+    {}
+#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/
+
+#if !defined( VULKAN_HPP_NO_UNION_SETTERS )
+    // Fluent setters: unlike the constructors, there is one setter per member,
+    // so every union alternative can be selected by name.
+    VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & setPSampler( const VULKAN_HPP_NAMESPACE::Sampler * pSampler_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSampler = pSampler_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & setPCombinedImageSampler( const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pCombinedImageSampler_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pCombinedImageSampler = pCombinedImageSampler_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & setPInputAttachmentImage( const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pInputAttachmentImage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pInputAttachmentImage = pInputAttachmentImage_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & setPSampledImage( const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pSampledImage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSampledImage = pSampledImage_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & setPStorageImage( const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pStorageImage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pStorageImage = pStorageImage_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & setPUniformTexelBuffer( const VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT * pUniformTexelBuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pUniformTexelBuffer = pUniformTexelBuffer_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & setPStorageTexelBuffer( const VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT * pStorageTexelBuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pStorageTexelBuffer = pStorageTexelBuffer_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & setPUniformBuffer( const VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT * pUniformBuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pUniformBuffer = pUniformBuffer_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & setPStorageBuffer( const VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT * pStorageBuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pStorageBuffer = pStorageBuffer_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & setAccelerationStructure( VULKAN_HPP_NAMESPACE::DeviceAddress accelerationStructure_ ) VULKAN_HPP_NOEXCEPT
+    {
+      accelerationStructure = accelerationStructure_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_UNION_SETTERS*/
+
+    // Zero-copy views of this object as the native C union.
+    // Not noexcept-qualified, unlike the struct wrappers' conversions.
+    operator VkDescriptorDataEXT const &() const
+    {
+      return *reinterpret_cast<const VkDescriptorDataEXT*>( this );
+    }
+
+    operator VkDescriptorDataEXT &()
+    {
+      return *reinterpret_cast<VkDescriptorDataEXT*>( this );
+    }
+
+    // With unrestricted unions the members use the C++ wrapper types; the
+    // fallback uses raw Vk handles/pointers with identical layout.
+#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
+    const VULKAN_HPP_NAMESPACE::Sampler * pSampler;
+    const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pCombinedImageSampler;
+    const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pInputAttachmentImage;
+    const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pSampledImage;
+    const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pStorageImage;
+    const VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT * pUniformTexelBuffer;
+    const VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT * pStorageTexelBuffer;
+    const VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT * pUniformBuffer;
+    const VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT * pStorageBuffer;
+    VULKAN_HPP_NAMESPACE::DeviceAddress accelerationStructure;
+#else
+    const VkSampler * pSampler;
+    const VkDescriptorImageInfo * pCombinedImageSampler;
+    const VkDescriptorImageInfo * pInputAttachmentImage;
+    const VkDescriptorImageInfo * pSampledImage;
+    const VkDescriptorImageInfo * pStorageImage;
+    const VkDescriptorAddressInfoEXT * pUniformTexelBuffer;
+    const VkDescriptorAddressInfoEXT * pStorageTexelBuffer;
+    const VkDescriptorAddressInfoEXT * pUniformBuffer;
+    const VkDescriptorAddressInfoEXT * pStorageBuffer;
+    VkDeviceAddress accelerationStructure;
+#endif  /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
+
+  };
+
+  // C++ wrapper for VkDescriptorGetInfoEXT: selects a descriptor type and the
+  // matching DescriptorDataEXT union payload.
+  // Unlike sibling structs, no comparison operators are generated here — the
+  // union member cannot be compared memberwise.
+  // NOTE(review): generated code — member order must keep mirroring the C
+  // struct; the conversion operators reinterpret_cast between the two.
+  struct DescriptorGetInfoEXT
+  {
+    using NativeType = VkDescriptorGetInfoEXT;
+
+    // This sType must appear at most once in a structure chain.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorGetInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; type defaults to eSampler (the first descriptor type).
+VULKAN_HPP_CONSTEXPR_14 DescriptorGetInfoEXT(VULKAN_HPP_NAMESPACE::DescriptorType type_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, VULKAN_HPP_NAMESPACE::DescriptorDataEXT data_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), type( type_ ), data( data_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorGetInfoEXT( DescriptorGetInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; assumes identical layout (see note above).
+    DescriptorGetInfoEXT( VkDescriptorGetInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DescriptorGetInfoEXT( *reinterpret_cast<DescriptorGetInfoEXT const *>( &rhs ) )
+    {}
+
+
+    DescriptorGetInfoEXT & operator=( DescriptorGetInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assignment from the C struct via layout-compatible reinterpretation.
+    DescriptorGetInfoEXT & operator=( VkDescriptorGetInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each returns *this so calls can be chained.
+    VULKAN_HPP_CONSTEXPR_14 DescriptorGetInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorGetInfoEXT & setType( VULKAN_HPP_NAMESPACE::DescriptorType type_ ) VULKAN_HPP_NOEXCEPT
+    {
+      type = type_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorGetInfoEXT & setData( VULKAN_HPP_NAMESPACE::DescriptorDataEXT const & data_ ) VULKAN_HPP_NOEXCEPT
+    {
+      data = data_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy views of this object as the native C struct.
+    operator VkDescriptorGetInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorGetInfoEXT*>( this );
+    }
+
+    operator VkDescriptorGetInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorGetInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Tuple of references to all members, in declaration order.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DescriptorType const &, VULKAN_HPP_NAMESPACE::DescriptorDataEXT const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, type, data );
+    }
+#endif
+
+
+    // Public members mirror the C struct, in declaration order.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorGetInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DescriptorType type = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
+    VULKAN_HPP_NAMESPACE::DescriptorDataEXT data = {};
+
+  };
+
+  // Trait specialization: maps the StructureType enumerant to this wrapper.
+  template <>
+  struct CppType<StructureType, StructureType::eDescriptorGetInfoEXT>
+  {
+    using Type = DescriptorGetInfoEXT;
+  };
+
+  // C++ wrapper for VkDescriptorPoolSize: one (descriptor type, count) entry
+  // used when sizing a descriptor pool (see DescriptorPoolCreateInfo below).
+  // Plain struct — no sType/pNext, so it is not part of a structure chain.
+  // NOTE(review): generated code — member order must keep mirroring the C
+  // struct; the conversion operators reinterpret_cast between the two.
+  struct DescriptorPoolSize
+  {
+    using NativeType = VkDescriptorPoolSize;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; type defaults to eSampler, count to zero.
+VULKAN_HPP_CONSTEXPR DescriptorPoolSize(VULKAN_HPP_NAMESPACE::DescriptorType type_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, uint32_t descriptorCount_ = {}) VULKAN_HPP_NOEXCEPT
+    : type( type_ ), descriptorCount( descriptorCount_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DescriptorPoolSize( DescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; assumes identical layout (see note above).
+    DescriptorPoolSize( VkDescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DescriptorPoolSize( *reinterpret_cast<DescriptorPoolSize const *>( &rhs ) )
+    {}
+
+
+    DescriptorPoolSize & operator=( DescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assignment from the C struct via layout-compatible reinterpretation.
+    DescriptorPoolSize & operator=( VkDescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorPoolSize const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each returns *this so calls can be chained.
+    VULKAN_HPP_CONSTEXPR_14 DescriptorPoolSize & setType( VULKAN_HPP_NAMESPACE::DescriptorType type_ ) VULKAN_HPP_NOEXCEPT
+    {
+      type = type_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorPoolSize & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorCount = descriptorCount_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy views of this object as the native C struct.
+    operator VkDescriptorPoolSize const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorPoolSize*>( this );
+    }
+
+    operator VkDescriptorPoolSize &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorPoolSize*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Tuple of references to all members, in declaration order.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::DescriptorType const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( type, descriptorCount );
+    }
+#endif
+
+
+    // Memberwise comparison: defaulted <=> when available, otherwise explicit.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DescriptorPoolSize const & ) const = default;
+#else
+    bool operator==( DescriptorPoolSize const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( type == rhs.type )
+          && ( descriptorCount == rhs.descriptorCount );
+#endif
+    }
+
+    bool operator!=( DescriptorPoolSize const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Public members mirror the C struct, in declaration order.
+    public:
+    VULKAN_HPP_NAMESPACE::DescriptorType type = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
+    uint32_t descriptorCount = {};
+
+  };
+
+  // C++ wrapper for VkDescriptorPoolCreateInfo: parameters for creating a
+  // descriptor pool (flags, max set count, and an array of pool sizes given
+  // as poolSizeCount/pPoolSizes).
+  // NOTE(review): generated code — member order must keep mirroring the C
+  // struct; the conversion operators reinterpret_cast between the two.
+  struct DescriptorPoolCreateInfo
+  {
+    using NativeType = VkDescriptorPoolCreateInfo;
+
+    // This sType must appear at most once in a structure chain.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorPoolCreateInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor taking the raw count + pointer pair.
+VULKAN_HPP_CONSTEXPR DescriptorPoolCreateInfo(VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_ = {}, uint32_t maxSets_ = {}, uint32_t poolSizeCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorPoolSize * pPoolSizes_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), maxSets( maxSets_ ), poolSizeCount( poolSizeCount_ ), pPoolSizes( pPoolSizes_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DescriptorPoolCreateInfo( DescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; assumes identical layout (see note above).
+    DescriptorPoolCreateInfo( VkDescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DescriptorPoolCreateInfo( *reinterpret_cast<DescriptorPoolCreateInfo const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Enhanced-mode convenience: derives poolSizeCount/pPoolSizes from an
+    // ArrayProxy, so count and pointer cannot get out of sync.  The caller
+    // must keep the referenced array alive while this struct is in use.
+    DescriptorPoolCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_, uint32_t maxSets_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorPoolSize> const & poolSizes_, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), flags( flags_ ), maxSets( maxSets_ ), poolSizeCount( static_cast<uint32_t>( poolSizes_.size() ) ), pPoolSizes( poolSizes_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    DescriptorPoolCreateInfo & operator=( DescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assignment from the C struct via layout-compatible reinterpretation.
+    DescriptorPoolCreateInfo & operator=( VkDescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each returns *this so calls can be chained.
+    VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo & setMaxSets( uint32_t maxSets_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxSets = maxSets_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo & setPoolSizeCount( uint32_t poolSizeCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      poolSizeCount = poolSizeCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo & setPPoolSizes( const VULKAN_HPP_NAMESPACE::DescriptorPoolSize * pPoolSizes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pPoolSizes = pPoolSizes_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Enhanced-mode setter: sets count and pointer together from an ArrayProxy.
+    DescriptorPoolCreateInfo & setPoolSizes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorPoolSize> const & poolSizes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      poolSizeCount = static_cast<uint32_t>( poolSizes_.size() );
+      pPoolSizes = poolSizes_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy views of this object as the native C struct.
+    operator VkDescriptorPoolCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorPoolCreateInfo*>( this );
+    }
+
+    operator VkDescriptorPoolCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorPoolCreateInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Tuple of references to all members, in declaration order.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags const &, uint32_t const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::DescriptorPoolSize * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, maxSets, poolSizeCount, pPoolSizes );
+    }
+#endif
+
+
+    // Memberwise comparison.  Note: pPoolSizes is compared as a raw pointer,
+    // not element-by-element.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DescriptorPoolCreateInfo const & ) const = default;
+#else
+    bool operator==( DescriptorPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( maxSets == rhs.maxSets )
+          && ( poolSizeCount == rhs.poolSizeCount )
+          && ( pPoolSizes == rhs.pPoolSizes );
+#endif
+    }
+
+    bool operator!=( DescriptorPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Public members mirror the C struct, in declaration order.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorPoolCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags = {};
+    uint32_t maxSets = {};
+    uint32_t poolSizeCount = {};
+    const VULKAN_HPP_NAMESPACE::DescriptorPoolSize * pPoolSizes = {};
+
+  };
+
+  // Trait specialization: maps the StructureType enumerant to this wrapper.
+  template <>
+  struct CppType<StructureType, StructureType::eDescriptorPoolCreateInfo>
+  {
+    using Type = DescriptorPoolCreateInfo;
+  };
+
+  // C++ wrapper for VkDescriptorPoolInlineUniformBlockCreateInfo: chained
+  // struct carrying the maximum number of inline-uniform-block bindings for a
+  // descriptor pool.
+  // NOTE(review): generated code — member order must keep mirroring the C
+  // struct; the conversion operators reinterpret_cast between the two.
+  struct DescriptorPoolInlineUniformBlockCreateInfo
+  {
+    using NativeType = VkDescriptorPoolInlineUniformBlockCreateInfo;
+
+    // This sType must appear at most once in a structure chain.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorPoolInlineUniformBlockCreateInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; sType is fixed by the member initializer below.
+VULKAN_HPP_CONSTEXPR DescriptorPoolInlineUniformBlockCreateInfo(uint32_t maxInlineUniformBlockBindings_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), maxInlineUniformBlockBindings( maxInlineUniformBlockBindings_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DescriptorPoolInlineUniformBlockCreateInfo( DescriptorPoolInlineUniformBlockCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; assumes identical layout (see note above).
+    DescriptorPoolInlineUniformBlockCreateInfo( VkDescriptorPoolInlineUniformBlockCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DescriptorPoolInlineUniformBlockCreateInfo( *reinterpret_cast<DescriptorPoolInlineUniformBlockCreateInfo const *>( &rhs ) )
+    {}
+
+
+    DescriptorPoolInlineUniformBlockCreateInfo & operator=( DescriptorPoolInlineUniformBlockCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assignment from the C struct via layout-compatible reinterpretation.
+    DescriptorPoolInlineUniformBlockCreateInfo & operator=( VkDescriptorPoolInlineUniformBlockCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each returns *this so calls can be chained.
+    VULKAN_HPP_CONSTEXPR_14 DescriptorPoolInlineUniformBlockCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorPoolInlineUniformBlockCreateInfo & setMaxInlineUniformBlockBindings( uint32_t maxInlineUniformBlockBindings_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxInlineUniformBlockBindings = maxInlineUniformBlockBindings_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy views of this object as the native C struct.
+    operator VkDescriptorPoolInlineUniformBlockCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorPoolInlineUniformBlockCreateInfo*>( this );
+    }
+
+    operator VkDescriptorPoolInlineUniformBlockCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorPoolInlineUniformBlockCreateInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Tuple of references to all members, in declaration order.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, maxInlineUniformBlockBindings );
+    }
+#endif
+
+
+    // Memberwise comparison: defaulted <=> when available, otherwise explicit.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DescriptorPoolInlineUniformBlockCreateInfo const & ) const = default;
+#else
+    bool operator==( DescriptorPoolInlineUniformBlockCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxInlineUniformBlockBindings == rhs.maxInlineUniformBlockBindings );
+#endif
+    }
+
+    bool operator!=( DescriptorPoolInlineUniformBlockCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Public members mirror the C struct, in declaration order.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorPoolInlineUniformBlockCreateInfo;
+    const void * pNext = {};
+    uint32_t maxInlineUniformBlockBindings = {};
+
+  };
+
+  // Trait specialization: maps the StructureType enumerant to this wrapper.
+  template <>
+  struct CppType<StructureType, StructureType::eDescriptorPoolInlineUniformBlockCreateInfo>
+  {
+    using Type = DescriptorPoolInlineUniformBlockCreateInfo;
+  };
+  // Alias kept for the original EXT-suffixed name of this struct.
+  using DescriptorPoolInlineUniformBlockCreateInfoEXT = DescriptorPoolInlineUniformBlockCreateInfo;
+
+  struct DescriptorSetAllocateInfo
+  {
+    using NativeType = VkDescriptorSetAllocateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetAllocateInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DescriptorSetAllocateInfo(VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_ = {}, uint32_t descriptorSetCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), descriptorPool( descriptorPool_ ), descriptorSetCount( descriptorSetCount_ ), pSetLayouts( pSetLayouts_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DescriptorSetAllocateInfo( DescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DescriptorSetAllocateInfo( VkDescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DescriptorSetAllocateInfo( *reinterpret_cast<DescriptorSetAllocateInfo const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    DescriptorSetAllocateInfo( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayout> const & setLayouts_, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), descriptorPool( descriptorPool_ ), descriptorSetCount( static_cast<uint32_t>( setLayouts_.size() ) ), pSetLayouts( setLayouts_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    DescriptorSetAllocateInfo & operator=( DescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DescriptorSetAllocateInfo & operator=( VkDescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DescriptorSetAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorSetAllocateInfo & setDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorPool = descriptorPool_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorSetAllocateInfo & setDescriptorSetCount( uint32_t descriptorSetCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorSetCount = descriptorSetCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorSetAllocateInfo & setPSetLayouts( const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSetLayouts = pSetLayouts_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    DescriptorSetAllocateInfo & setSetLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayout> const & setLayouts_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorSetCount = static_cast<uint32_t>( setLayouts_.size() );
+      pSetLayouts = setLayouts_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkDescriptorSetAllocateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorSetAllocateInfo*>( this );
+    }
+
+    operator VkDescriptorSetAllocateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorSetAllocateInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DescriptorPool const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, descriptorPool, descriptorSetCount, pSetLayouts );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DescriptorSetAllocateInfo const & ) const = default;
+#else
+    bool operator==( DescriptorSetAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( descriptorPool == rhs.descriptorPool )
+          && ( descriptorSetCount == rhs.descriptorSetCount )
+          && ( pSetLayouts == rhs.pSetLayouts );
+#endif
+    }
+
+    bool operator!=( DescriptorSetAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetAllocateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool = {};
+    uint32_t descriptorSetCount = {};
+    const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eDescriptorSetAllocateInfo>
+  {
+    using Type = DescriptorSetAllocateInfo;
+  };
+
+  struct DescriptorSetBindingReferenceVALVE
+  {
+    using NativeType = VkDescriptorSetBindingReferenceVALVE;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetBindingReferenceVALVE;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DescriptorSetBindingReferenceVALVE(VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ = {}, uint32_t binding_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), descriptorSetLayout( descriptorSetLayout_ ), binding( binding_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DescriptorSetBindingReferenceVALVE( DescriptorSetBindingReferenceVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DescriptorSetBindingReferenceVALVE( VkDescriptorSetBindingReferenceVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DescriptorSetBindingReferenceVALVE( *reinterpret_cast<DescriptorSetBindingReferenceVALVE const *>( &rhs ) )
+    {}
+
+
+    DescriptorSetBindingReferenceVALVE & operator=( DescriptorSetBindingReferenceVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DescriptorSetBindingReferenceVALVE & operator=( VkDescriptorSetBindingReferenceVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DescriptorSetBindingReferenceVALVE & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorSetBindingReferenceVALVE & setDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorSetLayout = descriptorSetLayout_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorSetBindingReferenceVALVE & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
+    {
+      binding = binding_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkDescriptorSetBindingReferenceVALVE const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorSetBindingReferenceVALVE*>( this );
+    }
+
+    operator VkDescriptorSetBindingReferenceVALVE &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorSetBindingReferenceVALVE*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DescriptorSetLayout const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, descriptorSetLayout, binding );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DescriptorSetBindingReferenceVALVE const & ) const = default;
+#else
+    bool operator==( DescriptorSetBindingReferenceVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( descriptorSetLayout == rhs.descriptorSetLayout )
+          && ( binding == rhs.binding );
+#endif
+    }
+
+    bool operator!=( DescriptorSetBindingReferenceVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetBindingReferenceVALVE;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout = {};
+    uint32_t binding = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eDescriptorSetBindingReferenceVALVE>
+  {
+    using Type = DescriptorSetBindingReferenceVALVE;
+  };
+
+  struct DescriptorSetLayoutBinding
+  {
+    using NativeType = VkDescriptorSetLayoutBinding;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBinding(uint32_t binding_ = {}, VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, uint32_t descriptorCount_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, const VULKAN_HPP_NAMESPACE::Sampler * pImmutableSamplers_ = {}) VULKAN_HPP_NOEXCEPT
+    : binding( binding_ ), descriptorType( descriptorType_ ), descriptorCount( descriptorCount_ ), stageFlags( stageFlags_ ), pImmutableSamplers( pImmutableSamplers_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBinding( DescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DescriptorSetLayoutBinding( VkDescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DescriptorSetLayoutBinding( *reinterpret_cast<DescriptorSetLayoutBinding const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    DescriptorSetLayoutBinding( uint32_t binding_, VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Sampler> const & immutableSamplers_ )
+    : binding( binding_ ), descriptorType( descriptorType_ ), descriptorCount( static_cast<uint32_t>( immutableSamplers_.size() ) ), stageFlags( stageFlags_ ), pImmutableSamplers( immutableSamplers_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    DescriptorSetLayoutBinding & operator=( DescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DescriptorSetLayoutBinding & operator=( VkDescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
+    {
+      binding = binding_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorType = descriptorType_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorCount = descriptorCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stageFlags = stageFlags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding & setPImmutableSamplers( const VULKAN_HPP_NAMESPACE::Sampler * pImmutableSamplers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pImmutableSamplers = pImmutableSamplers_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    DescriptorSetLayoutBinding & setImmutableSamplers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Sampler> const & immutableSamplers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorCount = static_cast<uint32_t>( immutableSamplers_.size() );
+      pImmutableSamplers = immutableSamplers_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkDescriptorSetLayoutBinding const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorSetLayoutBinding*>( this );
+    }
+
+    operator VkDescriptorSetLayoutBinding &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorSetLayoutBinding*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<uint32_t const &, VULKAN_HPP_NAMESPACE::DescriptorType const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ShaderStageFlags const &, const VULKAN_HPP_NAMESPACE::Sampler * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( binding, descriptorType, descriptorCount, stageFlags, pImmutableSamplers );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DescriptorSetLayoutBinding const & ) const = default;
+#else
+    bool operator==( DescriptorSetLayoutBinding const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( binding == rhs.binding )
+          && ( descriptorType == rhs.descriptorType )
+          && ( descriptorCount == rhs.descriptorCount )
+          && ( stageFlags == rhs.stageFlags )
+          && ( pImmutableSamplers == rhs.pImmutableSamplers );
+#endif
+    }
+
+    bool operator!=( DescriptorSetLayoutBinding const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    uint32_t binding = {};
+    VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
+    uint32_t descriptorCount = {};
+    VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {};
+    const VULKAN_HPP_NAMESPACE::Sampler * pImmutableSamplers = {};
+
+  };
+
+  struct DescriptorSetLayoutBindingFlagsCreateInfo
+  {
+    using NativeType = VkDescriptorSetLayoutBindingFlagsCreateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBindingFlagsCreateInfo(uint32_t bindingCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags * pBindingFlags_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), bindingCount( bindingCount_ ), pBindingFlags( pBindingFlags_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBindingFlagsCreateInfo( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DescriptorSetLayoutBindingFlagsCreateInfo( VkDescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DescriptorSetLayoutBindingFlagsCreateInfo( *reinterpret_cast<DescriptorSetLayoutBindingFlagsCreateInfo const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    DescriptorSetLayoutBindingFlagsCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags> const & bindingFlags_, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), bindingCount( static_cast<uint32_t>( bindingFlags_.size() ) ), pBindingFlags( bindingFlags_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    DescriptorSetLayoutBindingFlagsCreateInfo & operator=( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DescriptorSetLayoutBindingFlagsCreateInfo & operator=( VkDescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBindingFlagsCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBindingFlagsCreateInfo & setBindingCount( uint32_t bindingCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bindingCount = bindingCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBindingFlagsCreateInfo & setPBindingFlags( const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags * pBindingFlags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pBindingFlags = pBindingFlags_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    DescriptorSetLayoutBindingFlagsCreateInfo & setBindingFlags( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags> const & bindingFlags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bindingCount = static_cast<uint32_t>( bindingFlags_.size() );
+      pBindingFlags = bindingFlags_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkDescriptorSetLayoutBindingFlagsCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorSetLayoutBindingFlagsCreateInfo*>( this );
+    }
+
+    operator VkDescriptorSetLayoutBindingFlagsCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorSetLayoutBindingFlagsCreateInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, bindingCount, pBindingFlags );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DescriptorSetLayoutBindingFlagsCreateInfo const & ) const = default;
+#else
+    bool operator==( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( bindingCount == rhs.bindingCount )
+          && ( pBindingFlags == rhs.pBindingFlags );
+#endif
+    }
+
+    bool operator!=( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo;
+    const void * pNext = {};
+    uint32_t bindingCount = {};
+    const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags * pBindingFlags = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo>
+  {
+    using Type = DescriptorSetLayoutBindingFlagsCreateInfo;
+  };
+  using DescriptorSetLayoutBindingFlagsCreateInfoEXT = DescriptorSetLayoutBindingFlagsCreateInfo;
+
+  struct DescriptorSetLayoutCreateInfo
+  {
+    using NativeType = VkDescriptorSetLayoutCreateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutCreateInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateInfo(VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_ = {}, uint32_t bindingCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding * pBindings_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), bindingCount( bindingCount_ ), pBindings( pBindings_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateInfo( DescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DescriptorSetLayoutCreateInfo( VkDescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DescriptorSetLayoutCreateInfo( *reinterpret_cast<DescriptorSetLayoutCreateInfo const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    DescriptorSetLayoutCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding> const & bindings_, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), flags( flags_ ), bindingCount( static_cast<uint32_t>( bindings_.size() ) ), pBindings( bindings_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    DescriptorSetLayoutCreateInfo & operator=( DescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DescriptorSetLayoutCreateInfo & operator=( VkDescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutCreateInfo & setBindingCount( uint32_t bindingCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bindingCount = bindingCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutCreateInfo & setPBindings( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding * pBindings_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pBindings = pBindings_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    DescriptorSetLayoutCreateInfo & setBindings( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding> const & bindings_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bindingCount = static_cast<uint32_t>( bindings_.size() );
+      pBindings = bindings_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkDescriptorSetLayoutCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( this );
+    }
+
+    operator VkDescriptorSetLayoutCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorSetLayoutCreateInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, bindingCount, pBindings );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DescriptorSetLayoutCreateInfo const & ) const = default;
+#else
+    bool operator==( DescriptorSetLayoutCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( bindingCount == rhs.bindingCount )
+          && ( pBindings == rhs.pBindings );
+#endif
+    }
+
+    bool operator!=( DescriptorSetLayoutCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags = {};
+    uint32_t bindingCount = {};
+    const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding * pBindings = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eDescriptorSetLayoutCreateInfo>
+  {
+    using Type = DescriptorSetLayoutCreateInfo;
+  };
+
+  struct DescriptorSetLayoutHostMappingInfoVALVE
+  {
+    using NativeType = VkDescriptorSetLayoutHostMappingInfoVALVE;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutHostMappingInfoVALVE;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DescriptorSetLayoutHostMappingInfoVALVE(size_t descriptorOffset_ = {}, uint32_t descriptorSize_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), descriptorOffset( descriptorOffset_ ), descriptorSize( descriptorSize_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DescriptorSetLayoutHostMappingInfoVALVE( DescriptorSetLayoutHostMappingInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DescriptorSetLayoutHostMappingInfoVALVE( VkDescriptorSetLayoutHostMappingInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DescriptorSetLayoutHostMappingInfoVALVE( *reinterpret_cast<DescriptorSetLayoutHostMappingInfoVALVE const *>( &rhs ) )
+    {}
+
+
+    DescriptorSetLayoutHostMappingInfoVALVE & operator=( DescriptorSetLayoutHostMappingInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DescriptorSetLayoutHostMappingInfoVALVE & operator=( VkDescriptorSetLayoutHostMappingInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutHostMappingInfoVALVE & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutHostMappingInfoVALVE & setDescriptorOffset( size_t descriptorOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorOffset = descriptorOffset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutHostMappingInfoVALVE & setDescriptorSize( uint32_t descriptorSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorSize = descriptorSize_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkDescriptorSetLayoutHostMappingInfoVALVE const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorSetLayoutHostMappingInfoVALVE*>( this );
+    }
+
+    operator VkDescriptorSetLayoutHostMappingInfoVALVE &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorSetLayoutHostMappingInfoVALVE*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, size_t const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, descriptorOffset, descriptorSize );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DescriptorSetLayoutHostMappingInfoVALVE const & ) const = default;
+#else
+    bool operator==( DescriptorSetLayoutHostMappingInfoVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( descriptorOffset == rhs.descriptorOffset )
+          && ( descriptorSize == rhs.descriptorSize );
+#endif
+    }
+
+    bool operator!=( DescriptorSetLayoutHostMappingInfoVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutHostMappingInfoVALVE;
+    void * pNext = {};
+    size_t descriptorOffset = {};
+    uint32_t descriptorSize = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eDescriptorSetLayoutHostMappingInfoVALVE>
+  {
+    using Type = DescriptorSetLayoutHostMappingInfoVALVE;
+  };
+
+  struct DescriptorSetLayoutSupport
+  {
+    using NativeType = VkDescriptorSetLayoutSupport;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutSupport;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DescriptorSetLayoutSupport(VULKAN_HPP_NAMESPACE::Bool32 supported_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), supported( supported_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DescriptorSetLayoutSupport( DescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DescriptorSetLayoutSupport( VkDescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DescriptorSetLayoutSupport( *reinterpret_cast<DescriptorSetLayoutSupport const *>( &rhs ) )
+    {}
+
+
+    DescriptorSetLayoutSupport & operator=( DescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DescriptorSetLayoutSupport & operator=( VkDescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkDescriptorSetLayoutSupport const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorSetLayoutSupport*>( this );
+    }
+
+    operator VkDescriptorSetLayoutSupport &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorSetLayoutSupport*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, supported );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DescriptorSetLayoutSupport const & ) const = default;
+#else
+    bool operator==( DescriptorSetLayoutSupport const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( supported == rhs.supported );
+#endif
+    }
+
+    bool operator!=( DescriptorSetLayoutSupport const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutSupport;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 supported = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eDescriptorSetLayoutSupport>
+  {
+    using Type = DescriptorSetLayoutSupport;
+  };
+  using DescriptorSetLayoutSupportKHR = DescriptorSetLayoutSupport;
+
+  struct DescriptorSetVariableDescriptorCountAllocateInfo  // C++ wrapper over VkDescriptorSetVariableDescriptorCountAllocateInfo (see NativeType); layout-compatible by design
+  {
+    using NativeType = VkDescriptorSetVariableDescriptorCountAllocateInfo;
+
+    static const bool allowDuplicate = false;  // NOTE(review): presumably "at most one instance per pNext chain" -- confirm against the structure-chain validation
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountAllocateInfo(uint32_t descriptorSetCount_ = {}, const uint32_t * pDescriptorCounts_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), descriptorSetCount( descriptorSetCount_ ), pDescriptorCounts( pDescriptorCounts_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountAllocateInfo( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DescriptorSetVariableDescriptorCountAllocateInfo( VkDescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DescriptorSetVariableDescriptorCountAllocateInfo( *reinterpret_cast<DescriptorSetVariableDescriptorCountAllocateInfo const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    DescriptorSetVariableDescriptorCountAllocateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & descriptorCounts_, const void * pNext_ = nullptr )  // convenience ctor: count is derived from the proxy's size
+    : pNext( pNext_ ), descriptorSetCount( static_cast<uint32_t>( descriptorCounts_.size() ) ), pDescriptorCounts( descriptorCounts_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    DescriptorSetVariableDescriptorCountAllocateInfo & operator=( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DescriptorSetVariableDescriptorCountAllocateInfo & operator=( VkDescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DescriptorSetVariableDescriptorCountAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorSetVariableDescriptorCountAllocateInfo & setDescriptorSetCount( uint32_t descriptorSetCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorSetCount = descriptorSetCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorSetVariableDescriptorCountAllocateInfo & setPDescriptorCounts( const uint32_t * pDescriptorCounts_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDescriptorCounts = pDescriptorCounts_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    DescriptorSetVariableDescriptorCountAllocateInfo & setDescriptorCounts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & descriptorCounts_ ) VULKAN_HPP_NOEXCEPT  // sets count and pointer together from one range, keeping them consistent
+    {
+      descriptorSetCount = static_cast<uint32_t>( descriptorCounts_.size() );
+      pDescriptorCounts = descriptorCounts_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkDescriptorSetVariableDescriptorCountAllocateInfo const &() const VULKAN_HPP_NOEXCEPT  // zero-copy view as the C struct; relies on identical layout
+    {
+      return *reinterpret_cast<const VkDescriptorSetVariableDescriptorCountAllocateInfo*>( this );
+    }
+
+    operator VkDescriptorSetVariableDescriptorCountAllocateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorSetVariableDescriptorCountAllocateInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const uint32_t * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT  // tuple of references over all members; backs the reflect-based operator== below
+    {
+      return std::tie( sType, pNext, descriptorSetCount, pDescriptorCounts );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DescriptorSetVariableDescriptorCountAllocateInfo const & ) const = default;
+#else
+    bool operator==( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT  // shallow equality: pNext/pDescriptorCounts compare by address, not pointee contents
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( descriptorSetCount == rhs.descriptorSetCount )
+          && ( pDescriptorCounts == rhs.pDescriptorCounts );
+#endif
+    }
+
+    bool operator!=( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo;
+    const void * pNext = {};
+    uint32_t descriptorSetCount = {};
+    const uint32_t * pDescriptorCounts = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo>
+  {
+    using Type = DescriptorSetVariableDescriptorCountAllocateInfo;  // maps this sType enum value back to its C++ struct type
+  };
+  using DescriptorSetVariableDescriptorCountAllocateInfoEXT = DescriptorSetVariableDescriptorCountAllocateInfo;  // EXT-suffixed alias: pre-promotion extension name kept for source compatibility
+
+  struct DescriptorSetVariableDescriptorCountLayoutSupport  // C++ wrapper over VkDescriptorSetVariableDescriptorCountLayoutSupport; no setters are generated and pNext is non-const, suggesting an output/query struct -- confirm
+  {
+    using NativeType = VkDescriptorSetVariableDescriptorCountLayoutSupport;
+
+    static const bool allowDuplicate = false;  // NOTE(review): presumably "at most one instance per pNext chain" -- confirm
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountLayoutSupport(uint32_t maxVariableDescriptorCount_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), maxVariableDescriptorCount( maxVariableDescriptorCount_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountLayoutSupport( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DescriptorSetVariableDescriptorCountLayoutSupport( VkDescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DescriptorSetVariableDescriptorCountLayoutSupport( *reinterpret_cast<DescriptorSetVariableDescriptorCountLayoutSupport const *>( &rhs ) )
+    {}
+
+
+    DescriptorSetVariableDescriptorCountLayoutSupport & operator=( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DescriptorSetVariableDescriptorCountLayoutSupport & operator=( VkDescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupport const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkDescriptorSetVariableDescriptorCountLayoutSupport const &() const VULKAN_HPP_NOEXCEPT  // zero-copy view as the C struct; relies on identical layout
+    {
+      return *reinterpret_cast<const VkDescriptorSetVariableDescriptorCountLayoutSupport*>( this );
+    }
+
+    operator VkDescriptorSetVariableDescriptorCountLayoutSupport &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorSetVariableDescriptorCountLayoutSupport*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT  // tuple of references over all members; backs the reflect-based operator== below
+    {
+      return std::tie( sType, pNext, maxVariableDescriptorCount );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DescriptorSetVariableDescriptorCountLayoutSupport const & ) const = default;
+#else
+    bool operator==( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) const VULKAN_HPP_NOEXCEPT  // shallow equality: pNext compares by address
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxVariableDescriptorCount == rhs.maxVariableDescriptorCount );
+#endif
+    }
+
+    bool operator!=( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport;
+    void * pNext = {};
+    uint32_t maxVariableDescriptorCount = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport>
+  {
+    using Type = DescriptorSetVariableDescriptorCountLayoutSupport;  // maps this sType enum value back to its C++ struct type
+  };
+  using DescriptorSetVariableDescriptorCountLayoutSupportEXT = DescriptorSetVariableDescriptorCountLayoutSupport;  // EXT-suffixed alias: pre-promotion extension name kept for source compatibility
+
+  struct DescriptorUpdateTemplateEntry  // C++ wrapper over VkDescriptorUpdateTemplateEntry; plain POD-style struct -- no sType/pNext (not a chained structure)
+  {
+    using NativeType = VkDescriptorUpdateTemplateEntry;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateEntry(uint32_t dstBinding_ = {}, uint32_t dstArrayElement_ = {}, uint32_t descriptorCount_ = {}, VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, size_t offset_ = {}, size_t stride_ = {}) VULKAN_HPP_NOEXCEPT
+    : dstBinding( dstBinding_ ), dstArrayElement( dstArrayElement_ ), descriptorCount( descriptorCount_ ), descriptorType( descriptorType_ ), offset( offset_ ), stride( stride_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateEntry( DescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DescriptorUpdateTemplateEntry( VkDescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DescriptorUpdateTemplateEntry( *reinterpret_cast<DescriptorUpdateTemplateEntry const *>( &rhs ) )
+    {}
+
+
+    DescriptorUpdateTemplateEntry & operator=( DescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DescriptorUpdateTemplateEntry & operator=( VkDescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & setDstBinding( uint32_t dstBinding_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstBinding = dstBinding_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstArrayElement = dstArrayElement_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorCount = descriptorCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorType = descriptorType_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & setOffset( size_t offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & setStride( size_t stride_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stride = stride_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkDescriptorUpdateTemplateEntry const &() const VULKAN_HPP_NOEXCEPT  // zero-copy view as the C struct; relies on identical layout
+    {
+      return *reinterpret_cast<const VkDescriptorUpdateTemplateEntry*>( this );
+    }
+
+    operator VkDescriptorUpdateTemplateEntry &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorUpdateTemplateEntry*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DescriptorType const &, size_t const &, size_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT  // tuple of references over all members; backs the reflect-based operator== below
+    {
+      return std::tie( dstBinding, dstArrayElement, descriptorCount, descriptorType, offset, stride );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DescriptorUpdateTemplateEntry const & ) const = default;
+#else
+    bool operator==( DescriptorUpdateTemplateEntry const & rhs ) const VULKAN_HPP_NOEXCEPT  // memberwise equality over all six fields
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( dstBinding == rhs.dstBinding )
+          && ( dstArrayElement == rhs.dstArrayElement )
+          && ( descriptorCount == rhs.descriptorCount )
+          && ( descriptorType == rhs.descriptorType )
+          && ( offset == rhs.offset )
+          && ( stride == rhs.stride );
+#endif
+    }
+
+    bool operator!=( DescriptorUpdateTemplateEntry const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    uint32_t dstBinding = {};
+    uint32_t dstArrayElement = {};
+    uint32_t descriptorCount = {};
+    VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
+    size_t offset = {};
+    size_t stride = {};
+
+  };
+  using DescriptorUpdateTemplateEntryKHR = DescriptorUpdateTemplateEntry;  // KHR-suffixed alias: pre-promotion extension name kept for source compatibility
+
+  struct DescriptorUpdateTemplateCreateInfo  // C++ wrapper over VkDescriptorUpdateTemplateCreateInfo (see NativeType); layout-compatible by design
+  {
+    using NativeType = VkDescriptorUpdateTemplateCreateInfo;
+
+    static const bool allowDuplicate = false;  // NOTE(review): presumably "at most one instance per pNext chain" -- confirm
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorUpdateTemplateCreateInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateCreateInfo(VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_ = {}, uint32_t descriptorUpdateEntryCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry * pDescriptorUpdateEntries_ = {}, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet, VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ = {}, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {}, uint32_t set_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), descriptorUpdateEntryCount( descriptorUpdateEntryCount_ ), pDescriptorUpdateEntries( pDescriptorUpdateEntries_ ), templateType( templateType_ ), descriptorSetLayout( descriptorSetLayout_ ), pipelineBindPoint( pipelineBindPoint_ ), pipelineLayout( pipelineLayout_ ), set( set_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateCreateInfo( DescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DescriptorUpdateTemplateCreateInfo( VkDescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DescriptorUpdateTemplateCreateInfo( *reinterpret_cast<DescriptorUpdateTemplateCreateInfo const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    DescriptorUpdateTemplateCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry> const & descriptorUpdateEntries_, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet, VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ = {}, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {}, uint32_t set_ = {}, const void * pNext_ = nullptr )  // convenience ctor: entry count is derived from the proxy's size
+    : pNext( pNext_ ), flags( flags_ ), descriptorUpdateEntryCount( static_cast<uint32_t>( descriptorUpdateEntries_.size() ) ), pDescriptorUpdateEntries( descriptorUpdateEntries_.data() ), templateType( templateType_ ), descriptorSetLayout( descriptorSetLayout_ ), pipelineBindPoint( pipelineBindPoint_ ), pipelineLayout( pipelineLayout_ ), set( set_ )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    DescriptorUpdateTemplateCreateInfo & operator=( DescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DescriptorUpdateTemplateCreateInfo & operator=( VkDescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setDescriptorUpdateEntryCount( uint32_t descriptorUpdateEntryCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorUpdateEntryCount = descriptorUpdateEntryCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setPDescriptorUpdateEntries( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry * pDescriptorUpdateEntries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDescriptorUpdateEntries = pDescriptorUpdateEntries_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    DescriptorUpdateTemplateCreateInfo & setDescriptorUpdateEntries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry> const & descriptorUpdateEntries_ ) VULKAN_HPP_NOEXCEPT  // sets count and pointer together from one range, keeping them consistent
+    {
+      descriptorUpdateEntryCount = static_cast<uint32_t>( descriptorUpdateEntries_.size() );
+      pDescriptorUpdateEntries = descriptorUpdateEntries_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setTemplateType( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      templateType = templateType_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorSetLayout = descriptorSetLayout_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineBindPoint = pipelineBindPoint_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineLayout = pipelineLayout_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setSet( uint32_t set_ ) VULKAN_HPP_NOEXCEPT
+    {
+      set = set_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkDescriptorUpdateTemplateCreateInfo const &() const VULKAN_HPP_NOEXCEPT  // zero-copy view as the C struct; relies on identical layout
+    {
+      return *reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo*>( this );
+    }
+
+    operator VkDescriptorUpdateTemplateCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorUpdateTemplateCreateInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry * const &, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType const &, VULKAN_HPP_NAMESPACE::DescriptorSetLayout const &, VULKAN_HPP_NAMESPACE::PipelineBindPoint const &, VULKAN_HPP_NAMESPACE::PipelineLayout const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT  // tuple of references over all members; backs the reflect-based operator== below
+    {
+      return std::tie( sType, pNext, flags, descriptorUpdateEntryCount, pDescriptorUpdateEntries, templateType, descriptorSetLayout, pipelineBindPoint, pipelineLayout, set );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DescriptorUpdateTemplateCreateInfo const & ) const = default;
+#else
+    bool operator==( DescriptorUpdateTemplateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT  // shallow equality: pNext/pDescriptorUpdateEntries compare by address
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( descriptorUpdateEntryCount == rhs.descriptorUpdateEntryCount )
+          && ( pDescriptorUpdateEntries == rhs.pDescriptorUpdateEntries )
+          && ( templateType == rhs.templateType )
+          && ( descriptorSetLayout == rhs.descriptorSetLayout )
+          && ( pipelineBindPoint == rhs.pipelineBindPoint )
+          && ( pipelineLayout == rhs.pipelineLayout )
+          && ( set == rhs.set );
+#endif
+    }
+
+    bool operator!=( DescriptorUpdateTemplateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorUpdateTemplateCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags = {};
+    uint32_t descriptorUpdateEntryCount = {};
+    const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry * pDescriptorUpdateEntries = {};
+    VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet;
+    VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout = {};
+    VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
+    VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout = {};
+    uint32_t set = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eDescriptorUpdateTemplateCreateInfo>
+  {
+    using Type = DescriptorUpdateTemplateCreateInfo;  // maps this sType enum value back to its C++ struct type
+  };
+  using DescriptorUpdateTemplateCreateInfoKHR = DescriptorUpdateTemplateCreateInfo;  // KHR-suffixed alias: pre-promotion extension name kept for source compatibility
+
+  struct DeviceAddressBindingCallbackDataEXT  // C++ wrapper over VkDeviceAddressBindingCallbackDataEXT (see NativeType); layout-compatible by design
+  {
+    using NativeType = VkDeviceAddressBindingCallbackDataEXT;
+
+    static const bool allowDuplicate = false;  // NOTE(review): presumably "at most one instance per pNext chain" -- confirm
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceAddressBindingCallbackDataEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DeviceAddressBindingCallbackDataEXT(VULKAN_HPP_NAMESPACE::DeviceAddressBindingFlagsEXT flags_ = {}, VULKAN_HPP_NAMESPACE::DeviceAddress baseAddress_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::DeviceAddressBindingTypeEXT bindingType_ = VULKAN_HPP_NAMESPACE::DeviceAddressBindingTypeEXT::eBind, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), baseAddress( baseAddress_ ), size( size_ ), bindingType( bindingType_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DeviceAddressBindingCallbackDataEXT( DeviceAddressBindingCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DeviceAddressBindingCallbackDataEXT( VkDeviceAddressBindingCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DeviceAddressBindingCallbackDataEXT( *reinterpret_cast<DeviceAddressBindingCallbackDataEXT const *>( &rhs ) )
+    {}
+
+
+    DeviceAddressBindingCallbackDataEXT & operator=( DeviceAddressBindingCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DeviceAddressBindingCallbackDataEXT & operator=( VkDeviceAddressBindingCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceAddressBindingCallbackDataEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DeviceAddressBindingCallbackDataEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceAddressBindingCallbackDataEXT & setFlags( VULKAN_HPP_NAMESPACE::DeviceAddressBindingFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceAddressBindingCallbackDataEXT & setBaseAddress( VULKAN_HPP_NAMESPACE::DeviceAddress baseAddress_ ) VULKAN_HPP_NOEXCEPT
+    {
+      baseAddress = baseAddress_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceAddressBindingCallbackDataEXT & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
+    {
+      size = size_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceAddressBindingCallbackDataEXT & setBindingType( VULKAN_HPP_NAMESPACE::DeviceAddressBindingTypeEXT bindingType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bindingType = bindingType_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkDeviceAddressBindingCallbackDataEXT const &() const VULKAN_HPP_NOEXCEPT  // zero-copy view as the C struct; relies on identical layout
+    {
+      return *reinterpret_cast<const VkDeviceAddressBindingCallbackDataEXT*>( this );
+    }
+
+    operator VkDeviceAddressBindingCallbackDataEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceAddressBindingCallbackDataEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DeviceAddressBindingFlagsEXT const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceAddressBindingTypeEXT const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT  // tuple of references over all members; backs the reflect-based operator== below
+    {
+      return std::tie( sType, pNext, flags, baseAddress, size, bindingType );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DeviceAddressBindingCallbackDataEXT const & ) const = default;
+#else
+    bool operator==( DeviceAddressBindingCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT  // shallow equality: pNext compares by address
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( baseAddress == rhs.baseAddress )
+          && ( size == rhs.size )
+          && ( bindingType == rhs.bindingType );
+#endif
+    }
+
+    bool operator!=( DeviceAddressBindingCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceAddressBindingCallbackDataEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceAddressBindingFlagsEXT flags = {};
+    VULKAN_HPP_NAMESPACE::DeviceAddress baseAddress = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+    VULKAN_HPP_NAMESPACE::DeviceAddressBindingTypeEXT bindingType = VULKAN_HPP_NAMESPACE::DeviceAddressBindingTypeEXT::eBind;
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eDeviceAddressBindingCallbackDataEXT>
+  {
+    using Type = DeviceAddressBindingCallbackDataEXT;  // maps this sType enum value back to its C++ struct type
+  };
+
+  struct DeviceBufferMemoryRequirements  // C++ wrapper over VkDeviceBufferMemoryRequirements (see NativeType); layout-compatible by design
+  {
+    using NativeType = VkDeviceBufferMemoryRequirements;
+
+    static const bool allowDuplicate = false;  // NOTE(review): presumably "at most one instance per pNext chain" -- confirm
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceBufferMemoryRequirements;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DeviceBufferMemoryRequirements(const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), pCreateInfo( pCreateInfo_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DeviceBufferMemoryRequirements( DeviceBufferMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DeviceBufferMemoryRequirements( VkDeviceBufferMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DeviceBufferMemoryRequirements( *reinterpret_cast<DeviceBufferMemoryRequirements const *>( &rhs ) )
+    {}
+
+
+    DeviceBufferMemoryRequirements & operator=( DeviceBufferMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DeviceBufferMemoryRequirements & operator=( VkDeviceBufferMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DeviceBufferMemoryRequirements & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceBufferMemoryRequirements & setPCreateInfo( const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pCreateInfo = pCreateInfo_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkDeviceBufferMemoryRequirements const &() const VULKAN_HPP_NOEXCEPT  // zero-copy view as the C struct; relies on identical layout
+    {
+      return *reinterpret_cast<const VkDeviceBufferMemoryRequirements*>( this );
+    }
+
+    operator VkDeviceBufferMemoryRequirements &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceBufferMemoryRequirements*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const VULKAN_HPP_NAMESPACE::BufferCreateInfo * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT  // tuple of references over all members; backs the reflect-based operator== below
+    {
+      return std::tie( sType, pNext, pCreateInfo );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DeviceBufferMemoryRequirements const & ) const = default;
+#else
+    bool operator==( DeviceBufferMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT  // shallow equality: pNext/pCreateInfo compare by address
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pCreateInfo == rhs.pCreateInfo );
+#endif
+    }
+
+    bool operator!=( DeviceBufferMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceBufferMemoryRequirements;
+    const void * pNext = {};
+    const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eDeviceBufferMemoryRequirements>
+  {
+    using Type = DeviceBufferMemoryRequirements;  // maps this sType enum value back to its C++ struct type
+  };
+  using DeviceBufferMemoryRequirementsKHR = DeviceBufferMemoryRequirements;  // KHR-suffixed alias: pre-promotion extension name kept for source compatibility
+
+  // C++ wrapper around VkDeviceQueueCreateInfo (queue selection for device creation).
+  // It adds no data members beyond the C struct's, and the reinterpret_cast
+  // conversions below rely on that exact layout compatibility — do not reorder or
+  // add members.
+  struct DeviceQueueCreateInfo
+  {
+    using NativeType = VkDeviceQueueCreateInfo;
+
+    // allowDuplicate: whether this struct may legally appear more than once in a
+    // pNext chain.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceQueueCreateInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Member-wise constructor; sType is fixed by its default member initializer.
+VULKAN_HPP_CONSTEXPR DeviceQueueCreateInfo(VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ = {}, uint32_t queueFamilyIndex_ = {}, uint32_t queueCount_ = {}, const float * pQueuePriorities_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), queueFamilyIndex( queueFamilyIndex_ ), queueCount( queueCount_ ), pQueuePriorities( pQueuePriorities_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DeviceQueueCreateInfo( DeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct: valid only because the two types share one layout.
+    DeviceQueueCreateInfo( VkDeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DeviceQueueCreateInfo( *reinterpret_cast<DeviceQueueCreateInfo const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Enhanced-mode constructor: queueCount and pQueuePriorities are both derived
+    // from the ArrayProxy, keeping the count and the pointer consistent.
+    DeviceQueueCreateInfo( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_, uint32_t queueFamilyIndex_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const float> const & queuePriorities_, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), flags( flags_ ), queueFamilyIndex( queueFamilyIndex_ ), queueCount( static_cast<uint32_t>( queuePriorities_.size() ) ), pQueuePriorities( queuePriorities_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    DeviceQueueCreateInfo & operator=( DeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct via the layout-compatible reinterpretation above.
+    DeviceQueueCreateInfo & operator=( VkDeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable per-member setters (builder style); each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueFamilyIndex = queueFamilyIndex_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo & setQueueCount( uint32_t queueCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueCount = queueCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo & setPQueuePriorities( const float * pQueuePriorities_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pQueuePriorities = pQueuePriorities_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets count and pointer together from one ArrayProxy, so they cannot diverge.
+    DeviceQueueCreateInfo & setQueuePriorities( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const float> const & queuePriorities_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueCount = static_cast<uint32_t>( queuePriorities_.size() );
+      pQueuePriorities = queuePriorities_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the C struct; again depend on identical layout.
+    operator VkDeviceQueueCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceQueueCreateInfo*>( this );
+    }
+
+    operator VkDeviceQueueCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceQueueCreateInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Returns a tuple of const references to every member, in declaration order;
+    // used below to implement comparison via tuple comparison.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags const &, uint32_t const &, uint32_t const &, const float * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, queueFamilyIndex, queueCount, pQueuePriorities );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DeviceQueueCreateInfo const & ) const = default;
+#else
+    // Member-wise equality; note pNext and pQueuePriorities compare by pointer
+    // value, not by pointee contents.
+    bool operator==( DeviceQueueCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( queueFamilyIndex == rhs.queueFamilyIndex )
+          && ( queueCount == rhs.queueCount )
+          && ( pQueuePriorities == rhs.pQueuePriorities );
+#endif
+    }
+
+    bool operator!=( DeviceQueueCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Data members mirror VkDeviceQueueCreateInfo exactly, in the same order.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags = {};
+    uint32_t queueFamilyIndex = {};
+    uint32_t queueCount = {};
+    const float * pQueuePriorities = {};
+
+  };
+
+  // Maps the sType enumerant eDeviceQueueCreateInfo back to its C++ wrapper struct,
+  // so generic code can recover the wrapper type from a StructureType value.
+  template <>
+  struct CppType<StructureType, StructureType::eDeviceQueueCreateInfo>
+  {
+    using Type = DeviceQueueCreateInfo;
+  };
+
+  struct PhysicalDeviceFeatures
+  {
+    using NativeType = VkPhysicalDeviceFeatures;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures(VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray_ = {}, VULKAN_HPP_NAMESPACE::Bool32 independentBlend_ = {}, VULKAN_HPP_NAMESPACE::Bool32 geometryShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 tessellationShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading_ = {}, VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend_ = {}, VULKAN_HPP_NAMESPACE::Bool32 logicOp_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect_ = {}, VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthClamp_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthBounds_ = {}, VULKAN_HPP_NAMESPACE::Bool32 wideLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 largePoints_ = {}, VULKAN_HPP_NAMESPACE::Bool32 alphaToOne_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiViewport_ = {}, VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy_ = {}, VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2_ = {}, VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR_ = {}, VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC_ = {}, VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise_ = {}, VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing_ = {}, 
VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInt64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInt16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseBinding_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased_ = {}, VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate_ = {}, VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries_ = {}) VULKAN_HPP_NOEXCEPT
+    : robustBufferAccess( robustBufferAccess_ ), fullDrawIndexUint32( fullDrawIndexUint32_ ), imageCubeArray( imageCubeArray_ ), independentBlend( independentBlend_ ), geometryShader( geometryShader_ ), tessellationShader( tessellationShader_ ), sampleRateShading( sampleRateShading_ ), dualSrcBlend( dualSrcBlend_ ), logicOp( logicOp_ ), multiDrawIndirect( multiDrawIndirect_ ), drawIndirectFirstInstance( drawIndirectFirstInstance_ ), depthClamp( depthClamp_ ), depthBiasClamp( depthBiasClamp_ ), fillModeNonSolid( fillModeNonSolid_ ), depthBounds( depthBounds_ ), wideLines( wideLines_ ), largePoints( largePoints_ ), alphaToOne( alphaToOne_ ), multiViewport( multiViewport_ ), samplerAnisotropy( samplerAnisotropy_ ), textureCompressionETC2( textureCompressionETC2_ ), textureCompressionASTC_LDR( textureCompressionASTC_LDR_ ), textureCompressionBC( textureCompressionBC_ ), occlusionQueryPrecise( occlusionQueryPrecise_ ), pipelineStatisticsQuery( pipelineStatisticsQuery_ ), vertexPipelineStoresAndAtomics( vertexPipelineStoresAndAtomics_ ), fragmentStoresAndAtomics( fragmentStoresAndAtomics_ ), shaderTessellationAndGeometryPointSize( shaderTessellationAndGeometryPointSize_ ), shaderImageGatherExtended( shaderImageGatherExtended_ ), shaderStorageImageExtendedFormats( shaderStorageImageExtendedFormats_ ), shaderStorageImageMultisample( shaderStorageImageMultisample_ ), shaderStorageImageReadWithoutFormat( shaderStorageImageReadWithoutFormat_ ), shaderStorageImageWriteWithoutFormat( shaderStorageImageWriteWithoutFormat_ ), shaderUniformBufferArrayDynamicIndexing( shaderUniformBufferArrayDynamicIndexing_ ), shaderSampledImageArrayDynamicIndexing( shaderSampledImageArrayDynamicIndexing_ ), shaderStorageBufferArrayDynamicIndexing( shaderStorageBufferArrayDynamicIndexing_ ), shaderStorageImageArrayDynamicIndexing( shaderStorageImageArrayDynamicIndexing_ ), shaderClipDistance( shaderClipDistance_ ), shaderCullDistance( shaderCullDistance_ ), shaderFloat64( shaderFloat64_ ), 
shaderInt64( shaderInt64_ ), shaderInt16( shaderInt16_ ), shaderResourceResidency( shaderResourceResidency_ ), shaderResourceMinLod( shaderResourceMinLod_ ), sparseBinding( sparseBinding_ ), sparseResidencyBuffer( sparseResidencyBuffer_ ), sparseResidencyImage2D( sparseResidencyImage2D_ ), sparseResidencyImage3D( sparseResidencyImage3D_ ), sparseResidency2Samples( sparseResidency2Samples_ ), sparseResidency4Samples( sparseResidency4Samples_ ), sparseResidency8Samples( sparseResidency8Samples_ ), sparseResidency16Samples( sparseResidency16Samples_ ), sparseResidencyAliased( sparseResidencyAliased_ ), variableMultisampleRate( variableMultisampleRate_ ), inheritedQueries( inheritedQueries_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures( PhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceFeatures( VkPhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceFeatures( *reinterpret_cast<PhysicalDeviceFeatures const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceFeatures & operator=( PhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceFeatures & operator=( VkPhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setRobustBufferAccess( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      robustBufferAccess = robustBufferAccess_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setFullDrawIndexUint32( VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fullDrawIndexUint32 = fullDrawIndexUint32_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setImageCubeArray( VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageCubeArray = imageCubeArray_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setIndependentBlend( VULKAN_HPP_NAMESPACE::Bool32 independentBlend_ ) VULKAN_HPP_NOEXCEPT
+    {
+      independentBlend = independentBlend_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setGeometryShader( VULKAN_HPP_NAMESPACE::Bool32 geometryShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      geometryShader = geometryShader_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 tessellationShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tessellationShader = tessellationShader_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSampleRateShading( VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sampleRateShading = sampleRateShading_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setDualSrcBlend( VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dualSrcBlend = dualSrcBlend_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setLogicOp( VULKAN_HPP_NAMESPACE::Bool32 logicOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      logicOp = logicOp_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setMultiDrawIndirect( VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect_ ) VULKAN_HPP_NOEXCEPT
+    {
+      multiDrawIndirect = multiDrawIndirect_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setDrawIndirectFirstInstance( VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance_ ) VULKAN_HPP_NOEXCEPT
+    {
+      drawIndirectFirstInstance = drawIndirectFirstInstance_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setDepthClamp( VULKAN_HPP_NAMESPACE::Bool32 depthClamp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthClamp = depthClamp_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setDepthBiasClamp( VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthBiasClamp = depthBiasClamp_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setFillModeNonSolid( VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fillModeNonSolid = fillModeNonSolid_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setDepthBounds( VULKAN_HPP_NAMESPACE::Bool32 depthBounds_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthBounds = depthBounds_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setWideLines( VULKAN_HPP_NAMESPACE::Bool32 wideLines_ ) VULKAN_HPP_NOEXCEPT
+    {
+      wideLines = wideLines_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setLargePoints( VULKAN_HPP_NAMESPACE::Bool32 largePoints_ ) VULKAN_HPP_NOEXCEPT
+    {
+      largePoints = largePoints_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setAlphaToOne( VULKAN_HPP_NAMESPACE::Bool32 alphaToOne_ ) VULKAN_HPP_NOEXCEPT
+    {
+      alphaToOne = alphaToOne_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setMultiViewport( VULKAN_HPP_NAMESPACE::Bool32 multiViewport_ ) VULKAN_HPP_NOEXCEPT
+    {
+      multiViewport = multiViewport_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSamplerAnisotropy( VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy_ ) VULKAN_HPP_NOEXCEPT
+    {
+      samplerAnisotropy = samplerAnisotropy_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setTextureCompressionETC2( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2_ ) VULKAN_HPP_NOEXCEPT
+    {
+      textureCompressionETC2 = textureCompressionETC2_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setTextureCompressionASTC_LDR( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR_ ) VULKAN_HPP_NOEXCEPT
+    {
+      textureCompressionASTC_LDR = textureCompressionASTC_LDR_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setTextureCompressionBC( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC_ ) VULKAN_HPP_NOEXCEPT
+    {
+      textureCompressionBC = textureCompressionBC_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setOcclusionQueryPrecise( VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise_ ) VULKAN_HPP_NOEXCEPT
+    {
+      occlusionQueryPrecise = occlusionQueryPrecise_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setPipelineStatisticsQuery( VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineStatisticsQuery = pipelineStatisticsQuery_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setVertexPipelineStoresAndAtomics( VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexPipelineStoresAndAtomics = vertexPipelineStoresAndAtomics_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setFragmentStoresAndAtomics( VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fragmentStoresAndAtomics = fragmentStoresAndAtomics_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderTessellationAndGeometryPointSize( VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderTessellationAndGeometryPointSize = shaderTessellationAndGeometryPointSize_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderImageGatherExtended( VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderImageGatherExtended = shaderImageGatherExtended_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderStorageImageExtendedFormats( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderStorageImageExtendedFormats = shaderStorageImageExtendedFormats_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderStorageImageMultisample( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderStorageImageMultisample = shaderStorageImageMultisample_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderStorageImageReadWithoutFormat( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderStorageImageReadWithoutFormat = shaderStorageImageReadWithoutFormat_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderStorageImageWriteWithoutFormat( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderStorageImageWriteWithoutFormat = shaderStorageImageWriteWithoutFormat_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderUniformBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderUniformBufferArrayDynamicIndexing = shaderUniformBufferArrayDynamicIndexing_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderSampledImageArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSampledImageArrayDynamicIndexing = shaderSampledImageArrayDynamicIndexing_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderStorageBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderStorageBufferArrayDynamicIndexing = shaderStorageBufferArrayDynamicIndexing_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderStorageImageArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderStorageImageArrayDynamicIndexing = shaderStorageImageArrayDynamicIndexing_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderClipDistance( VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderClipDistance = shaderClipDistance_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderCullDistance( VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderCullDistance = shaderCullDistance_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderFloat64( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderFloat64 = shaderFloat64_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderInt64( VULKAN_HPP_NAMESPACE::Bool32 shaderInt64_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderInt64 = shaderInt64_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderInt16( VULKAN_HPP_NAMESPACE::Bool32 shaderInt16_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderInt16 = shaderInt16_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderResourceResidency( VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderResourceResidency = shaderResourceResidency_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderResourceMinLod( VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderResourceMinLod = shaderResourceMinLod_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseBinding( VULKAN_HPP_NAMESPACE::Bool32 sparseBinding_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sparseBinding = sparseBinding_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidencyBuffer( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sparseResidencyBuffer = sparseResidencyBuffer_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidencyImage2D( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sparseResidencyImage2D = sparseResidencyImage2D_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidencyImage3D( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sparseResidencyImage3D = sparseResidencyImage3D_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidency2Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sparseResidency2Samples = sparseResidency2Samples_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidency4Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sparseResidency4Samples = sparseResidency4Samples_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidency8Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sparseResidency8Samples = sparseResidency8Samples_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidency16Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sparseResidency16Samples = sparseResidency16Samples_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidencyAliased( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sparseResidencyAliased = sparseResidencyAliased_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setVariableMultisampleRate( VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate_ ) VULKAN_HPP_NOEXCEPT
+    {
+      variableMultisampleRate = variableMultisampleRate_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setInheritedQueries( VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      inheritedQueries = inheritedQueries_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceFeatures const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceFeatures*>( this );
+    }
+
+    operator VkPhysicalDeviceFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceFeatures*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, 
VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( robustBufferAccess, fullDrawIndexUint32, imageCubeArray, independentBlend, geometryShader, tessellationShader, sampleRateShading, dualSrcBlend, logicOp, multiDrawIndirect, drawIndirectFirstInstance, depthClamp, depthBiasClamp, fillModeNonSolid, depthBounds, wideLines, largePoints, alphaToOne, multiViewport, samplerAnisotropy, textureCompressionETC2, textureCompressionASTC_LDR, textureCompressionBC, occlusionQueryPrecise, pipelineStatisticsQuery, vertexPipelineStoresAndAtomics, fragmentStoresAndAtomics, shaderTessellationAndGeometryPointSize, shaderImageGatherExtended, shaderStorageImageExtendedFormats, shaderStorageImageMultisample, shaderStorageImageReadWithoutFormat, shaderStorageImageWriteWithoutFormat, shaderUniformBufferArrayDynamicIndexing, shaderSampledImageArrayDynamicIndexing, shaderStorageBufferArrayDynamicIndexing, shaderStorageImageArrayDynamicIndexing, shaderClipDistance, shaderCullDistance, shaderFloat64, shaderInt64, shaderInt16, shaderResourceResidency, shaderResourceMinLod, sparseBinding, sparseResidencyBuffer, sparseResidencyImage2D, sparseResidencyImage3D, sparseResidency2Samples, sparseResidency4Samples, sparseResidency8Samples, sparseResidency16Samples, sparseResidencyAliased, variableMultisampleRate, inheritedQueries );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceFeatures const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( robustBufferAccess == rhs.robustBufferAccess )
+          && ( fullDrawIndexUint32 == rhs.fullDrawIndexUint32 )
+          && ( imageCubeArray == rhs.imageCubeArray )
+          && ( independentBlend == rhs.independentBlend )
+          && ( geometryShader == rhs.geometryShader )
+          && ( tessellationShader == rhs.tessellationShader )
+          && ( sampleRateShading == rhs.sampleRateShading )
+          && ( dualSrcBlend == rhs.dualSrcBlend )
+          && ( logicOp == rhs.logicOp )
+          && ( multiDrawIndirect == rhs.multiDrawIndirect )
+          && ( drawIndirectFirstInstance == rhs.drawIndirectFirstInstance )
+          && ( depthClamp == rhs.depthClamp )
+          && ( depthBiasClamp == rhs.depthBiasClamp )
+          && ( fillModeNonSolid == rhs.fillModeNonSolid )
+          && ( depthBounds == rhs.depthBounds )
+          && ( wideLines == rhs.wideLines )
+          && ( largePoints == rhs.largePoints )
+          && ( alphaToOne == rhs.alphaToOne )
+          && ( multiViewport == rhs.multiViewport )
+          && ( samplerAnisotropy == rhs.samplerAnisotropy )
+          && ( textureCompressionETC2 == rhs.textureCompressionETC2 )
+          && ( textureCompressionASTC_LDR == rhs.textureCompressionASTC_LDR )
+          && ( textureCompressionBC == rhs.textureCompressionBC )
+          && ( occlusionQueryPrecise == rhs.occlusionQueryPrecise )
+          && ( pipelineStatisticsQuery == rhs.pipelineStatisticsQuery )
+          && ( vertexPipelineStoresAndAtomics == rhs.vertexPipelineStoresAndAtomics )
+          && ( fragmentStoresAndAtomics == rhs.fragmentStoresAndAtomics )
+          && ( shaderTessellationAndGeometryPointSize == rhs.shaderTessellationAndGeometryPointSize )
+          && ( shaderImageGatherExtended == rhs.shaderImageGatherExtended )
+          && ( shaderStorageImageExtendedFormats == rhs.shaderStorageImageExtendedFormats )
+          && ( shaderStorageImageMultisample == rhs.shaderStorageImageMultisample )
+          && ( shaderStorageImageReadWithoutFormat == rhs.shaderStorageImageReadWithoutFormat )
+          && ( shaderStorageImageWriteWithoutFormat == rhs.shaderStorageImageWriteWithoutFormat )
+          && ( shaderUniformBufferArrayDynamicIndexing == rhs.shaderUniformBufferArrayDynamicIndexing )
+          && ( shaderSampledImageArrayDynamicIndexing == rhs.shaderSampledImageArrayDynamicIndexing )
+          && ( shaderStorageBufferArrayDynamicIndexing == rhs.shaderStorageBufferArrayDynamicIndexing )
+          && ( shaderStorageImageArrayDynamicIndexing == rhs.shaderStorageImageArrayDynamicIndexing )
+          && ( shaderClipDistance == rhs.shaderClipDistance )
+          && ( shaderCullDistance == rhs.shaderCullDistance )
+          && ( shaderFloat64 == rhs.shaderFloat64 )
+          && ( shaderInt64 == rhs.shaderInt64 )
+          && ( shaderInt16 == rhs.shaderInt16 )
+          && ( shaderResourceResidency == rhs.shaderResourceResidency )
+          && ( shaderResourceMinLod == rhs.shaderResourceMinLod )
+          && ( sparseBinding == rhs.sparseBinding )
+          && ( sparseResidencyBuffer == rhs.sparseResidencyBuffer )
+          && ( sparseResidencyImage2D == rhs.sparseResidencyImage2D )
+          && ( sparseResidencyImage3D == rhs.sparseResidencyImage3D )
+          && ( sparseResidency2Samples == rhs.sparseResidency2Samples )
+          && ( sparseResidency4Samples == rhs.sparseResidency4Samples )
+          && ( sparseResidency8Samples == rhs.sparseResidency8Samples )
+          && ( sparseResidency16Samples == rhs.sparseResidency16Samples )
+          && ( sparseResidencyAliased == rhs.sparseResidencyAliased )
+          && ( variableMultisampleRate == rhs.variableMultisampleRate )
+          && ( inheritedQueries == rhs.inheritedQueries );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess = {};
+    VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray = {};
+    VULKAN_HPP_NAMESPACE::Bool32 independentBlend = {};
+    VULKAN_HPP_NAMESPACE::Bool32 geometryShader = {};
+    VULKAN_HPP_NAMESPACE::Bool32 tessellationShader = {};
+    VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading = {};
+    VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend = {};
+    VULKAN_HPP_NAMESPACE::Bool32 logicOp = {};
+    VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect = {};
+    VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance = {};
+    VULKAN_HPP_NAMESPACE::Bool32 depthClamp = {};
+    VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp = {};
+    VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid = {};
+    VULKAN_HPP_NAMESPACE::Bool32 depthBounds = {};
+    VULKAN_HPP_NAMESPACE::Bool32 wideLines = {};
+    VULKAN_HPP_NAMESPACE::Bool32 largePoints = {};
+    VULKAN_HPP_NAMESPACE::Bool32 alphaToOne = {};
+    VULKAN_HPP_NAMESPACE::Bool32 multiViewport = {};
+    VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy = {};
+    VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR = {};
+    VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC = {};
+    VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise = {};
+    VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery = {};
+    VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics = {};
+    VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderInt64 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderInt16 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod = {};
+    VULKAN_HPP_NAMESPACE::Bool32 sparseBinding = {};
+    VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer = {};
+    VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D = {};
+    VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D = {};
+    VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples = {};
+    VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples = {};
+    VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples = {};
+    VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples = {};
+    VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased = {};
+    VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate = {};
+    VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries = {};
+
+  };
+
+  // C++ wrapper for VkDeviceCreateInfo, the parameter struct of vkCreateDevice.
+  // Layout-identical to the C struct, so the reinterpret_casts below between the
+  // two representations are valid.
+  struct DeviceCreateInfo
+  {
+    using NativeType = VkDeviceCreateInfo;
+
+    // sType value this struct carries; allowDuplicate == false means it may not
+    // appear more than once in a pNext chain.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceCreateInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Member-wise constructor; counts and pointers are passed through verbatim.
+VULKAN_HPP_CONSTEXPR DeviceCreateInfo(VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_ = {}, uint32_t queueCreateInfoCount_ = {}, const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo * pQueueCreateInfos_ = {}, uint32_t enabledLayerCount_ = {}, const char * const * ppEnabledLayerNames_ = {}, uint32_t enabledExtensionCount_ = {}, const char * const * ppEnabledExtensionNames_ = {}, const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pEnabledFeatures_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), queueCreateInfoCount( queueCreateInfoCount_ ), pQueueCreateInfos( pQueueCreateInfos_ ), enabledLayerCount( enabledLayerCount_ ), ppEnabledLayerNames( ppEnabledLayerNames_ ), enabledExtensionCount( enabledExtensionCount_ ), ppEnabledExtensionNames( ppEnabledExtensionNames_ ), pEnabledFeatures( pEnabledFeatures_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DeviceCreateInfo( DeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; relies on the identical memory layout.
+    DeviceCreateInfo( VkDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DeviceCreateInfo( *reinterpret_cast<DeviceCreateInfo const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Enhanced-mode convenience constructor: the three count members are derived
+    // from the sizes of the ArrayProxy arguments, so they cannot get out of sync.
+    DeviceCreateInfo( VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo> const & queueCreateInfos_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledLayerNames_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledExtensionNames_ = {}, const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pEnabledFeatures_ = {}, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), flags( flags_ ), queueCreateInfoCount( static_cast<uint32_t>( queueCreateInfos_.size() ) ), pQueueCreateInfos( queueCreateInfos_.data() ), enabledLayerCount( static_cast<uint32_t>( pEnabledLayerNames_.size() ) ), ppEnabledLayerNames( pEnabledLayerNames_.data() ), enabledExtensionCount( static_cast<uint32_t>( pEnabledExtensionNames_.size() ) ), ppEnabledExtensionNames( pEnabledExtensionNames_.data() ), pEnabledFeatures( pEnabledFeatures_ )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    DeviceCreateInfo & operator=( DeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct; again relies on identical layout.
+    DeviceCreateInfo & operator=( VkDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable per-member setters (builder style); each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setQueueCreateInfoCount( uint32_t queueCreateInfoCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueCreateInfoCount = queueCreateInfoCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setPQueueCreateInfos( const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo * pQueueCreateInfos_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pQueueCreateInfos = pQueueCreateInfos_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Enhanced-mode setter: sets count and pointer together from one ArrayProxy.
+    DeviceCreateInfo & setQueueCreateInfos( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo> const & queueCreateInfos_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueCreateInfoCount = static_cast<uint32_t>( queueCreateInfos_.size() );
+      pQueueCreateInfos = queueCreateInfos_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setEnabledLayerCount( uint32_t enabledLayerCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      enabledLayerCount = enabledLayerCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setPpEnabledLayerNames( const char * const * ppEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT
+    {
+      ppEnabledLayerNames = ppEnabledLayerNames_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Enhanced-mode setter: sets layer count and name array together.
+    DeviceCreateInfo & setPEnabledLayerNames( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT
+    {
+      enabledLayerCount = static_cast<uint32_t>( pEnabledLayerNames_.size() );
+      ppEnabledLayerNames = pEnabledLayerNames_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setEnabledExtensionCount( uint32_t enabledExtensionCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      enabledExtensionCount = enabledExtensionCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setPpEnabledExtensionNames( const char * const * ppEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT
+    {
+      ppEnabledExtensionNames = ppEnabledExtensionNames_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Enhanced-mode setter: sets extension count and name array together.
+    DeviceCreateInfo & setPEnabledExtensionNames( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT
+    {
+      enabledExtensionCount = static_cast<uint32_t>( pEnabledExtensionNames_.size() );
+      ppEnabledExtensionNames = pEnabledExtensionNames_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setPEnabledFeatures( const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pEnabledFeatures_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pEnabledFeatures = pEnabledFeatures_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the C struct (const and mutable), layout-based.
+    operator VkDeviceCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceCreateInfo*>( this );
+    }
+
+    operator VkDeviceCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceCreateInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceCreateFlags const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo * const &, uint32_t const &, const char * const * const &, uint32_t const &, const char * const * const &, const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * const &>
+#endif
+      // Returns all members as a tuple of references (reflection support).
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, queueCreateInfoCount, pQueueCreateInfos, enabledLayerCount, ppEnabledLayerNames, enabledExtensionCount, ppEnabledExtensionNames, pEnabledFeatures );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    // Ordering: shallow pointer comparison for pNext / pQueueCreateInfos /
+    // pEnabledFeatures, but deep element-wise strcmp over the layer and
+    // extension name arrays (pointer-unequal entries are string-compared).
+    // NOTE(review): entries that differ by pointer are assumed non-null —
+    // strcmp on a null name would be UB; confirm against the upstream generator.
+    std::strong_ordering operator<=>( DeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
+      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
+      if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) return cmp;
+      if ( auto cmp = queueCreateInfoCount <=> rhs.queueCreateInfoCount; cmp != 0 ) return cmp;
+      if ( auto cmp = pQueueCreateInfos <=> rhs.pQueueCreateInfos; cmp != 0 ) return cmp;
+      if ( auto cmp = enabledLayerCount <=> rhs.enabledLayerCount; cmp != 0 ) return cmp;
+      for ( size_t i = 0; i < enabledLayerCount; ++i )
+      {
+        if ( ppEnabledLayerNames[i] != rhs.ppEnabledLayerNames[i] )
+          if ( auto cmp = strcmp( ppEnabledLayerNames[i], rhs.ppEnabledLayerNames[i] ); cmp != 0 )
+            return cmp < 0 ? std::strong_ordering::less : std::strong_ordering::greater;
+      }
+      if ( auto cmp = enabledExtensionCount <=> rhs.enabledExtensionCount; cmp != 0 ) return cmp;
+      for ( size_t i = 0; i < enabledExtensionCount; ++i )
+      {
+        if ( ppEnabledExtensionNames[i] != rhs.ppEnabledExtensionNames[i] )
+          if ( auto cmp = strcmp( ppEnabledExtensionNames[i], rhs.ppEnabledExtensionNames[i] ); cmp != 0 )
+            return cmp < 0 ? std::strong_ordering::less : std::strong_ordering::greater;
+      }
+      if ( auto cmp = pEnabledFeatures <=> rhs.pEnabledFeatures; cmp != 0 ) return cmp;
+
+      return std::strong_ordering::equivalent;
+    }
+#endif
+
+    // Equality mirrors <=>: name arrays compared element-wise, where each pair
+    // matches on pointer equality (covers both-null) or strcmp == 0.
+    bool operator==( DeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( queueCreateInfoCount == rhs.queueCreateInfoCount )
+          && ( pQueueCreateInfos == rhs.pQueueCreateInfos )
+          && ( enabledLayerCount == rhs.enabledLayerCount )
+          && std::equal( ppEnabledLayerNames, ppEnabledLayerNames + enabledLayerCount, rhs.ppEnabledLayerNames, []( char const * left, char const * right ) { return ( left == right ) || ( strcmp( left, right ) == 0 ); } )
+          && ( enabledExtensionCount == rhs.enabledExtensionCount )
+          && std::equal( ppEnabledExtensionNames, ppEnabledExtensionNames + enabledExtensionCount, rhs.ppEnabledExtensionNames, []( char const * left, char const * right ) { return ( left == right ) || ( strcmp( left, right ) == 0 ); } )
+          && ( pEnabledFeatures == rhs.pEnabledFeatures );
+    }
+
+    bool operator!=( DeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+    // Members mirror VkDeviceCreateInfo field-for-field (same order and types).
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags = {};
+    uint32_t queueCreateInfoCount = {};
+    const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo * pQueueCreateInfos = {};
+    uint32_t enabledLayerCount = {};
+    const char * const * ppEnabledLayerNames = {};
+    uint32_t enabledExtensionCount = {};
+    const char * const * ppEnabledExtensionNames = {};
+    const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pEnabledFeatures = {};
+
+  };
+
+  // Maps StructureType::eDeviceCreateInfo back to its C++ wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eDeviceCreateInfo>
+  {
+    using Type = DeviceCreateInfo;
+  };
+
+  // C++ wrapper for VkDeviceDeviceMemoryReportCreateInfoEXT: carries a
+  // device-memory-report callback pointer plus opaque user data.
+  // Layout-identical to the C struct.
+  struct DeviceDeviceMemoryReportCreateInfoEXT
+  {
+    using NativeType = VkDeviceDeviceMemoryReportCreateInfoEXT;
+
+    // allowDuplicate == true: this struct may appear more than once in a pNext chain.
+    static const bool allowDuplicate = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceDeviceMemoryReportCreateInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Member-wise constructor; all values passed through verbatim.
+VULKAN_HPP_CONSTEXPR DeviceDeviceMemoryReportCreateInfoEXT(VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags_ = {}, PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback_ = {}, void * pUserData_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), pfnUserCallback( pfnUserCallback_ ), pUserData( pUserData_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DeviceDeviceMemoryReportCreateInfoEXT( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; relies on identical layout.
+    DeviceDeviceMemoryReportCreateInfoEXT( VkDeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DeviceDeviceMemoryReportCreateInfoEXT( *reinterpret_cast<DeviceDeviceMemoryReportCreateInfoEXT const *>( &rhs ) )
+    {}
+
+
+    DeviceDeviceMemoryReportCreateInfoEXT & operator=( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct; relies on identical layout.
+    DeviceDeviceMemoryReportCreateInfoEXT & operator=( VkDeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceDeviceMemoryReportCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable per-member setters; each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 DeviceDeviceMemoryReportCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceDeviceMemoryReportCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceDeviceMemoryReportCreateInfoEXT & setPfnUserCallback( PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pfnUserCallback = pfnUserCallback_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceDeviceMemoryReportCreateInfoEXT & setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pUserData = pUserData_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the C struct (const and mutable), layout-based.
+    operator VkDeviceDeviceMemoryReportCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceDeviceMemoryReportCreateInfoEXT*>( this );
+    }
+
+    operator VkDeviceDeviceMemoryReportCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceDeviceMemoryReportCreateInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT const &, PFN_vkDeviceMemoryReportCallbackEXT const &, void * const &>
+#endif
+      // Returns all members as a tuple of references (reflection support).
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, pfnUserCallback, pUserData );
+    }
+#endif
+
+
+
+
+
+    // Shallow member-wise equality (callback and user-data pointers compared by value).
+    // NOTE(review): unlike neighbouring structs, no operator<=> is emitted here —
+    // presumably because function-pointer members have no three-way ordering.
+    bool operator==( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( pfnUserCallback == rhs.pfnUserCallback )
+          && ( pUserData == rhs.pUserData );
+#endif
+    }
+
+    bool operator!=( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+    // Members mirror the C struct field-for-field.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceDeviceMemoryReportCreateInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags = {};
+    PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback = {};
+    void * pUserData = {};
+
+  };
+
+  // Maps StructureType::eDeviceDeviceMemoryReportCreateInfoEXT back to its wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eDeviceDeviceMemoryReportCreateInfoEXT>
+  {
+    using Type = DeviceDeviceMemoryReportCreateInfoEXT;
+  };
+
+  // C++ wrapper for VkDeviceDiagnosticsConfigCreateInfoNV: a flags-only
+  // create-info struct. Layout-identical to the C struct.
+  struct DeviceDiagnosticsConfigCreateInfoNV
+  {
+    using NativeType = VkDeviceDiagnosticsConfigCreateInfoNV;
+
+    // allowDuplicate == false: at most one instance per pNext chain.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceDiagnosticsConfigCreateInfoNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Member-wise constructor.
+VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigCreateInfoNV(VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigCreateInfoNV( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; relies on identical layout.
+    DeviceDiagnosticsConfigCreateInfoNV( VkDeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DeviceDiagnosticsConfigCreateInfoNV( *reinterpret_cast<DeviceDiagnosticsConfigCreateInfoNV const *>( &rhs ) )
+    {}
+
+
+    DeviceDiagnosticsConfigCreateInfoNV & operator=( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct; relies on identical layout.
+    DeviceDiagnosticsConfigCreateInfoNV & operator=( VkDeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigCreateInfoNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable per-member setters; each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 DeviceDiagnosticsConfigCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceDiagnosticsConfigCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the C struct (const and mutable), layout-based.
+    operator VkDeviceDiagnosticsConfigCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceDiagnosticsConfigCreateInfoNV*>( this );
+    }
+
+    operator VkDeviceDiagnosticsConfigCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceDiagnosticsConfigCreateInfoNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV const &>
+#endif
+      // Returns all members as a tuple of references (reflection support).
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+// Defaulted <=> gives member-wise ordering and implies == / !=.
+auto operator<=>( DeviceDiagnosticsConfigCreateInfoNV const & ) const = default;
+#else
+    // Pre-C++20 fallback: explicit member-wise equality.
+    bool operator==( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags );
+#endif
+    }
+
+    bool operator!=( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members mirror the C struct field-for-field.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceDiagnosticsConfigCreateInfoNV;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags = {};
+
+  };
+
+  // Maps StructureType::eDeviceDiagnosticsConfigCreateInfoNV back to its wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eDeviceDiagnosticsConfigCreateInfoNV>
+  {
+    using Type = DeviceDiagnosticsConfigCreateInfoNV;
+  };
+
+  // C++ wrapper for VkDeviceEventInfoEXT: selects a device event type
+  // (default eDisplayHotplug). Layout-identical to the C struct.
+  struct DeviceEventInfoEXT
+  {
+    using NativeType = VkDeviceEventInfoEXT;
+
+    // allowDuplicate == false: at most one instance per pNext chain.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceEventInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Member-wise constructor.
+VULKAN_HPP_CONSTEXPR DeviceEventInfoEXT(VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent_ = VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT::eDisplayHotplug, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), deviceEvent( deviceEvent_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DeviceEventInfoEXT( DeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; relies on identical layout.
+    DeviceEventInfoEXT( VkDeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DeviceEventInfoEXT( *reinterpret_cast<DeviceEventInfoEXT const *>( &rhs ) )
+    {}
+
+
+    DeviceEventInfoEXT & operator=( DeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct; relies on identical layout.
+    DeviceEventInfoEXT & operator=( VkDeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable per-member setters; each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 DeviceEventInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceEventInfoEXT & setDeviceEvent( VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceEvent = deviceEvent_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the C struct (const and mutable), layout-based.
+    operator VkDeviceEventInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceEventInfoEXT*>( this );
+    }
+
+    operator VkDeviceEventInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceEventInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT const &>
+#endif
+      // Returns all members as a tuple of references (reflection support).
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, deviceEvent );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+// Defaulted <=> gives member-wise ordering and implies == / !=.
+auto operator<=>( DeviceEventInfoEXT const & ) const = default;
+#else
+    // Pre-C++20 fallback: explicit member-wise equality.
+    bool operator==( DeviceEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( deviceEvent == rhs.deviceEvent );
+#endif
+    }
+
+    bool operator!=( DeviceEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members mirror the C struct field-for-field.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceEventInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent = VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT::eDisplayHotplug;
+
+  };
+
+  // Maps StructureType::eDeviceEventInfoEXT back to its wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eDeviceEventInfoEXT>
+  {
+    using Type = DeviceEventInfoEXT;
+  };
+
+  // C++ wrapper for VkDeviceFaultAddressInfoEXT: one faulting-address record
+  // (type, reported address, precision). Unlike its neighbours this struct has
+  // no sType/pNext members — it is not a pNext-chainable structure, hence no
+  // structureType/allowDuplicate statics and no CppType specialization.
+  struct DeviceFaultAddressInfoEXT
+  {
+    using NativeType = VkDeviceFaultAddressInfoEXT;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Member-wise constructor.
+VULKAN_HPP_CONSTEXPR DeviceFaultAddressInfoEXT(VULKAN_HPP_NAMESPACE::DeviceFaultAddressTypeEXT addressType_ = VULKAN_HPP_NAMESPACE::DeviceFaultAddressTypeEXT::eNone, VULKAN_HPP_NAMESPACE::DeviceAddress reportedAddress_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize addressPrecision_ = {}) VULKAN_HPP_NOEXCEPT
+    : addressType( addressType_ ), reportedAddress( reportedAddress_ ), addressPrecision( addressPrecision_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DeviceFaultAddressInfoEXT( DeviceFaultAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; relies on identical layout.
+    DeviceFaultAddressInfoEXT( VkDeviceFaultAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DeviceFaultAddressInfoEXT( *reinterpret_cast<DeviceFaultAddressInfoEXT const *>( &rhs ) )
+    {}
+
+
+    DeviceFaultAddressInfoEXT & operator=( DeviceFaultAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct; relies on identical layout.
+    DeviceFaultAddressInfoEXT & operator=( VkDeviceFaultAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable per-member setters; each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 DeviceFaultAddressInfoEXT & setAddressType( VULKAN_HPP_NAMESPACE::DeviceFaultAddressTypeEXT addressType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      addressType = addressType_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceFaultAddressInfoEXT & setReportedAddress( VULKAN_HPP_NAMESPACE::DeviceAddress reportedAddress_ ) VULKAN_HPP_NOEXCEPT
+    {
+      reportedAddress = reportedAddress_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceFaultAddressInfoEXT & setAddressPrecision( VULKAN_HPP_NAMESPACE::DeviceSize addressPrecision_ ) VULKAN_HPP_NOEXCEPT
+    {
+      addressPrecision = addressPrecision_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the C struct (const and mutable), layout-based.
+    operator VkDeviceFaultAddressInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceFaultAddressInfoEXT*>( this );
+    }
+
+    operator VkDeviceFaultAddressInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceFaultAddressInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::DeviceFaultAddressTypeEXT const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
+#endif
+      // Returns all members as a tuple of references (reflection support).
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( addressType, reportedAddress, addressPrecision );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+// Defaulted <=> gives member-wise ordering and implies == / !=.
+auto operator<=>( DeviceFaultAddressInfoEXT const & ) const = default;
+#else
+    // Pre-C++20 fallback: explicit member-wise equality.
+    bool operator==( DeviceFaultAddressInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( addressType == rhs.addressType )
+          && ( reportedAddress == rhs.reportedAddress )
+          && ( addressPrecision == rhs.addressPrecision );
+#endif
+    }
+
+    bool operator!=( DeviceFaultAddressInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members mirror the C struct field-for-field.
+    public:
+    VULKAN_HPP_NAMESPACE::DeviceFaultAddressTypeEXT addressType = VULKAN_HPP_NAMESPACE::DeviceFaultAddressTypeEXT::eNone;
+    VULKAN_HPP_NAMESPACE::DeviceAddress reportedAddress = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize addressPrecision = {};
+
+  };
+
+  // C++ wrapper around VkDeviceFaultCountsEXT (VK_EXT_device_fault): carries the
+  // number of address/vendor fault records and the vendor binary blob size for a
+  // device-fault query.  Layout-compatible with the C struct, so the conversion
+  // operators below are plain reinterpret_casts.
+  struct DeviceFaultCountsEXT
+  {
+    using NativeType = VkDeviceFaultCountsEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceFaultCountsEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; sType is fixed by the default member initializer below.
+VULKAN_HPP_CONSTEXPR DeviceFaultCountsEXT(uint32_t addressInfoCount_ = {}, uint32_t vendorInfoCount_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize vendorBinarySize_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), addressInfoCount( addressInfoCount_ ), vendorInfoCount( vendorInfoCount_ ), vendorBinarySize( vendorBinarySize_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DeviceFaultCountsEXT( DeviceFaultCountsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; relies on identical memory layout.
+    DeviceFaultCountsEXT( VkDeviceFaultCountsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DeviceFaultCountsEXT( *reinterpret_cast<DeviceFaultCountsEXT const *>( &rhs ) )
+    {}
+
+
+    DeviceFaultCountsEXT & operator=( DeviceFaultCountsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (same layout-compatibility assumption as above).
+    DeviceFaultCountsEXT & operator=( VkDeviceFaultCountsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each returns *this so calls can be chained.
+    VULKAN_HPP_CONSTEXPR_14 DeviceFaultCountsEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceFaultCountsEXT & setAddressInfoCount( uint32_t addressInfoCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      addressInfoCount = addressInfoCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceFaultCountsEXT & setVendorInfoCount( uint32_t vendorInfoCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vendorInfoCount = vendorInfoCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceFaultCountsEXT & setVendorBinarySize( VULKAN_HPP_NAMESPACE::DeviceSize vendorBinarySize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vendorBinarySize = vendorBinarySize_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions so the wrapper can be passed straight to the C API.
+    operator VkDeviceFaultCountsEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceFaultCountsEXT*>( this );
+    }
+
+    operator VkDeviceFaultCountsEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceFaultCountsEXT*>( this );
+    }
+
+    // reflect(): tuple of member references, reused by operator== below.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, addressInfoCount, vendorInfoCount, vendorBinarySize );
+    }
+#endif
+
+
+    // Member-wise comparison; defaulted <=> when the compiler supports it.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DeviceFaultCountsEXT const & ) const = default;
+#else
+    bool operator==( DeviceFaultCountsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( addressInfoCount == rhs.addressInfoCount )
+          && ( vendorInfoCount == rhs.vendorInfoCount )
+          && ( vendorBinarySize == rhs.vendorBinarySize );
+#endif
+    }
+
+    bool operator!=( DeviceFaultCountsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Data members — order and types mirror VkDeviceFaultCountsEXT.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceFaultCountsEXT;
+    void * pNext = {};
+    uint32_t addressInfoCount = {};
+    uint32_t vendorInfoCount = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize vendorBinarySize = {};
+
+  };
+
+  // Maps StructureType::eDeviceFaultCountsEXT back to this C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::eDeviceFaultCountsEXT>
+  {
+    using Type = DeviceFaultCountsEXT;
+  };
+
+  // C++ wrapper around VkDeviceFaultVendorInfoEXT (VK_EXT_device_fault): one
+  // vendor-specific fault record (description string plus two opaque 64-bit
+  // vendor values).  Plain data struct — no sType/pNext, so no CppType mapping.
+  struct DeviceFaultVendorInfoEXT
+  {
+    using NativeType = VkDeviceFaultVendorInfoEXT;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; VULKAN_HPP_CONSTEXPR_14 because the array copy needs C++14 constexpr.
+VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorInfoEXT(std::array<char,VK_MAX_DESCRIPTION_SIZE> const & description_ = {}, uint64_t vendorFaultCode_ = {}, uint64_t vendorFaultData_ = {}) VULKAN_HPP_NOEXCEPT
+    : description( description_ ), vendorFaultCode( vendorFaultCode_ ), vendorFaultData( vendorFaultData_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorInfoEXT( DeviceFaultVendorInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; relies on identical memory layout.
+    DeviceFaultVendorInfoEXT( VkDeviceFaultVendorInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DeviceFaultVendorInfoEXT( *reinterpret_cast<DeviceFaultVendorInfoEXT const *>( &rhs ) )
+    {}
+
+
+    DeviceFaultVendorInfoEXT & operator=( DeviceFaultVendorInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (same layout-compatibility assumption as above).
+    DeviceFaultVendorInfoEXT & operator=( VkDeviceFaultVendorInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each returns *this so calls can be chained.
+    VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorInfoEXT & setDescription( std::array<char,VK_MAX_DESCRIPTION_SIZE> description_ ) VULKAN_HPP_NOEXCEPT
+    {
+      description = description_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorInfoEXT & setVendorFaultCode( uint64_t vendorFaultCode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vendorFaultCode = vendorFaultCode_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorInfoEXT & setVendorFaultData( uint64_t vendorFaultData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vendorFaultData = vendorFaultData_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions so the wrapper can be passed straight to the C API.
+    operator VkDeviceFaultVendorInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceFaultVendorInfoEXT*>( this );
+    }
+
+    operator VkDeviceFaultVendorInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceFaultVendorInfoEXT*>( this );
+    }
+
+    // reflect(): tuple of member references, reused by operator== below.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &, uint64_t const &, uint64_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( description, vendorFaultCode, vendorFaultData );
+    }
+#endif
+
+
+    // Member-wise comparison; defaulted <=> when the compiler supports it.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DeviceFaultVendorInfoEXT const & ) const = default;
+#else
+    bool operator==( DeviceFaultVendorInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( description == rhs.description )
+          && ( vendorFaultCode == rhs.vendorFaultCode )
+          && ( vendorFaultData == rhs.vendorFaultData );
+#endif
+    }
+
+    bool operator!=( DeviceFaultVendorInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Data members — order and types mirror VkDeviceFaultVendorInfoEXT.
+    public:
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
+    uint64_t vendorFaultCode = {};
+    uint64_t vendorFaultData = {};
+
+  };
+
+  // C++ wrapper around VkDeviceFaultInfoEXT (VK_EXT_device_fault): the main
+  // fault report — a description string plus caller-provided pointers to the
+  // address-info array, vendor-info array, and an opaque vendor binary blob.
+  struct DeviceFaultInfoEXT
+  {
+    using NativeType = VkDeviceFaultInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceFaultInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; sType is fixed by the default member initializer below.
+VULKAN_HPP_CONSTEXPR_14 DeviceFaultInfoEXT(std::array<char,VK_MAX_DESCRIPTION_SIZE> const & description_ = {}, VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT * pAddressInfos_ = {}, VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT * pVendorInfos_ = {}, void * pVendorBinaryData_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), description( description_ ), pAddressInfos( pAddressInfos_ ), pVendorInfos( pVendorInfos_ ), pVendorBinaryData( pVendorBinaryData_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceFaultInfoEXT( DeviceFaultInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; relies on identical memory layout.
+    DeviceFaultInfoEXT( VkDeviceFaultInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DeviceFaultInfoEXT( *reinterpret_cast<DeviceFaultInfoEXT const *>( &rhs ) )
+    {}
+
+
+    DeviceFaultInfoEXT & operator=( DeviceFaultInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (same layout-compatibility assumption as above).
+    DeviceFaultInfoEXT & operator=( VkDeviceFaultInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each returns *this so calls can be chained.
+    VULKAN_HPP_CONSTEXPR_14 DeviceFaultInfoEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceFaultInfoEXT & setDescription( std::array<char,VK_MAX_DESCRIPTION_SIZE> description_ ) VULKAN_HPP_NOEXCEPT
+    {
+      description = description_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceFaultInfoEXT & setPAddressInfos( VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT * pAddressInfos_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAddressInfos = pAddressInfos_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceFaultInfoEXT & setPVendorInfos( VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT * pVendorInfos_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pVendorInfos = pVendorInfos_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceFaultInfoEXT & setPVendorBinaryData( void * pVendorBinaryData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pVendorBinaryData = pVendorBinaryData_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions so the wrapper can be passed straight to the C API.
+    operator VkDeviceFaultInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceFaultInfoEXT*>( this );
+    }
+
+    operator VkDeviceFaultInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceFaultInfoEXT*>( this );
+    }
+
+    // reflect(): tuple of member references, reused by operator== below.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &, VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT * const &, VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT * const &, void * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, description, pAddressInfos, pVendorInfos, pVendorBinaryData );
+    }
+#endif
+
+
+    // Member-wise comparison; note pointer members compare by address, not contents.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DeviceFaultInfoEXT const & ) const = default;
+#else
+    bool operator==( DeviceFaultInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( description == rhs.description )
+          && ( pAddressInfos == rhs.pAddressInfos )
+          && ( pVendorInfos == rhs.pVendorInfos )
+          && ( pVendorBinaryData == rhs.pVendorBinaryData );
+#endif
+    }
+
+    bool operator!=( DeviceFaultInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Data members — order and types mirror VkDeviceFaultInfoEXT.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceFaultInfoEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
+    VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT * pAddressInfos = {};
+    VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT * pVendorInfos = {};
+    void * pVendorBinaryData = {};
+
+  };
+
+  // Maps StructureType::eDeviceFaultInfoEXT back to this C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::eDeviceFaultInfoEXT>
+  {
+    using Type = DeviceFaultInfoEXT;
+  };
+
+  // C++ wrapper around VkDeviceFaultVendorBinaryHeaderVersionOneEXT
+  // (VK_EXT_device_fault): the version-one header layout of the vendor binary
+  // crash-dump blob.  Plain data struct — no sType/pNext, no CppType mapping.
+  struct DeviceFaultVendorBinaryHeaderVersionOneEXT
+  {
+    using NativeType = VkDeviceFaultVendorBinaryHeaderVersionOneEXT;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; headerVersion defaults to eOne, matching the struct's name.
+VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT(uint32_t headerSize_ = {}, VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionEXT headerVersion_ = VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionEXT::eOne, uint32_t vendorID_ = {}, uint32_t deviceID_ = {}, uint32_t driverVersion_ = {}, std::array<uint8_t,VK_UUID_SIZE> const & pipelineCacheUUID_ = {}, uint32_t applicationNameOffset_ = {}, uint32_t applicationVersion_ = {}, uint32_t engineNameOffset_ = {}) VULKAN_HPP_NOEXCEPT
+    : headerSize( headerSize_ ), headerVersion( headerVersion_ ), vendorID( vendorID_ ), deviceID( deviceID_ ), driverVersion( driverVersion_ ), pipelineCacheUUID( pipelineCacheUUID_ ), applicationNameOffset( applicationNameOffset_ ), applicationVersion( applicationVersion_ ), engineNameOffset( engineNameOffset_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT( DeviceFaultVendorBinaryHeaderVersionOneEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; relies on identical memory layout.
+    DeviceFaultVendorBinaryHeaderVersionOneEXT( VkDeviceFaultVendorBinaryHeaderVersionOneEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DeviceFaultVendorBinaryHeaderVersionOneEXT( *reinterpret_cast<DeviceFaultVendorBinaryHeaderVersionOneEXT const *>( &rhs ) )
+    {}
+
+
+    DeviceFaultVendorBinaryHeaderVersionOneEXT & operator=( DeviceFaultVendorBinaryHeaderVersionOneEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (same layout-compatibility assumption as above).
+    DeviceFaultVendorBinaryHeaderVersionOneEXT & operator=( VkDeviceFaultVendorBinaryHeaderVersionOneEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionOneEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each returns *this so calls can be chained.
+    VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setHeaderSize( uint32_t headerSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      headerSize = headerSize_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setHeaderVersion( VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionEXT headerVersion_ ) VULKAN_HPP_NOEXCEPT
+    {
+      headerVersion = headerVersion_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setVendorID( uint32_t vendorID_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vendorID = vendorID_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setDeviceID( uint32_t deviceID_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceID = deviceID_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setDriverVersion( uint32_t driverVersion_ ) VULKAN_HPP_NOEXCEPT
+    {
+      driverVersion = driverVersion_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setPipelineCacheUUID( std::array<uint8_t,VK_UUID_SIZE> pipelineCacheUUID_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineCacheUUID = pipelineCacheUUID_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setApplicationNameOffset( uint32_t applicationNameOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      applicationNameOffset = applicationNameOffset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setApplicationVersion( uint32_t applicationVersion_ ) VULKAN_HPP_NOEXCEPT
+    {
+      applicationVersion = applicationVersion_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setEngineNameOffset( uint32_t engineNameOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      engineNameOffset = engineNameOffset_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions so the wrapper can be passed straight to the C API.
+    operator VkDeviceFaultVendorBinaryHeaderVersionOneEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceFaultVendorBinaryHeaderVersionOneEXT*>( this );
+    }
+
+    operator VkDeviceFaultVendorBinaryHeaderVersionOneEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceFaultVendorBinaryHeaderVersionOneEXT*>( this );
+    }
+
+    // reflect(): tuple of member references, reused by operator== below.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<uint32_t const &, VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionEXT const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &, uint32_t const &, uint32_t const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( headerSize, headerVersion, vendorID, deviceID, driverVersion, pipelineCacheUUID, applicationNameOffset, applicationVersion, engineNameOffset );
+    }
+#endif
+
+
+    // Member-wise comparison; defaulted <=> when the compiler supports it.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DeviceFaultVendorBinaryHeaderVersionOneEXT const & ) const = default;
+#else
+    bool operator==( DeviceFaultVendorBinaryHeaderVersionOneEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( headerSize == rhs.headerSize )
+          && ( headerVersion == rhs.headerVersion )
+          && ( vendorID == rhs.vendorID )
+          && ( deviceID == rhs.deviceID )
+          && ( driverVersion == rhs.driverVersion )
+          && ( pipelineCacheUUID == rhs.pipelineCacheUUID )
+          && ( applicationNameOffset == rhs.applicationNameOffset )
+          && ( applicationVersion == rhs.applicationVersion )
+          && ( engineNameOffset == rhs.engineNameOffset );
+#endif
+    }
+
+    bool operator!=( DeviceFaultVendorBinaryHeaderVersionOneEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Data members — order and types mirror VkDeviceFaultVendorBinaryHeaderVersionOneEXT.
+    public:
+    uint32_t headerSize = {};
+    VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionEXT headerVersion = VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionEXT::eOne;
+    uint32_t vendorID = {};
+    uint32_t deviceID = {};
+    uint32_t driverVersion = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> pipelineCacheUUID = {};
+    uint32_t applicationNameOffset = {};
+    uint32_t applicationVersion = {};
+    uint32_t engineNameOffset = {};
+
+  };
+
+  // C++ wrapper around VkDeviceGroupBindSparseInfo (Vulkan 1.1 device groups):
+  // selects which device in the group owns the resource and which provides the
+  // memory for a sparse-binding operation.
+  struct DeviceGroupBindSparseInfo
+  {
+    using NativeType = VkDeviceGroupBindSparseInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupBindSparseInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; sType is fixed by the default member initializer below.
+VULKAN_HPP_CONSTEXPR DeviceGroupBindSparseInfo(uint32_t resourceDeviceIndex_ = {}, uint32_t memoryDeviceIndex_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), resourceDeviceIndex( resourceDeviceIndex_ ), memoryDeviceIndex( memoryDeviceIndex_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DeviceGroupBindSparseInfo( DeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; relies on identical memory layout.
+    DeviceGroupBindSparseInfo( VkDeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DeviceGroupBindSparseInfo( *reinterpret_cast<DeviceGroupBindSparseInfo const *>( &rhs ) )
+    {}
+
+
+    DeviceGroupBindSparseInfo & operator=( DeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (same layout-compatibility assumption as above).
+    DeviceGroupBindSparseInfo & operator=( VkDeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each returns *this so calls can be chained.
+    VULKAN_HPP_CONSTEXPR_14 DeviceGroupBindSparseInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceGroupBindSparseInfo & setResourceDeviceIndex( uint32_t resourceDeviceIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      resourceDeviceIndex = resourceDeviceIndex_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceGroupBindSparseInfo & setMemoryDeviceIndex( uint32_t memoryDeviceIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memoryDeviceIndex = memoryDeviceIndex_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions so the wrapper can be passed straight to the C API.
+    operator VkDeviceGroupBindSparseInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceGroupBindSparseInfo*>( this );
+    }
+
+    operator VkDeviceGroupBindSparseInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceGroupBindSparseInfo*>( this );
+    }
+
+    // reflect(): tuple of member references, reused by operator== below.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, resourceDeviceIndex, memoryDeviceIndex );
+    }
+#endif
+
+
+    // Member-wise comparison; defaulted <=> when the compiler supports it.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DeviceGroupBindSparseInfo const & ) const = default;
+#else
+    bool operator==( DeviceGroupBindSparseInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( resourceDeviceIndex == rhs.resourceDeviceIndex )
+          && ( memoryDeviceIndex == rhs.memoryDeviceIndex );
+#endif
+    }
+
+    bool operator!=( DeviceGroupBindSparseInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Data members — order and types mirror VkDeviceGroupBindSparseInfo.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupBindSparseInfo;
+    const void * pNext = {};
+    uint32_t resourceDeviceIndex = {};
+    uint32_t memoryDeviceIndex = {};
+
+  };
+
+  // Maps StructureType::eDeviceGroupBindSparseInfo back to this C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::eDeviceGroupBindSparseInfo>
+  {
+    using Type = DeviceGroupBindSparseInfo;
+  };
+  // Alias for the original VK_KHR_device_group name (promoted to core in 1.1).
+  using DeviceGroupBindSparseInfoKHR = DeviceGroupBindSparseInfo;
+
+  // C++ wrapper around VkDeviceGroupCommandBufferBeginInfo (Vulkan 1.1 device
+  // groups): deviceMask restricts which devices in the group execute the
+  // command buffer being begun.
+  struct DeviceGroupCommandBufferBeginInfo
+  {
+    using NativeType = VkDeviceGroupCommandBufferBeginInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupCommandBufferBeginInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; sType is fixed by the default member initializer below.
+VULKAN_HPP_CONSTEXPR DeviceGroupCommandBufferBeginInfo(uint32_t deviceMask_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), deviceMask( deviceMask_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DeviceGroupCommandBufferBeginInfo( DeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; relies on identical memory layout.
+    DeviceGroupCommandBufferBeginInfo( VkDeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DeviceGroupCommandBufferBeginInfo( *reinterpret_cast<DeviceGroupCommandBufferBeginInfo const *>( &rhs ) )
+    {}
+
+
+    DeviceGroupCommandBufferBeginInfo & operator=( DeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (same layout-compatibility assumption as above).
+    DeviceGroupCommandBufferBeginInfo & operator=( VkDeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each returns *this so calls can be chained.
+    VULKAN_HPP_CONSTEXPR_14 DeviceGroupCommandBufferBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceGroupCommandBufferBeginInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceMask = deviceMask_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions so the wrapper can be passed straight to the C API.
+    operator VkDeviceGroupCommandBufferBeginInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceGroupCommandBufferBeginInfo*>( this );
+    }
+
+    operator VkDeviceGroupCommandBufferBeginInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceGroupCommandBufferBeginInfo*>( this );
+    }
+
+    // reflect(): tuple of member references, reused by operator== below.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, deviceMask );
+    }
+#endif
+
+
+    // Member-wise comparison; defaulted <=> when the compiler supports it.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DeviceGroupCommandBufferBeginInfo const & ) const = default;
+#else
+    bool operator==( DeviceGroupCommandBufferBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( deviceMask == rhs.deviceMask );
+#endif
+    }
+
+    bool operator!=( DeviceGroupCommandBufferBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Data members — order and types mirror VkDeviceGroupCommandBufferBeginInfo.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupCommandBufferBeginInfo;
+    const void * pNext = {};
+    uint32_t deviceMask = {};
+
+  };
+
+  // Maps StructureType::eDeviceGroupCommandBufferBeginInfo back to this C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::eDeviceGroupCommandBufferBeginInfo>
+  {
+    using Type = DeviceGroupCommandBufferBeginInfo;
+  };
+  // Alias for the original VK_KHR_device_group name (promoted to core in 1.1).
+  using DeviceGroupCommandBufferBeginInfoKHR = DeviceGroupCommandBufferBeginInfo;
+
+  // C++ wrapper around VkDeviceGroupDeviceCreateInfo (Vulkan 1.1 device
+  // groups): lists the physical devices that form the device group when
+  // creating a logical device.
+  struct DeviceGroupDeviceCreateInfo
+  {
+    using NativeType = VkDeviceGroupDeviceCreateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupDeviceCreateInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; sType is fixed by the default member initializer below.
+VULKAN_HPP_CONSTEXPR DeviceGroupDeviceCreateInfo(uint32_t physicalDeviceCount_ = {}, const VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), physicalDeviceCount( physicalDeviceCount_ ), pPhysicalDevices( pPhysicalDevices_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DeviceGroupDeviceCreateInfo( DeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; relies on identical memory layout.
+    DeviceGroupDeviceCreateInfo( VkDeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DeviceGroupDeviceCreateInfo( *reinterpret_cast<DeviceGroupDeviceCreateInfo const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Convenience constructor: derives count/pointer from an ArrayProxy.  The
+    // proxy must not reference a temporary — only the pointer is stored.
+    DeviceGroupDeviceCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PhysicalDevice> const & physicalDevices_, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), physicalDeviceCount( static_cast<uint32_t>( physicalDevices_.size() ) ), pPhysicalDevices( physicalDevices_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    DeviceGroupDeviceCreateInfo & operator=( DeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (same layout-compatibility assumption as above).
+    DeviceGroupDeviceCreateInfo & operator=( VkDeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each returns *this so calls can be chained.
+    VULKAN_HPP_CONSTEXPR_14 DeviceGroupDeviceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceGroupDeviceCreateInfo & setPhysicalDeviceCount( uint32_t physicalDeviceCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      physicalDeviceCount = physicalDeviceCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceGroupDeviceCreateInfo & setPPhysicalDevices( const VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pPhysicalDevices = pPhysicalDevices_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // ArrayProxy setter: updates both count and pointer in one call.
+    DeviceGroupDeviceCreateInfo & setPhysicalDevices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PhysicalDevice> const & physicalDevices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      physicalDeviceCount = static_cast<uint32_t>( physicalDevices_.size() );
+      pPhysicalDevices = physicalDevices_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions so the wrapper can be passed straight to the C API.
+    operator VkDeviceGroupDeviceCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceGroupDeviceCreateInfo*>( this );
+    }
+
+    operator VkDeviceGroupDeviceCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceGroupDeviceCreateInfo*>( this );
+    }
+
+    // reflect(): tuple of member references, reused by operator== below.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::PhysicalDevice * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, physicalDeviceCount, pPhysicalDevices );
+    }
+#endif
+
+
+    // Member-wise comparison; note pPhysicalDevices compares by address.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DeviceGroupDeviceCreateInfo const & ) const = default;
+#else
+    bool operator==( DeviceGroupDeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( physicalDeviceCount == rhs.physicalDeviceCount )
+          && ( pPhysicalDevices == rhs.pPhysicalDevices );
+#endif
+    }
+
+    bool operator!=( DeviceGroupDeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Data members — order and types mirror VkDeviceGroupDeviceCreateInfo.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupDeviceCreateInfo;
+    const void * pNext = {};
+    uint32_t physicalDeviceCount = {};
+    const VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices = {};
+
+  };
+
+  // Maps StructureType::eDeviceGroupDeviceCreateInfo back to this C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::eDeviceGroupDeviceCreateInfo>
+  {
+    using Type = DeviceGroupDeviceCreateInfo;
+  };
+  // Alias for the original VK_KHR_device_group name (promoted to core in 1.1).
+  using DeviceGroupDeviceCreateInfoKHR = DeviceGroupDeviceCreateInfo;
+
+  // C++ wrapper around VkDeviceGroupPresentCapabilitiesKHR: per-device present
+  // masks and supported device-group present modes.  No setters are generated
+  // for this struct — presumably because it is filled in by the implementation
+  // rather than the application.
+  struct DeviceGroupPresentCapabilitiesKHR
+  {
+    using NativeType = VkDeviceGroupPresentCapabilitiesKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupPresentCapabilitiesKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; sType is fixed by the default member initializer below.
+VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentCapabilitiesKHR(std::array<uint32_t,VK_MAX_DEVICE_GROUP_SIZE> const & presentMask_ = {}, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), presentMask( presentMask_ ), modes( modes_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentCapabilitiesKHR( DeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; relies on identical memory layout.
+    DeviceGroupPresentCapabilitiesKHR( VkDeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DeviceGroupPresentCapabilitiesKHR( *reinterpret_cast<DeviceGroupPresentCapabilitiesKHR const *>( &rhs ) )
+    {}
+
+
+    DeviceGroupPresentCapabilitiesKHR & operator=( DeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (same layout-compatibility assumption as above).
+    DeviceGroupPresentCapabilitiesKHR & operator=( VkDeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR const *>( &rhs );
+      return *this;
+    }
+
+
+    // Implicit conversions so the wrapper can be passed straight to the C API.
+    operator VkDeviceGroupPresentCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceGroupPresentCapabilitiesKHR*>( this );
+    }
+
+    operator VkDeviceGroupPresentCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR*>( this );
+    }
+
+    // reflect(): tuple of member references, reused by operator== below.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, VK_MAX_DEVICE_GROUP_SIZE> const &, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, presentMask, modes );
+    }
+#endif
+
+
+    // Member-wise comparison; defaulted <=> when the compiler supports it.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DeviceGroupPresentCapabilitiesKHR const & ) const = default;
+#else
+    bool operator==( DeviceGroupPresentCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( presentMask == rhs.presentMask )
+          && ( modes == rhs.modes );
+#endif
+    }
+
+    bool operator!=( DeviceGroupPresentCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Data members — order and types mirror VkDeviceGroupPresentCapabilitiesKHR.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupPresentCapabilitiesKHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, VK_MAX_DEVICE_GROUP_SIZE> presentMask = {};
+    VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes = {};
+
+  };
+
+  // Maps StructureType::eDeviceGroupPresentCapabilitiesKHR back to this C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::eDeviceGroupPresentCapabilitiesKHR>
+  {
+    using Type = DeviceGroupPresentCapabilitiesKHR;
+  };
+
+  // C++ wrapper for VkDeviceGroupPresentInfoKHR: supplies one device mask per
+  // swapchain being presented (pDeviceMasks holds swapchainCount entries) plus
+  // the device-group present mode.  The reinterpret_cast conversion operators
+  // below rely on this struct being layout-compatible with the C struct.
+  struct DeviceGroupPresentInfoKHR
+  {
+    using NativeType = VkDeviceGroupPresentInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupPresentInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DeviceGroupPresentInfoKHR(uint32_t swapchainCount_ = {}, const uint32_t * pDeviceMasks_ = {}, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ = VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), swapchainCount( swapchainCount_ ), pDeviceMasks( pDeviceMasks_ ), mode( mode_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DeviceGroupPresentInfoKHR( DeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid because the two layouts are identical.
+    DeviceGroupPresentInfoKHR( VkDeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DeviceGroupPresentInfoKHR( *reinterpret_cast<DeviceGroupPresentInfoKHR const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Convenience constructor: swapchainCount is derived from the proxy's size.
+    DeviceGroupPresentInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceMasks_, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ = VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), swapchainCount( static_cast<uint32_t>( deviceMasks_.size() ) ), pDeviceMasks( deviceMasks_.data() ), mode( mode_ )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    DeviceGroupPresentInfoKHR & operator=( DeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DeviceGroupPresentInfoKHR & operator=( VkDeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupPresentInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters (builder pattern): each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentInfoKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      swapchainCount = swapchainCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentInfoKHR & setPDeviceMasks( const uint32_t * pDeviceMasks_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDeviceMasks = pDeviceMasks_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets both swapchainCount and pDeviceMasks from a single array proxy.
+    DeviceGroupPresentInfoKHR & setDeviceMasks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceMasks_ ) VULKAN_HPP_NOEXCEPT
+    {
+      swapchainCount = static_cast<uint32_t>( deviceMasks_.size() );
+      pDeviceMasks = deviceMasks_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentInfoKHR & setMode( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mode = mode_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C struct for passing to the Vulkan C API.
+    operator VkDeviceGroupPresentInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceGroupPresentInfoKHR*>( this );
+    }
+
+    operator VkDeviceGroupPresentInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceGroupPresentInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const uint32_t * const &, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, swapchainCount, pDeviceMasks, mode );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DeviceGroupPresentInfoKHR const & ) const = default;
+#else
+    // Memberwise equality; note pDeviceMasks compares the pointer, not the
+    // pointed-to array contents.
+    bool operator==( DeviceGroupPresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( swapchainCount == rhs.swapchainCount )
+          && ( pDeviceMasks == rhs.pDeviceMasks )
+          && ( mode == rhs.mode );
+#endif
+    }
+
+    bool operator!=( DeviceGroupPresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // Members mirror VkDeviceGroupPresentInfoKHR field-for-field; sType is
+    // pre-initialized so the struct is valid for the C API by default.
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupPresentInfoKHR;
+    const void * pNext = {};
+    uint32_t swapchainCount = {};
+    const uint32_t * pDeviceMasks = {};
+    VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode = VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal;
+
+  };
+
+  // Maps the eDeviceGroupPresentInfoKHR structure-type tag back to this C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::eDeviceGroupPresentInfoKHR>
+  {
+    using Type = DeviceGroupPresentInfoKHR;
+  };
+
+  // C++ wrapper for VkDeviceGroupRenderPassBeginInfo: selects which devices in
+  // a device group execute a render pass (deviceMask) and optionally gives each
+  // device its own render area (pDeviceRenderAreas, deviceRenderAreaCount
+  // entries).  Layout-compatible with the C struct.
+  struct DeviceGroupRenderPassBeginInfo
+  {
+    using NativeType = VkDeviceGroupRenderPassBeginInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupRenderPassBeginInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DeviceGroupRenderPassBeginInfo(uint32_t deviceMask_ = {}, uint32_t deviceRenderAreaCount_ = {}, const VULKAN_HPP_NAMESPACE::Rect2D * pDeviceRenderAreas_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), deviceMask( deviceMask_ ), deviceRenderAreaCount( deviceRenderAreaCount_ ), pDeviceRenderAreas( pDeviceRenderAreas_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DeviceGroupRenderPassBeginInfo( DeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid because the two layouts are identical.
+    DeviceGroupRenderPassBeginInfo( VkDeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DeviceGroupRenderPassBeginInfo( *reinterpret_cast<DeviceGroupRenderPassBeginInfo const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Convenience constructor: count is derived from the render-area proxy size.
+    DeviceGroupRenderPassBeginInfo( uint32_t deviceMask_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & deviceRenderAreas_, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), deviceMask( deviceMask_ ), deviceRenderAreaCount( static_cast<uint32_t>( deviceRenderAreas_.size() ) ), pDeviceRenderAreas( deviceRenderAreas_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    DeviceGroupRenderPassBeginInfo & operator=( DeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DeviceGroupRenderPassBeginInfo & operator=( VkDeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupRenderPassBeginInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters (builder pattern): each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 DeviceGroupRenderPassBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceGroupRenderPassBeginInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceMask = deviceMask_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceGroupRenderPassBeginInfo & setDeviceRenderAreaCount( uint32_t deviceRenderAreaCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceRenderAreaCount = deviceRenderAreaCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceGroupRenderPassBeginInfo & setPDeviceRenderAreas( const VULKAN_HPP_NAMESPACE::Rect2D * pDeviceRenderAreas_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDeviceRenderAreas = pDeviceRenderAreas_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets both deviceRenderAreaCount and pDeviceRenderAreas from one proxy.
+    DeviceGroupRenderPassBeginInfo & setDeviceRenderAreas( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & deviceRenderAreas_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceRenderAreaCount = static_cast<uint32_t>( deviceRenderAreas_.size() );
+      pDeviceRenderAreas = deviceRenderAreas_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C struct for passing to the Vulkan C API.
+    operator VkDeviceGroupRenderPassBeginInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceGroupRenderPassBeginInfo*>( this );
+    }
+
+    operator VkDeviceGroupRenderPassBeginInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceGroupRenderPassBeginInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Rect2D * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, deviceMask, deviceRenderAreaCount, pDeviceRenderAreas );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DeviceGroupRenderPassBeginInfo const & ) const = default;
+#else
+    // Memberwise equality; pDeviceRenderAreas compares the pointer, not the
+    // pointed-to array contents.
+    bool operator==( DeviceGroupRenderPassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( deviceMask == rhs.deviceMask )
+          && ( deviceRenderAreaCount == rhs.deviceRenderAreaCount )
+          && ( pDeviceRenderAreas == rhs.pDeviceRenderAreas );
+#endif
+    }
+
+    bool operator!=( DeviceGroupRenderPassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // Members mirror VkDeviceGroupRenderPassBeginInfo field-for-field; sType is
+    // pre-initialized so the struct is valid for the C API by default.
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupRenderPassBeginInfo;
+    const void * pNext = {};
+    uint32_t deviceMask = {};
+    uint32_t deviceRenderAreaCount = {};
+    const VULKAN_HPP_NAMESPACE::Rect2D * pDeviceRenderAreas = {};
+
+  };
+
+  // Maps the structure-type tag back to this C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::eDeviceGroupRenderPassBeginInfo>
+  {
+    using Type = DeviceGroupRenderPassBeginInfo;
+  };
+  // KHR alias retained for code written against the extension name.
+  using DeviceGroupRenderPassBeginInfoKHR = DeviceGroupRenderPassBeginInfo;
+
+  // C++ wrapper for VkDeviceGroupSubmitInfo: per-submission device indices for
+  // wait/signal semaphores and per-command-buffer device masks.  Each count
+  // field sizes its adjacent pointer array.  Layout-compatible with the C struct.
+  struct DeviceGroupSubmitInfo
+  {
+    using NativeType = VkDeviceGroupSubmitInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupSubmitInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DeviceGroupSubmitInfo(uint32_t waitSemaphoreCount_ = {}, const uint32_t * pWaitSemaphoreDeviceIndices_ = {}, uint32_t commandBufferCount_ = {}, const uint32_t * pCommandBufferDeviceMasks_ = {}, uint32_t signalSemaphoreCount_ = {}, const uint32_t * pSignalSemaphoreDeviceIndices_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), waitSemaphoreCount( waitSemaphoreCount_ ), pWaitSemaphoreDeviceIndices( pWaitSemaphoreDeviceIndices_ ), commandBufferCount( commandBufferCount_ ), pCommandBufferDeviceMasks( pCommandBufferDeviceMasks_ ), signalSemaphoreCount( signalSemaphoreCount_ ), pSignalSemaphoreDeviceIndices( pSignalSemaphoreDeviceIndices_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DeviceGroupSubmitInfo( DeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid because the two layouts are identical.
+    DeviceGroupSubmitInfo( VkDeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DeviceGroupSubmitInfo( *reinterpret_cast<DeviceGroupSubmitInfo const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Convenience constructor: each count is derived from its proxy's size.
+    DeviceGroupSubmitInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & waitSemaphoreDeviceIndices_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & commandBufferDeviceMasks_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & signalSemaphoreDeviceIndices_ = {}, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), waitSemaphoreCount( static_cast<uint32_t>( waitSemaphoreDeviceIndices_.size() ) ), pWaitSemaphoreDeviceIndices( waitSemaphoreDeviceIndices_.data() ), commandBufferCount( static_cast<uint32_t>( commandBufferDeviceMasks_.size() ) ), pCommandBufferDeviceMasks( commandBufferDeviceMasks_.data() ), signalSemaphoreCount( static_cast<uint32_t>( signalSemaphoreDeviceIndices_.size() ) ), pSignalSemaphoreDeviceIndices( signalSemaphoreDeviceIndices_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    DeviceGroupSubmitInfo & operator=( DeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DeviceGroupSubmitInfo & operator=( VkDeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupSubmitInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters (builder pattern): each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      waitSemaphoreCount = waitSemaphoreCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setPWaitSemaphoreDeviceIndices( const uint32_t * pWaitSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pWaitSemaphoreDeviceIndices = pWaitSemaphoreDeviceIndices_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets waitSemaphoreCount and pWaitSemaphoreDeviceIndices from one proxy.
+    DeviceGroupSubmitInfo & setWaitSemaphoreDeviceIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & waitSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      waitSemaphoreCount = static_cast<uint32_t>( waitSemaphoreDeviceIndices_.size() );
+      pWaitSemaphoreDeviceIndices = waitSemaphoreDeviceIndices_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      commandBufferCount = commandBufferCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setPCommandBufferDeviceMasks( const uint32_t * pCommandBufferDeviceMasks_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pCommandBufferDeviceMasks = pCommandBufferDeviceMasks_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets commandBufferCount and pCommandBufferDeviceMasks from one proxy.
+    DeviceGroupSubmitInfo & setCommandBufferDeviceMasks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & commandBufferDeviceMasks_ ) VULKAN_HPP_NOEXCEPT
+    {
+      commandBufferCount = static_cast<uint32_t>( commandBufferDeviceMasks_.size() );
+      pCommandBufferDeviceMasks = commandBufferDeviceMasks_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      signalSemaphoreCount = signalSemaphoreCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setPSignalSemaphoreDeviceIndices( const uint32_t * pSignalSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSignalSemaphoreDeviceIndices = pSignalSemaphoreDeviceIndices_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets signalSemaphoreCount and pSignalSemaphoreDeviceIndices from one proxy.
+    DeviceGroupSubmitInfo & setSignalSemaphoreDeviceIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & signalSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      signalSemaphoreCount = static_cast<uint32_t>( signalSemaphoreDeviceIndices_.size() );
+      pSignalSemaphoreDeviceIndices = signalSemaphoreDeviceIndices_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C struct for passing to the Vulkan C API.
+    operator VkDeviceGroupSubmitInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceGroupSubmitInfo*>( this );
+    }
+
+    operator VkDeviceGroupSubmitInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceGroupSubmitInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const uint32_t * const &, uint32_t const &, const uint32_t * const &, uint32_t const &, const uint32_t * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, waitSemaphoreCount, pWaitSemaphoreDeviceIndices, commandBufferCount, pCommandBufferDeviceMasks, signalSemaphoreCount, pSignalSemaphoreDeviceIndices );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DeviceGroupSubmitInfo const & ) const = default;
+#else
+    // Memberwise equality; pointer members compare the pointers themselves,
+    // not the pointed-to array contents.
+    bool operator==( DeviceGroupSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( waitSemaphoreCount == rhs.waitSemaphoreCount )
+          && ( pWaitSemaphoreDeviceIndices == rhs.pWaitSemaphoreDeviceIndices )
+          && ( commandBufferCount == rhs.commandBufferCount )
+          && ( pCommandBufferDeviceMasks == rhs.pCommandBufferDeviceMasks )
+          && ( signalSemaphoreCount == rhs.signalSemaphoreCount )
+          && ( pSignalSemaphoreDeviceIndices == rhs.pSignalSemaphoreDeviceIndices );
+#endif
+    }
+
+    bool operator!=( DeviceGroupSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // Members mirror VkDeviceGroupSubmitInfo field-for-field; sType is
+    // pre-initialized so the struct is valid for the C API by default.
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupSubmitInfo;
+    const void * pNext = {};
+    uint32_t waitSemaphoreCount = {};
+    const uint32_t * pWaitSemaphoreDeviceIndices = {};
+    uint32_t commandBufferCount = {};
+    const uint32_t * pCommandBufferDeviceMasks = {};
+    uint32_t signalSemaphoreCount = {};
+    const uint32_t * pSignalSemaphoreDeviceIndices = {};
+
+  };
+
+  // Maps the structure-type tag back to this C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::eDeviceGroupSubmitInfo>
+  {
+    using Type = DeviceGroupSubmitInfo;
+  };
+  // KHR alias retained for code written against the extension name.
+  using DeviceGroupSubmitInfoKHR = DeviceGroupSubmitInfo;
+
+  // C++ wrapper for VkDeviceGroupSwapchainCreateInfoKHR: declares the
+  // device-group present modes a swapchain may be used with (modes).
+  // Layout-compatible with the C struct.
+  struct DeviceGroupSwapchainCreateInfoKHR
+  {
+    using NativeType = VkDeviceGroupSwapchainCreateInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupSwapchainCreateInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DeviceGroupSwapchainCreateInfoKHR(VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), modes( modes_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DeviceGroupSwapchainCreateInfoKHR( DeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid because the two layouts are identical.
+    DeviceGroupSwapchainCreateInfoKHR( VkDeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DeviceGroupSwapchainCreateInfoKHR( *reinterpret_cast<DeviceGroupSwapchainCreateInfoKHR const *>( &rhs ) )
+    {}
+
+
+    DeviceGroupSwapchainCreateInfoKHR & operator=( DeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DeviceGroupSwapchainCreateInfoKHR & operator=( VkDeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupSwapchainCreateInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters (builder pattern): each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 DeviceGroupSwapchainCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceGroupSwapchainCreateInfoKHR & setModes( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      modes = modes_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C struct for passing to the Vulkan C API.
+    operator VkDeviceGroupSwapchainCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceGroupSwapchainCreateInfoKHR*>( this );
+    }
+
+    operator VkDeviceGroupSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceGroupSwapchainCreateInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, modes );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DeviceGroupSwapchainCreateInfoKHR const & ) const = default;
+#else
+    // Memberwise equality over all fields, including sType and pNext.
+    bool operator==( DeviceGroupSwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( modes == rhs.modes );
+#endif
+    }
+
+    bool operator!=( DeviceGroupSwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // Members mirror VkDeviceGroupSwapchainCreateInfoKHR field-for-field; sType
+    // is pre-initialized so the struct is valid for the C API by default.
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupSwapchainCreateInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes = {};
+
+  };
+
+  // Maps the structure-type tag back to this C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::eDeviceGroupSwapchainCreateInfoKHR>
+  {
+    using Type = DeviceGroupSwapchainCreateInfoKHR;
+  };
+
+  // C++ wrapper for VkImageCreateInfo: the full set of image-creation
+  // parameters (type, format, extent, mips/layers, samples, tiling, usage,
+  // sharing, queue families, initial layout).  Layout-compatible with the C
+  // struct, as the reinterpret_cast conversion operators below require.
+  struct ImageCreateInfo
+  {
+    using NativeType = VkImageCreateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageCreateInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ImageCreateInfo(VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::ImageType imageType_ = VULKAN_HPP_NAMESPACE::ImageType::e1D, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}, uint32_t mipLevels_ = {}, uint32_t arrayLayers_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, uint32_t queueFamilyIndexCount_ = {}, const uint32_t * pQueueFamilyIndices_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), imageType( imageType_ ), format( format_ ), extent( extent_ ), mipLevels( mipLevels_ ), arrayLayers( arrayLayers_ ), samples( samples_ ), tiling( tiling_ ), usage( usage_ ), sharingMode( sharingMode_ ), queueFamilyIndexCount( queueFamilyIndexCount_ ), pQueueFamilyIndices( pQueueFamilyIndices_ ), initialLayout( initialLayout_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageCreateInfo( ImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid because the two layouts are identical.
+    ImageCreateInfo( VkImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImageCreateInfo( *reinterpret_cast<ImageCreateInfo const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Convenience constructor: queueFamilyIndexCount is derived from the
+    // queue-family proxy's size.
+    ImageCreateInfo( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_, VULKAN_HPP_NAMESPACE::ImageType imageType_, VULKAN_HPP_NAMESPACE::Format format_, VULKAN_HPP_NAMESPACE::Extent3D extent_, uint32_t mipLevels_, uint32_t arrayLayers_, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_, VULKAN_HPP_NAMESPACE::ImageTiling tiling_, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_, VULKAN_HPP_NAMESPACE::SharingMode sharingMode_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_, VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), flags( flags_ ), imageType( imageType_ ), format( format_ ), extent( extent_ ), mipLevels( mipLevels_ ), arrayLayers( arrayLayers_ ), samples( samples_ ), tiling( tiling_ ), usage( usage_ ), sharingMode( sharingMode_ ), queueFamilyIndexCount( static_cast<uint32_t>( queueFamilyIndices_.size() ) ), pQueueFamilyIndices( queueFamilyIndices_.data() ), initialLayout( initialLayout_ )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    ImageCreateInfo & operator=( ImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ImageCreateInfo & operator=( VkImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters (builder pattern): each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setImageType( VULKAN_HPP_NAMESPACE::ImageType imageType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageType = imageType_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
+    {
+      format = format_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extent = extent_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setMipLevels( uint32_t mipLevels_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mipLevels = mipLevels_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setArrayLayers( uint32_t arrayLayers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      arrayLayers = arrayLayers_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT
+    {
+      samples = samples_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setTiling( VULKAN_HPP_NAMESPACE::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tiling = tiling_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      usage = usage_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sharingMode = sharingMode_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueFamilyIndexCount = queueFamilyIndexCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pQueueFamilyIndices = pQueueFamilyIndices_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets both queueFamilyIndexCount and pQueueFamilyIndices from one proxy.
+    ImageCreateInfo & setQueueFamilyIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueFamilyIndexCount = static_cast<uint32_t>( queueFamilyIndices_.size() );
+      pQueueFamilyIndices = queueFamilyIndices_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      initialLayout = initialLayout_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C struct for passing to the Vulkan C API.
+    operator VkImageCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageCreateInfo*>( this );
+    }
+
+    operator VkImageCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageCreateInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageCreateFlags const &, VULKAN_HPP_NAMESPACE::ImageType const &, VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::Extent3D const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::SampleCountFlagBits const &, VULKAN_HPP_NAMESPACE::ImageTiling const &, VULKAN_HPP_NAMESPACE::ImageUsageFlags const &, VULKAN_HPP_NAMESPACE::SharingMode const &, uint32_t const &, const uint32_t * const &, VULKAN_HPP_NAMESPACE::ImageLayout const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, imageType, format, extent, mipLevels, arrayLayers, samples, tiling, usage, sharingMode, queueFamilyIndexCount, pQueueFamilyIndices, initialLayout );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ImageCreateInfo const & ) const = default;
+#else
+    // Memberwise equality; pQueueFamilyIndices compares the pointer, not the
+    // pointed-to array contents.
+    bool operator==( ImageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( imageType == rhs.imageType )
+          && ( format == rhs.format )
+          && ( extent == rhs.extent )
+          && ( mipLevels == rhs.mipLevels )
+          && ( arrayLayers == rhs.arrayLayers )
+          && ( samples == rhs.samples )
+          && ( tiling == rhs.tiling )
+          && ( usage == rhs.usage )
+          && ( sharingMode == rhs.sharingMode )
+          && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount )
+          && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices )
+          && ( initialLayout == rhs.initialLayout );
+#endif
+    }
+
+    bool operator!=( ImageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // Members mirror VkImageCreateInfo field-for-field; sType is pre-initialized
+    // so the struct is valid for the C API by default.
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {};
+    VULKAN_HPP_NAMESPACE::ImageType imageType = VULKAN_HPP_NAMESPACE::ImageType::e1D;
+    VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    VULKAN_HPP_NAMESPACE::Extent3D extent = {};
+    uint32_t mipLevels = {};
+    uint32_t arrayLayers = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
+    VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal;
+    VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {};
+    VULKAN_HPP_NAMESPACE::SharingMode sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive;
+    uint32_t queueFamilyIndexCount = {};
+    const uint32_t * pQueueFamilyIndices = {};
+    VULKAN_HPP_NAMESPACE::ImageLayout initialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+
+  };
+
+  // Maps the structure-type tag back to this C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::eImageCreateInfo>
+  {
+    using Type = ImageCreateInfo;
+  };
+
+  // Vulkan-Hpp wrapper for VkDeviceImageMemoryRequirements (input to the
+  // vkGetDeviceImageMemoryRequirements-family queries).
+  // Auto-generated code: member order/layout is assumed bit-identical to the
+  // C struct (the conversion operators below reinterpret_cast *this), so do
+  // not reorder or add data members.
+  struct DeviceImageMemoryRequirements
+  {
+    using NativeType = VkDeviceImageMemoryRequirements;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceImageMemoryRequirements;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DeviceImageMemoryRequirements(const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo_ = {}, VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), pCreateInfo( pCreateInfo_ ), planeAspect( planeAspect_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DeviceImageMemoryRequirements( DeviceImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting its bytes (relies on the
+    // layout compatibility noted above).
+    DeviceImageMemoryRequirements( VkDeviceImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DeviceImageMemoryRequirements( *reinterpret_cast<DeviceImageMemoryRequirements const *>( &rhs ) )
+    {}
+
+
+    DeviceImageMemoryRequirements & operator=( DeviceImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DeviceImageMemoryRequirements & operator=( VkDeviceImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters; each returns *this so calls can be fluently chained.
+    VULKAN_HPP_CONSTEXPR_14 DeviceImageMemoryRequirements & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceImageMemoryRequirements & setPCreateInfo( const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pCreateInfo = pCreateInfo_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceImageMemoryRequirements & setPlaneAspect( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ ) VULKAN_HPP_NOEXCEPT
+    {
+      planeAspect = planeAspect_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C struct for passing to the C API.
+    operator VkDeviceImageMemoryRequirements const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceImageMemoryRequirements*>( this );
+    }
+
+    operator VkDeviceImageMemoryRequirements &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceImageMemoryRequirements*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const VULKAN_HPP_NAMESPACE::ImageCreateInfo * const &, VULKAN_HPP_NAMESPACE::ImageAspectFlagBits const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, pCreateInfo, planeAspect );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DeviceImageMemoryRequirements const & ) const = default;
+#else
+    // Memberwise comparison; pointer members (pNext, pCreateInfo) are compared
+    // by address, not by pointee contents.
+    bool operator==( DeviceImageMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pCreateInfo == rhs.pCreateInfo )
+          && ( planeAspect == rhs.planeAspect );
+#endif
+    }
+
+    bool operator!=( DeviceImageMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceImageMemoryRequirements;
+    const void * pNext = {};
+    const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo = {};
+    VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor;
+
+  };
+
+  // Maps StructureType::eDeviceImageMemoryRequirements back to this C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::eDeviceImageMemoryRequirements>
+  {
+    using Type = DeviceImageMemoryRequirements;
+  };
+  // Alias kept for code written against the KHR extension spelling.
+  using DeviceImageMemoryRequirementsKHR = DeviceImageMemoryRequirements;
+
+  // Vulkan-Hpp wrapper for VkDeviceMemoryOpaqueCaptureAddressInfo.
+  // Auto-generated code: layout is assumed bit-identical to the C struct
+  // (see the reinterpret_cast conversion operators below).
+  struct DeviceMemoryOpaqueCaptureAddressInfo
+  {
+    using NativeType = VkDeviceMemoryOpaqueCaptureAddressInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceMemoryOpaqueCaptureAddressInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DeviceMemoryOpaqueCaptureAddressInfo(VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), memory( memory_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DeviceMemoryOpaqueCaptureAddressInfo( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting its bytes.
+    DeviceMemoryOpaqueCaptureAddressInfo( VkDeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DeviceMemoryOpaqueCaptureAddressInfo( *reinterpret_cast<DeviceMemoryOpaqueCaptureAddressInfo const *>( &rhs ) )
+    {}
+
+
+    DeviceMemoryOpaqueCaptureAddressInfo & operator=( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DeviceMemoryOpaqueCaptureAddressInfo & operator=( VkDeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters; each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 DeviceMemoryOpaqueCaptureAddressInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceMemoryOpaqueCaptureAddressInfo & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memory = memory_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C struct for passing to the C API.
+    operator VkDeviceMemoryOpaqueCaptureAddressInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo*>( this );
+    }
+
+    operator VkDeviceMemoryOpaqueCaptureAddressInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceMemoryOpaqueCaptureAddressInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceMemory const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, memory );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DeviceMemoryOpaqueCaptureAddressInfo const & ) const = default;
+#else
+    // Memberwise comparison; pNext is compared by address.
+    bool operator==( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memory == rhs.memory );
+#endif
+    }
+
+    bool operator!=( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryOpaqueCaptureAddressInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
+
+  };
+
+  // Maps the StructureType enumerant back to this C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::eDeviceMemoryOpaqueCaptureAddressInfo>
+  {
+    using Type = DeviceMemoryOpaqueCaptureAddressInfo;
+  };
+  // Alias kept for code written against the KHR extension spelling.
+  using DeviceMemoryOpaqueCaptureAddressInfoKHR = DeviceMemoryOpaqueCaptureAddressInfo;
+
+  // Vulkan-Hpp wrapper for VkDeviceMemoryOverallocationCreateInfoAMD.
+  // Auto-generated code: layout is assumed bit-identical to the C struct
+  // (see the reinterpret_cast conversion operators below).
+  struct DeviceMemoryOverallocationCreateInfoAMD
+  {
+    using NativeType = VkDeviceMemoryOverallocationCreateInfoAMD;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceMemoryOverallocationCreateInfoAMD;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DeviceMemoryOverallocationCreateInfoAMD(VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior_ = VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD::eDefault, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), overallocationBehavior( overallocationBehavior_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DeviceMemoryOverallocationCreateInfoAMD( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting its bytes.
+    DeviceMemoryOverallocationCreateInfoAMD( VkDeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DeviceMemoryOverallocationCreateInfoAMD( *reinterpret_cast<DeviceMemoryOverallocationCreateInfoAMD const *>( &rhs ) )
+    {}
+
+
+    DeviceMemoryOverallocationCreateInfoAMD & operator=( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DeviceMemoryOverallocationCreateInfoAMD & operator=( VkDeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceMemoryOverallocationCreateInfoAMD const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters; each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 DeviceMemoryOverallocationCreateInfoAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceMemoryOverallocationCreateInfoAMD & setOverallocationBehavior( VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior_ ) VULKAN_HPP_NOEXCEPT
+    {
+      overallocationBehavior = overallocationBehavior_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C struct for passing to the C API.
+    operator VkDeviceMemoryOverallocationCreateInfoAMD const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceMemoryOverallocationCreateInfoAMD*>( this );
+    }
+
+    operator VkDeviceMemoryOverallocationCreateInfoAMD &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceMemoryOverallocationCreateInfoAMD*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, overallocationBehavior );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DeviceMemoryOverallocationCreateInfoAMD const & ) const = default;
+#else
+    // Memberwise comparison; pNext is compared by address.
+    bool operator==( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( overallocationBehavior == rhs.overallocationBehavior );
+#endif
+    }
+
+    bool operator!=( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryOverallocationCreateInfoAMD;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior = VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD::eDefault;
+
+  };
+
+  // Maps the StructureType enumerant back to this C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::eDeviceMemoryOverallocationCreateInfoAMD>
+  {
+    using Type = DeviceMemoryOverallocationCreateInfoAMD;
+  };
+
+  // Vulkan-Hpp wrapper for VkDeviceMemoryReportCallbackDataEXT.
+  // Auto-generated code: layout is assumed bit-identical to the C struct
+  // (see the reinterpret_cast conversion operators below).
+  // Note: no setter block is generated for this struct -- presumably it is
+  // filled in by the implementation and handed to the application's
+  // device-memory-report callback (confirm against the Vulkan spec).
+  struct DeviceMemoryReportCallbackDataEXT
+  {
+    using NativeType = VkDeviceMemoryReportCallbackDataEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceMemoryReportCallbackDataEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DeviceMemoryReportCallbackDataEXT(VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags_ = {}, VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT type_ = VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT::eAllocate, uint64_t memoryObjectId_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown, uint64_t objectHandle_ = {}, uint32_t heapIndex_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), type( type_ ), memoryObjectId( memoryObjectId_ ), size( size_ ), objectType( objectType_ ), objectHandle( objectHandle_ ), heapIndex( heapIndex_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DeviceMemoryReportCallbackDataEXT( DeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting its bytes.
+    DeviceMemoryReportCallbackDataEXT( VkDeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DeviceMemoryReportCallbackDataEXT( *reinterpret_cast<DeviceMemoryReportCallbackDataEXT const *>( &rhs ) )
+    {}
+
+
+    DeviceMemoryReportCallbackDataEXT & operator=( DeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DeviceMemoryReportCallbackDataEXT & operator=( VkDeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceMemoryReportCallbackDataEXT const *>( &rhs );
+      return *this;
+    }
+
+
+    // Zero-cost conversions to the C struct for passing to the C API.
+    operator VkDeviceMemoryReportCallbackDataEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceMemoryReportCallbackDataEXT*>( this );
+    }
+
+    operator VkDeviceMemoryReportCallbackDataEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceMemoryReportCallbackDataEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT const &, VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT const &, uint64_t const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::ObjectType const &, uint64_t const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, type, memoryObjectId, size, objectType, objectHandle, heapIndex );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DeviceMemoryReportCallbackDataEXT const & ) const = default;
+#else
+    // Memberwise comparison; pNext is compared by address.
+    bool operator==( DeviceMemoryReportCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( type == rhs.type )
+          && ( memoryObjectId == rhs.memoryObjectId )
+          && ( size == rhs.size )
+          && ( objectType == rhs.objectType )
+          && ( objectHandle == rhs.objectHandle )
+          && ( heapIndex == rhs.heapIndex );
+#endif
+    }
+
+    bool operator!=( DeviceMemoryReportCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryReportCallbackDataEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags = {};
+    VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT type = VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT::eAllocate;
+    uint64_t memoryObjectId = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+    VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown;
+    uint64_t objectHandle = {};
+    uint32_t heapIndex = {};
+
+  };
+
+  // Maps the StructureType enumerant back to this C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::eDeviceMemoryReportCallbackDataEXT>
+  {
+    using Type = DeviceMemoryReportCallbackDataEXT;
+  };
+
+  // Vulkan-Hpp wrapper for VkDevicePrivateDataCreateInfo.
+  // Auto-generated code: layout is assumed bit-identical to the C struct
+  // (see the reinterpret_cast conversion operators below).
+  struct DevicePrivateDataCreateInfo
+  {
+    using NativeType = VkDevicePrivateDataCreateInfo;
+
+    // allowDuplicate = true: more than one of these structs may appear in a
+    // single structure (pNext) chain.
+    static const bool allowDuplicate = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDevicePrivateDataCreateInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DevicePrivateDataCreateInfo(uint32_t privateDataSlotRequestCount_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), privateDataSlotRequestCount( privateDataSlotRequestCount_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DevicePrivateDataCreateInfo( DevicePrivateDataCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting its bytes.
+    DevicePrivateDataCreateInfo( VkDevicePrivateDataCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DevicePrivateDataCreateInfo( *reinterpret_cast<DevicePrivateDataCreateInfo const *>( &rhs ) )
+    {}
+
+
+    DevicePrivateDataCreateInfo & operator=( DevicePrivateDataCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DevicePrivateDataCreateInfo & operator=( VkDevicePrivateDataCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DevicePrivateDataCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters; each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 DevicePrivateDataCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DevicePrivateDataCreateInfo & setPrivateDataSlotRequestCount( uint32_t privateDataSlotRequestCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      privateDataSlotRequestCount = privateDataSlotRequestCount_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C struct for passing to the C API.
+    operator VkDevicePrivateDataCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDevicePrivateDataCreateInfo*>( this );
+    }
+
+    operator VkDevicePrivateDataCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDevicePrivateDataCreateInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, privateDataSlotRequestCount );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DevicePrivateDataCreateInfo const & ) const = default;
+#else
+    // Memberwise comparison; pNext is compared by address.
+    bool operator==( DevicePrivateDataCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( privateDataSlotRequestCount == rhs.privateDataSlotRequestCount );
+#endif
+    }
+
+    bool operator!=( DevicePrivateDataCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDevicePrivateDataCreateInfo;
+    const void * pNext = {};
+    uint32_t privateDataSlotRequestCount = {};
+
+  };
+
+  // Maps the StructureType enumerant back to this C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::eDevicePrivateDataCreateInfo>
+  {
+    using Type = DevicePrivateDataCreateInfo;
+  };
+  // Alias kept for code written against the EXT extension spelling.
+  using DevicePrivateDataCreateInfoEXT = DevicePrivateDataCreateInfo;
+
+  // Vulkan-Hpp wrapper for VkDeviceQueueGlobalPriorityCreateInfoKHR.
+  // Auto-generated code: layout is assumed bit-identical to the C struct
+  // (see the reinterpret_cast conversion operators below).
+  struct DeviceQueueGlobalPriorityCreateInfoKHR
+  {
+    using NativeType = VkDeviceQueueGlobalPriorityCreateInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceQueueGlobalPriorityCreateInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DeviceQueueGlobalPriorityCreateInfoKHR(VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR globalPriority_ = VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), globalPriority( globalPriority_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DeviceQueueGlobalPriorityCreateInfoKHR( DeviceQueueGlobalPriorityCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting its bytes.
+    DeviceQueueGlobalPriorityCreateInfoKHR( VkDeviceQueueGlobalPriorityCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DeviceQueueGlobalPriorityCreateInfoKHR( *reinterpret_cast<DeviceQueueGlobalPriorityCreateInfoKHR const *>( &rhs ) )
+    {}
+
+
+    DeviceQueueGlobalPriorityCreateInfoKHR & operator=( DeviceQueueGlobalPriorityCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DeviceQueueGlobalPriorityCreateInfoKHR & operator=( VkDeviceQueueGlobalPriorityCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters; each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 DeviceQueueGlobalPriorityCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceQueueGlobalPriorityCreateInfoKHR & setGlobalPriority( VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR globalPriority_ ) VULKAN_HPP_NOEXCEPT
+    {
+      globalPriority = globalPriority_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C struct for passing to the C API.
+    operator VkDeviceQueueGlobalPriorityCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceQueueGlobalPriorityCreateInfoKHR*>( this );
+    }
+
+    operator VkDeviceQueueGlobalPriorityCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceQueueGlobalPriorityCreateInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, globalPriority );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DeviceQueueGlobalPriorityCreateInfoKHR const & ) const = default;
+#else
+    // Memberwise comparison; pNext is compared by address.
+    bool operator==( DeviceQueueGlobalPriorityCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( globalPriority == rhs.globalPriority );
+#endif
+    }
+
+    bool operator!=( DeviceQueueGlobalPriorityCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueGlobalPriorityCreateInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR globalPriority = VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow;
+
+  };
+
+  // Maps the StructureType enumerant back to this C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::eDeviceQueueGlobalPriorityCreateInfoKHR>
+  {
+    using Type = DeviceQueueGlobalPriorityCreateInfoKHR;
+  };
+  // Alias kept for code written against the older EXT extension spelling.
+  using DeviceQueueGlobalPriorityCreateInfoEXT = DeviceQueueGlobalPriorityCreateInfoKHR;
+
+  // Vulkan-Hpp wrapper for VkDeviceQueueInfo2 (input to vkGetDeviceQueue2).
+  // Auto-generated code: layout is assumed bit-identical to the C struct
+  // (see the reinterpret_cast conversion operators below).
+  struct DeviceQueueInfo2
+  {
+    using NativeType = VkDeviceQueueInfo2;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceQueueInfo2;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DeviceQueueInfo2(VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ = {}, uint32_t queueFamilyIndex_ = {}, uint32_t queueIndex_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), queueFamilyIndex( queueFamilyIndex_ ), queueIndex( queueIndex_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DeviceQueueInfo2( DeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting its bytes.
+    DeviceQueueInfo2( VkDeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DeviceQueueInfo2( *reinterpret_cast<DeviceQueueInfo2 const *>( &rhs ) )
+    {}
+
+
+    DeviceQueueInfo2 & operator=( DeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DeviceQueueInfo2 & operator=( VkDeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters; each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 DeviceQueueInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceQueueInfo2 & setFlags( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceQueueInfo2 & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueFamilyIndex = queueFamilyIndex_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceQueueInfo2 & setQueueIndex( uint32_t queueIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueIndex = queueIndex_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C struct for passing to the C API.
+    operator VkDeviceQueueInfo2 const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceQueueInfo2*>( this );
+    }
+
+    operator VkDeviceQueueInfo2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceQueueInfo2*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags const &, uint32_t const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, queueFamilyIndex, queueIndex );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DeviceQueueInfo2 const & ) const = default;
+#else
+    // Memberwise comparison; pNext is compared by address.
+    bool operator==( DeviceQueueInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( queueFamilyIndex == rhs.queueFamilyIndex )
+          && ( queueIndex == rhs.queueIndex );
+#endif
+    }
+
+    bool operator!=( DeviceQueueInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueInfo2;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags = {};
+    uint32_t queueFamilyIndex = {};
+    uint32_t queueIndex = {};
+
+  };
+
+  // Maps the StructureType enumerant back to this C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::eDeviceQueueInfo2>
+  {
+    using Type = DeviceQueueInfo2;
+  };
+
+  // Vulkan-Hpp wrapper for VkDirectDriverLoadingInfoLUNARG
+  // (VK_LUNARG_direct_driver_loading extension).
+  // Auto-generated code: layout is assumed bit-identical to the C struct
+  // (see the reinterpret_cast conversion operators below).
+  struct DirectDriverLoadingInfoLUNARG
+  {
+    using NativeType = VkDirectDriverLoadingInfoLUNARG;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDirectDriverLoadingInfoLUNARG;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DirectDriverLoadingInfoLUNARG(VULKAN_HPP_NAMESPACE::DirectDriverLoadingFlagsLUNARG flags_ = {}, PFN_vkGetInstanceProcAddrLUNARG pfnGetInstanceProcAddr_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), pfnGetInstanceProcAddr( pfnGetInstanceProcAddr_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DirectDriverLoadingInfoLUNARG( DirectDriverLoadingInfoLUNARG const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting its bytes.
+    DirectDriverLoadingInfoLUNARG( VkDirectDriverLoadingInfoLUNARG const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DirectDriverLoadingInfoLUNARG( *reinterpret_cast<DirectDriverLoadingInfoLUNARG const *>( &rhs ) )
+    {}
+
+
+    DirectDriverLoadingInfoLUNARG & operator=( DirectDriverLoadingInfoLUNARG const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DirectDriverLoadingInfoLUNARG & operator=( VkDirectDriverLoadingInfoLUNARG const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DirectDriverLoadingInfoLUNARG const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters; each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 DirectDriverLoadingInfoLUNARG & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DirectDriverLoadingInfoLUNARG & setFlags( VULKAN_HPP_NAMESPACE::DirectDriverLoadingFlagsLUNARG flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DirectDriverLoadingInfoLUNARG & setPfnGetInstanceProcAddr( PFN_vkGetInstanceProcAddrLUNARG pfnGetInstanceProcAddr_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pfnGetInstanceProcAddr = pfnGetInstanceProcAddr_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C struct for passing to the C API.
+    operator VkDirectDriverLoadingInfoLUNARG const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDirectDriverLoadingInfoLUNARG*>( this );
+    }
+
+    operator VkDirectDriverLoadingInfoLUNARG &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDirectDriverLoadingInfoLUNARG*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DirectDriverLoadingFlagsLUNARG const &, PFN_vkGetInstanceProcAddrLUNARG const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, pfnGetInstanceProcAddr );
+    }
+#endif
+
+
+
+
+
+    // NOTE: unlike the neighbouring structs, no defaulted operator<=> block is
+    // generated here -- only == / != are provided, presumably because the
+    // struct holds a function-pointer member (pfnGetInstanceProcAddr); confirm
+    // against the Vulkan-Hpp generator. pNext and the function pointer are
+    // compared by address.
+    bool operator==( DirectDriverLoadingInfoLUNARG const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( pfnGetInstanceProcAddr == rhs.pfnGetInstanceProcAddr );
+#endif
+    }
+
+    bool operator!=( DirectDriverLoadingInfoLUNARG const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDirectDriverLoadingInfoLUNARG;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DirectDriverLoadingFlagsLUNARG flags = {};
+    PFN_vkGetInstanceProcAddrLUNARG pfnGetInstanceProcAddr = {};
+
+  };
+
+  // Maps the StructureType enumerant back to this C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::eDirectDriverLoadingInfoLUNARG>
+  {
+    using Type = DirectDriverLoadingInfoLUNARG;
+  };
+
+  struct DirectDriverLoadingListLUNARG
+  {
+    using NativeType = VkDirectDriverLoadingListLUNARG;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDirectDriverLoadingListLUNARG;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DirectDriverLoadingListLUNARG(VULKAN_HPP_NAMESPACE::DirectDriverLoadingModeLUNARG mode_ = VULKAN_HPP_NAMESPACE::DirectDriverLoadingModeLUNARG::eExclusive, uint32_t driverCount_ = {}, const VULKAN_HPP_NAMESPACE::DirectDriverLoadingInfoLUNARG * pDrivers_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), mode( mode_ ), driverCount( driverCount_ ), pDrivers( pDrivers_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DirectDriverLoadingListLUNARG( DirectDriverLoadingListLUNARG const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DirectDriverLoadingListLUNARG( VkDirectDriverLoadingListLUNARG const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DirectDriverLoadingListLUNARG( *reinterpret_cast<DirectDriverLoadingListLUNARG const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    DirectDriverLoadingListLUNARG( VULKAN_HPP_NAMESPACE::DirectDriverLoadingModeLUNARG mode_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DirectDriverLoadingInfoLUNARG> const & drivers_, void * pNext_ = nullptr )
+    : pNext( pNext_ ), mode( mode_ ), driverCount( static_cast<uint32_t>( drivers_.size() ) ), pDrivers( drivers_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    DirectDriverLoadingListLUNARG & operator=( DirectDriverLoadingListLUNARG const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DirectDriverLoadingListLUNARG & operator=( VkDirectDriverLoadingListLUNARG const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DirectDriverLoadingListLUNARG const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DirectDriverLoadingListLUNARG & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DirectDriverLoadingListLUNARG & setMode( VULKAN_HPP_NAMESPACE::DirectDriverLoadingModeLUNARG mode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mode = mode_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DirectDriverLoadingListLUNARG & setDriverCount( uint32_t driverCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      driverCount = driverCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DirectDriverLoadingListLUNARG & setPDrivers( const VULKAN_HPP_NAMESPACE::DirectDriverLoadingInfoLUNARG * pDrivers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDrivers = pDrivers_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    DirectDriverLoadingListLUNARG & setDrivers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DirectDriverLoadingInfoLUNARG> const & drivers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      driverCount = static_cast<uint32_t>( drivers_.size() );
+      pDrivers = drivers_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkDirectDriverLoadingListLUNARG const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDirectDriverLoadingListLUNARG*>( this );
+    }
+
+    operator VkDirectDriverLoadingListLUNARG &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDirectDriverLoadingListLUNARG*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DirectDriverLoadingModeLUNARG const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::DirectDriverLoadingInfoLUNARG * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, mode, driverCount, pDrivers );
+    }
+#endif
+
+
+
+
+
+    bool operator==( DirectDriverLoadingListLUNARG const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( mode == rhs.mode )
+          && ( driverCount == rhs.driverCount )
+          && ( pDrivers == rhs.pDrivers );
+#endif
+    }
+
+    bool operator!=( DirectDriverLoadingListLUNARG const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDirectDriverLoadingListLUNARG;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DirectDriverLoadingModeLUNARG mode = VULKAN_HPP_NAMESPACE::DirectDriverLoadingModeLUNARG::eExclusive;
+    uint32_t driverCount = {};
+    const VULKAN_HPP_NAMESPACE::DirectDriverLoadingInfoLUNARG * pDrivers = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eDirectDriverLoadingListLUNARG>
+  {
+    using Type = DirectDriverLoadingListLUNARG;
+  };
+
+#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
+  struct DirectFBSurfaceCreateInfoEXT
+  {
+    using NativeType = VkDirectFBSurfaceCreateInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDirectfbSurfaceCreateInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DirectFBSurfaceCreateInfoEXT(VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateFlagsEXT flags_ = {}, IDirectFB * dfb_ = {}, IDirectFBSurface * surface_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), dfb( dfb_ ), surface( surface_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DirectFBSurfaceCreateInfoEXT( DirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DirectFBSurfaceCreateInfoEXT( VkDirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DirectFBSurfaceCreateInfoEXT( *reinterpret_cast<DirectFBSurfaceCreateInfoEXT const *>( &rhs ) )
+    {}
+
+
+    DirectFBSurfaceCreateInfoEXT & operator=( DirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DirectFBSurfaceCreateInfoEXT & operator=( VkDirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DirectFBSurfaceCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DirectFBSurfaceCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DirectFBSurfaceCreateInfoEXT & setDfb( IDirectFB * dfb_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dfb = dfb_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DirectFBSurfaceCreateInfoEXT & setSurface( IDirectFBSurface * surface_ ) VULKAN_HPP_NOEXCEPT
+    {
+      surface = surface_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkDirectFBSurfaceCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT*>( this );
+    }
+
+    operator VkDirectFBSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDirectFBSurfaceCreateInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateFlagsEXT const &, IDirectFB * const &, IDirectFBSurface * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, dfb, surface );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DirectFBSurfaceCreateInfoEXT const & ) const = default;
+#else
+    bool operator==( DirectFBSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( dfb == rhs.dfb )
+          && ( surface == rhs.surface );
+#endif
+    }
+
+    bool operator!=( DirectFBSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDirectfbSurfaceCreateInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateFlagsEXT flags = {};
+    IDirectFB * dfb = {};
+    IDirectFBSurface * surface = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eDirectfbSurfaceCreateInfoEXT>
+  {
+    using Type = DirectFBSurfaceCreateInfoEXT;
+  };
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+
+  struct DispatchIndirectCommand
+  {
+    using NativeType = VkDispatchIndirectCommand;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DispatchIndirectCommand(uint32_t x_ = {}, uint32_t y_ = {}, uint32_t z_ = {}) VULKAN_HPP_NOEXCEPT
+    : x( x_ ), y( y_ ), z( z_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DispatchIndirectCommand( DispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DispatchIndirectCommand( VkDispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DispatchIndirectCommand( *reinterpret_cast<DispatchIndirectCommand const *>( &rhs ) )
+    {}
+
+
+    DispatchIndirectCommand & operator=( DispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DispatchIndirectCommand & operator=( VkDispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DispatchIndirectCommand const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DispatchIndirectCommand & setX( uint32_t x_ ) VULKAN_HPP_NOEXCEPT
+    {
+      x = x_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DispatchIndirectCommand & setY( uint32_t y_ ) VULKAN_HPP_NOEXCEPT
+    {
+      y = y_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DispatchIndirectCommand & setZ( uint32_t z_ ) VULKAN_HPP_NOEXCEPT
+    {
+      z = z_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkDispatchIndirectCommand const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDispatchIndirectCommand*>( this );
+    }
+
+    operator VkDispatchIndirectCommand &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDispatchIndirectCommand*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<uint32_t const &, uint32_t const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( x, y, z );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DispatchIndirectCommand const & ) const = default;
+#else
+    bool operator==( DispatchIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( x == rhs.x )
+          && ( y == rhs.y )
+          && ( z == rhs.z );
+#endif
+    }
+
+    bool operator!=( DispatchIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    uint32_t x = {};
+    uint32_t y = {};
+    uint32_t z = {};
+
+  };
+
+  struct DisplayEventInfoEXT
+  {
+    using NativeType = VkDisplayEventInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayEventInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DisplayEventInfoEXT(VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent_ = VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT::eFirstPixelOut, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), displayEvent( displayEvent_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DisplayEventInfoEXT( DisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DisplayEventInfoEXT( VkDisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DisplayEventInfoEXT( *reinterpret_cast<DisplayEventInfoEXT const *>( &rhs ) )
+    {}
+
+
+    DisplayEventInfoEXT & operator=( DisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DisplayEventInfoEXT & operator=( VkDisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DisplayEventInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DisplayEventInfoEXT & setDisplayEvent( VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      displayEvent = displayEvent_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkDisplayEventInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayEventInfoEXT*>( this );
+    }
+
+    operator VkDisplayEventInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayEventInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, displayEvent );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DisplayEventInfoEXT const & ) const = default;
+#else
+    bool operator==( DisplayEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( displayEvent == rhs.displayEvent );
+#endif
+    }
+
+    bool operator!=( DisplayEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayEventInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent = VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT::eFirstPixelOut;
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eDisplayEventInfoEXT>
+  {
+    using Type = DisplayEventInfoEXT;
+  };
+
+  struct DisplayModeParametersKHR
+  {
+    using NativeType = VkDisplayModeParametersKHR;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DisplayModeParametersKHR(VULKAN_HPP_NAMESPACE::Extent2D visibleRegion_ = {}, uint32_t refreshRate_ = {}) VULKAN_HPP_NOEXCEPT
+    : visibleRegion( visibleRegion_ ), refreshRate( refreshRate_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DisplayModeParametersKHR( DisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DisplayModeParametersKHR( VkDisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DisplayModeParametersKHR( *reinterpret_cast<DisplayModeParametersKHR const *>( &rhs ) )
+    {}
+
+
+    DisplayModeParametersKHR & operator=( DisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DisplayModeParametersKHR & operator=( VkDisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DisplayModeParametersKHR & setVisibleRegion( VULKAN_HPP_NAMESPACE::Extent2D const & visibleRegion_ ) VULKAN_HPP_NOEXCEPT
+    {
+      visibleRegion = visibleRegion_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DisplayModeParametersKHR & setRefreshRate( uint32_t refreshRate_ ) VULKAN_HPP_NOEXCEPT
+    {
+      refreshRate = refreshRate_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkDisplayModeParametersKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayModeParametersKHR*>( this );
+    }
+
+    operator VkDisplayModeParametersKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayModeParametersKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::Extent2D const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( visibleRegion, refreshRate );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DisplayModeParametersKHR const & ) const = default;
+#else
+    bool operator==( DisplayModeParametersKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( visibleRegion == rhs.visibleRegion )
+          && ( refreshRate == rhs.refreshRate );
+#endif
+    }
+
+    bool operator!=( DisplayModeParametersKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::Extent2D visibleRegion = {};
+    uint32_t refreshRate = {};
+
+  };
+
+  struct DisplayModeCreateInfoKHR
+  {
+    using NativeType = VkDisplayModeCreateInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayModeCreateInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DisplayModeCreateInfoKHR(VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags_ = {}, VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), parameters( parameters_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DisplayModeCreateInfoKHR( DisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DisplayModeCreateInfoKHR( VkDisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DisplayModeCreateInfoKHR( *reinterpret_cast<DisplayModeCreateInfoKHR const *>( &rhs ) )
+    {}
+
+
+    DisplayModeCreateInfoKHR & operator=( DisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DisplayModeCreateInfoKHR & operator=( VkDisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DisplayModeCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DisplayModeCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DisplayModeCreateInfoKHR & setParameters( VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR const & parameters_ ) VULKAN_HPP_NOEXCEPT
+    {
+      parameters = parameters_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkDisplayModeCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayModeCreateInfoKHR*>( this );
+    }
+
+    operator VkDisplayModeCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayModeCreateInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR const &, VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, parameters );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DisplayModeCreateInfoKHR const & ) const = default;
+#else
+    bool operator==( DisplayModeCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( parameters == rhs.parameters );
+#endif
+    }
+
+    bool operator!=( DisplayModeCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayModeCreateInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags = {};
+    VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eDisplayModeCreateInfoKHR>
+  {
+    using Type = DisplayModeCreateInfoKHR;
+  };
+
+  struct DisplayModePropertiesKHR
+  {
+    using NativeType = VkDisplayModePropertiesKHR;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DisplayModePropertiesKHR(VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ = {}, VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters_ = {}) VULKAN_HPP_NOEXCEPT
+    : displayMode( displayMode_ ), parameters( parameters_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DisplayModePropertiesKHR( DisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DisplayModePropertiesKHR( VkDisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DisplayModePropertiesKHR( *reinterpret_cast<DisplayModePropertiesKHR const *>( &rhs ) )
+    {}
+
+
+    DisplayModePropertiesKHR & operator=( DisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DisplayModePropertiesKHR & operator=( VkDisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkDisplayModePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayModePropertiesKHR*>( this );
+    }
+
+    operator VkDisplayModePropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayModePropertiesKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::DisplayModeKHR const &, VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( displayMode, parameters );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DisplayModePropertiesKHR const & ) const = default;
+#else
+    bool operator==( DisplayModePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( displayMode == rhs.displayMode )
+          && ( parameters == rhs.parameters );
+#endif
+    }
+
+    bool operator!=( DisplayModePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode = {};
+    VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters = {};
+
+  };
+
+  struct DisplayModeProperties2KHR
+  {
+    using NativeType = VkDisplayModeProperties2KHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayModeProperties2KHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DisplayModeProperties2KHR(VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR displayModeProperties_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), displayModeProperties( displayModeProperties_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DisplayModeProperties2KHR( DisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DisplayModeProperties2KHR( VkDisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DisplayModeProperties2KHR( *reinterpret_cast<DisplayModeProperties2KHR const *>( &rhs ) )
+    {}
+
+
+    DisplayModeProperties2KHR & operator=( DisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DisplayModeProperties2KHR & operator=( VkDisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkDisplayModeProperties2KHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayModeProperties2KHR*>( this );
+    }
+
+    operator VkDisplayModeProperties2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayModeProperties2KHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, displayModeProperties );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DisplayModeProperties2KHR const & ) const = default;
+#else
+    bool operator==( DisplayModeProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( displayModeProperties == rhs.displayModeProperties );
+#endif
+    }
+
+    bool operator!=( DisplayModeProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayModeProperties2KHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR displayModeProperties = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eDisplayModeProperties2KHR>
+  {
+    using Type = DisplayModeProperties2KHR;
+  };
+
+  struct DisplayNativeHdrSurfaceCapabilitiesAMD
+  {
+    using NativeType = VkDisplayNativeHdrSurfaceCapabilitiesAMD;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DisplayNativeHdrSurfaceCapabilitiesAMD(VULKAN_HPP_NAMESPACE::Bool32 localDimmingSupport_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), localDimmingSupport( localDimmingSupport_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DisplayNativeHdrSurfaceCapabilitiesAMD( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DisplayNativeHdrSurfaceCapabilitiesAMD( VkDisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DisplayNativeHdrSurfaceCapabilitiesAMD( *reinterpret_cast<DisplayNativeHdrSurfaceCapabilitiesAMD const *>( &rhs ) )
+    {}
+
+
+    DisplayNativeHdrSurfaceCapabilitiesAMD & operator=( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DisplayNativeHdrSurfaceCapabilitiesAMD & operator=( VkDisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayNativeHdrSurfaceCapabilitiesAMD const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkDisplayNativeHdrSurfaceCapabilitiesAMD const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayNativeHdrSurfaceCapabilitiesAMD*>( this );
+    }
+
+    operator VkDisplayNativeHdrSurfaceCapabilitiesAMD &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayNativeHdrSurfaceCapabilitiesAMD*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, localDimmingSupport );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DisplayNativeHdrSurfaceCapabilitiesAMD const & ) const = default;
+#else
+    bool operator==( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( localDimmingSupport == rhs.localDimmingSupport );
+#endif
+    }
+
+    bool operator!=( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 localDimmingSupport = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD>
+  {
+    using Type = DisplayNativeHdrSurfaceCapabilitiesAMD;
+  };
+
+  // C++ wrapper for VkDisplayPlaneCapabilitiesKHR: the supported alpha modes and the
+  // src/dst position/extent ranges of a display plane for a given display mode.
+  // Layout-compatible with the C struct, so the reinterpret_cast conversions below are valid.
+  struct DisplayPlaneCapabilitiesKHR
+  {
+    using NativeType = VkDisplayPlaneCapabilitiesKHR;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilitiesKHR(VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagsKHR supportedAlpha_ = {}, VULKAN_HPP_NAMESPACE::Offset2D minSrcPosition_ = {}, VULKAN_HPP_NAMESPACE::Offset2D maxSrcPosition_ = {}, VULKAN_HPP_NAMESPACE::Extent2D minSrcExtent_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxSrcExtent_ = {}, VULKAN_HPP_NAMESPACE::Offset2D minDstPosition_ = {}, VULKAN_HPP_NAMESPACE::Offset2D maxDstPosition_ = {}, VULKAN_HPP_NAMESPACE::Extent2D minDstExtent_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxDstExtent_ = {}) VULKAN_HPP_NOEXCEPT
+    : supportedAlpha( supportedAlpha_ ), minSrcPosition( minSrcPosition_ ), maxSrcPosition( maxSrcPosition_ ), minSrcExtent( minSrcExtent_ ), maxSrcExtent( maxSrcExtent_ ), minDstPosition( minDstPosition_ ), maxDstPosition( maxDstPosition_ ), minDstExtent( minDstExtent_ ), maxDstExtent( maxDstExtent_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilitiesKHR( DisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid because the two types share one layout.
+    DisplayPlaneCapabilitiesKHR( VkDisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DisplayPlaneCapabilitiesKHR( *reinterpret_cast<DisplayPlaneCapabilitiesKHR const *>( &rhs ) )
+    {}
+
+
+    DisplayPlaneCapabilitiesKHR & operator=( DisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DisplayPlaneCapabilitiesKHR & operator=( VkDisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR const *>( &rhs );
+      return *this;
+    }
+
+
+    // Implicit conversions so this wrapper can be passed straight to the C API.
+    operator VkDisplayPlaneCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayPlaneCapabilitiesKHR*>( this );
+    }
+
+    operator VkDisplayPlaneCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayPlaneCapabilitiesKHR*>( this );
+    }
+
+    // reflect() exposes a member-wise tuple view, used for tuple-based comparison below.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagsKHR const &, VULKAN_HPP_NAMESPACE::Offset2D const &, VULKAN_HPP_NAMESPACE::Offset2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::Offset2D const &, VULKAN_HPP_NAMESPACE::Offset2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( supportedAlpha, minSrcPosition, maxSrcPosition, minSrcExtent, maxSrcExtent, minDstPosition, maxDstPosition, minDstExtent, maxDstExtent );
+    }
+#endif
+
+
+    // Defaulted <=> where available; otherwise hand-written member-wise equality.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DisplayPlaneCapabilitiesKHR const & ) const = default;
+#else
+    bool operator==( DisplayPlaneCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( supportedAlpha == rhs.supportedAlpha )
+          && ( minSrcPosition == rhs.minSrcPosition )
+          && ( maxSrcPosition == rhs.maxSrcPosition )
+          && ( minSrcExtent == rhs.minSrcExtent )
+          && ( maxSrcExtent == rhs.maxSrcExtent )
+          && ( minDstPosition == rhs.minDstPosition )
+          && ( maxDstPosition == rhs.maxDstPosition )
+          && ( minDstExtent == rhs.minDstExtent )
+          && ( maxDstExtent == rhs.maxDstExtent );
+#endif
+    }
+
+    bool operator!=( DisplayPlaneCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagsKHR supportedAlpha = {};
+    VULKAN_HPP_NAMESPACE::Offset2D minSrcPosition = {};
+    VULKAN_HPP_NAMESPACE::Offset2D maxSrcPosition = {};
+    VULKAN_HPP_NAMESPACE::Extent2D minSrcExtent = {};
+    VULKAN_HPP_NAMESPACE::Extent2D maxSrcExtent = {};
+    VULKAN_HPP_NAMESPACE::Offset2D minDstPosition = {};
+    VULKAN_HPP_NAMESPACE::Offset2D maxDstPosition = {};
+    VULKAN_HPP_NAMESPACE::Extent2D minDstExtent = {};
+    VULKAN_HPP_NAMESPACE::Extent2D maxDstExtent = {};
+
+  };
+
+  // C++ wrapper for VkDisplayPlaneCapabilities2KHR: an sType/pNext-chained container
+  // around DisplayPlaneCapabilitiesKHR (output of vkGetDisplayPlaneCapabilities2KHR).
+  struct DisplayPlaneCapabilities2KHR
+  {
+    using NativeType = VkDisplayPlaneCapabilities2KHR;
+
+    // allowDuplicate / structureType drive compile-time pNext-chain validation.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPlaneCapabilities2KHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilities2KHR(VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), capabilities( capabilities_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilities2KHR( DisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid because the two types share one layout.
+    DisplayPlaneCapabilities2KHR( VkDisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DisplayPlaneCapabilities2KHR( *reinterpret_cast<DisplayPlaneCapabilities2KHR const *>( &rhs ) )
+    {}
+
+
+    DisplayPlaneCapabilities2KHR & operator=( DisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DisplayPlaneCapabilities2KHR & operator=( VkDisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR const *>( &rhs );
+      return *this;
+    }
+
+
+    // Implicit conversions so this wrapper can be passed straight to the C API.
+    operator VkDisplayPlaneCapabilities2KHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayPlaneCapabilities2KHR*>( this );
+    }
+
+    operator VkDisplayPlaneCapabilities2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayPlaneCapabilities2KHR*>( this );
+    }
+
+    // reflect() exposes a member-wise tuple view, used for tuple-based comparison below.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, capabilities );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DisplayPlaneCapabilities2KHR const & ) const = default;
+#else
+    bool operator==( DisplayPlaneCapabilities2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( capabilities == rhs.capabilities );
+#endif
+    }
+
+    bool operator!=( DisplayPlaneCapabilities2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneCapabilities2KHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities = {};
+
+  };
+
+  // Trait mapping the sType enumerant back to this struct for chain validation.
+  template <>
+  struct CppType<StructureType, StructureType::eDisplayPlaneCapabilities2KHR>
+  {
+    using Type = DisplayPlaneCapabilities2KHR;
+  };
+
+  // C++ wrapper for VkDisplayPlaneInfo2KHR: input to vkGetDisplayPlaneCapabilities2KHR,
+  // naming the display mode and plane index to query.
+  struct DisplayPlaneInfo2KHR
+  {
+    using NativeType = VkDisplayPlaneInfo2KHR;
+
+    // allowDuplicate / structureType drive compile-time pNext-chain validation.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPlaneInfo2KHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DisplayPlaneInfo2KHR(VULKAN_HPP_NAMESPACE::DisplayModeKHR mode_ = {}, uint32_t planeIndex_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), mode( mode_ ), planeIndex( planeIndex_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DisplayPlaneInfo2KHR( DisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid because the two types share one layout.
+    DisplayPlaneInfo2KHR( VkDisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DisplayPlaneInfo2KHR( *reinterpret_cast<DisplayPlaneInfo2KHR const *>( &rhs ) )
+    {}
+
+
+    DisplayPlaneInfo2KHR & operator=( DisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DisplayPlaneInfo2KHR & operator=( VkDisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR const *>( &rhs );
+      return *this;
+    }
+
+    // Fluent setters (return *this) for builder-style initialization.
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DisplayPlaneInfo2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DisplayPlaneInfo2KHR & setMode( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mode = mode_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DisplayPlaneInfo2KHR & setPlaneIndex( uint32_t planeIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      planeIndex = planeIndex_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions so this wrapper can be passed straight to the C API.
+    operator VkDisplayPlaneInfo2KHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayPlaneInfo2KHR*>( this );
+    }
+
+    operator VkDisplayPlaneInfo2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayPlaneInfo2KHR*>( this );
+    }
+
+    // reflect() exposes a member-wise tuple view, used for tuple-based comparison below.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DisplayModeKHR const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, mode, planeIndex );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DisplayPlaneInfo2KHR const & ) const = default;
+#else
+    bool operator==( DisplayPlaneInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( mode == rhs.mode )
+          && ( planeIndex == rhs.planeIndex );
+#endif
+    }
+
+    bool operator!=( DisplayPlaneInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneInfo2KHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DisplayModeKHR mode = {};
+    uint32_t planeIndex = {};
+
+  };
+
+  // Trait mapping the sType enumerant back to this struct for chain validation.
+  template <>
+  struct CppType<StructureType, StructureType::eDisplayPlaneInfo2KHR>
+  {
+    using Type = DisplayPlaneInfo2KHR;
+  };
+
+  // C++ wrapper for VkDisplayPlanePropertiesKHR: the display currently bound to a
+  // plane and the plane's current stack index (no sType/pNext; plain output struct).
+  struct DisplayPlanePropertiesKHR
+  {
+    using NativeType = VkDisplayPlanePropertiesKHR;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DisplayPlanePropertiesKHR(VULKAN_HPP_NAMESPACE::DisplayKHR currentDisplay_ = {}, uint32_t currentStackIndex_ = {}) VULKAN_HPP_NOEXCEPT
+    : currentDisplay( currentDisplay_ ), currentStackIndex( currentStackIndex_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DisplayPlanePropertiesKHR( DisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid because the two types share one layout.
+    DisplayPlanePropertiesKHR( VkDisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DisplayPlanePropertiesKHR( *reinterpret_cast<DisplayPlanePropertiesKHR const *>( &rhs ) )
+    {}
+
+
+    DisplayPlanePropertiesKHR & operator=( DisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DisplayPlanePropertiesKHR & operator=( VkDisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR const *>( &rhs );
+      return *this;
+    }
+
+
+    // Implicit conversions so this wrapper can be passed straight to the C API.
+    operator VkDisplayPlanePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayPlanePropertiesKHR*>( this );
+    }
+
+    operator VkDisplayPlanePropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayPlanePropertiesKHR*>( this );
+    }
+
+    // reflect() exposes a member-wise tuple view, used for tuple-based comparison below.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::DisplayKHR const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( currentDisplay, currentStackIndex );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DisplayPlanePropertiesKHR const & ) const = default;
+#else
+    bool operator==( DisplayPlanePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( currentDisplay == rhs.currentDisplay )
+          && ( currentStackIndex == rhs.currentStackIndex );
+#endif
+    }
+
+    bool operator!=( DisplayPlanePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::DisplayKHR currentDisplay = {};
+    uint32_t currentStackIndex = {};
+
+  };
+
+  // C++ wrapper for VkDisplayPlaneProperties2KHR: an sType/pNext-chained container
+  // around DisplayPlanePropertiesKHR (output of vkGetPhysicalDeviceDisplayPlaneProperties2KHR).
+  struct DisplayPlaneProperties2KHR
+  {
+    using NativeType = VkDisplayPlaneProperties2KHR;
+
+    // allowDuplicate / structureType drive compile-time pNext-chain validation.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPlaneProperties2KHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DisplayPlaneProperties2KHR(VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR displayPlaneProperties_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), displayPlaneProperties( displayPlaneProperties_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DisplayPlaneProperties2KHR( DisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid because the two types share one layout.
+    DisplayPlaneProperties2KHR( VkDisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DisplayPlaneProperties2KHR( *reinterpret_cast<DisplayPlaneProperties2KHR const *>( &rhs ) )
+    {}
+
+
+    DisplayPlaneProperties2KHR & operator=( DisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DisplayPlaneProperties2KHR & operator=( VkDisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR const *>( &rhs );
+      return *this;
+    }
+
+
+    // Implicit conversions so this wrapper can be passed straight to the C API.
+    operator VkDisplayPlaneProperties2KHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayPlaneProperties2KHR*>( this );
+    }
+
+    operator VkDisplayPlaneProperties2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayPlaneProperties2KHR*>( this );
+    }
+
+    // reflect() exposes a member-wise tuple view, used for tuple-based comparison below.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, displayPlaneProperties );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DisplayPlaneProperties2KHR const & ) const = default;
+#else
+    bool operator==( DisplayPlaneProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( displayPlaneProperties == rhs.displayPlaneProperties );
+#endif
+    }
+
+    bool operator!=( DisplayPlaneProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneProperties2KHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR displayPlaneProperties = {};
+
+  };
+
+  // Trait mapping the sType enumerant back to this struct for chain validation.
+  template <>
+  struct CppType<StructureType, StructureType::eDisplayPlaneProperties2KHR>
+  {
+    using Type = DisplayPlaneProperties2KHR;
+  };
+
+  // C++ wrapper for VkDisplayPowerInfoEXT (VK_EXT_display_control): the requested
+  // power state of a display, passed to vkDisplayPowerControlEXT.
+  struct DisplayPowerInfoEXT
+  {
+    using NativeType = VkDisplayPowerInfoEXT;
+
+    // allowDuplicate / structureType drive compile-time pNext-chain validation.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPowerInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DisplayPowerInfoEXT(VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState_ = VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT::eOff, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), powerState( powerState_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DisplayPowerInfoEXT( DisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid because the two types share one layout.
+    DisplayPowerInfoEXT( VkDisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DisplayPowerInfoEXT( *reinterpret_cast<DisplayPowerInfoEXT const *>( &rhs ) )
+    {}
+
+
+    DisplayPowerInfoEXT & operator=( DisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DisplayPowerInfoEXT & operator=( VkDisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    // Fluent setters (return *this) for builder-style initialization.
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DisplayPowerInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DisplayPowerInfoEXT & setPowerState( VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState_ ) VULKAN_HPP_NOEXCEPT
+    {
+      powerState = powerState_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions so this wrapper can be passed straight to the C API.
+    operator VkDisplayPowerInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayPowerInfoEXT*>( this );
+    }
+
+    operator VkDisplayPowerInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayPowerInfoEXT*>( this );
+    }
+
+    // reflect() exposes a member-wise tuple view, used for tuple-based comparison below.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, powerState );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DisplayPowerInfoEXT const & ) const = default;
+#else
+    bool operator==( DisplayPowerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( powerState == rhs.powerState );
+#endif
+    }
+
+    bool operator!=( DisplayPowerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPowerInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState = VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT::eOff;
+
+  };
+
+  // Trait mapping the sType enumerant back to this struct for chain validation.
+  template <>
+  struct CppType<StructureType, StructureType::eDisplayPowerInfoEXT>
+  {
+    using Type = DisplayPowerInfoEXT;
+  };
+
+  // C++ wrapper for VkDisplayPresentInfoKHR (VK_KHR_display_swapchain): optional
+  // src/dst rectangles and persistence flag chained into VkPresentInfoKHR.
+  struct DisplayPresentInfoKHR
+  {
+    using NativeType = VkDisplayPresentInfoKHR;
+
+    // allowDuplicate / structureType drive compile-time pNext-chain validation.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPresentInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DisplayPresentInfoKHR(VULKAN_HPP_NAMESPACE::Rect2D srcRect_ = {}, VULKAN_HPP_NAMESPACE::Rect2D dstRect_ = {}, VULKAN_HPP_NAMESPACE::Bool32 persistent_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), srcRect( srcRect_ ), dstRect( dstRect_ ), persistent( persistent_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DisplayPresentInfoKHR( DisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid because the two types share one layout.
+    DisplayPresentInfoKHR( VkDisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DisplayPresentInfoKHR( *reinterpret_cast<DisplayPresentInfoKHR const *>( &rhs ) )
+    {}
+
+
+    DisplayPresentInfoKHR & operator=( DisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DisplayPresentInfoKHR & operator=( VkDisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPresentInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    // Fluent setters (return *this) for builder-style initialization.
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DisplayPresentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DisplayPresentInfoKHR & setSrcRect( VULKAN_HPP_NAMESPACE::Rect2D const & srcRect_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcRect = srcRect_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DisplayPresentInfoKHR & setDstRect( VULKAN_HPP_NAMESPACE::Rect2D const & dstRect_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstRect = dstRect_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DisplayPresentInfoKHR & setPersistent( VULKAN_HPP_NAMESPACE::Bool32 persistent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      persistent = persistent_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions so this wrapper can be passed straight to the C API.
+    operator VkDisplayPresentInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayPresentInfoKHR*>( this );
+    }
+
+    operator VkDisplayPresentInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayPresentInfoKHR*>( this );
+    }
+
+    // reflect() exposes a member-wise tuple view, used for tuple-based comparison below.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Rect2D const &, VULKAN_HPP_NAMESPACE::Rect2D const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, srcRect, dstRect, persistent );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DisplayPresentInfoKHR const & ) const = default;
+#else
+    bool operator==( DisplayPresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( srcRect == rhs.srcRect )
+          && ( dstRect == rhs.dstRect )
+          && ( persistent == rhs.persistent );
+#endif
+    }
+
+    bool operator!=( DisplayPresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPresentInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Rect2D srcRect = {};
+    VULKAN_HPP_NAMESPACE::Rect2D dstRect = {};
+    VULKAN_HPP_NAMESPACE::Bool32 persistent = {};
+
+  };
+
+  // Trait mapping the sType enumerant back to this struct for chain validation.
+  template <>
+  struct CppType<StructureType, StructureType::eDisplayPresentInfoKHR>
+  {
+    using Type = DisplayPresentInfoKHR;
+  };
+
+  // C++ wrapper for VkDisplayPropertiesKHR: properties of a physical-device display.
+  // NOTE(review): per the Vulkan spec, displayName may be NULL. The comparison
+  // operators therefore guard against NULL before calling strcmp — passing NULL to
+  // strcmp is undefined behavior. A NULL name compares less-than any non-NULL name.
+  struct DisplayPropertiesKHR
+  {
+    using NativeType = VkDisplayPropertiesKHR;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DisplayPropertiesKHR(VULKAN_HPP_NAMESPACE::DisplayKHR display_ = {}, const char * displayName_ = {}, VULKAN_HPP_NAMESPACE::Extent2D physicalDimensions_ = {}, VULKAN_HPP_NAMESPACE::Extent2D physicalResolution_ = {}, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {}, VULKAN_HPP_NAMESPACE::Bool32 planeReorderPossible_ = {}, VULKAN_HPP_NAMESPACE::Bool32 persistentContent_ = {}) VULKAN_HPP_NOEXCEPT
+    : display( display_ ), displayName( displayName_ ), physicalDimensions( physicalDimensions_ ), physicalResolution( physicalResolution_ ), supportedTransforms( supportedTransforms_ ), planeReorderPossible( planeReorderPossible_ ), persistentContent( persistentContent_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DisplayPropertiesKHR( DisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid because the two types share one layout.
+    DisplayPropertiesKHR( VkDisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DisplayPropertiesKHR( *reinterpret_cast<DisplayPropertiesKHR const *>( &rhs ) )
+    {}
+
+
+    DisplayPropertiesKHR & operator=( DisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DisplayPropertiesKHR & operator=( VkDisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR const *>( &rhs );
+      return *this;
+    }
+
+
+    // Implicit conversions so this wrapper can be passed straight to the C API.
+    operator VkDisplayPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayPropertiesKHR*>( this );
+    }
+
+    operator VkDisplayPropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayPropertiesKHR*>( this );
+    }
+
+    // reflect() exposes a member-wise tuple view (compares displayName by pointer only).
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::DisplayKHR const &, const char * const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( display, displayName, physicalDimensions, physicalResolution, supportedTransforms, planeReorderPossible, persistentContent );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    // Hand-written (not defaulted) because displayName must be compared by content.
+    std::strong_ordering operator<=>( DisplayPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      if ( auto cmp = display <=> rhs.display; cmp != 0 ) return cmp;
+      if ( displayName != rhs.displayName )
+      {
+        // NULL-safe ordering: a NULL name sorts before any non-NULL name.
+        if ( !displayName || !rhs.displayName )
+          return displayName ? std::strong_ordering::greater : std::strong_ordering::less;
+        if ( auto cmp = strcmp( displayName, rhs.displayName ); cmp != 0 )
+          return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
+      }
+      if ( auto cmp = physicalDimensions <=> rhs.physicalDimensions; cmp != 0 ) return cmp;
+      if ( auto cmp = physicalResolution <=> rhs.physicalResolution; cmp != 0 ) return cmp;
+      if ( auto cmp = supportedTransforms <=> rhs.supportedTransforms; cmp != 0 ) return cmp;
+      if ( auto cmp = planeReorderPossible <=> rhs.planeReorderPossible; cmp != 0 ) return cmp;
+      if ( auto cmp = persistentContent <=> rhs.persistentContent; cmp != 0 ) return cmp;
+
+      return std::strong_ordering::equivalent;
+    }
+#endif
+
+    bool operator==( DisplayPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      // Equal pointers (including both NULL) short-circuit; strcmp only runs on two non-NULL names.
+      return ( display == rhs.display )
+          && ( ( displayName == rhs.displayName ) || ( displayName && rhs.displayName && ( strcmp( displayName, rhs.displayName ) == 0 ) ) )
+          && ( physicalDimensions == rhs.physicalDimensions )
+          && ( physicalResolution == rhs.physicalResolution )
+          && ( supportedTransforms == rhs.supportedTransforms )
+          && ( planeReorderPossible == rhs.planeReorderPossible )
+          && ( persistentContent == rhs.persistentContent );
+    }
+
+    bool operator!=( DisplayPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+    public:
+    VULKAN_HPP_NAMESPACE::DisplayKHR display = {};
+    const char * displayName = {};
+    VULKAN_HPP_NAMESPACE::Extent2D physicalDimensions = {};
+    VULKAN_HPP_NAMESPACE::Extent2D physicalResolution = {};
+    VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {};
+    VULKAN_HPP_NAMESPACE::Bool32 planeReorderPossible = {};
+    VULKAN_HPP_NAMESPACE::Bool32 persistentContent = {};
+
+  };
+
+  // C++ wrapper for VkDisplayProperties2KHR: an sType/pNext-chained container
+  // around DisplayPropertiesKHR (output of vkGetPhysicalDeviceDisplayProperties2KHR).
+  struct DisplayProperties2KHR
+  {
+    using NativeType = VkDisplayProperties2KHR;
+
+    // allowDuplicate / structureType drive compile-time pNext-chain validation.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayProperties2KHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DisplayProperties2KHR(VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR displayProperties_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), displayProperties( displayProperties_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DisplayProperties2KHR( DisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid because the two types share one layout.
+    DisplayProperties2KHR( VkDisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DisplayProperties2KHR( *reinterpret_cast<DisplayProperties2KHR const *>( &rhs ) )
+    {}
+
+
+    DisplayProperties2KHR & operator=( DisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DisplayProperties2KHR & operator=( VkDisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR const *>( &rhs );
+      return *this;
+    }
+
+
+    // Implicit conversions so this wrapper can be passed straight to the C API.
+    operator VkDisplayProperties2KHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayProperties2KHR*>( this );
+    }
+
+    operator VkDisplayProperties2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayProperties2KHR*>( this );
+    }
+
+    // reflect() exposes a member-wise tuple view, used for tuple-based comparison below.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, displayProperties );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DisplayProperties2KHR const & ) const = default;
+#else
+    bool operator==( DisplayProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( displayProperties == rhs.displayProperties );
+#endif
+    }
+
+    bool operator!=( DisplayProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayProperties2KHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR displayProperties = {};
+
+  };
+
+  // Trait mapping the sType enumerant back to this struct for chain validation.
+  template <>
+  struct CppType<StructureType, StructureType::eDisplayProperties2KHR>
+  {
+    using Type = DisplayProperties2KHR;
+  };
+
+  // C++ wrapper over VkDisplaySurfaceCreateInfoKHR: creation parameters for a
+  // display-plane surface (display mode, plane index/stack, pre-transform,
+  // global alpha / alpha mode, and image extent). Generated code; the
+  // reinterpret_cast conversions below rely on layout-compatibility with the C
+  // struct — do not reorder the data members.
+  struct DisplaySurfaceCreateInfoKHR
+  {
+    using NativeType = VkDisplaySurfaceCreateInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplaySurfaceCreateInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DisplaySurfaceCreateInfoKHR(VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags_ = {}, VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ = {}, uint32_t planeIndex_ = {}, uint32_t planeStackIndex_ = {}, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, float globalAlpha_ = {}, VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode_ = VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR::eOpaque, VULKAN_HPP_NAMESPACE::Extent2D imageExtent_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), displayMode( displayMode_ ), planeIndex( planeIndex_ ), planeStackIndex( planeStackIndex_ ), transform( transform_ ), globalAlpha( globalAlpha_ ), alphaMode( alphaMode_ ), imageExtent( imageExtent_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DisplaySurfaceCreateInfoKHR( DisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Converting constructor from the C struct; valid because of identical layout.
+    DisplaySurfaceCreateInfoKHR( VkDisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DisplaySurfaceCreateInfoKHR( *reinterpret_cast<DisplaySurfaceCreateInfoKHR const *>( &rhs ) )
+    {}
+
+
+    DisplaySurfaceCreateInfoKHR & operator=( DisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DisplaySurfaceCreateInfoKHR & operator=( VkDisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each assigns one member and returns *this for chaining.
+    VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setDisplayMode( VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      displayMode = displayMode_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setPlaneIndex( uint32_t planeIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      planeIndex = planeIndex_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setPlaneStackIndex( uint32_t planeStackIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      planeStackIndex = planeStackIndex_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT
+    {
+      transform = transform_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setGlobalAlpha( float globalAlpha_ ) VULKAN_HPP_NOEXCEPT
+    {
+      globalAlpha = globalAlpha_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setAlphaMode( VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      alphaMode = alphaMode_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setImageExtent( VULKAN_HPP_NAMESPACE::Extent2D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageExtent = imageExtent_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views as the C struct for passing straight to the Vulkan C API.
+    operator VkDisplaySurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR*>( this );
+    }
+
+    operator VkDisplaySurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplaySurfaceCreateInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR const &, VULKAN_HPP_NAMESPACE::DisplayModeKHR const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR const &, float const &, VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR const &, VULKAN_HPP_NAMESPACE::Extent2D const &>
+#endif
+      // Member-reference tuple used by the reflection-based comparison path.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, displayMode, planeIndex, planeStackIndex, transform, globalAlpha, alphaMode, imageExtent );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DisplaySurfaceCreateInfoKHR const & ) const = default;
+#else
+    // Pre-C++20 fallback: memberwise equality (pNext compared by pointer value).
+    bool operator==( DisplaySurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( displayMode == rhs.displayMode )
+          && ( planeIndex == rhs.planeIndex )
+          && ( planeStackIndex == rhs.planeStackIndex )
+          && ( transform == rhs.transform )
+          && ( globalAlpha == rhs.globalAlpha )
+          && ( alphaMode == rhs.alphaMode )
+          && ( imageExtent == rhs.imageExtent );
+#endif
+    }
+
+    bool operator!=( DisplaySurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplaySurfaceCreateInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags = {};
+    VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode = {};
+    uint32_t planeIndex = {};
+    uint32_t planeStackIndex = {};
+    VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
+    float globalAlpha = {};
+    VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode = VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR::eOpaque;
+    VULKAN_HPP_NAMESPACE::Extent2D imageExtent = {};
+
+  };
+
+  // Compile-time mapping from the StructureType enumerant back to the C++ struct.
+  template <>
+  struct CppType<StructureType, StructureType::eDisplaySurfaceCreateInfoKHR>
+  {
+    using Type = DisplaySurfaceCreateInfoKHR;
+  };
+
+  // C++ wrapper over VkDrawIndexedIndirectCommand: the per-draw record consumed
+  // by indexed indirect draws. Plain data (no sType/pNext); layout-compatible
+  // with the C struct, so the reinterpret_cast conversions below are valid.
+  struct DrawIndexedIndirectCommand
+  {
+    using NativeType = VkDrawIndexedIndirectCommand;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DrawIndexedIndirectCommand(uint32_t indexCount_ = {}, uint32_t instanceCount_ = {}, uint32_t firstIndex_ = {}, int32_t vertexOffset_ = {}, uint32_t firstInstance_ = {}) VULKAN_HPP_NOEXCEPT
+    : indexCount( indexCount_ ), instanceCount( instanceCount_ ), firstIndex( firstIndex_ ), vertexOffset( vertexOffset_ ), firstInstance( firstInstance_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DrawIndexedIndirectCommand( DrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DrawIndexedIndirectCommand( VkDrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DrawIndexedIndirectCommand( *reinterpret_cast<DrawIndexedIndirectCommand const *>( &rhs ) )
+    {}
+
+
+    DrawIndexedIndirectCommand & operator=( DrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DrawIndexedIndirectCommand & operator=( VkDrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrawIndexedIndirectCommand const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters returning *this for chaining.
+    VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand & setIndexCount( uint32_t indexCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      indexCount = indexCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand & setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      instanceCount = instanceCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand & setFirstIndex( uint32_t firstIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      firstIndex = firstIndex_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand & setVertexOffset( int32_t vertexOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexOffset = vertexOffset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand & setFirstInstance( uint32_t firstInstance_ ) VULKAN_HPP_NOEXCEPT
+    {
+      firstInstance = firstInstance_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views as the C struct.
+    operator VkDrawIndexedIndirectCommand const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDrawIndexedIndirectCommand*>( this );
+    }
+
+    operator VkDrawIndexedIndirectCommand &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDrawIndexedIndirectCommand*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<uint32_t const &, uint32_t const &, uint32_t const &, int32_t const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( indexCount, instanceCount, firstIndex, vertexOffset, firstInstance );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DrawIndexedIndirectCommand const & ) const = default;
+#else
+    // Pre-C++20 fallback: memberwise equality.
+    bool operator==( DrawIndexedIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( indexCount == rhs.indexCount )
+          && ( instanceCount == rhs.instanceCount )
+          && ( firstIndex == rhs.firstIndex )
+          && ( vertexOffset == rhs.vertexOffset )
+          && ( firstInstance == rhs.firstInstance );
+#endif
+    }
+
+    bool operator!=( DrawIndexedIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    uint32_t indexCount = {};
+    uint32_t instanceCount = {};
+    uint32_t firstIndex = {};
+    int32_t vertexOffset = {};
+    uint32_t firstInstance = {};
+
+  };
+
+  // C++ wrapper over VkDrawIndirectCommand: the per-draw record consumed by
+  // non-indexed indirect draws. Plain data (no sType/pNext); layout-compatible
+  // with the C struct, so the reinterpret_cast conversions below are valid.
+  struct DrawIndirectCommand
+  {
+    using NativeType = VkDrawIndirectCommand;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DrawIndirectCommand(uint32_t vertexCount_ = {}, uint32_t instanceCount_ = {}, uint32_t firstVertex_ = {}, uint32_t firstInstance_ = {}) VULKAN_HPP_NOEXCEPT
+    : vertexCount( vertexCount_ ), instanceCount( instanceCount_ ), firstVertex( firstVertex_ ), firstInstance( firstInstance_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DrawIndirectCommand( DrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DrawIndirectCommand( VkDrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DrawIndirectCommand( *reinterpret_cast<DrawIndirectCommand const *>( &rhs ) )
+    {}
+
+
+    DrawIndirectCommand & operator=( DrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DrawIndirectCommand & operator=( VkDrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrawIndirectCommand const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters returning *this for chaining.
+    VULKAN_HPP_CONSTEXPR_14 DrawIndirectCommand & setVertexCount( uint32_t vertexCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexCount = vertexCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DrawIndirectCommand & setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      instanceCount = instanceCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DrawIndirectCommand & setFirstVertex( uint32_t firstVertex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      firstVertex = firstVertex_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DrawIndirectCommand & setFirstInstance( uint32_t firstInstance_ ) VULKAN_HPP_NOEXCEPT
+    {
+      firstInstance = firstInstance_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views as the C struct.
+    operator VkDrawIndirectCommand const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDrawIndirectCommand*>( this );
+    }
+
+    operator VkDrawIndirectCommand &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDrawIndirectCommand*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( vertexCount, instanceCount, firstVertex, firstInstance );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DrawIndirectCommand const & ) const = default;
+#else
+    // Pre-C++20 fallback: memberwise equality.
+    bool operator==( DrawIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( vertexCount == rhs.vertexCount )
+          && ( instanceCount == rhs.instanceCount )
+          && ( firstVertex == rhs.firstVertex )
+          && ( firstInstance == rhs.firstInstance );
+#endif
+    }
+
+    bool operator!=( DrawIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    uint32_t vertexCount = {};
+    uint32_t instanceCount = {};
+    uint32_t firstVertex = {};
+    uint32_t firstInstance = {};
+
+  };
+
+  // C++ wrapper over VkDrawMeshTasksIndirectCommandEXT: per-dispatch record for
+  // EXT mesh-shader indirect draws (X/Y/Z group counts). Plain data;
+  // layout-compatible with the C struct for the reinterpret_cast conversions.
+  struct DrawMeshTasksIndirectCommandEXT
+  {
+    using NativeType = VkDrawMeshTasksIndirectCommandEXT;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DrawMeshTasksIndirectCommandEXT(uint32_t groupCountX_ = {}, uint32_t groupCountY_ = {}, uint32_t groupCountZ_ = {}) VULKAN_HPP_NOEXCEPT
+    : groupCountX( groupCountX_ ), groupCountY( groupCountY_ ), groupCountZ( groupCountZ_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DrawMeshTasksIndirectCommandEXT( DrawMeshTasksIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DrawMeshTasksIndirectCommandEXT( VkDrawMeshTasksIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DrawMeshTasksIndirectCommandEXT( *reinterpret_cast<DrawMeshTasksIndirectCommandEXT const *>( &rhs ) )
+    {}
+
+
+    DrawMeshTasksIndirectCommandEXT & operator=( DrawMeshTasksIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DrawMeshTasksIndirectCommandEXT & operator=( VkDrawMeshTasksIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters returning *this for chaining.
+    VULKAN_HPP_CONSTEXPR_14 DrawMeshTasksIndirectCommandEXT & setGroupCountX( uint32_t groupCountX_ ) VULKAN_HPP_NOEXCEPT
+    {
+      groupCountX = groupCountX_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DrawMeshTasksIndirectCommandEXT & setGroupCountY( uint32_t groupCountY_ ) VULKAN_HPP_NOEXCEPT
+    {
+      groupCountY = groupCountY_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DrawMeshTasksIndirectCommandEXT & setGroupCountZ( uint32_t groupCountZ_ ) VULKAN_HPP_NOEXCEPT
+    {
+      groupCountZ = groupCountZ_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views as the C struct.
+    operator VkDrawMeshTasksIndirectCommandEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDrawMeshTasksIndirectCommandEXT*>( this );
+    }
+
+    operator VkDrawMeshTasksIndirectCommandEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDrawMeshTasksIndirectCommandEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<uint32_t const &, uint32_t const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( groupCountX, groupCountY, groupCountZ );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DrawMeshTasksIndirectCommandEXT const & ) const = default;
+#else
+    // Pre-C++20 fallback: memberwise equality.
+    bool operator==( DrawMeshTasksIndirectCommandEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( groupCountX == rhs.groupCountX )
+          && ( groupCountY == rhs.groupCountY )
+          && ( groupCountZ == rhs.groupCountZ );
+#endif
+    }
+
+    bool operator!=( DrawMeshTasksIndirectCommandEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    uint32_t groupCountX = {};
+    uint32_t groupCountY = {};
+    uint32_t groupCountZ = {};
+
+  };
+
+  // C++ wrapper over VkDrawMeshTasksIndirectCommandNV: per-dispatch record for
+  // NV mesh-shader indirect draws (taskCount / firstTask, unlike the EXT
+  // variant's 3D group counts). Plain data; layout-compatible with the C struct.
+  struct DrawMeshTasksIndirectCommandNV
+  {
+    using NativeType = VkDrawMeshTasksIndirectCommandNV;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DrawMeshTasksIndirectCommandNV(uint32_t taskCount_ = {}, uint32_t firstTask_ = {}) VULKAN_HPP_NOEXCEPT
+    : taskCount( taskCount_ ), firstTask( firstTask_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DrawMeshTasksIndirectCommandNV( DrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DrawMeshTasksIndirectCommandNV( VkDrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DrawMeshTasksIndirectCommandNV( *reinterpret_cast<DrawMeshTasksIndirectCommandNV const *>( &rhs ) )
+    {}
+
+
+    DrawMeshTasksIndirectCommandNV & operator=( DrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DrawMeshTasksIndirectCommandNV & operator=( VkDrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters returning *this for chaining.
+    VULKAN_HPP_CONSTEXPR_14 DrawMeshTasksIndirectCommandNV & setTaskCount( uint32_t taskCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      taskCount = taskCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DrawMeshTasksIndirectCommandNV & setFirstTask( uint32_t firstTask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      firstTask = firstTask_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views as the C struct.
+    operator VkDrawMeshTasksIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDrawMeshTasksIndirectCommandNV*>( this );
+    }
+
+    operator VkDrawMeshTasksIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDrawMeshTasksIndirectCommandNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<uint32_t const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( taskCount, firstTask );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DrawMeshTasksIndirectCommandNV const & ) const = default;
+#else
+    // Pre-C++20 fallback: memberwise equality.
+    bool operator==( DrawMeshTasksIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( taskCount == rhs.taskCount )
+          && ( firstTask == rhs.firstTask );
+#endif
+    }
+
+    bool operator!=( DrawMeshTasksIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    uint32_t taskCount = {};
+    uint32_t firstTask = {};
+
+  };
+
+  // C++ wrapper over VkDrmFormatModifierProperties2EXT: one DRM format modifier
+  // plus its plane count and 64-bit FormatFeatureFlags2 tiling features.
+  // Output-only (no setters are generated); layout-compatible with the C struct.
+  struct DrmFormatModifierProperties2EXT
+  {
+    using NativeType = VkDrmFormatModifierProperties2EXT;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DrmFormatModifierProperties2EXT(uint64_t drmFormatModifier_ = {}, uint32_t drmFormatModifierPlaneCount_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 drmFormatModifierTilingFeatures_ = {}) VULKAN_HPP_NOEXCEPT
+    : drmFormatModifier( drmFormatModifier_ ), drmFormatModifierPlaneCount( drmFormatModifierPlaneCount_ ), drmFormatModifierTilingFeatures( drmFormatModifierTilingFeatures_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DrmFormatModifierProperties2EXT( DrmFormatModifierProperties2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DrmFormatModifierProperties2EXT( VkDrmFormatModifierProperties2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DrmFormatModifierProperties2EXT( *reinterpret_cast<DrmFormatModifierProperties2EXT const *>( &rhs ) )
+    {}
+
+
+    DrmFormatModifierProperties2EXT & operator=( DrmFormatModifierProperties2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DrmFormatModifierProperties2EXT & operator=( VkDrmFormatModifierProperties2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT const *>( &rhs );
+      return *this;
+    }
+
+
+    // Zero-cost views as the C struct.
+    operator VkDrmFormatModifierProperties2EXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDrmFormatModifierProperties2EXT*>( this );
+    }
+
+    operator VkDrmFormatModifierProperties2EXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDrmFormatModifierProperties2EXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<uint64_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( drmFormatModifier, drmFormatModifierPlaneCount, drmFormatModifierTilingFeatures );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DrmFormatModifierProperties2EXT const & ) const = default;
+#else
+    // Pre-C++20 fallback: memberwise equality.
+    bool operator==( DrmFormatModifierProperties2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( drmFormatModifier == rhs.drmFormatModifier )
+          && ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount )
+          && ( drmFormatModifierTilingFeatures == rhs.drmFormatModifierTilingFeatures );
+#endif
+    }
+
+    bool operator!=( DrmFormatModifierProperties2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    uint64_t drmFormatModifier = {};
+    uint32_t drmFormatModifierPlaneCount = {};
+    VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 drmFormatModifierTilingFeatures = {};
+
+  };
+
+  // C++ wrapper over VkDrmFormatModifierPropertiesEXT: one DRM format modifier
+  // plus its plane count and (32-bit FormatFeatureFlags) tiling features — the
+  // original-width sibling of DrmFormatModifierProperties2EXT above.
+  // Output-only (no setters are generated); layout-compatible with the C struct.
+  struct DrmFormatModifierPropertiesEXT
+  {
+    using NativeType = VkDrmFormatModifierPropertiesEXT;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesEXT(uint64_t drmFormatModifier_ = {}, uint32_t drmFormatModifierPlaneCount_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags drmFormatModifierTilingFeatures_ = {}) VULKAN_HPP_NOEXCEPT
+    : drmFormatModifier( drmFormatModifier_ ), drmFormatModifierPlaneCount( drmFormatModifierPlaneCount_ ), drmFormatModifierTilingFeatures( drmFormatModifierTilingFeatures_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesEXT( DrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DrmFormatModifierPropertiesEXT( VkDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DrmFormatModifierPropertiesEXT( *reinterpret_cast<DrmFormatModifierPropertiesEXT const *>( &rhs ) )
+    {}
+
+
+    DrmFormatModifierPropertiesEXT & operator=( DrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DrmFormatModifierPropertiesEXT & operator=( VkDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+
+    // Zero-cost views as the C struct.
+    operator VkDrmFormatModifierPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDrmFormatModifierPropertiesEXT*>( this );
+    }
+
+    operator VkDrmFormatModifierPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDrmFormatModifierPropertiesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<uint64_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::FormatFeatureFlags const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( drmFormatModifier, drmFormatModifierPlaneCount, drmFormatModifierTilingFeatures );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DrmFormatModifierPropertiesEXT const & ) const = default;
+#else
+    // Pre-C++20 fallback: memberwise equality.
+    bool operator==( DrmFormatModifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( drmFormatModifier == rhs.drmFormatModifier )
+          && ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount )
+          && ( drmFormatModifierTilingFeatures == rhs.drmFormatModifierTilingFeatures );
+#endif
+    }
+
+    bool operator!=( DrmFormatModifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    uint64_t drmFormatModifier = {};
+    uint32_t drmFormatModifierPlaneCount = {};
+    VULKAN_HPP_NAMESPACE::FormatFeatureFlags drmFormatModifierTilingFeatures = {};
+
+  };
+
+  // C++ wrapper over VkDrmFormatModifierPropertiesList2EXT: a pNext-chainable
+  // output list of DrmFormatModifierProperties2EXT entries (count + pointer).
+  // The pointer is not owned — the caller provides the backing storage.
+  // Layout-compatible with the C struct.
+  struct DrmFormatModifierPropertiesList2EXT
+  {
+    using NativeType = VkDrmFormatModifierPropertiesList2EXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDrmFormatModifierPropertiesList2EXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesList2EXT(uint32_t drmFormatModifierCount_ = {}, VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT * pDrmFormatModifierProperties_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), drmFormatModifierCount( drmFormatModifierCount_ ), pDrmFormatModifierProperties( pDrmFormatModifierProperties_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesList2EXT( DrmFormatModifierPropertiesList2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DrmFormatModifierPropertiesList2EXT( VkDrmFormatModifierPropertiesList2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DrmFormatModifierPropertiesList2EXT( *reinterpret_cast<DrmFormatModifierPropertiesList2EXT const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Enhanced-mode convenience: derives count and pointer from an array proxy.
+    DrmFormatModifierPropertiesList2EXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT> const & drmFormatModifierProperties_, void * pNext_ = nullptr )
+    : pNext( pNext_ ), drmFormatModifierCount( static_cast<uint32_t>( drmFormatModifierProperties_.size() ) ), pDrmFormatModifierProperties( drmFormatModifierProperties_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    DrmFormatModifierPropertiesList2EXT & operator=( DrmFormatModifierPropertiesList2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DrmFormatModifierPropertiesList2EXT & operator=( VkDrmFormatModifierPropertiesList2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesList2EXT const *>( &rhs );
+      return *this;
+    }
+
+
+    // Zero-cost views as the C struct.
+    operator VkDrmFormatModifierPropertiesList2EXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDrmFormatModifierPropertiesList2EXT*>( this );
+    }
+
+    operator VkDrmFormatModifierPropertiesList2EXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDrmFormatModifierPropertiesList2EXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, drmFormatModifierCount, pDrmFormatModifierProperties );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DrmFormatModifierPropertiesList2EXT const & ) const = default;
+#else
+    // Pre-C++20 fallback: memberwise equality (pointers compared by address, not contents).
+    bool operator==( DrmFormatModifierPropertiesList2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( drmFormatModifierCount == rhs.drmFormatModifierCount )
+          && ( pDrmFormatModifierProperties == rhs.pDrmFormatModifierProperties );
+#endif
+    }
+
+    bool operator!=( DrmFormatModifierPropertiesList2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDrmFormatModifierPropertiesList2EXT;
+    void * pNext = {};
+    uint32_t drmFormatModifierCount = {};
+    VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT * pDrmFormatModifierProperties = {};
+
+  };
+
+  // Compile-time mapping from the StructureType enumerant back to the C++ struct.
+  template <>
+  struct CppType<StructureType, StructureType::eDrmFormatModifierPropertiesList2EXT>
+  {
+    using Type = DrmFormatModifierPropertiesList2EXT;
+  };
+
+  // C++ wrapper over VkDrmFormatModifierPropertiesListEXT: a pNext-chainable
+  // output list of DrmFormatModifierPropertiesEXT entries (count + pointer) —
+  // the original-width sibling of DrmFormatModifierPropertiesList2EXT.
+  // The pointer is not owned; layout-compatible with the C struct.
+  struct DrmFormatModifierPropertiesListEXT
+  {
+    using NativeType = VkDrmFormatModifierPropertiesListEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDrmFormatModifierPropertiesListEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesListEXT(uint32_t drmFormatModifierCount_ = {}, VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT * pDrmFormatModifierProperties_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), drmFormatModifierCount( drmFormatModifierCount_ ), pDrmFormatModifierProperties( pDrmFormatModifierProperties_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesListEXT( DrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DrmFormatModifierPropertiesListEXT( VkDrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DrmFormatModifierPropertiesListEXT( *reinterpret_cast<DrmFormatModifierPropertiesListEXT const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Enhanced-mode convenience: derives count and pointer from an array proxy.
+    DrmFormatModifierPropertiesListEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT> const & drmFormatModifierProperties_, void * pNext_ = nullptr )
+    : pNext( pNext_ ), drmFormatModifierCount( static_cast<uint32_t>( drmFormatModifierProperties_.size() ) ), pDrmFormatModifierProperties( drmFormatModifierProperties_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    DrmFormatModifierPropertiesListEXT & operator=( DrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DrmFormatModifierPropertiesListEXT & operator=( VkDrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT const *>( &rhs );
+      return *this;
+    }
+
+
+    // Zero-cost views as the C struct.
+    operator VkDrmFormatModifierPropertiesListEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDrmFormatModifierPropertiesListEXT*>( this );
+    }
+
+    operator VkDrmFormatModifierPropertiesListEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDrmFormatModifierPropertiesListEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, drmFormatModifierCount, pDrmFormatModifierProperties );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( DrmFormatModifierPropertiesListEXT const & ) const = default;
+#else
+    // Pre-C++20 fallback: memberwise equality (pointers compared by address, not contents).
+    bool operator==( DrmFormatModifierPropertiesListEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( drmFormatModifierCount == rhs.drmFormatModifierCount )
+          && ( pDrmFormatModifierProperties == rhs.pDrmFormatModifierProperties );
+#endif
+    }
+
+    bool operator!=( DrmFormatModifierPropertiesListEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDrmFormatModifierPropertiesListEXT;
+    void * pNext = {};
+    uint32_t drmFormatModifierCount = {};
+    VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT * pDrmFormatModifierProperties = {};
+
+  };
+
+  // Compile-time mapping from the StructureType enumerant back to the C++ struct.
+  template <>
+  struct CppType<StructureType, StructureType::eDrmFormatModifierPropertiesListEXT>
+  {
+    using Type = DrmFormatModifierPropertiesListEXT;
+  };
+
+  struct EventCreateInfo
+  {
+    using NativeType = VkEventCreateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eEventCreateInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR EventCreateInfo(VULKAN_HPP_NAMESPACE::EventCreateFlags flags_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR EventCreateInfo( EventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    EventCreateInfo( VkEventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : EventCreateInfo( *reinterpret_cast<EventCreateInfo const *>( &rhs ) )
+    {}
+
+
+    EventCreateInfo & operator=( EventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    EventCreateInfo & operator=( VkEventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::EventCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 EventCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 EventCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::EventCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkEventCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkEventCreateInfo*>( this );
+    }
+
+    operator VkEventCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkEventCreateInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::EventCreateFlags const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( EventCreateInfo const & ) const = default;
+#else
+    bool operator==( EventCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags );
+#endif
+    }
+
+    bool operator!=( EventCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eEventCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::EventCreateFlags flags = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eEventCreateInfo>
+  {
+    using Type = EventCreateInfo;
+  };
+
+  struct ExportFenceCreateInfo
+  {
+    using NativeType = VkExportFenceCreateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportFenceCreateInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ExportFenceCreateInfo(VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), handleTypes( handleTypes_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ExportFenceCreateInfo( ExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExportFenceCreateInfo( VkExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ExportFenceCreateInfo( *reinterpret_cast<ExportFenceCreateInfo const *>( &rhs ) )
+    {}
+
+
+    ExportFenceCreateInfo & operator=( ExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ExportFenceCreateInfo & operator=( VkExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ExportFenceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ExportFenceCreateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleTypes = handleTypes_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkExportFenceCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExportFenceCreateInfo*>( this );
+    }
+
+    operator VkExportFenceCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExportFenceCreateInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, handleTypes );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ExportFenceCreateInfo const & ) const = default;
+#else
+    bool operator==( ExportFenceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleTypes == rhs.handleTypes );
+#endif
+    }
+
+    bool operator!=( ExportFenceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportFenceCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eExportFenceCreateInfo>
+  {
+    using Type = ExportFenceCreateInfo;
+  };
+  using ExportFenceCreateInfoKHR = ExportFenceCreateInfo;
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  struct ExportFenceWin32HandleInfoKHR
+  {
+    using NativeType = VkExportFenceWin32HandleInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportFenceWin32HandleInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ExportFenceWin32HandleInfoKHR(const SECURITY_ATTRIBUTES * pAttributes_ = {}, DWORD dwAccess_ = {}, LPCWSTR name_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), pAttributes( pAttributes_ ), dwAccess( dwAccess_ ), name( name_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ExportFenceWin32HandleInfoKHR( ExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExportFenceWin32HandleInfoKHR( VkExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ExportFenceWin32HandleInfoKHR( *reinterpret_cast<ExportFenceWin32HandleInfoKHR const *>( &rhs ) )
+    {}
+
+
+    ExportFenceWin32HandleInfoKHR & operator=( ExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ExportFenceWin32HandleInfoKHR & operator=( VkExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportFenceWin32HandleInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ExportFenceWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ExportFenceWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAttributes = pAttributes_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ExportFenceWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dwAccess = dwAccess_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ExportFenceWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT
+    {
+      name = name_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkExportFenceWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExportFenceWin32HandleInfoKHR*>( this );
+    }
+
+    operator VkExportFenceWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExportFenceWin32HandleInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const SECURITY_ATTRIBUTES * const &, DWORD const &, LPCWSTR const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, pAttributes, dwAccess, name );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ExportFenceWin32HandleInfoKHR const & ) const = default;
+#else
+    bool operator==( ExportFenceWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pAttributes == rhs.pAttributes )
+          && ( dwAccess == rhs.dwAccess )
+          && ( name == rhs.name );
+#endif
+    }
+
+    bool operator!=( ExportFenceWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportFenceWin32HandleInfoKHR;
+    const void * pNext = {};
+    const SECURITY_ATTRIBUTES * pAttributes = {};
+    DWORD dwAccess = {};
+    LPCWSTR name = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eExportFenceWin32HandleInfoKHR>
+  {
+    using Type = ExportFenceWin32HandleInfoKHR;
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  struct ExportMemoryAllocateInfo
+  {
+    using NativeType = VkExportMemoryAllocateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryAllocateInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfo(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), handleTypes( handleTypes_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfo( ExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExportMemoryAllocateInfo( VkExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ExportMemoryAllocateInfo( *reinterpret_cast<ExportMemoryAllocateInfo const *>( &rhs ) )
+    {}
+
+
+    ExportMemoryAllocateInfo & operator=( ExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ExportMemoryAllocateInfo & operator=( VkExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ExportMemoryAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ExportMemoryAllocateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleTypes = handleTypes_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkExportMemoryAllocateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExportMemoryAllocateInfo*>( this );
+    }
+
+    operator VkExportMemoryAllocateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExportMemoryAllocateInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, handleTypes );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ExportMemoryAllocateInfo const & ) const = default;
+#else
+    bool operator==( ExportMemoryAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleTypes == rhs.handleTypes );
+#endif
+    }
+
+    bool operator!=( ExportMemoryAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryAllocateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eExportMemoryAllocateInfo>
+  {
+    using Type = ExportMemoryAllocateInfo;
+  };
+  using ExportMemoryAllocateInfoKHR = ExportMemoryAllocateInfo;
+
+  struct ExportMemoryAllocateInfoNV
+  {
+    using NativeType = VkExportMemoryAllocateInfoNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryAllocateInfoNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfoNV(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), handleTypes( handleTypes_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfoNV( ExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExportMemoryAllocateInfoNV( VkExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ExportMemoryAllocateInfoNV( *reinterpret_cast<ExportMemoryAllocateInfoNV const *>( &rhs ) )
+    {}
+
+
+    ExportMemoryAllocateInfoNV & operator=( ExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ExportMemoryAllocateInfoNV & operator=( VkExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfoNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ExportMemoryAllocateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ExportMemoryAllocateInfoNV & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleTypes = handleTypes_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkExportMemoryAllocateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExportMemoryAllocateInfoNV*>( this );
+    }
+
+    operator VkExportMemoryAllocateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExportMemoryAllocateInfoNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, handleTypes );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ExportMemoryAllocateInfoNV const & ) const = default;
+#else
+    bool operator==( ExportMemoryAllocateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleTypes == rhs.handleTypes );
+#endif
+    }
+
+    bool operator!=( ExportMemoryAllocateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryAllocateInfoNV;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eExportMemoryAllocateInfoNV>
+  {
+    using Type = ExportMemoryAllocateInfoNV;
+  };
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  struct ExportMemoryWin32HandleInfoKHR
+  {
+    using NativeType = VkExportMemoryWin32HandleInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryWin32HandleInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoKHR(const SECURITY_ATTRIBUTES * pAttributes_ = {}, DWORD dwAccess_ = {}, LPCWSTR name_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), pAttributes( pAttributes_ ), dwAccess( dwAccess_ ), name( name_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoKHR( ExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExportMemoryWin32HandleInfoKHR( VkExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ExportMemoryWin32HandleInfoKHR( *reinterpret_cast<ExportMemoryWin32HandleInfoKHR const *>( &rhs ) )
+    {}
+
+
+    ExportMemoryWin32HandleInfoKHR & operator=( ExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ExportMemoryWin32HandleInfoKHR & operator=( VkExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAttributes = pAttributes_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dwAccess = dwAccess_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT
+    {
+      name = name_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkExportMemoryWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExportMemoryWin32HandleInfoKHR*>( this );
+    }
+
+    operator VkExportMemoryWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExportMemoryWin32HandleInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const SECURITY_ATTRIBUTES * const &, DWORD const &, LPCWSTR const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, pAttributes, dwAccess, name );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ExportMemoryWin32HandleInfoKHR const & ) const = default;
+#else
+    bool operator==( ExportMemoryWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pAttributes == rhs.pAttributes )
+          && ( dwAccess == rhs.dwAccess )
+          && ( name == rhs.name );
+#endif
+    }
+
+    bool operator!=( ExportMemoryWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryWin32HandleInfoKHR;
+    const void * pNext = {};
+    const SECURITY_ATTRIBUTES * pAttributes = {};
+    DWORD dwAccess = {};
+    LPCWSTR name = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eExportMemoryWin32HandleInfoKHR>
+  {
+    using Type = ExportMemoryWin32HandleInfoKHR;
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  struct ExportMemoryWin32HandleInfoNV
+  {
+    using NativeType = VkExportMemoryWin32HandleInfoNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryWin32HandleInfoNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoNV(const SECURITY_ATTRIBUTES * pAttributes_ = {}, DWORD dwAccess_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), pAttributes( pAttributes_ ), dwAccess( dwAccess_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoNV( ExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExportMemoryWin32HandleInfoNV( VkExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ExportMemoryWin32HandleInfoNV( *reinterpret_cast<ExportMemoryWin32HandleInfoNV const *>( &rhs ) )
+    {}
+
+
+    ExportMemoryWin32HandleInfoNV & operator=( ExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ExportMemoryWin32HandleInfoNV & operator=( VkExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoNV & setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAttributes = pAttributes_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoNV & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dwAccess = dwAccess_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkExportMemoryWin32HandleInfoNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExportMemoryWin32HandleInfoNV*>( this );
+    }
+
+    operator VkExportMemoryWin32HandleInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExportMemoryWin32HandleInfoNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const SECURITY_ATTRIBUTES * const &, DWORD const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, pAttributes, dwAccess );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ExportMemoryWin32HandleInfoNV const & ) const = default;
+#else
+    bool operator==( ExportMemoryWin32HandleInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pAttributes == rhs.pAttributes )
+          && ( dwAccess == rhs.dwAccess );
+#endif
+    }
+
+    bool operator!=( ExportMemoryWin32HandleInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryWin32HandleInfoNV;
+    const void * pNext = {};
+    const SECURITY_ATTRIBUTES * pAttributes = {};
+    DWORD dwAccess = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eExportMemoryWin32HandleInfoNV>
+  {
+    using Type = ExportMemoryWin32HandleInfoNV;
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+  struct ExportMetalBufferInfoEXT
+  {
+    using NativeType = VkExportMetalBufferInfoEXT;
+
+    static const bool allowDuplicate = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMetalBufferInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ExportMetalBufferInfoEXT(VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, MTLBuffer_id mtlBuffer_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), memory( memory_ ), mtlBuffer( mtlBuffer_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ExportMetalBufferInfoEXT( ExportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExportMetalBufferInfoEXT( VkExportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ExportMetalBufferInfoEXT( *reinterpret_cast<ExportMetalBufferInfoEXT const *>( &rhs ) )
+    {}
+
+
+    ExportMetalBufferInfoEXT & operator=( ExportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ExportMetalBufferInfoEXT & operator=( VkExportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMetalBufferInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ExportMetalBufferInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ExportMetalBufferInfoEXT & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memory = memory_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ExportMetalBufferInfoEXT & setMtlBuffer( MTLBuffer_id mtlBuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mtlBuffer = mtlBuffer_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkExportMetalBufferInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExportMetalBufferInfoEXT*>( this );
+    }
+
+    operator VkExportMetalBufferInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExportMetalBufferInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceMemory const &, MTLBuffer_id const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, memory, mtlBuffer );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ExportMetalBufferInfoEXT const & ) const = default;
+#else
+    bool operator==( ExportMetalBufferInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memory == rhs.memory )
+          && ( mtlBuffer == rhs.mtlBuffer );
+#endif
+    }
+
+    bool operator!=( ExportMetalBufferInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMetalBufferInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
+    MTLBuffer_id mtlBuffer = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eExportMetalBufferInfoEXT>
+  {
+    using Type = ExportMetalBufferInfoEXT;
+  };
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+  // C++ wrapper for VkExportMetalCommandQueueInfoEXT: associates a Vulkan Queue with the
+  // MTLCommandQueue_id to export. The field layout mirrors the C struct exactly, which is
+  // what makes the reinterpret_cast-based conversion operators below well-defined.
+  struct ExportMetalCommandQueueInfoEXT
+  {
+    using NativeType = VkExportMetalCommandQueueInfoEXT;
+
+    // Whether more than one instance of this sType may appear in the same pNext chain.
+    static const bool allowDuplicate = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMetalCommandQueueInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ExportMetalCommandQueueInfoEXT(VULKAN_HPP_NAMESPACE::Queue queue_ = {}, MTLCommandQueue_id mtlCommandQueue_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), queue( queue_ ), mtlCommandQueue( mtlCommandQueue_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ExportMetalCommandQueueInfoEXT( ExportMetalCommandQueueInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid because of the layout compatibility noted above.
+    ExportMetalCommandQueueInfoEXT( VkExportMetalCommandQueueInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ExportMetalCommandQueueInfoEXT( *reinterpret_cast<ExportMetalCommandQueueInfoEXT const *>( &rhs ) )
+    {}
+
+
+    ExportMetalCommandQueueInfoEXT & operator=( ExportMetalCommandQueueInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ExportMetalCommandQueueInfoEXT & operator=( VkExportMetalCommandQueueInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMetalCommandQueueInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ExportMetalCommandQueueInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ExportMetalCommandQueueInfoEXT & setQueue( VULKAN_HPP_NAMESPACE::Queue queue_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queue = queue_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ExportMetalCommandQueueInfoEXT & setMtlCommandQueue( MTLCommandQueue_id mtlCommandQueue_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mtlCommandQueue = mtlCommandQueue_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkExportMetalCommandQueueInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExportMetalCommandQueueInfoEXT*>( this );
+    }
+
+    operator VkExportMetalCommandQueueInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExportMetalCommandQueueInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Queue const &, MTLCommandQueue_id const &>
+#endif
+      // Reflection helper: ties all members; used by the reflect-based operator== below.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, queue, mtlCommandQueue );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ExportMetalCommandQueueInfoEXT const & ) const = default;
+#else
+    bool operator==( ExportMetalCommandQueueInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( queue == rhs.queue )
+          && ( mtlCommandQueue == rhs.mtlCommandQueue );
+#endif
+    }
+
+    bool operator!=( ExportMetalCommandQueueInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMetalCommandQueueInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Queue queue = {};
+    MTLCommandQueue_id mtlCommandQueue = {};
+
+  };
+
+  // Maps the StructureType enum value back to this wrapper type (compile-time lookup).
+  template <>
+  struct CppType<StructureType, StructureType::eExportMetalCommandQueueInfoEXT>
+  {
+    using Type = ExportMetalCommandQueueInfoEXT;
+  };
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+  // C++ wrapper for VkExportMetalDeviceInfoEXT: carries the MTLDevice_id to export.
+  // The field layout mirrors the C struct exactly, which is what makes the
+  // reinterpret_cast-based conversion operators below well-defined.
+  struct ExportMetalDeviceInfoEXT
+  {
+    using NativeType = VkExportMetalDeviceInfoEXT;
+
+    // Whether more than one instance of this sType may appear in the same pNext chain.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMetalDeviceInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ExportMetalDeviceInfoEXT(MTLDevice_id mtlDevice_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), mtlDevice( mtlDevice_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ExportMetalDeviceInfoEXT( ExportMetalDeviceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid because of the layout compatibility noted above.
+    ExportMetalDeviceInfoEXT( VkExportMetalDeviceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ExportMetalDeviceInfoEXT( *reinterpret_cast<ExportMetalDeviceInfoEXT const *>( &rhs ) )
+    {}
+
+
+    ExportMetalDeviceInfoEXT & operator=( ExportMetalDeviceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ExportMetalDeviceInfoEXT & operator=( VkExportMetalDeviceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMetalDeviceInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ExportMetalDeviceInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ExportMetalDeviceInfoEXT & setMtlDevice( MTLDevice_id mtlDevice_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mtlDevice = mtlDevice_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkExportMetalDeviceInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExportMetalDeviceInfoEXT*>( this );
+    }
+
+    operator VkExportMetalDeviceInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExportMetalDeviceInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, MTLDevice_id const &>
+#endif
+      // Reflection helper: ties all members; used by the reflect-based operator== below.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, mtlDevice );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ExportMetalDeviceInfoEXT const & ) const = default;
+#else
+    bool operator==( ExportMetalDeviceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( mtlDevice == rhs.mtlDevice );
+#endif
+    }
+
+    bool operator!=( ExportMetalDeviceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMetalDeviceInfoEXT;
+    const void * pNext = {};
+    MTLDevice_id mtlDevice = {};
+
+  };
+
+  // Maps the StructureType enum value back to this wrapper type (compile-time lookup).
+  template <>
+  struct CppType<StructureType, StructureType::eExportMetalDeviceInfoEXT>
+  {
+    using Type = ExportMetalDeviceInfoEXT;
+  };
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+  // C++ wrapper for VkExportMetalIOSurfaceInfoEXT: associates a Vulkan Image with the
+  // IOSurfaceRef to export. The field layout mirrors the C struct exactly, which is
+  // what makes the reinterpret_cast-based conversion operators below well-defined.
+  struct ExportMetalIOSurfaceInfoEXT
+  {
+    using NativeType = VkExportMetalIOSurfaceInfoEXT;
+
+    // Whether more than one instance of this sType may appear in the same pNext chain.
+    static const bool allowDuplicate = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMetalIoSurfaceInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ExportMetalIOSurfaceInfoEXT(VULKAN_HPP_NAMESPACE::Image image_ = {}, IOSurfaceRef ioSurface_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), image( image_ ), ioSurface( ioSurface_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ExportMetalIOSurfaceInfoEXT( ExportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid because of the layout compatibility noted above.
+    ExportMetalIOSurfaceInfoEXT( VkExportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ExportMetalIOSurfaceInfoEXT( *reinterpret_cast<ExportMetalIOSurfaceInfoEXT const *>( &rhs ) )
+    {}
+
+
+    ExportMetalIOSurfaceInfoEXT & operator=( ExportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ExportMetalIOSurfaceInfoEXT & operator=( VkExportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMetalIOSurfaceInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ExportMetalIOSurfaceInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ExportMetalIOSurfaceInfoEXT & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
+    {
+      image = image_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ExportMetalIOSurfaceInfoEXT & setIoSurface( IOSurfaceRef ioSurface_ ) VULKAN_HPP_NOEXCEPT
+    {
+      ioSurface = ioSurface_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkExportMetalIOSurfaceInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExportMetalIOSurfaceInfoEXT*>( this );
+    }
+
+    operator VkExportMetalIOSurfaceInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExportMetalIOSurfaceInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Image const &, IOSurfaceRef const &>
+#endif
+      // Reflection helper: ties all members; used by the reflect-based operator== below.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, image, ioSurface );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ExportMetalIOSurfaceInfoEXT const & ) const = default;
+#else
+    bool operator==( ExportMetalIOSurfaceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( image == rhs.image )
+          && ( ioSurface == rhs.ioSurface );
+#endif
+    }
+
+    bool operator!=( ExportMetalIOSurfaceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMetalIoSurfaceInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Image image = {};
+    IOSurfaceRef ioSurface = {};
+
+  };
+
+  // Maps the StructureType enum value back to this wrapper type (compile-time lookup).
+  template <>
+  struct CppType<StructureType, StructureType::eExportMetalIoSurfaceInfoEXT>
+  {
+    using Type = ExportMetalIOSurfaceInfoEXT;
+  };
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+  // C++ wrapper for VkExportMetalObjectCreateInfoEXT: selects which kind of Metal object
+  // to export via the exportObjectType flag bit. The field layout mirrors the C struct
+  // exactly, which is what makes the reinterpret_cast-based conversion operators valid.
+  struct ExportMetalObjectCreateInfoEXT
+  {
+    using NativeType = VkExportMetalObjectCreateInfoEXT;
+
+    // Whether more than one instance of this sType may appear in the same pNext chain.
+    static const bool allowDuplicate = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMetalObjectCreateInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ExportMetalObjectCreateInfoEXT(VULKAN_HPP_NAMESPACE::ExportMetalObjectTypeFlagBitsEXT exportObjectType_ = VULKAN_HPP_NAMESPACE::ExportMetalObjectTypeFlagBitsEXT::eMetalDevice, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), exportObjectType( exportObjectType_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ExportMetalObjectCreateInfoEXT( ExportMetalObjectCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid because of the layout compatibility noted above.
+    ExportMetalObjectCreateInfoEXT( VkExportMetalObjectCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ExportMetalObjectCreateInfoEXT( *reinterpret_cast<ExportMetalObjectCreateInfoEXT const *>( &rhs ) )
+    {}
+
+
+    ExportMetalObjectCreateInfoEXT & operator=( ExportMetalObjectCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ExportMetalObjectCreateInfoEXT & operator=( VkExportMetalObjectCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMetalObjectCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ExportMetalObjectCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ExportMetalObjectCreateInfoEXT & setExportObjectType( VULKAN_HPP_NAMESPACE::ExportMetalObjectTypeFlagBitsEXT exportObjectType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      exportObjectType = exportObjectType_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkExportMetalObjectCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExportMetalObjectCreateInfoEXT*>( this );
+    }
+
+    operator VkExportMetalObjectCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExportMetalObjectCreateInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExportMetalObjectTypeFlagBitsEXT const &>
+#endif
+      // Reflection helper: ties all members; used by the reflect-based operator== below.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, exportObjectType );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ExportMetalObjectCreateInfoEXT const & ) const = default;
+#else
+    bool operator==( ExportMetalObjectCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( exportObjectType == rhs.exportObjectType );
+#endif
+    }
+
+    bool operator!=( ExportMetalObjectCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMetalObjectCreateInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ExportMetalObjectTypeFlagBitsEXT exportObjectType = VULKAN_HPP_NAMESPACE::ExportMetalObjectTypeFlagBitsEXT::eMetalDevice;
+
+  };
+
+  // Maps the StructureType enum value back to this wrapper type (compile-time lookup).
+  template <>
+  struct CppType<StructureType, StructureType::eExportMetalObjectCreateInfoEXT>
+  {
+    using Type = ExportMetalObjectCreateInfoEXT;
+  };
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+  // C++ wrapper for VkExportMetalObjectsInfoEXT. It has no payload members of its own —
+  // only sType and pNext; the specific Export* structs above are chained via pNext.
+  // Layout mirrors the C struct, making the reinterpret_cast conversions below valid.
+  struct ExportMetalObjectsInfoEXT
+  {
+    using NativeType = VkExportMetalObjectsInfoEXT;
+
+    // Whether more than one instance of this sType may appear in the same pNext chain.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMetalObjectsInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ExportMetalObjectsInfoEXT(const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ExportMetalObjectsInfoEXT( ExportMetalObjectsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid because of the layout compatibility noted above.
+    ExportMetalObjectsInfoEXT( VkExportMetalObjectsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ExportMetalObjectsInfoEXT( *reinterpret_cast<ExportMetalObjectsInfoEXT const *>( &rhs ) )
+    {}
+
+
+    ExportMetalObjectsInfoEXT & operator=( ExportMetalObjectsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ExportMetalObjectsInfoEXT & operator=( VkExportMetalObjectsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ExportMetalObjectsInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkExportMetalObjectsInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExportMetalObjectsInfoEXT*>( this );
+    }
+
+    operator VkExportMetalObjectsInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExportMetalObjectsInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &>
+#endif
+      // Reflection helper: ties all members; used by the reflect-based operator== below.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ExportMetalObjectsInfoEXT const & ) const = default;
+#else
+    bool operator==( ExportMetalObjectsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext );
+#endif
+    }
+
+    bool operator!=( ExportMetalObjectsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMetalObjectsInfoEXT;
+    const void * pNext = {};
+
+  };
+
+  // Maps the StructureType enum value back to this wrapper type (compile-time lookup).
+  template <>
+  struct CppType<StructureType, StructureType::eExportMetalObjectsInfoEXT>
+  {
+    using Type = ExportMetalObjectsInfoEXT;
+  };
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+  // C++ wrapper for VkExportMetalSharedEventInfoEXT: associates a Vulkan Semaphore or
+  // Event with the MTLSharedEvent_id to export. The field layout mirrors the C struct
+  // exactly, which is what makes the reinterpret_cast-based conversion operators valid.
+  struct ExportMetalSharedEventInfoEXT
+  {
+    using NativeType = VkExportMetalSharedEventInfoEXT;
+
+    // Whether more than one instance of this sType may appear in the same pNext chain.
+    static const bool allowDuplicate = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMetalSharedEventInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ExportMetalSharedEventInfoEXT(VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, VULKAN_HPP_NAMESPACE::Event event_ = {}, MTLSharedEvent_id mtlSharedEvent_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), semaphore( semaphore_ ), event( event_ ), mtlSharedEvent( mtlSharedEvent_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ExportMetalSharedEventInfoEXT( ExportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid because of the layout compatibility noted above.
+    ExportMetalSharedEventInfoEXT( VkExportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ExportMetalSharedEventInfoEXT( *reinterpret_cast<ExportMetalSharedEventInfoEXT const *>( &rhs ) )
+    {}
+
+
+    ExportMetalSharedEventInfoEXT & operator=( ExportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ExportMetalSharedEventInfoEXT & operator=( VkExportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMetalSharedEventInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ExportMetalSharedEventInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ExportMetalSharedEventInfoEXT & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
+    {
+      semaphore = semaphore_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ExportMetalSharedEventInfoEXT & setEvent( VULKAN_HPP_NAMESPACE::Event event_ ) VULKAN_HPP_NOEXCEPT
+    {
+      event = event_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ExportMetalSharedEventInfoEXT & setMtlSharedEvent( MTLSharedEvent_id mtlSharedEvent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mtlSharedEvent = mtlSharedEvent_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkExportMetalSharedEventInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExportMetalSharedEventInfoEXT*>( this );
+    }
+
+    operator VkExportMetalSharedEventInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExportMetalSharedEventInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Semaphore const &, VULKAN_HPP_NAMESPACE::Event const &, MTLSharedEvent_id const &>
+#endif
+      // Reflection helper: ties all members; used by the reflect-based operator== below.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, semaphore, event, mtlSharedEvent );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ExportMetalSharedEventInfoEXT const & ) const = default;
+#else
+    bool operator==( ExportMetalSharedEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( semaphore == rhs.semaphore )
+          && ( event == rhs.event )
+          && ( mtlSharedEvent == rhs.mtlSharedEvent );
+#endif
+    }
+
+    bool operator!=( ExportMetalSharedEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMetalSharedEventInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
+    VULKAN_HPP_NAMESPACE::Event event = {};
+    MTLSharedEvent_id mtlSharedEvent = {};
+
+  };
+
+  // Maps the StructureType enum value back to this wrapper type (compile-time lookup).
+  template <>
+  struct CppType<StructureType, StructureType::eExportMetalSharedEventInfoEXT>
+  {
+    using Type = ExportMetalSharedEventInfoEXT;
+  };
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+  // C++ wrapper for VkExportMetalTextureInfoEXT: associates a Vulkan Image, ImageView, or
+  // BufferView (plus an aspect/plane selector) with the MTLTexture_id to export. The field
+  // layout mirrors the C struct, which makes the reinterpret_cast conversions below valid.
+  struct ExportMetalTextureInfoEXT
+  {
+    using NativeType = VkExportMetalTextureInfoEXT;
+
+    // Whether more than one instance of this sType may appear in the same pNext chain.
+    static const bool allowDuplicate = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMetalTextureInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ExportMetalTextureInfoEXT(VULKAN_HPP_NAMESPACE::Image image_ = {}, VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, VULKAN_HPP_NAMESPACE::BufferView bufferView_ = {}, VULKAN_HPP_NAMESPACE::ImageAspectFlagBits plane_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor, MTLTexture_id mtlTexture_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), image( image_ ), imageView( imageView_ ), bufferView( bufferView_ ), plane( plane_ ), mtlTexture( mtlTexture_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ExportMetalTextureInfoEXT( ExportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid because of the layout compatibility noted above.
+    ExportMetalTextureInfoEXT( VkExportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ExportMetalTextureInfoEXT( *reinterpret_cast<ExportMetalTextureInfoEXT const *>( &rhs ) )
+    {}
+
+
+    ExportMetalTextureInfoEXT & operator=( ExportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ExportMetalTextureInfoEXT & operator=( VkExportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMetalTextureInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ExportMetalTextureInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ExportMetalTextureInfoEXT & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
+    {
+      image = image_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ExportMetalTextureInfoEXT & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageView = imageView_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ExportMetalTextureInfoEXT & setBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferView = bufferView_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ExportMetalTextureInfoEXT & setPlane( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits plane_ ) VULKAN_HPP_NOEXCEPT
+    {
+      plane = plane_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ExportMetalTextureInfoEXT & setMtlTexture( MTLTexture_id mtlTexture_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mtlTexture = mtlTexture_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkExportMetalTextureInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExportMetalTextureInfoEXT*>( this );
+    }
+
+    operator VkExportMetalTextureInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExportMetalTextureInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Image const &, VULKAN_HPP_NAMESPACE::ImageView const &, VULKAN_HPP_NAMESPACE::BufferView const &, VULKAN_HPP_NAMESPACE::ImageAspectFlagBits const &, MTLTexture_id const &>
+#endif
+      // Reflection helper: ties all members; used by the reflect-based operator== below.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, image, imageView, bufferView, plane, mtlTexture );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ExportMetalTextureInfoEXT const & ) const = default;
+#else
+    bool operator==( ExportMetalTextureInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( image == rhs.image )
+          && ( imageView == rhs.imageView )
+          && ( bufferView == rhs.bufferView )
+          && ( plane == rhs.plane )
+          && ( mtlTexture == rhs.mtlTexture );
+#endif
+    }
+
+    bool operator!=( ExportMetalTextureInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMetalTextureInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Image image = {};
+    VULKAN_HPP_NAMESPACE::ImageView imageView = {};
+    VULKAN_HPP_NAMESPACE::BufferView bufferView = {};
+    VULKAN_HPP_NAMESPACE::ImageAspectFlagBits plane = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor;
+    MTLTexture_id mtlTexture = {};
+
+  };
+
+  // Maps the StructureType enum value back to this wrapper type (compile-time lookup).
+  template <>
+  struct CppType<StructureType, StructureType::eExportMetalTextureInfoEXT>
+  {
+    using Type = ExportMetalTextureInfoEXT;
+  };
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  // C++ wrapper for VkExportSemaphoreCreateInfo: declares the external handle types a
+  // semaphore may be exported as (handleTypes flags). The field layout mirrors the C
+  // struct exactly, making the reinterpret_cast-based conversion operators below valid.
+  struct ExportSemaphoreCreateInfo
+  {
+    using NativeType = VkExportSemaphoreCreateInfo;
+
+    // Whether more than one instance of this sType may appear in the same pNext chain.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportSemaphoreCreateInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ExportSemaphoreCreateInfo(VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), handleTypes( handleTypes_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ExportSemaphoreCreateInfo( ExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid because of the layout compatibility noted above.
+    ExportSemaphoreCreateInfo( VkExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ExportSemaphoreCreateInfo( *reinterpret_cast<ExportSemaphoreCreateInfo const *>( &rhs ) )
+    {}
+
+
+    ExportSemaphoreCreateInfo & operator=( ExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ExportSemaphoreCreateInfo & operator=( VkExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportSemaphoreCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreCreateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleTypes = handleTypes_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkExportSemaphoreCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExportSemaphoreCreateInfo*>( this );
+    }
+
+    operator VkExportSemaphoreCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExportSemaphoreCreateInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags const &>
+#endif
+      // Reflection helper: ties all members; used by the reflect-based operator== below.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, handleTypes );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ExportSemaphoreCreateInfo const & ) const = default;
+#else
+    bool operator==( ExportSemaphoreCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleTypes == rhs.handleTypes );
+#endif
+    }
+
+    bool operator!=( ExportSemaphoreCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportSemaphoreCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes = {};
+
+  };
+
+  // Maps the StructureType enum value back to this wrapper type (compile-time lookup).
+  template <>
+  struct CppType<StructureType, StructureType::eExportSemaphoreCreateInfo>
+  {
+    using Type = ExportSemaphoreCreateInfo;
+  };
+  // Alias kept for code written against the VK_KHR_external_semaphore name of this struct.
+  using ExportSemaphoreCreateInfoKHR = ExportSemaphoreCreateInfo;
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  // C++ wrapper for VkExportSemaphoreWin32HandleInfoKHR: Win32 handle attributes
+  // (SECURITY_ATTRIBUTES, access mask, optional name) for an exported semaphore handle.
+  // Layout mirrors the C struct, making the reinterpret_cast conversions below valid.
+  struct ExportSemaphoreWin32HandleInfoKHR
+  {
+    using NativeType = VkExportSemaphoreWin32HandleInfoKHR;
+
+    // Whether more than one instance of this sType may appear in the same pNext chain.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportSemaphoreWin32HandleInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ExportSemaphoreWin32HandleInfoKHR(const SECURITY_ATTRIBUTES * pAttributes_ = {}, DWORD dwAccess_ = {}, LPCWSTR name_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), pAttributes( pAttributes_ ), dwAccess( dwAccess_ ), name( name_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ExportSemaphoreWin32HandleInfoKHR( ExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid because of the layout compatibility noted above.
+    ExportSemaphoreWin32HandleInfoKHR( VkExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ExportSemaphoreWin32HandleInfoKHR( *reinterpret_cast<ExportSemaphoreWin32HandleInfoKHR const *>( &rhs ) )
+    {}
+
+
+    ExportSemaphoreWin32HandleInfoKHR & operator=( ExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ExportSemaphoreWin32HandleInfoKHR & operator=( VkExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportSemaphoreWin32HandleInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAttributes = pAttributes_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dwAccess = dwAccess_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT
+    {
+      name = name_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkExportSemaphoreWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExportSemaphoreWin32HandleInfoKHR*>( this );
+    }
+
+    operator VkExportSemaphoreWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExportSemaphoreWin32HandleInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const SECURITY_ATTRIBUTES * const &, DWORD const &, LPCWSTR const &>
+#endif
+      // Reflection helper: ties all members; used by the reflect-based operator== below.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, pAttributes, dwAccess, name );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ExportSemaphoreWin32HandleInfoKHR const & ) const = default;
+#else
+    // NOTE(review): `name` is an LPCWSTR, so this compares pointers, not string contents —
+    // consistent with the C struct's semantics, but worth knowing when comparing instances.
+    bool operator==( ExportSemaphoreWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pAttributes == rhs.pAttributes )
+          && ( dwAccess == rhs.dwAccess )
+          && ( name == rhs.name );
+#endif
+    }
+
+    bool operator!=( ExportSemaphoreWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportSemaphoreWin32HandleInfoKHR;
+    const void * pNext = {};
+    const SECURITY_ATTRIBUTES * pAttributes = {};
+    DWORD dwAccess = {};
+    LPCWSTR name = {};
+
+  };
+
+  // Maps the StructureType enum value back to this wrapper type (compile-time lookup).
+  template <>
+  struct CppType<StructureType, StructureType::eExportSemaphoreWin32HandleInfoKHR>
+  {
+    using Type = ExportSemaphoreWin32HandleInfoKHR;
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  struct ExtensionProperties
+  {
+    using NativeType = VkExtensionProperties;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR_14 ExtensionProperties(std::array<char,VK_MAX_EXTENSION_NAME_SIZE> const & extensionName_ = {}, uint32_t specVersion_ = {}) VULKAN_HPP_NOEXCEPT
+    : extensionName( extensionName_ ), specVersion( specVersion_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 ExtensionProperties( ExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExtensionProperties( VkExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ExtensionProperties( *reinterpret_cast<ExtensionProperties const *>( &rhs ) )
+    {}
+
+
+    ExtensionProperties & operator=( ExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ExtensionProperties & operator=( VkExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExtensionProperties const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkExtensionProperties const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExtensionProperties*>( this );
+    }
+
+    operator VkExtensionProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExtensionProperties*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( extensionName, specVersion );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ExtensionProperties const & ) const = default;
+#else
+    bool operator==( ExtensionProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( extensionName == rhs.extensionName )
+          && ( specVersion == rhs.specVersion );
+#endif
+    }
+
+    bool operator!=( ExtensionProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> extensionName = {};
+    uint32_t specVersion = {};
+
+  };
+
+  struct ExternalMemoryProperties
+  {
+    using NativeType = VkExternalMemoryProperties;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ExternalMemoryProperties(VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlags externalMemoryFeatures_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags exportFromImportedHandleTypes_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags compatibleHandleTypes_ = {}) VULKAN_HPP_NOEXCEPT
+    : externalMemoryFeatures( externalMemoryFeatures_ ), exportFromImportedHandleTypes( exportFromImportedHandleTypes_ ), compatibleHandleTypes( compatibleHandleTypes_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ExternalMemoryProperties( ExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExternalMemoryProperties( VkExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ExternalMemoryProperties( *reinterpret_cast<ExternalMemoryProperties const *>( &rhs ) )
+    {}
+
+
+    ExternalMemoryProperties & operator=( ExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ExternalMemoryProperties & operator=( VkExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalMemoryProperties const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkExternalMemoryProperties const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExternalMemoryProperties*>( this );
+    }
+
+    operator VkExternalMemoryProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExternalMemoryProperties*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlags const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( externalMemoryFeatures, exportFromImportedHandleTypes, compatibleHandleTypes );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ExternalMemoryProperties const & ) const = default;
+#else
+    bool operator==( ExternalMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( externalMemoryFeatures == rhs.externalMemoryFeatures )
+          && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes )
+          && ( compatibleHandleTypes == rhs.compatibleHandleTypes );
+#endif
+    }
+
+    bool operator!=( ExternalMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlags externalMemoryFeatures = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags exportFromImportedHandleTypes = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags compatibleHandleTypes = {};
+
+  };
+  using ExternalMemoryPropertiesKHR = ExternalMemoryProperties;
+
+  struct ExternalBufferProperties
+  {
+    using NativeType = VkExternalBufferProperties;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalBufferProperties;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ExternalBufferProperties(VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), externalMemoryProperties( externalMemoryProperties_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ExternalBufferProperties( ExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExternalBufferProperties( VkExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ExternalBufferProperties( *reinterpret_cast<ExternalBufferProperties const *>( &rhs ) )
+    {}
+
+
+    ExternalBufferProperties & operator=( ExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ExternalBufferProperties & operator=( VkExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalBufferProperties const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkExternalBufferProperties const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExternalBufferProperties*>( this );
+    }
+
+    operator VkExternalBufferProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExternalBufferProperties*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryProperties const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, externalMemoryProperties );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ExternalBufferProperties const & ) const = default;
+#else
+    bool operator==( ExternalBufferProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( externalMemoryProperties == rhs.externalMemoryProperties );
+#endif
+    }
+
+    bool operator!=( ExternalBufferProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalBufferProperties;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eExternalBufferProperties>
+  {
+    using Type = ExternalBufferProperties;
+  };
+  using ExternalBufferPropertiesKHR = ExternalBufferProperties;
+
+  struct ExternalFenceProperties
+  {
+    using NativeType = VkExternalFenceProperties;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalFenceProperties;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ExternalFenceProperties(VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags exportFromImportedHandleTypes_ = {}, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags compatibleHandleTypes_ = {}, VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags externalFenceFeatures_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), exportFromImportedHandleTypes( exportFromImportedHandleTypes_ ), compatibleHandleTypes( compatibleHandleTypes_ ), externalFenceFeatures( externalFenceFeatures_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ExternalFenceProperties( ExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExternalFenceProperties( VkExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ExternalFenceProperties( *reinterpret_cast<ExternalFenceProperties const *>( &rhs ) )
+    {}
+
+
+    ExternalFenceProperties & operator=( ExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ExternalFenceProperties & operator=( VkExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalFenceProperties const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkExternalFenceProperties const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExternalFenceProperties*>( this );
+    }
+
+    operator VkExternalFenceProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExternalFenceProperties*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags const &, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags const &, VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, exportFromImportedHandleTypes, compatibleHandleTypes, externalFenceFeatures );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ExternalFenceProperties const & ) const = default;
+#else
+    bool operator==( ExternalFenceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes )
+          && ( compatibleHandleTypes == rhs.compatibleHandleTypes )
+          && ( externalFenceFeatures == rhs.externalFenceFeatures );
+#endif
+    }
+
+    bool operator!=( ExternalFenceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalFenceProperties;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags exportFromImportedHandleTypes = {};
+    VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags compatibleHandleTypes = {};
+    VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags externalFenceFeatures = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eExternalFenceProperties>
+  {
+    using Type = ExternalFenceProperties;
+  };
+  using ExternalFencePropertiesKHR = ExternalFenceProperties;
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  struct ExternalFormatANDROID
+  {
+    using NativeType = VkExternalFormatANDROID;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalFormatANDROID;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ExternalFormatANDROID(uint64_t externalFormat_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), externalFormat( externalFormat_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ExternalFormatANDROID( ExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExternalFormatANDROID( VkExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ExternalFormatANDROID( *reinterpret_cast<ExternalFormatANDROID const *>( &rhs ) )
+    {}
+
+
+    ExternalFormatANDROID & operator=( ExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ExternalFormatANDROID & operator=( VkExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalFormatANDROID const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ExternalFormatANDROID & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ExternalFormatANDROID & setExternalFormat( uint64_t externalFormat_ ) VULKAN_HPP_NOEXCEPT
+    {
+      externalFormat = externalFormat_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkExternalFormatANDROID const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExternalFormatANDROID*>( this );
+    }
+
+    operator VkExternalFormatANDROID &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExternalFormatANDROID*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint64_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, externalFormat );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ExternalFormatANDROID const & ) const = default;
+#else
+    bool operator==( ExternalFormatANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( externalFormat == rhs.externalFormat );
+#endif
+    }
+
+    bool operator!=( ExternalFormatANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalFormatANDROID;
+    void * pNext = {};
+    uint64_t externalFormat = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eExternalFormatANDROID>
+  {
+    using Type = ExternalFormatANDROID;
+  };
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+  struct ExternalImageFormatProperties
+  {
+    using NativeType = VkExternalImageFormatProperties;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalImageFormatProperties;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ExternalImageFormatProperties(VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), externalMemoryProperties( externalMemoryProperties_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ExternalImageFormatProperties( ExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExternalImageFormatProperties( VkExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ExternalImageFormatProperties( *reinterpret_cast<ExternalImageFormatProperties const *>( &rhs ) )
+    {}
+
+
+    ExternalImageFormatProperties & operator=( ExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ExternalImageFormatProperties & operator=( VkExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalImageFormatProperties const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkExternalImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExternalImageFormatProperties*>( this );
+    }
+
+    operator VkExternalImageFormatProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExternalImageFormatProperties*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryProperties const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, externalMemoryProperties );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ExternalImageFormatProperties const & ) const = default;
+#else
+    bool operator==( ExternalImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( externalMemoryProperties == rhs.externalMemoryProperties );
+#endif
+    }
+
+    bool operator!=( ExternalImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalImageFormatProperties;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eExternalImageFormatProperties>
+  {
+    using Type = ExternalImageFormatProperties;
+  };
+  using ExternalImageFormatPropertiesKHR = ExternalImageFormatProperties;
+
+  struct ImageFormatProperties
+  {
+    using NativeType = VkImageFormatProperties;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ImageFormatProperties(VULKAN_HPP_NAMESPACE::Extent3D maxExtent_ = {}, uint32_t maxMipLevels_ = {}, uint32_t maxArrayLayers_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize maxResourceSize_ = {}) VULKAN_HPP_NOEXCEPT
+    : maxExtent( maxExtent_ ), maxMipLevels( maxMipLevels_ ), maxArrayLayers( maxArrayLayers_ ), sampleCounts( sampleCounts_ ), maxResourceSize( maxResourceSize_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageFormatProperties( ImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageFormatProperties( VkImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImageFormatProperties( *reinterpret_cast<ImageFormatProperties const *>( &rhs ) )
+    {}
+
+
+    ImageFormatProperties & operator=( ImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ImageFormatProperties & operator=( VkImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageFormatProperties const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageFormatProperties*>( this );
+    }
+
+    operator VkImageFormatProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageFormatProperties*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::Extent3D const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::SampleCountFlags const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( maxExtent, maxMipLevels, maxArrayLayers, sampleCounts, maxResourceSize );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ImageFormatProperties const & ) const = default;
+#else
+    bool operator==( ImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( maxExtent == rhs.maxExtent )
+          && ( maxMipLevels == rhs.maxMipLevels )
+          && ( maxArrayLayers == rhs.maxArrayLayers )
+          && ( sampleCounts == rhs.sampleCounts )
+          && ( maxResourceSize == rhs.maxResourceSize );
+#endif
+    }
+
+    bool operator!=( ImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::Extent3D maxExtent = {};
+    uint32_t maxMipLevels = {};
+    uint32_t maxArrayLayers = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize maxResourceSize = {};
+
+  };
+
+  struct ExternalImageFormatPropertiesNV
+  {
+    using NativeType = VkExternalImageFormatPropertiesNV;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ExternalImageFormatPropertiesNV(VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagsNV externalMemoryFeatures_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes_ = {}) VULKAN_HPP_NOEXCEPT
+    : imageFormatProperties( imageFormatProperties_ ), externalMemoryFeatures( externalMemoryFeatures_ ), exportFromImportedHandleTypes( exportFromImportedHandleTypes_ ), compatibleHandleTypes( compatibleHandleTypes_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ExternalImageFormatPropertiesNV( ExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExternalImageFormatPropertiesNV( VkExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ExternalImageFormatPropertiesNV( *reinterpret_cast<ExternalImageFormatPropertiesNV const *>( &rhs ) )
+    {}
+
+
+    ExternalImageFormatPropertiesNV & operator=( ExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ExternalImageFormatPropertiesNV & operator=( VkExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkExternalImageFormatPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExternalImageFormatPropertiesNV*>( this );
+    }
+
+    operator VkExternalImageFormatPropertiesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExternalImageFormatPropertiesNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::ImageFormatProperties const &, VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagsNV const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( imageFormatProperties, externalMemoryFeatures, exportFromImportedHandleTypes, compatibleHandleTypes );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ExternalImageFormatPropertiesNV const & ) const = default;
+#else
+    bool operator==( ExternalImageFormatPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( imageFormatProperties == rhs.imageFormatProperties )
+          && ( externalMemoryFeatures == rhs.externalMemoryFeatures )
+          && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes )
+          && ( compatibleHandleTypes == rhs.compatibleHandleTypes );
+#endif
+    }
+
+    bool operator!=( ExternalImageFormatPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagsNV externalMemoryFeatures = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes = {};
+
+  };
+
+  struct ExternalMemoryBufferCreateInfo
+  {
+    using NativeType = VkExternalMemoryBufferCreateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryBufferCreateInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ExternalMemoryBufferCreateInfo(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), handleTypes( handleTypes_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ExternalMemoryBufferCreateInfo( ExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExternalMemoryBufferCreateInfo( VkExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ExternalMemoryBufferCreateInfo( *reinterpret_cast<ExternalMemoryBufferCreateInfo const *>( &rhs ) )
+    {}
+
+
+    ExternalMemoryBufferCreateInfo & operator=( ExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ExternalMemoryBufferCreateInfo & operator=( VkExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ExternalMemoryBufferCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ExternalMemoryBufferCreateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleTypes = handleTypes_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkExternalMemoryBufferCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExternalMemoryBufferCreateInfo*>( this );
+    }
+
+    operator VkExternalMemoryBufferCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExternalMemoryBufferCreateInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, handleTypes );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ExternalMemoryBufferCreateInfo const & ) const = default;
+#else
+    bool operator==( ExternalMemoryBufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleTypes == rhs.handleTypes );
+#endif
+    }
+
+    bool operator!=( ExternalMemoryBufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryBufferCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eExternalMemoryBufferCreateInfo>
+  {
+    using Type = ExternalMemoryBufferCreateInfo;
+  };
+  using ExternalMemoryBufferCreateInfoKHR = ExternalMemoryBufferCreateInfo;
+
+  struct ExternalMemoryImageCreateInfo
+  {
+    using NativeType = VkExternalMemoryImageCreateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryImageCreateInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfo(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), handleTypes( handleTypes_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfo( ExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExternalMemoryImageCreateInfo( VkExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ExternalMemoryImageCreateInfo( *reinterpret_cast<ExternalMemoryImageCreateInfo const *>( &rhs ) )
+    {}
+
+
+    ExternalMemoryImageCreateInfo & operator=( ExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ExternalMemoryImageCreateInfo & operator=( VkExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ExternalMemoryImageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ExternalMemoryImageCreateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleTypes = handleTypes_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkExternalMemoryImageCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExternalMemoryImageCreateInfo*>( this );
+    }
+
+    operator VkExternalMemoryImageCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExternalMemoryImageCreateInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, handleTypes );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ExternalMemoryImageCreateInfo const & ) const = default;
+#else
+    bool operator==( ExternalMemoryImageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleTypes == rhs.handleTypes );
+#endif
+    }
+
+    bool operator!=( ExternalMemoryImageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryImageCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eExternalMemoryImageCreateInfo>
+  {
+    using Type = ExternalMemoryImageCreateInfo;
+  };
+  using ExternalMemoryImageCreateInfoKHR = ExternalMemoryImageCreateInfo;
+
+  struct ExternalMemoryImageCreateInfoNV
+  {
+    using NativeType = VkExternalMemoryImageCreateInfoNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryImageCreateInfoNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfoNV(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), handleTypes( handleTypes_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfoNV( ExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExternalMemoryImageCreateInfoNV( VkExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ExternalMemoryImageCreateInfoNV( *reinterpret_cast<ExternalMemoryImageCreateInfoNV const *>( &rhs ) )
+    {}
+
+
+    ExternalMemoryImageCreateInfoNV & operator=( ExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ExternalMemoryImageCreateInfoNV & operator=( VkExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfoNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ExternalMemoryImageCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ExternalMemoryImageCreateInfoNV & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleTypes = handleTypes_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkExternalMemoryImageCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExternalMemoryImageCreateInfoNV*>( this );
+    }
+
+    operator VkExternalMemoryImageCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExternalMemoryImageCreateInfoNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, handleTypes );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ExternalMemoryImageCreateInfoNV const & ) const = default;
+#else
+    bool operator==( ExternalMemoryImageCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleTypes == rhs.handleTypes );
+#endif
+    }
+
+    bool operator!=( ExternalMemoryImageCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryImageCreateInfoNV;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eExternalMemoryImageCreateInfoNV>
+  {
+    using Type = ExternalMemoryImageCreateInfoNV;
+  };
+
+  struct ExternalSemaphoreProperties
+  {
+    using NativeType = VkExternalSemaphoreProperties;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalSemaphoreProperties;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ExternalSemaphoreProperties(VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes_ = {}, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags compatibleHandleTypes_ = {}, VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags externalSemaphoreFeatures_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), exportFromImportedHandleTypes( exportFromImportedHandleTypes_ ), compatibleHandleTypes( compatibleHandleTypes_ ), externalSemaphoreFeatures( externalSemaphoreFeatures_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ExternalSemaphoreProperties( ExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExternalSemaphoreProperties( VkExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ExternalSemaphoreProperties( *reinterpret_cast<ExternalSemaphoreProperties const *>( &rhs ) )
+    {}
+
+
+    ExternalSemaphoreProperties & operator=( ExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ExternalSemaphoreProperties & operator=( VkExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkExternalSemaphoreProperties const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExternalSemaphoreProperties*>( this );
+    }
+
+    operator VkExternalSemaphoreProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExternalSemaphoreProperties*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags const &, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags const &, VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, exportFromImportedHandleTypes, compatibleHandleTypes, externalSemaphoreFeatures );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ExternalSemaphoreProperties const & ) const = default;
+#else
+    bool operator==( ExternalSemaphoreProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes )
+          && ( compatibleHandleTypes == rhs.compatibleHandleTypes )
+          && ( externalSemaphoreFeatures == rhs.externalSemaphoreFeatures );
+#endif
+    }
+
+    bool operator!=( ExternalSemaphoreProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalSemaphoreProperties;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes = {};
+    VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags compatibleHandleTypes = {};
+    VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags externalSemaphoreFeatures = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eExternalSemaphoreProperties>
+  {
+    using Type = ExternalSemaphoreProperties;
+  };
+  using ExternalSemaphorePropertiesKHR = ExternalSemaphoreProperties;
+
+  struct FenceCreateInfo
+  {
+    using NativeType = VkFenceCreateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFenceCreateInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR FenceCreateInfo(VULKAN_HPP_NAMESPACE::FenceCreateFlags flags_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR FenceCreateInfo( FenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    FenceCreateInfo( VkFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : FenceCreateInfo( *reinterpret_cast<FenceCreateInfo const *>( &rhs ) )
+    {}
+
+
+    FenceCreateInfo & operator=( FenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    FenceCreateInfo & operator=( VkFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FenceCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 FenceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 FenceCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::FenceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkFenceCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkFenceCreateInfo*>( this );
+    }
+
+    operator VkFenceCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkFenceCreateInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::FenceCreateFlags const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( FenceCreateInfo const & ) const = default;
+#else
+    bool operator==( FenceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags );
+#endif
+    }
+
+    bool operator!=( FenceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::FenceCreateFlags flags = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eFenceCreateInfo>
+  {
+    using Type = FenceCreateInfo;
+  };
+
+  struct FenceGetFdInfoKHR
+  {
+    using NativeType = VkFenceGetFdInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFenceGetFdInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR FenceGetFdInfoKHR(VULKAN_HPP_NAMESPACE::Fence fence_ = {}, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), fence( fence_ ), handleType( handleType_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR FenceGetFdInfoKHR( FenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    FenceGetFdInfoKHR( VkFenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : FenceGetFdInfoKHR( *reinterpret_cast<FenceGetFdInfoKHR const *>( &rhs ) )
+    {}
+
+
+    FenceGetFdInfoKHR & operator=( FenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    FenceGetFdInfoKHR & operator=( VkFenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 FenceGetFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 FenceGetFdInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fence = fence_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 FenceGetFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkFenceGetFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkFenceGetFdInfoKHR*>( this );
+    }
+
+    operator VkFenceGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkFenceGetFdInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Fence const &, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, fence, handleType );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( FenceGetFdInfoKHR const & ) const = default;
+#else
+    bool operator==( FenceGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( fence == rhs.fence )
+          && ( handleType == rhs.handleType );
+#endif
+    }
+
+    bool operator!=( FenceGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceGetFdInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Fence fence = {};
+    VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eFenceGetFdInfoKHR>
+  {
+    using Type = FenceGetFdInfoKHR;
+  };
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  struct FenceGetWin32HandleInfoKHR
+  {
+    using NativeType = VkFenceGetWin32HandleInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFenceGetWin32HandleInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR FenceGetWin32HandleInfoKHR(VULKAN_HPP_NAMESPACE::Fence fence_ = {}, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), fence( fence_ ), handleType( handleType_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR FenceGetWin32HandleInfoKHR( FenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    FenceGetWin32HandleInfoKHR( VkFenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : FenceGetWin32HandleInfoKHR( *reinterpret_cast<FenceGetWin32HandleInfoKHR const *>( &rhs ) )
+    {}
+
+
+    FenceGetWin32HandleInfoKHR & operator=( FenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    FenceGetWin32HandleInfoKHR & operator=( VkFenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 FenceGetWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 FenceGetWin32HandleInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fence = fence_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 FenceGetWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkFenceGetWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkFenceGetWin32HandleInfoKHR*>( this );
+    }
+
+    operator VkFenceGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkFenceGetWin32HandleInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Fence const &, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, fence, handleType );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( FenceGetWin32HandleInfoKHR const & ) const = default;
+#else
+    bool operator==( FenceGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( fence == rhs.fence )
+          && ( handleType == rhs.handleType );
+#endif
+    }
+
+    bool operator!=( FenceGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceGetWin32HandleInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Fence fence = {};
+    VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eFenceGetWin32HandleInfoKHR>
+  {
+    using Type = FenceGetWin32HandleInfoKHR;
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  struct FilterCubicImageViewImageFormatPropertiesEXT
+  {
+    using NativeType = VkFilterCubicImageViewImageFormatPropertiesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFilterCubicImageViewImageFormatPropertiesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR FilterCubicImageViewImageFormatPropertiesEXT(VULKAN_HPP_NAMESPACE::Bool32 filterCubic_ = {}, VULKAN_HPP_NAMESPACE::Bool32 filterCubicMinmax_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), filterCubic( filterCubic_ ), filterCubicMinmax( filterCubicMinmax_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR FilterCubicImageViewImageFormatPropertiesEXT( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    FilterCubicImageViewImageFormatPropertiesEXT( VkFilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : FilterCubicImageViewImageFormatPropertiesEXT( *reinterpret_cast<FilterCubicImageViewImageFormatPropertiesEXT const *>( &rhs ) )
+    {}
+
+
+    FilterCubicImageViewImageFormatPropertiesEXT & operator=( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    FilterCubicImageViewImageFormatPropertiesEXT & operator=( VkFilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FilterCubicImageViewImageFormatPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkFilterCubicImageViewImageFormatPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkFilterCubicImageViewImageFormatPropertiesEXT*>( this );
+    }
+
+    operator VkFilterCubicImageViewImageFormatPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkFilterCubicImageViewImageFormatPropertiesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, filterCubic, filterCubicMinmax );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( FilterCubicImageViewImageFormatPropertiesEXT const & ) const = default;
+#else
+    bool operator==( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( filterCubic == rhs.filterCubic )
+          && ( filterCubicMinmax == rhs.filterCubicMinmax );
+#endif
+    }
+
+    bool operator!=( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFilterCubicImageViewImageFormatPropertiesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 filterCubic = {};
+    VULKAN_HPP_NAMESPACE::Bool32 filterCubicMinmax = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eFilterCubicImageViewImageFormatPropertiesEXT>
+  {
+    using Type = FilterCubicImageViewImageFormatPropertiesEXT;
+  };
+
+  struct FormatProperties
+  {
+    using NativeType = VkFormatProperties;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR FormatProperties(VULKAN_HPP_NAMESPACE::FormatFeatureFlags linearTilingFeatures_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags optimalTilingFeatures_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags bufferFeatures_ = {}) VULKAN_HPP_NOEXCEPT
+    : linearTilingFeatures( linearTilingFeatures_ ), optimalTilingFeatures( optimalTilingFeatures_ ), bufferFeatures( bufferFeatures_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR FormatProperties( FormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    FormatProperties( VkFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : FormatProperties( *reinterpret_cast<FormatProperties const *>( &rhs ) )
+    {}
+
+
+    FormatProperties & operator=( FormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    FormatProperties & operator=( VkFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FormatProperties const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkFormatProperties const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkFormatProperties*>( this );
+    }
+
+    operator VkFormatProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkFormatProperties*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::FormatFeatureFlags const &, VULKAN_HPP_NAMESPACE::FormatFeatureFlags const &, VULKAN_HPP_NAMESPACE::FormatFeatureFlags const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( linearTilingFeatures, optimalTilingFeatures, bufferFeatures );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( FormatProperties const & ) const = default;
+#else
+    bool operator==( FormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( linearTilingFeatures == rhs.linearTilingFeatures )
+          && ( optimalTilingFeatures == rhs.optimalTilingFeatures )
+          && ( bufferFeatures == rhs.bufferFeatures );
+#endif
+    }
+
+    bool operator!=( FormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::FormatFeatureFlags linearTilingFeatures = {};
+    VULKAN_HPP_NAMESPACE::FormatFeatureFlags optimalTilingFeatures = {};
+    VULKAN_HPP_NAMESPACE::FormatFeatureFlags bufferFeatures = {};
+
+  };
+
+  struct FormatProperties2
+  {
+    using NativeType = VkFormatProperties2;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFormatProperties2;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR FormatProperties2(VULKAN_HPP_NAMESPACE::FormatProperties formatProperties_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), formatProperties( formatProperties_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR FormatProperties2( FormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    FormatProperties2( VkFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+      : FormatProperties2( *reinterpret_cast<FormatProperties2 const *>( &rhs ) )
+    {}
+
+
+    FormatProperties2 & operator=( FormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    FormatProperties2 & operator=( VkFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FormatProperties2 const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkFormatProperties2 const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkFormatProperties2*>( this );
+    }
+
+    operator VkFormatProperties2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkFormatProperties2*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::FormatProperties const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, formatProperties );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( FormatProperties2 const & ) const = default;
+#else
+    bool operator==( FormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( formatProperties == rhs.formatProperties );
+#endif
+    }
+
+    bool operator!=( FormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFormatProperties2;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::FormatProperties formatProperties = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eFormatProperties2>
+  {
+    using Type = FormatProperties2;
+  };
+  using FormatProperties2KHR = FormatProperties2;
+
+  struct FormatProperties3
+  {
+    using NativeType = VkFormatProperties3;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFormatProperties3;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR FormatProperties3(VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 linearTilingFeatures_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 optimalTilingFeatures_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 bufferFeatures_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), linearTilingFeatures( linearTilingFeatures_ ), optimalTilingFeatures( optimalTilingFeatures_ ), bufferFeatures( bufferFeatures_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR FormatProperties3( FormatProperties3 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    FormatProperties3( VkFormatProperties3 const & rhs ) VULKAN_HPP_NOEXCEPT
+      : FormatProperties3( *reinterpret_cast<FormatProperties3 const *>( &rhs ) )
+    {}
+
+
+    FormatProperties3 & operator=( FormatProperties3 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    FormatProperties3 & operator=( VkFormatProperties3 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FormatProperties3 const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkFormatProperties3 const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkFormatProperties3*>( this );
+    }
+
+    operator VkFormatProperties3 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkFormatProperties3*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 const &, VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 const &, VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, linearTilingFeatures, optimalTilingFeatures, bufferFeatures );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( FormatProperties3 const & ) const = default;
+#else
+    bool operator==( FormatProperties3 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( linearTilingFeatures == rhs.linearTilingFeatures )
+          && ( optimalTilingFeatures == rhs.optimalTilingFeatures )
+          && ( bufferFeatures == rhs.bufferFeatures );
+#endif
+    }
+
+    bool operator!=( FormatProperties3 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFormatProperties3;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 linearTilingFeatures = {};
+    VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 optimalTilingFeatures = {};
+    VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 bufferFeatures = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eFormatProperties3>
+  {
+    using Type = FormatProperties3;
+  };
+  using FormatProperties3KHR = FormatProperties3;
+
+  // C++ wrapper for VkFragmentShadingRateAttachmentInfoKHR (VK_KHR_fragment_shading_rate).
+  // Member order/types mirror the native C struct exactly: the conversion operators
+  // below reinterpret_cast between the two, so the layouts must stay identical.
+  struct FragmentShadingRateAttachmentInfoKHR
+  {
+    using NativeType = VkFragmentShadingRateAttachmentInfoKHR;
+
+    // Consumed by StructureChain machinery: this struct may appear at most once in a pNext chain.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFragmentShadingRateAttachmentInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR FragmentShadingRateAttachmentInfoKHR(const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pFragmentShadingRateAttachment_ = {}, VULKAN_HPP_NAMESPACE::Extent2D shadingRateAttachmentTexelSize_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), pFragmentShadingRateAttachment( pFragmentShadingRateAttachment_ ), shadingRateAttachmentTexelSize( shadingRateAttachmentTexelSize_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR FragmentShadingRateAttachmentInfoKHR( FragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the native C struct (relies on the layout compatibility noted above).
+    FragmentShadingRateAttachmentInfoKHR( VkFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : FragmentShadingRateAttachmentInfoKHR( *reinterpret_cast<FragmentShadingRateAttachmentInfoKHR const *>( &rhs ) )
+    {}
+
+
+    FragmentShadingRateAttachmentInfoKHR & operator=( FragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the native C struct.
+    FragmentShadingRateAttachmentInfoKHR & operator=( VkFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FragmentShadingRateAttachmentInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable builder-style setters; each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 FragmentShadingRateAttachmentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 FragmentShadingRateAttachmentInfoKHR & setPFragmentShadingRateAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pFragmentShadingRateAttachment_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pFragmentShadingRateAttachment = pFragmentShadingRateAttachment_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 FragmentShadingRateAttachmentInfoKHR & setShadingRateAttachmentTexelSize( VULKAN_HPP_NAMESPACE::Extent2D const & shadingRateAttachmentTexelSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shadingRateAttachmentTexelSize = shadingRateAttachmentTexelSize_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy views of this object as the native C struct.
+    operator VkFragmentShadingRateAttachmentInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkFragmentShadingRateAttachmentInfoKHR*>( this );
+    }
+
+    operator VkFragmentShadingRateAttachmentInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkFragmentShadingRateAttachmentInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const VULKAN_HPP_NAMESPACE::AttachmentReference2 * const &, VULKAN_HPP_NAMESPACE::Extent2D const &>
+#endif
+      // Tuple-of-references view over all members; used by the reflection-based operator== below.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, pFragmentShadingRateAttachment, shadingRateAttachmentTexelSize );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( FragmentShadingRateAttachmentInfoKHR const & ) const = default;
+#else
+    // Memberwise equality (sType and pNext pointer value included; pointees are NOT compared).
+    bool operator==( FragmentShadingRateAttachmentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pFragmentShadingRateAttachment == rhs.pFragmentShadingRateAttachment )
+          && ( shadingRateAttachmentTexelSize == rhs.shadingRateAttachmentTexelSize );
+#endif
+    }
+
+    bool operator!=( FragmentShadingRateAttachmentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFragmentShadingRateAttachmentInfoKHR;
+    const void * pNext = {};
+    const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pFragmentShadingRateAttachment = {};
+    VULKAN_HPP_NAMESPACE::Extent2D shadingRateAttachmentTexelSize = {};
+
+  };
+
+  // Maps the sType enumerant back to this C++ type (used by StructureChain lookups).
+  template <>
+  struct CppType<StructureType, StructureType::eFragmentShadingRateAttachmentInfoKHR>
+  {
+    using Type = FragmentShadingRateAttachmentInfoKHR;
+  };
+
+  // C++ wrapper for VkFramebufferAttachmentImageInfo (imageless framebuffers, Vulkan 1.2 /
+  // VK_KHR_imageless_framebuffer — see the KHR alias after this struct).
+  // Layout-compatible with the C struct; conversions below rely on reinterpret_cast.
+  struct FramebufferAttachmentImageInfo
+  {
+    using NativeType = VkFramebufferAttachmentImageInfo;
+
+    // Consumed by StructureChain machinery: at most one instance per pNext chain.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferAttachmentImageInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR FramebufferAttachmentImageInfo(VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, uint32_t width_ = {}, uint32_t height_ = {}, uint32_t layerCount_ = {}, uint32_t viewFormatCount_ = {}, const VULKAN_HPP_NAMESPACE::Format * pViewFormats_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), usage( usage_ ), width( width_ ), height( height_ ), layerCount( layerCount_ ), viewFormatCount( viewFormatCount_ ), pViewFormats( pViewFormats_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR FramebufferAttachmentImageInfo( FramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the native C struct (layout-compatible).
+    FramebufferAttachmentImageInfo( VkFramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : FramebufferAttachmentImageInfo( *reinterpret_cast<FramebufferAttachmentImageInfo const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Enhanced-mode constructor: viewFormatCount/pViewFormats are derived from the ArrayProxy,
+    // so count and pointer cannot get out of sync. The proxy must outlive this struct's use.
+    FramebufferAttachmentImageInfo( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_, uint32_t width_, uint32_t height_, uint32_t layerCount_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & viewFormats_, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), flags( flags_ ), usage( usage_ ), width( width_ ), height( height_ ), layerCount( layerCount_ ), viewFormatCount( static_cast<uint32_t>( viewFormats_.size() ) ), pViewFormats( viewFormats_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    FramebufferAttachmentImageInfo & operator=( FramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the native C struct.
+    FramebufferAttachmentImageInfo & operator=( VkFramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable builder-style setters; each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      usage = usage_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
+    {
+      width = width_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
+    {
+      height = height_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      layerCount = layerCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setViewFormatCount( uint32_t viewFormatCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      viewFormatCount = viewFormatCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setPViewFormats( const VULKAN_HPP_NAMESPACE::Format * pViewFormats_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pViewFormats = pViewFormats_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets count and pointer together from the proxy, keeping them consistent.
+    FramebufferAttachmentImageInfo & setViewFormats( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & viewFormats_ ) VULKAN_HPP_NOEXCEPT
+    {
+      viewFormatCount = static_cast<uint32_t>( viewFormats_.size() );
+      pViewFormats = viewFormats_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy views of this object as the native C struct.
+    operator VkFramebufferAttachmentImageInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkFramebufferAttachmentImageInfo*>( this );
+    }
+
+    operator VkFramebufferAttachmentImageInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkFramebufferAttachmentImageInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageCreateFlags const &, VULKAN_HPP_NAMESPACE::ImageUsageFlags const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Format * const &>
+#endif
+      // Tuple-of-references view over all members; used by the reflection-based operator== below.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, usage, width, height, layerCount, viewFormatCount, pViewFormats );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( FramebufferAttachmentImageInfo const & ) const = default;
+#else
+    // Memberwise equality; pointer members are compared by address, not pointee contents.
+    bool operator==( FramebufferAttachmentImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( usage == rhs.usage )
+          && ( width == rhs.width )
+          && ( height == rhs.height )
+          && ( layerCount == rhs.layerCount )
+          && ( viewFormatCount == rhs.viewFormatCount )
+          && ( pViewFormats == rhs.pViewFormats );
+#endif
+    }
+
+    bool operator!=( FramebufferAttachmentImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferAttachmentImageInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {};
+    VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {};
+    uint32_t width = {};
+    uint32_t height = {};
+    uint32_t layerCount = {};
+    uint32_t viewFormatCount = {};
+    const VULKAN_HPP_NAMESPACE::Format * pViewFormats = {};
+
+  };
+
+  // Maps the sType enumerant back to this C++ type (used by StructureChain lookups).
+  template <>
+  struct CppType<StructureType, StructureType::eFramebufferAttachmentImageInfo>
+  {
+    using Type = FramebufferAttachmentImageInfo;
+  };
+  // The KHR extension name is an alias of the promoted core type.
+  using FramebufferAttachmentImageInfoKHR = FramebufferAttachmentImageInfo;
+
+  // C++ wrapper for VkFramebufferAttachmentsCreateInfo: chained into FramebufferCreateInfo
+  // to describe attachments of an imageless framebuffer. Layout-compatible with the C struct.
+  struct FramebufferAttachmentsCreateInfo
+  {
+    using NativeType = VkFramebufferAttachmentsCreateInfo;
+
+    // Consumed by StructureChain machinery: at most one instance per pNext chain.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferAttachmentsCreateInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR FramebufferAttachmentsCreateInfo(uint32_t attachmentImageInfoCount_ = {}, const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo * pAttachmentImageInfos_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), attachmentImageInfoCount( attachmentImageInfoCount_ ), pAttachmentImageInfos( pAttachmentImageInfos_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR FramebufferAttachmentsCreateInfo( FramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the native C struct (layout-compatible).
+    FramebufferAttachmentsCreateInfo( VkFramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : FramebufferAttachmentsCreateInfo( *reinterpret_cast<FramebufferAttachmentsCreateInfo const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Enhanced-mode constructor: count/pointer derived together from the ArrayProxy.
+    FramebufferAttachmentsCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo> const & attachmentImageInfos_, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), attachmentImageInfoCount( static_cast<uint32_t>( attachmentImageInfos_.size() ) ), pAttachmentImageInfos( attachmentImageInfos_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    FramebufferAttachmentsCreateInfo & operator=( FramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the native C struct.
+    FramebufferAttachmentsCreateInfo & operator=( VkFramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable builder-style setters; each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentsCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentsCreateInfo & setAttachmentImageInfoCount( uint32_t attachmentImageInfoCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachmentImageInfoCount = attachmentImageInfoCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentsCreateInfo & setPAttachmentImageInfos( const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo * pAttachmentImageInfos_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAttachmentImageInfos = pAttachmentImageInfos_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets count and pointer together from the proxy, keeping them consistent.
+    FramebufferAttachmentsCreateInfo & setAttachmentImageInfos( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo> const & attachmentImageInfos_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachmentImageInfoCount = static_cast<uint32_t>( attachmentImageInfos_.size() );
+      pAttachmentImageInfos = attachmentImageInfos_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy views of this object as the native C struct.
+    operator VkFramebufferAttachmentsCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkFramebufferAttachmentsCreateInfo*>( this );
+    }
+
+    operator VkFramebufferAttachmentsCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkFramebufferAttachmentsCreateInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo * const &>
+#endif
+      // Tuple-of-references view over all members; used by the reflection-based operator== below.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, attachmentImageInfoCount, pAttachmentImageInfos );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( FramebufferAttachmentsCreateInfo const & ) const = default;
+#else
+    // Memberwise equality; pAttachmentImageInfos is compared by address only.
+    bool operator==( FramebufferAttachmentsCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( attachmentImageInfoCount == rhs.attachmentImageInfoCount )
+          && ( pAttachmentImageInfos == rhs.pAttachmentImageInfos );
+#endif
+    }
+
+    bool operator!=( FramebufferAttachmentsCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferAttachmentsCreateInfo;
+    const void * pNext = {};
+    uint32_t attachmentImageInfoCount = {};
+    const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo * pAttachmentImageInfos = {};
+
+  };
+
+  // Maps the sType enumerant back to this C++ type (used by StructureChain lookups).
+  template <>
+  struct CppType<StructureType, StructureType::eFramebufferAttachmentsCreateInfo>
+  {
+    using Type = FramebufferAttachmentsCreateInfo;
+  };
+  // The KHR extension name is an alias of the promoted core type.
+  using FramebufferAttachmentsCreateInfoKHR = FramebufferAttachmentsCreateInfo;
+
+  // C++ wrapper for VkFramebufferCreateInfo (input to vkCreateFramebuffer).
+  // Layout-compatible with the C struct; conversions below rely on reinterpret_cast.
+  struct FramebufferCreateInfo
+  {
+    using NativeType = VkFramebufferCreateInfo;
+
+    // Consumed by StructureChain machinery: at most one instance per pNext chain.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferCreateInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR FramebufferCreateInfo(VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, uint32_t attachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::ImageView * pAttachments_ = {}, uint32_t width_ = {}, uint32_t height_ = {}, uint32_t layers_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), renderPass( renderPass_ ), attachmentCount( attachmentCount_ ), pAttachments( pAttachments_ ), width( width_ ), height( height_ ), layers( layers_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR FramebufferCreateInfo( FramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the native C struct (layout-compatible).
+    FramebufferCreateInfo( VkFramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : FramebufferCreateInfo( *reinterpret_cast<FramebufferCreateInfo const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Enhanced-mode constructor: attachmentCount/pAttachments derived together from the ArrayProxy.
+    FramebufferCreateInfo( VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_, VULKAN_HPP_NAMESPACE::RenderPass renderPass_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageView> const & attachments_, uint32_t width_ = {}, uint32_t height_ = {}, uint32_t layers_ = {}, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), flags( flags_ ), renderPass( renderPass_ ), attachmentCount( static_cast<uint32_t>( attachments_.size() ) ), pAttachments( attachments_.data() ), width( width_ ), height( height_ ), layers( layers_ )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    FramebufferCreateInfo & operator=( FramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the native C struct.
+    FramebufferCreateInfo & operator=( VkFramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FramebufferCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable builder-style setters; each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
+    {
+      renderPass = renderPass_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachmentCount = attachmentCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::ImageView * pAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAttachments = pAttachments_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets count and pointer together from the proxy, keeping them consistent.
+    FramebufferCreateInfo & setAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageView> const & attachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachmentCount = static_cast<uint32_t>( attachments_.size() );
+      pAttachments = attachments_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
+    {
+      width = width_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
+    {
+      height = height_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setLayers( uint32_t layers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      layers = layers_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy views of this object as the native C struct.
+    operator VkFramebufferCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkFramebufferCreateInfo*>( this );
+    }
+
+    operator VkFramebufferCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkFramebufferCreateInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::FramebufferCreateFlags const &, VULKAN_HPP_NAMESPACE::RenderPass const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::ImageView * const &, uint32_t const &, uint32_t const &, uint32_t const &>
+#endif
+      // Tuple-of-references view over all members; used by the reflection-based operator== below.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, renderPass, attachmentCount, pAttachments, width, height, layers );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( FramebufferCreateInfo const & ) const = default;
+#else
+    // Memberwise equality; pAttachments is compared by address only.
+    bool operator==( FramebufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( renderPass == rhs.renderPass )
+          && ( attachmentCount == rhs.attachmentCount )
+          && ( pAttachments == rhs.pAttachments )
+          && ( width == rhs.width )
+          && ( height == rhs.height )
+          && ( layers == rhs.layers );
+#endif
+    }
+
+    bool operator!=( FramebufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags = {};
+    VULKAN_HPP_NAMESPACE::RenderPass renderPass = {};
+    uint32_t attachmentCount = {};
+    const VULKAN_HPP_NAMESPACE::ImageView * pAttachments = {};
+    uint32_t width = {};
+    uint32_t height = {};
+    uint32_t layers = {};
+
+  };
+
+  // Maps the sType enumerant back to this C++ type (used by StructureChain lookups).
+  template <>
+  struct CppType<StructureType, StructureType::eFramebufferCreateInfo>
+  {
+    using Type = FramebufferCreateInfo;
+  };
+
+  // C++ wrapper for VkFramebufferMixedSamplesCombinationNV (VK_NV_coverage_reduction_mode).
+  // Filled in by the implementation rather than the application: note the non-const
+  // `void * pNext` and the absence of a VULKAN_HPP_NO_STRUCT_SETTERS section below.
+  // Layout-compatible with the C struct; conversions rely on reinterpret_cast.
+  struct FramebufferMixedSamplesCombinationNV
+  {
+    using NativeType = VkFramebufferMixedSamplesCombinationNV;
+
+    // Consumed by StructureChain machinery: at most one instance per pNext chain.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferMixedSamplesCombinationNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR FramebufferMixedSamplesCombinationNV(VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge, VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, VULKAN_HPP_NAMESPACE::SampleCountFlags depthStencilSamples_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags colorSamples_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), coverageReductionMode( coverageReductionMode_ ), rasterizationSamples( rasterizationSamples_ ), depthStencilSamples( depthStencilSamples_ ), colorSamples( colorSamples_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR FramebufferMixedSamplesCombinationNV( FramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the native C struct (layout-compatible).
+    FramebufferMixedSamplesCombinationNV( VkFramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : FramebufferMixedSamplesCombinationNV( *reinterpret_cast<FramebufferMixedSamplesCombinationNV const *>( &rhs ) )
+    {}
+
+
+    FramebufferMixedSamplesCombinationNV & operator=( FramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the native C struct.
+    FramebufferMixedSamplesCombinationNV & operator=( VkFramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV const *>( &rhs );
+      return *this;
+    }
+
+
+    // Zero-copy views of this object as the native C struct.
+    operator VkFramebufferMixedSamplesCombinationNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkFramebufferMixedSamplesCombinationNV*>( this );
+    }
+
+    operator VkFramebufferMixedSamplesCombinationNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkFramebufferMixedSamplesCombinationNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::CoverageReductionModeNV const &, VULKAN_HPP_NAMESPACE::SampleCountFlagBits const &, VULKAN_HPP_NAMESPACE::SampleCountFlags const &, VULKAN_HPP_NAMESPACE::SampleCountFlags const &>
+#endif
+      // Tuple-of-references view over all members; used by the reflection-based operator== below.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, coverageReductionMode, rasterizationSamples, depthStencilSamples, colorSamples );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( FramebufferMixedSamplesCombinationNV const & ) const = default;
+#else
+    // Memberwise equality (sType and pNext pointer value included).
+    bool operator==( FramebufferMixedSamplesCombinationNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( coverageReductionMode == rhs.coverageReductionMode )
+          && ( rasterizationSamples == rhs.rasterizationSamples )
+          && ( depthStencilSamples == rhs.depthStencilSamples )
+          && ( colorSamples == rhs.colorSamples );
+#endif
+    }
+
+    bool operator!=( FramebufferMixedSamplesCombinationNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferMixedSamplesCombinationNV;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge;
+    VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
+    VULKAN_HPP_NAMESPACE::SampleCountFlags depthStencilSamples = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlags colorSamples = {};
+
+  };
+
+  // Maps the sType enumerant back to this C++ type (used by StructureChain lookups).
+  template <>
+  struct CppType<StructureType, StructureType::eFramebufferMixedSamplesCombinationNV>
+  {
+    using Type = FramebufferMixedSamplesCombinationNV;
+  };
+
+  // C++ wrapper for VkIndirectCommandsStreamNV (VK_NV_device_generated_commands):
+  // a buffer handle plus byte offset identifying one input stream.
+  // Plain, non-extensible struct — no sType/pNext members, hence no structureType
+  // constant or CppType specialization. Layout-compatible with the C struct.
+  struct IndirectCommandsStreamNV
+  {
+    using NativeType = VkIndirectCommandsStreamNV;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR IndirectCommandsStreamNV(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}) VULKAN_HPP_NOEXCEPT
+    : buffer( buffer_ ), offset( offset_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR IndirectCommandsStreamNV( IndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the native C struct (layout-compatible).
+    IndirectCommandsStreamNV( VkIndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : IndirectCommandsStreamNV( *reinterpret_cast<IndirectCommandsStreamNV const *>( &rhs ) )
+    {}
+
+
+    IndirectCommandsStreamNV & operator=( IndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the native C struct.
+    IndirectCommandsStreamNV & operator=( VkIndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable builder-style setters; each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsStreamNV & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsStreamNV & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy views of this object as the native C struct.
+    operator VkIndirectCommandsStreamNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkIndirectCommandsStreamNV*>( this );
+    }
+
+    operator VkIndirectCommandsStreamNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkIndirectCommandsStreamNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::Buffer const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
+#endif
+      // Tuple-of-references view over both members; used by the reflection-based operator== below.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( buffer, offset );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( IndirectCommandsStreamNV const & ) const = default;
+#else
+    // Memberwise equality (handle + offset).
+    bool operator==( IndirectCommandsStreamNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( buffer == rhs.buffer )
+          && ( offset == rhs.offset );
+#endif
+    }
+
+    bool operator!=( IndirectCommandsStreamNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
+
+  };
+
+  // C++ wrapper for VkGeneratedCommandsInfoNV. The Vk* conversion operators
+  // below reinterpret_cast this object in place, so the member list must stay
+  // layout-compatible with the C struct — do not reorder or add data members.
+  struct GeneratedCommandsInfoNV
+  {
+    using NativeType = VkGeneratedCommandsInfoNV;
+
+    // allowDuplicate / structureType drive the library's pNext-chain validation.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeneratedCommandsInfoNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR GeneratedCommandsInfoNV(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ = {}, uint32_t streamCount_ = {}, const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV * pStreams_ = {}, uint32_t sequencesCount_ = {}, VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ = {}, VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ = {}, VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), pipelineBindPoint( pipelineBindPoint_ ), pipeline( pipeline_ ), indirectCommandsLayout( indirectCommandsLayout_ ), streamCount( streamCount_ ), pStreams( pStreams_ ), sequencesCount( sequencesCount_ ), preprocessBuffer( preprocessBuffer_ ), preprocessOffset( preprocessOffset_ ), preprocessSize( preprocessSize_ ), sequencesCountBuffer( sequencesCountBuffer_ ), sequencesCountOffset( sequencesCountOffset_ ), sequencesIndexBuffer( sequencesIndexBuffer_ ), sequencesIndexOffset( sequencesIndexOffset_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR GeneratedCommandsInfoNV( GeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the C++ wrapper
+    // (valid only because the two types share an identical layout).
+    GeneratedCommandsInfoNV( VkGeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : GeneratedCommandsInfoNV( *reinterpret_cast<GeneratedCommandsInfoNV const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Enhanced-mode convenience constructor: streamCount and pStreams are
+    // derived from the ArrayProxy instead of being passed separately.
+    GeneratedCommandsInfoNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_, VULKAN_HPP_NAMESPACE::Pipeline pipeline_, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV> const & streams_, uint32_t sequencesCount_ = {}, VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ = {}, VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ = {}, VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ = {}, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), pipelineBindPoint( pipelineBindPoint_ ), pipeline( pipeline_ ), indirectCommandsLayout( indirectCommandsLayout_ ), streamCount( static_cast<uint32_t>( streams_.size() ) ), pStreams( streams_.data() ), sequencesCount( sequencesCount_ ), preprocessBuffer( preprocessBuffer_ ), preprocessOffset( preprocessOffset_ ), preprocessSize( preprocessSize_ ), sequencesCountBuffer( sequencesCountBuffer_ ), sequencesCountOffset( sequencesCountOffset_ ), sequencesIndexBuffer( sequencesIndexBuffer_ ), sequencesIndexOffset( sequencesIndexOffset_ )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    GeneratedCommandsInfoNV & operator=( GeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    GeneratedCommandsInfoNV & operator=( VkGeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each assigns one member and returns *this for chaining.
+    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineBindPoint = pipelineBindPoint_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipeline = pipeline_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setIndirectCommandsLayout( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      indirectCommandsLayout = indirectCommandsLayout_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setStreamCount( uint32_t streamCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      streamCount = streamCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPStreams( const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV * pStreams_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pStreams = pStreams_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets both streamCount and pStreams from the ArrayProxy in one call.
+    GeneratedCommandsInfoNV & setStreams( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV> const & streams_ ) VULKAN_HPP_NOEXCEPT
+    {
+      streamCount = static_cast<uint32_t>( streams_.size() );
+      pStreams = streams_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setSequencesCount( uint32_t sequencesCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sequencesCount = sequencesCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPreprocessBuffer( VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      preprocessBuffer = preprocessBuffer_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPreprocessOffset( VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      preprocessOffset = preprocessOffset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPreprocessSize( VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      preprocessSize = preprocessSize_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setSequencesCountBuffer( VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sequencesCountBuffer = sequencesCountBuffer_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setSequencesCountOffset( VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sequencesCountOffset = sequencesCountOffset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setSequencesIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sequencesIndexBuffer = sequencesIndexBuffer_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setSequencesIndexOffset( VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sequencesIndexOffset = sequencesIndexOffset_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C type (rely on identical layout).
+    operator VkGeneratedCommandsInfoNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkGeneratedCommandsInfoNV*>( this );
+    }
+
+    operator VkGeneratedCommandsInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkGeneratedCommandsInfoNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineBindPoint const &, VULKAN_HPP_NAMESPACE::Pipeline const &, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Buffer const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::Buffer const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::Buffer const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
+#endif
+      // Returns a tuple of references to every member, used by comparisons.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, pipelineBindPoint, pipeline, indirectCommandsLayout, streamCount, pStreams, sequencesCount, preprocessBuffer, preprocessOffset, preprocessSize, sequencesCountBuffer, sequencesCountOffset, sequencesIndexBuffer, sequencesIndexOffset );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( GeneratedCommandsInfoNV const & ) const = default;
+#else
+    // Member-wise equality; note pStreams/pNext compare the pointers, not the
+    // data they point at.
+    bool operator==( GeneratedCommandsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pipelineBindPoint == rhs.pipelineBindPoint )
+          && ( pipeline == rhs.pipeline )
+          && ( indirectCommandsLayout == rhs.indirectCommandsLayout )
+          && ( streamCount == rhs.streamCount )
+          && ( pStreams == rhs.pStreams )
+          && ( sequencesCount == rhs.sequencesCount )
+          && ( preprocessBuffer == rhs.preprocessBuffer )
+          && ( preprocessOffset == rhs.preprocessOffset )
+          && ( preprocessSize == rhs.preprocessSize )
+          && ( sequencesCountBuffer == rhs.sequencesCountBuffer )
+          && ( sequencesCountOffset == rhs.sequencesCountOffset )
+          && ( sequencesIndexBuffer == rhs.sequencesIndexBuffer )
+          && ( sequencesIndexOffset == rhs.sequencesIndexOffset );
+#endif
+    }
+
+    bool operator!=( GeneratedCommandsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // Members mirror VkGeneratedCommandsInfoNV field-for-field.
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeneratedCommandsInfoNV;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
+    VULKAN_HPP_NAMESPACE::Pipeline pipeline = {};
+    VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout = {};
+    uint32_t streamCount = {};
+    const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV * pStreams = {};
+    uint32_t sequencesCount = {};
+    VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize = {};
+    VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset = {};
+    VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset = {};
+
+  };
+
+  // Maps StructureType::eGeneratedCommandsInfoNV back to its C++ wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eGeneratedCommandsInfoNV>
+  {
+    using Type = GeneratedCommandsInfoNV;
+  };
+
+  // C++ wrapper for VkGeneratedCommandsMemoryRequirementsInfoNV; the Vk*
+  // conversion operators reinterpret_cast in place, so the member layout must
+  // stay identical to the C struct.
+  struct GeneratedCommandsMemoryRequirementsInfoNV
+  {
+    using NativeType = VkGeneratedCommandsMemoryRequirementsInfoNV;
+
+    // allowDuplicate / structureType drive the library's pNext-chain validation.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeneratedCommandsMemoryRequirementsInfoNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR GeneratedCommandsMemoryRequirementsInfoNV(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ = {}, uint32_t maxSequencesCount_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), pipelineBindPoint( pipelineBindPoint_ ), pipeline( pipeline_ ), indirectCommandsLayout( indirectCommandsLayout_ ), maxSequencesCount( maxSequencesCount_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR GeneratedCommandsMemoryRequirementsInfoNV( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct via layout-compatible reinterpret_cast.
+    GeneratedCommandsMemoryRequirementsInfoNV( VkGeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : GeneratedCommandsMemoryRequirementsInfoNV( *reinterpret_cast<GeneratedCommandsMemoryRequirementsInfoNV const *>( &rhs ) )
+    {}
+
+
+    GeneratedCommandsMemoryRequirementsInfoNV & operator=( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    GeneratedCommandsMemoryRequirementsInfoNV & operator=( VkGeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each assigns one member and returns *this for chaining.
+    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineBindPoint = pipelineBindPoint_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipeline = pipeline_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV & setIndirectCommandsLayout( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      indirectCommandsLayout = indirectCommandsLayout_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV & setMaxSequencesCount( uint32_t maxSequencesCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxSequencesCount = maxSequencesCount_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C type (rely on identical layout).
+    operator VkGeneratedCommandsMemoryRequirementsInfoNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV*>( this );
+    }
+
+    operator VkGeneratedCommandsMemoryRequirementsInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkGeneratedCommandsMemoryRequirementsInfoNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineBindPoint const &, VULKAN_HPP_NAMESPACE::Pipeline const &, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV const &, uint32_t const &>
+#endif
+      // Returns a tuple of references to every member, used by comparisons.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, pipelineBindPoint, pipeline, indirectCommandsLayout, maxSequencesCount );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( GeneratedCommandsMemoryRequirementsInfoNV const & ) const = default;
+#else
+    // Member-wise equality; pNext compares the pointer, not the pointed-to chain.
+    bool operator==( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pipelineBindPoint == rhs.pipelineBindPoint )
+          && ( pipeline == rhs.pipeline )
+          && ( indirectCommandsLayout == rhs.indirectCommandsLayout )
+          && ( maxSequencesCount == rhs.maxSequencesCount );
+#endif
+    }
+
+    bool operator!=( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // Members mirror VkGeneratedCommandsMemoryRequirementsInfoNV field-for-field.
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeneratedCommandsMemoryRequirementsInfoNV;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
+    VULKAN_HPP_NAMESPACE::Pipeline pipeline = {};
+    VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout = {};
+    uint32_t maxSequencesCount = {};
+
+  };
+
+  // Maps StructureType::eGeneratedCommandsMemoryRequirementsInfoNV back to its
+  // C++ wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eGeneratedCommandsMemoryRequirementsInfoNV>
+  {
+    using Type = GeneratedCommandsMemoryRequirementsInfoNV;
+  };
+
+  // C++ wrapper for VkVertexInputBindingDescription. This is a plain (non
+  // pNext-extensible) struct: it has no sType/pNext members and no
+  // structureType constant. Layout must stay identical to the C struct for the
+  // reinterpret_cast conversion operators below.
+  struct VertexInputBindingDescription
+  {
+    using NativeType = VkVertexInputBindingDescription;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR VertexInputBindingDescription(uint32_t binding_ = {}, uint32_t stride_ = {}, VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex) VULKAN_HPP_NOEXCEPT
+    : binding( binding_ ), stride( stride_ ), inputRate( inputRate_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VertexInputBindingDescription( VertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct via layout-compatible reinterpret_cast.
+    VertexInputBindingDescription( VkVertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VertexInputBindingDescription( *reinterpret_cast<VertexInputBindingDescription const *>( &rhs ) )
+    {}
+
+
+    VertexInputBindingDescription & operator=( VertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VertexInputBindingDescription & operator=( VkVertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VertexInputBindingDescription const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each assigns one member and returns *this for chaining.
+    VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
+    {
+      binding = binding_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stride = stride_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription & setInputRate( VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ ) VULKAN_HPP_NOEXCEPT
+    {
+      inputRate = inputRate_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C type (rely on identical layout).
+    operator VkVertexInputBindingDescription const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVertexInputBindingDescription*>( this );
+    }
+
+    operator VkVertexInputBindingDescription &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVertexInputBindingDescription*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::VertexInputRate const &>
+#endif
+      // Returns a tuple of references to every member, used by comparisons.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( binding, stride, inputRate );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( VertexInputBindingDescription const & ) const = default;
+#else
+    bool operator==( VertexInputBindingDescription const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( binding == rhs.binding )
+          && ( stride == rhs.stride )
+          && ( inputRate == rhs.inputRate );
+#endif
+    }
+
+    bool operator!=( VertexInputBindingDescription const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // Members mirror VkVertexInputBindingDescription field-for-field.
+    uint32_t binding = {};
+    uint32_t stride = {};
+    VULKAN_HPP_NAMESPACE::VertexInputRate inputRate = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex;
+
+  };
+
+  // C++ wrapper for VkVertexInputAttributeDescription. Plain (non
+  // pNext-extensible) struct: no sType/pNext members. Layout must stay
+  // identical to the C struct for the reinterpret_cast conversions below.
+  struct VertexInputAttributeDescription
+  {
+    using NativeType = VkVertexInputAttributeDescription;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription(uint32_t location_ = {}, uint32_t binding_ = {}, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, uint32_t offset_ = {}) VULKAN_HPP_NOEXCEPT
+    : location( location_ ), binding( binding_ ), format( format_ ), offset( offset_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription( VertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct via layout-compatible reinterpret_cast.
+    VertexInputAttributeDescription( VkVertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VertexInputAttributeDescription( *reinterpret_cast<VertexInputAttributeDescription const *>( &rhs ) )
+    {}
+
+
+    VertexInputAttributeDescription & operator=( VertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VertexInputAttributeDescription & operator=( VkVertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each assigns one member and returns *this for chaining.
+    VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription & setLocation( uint32_t location_ ) VULKAN_HPP_NOEXCEPT
+    {
+      location = location_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
+    {
+      binding = binding_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
+    {
+      format = format_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C type (rely on identical layout).
+    operator VkVertexInputAttributeDescription const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVertexInputAttributeDescription*>( this );
+    }
+
+    operator VkVertexInputAttributeDescription &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVertexInputAttributeDescription*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Format const &, uint32_t const &>
+#endif
+      // Returns a tuple of references to every member, used by comparisons.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( location, binding, format, offset );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( VertexInputAttributeDescription const & ) const = default;
+#else
+    bool operator==( VertexInputAttributeDescription const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( location == rhs.location )
+          && ( binding == rhs.binding )
+          && ( format == rhs.format )
+          && ( offset == rhs.offset );
+#endif
+    }
+
+    bool operator!=( VertexInputAttributeDescription const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // Members mirror VkVertexInputAttributeDescription field-for-field.
+    uint32_t location = {};
+    uint32_t binding = {};
+    VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    uint32_t offset = {};
+
+  };
+
+  // C++ wrapper for VkPipelineVertexInputStateCreateInfo. The Vk* conversion
+  // operators reinterpret_cast in place, so member layout must stay identical
+  // to the C struct.
+  struct PipelineVertexInputStateCreateInfo
+  {
+    using NativeType = VkPipelineVertexInputStateCreateInfo;
+
+    // allowDuplicate / structureType drive the library's pNext-chain validation.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineVertexInputStateCreateInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PipelineVertexInputStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_ = {}, uint32_t vertexBindingDescriptionCount_ = {}, const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription * pVertexBindingDescriptions_ = {}, uint32_t vertexAttributeDescriptionCount_ = {}, const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription * pVertexAttributeDescriptions_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), vertexBindingDescriptionCount( vertexBindingDescriptionCount_ ), pVertexBindingDescriptions( pVertexBindingDescriptions_ ), vertexAttributeDescriptionCount( vertexAttributeDescriptionCount_ ), pVertexAttributeDescriptions( pVertexAttributeDescriptions_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineVertexInputStateCreateInfo( PipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct via layout-compatible reinterpret_cast.
+    PipelineVertexInputStateCreateInfo( VkPipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineVertexInputStateCreateInfo( *reinterpret_cast<PipelineVertexInputStateCreateInfo const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Enhanced-mode convenience constructor: the count/pointer pairs are
+    // derived from the two ArrayProxy arguments.
+    PipelineVertexInputStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription> const & vertexBindingDescriptions_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription> const & vertexAttributeDescriptions_ = {}, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), flags( flags_ ), vertexBindingDescriptionCount( static_cast<uint32_t>( vertexBindingDescriptions_.size() ) ), pVertexBindingDescriptions( vertexBindingDescriptions_.data() ), vertexAttributeDescriptionCount( static_cast<uint32_t>( vertexAttributeDescriptions_.size() ) ), pVertexAttributeDescriptions( vertexAttributeDescriptions_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    PipelineVertexInputStateCreateInfo & operator=( PipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PipelineVertexInputStateCreateInfo & operator=( VkPipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each assigns one member and returns *this for chaining.
+    VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & setVertexBindingDescriptionCount( uint32_t vertexBindingDescriptionCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexBindingDescriptionCount = vertexBindingDescriptionCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & setPVertexBindingDescriptions( const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription * pVertexBindingDescriptions_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pVertexBindingDescriptions = pVertexBindingDescriptions_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets both vertexBindingDescriptionCount and pVertexBindingDescriptions
+    // from the ArrayProxy in one call.
+    PipelineVertexInputStateCreateInfo & setVertexBindingDescriptions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription> const & vertexBindingDescriptions_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexBindingDescriptionCount = static_cast<uint32_t>( vertexBindingDescriptions_.size() );
+      pVertexBindingDescriptions = vertexBindingDescriptions_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & setVertexAttributeDescriptionCount( uint32_t vertexAttributeDescriptionCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexAttributeDescriptionCount = vertexAttributeDescriptionCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & setPVertexAttributeDescriptions( const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription * pVertexAttributeDescriptions_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pVertexAttributeDescriptions = pVertexAttributeDescriptions_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets both vertexAttributeDescriptionCount and pVertexAttributeDescriptions
+    // from the ArrayProxy in one call.
+    PipelineVertexInputStateCreateInfo & setVertexAttributeDescriptions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription> const & vertexAttributeDescriptions_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexAttributeDescriptionCount = static_cast<uint32_t>( vertexAttributeDescriptions_.size() );
+      pVertexAttributeDescriptions = vertexAttributeDescriptions_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C type (rely on identical layout).
+    operator VkPipelineVertexInputStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineVertexInputStateCreateInfo*>( this );
+    }
+
+    operator VkPipelineVertexInputStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineVertexInputStateCreateInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription * const &>
+#endif
+      // Returns a tuple of references to every member, used by comparisons.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, vertexBindingDescriptionCount, pVertexBindingDescriptions, vertexAttributeDescriptionCount, pVertexAttributeDescriptions );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PipelineVertexInputStateCreateInfo const & ) const = default;
+#else
+    // Member-wise equality; the p* members compare pointers, not the arrays
+    // they point at.
+    bool operator==( PipelineVertexInputStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( vertexBindingDescriptionCount == rhs.vertexBindingDescriptionCount )
+          && ( pVertexBindingDescriptions == rhs.pVertexBindingDescriptions )
+          && ( vertexAttributeDescriptionCount == rhs.vertexAttributeDescriptionCount )
+          && ( pVertexAttributeDescriptions == rhs.pVertexAttributeDescriptions );
+#endif
+    }
+
+    bool operator!=( PipelineVertexInputStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // Members mirror VkPipelineVertexInputStateCreateInfo field-for-field.
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineVertexInputStateCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags = {};
+    uint32_t vertexBindingDescriptionCount = {};
+    const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription * pVertexBindingDescriptions = {};
+    uint32_t vertexAttributeDescriptionCount = {};
+    const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription * pVertexAttributeDescriptions = {};
+
+  };
+
+  // Maps StructureType::ePipelineVertexInputStateCreateInfo back to its C++
+  // wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineVertexInputStateCreateInfo>
+  {
+    using Type = PipelineVertexInputStateCreateInfo;
+  };
+
+  // C++ wrapper for VkPipelineInputAssemblyStateCreateInfo (primitive topology /
+  // restart state of a graphics pipeline). Bit-compatible with the native C
+  // struct: the conversion operators below reinterpret_cast *this, so member
+  // order and types must not change.
+  struct PipelineInputAssemblyStateCreateInfo
+  {
+    using NativeType = VkPipelineInputAssemblyStateCreateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineInputAssemblyStateCreateInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PipelineInputAssemblyStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::PrimitiveTopology topology_ = VULKAN_HPP_NAMESPACE::PrimitiveTopology::ePointList, VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), topology( topology_ ), primitiveRestartEnable( primitiveRestartEnable_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineInputAssemblyStateCreateInfo( PipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the native C struct; relies on the layout compatibility noted above.
+    PipelineInputAssemblyStateCreateInfo( VkPipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineInputAssemblyStateCreateInfo( *reinterpret_cast<PipelineInputAssemblyStateCreateInfo const *>( &rhs ) )
+    {}
+
+
+    PipelineInputAssemblyStateCreateInfo & operator=( PipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PipelineInputAssemblyStateCreateInfo & operator=( VkPipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each assigns one member and returns *this for chaining.
+    VULKAN_HPP_CONSTEXPR_14 PipelineInputAssemblyStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineInputAssemblyStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineInputAssemblyStateCreateInfo & setTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology topology_ ) VULKAN_HPP_NOEXCEPT
+    {
+      topology = topology_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineInputAssemblyStateCreateInfo & setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      primitiveRestartEnable = primitiveRestartEnable_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the native C struct (aliasing cast, no copy).
+    operator VkPipelineInputAssemblyStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineInputAssemblyStateCreateInfo*>( this );
+    }
+
+    operator VkPipelineInputAssemblyStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineInputAssemblyStateCreateInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags const &, VULKAN_HPP_NAMESPACE::PrimitiveTopology const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      // Tuple of references to all members; backs operator== when reflection is enabled.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, topology, primitiveRestartEnable );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PipelineInputAssemblyStateCreateInfo const & ) const = default;
+#else
+    // Member-wise equality (via reflect() when available, else explicit comparisons).
+    bool operator==( PipelineInputAssemblyStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( topology == rhs.topology )
+          && ( primitiveRestartEnable == rhs.primitiveRestartEnable );
+#endif
+    }
+
+    bool operator!=( PipelineInputAssemblyStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // Members mirror VkPipelineInputAssemblyStateCreateInfo exactly (order and types).
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineInputAssemblyStateCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags = {};
+    VULKAN_HPP_NAMESPACE::PrimitiveTopology topology = VULKAN_HPP_NAMESPACE::PrimitiveTopology::ePointList;
+    VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable = {};
+
+  };
+
+  // Compile-time mapping from the StructureType enum value back to this struct type.
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineInputAssemblyStateCreateInfo>
+  {
+    using Type = PipelineInputAssemblyStateCreateInfo;
+  };
+
+  // C++ wrapper for VkPipelineTessellationStateCreateInfo (patch control point
+  // count for tessellation). Bit-compatible with the native C struct: the
+  // conversion operators below reinterpret_cast *this, so member order and
+  // types must not change.
+  struct PipelineTessellationStateCreateInfo
+  {
+    using NativeType = VkPipelineTessellationStateCreateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineTessellationStateCreateInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PipelineTessellationStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags_ = {}, uint32_t patchControlPoints_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), patchControlPoints( patchControlPoints_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineTessellationStateCreateInfo( PipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the native C struct; relies on the layout compatibility noted above.
+    PipelineTessellationStateCreateInfo( VkPipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineTessellationStateCreateInfo( *reinterpret_cast<PipelineTessellationStateCreateInfo const *>( &rhs ) )
+    {}
+
+
+    PipelineTessellationStateCreateInfo & operator=( PipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PipelineTessellationStateCreateInfo & operator=( VkPipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each assigns one member and returns *this for chaining.
+    VULKAN_HPP_CONSTEXPR_14 PipelineTessellationStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineTessellationStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineTessellationStateCreateInfo & setPatchControlPoints( uint32_t patchControlPoints_ ) VULKAN_HPP_NOEXCEPT
+    {
+      patchControlPoints = patchControlPoints_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the native C struct (aliasing cast, no copy).
+    operator VkPipelineTessellationStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineTessellationStateCreateInfo*>( this );
+    }
+
+    operator VkPipelineTessellationStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineTessellationStateCreateInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags const &, uint32_t const &>
+#endif
+      // Tuple of references to all members; backs operator== when reflection is enabled.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, patchControlPoints );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PipelineTessellationStateCreateInfo const & ) const = default;
+#else
+    // Member-wise equality (via reflect() when available, else explicit comparisons).
+    bool operator==( PipelineTessellationStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( patchControlPoints == rhs.patchControlPoints );
+#endif
+    }
+
+    bool operator!=( PipelineTessellationStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // Members mirror VkPipelineTessellationStateCreateInfo exactly (order and types).
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineTessellationStateCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags = {};
+    uint32_t patchControlPoints = {};
+
+  };
+
+  // Compile-time mapping from the StructureType enum value back to this struct type.
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineTessellationStateCreateInfo>
+  {
+    using Type = PipelineTessellationStateCreateInfo;
+  };
+
+  // C++ wrapper for VkPipelineViewportStateCreateInfo (viewport and scissor
+  // arrays of a graphics pipeline). Bit-compatible with the native C struct:
+  // the conversion operators below reinterpret_cast *this, so member order and
+  // types must not change. Pointer members are not owned; callers must keep
+  // the pointed-to arrays alive while this struct is in use.
+  struct PipelineViewportStateCreateInfo
+  {
+    using NativeType = VkPipelineViewportStateCreateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportStateCreateInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PipelineViewportStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_ = {}, uint32_t viewportCount_ = {}, const VULKAN_HPP_NAMESPACE::Viewport * pViewports_ = {}, uint32_t scissorCount_ = {}, const VULKAN_HPP_NAMESPACE::Rect2D * pScissors_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), viewportCount( viewportCount_ ), pViewports( pViewports_ ), scissorCount( scissorCount_ ), pScissors( pScissors_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineViewportStateCreateInfo( PipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the native C struct; relies on the layout compatibility noted above.
+    PipelineViewportStateCreateInfo( VkPipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineViewportStateCreateInfo( *reinterpret_cast<PipelineViewportStateCreateInfo const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Enhanced-mode convenience: derives count/pointer pairs from array proxies.
+    PipelineViewportStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors_ = {}, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), flags( flags_ ), viewportCount( static_cast<uint32_t>( viewports_.size() ) ), pViewports( viewports_.data() ), scissorCount( static_cast<uint32_t>( scissors_.size() ) ), pScissors( scissors_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    PipelineViewportStateCreateInfo & operator=( PipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PipelineViewportStateCreateInfo & operator=( VkPipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each assigns one member and returns *this for chaining.
+    VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      viewportCount = viewportCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & setPViewports( const VULKAN_HPP_NAMESPACE::Viewport * pViewports_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pViewports = pViewports_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets viewportCount and pViewports together from an array proxy.
+    PipelineViewportStateCreateInfo & setViewports( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports_ ) VULKAN_HPP_NOEXCEPT
+    {
+      viewportCount = static_cast<uint32_t>( viewports_.size() );
+      pViewports = viewports_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & setScissorCount( uint32_t scissorCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      scissorCount = scissorCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & setPScissors( const VULKAN_HPP_NAMESPACE::Rect2D * pScissors_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pScissors = pScissors_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets scissorCount and pScissors together from an array proxy.
+    PipelineViewportStateCreateInfo & setScissors( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors_ ) VULKAN_HPP_NOEXCEPT
+    {
+      scissorCount = static_cast<uint32_t>( scissors_.size() );
+      pScissors = scissors_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the native C struct (aliasing cast, no copy).
+    operator VkPipelineViewportStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineViewportStateCreateInfo*>( this );
+    }
+
+    operator VkPipelineViewportStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineViewportStateCreateInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Viewport * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Rect2D * const &>
+#endif
+      // Tuple of references to all members; backs operator== when reflection is enabled.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, viewportCount, pViewports, scissorCount, pScissors );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PipelineViewportStateCreateInfo const & ) const = default;
+#else
+    // Member-wise equality; note pointers are compared by address, not contents.
+    bool operator==( PipelineViewportStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( viewportCount == rhs.viewportCount )
+          && ( pViewports == rhs.pViewports )
+          && ( scissorCount == rhs.scissorCount )
+          && ( pScissors == rhs.pScissors );
+#endif
+    }
+
+    bool operator!=( PipelineViewportStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // Members mirror VkPipelineViewportStateCreateInfo exactly (order and types).
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportStateCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags = {};
+    uint32_t viewportCount = {};
+    const VULKAN_HPP_NAMESPACE::Viewport * pViewports = {};
+    uint32_t scissorCount = {};
+    const VULKAN_HPP_NAMESPACE::Rect2D * pScissors = {};
+
+  };
+
+  // Compile-time mapping from the StructureType enum value back to this struct type.
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineViewportStateCreateInfo>
+  {
+    using Type = PipelineViewportStateCreateInfo;
+  };
+
+  // C++ wrapper for VkPipelineRasterizationStateCreateInfo (polygon mode, cull
+  // mode, depth bias, line width, etc.). Bit-compatible with the native C
+  // struct: the conversion operators below reinterpret_cast *this, so member
+  // order and types must not change.
+  struct PipelineRasterizationStateCreateInfo
+  {
+    using NativeType = VkPipelineRasterizationStateCreateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationStateCreateInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PipelineRasterizationStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable_ = {}, VULKAN_HPP_NAMESPACE::PolygonMode polygonMode_ = VULKAN_HPP_NAMESPACE::PolygonMode::eFill, VULKAN_HPP_NAMESPACE::CullModeFlags cullMode_ = {}, VULKAN_HPP_NAMESPACE::FrontFace frontFace_ = VULKAN_HPP_NAMESPACE::FrontFace::eCounterClockwise, VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable_ = {}, float depthBiasConstantFactor_ = {}, float depthBiasClamp_ = {}, float depthBiasSlopeFactor_ = {}, float lineWidth_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), depthClampEnable( depthClampEnable_ ), rasterizerDiscardEnable( rasterizerDiscardEnable_ ), polygonMode( polygonMode_ ), cullMode( cullMode_ ), frontFace( frontFace_ ), depthBiasEnable( depthBiasEnable_ ), depthBiasConstantFactor( depthBiasConstantFactor_ ), depthBiasClamp( depthBiasClamp_ ), depthBiasSlopeFactor( depthBiasSlopeFactor_ ), lineWidth( lineWidth_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineRasterizationStateCreateInfo( PipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the native C struct; relies on the layout compatibility noted above.
+    PipelineRasterizationStateCreateInfo( VkPipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineRasterizationStateCreateInfo( *reinterpret_cast<PipelineRasterizationStateCreateInfo const *>( &rhs ) )
+    {}
+
+
+    PipelineRasterizationStateCreateInfo & operator=( PipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PipelineRasterizationStateCreateInfo & operator=( VkPipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each assigns one member and returns *this for chaining.
+    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setDepthClampEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthClampEnable = depthClampEnable_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rasterizerDiscardEnable = rasterizerDiscardEnable_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setPolygonMode( VULKAN_HPP_NAMESPACE::PolygonMode polygonMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      polygonMode = polygonMode_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      cullMode = cullMode_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace_ ) VULKAN_HPP_NOEXCEPT
+    {
+      frontFace = frontFace_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthBiasEnable = depthBiasEnable_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setDepthBiasConstantFactor( float depthBiasConstantFactor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthBiasConstantFactor = depthBiasConstantFactor_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setDepthBiasClamp( float depthBiasClamp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthBiasClamp = depthBiasClamp_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setDepthBiasSlopeFactor( float depthBiasSlopeFactor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthBiasSlopeFactor = depthBiasSlopeFactor_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setLineWidth( float lineWidth_ ) VULKAN_HPP_NOEXCEPT
+    {
+      lineWidth = lineWidth_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the native C struct (aliasing cast, no copy).
+    operator VkPipelineRasterizationStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineRasterizationStateCreateInfo*>( this );
+    }
+
+    operator VkPipelineRasterizationStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineRasterizationStateCreateInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::PolygonMode const &, VULKAN_HPP_NAMESPACE::CullModeFlags const &, VULKAN_HPP_NAMESPACE::FrontFace const &, VULKAN_HPP_NAMESPACE::Bool32 const &, float const &, float const &, float const &, float const &>
+#endif
+      // Tuple of references to all members; backs operator== when reflection is enabled.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, depthClampEnable, rasterizerDiscardEnable, polygonMode, cullMode, frontFace, depthBiasEnable, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor, lineWidth );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PipelineRasterizationStateCreateInfo const & ) const = default;
+#else
+    // Member-wise equality; float members compare exactly (bitwise-equal values only).
+    bool operator==( PipelineRasterizationStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( depthClampEnable == rhs.depthClampEnable )
+          && ( rasterizerDiscardEnable == rhs.rasterizerDiscardEnable )
+          && ( polygonMode == rhs.polygonMode )
+          && ( cullMode == rhs.cullMode )
+          && ( frontFace == rhs.frontFace )
+          && ( depthBiasEnable == rhs.depthBiasEnable )
+          && ( depthBiasConstantFactor == rhs.depthBiasConstantFactor )
+          && ( depthBiasClamp == rhs.depthBiasClamp )
+          && ( depthBiasSlopeFactor == rhs.depthBiasSlopeFactor )
+          && ( lineWidth == rhs.lineWidth );
+#endif
+    }
+
+    bool operator!=( PipelineRasterizationStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // Members mirror VkPipelineRasterizationStateCreateInfo exactly (order and types).
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags = {};
+    VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable = {};
+    VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable = {};
+    VULKAN_HPP_NAMESPACE::PolygonMode polygonMode = VULKAN_HPP_NAMESPACE::PolygonMode::eFill;
+    VULKAN_HPP_NAMESPACE::CullModeFlags cullMode = {};
+    VULKAN_HPP_NAMESPACE::FrontFace frontFace = VULKAN_HPP_NAMESPACE::FrontFace::eCounterClockwise;
+    VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable = {};
+    float depthBiasConstantFactor = {};
+    float depthBiasClamp = {};
+    float depthBiasSlopeFactor = {};
+    float lineWidth = {};
+
+  };
+
+  // Compile-time mapping from the StructureType enum value back to this struct type.
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineRasterizationStateCreateInfo>
+  {
+    using Type = PipelineRasterizationStateCreateInfo;
+  };
+
+  // C++ wrapper for VkPipelineMultisampleStateCreateInfo (sample count, sample
+  // shading, sample mask, alpha-to-coverage/one). Bit-compatible with the
+  // native C struct: the conversion operators below reinterpret_cast *this, so
+  // member order and types must not change. pSampleMask is a non-owning pointer.
+  struct PipelineMultisampleStateCreateInfo
+  {
+    using NativeType = VkPipelineMultisampleStateCreateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineMultisampleStateCreateInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PipelineMultisampleStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable_ = {}, float minSampleShading_ = {}, const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask_ = {}, VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable_ = {}, VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), rasterizationSamples( rasterizationSamples_ ), sampleShadingEnable( sampleShadingEnable_ ), minSampleShading( minSampleShading_ ), pSampleMask( pSampleMask_ ), alphaToCoverageEnable( alphaToCoverageEnable_ ), alphaToOneEnable( alphaToOneEnable_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineMultisampleStateCreateInfo( PipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the native C struct; relies on the layout compatibility noted above.
+    PipelineMultisampleStateCreateInfo( VkPipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineMultisampleStateCreateInfo( *reinterpret_cast<PipelineMultisampleStateCreateInfo const *>( &rhs ) )
+    {}
+
+
+    PipelineMultisampleStateCreateInfo & operator=( PipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PipelineMultisampleStateCreateInfo & operator=( VkPipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each assigns one member and returns *this for chaining.
+    VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setRasterizationSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rasterizationSamples = rasterizationSamples_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setSampleShadingEnable( VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sampleShadingEnable = sampleShadingEnable_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setMinSampleShading( float minSampleShading_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minSampleShading = minSampleShading_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setPSampleMask( const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSampleMask = pSampleMask_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setAlphaToCoverageEnable( VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      alphaToCoverageEnable = alphaToCoverageEnable_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setAlphaToOneEnable( VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      alphaToOneEnable = alphaToOneEnable_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the native C struct (aliasing cast, no copy).
+    operator VkPipelineMultisampleStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineMultisampleStateCreateInfo*>( this );
+    }
+
+    operator VkPipelineMultisampleStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineMultisampleStateCreateInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags const &, VULKAN_HPP_NAMESPACE::SampleCountFlagBits const &, VULKAN_HPP_NAMESPACE::Bool32 const &, float const &, const VULKAN_HPP_NAMESPACE::SampleMask * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      // Tuple of references to all members; backs operator== when reflection is enabled.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, rasterizationSamples, sampleShadingEnable, minSampleShading, pSampleMask, alphaToCoverageEnable, alphaToOneEnable );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PipelineMultisampleStateCreateInfo const & ) const = default;
+#else
+    // Member-wise equality; pSampleMask is compared by address, not mask contents.
+    bool operator==( PipelineMultisampleStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( rasterizationSamples == rhs.rasterizationSamples )
+          && ( sampleShadingEnable == rhs.sampleShadingEnable )
+          && ( minSampleShading == rhs.minSampleShading )
+          && ( pSampleMask == rhs.pSampleMask )
+          && ( alphaToCoverageEnable == rhs.alphaToCoverageEnable )
+          && ( alphaToOneEnable == rhs.alphaToOneEnable );
+#endif
+    }
+
+    bool operator!=( PipelineMultisampleStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // Members mirror VkPipelineMultisampleStateCreateInfo exactly (order and types).
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineMultisampleStateCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
+    VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable = {};
+    float minSampleShading = {};
+    const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask = {};
+    VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable = {};
+    VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable = {};
+
+  };
+
+  // Compile-time mapping from the StructureType enum value back to this struct type.
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineMultisampleStateCreateInfo>
+  {
+    using Type = PipelineMultisampleStateCreateInfo;
+  };
+
+  struct StencilOpState
+  {
+    using NativeType = VkStencilOpState;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR StencilOpState(VULKAN_HPP_NAMESPACE::StencilOp failOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep, VULKAN_HPP_NAMESPACE::StencilOp passOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep, VULKAN_HPP_NAMESPACE::StencilOp depthFailOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep, VULKAN_HPP_NAMESPACE::CompareOp compareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever, uint32_t compareMask_ = {}, uint32_t writeMask_ = {}, uint32_t reference_ = {}) VULKAN_HPP_NOEXCEPT
+    : failOp( failOp_ ), passOp( passOp_ ), depthFailOp( depthFailOp_ ), compareOp( compareOp_ ), compareMask( compareMask_ ), writeMask( writeMask_ ), reference( reference_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR StencilOpState( StencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    StencilOpState( VkStencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT
+      : StencilOpState( *reinterpret_cast<StencilOpState const *>( &rhs ) )
+    {}
+
+
+    StencilOpState & operator=( StencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    StencilOpState & operator=( VkStencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::StencilOpState const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 StencilOpState & setFailOp( VULKAN_HPP_NAMESPACE::StencilOp failOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      failOp = failOp_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 StencilOpState & setPassOp( VULKAN_HPP_NAMESPACE::StencilOp passOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      passOp = passOp_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 StencilOpState & setDepthFailOp( VULKAN_HPP_NAMESPACE::StencilOp depthFailOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthFailOp = depthFailOp_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 StencilOpState & setCompareOp( VULKAN_HPP_NAMESPACE::CompareOp compareOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      compareOp = compareOp_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 StencilOpState & setCompareMask( uint32_t compareMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      compareMask = compareMask_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 StencilOpState & setWriteMask( uint32_t writeMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      writeMask = writeMask_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 StencilOpState & setReference( uint32_t reference_ ) VULKAN_HPP_NOEXCEPT
+    {
+      reference = reference_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkStencilOpState const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkStencilOpState*>( this );
+    }
+
+    operator VkStencilOpState &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkStencilOpState*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StencilOp const &, VULKAN_HPP_NAMESPACE::StencilOp const &, VULKAN_HPP_NAMESPACE::StencilOp const &, VULKAN_HPP_NAMESPACE::CompareOp const &, uint32_t const &, uint32_t const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( failOp, passOp, depthFailOp, compareOp, compareMask, writeMask, reference );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( StencilOpState const & ) const = default;
+#else
+    bool operator==( StencilOpState const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( failOp == rhs.failOp )
+          && ( passOp == rhs.passOp )
+          && ( depthFailOp == rhs.depthFailOp )
+          && ( compareOp == rhs.compareOp )
+          && ( compareMask == rhs.compareMask )
+          && ( writeMask == rhs.writeMask )
+          && ( reference == rhs.reference );
+#endif
+    }
+
+    bool operator!=( StencilOpState const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StencilOp failOp = VULKAN_HPP_NAMESPACE::StencilOp::eKeep;
+    VULKAN_HPP_NAMESPACE::StencilOp passOp = VULKAN_HPP_NAMESPACE::StencilOp::eKeep;
+    VULKAN_HPP_NAMESPACE::StencilOp depthFailOp = VULKAN_HPP_NAMESPACE::StencilOp::eKeep;
+    VULKAN_HPP_NAMESPACE::CompareOp compareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever;
+    uint32_t compareMask = {};
+    uint32_t writeMask = {};
+    uint32_t reference = {};
+
+  };
+
+  // NOTE(review): generated Vulkan-Hpp code carried inside a patch hunk — regenerate
+  // rather than hand-editing logic.
+  // C++ wrapper for VkPipelineDepthStencilStateCreateInfo. The conversion operators
+  // below reinterpret_cast *this to the C struct, so the member list at the bottom
+  // must stay layout-identical to the C definition (same members, same order).
+  struct PipelineDepthStencilStateCreateInfo
+  {
+    using NativeType = VkPipelineDepthStencilStateCreateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineDepthStencilStateCreateInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor: every field defaulted; pNext_ is deliberately the last
+    // parameter so the common fields can be supplied positionally.
+VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable_ = {}, VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever, VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable_ = {}, VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable_ = {}, VULKAN_HPP_NAMESPACE::StencilOpState front_ = {}, VULKAN_HPP_NAMESPACE::StencilOpState back_ = {}, float minDepthBounds_ = {}, float maxDepthBounds_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), depthTestEnable( depthTestEnable_ ), depthWriteEnable( depthWriteEnable_ ), depthCompareOp( depthCompareOp_ ), depthBoundsTestEnable( depthBoundsTestEnable_ ), stencilTestEnable( stencilTestEnable_ ), front( front_ ), back( back_ ), minDepthBounds( minDepthBounds_ ), maxDepthBounds( maxDepthBounds_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateInfo( PipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as this wrapper type.
+    PipelineDepthStencilStateCreateInfo( VkPipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineDepthStencilStateCreateInfo( *reinterpret_cast<PipelineDepthStencilStateCreateInfo const *>( &rhs ) )
+    {}
+
+
+    PipelineDepthStencilStateCreateInfo & operator=( PipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct, again via reinterpret_cast.
+    PipelineDepthStencilStateCreateInfo & operator=( VkPipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each assigns one member and returns *this so calls chain.
+    VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthTestEnable = depthTestEnable_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthWriteEnable = depthWriteEnable_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthCompareOp = depthCompareOp_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthBoundsTestEnable = depthBoundsTestEnable_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stencilTestEnable = stencilTestEnable_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setFront( VULKAN_HPP_NAMESPACE::StencilOpState const & front_ ) VULKAN_HPP_NOEXCEPT
+    {
+      front = front_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setBack( VULKAN_HPP_NAMESPACE::StencilOpState const & back_ ) VULKAN_HPP_NOEXCEPT
+    {
+      back = back_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setMinDepthBounds( float minDepthBounds_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minDepthBounds = minDepthBounds_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setMaxDepthBounds( float maxDepthBounds_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxDepthBounds = maxDepthBounds_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy views of this object as the underlying C struct (assumes identical layout).
+    operator VkPipelineDepthStencilStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineDepthStencilStateCreateInfo*>( this );
+    }
+
+    operator VkPipelineDepthStencilStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineDepthStencilStateCreateInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // reflect(): all members as a std::tie, used by operator== below when enabled.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::CompareOp const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::StencilOpState const &, VULKAN_HPP_NAMESPACE::StencilOpState const &, float const &, float const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, depthTestEnable, depthWriteEnable, depthCompareOp, depthBoundsTestEnable, stencilTestEnable, front, back, minDepthBounds, maxDepthBounds );
+    }
+#endif
+
+
+    // Memberwise comparison: defaulted <=> when available, otherwise explicit ==/!=.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PipelineDepthStencilStateCreateInfo const & ) const = default;
+#else
+    bool operator==( PipelineDepthStencilStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( depthTestEnable == rhs.depthTestEnable )
+          && ( depthWriteEnable == rhs.depthWriteEnable )
+          && ( depthCompareOp == rhs.depthCompareOp )
+          && ( depthBoundsTestEnable == rhs.depthBoundsTestEnable )
+          && ( stencilTestEnable == rhs.stencilTestEnable )
+          && ( front == rhs.front )
+          && ( back == rhs.back )
+          && ( minDepthBounds == rhs.minDepthBounds )
+          && ( maxDepthBounds == rhs.maxDepthBounds );
+#endif
+    }
+
+    bool operator!=( PipelineDepthStencilStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members mirror VkPipelineDepthStencilStateCreateInfo field-for-field; sType is
+    // fixed to this struct's StructureType value. Do not reorder.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDepthStencilStateCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags = {};
+    VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable = {};
+    VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable = {};
+    VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever;
+    VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable = {};
+    VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable = {};
+    VULKAN_HPP_NAMESPACE::StencilOpState front = {};
+    VULKAN_HPP_NAMESPACE::StencilOpState back = {};
+    float minDepthBounds = {};
+    float maxDepthBounds = {};
+
+  };
+
+  // Maps the StructureType enum value back to this C++ wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineDepthStencilStateCreateInfo>
+  {
+    using Type = PipelineDepthStencilStateCreateInfo;
+  };
+
+  // Generated C++ wrapper for VkPipelineColorBlendAttachmentState. Unlike the
+  // CreateInfo structs it has no sType/pNext members (it is not an extensible
+  // struct). The conversion operators reinterpret_cast *this to the C struct, so
+  // the member layout must match the C definition exactly.
+  struct PipelineColorBlendAttachmentState
+  {
+    using NativeType = VkPipelineColorBlendAttachmentState;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor with per-field defaults matching the members below.
+VULKAN_HPP_CONSTEXPR PipelineColorBlendAttachmentState(VULKAN_HPP_NAMESPACE::Bool32 blendEnable_ = {}, VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd, VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd, VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask_ = {}) VULKAN_HPP_NOEXCEPT
+    : blendEnable( blendEnable_ ), srcColorBlendFactor( srcColorBlendFactor_ ), dstColorBlendFactor( dstColorBlendFactor_ ), colorBlendOp( colorBlendOp_ ), srcAlphaBlendFactor( srcAlphaBlendFactor_ ), dstAlphaBlendFactor( dstAlphaBlendFactor_ ), alphaBlendOp( alphaBlendOp_ ), colorWriteMask( colorWriteMask_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineColorBlendAttachmentState( PipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as this wrapper type.
+    PipelineColorBlendAttachmentState( VkPipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineColorBlendAttachmentState( *reinterpret_cast<PipelineColorBlendAttachmentState const *>( &rhs ) )
+    {}
+
+
+    PipelineColorBlendAttachmentState & operator=( PipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct via reinterpret_cast.
+    PipelineColorBlendAttachmentState & operator=( VkPipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each assigns one member and returns *this so calls chain.
+    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & setBlendEnable( VULKAN_HPP_NAMESPACE::Bool32 blendEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      blendEnable = blendEnable_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & setSrcColorBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcColorBlendFactor = srcColorBlendFactor_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & setDstColorBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstColorBlendFactor = dstColorBlendFactor_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & setColorBlendOp( VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      colorBlendOp = colorBlendOp_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & setSrcAlphaBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcAlphaBlendFactor = srcAlphaBlendFactor_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & setDstAlphaBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstAlphaBlendFactor = dstAlphaBlendFactor_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & setAlphaBlendOp( VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      alphaBlendOp = alphaBlendOp_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & setColorWriteMask( VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      colorWriteMask = colorWriteMask_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy views of this object as the underlying C struct (assumes identical layout).
+    operator VkPipelineColorBlendAttachmentState const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineColorBlendAttachmentState*>( this );
+    }
+
+    operator VkPipelineColorBlendAttachmentState &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineColorBlendAttachmentState*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // reflect(): all members as a std::tie, used by operator== below when enabled.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::BlendFactor const &, VULKAN_HPP_NAMESPACE::BlendFactor const &, VULKAN_HPP_NAMESPACE::BlendOp const &, VULKAN_HPP_NAMESPACE::BlendFactor const &, VULKAN_HPP_NAMESPACE::BlendFactor const &, VULKAN_HPP_NAMESPACE::BlendOp const &, VULKAN_HPP_NAMESPACE::ColorComponentFlags const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( blendEnable, srcColorBlendFactor, dstColorBlendFactor, colorBlendOp, srcAlphaBlendFactor, dstAlphaBlendFactor, alphaBlendOp, colorWriteMask );
+    }
+#endif
+
+
+    // Memberwise comparison: defaulted <=> when available, otherwise explicit ==/!=.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PipelineColorBlendAttachmentState const & ) const = default;
+#else
+    bool operator==( PipelineColorBlendAttachmentState const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( blendEnable == rhs.blendEnable )
+          && ( srcColorBlendFactor == rhs.srcColorBlendFactor )
+          && ( dstColorBlendFactor == rhs.dstColorBlendFactor )
+          && ( colorBlendOp == rhs.colorBlendOp )
+          && ( srcAlphaBlendFactor == rhs.srcAlphaBlendFactor )
+          && ( dstAlphaBlendFactor == rhs.dstAlphaBlendFactor )
+          && ( alphaBlendOp == rhs.alphaBlendOp )
+          && ( colorWriteMask == rhs.colorWriteMask );
+#endif
+    }
+
+    bool operator!=( PipelineColorBlendAttachmentState const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members mirror VkPipelineColorBlendAttachmentState field-for-field. Do not reorder.
+    public:
+    VULKAN_HPP_NAMESPACE::Bool32 blendEnable = {};
+    VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
+    VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
+    VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd;
+    VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
+    VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
+    VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd;
+    VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask = {};
+
+  };
+
+  // Generated C++ wrapper for VkPipelineColorBlendStateCreateInfo. Holds a
+  // count+pointer pair (attachmentCount/pAttachments) that the caller must keep
+  // valid, plus a fixed-size blendConstants array wrapped in ArrayWrapper1D.
+  // Conversion operators reinterpret_cast *this to the C struct, so member
+  // layout must match the C definition exactly.
+  struct PipelineColorBlendStateCreateInfo
+  {
+    using NativeType = VkPipelineColorBlendStateCreateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineColorBlendStateCreateInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor: raw count + pointer, every field defaulted, pNext_ last.
+VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_ = {}, VULKAN_HPP_NAMESPACE::LogicOp logicOp_ = VULKAN_HPP_NAMESPACE::LogicOp::eClear, uint32_t attachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState * pAttachments_ = {}, std::array<float,4> const & blendConstants_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), logicOpEnable( logicOpEnable_ ), logicOp( logicOp_ ), attachmentCount( attachmentCount_ ), pAttachments( pAttachments_ ), blendConstants( blendConstants_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo( PipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as this wrapper type.
+    PipelineColorBlendStateCreateInfo( VkPipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineColorBlendStateCreateInfo( *reinterpret_cast<PipelineColorBlendStateCreateInfo const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Enhanced-mode convenience: derives attachmentCount/pAttachments from an
+    // ArrayProxy (the proxy's backing storage must outlive this struct's use).
+    PipelineColorBlendStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_, VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_, VULKAN_HPP_NAMESPACE::LogicOp logicOp_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState> const & attachments_, std::array<float,4> const & blendConstants_ = {}, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), flags( flags_ ), logicOpEnable( logicOpEnable_ ), logicOp( logicOp_ ), attachmentCount( static_cast<uint32_t>( attachments_.size() ) ), pAttachments( attachments_.data() ), blendConstants( blendConstants_ )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    PipelineColorBlendStateCreateInfo & operator=( PipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct via reinterpret_cast.
+    PipelineColorBlendStateCreateInfo & operator=( VkPipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each assigns one member and returns *this so calls chain.
+    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setLogicOpEnable( VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      logicOpEnable = logicOpEnable_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setLogicOp( VULKAN_HPP_NAMESPACE::LogicOp logicOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      logicOp = logicOp_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachmentCount = attachmentCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState * pAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAttachments = pAttachments_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets both attachmentCount and pAttachments from one ArrayProxy, keeping the
+    // count/pointer pair consistent.
+    PipelineColorBlendStateCreateInfo & setAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState> const & attachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachmentCount = static_cast<uint32_t>( attachments_.size() );
+      pAttachments = attachments_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setBlendConstants( std::array<float,4> blendConstants_ ) VULKAN_HPP_NOEXCEPT
+    {
+      blendConstants = blendConstants_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy views of this object as the underlying C struct (assumes identical layout).
+    operator VkPipelineColorBlendStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineColorBlendStateCreateInfo*>( this );
+    }
+
+    operator VkPipelineColorBlendStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineColorBlendStateCreateInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // reflect(): all members as a std::tie, used by operator== below when enabled.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::LogicOp const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState * const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, logicOpEnable, logicOp, attachmentCount, pAttachments, blendConstants );
+    }
+#endif
+
+
+    // Memberwise comparison. Note pAttachments is compared by pointer value, not
+    // by the pointed-to contents.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PipelineColorBlendStateCreateInfo const & ) const = default;
+#else
+    bool operator==( PipelineColorBlendStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( logicOpEnable == rhs.logicOpEnable )
+          && ( logicOp == rhs.logicOp )
+          && ( attachmentCount == rhs.attachmentCount )
+          && ( pAttachments == rhs.pAttachments )
+          && ( blendConstants == rhs.blendConstants );
+#endif
+    }
+
+    bool operator!=( PipelineColorBlendStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members mirror VkPipelineColorBlendStateCreateInfo field-for-field; sType is
+    // fixed to this struct's StructureType value. Do not reorder.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineColorBlendStateCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags = {};
+    VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable = {};
+    VULKAN_HPP_NAMESPACE::LogicOp logicOp = VULKAN_HPP_NAMESPACE::LogicOp::eClear;
+    uint32_t attachmentCount = {};
+    const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState * pAttachments = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> blendConstants = {};
+
+  };
+
+  // Maps the StructureType enum value back to this C++ wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineColorBlendStateCreateInfo>
+  {
+    using Type = PipelineColorBlendStateCreateInfo;
+  };
+
+  // Generated C++ wrapper for VkPipelineDynamicStateCreateInfo. Holds a
+  // count+pointer pair (dynamicStateCount/pDynamicStates) whose backing storage
+  // the caller must keep valid. Conversion operators reinterpret_cast *this to
+  // the C struct, so member layout must match the C definition exactly.
+  struct PipelineDynamicStateCreateInfo
+  {
+    using NativeType = VkPipelineDynamicStateCreateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineDynamicStateCreateInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor: raw count + pointer, every field defaulted, pNext_ last.
+VULKAN_HPP_CONSTEXPR PipelineDynamicStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_ = {}, uint32_t dynamicStateCount_ = {}, const VULKAN_HPP_NAMESPACE::DynamicState * pDynamicStates_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), dynamicStateCount( dynamicStateCount_ ), pDynamicStates( pDynamicStates_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineDynamicStateCreateInfo( PipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as this wrapper type.
+    PipelineDynamicStateCreateInfo( VkPipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineDynamicStateCreateInfo( *reinterpret_cast<PipelineDynamicStateCreateInfo const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Enhanced-mode convenience: derives dynamicStateCount/pDynamicStates from an
+    // ArrayProxy (the proxy's backing storage must outlive this struct's use).
+    PipelineDynamicStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DynamicState> const & dynamicStates_, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), flags( flags_ ), dynamicStateCount( static_cast<uint32_t>( dynamicStates_.size() ) ), pDynamicStates( dynamicStates_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    PipelineDynamicStateCreateInfo & operator=( PipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct via reinterpret_cast.
+    PipelineDynamicStateCreateInfo & operator=( VkPipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each assigns one member and returns *this so calls chain.
+    VULKAN_HPP_CONSTEXPR_14 PipelineDynamicStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineDynamicStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineDynamicStateCreateInfo & setDynamicStateCount( uint32_t dynamicStateCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dynamicStateCount = dynamicStateCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineDynamicStateCreateInfo & setPDynamicStates( const VULKAN_HPP_NAMESPACE::DynamicState * pDynamicStates_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDynamicStates = pDynamicStates_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets both dynamicStateCount and pDynamicStates from one ArrayProxy, keeping
+    // the count/pointer pair consistent.
+    PipelineDynamicStateCreateInfo & setDynamicStates( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DynamicState> const & dynamicStates_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dynamicStateCount = static_cast<uint32_t>( dynamicStates_.size() );
+      pDynamicStates = dynamicStates_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy views of this object as the underlying C struct (assumes identical layout).
+    operator VkPipelineDynamicStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineDynamicStateCreateInfo*>( this );
+    }
+
+    operator VkPipelineDynamicStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineDynamicStateCreateInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // reflect(): all members as a std::tie, used by operator== below when enabled.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::DynamicState * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, dynamicStateCount, pDynamicStates );
+    }
+#endif
+
+
+    // Memberwise comparison. Note pDynamicStates is compared by pointer value,
+    // not by the pointed-to contents.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PipelineDynamicStateCreateInfo const & ) const = default;
+#else
+    bool operator==( PipelineDynamicStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( dynamicStateCount == rhs.dynamicStateCount )
+          && ( pDynamicStates == rhs.pDynamicStates );
+#endif
+    }
+
+    bool operator!=( PipelineDynamicStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members mirror VkPipelineDynamicStateCreateInfo field-for-field; sType is
+    // fixed to this struct's StructureType value. Do not reorder.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDynamicStateCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags = {};
+    uint32_t dynamicStateCount = {};
+    const VULKAN_HPP_NAMESPACE::DynamicState * pDynamicStates = {};
+
+  };
+
+  // Maps the StructureType enum value back to this C++ wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineDynamicStateCreateInfo>
+  {
+    using Type = PipelineDynamicStateCreateInfo;
+  };
+
+  struct GraphicsPipelineCreateInfo
+  {
+    using NativeType = VkGraphicsPipelineCreateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGraphicsPipelineCreateInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo(VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, uint32_t stageCount_ = {}, const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ = {}, const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo * pInputAssemblyState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo * pViewportState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo * pRasterizationState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo * pMultisampleState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo * pDepthStencilState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo * pColorBlendState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, uint32_t subpass_ = {}, VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), stageCount( stageCount_ ), pStages( pStages_ ), pVertexInputState( pVertexInputState_ ), pInputAssemblyState( pInputAssemblyState_ ), pTessellationState( pTessellationState_ ), pViewportState( pViewportState_ ), pRasterizationState( pRasterizationState_ ), pMultisampleState( pMultisampleState_ ), pDepthStencilState( pDepthStencilState_ ), pColorBlendState( pColorBlendState_ ), pDynamicState( pDynamicState_ ), layout( layout_ ), renderPass( renderPass_ ), subpass( subpass_ ), basePipelineHandle( basePipelineHandle_ ), basePipelineIndex( basePipelineIndex_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo( GraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    GraphicsPipelineCreateInfo( VkGraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : GraphicsPipelineCreateInfo( *reinterpret_cast<GraphicsPipelineCreateInfo const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    GraphicsPipelineCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_, const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo * pInputAssemblyState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo * pViewportState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo * pRasterizationState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo * pMultisampleState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo * pDepthStencilState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo * pColorBlendState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, uint32_t subpass_ = {}, VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {}, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), flags( flags_ ), stageCount( static_cast<uint32_t>( stages_.size() ) ), pStages( stages_.data() ), pVertexInputState( pVertexInputState_ ), pInputAssemblyState( pInputAssemblyState_ ), pTessellationState( pTessellationState_ ), pViewportState( pViewportState_ ), pRasterizationState( pRasterizationState_ ), pMultisampleState( pMultisampleState_ ), pDepthStencilState( pDepthStencilState_ ), pColorBlendState( pColorBlendState_ ), pDynamicState( pDynamicState_ ), layout( layout_ ), renderPass( renderPass_ ), subpass( subpass_ ), basePipelineHandle( basePipelineHandle_ ), basePipelineIndex( basePipelineIndex_ )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    GraphicsPipelineCreateInfo & operator=( GraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    GraphicsPipelineCreateInfo & operator=( VkGraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stageCount = stageCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pStages = pStages_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    GraphicsPipelineCreateInfo & setStages( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stageCount = static_cast<uint32_t>( stages_.size() );
+      pStages = stages_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPVertexInputState( const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pVertexInputState = pVertexInputState_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPInputAssemblyState( const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo * pInputAssemblyState_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pInputAssemblyState = pInputAssemblyState_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPTessellationState( const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pTessellationState = pTessellationState_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPViewportState( const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo * pViewportState_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pViewportState = pViewportState_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPRasterizationState( const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo * pRasterizationState_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pRasterizationState = pRasterizationState_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPMultisampleState( const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo * pMultisampleState_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pMultisampleState = pMultisampleState_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPDepthStencilState( const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo * pDepthStencilState_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDepthStencilState = pDepthStencilState_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPColorBlendState( const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo * pColorBlendState_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pColorBlendState = pColorBlendState_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPDynamicState( const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDynamicState = pDynamicState_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      layout = layout_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
+    {
+      renderPass = renderPass_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setSubpass( uint32_t subpass_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subpass = subpass_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT
+    {
+      basePipelineHandle = basePipelineHandle_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      basePipelineIndex = basePipelineIndex_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkGraphicsPipelineCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( this );
+    }
+
+    operator VkGraphicsPipelineCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkGraphicsPipelineCreateInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineCreateFlags const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * const &, const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * const &, const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo * const &, const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * const &, const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo * const &, const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo * const &, const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo * const &, const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo * const &, const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo * const &, const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * const &, VULKAN_HPP_NAMESPACE::PipelineLayout const &, VULKAN_HPP_NAMESPACE::RenderPass const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Pipeline const &, int32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, stageCount, pStages, pVertexInputState, pInputAssemblyState, pTessellationState, pViewportState, pRasterizationState, pMultisampleState, pDepthStencilState, pColorBlendState, pDynamicState, layout, renderPass, subpass, basePipelineHandle, basePipelineIndex );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( GraphicsPipelineCreateInfo const & ) const = default;
+#else
+    bool operator==( GraphicsPipelineCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( stageCount == rhs.stageCount )
+          && ( pStages == rhs.pStages )
+          && ( pVertexInputState == rhs.pVertexInputState )
+          && ( pInputAssemblyState == rhs.pInputAssemblyState )
+          && ( pTessellationState == rhs.pTessellationState )
+          && ( pViewportState == rhs.pViewportState )
+          && ( pRasterizationState == rhs.pRasterizationState )
+          && ( pMultisampleState == rhs.pMultisampleState )
+          && ( pDepthStencilState == rhs.pDepthStencilState )
+          && ( pColorBlendState == rhs.pColorBlendState )
+          && ( pDynamicState == rhs.pDynamicState )
+          && ( layout == rhs.layout )
+          && ( renderPass == rhs.renderPass )
+          && ( subpass == rhs.subpass )
+          && ( basePipelineHandle == rhs.basePipelineHandle )
+          && ( basePipelineIndex == rhs.basePipelineIndex );
+#endif
+    }
+
+    bool operator!=( GraphicsPipelineCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsPipelineCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {};
+    uint32_t stageCount = {};
+    const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages = {};
+    const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState = {};
+    const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo * pInputAssemblyState = {};
+    const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState = {};
+    const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo * pViewportState = {};
+    const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo * pRasterizationState = {};
+    const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo * pMultisampleState = {};
+    const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo * pDepthStencilState = {};
+    const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo * pColorBlendState = {};
+    const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState = {};
+    VULKAN_HPP_NAMESPACE::PipelineLayout layout = {};
+    VULKAN_HPP_NAMESPACE::RenderPass renderPass = {};
+    uint32_t subpass = {};
+    VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {};
+    int32_t basePipelineIndex = {};
+
+  };
+
+  // Compile-time mapping from the sType enum value back to the C++ struct
+  // type (used by Vulkan-Hpp's type-lookup machinery).
+  template <>
+  struct CppType<StructureType, StructureType::eGraphicsPipelineCreateInfo>
+  {
+    using Type = GraphicsPipelineCreateInfo;
+  };
+
+  // NOTE(review): auto-generated Vulkan-Hpp code carried inside a patch hunk
+  // (every line '+'-prefixed); do not hand-edit — regenerate from the Vulkan
+  // registry instead.
+  //
+  // C++ wrapper for VkGraphicsPipelineLibraryCreateInfoEXT
+  // (VK_EXT_graphics_pipeline_library). The conversion operators below use
+  // reinterpret_cast, so this struct must remain layout-compatible with the
+  // C struct: sType, pNext, flags in exactly this order and nothing else.
+  struct GraphicsPipelineLibraryCreateInfoEXT
+  {
+    using NativeType = VkGraphicsPipelineLibraryCreateInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGraphicsPipelineLibraryCreateInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR GraphicsPipelineLibraryCreateInfoEXT(VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryFlagsEXT flags_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR GraphicsPipelineLibraryCreateInfoEXT( GraphicsPipelineLibraryCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by bit-for-bit reinterpretation
+    // (relies on the layout compatibility noted above).
+    GraphicsPipelineLibraryCreateInfoEXT( VkGraphicsPipelineLibraryCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : GraphicsPipelineLibraryCreateInfoEXT( *reinterpret_cast<GraphicsPipelineLibraryCreateInfoEXT const *>( &rhs ) )
+    {}
+
+
+    GraphicsPipelineLibraryCreateInfoEXT & operator=( GraphicsPipelineLibraryCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    GraphicsPipelineLibraryCreateInfoEXT & operator=( VkGraphicsPipelineLibraryCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters: each returns *this so calls can be fluent.
+    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineLibraryCreateInfoEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineLibraryCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions so the wrapper can be passed directly to the C API.
+    operator VkGraphicsPipelineLibraryCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkGraphicsPipelineLibraryCreateInfoEXT*>( this );
+    }
+
+    operator VkGraphicsPipelineLibraryCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkGraphicsPipelineLibraryCreateInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryFlagsEXT const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( GraphicsPipelineLibraryCreateInfoEXT const & ) const = default;
+#else
+    // Member-wise equality; pNext is compared as a raw pointer (shallow).
+    bool operator==( GraphicsPipelineLibraryCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags );
+#endif
+    }
+
+    bool operator!=( GraphicsPipelineLibraryCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Member order mirrors the C struct; do not reorder (see reinterpret_cast
+    // conversions above).
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsPipelineLibraryCreateInfoEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryFlagsEXT flags = {};
+
+  };
+
+  // Compile-time mapping from the sType enum value back to the C++ struct
+  // type (used by Vulkan-Hpp's type-lookup machinery).
+  template <>
+  struct CppType<StructureType, StructureType::eGraphicsPipelineLibraryCreateInfoEXT>
+  {
+    using Type = GraphicsPipelineLibraryCreateInfoEXT;
+  };
+
+  // NOTE(review): auto-generated Vulkan-Hpp code carried inside a patch hunk;
+  // do not hand-edit — regenerate from the Vulkan registry instead.
+  //
+  // C++ wrapper for VkGraphicsShaderGroupCreateInfoNV. The conversion
+  // operators below use reinterpret_cast, so this struct must stay
+  // layout-compatible with the C struct (member order/layout exact).
+  struct GraphicsShaderGroupCreateInfoNV
+  {
+    using NativeType = VkGraphicsShaderGroupCreateInfoNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGraphicsShaderGroupCreateInfoNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR GraphicsShaderGroupCreateInfoNV(uint32_t stageCount_ = {}, const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ = {}, const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), stageCount( stageCount_ ), pStages( pStages_ ), pVertexInputState( pVertexInputState_ ), pTessellationState( pTessellationState_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR GraphicsShaderGroupCreateInfoNV( GraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    GraphicsShaderGroupCreateInfoNV( VkGraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : GraphicsShaderGroupCreateInfoNV( *reinterpret_cast<GraphicsShaderGroupCreateInfoNV const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Enhanced-mode constructor: derives stageCount/pStages from the proxy,
+    // keeping the count and pointer consistent by construction.
+    GraphicsShaderGroupCreateInfoNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_, const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ = {}, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), stageCount( static_cast<uint32_t>( stages_.size() ) ), pStages( stages_.data() ), pVertexInputState( pVertexInputState_ ), pTessellationState( pTessellationState_ )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    GraphicsShaderGroupCreateInfoNV & operator=( GraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    GraphicsShaderGroupCreateInfoNV & operator=( VkGraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters: each returns *this so calls can be fluent.
+    VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stageCount = stageCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV & setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pStages = pStages_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets both stageCount and pStages from one proxy (keeps them in sync).
+    GraphicsShaderGroupCreateInfoNV & setStages( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stageCount = static_cast<uint32_t>( stages_.size() );
+      pStages = stages_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV & setPVertexInputState( const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pVertexInputState = pVertexInputState_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV & setPTessellationState( const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pTessellationState = pTessellationState_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions so the wrapper can be passed directly to the C API.
+    operator VkGraphicsShaderGroupCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkGraphicsShaderGroupCreateInfoNV*>( this );
+    }
+
+    operator VkGraphicsShaderGroupCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkGraphicsShaderGroupCreateInfoNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * const &, const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * const &, const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, stageCount, pStages, pVertexInputState, pTessellationState );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( GraphicsShaderGroupCreateInfoNV const & ) const = default;
+#else
+    // Member-wise equality; pointer members compare as raw pointers (shallow).
+    bool operator==( GraphicsShaderGroupCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( stageCount == rhs.stageCount )
+          && ( pStages == rhs.pStages )
+          && ( pVertexInputState == rhs.pVertexInputState )
+          && ( pTessellationState == rhs.pTessellationState );
+#endif
+    }
+
+    bool operator!=( GraphicsShaderGroupCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Member order mirrors the C struct; do not reorder (see reinterpret_cast
+    // conversions above).
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsShaderGroupCreateInfoNV;
+    const void * pNext = {};
+    uint32_t stageCount = {};
+    const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages = {};
+    const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState = {};
+    const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState = {};
+
+  };
+
+  // Compile-time mapping from the sType enum value back to the C++ struct
+  // type (used by Vulkan-Hpp's type-lookup machinery).
+  template <>
+  struct CppType<StructureType, StructureType::eGraphicsShaderGroupCreateInfoNV>
+  {
+    using Type = GraphicsShaderGroupCreateInfoNV;
+  };
+
+  // NOTE(review): auto-generated Vulkan-Hpp code carried inside a patch hunk;
+  // do not hand-edit — regenerate from the Vulkan registry instead.
+  //
+  // C++ wrapper for VkGraphicsPipelineShaderGroupsCreateInfoNV. The
+  // conversion operators below use reinterpret_cast, so this struct must stay
+  // layout-compatible with the C struct (member order/layout exact).
+  struct GraphicsPipelineShaderGroupsCreateInfoNV
+  {
+    using NativeType = VkGraphicsPipelineShaderGroupsCreateInfoNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR GraphicsPipelineShaderGroupsCreateInfoNV(uint32_t groupCount_ = {}, const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV * pGroups_ = {}, uint32_t pipelineCount_ = {}, const VULKAN_HPP_NAMESPACE::Pipeline * pPipelines_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), groupCount( groupCount_ ), pGroups( pGroups_ ), pipelineCount( pipelineCount_ ), pPipelines( pPipelines_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR GraphicsPipelineShaderGroupsCreateInfoNV( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    GraphicsPipelineShaderGroupsCreateInfoNV( VkGraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : GraphicsPipelineShaderGroupsCreateInfoNV( *reinterpret_cast<GraphicsPipelineShaderGroupsCreateInfoNV const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Enhanced-mode constructor: derives both (count, pointer) pairs from the
+    // proxies, keeping them consistent by construction.
+    GraphicsPipelineShaderGroupsCreateInfoNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV> const & groups_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Pipeline> const & pipelines_ = {}, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), groupCount( static_cast<uint32_t>( groups_.size() ) ), pGroups( groups_.data() ), pipelineCount( static_cast<uint32_t>( pipelines_.size() ) ), pPipelines( pipelines_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    GraphicsPipelineShaderGroupsCreateInfoNV & operator=( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    GraphicsPipelineShaderGroupsCreateInfoNV & operator=( VkGraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GraphicsPipelineShaderGroupsCreateInfoNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters: each returns *this so calls can be fluent.
+    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV & setGroupCount( uint32_t groupCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      groupCount = groupCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV & setPGroups( const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV * pGroups_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pGroups = pGroups_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets both groupCount and pGroups from one proxy (keeps them in sync).
+    GraphicsPipelineShaderGroupsCreateInfoNV & setGroups( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV> const & groups_ ) VULKAN_HPP_NOEXCEPT
+    {
+      groupCount = static_cast<uint32_t>( groups_.size() );
+      pGroups = groups_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV & setPipelineCount( uint32_t pipelineCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineCount = pipelineCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV & setPPipelines( const VULKAN_HPP_NAMESPACE::Pipeline * pPipelines_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pPipelines = pPipelines_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets both pipelineCount and pPipelines from one proxy (keeps them in sync).
+    GraphicsPipelineShaderGroupsCreateInfoNV & setPipelines( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Pipeline> const & pipelines_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineCount = static_cast<uint32_t>( pipelines_.size() );
+      pPipelines = pipelines_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions so the wrapper can be passed directly to the C API.
+    operator VkGraphicsPipelineShaderGroupsCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkGraphicsPipelineShaderGroupsCreateInfoNV*>( this );
+    }
+
+    operator VkGraphicsPipelineShaderGroupsCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkGraphicsPipelineShaderGroupsCreateInfoNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Pipeline * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, groupCount, pGroups, pipelineCount, pPipelines );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( GraphicsPipelineShaderGroupsCreateInfoNV const & ) const = default;
+#else
+    // Member-wise equality; pointer members compare as raw pointers (shallow).
+    bool operator==( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( groupCount == rhs.groupCount )
+          && ( pGroups == rhs.pGroups )
+          && ( pipelineCount == rhs.pipelineCount )
+          && ( pPipelines == rhs.pPipelines );
+#endif
+    }
+
+    bool operator!=( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Member order mirrors the C struct; do not reorder (see reinterpret_cast
+    // conversions above).
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV;
+    const void * pNext = {};
+    uint32_t groupCount = {};
+    const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV * pGroups = {};
+    uint32_t pipelineCount = {};
+    const VULKAN_HPP_NAMESPACE::Pipeline * pPipelines = {};
+
+  };
+
+  // Compile-time mapping from the sType enum value back to the C++ struct
+  // type (used by Vulkan-Hpp's type-lookup machinery).
+  template <>
+  struct CppType<StructureType, StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV>
+  {
+    using Type = GraphicsPipelineShaderGroupsCreateInfoNV;
+  };
+
+  // NOTE(review): auto-generated Vulkan-Hpp code carried inside a patch hunk;
+  // do not hand-edit — regenerate from the Vulkan registry instead.
+  //
+  // C++ wrapper for VkXYColorEXT: a plain (x, y) float pair with no sType /
+  // pNext. Used below by HdrMetadataEXT for display primaries and white
+  // point (presumably CIE 1931 xy chromaticity coordinates — confirm against
+  // the VK_EXT_hdr_metadata specification). Layout-compatible with the C
+  // struct via the reinterpret_cast conversions below.
+  struct XYColorEXT
+  {
+    using NativeType = VkXYColorEXT;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR XYColorEXT(float x_ = {}, float y_ = {}) VULKAN_HPP_NOEXCEPT
+    : x( x_ ), y( y_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR XYColorEXT( XYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    XYColorEXT( VkXYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : XYColorEXT( *reinterpret_cast<XYColorEXT const *>( &rhs ) )
+    {}
+
+
+    XYColorEXT & operator=( XYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    XYColorEXT & operator=( VkXYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::XYColorEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters: each returns *this so calls can be fluent.
+    VULKAN_HPP_CONSTEXPR_14 XYColorEXT & setX( float x_ ) VULKAN_HPP_NOEXCEPT
+    {
+      x = x_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 XYColorEXT & setY( float y_ ) VULKAN_HPP_NOEXCEPT
+    {
+      y = y_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions so the wrapper can be passed directly to the C API.
+    operator VkXYColorEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkXYColorEXT*>( this );
+    }
+
+    operator VkXYColorEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkXYColorEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<float const &, float const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( x, y );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( XYColorEXT const & ) const = default;
+#else
+    // Exact float comparison (generated behavior) — no epsilon tolerance.
+    bool operator==( XYColorEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( x == rhs.x )
+          && ( y == rhs.y );
+#endif
+    }
+
+    bool operator!=( XYColorEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    float x = {};
+    float y = {};
+
+  };
+
+  // C++ wrapper for VkHdrMetadataEXT. Layout-identical to the C struct: the
+  // conversion operators below reinterpret_cast *this, so members must stay in
+  // exactly this order. NOTE(review): generated Vulkan-Hpp code — fix upstream
+  // in the generator rather than hand-editing this vendored copy.
+  struct HdrMetadataEXT
+  {
+    using NativeType = VkHdrMetadataEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eHdrMetadataEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+// Value constructor; pNext_ is last so all metadata fields can be given positionally.
+VULKAN_HPP_CONSTEXPR HdrMetadataEXT(VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryRed_ = {}, VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryGreen_ = {}, VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryBlue_ = {}, VULKAN_HPP_NAMESPACE::XYColorEXT whitePoint_ = {}, float maxLuminance_ = {}, float minLuminance_ = {}, float maxContentLightLevel_ = {}, float maxFrameAverageLightLevel_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), displayPrimaryRed( displayPrimaryRed_ ), displayPrimaryGreen( displayPrimaryGreen_ ), displayPrimaryBlue( displayPrimaryBlue_ ), whitePoint( whitePoint_ ), maxLuminance( maxLuminance_ ), minLuminance( minLuminance_ ), maxContentLightLevel( maxContentLightLevel_ ), maxFrameAverageLightLevel( maxFrameAverageLightLevel_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR HdrMetadataEXT( HdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct — valid because the layouts are identical.
+    HdrMetadataEXT( VkHdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : HdrMetadataEXT( *reinterpret_cast<HdrMetadataEXT const *>( &rhs ) )
+    {}
+
+
+    HdrMetadataEXT & operator=( HdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    HdrMetadataEXT & operator=( VkHdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::HdrMetadataEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable fluent setters; each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setDisplayPrimaryRed( VULKAN_HPP_NAMESPACE::XYColorEXT const & displayPrimaryRed_ ) VULKAN_HPP_NOEXCEPT
+    {
+      displayPrimaryRed = displayPrimaryRed_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setDisplayPrimaryGreen( VULKAN_HPP_NAMESPACE::XYColorEXT const & displayPrimaryGreen_ ) VULKAN_HPP_NOEXCEPT
+    {
+      displayPrimaryGreen = displayPrimaryGreen_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setDisplayPrimaryBlue( VULKAN_HPP_NAMESPACE::XYColorEXT const & displayPrimaryBlue_ ) VULKAN_HPP_NOEXCEPT
+    {
+      displayPrimaryBlue = displayPrimaryBlue_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setWhitePoint( VULKAN_HPP_NAMESPACE::XYColorEXT const & whitePoint_ ) VULKAN_HPP_NOEXCEPT
+    {
+      whitePoint = whitePoint_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setMaxLuminance( float maxLuminance_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxLuminance = maxLuminance_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setMinLuminance( float minLuminance_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minLuminance = minLuminance_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setMaxContentLightLevel( float maxContentLightLevel_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxContentLightLevel = maxContentLightLevel_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setMaxFrameAverageLightLevel( float maxFrameAverageLightLevel_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxFrameAverageLightLevel = maxFrameAverageLightLevel_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C type for passing straight to the C API.
+    operator VkHdrMetadataEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkHdrMetadataEXT*>( this );
+    }
+
+    operator VkHdrMetadataEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkHdrMetadataEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::XYColorEXT const &, VULKAN_HPP_NAMESPACE::XYColorEXT const &, VULKAN_HPP_NAMESPACE::XYColorEXT const &, VULKAN_HPP_NAMESPACE::XYColorEXT const &, float const &, float const &, float const &, float const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, displayPrimaryRed, displayPrimaryGreen, displayPrimaryBlue, whitePoint, maxLuminance, minLuminance, maxContentLightLevel, maxFrameAverageLightLevel );
+    }
+#endif
+
+
+    // Member-wise comparison: C++20 <=> when available, else == / !=.
+    // NOTE(review): float members compare with exact == (no epsilon) — this is
+    // the generator's deliberate output, not a bug to "fix" here.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( HdrMetadataEXT const & ) const = default;
+#else
+    bool operator==( HdrMetadataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( displayPrimaryRed == rhs.displayPrimaryRed )
+          && ( displayPrimaryGreen == rhs.displayPrimaryGreen )
+          && ( displayPrimaryBlue == rhs.displayPrimaryBlue )
+          && ( whitePoint == rhs.whitePoint )
+          && ( maxLuminance == rhs.maxLuminance )
+          && ( minLuminance == rhs.minLuminance )
+          && ( maxContentLightLevel == rhs.maxContentLightLevel )
+          && ( maxFrameAverageLightLevel == rhs.maxFrameAverageLightLevel );
+#endif
+    }
+
+    bool operator!=( HdrMetadataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHdrMetadataEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryRed = {};
+    VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryGreen = {};
+    VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryBlue = {};
+    VULKAN_HPP_NAMESPACE::XYColorEXT whitePoint = {};
+    float maxLuminance = {};
+    float minLuminance = {};
+    float maxContentLightLevel = {};
+    float maxFrameAverageLightLevel = {};
+
+  };
+
+  // CppType trait: maps StructureType::eHdrMetadataEXT back to the wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eHdrMetadataEXT>
+  {
+    using Type = HdrMetadataEXT;
+  };
+
+  // C++ wrapper for VkHeadlessSurfaceCreateInfoEXT. Layout-identical to the C
+  // struct (the reinterpret_cast conversions below depend on it).
+  // NOTE(review): generated Vulkan-Hpp code — prefer regenerating over hand edits.
+  struct HeadlessSurfaceCreateInfoEXT
+  {
+    using NativeType = VkHeadlessSurfaceCreateInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eHeadlessSurfaceCreateInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR HeadlessSurfaceCreateInfoEXT(VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR HeadlessSurfaceCreateInfoEXT( HeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct — valid because the layouts are identical.
+    HeadlessSurfaceCreateInfoEXT( VkHeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : HeadlessSurfaceCreateInfoEXT( *reinterpret_cast<HeadlessSurfaceCreateInfoEXT const *>( &rhs ) )
+    {}
+
+
+    HeadlessSurfaceCreateInfoEXT & operator=( HeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    HeadlessSurfaceCreateInfoEXT & operator=( VkHeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable fluent setters; each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 HeadlessSurfaceCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 HeadlessSurfaceCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C type.
+    operator VkHeadlessSurfaceCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT*>( this );
+    }
+
+    operator VkHeadlessSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkHeadlessSurfaceCreateInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags );
+    }
+#endif
+
+
+    // Member-wise comparison: C++20 <=> when available, else == / !=.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( HeadlessSurfaceCreateInfoEXT const & ) const = default;
+#else
+    bool operator==( HeadlessSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags );
+#endif
+    }
+
+    bool operator!=( HeadlessSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHeadlessSurfaceCreateInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags = {};
+
+  };
+
+  // CppType trait: maps StructureType::eHeadlessSurfaceCreateInfoEXT to the wrapper.
+  template <>
+  struct CppType<StructureType, StructureType::eHeadlessSurfaceCreateInfoEXT>
+  {
+    using Type = HeadlessSurfaceCreateInfoEXT;
+  };
+
+// Only compiled for MoltenVK iOS targets (VK_USE_PLATFORM_IOS_MVK).
+#if defined( VK_USE_PLATFORM_IOS_MVK )
+  // C++ wrapper for VkIOSSurfaceCreateInfoMVK. Layout-identical to the C struct.
+  // NOTE(review): generated Vulkan-Hpp code — prefer regenerating over hand edits.
+  struct IOSSurfaceCreateInfoMVK
+  {
+    using NativeType = VkIOSSurfaceCreateInfoMVK;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIosSurfaceCreateInfoMVK;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR IOSSurfaceCreateInfoMVK(VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags_ = {}, const void * pView_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), pView( pView_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR IOSSurfaceCreateInfoMVK( IOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct — valid because the layouts are identical.
+    IOSSurfaceCreateInfoMVK( VkIOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
+      : IOSSurfaceCreateInfoMVK( *reinterpret_cast<IOSSurfaceCreateInfoMVK const *>( &rhs ) )
+    {}
+
+
+    IOSSurfaceCreateInfoMVK & operator=( IOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    IOSSurfaceCreateInfoMVK & operator=( VkIOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable fluent setters; each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 IOSSurfaceCreateInfoMVK & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 IOSSurfaceCreateInfoMVK & setFlags( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    // pView is an opaque platform handle; stored as-is, not owned by this struct.
+    VULKAN_HPP_CONSTEXPR_14 IOSSurfaceCreateInfoMVK & setPView( const void * pView_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pView = pView_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C type.
+    operator VkIOSSurfaceCreateInfoMVK const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkIOSSurfaceCreateInfoMVK*>( this );
+    }
+
+    operator VkIOSSurfaceCreateInfoMVK &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkIOSSurfaceCreateInfoMVK*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK const &, const void * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, pView );
+    }
+#endif
+
+
+    // Member-wise comparison: C++20 <=> when available, else == / != (pView
+    // compares by pointer identity, not by pointee).
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( IOSSurfaceCreateInfoMVK const & ) const = default;
+#else
+    bool operator==( IOSSurfaceCreateInfoMVK const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( pView == rhs.pView );
+#endif
+    }
+
+    bool operator!=( IOSSurfaceCreateInfoMVK const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIosSurfaceCreateInfoMVK;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags = {};
+    const void * pView = {};
+
+  };
+
+  // CppType trait: maps StructureType::eIosSurfaceCreateInfoMVK to the wrapper.
+  template <>
+  struct CppType<StructureType, StructureType::eIosSurfaceCreateInfoMVK>
+  {
+    using Type = IOSSurfaceCreateInfoMVK;
+  };
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+  // C++ wrapper for VkImageBlit. A plain data struct: unlike the *CreateInfo
+  // wrappers it has no sType/pNext and no CppType specialization. Offsets are
+  // stored as ArrayWrapper1D<Offset3D, 2> (wrapping the C array) but the
+  // constructor/setters accept std::array for convenience.
+  // NOTE(review): generated Vulkan-Hpp code — prefer regenerating over hand edits.
+  struct ImageBlit
+  {
+    using NativeType = VkImageBlit;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR_14 ImageBlit(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const & srcOffsets_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const & dstOffsets_ = {}) VULKAN_HPP_NOEXCEPT
+    : srcSubresource( srcSubresource_ ), srcOffsets( srcOffsets_ ), dstSubresource( dstSubresource_ ), dstOffsets( dstOffsets_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 ImageBlit( ImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct — valid because the layouts are identical.
+    ImageBlit( VkImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImageBlit( *reinterpret_cast<ImageBlit const *>( &rhs ) )
+    {}
+
+
+    ImageBlit & operator=( ImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ImageBlit & operator=( VkImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageBlit const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable fluent setters; each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 ImageBlit & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcSubresource = srcSubresource_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageBlit & setSrcOffsets( std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const & srcOffsets_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcOffsets = srcOffsets_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageBlit & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstSubresource = dstSubresource_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageBlit & setDstOffsets( std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const & dstOffsets_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstOffsets = dstOffsets_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C type.
+    operator VkImageBlit const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageBlit*>( this );
+    }
+
+    operator VkImageBlit &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageBlit*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::Offset3D, 2> const &, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::Offset3D, 2> const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( srcSubresource, srcOffsets, dstSubresource, dstOffsets );
+    }
+#endif
+
+
+    // Member-wise comparison: C++20 <=> when available, else == / !=.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ImageBlit const & ) const = default;
+#else
+    bool operator==( ImageBlit const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( srcSubresource == rhs.srcSubresource )
+          && ( srcOffsets == rhs.srcOffsets )
+          && ( dstSubresource == rhs.dstSubresource )
+          && ( dstOffsets == rhs.dstOffsets );
+#endif
+    }
+
+    bool operator!=( ImageBlit const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::Offset3D, 2> srcOffsets = {};
+    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::Offset3D, 2> dstOffsets = {};
+
+  };
+
+  // C++ wrapper for VkImageCaptureDescriptorDataInfoEXT. Layout-identical to
+  // the C struct. NOTE(review): generated Vulkan-Hpp code — prefer regenerating.
+  struct ImageCaptureDescriptorDataInfoEXT
+  {
+    using NativeType = VkImageCaptureDescriptorDataInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageCaptureDescriptorDataInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ImageCaptureDescriptorDataInfoEXT(VULKAN_HPP_NAMESPACE::Image image_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), image( image_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageCaptureDescriptorDataInfoEXT( ImageCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct — valid because the layouts are identical.
+    ImageCaptureDescriptorDataInfoEXT( VkImageCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImageCaptureDescriptorDataInfoEXT( *reinterpret_cast<ImageCaptureDescriptorDataInfoEXT const *>( &rhs ) )
+    {}
+
+
+    ImageCaptureDescriptorDataInfoEXT & operator=( ImageCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ImageCaptureDescriptorDataInfoEXT & operator=( VkImageCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable fluent setters; each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 ImageCaptureDescriptorDataInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageCaptureDescriptorDataInfoEXT & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
+    {
+      image = image_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C type.
+    operator VkImageCaptureDescriptorDataInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageCaptureDescriptorDataInfoEXT*>( this );
+    }
+
+    operator VkImageCaptureDescriptorDataInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageCaptureDescriptorDataInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Image const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, image );
+    }
+#endif
+
+
+    // Member-wise comparison: C++20 <=> when available, else == / !=.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ImageCaptureDescriptorDataInfoEXT const & ) const = default;
+#else
+    bool operator==( ImageCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( image == rhs.image );
+#endif
+    }
+
+    bool operator!=( ImageCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageCaptureDescriptorDataInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Image image = {};
+
+  };
+
+  // CppType trait: maps StructureType::eImageCaptureDescriptorDataInfoEXT to the wrapper.
+  template <>
+  struct CppType<StructureType, StructureType::eImageCaptureDescriptorDataInfoEXT>
+  {
+    using Type = ImageCaptureDescriptorDataInfoEXT;
+  };
+
+  // C++ wrapper for VkImageCompressionControlEXT. Layout-identical to the C
+  // struct. Carries a count + pointer pair (compressionControlPlaneCount /
+  // pFixedRateFlags); the ArrayProxy constructor and setFixedRateFlags keep the
+  // two in sync. The pointed-to array is NOT owned or copied — the caller must
+  // keep it alive while this struct is in use.
+  // NOTE(review): generated Vulkan-Hpp code — prefer regenerating over hand edits.
+  struct ImageCompressionControlEXT
+  {
+    using NativeType = VkImageCompressionControlEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageCompressionControlEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ImageCompressionControlEXT(VULKAN_HPP_NAMESPACE::ImageCompressionFlagsEXT flags_ = {}, uint32_t compressionControlPlaneCount_ = {}, VULKAN_HPP_NAMESPACE::ImageCompressionFixedRateFlagsEXT * pFixedRateFlags_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), compressionControlPlaneCount( compressionControlPlaneCount_ ), pFixedRateFlags( pFixedRateFlags_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageCompressionControlEXT( ImageCompressionControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct — valid because the layouts are identical.
+    ImageCompressionControlEXT( VkImageCompressionControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImageCompressionControlEXT( *reinterpret_cast<ImageCompressionControlEXT const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Enhanced-mode convenience: derives count and pointer from one ArrayProxy.
+    ImageCompressionControlEXT( VULKAN_HPP_NAMESPACE::ImageCompressionFlagsEXT flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::ImageCompressionFixedRateFlagsEXT> const & fixedRateFlags_, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), flags( flags_ ), compressionControlPlaneCount( static_cast<uint32_t>( fixedRateFlags_.size() ) ), pFixedRateFlags( fixedRateFlags_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    ImageCompressionControlEXT & operator=( ImageCompressionControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ImageCompressionControlEXT & operator=( VkImageCompressionControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageCompressionControlEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable fluent setters; each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 ImageCompressionControlEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageCompressionControlEXT & setFlags( VULKAN_HPP_NAMESPACE::ImageCompressionFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageCompressionControlEXT & setCompressionControlPlaneCount( uint32_t compressionControlPlaneCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      compressionControlPlaneCount = compressionControlPlaneCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageCompressionControlEXT & setPFixedRateFlags( VULKAN_HPP_NAMESPACE::ImageCompressionFixedRateFlagsEXT * pFixedRateFlags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pFixedRateFlags = pFixedRateFlags_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets count and pointer together from one ArrayProxy (keeps them consistent).
+    ImageCompressionControlEXT & setFixedRateFlags( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::ImageCompressionFixedRateFlagsEXT> const & fixedRateFlags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      compressionControlPlaneCount = static_cast<uint32_t>( fixedRateFlags_.size() );
+      pFixedRateFlags = fixedRateFlags_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C type.
+    operator VkImageCompressionControlEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageCompressionControlEXT*>( this );
+    }
+
+    operator VkImageCompressionControlEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageCompressionControlEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageCompressionFlagsEXT const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ImageCompressionFixedRateFlagsEXT * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, compressionControlPlaneCount, pFixedRateFlags );
+    }
+#endif
+
+
+    // Member-wise comparison: C++20 <=> when available, else == / !=
+    // (pFixedRateFlags compares by pointer identity, not by array contents).
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ImageCompressionControlEXT const & ) const = default;
+#else
+    bool operator==( ImageCompressionControlEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( compressionControlPlaneCount == rhs.compressionControlPlaneCount )
+          && ( pFixedRateFlags == rhs.pFixedRateFlags );
+#endif
+    }
+
+    bool operator!=( ImageCompressionControlEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageCompressionControlEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ImageCompressionFlagsEXT flags = {};
+    uint32_t compressionControlPlaneCount = {};
+    VULKAN_HPP_NAMESPACE::ImageCompressionFixedRateFlagsEXT * pFixedRateFlags = {};
+
+  };
+
+  // CppType trait: maps StructureType::eImageCompressionControlEXT to the wrapper.
+  template <>
+  struct CppType<StructureType, StructureType::eImageCompressionControlEXT>
+  {
+    using Type = ImageCompressionControlEXT;
+  };
+
+  // C++ wrapper for VkImageCompressionPropertiesEXT. Layout-identical to the C
+  // struct. Note: unlike the *CreateInfo wrappers above, no setter block is
+  // generated (and pNext is non-const void*) — this struct is filled in by the
+  // implementation rather than by the application.
+  // NOTE(review): generated Vulkan-Hpp code — prefer regenerating over hand edits.
+  struct ImageCompressionPropertiesEXT
+  {
+    using NativeType = VkImageCompressionPropertiesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageCompressionPropertiesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ImageCompressionPropertiesEXT(VULKAN_HPP_NAMESPACE::ImageCompressionFlagsEXT imageCompressionFlags_ = {}, VULKAN_HPP_NAMESPACE::ImageCompressionFixedRateFlagsEXT imageCompressionFixedRateFlags_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), imageCompressionFlags( imageCompressionFlags_ ), imageCompressionFixedRateFlags( imageCompressionFixedRateFlags_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageCompressionPropertiesEXT( ImageCompressionPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct — valid because the layouts are identical.
+    ImageCompressionPropertiesEXT( VkImageCompressionPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImageCompressionPropertiesEXT( *reinterpret_cast<ImageCompressionPropertiesEXT const *>( &rhs ) )
+    {}
+
+
+    ImageCompressionPropertiesEXT & operator=( ImageCompressionPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ImageCompressionPropertiesEXT & operator=( VkImageCompressionPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageCompressionPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+
+    // Zero-cost conversions to the C type.
+    operator VkImageCompressionPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageCompressionPropertiesEXT*>( this );
+    }
+
+    operator VkImageCompressionPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageCompressionPropertiesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ImageCompressionFlagsEXT const &, VULKAN_HPP_NAMESPACE::ImageCompressionFixedRateFlagsEXT const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, imageCompressionFlags, imageCompressionFixedRateFlags );
+    }
+#endif
+
+
+    // Member-wise comparison: C++20 <=> when available, else == / !=.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ImageCompressionPropertiesEXT const & ) const = default;
+#else
+    bool operator==( ImageCompressionPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( imageCompressionFlags == rhs.imageCompressionFlags )
+          && ( imageCompressionFixedRateFlags == rhs.imageCompressionFixedRateFlags );
+#endif
+    }
+
+    bool operator!=( ImageCompressionPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageCompressionPropertiesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ImageCompressionFlagsEXT imageCompressionFlags = {};
+    VULKAN_HPP_NAMESPACE::ImageCompressionFixedRateFlagsEXT imageCompressionFixedRateFlags = {};
+
+  };
+
+  // CppType trait: maps StructureType::eImageCompressionPropertiesEXT to the wrapper.
+  template <>
+  struct CppType<StructureType, StructureType::eImageCompressionPropertiesEXT>
+  {
+    using Type = ImageCompressionPropertiesEXT;
+  };
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  struct ImageFormatConstraintsInfoFUCHSIA
+  {
+    using NativeType = VkImageFormatConstraintsInfoFUCHSIA;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageFormatConstraintsInfoFUCHSIA;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ImageFormatConstraintsInfoFUCHSIA(VULKAN_HPP_NAMESPACE::ImageCreateInfo imageCreateInfo_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures_ = {}, VULKAN_HPP_NAMESPACE::ImageFormatConstraintsFlagsFUCHSIA flags_ = {}, uint64_t sysmemPixelFormat_ = {}, uint32_t colorSpaceCount_ = {}, const VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA * pColorSpaces_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), imageCreateInfo( imageCreateInfo_ ), requiredFormatFeatures( requiredFormatFeatures_ ), flags( flags_ ), sysmemPixelFormat( sysmemPixelFormat_ ), colorSpaceCount( colorSpaceCount_ ), pColorSpaces( pColorSpaces_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageFormatConstraintsInfoFUCHSIA( ImageFormatConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageFormatConstraintsInfoFUCHSIA( VkImageFormatConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImageFormatConstraintsInfoFUCHSIA( *reinterpret_cast<ImageFormatConstraintsInfoFUCHSIA const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    ImageFormatConstraintsInfoFUCHSIA( VULKAN_HPP_NAMESPACE::ImageCreateInfo imageCreateInfo_, VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures_, VULKAN_HPP_NAMESPACE::ImageFormatConstraintsFlagsFUCHSIA flags_, uint64_t sysmemPixelFormat_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA> const & colorSpaces_, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), imageCreateInfo( imageCreateInfo_ ), requiredFormatFeatures( requiredFormatFeatures_ ), flags( flags_ ), sysmemPixelFormat( sysmemPixelFormat_ ), colorSpaceCount( static_cast<uint32_t>( colorSpaces_.size() ) ), pColorSpaces( colorSpaces_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    ImageFormatConstraintsInfoFUCHSIA & operator=( ImageFormatConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ImageFormatConstraintsInfoFUCHSIA & operator=( VkImageFormatConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & setImageCreateInfo( VULKAN_HPP_NAMESPACE::ImageCreateInfo const & imageCreateInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageCreateInfo = imageCreateInfo_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & setRequiredFormatFeatures( VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures_ ) VULKAN_HPP_NOEXCEPT
+    {
+      requiredFormatFeatures = requiredFormatFeatures_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & setFlags( VULKAN_HPP_NAMESPACE::ImageFormatConstraintsFlagsFUCHSIA flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & setSysmemPixelFormat( uint64_t sysmemPixelFormat_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sysmemPixelFormat = sysmemPixelFormat_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & setColorSpaceCount( uint32_t colorSpaceCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      colorSpaceCount = colorSpaceCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & setPColorSpaces( const VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA * pColorSpaces_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pColorSpaces = pColorSpaces_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    ImageFormatConstraintsInfoFUCHSIA & setColorSpaces( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA> const & colorSpaces_ ) VULKAN_HPP_NOEXCEPT
+    {
+      colorSpaceCount = static_cast<uint32_t>( colorSpaces_.size() );
+      pColorSpaces = colorSpaces_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkImageFormatConstraintsInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageFormatConstraintsInfoFUCHSIA*>( this );
+    }
+
+    operator VkImageFormatConstraintsInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageFormatConstraintsInfoFUCHSIA*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageCreateInfo const &, VULKAN_HPP_NAMESPACE::FormatFeatureFlags const &, VULKAN_HPP_NAMESPACE::ImageFormatConstraintsFlagsFUCHSIA const &, uint64_t const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, imageCreateInfo, requiredFormatFeatures, flags, sysmemPixelFormat, colorSpaceCount, pColorSpaces );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ImageFormatConstraintsInfoFUCHSIA const & ) const = default;
+#else
+    bool operator==( ImageFormatConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( imageCreateInfo == rhs.imageCreateInfo )
+          && ( requiredFormatFeatures == rhs.requiredFormatFeatures )
+          && ( flags == rhs.flags )
+          && ( sysmemPixelFormat == rhs.sysmemPixelFormat )
+          && ( colorSpaceCount == rhs.colorSpaceCount )
+          && ( pColorSpaces == rhs.pColorSpaces );
+#endif
+    }
+
+    bool operator!=( ImageFormatConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageFormatConstraintsInfoFUCHSIA;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ImageCreateInfo imageCreateInfo = {};
+    VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures = {};
+    VULKAN_HPP_NAMESPACE::ImageFormatConstraintsFlagsFUCHSIA flags = {};
+    uint64_t sysmemPixelFormat = {};
+    uint32_t colorSpaceCount = {};
+    const VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA * pColorSpaces = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eImageFormatConstraintsInfoFUCHSIA>
+  {
+    using Type = ImageFormatConstraintsInfoFUCHSIA;
+  };
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  // C++ wrapper for VkImageConstraintsInfoFUCHSIA (VK_FUCHSIA_buffer_collection).
+  // Must stay layout-compatible with the C struct: the conversion operators and
+  // Vk*-based constructor/assignment below rely on reinterpret_cast, so members
+  // may not be added, removed, or reordered.
+  struct ImageConstraintsInfoFUCHSIA
+  {
+    using NativeType = VkImageConstraintsInfoFUCHSIA;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageConstraintsInfoFUCHSIA;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ImageConstraintsInfoFUCHSIA(uint32_t formatConstraintsCount_ = {}, const VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA * pFormatConstraints_ = {}, VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints_ = {}, VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFlagsFUCHSIA flags_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), formatConstraintsCount( formatConstraintsCount_ ), pFormatConstraints( pFormatConstraints_ ), bufferCollectionConstraints( bufferCollectionConstraints_ ), flags( flags_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageConstraintsInfoFUCHSIA( ImageConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the (layout-identical) C++ type.
+    ImageConstraintsInfoFUCHSIA( VkImageConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImageConstraintsInfoFUCHSIA( *reinterpret_cast<ImageConstraintsInfoFUCHSIA const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Enhanced-mode constructor: count and pointer are derived from the array proxy,
+    // so they cannot get out of sync. The proxy does not own the data; the caller
+    // must keep the underlying array alive while this struct is in use.
+    ImageConstraintsInfoFUCHSIA( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA> const & formatConstraints_, VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints_ = {}, VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFlagsFUCHSIA flags_ = {}, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), formatConstraintsCount( static_cast<uint32_t>( formatConstraints_.size() ) ), pFormatConstraints( formatConstraints_.data() ), bufferCollectionConstraints( bufferCollectionConstraints_ ), flags( flags_ )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    ImageConstraintsInfoFUCHSIA & operator=( ImageConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ImageConstraintsInfoFUCHSIA & operator=( VkImageConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters, one per member (sType excluded by design).
+    VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA & setFormatConstraintsCount( uint32_t formatConstraintsCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      formatConstraintsCount = formatConstraintsCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA & setPFormatConstraints( const VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA * pFormatConstraints_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pFormatConstraints = pFormatConstraints_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets both count and pointer from a single array proxy.
+    ImageConstraintsInfoFUCHSIA & setFormatConstraints( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA> const & formatConstraints_ ) VULKAN_HPP_NOEXCEPT
+    {
+      formatConstraintsCount = static_cast<uint32_t>( formatConstraints_.size() );
+      pFormatConstraints = formatConstraints_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA & setBufferCollectionConstraints( VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA const & bufferCollectionConstraints_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferCollectionConstraints = bufferCollectionConstraints_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA & setFlags( VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFlagsFUCHSIA flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C struct for passing to the C API.
+    operator VkImageConstraintsInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageConstraintsInfoFUCHSIA*>( this );
+    }
+
+    operator VkImageConstraintsInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageConstraintsInfoFUCHSIA*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA * const &, VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA const &, VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFlagsFUCHSIA const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, formatConstraintsCount, pFormatConstraints, bufferCollectionConstraints, flags );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ImageConstraintsInfoFUCHSIA const & ) const = default;
+#else
+    // NOTE: compares pNext/pFormatConstraints by pointer value, not by pointee
+    // contents — shallow equality, consistent with the other generated structs.
+    bool operator==( ImageConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( formatConstraintsCount == rhs.formatConstraintsCount )
+          && ( pFormatConstraints == rhs.pFormatConstraints )
+          && ( bufferCollectionConstraints == rhs.bufferCollectionConstraints )
+          && ( flags == rhs.flags );
+#endif
+    }
+
+    bool operator!=( ImageConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageConstraintsInfoFUCHSIA;
+    const void * pNext = {};
+    uint32_t formatConstraintsCount = {};
+    const VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA * pFormatConstraints = {};
+    VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints = {};
+    VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFlagsFUCHSIA flags = {};
+
+  };
+
+  // Maps the StructureType enumerant back to this C++ type (used by structure chains).
+  template <>
+  struct CppType<StructureType, StructureType::eImageConstraintsInfoFUCHSIA>
+  {
+    using Type = ImageConstraintsInfoFUCHSIA;
+  };
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  // C++ wrapper for VkImageCopy. Plain (non-extensible) struct: it has no
+  // sType/pNext, only the copy-region members. Layout-compatible with the C
+  // struct; the conversion operators below rely on reinterpret_cast.
+  struct ImageCopy
+  {
+    using NativeType = VkImageCopy;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ImageCopy(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}) VULKAN_HPP_NOEXCEPT
+    : srcSubresource( srcSubresource_ ), srcOffset( srcOffset_ ), dstSubresource( dstSubresource_ ), dstOffset( dstOffset_ ), extent( extent_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageCopy( ImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the (layout-identical) C++ type.
+    ImageCopy( VkImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImageCopy( *reinterpret_cast<ImageCopy const *>( &rhs ) )
+    {}
+
+
+    ImageCopy & operator=( ImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ImageCopy & operator=( VkImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageCopy const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters, one per member.
+    VULKAN_HPP_CONSTEXPR_14 ImageCopy & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcSubresource = srcSubresource_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageCopy & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcOffset = srcOffset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageCopy & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstSubresource = dstSubresource_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageCopy & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstOffset = dstOffset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageCopy & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extent = extent_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C struct for passing to the C API.
+    operator VkImageCopy const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageCopy*>( this );
+    }
+
+    operator VkImageCopy &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageCopy*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &, VULKAN_HPP_NAMESPACE::Offset3D const &, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &, VULKAN_HPP_NAMESPACE::Offset3D const &, VULKAN_HPP_NAMESPACE::Extent3D const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( srcSubresource, srcOffset, dstSubresource, dstOffset, extent );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ImageCopy const & ) const = default;
+#else
+    bool operator==( ImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( srcSubresource == rhs.srcSubresource )
+          && ( srcOffset == rhs.srcOffset )
+          && ( dstSubresource == rhs.dstSubresource )
+          && ( dstOffset == rhs.dstOffset )
+          && ( extent == rhs.extent );
+#endif
+    }
+
+    bool operator!=( ImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
+    VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {};
+    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
+    VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {};
+    VULKAN_HPP_NAMESPACE::Extent3D extent = {};
+
+  };
+
+  // C++ wrapper for VkSubresourceLayout. Plain (non-extensible) struct of five
+  // DeviceSize members; no sType/pNext. Layout-compatible with the C struct —
+  // the conversion operators below rely on reinterpret_cast.
+  struct SubresourceLayout
+  {
+    using NativeType = VkSubresourceLayout;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR SubresourceLayout(VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize rowPitch_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize arrayPitch_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize depthPitch_ = {}) VULKAN_HPP_NOEXCEPT
+    : offset( offset_ ), size( size_ ), rowPitch( rowPitch_ ), arrayPitch( arrayPitch_ ), depthPitch( depthPitch_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SubresourceLayout( SubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the (layout-identical) C++ type.
+    SubresourceLayout( VkSubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SubresourceLayout( *reinterpret_cast<SubresourceLayout const *>( &rhs ) )
+    {}
+
+
+    SubresourceLayout & operator=( SubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SubresourceLayout & operator=( VkSubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubresourceLayout const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters, one per member.
+    VULKAN_HPP_CONSTEXPR_14 SubresourceLayout & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SubresourceLayout & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
+    {
+      size = size_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SubresourceLayout & setRowPitch( VULKAN_HPP_NAMESPACE::DeviceSize rowPitch_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rowPitch = rowPitch_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SubresourceLayout & setArrayPitch( VULKAN_HPP_NAMESPACE::DeviceSize arrayPitch_ ) VULKAN_HPP_NOEXCEPT
+    {
+      arrayPitch = arrayPitch_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SubresourceLayout & setDepthPitch( VULKAN_HPP_NAMESPACE::DeviceSize depthPitch_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthPitch = depthPitch_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C struct for passing to the C API.
+    operator VkSubresourceLayout const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSubresourceLayout*>( this );
+    }
+
+    operator VkSubresourceLayout &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSubresourceLayout*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( offset, size, rowPitch, arrayPitch, depthPitch );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SubresourceLayout const & ) const = default;
+#else
+    bool operator==( SubresourceLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( offset == rhs.offset )
+          && ( size == rhs.size )
+          && ( rowPitch == rhs.rowPitch )
+          && ( arrayPitch == rhs.arrayPitch )
+          && ( depthPitch == rhs.depthPitch );
+#endif
+    }
+
+    bool operator!=( SubresourceLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize rowPitch = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize arrayPitch = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize depthPitch = {};
+
+  };
+
+  // C++ wrapper for VkImageDrmFormatModifierExplicitCreateInfoEXT
+  // (VK_EXT_image_drm_format_modifier). Layout-compatible with the C struct:
+  // the reinterpret_cast-based conversions below depend on identical member
+  // order and count.
+  struct ImageDrmFormatModifierExplicitCreateInfoEXT
+  {
+    using NativeType = VkImageDrmFormatModifierExplicitCreateInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierExplicitCreateInfoEXT(uint64_t drmFormatModifier_ = {}, uint32_t drmFormatModifierPlaneCount_ = {}, const VULKAN_HPP_NAMESPACE::SubresourceLayout * pPlaneLayouts_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), drmFormatModifier( drmFormatModifier_ ), drmFormatModifierPlaneCount( drmFormatModifierPlaneCount_ ), pPlaneLayouts( pPlaneLayouts_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierExplicitCreateInfoEXT( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the (layout-identical) C++ type.
+    ImageDrmFormatModifierExplicitCreateInfoEXT( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImageDrmFormatModifierExplicitCreateInfoEXT( *reinterpret_cast<ImageDrmFormatModifierExplicitCreateInfoEXT const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Enhanced-mode constructor: plane count and pointer are derived from the
+    // array proxy. The proxy does not own the data; the caller must keep the
+    // layout array alive while this struct is in use.
+    ImageDrmFormatModifierExplicitCreateInfoEXT( uint64_t drmFormatModifier_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubresourceLayout> const & planeLayouts_, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), drmFormatModifier( drmFormatModifier_ ), drmFormatModifierPlaneCount( static_cast<uint32_t>( planeLayouts_.size() ) ), pPlaneLayouts( planeLayouts_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    ImageDrmFormatModifierExplicitCreateInfoEXT & operator=( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ImageDrmFormatModifierExplicitCreateInfoEXT & operator=( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierExplicitCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters, one per member (sType excluded by design).
+    VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierExplicitCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierExplicitCreateInfoEXT & setDrmFormatModifier( uint64_t drmFormatModifier_ ) VULKAN_HPP_NOEXCEPT
+    {
+      drmFormatModifier = drmFormatModifier_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierExplicitCreateInfoEXT & setDrmFormatModifierPlaneCount( uint32_t drmFormatModifierPlaneCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      drmFormatModifierPlaneCount = drmFormatModifierPlaneCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierExplicitCreateInfoEXT & setPPlaneLayouts( const VULKAN_HPP_NAMESPACE::SubresourceLayout * pPlaneLayouts_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pPlaneLayouts = pPlaneLayouts_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets both plane count and pointer from a single array proxy.
+    ImageDrmFormatModifierExplicitCreateInfoEXT & setPlaneLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubresourceLayout> const & planeLayouts_ ) VULKAN_HPP_NOEXCEPT
+    {
+      drmFormatModifierPlaneCount = static_cast<uint32_t>( planeLayouts_.size() );
+      pPlaneLayouts = planeLayouts_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C struct for passing to the C API.
+    operator VkImageDrmFormatModifierExplicitCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageDrmFormatModifierExplicitCreateInfoEXT*>( this );
+    }
+
+    operator VkImageDrmFormatModifierExplicitCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageDrmFormatModifierExplicitCreateInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint64_t const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::SubresourceLayout * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, drmFormatModifier, drmFormatModifierPlaneCount, pPlaneLayouts );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ImageDrmFormatModifierExplicitCreateInfoEXT const & ) const = default;
+#else
+    // NOTE: pNext/pPlaneLayouts are compared by pointer value, not by pointee contents.
+    bool operator==( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( drmFormatModifier == rhs.drmFormatModifier )
+          && ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount )
+          && ( pPlaneLayouts == rhs.pPlaneLayouts );
+#endif
+    }
+
+    bool operator!=( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT;
+    const void * pNext = {};
+    uint64_t drmFormatModifier = {};
+    uint32_t drmFormatModifierPlaneCount = {};
+    const VULKAN_HPP_NAMESPACE::SubresourceLayout * pPlaneLayouts = {};
+
+  };
+
+  // Maps the StructureType enumerant back to this C++ type (used by structure chains).
+  template <>
+  struct CppType<StructureType, StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT>
+  {
+    using Type = ImageDrmFormatModifierExplicitCreateInfoEXT;
+  };
+
+  // C++ wrapper for VkImageDrmFormatModifierListCreateInfoEXT
+  // (VK_EXT_image_drm_format_modifier). Layout-compatible with the C struct:
+  // the reinterpret_cast-based conversions below depend on identical member
+  // order and count.
+  struct ImageDrmFormatModifierListCreateInfoEXT
+  {
+    using NativeType = VkImageDrmFormatModifierListCreateInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageDrmFormatModifierListCreateInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierListCreateInfoEXT(uint32_t drmFormatModifierCount_ = {}, const uint64_t * pDrmFormatModifiers_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), drmFormatModifierCount( drmFormatModifierCount_ ), pDrmFormatModifiers( pDrmFormatModifiers_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierListCreateInfoEXT( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the (layout-identical) C++ type.
+    ImageDrmFormatModifierListCreateInfoEXT( VkImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImageDrmFormatModifierListCreateInfoEXT( *reinterpret_cast<ImageDrmFormatModifierListCreateInfoEXT const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Enhanced-mode constructor: modifier count and pointer are derived from the
+    // array proxy. The proxy does not own the data; the caller must keep the
+    // modifier array alive while this struct is in use.
+    ImageDrmFormatModifierListCreateInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & drmFormatModifiers_, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), drmFormatModifierCount( static_cast<uint32_t>( drmFormatModifiers_.size() ) ), pDrmFormatModifiers( drmFormatModifiers_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    ImageDrmFormatModifierListCreateInfoEXT & operator=( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ImageDrmFormatModifierListCreateInfoEXT & operator=( VkImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierListCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters, one per member (sType excluded by design).
+    VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierListCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierListCreateInfoEXT & setDrmFormatModifierCount( uint32_t drmFormatModifierCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      drmFormatModifierCount = drmFormatModifierCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierListCreateInfoEXT & setPDrmFormatModifiers( const uint64_t * pDrmFormatModifiers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDrmFormatModifiers = pDrmFormatModifiers_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets both count and pointer from a single array proxy.
+    ImageDrmFormatModifierListCreateInfoEXT & setDrmFormatModifiers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & drmFormatModifiers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      drmFormatModifierCount = static_cast<uint32_t>( drmFormatModifiers_.size() );
+      pDrmFormatModifiers = drmFormatModifiers_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C struct for passing to the C API.
+    operator VkImageDrmFormatModifierListCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageDrmFormatModifierListCreateInfoEXT*>( this );
+    }
+
+    operator VkImageDrmFormatModifierListCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageDrmFormatModifierListCreateInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const uint64_t * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, drmFormatModifierCount, pDrmFormatModifiers );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ImageDrmFormatModifierListCreateInfoEXT const & ) const = default;
+#else
+    // NOTE: pNext/pDrmFormatModifiers are compared by pointer value, not by pointee contents.
+    bool operator==( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( drmFormatModifierCount == rhs.drmFormatModifierCount )
+          && ( pDrmFormatModifiers == rhs.pDrmFormatModifiers );
+#endif
+    }
+
+    bool operator!=( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierListCreateInfoEXT;
+    const void * pNext = {};
+    uint32_t drmFormatModifierCount = {};
+    const uint64_t * pDrmFormatModifiers = {};
+
+  };
+
+  // Maps the StructureType enumerant back to this C++ type (used by structure chains).
+  template <>
+  struct CppType<StructureType, StructureType::eImageDrmFormatModifierListCreateInfoEXT>
+  {
+    using Type = ImageDrmFormatModifierListCreateInfoEXT;
+  };
+
+  struct ImageDrmFormatModifierPropertiesEXT
+  {
+    using NativeType = VkImageDrmFormatModifierPropertiesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageDrmFormatModifierPropertiesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierPropertiesEXT(uint64_t drmFormatModifier_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), drmFormatModifier( drmFormatModifier_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierPropertiesEXT( ImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageDrmFormatModifierPropertiesEXT( VkImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImageDrmFormatModifierPropertiesEXT( *reinterpret_cast<ImageDrmFormatModifierPropertiesEXT const *>( &rhs ) )
+    {}
+
+
+    ImageDrmFormatModifierPropertiesEXT & operator=( ImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ImageDrmFormatModifierPropertiesEXT & operator=( VkImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkImageDrmFormatModifierPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageDrmFormatModifierPropertiesEXT*>( this );
+    }
+
+    operator VkImageDrmFormatModifierPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint64_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, drmFormatModifier );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ImageDrmFormatModifierPropertiesEXT const & ) const = default;
+#else
+    bool operator==( ImageDrmFormatModifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( drmFormatModifier == rhs.drmFormatModifier );
+#endif
+    }
+
+    bool operator!=( ImageDrmFormatModifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierPropertiesEXT;
+    void * pNext = {};
+    uint64_t drmFormatModifier = {};
+
+  };
+
+  // Maps StructureType::eImageDrmFormatModifierPropertiesEXT back to its C++ wrapper
+  // type; used by the StructureChain/pNext machinery to validate chained structs.
+  template <>
+  struct CppType<StructureType, StructureType::eImageDrmFormatModifierPropertiesEXT>
+  {
+    using Type = ImageDrmFormatModifierPropertiesEXT;
+  };
+
+  // C++ wrapper for VkImageFormatListCreateInfo (list of formats that views of a
+  // mutable-format image may use). Layout-identical to the C struct, which is what
+  // makes the reinterpret_casts below valid.
+  struct ImageFormatListCreateInfo
+  {
+    using NativeType = VkImageFormatListCreateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageFormatListCreateInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ImageFormatListCreateInfo(uint32_t viewFormatCount_ = {}, const VULKAN_HPP_NAMESPACE::Format * pViewFormats_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), viewFormatCount( viewFormatCount_ ), pViewFormats( pViewFormats_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageFormatListCreateInfo( ImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; safe because both types share an identical layout.
+    ImageFormatListCreateInfo( VkImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImageFormatListCreateInfo( *reinterpret_cast<ImageFormatListCreateInfo const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Convenience: derive viewFormatCount/pViewFormats from an array proxy.
+    ImageFormatListCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & viewFormats_, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), viewFormatCount( static_cast<uint32_t>( viewFormats_.size() ) ), pViewFormats( viewFormats_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    ImageFormatListCreateInfo & operator=( ImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ImageFormatListCreateInfo & operator=( VkImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageFormatListCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters (builder style): each returns *this for chaining.
+    VULKAN_HPP_CONSTEXPR_14 ImageFormatListCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageFormatListCreateInfo & setViewFormatCount( uint32_t viewFormatCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      viewFormatCount = viewFormatCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageFormatListCreateInfo & setPViewFormats( const VULKAN_HPP_NAMESPACE::Format * pViewFormats_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pViewFormats = pViewFormats_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets count and pointer together from an array proxy.
+    ImageFormatListCreateInfo & setViewFormats( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & viewFormats_ ) VULKAN_HPP_NOEXCEPT
+    {
+      viewFormatCount = static_cast<uint32_t>( viewFormats_.size() );
+      pViewFormats = viewFormats_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C struct, for passing to the C API.
+    operator VkImageFormatListCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageFormatListCreateInfo*>( this );
+    }
+
+    operator VkImageFormatListCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageFormatListCreateInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Format * const &>
+#endif
+      // Returns a tuple of references to all members, in declaration order.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, viewFormatCount, pViewFormats );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ImageFormatListCreateInfo const & ) const = default;
+#else
+    // Shallow comparison: pointer members are compared by address, not pointees.
+    bool operator==( ImageFormatListCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( viewFormatCount == rhs.viewFormatCount )
+          && ( pViewFormats == rhs.pViewFormats );
+#endif
+    }
+
+    bool operator!=( ImageFormatListCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageFormatListCreateInfo;
+    const void * pNext = {};
+    uint32_t viewFormatCount = {};
+    const VULKAN_HPP_NAMESPACE::Format * pViewFormats = {};
+
+  };
+
+  // StructureType -> wrapper-type mapping for pNext chain validation.
+  template <>
+  struct CppType<StructureType, StructureType::eImageFormatListCreateInfo>
+  {
+    using Type = ImageFormatListCreateInfo;
+  };
+  // Extension alias: the struct was promoted to core from VK_KHR_image_format_list.
+  using ImageFormatListCreateInfoKHR = ImageFormatListCreateInfo;
+
+  // C++ wrapper for VkImageFormatProperties2 (output struct of
+  // vkGetPhysicalDeviceImageFormatProperties2). Output-only, hence a mutable
+  // pNext and no setters. Layout-identical to the C struct.
+  struct ImageFormatProperties2
+  {
+    using NativeType = VkImageFormatProperties2;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageFormatProperties2;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ImageFormatProperties2(VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), imageFormatProperties( imageFormatProperties_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageFormatProperties2( ImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; safe because both types share an identical layout.
+    ImageFormatProperties2( VkImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImageFormatProperties2( *reinterpret_cast<ImageFormatProperties2 const *>( &rhs ) )
+    {}
+
+
+    ImageFormatProperties2 & operator=( ImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ImageFormatProperties2 & operator=( VkImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageFormatProperties2 const *>( &rhs );
+      return *this;
+    }
+
+
+    // Zero-cost conversions to the C struct, for passing to the C API.
+    operator VkImageFormatProperties2 const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageFormatProperties2*>( this );
+    }
+
+    operator VkImageFormatProperties2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageFormatProperties2*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ImageFormatProperties const &>
+#endif
+      // Returns a tuple of references to all members, in declaration order.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, imageFormatProperties );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ImageFormatProperties2 const & ) const = default;
+#else
+    // Shallow comparison: pNext is compared by address, not pointee.
+    bool operator==( ImageFormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( imageFormatProperties == rhs.imageFormatProperties );
+#endif
+    }
+
+    bool operator!=( ImageFormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageFormatProperties2;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties = {};
+
+  };
+
+  // StructureType -> wrapper-type mapping for pNext chain validation.
+  template <>
+  struct CppType<StructureType, StructureType::eImageFormatProperties2>
+  {
+    using Type = ImageFormatProperties2;
+  };
+  // Extension alias: promoted to core from VK_KHR_get_physical_device_properties2.
+  using ImageFormatProperties2KHR = ImageFormatProperties2;
+
+  // C++ wrapper for VkImageMemoryBarrier (image layout transition / queue family
+  // ownership transfer barrier used with vkCmdPipelineBarrier). Layout-identical
+  // to the C struct, which is what makes the reinterpret_casts below valid.
+  struct ImageMemoryBarrier
+  {
+    using NativeType = VkImageMemoryBarrier;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageMemoryBarrier;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ImageMemoryBarrier(VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, uint32_t srcQueueFamilyIndex_ = {}, uint32_t dstQueueFamilyIndex_ = {}, VULKAN_HPP_NAMESPACE::Image image_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), srcAccessMask( srcAccessMask_ ), dstAccessMask( dstAccessMask_ ), oldLayout( oldLayout_ ), newLayout( newLayout_ ), srcQueueFamilyIndex( srcQueueFamilyIndex_ ), dstQueueFamilyIndex( dstQueueFamilyIndex_ ), image( image_ ), subresourceRange( subresourceRange_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageMemoryBarrier( ImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; safe because both types share an identical layout.
+    ImageMemoryBarrier( VkImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImageMemoryBarrier( *reinterpret_cast<ImageMemoryBarrier const *>( &rhs ) )
+    {}
+
+
+    ImageMemoryBarrier & operator=( ImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ImageMemoryBarrier & operator=( VkImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageMemoryBarrier const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters (builder style): each returns *this for chaining.
+    VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcAccessMask = srcAccessMask_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstAccessMask = dstAccessMask_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setOldLayout( VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      oldLayout = oldLayout_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setNewLayout( VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      newLayout = newLayout_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcQueueFamilyIndex = srcQueueFamilyIndex_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstQueueFamilyIndex = dstQueueFamilyIndex_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
+    {
+      image = image_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subresourceRange = subresourceRange_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C struct, for passing to the C API.
+    operator VkImageMemoryBarrier const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageMemoryBarrier*>( this );
+    }
+
+    operator VkImageMemoryBarrier &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageMemoryBarrier*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::AccessFlags const &, VULKAN_HPP_NAMESPACE::AccessFlags const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Image const &, VULKAN_HPP_NAMESPACE::ImageSubresourceRange const &>
+#endif
+      // Returns a tuple of references to all members, in declaration order.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, srcAccessMask, dstAccessMask, oldLayout, newLayout, srcQueueFamilyIndex, dstQueueFamilyIndex, image, subresourceRange );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ImageMemoryBarrier const & ) const = default;
+#else
+    // Shallow comparison: pNext is compared by address, not pointee.
+    bool operator==( ImageMemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( srcAccessMask == rhs.srcAccessMask )
+          && ( dstAccessMask == rhs.dstAccessMask )
+          && ( oldLayout == rhs.oldLayout )
+          && ( newLayout == rhs.newLayout )
+          && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex )
+          && ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex )
+          && ( image == rhs.image )
+          && ( subresourceRange == rhs.subresourceRange );
+#endif
+    }
+
+    bool operator!=( ImageMemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryBarrier;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {};
+    VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {};
+    VULKAN_HPP_NAMESPACE::ImageLayout oldLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+    VULKAN_HPP_NAMESPACE::ImageLayout newLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+    uint32_t srcQueueFamilyIndex = {};
+    uint32_t dstQueueFamilyIndex = {};
+    VULKAN_HPP_NAMESPACE::Image image = {};
+    VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {};
+
+  };
+
+  // StructureType -> wrapper-type mapping for pNext chain validation.
+  template <>
+  struct CppType<StructureType, StructureType::eImageMemoryBarrier>
+  {
+    using Type = ImageMemoryBarrier;
+  };
+
+  // C++ wrapper for VkImageMemoryRequirementsInfo2 (input struct of
+  // vkGetImageMemoryRequirements2). Layout-identical to the C struct.
+  struct ImageMemoryRequirementsInfo2
+  {
+    using NativeType = VkImageMemoryRequirementsInfo2;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageMemoryRequirementsInfo2;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ImageMemoryRequirementsInfo2(VULKAN_HPP_NAMESPACE::Image image_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), image( image_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageMemoryRequirementsInfo2( ImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; safe because both types share an identical layout.
+    ImageMemoryRequirementsInfo2( VkImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImageMemoryRequirementsInfo2( *reinterpret_cast<ImageMemoryRequirementsInfo2 const *>( &rhs ) )
+    {}
+
+
+    ImageMemoryRequirementsInfo2 & operator=( ImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ImageMemoryRequirementsInfo2 & operator=( VkImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters (builder style): each returns *this for chaining.
+    VULKAN_HPP_CONSTEXPR_14 ImageMemoryRequirementsInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageMemoryRequirementsInfo2 & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
+    {
+      image = image_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C struct, for passing to the C API.
+    operator VkImageMemoryRequirementsInfo2 const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageMemoryRequirementsInfo2*>( this );
+    }
+
+    operator VkImageMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageMemoryRequirementsInfo2*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Image const &>
+#endif
+      // Returns a tuple of references to all members, in declaration order.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, image );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ImageMemoryRequirementsInfo2 const & ) const = default;
+#else
+    // Shallow comparison: pNext is compared by address, not pointee.
+    bool operator==( ImageMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( image == rhs.image );
+#endif
+    }
+
+    bool operator!=( ImageMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryRequirementsInfo2;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Image image = {};
+
+  };
+
+  // StructureType -> wrapper-type mapping for pNext chain validation.
+  template <>
+  struct CppType<StructureType, StructureType::eImageMemoryRequirementsInfo2>
+  {
+    using Type = ImageMemoryRequirementsInfo2;
+  };
+  // Extension alias: promoted to core from VK_KHR_get_memory_requirements2.
+  using ImageMemoryRequirementsInfo2KHR = ImageMemoryRequirementsInfo2;
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  // C++ wrapper for VkImagePipeSurfaceCreateInfoFUCHSIA (VK_FUCHSIA_imagepipe_surface).
+  // Only compiled when targeting Fuchsia. zx_handle_t is a platform type with no
+  // operator== / operator<=>, so the comparisons below fall back to memcmp instead
+  // of the defaulted operators used by other structs in this file.
+  struct ImagePipeSurfaceCreateInfoFUCHSIA
+  {
+    using NativeType = VkImagePipeSurfaceCreateInfoFUCHSIA;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImagepipeSurfaceCreateInfoFUCHSIA;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ImagePipeSurfaceCreateInfoFUCHSIA(VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags_ = {}, zx_handle_t imagePipeHandle_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), imagePipeHandle( imagePipeHandle_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImagePipeSurfaceCreateInfoFUCHSIA( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; safe because both types share an identical layout.
+    ImagePipeSurfaceCreateInfoFUCHSIA( VkImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImagePipeSurfaceCreateInfoFUCHSIA( *reinterpret_cast<ImagePipeSurfaceCreateInfoFUCHSIA const *>( &rhs ) )
+    {}
+
+
+    ImagePipeSurfaceCreateInfoFUCHSIA & operator=( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ImagePipeSurfaceCreateInfoFUCHSIA & operator=( VkImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters (builder style): each returns *this for chaining.
+    VULKAN_HPP_CONSTEXPR_14 ImagePipeSurfaceCreateInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImagePipeSurfaceCreateInfoFUCHSIA & setFlags( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImagePipeSurfaceCreateInfoFUCHSIA & setImagePipeHandle( zx_handle_t imagePipeHandle_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imagePipeHandle = imagePipeHandle_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C struct, for passing to the C API.
+    operator VkImagePipeSurfaceCreateInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA*>( this );
+    }
+
+    operator VkImagePipeSurfaceCreateInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImagePipeSurfaceCreateInfoFUCHSIA*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA const &, zx_handle_t const &>
+#endif
+      // Returns a tuple of references to all members, in declaration order.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, imagePipeHandle );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    // Hand-written (not defaulted) because zx_handle_t has no operator<=>;
+    // the handle is ordered via memcmp over its raw bytes.
+    std::strong_ordering operator<=>( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
+      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
+      if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) return cmp;
+      if ( auto cmp = memcmp( &imagePipeHandle, &rhs.imagePipeHandle, sizeof( zx_handle_t ) ); cmp != 0 )
+        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
+
+      return std::strong_ordering::equivalent;
+    }
+#endif
+
+    // Defined unconditionally (unlike other structs) because <=> above is custom.
+    bool operator==( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( memcmp( &imagePipeHandle, &rhs.imagePipeHandle, sizeof( zx_handle_t ) ) == 0 );
+    }
+
+    bool operator!=( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImagepipeSurfaceCreateInfoFUCHSIA;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags = {};
+    zx_handle_t imagePipeHandle = {};
+
+  };
+
+  // StructureType -> wrapper-type mapping for pNext chain validation.
+  template <>
+  struct CppType<StructureType, StructureType::eImagepipeSurfaceCreateInfoFUCHSIA>
+  {
+    using Type = ImagePipeSurfaceCreateInfoFUCHSIA;
+  };
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  // C++ wrapper for VkImagePlaneMemoryRequirementsInfo: selects which plane of a
+  // disjoint multi-planar image a memory-requirements query applies to (chained
+  // into ImageMemoryRequirementsInfo2::pNext). Layout-identical to the C struct.
+  struct ImagePlaneMemoryRequirementsInfo
+  {
+    using NativeType = VkImagePlaneMemoryRequirementsInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImagePlaneMemoryRequirementsInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ImagePlaneMemoryRequirementsInfo(VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), planeAspect( planeAspect_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImagePlaneMemoryRequirementsInfo( ImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; safe because both types share an identical layout.
+    ImagePlaneMemoryRequirementsInfo( VkImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImagePlaneMemoryRequirementsInfo( *reinterpret_cast<ImagePlaneMemoryRequirementsInfo const *>( &rhs ) )
+    {}
+
+
+    ImagePlaneMemoryRequirementsInfo & operator=( ImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ImagePlaneMemoryRequirementsInfo & operator=( VkImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters (builder style): each returns *this for chaining.
+    VULKAN_HPP_CONSTEXPR_14 ImagePlaneMemoryRequirementsInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImagePlaneMemoryRequirementsInfo & setPlaneAspect( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ ) VULKAN_HPP_NOEXCEPT
+    {
+      planeAspect = planeAspect_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C struct, for passing to the C API.
+    operator VkImagePlaneMemoryRequirementsInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImagePlaneMemoryRequirementsInfo*>( this );
+    }
+
+    operator VkImagePlaneMemoryRequirementsInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImagePlaneMemoryRequirementsInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageAspectFlagBits const &>
+#endif
+      // Returns a tuple of references to all members, in declaration order.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, planeAspect );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ImagePlaneMemoryRequirementsInfo const & ) const = default;
+#else
+    // Shallow comparison: pNext is compared by address, not pointee.
+    bool operator==( ImagePlaneMemoryRequirementsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( planeAspect == rhs.planeAspect );
+#endif
+    }
+
+    bool operator!=( ImagePlaneMemoryRequirementsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImagePlaneMemoryRequirementsInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor;
+
+  };
+
+  // StructureType -> wrapper-type mapping for pNext chain validation.
+  template <>
+  struct CppType<StructureType, StructureType::eImagePlaneMemoryRequirementsInfo>
+  {
+    using Type = ImagePlaneMemoryRequirementsInfo;
+  };
+  // Extension alias: promoted to core from VK_KHR_sampler_ycbcr_conversion.
+  using ImagePlaneMemoryRequirementsInfoKHR = ImagePlaneMemoryRequirementsInfo;
+
+  // C++ wrapper for VkImageResolve (one region of a vkCmdResolveImage multisample
+  // resolve). Plain data struct: unlike the sType-bearing structs in this file it
+  // has no sType/pNext and no StructureType mapping. Layout-identical to the C struct.
+  struct ImageResolve
+  {
+    using NativeType = VkImageResolve;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ImageResolve(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}) VULKAN_HPP_NOEXCEPT
+    : srcSubresource( srcSubresource_ ), srcOffset( srcOffset_ ), dstSubresource( dstSubresource_ ), dstOffset( dstOffset_ ), extent( extent_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageResolve( ImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; safe because both types share an identical layout.
+    ImageResolve( VkImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImageResolve( *reinterpret_cast<ImageResolve const *>( &rhs ) )
+    {}
+
+
+    ImageResolve & operator=( ImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ImageResolve & operator=( VkImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageResolve const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters (builder style): each returns *this for chaining.
+    VULKAN_HPP_CONSTEXPR_14 ImageResolve & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcSubresource = srcSubresource_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageResolve & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcOffset = srcOffset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageResolve & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstSubresource = dstSubresource_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageResolve & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstOffset = dstOffset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageResolve & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extent = extent_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C struct, for passing to the C API.
+    operator VkImageResolve const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageResolve*>( this );
+    }
+
+    operator VkImageResolve &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageResolve*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &, VULKAN_HPP_NAMESPACE::Offset3D const &, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &, VULKAN_HPP_NAMESPACE::Offset3D const &, VULKAN_HPP_NAMESPACE::Extent3D const &>
+#endif
+      // Returns a tuple of references to all members, in declaration order.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( srcSubresource, srcOffset, dstSubresource, dstOffset, extent );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ImageResolve const & ) const = default;
+#else
+    bool operator==( ImageResolve const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( srcSubresource == rhs.srcSubresource )
+          && ( srcOffset == rhs.srcOffset )
+          && ( dstSubresource == rhs.dstSubresource )
+          && ( dstOffset == rhs.dstOffset )
+          && ( extent == rhs.extent );
+#endif
+    }
+
+    bool operator!=( ImageResolve const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
+    VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {};
+    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
+    VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {};
+    VULKAN_HPP_NAMESPACE::Extent3D extent = {};
+
+  };
+
+  // C++ wrapper for VkImageResolve2: a src subresource/offset to dst
+  // subresource/offset region of the given extent. Relies on being
+  // layout-compatible with the C struct, which makes the reinterpret_cast
+  // conversions below valid.
+  struct ImageResolve2
+  {
+    using NativeType = VkImageResolve2;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageResolve2;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; pNext is the trailing parameter so field defaults read naturally.
+VULKAN_HPP_CONSTEXPR ImageResolve2(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), srcSubresource( srcSubresource_ ), srcOffset( srcOffset_ ), dstSubresource( dstSubresource_ ), dstOffset( dstOffset_ ), extent( extent_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageResolve2( ImageResolve2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct via the layout-compatible reinterpret.
+    ImageResolve2( VkImageResolve2 const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImageResolve2( *reinterpret_cast<ImageResolve2 const *>( &rhs ) )
+    {}
+
+
+    ImageResolve2 & operator=( ImageResolve2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct.
+    ImageResolve2 & operator=( VkImageResolve2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageResolve2 const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each returns *this so calls can be chained.
+    VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcSubresource = srcSubresource_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcOffset = srcOffset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstSubresource = dstSubresource_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstOffset = dstOffset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extent = extent_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit zero-cost views as the C struct, for passing straight to the C API.
+    operator VkImageResolve2 const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageResolve2*>( this );
+    }
+
+    operator VkImageResolve2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageResolve2*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Exposes all members (including sType/pNext) as a tuple of const references.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &, VULKAN_HPP_NAMESPACE::Offset3D const &, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &, VULKAN_HPP_NAMESPACE::Offset3D const &, VULKAN_HPP_NAMESPACE::Extent3D const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, srcSubresource, srcOffset, dstSubresource, dstOffset, extent );
+    }
+#endif
+
+
+    // Member-wise comparison; defaulted <=> when the compiler supports it,
+    // otherwise hand-rolled ==/!= (pNext is compared as a raw pointer).
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ImageResolve2 const & ) const = default;
+#else
+    bool operator==( ImageResolve2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( srcSubresource == rhs.srcSubresource )
+          && ( srcOffset == rhs.srcOffset )
+          && ( dstSubresource == rhs.dstSubresource )
+          && ( dstOffset == rhs.dstOffset )
+          && ( extent == rhs.extent );
+#endif
+    }
+
+    bool operator!=( ImageResolve2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageResolve2;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
+    VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {};
+    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
+    VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {};
+    VULKAN_HPP_NAMESPACE::Extent3D extent = {};
+
+  };
+
+  // Maps StructureType::eImageResolve2 back to this C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::eImageResolve2>
+  {
+    using Type = ImageResolve2;
+  };
+  // Alias kept for source compatibility with the original KHR extension name.
+  using ImageResolve2KHR = ImageResolve2;
+
+  // C++ wrapper for VkImageSparseMemoryRequirementsInfo2: input struct naming
+  // the image to query. Layout-compatible with the C struct, which makes the
+  // reinterpret_cast conversions below valid.
+  struct ImageSparseMemoryRequirementsInfo2
+  {
+    using NativeType = VkImageSparseMemoryRequirementsInfo2;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageSparseMemoryRequirementsInfo2;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; pNext is the trailing parameter so field defaults read naturally.
+VULKAN_HPP_CONSTEXPR ImageSparseMemoryRequirementsInfo2(VULKAN_HPP_NAMESPACE::Image image_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), image( image_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageSparseMemoryRequirementsInfo2( ImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct via the layout-compatible reinterpret.
+    ImageSparseMemoryRequirementsInfo2( VkImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImageSparseMemoryRequirementsInfo2( *reinterpret_cast<ImageSparseMemoryRequirementsInfo2 const *>( &rhs ) )
+    {}
+
+
+    ImageSparseMemoryRequirementsInfo2 & operator=( ImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct.
+    ImageSparseMemoryRequirementsInfo2 & operator=( VkImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each returns *this so calls can be chained.
+    VULKAN_HPP_CONSTEXPR_14 ImageSparseMemoryRequirementsInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageSparseMemoryRequirementsInfo2 & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
+    {
+      image = image_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit zero-cost views as the C struct, for passing straight to the C API.
+    operator VkImageSparseMemoryRequirementsInfo2 const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2*>( this );
+    }
+
+    operator VkImageSparseMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageSparseMemoryRequirementsInfo2*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Exposes all members (including sType/pNext) as a tuple of const references.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Image const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, image );
+    }
+#endif
+
+
+    // Member-wise comparison; defaulted <=> when the compiler supports it,
+    // otherwise hand-rolled ==/!= (pNext is compared as a raw pointer).
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ImageSparseMemoryRequirementsInfo2 const & ) const = default;
+#else
+    bool operator==( ImageSparseMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( image == rhs.image );
+#endif
+    }
+
+    bool operator!=( ImageSparseMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageSparseMemoryRequirementsInfo2;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Image image = {};
+
+  };
+
+  // Maps StructureType::eImageSparseMemoryRequirementsInfo2 back to this C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::eImageSparseMemoryRequirementsInfo2>
+  {
+    using Type = ImageSparseMemoryRequirementsInfo2;
+  };
+  // Alias kept for source compatibility with the original KHR extension name.
+  using ImageSparseMemoryRequirementsInfo2KHR = ImageSparseMemoryRequirementsInfo2;
+
+  // C++ wrapper for VkImageStencilUsageCreateInfo: carries a separate usage
+  // mask for the stencil aspect. Layout-compatible with the C struct, which
+  // makes the reinterpret_cast conversions below valid.
+  struct ImageStencilUsageCreateInfo
+  {
+    using NativeType = VkImageStencilUsageCreateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageStencilUsageCreateInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; pNext is the trailing parameter so field defaults read naturally.
+VULKAN_HPP_CONSTEXPR ImageStencilUsageCreateInfo(VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), stencilUsage( stencilUsage_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageStencilUsageCreateInfo( ImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct via the layout-compatible reinterpret.
+    ImageStencilUsageCreateInfo( VkImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImageStencilUsageCreateInfo( *reinterpret_cast<ImageStencilUsageCreateInfo const *>( &rhs ) )
+    {}
+
+
+    ImageStencilUsageCreateInfo & operator=( ImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct.
+    ImageStencilUsageCreateInfo & operator=( VkImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each returns *this so calls can be chained.
+    VULKAN_HPP_CONSTEXPR_14 ImageStencilUsageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageStencilUsageCreateInfo & setStencilUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stencilUsage = stencilUsage_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit zero-cost views as the C struct, for passing straight to the C API.
+    operator VkImageStencilUsageCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageStencilUsageCreateInfo*>( this );
+    }
+
+    operator VkImageStencilUsageCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageStencilUsageCreateInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Exposes all members (including sType/pNext) as a tuple of const references.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageUsageFlags const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, stencilUsage );
+    }
+#endif
+
+
+    // Member-wise comparison; defaulted <=> when the compiler supports it,
+    // otherwise hand-rolled ==/!= (pNext is compared as a raw pointer).
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ImageStencilUsageCreateInfo const & ) const = default;
+#else
+    bool operator==( ImageStencilUsageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( stencilUsage == rhs.stencilUsage );
+#endif
+    }
+
+    bool operator!=( ImageStencilUsageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageStencilUsageCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage = {};
+
+  };
+
+  // Maps StructureType::eImageStencilUsageCreateInfo back to this C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::eImageStencilUsageCreateInfo>
+  {
+    using Type = ImageStencilUsageCreateInfo;
+  };
+  // Alias kept for source compatibility with the original EXT extension name.
+  using ImageStencilUsageCreateInfoEXT = ImageStencilUsageCreateInfo;
+
+  // C++ wrapper for VkImageSubresource2EXT: wraps a single ImageSubresource.
+  // Note pNext is non-const (void *) here, unlike most input structs.
+  // Layout-compatible with the C struct, so the reinterpret_casts below are valid.
+  struct ImageSubresource2EXT
+  {
+    using NativeType = VkImageSubresource2EXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageSubresource2EXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; pNext is the trailing parameter so field defaults read naturally.
+VULKAN_HPP_CONSTEXPR ImageSubresource2EXT(VULKAN_HPP_NAMESPACE::ImageSubresource imageSubresource_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), imageSubresource( imageSubresource_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageSubresource2EXT( ImageSubresource2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct via the layout-compatible reinterpret.
+    ImageSubresource2EXT( VkImageSubresource2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImageSubresource2EXT( *reinterpret_cast<ImageSubresource2EXT const *>( &rhs ) )
+    {}
+
+
+    ImageSubresource2EXT & operator=( ImageSubresource2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct.
+    ImageSubresource2EXT & operator=( VkImageSubresource2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSubresource2EXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each returns *this so calls can be chained.
+    VULKAN_HPP_CONSTEXPR_14 ImageSubresource2EXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageSubresource2EXT & setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresource const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageSubresource = imageSubresource_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit zero-cost views as the C struct, for passing straight to the C API.
+    operator VkImageSubresource2EXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageSubresource2EXT*>( this );
+    }
+
+    operator VkImageSubresource2EXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageSubresource2EXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Exposes all members (including sType/pNext) as a tuple of const references.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ImageSubresource const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, imageSubresource );
+    }
+#endif
+
+
+    // Member-wise comparison; defaulted <=> when the compiler supports it,
+    // otherwise hand-rolled ==/!= (pNext is compared as a raw pointer).
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ImageSubresource2EXT const & ) const = default;
+#else
+    bool operator==( ImageSubresource2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( imageSubresource == rhs.imageSubresource );
+#endif
+    }
+
+    bool operator!=( ImageSubresource2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageSubresource2EXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ImageSubresource imageSubresource = {};
+
+  };
+
+  // Maps StructureType::eImageSubresource2EXT back to this C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::eImageSubresource2EXT>
+  {
+    using Type = ImageSubresource2EXT;
+  };
+
+  // C++ wrapper for VkImageSwapchainCreateInfoKHR: names the swapchain an
+  // image is associated with. Layout-compatible with the C struct, which
+  // makes the reinterpret_cast conversions below valid.
+  struct ImageSwapchainCreateInfoKHR
+  {
+    using NativeType = VkImageSwapchainCreateInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageSwapchainCreateInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; pNext is the trailing parameter so field defaults read naturally.
+VULKAN_HPP_CONSTEXPR ImageSwapchainCreateInfoKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), swapchain( swapchain_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageSwapchainCreateInfoKHR( ImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct via the layout-compatible reinterpret.
+    ImageSwapchainCreateInfoKHR( VkImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImageSwapchainCreateInfoKHR( *reinterpret_cast<ImageSwapchainCreateInfoKHR const *>( &rhs ) )
+    {}
+
+
+    ImageSwapchainCreateInfoKHR & operator=( ImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct.
+    ImageSwapchainCreateInfoKHR & operator=( VkImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each returns *this so calls can be chained.
+    VULKAN_HPP_CONSTEXPR_14 ImageSwapchainCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageSwapchainCreateInfoKHR & setSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT
+    {
+      swapchain = swapchain_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit zero-cost views as the C struct, for passing straight to the C API.
+    operator VkImageSwapchainCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageSwapchainCreateInfoKHR*>( this );
+    }
+
+    operator VkImageSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageSwapchainCreateInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Exposes all members (including sType/pNext) as a tuple of const references.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SwapchainKHR const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, swapchain );
+    }
+#endif
+
+
+    // Member-wise comparison; defaulted <=> when the compiler supports it,
+    // otherwise hand-rolled ==/!= (pNext is compared as a raw pointer).
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ImageSwapchainCreateInfoKHR const & ) const = default;
+#else
+    bool operator==( ImageSwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( swapchain == rhs.swapchain );
+#endif
+    }
+
+    bool operator!=( ImageSwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageSwapchainCreateInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {};
+
+  };
+
+  // Maps StructureType::eImageSwapchainCreateInfoKHR back to this C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::eImageSwapchainCreateInfoKHR>
+  {
+    using Type = ImageSwapchainCreateInfoKHR;
+  };
+
+  // C++ wrapper for VkImageViewASTCDecodeModeEXT: selects the decode format
+  // (a Format value) for an image view. Layout-compatible with the C struct,
+  // which makes the reinterpret_cast conversions below valid.
+  struct ImageViewASTCDecodeModeEXT
+  {
+    using NativeType = VkImageViewASTCDecodeModeEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewAstcDecodeModeEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; pNext is the trailing parameter so field defaults read naturally.
+VULKAN_HPP_CONSTEXPR ImageViewASTCDecodeModeEXT(VULKAN_HPP_NAMESPACE::Format decodeMode_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), decodeMode( decodeMode_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageViewASTCDecodeModeEXT( ImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct via the layout-compatible reinterpret.
+    ImageViewASTCDecodeModeEXT( VkImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImageViewASTCDecodeModeEXT( *reinterpret_cast<ImageViewASTCDecodeModeEXT const *>( &rhs ) )
+    {}
+
+
+    ImageViewASTCDecodeModeEXT & operator=( ImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct.
+    ImageViewASTCDecodeModeEXT & operator=( VkImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each returns *this so calls can be chained.
+    VULKAN_HPP_CONSTEXPR_14 ImageViewASTCDecodeModeEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageViewASTCDecodeModeEXT & setDecodeMode( VULKAN_HPP_NAMESPACE::Format decodeMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      decodeMode = decodeMode_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit zero-cost views as the C struct, for passing straight to the C API.
+    operator VkImageViewASTCDecodeModeEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageViewASTCDecodeModeEXT*>( this );
+    }
+
+    operator VkImageViewASTCDecodeModeEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageViewASTCDecodeModeEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Exposes all members (including sType/pNext) as a tuple of const references.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Format const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, decodeMode );
+    }
+#endif
+
+
+    // Member-wise comparison; defaulted <=> when the compiler supports it,
+    // otherwise hand-rolled ==/!= (pNext is compared as a raw pointer).
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ImageViewASTCDecodeModeEXT const & ) const = default;
+#else
+    bool operator==( ImageViewASTCDecodeModeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( decodeMode == rhs.decodeMode );
+#endif
+    }
+
+    bool operator!=( ImageViewASTCDecodeModeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewAstcDecodeModeEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Format decodeMode = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+
+  };
+
+  // Maps StructureType::eImageViewAstcDecodeModeEXT back to this C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::eImageViewAstcDecodeModeEXT>
+  {
+    using Type = ImageViewASTCDecodeModeEXT;
+  };
+
+  // C++ wrapper for VkImageViewAddressPropertiesNVX: deviceAddress + size pair.
+  // No setters are generated and pNext is non-const (void *) — consistent with
+  // an output/properties struct filled in by the implementation (NOTE(review):
+  // inferred from the missing setter section; confirm against the NVX extension).
+  // Layout-compatible with the C struct, so the reinterpret_casts below are valid.
+  struct ImageViewAddressPropertiesNVX
+  {
+    using NativeType = VkImageViewAddressPropertiesNVX;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewAddressPropertiesNVX;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; pNext is the trailing parameter so field defaults read naturally.
+VULKAN_HPP_CONSTEXPR ImageViewAddressPropertiesNVX(VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), deviceAddress( deviceAddress_ ), size( size_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageViewAddressPropertiesNVX( ImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct via the layout-compatible reinterpret.
+    ImageViewAddressPropertiesNVX( VkImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImageViewAddressPropertiesNVX( *reinterpret_cast<ImageViewAddressPropertiesNVX const *>( &rhs ) )
+    {}
+
+
+    ImageViewAddressPropertiesNVX & operator=( ImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct.
+    ImageViewAddressPropertiesNVX & operator=( VkImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX const *>( &rhs );
+      return *this;
+    }
+
+
+    // Implicit zero-cost views as the C struct, for passing straight to the C API.
+    operator VkImageViewAddressPropertiesNVX const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageViewAddressPropertiesNVX*>( this );
+    }
+
+    operator VkImageViewAddressPropertiesNVX &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageViewAddressPropertiesNVX*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Exposes all members (including sType/pNext) as a tuple of const references.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, deviceAddress, size );
+    }
+#endif
+
+
+    // Member-wise comparison; defaulted <=> when the compiler supports it,
+    // otherwise hand-rolled ==/!= (pNext is compared as a raw pointer).
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ImageViewAddressPropertiesNVX const & ) const = default;
+#else
+    bool operator==( ImageViewAddressPropertiesNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( deviceAddress == rhs.deviceAddress )
+          && ( size == rhs.size );
+#endif
+    }
+
+    bool operator!=( ImageViewAddressPropertiesNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewAddressPropertiesNVX;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+
+  };
+
+  // Maps StructureType::eImageViewAddressPropertiesNVX back to this C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::eImageViewAddressPropertiesNVX>
+  {
+    using Type = ImageViewAddressPropertiesNVX;
+  };
+
+  // C++ wrapper for VkImageViewCaptureDescriptorDataInfoEXT: names the image
+  // view to query. Layout-compatible with the C struct, which makes the
+  // reinterpret_cast conversions below valid.
+  struct ImageViewCaptureDescriptorDataInfoEXT
+  {
+    using NativeType = VkImageViewCaptureDescriptorDataInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewCaptureDescriptorDataInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; pNext is the trailing parameter so field defaults read naturally.
+VULKAN_HPP_CONSTEXPR ImageViewCaptureDescriptorDataInfoEXT(VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), imageView( imageView_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageViewCaptureDescriptorDataInfoEXT( ImageViewCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct via the layout-compatible reinterpret.
+    ImageViewCaptureDescriptorDataInfoEXT( VkImageViewCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImageViewCaptureDescriptorDataInfoEXT( *reinterpret_cast<ImageViewCaptureDescriptorDataInfoEXT const *>( &rhs ) )
+    {}
+
+
+    ImageViewCaptureDescriptorDataInfoEXT & operator=( ImageViewCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct.
+    ImageViewCaptureDescriptorDataInfoEXT & operator=( VkImageViewCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each returns *this so calls can be chained.
+    VULKAN_HPP_CONSTEXPR_14 ImageViewCaptureDescriptorDataInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageViewCaptureDescriptorDataInfoEXT & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageView = imageView_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit zero-cost views as the C struct, for passing straight to the C API.
+    operator VkImageViewCaptureDescriptorDataInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageViewCaptureDescriptorDataInfoEXT*>( this );
+    }
+
+    operator VkImageViewCaptureDescriptorDataInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageViewCaptureDescriptorDataInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Exposes all members (including sType/pNext) as a tuple of const references.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageView const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, imageView );
+    }
+#endif
+
+
+    // Member-wise comparison; defaulted <=> when the compiler supports it,
+    // otherwise hand-rolled ==/!= (pNext is compared as a raw pointer).
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ImageViewCaptureDescriptorDataInfoEXT const & ) const = default;
+#else
+    bool operator==( ImageViewCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( imageView == rhs.imageView );
+#endif
+    }
+
+    bool operator!=( ImageViewCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewCaptureDescriptorDataInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ImageView imageView = {};
+
+  };
+
+  // Maps StructureType::eImageViewCaptureDescriptorDataInfoEXT back to this C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::eImageViewCaptureDescriptorDataInfoEXT>
+  {
+    using Type = ImageViewCaptureDescriptorDataInfoEXT;
+  };
+
+  // C++ wrapper for VkImageViewCreateInfo: full description of an image view
+  // (flags, image, viewType, format, component swizzle, subresource range).
+  // Layout-compatible with the C struct, which makes the reinterpret_cast
+  // conversions below valid.
+  struct ImageViewCreateInfo
+  {
+    using NativeType = VkImageViewCreateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewCreateInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; pNext is the trailing parameter so field defaults read naturally.
+VULKAN_HPP_CONSTEXPR ImageViewCreateInfo(VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Image image_ = {}, VULKAN_HPP_NAMESPACE::ImageViewType viewType_ = VULKAN_HPP_NAMESPACE::ImageViewType::e1D, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::ComponentMapping components_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), image( image_ ), viewType( viewType_ ), format( format_ ), components( components_ ), subresourceRange( subresourceRange_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageViewCreateInfo( ImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct via the layout-compatible reinterpret.
+    ImageViewCreateInfo( VkImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImageViewCreateInfo( *reinterpret_cast<ImageViewCreateInfo const *>( &rhs ) )
+    {}
+
+
+    ImageViewCreateInfo & operator=( ImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct.
+    ImageViewCreateInfo & operator=( VkImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each returns *this so calls can be chained.
+    VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
+    {
+      image = image_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setViewType( VULKAN_HPP_NAMESPACE::ImageViewType viewType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      viewType = viewType_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
+    {
+      format = format_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setComponents( VULKAN_HPP_NAMESPACE::ComponentMapping const & components_ ) VULKAN_HPP_NOEXCEPT
+    {
+      components = components_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subresourceRange = subresourceRange_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit zero-cost views as the C struct, for passing straight to the C API.
+    operator VkImageViewCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageViewCreateInfo*>( this );
+    }
+
+    operator VkImageViewCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageViewCreateInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Exposes all members (including sType/pNext) as a tuple of const references.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageViewCreateFlags const &, VULKAN_HPP_NAMESPACE::Image const &, VULKAN_HPP_NAMESPACE::ImageViewType const &, VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::ComponentMapping const &, VULKAN_HPP_NAMESPACE::ImageSubresourceRange const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, image, viewType, format, components, subresourceRange );
+    }
+#endif
+
+
+    // Member-wise comparison; defaulted <=> when the compiler supports it,
+    // otherwise hand-rolled ==/!= (pNext is compared as a raw pointer).
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ImageViewCreateInfo const & ) const = default;
+#else
+    bool operator==( ImageViewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( image == rhs.image )
+          && ( viewType == rhs.viewType )
+          && ( format == rhs.format )
+          && ( components == rhs.components )
+          && ( subresourceRange == rhs.subresourceRange );
+#endif
+    }
+
+    bool operator!=( ImageViewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags = {};
+    VULKAN_HPP_NAMESPACE::Image image = {};
+    VULKAN_HPP_NAMESPACE::ImageViewType viewType = VULKAN_HPP_NAMESPACE::ImageViewType::e1D;
+    VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    VULKAN_HPP_NAMESPACE::ComponentMapping components = {};
+    VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {};
+
+  };
+
+  // Maps StructureType::eImageViewCreateInfo back to this C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::eImageViewCreateInfo>
+  {
+    using Type = ImageViewCreateInfo;
+  };
+
+  // C++ wrapper for VkImageViewHandleInfoNVX (VK_NVX_image_view_handle):
+  // bundles an image view, its descriptor type, and a sampler. sType is
+  // fixed to eImageViewHandleInfoNVX. Member order/layout must stay
+  // bit-identical to the C struct -- the conversion operators below
+  // reinterpret_cast *this* to VkImageViewHandleInfoNVX.
+  struct ImageViewHandleInfoNVX
+  {
+    using NativeType = VkImageViewHandleInfoNVX;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewHandleInfoNVX;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ImageViewHandleInfoNVX(VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, VULKAN_HPP_NAMESPACE::Sampler sampler_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), imageView( imageView_ ), descriptorType( descriptorType_ ), sampler( sampler_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageViewHandleInfoNVX( ImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid only because the layouts match.
+    ImageViewHandleInfoNVX( VkImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImageViewHandleInfoNVX( *reinterpret_cast<ImageViewHandleInfoNVX const *>( &rhs ) )
+    {}
+
+
+    ImageViewHandleInfoNVX & operator=( ImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ImageViewHandleInfoNVX & operator=( VkImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable member setters (builder-style); each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 ImageViewHandleInfoNVX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageViewHandleInfoNVX & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageView = imageView_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageViewHandleInfoNVX & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorType = descriptorType_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageViewHandleInfoNVX & setSampler( VULKAN_HPP_NAMESPACE::Sampler sampler_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sampler = sampler_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // View this object as the C struct (no copy; relies on identical layout).
+    operator VkImageViewHandleInfoNVX const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageViewHandleInfoNVX*>( this );
+    }
+
+    operator VkImageViewHandleInfoNVX &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageViewHandleInfoNVX*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageView const &, VULKAN_HPP_NAMESPACE::DescriptorType const &, VULKAN_HPP_NAMESPACE::Sampler const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      // Tuple of references over every member, used by reflection-based operator==.
+      return std::tie( sType, pNext, imageView, descriptorType, sampler );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ImageViewHandleInfoNVX const & ) const = default;
+#else
+    bool operator==( ImageViewHandleInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( imageView == rhs.imageView )
+          && ( descriptorType == rhs.descriptorType )
+          && ( sampler == rhs.sampler );
+#endif
+    }
+
+    bool operator!=( ImageViewHandleInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewHandleInfoNVX;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ImageView imageView = {};
+    VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
+    VULKAN_HPP_NAMESPACE::Sampler sampler = {};
+
+  };
+
+  // Maps the sType enumerant back to this wrapper type for generic code.
+  template <>
+  struct CppType<StructureType, StructureType::eImageViewHandleInfoNVX>
+  {
+    using Type = ImageViewHandleInfoNVX;
+  };
+
+  // C++ wrapper for VkImageViewMinLodCreateInfoEXT (VK_EXT_image_view_min_lod):
+  // carries a single minLod float. Presumably chained into
+  // VkImageViewCreateInfo::pNext -- confirm against the extension spec.
+  // Layout must stay bit-identical to the C struct; the conversion operators
+  // below reinterpret_cast *this* to VkImageViewMinLodCreateInfoEXT.
+  struct ImageViewMinLodCreateInfoEXT
+  {
+    using NativeType = VkImageViewMinLodCreateInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewMinLodCreateInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ImageViewMinLodCreateInfoEXT(float minLod_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), minLod( minLod_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageViewMinLodCreateInfoEXT( ImageViewMinLodCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid only because the layouts match.
+    ImageViewMinLodCreateInfoEXT( VkImageViewMinLodCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImageViewMinLodCreateInfoEXT( *reinterpret_cast<ImageViewMinLodCreateInfoEXT const *>( &rhs ) )
+    {}
+
+
+    ImageViewMinLodCreateInfoEXT & operator=( ImageViewMinLodCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ImageViewMinLodCreateInfoEXT & operator=( VkImageViewMinLodCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewMinLodCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable member setters (builder-style); each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 ImageViewMinLodCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageViewMinLodCreateInfoEXT & setMinLod( float minLod_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minLod = minLod_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // View this object as the C struct (no copy; relies on identical layout).
+    operator VkImageViewMinLodCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageViewMinLodCreateInfoEXT*>( this );
+    }
+
+    operator VkImageViewMinLodCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageViewMinLodCreateInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, float const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      // Tuple of references over every member, used by reflection-based operator==.
+      return std::tie( sType, pNext, minLod );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ImageViewMinLodCreateInfoEXT const & ) const = default;
+#else
+    // NOTE: compares minLod with float ==, mirroring the generated C semantics.
+    bool operator==( ImageViewMinLodCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( minLod == rhs.minLod );
+#endif
+    }
+
+    bool operator!=( ImageViewMinLodCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewMinLodCreateInfoEXT;
+    const void * pNext = {};
+    float minLod = {};
+
+  };
+
+  // Maps the sType enumerant back to this wrapper type for generic code.
+  template <>
+  struct CppType<StructureType, StructureType::eImageViewMinLodCreateInfoEXT>
+  {
+    using Type = ImageViewMinLodCreateInfoEXT;
+  };
+
+  // C++ wrapper for VkImageViewSampleWeightCreateInfoQCOM
+  // (VK_QCOM_image_processing): describes a sample-weight image view via
+  // filterCenter, filterSize, and numPhases -- see the QCOM extension spec
+  // for the exact semantics of these fields (not visible here).
+  // Layout must stay bit-identical to the C struct; the conversion operators
+  // below reinterpret_cast *this* to the C type.
+  struct ImageViewSampleWeightCreateInfoQCOM
+  {
+    using NativeType = VkImageViewSampleWeightCreateInfoQCOM;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewSampleWeightCreateInfoQCOM;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ImageViewSampleWeightCreateInfoQCOM(VULKAN_HPP_NAMESPACE::Offset2D filterCenter_ = {}, VULKAN_HPP_NAMESPACE::Extent2D filterSize_ = {}, uint32_t numPhases_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), filterCenter( filterCenter_ ), filterSize( filterSize_ ), numPhases( numPhases_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageViewSampleWeightCreateInfoQCOM( ImageViewSampleWeightCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid only because the layouts match.
+    ImageViewSampleWeightCreateInfoQCOM( VkImageViewSampleWeightCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImageViewSampleWeightCreateInfoQCOM( *reinterpret_cast<ImageViewSampleWeightCreateInfoQCOM const *>( &rhs ) )
+    {}
+
+
+    ImageViewSampleWeightCreateInfoQCOM & operator=( ImageViewSampleWeightCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ImageViewSampleWeightCreateInfoQCOM & operator=( VkImageViewSampleWeightCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewSampleWeightCreateInfoQCOM const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable member setters (builder-style); each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 ImageViewSampleWeightCreateInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageViewSampleWeightCreateInfoQCOM & setFilterCenter( VULKAN_HPP_NAMESPACE::Offset2D const & filterCenter_ ) VULKAN_HPP_NOEXCEPT
+    {
+      filterCenter = filterCenter_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageViewSampleWeightCreateInfoQCOM & setFilterSize( VULKAN_HPP_NAMESPACE::Extent2D const & filterSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      filterSize = filterSize_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageViewSampleWeightCreateInfoQCOM & setNumPhases( uint32_t numPhases_ ) VULKAN_HPP_NOEXCEPT
+    {
+      numPhases = numPhases_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // View this object as the C struct (no copy; relies on identical layout).
+    operator VkImageViewSampleWeightCreateInfoQCOM const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageViewSampleWeightCreateInfoQCOM*>( this );
+    }
+
+    operator VkImageViewSampleWeightCreateInfoQCOM &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageViewSampleWeightCreateInfoQCOM*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Offset2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      // Tuple of references over every member, used by reflection-based operator==.
+      return std::tie( sType, pNext, filterCenter, filterSize, numPhases );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ImageViewSampleWeightCreateInfoQCOM const & ) const = default;
+#else
+    bool operator==( ImageViewSampleWeightCreateInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( filterCenter == rhs.filterCenter )
+          && ( filterSize == rhs.filterSize )
+          && ( numPhases == rhs.numPhases );
+#endif
+    }
+
+    bool operator!=( ImageViewSampleWeightCreateInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewSampleWeightCreateInfoQCOM;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Offset2D filterCenter = {};
+    VULKAN_HPP_NAMESPACE::Extent2D filterSize = {};
+    uint32_t numPhases = {};
+
+  };
+
+  // Maps the sType enumerant back to this wrapper type for generic code.
+  template <>
+  struct CppType<StructureType, StructureType::eImageViewSampleWeightCreateInfoQCOM>
+  {
+    using Type = ImageViewSampleWeightCreateInfoQCOM;
+  };
+
+  // C++ wrapper for VkImageViewUsageCreateInfo (core 1.1; formerly KHR):
+  // carries an ImageUsageFlags value. Presumably chained into
+  // VkImageViewCreateInfo::pNext to restrict the view's usage -- confirm
+  // against the Vulkan spec. Layout must stay bit-identical to the C struct;
+  // the conversion operators below reinterpret_cast *this* to the C type.
+  struct ImageViewUsageCreateInfo
+  {
+    using NativeType = VkImageViewUsageCreateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewUsageCreateInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ImageViewUsageCreateInfo(VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), usage( usage_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageViewUsageCreateInfo( ImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid only because the layouts match.
+    ImageViewUsageCreateInfo( VkImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImageViewUsageCreateInfo( *reinterpret_cast<ImageViewUsageCreateInfo const *>( &rhs ) )
+    {}
+
+
+    ImageViewUsageCreateInfo & operator=( ImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ImageViewUsageCreateInfo & operator=( VkImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable member setters (builder-style); each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 ImageViewUsageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageViewUsageCreateInfo & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      usage = usage_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // View this object as the C struct (no copy; relies on identical layout).
+    operator VkImageViewUsageCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageViewUsageCreateInfo*>( this );
+    }
+
+    operator VkImageViewUsageCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageViewUsageCreateInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageUsageFlags const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      // Tuple of references over every member, used by reflection-based operator==.
+      return std::tie( sType, pNext, usage );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ImageViewUsageCreateInfo const & ) const = default;
+#else
+    bool operator==( ImageViewUsageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( usage == rhs.usage );
+#endif
+    }
+
+    bool operator!=( ImageViewUsageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewUsageCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {};
+
+  };
+
+  // Maps the sType enumerant back to this wrapper type for generic code.
+  template <>
+  struct CppType<StructureType, StructureType::eImageViewUsageCreateInfo>
+  {
+    using Type = ImageViewUsageCreateInfo;
+  };
+  // Backward-compatible alias from the pre-promotion KHR extension name.
+  using ImageViewUsageCreateInfoKHR = ImageViewUsageCreateInfo;
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  // C++ wrapper for VkImportAndroidHardwareBufferInfoANDROID
+  // (VK_ANDROID_external_memory_android_hardware_buffer); only compiled
+  // under VK_USE_PLATFORM_ANDROID_KHR. Holds a raw AHardwareBuffer* --
+  // ownership/refcounting semantics are defined by the extension spec,
+  // not by this struct. Layout must stay bit-identical to the C struct;
+  // the conversion operators below reinterpret_cast *this* to the C type.
+  struct ImportAndroidHardwareBufferInfoANDROID
+  {
+    using NativeType = VkImportAndroidHardwareBufferInfoANDROID;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportAndroidHardwareBufferInfoANDROID;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ImportAndroidHardwareBufferInfoANDROID(struct AHardwareBuffer * buffer_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), buffer( buffer_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImportAndroidHardwareBufferInfoANDROID( ImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid only because the layouts match.
+    ImportAndroidHardwareBufferInfoANDROID( VkImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImportAndroidHardwareBufferInfoANDROID( *reinterpret_cast<ImportAndroidHardwareBufferInfoANDROID const *>( &rhs ) )
+    {}
+
+
+    ImportAndroidHardwareBufferInfoANDROID & operator=( ImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ImportAndroidHardwareBufferInfoANDROID & operator=( VkImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportAndroidHardwareBufferInfoANDROID const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable member setters (builder-style); each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 ImportAndroidHardwareBufferInfoANDROID & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImportAndroidHardwareBufferInfoANDROID & setBuffer( struct AHardwareBuffer * buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // View this object as the C struct (no copy; relies on identical layout).
+    operator VkImportAndroidHardwareBufferInfoANDROID const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImportAndroidHardwareBufferInfoANDROID*>( this );
+    }
+
+    operator VkImportAndroidHardwareBufferInfoANDROID &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImportAndroidHardwareBufferInfoANDROID*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, struct AHardwareBuffer * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      // Tuple of references over every member, used by reflection-based operator==.
+      return std::tie( sType, pNext, buffer );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ImportAndroidHardwareBufferInfoANDROID const & ) const = default;
+#else
+    // NOTE: compares the AHardwareBuffer pointer, not the buffer contents.
+    bool operator==( ImportAndroidHardwareBufferInfoANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( buffer == rhs.buffer );
+#endif
+    }
+
+    bool operator!=( ImportAndroidHardwareBufferInfoANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportAndroidHardwareBufferInfoANDROID;
+    const void * pNext = {};
+    struct AHardwareBuffer * buffer = {};
+
+  };
+
+  // Maps the sType enumerant back to this wrapper type for generic code.
+  template <>
+  struct CppType<StructureType, StructureType::eImportAndroidHardwareBufferInfoANDROID>
+  {
+    using Type = ImportAndroidHardwareBufferInfoANDROID;
+  };
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+  // C++ wrapper for VkImportFenceFdInfoKHR (VK_KHR_external_fence_fd):
+  // target fence, import flags, handle type (default eOpaqueFd), and the
+  // POSIX file descriptor to import. fd ownership transfer rules depend on
+  // handleType -- see the extension spec; not expressed in this struct.
+  // Layout must stay bit-identical to the C struct; the conversion
+  // operators below reinterpret_cast *this* to VkImportFenceFdInfoKHR.
+  struct ImportFenceFdInfoKHR
+  {
+    using NativeType = VkImportFenceFdInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportFenceFdInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ImportFenceFdInfoKHR(VULKAN_HPP_NAMESPACE::Fence fence_ = {}, VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ = {}, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd, int fd_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), fence( fence_ ), flags( flags_ ), handleType( handleType_ ), fd( fd_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImportFenceFdInfoKHR( ImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid only because the layouts match.
+    ImportFenceFdInfoKHR( VkImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImportFenceFdInfoKHR( *reinterpret_cast<ImportFenceFdInfoKHR const *>( &rhs ) )
+    {}
+
+
+    ImportFenceFdInfoKHR & operator=( ImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ImportFenceFdInfoKHR & operator=( VkImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable member setters (builder-style); each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fence = fence_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR & setFd( int fd_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fd = fd_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // View this object as the C struct (no copy; relies on identical layout).
+    operator VkImportFenceFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImportFenceFdInfoKHR*>( this );
+    }
+
+    operator VkImportFenceFdInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImportFenceFdInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Fence const &, VULKAN_HPP_NAMESPACE::FenceImportFlags const &, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits const &, int const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      // Tuple of references over every member, used by reflection-based operator==.
+      return std::tie( sType, pNext, fence, flags, handleType, fd );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ImportFenceFdInfoKHR const & ) const = default;
+#else
+    bool operator==( ImportFenceFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( fence == rhs.fence )
+          && ( flags == rhs.flags )
+          && ( handleType == rhs.handleType )
+          && ( fd == rhs.fd );
+#endif
+    }
+
+    bool operator!=( ImportFenceFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportFenceFdInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Fence fence = {};
+    VULKAN_HPP_NAMESPACE::FenceImportFlags flags = {};
+    VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
+    int fd = {};
+
+  };
+
+  // Maps the sType enumerant back to this wrapper type for generic code.
+  template <>
+  struct CppType<StructureType, StructureType::eImportFenceFdInfoKHR>
+  {
+    using Type = ImportFenceFdInfoKHR;
+  };
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  // C++ wrapper for VkImportFenceWin32HandleInfoKHR
+  // (VK_KHR_external_fence_win32); only compiled under
+  // VK_USE_PLATFORM_WIN32_KHR. Imports a fence payload from either a
+  // Win32 HANDLE or a named object (LPCWSTR name) -- which one is used,
+  // and HANDLE ownership, are governed by the extension spec.
+  // Layout must stay bit-identical to the C struct; the conversion
+  // operators below reinterpret_cast *this* to the C type.
+  struct ImportFenceWin32HandleInfoKHR
+  {
+    using NativeType = VkImportFenceWin32HandleInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportFenceWin32HandleInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ImportFenceWin32HandleInfoKHR(VULKAN_HPP_NAMESPACE::Fence fence_ = {}, VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ = {}, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd, HANDLE handle_ = {}, LPCWSTR name_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), fence( fence_ ), flags( flags_ ), handleType( handleType_ ), handle( handle_ ), name( name_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImportFenceWin32HandleInfoKHR( ImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid only because the layouts match.
+    ImportFenceWin32HandleInfoKHR( VkImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImportFenceWin32HandleInfoKHR( *reinterpret_cast<ImportFenceWin32HandleInfoKHR const *>( &rhs ) )
+    {}
+
+
+    ImportFenceWin32HandleInfoKHR & operator=( ImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ImportFenceWin32HandleInfoKHR & operator=( VkImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable member setters (builder-style); each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fence = fence_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handle = handle_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT
+    {
+      name = name_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // View this object as the C struct (no copy; relies on identical layout).
+    operator VkImportFenceWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImportFenceWin32HandleInfoKHR*>( this );
+    }
+
+    operator VkImportFenceWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImportFenceWin32HandleInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Fence const &, VULKAN_HPP_NAMESPACE::FenceImportFlags const &, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits const &, HANDLE const &, LPCWSTR const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      // Tuple of references over every member, used by reflection-based operator==.
+      return std::tie( sType, pNext, fence, flags, handleType, handle, name );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ImportFenceWin32HandleInfoKHR const & ) const = default;
+#else
+    // NOTE: 'name' is compared as a pointer, not by wide-string contents.
+    bool operator==( ImportFenceWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( fence == rhs.fence )
+          && ( flags == rhs.flags )
+          && ( handleType == rhs.handleType )
+          && ( handle == rhs.handle )
+          && ( name == rhs.name );
+#endif
+    }
+
+    bool operator!=( ImportFenceWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportFenceWin32HandleInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Fence fence = {};
+    VULKAN_HPP_NAMESPACE::FenceImportFlags flags = {};
+    VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
+    HANDLE handle = {};
+    LPCWSTR name = {};
+
+  };
+
+  // Maps the sType enumerant back to this wrapper type for generic code.
+  template <>
+  struct CppType<StructureType, StructureType::eImportFenceWin32HandleInfoKHR>
+  {
+    using Type = ImportFenceWin32HandleInfoKHR;
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  struct ImportMemoryBufferCollectionFUCHSIA
+  {
+    using NativeType = VkImportMemoryBufferCollectionFUCHSIA;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryBufferCollectionFUCHSIA;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ImportMemoryBufferCollectionFUCHSIA(VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ = {}, uint32_t index_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), collection( collection_ ), index( index_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImportMemoryBufferCollectionFUCHSIA( ImportMemoryBufferCollectionFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImportMemoryBufferCollectionFUCHSIA( VkImportMemoryBufferCollectionFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImportMemoryBufferCollectionFUCHSIA( *reinterpret_cast<ImportMemoryBufferCollectionFUCHSIA const *>( &rhs ) )
+    {}
+
+
+    ImportMemoryBufferCollectionFUCHSIA & operator=( ImportMemoryBufferCollectionFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ImportMemoryBufferCollectionFUCHSIA & operator=( VkImportMemoryBufferCollectionFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMemoryBufferCollectionFUCHSIA const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ImportMemoryBufferCollectionFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImportMemoryBufferCollectionFUCHSIA & setCollection( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ ) VULKAN_HPP_NOEXCEPT
+    {
+      collection = collection_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImportMemoryBufferCollectionFUCHSIA & setIndex( uint32_t index_ ) VULKAN_HPP_NOEXCEPT
+    {
+      index = index_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkImportMemoryBufferCollectionFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImportMemoryBufferCollectionFUCHSIA*>( this );
+    }
+
+    operator VkImportMemoryBufferCollectionFUCHSIA &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImportMemoryBufferCollectionFUCHSIA*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, collection, index );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ImportMemoryBufferCollectionFUCHSIA const & ) const = default;
+#else
+    bool operator==( ImportMemoryBufferCollectionFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( collection == rhs.collection )
+          && ( index == rhs.index );
+#endif
+    }
+
+    bool operator!=( ImportMemoryBufferCollectionFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryBufferCollectionFUCHSIA;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection = {};
+    uint32_t index = {};
+
+  };
+
+  // Maps StructureType::eImportMemoryBufferCollectionFUCHSIA back to its C++
+  // wrapper type, enabling compile-time sType -> type lookups (e.g. in
+  // structure-chain machinery).
+  template <>
+  struct CppType<StructureType, StructureType::eImportMemoryBufferCollectionFUCHSIA>
+  {
+    using Type = ImportMemoryBufferCollectionFUCHSIA;
+  };
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  // C++ wrapper for VkImportMemoryFdInfoKHR.  The conversion operators below
+  // reinterpret_cast this object to the C struct, so the member layout
+  // (sType, pNext, handleType, fd) must match the C definition exactly.
+  struct ImportMemoryFdInfoKHR
+  {
+    using NativeType = VkImportMemoryFdInfoKHR;
+
+    // NOTE(review): presumably whether this sType may appear more than once in
+    // a structure chain -- confirm against vulkan.hpp StructureChain docs.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryFdInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ImportMemoryFdInfoKHR(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, int fd_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), handleType( handleType_ ), fd( fd_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImportMemoryFdInfoKHR( ImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct (bitwise copy; layouts are identical).
+    ImportMemoryFdInfoKHR( VkImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImportMemoryFdInfoKHR( *reinterpret_cast<ImportMemoryFdInfoKHR const *>( &rhs ) )
+    {}
+
+
+    ImportMemoryFdInfoKHR & operator=( ImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (bitwise copy; layouts are identical).
+    ImportMemoryFdInfoKHR & operator=( VkImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMemoryFdInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    // Chainable setters; compiled out when VULKAN_HPP_NO_STRUCT_SETTERS is defined.
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ImportMemoryFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImportMemoryFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImportMemoryFdInfoKHR & setFd( int fd_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fd = fd_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy views as the C struct; valid only because of the mirrored layout.
+    operator VkImportMemoryFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImportMemoryFdInfoKHR*>( this );
+    }
+
+    operator VkImportMemoryFdInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImportMemoryFdInfoKHR*>( this );
+    }
+
+    // Tuple of references to all members, used for reflection-based comparison.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits const &, int const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, handleType, fd );
+    }
+#endif
+
+
+    // Defaulted three-way comparison when available, otherwise member-wise ==/!=.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ImportMemoryFdInfoKHR const & ) const = default;
+#else
+    bool operator==( ImportMemoryFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleType == rhs.handleType )
+          && ( fd == rhs.fd );
+#endif
+    }
+
+    bool operator!=( ImportMemoryFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Public members, in C-struct declaration order.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryFdInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
+    int fd = {};
+
+  };
+
+  // Maps StructureType::eImportMemoryFdInfoKHR back to its C++ wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eImportMemoryFdInfoKHR>
+  {
+    using Type = ImportMemoryFdInfoKHR;
+  };
+
+  // C++ wrapper for VkImportMemoryHostPointerInfoEXT.  The conversion operators
+  // below reinterpret_cast this object to the C struct, so the member layout
+  // (sType, pNext, handleType, pHostPointer) must match the C definition exactly.
+  struct ImportMemoryHostPointerInfoEXT
+  {
+    using NativeType = VkImportMemoryHostPointerInfoEXT;
+
+    // NOTE(review): presumably whether this sType may appear more than once in
+    // a structure chain -- confirm against vulkan.hpp StructureChain docs.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryHostPointerInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ImportMemoryHostPointerInfoEXT(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, void * pHostPointer_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), handleType( handleType_ ), pHostPointer( pHostPointer_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImportMemoryHostPointerInfoEXT( ImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct (bitwise copy; layouts are identical).
+    ImportMemoryHostPointerInfoEXT( VkImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImportMemoryHostPointerInfoEXT( *reinterpret_cast<ImportMemoryHostPointerInfoEXT const *>( &rhs ) )
+    {}
+
+
+    ImportMemoryHostPointerInfoEXT & operator=( ImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (bitwise copy; layouts are identical).
+    ImportMemoryHostPointerInfoEXT & operator=( VkImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMemoryHostPointerInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    // Chainable setters; compiled out when VULKAN_HPP_NO_STRUCT_SETTERS is defined.
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ImportMemoryHostPointerInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImportMemoryHostPointerInfoEXT & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImportMemoryHostPointerInfoEXT & setPHostPointer( void * pHostPointer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pHostPointer = pHostPointer_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy views as the C struct; valid only because of the mirrored layout.
+    operator VkImportMemoryHostPointerInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImportMemoryHostPointerInfoEXT*>( this );
+    }
+
+    operator VkImportMemoryHostPointerInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImportMemoryHostPointerInfoEXT*>( this );
+    }
+
+    // Tuple of references to all members, used for reflection-based comparison.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits const &, void * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, handleType, pHostPointer );
+    }
+#endif
+
+
+    // Defaulted three-way comparison when available, otherwise member-wise ==/!=.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ImportMemoryHostPointerInfoEXT const & ) const = default;
+#else
+    bool operator==( ImportMemoryHostPointerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleType == rhs.handleType )
+          && ( pHostPointer == rhs.pHostPointer );
+#endif
+    }
+
+    bool operator!=( ImportMemoryHostPointerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Public members, in C-struct declaration order.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryHostPointerInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
+    void * pHostPointer = {};
+
+  };
+
+  // Maps StructureType::eImportMemoryHostPointerInfoEXT back to its C++ wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eImportMemoryHostPointerInfoEXT>
+  {
+    using Type = ImportMemoryHostPointerInfoEXT;
+  };
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  // C++ wrapper for VkImportMemoryWin32HandleInfoKHR.  The conversion operators
+  // below reinterpret_cast this object to the C struct, so the member layout
+  // (sType, pNext, handleType, handle, name) must match the C definition exactly.
+  struct ImportMemoryWin32HandleInfoKHR
+  {
+    using NativeType = VkImportMemoryWin32HandleInfoKHR;
+
+    // NOTE(review): presumably whether this sType may appear more than once in
+    // a structure chain -- confirm against vulkan.hpp StructureChain docs.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryWin32HandleInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoKHR(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, HANDLE handle_ = {}, LPCWSTR name_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), handleType( handleType_ ), handle( handle_ ), name( name_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoKHR( ImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct (bitwise copy; layouts are identical).
+    ImportMemoryWin32HandleInfoKHR( VkImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImportMemoryWin32HandleInfoKHR( *reinterpret_cast<ImportMemoryWin32HandleInfoKHR const *>( &rhs ) )
+    {}
+
+
+    ImportMemoryWin32HandleInfoKHR & operator=( ImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (bitwise copy; layouts are identical).
+    ImportMemoryWin32HandleInfoKHR & operator=( VkImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    // Chainable setters; compiled out when VULKAN_HPP_NO_STRUCT_SETTERS is defined.
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoKHR & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handle = handle_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT
+    {
+      name = name_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy views as the C struct; valid only because of the mirrored layout.
+    operator VkImportMemoryWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImportMemoryWin32HandleInfoKHR*>( this );
+    }
+
+    operator VkImportMemoryWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImportMemoryWin32HandleInfoKHR*>( this );
+    }
+
+    // Tuple of references to all members, used for reflection-based comparison.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits const &, HANDLE const &, LPCWSTR const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, handleType, handle, name );
+    }
+#endif
+
+
+    // Defaulted three-way comparison when available, otherwise member-wise ==/!=.
+    // Note: `name` (LPCWSTR) is compared as a pointer, not by string contents.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ImportMemoryWin32HandleInfoKHR const & ) const = default;
+#else
+    bool operator==( ImportMemoryWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleType == rhs.handleType )
+          && ( handle == rhs.handle )
+          && ( name == rhs.name );
+#endif
+    }
+
+    bool operator!=( ImportMemoryWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Public members, in C-struct declaration order.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryWin32HandleInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
+    HANDLE handle = {};
+    LPCWSTR name = {};
+
+  };
+
+  // Maps StructureType::eImportMemoryWin32HandleInfoKHR back to its C++ wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eImportMemoryWin32HandleInfoKHR>
+  {
+    using Type = ImportMemoryWin32HandleInfoKHR;
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  // C++ wrapper for VkImportMemoryWin32HandleInfoNV (NV-extension variant; note
+  // handleType is the Flags type here, not FlagBits).  The conversion operators
+  // below reinterpret_cast this object to the C struct, so the member layout
+  // (sType, pNext, handleType, handle) must match the C definition exactly.
+  struct ImportMemoryWin32HandleInfoNV
+  {
+    using NativeType = VkImportMemoryWin32HandleInfoNV;
+
+    // NOTE(review): presumably whether this sType may appear more than once in
+    // a structure chain -- confirm against vulkan.hpp StructureChain docs.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryWin32HandleInfoNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoNV(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType_ = {}, HANDLE handle_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), handleType( handleType_ ), handle( handle_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoNV( ImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct (bitwise copy; layouts are identical).
+    ImportMemoryWin32HandleInfoNV( VkImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImportMemoryWin32HandleInfoNV( *reinterpret_cast<ImportMemoryWin32HandleInfoNV const *>( &rhs ) )
+    {}
+
+
+    ImportMemoryWin32HandleInfoNV & operator=( ImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (bitwise copy; layouts are identical).
+    ImportMemoryWin32HandleInfoNV & operator=( VkImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoNV const *>( &rhs );
+      return *this;
+    }
+
+    // Chainable setters; compiled out when VULKAN_HPP_NO_STRUCT_SETTERS is defined.
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoNV & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoNV & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handle = handle_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy views as the C struct; valid only because of the mirrored layout.
+    operator VkImportMemoryWin32HandleInfoNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImportMemoryWin32HandleInfoNV*>( this );
+    }
+
+    operator VkImportMemoryWin32HandleInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImportMemoryWin32HandleInfoNV*>( this );
+    }
+
+    // Tuple of references to all members, used for reflection-based comparison.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV const &, HANDLE const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, handleType, handle );
+    }
+#endif
+
+
+    // Defaulted three-way comparison when available, otherwise member-wise ==/!=.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ImportMemoryWin32HandleInfoNV const & ) const = default;
+#else
+    bool operator==( ImportMemoryWin32HandleInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleType == rhs.handleType )
+          && ( handle == rhs.handle );
+#endif
+    }
+
+    bool operator!=( ImportMemoryWin32HandleInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Public members, in C-struct declaration order.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryWin32HandleInfoNV;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType = {};
+    HANDLE handle = {};
+
+  };
+
+  // Maps StructureType::eImportMemoryWin32HandleInfoNV back to its C++ wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eImportMemoryWin32HandleInfoNV>
+  {
+    using Type = ImportMemoryWin32HandleInfoNV;
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  // C++ wrapper for VkImportMemoryZirconHandleInfoFUCHSIA.  The conversion
+  // operators below reinterpret_cast this object to the C struct, so the member
+  // layout (sType, pNext, handleType, handle) must match the C definition exactly.
+  struct ImportMemoryZirconHandleInfoFUCHSIA
+  {
+    using NativeType = VkImportMemoryZirconHandleInfoFUCHSIA;
+
+    // NOTE(review): presumably whether this sType may appear more than once in
+    // a structure chain -- confirm against vulkan.hpp StructureChain docs.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryZirconHandleInfoFUCHSIA;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ImportMemoryZirconHandleInfoFUCHSIA(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, zx_handle_t handle_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), handleType( handleType_ ), handle( handle_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImportMemoryZirconHandleInfoFUCHSIA( ImportMemoryZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct (bitwise copy; layouts are identical).
+    ImportMemoryZirconHandleInfoFUCHSIA( VkImportMemoryZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImportMemoryZirconHandleInfoFUCHSIA( *reinterpret_cast<ImportMemoryZirconHandleInfoFUCHSIA const *>( &rhs ) )
+    {}
+
+
+    ImportMemoryZirconHandleInfoFUCHSIA & operator=( ImportMemoryZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (bitwise copy; layouts are identical).
+    ImportMemoryZirconHandleInfoFUCHSIA & operator=( VkImportMemoryZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMemoryZirconHandleInfoFUCHSIA const *>( &rhs );
+      return *this;
+    }
+
+    // Chainable setters; compiled out when VULKAN_HPP_NO_STRUCT_SETTERS is defined.
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ImportMemoryZirconHandleInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImportMemoryZirconHandleInfoFUCHSIA & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImportMemoryZirconHandleInfoFUCHSIA & setHandle( zx_handle_t handle_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handle = handle_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy views as the C struct; valid only because of the mirrored layout.
+    operator VkImportMemoryZirconHandleInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImportMemoryZirconHandleInfoFUCHSIA*>( this );
+    }
+
+    operator VkImportMemoryZirconHandleInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImportMemoryZirconHandleInfoFUCHSIA*>( this );
+    }
+
+    // Tuple of references to all members, used for reflection-based comparison.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits const &, zx_handle_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, handleType, handle );
+    }
+#endif
+
+
+    // Unlike the neighbouring structs, <=> is hand-written here (not defaulted)
+    // and ==/!= are defined unconditionally: the opaque zx_handle_t member is
+    // compared bytewise with memcmp rather than with <=>/==.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    std::strong_ordering operator<=>( ImportMemoryZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
+      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
+      if ( auto cmp = handleType <=> rhs.handleType; cmp != 0 ) return cmp;
+      // memcmp yields an int; map its sign onto strong_ordering.
+      if ( auto cmp = memcmp( &handle, &rhs.handle, sizeof( zx_handle_t ) ); cmp != 0 )
+        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
+
+      return std::strong_ordering::equivalent;
+    }
+#endif
+
+    bool operator==( ImportMemoryZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleType == rhs.handleType )
+          && ( memcmp( &handle, &rhs.handle, sizeof( zx_handle_t ) ) == 0 );
+    }
+
+    bool operator!=( ImportMemoryZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+    // Public members, in C-struct declaration order.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryZirconHandleInfoFUCHSIA;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
+    zx_handle_t handle = {};
+
+  };
+
+  // Maps StructureType::eImportMemoryZirconHandleInfoFUCHSIA back to its C++ wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eImportMemoryZirconHandleInfoFUCHSIA>
+  {
+    using Type = ImportMemoryZirconHandleInfoFUCHSIA;
+  };
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+  // C++ wrapper for VkImportMetalBufferInfoEXT.  The conversion operators below
+  // reinterpret_cast this object to the C struct, so the member layout
+  // (sType, pNext, mtlBuffer) must match the C definition exactly.
+  struct ImportMetalBufferInfoEXT
+  {
+    using NativeType = VkImportMetalBufferInfoEXT;
+
+    // NOTE(review): presumably whether this sType may appear more than once in
+    // a structure chain -- confirm against vulkan.hpp StructureChain docs.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMetalBufferInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ImportMetalBufferInfoEXT(MTLBuffer_id mtlBuffer_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), mtlBuffer( mtlBuffer_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImportMetalBufferInfoEXT( ImportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct (bitwise copy; layouts are identical).
+    ImportMetalBufferInfoEXT( VkImportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImportMetalBufferInfoEXT( *reinterpret_cast<ImportMetalBufferInfoEXT const *>( &rhs ) )
+    {}
+
+
+    ImportMetalBufferInfoEXT & operator=( ImportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (bitwise copy; layouts are identical).
+    ImportMetalBufferInfoEXT & operator=( VkImportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMetalBufferInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    // Chainable setters; compiled out when VULKAN_HPP_NO_STRUCT_SETTERS is defined.
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ImportMetalBufferInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImportMetalBufferInfoEXT & setMtlBuffer( MTLBuffer_id mtlBuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mtlBuffer = mtlBuffer_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy views as the C struct; valid only because of the mirrored layout.
+    operator VkImportMetalBufferInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImportMetalBufferInfoEXT*>( this );
+    }
+
+    operator VkImportMetalBufferInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImportMetalBufferInfoEXT*>( this );
+    }
+
+    // Tuple of references to all members, used for reflection-based comparison.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, MTLBuffer_id const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, mtlBuffer );
+    }
+#endif
+
+
+    // Defaulted three-way comparison when available, otherwise member-wise ==/!=.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ImportMetalBufferInfoEXT const & ) const = default;
+#else
+    bool operator==( ImportMetalBufferInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( mtlBuffer == rhs.mtlBuffer );
+#endif
+    }
+
+    bool operator!=( ImportMetalBufferInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Public members, in C-struct declaration order.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMetalBufferInfoEXT;
+    const void * pNext = {};
+    MTLBuffer_id mtlBuffer = {};
+
+  };
+
+  // Maps StructureType::eImportMetalBufferInfoEXT back to its C++ wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eImportMetalBufferInfoEXT>
+  {
+    using Type = ImportMetalBufferInfoEXT;
+  };
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+  // C++ wrapper for VkImportMetalIOSurfaceInfoEXT.  The conversion operators
+  // below reinterpret_cast this object to the C struct, so the member layout
+  // (sType, pNext, ioSurface) must match the C definition exactly.
+  struct ImportMetalIOSurfaceInfoEXT
+  {
+    using NativeType = VkImportMetalIOSurfaceInfoEXT;
+
+    // NOTE(review): presumably whether this sType may appear more than once in
+    // a structure chain -- confirm against vulkan.hpp StructureChain docs.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMetalIoSurfaceInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ImportMetalIOSurfaceInfoEXT(IOSurfaceRef ioSurface_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), ioSurface( ioSurface_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImportMetalIOSurfaceInfoEXT( ImportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct (bitwise copy; layouts are identical).
+    ImportMetalIOSurfaceInfoEXT( VkImportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImportMetalIOSurfaceInfoEXT( *reinterpret_cast<ImportMetalIOSurfaceInfoEXT const *>( &rhs ) )
+    {}
+
+
+    ImportMetalIOSurfaceInfoEXT & operator=( ImportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (bitwise copy; layouts are identical).
+    ImportMetalIOSurfaceInfoEXT & operator=( VkImportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMetalIOSurfaceInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    // Chainable setters; compiled out when VULKAN_HPP_NO_STRUCT_SETTERS is defined.
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ImportMetalIOSurfaceInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImportMetalIOSurfaceInfoEXT & setIoSurface( IOSurfaceRef ioSurface_ ) VULKAN_HPP_NOEXCEPT
+    {
+      ioSurface = ioSurface_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy views as the C struct; valid only because of the mirrored layout.
+    operator VkImportMetalIOSurfaceInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImportMetalIOSurfaceInfoEXT*>( this );
+    }
+
+    operator VkImportMetalIOSurfaceInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImportMetalIOSurfaceInfoEXT*>( this );
+    }
+
+    // Tuple of references to all members, used for reflection-based comparison.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, IOSurfaceRef const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, ioSurface );
+    }
+#endif
+
+
+    // Defaulted three-way comparison when available, otherwise member-wise ==/!=.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ImportMetalIOSurfaceInfoEXT const & ) const = default;
+#else
+    bool operator==( ImportMetalIOSurfaceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( ioSurface == rhs.ioSurface );
+#endif
+    }
+
+    bool operator!=( ImportMetalIOSurfaceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Public members, in C-struct declaration order.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMetalIoSurfaceInfoEXT;
+    const void * pNext = {};
+    IOSurfaceRef ioSurface = {};
+
+  };
+
+  // Maps StructureType::eImportMetalIoSurfaceInfoEXT back to its C++ wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eImportMetalIoSurfaceInfoEXT>
+  {
+    using Type = ImportMetalIOSurfaceInfoEXT;
+  };
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+  // C++ wrapper for VkImportMetalSharedEventInfoEXT.  The conversion operators
+  // below reinterpret_cast this object to the C struct, so the member layout
+  // (sType, pNext, mtlSharedEvent) must match the C definition exactly.
+  struct ImportMetalSharedEventInfoEXT
+  {
+    using NativeType = VkImportMetalSharedEventInfoEXT;
+
+    // NOTE(review): presumably whether this sType may appear more than once in
+    // a structure chain -- confirm against vulkan.hpp StructureChain docs.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMetalSharedEventInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ImportMetalSharedEventInfoEXT(MTLSharedEvent_id mtlSharedEvent_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), mtlSharedEvent( mtlSharedEvent_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImportMetalSharedEventInfoEXT( ImportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct (bitwise copy; layouts are identical).
+    ImportMetalSharedEventInfoEXT( VkImportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImportMetalSharedEventInfoEXT( *reinterpret_cast<ImportMetalSharedEventInfoEXT const *>( &rhs ) )
+    {}
+
+
+    ImportMetalSharedEventInfoEXT & operator=( ImportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (bitwise copy; layouts are identical).
+    ImportMetalSharedEventInfoEXT & operator=( VkImportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMetalSharedEventInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    // Chainable setters; compiled out when VULKAN_HPP_NO_STRUCT_SETTERS is defined.
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ImportMetalSharedEventInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImportMetalSharedEventInfoEXT & setMtlSharedEvent( MTLSharedEvent_id mtlSharedEvent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mtlSharedEvent = mtlSharedEvent_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy views as the C struct; valid only because of the mirrored layout.
+    operator VkImportMetalSharedEventInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImportMetalSharedEventInfoEXT*>( this );
+    }
+
+    operator VkImportMetalSharedEventInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImportMetalSharedEventInfoEXT*>( this );
+    }
+
+    // Tuple of references to all members, used for reflection-based comparison.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, MTLSharedEvent_id const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, mtlSharedEvent );
+    }
+#endif
+
+
+    // Defaulted three-way comparison when available, otherwise member-wise ==/!=.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ImportMetalSharedEventInfoEXT const & ) const = default;
+#else
+    bool operator==( ImportMetalSharedEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( mtlSharedEvent == rhs.mtlSharedEvent );
+#endif
+    }
+
+    bool operator!=( ImportMetalSharedEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Public members, in C-struct declaration order.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMetalSharedEventInfoEXT;
+    const void * pNext = {};
+    MTLSharedEvent_id mtlSharedEvent = {};
+
+  };
+
+  // Maps StructureType::eImportMetalSharedEventInfoEXT back to its C++ wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eImportMetalSharedEventInfoEXT>
+  {
+    using Type = ImportMetalSharedEventInfoEXT;
+  };
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+  // C++ wrapper around VkImportMetalTextureInfoEXT: imports an existing
+  // MTLTexture object for one image plane when creating a VkImage.
+  struct ImportMetalTextureInfoEXT
+  {
+    using NativeType = VkImportMetalTextureInfoEXT;
+
+    static const bool allowDuplicate = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMetalTextureInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; every member is defaultable.
+    VULKAN_HPP_CONSTEXPR ImportMetalTextureInfoEXT( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits plane_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor,
+                                                    MTLTexture_id mtlTexture_ = {},
+                                                    const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , plane( plane_ )
+      , mtlTexture( mtlTexture_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImportMetalTextureInfoEXT( ImportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; the two types are layout-compatible.
+    ImportMetalTextureInfoEXT( VkImportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImportMetalTextureInfoEXT( *reinterpret_cast<ImportMetalTextureInfoEXT const *>( &rhs ) )
+    {}
+
+    ImportMetalTextureInfoEXT & operator=( ImportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct by reinterpreting it as the C++ wrapper.
+    ImportMetalTextureInfoEXT & operator=( VkImportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMetalTextureInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters, one per member; each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 ImportMetalTextureInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 ImportMetalTextureInfoEXT & setPlane( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits plane_ ) VULKAN_HPP_NOEXCEPT { plane = plane_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 ImportMetalTextureInfoEXT & setMtlTexture( MTLTexture_id mtlTexture_ ) VULKAN_HPP_NOEXCEPT { mtlTexture = mtlTexture_; return *this; }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    // Zero-copy conversions to the C struct (reinterpretation only).
+    operator VkImportMetalTextureInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkImportMetalTextureInfoEXT *>( this ); }
+
+    operator VkImportMetalTextureInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkImportMetalTextureInfoEXT *>( this ); }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageAspectFlagBits const &, MTLTexture_id const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, plane, mtlTexture );
+    }
+#endif
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ImportMetalTextureInfoEXT const & ) const = default;
+#else
+    bool operator==( ImportMetalTextureInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      // Memberwise comparison, in declaration order.
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( plane == rhs.plane ) && ( mtlTexture == rhs.mtlTexture );
+#endif
+    }
+
+    bool operator!=( ImportMetalTextureInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !( *this == rhs ); }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMetalTextureInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ImageAspectFlagBits plane = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor;
+    MTLTexture_id mtlTexture = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eImportMetalTextureInfoEXT>
+  {
+    using Type = ImportMetalTextureInfoEXT;
+  };
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  // C++ wrapper around VkImportSemaphoreFdInfoKHR: imports a POSIX file
+  // descriptor payload into an existing semaphore (vkImportSemaphoreFdKHR).
+  struct ImportSemaphoreFdInfoKHR
+  {
+    using NativeType = VkImportSemaphoreFdInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportSemaphoreFdInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; every member is defaultable.
+    VULKAN_HPP_CONSTEXPR ImportSemaphoreFdInfoKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {},
+                                                   VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = {},
+                                                   VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd,
+                                                   int fd_ = {},
+                                                   const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , semaphore( semaphore_ )
+      , flags( flags_ )
+      , handleType( handleType_ )
+      , fd( fd_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImportSemaphoreFdInfoKHR( ImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; the two types are layout-compatible.
+    ImportSemaphoreFdInfoKHR( VkImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImportSemaphoreFdInfoKHR( *reinterpret_cast<ImportSemaphoreFdInfoKHR const *>( &rhs ) )
+    {}
+
+    ImportSemaphoreFdInfoKHR & operator=( ImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct by reinterpreting it as the C++ wrapper.
+    ImportSemaphoreFdInfoKHR & operator=( VkImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters, one per member; each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT { semaphore = semaphore_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR & setFd( int fd_ ) VULKAN_HPP_NOEXCEPT { fd = fd_; return *this; }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    // Zero-copy conversions to the C struct (reinterpretation only).
+    operator VkImportSemaphoreFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkImportSemaphoreFdInfoKHR *>( this ); }
+
+    operator VkImportSemaphoreFdInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkImportSemaphoreFdInfoKHR *>( this ); }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Semaphore const &, VULKAN_HPP_NAMESPACE::SemaphoreImportFlags const &, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits const &, int const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, semaphore, flags, handleType, fd );
+    }
+#endif
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ImportSemaphoreFdInfoKHR const & ) const = default;
+#else
+    bool operator==( ImportSemaphoreFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      // Memberwise comparison, in declaration order.
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( flags == rhs.flags ) && ( handleType == rhs.handleType ) && ( fd == rhs.fd );
+#endif
+    }
+
+    bool operator!=( ImportSemaphoreFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !( *this == rhs ); }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportSemaphoreFdInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
+    VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags = {};
+    VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
+    int fd = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eImportSemaphoreFdInfoKHR>
+  {
+    using Type = ImportSemaphoreFdInfoKHR;
+  };
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  // C++ wrapper around VkImportSemaphoreWin32HandleInfoKHR: imports a Win32
+  // handle (or named object) payload into an existing semaphore
+  // (vkImportSemaphoreWin32HandleKHR).
+  struct ImportSemaphoreWin32HandleInfoKHR
+  {
+    using NativeType = VkImportSemaphoreWin32HandleInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportSemaphoreWin32HandleInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; every member is defaultable.
+    VULKAN_HPP_CONSTEXPR ImportSemaphoreWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {},
+                                                            VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = {},
+                                                            VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd,
+                                                            HANDLE handle_ = {},
+                                                            LPCWSTR name_ = {},
+                                                            const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , semaphore( semaphore_ )
+      , flags( flags_ )
+      , handleType( handleType_ )
+      , handle( handle_ )
+      , name( name_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImportSemaphoreWin32HandleInfoKHR( ImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; the two types are layout-compatible.
+    ImportSemaphoreWin32HandleInfoKHR( VkImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImportSemaphoreWin32HandleInfoKHR( *reinterpret_cast<ImportSemaphoreWin32HandleInfoKHR const *>( &rhs ) )
+    {}
+
+    ImportSemaphoreWin32HandleInfoKHR & operator=( ImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct by reinterpreting it as the C++ wrapper.
+    ImportSemaphoreWin32HandleInfoKHR & operator=( VkImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters, one per member; each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT { semaphore = semaphore_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT { handle = handle_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT { name = name_; return *this; }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    // Zero-copy conversions to the C struct (reinterpretation only).
+    operator VkImportSemaphoreWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR *>( this ); }
+
+    operator VkImportSemaphoreWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkImportSemaphoreWin32HandleInfoKHR *>( this ); }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Semaphore const &, VULKAN_HPP_NAMESPACE::SemaphoreImportFlags const &, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits const &, HANDLE const &, LPCWSTR const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, semaphore, flags, handleType, handle, name );
+    }
+#endif
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ImportSemaphoreWin32HandleInfoKHR const & ) const = default;
+#else
+    bool operator==( ImportSemaphoreWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      // Memberwise comparison, in declaration order; note that `name` is
+      // compared by pointer value, not by wide-string contents.
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( flags == rhs.flags ) && ( handleType == rhs.handleType ) && ( handle == rhs.handle ) && ( name == rhs.name );
+#endif
+    }
+
+    bool operator!=( ImportSemaphoreWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !( *this == rhs ); }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportSemaphoreWin32HandleInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
+    VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags = {};
+    VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
+    HANDLE handle = {};
+    LPCWSTR name = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eImportSemaphoreWin32HandleInfoKHR>
+  {
+    using Type = ImportSemaphoreWin32HandleInfoKHR;
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  // C++ wrapper around VkImportSemaphoreZirconHandleInfoFUCHSIA: imports a
+  // Zircon handle payload into an existing semaphore
+  // (vkImportSemaphoreZirconHandleFUCHSIA).
+  struct ImportSemaphoreZirconHandleInfoFUCHSIA
+  {
+    using NativeType = VkImportSemaphoreZirconHandleInfoFUCHSIA;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportSemaphoreZirconHandleInfoFUCHSIA;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; every member is defaultable.
+VULKAN_HPP_CONSTEXPR ImportSemaphoreZirconHandleInfoFUCHSIA(VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = {}, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, zx_handle_t zirconHandle_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), semaphore( semaphore_ ), flags( flags_ ), handleType( handleType_ ), zirconHandle( zirconHandle_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImportSemaphoreZirconHandleInfoFUCHSIA( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; the two types are layout-compatible.
+    ImportSemaphoreZirconHandleInfoFUCHSIA( VkImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImportSemaphoreZirconHandleInfoFUCHSIA( *reinterpret_cast<ImportSemaphoreZirconHandleInfoFUCHSIA const *>( &rhs ) )
+    {}
+
+    ImportSemaphoreZirconHandleInfoFUCHSIA & operator=( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct by reinterpreting it as the C++ wrapper.
+    ImportSemaphoreZirconHandleInfoFUCHSIA & operator=( VkImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters, one per member; each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
+    {
+      semaphore = semaphore_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA & setZirconHandle( zx_handle_t zirconHandle_ ) VULKAN_HPP_NOEXCEPT
+    {
+      zirconHandle = zirconHandle_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    // Zero-copy conversions to the C struct (reinterpretation only).
+    operator VkImportSemaphoreZirconHandleInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImportSemaphoreZirconHandleInfoFUCHSIA*>( this );
+    }
+
+    operator VkImportSemaphoreZirconHandleInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImportSemaphoreZirconHandleInfoFUCHSIA*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Semaphore const &, VULKAN_HPP_NAMESPACE::SemaphoreImportFlags const &, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits const &, zx_handle_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, semaphore, flags, handleType, zirconHandle );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    // Defaulted three-way comparison: memberwise, in declaration order (this
+    // also implicitly declares a defaulted operator==). It replaces a
+    // hand-rolled operator<=> that ordered zirconHandle via memcmp over its
+    // object representation — an ordering that depends on byte endianness
+    // and disagrees with the scalar comparison used for every other member —
+    // and whose companion operator== ignored VULKAN_HPP_USE_REFLECT, unlike
+    // all sibling structs in this file.
+auto operator<=>( ImportSemaphoreZirconHandleInfoFUCHSIA const & ) const = default;
+#else
+    bool operator==( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( semaphore == rhs.semaphore )
+          && ( flags == rhs.flags )
+          && ( handleType == rhs.handleType )
+          && ( zirconHandle == rhs.zirconHandle );
+#endif
+    }
+
+    bool operator!=( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportSemaphoreZirconHandleInfoFUCHSIA;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
+    VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags = {};
+    VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
+    zx_handle_t zirconHandle = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eImportSemaphoreZirconHandleInfoFUCHSIA>
+  {
+    using Type = ImportSemaphoreZirconHandleInfoFUCHSIA;
+  };
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  // C++ wrapper around VkIndirectCommandsLayoutTokenNV: describes a single
+  // token of an indirect commands layout. pIndexTypes / pIndexTypeValues are
+  // parallel arrays of length indexTypeCount.
+  struct IndirectCommandsLayoutTokenNV
+  {
+    using NativeType = VkIndirectCommandsLayoutTokenNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectCommandsLayoutTokenNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; every member is defaultable.
+VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutTokenNV(VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType_ = VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV::eShaderGroup, uint32_t stream_ = {}, uint32_t offset_ = {}, uint32_t vertexBindingUnit_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags_ = {}, uint32_t pushconstantOffset_ = {}, uint32_t pushconstantSize_ = {}, VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags_ = {}, uint32_t indexTypeCount_ = {}, const VULKAN_HPP_NAMESPACE::IndexType * pIndexTypes_ = {}, const uint32_t * pIndexTypeValues_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), tokenType( tokenType_ ), stream( stream_ ), offset( offset_ ), vertexBindingUnit( vertexBindingUnit_ ), vertexDynamicStride( vertexDynamicStride_ ), pushconstantPipelineLayout( pushconstantPipelineLayout_ ), pushconstantShaderStageFlags( pushconstantShaderStageFlags_ ), pushconstantOffset( pushconstantOffset_ ), pushconstantSize( pushconstantSize_ ), indirectStateFlags( indirectStateFlags_ ), indexTypeCount( indexTypeCount_ ), pIndexTypes( pIndexTypes_ ), pIndexTypeValues( pIndexTypeValues_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutTokenNV( IndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; the two types are layout-compatible.
+    IndirectCommandsLayoutTokenNV( VkIndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : IndirectCommandsLayoutTokenNV( *reinterpret_cast<IndirectCommandsLayoutTokenNV const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Enhanced-mode constructor: derives indexTypeCount / pIndexTypes /
+    // pIndexTypeValues from array proxies. The two proxies are parallel
+    // arrays and must have equal size; with VULKAN_HPP_NO_EXCEPTIONS this is
+    // only asserted, otherwise a mismatch throws LogicError.
+    IndirectCommandsLayoutTokenNV( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType_, uint32_t stream_, uint32_t offset_, uint32_t vertexBindingUnit_, VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride_, VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout_, VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags_, uint32_t pushconstantOffset_, uint32_t pushconstantSize_, VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndexType> const & indexTypes_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & indexTypeValues_ = {}, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), tokenType( tokenType_ ), stream( stream_ ), offset( offset_ ), vertexBindingUnit( vertexBindingUnit_ ), vertexDynamicStride( vertexDynamicStride_ ), pushconstantPipelineLayout( pushconstantPipelineLayout_ ), pushconstantShaderStageFlags( pushconstantShaderStageFlags_ ), pushconstantOffset( pushconstantOffset_ ), pushconstantSize( pushconstantSize_ ), indirectStateFlags( indirectStateFlags_ ), indexTypeCount( static_cast<uint32_t>( indexTypes_.size() ) ), pIndexTypes( indexTypes_.data() ), pIndexTypeValues( indexTypeValues_.data() )
+    {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+      VULKAN_HPP_ASSERT( indexTypes_.size() == indexTypeValues_.size() );
+#else
+      if ( indexTypes_.size() != indexTypeValues_.size() )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING"::IndirectCommandsLayoutTokenNV::IndirectCommandsLayoutTokenNV: indexTypes_.size() != indexTypeValues_.size()" );
+      }
+#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    IndirectCommandsLayoutTokenNV & operator=( IndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct by reinterpreting it as the C++ wrapper.
+    IndirectCommandsLayoutTokenNV & operator=( VkIndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters, one per member; each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setTokenType( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tokenType = tokenType_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setStream( uint32_t stream_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stream = stream_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setVertexBindingUnit( uint32_t vertexBindingUnit_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexBindingUnit = vertexBindingUnit_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setVertexDynamicStride( VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexDynamicStride = vertexDynamicStride_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPushconstantPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pushconstantPipelineLayout = pushconstantPipelineLayout_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPushconstantShaderStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pushconstantShaderStageFlags = pushconstantShaderStageFlags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPushconstantOffset( uint32_t pushconstantOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pushconstantOffset = pushconstantOffset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPushconstantSize( uint32_t pushconstantSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pushconstantSize = pushconstantSize_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setIndirectStateFlags( VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      indirectStateFlags = indirectStateFlags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setIndexTypeCount( uint32_t indexTypeCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      indexTypeCount = indexTypeCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPIndexTypes( const VULKAN_HPP_NAMESPACE::IndexType * pIndexTypes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pIndexTypes = pIndexTypes_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Array-proxy setter: overwrites BOTH indexTypeCount and pIndexTypes.
+    IndirectCommandsLayoutTokenNV & setIndexTypes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndexType> const & indexTypes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      indexTypeCount = static_cast<uint32_t>( indexTypes_.size() );
+      pIndexTypes = indexTypes_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPIndexTypeValues( const uint32_t * pIndexTypeValues_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pIndexTypeValues = pIndexTypeValues_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Array-proxy setter: note this ALSO overwrites indexTypeCount (shared
+    // count of the parallel pIndexTypes array), so callers must supply
+    // arrays of equal length.
+    IndirectCommandsLayoutTokenNV & setIndexTypeValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & indexTypeValues_ ) VULKAN_HPP_NOEXCEPT
+    {
+      indexTypeCount = static_cast<uint32_t>( indexTypeValues_.size() );
+      pIndexTypeValues = indexTypeValues_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy conversions to the C struct (reinterpretation only).
+    operator VkIndirectCommandsLayoutTokenNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkIndirectCommandsLayoutTokenNV*>( this );
+    }
+
+    operator VkIndirectCommandsLayoutTokenNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkIndirectCommandsLayoutTokenNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::PipelineLayout const &, VULKAN_HPP_NAMESPACE::ShaderStageFlags const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::IndexType * const &, const uint32_t * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, tokenType, stream, offset, vertexBindingUnit, vertexDynamicStride, pushconstantPipelineLayout, pushconstantShaderStageFlags, pushconstantOffset, pushconstantSize, indirectStateFlags, indexTypeCount, pIndexTypes, pIndexTypeValues );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( IndirectCommandsLayoutTokenNV const & ) const = default;
+#else
+    bool operator==( IndirectCommandsLayoutTokenNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      // Memberwise comparison; the pIndexTypes / pIndexTypeValues members
+      // are compared by pointer value, not by array contents.
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( tokenType == rhs.tokenType )
+          && ( stream == rhs.stream )
+          && ( offset == rhs.offset )
+          && ( vertexBindingUnit == rhs.vertexBindingUnit )
+          && ( vertexDynamicStride == rhs.vertexDynamicStride )
+          && ( pushconstantPipelineLayout == rhs.pushconstantPipelineLayout )
+          && ( pushconstantShaderStageFlags == rhs.pushconstantShaderStageFlags )
+          && ( pushconstantOffset == rhs.pushconstantOffset )
+          && ( pushconstantSize == rhs.pushconstantSize )
+          && ( indirectStateFlags == rhs.indirectStateFlags )
+          && ( indexTypeCount == rhs.indexTypeCount )
+          && ( pIndexTypes == rhs.pIndexTypes )
+          && ( pIndexTypeValues == rhs.pIndexTypeValues );
+#endif
+    }
+
+    bool operator!=( IndirectCommandsLayoutTokenNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectCommandsLayoutTokenNV;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType = VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV::eShaderGroup;
+    uint32_t stream = {};
+    uint32_t offset = {};
+    uint32_t vertexBindingUnit = {};
+    VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride = {};
+    VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout = {};
+    VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags = {};
+    uint32_t pushconstantOffset = {};
+    uint32_t pushconstantSize = {};
+    VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags = {};
+    uint32_t indexTypeCount = {};
+    const VULKAN_HPP_NAMESPACE::IndexType * pIndexTypes = {};
+    const uint32_t * pIndexTypeValues = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eIndirectCommandsLayoutTokenNV>
+  {
+    using Type = IndirectCommandsLayoutTokenNV;
+  };
+
+  // C++ projection of VkIndirectCommandsLayoutCreateInfoNV (VK_NV_device_generated_commands).
+  // Member order and types mirror the C struct exactly; that identical layout is
+  // what makes the reinterpret_cast-based conversions below well-defined.
+  struct IndirectCommandsLayoutCreateInfoNV
+  {
+    using NativeType = VkIndirectCommandsLayoutCreateInfoNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectCommandsLayoutCreateInfoNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutCreateInfoNV(VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags_ = {}, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, uint32_t tokenCount_ = {}, const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV * pTokens_ = {}, uint32_t streamCount_ = {}, const uint32_t * pStreamStrides_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), pipelineBindPoint( pipelineBindPoint_ ), tokenCount( tokenCount_ ), pTokens( pTokens_ ), streamCount( streamCount_ ), pStreamStrides( pStreamStrides_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutCreateInfoNV( IndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    IndirectCommandsLayoutCreateInfoNV( VkIndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : IndirectCommandsLayoutCreateInfoNV( *reinterpret_cast<IndirectCommandsLayoutCreateInfoNV const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Convenience constructor: derives tokenCount/streamCount from the array proxies.
+    IndirectCommandsLayoutCreateInfoNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags_, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV> const & tokens_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & streamStrides_ = {}, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), flags( flags_ ), pipelineBindPoint( pipelineBindPoint_ ), tokenCount( static_cast<uint32_t>( tokens_.size() ) ), pTokens( tokens_.data() ), streamCount( static_cast<uint32_t>( streamStrides_.size() ) ), pStreamStrides( streamStrides_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    IndirectCommandsLayoutCreateInfoNV & operator=( IndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    IndirectCommandsLayoutCreateInfoNV & operator=( VkIndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each returns *this so calls can be chained.
+    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineBindPoint = pipelineBindPoint_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & setTokenCount( uint32_t tokenCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tokenCount = tokenCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & setPTokens( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV * pTokens_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pTokens = pTokens_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Proxy setter: updates tokenCount and pTokens together.
+    IndirectCommandsLayoutCreateInfoNV & setTokens( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV> const & tokens_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tokenCount = static_cast<uint32_t>( tokens_.size() );
+      pTokens = tokens_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & setStreamCount( uint32_t streamCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      streamCount = streamCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & setPStreamStrides( const uint32_t * pStreamStrides_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pStreamStrides = pStreamStrides_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Proxy setter: updates streamCount and pStreamStrides together.
+    IndirectCommandsLayoutCreateInfoNV & setStreamStrides( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & streamStrides_ ) VULKAN_HPP_NOEXCEPT
+    {
+      streamCount = static_cast<uint32_t>( streamStrides_.size() );
+      pStreamStrides = streamStrides_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the C struct; valid because layouts are identical.
+    operator VkIndirectCommandsLayoutCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV*>( this );
+    }
+
+    operator VkIndirectCommandsLayoutCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkIndirectCommandsLayoutCreateInfoNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Reference-tuple view of all members, used for generic comparison below.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV const &, VULKAN_HPP_NAMESPACE::PipelineBindPoint const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV * const &, uint32_t const &, const uint32_t * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, pipelineBindPoint, tokenCount, pTokens, streamCount, pStreamStrides );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( IndirectCommandsLayoutCreateInfoNV const & ) const = default;
+#else
+    // Shallow memberwise equality: pointer members are compared by address, not
+    // by pointed-to contents.
+    bool operator==( IndirectCommandsLayoutCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( pipelineBindPoint == rhs.pipelineBindPoint )
+          && ( tokenCount == rhs.tokenCount )
+          && ( pTokens == rhs.pTokens )
+          && ( streamCount == rhs.streamCount )
+          && ( pStreamStrides == rhs.pStreamStrides );
+#endif
+    }
+
+    bool operator!=( IndirectCommandsLayoutCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectCommandsLayoutCreateInfoNV;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags = {};
+    VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
+    uint32_t tokenCount = {};
+    const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV * pTokens = {};
+    uint32_t streamCount = {};
+    const uint32_t * pStreamStrides = {};
+
+  };
+
+  // Maps StructureType::eIndirectCommandsLayoutCreateInfoNV back to its C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::eIndirectCommandsLayoutCreateInfoNV>
+  {
+    using Type = IndirectCommandsLayoutCreateInfoNV;
+  };
+
+  // C++ projection of VkInitializePerformanceApiInfoINTEL (VK_INTEL_performance_query).
+  // Layout-identical to the C struct, which validates the reinterpret_cast
+  // conversions below.
+  struct InitializePerformanceApiInfoINTEL
+  {
+    using NativeType = VkInitializePerformanceApiInfoINTEL;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eInitializePerformanceApiInfoINTEL;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR InitializePerformanceApiInfoINTEL(void * pUserData_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), pUserData( pUserData_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR InitializePerformanceApiInfoINTEL( InitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    InitializePerformanceApiInfoINTEL( VkInitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+      : InitializePerformanceApiInfoINTEL( *reinterpret_cast<InitializePerformanceApiInfoINTEL const *>( &rhs ) )
+    {}
+
+
+    InitializePerformanceApiInfoINTEL & operator=( InitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    InitializePerformanceApiInfoINTEL & operator=( VkInitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each returns *this so calls can be chained.
+    VULKAN_HPP_CONSTEXPR_14 InitializePerformanceApiInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 InitializePerformanceApiInfoINTEL & setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pUserData = pUserData_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the C struct; valid because layouts are identical.
+    operator VkInitializePerformanceApiInfoINTEL const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkInitializePerformanceApiInfoINTEL*>( this );
+    }
+
+    operator VkInitializePerformanceApiInfoINTEL &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkInitializePerformanceApiInfoINTEL*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, void * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, pUserData );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( InitializePerformanceApiInfoINTEL const & ) const = default;
+#else
+    // Shallow memberwise equality (pointers compared by address).
+    bool operator==( InitializePerformanceApiInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pUserData == rhs.pUserData );
+#endif
+    }
+
+    bool operator!=( InitializePerformanceApiInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eInitializePerformanceApiInfoINTEL;
+    const void * pNext = {};
+    void * pUserData = {};
+
+  };
+
+  // Maps StructureType::eInitializePerformanceApiInfoINTEL back to its C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::eInitializePerformanceApiInfoINTEL>
+  {
+    using Type = InitializePerformanceApiInfoINTEL;
+  };
+
+  // C++ projection of VkInputAttachmentAspectReference. A plain struct: no
+  // sType/pNext members, hence no structureType constant or setters for them.
+  struct InputAttachmentAspectReference
+  {
+    using NativeType = VkInputAttachmentAspectReference;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR InputAttachmentAspectReference(uint32_t subpass_ = {}, uint32_t inputAttachmentIndex_ = {}, VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}) VULKAN_HPP_NOEXCEPT
+    : subpass( subpass_ ), inputAttachmentIndex( inputAttachmentIndex_ ), aspectMask( aspectMask_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR InputAttachmentAspectReference( InputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    InputAttachmentAspectReference( VkInputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT
+      : InputAttachmentAspectReference( *reinterpret_cast<InputAttachmentAspectReference const *>( &rhs ) )
+    {}
+
+
+    InputAttachmentAspectReference & operator=( InputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    InputAttachmentAspectReference & operator=( VkInputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each returns *this so calls can be chained.
+    VULKAN_HPP_CONSTEXPR_14 InputAttachmentAspectReference & setSubpass( uint32_t subpass_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subpass = subpass_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 InputAttachmentAspectReference & setInputAttachmentIndex( uint32_t inputAttachmentIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      inputAttachmentIndex = inputAttachmentIndex_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 InputAttachmentAspectReference & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      aspectMask = aspectMask_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the C struct; valid because layouts are identical.
+    operator VkInputAttachmentAspectReference const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkInputAttachmentAspectReference*>( this );
+    }
+
+    operator VkInputAttachmentAspectReference &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkInputAttachmentAspectReference*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ImageAspectFlags const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( subpass, inputAttachmentIndex, aspectMask );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( InputAttachmentAspectReference const & ) const = default;
+#else
+    bool operator==( InputAttachmentAspectReference const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( subpass == rhs.subpass )
+          && ( inputAttachmentIndex == rhs.inputAttachmentIndex )
+          && ( aspectMask == rhs.aspectMask );
+#endif
+    }
+
+    bool operator!=( InputAttachmentAspectReference const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    uint32_t subpass = {};
+    uint32_t inputAttachmentIndex = {};
+    VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
+
+  };
+  // KHR alias from the pre-promotion VK_KHR_maintenance2 extension name.
+  using InputAttachmentAspectReferenceKHR = InputAttachmentAspectReference;
+
+  // C++ projection of VkInstanceCreateInfo. Layout-identical to the C struct,
+  // which validates the reinterpret_cast conversions below.
+  //
+  // Unlike most generated structs, the comparison operators here are
+  // content-aware for the layer/extension name arrays: they strcmp the strings
+  // rather than comparing the char* addresses. Both operators treat a null
+  // entry as equal only to another null entry and as ordering before any
+  // non-null string, so strcmp is never invoked with a null argument (which
+  // would be undefined behavior).
+  struct InstanceCreateInfo
+  {
+    using NativeType = VkInstanceCreateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eInstanceCreateInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR InstanceCreateInfo(VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_ = {}, const VULKAN_HPP_NAMESPACE::ApplicationInfo * pApplicationInfo_ = {}, uint32_t enabledLayerCount_ = {}, const char * const * ppEnabledLayerNames_ = {}, uint32_t enabledExtensionCount_ = {}, const char * const * ppEnabledExtensionNames_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), pApplicationInfo( pApplicationInfo_ ), enabledLayerCount( enabledLayerCount_ ), ppEnabledLayerNames( ppEnabledLayerNames_ ), enabledExtensionCount( enabledExtensionCount_ ), ppEnabledExtensionNames( ppEnabledExtensionNames_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR InstanceCreateInfo( InstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    InstanceCreateInfo( VkInstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : InstanceCreateInfo( *reinterpret_cast<InstanceCreateInfo const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Convenience constructor: derives the two counts from the array proxies.
+    InstanceCreateInfo( VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_, const VULKAN_HPP_NAMESPACE::ApplicationInfo * pApplicationInfo_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledLayerNames_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledExtensionNames_ = {}, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), flags( flags_ ), pApplicationInfo( pApplicationInfo_ ), enabledLayerCount( static_cast<uint32_t>( pEnabledLayerNames_.size() ) ), ppEnabledLayerNames( pEnabledLayerNames_.data() ), enabledExtensionCount( static_cast<uint32_t>( pEnabledExtensionNames_.size() ) ), ppEnabledExtensionNames( pEnabledExtensionNames_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    InstanceCreateInfo & operator=( InstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    InstanceCreateInfo & operator=( VkInstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::InstanceCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each returns *this so calls can be chained.
+    VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setPApplicationInfo( const VULKAN_HPP_NAMESPACE::ApplicationInfo * pApplicationInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pApplicationInfo = pApplicationInfo_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setEnabledLayerCount( uint32_t enabledLayerCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      enabledLayerCount = enabledLayerCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setPpEnabledLayerNames( const char * const * ppEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT
+    {
+      ppEnabledLayerNames = ppEnabledLayerNames_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Proxy setter: updates enabledLayerCount and ppEnabledLayerNames together.
+    InstanceCreateInfo & setPEnabledLayerNames( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT
+    {
+      enabledLayerCount = static_cast<uint32_t>( pEnabledLayerNames_.size() );
+      ppEnabledLayerNames = pEnabledLayerNames_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setEnabledExtensionCount( uint32_t enabledExtensionCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      enabledExtensionCount = enabledExtensionCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setPpEnabledExtensionNames( const char * const * ppEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT
+    {
+      ppEnabledExtensionNames = ppEnabledExtensionNames_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Proxy setter: updates enabledExtensionCount and ppEnabledExtensionNames together.
+    InstanceCreateInfo & setPEnabledExtensionNames( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT
+    {
+      enabledExtensionCount = static_cast<uint32_t>( pEnabledExtensionNames_.size() );
+      ppEnabledExtensionNames = pEnabledExtensionNames_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the C struct; valid because layouts are identical.
+    operator VkInstanceCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkInstanceCreateInfo*>( this );
+    }
+
+    operator VkInstanceCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkInstanceCreateInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::InstanceCreateFlags const &, const VULKAN_HPP_NAMESPACE::ApplicationInfo * const &, uint32_t const &, const char * const * const &, uint32_t const &, const char * const * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, pApplicationInfo, enabledLayerCount, ppEnabledLayerNames, enabledExtensionCount, ppEnabledExtensionNames );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    // Content-aware ordering: name arrays are compared element-wise by strcmp.
+    // Precondition (from the Vulkan spec): the array pointers themselves are
+    // non-null whenever the corresponding count is non-zero.
+    std::strong_ordering operator<=>( InstanceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
+      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
+      if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) return cmp;
+      if ( auto cmp = pApplicationInfo <=> rhs.pApplicationInfo; cmp != 0 ) return cmp;
+      if ( auto cmp = enabledLayerCount <=> rhs.enabledLayerCount; cmp != 0 ) return cmp;
+      for ( size_t i = 0; i < enabledLayerCount; ++i )
+      {
+        if ( ppEnabledLayerNames[i] != rhs.ppEnabledLayerNames[i] )
+        {
+          // A null entry orders before any non-null string; never strcmp a null.
+          if ( !ppEnabledLayerNames[i] || !rhs.ppEnabledLayerNames[i] )
+            return ppEnabledLayerNames[i] ? std::strong_ordering::greater : std::strong_ordering::less;
+          if ( auto cmp = strcmp( ppEnabledLayerNames[i], rhs.ppEnabledLayerNames[i] ); cmp != 0 )
+            return cmp < 0 ? std::strong_ordering::less : std::strong_ordering::greater;
+        }
+      }
+      if ( auto cmp = enabledExtensionCount <=> rhs.enabledExtensionCount; cmp != 0 ) return cmp;
+      for ( size_t i = 0; i < enabledExtensionCount; ++i )
+      {
+        if ( ppEnabledExtensionNames[i] != rhs.ppEnabledExtensionNames[i] )
+        {
+          // Same null-before-non-null rule as for the layer names above.
+          if ( !ppEnabledExtensionNames[i] || !rhs.ppEnabledExtensionNames[i] )
+            return ppEnabledExtensionNames[i] ? std::strong_ordering::greater : std::strong_ordering::less;
+          if ( auto cmp = strcmp( ppEnabledExtensionNames[i], rhs.ppEnabledExtensionNames[i] ); cmp != 0 )
+            return cmp < 0 ? std::strong_ordering::less : std::strong_ordering::greater;
+        }
+      }
+
+      return std::strong_ordering::equivalent;
+    }
+#endif
+
+    // Content-aware equality: equal addresses (including both-null) short-circuit;
+    // otherwise both entries must be non-null and strcmp-equal.
+    bool operator==( InstanceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( pApplicationInfo == rhs.pApplicationInfo )
+          && ( enabledLayerCount == rhs.enabledLayerCount )
+          && std::equal( ppEnabledLayerNames, ppEnabledLayerNames + enabledLayerCount, rhs.ppEnabledLayerNames, []( char const * left, char const * right ) { return ( left == right ) || ( left && right && ( strcmp( left, right ) == 0 ) ); } )
+          && ( enabledExtensionCount == rhs.enabledExtensionCount )
+          && std::equal( ppEnabledExtensionNames, ppEnabledExtensionNames + enabledExtensionCount, rhs.ppEnabledExtensionNames, []( char const * left, char const * right ) { return ( left == right ) || ( left && right && ( strcmp( left, right ) == 0 ) ); } );
+    }
+
+    bool operator!=( InstanceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eInstanceCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags = {};
+    const VULKAN_HPP_NAMESPACE::ApplicationInfo * pApplicationInfo = {};
+    uint32_t enabledLayerCount = {};
+    const char * const * ppEnabledLayerNames = {};
+    uint32_t enabledExtensionCount = {};
+    const char * const * ppEnabledExtensionNames = {};
+
+  };
+
+  // Maps StructureType::eInstanceCreateInfo back to its C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::eInstanceCreateInfo>
+  {
+    using Type = InstanceCreateInfo;
+  };
+
+  // C++ projection of VkLayerProperties (returned by vkEnumerateInstanceLayerProperties).
+  // Read-only result struct: no sType, no setters. The char arrays are wrapped
+  // in ArrayWrapper1D, whose operator== gives the value comparison used below.
+  struct LayerProperties
+  {
+    using NativeType = VkLayerProperties;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR_14 LayerProperties(std::array<char,VK_MAX_EXTENSION_NAME_SIZE> const & layerName_ = {}, uint32_t specVersion_ = {}, uint32_t implementationVersion_ = {}, std::array<char,VK_MAX_DESCRIPTION_SIZE> const & description_ = {}) VULKAN_HPP_NOEXCEPT
+    : layerName( layerName_ ), specVersion( specVersion_ ), implementationVersion( implementationVersion_ ), description( description_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 LayerProperties( LayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    LayerProperties( VkLayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : LayerProperties( *reinterpret_cast<LayerProperties const *>( &rhs ) )
+    {}
+
+
+    LayerProperties & operator=( LayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    LayerProperties & operator=( VkLayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::LayerProperties const *>( &rhs );
+      return *this;
+    }
+
+
+    // Implicit conversions to the C struct; valid because layouts are identical.
+    operator VkLayerProperties const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkLayerProperties*>( this );
+    }
+
+    operator VkLayerProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkLayerProperties*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( layerName, specVersion, implementationVersion, description );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( LayerProperties const & ) const = default;
+#else
+    bool operator==( LayerProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( layerName == rhs.layerName )
+          && ( specVersion == rhs.specVersion )
+          && ( implementationVersion == rhs.implementationVersion )
+          && ( description == rhs.description );
+#endif
+    }
+
+    bool operator!=( LayerProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> layerName = {};
+    uint32_t specVersion = {};
+    uint32_t implementationVersion = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
+
+  };
+
+#if defined( VK_USE_PLATFORM_MACOS_MVK )
+  // C++ projection of VkMacOSSurfaceCreateInfoMVK (VK_MVK_macos_surface);
+  // only compiled for macOS platform builds.
+  struct MacOSSurfaceCreateInfoMVK
+  {
+    using NativeType = VkMacOSSurfaceCreateInfoMVK;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMacosSurfaceCreateInfoMVK;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR MacOSSurfaceCreateInfoMVK(VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags_ = {}, const void * pView_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), pView( pView_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MacOSSurfaceCreateInfoMVK( MacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MacOSSurfaceCreateInfoMVK( VkMacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MacOSSurfaceCreateInfoMVK( *reinterpret_cast<MacOSSurfaceCreateInfoMVK const *>( &rhs ) )
+    {}
+
+
+    MacOSSurfaceCreateInfoMVK & operator=( MacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    MacOSSurfaceCreateInfoMVK & operator=( VkMacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each returns *this so calls can be chained.
+    VULKAN_HPP_CONSTEXPR_14 MacOSSurfaceCreateInfoMVK & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MacOSSurfaceCreateInfoMVK & setFlags( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MacOSSurfaceCreateInfoMVK & setPView( const void * pView_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pView = pView_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the C struct; valid because layouts are identical.
+    operator VkMacOSSurfaceCreateInfoMVK const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK*>( this );
+    }
+
+    operator VkMacOSSurfaceCreateInfoMVK &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMacOSSurfaceCreateInfoMVK*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK const &, const void * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, pView );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( MacOSSurfaceCreateInfoMVK const & ) const = default;
+#else
+    // Shallow memberwise equality (pView compared by address).
+    bool operator==( MacOSSurfaceCreateInfoMVK const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( pView == rhs.pView );
+#endif
+    }
+
+    bool operator!=( MacOSSurfaceCreateInfoMVK const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMacosSurfaceCreateInfoMVK;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags = {};
+    const void * pView = {};
+
+  };
+
+  // Maps StructureType::eMacosSurfaceCreateInfoMVK back to its C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::eMacosSurfaceCreateInfoMVK>
+  {
+    using Type = MacOSSurfaceCreateInfoMVK;
+  };
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+
+  struct MappedMemoryRange
+  {
+    using NativeType = VkMappedMemoryRange;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMappedMemoryRange;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR MappedMemoryRange(VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), memory( memory_ ), offset( offset_ ), size( size_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MappedMemoryRange( MappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MappedMemoryRange( VkMappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MappedMemoryRange( *reinterpret_cast<MappedMemoryRange const *>( &rhs ) )
+    {}
+
+
+    MappedMemoryRange & operator=( MappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    MappedMemoryRange & operator=( VkMappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MappedMemoryRange const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 MappedMemoryRange & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MappedMemoryRange & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memory = memory_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MappedMemoryRange & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MappedMemoryRange & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
+    {
+      size = size_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkMappedMemoryRange const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMappedMemoryRange*>( this );
+    }
+
+    operator VkMappedMemoryRange &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMappedMemoryRange*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceMemory const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, memory, offset, size );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( MappedMemoryRange const & ) const = default;
+#else
+    bool operator==( MappedMemoryRange const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memory == rhs.memory )
+          && ( offset == rhs.offset )
+          && ( size == rhs.size );
+#endif
+    }
+
+    bool operator!=( MappedMemoryRange const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMappedMemoryRange;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+
+  };
+
+  // Maps the sType enumerant eMappedMemoryRange back to its C++ wrapper type,
+  // so generic template code (e.g. structure-chain handling) can recover the
+  // wrapper struct from a StructureType value.
+  template <>
+  struct CppType<StructureType, StructureType::eMappedMemoryRange>
+  {
+    using Type = MappedMemoryRange;
+  };
+
+  // C++ wrapper for VkMemoryAllocateFlagsInfo. Layout-compatible with the C
+  // struct (the conversion operators below simply reinterpret_cast this
+  // object), with constexpr construction and chainable set*() mutators.
+  struct MemoryAllocateFlagsInfo
+  {
+    using NativeType = VkMemoryAllocateFlagsInfo;
+
+    // At most one instance of this struct may appear in a pNext chain.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryAllocateFlagsInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR MemoryAllocateFlagsInfo(VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags_ = {}, uint32_t deviceMask_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), deviceMask( deviceMask_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MemoryAllocateFlagsInfo( MemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; safe because layouts are identical.
+    MemoryAllocateFlagsInfo( VkMemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MemoryAllocateFlagsInfo( *reinterpret_cast<MemoryAllocateFlagsInfo const *>( &rhs ) )
+    {}
+
+
+    MemoryAllocateFlagsInfo & operator=( MemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    MemoryAllocateFlagsInfo & operator=( VkMemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 MemoryAllocateFlagsInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MemoryAllocateFlagsInfo & setFlags( VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MemoryAllocateFlagsInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceMask = deviceMask_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the native C type; valid only because this
+    // wrapper has the exact same memory layout as VkMemoryAllocateFlagsInfo.
+    operator VkMemoryAllocateFlagsInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryAllocateFlagsInfo*>( this );
+    }
+
+    operator VkMemoryAllocateFlagsInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryAllocateFlagsInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::MemoryAllocateFlags const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, deviceMask );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( MemoryAllocateFlagsInfo const & ) const = default;
+#else
+    bool operator==( MemoryAllocateFlagsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( deviceMask == rhs.deviceMask );
+#endif
+    }
+
+    bool operator!=( MemoryAllocateFlagsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryAllocateFlagsInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags = {};
+    uint32_t deviceMask = {};
+
+  };
+
+  // Enumerant-to-type lookup for generic structure-chain code.
+  template <>
+  struct CppType<StructureType, StructureType::eMemoryAllocateFlagsInfo>
+  {
+    using Type = MemoryAllocateFlagsInfo;
+  };
+  // Alias for the struct's original KHR extension name (promoted to core).
+  using MemoryAllocateFlagsInfoKHR = MemoryAllocateFlagsInfo;
+
+  // C++ wrapper for VkMemoryAllocateInfo (input to vkAllocateMemory).
+  // Layout-compatible with the C struct; provides constexpr construction and
+  // chainable set*() mutators.
+  struct MemoryAllocateInfo
+  {
+    using NativeType = VkMemoryAllocateInfo;
+
+    // At most one instance of this struct may appear in a pNext chain.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryAllocateInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR MemoryAllocateInfo(VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ = {}, uint32_t memoryTypeIndex_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), allocationSize( allocationSize_ ), memoryTypeIndex( memoryTypeIndex_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MemoryAllocateInfo( MemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; safe because layouts are identical.
+    MemoryAllocateInfo( VkMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MemoryAllocateInfo( *reinterpret_cast<MemoryAllocateInfo const *>( &rhs ) )
+    {}
+
+
+    MemoryAllocateInfo & operator=( MemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    MemoryAllocateInfo & operator=( VkMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryAllocateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 MemoryAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MemoryAllocateInfo & setAllocationSize( VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      allocationSize = allocationSize_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MemoryAllocateInfo & setMemoryTypeIndex( uint32_t memoryTypeIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memoryTypeIndex = memoryTypeIndex_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the native C type (layout-compatible cast).
+    operator VkMemoryAllocateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryAllocateInfo*>( this );
+    }
+
+    operator VkMemoryAllocateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryAllocateInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, allocationSize, memoryTypeIndex );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( MemoryAllocateInfo const & ) const = default;
+#else
+    bool operator==( MemoryAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( allocationSize == rhs.allocationSize )
+          && ( memoryTypeIndex == rhs.memoryTypeIndex );
+#endif
+    }
+
+    bool operator!=( MemoryAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryAllocateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize allocationSize = {};
+    uint32_t memoryTypeIndex = {};
+
+  };
+
+  // Enumerant-to-type lookup for generic structure-chain code.
+  template <>
+  struct CppType<StructureType, StructureType::eMemoryAllocateInfo>
+  {
+    using Type = MemoryAllocateInfo;
+  };
+
+  // C++ wrapper for VkMemoryBarrier (global memory barrier used with
+  // vkCmdPipelineBarrier). Layout-compatible with the C struct; provides
+  // constexpr construction and chainable set*() mutators.
+  struct MemoryBarrier
+  {
+    using NativeType = VkMemoryBarrier;
+
+    // At most one instance of this struct may appear in a pNext chain.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryBarrier;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR MemoryBarrier(VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), srcAccessMask( srcAccessMask_ ), dstAccessMask( dstAccessMask_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MemoryBarrier( MemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; safe because layouts are identical.
+    MemoryBarrier( VkMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MemoryBarrier( *reinterpret_cast<MemoryBarrier const *>( &rhs ) )
+    {}
+
+
+    MemoryBarrier & operator=( MemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    MemoryBarrier & operator=( VkMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryBarrier const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 MemoryBarrier & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcAccessMask = srcAccessMask_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstAccessMask = dstAccessMask_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the native C type (layout-compatible cast).
+    operator VkMemoryBarrier const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryBarrier*>( this );
+    }
+
+    operator VkMemoryBarrier &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryBarrier*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::AccessFlags const &, VULKAN_HPP_NAMESPACE::AccessFlags const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, srcAccessMask, dstAccessMask );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( MemoryBarrier const & ) const = default;
+#else
+    bool operator==( MemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( srcAccessMask == rhs.srcAccessMask )
+          && ( dstAccessMask == rhs.dstAccessMask );
+#endif
+    }
+
+    bool operator!=( MemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryBarrier;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {};
+    VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {};
+
+  };
+
+  // Enumerant-to-type lookup for generic structure-chain code.
+  template <>
+  struct CppType<StructureType, StructureType::eMemoryBarrier>
+  {
+    using Type = MemoryBarrier;
+  };
+
+  // C++ wrapper for VkMemoryDedicatedAllocateInfo (pNext extension of
+  // MemoryAllocateInfo requesting a dedicated allocation for an image or a
+  // buffer). Layout-compatible with the C struct.
+  struct MemoryDedicatedAllocateInfo
+  {
+    using NativeType = VkMemoryDedicatedAllocateInfo;
+
+    // At most one instance of this struct may appear in a pNext chain.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryDedicatedAllocateInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR MemoryDedicatedAllocateInfo(VULKAN_HPP_NAMESPACE::Image image_ = {}, VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), image( image_ ), buffer( buffer_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MemoryDedicatedAllocateInfo( MemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; safe because layouts are identical.
+    MemoryDedicatedAllocateInfo( VkMemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MemoryDedicatedAllocateInfo( *reinterpret_cast<MemoryDedicatedAllocateInfo const *>( &rhs ) )
+    {}
+
+
+    MemoryDedicatedAllocateInfo & operator=( MemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    MemoryDedicatedAllocateInfo & operator=( VkMemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 MemoryDedicatedAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MemoryDedicatedAllocateInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
+    {
+      image = image_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MemoryDedicatedAllocateInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the native C type (layout-compatible cast).
+    operator VkMemoryDedicatedAllocateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryDedicatedAllocateInfo*>( this );
+    }
+
+    operator VkMemoryDedicatedAllocateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryDedicatedAllocateInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Image const &, VULKAN_HPP_NAMESPACE::Buffer const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, image, buffer );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( MemoryDedicatedAllocateInfo const & ) const = default;
+#else
+    bool operator==( MemoryDedicatedAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( image == rhs.image )
+          && ( buffer == rhs.buffer );
+#endif
+    }
+
+    bool operator!=( MemoryDedicatedAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryDedicatedAllocateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Image image = {};
+    VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+
+  };
+
+  // Enumerant-to-type lookup for generic structure-chain code.
+  template <>
+  struct CppType<StructureType, StructureType::eMemoryDedicatedAllocateInfo>
+  {
+    using Type = MemoryDedicatedAllocateInfo;
+  };
+  // Alias for the struct's original KHR extension name (promoted to core).
+  using MemoryDedicatedAllocateInfoKHR = MemoryDedicatedAllocateInfo;
+
+  // C++ wrapper for VkMemoryDedicatedRequirements. This is an output-only
+  // structure (note the non-const pNext and the absence of generated setters):
+  // the implementation fills it in when it is chained into a requirements
+  // query. Layout-compatible with the C struct.
+  struct MemoryDedicatedRequirements
+  {
+    using NativeType = VkMemoryDedicatedRequirements;
+
+    // At most one instance of this struct may appear in a pNext chain.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryDedicatedRequirements;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR MemoryDedicatedRequirements(VULKAN_HPP_NAMESPACE::Bool32 prefersDedicatedAllocation_ = {}, VULKAN_HPP_NAMESPACE::Bool32 requiresDedicatedAllocation_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), prefersDedicatedAllocation( prefersDedicatedAllocation_ ), requiresDedicatedAllocation( requiresDedicatedAllocation_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MemoryDedicatedRequirements( MemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; safe because layouts are identical.
+    MemoryDedicatedRequirements( VkMemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MemoryDedicatedRequirements( *reinterpret_cast<MemoryDedicatedRequirements const *>( &rhs ) )
+    {}
+
+
+    MemoryDedicatedRequirements & operator=( MemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    MemoryDedicatedRequirements & operator=( VkMemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements const *>( &rhs );
+      return *this;
+    }
+
+
+    // Implicit conversions to the native C type (layout-compatible cast).
+    operator VkMemoryDedicatedRequirements const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryDedicatedRequirements*>( this );
+    }
+
+    operator VkMemoryDedicatedRequirements &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryDedicatedRequirements*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, prefersDedicatedAllocation, requiresDedicatedAllocation );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( MemoryDedicatedRequirements const & ) const = default;
+#else
+    bool operator==( MemoryDedicatedRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( prefersDedicatedAllocation == rhs.prefersDedicatedAllocation )
+          && ( requiresDedicatedAllocation == rhs.requiresDedicatedAllocation );
+#endif
+    }
+
+    bool operator!=( MemoryDedicatedRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryDedicatedRequirements;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 prefersDedicatedAllocation = {};
+    VULKAN_HPP_NAMESPACE::Bool32 requiresDedicatedAllocation = {};
+
+  };
+
+  // Enumerant-to-type lookup for generic structure-chain code.
+  template <>
+  struct CppType<StructureType, StructureType::eMemoryDedicatedRequirements>
+  {
+    using Type = MemoryDedicatedRequirements;
+  };
+  // Alias for the struct's original KHR extension name (promoted to core).
+  using MemoryDedicatedRequirementsKHR = MemoryDedicatedRequirements;
+
+  // C++ wrapper for VkMemoryFdPropertiesKHR. Output-only structure (non-const
+  // pNext, no generated setters) returned by vkGetMemoryFdPropertiesKHR.
+  // Layout-compatible with the C struct.
+  struct MemoryFdPropertiesKHR
+  {
+    using NativeType = VkMemoryFdPropertiesKHR;
+
+    // At most one instance of this struct may appear in a pNext chain.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryFdPropertiesKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR MemoryFdPropertiesKHR(uint32_t memoryTypeBits_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), memoryTypeBits( memoryTypeBits_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MemoryFdPropertiesKHR( MemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; safe because layouts are identical.
+    MemoryFdPropertiesKHR( VkMemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MemoryFdPropertiesKHR( *reinterpret_cast<MemoryFdPropertiesKHR const *>( &rhs ) )
+    {}
+
+
+    MemoryFdPropertiesKHR & operator=( MemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    MemoryFdPropertiesKHR & operator=( VkMemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR const *>( &rhs );
+      return *this;
+    }
+
+
+    // Implicit conversions to the native C type (layout-compatible cast).
+    operator VkMemoryFdPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryFdPropertiesKHR*>( this );
+    }
+
+    operator VkMemoryFdPropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryFdPropertiesKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, memoryTypeBits );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( MemoryFdPropertiesKHR const & ) const = default;
+#else
+    bool operator==( MemoryFdPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memoryTypeBits == rhs.memoryTypeBits );
+#endif
+    }
+
+    bool operator!=( MemoryFdPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryFdPropertiesKHR;
+    void * pNext = {};
+    uint32_t memoryTypeBits = {};
+
+  };
+
+  // Enumerant-to-type lookup for generic structure-chain code.
+  template <>
+  struct CppType<StructureType, StructureType::eMemoryFdPropertiesKHR>
+  {
+    using Type = MemoryFdPropertiesKHR;
+  };
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  // C++ wrapper for VkMemoryGetAndroidHardwareBufferInfoANDROID (input to
+  // vkGetMemoryAndroidHardwareBufferANDROID). Only compiled when the Android
+  // platform is enabled. Layout-compatible with the C struct.
+  struct MemoryGetAndroidHardwareBufferInfoANDROID
+  {
+    using NativeType = VkMemoryGetAndroidHardwareBufferInfoANDROID;
+
+    // At most one instance of this struct may appear in a pNext chain.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR MemoryGetAndroidHardwareBufferInfoANDROID(VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), memory( memory_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MemoryGetAndroidHardwareBufferInfoANDROID( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; safe because layouts are identical.
+    MemoryGetAndroidHardwareBufferInfoANDROID( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MemoryGetAndroidHardwareBufferInfoANDROID( *reinterpret_cast<MemoryGetAndroidHardwareBufferInfoANDROID const *>( &rhs ) )
+    {}
+
+
+    MemoryGetAndroidHardwareBufferInfoANDROID & operator=( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    MemoryGetAndroidHardwareBufferInfoANDROID & operator=( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 MemoryGetAndroidHardwareBufferInfoANDROID & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MemoryGetAndroidHardwareBufferInfoANDROID & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memory = memory_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the native C type (layout-compatible cast).
+    operator VkMemoryGetAndroidHardwareBufferInfoANDROID const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID*>( this );
+    }
+
+    operator VkMemoryGetAndroidHardwareBufferInfoANDROID &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryGetAndroidHardwareBufferInfoANDROID*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceMemory const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, memory );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( MemoryGetAndroidHardwareBufferInfoANDROID const & ) const = default;
+#else
+    bool operator==( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memory == rhs.memory );
+#endif
+    }
+
+    bool operator!=( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
+
+  };
+
+  // Enumerant-to-type lookup for generic structure-chain code.
+  template <>
+  struct CppType<StructureType, StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID>
+  {
+    using Type = MemoryGetAndroidHardwareBufferInfoANDROID;
+  };
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+  // C++ wrapper for VkMemoryGetFdInfoKHR (input to vkGetMemoryFdKHR for
+  // exporting device memory as a POSIX file descriptor). Layout-compatible
+  // with the C struct; handleType defaults to eOpaqueFd.
+  struct MemoryGetFdInfoKHR
+  {
+    using NativeType = VkMemoryGetFdInfoKHR;
+
+    // At most one instance of this struct may appear in a pNext chain.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetFdInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR MemoryGetFdInfoKHR(VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), memory( memory_ ), handleType( handleType_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MemoryGetFdInfoKHR( MemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; safe because layouts are identical.
+    MemoryGetFdInfoKHR( VkMemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MemoryGetFdInfoKHR( *reinterpret_cast<MemoryGetFdInfoKHR const *>( &rhs ) )
+    {}
+
+
+    MemoryGetFdInfoKHR & operator=( MemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    MemoryGetFdInfoKHR & operator=( VkMemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 MemoryGetFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MemoryGetFdInfoKHR & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memory = memory_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MemoryGetFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the native C type (layout-compatible cast).
+    operator VkMemoryGetFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryGetFdInfoKHR*>( this );
+    }
+
+    operator VkMemoryGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryGetFdInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceMemory const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, memory, handleType );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( MemoryGetFdInfoKHR const & ) const = default;
+#else
+    bool operator==( MemoryGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memory == rhs.memory )
+          && ( handleType == rhs.handleType );
+#endif
+    }
+
+    bool operator!=( MemoryGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetFdInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
+
+  };
+
+  // Enumerant-to-type lookup for generic structure-chain code.
+  template <>
+  struct CppType<StructureType, StructureType::eMemoryGetFdInfoKHR>
+  {
+    using Type = MemoryGetFdInfoKHR;
+  };
+
+  // C++ wrapper for VkMemoryGetRemoteAddressInfoNV (input to the
+  // VK_NV_external_memory_rdma remote-address query). Layout-compatible with
+  // the C struct; handleType defaults to eOpaqueFd.
+  struct MemoryGetRemoteAddressInfoNV
+  {
+    using NativeType = VkMemoryGetRemoteAddressInfoNV;
+
+    // At most one instance of this struct may appear in a pNext chain.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetRemoteAddressInfoNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR MemoryGetRemoteAddressInfoNV(VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), memory( memory_ ), handleType( handleType_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MemoryGetRemoteAddressInfoNV( MemoryGetRemoteAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; safe because layouts are identical.
+    MemoryGetRemoteAddressInfoNV( VkMemoryGetRemoteAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MemoryGetRemoteAddressInfoNV( *reinterpret_cast<MemoryGetRemoteAddressInfoNV const *>( &rhs ) )
+    {}
+
+
+    MemoryGetRemoteAddressInfoNV & operator=( MemoryGetRemoteAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    MemoryGetRemoteAddressInfoNV & operator=( VkMemoryGetRemoteAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 MemoryGetRemoteAddressInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MemoryGetRemoteAddressInfoNV & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memory = memory_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MemoryGetRemoteAddressInfoNV & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the native C type (layout-compatible cast).
+    operator VkMemoryGetRemoteAddressInfoNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryGetRemoteAddressInfoNV*>( this );
+    }
+
+    operator VkMemoryGetRemoteAddressInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryGetRemoteAddressInfoNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceMemory const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, memory, handleType );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( MemoryGetRemoteAddressInfoNV const & ) const = default;
+#else
+    bool operator==( MemoryGetRemoteAddressInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memory == rhs.memory )
+          && ( handleType == rhs.handleType );
+#endif
+    }
+
+    bool operator!=( MemoryGetRemoteAddressInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetRemoteAddressInfoNV;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eMemoryGetRemoteAddressInfoNV>
+  {
+    using Type = MemoryGetRemoteAddressInfoNV;
+  };
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  // C++ wrapper around VkMemoryGetWin32HandleInfoKHR (Win32-only; compiled only
+  // under VK_USE_PLATFORM_WIN32_KHR). Carries a device memory handle plus the
+  // external-memory handle type to export.
+  // NOTE(review): generated Vulkan-Hpp code — layout must stay identical to the
+  // C struct for the reinterpret_cast conversions below.
+  struct MemoryGetWin32HandleInfoKHR
+  {
+    using NativeType = VkMemoryGetWin32HandleInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetWin32HandleInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR MemoryGetWin32HandleInfoKHR(VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), memory( memory_ ), handleType( handleType_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MemoryGetWin32HandleInfoKHR( MemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; relies on the layout identity noted above.
+    MemoryGetWin32HandleInfoKHR( VkMemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MemoryGetWin32HandleInfoKHR( *reinterpret_cast<MemoryGetWin32HandleInfoKHR const *>( &rhs ) )
+    {}
+
+
+    MemoryGetWin32HandleInfoKHR & operator=( MemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    MemoryGetWin32HandleInfoKHR & operator=( VkMemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters: each returns *this for fluent composition.
+    VULKAN_HPP_CONSTEXPR_14 MemoryGetWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MemoryGetWin32HandleInfoKHR & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memory = memory_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MemoryGetWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views as the C struct for calls into the C API.
+    operator VkMemoryGetWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR*>( this );
+    }
+
+    operator VkMemoryGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryGetWin32HandleInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceMemory const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, memory, handleType );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( MemoryGetWin32HandleInfoKHR const & ) const = default;
+#else
+    // Member-wise equality; pNext compared as a raw pointer, not deeply.
+    bool operator==( MemoryGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memory == rhs.memory )
+          && ( handleType == rhs.handleType );
+#endif
+    }
+
+    bool operator!=( MemoryGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // Field-for-field mirror of the C struct; sType fixed to this struct's enum value.
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetWin32HandleInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
+
+  };
+
+  // Maps StructureType::eMemoryGetWin32HandleInfoKHR back to its C++ wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eMemoryGetWin32HandleInfoKHR>
+  {
+    using Type = MemoryGetWin32HandleInfoKHR;
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  // C++ wrapper around VkMemoryGetZirconHandleInfoFUCHSIA (Fuchsia-only; compiled
+  // only under VK_USE_PLATFORM_FUCHSIA). Carries a device memory handle plus the
+  // external-memory handle type to export.
+  // NOTE(review): generated Vulkan-Hpp code — layout must stay identical to the
+  // C struct for the reinterpret_cast conversions below.
+  struct MemoryGetZirconHandleInfoFUCHSIA
+  {
+    using NativeType = VkMemoryGetZirconHandleInfoFUCHSIA;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetZirconHandleInfoFUCHSIA;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR MemoryGetZirconHandleInfoFUCHSIA(VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), memory( memory_ ), handleType( handleType_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MemoryGetZirconHandleInfoFUCHSIA( MemoryGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; relies on the layout identity noted above.
+    MemoryGetZirconHandleInfoFUCHSIA( VkMemoryGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MemoryGetZirconHandleInfoFUCHSIA( *reinterpret_cast<MemoryGetZirconHandleInfoFUCHSIA const *>( &rhs ) )
+    {}
+
+
+    MemoryGetZirconHandleInfoFUCHSIA & operator=( MemoryGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    MemoryGetZirconHandleInfoFUCHSIA & operator=( VkMemoryGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters: each returns *this for fluent composition.
+    VULKAN_HPP_CONSTEXPR_14 MemoryGetZirconHandleInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MemoryGetZirconHandleInfoFUCHSIA & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memory = memory_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MemoryGetZirconHandleInfoFUCHSIA & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views as the C struct for calls into the C API.
+    operator VkMemoryGetZirconHandleInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryGetZirconHandleInfoFUCHSIA*>( this );
+    }
+
+    operator VkMemoryGetZirconHandleInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryGetZirconHandleInfoFUCHSIA*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceMemory const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, memory, handleType );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( MemoryGetZirconHandleInfoFUCHSIA const & ) const = default;
+#else
+    // Member-wise equality; pNext compared as a raw pointer, not deeply.
+    bool operator==( MemoryGetZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memory == rhs.memory )
+          && ( handleType == rhs.handleType );
+#endif
+    }
+
+    bool operator!=( MemoryGetZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // Field-for-field mirror of the C struct; sType fixed to this struct's enum value.
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetZirconHandleInfoFUCHSIA;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
+
+  };
+
+  // Maps StructureType::eMemoryGetZirconHandleInfoFUCHSIA back to its C++ wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eMemoryGetZirconHandleInfoFUCHSIA>
+  {
+    using Type = MemoryGetZirconHandleInfoFUCHSIA;
+  };
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  // C++ wrapper around VkMemoryHeap: a plain (non-sType, no pNext chain) struct
+  // holding a heap's size and its MemoryHeapFlags.
+  // NOTE(review): generated Vulkan-Hpp code — layout must stay identical to the
+  // C struct for the reinterpret_cast conversions below.
+  struct MemoryHeap
+  {
+    using NativeType = VkMemoryHeap;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR MemoryHeap(VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::MemoryHeapFlags flags_ = {}) VULKAN_HPP_NOEXCEPT
+    : size( size_ ), flags( flags_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MemoryHeap( MemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; relies on the layout identity noted above.
+    MemoryHeap( VkMemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MemoryHeap( *reinterpret_cast<MemoryHeap const *>( &rhs ) )
+    {}
+
+
+    MemoryHeap & operator=( MemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    MemoryHeap & operator=( VkMemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryHeap const *>( &rhs );
+      return *this;
+    }
+
+
+    // Zero-cost views as the C struct for calls into the C API.
+    operator VkMemoryHeap const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryHeap*>( this );
+    }
+
+    operator VkMemoryHeap &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryHeap*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::MemoryHeapFlags const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( size, flags );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( MemoryHeap const & ) const = default;
+#else
+    // Member-wise equality.
+    bool operator==( MemoryHeap const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( size == rhs.size )
+          && ( flags == rhs.flags );
+#endif
+    }
+
+    bool operator!=( MemoryHeap const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // Field-for-field mirror of VkMemoryHeap.
+    VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+    VULKAN_HPP_NAMESPACE::MemoryHeapFlags flags = {};
+
+  };
+
+  // C++ wrapper around VkMemoryHostPointerPropertiesEXT. Has a non-const void *
+  // pNext and no generated setters, suggesting it is filled in by the
+  // implementation rather than built by the caller (NOTE(review): inferred from
+  // shape; confirm against the Vulkan spec). Reports a memoryTypeBits mask.
+  // Generated Vulkan-Hpp code — layout must stay identical to the C struct.
+  struct MemoryHostPointerPropertiesEXT
+  {
+    using NativeType = VkMemoryHostPointerPropertiesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryHostPointerPropertiesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR MemoryHostPointerPropertiesEXT(uint32_t memoryTypeBits_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), memoryTypeBits( memoryTypeBits_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MemoryHostPointerPropertiesEXT( MemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; relies on the layout identity noted above.
+    MemoryHostPointerPropertiesEXT( VkMemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MemoryHostPointerPropertiesEXT( *reinterpret_cast<MemoryHostPointerPropertiesEXT const *>( &rhs ) )
+    {}
+
+
+    MemoryHostPointerPropertiesEXT & operator=( MemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    MemoryHostPointerPropertiesEXT & operator=( VkMemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+
+    // Zero-cost views as the C struct for calls into the C API.
+    operator VkMemoryHostPointerPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryHostPointerPropertiesEXT*>( this );
+    }
+
+    operator VkMemoryHostPointerPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryHostPointerPropertiesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, memoryTypeBits );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( MemoryHostPointerPropertiesEXT const & ) const = default;
+#else
+    // Member-wise equality; pNext compared as a raw pointer, not deeply.
+    bool operator==( MemoryHostPointerPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memoryTypeBits == rhs.memoryTypeBits );
+#endif
+    }
+
+    bool operator!=( MemoryHostPointerPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // Field-for-field mirror of the C struct; sType fixed to this struct's enum value.
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryHostPointerPropertiesEXT;
+    void * pNext = {};
+    uint32_t memoryTypeBits = {};
+
+  };
+
+  // Maps StructureType::eMemoryHostPointerPropertiesEXT back to its C++ wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eMemoryHostPointerPropertiesEXT>
+  {
+    using Type = MemoryHostPointerPropertiesEXT;
+  };
+
+  // C++ wrapper around VkMemoryOpaqueCaptureAddressAllocateInfo, carrying a
+  // 64-bit opaqueCaptureAddress on the allocation pNext chain.
+  // NOTE(review): generated Vulkan-Hpp code — layout must stay identical to the
+  // C struct for the reinterpret_cast conversions below.
+  struct MemoryOpaqueCaptureAddressAllocateInfo
+  {
+    using NativeType = VkMemoryOpaqueCaptureAddressAllocateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryOpaqueCaptureAddressAllocateInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR MemoryOpaqueCaptureAddressAllocateInfo(uint64_t opaqueCaptureAddress_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), opaqueCaptureAddress( opaqueCaptureAddress_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MemoryOpaqueCaptureAddressAllocateInfo( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; relies on the layout identity noted above.
+    MemoryOpaqueCaptureAddressAllocateInfo( VkMemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MemoryOpaqueCaptureAddressAllocateInfo( *reinterpret_cast<MemoryOpaqueCaptureAddressAllocateInfo const *>( &rhs ) )
+    {}
+
+
+    MemoryOpaqueCaptureAddressAllocateInfo & operator=( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    MemoryOpaqueCaptureAddressAllocateInfo & operator=( VkMemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters: each returns *this for fluent composition.
+    VULKAN_HPP_CONSTEXPR_14 MemoryOpaqueCaptureAddressAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MemoryOpaqueCaptureAddressAllocateInfo & setOpaqueCaptureAddress( uint64_t opaqueCaptureAddress_ ) VULKAN_HPP_NOEXCEPT
+    {
+      opaqueCaptureAddress = opaqueCaptureAddress_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views as the C struct for calls into the C API.
+    operator VkMemoryOpaqueCaptureAddressAllocateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryOpaqueCaptureAddressAllocateInfo*>( this );
+    }
+
+    operator VkMemoryOpaqueCaptureAddressAllocateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryOpaqueCaptureAddressAllocateInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint64_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, opaqueCaptureAddress );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( MemoryOpaqueCaptureAddressAllocateInfo const & ) const = default;
+#else
+    // Member-wise equality; pNext compared as a raw pointer, not deeply.
+    bool operator==( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( opaqueCaptureAddress == rhs.opaqueCaptureAddress );
+#endif
+    }
+
+    bool operator!=( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // Field-for-field mirror of the C struct; sType fixed to this struct's enum value.
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryOpaqueCaptureAddressAllocateInfo;
+    const void * pNext = {};
+    uint64_t opaqueCaptureAddress = {};
+
+  };
+
+  // Maps StructureType::eMemoryOpaqueCaptureAddressAllocateInfo back to its C++ wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eMemoryOpaqueCaptureAddressAllocateInfo>
+  {
+    using Type = MemoryOpaqueCaptureAddressAllocateInfo;
+  };
+  // Backward-compatible alias for the pre-promotion KHR name.
+  using MemoryOpaqueCaptureAddressAllocateInfoKHR = MemoryOpaqueCaptureAddressAllocateInfo;
+
+  // C++ wrapper around VkMemoryPriorityAllocateInfoEXT, carrying a float
+  // priority on the allocation pNext chain.
+  // NOTE(review): generated Vulkan-Hpp code — layout must stay identical to the
+  // C struct for the reinterpret_cast conversions below.
+  struct MemoryPriorityAllocateInfoEXT
+  {
+    using NativeType = VkMemoryPriorityAllocateInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryPriorityAllocateInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR MemoryPriorityAllocateInfoEXT(float priority_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), priority( priority_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MemoryPriorityAllocateInfoEXT( MemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; relies on the layout identity noted above.
+    MemoryPriorityAllocateInfoEXT( VkMemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MemoryPriorityAllocateInfoEXT( *reinterpret_cast<MemoryPriorityAllocateInfoEXT const *>( &rhs ) )
+    {}
+
+
+    MemoryPriorityAllocateInfoEXT & operator=( MemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    MemoryPriorityAllocateInfoEXT & operator=( VkMemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryPriorityAllocateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters: each returns *this for fluent composition.
+    VULKAN_HPP_CONSTEXPR_14 MemoryPriorityAllocateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MemoryPriorityAllocateInfoEXT & setPriority( float priority_ ) VULKAN_HPP_NOEXCEPT
+    {
+      priority = priority_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views as the C struct for calls into the C API.
+    operator VkMemoryPriorityAllocateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryPriorityAllocateInfoEXT*>( this );
+    }
+
+    operator VkMemoryPriorityAllocateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryPriorityAllocateInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, float const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, priority );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( MemoryPriorityAllocateInfoEXT const & ) const = default;
+#else
+    // Member-wise equality; note priority is compared with exact float ==.
+    bool operator==( MemoryPriorityAllocateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( priority == rhs.priority );
+#endif
+    }
+
+    bool operator!=( MemoryPriorityAllocateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // Field-for-field mirror of the C struct; sType fixed to this struct's enum value.
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryPriorityAllocateInfoEXT;
+    const void * pNext = {};
+    float priority = {};
+
+  };
+
+  // Maps StructureType::eMemoryPriorityAllocateInfoEXT back to its C++ wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eMemoryPriorityAllocateInfoEXT>
+  {
+    using Type = MemoryPriorityAllocateInfoEXT;
+  };
+
+  // C++ wrapper around VkMemoryRequirements: a plain (non-sType, no pNext chain)
+  // struct holding size, alignment, and the memoryTypeBits mask.
+  // NOTE(review): generated Vulkan-Hpp code — layout must stay identical to the
+  // C struct for the reinterpret_cast conversions below.
+  struct MemoryRequirements
+  {
+    using NativeType = VkMemoryRequirements;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR MemoryRequirements(VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize alignment_ = {}, uint32_t memoryTypeBits_ = {}) VULKAN_HPP_NOEXCEPT
+    : size( size_ ), alignment( alignment_ ), memoryTypeBits( memoryTypeBits_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MemoryRequirements( MemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; relies on the layout identity noted above.
+    MemoryRequirements( VkMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MemoryRequirements( *reinterpret_cast<MemoryRequirements const *>( &rhs ) )
+    {}
+
+
+    MemoryRequirements & operator=( MemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    MemoryRequirements & operator=( VkMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryRequirements const *>( &rhs );
+      return *this;
+    }
+
+
+    // Zero-cost views as the C struct for calls into the C API.
+    operator VkMemoryRequirements const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryRequirements*>( this );
+    }
+
+    operator VkMemoryRequirements &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryRequirements*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( size, alignment, memoryTypeBits );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( MemoryRequirements const & ) const = default;
+#else
+    // Member-wise equality.
+    bool operator==( MemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( size == rhs.size )
+          && ( alignment == rhs.alignment )
+          && ( memoryTypeBits == rhs.memoryTypeBits );
+#endif
+    }
+
+    bool operator!=( MemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // Field-for-field mirror of VkMemoryRequirements.
+    VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize alignment = {};
+    uint32_t memoryTypeBits = {};
+
+  };
+
+  // C++ wrapper around VkMemoryRequirements2: the sType/pNext-chained container
+  // that embeds a MemoryRequirements value (pNext is non-const void *, so the
+  // chain can be written by the implementation).
+  // NOTE(review): generated Vulkan-Hpp code — layout must stay identical to the
+  // C struct for the reinterpret_cast conversions below.
+  struct MemoryRequirements2
+  {
+    using NativeType = VkMemoryRequirements2;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryRequirements2;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR MemoryRequirements2(VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), memoryRequirements( memoryRequirements_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MemoryRequirements2( MemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; relies on the layout identity noted above.
+    MemoryRequirements2( VkMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MemoryRequirements2( *reinterpret_cast<MemoryRequirements2 const *>( &rhs ) )
+    {}
+
+
+    MemoryRequirements2 & operator=( MemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    MemoryRequirements2 & operator=( VkMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryRequirements2 const *>( &rhs );
+      return *this;
+    }
+
+
+    // Zero-cost views as the C struct for calls into the C API.
+    operator VkMemoryRequirements2 const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryRequirements2*>( this );
+    }
+
+    operator VkMemoryRequirements2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryRequirements2*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::MemoryRequirements const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, memoryRequirements );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( MemoryRequirements2 const & ) const = default;
+#else
+    // Member-wise equality; pNext compared as a raw pointer, not deeply.
+    bool operator==( MemoryRequirements2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memoryRequirements == rhs.memoryRequirements );
+#endif
+    }
+
+    bool operator!=( MemoryRequirements2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // Field-for-field mirror of the C struct; sType fixed to this struct's enum value.
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryRequirements2;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements = {};
+
+  };
+
+  // Maps StructureType::eMemoryRequirements2 back to its C++ wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eMemoryRequirements2>
+  {
+    using Type = MemoryRequirements2;
+  };
+  // Backward-compatible alias for the pre-promotion KHR name.
+  using MemoryRequirements2KHR = MemoryRequirements2;
+
+  // C++ wrapper around VkMemoryType: a plain (non-sType, no pNext chain) struct
+  // holding a memory type's property flags and the index of its heap.
+  // NOTE(review): generated Vulkan-Hpp code — layout must stay identical to the
+  // C struct for the reinterpret_cast conversions below.
+  struct MemoryType
+  {
+    using NativeType = VkMemoryType;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR MemoryType(VULKAN_HPP_NAMESPACE::MemoryPropertyFlags propertyFlags_ = {}, uint32_t heapIndex_ = {}) VULKAN_HPP_NOEXCEPT
+    : propertyFlags( propertyFlags_ ), heapIndex( heapIndex_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MemoryType( MemoryType const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; relies on the layout identity noted above.
+    MemoryType( VkMemoryType const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MemoryType( *reinterpret_cast<MemoryType const *>( &rhs ) )
+    {}
+
+
+    MemoryType & operator=( MemoryType const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    MemoryType & operator=( VkMemoryType const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryType const *>( &rhs );
+      return *this;
+    }
+
+
+    // Zero-cost views as the C struct for calls into the C API.
+    operator VkMemoryType const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryType*>( this );
+    }
+
+    operator VkMemoryType &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryType*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::MemoryPropertyFlags const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( propertyFlags, heapIndex );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( MemoryType const & ) const = default;
+#else
+    // Member-wise equality.
+    bool operator==( MemoryType const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( propertyFlags == rhs.propertyFlags )
+          && ( heapIndex == rhs.heapIndex );
+#endif
+    }
+
+    bool operator!=( MemoryType const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // Field-for-field mirror of VkMemoryType.
+    VULKAN_HPP_NAMESPACE::MemoryPropertyFlags propertyFlags = {};
+    uint32_t heapIndex = {};
+
+  };
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  // C++ wrapper around VkMemoryWin32HandlePropertiesKHR (Win32-only; compiled
+  // only under VK_USE_PLATFORM_WIN32_KHR). Non-const void * pNext and no
+  // generated setters, suggesting it is filled in by the implementation
+  // (NOTE(review): inferred from shape; confirm against the Vulkan spec).
+  // Generated Vulkan-Hpp code — layout must stay identical to the C struct.
+  struct MemoryWin32HandlePropertiesKHR
+  {
+    using NativeType = VkMemoryWin32HandlePropertiesKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryWin32HandlePropertiesKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR MemoryWin32HandlePropertiesKHR(uint32_t memoryTypeBits_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), memoryTypeBits( memoryTypeBits_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MemoryWin32HandlePropertiesKHR( MemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; relies on the layout identity noted above.
+    MemoryWin32HandlePropertiesKHR( VkMemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MemoryWin32HandlePropertiesKHR( *reinterpret_cast<MemoryWin32HandlePropertiesKHR const *>( &rhs ) )
+    {}
+
+
+    MemoryWin32HandlePropertiesKHR & operator=( MemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    MemoryWin32HandlePropertiesKHR & operator=( VkMemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR const *>( &rhs );
+      return *this;
+    }
+
+
+    // Zero-cost views as the C struct for calls into the C API.
+    operator VkMemoryWin32HandlePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryWin32HandlePropertiesKHR*>( this );
+    }
+
+    operator VkMemoryWin32HandlePropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryWin32HandlePropertiesKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, memoryTypeBits );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( MemoryWin32HandlePropertiesKHR const & ) const = default;
+#else
+    // Member-wise equality; pNext compared as a raw pointer, not deeply.
+    bool operator==( MemoryWin32HandlePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memoryTypeBits == rhs.memoryTypeBits );
+#endif
+    }
+
+    bool operator!=( MemoryWin32HandlePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // Field-for-field mirror of the C struct; sType fixed to this struct's enum value.
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryWin32HandlePropertiesKHR;
+    void * pNext = {};
+    uint32_t memoryTypeBits = {};
+
+  };
+
+  // Maps StructureType::eMemoryWin32HandlePropertiesKHR back to its C++ wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eMemoryWin32HandlePropertiesKHR>
+  {
+    using Type = MemoryWin32HandlePropertiesKHR;
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  struct MemoryZirconHandlePropertiesFUCHSIA
+  {
+    using NativeType = VkMemoryZirconHandlePropertiesFUCHSIA;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryZirconHandlePropertiesFUCHSIA;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR MemoryZirconHandlePropertiesFUCHSIA(uint32_t memoryTypeBits_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), memoryTypeBits( memoryTypeBits_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MemoryZirconHandlePropertiesFUCHSIA( MemoryZirconHandlePropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryZirconHandlePropertiesFUCHSIA( VkMemoryZirconHandlePropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MemoryZirconHandlePropertiesFUCHSIA( *reinterpret_cast<MemoryZirconHandlePropertiesFUCHSIA const *>( &rhs ) )
+    {}
+
+
+    MemoryZirconHandlePropertiesFUCHSIA & operator=( MemoryZirconHandlePropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    MemoryZirconHandlePropertiesFUCHSIA & operator=( VkMemoryZirconHandlePropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkMemoryZirconHandlePropertiesFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryZirconHandlePropertiesFUCHSIA*>( this );
+    }
+
+    operator VkMemoryZirconHandlePropertiesFUCHSIA &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryZirconHandlePropertiesFUCHSIA*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, memoryTypeBits );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( MemoryZirconHandlePropertiesFUCHSIA const & ) const = default;
+#else
+    bool operator==( MemoryZirconHandlePropertiesFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memoryTypeBits == rhs.memoryTypeBits );
+#endif
+    }
+
+    bool operator!=( MemoryZirconHandlePropertiesFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryZirconHandlePropertiesFUCHSIA;
+    void * pNext = {};
+    uint32_t memoryTypeBits = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eMemoryZirconHandlePropertiesFUCHSIA>
+  {
+    using Type = MemoryZirconHandlePropertiesFUCHSIA;
+  };
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+  struct MetalSurfaceCreateInfoEXT
+  {
+    using NativeType = VkMetalSurfaceCreateInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMetalSurfaceCreateInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR MetalSurfaceCreateInfoEXT(VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags_ = {}, const CAMetalLayer * pLayer_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), pLayer( pLayer_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MetalSurfaceCreateInfoEXT( MetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MetalSurfaceCreateInfoEXT( VkMetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MetalSurfaceCreateInfoEXT( *reinterpret_cast<MetalSurfaceCreateInfoEXT const *>( &rhs ) )
+    {}
+
+
+    MetalSurfaceCreateInfoEXT & operator=( MetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    MetalSurfaceCreateInfoEXT & operator=( VkMetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 MetalSurfaceCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MetalSurfaceCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MetalSurfaceCreateInfoEXT & setPLayer( const CAMetalLayer * pLayer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pLayer = pLayer_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkMetalSurfaceCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMetalSurfaceCreateInfoEXT*>( this );
+    }
+
+    operator VkMetalSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMetalSurfaceCreateInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT const &, const CAMetalLayer * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, pLayer );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( MetalSurfaceCreateInfoEXT const & ) const = default;
+#else
+    bool operator==( MetalSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( pLayer == rhs.pLayer );
+#endif
+    }
+
+    bool operator!=( MetalSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMetalSurfaceCreateInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags = {};
+    const CAMetalLayer * pLayer = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eMetalSurfaceCreateInfoEXT>
+  {
+    using Type = MetalSurfaceCreateInfoEXT;
+  };
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  struct MicromapBuildInfoEXT
+  {
+    using NativeType = VkMicromapBuildInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMicromapBuildInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT(VULKAN_HPP_NAMESPACE::MicromapTypeEXT type_ = VULKAN_HPP_NAMESPACE::MicromapTypeEXT::eOpacityMicromap, VULKAN_HPP_NAMESPACE::BuildMicromapFlagsEXT flags_ = {}, VULKAN_HPP_NAMESPACE::BuildMicromapModeEXT mode_ = VULKAN_HPP_NAMESPACE::BuildMicromapModeEXT::eBuild, VULKAN_HPP_NAMESPACE::MicromapEXT dstMicromap_ = {}, uint32_t usageCountsCount_ = {}, const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * pUsageCounts_ = {}, const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * ppUsageCounts_ = {}, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data_ = {}, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData_ = {}, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR triangleArray_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize triangleArrayStride_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), type( type_ ), flags( flags_ ), mode( mode_ ), dstMicromap( dstMicromap_ ), usageCountsCount( usageCountsCount_ ), pUsageCounts( pUsageCounts_ ), ppUsageCounts( ppUsageCounts_ ), data( data_ ), scratchData( scratchData_ ), triangleArray( triangleArray_ ), triangleArrayStride( triangleArrayStride_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT( MicromapBuildInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MicromapBuildInfoEXT( VkMicromapBuildInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MicromapBuildInfoEXT( *reinterpret_cast<MicromapBuildInfoEXT const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    MicromapBuildInfoEXT( VULKAN_HPP_NAMESPACE::MicromapTypeEXT type_, VULKAN_HPP_NAMESPACE::BuildMicromapFlagsEXT flags_, VULKAN_HPP_NAMESPACE::BuildMicromapModeEXT mode_, VULKAN_HPP_NAMESPACE::MicromapEXT dstMicromap_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MicromapUsageEXT> const & usageCounts_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const> const & pUsageCounts_ = {}, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data_ = {}, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData_ = {}, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR triangleArray_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize triangleArrayStride_ = {}, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), type( type_ ), flags( flags_ ), mode( mode_ ), dstMicromap( dstMicromap_ ), usageCountsCount( static_cast<uint32_t>( !usageCounts_.empty() ? usageCounts_.size() : pUsageCounts_.size() ) ), pUsageCounts( usageCounts_.data() ), ppUsageCounts( pUsageCounts_.data() ), data( data_ ), scratchData( scratchData_ ), triangleArray( triangleArray_ ), triangleArrayStride( triangleArrayStride_ )
+    {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+      VULKAN_HPP_ASSERT( ( !usageCounts_.empty() + !pUsageCounts_.empty() ) <= 1);
+#else
+      if ( 1 < ( !usageCounts_.empty() + !pUsageCounts_.empty() ) )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING"::MicromapBuildInfoEXT::MicromapBuildInfoEXT: 1 < ( !usageCounts_.empty() + !pUsageCounts_.empty() )" );
+      }
+#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    MicromapBuildInfoEXT & operator=( MicromapBuildInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    MicromapBuildInfoEXT & operator=( VkMicromapBuildInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setType( VULKAN_HPP_NAMESPACE::MicromapTypeEXT type_ ) VULKAN_HPP_NOEXCEPT
+    {
+      type = type_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::BuildMicromapFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setMode( VULKAN_HPP_NAMESPACE::BuildMicromapModeEXT mode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mode = mode_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setDstMicromap( VULKAN_HPP_NAMESPACE::MicromapEXT dstMicromap_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstMicromap = dstMicromap_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setUsageCountsCount( uint32_t usageCountsCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      usageCountsCount = usageCountsCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setPUsageCounts( const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * pUsageCounts_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pUsageCounts = pUsageCounts_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    MicromapBuildInfoEXT & setUsageCounts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MicromapUsageEXT> const & usageCounts_ ) VULKAN_HPP_NOEXCEPT
+    {
+      usageCountsCount = static_cast<uint32_t>( usageCounts_.size() );
+      pUsageCounts = usageCounts_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setPpUsageCounts( const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * ppUsageCounts_ ) VULKAN_HPP_NOEXCEPT
+    {
+      ppUsageCounts = ppUsageCounts_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    MicromapBuildInfoEXT & setPUsageCounts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const> const & pUsageCounts_ ) VULKAN_HPP_NOEXCEPT
+    {
+      usageCountsCount = static_cast<uint32_t>( pUsageCounts_.size() );
+      ppUsageCounts = pUsageCounts_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & data_ ) VULKAN_HPP_NOEXCEPT
+    {
+      data = data_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setScratchData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & scratchData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      scratchData = scratchData_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setTriangleArray( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & triangleArray_ ) VULKAN_HPP_NOEXCEPT
+    {
+      triangleArray = triangleArray_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setTriangleArrayStride( VULKAN_HPP_NAMESPACE::DeviceSize triangleArrayStride_ ) VULKAN_HPP_NOEXCEPT
+    {
+      triangleArrayStride = triangleArrayStride_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkMicromapBuildInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMicromapBuildInfoEXT*>( this );
+    }
+
+    operator VkMicromapBuildInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMicromapBuildInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::MicromapTypeEXT const &, VULKAN_HPP_NAMESPACE::BuildMicromapFlagsEXT const &, VULKAN_HPP_NAMESPACE::BuildMicromapModeEXT const &, VULKAN_HPP_NAMESPACE::MicromapEXT const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const &, const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * const &, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const &, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const &, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, type, flags, mode, dstMicromap, usageCountsCount, pUsageCounts, ppUsageCounts, data, scratchData, triangleArray, triangleArrayStride );
+    }
+#endif
+
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMicromapBuildInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::MicromapTypeEXT type = VULKAN_HPP_NAMESPACE::MicromapTypeEXT::eOpacityMicromap;
+    VULKAN_HPP_NAMESPACE::BuildMicromapFlagsEXT flags = {};
+    VULKAN_HPP_NAMESPACE::BuildMicromapModeEXT mode = VULKAN_HPP_NAMESPACE::BuildMicromapModeEXT::eBuild;
+    VULKAN_HPP_NAMESPACE::MicromapEXT dstMicromap = {};
+    uint32_t usageCountsCount = {};
+    const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * pUsageCounts = {};
+    const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * ppUsageCounts = {};
+    VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data = {};
+    VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData = {};
+    VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR triangleArray = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize triangleArrayStride = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eMicromapBuildInfoEXT>
+  {
+    using Type = MicromapBuildInfoEXT;
+  };
+
+  struct MicromapBuildSizesInfoEXT
+  {
+    using NativeType = VkMicromapBuildSizesInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMicromapBuildSizesInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR MicromapBuildSizesInfoEXT(VULKAN_HPP_NAMESPACE::DeviceSize micromapSize_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize buildScratchSize_ = {}, VULKAN_HPP_NAMESPACE::Bool32 discardable_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), micromapSize( micromapSize_ ), buildScratchSize( buildScratchSize_ ), discardable( discardable_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MicromapBuildSizesInfoEXT( MicromapBuildSizesInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MicromapBuildSizesInfoEXT( VkMicromapBuildSizesInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MicromapBuildSizesInfoEXT( *reinterpret_cast<MicromapBuildSizesInfoEXT const *>( &rhs ) )
+    {}
+
+
+    MicromapBuildSizesInfoEXT & operator=( MicromapBuildSizesInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    MicromapBuildSizesInfoEXT & operator=( VkMicromapBuildSizesInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 MicromapBuildSizesInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MicromapBuildSizesInfoEXT & setMicromapSize( VULKAN_HPP_NAMESPACE::DeviceSize micromapSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      micromapSize = micromapSize_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MicromapBuildSizesInfoEXT & setBuildScratchSize( VULKAN_HPP_NAMESPACE::DeviceSize buildScratchSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buildScratchSize = buildScratchSize_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MicromapBuildSizesInfoEXT & setDiscardable( VULKAN_HPP_NAMESPACE::Bool32 discardable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      discardable = discardable_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkMicromapBuildSizesInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMicromapBuildSizesInfoEXT*>( this );
+    }
+
+    operator VkMicromapBuildSizesInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMicromapBuildSizesInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, micromapSize, buildScratchSize, discardable );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( MicromapBuildSizesInfoEXT const & ) const = default;
+#else
+    bool operator==( MicromapBuildSizesInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( micromapSize == rhs.micromapSize )
+          && ( buildScratchSize == rhs.buildScratchSize )
+          && ( discardable == rhs.discardable );
+#endif
+    }
+
+    bool operator!=( MicromapBuildSizesInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMicromapBuildSizesInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize micromapSize = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize buildScratchSize = {};
+    VULKAN_HPP_NAMESPACE::Bool32 discardable = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eMicromapBuildSizesInfoEXT>
+  {
+    using Type = MicromapBuildSizesInfoEXT;
+  };
+
+  struct MicromapCreateInfoEXT
+  {
+    using NativeType = VkMicromapCreateInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMicromapCreateInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR MicromapCreateInfoEXT(VULKAN_HPP_NAMESPACE::MicromapCreateFlagsEXT createFlags_ = {}, VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::MicromapTypeEXT type_ = VULKAN_HPP_NAMESPACE::MicromapTypeEXT::eOpacityMicromap, VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), createFlags( createFlags_ ), buffer( buffer_ ), offset( offset_ ), size( size_ ), type( type_ ), deviceAddress( deviceAddress_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MicromapCreateInfoEXT( MicromapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MicromapCreateInfoEXT( VkMicromapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MicromapCreateInfoEXT( *reinterpret_cast<MicromapCreateInfoEXT const *>( &rhs ) )
+    {}
+
+
+    MicromapCreateInfoEXT & operator=( MicromapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    MicromapCreateInfoEXT & operator=( VkMicromapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT & setCreateFlags( VULKAN_HPP_NAMESPACE::MicromapCreateFlagsEXT createFlags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      createFlags = createFlags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
+    {
+      size = size_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT & setType( VULKAN_HPP_NAMESPACE::MicromapTypeEXT type_ ) VULKAN_HPP_NOEXCEPT
+    {
+      type = type_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceAddress = deviceAddress_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkMicromapCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMicromapCreateInfoEXT*>( this );
+    }
+
+    operator VkMicromapCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMicromapCreateInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::MicromapCreateFlagsEXT const &, VULKAN_HPP_NAMESPACE::Buffer const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::MicromapTypeEXT const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, createFlags, buffer, offset, size, type, deviceAddress );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( MicromapCreateInfoEXT const & ) const = default;
+#else
+    bool operator==( MicromapCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( createFlags == rhs.createFlags )
+          && ( buffer == rhs.buffer )
+          && ( offset == rhs.offset )
+          && ( size == rhs.size )
+          && ( type == rhs.type )
+          && ( deviceAddress == rhs.deviceAddress );
+#endif
+    }
+
+    bool operator!=( MicromapCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMicromapCreateInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::MicromapCreateFlagsEXT createFlags = {};
+    VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+    VULKAN_HPP_NAMESPACE::MicromapTypeEXT type = VULKAN_HPP_NAMESPACE::MicromapTypeEXT::eOpacityMicromap;
+    VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eMicromapCreateInfoEXT>
+  {
+    using Type = MicromapCreateInfoEXT;
+  };
+
+  struct MicromapTriangleEXT
+  {
+    using NativeType = VkMicromapTriangleEXT;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR MicromapTriangleEXT(uint32_t dataOffset_ = {}, uint16_t subdivisionLevel_ = {}, uint16_t format_ = {}) VULKAN_HPP_NOEXCEPT
+    : dataOffset( dataOffset_ ), subdivisionLevel( subdivisionLevel_ ), format( format_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MicromapTriangleEXT( MicromapTriangleEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MicromapTriangleEXT( VkMicromapTriangleEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MicromapTriangleEXT( *reinterpret_cast<MicromapTriangleEXT const *>( &rhs ) )
+    {}
+
+
+    MicromapTriangleEXT & operator=( MicromapTriangleEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    MicromapTriangleEXT & operator=( VkMicromapTriangleEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MicromapTriangleEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 MicromapTriangleEXT & setDataOffset( uint32_t dataOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dataOffset = dataOffset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MicromapTriangleEXT & setSubdivisionLevel( uint16_t subdivisionLevel_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subdivisionLevel = subdivisionLevel_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MicromapTriangleEXT & setFormat( uint16_t format_ ) VULKAN_HPP_NOEXCEPT
+    {
+      format = format_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkMicromapTriangleEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMicromapTriangleEXT*>( this );
+    }
+
+    operator VkMicromapTriangleEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMicromapTriangleEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<uint32_t const &, uint16_t const &, uint16_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( dataOffset, subdivisionLevel, format );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( MicromapTriangleEXT const & ) const = default;
+#else
+    bool operator==( MicromapTriangleEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( dataOffset == rhs.dataOffset )
+          && ( subdivisionLevel == rhs.subdivisionLevel )
+          && ( format == rhs.format );
+#endif
+    }
+
+    bool operator!=( MicromapTriangleEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    uint32_t dataOffset = {};
+    uint16_t subdivisionLevel = {};
+    uint16_t format = {};
+
+  };
+
+  // C++ wrapper for VkMicromapVersionInfoEXT. Layout-compatible with the C
+  // struct, so the conversion operators below may reinterpret_cast between
+  // the two representations.
+  struct MicromapVersionInfoEXT
+  {
+    using NativeType = VkMicromapVersionInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMicromapVersionInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR MicromapVersionInfoEXT(const uint8_t * pVersionData_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), pVersionData( pVersionData_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MicromapVersionInfoEXT( MicromapVersionInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the C++ wrapper.
+    MicromapVersionInfoEXT( VkMicromapVersionInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MicromapVersionInfoEXT( *reinterpret_cast<MicromapVersionInfoEXT const *>( &rhs ) )
+    {}
+
+
+    MicromapVersionInfoEXT & operator=( MicromapVersionInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    MicromapVersionInfoEXT & operator=( VkMicromapVersionInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters: each returns *this so calls can be fluently chained.
+    VULKAN_HPP_CONSTEXPR_14 MicromapVersionInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MicromapVersionInfoEXT & setPVersionData( const uint8_t * pVersionData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pVersionData = pVersionData_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkMicromapVersionInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMicromapVersionInfoEXT*>( this );
+    }
+
+    operator VkMicromapVersionInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMicromapVersionInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Returns a tuple of references to every member, used by the reflection-
+    // based operator== below.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const uint8_t * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, pVersionData );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( MicromapVersionInfoEXT const & ) const = default;
+#else
+    bool operator==( MicromapVersionInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pVersionData == rhs.pVersionData );
+#endif
+    }
+
+    bool operator!=( MicromapVersionInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // Field order mirrors the C struct (required for the casts above).
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMicromapVersionInfoEXT;
+    const void * pNext = {};
+    const uint8_t * pVersionData = {};
+
+  };
+
+  // Compile-time mapping from the StructureType enum value to this type.
+  template <>
+  struct CppType<StructureType, StructureType::eMicromapVersionInfoEXT>
+  {
+    using Type = MicromapVersionInfoEXT;
+  };
+
+  // C++ wrapper for VkMultiDrawIndexedInfoEXT. Plain data struct (no sType /
+  // pNext chain); layout-compatible with the C struct so the conversion
+  // operators below may reinterpret_cast between the two representations.
+  struct MultiDrawIndexedInfoEXT
+  {
+    using NativeType = VkMultiDrawIndexedInfoEXT;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR MultiDrawIndexedInfoEXT(uint32_t firstIndex_ = {}, uint32_t indexCount_ = {}, int32_t vertexOffset_ = {}) VULKAN_HPP_NOEXCEPT
+    : firstIndex( firstIndex_ ), indexCount( indexCount_ ), vertexOffset( vertexOffset_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MultiDrawIndexedInfoEXT( MultiDrawIndexedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the C++ wrapper.
+    MultiDrawIndexedInfoEXT( VkMultiDrawIndexedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MultiDrawIndexedInfoEXT( *reinterpret_cast<MultiDrawIndexedInfoEXT const *>( &rhs ) )
+    {}
+
+
+    MultiDrawIndexedInfoEXT & operator=( MultiDrawIndexedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    MultiDrawIndexedInfoEXT & operator=( VkMultiDrawIndexedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters: each returns *this so calls can be fluently chained.
+    VULKAN_HPP_CONSTEXPR_14 MultiDrawIndexedInfoEXT & setFirstIndex( uint32_t firstIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      firstIndex = firstIndex_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MultiDrawIndexedInfoEXT & setIndexCount( uint32_t indexCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      indexCount = indexCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MultiDrawIndexedInfoEXT & setVertexOffset( int32_t vertexOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexOffset = vertexOffset_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkMultiDrawIndexedInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMultiDrawIndexedInfoEXT*>( this );
+    }
+
+    operator VkMultiDrawIndexedInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMultiDrawIndexedInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Tuple of references to every member, used by the reflection-based
+    // operator== below.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<uint32_t const &, uint32_t const &, int32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( firstIndex, indexCount, vertexOffset );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( MultiDrawIndexedInfoEXT const & ) const = default;
+#else
+    bool operator==( MultiDrawIndexedInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( firstIndex == rhs.firstIndex )
+          && ( indexCount == rhs.indexCount )
+          && ( vertexOffset == rhs.vertexOffset );
+#endif
+    }
+
+    bool operator!=( MultiDrawIndexedInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // Field order mirrors the C struct (required for the casts above).
+    uint32_t firstIndex = {};
+    uint32_t indexCount = {};
+    int32_t vertexOffset = {};
+
+  };
+
+  // C++ wrapper for VkMultiDrawInfoEXT. Plain data struct (no sType / pNext
+  // chain); layout-compatible with the C struct so the conversion operators
+  // below may reinterpret_cast between the two representations.
+  struct MultiDrawInfoEXT
+  {
+    using NativeType = VkMultiDrawInfoEXT;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR MultiDrawInfoEXT(uint32_t firstVertex_ = {}, uint32_t vertexCount_ = {}) VULKAN_HPP_NOEXCEPT
+    : firstVertex( firstVertex_ ), vertexCount( vertexCount_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MultiDrawInfoEXT( MultiDrawInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the C++ wrapper.
+    MultiDrawInfoEXT( VkMultiDrawInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MultiDrawInfoEXT( *reinterpret_cast<MultiDrawInfoEXT const *>( &rhs ) )
+    {}
+
+
+    MultiDrawInfoEXT & operator=( MultiDrawInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    MultiDrawInfoEXT & operator=( VkMultiDrawInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters: each returns *this so calls can be fluently chained.
+    VULKAN_HPP_CONSTEXPR_14 MultiDrawInfoEXT & setFirstVertex( uint32_t firstVertex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      firstVertex = firstVertex_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MultiDrawInfoEXT & setVertexCount( uint32_t vertexCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexCount = vertexCount_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkMultiDrawInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMultiDrawInfoEXT*>( this );
+    }
+
+    operator VkMultiDrawInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMultiDrawInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Tuple of references to every member, used by the reflection-based
+    // operator== below.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<uint32_t const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( firstVertex, vertexCount );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( MultiDrawInfoEXT const & ) const = default;
+#else
+    bool operator==( MultiDrawInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( firstVertex == rhs.firstVertex )
+          && ( vertexCount == rhs.vertexCount );
+#endif
+    }
+
+    bool operator!=( MultiDrawInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // Field order mirrors the C struct (required for the casts above).
+    uint32_t firstVertex = {};
+    uint32_t vertexCount = {};
+
+  };
+
+  // C++ wrapper for VkMultisamplePropertiesEXT. Layout-compatible with the C
+  // struct, so the conversion operators below may reinterpret_cast between
+  // the two representations. Note: no setters are generated for this struct.
+  struct MultisamplePropertiesEXT
+  {
+    using NativeType = VkMultisamplePropertiesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMultisamplePropertiesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR MultisamplePropertiesEXT(VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), maxSampleLocationGridSize( maxSampleLocationGridSize_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MultisamplePropertiesEXT( MultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the C++ wrapper.
+    MultisamplePropertiesEXT( VkMultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MultisamplePropertiesEXT( *reinterpret_cast<MultisamplePropertiesEXT const *>( &rhs ) )
+    {}
+
+
+    MultisamplePropertiesEXT & operator=( MultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    MultisamplePropertiesEXT & operator=( VkMultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkMultisamplePropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMultisamplePropertiesEXT*>( this );
+    }
+
+    operator VkMultisamplePropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMultisamplePropertiesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Tuple of references to every member, used by the reflection-based
+    // operator== below.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Extent2D const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, maxSampleLocationGridSize );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( MultisamplePropertiesEXT const & ) const = default;
+#else
+    bool operator==( MultisamplePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxSampleLocationGridSize == rhs.maxSampleLocationGridSize );
+#endif
+    }
+
+    bool operator!=( MultisamplePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // Field order mirrors the C struct (required for the casts above).
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMultisamplePropertiesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize = {};
+
+  };
+
+  // Compile-time mapping from the StructureType enum value to this type.
+  template <>
+  struct CppType<StructureType, StructureType::eMultisamplePropertiesEXT>
+  {
+    using Type = MultisamplePropertiesEXT;
+  };
+
+  // C++ wrapper for VkMultisampledRenderToSingleSampledInfoEXT. Layout-
+  // compatible with the C struct, so the conversion operators below may
+  // reinterpret_cast between the two representations.
+  struct MultisampledRenderToSingleSampledInfoEXT
+  {
+    using NativeType = VkMultisampledRenderToSingleSampledInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMultisampledRenderToSingleSampledInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR MultisampledRenderToSingleSampledInfoEXT(VULKAN_HPP_NAMESPACE::Bool32 multisampledRenderToSingleSampledEnable_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), multisampledRenderToSingleSampledEnable( multisampledRenderToSingleSampledEnable_ ), rasterizationSamples( rasterizationSamples_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MultisampledRenderToSingleSampledInfoEXT( MultisampledRenderToSingleSampledInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the C++ wrapper.
+    MultisampledRenderToSingleSampledInfoEXT( VkMultisampledRenderToSingleSampledInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MultisampledRenderToSingleSampledInfoEXT( *reinterpret_cast<MultisampledRenderToSingleSampledInfoEXT const *>( &rhs ) )
+    {}
+
+
+    MultisampledRenderToSingleSampledInfoEXT & operator=( MultisampledRenderToSingleSampledInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    MultisampledRenderToSingleSampledInfoEXT & operator=( VkMultisampledRenderToSingleSampledInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MultisampledRenderToSingleSampledInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters: each returns *this so calls can be fluently chained.
+    VULKAN_HPP_CONSTEXPR_14 MultisampledRenderToSingleSampledInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MultisampledRenderToSingleSampledInfoEXT & setMultisampledRenderToSingleSampledEnable( VULKAN_HPP_NAMESPACE::Bool32 multisampledRenderToSingleSampledEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      multisampledRenderToSingleSampledEnable = multisampledRenderToSingleSampledEnable_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MultisampledRenderToSingleSampledInfoEXT & setRasterizationSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rasterizationSamples = rasterizationSamples_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkMultisampledRenderToSingleSampledInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMultisampledRenderToSingleSampledInfoEXT*>( this );
+    }
+
+    operator VkMultisampledRenderToSingleSampledInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMultisampledRenderToSingleSampledInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Tuple of references to every member, used by the reflection-based
+    // operator== below.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::SampleCountFlagBits const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, multisampledRenderToSingleSampledEnable, rasterizationSamples );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( MultisampledRenderToSingleSampledInfoEXT const & ) const = default;
+#else
+    bool operator==( MultisampledRenderToSingleSampledInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( multisampledRenderToSingleSampledEnable == rhs.multisampledRenderToSingleSampledEnable )
+          && ( rasterizationSamples == rhs.rasterizationSamples );
+#endif
+    }
+
+    bool operator!=( MultisampledRenderToSingleSampledInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // Field order mirrors the C struct (required for the casts above).
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMultisampledRenderToSingleSampledInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 multisampledRenderToSingleSampledEnable = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
+
+  };
+
+  // Compile-time mapping from the StructureType enum value to this type.
+  template <>
+  struct CppType<StructureType, StructureType::eMultisampledRenderToSingleSampledInfoEXT>
+  {
+    using Type = MultisampledRenderToSingleSampledInfoEXT;
+  };
+
+  // C++ wrapper for VkMultiviewPerViewAttributesInfoNVX. Layout-compatible
+  // with the C struct, so the conversion operators below may
+  // reinterpret_cast between the two representations.
+  struct MultiviewPerViewAttributesInfoNVX
+  {
+    using NativeType = VkMultiviewPerViewAttributesInfoNVX;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMultiviewPerViewAttributesInfoNVX;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR MultiviewPerViewAttributesInfoNVX(VULKAN_HPP_NAMESPACE::Bool32 perViewAttributes_ = {}, VULKAN_HPP_NAMESPACE::Bool32 perViewAttributesPositionXOnly_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), perViewAttributes( perViewAttributes_ ), perViewAttributesPositionXOnly( perViewAttributesPositionXOnly_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MultiviewPerViewAttributesInfoNVX( MultiviewPerViewAttributesInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the C++ wrapper.
+    MultiviewPerViewAttributesInfoNVX( VkMultiviewPerViewAttributesInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MultiviewPerViewAttributesInfoNVX( *reinterpret_cast<MultiviewPerViewAttributesInfoNVX const *>( &rhs ) )
+    {}
+
+
+    MultiviewPerViewAttributesInfoNVX & operator=( MultiviewPerViewAttributesInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    MultiviewPerViewAttributesInfoNVX & operator=( VkMultiviewPerViewAttributesInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MultiviewPerViewAttributesInfoNVX const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters: each returns *this so calls can be fluently chained.
+    VULKAN_HPP_CONSTEXPR_14 MultiviewPerViewAttributesInfoNVX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MultiviewPerViewAttributesInfoNVX & setPerViewAttributes( VULKAN_HPP_NAMESPACE::Bool32 perViewAttributes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      perViewAttributes = perViewAttributes_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MultiviewPerViewAttributesInfoNVX & setPerViewAttributesPositionXOnly( VULKAN_HPP_NAMESPACE::Bool32 perViewAttributesPositionXOnly_ ) VULKAN_HPP_NOEXCEPT
+    {
+      perViewAttributesPositionXOnly = perViewAttributesPositionXOnly_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkMultiviewPerViewAttributesInfoNVX const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMultiviewPerViewAttributesInfoNVX*>( this );
+    }
+
+    operator VkMultiviewPerViewAttributesInfoNVX &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMultiviewPerViewAttributesInfoNVX*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Tuple of references to every member, used by the reflection-based
+    // operator== below.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, perViewAttributes, perViewAttributesPositionXOnly );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( MultiviewPerViewAttributesInfoNVX const & ) const = default;
+#else
+    bool operator==( MultiviewPerViewAttributesInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( perViewAttributes == rhs.perViewAttributes )
+          && ( perViewAttributesPositionXOnly == rhs.perViewAttributesPositionXOnly );
+#endif
+    }
+
+    bool operator!=( MultiviewPerViewAttributesInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // Field order mirrors the C struct (required for the casts above).
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMultiviewPerViewAttributesInfoNVX;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 perViewAttributes = {};
+    VULKAN_HPP_NAMESPACE::Bool32 perViewAttributesPositionXOnly = {};
+
+  };
+
+  // Compile-time mapping from the StructureType enum value to this type.
+  template <>
+  struct CppType<StructureType, StructureType::eMultiviewPerViewAttributesInfoNVX>
+  {
+    using Type = MultiviewPerViewAttributesInfoNVX;
+  };
+
+  // C++ wrapper for VkMutableDescriptorTypeListEXT: a (count, pointer) pair
+  // describing a list of descriptor types. Layout-compatible with the C
+  // struct, so the conversion operators below may reinterpret_cast.
+  struct MutableDescriptorTypeListEXT
+  {
+    using NativeType = VkMutableDescriptorTypeListEXT;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR MutableDescriptorTypeListEXT(uint32_t descriptorTypeCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorType * pDescriptorTypes_ = {}) VULKAN_HPP_NOEXCEPT
+    : descriptorTypeCount( descriptorTypeCount_ ), pDescriptorTypes( pDescriptorTypes_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MutableDescriptorTypeListEXT( MutableDescriptorTypeListEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the C++ wrapper.
+    MutableDescriptorTypeListEXT( VkMutableDescriptorTypeListEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MutableDescriptorTypeListEXT( *reinterpret_cast<MutableDescriptorTypeListEXT const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Convenience constructor: derives count/pointer from an array proxy.
+    // Only borrows the data; the caller must keep the backing storage alive.
+    MutableDescriptorTypeListEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorType> const & descriptorTypes_ )
+    : descriptorTypeCount( static_cast<uint32_t>( descriptorTypes_.size() ) ), pDescriptorTypes( descriptorTypes_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    MutableDescriptorTypeListEXT & operator=( MutableDescriptorTypeListEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    MutableDescriptorTypeListEXT & operator=( VkMutableDescriptorTypeListEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters: each returns *this so calls can be fluently chained.
+    VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeListEXT & setDescriptorTypeCount( uint32_t descriptorTypeCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorTypeCount = descriptorTypeCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeListEXT & setPDescriptorTypes( const VULKAN_HPP_NAMESPACE::DescriptorType * pDescriptorTypes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDescriptorTypes = pDescriptorTypes_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets both count and pointer from an array proxy in one call.
+    MutableDescriptorTypeListEXT & setDescriptorTypes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorType> const & descriptorTypes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorTypeCount = static_cast<uint32_t>( descriptorTypes_.size() );
+      pDescriptorTypes = descriptorTypes_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkMutableDescriptorTypeListEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMutableDescriptorTypeListEXT*>( this );
+    }
+
+    operator VkMutableDescriptorTypeListEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMutableDescriptorTypeListEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Tuple of references to every member, used by the reflection-based
+    // operator== below.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<uint32_t const &, const VULKAN_HPP_NAMESPACE::DescriptorType * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( descriptorTypeCount, pDescriptorTypes );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( MutableDescriptorTypeListEXT const & ) const = default;
+#else
+    bool operator==( MutableDescriptorTypeListEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( descriptorTypeCount == rhs.descriptorTypeCount )
+          && ( pDescriptorTypes == rhs.pDescriptorTypes );
+#endif
+    }
+
+    bool operator!=( MutableDescriptorTypeListEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // Field order mirrors the C struct (required for the casts above).
+    uint32_t descriptorTypeCount = {};
+    const VULKAN_HPP_NAMESPACE::DescriptorType * pDescriptorTypes = {};
+
+  };
+  // Backward-compatible alias for the earlier VALVE-suffixed extension name.
+  using MutableDescriptorTypeListVALVE = MutableDescriptorTypeListEXT;
+
+  // C++ wrapper for VkMutableDescriptorTypeCreateInfoEXT: carries an array of
+  // MutableDescriptorTypeListEXT entries. Layout-compatible with the C
+  // struct, so the conversion operators below may reinterpret_cast.
+  struct MutableDescriptorTypeCreateInfoEXT
+  {
+    using NativeType = VkMutableDescriptorTypeCreateInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMutableDescriptorTypeCreateInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR MutableDescriptorTypeCreateInfoEXT(uint32_t mutableDescriptorTypeListCount_ = {}, const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListEXT * pMutableDescriptorTypeLists_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), mutableDescriptorTypeListCount( mutableDescriptorTypeListCount_ ), pMutableDescriptorTypeLists( pMutableDescriptorTypeLists_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MutableDescriptorTypeCreateInfoEXT( MutableDescriptorTypeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the C++ wrapper.
+    MutableDescriptorTypeCreateInfoEXT( VkMutableDescriptorTypeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MutableDescriptorTypeCreateInfoEXT( *reinterpret_cast<MutableDescriptorTypeCreateInfoEXT const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Convenience constructor: derives count/pointer from an array proxy.
+    // Only borrows the data; the caller must keep the backing storage alive.
+    MutableDescriptorTypeCreateInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListEXT> const & mutableDescriptorTypeLists_, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), mutableDescriptorTypeListCount( static_cast<uint32_t>( mutableDescriptorTypeLists_.size() ) ), pMutableDescriptorTypeLists( mutableDescriptorTypeLists_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    MutableDescriptorTypeCreateInfoEXT & operator=( MutableDescriptorTypeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    MutableDescriptorTypeCreateInfoEXT & operator=( VkMutableDescriptorTypeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MutableDescriptorTypeCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters: each returns *this so calls can be fluently chained.
+    VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeCreateInfoEXT & setMutableDescriptorTypeListCount( uint32_t mutableDescriptorTypeListCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mutableDescriptorTypeListCount = mutableDescriptorTypeListCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeCreateInfoEXT & setPMutableDescriptorTypeLists( const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListEXT * pMutableDescriptorTypeLists_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pMutableDescriptorTypeLists = pMutableDescriptorTypeLists_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets both count and pointer from an array proxy in one call.
+    MutableDescriptorTypeCreateInfoEXT & setMutableDescriptorTypeLists( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListEXT> const & mutableDescriptorTypeLists_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mutableDescriptorTypeListCount = static_cast<uint32_t>( mutableDescriptorTypeLists_.size() );
+      pMutableDescriptorTypeLists = mutableDescriptorTypeLists_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkMutableDescriptorTypeCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMutableDescriptorTypeCreateInfoEXT*>( this );
+    }
+
+    operator VkMutableDescriptorTypeCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMutableDescriptorTypeCreateInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Tuple of references to every member, used by the reflection-based
+    // operator== below.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListEXT * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, mutableDescriptorTypeListCount, pMutableDescriptorTypeLists );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( MutableDescriptorTypeCreateInfoEXT const & ) const = default;
+#else
+    bool operator==( MutableDescriptorTypeCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( mutableDescriptorTypeListCount == rhs.mutableDescriptorTypeListCount )
+          && ( pMutableDescriptorTypeLists == rhs.pMutableDescriptorTypeLists );
+#endif
+    }
+
+    bool operator!=( MutableDescriptorTypeCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // Field order mirrors the C struct (required for the casts above).
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMutableDescriptorTypeCreateInfoEXT;
+    const void * pNext = {};
+    uint32_t mutableDescriptorTypeListCount = {};
+    const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListEXT * pMutableDescriptorTypeLists = {};
+
+  };
+
+  // Compile-time mapping from the StructureType enum value to this type.
+  template <>
+  struct CppType<StructureType, StructureType::eMutableDescriptorTypeCreateInfoEXT>
+  {
+    using Type = MutableDescriptorTypeCreateInfoEXT;
+  };
+  // Backward-compatible alias for the earlier VALVE-suffixed extension name.
+  using MutableDescriptorTypeCreateInfoVALVE = MutableDescriptorTypeCreateInfoEXT;
+
+  struct OpaqueCaptureDescriptorDataCreateInfoEXT
+  {
+    using NativeType = VkOpaqueCaptureDescriptorDataCreateInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eOpaqueCaptureDescriptorDataCreateInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR OpaqueCaptureDescriptorDataCreateInfoEXT(const void * opaqueCaptureDescriptorData_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), opaqueCaptureDescriptorData( opaqueCaptureDescriptorData_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR OpaqueCaptureDescriptorDataCreateInfoEXT( OpaqueCaptureDescriptorDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    OpaqueCaptureDescriptorDataCreateInfoEXT( VkOpaqueCaptureDescriptorDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : OpaqueCaptureDescriptorDataCreateInfoEXT( *reinterpret_cast<OpaqueCaptureDescriptorDataCreateInfoEXT const *>( &rhs ) )
+    {}
+
+
+    OpaqueCaptureDescriptorDataCreateInfoEXT & operator=( OpaqueCaptureDescriptorDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    OpaqueCaptureDescriptorDataCreateInfoEXT & operator=( VkOpaqueCaptureDescriptorDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::OpaqueCaptureDescriptorDataCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 OpaqueCaptureDescriptorDataCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 OpaqueCaptureDescriptorDataCreateInfoEXT & setOpaqueCaptureDescriptorData( const void * opaqueCaptureDescriptorData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      opaqueCaptureDescriptorData = opaqueCaptureDescriptorData_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkOpaqueCaptureDescriptorDataCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkOpaqueCaptureDescriptorDataCreateInfoEXT*>( this );
+    }
+
+    operator VkOpaqueCaptureDescriptorDataCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkOpaqueCaptureDescriptorDataCreateInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const void * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, opaqueCaptureDescriptorData );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( OpaqueCaptureDescriptorDataCreateInfoEXT const & ) const = default;
+#else
+    bool operator==( OpaqueCaptureDescriptorDataCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( opaqueCaptureDescriptorData == rhs.opaqueCaptureDescriptorData );
+#endif
+    }
+
+    bool operator!=( OpaqueCaptureDescriptorDataCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eOpaqueCaptureDescriptorDataCreateInfoEXT;
+    const void * pNext = {};
+    const void * opaqueCaptureDescriptorData = {};
+
+  };
+
+  // Trait mapping the eOpaqueCaptureDescriptorDataCreateInfoEXT StructureType
+  // enumerant back to its C++ wrapper struct (used by structure-chain machinery).
+  template <>
+  struct CppType<StructureType, StructureType::eOpaqueCaptureDescriptorDataCreateInfoEXT>
+  {
+    using Type = OpaqueCaptureDescriptorDataCreateInfoEXT;
+  };
+
+  // C++ wrapper for VkOpticalFlowExecuteInfoNV. Auto-generated vulkan.hpp-style
+  // struct: it carries sType/pNext plus the execute parameters (flags and a
+  // region array described by regionCount/pRegions). The Vk* conversion
+  // operators below rely on this struct being layout-compatible with the C
+  // struct, which is why only comments may be added here.
+  struct OpticalFlowExecuteInfoNV
+  {
+    using NativeType = VkOpticalFlowExecuteInfoNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eOpticalFlowExecuteInfoNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR OpticalFlowExecuteInfoNV(VULKAN_HPP_NAMESPACE::OpticalFlowExecuteFlagsNV flags_ = {}, uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::Rect2D * pRegions_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), regionCount( regionCount_ ), pRegions( pRegions_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR OpticalFlowExecuteInfoNV( OpticalFlowExecuteInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    OpticalFlowExecuteInfoNV( VkOpticalFlowExecuteInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : OpticalFlowExecuteInfoNV( *reinterpret_cast<OpticalFlowExecuteInfoNV const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Enhanced-mode convenience: derives regionCount/pRegions from an ArrayProxy.
+    OpticalFlowExecuteInfoNV( VULKAN_HPP_NAMESPACE::OpticalFlowExecuteFlagsNV flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & regions_, void * pNext_ = nullptr )
+    : pNext( pNext_ ), flags( flags_ ), regionCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    OpticalFlowExecuteInfoNV & operator=( OpticalFlowExecuteInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    OpticalFlowExecuteInfoNV & operator=( VkOpticalFlowExecuteInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable fluent setters; each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 OpticalFlowExecuteInfoNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 OpticalFlowExecuteInfoNV & setFlags( VULKAN_HPP_NAMESPACE::OpticalFlowExecuteFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 OpticalFlowExecuteInfoNV & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      regionCount = regionCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 OpticalFlowExecuteInfoNV & setPRegions( const VULKAN_HPP_NAMESPACE::Rect2D * pRegions_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pRegions = pRegions_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets both regionCount and pRegions from a single ArrayProxy.
+    OpticalFlowExecuteInfoNV & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & regions_ ) VULKAN_HPP_NOEXCEPT
+    {
+      regionCount = static_cast<uint32_t>( regions_.size() );
+      pRegions = regions_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Reinterpreting views onto the C struct (no copy); valid only if layouts match.
+    operator VkOpticalFlowExecuteInfoNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkOpticalFlowExecuteInfoNV*>( this );
+    }
+
+    operator VkOpticalFlowExecuteInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkOpticalFlowExecuteInfoNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::OpticalFlowExecuteFlagsNV const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Rect2D * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, regionCount, pRegions );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( OpticalFlowExecuteInfoNV const & ) const = default;
+#else
+    // Shallow comparison: pointer members (pNext, pRegions) compare by address.
+    bool operator==( OpticalFlowExecuteInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( regionCount == rhs.regionCount )
+          && ( pRegions == rhs.pRegions );
+#endif
+    }
+
+    bool operator!=( OpticalFlowExecuteInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eOpticalFlowExecuteInfoNV;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::OpticalFlowExecuteFlagsNV flags = {};
+    uint32_t regionCount = {};
+    const VULKAN_HPP_NAMESPACE::Rect2D * pRegions = {};
+
+  };
+
+  // Maps the eOpticalFlowExecuteInfoNV enumerant back to its C++ struct.
+  template <>
+  struct CppType<StructureType, StructureType::eOpticalFlowExecuteInfoNV>
+  {
+    using Type = OpticalFlowExecuteInfoNV;
+  };
+
+  // C++ wrapper for VkOpticalFlowImageFormatInfoNV: input structure carrying an
+  // OpticalFlowUsageFlagsNV `usage` value. Auto-generated; the Vk* conversion
+  // operators depend on layout compatibility with the C struct.
+  struct OpticalFlowImageFormatInfoNV
+  {
+    using NativeType = VkOpticalFlowImageFormatInfoNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eOpticalFlowImageFormatInfoNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR OpticalFlowImageFormatInfoNV(VULKAN_HPP_NAMESPACE::OpticalFlowUsageFlagsNV usage_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), usage( usage_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR OpticalFlowImageFormatInfoNV( OpticalFlowImageFormatInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    OpticalFlowImageFormatInfoNV( VkOpticalFlowImageFormatInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : OpticalFlowImageFormatInfoNV( *reinterpret_cast<OpticalFlowImageFormatInfoNV const *>( &rhs ) )
+    {}
+
+
+    OpticalFlowImageFormatInfoNV & operator=( OpticalFlowImageFormatInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    OpticalFlowImageFormatInfoNV & operator=( VkOpticalFlowImageFormatInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable fluent setters; each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 OpticalFlowImageFormatInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 OpticalFlowImageFormatInfoNV & setUsage( VULKAN_HPP_NAMESPACE::OpticalFlowUsageFlagsNV usage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      usage = usage_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Reinterpreting views onto the C struct (no copy).
+    operator VkOpticalFlowImageFormatInfoNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkOpticalFlowImageFormatInfoNV*>( this );
+    }
+
+    operator VkOpticalFlowImageFormatInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkOpticalFlowImageFormatInfoNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::OpticalFlowUsageFlagsNV const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, usage );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( OpticalFlowImageFormatInfoNV const & ) const = default;
+#else
+    // Shallow comparison: pNext compares by address.
+    bool operator==( OpticalFlowImageFormatInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( usage == rhs.usage );
+#endif
+    }
+
+    bool operator!=( OpticalFlowImageFormatInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eOpticalFlowImageFormatInfoNV;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::OpticalFlowUsageFlagsNV usage = {};
+
+  };
+
+  // Maps the eOpticalFlowImageFormatInfoNV enumerant back to its C++ struct.
+  template <>
+  struct CppType<StructureType, StructureType::eOpticalFlowImageFormatInfoNV>
+  {
+    using Type = OpticalFlowImageFormatInfoNV;
+  };
+
+  // C++ wrapper for VkOpticalFlowImageFormatPropertiesNV: an output/properties
+  // structure reporting a supported Format. Note it has no fluent setters —
+  // consistent with it being filled in by the implementation rather than the app.
+  struct OpticalFlowImageFormatPropertiesNV
+  {
+    using NativeType = VkOpticalFlowImageFormatPropertiesNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eOpticalFlowImageFormatPropertiesNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR OpticalFlowImageFormatPropertiesNV(VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), format( format_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR OpticalFlowImageFormatPropertiesNV( OpticalFlowImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    OpticalFlowImageFormatPropertiesNV( VkOpticalFlowImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : OpticalFlowImageFormatPropertiesNV( *reinterpret_cast<OpticalFlowImageFormatPropertiesNV const *>( &rhs ) )
+    {}
+
+
+    OpticalFlowImageFormatPropertiesNV & operator=( OpticalFlowImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    OpticalFlowImageFormatPropertiesNV & operator=( VkOpticalFlowImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV const *>( &rhs );
+      return *this;
+    }
+
+
+    // Reinterpreting views onto the C struct (no copy).
+    operator VkOpticalFlowImageFormatPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkOpticalFlowImageFormatPropertiesNV*>( this );
+    }
+
+    operator VkOpticalFlowImageFormatPropertiesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkOpticalFlowImageFormatPropertiesNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Format const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, format );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( OpticalFlowImageFormatPropertiesNV const & ) const = default;
+#else
+    // Shallow comparison: pNext compares by address.
+    bool operator==( OpticalFlowImageFormatPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( format == rhs.format );
+#endif
+    }
+
+    bool operator!=( OpticalFlowImageFormatPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eOpticalFlowImageFormatPropertiesNV;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+
+  };
+
+  // Maps the eOpticalFlowImageFormatPropertiesNV enumerant back to its C++ struct.
+  template <>
+  struct CppType<StructureType, StructureType::eOpticalFlowImageFormatPropertiesNV>
+  {
+    using Type = OpticalFlowImageFormatPropertiesNV;
+  };
+
+  // C++ wrapper for VkOpticalFlowSessionCreateInfoNV: parameters for creating an
+  // optical-flow session — frame dimensions, image/flow-vector/cost formats,
+  // output/hint grid sizes, a performance level, and session create flags.
+  // Auto-generated; conversion operators rely on layout compatibility with the
+  // C struct.
+  struct OpticalFlowSessionCreateInfoNV
+  {
+    using NativeType = VkOpticalFlowSessionCreateInfoNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eOpticalFlowSessionCreateInfoNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR OpticalFlowSessionCreateInfoNV(uint32_t width_ = {}, uint32_t height_ = {}, VULKAN_HPP_NAMESPACE::Format imageFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::Format flowVectorFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::Format costFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV outputGridSize_ = {}, VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV hintGridSize_ = {}, VULKAN_HPP_NAMESPACE::OpticalFlowPerformanceLevelNV performanceLevel_ = VULKAN_HPP_NAMESPACE::OpticalFlowPerformanceLevelNV::eUnknown, VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateFlagsNV flags_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), width( width_ ), height( height_ ), imageFormat( imageFormat_ ), flowVectorFormat( flowVectorFormat_ ), costFormat( costFormat_ ), outputGridSize( outputGridSize_ ), hintGridSize( hintGridSize_ ), performanceLevel( performanceLevel_ ), flags( flags_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR OpticalFlowSessionCreateInfoNV( OpticalFlowSessionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    OpticalFlowSessionCreateInfoNV( VkOpticalFlowSessionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : OpticalFlowSessionCreateInfoNV( *reinterpret_cast<OpticalFlowSessionCreateInfoNV const *>( &rhs ) )
+    {}
+
+
+    OpticalFlowSessionCreateInfoNV & operator=( OpticalFlowSessionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    OpticalFlowSessionCreateInfoNV & operator=( VkOpticalFlowSessionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable fluent setters; each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
+    {
+      width = width_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
+    {
+      height = height_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setImageFormat( VULKAN_HPP_NAMESPACE::Format imageFormat_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageFormat = imageFormat_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setFlowVectorFormat( VULKAN_HPP_NAMESPACE::Format flowVectorFormat_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flowVectorFormat = flowVectorFormat_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setCostFormat( VULKAN_HPP_NAMESPACE::Format costFormat_ ) VULKAN_HPP_NOEXCEPT
+    {
+      costFormat = costFormat_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setOutputGridSize( VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV outputGridSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      outputGridSize = outputGridSize_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setHintGridSize( VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV hintGridSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      hintGridSize = hintGridSize_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setPerformanceLevel( VULKAN_HPP_NAMESPACE::OpticalFlowPerformanceLevelNV performanceLevel_ ) VULKAN_HPP_NOEXCEPT
+    {
+      performanceLevel = performanceLevel_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Reinterpreting views onto the C struct (no copy).
+    operator VkOpticalFlowSessionCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkOpticalFlowSessionCreateInfoNV*>( this );
+    }
+
+    operator VkOpticalFlowSessionCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkOpticalFlowSessionCreateInfoNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV const &, VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV const &, VULKAN_HPP_NAMESPACE::OpticalFlowPerformanceLevelNV const &, VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateFlagsNV const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, width, height, imageFormat, flowVectorFormat, costFormat, outputGridSize, hintGridSize, performanceLevel, flags );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( OpticalFlowSessionCreateInfoNV const & ) const = default;
+#else
+    // Shallow comparison: pNext compares by address.
+    bool operator==( OpticalFlowSessionCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( width == rhs.width )
+          && ( height == rhs.height )
+          && ( imageFormat == rhs.imageFormat )
+          && ( flowVectorFormat == rhs.flowVectorFormat )
+          && ( costFormat == rhs.costFormat )
+          && ( outputGridSize == rhs.outputGridSize )
+          && ( hintGridSize == rhs.hintGridSize )
+          && ( performanceLevel == rhs.performanceLevel )
+          && ( flags == rhs.flags );
+#endif
+    }
+
+    bool operator!=( OpticalFlowSessionCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eOpticalFlowSessionCreateInfoNV;
+    void * pNext = {};
+    uint32_t width = {};
+    uint32_t height = {};
+    VULKAN_HPP_NAMESPACE::Format imageFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    VULKAN_HPP_NAMESPACE::Format flowVectorFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    VULKAN_HPP_NAMESPACE::Format costFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV outputGridSize = {};
+    VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV hintGridSize = {};
+    VULKAN_HPP_NAMESPACE::OpticalFlowPerformanceLevelNV performanceLevel = VULKAN_HPP_NAMESPACE::OpticalFlowPerformanceLevelNV::eUnknown;
+    VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateFlagsNV flags = {};
+
+  };
+
+  // Maps the eOpticalFlowSessionCreateInfoNV enumerant back to its C++ struct.
+  template <>
+  struct CppType<StructureType, StructureType::eOpticalFlowSessionCreateInfoNV>
+  {
+    using Type = OpticalFlowSessionCreateInfoNV;
+  };
+
+  // C++ wrapper for VkOpticalFlowSessionCreatePrivateDataInfoNV: attaches an
+  // opaque private-data blob (id, size, pPrivateData) to optical-flow session
+  // creation. Auto-generated; conversion operators rely on layout compatibility
+  // with the C struct.
+  struct OpticalFlowSessionCreatePrivateDataInfoNV
+  {
+    using NativeType = VkOpticalFlowSessionCreatePrivateDataInfoNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eOpticalFlowSessionCreatePrivateDataInfoNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR OpticalFlowSessionCreatePrivateDataInfoNV(uint32_t id_ = {}, uint32_t size_ = {}, const void * pPrivateData_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), id( id_ ), size( size_ ), pPrivateData( pPrivateData_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR OpticalFlowSessionCreatePrivateDataInfoNV( OpticalFlowSessionCreatePrivateDataInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    OpticalFlowSessionCreatePrivateDataInfoNV( VkOpticalFlowSessionCreatePrivateDataInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : OpticalFlowSessionCreatePrivateDataInfoNV( *reinterpret_cast<OpticalFlowSessionCreatePrivateDataInfoNV const *>( &rhs ) )
+    {}
+
+
+    OpticalFlowSessionCreatePrivateDataInfoNV & operator=( OpticalFlowSessionCreatePrivateDataInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    OpticalFlowSessionCreatePrivateDataInfoNV & operator=( VkOpticalFlowSessionCreatePrivateDataInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreatePrivateDataInfoNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable fluent setters; each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreatePrivateDataInfoNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreatePrivateDataInfoNV & setId( uint32_t id_ ) VULKAN_HPP_NOEXCEPT
+    {
+      id = id_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreatePrivateDataInfoNV & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT
+    {
+      size = size_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreatePrivateDataInfoNV & setPPrivateData( const void * pPrivateData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pPrivateData = pPrivateData_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Reinterpreting views onto the C struct (no copy).
+    operator VkOpticalFlowSessionCreatePrivateDataInfoNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkOpticalFlowSessionCreatePrivateDataInfoNV*>( this );
+    }
+
+    operator VkOpticalFlowSessionCreatePrivateDataInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkOpticalFlowSessionCreatePrivateDataInfoNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, const void * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, id, size, pPrivateData );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( OpticalFlowSessionCreatePrivateDataInfoNV const & ) const = default;
+#else
+    // Shallow comparison: pointer members (pNext, pPrivateData) compare by address.
+    bool operator==( OpticalFlowSessionCreatePrivateDataInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( id == rhs.id )
+          && ( size == rhs.size )
+          && ( pPrivateData == rhs.pPrivateData );
+#endif
+    }
+
+    bool operator!=( OpticalFlowSessionCreatePrivateDataInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eOpticalFlowSessionCreatePrivateDataInfoNV;
+    void * pNext = {};
+    uint32_t id = {};
+    uint32_t size = {};
+    const void * pPrivateData = {};
+
+  };
+
+  // Maps the eOpticalFlowSessionCreatePrivateDataInfoNV enumerant back to its C++ struct.
+  template <>
+  struct CppType<StructureType, StructureType::eOpticalFlowSessionCreatePrivateDataInfoNV>
+  {
+    using Type = OpticalFlowSessionCreatePrivateDataInfoNV;
+  };
+
+  // C++ wrapper for VkPastPresentationTimingGOOGLE. Unlike the sType-bearing
+  // structs around it, this is a plain data struct (no sType/pNext, so no
+  // structure chaining and no CppType specialization) holding timing values
+  // for a past present: presentID plus desired/actual/earliest times and margin.
+  struct PastPresentationTimingGOOGLE
+  {
+    using NativeType = VkPastPresentationTimingGOOGLE;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PastPresentationTimingGOOGLE(uint32_t presentID_ = {}, uint64_t desiredPresentTime_ = {}, uint64_t actualPresentTime_ = {}, uint64_t earliestPresentTime_ = {}, uint64_t presentMargin_ = {}) VULKAN_HPP_NOEXCEPT
+    : presentID( presentID_ ), desiredPresentTime( desiredPresentTime_ ), actualPresentTime( actualPresentTime_ ), earliestPresentTime( earliestPresentTime_ ), presentMargin( presentMargin_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PastPresentationTimingGOOGLE( PastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PastPresentationTimingGOOGLE( VkPastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PastPresentationTimingGOOGLE( *reinterpret_cast<PastPresentationTimingGOOGLE const *>( &rhs ) )
+    {}
+
+
+    PastPresentationTimingGOOGLE & operator=( PastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PastPresentationTimingGOOGLE & operator=( VkPastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE const *>( &rhs );
+      return *this;
+    }
+
+
+    // Reinterpreting views onto the C struct (no copy).
+    operator VkPastPresentationTimingGOOGLE const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPastPresentationTimingGOOGLE*>( this );
+    }
+
+    operator VkPastPresentationTimingGOOGLE &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPastPresentationTimingGOOGLE*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<uint32_t const &, uint64_t const &, uint64_t const &, uint64_t const &, uint64_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( presentID, desiredPresentTime, actualPresentTime, earliestPresentTime, presentMargin );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PastPresentationTimingGOOGLE const & ) const = default;
+#else
+    bool operator==( PastPresentationTimingGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( presentID == rhs.presentID )
+          && ( desiredPresentTime == rhs.desiredPresentTime )
+          && ( actualPresentTime == rhs.actualPresentTime )
+          && ( earliestPresentTime == rhs.earliestPresentTime )
+          && ( presentMargin == rhs.presentMargin );
+#endif
+    }
+
+    bool operator!=( PastPresentationTimingGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    uint32_t presentID = {};
+    uint64_t desiredPresentTime = {};
+    uint64_t actualPresentTime = {};
+    uint64_t earliestPresentTime = {};
+    uint64_t presentMargin = {};
+
+  };
+
+  // C++ wrapper for VkPerformanceConfigurationAcquireInfoINTEL: selects the
+  // PerformanceConfigurationTypeINTEL to acquire. Auto-generated; conversion
+  // operators rely on layout compatibility with the C struct.
+  struct PerformanceConfigurationAcquireInfoINTEL
+  {
+    using NativeType = VkPerformanceConfigurationAcquireInfoINTEL;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceConfigurationAcquireInfoINTEL;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PerformanceConfigurationAcquireInfoINTEL(VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), type( type_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PerformanceConfigurationAcquireInfoINTEL( PerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PerformanceConfigurationAcquireInfoINTEL( VkPerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PerformanceConfigurationAcquireInfoINTEL( *reinterpret_cast<PerformanceConfigurationAcquireInfoINTEL const *>( &rhs ) )
+    {}
+
+
+    PerformanceConfigurationAcquireInfoINTEL & operator=( PerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PerformanceConfigurationAcquireInfoINTEL & operator=( VkPerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable fluent setters; each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 PerformanceConfigurationAcquireInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceConfigurationAcquireInfoINTEL & setType( VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT
+    {
+      type = type_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Reinterpreting views onto the C struct (no copy).
+    operator VkPerformanceConfigurationAcquireInfoINTEL const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL*>( this );
+    }
+
+    operator VkPerformanceConfigurationAcquireInfoINTEL &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPerformanceConfigurationAcquireInfoINTEL*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, type );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PerformanceConfigurationAcquireInfoINTEL const & ) const = default;
+#else
+    // Shallow comparison: pNext compares by address.
+    bool operator==( PerformanceConfigurationAcquireInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( type == rhs.type );
+#endif
+    }
+
+    bool operator!=( PerformanceConfigurationAcquireInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceConfigurationAcquireInfoINTEL;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type = VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated;
+
+  };
+
+  // Maps the ePerformanceConfigurationAcquireInfoINTEL enumerant back to its C++ struct.
+  template <>
+  struct CppType<StructureType, StructureType::ePerformanceConfigurationAcquireInfoINTEL>
+  {
+    using Type = PerformanceConfigurationAcquireInfoINTEL;
+  };
+
+  // C++ wrapper for VkPerformanceCounterDescriptionKHR: a counter's flags plus fixed-size
+  // name/category/description character arrays. The reinterpret_cast conversions below rely
+  // on the wrapper being layout-compatible with the C struct.
+  struct PerformanceCounterDescriptionKHR
+  {
+    using NativeType = VkPerformanceCounterDescriptionKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceCounterDescriptionKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR_14 PerformanceCounterDescriptionKHR(VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR flags_ = {}, std::array<char,VK_MAX_DESCRIPTION_SIZE> const & name_ = {}, std::array<char,VK_MAX_DESCRIPTION_SIZE> const & category_ = {}, std::array<char,VK_MAX_DESCRIPTION_SIZE> const & description_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), name( name_ ), category( category_ ), description( description_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterDescriptionKHR( PerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the (layout-identical) wrapper.
+    PerformanceCounterDescriptionKHR( VkPerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PerformanceCounterDescriptionKHR( *reinterpret_cast<PerformanceCounterDescriptionKHR const *>( &rhs ) )
+    {}
+
+
+    PerformanceCounterDescriptionKHR & operator=( PerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PerformanceCounterDescriptionKHR & operator=( VkPerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkPerformanceCounterDescriptionKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPerformanceCounterDescriptionKHR*>( this );
+    }
+
+    operator VkPerformanceCounterDescriptionKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPerformanceCounterDescriptionKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &>
+#endif
+      // Tuple of references over all members, used by the reflection-based operator==.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, name, category, description );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PerformanceCounterDescriptionKHR const & ) const = default;
+#else
+    bool operator==( PerformanceCounterDescriptionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( name == rhs.name )
+          && ( category == rhs.category )
+          && ( description == rhs.description );
+#endif
+    }
+
+    bool operator!=( PerformanceCounterDescriptionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceCounterDescriptionKHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR flags = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> name = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> category = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
+
+  };
+
+  // Maps StructureType::ePerformanceCounterDescriptionKHR back to its C++ struct type.
+  template <>
+  struct CppType<StructureType, StructureType::ePerformanceCounterDescriptionKHR>
+  {
+    using Type = PerformanceCounterDescriptionKHR;
+  };
+
+  // C++ wrapper for VkPerformanceCounterKHR: describes one counter's unit, scope, storage
+  // type, and identifying UUID. Layout-compatible with the C struct (required by the
+  // reinterpret_cast conversions below).
+  struct PerformanceCounterKHR
+  {
+    using NativeType = VkPerformanceCounterKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceCounterKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR_14 PerformanceCounterKHR(VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR unit_ = VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR::eGeneric, VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR scope_ = VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR::eCommandBuffer, VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR storage_ = VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR::eInt32, std::array<uint8_t,VK_UUID_SIZE> const & uuid_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), unit( unit_ ), scope( scope_ ), storage( storage_ ), uuid( uuid_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterKHR( PerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the (layout-identical) wrapper.
+    PerformanceCounterKHR( VkPerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PerformanceCounterKHR( *reinterpret_cast<PerformanceCounterKHR const *>( &rhs ) )
+    {}
+
+
+    PerformanceCounterKHR & operator=( PerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PerformanceCounterKHR & operator=( VkPerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkPerformanceCounterKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPerformanceCounterKHR*>( this );
+    }
+
+    operator VkPerformanceCounterKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPerformanceCounterKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR const &, VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR const &, VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &>
+#endif
+      // Tuple of references over all members, used by the reflection-based operator==.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, unit, scope, storage, uuid );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PerformanceCounterKHR const & ) const = default;
+#else
+    bool operator==( PerformanceCounterKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( unit == rhs.unit )
+          && ( scope == rhs.scope )
+          && ( storage == rhs.storage )
+          && ( uuid == rhs.uuid );
+#endif
+    }
+
+    bool operator!=( PerformanceCounterKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceCounterKHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR unit = VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR::eGeneric;
+    VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR scope = VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR::eCommandBuffer;
+    VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR storage = VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR::eInt32;
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> uuid = {};
+
+  };
+
+  // Maps StructureType::ePerformanceCounterKHR back to its C++ struct type.
+  template <>
+  struct CppType<StructureType, StructureType::ePerformanceCounterKHR>
+  {
+    using Type = PerformanceCounterKHR;
+  };
+
+  // C++ wrapper for the VkPerformanceCounterResultKHR union: one counter result, stored as
+  // whichever of the six numeric representations the counter's storage type selects.
+  // Which member is active is not recorded here; the caller must know the storage kind.
+  union PerformanceCounterResultKHR
+  {
+    using NativeType = VkPerformanceCounterResultKHR;
+#if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( int32_t int32_ = {} )
+      : int32( int32_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( int64_t int64_ )
+      : int64( int64_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( uint32_t uint32_ )
+      : uint32( uint32_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( uint64_t uint64_ )
+      : uint64( uint64_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( float float32_ )
+      : float32( float32_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( double float64_ )
+      : float64( float64_ )
+    {}
+#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/
+
+#if !defined( VULKAN_HPP_NO_UNION_SETTERS )
+    // Chainable setters; each activates the corresponding union member.
+    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setInt32( int32_t int32_ ) VULKAN_HPP_NOEXCEPT
+    {
+      int32 = int32_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setInt64( int64_t int64_ ) VULKAN_HPP_NOEXCEPT
+    {
+      int64 = int64_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setUint32( uint32_t uint32_ ) VULKAN_HPP_NOEXCEPT
+    {
+      uint32 = uint32_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setUint64( uint64_t uint64_ ) VULKAN_HPP_NOEXCEPT
+    {
+      uint64 = uint64_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setFloat32( float float32_ ) VULKAN_HPP_NOEXCEPT
+    {
+      float32 = float32_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setFloat64( double float64_ ) VULKAN_HPP_NOEXCEPT
+    {
+      float64 = float64_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_UNION_SETTERS*/
+
+    operator VkPerformanceCounterResultKHR const &() const
+    {
+      return *reinterpret_cast<const VkPerformanceCounterResultKHR*>( this );
+    }
+
+    operator VkPerformanceCounterResultKHR &()
+    {
+      return *reinterpret_cast<VkPerformanceCounterResultKHR*>( this );
+    }
+
+    int32_t int32;
+    int64_t int64;
+    uint32_t uint32;
+    uint64_t uint64;
+    float float32;
+    double float64;
+
+  };
+
+  // C++ wrapper for VkPerformanceMarkerInfoINTEL: carries a single 64-bit marker value.
+  // Layout-compatible with the C struct (see the reinterpret_cast conversions below).
+  struct PerformanceMarkerInfoINTEL
+  {
+    using NativeType = VkPerformanceMarkerInfoINTEL;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceMarkerInfoINTEL;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PerformanceMarkerInfoINTEL(uint64_t marker_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), marker( marker_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PerformanceMarkerInfoINTEL( PerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PerformanceMarkerInfoINTEL( VkPerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PerformanceMarkerInfoINTEL( *reinterpret_cast<PerformanceMarkerInfoINTEL const *>( &rhs ) )
+    {}
+
+
+    PerformanceMarkerInfoINTEL & operator=( PerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PerformanceMarkerInfoINTEL & operator=( VkPerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters (builder style).
+    VULKAN_HPP_CONSTEXPR_14 PerformanceMarkerInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceMarkerInfoINTEL & setMarker( uint64_t marker_ ) VULKAN_HPP_NOEXCEPT
+    {
+      marker = marker_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPerformanceMarkerInfoINTEL const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPerformanceMarkerInfoINTEL*>( this );
+    }
+
+    operator VkPerformanceMarkerInfoINTEL &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPerformanceMarkerInfoINTEL*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint64_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, marker );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PerformanceMarkerInfoINTEL const & ) const = default;
+#else
+    bool operator==( PerformanceMarkerInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( marker == rhs.marker );
+#endif
+    }
+
+    bool operator!=( PerformanceMarkerInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceMarkerInfoINTEL;
+    const void * pNext = {};
+    uint64_t marker = {};
+
+  };
+
+  // Maps StructureType::ePerformanceMarkerInfoINTEL back to its C++ struct type.
+  template <>
+  struct CppType<StructureType, StructureType::ePerformanceMarkerInfoINTEL>
+  {
+    using Type = PerformanceMarkerInfoINTEL;
+  };
+
+  // C++ wrapper for VkPerformanceOverrideInfoINTEL: selects an override type, whether it is
+  // enabled, and a 64-bit parameter value. Layout-compatible with the C struct.
+  struct PerformanceOverrideInfoINTEL
+  {
+    using NativeType = VkPerformanceOverrideInfoINTEL;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceOverrideInfoINTEL;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PerformanceOverrideInfoINTEL(VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL::eNullHardware, VULKAN_HPP_NAMESPACE::Bool32 enable_ = {}, uint64_t parameter_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), type( type_ ), enable( enable_ ), parameter( parameter_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PerformanceOverrideInfoINTEL( PerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PerformanceOverrideInfoINTEL( VkPerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PerformanceOverrideInfoINTEL( *reinterpret_cast<PerformanceOverrideInfoINTEL const *>( &rhs ) )
+    {}
+
+
+    PerformanceOverrideInfoINTEL & operator=( PerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PerformanceOverrideInfoINTEL & operator=( VkPerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters (builder style).
+    VULKAN_HPP_CONSTEXPR_14 PerformanceOverrideInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceOverrideInfoINTEL & setType( VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT
+    {
+      type = type_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceOverrideInfoINTEL & setEnable( VULKAN_HPP_NAMESPACE::Bool32 enable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      enable = enable_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceOverrideInfoINTEL & setParameter( uint64_t parameter_ ) VULKAN_HPP_NOEXCEPT
+    {
+      parameter = parameter_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPerformanceOverrideInfoINTEL const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPerformanceOverrideInfoINTEL*>( this );
+    }
+
+    operator VkPerformanceOverrideInfoINTEL &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPerformanceOverrideInfoINTEL*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint64_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, type, enable, parameter );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PerformanceOverrideInfoINTEL const & ) const = default;
+#else
+    bool operator==( PerformanceOverrideInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( type == rhs.type )
+          && ( enable == rhs.enable )
+          && ( parameter == rhs.parameter );
+#endif
+    }
+
+    bool operator!=( PerformanceOverrideInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceOverrideInfoINTEL;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type = VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL::eNullHardware;
+    VULKAN_HPP_NAMESPACE::Bool32 enable = {};
+    uint64_t parameter = {};
+
+  };
+
+  // Maps StructureType::ePerformanceOverrideInfoINTEL back to its C++ struct type.
+  template <>
+  struct CppType<StructureType, StructureType::ePerformanceOverrideInfoINTEL>
+  {
+    using Type = PerformanceOverrideInfoINTEL;
+  };
+
+  // C++ wrapper for VkPerformanceQuerySubmitInfoKHR: carries the counter pass index for a
+  // submission. Layout-compatible with the C struct.
+  struct PerformanceQuerySubmitInfoKHR
+  {
+    using NativeType = VkPerformanceQuerySubmitInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceQuerySubmitInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PerformanceQuerySubmitInfoKHR(uint32_t counterPassIndex_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), counterPassIndex( counterPassIndex_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PerformanceQuerySubmitInfoKHR( PerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PerformanceQuerySubmitInfoKHR( VkPerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PerformanceQuerySubmitInfoKHR( *reinterpret_cast<PerformanceQuerySubmitInfoKHR const *>( &rhs ) )
+    {}
+
+
+    PerformanceQuerySubmitInfoKHR & operator=( PerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PerformanceQuerySubmitInfoKHR & operator=( VkPerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters (builder style).
+    VULKAN_HPP_CONSTEXPR_14 PerformanceQuerySubmitInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceQuerySubmitInfoKHR & setCounterPassIndex( uint32_t counterPassIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      counterPassIndex = counterPassIndex_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPerformanceQuerySubmitInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPerformanceQuerySubmitInfoKHR*>( this );
+    }
+
+    operator VkPerformanceQuerySubmitInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPerformanceQuerySubmitInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, counterPassIndex );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PerformanceQuerySubmitInfoKHR const & ) const = default;
+#else
+    bool operator==( PerformanceQuerySubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( counterPassIndex == rhs.counterPassIndex );
+#endif
+    }
+
+    bool operator!=( PerformanceQuerySubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceQuerySubmitInfoKHR;
+    const void * pNext = {};
+    uint32_t counterPassIndex = {};
+
+  };
+
+  // Maps StructureType::ePerformanceQuerySubmitInfoKHR back to its C++ struct type.
+  template <>
+  struct CppType<StructureType, StructureType::ePerformanceQuerySubmitInfoKHR>
+  {
+    using Type = PerformanceQuerySubmitInfoKHR;
+  };
+
+  // C++ wrapper for VkPerformanceStreamMarkerInfoINTEL: carries a single 32-bit marker
+  // (the 32-bit counterpart of PerformanceMarkerInfoINTEL). Layout-compatible with the C struct.
+  struct PerformanceStreamMarkerInfoINTEL
+  {
+    using NativeType = VkPerformanceStreamMarkerInfoINTEL;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceStreamMarkerInfoINTEL;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PerformanceStreamMarkerInfoINTEL(uint32_t marker_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), marker( marker_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PerformanceStreamMarkerInfoINTEL( PerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PerformanceStreamMarkerInfoINTEL( VkPerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PerformanceStreamMarkerInfoINTEL( *reinterpret_cast<PerformanceStreamMarkerInfoINTEL const *>( &rhs ) )
+    {}
+
+
+    PerformanceStreamMarkerInfoINTEL & operator=( PerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PerformanceStreamMarkerInfoINTEL & operator=( VkPerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters (builder style).
+    VULKAN_HPP_CONSTEXPR_14 PerformanceStreamMarkerInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceStreamMarkerInfoINTEL & setMarker( uint32_t marker_ ) VULKAN_HPP_NOEXCEPT
+    {
+      marker = marker_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPerformanceStreamMarkerInfoINTEL const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL*>( this );
+    }
+
+    operator VkPerformanceStreamMarkerInfoINTEL &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPerformanceStreamMarkerInfoINTEL*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, marker );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PerformanceStreamMarkerInfoINTEL const & ) const = default;
+#else
+    bool operator==( PerformanceStreamMarkerInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( marker == rhs.marker );
+#endif
+    }
+
+    bool operator!=( PerformanceStreamMarkerInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceStreamMarkerInfoINTEL;
+    const void * pNext = {};
+    uint32_t marker = {};
+
+  };
+
+  // Maps StructureType::ePerformanceStreamMarkerInfoINTEL back to its C++ struct type.
+  template <>
+  struct CppType<StructureType, StructureType::ePerformanceStreamMarkerInfoINTEL>
+  {
+    using Type = PerformanceStreamMarkerInfoINTEL;
+  };
+
+  // C++ wrapper for the VkPerformanceValueDataINTEL union: one value in any of five
+  // representations. NOTE(review): there is no Bool32 constructor, only setValueBool --
+  // presumably because Bool32 would be ambiguous with the uint32_t overload when it aliases
+  // the C VkBool32; confirm against the generator if this ever changes.
+  union PerformanceValueDataINTEL
+  {
+    using NativeType = VkPerformanceValueDataINTEL;
+#if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL( uint32_t value32_ = {} )
+      : value32( value32_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL( uint64_t value64_ )
+      : value64( value64_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL( float valueFloat_ )
+      : valueFloat( valueFloat_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL( const char * valueString_ )
+      : valueString( valueString_ )
+    {}
+#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/
+
+#if !defined( VULKAN_HPP_NO_UNION_SETTERS )
+    // Chainable setters; each activates the corresponding union member.
+    VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL & setValue32( uint32_t value32_ ) VULKAN_HPP_NOEXCEPT
+    {
+      value32 = value32_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL & setValue64( uint64_t value64_ ) VULKAN_HPP_NOEXCEPT
+    {
+      value64 = value64_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL & setValueFloat( float valueFloat_ ) VULKAN_HPP_NOEXCEPT
+    {
+      valueFloat = valueFloat_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL & setValueBool( VULKAN_HPP_NAMESPACE::Bool32 valueBool_ ) VULKAN_HPP_NOEXCEPT
+    {
+      valueBool = valueBool_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL & setValueString( const char * valueString_ ) VULKAN_HPP_NOEXCEPT
+    {
+      valueString = valueString_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_UNION_SETTERS*/
+
+    operator VkPerformanceValueDataINTEL const &() const
+    {
+      return *reinterpret_cast<const VkPerformanceValueDataINTEL*>( this );
+    }
+
+    operator VkPerformanceValueDataINTEL &()
+    {
+      return *reinterpret_cast<VkPerformanceValueDataINTEL*>( this );
+    }
+
+#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
+    uint32_t value32;
+    uint64_t value64;
+    float valueFloat;
+    VULKAN_HPP_NAMESPACE::Bool32 valueBool;
+    const char * valueString;
+#else
+    // Without unrestricted unions, fall back to the raw C type for the Bool32 member.
+    uint32_t value32;
+    uint64_t value64;
+    float valueFloat;
+    VkBool32 valueBool;
+    const char * valueString;
+#endif  /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
+
+  };
+
+  // C++ wrapper for VkPerformanceValueINTEL: a type tag plus a PerformanceValueDataINTEL
+  // union holding the value. No sType/pNext members, and no operator== is generated
+  // (the union member is not directly comparable).
+  struct PerformanceValueINTEL
+  {
+    using NativeType = VkPerformanceValueINTEL;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR_14 PerformanceValueINTEL(VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL::eUint32, VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL data_ = {}) VULKAN_HPP_NOEXCEPT
+    : type( type_ ), data( data_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceValueINTEL( PerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PerformanceValueINTEL( VkPerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PerformanceValueINTEL( *reinterpret_cast<PerformanceValueINTEL const *>( &rhs ) )
+    {}
+
+
+    PerformanceValueINTEL & operator=( PerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PerformanceValueINTEL & operator=( VkPerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceValueINTEL const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters; the caller must keep `type` consistent with the active union member.
+    VULKAN_HPP_CONSTEXPR_14 PerformanceValueINTEL & setType( VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT
+    {
+      type = type_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceValueINTEL & setData( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL const & data_ ) VULKAN_HPP_NOEXCEPT
+    {
+      data = data_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPerformanceValueINTEL const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPerformanceValueINTEL*>( this );
+    }
+
+    operator VkPerformanceValueINTEL &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPerformanceValueINTEL*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL const &, VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( type, data );
+    }
+#endif
+
+
+    public:
+    VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type = VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL::eUint32;
+    VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL data = {};
+
+  };
+
+  // C++ wrapper for VkPhysicalDevice16BitStorageFeatures: four Bool32 feature flags for
+  // 16-bit storage access. Layout-compatible with the C struct.
+  struct PhysicalDevice16BitStorageFeatures
+  {
+    using NativeType = VkPhysicalDevice16BitStorageFeatures;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevice16BitStorageFeatures;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDevice16BitStorageFeatures(VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), storageBuffer16BitAccess( storageBuffer16BitAccess_ ), uniformAndStorageBuffer16BitAccess( uniformAndStorageBuffer16BitAccess_ ), storagePushConstant16( storagePushConstant16_ ), storageInputOutput16( storageInputOutput16_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevice16BitStorageFeatures( PhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevice16BitStorageFeatures( VkPhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDevice16BitStorageFeatures( *reinterpret_cast<PhysicalDevice16BitStorageFeatures const *>( &rhs ) )
+    {}
+
+
+    PhysicalDevice16BitStorageFeatures & operator=( PhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDevice16BitStorageFeatures & operator=( VkPhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters (builder style).
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures & setStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      storageBuffer16BitAccess = storageBuffer16BitAccess_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures & setUniformAndStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      uniformAndStorageBuffer16BitAccess = uniformAndStorageBuffer16BitAccess_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures & setStoragePushConstant16( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ ) VULKAN_HPP_NOEXCEPT
+    {
+      storagePushConstant16 = storagePushConstant16_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures & setStorageInputOutput16( VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ ) VULKAN_HPP_NOEXCEPT
+    {
+      storageInputOutput16 = storageInputOutput16_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDevice16BitStorageFeatures const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevice16BitStorageFeatures*>( this );
+    }
+
+    operator VkPhysicalDevice16BitStorageFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevice16BitStorageFeatures*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, storageBuffer16BitAccess, uniformAndStorageBuffer16BitAccess, storagePushConstant16, storageInputOutput16 );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDevice16BitStorageFeatures const & ) const = default;
+#else
+    bool operator==( PhysicalDevice16BitStorageFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( storageBuffer16BitAccess == rhs.storageBuffer16BitAccess )
+          && ( uniformAndStorageBuffer16BitAccess == rhs.uniformAndStorageBuffer16BitAccess )
+          && ( storagePushConstant16 == rhs.storagePushConstant16 )
+          && ( storageInputOutput16 == rhs.storageInputOutput16 );
+#endif
+    }
+
+    bool operator!=( PhysicalDevice16BitStorageFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevice16BitStorageFeatures;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess = {};
+    VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess = {};
+    VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16 = {};
+
+  };
+
+  // Maps StructureType::ePhysicalDevice16BitStorageFeatures back to its C++ struct type.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevice16BitStorageFeatures>
+  {
+    using Type = PhysicalDevice16BitStorageFeatures;
+  };
+  // Legacy alias from before the extension was promoted to core.
+  using PhysicalDevice16BitStorageFeaturesKHR = PhysicalDevice16BitStorageFeatures;
+
+  struct PhysicalDevice4444FormatsFeaturesEXT
+  {
+    using NativeType = VkPhysicalDevice4444FormatsFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevice4444FormatsFeaturesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDevice4444FormatsFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 formatA4R4G4B4_ = {}, VULKAN_HPP_NAMESPACE::Bool32 formatA4B4G4R4_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), formatA4R4G4B4( formatA4R4G4B4_ ), formatA4B4G4R4( formatA4B4G4R4_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevice4444FormatsFeaturesEXT( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevice4444FormatsFeaturesEXT( VkPhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDevice4444FormatsFeaturesEXT( *reinterpret_cast<PhysicalDevice4444FormatsFeaturesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDevice4444FormatsFeaturesEXT & operator=( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDevice4444FormatsFeaturesEXT & operator=( VkPhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevice4444FormatsFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevice4444FormatsFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevice4444FormatsFeaturesEXT & setFormatA4R4G4B4( VULKAN_HPP_NAMESPACE::Bool32 formatA4R4G4B4_ ) VULKAN_HPP_NOEXCEPT
+    {
+      formatA4R4G4B4 = formatA4R4G4B4_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevice4444FormatsFeaturesEXT & setFormatA4B4G4R4( VULKAN_HPP_NAMESPACE::Bool32 formatA4B4G4R4_ ) VULKAN_HPP_NOEXCEPT
+    {
+      formatA4B4G4R4 = formatA4B4G4R4_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDevice4444FormatsFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevice4444FormatsFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDevice4444FormatsFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevice4444FormatsFeaturesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, formatA4R4G4B4, formatA4B4G4R4 );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDevice4444FormatsFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( formatA4R4G4B4 == rhs.formatA4R4G4B4 )
+          && ( formatA4B4G4R4 == rhs.formatA4B4G4R4 );
+#endif
+    }
+
+    bool operator!=( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevice4444FormatsFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 formatA4R4G4B4 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 formatA4B4G4R4 = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevice4444FormatsFeaturesEXT>
+  {
+    using Type = PhysicalDevice4444FormatsFeaturesEXT;
+  };
+
+  struct PhysicalDevice8BitStorageFeatures
+  {
+    using NativeType = VkPhysicalDevice8BitStorageFeatures;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevice8BitStorageFeatures;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDevice8BitStorageFeatures(VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), storageBuffer8BitAccess( storageBuffer8BitAccess_ ), uniformAndStorageBuffer8BitAccess( uniformAndStorageBuffer8BitAccess_ ), storagePushConstant8( storagePushConstant8_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevice8BitStorageFeatures( PhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevice8BitStorageFeatures( VkPhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDevice8BitStorageFeatures( *reinterpret_cast<PhysicalDevice8BitStorageFeatures const *>( &rhs ) )
+    {}
+
+
+    PhysicalDevice8BitStorageFeatures & operator=( PhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDevice8BitStorageFeatures & operator=( VkPhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeatures const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevice8BitStorageFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevice8BitStorageFeatures & setStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      storageBuffer8BitAccess = storageBuffer8BitAccess_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevice8BitStorageFeatures & setUniformAndStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      uniformAndStorageBuffer8BitAccess = uniformAndStorageBuffer8BitAccess_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevice8BitStorageFeatures & setStoragePushConstant8( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ ) VULKAN_HPP_NOEXCEPT
+    {
+      storagePushConstant8 = storagePushConstant8_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDevice8BitStorageFeatures const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevice8BitStorageFeatures*>( this );
+    }
+
+    operator VkPhysicalDevice8BitStorageFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevice8BitStorageFeatures*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, storageBuffer8BitAccess, uniformAndStorageBuffer8BitAccess, storagePushConstant8 );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDevice8BitStorageFeatures const & ) const = default;
+#else
+    bool operator==( PhysicalDevice8BitStorageFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( storageBuffer8BitAccess == rhs.storageBuffer8BitAccess )
+          && ( uniformAndStorageBuffer8BitAccess == rhs.uniformAndStorageBuffer8BitAccess )
+          && ( storagePushConstant8 == rhs.storagePushConstant8 );
+#endif
+    }
+
+    bool operator!=( PhysicalDevice8BitStorageFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevice8BitStorageFeatures;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess = {};
+    VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess = {};
+    VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8 = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevice8BitStorageFeatures>
+  {
+    using Type = PhysicalDevice8BitStorageFeatures;
+  };
+  using PhysicalDevice8BitStorageFeaturesKHR = PhysicalDevice8BitStorageFeatures;
+
+  struct PhysicalDeviceASTCDecodeFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceASTCDecodeFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceASTCDecodeFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), decodeModeSharedExponent( decodeModeSharedExponent_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceASTCDecodeFeaturesEXT( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceASTCDecodeFeaturesEXT( VkPhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceASTCDecodeFeaturesEXT( *reinterpret_cast<PhysicalDeviceASTCDecodeFeaturesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceASTCDecodeFeaturesEXT & operator=( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceASTCDecodeFeaturesEXT & operator=( VkPhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceASTCDecodeFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceASTCDecodeFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceASTCDecodeFeaturesEXT & setDecodeModeSharedExponent( VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      decodeModeSharedExponent = decodeModeSharedExponent_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceASTCDecodeFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceASTCDecodeFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceASTCDecodeFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceASTCDecodeFeaturesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, decodeModeSharedExponent );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceASTCDecodeFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( decodeModeSharedExponent == rhs.decodeModeSharedExponent );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT>
+  {
+    using Type = PhysicalDeviceASTCDecodeFeaturesEXT;
+  };
+
+  struct PhysicalDeviceAccelerationStructureFeaturesKHR
+  {
+    using NativeType = VkPhysicalDeviceAccelerationStructureFeaturesKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAccelerationStructureFeaturesKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceAccelerationStructureFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 accelerationStructure_ = {}, VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureCaptureReplay_ = {}, VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureIndirectBuild_ = {}, VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureHostCommands_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingAccelerationStructureUpdateAfterBind_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), accelerationStructure( accelerationStructure_ ), accelerationStructureCaptureReplay( accelerationStructureCaptureReplay_ ), accelerationStructureIndirectBuild( accelerationStructureIndirectBuild_ ), accelerationStructureHostCommands( accelerationStructureHostCommands_ ), descriptorBindingAccelerationStructureUpdateAfterBind( descriptorBindingAccelerationStructureUpdateAfterBind_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceAccelerationStructureFeaturesKHR( PhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceAccelerationStructureFeaturesKHR( VkPhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceAccelerationStructureFeaturesKHR( *reinterpret_cast<PhysicalDeviceAccelerationStructureFeaturesKHR const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceAccelerationStructureFeaturesKHR & operator=( PhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceAccelerationStructureFeaturesKHR & operator=( VkPhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructureFeaturesKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & setAccelerationStructure( VULKAN_HPP_NAMESPACE::Bool32 accelerationStructure_ ) VULKAN_HPP_NOEXCEPT
+    {
+      accelerationStructure = accelerationStructure_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & setAccelerationStructureCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
+    {
+      accelerationStructureCaptureReplay = accelerationStructureCaptureReplay_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & setAccelerationStructureIndirectBuild( VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureIndirectBuild_ ) VULKAN_HPP_NOEXCEPT
+    {
+      accelerationStructureIndirectBuild = accelerationStructureIndirectBuild_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & setAccelerationStructureHostCommands( VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureHostCommands_ ) VULKAN_HPP_NOEXCEPT
+    {
+      accelerationStructureHostCommands = accelerationStructureHostCommands_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & setDescriptorBindingAccelerationStructureUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingAccelerationStructureUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingAccelerationStructureUpdateAfterBind = descriptorBindingAccelerationStructureUpdateAfterBind_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceAccelerationStructureFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceAccelerationStructureFeaturesKHR*>( this );
+    }
+
+    operator VkPhysicalDeviceAccelerationStructureFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceAccelerationStructureFeaturesKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, accelerationStructure, accelerationStructureCaptureReplay, accelerationStructureIndirectBuild, accelerationStructureHostCommands, descriptorBindingAccelerationStructureUpdateAfterBind );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceAccelerationStructureFeaturesKHR const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( accelerationStructure == rhs.accelerationStructure )
+          && ( accelerationStructureCaptureReplay == rhs.accelerationStructureCaptureReplay )
+          && ( accelerationStructureIndirectBuild == rhs.accelerationStructureIndirectBuild )
+          && ( accelerationStructureHostCommands == rhs.accelerationStructureHostCommands )
+          && ( descriptorBindingAccelerationStructureUpdateAfterBind == rhs.descriptorBindingAccelerationStructureUpdateAfterBind );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAccelerationStructureFeaturesKHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 accelerationStructure = {};
+    VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureCaptureReplay = {};
+    VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureIndirectBuild = {};
+    VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureHostCommands = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingAccelerationStructureUpdateAfterBind = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceAccelerationStructureFeaturesKHR>
+  {
+    using Type = PhysicalDeviceAccelerationStructureFeaturesKHR;
+  };
+
+  struct PhysicalDeviceAccelerationStructurePropertiesKHR
+  {
+    using NativeType = VkPhysicalDeviceAccelerationStructurePropertiesKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAccelerationStructurePropertiesKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceAccelerationStructurePropertiesKHR(uint64_t maxGeometryCount_ = {}, uint64_t maxInstanceCount_ = {}, uint64_t maxPrimitiveCount_ = {}, uint32_t maxPerStageDescriptorAccelerationStructures_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindAccelerationStructures_ = {}, uint32_t maxDescriptorSetAccelerationStructures_ = {}, uint32_t maxDescriptorSetUpdateAfterBindAccelerationStructures_ = {}, uint32_t minAccelerationStructureScratchOffsetAlignment_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), maxGeometryCount( maxGeometryCount_ ), maxInstanceCount( maxInstanceCount_ ), maxPrimitiveCount( maxPrimitiveCount_ ), maxPerStageDescriptorAccelerationStructures( maxPerStageDescriptorAccelerationStructures_ ), maxPerStageDescriptorUpdateAfterBindAccelerationStructures( maxPerStageDescriptorUpdateAfterBindAccelerationStructures_ ), maxDescriptorSetAccelerationStructures( maxDescriptorSetAccelerationStructures_ ), maxDescriptorSetUpdateAfterBindAccelerationStructures( maxDescriptorSetUpdateAfterBindAccelerationStructures_ ), minAccelerationStructureScratchOffsetAlignment( minAccelerationStructureScratchOffsetAlignment_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceAccelerationStructurePropertiesKHR( PhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceAccelerationStructurePropertiesKHR( VkPhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceAccelerationStructurePropertiesKHR( *reinterpret_cast<PhysicalDeviceAccelerationStructurePropertiesKHR const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceAccelerationStructurePropertiesKHR & operator=( PhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceAccelerationStructurePropertiesKHR & operator=( VkPhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructurePropertiesKHR const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceAccelerationStructurePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceAccelerationStructurePropertiesKHR*>( this );
+    }
+
+    operator VkPhysicalDeviceAccelerationStructurePropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceAccelerationStructurePropertiesKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint64_t const &, uint64_t const &, uint64_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, maxGeometryCount, maxInstanceCount, maxPrimitiveCount, maxPerStageDescriptorAccelerationStructures, maxPerStageDescriptorUpdateAfterBindAccelerationStructures, maxDescriptorSetAccelerationStructures, maxDescriptorSetUpdateAfterBindAccelerationStructures, minAccelerationStructureScratchOffsetAlignment );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceAccelerationStructurePropertiesKHR const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxGeometryCount == rhs.maxGeometryCount )
+          && ( maxInstanceCount == rhs.maxInstanceCount )
+          && ( maxPrimitiveCount == rhs.maxPrimitiveCount )
+          && ( maxPerStageDescriptorAccelerationStructures == rhs.maxPerStageDescriptorAccelerationStructures )
+          && ( maxPerStageDescriptorUpdateAfterBindAccelerationStructures == rhs.maxPerStageDescriptorUpdateAfterBindAccelerationStructures )
+          && ( maxDescriptorSetAccelerationStructures == rhs.maxDescriptorSetAccelerationStructures )
+          && ( maxDescriptorSetUpdateAfterBindAccelerationStructures == rhs.maxDescriptorSetUpdateAfterBindAccelerationStructures )
+          && ( minAccelerationStructureScratchOffsetAlignment == rhs.minAccelerationStructureScratchOffsetAlignment );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAccelerationStructurePropertiesKHR;
+    void * pNext = {};
+    uint64_t maxGeometryCount = {};
+    uint64_t maxInstanceCount = {};
+    uint64_t maxPrimitiveCount = {};
+    uint32_t maxPerStageDescriptorAccelerationStructures = {};
+    uint32_t maxPerStageDescriptorUpdateAfterBindAccelerationStructures = {};
+    uint32_t maxDescriptorSetAccelerationStructures = {};
+    uint32_t maxDescriptorSetUpdateAfterBindAccelerationStructures = {};
+    uint32_t minAccelerationStructureScratchOffsetAlignment = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceAccelerationStructurePropertiesKHR>
+  {
+    using Type = PhysicalDeviceAccelerationStructurePropertiesKHR;
+  };
+
+  struct PhysicalDeviceAddressBindingReportFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceAddressBindingReportFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAddressBindingReportFeaturesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceAddressBindingReportFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 reportAddressBinding_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), reportAddressBinding( reportAddressBinding_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceAddressBindingReportFeaturesEXT( PhysicalDeviceAddressBindingReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceAddressBindingReportFeaturesEXT( VkPhysicalDeviceAddressBindingReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceAddressBindingReportFeaturesEXT( *reinterpret_cast<PhysicalDeviceAddressBindingReportFeaturesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceAddressBindingReportFeaturesEXT & operator=( PhysicalDeviceAddressBindingReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceAddressBindingReportFeaturesEXT & operator=( VkPhysicalDeviceAddressBindingReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceAddressBindingReportFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAddressBindingReportFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAddressBindingReportFeaturesEXT & setReportAddressBinding( VULKAN_HPP_NAMESPACE::Bool32 reportAddressBinding_ ) VULKAN_HPP_NOEXCEPT
+    {
+      reportAddressBinding = reportAddressBinding_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceAddressBindingReportFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceAddressBindingReportFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceAddressBindingReportFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceAddressBindingReportFeaturesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, reportAddressBinding );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceAddressBindingReportFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceAddressBindingReportFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( reportAddressBinding == rhs.reportAddressBinding );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceAddressBindingReportFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAddressBindingReportFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 reportAddressBinding = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceAddressBindingReportFeaturesEXT>
+  {
+    using Type = PhysicalDeviceAddressBindingReportFeaturesEXT;
+  };
+
+  // C++ wrapper around VkPhysicalDeviceAmigoProfilingFeaturesSEC: an sType/pNext
+  // chain header plus one Bool32 feature flag (amigoProfiling).
+  // NOTE(review): this appears to be Vulkan-Hpp generator output delivered in a
+  // diff hunk — fix the generator rather than hand-editing this boilerplate.
+  struct PhysicalDeviceAmigoProfilingFeaturesSEC
+  {
+    using NativeType = VkPhysicalDeviceAmigoProfilingFeaturesSEC;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAmigoProfilingFeaturesSEC;
+
+
+    // Constructors: members default to {}, pNext defaults to nullptr; the ctor
+    // taking the C struct reinterpret_casts it (relies on identical layout).
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceAmigoProfilingFeaturesSEC(VULKAN_HPP_NAMESPACE::Bool32 amigoProfiling_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), amigoProfiling( amigoProfiling_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceAmigoProfilingFeaturesSEC( PhysicalDeviceAmigoProfilingFeaturesSEC const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceAmigoProfilingFeaturesSEC( VkPhysicalDeviceAmigoProfilingFeaturesSEC const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceAmigoProfilingFeaturesSEC( *reinterpret_cast<PhysicalDeviceAmigoProfilingFeaturesSEC const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceAmigoProfilingFeaturesSEC & operator=( PhysicalDeviceAmigoProfilingFeaturesSEC const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assignment from the C struct, again via reinterpret_cast.
+    PhysicalDeviceAmigoProfilingFeaturesSEC & operator=( VkPhysicalDeviceAmigoProfilingFeaturesSEC const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceAmigoProfilingFeaturesSEC const *>( &rhs );
+      return *this;
+    }
+
+    // Chainable setters (each returns *this).
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAmigoProfilingFeaturesSEC & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAmigoProfilingFeaturesSEC & setAmigoProfiling( VULKAN_HPP_NAMESPACE::Bool32 amigoProfiling_ ) VULKAN_HPP_NOEXCEPT
+    {
+      amigoProfiling = amigoProfiling_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the C struct (reinterpret_cast of *this).
+    operator VkPhysicalDeviceAmigoProfilingFeaturesSEC const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceAmigoProfilingFeaturesSEC*>( this );
+    }
+
+    operator VkPhysicalDeviceAmigoProfilingFeaturesSEC &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceAmigoProfilingFeaturesSEC*>( this );
+    }
+
+    // reflect(): std::tie of all members; feeds the reflection-based operator==.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, amigoProfiling );
+    }
+#endif
+
+
+    // Comparisons: defaulted <=> when available, otherwise member-wise ==/!=.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceAmigoProfilingFeaturesSEC const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceAmigoProfilingFeaturesSEC const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( amigoProfiling == rhs.amigoProfiling );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceAmigoProfilingFeaturesSEC const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members must stay layout-compatible with the C struct (the casts above depend on it).
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAmigoProfilingFeaturesSEC;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 amigoProfiling = {};
+
+  };
+
+  // Maps the StructureType enum value back to this wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceAmigoProfilingFeaturesSEC>
+  {
+    using Type = PhysicalDeviceAmigoProfilingFeaturesSEC;
+  };
+
+  // C++ wrapper around VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT:
+  // an sType/pNext chain header plus one Bool32 flag (attachmentFeedbackLoopLayout).
+  // NOTE(review): Vulkan-Hpp generator output in a diff hunk — do not hand-edit logic.
+  struct PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT;
+
+
+    // Constructors: members default to {}, pNext defaults to nullptr; the ctor
+    // taking the C struct reinterpret_casts it (relies on identical layout).
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 attachmentFeedbackLoopLayout_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), attachmentFeedbackLoopLayout( attachmentFeedbackLoopLayout_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT( PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT( VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT( *reinterpret_cast<PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT & operator=( PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assignment from the C struct, again via reinterpret_cast.
+    PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT & operator=( VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+    // Chainable setters (each returns *this).
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT & setAttachmentFeedbackLoopLayout( VULKAN_HPP_NAMESPACE::Bool32 attachmentFeedbackLoopLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachmentFeedbackLoopLayout = attachmentFeedbackLoopLayout_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the C struct (reinterpret_cast of *this).
+    operator VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT*>( this );
+    }
+
+    // reflect(): std::tie of all members; feeds the reflection-based operator==.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, attachmentFeedbackLoopLayout );
+    }
+#endif
+
+
+    // Comparisons: defaulted <=> when available, otherwise member-wise ==/!=.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( attachmentFeedbackLoopLayout == rhs.attachmentFeedbackLoopLayout );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members must stay layout-compatible with the C struct (the casts above depend on it).
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 attachmentFeedbackLoopLayout = {};
+
+  };
+
+  // Maps the StructureType enum value back to this wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT>
+  {
+    using Type = PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT;
+  };
+
+  // C++ wrapper around VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT:
+  // an sType/pNext chain header plus one Bool32 flag (advancedBlendCoherentOperations).
+  // NOTE(review): Vulkan-Hpp generator output in a diff hunk — do not hand-edit logic.
+  struct PhysicalDeviceBlendOperationAdvancedFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT;
+
+
+    // Constructors: members default to {}, pNext defaults to nullptr; the ctor
+    // taking the C struct reinterpret_casts it (relies on identical layout).
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), advancedBlendCoherentOperations( advancedBlendCoherentOperations_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedFeaturesEXT( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceBlendOperationAdvancedFeaturesEXT( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceBlendOperationAdvancedFeaturesEXT( *reinterpret_cast<PhysicalDeviceBlendOperationAdvancedFeaturesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceBlendOperationAdvancedFeaturesEXT & operator=( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assignment from the C struct, again via reinterpret_cast.
+    PhysicalDeviceBlendOperationAdvancedFeaturesEXT & operator=( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+    // Chainable setters (each returns *this).
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBlendOperationAdvancedFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBlendOperationAdvancedFeaturesEXT & setAdvancedBlendCoherentOperations( VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations_ ) VULKAN_HPP_NOEXCEPT
+    {
+      advancedBlendCoherentOperations = advancedBlendCoherentOperations_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the C struct (reinterpret_cast of *this).
+    operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT*>( this );
+    }
+
+    // reflect(): std::tie of all members; feeds the reflection-based operator==.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, advancedBlendCoherentOperations );
+    }
+#endif
+
+
+    // Comparisons: defaulted <=> when available, otherwise member-wise ==/!=.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( advancedBlendCoherentOperations == rhs.advancedBlendCoherentOperations );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members must stay layout-compatible with the C struct (the casts above depend on it).
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations = {};
+
+  };
+
+  // Maps the StructureType enum value back to this wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT>
+  {
+    using Type = PhysicalDeviceBlendOperationAdvancedFeaturesEXT;
+  };
+
+  // C++ wrapper around VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT:
+  // sType/pNext chain header plus six advanced-blend property members.
+  // Unlike the *Features structs above, no member setters are generated here
+  // (only setPNext is absent too — the whole setter section is omitted).
+  // NOTE(review): Vulkan-Hpp generator output in a diff hunk — do not hand-edit logic.
+  struct PhysicalDeviceBlendOperationAdvancedPropertiesEXT
+  {
+    using NativeType = VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT;
+
+
+    // Constructors: members default to {}, pNext defaults to nullptr; the ctor
+    // taking the C struct reinterpret_casts it (relies on identical layout).
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedPropertiesEXT(uint32_t advancedBlendMaxColorAttachments_ = {}, VULKAN_HPP_NAMESPACE::Bool32 advancedBlendIndependentBlend_ = {}, VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedSrcColor_ = {}, VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedDstColor_ = {}, VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCorrelatedOverlap_ = {}, VULKAN_HPP_NAMESPACE::Bool32 advancedBlendAllOperations_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), advancedBlendMaxColorAttachments( advancedBlendMaxColorAttachments_ ), advancedBlendIndependentBlend( advancedBlendIndependentBlend_ ), advancedBlendNonPremultipliedSrcColor( advancedBlendNonPremultipliedSrcColor_ ), advancedBlendNonPremultipliedDstColor( advancedBlendNonPremultipliedDstColor_ ), advancedBlendCorrelatedOverlap( advancedBlendCorrelatedOverlap_ ), advancedBlendAllOperations( advancedBlendAllOperations_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedPropertiesEXT( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceBlendOperationAdvancedPropertiesEXT( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceBlendOperationAdvancedPropertiesEXT( *reinterpret_cast<PhysicalDeviceBlendOperationAdvancedPropertiesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceBlendOperationAdvancedPropertiesEXT & operator=( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assignment from the C struct, again via reinterpret_cast.
+    PhysicalDeviceBlendOperationAdvancedPropertiesEXT & operator=( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+
+    // Implicit conversions to the C struct (reinterpret_cast of *this).
+    operator VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT*>( this );
+    }
+
+    // reflect(): std::tie of all members; feeds the reflection-based operator==.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, advancedBlendMaxColorAttachments, advancedBlendIndependentBlend, advancedBlendNonPremultipliedSrcColor, advancedBlendNonPremultipliedDstColor, advancedBlendCorrelatedOverlap, advancedBlendAllOperations );
+    }
+#endif
+
+
+    // Comparisons: defaulted <=> when available, otherwise member-wise ==/!=.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( advancedBlendMaxColorAttachments == rhs.advancedBlendMaxColorAttachments )
+          && ( advancedBlendIndependentBlend == rhs.advancedBlendIndependentBlend )
+          && ( advancedBlendNonPremultipliedSrcColor == rhs.advancedBlendNonPremultipliedSrcColor )
+          && ( advancedBlendNonPremultipliedDstColor == rhs.advancedBlendNonPremultipliedDstColor )
+          && ( advancedBlendCorrelatedOverlap == rhs.advancedBlendCorrelatedOverlap )
+          && ( advancedBlendAllOperations == rhs.advancedBlendAllOperations );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members must stay layout-compatible with the C struct (the casts above depend on it).
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT;
+    void * pNext = {};
+    uint32_t advancedBlendMaxColorAttachments = {};
+    VULKAN_HPP_NAMESPACE::Bool32 advancedBlendIndependentBlend = {};
+    VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedSrcColor = {};
+    VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedDstColor = {};
+    VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCorrelatedOverlap = {};
+    VULKAN_HPP_NAMESPACE::Bool32 advancedBlendAllOperations = {};
+
+  };
+
+  // Maps the StructureType enum value back to this wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT>
+  {
+    using Type = PhysicalDeviceBlendOperationAdvancedPropertiesEXT;
+  };
+
+  // C++ wrapper around VkPhysicalDeviceBorderColorSwizzleFeaturesEXT:
+  // sType/pNext chain header plus two Bool32 flags
+  // (borderColorSwizzle, borderColorSwizzleFromImage).
+  // NOTE(review): Vulkan-Hpp generator output in a diff hunk — do not hand-edit logic.
+  struct PhysicalDeviceBorderColorSwizzleFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceBorderColorSwizzleFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBorderColorSwizzleFeaturesEXT;
+
+
+    // Constructors: members default to {}, pNext defaults to nullptr; the ctor
+    // taking the C struct reinterpret_casts it (relies on identical layout).
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceBorderColorSwizzleFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzle_ = {}, VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzleFromImage_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), borderColorSwizzle( borderColorSwizzle_ ), borderColorSwizzleFromImage( borderColorSwizzleFromImage_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceBorderColorSwizzleFeaturesEXT( PhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceBorderColorSwizzleFeaturesEXT( VkPhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceBorderColorSwizzleFeaturesEXT( *reinterpret_cast<PhysicalDeviceBorderColorSwizzleFeaturesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceBorderColorSwizzleFeaturesEXT & operator=( PhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assignment from the C struct, again via reinterpret_cast.
+    PhysicalDeviceBorderColorSwizzleFeaturesEXT & operator=( VkPhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBorderColorSwizzleFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+    // Chainable setters (each returns *this).
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBorderColorSwizzleFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBorderColorSwizzleFeaturesEXT & setBorderColorSwizzle( VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzle_ ) VULKAN_HPP_NOEXCEPT
+    {
+      borderColorSwizzle = borderColorSwizzle_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBorderColorSwizzleFeaturesEXT & setBorderColorSwizzleFromImage( VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzleFromImage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      borderColorSwizzleFromImage = borderColorSwizzleFromImage_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the C struct (reinterpret_cast of *this).
+    operator VkPhysicalDeviceBorderColorSwizzleFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceBorderColorSwizzleFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceBorderColorSwizzleFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceBorderColorSwizzleFeaturesEXT*>( this );
+    }
+
+    // reflect(): std::tie of all members; feeds the reflection-based operator==.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, borderColorSwizzle, borderColorSwizzleFromImage );
+    }
+#endif
+
+
+    // Comparisons: defaulted <=> when available, otherwise member-wise ==/!=.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceBorderColorSwizzleFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( borderColorSwizzle == rhs.borderColorSwizzle )
+          && ( borderColorSwizzleFromImage == rhs.borderColorSwizzleFromImage );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members must stay layout-compatible with the C struct (the casts above depend on it).
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBorderColorSwizzleFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzle = {};
+    VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzleFromImage = {};
+
+  };
+
+  // Maps the StructureType enum value back to this wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceBorderColorSwizzleFeaturesEXT>
+  {
+    using Type = PhysicalDeviceBorderColorSwizzleFeaturesEXT;
+  };
+
+  // C++ wrapper around VkPhysicalDeviceBufferDeviceAddressFeatures (core):
+  // sType/pNext chain header plus three Bool32 flags (bufferDeviceAddress,
+  // bufferDeviceAddressCaptureReplay, bufferDeviceAddressMultiDevice).
+  // NOTE(review): Vulkan-Hpp generator output in a diff hunk — do not hand-edit logic.
+  struct PhysicalDeviceBufferDeviceAddressFeatures
+  {
+    using NativeType = VkPhysicalDeviceBufferDeviceAddressFeatures;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBufferDeviceAddressFeatures;
+
+
+    // Constructors: members default to {}, pNext defaults to nullptr; the ctor
+    // taking the C struct reinterpret_casts it (relies on identical layout).
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeatures(VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), bufferDeviceAddress( bufferDeviceAddress_ ), bufferDeviceAddressCaptureReplay( bufferDeviceAddressCaptureReplay_ ), bufferDeviceAddressMultiDevice( bufferDeviceAddressMultiDevice_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeatures( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceBufferDeviceAddressFeatures( VkPhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceBufferDeviceAddressFeatures( *reinterpret_cast<PhysicalDeviceBufferDeviceAddressFeatures const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceBufferDeviceAddressFeatures & operator=( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assignment from the C struct, again via reinterpret_cast.
+    PhysicalDeviceBufferDeviceAddressFeatures & operator=( VkPhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeatures const *>( &rhs );
+      return *this;
+    }
+
+    // Chainable setters (each returns *this).
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeatures & setBufferDeviceAddress( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferDeviceAddress = bufferDeviceAddress_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeatures & setBufferDeviceAddressCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeatures & setBufferDeviceAddressMultiDevice( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the C struct (reinterpret_cast of *this).
+    operator VkPhysicalDeviceBufferDeviceAddressFeatures const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceBufferDeviceAddressFeatures*>( this );
+    }
+
+    operator VkPhysicalDeviceBufferDeviceAddressFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceBufferDeviceAddressFeatures*>( this );
+    }
+
+    // reflect(): std::tie of all members; feeds the reflection-based operator==.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, bufferDeviceAddress, bufferDeviceAddressCaptureReplay, bufferDeviceAddressMultiDevice );
+    }
+#endif
+
+
+    // Comparisons: defaulted <=> when available, otherwise member-wise ==/!=.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceBufferDeviceAddressFeatures const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( bufferDeviceAddress == rhs.bufferDeviceAddress )
+          && ( bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay )
+          && ( bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members must stay layout-compatible with the C struct (the casts above depend on it).
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBufferDeviceAddressFeatures;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress = {};
+    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay = {};
+    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice = {};
+
+  };
+
+  // Maps the StructureType enum value back to this wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceBufferDeviceAddressFeatures>
+  {
+    using Type = PhysicalDeviceBufferDeviceAddressFeatures;
+  };
+  // Extension-suffixed alias for the same struct (KHR name).
+  using PhysicalDeviceBufferDeviceAddressFeaturesKHR = PhysicalDeviceBufferDeviceAddressFeatures;
+
+  // C++ wrapper around VkPhysicalDeviceBufferDeviceAddressFeaturesEXT: same
+  // three Bool32 flags as the core struct above, but a distinct sType — the
+  // two are separate, non-interchangeable structure-chain entries.
+  // NOTE(review): Vulkan-Hpp generator output in a diff hunk — do not hand-edit logic.
+  struct PhysicalDeviceBufferDeviceAddressFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceBufferDeviceAddressFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT;
+
+
+    // Constructors: members default to {}, pNext defaults to nullptr; the ctor
+    // taking the C struct reinterpret_casts it (relies on identical layout).
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), bufferDeviceAddress( bufferDeviceAddress_ ), bufferDeviceAddressCaptureReplay( bufferDeviceAddressCaptureReplay_ ), bufferDeviceAddressMultiDevice( bufferDeviceAddressMultiDevice_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeaturesEXT( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceBufferDeviceAddressFeaturesEXT( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceBufferDeviceAddressFeaturesEXT( *reinterpret_cast<PhysicalDeviceBufferDeviceAddressFeaturesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceBufferDeviceAddressFeaturesEXT & operator=( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assignment from the C struct, again via reinterpret_cast.
+    PhysicalDeviceBufferDeviceAddressFeaturesEXT & operator=( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+    // Chainable setters (each returns *this).
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeaturesEXT & setBufferDeviceAddress( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferDeviceAddress = bufferDeviceAddress_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeaturesEXT & setBufferDeviceAddressCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeaturesEXT & setBufferDeviceAddressMultiDevice( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the C struct (reinterpret_cast of *this).
+    operator VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceBufferDeviceAddressFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceBufferDeviceAddressFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceBufferDeviceAddressFeaturesEXT*>( this );
+    }
+
+    // reflect(): std::tie of all members; feeds the reflection-based operator==.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, bufferDeviceAddress, bufferDeviceAddressCaptureReplay, bufferDeviceAddressMultiDevice );
+    }
+#endif
+
+
+    // Comparisons: defaulted <=> when available, otherwise member-wise ==/!=.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( bufferDeviceAddress == rhs.bufferDeviceAddress )
+          && ( bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay )
+          && ( bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members must stay layout-compatible with the C struct (the casts above depend on it).
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress = {};
+    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay = {};
+    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice = {};
+
+  };
+
+  // Maps the StructureType enum value back to this wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT>
+  {
+    using Type = PhysicalDeviceBufferDeviceAddressFeaturesEXT;
+  };
+  // Alias for the same struct under the older extension name.
+  using PhysicalDeviceBufferAddressFeaturesEXT = PhysicalDeviceBufferDeviceAddressFeaturesEXT;
+
+  // C++ wrapper for VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI: the two feature
+  // flags of VK_HUAWEI_cluster_culling_shader plus the sType/pNext header. Converts
+  // to/from the C struct via reinterpret_cast (assumes identical layout; generated code).
+  // Note: "clustercullingShader" (lower-case c) matches the upstream registry spelling.
+  struct PhysicalDeviceClusterCullingShaderFeaturesHUAWEI
+  {
+    using NativeType = VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceClusterCullingShaderFeaturesHUAWEI;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceClusterCullingShaderFeaturesHUAWEI(VULKAN_HPP_NAMESPACE::Bool32 clustercullingShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiviewClusterCullingShader_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), clustercullingShader( clustercullingShader_ ), multiviewClusterCullingShader( multiviewClusterCullingShader_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceClusterCullingShaderFeaturesHUAWEI( PhysicalDeviceClusterCullingShaderFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the C++ wrapper.
+    PhysicalDeviceClusterCullingShaderFeaturesHUAWEI( VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceClusterCullingShaderFeaturesHUAWEI( *reinterpret_cast<PhysicalDeviceClusterCullingShaderFeaturesHUAWEI const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceClusterCullingShaderFeaturesHUAWEI & operator=( PhysicalDeviceClusterCullingShaderFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (same reinterpret_cast layout assumption as above).
+    PhysicalDeviceClusterCullingShaderFeaturesHUAWEI & operator=( VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterCullingShaderFeaturesHUAWEI const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each returns *this so calls can be chained.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceClusterCullingShaderFeaturesHUAWEI & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceClusterCullingShaderFeaturesHUAWEI & setClustercullingShader( VULKAN_HPP_NAMESPACE::Bool32 clustercullingShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      clustercullingShader = clustercullingShader_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceClusterCullingShaderFeaturesHUAWEI & setMultiviewClusterCullingShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewClusterCullingShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      multiviewClusterCullingShader = multiviewClusterCullingShader_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy views of this object as the underlying C struct.
+    operator VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI*>( this );
+    }
+
+    operator VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI*>( this );
+    }
+
+    // reflect(): const-reference tuple over all members; backs the reflection-based
+    // operator== below when VULKAN_HPP_USE_REFLECT is enabled.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, clustercullingShader, multiviewClusterCullingShader );
+    }
+#endif
+
+
+    // Defaulted three-way comparison when available, otherwise member-wise == / !=.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceClusterCullingShaderFeaturesHUAWEI const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceClusterCullingShaderFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( clustercullingShader == rhs.clustercullingShader )
+          && ( multiviewClusterCullingShader == rhs.multiviewClusterCullingShader );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceClusterCullingShaderFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceClusterCullingShaderFeaturesHUAWEI;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 clustercullingShader = {};
+    VULKAN_HPP_NAMESPACE::Bool32 multiviewClusterCullingShader = {};
+
+  };
+
+  // Compile-time mapping from the StructureType enum value back to this C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceClusterCullingShaderFeaturesHUAWEI>
+  {
+    using Type = PhysicalDeviceClusterCullingShaderFeaturesHUAWEI;
+  };
+
+  // C++ wrapper for VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI: implementation
+  // limits for VK_HUAWEI_cluster_culling_shader. No setters are generated for this struct
+  // (presumably filled by a properties query such as vkGetPhysicalDeviceProperties2 —
+  // not visible here).
+  struct PhysicalDeviceClusterCullingShaderPropertiesHUAWEI
+  {
+    using NativeType = VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceClusterCullingShaderPropertiesHUAWEI;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceClusterCullingShaderPropertiesHUAWEI(std::array<uint32_t,3> const & maxWorkGroupCount_ = {}, std::array<uint32_t,3> const & maxWorkGroupSize_ = {}, uint32_t maxOutputClusterCount_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), maxWorkGroupCount( maxWorkGroupCount_ ), maxWorkGroupSize( maxWorkGroupSize_ ), maxOutputClusterCount( maxOutputClusterCount_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceClusterCullingShaderPropertiesHUAWEI( PhysicalDeviceClusterCullingShaderPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the C++ wrapper.
+    PhysicalDeviceClusterCullingShaderPropertiesHUAWEI( VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceClusterCullingShaderPropertiesHUAWEI( *reinterpret_cast<PhysicalDeviceClusterCullingShaderPropertiesHUAWEI const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceClusterCullingShaderPropertiesHUAWEI & operator=( PhysicalDeviceClusterCullingShaderPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (reinterpret_cast assumes identical layout).
+    PhysicalDeviceClusterCullingShaderPropertiesHUAWEI & operator=( VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterCullingShaderPropertiesHUAWEI const *>( &rhs );
+      return *this;
+    }
+
+
+    // Zero-copy views of this object as the underlying C struct.
+    operator VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI*>( this );
+    }
+
+    operator VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI*>( this );
+    }
+
+    // reflect(): const-reference tuple over all members; backs the reflection-based
+    // operator== below when VULKAN_HPP_USE_REFLECT is enabled.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, maxWorkGroupCount, maxWorkGroupSize, maxOutputClusterCount );
+    }
+#endif
+
+
+    // Defaulted three-way comparison when available, otherwise member-wise == / !=.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceClusterCullingShaderPropertiesHUAWEI const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceClusterCullingShaderPropertiesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxWorkGroupCount == rhs.maxWorkGroupCount )
+          && ( maxWorkGroupSize == rhs.maxWorkGroupSize )
+          && ( maxOutputClusterCount == rhs.maxOutputClusterCount );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceClusterCullingShaderPropertiesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceClusterCullingShaderPropertiesHUAWEI;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> maxWorkGroupCount = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> maxWorkGroupSize = {};
+    uint32_t maxOutputClusterCount = {};
+
+  };
+
+  // Compile-time mapping from the StructureType enum value back to this C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceClusterCullingShaderPropertiesHUAWEI>
+  {
+    using Type = PhysicalDeviceClusterCullingShaderPropertiesHUAWEI;
+  };
+
+  // C++ wrapper for VkPhysicalDeviceCoherentMemoryFeaturesAMD: a single feature flag
+  // (deviceCoherentMemory) plus the sType/pNext header. Converts to/from the C struct
+  // via reinterpret_cast (assumes identical layout; generated code).
+  struct PhysicalDeviceCoherentMemoryFeaturesAMD
+  {
+    using NativeType = VkPhysicalDeviceCoherentMemoryFeaturesAMD;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceCoherentMemoryFeaturesAMD(VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), deviceCoherentMemory( deviceCoherentMemory_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceCoherentMemoryFeaturesAMD( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the C++ wrapper.
+    PhysicalDeviceCoherentMemoryFeaturesAMD( VkPhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceCoherentMemoryFeaturesAMD( *reinterpret_cast<PhysicalDeviceCoherentMemoryFeaturesAMD const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceCoherentMemoryFeaturesAMD & operator=( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (reinterpret_cast assumes identical layout).
+    PhysicalDeviceCoherentMemoryFeaturesAMD & operator=( VkPhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each returns *this so calls can be chained.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCoherentMemoryFeaturesAMD & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCoherentMemoryFeaturesAMD & setDeviceCoherentMemory( VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceCoherentMemory = deviceCoherentMemory_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy views of this object as the underlying C struct.
+    operator VkPhysicalDeviceCoherentMemoryFeaturesAMD const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceCoherentMemoryFeaturesAMD*>( this );
+    }
+
+    operator VkPhysicalDeviceCoherentMemoryFeaturesAMD &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceCoherentMemoryFeaturesAMD*>( this );
+    }
+
+    // reflect(): const-reference tuple over all members; backs the reflection-based
+    // operator== below when VULKAN_HPP_USE_REFLECT is enabled.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, deviceCoherentMemory );
+    }
+#endif
+
+
+    // Defaulted three-way comparison when available, otherwise member-wise == / !=.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceCoherentMemoryFeaturesAMD const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( deviceCoherentMemory == rhs.deviceCoherentMemory );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory = {};
+
+  };
+
+  // Compile-time mapping from the StructureType enum value back to this C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD>
+  {
+    using Type = PhysicalDeviceCoherentMemoryFeaturesAMD;
+  };
+
+  // C++ wrapper for VkPhysicalDeviceColorWriteEnableFeaturesEXT: a single feature flag
+  // (colorWriteEnable) plus the sType/pNext header. Converts to/from the C struct via
+  // reinterpret_cast (assumes identical layout; generated code).
+  struct PhysicalDeviceColorWriteEnableFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceColorWriteEnableFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceColorWriteEnableFeaturesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceColorWriteEnableFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 colorWriteEnable_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), colorWriteEnable( colorWriteEnable_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceColorWriteEnableFeaturesEXT( PhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the C++ wrapper.
+    PhysicalDeviceColorWriteEnableFeaturesEXT( VkPhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceColorWriteEnableFeaturesEXT( *reinterpret_cast<PhysicalDeviceColorWriteEnableFeaturesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceColorWriteEnableFeaturesEXT & operator=( PhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (reinterpret_cast assumes identical layout).
+    PhysicalDeviceColorWriteEnableFeaturesEXT & operator=( VkPhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceColorWriteEnableFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each returns *this so calls can be chained.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceColorWriteEnableFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceColorWriteEnableFeaturesEXT & setColorWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 colorWriteEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      colorWriteEnable = colorWriteEnable_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy views of this object as the underlying C struct.
+    operator VkPhysicalDeviceColorWriteEnableFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceColorWriteEnableFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceColorWriteEnableFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceColorWriteEnableFeaturesEXT*>( this );
+    }
+
+    // reflect(): const-reference tuple over all members; backs the reflection-based
+    // operator== below when VULKAN_HPP_USE_REFLECT is enabled.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, colorWriteEnable );
+    }
+#endif
+
+
+    // Defaulted three-way comparison when available, otherwise member-wise == / !=.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceColorWriteEnableFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( colorWriteEnable == rhs.colorWriteEnable );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceColorWriteEnableFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 colorWriteEnable = {};
+
+  };
+
+  // Compile-time mapping from the StructureType enum value back to this C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceColorWriteEnableFeaturesEXT>
+  {
+    using Type = PhysicalDeviceColorWriteEnableFeaturesEXT;
+  };
+
+  // C++ wrapper for VkPhysicalDeviceComputeShaderDerivativesFeaturesNV: two feature flags
+  // (computeDerivativeGroupQuads / computeDerivativeGroupLinear) plus the sType/pNext
+  // header. Converts to/from the C struct via reinterpret_cast (assumes identical layout).
+  struct PhysicalDeviceComputeShaderDerivativesFeaturesNV
+  {
+    using NativeType = VkPhysicalDeviceComputeShaderDerivativesFeaturesNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceComputeShaderDerivativesFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads_ = {}, VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), computeDerivativeGroupQuads( computeDerivativeGroupQuads_ ), computeDerivativeGroupLinear( computeDerivativeGroupLinear_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceComputeShaderDerivativesFeaturesNV( PhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the C++ wrapper.
+    PhysicalDeviceComputeShaderDerivativesFeaturesNV( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceComputeShaderDerivativesFeaturesNV( *reinterpret_cast<PhysicalDeviceComputeShaderDerivativesFeaturesNV const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceComputeShaderDerivativesFeaturesNV & operator=( PhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (reinterpret_cast assumes identical layout).
+    PhysicalDeviceComputeShaderDerivativesFeaturesNV & operator=( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each returns *this so calls can be chained.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceComputeShaderDerivativesFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceComputeShaderDerivativesFeaturesNV & setComputeDerivativeGroupQuads( VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads_ ) VULKAN_HPP_NOEXCEPT
+    {
+      computeDerivativeGroupQuads = computeDerivativeGroupQuads_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceComputeShaderDerivativesFeaturesNV & setComputeDerivativeGroupLinear( VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear_ ) VULKAN_HPP_NOEXCEPT
+    {
+      computeDerivativeGroupLinear = computeDerivativeGroupLinear_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy views of this object as the underlying C struct.
+    operator VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceComputeShaderDerivativesFeaturesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceComputeShaderDerivativesFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceComputeShaderDerivativesFeaturesNV*>( this );
+    }
+
+    // reflect(): const-reference tuple over all members; backs the reflection-based
+    // operator== below when VULKAN_HPP_USE_REFLECT is enabled.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, computeDerivativeGroupQuads, computeDerivativeGroupLinear );
+    }
+#endif
+
+
+    // Defaulted three-way comparison when available, otherwise member-wise == / !=.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceComputeShaderDerivativesFeaturesNV const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( computeDerivativeGroupQuads == rhs.computeDerivativeGroupQuads )
+          && ( computeDerivativeGroupLinear == rhs.computeDerivativeGroupLinear );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads = {};
+    VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear = {};
+
+  };
+
+  // Compile-time mapping from the StructureType enum value back to this C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV>
+  {
+    using Type = PhysicalDeviceComputeShaderDerivativesFeaturesNV;
+  };
+
+  // C++ wrapper for VkPhysicalDeviceConditionalRenderingFeaturesEXT: two feature flags
+  // (conditionalRendering / inheritedConditionalRendering) plus the sType/pNext header.
+  // Converts to/from the C struct via reinterpret_cast (assumes identical layout).
+  struct PhysicalDeviceConditionalRenderingFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceConditionalRenderingFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceConditionalRenderingFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering_ = {}, VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), conditionalRendering( conditionalRendering_ ), inheritedConditionalRendering( inheritedConditionalRendering_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceConditionalRenderingFeaturesEXT( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the C++ wrapper.
+    PhysicalDeviceConditionalRenderingFeaturesEXT( VkPhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceConditionalRenderingFeaturesEXT( *reinterpret_cast<PhysicalDeviceConditionalRenderingFeaturesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceConditionalRenderingFeaturesEXT & operator=( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (reinterpret_cast assumes identical layout).
+    PhysicalDeviceConditionalRenderingFeaturesEXT & operator=( VkPhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceConditionalRenderingFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each returns *this so calls can be chained.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceConditionalRenderingFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceConditionalRenderingFeaturesEXT & setConditionalRendering( VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering_ ) VULKAN_HPP_NOEXCEPT
+    {
+      conditionalRendering = conditionalRendering_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceConditionalRenderingFeaturesEXT & setInheritedConditionalRendering( VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering_ ) VULKAN_HPP_NOEXCEPT
+    {
+      inheritedConditionalRendering = inheritedConditionalRendering_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy views of this object as the underlying C struct.
+    operator VkPhysicalDeviceConditionalRenderingFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceConditionalRenderingFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceConditionalRenderingFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceConditionalRenderingFeaturesEXT*>( this );
+    }
+
+    // reflect(): const-reference tuple over all members; backs the reflection-based
+    // operator== below when VULKAN_HPP_USE_REFLECT is enabled.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, conditionalRendering, inheritedConditionalRendering );
+    }
+#endif
+
+
+    // Defaulted three-way comparison when available, otherwise member-wise == / !=.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceConditionalRenderingFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( conditionalRendering == rhs.conditionalRendering )
+          && ( inheritedConditionalRendering == rhs.inheritedConditionalRendering );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering = {};
+    VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering = {};
+
+  };
+
+  // Compile-time mapping from the StructureType enum value back to this C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT>
+  {
+    using Type = PhysicalDeviceConditionalRenderingFeaturesEXT;
+  };
+
+  // C++ wrapper for VkPhysicalDeviceConservativeRasterizationPropertiesEXT: read-only
+  // limits for VK_EXT_conservative_rasterization (three float limits, six Bool32 flags).
+  // No setters are generated (presumably filled by a properties query — not visible here).
+  struct PhysicalDeviceConservativeRasterizationPropertiesEXT
+  {
+    using NativeType = VkPhysicalDeviceConservativeRasterizationPropertiesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceConservativeRasterizationPropertiesEXT(float primitiveOverestimationSize_ = {}, float maxExtraPrimitiveOverestimationSize_ = {}, float extraPrimitiveOverestimationSizeGranularity_ = {}, VULKAN_HPP_NAMESPACE::Bool32 primitiveUnderestimation_ = {}, VULKAN_HPP_NAMESPACE::Bool32 conservativePointAndLineRasterization_ = {}, VULKAN_HPP_NAMESPACE::Bool32 degenerateTrianglesRasterized_ = {}, VULKAN_HPP_NAMESPACE::Bool32 degenerateLinesRasterized_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fullyCoveredFragmentShaderInputVariable_ = {}, VULKAN_HPP_NAMESPACE::Bool32 conservativeRasterizationPostDepthCoverage_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), primitiveOverestimationSize( primitiveOverestimationSize_ ), maxExtraPrimitiveOverestimationSize( maxExtraPrimitiveOverestimationSize_ ), extraPrimitiveOverestimationSizeGranularity( extraPrimitiveOverestimationSizeGranularity_ ), primitiveUnderestimation( primitiveUnderestimation_ ), conservativePointAndLineRasterization( conservativePointAndLineRasterization_ ), degenerateTrianglesRasterized( degenerateTrianglesRasterized_ ), degenerateLinesRasterized( degenerateLinesRasterized_ ), fullyCoveredFragmentShaderInputVariable( fullyCoveredFragmentShaderInputVariable_ ), conservativeRasterizationPostDepthCoverage( conservativeRasterizationPostDepthCoverage_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceConservativeRasterizationPropertiesEXT( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the C++ wrapper.
+    PhysicalDeviceConservativeRasterizationPropertiesEXT( VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceConservativeRasterizationPropertiesEXT( *reinterpret_cast<PhysicalDeviceConservativeRasterizationPropertiesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceConservativeRasterizationPropertiesEXT & operator=( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (reinterpret_cast assumes identical layout).
+    PhysicalDeviceConservativeRasterizationPropertiesEXT & operator=( VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceConservativeRasterizationPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+
+    // Zero-copy views of this object as the underlying C struct.
+    operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceConservativeRasterizationPropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceConservativeRasterizationPropertiesEXT*>( this );
+    }
+
+    // reflect(): const-reference tuple over all members; backs the reflection-based
+    // operator== below when VULKAN_HPP_USE_REFLECT is enabled. NOTE(review): the
+    // non-reflect operator== compares the float members with ==, so NaN-valued
+    // properties would compare unequal to themselves.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, float const &, float const &, float const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, primitiveOverestimationSize, maxExtraPrimitiveOverestimationSize, extraPrimitiveOverestimationSizeGranularity, primitiveUnderestimation, conservativePointAndLineRasterization, degenerateTrianglesRasterized, degenerateLinesRasterized, fullyCoveredFragmentShaderInputVariable, conservativeRasterizationPostDepthCoverage );
+    }
+#endif
+
+
+    // Defaulted three-way comparison when available, otherwise member-wise == / !=.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceConservativeRasterizationPropertiesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( primitiveOverestimationSize == rhs.primitiveOverestimationSize )
+          && ( maxExtraPrimitiveOverestimationSize == rhs.maxExtraPrimitiveOverestimationSize )
+          && ( extraPrimitiveOverestimationSizeGranularity == rhs.extraPrimitiveOverestimationSizeGranularity )
+          && ( primitiveUnderestimation == rhs.primitiveUnderestimation )
+          && ( conservativePointAndLineRasterization == rhs.conservativePointAndLineRasterization )
+          && ( degenerateTrianglesRasterized == rhs.degenerateTrianglesRasterized )
+          && ( degenerateLinesRasterized == rhs.degenerateLinesRasterized )
+          && ( fullyCoveredFragmentShaderInputVariable == rhs.fullyCoveredFragmentShaderInputVariable )
+          && ( conservativeRasterizationPostDepthCoverage == rhs.conservativeRasterizationPostDepthCoverage );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT;
+    void * pNext = {};
+    float primitiveOverestimationSize = {};
+    float maxExtraPrimitiveOverestimationSize = {};
+    float extraPrimitiveOverestimationSizeGranularity = {};
+    VULKAN_HPP_NAMESPACE::Bool32 primitiveUnderestimation = {};
+    VULKAN_HPP_NAMESPACE::Bool32 conservativePointAndLineRasterization = {};
+    VULKAN_HPP_NAMESPACE::Bool32 degenerateTrianglesRasterized = {};
+    VULKAN_HPP_NAMESPACE::Bool32 degenerateLinesRasterized = {};
+    VULKAN_HPP_NAMESPACE::Bool32 fullyCoveredFragmentShaderInputVariable = {};
+    VULKAN_HPP_NAMESPACE::Bool32 conservativeRasterizationPostDepthCoverage = {};
+
+  };
+
+  // Compile-time mapping from the StructureType enum value back to this C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT>
+  {
+    using Type = PhysicalDeviceConservativeRasterizationPropertiesEXT;
+  };
+
+  struct PhysicalDeviceCooperativeMatrixFeaturesNV
+  {
+    using NativeType = VkPhysicalDeviceCooperativeMatrixFeaturesNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix_ = {}, VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), cooperativeMatrix( cooperativeMatrix_ ), cooperativeMatrixRobustBufferAccess( cooperativeMatrixRobustBufferAccess_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesNV( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceCooperativeMatrixFeaturesNV( VkPhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceCooperativeMatrixFeaturesNV( *reinterpret_cast<PhysicalDeviceCooperativeMatrixFeaturesNV const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceCooperativeMatrixFeaturesNV & operator=( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceCooperativeMatrixFeaturesNV & operator=( VkPhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixFeaturesNV & setCooperativeMatrix( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix_ ) VULKAN_HPP_NOEXCEPT
+    {
+      cooperativeMatrix = cooperativeMatrix_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixFeaturesNV & setCooperativeMatrixRobustBufferAccess( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      cooperativeMatrixRobustBufferAccess = cooperativeMatrixRobustBufferAccess_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceCooperativeMatrixFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceCooperativeMatrixFeaturesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceCooperativeMatrixFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceCooperativeMatrixFeaturesNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, cooperativeMatrix, cooperativeMatrixRobustBufferAccess );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceCooperativeMatrixFeaturesNV const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( cooperativeMatrix == rhs.cooperativeMatrix )
+          && ( cooperativeMatrixRobustBufferAccess == rhs.cooperativeMatrixRobustBufferAccess );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix = {};
+    VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV>
+  {
+    using Type = PhysicalDeviceCooperativeMatrixFeaturesNV;
+  };
+
+  struct PhysicalDeviceCooperativeMatrixPropertiesNV
+  {
+    using NativeType = VkPhysicalDeviceCooperativeMatrixPropertiesNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixPropertiesNV(VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeMatrixSupportedStages_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), cooperativeMatrixSupportedStages( cooperativeMatrixSupportedStages_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixPropertiesNV( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceCooperativeMatrixPropertiesNV( VkPhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceCooperativeMatrixPropertiesNV( *reinterpret_cast<PhysicalDeviceCooperativeMatrixPropertiesNV const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceCooperativeMatrixPropertiesNV & operator=( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceCooperativeMatrixPropertiesNV & operator=( VkPhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesNV const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceCooperativeMatrixPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceCooperativeMatrixPropertiesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceCooperativeMatrixPropertiesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceCooperativeMatrixPropertiesNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ShaderStageFlags const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, cooperativeMatrixSupportedStages );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceCooperativeMatrixPropertiesNV const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( cooperativeMatrixSupportedStages == rhs.cooperativeMatrixSupportedStages );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeMatrixSupportedStages = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV>
+  {
+    using Type = PhysicalDeviceCooperativeMatrixPropertiesNV;
+  };
+
+  struct PhysicalDeviceCopyMemoryIndirectFeaturesNV
+  {
+    using NativeType = VkPhysicalDeviceCopyMemoryIndirectFeaturesNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCopyMemoryIndirectFeaturesNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceCopyMemoryIndirectFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 indirectCopy_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), indirectCopy( indirectCopy_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceCopyMemoryIndirectFeaturesNV( PhysicalDeviceCopyMemoryIndirectFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceCopyMemoryIndirectFeaturesNV( VkPhysicalDeviceCopyMemoryIndirectFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceCopyMemoryIndirectFeaturesNV( *reinterpret_cast<PhysicalDeviceCopyMemoryIndirectFeaturesNV const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceCopyMemoryIndirectFeaturesNV & operator=( PhysicalDeviceCopyMemoryIndirectFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceCopyMemoryIndirectFeaturesNV & operator=( VkPhysicalDeviceCopyMemoryIndirectFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCopyMemoryIndirectFeaturesNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCopyMemoryIndirectFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCopyMemoryIndirectFeaturesNV & setIndirectCopy( VULKAN_HPP_NAMESPACE::Bool32 indirectCopy_ ) VULKAN_HPP_NOEXCEPT
+    {
+      indirectCopy = indirectCopy_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceCopyMemoryIndirectFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceCopyMemoryIndirectFeaturesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceCopyMemoryIndirectFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceCopyMemoryIndirectFeaturesNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, indirectCopy );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceCopyMemoryIndirectFeaturesNV const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceCopyMemoryIndirectFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( indirectCopy == rhs.indirectCopy );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceCopyMemoryIndirectFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCopyMemoryIndirectFeaturesNV;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 indirectCopy = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceCopyMemoryIndirectFeaturesNV>
+  {
+    using Type = PhysicalDeviceCopyMemoryIndirectFeaturesNV;
+  };
+
+  struct PhysicalDeviceCopyMemoryIndirectPropertiesNV
+  {
+    using NativeType = VkPhysicalDeviceCopyMemoryIndirectPropertiesNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCopyMemoryIndirectPropertiesNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceCopyMemoryIndirectPropertiesNV(VULKAN_HPP_NAMESPACE::QueueFlags supportedQueues_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), supportedQueues( supportedQueues_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceCopyMemoryIndirectPropertiesNV( PhysicalDeviceCopyMemoryIndirectPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceCopyMemoryIndirectPropertiesNV( VkPhysicalDeviceCopyMemoryIndirectPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceCopyMemoryIndirectPropertiesNV( *reinterpret_cast<PhysicalDeviceCopyMemoryIndirectPropertiesNV const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceCopyMemoryIndirectPropertiesNV & operator=( PhysicalDeviceCopyMemoryIndirectPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceCopyMemoryIndirectPropertiesNV & operator=( VkPhysicalDeviceCopyMemoryIndirectPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCopyMemoryIndirectPropertiesNV const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceCopyMemoryIndirectPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceCopyMemoryIndirectPropertiesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceCopyMemoryIndirectPropertiesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceCopyMemoryIndirectPropertiesNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::QueueFlags const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, supportedQueues );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceCopyMemoryIndirectPropertiesNV const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceCopyMemoryIndirectPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( supportedQueues == rhs.supportedQueues );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceCopyMemoryIndirectPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCopyMemoryIndirectPropertiesNV;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::QueueFlags supportedQueues = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceCopyMemoryIndirectPropertiesNV>
+  {
+    using Type = PhysicalDeviceCopyMemoryIndirectPropertiesNV;
+  };
+
+  struct PhysicalDeviceCornerSampledImageFeaturesNV
+  {
+    using NativeType = VkPhysicalDeviceCornerSampledImageFeaturesNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceCornerSampledImageFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), cornerSampledImage( cornerSampledImage_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceCornerSampledImageFeaturesNV( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceCornerSampledImageFeaturesNV( VkPhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceCornerSampledImageFeaturesNV( *reinterpret_cast<PhysicalDeviceCornerSampledImageFeaturesNV const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceCornerSampledImageFeaturesNV & operator=( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceCornerSampledImageFeaturesNV & operator=( VkPhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCornerSampledImageFeaturesNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCornerSampledImageFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCornerSampledImageFeaturesNV & setCornerSampledImage( VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      cornerSampledImage = cornerSampledImage_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceCornerSampledImageFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceCornerSampledImageFeaturesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceCornerSampledImageFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceCornerSampledImageFeaturesNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, cornerSampledImage );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceCornerSampledImageFeaturesNV const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( cornerSampledImage == rhs.cornerSampledImage );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV>
+  {
+    using Type = PhysicalDeviceCornerSampledImageFeaturesNV;
+  };
+
+  struct PhysicalDeviceCoverageReductionModeFeaturesNV
+  {
+    using NativeType = VkPhysicalDeviceCoverageReductionModeFeaturesNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceCoverageReductionModeFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), coverageReductionMode( coverageReductionMode_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceCoverageReductionModeFeaturesNV( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceCoverageReductionModeFeaturesNV( VkPhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceCoverageReductionModeFeaturesNV( *reinterpret_cast<PhysicalDeviceCoverageReductionModeFeaturesNV const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceCoverageReductionModeFeaturesNV & operator=( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceCoverageReductionModeFeaturesNV & operator=( VkPhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCoverageReductionModeFeaturesNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCoverageReductionModeFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCoverageReductionModeFeaturesNV & setCoverageReductionMode( VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      coverageReductionMode = coverageReductionMode_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceCoverageReductionModeFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceCoverageReductionModeFeaturesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceCoverageReductionModeFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceCoverageReductionModeFeaturesNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, coverageReductionMode );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceCoverageReductionModeFeaturesNV const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( coverageReductionMode == rhs.coverageReductionMode );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV>
+  {
+    using Type = PhysicalDeviceCoverageReductionModeFeaturesNV;
+  };
+
+  struct PhysicalDeviceCustomBorderColorFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceCustomBorderColorFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 customBorderColors_ = {}, VULKAN_HPP_NAMESPACE::Bool32 customBorderColorWithoutFormat_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), customBorderColors( customBorderColors_ ), customBorderColorWithoutFormat( customBorderColorWithoutFormat_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorFeaturesEXT( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceCustomBorderColorFeaturesEXT( VkPhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceCustomBorderColorFeaturesEXT( *reinterpret_cast<PhysicalDeviceCustomBorderColorFeaturesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceCustomBorderColorFeaturesEXT & operator=( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceCustomBorderColorFeaturesEXT & operator=( VkPhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCustomBorderColorFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCustomBorderColorFeaturesEXT & setCustomBorderColors( VULKAN_HPP_NAMESPACE::Bool32 customBorderColors_ ) VULKAN_HPP_NOEXCEPT
+    {
+      customBorderColors = customBorderColors_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCustomBorderColorFeaturesEXT & setCustomBorderColorWithoutFormat( VULKAN_HPP_NAMESPACE::Bool32 customBorderColorWithoutFormat_ ) VULKAN_HPP_NOEXCEPT
+    {
+      customBorderColorWithoutFormat = customBorderColorWithoutFormat_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceCustomBorderColorFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceCustomBorderColorFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceCustomBorderColorFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceCustomBorderColorFeaturesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, customBorderColors, customBorderColorWithoutFormat );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceCustomBorderColorFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( customBorderColors == rhs.customBorderColors )
+          && ( customBorderColorWithoutFormat == rhs.customBorderColorWithoutFormat );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 customBorderColors = {};
+    VULKAN_HPP_NAMESPACE::Bool32 customBorderColorWithoutFormat = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT>
+  {
+    using Type = PhysicalDeviceCustomBorderColorFeaturesEXT;
+  };
+
+  struct PhysicalDeviceCustomBorderColorPropertiesEXT
+  {
+    using NativeType = VkPhysicalDeviceCustomBorderColorPropertiesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorPropertiesEXT(uint32_t maxCustomBorderColorSamplers_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), maxCustomBorderColorSamplers( maxCustomBorderColorSamplers_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorPropertiesEXT( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceCustomBorderColorPropertiesEXT( VkPhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceCustomBorderColorPropertiesEXT( *reinterpret_cast<PhysicalDeviceCustomBorderColorPropertiesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceCustomBorderColorPropertiesEXT & operator=( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceCustomBorderColorPropertiesEXT & operator=( VkPhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceCustomBorderColorPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceCustomBorderColorPropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceCustomBorderColorPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceCustomBorderColorPropertiesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, maxCustomBorderColorSamplers );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceCustomBorderColorPropertiesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxCustomBorderColorSamplers == rhs.maxCustomBorderColorSamplers );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT;
+    void * pNext = {};
+    uint32_t maxCustomBorderColorSamplers = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT>
+  {
+    using Type = PhysicalDeviceCustomBorderColorPropertiesEXT;
+  };
+
+  // C++ wrapper around VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV.
+  // Assumed layout-identical to the C struct (the reinterpret_cast conversions
+  // below rely on this); sType is fixed by its member initializer.
+  struct PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV
+  {
+    using NativeType = VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; note that pNext_ is the *last* parameter even though
+    // pNext precedes the data member in the struct layout.
+VULKAN_HPP_CONSTEXPR PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), dedicatedAllocationImageAliasing( dedicatedAllocationImageAliasing_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Converting constructor from the C struct: reinterprets rhs as the C++
+    // type and delegates to the defaulted copy constructor.
+    PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( *reinterpret_cast<PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & operator=( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assignment from the C struct via reinterpret_cast (relies on the layout
+    // assumption noted above).
+    PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & operator=( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters; each returns *this so calls can be chained.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & setDedicatedAllocationImageAliasing( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dedicatedAllocationImageAliasing = dedicatedAllocationImageAliasing_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy views of this object as the underlying C struct.
+    operator VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV*>( this );
+    }
+
+    // reflect(): tuple of references to every member, used for tuple-based
+    // comparison when VULKAN_HPP_USE_REFLECT is enabled.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, dedicatedAllocationImageAliasing );
+    }
+#endif
+
+
+    // Comparison: defaulted <=> when available, otherwise memberwise ==.
+    // Note pNext is compared as a raw pointer address, not deep-compared.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( dedicatedAllocationImageAliasing == rhs.dedicatedAllocationImageAliasing );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members: sType first, then pNext, then the payload (order matches the
+    // assumed C struct layout).
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing = {};
+
+  };
+
+  // Trait specialization mapping the StructureType enum value to this struct.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>
+  {
+    using Type = PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV;
+  };
+
+  // C++ wrapper around VkPhysicalDeviceDepthClampZeroOneFeaturesEXT.
+  // Assumed layout-identical to the C struct (the reinterpret_cast conversions
+  // below rely on this); sType is fixed by its member initializer.
+  struct PhysicalDeviceDepthClampZeroOneFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceDepthClampZeroOneFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDepthClampZeroOneFeaturesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; pNext_ is the last parameter.
+VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClampZeroOneFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 depthClampZeroOne_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), depthClampZeroOne( depthClampZeroOne_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClampZeroOneFeaturesEXT( PhysicalDeviceDepthClampZeroOneFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Converting constructor from the C struct via reinterpret_cast.
+    PhysicalDeviceDepthClampZeroOneFeaturesEXT( VkPhysicalDeviceDepthClampZeroOneFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceDepthClampZeroOneFeaturesEXT( *reinterpret_cast<PhysicalDeviceDepthClampZeroOneFeaturesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceDepthClampZeroOneFeaturesEXT & operator=( PhysicalDeviceDepthClampZeroOneFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assignment from the C struct via reinterpret_cast.
+    PhysicalDeviceDepthClampZeroOneFeaturesEXT & operator=( VkPhysicalDeviceDepthClampZeroOneFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClampZeroOneFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters; each returns *this so calls can be chained.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClampZeroOneFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClampZeroOneFeaturesEXT & setDepthClampZeroOne( VULKAN_HPP_NAMESPACE::Bool32 depthClampZeroOne_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthClampZeroOne = depthClampZeroOne_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy views of this object as the underlying C struct.
+    operator VkPhysicalDeviceDepthClampZeroOneFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceDepthClampZeroOneFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceDepthClampZeroOneFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceDepthClampZeroOneFeaturesEXT*>( this );
+    }
+
+    // reflect(): tuple of references to every member, used for tuple-based
+    // comparison when VULKAN_HPP_USE_REFLECT is enabled.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, depthClampZeroOne );
+    }
+#endif
+
+
+    // Comparison: defaulted <=> when available, otherwise memberwise ==.
+    // pNext is compared as a raw pointer address.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceDepthClampZeroOneFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceDepthClampZeroOneFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( depthClampZeroOne == rhs.depthClampZeroOne );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceDepthClampZeroOneFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members: sType first, then pNext, then the payload.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthClampZeroOneFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 depthClampZeroOne = {};
+
+  };
+
+  // Trait specialization mapping the StructureType enum value to this struct.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceDepthClampZeroOneFeaturesEXT>
+  {
+    using Type = PhysicalDeviceDepthClampZeroOneFeaturesEXT;
+  };
+
+  // C++ wrapper around VkPhysicalDeviceDepthClipControlFeaturesEXT.
+  // Assumed layout-identical to the C struct (the reinterpret_cast conversions
+  // below rely on this); sType is fixed by its member initializer.
+  struct PhysicalDeviceDepthClipControlFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceDepthClipControlFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDepthClipControlFeaturesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; pNext_ is the last parameter.
+VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipControlFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 depthClipControl_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), depthClipControl( depthClipControl_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipControlFeaturesEXT( PhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Converting constructor from the C struct via reinterpret_cast.
+    PhysicalDeviceDepthClipControlFeaturesEXT( VkPhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceDepthClipControlFeaturesEXT( *reinterpret_cast<PhysicalDeviceDepthClipControlFeaturesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceDepthClipControlFeaturesEXT & operator=( PhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assignment from the C struct via reinterpret_cast.
+    PhysicalDeviceDepthClipControlFeaturesEXT & operator=( VkPhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipControlFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters; each returns *this so calls can be chained.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClipControlFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClipControlFeaturesEXT & setDepthClipControl( VULKAN_HPP_NAMESPACE::Bool32 depthClipControl_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthClipControl = depthClipControl_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy views of this object as the underlying C struct.
+    operator VkPhysicalDeviceDepthClipControlFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceDepthClipControlFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceDepthClipControlFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceDepthClipControlFeaturesEXT*>( this );
+    }
+
+    // reflect(): tuple of references to every member, used for tuple-based
+    // comparison when VULKAN_HPP_USE_REFLECT is enabled.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, depthClipControl );
+    }
+#endif
+
+
+    // Comparison: defaulted <=> when available, otherwise memberwise ==.
+    // pNext is compared as a raw pointer address.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceDepthClipControlFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( depthClipControl == rhs.depthClipControl );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members: sType first, then pNext, then the payload.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthClipControlFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 depthClipControl = {};
+
+  };
+
+  // Trait specialization mapping the StructureType enum value to this struct.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceDepthClipControlFeaturesEXT>
+  {
+    using Type = PhysicalDeviceDepthClipControlFeaturesEXT;
+  };
+
+  // C++ wrapper around VkPhysicalDeviceDepthClipEnableFeaturesEXT.
+  // Assumed layout-identical to the C struct (the reinterpret_cast conversions
+  // below rely on this); sType is fixed by its member initializer.
+  struct PhysicalDeviceDepthClipEnableFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceDepthClipEnableFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; pNext_ is the last parameter.
+VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipEnableFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), depthClipEnable( depthClipEnable_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipEnableFeaturesEXT( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Converting constructor from the C struct via reinterpret_cast.
+    PhysicalDeviceDepthClipEnableFeaturesEXT( VkPhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceDepthClipEnableFeaturesEXT( *reinterpret_cast<PhysicalDeviceDepthClipEnableFeaturesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceDepthClipEnableFeaturesEXT & operator=( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assignment from the C struct via reinterpret_cast.
+    PhysicalDeviceDepthClipEnableFeaturesEXT & operator=( VkPhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipEnableFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters; each returns *this so calls can be chained.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClipEnableFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClipEnableFeaturesEXT & setDepthClipEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthClipEnable = depthClipEnable_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy views of this object as the underlying C struct.
+    operator VkPhysicalDeviceDepthClipEnableFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceDepthClipEnableFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceDepthClipEnableFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceDepthClipEnableFeaturesEXT*>( this );
+    }
+
+    // reflect(): tuple of references to every member, used for tuple-based
+    // comparison when VULKAN_HPP_USE_REFLECT is enabled.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, depthClipEnable );
+    }
+#endif
+
+
+    // Comparison: defaulted <=> when available, otherwise memberwise ==.
+    // pNext is compared as a raw pointer address.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceDepthClipEnableFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( depthClipEnable == rhs.depthClipEnable );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members: sType first, then pNext, then the payload.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable = {};
+
+  };
+
+  // Trait specialization mapping the StructureType enum value to this struct.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT>
+  {
+    using Type = PhysicalDeviceDepthClipEnableFeaturesEXT;
+  };
+
+  // C++ wrapper around VkPhysicalDeviceDepthStencilResolveProperties.
+  // A read-only properties struct: no setters are generated for it.
+  // Assumed layout-identical to the C struct (the reinterpret_cast conversions
+  // below rely on this); sType is fixed by its member initializer.
+  struct PhysicalDeviceDepthStencilResolveProperties
+  {
+    using NativeType = VkPhysicalDeviceDepthStencilResolveProperties;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDepthStencilResolveProperties;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; pNext_ is the last parameter.
+VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthStencilResolveProperties(VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes_ = {}, VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes_ = {}, VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone_ = {}, VULKAN_HPP_NAMESPACE::Bool32 independentResolve_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), supportedDepthResolveModes( supportedDepthResolveModes_ ), supportedStencilResolveModes( supportedStencilResolveModes_ ), independentResolveNone( independentResolveNone_ ), independentResolve( independentResolve_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthStencilResolveProperties( PhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Converting constructor from the C struct via reinterpret_cast.
+    PhysicalDeviceDepthStencilResolveProperties( VkPhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceDepthStencilResolveProperties( *reinterpret_cast<PhysicalDeviceDepthStencilResolveProperties const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceDepthStencilResolveProperties & operator=( PhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assignment from the C struct via reinterpret_cast.
+    PhysicalDeviceDepthStencilResolveProperties & operator=( VkPhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthStencilResolveProperties const *>( &rhs );
+      return *this;
+    }
+
+
+    // Zero-copy views of this object as the underlying C struct.
+    operator VkPhysicalDeviceDepthStencilResolveProperties const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceDepthStencilResolveProperties*>( this );
+    }
+
+    operator VkPhysicalDeviceDepthStencilResolveProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceDepthStencilResolveProperties*>( this );
+    }
+
+    // reflect(): tuple of references to every member, used for tuple-based
+    // comparison when VULKAN_HPP_USE_REFLECT is enabled.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ResolveModeFlags const &, VULKAN_HPP_NAMESPACE::ResolveModeFlags const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, supportedDepthResolveModes, supportedStencilResolveModes, independentResolveNone, independentResolve );
+    }
+#endif
+
+
+    // Comparison: defaulted <=> when available, otherwise memberwise ==.
+    // pNext is compared as a raw pointer address.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceDepthStencilResolveProperties const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceDepthStencilResolveProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( supportedDepthResolveModes == rhs.supportedDepthResolveModes )
+          && ( supportedStencilResolveModes == rhs.supportedStencilResolveModes )
+          && ( independentResolveNone == rhs.independentResolveNone )
+          && ( independentResolve == rhs.independentResolve );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceDepthStencilResolveProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members: sType first, then pNext, then the payload.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthStencilResolveProperties;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes = {};
+    VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes = {};
+    VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone = {};
+    VULKAN_HPP_NAMESPACE::Bool32 independentResolve = {};
+
+  };
+
+  // Trait specialization mapping the StructureType enum value to this struct.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceDepthStencilResolveProperties>
+  {
+    using Type = PhysicalDeviceDepthStencilResolveProperties;
+  };
+  // Alias so code written against the KHR-suffixed name keeps compiling.
+  using PhysicalDeviceDepthStencilResolvePropertiesKHR = PhysicalDeviceDepthStencilResolveProperties;
+
+  // C++ wrapper around VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT.
+  // A read-only properties struct: no setters are generated for it.
+  // Assumed layout-identical to the C struct (the reinterpret_cast conversions
+  // below rely on this); sType is fixed by its member initializer.
+  struct PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT
+  {
+    using NativeType = VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorBufferDensityMapPropertiesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; pNext_ is the last parameter.
+VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT(size_t combinedImageSamplerDensityMapDescriptorSize_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), combinedImageSamplerDensityMapDescriptorSize( combinedImageSamplerDensityMapDescriptorSize_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT( PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Converting constructor from the C struct via reinterpret_cast.
+    PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT( VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT( *reinterpret_cast<PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT & operator=( PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assignment from the C struct via reinterpret_cast.
+    PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT & operator=( VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+
+    // Zero-copy views of this object as the underlying C struct.
+    operator VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT*>( this );
+    }
+
+    // reflect(): tuple of references to every member, used for tuple-based
+    // comparison when VULKAN_HPP_USE_REFLECT is enabled.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, size_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, combinedImageSamplerDensityMapDescriptorSize );
+    }
+#endif
+
+
+    // Comparison: defaulted <=> when available, otherwise memberwise ==.
+    // pNext is compared as a raw pointer address.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( combinedImageSamplerDensityMapDescriptorSize == rhs.combinedImageSamplerDensityMapDescriptorSize );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members: sType first, then pNext, then the payload.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorBufferDensityMapPropertiesEXT;
+    void * pNext = {};
+    size_t combinedImageSamplerDensityMapDescriptorSize = {};
+
+  };
+
+  // Trait specialization mapping the StructureType enum value to this struct.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceDescriptorBufferDensityMapPropertiesEXT>
+  {
+    using Type = PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT;
+  };
+
+  // C++ wrapper around VkPhysicalDeviceDescriptorBufferFeaturesEXT.
+  // Assumed layout-identical to the C struct (the reinterpret_cast conversions
+  // below rely on this); sType is fixed by its member initializer.
+  struct PhysicalDeviceDescriptorBufferFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceDescriptorBufferFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorBufferFeaturesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; pNext_ is the last parameter.
+VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorBufferFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 descriptorBuffer_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBufferCaptureReplay_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBufferImageLayoutIgnored_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBufferPushDescriptors_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), descriptorBuffer( descriptorBuffer_ ), descriptorBufferCaptureReplay( descriptorBufferCaptureReplay_ ), descriptorBufferImageLayoutIgnored( descriptorBufferImageLayoutIgnored_ ), descriptorBufferPushDescriptors( descriptorBufferPushDescriptors_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorBufferFeaturesEXT( PhysicalDeviceDescriptorBufferFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Converting constructor from the C struct via reinterpret_cast.
+    PhysicalDeviceDescriptorBufferFeaturesEXT( VkPhysicalDeviceDescriptorBufferFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceDescriptorBufferFeaturesEXT( *reinterpret_cast<PhysicalDeviceDescriptorBufferFeaturesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceDescriptorBufferFeaturesEXT & operator=( PhysicalDeviceDescriptorBufferFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assignment from the C struct via reinterpret_cast.
+    PhysicalDeviceDescriptorBufferFeaturesEXT & operator=( VkPhysicalDeviceDescriptorBufferFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters; each returns *this so calls can be chained.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferFeaturesEXT & setDescriptorBuffer( VULKAN_HPP_NAMESPACE::Bool32 descriptorBuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBuffer = descriptorBuffer_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferFeaturesEXT & setDescriptorBufferCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 descriptorBufferCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBufferCaptureReplay = descriptorBufferCaptureReplay_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferFeaturesEXT & setDescriptorBufferImageLayoutIgnored( VULKAN_HPP_NAMESPACE::Bool32 descriptorBufferImageLayoutIgnored_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBufferImageLayoutIgnored = descriptorBufferImageLayoutIgnored_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferFeaturesEXT & setDescriptorBufferPushDescriptors( VULKAN_HPP_NAMESPACE::Bool32 descriptorBufferPushDescriptors_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBufferPushDescriptors = descriptorBufferPushDescriptors_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy views of this object as the underlying C struct.
+    operator VkPhysicalDeviceDescriptorBufferFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceDescriptorBufferFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceDescriptorBufferFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceDescriptorBufferFeaturesEXT*>( this );
+    }
+
+    // reflect(): tuple of references to every member, used for tuple-based
+    // comparison when VULKAN_HPP_USE_REFLECT is enabled.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, descriptorBuffer, descriptorBufferCaptureReplay, descriptorBufferImageLayoutIgnored, descriptorBufferPushDescriptors );
+    }
+#endif
+
+
+    // Comparison: defaulted <=> when available, otherwise memberwise ==.
+    // pNext is compared as a raw pointer address.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceDescriptorBufferFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceDescriptorBufferFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( descriptorBuffer == rhs.descriptorBuffer )
+          && ( descriptorBufferCaptureReplay == rhs.descriptorBufferCaptureReplay )
+          && ( descriptorBufferImageLayoutIgnored == rhs.descriptorBufferImageLayoutIgnored )
+          && ( descriptorBufferPushDescriptors == rhs.descriptorBufferPushDescriptors );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceDescriptorBufferFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members: sType first, then pNext, then the payload.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorBufferFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBuffer = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBufferCaptureReplay = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBufferImageLayoutIgnored = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBufferPushDescriptors = {};
+
+  };
+
+  // Trait specialization mapping the StructureType enum value to this struct.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceDescriptorBufferFeaturesEXT>
+  {
+    using Type = PhysicalDeviceDescriptorBufferFeaturesEXT;
+  };
+
+  struct PhysicalDeviceDescriptorBufferPropertiesEXT
+  {
+    using NativeType = VkPhysicalDeviceDescriptorBufferPropertiesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorBufferPropertiesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorBufferPropertiesEXT(VULKAN_HPP_NAMESPACE::Bool32 combinedImageSamplerDescriptorSingleArray_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bufferlessPushDescriptors_ = {}, VULKAN_HPP_NAMESPACE::Bool32 allowSamplerImageViewPostSubmitCreation_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize descriptorBufferOffsetAlignment_ = {}, uint32_t maxDescriptorBufferBindings_ = {}, uint32_t maxResourceDescriptorBufferBindings_ = {}, uint32_t maxSamplerDescriptorBufferBindings_ = {}, uint32_t maxEmbeddedImmutableSamplerBindings_ = {}, uint32_t maxEmbeddedImmutableSamplers_ = {}, size_t bufferCaptureReplayDescriptorDataSize_ = {}, size_t imageCaptureReplayDescriptorDataSize_ = {}, size_t imageViewCaptureReplayDescriptorDataSize_ = {}, size_t samplerCaptureReplayDescriptorDataSize_ = {}, size_t accelerationStructureCaptureReplayDescriptorDataSize_ = {}, size_t samplerDescriptorSize_ = {}, size_t combinedImageSamplerDescriptorSize_ = {}, size_t sampledImageDescriptorSize_ = {}, size_t storageImageDescriptorSize_ = {}, size_t uniformTexelBufferDescriptorSize_ = {}, size_t robustUniformTexelBufferDescriptorSize_ = {}, size_t storageTexelBufferDescriptorSize_ = {}, size_t robustStorageTexelBufferDescriptorSize_ = {}, size_t uniformBufferDescriptorSize_ = {}, size_t robustUniformBufferDescriptorSize_ = {}, size_t storageBufferDescriptorSize_ = {}, size_t robustStorageBufferDescriptorSize_ = {}, size_t inputAttachmentDescriptorSize_ = {}, size_t accelerationStructureDescriptorSize_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize maxSamplerDescriptorBufferRange_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize maxResourceDescriptorBufferRange_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize samplerDescriptorBufferAddressSpaceSize_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize resourceDescriptorBufferAddressSpaceSize_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize descriptorBufferAddressSpaceSize_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), combinedImageSamplerDescriptorSingleArray( combinedImageSamplerDescriptorSingleArray_ ), bufferlessPushDescriptors( bufferlessPushDescriptors_ ), allowSamplerImageViewPostSubmitCreation( allowSamplerImageViewPostSubmitCreation_ ), descriptorBufferOffsetAlignment( descriptorBufferOffsetAlignment_ ), maxDescriptorBufferBindings( maxDescriptorBufferBindings_ ), maxResourceDescriptorBufferBindings( maxResourceDescriptorBufferBindings_ ), maxSamplerDescriptorBufferBindings( maxSamplerDescriptorBufferBindings_ ), maxEmbeddedImmutableSamplerBindings( maxEmbeddedImmutableSamplerBindings_ ), maxEmbeddedImmutableSamplers( maxEmbeddedImmutableSamplers_ ), bufferCaptureReplayDescriptorDataSize( bufferCaptureReplayDescriptorDataSize_ ), imageCaptureReplayDescriptorDataSize( imageCaptureReplayDescriptorDataSize_ ), imageViewCaptureReplayDescriptorDataSize( imageViewCaptureReplayDescriptorDataSize_ ), samplerCaptureReplayDescriptorDataSize( samplerCaptureReplayDescriptorDataSize_ ), accelerationStructureCaptureReplayDescriptorDataSize( accelerationStructureCaptureReplayDescriptorDataSize_ ), samplerDescriptorSize( samplerDescriptorSize_ ), combinedImageSamplerDescriptorSize( combinedImageSamplerDescriptorSize_ ), sampledImageDescriptorSize( sampledImageDescriptorSize_ ), storageImageDescriptorSize( storageImageDescriptorSize_ ), uniformTexelBufferDescriptorSize( uniformTexelBufferDescriptorSize_ ), robustUniformTexelBufferDescriptorSize( robustUniformTexelBufferDescriptorSize_ ), storageTexelBufferDescriptorSize( storageTexelBufferDescriptorSize_ ), robustStorageTexelBufferDescriptorSize( robustStorageTexelBufferDescriptorSize_ ), uniformBufferDescriptorSize( uniformBufferDescriptorSize_ ), robustUniformBufferDescriptorSize( robustUniformBufferDescriptorSize_ ), storageBufferDescriptorSize( storageBufferDescriptorSize_ ), robustStorageBufferDescriptorSize( robustStorageBufferDescriptorSize_ ), inputAttachmentDescriptorSize( inputAttachmentDescriptorSize_ ), accelerationStructureDescriptorSize( accelerationStructureDescriptorSize_ ), maxSamplerDescriptorBufferRange( maxSamplerDescriptorBufferRange_ ), maxResourceDescriptorBufferRange( maxResourceDescriptorBufferRange_ ), samplerDescriptorBufferAddressSpaceSize( samplerDescriptorBufferAddressSpaceSize_ ), resourceDescriptorBufferAddressSpaceSize( resourceDescriptorBufferAddressSpaceSize_ ), descriptorBufferAddressSpaceSize( descriptorBufferAddressSpaceSize_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorBufferPropertiesEXT( PhysicalDeviceDescriptorBufferPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceDescriptorBufferPropertiesEXT( VkPhysicalDeviceDescriptorBufferPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceDescriptorBufferPropertiesEXT( *reinterpret_cast<PhysicalDeviceDescriptorBufferPropertiesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceDescriptorBufferPropertiesEXT & operator=( PhysicalDeviceDescriptorBufferPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceDescriptorBufferPropertiesEXT & operator=( VkPhysicalDeviceDescriptorBufferPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceDescriptorBufferPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceDescriptorBufferPropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceDescriptorBufferPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceDescriptorBufferPropertiesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, size_t const &, size_t const &, size_t const &, size_t const &, size_t const &, size_t const &, size_t const &, size_t const &, size_t const &, size_t const &, size_t const &, size_t const &, size_t const &, size_t const &, size_t const &, size_t const &, size_t const &, size_t const &, size_t const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, combinedImageSamplerDescriptorSingleArray, bufferlessPushDescriptors, allowSamplerImageViewPostSubmitCreation, descriptorBufferOffsetAlignment, maxDescriptorBufferBindings, maxResourceDescriptorBufferBindings, maxSamplerDescriptorBufferBindings, maxEmbeddedImmutableSamplerBindings, maxEmbeddedImmutableSamplers, bufferCaptureReplayDescriptorDataSize, imageCaptureReplayDescriptorDataSize, imageViewCaptureReplayDescriptorDataSize, samplerCaptureReplayDescriptorDataSize, accelerationStructureCaptureReplayDescriptorDataSize, samplerDescriptorSize, combinedImageSamplerDescriptorSize, sampledImageDescriptorSize, storageImageDescriptorSize, uniformTexelBufferDescriptorSize, robustUniformTexelBufferDescriptorSize, storageTexelBufferDescriptorSize, robustStorageTexelBufferDescriptorSize, uniformBufferDescriptorSize, robustUniformBufferDescriptorSize, storageBufferDescriptorSize, robustStorageBufferDescriptorSize, inputAttachmentDescriptorSize, accelerationStructureDescriptorSize, maxSamplerDescriptorBufferRange, maxResourceDescriptorBufferRange, samplerDescriptorBufferAddressSpaceSize, resourceDescriptorBufferAddressSpaceSize, descriptorBufferAddressSpaceSize );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceDescriptorBufferPropertiesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceDescriptorBufferPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( combinedImageSamplerDescriptorSingleArray == rhs.combinedImageSamplerDescriptorSingleArray )
+          && ( bufferlessPushDescriptors == rhs.bufferlessPushDescriptors )
+          && ( allowSamplerImageViewPostSubmitCreation == rhs.allowSamplerImageViewPostSubmitCreation )
+          && ( descriptorBufferOffsetAlignment == rhs.descriptorBufferOffsetAlignment )
+          && ( maxDescriptorBufferBindings == rhs.maxDescriptorBufferBindings )
+          && ( maxResourceDescriptorBufferBindings == rhs.maxResourceDescriptorBufferBindings )
+          && ( maxSamplerDescriptorBufferBindings == rhs.maxSamplerDescriptorBufferBindings )
+          && ( maxEmbeddedImmutableSamplerBindings == rhs.maxEmbeddedImmutableSamplerBindings )
+          && ( maxEmbeddedImmutableSamplers == rhs.maxEmbeddedImmutableSamplers )
+          && ( bufferCaptureReplayDescriptorDataSize == rhs.bufferCaptureReplayDescriptorDataSize )
+          && ( imageCaptureReplayDescriptorDataSize == rhs.imageCaptureReplayDescriptorDataSize )
+          && ( imageViewCaptureReplayDescriptorDataSize == rhs.imageViewCaptureReplayDescriptorDataSize )
+          && ( samplerCaptureReplayDescriptorDataSize == rhs.samplerCaptureReplayDescriptorDataSize )
+          && ( accelerationStructureCaptureReplayDescriptorDataSize == rhs.accelerationStructureCaptureReplayDescriptorDataSize )
+          && ( samplerDescriptorSize == rhs.samplerDescriptorSize )
+          && ( combinedImageSamplerDescriptorSize == rhs.combinedImageSamplerDescriptorSize )
+          && ( sampledImageDescriptorSize == rhs.sampledImageDescriptorSize )
+          && ( storageImageDescriptorSize == rhs.storageImageDescriptorSize )
+          && ( uniformTexelBufferDescriptorSize == rhs.uniformTexelBufferDescriptorSize )
+          && ( robustUniformTexelBufferDescriptorSize == rhs.robustUniformTexelBufferDescriptorSize )
+          && ( storageTexelBufferDescriptorSize == rhs.storageTexelBufferDescriptorSize )
+          && ( robustStorageTexelBufferDescriptorSize == rhs.robustStorageTexelBufferDescriptorSize )
+          && ( uniformBufferDescriptorSize == rhs.uniformBufferDescriptorSize )
+          && ( robustUniformBufferDescriptorSize == rhs.robustUniformBufferDescriptorSize )
+          && ( storageBufferDescriptorSize == rhs.storageBufferDescriptorSize )
+          && ( robustStorageBufferDescriptorSize == rhs.robustStorageBufferDescriptorSize )
+          && ( inputAttachmentDescriptorSize == rhs.inputAttachmentDescriptorSize )
+          && ( accelerationStructureDescriptorSize == rhs.accelerationStructureDescriptorSize )
+          && ( maxSamplerDescriptorBufferRange == rhs.maxSamplerDescriptorBufferRange )
+          && ( maxResourceDescriptorBufferRange == rhs.maxResourceDescriptorBufferRange )
+          && ( samplerDescriptorBufferAddressSpaceSize == rhs.samplerDescriptorBufferAddressSpaceSize )
+          && ( resourceDescriptorBufferAddressSpaceSize == rhs.resourceDescriptorBufferAddressSpaceSize )
+          && ( descriptorBufferAddressSpaceSize == rhs.descriptorBufferAddressSpaceSize );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceDescriptorBufferPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorBufferPropertiesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 combinedImageSamplerDescriptorSingleArray = {};
+    VULKAN_HPP_NAMESPACE::Bool32 bufferlessPushDescriptors = {};
+    VULKAN_HPP_NAMESPACE::Bool32 allowSamplerImageViewPostSubmitCreation = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize descriptorBufferOffsetAlignment = {};
+    uint32_t maxDescriptorBufferBindings = {};
+    uint32_t maxResourceDescriptorBufferBindings = {};
+    uint32_t maxSamplerDescriptorBufferBindings = {};
+    uint32_t maxEmbeddedImmutableSamplerBindings = {};
+    uint32_t maxEmbeddedImmutableSamplers = {};
+    size_t bufferCaptureReplayDescriptorDataSize = {};
+    size_t imageCaptureReplayDescriptorDataSize = {};
+    size_t imageViewCaptureReplayDescriptorDataSize = {};
+    size_t samplerCaptureReplayDescriptorDataSize = {};
+    size_t accelerationStructureCaptureReplayDescriptorDataSize = {};
+    size_t samplerDescriptorSize = {};
+    size_t combinedImageSamplerDescriptorSize = {};
+    size_t sampledImageDescriptorSize = {};
+    size_t storageImageDescriptorSize = {};
+    size_t uniformTexelBufferDescriptorSize = {};
+    size_t robustUniformTexelBufferDescriptorSize = {};
+    size_t storageTexelBufferDescriptorSize = {};
+    size_t robustStorageTexelBufferDescriptorSize = {};
+    size_t uniformBufferDescriptorSize = {};
+    size_t robustUniformBufferDescriptorSize = {};
+    size_t storageBufferDescriptorSize = {};
+    size_t robustStorageBufferDescriptorSize = {};
+    size_t inputAttachmentDescriptorSize = {};
+    size_t accelerationStructureDescriptorSize = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize maxSamplerDescriptorBufferRange = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize maxResourceDescriptorBufferRange = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize samplerDescriptorBufferAddressSpaceSize = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize resourceDescriptorBufferAddressSpaceSize = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize descriptorBufferAddressSpaceSize = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceDescriptorBufferPropertiesEXT>
+  {
+    using Type = PhysicalDeviceDescriptorBufferPropertiesEXT;
+  };
+
+  struct PhysicalDeviceDescriptorIndexingFeatures
+  {
+    using NativeType = VkPhysicalDeviceDescriptorIndexingFeatures;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorIndexingFeatures;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingFeatures(VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ = {}, VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), shaderInputAttachmentArrayDynamicIndexing( shaderInputAttachmentArrayDynamicIndexing_ ), shaderUniformTexelBufferArrayDynamicIndexing( shaderUniformTexelBufferArrayDynamicIndexing_ ), shaderStorageTexelBufferArrayDynamicIndexing( shaderStorageTexelBufferArrayDynamicIndexing_ ), shaderUniformBufferArrayNonUniformIndexing( shaderUniformBufferArrayNonUniformIndexing_ ), shaderSampledImageArrayNonUniformIndexing( shaderSampledImageArrayNonUniformIndexing_ ), shaderStorageBufferArrayNonUniformIndexing( shaderStorageBufferArrayNonUniformIndexing_ ), shaderStorageImageArrayNonUniformIndexing( shaderStorageImageArrayNonUniformIndexing_ ), shaderInputAttachmentArrayNonUniformIndexing( shaderInputAttachmentArrayNonUniformIndexing_ ), shaderUniformTexelBufferArrayNonUniformIndexing( shaderUniformTexelBufferArrayNonUniformIndexing_ ), shaderStorageTexelBufferArrayNonUniformIndexing( shaderStorageTexelBufferArrayNonUniformIndexing_ ), descriptorBindingUniformBufferUpdateAfterBind( descriptorBindingUniformBufferUpdateAfterBind_ ), descriptorBindingSampledImageUpdateAfterBind( descriptorBindingSampledImageUpdateAfterBind_ ), descriptorBindingStorageImageUpdateAfterBind( descriptorBindingStorageImageUpdateAfterBind_ ), descriptorBindingStorageBufferUpdateAfterBind( descriptorBindingStorageBufferUpdateAfterBind_ ), descriptorBindingUniformTexelBufferUpdateAfterBind( descriptorBindingUniformTexelBufferUpdateAfterBind_ ), descriptorBindingStorageTexelBufferUpdateAfterBind( descriptorBindingStorageTexelBufferUpdateAfterBind_ ), descriptorBindingUpdateUnusedWhilePending( descriptorBindingUpdateUnusedWhilePending_ ), descriptorBindingPartiallyBound( descriptorBindingPartiallyBound_ ), descriptorBindingVariableDescriptorCount( descriptorBindingVariableDescriptorCount_ ), runtimeDescriptorArray( runtimeDescriptorArray_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingFeatures( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceDescriptorIndexingFeatures( VkPhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceDescriptorIndexingFeatures( *reinterpret_cast<PhysicalDeviceDescriptorIndexingFeatures const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceDescriptorIndexingFeatures & operator=( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceDescriptorIndexingFeatures & operator=( VkPhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeatures const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderInputAttachmentArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderInputAttachmentArrayDynamicIndexing = shaderInputAttachmentArrayDynamicIndexing_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderUniformTexelBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderUniformTexelBufferArrayDynamicIndexing = shaderUniformTexelBufferArrayDynamicIndexing_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderStorageTexelBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderStorageTexelBufferArrayDynamicIndexing = shaderStorageTexelBufferArrayDynamicIndexing_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderUniformBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderUniformBufferArrayNonUniformIndexing = shaderUniformBufferArrayNonUniformIndexing_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderSampledImageArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSampledImageArrayNonUniformIndexing = shaderSampledImageArrayNonUniformIndexing_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderStorageBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderStorageBufferArrayNonUniformIndexing = shaderStorageBufferArrayNonUniformIndexing_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderStorageImageArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderStorageImageArrayNonUniformIndexing = shaderStorageImageArrayNonUniformIndexing_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderInputAttachmentArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderInputAttachmentArrayNonUniformIndexing = shaderInputAttachmentArrayNonUniformIndexing_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderUniformTexelBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderUniformTexelBufferArrayNonUniformIndexing = shaderUniformTexelBufferArrayNonUniformIndexing_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderStorageTexelBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderStorageTexelBufferArrayNonUniformIndexing = shaderStorageTexelBufferArrayNonUniformIndexing_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingUniformBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingUniformBufferUpdateAfterBind = descriptorBindingUniformBufferUpdateAfterBind_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingSampledImageUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingSampledImageUpdateAfterBind = descriptorBindingSampledImageUpdateAfterBind_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingStorageImageUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingStorageImageUpdateAfterBind = descriptorBindingStorageImageUpdateAfterBind_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingStorageBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingStorageBufferUpdateAfterBind = descriptorBindingStorageBufferUpdateAfterBind_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingUniformTexelBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingUniformTexelBufferUpdateAfterBind = descriptorBindingUniformTexelBufferUpdateAfterBind_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingStorageTexelBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingStorageTexelBufferUpdateAfterBind = descriptorBindingStorageTexelBufferUpdateAfterBind_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingUpdateUnusedWhilePending( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingUpdateUnusedWhilePending = descriptorBindingUpdateUnusedWhilePending_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingPartiallyBound( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingPartiallyBound = descriptorBindingPartiallyBound_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingVariableDescriptorCount( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingVariableDescriptorCount = descriptorBindingVariableDescriptorCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setRuntimeDescriptorArray( VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ ) VULKAN_HPP_NOEXCEPT
+    {
+      runtimeDescriptorArray = runtimeDescriptorArray_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceDescriptorIndexingFeatures const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceDescriptorIndexingFeatures*>( this );
+    }
+
+    operator VkPhysicalDeviceDescriptorIndexingFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceDescriptorIndexingFeatures*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, shaderInputAttachmentArrayDynamicIndexing, shaderUniformTexelBufferArrayDynamicIndexing, shaderStorageTexelBufferArrayDynamicIndexing, shaderUniformBufferArrayNonUniformIndexing, shaderSampledImageArrayNonUniformIndexing, shaderStorageBufferArrayNonUniformIndexing, shaderStorageImageArrayNonUniformIndexing, shaderInputAttachmentArrayNonUniformIndexing, shaderUniformTexelBufferArrayNonUniformIndexing, shaderStorageTexelBufferArrayNonUniformIndexing, descriptorBindingUniformBufferUpdateAfterBind, descriptorBindingSampledImageUpdateAfterBind, descriptorBindingStorageImageUpdateAfterBind, descriptorBindingStorageBufferUpdateAfterBind, descriptorBindingUniformTexelBufferUpdateAfterBind, descriptorBindingStorageTexelBufferUpdateAfterBind, descriptorBindingUpdateUnusedWhilePending, descriptorBindingPartiallyBound, descriptorBindingVariableDescriptorCount, runtimeDescriptorArray );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceDescriptorIndexingFeatures const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shaderInputAttachmentArrayDynamicIndexing == rhs.shaderInputAttachmentArrayDynamicIndexing )
+          && ( shaderUniformTexelBufferArrayDynamicIndexing == rhs.shaderUniformTexelBufferArrayDynamicIndexing )
+          && ( shaderStorageTexelBufferArrayDynamicIndexing == rhs.shaderStorageTexelBufferArrayDynamicIndexing )
+          && ( shaderUniformBufferArrayNonUniformIndexing == rhs.shaderUniformBufferArrayNonUniformIndexing )
+          && ( shaderSampledImageArrayNonUniformIndexing == rhs.shaderSampledImageArrayNonUniformIndexing )
+          && ( shaderStorageBufferArrayNonUniformIndexing == rhs.shaderStorageBufferArrayNonUniformIndexing )
+          && ( shaderStorageImageArrayNonUniformIndexing == rhs.shaderStorageImageArrayNonUniformIndexing )
+          && ( shaderInputAttachmentArrayNonUniformIndexing == rhs.shaderInputAttachmentArrayNonUniformIndexing )
+          && ( shaderUniformTexelBufferArrayNonUniformIndexing == rhs.shaderUniformTexelBufferArrayNonUniformIndexing )
+          && ( shaderStorageTexelBufferArrayNonUniformIndexing == rhs.shaderStorageTexelBufferArrayNonUniformIndexing )
+          && ( descriptorBindingUniformBufferUpdateAfterBind == rhs.descriptorBindingUniformBufferUpdateAfterBind )
+          && ( descriptorBindingSampledImageUpdateAfterBind == rhs.descriptorBindingSampledImageUpdateAfterBind )
+          && ( descriptorBindingStorageImageUpdateAfterBind == rhs.descriptorBindingStorageImageUpdateAfterBind )
+          && ( descriptorBindingStorageBufferUpdateAfterBind == rhs.descriptorBindingStorageBufferUpdateAfterBind )
+          && ( descriptorBindingUniformTexelBufferUpdateAfterBind == rhs.descriptorBindingUniformTexelBufferUpdateAfterBind )
+          && ( descriptorBindingStorageTexelBufferUpdateAfterBind == rhs.descriptorBindingStorageTexelBufferUpdateAfterBind )
+          && ( descriptorBindingUpdateUnusedWhilePending == rhs.descriptorBindingUpdateUnusedWhilePending )
+          && ( descriptorBindingPartiallyBound == rhs.descriptorBindingPartiallyBound )
+          && ( descriptorBindingVariableDescriptorCount == rhs.descriptorBindingVariableDescriptorCount )
+          && ( runtimeDescriptorArray == rhs.runtimeDescriptorArray );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorIndexingFeatures;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount = {};
+    VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceDescriptorIndexingFeatures>
+  {
+    using Type = PhysicalDeviceDescriptorIndexingFeatures;
+  };
+  using PhysicalDeviceDescriptorIndexingFeaturesEXT = PhysicalDeviceDescriptorIndexingFeatures;
+
+  struct PhysicalDeviceDescriptorIndexingProperties
+  {
+    using NativeType = VkPhysicalDeviceDescriptorIndexingProperties;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorIndexingProperties;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingProperties(uint32_t maxUpdateAfterBindDescriptorsInAllPools_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindSamplers_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments_ = {}, uint32_t maxPerStageUpdateAfterBindResources_ = {}, uint32_t maxDescriptorSetUpdateAfterBindSamplers_ = {}, uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers_ = {}, uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ = {}, uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers_ = {}, uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ = {}, uint32_t maxDescriptorSetUpdateAfterBindSampledImages_ = {}, uint32_t maxDescriptorSetUpdateAfterBindStorageImages_ = {}, uint32_t maxDescriptorSetUpdateAfterBindInputAttachments_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), maxUpdateAfterBindDescriptorsInAllPools( maxUpdateAfterBindDescriptorsInAllPools_ ), shaderUniformBufferArrayNonUniformIndexingNative( shaderUniformBufferArrayNonUniformIndexingNative_ ), shaderSampledImageArrayNonUniformIndexingNative( shaderSampledImageArrayNonUniformIndexingNative_ ), shaderStorageBufferArrayNonUniformIndexingNative( shaderStorageBufferArrayNonUniformIndexingNative_ ), shaderStorageImageArrayNonUniformIndexingNative( shaderStorageImageArrayNonUniformIndexingNative_ ), shaderInputAttachmentArrayNonUniformIndexingNative( shaderInputAttachmentArrayNonUniformIndexingNative_ ), robustBufferAccessUpdateAfterBind( robustBufferAccessUpdateAfterBind_ ), quadDivergentImplicitLod( quadDivergentImplicitLod_ ), maxPerStageDescriptorUpdateAfterBindSamplers( maxPerStageDescriptorUpdateAfterBindSamplers_ ), maxPerStageDescriptorUpdateAfterBindUniformBuffers( maxPerStageDescriptorUpdateAfterBindUniformBuffers_ ), maxPerStageDescriptorUpdateAfterBindStorageBuffers( maxPerStageDescriptorUpdateAfterBindStorageBuffers_ ), maxPerStageDescriptorUpdateAfterBindSampledImages( maxPerStageDescriptorUpdateAfterBindSampledImages_ ), maxPerStageDescriptorUpdateAfterBindStorageImages( maxPerStageDescriptorUpdateAfterBindStorageImages_ ), maxPerStageDescriptorUpdateAfterBindInputAttachments( maxPerStageDescriptorUpdateAfterBindInputAttachments_ ), maxPerStageUpdateAfterBindResources( maxPerStageUpdateAfterBindResources_ ), maxDescriptorSetUpdateAfterBindSamplers( maxDescriptorSetUpdateAfterBindSamplers_ ), maxDescriptorSetUpdateAfterBindUniformBuffers( maxDescriptorSetUpdateAfterBindUniformBuffers_ ), maxDescriptorSetUpdateAfterBindUniformBuffersDynamic( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ ), maxDescriptorSetUpdateAfterBindStorageBuffers( maxDescriptorSetUpdateAfterBindStorageBuffers_ ), maxDescriptorSetUpdateAfterBindStorageBuffersDynamic( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ ), 
+maxDescriptorSetUpdateAfterBindSampledImages( maxDescriptorSetUpdateAfterBindSampledImages_ ), maxDescriptorSetUpdateAfterBindStorageImages( maxDescriptorSetUpdateAfterBindStorageImages_ ), maxDescriptorSetUpdateAfterBindInputAttachments( maxDescriptorSetUpdateAfterBindInputAttachments_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingProperties( PhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceDescriptorIndexingProperties( VkPhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceDescriptorIndexingProperties( *reinterpret_cast<PhysicalDeviceDescriptorIndexingProperties const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceDescriptorIndexingProperties & operator=( PhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceDescriptorIndexingProperties & operator=( VkPhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingProperties const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceDescriptorIndexingProperties const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceDescriptorIndexingProperties*>( this );
+    }
+
+    operator VkPhysicalDeviceDescriptorIndexingProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceDescriptorIndexingProperties*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, maxUpdateAfterBindDescriptorsInAllPools, shaderUniformBufferArrayNonUniformIndexingNative, shaderSampledImageArrayNonUniformIndexingNative, shaderStorageBufferArrayNonUniformIndexingNative, shaderStorageImageArrayNonUniformIndexingNative, shaderInputAttachmentArrayNonUniformIndexingNative, robustBufferAccessUpdateAfterBind, quadDivergentImplicitLod, maxPerStageDescriptorUpdateAfterBindSamplers, maxPerStageDescriptorUpdateAfterBindUniformBuffers, maxPerStageDescriptorUpdateAfterBindStorageBuffers, maxPerStageDescriptorUpdateAfterBindSampledImages, maxPerStageDescriptorUpdateAfterBindStorageImages, maxPerStageDescriptorUpdateAfterBindInputAttachments, maxPerStageUpdateAfterBindResources, maxDescriptorSetUpdateAfterBindSamplers, maxDescriptorSetUpdateAfterBindUniformBuffers, maxDescriptorSetUpdateAfterBindUniformBuffersDynamic, maxDescriptorSetUpdateAfterBindStorageBuffers, maxDescriptorSetUpdateAfterBindStorageBuffersDynamic, maxDescriptorSetUpdateAfterBindSampledImages, maxDescriptorSetUpdateAfterBindStorageImages, maxDescriptorSetUpdateAfterBindInputAttachments );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceDescriptorIndexingProperties const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceDescriptorIndexingProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxUpdateAfterBindDescriptorsInAllPools == rhs.maxUpdateAfterBindDescriptorsInAllPools )
+          && ( shaderUniformBufferArrayNonUniformIndexingNative == rhs.shaderUniformBufferArrayNonUniformIndexingNative )
+          && ( shaderSampledImageArrayNonUniformIndexingNative == rhs.shaderSampledImageArrayNonUniformIndexingNative )
+          && ( shaderStorageBufferArrayNonUniformIndexingNative == rhs.shaderStorageBufferArrayNonUniformIndexingNative )
+          && ( shaderStorageImageArrayNonUniformIndexingNative == rhs.shaderStorageImageArrayNonUniformIndexingNative )
+          && ( shaderInputAttachmentArrayNonUniformIndexingNative == rhs.shaderInputAttachmentArrayNonUniformIndexingNative )
+          && ( robustBufferAccessUpdateAfterBind == rhs.robustBufferAccessUpdateAfterBind )
+          && ( quadDivergentImplicitLod == rhs.quadDivergentImplicitLod )
+          && ( maxPerStageDescriptorUpdateAfterBindSamplers == rhs.maxPerStageDescriptorUpdateAfterBindSamplers )
+          && ( maxPerStageDescriptorUpdateAfterBindUniformBuffers == rhs.maxPerStageDescriptorUpdateAfterBindUniformBuffers )
+          && ( maxPerStageDescriptorUpdateAfterBindStorageBuffers == rhs.maxPerStageDescriptorUpdateAfterBindStorageBuffers )
+          && ( maxPerStageDescriptorUpdateAfterBindSampledImages == rhs.maxPerStageDescriptorUpdateAfterBindSampledImages )
+          && ( maxPerStageDescriptorUpdateAfterBindStorageImages == rhs.maxPerStageDescriptorUpdateAfterBindStorageImages )
+          && ( maxPerStageDescriptorUpdateAfterBindInputAttachments == rhs.maxPerStageDescriptorUpdateAfterBindInputAttachments )
+          && ( maxPerStageUpdateAfterBindResources == rhs.maxPerStageUpdateAfterBindResources )
+          && ( maxDescriptorSetUpdateAfterBindSamplers == rhs.maxDescriptorSetUpdateAfterBindSamplers )
+          && ( maxDescriptorSetUpdateAfterBindUniformBuffers == rhs.maxDescriptorSetUpdateAfterBindUniformBuffers )
+          && ( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic )
+          && ( maxDescriptorSetUpdateAfterBindStorageBuffers == rhs.maxDescriptorSetUpdateAfterBindStorageBuffers )
+          && ( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic )
+          && ( maxDescriptorSetUpdateAfterBindSampledImages == rhs.maxDescriptorSetUpdateAfterBindSampledImages )
+          && ( maxDescriptorSetUpdateAfterBindStorageImages == rhs.maxDescriptorSetUpdateAfterBindStorageImages )
+          && ( maxDescriptorSetUpdateAfterBindInputAttachments == rhs.maxDescriptorSetUpdateAfterBindInputAttachments );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceDescriptorIndexingProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorIndexingProperties;
+    void * pNext = {};
+    uint32_t maxUpdateAfterBindDescriptorsInAllPools = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative = {};
+    VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind = {};
+    VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod = {};
+    uint32_t maxPerStageDescriptorUpdateAfterBindSamplers = {};
+    uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers = {};
+    uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers = {};
+    uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages = {};
+    uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages = {};
+    uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments = {};
+    uint32_t maxPerStageUpdateAfterBindResources = {};
+    uint32_t maxDescriptorSetUpdateAfterBindSamplers = {};
+    uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers = {};
+    uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = {};
+    uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers = {};
+    uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = {};
+    uint32_t maxDescriptorSetUpdateAfterBindSampledImages = {};
+    uint32_t maxDescriptorSetUpdateAfterBindStorageImages = {};
+    uint32_t maxDescriptorSetUpdateAfterBindInputAttachments = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceDescriptorIndexingProperties>
+  {
+    using Type = PhysicalDeviceDescriptorIndexingProperties;
+  };
+  using PhysicalDeviceDescriptorIndexingPropertiesEXT = PhysicalDeviceDescriptorIndexingProperties;
+
+  struct PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE
+  {
+    using NativeType = VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorSetHostMappingFeaturesVALVE;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE(VULKAN_HPP_NAMESPACE::Bool32 descriptorSetHostMapping_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), descriptorSetHostMapping( descriptorSetHostMapping_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE( PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE( VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE( *reinterpret_cast<PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE & operator=( PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE & operator=( VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE & setDescriptorSetHostMapping( VULKAN_HPP_NAMESPACE::Bool32 descriptorSetHostMapping_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorSetHostMapping = descriptorSetHostMapping_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE*>( this );
+    }
+
+    operator VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, descriptorSetHostMapping );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( descriptorSetHostMapping == rhs.descriptorSetHostMapping );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorSetHostMappingFeaturesVALVE;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorSetHostMapping = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceDescriptorSetHostMappingFeaturesVALVE>
+  {
+    using Type = PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE;
+  };
+
+  struct PhysicalDeviceDeviceGeneratedCommandsFeaturesNV
+  {
+    using NativeType = VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), deviceGeneratedCommands( deviceGeneratedCommands_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( *reinterpret_cast<PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & operator=( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & operator=( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & setDeviceGeneratedCommands( VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceGeneratedCommands = deviceGeneratedCommands_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, deviceGeneratedCommands );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( deviceGeneratedCommands == rhs.deviceGeneratedCommands );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV>
+  {
+    using Type = PhysicalDeviceDeviceGeneratedCommandsFeaturesNV;
+  };
+
+  struct PhysicalDeviceDeviceGeneratedCommandsPropertiesNV
+  {
+    using NativeType = VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsPropertiesNV(uint32_t maxGraphicsShaderGroupCount_ = {}, uint32_t maxIndirectSequenceCount_ = {}, uint32_t maxIndirectCommandsTokenCount_ = {}, uint32_t maxIndirectCommandsStreamCount_ = {}, uint32_t maxIndirectCommandsTokenOffset_ = {}, uint32_t maxIndirectCommandsStreamStride_ = {}, uint32_t minSequencesCountBufferOffsetAlignment_ = {}, uint32_t minSequencesIndexBufferOffsetAlignment_ = {}, uint32_t minIndirectCommandsBufferOffsetAlignment_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), maxGraphicsShaderGroupCount( maxGraphicsShaderGroupCount_ ), maxIndirectSequenceCount( maxIndirectSequenceCount_ ), maxIndirectCommandsTokenCount( maxIndirectCommandsTokenCount_ ), maxIndirectCommandsStreamCount( maxIndirectCommandsStreamCount_ ), maxIndirectCommandsTokenOffset( maxIndirectCommandsTokenOffset_ ), maxIndirectCommandsStreamStride( maxIndirectCommandsStreamStride_ ), minSequencesCountBufferOffsetAlignment( minSequencesCountBufferOffsetAlignment_ ), minSequencesIndexBufferOffsetAlignment( minSequencesIndexBufferOffsetAlignment_ ), minIndirectCommandsBufferOffsetAlignment( minIndirectCommandsBufferOffsetAlignment_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( *reinterpret_cast<PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceDeviceGeneratedCommandsPropertiesNV & operator=( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceDeviceGeneratedCommandsPropertiesNV & operator=( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, maxGraphicsShaderGroupCount, maxIndirectSequenceCount, maxIndirectCommandsTokenCount, maxIndirectCommandsStreamCount, maxIndirectCommandsTokenOffset, maxIndirectCommandsStreamStride, minSequencesCountBufferOffsetAlignment, minSequencesIndexBufferOffsetAlignment, minIndirectCommandsBufferOffsetAlignment );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxGraphicsShaderGroupCount == rhs.maxGraphicsShaderGroupCount )
+          && ( maxIndirectSequenceCount == rhs.maxIndirectSequenceCount )
+          && ( maxIndirectCommandsTokenCount == rhs.maxIndirectCommandsTokenCount )
+          && ( maxIndirectCommandsStreamCount == rhs.maxIndirectCommandsStreamCount )
+          && ( maxIndirectCommandsTokenOffset == rhs.maxIndirectCommandsTokenOffset )
+          && ( maxIndirectCommandsStreamStride == rhs.maxIndirectCommandsStreamStride )
+          && ( minSequencesCountBufferOffsetAlignment == rhs.minSequencesCountBufferOffsetAlignment )
+          && ( minSequencesIndexBufferOffsetAlignment == rhs.minSequencesIndexBufferOffsetAlignment )
+          && ( minIndirectCommandsBufferOffsetAlignment == rhs.minIndirectCommandsBufferOffsetAlignment );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV;
+    void * pNext = {};
+    uint32_t maxGraphicsShaderGroupCount = {};
+    uint32_t maxIndirectSequenceCount = {};
+    uint32_t maxIndirectCommandsTokenCount = {};
+    uint32_t maxIndirectCommandsStreamCount = {};
+    uint32_t maxIndirectCommandsTokenOffset = {};
+    uint32_t maxIndirectCommandsStreamStride = {};
+    uint32_t minSequencesCountBufferOffsetAlignment = {};
+    uint32_t minSequencesIndexBufferOffsetAlignment = {};
+    uint32_t minIndirectCommandsBufferOffsetAlignment = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV>
+  {
+    using Type = PhysicalDeviceDeviceGeneratedCommandsPropertiesNV;
+  };
+
+  struct PhysicalDeviceDeviceMemoryReportFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceDeviceMemoryReportFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDeviceMemoryReportFeaturesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceMemoryReportFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 deviceMemoryReport_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), deviceMemoryReport( deviceMemoryReport_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceMemoryReportFeaturesEXT( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceDeviceMemoryReportFeaturesEXT( VkPhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceDeviceMemoryReportFeaturesEXT( *reinterpret_cast<PhysicalDeviceDeviceMemoryReportFeaturesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceDeviceMemoryReportFeaturesEXT & operator=( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceDeviceMemoryReportFeaturesEXT & operator=( VkPhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceMemoryReportFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceMemoryReportFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceMemoryReportFeaturesEXT & setDeviceMemoryReport( VULKAN_HPP_NAMESPACE::Bool32 deviceMemoryReport_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceMemoryReport = deviceMemoryReport_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceDeviceMemoryReportFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceDeviceMemoryReportFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceDeviceMemoryReportFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceDeviceMemoryReportFeaturesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, deviceMemoryReport );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( deviceMemoryReport == rhs.deviceMemoryReport );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceMemoryReportFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 deviceMemoryReport = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceDeviceMemoryReportFeaturesEXT>
+  {
+    using Type = PhysicalDeviceDeviceMemoryReportFeaturesEXT;
+  };
+
+  struct PhysicalDeviceDiagnosticsConfigFeaturesNV
+  {
+    using NativeType = VkPhysicalDeviceDiagnosticsConfigFeaturesNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceDiagnosticsConfigFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 diagnosticsConfig_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), diagnosticsConfig( diagnosticsConfig_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceDiagnosticsConfigFeaturesNV( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceDiagnosticsConfigFeaturesNV( VkPhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceDiagnosticsConfigFeaturesNV( *reinterpret_cast<PhysicalDeviceDiagnosticsConfigFeaturesNV const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceDiagnosticsConfigFeaturesNV & operator=( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceDiagnosticsConfigFeaturesNV & operator=( VkPhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDiagnosticsConfigFeaturesNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDiagnosticsConfigFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDiagnosticsConfigFeaturesNV & setDiagnosticsConfig( VULKAN_HPP_NAMESPACE::Bool32 diagnosticsConfig_ ) VULKAN_HPP_NOEXCEPT
+    {
+      diagnosticsConfig = diagnosticsConfig_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceDiagnosticsConfigFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceDiagnosticsConfigFeaturesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceDiagnosticsConfigFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceDiagnosticsConfigFeaturesNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, diagnosticsConfig );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceDiagnosticsConfigFeaturesNV const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( diagnosticsConfig == rhs.diagnosticsConfig );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 diagnosticsConfig = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV>
+  {
+    using Type = PhysicalDeviceDiagnosticsConfigFeaturesNV;
+  };
+
+  // C++ wrapper for VkPhysicalDeviceDiscardRectanglePropertiesEXT
+  // (VK_EXT_discard_rectangles). This is a properties (output) struct — note
+  // that no setters are generated for it. The conversion operators below
+  // reinterpret_cast between the C and C++ types, which presumes identical
+  // layout of the two structs.
+  struct PhysicalDeviceDiscardRectanglePropertiesEXT
+  {
+    using NativeType = VkPhysicalDeviceDiscardRectanglePropertiesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceDiscardRectanglePropertiesEXT(uint32_t maxDiscardRectangles_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), maxDiscardRectangles( maxDiscardRectangles_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceDiscardRectanglePropertiesEXT( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Implicit conversion from the C struct, implemented as a layout-pun via
+    // reinterpret_cast followed by the defaulted copy constructor.
+    PhysicalDeviceDiscardRectanglePropertiesEXT( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceDiscardRectanglePropertiesEXT( *reinterpret_cast<PhysicalDeviceDiscardRectanglePropertiesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceDiscardRectanglePropertiesEXT & operator=( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assignment from the C struct, via the same reinterpret_cast layout-pun.
+    PhysicalDeviceDiscardRectanglePropertiesEXT & operator=( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDiscardRectanglePropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+
+    // Zero-cost views of this object as the corresponding C struct, so it can
+    // be passed directly to the C API.
+    operator VkPhysicalDeviceDiscardRectanglePropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceDiscardRectanglePropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceDiscardRectanglePropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceDiscardRectanglePropertiesEXT*>( this );
+    }
+
+    // Tuple-of-references view over all members; used by operator== below when
+    // VULKAN_HPP_USE_REFLECT is defined. The explicit tuple type is spelled out
+    // only for pre-C++14 compilers that lack auto return-type deduction.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, maxDiscardRectangles );
+    }
+#endif
+
+
+    // Member-wise equality: defaulted <=> when available, otherwise a manual
+    // (or reflect()-based) field comparison. Note pNext is compared by pointer.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceDiscardRectanglePropertiesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxDiscardRectangles == rhs.maxDiscardRectangles );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT;
+    void * pNext = {};
+    uint32_t maxDiscardRectangles = {};
+
+  };
+
+  // Trait specialization: maps the StructureType enumerant back to this struct.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT>
+  {
+    using Type = PhysicalDeviceDiscardRectanglePropertiesEXT;
+  };
+
+  // C++ wrapper for VkPhysicalDeviceDriverProperties (core Vulkan 1.2,
+  // promoted from VK_KHR_driver_properties — see the KHR alias below). This is
+  // a properties (output) struct — note that no setters are generated for it.
+  // Layout-compatible with the C struct via the reinterpret_cast operators.
+  struct PhysicalDeviceDriverProperties
+  {
+    using NativeType = VkPhysicalDeviceDriverProperties;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDriverProperties;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDriverProperties(VULKAN_HPP_NAMESPACE::DriverId driverID_ = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary, std::array<char,VK_MAX_DRIVER_NAME_SIZE> const & driverName_ = {}, std::array<char,VK_MAX_DRIVER_INFO_SIZE> const & driverInfo_ = {}, VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), driverID( driverID_ ), driverName( driverName_ ), driverInfo( driverInfo_ ), conformanceVersion( conformanceVersion_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDriverProperties( PhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Implicit conversion from the C struct (reinterpret_cast layout-pun).
+    PhysicalDeviceDriverProperties( VkPhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceDriverProperties( *reinterpret_cast<PhysicalDeviceDriverProperties const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceDriverProperties & operator=( PhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assignment from the C struct, via the same reinterpret_cast layout-pun.
+    PhysicalDeviceDriverProperties & operator=( VkPhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverProperties const *>( &rhs );
+      return *this;
+    }
+
+
+    // Zero-cost views of this object as the corresponding C struct.
+    operator VkPhysicalDeviceDriverProperties const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceDriverProperties*>( this );
+    }
+
+    operator VkPhysicalDeviceDriverProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceDriverProperties*>( this );
+    }
+
+    // Tuple-of-references view over all members; used by operator== below when
+    // VULKAN_HPP_USE_REFLECT is defined. Note the char arrays are exposed as
+    // ArrayWrapper1D in the pre-C++14 explicit tuple type.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DriverId const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_NAME_SIZE> const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_INFO_SIZE> const &, VULKAN_HPP_NAMESPACE::ConformanceVersion const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, driverID, driverName, driverInfo, conformanceVersion );
+    }
+#endif
+
+
+    // Member-wise equality: defaulted <=> when available, otherwise a manual
+    // (or reflect()-based) field comparison. Note pNext is compared by pointer.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceDriverProperties const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceDriverProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( driverID == rhs.driverID )
+          && ( driverName == rhs.driverName )
+          && ( driverInfo == rhs.driverInfo )
+          && ( conformanceVersion == rhs.conformanceVersion );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceDriverProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDriverProperties;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DriverId driverID = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary;
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_NAME_SIZE> driverName = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_INFO_SIZE> driverInfo = {};
+    VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion = {};
+
+  };
+
+  // Trait specialization: maps the StructureType enumerant back to this struct.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceDriverProperties>
+  {
+    using Type = PhysicalDeviceDriverProperties;
+  };
+  // Backward-compatible alias from the pre-promotion KHR extension name.
+  using PhysicalDeviceDriverPropertiesKHR = PhysicalDeviceDriverProperties;
+
+  // C++ wrapper for VkPhysicalDeviceDrmPropertiesEXT
+  // (VK_EXT_physical_device_drm). This is a properties (output) struct — note
+  // that no setters are generated for it. Layout-compatible with the C struct
+  // via the reinterpret_cast conversion operators below.
+  struct PhysicalDeviceDrmPropertiesEXT
+  {
+    using NativeType = VkPhysicalDeviceDrmPropertiesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDrmPropertiesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceDrmPropertiesEXT(VULKAN_HPP_NAMESPACE::Bool32 hasPrimary_ = {}, VULKAN_HPP_NAMESPACE::Bool32 hasRender_ = {}, int64_t primaryMajor_ = {}, int64_t primaryMinor_ = {}, int64_t renderMajor_ = {}, int64_t renderMinor_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), hasPrimary( hasPrimary_ ), hasRender( hasRender_ ), primaryMajor( primaryMajor_ ), primaryMinor( primaryMinor_ ), renderMajor( renderMajor_ ), renderMinor( renderMinor_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceDrmPropertiesEXT( PhysicalDeviceDrmPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Implicit conversion from the C struct (reinterpret_cast layout-pun).
+    PhysicalDeviceDrmPropertiesEXT( VkPhysicalDeviceDrmPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceDrmPropertiesEXT( *reinterpret_cast<PhysicalDeviceDrmPropertiesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceDrmPropertiesEXT & operator=( PhysicalDeviceDrmPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assignment from the C struct, via the same reinterpret_cast layout-pun.
+    PhysicalDeviceDrmPropertiesEXT & operator=( VkPhysicalDeviceDrmPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDrmPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+
+    // Zero-cost views of this object as the corresponding C struct.
+    operator VkPhysicalDeviceDrmPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceDrmPropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceDrmPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceDrmPropertiesEXT*>( this );
+    }
+
+    // Tuple-of-references view over all members; used by operator== below when
+    // VULKAN_HPP_USE_REFLECT is defined.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, int64_t const &, int64_t const &, int64_t const &, int64_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, hasPrimary, hasRender, primaryMajor, primaryMinor, renderMajor, renderMinor );
+    }
+#endif
+
+
+    // Member-wise equality: defaulted <=> when available, otherwise a manual
+    // (or reflect()-based) field comparison. Note pNext is compared by pointer.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceDrmPropertiesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceDrmPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( hasPrimary == rhs.hasPrimary )
+          && ( hasRender == rhs.hasRender )
+          && ( primaryMajor == rhs.primaryMajor )
+          && ( primaryMinor == rhs.primaryMinor )
+          && ( renderMajor == rhs.renderMajor )
+          && ( renderMinor == rhs.renderMinor );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceDrmPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDrmPropertiesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 hasPrimary = {};
+    VULKAN_HPP_NAMESPACE::Bool32 hasRender = {};
+    int64_t primaryMajor = {};
+    int64_t primaryMinor = {};
+    int64_t renderMajor = {};
+    int64_t renderMinor = {};
+
+  };
+
+  // Trait specialization: maps the StructureType enumerant back to this struct.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceDrmPropertiesEXT>
+  {
+    using Type = PhysicalDeviceDrmPropertiesEXT;
+  };
+
+  // C++ wrapper for VkPhysicalDeviceDynamicRenderingFeatures (core Vulkan 1.3,
+  // promoted from VK_KHR_dynamic_rendering — see the KHR alias below). As a
+  // features struct it also gets chainable setters (set* returns *this) for
+  // building the struct fluently. Layout-compatible with the C struct via the
+  // reinterpret_cast conversion operators below.
+  struct PhysicalDeviceDynamicRenderingFeatures
+  {
+    using NativeType = VkPhysicalDeviceDynamicRenderingFeatures;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDynamicRenderingFeatures;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceDynamicRenderingFeatures(VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), dynamicRendering( dynamicRendering_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceDynamicRenderingFeatures( PhysicalDeviceDynamicRenderingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Implicit conversion from the C struct (reinterpret_cast layout-pun).
+    PhysicalDeviceDynamicRenderingFeatures( VkPhysicalDeviceDynamicRenderingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceDynamicRenderingFeatures( *reinterpret_cast<PhysicalDeviceDynamicRenderingFeatures const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceDynamicRenderingFeatures & operator=( PhysicalDeviceDynamicRenderingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assignment from the C struct, via the same reinterpret_cast layout-pun.
+    PhysicalDeviceDynamicRenderingFeatures & operator=( VkPhysicalDeviceDynamicRenderingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingFeatures const *>( &rhs );
+      return *this;
+    }
+
+    // Chainable setters (each returns *this for fluent use).
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDynamicRenderingFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDynamicRenderingFeatures & setDynamicRendering( VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dynamicRendering = dynamicRendering_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views of this object as the corresponding C struct.
+    operator VkPhysicalDeviceDynamicRenderingFeatures const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceDynamicRenderingFeatures*>( this );
+    }
+
+    operator VkPhysicalDeviceDynamicRenderingFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceDynamicRenderingFeatures*>( this );
+    }
+
+    // Tuple-of-references view over all members; used by operator== below when
+    // VULKAN_HPP_USE_REFLECT is defined.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, dynamicRendering );
+    }
+#endif
+
+
+    // Member-wise equality: defaulted <=> when available, otherwise a manual
+    // (or reflect()-based) field comparison. Note pNext is compared by pointer.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceDynamicRenderingFeatures const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceDynamicRenderingFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( dynamicRendering == rhs.dynamicRendering );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceDynamicRenderingFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDynamicRenderingFeatures;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering = {};
+
+  };
+
+  // Trait specialization: maps the StructureType enumerant back to this struct.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceDynamicRenderingFeatures>
+  {
+    using Type = PhysicalDeviceDynamicRenderingFeatures;
+  };
+  // Backward-compatible alias from the pre-promotion KHR extension name.
+  using PhysicalDeviceDynamicRenderingFeaturesKHR = PhysicalDeviceDynamicRenderingFeatures;
+
+  // C++ wrapper for VkPhysicalDeviceExclusiveScissorFeaturesNV
+  // (VK_NV_scissor_exclusive). As a features struct it gets chainable setters
+  // (set* returns *this). Layout-compatible with the C struct via the
+  // reinterpret_cast conversion operators below.
+  struct PhysicalDeviceExclusiveScissorFeaturesNV
+  {
+    using NativeType = VkPhysicalDeviceExclusiveScissorFeaturesNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceExclusiveScissorFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), exclusiveScissor( exclusiveScissor_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExclusiveScissorFeaturesNV( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Implicit conversion from the C struct (reinterpret_cast layout-pun).
+    PhysicalDeviceExclusiveScissorFeaturesNV( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceExclusiveScissorFeaturesNV( *reinterpret_cast<PhysicalDeviceExclusiveScissorFeaturesNV const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceExclusiveScissorFeaturesNV & operator=( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assignment from the C struct, via the same reinterpret_cast layout-pun.
+    PhysicalDeviceExclusiveScissorFeaturesNV & operator=( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExclusiveScissorFeaturesNV const *>( &rhs );
+      return *this;
+    }
+
+    // Chainable setters (each returns *this for fluent use).
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExclusiveScissorFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExclusiveScissorFeaturesNV & setExclusiveScissor( VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      exclusiveScissor = exclusiveScissor_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views of this object as the corresponding C struct.
+    operator VkPhysicalDeviceExclusiveScissorFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceExclusiveScissorFeaturesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceExclusiveScissorFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceExclusiveScissorFeaturesNV*>( this );
+    }
+
+    // Tuple-of-references view over all members; used by operator== below when
+    // VULKAN_HPP_USE_REFLECT is defined.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, exclusiveScissor );
+    }
+#endif
+
+
+    // Member-wise equality: defaulted <=> when available, otherwise a manual
+    // (or reflect()-based) field comparison. Note pNext is compared by pointer.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceExclusiveScissorFeaturesNV const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( exclusiveScissor == rhs.exclusiveScissor );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor = {};
+
+  };
+
+  // Trait specialization: maps the StructureType enumerant back to this struct.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV>
+  {
+    using Type = PhysicalDeviceExclusiveScissorFeaturesNV;
+  };
+
+  // C++ wrapper for VkPhysicalDeviceExtendedDynamicState2FeaturesEXT
+  // (VK_EXT_extended_dynamic_state2). As a features struct it gets chainable
+  // setters (set* returns *this). Layout-compatible with the C struct via the
+  // reinterpret_cast conversion operators below.
+  struct PhysicalDeviceExtendedDynamicState2FeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceExtendedDynamicState2FeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExtendedDynamicState2FeaturesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicState2FeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2_ = {}, VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2LogicOp_ = {}, VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2PatchControlPoints_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), extendedDynamicState2( extendedDynamicState2_ ), extendedDynamicState2LogicOp( extendedDynamicState2LogicOp_ ), extendedDynamicState2PatchControlPoints( extendedDynamicState2PatchControlPoints_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicState2FeaturesEXT( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Implicit conversion from the C struct (reinterpret_cast layout-pun).
+    PhysicalDeviceExtendedDynamicState2FeaturesEXT( VkPhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceExtendedDynamicState2FeaturesEXT( *reinterpret_cast<PhysicalDeviceExtendedDynamicState2FeaturesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceExtendedDynamicState2FeaturesEXT & operator=( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assignment from the C struct, via the same reinterpret_cast layout-pun.
+    PhysicalDeviceExtendedDynamicState2FeaturesEXT & operator=( VkPhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState2FeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+    // Chainable setters (each returns *this for fluent use).
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState2FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState2FeaturesEXT & setExtendedDynamicState2( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extendedDynamicState2 = extendedDynamicState2_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState2FeaturesEXT & setExtendedDynamicState2LogicOp( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2LogicOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extendedDynamicState2LogicOp = extendedDynamicState2LogicOp_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState2FeaturesEXT & setExtendedDynamicState2PatchControlPoints( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2PatchControlPoints_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extendedDynamicState2PatchControlPoints = extendedDynamicState2PatchControlPoints_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views of this object as the corresponding C struct.
+    operator VkPhysicalDeviceExtendedDynamicState2FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceExtendedDynamicState2FeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceExtendedDynamicState2FeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceExtendedDynamicState2FeaturesEXT*>( this );
+    }
+
+    // Tuple-of-references view over all members; used by operator== below when
+    // VULKAN_HPP_USE_REFLECT is defined.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, extendedDynamicState2, extendedDynamicState2LogicOp, extendedDynamicState2PatchControlPoints );
+    }
+#endif
+
+
+    // Member-wise equality: defaulted <=> when available, otherwise a manual
+    // (or reflect()-based) field comparison. Note pNext is compared by pointer.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( extendedDynamicState2 == rhs.extendedDynamicState2 )
+          && ( extendedDynamicState2LogicOp == rhs.extendedDynamicState2LogicOp )
+          && ( extendedDynamicState2PatchControlPoints == rhs.extendedDynamicState2PatchControlPoints );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExtendedDynamicState2FeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2LogicOp = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2PatchControlPoints = {};
+
+  };
+
+  // Trait specialization: maps the StructureType enumerant back to this struct.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceExtendedDynamicState2FeaturesEXT>
+  {
+    using Type = PhysicalDeviceExtendedDynamicState2FeaturesEXT;
+  };
+
+  struct PhysicalDeviceExtendedDynamicState3FeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceExtendedDynamicState3FeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExtendedDynamicState3FeaturesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicState3FeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3TessellationDomainOrigin_ = {}, VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3DepthClampEnable_ = {}, VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3PolygonMode_ = {}, VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RasterizationSamples_ = {}, VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3SampleMask_ = {}, VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3AlphaToCoverageEnable_ = {}, VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3AlphaToOneEnable_ = {}, VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3LogicOpEnable_ = {}, VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorBlendEnable_ = {}, VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorBlendEquation_ = {}, VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorWriteMask_ = {}, VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RasterizationStream_ = {}, VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ConservativeRasterizationMode_ = {}, VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ExtraPrimitiveOverestimationSize_ = {}, VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3DepthClipEnable_ = {}, VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3SampleLocationsEnable_ = {}, VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorBlendAdvanced_ = {}, VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ProvokingVertexMode_ = {}, VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3LineRasterizationMode_ = {}, VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3LineStippleEnable_ = {}, VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3DepthClipNegativeOneToOne_ = {}, VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ViewportWScalingEnable_ = {}, VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ViewportSwizzle_ = {}, VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageToColorEnable_ = {}, VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageToColorLocation_ = {}, 
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageModulationMode_ = {}, VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageModulationTableEnable_ = {}, VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageModulationTable_ = {}, VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageReductionMode_ = {}, VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RepresentativeFragmentTestEnable_ = {}, VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ShadingRateImageEnable_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), extendedDynamicState3TessellationDomainOrigin( extendedDynamicState3TessellationDomainOrigin_ ), extendedDynamicState3DepthClampEnable( extendedDynamicState3DepthClampEnable_ ), extendedDynamicState3PolygonMode( extendedDynamicState3PolygonMode_ ), extendedDynamicState3RasterizationSamples( extendedDynamicState3RasterizationSamples_ ), extendedDynamicState3SampleMask( extendedDynamicState3SampleMask_ ), extendedDynamicState3AlphaToCoverageEnable( extendedDynamicState3AlphaToCoverageEnable_ ), extendedDynamicState3AlphaToOneEnable( extendedDynamicState3AlphaToOneEnable_ ), extendedDynamicState3LogicOpEnable( extendedDynamicState3LogicOpEnable_ ), extendedDynamicState3ColorBlendEnable( extendedDynamicState3ColorBlendEnable_ ), extendedDynamicState3ColorBlendEquation( extendedDynamicState3ColorBlendEquation_ ), extendedDynamicState3ColorWriteMask( extendedDynamicState3ColorWriteMask_ ), extendedDynamicState3RasterizationStream( extendedDynamicState3RasterizationStream_ ), extendedDynamicState3ConservativeRasterizationMode( extendedDynamicState3ConservativeRasterizationMode_ ), extendedDynamicState3ExtraPrimitiveOverestimationSize( extendedDynamicState3ExtraPrimitiveOverestimationSize_ ), extendedDynamicState3DepthClipEnable( extendedDynamicState3DepthClipEnable_ ), extendedDynamicState3SampleLocationsEnable( extendedDynamicState3SampleLocationsEnable_ ), extendedDynamicState3ColorBlendAdvanced( extendedDynamicState3ColorBlendAdvanced_ ), extendedDynamicState3ProvokingVertexMode( extendedDynamicState3ProvokingVertexMode_ ), extendedDynamicState3LineRasterizationMode( extendedDynamicState3LineRasterizationMode_ ), extendedDynamicState3LineStippleEnable( extendedDynamicState3LineStippleEnable_ ), extendedDynamicState3DepthClipNegativeOneToOne( extendedDynamicState3DepthClipNegativeOneToOne_ ), extendedDynamicState3ViewportWScalingEnable( extendedDynamicState3ViewportWScalingEnable_ ), extendedDynamicState3ViewportSwizzle( 
extendedDynamicState3ViewportSwizzle_ ), extendedDynamicState3CoverageToColorEnable( extendedDynamicState3CoverageToColorEnable_ ), extendedDynamicState3CoverageToColorLocation( extendedDynamicState3CoverageToColorLocation_ ), extendedDynamicState3CoverageModulationMode( extendedDynamicState3CoverageModulationMode_ ), extendedDynamicState3CoverageModulationTableEnable( extendedDynamicState3CoverageModulationTableEnable_ ), extendedDynamicState3CoverageModulationTable( extendedDynamicState3CoverageModulationTable_ ), extendedDynamicState3CoverageReductionMode( extendedDynamicState3CoverageReductionMode_ ), extendedDynamicState3RepresentativeFragmentTestEnable( extendedDynamicState3RepresentativeFragmentTestEnable_ ), extendedDynamicState3ShadingRateImageEnable( extendedDynamicState3ShadingRateImageEnable_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicState3FeaturesEXT( PhysicalDeviceExtendedDynamicState3FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceExtendedDynamicState3FeaturesEXT( VkPhysicalDeviceExtendedDynamicState3FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceExtendedDynamicState3FeaturesEXT( *reinterpret_cast<PhysicalDeviceExtendedDynamicState3FeaturesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceExtendedDynamicState3FeaturesEXT & operator=( PhysicalDeviceExtendedDynamicState3FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceExtendedDynamicState3FeaturesEXT & operator=( VkPhysicalDeviceExtendedDynamicState3FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3FeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3TessellationDomainOrigin( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3TessellationDomainOrigin_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extendedDynamicState3TessellationDomainOrigin = extendedDynamicState3TessellationDomainOrigin_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3DepthClampEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3DepthClampEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extendedDynamicState3DepthClampEnable = extendedDynamicState3DepthClampEnable_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3PolygonMode( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3PolygonMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extendedDynamicState3PolygonMode = extendedDynamicState3PolygonMode_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3RasterizationSamples( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RasterizationSamples_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extendedDynamicState3RasterizationSamples = extendedDynamicState3RasterizationSamples_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3SampleMask( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3SampleMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extendedDynamicState3SampleMask = extendedDynamicState3SampleMask_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3AlphaToCoverageEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3AlphaToCoverageEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extendedDynamicState3AlphaToCoverageEnable = extendedDynamicState3AlphaToCoverageEnable_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3AlphaToOneEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3AlphaToOneEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extendedDynamicState3AlphaToOneEnable = extendedDynamicState3AlphaToOneEnable_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3LogicOpEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3LogicOpEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extendedDynamicState3LogicOpEnable = extendedDynamicState3LogicOpEnable_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3ColorBlendEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorBlendEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extendedDynamicState3ColorBlendEnable = extendedDynamicState3ColorBlendEnable_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3ColorBlendEquation( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorBlendEquation_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extendedDynamicState3ColorBlendEquation = extendedDynamicState3ColorBlendEquation_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3ColorWriteMask( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorWriteMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extendedDynamicState3ColorWriteMask = extendedDynamicState3ColorWriteMask_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3RasterizationStream( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RasterizationStream_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extendedDynamicState3RasterizationStream = extendedDynamicState3RasterizationStream_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3ConservativeRasterizationMode( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ConservativeRasterizationMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extendedDynamicState3ConservativeRasterizationMode = extendedDynamicState3ConservativeRasterizationMode_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3ExtraPrimitiveOverestimationSize( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ExtraPrimitiveOverestimationSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extendedDynamicState3ExtraPrimitiveOverestimationSize = extendedDynamicState3ExtraPrimitiveOverestimationSize_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3DepthClipEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3DepthClipEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extendedDynamicState3DepthClipEnable = extendedDynamicState3DepthClipEnable_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3SampleLocationsEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3SampleLocationsEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extendedDynamicState3SampleLocationsEnable = extendedDynamicState3SampleLocationsEnable_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3ColorBlendAdvanced( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorBlendAdvanced_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extendedDynamicState3ColorBlendAdvanced = extendedDynamicState3ColorBlendAdvanced_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3ProvokingVertexMode( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ProvokingVertexMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extendedDynamicState3ProvokingVertexMode = extendedDynamicState3ProvokingVertexMode_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3LineRasterizationMode( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3LineRasterizationMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extendedDynamicState3LineRasterizationMode = extendedDynamicState3LineRasterizationMode_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3LineStippleEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3LineStippleEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extendedDynamicState3LineStippleEnable = extendedDynamicState3LineStippleEnable_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3DepthClipNegativeOneToOne( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3DepthClipNegativeOneToOne_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extendedDynamicState3DepthClipNegativeOneToOne = extendedDynamicState3DepthClipNegativeOneToOne_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3ViewportWScalingEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ViewportWScalingEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extendedDynamicState3ViewportWScalingEnable = extendedDynamicState3ViewportWScalingEnable_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3ViewportSwizzle( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ViewportSwizzle_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extendedDynamicState3ViewportSwizzle = extendedDynamicState3ViewportSwizzle_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3CoverageToColorEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageToColorEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extendedDynamicState3CoverageToColorEnable = extendedDynamicState3CoverageToColorEnable_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3CoverageToColorLocation( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageToColorLocation_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extendedDynamicState3CoverageToColorLocation = extendedDynamicState3CoverageToColorLocation_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3CoverageModulationMode( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageModulationMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extendedDynamicState3CoverageModulationMode = extendedDynamicState3CoverageModulationMode_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3CoverageModulationTableEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageModulationTableEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extendedDynamicState3CoverageModulationTableEnable = extendedDynamicState3CoverageModulationTableEnable_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3CoverageModulationTable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageModulationTable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extendedDynamicState3CoverageModulationTable = extendedDynamicState3CoverageModulationTable_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3CoverageReductionMode( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageReductionMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extendedDynamicState3CoverageReductionMode = extendedDynamicState3CoverageReductionMode_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3RepresentativeFragmentTestEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RepresentativeFragmentTestEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extendedDynamicState3RepresentativeFragmentTestEnable = extendedDynamicState3RepresentativeFragmentTestEnable_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3ShadingRateImageEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ShadingRateImageEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extendedDynamicState3ShadingRateImageEnable = extendedDynamicState3ShadingRateImageEnable_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceExtendedDynamicState3FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceExtendedDynamicState3FeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceExtendedDynamicState3FeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceExtendedDynamicState3FeaturesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, extendedDynamicState3TessellationDomainOrigin, extendedDynamicState3DepthClampEnable, extendedDynamicState3PolygonMode, extendedDynamicState3RasterizationSamples, extendedDynamicState3SampleMask, extendedDynamicState3AlphaToCoverageEnable, extendedDynamicState3AlphaToOneEnable, extendedDynamicState3LogicOpEnable, extendedDynamicState3ColorBlendEnable, extendedDynamicState3ColorBlendEquation, extendedDynamicState3ColorWriteMask, extendedDynamicState3RasterizationStream, extendedDynamicState3ConservativeRasterizationMode, extendedDynamicState3ExtraPrimitiveOverestimationSize, extendedDynamicState3DepthClipEnable, extendedDynamicState3SampleLocationsEnable, extendedDynamicState3ColorBlendAdvanced, extendedDynamicState3ProvokingVertexMode, extendedDynamicState3LineRasterizationMode, extendedDynamicState3LineStippleEnable, extendedDynamicState3DepthClipNegativeOneToOne, extendedDynamicState3ViewportWScalingEnable, extendedDynamicState3ViewportSwizzle, extendedDynamicState3CoverageToColorEnable, extendedDynamicState3CoverageToColorLocation, extendedDynamicState3CoverageModulationMode, extendedDynamicState3CoverageModulationTableEnable, extendedDynamicState3CoverageModulationTable, extendedDynamicState3CoverageReductionMode, extendedDynamicState3RepresentativeFragmentTestEnable, extendedDynamicState3ShadingRateImageEnable );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceExtendedDynamicState3FeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceExtendedDynamicState3FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( extendedDynamicState3TessellationDomainOrigin == rhs.extendedDynamicState3TessellationDomainOrigin )
+          && ( extendedDynamicState3DepthClampEnable == rhs.extendedDynamicState3DepthClampEnable )
+          && ( extendedDynamicState3PolygonMode == rhs.extendedDynamicState3PolygonMode )
+          && ( extendedDynamicState3RasterizationSamples == rhs.extendedDynamicState3RasterizationSamples )
+          && ( extendedDynamicState3SampleMask == rhs.extendedDynamicState3SampleMask )
+          && ( extendedDynamicState3AlphaToCoverageEnable == rhs.extendedDynamicState3AlphaToCoverageEnable )
+          && ( extendedDynamicState3AlphaToOneEnable == rhs.extendedDynamicState3AlphaToOneEnable )
+          && ( extendedDynamicState3LogicOpEnable == rhs.extendedDynamicState3LogicOpEnable )
+          && ( extendedDynamicState3ColorBlendEnable == rhs.extendedDynamicState3ColorBlendEnable )
+          && ( extendedDynamicState3ColorBlendEquation == rhs.extendedDynamicState3ColorBlendEquation )
+          && ( extendedDynamicState3ColorWriteMask == rhs.extendedDynamicState3ColorWriteMask )
+          && ( extendedDynamicState3RasterizationStream == rhs.extendedDynamicState3RasterizationStream )
+          && ( extendedDynamicState3ConservativeRasterizationMode == rhs.extendedDynamicState3ConservativeRasterizationMode )
+          && ( extendedDynamicState3ExtraPrimitiveOverestimationSize == rhs.extendedDynamicState3ExtraPrimitiveOverestimationSize )
+          && ( extendedDynamicState3DepthClipEnable == rhs.extendedDynamicState3DepthClipEnable )
+          && ( extendedDynamicState3SampleLocationsEnable == rhs.extendedDynamicState3SampleLocationsEnable )
+          && ( extendedDynamicState3ColorBlendAdvanced == rhs.extendedDynamicState3ColorBlendAdvanced )
+          && ( extendedDynamicState3ProvokingVertexMode == rhs.extendedDynamicState3ProvokingVertexMode )
+          && ( extendedDynamicState3LineRasterizationMode == rhs.extendedDynamicState3LineRasterizationMode )
+          && ( extendedDynamicState3LineStippleEnable == rhs.extendedDynamicState3LineStippleEnable )
+          && ( extendedDynamicState3DepthClipNegativeOneToOne == rhs.extendedDynamicState3DepthClipNegativeOneToOne )
+          && ( extendedDynamicState3ViewportWScalingEnable == rhs.extendedDynamicState3ViewportWScalingEnable )
+          && ( extendedDynamicState3ViewportSwizzle == rhs.extendedDynamicState3ViewportSwizzle )
+          && ( extendedDynamicState3CoverageToColorEnable == rhs.extendedDynamicState3CoverageToColorEnable )
+          && ( extendedDynamicState3CoverageToColorLocation == rhs.extendedDynamicState3CoverageToColorLocation )
+          && ( extendedDynamicState3CoverageModulationMode == rhs.extendedDynamicState3CoverageModulationMode )
+          && ( extendedDynamicState3CoverageModulationTableEnable == rhs.extendedDynamicState3CoverageModulationTableEnable )
+          && ( extendedDynamicState3CoverageModulationTable == rhs.extendedDynamicState3CoverageModulationTable )
+          && ( extendedDynamicState3CoverageReductionMode == rhs.extendedDynamicState3CoverageReductionMode )
+          && ( extendedDynamicState3RepresentativeFragmentTestEnable == rhs.extendedDynamicState3RepresentativeFragmentTestEnable )
+          && ( extendedDynamicState3ShadingRateImageEnable == rhs.extendedDynamicState3ShadingRateImageEnable );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceExtendedDynamicState3FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExtendedDynamicState3FeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3TessellationDomainOrigin = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3DepthClampEnable = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3PolygonMode = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RasterizationSamples = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3SampleMask = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3AlphaToCoverageEnable = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3AlphaToOneEnable = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3LogicOpEnable = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorBlendEnable = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorBlendEquation = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorWriteMask = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RasterizationStream = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ConservativeRasterizationMode = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ExtraPrimitiveOverestimationSize = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3DepthClipEnable = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3SampleLocationsEnable = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorBlendAdvanced = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ProvokingVertexMode = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3LineRasterizationMode = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3LineStippleEnable = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3DepthClipNegativeOneToOne = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ViewportWScalingEnable = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ViewportSwizzle = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageToColorEnable = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageToColorLocation = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageModulationMode = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageModulationTableEnable = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageModulationTable = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageReductionMode = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RepresentativeFragmentTestEnable = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ShadingRateImageEnable = {};
+
+  };
+
+  // Compile-time mapping from the sType enumerant back to its C++ wrapper struct
+  // (consumed by sType-driven lookup, e.g. StructureChain — defined elsewhere in this header).
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceExtendedDynamicState3FeaturesEXT>
+  {
+    using Type = PhysicalDeviceExtendedDynamicState3FeaturesEXT;
+  };
+
+  // C++ wrapper for VkPhysicalDeviceExtendedDynamicState3PropertiesEXT.
+  // Layout-compatible with the C struct: the reinterpret_cast conversion operators below
+  // depend on the member order (sType, pNext, fields) matching the native definition exactly.
+  struct PhysicalDeviceExtendedDynamicState3PropertiesEXT
+  {
+    using NativeType = VkPhysicalDeviceExtendedDynamicState3PropertiesEXT;
+
+    // NOTE(review): allowDuplicate/structureType appear to feed the StructureChain/sType
+    // machinery defined elsewhere in this header — confirm against the full file.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExtendedDynamicState3PropertiesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; sType is fixed by the member initializer below and is not a parameter.
+VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicState3PropertiesEXT(VULKAN_HPP_NAMESPACE::Bool32 dynamicPrimitiveTopologyUnrestricted_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), dynamicPrimitiveTopologyUnrestricted( dynamicPrimitiveTopologyUnrestricted_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicState3PropertiesEXT( PhysicalDeviceExtendedDynamicState3PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the native C struct by reinterpreting it as the layout-compatible C++ type.
+    PhysicalDeviceExtendedDynamicState3PropertiesEXT( VkPhysicalDeviceExtendedDynamicState3PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceExtendedDynamicState3PropertiesEXT( *reinterpret_cast<PhysicalDeviceExtendedDynamicState3PropertiesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceExtendedDynamicState3PropertiesEXT & operator=( PhysicalDeviceExtendedDynamicState3PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the native C struct (same layout-compatibility assumption as above).
+    PhysicalDeviceExtendedDynamicState3PropertiesEXT & operator=( VkPhysicalDeviceExtendedDynamicState3PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3PropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each returns *this so calls can be chained.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3PropertiesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3PropertiesEXT & setDynamicPrimitiveTopologyUnrestricted( VULKAN_HPP_NAMESPACE::Bool32 dynamicPrimitiveTopologyUnrestricted_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dynamicPrimitiveTopologyUnrestricted = dynamicPrimitiveTopologyUnrestricted_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the native C struct (valid only because the layouts match).
+    operator VkPhysicalDeviceExtendedDynamicState3PropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceExtendedDynamicState3PropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceExtendedDynamicState3PropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceExtendedDynamicState3PropertiesEXT*>( this );
+    }
+
+    // reflect(): exposes all members as a tuple of references; used by operator== below.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, dynamicPrimitiveTopologyUnrestricted );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceExtendedDynamicState3PropertiesEXT const & ) const = default;
+#else
+    // Memberwise equality; pNext is compared as a raw pointer, the chain is not deep-compared.
+    bool operator==( PhysicalDeviceExtendedDynamicState3PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( dynamicPrimitiveTopologyUnrestricted == rhs.dynamicPrimitiveTopologyUnrestricted );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceExtendedDynamicState3PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // Members mirror the C struct field-for-field; sType must remain the first member.
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExtendedDynamicState3PropertiesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 dynamicPrimitiveTopologyUnrestricted = {};
+
+  };
+
+  // Compile-time mapping from the sType enumerant back to its C++ wrapper struct
+  // (consumed by sType-driven lookup, e.g. StructureChain — defined elsewhere in this header).
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceExtendedDynamicState3PropertiesEXT>
+  {
+    using Type = PhysicalDeviceExtendedDynamicState3PropertiesEXT;
+  };
+
+  // C++ wrapper for VkPhysicalDeviceExtendedDynamicStateFeaturesEXT.
+  // Layout-compatible with the C struct: the reinterpret_cast conversion operators below
+  // depend on the member order (sType, pNext, extendedDynamicState) matching the native definition.
+  struct PhysicalDeviceExtendedDynamicStateFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceExtendedDynamicStateFeaturesEXT;
+
+    // NOTE(review): allowDuplicate/structureType appear to feed the StructureChain/sType
+    // machinery defined elsewhere in this header — confirm against the full file.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; sType is fixed by the member initializer below and is not a parameter.
+VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicStateFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), extendedDynamicState( extendedDynamicState_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicStateFeaturesEXT( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the native C struct by reinterpreting it as the layout-compatible C++ type.
+    PhysicalDeviceExtendedDynamicStateFeaturesEXT( VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceExtendedDynamicStateFeaturesEXT( *reinterpret_cast<PhysicalDeviceExtendedDynamicStateFeaturesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceExtendedDynamicStateFeaturesEXT & operator=( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the native C struct (same layout-compatibility assumption as above).
+    PhysicalDeviceExtendedDynamicStateFeaturesEXT & operator=( VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicStateFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each returns *this so calls can be chained.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicStateFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicStateFeaturesEXT & setExtendedDynamicState( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extendedDynamicState = extendedDynamicState_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the native C struct (valid only because the layouts match).
+    operator VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceExtendedDynamicStateFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceExtendedDynamicStateFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceExtendedDynamicStateFeaturesEXT*>( this );
+    }
+
+    // reflect(): exposes all members as a tuple of references; used by operator== below.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, extendedDynamicState );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & ) const = default;
+#else
+    // Memberwise equality; pNext is compared as a raw pointer, the chain is not deep-compared.
+    bool operator==( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( extendedDynamicState == rhs.extendedDynamicState );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // Members mirror the C struct field-for-field; sType must remain the first member.
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState = {};
+
+  };
+
+  // Compile-time mapping from the sType enumerant back to its C++ wrapper struct
+  // (consumed by sType-driven lookup, e.g. StructureChain — defined elsewhere in this header).
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT>
+  {
+    using Type = PhysicalDeviceExtendedDynamicStateFeaturesEXT;
+  };
+
+  // C++ wrapper for VkPhysicalDeviceExternalBufferInfo (core 1.1; KHR alias below).
+  // Layout-compatible with the C struct: the reinterpret_cast conversion operators below
+  // depend on the member order (sType, pNext, flags, usage, handleType) matching the native definition.
+  struct PhysicalDeviceExternalBufferInfo
+  {
+    using NativeType = VkPhysicalDeviceExternalBufferInfo;
+
+    // NOTE(review): allowDuplicate/structureType appear to feed the StructureChain/sType
+    // machinery defined elsewhere in this header — confirm against the full file.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalBufferInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; handleType defaults to eOpaqueFd, matching the member initializer below.
+VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalBufferInfo(VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), usage( usage_ ), handleType( handleType_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalBufferInfo( PhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the native C struct by reinterpreting it as the layout-compatible C++ type.
+    PhysicalDeviceExternalBufferInfo( VkPhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceExternalBufferInfo( *reinterpret_cast<PhysicalDeviceExternalBufferInfo const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceExternalBufferInfo & operator=( PhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the native C struct (same layout-compatibility assumption as above).
+    PhysicalDeviceExternalBufferInfo & operator=( VkPhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each returns *this so calls can be chained. Note pNext is const void*
+    // here (input struct), unlike the output/query structs above.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalBufferInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalBufferInfo & setFlags( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalBufferInfo & setUsage( VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      usage = usage_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalBufferInfo & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the native C struct (valid only because the layouts match).
+    operator VkPhysicalDeviceExternalBufferInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo*>( this );
+    }
+
+    operator VkPhysicalDeviceExternalBufferInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceExternalBufferInfo*>( this );
+    }
+
+    // reflect(): exposes all members as a tuple of references; used by operator== below.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::BufferCreateFlags const &, VULKAN_HPP_NAMESPACE::BufferUsageFlags const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, usage, handleType );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceExternalBufferInfo const & ) const = default;
+#else
+    // Memberwise equality; pNext is compared as a raw pointer, the chain is not deep-compared.
+    bool operator==( PhysicalDeviceExternalBufferInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( usage == rhs.usage )
+          && ( handleType == rhs.handleType );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceExternalBufferInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // Members mirror the C struct field-for-field; sType must remain the first member.
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalBufferInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::BufferCreateFlags flags = {};
+    VULKAN_HPP_NAMESPACE::BufferUsageFlags usage = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
+
+  };
+
+  // Compile-time mapping from the sType enumerant back to its C++ wrapper struct
+  // (consumed by sType-driven lookup, e.g. StructureChain — defined elsewhere in this header).
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceExternalBufferInfo>
+  {
+    using Type = PhysicalDeviceExternalBufferInfo;
+  };
+  // Backward-compatibility alias: the struct was promoted from VK_KHR_external_memory_capabilities.
+  using PhysicalDeviceExternalBufferInfoKHR = PhysicalDeviceExternalBufferInfo;
+
+  struct PhysicalDeviceExternalFenceInfo
+  {
+    using NativeType = VkPhysicalDeviceExternalFenceInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalFenceInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFenceInfo(VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), handleType( handleType_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFenceInfo( PhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceExternalFenceInfo( VkPhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceExternalFenceInfo( *reinterpret_cast<PhysicalDeviceExternalFenceInfo const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceExternalFenceInfo & operator=( PhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceExternalFenceInfo & operator=( VkPhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalFenceInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalFenceInfo & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceExternalFenceInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo*>( this );
+    }
+
+    operator VkPhysicalDeviceExternalFenceInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceExternalFenceInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, handleType );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceExternalFenceInfo const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceExternalFenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleType == rhs.handleType );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceExternalFenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalFenceInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceExternalFenceInfo>
+  {
+    using Type = PhysicalDeviceExternalFenceInfo;
+  };
+  using PhysicalDeviceExternalFenceInfoKHR = PhysicalDeviceExternalFenceInfo;
+
+  struct PhysicalDeviceExternalImageFormatInfo
+  {
+    using NativeType = VkPhysicalDeviceExternalImageFormatInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalImageFormatInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalImageFormatInfo(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), handleType( handleType_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalImageFormatInfo( PhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceExternalImageFormatInfo( VkPhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceExternalImageFormatInfo( *reinterpret_cast<PhysicalDeviceExternalImageFormatInfo const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceExternalImageFormatInfo & operator=( PhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceExternalImageFormatInfo & operator=( VkPhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalImageFormatInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalImageFormatInfo & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceExternalImageFormatInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceExternalImageFormatInfo*>( this );
+    }
+
+    operator VkPhysicalDeviceExternalImageFormatInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceExternalImageFormatInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, handleType );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceExternalImageFormatInfo const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceExternalImageFormatInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleType == rhs.handleType );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceExternalImageFormatInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalImageFormatInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceExternalImageFormatInfo>
+  {
+    using Type = PhysicalDeviceExternalImageFormatInfo;
+  };
+  using PhysicalDeviceExternalImageFormatInfoKHR = PhysicalDeviceExternalImageFormatInfo;
+
+  struct PhysicalDeviceExternalMemoryHostPropertiesEXT
+  {
+    using NativeType = VkPhysicalDeviceExternalMemoryHostPropertiesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryHostPropertiesEXT(VULKAN_HPP_NAMESPACE::DeviceSize minImportedHostPointerAlignment_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), minImportedHostPointerAlignment( minImportedHostPointerAlignment_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryHostPropertiesEXT( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceExternalMemoryHostPropertiesEXT( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceExternalMemoryHostPropertiesEXT( *reinterpret_cast<PhysicalDeviceExternalMemoryHostPropertiesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceExternalMemoryHostPropertiesEXT & operator=( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceExternalMemoryHostPropertiesEXT & operator=( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryHostPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceExternalMemoryHostPropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceExternalMemoryHostPropertiesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, minImportedHostPointerAlignment );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceExternalMemoryHostPropertiesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( minImportedHostPointerAlignment == rhs.minImportedHostPointerAlignment );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize minImportedHostPointerAlignment = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT>
+  {
+    using Type = PhysicalDeviceExternalMemoryHostPropertiesEXT;
+  };
+
+  struct PhysicalDeviceExternalMemoryRDMAFeaturesNV
+  {
+    using NativeType = VkPhysicalDeviceExternalMemoryRDMAFeaturesNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalMemoryRdmaFeaturesNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryRDMAFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 externalMemoryRDMA_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), externalMemoryRDMA( externalMemoryRDMA_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryRDMAFeaturesNV( PhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceExternalMemoryRDMAFeaturesNV( VkPhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceExternalMemoryRDMAFeaturesNV( *reinterpret_cast<PhysicalDeviceExternalMemoryRDMAFeaturesNV const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceExternalMemoryRDMAFeaturesNV & operator=( PhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceExternalMemoryRDMAFeaturesNV & operator=( VkPhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryRDMAFeaturesNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalMemoryRDMAFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalMemoryRDMAFeaturesNV & setExternalMemoryRDMA( VULKAN_HPP_NAMESPACE::Bool32 externalMemoryRDMA_ ) VULKAN_HPP_NOEXCEPT
+    {
+      externalMemoryRDMA = externalMemoryRDMA_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceExternalMemoryRDMAFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceExternalMemoryRDMAFeaturesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceExternalMemoryRDMAFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceExternalMemoryRDMAFeaturesNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, externalMemoryRDMA );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceExternalMemoryRDMAFeaturesNV const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( externalMemoryRDMA == rhs.externalMemoryRDMA );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalMemoryRdmaFeaturesNV;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 externalMemoryRDMA = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceExternalMemoryRdmaFeaturesNV>
+  {
+    using Type = PhysicalDeviceExternalMemoryRDMAFeaturesNV;
+  };
+
+  struct PhysicalDeviceExternalSemaphoreInfo
+  {
+    using NativeType = VkPhysicalDeviceExternalSemaphoreInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalSemaphoreInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalSemaphoreInfo(VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), handleType( handleType_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalSemaphoreInfo( PhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceExternalSemaphoreInfo( VkPhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceExternalSemaphoreInfo( *reinterpret_cast<PhysicalDeviceExternalSemaphoreInfo const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceExternalSemaphoreInfo & operator=( PhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceExternalSemaphoreInfo & operator=( VkPhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalSemaphoreInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalSemaphoreInfo & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceExternalSemaphoreInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo*>( this );
+    }
+
+    operator VkPhysicalDeviceExternalSemaphoreInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceExternalSemaphoreInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, handleType );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceExternalSemaphoreInfo const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceExternalSemaphoreInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleType == rhs.handleType );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceExternalSemaphoreInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalSemaphoreInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceExternalSemaphoreInfo>
+  {
+    using Type = PhysicalDeviceExternalSemaphoreInfo;
+  };
+  using PhysicalDeviceExternalSemaphoreInfoKHR = PhysicalDeviceExternalSemaphoreInfo;
+
+  struct PhysicalDeviceFaultFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceFaultFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFaultFeaturesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceFaultFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 deviceFault_ = {}, VULKAN_HPP_NAMESPACE::Bool32 deviceFaultVendorBinary_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), deviceFault( deviceFault_ ), deviceFaultVendorBinary( deviceFaultVendorBinary_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFaultFeaturesEXT( PhysicalDeviceFaultFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceFaultFeaturesEXT( VkPhysicalDeviceFaultFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceFaultFeaturesEXT( *reinterpret_cast<PhysicalDeviceFaultFeaturesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceFaultFeaturesEXT & operator=( PhysicalDeviceFaultFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceFaultFeaturesEXT & operator=( VkPhysicalDeviceFaultFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFaultFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFaultFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFaultFeaturesEXT & setDeviceFault( VULKAN_HPP_NAMESPACE::Bool32 deviceFault_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceFault = deviceFault_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFaultFeaturesEXT & setDeviceFaultVendorBinary( VULKAN_HPP_NAMESPACE::Bool32 deviceFaultVendorBinary_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceFaultVendorBinary = deviceFaultVendorBinary_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceFaultFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceFaultFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceFaultFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceFaultFeaturesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, deviceFault, deviceFaultVendorBinary );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceFaultFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceFaultFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( deviceFault == rhs.deviceFault )
+          && ( deviceFaultVendorBinary == rhs.deviceFaultVendorBinary );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceFaultFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFaultFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 deviceFault = {};
+    VULKAN_HPP_NAMESPACE::Bool32 deviceFaultVendorBinary = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceFaultFeaturesEXT>
+  {
+    using Type = PhysicalDeviceFaultFeaturesEXT;
+  };
+
+  struct PhysicalDeviceFeatures2
+  {
+    using NativeType = VkPhysicalDeviceFeatures2;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFeatures2;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures2(VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), features( features_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures2( PhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceFeatures2( VkPhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceFeatures2( *reinterpret_cast<PhysicalDeviceFeatures2 const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceFeatures2 & operator=( PhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceFeatures2 & operator=( VkPhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures2 & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures2 & setFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures const & features_ ) VULKAN_HPP_NOEXCEPT
+    {
+      features = features_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceFeatures2 const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceFeatures2*>( this );
+    }
+
+    operator VkPhysicalDeviceFeatures2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceFeatures2*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, features );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceFeatures2 const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceFeatures2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( features == rhs.features );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceFeatures2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFeatures2;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceFeatures2>
+  {
+    using Type = PhysicalDeviceFeatures2;
+  };
+  using PhysicalDeviceFeatures2KHR = PhysicalDeviceFeatures2;
+
+  struct PhysicalDeviceFloatControlsProperties
+  {
+    using NativeType = VkPhysicalDeviceFloatControlsProperties;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFloatControlsProperties;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceFloatControlsProperties(VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), denormBehaviorIndependence( denormBehaviorIndependence_ ), roundingModeIndependence( roundingModeIndependence_ ), shaderSignedZeroInfNanPreserveFloat16( shaderSignedZeroInfNanPreserveFloat16_ ), shaderSignedZeroInfNanPreserveFloat32( shaderSignedZeroInfNanPreserveFloat32_ ), shaderSignedZeroInfNanPreserveFloat64( shaderSignedZeroInfNanPreserveFloat64_ ), shaderDenormPreserveFloat16( shaderDenormPreserveFloat16_ ), shaderDenormPreserveFloat32( shaderDenormPreserveFloat32_ ), shaderDenormPreserveFloat64( shaderDenormPreserveFloat64_ ), shaderDenormFlushToZeroFloat16( shaderDenormFlushToZeroFloat16_ ), shaderDenormFlushToZeroFloat32( shaderDenormFlushToZeroFloat32_ ), shaderDenormFlushToZeroFloat64( shaderDenormFlushToZeroFloat64_ ), shaderRoundingModeRTEFloat16( shaderRoundingModeRTEFloat16_ ), shaderRoundingModeRTEFloat32( shaderRoundingModeRTEFloat32_ ), shaderRoundingModeRTEFloat64( shaderRoundingModeRTEFloat64_ ), shaderRoundingModeRTZFloat16( shaderRoundingModeRTZFloat16_ ), shaderRoundingModeRTZFloat32( shaderRoundingModeRTZFloat32_ ), shaderRoundingModeRTZFloat64( shaderRoundingModeRTZFloat64_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFloatControlsProperties( PhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceFloatControlsProperties( VkPhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceFloatControlsProperties( *reinterpret_cast<PhysicalDeviceFloatControlsProperties const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceFloatControlsProperties & operator=( PhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceFloatControlsProperties & operator=( VkPhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsProperties const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceFloatControlsProperties const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceFloatControlsProperties*>( this );
+    }
+
+    operator VkPhysicalDeviceFloatControlsProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceFloatControlsProperties*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence const &, VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, denormBehaviorIndependence, roundingModeIndependence, shaderSignedZeroInfNanPreserveFloat16, shaderSignedZeroInfNanPreserveFloat32, shaderSignedZeroInfNanPreserveFloat64, shaderDenormPreserveFloat16, shaderDenormPreserveFloat32, shaderDenormPreserveFloat64, shaderDenormFlushToZeroFloat16, shaderDenormFlushToZeroFloat32, shaderDenormFlushToZeroFloat64, shaderRoundingModeRTEFloat16, shaderRoundingModeRTEFloat32, shaderRoundingModeRTEFloat64, shaderRoundingModeRTZFloat16, shaderRoundingModeRTZFloat32, shaderRoundingModeRTZFloat64 );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceFloatControlsProperties const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceFloatControlsProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( denormBehaviorIndependence == rhs.denormBehaviorIndependence )
+          && ( roundingModeIndependence == rhs.roundingModeIndependence )
+          && ( shaderSignedZeroInfNanPreserveFloat16 == rhs.shaderSignedZeroInfNanPreserveFloat16 )
+          && ( shaderSignedZeroInfNanPreserveFloat32 == rhs.shaderSignedZeroInfNanPreserveFloat32 )
+          && ( shaderSignedZeroInfNanPreserveFloat64 == rhs.shaderSignedZeroInfNanPreserveFloat64 )
+          && ( shaderDenormPreserveFloat16 == rhs.shaderDenormPreserveFloat16 )
+          && ( shaderDenormPreserveFloat32 == rhs.shaderDenormPreserveFloat32 )
+          && ( shaderDenormPreserveFloat64 == rhs.shaderDenormPreserveFloat64 )
+          && ( shaderDenormFlushToZeroFloat16 == rhs.shaderDenormFlushToZeroFloat16 )
+          && ( shaderDenormFlushToZeroFloat32 == rhs.shaderDenormFlushToZeroFloat32 )
+          && ( shaderDenormFlushToZeroFloat64 == rhs.shaderDenormFlushToZeroFloat64 )
+          && ( shaderRoundingModeRTEFloat16 == rhs.shaderRoundingModeRTEFloat16 )
+          && ( shaderRoundingModeRTEFloat32 == rhs.shaderRoundingModeRTEFloat32 )
+          && ( shaderRoundingModeRTEFloat64 == rhs.shaderRoundingModeRTEFloat64 )
+          && ( shaderRoundingModeRTZFloat16 == rhs.shaderRoundingModeRTZFloat16 )
+          && ( shaderRoundingModeRTZFloat32 == rhs.shaderRoundingModeRTZFloat32 )
+          && ( shaderRoundingModeRTZFloat64 == rhs.shaderRoundingModeRTZFloat64 );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceFloatControlsProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFloatControlsProperties;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly;
+    VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64 = {};
+
+  };
+
+  // Maps the ePhysicalDeviceFloatControlsProperties sType tag back to its C++ wrapper type
+  // (used by Vulkan-Hpp's structure-chain machinery to look up a struct from its sType).
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceFloatControlsProperties>
+  {
+    using Type = PhysicalDeviceFloatControlsProperties;
+  };
+  // The struct was promoted from VK_KHR_shader_float_controls to core; keep the KHR name as an alias.
+  using PhysicalDeviceFloatControlsPropertiesKHR = PhysicalDeviceFloatControlsProperties;
+
+  // Vulkan-Hpp counterpart of VkPhysicalDeviceFragmentDensityMap2FeaturesEXT
+  // (VK_EXT_fragment_density_map2). Layout-compatible with the C struct, so the
+  // reinterpret_cast conversion operators below are valid.
+  struct PhysicalDeviceFragmentDensityMap2FeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceFragmentDensityMap2FeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMap2FeaturesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2FeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDeferred_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), fragmentDensityMapDeferred( fragmentDensityMapDeferred_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2FeaturesEXT( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; safe because the two types share an identical layout.
+    PhysicalDeviceFragmentDensityMap2FeaturesEXT( VkPhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceFragmentDensityMap2FeaturesEXT( *reinterpret_cast<PhysicalDeviceFragmentDensityMap2FeaturesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceFragmentDensityMap2FeaturesEXT & operator=( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceFragmentDensityMap2FeaturesEXT & operator=( VkPhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2FeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters; each returns *this for builder-style use.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMap2FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMap2FeaturesEXT & setFragmentDensityMapDeferred( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDeferred_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fragmentDensityMapDeferred = fragmentDensityMapDeferred_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the C struct, for passing directly to the C API.
+    operator VkPhysicalDeviceFragmentDensityMap2FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceFragmentDensityMap2FeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceFragmentDensityMap2FeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceFragmentDensityMap2FeaturesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Exposes all members as a tuple of references (drives the tuple-based operator== below).
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, fragmentDensityMapDeferred );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & ) const = default;
+#else
+    // Memberwise comparison (pNext is compared as a raw pointer, not deep-compared).
+    bool operator==( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( fragmentDensityMapDeferred == rhs.fragmentDensityMapDeferred );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMap2FeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDeferred = {};
+
+  };
+
+  // Maps the sType tag back to this wrapper type for structure-chain lookups.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentDensityMap2FeaturesEXT>
+  {
+    using Type = PhysicalDeviceFragmentDensityMap2FeaturesEXT;
+  };
+
+  // Vulkan-Hpp counterpart of VkPhysicalDeviceFragmentDensityMap2PropertiesEXT
+  // (VK_EXT_fragment_density_map2). Read-only properties struct: no setters are
+  // generated. Layout-compatible with the C struct (see reinterpret_cast operators).
+  struct PhysicalDeviceFragmentDensityMap2PropertiesEXT
+  {
+    using NativeType = VkPhysicalDeviceFragmentDensityMap2PropertiesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMap2PropertiesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2PropertiesEXT(VULKAN_HPP_NAMESPACE::Bool32 subsampledLoads_ = {}, VULKAN_HPP_NAMESPACE::Bool32 subsampledCoarseReconstructionEarlyAccess_ = {}, uint32_t maxSubsampledArrayLayers_ = {}, uint32_t maxDescriptorSetSubsampledSamplers_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), subsampledLoads( subsampledLoads_ ), subsampledCoarseReconstructionEarlyAccess( subsampledCoarseReconstructionEarlyAccess_ ), maxSubsampledArrayLayers( maxSubsampledArrayLayers_ ), maxDescriptorSetSubsampledSamplers( maxDescriptorSetSubsampledSamplers_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2PropertiesEXT( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid because the layouts are identical.
+    PhysicalDeviceFragmentDensityMap2PropertiesEXT( VkPhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceFragmentDensityMap2PropertiesEXT( *reinterpret_cast<PhysicalDeviceFragmentDensityMap2PropertiesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceFragmentDensityMap2PropertiesEXT & operator=( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceFragmentDensityMap2PropertiesEXT & operator=( VkPhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2PropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+
+    // Implicit conversions to the C struct, for passing directly to the C API.
+    operator VkPhysicalDeviceFragmentDensityMap2PropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceFragmentDensityMap2PropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceFragmentDensityMap2PropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceFragmentDensityMap2PropertiesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Exposes all members as a tuple of references (drives the tuple-based operator== below).
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint32_t const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, subsampledLoads, subsampledCoarseReconstructionEarlyAccess, maxSubsampledArrayLayers, maxDescriptorSetSubsampledSamplers );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & ) const = default;
+#else
+    // Memberwise comparison (pNext is compared as a raw pointer, not deep-compared).
+    bool operator==( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( subsampledLoads == rhs.subsampledLoads )
+          && ( subsampledCoarseReconstructionEarlyAccess == rhs.subsampledCoarseReconstructionEarlyAccess )
+          && ( maxSubsampledArrayLayers == rhs.maxSubsampledArrayLayers )
+          && ( maxDescriptorSetSubsampledSamplers == rhs.maxDescriptorSetSubsampledSamplers );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMap2PropertiesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 subsampledLoads = {};
+    VULKAN_HPP_NAMESPACE::Bool32 subsampledCoarseReconstructionEarlyAccess = {};
+    uint32_t maxSubsampledArrayLayers = {};
+    uint32_t maxDescriptorSetSubsampledSamplers = {};
+
+  };
+
+  // Maps the sType tag back to this wrapper type for structure-chain lookups.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentDensityMap2PropertiesEXT>
+  {
+    using Type = PhysicalDeviceFragmentDensityMap2PropertiesEXT;
+  };
+
+  // Vulkan-Hpp counterpart of VkPhysicalDeviceFragmentDensityMapFeaturesEXT
+  // (VK_EXT_fragment_density_map). Layout-compatible with the C struct, so the
+  // reinterpret_cast conversion operators below are valid.
+  struct PhysicalDeviceFragmentDensityMapFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceFragmentDensityMapFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), fragmentDensityMap( fragmentDensityMap_ ), fragmentDensityMapDynamic( fragmentDensityMapDynamic_ ), fragmentDensityMapNonSubsampledImages( fragmentDensityMapNonSubsampledImages_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapFeaturesEXT( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid because the layouts are identical.
+    PhysicalDeviceFragmentDensityMapFeaturesEXT( VkPhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceFragmentDensityMapFeaturesEXT( *reinterpret_cast<PhysicalDeviceFragmentDensityMapFeaturesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceFragmentDensityMapFeaturesEXT & operator=( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceFragmentDensityMapFeaturesEXT & operator=( VkPhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters; each returns *this for builder-style use.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapFeaturesEXT & setFragmentDensityMap( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fragmentDensityMap = fragmentDensityMap_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapFeaturesEXT & setFragmentDensityMapDynamic( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fragmentDensityMapDynamic = fragmentDensityMapDynamic_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapFeaturesEXT & setFragmentDensityMapNonSubsampledImages( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fragmentDensityMapNonSubsampledImages = fragmentDensityMapNonSubsampledImages_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the C struct, for passing directly to the C API.
+    operator VkPhysicalDeviceFragmentDensityMapFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceFragmentDensityMapFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceFragmentDensityMapFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceFragmentDensityMapFeaturesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Exposes all members as a tuple of references (drives the tuple-based operator== below).
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, fragmentDensityMap, fragmentDensityMapDynamic, fragmentDensityMapNonSubsampledImages );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceFragmentDensityMapFeaturesEXT const & ) const = default;
+#else
+    // Memberwise comparison (pNext is compared as a raw pointer, not deep-compared).
+    bool operator==( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( fragmentDensityMap == rhs.fragmentDensityMap )
+          && ( fragmentDensityMapDynamic == rhs.fragmentDensityMapDynamic )
+          && ( fragmentDensityMapNonSubsampledImages == rhs.fragmentDensityMapNonSubsampledImages );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap = {};
+    VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic = {};
+    VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages = {};
+
+  };
+
+  // Maps the sType tag back to this wrapper type for structure-chain lookups.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT>
+  {
+    using Type = PhysicalDeviceFragmentDensityMapFeaturesEXT;
+  };
+
+  // Vulkan-Hpp counterpart of VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM
+  // (VK_QCOM_fragment_density_map_offset). Layout-compatible with the C struct,
+  // so the reinterpret_cast conversion operators below are valid.
+  struct PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM
+  {
+    using NativeType = VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM(VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapOffset_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), fragmentDensityMapOffset( fragmentDensityMapOffset_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM( PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid because the layouts are identical.
+    PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM( VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM( *reinterpret_cast<PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM & operator=( PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM & operator=( VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters; each returns *this for builder-style use.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM & setFragmentDensityMapOffset( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fragmentDensityMapOffset = fragmentDensityMapOffset_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the C struct, for passing directly to the C API.
+    operator VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM*>( this );
+    }
+
+    operator VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Exposes all members as a tuple of references (drives the tuple-based operator== below).
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, fragmentDensityMapOffset );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const & ) const = default;
+#else
+    // Memberwise comparison (pNext is compared as a raw pointer, not deep-compared).
+    bool operator==( PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( fragmentDensityMapOffset == rhs.fragmentDensityMapOffset );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapOffset = {};
+
+  };
+
+  // Maps the sType tag back to this wrapper type for structure-chain lookups.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM>
+  {
+    using Type = PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM;
+  };
+
+  // Vulkan-Hpp counterpart of VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM
+  // (VK_QCOM_fragment_density_map_offset). Read-only properties struct: no setters
+  // are generated. Layout-compatible with the C struct (see reinterpret_cast operators).
+  struct PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM
+  {
+    using NativeType = VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM(VULKAN_HPP_NAMESPACE::Extent2D fragmentDensityOffsetGranularity_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), fragmentDensityOffsetGranularity( fragmentDensityOffsetGranularity_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM( PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid because the layouts are identical.
+    PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM( VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM( *reinterpret_cast<PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM & operator=( PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM & operator=( VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const *>( &rhs );
+      return *this;
+    }
+
+
+    // Implicit conversions to the C struct, for passing directly to the C API.
+    operator VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM*>( this );
+    }
+
+    operator VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Exposes all members as a tuple of references (drives the tuple-based operator== below).
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Extent2D const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, fragmentDensityOffsetGranularity );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const & ) const = default;
+#else
+    // Memberwise comparison (pNext is compared as a raw pointer, not deep-compared).
+    bool operator==( PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( fragmentDensityOffsetGranularity == rhs.fragmentDensityOffsetGranularity );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Extent2D fragmentDensityOffsetGranularity = {};
+
+  };
+
+  // Maps the sType tag back to this wrapper type for structure-chain lookups.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM>
+  {
+    using Type = PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM;
+  };
+
+  // Vulkan-Hpp counterpart of VkPhysicalDeviceFragmentDensityMapPropertiesEXT
+  // (VK_EXT_fragment_density_map). Read-only properties struct: no setters are
+  // generated. Layout-compatible with the C struct (see reinterpret_cast operators).
+  struct PhysicalDeviceFragmentDensityMapPropertiesEXT
+  {
+    using NativeType = VkPhysicalDeviceFragmentDensityMapPropertiesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapPropertiesEXT(VULKAN_HPP_NAMESPACE::Extent2D minFragmentDensityTexelSize_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxFragmentDensityTexelSize_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityInvocations_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), minFragmentDensityTexelSize( minFragmentDensityTexelSize_ ), maxFragmentDensityTexelSize( maxFragmentDensityTexelSize_ ), fragmentDensityInvocations( fragmentDensityInvocations_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapPropertiesEXT( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid because the layouts are identical.
+    PhysicalDeviceFragmentDensityMapPropertiesEXT( VkPhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceFragmentDensityMapPropertiesEXT( *reinterpret_cast<PhysicalDeviceFragmentDensityMapPropertiesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceFragmentDensityMapPropertiesEXT & operator=( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceFragmentDensityMapPropertiesEXT & operator=( VkPhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+
+    // Implicit conversions to the C struct, for passing directly to the C API.
+    operator VkPhysicalDeviceFragmentDensityMapPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceFragmentDensityMapPropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceFragmentDensityMapPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceFragmentDensityMapPropertiesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Exposes all members as a tuple of references (drives the tuple-based operator== below).
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, minFragmentDensityTexelSize, maxFragmentDensityTexelSize, fragmentDensityInvocations );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceFragmentDensityMapPropertiesEXT const & ) const = default;
+#else
+    // Memberwise comparison (pNext is compared as a raw pointer, not deep-compared).
+    bool operator==( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( minFragmentDensityTexelSize == rhs.minFragmentDensityTexelSize )
+          && ( maxFragmentDensityTexelSize == rhs.maxFragmentDensityTexelSize )
+          && ( fragmentDensityInvocations == rhs.fragmentDensityInvocations );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Extent2D minFragmentDensityTexelSize = {};
+    VULKAN_HPP_NAMESPACE::Extent2D maxFragmentDensityTexelSize = {};
+    VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityInvocations = {};
+
+  };
+
+  // Maps the sType tag back to this wrapper type for structure-chain lookups.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT>
+  {
+    using Type = PhysicalDeviceFragmentDensityMapPropertiesEXT;
+  };
+
+  struct PhysicalDeviceFragmentShaderBarycentricFeaturesKHR
+  {
+    using NativeType = VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderBarycentricFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), fragmentShaderBarycentric( fragmentShaderBarycentric_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderBarycentricFeaturesKHR( PhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceFragmentShaderBarycentricFeaturesKHR( VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceFragmentShaderBarycentricFeaturesKHR( *reinterpret_cast<PhysicalDeviceFragmentShaderBarycentricFeaturesKHR const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceFragmentShaderBarycentricFeaturesKHR & operator=( PhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceFragmentShaderBarycentricFeaturesKHR & operator=( VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderBarycentricFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderBarycentricFeaturesKHR & setFragmentShaderBarycentric( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fragmentShaderBarycentric = fragmentShaderBarycentric_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR*>( this );
+    }
+
+    operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, fragmentShaderBarycentric );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( fragmentShaderBarycentric == rhs.fragmentShaderBarycentric );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesKHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric = {};
+
+  };
+
+  // Maps the StructureType enum value back to the corresponding C++ wrapper type,
+  // enabling generic sType-driven lookups (e.g. in structure chains).
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesKHR>
+  {
+    using Type = PhysicalDeviceFragmentShaderBarycentricFeaturesKHR;
+  };
+  // The NV-suffixed struct is the same type; alias it to the KHR wrapper.
+  using PhysicalDeviceFragmentShaderBarycentricFeaturesNV = PhysicalDeviceFragmentShaderBarycentricFeaturesKHR;
+
+  // C++ wrapper for VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR.
+  // Layout-compatible with the C struct — the conversions below use
+  // reinterpret_cast, so members must not be added or reordered.
+  struct PhysicalDeviceFragmentShaderBarycentricPropertiesKHR
+  {
+    using NativeType = VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShaderBarycentricPropertiesKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderBarycentricPropertiesKHR(VULKAN_HPP_NAMESPACE::Bool32 triStripVertexOrderIndependentOfProvokingVertex_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), triStripVertexOrderIndependentOfProvokingVertex( triStripVertexOrderIndependentOfProvokingVertex_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderBarycentricPropertiesKHR( PhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the wrapper type.
+    PhysicalDeviceFragmentShaderBarycentricPropertiesKHR( VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceFragmentShaderBarycentricPropertiesKHR( *reinterpret_cast<PhysicalDeviceFragmentShaderBarycentricPropertiesKHR const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceFragmentShaderBarycentricPropertiesKHR & operator=( PhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (same reinterpret trick as the converting constructor).
+    PhysicalDeviceFragmentShaderBarycentricPropertiesKHR & operator=( VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricPropertiesKHR const *>( &rhs );
+      return *this;
+    }
+
+    // NOTE(review): unlike the feature structs, no setter block is generated here —
+    // presumably a query-only (returned-only) struct; confirm against the generator.
+
+    // Implicit conversions to the native C struct for passing to the C API.
+    operator VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR*>( this );
+    }
+
+    operator VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      // Tuple view of all members; backs the reflect-based operator== below.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, triStripVertexOrderIndependentOfProvokingVertex );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & ) const = default;
+#else
+    // Memberwise equality (tuple-based when reflection is enabled).
+    bool operator==( PhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( triStripVertexOrderIndependentOfProvokingVertex == rhs.triStripVertexOrderIndependentOfProvokingVertex );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // Members mirror the C struct field order exactly (sType, pNext, payload).
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShaderBarycentricPropertiesKHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 triStripVertexOrderIndependentOfProvokingVertex = {};
+
+  };
+
+  // Maps the StructureType enum value back to this C++ type for sType-driven lookups.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShaderBarycentricPropertiesKHR>
+  {
+    using Type = PhysicalDeviceFragmentShaderBarycentricPropertiesKHR;
+  };
+
+  // C++ wrapper for VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT.
+  // Layout-compatible with the C struct — the conversions below use
+  // reinterpret_cast, so members must not be added or reordered.
+  struct PhysicalDeviceFragmentShaderInterlockFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderInterlockFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), fragmentShaderSampleInterlock( fragmentShaderSampleInterlock_ ), fragmentShaderPixelInterlock( fragmentShaderPixelInterlock_ ), fragmentShaderShadingRateInterlock( fragmentShaderShadingRateInterlock_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderInterlockFeaturesEXT( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the wrapper type.
+    PhysicalDeviceFragmentShaderInterlockFeaturesEXT( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceFragmentShaderInterlockFeaturesEXT( *reinterpret_cast<PhysicalDeviceFragmentShaderInterlockFeaturesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceFragmentShaderInterlockFeaturesEXT & operator=( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (same reinterpret trick as the converting constructor).
+    PhysicalDeviceFragmentShaderInterlockFeaturesEXT & operator=( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderInterlockFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each returns *this to allow chaining.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setFragmentShaderSampleInterlock( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fragmentShaderSampleInterlock = fragmentShaderSampleInterlock_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setFragmentShaderPixelInterlock( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fragmentShaderPixelInterlock = fragmentShaderPixelInterlock_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setFragmentShaderShadingRateInterlock( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fragmentShaderShadingRateInterlock = fragmentShaderShadingRateInterlock_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the native C struct for passing to the C API.
+    operator VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      // Tuple view of all members; backs the reflect-based operator== below.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, fragmentShaderSampleInterlock, fragmentShaderPixelInterlock, fragmentShaderShadingRateInterlock );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & ) const = default;
+#else
+    // Memberwise equality (tuple-based when reflection is enabled).
+    bool operator==( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( fragmentShaderSampleInterlock == rhs.fragmentShaderSampleInterlock )
+          && ( fragmentShaderPixelInterlock == rhs.fragmentShaderPixelInterlock )
+          && ( fragmentShaderShadingRateInterlock == rhs.fragmentShaderShadingRateInterlock );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // Members mirror the C struct field order exactly (sType, pNext, payload).
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock = {};
+    VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock = {};
+    VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock = {};
+
+  };
+
+  // Maps the StructureType enum value back to this C++ type for sType-driven lookups.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT>
+  {
+    using Type = PhysicalDeviceFragmentShaderInterlockFeaturesEXT;
+  };
+
+  // C++ wrapper for VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV.
+  // Layout-compatible with the C struct — the conversions below use
+  // reinterpret_cast, so members must not be added or reordered.
+  struct PhysicalDeviceFragmentShadingRateEnumsFeaturesNV
+  {
+    using NativeType = VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateEnumsFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateEnums_ = {}, VULKAN_HPP_NAMESPACE::Bool32 supersampleFragmentShadingRates_ = {}, VULKAN_HPP_NAMESPACE::Bool32 noInvocationFragmentShadingRates_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), fragmentShadingRateEnums( fragmentShadingRateEnums_ ), supersampleFragmentShadingRates( supersampleFragmentShadingRates_ ), noInvocationFragmentShadingRates( noInvocationFragmentShadingRates_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateEnumsFeaturesNV( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the wrapper type.
+    PhysicalDeviceFragmentShadingRateEnumsFeaturesNV( VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceFragmentShadingRateEnumsFeaturesNV( *reinterpret_cast<PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & operator=( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (same reinterpret trick as the converting constructor).
+    PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & operator=( VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each returns *this to allow chaining.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & setFragmentShadingRateEnums( VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateEnums_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fragmentShadingRateEnums = fragmentShadingRateEnums_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & setSupersampleFragmentShadingRates( VULKAN_HPP_NAMESPACE::Bool32 supersampleFragmentShadingRates_ ) VULKAN_HPP_NOEXCEPT
+    {
+      supersampleFragmentShadingRates = supersampleFragmentShadingRates_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & setNoInvocationFragmentShadingRates( VULKAN_HPP_NAMESPACE::Bool32 noInvocationFragmentShadingRates_ ) VULKAN_HPP_NOEXCEPT
+    {
+      noInvocationFragmentShadingRates = noInvocationFragmentShadingRates_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the native C struct for passing to the C API.
+    operator VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      // Tuple view of all members; backs the reflect-based operator== below.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, fragmentShadingRateEnums, supersampleFragmentShadingRates, noInvocationFragmentShadingRates );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & ) const = default;
+#else
+    // Memberwise equality (tuple-based when reflection is enabled).
+    bool operator==( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( fragmentShadingRateEnums == rhs.fragmentShadingRateEnums )
+          && ( supersampleFragmentShadingRates == rhs.supersampleFragmentShadingRates )
+          && ( noInvocationFragmentShadingRates == rhs.noInvocationFragmentShadingRates );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // Members mirror the C struct field order exactly (sType, pNext, payload).
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateEnums = {};
+    VULKAN_HPP_NAMESPACE::Bool32 supersampleFragmentShadingRates = {};
+    VULKAN_HPP_NAMESPACE::Bool32 noInvocationFragmentShadingRates = {};
+
+  };
+
+  // Maps the StructureType enum value back to this C++ type for sType-driven lookups.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV>
+  {
+    using Type = PhysicalDeviceFragmentShadingRateEnumsFeaturesNV;
+  };
+
+  // C++ wrapper for VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV.
+  // Layout-compatible with the C struct — the conversions below use
+  // reinterpret_cast, so members must not be added or reordered.
+  struct PhysicalDeviceFragmentShadingRateEnumsPropertiesNV
+  {
+    using NativeType = VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateEnumsPropertiesNV(VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateInvocationCount_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), maxFragmentShadingRateInvocationCount( maxFragmentShadingRateInvocationCount_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateEnumsPropertiesNV( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the wrapper type.
+    PhysicalDeviceFragmentShadingRateEnumsPropertiesNV( VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceFragmentShadingRateEnumsPropertiesNV( *reinterpret_cast<PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceFragmentShadingRateEnumsPropertiesNV & operator=( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (same reinterpret trick as the converting constructor).
+    PhysicalDeviceFragmentShadingRateEnumsPropertiesNV & operator=( VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each returns *this to allow chaining.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsPropertiesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsPropertiesNV & setMaxFragmentShadingRateInvocationCount( VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateInvocationCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxFragmentShadingRateInvocationCount = maxFragmentShadingRateInvocationCount_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the native C struct for passing to the C API.
+    operator VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::SampleCountFlagBits const &>
+#endif
+      // Tuple view of all members; backs the reflect-based operator== below.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, maxFragmentShadingRateInvocationCount );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & ) const = default;
+#else
+    // Memberwise equality (tuple-based when reflection is enabled).
+    bool operator==( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxFragmentShadingRateInvocationCount == rhs.maxFragmentShadingRateInvocationCount );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // Members mirror the C struct field order exactly (sType, pNext, payload).
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateInvocationCount = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
+
+  };
+
+  // Maps the StructureType enum value back to this C++ type for sType-driven lookups.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV>
+  {
+    using Type = PhysicalDeviceFragmentShadingRateEnumsPropertiesNV;
+  };
+
+  // C++ wrapper for VkPhysicalDeviceFragmentShadingRateFeaturesKHR.
+  // Layout-compatible with the C struct — the conversions below use
+  // reinterpret_cast, so members must not be added or reordered.
+  struct PhysicalDeviceFragmentShadingRateFeaturesKHR
+  {
+    using NativeType = VkPhysicalDeviceFragmentShadingRateFeaturesKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShadingRateFeaturesKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 pipelineFragmentShadingRate_ = {}, VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRate_ = {}, VULKAN_HPP_NAMESPACE::Bool32 attachmentFragmentShadingRate_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), pipelineFragmentShadingRate( pipelineFragmentShadingRate_ ), primitiveFragmentShadingRate( primitiveFragmentShadingRate_ ), attachmentFragmentShadingRate( attachmentFragmentShadingRate_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateFeaturesKHR( PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the wrapper type.
+    PhysicalDeviceFragmentShadingRateFeaturesKHR( VkPhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceFragmentShadingRateFeaturesKHR( *reinterpret_cast<PhysicalDeviceFragmentShadingRateFeaturesKHR const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceFragmentShadingRateFeaturesKHR & operator=( PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (same reinterpret trick as the converting constructor).
+    PhysicalDeviceFragmentShadingRateFeaturesKHR & operator=( VkPhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateFeaturesKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each returns *this to allow chaining.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateFeaturesKHR & setPipelineFragmentShadingRate( VULKAN_HPP_NAMESPACE::Bool32 pipelineFragmentShadingRate_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineFragmentShadingRate = pipelineFragmentShadingRate_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateFeaturesKHR & setPrimitiveFragmentShadingRate( VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRate_ ) VULKAN_HPP_NOEXCEPT
+    {
+      primitiveFragmentShadingRate = primitiveFragmentShadingRate_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateFeaturesKHR & setAttachmentFragmentShadingRate( VULKAN_HPP_NAMESPACE::Bool32 attachmentFragmentShadingRate_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachmentFragmentShadingRate = attachmentFragmentShadingRate_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the native C struct for passing to the C API.
+    operator VkPhysicalDeviceFragmentShadingRateFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceFragmentShadingRateFeaturesKHR*>( this );
+    }
+
+    operator VkPhysicalDeviceFragmentShadingRateFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceFragmentShadingRateFeaturesKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      // Tuple view of all members; backs the reflect-based operator== below.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, pipelineFragmentShadingRate, primitiveFragmentShadingRate, attachmentFragmentShadingRate );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceFragmentShadingRateFeaturesKHR const & ) const = default;
+#else
+    // Memberwise equality (tuple-based when reflection is enabled).
+    bool operator==( PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pipelineFragmentShadingRate == rhs.pipelineFragmentShadingRate )
+          && ( primitiveFragmentShadingRate == rhs.primitiveFragmentShadingRate )
+          && ( attachmentFragmentShadingRate == rhs.attachmentFragmentShadingRate );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // Members mirror the C struct field order exactly (sType, pNext, payload).
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateFeaturesKHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 pipelineFragmentShadingRate = {};
+    VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRate = {};
+    VULKAN_HPP_NAMESPACE::Bool32 attachmentFragmentShadingRate = {};
+
+  };
+
+  // Maps the StructureType enum value back to this C++ type for sType-driven lookups.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShadingRateFeaturesKHR>
+  {
+    using Type = PhysicalDeviceFragmentShadingRateFeaturesKHR;
+  };
+
+  // C++ wrapper for VkPhysicalDeviceFragmentShadingRateKHR.
+  // Layout-compatible with the C struct — the conversions below use
+  // reinterpret_cast, so members must not be added or reordered.
+  struct PhysicalDeviceFragmentShadingRateKHR
+  {
+    using NativeType = VkPhysicalDeviceFragmentShadingRateKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShadingRateKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateKHR(VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts_ = {}, VULKAN_HPP_NAMESPACE::Extent2D fragmentSize_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), sampleCounts( sampleCounts_ ), fragmentSize( fragmentSize_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateKHR( PhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the wrapper type.
+    PhysicalDeviceFragmentShadingRateKHR( VkPhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceFragmentShadingRateKHR( *reinterpret_cast<PhysicalDeviceFragmentShadingRateKHR const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceFragmentShadingRateKHR & operator=( PhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (same reinterpret trick as the converting constructor).
+    PhysicalDeviceFragmentShadingRateKHR & operator=( VkPhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR const *>( &rhs );
+      return *this;
+    }
+
+    // NOTE(review): no setter block is generated here — presumably a query-only
+    // (returned-only) struct; confirm against the generator.
+
+    // Implicit conversions to the native C struct for passing to the C API.
+    operator VkPhysicalDeviceFragmentShadingRateKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceFragmentShadingRateKHR*>( this );
+    }
+
+    operator VkPhysicalDeviceFragmentShadingRateKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::SampleCountFlags const &, VULKAN_HPP_NAMESPACE::Extent2D const &>
+#endif
+      // Tuple view of all members; backs the reflect-based operator== below.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, sampleCounts, fragmentSize );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceFragmentShadingRateKHR const & ) const = default;
+#else
+    // Memberwise equality (tuple-based when reflection is enabled).
+    bool operator==( PhysicalDeviceFragmentShadingRateKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( sampleCounts == rhs.sampleCounts )
+          && ( fragmentSize == rhs.fragmentSize );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceFragmentShadingRateKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // Members mirror the C struct field order exactly (sType, pNext, payload).
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateKHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts = {};
+    VULKAN_HPP_NAMESPACE::Extent2D fragmentSize = {};
+
+  };
+
+  // Maps the StructureType enum value back to this C++ type for sType-driven lookups.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShadingRateKHR>
+  {
+    using Type = PhysicalDeviceFragmentShadingRateKHR;
+  };
+
+  struct PhysicalDeviceFragmentShadingRatePropertiesKHR
+  {
+    using NativeType = VkPhysicalDeviceFragmentShadingRatePropertiesKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShadingRatePropertiesKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRatePropertiesKHR(VULKAN_HPP_NAMESPACE::Extent2D minFragmentShadingRateAttachmentTexelSize_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxFragmentShadingRateAttachmentTexelSize_ = {}, uint32_t maxFragmentShadingRateAttachmentTexelSizeAspectRatio_ = {}, VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRateWithMultipleViewports_ = {}, VULKAN_HPP_NAMESPACE::Bool32 layeredShadingRateAttachments_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateNonTrivialCombinerOps_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxFragmentSize_ = {}, uint32_t maxFragmentSizeAspectRatio_ = {}, uint32_t maxFragmentShadingRateCoverageSamples_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateRasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderDepthStencilWrites_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithSampleMask_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderSampleMask_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithConservativeRasterization_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithFragmentShaderInterlock_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithCustomSampleLocations_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateStrictMultiplyCombiner_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), minFragmentShadingRateAttachmentTexelSize( minFragmentShadingRateAttachmentTexelSize_ ), maxFragmentShadingRateAttachmentTexelSize( maxFragmentShadingRateAttachmentTexelSize_ ), maxFragmentShadingRateAttachmentTexelSizeAspectRatio( maxFragmentShadingRateAttachmentTexelSizeAspectRatio_ ), primitiveFragmentShadingRateWithMultipleViewports( primitiveFragmentShadingRateWithMultipleViewports_ ), layeredShadingRateAttachments( layeredShadingRateAttachments_ ), fragmentShadingRateNonTrivialCombinerOps( fragmentShadingRateNonTrivialCombinerOps_ ), maxFragmentSize( maxFragmentSize_ ), maxFragmentSizeAspectRatio( maxFragmentSizeAspectRatio_ ), maxFragmentShadingRateCoverageSamples( maxFragmentShadingRateCoverageSamples_ ), maxFragmentShadingRateRasterizationSamples( maxFragmentShadingRateRasterizationSamples_ ), fragmentShadingRateWithShaderDepthStencilWrites( fragmentShadingRateWithShaderDepthStencilWrites_ ), fragmentShadingRateWithSampleMask( fragmentShadingRateWithSampleMask_ ), fragmentShadingRateWithShaderSampleMask( fragmentShadingRateWithShaderSampleMask_ ), fragmentShadingRateWithConservativeRasterization( fragmentShadingRateWithConservativeRasterization_ ), fragmentShadingRateWithFragmentShaderInterlock( fragmentShadingRateWithFragmentShaderInterlock_ ), fragmentShadingRateWithCustomSampleLocations( fragmentShadingRateWithCustomSampleLocations_ ), fragmentShadingRateStrictMultiplyCombiner( fragmentShadingRateStrictMultiplyCombiner_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRatePropertiesKHR( PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceFragmentShadingRatePropertiesKHR( VkPhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceFragmentShadingRatePropertiesKHR( *reinterpret_cast<PhysicalDeviceFragmentShadingRatePropertiesKHR const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceFragmentShadingRatePropertiesKHR & operator=( PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceFragmentShadingRatePropertiesKHR & operator=( VkPhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRatePropertiesKHR const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceFragmentShadingRatePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceFragmentShadingRatePropertiesKHR*>( this );
+    }
+
+    operator VkPhysicalDeviceFragmentShadingRatePropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceFragmentShadingRatePropertiesKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Extent2D const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::SampleCountFlagBits const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, minFragmentShadingRateAttachmentTexelSize, maxFragmentShadingRateAttachmentTexelSize, maxFragmentShadingRateAttachmentTexelSizeAspectRatio, primitiveFragmentShadingRateWithMultipleViewports, layeredShadingRateAttachments, fragmentShadingRateNonTrivialCombinerOps, maxFragmentSize, maxFragmentSizeAspectRatio, maxFragmentShadingRateCoverageSamples, maxFragmentShadingRateRasterizationSamples, fragmentShadingRateWithShaderDepthStencilWrites, fragmentShadingRateWithSampleMask, fragmentShadingRateWithShaderSampleMask, fragmentShadingRateWithConservativeRasterization, fragmentShadingRateWithFragmentShaderInterlock, fragmentShadingRateWithCustomSampleLocations, fragmentShadingRateStrictMultiplyCombiner );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceFragmentShadingRatePropertiesKHR const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( minFragmentShadingRateAttachmentTexelSize == rhs.minFragmentShadingRateAttachmentTexelSize )
+          && ( maxFragmentShadingRateAttachmentTexelSize == rhs.maxFragmentShadingRateAttachmentTexelSize )
+          && ( maxFragmentShadingRateAttachmentTexelSizeAspectRatio == rhs.maxFragmentShadingRateAttachmentTexelSizeAspectRatio )
+          && ( primitiveFragmentShadingRateWithMultipleViewports == rhs.primitiveFragmentShadingRateWithMultipleViewports )
+          && ( layeredShadingRateAttachments == rhs.layeredShadingRateAttachments )
+          && ( fragmentShadingRateNonTrivialCombinerOps == rhs.fragmentShadingRateNonTrivialCombinerOps )
+          && ( maxFragmentSize == rhs.maxFragmentSize )
+          && ( maxFragmentSizeAspectRatio == rhs.maxFragmentSizeAspectRatio )
+          && ( maxFragmentShadingRateCoverageSamples == rhs.maxFragmentShadingRateCoverageSamples )
+          && ( maxFragmentShadingRateRasterizationSamples == rhs.maxFragmentShadingRateRasterizationSamples )
+          && ( fragmentShadingRateWithShaderDepthStencilWrites == rhs.fragmentShadingRateWithShaderDepthStencilWrites )
+          && ( fragmentShadingRateWithSampleMask == rhs.fragmentShadingRateWithSampleMask )
+          && ( fragmentShadingRateWithShaderSampleMask == rhs.fragmentShadingRateWithShaderSampleMask )
+          && ( fragmentShadingRateWithConservativeRasterization == rhs.fragmentShadingRateWithConservativeRasterization )
+          && ( fragmentShadingRateWithFragmentShaderInterlock == rhs.fragmentShadingRateWithFragmentShaderInterlock )
+          && ( fragmentShadingRateWithCustomSampleLocations == rhs.fragmentShadingRateWithCustomSampleLocations )
+          && ( fragmentShadingRateStrictMultiplyCombiner == rhs.fragmentShadingRateStrictMultiplyCombiner );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRatePropertiesKHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Extent2D minFragmentShadingRateAttachmentTexelSize = {};
+    VULKAN_HPP_NAMESPACE::Extent2D maxFragmentShadingRateAttachmentTexelSize = {};
+    uint32_t maxFragmentShadingRateAttachmentTexelSizeAspectRatio = {};
+    VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRateWithMultipleViewports = {};
+    VULKAN_HPP_NAMESPACE::Bool32 layeredShadingRateAttachments = {};
+    VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateNonTrivialCombinerOps = {};
+    VULKAN_HPP_NAMESPACE::Extent2D maxFragmentSize = {};
+    uint32_t maxFragmentSizeAspectRatio = {};
+    uint32_t maxFragmentShadingRateCoverageSamples = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateRasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
+    VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderDepthStencilWrites = {};
+    VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithSampleMask = {};
+    VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderSampleMask = {};
+    VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithConservativeRasterization = {};
+    VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithFragmentShaderInterlock = {};
+    VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithCustomSampleLocations = {};
+    VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateStrictMultiplyCombiner = {};
+
+  };
+
+  // Maps StructureType::ePhysicalDeviceFragmentShadingRatePropertiesKHR back to its
+  // C++ struct type (consumed by Vulkan-Hpp's CppType lookup machinery).
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShadingRatePropertiesKHR>
+  {
+    using Type = PhysicalDeviceFragmentShadingRatePropertiesKHR;
+  };
+
+  // C++ wrapper for the C struct VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR
+  // (VK_KHR_global_priority feature query).  Auto-generated: member layout must stay
+  // identical to the C struct so the reinterpret_cast conversions below remain valid.
+  struct PhysicalDeviceGlobalPriorityQueryFeaturesKHR
+  {
+    using NativeType = VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR;
+
+    // May appear at most once in a pNext chain.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceGlobalPriorityQueryFeaturesKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceGlobalPriorityQueryFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 globalPriorityQuery_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), globalPriorityQuery( globalPriorityQuery_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceGlobalPriorityQueryFeaturesKHR( PhysicalDeviceGlobalPriorityQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct via a layout-compatible reinterpret_cast.
+    PhysicalDeviceGlobalPriorityQueryFeaturesKHR( VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceGlobalPriorityQueryFeaturesKHR( *reinterpret_cast<PhysicalDeviceGlobalPriorityQueryFeaturesKHR const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceGlobalPriorityQueryFeaturesKHR & operator=( PhysicalDeviceGlobalPriorityQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (same reinterpret_cast layout assumption as above).
+    PhysicalDeviceGlobalPriorityQueryFeaturesKHR & operator=( VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceGlobalPriorityQueryFeaturesKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters: each returns *this to allow fluent construction.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGlobalPriorityQueryFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGlobalPriorityQueryFeaturesKHR & setGlobalPriorityQuery( VULKAN_HPP_NAMESPACE::Bool32 globalPriorityQuery_ ) VULKAN_HPP_NOEXCEPT
+    {
+      globalPriorityQuery = globalPriorityQuery_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy conversions to the C struct, for passing directly to the C API.
+    operator VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR*>( this );
+    }
+
+    operator VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR*>( this );
+    }
+
+    // Tuple of references over every member (including sType/pNext), used by the
+    // reflection-based operator== below when VULKAN_HPP_USE_REFLECT is defined.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, globalPriorityQuery );
+    }
+#endif
+
+
+    // Memberwise comparison; note pNext is compared by pointer value, not chain contents.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceGlobalPriorityQueryFeaturesKHR const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceGlobalPriorityQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( globalPriorityQuery == rhs.globalPriorityQuery );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceGlobalPriorityQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members mirror the C struct in declaration order; do not reorder.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceGlobalPriorityQueryFeaturesKHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 globalPriorityQuery = {};
+
+  };
+
+  // Maps the StructureType enum value back to this C++ struct type.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceGlobalPriorityQueryFeaturesKHR>
+  {
+    using Type = PhysicalDeviceGlobalPriorityQueryFeaturesKHR;
+  };
+  // EXT-suffixed alias kept for source compatibility with the older extension name.
+  using PhysicalDeviceGlobalPriorityQueryFeaturesEXT = PhysicalDeviceGlobalPriorityQueryFeaturesKHR;
+
+  // C++ wrapper for the C struct VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT
+  // (VK_EXT_graphics_pipeline_library feature query).  Auto-generated: member layout
+  // must match the C struct so the reinterpret_cast conversions below remain valid.
+  struct PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT;
+
+    // May appear at most once in a pNext chain.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceGraphicsPipelineLibraryFeaturesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibrary_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), graphicsPipelineLibrary( graphicsPipelineLibrary_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT( PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct via a layout-compatible reinterpret_cast.
+    PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT( VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT( *reinterpret_cast<PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT & operator=( PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (same layout assumption as above).
+    PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT & operator=( VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters: each returns *this to allow fluent construction.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT & setGraphicsPipelineLibrary( VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibrary_ ) VULKAN_HPP_NOEXCEPT
+    {
+      graphicsPipelineLibrary = graphicsPipelineLibrary_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy conversions to the C struct, for passing directly to the C API.
+    operator VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT*>( this );
+    }
+
+    // Tuple of references over every member, used by reflection-based comparison.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, graphicsPipelineLibrary );
+    }
+#endif
+
+
+    // Memberwise comparison; pNext is compared by pointer value, not chain contents.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( graphicsPipelineLibrary == rhs.graphicsPipelineLibrary );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members mirror the C struct in declaration order; do not reorder.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceGraphicsPipelineLibraryFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibrary = {};
+
+  };
+
+  // Maps the StructureType enum value back to this C++ struct type.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceGraphicsPipelineLibraryFeaturesEXT>
+  {
+    using Type = PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT;
+  };
+
+  // C++ wrapper for the C struct VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT
+  // (VK_EXT_graphics_pipeline_library properties).  Auto-generated: member layout
+  // must match the C struct so the reinterpret_cast conversions below remain valid.
+  struct PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT
+  {
+    using NativeType = VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT;
+
+    // May appear at most once in a pNext chain.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceGraphicsPipelineLibraryPropertiesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT(VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibraryFastLinking_ = {}, VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibraryIndependentInterpolationDecoration_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), graphicsPipelineLibraryFastLinking( graphicsPipelineLibraryFastLinking_ ), graphicsPipelineLibraryIndependentInterpolationDecoration( graphicsPipelineLibraryIndependentInterpolationDecoration_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT( PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct via a layout-compatible reinterpret_cast.
+    PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT( VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT( *reinterpret_cast<PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT & operator=( PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (same layout assumption as above).
+    PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT & operator=( VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters: each returns *this to allow fluent construction.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT & setGraphicsPipelineLibraryFastLinking( VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibraryFastLinking_ ) VULKAN_HPP_NOEXCEPT
+    {
+      graphicsPipelineLibraryFastLinking = graphicsPipelineLibraryFastLinking_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT & setGraphicsPipelineLibraryIndependentInterpolationDecoration( VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibraryIndependentInterpolationDecoration_ ) VULKAN_HPP_NOEXCEPT
+    {
+      graphicsPipelineLibraryIndependentInterpolationDecoration = graphicsPipelineLibraryIndependentInterpolationDecoration_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy conversions to the C struct, for passing directly to the C API.
+    operator VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT*>( this );
+    }
+
+    // Tuple of references over every member, used by reflection-based comparison.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, graphicsPipelineLibraryFastLinking, graphicsPipelineLibraryIndependentInterpolationDecoration );
+    }
+#endif
+
+
+    // Memberwise comparison; pNext is compared by pointer value, not chain contents.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( graphicsPipelineLibraryFastLinking == rhs.graphicsPipelineLibraryFastLinking )
+          && ( graphicsPipelineLibraryIndependentInterpolationDecoration == rhs.graphicsPipelineLibraryIndependentInterpolationDecoration );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members mirror the C struct in declaration order; do not reorder.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceGraphicsPipelineLibraryPropertiesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibraryFastLinking = {};
+    VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibraryIndependentInterpolationDecoration = {};
+
+  };
+
+  // Maps the StructureType enum value back to this C++ struct type.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceGraphicsPipelineLibraryPropertiesEXT>
+  {
+    using Type = PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT;
+  };
+
+  // C++ wrapper for the C struct VkPhysicalDeviceGroupProperties (device-group
+  // enumeration results).  Auto-generated: member layout must match the C struct so
+  // the reinterpret_cast conversions below remain valid.  No setters are generated
+  // for this struct (presumably because it is output-only — filled by the
+  // implementation; confirm against the generator's returnedonly handling).
+  struct PhysicalDeviceGroupProperties
+  {
+    using NativeType = VkPhysicalDeviceGroupProperties;
+
+    // May appear at most once in a pNext chain.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceGroupProperties;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGroupProperties(uint32_t physicalDeviceCount_ = {}, std::array<VULKAN_HPP_NAMESPACE::PhysicalDevice,VK_MAX_DEVICE_GROUP_SIZE> const & physicalDevices_ = {}, VULKAN_HPP_NAMESPACE::Bool32 subsetAllocation_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), physicalDeviceCount( physicalDeviceCount_ ), physicalDevices( physicalDevices_ ), subsetAllocation( subsetAllocation_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGroupProperties( PhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct via a layout-compatible reinterpret_cast.
+    PhysicalDeviceGroupProperties( VkPhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceGroupProperties( *reinterpret_cast<PhysicalDeviceGroupProperties const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceGroupProperties & operator=( PhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (same layout assumption as above).
+    PhysicalDeviceGroupProperties & operator=( VkPhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties const *>( &rhs );
+      return *this;
+    }
+
+
+    // Zero-copy conversions to the C struct, for passing directly to the C API.
+    operator VkPhysicalDeviceGroupProperties const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceGroupProperties*>( this );
+    }
+
+    operator VkPhysicalDeviceGroupProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceGroupProperties*>( this );
+    }
+
+    // Tuple of references over every member, used by reflection-based comparison.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::PhysicalDevice, VK_MAX_DEVICE_GROUP_SIZE> const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, physicalDeviceCount, physicalDevices, subsetAllocation );
+    }
+#endif
+
+
+    // Memberwise comparison; pNext is compared by pointer value, not chain contents.
+    // Note the fixed-size physicalDevices array is compared in full, not just the
+    // first physicalDeviceCount entries.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceGroupProperties const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceGroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( physicalDeviceCount == rhs.physicalDeviceCount )
+          && ( physicalDevices == rhs.physicalDevices )
+          && ( subsetAllocation == rhs.subsetAllocation );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceGroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members mirror the C struct in declaration order; do not reorder.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceGroupProperties;
+    void * pNext = {};
+    uint32_t physicalDeviceCount = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::PhysicalDevice, VK_MAX_DEVICE_GROUP_SIZE> physicalDevices = {};
+    VULKAN_HPP_NAMESPACE::Bool32 subsetAllocation = {};
+
+  };
+
+  // Maps the StructureType enum value back to this C++ struct type.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceGroupProperties>
+  {
+    using Type = PhysicalDeviceGroupProperties;
+  };
+  // KHR alias retained from when the type was introduced by extension, pre-core.
+  using PhysicalDeviceGroupPropertiesKHR = PhysicalDeviceGroupProperties;
+
+  // C++ wrapper for the C struct VkPhysicalDeviceHostQueryResetFeatures (host query
+  // reset feature query).  Auto-generated: member layout must match the C struct so
+  // the reinterpret_cast conversions below remain valid.
+  struct PhysicalDeviceHostQueryResetFeatures
+  {
+    using NativeType = VkPhysicalDeviceHostQueryResetFeatures;
+
+    // May appear at most once in a pNext chain.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceHostQueryResetFeatures;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceHostQueryResetFeatures(VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), hostQueryReset( hostQueryReset_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceHostQueryResetFeatures( PhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct via a layout-compatible reinterpret_cast.
+    PhysicalDeviceHostQueryResetFeatures( VkPhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceHostQueryResetFeatures( *reinterpret_cast<PhysicalDeviceHostQueryResetFeatures const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceHostQueryResetFeatures & operator=( PhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (same layout assumption as above).
+    PhysicalDeviceHostQueryResetFeatures & operator=( VkPhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeatures const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters: each returns *this to allow fluent construction.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostQueryResetFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostQueryResetFeatures & setHostQueryReset( VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      hostQueryReset = hostQueryReset_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy conversions to the C struct, for passing directly to the C API.
+    operator VkPhysicalDeviceHostQueryResetFeatures const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceHostQueryResetFeatures*>( this );
+    }
+
+    operator VkPhysicalDeviceHostQueryResetFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceHostQueryResetFeatures*>( this );
+    }
+
+    // Tuple of references over every member, used by reflection-based comparison.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, hostQueryReset );
+    }
+#endif
+
+
+    // Memberwise comparison; pNext is compared by pointer value, not chain contents.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceHostQueryResetFeatures const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceHostQueryResetFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( hostQueryReset == rhs.hostQueryReset );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceHostQueryResetFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members mirror the C struct in declaration order; do not reorder.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceHostQueryResetFeatures;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset = {};
+
+  };
+
+  // Maps the StructureType enum value back to this C++ struct type.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceHostQueryResetFeatures>
+  {
+    using Type = PhysicalDeviceHostQueryResetFeatures;
+  };
+  // EXT alias retained from when the type was introduced by extension, pre-core.
+  using PhysicalDeviceHostQueryResetFeaturesEXT = PhysicalDeviceHostQueryResetFeatures;
+
+  // C++ wrapper for the C struct VkPhysicalDeviceIDProperties (device/driver UUID and
+  // LUID identification).  Auto-generated: member layout must match the C struct so
+  // the reinterpret_cast conversions below remain valid.  No setters are generated
+  // for this struct (presumably output-only — filled by the implementation; confirm
+  // against the generator's returnedonly handling).
+  struct PhysicalDeviceIDProperties
+  {
+    using NativeType = VkPhysicalDeviceIDProperties;
+
+    // May appear at most once in a pNext chain.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceIdProperties;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIDProperties(std::array<uint8_t,VK_UUID_SIZE> const & deviceUUID_ = {}, std::array<uint8_t,VK_UUID_SIZE> const & driverUUID_ = {}, std::array<uint8_t,VK_LUID_SIZE> const & deviceLUID_ = {}, uint32_t deviceNodeMask_ = {}, VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), deviceUUID( deviceUUID_ ), driverUUID( driverUUID_ ), deviceLUID( deviceLUID_ ), deviceNodeMask( deviceNodeMask_ ), deviceLUIDValid( deviceLUIDValid_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIDProperties( PhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct via a layout-compatible reinterpret_cast.
+    PhysicalDeviceIDProperties( VkPhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceIDProperties( *reinterpret_cast<PhysicalDeviceIDProperties const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceIDProperties & operator=( PhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (same layout assumption as above).
+    PhysicalDeviceIDProperties & operator=( VkPhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceIDProperties const *>( &rhs );
+      return *this;
+    }
+
+
+    // Zero-copy conversions to the C struct, for passing directly to the C API.
+    operator VkPhysicalDeviceIDProperties const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceIDProperties*>( this );
+    }
+
+    operator VkPhysicalDeviceIDProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceIDProperties*>( this );
+    }
+
+    // Tuple of references over every member, used by reflection-based comparison.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_LUID_SIZE> const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, deviceUUID, driverUUID, deviceLUID, deviceNodeMask, deviceLUIDValid );
+    }
+#endif
+
+
+    // Memberwise comparison; pNext is compared by pointer value, not chain contents.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceIDProperties const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceIDProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( deviceUUID == rhs.deviceUUID )
+          && ( driverUUID == rhs.driverUUID )
+          && ( deviceLUID == rhs.deviceLUID )
+          && ( deviceNodeMask == rhs.deviceNodeMask )
+          && ( deviceLUIDValid == rhs.deviceLUIDValid );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceIDProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members mirror the C struct in declaration order; do not reorder.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceIdProperties;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> deviceUUID = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> driverUUID = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_LUID_SIZE> deviceLUID = {};
+    uint32_t deviceNodeMask = {};
+    VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid = {};
+
+  };
+
+  // Maps the StructureType enum value back to this C++ struct type.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceIdProperties>
+  {
+    using Type = PhysicalDeviceIDProperties;
+  };
+  // KHR alias retained from when the type was introduced by extension, pre-core.
+  using PhysicalDeviceIDPropertiesKHR = PhysicalDeviceIDProperties;
+
+  struct PhysicalDeviceImage2DViewOf3DFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceImage2DViewOf3DFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImage2DViewOf3DFeaturesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceImage2DViewOf3DFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 image2DViewOf3D_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sampler2DViewOf3D_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), image2DViewOf3D( image2DViewOf3D_ ), sampler2DViewOf3D( sampler2DViewOf3D_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceImage2DViewOf3DFeaturesEXT( PhysicalDeviceImage2DViewOf3DFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceImage2DViewOf3DFeaturesEXT( VkPhysicalDeviceImage2DViewOf3DFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceImage2DViewOf3DFeaturesEXT( *reinterpret_cast<PhysicalDeviceImage2DViewOf3DFeaturesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceImage2DViewOf3DFeaturesEXT & operator=( PhysicalDeviceImage2DViewOf3DFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceImage2DViewOf3DFeaturesEXT & operator=( VkPhysicalDeviceImage2DViewOf3DFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImage2DViewOf3DFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImage2DViewOf3DFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImage2DViewOf3DFeaturesEXT & setImage2DViewOf3D( VULKAN_HPP_NAMESPACE::Bool32 image2DViewOf3D_ ) VULKAN_HPP_NOEXCEPT
+    {
+      image2DViewOf3D = image2DViewOf3D_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImage2DViewOf3DFeaturesEXT & setSampler2DViewOf3D( VULKAN_HPP_NAMESPACE::Bool32 sampler2DViewOf3D_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sampler2DViewOf3D = sampler2DViewOf3D_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceImage2DViewOf3DFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceImage2DViewOf3DFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceImage2DViewOf3DFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceImage2DViewOf3DFeaturesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, image2DViewOf3D, sampler2DViewOf3D );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceImage2DViewOf3DFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceImage2DViewOf3DFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( image2DViewOf3D == rhs.image2DViewOf3D )
+          && ( sampler2DViewOf3D == rhs.sampler2DViewOf3D );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceImage2DViewOf3DFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImage2DViewOf3DFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 image2DViewOf3D = {};
+    VULKAN_HPP_NAMESPACE::Bool32 sampler2DViewOf3D = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceImage2DViewOf3DFeaturesEXT>
+  {
+    using Type = PhysicalDeviceImage2DViewOf3DFeaturesEXT;
+  };
+
+  struct PhysicalDeviceImageCompressionControlFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceImageCompressionControlFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageCompressionControlFeaturesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceImageCompressionControlFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 imageCompressionControl_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), imageCompressionControl( imageCompressionControl_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageCompressionControlFeaturesEXT( PhysicalDeviceImageCompressionControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceImageCompressionControlFeaturesEXT( VkPhysicalDeviceImageCompressionControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceImageCompressionControlFeaturesEXT( *reinterpret_cast<PhysicalDeviceImageCompressionControlFeaturesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceImageCompressionControlFeaturesEXT & operator=( PhysicalDeviceImageCompressionControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceImageCompressionControlFeaturesEXT & operator=( VkPhysicalDeviceImageCompressionControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageCompressionControlFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageCompressionControlFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageCompressionControlFeaturesEXT & setImageCompressionControl( VULKAN_HPP_NAMESPACE::Bool32 imageCompressionControl_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageCompressionControl = imageCompressionControl_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceImageCompressionControlFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceImageCompressionControlFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceImageCompressionControlFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceImageCompressionControlFeaturesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, imageCompressionControl );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceImageCompressionControlFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceImageCompressionControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( imageCompressionControl == rhs.imageCompressionControl );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceImageCompressionControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageCompressionControlFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 imageCompressionControl = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceImageCompressionControlFeaturesEXT>
+  {
+    using Type = PhysicalDeviceImageCompressionControlFeaturesEXT;
+  };
+
+  struct PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageCompressionControlSwapchainFeaturesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 imageCompressionControlSwapchain_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), imageCompressionControlSwapchain( imageCompressionControlSwapchain_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT( PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT( VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT( *reinterpret_cast<PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT & operator=( PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT & operator=( VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT & setImageCompressionControlSwapchain( VULKAN_HPP_NAMESPACE::Bool32 imageCompressionControlSwapchain_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageCompressionControlSwapchain = imageCompressionControlSwapchain_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, imageCompressionControlSwapchain );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( imageCompressionControlSwapchain == rhs.imageCompressionControlSwapchain );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageCompressionControlSwapchainFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 imageCompressionControlSwapchain = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceImageCompressionControlSwapchainFeaturesEXT>
+  {
+    using Type = PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT;
+  };
+
+  struct PhysicalDeviceImageDrmFormatModifierInfoEXT
+  {
+    using NativeType = VkPhysicalDeviceImageDrmFormatModifierInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceImageDrmFormatModifierInfoEXT(uint64_t drmFormatModifier_ = {}, VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, uint32_t queueFamilyIndexCount_ = {}, const uint32_t * pQueueFamilyIndices_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), drmFormatModifier( drmFormatModifier_ ), sharingMode( sharingMode_ ), queueFamilyIndexCount( queueFamilyIndexCount_ ), pQueueFamilyIndices( pQueueFamilyIndices_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageDrmFormatModifierInfoEXT( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceImageDrmFormatModifierInfoEXT( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceImageDrmFormatModifierInfoEXT( *reinterpret_cast<PhysicalDeviceImageDrmFormatModifierInfoEXT const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    PhysicalDeviceImageDrmFormatModifierInfoEXT( uint64_t drmFormatModifier_, VULKAN_HPP_NAMESPACE::SharingMode sharingMode_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), drmFormatModifier( drmFormatModifier_ ), sharingMode( sharingMode_ ), queueFamilyIndexCount( static_cast<uint32_t>( queueFamilyIndices_.size() ) ), pQueueFamilyIndices( queueFamilyIndices_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    PhysicalDeviceImageDrmFormatModifierInfoEXT & operator=( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceImageDrmFormatModifierInfoEXT & operator=( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageDrmFormatModifierInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT & setDrmFormatModifier( uint64_t drmFormatModifier_ ) VULKAN_HPP_NOEXCEPT
+    {
+      drmFormatModifier = drmFormatModifier_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT & setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sharingMode = sharingMode_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueFamilyIndexCount = queueFamilyIndexCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT & setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pQueueFamilyIndices = pQueueFamilyIndices_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    PhysicalDeviceImageDrmFormatModifierInfoEXT & setQueueFamilyIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueFamilyIndexCount = static_cast<uint32_t>( queueFamilyIndices_.size() );
+      pQueueFamilyIndices = queueFamilyIndices_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceImageDrmFormatModifierInfoEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceImageDrmFormatModifierInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint64_t const &, VULKAN_HPP_NAMESPACE::SharingMode const &, uint32_t const &, const uint32_t * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, drmFormatModifier, sharingMode, queueFamilyIndexCount, pQueueFamilyIndices );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceImageDrmFormatModifierInfoEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( drmFormatModifier == rhs.drmFormatModifier )
+          && ( sharingMode == rhs.sharingMode )
+          && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount )
+          && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT;
+    const void * pNext = {};
+    uint64_t drmFormatModifier = {};
+    VULKAN_HPP_NAMESPACE::SharingMode sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive;
+    uint32_t queueFamilyIndexCount = {};
+    const uint32_t * pQueueFamilyIndices = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT>
+  {
+    using Type = PhysicalDeviceImageDrmFormatModifierInfoEXT;
+  };
+
+  struct PhysicalDeviceImageFormatInfo2
+  {
+    using NativeType = VkPhysicalDeviceImageFormatInfo2;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageFormatInfo2;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceImageFormatInfo2(VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::ImageType type_ = VULKAN_HPP_NAMESPACE::ImageType::e1D, VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), format( format_ ), type( type_ ), tiling( tiling_ ), usage( usage_ ), flags( flags_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageFormatInfo2( PhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceImageFormatInfo2( VkPhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceImageFormatInfo2( *reinterpret_cast<PhysicalDeviceImageFormatInfo2 const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceImageFormatInfo2 & operator=( PhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceImageFormatInfo2 & operator=( VkPhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
+    {
+      format = format_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setType( VULKAN_HPP_NAMESPACE::ImageType type_ ) VULKAN_HPP_NOEXCEPT
+    {
+      type = type_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setTiling( VULKAN_HPP_NAMESPACE::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tiling = tiling_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      usage = usage_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceImageFormatInfo2 const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2*>( this );
+    }
+
+    operator VkPhysicalDeviceImageFormatInfo2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceImageFormatInfo2*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::ImageType const &, VULKAN_HPP_NAMESPACE::ImageTiling const &, VULKAN_HPP_NAMESPACE::ImageUsageFlags const &, VULKAN_HPP_NAMESPACE::ImageCreateFlags const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, format, type, tiling, usage, flags );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceImageFormatInfo2 const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceImageFormatInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( format == rhs.format )
+          && ( type == rhs.type )
+          && ( tiling == rhs.tiling )
+          && ( usage == rhs.usage )
+          && ( flags == rhs.flags );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceImageFormatInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageFormatInfo2;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    VULKAN_HPP_NAMESPACE::ImageType type = VULKAN_HPP_NAMESPACE::ImageType::e1D;
+    VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal;
+    VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {};
+    VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceImageFormatInfo2>
+  {
+    using Type = PhysicalDeviceImageFormatInfo2;
+  };
+  using PhysicalDeviceImageFormatInfo2KHR = PhysicalDeviceImageFormatInfo2;
+
+  struct PhysicalDeviceImageProcessingFeaturesQCOM
+  {
+    using NativeType = VkPhysicalDeviceImageProcessingFeaturesQCOM;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageProcessingFeaturesQCOM;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceImageProcessingFeaturesQCOM(VULKAN_HPP_NAMESPACE::Bool32 textureSampleWeighted_ = {}, VULKAN_HPP_NAMESPACE::Bool32 textureBoxFilter_ = {}, VULKAN_HPP_NAMESPACE::Bool32 textureBlockMatch_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), textureSampleWeighted( textureSampleWeighted_ ), textureBoxFilter( textureBoxFilter_ ), textureBlockMatch( textureBlockMatch_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageProcessingFeaturesQCOM( PhysicalDeviceImageProcessingFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceImageProcessingFeaturesQCOM( VkPhysicalDeviceImageProcessingFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceImageProcessingFeaturesQCOM( *reinterpret_cast<PhysicalDeviceImageProcessingFeaturesQCOM const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceImageProcessingFeaturesQCOM & operator=( PhysicalDeviceImageProcessingFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceImageProcessingFeaturesQCOM & operator=( VkPhysicalDeviceImageProcessingFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessingFeaturesQCOM const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageProcessingFeaturesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageProcessingFeaturesQCOM & setTextureSampleWeighted( VULKAN_HPP_NAMESPACE::Bool32 textureSampleWeighted_ ) VULKAN_HPP_NOEXCEPT
+    {
+      textureSampleWeighted = textureSampleWeighted_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageProcessingFeaturesQCOM & setTextureBoxFilter( VULKAN_HPP_NAMESPACE::Bool32 textureBoxFilter_ ) VULKAN_HPP_NOEXCEPT
+    {
+      textureBoxFilter = textureBoxFilter_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageProcessingFeaturesQCOM & setTextureBlockMatch( VULKAN_HPP_NAMESPACE::Bool32 textureBlockMatch_ ) VULKAN_HPP_NOEXCEPT
+    {
+      textureBlockMatch = textureBlockMatch_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceImageProcessingFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceImageProcessingFeaturesQCOM*>( this );
+    }
+
+    operator VkPhysicalDeviceImageProcessingFeaturesQCOM &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceImageProcessingFeaturesQCOM*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, textureSampleWeighted, textureBoxFilter, textureBlockMatch );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceImageProcessingFeaturesQCOM const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceImageProcessingFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( textureSampleWeighted == rhs.textureSampleWeighted )
+          && ( textureBoxFilter == rhs.textureBoxFilter )
+          && ( textureBlockMatch == rhs.textureBlockMatch );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceImageProcessingFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageProcessingFeaturesQCOM;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 textureSampleWeighted = {};
+    VULKAN_HPP_NAMESPACE::Bool32 textureBoxFilter = {};
+    VULKAN_HPP_NAMESPACE::Bool32 textureBlockMatch = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceImageProcessingFeaturesQCOM>
+  {
+    using Type = PhysicalDeviceImageProcessingFeaturesQCOM;
+  };
+
+  struct PhysicalDeviceImageProcessingPropertiesQCOM
+  {
+    using NativeType = VkPhysicalDeviceImageProcessingPropertiesQCOM;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageProcessingPropertiesQCOM;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceImageProcessingPropertiesQCOM(uint32_t maxWeightFilterPhases_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxWeightFilterDimension_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxBlockMatchRegion_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxBoxFilterBlockSize_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), maxWeightFilterPhases( maxWeightFilterPhases_ ), maxWeightFilterDimension( maxWeightFilterDimension_ ), maxBlockMatchRegion( maxBlockMatchRegion_ ), maxBoxFilterBlockSize( maxBoxFilterBlockSize_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageProcessingPropertiesQCOM( PhysicalDeviceImageProcessingPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceImageProcessingPropertiesQCOM( VkPhysicalDeviceImageProcessingPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceImageProcessingPropertiesQCOM( *reinterpret_cast<PhysicalDeviceImageProcessingPropertiesQCOM const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceImageProcessingPropertiesQCOM & operator=( PhysicalDeviceImageProcessingPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceImageProcessingPropertiesQCOM & operator=( VkPhysicalDeviceImageProcessingPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessingPropertiesQCOM const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceImageProcessingPropertiesQCOM const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceImageProcessingPropertiesQCOM*>( this );
+    }
+
+    operator VkPhysicalDeviceImageProcessingPropertiesQCOM &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceImageProcessingPropertiesQCOM*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, maxWeightFilterPhases, maxWeightFilterDimension, maxBlockMatchRegion, maxBoxFilterBlockSize );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceImageProcessingPropertiesQCOM const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceImageProcessingPropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxWeightFilterPhases == rhs.maxWeightFilterPhases )
+          && ( maxWeightFilterDimension == rhs.maxWeightFilterDimension )
+          && ( maxBlockMatchRegion == rhs.maxBlockMatchRegion )
+          && ( maxBoxFilterBlockSize == rhs.maxBoxFilterBlockSize );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceImageProcessingPropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageProcessingPropertiesQCOM;
+    void * pNext = {};
+    uint32_t maxWeightFilterPhases = {};
+    VULKAN_HPP_NAMESPACE::Extent2D maxWeightFilterDimension = {};
+    VULKAN_HPP_NAMESPACE::Extent2D maxBlockMatchRegion = {};
+    VULKAN_HPP_NAMESPACE::Extent2D maxBoxFilterBlockSize = {};
+
+  };
+
+  // Maps the StructureType enum value back to its C++ wrapper type (used by
+  // vulkan.hpp's structure-chain machinery to resolve types from sType values).
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceImageProcessingPropertiesQCOM>
+  {
+    using Type = PhysicalDeviceImageProcessingPropertiesQCOM;
+  };
+
+  // C++ wrapper for VkPhysicalDeviceImageRobustnessFeatures (feature-query struct
+  // with a single Bool32, robustImageAccess).  Appears auto-generated: members mirror
+  // the C struct one-for-one, and the reinterpret_cast conversions below assume the
+  // two layouts are identical — do not reorder or add data members.
+  struct PhysicalDeviceImageRobustnessFeatures
+  {
+    using NativeType = VkPhysicalDeviceImageRobustnessFeatures;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageRobustnessFeatures;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceImageRobustnessFeatures(VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), robustImageAccess( robustImageAccess_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageRobustnessFeatures( PhysicalDeviceImageRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the wrapper (layouts assumed identical).
+    PhysicalDeviceImageRobustnessFeatures( VkPhysicalDeviceImageRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceImageRobustnessFeatures( *reinterpret_cast<PhysicalDeviceImageRobustnessFeatures const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceImageRobustnessFeatures & operator=( PhysicalDeviceImageRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceImageRobustnessFeatures & operator=( VkPhysicalDeviceImageRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageRobustnessFeatures const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters (return *this) for chained initialization.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageRobustnessFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageRobustnessFeatures & setRobustImageAccess( VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      robustImageAccess = robustImageAccess_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy views as the C struct, for passing directly to the C API.
+    operator VkPhysicalDeviceImageRobustnessFeatures const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceImageRobustnessFeatures*>( this );
+    }
+
+    operator VkPhysicalDeviceImageRobustnessFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceImageRobustnessFeatures*>( this );
+    }
+
+    // reflect(): tuple of references to every member, used by the comparison below.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, robustImageAccess );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceImageRobustnessFeatures const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceImageRobustnessFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      // Shallow comparison: pNext is compared as a pointer, not deeply.
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( robustImageAccess == rhs.robustImageAccess );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceImageRobustnessFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // sType's default value tags this struct for pNext-chain consumers; pNext extends the chain.
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageRobustnessFeatures;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess = {};
+
+  };
+
+  // sType-to-type mapping for vulkan.hpp's structure-chain machinery.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceImageRobustnessFeatures>
+  {
+    using Type = PhysicalDeviceImageRobustnessFeatures;
+  };
+  using PhysicalDeviceImageRobustnessFeaturesEXT = PhysicalDeviceImageRobustnessFeatures;
+
+  // C++ wrapper for VkPhysicalDeviceImageViewImageFormatInfoEXT (input struct carrying
+  // an ImageViewType, default e1D).  Appears auto-generated: members mirror the C struct
+  // one-for-one, and the reinterpret_cast conversions below assume identical layout —
+  // do not reorder or add data members.
+  struct PhysicalDeviceImageViewImageFormatInfoEXT
+  {
+    using NativeType = VkPhysicalDeviceImageViewImageFormatInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewImageFormatInfoEXT(VULKAN_HPP_NAMESPACE::ImageViewType imageViewType_ = VULKAN_HPP_NAMESPACE::ImageViewType::e1D, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), imageViewType( imageViewType_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewImageFormatInfoEXT( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the wrapper (layouts assumed identical).
+    PhysicalDeviceImageViewImageFormatInfoEXT( VkPhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceImageViewImageFormatInfoEXT( *reinterpret_cast<PhysicalDeviceImageViewImageFormatInfoEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceImageViewImageFormatInfoEXT & operator=( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceImageViewImageFormatInfoEXT & operator=( VkPhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewImageFormatInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters (return *this) for chained initialization.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageViewImageFormatInfoEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageViewImageFormatInfoEXT & setImageViewType( VULKAN_HPP_NAMESPACE::ImageViewType imageViewType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageViewType = imageViewType_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy views as the C struct, for passing directly to the C API.
+    operator VkPhysicalDeviceImageViewImageFormatInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceImageViewImageFormatInfoEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceImageViewImageFormatInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceImageViewImageFormatInfoEXT*>( this );
+    }
+
+    // reflect(): tuple of references to every member, used by the comparison below.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ImageViewType const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, imageViewType );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceImageViewImageFormatInfoEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      // Shallow comparison: pNext is compared as a pointer, not deeply.
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( imageViewType == rhs.imageViewType );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // sType's default value tags this struct for pNext-chain consumers; pNext extends the chain.
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ImageViewType imageViewType = VULKAN_HPP_NAMESPACE::ImageViewType::e1D;
+
+  };
+
+  // sType-to-type mapping for vulkan.hpp's structure-chain machinery.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT>
+  {
+    using Type = PhysicalDeviceImageViewImageFormatInfoEXT;
+  };
+
+  // C++ wrapper for VkPhysicalDeviceImageViewMinLodFeaturesEXT (feature-query struct
+  // with a single Bool32, minLod).  Appears auto-generated: members mirror the C struct
+  // one-for-one, and the reinterpret_cast conversions below assume identical layout —
+  // do not reorder or add data members.
+  struct PhysicalDeviceImageViewMinLodFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceImageViewMinLodFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageViewMinLodFeaturesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewMinLodFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 minLod_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), minLod( minLod_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewMinLodFeaturesEXT( PhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the wrapper (layouts assumed identical).
+    PhysicalDeviceImageViewMinLodFeaturesEXT( VkPhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceImageViewMinLodFeaturesEXT( *reinterpret_cast<PhysicalDeviceImageViewMinLodFeaturesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceImageViewMinLodFeaturesEXT & operator=( PhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceImageViewMinLodFeaturesEXT & operator=( VkPhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewMinLodFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters (return *this) for chained initialization.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageViewMinLodFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageViewMinLodFeaturesEXT & setMinLod( VULKAN_HPP_NAMESPACE::Bool32 minLod_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minLod = minLod_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy views as the C struct, for passing directly to the C API.
+    operator VkPhysicalDeviceImageViewMinLodFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceImageViewMinLodFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceImageViewMinLodFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceImageViewMinLodFeaturesEXT*>( this );
+    }
+
+    // reflect(): tuple of references to every member, used by the comparison below.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, minLod );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceImageViewMinLodFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      // Shallow comparison: pNext is compared as a pointer, not deeply.
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( minLod == rhs.minLod );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // sType's default value tags this struct for pNext-chain consumers; pNext extends the chain.
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageViewMinLodFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 minLod = {};
+
+  };
+
+  // sType-to-type mapping for vulkan.hpp's structure-chain machinery.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceImageViewMinLodFeaturesEXT>
+  {
+    using Type = PhysicalDeviceImageViewMinLodFeaturesEXT;
+  };
+
+  // C++ wrapper for VkPhysicalDeviceImagelessFramebufferFeatures (feature-query struct
+  // with a single Bool32, imagelessFramebuffer).  Appears auto-generated: members mirror
+  // the C struct one-for-one, and the reinterpret_cast conversions below assume identical
+  // layout — do not reorder or add data members.
+  struct PhysicalDeviceImagelessFramebufferFeatures
+  {
+    using NativeType = VkPhysicalDeviceImagelessFramebufferFeatures;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImagelessFramebufferFeatures;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceImagelessFramebufferFeatures(VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), imagelessFramebuffer( imagelessFramebuffer_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceImagelessFramebufferFeatures( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the wrapper (layouts assumed identical).
+    PhysicalDeviceImagelessFramebufferFeatures( VkPhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceImagelessFramebufferFeatures( *reinterpret_cast<PhysicalDeviceImagelessFramebufferFeatures const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceImagelessFramebufferFeatures & operator=( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceImagelessFramebufferFeatures & operator=( VkPhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeatures const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters (return *this) for chained initialization.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImagelessFramebufferFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImagelessFramebufferFeatures & setImagelessFramebuffer( VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imagelessFramebuffer = imagelessFramebuffer_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy views as the C struct, for passing directly to the C API.
+    operator VkPhysicalDeviceImagelessFramebufferFeatures const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceImagelessFramebufferFeatures*>( this );
+    }
+
+    operator VkPhysicalDeviceImagelessFramebufferFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceImagelessFramebufferFeatures*>( this );
+    }
+
+    // reflect(): tuple of references to every member, used by the comparison below.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, imagelessFramebuffer );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceImagelessFramebufferFeatures const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      // Shallow comparison: pNext is compared as a pointer, not deeply.
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( imagelessFramebuffer == rhs.imagelessFramebuffer );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // sType's default value tags this struct for pNext-chain consumers; pNext extends the chain.
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImagelessFramebufferFeatures;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer = {};
+
+  };
+
+  // sType-to-type mapping for vulkan.hpp's structure-chain machinery.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceImagelessFramebufferFeatures>
+  {
+    using Type = PhysicalDeviceImagelessFramebufferFeatures;
+  };
+  using PhysicalDeviceImagelessFramebufferFeaturesKHR = PhysicalDeviceImagelessFramebufferFeatures;
+
+  // C++ wrapper for VkPhysicalDeviceIndexTypeUint8FeaturesEXT (feature-query struct
+  // with a single Bool32, indexTypeUint8).  Appears auto-generated: members mirror the
+  // C struct one-for-one, and the reinterpret_cast conversions below assume identical
+  // layout — do not reorder or add data members.
+  struct PhysicalDeviceIndexTypeUint8FeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceIndexTypeUint8FeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceIndexTypeUint8FeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), indexTypeUint8( indexTypeUint8_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceIndexTypeUint8FeaturesEXT( PhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the wrapper (layouts assumed identical).
+    PhysicalDeviceIndexTypeUint8FeaturesEXT( VkPhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceIndexTypeUint8FeaturesEXT( *reinterpret_cast<PhysicalDeviceIndexTypeUint8FeaturesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceIndexTypeUint8FeaturesEXT & operator=( PhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceIndexTypeUint8FeaturesEXT & operator=( VkPhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters (return *this) for chained initialization.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIndexTypeUint8FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIndexTypeUint8FeaturesEXT & setIndexTypeUint8( VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8_ ) VULKAN_HPP_NOEXCEPT
+    {
+      indexTypeUint8 = indexTypeUint8_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy views as the C struct, for passing directly to the C API.
+    operator VkPhysicalDeviceIndexTypeUint8FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceIndexTypeUint8FeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceIndexTypeUint8FeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceIndexTypeUint8FeaturesEXT*>( this );
+    }
+
+    // reflect(): tuple of references to every member, used by the comparison below.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, indexTypeUint8 );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceIndexTypeUint8FeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      // Shallow comparison: pNext is compared as a pointer, not deeply.
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( indexTypeUint8 == rhs.indexTypeUint8 );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // sType's default value tags this struct for pNext-chain consumers; pNext extends the chain.
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8 = {};
+
+  };
+
+  // sType-to-type mapping for vulkan.hpp's structure-chain machinery.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT>
+  {
+    using Type = PhysicalDeviceIndexTypeUint8FeaturesEXT;
+  };
+
+  // C++ wrapper for VkPhysicalDeviceInheritedViewportScissorFeaturesNV (feature-query
+  // struct with a single Bool32, inheritedViewportScissor2D).  Appears auto-generated:
+  // members mirror the C struct one-for-one, and the reinterpret_cast conversions below
+  // assume identical layout — do not reorder or add data members.
+  struct PhysicalDeviceInheritedViewportScissorFeaturesNV
+  {
+    using NativeType = VkPhysicalDeviceInheritedViewportScissorFeaturesNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceInheritedViewportScissorFeaturesNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceInheritedViewportScissorFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 inheritedViewportScissor2D_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), inheritedViewportScissor2D( inheritedViewportScissor2D_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceInheritedViewportScissorFeaturesNV( PhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the wrapper (layouts assumed identical).
+    PhysicalDeviceInheritedViewportScissorFeaturesNV( VkPhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceInheritedViewportScissorFeaturesNV( *reinterpret_cast<PhysicalDeviceInheritedViewportScissorFeaturesNV const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceInheritedViewportScissorFeaturesNV & operator=( PhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceInheritedViewportScissorFeaturesNV & operator=( VkPhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceInheritedViewportScissorFeaturesNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters (return *this) for chained initialization.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInheritedViewportScissorFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInheritedViewportScissorFeaturesNV & setInheritedViewportScissor2D( VULKAN_HPP_NAMESPACE::Bool32 inheritedViewportScissor2D_ ) VULKAN_HPP_NOEXCEPT
+    {
+      inheritedViewportScissor2D = inheritedViewportScissor2D_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy views as the C struct, for passing directly to the C API.
+    operator VkPhysicalDeviceInheritedViewportScissorFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceInheritedViewportScissorFeaturesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceInheritedViewportScissorFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceInheritedViewportScissorFeaturesNV*>( this );
+    }
+
+    // reflect(): tuple of references to every member, used by the comparison below.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, inheritedViewportScissor2D );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceInheritedViewportScissorFeaturesNV const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      // Shallow comparison: pNext is compared as a pointer, not deeply.
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( inheritedViewportScissor2D == rhs.inheritedViewportScissor2D );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // sType's default value tags this struct for pNext-chain consumers; pNext extends the chain.
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInheritedViewportScissorFeaturesNV;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 inheritedViewportScissor2D = {};
+
+  };
+
+  // sType-to-type mapping for vulkan.hpp's structure-chain machinery.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceInheritedViewportScissorFeaturesNV>
+  {
+    using Type = PhysicalDeviceInheritedViewportScissorFeaturesNV;
+  };
+
+  // C++ wrapper for VkPhysicalDeviceInlineUniformBlockFeatures (feature-query struct
+  // with two Bool32 members: inlineUniformBlock and
+  // descriptorBindingInlineUniformBlockUpdateAfterBind).  Appears auto-generated:
+  // members mirror the C struct one-for-one, and the reinterpret_cast conversions below
+  // assume identical layout — do not reorder or add data members.
+  struct PhysicalDeviceInlineUniformBlockFeatures
+  {
+    using NativeType = VkPhysicalDeviceInlineUniformBlockFeatures;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceInlineUniformBlockFeatures;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockFeatures(VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), inlineUniformBlock( inlineUniformBlock_ ), descriptorBindingInlineUniformBlockUpdateAfterBind( descriptorBindingInlineUniformBlockUpdateAfterBind_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockFeatures( PhysicalDeviceInlineUniformBlockFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the wrapper (layouts assumed identical).
+    PhysicalDeviceInlineUniformBlockFeatures( VkPhysicalDeviceInlineUniformBlockFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceInlineUniformBlockFeatures( *reinterpret_cast<PhysicalDeviceInlineUniformBlockFeatures const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceInlineUniformBlockFeatures & operator=( PhysicalDeviceInlineUniformBlockFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceInlineUniformBlockFeatures & operator=( VkPhysicalDeviceInlineUniformBlockFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeatures const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters (return *this) for chained initialization.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInlineUniformBlockFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInlineUniformBlockFeatures & setInlineUniformBlock( VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ ) VULKAN_HPP_NOEXCEPT
+    {
+      inlineUniformBlock = inlineUniformBlock_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInlineUniformBlockFeatures & setDescriptorBindingInlineUniformBlockUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingInlineUniformBlockUpdateAfterBind = descriptorBindingInlineUniformBlockUpdateAfterBind_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy views as the C struct, for passing directly to the C API.
+    operator VkPhysicalDeviceInlineUniformBlockFeatures const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceInlineUniformBlockFeatures*>( this );
+    }
+
+    operator VkPhysicalDeviceInlineUniformBlockFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceInlineUniformBlockFeatures*>( this );
+    }
+
+    // reflect(): tuple of references to every member, used by the comparison below.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, inlineUniformBlock, descriptorBindingInlineUniformBlockUpdateAfterBind );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceInlineUniformBlockFeatures const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceInlineUniformBlockFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      // Shallow comparison: pNext is compared as a pointer, not deeply.
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( inlineUniformBlock == rhs.inlineUniformBlock )
+          && ( descriptorBindingInlineUniformBlockUpdateAfterBind == rhs.descriptorBindingInlineUniformBlockUpdateAfterBind );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceInlineUniformBlockFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // sType's default value tags this struct for pNext-chain consumers; pNext extends the chain.
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInlineUniformBlockFeatures;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind = {};
+
+  };
+
+  // sType-to-type mapping for vulkan.hpp's structure-chain machinery.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceInlineUniformBlockFeatures>
+  {
+    using Type = PhysicalDeviceInlineUniformBlockFeatures;
+  };
+  using PhysicalDeviceInlineUniformBlockFeaturesEXT = PhysicalDeviceInlineUniformBlockFeatures;
+
+  struct PhysicalDeviceInlineUniformBlockProperties
+  {
+    using NativeType = VkPhysicalDeviceInlineUniformBlockProperties;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceInlineUniformBlockProperties;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockProperties(uint32_t maxInlineUniformBlockSize_ = {}, uint32_t maxPerStageDescriptorInlineUniformBlocks_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ = {}, uint32_t maxDescriptorSetInlineUniformBlocks_ = {}, uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), maxInlineUniformBlockSize( maxInlineUniformBlockSize_ ), maxPerStageDescriptorInlineUniformBlocks( maxPerStageDescriptorInlineUniformBlocks_ ), maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ ), maxDescriptorSetInlineUniformBlocks( maxDescriptorSetInlineUniformBlocks_ ), maxDescriptorSetUpdateAfterBindInlineUniformBlocks( maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockProperties( PhysicalDeviceInlineUniformBlockProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceInlineUniformBlockProperties( VkPhysicalDeviceInlineUniformBlockProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceInlineUniformBlockProperties( *reinterpret_cast<PhysicalDeviceInlineUniformBlockProperties const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceInlineUniformBlockProperties & operator=( PhysicalDeviceInlineUniformBlockProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceInlineUniformBlockProperties & operator=( VkPhysicalDeviceInlineUniformBlockProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockProperties const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceInlineUniformBlockProperties const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceInlineUniformBlockProperties*>( this );
+    }
+
+    operator VkPhysicalDeviceInlineUniformBlockProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceInlineUniformBlockProperties*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, maxInlineUniformBlockSize, maxPerStageDescriptorInlineUniformBlocks, maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks, maxDescriptorSetInlineUniformBlocks, maxDescriptorSetUpdateAfterBindInlineUniformBlocks );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceInlineUniformBlockProperties const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceInlineUniformBlockProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxInlineUniformBlockSize == rhs.maxInlineUniformBlockSize )
+          && ( maxPerStageDescriptorInlineUniformBlocks == rhs.maxPerStageDescriptorInlineUniformBlocks )
+          && ( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks == rhs.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks )
+          && ( maxDescriptorSetInlineUniformBlocks == rhs.maxDescriptorSetInlineUniformBlocks )
+          && ( maxDescriptorSetUpdateAfterBindInlineUniformBlocks == rhs.maxDescriptorSetUpdateAfterBindInlineUniformBlocks );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceInlineUniformBlockProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInlineUniformBlockProperties;
+    void * pNext = {};
+    uint32_t maxInlineUniformBlockSize = {};
+    uint32_t maxPerStageDescriptorInlineUniformBlocks = {};
+    uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks = {};
+    uint32_t maxDescriptorSetInlineUniformBlocks = {};
+    uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceInlineUniformBlockProperties>
+  {
+    using Type = PhysicalDeviceInlineUniformBlockProperties;
+  };
+  using PhysicalDeviceInlineUniformBlockPropertiesEXT = PhysicalDeviceInlineUniformBlockProperties;
+
+  // Vulkan-Hpp wrapper for VkPhysicalDeviceInvocationMaskFeaturesHUAWEI. The code assumes the
+  // wrapper is layout-compatible with the C struct: all conversions to/from the native type
+  // below are implemented with reinterpret_cast, never member-wise copies.
+  struct PhysicalDeviceInvocationMaskFeaturesHUAWEI
+  {
+    using NativeType = VkPhysicalDeviceInvocationMaskFeaturesHUAWEI;
+
+    // NOTE(review): allowDuplicate presumably controls whether this struct may appear more than
+    // once in a pNext structure chain — confirm against vulkan.hpp's StructureChain validation.
+    static const bool allowDuplicate = false;
+    // The StructureType tag matching the sType member's default below; also keyed by the
+    // CppType<StructureType, ...> specialization that follows this struct.
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceInvocationMaskFeaturesHUAWEI;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+// Value constructor: sType keeps its in-class default; only pNext and the feature flag are set.
+VULKAN_HPP_CONSTEXPR PhysicalDeviceInvocationMaskFeaturesHUAWEI(VULKAN_HPP_NAMESPACE::Bool32 invocationMask_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), invocationMask( invocationMask_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceInvocationMaskFeaturesHUAWEI( PhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the (layout-compatible) wrapper and
+    // delegating to the defaulted copy constructor.
+    PhysicalDeviceInvocationMaskFeaturesHUAWEI( VkPhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceInvocationMaskFeaturesHUAWEI( *reinterpret_cast<PhysicalDeviceInvocationMaskFeaturesHUAWEI const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceInvocationMaskFeaturesHUAWEI & operator=( PhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct via reinterpret_cast (same layout-compatibility assumption as above).
+    PhysicalDeviceInvocationMaskFeaturesHUAWEI & operator=( VkPhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceInvocationMaskFeaturesHUAWEI const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters (builder style): each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInvocationMaskFeaturesHUAWEI & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInvocationMaskFeaturesHUAWEI & setInvocationMask( VULKAN_HPP_NAMESPACE::Bool32 invocationMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      invocationMask = invocationMask_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the native C struct, again by pointer reinterpretation.
+    operator VkPhysicalDeviceInvocationMaskFeaturesHUAWEI const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceInvocationMaskFeaturesHUAWEI*>( this );
+    }
+
+    operator VkPhysicalDeviceInvocationMaskFeaturesHUAWEI &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceInvocationMaskFeaturesHUAWEI*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      // Returns a tuple of const references to every member, in declaration order; used by
+      // operator== below when reflection is enabled.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, invocationMask );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceInvocationMaskFeaturesHUAWEI const & ) const = default;
+#else
+    // Member-wise equality (tuple comparison when reflection is available). Note pNext is
+    // compared as a raw pointer, not deeply.
+    bool operator==( PhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( invocationMask == rhs.invocationMask );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInvocationMaskFeaturesHUAWEI;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 invocationMask = {};
+
+  };
+
+  // Maps the StructureType enumerant back to this wrapper type (used by StructureChain machinery).
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceInvocationMaskFeaturesHUAWEI>
+  {
+    using Type = PhysicalDeviceInvocationMaskFeaturesHUAWEI;
+  };
+
+  // Vulkan-Hpp wrapper for VkPhysicalDeviceLegacyDitheringFeaturesEXT. The code assumes the
+  // wrapper is layout-compatible with the C struct: all conversions to/from the native type
+  // below are implemented with reinterpret_cast, never member-wise copies.
+  struct PhysicalDeviceLegacyDitheringFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceLegacyDitheringFeaturesEXT;
+
+    // NOTE(review): allowDuplicate presumably controls whether this struct may appear more than
+    // once in a pNext structure chain — confirm against vulkan.hpp's StructureChain validation.
+    static const bool allowDuplicate = false;
+    // The StructureType tag matching the sType member's default below; also keyed by the
+    // CppType<StructureType, ...> specialization that follows this struct.
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLegacyDitheringFeaturesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+// Value constructor: sType keeps its in-class default; only pNext and the feature flag are set.
+VULKAN_HPP_CONSTEXPR PhysicalDeviceLegacyDitheringFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 legacyDithering_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), legacyDithering( legacyDithering_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceLegacyDitheringFeaturesEXT( PhysicalDeviceLegacyDitheringFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the (layout-compatible) wrapper and
+    // delegating to the defaulted copy constructor.
+    PhysicalDeviceLegacyDitheringFeaturesEXT( VkPhysicalDeviceLegacyDitheringFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceLegacyDitheringFeaturesEXT( *reinterpret_cast<PhysicalDeviceLegacyDitheringFeaturesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceLegacyDitheringFeaturesEXT & operator=( PhysicalDeviceLegacyDitheringFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct via reinterpret_cast (same layout-compatibility assumption as above).
+    PhysicalDeviceLegacyDitheringFeaturesEXT & operator=( VkPhysicalDeviceLegacyDitheringFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceLegacyDitheringFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters (builder style): each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLegacyDitheringFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLegacyDitheringFeaturesEXT & setLegacyDithering( VULKAN_HPP_NAMESPACE::Bool32 legacyDithering_ ) VULKAN_HPP_NOEXCEPT
+    {
+      legacyDithering = legacyDithering_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the native C struct, again by pointer reinterpretation.
+    operator VkPhysicalDeviceLegacyDitheringFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceLegacyDitheringFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceLegacyDitheringFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceLegacyDitheringFeaturesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      // Returns a tuple of const references to every member, in declaration order; used by
+      // operator== below when reflection is enabled.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, legacyDithering );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceLegacyDitheringFeaturesEXT const & ) const = default;
+#else
+    // Member-wise equality (tuple comparison when reflection is available). Note pNext is
+    // compared as a raw pointer, not deeply.
+    bool operator==( PhysicalDeviceLegacyDitheringFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( legacyDithering == rhs.legacyDithering );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceLegacyDitheringFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLegacyDitheringFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 legacyDithering = {};
+
+  };
+
+  // Maps the StructureType enumerant back to this wrapper type (used by StructureChain machinery).
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceLegacyDitheringFeaturesEXT>
+  {
+    using Type = PhysicalDeviceLegacyDitheringFeaturesEXT;
+  };
+
+  struct PhysicalDeviceLimits
+  {
+    using NativeType = VkPhysicalDeviceLimits;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLimits(uint32_t maxImageDimension1D_ = {}, uint32_t maxImageDimension2D_ = {}, uint32_t maxImageDimension3D_ = {}, uint32_t maxImageDimensionCube_ = {}, uint32_t maxImageArrayLayers_ = {}, uint32_t maxTexelBufferElements_ = {}, uint32_t maxUniformBufferRange_ = {}, uint32_t maxStorageBufferRange_ = {}, uint32_t maxPushConstantsSize_ = {}, uint32_t maxMemoryAllocationCount_ = {}, uint32_t maxSamplerAllocationCount_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize bufferImageGranularity_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize sparseAddressSpaceSize_ = {}, uint32_t maxBoundDescriptorSets_ = {}, uint32_t maxPerStageDescriptorSamplers_ = {}, uint32_t maxPerStageDescriptorUniformBuffers_ = {}, uint32_t maxPerStageDescriptorStorageBuffers_ = {}, uint32_t maxPerStageDescriptorSampledImages_ = {}, uint32_t maxPerStageDescriptorStorageImages_ = {}, uint32_t maxPerStageDescriptorInputAttachments_ = {}, uint32_t maxPerStageResources_ = {}, uint32_t maxDescriptorSetSamplers_ = {}, uint32_t maxDescriptorSetUniformBuffers_ = {}, uint32_t maxDescriptorSetUniformBuffersDynamic_ = {}, uint32_t maxDescriptorSetStorageBuffers_ = {}, uint32_t maxDescriptorSetStorageBuffersDynamic_ = {}, uint32_t maxDescriptorSetSampledImages_ = {}, uint32_t maxDescriptorSetStorageImages_ = {}, uint32_t maxDescriptorSetInputAttachments_ = {}, uint32_t maxVertexInputAttributes_ = {}, uint32_t maxVertexInputBindings_ = {}, uint32_t maxVertexInputAttributeOffset_ = {}, uint32_t maxVertexInputBindingStride_ = {}, uint32_t maxVertexOutputComponents_ = {}, uint32_t maxTessellationGenerationLevel_ = {}, uint32_t maxTessellationPatchSize_ = {}, uint32_t maxTessellationControlPerVertexInputComponents_ = {}, uint32_t maxTessellationControlPerVertexOutputComponents_ = {}, uint32_t maxTessellationControlPerPatchOutputComponents_ = {}, uint32_t maxTessellationControlTotalOutputComponents_ = {}, uint32_t maxTessellationEvaluationInputComponents_ = {}, uint32_t 
maxTessellationEvaluationOutputComponents_ = {}, uint32_t maxGeometryShaderInvocations_ = {}, uint32_t maxGeometryInputComponents_ = {}, uint32_t maxGeometryOutputComponents_ = {}, uint32_t maxGeometryOutputVertices_ = {}, uint32_t maxGeometryTotalOutputComponents_ = {}, uint32_t maxFragmentInputComponents_ = {}, uint32_t maxFragmentOutputAttachments_ = {}, uint32_t maxFragmentDualSrcAttachments_ = {}, uint32_t maxFragmentCombinedOutputResources_ = {}, uint32_t maxComputeSharedMemorySize_ = {}, std::array<uint32_t,3> const & maxComputeWorkGroupCount_ = {}, uint32_t maxComputeWorkGroupInvocations_ = {}, std::array<uint32_t,3> const & maxComputeWorkGroupSize_ = {}, uint32_t subPixelPrecisionBits_ = {}, uint32_t subTexelPrecisionBits_ = {}, uint32_t mipmapPrecisionBits_ = {}, uint32_t maxDrawIndexedIndexValue_ = {}, uint32_t maxDrawIndirectCount_ = {}, float maxSamplerLodBias_ = {}, float maxSamplerAnisotropy_ = {}, uint32_t maxViewports_ = {}, std::array<uint32_t,2> const & maxViewportDimensions_ = {}, std::array<float,2> const & viewportBoundsRange_ = {}, uint32_t viewportSubPixelBits_ = {}, size_t minMemoryMapAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize minTexelBufferOffsetAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize minUniformBufferOffsetAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize minStorageBufferOffsetAlignment_ = {}, int32_t minTexelOffset_ = {}, uint32_t maxTexelOffset_ = {}, int32_t minTexelGatherOffset_ = {}, uint32_t maxTexelGatherOffset_ = {}, float minInterpolationOffset_ = {}, float maxInterpolationOffset_ = {}, uint32_t subPixelInterpolationOffsetBits_ = {}, uint32_t maxFramebufferWidth_ = {}, uint32_t maxFramebufferHeight_ = {}, uint32_t maxFramebufferLayers_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferColorSampleCounts_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferDepthSampleCounts_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferStencilSampleCounts_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags 
framebufferNoAttachmentsSampleCounts_ = {}, uint32_t maxColorAttachments_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageColorSampleCounts_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageIntegerSampleCounts_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageDepthSampleCounts_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageStencilSampleCounts_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags storageImageSampleCounts_ = {}, uint32_t maxSampleMaskWords_ = {}, VULKAN_HPP_NAMESPACE::Bool32 timestampComputeAndGraphics_ = {}, float timestampPeriod_ = {}, uint32_t maxClipDistances_ = {}, uint32_t maxCullDistances_ = {}, uint32_t maxCombinedClipAndCullDistances_ = {}, uint32_t discreteQueuePriorities_ = {}, std::array<float,2> const & pointSizeRange_ = {}, std::array<float,2> const & lineWidthRange_ = {}, float pointSizeGranularity_ = {}, float lineWidthGranularity_ = {}, VULKAN_HPP_NAMESPACE::Bool32 strictLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 standardSampleLocations_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyOffsetAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyRowPitchAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize nonCoherentAtomSize_ = {}) VULKAN_HPP_NOEXCEPT
+    : maxImageDimension1D( maxImageDimension1D_ ), maxImageDimension2D( maxImageDimension2D_ ), maxImageDimension3D( maxImageDimension3D_ ), maxImageDimensionCube( maxImageDimensionCube_ ), maxImageArrayLayers( maxImageArrayLayers_ ), maxTexelBufferElements( maxTexelBufferElements_ ), maxUniformBufferRange( maxUniformBufferRange_ ), maxStorageBufferRange( maxStorageBufferRange_ ), maxPushConstantsSize( maxPushConstantsSize_ ), maxMemoryAllocationCount( maxMemoryAllocationCount_ ), maxSamplerAllocationCount( maxSamplerAllocationCount_ ), bufferImageGranularity( bufferImageGranularity_ ), sparseAddressSpaceSize( sparseAddressSpaceSize_ ), maxBoundDescriptorSets( maxBoundDescriptorSets_ ), maxPerStageDescriptorSamplers( maxPerStageDescriptorSamplers_ ), maxPerStageDescriptorUniformBuffers( maxPerStageDescriptorUniformBuffers_ ), maxPerStageDescriptorStorageBuffers( maxPerStageDescriptorStorageBuffers_ ), maxPerStageDescriptorSampledImages( maxPerStageDescriptorSampledImages_ ), maxPerStageDescriptorStorageImages( maxPerStageDescriptorStorageImages_ ), maxPerStageDescriptorInputAttachments( maxPerStageDescriptorInputAttachments_ ), maxPerStageResources( maxPerStageResources_ ), maxDescriptorSetSamplers( maxDescriptorSetSamplers_ ), maxDescriptorSetUniformBuffers( maxDescriptorSetUniformBuffers_ ), maxDescriptorSetUniformBuffersDynamic( maxDescriptorSetUniformBuffersDynamic_ ), maxDescriptorSetStorageBuffers( maxDescriptorSetStorageBuffers_ ), maxDescriptorSetStorageBuffersDynamic( maxDescriptorSetStorageBuffersDynamic_ ), maxDescriptorSetSampledImages( maxDescriptorSetSampledImages_ ), maxDescriptorSetStorageImages( maxDescriptorSetStorageImages_ ), maxDescriptorSetInputAttachments( maxDescriptorSetInputAttachments_ ), maxVertexInputAttributes( maxVertexInputAttributes_ ), maxVertexInputBindings( maxVertexInputBindings_ ), maxVertexInputAttributeOffset( maxVertexInputAttributeOffset_ ), maxVertexInputBindingStride( maxVertexInputBindingStride_ ), 
maxVertexOutputComponents( maxVertexOutputComponents_ ), maxTessellationGenerationLevel( maxTessellationGenerationLevel_ ), maxTessellationPatchSize( maxTessellationPatchSize_ ), maxTessellationControlPerVertexInputComponents( maxTessellationControlPerVertexInputComponents_ ), maxTessellationControlPerVertexOutputComponents( maxTessellationControlPerVertexOutputComponents_ ), maxTessellationControlPerPatchOutputComponents( maxTessellationControlPerPatchOutputComponents_ ), maxTessellationControlTotalOutputComponents( maxTessellationControlTotalOutputComponents_ ), maxTessellationEvaluationInputComponents( maxTessellationEvaluationInputComponents_ ), maxTessellationEvaluationOutputComponents( maxTessellationEvaluationOutputComponents_ ), maxGeometryShaderInvocations( maxGeometryShaderInvocations_ ), maxGeometryInputComponents( maxGeometryInputComponents_ ), maxGeometryOutputComponents( maxGeometryOutputComponents_ ), maxGeometryOutputVertices( maxGeometryOutputVertices_ ), maxGeometryTotalOutputComponents( maxGeometryTotalOutputComponents_ ), maxFragmentInputComponents( maxFragmentInputComponents_ ), maxFragmentOutputAttachments( maxFragmentOutputAttachments_ ), maxFragmentDualSrcAttachments( maxFragmentDualSrcAttachments_ ), maxFragmentCombinedOutputResources( maxFragmentCombinedOutputResources_ ), maxComputeSharedMemorySize( maxComputeSharedMemorySize_ ), maxComputeWorkGroupCount( maxComputeWorkGroupCount_ ), maxComputeWorkGroupInvocations( maxComputeWorkGroupInvocations_ ), maxComputeWorkGroupSize( maxComputeWorkGroupSize_ ), subPixelPrecisionBits( subPixelPrecisionBits_ ), subTexelPrecisionBits( subTexelPrecisionBits_ ), mipmapPrecisionBits( mipmapPrecisionBits_ ), maxDrawIndexedIndexValue( maxDrawIndexedIndexValue_ ), maxDrawIndirectCount( maxDrawIndirectCount_ ), maxSamplerLodBias( maxSamplerLodBias_ ), maxSamplerAnisotropy( maxSamplerAnisotropy_ ), maxViewports( maxViewports_ ), maxViewportDimensions( maxViewportDimensions_ ), viewportBoundsRange( 
viewportBoundsRange_ ), viewportSubPixelBits( viewportSubPixelBits_ ), minMemoryMapAlignment( minMemoryMapAlignment_ ), minTexelBufferOffsetAlignment( minTexelBufferOffsetAlignment_ ), minUniformBufferOffsetAlignment( minUniformBufferOffsetAlignment_ ), minStorageBufferOffsetAlignment( minStorageBufferOffsetAlignment_ ), minTexelOffset( minTexelOffset_ ), maxTexelOffset( maxTexelOffset_ ), minTexelGatherOffset( minTexelGatherOffset_ ), maxTexelGatherOffset( maxTexelGatherOffset_ ), minInterpolationOffset( minInterpolationOffset_ ), maxInterpolationOffset( maxInterpolationOffset_ ), subPixelInterpolationOffsetBits( subPixelInterpolationOffsetBits_ ), maxFramebufferWidth( maxFramebufferWidth_ ), maxFramebufferHeight( maxFramebufferHeight_ ), maxFramebufferLayers( maxFramebufferLayers_ ), framebufferColorSampleCounts( framebufferColorSampleCounts_ ), framebufferDepthSampleCounts( framebufferDepthSampleCounts_ ), framebufferStencilSampleCounts( framebufferStencilSampleCounts_ ), framebufferNoAttachmentsSampleCounts( framebufferNoAttachmentsSampleCounts_ ), maxColorAttachments( maxColorAttachments_ ), sampledImageColorSampleCounts( sampledImageColorSampleCounts_ ), sampledImageIntegerSampleCounts( sampledImageIntegerSampleCounts_ ), sampledImageDepthSampleCounts( sampledImageDepthSampleCounts_ ), sampledImageStencilSampleCounts( sampledImageStencilSampleCounts_ ), storageImageSampleCounts( storageImageSampleCounts_ ), maxSampleMaskWords( maxSampleMaskWords_ ), timestampComputeAndGraphics( timestampComputeAndGraphics_ ), timestampPeriod( timestampPeriod_ ), maxClipDistances( maxClipDistances_ ), maxCullDistances( maxCullDistances_ ), maxCombinedClipAndCullDistances( maxCombinedClipAndCullDistances_ ), discreteQueuePriorities( discreteQueuePriorities_ ), pointSizeRange( pointSizeRange_ ), lineWidthRange( lineWidthRange_ ), pointSizeGranularity( pointSizeGranularity_ ), lineWidthGranularity( lineWidthGranularity_ ), strictLines( strictLines_ ), standardSampleLocations( 
standardSampleLocations_ ), optimalBufferCopyOffsetAlignment( optimalBufferCopyOffsetAlignment_ ), optimalBufferCopyRowPitchAlignment( optimalBufferCopyRowPitchAlignment_ ), nonCoherentAtomSize( nonCoherentAtomSize_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLimits( PhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceLimits( VkPhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceLimits( *reinterpret_cast<PhysicalDeviceLimits const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceLimits & operator=( PhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceLimits & operator=( VkPhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceLimits const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceLimits*>( this );
+    }
+
+    operator VkPhysicalDeviceLimits &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceLimits*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, float const &, float const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 2> const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 2> const &, uint32_t const &, size_t const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, int32_t const &, uint32_t const &, int32_t const &, uint32_t const &, float const &, float const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::SampleCountFlags const &, VULKAN_HPP_NAMESPACE::SampleCountFlags const &, VULKAN_HPP_NAMESPACE::SampleCountFlags const &, VULKAN_HPP_NAMESPACE::SampleCountFlags const &, uint32_t const &, VULKAN_HPP_NAMESPACE::SampleCountFlags const &, 
VULKAN_HPP_NAMESPACE::SampleCountFlags const &, VULKAN_HPP_NAMESPACE::SampleCountFlags const &, VULKAN_HPP_NAMESPACE::SampleCountFlags const &, VULKAN_HPP_NAMESPACE::SampleCountFlags const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &, float const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 2> const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 2> const &, float const &, float const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( maxImageDimension1D, maxImageDimension2D, maxImageDimension3D, maxImageDimensionCube, maxImageArrayLayers, maxTexelBufferElements, maxUniformBufferRange, maxStorageBufferRange, maxPushConstantsSize, maxMemoryAllocationCount, maxSamplerAllocationCount, bufferImageGranularity, sparseAddressSpaceSize, maxBoundDescriptorSets, maxPerStageDescriptorSamplers, maxPerStageDescriptorUniformBuffers, maxPerStageDescriptorStorageBuffers, maxPerStageDescriptorSampledImages, maxPerStageDescriptorStorageImages, maxPerStageDescriptorInputAttachments, maxPerStageResources, maxDescriptorSetSamplers, maxDescriptorSetUniformBuffers, maxDescriptorSetUniformBuffersDynamic, maxDescriptorSetStorageBuffers, maxDescriptorSetStorageBuffersDynamic, maxDescriptorSetSampledImages, maxDescriptorSetStorageImages, maxDescriptorSetInputAttachments, maxVertexInputAttributes, maxVertexInputBindings, maxVertexInputAttributeOffset, maxVertexInputBindingStride, maxVertexOutputComponents, maxTessellationGenerationLevel, maxTessellationPatchSize, maxTessellationControlPerVertexInputComponents, maxTessellationControlPerVertexOutputComponents, maxTessellationControlPerPatchOutputComponents, maxTessellationControlTotalOutputComponents, maxTessellationEvaluationInputComponents, maxTessellationEvaluationOutputComponents, maxGeometryShaderInvocations, maxGeometryInputComponents, maxGeometryOutputComponents, maxGeometryOutputVertices, maxGeometryTotalOutputComponents, maxFragmentInputComponents, maxFragmentOutputAttachments, maxFragmentDualSrcAttachments, maxFragmentCombinedOutputResources, maxComputeSharedMemorySize, maxComputeWorkGroupCount, maxComputeWorkGroupInvocations, maxComputeWorkGroupSize, subPixelPrecisionBits, subTexelPrecisionBits, mipmapPrecisionBits, maxDrawIndexedIndexValue, maxDrawIndirectCount, maxSamplerLodBias, maxSamplerAnisotropy, maxViewports, maxViewportDimensions, viewportBoundsRange, viewportSubPixelBits, minMemoryMapAlignment, minTexelBufferOffsetAlignment, 
minUniformBufferOffsetAlignment, minStorageBufferOffsetAlignment, minTexelOffset, maxTexelOffset, minTexelGatherOffset, maxTexelGatherOffset, minInterpolationOffset, maxInterpolationOffset, subPixelInterpolationOffsetBits, maxFramebufferWidth, maxFramebufferHeight, maxFramebufferLayers, framebufferColorSampleCounts, framebufferDepthSampleCounts, framebufferStencilSampleCounts, framebufferNoAttachmentsSampleCounts, maxColorAttachments, sampledImageColorSampleCounts, sampledImageIntegerSampleCounts, sampledImageDepthSampleCounts, sampledImageStencilSampleCounts, storageImageSampleCounts, maxSampleMaskWords, timestampComputeAndGraphics, timestampPeriod, maxClipDistances, maxCullDistances, maxCombinedClipAndCullDistances, discreteQueuePriorities, pointSizeRange, lineWidthRange, pointSizeGranularity, lineWidthGranularity, strictLines, standardSampleLocations, optimalBufferCopyOffsetAlignment, optimalBufferCopyRowPitchAlignment, nonCoherentAtomSize );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceLimits const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceLimits const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( maxImageDimension1D == rhs.maxImageDimension1D )
+          && ( maxImageDimension2D == rhs.maxImageDimension2D )
+          && ( maxImageDimension3D == rhs.maxImageDimension3D )
+          && ( maxImageDimensionCube == rhs.maxImageDimensionCube )
+          && ( maxImageArrayLayers == rhs.maxImageArrayLayers )
+          && ( maxTexelBufferElements == rhs.maxTexelBufferElements )
+          && ( maxUniformBufferRange == rhs.maxUniformBufferRange )
+          && ( maxStorageBufferRange == rhs.maxStorageBufferRange )
+          && ( maxPushConstantsSize == rhs.maxPushConstantsSize )
+          && ( maxMemoryAllocationCount == rhs.maxMemoryAllocationCount )
+          && ( maxSamplerAllocationCount == rhs.maxSamplerAllocationCount )
+          && ( bufferImageGranularity == rhs.bufferImageGranularity )
+          && ( sparseAddressSpaceSize == rhs.sparseAddressSpaceSize )
+          && ( maxBoundDescriptorSets == rhs.maxBoundDescriptorSets )
+          && ( maxPerStageDescriptorSamplers == rhs.maxPerStageDescriptorSamplers )
+          && ( maxPerStageDescriptorUniformBuffers == rhs.maxPerStageDescriptorUniformBuffers )
+          && ( maxPerStageDescriptorStorageBuffers == rhs.maxPerStageDescriptorStorageBuffers )
+          && ( maxPerStageDescriptorSampledImages == rhs.maxPerStageDescriptorSampledImages )
+          && ( maxPerStageDescriptorStorageImages == rhs.maxPerStageDescriptorStorageImages )
+          && ( maxPerStageDescriptorInputAttachments == rhs.maxPerStageDescriptorInputAttachments )
+          && ( maxPerStageResources == rhs.maxPerStageResources )
+          && ( maxDescriptorSetSamplers == rhs.maxDescriptorSetSamplers )
+          && ( maxDescriptorSetUniformBuffers == rhs.maxDescriptorSetUniformBuffers )
+          && ( maxDescriptorSetUniformBuffersDynamic == rhs.maxDescriptorSetUniformBuffersDynamic )
+          && ( maxDescriptorSetStorageBuffers == rhs.maxDescriptorSetStorageBuffers )
+          && ( maxDescriptorSetStorageBuffersDynamic == rhs.maxDescriptorSetStorageBuffersDynamic )
+          && ( maxDescriptorSetSampledImages == rhs.maxDescriptorSetSampledImages )
+          && ( maxDescriptorSetStorageImages == rhs.maxDescriptorSetStorageImages )
+          && ( maxDescriptorSetInputAttachments == rhs.maxDescriptorSetInputAttachments )
+          && ( maxVertexInputAttributes == rhs.maxVertexInputAttributes )
+          && ( maxVertexInputBindings == rhs.maxVertexInputBindings )
+          && ( maxVertexInputAttributeOffset == rhs.maxVertexInputAttributeOffset )
+          && ( maxVertexInputBindingStride == rhs.maxVertexInputBindingStride )
+          && ( maxVertexOutputComponents == rhs.maxVertexOutputComponents )
+          && ( maxTessellationGenerationLevel == rhs.maxTessellationGenerationLevel )
+          && ( maxTessellationPatchSize == rhs.maxTessellationPatchSize )
+          && ( maxTessellationControlPerVertexInputComponents == rhs.maxTessellationControlPerVertexInputComponents )
+          && ( maxTessellationControlPerVertexOutputComponents == rhs.maxTessellationControlPerVertexOutputComponents )
+          && ( maxTessellationControlPerPatchOutputComponents == rhs.maxTessellationControlPerPatchOutputComponents )
+          && ( maxTessellationControlTotalOutputComponents == rhs.maxTessellationControlTotalOutputComponents )
+          && ( maxTessellationEvaluationInputComponents == rhs.maxTessellationEvaluationInputComponents )
+          && ( maxTessellationEvaluationOutputComponents == rhs.maxTessellationEvaluationOutputComponents )
+          && ( maxGeometryShaderInvocations == rhs.maxGeometryShaderInvocations )
+          && ( maxGeometryInputComponents == rhs.maxGeometryInputComponents )
+          && ( maxGeometryOutputComponents == rhs.maxGeometryOutputComponents )
+          && ( maxGeometryOutputVertices == rhs.maxGeometryOutputVertices )
+          && ( maxGeometryTotalOutputComponents == rhs.maxGeometryTotalOutputComponents )
+          && ( maxFragmentInputComponents == rhs.maxFragmentInputComponents )
+          && ( maxFragmentOutputAttachments == rhs.maxFragmentOutputAttachments )
+          && ( maxFragmentDualSrcAttachments == rhs.maxFragmentDualSrcAttachments )
+          && ( maxFragmentCombinedOutputResources == rhs.maxFragmentCombinedOutputResources )
+          && ( maxComputeSharedMemorySize == rhs.maxComputeSharedMemorySize )
+          && ( maxComputeWorkGroupCount == rhs.maxComputeWorkGroupCount )
+          && ( maxComputeWorkGroupInvocations == rhs.maxComputeWorkGroupInvocations )
+          && ( maxComputeWorkGroupSize == rhs.maxComputeWorkGroupSize )
+          && ( subPixelPrecisionBits == rhs.subPixelPrecisionBits )
+          && ( subTexelPrecisionBits == rhs.subTexelPrecisionBits )
+          && ( mipmapPrecisionBits == rhs.mipmapPrecisionBits )
+          && ( maxDrawIndexedIndexValue == rhs.maxDrawIndexedIndexValue )
+          && ( maxDrawIndirectCount == rhs.maxDrawIndirectCount )
+          && ( maxSamplerLodBias == rhs.maxSamplerLodBias )
+          && ( maxSamplerAnisotropy == rhs.maxSamplerAnisotropy )
+          && ( maxViewports == rhs.maxViewports )
+          && ( maxViewportDimensions == rhs.maxViewportDimensions )
+          && ( viewportBoundsRange == rhs.viewportBoundsRange )
+          && ( viewportSubPixelBits == rhs.viewportSubPixelBits )
+          && ( minMemoryMapAlignment == rhs.minMemoryMapAlignment )
+          && ( minTexelBufferOffsetAlignment == rhs.minTexelBufferOffsetAlignment )
+          && ( minUniformBufferOffsetAlignment == rhs.minUniformBufferOffsetAlignment )
+          && ( minStorageBufferOffsetAlignment == rhs.minStorageBufferOffsetAlignment )
+          && ( minTexelOffset == rhs.minTexelOffset )
+          && ( maxTexelOffset == rhs.maxTexelOffset )
+          && ( minTexelGatherOffset == rhs.minTexelGatherOffset )
+          && ( maxTexelGatherOffset == rhs.maxTexelGatherOffset )
+          && ( minInterpolationOffset == rhs.minInterpolationOffset )
+          && ( maxInterpolationOffset == rhs.maxInterpolationOffset )
+          && ( subPixelInterpolationOffsetBits == rhs.subPixelInterpolationOffsetBits )
+          && ( maxFramebufferWidth == rhs.maxFramebufferWidth )
+          && ( maxFramebufferHeight == rhs.maxFramebufferHeight )
+          && ( maxFramebufferLayers == rhs.maxFramebufferLayers )
+          && ( framebufferColorSampleCounts == rhs.framebufferColorSampleCounts )
+          && ( framebufferDepthSampleCounts == rhs.framebufferDepthSampleCounts )
+          && ( framebufferStencilSampleCounts == rhs.framebufferStencilSampleCounts )
+          && ( framebufferNoAttachmentsSampleCounts == rhs.framebufferNoAttachmentsSampleCounts )
+          && ( maxColorAttachments == rhs.maxColorAttachments )
+          && ( sampledImageColorSampleCounts == rhs.sampledImageColorSampleCounts )
+          && ( sampledImageIntegerSampleCounts == rhs.sampledImageIntegerSampleCounts )
+          && ( sampledImageDepthSampleCounts == rhs.sampledImageDepthSampleCounts )
+          && ( sampledImageStencilSampleCounts == rhs.sampledImageStencilSampleCounts )
+          && ( storageImageSampleCounts == rhs.storageImageSampleCounts )
+          && ( maxSampleMaskWords == rhs.maxSampleMaskWords )
+          && ( timestampComputeAndGraphics == rhs.timestampComputeAndGraphics )
+          && ( timestampPeriod == rhs.timestampPeriod )
+          && ( maxClipDistances == rhs.maxClipDistances )
+          && ( maxCullDistances == rhs.maxCullDistances )
+          && ( maxCombinedClipAndCullDistances == rhs.maxCombinedClipAndCullDistances )
+          && ( discreteQueuePriorities == rhs.discreteQueuePriorities )
+          && ( pointSizeRange == rhs.pointSizeRange )
+          && ( lineWidthRange == rhs.lineWidthRange )
+          && ( pointSizeGranularity == rhs.pointSizeGranularity )
+          && ( lineWidthGranularity == rhs.lineWidthGranularity )
+          && ( strictLines == rhs.strictLines )
+          && ( standardSampleLocations == rhs.standardSampleLocations )
+          && ( optimalBufferCopyOffsetAlignment == rhs.optimalBufferCopyOffsetAlignment )
+          && ( optimalBufferCopyRowPitchAlignment == rhs.optimalBufferCopyRowPitchAlignment )
+          && ( nonCoherentAtomSize == rhs.nonCoherentAtomSize );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceLimits const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    uint32_t maxImageDimension1D = {};
+    uint32_t maxImageDimension2D = {};
+    uint32_t maxImageDimension3D = {};
+    uint32_t maxImageDimensionCube = {};
+    uint32_t maxImageArrayLayers = {};
+    uint32_t maxTexelBufferElements = {};
+    uint32_t maxUniformBufferRange = {};
+    uint32_t maxStorageBufferRange = {};
+    uint32_t maxPushConstantsSize = {};
+    uint32_t maxMemoryAllocationCount = {};
+    uint32_t maxSamplerAllocationCount = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize bufferImageGranularity = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize sparseAddressSpaceSize = {};
+    uint32_t maxBoundDescriptorSets = {};
+    uint32_t maxPerStageDescriptorSamplers = {};
+    uint32_t maxPerStageDescriptorUniformBuffers = {};
+    uint32_t maxPerStageDescriptorStorageBuffers = {};
+    uint32_t maxPerStageDescriptorSampledImages = {};
+    uint32_t maxPerStageDescriptorStorageImages = {};
+    uint32_t maxPerStageDescriptorInputAttachments = {};
+    uint32_t maxPerStageResources = {};
+    uint32_t maxDescriptorSetSamplers = {};
+    uint32_t maxDescriptorSetUniformBuffers = {};
+    uint32_t maxDescriptorSetUniformBuffersDynamic = {};
+    uint32_t maxDescriptorSetStorageBuffers = {};
+    uint32_t maxDescriptorSetStorageBuffersDynamic = {};
+    uint32_t maxDescriptorSetSampledImages = {};
+    uint32_t maxDescriptorSetStorageImages = {};
+    uint32_t maxDescriptorSetInputAttachments = {};
+    uint32_t maxVertexInputAttributes = {};
+    uint32_t maxVertexInputBindings = {};
+    uint32_t maxVertexInputAttributeOffset = {};
+    uint32_t maxVertexInputBindingStride = {};
+    uint32_t maxVertexOutputComponents = {};
+    uint32_t maxTessellationGenerationLevel = {};
+    uint32_t maxTessellationPatchSize = {};
+    uint32_t maxTessellationControlPerVertexInputComponents = {};
+    uint32_t maxTessellationControlPerVertexOutputComponents = {};
+    uint32_t maxTessellationControlPerPatchOutputComponents = {};
+    uint32_t maxTessellationControlTotalOutputComponents = {};
+    uint32_t maxTessellationEvaluationInputComponents = {};
+    uint32_t maxTessellationEvaluationOutputComponents = {};
+    uint32_t maxGeometryShaderInvocations = {};
+    uint32_t maxGeometryInputComponents = {};
+    uint32_t maxGeometryOutputComponents = {};
+    uint32_t maxGeometryOutputVertices = {};
+    uint32_t maxGeometryTotalOutputComponents = {};
+    uint32_t maxFragmentInputComponents = {};
+    uint32_t maxFragmentOutputAttachments = {};
+    uint32_t maxFragmentDualSrcAttachments = {};
+    uint32_t maxFragmentCombinedOutputResources = {};
+    uint32_t maxComputeSharedMemorySize = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> maxComputeWorkGroupCount = {};
+    uint32_t maxComputeWorkGroupInvocations = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> maxComputeWorkGroupSize = {};
+    uint32_t subPixelPrecisionBits = {};
+    uint32_t subTexelPrecisionBits = {};
+    uint32_t mipmapPrecisionBits = {};
+    uint32_t maxDrawIndexedIndexValue = {};
+    uint32_t maxDrawIndirectCount = {};
+    float maxSamplerLodBias = {};
+    float maxSamplerAnisotropy = {};
+    uint32_t maxViewports = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 2> maxViewportDimensions = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 2> viewportBoundsRange = {};
+    uint32_t viewportSubPixelBits = {};
+    size_t minMemoryMapAlignment = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize minTexelBufferOffsetAlignment = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize minUniformBufferOffsetAlignment = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize minStorageBufferOffsetAlignment = {};
+    int32_t minTexelOffset = {};
+    uint32_t maxTexelOffset = {};
+    int32_t minTexelGatherOffset = {};
+    uint32_t maxTexelGatherOffset = {};
+    float minInterpolationOffset = {};
+    float maxInterpolationOffset = {};
+    uint32_t subPixelInterpolationOffsetBits = {};
+    uint32_t maxFramebufferWidth = {};
+    uint32_t maxFramebufferHeight = {};
+    uint32_t maxFramebufferLayers = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferColorSampleCounts = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferDepthSampleCounts = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferStencilSampleCounts = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferNoAttachmentsSampleCounts = {};
+    uint32_t maxColorAttachments = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageColorSampleCounts = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageIntegerSampleCounts = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageDepthSampleCounts = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageStencilSampleCounts = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlags storageImageSampleCounts = {};
+    uint32_t maxSampleMaskWords = {};
+    VULKAN_HPP_NAMESPACE::Bool32 timestampComputeAndGraphics = {};
+    float timestampPeriod = {};
+    uint32_t maxClipDistances = {};
+    uint32_t maxCullDistances = {};
+    uint32_t maxCombinedClipAndCullDistances = {};
+    uint32_t discreteQueuePriorities = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 2> pointSizeRange = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 2> lineWidthRange = {};
+    float pointSizeGranularity = {};
+    float lineWidthGranularity = {};
+    VULKAN_HPP_NAMESPACE::Bool32 strictLines = {};
+    VULKAN_HPP_NAMESPACE::Bool32 standardSampleLocations = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyOffsetAlignment = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyRowPitchAlignment = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize nonCoherentAtomSize = {};
+
+  };
+
+  // C++ wrapper for VkPhysicalDeviceLineRasterizationFeaturesEXT (VK_EXT_line_rasterization).
+  // Carries feature flags for rectangular/Bresenham/smooth line rasterization and their
+  // stippled variants. Layout-compatible with the native C struct; the reinterpret_cast
+  // conversions below rely on that identical layout.
+  struct PhysicalDeviceLineRasterizationFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceLineRasterizationFeaturesEXT;
+
+    // NOTE(review): allowDuplicate/structureType are the Vulkan-Hpp structure-chain
+    // traits; sType below is fixed to this structureType value.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor: every feature flag defaults to {} (VK_FALSE), pNext to nullptr.
+VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 rectangularLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 smoothLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), rectangularLines( rectangularLines_ ), bresenhamLines( bresenhamLines_ ), smoothLines( smoothLines_ ), stippledRectangularLines( stippledRectangularLines_ ), stippledBresenhamLines( stippledBresenhamLines_ ), stippledSmoothLines( stippledSmoothLines_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationFeaturesEXT( PhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the native C struct via reinterpret_cast (valid because the two
+    // types share the same memory layout).
+    PhysicalDeviceLineRasterizationFeaturesEXT( VkPhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceLineRasterizationFeaturesEXT( *reinterpret_cast<PhysicalDeviceLineRasterizationFeaturesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceLineRasterizationFeaturesEXT & operator=( PhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assignment from the native C struct, again by reinterpreting the layout-identical type.
+    PhysicalDeviceLineRasterizationFeaturesEXT & operator=( VkPhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable fluent setters: each assigns one member and returns *this.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeaturesEXT & setRectangularLines( VULKAN_HPP_NAMESPACE::Bool32 rectangularLines_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rectangularLines = rectangularLines_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeaturesEXT & setBresenhamLines( VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bresenhamLines = bresenhamLines_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeaturesEXT & setSmoothLines( VULKAN_HPP_NAMESPACE::Bool32 smoothLines_ ) VULKAN_HPP_NOEXCEPT
+    {
+      smoothLines = smoothLines_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeaturesEXT & setStippledRectangularLines( VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stippledRectangularLines = stippledRectangularLines_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeaturesEXT & setStippledBresenhamLines( VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stippledBresenhamLines = stippledBresenhamLines_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeaturesEXT & setStippledSmoothLines( VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stippledSmoothLines = stippledSmoothLines_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the native C struct, for passing this object straight
+    // to the Vulkan C API.
+    operator VkPhysicalDeviceLineRasterizationFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceLineRasterizationFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceLineRasterizationFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceLineRasterizationFeaturesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Tuple-of-references view over all members; used by the reflection-based
+    // operator== below.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, rectangularLines, bresenhamLines, smoothLines, stippledRectangularLines, stippledBresenhamLines, stippledSmoothLines );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceLineRasterizationFeaturesEXT const & ) const = default;
+#else
+    // Member-wise equality; note pNext is compared by pointer value, not by chasing
+    // the chain.
+    bool operator==( PhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( rectangularLines == rhs.rectangularLines )
+          && ( bresenhamLines == rhs.bresenhamLines )
+          && ( smoothLines == rhs.smoothLines )
+          && ( stippledRectangularLines == rhs.stippledRectangularLines )
+          && ( stippledBresenhamLines == rhs.stippledBresenhamLines )
+          && ( stippledSmoothLines == rhs.stippledSmoothLines );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members mirror VkPhysicalDeviceLineRasterizationFeaturesEXT field-for-field.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 rectangularLines = {};
+    VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines = {};
+    VULKAN_HPP_NAMESPACE::Bool32 smoothLines = {};
+    VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines = {};
+    VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines = {};
+    VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines = {};
+
+  };
+
+  // Maps the StructureType enumerant back to its C++ struct type (compile-time lookup).
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT>
+  {
+    using Type = PhysicalDeviceLineRasterizationFeaturesEXT;
+  };
+
+  // C++ wrapper for VkPhysicalDeviceLineRasterizationPropertiesEXT (VK_EXT_line_rasterization).
+  // Read-only implementation properties (hence no setters besides construction):
+  // reports lineSubPixelPrecisionBits. Layout-compatible with the native C struct.
+  struct PhysicalDeviceLineRasterizationPropertiesEXT
+  {
+    using NativeType = VkPhysicalDeviceLineRasterizationPropertiesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; lineSubPixelPrecisionBits defaults to 0, pNext to nullptr.
+VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationPropertiesEXT(uint32_t lineSubPixelPrecisionBits_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), lineSubPixelPrecisionBits( lineSubPixelPrecisionBits_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationPropertiesEXT( PhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the native C struct via reinterpret_cast (identical layout).
+    PhysicalDeviceLineRasterizationPropertiesEXT( VkPhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceLineRasterizationPropertiesEXT( *reinterpret_cast<PhysicalDeviceLineRasterizationPropertiesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceLineRasterizationPropertiesEXT & operator=( PhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assignment from the native C struct (layout-identical reinterpret).
+    PhysicalDeviceLineRasterizationPropertiesEXT & operator=( VkPhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+
+    // Implicit conversions to the native C struct for use with the Vulkan C API.
+    operator VkPhysicalDeviceLineRasterizationPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceLineRasterizationPropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceLineRasterizationPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceLineRasterizationPropertiesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Tuple-of-references view over all members, used by reflection-based comparison.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, lineSubPixelPrecisionBits );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceLineRasterizationPropertiesEXT const & ) const = default;
+#else
+    // Member-wise equality; pNext compared by pointer value only.
+    bool operator==( PhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( lineSubPixelPrecisionBits == rhs.lineSubPixelPrecisionBits );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members mirror VkPhysicalDeviceLineRasterizationPropertiesEXT field-for-field.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT;
+    void * pNext = {};
+    uint32_t lineSubPixelPrecisionBits = {};
+
+  };
+
+  // Maps the StructureType enumerant back to its C++ struct type.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT>
+  {
+    using Type = PhysicalDeviceLineRasterizationPropertiesEXT;
+  };
+
+  // C++ wrapper for VkPhysicalDeviceLinearColorAttachmentFeaturesNV
+  // (NV linear color attachment extension). Single feature flag:
+  // linearColorAttachment. Layout-compatible with the native C struct.
+  struct PhysicalDeviceLinearColorAttachmentFeaturesNV
+  {
+    using NativeType = VkPhysicalDeviceLinearColorAttachmentFeaturesNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLinearColorAttachmentFeaturesNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; the feature flag defaults to {} (VK_FALSE), pNext to nullptr.
+VULKAN_HPP_CONSTEXPR PhysicalDeviceLinearColorAttachmentFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 linearColorAttachment_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), linearColorAttachment( linearColorAttachment_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceLinearColorAttachmentFeaturesNV( PhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the native C struct via reinterpret_cast (identical layout).
+    PhysicalDeviceLinearColorAttachmentFeaturesNV( VkPhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceLinearColorAttachmentFeaturesNV( *reinterpret_cast<PhysicalDeviceLinearColorAttachmentFeaturesNV const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceLinearColorAttachmentFeaturesNV & operator=( PhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assignment from the native C struct (layout-identical reinterpret).
+    PhysicalDeviceLinearColorAttachmentFeaturesNV & operator=( VkPhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceLinearColorAttachmentFeaturesNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable fluent setters: assign one member, return *this.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLinearColorAttachmentFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLinearColorAttachmentFeaturesNV & setLinearColorAttachment( VULKAN_HPP_NAMESPACE::Bool32 linearColorAttachment_ ) VULKAN_HPP_NOEXCEPT
+    {
+      linearColorAttachment = linearColorAttachment_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the native C struct for use with the Vulkan C API.
+    operator VkPhysicalDeviceLinearColorAttachmentFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceLinearColorAttachmentFeaturesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceLinearColorAttachmentFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceLinearColorAttachmentFeaturesNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Tuple-of-references view over all members, used by reflection-based comparison.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, linearColorAttachment );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceLinearColorAttachmentFeaturesNV const & ) const = default;
+#else
+    // Member-wise equality; pNext compared by pointer value only.
+    bool operator==( PhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( linearColorAttachment == rhs.linearColorAttachment );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members mirror VkPhysicalDeviceLinearColorAttachmentFeaturesNV field-for-field.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLinearColorAttachmentFeaturesNV;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 linearColorAttachment = {};
+
+  };
+
+  // Maps the StructureType enumerant back to its C++ struct type.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceLinearColorAttachmentFeaturesNV>
+  {
+    using Type = PhysicalDeviceLinearColorAttachmentFeaturesNV;
+  };
+
+  // C++ wrapper for VkPhysicalDeviceMaintenance3Properties (core Vulkan 1.1;
+  // KHR alias below). Read-only implementation limits — no setters:
+  // maxPerSetDescriptors and maxMemoryAllocationSize.
+  // Layout-compatible with the native C struct.
+  struct PhysicalDeviceMaintenance3Properties
+  {
+    using NativeType = VkPhysicalDeviceMaintenance3Properties;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance3Properties;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; both limits default to 0, pNext to nullptr.
+VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance3Properties(uint32_t maxPerSetDescriptors_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), maxPerSetDescriptors( maxPerSetDescriptors_ ), maxMemoryAllocationSize( maxMemoryAllocationSize_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance3Properties( PhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the native C struct via reinterpret_cast (identical layout).
+    PhysicalDeviceMaintenance3Properties( VkPhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceMaintenance3Properties( *reinterpret_cast<PhysicalDeviceMaintenance3Properties const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceMaintenance3Properties & operator=( PhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assignment from the native C struct (layout-identical reinterpret).
+    PhysicalDeviceMaintenance3Properties & operator=( VkPhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties const *>( &rhs );
+      return *this;
+    }
+
+
+    // Implicit conversions to the native C struct for use with the Vulkan C API.
+    operator VkPhysicalDeviceMaintenance3Properties const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMaintenance3Properties*>( this );
+    }
+
+    operator VkPhysicalDeviceMaintenance3Properties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceMaintenance3Properties*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Tuple-of-references view over all members, used by reflection-based comparison.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, maxPerSetDescriptors, maxMemoryAllocationSize );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceMaintenance3Properties const & ) const = default;
+#else
+    // Member-wise equality; pNext compared by pointer value only.
+    bool operator==( PhysicalDeviceMaintenance3Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxPerSetDescriptors == rhs.maxPerSetDescriptors )
+          && ( maxMemoryAllocationSize == rhs.maxMemoryAllocationSize );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceMaintenance3Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members mirror VkPhysicalDeviceMaintenance3Properties field-for-field.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance3Properties;
+    void * pNext = {};
+    uint32_t maxPerSetDescriptors = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize = {};
+
+  };
+
+  // Maps the StructureType enumerant back to its C++ struct type.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceMaintenance3Properties>
+  {
+    using Type = PhysicalDeviceMaintenance3Properties;
+  };
+  // Alias kept for code written against the VK_KHR_maintenance3 extension name.
+  using PhysicalDeviceMaintenance3PropertiesKHR = PhysicalDeviceMaintenance3Properties;
+
+  // C++ wrapper for VkPhysicalDeviceMaintenance4Features (core Vulkan 1.3;
+  // KHR alias below). Single feature flag: maintenance4.
+  // Layout-compatible with the native C struct.
+  struct PhysicalDeviceMaintenance4Features
+  {
+    using NativeType = VkPhysicalDeviceMaintenance4Features;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance4Features;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; the feature flag defaults to {} (VK_FALSE), pNext to nullptr.
+VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance4Features(VULKAN_HPP_NAMESPACE::Bool32 maintenance4_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), maintenance4( maintenance4_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance4Features( PhysicalDeviceMaintenance4Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the native C struct via reinterpret_cast (identical layout).
+    PhysicalDeviceMaintenance4Features( VkPhysicalDeviceMaintenance4Features const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceMaintenance4Features( *reinterpret_cast<PhysicalDeviceMaintenance4Features const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceMaintenance4Features & operator=( PhysicalDeviceMaintenance4Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assignment from the native C struct (layout-identical reinterpret).
+    PhysicalDeviceMaintenance4Features & operator=( VkPhysicalDeviceMaintenance4Features const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Features const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable fluent setters: assign one member, return *this.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance4Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance4Features & setMaintenance4( VULKAN_HPP_NAMESPACE::Bool32 maintenance4_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maintenance4 = maintenance4_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the native C struct for use with the Vulkan C API.
+    operator VkPhysicalDeviceMaintenance4Features const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMaintenance4Features*>( this );
+    }
+
+    operator VkPhysicalDeviceMaintenance4Features &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceMaintenance4Features*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Tuple-of-references view over all members, used by reflection-based comparison.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, maintenance4 );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceMaintenance4Features const & ) const = default;
+#else
+    // Member-wise equality; pNext compared by pointer value only.
+    bool operator==( PhysicalDeviceMaintenance4Features const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maintenance4 == rhs.maintenance4 );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceMaintenance4Features const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members mirror VkPhysicalDeviceMaintenance4Features field-for-field.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance4Features;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 maintenance4 = {};
+
+  };
+
+  // Maps the StructureType enumerant back to its C++ struct type.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceMaintenance4Features>
+  {
+    using Type = PhysicalDeviceMaintenance4Features;
+  };
+  // Alias kept for code written against the VK_KHR_maintenance4 extension name.
+  using PhysicalDeviceMaintenance4FeaturesKHR = PhysicalDeviceMaintenance4Features;
+
+  struct PhysicalDeviceMaintenance4Properties  // C++ wrapper for VkPhysicalDeviceMaintenance4Properties; the reinterpret_cast conversions below assume identical layout
+  {
+    using NativeType = VkPhysicalDeviceMaintenance4Properties;  // underlying Vulkan C struct
+
+    static const bool allowDuplicate = false;  // NOTE(review): presumably "at most one instance per pNext chain" — confirm against generator docs
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance4Properties;  // sType value identifying this struct
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance4Properties(VULKAN_HPP_NAMESPACE::DeviceSize maxBufferSize_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), maxBufferSize( maxBufferSize_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance4Properties( PhysicalDeviceMaintenance4Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceMaintenance4Properties( VkPhysicalDeviceMaintenance4Properties const & rhs ) VULKAN_HPP_NOEXCEPT  // construct from the C struct (bitwise reinterpretation)
+      : PhysicalDeviceMaintenance4Properties( *reinterpret_cast<PhysicalDeviceMaintenance4Properties const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceMaintenance4Properties & operator=( PhysicalDeviceMaintenance4Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceMaintenance4Properties & operator=( VkPhysicalDeviceMaintenance4Properties const & rhs ) VULKAN_HPP_NOEXCEPT  // assign from the C struct
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Properties const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceMaintenance4Properties const &() const VULKAN_HPP_NOEXCEPT  // zero-copy view as the C struct
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMaintenance4Properties*>( this );
+    }
+
+    operator VkPhysicalDeviceMaintenance4Properties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceMaintenance4Properties*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT  // tuple of references to all members; backs the reflect-based operator==
+    {
+      return std::tie( sType, pNext, maxBufferSize );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceMaintenance4Properties const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceMaintenance4Properties const & rhs ) const VULKAN_HPP_NOEXCEPT  // memberwise comparison (pNext compared as a raw pointer)
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxBufferSize == rhs.maxBufferSize );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceMaintenance4Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance4Properties;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize maxBufferSize = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceMaintenance4Properties>  // maps the sType enum value back to its C++ wrapper type
+  {
+    using Type = PhysicalDeviceMaintenance4Properties;
+  };
+  using PhysicalDeviceMaintenance4PropertiesKHR = PhysicalDeviceMaintenance4Properties;  // KHR extension name aliases the promoted core struct
+
+  struct PhysicalDeviceMemoryBudgetPropertiesEXT  // C++ wrapper for VkPhysicalDeviceMemoryBudgetPropertiesEXT; reinterpret_cast conversions assume identical layout
+  {
+    using NativeType = VkPhysicalDeviceMemoryBudgetPropertiesEXT;  // underlying Vulkan C struct
+
+    static const bool allowDuplicate = false;  // NOTE(review): presumably "at most one instance per pNext chain" — confirm against generator docs
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT;  // sType value identifying this struct
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryBudgetPropertiesEXT(std::array<VULKAN_HPP_NAMESPACE::DeviceSize,VK_MAX_MEMORY_HEAPS> const & heapBudget_ = {}, std::array<VULKAN_HPP_NAMESPACE::DeviceSize,VK_MAX_MEMORY_HEAPS> const & heapUsage_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), heapBudget( heapBudget_ ), heapUsage( heapUsage_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryBudgetPropertiesEXT( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceMemoryBudgetPropertiesEXT( VkPhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT  // construct from the C struct (bitwise reinterpretation)
+      : PhysicalDeviceMemoryBudgetPropertiesEXT( *reinterpret_cast<PhysicalDeviceMemoryBudgetPropertiesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceMemoryBudgetPropertiesEXT & operator=( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceMemoryBudgetPropertiesEXT & operator=( VkPhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT  // assign from the C struct
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceMemoryBudgetPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT  // zero-copy view as the C struct
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMemoryBudgetPropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceMemoryBudgetPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceMemoryBudgetPropertiesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::DeviceSize, VK_MAX_MEMORY_HEAPS> const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::DeviceSize, VK_MAX_MEMORY_HEAPS> const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT  // tuple of references to all members; backs the reflect-based operator==
+    {
+      return std::tie( sType, pNext, heapBudget, heapUsage );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceMemoryBudgetPropertiesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT  // memberwise comparison (pNext compared as a raw pointer)
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( heapBudget == rhs.heapBudget )
+          && ( heapUsage == rhs.heapUsage );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::DeviceSize, VK_MAX_MEMORY_HEAPS> heapBudget = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::DeviceSize, VK_MAX_MEMORY_HEAPS> heapUsage = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT>  // maps the sType enum value back to its C++ wrapper type
+  {
+    using Type = PhysicalDeviceMemoryBudgetPropertiesEXT;
+  };
+
+  struct PhysicalDeviceMemoryDecompressionFeaturesNV  // C++ wrapper for VkPhysicalDeviceMemoryDecompressionFeaturesNV; reinterpret_cast conversions assume identical layout
+  {
+    using NativeType = VkPhysicalDeviceMemoryDecompressionFeaturesNV;  // underlying Vulkan C struct
+
+    static const bool allowDuplicate = false;  // NOTE(review): presumably "at most one instance per pNext chain" — confirm against generator docs
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryDecompressionFeaturesNV;  // sType value identifying this struct
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryDecompressionFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 memoryDecompression_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), memoryDecompression( memoryDecompression_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryDecompressionFeaturesNV( PhysicalDeviceMemoryDecompressionFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceMemoryDecompressionFeaturesNV( VkPhysicalDeviceMemoryDecompressionFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT  // construct from the C struct (bitwise reinterpretation)
+      : PhysicalDeviceMemoryDecompressionFeaturesNV( *reinterpret_cast<PhysicalDeviceMemoryDecompressionFeaturesNV const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceMemoryDecompressionFeaturesNV & operator=( PhysicalDeviceMemoryDecompressionFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceMemoryDecompressionFeaturesNV & operator=( VkPhysicalDeviceMemoryDecompressionFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT  // assign from the C struct
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryDecompressionFeaturesNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryDecompressionFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT  // chainable setter: overwrites the pNext pointer
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryDecompressionFeaturesNV & setMemoryDecompression( VULKAN_HPP_NAMESPACE::Bool32 memoryDecompression_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memoryDecompression = memoryDecompression_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceMemoryDecompressionFeaturesNV const &() const VULKAN_HPP_NOEXCEPT  // zero-copy view as the C struct
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMemoryDecompressionFeaturesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceMemoryDecompressionFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceMemoryDecompressionFeaturesNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT  // tuple of references to all members; backs the reflect-based operator==
+    {
+      return std::tie( sType, pNext, memoryDecompression );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceMemoryDecompressionFeaturesNV const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceMemoryDecompressionFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT  // memberwise comparison (pNext compared as a raw pointer)
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memoryDecompression == rhs.memoryDecompression );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceMemoryDecompressionFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryDecompressionFeaturesNV;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 memoryDecompression = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceMemoryDecompressionFeaturesNV>  // maps the sType enum value back to its C++ wrapper type
+  {
+    using Type = PhysicalDeviceMemoryDecompressionFeaturesNV;
+  };
+
+  struct PhysicalDeviceMemoryDecompressionPropertiesNV  // C++ wrapper for VkPhysicalDeviceMemoryDecompressionPropertiesNV; reinterpret_cast conversions assume identical layout
+  {
+    using NativeType = VkPhysicalDeviceMemoryDecompressionPropertiesNV;  // underlying Vulkan C struct
+
+    static const bool allowDuplicate = false;  // NOTE(review): presumably "at most one instance per pNext chain" — confirm against generator docs
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryDecompressionPropertiesNV;  // sType value identifying this struct
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryDecompressionPropertiesNV(VULKAN_HPP_NAMESPACE::MemoryDecompressionMethodFlagsNV decompressionMethods_ = {}, uint64_t maxDecompressionIndirectCount_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), decompressionMethods( decompressionMethods_ ), maxDecompressionIndirectCount( maxDecompressionIndirectCount_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryDecompressionPropertiesNV( PhysicalDeviceMemoryDecompressionPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceMemoryDecompressionPropertiesNV( VkPhysicalDeviceMemoryDecompressionPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT  // construct from the C struct (bitwise reinterpretation)
+      : PhysicalDeviceMemoryDecompressionPropertiesNV( *reinterpret_cast<PhysicalDeviceMemoryDecompressionPropertiesNV const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceMemoryDecompressionPropertiesNV & operator=( PhysicalDeviceMemoryDecompressionPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceMemoryDecompressionPropertiesNV & operator=( VkPhysicalDeviceMemoryDecompressionPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT  // assign from the C struct
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryDecompressionPropertiesNV const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceMemoryDecompressionPropertiesNV const &() const VULKAN_HPP_NOEXCEPT  // zero-copy view as the C struct
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMemoryDecompressionPropertiesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceMemoryDecompressionPropertiesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceMemoryDecompressionPropertiesNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::MemoryDecompressionMethodFlagsNV const &, uint64_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT  // tuple of references to all members; backs the reflect-based operator==
+    {
+      return std::tie( sType, pNext, decompressionMethods, maxDecompressionIndirectCount );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceMemoryDecompressionPropertiesNV const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceMemoryDecompressionPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT  // memberwise comparison (pNext compared as a raw pointer)
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( decompressionMethods == rhs.decompressionMethods )
+          && ( maxDecompressionIndirectCount == rhs.maxDecompressionIndirectCount );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceMemoryDecompressionPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryDecompressionPropertiesNV;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::MemoryDecompressionMethodFlagsNV decompressionMethods = {};
+    uint64_t maxDecompressionIndirectCount = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceMemoryDecompressionPropertiesNV>  // maps the sType enum value back to its C++ wrapper type
+  {
+    using Type = PhysicalDeviceMemoryDecompressionPropertiesNV;
+  };
+
+  struct PhysicalDeviceMemoryPriorityFeaturesEXT  // C++ wrapper for VkPhysicalDeviceMemoryPriorityFeaturesEXT; reinterpret_cast conversions assume identical layout
+  {
+    using NativeType = VkPhysicalDeviceMemoryPriorityFeaturesEXT;  // underlying Vulkan C struct
+
+    static const bool allowDuplicate = false;  // NOTE(review): presumably "at most one instance per pNext chain" — confirm against generator docs
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT;  // sType value identifying this struct
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryPriorityFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 memoryPriority_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), memoryPriority( memoryPriority_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryPriorityFeaturesEXT( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceMemoryPriorityFeaturesEXT( VkPhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT  // construct from the C struct (bitwise reinterpretation)
+      : PhysicalDeviceMemoryPriorityFeaturesEXT( *reinterpret_cast<PhysicalDeviceMemoryPriorityFeaturesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceMemoryPriorityFeaturesEXT & operator=( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceMemoryPriorityFeaturesEXT & operator=( VkPhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT  // assign from the C struct
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryPriorityFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryPriorityFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT  // chainable setter: overwrites the pNext pointer
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryPriorityFeaturesEXT & setMemoryPriority( VULKAN_HPP_NAMESPACE::Bool32 memoryPriority_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memoryPriority = memoryPriority_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceMemoryPriorityFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT  // zero-copy view as the C struct
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMemoryPriorityFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceMemoryPriorityFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceMemoryPriorityFeaturesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT  // tuple of references to all members; backs the reflect-based operator==
+    {
+      return std::tie( sType, pNext, memoryPriority );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceMemoryPriorityFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT  // memberwise comparison (pNext compared as a raw pointer)
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memoryPriority == rhs.memoryPriority );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 memoryPriority = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT>  // maps the sType enum value back to its C++ wrapper type
+  {
+    using Type = PhysicalDeviceMemoryPriorityFeaturesEXT;
+  };
+
+  struct PhysicalDeviceMemoryProperties  // C++ wrapper for VkPhysicalDeviceMemoryProperties; plain struct with no sType/pNext, unlike the extension structs in this file
+  {
+    using NativeType = VkPhysicalDeviceMemoryProperties;  // underlying Vulkan C struct
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties(uint32_t memoryTypeCount_ = {}, std::array<VULKAN_HPP_NAMESPACE::MemoryType,VK_MAX_MEMORY_TYPES> const & memoryTypes_ = {}, uint32_t memoryHeapCount_ = {}, std::array<VULKAN_HPP_NAMESPACE::MemoryHeap,VK_MAX_MEMORY_HEAPS> const & memoryHeaps_ = {}) VULKAN_HPP_NOEXCEPT
+    : memoryTypeCount( memoryTypeCount_ ), memoryTypes( memoryTypes_ ), memoryHeapCount( memoryHeapCount_ ), memoryHeaps( memoryHeaps_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties( PhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceMemoryProperties( VkPhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT  // construct from the C struct (bitwise reinterpretation)
+      : PhysicalDeviceMemoryProperties( *reinterpret_cast<PhysicalDeviceMemoryProperties const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceMemoryProperties & operator=( PhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceMemoryProperties & operator=( VkPhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT  // assign from the C struct
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceMemoryProperties const &() const VULKAN_HPP_NOEXCEPT  // zero-copy view as the C struct
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMemoryProperties*>( this );
+    }
+
+    operator VkPhysicalDeviceMemoryProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceMemoryProperties*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<uint32_t const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::MemoryType, VK_MAX_MEMORY_TYPES> const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::MemoryHeap, VK_MAX_MEMORY_HEAPS> const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT  // tuple of references to all members; backs the reflect-based operator==
+    {
+      return std::tie( memoryTypeCount, memoryTypes, memoryHeapCount, memoryHeaps );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceMemoryProperties const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT  // memberwise comparison; compares the full fixed-size arrays, not just the first *Count entries
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( memoryTypeCount == rhs.memoryTypeCount )
+          && ( memoryTypes == rhs.memoryTypes )
+          && ( memoryHeapCount == rhs.memoryHeapCount )
+          && ( memoryHeaps == rhs.memoryHeaps );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    uint32_t memoryTypeCount = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::MemoryType, VK_MAX_MEMORY_TYPES> memoryTypes = {};
+    uint32_t memoryHeapCount = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::MemoryHeap, VK_MAX_MEMORY_HEAPS> memoryHeaps = {};
+
+  };
+
+  struct PhysicalDeviceMemoryProperties2  // C++ wrapper for VkPhysicalDeviceMemoryProperties2; embeds PhysicalDeviceMemoryProperties and adds sType/pNext
+  {
+    using NativeType = VkPhysicalDeviceMemoryProperties2;  // underlying Vulkan C struct
+
+    static const bool allowDuplicate = false;  // NOTE(review): presumably "at most one instance per pNext chain" — confirm against generator docs
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryProperties2;  // sType value identifying this struct
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties2(VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), memoryProperties( memoryProperties_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties2( PhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceMemoryProperties2( VkPhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT  // construct from the C struct (bitwise reinterpretation)
+      : PhysicalDeviceMemoryProperties2( *reinterpret_cast<PhysicalDeviceMemoryProperties2 const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceMemoryProperties2 & operator=( PhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceMemoryProperties2 & operator=( VkPhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT  // assign from the C struct
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceMemoryProperties2 const &() const VULKAN_HPP_NOEXCEPT  // zero-copy view as the C struct
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMemoryProperties2*>( this );
+    }
+
+    operator VkPhysicalDeviceMemoryProperties2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceMemoryProperties2*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT  // tuple of references to all members; backs the reflect-based operator==
+    {
+      return std::tie( sType, pNext, memoryProperties );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceMemoryProperties2 const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceMemoryProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT  // memberwise comparison (pNext compared as a raw pointer)
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memoryProperties == rhs.memoryProperties );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceMemoryProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryProperties2;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceMemoryProperties2>  // maps the sType enum value back to its C++ wrapper type
+  {
+    using Type = PhysicalDeviceMemoryProperties2;
+  };
+  using PhysicalDeviceMemoryProperties2KHR = PhysicalDeviceMemoryProperties2;  // KHR extension name aliases the promoted core struct
+
+  struct PhysicalDeviceMeshShaderFeaturesEXT  // C++ wrapper for VkPhysicalDeviceMeshShaderFeaturesEXT; reinterpret_cast conversions assume identical layout
+  {
+    using NativeType = VkPhysicalDeviceMeshShaderFeaturesEXT;  // underlying Vulkan C struct
+
+    static const bool allowDuplicate = false;  // NOTE(review): presumably "at most one instance per pNext chain" — confirm against generator docs
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMeshShaderFeaturesEXT;  // sType value identifying this struct
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 taskShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 meshShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiviewMeshShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRateMeshShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 meshShaderQueries_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), taskShader( taskShader_ ), meshShader( meshShader_ ), multiviewMeshShader( multiviewMeshShader_ ), primitiveFragmentShadingRateMeshShader( primitiveFragmentShadingRateMeshShader_ ), meshShaderQueries( meshShaderQueries_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesEXT( PhysicalDeviceMeshShaderFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceMeshShaderFeaturesEXT( VkPhysicalDeviceMeshShaderFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT  // construct from the C struct (bitwise reinterpretation)
+      : PhysicalDeviceMeshShaderFeaturesEXT( *reinterpret_cast<PhysicalDeviceMeshShaderFeaturesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceMeshShaderFeaturesEXT & operator=( PhysicalDeviceMeshShaderFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceMeshShaderFeaturesEXT & operator=( VkPhysicalDeviceMeshShaderFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT  // assign from the C struct
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT  // chainable setter: overwrites the pNext pointer
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesEXT & setTaskShader( VULKAN_HPP_NAMESPACE::Bool32 taskShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      taskShader = taskShader_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesEXT & setMeshShader( VULKAN_HPP_NAMESPACE::Bool32 meshShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      meshShader = meshShader_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesEXT & setMultiviewMeshShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewMeshShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      multiviewMeshShader = multiviewMeshShader_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesEXT & setPrimitiveFragmentShadingRateMeshShader( VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRateMeshShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      primitiveFragmentShadingRateMeshShader = primitiveFragmentShadingRateMeshShader_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesEXT & setMeshShaderQueries( VULKAN_HPP_NAMESPACE::Bool32 meshShaderQueries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      meshShaderQueries = meshShaderQueries_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceMeshShaderFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT  // zero-copy view as the C struct
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMeshShaderFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceMeshShaderFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceMeshShaderFeaturesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT  // tuple of references to all members; backs the reflect-based operator==
+    {
+      return std::tie( sType, pNext, taskShader, meshShader, multiviewMeshShader, primitiveFragmentShadingRateMeshShader, meshShaderQueries );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceMeshShaderFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceMeshShaderFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT  // memberwise comparison (pNext compared as a raw pointer)
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( taskShader == rhs.taskShader )
+          && ( meshShader == rhs.meshShader )
+          && ( multiviewMeshShader == rhs.multiviewMeshShader )
+          && ( primitiveFragmentShadingRateMeshShader == rhs.primitiveFragmentShadingRateMeshShader )
+          && ( meshShaderQueries == rhs.meshShaderQueries );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceMeshShaderFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMeshShaderFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 taskShader = {};
+    VULKAN_HPP_NAMESPACE::Bool32 meshShader = {};
+    VULKAN_HPP_NAMESPACE::Bool32 multiviewMeshShader = {};
+    VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRateMeshShader = {};
+    VULKAN_HPP_NAMESPACE::Bool32 meshShaderQueries = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceMeshShaderFeaturesEXT>  // maps the sType enum value back to its C++ wrapper type
+  {
+    using Type = PhysicalDeviceMeshShaderFeaturesEXT;
+  };
+
+  // C++ wrapper for VkPhysicalDeviceMeshShaderFeaturesNV (VK_NV_mesh_shader
+  // feature-query struct: taskShader / meshShader booleans). Field order matches
+  // the native struct; the reinterpret_cast conversions below rely on the two
+  // types being layout-compatible. Generated code — do not edit by hand.
+  struct PhysicalDeviceMeshShaderFeaturesNV
+  {
+    using NativeType = VkPhysicalDeviceMeshShaderFeaturesNV;
+
+    // allowDuplicate/structureType drive the structure-chain validation machinery.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMeshShaderFeaturesNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 taskShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 meshShader_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), taskShader( taskShader_ ), meshShader( meshShader_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesNV( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the native C struct by reinterpreting it as the wrapper.
+    PhysicalDeviceMeshShaderFeaturesNV( VkPhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceMeshShaderFeaturesNV( *reinterpret_cast<PhysicalDeviceMeshShaderFeaturesNV const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceMeshShaderFeaturesNV & operator=( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the native C struct (same layout-compatibility assumption).
+    PhysicalDeviceMeshShaderFeaturesNV & operator=( VkPhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters (return *this to allow chaining).
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesNV & setTaskShader( VULKAN_HPP_NAMESPACE::Bool32 taskShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      taskShader = taskShader_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesNV & setMeshShader( VULKAN_HPP_NAMESPACE::Bool32 meshShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      meshShader = meshShader_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the native C struct, for passing to the C API.
+    operator VkPhysicalDeviceMeshShaderFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMeshShaderFeaturesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceMeshShaderFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceMeshShaderFeaturesNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Tuple-of-references view over all members, used for comparisons below.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, taskShader, meshShader );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceMeshShaderFeaturesNV const & ) const = default;
+#else
+    // Memberwise equality; note pNext is compared as a raw pointer, not deeply.
+    bool operator==( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( taskShader == rhs.taskShader )
+          && ( meshShader == rhs.meshShader );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMeshShaderFeaturesNV;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 taskShader = {};
+    VULKAN_HPP_NAMESPACE::Bool32 meshShader = {};
+
+  };
+
+  // Trait specialization: StructureType enum value -> C++ wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceMeshShaderFeaturesNV>
+  {
+    using Type = PhysicalDeviceMeshShaderFeaturesNV;
+  };
+
+  // C++ wrapper for VkPhysicalDeviceMeshShaderPropertiesEXT (VK_EXT_mesh_shader
+  // implementation-limits struct: task/mesh work-group counts, payload and
+  // shared-memory sizes, output limits, preference flags). Properties structs
+  // are filled by the implementation, so no setters are generated — only
+  // constructors, conversions and comparisons. Generated code — do not edit.
+  struct PhysicalDeviceMeshShaderPropertiesEXT
+  {
+    using NativeType = VkPhysicalDeviceMeshShaderPropertiesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMeshShaderPropertiesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderPropertiesEXT(uint32_t maxTaskWorkGroupTotalCount_ = {}, std::array<uint32_t,3> const & maxTaskWorkGroupCount_ = {}, uint32_t maxTaskWorkGroupInvocations_ = {}, std::array<uint32_t,3> const & maxTaskWorkGroupSize_ = {}, uint32_t maxTaskPayloadSize_ = {}, uint32_t maxTaskSharedMemorySize_ = {}, uint32_t maxTaskPayloadAndSharedMemorySize_ = {}, uint32_t maxMeshWorkGroupTotalCount_ = {}, std::array<uint32_t,3> const & maxMeshWorkGroupCount_ = {}, uint32_t maxMeshWorkGroupInvocations_ = {}, std::array<uint32_t,3> const & maxMeshWorkGroupSize_ = {}, uint32_t maxMeshSharedMemorySize_ = {}, uint32_t maxMeshPayloadAndSharedMemorySize_ = {}, uint32_t maxMeshOutputMemorySize_ = {}, uint32_t maxMeshPayloadAndOutputMemorySize_ = {}, uint32_t maxMeshOutputComponents_ = {}, uint32_t maxMeshOutputVertices_ = {}, uint32_t maxMeshOutputPrimitives_ = {}, uint32_t maxMeshOutputLayers_ = {}, uint32_t maxMeshMultiviewViewCount_ = {}, uint32_t meshOutputPerVertexGranularity_ = {}, uint32_t meshOutputPerPrimitiveGranularity_ = {}, uint32_t maxPreferredTaskWorkGroupInvocations_ = {}, uint32_t maxPreferredMeshWorkGroupInvocations_ = {}, VULKAN_HPP_NAMESPACE::Bool32 prefersLocalInvocationVertexOutput_ = {}, VULKAN_HPP_NAMESPACE::Bool32 prefersLocalInvocationPrimitiveOutput_ = {}, VULKAN_HPP_NAMESPACE::Bool32 prefersCompactVertexOutput_ = {}, VULKAN_HPP_NAMESPACE::Bool32 prefersCompactPrimitiveOutput_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), maxTaskWorkGroupTotalCount( maxTaskWorkGroupTotalCount_ ), maxTaskWorkGroupCount( maxTaskWorkGroupCount_ ), maxTaskWorkGroupInvocations( maxTaskWorkGroupInvocations_ ), maxTaskWorkGroupSize( maxTaskWorkGroupSize_ ), maxTaskPayloadSize( maxTaskPayloadSize_ ), maxTaskSharedMemorySize( maxTaskSharedMemorySize_ ), maxTaskPayloadAndSharedMemorySize( maxTaskPayloadAndSharedMemorySize_ ), maxMeshWorkGroupTotalCount( maxMeshWorkGroupTotalCount_ ), maxMeshWorkGroupCount( maxMeshWorkGroupCount_ ), maxMeshWorkGroupInvocations( maxMeshWorkGroupInvocations_ ), maxMeshWorkGroupSize( maxMeshWorkGroupSize_ ), maxMeshSharedMemorySize( maxMeshSharedMemorySize_ ), maxMeshPayloadAndSharedMemorySize( maxMeshPayloadAndSharedMemorySize_ ), maxMeshOutputMemorySize( maxMeshOutputMemorySize_ ), maxMeshPayloadAndOutputMemorySize( maxMeshPayloadAndOutputMemorySize_ ), maxMeshOutputComponents( maxMeshOutputComponents_ ), maxMeshOutputVertices( maxMeshOutputVertices_ ), maxMeshOutputPrimitives( maxMeshOutputPrimitives_ ), maxMeshOutputLayers( maxMeshOutputLayers_ ), maxMeshMultiviewViewCount( maxMeshMultiviewViewCount_ ), meshOutputPerVertexGranularity( meshOutputPerVertexGranularity_ ), meshOutputPerPrimitiveGranularity( meshOutputPerPrimitiveGranularity_ ), maxPreferredTaskWorkGroupInvocations( maxPreferredTaskWorkGroupInvocations_ ), maxPreferredMeshWorkGroupInvocations( maxPreferredMeshWorkGroupInvocations_ ), prefersLocalInvocationVertexOutput( prefersLocalInvocationVertexOutput_ ), prefersLocalInvocationPrimitiveOutput( prefersLocalInvocationPrimitiveOutput_ ), prefersCompactVertexOutput( prefersCompactVertexOutput_ ), prefersCompactPrimitiveOutput( prefersCompactPrimitiveOutput_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderPropertiesEXT( PhysicalDeviceMeshShaderPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the native C struct (layout-compatible reinterpret).
+    PhysicalDeviceMeshShaderPropertiesEXT( VkPhysicalDeviceMeshShaderPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceMeshShaderPropertiesEXT( *reinterpret_cast<PhysicalDeviceMeshShaderPropertiesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceMeshShaderPropertiesEXT & operator=( PhysicalDeviceMeshShaderPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceMeshShaderPropertiesEXT & operator=( VkPhysicalDeviceMeshShaderPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+
+    // Implicit conversions to the native C struct for use with the C API.
+    operator VkPhysicalDeviceMeshShaderPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMeshShaderPropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceMeshShaderPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceMeshShaderPropertiesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Tuple-of-references view over all members (drives the == below).
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, maxTaskWorkGroupTotalCount, maxTaskWorkGroupCount, maxTaskWorkGroupInvocations, maxTaskWorkGroupSize, maxTaskPayloadSize, maxTaskSharedMemorySize, maxTaskPayloadAndSharedMemorySize, maxMeshWorkGroupTotalCount, maxMeshWorkGroupCount, maxMeshWorkGroupInvocations, maxMeshWorkGroupSize, maxMeshSharedMemorySize, maxMeshPayloadAndSharedMemorySize, maxMeshOutputMemorySize, maxMeshPayloadAndOutputMemorySize, maxMeshOutputComponents, maxMeshOutputVertices, maxMeshOutputPrimitives, maxMeshOutputLayers, maxMeshMultiviewViewCount, meshOutputPerVertexGranularity, meshOutputPerPrimitiveGranularity, maxPreferredTaskWorkGroupInvocations, maxPreferredMeshWorkGroupInvocations, prefersLocalInvocationVertexOutput, prefersLocalInvocationPrimitiveOutput, prefersCompactVertexOutput, prefersCompactPrimitiveOutput );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceMeshShaderPropertiesEXT const & ) const = default;
+#else
+    // Memberwise equality; pNext is compared as a raw pointer, not deeply.
+    bool operator==( PhysicalDeviceMeshShaderPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxTaskWorkGroupTotalCount == rhs.maxTaskWorkGroupTotalCount )
+          && ( maxTaskWorkGroupCount == rhs.maxTaskWorkGroupCount )
+          && ( maxTaskWorkGroupInvocations == rhs.maxTaskWorkGroupInvocations )
+          && ( maxTaskWorkGroupSize == rhs.maxTaskWorkGroupSize )
+          && ( maxTaskPayloadSize == rhs.maxTaskPayloadSize )
+          && ( maxTaskSharedMemorySize == rhs.maxTaskSharedMemorySize )
+          && ( maxTaskPayloadAndSharedMemorySize == rhs.maxTaskPayloadAndSharedMemorySize )
+          && ( maxMeshWorkGroupTotalCount == rhs.maxMeshWorkGroupTotalCount )
+          && ( maxMeshWorkGroupCount == rhs.maxMeshWorkGroupCount )
+          && ( maxMeshWorkGroupInvocations == rhs.maxMeshWorkGroupInvocations )
+          && ( maxMeshWorkGroupSize == rhs.maxMeshWorkGroupSize )
+          && ( maxMeshSharedMemorySize == rhs.maxMeshSharedMemorySize )
+          && ( maxMeshPayloadAndSharedMemorySize == rhs.maxMeshPayloadAndSharedMemorySize )
+          && ( maxMeshOutputMemorySize == rhs.maxMeshOutputMemorySize )
+          && ( maxMeshPayloadAndOutputMemorySize == rhs.maxMeshPayloadAndOutputMemorySize )
+          && ( maxMeshOutputComponents == rhs.maxMeshOutputComponents )
+          && ( maxMeshOutputVertices == rhs.maxMeshOutputVertices )
+          && ( maxMeshOutputPrimitives == rhs.maxMeshOutputPrimitives )
+          && ( maxMeshOutputLayers == rhs.maxMeshOutputLayers )
+          && ( maxMeshMultiviewViewCount == rhs.maxMeshMultiviewViewCount )
+          && ( meshOutputPerVertexGranularity == rhs.meshOutputPerVertexGranularity )
+          && ( meshOutputPerPrimitiveGranularity == rhs.meshOutputPerPrimitiveGranularity )
+          && ( maxPreferredTaskWorkGroupInvocations == rhs.maxPreferredTaskWorkGroupInvocations )
+          && ( maxPreferredMeshWorkGroupInvocations == rhs.maxPreferredMeshWorkGroupInvocations )
+          && ( prefersLocalInvocationVertexOutput == rhs.prefersLocalInvocationVertexOutput )
+          && ( prefersLocalInvocationPrimitiveOutput == rhs.prefersLocalInvocationPrimitiveOutput )
+          && ( prefersCompactVertexOutput == rhs.prefersCompactVertexOutput )
+          && ( prefersCompactPrimitiveOutput == rhs.prefersCompactPrimitiveOutput );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceMeshShaderPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMeshShaderPropertiesEXT;
+    void * pNext = {};
+    uint32_t maxTaskWorkGroupTotalCount = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> maxTaskWorkGroupCount = {};
+    uint32_t maxTaskWorkGroupInvocations = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> maxTaskWorkGroupSize = {};
+    uint32_t maxTaskPayloadSize = {};
+    uint32_t maxTaskSharedMemorySize = {};
+    uint32_t maxTaskPayloadAndSharedMemorySize = {};
+    uint32_t maxMeshWorkGroupTotalCount = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> maxMeshWorkGroupCount = {};
+    uint32_t maxMeshWorkGroupInvocations = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> maxMeshWorkGroupSize = {};
+    uint32_t maxMeshSharedMemorySize = {};
+    uint32_t maxMeshPayloadAndSharedMemorySize = {};
+    uint32_t maxMeshOutputMemorySize = {};
+    uint32_t maxMeshPayloadAndOutputMemorySize = {};
+    uint32_t maxMeshOutputComponents = {};
+    uint32_t maxMeshOutputVertices = {};
+    uint32_t maxMeshOutputPrimitives = {};
+    uint32_t maxMeshOutputLayers = {};
+    uint32_t maxMeshMultiviewViewCount = {};
+    uint32_t meshOutputPerVertexGranularity = {};
+    uint32_t meshOutputPerPrimitiveGranularity = {};
+    uint32_t maxPreferredTaskWorkGroupInvocations = {};
+    uint32_t maxPreferredMeshWorkGroupInvocations = {};
+    VULKAN_HPP_NAMESPACE::Bool32 prefersLocalInvocationVertexOutput = {};
+    VULKAN_HPP_NAMESPACE::Bool32 prefersLocalInvocationPrimitiveOutput = {};
+    VULKAN_HPP_NAMESPACE::Bool32 prefersCompactVertexOutput = {};
+    VULKAN_HPP_NAMESPACE::Bool32 prefersCompactPrimitiveOutput = {};
+
+  };
+
+  // Trait specialization: StructureType enum value -> C++ wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceMeshShaderPropertiesEXT>
+  {
+    using Type = PhysicalDeviceMeshShaderPropertiesEXT;
+  };
+
+  // C++ wrapper for VkPhysicalDeviceMeshShaderPropertiesNV (VK_NV_mesh_shader
+  // implementation-limits struct). Read-only properties struct, so no setters
+  // are generated. Generated code — do not edit by hand.
+  struct PhysicalDeviceMeshShaderPropertiesNV
+  {
+    using NativeType = VkPhysicalDeviceMeshShaderPropertiesNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMeshShaderPropertiesNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderPropertiesNV(uint32_t maxDrawMeshTasksCount_ = {}, uint32_t maxTaskWorkGroupInvocations_ = {}, std::array<uint32_t,3> const & maxTaskWorkGroupSize_ = {}, uint32_t maxTaskTotalMemorySize_ = {}, uint32_t maxTaskOutputCount_ = {}, uint32_t maxMeshWorkGroupInvocations_ = {}, std::array<uint32_t,3> const & maxMeshWorkGroupSize_ = {}, uint32_t maxMeshTotalMemorySize_ = {}, uint32_t maxMeshOutputVertices_ = {}, uint32_t maxMeshOutputPrimitives_ = {}, uint32_t maxMeshMultiviewViewCount_ = {}, uint32_t meshOutputPerVertexGranularity_ = {}, uint32_t meshOutputPerPrimitiveGranularity_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), maxDrawMeshTasksCount( maxDrawMeshTasksCount_ ), maxTaskWorkGroupInvocations( maxTaskWorkGroupInvocations_ ), maxTaskWorkGroupSize( maxTaskWorkGroupSize_ ), maxTaskTotalMemorySize( maxTaskTotalMemorySize_ ), maxTaskOutputCount( maxTaskOutputCount_ ), maxMeshWorkGroupInvocations( maxMeshWorkGroupInvocations_ ), maxMeshWorkGroupSize( maxMeshWorkGroupSize_ ), maxMeshTotalMemorySize( maxMeshTotalMemorySize_ ), maxMeshOutputVertices( maxMeshOutputVertices_ ), maxMeshOutputPrimitives( maxMeshOutputPrimitives_ ), maxMeshMultiviewViewCount( maxMeshMultiviewViewCount_ ), meshOutputPerVertexGranularity( meshOutputPerVertexGranularity_ ), meshOutputPerPrimitiveGranularity( meshOutputPerPrimitiveGranularity_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderPropertiesNV( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the native C struct (layout-compatible reinterpret).
+    PhysicalDeviceMeshShaderPropertiesNV( VkPhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceMeshShaderPropertiesNV( *reinterpret_cast<PhysicalDeviceMeshShaderPropertiesNV const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceMeshShaderPropertiesNV & operator=( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceMeshShaderPropertiesNV & operator=( VkPhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV const *>( &rhs );
+      return *this;
+    }
+
+
+    // Implicit conversions to the native C struct for use with the C API.
+    operator VkPhysicalDeviceMeshShaderPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMeshShaderPropertiesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceMeshShaderPropertiesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceMeshShaderPropertiesNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Tuple-of-references view over all members (drives the == below).
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, maxDrawMeshTasksCount, maxTaskWorkGroupInvocations, maxTaskWorkGroupSize, maxTaskTotalMemorySize, maxTaskOutputCount, maxMeshWorkGroupInvocations, maxMeshWorkGroupSize, maxMeshTotalMemorySize, maxMeshOutputVertices, maxMeshOutputPrimitives, maxMeshMultiviewViewCount, meshOutputPerVertexGranularity, meshOutputPerPrimitiveGranularity );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceMeshShaderPropertiesNV const & ) const = default;
+#else
+    // Memberwise equality; pNext is compared as a raw pointer, not deeply.
+    bool operator==( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxDrawMeshTasksCount == rhs.maxDrawMeshTasksCount )
+          && ( maxTaskWorkGroupInvocations == rhs.maxTaskWorkGroupInvocations )
+          && ( maxTaskWorkGroupSize == rhs.maxTaskWorkGroupSize )
+          && ( maxTaskTotalMemorySize == rhs.maxTaskTotalMemorySize )
+          && ( maxTaskOutputCount == rhs.maxTaskOutputCount )
+          && ( maxMeshWorkGroupInvocations == rhs.maxMeshWorkGroupInvocations )
+          && ( maxMeshWorkGroupSize == rhs.maxMeshWorkGroupSize )
+          && ( maxMeshTotalMemorySize == rhs.maxMeshTotalMemorySize )
+          && ( maxMeshOutputVertices == rhs.maxMeshOutputVertices )
+          && ( maxMeshOutputPrimitives == rhs.maxMeshOutputPrimitives )
+          && ( maxMeshMultiviewViewCount == rhs.maxMeshMultiviewViewCount )
+          && ( meshOutputPerVertexGranularity == rhs.meshOutputPerVertexGranularity )
+          && ( meshOutputPerPrimitiveGranularity == rhs.meshOutputPerPrimitiveGranularity );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMeshShaderPropertiesNV;
+    void * pNext = {};
+    uint32_t maxDrawMeshTasksCount = {};
+    uint32_t maxTaskWorkGroupInvocations = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> maxTaskWorkGroupSize = {};
+    uint32_t maxTaskTotalMemorySize = {};
+    uint32_t maxTaskOutputCount = {};
+    uint32_t maxMeshWorkGroupInvocations = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> maxMeshWorkGroupSize = {};
+    uint32_t maxMeshTotalMemorySize = {};
+    uint32_t maxMeshOutputVertices = {};
+    uint32_t maxMeshOutputPrimitives = {};
+    uint32_t maxMeshMultiviewViewCount = {};
+    uint32_t meshOutputPerVertexGranularity = {};
+    uint32_t meshOutputPerPrimitiveGranularity = {};
+
+  };
+
+  // Trait specialization: StructureType enum value -> C++ wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceMeshShaderPropertiesNV>
+  {
+    using Type = PhysicalDeviceMeshShaderPropertiesNV;
+  };
+
+  // C++ wrapper for VkPhysicalDeviceMultiDrawFeaturesEXT (VK_EXT_multi_draw
+  // feature-query struct: single `multiDraw` boolean). Generated code — do not
+  // edit by hand.
+  struct PhysicalDeviceMultiDrawFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceMultiDrawFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiDrawFeaturesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiDrawFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 multiDraw_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), multiDraw( multiDraw_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiDrawFeaturesEXT( PhysicalDeviceMultiDrawFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the native C struct (layout-compatible reinterpret).
+    PhysicalDeviceMultiDrawFeaturesEXT( VkPhysicalDeviceMultiDrawFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceMultiDrawFeaturesEXT( *reinterpret_cast<PhysicalDeviceMultiDrawFeaturesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceMultiDrawFeaturesEXT & operator=( PhysicalDeviceMultiDrawFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceMultiDrawFeaturesEXT & operator=( VkPhysicalDeviceMultiDrawFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters (return *this to allow chaining).
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiDrawFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiDrawFeaturesEXT & setMultiDraw( VULKAN_HPP_NAMESPACE::Bool32 multiDraw_ ) VULKAN_HPP_NOEXCEPT
+    {
+      multiDraw = multiDraw_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the native C struct for use with the C API.
+    operator VkPhysicalDeviceMultiDrawFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMultiDrawFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceMultiDrawFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceMultiDrawFeaturesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Tuple-of-references view over all members (drives the == below).
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, multiDraw );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceMultiDrawFeaturesEXT const & ) const = default;
+#else
+    // Memberwise equality; pNext is compared as a raw pointer, not deeply.
+    bool operator==( PhysicalDeviceMultiDrawFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( multiDraw == rhs.multiDraw );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceMultiDrawFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiDrawFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 multiDraw = {};
+
+  };
+
+  // Trait specialization: StructureType enum value -> C++ wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceMultiDrawFeaturesEXT>
+  {
+    using Type = PhysicalDeviceMultiDrawFeaturesEXT;
+  };
+
+  // C++ wrapper for VkPhysicalDeviceMultiDrawPropertiesEXT (VK_EXT_multi_draw
+  // limits struct: `maxMultiDrawCount`). Read-only properties struct, so no
+  // setters are generated. Generated code — do not edit by hand.
+  struct PhysicalDeviceMultiDrawPropertiesEXT
+  {
+    using NativeType = VkPhysicalDeviceMultiDrawPropertiesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiDrawPropertiesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiDrawPropertiesEXT(uint32_t maxMultiDrawCount_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), maxMultiDrawCount( maxMultiDrawCount_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiDrawPropertiesEXT( PhysicalDeviceMultiDrawPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the native C struct (layout-compatible reinterpret).
+    PhysicalDeviceMultiDrawPropertiesEXT( VkPhysicalDeviceMultiDrawPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceMultiDrawPropertiesEXT( *reinterpret_cast<PhysicalDeviceMultiDrawPropertiesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceMultiDrawPropertiesEXT & operator=( PhysicalDeviceMultiDrawPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceMultiDrawPropertiesEXT & operator=( VkPhysicalDeviceMultiDrawPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+
+    // Implicit conversions to the native C struct for use with the C API.
+    operator VkPhysicalDeviceMultiDrawPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMultiDrawPropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceMultiDrawPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceMultiDrawPropertiesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Tuple-of-references view over all members (drives the == below).
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, maxMultiDrawCount );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceMultiDrawPropertiesEXT const & ) const = default;
+#else
+    // Memberwise equality; pNext is compared as a raw pointer, not deeply.
+    bool operator==( PhysicalDeviceMultiDrawPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxMultiDrawCount == rhs.maxMultiDrawCount );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceMultiDrawPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiDrawPropertiesEXT;
+    void * pNext = {};
+    uint32_t maxMultiDrawCount = {};
+
+  };
+
+  // Trait specialization: StructureType enum value -> C++ wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceMultiDrawPropertiesEXT>
+  {
+    using Type = PhysicalDeviceMultiDrawPropertiesEXT;
+  };
+
+  // C++ wrapper for VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT
+  // (VK_EXT_multisampled_render_to_single_sampled feature-query struct with a
+  // single boolean). Generated code — do not edit by hand.
+  struct PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 multisampledRenderToSingleSampled_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), multisampledRenderToSingleSampled( multisampledRenderToSingleSampled_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT( PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the native C struct (layout-compatible reinterpret).
+    PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT( VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT( *reinterpret_cast<PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT & operator=( PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT & operator=( VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters (return *this to allow chaining).
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT & setMultisampledRenderToSingleSampled( VULKAN_HPP_NAMESPACE::Bool32 multisampledRenderToSingleSampled_ ) VULKAN_HPP_NOEXCEPT
+    {
+      multisampledRenderToSingleSampled = multisampledRenderToSingleSampled_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the native C struct for use with the C API.
+    operator VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Tuple-of-references view over all members (drives the == below).
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, multisampledRenderToSingleSampled );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & ) const = default;
+#else
+    // Memberwise equality; pNext is compared as a raw pointer, not deeply.
+    bool operator==( PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( multisampledRenderToSingleSampled == rhs.multisampledRenderToSingleSampled );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 multisampledRenderToSingleSampled = {};
+
+  };
+
+  // Trait specialization: StructureType enum value -> C++ wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT>
+  {
+    using Type = PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT;
+  };
+
+  // C++ wrapper for VkPhysicalDeviceMultiviewFeatures (Vulkan 1.1 core, promoted from
+  // VK_KHR_multiview). Layout-compatible with the C struct, which is what makes the
+  // reinterpret_cast-based conversions and assignment below valid.
+  struct PhysicalDeviceMultiviewFeatures
+  {
+    using NativeType = VkPhysicalDeviceMultiviewFeatures;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiviewFeatures;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewFeatures(VULKAN_HPP_NAMESPACE::Bool32 multiview_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), multiview( multiview_ ), multiviewGeometryShader( multiviewGeometryShader_ ), multiviewTessellationShader( multiviewTessellationShader_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewFeatures( PhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the native C struct (relies on identical layout).
+    PhysicalDeviceMultiviewFeatures( VkPhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceMultiviewFeatures( *reinterpret_cast<PhysicalDeviceMultiviewFeatures const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceMultiviewFeatures & operator=( PhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceMultiviewFeatures & operator=( VkPhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters; each returns *this so calls can be chained.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewFeatures & setMultiview( VULKAN_HPP_NAMESPACE::Bool32 multiview_ ) VULKAN_HPP_NOEXCEPT
+    {
+      multiview = multiview_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewFeatures & setMultiviewGeometryShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      multiviewGeometryShader = multiviewGeometryShader_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewFeatures & setMultiviewTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      multiviewTessellationShader = multiviewTessellationShader_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the native C struct for passing to the Vulkan C API.
+    operator VkPhysicalDeviceMultiviewFeatures const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMultiviewFeatures*>( this );
+    }
+
+    operator VkPhysicalDeviceMultiviewFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceMultiviewFeatures*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Expose all members as a tuple of references (used by comparison below).
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, multiview, multiviewGeometryShader, multiviewTessellationShader );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceMultiviewFeatures const & ) const = default;
+#else
+    // Without <=>, fall back to memberwise (or reflection-based) equality.
+    bool operator==( PhysicalDeviceMultiviewFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( multiview == rhs.multiview )
+          && ( multiviewGeometryShader == rhs.multiviewGeometryShader )
+          && ( multiviewTessellationShader == rhs.multiviewTessellationShader );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceMultiviewFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Data members mirror the C struct field order exactly.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewFeatures;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 multiview = {};
+    VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader = {};
+    VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader = {};
+
+  };
+
+  // Maps the StructureType enumerant back to this C++ type (used by StructureChain).
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceMultiviewFeatures>
+  {
+    using Type = PhysicalDeviceMultiviewFeatures;
+  };
+  // Alias for the pre-promotion KHR extension name.
+  using PhysicalDeviceMultiviewFeaturesKHR = PhysicalDeviceMultiviewFeatures;
+
+  // C++ wrapper for VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX
+  // (VK_NVX_multiview_per_view_attributes). Read-only properties struct: it is
+  // filled in by the implementation, so no setters are generated for its payload.
+  struct PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX
+  {
+    using NativeType = VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX(VULKAN_HPP_NAMESPACE::Bool32 perViewPositionAllComponents_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), perViewPositionAllComponents( perViewPositionAllComponents_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the native C struct (relies on identical layout).
+    PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( *reinterpret_cast<PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX & operator=( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX & operator=( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const *>( &rhs );
+      return *this;
+    }
+
+
+    // Implicit conversions to the native C struct for passing to the Vulkan C API.
+    operator VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX*>( this );
+    }
+
+    operator VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Expose all members as a tuple of references (used by comparison below).
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, perViewPositionAllComponents );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & ) const = default;
+#else
+    // Without <=>, fall back to memberwise (or reflection-based) equality.
+    bool operator==( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( perViewPositionAllComponents == rhs.perViewPositionAllComponents );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Data members mirror the C struct field order exactly.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 perViewPositionAllComponents = {};
+
+  };
+
+  // Maps the StructureType enumerant back to this C++ type (used by StructureChain).
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX>
+  {
+    using Type = PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX;
+  };
+
+  // C++ wrapper for VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM
+  // (VK_QCOM_multiview_per_view_viewports). Layout-compatible with the C struct,
+  // which is what makes the reinterpret_cast-based conversions below valid.
+  struct PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM
+  {
+    using NativeType = VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM(VULKAN_HPP_NAMESPACE::Bool32 multiviewPerViewViewports_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), multiviewPerViewViewports( multiviewPerViewViewports_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM( PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the native C struct (relies on identical layout).
+    PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM( VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM( *reinterpret_cast<PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM & operator=( PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM & operator=( VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters; each returns *this so calls can be chained.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM & setMultiviewPerViewViewports( VULKAN_HPP_NAMESPACE::Bool32 multiviewPerViewViewports_ ) VULKAN_HPP_NOEXCEPT
+    {
+      multiviewPerViewViewports = multiviewPerViewViewports_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the native C struct for passing to the Vulkan C API.
+    operator VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM*>( this );
+    }
+
+    operator VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Expose all members as a tuple of references (used by comparison below).
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, multiviewPerViewViewports );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const & ) const = default;
+#else
+    // Without <=>, fall back to memberwise (or reflection-based) equality.
+    bool operator==( PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( multiviewPerViewViewports == rhs.multiviewPerViewViewports );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Data members mirror the C struct field order exactly.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 multiviewPerViewViewports = {};
+
+  };
+
+  // Maps the StructureType enumerant back to this C++ type (used by StructureChain).
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM>
+  {
+    using Type = PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM;
+  };
+
+  // C++ wrapper for VkPhysicalDeviceMultiviewProperties (Vulkan 1.1 core, promoted
+  // from VK_KHR_multiview). Read-only properties struct: it is filled in by the
+  // implementation, so no setters are generated for its payload.
+  struct PhysicalDeviceMultiviewProperties
+  {
+    using NativeType = VkPhysicalDeviceMultiviewProperties;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiviewProperties;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewProperties(uint32_t maxMultiviewViewCount_ = {}, uint32_t maxMultiviewInstanceIndex_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), maxMultiviewViewCount( maxMultiviewViewCount_ ), maxMultiviewInstanceIndex( maxMultiviewInstanceIndex_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewProperties( PhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the native C struct (relies on identical layout).
+    PhysicalDeviceMultiviewProperties( VkPhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceMultiviewProperties( *reinterpret_cast<PhysicalDeviceMultiviewProperties const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceMultiviewProperties & operator=( PhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceMultiviewProperties & operator=( VkPhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewProperties const *>( &rhs );
+      return *this;
+    }
+
+
+    // Implicit conversions to the native C struct for passing to the Vulkan C API.
+    operator VkPhysicalDeviceMultiviewProperties const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMultiviewProperties*>( this );
+    }
+
+    operator VkPhysicalDeviceMultiviewProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceMultiviewProperties*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Expose all members as a tuple of references (used by comparison below).
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, maxMultiviewViewCount, maxMultiviewInstanceIndex );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceMultiviewProperties const & ) const = default;
+#else
+    // Without <=>, fall back to memberwise (or reflection-based) equality.
+    bool operator==( PhysicalDeviceMultiviewProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxMultiviewViewCount == rhs.maxMultiviewViewCount )
+          && ( maxMultiviewInstanceIndex == rhs.maxMultiviewInstanceIndex );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceMultiviewProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Data members mirror the C struct field order exactly.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewProperties;
+    void * pNext = {};
+    uint32_t maxMultiviewViewCount = {};
+    uint32_t maxMultiviewInstanceIndex = {};
+
+  };
+
+  // Maps the StructureType enumerant back to this C++ type (used by StructureChain).
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceMultiviewProperties>
+  {
+    using Type = PhysicalDeviceMultiviewProperties;
+  };
+  // Alias for the pre-promotion KHR extension name.
+  using PhysicalDeviceMultiviewPropertiesKHR = PhysicalDeviceMultiviewProperties;
+
+  // C++ wrapper for VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT
+  // (VK_EXT_mutable_descriptor_type, originally VK_VALVE_mutable_descriptor_type —
+  // see the VALVE alias after the struct). Layout-compatible with the C struct.
+  struct PhysicalDeviceMutableDescriptorTypeFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMutableDescriptorTypeFeaturesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceMutableDescriptorTypeFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 mutableDescriptorType_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), mutableDescriptorType( mutableDescriptorType_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMutableDescriptorTypeFeaturesEXT( PhysicalDeviceMutableDescriptorTypeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the native C struct (relies on identical layout).
+    PhysicalDeviceMutableDescriptorTypeFeaturesEXT( VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceMutableDescriptorTypeFeaturesEXT( *reinterpret_cast<PhysicalDeviceMutableDescriptorTypeFeaturesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceMutableDescriptorTypeFeaturesEXT & operator=( PhysicalDeviceMutableDescriptorTypeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceMutableDescriptorTypeFeaturesEXT & operator=( VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMutableDescriptorTypeFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters; each returns *this so calls can be chained.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMutableDescriptorTypeFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMutableDescriptorTypeFeaturesEXT & setMutableDescriptorType( VULKAN_HPP_NAMESPACE::Bool32 mutableDescriptorType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mutableDescriptorType = mutableDescriptorType_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the native C struct for passing to the Vulkan C API.
+    operator VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Expose all members as a tuple of references (used by comparison below).
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, mutableDescriptorType );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceMutableDescriptorTypeFeaturesEXT const & ) const = default;
+#else
+    // Without <=>, fall back to memberwise (or reflection-based) equality.
+    bool operator==( PhysicalDeviceMutableDescriptorTypeFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( mutableDescriptorType == rhs.mutableDescriptorType );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceMutableDescriptorTypeFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Data members mirror the C struct field order exactly.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMutableDescriptorTypeFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 mutableDescriptorType = {};
+
+  };
+
+  // Maps the StructureType enumerant back to this C++ type (used by StructureChain).
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceMutableDescriptorTypeFeaturesEXT>
+  {
+    using Type = PhysicalDeviceMutableDescriptorTypeFeaturesEXT;
+  };
+  // Alias for the original VALVE vendor extension name.
+  using PhysicalDeviceMutableDescriptorTypeFeaturesVALVE = PhysicalDeviceMutableDescriptorTypeFeaturesEXT;
+
+  // C++ wrapper for VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT
+  // (VK_EXT_non_seamless_cube_map). Layout-compatible with the C struct,
+  // which is what makes the reinterpret_cast-based conversions below valid.
+  struct PhysicalDeviceNonSeamlessCubeMapFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceNonSeamlessCubeMapFeaturesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceNonSeamlessCubeMapFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 nonSeamlessCubeMap_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), nonSeamlessCubeMap( nonSeamlessCubeMap_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceNonSeamlessCubeMapFeaturesEXT( PhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the native C struct (relies on identical layout).
+    PhysicalDeviceNonSeamlessCubeMapFeaturesEXT( VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceNonSeamlessCubeMapFeaturesEXT( *reinterpret_cast<PhysicalDeviceNonSeamlessCubeMapFeaturesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceNonSeamlessCubeMapFeaturesEXT & operator=( PhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceNonSeamlessCubeMapFeaturesEXT & operator=( VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceNonSeamlessCubeMapFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters; each returns *this so calls can be chained.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceNonSeamlessCubeMapFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceNonSeamlessCubeMapFeaturesEXT & setNonSeamlessCubeMap( VULKAN_HPP_NAMESPACE::Bool32 nonSeamlessCubeMap_ ) VULKAN_HPP_NOEXCEPT
+    {
+      nonSeamlessCubeMap = nonSeamlessCubeMap_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the native C struct for passing to the Vulkan C API.
+    operator VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Expose all members as a tuple of references (used by comparison below).
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, nonSeamlessCubeMap );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & ) const = default;
+#else
+    // Without <=>, fall back to memberwise (or reflection-based) equality.
+    bool operator==( PhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( nonSeamlessCubeMap == rhs.nonSeamlessCubeMap );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Data members mirror the C struct field order exactly.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceNonSeamlessCubeMapFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 nonSeamlessCubeMap = {};
+
+  };
+
+  // Maps the StructureType enumerant back to this C++ type (used by StructureChain).
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceNonSeamlessCubeMapFeaturesEXT>
+  {
+    using Type = PhysicalDeviceNonSeamlessCubeMapFeaturesEXT;
+  };
+
+  // C++ wrapper for VkPhysicalDeviceOpacityMicromapFeaturesEXT
+  // (VK_EXT_opacity_micromap). Layout-compatible with the C struct,
+  // which is what makes the reinterpret_cast-based conversions below valid.
+  struct PhysicalDeviceOpacityMicromapFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceOpacityMicromapFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceOpacityMicromapFeaturesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceOpacityMicromapFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 micromap_ = {}, VULKAN_HPP_NAMESPACE::Bool32 micromapCaptureReplay_ = {}, VULKAN_HPP_NAMESPACE::Bool32 micromapHostCommands_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), micromap( micromap_ ), micromapCaptureReplay( micromapCaptureReplay_ ), micromapHostCommands( micromapHostCommands_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceOpacityMicromapFeaturesEXT( PhysicalDeviceOpacityMicromapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the native C struct (relies on identical layout).
+    PhysicalDeviceOpacityMicromapFeaturesEXT( VkPhysicalDeviceOpacityMicromapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceOpacityMicromapFeaturesEXT( *reinterpret_cast<PhysicalDeviceOpacityMicromapFeaturesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceOpacityMicromapFeaturesEXT & operator=( PhysicalDeviceOpacityMicromapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceOpacityMicromapFeaturesEXT & operator=( VkPhysicalDeviceOpacityMicromapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters; each returns *this so calls can be chained.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceOpacityMicromapFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceOpacityMicromapFeaturesEXT & setMicromap( VULKAN_HPP_NAMESPACE::Bool32 micromap_ ) VULKAN_HPP_NOEXCEPT
+    {
+      micromap = micromap_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceOpacityMicromapFeaturesEXT & setMicromapCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 micromapCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
+    {
+      micromapCaptureReplay = micromapCaptureReplay_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceOpacityMicromapFeaturesEXT & setMicromapHostCommands( VULKAN_HPP_NAMESPACE::Bool32 micromapHostCommands_ ) VULKAN_HPP_NOEXCEPT
+    {
+      micromapHostCommands = micromapHostCommands_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the native C struct for passing to the Vulkan C API.
+    operator VkPhysicalDeviceOpacityMicromapFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceOpacityMicromapFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceOpacityMicromapFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceOpacityMicromapFeaturesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Expose all members as a tuple of references (used by comparison below).
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, micromap, micromapCaptureReplay, micromapHostCommands );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceOpacityMicromapFeaturesEXT const & ) const = default;
+#else
+    // Without <=>, fall back to memberwise (or reflection-based) equality.
+    bool operator==( PhysicalDeviceOpacityMicromapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( micromap == rhs.micromap )
+          && ( micromapCaptureReplay == rhs.micromapCaptureReplay )
+          && ( micromapHostCommands == rhs.micromapHostCommands );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceOpacityMicromapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Data members mirror the C struct field order exactly.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceOpacityMicromapFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 micromap = {};
+    VULKAN_HPP_NAMESPACE::Bool32 micromapCaptureReplay = {};
+    VULKAN_HPP_NAMESPACE::Bool32 micromapHostCommands = {};
+
+  };
+
+  // Maps the StructureType enumerant back to this C++ type (used by StructureChain).
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceOpacityMicromapFeaturesEXT>
+  {
+    using Type = PhysicalDeviceOpacityMicromapFeaturesEXT;
+  };
+
+  // C++ wrapper for VkPhysicalDeviceOpacityMicromapPropertiesEXT
+  // (VK_EXT_opacity_micromap). Read-only properties struct: it is filled in
+  // by the implementation, so no setters are generated for its payload.
+  struct PhysicalDeviceOpacityMicromapPropertiesEXT
+  {
+    using NativeType = VkPhysicalDeviceOpacityMicromapPropertiesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceOpacityMicromapPropertiesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceOpacityMicromapPropertiesEXT(uint32_t maxOpacity2StateSubdivisionLevel_ = {}, uint32_t maxOpacity4StateSubdivisionLevel_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), maxOpacity2StateSubdivisionLevel( maxOpacity2StateSubdivisionLevel_ ), maxOpacity4StateSubdivisionLevel( maxOpacity4StateSubdivisionLevel_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceOpacityMicromapPropertiesEXT( PhysicalDeviceOpacityMicromapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the native C struct (relies on identical layout).
+    PhysicalDeviceOpacityMicromapPropertiesEXT( VkPhysicalDeviceOpacityMicromapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceOpacityMicromapPropertiesEXT( *reinterpret_cast<PhysicalDeviceOpacityMicromapPropertiesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceOpacityMicromapPropertiesEXT & operator=( PhysicalDeviceOpacityMicromapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceOpacityMicromapPropertiesEXT & operator=( VkPhysicalDeviceOpacityMicromapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+
+    // Implicit conversions to the native C struct for passing to the Vulkan C API.
+    operator VkPhysicalDeviceOpacityMicromapPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceOpacityMicromapPropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceOpacityMicromapPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceOpacityMicromapPropertiesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Expose all members as a tuple of references (used by comparison below).
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, maxOpacity2StateSubdivisionLevel, maxOpacity4StateSubdivisionLevel );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceOpacityMicromapPropertiesEXT const & ) const = default;
+#else
+    // Without <=>, fall back to memberwise (or reflection-based) equality.
+    bool operator==( PhysicalDeviceOpacityMicromapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxOpacity2StateSubdivisionLevel == rhs.maxOpacity2StateSubdivisionLevel )
+          && ( maxOpacity4StateSubdivisionLevel == rhs.maxOpacity4StateSubdivisionLevel );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceOpacityMicromapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Data members mirror the C struct field order exactly.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceOpacityMicromapPropertiesEXT;
+    void * pNext = {};
+    uint32_t maxOpacity2StateSubdivisionLevel = {};
+    uint32_t maxOpacity4StateSubdivisionLevel = {};
+
+  };
+
+  // Maps the StructureType enumerant back to this C++ type (used by StructureChain).
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceOpacityMicromapPropertiesEXT>
+  {
+    using Type = PhysicalDeviceOpacityMicromapPropertiesEXT;
+  };
+
+  struct PhysicalDeviceOpticalFlowFeaturesNV
+  {
+    using NativeType = VkPhysicalDeviceOpticalFlowFeaturesNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceOpticalFlowFeaturesNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceOpticalFlowFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 opticalFlow_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), opticalFlow( opticalFlow_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceOpticalFlowFeaturesNV( PhysicalDeviceOpticalFlowFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceOpticalFlowFeaturesNV( VkPhysicalDeviceOpticalFlowFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceOpticalFlowFeaturesNV( *reinterpret_cast<PhysicalDeviceOpticalFlowFeaturesNV const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceOpticalFlowFeaturesNV & operator=( PhysicalDeviceOpticalFlowFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceOpticalFlowFeaturesNV & operator=( VkPhysicalDeviceOpticalFlowFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowFeaturesNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceOpticalFlowFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceOpticalFlowFeaturesNV & setOpticalFlow( VULKAN_HPP_NAMESPACE::Bool32 opticalFlow_ ) VULKAN_HPP_NOEXCEPT
+    {
+      opticalFlow = opticalFlow_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceOpticalFlowFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceOpticalFlowFeaturesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceOpticalFlowFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceOpticalFlowFeaturesNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, opticalFlow );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceOpticalFlowFeaturesNV const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceOpticalFlowFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( opticalFlow == rhs.opticalFlow );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceOpticalFlowFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceOpticalFlowFeaturesNV;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 opticalFlow = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceOpticalFlowFeaturesNV>
+  {
+    using Type = PhysicalDeviceOpticalFlowFeaturesNV;
+  };
+
+  struct PhysicalDeviceOpticalFlowPropertiesNV
+  {
+    using NativeType = VkPhysicalDeviceOpticalFlowPropertiesNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceOpticalFlowPropertiesNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceOpticalFlowPropertiesNV(VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV supportedOutputGridSizes_ = {}, VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV supportedHintGridSizes_ = {}, VULKAN_HPP_NAMESPACE::Bool32 hintSupported_ = {}, VULKAN_HPP_NAMESPACE::Bool32 costSupported_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bidirectionalFlowSupported_ = {}, VULKAN_HPP_NAMESPACE::Bool32 globalFlowSupported_ = {}, uint32_t minWidth_ = {}, uint32_t minHeight_ = {}, uint32_t maxWidth_ = {}, uint32_t maxHeight_ = {}, uint32_t maxNumRegionsOfInterest_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), supportedOutputGridSizes( supportedOutputGridSizes_ ), supportedHintGridSizes( supportedHintGridSizes_ ), hintSupported( hintSupported_ ), costSupported( costSupported_ ), bidirectionalFlowSupported( bidirectionalFlowSupported_ ), globalFlowSupported( globalFlowSupported_ ), minWidth( minWidth_ ), minHeight( minHeight_ ), maxWidth( maxWidth_ ), maxHeight( maxHeight_ ), maxNumRegionsOfInterest( maxNumRegionsOfInterest_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceOpticalFlowPropertiesNV( PhysicalDeviceOpticalFlowPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceOpticalFlowPropertiesNV( VkPhysicalDeviceOpticalFlowPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceOpticalFlowPropertiesNV( *reinterpret_cast<PhysicalDeviceOpticalFlowPropertiesNV const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceOpticalFlowPropertiesNV & operator=( PhysicalDeviceOpticalFlowPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceOpticalFlowPropertiesNV & operator=( VkPhysicalDeviceOpticalFlowPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowPropertiesNV const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceOpticalFlowPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceOpticalFlowPropertiesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceOpticalFlowPropertiesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceOpticalFlowPropertiesNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV const &, VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, supportedOutputGridSizes, supportedHintGridSizes, hintSupported, costSupported, bidirectionalFlowSupported, globalFlowSupported, minWidth, minHeight, maxWidth, maxHeight, maxNumRegionsOfInterest );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceOpticalFlowPropertiesNV const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceOpticalFlowPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( supportedOutputGridSizes == rhs.supportedOutputGridSizes )
+          && ( supportedHintGridSizes == rhs.supportedHintGridSizes )
+          && ( hintSupported == rhs.hintSupported )
+          && ( costSupported == rhs.costSupported )
+          && ( bidirectionalFlowSupported == rhs.bidirectionalFlowSupported )
+          && ( globalFlowSupported == rhs.globalFlowSupported )
+          && ( minWidth == rhs.minWidth )
+          && ( minHeight == rhs.minHeight )
+          && ( maxWidth == rhs.maxWidth )
+          && ( maxHeight == rhs.maxHeight )
+          && ( maxNumRegionsOfInterest == rhs.maxNumRegionsOfInterest );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceOpticalFlowPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceOpticalFlowPropertiesNV;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV supportedOutputGridSizes = {};
+    VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV supportedHintGridSizes = {};
+    VULKAN_HPP_NAMESPACE::Bool32 hintSupported = {};
+    VULKAN_HPP_NAMESPACE::Bool32 costSupported = {};
+    VULKAN_HPP_NAMESPACE::Bool32 bidirectionalFlowSupported = {};
+    VULKAN_HPP_NAMESPACE::Bool32 globalFlowSupported = {};
+    uint32_t minWidth = {};
+    uint32_t minHeight = {};
+    uint32_t maxWidth = {};
+    uint32_t maxHeight = {};
+    uint32_t maxNumRegionsOfInterest = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceOpticalFlowPropertiesNV>
+  {
+    using Type = PhysicalDeviceOpticalFlowPropertiesNV;
+  };
+
+  struct PhysicalDevicePCIBusInfoPropertiesEXT
+  {
+    using NativeType = VkPhysicalDevicePCIBusInfoPropertiesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePciBusInfoPropertiesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDevicePCIBusInfoPropertiesEXT(uint32_t pciDomain_ = {}, uint32_t pciBus_ = {}, uint32_t pciDevice_ = {}, uint32_t pciFunction_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), pciDomain( pciDomain_ ), pciBus( pciBus_ ), pciDevice( pciDevice_ ), pciFunction( pciFunction_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePCIBusInfoPropertiesEXT( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevicePCIBusInfoPropertiesEXT( VkPhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDevicePCIBusInfoPropertiesEXT( *reinterpret_cast<PhysicalDevicePCIBusInfoPropertiesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDevicePCIBusInfoPropertiesEXT & operator=( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDevicePCIBusInfoPropertiesEXT & operator=( VkPhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkPhysicalDevicePCIBusInfoPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevicePCIBusInfoPropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDevicePCIBusInfoPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevicePCIBusInfoPropertiesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, pciDomain, pciBus, pciDevice, pciFunction );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDevicePCIBusInfoPropertiesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pciDomain == rhs.pciDomain )
+          && ( pciBus == rhs.pciBus )
+          && ( pciDevice == rhs.pciDevice )
+          && ( pciFunction == rhs.pciFunction );
+#endif
+    }
+
+    bool operator!=( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePciBusInfoPropertiesEXT;
+    void * pNext = {};
+    uint32_t pciDomain = {};
+    uint32_t pciBus = {};
+    uint32_t pciDevice = {};
+    uint32_t pciFunction = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevicePciBusInfoPropertiesEXT>
+  {
+    using Type = PhysicalDevicePCIBusInfoPropertiesEXT;
+  };
+
+  struct PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT
+  {
+    using NativeType = VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePageableDeviceLocalMemoryFeaturesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 pageableDeviceLocalMemory_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), pageableDeviceLocalMemory( pageableDeviceLocalMemory_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT( PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT( VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT( *reinterpret_cast<PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT & operator=( PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT & operator=( VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT & setPageableDeviceLocalMemory( VULKAN_HPP_NAMESPACE::Bool32 pageableDeviceLocalMemory_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pageableDeviceLocalMemory = pageableDeviceLocalMemory_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, pageableDeviceLocalMemory );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pageableDeviceLocalMemory == rhs.pageableDeviceLocalMemory );
+#endif
+    }
+
+    bool operator!=( PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePageableDeviceLocalMemoryFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 pageableDeviceLocalMemory = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevicePageableDeviceLocalMemoryFeaturesEXT>
+  {
+    using Type = PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT;
+  };
+
+  struct PhysicalDevicePerformanceQueryFeaturesKHR
+  {
+    using NativeType = VkPhysicalDevicePerformanceQueryFeaturesKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools_ = {}, VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), performanceCounterQueryPools( performanceCounterQueryPools_ ), performanceCounterMultipleQueryPools( performanceCounterMultipleQueryPools_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryFeaturesKHR( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevicePerformanceQueryFeaturesKHR( VkPhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDevicePerformanceQueryFeaturesKHR( *reinterpret_cast<PhysicalDevicePerformanceQueryFeaturesKHR const *>( &rhs ) )
+    {}
+
+
+    PhysicalDevicePerformanceQueryFeaturesKHR & operator=( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDevicePerformanceQueryFeaturesKHR & operator=( VkPhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerformanceQueryFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerformanceQueryFeaturesKHR & setPerformanceCounterQueryPools( VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools_ ) VULKAN_HPP_NOEXCEPT
+    {
+      performanceCounterQueryPools = performanceCounterQueryPools_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerformanceQueryFeaturesKHR & setPerformanceCounterMultipleQueryPools( VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools_ ) VULKAN_HPP_NOEXCEPT
+    {
+      performanceCounterMultipleQueryPools = performanceCounterMultipleQueryPools_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDevicePerformanceQueryFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevicePerformanceQueryFeaturesKHR*>( this );
+    }
+
+    operator VkPhysicalDevicePerformanceQueryFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevicePerformanceQueryFeaturesKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, performanceCounterQueryPools, performanceCounterMultipleQueryPools );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDevicePerformanceQueryFeaturesKHR const & ) const = default;
+#else
+    bool operator==( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( performanceCounterQueryPools == rhs.performanceCounterQueryPools )
+          && ( performanceCounterMultipleQueryPools == rhs.performanceCounterMultipleQueryPools );
+#endif
+    }
+
+    bool operator!=( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools = {};
+    VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR>
+  {
+    using Type = PhysicalDevicePerformanceQueryFeaturesKHR;
+  };
+
+  struct PhysicalDevicePerformanceQueryPropertiesKHR
+  {
+    using NativeType = VkPhysicalDevicePerformanceQueryPropertiesKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryPropertiesKHR(VULKAN_HPP_NAMESPACE::Bool32 allowCommandBufferQueryCopies_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), allowCommandBufferQueryCopies( allowCommandBufferQueryCopies_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryPropertiesKHR( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevicePerformanceQueryPropertiesKHR( VkPhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDevicePerformanceQueryPropertiesKHR( *reinterpret_cast<PhysicalDevicePerformanceQueryPropertiesKHR const *>( &rhs ) )
+    {}
+
+
+    PhysicalDevicePerformanceQueryPropertiesKHR & operator=( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDevicePerformanceQueryPropertiesKHR & operator=( VkPhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryPropertiesKHR const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkPhysicalDevicePerformanceQueryPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevicePerformanceQueryPropertiesKHR*>( this );
+    }
+
+    operator VkPhysicalDevicePerformanceQueryPropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevicePerformanceQueryPropertiesKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, allowCommandBufferQueryCopies );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDevicePerformanceQueryPropertiesKHR const & ) const = default;
+#else
+    bool operator==( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( allowCommandBufferQueryCopies == rhs.allowCommandBufferQueryCopies );
+#endif
+    }
+
+    bool operator!=( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 allowCommandBufferQueryCopies = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR>
+  {
+    using Type = PhysicalDevicePerformanceQueryPropertiesKHR;
+  };
+
+  struct PhysicalDevicePipelineCreationCacheControlFeatures
+  {
+    using NativeType = VkPhysicalDevicePipelineCreationCacheControlFeatures;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineCreationCacheControlFeatures;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineCreationCacheControlFeatures(VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), pipelineCreationCacheControl( pipelineCreationCacheControl_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineCreationCacheControlFeatures( PhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevicePipelineCreationCacheControlFeatures( VkPhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDevicePipelineCreationCacheControlFeatures( *reinterpret_cast<PhysicalDevicePipelineCreationCacheControlFeatures const *>( &rhs ) )
+    {}
+
+
+    PhysicalDevicePipelineCreationCacheControlFeatures & operator=( PhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDevicePipelineCreationCacheControlFeatures & operator=( VkPhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineCreationCacheControlFeatures const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineCreationCacheControlFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineCreationCacheControlFeatures & setPipelineCreationCacheControl( VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineCreationCacheControl = pipelineCreationCacheControl_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDevicePipelineCreationCacheControlFeatures const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevicePipelineCreationCacheControlFeatures*>( this );
+    }
+
+    operator VkPhysicalDevicePipelineCreationCacheControlFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevicePipelineCreationCacheControlFeatures*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, pipelineCreationCacheControl );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDevicePipelineCreationCacheControlFeatures const & ) const = default;
+#else
+    bool operator==( PhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pipelineCreationCacheControl == rhs.pipelineCreationCacheControl );
+#endif
+    }
+
+    bool operator!=( PhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineCreationCacheControlFeatures;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevicePipelineCreationCacheControlFeatures>
+  {
+    using Type = PhysicalDevicePipelineCreationCacheControlFeatures;
+  };
+  using PhysicalDevicePipelineCreationCacheControlFeaturesEXT = PhysicalDevicePipelineCreationCacheControlFeatures;
+
+  struct PhysicalDevicePipelineExecutablePropertiesFeaturesKHR
+  {
+    using NativeType = VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineExecutablePropertiesFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), pipelineExecutableInfo( pipelineExecutableInfo_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( *reinterpret_cast<PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const *>( &rhs ) )
+    {}
+
+
+    PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & operator=( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & operator=( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & setPipelineExecutableInfo( VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineExecutableInfo = pipelineExecutableInfo_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR*>( this );
+    }
+
+    operator VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, pipelineExecutableInfo );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & ) const = default;
+#else
+    bool operator==( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pipelineExecutableInfo == rhs.pipelineExecutableInfo );
+#endif
+    }
+
+    bool operator!=( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR>
+  {
+    using Type = PhysicalDevicePipelineExecutablePropertiesFeaturesKHR;
+  };
+
+  struct PhysicalDevicePipelinePropertiesFeaturesEXT
+  {
+    using NativeType = VkPhysicalDevicePipelinePropertiesFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelinePropertiesFeaturesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDevicePipelinePropertiesFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 pipelinePropertiesIdentifier_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), pipelinePropertiesIdentifier( pipelinePropertiesIdentifier_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePipelinePropertiesFeaturesEXT( PhysicalDevicePipelinePropertiesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevicePipelinePropertiesFeaturesEXT( VkPhysicalDevicePipelinePropertiesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDevicePipelinePropertiesFeaturesEXT( *reinterpret_cast<PhysicalDevicePipelinePropertiesFeaturesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDevicePipelinePropertiesFeaturesEXT & operator=( PhysicalDevicePipelinePropertiesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDevicePipelinePropertiesFeaturesEXT & operator=( VkPhysicalDevicePipelinePropertiesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelinePropertiesFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelinePropertiesFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelinePropertiesFeaturesEXT & setPipelinePropertiesIdentifier( VULKAN_HPP_NAMESPACE::Bool32 pipelinePropertiesIdentifier_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelinePropertiesIdentifier = pipelinePropertiesIdentifier_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDevicePipelinePropertiesFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevicePipelinePropertiesFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDevicePipelinePropertiesFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevicePipelinePropertiesFeaturesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, pipelinePropertiesIdentifier );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDevicePipelinePropertiesFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDevicePipelinePropertiesFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pipelinePropertiesIdentifier == rhs.pipelinePropertiesIdentifier );
+#endif
+    }
+
+    bool operator!=( PhysicalDevicePipelinePropertiesFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelinePropertiesFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 pipelinePropertiesIdentifier = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevicePipelinePropertiesFeaturesEXT>
+  {
+    using Type = PhysicalDevicePipelinePropertiesFeaturesEXT;
+  };
+
+  struct PhysicalDevicePipelineProtectedAccessFeaturesEXT
+  {
+    using NativeType = VkPhysicalDevicePipelineProtectedAccessFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineProtectedAccessFeaturesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineProtectedAccessFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 pipelineProtectedAccess_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), pipelineProtectedAccess( pipelineProtectedAccess_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineProtectedAccessFeaturesEXT( PhysicalDevicePipelineProtectedAccessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevicePipelineProtectedAccessFeaturesEXT( VkPhysicalDevicePipelineProtectedAccessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDevicePipelineProtectedAccessFeaturesEXT( *reinterpret_cast<PhysicalDevicePipelineProtectedAccessFeaturesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDevicePipelineProtectedAccessFeaturesEXT & operator=( PhysicalDevicePipelineProtectedAccessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDevicePipelineProtectedAccessFeaturesEXT & operator=( VkPhysicalDevicePipelineProtectedAccessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineProtectedAccessFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineProtectedAccessFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineProtectedAccessFeaturesEXT & setPipelineProtectedAccess( VULKAN_HPP_NAMESPACE::Bool32 pipelineProtectedAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineProtectedAccess = pipelineProtectedAccess_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDevicePipelineProtectedAccessFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevicePipelineProtectedAccessFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDevicePipelineProtectedAccessFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevicePipelineProtectedAccessFeaturesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, pipelineProtectedAccess );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDevicePipelineProtectedAccessFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDevicePipelineProtectedAccessFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pipelineProtectedAccess == rhs.pipelineProtectedAccess );
+#endif
+    }
+
+    bool operator!=( PhysicalDevicePipelineProtectedAccessFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineProtectedAccessFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 pipelineProtectedAccess = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevicePipelineProtectedAccessFeaturesEXT>
+  {
+    using Type = PhysicalDevicePipelineProtectedAccessFeaturesEXT;
+  };
+
+  struct PhysicalDevicePipelineRobustnessFeaturesEXT
+  {
+    using NativeType = VkPhysicalDevicePipelineRobustnessFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineRobustnessFeaturesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineRobustnessFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 pipelineRobustness_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), pipelineRobustness( pipelineRobustness_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineRobustnessFeaturesEXT( PhysicalDevicePipelineRobustnessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevicePipelineRobustnessFeaturesEXT( VkPhysicalDevicePipelineRobustnessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDevicePipelineRobustnessFeaturesEXT( *reinterpret_cast<PhysicalDevicePipelineRobustnessFeaturesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDevicePipelineRobustnessFeaturesEXT & operator=( PhysicalDevicePipelineRobustnessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDevicePipelineRobustnessFeaturesEXT & operator=( VkPhysicalDevicePipelineRobustnessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineRobustnessFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineRobustnessFeaturesEXT & setPipelineRobustness( VULKAN_HPP_NAMESPACE::Bool32 pipelineRobustness_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineRobustness = pipelineRobustness_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDevicePipelineRobustnessFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevicePipelineRobustnessFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDevicePipelineRobustnessFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevicePipelineRobustnessFeaturesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, pipelineRobustness );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDevicePipelineRobustnessFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDevicePipelineRobustnessFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pipelineRobustness == rhs.pipelineRobustness );
+#endif
+    }
+
+    bool operator!=( PhysicalDevicePipelineRobustnessFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineRobustnessFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 pipelineRobustness = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevicePipelineRobustnessFeaturesEXT>
+  {
+    using Type = PhysicalDevicePipelineRobustnessFeaturesEXT;
+  };
+
+  struct PhysicalDevicePipelineRobustnessPropertiesEXT
+  {
+    using NativeType = VkPhysicalDevicePipelineRobustnessPropertiesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineRobustnessPropertiesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineRobustnessPropertiesEXT(VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT defaultRobustnessStorageBuffers_ = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT::eDeviceDefault, VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT defaultRobustnessUniformBuffers_ = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT::eDeviceDefault, VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT defaultRobustnessVertexInputs_ = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT::eDeviceDefault, VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehaviorEXT defaultRobustnessImages_ = VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehaviorEXT::eDeviceDefault, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), defaultRobustnessStorageBuffers( defaultRobustnessStorageBuffers_ ), defaultRobustnessUniformBuffers( defaultRobustnessUniformBuffers_ ), defaultRobustnessVertexInputs( defaultRobustnessVertexInputs_ ), defaultRobustnessImages( defaultRobustnessImages_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineRobustnessPropertiesEXT( PhysicalDevicePipelineRobustnessPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevicePipelineRobustnessPropertiesEXT( VkPhysicalDevicePipelineRobustnessPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDevicePipelineRobustnessPropertiesEXT( *reinterpret_cast<PhysicalDevicePipelineRobustnessPropertiesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDevicePipelineRobustnessPropertiesEXT & operator=( PhysicalDevicePipelineRobustnessPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDevicePipelineRobustnessPropertiesEXT & operator=( VkPhysicalDevicePipelineRobustnessPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkPhysicalDevicePipelineRobustnessPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevicePipelineRobustnessPropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDevicePipelineRobustnessPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevicePipelineRobustnessPropertiesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT const &, VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT const &, VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT const &, VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehaviorEXT const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, defaultRobustnessStorageBuffers, defaultRobustnessUniformBuffers, defaultRobustnessVertexInputs, defaultRobustnessImages );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDevicePipelineRobustnessPropertiesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDevicePipelineRobustnessPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( defaultRobustnessStorageBuffers == rhs.defaultRobustnessStorageBuffers )
+          && ( defaultRobustnessUniformBuffers == rhs.defaultRobustnessUniformBuffers )
+          && ( defaultRobustnessVertexInputs == rhs.defaultRobustnessVertexInputs )
+          && ( defaultRobustnessImages == rhs.defaultRobustnessImages );
+#endif
+    }
+
+    bool operator!=( PhysicalDevicePipelineRobustnessPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineRobustnessPropertiesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT defaultRobustnessStorageBuffers = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT::eDeviceDefault;
+    VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT defaultRobustnessUniformBuffers = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT::eDeviceDefault;
+    VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT defaultRobustnessVertexInputs = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT::eDeviceDefault;
+    VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehaviorEXT defaultRobustnessImages = VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehaviorEXT::eDeviceDefault;
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevicePipelineRobustnessPropertiesEXT>
+  {
+    using Type = PhysicalDevicePipelineRobustnessPropertiesEXT;
+  };
+
+  struct PhysicalDevicePointClippingProperties
+  {
+    using NativeType = VkPhysicalDevicePointClippingProperties;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePointClippingProperties;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDevicePointClippingProperties(VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior_ = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), pointClippingBehavior( pointClippingBehavior_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePointClippingProperties( PhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevicePointClippingProperties( VkPhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDevicePointClippingProperties( *reinterpret_cast<PhysicalDevicePointClippingProperties const *>( &rhs ) )
+    {}
+
+
+    PhysicalDevicePointClippingProperties & operator=( PhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDevicePointClippingProperties & operator=( VkPhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingProperties const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkPhysicalDevicePointClippingProperties const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevicePointClippingProperties*>( this );
+    }
+
+    operator VkPhysicalDevicePointClippingProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevicePointClippingProperties*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PointClippingBehavior const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, pointClippingBehavior );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDevicePointClippingProperties const & ) const = default;
+#else
+    bool operator==( PhysicalDevicePointClippingProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pointClippingBehavior == rhs.pointClippingBehavior );
+#endif
+    }
+
+    bool operator!=( PhysicalDevicePointClippingProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePointClippingProperties;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes;
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevicePointClippingProperties>
+  {
+    using Type = PhysicalDevicePointClippingProperties;
+  };
+  using PhysicalDevicePointClippingPropertiesKHR = PhysicalDevicePointClippingProperties;
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  struct PhysicalDevicePortabilitySubsetFeaturesKHR
+  {
+    using NativeType = VkPhysicalDevicePortabilitySubsetFeaturesKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 constantAlphaColorBlendFactors_ = {}, VULKAN_HPP_NAMESPACE::Bool32 events_ = {}, VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatReinterpretation_ = {}, VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatSwizzle_ = {}, VULKAN_HPP_NAMESPACE::Bool32 imageView2DOn3DImage_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multisampleArrayImage_ = {}, VULKAN_HPP_NAMESPACE::Bool32 mutableComparisonSamplers_ = {}, VULKAN_HPP_NAMESPACE::Bool32 pointPolygons_ = {}, VULKAN_HPP_NAMESPACE::Bool32 samplerMipLodBias_ = {}, VULKAN_HPP_NAMESPACE::Bool32 separateStencilMaskRef_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSampleRateInterpolationFunctions_ = {}, VULKAN_HPP_NAMESPACE::Bool32 tessellationIsolines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 tessellationPointMode_ = {}, VULKAN_HPP_NAMESPACE::Bool32 triangleFans_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeAccessBeyondStride_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), constantAlphaColorBlendFactors( constantAlphaColorBlendFactors_ ), events( events_ ), imageViewFormatReinterpretation( imageViewFormatReinterpretation_ ), imageViewFormatSwizzle( imageViewFormatSwizzle_ ), imageView2DOn3DImage( imageView2DOn3DImage_ ), multisampleArrayImage( multisampleArrayImage_ ), mutableComparisonSamplers( mutableComparisonSamplers_ ), pointPolygons( pointPolygons_ ), samplerMipLodBias( samplerMipLodBias_ ), separateStencilMaskRef( separateStencilMaskRef_ ), shaderSampleRateInterpolationFunctions( shaderSampleRateInterpolationFunctions_ ), tessellationIsolines( tessellationIsolines_ ), tessellationPointMode( tessellationPointMode_ ), triangleFans( triangleFans_ ), vertexAttributeAccessBeyondStride( vertexAttributeAccessBeyondStride_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetFeaturesKHR( PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevicePortabilitySubsetFeaturesKHR( VkPhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDevicePortabilitySubsetFeaturesKHR( *reinterpret_cast<PhysicalDevicePortabilitySubsetFeaturesKHR const *>( &rhs ) )
+    {}
+
+
+    PhysicalDevicePortabilitySubsetFeaturesKHR & operator=( PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDevicePortabilitySubsetFeaturesKHR & operator=( VkPhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetFeaturesKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setConstantAlphaColorBlendFactors( VULKAN_HPP_NAMESPACE::Bool32 constantAlphaColorBlendFactors_ ) VULKAN_HPP_NOEXCEPT
+    {
+      constantAlphaColorBlendFactors = constantAlphaColorBlendFactors_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setEvents( VULKAN_HPP_NAMESPACE::Bool32 events_ ) VULKAN_HPP_NOEXCEPT
+    {
+      events = events_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setImageViewFormatReinterpretation( VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatReinterpretation_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageViewFormatReinterpretation = imageViewFormatReinterpretation_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setImageViewFormatSwizzle( VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatSwizzle_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageViewFormatSwizzle = imageViewFormatSwizzle_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setImageView2DOn3DImage( VULKAN_HPP_NAMESPACE::Bool32 imageView2DOn3DImage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageView2DOn3DImage = imageView2DOn3DImage_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setMultisampleArrayImage( VULKAN_HPP_NAMESPACE::Bool32 multisampleArrayImage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      multisampleArrayImage = multisampleArrayImage_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setMutableComparisonSamplers( VULKAN_HPP_NAMESPACE::Bool32 mutableComparisonSamplers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mutableComparisonSamplers = mutableComparisonSamplers_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setPointPolygons( VULKAN_HPP_NAMESPACE::Bool32 pointPolygons_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pointPolygons = pointPolygons_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setSamplerMipLodBias( VULKAN_HPP_NAMESPACE::Bool32 samplerMipLodBias_ ) VULKAN_HPP_NOEXCEPT
+    {
+      samplerMipLodBias = samplerMipLodBias_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setSeparateStencilMaskRef( VULKAN_HPP_NAMESPACE::Bool32 separateStencilMaskRef_ ) VULKAN_HPP_NOEXCEPT
+    {
+      separateStencilMaskRef = separateStencilMaskRef_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setShaderSampleRateInterpolationFunctions( VULKAN_HPP_NAMESPACE::Bool32 shaderSampleRateInterpolationFunctions_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSampleRateInterpolationFunctions = shaderSampleRateInterpolationFunctions_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setTessellationIsolines( VULKAN_HPP_NAMESPACE::Bool32 tessellationIsolines_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tessellationIsolines = tessellationIsolines_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setTessellationPointMode( VULKAN_HPP_NAMESPACE::Bool32 tessellationPointMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tessellationPointMode = tessellationPointMode_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setTriangleFans( VULKAN_HPP_NAMESPACE::Bool32 triangleFans_ ) VULKAN_HPP_NOEXCEPT
+    {
+      triangleFans = triangleFans_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setVertexAttributeAccessBeyondStride( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeAccessBeyondStride_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexAttributeAccessBeyondStride = vertexAttributeAccessBeyondStride_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDevicePortabilitySubsetFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevicePortabilitySubsetFeaturesKHR*>( this );
+    }
+
+    operator VkPhysicalDevicePortabilitySubsetFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevicePortabilitySubsetFeaturesKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, constantAlphaColorBlendFactors, events, imageViewFormatReinterpretation, imageViewFormatSwizzle, imageView2DOn3DImage, multisampleArrayImage, mutableComparisonSamplers, pointPolygons, samplerMipLodBias, separateStencilMaskRef, shaderSampleRateInterpolationFunctions, tessellationIsolines, tessellationPointMode, triangleFans, vertexAttributeAccessBeyondStride );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDevicePortabilitySubsetFeaturesKHR const & ) const = default;
+#else
+    bool operator==( PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( constantAlphaColorBlendFactors == rhs.constantAlphaColorBlendFactors )
+          && ( events == rhs.events )
+          && ( imageViewFormatReinterpretation == rhs.imageViewFormatReinterpretation )
+          && ( imageViewFormatSwizzle == rhs.imageViewFormatSwizzle )
+          && ( imageView2DOn3DImage == rhs.imageView2DOn3DImage )
+          && ( multisampleArrayImage == rhs.multisampleArrayImage )
+          && ( mutableComparisonSamplers == rhs.mutableComparisonSamplers )
+          && ( pointPolygons == rhs.pointPolygons )
+          && ( samplerMipLodBias == rhs.samplerMipLodBias )
+          && ( separateStencilMaskRef == rhs.separateStencilMaskRef )
+          && ( shaderSampleRateInterpolationFunctions == rhs.shaderSampleRateInterpolationFunctions )
+          && ( tessellationIsolines == rhs.tessellationIsolines )
+          && ( tessellationPointMode == rhs.tessellationPointMode )
+          && ( triangleFans == rhs.triangleFans )
+          && ( vertexAttributeAccessBeyondStride == rhs.vertexAttributeAccessBeyondStride );
+#endif
+    }
+
+    bool operator!=( PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 constantAlphaColorBlendFactors = {};
+    VULKAN_HPP_NAMESPACE::Bool32 events = {};
+    VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatReinterpretation = {};
+    VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatSwizzle = {};
+    VULKAN_HPP_NAMESPACE::Bool32 imageView2DOn3DImage = {};
+    VULKAN_HPP_NAMESPACE::Bool32 multisampleArrayImage = {};
+    VULKAN_HPP_NAMESPACE::Bool32 mutableComparisonSamplers = {};
+    VULKAN_HPP_NAMESPACE::Bool32 pointPolygons = {};
+    VULKAN_HPP_NAMESPACE::Bool32 samplerMipLodBias = {};
+    VULKAN_HPP_NAMESPACE::Bool32 separateStencilMaskRef = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSampleRateInterpolationFunctions = {};
+    VULKAN_HPP_NAMESPACE::Bool32 tessellationIsolines = {};
+    VULKAN_HPP_NAMESPACE::Bool32 tessellationPointMode = {};
+    VULKAN_HPP_NAMESPACE::Bool32 triangleFans = {};
+    VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeAccessBeyondStride = {};
+
+  };
+
+  // Maps StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR back to its C++ wrapper type
+  // (consumed by the library's structure-chain / sType-dispatch machinery).
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR>
+  {
+    using Type = PhysicalDevicePortabilitySubsetFeaturesKHR;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  // C++ wrapper over VkPhysicalDevicePortabilitySubsetPropertiesKHR (beta extension, hence the
+  // VK_ENABLE_BETA_EXTENSIONS guard). Sole property: minVertexInputBindingStrideAlignment.
+  // The member layout must match NativeType exactly — the conversion operators below
+  // reinterpret_cast this object as the C struct.
+  struct PhysicalDevicePortabilitySubsetPropertiesKHR
+  {
+    using NativeType = VkPhysicalDevicePortabilitySubsetPropertiesKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetPropertiesKHR(uint32_t minVertexInputBindingStrideAlignment_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), minVertexInputBindingStrideAlignment( minVertexInputBindingStrideAlignment_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetPropertiesKHR( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by bitwise reinterpretation (relies on identical layout).
+    PhysicalDevicePortabilitySubsetPropertiesKHR( VkPhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDevicePortabilitySubsetPropertiesKHR( *reinterpret_cast<PhysicalDevicePortabilitySubsetPropertiesKHR const *>( &rhs ) )
+    {}
+
+
+    PhysicalDevicePortabilitySubsetPropertiesKHR & operator=( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDevicePortabilitySubsetPropertiesKHR & operator=( VkPhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetPropertiesKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters (builder style): each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetPropertiesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetPropertiesKHR & setMinVertexInputBindingStrideAlignment( uint32_t minVertexInputBindingStrideAlignment_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minVertexInputBindingStrideAlignment = minVertexInputBindingStrideAlignment_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views as the C struct for passing to the C API.
+    operator VkPhysicalDevicePortabilitySubsetPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevicePortabilitySubsetPropertiesKHR*>( this );
+    }
+
+    operator VkPhysicalDevicePortabilitySubsetPropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevicePortabilitySubsetPropertiesKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // reflect(): tuple of references to all members; also drives operator== below when enabled.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, minVertexInputBindingStrideAlignment );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDevicePortabilitySubsetPropertiesKHR const & ) const = default;
+#else
+    // Memberwise equality; note pNext is compared as a raw pointer, not deep-compared.
+    bool operator==( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( minVertexInputBindingStrideAlignment == rhs.minVertexInputBindingStrideAlignment );
+#endif
+    }
+
+    bool operator!=( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR;
+    void * pNext = {};
+    uint32_t minVertexInputBindingStrideAlignment = {};
+
+  };
+
+  // StructureType -> C++ type mapping for structure-chain machinery.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR>
+  {
+    using Type = PhysicalDevicePortabilitySubsetPropertiesKHR;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  // C++ wrapper over VkPhysicalDevicePresentBarrierFeaturesNV. Single feature flag: presentBarrier.
+  // Layout must match NativeType exactly — the conversion operators reinterpret_cast this object
+  // as the C struct.
+  struct PhysicalDevicePresentBarrierFeaturesNV
+  {
+    using NativeType = VkPhysicalDevicePresentBarrierFeaturesNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePresentBarrierFeaturesNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDevicePresentBarrierFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 presentBarrier_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), presentBarrier( presentBarrier_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePresentBarrierFeaturesNV( PhysicalDevicePresentBarrierFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by bitwise reinterpretation (relies on identical layout).
+    PhysicalDevicePresentBarrierFeaturesNV( VkPhysicalDevicePresentBarrierFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDevicePresentBarrierFeaturesNV( *reinterpret_cast<PhysicalDevicePresentBarrierFeaturesNV const *>( &rhs ) )
+    {}
+
+
+    PhysicalDevicePresentBarrierFeaturesNV & operator=( PhysicalDevicePresentBarrierFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDevicePresentBarrierFeaturesNV & operator=( VkPhysicalDevicePresentBarrierFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentBarrierFeaturesNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters (builder style): each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentBarrierFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentBarrierFeaturesNV & setPresentBarrier( VULKAN_HPP_NAMESPACE::Bool32 presentBarrier_ ) VULKAN_HPP_NOEXCEPT
+    {
+      presentBarrier = presentBarrier_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views as the C struct for passing to the C API.
+    operator VkPhysicalDevicePresentBarrierFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevicePresentBarrierFeaturesNV*>( this );
+    }
+
+    operator VkPhysicalDevicePresentBarrierFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevicePresentBarrierFeaturesNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // reflect(): tuple of references to all members; also drives operator== below when enabled.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, presentBarrier );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDevicePresentBarrierFeaturesNV const & ) const = default;
+#else
+    // Memberwise equality; note pNext is compared as a raw pointer, not deep-compared.
+    bool operator==( PhysicalDevicePresentBarrierFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( presentBarrier == rhs.presentBarrier );
+#endif
+    }
+
+    bool operator!=( PhysicalDevicePresentBarrierFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePresentBarrierFeaturesNV;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 presentBarrier = {};
+
+  };
+
+  // StructureType -> C++ type mapping for structure-chain machinery.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevicePresentBarrierFeaturesNV>
+  {
+    using Type = PhysicalDevicePresentBarrierFeaturesNV;
+  };
+
+  // C++ wrapper over VkPhysicalDevicePresentIdFeaturesKHR. Single feature flag: presentId.
+  // Layout must match NativeType exactly — the conversion operators reinterpret_cast this object
+  // as the C struct.
+  struct PhysicalDevicePresentIdFeaturesKHR
+  {
+    using NativeType = VkPhysicalDevicePresentIdFeaturesKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePresentIdFeaturesKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDevicePresentIdFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 presentId_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), presentId( presentId_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePresentIdFeaturesKHR( PhysicalDevicePresentIdFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by bitwise reinterpretation (relies on identical layout).
+    PhysicalDevicePresentIdFeaturesKHR( VkPhysicalDevicePresentIdFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDevicePresentIdFeaturesKHR( *reinterpret_cast<PhysicalDevicePresentIdFeaturesKHR const *>( &rhs ) )
+    {}
+
+
+    PhysicalDevicePresentIdFeaturesKHR & operator=( PhysicalDevicePresentIdFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDevicePresentIdFeaturesKHR & operator=( VkPhysicalDevicePresentIdFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentIdFeaturesKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters (builder style): each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentIdFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentIdFeaturesKHR & setPresentId( VULKAN_HPP_NAMESPACE::Bool32 presentId_ ) VULKAN_HPP_NOEXCEPT
+    {
+      presentId = presentId_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views as the C struct for passing to the C API.
+    operator VkPhysicalDevicePresentIdFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevicePresentIdFeaturesKHR*>( this );
+    }
+
+    operator VkPhysicalDevicePresentIdFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevicePresentIdFeaturesKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // reflect(): tuple of references to all members; also drives operator== below when enabled.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, presentId );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDevicePresentIdFeaturesKHR const & ) const = default;
+#else
+    // Memberwise equality; note pNext is compared as a raw pointer, not deep-compared.
+    bool operator==( PhysicalDevicePresentIdFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( presentId == rhs.presentId );
+#endif
+    }
+
+    bool operator!=( PhysicalDevicePresentIdFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePresentIdFeaturesKHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 presentId = {};
+
+  };
+
+  // StructureType -> C++ type mapping for structure-chain machinery.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevicePresentIdFeaturesKHR>
+  {
+    using Type = PhysicalDevicePresentIdFeaturesKHR;
+  };
+
+  // C++ wrapper over VkPhysicalDevicePresentWaitFeaturesKHR. Single feature flag: presentWait.
+  // Layout must match NativeType exactly — the conversion operators reinterpret_cast this object
+  // as the C struct.
+  struct PhysicalDevicePresentWaitFeaturesKHR
+  {
+    using NativeType = VkPhysicalDevicePresentWaitFeaturesKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePresentWaitFeaturesKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDevicePresentWaitFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 presentWait_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), presentWait( presentWait_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePresentWaitFeaturesKHR( PhysicalDevicePresentWaitFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by bitwise reinterpretation (relies on identical layout).
+    PhysicalDevicePresentWaitFeaturesKHR( VkPhysicalDevicePresentWaitFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDevicePresentWaitFeaturesKHR( *reinterpret_cast<PhysicalDevicePresentWaitFeaturesKHR const *>( &rhs ) )
+    {}
+
+
+    PhysicalDevicePresentWaitFeaturesKHR & operator=( PhysicalDevicePresentWaitFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDevicePresentWaitFeaturesKHR & operator=( VkPhysicalDevicePresentWaitFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentWaitFeaturesKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters (builder style): each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentWaitFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentWaitFeaturesKHR & setPresentWait( VULKAN_HPP_NAMESPACE::Bool32 presentWait_ ) VULKAN_HPP_NOEXCEPT
+    {
+      presentWait = presentWait_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views as the C struct for passing to the C API.
+    operator VkPhysicalDevicePresentWaitFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevicePresentWaitFeaturesKHR*>( this );
+    }
+
+    operator VkPhysicalDevicePresentWaitFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevicePresentWaitFeaturesKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // reflect(): tuple of references to all members; also drives operator== below when enabled.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, presentWait );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDevicePresentWaitFeaturesKHR const & ) const = default;
+#else
+    // Memberwise equality; note pNext is compared as a raw pointer, not deep-compared.
+    bool operator==( PhysicalDevicePresentWaitFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( presentWait == rhs.presentWait );
+#endif
+    }
+
+    bool operator!=( PhysicalDevicePresentWaitFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePresentWaitFeaturesKHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 presentWait = {};
+
+  };
+
+  // StructureType -> C++ type mapping for structure-chain machinery.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevicePresentWaitFeaturesKHR>
+  {
+    using Type = PhysicalDevicePresentWaitFeaturesKHR;
+  };
+
+  // C++ wrapper over VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT. Feature flags:
+  // primitiveTopologyListRestart, primitiveTopologyPatchListRestart. Layout must match NativeType
+  // exactly — the conversion operators reinterpret_cast this object as the C struct.
+  struct PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT
+  {
+    using NativeType = VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePrimitiveTopologyListRestartFeaturesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyListRestart_ = {}, VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyPatchListRestart_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), primitiveTopologyListRestart( primitiveTopologyListRestart_ ), primitiveTopologyPatchListRestart( primitiveTopologyPatchListRestart_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT( PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by bitwise reinterpretation (relies on identical layout).
+    PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT( VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT( *reinterpret_cast<PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT & operator=( PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT & operator=( VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters (builder style): each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT & setPrimitiveTopologyListRestart( VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyListRestart_ ) VULKAN_HPP_NOEXCEPT
+    {
+      primitiveTopologyListRestart = primitiveTopologyListRestart_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT & setPrimitiveTopologyPatchListRestart( VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyPatchListRestart_ ) VULKAN_HPP_NOEXCEPT
+    {
+      primitiveTopologyPatchListRestart = primitiveTopologyPatchListRestart_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views as the C struct for passing to the C API.
+    operator VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // reflect(): tuple of references to all members; also drives operator== below when enabled.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, primitiveTopologyListRestart, primitiveTopologyPatchListRestart );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & ) const = default;
+#else
+    // Memberwise equality; note pNext is compared as a raw pointer, not deep-compared.
+    bool operator==( PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( primitiveTopologyListRestart == rhs.primitiveTopologyListRestart )
+          && ( primitiveTopologyPatchListRestart == rhs.primitiveTopologyPatchListRestart );
+#endif
+    }
+
+    bool operator!=( PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePrimitiveTopologyListRestartFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyListRestart = {};
+    VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyPatchListRestart = {};
+
+  };
+
+  // StructureType -> C++ type mapping for structure-chain machinery.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevicePrimitiveTopologyListRestartFeaturesEXT>
+  {
+    using Type = PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT;
+  };
+
+  // C++ wrapper over VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT. Feature flags:
+  // primitivesGeneratedQuery, primitivesGeneratedQueryWithRasterizerDiscard,
+  // primitivesGeneratedQueryWithNonZeroStreams. Layout must match NativeType exactly — the
+  // conversion operators reinterpret_cast this object as the C struct.
+  struct PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT
+  {
+    using NativeType = VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePrimitivesGeneratedQueryFeaturesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQuery_ = {}, VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQueryWithRasterizerDiscard_ = {}, VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQueryWithNonZeroStreams_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), primitivesGeneratedQuery( primitivesGeneratedQuery_ ), primitivesGeneratedQueryWithRasterizerDiscard( primitivesGeneratedQueryWithRasterizerDiscard_ ), primitivesGeneratedQueryWithNonZeroStreams( primitivesGeneratedQueryWithNonZeroStreams_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT( PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by bitwise reinterpretation (relies on identical layout).
+    PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT( VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT( *reinterpret_cast<PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT & operator=( PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT & operator=( VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters (builder style): each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT & setPrimitivesGeneratedQuery( VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQuery_ ) VULKAN_HPP_NOEXCEPT
+    {
+      primitivesGeneratedQuery = primitivesGeneratedQuery_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT & setPrimitivesGeneratedQueryWithRasterizerDiscard( VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQueryWithRasterizerDiscard_ ) VULKAN_HPP_NOEXCEPT
+    {
+      primitivesGeneratedQueryWithRasterizerDiscard = primitivesGeneratedQueryWithRasterizerDiscard_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT & setPrimitivesGeneratedQueryWithNonZeroStreams( VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQueryWithNonZeroStreams_ ) VULKAN_HPP_NOEXCEPT
+    {
+      primitivesGeneratedQueryWithNonZeroStreams = primitivesGeneratedQueryWithNonZeroStreams_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views as the C struct for passing to the C API.
+    operator VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // reflect(): tuple of references to all members; also drives operator== below when enabled.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, primitivesGeneratedQuery, primitivesGeneratedQueryWithRasterizerDiscard, primitivesGeneratedQueryWithNonZeroStreams );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & ) const = default;
+#else
+    // Memberwise equality; note pNext is compared as a raw pointer, not deep-compared.
+    bool operator==( PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( primitivesGeneratedQuery == rhs.primitivesGeneratedQuery )
+          && ( primitivesGeneratedQueryWithRasterizerDiscard == rhs.primitivesGeneratedQueryWithRasterizerDiscard )
+          && ( primitivesGeneratedQueryWithNonZeroStreams == rhs.primitivesGeneratedQueryWithNonZeroStreams );
+#endif
+    }
+
+    bool operator!=( PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePrimitivesGeneratedQueryFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQuery = {};
+    VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQueryWithRasterizerDiscard = {};
+    VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQueryWithNonZeroStreams = {};
+
+  };
+
+  // StructureType -> C++ type mapping for structure-chain machinery.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevicePrimitivesGeneratedQueryFeaturesEXT>
+  {
+    using Type = PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT;
+  };
+
+  // C++ wrapper over VkPhysicalDevicePrivateDataFeatures. Single feature flag: privateData.
+  // Layout must match NativeType exactly — the conversion operators reinterpret_cast this object
+  // as the C struct. The EXT-suffixed name is kept as an alias below.
+  struct PhysicalDevicePrivateDataFeatures
+  {
+    using NativeType = VkPhysicalDevicePrivateDataFeatures;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePrivateDataFeatures;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDevicePrivateDataFeatures(VULKAN_HPP_NAMESPACE::Bool32 privateData_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), privateData( privateData_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePrivateDataFeatures( PhysicalDevicePrivateDataFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by bitwise reinterpretation (relies on identical layout).
+    PhysicalDevicePrivateDataFeatures( VkPhysicalDevicePrivateDataFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDevicePrivateDataFeatures( *reinterpret_cast<PhysicalDevicePrivateDataFeatures const *>( &rhs ) )
+    {}
+
+
+    PhysicalDevicePrivateDataFeatures & operator=( PhysicalDevicePrivateDataFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDevicePrivateDataFeatures & operator=( VkPhysicalDevicePrivateDataFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePrivateDataFeatures const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters (builder style): each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrivateDataFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrivateDataFeatures & setPrivateData( VULKAN_HPP_NAMESPACE::Bool32 privateData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      privateData = privateData_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views as the C struct for passing to the C API.
+    operator VkPhysicalDevicePrivateDataFeatures const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevicePrivateDataFeatures*>( this );
+    }
+
+    operator VkPhysicalDevicePrivateDataFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevicePrivateDataFeatures*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // reflect(): tuple of references to all members; also drives operator== below when enabled.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, privateData );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDevicePrivateDataFeatures const & ) const = default;
+#else
+    // Memberwise equality; note pNext is compared as a raw pointer, not deep-compared.
+    bool operator==( PhysicalDevicePrivateDataFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( privateData == rhs.privateData );
+#endif
+    }
+
+    bool operator!=( PhysicalDevicePrivateDataFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePrivateDataFeatures;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 privateData = {};
+
+  };
+
+  // StructureType -> C++ type mapping for structure-chain machinery.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevicePrivateDataFeatures>
+  {
+    using Type = PhysicalDevicePrivateDataFeatures;
+  };
+  // Backward-compatibility alias for the EXT-suffixed name.
+  using PhysicalDevicePrivateDataFeaturesEXT = PhysicalDevicePrivateDataFeatures;
+
+  struct PhysicalDeviceSparseProperties
+  {
+    using NativeType = VkPhysicalDeviceSparseProperties;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseProperties(VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DBlockShape_ = {}, VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DMultisampleBlockShape_ = {}, VULKAN_HPP_NAMESPACE::Bool32 residencyStandard3DBlockShape_ = {}, VULKAN_HPP_NAMESPACE::Bool32 residencyAlignedMipSize_ = {}, VULKAN_HPP_NAMESPACE::Bool32 residencyNonResidentStrict_ = {}) VULKAN_HPP_NOEXCEPT
+    : residencyStandard2DBlockShape( residencyStandard2DBlockShape_ ), residencyStandard2DMultisampleBlockShape( residencyStandard2DMultisampleBlockShape_ ), residencyStandard3DBlockShape( residencyStandard3DBlockShape_ ), residencyAlignedMipSize( residencyAlignedMipSize_ ), residencyNonResidentStrict( residencyNonResidentStrict_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseProperties( PhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceSparseProperties( VkPhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceSparseProperties( *reinterpret_cast<PhysicalDeviceSparseProperties const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceSparseProperties & operator=( PhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceSparseProperties & operator=( VkPhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceSparseProperties const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceSparseProperties*>( this );
+    }
+
+    operator VkPhysicalDeviceSparseProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceSparseProperties*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( residencyStandard2DBlockShape, residencyStandard2DMultisampleBlockShape, residencyStandard3DBlockShape, residencyAlignedMipSize, residencyNonResidentStrict );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceSparseProperties const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceSparseProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( residencyStandard2DBlockShape == rhs.residencyStandard2DBlockShape )
+          && ( residencyStandard2DMultisampleBlockShape == rhs.residencyStandard2DMultisampleBlockShape )
+          && ( residencyStandard3DBlockShape == rhs.residencyStandard3DBlockShape )
+          && ( residencyAlignedMipSize == rhs.residencyAlignedMipSize )
+          && ( residencyNonResidentStrict == rhs.residencyNonResidentStrict );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceSparseProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DBlockShape = {};
+    VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DMultisampleBlockShape = {};
+    VULKAN_HPP_NAMESPACE::Bool32 residencyStandard3DBlockShape = {};
+    VULKAN_HPP_NAMESPACE::Bool32 residencyAlignedMipSize = {};
+    VULKAN_HPP_NAMESPACE::Bool32 residencyNonResidentStrict = {};
+
+  };
+
+  struct PhysicalDeviceProperties
+  {
+    using NativeType = VkPhysicalDeviceProperties;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties(uint32_t apiVersion_ = {}, uint32_t driverVersion_ = {}, uint32_t vendorID_ = {}, uint32_t deviceID_ = {}, VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType_ = VULKAN_HPP_NAMESPACE::PhysicalDeviceType::eOther, std::array<char,VK_MAX_PHYSICAL_DEVICE_NAME_SIZE> const & deviceName_ = {}, std::array<uint8_t,VK_UUID_SIZE> const & pipelineCacheUUID_ = {}, VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits limits_ = {}, VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties sparseProperties_ = {}) VULKAN_HPP_NOEXCEPT
+    : apiVersion( apiVersion_ ), driverVersion( driverVersion_ ), vendorID( vendorID_ ), deviceID( deviceID_ ), deviceType( deviceType_ ), deviceName( deviceName_ ), pipelineCacheUUID( pipelineCacheUUID_ ), limits( limits_ ), sparseProperties( sparseProperties_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties( PhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceProperties( VkPhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceProperties( *reinterpret_cast<PhysicalDeviceProperties const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceProperties & operator=( PhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceProperties & operator=( VkPhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceProperties const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceProperties*>( this );
+    }
+
+    operator VkPhysicalDeviceProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceProperties*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::PhysicalDeviceType const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_PHYSICAL_DEVICE_NAME_SIZE> const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &, VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits const &, VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( apiVersion, driverVersion, vendorID, deviceID, deviceType, deviceName, pipelineCacheUUID, limits, sparseProperties );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceProperties const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( apiVersion == rhs.apiVersion )
+          && ( driverVersion == rhs.driverVersion )
+          && ( vendorID == rhs.vendorID )
+          && ( deviceID == rhs.deviceID )
+          && ( deviceType == rhs.deviceType )
+          && ( deviceName == rhs.deviceName )
+          && ( pipelineCacheUUID == rhs.pipelineCacheUUID )
+          && ( limits == rhs.limits )
+          && ( sparseProperties == rhs.sparseProperties );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    uint32_t apiVersion = {};
+    uint32_t driverVersion = {};
+    uint32_t vendorID = {};
+    uint32_t deviceID = {};
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType = VULKAN_HPP_NAMESPACE::PhysicalDeviceType::eOther;
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_PHYSICAL_DEVICE_NAME_SIZE> deviceName = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> pipelineCacheUUID = {};
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits limits = {};
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties sparseProperties = {};
+
+  };
+
+  struct PhysicalDeviceProperties2
+  {
+    using NativeType = VkPhysicalDeviceProperties2;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProperties2;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties2(VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), properties( properties_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties2( PhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceProperties2( VkPhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceProperties2( *reinterpret_cast<PhysicalDeviceProperties2 const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceProperties2 & operator=( PhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceProperties2 & operator=( VkPhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceProperties2 const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceProperties2*>( this );
+    }
+
+    operator VkPhysicalDeviceProperties2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceProperties2*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, properties );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceProperties2 const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( properties == rhs.properties );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProperties2;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceProperties2>
+  {
+    using Type = PhysicalDeviceProperties2;
+  };
+  using PhysicalDeviceProperties2KHR = PhysicalDeviceProperties2;
+
+  struct PhysicalDeviceProtectedMemoryFeatures
+  {
+    using NativeType = VkPhysicalDeviceProtectedMemoryFeatures;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProtectedMemoryFeatures;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryFeatures(VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), protectedMemory( protectedMemory_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryFeatures( PhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceProtectedMemoryFeatures( VkPhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceProtectedMemoryFeatures( *reinterpret_cast<PhysicalDeviceProtectedMemoryFeatures const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceProtectedMemoryFeatures & operator=( PhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceProtectedMemoryFeatures & operator=( VkPhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryFeatures const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProtectedMemoryFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProtectedMemoryFeatures & setProtectedMemory( VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ ) VULKAN_HPP_NOEXCEPT
+    {
+      protectedMemory = protectedMemory_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceProtectedMemoryFeatures const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceProtectedMemoryFeatures*>( this );
+    }
+
+    operator VkPhysicalDeviceProtectedMemoryFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceProtectedMemoryFeatures*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, protectedMemory );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceProtectedMemoryFeatures const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceProtectedMemoryFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( protectedMemory == rhs.protectedMemory );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceProtectedMemoryFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProtectedMemoryFeatures;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 protectedMemory = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceProtectedMemoryFeatures>
+  {
+    using Type = PhysicalDeviceProtectedMemoryFeatures;
+  };
+
+  struct PhysicalDeviceProtectedMemoryProperties
+  {
+    using NativeType = VkPhysicalDeviceProtectedMemoryProperties;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProtectedMemoryProperties;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryProperties(VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), protectedNoFault( protectedNoFault_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryProperties( PhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceProtectedMemoryProperties( VkPhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceProtectedMemoryProperties( *reinterpret_cast<PhysicalDeviceProtectedMemoryProperties const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceProtectedMemoryProperties & operator=( PhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceProtectedMemoryProperties & operator=( VkPhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceProtectedMemoryProperties const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceProtectedMemoryProperties*>( this );
+    }
+
+    operator VkPhysicalDeviceProtectedMemoryProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceProtectedMemoryProperties*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, protectedNoFault );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceProtectedMemoryProperties const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceProtectedMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( protectedNoFault == rhs.protectedNoFault );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceProtectedMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProtectedMemoryProperties;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceProtectedMemoryProperties>
+  {
+    using Type = PhysicalDeviceProtectedMemoryProperties;
+  };
+
+  struct PhysicalDeviceProvokingVertexFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceProvokingVertexFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProvokingVertexFeaturesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 provokingVertexLast_ = {}, VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesProvokingVertex_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), provokingVertexLast( provokingVertexLast_ ), transformFeedbackPreservesProvokingVertex( transformFeedbackPreservesProvokingVertex_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexFeaturesEXT( PhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceProvokingVertexFeaturesEXT( VkPhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceProvokingVertexFeaturesEXT( *reinterpret_cast<PhysicalDeviceProvokingVertexFeaturesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceProvokingVertexFeaturesEXT & operator=( PhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceProvokingVertexFeaturesEXT & operator=( VkPhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProvokingVertexFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProvokingVertexFeaturesEXT & setProvokingVertexLast( VULKAN_HPP_NAMESPACE::Bool32 provokingVertexLast_ ) VULKAN_HPP_NOEXCEPT
+    {
+      provokingVertexLast = provokingVertexLast_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProvokingVertexFeaturesEXT & setTransformFeedbackPreservesProvokingVertex( VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesProvokingVertex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      transformFeedbackPreservesProvokingVertex = transformFeedbackPreservesProvokingVertex_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceProvokingVertexFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceProvokingVertexFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceProvokingVertexFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceProvokingVertexFeaturesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, provokingVertexLast, transformFeedbackPreservesProvokingVertex );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceProvokingVertexFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( provokingVertexLast == rhs.provokingVertexLast )
+          && ( transformFeedbackPreservesProvokingVertex == rhs.transformFeedbackPreservesProvokingVertex );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProvokingVertexFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 provokingVertexLast = {};
+    VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesProvokingVertex = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceProvokingVertexFeaturesEXT>
+  {
+    using Type = PhysicalDeviceProvokingVertexFeaturesEXT;
+  };
+
+  struct PhysicalDeviceProvokingVertexPropertiesEXT
+  {
+    using NativeType = VkPhysicalDeviceProvokingVertexPropertiesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProvokingVertexPropertiesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexPropertiesEXT(VULKAN_HPP_NAMESPACE::Bool32 provokingVertexModePerPipeline_ = {}, VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesTriangleFanProvokingVertex_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), provokingVertexModePerPipeline( provokingVertexModePerPipeline_ ), transformFeedbackPreservesTriangleFanProvokingVertex( transformFeedbackPreservesTriangleFanProvokingVertex_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexPropertiesEXT( PhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceProvokingVertexPropertiesEXT( VkPhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceProvokingVertexPropertiesEXT( *reinterpret_cast<PhysicalDeviceProvokingVertexPropertiesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceProvokingVertexPropertiesEXT & operator=( PhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceProvokingVertexPropertiesEXT & operator=( VkPhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceProvokingVertexPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceProvokingVertexPropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceProvokingVertexPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceProvokingVertexPropertiesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, provokingVertexModePerPipeline, transformFeedbackPreservesTriangleFanProvokingVertex );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceProvokingVertexPropertiesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( provokingVertexModePerPipeline == rhs.provokingVertexModePerPipeline )
+          && ( transformFeedbackPreservesTriangleFanProvokingVertex == rhs.transformFeedbackPreservesTriangleFanProvokingVertex );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProvokingVertexPropertiesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 provokingVertexModePerPipeline = {};
+    VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesTriangleFanProvokingVertex = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceProvokingVertexPropertiesEXT>
+  {
+    using Type = PhysicalDeviceProvokingVertexPropertiesEXT;
+  };
+
+  struct PhysicalDevicePushDescriptorPropertiesKHR
+  {
+    using NativeType = VkPhysicalDevicePushDescriptorPropertiesKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePushDescriptorPropertiesKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDevicePushDescriptorPropertiesKHR(uint32_t maxPushDescriptors_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), maxPushDescriptors( maxPushDescriptors_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePushDescriptorPropertiesKHR( PhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevicePushDescriptorPropertiesKHR( VkPhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDevicePushDescriptorPropertiesKHR( *reinterpret_cast<PhysicalDevicePushDescriptorPropertiesKHR const *>( &rhs ) )
+    {}
+
+
+    PhysicalDevicePushDescriptorPropertiesKHR & operator=( PhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDevicePushDescriptorPropertiesKHR & operator=( VkPhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkPhysicalDevicePushDescriptorPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevicePushDescriptorPropertiesKHR*>( this );
+    }
+
+    operator VkPhysicalDevicePushDescriptorPropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevicePushDescriptorPropertiesKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, maxPushDescriptors );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDevicePushDescriptorPropertiesKHR const & ) const = default;
+#else
+    bool operator==( PhysicalDevicePushDescriptorPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxPushDescriptors == rhs.maxPushDescriptors );
+#endif
+    }
+
+    bool operator!=( PhysicalDevicePushDescriptorPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePushDescriptorPropertiesKHR;
+    void * pNext = {};
+    uint32_t maxPushDescriptors = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevicePushDescriptorPropertiesKHR>
+  {
+    using Type = PhysicalDevicePushDescriptorPropertiesKHR;
+  };
+
+  struct PhysicalDeviceRGBA10X6FormatsFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRgba10X6FormatsFeaturesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceRGBA10X6FormatsFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 formatRgba10x6WithoutYCbCrSampler_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), formatRgba10x6WithoutYCbCrSampler( formatRgba10x6WithoutYCbCrSampler_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceRGBA10X6FormatsFeaturesEXT( PhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceRGBA10X6FormatsFeaturesEXT( VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceRGBA10X6FormatsFeaturesEXT( *reinterpret_cast<PhysicalDeviceRGBA10X6FormatsFeaturesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceRGBA10X6FormatsFeaturesEXT & operator=( PhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceRGBA10X6FormatsFeaturesEXT & operator=( VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRGBA10X6FormatsFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRGBA10X6FormatsFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRGBA10X6FormatsFeaturesEXT & setFormatRgba10x6WithoutYCbCrSampler( VULKAN_HPP_NAMESPACE::Bool32 formatRgba10x6WithoutYCbCrSampler_ ) VULKAN_HPP_NOEXCEPT
+    {
+      formatRgba10x6WithoutYCbCrSampler = formatRgba10x6WithoutYCbCrSampler_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, formatRgba10x6WithoutYCbCrSampler );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceRGBA10X6FormatsFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( formatRgba10x6WithoutYCbCrSampler == rhs.formatRgba10x6WithoutYCbCrSampler );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRgba10X6FormatsFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 formatRgba10x6WithoutYCbCrSampler = {};
+
+  };
+
+  // Maps StructureType::ePhysicalDeviceRgba10X6FormatsFeaturesEXT back to the
+  // corresponding C++ wrapper type (used for sType -> type lookups).
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceRgba10X6FormatsFeaturesEXT>
+  {
+    using Type = PhysicalDeviceRGBA10X6FormatsFeaturesEXT;
+  };
+
+  // C++ wrapper for VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT.
+  // Holds the sType/pNext chain header plus three feature booleans; the
+  // conversions below rely on layout compatibility with the C struct
+  // (reinterpret_cast only, no copying).
+  struct PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderColorAttachmentAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderDepthAttachmentAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderStencilAttachmentAccess_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), rasterizationOrderColorAttachmentAccess( rasterizationOrderColorAttachmentAccess_ ), rasterizationOrderDepthAttachmentAccess( rasterizationOrderDepthAttachmentAccess_ ), rasterizationOrderStencilAttachmentAccess( rasterizationOrderStencilAttachmentAccess_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT( PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the wrapper type.
+    PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT( VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT( *reinterpret_cast<PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT & operator=( PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct via the layout-compatible C++ wrapper.
+    PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT & operator=( VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+    // Chainable setters: each returns *this so calls can be fluently composed.
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT & setRasterizationOrderColorAttachmentAccess( VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderColorAttachmentAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rasterizationOrderColorAttachmentAccess = rasterizationOrderColorAttachmentAccess_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT & setRasterizationOrderDepthAttachmentAccess( VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderDepthAttachmentAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rasterizationOrderDepthAttachmentAccess = rasterizationOrderDepthAttachmentAccess_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT & setRasterizationOrderStencilAttachmentAccess( VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderStencilAttachmentAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rasterizationOrderStencilAttachmentAccess = rasterizationOrderStencilAttachmentAccess_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the C struct (reinterpret this object; no copy).
+    operator VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT*>( this );
+    }
+
+    // reflect(): tuple of references to every member, used by operator== below.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, rasterizationOrderColorAttachmentAccess, rasterizationOrderDepthAttachmentAccess, rasterizationOrderStencilAttachmentAccess );
+    }
+#endif
+
+
+    // Defaulted three-way comparison when available; otherwise member-wise == / !=.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( rasterizationOrderColorAttachmentAccess == rhs.rasterizationOrderColorAttachmentAccess )
+          && ( rasterizationOrderDepthAttachmentAccess == rhs.rasterizationOrderDepthAttachmentAccess )
+          && ( rasterizationOrderStencilAttachmentAccess == rhs.rasterizationOrderStencilAttachmentAccess );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members mirror the C struct, in declaration order.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderColorAttachmentAccess = {};
+    VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderDepthAttachmentAccess = {};
+    VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderStencilAttachmentAccess = {};
+
+  };
+
+  // Maps the StructureType enum value back to this C++ struct type.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT>
+  {
+    using Type = PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT;
+  };
+  // Alias for the equivalent ARM-suffixed name of the same struct.
+  using PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM = PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT;
+
+  // C++ wrapper for VkPhysicalDeviceRayQueryFeaturesKHR. Holds the sType/pNext
+  // chain header plus the rayQuery feature boolean; conversions below rely on
+  // layout compatibility with the C struct (reinterpret_cast only, no copying).
+  struct PhysicalDeviceRayQueryFeaturesKHR
+  {
+    using NativeType = VkPhysicalDeviceRayQueryFeaturesKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayQueryFeaturesKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceRayQueryFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 rayQuery_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), rayQuery( rayQuery_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceRayQueryFeaturesKHR( PhysicalDeviceRayQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the wrapper type.
+    PhysicalDeviceRayQueryFeaturesKHR( VkPhysicalDeviceRayQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceRayQueryFeaturesKHR( *reinterpret_cast<PhysicalDeviceRayQueryFeaturesKHR const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceRayQueryFeaturesKHR & operator=( PhysicalDeviceRayQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct via the layout-compatible C++ wrapper.
+    PhysicalDeviceRayQueryFeaturesKHR & operator=( VkPhysicalDeviceRayQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayQueryFeaturesKHR const *>( &rhs );
+      return *this;
+    }
+
+    // Chainable setters: each returns *this so calls can be fluently composed.
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayQueryFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayQueryFeaturesKHR & setRayQuery( VULKAN_HPP_NAMESPACE::Bool32 rayQuery_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rayQuery = rayQuery_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the C struct (reinterpret this object; no copy).
+    operator VkPhysicalDeviceRayQueryFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceRayQueryFeaturesKHR*>( this );
+    }
+
+    operator VkPhysicalDeviceRayQueryFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceRayQueryFeaturesKHR*>( this );
+    }
+
+    // reflect(): tuple of references to every member, used by operator== below.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, rayQuery );
+    }
+#endif
+
+
+    // Defaulted three-way comparison when available; otherwise member-wise == / !=.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceRayQueryFeaturesKHR const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceRayQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( rayQuery == rhs.rayQuery );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceRayQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members mirror the C struct, in declaration order.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayQueryFeaturesKHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 rayQuery = {};
+
+  };
+
+  // Maps the StructureType enum value back to this C++ struct type.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceRayQueryFeaturesKHR>
+  {
+    using Type = PhysicalDeviceRayQueryFeaturesKHR;
+  };
+
+  // C++ wrapper for VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV.
+  // Holds the sType/pNext chain header plus one feature boolean; conversions
+  // below rely on layout compatibility with the C struct (reinterpret_cast only).
+  struct PhysicalDeviceRayTracingInvocationReorderFeaturesNV
+  {
+    using NativeType = VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingInvocationReorderFeaturesNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingInvocationReorderFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 rayTracingInvocationReorder_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), rayTracingInvocationReorder( rayTracingInvocationReorder_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingInvocationReorderFeaturesNV( PhysicalDeviceRayTracingInvocationReorderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the wrapper type.
+    PhysicalDeviceRayTracingInvocationReorderFeaturesNV( VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceRayTracingInvocationReorderFeaturesNV( *reinterpret_cast<PhysicalDeviceRayTracingInvocationReorderFeaturesNV const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceRayTracingInvocationReorderFeaturesNV & operator=( PhysicalDeviceRayTracingInvocationReorderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct via the layout-compatible C++ wrapper.
+    PhysicalDeviceRayTracingInvocationReorderFeaturesNV & operator=( VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingInvocationReorderFeaturesNV const *>( &rhs );
+      return *this;
+    }
+
+    // Chainable setters: each returns *this so calls can be fluently composed.
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingInvocationReorderFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingInvocationReorderFeaturesNV & setRayTracingInvocationReorder( VULKAN_HPP_NAMESPACE::Bool32 rayTracingInvocationReorder_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rayTracingInvocationReorder = rayTracingInvocationReorder_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the C struct (reinterpret this object; no copy).
+    operator VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV*>( this );
+    }
+
+    // reflect(): tuple of references to every member, used by operator== below.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, rayTracingInvocationReorder );
+    }
+#endif
+
+
+    // Defaulted three-way comparison when available; otherwise member-wise == / !=.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceRayTracingInvocationReorderFeaturesNV const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceRayTracingInvocationReorderFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( rayTracingInvocationReorder == rhs.rayTracingInvocationReorder );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceRayTracingInvocationReorderFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members mirror the C struct, in declaration order.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingInvocationReorderFeaturesNV;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 rayTracingInvocationReorder = {};
+
+  };
+
+  // Maps the StructureType enum value back to this C++ struct type.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceRayTracingInvocationReorderFeaturesNV>
+  {
+    using Type = PhysicalDeviceRayTracingInvocationReorderFeaturesNV;
+  };
+
+  // C++ wrapper for VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV.
+  // Holds the sType/pNext chain header plus the reordering-hint enum value.
+  // Note: unlike the *Features* structs above, no member setters are generated
+  // here (only the pNext-preserving assignment from the C struct).
+  struct PhysicalDeviceRayTracingInvocationReorderPropertiesNV
+  {
+    using NativeType = VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingInvocationReorderPropertiesNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingInvocationReorderPropertiesNV(VULKAN_HPP_NAMESPACE::RayTracingInvocationReorderModeNV rayTracingInvocationReorderReorderingHint_ = VULKAN_HPP_NAMESPACE::RayTracingInvocationReorderModeNV::eNone, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), rayTracingInvocationReorderReorderingHint( rayTracingInvocationReorderReorderingHint_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingInvocationReorderPropertiesNV( PhysicalDeviceRayTracingInvocationReorderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the wrapper type.
+    PhysicalDeviceRayTracingInvocationReorderPropertiesNV( VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceRayTracingInvocationReorderPropertiesNV( *reinterpret_cast<PhysicalDeviceRayTracingInvocationReorderPropertiesNV const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceRayTracingInvocationReorderPropertiesNV & operator=( PhysicalDeviceRayTracingInvocationReorderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct via the layout-compatible C++ wrapper.
+    PhysicalDeviceRayTracingInvocationReorderPropertiesNV & operator=( VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingInvocationReorderPropertiesNV const *>( &rhs );
+      return *this;
+    }
+
+
+    // Implicit conversions to the C struct (reinterpret this object; no copy).
+    operator VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV*>( this );
+    }
+
+    // reflect(): tuple of references to every member, used by operator== below.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::RayTracingInvocationReorderModeNV const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, rayTracingInvocationReorderReorderingHint );
+    }
+#endif
+
+
+    // Defaulted three-way comparison when available; otherwise member-wise == / !=.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceRayTracingInvocationReorderPropertiesNV const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceRayTracingInvocationReorderPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( rayTracingInvocationReorderReorderingHint == rhs.rayTracingInvocationReorderReorderingHint );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceRayTracingInvocationReorderPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members mirror the C struct, in declaration order.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingInvocationReorderPropertiesNV;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::RayTracingInvocationReorderModeNV rayTracingInvocationReorderReorderingHint = VULKAN_HPP_NAMESPACE::RayTracingInvocationReorderModeNV::eNone;
+
+  };
+
+  // Maps the StructureType enum value back to this C++ struct type.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceRayTracingInvocationReorderPropertiesNV>
+  {
+    using Type = PhysicalDeviceRayTracingInvocationReorderPropertiesNV;
+  };
+
+  // C++ wrapper for VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR.
+  // Holds the sType/pNext chain header plus two feature booleans; conversions
+  // below rely on layout compatibility with the C struct (reinterpret_cast only).
+  struct PhysicalDeviceRayTracingMaintenance1FeaturesKHR
+  {
+    using NativeType = VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingMaintenance1FeaturesKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingMaintenance1FeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 rayTracingMaintenance1_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect2_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), rayTracingMaintenance1( rayTracingMaintenance1_ ), rayTracingPipelineTraceRaysIndirect2( rayTracingPipelineTraceRaysIndirect2_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingMaintenance1FeaturesKHR( PhysicalDeviceRayTracingMaintenance1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the wrapper type.
+    PhysicalDeviceRayTracingMaintenance1FeaturesKHR( VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceRayTracingMaintenance1FeaturesKHR( *reinterpret_cast<PhysicalDeviceRayTracingMaintenance1FeaturesKHR const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceRayTracingMaintenance1FeaturesKHR & operator=( PhysicalDeviceRayTracingMaintenance1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct via the layout-compatible C++ wrapper.
+    PhysicalDeviceRayTracingMaintenance1FeaturesKHR & operator=( VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMaintenance1FeaturesKHR const *>( &rhs );
+      return *this;
+    }
+
+    // Chainable setters: each returns *this so calls can be fluently composed.
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingMaintenance1FeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingMaintenance1FeaturesKHR & setRayTracingMaintenance1( VULKAN_HPP_NAMESPACE::Bool32 rayTracingMaintenance1_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rayTracingMaintenance1 = rayTracingMaintenance1_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingMaintenance1FeaturesKHR & setRayTracingPipelineTraceRaysIndirect2( VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect2_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rayTracingPipelineTraceRaysIndirect2 = rayTracingPipelineTraceRaysIndirect2_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the C struct (reinterpret this object; no copy).
+    operator VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR*>( this );
+    }
+
+    operator VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR*>( this );
+    }
+
+    // reflect(): tuple of references to every member, used by operator== below.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, rayTracingMaintenance1, rayTracingPipelineTraceRaysIndirect2 );
+    }
+#endif
+
+
+    // Defaulted three-way comparison when available; otherwise member-wise == / !=.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceRayTracingMaintenance1FeaturesKHR const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceRayTracingMaintenance1FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( rayTracingMaintenance1 == rhs.rayTracingMaintenance1 )
+          && ( rayTracingPipelineTraceRaysIndirect2 == rhs.rayTracingPipelineTraceRaysIndirect2 );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceRayTracingMaintenance1FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members mirror the C struct, in declaration order.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingMaintenance1FeaturesKHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 rayTracingMaintenance1 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect2 = {};
+
+  };
+
+  // Maps the StructureType enum value back to this C++ struct type.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceRayTracingMaintenance1FeaturesKHR>
+  {
+    using Type = PhysicalDeviceRayTracingMaintenance1FeaturesKHR;
+  };
+
+  // C++ wrapper for VkPhysicalDeviceRayTracingMotionBlurFeaturesNV.
+  // Holds the sType/pNext chain header plus two feature booleans; conversions
+  // below rely on layout compatibility with the C struct (reinterpret_cast only).
+  struct PhysicalDeviceRayTracingMotionBlurFeaturesNV
+  {
+    using NativeType = VkPhysicalDeviceRayTracingMotionBlurFeaturesNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingMotionBlurFeaturesNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingMotionBlurFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlur_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlurPipelineTraceRaysIndirect_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), rayTracingMotionBlur( rayTracingMotionBlur_ ), rayTracingMotionBlurPipelineTraceRaysIndirect( rayTracingMotionBlurPipelineTraceRaysIndirect_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingMotionBlurFeaturesNV( PhysicalDeviceRayTracingMotionBlurFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the wrapper type.
+    PhysicalDeviceRayTracingMotionBlurFeaturesNV( VkPhysicalDeviceRayTracingMotionBlurFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceRayTracingMotionBlurFeaturesNV( *reinterpret_cast<PhysicalDeviceRayTracingMotionBlurFeaturesNV const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceRayTracingMotionBlurFeaturesNV & operator=( PhysicalDeviceRayTracingMotionBlurFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct via the layout-compatible C++ wrapper.
+    PhysicalDeviceRayTracingMotionBlurFeaturesNV & operator=( VkPhysicalDeviceRayTracingMotionBlurFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMotionBlurFeaturesNV const *>( &rhs );
+      return *this;
+    }
+
+    // Chainable setters: each returns *this so calls can be fluently composed.
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingMotionBlurFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingMotionBlurFeaturesNV & setRayTracingMotionBlur( VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlur_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rayTracingMotionBlur = rayTracingMotionBlur_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingMotionBlurFeaturesNV & setRayTracingMotionBlurPipelineTraceRaysIndirect( VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlurPipelineTraceRaysIndirect_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rayTracingMotionBlurPipelineTraceRaysIndirect = rayTracingMotionBlurPipelineTraceRaysIndirect_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the C struct (reinterpret this object; no copy).
+    operator VkPhysicalDeviceRayTracingMotionBlurFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceRayTracingMotionBlurFeaturesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceRayTracingMotionBlurFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceRayTracingMotionBlurFeaturesNV*>( this );
+    }
+
+    // reflect(): tuple of references to every member, used by operator== below.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, rayTracingMotionBlur, rayTracingMotionBlurPipelineTraceRaysIndirect );
+    }
+#endif
+
+
+    // Defaulted three-way comparison when available; otherwise member-wise == / !=.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceRayTracingMotionBlurFeaturesNV const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceRayTracingMotionBlurFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( rayTracingMotionBlur == rhs.rayTracingMotionBlur )
+          && ( rayTracingMotionBlurPipelineTraceRaysIndirect == rhs.rayTracingMotionBlurPipelineTraceRaysIndirect );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceRayTracingMotionBlurFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members mirror the C struct, in declaration order.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingMotionBlurFeaturesNV;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlur = {};
+    VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlurPipelineTraceRaysIndirect = {};
+
+  };
+
+  // Maps the StructureType enum value back to this C++ struct type.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceRayTracingMotionBlurFeaturesNV>
+  {
+    using Type = PhysicalDeviceRayTracingMotionBlurFeaturesNV;
+  };
+
+  struct PhysicalDeviceRayTracingPipelineFeaturesKHR
+  {
+    using NativeType = VkPhysicalDeviceRayTracingPipelineFeaturesKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingPipelineFeaturesKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPipelineFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipeline_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplay_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplayMixed_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rayTraversalPrimitiveCulling_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), rayTracingPipeline( rayTracingPipeline_ ), rayTracingPipelineShaderGroupHandleCaptureReplay( rayTracingPipelineShaderGroupHandleCaptureReplay_ ), rayTracingPipelineShaderGroupHandleCaptureReplayMixed( rayTracingPipelineShaderGroupHandleCaptureReplayMixed_ ), rayTracingPipelineTraceRaysIndirect( rayTracingPipelineTraceRaysIndirect_ ), rayTraversalPrimitiveCulling( rayTraversalPrimitiveCulling_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPipelineFeaturesKHR( PhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceRayTracingPipelineFeaturesKHR( VkPhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceRayTracingPipelineFeaturesKHR( *reinterpret_cast<PhysicalDeviceRayTracingPipelineFeaturesKHR const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceRayTracingPipelineFeaturesKHR & operator=( PhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceRayTracingPipelineFeaturesKHR & operator=( VkPhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelineFeaturesKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR & setRayTracingPipeline( VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipeline_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rayTracingPipeline = rayTracingPipeline_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR & setRayTracingPipelineShaderGroupHandleCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rayTracingPipelineShaderGroupHandleCaptureReplay = rayTracingPipelineShaderGroupHandleCaptureReplay_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR & setRayTracingPipelineShaderGroupHandleCaptureReplayMixed( VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplayMixed_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rayTracingPipelineShaderGroupHandleCaptureReplayMixed = rayTracingPipelineShaderGroupHandleCaptureReplayMixed_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR & setRayTracingPipelineTraceRaysIndirect( VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rayTracingPipelineTraceRaysIndirect = rayTracingPipelineTraceRaysIndirect_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR & setRayTraversalPrimitiveCulling( VULKAN_HPP_NAMESPACE::Bool32 rayTraversalPrimitiveCulling_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rayTraversalPrimitiveCulling = rayTraversalPrimitiveCulling_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceRayTracingPipelineFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceRayTracingPipelineFeaturesKHR*>( this );
+    }
+
+    operator VkPhysicalDeviceRayTracingPipelineFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceRayTracingPipelineFeaturesKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, rayTracingPipeline, rayTracingPipelineShaderGroupHandleCaptureReplay, rayTracingPipelineShaderGroupHandleCaptureReplayMixed, rayTracingPipelineTraceRaysIndirect, rayTraversalPrimitiveCulling );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceRayTracingPipelineFeaturesKHR const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( rayTracingPipeline == rhs.rayTracingPipeline )
+          && ( rayTracingPipelineShaderGroupHandleCaptureReplay == rhs.rayTracingPipelineShaderGroupHandleCaptureReplay )
+          && ( rayTracingPipelineShaderGroupHandleCaptureReplayMixed == rhs.rayTracingPipelineShaderGroupHandleCaptureReplayMixed )
+          && ( rayTracingPipelineTraceRaysIndirect == rhs.rayTracingPipelineTraceRaysIndirect )
+          && ( rayTraversalPrimitiveCulling == rhs.rayTraversalPrimitiveCulling );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingPipelineFeaturesKHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipeline = {};
+    VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplay = {};
+    VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplayMixed = {};
+    VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect = {};
+    VULKAN_HPP_NAMESPACE::Bool32 rayTraversalPrimitiveCulling = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceRayTracingPipelineFeaturesKHR>
+  {
+    using Type = PhysicalDeviceRayTracingPipelineFeaturesKHR; // maps the StructureType enumerant back to its C++ struct type
+  };
+
+  struct PhysicalDeviceRayTracingPipelinePropertiesKHR // wraps VkPhysicalDeviceRayTracingPipelinePropertiesKHR; the reinterpret_casts below assume identical layout
+  {
+    using NativeType = VkPhysicalDeviceRayTracingPipelinePropertiesKHR; // underlying C struct type
+
+    static const bool allowDuplicate = false; // NOTE(review): presumably disallows duplicates of this struct in a structure chain -- confirm against StructureChain
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingPipelinePropertiesKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPipelinePropertiesKHR(uint32_t shaderGroupHandleSize_ = {}, uint32_t maxRayRecursionDepth_ = {}, uint32_t maxShaderGroupStride_ = {}, uint32_t shaderGroupBaseAlignment_ = {}, uint32_t shaderGroupHandleCaptureReplaySize_ = {}, uint32_t maxRayDispatchInvocationCount_ = {}, uint32_t shaderGroupHandleAlignment_ = {}, uint32_t maxRayHitAttributeSize_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), shaderGroupHandleSize( shaderGroupHandleSize_ ), maxRayRecursionDepth( maxRayRecursionDepth_ ), maxShaderGroupStride( maxShaderGroupStride_ ), shaderGroupBaseAlignment( shaderGroupBaseAlignment_ ), shaderGroupHandleCaptureReplaySize( shaderGroupHandleCaptureReplaySize_ ), maxRayDispatchInvocationCount( maxRayDispatchInvocationCount_ ), shaderGroupHandleAlignment( shaderGroupHandleAlignment_ ), maxRayHitAttributeSize( maxRayHitAttributeSize_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPipelinePropertiesKHR( PhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceRayTracingPipelinePropertiesKHR( VkPhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT // construct from the C struct via layout-compatible cast
+      : PhysicalDeviceRayTracingPipelinePropertiesKHR( *reinterpret_cast<PhysicalDeviceRayTracingPipelinePropertiesKHR const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceRayTracingPipelinePropertiesKHR & operator=( PhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceRayTracingPipelinePropertiesKHR & operator=( VkPhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT // assign from the C struct
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelinePropertiesKHR const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceRayTracingPipelinePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT // zero-copy view as the C struct
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceRayTracingPipelinePropertiesKHR*>( this );
+    }
+
+    operator VkPhysicalDeviceRayTracingPipelinePropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceRayTracingPipelinePropertiesKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT // tuple of references to all members; backs operator== when VULKAN_HPP_USE_REFLECT is defined
+    {
+      return std::tie( sType, pNext, shaderGroupHandleSize, maxRayRecursionDepth, maxShaderGroupStride, shaderGroupBaseAlignment, shaderGroupHandleCaptureReplaySize, maxRayDispatchInvocationCount, shaderGroupHandleAlignment, maxRayHitAttributeSize );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceRayTracingPipelinePropertiesKHR const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT // memberwise equality (pNext compared as a raw pointer)
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shaderGroupHandleSize == rhs.shaderGroupHandleSize )
+          && ( maxRayRecursionDepth == rhs.maxRayRecursionDepth )
+          && ( maxShaderGroupStride == rhs.maxShaderGroupStride )
+          && ( shaderGroupBaseAlignment == rhs.shaderGroupBaseAlignment )
+          && ( shaderGroupHandleCaptureReplaySize == rhs.shaderGroupHandleCaptureReplaySize )
+          && ( maxRayDispatchInvocationCount == rhs.maxRayDispatchInvocationCount )
+          && ( shaderGroupHandleAlignment == rhs.shaderGroupHandleAlignment )
+          && ( maxRayHitAttributeSize == rhs.maxRayHitAttributeSize );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public: // field order must mirror VkPhysicalDeviceRayTracingPipelinePropertiesKHR exactly (required by the reinterpret_casts above)
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingPipelinePropertiesKHR;
+    void * pNext = {};
+    uint32_t shaderGroupHandleSize = {};
+    uint32_t maxRayRecursionDepth = {};
+    uint32_t maxShaderGroupStride = {};
+    uint32_t shaderGroupBaseAlignment = {};
+    uint32_t shaderGroupHandleCaptureReplaySize = {};
+    uint32_t maxRayDispatchInvocationCount = {};
+    uint32_t shaderGroupHandleAlignment = {};
+    uint32_t maxRayHitAttributeSize = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceRayTracingPipelinePropertiesKHR>
+  {
+    using Type = PhysicalDeviceRayTracingPipelinePropertiesKHR; // maps the StructureType enumerant back to its C++ struct type
+  };
+
+  struct PhysicalDeviceRayTracingPropertiesNV // wraps VkPhysicalDeviceRayTracingPropertiesNV; the reinterpret_casts below assume identical layout
+  {
+    using NativeType = VkPhysicalDeviceRayTracingPropertiesNV; // underlying C struct type
+
+    static const bool allowDuplicate = false; // NOTE(review): presumably disallows duplicates of this struct in a structure chain -- confirm against StructureChain
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingPropertiesNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPropertiesNV(uint32_t shaderGroupHandleSize_ = {}, uint32_t maxRecursionDepth_ = {}, uint32_t maxShaderGroupStride_ = {}, uint32_t shaderGroupBaseAlignment_ = {}, uint64_t maxGeometryCount_ = {}, uint64_t maxInstanceCount_ = {}, uint64_t maxTriangleCount_ = {}, uint32_t maxDescriptorSetAccelerationStructures_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), shaderGroupHandleSize( shaderGroupHandleSize_ ), maxRecursionDepth( maxRecursionDepth_ ), maxShaderGroupStride( maxShaderGroupStride_ ), shaderGroupBaseAlignment( shaderGroupBaseAlignment_ ), maxGeometryCount( maxGeometryCount_ ), maxInstanceCount( maxInstanceCount_ ), maxTriangleCount( maxTriangleCount_ ), maxDescriptorSetAccelerationStructures( maxDescriptorSetAccelerationStructures_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPropertiesNV( PhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceRayTracingPropertiesNV( VkPhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT // construct from the C struct via layout-compatible cast
+      : PhysicalDeviceRayTracingPropertiesNV( *reinterpret_cast<PhysicalDeviceRayTracingPropertiesNV const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceRayTracingPropertiesNV & operator=( PhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceRayTracingPropertiesNV & operator=( VkPhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT // assign from the C struct
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPropertiesNV const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceRayTracingPropertiesNV const &() const VULKAN_HPP_NOEXCEPT // zero-copy view as the C struct
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceRayTracingPropertiesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceRayTracingPropertiesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceRayTracingPropertiesNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint64_t const &, uint64_t const &, uint64_t const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT // tuple of references to all members; backs operator== when VULKAN_HPP_USE_REFLECT is defined
+    {
+      return std::tie( sType, pNext, shaderGroupHandleSize, maxRecursionDepth, maxShaderGroupStride, shaderGroupBaseAlignment, maxGeometryCount, maxInstanceCount, maxTriangleCount, maxDescriptorSetAccelerationStructures );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceRayTracingPropertiesNV const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceRayTracingPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT // memberwise equality (pNext compared as a raw pointer)
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shaderGroupHandleSize == rhs.shaderGroupHandleSize )
+          && ( maxRecursionDepth == rhs.maxRecursionDepth )
+          && ( maxShaderGroupStride == rhs.maxShaderGroupStride )
+          && ( shaderGroupBaseAlignment == rhs.shaderGroupBaseAlignment )
+          && ( maxGeometryCount == rhs.maxGeometryCount )
+          && ( maxInstanceCount == rhs.maxInstanceCount )
+          && ( maxTriangleCount == rhs.maxTriangleCount )
+          && ( maxDescriptorSetAccelerationStructures == rhs.maxDescriptorSetAccelerationStructures );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceRayTracingPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public: // field order must mirror VkPhysicalDeviceRayTracingPropertiesNV exactly (required by the reinterpret_casts above)
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingPropertiesNV;
+    void * pNext = {};
+    uint32_t shaderGroupHandleSize = {};
+    uint32_t maxRecursionDepth = {};
+    uint32_t maxShaderGroupStride = {};
+    uint32_t shaderGroupBaseAlignment = {};
+    uint64_t maxGeometryCount = {};
+    uint64_t maxInstanceCount = {};
+    uint64_t maxTriangleCount = {};
+    uint32_t maxDescriptorSetAccelerationStructures = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceRayTracingPropertiesNV>
+  {
+    using Type = PhysicalDeviceRayTracingPropertiesNV; // maps the StructureType enumerant back to its C++ struct type
+  };
+
+  struct PhysicalDeviceRepresentativeFragmentTestFeaturesNV // wraps VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV; the reinterpret_casts below assume identical layout
+  {
+    using NativeType = VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV; // underlying C struct type
+
+    static const bool allowDuplicate = false; // NOTE(review): presumably disallows duplicates of this struct in a structure chain -- confirm against StructureChain
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceRepresentativeFragmentTestFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), representativeFragmentTest( representativeFragmentTest_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceRepresentativeFragmentTestFeaturesNV( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceRepresentativeFragmentTestFeaturesNV( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT // construct from the C struct via layout-compatible cast
+      : PhysicalDeviceRepresentativeFragmentTestFeaturesNV( *reinterpret_cast<PhysicalDeviceRepresentativeFragmentTestFeaturesNV const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceRepresentativeFragmentTestFeaturesNV & operator=( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceRepresentativeFragmentTestFeaturesNV & operator=( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT // assign from the C struct
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRepresentativeFragmentTestFeaturesNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRepresentativeFragmentTestFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT // fluent setters: each returns *this for chaining
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRepresentativeFragmentTestFeaturesNV & setRepresentativeFragmentTest( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest_ ) VULKAN_HPP_NOEXCEPT
+    {
+      representativeFragmentTest = representativeFragmentTest_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const &() const VULKAN_HPP_NOEXCEPT // zero-copy view as the C struct
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT // tuple of references to all members; backs operator== when VULKAN_HPP_USE_REFLECT is defined
+    {
+      return std::tie( sType, pNext, representativeFragmentTest );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT // memberwise equality (pNext compared as a raw pointer)
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( representativeFragmentTest == rhs.representativeFragmentTest );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public: // field order must mirror VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV exactly (required by the reinterpret_casts above)
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV>
+  {
+    using Type = PhysicalDeviceRepresentativeFragmentTestFeaturesNV; // maps the StructureType enumerant back to its C++ struct type
+  };
+
+  struct PhysicalDeviceRobustness2FeaturesEXT // wraps VkPhysicalDeviceRobustness2FeaturesEXT; the reinterpret_casts below assume identical layout
+  {
+    using NativeType = VkPhysicalDeviceRobustness2FeaturesEXT; // underlying C struct type
+
+    static const bool allowDuplicate = false; // NOTE(review): presumably disallows duplicates of this struct in a structure chain -- confirm against StructureChain
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRobustness2FeaturesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2FeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess2_ = {}, VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess2_ = {}, VULKAN_HPP_NAMESPACE::Bool32 nullDescriptor_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), robustBufferAccess2( robustBufferAccess2_ ), robustImageAccess2( robustImageAccess2_ ), nullDescriptor( nullDescriptor_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2FeaturesEXT( PhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceRobustness2FeaturesEXT( VkPhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT // construct from the C struct via layout-compatible cast
+      : PhysicalDeviceRobustness2FeaturesEXT( *reinterpret_cast<PhysicalDeviceRobustness2FeaturesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceRobustness2FeaturesEXT & operator=( PhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceRobustness2FeaturesEXT & operator=( VkPhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT // assign from the C struct
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2FeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRobustness2FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT // fluent setters: each returns *this for chaining
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRobustness2FeaturesEXT & setRobustBufferAccess2( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess2_ ) VULKAN_HPP_NOEXCEPT
+    {
+      robustBufferAccess2 = robustBufferAccess2_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRobustness2FeaturesEXT & setRobustImageAccess2( VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess2_ ) VULKAN_HPP_NOEXCEPT
+    {
+      robustImageAccess2 = robustImageAccess2_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRobustness2FeaturesEXT & setNullDescriptor( VULKAN_HPP_NAMESPACE::Bool32 nullDescriptor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      nullDescriptor = nullDescriptor_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceRobustness2FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT // zero-copy view as the C struct
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceRobustness2FeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceRobustness2FeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceRobustness2FeaturesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT // tuple of references to all members; backs operator== when VULKAN_HPP_USE_REFLECT is defined
+    {
+      return std::tie( sType, pNext, robustBufferAccess2, robustImageAccess2, nullDescriptor );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceRobustness2FeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceRobustness2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT // memberwise equality (pNext compared as a raw pointer)
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( robustBufferAccess2 == rhs.robustBufferAccess2 )
+          && ( robustImageAccess2 == rhs.robustImageAccess2 )
+          && ( nullDescriptor == rhs.nullDescriptor );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceRobustness2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public: // field order must mirror VkPhysicalDeviceRobustness2FeaturesEXT exactly (required by the reinterpret_casts above)
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRobustness2FeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess2 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess2 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 nullDescriptor = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceRobustness2FeaturesEXT>
+  {
+    using Type = PhysicalDeviceRobustness2FeaturesEXT; // maps the StructureType enumerant back to its C++ struct type
+  };
+
+  struct PhysicalDeviceRobustness2PropertiesEXT // wraps VkPhysicalDeviceRobustness2PropertiesEXT; the reinterpret_casts below assume identical layout
+  {
+    using NativeType = VkPhysicalDeviceRobustness2PropertiesEXT; // underlying C struct type
+
+    static const bool allowDuplicate = false; // NOTE(review): presumably disallows duplicates of this struct in a structure chain -- confirm against StructureChain
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRobustness2PropertiesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2PropertiesEXT(VULKAN_HPP_NAMESPACE::DeviceSize robustStorageBufferAccessSizeAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize robustUniformBufferAccessSizeAlignment_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), robustStorageBufferAccessSizeAlignment( robustStorageBufferAccessSizeAlignment_ ), robustUniformBufferAccessSizeAlignment( robustUniformBufferAccessSizeAlignment_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2PropertiesEXT( PhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceRobustness2PropertiesEXT( VkPhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT // construct from the C struct via layout-compatible cast
+      : PhysicalDeviceRobustness2PropertiesEXT( *reinterpret_cast<PhysicalDeviceRobustness2PropertiesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceRobustness2PropertiesEXT & operator=( PhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceRobustness2PropertiesEXT & operator=( VkPhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT // assign from the C struct
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2PropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceRobustness2PropertiesEXT const &() const VULKAN_HPP_NOEXCEPT // zero-copy view as the C struct
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceRobustness2PropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceRobustness2PropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceRobustness2PropertiesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT // tuple of references to all members; backs operator== when VULKAN_HPP_USE_REFLECT is defined
+    {
+      return std::tie( sType, pNext, robustStorageBufferAccessSizeAlignment, robustUniformBufferAccessSizeAlignment );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceRobustness2PropertiesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceRobustness2PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT // memberwise equality (pNext compared as a raw pointer)
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( robustStorageBufferAccessSizeAlignment == rhs.robustStorageBufferAccessSizeAlignment )
+          && ( robustUniformBufferAccessSizeAlignment == rhs.robustUniformBufferAccessSizeAlignment );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceRobustness2PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public: // field order must mirror VkPhysicalDeviceRobustness2PropertiesEXT exactly (required by the reinterpret_casts above)
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRobustness2PropertiesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize robustStorageBufferAccessSizeAlignment = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize robustUniformBufferAccessSizeAlignment = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceRobustness2PropertiesEXT>
+  {
+    using Type = PhysicalDeviceRobustness2PropertiesEXT; // maps the StructureType enumerant back to its C++ struct type
+  };
+
+  struct PhysicalDeviceSampleLocationsPropertiesEXT // wraps VkPhysicalDeviceSampleLocationsPropertiesEXT; the reinterpret_casts below assume identical layout
+  {
+    using NativeType = VkPhysicalDeviceSampleLocationsPropertiesEXT; // underlying C struct type
+
+    static const bool allowDuplicate = false; // NOTE(review): presumably disallows duplicates of this struct in a structure chain -- confirm against StructureChain
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSampleLocationsPropertiesEXT(VULKAN_HPP_NAMESPACE::SampleCountFlags sampleLocationSampleCounts_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize_ = {}, std::array<float,2> const & sampleLocationCoordinateRange_ = {}, uint32_t sampleLocationSubPixelBits_ = {}, VULKAN_HPP_NAMESPACE::Bool32 variableSampleLocations_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), sampleLocationSampleCounts( sampleLocationSampleCounts_ ), maxSampleLocationGridSize( maxSampleLocationGridSize_ ), sampleLocationCoordinateRange( sampleLocationCoordinateRange_ ), sampleLocationSubPixelBits( sampleLocationSubPixelBits_ ), variableSampleLocations( variableSampleLocations_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSampleLocationsPropertiesEXT( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceSampleLocationsPropertiesEXT( VkPhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT // construct from the C struct via layout-compatible cast
+      : PhysicalDeviceSampleLocationsPropertiesEXT( *reinterpret_cast<PhysicalDeviceSampleLocationsPropertiesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceSampleLocationsPropertiesEXT & operator=( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceSampleLocationsPropertiesEXT & operator=( VkPhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT // assign from the C struct
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceSampleLocationsPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT // zero-copy view as the C struct
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceSampleLocationsPropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceSampleLocationsPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceSampleLocationsPropertiesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::SampleCountFlags const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 2> const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT // tuple of references to all members; backs operator== when VULKAN_HPP_USE_REFLECT is defined
+    {
+      return std::tie( sType, pNext, sampleLocationSampleCounts, maxSampleLocationGridSize, sampleLocationCoordinateRange, sampleLocationSubPixelBits, variableSampleLocations );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceSampleLocationsPropertiesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT // memberwise equality (pNext compared as a raw pointer)
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( sampleLocationSampleCounts == rhs.sampleLocationSampleCounts )
+          && ( maxSampleLocationGridSize == rhs.maxSampleLocationGridSize )
+          && ( sampleLocationCoordinateRange == rhs.sampleLocationCoordinateRange )
+          && ( sampleLocationSubPixelBits == rhs.sampleLocationSubPixelBits )
+          && ( variableSampleLocations == rhs.variableSampleLocations );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public: // field order must mirror VkPhysicalDeviceSampleLocationsPropertiesEXT exactly (required by the reinterpret_casts above)
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlags sampleLocationSampleCounts = {};
+    VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 2> sampleLocationCoordinateRange = {};
+    uint32_t sampleLocationSubPixelBits = {};
+    VULKAN_HPP_NAMESPACE::Bool32 variableSampleLocations = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT>
+  {
+    using Type = PhysicalDeviceSampleLocationsPropertiesEXT; // maps the StructureType enumerant back to its C++ struct type
+  };
+
+  // C++ wrapper for VkPhysicalDeviceSamplerFilterMinmaxProperties.
+  // Query-only (output) structure: no per-field setters are generated, only constructors,
+  // C-struct conversions, and comparison. sType is fixed to the matching StructureType.
+  struct PhysicalDeviceSamplerFilterMinmaxProperties
+  {
+    using NativeType = VkPhysicalDeviceSamplerFilterMinmaxProperties;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerFilterMinmaxProperties(VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats_ = {}, VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), filterMinmaxSingleComponentFormats( filterMinmaxSingleComponentFormats_ ), filterMinmaxImageComponentMapping( filterMinmaxImageComponentMapping_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerFilterMinmaxProperties( PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Conversion from the C struct: a reinterpret_cast copy, which relies on this wrapper
+    // having the same memory layout as VkPhysicalDeviceSamplerFilterMinmaxProperties.
+    PhysicalDeviceSamplerFilterMinmaxProperties( VkPhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceSamplerFilterMinmaxProperties( *reinterpret_cast<PhysicalDeviceSamplerFilterMinmaxProperties const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceSamplerFilterMinmaxProperties & operator=( PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceSamplerFilterMinmaxProperties & operator=( VkPhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxProperties const *>( &rhs );
+      return *this;
+    }
+
+
+    // Zero-copy views as the underlying C struct (layout-compatibility assumed, as above).
+    operator VkPhysicalDeviceSamplerFilterMinmaxProperties const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceSamplerFilterMinmaxProperties*>( this );
+    }
+
+    operator VkPhysicalDeviceSamplerFilterMinmaxProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceSamplerFilterMinmaxProperties*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, filterMinmaxSingleComponentFormats, filterMinmaxImageComponentMapping );
+    }
+#endif
+
+
+    // Member-wise equality over all fields, including sType and the pNext pointer value.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceSamplerFilterMinmaxProperties const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( filterMinmaxSingleComponentFormats == rhs.filterMinmaxSingleComponentFormats )
+          && ( filterMinmaxImageComponentMapping == rhs.filterMinmaxImageComponentMapping );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Data members mirror the C struct; Bool32 fields default to zero (VK_FALSE).
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats = {};
+    VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping = {};
+
+  };
+
+  // Compile-time map from the StructureType enumerant back to its C++ wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties>
+  {
+    using Type = PhysicalDeviceSamplerFilterMinmaxProperties;
+  };
+  // Legacy EXT spelling kept as an alias of the promoted (core) struct.
+  using PhysicalDeviceSamplerFilterMinmaxPropertiesEXT = PhysicalDeviceSamplerFilterMinmaxProperties;
+
+  // C++ wrapper for VkPhysicalDeviceSamplerYcbcrConversionFeatures.
+  // Feature structure with a single Bool32 toggle; fluent setters (return *this) are
+  // generated for pNext and the feature flag. sType is fixed to the matching StructureType.
+  struct PhysicalDeviceSamplerYcbcrConversionFeatures
+  {
+    using NativeType = VkPhysicalDeviceSamplerYcbcrConversionFeatures;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerYcbcrConversionFeatures(VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), samplerYcbcrConversion( samplerYcbcrConversion_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerYcbcrConversionFeatures( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Conversion from the C struct via reinterpret_cast; relies on identical layout.
+    PhysicalDeviceSamplerYcbcrConversionFeatures( VkPhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceSamplerYcbcrConversionFeatures( *reinterpret_cast<PhysicalDeviceSamplerYcbcrConversionFeatures const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceSamplerYcbcrConversionFeatures & operator=( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceSamplerYcbcrConversionFeatures & operator=( VkPhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeatures const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSamplerYcbcrConversionFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSamplerYcbcrConversionFeatures & setSamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ ) VULKAN_HPP_NOEXCEPT
+    {
+      samplerYcbcrConversion = samplerYcbcrConversion_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy views as the underlying C struct.
+    operator VkPhysicalDeviceSamplerYcbcrConversionFeatures const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceSamplerYcbcrConversionFeatures*>( this );
+    }
+
+    operator VkPhysicalDeviceSamplerYcbcrConversionFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceSamplerYcbcrConversionFeatures*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, samplerYcbcrConversion );
+    }
+#endif
+
+
+    // Member-wise equality over all fields, including sType and the pNext pointer value.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceSamplerYcbcrConversionFeatures const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( samplerYcbcrConversion == rhs.samplerYcbcrConversion );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Data members mirror the C struct; the Bool32 field defaults to zero (VK_FALSE).
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion = {};
+
+  };
+
+  // Compile-time map from the StructureType enumerant back to its C++ wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures>
+  {
+    using Type = PhysicalDeviceSamplerYcbcrConversionFeatures;
+  };
+  // Legacy KHR spelling kept as an alias of the promoted (core) struct.
+  using PhysicalDeviceSamplerYcbcrConversionFeaturesKHR = PhysicalDeviceSamplerYcbcrConversionFeatures;
+
+  // C++ wrapper for VkPhysicalDeviceScalarBlockLayoutFeatures.
+  // Feature structure with a single Bool32 toggle; fluent setters (return *this) are
+  // generated for pNext and the feature flag. sType is fixed to the matching StructureType.
+  struct PhysicalDeviceScalarBlockLayoutFeatures
+  {
+    using NativeType = VkPhysicalDeviceScalarBlockLayoutFeatures;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceScalarBlockLayoutFeatures;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceScalarBlockLayoutFeatures(VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), scalarBlockLayout( scalarBlockLayout_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceScalarBlockLayoutFeatures( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Conversion from the C struct via reinterpret_cast; relies on identical layout.
+    PhysicalDeviceScalarBlockLayoutFeatures( VkPhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceScalarBlockLayoutFeatures( *reinterpret_cast<PhysicalDeviceScalarBlockLayoutFeatures const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceScalarBlockLayoutFeatures & operator=( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceScalarBlockLayoutFeatures & operator=( VkPhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeatures const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceScalarBlockLayoutFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceScalarBlockLayoutFeatures & setScalarBlockLayout( VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      scalarBlockLayout = scalarBlockLayout_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy views as the underlying C struct.
+    operator VkPhysicalDeviceScalarBlockLayoutFeatures const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceScalarBlockLayoutFeatures*>( this );
+    }
+
+    operator VkPhysicalDeviceScalarBlockLayoutFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceScalarBlockLayoutFeatures*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, scalarBlockLayout );
+    }
+#endif
+
+
+    // Member-wise equality over all fields, including sType and the pNext pointer value.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceScalarBlockLayoutFeatures const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( scalarBlockLayout == rhs.scalarBlockLayout );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Data members mirror the C struct; the Bool32 field defaults to zero (VK_FALSE).
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceScalarBlockLayoutFeatures;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout = {};
+
+  };
+
+  // Compile-time map from the StructureType enumerant back to its C++ wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceScalarBlockLayoutFeatures>
+  {
+    using Type = PhysicalDeviceScalarBlockLayoutFeatures;
+  };
+  // Legacy EXT spelling kept as an alias of the promoted (core) struct.
+  using PhysicalDeviceScalarBlockLayoutFeaturesEXT = PhysicalDeviceScalarBlockLayoutFeatures;
+
+  // C++ wrapper for VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures.
+  // Feature structure with a single Bool32 toggle; fluent setters (return *this) are
+  // generated for pNext and the feature flag. sType is fixed to the matching StructureType.
+  struct PhysicalDeviceSeparateDepthStencilLayoutsFeatures
+  {
+    using NativeType = VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceSeparateDepthStencilLayoutsFeatures(VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), separateDepthStencilLayouts( separateDepthStencilLayouts_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSeparateDepthStencilLayoutsFeatures( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Conversion from the C struct via reinterpret_cast; relies on identical layout.
+    PhysicalDeviceSeparateDepthStencilLayoutsFeatures( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceSeparateDepthStencilLayoutsFeatures( *reinterpret_cast<PhysicalDeviceSeparateDepthStencilLayoutsFeatures const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceSeparateDepthStencilLayoutsFeatures & operator=( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceSeparateDepthStencilLayoutsFeatures & operator=( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeatures const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSeparateDepthStencilLayoutsFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSeparateDepthStencilLayoutsFeatures & setSeparateDepthStencilLayouts( VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ ) VULKAN_HPP_NOEXCEPT
+    {
+      separateDepthStencilLayouts = separateDepthStencilLayouts_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy views as the underlying C struct.
+    operator VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures*>( this );
+    }
+
+    operator VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, separateDepthStencilLayouts );
+    }
+#endif
+
+
+    // Member-wise equality over all fields, including sType and the pNext pointer value.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( separateDepthStencilLayouts == rhs.separateDepthStencilLayouts );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Data members mirror the C struct; the Bool32 field defaults to zero (VK_FALSE).
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts = {};
+
+  };
+
+  // Compile-time map from the StructureType enumerant back to its C++ wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures>
+  {
+    using Type = PhysicalDeviceSeparateDepthStencilLayoutsFeatures;
+  };
+  // Legacy KHR spelling kept as an alias of the promoted (core) struct.
+  using PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR = PhysicalDeviceSeparateDepthStencilLayoutsFeatures;
+
+  // C++ wrapper for VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT.
+  // Feature structure with twelve Bool32 toggles; one fluent setter (return *this) is
+  // generated per field plus setPNext. sType is fixed to the matching StructureType.
+  struct PhysicalDeviceShaderAtomicFloat2FeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderAtomicFloat2FeaturesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloat2FeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16AtomicAdd_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16AtomicMinMax_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicMinMax_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicMinMax_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16AtomicAdd_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16AtomicMinMax_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicMinMax_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicMinMax_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicMinMax_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicMinMax_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), shaderBufferFloat16Atomics( shaderBufferFloat16Atomics_ ), shaderBufferFloat16AtomicAdd( shaderBufferFloat16AtomicAdd_ ), shaderBufferFloat16AtomicMinMax( shaderBufferFloat16AtomicMinMax_ ), shaderBufferFloat32AtomicMinMax( shaderBufferFloat32AtomicMinMax_ ), shaderBufferFloat64AtomicMinMax( shaderBufferFloat64AtomicMinMax_ ), shaderSharedFloat16Atomics( shaderSharedFloat16Atomics_ ), shaderSharedFloat16AtomicAdd( shaderSharedFloat16AtomicAdd_ ), shaderSharedFloat16AtomicMinMax( shaderSharedFloat16AtomicMinMax_ ), shaderSharedFloat32AtomicMinMax( shaderSharedFloat32AtomicMinMax_ ), shaderSharedFloat64AtomicMinMax( shaderSharedFloat64AtomicMinMax_ ), shaderImageFloat32AtomicMinMax( shaderImageFloat32AtomicMinMax_ ), sparseImageFloat32AtomicMinMax( sparseImageFloat32AtomicMinMax_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloat2FeaturesEXT( PhysicalDeviceShaderAtomicFloat2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Conversion from the C struct via reinterpret_cast; relies on identical layout.
+    PhysicalDeviceShaderAtomicFloat2FeaturesEXT( VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShaderAtomicFloat2FeaturesEXT( *reinterpret_cast<PhysicalDeviceShaderAtomicFloat2FeaturesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceShaderAtomicFloat2FeaturesEXT & operator=( PhysicalDeviceShaderAtomicFloat2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceShaderAtomicFloat2FeaturesEXT & operator=( VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloat2FeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+    // Fluent per-field setters; each assigns one member and returns *this for chaining.
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setShaderBufferFloat16Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16Atomics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderBufferFloat16Atomics = shaderBufferFloat16Atomics_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setShaderBufferFloat16AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderBufferFloat16AtomicAdd = shaderBufferFloat16AtomicAdd_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setShaderBufferFloat16AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderBufferFloat16AtomicMinMax = shaderBufferFloat16AtomicMinMax_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setShaderBufferFloat32AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderBufferFloat32AtomicMinMax = shaderBufferFloat32AtomicMinMax_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setShaderBufferFloat64AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderBufferFloat64AtomicMinMax = shaderBufferFloat64AtomicMinMax_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setShaderSharedFloat16Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16Atomics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSharedFloat16Atomics = shaderSharedFloat16Atomics_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setShaderSharedFloat16AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSharedFloat16AtomicAdd = shaderSharedFloat16AtomicAdd_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setShaderSharedFloat16AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSharedFloat16AtomicMinMax = shaderSharedFloat16AtomicMinMax_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setShaderSharedFloat32AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSharedFloat32AtomicMinMax = shaderSharedFloat32AtomicMinMax_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setShaderSharedFloat64AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSharedFloat64AtomicMinMax = shaderSharedFloat64AtomicMinMax_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setShaderImageFloat32AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderImageFloat32AtomicMinMax = shaderImageFloat32AtomicMinMax_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setSparseImageFloat32AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sparseImageFloat32AtomicMinMax = sparseImageFloat32AtomicMinMax_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy views as the underlying C struct.
+    operator VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, shaderBufferFloat16Atomics, shaderBufferFloat16AtomicAdd, shaderBufferFloat16AtomicMinMax, shaderBufferFloat32AtomicMinMax, shaderBufferFloat64AtomicMinMax, shaderSharedFloat16Atomics, shaderSharedFloat16AtomicAdd, shaderSharedFloat16AtomicMinMax, shaderSharedFloat32AtomicMinMax, shaderSharedFloat64AtomicMinMax, shaderImageFloat32AtomicMinMax, sparseImageFloat32AtomicMinMax );
+    }
+#endif
+
+
+    // Member-wise equality over all fields, including sType and the pNext pointer value.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceShaderAtomicFloat2FeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderAtomicFloat2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shaderBufferFloat16Atomics == rhs.shaderBufferFloat16Atomics )
+          && ( shaderBufferFloat16AtomicAdd == rhs.shaderBufferFloat16AtomicAdd )
+          && ( shaderBufferFloat16AtomicMinMax == rhs.shaderBufferFloat16AtomicMinMax )
+          && ( shaderBufferFloat32AtomicMinMax == rhs.shaderBufferFloat32AtomicMinMax )
+          && ( shaderBufferFloat64AtomicMinMax == rhs.shaderBufferFloat64AtomicMinMax )
+          && ( shaderSharedFloat16Atomics == rhs.shaderSharedFloat16Atomics )
+          && ( shaderSharedFloat16AtomicAdd == rhs.shaderSharedFloat16AtomicAdd )
+          && ( shaderSharedFloat16AtomicMinMax == rhs.shaderSharedFloat16AtomicMinMax )
+          && ( shaderSharedFloat32AtomicMinMax == rhs.shaderSharedFloat32AtomicMinMax )
+          && ( shaderSharedFloat64AtomicMinMax == rhs.shaderSharedFloat64AtomicMinMax )
+          && ( shaderImageFloat32AtomicMinMax == rhs.shaderImageFloat32AtomicMinMax )
+          && ( sparseImageFloat32AtomicMinMax == rhs.sparseImageFloat32AtomicMinMax );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceShaderAtomicFloat2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Data members mirror the C struct; Bool32 fields default to zero (VK_FALSE).
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderAtomicFloat2FeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16Atomics = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16AtomicAdd = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16AtomicMinMax = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicMinMax = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicMinMax = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16Atomics = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16AtomicAdd = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16AtomicMinMax = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicMinMax = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicMinMax = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicMinMax = {};
+    VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicMinMax = {};
+
+  };
+
+  // Compile-time map from the StructureType enumerant back to its C++ wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderAtomicFloat2FeaturesEXT>
+  {
+    using Type = PhysicalDeviceShaderAtomicFloat2FeaturesEXT;
+  };
+
+  struct PhysicalDeviceShaderAtomicFloatFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceShaderAtomicFloatFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloatFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicAdd_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicAdd_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicAdd_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicAdd_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicAdd_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicAdd_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), shaderBufferFloat32Atomics( shaderBufferFloat32Atomics_ ), shaderBufferFloat32AtomicAdd( shaderBufferFloat32AtomicAdd_ ), shaderBufferFloat64Atomics( shaderBufferFloat64Atomics_ ), shaderBufferFloat64AtomicAdd( shaderBufferFloat64AtomicAdd_ ), shaderSharedFloat32Atomics( shaderSharedFloat32Atomics_ ), shaderSharedFloat32AtomicAdd( shaderSharedFloat32AtomicAdd_ ), shaderSharedFloat64Atomics( shaderSharedFloat64Atomics_ ), shaderSharedFloat64AtomicAdd( shaderSharedFloat64AtomicAdd_ ), shaderImageFloat32Atomics( shaderImageFloat32Atomics_ ), shaderImageFloat32AtomicAdd( shaderImageFloat32AtomicAdd_ ), sparseImageFloat32Atomics( sparseImageFloat32Atomics_ ), sparseImageFloat32AtomicAdd( sparseImageFloat32AtomicAdd_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloatFeaturesEXT( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderAtomicFloatFeaturesEXT( VkPhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShaderAtomicFloatFeaturesEXT( *reinterpret_cast<PhysicalDeviceShaderAtomicFloatFeaturesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceShaderAtomicFloatFeaturesEXT & operator=( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceShaderAtomicFloatFeaturesEXT & operator=( VkPhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloatFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderBufferFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderBufferFloat32Atomics = shaderBufferFloat32Atomics_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderBufferFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderBufferFloat32AtomicAdd = shaderBufferFloat32AtomicAdd_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderBufferFloat64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64Atomics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderBufferFloat64Atomics = shaderBufferFloat64Atomics_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderBufferFloat64AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderBufferFloat64AtomicAdd = shaderBufferFloat64AtomicAdd_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderSharedFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSharedFloat32Atomics = shaderSharedFloat32Atomics_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderSharedFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSharedFloat32AtomicAdd = shaderSharedFloat32AtomicAdd_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderSharedFloat64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64Atomics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSharedFloat64Atomics = shaderSharedFloat64Atomics_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderSharedFloat64AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSharedFloat64AtomicAdd = shaderSharedFloat64AtomicAdd_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderImageFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderImageFloat32Atomics = shaderImageFloat32Atomics_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderImageFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderImageFloat32AtomicAdd = shaderImageFloat32AtomicAdd_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & setSparseImageFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sparseImageFloat32Atomics = sparseImageFloat32Atomics_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & setSparseImageFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sparseImageFloat32AtomicAdd = sparseImageFloat32AtomicAdd_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceShaderAtomicFloatFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderAtomicFloatFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceShaderAtomicFloatFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderAtomicFloatFeaturesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, shaderBufferFloat32Atomics, shaderBufferFloat32AtomicAdd, shaderBufferFloat64Atomics, shaderBufferFloat64AtomicAdd, shaderSharedFloat32Atomics, shaderSharedFloat32AtomicAdd, shaderSharedFloat64Atomics, shaderSharedFloat64AtomicAdd, shaderImageFloat32Atomics, shaderImageFloat32AtomicAdd, sparseImageFloat32Atomics, sparseImageFloat32AtomicAdd );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shaderBufferFloat32Atomics == rhs.shaderBufferFloat32Atomics )
+          && ( shaderBufferFloat32AtomicAdd == rhs.shaderBufferFloat32AtomicAdd )
+          && ( shaderBufferFloat64Atomics == rhs.shaderBufferFloat64Atomics )
+          && ( shaderBufferFloat64AtomicAdd == rhs.shaderBufferFloat64AtomicAdd )
+          && ( shaderSharedFloat32Atomics == rhs.shaderSharedFloat32Atomics )
+          && ( shaderSharedFloat32AtomicAdd == rhs.shaderSharedFloat32AtomicAdd )
+          && ( shaderSharedFloat64Atomics == rhs.shaderSharedFloat64Atomics )
+          && ( shaderSharedFloat64AtomicAdd == rhs.shaderSharedFloat64AtomicAdd )
+          && ( shaderImageFloat32Atomics == rhs.shaderImageFloat32Atomics )
+          && ( shaderImageFloat32AtomicAdd == rhs.shaderImageFloat32AtomicAdd )
+          && ( sparseImageFloat32Atomics == rhs.sparseImageFloat32Atomics )
+          && ( sparseImageFloat32AtomicAdd == rhs.sparseImageFloat32AtomicAdd );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32Atomics = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicAdd = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64Atomics = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicAdd = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32Atomics = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicAdd = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64Atomics = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicAdd = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32Atomics = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicAdd = {};
+    VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32Atomics = {};
+    VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicAdd = {};
+
+  };
+
+  // Maps the StructureType enum value back to its C++ wrapper struct
+  // (reverse lookup used generically elsewhere in vulkan.hpp).
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT>
+  {
+    using Type = PhysicalDeviceShaderAtomicFloatFeaturesEXT;
+  };
+
+  // C++ projection of VkPhysicalDeviceShaderAtomicInt64Features. Setters return
+  // *this so calls can be chained; the KHR alias after the struct keeps the
+  // original extension-era name usable.
+  struct PhysicalDeviceShaderAtomicInt64Features
+  {
+    using NativeType = VkPhysicalDeviceShaderAtomicInt64Features;
+
+    // NOTE(review): presumably gates repeated use of this struct in a pNext
+    // chain; the consuming validation logic is not visible in this section.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderAtomicInt64Features;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicInt64Features(VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), shaderBufferInt64Atomics( shaderBufferInt64Atomics_ ), shaderSharedInt64Atomics( shaderSharedInt64Atomics_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicInt64Features( PhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; assumes the C++ and C layouts are identical
+    // (the reinterpret_cast conversion operators below rely on the same thing).
+    PhysicalDeviceShaderAtomicInt64Features( VkPhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShaderAtomicInt64Features( *reinterpret_cast<PhysicalDeviceShaderAtomicInt64Features const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceShaderAtomicInt64Features & operator=( PhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct via the layout-compatible C++ view.
+    PhysicalDeviceShaderAtomicInt64Features & operator=( VkPhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64Features const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters: each assigns one member and returns *this.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicInt64Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicInt64Features & setShaderBufferInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderBufferInt64Atomics = shaderBufferInt64Atomics_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicInt64Features & setShaderSharedInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSharedInt64Atomics = shaderSharedInt64Atomics_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy views of this object as the corresponding C struct.
+    operator VkPhysicalDeviceShaderAtomicInt64Features const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderAtomicInt64Features*>( this );
+    }
+
+    operator VkPhysicalDeviceShaderAtomicInt64Features &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderAtomicInt64Features*>( this );
+    }
+
+    // reflect(): tie of every member in declaration order; backs operator==
+    // when VULKAN_HPP_USE_REFLECT is defined.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, shaderBufferInt64Atomics, shaderSharedInt64Atomics );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceShaderAtomicInt64Features const & ) const = default;
+#else
+    // Memberwise equality; sType and pNext participate in the comparison.
+    bool operator==( PhysicalDeviceShaderAtomicInt64Features const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shaderBufferInt64Atomics == rhs.shaderBufferInt64Atomics )
+          && ( shaderSharedInt64Atomics == rhs.shaderSharedInt64Atomics );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceShaderAtomicInt64Features const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members mirror the C struct in declaration order.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderAtomicInt64Features;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics = {};
+
+  };
+
+  // StructureType -> C++ struct type lookup for this structure.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderAtomicInt64Features>
+  {
+    using Type = PhysicalDeviceShaderAtomicInt64Features;
+  };
+  // Pre-promotion (VK_KHR_shader_atomic_int64) name for the same struct.
+  using PhysicalDeviceShaderAtomicInt64FeaturesKHR = PhysicalDeviceShaderAtomicInt64Features;
+
+  // C++ projection of VkPhysicalDeviceShaderClockFeaturesKHR
+  // (VK_KHR_shader_clock feature-query struct). Setters return *this for chaining.
+  struct PhysicalDeviceShaderClockFeaturesKHR
+  {
+    using NativeType = VkPhysicalDeviceShaderClockFeaturesKHR;
+
+    // NOTE(review): presumably gates repeated use of this struct in a pNext
+    // chain; the consuming validation logic is not visible in this section.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderClockFeaturesKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderClockFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), shaderSubgroupClock( shaderSubgroupClock_ ), shaderDeviceClock( shaderDeviceClock_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderClockFeaturesKHR( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; assumes the C++ and C layouts are identical
+    // (the reinterpret_cast conversion operators below rely on the same thing).
+    PhysicalDeviceShaderClockFeaturesKHR( VkPhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShaderClockFeaturesKHR( *reinterpret_cast<PhysicalDeviceShaderClockFeaturesKHR const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceShaderClockFeaturesKHR & operator=( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct via the layout-compatible C++ view.
+    PhysicalDeviceShaderClockFeaturesKHR & operator=( VkPhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderClockFeaturesKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters: each assigns one member and returns *this.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderClockFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderClockFeaturesKHR & setShaderSubgroupClock( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSubgroupClock = shaderSubgroupClock_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderClockFeaturesKHR & setShaderDeviceClock( VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderDeviceClock = shaderDeviceClock_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy views of this object as the corresponding C struct.
+    operator VkPhysicalDeviceShaderClockFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderClockFeaturesKHR*>( this );
+    }
+
+    operator VkPhysicalDeviceShaderClockFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderClockFeaturesKHR*>( this );
+    }
+
+    // reflect(): tie of every member in declaration order; backs operator==
+    // when VULKAN_HPP_USE_REFLECT is defined.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, shaderSubgroupClock, shaderDeviceClock );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceShaderClockFeaturesKHR const & ) const = default;
+#else
+    // Memberwise equality; sType and pNext participate in the comparison.
+    bool operator==( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shaderSubgroupClock == rhs.shaderSubgroupClock )
+          && ( shaderDeviceClock == rhs.shaderDeviceClock );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members mirror the C struct in declaration order.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderClockFeaturesKHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock = {};
+
+  };
+
+  // StructureType -> C++ struct type lookup for this structure.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderClockFeaturesKHR>
+  {
+    using Type = PhysicalDeviceShaderClockFeaturesKHR;
+  };
+
+  // C++ projection of VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM
+  // (VK_ARM_shader_core_builtins feature-query struct). Setters return *this.
+  struct PhysicalDeviceShaderCoreBuiltinsFeaturesARM
+  {
+    using NativeType = VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM;
+
+    // NOTE(review): presumably gates repeated use of this struct in a pNext
+    // chain; the consuming validation logic is not visible in this section.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderCoreBuiltinsFeaturesARM;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreBuiltinsFeaturesARM(VULKAN_HPP_NAMESPACE::Bool32 shaderCoreBuiltins_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), shaderCoreBuiltins( shaderCoreBuiltins_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreBuiltinsFeaturesARM( PhysicalDeviceShaderCoreBuiltinsFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; assumes the C++ and C layouts are identical
+    // (the reinterpret_cast conversion operators below rely on the same thing).
+    PhysicalDeviceShaderCoreBuiltinsFeaturesARM( VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShaderCoreBuiltinsFeaturesARM( *reinterpret_cast<PhysicalDeviceShaderCoreBuiltinsFeaturesARM const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceShaderCoreBuiltinsFeaturesARM & operator=( PhysicalDeviceShaderCoreBuiltinsFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct via the layout-compatible C++ view.
+    PhysicalDeviceShaderCoreBuiltinsFeaturesARM & operator=( VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreBuiltinsFeaturesARM const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters: each assigns one member and returns *this.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderCoreBuiltinsFeaturesARM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderCoreBuiltinsFeaturesARM & setShaderCoreBuiltins( VULKAN_HPP_NAMESPACE::Bool32 shaderCoreBuiltins_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderCoreBuiltins = shaderCoreBuiltins_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy views of this object as the corresponding C struct.
+    operator VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM*>( this );
+    }
+
+    operator VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM*>( this );
+    }
+
+    // reflect(): tie of every member in declaration order; backs operator==
+    // when VULKAN_HPP_USE_REFLECT is defined.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, shaderCoreBuiltins );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceShaderCoreBuiltinsFeaturesARM const & ) const = default;
+#else
+    // Memberwise equality; sType and pNext participate in the comparison.
+    bool operator==( PhysicalDeviceShaderCoreBuiltinsFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shaderCoreBuiltins == rhs.shaderCoreBuiltins );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceShaderCoreBuiltinsFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members mirror the C struct in declaration order.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderCoreBuiltinsFeaturesARM;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderCoreBuiltins = {};
+
+  };
+
+  // StructureType -> C++ struct type lookup for this structure.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderCoreBuiltinsFeaturesARM>
+  {
+    using Type = PhysicalDeviceShaderCoreBuiltinsFeaturesARM;
+  };
+
+  // C++ projection of VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM.
+  // Properties (query-output) struct: note that no member setters are
+  // generated, unlike the *Features structs above.
+  struct PhysicalDeviceShaderCoreBuiltinsPropertiesARM
+  {
+    using NativeType = VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM;
+
+    // NOTE(review): presumably gates repeated use of this struct in a pNext
+    // chain; the consuming validation logic is not visible in this section.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderCoreBuiltinsPropertiesARM;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreBuiltinsPropertiesARM(uint64_t shaderCoreMask_ = {}, uint32_t shaderCoreCount_ = {}, uint32_t shaderWarpsPerCore_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), shaderCoreMask( shaderCoreMask_ ), shaderCoreCount( shaderCoreCount_ ), shaderWarpsPerCore( shaderWarpsPerCore_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreBuiltinsPropertiesARM( PhysicalDeviceShaderCoreBuiltinsPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; assumes the C++ and C layouts are identical
+    // (the reinterpret_cast conversion operators below rely on the same thing).
+    PhysicalDeviceShaderCoreBuiltinsPropertiesARM( VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShaderCoreBuiltinsPropertiesARM( *reinterpret_cast<PhysicalDeviceShaderCoreBuiltinsPropertiesARM const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceShaderCoreBuiltinsPropertiesARM & operator=( PhysicalDeviceShaderCoreBuiltinsPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct via the layout-compatible C++ view.
+    PhysicalDeviceShaderCoreBuiltinsPropertiesARM & operator=( VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreBuiltinsPropertiesARM const *>( &rhs );
+      return *this;
+    }
+
+
+    // Zero-copy views of this object as the corresponding C struct.
+    operator VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM*>( this );
+    }
+
+    operator VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM*>( this );
+    }
+
+    // reflect(): tie of every member in declaration order; backs operator==
+    // when VULKAN_HPP_USE_REFLECT is defined.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint64_t const &, uint32_t const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, shaderCoreMask, shaderCoreCount, shaderWarpsPerCore );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceShaderCoreBuiltinsPropertiesARM const & ) const = default;
+#else
+    // Memberwise equality; sType and pNext participate in the comparison.
+    bool operator==( PhysicalDeviceShaderCoreBuiltinsPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shaderCoreMask == rhs.shaderCoreMask )
+          && ( shaderCoreCount == rhs.shaderCoreCount )
+          && ( shaderWarpsPerCore == rhs.shaderWarpsPerCore );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceShaderCoreBuiltinsPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members mirror the C struct in declaration order.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderCoreBuiltinsPropertiesARM;
+    void * pNext = {};
+    uint64_t shaderCoreMask = {};
+    uint32_t shaderCoreCount = {};
+    uint32_t shaderWarpsPerCore = {};
+
+  };
+
+  // StructureType -> C++ struct type lookup for this structure.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderCoreBuiltinsPropertiesARM>
+  {
+    using Type = PhysicalDeviceShaderCoreBuiltinsPropertiesARM;
+  };
+
+  // C++ projection of VkPhysicalDeviceShaderCoreProperties2AMD.
+  // Properties (query-output) struct: no member setters are generated.
+  struct PhysicalDeviceShaderCoreProperties2AMD
+  {
+    using NativeType = VkPhysicalDeviceShaderCoreProperties2AMD;
+
+    // NOTE(review): presumably gates repeated use of this struct in a pNext
+    // chain; the consuming validation logic is not visible in this section.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderCoreProperties2AMD;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreProperties2AMD(VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagsAMD shaderCoreFeatures_ = {}, uint32_t activeComputeUnitCount_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), shaderCoreFeatures( shaderCoreFeatures_ ), activeComputeUnitCount( activeComputeUnitCount_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreProperties2AMD( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; assumes the C++ and C layouts are identical
+    // (the reinterpret_cast conversion operators below rely on the same thing).
+    PhysicalDeviceShaderCoreProperties2AMD( VkPhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShaderCoreProperties2AMD( *reinterpret_cast<PhysicalDeviceShaderCoreProperties2AMD const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceShaderCoreProperties2AMD & operator=( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct via the layout-compatible C++ view.
+    PhysicalDeviceShaderCoreProperties2AMD & operator=( VkPhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD const *>( &rhs );
+      return *this;
+    }
+
+
+    // Zero-copy views of this object as the corresponding C struct.
+    operator VkPhysicalDeviceShaderCoreProperties2AMD const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderCoreProperties2AMD*>( this );
+    }
+
+    operator VkPhysicalDeviceShaderCoreProperties2AMD &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderCoreProperties2AMD*>( this );
+    }
+
+    // reflect(): tie of every member in declaration order; backs operator==
+    // when VULKAN_HPP_USE_REFLECT is defined.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagsAMD const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, shaderCoreFeatures, activeComputeUnitCount );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceShaderCoreProperties2AMD const & ) const = default;
+#else
+    // Memberwise equality; sType and pNext participate in the comparison.
+    bool operator==( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shaderCoreFeatures == rhs.shaderCoreFeatures )
+          && ( activeComputeUnitCount == rhs.activeComputeUnitCount );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members mirror the C struct in declaration order.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderCoreProperties2AMD;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagsAMD shaderCoreFeatures = {};
+    uint32_t activeComputeUnitCount = {};
+
+  };
+
+  // StructureType -> C++ struct type lookup for this structure.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderCoreProperties2AMD>
+  {
+    using Type = PhysicalDeviceShaderCoreProperties2AMD;
+  };
+
+  // C++ projection of VkPhysicalDeviceShaderCorePropertiesAMD
+  // (VK_AMD_shader_core_properties query-output struct): no setters generated.
+  struct PhysicalDeviceShaderCorePropertiesAMD
+  {
+    using NativeType = VkPhysicalDeviceShaderCorePropertiesAMD;
+
+    // NOTE(review): presumably gates repeated use of this struct in a pNext
+    // chain; the consuming validation logic is not visible in this section.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderCorePropertiesAMD;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCorePropertiesAMD(uint32_t shaderEngineCount_ = {}, uint32_t shaderArraysPerEngineCount_ = {}, uint32_t computeUnitsPerShaderArray_ = {}, uint32_t simdPerComputeUnit_ = {}, uint32_t wavefrontsPerSimd_ = {}, uint32_t wavefrontSize_ = {}, uint32_t sgprsPerSimd_ = {}, uint32_t minSgprAllocation_ = {}, uint32_t maxSgprAllocation_ = {}, uint32_t sgprAllocationGranularity_ = {}, uint32_t vgprsPerSimd_ = {}, uint32_t minVgprAllocation_ = {}, uint32_t maxVgprAllocation_ = {}, uint32_t vgprAllocationGranularity_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), shaderEngineCount( shaderEngineCount_ ), shaderArraysPerEngineCount( shaderArraysPerEngineCount_ ), computeUnitsPerShaderArray( computeUnitsPerShaderArray_ ), simdPerComputeUnit( simdPerComputeUnit_ ), wavefrontsPerSimd( wavefrontsPerSimd_ ), wavefrontSize( wavefrontSize_ ), sgprsPerSimd( sgprsPerSimd_ ), minSgprAllocation( minSgprAllocation_ ), maxSgprAllocation( maxSgprAllocation_ ), sgprAllocationGranularity( sgprAllocationGranularity_ ), vgprsPerSimd( vgprsPerSimd_ ), minVgprAllocation( minVgprAllocation_ ), maxVgprAllocation( maxVgprAllocation_ ), vgprAllocationGranularity( vgprAllocationGranularity_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCorePropertiesAMD( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; assumes the C++ and C layouts are identical
+    // (the reinterpret_cast conversion operators below rely on the same thing).
+    PhysicalDeviceShaderCorePropertiesAMD( VkPhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShaderCorePropertiesAMD( *reinterpret_cast<PhysicalDeviceShaderCorePropertiesAMD const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceShaderCorePropertiesAMD & operator=( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct via the layout-compatible C++ view.
+    PhysicalDeviceShaderCorePropertiesAMD & operator=( VkPhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesAMD const *>( &rhs );
+      return *this;
+    }
+
+
+    // Zero-copy views of this object as the corresponding C struct.
+    operator VkPhysicalDeviceShaderCorePropertiesAMD const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderCorePropertiesAMD*>( this );
+    }
+
+    operator VkPhysicalDeviceShaderCorePropertiesAMD &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderCorePropertiesAMD*>( this );
+    }
+
+    // reflect(): tie of every member in declaration order; backs operator==
+    // when VULKAN_HPP_USE_REFLECT is defined.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, shaderEngineCount, shaderArraysPerEngineCount, computeUnitsPerShaderArray, simdPerComputeUnit, wavefrontsPerSimd, wavefrontSize, sgprsPerSimd, minSgprAllocation, maxSgprAllocation, sgprAllocationGranularity, vgprsPerSimd, minVgprAllocation, maxVgprAllocation, vgprAllocationGranularity );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceShaderCorePropertiesAMD const & ) const = default;
+#else
+    // Memberwise equality; sType and pNext participate in the comparison.
+    bool operator==( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shaderEngineCount == rhs.shaderEngineCount )
+          && ( shaderArraysPerEngineCount == rhs.shaderArraysPerEngineCount )
+          && ( computeUnitsPerShaderArray == rhs.computeUnitsPerShaderArray )
+          && ( simdPerComputeUnit == rhs.simdPerComputeUnit )
+          && ( wavefrontsPerSimd == rhs.wavefrontsPerSimd )
+          && ( wavefrontSize == rhs.wavefrontSize )
+          && ( sgprsPerSimd == rhs.sgprsPerSimd )
+          && ( minSgprAllocation == rhs.minSgprAllocation )
+          && ( maxSgprAllocation == rhs.maxSgprAllocation )
+          && ( sgprAllocationGranularity == rhs.sgprAllocationGranularity )
+          && ( vgprsPerSimd == rhs.vgprsPerSimd )
+          && ( minVgprAllocation == rhs.minVgprAllocation )
+          && ( maxVgprAllocation == rhs.maxVgprAllocation )
+          && ( vgprAllocationGranularity == rhs.vgprAllocationGranularity );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members mirror the C struct in declaration order.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderCorePropertiesAMD;
+    void * pNext = {};
+    uint32_t shaderEngineCount = {};
+    uint32_t shaderArraysPerEngineCount = {};
+    uint32_t computeUnitsPerShaderArray = {};
+    uint32_t simdPerComputeUnit = {};
+    uint32_t wavefrontsPerSimd = {};
+    uint32_t wavefrontSize = {};
+    uint32_t sgprsPerSimd = {};
+    uint32_t minSgprAllocation = {};
+    uint32_t maxSgprAllocation = {};
+    uint32_t sgprAllocationGranularity = {};
+    uint32_t vgprsPerSimd = {};
+    uint32_t minVgprAllocation = {};
+    uint32_t maxVgprAllocation = {};
+    uint32_t vgprAllocationGranularity = {};
+
+  };
+
+  // StructureType -> C++ struct type lookup for this structure.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderCorePropertiesAMD>
+  {
+    using Type = PhysicalDeviceShaderCorePropertiesAMD;
+  };
+
+  struct PhysicalDeviceShaderDemoteToHelperInvocationFeatures
+  {
+    using NativeType = VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeatures;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDemoteToHelperInvocationFeatures(VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), shaderDemoteToHelperInvocation( shaderDemoteToHelperInvocation_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDemoteToHelperInvocationFeatures( PhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderDemoteToHelperInvocationFeatures( VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShaderDemoteToHelperInvocationFeatures( *reinterpret_cast<PhysicalDeviceShaderDemoteToHelperInvocationFeatures const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceShaderDemoteToHelperInvocationFeatures & operator=( PhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceShaderDemoteToHelperInvocationFeatures & operator=( VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDemoteToHelperInvocationFeatures const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderDemoteToHelperInvocationFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderDemoteToHelperInvocationFeatures & setShaderDemoteToHelperInvocation( VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderDemoteToHelperInvocation = shaderDemoteToHelperInvocation_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures*>( this );
+    }
+
+    operator VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, shaderDemoteToHelperInvocation );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceShaderDemoteToHelperInvocationFeatures const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shaderDemoteToHelperInvocation == rhs.shaderDemoteToHelperInvocation );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeatures;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeatures>
+  {
+    using Type = PhysicalDeviceShaderDemoteToHelperInvocationFeatures;
+  };
+  using PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT = PhysicalDeviceShaderDemoteToHelperInvocationFeatures;
+
+  struct PhysicalDeviceShaderDrawParametersFeatures
+  {
+    using NativeType = VkPhysicalDeviceShaderDrawParametersFeatures;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderDrawParametersFeatures;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDrawParametersFeatures(VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), shaderDrawParameters( shaderDrawParameters_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDrawParametersFeatures( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderDrawParametersFeatures( VkPhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShaderDrawParametersFeatures( *reinterpret_cast<PhysicalDeviceShaderDrawParametersFeatures const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceShaderDrawParametersFeatures & operator=( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceShaderDrawParametersFeatures & operator=( VkPhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderDrawParametersFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderDrawParametersFeatures & setShaderDrawParameters( VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderDrawParameters = shaderDrawParameters_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceShaderDrawParametersFeatures const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderDrawParametersFeatures*>( this );
+    }
+
+    operator VkPhysicalDeviceShaderDrawParametersFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderDrawParametersFeatures*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, shaderDrawParameters );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceShaderDrawParametersFeatures const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shaderDrawParameters == rhs.shaderDrawParameters );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderDrawParametersFeatures;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderDrawParametersFeatures>
+  {
+    using Type = PhysicalDeviceShaderDrawParametersFeatures;
+  };
+  using PhysicalDeviceShaderDrawParameterFeatures = PhysicalDeviceShaderDrawParametersFeatures;
+
+  struct PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD
+  {
+    using NativeType = VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD(VULKAN_HPP_NAMESPACE::Bool32 shaderEarlyAndLateFragmentTests_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), shaderEarlyAndLateFragmentTests( shaderEarlyAndLateFragmentTests_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD( PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD( VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD( *reinterpret_cast<PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD & operator=( PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD & operator=( VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD & setShaderEarlyAndLateFragmentTests( VULKAN_HPP_NAMESPACE::Bool32 shaderEarlyAndLateFragmentTests_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderEarlyAndLateFragmentTests = shaderEarlyAndLateFragmentTests_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD*>( this );
+    }
+
+    operator VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, shaderEarlyAndLateFragmentTests );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shaderEarlyAndLateFragmentTests == rhs.shaderEarlyAndLateFragmentTests );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderEarlyAndLateFragmentTests = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD>
+  {
+    using Type = PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD;
+  };
+
+  struct PhysicalDeviceShaderFloat16Int8Features
+  {
+    using NativeType = VkPhysicalDeviceShaderFloat16Int8Features;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderFloat16Int8Features;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFloat16Int8Features(VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), shaderFloat16( shaderFloat16_ ), shaderInt8( shaderInt8_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFloat16Int8Features( PhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderFloat16Int8Features( VkPhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShaderFloat16Int8Features( *reinterpret_cast<PhysicalDeviceShaderFloat16Int8Features const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceShaderFloat16Int8Features & operator=( PhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceShaderFloat16Int8Features & operator=( VkPhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8Features const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderFloat16Int8Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderFloat16Int8Features & setShaderFloat16( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderFloat16 = shaderFloat16_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderFloat16Int8Features & setShaderInt8( VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderInt8 = shaderInt8_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceShaderFloat16Int8Features const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderFloat16Int8Features*>( this );
+    }
+
+    operator VkPhysicalDeviceShaderFloat16Int8Features &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderFloat16Int8Features*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, shaderFloat16, shaderInt8 );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceShaderFloat16Int8Features const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderFloat16Int8Features const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shaderFloat16 == rhs.shaderFloat16 )
+          && ( shaderInt8 == rhs.shaderInt8 );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceShaderFloat16Int8Features const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderFloat16Int8Features;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderInt8 = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderFloat16Int8Features>
+  {
+    using Type = PhysicalDeviceShaderFloat16Int8Features;
+  };
+  using PhysicalDeviceFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8Features;
+  using PhysicalDeviceShaderFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8Features;
+
+  struct PhysicalDeviceShaderImageAtomicInt64FeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageAtomicInt64FeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 shaderImageInt64Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseImageInt64Atomics_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), shaderImageInt64Atomics( shaderImageInt64Atomics_ ), sparseImageInt64Atomics( sparseImageInt64Atomics_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageAtomicInt64FeaturesEXT( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderImageAtomicInt64FeaturesEXT( VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShaderImageAtomicInt64FeaturesEXT( *reinterpret_cast<PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & operator=( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & operator=( VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & setShaderImageInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderImageInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderImageInt64Atomics = shaderImageInt64Atomics_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & setSparseImageInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 sparseImageInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sparseImageInt64Atomics = sparseImageInt64Atomics_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, shaderImageInt64Atomics, sparseImageInt64Atomics );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shaderImageInt64Atomics == rhs.shaderImageInt64Atomics )
+          && ( sparseImageInt64Atomics == rhs.sparseImageInt64Atomics );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderImageInt64Atomics = {};
+    VULKAN_HPP_NAMESPACE::Bool32 sparseImageInt64Atomics = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT>
+  {
+    using Type = PhysicalDeviceShaderImageAtomicInt64FeaturesEXT;
+  };
+
+  struct PhysicalDeviceShaderImageFootprintFeaturesNV
+  {
+    using NativeType = VkPhysicalDeviceShaderImageFootprintFeaturesNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageFootprintFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 imageFootprint_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), imageFootprint( imageFootprint_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageFootprintFeaturesNV( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderImageFootprintFeaturesNV( VkPhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShaderImageFootprintFeaturesNV( *reinterpret_cast<PhysicalDeviceShaderImageFootprintFeaturesNV const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceShaderImageFootprintFeaturesNV & operator=( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceShaderImageFootprintFeaturesNV & operator=( VkPhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageFootprintFeaturesNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageFootprintFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageFootprintFeaturesNV & setImageFootprint( VULKAN_HPP_NAMESPACE::Bool32 imageFootprint_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageFootprint = imageFootprint_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceShaderImageFootprintFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderImageFootprintFeaturesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceShaderImageFootprintFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderImageFootprintFeaturesNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, imageFootprint );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceShaderImageFootprintFeaturesNV const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( imageFootprint == rhs.imageFootprint );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 imageFootprint = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV>
+  {
+    using Type = PhysicalDeviceShaderImageFootprintFeaturesNV;
+  };
+
+  struct PhysicalDeviceShaderIntegerDotProductFeatures
+  {
+    using NativeType = VkPhysicalDeviceShaderIntegerDotProductFeatures;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderIntegerDotProductFeatures;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerDotProductFeatures(VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), shaderIntegerDotProduct( shaderIntegerDotProduct_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerDotProductFeatures( PhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderIntegerDotProductFeatures( VkPhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShaderIntegerDotProductFeatures( *reinterpret_cast<PhysicalDeviceShaderIntegerDotProductFeatures const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceShaderIntegerDotProductFeatures & operator=( PhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceShaderIntegerDotProductFeatures & operator=( VkPhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductFeatures const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductFeatures & setShaderIntegerDotProduct( VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderIntegerDotProduct = shaderIntegerDotProduct_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceShaderIntegerDotProductFeatures const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderIntegerDotProductFeatures*>( this );
+    }
+
+    operator VkPhysicalDeviceShaderIntegerDotProductFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderIntegerDotProductFeatures*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, shaderIntegerDotProduct );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceShaderIntegerDotProductFeatures const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shaderIntegerDotProduct == rhs.shaderIntegerDotProduct );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderIntegerDotProductFeatures;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderIntegerDotProductFeatures>
+  {
+    using Type = PhysicalDeviceShaderIntegerDotProductFeatures;
+  };
+  using PhysicalDeviceShaderIntegerDotProductFeaturesKHR = PhysicalDeviceShaderIntegerDotProductFeatures;
+
+  struct PhysicalDeviceShaderIntegerDotProductProperties
+  {
+    using NativeType = VkPhysicalDeviceShaderIntegerDotProductProperties;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderIntegerDotProductProperties;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerDotProductProperties(VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 
integerDotProductAccumulatingSaturating16BitUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), integerDotProduct8BitUnsignedAccelerated( integerDotProduct8BitUnsignedAccelerated_ ), integerDotProduct8BitSignedAccelerated( integerDotProduct8BitSignedAccelerated_ ), integerDotProduct8BitMixedSignednessAccelerated( integerDotProduct8BitMixedSignednessAccelerated_ ), integerDotProduct4x8BitPackedUnsignedAccelerated( integerDotProduct4x8BitPackedUnsignedAccelerated_ ), integerDotProduct4x8BitPackedSignedAccelerated( integerDotProduct4x8BitPackedSignedAccelerated_ ), integerDotProduct4x8BitPackedMixedSignednessAccelerated( integerDotProduct4x8BitPackedMixedSignednessAccelerated_ ), integerDotProduct16BitUnsignedAccelerated( integerDotProduct16BitUnsignedAccelerated_ ), integerDotProduct16BitSignedAccelerated( integerDotProduct16BitSignedAccelerated_ ), integerDotProduct16BitMixedSignednessAccelerated( integerDotProduct16BitMixedSignednessAccelerated_ ), integerDotProduct32BitUnsignedAccelerated( integerDotProduct32BitUnsignedAccelerated_ ), integerDotProduct32BitSignedAccelerated( integerDotProduct32BitSignedAccelerated_ ), integerDotProduct32BitMixedSignednessAccelerated( integerDotProduct32BitMixedSignednessAccelerated_ ), integerDotProduct64BitUnsignedAccelerated( integerDotProduct64BitUnsignedAccelerated_ ), integerDotProduct64BitSignedAccelerated( integerDotProduct64BitSignedAccelerated_ ), integerDotProduct64BitMixedSignednessAccelerated( integerDotProduct64BitMixedSignednessAccelerated_ ), integerDotProductAccumulatingSaturating8BitUnsignedAccelerated( integerDotProductAccumulatingSaturating8BitUnsignedAccelerated_ ), integerDotProductAccumulatingSaturating8BitSignedAccelerated( integerDotProductAccumulatingSaturating8BitSignedAccelerated_ ), integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated( integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated_ ), integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated( integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated_ ), 
integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated( integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated_ ), integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated( integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated_ ), integerDotProductAccumulatingSaturating16BitUnsignedAccelerated( integerDotProductAccumulatingSaturating16BitUnsignedAccelerated_ ), integerDotProductAccumulatingSaturating16BitSignedAccelerated( integerDotProductAccumulatingSaturating16BitSignedAccelerated_ ), integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated( integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated_ ), integerDotProductAccumulatingSaturating32BitUnsignedAccelerated( integerDotProductAccumulatingSaturating32BitUnsignedAccelerated_ ), integerDotProductAccumulatingSaturating32BitSignedAccelerated( integerDotProductAccumulatingSaturating32BitSignedAccelerated_ ), integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated( integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated_ ), integerDotProductAccumulatingSaturating64BitUnsignedAccelerated( integerDotProductAccumulatingSaturating64BitUnsignedAccelerated_ ), integerDotProductAccumulatingSaturating64BitSignedAccelerated( integerDotProductAccumulatingSaturating64BitSignedAccelerated_ ), integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated( integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerDotProductProperties( PhysicalDeviceShaderIntegerDotProductProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderIntegerDotProductProperties( VkPhysicalDeviceShaderIntegerDotProductProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShaderIntegerDotProductProperties( *reinterpret_cast<PhysicalDeviceShaderIntegerDotProductProperties const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceShaderIntegerDotProductProperties & operator=( PhysicalDeviceShaderIntegerDotProductProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceShaderIntegerDotProductProperties & operator=( VkPhysicalDeviceShaderIntegerDotProductProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductProperties const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceShaderIntegerDotProductProperties const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderIntegerDotProductProperties*>( this );
+    }
+
+    operator VkPhysicalDeviceShaderIntegerDotProductProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderIntegerDotProductProperties*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, integerDotProduct8BitUnsignedAccelerated, integerDotProduct8BitSignedAccelerated, integerDotProduct8BitMixedSignednessAccelerated, integerDotProduct4x8BitPackedUnsignedAccelerated, integerDotProduct4x8BitPackedSignedAccelerated, integerDotProduct4x8BitPackedMixedSignednessAccelerated, integerDotProduct16BitUnsignedAccelerated, integerDotProduct16BitSignedAccelerated, integerDotProduct16BitMixedSignednessAccelerated, integerDotProduct32BitUnsignedAccelerated, integerDotProduct32BitSignedAccelerated, integerDotProduct32BitMixedSignednessAccelerated, integerDotProduct64BitUnsignedAccelerated, integerDotProduct64BitSignedAccelerated, integerDotProduct64BitMixedSignednessAccelerated, integerDotProductAccumulatingSaturating8BitUnsignedAccelerated, integerDotProductAccumulatingSaturating8BitSignedAccelerated, integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated, integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated, integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated, integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated, integerDotProductAccumulatingSaturating16BitUnsignedAccelerated, integerDotProductAccumulatingSaturating16BitSignedAccelerated, integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated, integerDotProductAccumulatingSaturating32BitUnsignedAccelerated, integerDotProductAccumulatingSaturating32BitSignedAccelerated, integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated, integerDotProductAccumulatingSaturating64BitUnsignedAccelerated, integerDotProductAccumulatingSaturating64BitSignedAccelerated, integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceShaderIntegerDotProductProperties const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderIntegerDotProductProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( integerDotProduct8BitUnsignedAccelerated == rhs.integerDotProduct8BitUnsignedAccelerated )
+          && ( integerDotProduct8BitSignedAccelerated == rhs.integerDotProduct8BitSignedAccelerated )
+          && ( integerDotProduct8BitMixedSignednessAccelerated == rhs.integerDotProduct8BitMixedSignednessAccelerated )
+          && ( integerDotProduct4x8BitPackedUnsignedAccelerated == rhs.integerDotProduct4x8BitPackedUnsignedAccelerated )
+          && ( integerDotProduct4x8BitPackedSignedAccelerated == rhs.integerDotProduct4x8BitPackedSignedAccelerated )
+          && ( integerDotProduct4x8BitPackedMixedSignednessAccelerated == rhs.integerDotProduct4x8BitPackedMixedSignednessAccelerated )
+          && ( integerDotProduct16BitUnsignedAccelerated == rhs.integerDotProduct16BitUnsignedAccelerated )
+          && ( integerDotProduct16BitSignedAccelerated == rhs.integerDotProduct16BitSignedAccelerated )
+          && ( integerDotProduct16BitMixedSignednessAccelerated == rhs.integerDotProduct16BitMixedSignednessAccelerated )
+          && ( integerDotProduct32BitUnsignedAccelerated == rhs.integerDotProduct32BitUnsignedAccelerated )
+          && ( integerDotProduct32BitSignedAccelerated == rhs.integerDotProduct32BitSignedAccelerated )
+          && ( integerDotProduct32BitMixedSignednessAccelerated == rhs.integerDotProduct32BitMixedSignednessAccelerated )
+          && ( integerDotProduct64BitUnsignedAccelerated == rhs.integerDotProduct64BitUnsignedAccelerated )
+          && ( integerDotProduct64BitSignedAccelerated == rhs.integerDotProduct64BitSignedAccelerated )
+          && ( integerDotProduct64BitMixedSignednessAccelerated == rhs.integerDotProduct64BitMixedSignednessAccelerated )
+          && ( integerDotProductAccumulatingSaturating8BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating8BitUnsignedAccelerated )
+          && ( integerDotProductAccumulatingSaturating8BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating8BitSignedAccelerated )
+          && ( integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated == rhs.integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated )
+          && ( integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated )
+          && ( integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated == rhs.integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated )
+          && ( integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated == rhs.integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated )
+          && ( integerDotProductAccumulatingSaturating16BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating16BitUnsignedAccelerated )
+          && ( integerDotProductAccumulatingSaturating16BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating16BitSignedAccelerated )
+          && ( integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated == rhs.integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated )
+          && ( integerDotProductAccumulatingSaturating32BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating32BitUnsignedAccelerated )
+          && ( integerDotProductAccumulatingSaturating32BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating32BitSignedAccelerated )
+          && ( integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated == rhs.integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated )
+          && ( integerDotProductAccumulatingSaturating64BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating64BitUnsignedAccelerated )
+          && ( integerDotProductAccumulatingSaturating64BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating64BitSignedAccelerated )
+          && ( integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated == rhs.integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceShaderIntegerDotProductProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderIntegerDotProductProperties;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitUnsignedAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitSignedAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitMixedSignednessAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedUnsignedAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedSignedAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedMixedSignednessAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitUnsignedAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitSignedAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitMixedSignednessAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitUnsignedAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitSignedAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitMixedSignednessAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitUnsignedAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitSignedAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitMixedSignednessAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitUnsignedAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitSignedAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitUnsignedAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitSignedAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitUnsignedAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitSignedAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitUnsignedAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitSignedAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderIntegerDotProductProperties>
+  {
+    using Type = PhysicalDeviceShaderIntegerDotProductProperties;
+  };
+  using PhysicalDeviceShaderIntegerDotProductPropertiesKHR = PhysicalDeviceShaderIntegerDotProductProperties;
+
+  struct PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL
+  {
+    using NativeType = VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL(VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), shaderIntegerFunctions2( shaderIntegerFunctions2_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( *reinterpret_cast<PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & operator=( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & operator=( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & setShaderIntegerFunctions2( VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderIntegerFunctions2 = shaderIntegerFunctions2_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL*>( this );
+    }
+
+    operator VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, shaderIntegerFunctions2 );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shaderIntegerFunctions2 == rhs.shaderIntegerFunctions2 );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2 = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL>
+  {
+    using Type = PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL;
+  };
+
+  struct PhysicalDeviceShaderModuleIdentifierFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderModuleIdentifierFeaturesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderModuleIdentifierFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 shaderModuleIdentifier_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), shaderModuleIdentifier( shaderModuleIdentifier_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderModuleIdentifierFeaturesEXT( PhysicalDeviceShaderModuleIdentifierFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderModuleIdentifierFeaturesEXT( VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShaderModuleIdentifierFeaturesEXT( *reinterpret_cast<PhysicalDeviceShaderModuleIdentifierFeaturesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceShaderModuleIdentifierFeaturesEXT & operator=( PhysicalDeviceShaderModuleIdentifierFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceShaderModuleIdentifierFeaturesEXT & operator=( VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderModuleIdentifierFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderModuleIdentifierFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderModuleIdentifierFeaturesEXT & setShaderModuleIdentifier( VULKAN_HPP_NAMESPACE::Bool32 shaderModuleIdentifier_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderModuleIdentifier = shaderModuleIdentifier_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, shaderModuleIdentifier );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceShaderModuleIdentifierFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderModuleIdentifierFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shaderModuleIdentifier == rhs.shaderModuleIdentifier );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceShaderModuleIdentifierFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderModuleIdentifierFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderModuleIdentifier = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderModuleIdentifierFeaturesEXT>
+  {
+    using Type = PhysicalDeviceShaderModuleIdentifierFeaturesEXT;
+  };
+
+  struct PhysicalDeviceShaderModuleIdentifierPropertiesEXT
+  {
+    using NativeType = VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderModuleIdentifierPropertiesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderModuleIdentifierPropertiesEXT(std::array<uint8_t,VK_UUID_SIZE> const & shaderModuleIdentifierAlgorithmUUID_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), shaderModuleIdentifierAlgorithmUUID( shaderModuleIdentifierAlgorithmUUID_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderModuleIdentifierPropertiesEXT( PhysicalDeviceShaderModuleIdentifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderModuleIdentifierPropertiesEXT( VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShaderModuleIdentifierPropertiesEXT( *reinterpret_cast<PhysicalDeviceShaderModuleIdentifierPropertiesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceShaderModuleIdentifierPropertiesEXT & operator=( PhysicalDeviceShaderModuleIdentifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceShaderModuleIdentifierPropertiesEXT & operator=( VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderModuleIdentifierPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, shaderModuleIdentifierAlgorithmUUID );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceShaderModuleIdentifierPropertiesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderModuleIdentifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shaderModuleIdentifierAlgorithmUUID == rhs.shaderModuleIdentifierAlgorithmUUID );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceShaderModuleIdentifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderModuleIdentifierPropertiesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> shaderModuleIdentifierAlgorithmUUID = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderModuleIdentifierPropertiesEXT>
+  {
+    using Type = PhysicalDeviceShaderModuleIdentifierPropertiesEXT;
+  };
+
+  struct PhysicalDeviceShaderSMBuiltinsFeaturesNV
+  {
+    using NativeType = VkPhysicalDeviceShaderSMBuiltinsFeaturesNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), shaderSMBuiltins( shaderSMBuiltins_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsFeaturesNV( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderSMBuiltinsFeaturesNV( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShaderSMBuiltinsFeaturesNV( *reinterpret_cast<PhysicalDeviceShaderSMBuiltinsFeaturesNV const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceShaderSMBuiltinsFeaturesNV & operator=( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceShaderSMBuiltinsFeaturesNV & operator=( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsFeaturesNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSMBuiltinsFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSMBuiltinsFeaturesNV & setShaderSMBuiltins( VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSMBuiltins = shaderSMBuiltins_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderSMBuiltinsFeaturesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceShaderSMBuiltinsFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderSMBuiltinsFeaturesNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, shaderSMBuiltins );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shaderSMBuiltins == rhs.shaderSMBuiltins );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV>
+  {
+    using Type = PhysicalDeviceShaderSMBuiltinsFeaturesNV;
+  };
+
+  struct PhysicalDeviceShaderSMBuiltinsPropertiesNV
+  {
+    using NativeType = VkPhysicalDeviceShaderSMBuiltinsPropertiesNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsPropertiesNV(uint32_t shaderSMCount_ = {}, uint32_t shaderWarpsPerSM_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), shaderSMCount( shaderSMCount_ ), shaderWarpsPerSM( shaderWarpsPerSM_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsPropertiesNV( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderSMBuiltinsPropertiesNV( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShaderSMBuiltinsPropertiesNV( *reinterpret_cast<PhysicalDeviceShaderSMBuiltinsPropertiesNV const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceShaderSMBuiltinsPropertiesNV & operator=( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceShaderSMBuiltinsPropertiesNV & operator=( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderSMBuiltinsPropertiesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceShaderSMBuiltinsPropertiesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderSMBuiltinsPropertiesNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, shaderSMCount, shaderWarpsPerSM );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shaderSMCount == rhs.shaderSMCount )
+          && ( shaderWarpsPerSM == rhs.shaderWarpsPerSM );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV;
+    void * pNext = {};
+    uint32_t shaderSMCount = {};
+    uint32_t shaderWarpsPerSM = {};
+
+  };
+
+  // Compile-time map from the StructureType enumerant back to its C++ wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV>
+  {
+    using Type = PhysicalDeviceShaderSMBuiltinsPropertiesNV;
+  };
+
+  // C++ wrapper for VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures.
+  // Relies on being layout-identical to the C struct — see the reinterpret_cast
+  // conversions below. sType is fixed; pNext links the structure chain.
+  struct PhysicalDeviceShaderSubgroupExtendedTypesFeatures
+  {
+    using NativeType = VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures;
+
+    // NOTE(review): consumed by structure-chain validation elsewhere in the header.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupExtendedTypesFeatures(VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), shaderSubgroupExtendedTypes( shaderSubgroupExtendedTypes_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupExtendedTypesFeatures( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the layout-identical C++ type.
+    PhysicalDeviceShaderSubgroupExtendedTypesFeatures( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShaderSubgroupExtendedTypesFeatures( *reinterpret_cast<PhysicalDeviceShaderSubgroupExtendedTypesFeatures const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceShaderSubgroupExtendedTypesFeatures & operator=( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (same reinterpret trick as the converting constructor).
+    PhysicalDeviceShaderSubgroupExtendedTypesFeatures & operator=( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeatures const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable (builder-style) setters: each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupExtendedTypesFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupExtendedTypesFeatures & setShaderSubgroupExtendedTypes( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSubgroupExtendedTypes = shaderSubgroupExtendedTypes_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views as the underlying C struct, for passing to the C API.
+    operator VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures*>( this );
+    }
+
+    operator VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures*>( this );
+    }
+
+    // Tuple-of-references over all members; used by operator== below.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, shaderSubgroupExtendedTypes );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & ) const = default;
+#else
+    // Memberwise equality; pNext is compared by pointer value, not deep contents.
+    bool operator==( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shaderSubgroupExtendedTypes == rhs.shaderSubgroupExtendedTypes );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes = {};
+
+  };
+
+  // Compile-time map from the StructureType enumerant back to this C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures>
+  {
+    using Type = PhysicalDeviceShaderSubgroupExtendedTypesFeatures;
+  };
+  // Alias kept for source compatibility with the original KHR extension name.
+  using PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR = PhysicalDeviceShaderSubgroupExtendedTypesFeatures;
+
+  // C++ wrapper for VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR.
+  // Layout-identical to the C struct (the reinterpret_cast conversions below
+  // depend on this); sType is fixed, pNext links the structure chain.
+  struct PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR
+  {
+    using NativeType = VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupUniformControlFlow_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), shaderSubgroupUniformControlFlow( shaderSubgroupUniformControlFlow_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR( PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct via the layout-identity reinterpret_cast.
+    PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR( VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR( *reinterpret_cast<PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR & operator=( PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct.
+    PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR & operator=( VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable (builder-style) setters: each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR & setShaderSubgroupUniformControlFlow( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupUniformControlFlow_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSubgroupUniformControlFlow = shaderSubgroupUniformControlFlow_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views as the underlying C struct, for passing to the C API.
+    operator VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR*>( this );
+    }
+
+    operator VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR*>( this );
+    }
+
+    // Tuple-of-references over all members; used by operator== below.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, shaderSubgroupUniformControlFlow );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & ) const = default;
+#else
+    // Memberwise equality; pNext is compared by pointer value, not deep contents.
+    bool operator==( PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shaderSubgroupUniformControlFlow == rhs.shaderSubgroupUniformControlFlow );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupUniformControlFlow = {};
+
+  };
+
+  // Compile-time map from the StructureType enumerant back to this C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR>
+  {
+    using Type = PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR;
+  };
+
+  // C++ wrapper for VkPhysicalDeviceShaderTerminateInvocationFeatures.
+  // Layout-identical to the C struct (the reinterpret_cast conversions below
+  // depend on this); sType is fixed, pNext links the structure chain.
+  struct PhysicalDeviceShaderTerminateInvocationFeatures
+  {
+    using NativeType = VkPhysicalDeviceShaderTerminateInvocationFeatures;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderTerminateInvocationFeatures;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderTerminateInvocationFeatures(VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), shaderTerminateInvocation( shaderTerminateInvocation_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderTerminateInvocationFeatures( PhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct via the layout-identity reinterpret_cast.
+    PhysicalDeviceShaderTerminateInvocationFeatures( VkPhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShaderTerminateInvocationFeatures( *reinterpret_cast<PhysicalDeviceShaderTerminateInvocationFeatures const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceShaderTerminateInvocationFeatures & operator=( PhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct.
+    PhysicalDeviceShaderTerminateInvocationFeatures & operator=( VkPhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTerminateInvocationFeatures const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable (builder-style) setters: each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderTerminateInvocationFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderTerminateInvocationFeatures & setShaderTerminateInvocation( VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderTerminateInvocation = shaderTerminateInvocation_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views as the underlying C struct, for passing to the C API.
+    operator VkPhysicalDeviceShaderTerminateInvocationFeatures const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderTerminateInvocationFeatures*>( this );
+    }
+
+    operator VkPhysicalDeviceShaderTerminateInvocationFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderTerminateInvocationFeatures*>( this );
+    }
+
+    // Tuple-of-references over all members; used by operator== below.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, shaderTerminateInvocation );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceShaderTerminateInvocationFeatures const & ) const = default;
+#else
+    // Memberwise equality; pNext is compared by pointer value, not deep contents.
+    bool operator==( PhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shaderTerminateInvocation == rhs.shaderTerminateInvocation );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderTerminateInvocationFeatures;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation = {};
+
+  };
+
+  // Compile-time map from the StructureType enumerant back to this C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderTerminateInvocationFeatures>
+  {
+    using Type = PhysicalDeviceShaderTerminateInvocationFeatures;
+  };
+  // Alias kept for source compatibility with the original KHR extension name.
+  using PhysicalDeviceShaderTerminateInvocationFeaturesKHR = PhysicalDeviceShaderTerminateInvocationFeatures;
+
+  // C++ wrapper for VkPhysicalDeviceShadingRateImageFeaturesNV, carrying the
+  // two feature booleans shadingRateImage and shadingRateCoarseSampleOrder.
+  // Layout-identical to the C struct (see the reinterpret_cast conversions).
+  struct PhysicalDeviceShadingRateImageFeaturesNV
+  {
+    using NativeType = VkPhysicalDeviceShadingRateImageFeaturesNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShadingRateImageFeaturesNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImageFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), shadingRateImage( shadingRateImage_ ), shadingRateCoarseSampleOrder( shadingRateCoarseSampleOrder_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImageFeaturesNV( PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct via the layout-identity reinterpret_cast.
+    PhysicalDeviceShadingRateImageFeaturesNV( VkPhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShadingRateImageFeaturesNV( *reinterpret_cast<PhysicalDeviceShadingRateImageFeaturesNV const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceShadingRateImageFeaturesNV & operator=( PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct.
+    PhysicalDeviceShadingRateImageFeaturesNV & operator=( VkPhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImageFeaturesNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable (builder-style) setters: each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShadingRateImageFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShadingRateImageFeaturesNV & setShadingRateImage( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shadingRateImage = shadingRateImage_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShadingRateImageFeaturesNV & setShadingRateCoarseSampleOrder( VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shadingRateCoarseSampleOrder = shadingRateCoarseSampleOrder_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views as the underlying C struct, for passing to the C API.
+    operator VkPhysicalDeviceShadingRateImageFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShadingRateImageFeaturesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceShadingRateImageFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShadingRateImageFeaturesNV*>( this );
+    }
+
+    // Tuple-of-references over all members; used by operator== below.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, shadingRateImage, shadingRateCoarseSampleOrder );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceShadingRateImageFeaturesNV const & ) const = default;
+#else
+    // Memberwise equality; pNext is compared by pointer value, not deep contents.
+    bool operator==( PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shadingRateImage == rhs.shadingRateImage )
+          && ( shadingRateCoarseSampleOrder == rhs.shadingRateCoarseSampleOrder );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShadingRateImageFeaturesNV;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder = {};
+
+  };
+
+  // Compile-time map from the StructureType enumerant back to this C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShadingRateImageFeaturesNV>
+  {
+    using Type = PhysicalDeviceShadingRateImageFeaturesNV;
+  };
+
+  // C++ wrapper for VkPhysicalDeviceShadingRateImagePropertiesNV.
+  // Note: no setters are generated for this struct (properties are filled in
+  // by queries rather than supplied by the application — presumably; the
+  // generator simply omits the setter section here).
+  struct PhysicalDeviceShadingRateImagePropertiesNV
+  {
+    using NativeType = VkPhysicalDeviceShadingRateImagePropertiesNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShadingRateImagePropertiesNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImagePropertiesNV(VULKAN_HPP_NAMESPACE::Extent2D shadingRateTexelSize_ = {}, uint32_t shadingRatePaletteSize_ = {}, uint32_t shadingRateMaxCoarseSamples_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), shadingRateTexelSize( shadingRateTexelSize_ ), shadingRatePaletteSize( shadingRatePaletteSize_ ), shadingRateMaxCoarseSamples( shadingRateMaxCoarseSamples_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImagePropertiesNV( PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct via the layout-identity reinterpret_cast.
+    PhysicalDeviceShadingRateImagePropertiesNV( VkPhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShadingRateImagePropertiesNV( *reinterpret_cast<PhysicalDeviceShadingRateImagePropertiesNV const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceShadingRateImagePropertiesNV & operator=( PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct.
+    PhysicalDeviceShadingRateImagePropertiesNV & operator=( VkPhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImagePropertiesNV const *>( &rhs );
+      return *this;
+    }
+
+
+    // Zero-cost views as the underlying C struct, for passing to the C API.
+    operator VkPhysicalDeviceShadingRateImagePropertiesNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShadingRateImagePropertiesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceShadingRateImagePropertiesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShadingRateImagePropertiesNV*>( this );
+    }
+
+    // Tuple-of-references over all members; used by operator== below.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Extent2D const &, uint32_t const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, shadingRateTexelSize, shadingRatePaletteSize, shadingRateMaxCoarseSamples );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceShadingRateImagePropertiesNV const & ) const = default;
+#else
+    // Memberwise equality; pNext is compared by pointer value, not deep contents.
+    bool operator==( PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shadingRateTexelSize == rhs.shadingRateTexelSize )
+          && ( shadingRatePaletteSize == rhs.shadingRatePaletteSize )
+          && ( shadingRateMaxCoarseSamples == rhs.shadingRateMaxCoarseSamples );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShadingRateImagePropertiesNV;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Extent2D shadingRateTexelSize = {};
+    uint32_t shadingRatePaletteSize = {};
+    uint32_t shadingRateMaxCoarseSamples = {};
+
+  };
+
+  // Compile-time map from the StructureType enumerant back to this C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShadingRateImagePropertiesNV>
+  {
+    using Type = PhysicalDeviceShadingRateImagePropertiesNV;
+  };
+
+  // C++ wrapper for VkPhysicalDeviceSparseImageFormatInfo2 (input struct:
+  // format/type/samples/usage/tiling, all with spec-conformant defaults).
+  // Unlike the feature structs above, pNext here is const void * — this
+  // struct is read, not written, by the implementation.
+  struct PhysicalDeviceSparseImageFormatInfo2
+  {
+    using NativeType = VkPhysicalDeviceSparseImageFormatInfo2;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSparseImageFormatInfo2;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseImageFormatInfo2(VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::ImageType type_ = VULKAN_HPP_NAMESPACE::ImageType::e1D, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), format( format_ ), type( type_ ), samples( samples_ ), usage( usage_ ), tiling( tiling_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseImageFormatInfo2( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct via the layout-identity reinterpret_cast.
+    PhysicalDeviceSparseImageFormatInfo2( VkPhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceSparseImageFormatInfo2( *reinterpret_cast<PhysicalDeviceSparseImageFormatInfo2 const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceSparseImageFormatInfo2 & operator=( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct.
+    PhysicalDeviceSparseImageFormatInfo2 & operator=( VkPhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable (builder-style) setters: each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
+    {
+      format = format_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 & setType( VULKAN_HPP_NAMESPACE::ImageType type_ ) VULKAN_HPP_NOEXCEPT
+    {
+      type = type_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT
+    {
+      samples = samples_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      usage = usage_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 & setTiling( VULKAN_HPP_NAMESPACE::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tiling = tiling_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views as the underlying C struct, for passing to the C API.
+    operator VkPhysicalDeviceSparseImageFormatInfo2 const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2*>( this );
+    }
+
+    operator VkPhysicalDeviceSparseImageFormatInfo2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceSparseImageFormatInfo2*>( this );
+    }
+
+    // Tuple-of-references over all members; used by operator== below.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::ImageType const &, VULKAN_HPP_NAMESPACE::SampleCountFlagBits const &, VULKAN_HPP_NAMESPACE::ImageUsageFlags const &, VULKAN_HPP_NAMESPACE::ImageTiling const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, format, type, samples, usage, tiling );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceSparseImageFormatInfo2 const & ) const = default;
+#else
+    // Memberwise equality; pNext is compared by pointer value, not deep contents.
+    bool operator==( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( format == rhs.format )
+          && ( type == rhs.type )
+          && ( samples == rhs.samples )
+          && ( usage == rhs.usage )
+          && ( tiling == rhs.tiling );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSparseImageFormatInfo2;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    VULKAN_HPP_NAMESPACE::ImageType type = VULKAN_HPP_NAMESPACE::ImageType::e1D;
+    VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
+    VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {};
+    VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal;
+
+  };
+
+  // Compile-time map from the StructureType enumerant back to this C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceSparseImageFormatInfo2>
+  {
+    using Type = PhysicalDeviceSparseImageFormatInfo2;
+  };
+  // Alias kept for source compatibility with the original KHR extension name.
+  using PhysicalDeviceSparseImageFormatInfo2KHR = PhysicalDeviceSparseImageFormatInfo2;
+
+  // C++ wrapper for VkPhysicalDeviceSubgroupProperties (subgroupSize,
+  // supportedStages, supportedOperations, quadOperationsInAllStages).
+  // Note: no setters are generated for this struct (the generator omits the
+  // setter section here, as it does for the other *Properties structs above).
+  struct PhysicalDeviceSubgroupProperties
+  {
+    using NativeType = VkPhysicalDeviceSubgroupProperties;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubgroupProperties;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupProperties(uint32_t subgroupSize_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedStages_ = {}, VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags supportedOperations_ = {}, VULKAN_HPP_NAMESPACE::Bool32 quadOperationsInAllStages_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), subgroupSize( subgroupSize_ ), supportedStages( supportedStages_ ), supportedOperations( supportedOperations_ ), quadOperationsInAllStages( quadOperationsInAllStages_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupProperties( PhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct via the layout-identity reinterpret_cast.
+    PhysicalDeviceSubgroupProperties( VkPhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceSubgroupProperties( *reinterpret_cast<PhysicalDeviceSubgroupProperties const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceSubgroupProperties & operator=( PhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct.
+    PhysicalDeviceSubgroupProperties & operator=( VkPhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupProperties const *>( &rhs );
+      return *this;
+    }
+
+
+    // Zero-cost views as the underlying C struct, for passing to the C API.
+    operator VkPhysicalDeviceSubgroupProperties const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceSubgroupProperties*>( this );
+    }
+
+    operator VkPhysicalDeviceSubgroupProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceSubgroupProperties*>( this );
+    }
+
+    // Tuple-of-references over all members; used by operator== below.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ShaderStageFlags const &, VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, subgroupSize, supportedStages, supportedOperations, quadOperationsInAllStages );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceSubgroupProperties const & ) const = default;
+#else
+    // Memberwise equality; pNext is compared by pointer value, not deep contents.
+    bool operator==( PhysicalDeviceSubgroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( subgroupSize == rhs.subgroupSize )
+          && ( supportedStages == rhs.supportedStages )
+          && ( supportedOperations == rhs.supportedOperations )
+          && ( quadOperationsInAllStages == rhs.quadOperationsInAllStages );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceSubgroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupProperties;
+    void * pNext = {};
+    uint32_t subgroupSize = {};
+    VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedStages = {};
+    VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags supportedOperations = {};
+    VULKAN_HPP_NAMESPACE::Bool32 quadOperationsInAllStages = {};
+
+  };
+
+  // Compile-time map from the StructureType enumerant back to this C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceSubgroupProperties>
+  {
+    using Type = PhysicalDeviceSubgroupProperties;
+  };
+
+  struct PhysicalDeviceSubgroupSizeControlFeatures
+  {
+    using NativeType = VkPhysicalDeviceSubgroupSizeControlFeatures;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubgroupSizeControlFeatures;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlFeatures(VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ = {}, VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), subgroupSizeControl( subgroupSizeControl_ ), computeFullSubgroups( computeFullSubgroups_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlFeatures( PhysicalDeviceSubgroupSizeControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceSubgroupSizeControlFeatures( VkPhysicalDeviceSubgroupSizeControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceSubgroupSizeControlFeatures( *reinterpret_cast<PhysicalDeviceSubgroupSizeControlFeatures const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceSubgroupSizeControlFeatures & operator=( PhysicalDeviceSubgroupSizeControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceSubgroupSizeControlFeatures & operator=( VkPhysicalDeviceSubgroupSizeControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeatures const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubgroupSizeControlFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubgroupSizeControlFeatures & setSubgroupSizeControl( VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subgroupSizeControl = subgroupSizeControl_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubgroupSizeControlFeatures & setComputeFullSubgroups( VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ ) VULKAN_HPP_NOEXCEPT
+    {
+      computeFullSubgroups = computeFullSubgroups_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceSubgroupSizeControlFeatures const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceSubgroupSizeControlFeatures*>( this );
+    }
+
+    operator VkPhysicalDeviceSubgroupSizeControlFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceSubgroupSizeControlFeatures*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, subgroupSizeControl, computeFullSubgroups );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceSubgroupSizeControlFeatures const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceSubgroupSizeControlFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( subgroupSizeControl == rhs.subgroupSizeControl )
+          && ( computeFullSubgroups == rhs.computeFullSubgroups );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceSubgroupSizeControlFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupSizeControlFeatures;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl = {};
+    VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceSubgroupSizeControlFeatures>
+  {
+    using Type = PhysicalDeviceSubgroupSizeControlFeatures;
+  };
+  using PhysicalDeviceSubgroupSizeControlFeaturesEXT = PhysicalDeviceSubgroupSizeControlFeatures;
+
+  struct PhysicalDeviceSubgroupSizeControlProperties
+  {
+    using NativeType = VkPhysicalDeviceSubgroupSizeControlProperties;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubgroupSizeControlProperties;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlProperties(uint32_t minSubgroupSize_ = {}, uint32_t maxSubgroupSize_ = {}, uint32_t maxComputeWorkgroupSubgroups_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), minSubgroupSize( minSubgroupSize_ ), maxSubgroupSize( maxSubgroupSize_ ), maxComputeWorkgroupSubgroups( maxComputeWorkgroupSubgroups_ ), requiredSubgroupSizeStages( requiredSubgroupSizeStages_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlProperties( PhysicalDeviceSubgroupSizeControlProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceSubgroupSizeControlProperties( VkPhysicalDeviceSubgroupSizeControlProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceSubgroupSizeControlProperties( *reinterpret_cast<PhysicalDeviceSubgroupSizeControlProperties const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceSubgroupSizeControlProperties & operator=( PhysicalDeviceSubgroupSizeControlProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceSubgroupSizeControlProperties & operator=( VkPhysicalDeviceSubgroupSizeControlProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlProperties const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceSubgroupSizeControlProperties const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceSubgroupSizeControlProperties*>( this );
+    }
+
+    operator VkPhysicalDeviceSubgroupSizeControlProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceSubgroupSizeControlProperties*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ShaderStageFlags const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, minSubgroupSize, maxSubgroupSize, maxComputeWorkgroupSubgroups, requiredSubgroupSizeStages );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceSubgroupSizeControlProperties const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceSubgroupSizeControlProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( minSubgroupSize == rhs.minSubgroupSize )
+          && ( maxSubgroupSize == rhs.maxSubgroupSize )
+          && ( maxComputeWorkgroupSubgroups == rhs.maxComputeWorkgroupSubgroups )
+          && ( requiredSubgroupSizeStages == rhs.requiredSubgroupSizeStages );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceSubgroupSizeControlProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupSizeControlProperties;
+    void * pNext = {};
+    uint32_t minSubgroupSize = {};
+    uint32_t maxSubgroupSize = {};
+    uint32_t maxComputeWorkgroupSubgroups = {};
+    VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceSubgroupSizeControlProperties>
+  {
+    using Type = PhysicalDeviceSubgroupSizeControlProperties;
+  };
+  using PhysicalDeviceSubgroupSizeControlPropertiesEXT = PhysicalDeviceSubgroupSizeControlProperties;
+
+  struct PhysicalDeviceSubpassMergeFeedbackFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubpassMergeFeedbackFeaturesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceSubpassMergeFeedbackFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 subpassMergeFeedback_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), subpassMergeFeedback( subpassMergeFeedback_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSubpassMergeFeedbackFeaturesEXT( PhysicalDeviceSubpassMergeFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceSubpassMergeFeedbackFeaturesEXT( VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceSubpassMergeFeedbackFeaturesEXT( *reinterpret_cast<PhysicalDeviceSubpassMergeFeedbackFeaturesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceSubpassMergeFeedbackFeaturesEXT & operator=( PhysicalDeviceSubpassMergeFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceSubpassMergeFeedbackFeaturesEXT & operator=( VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassMergeFeedbackFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubpassMergeFeedbackFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubpassMergeFeedbackFeaturesEXT & setSubpassMergeFeedback( VULKAN_HPP_NAMESPACE::Bool32 subpassMergeFeedback_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subpassMergeFeedback = subpassMergeFeedback_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, subpassMergeFeedback );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceSubpassMergeFeedbackFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceSubpassMergeFeedbackFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( subpassMergeFeedback == rhs.subpassMergeFeedback );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceSubpassMergeFeedbackFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubpassMergeFeedbackFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 subpassMergeFeedback = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceSubpassMergeFeedbackFeaturesEXT>
+  {
+    using Type = PhysicalDeviceSubpassMergeFeedbackFeaturesEXT;
+  };
+
+  struct PhysicalDeviceSubpassShadingFeaturesHUAWEI
+  {
+    using NativeType = VkPhysicalDeviceSubpassShadingFeaturesHUAWEI;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubpassShadingFeaturesHUAWEI;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceSubpassShadingFeaturesHUAWEI(VULKAN_HPP_NAMESPACE::Bool32 subpassShading_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), subpassShading( subpassShading_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSubpassShadingFeaturesHUAWEI( PhysicalDeviceSubpassShadingFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceSubpassShadingFeaturesHUAWEI( VkPhysicalDeviceSubpassShadingFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceSubpassShadingFeaturesHUAWEI( *reinterpret_cast<PhysicalDeviceSubpassShadingFeaturesHUAWEI const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceSubpassShadingFeaturesHUAWEI & operator=( PhysicalDeviceSubpassShadingFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceSubpassShadingFeaturesHUAWEI & operator=( VkPhysicalDeviceSubpassShadingFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingFeaturesHUAWEI const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubpassShadingFeaturesHUAWEI & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubpassShadingFeaturesHUAWEI & setSubpassShading( VULKAN_HPP_NAMESPACE::Bool32 subpassShading_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subpassShading = subpassShading_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceSubpassShadingFeaturesHUAWEI const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceSubpassShadingFeaturesHUAWEI*>( this );
+    }
+
+    operator VkPhysicalDeviceSubpassShadingFeaturesHUAWEI &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceSubpassShadingFeaturesHUAWEI*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, subpassShading );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceSubpassShadingFeaturesHUAWEI const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceSubpassShadingFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( subpassShading == rhs.subpassShading );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceSubpassShadingFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubpassShadingFeaturesHUAWEI;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 subpassShading = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceSubpassShadingFeaturesHUAWEI>
+  {
+    using Type = PhysicalDeviceSubpassShadingFeaturesHUAWEI;
+  };
+
+  struct PhysicalDeviceSubpassShadingPropertiesHUAWEI
+  {
+    using NativeType = VkPhysicalDeviceSubpassShadingPropertiesHUAWEI;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubpassShadingPropertiesHUAWEI;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceSubpassShadingPropertiesHUAWEI(uint32_t maxSubpassShadingWorkgroupSizeAspectRatio_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), maxSubpassShadingWorkgroupSizeAspectRatio( maxSubpassShadingWorkgroupSizeAspectRatio_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSubpassShadingPropertiesHUAWEI( PhysicalDeviceSubpassShadingPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceSubpassShadingPropertiesHUAWEI( VkPhysicalDeviceSubpassShadingPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceSubpassShadingPropertiesHUAWEI( *reinterpret_cast<PhysicalDeviceSubpassShadingPropertiesHUAWEI const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceSubpassShadingPropertiesHUAWEI & operator=( PhysicalDeviceSubpassShadingPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceSubpassShadingPropertiesHUAWEI & operator=( VkPhysicalDeviceSubpassShadingPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingPropertiesHUAWEI const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceSubpassShadingPropertiesHUAWEI const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceSubpassShadingPropertiesHUAWEI*>( this );
+    }
+
+    operator VkPhysicalDeviceSubpassShadingPropertiesHUAWEI &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceSubpassShadingPropertiesHUAWEI*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, maxSubpassShadingWorkgroupSizeAspectRatio );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceSubpassShadingPropertiesHUAWEI const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceSubpassShadingPropertiesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxSubpassShadingWorkgroupSizeAspectRatio == rhs.maxSubpassShadingWorkgroupSizeAspectRatio );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceSubpassShadingPropertiesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubpassShadingPropertiesHUAWEI;
+    void * pNext = {};
+    uint32_t maxSubpassShadingWorkgroupSizeAspectRatio = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceSubpassShadingPropertiesHUAWEI>
+  {
+    using Type = PhysicalDeviceSubpassShadingPropertiesHUAWEI;
+  };
+
+  struct PhysicalDeviceSurfaceInfo2KHR
+  {
+    using NativeType = VkPhysicalDeviceSurfaceInfo2KHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSurfaceInfo2KHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceSurfaceInfo2KHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), surface( surface_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSurfaceInfo2KHR( PhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceSurfaceInfo2KHR( VkPhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceSurfaceInfo2KHR( *reinterpret_cast<PhysicalDeviceSurfaceInfo2KHR const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceSurfaceInfo2KHR & operator=( PhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceSurfaceInfo2KHR & operator=( VkPhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSurfaceInfo2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSurfaceInfo2KHR & setSurface( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ ) VULKAN_HPP_NOEXCEPT
+    {
+      surface = surface_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceSurfaceInfo2KHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( this );
+    }
+
+    operator VkPhysicalDeviceSurfaceInfo2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceSurfaceInfo2KHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SurfaceKHR const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, surface );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceSurfaceInfo2KHR const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceSurfaceInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( surface == rhs.surface );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceSurfaceInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSurfaceInfo2KHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceSurfaceInfo2KHR>
+  {
+    using Type = PhysicalDeviceSurfaceInfo2KHR;
+  };
+
+  struct PhysicalDeviceSwapchainMaintenance1FeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSwapchainMaintenance1FeaturesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceSwapchainMaintenance1FeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 swapchainMaintenance1_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), swapchainMaintenance1( swapchainMaintenance1_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSwapchainMaintenance1FeaturesEXT( PhysicalDeviceSwapchainMaintenance1FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceSwapchainMaintenance1FeaturesEXT( VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceSwapchainMaintenance1FeaturesEXT( *reinterpret_cast<PhysicalDeviceSwapchainMaintenance1FeaturesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceSwapchainMaintenance1FeaturesEXT & operator=( PhysicalDeviceSwapchainMaintenance1FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceSwapchainMaintenance1FeaturesEXT & operator=( VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSwapchainMaintenance1FeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSwapchainMaintenance1FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSwapchainMaintenance1FeaturesEXT & setSwapchainMaintenance1( VULKAN_HPP_NAMESPACE::Bool32 swapchainMaintenance1_ ) VULKAN_HPP_NOEXCEPT
+    {
+      swapchainMaintenance1 = swapchainMaintenance1_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, swapchainMaintenance1 );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceSwapchainMaintenance1FeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceSwapchainMaintenance1FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( swapchainMaintenance1 == rhs.swapchainMaintenance1 );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceSwapchainMaintenance1FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSwapchainMaintenance1FeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 swapchainMaintenance1 = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceSwapchainMaintenance1FeaturesEXT>
+  {
+    using Type = PhysicalDeviceSwapchainMaintenance1FeaturesEXT;
+  };
+
+  struct PhysicalDeviceSynchronization2Features
+  {
+    using NativeType = VkPhysicalDeviceSynchronization2Features;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSynchronization2Features;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceSynchronization2Features(VULKAN_HPP_NAMESPACE::Bool32 synchronization2_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), synchronization2( synchronization2_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSynchronization2Features( PhysicalDeviceSynchronization2Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceSynchronization2Features( VkPhysicalDeviceSynchronization2Features const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceSynchronization2Features( *reinterpret_cast<PhysicalDeviceSynchronization2Features const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceSynchronization2Features & operator=( PhysicalDeviceSynchronization2Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceSynchronization2Features & operator=( VkPhysicalDeviceSynchronization2Features const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSynchronization2Features const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSynchronization2Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSynchronization2Features & setSynchronization2( VULKAN_HPP_NAMESPACE::Bool32 synchronization2_ ) VULKAN_HPP_NOEXCEPT
+    {
+      synchronization2 = synchronization2_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceSynchronization2Features const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceSynchronization2Features*>( this );
+    }
+
+    operator VkPhysicalDeviceSynchronization2Features &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceSynchronization2Features*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, synchronization2 );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceSynchronization2Features const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceSynchronization2Features const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( synchronization2 == rhs.synchronization2 );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceSynchronization2Features const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSynchronization2Features;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 synchronization2 = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceSynchronization2Features>
+  {
+    using Type = PhysicalDeviceSynchronization2Features;
+  };
+  using PhysicalDeviceSynchronization2FeaturesKHR = PhysicalDeviceSynchronization2Features;
+
+  // C++ counterpart of VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT (see NativeType).
+  // The converting constructor, converting assignment, and conversion operators all
+  // reinterpret_cast between the two, so this struct must stay layout-identical to the
+  // C struct; do not add, remove, or reorder members.
+  struct PhysicalDeviceTexelBufferAlignmentFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), texelBufferAlignment( texelBufferAlignment_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentFeaturesEXT( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the C++ wrapper (layout must match).
+    PhysicalDeviceTexelBufferAlignmentFeaturesEXT( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceTexelBufferAlignmentFeaturesEXT( *reinterpret_cast<PhysicalDeviceTexelBufferAlignmentFeaturesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceTexelBufferAlignmentFeaturesEXT & operator=( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceTexelBufferAlignmentFeaturesEXT & operator=( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters (builder style); each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTexelBufferAlignmentFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTexelBufferAlignmentFeaturesEXT & setTexelBufferAlignment( VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment_ ) VULKAN_HPP_NOEXCEPT
+    {
+      texelBufferAlignment = texelBufferAlignment_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, texelBufferAlignment );
+    }
+#endif
+
+
+    // Comparison: defaulted <=> when available, otherwise memberwise == (via reflect()
+    // when VULKAN_HPP_USE_REFLECT is set) and a negating !=. Note pNext is compared by
+    // pointer value, not by chain contents.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( texelBufferAlignment == rhs.texelBufferAlignment );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment = {};
+
+  };
+
+  // Maps the StructureType enum value back to this C++ type (compile-time lookup).
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT>
+  {
+    using Type = PhysicalDeviceTexelBufferAlignmentFeaturesEXT;
+  };
+
+  // C++ counterpart of VkPhysicalDeviceTexelBufferAlignmentProperties (see NativeType).
+  // Layout must stay identical to the C struct: all conversions below use
+  // reinterpret_cast. Unlike the *Features structs in this file, no setter section is
+  // generated here — presumably because properties structs are query outputs; verify
+  // against the Vulkan-Hpp generator if this matters.
+  struct PhysicalDeviceTexelBufferAlignmentProperties
+  {
+    using NativeType = VkPhysicalDeviceTexelBufferAlignmentProperties;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTexelBufferAlignmentProperties;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentProperties(VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes_ = {}, VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), storageTexelBufferOffsetAlignmentBytes( storageTexelBufferOffsetAlignmentBytes_ ), storageTexelBufferOffsetSingleTexelAlignment( storageTexelBufferOffsetSingleTexelAlignment_ ), uniformTexelBufferOffsetAlignmentBytes( uniformTexelBufferOffsetAlignmentBytes_ ), uniformTexelBufferOffsetSingleTexelAlignment( uniformTexelBufferOffsetSingleTexelAlignment_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentProperties( PhysicalDeviceTexelBufferAlignmentProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the C++ wrapper (layout must match).
+    PhysicalDeviceTexelBufferAlignmentProperties( VkPhysicalDeviceTexelBufferAlignmentProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceTexelBufferAlignmentProperties( *reinterpret_cast<PhysicalDeviceTexelBufferAlignmentProperties const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceTexelBufferAlignmentProperties & operator=( PhysicalDeviceTexelBufferAlignmentProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceTexelBufferAlignmentProperties & operator=( VkPhysicalDeviceTexelBufferAlignmentProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentProperties const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceTexelBufferAlignmentProperties const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceTexelBufferAlignmentProperties*>( this );
+    }
+
+    operator VkPhysicalDeviceTexelBufferAlignmentProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceTexelBufferAlignmentProperties*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, storageTexelBufferOffsetAlignmentBytes, storageTexelBufferOffsetSingleTexelAlignment, uniformTexelBufferOffsetAlignmentBytes, uniformTexelBufferOffsetSingleTexelAlignment );
+    }
+#endif
+
+
+    // Comparison: defaulted <=> when available, otherwise memberwise ==/!=
+    // (pNext is compared by pointer value, not by chain contents).
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceTexelBufferAlignmentProperties const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceTexelBufferAlignmentProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( storageTexelBufferOffsetAlignmentBytes == rhs.storageTexelBufferOffsetAlignmentBytes )
+          && ( storageTexelBufferOffsetSingleTexelAlignment == rhs.storageTexelBufferOffsetSingleTexelAlignment )
+          && ( uniformTexelBufferOffsetAlignmentBytes == rhs.uniformTexelBufferOffsetAlignmentBytes )
+          && ( uniformTexelBufferOffsetSingleTexelAlignment == rhs.uniformTexelBufferOffsetSingleTexelAlignment );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceTexelBufferAlignmentProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTexelBufferAlignmentProperties;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes = {};
+    VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes = {};
+    VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment = {};
+
+  };
+
+  // Maps the StructureType enum value back to this C++ type (compile-time lookup).
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceTexelBufferAlignmentProperties>
+  {
+    using Type = PhysicalDeviceTexelBufferAlignmentProperties;
+  };
+  // Backward-compatible alias for the EXT-suffixed name.
+  using PhysicalDeviceTexelBufferAlignmentPropertiesEXT = PhysicalDeviceTexelBufferAlignmentProperties;
+
+  // C++ counterpart of VkPhysicalDeviceTextureCompressionASTCHDRFeatures (see NativeType).
+  // Must remain layout-identical to the C struct: the converting constructor, converting
+  // assignment, and conversion operators all use reinterpret_cast.
+  struct PhysicalDeviceTextureCompressionASTCHDRFeatures
+  {
+    using NativeType = VkPhysicalDeviceTextureCompressionASTCHDRFeatures;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeatures;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceTextureCompressionASTCHDRFeatures(VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), textureCompressionASTC_HDR( textureCompressionASTC_HDR_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceTextureCompressionASTCHDRFeatures( PhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the C++ wrapper (layout must match).
+    PhysicalDeviceTextureCompressionASTCHDRFeatures( VkPhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceTextureCompressionASTCHDRFeatures( *reinterpret_cast<PhysicalDeviceTextureCompressionASTCHDRFeatures const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceTextureCompressionASTCHDRFeatures & operator=( PhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceTextureCompressionASTCHDRFeatures & operator=( VkPhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeatures const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters (builder style); each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTextureCompressionASTCHDRFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTextureCompressionASTCHDRFeatures & setTextureCompressionASTC_HDR( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ ) VULKAN_HPP_NOEXCEPT
+    {
+      textureCompressionASTC_HDR = textureCompressionASTC_HDR_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceTextureCompressionASTCHDRFeatures const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceTextureCompressionASTCHDRFeatures*>( this );
+    }
+
+    operator VkPhysicalDeviceTextureCompressionASTCHDRFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceTextureCompressionASTCHDRFeatures*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, textureCompressionASTC_HDR );
+    }
+#endif
+
+
+    // Comparison: defaulted <=> when available, otherwise memberwise ==/!=
+    // (pNext is compared by pointer value, not by chain contents).
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceTextureCompressionASTCHDRFeatures const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( textureCompressionASTC_HDR == rhs.textureCompressionASTC_HDR );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeatures;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR = {};
+
+  };
+
+  // Maps the StructureType enum value back to this C++ type (compile-time lookup).
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeatures>
+  {
+    using Type = PhysicalDeviceTextureCompressionASTCHDRFeatures;
+  };
+  // Backward-compatible alias for the EXT-suffixed name.
+  using PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT = PhysicalDeviceTextureCompressionASTCHDRFeatures;
+
+  // C++ counterpart of VkPhysicalDeviceTilePropertiesFeaturesQCOM (see NativeType).
+  // Must remain layout-identical to the C struct: the converting constructor, converting
+  // assignment, and conversion operators all use reinterpret_cast.
+  struct PhysicalDeviceTilePropertiesFeaturesQCOM
+  {
+    using NativeType = VkPhysicalDeviceTilePropertiesFeaturesQCOM;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTilePropertiesFeaturesQCOM;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceTilePropertiesFeaturesQCOM(VULKAN_HPP_NAMESPACE::Bool32 tileProperties_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), tileProperties( tileProperties_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceTilePropertiesFeaturesQCOM( PhysicalDeviceTilePropertiesFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the C++ wrapper (layout must match).
+    PhysicalDeviceTilePropertiesFeaturesQCOM( VkPhysicalDeviceTilePropertiesFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceTilePropertiesFeaturesQCOM( *reinterpret_cast<PhysicalDeviceTilePropertiesFeaturesQCOM const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceTilePropertiesFeaturesQCOM & operator=( PhysicalDeviceTilePropertiesFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceTilePropertiesFeaturesQCOM & operator=( VkPhysicalDeviceTilePropertiesFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTilePropertiesFeaturesQCOM const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters (builder style); each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTilePropertiesFeaturesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTilePropertiesFeaturesQCOM & setTileProperties( VULKAN_HPP_NAMESPACE::Bool32 tileProperties_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tileProperties = tileProperties_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceTilePropertiesFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceTilePropertiesFeaturesQCOM*>( this );
+    }
+
+    operator VkPhysicalDeviceTilePropertiesFeaturesQCOM &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceTilePropertiesFeaturesQCOM*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, tileProperties );
+    }
+#endif
+
+
+    // Comparison: defaulted <=> when available, otherwise memberwise ==/!=
+    // (pNext is compared by pointer value, not by chain contents).
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceTilePropertiesFeaturesQCOM const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceTilePropertiesFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( tileProperties == rhs.tileProperties );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceTilePropertiesFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTilePropertiesFeaturesQCOM;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 tileProperties = {};
+
+  };
+
+  // Maps the StructureType enum value back to this C++ type (compile-time lookup).
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceTilePropertiesFeaturesQCOM>
+  {
+    using Type = PhysicalDeviceTilePropertiesFeaturesQCOM;
+  };
+
+  // C++ counterpart of VkPhysicalDeviceTimelineSemaphoreFeatures (see NativeType).
+  // Must remain layout-identical to the C struct: the converting constructor, converting
+  // assignment, and conversion operators all use reinterpret_cast.
+  struct PhysicalDeviceTimelineSemaphoreFeatures
+  {
+    using NativeType = VkPhysicalDeviceTimelineSemaphoreFeatures;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTimelineSemaphoreFeatures;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreFeatures(VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), timelineSemaphore( timelineSemaphore_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreFeatures( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the C++ wrapper (layout must match).
+    PhysicalDeviceTimelineSemaphoreFeatures( VkPhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceTimelineSemaphoreFeatures( *reinterpret_cast<PhysicalDeviceTimelineSemaphoreFeatures const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceTimelineSemaphoreFeatures & operator=( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceTimelineSemaphoreFeatures & operator=( VkPhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeatures const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters (builder style); each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTimelineSemaphoreFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTimelineSemaphoreFeatures & setTimelineSemaphore( VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ ) VULKAN_HPP_NOEXCEPT
+    {
+      timelineSemaphore = timelineSemaphore_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceTimelineSemaphoreFeatures const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceTimelineSemaphoreFeatures*>( this );
+    }
+
+    operator VkPhysicalDeviceTimelineSemaphoreFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceTimelineSemaphoreFeatures*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, timelineSemaphore );
+    }
+#endif
+
+
+    // Comparison: defaulted <=> when available, otherwise memberwise ==/!=
+    // (pNext is compared by pointer value, not by chain contents).
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceTimelineSemaphoreFeatures const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( timelineSemaphore == rhs.timelineSemaphore );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTimelineSemaphoreFeatures;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore = {};
+
+  };
+
+  // Maps the StructureType enum value back to this C++ type (compile-time lookup).
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceTimelineSemaphoreFeatures>
+  {
+    using Type = PhysicalDeviceTimelineSemaphoreFeatures;
+  };
+  // Backward-compatible alias for the KHR-suffixed name.
+  using PhysicalDeviceTimelineSemaphoreFeaturesKHR = PhysicalDeviceTimelineSemaphoreFeatures;
+
+  // C++ counterpart of VkPhysicalDeviceTimelineSemaphoreProperties (see NativeType).
+  // Layout must stay identical to the C struct: all conversions below use
+  // reinterpret_cast. No setter section is generated here — presumably because
+  // properties structs are query outputs; verify against the Vulkan-Hpp generator.
+  struct PhysicalDeviceTimelineSemaphoreProperties
+  {
+    using NativeType = VkPhysicalDeviceTimelineSemaphoreProperties;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTimelineSemaphoreProperties;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreProperties(uint64_t maxTimelineSemaphoreValueDifference_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), maxTimelineSemaphoreValueDifference( maxTimelineSemaphoreValueDifference_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreProperties( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the C++ wrapper (layout must match).
+    PhysicalDeviceTimelineSemaphoreProperties( VkPhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceTimelineSemaphoreProperties( *reinterpret_cast<PhysicalDeviceTimelineSemaphoreProperties const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceTimelineSemaphoreProperties & operator=( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceTimelineSemaphoreProperties & operator=( VkPhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreProperties const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceTimelineSemaphoreProperties const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceTimelineSemaphoreProperties*>( this );
+    }
+
+    operator VkPhysicalDeviceTimelineSemaphoreProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceTimelineSemaphoreProperties*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint64_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, maxTimelineSemaphoreValueDifference );
+    }
+#endif
+
+
+    // Comparison: defaulted <=> when available, otherwise memberwise ==/!=
+    // (pNext is compared by pointer value, not by chain contents).
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceTimelineSemaphoreProperties const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxTimelineSemaphoreValueDifference == rhs.maxTimelineSemaphoreValueDifference );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTimelineSemaphoreProperties;
+    void * pNext = {};
+    uint64_t maxTimelineSemaphoreValueDifference = {};
+
+  };
+
+  // Maps the StructureType enum value back to this C++ type (compile-time lookup).
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceTimelineSemaphoreProperties>
+  {
+    using Type = PhysicalDeviceTimelineSemaphoreProperties;
+  };
+  // Backward-compatible alias for the KHR-suffixed name.
+  using PhysicalDeviceTimelineSemaphorePropertiesKHR = PhysicalDeviceTimelineSemaphoreProperties;
+
+  // C++ counterpart of VkPhysicalDeviceToolProperties (see NativeType). The char-array
+  // fields use ArrayWrapper1D, which must be layout-compatible with the raw C arrays:
+  // every conversion below uses reinterpret_cast. Constructors are only
+  // VULKAN_HPP_CONSTEXPR_14 (not plain CONSTEXPR) — copying the arrays needs C++14
+  // relaxed constexpr. No setter section is generated — presumably because this is a
+  // query-output struct; verify against the Vulkan-Hpp generator.
+  struct PhysicalDeviceToolProperties
+  {
+    using NativeType = VkPhysicalDeviceToolProperties;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceToolProperties;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceToolProperties(std::array<char,VK_MAX_EXTENSION_NAME_SIZE> const & name_ = {}, std::array<char,VK_MAX_EXTENSION_NAME_SIZE> const & version_ = {}, VULKAN_HPP_NAMESPACE::ToolPurposeFlags purposes_ = {}, std::array<char,VK_MAX_DESCRIPTION_SIZE> const & description_ = {}, std::array<char,VK_MAX_EXTENSION_NAME_SIZE> const & layer_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), name( name_ ), version( version_ ), purposes( purposes_ ), description( description_ ), layer( layer_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceToolProperties( PhysicalDeviceToolProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the C++ wrapper (layout must match).
+    PhysicalDeviceToolProperties( VkPhysicalDeviceToolProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceToolProperties( *reinterpret_cast<PhysicalDeviceToolProperties const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceToolProperties & operator=( PhysicalDeviceToolProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceToolProperties & operator=( VkPhysicalDeviceToolProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceToolProperties const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceToolProperties*>( this );
+    }
+
+    operator VkPhysicalDeviceToolProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceToolProperties*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> const &, VULKAN_HPP_NAMESPACE::ToolPurposeFlags const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, name, version, purposes, description, layer );
+    }
+#endif
+
+
+    // Comparison: defaulted <=> when available, otherwise memberwise ==/!=
+    // (pNext is compared by pointer value, not by chain contents).
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceToolProperties const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceToolProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( name == rhs.name )
+          && ( version == rhs.version )
+          && ( purposes == rhs.purposes )
+          && ( description == rhs.description )
+          && ( layer == rhs.layer );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceToolProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceToolProperties;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> name = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> version = {};
+    VULKAN_HPP_NAMESPACE::ToolPurposeFlags purposes = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> layer = {};
+
+  };
+
+  // Maps the StructureType enum value back to this C++ type (compile-time lookup).
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceToolProperties>
+  {
+    using Type = PhysicalDeviceToolProperties;
+  };
+  // Backward-compatible alias for the EXT-suffixed name.
+  using PhysicalDeviceToolPropertiesEXT = PhysicalDeviceToolProperties;
+
+  // C++ counterpart of VkPhysicalDeviceTransformFeedbackFeaturesEXT (see NativeType).
+  // Must remain layout-identical to the C struct: the converting constructor, converting
+  // assignment, and conversion operators all use reinterpret_cast.
+  struct PhysicalDeviceTransformFeedbackFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceTransformFeedbackFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 transformFeedback_ = {}, VULKAN_HPP_NAMESPACE::Bool32 geometryStreams_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), transformFeedback( transformFeedback_ ), geometryStreams( geometryStreams_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackFeaturesEXT( PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the C++ wrapper (layout must match).
+    PhysicalDeviceTransformFeedbackFeaturesEXT( VkPhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceTransformFeedbackFeaturesEXT( *reinterpret_cast<PhysicalDeviceTransformFeedbackFeaturesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceTransformFeedbackFeaturesEXT & operator=( PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceTransformFeedbackFeaturesEXT & operator=( VkPhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters (builder style); each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTransformFeedbackFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTransformFeedbackFeaturesEXT & setTransformFeedback( VULKAN_HPP_NAMESPACE::Bool32 transformFeedback_ ) VULKAN_HPP_NOEXCEPT
+    {
+      transformFeedback = transformFeedback_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTransformFeedbackFeaturesEXT & setGeometryStreams( VULKAN_HPP_NAMESPACE::Bool32 geometryStreams_ ) VULKAN_HPP_NOEXCEPT
+    {
+      geometryStreams = geometryStreams_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceTransformFeedbackFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceTransformFeedbackFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceTransformFeedbackFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceTransformFeedbackFeaturesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, transformFeedback, geometryStreams );
+    }
+#endif
+
+
+    // Comparison: defaulted <=> when available, otherwise memberwise ==/!=
+    // (pNext is compared by pointer value, not by chain contents).
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceTransformFeedbackFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( transformFeedback == rhs.transformFeedback )
+          && ( geometryStreams == rhs.geometryStreams );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 transformFeedback = {};
+    VULKAN_HPP_NAMESPACE::Bool32 geometryStreams = {};
+
+  };
+
+  // Maps the StructureType enum value back to this C++ type (compile-time lookup).
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT>
+  {
+    using Type = PhysicalDeviceTransformFeedbackFeaturesEXT;
+  };
+
+  // C++ wrapper over VkPhysicalDeviceTransformFeedbackPropertiesEXT
+  // (VK_EXT_transform_feedback device limits). This is a properties struct:
+  // no member setters are generated for it (unlike the *Features structs
+  // below). Layout-compatibility with the C struct is relied upon by the
+  // reinterpret_cast-based conversions and assignment below.
+  struct PhysicalDeviceTransformFeedbackPropertiesEXT
+  {
+    using NativeType = VkPhysicalDeviceTransformFeedbackPropertiesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackPropertiesEXT(uint32_t maxTransformFeedbackStreams_ = {}, uint32_t maxTransformFeedbackBuffers_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize maxTransformFeedbackBufferSize_ = {}, uint32_t maxTransformFeedbackStreamDataSize_ = {}, uint32_t maxTransformFeedbackBufferDataSize_ = {}, uint32_t maxTransformFeedbackBufferDataStride_ = {}, VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackQueries_ = {}, VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackStreamsLinesTriangles_ = {}, VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackRasterizationStreamSelect_ = {}, VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackDraw_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), maxTransformFeedbackStreams( maxTransformFeedbackStreams_ ), maxTransformFeedbackBuffers( maxTransformFeedbackBuffers_ ), maxTransformFeedbackBufferSize( maxTransformFeedbackBufferSize_ ), maxTransformFeedbackStreamDataSize( maxTransformFeedbackStreamDataSize_ ), maxTransformFeedbackBufferDataSize( maxTransformFeedbackBufferDataSize_ ), maxTransformFeedbackBufferDataStride( maxTransformFeedbackBufferDataStride_ ), transformFeedbackQueries( transformFeedbackQueries_ ), transformFeedbackStreamsLinesTriangles( transformFeedbackStreamsLinesTriangles_ ), transformFeedbackRasterizationStreamSelect( transformFeedbackRasterizationStreamSelect_ ), transformFeedbackDraw( transformFeedbackDraw_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackPropertiesEXT( PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the wrapper type.
+    PhysicalDeviceTransformFeedbackPropertiesEXT( VkPhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceTransformFeedbackPropertiesEXT( *reinterpret_cast<PhysicalDeviceTransformFeedbackPropertiesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceTransformFeedbackPropertiesEXT & operator=( PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceTransformFeedbackPropertiesEXT & operator=( VkPhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+
+    // Zero-cost conversions to the C struct for passing into the C API.
+    operator VkPhysicalDeviceTransformFeedbackPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceTransformFeedbackPropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceTransformFeedbackPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceTransformFeedbackPropertiesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      // Exposes all members as a tuple of references (member-wise reflection).
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, maxTransformFeedbackStreams, maxTransformFeedbackBuffers, maxTransformFeedbackBufferSize, maxTransformFeedbackStreamDataSize, maxTransformFeedbackBufferDataSize, maxTransformFeedbackBufferDataStride, transformFeedbackQueries, transformFeedbackStreamsLinesTriangles, transformFeedbackRasterizationStreamSelect, transformFeedbackDraw );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceTransformFeedbackPropertiesEXT const & ) const = default;
+#else
+    // Member-wise equality; note pNext is compared as a raw pointer value,
+    // not by following the chain.
+    bool operator==( PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxTransformFeedbackStreams == rhs.maxTransformFeedbackStreams )
+          && ( maxTransformFeedbackBuffers == rhs.maxTransformFeedbackBuffers )
+          && ( maxTransformFeedbackBufferSize == rhs.maxTransformFeedbackBufferSize )
+          && ( maxTransformFeedbackStreamDataSize == rhs.maxTransformFeedbackStreamDataSize )
+          && ( maxTransformFeedbackBufferDataSize == rhs.maxTransformFeedbackBufferDataSize )
+          && ( maxTransformFeedbackBufferDataStride == rhs.maxTransformFeedbackBufferDataStride )
+          && ( transformFeedbackQueries == rhs.transformFeedbackQueries )
+          && ( transformFeedbackStreamsLinesTriangles == rhs.transformFeedbackStreamsLinesTriangles )
+          && ( transformFeedbackRasterizationStreamSelect == rhs.transformFeedbackRasterizationStreamSelect )
+          && ( transformFeedbackDraw == rhs.transformFeedbackDraw );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Data members — same order as the C struct.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT;
+    void * pNext = {};
+    uint32_t maxTransformFeedbackStreams = {};
+    uint32_t maxTransformFeedbackBuffers = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize maxTransformFeedbackBufferSize = {};
+    uint32_t maxTransformFeedbackStreamDataSize = {};
+    uint32_t maxTransformFeedbackBufferDataSize = {};
+    uint32_t maxTransformFeedbackBufferDataStride = {};
+    VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackQueries = {};
+    VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackStreamsLinesTriangles = {};
+    VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackRasterizationStreamSelect = {};
+    VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackDraw = {};
+
+  };
+
+  // StructureType -> wrapper-type mapping for structure-chain machinery.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT>
+  {
+    using Type = PhysicalDeviceTransformFeedbackPropertiesEXT;
+  };
+
+  // C++ wrapper over VkPhysicalDeviceUniformBufferStandardLayoutFeatures
+  // (promoted from VK_KHR_uniform_buffer_standard_layout — see the KHR alias
+  // after the CppType specialization). Features structs carry setters so the
+  // application can both query and request the feature.
+  struct PhysicalDeviceUniformBufferStandardLayoutFeatures
+  {
+    using NativeType = VkPhysicalDeviceUniformBufferStandardLayoutFeatures;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceUniformBufferStandardLayoutFeatures(VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), uniformBufferStandardLayout( uniformBufferStandardLayout_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceUniformBufferStandardLayoutFeatures( PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the wrapper type.
+    PhysicalDeviceUniformBufferStandardLayoutFeatures( VkPhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceUniformBufferStandardLayoutFeatures( *reinterpret_cast<PhysicalDeviceUniformBufferStandardLayoutFeatures const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceUniformBufferStandardLayoutFeatures & operator=( PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceUniformBufferStandardLayoutFeatures & operator=( VkPhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeatures const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters returning *this for chained configuration.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceUniformBufferStandardLayoutFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceUniformBufferStandardLayoutFeatures & setUniformBufferStandardLayout( VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      uniformBufferStandardLayout = uniformBufferStandardLayout_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C struct for passing into the C API.
+    operator VkPhysicalDeviceUniformBufferStandardLayoutFeatures const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceUniformBufferStandardLayoutFeatures*>( this );
+    }
+
+    operator VkPhysicalDeviceUniformBufferStandardLayoutFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceUniformBufferStandardLayoutFeatures*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, uniformBufferStandardLayout );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceUniformBufferStandardLayoutFeatures const & ) const = default;
+#else
+    // Member-wise equality; pNext is compared as a raw pointer value.
+    bool operator==( PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( uniformBufferStandardLayout == rhs.uniformBufferStandardLayout );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Data members — same order as the C struct.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout = {};
+
+  };
+
+  // StructureType -> wrapper-type mapping, plus the pre-promotion KHR alias
+  // kept for source compatibility with code written against the extension name.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures>
+  {
+    using Type = PhysicalDeviceUniformBufferStandardLayoutFeatures;
+  };
+  using PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR = PhysicalDeviceUniformBufferStandardLayoutFeatures;
+
+  // C++ wrapper over VkPhysicalDeviceVariablePointersFeatures (core; KHR and
+  // singular-"Pointer" spellings are aliased after the CppType specialization).
+  // Features struct: queryable and settable.
+  struct PhysicalDeviceVariablePointersFeatures
+  {
+    using NativeType = VkPhysicalDeviceVariablePointersFeatures;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVariablePointersFeatures;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceVariablePointersFeatures(VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ = {}, VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), variablePointersStorageBuffer( variablePointersStorageBuffer_ ), variablePointers( variablePointers_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceVariablePointersFeatures( PhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the wrapper type.
+    PhysicalDeviceVariablePointersFeatures( VkPhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceVariablePointersFeatures( *reinterpret_cast<PhysicalDeviceVariablePointersFeatures const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceVariablePointersFeatures & operator=( PhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceVariablePointersFeatures & operator=( VkPhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointersFeatures const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters returning *this for chained configuration.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVariablePointersFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVariablePointersFeatures & setVariablePointersStorageBuffer( VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      variablePointersStorageBuffer = variablePointersStorageBuffer_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVariablePointersFeatures & setVariablePointers( VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      variablePointers = variablePointers_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C struct for passing into the C API.
+    operator VkPhysicalDeviceVariablePointersFeatures const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceVariablePointersFeatures*>( this );
+    }
+
+    operator VkPhysicalDeviceVariablePointersFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceVariablePointersFeatures*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, variablePointersStorageBuffer, variablePointers );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceVariablePointersFeatures const & ) const = default;
+#else
+    // Member-wise equality; pNext is compared as a raw pointer value.
+    bool operator==( PhysicalDeviceVariablePointersFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( variablePointersStorageBuffer == rhs.variablePointersStorageBuffer )
+          && ( variablePointers == rhs.variablePointers );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceVariablePointersFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Data members — same order as the C struct.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVariablePointersFeatures;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer = {};
+    VULKAN_HPP_NAMESPACE::Bool32 variablePointers = {};
+
+  };
+
+  // StructureType -> wrapper-type mapping, plus legacy aliases for the
+  // singular-"Pointer" and pre-promotion KHR spellings of this struct.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceVariablePointersFeatures>
+  {
+    using Type = PhysicalDeviceVariablePointersFeatures;
+  };
+  using PhysicalDeviceVariablePointerFeatures = PhysicalDeviceVariablePointersFeatures;
+  using PhysicalDeviceVariablePointerFeaturesKHR = PhysicalDeviceVariablePointersFeatures;
+  using PhysicalDeviceVariablePointersFeaturesKHR = PhysicalDeviceVariablePointersFeatures;
+
+  // C++ wrapper over VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT
+  // (VK_EXT_vertex_attribute_divisor). Features struct: queryable and settable.
+  struct PhysicalDeviceVertexAttributeDivisorFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), vertexAttributeInstanceRateDivisor( vertexAttributeInstanceRateDivisor_ ), vertexAttributeInstanceRateZeroDivisor( vertexAttributeInstanceRateZeroDivisor_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorFeaturesEXT( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the wrapper type.
+    PhysicalDeviceVertexAttributeDivisorFeaturesEXT( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceVertexAttributeDivisorFeaturesEXT( *reinterpret_cast<PhysicalDeviceVertexAttributeDivisorFeaturesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceVertexAttributeDivisorFeaturesEXT & operator=( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceVertexAttributeDivisorFeaturesEXT & operator=( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters returning *this for chained configuration.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexAttributeDivisorFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexAttributeDivisorFeaturesEXT & setVertexAttributeInstanceRateDivisor( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexAttributeInstanceRateDivisor = vertexAttributeInstanceRateDivisor_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexAttributeDivisorFeaturesEXT & setVertexAttributeInstanceRateZeroDivisor( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexAttributeInstanceRateZeroDivisor = vertexAttributeInstanceRateZeroDivisor_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C struct for passing into the C API.
+    operator VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, vertexAttributeInstanceRateDivisor, vertexAttributeInstanceRateZeroDivisor );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & ) const = default;
+#else
+    // Member-wise equality; pNext is compared as a raw pointer value.
+    bool operator==( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( vertexAttributeInstanceRateDivisor == rhs.vertexAttributeInstanceRateDivisor )
+          && ( vertexAttributeInstanceRateZeroDivisor == rhs.vertexAttributeInstanceRateZeroDivisor );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Data members — same order as the C struct.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor = {};
+    VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor = {};
+
+  };
+
+  // StructureType -> wrapper-type mapping for structure-chain machinery.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT>
+  {
+    using Type = PhysicalDeviceVertexAttributeDivisorFeaturesEXT;
+  };
+
+  // C++ wrapper over VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT
+  // (VK_EXT_vertex_attribute_divisor limit). Properties struct: no setters
+  // are generated for it.
+  struct PhysicalDeviceVertexAttributeDivisorPropertiesEXT
+  {
+    using NativeType = VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorPropertiesEXT(uint32_t maxVertexAttribDivisor_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), maxVertexAttribDivisor( maxVertexAttribDivisor_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorPropertiesEXT( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the wrapper type.
+    PhysicalDeviceVertexAttributeDivisorPropertiesEXT( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceVertexAttributeDivisorPropertiesEXT( *reinterpret_cast<PhysicalDeviceVertexAttributeDivisorPropertiesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceVertexAttributeDivisorPropertiesEXT & operator=( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceVertexAttributeDivisorPropertiesEXT & operator=( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+
+    // Zero-cost conversions to the C struct for passing into the C API.
+    operator VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, maxVertexAttribDivisor );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & ) const = default;
+#else
+    // Member-wise equality; pNext is compared as a raw pointer value.
+    bool operator==( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxVertexAttribDivisor == rhs.maxVertexAttribDivisor );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Data members — same order as the C struct.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT;
+    void * pNext = {};
+    uint32_t maxVertexAttribDivisor = {};
+
+  };
+
+  // StructureType -> wrapper-type mapping for structure-chain machinery.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT>
+  {
+    using Type = PhysicalDeviceVertexAttributeDivisorPropertiesEXT;
+  };
+
+  // C++ wrapper over VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT
+  // (VK_EXT_vertex_input_dynamic_state). Features struct: queryable and settable.
+  struct PhysicalDeviceVertexInputDynamicStateFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVertexInputDynamicStateFeaturesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexInputDynamicStateFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 vertexInputDynamicState_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), vertexInputDynamicState( vertexInputDynamicState_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexInputDynamicStateFeaturesEXT( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the wrapper type.
+    PhysicalDeviceVertexInputDynamicStateFeaturesEXT( VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceVertexInputDynamicStateFeaturesEXT( *reinterpret_cast<PhysicalDeviceVertexInputDynamicStateFeaturesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceVertexInputDynamicStateFeaturesEXT & operator=( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceVertexInputDynamicStateFeaturesEXT & operator=( VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexInputDynamicStateFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters returning *this for chained configuration.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexInputDynamicStateFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexInputDynamicStateFeaturesEXT & setVertexInputDynamicState( VULKAN_HPP_NAMESPACE::Bool32 vertexInputDynamicState_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexInputDynamicState = vertexInputDynamicState_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C struct for passing into the C API.
+    operator VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, vertexInputDynamicState );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & ) const = default;
+#else
+    // Member-wise equality; pNext is compared as a raw pointer value.
+    bool operator==( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( vertexInputDynamicState == rhs.vertexInputDynamicState );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Data members — same order as the C struct.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexInputDynamicStateFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 vertexInputDynamicState = {};
+
+  };
+
+  // StructureType -> wrapper-type mapping for structure-chain machinery.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceVertexInputDynamicStateFeaturesEXT>
+  {
+    using Type = PhysicalDeviceVertexInputDynamicStateFeaturesEXT;
+  };
+
+  // C++ wrapper over VkPhysicalDeviceVideoFormatInfoKHR (VK_KHR_video_queue).
+  // Input struct: note pNext is "const void *" here (application-supplied,
+  // read-only chain), unlike the feature/property structs above.
+  struct PhysicalDeviceVideoFormatInfoKHR
+  {
+    using NativeType = VkPhysicalDeviceVideoFormatInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVideoFormatInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoFormatInfoKHR(VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), imageUsage( imageUsage_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoFormatInfoKHR( PhysicalDeviceVideoFormatInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the wrapper type.
+    PhysicalDeviceVideoFormatInfoKHR( VkPhysicalDeviceVideoFormatInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceVideoFormatInfoKHR( *reinterpret_cast<PhysicalDeviceVideoFormatInfoKHR const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceVideoFormatInfoKHR & operator=( PhysicalDeviceVideoFormatInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceVideoFormatInfoKHR & operator=( VkPhysicalDeviceVideoFormatInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters returning *this for chained configuration.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoFormatInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoFormatInfoKHR & setImageUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageUsage = imageUsage_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C struct for passing into the C API.
+    operator VkPhysicalDeviceVideoFormatInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR*>( this );
+    }
+
+    operator VkPhysicalDeviceVideoFormatInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceVideoFormatInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageUsageFlags const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, imageUsage );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceVideoFormatInfoKHR const & ) const = default;
+#else
+    // Member-wise equality; pNext is compared as a raw pointer value.
+    bool operator==( PhysicalDeviceVideoFormatInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( imageUsage == rhs.imageUsage );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceVideoFormatInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Data members — same order as the C struct.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVideoFormatInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage = {};
+
+  };
+
+  // StructureType -> wrapper-type mapping for structure-chain machinery.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceVideoFormatInfoKHR>
+  {
+    using Type = PhysicalDeviceVideoFormatInfoKHR;
+  };
+
+  struct PhysicalDeviceVulkan11Features
+  {
+    using NativeType = VkPhysicalDeviceVulkan11Features;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan11Features;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan11Features(VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiview_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ = {}, VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ = {}, VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ = {}, VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), storageBuffer16BitAccess( storageBuffer16BitAccess_ ), uniformAndStorageBuffer16BitAccess( uniformAndStorageBuffer16BitAccess_ ), storagePushConstant16( storagePushConstant16_ ), storageInputOutput16( storageInputOutput16_ ), multiview( multiview_ ), multiviewGeometryShader( multiviewGeometryShader_ ), multiviewTessellationShader( multiviewTessellationShader_ ), variablePointersStorageBuffer( variablePointersStorageBuffer_ ), variablePointers( variablePointers_ ), protectedMemory( protectedMemory_ ), samplerYcbcrConversion( samplerYcbcrConversion_ ), shaderDrawParameters( shaderDrawParameters_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan11Features( PhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceVulkan11Features( VkPhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceVulkan11Features( *reinterpret_cast<PhysicalDeviceVulkan11Features const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceVulkan11Features & operator=( PhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceVulkan11Features & operator=( VkPhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Features const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      storageBuffer16BitAccess = storageBuffer16BitAccess_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setUniformAndStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      uniformAndStorageBuffer16BitAccess = uniformAndStorageBuffer16BitAccess_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setStoragePushConstant16( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ ) VULKAN_HPP_NOEXCEPT
+    {
+      storagePushConstant16 = storagePushConstant16_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setStorageInputOutput16( VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ ) VULKAN_HPP_NOEXCEPT
+    {
+      storageInputOutput16 = storageInputOutput16_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setMultiview( VULKAN_HPP_NAMESPACE::Bool32 multiview_ ) VULKAN_HPP_NOEXCEPT
+    {
+      multiview = multiview_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setMultiviewGeometryShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      multiviewGeometryShader = multiviewGeometryShader_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setMultiviewTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      multiviewTessellationShader = multiviewTessellationShader_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setVariablePointersStorageBuffer( VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      variablePointersStorageBuffer = variablePointersStorageBuffer_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setVariablePointers( VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      variablePointers = variablePointers_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setProtectedMemory( VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ ) VULKAN_HPP_NOEXCEPT
+    {
+      protectedMemory = protectedMemory_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setSamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ ) VULKAN_HPP_NOEXCEPT
+    {
+      samplerYcbcrConversion = samplerYcbcrConversion_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setShaderDrawParameters( VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderDrawParameters = shaderDrawParameters_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceVulkan11Features const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceVulkan11Features*>( this );
+    }
+
+    operator VkPhysicalDeviceVulkan11Features &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceVulkan11Features*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, storageBuffer16BitAccess, uniformAndStorageBuffer16BitAccess, storagePushConstant16, storageInputOutput16, multiview, multiviewGeometryShader, multiviewTessellationShader, variablePointersStorageBuffer, variablePointers, protectedMemory, samplerYcbcrConversion, shaderDrawParameters );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceVulkan11Features const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceVulkan11Features const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( storageBuffer16BitAccess == rhs.storageBuffer16BitAccess )
+          && ( uniformAndStorageBuffer16BitAccess == rhs.uniformAndStorageBuffer16BitAccess )
+          && ( storagePushConstant16 == rhs.storagePushConstant16 )
+          && ( storageInputOutput16 == rhs.storageInputOutput16 )
+          && ( multiview == rhs.multiview )
+          && ( multiviewGeometryShader == rhs.multiviewGeometryShader )
+          && ( multiviewTessellationShader == rhs.multiviewTessellationShader )
+          && ( variablePointersStorageBuffer == rhs.variablePointersStorageBuffer )
+          && ( variablePointers == rhs.variablePointers )
+          && ( protectedMemory == rhs.protectedMemory )
+          && ( samplerYcbcrConversion == rhs.samplerYcbcrConversion )
+          && ( shaderDrawParameters == rhs.shaderDrawParameters );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceVulkan11Features const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan11Features;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess = {};
+    VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess = {};
+    VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 multiview = {};
+    VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader = {};
+    VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader = {};
+    VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer = {};
+    VULKAN_HPP_NAMESPACE::Bool32 variablePointers = {};
+    VULKAN_HPP_NAMESPACE::Bool32 protectedMemory = {};
+    VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan11Features>
+  {
+    using Type = PhysicalDeviceVulkan11Features;
+  };
+
+  struct PhysicalDeviceVulkan11Properties
+  {
+    using NativeType = VkPhysicalDeviceVulkan11Properties;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan11Properties;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Properties(std::array<uint8_t,VK_UUID_SIZE> const & deviceUUID_ = {}, std::array<uint8_t,VK_UUID_SIZE> const & driverUUID_ = {}, std::array<uint8_t,VK_LUID_SIZE> const & deviceLUID_ = {}, uint32_t deviceNodeMask_ = {}, VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid_ = {}, uint32_t subgroupSize_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlags subgroupSupportedStages_ = {}, VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags subgroupSupportedOperations_ = {}, VULKAN_HPP_NAMESPACE::Bool32 subgroupQuadOperationsInAllStages_ = {}, VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior_ = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes, uint32_t maxMultiviewViewCount_ = {}, uint32_t maxMultiviewInstanceIndex_ = {}, VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault_ = {}, uint32_t maxPerSetDescriptors_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), deviceUUID( deviceUUID_ ), driverUUID( driverUUID_ ), deviceLUID( deviceLUID_ ), deviceNodeMask( deviceNodeMask_ ), deviceLUIDValid( deviceLUIDValid_ ), subgroupSize( subgroupSize_ ), subgroupSupportedStages( subgroupSupportedStages_ ), subgroupSupportedOperations( subgroupSupportedOperations_ ), subgroupQuadOperationsInAllStages( subgroupQuadOperationsInAllStages_ ), pointClippingBehavior( pointClippingBehavior_ ), maxMultiviewViewCount( maxMultiviewViewCount_ ), maxMultiviewInstanceIndex( maxMultiviewInstanceIndex_ ), protectedNoFault( protectedNoFault_ ), maxPerSetDescriptors( maxPerSetDescriptors_ ), maxMemoryAllocationSize( maxMemoryAllocationSize_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Properties( PhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceVulkan11Properties( VkPhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceVulkan11Properties( *reinterpret_cast<PhysicalDeviceVulkan11Properties const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceVulkan11Properties & operator=( PhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceVulkan11Properties & operator=( VkPhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Properties const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceVulkan11Properties const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceVulkan11Properties*>( this );
+    }
+
+    operator VkPhysicalDeviceVulkan11Properties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceVulkan11Properties*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_LUID_SIZE> const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ShaderStageFlags const &, VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::PointClippingBehavior const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, deviceUUID, driverUUID, deviceLUID, deviceNodeMask, deviceLUIDValid, subgroupSize, subgroupSupportedStages, subgroupSupportedOperations, subgroupQuadOperationsInAllStages, pointClippingBehavior, maxMultiviewViewCount, maxMultiviewInstanceIndex, protectedNoFault, maxPerSetDescriptors, maxMemoryAllocationSize );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceVulkan11Properties const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceVulkan11Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( deviceUUID == rhs.deviceUUID )
+          && ( driverUUID == rhs.driverUUID )
+          && ( deviceLUID == rhs.deviceLUID )
+          && ( deviceNodeMask == rhs.deviceNodeMask )
+          && ( deviceLUIDValid == rhs.deviceLUIDValid )
+          && ( subgroupSize == rhs.subgroupSize )
+          && ( subgroupSupportedStages == rhs.subgroupSupportedStages )
+          && ( subgroupSupportedOperations == rhs.subgroupSupportedOperations )
+          && ( subgroupQuadOperationsInAllStages == rhs.subgroupQuadOperationsInAllStages )
+          && ( pointClippingBehavior == rhs.pointClippingBehavior )
+          && ( maxMultiviewViewCount == rhs.maxMultiviewViewCount )
+          && ( maxMultiviewInstanceIndex == rhs.maxMultiviewInstanceIndex )
+          && ( protectedNoFault == rhs.protectedNoFault )
+          && ( maxPerSetDescriptors == rhs.maxPerSetDescriptors )
+          && ( maxMemoryAllocationSize == rhs.maxMemoryAllocationSize );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceVulkan11Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan11Properties;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> deviceUUID = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> driverUUID = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_LUID_SIZE> deviceLUID = {};
+    uint32_t deviceNodeMask = {};
+    VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid = {};
+    uint32_t subgroupSize = {};
+    VULKAN_HPP_NAMESPACE::ShaderStageFlags subgroupSupportedStages = {};
+    VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags subgroupSupportedOperations = {};
+    VULKAN_HPP_NAMESPACE::Bool32 subgroupQuadOperationsInAllStages = {};
+    VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes;
+    uint32_t maxMultiviewViewCount = {};
+    uint32_t maxMultiviewInstanceIndex = {};
+    VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault = {};
+    uint32_t maxPerSetDescriptors = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan11Properties>
+  {
+    using Type = PhysicalDeviceVulkan11Properties;
+  };
+
+  struct PhysicalDeviceVulkan12Features
+  {
+    using NativeType = VkPhysicalDeviceVulkan12Features;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan12Features;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan12Features(VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge_ = {}, VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ = {}, VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ = {}, VULKAN_HPP_NAMESPACE::Bool32 samplerFilterMinmax_ = {}, VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ = {}, VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ = {}, VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ = {}, VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ = {}, VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ = {}, VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer_ = {}, VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), samplerMirrorClampToEdge( samplerMirrorClampToEdge_ ), drawIndirectCount( drawIndirectCount_ ), storageBuffer8BitAccess( storageBuffer8BitAccess_ ), uniformAndStorageBuffer8BitAccess( uniformAndStorageBuffer8BitAccess_ ), storagePushConstant8( storagePushConstant8_ ), shaderBufferInt64Atomics( shaderBufferInt64Atomics_ ), shaderSharedInt64Atomics( shaderSharedInt64Atomics_ ), shaderFloat16( shaderFloat16_ ), shaderInt8( shaderInt8_ ), descriptorIndexing( descriptorIndexing_ ), shaderInputAttachmentArrayDynamicIndexing( shaderInputAttachmentArrayDynamicIndexing_ ), shaderUniformTexelBufferArrayDynamicIndexing( shaderUniformTexelBufferArrayDynamicIndexing_ ), shaderStorageTexelBufferArrayDynamicIndexing( shaderStorageTexelBufferArrayDynamicIndexing_ ), shaderUniformBufferArrayNonUniformIndexing( shaderUniformBufferArrayNonUniformIndexing_ ), shaderSampledImageArrayNonUniformIndexing( shaderSampledImageArrayNonUniformIndexing_ ), shaderStorageBufferArrayNonUniformIndexing( shaderStorageBufferArrayNonUniformIndexing_ ), shaderStorageImageArrayNonUniformIndexing( shaderStorageImageArrayNonUniformIndexing_ ), shaderInputAttachmentArrayNonUniformIndexing( shaderInputAttachmentArrayNonUniformIndexing_ ), shaderUniformTexelBufferArrayNonUniformIndexing( shaderUniformTexelBufferArrayNonUniformIndexing_ ), shaderStorageTexelBufferArrayNonUniformIndexing( shaderStorageTexelBufferArrayNonUniformIndexing_ ), descriptorBindingUniformBufferUpdateAfterBind( descriptorBindingUniformBufferUpdateAfterBind_ ), descriptorBindingSampledImageUpdateAfterBind( descriptorBindingSampledImageUpdateAfterBind_ ), descriptorBindingStorageImageUpdateAfterBind( descriptorBindingStorageImageUpdateAfterBind_ ), descriptorBindingStorageBufferUpdateAfterBind( descriptorBindingStorageBufferUpdateAfterBind_ ), descriptorBindingUniformTexelBufferUpdateAfterBind( descriptorBindingUniformTexelBufferUpdateAfterBind_ ), descriptorBindingStorageTexelBufferUpdateAfterBind( descriptorBindingStorageTexelBufferUpdateAfterBind_ ), descriptorBindingUpdateUnusedWhilePending( descriptorBindingUpdateUnusedWhilePending_ ), descriptorBindingPartiallyBound( descriptorBindingPartiallyBound_ ), descriptorBindingVariableDescriptorCount( descriptorBindingVariableDescriptorCount_ ), runtimeDescriptorArray( runtimeDescriptorArray_ ), samplerFilterMinmax( samplerFilterMinmax_ ), scalarBlockLayout( scalarBlockLayout_ ), imagelessFramebuffer( imagelessFramebuffer_ ), uniformBufferStandardLayout( uniformBufferStandardLayout_ ), shaderSubgroupExtendedTypes( shaderSubgroupExtendedTypes_ ), separateDepthStencilLayouts( separateDepthStencilLayouts_ ), hostQueryReset( hostQueryReset_ ), timelineSemaphore( timelineSemaphore_ ), bufferDeviceAddress( bufferDeviceAddress_ ), bufferDeviceAddressCaptureReplay( bufferDeviceAddressCaptureReplay_ ), bufferDeviceAddressMultiDevice( bufferDeviceAddressMultiDevice_ ), vulkanMemoryModel( vulkanMemoryModel_ ), vulkanMemoryModelDeviceScope( vulkanMemoryModelDeviceScope_ ), vulkanMemoryModelAvailabilityVisibilityChains( vulkanMemoryModelAvailabilityVisibilityChains_ ), shaderOutputViewportIndex( shaderOutputViewportIndex_ ), shaderOutputLayer( shaderOutputLayer_ ), subgroupBroadcastDynamicId( subgroupBroadcastDynamicId_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan12Features( PhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceVulkan12Features( VkPhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceVulkan12Features( *reinterpret_cast<PhysicalDeviceVulkan12Features const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceVulkan12Features & operator=( PhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceVulkan12Features & operator=( VkPhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Features const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setSamplerMirrorClampToEdge( VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge_ ) VULKAN_HPP_NOEXCEPT
+    {
+      samplerMirrorClampToEdge = samplerMirrorClampToEdge_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDrawIndirectCount( VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      drawIndirectCount = drawIndirectCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      storageBuffer8BitAccess = storageBuffer8BitAccess_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setUniformAndStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      uniformAndStorageBuffer8BitAccess = uniformAndStorageBuffer8BitAccess_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setStoragePushConstant8( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ ) VULKAN_HPP_NOEXCEPT
+    {
+      storagePushConstant8 = storagePushConstant8_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderBufferInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderBufferInt64Atomics = shaderBufferInt64Atomics_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderSharedInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSharedInt64Atomics = shaderSharedInt64Atomics_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderFloat16( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderFloat16 = shaderFloat16_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderInt8( VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderInt8 = shaderInt8_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorIndexing( VULKAN_HPP_NAMESPACE::Bool32 descriptorIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorIndexing = descriptorIndexing_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderInputAttachmentArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderInputAttachmentArrayDynamicIndexing = shaderInputAttachmentArrayDynamicIndexing_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderUniformTexelBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderUniformTexelBufferArrayDynamicIndexing = shaderUniformTexelBufferArrayDynamicIndexing_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderStorageTexelBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderStorageTexelBufferArrayDynamicIndexing = shaderStorageTexelBufferArrayDynamicIndexing_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderUniformBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderUniformBufferArrayNonUniformIndexing = shaderUniformBufferArrayNonUniformIndexing_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderSampledImageArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSampledImageArrayNonUniformIndexing = shaderSampledImageArrayNonUniformIndexing_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderStorageBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderStorageBufferArrayNonUniformIndexing = shaderStorageBufferArrayNonUniformIndexing_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderStorageImageArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderStorageImageArrayNonUniformIndexing = shaderStorageImageArrayNonUniformIndexing_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderInputAttachmentArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderInputAttachmentArrayNonUniformIndexing = shaderInputAttachmentArrayNonUniformIndexing_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderUniformTexelBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderUniformTexelBufferArrayNonUniformIndexing = shaderUniformTexelBufferArrayNonUniformIndexing_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderStorageTexelBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderStorageTexelBufferArrayNonUniformIndexing = shaderStorageTexelBufferArrayNonUniformIndexing_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorBindingUniformBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingUniformBufferUpdateAfterBind = descriptorBindingUniformBufferUpdateAfterBind_;
+      return *this;
+    }
+
+    // NOTE(review): generated code (Vulkan-Hpp) — do not hand-edit; regenerate from the
+    // Vulkan registry instead. Each fluent setter below stores its Bool32 argument into
+    // the member of the same name and returns *this so calls can be chained.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorBindingSampledImageUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingSampledImageUpdateAfterBind = descriptorBindingSampledImageUpdateAfterBind_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorBindingStorageImageUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingStorageImageUpdateAfterBind = descriptorBindingStorageImageUpdateAfterBind_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorBindingStorageBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingStorageBufferUpdateAfterBind = descriptorBindingStorageBufferUpdateAfterBind_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorBindingUniformTexelBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingUniformTexelBufferUpdateAfterBind = descriptorBindingUniformTexelBufferUpdateAfterBind_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorBindingStorageTexelBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingStorageTexelBufferUpdateAfterBind = descriptorBindingStorageTexelBufferUpdateAfterBind_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorBindingUpdateUnusedWhilePending( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingUpdateUnusedWhilePending = descriptorBindingUpdateUnusedWhilePending_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorBindingPartiallyBound( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingPartiallyBound = descriptorBindingPartiallyBound_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorBindingVariableDescriptorCount( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingVariableDescriptorCount = descriptorBindingVariableDescriptorCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setRuntimeDescriptorArray( VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ ) VULKAN_HPP_NOEXCEPT
+    {
+      runtimeDescriptorArray = runtimeDescriptorArray_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setSamplerFilterMinmax( VULKAN_HPP_NAMESPACE::Bool32 samplerFilterMinmax_ ) VULKAN_HPP_NOEXCEPT
+    {
+      samplerFilterMinmax = samplerFilterMinmax_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setScalarBlockLayout( VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      scalarBlockLayout = scalarBlockLayout_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setImagelessFramebuffer( VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imagelessFramebuffer = imagelessFramebuffer_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setUniformBufferStandardLayout( VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      uniformBufferStandardLayout = uniformBufferStandardLayout_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderSubgroupExtendedTypes( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSubgroupExtendedTypes = shaderSubgroupExtendedTypes_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setSeparateDepthStencilLayouts( VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ ) VULKAN_HPP_NOEXCEPT
+    {
+      separateDepthStencilLayouts = separateDepthStencilLayouts_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setHostQueryReset( VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      hostQueryReset = hostQueryReset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setTimelineSemaphore( VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ ) VULKAN_HPP_NOEXCEPT
+    {
+      timelineSemaphore = timelineSemaphore_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setBufferDeviceAddress( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferDeviceAddress = bufferDeviceAddress_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setBufferDeviceAddressCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setBufferDeviceAddressMultiDevice( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setVulkanMemoryModel( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vulkanMemoryModel = vulkanMemoryModel_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setVulkanMemoryModelDeviceScope( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vulkanMemoryModelDeviceScope = vulkanMemoryModelDeviceScope_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setVulkanMemoryModelAvailabilityVisibilityChains( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vulkanMemoryModelAvailabilityVisibilityChains = vulkanMemoryModelAvailabilityVisibilityChains_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderOutputViewportIndex( VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderOutputViewportIndex = shaderOutputViewportIndex_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderOutputLayer( VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderOutputLayer = shaderOutputLayer_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setSubgroupBroadcastDynamicId( VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subgroupBroadcastDynamicId = subgroupBroadcastDynamicId_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // C-interop: reinterpret this wrapper as the equivalent C struct. Valid only
+    // because the wrapper is layout-compatible with VkPhysicalDeviceVulkan12Features
+    // (same members, same order — see the member list at the end of this struct).
+    operator VkPhysicalDeviceVulkan12Features const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceVulkan12Features*>( this );
+    }
+
+    // Mutable variant of the conversion above.
+    operator VkPhysicalDeviceVulkan12Features &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceVulkan12Features*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    // C++14 and later can deduce the (very long) tuple type; pre-14 spells it out below.
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      // Returns a tuple of const references to every member (sType and pNext included),
+      // in declaration order; operator== below compares via this tuple when reflection
+      // is enabled.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, samplerMirrorClampToEdge, drawIndirectCount, storageBuffer8BitAccess, uniformAndStorageBuffer8BitAccess, storagePushConstant8, shaderBufferInt64Atomics, shaderSharedInt64Atomics, shaderFloat16, shaderInt8, descriptorIndexing, shaderInputAttachmentArrayDynamicIndexing, shaderUniformTexelBufferArrayDynamicIndexing, shaderStorageTexelBufferArrayDynamicIndexing, shaderUniformBufferArrayNonUniformIndexing, shaderSampledImageArrayNonUniformIndexing, shaderStorageBufferArrayNonUniformIndexing, shaderStorageImageArrayNonUniformIndexing, shaderInputAttachmentArrayNonUniformIndexing, shaderUniformTexelBufferArrayNonUniformIndexing, shaderStorageTexelBufferArrayNonUniformIndexing, descriptorBindingUniformBufferUpdateAfterBind, descriptorBindingSampledImageUpdateAfterBind, descriptorBindingStorageImageUpdateAfterBind, descriptorBindingStorageBufferUpdateAfterBind, descriptorBindingUniformTexelBufferUpdateAfterBind, descriptorBindingStorageTexelBufferUpdateAfterBind, descriptorBindingUpdateUnusedWhilePending, descriptorBindingPartiallyBound, descriptorBindingVariableDescriptorCount, runtimeDescriptorArray, samplerFilterMinmax, scalarBlockLayout, imagelessFramebuffer, uniformBufferStandardLayout, shaderSubgroupExtendedTypes, separateDepthStencilLayouts, hostQueryReset, timelineSemaphore, bufferDeviceAddress, bufferDeviceAddressCaptureReplay, bufferDeviceAddressMultiDevice, vulkanMemoryModel, vulkanMemoryModelDeviceScope, vulkanMemoryModelAvailabilityVisibilityChains, shaderOutputViewportIndex, shaderOutputLayer, subgroupBroadcastDynamicId );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+// C++20: defaulted three-way comparison gives ==, !=, <, etc. for free.
+auto operator<=>( PhysicalDeviceVulkan12Features const & ) const = default;
+#else
+    // Memberwise equality. Note this compares pNext as a raw pointer (shallow —
+    // identical chains allocated separately compare unequal) and includes sType,
+    // which is constant for this struct type.
+    bool operator==( PhysicalDeviceVulkan12Features const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      // Tuple comparison over reflect() — equivalent to the explicit chain below.
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( samplerMirrorClampToEdge == rhs.samplerMirrorClampToEdge )
+          && ( drawIndirectCount == rhs.drawIndirectCount )
+          && ( storageBuffer8BitAccess == rhs.storageBuffer8BitAccess )
+          && ( uniformAndStorageBuffer8BitAccess == rhs.uniformAndStorageBuffer8BitAccess )
+          && ( storagePushConstant8 == rhs.storagePushConstant8 )
+          && ( shaderBufferInt64Atomics == rhs.shaderBufferInt64Atomics )
+          && ( shaderSharedInt64Atomics == rhs.shaderSharedInt64Atomics )
+          && ( shaderFloat16 == rhs.shaderFloat16 )
+          && ( shaderInt8 == rhs.shaderInt8 )
+          && ( descriptorIndexing == rhs.descriptorIndexing )
+          && ( shaderInputAttachmentArrayDynamicIndexing == rhs.shaderInputAttachmentArrayDynamicIndexing )
+          && ( shaderUniformTexelBufferArrayDynamicIndexing == rhs.shaderUniformTexelBufferArrayDynamicIndexing )
+          && ( shaderStorageTexelBufferArrayDynamicIndexing == rhs.shaderStorageTexelBufferArrayDynamicIndexing )
+          && ( shaderUniformBufferArrayNonUniformIndexing == rhs.shaderUniformBufferArrayNonUniformIndexing )
+          && ( shaderSampledImageArrayNonUniformIndexing == rhs.shaderSampledImageArrayNonUniformIndexing )
+          && ( shaderStorageBufferArrayNonUniformIndexing == rhs.shaderStorageBufferArrayNonUniformIndexing )
+          && ( shaderStorageImageArrayNonUniformIndexing == rhs.shaderStorageImageArrayNonUniformIndexing )
+          && ( shaderInputAttachmentArrayNonUniformIndexing == rhs.shaderInputAttachmentArrayNonUniformIndexing )
+          && ( shaderUniformTexelBufferArrayNonUniformIndexing == rhs.shaderUniformTexelBufferArrayNonUniformIndexing )
+          && ( shaderStorageTexelBufferArrayNonUniformIndexing == rhs.shaderStorageTexelBufferArrayNonUniformIndexing )
+          && ( descriptorBindingUniformBufferUpdateAfterBind == rhs.descriptorBindingUniformBufferUpdateAfterBind )
+          && ( descriptorBindingSampledImageUpdateAfterBind == rhs.descriptorBindingSampledImageUpdateAfterBind )
+          && ( descriptorBindingStorageImageUpdateAfterBind == rhs.descriptorBindingStorageImageUpdateAfterBind )
+          && ( descriptorBindingStorageBufferUpdateAfterBind == rhs.descriptorBindingStorageBufferUpdateAfterBind )
+          && ( descriptorBindingUniformTexelBufferUpdateAfterBind == rhs.descriptorBindingUniformTexelBufferUpdateAfterBind )
+          && ( descriptorBindingStorageTexelBufferUpdateAfterBind == rhs.descriptorBindingStorageTexelBufferUpdateAfterBind )
+          && ( descriptorBindingUpdateUnusedWhilePending == rhs.descriptorBindingUpdateUnusedWhilePending )
+          && ( descriptorBindingPartiallyBound == rhs.descriptorBindingPartiallyBound )
+          && ( descriptorBindingVariableDescriptorCount == rhs.descriptorBindingVariableDescriptorCount )
+          && ( runtimeDescriptorArray == rhs.runtimeDescriptorArray )
+          && ( samplerFilterMinmax == rhs.samplerFilterMinmax )
+          && ( scalarBlockLayout == rhs.scalarBlockLayout )
+          && ( imagelessFramebuffer == rhs.imagelessFramebuffer )
+          && ( uniformBufferStandardLayout == rhs.uniformBufferStandardLayout )
+          && ( shaderSubgroupExtendedTypes == rhs.shaderSubgroupExtendedTypes )
+          && ( separateDepthStencilLayouts == rhs.separateDepthStencilLayouts )
+          && ( hostQueryReset == rhs.hostQueryReset )
+          && ( timelineSemaphore == rhs.timelineSemaphore )
+          && ( bufferDeviceAddress == rhs.bufferDeviceAddress )
+          && ( bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay )
+          && ( bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice )
+          && ( vulkanMemoryModel == rhs.vulkanMemoryModel )
+          && ( vulkanMemoryModelDeviceScope == rhs.vulkanMemoryModelDeviceScope )
+          && ( vulkanMemoryModelAvailabilityVisibilityChains == rhs.vulkanMemoryModelAvailabilityVisibilityChains )
+          && ( shaderOutputViewportIndex == rhs.shaderOutputViewportIndex )
+          && ( shaderOutputLayer == rhs.shaderOutputLayer )
+          && ( subgroupBroadcastDynamicId == rhs.subgroupBroadcastDynamicId );
+#endif
+    }
+
+    // Defined as the negation of operator== above.
+    bool operator!=( PhysicalDeviceVulkan12Features const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // Members mirror VkPhysicalDeviceVulkan12Features in declaration order
+    // (required for the reinterpret_cast conversions above). sType is fixed to
+    // this struct's type; all feature flags value-initialize to VK_FALSE (0).
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan12Features;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge = {};
+    VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount = {};
+    VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess = {};
+    VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess = {};
+    VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderInt8 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount = {};
+    VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray = {};
+    VULKAN_HPP_NAMESPACE::Bool32 samplerFilterMinmax = {};
+    VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout = {};
+    VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer = {};
+    VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes = {};
+    VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts = {};
+    VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset = {};
+    VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore = {};
+    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress = {};
+    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay = {};
+    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice = {};
+    VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel = {};
+    VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope = {};
+    VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer = {};
+    VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId = {};
+
+  };
+
+  // Maps the StructureType enumerant back to its C++ wrapper type, so generic
+  // code (e.g. structure-chain machinery) can recover the struct from its sType.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan12Features>
+  {
+    using Type = PhysicalDeviceVulkan12Features;
+  };
+
+  struct PhysicalDeviceVulkan12Properties
+  {
+    using NativeType = VkPhysicalDeviceVulkan12Properties;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan12Properties;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Properties(VULKAN_HPP_NAMESPACE::DriverId driverID_ = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary, std::array<char,VK_MAX_DRIVER_NAME_SIZE> const & driverName_ = {}, std::array<char,VK_MAX_DRIVER_INFO_SIZE> const & driverInfo_ = {}, VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion_ = {}, VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64_ = {}, uint32_t maxUpdateAfterBindDescriptorsInAllPools_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 
shaderStorageImageArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindSamplers_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments_ = {}, uint32_t maxPerStageUpdateAfterBindResources_ = {}, uint32_t maxDescriptorSetUpdateAfterBindSamplers_ = {}, uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers_ = {}, uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ = {}, uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers_ = {}, uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ = {}, uint32_t maxDescriptorSetUpdateAfterBindSampledImages_ = {}, uint32_t maxDescriptorSetUpdateAfterBindStorageImages_ = {}, uint32_t maxDescriptorSetUpdateAfterBindInputAttachments_ = {}, VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes_ = {}, VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes_ = {}, VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone_ = {}, VULKAN_HPP_NAMESPACE::Bool32 independentResolve_ = {}, VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats_ = {}, VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping_ = {}, uint64_t maxTimelineSemaphoreValueDifference_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferIntegerColorSampleCounts_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), driverID( driverID_ ), driverName( driverName_ ), driverInfo( driverInfo_ ), conformanceVersion( conformanceVersion_ ), denormBehaviorIndependence( denormBehaviorIndependence_ ), roundingModeIndependence( roundingModeIndependence_ ), shaderSignedZeroInfNanPreserveFloat16( shaderSignedZeroInfNanPreserveFloat16_ ), shaderSignedZeroInfNanPreserveFloat32( shaderSignedZeroInfNanPreserveFloat32_ ), shaderSignedZeroInfNanPreserveFloat64( shaderSignedZeroInfNanPreserveFloat64_ ), shaderDenormPreserveFloat16( shaderDenormPreserveFloat16_ ), shaderDenormPreserveFloat32( shaderDenormPreserveFloat32_ ), shaderDenormPreserveFloat64( shaderDenormPreserveFloat64_ ), shaderDenormFlushToZeroFloat16( shaderDenormFlushToZeroFloat16_ ), shaderDenormFlushToZeroFloat32( shaderDenormFlushToZeroFloat32_ ), shaderDenormFlushToZeroFloat64( shaderDenormFlushToZeroFloat64_ ), shaderRoundingModeRTEFloat16( shaderRoundingModeRTEFloat16_ ), shaderRoundingModeRTEFloat32( shaderRoundingModeRTEFloat32_ ), shaderRoundingModeRTEFloat64( shaderRoundingModeRTEFloat64_ ), shaderRoundingModeRTZFloat16( shaderRoundingModeRTZFloat16_ ), shaderRoundingModeRTZFloat32( shaderRoundingModeRTZFloat32_ ), shaderRoundingModeRTZFloat64( shaderRoundingModeRTZFloat64_ ), maxUpdateAfterBindDescriptorsInAllPools( maxUpdateAfterBindDescriptorsInAllPools_ ), shaderUniformBufferArrayNonUniformIndexingNative( shaderUniformBufferArrayNonUniformIndexingNative_ ), shaderSampledImageArrayNonUniformIndexingNative( shaderSampledImageArrayNonUniformIndexingNative_ ), shaderStorageBufferArrayNonUniformIndexingNative( shaderStorageBufferArrayNonUniformIndexingNative_ ), shaderStorageImageArrayNonUniformIndexingNative( shaderStorageImageArrayNonUniformIndexingNative_ ), shaderInputAttachmentArrayNonUniformIndexingNative( shaderInputAttachmentArrayNonUniformIndexingNative_ ), robustBufferAccessUpdateAfterBind( robustBufferAccessUpdateAfterBind_ ), quadDivergentImplicitLod( quadDivergentImplicitLod_ ), 
maxPerStageDescriptorUpdateAfterBindSamplers( maxPerStageDescriptorUpdateAfterBindSamplers_ ), maxPerStageDescriptorUpdateAfterBindUniformBuffers( maxPerStageDescriptorUpdateAfterBindUniformBuffers_ ), maxPerStageDescriptorUpdateAfterBindStorageBuffers( maxPerStageDescriptorUpdateAfterBindStorageBuffers_ ), maxPerStageDescriptorUpdateAfterBindSampledImages( maxPerStageDescriptorUpdateAfterBindSampledImages_ ), maxPerStageDescriptorUpdateAfterBindStorageImages( maxPerStageDescriptorUpdateAfterBindStorageImages_ ), maxPerStageDescriptorUpdateAfterBindInputAttachments( maxPerStageDescriptorUpdateAfterBindInputAttachments_ ), maxPerStageUpdateAfterBindResources( maxPerStageUpdateAfterBindResources_ ), maxDescriptorSetUpdateAfterBindSamplers( maxDescriptorSetUpdateAfterBindSamplers_ ), maxDescriptorSetUpdateAfterBindUniformBuffers( maxDescriptorSetUpdateAfterBindUniformBuffers_ ), maxDescriptorSetUpdateAfterBindUniformBuffersDynamic( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ ), maxDescriptorSetUpdateAfterBindStorageBuffers( maxDescriptorSetUpdateAfterBindStorageBuffers_ ), maxDescriptorSetUpdateAfterBindStorageBuffersDynamic( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ ), maxDescriptorSetUpdateAfterBindSampledImages( maxDescriptorSetUpdateAfterBindSampledImages_ ), maxDescriptorSetUpdateAfterBindStorageImages( maxDescriptorSetUpdateAfterBindStorageImages_ ), maxDescriptorSetUpdateAfterBindInputAttachments( maxDescriptorSetUpdateAfterBindInputAttachments_ ), supportedDepthResolveModes( supportedDepthResolveModes_ ), supportedStencilResolveModes( supportedStencilResolveModes_ ), independentResolveNone( independentResolveNone_ ), independentResolve( independentResolve_ ), filterMinmaxSingleComponentFormats( filterMinmaxSingleComponentFormats_ ), filterMinmaxImageComponentMapping( filterMinmaxImageComponentMapping_ ), maxTimelineSemaphoreValueDifference( maxTimelineSemaphoreValueDifference_ ), framebufferIntegerColorSampleCounts( 
framebufferIntegerColorSampleCounts_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Properties( PhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceVulkan12Properties( VkPhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceVulkan12Properties( *reinterpret_cast<PhysicalDeviceVulkan12Properties const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceVulkan12Properties & operator=( PhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceVulkan12Properties & operator=( VkPhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Properties const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceVulkan12Properties const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceVulkan12Properties*>( this );
+    }
+
+    operator VkPhysicalDeviceVulkan12Properties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceVulkan12Properties*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DriverId const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_NAME_SIZE> const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_INFO_SIZE> const &, VULKAN_HPP_NAMESPACE::ConformanceVersion const &, VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence const &, VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ResolveModeFlags const &, VULKAN_HPP_NAMESPACE::ResolveModeFlags const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint64_t const &, VULKAN_HPP_NAMESPACE::SampleCountFlags const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, driverID, driverName, driverInfo, conformanceVersion, denormBehaviorIndependence, roundingModeIndependence, shaderSignedZeroInfNanPreserveFloat16, shaderSignedZeroInfNanPreserveFloat32, shaderSignedZeroInfNanPreserveFloat64, shaderDenormPreserveFloat16, shaderDenormPreserveFloat32, shaderDenormPreserveFloat64, shaderDenormFlushToZeroFloat16, shaderDenormFlushToZeroFloat32, shaderDenormFlushToZeroFloat64, shaderRoundingModeRTEFloat16, shaderRoundingModeRTEFloat32, shaderRoundingModeRTEFloat64, shaderRoundingModeRTZFloat16, shaderRoundingModeRTZFloat32, shaderRoundingModeRTZFloat64, maxUpdateAfterBindDescriptorsInAllPools, shaderUniformBufferArrayNonUniformIndexingNative, shaderSampledImageArrayNonUniformIndexingNative, shaderStorageBufferArrayNonUniformIndexingNative, shaderStorageImageArrayNonUniformIndexingNative, shaderInputAttachmentArrayNonUniformIndexingNative, robustBufferAccessUpdateAfterBind, quadDivergentImplicitLod, maxPerStageDescriptorUpdateAfterBindSamplers, maxPerStageDescriptorUpdateAfterBindUniformBuffers, maxPerStageDescriptorUpdateAfterBindStorageBuffers, maxPerStageDescriptorUpdateAfterBindSampledImages, maxPerStageDescriptorUpdateAfterBindStorageImages, maxPerStageDescriptorUpdateAfterBindInputAttachments, maxPerStageUpdateAfterBindResources, maxDescriptorSetUpdateAfterBindSamplers, maxDescriptorSetUpdateAfterBindUniformBuffers, maxDescriptorSetUpdateAfterBindUniformBuffersDynamic, maxDescriptorSetUpdateAfterBindStorageBuffers, maxDescriptorSetUpdateAfterBindStorageBuffersDynamic, maxDescriptorSetUpdateAfterBindSampledImages, maxDescriptorSetUpdateAfterBindStorageImages, maxDescriptorSetUpdateAfterBindInputAttachments, supportedDepthResolveModes, supportedStencilResolveModes, independentResolveNone, independentResolve, filterMinmaxSingleComponentFormats, filterMinmaxImageComponentMapping, maxTimelineSemaphoreValueDifference, framebufferIntegerColorSampleCounts );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+// C++20 path: a defaulted three-way comparison generates ==, != and orderings memberwise.
+auto operator<=>( PhysicalDeviceVulkan12Properties const & ) const = default;
+#else
+    // Pre-C++20 fallback: memberwise equality over every field, including sType and pNext.
+    // NOTE: pNext is compared by pointer value only — two structs whose pNext chains have
+    // equal contents but different addresses compare unequal.
+    bool operator==( PhysicalDeviceVulkan12Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      // reflect() returns a std::tie of all members, so tuple equality is memberwise equality.
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( driverID == rhs.driverID )
+          && ( driverName == rhs.driverName )
+          && ( driverInfo == rhs.driverInfo )
+          && ( conformanceVersion == rhs.conformanceVersion )
+          && ( denormBehaviorIndependence == rhs.denormBehaviorIndependence )
+          && ( roundingModeIndependence == rhs.roundingModeIndependence )
+          && ( shaderSignedZeroInfNanPreserveFloat16 == rhs.shaderSignedZeroInfNanPreserveFloat16 )
+          && ( shaderSignedZeroInfNanPreserveFloat32 == rhs.shaderSignedZeroInfNanPreserveFloat32 )
+          && ( shaderSignedZeroInfNanPreserveFloat64 == rhs.shaderSignedZeroInfNanPreserveFloat64 )
+          && ( shaderDenormPreserveFloat16 == rhs.shaderDenormPreserveFloat16 )
+          && ( shaderDenormPreserveFloat32 == rhs.shaderDenormPreserveFloat32 )
+          && ( shaderDenormPreserveFloat64 == rhs.shaderDenormPreserveFloat64 )
+          && ( shaderDenormFlushToZeroFloat16 == rhs.shaderDenormFlushToZeroFloat16 )
+          && ( shaderDenormFlushToZeroFloat32 == rhs.shaderDenormFlushToZeroFloat32 )
+          && ( shaderDenormFlushToZeroFloat64 == rhs.shaderDenormFlushToZeroFloat64 )
+          && ( shaderRoundingModeRTEFloat16 == rhs.shaderRoundingModeRTEFloat16 )
+          && ( shaderRoundingModeRTEFloat32 == rhs.shaderRoundingModeRTEFloat32 )
+          && ( shaderRoundingModeRTEFloat64 == rhs.shaderRoundingModeRTEFloat64 )
+          && ( shaderRoundingModeRTZFloat16 == rhs.shaderRoundingModeRTZFloat16 )
+          && ( shaderRoundingModeRTZFloat32 == rhs.shaderRoundingModeRTZFloat32 )
+          && ( shaderRoundingModeRTZFloat64 == rhs.shaderRoundingModeRTZFloat64 )
+          && ( maxUpdateAfterBindDescriptorsInAllPools == rhs.maxUpdateAfterBindDescriptorsInAllPools )
+          && ( shaderUniformBufferArrayNonUniformIndexingNative == rhs.shaderUniformBufferArrayNonUniformIndexingNative )
+          && ( shaderSampledImageArrayNonUniformIndexingNative == rhs.shaderSampledImageArrayNonUniformIndexingNative )
+          && ( shaderStorageBufferArrayNonUniformIndexingNative == rhs.shaderStorageBufferArrayNonUniformIndexingNative )
+          && ( shaderStorageImageArrayNonUniformIndexingNative == rhs.shaderStorageImageArrayNonUniformIndexingNative )
+          && ( shaderInputAttachmentArrayNonUniformIndexingNative == rhs.shaderInputAttachmentArrayNonUniformIndexingNative )
+          && ( robustBufferAccessUpdateAfterBind == rhs.robustBufferAccessUpdateAfterBind )
+          && ( quadDivergentImplicitLod == rhs.quadDivergentImplicitLod )
+          && ( maxPerStageDescriptorUpdateAfterBindSamplers == rhs.maxPerStageDescriptorUpdateAfterBindSamplers )
+          && ( maxPerStageDescriptorUpdateAfterBindUniformBuffers == rhs.maxPerStageDescriptorUpdateAfterBindUniformBuffers )
+          && ( maxPerStageDescriptorUpdateAfterBindStorageBuffers == rhs.maxPerStageDescriptorUpdateAfterBindStorageBuffers )
+          && ( maxPerStageDescriptorUpdateAfterBindSampledImages == rhs.maxPerStageDescriptorUpdateAfterBindSampledImages )
+          && ( maxPerStageDescriptorUpdateAfterBindStorageImages == rhs.maxPerStageDescriptorUpdateAfterBindStorageImages )
+          && ( maxPerStageDescriptorUpdateAfterBindInputAttachments == rhs.maxPerStageDescriptorUpdateAfterBindInputAttachments )
+          && ( maxPerStageUpdateAfterBindResources == rhs.maxPerStageUpdateAfterBindResources )
+          && ( maxDescriptorSetUpdateAfterBindSamplers == rhs.maxDescriptorSetUpdateAfterBindSamplers )
+          && ( maxDescriptorSetUpdateAfterBindUniformBuffers == rhs.maxDescriptorSetUpdateAfterBindUniformBuffers )
+          && ( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic )
+          && ( maxDescriptorSetUpdateAfterBindStorageBuffers == rhs.maxDescriptorSetUpdateAfterBindStorageBuffers )
+          && ( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic )
+          && ( maxDescriptorSetUpdateAfterBindSampledImages == rhs.maxDescriptorSetUpdateAfterBindSampledImages )
+          && ( maxDescriptorSetUpdateAfterBindStorageImages == rhs.maxDescriptorSetUpdateAfterBindStorageImages )
+          && ( maxDescriptorSetUpdateAfterBindInputAttachments == rhs.maxDescriptorSetUpdateAfterBindInputAttachments )
+          && ( supportedDepthResolveModes == rhs.supportedDepthResolveModes )
+          && ( supportedStencilResolveModes == rhs.supportedStencilResolveModes )
+          && ( independentResolveNone == rhs.independentResolveNone )
+          && ( independentResolve == rhs.independentResolve )
+          && ( filterMinmaxSingleComponentFormats == rhs.filterMinmaxSingleComponentFormats )
+          && ( filterMinmaxImageComponentMapping == rhs.filterMinmaxImageComponentMapping )
+          && ( maxTimelineSemaphoreValueDifference == rhs.maxTimelineSemaphoreValueDifference )
+          && ( framebufferIntegerColorSampleCounts == rhs.framebufferIntegerColorSampleCounts );
+#endif
+    }
+
+    // Defined in terms of operator== so both comparison branches stay consistent.
+    bool operator!=( PhysicalDeviceVulkan12Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Data members, in the C struct's declaration order.
+    // NOTE(review): layout compatibility with VkPhysicalDeviceVulkan12Properties is assumed
+    // by reinterpret_cast conversions elsewhere in this struct (not all visible in this chunk);
+    // do not reorder or insert members.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan12Properties;
+    void * pNext = {};
+    // Driver identification.
+    VULKAN_HPP_NAMESPACE::DriverId driverID = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary;
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_NAME_SIZE> driverName = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_INFO_SIZE> driverInfo = {};
+    VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion = {};
+    // Shader float-controls properties.
+    VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly;
+    VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64 = {};
+    // Descriptor-indexing / update-after-bind limits and capabilities.
+    uint32_t maxUpdateAfterBindDescriptorsInAllPools = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative = {};
+    VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind = {};
+    VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod = {};
+    uint32_t maxPerStageDescriptorUpdateAfterBindSamplers = {};
+    uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers = {};
+    uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers = {};
+    uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages = {};
+    uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages = {};
+    uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments = {};
+    uint32_t maxPerStageUpdateAfterBindResources = {};
+    uint32_t maxDescriptorSetUpdateAfterBindSamplers = {};
+    uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers = {};
+    uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = {};
+    uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers = {};
+    uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = {};
+    uint32_t maxDescriptorSetUpdateAfterBindSampledImages = {};
+    uint32_t maxDescriptorSetUpdateAfterBindStorageImages = {};
+    uint32_t maxDescriptorSetUpdateAfterBindInputAttachments = {};
+    // Depth/stencil resolve, filter-minmax, timeline-semaphore and sample-count properties.
+    VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes = {};
+    VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes = {};
+    VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone = {};
+    VULKAN_HPP_NAMESPACE::Bool32 independentResolve = {};
+    VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats = {};
+    VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping = {};
+    uint64_t maxTimelineSemaphoreValueDifference = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferIntegerColorSampleCounts = {};
+
+  };
+
+  // Trait specialization mapping the StructureType enumerator back to its C++ struct type.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan12Properties>
+  {
+    using Type = PhysicalDeviceVulkan12Properties;
+  };
+
+  // C++ wrapper for VkPhysicalDeviceVulkan13Features: one Bool32 flag per Vulkan 1.3
+  // core feature. Layout-compatible with the C struct — see the reinterpret_cast
+  // conversion operators below. Do not reorder or add data members.
+  struct PhysicalDeviceVulkan13Features
+  {
+    // The underlying C API struct this wrapper mirrors.
+    using NativeType = VkPhysicalDeviceVulkan13Features;
+
+    // Generated metadata. NOTE(review): allowDuplicate is presumably consumed by
+    // structure-chain helpers defined elsewhere in this header — not visible in this chunk.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan13Features;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor: every feature flag defaults to zero ({}); pNext is the last
+    // parameter and defaults to nullptr. sType is fixed by the member initializer below.
+VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan13Features(VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ = {}, VULKAN_HPP_NAMESPACE::Bool32 privateData_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation_ = {}, VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ = {}, VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ = {}, VULKAN_HPP_NAMESPACE::Bool32 synchronization2_ = {}, VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory_ = {}, VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct_ = {}, VULKAN_HPP_NAMESPACE::Bool32 maintenance4_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), robustImageAccess( robustImageAccess_ ), inlineUniformBlock( inlineUniformBlock_ ), descriptorBindingInlineUniformBlockUpdateAfterBind( descriptorBindingInlineUniformBlockUpdateAfterBind_ ), pipelineCreationCacheControl( pipelineCreationCacheControl_ ), privateData( privateData_ ), shaderDemoteToHelperInvocation( shaderDemoteToHelperInvocation_ ), shaderTerminateInvocation( shaderTerminateInvocation_ ), subgroupSizeControl( subgroupSizeControl_ ), computeFullSubgroups( computeFullSubgroups_ ), synchronization2( synchronization2_ ), textureCompressionASTC_HDR( textureCompressionASTC_HDR_ ), shaderZeroInitializeWorkgroupMemory( shaderZeroInitializeWorkgroupMemory_ ), dynamicRendering( dynamicRendering_ ), shaderIntegerDotProduct( shaderIntegerDotProduct_ ), maintenance4( maintenance4_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan13Features( PhysicalDeviceVulkan13Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as this wrapper (layouts match).
+    PhysicalDeviceVulkan13Features( VkPhysicalDeviceVulkan13Features const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceVulkan13Features( *reinterpret_cast<PhysicalDeviceVulkan13Features const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceVulkan13Features & operator=( PhysicalDeviceVulkan13Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct, again via reinterpret_cast of the layout-compatible type.
+    PhysicalDeviceVulkan13Features & operator=( VkPhysicalDeviceVulkan13Features const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Features const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters: each assigns one member and returns *this.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setRobustImageAccess( VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      robustImageAccess = robustImageAccess_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setInlineUniformBlock( VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ ) VULKAN_HPP_NOEXCEPT
+    {
+      inlineUniformBlock = inlineUniformBlock_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setDescriptorBindingInlineUniformBlockUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingInlineUniformBlockUpdateAfterBind = descriptorBindingInlineUniformBlockUpdateAfterBind_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setPipelineCreationCacheControl( VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineCreationCacheControl = pipelineCreationCacheControl_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setPrivateData( VULKAN_HPP_NAMESPACE::Bool32 privateData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      privateData = privateData_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setShaderDemoteToHelperInvocation( VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderDemoteToHelperInvocation = shaderDemoteToHelperInvocation_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setShaderTerminateInvocation( VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderTerminateInvocation = shaderTerminateInvocation_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setSubgroupSizeControl( VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subgroupSizeControl = subgroupSizeControl_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setComputeFullSubgroups( VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ ) VULKAN_HPP_NOEXCEPT
+    {
+      computeFullSubgroups = computeFullSubgroups_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setSynchronization2( VULKAN_HPP_NAMESPACE::Bool32 synchronization2_ ) VULKAN_HPP_NOEXCEPT
+    {
+      synchronization2 = synchronization2_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setTextureCompressionASTC_HDR( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ ) VULKAN_HPP_NOEXCEPT
+    {
+      textureCompressionASTC_HDR = textureCompressionASTC_HDR_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setShaderZeroInitializeWorkgroupMemory( VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderZeroInitializeWorkgroupMemory = shaderZeroInitializeWorkgroupMemory_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setDynamicRendering( VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dynamicRendering = dynamicRendering_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setShaderIntegerDotProduct( VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderIntegerDotProduct = shaderIntegerDotProduct_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setMaintenance4( VULKAN_HPP_NAMESPACE::Bool32 maintenance4_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maintenance4 = maintenance4_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the C struct: reinterpret *this in place (no copy).
+    operator VkPhysicalDeviceVulkan13Features const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceVulkan13Features*>( this );
+    }
+
+    operator VkPhysicalDeviceVulkan13Features &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceVulkan13Features*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+// Returns const references to every member (declaration order) as a tuple; the
+// explicit tuple type is only spelled out pre-C++14 where auto return is unavailable.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, robustImageAccess, inlineUniformBlock, descriptorBindingInlineUniformBlockUpdateAfterBind, pipelineCreationCacheControl, privateData, shaderDemoteToHelperInvocation, shaderTerminateInvocation, subgroupSizeControl, computeFullSubgroups, synchronization2, textureCompressionASTC_HDR, shaderZeroInitializeWorkgroupMemory, dynamicRendering, shaderIntegerDotProduct, maintenance4 );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+// C++20 path: a defaulted three-way comparison generates ==, != and orderings memberwise.
+auto operator<=>( PhysicalDeviceVulkan13Features const & ) const = default;
+#else
+    // Pre-C++20 fallback: memberwise equality. NOTE: pNext is compared by pointer value only.
+    bool operator==( PhysicalDeviceVulkan13Features const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( robustImageAccess == rhs.robustImageAccess )
+          && ( inlineUniformBlock == rhs.inlineUniformBlock )
+          && ( descriptorBindingInlineUniformBlockUpdateAfterBind == rhs.descriptorBindingInlineUniformBlockUpdateAfterBind )
+          && ( pipelineCreationCacheControl == rhs.pipelineCreationCacheControl )
+          && ( privateData == rhs.privateData )
+          && ( shaderDemoteToHelperInvocation == rhs.shaderDemoteToHelperInvocation )
+          && ( shaderTerminateInvocation == rhs.shaderTerminateInvocation )
+          && ( subgroupSizeControl == rhs.subgroupSizeControl )
+          && ( computeFullSubgroups == rhs.computeFullSubgroups )
+          && ( synchronization2 == rhs.synchronization2 )
+          && ( textureCompressionASTC_HDR == rhs.textureCompressionASTC_HDR )
+          && ( shaderZeroInitializeWorkgroupMemory == rhs.shaderZeroInitializeWorkgroupMemory )
+          && ( dynamicRendering == rhs.dynamicRendering )
+          && ( shaderIntegerDotProduct == rhs.shaderIntegerDotProduct )
+          && ( maintenance4 == rhs.maintenance4 );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceVulkan13Features const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Data members mirror the C struct's field order; feature flags value-initialize to zero.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan13Features;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess = {};
+    VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind = {};
+    VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl = {};
+    VULKAN_HPP_NAMESPACE::Bool32 privateData = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation = {};
+    VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl = {};
+    VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups = {};
+    VULKAN_HPP_NAMESPACE::Bool32 synchronization2 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory = {};
+    VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct = {};
+    VULKAN_HPP_NAMESPACE::Bool32 maintenance4 = {};
+
+  };
+
+  // Trait specialization mapping the StructureType enumerator back to its C++ struct type.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan13Features>
+  {
+    using Type = PhysicalDeviceVulkan13Features;
+  };
+
+  struct PhysicalDeviceVulkan13Properties
+  {
+    using NativeType = VkPhysicalDeviceVulkan13Properties;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan13Properties;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan13Properties(uint32_t minSubgroupSize_ = {}, uint32_t maxSubgroupSize_ = {}, uint32_t maxComputeWorkgroupSubgroups_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages_ = {}, uint32_t maxInlineUniformBlockSize_ = {}, uint32_t maxPerStageDescriptorInlineUniformBlocks_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ = {}, uint32_t maxDescriptorSetInlineUniformBlocks_ = {}, uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ = {}, uint32_t maxInlineUniformTotalSize_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitSignedAccelerated_ = {}, 
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes_ = {}, VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize maxBufferSize_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), minSubgroupSize( minSubgroupSize_ ), maxSubgroupSize( maxSubgroupSize_ ), maxComputeWorkgroupSubgroups( maxComputeWorkgroupSubgroups_ ), requiredSubgroupSizeStages( requiredSubgroupSizeStages_ ), maxInlineUniformBlockSize( maxInlineUniformBlockSize_ ), maxPerStageDescriptorInlineUniformBlocks( maxPerStageDescriptorInlineUniformBlocks_ ), maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ ), maxDescriptorSetInlineUniformBlocks( maxDescriptorSetInlineUniformBlocks_ ), maxDescriptorSetUpdateAfterBindInlineUniformBlocks( maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ ), maxInlineUniformTotalSize( maxInlineUniformTotalSize_ ), integerDotProduct8BitUnsignedAccelerated( integerDotProduct8BitUnsignedAccelerated_ ), integerDotProduct8BitSignedAccelerated( integerDotProduct8BitSignedAccelerated_ ), integerDotProduct8BitMixedSignednessAccelerated( integerDotProduct8BitMixedSignednessAccelerated_ ), integerDotProduct4x8BitPackedUnsignedAccelerated( integerDotProduct4x8BitPackedUnsignedAccelerated_ ), integerDotProduct4x8BitPackedSignedAccelerated( integerDotProduct4x8BitPackedSignedAccelerated_ ), integerDotProduct4x8BitPackedMixedSignednessAccelerated( integerDotProduct4x8BitPackedMixedSignednessAccelerated_ ), integerDotProduct16BitUnsignedAccelerated( integerDotProduct16BitUnsignedAccelerated_ ), integerDotProduct16BitSignedAccelerated( integerDotProduct16BitSignedAccelerated_ ), integerDotProduct16BitMixedSignednessAccelerated( integerDotProduct16BitMixedSignednessAccelerated_ ), integerDotProduct32BitUnsignedAccelerated( integerDotProduct32BitUnsignedAccelerated_ ), integerDotProduct32BitSignedAccelerated( integerDotProduct32BitSignedAccelerated_ ), integerDotProduct32BitMixedSignednessAccelerated( integerDotProduct32BitMixedSignednessAccelerated_ ), integerDotProduct64BitUnsignedAccelerated( integerDotProduct64BitUnsignedAccelerated_ ), integerDotProduct64BitSignedAccelerated( 
integerDotProduct64BitSignedAccelerated_ ), integerDotProduct64BitMixedSignednessAccelerated( integerDotProduct64BitMixedSignednessAccelerated_ ), integerDotProductAccumulatingSaturating8BitUnsignedAccelerated( integerDotProductAccumulatingSaturating8BitUnsignedAccelerated_ ), integerDotProductAccumulatingSaturating8BitSignedAccelerated( integerDotProductAccumulatingSaturating8BitSignedAccelerated_ ), integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated( integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated_ ), integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated( integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated_ ), integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated( integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated_ ), integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated( integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated_ ), integerDotProductAccumulatingSaturating16BitUnsignedAccelerated( integerDotProductAccumulatingSaturating16BitUnsignedAccelerated_ ), integerDotProductAccumulatingSaturating16BitSignedAccelerated( integerDotProductAccumulatingSaturating16BitSignedAccelerated_ ), integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated( integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated_ ), integerDotProductAccumulatingSaturating32BitUnsignedAccelerated( integerDotProductAccumulatingSaturating32BitUnsignedAccelerated_ ), integerDotProductAccumulatingSaturating32BitSignedAccelerated( integerDotProductAccumulatingSaturating32BitSignedAccelerated_ ), integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated( integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated_ ), integerDotProductAccumulatingSaturating64BitUnsignedAccelerated( integerDotProductAccumulatingSaturating64BitUnsignedAccelerated_ ), 
integerDotProductAccumulatingSaturating64BitSignedAccelerated( integerDotProductAccumulatingSaturating64BitSignedAccelerated_ ), integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated( integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated_ ), storageTexelBufferOffsetAlignmentBytes( storageTexelBufferOffsetAlignmentBytes_ ), storageTexelBufferOffsetSingleTexelAlignment( storageTexelBufferOffsetSingleTexelAlignment_ ), uniformTexelBufferOffsetAlignmentBytes( uniformTexelBufferOffsetAlignmentBytes_ ), uniformTexelBufferOffsetSingleTexelAlignment( uniformTexelBufferOffsetSingleTexelAlignment_ ), maxBufferSize( maxBufferSize_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan13Properties( PhysicalDeviceVulkan13Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceVulkan13Properties( VkPhysicalDeviceVulkan13Properties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceVulkan13Properties( *reinterpret_cast<PhysicalDeviceVulkan13Properties const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceVulkan13Properties & operator=( PhysicalDeviceVulkan13Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceVulkan13Properties & operator=( VkPhysicalDeviceVulkan13Properties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Properties const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceVulkan13Properties const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceVulkan13Properties*>( this );
+    }
+
+    operator VkPhysicalDeviceVulkan13Properties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceVulkan13Properties*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ShaderStageFlags const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, minSubgroupSize, maxSubgroupSize, maxComputeWorkgroupSubgroups, requiredSubgroupSizeStages, maxInlineUniformBlockSize, maxPerStageDescriptorInlineUniformBlocks, maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks, maxDescriptorSetInlineUniformBlocks, maxDescriptorSetUpdateAfterBindInlineUniformBlocks, maxInlineUniformTotalSize, integerDotProduct8BitUnsignedAccelerated, integerDotProduct8BitSignedAccelerated, integerDotProduct8BitMixedSignednessAccelerated, integerDotProduct4x8BitPackedUnsignedAccelerated, integerDotProduct4x8BitPackedSignedAccelerated, integerDotProduct4x8BitPackedMixedSignednessAccelerated, integerDotProduct16BitUnsignedAccelerated, integerDotProduct16BitSignedAccelerated, integerDotProduct16BitMixedSignednessAccelerated, integerDotProduct32BitUnsignedAccelerated, integerDotProduct32BitSignedAccelerated, integerDotProduct32BitMixedSignednessAccelerated, integerDotProduct64BitUnsignedAccelerated, integerDotProduct64BitSignedAccelerated, integerDotProduct64BitMixedSignednessAccelerated, integerDotProductAccumulatingSaturating8BitUnsignedAccelerated, integerDotProductAccumulatingSaturating8BitSignedAccelerated, integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated, integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated, integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated, integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated, integerDotProductAccumulatingSaturating16BitUnsignedAccelerated, integerDotProductAccumulatingSaturating16BitSignedAccelerated, integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated, integerDotProductAccumulatingSaturating32BitUnsignedAccelerated, integerDotProductAccumulatingSaturating32BitSignedAccelerated, integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated, integerDotProductAccumulatingSaturating64BitUnsignedAccelerated, integerDotProductAccumulatingSaturating64BitSignedAccelerated, integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated, storageTexelBufferOffsetAlignmentBytes, storageTexelBufferOffsetSingleTexelAlignment, uniformTexelBufferOffsetAlignmentBytes, uniformTexelBufferOffsetSingleTexelAlignment, maxBufferSize );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceVulkan13Properties const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceVulkan13Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( minSubgroupSize == rhs.minSubgroupSize )
+          && ( maxSubgroupSize == rhs.maxSubgroupSize )
+          && ( maxComputeWorkgroupSubgroups == rhs.maxComputeWorkgroupSubgroups )
+          && ( requiredSubgroupSizeStages == rhs.requiredSubgroupSizeStages )
+          && ( maxInlineUniformBlockSize == rhs.maxInlineUniformBlockSize )
+          && ( maxPerStageDescriptorInlineUniformBlocks == rhs.maxPerStageDescriptorInlineUniformBlocks )
+          && ( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks == rhs.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks )
+          && ( maxDescriptorSetInlineUniformBlocks == rhs.maxDescriptorSetInlineUniformBlocks )
+          && ( maxDescriptorSetUpdateAfterBindInlineUniformBlocks == rhs.maxDescriptorSetUpdateAfterBindInlineUniformBlocks )
+          && ( maxInlineUniformTotalSize == rhs.maxInlineUniformTotalSize )
+          && ( integerDotProduct8BitUnsignedAccelerated == rhs.integerDotProduct8BitUnsignedAccelerated )
+          && ( integerDotProduct8BitSignedAccelerated == rhs.integerDotProduct8BitSignedAccelerated )
+          && ( integerDotProduct8BitMixedSignednessAccelerated == rhs.integerDotProduct8BitMixedSignednessAccelerated )
+          && ( integerDotProduct4x8BitPackedUnsignedAccelerated == rhs.integerDotProduct4x8BitPackedUnsignedAccelerated )
+          && ( integerDotProduct4x8BitPackedSignedAccelerated == rhs.integerDotProduct4x8BitPackedSignedAccelerated )
+          && ( integerDotProduct4x8BitPackedMixedSignednessAccelerated == rhs.integerDotProduct4x8BitPackedMixedSignednessAccelerated )
+          && ( integerDotProduct16BitUnsignedAccelerated == rhs.integerDotProduct16BitUnsignedAccelerated )
+          && ( integerDotProduct16BitSignedAccelerated == rhs.integerDotProduct16BitSignedAccelerated )
+          && ( integerDotProduct16BitMixedSignednessAccelerated == rhs.integerDotProduct16BitMixedSignednessAccelerated )
+          && ( integerDotProduct32BitUnsignedAccelerated == rhs.integerDotProduct32BitUnsignedAccelerated )
+          && ( integerDotProduct32BitSignedAccelerated == rhs.integerDotProduct32BitSignedAccelerated )
+          && ( integerDotProduct32BitMixedSignednessAccelerated == rhs.integerDotProduct32BitMixedSignednessAccelerated )
+          && ( integerDotProduct64BitUnsignedAccelerated == rhs.integerDotProduct64BitUnsignedAccelerated )
+          && ( integerDotProduct64BitSignedAccelerated == rhs.integerDotProduct64BitSignedAccelerated )
+          && ( integerDotProduct64BitMixedSignednessAccelerated == rhs.integerDotProduct64BitMixedSignednessAccelerated )
+          && ( integerDotProductAccumulatingSaturating8BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating8BitUnsignedAccelerated )
+          && ( integerDotProductAccumulatingSaturating8BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating8BitSignedAccelerated )
+          && ( integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated == rhs.integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated )
+          && ( integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated )
+          && ( integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated == rhs.integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated )
+          && ( integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated == rhs.integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated )
+          && ( integerDotProductAccumulatingSaturating16BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating16BitUnsignedAccelerated )
+          && ( integerDotProductAccumulatingSaturating16BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating16BitSignedAccelerated )
+          && ( integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated == rhs.integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated )
+          && ( integerDotProductAccumulatingSaturating32BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating32BitUnsignedAccelerated )
+          && ( integerDotProductAccumulatingSaturating32BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating32BitSignedAccelerated )
+          && ( integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated == rhs.integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated )
+          && ( integerDotProductAccumulatingSaturating64BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating64BitUnsignedAccelerated )
+          && ( integerDotProductAccumulatingSaturating64BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating64BitSignedAccelerated )
+          && ( integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated == rhs.integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated )
+          && ( storageTexelBufferOffsetAlignmentBytes == rhs.storageTexelBufferOffsetAlignmentBytes )
+          && ( storageTexelBufferOffsetSingleTexelAlignment == rhs.storageTexelBufferOffsetSingleTexelAlignment )
+          && ( uniformTexelBufferOffsetAlignmentBytes == rhs.uniformTexelBufferOffsetAlignmentBytes )
+          && ( uniformTexelBufferOffsetSingleTexelAlignment == rhs.uniformTexelBufferOffsetSingleTexelAlignment )
+          && ( maxBufferSize == rhs.maxBufferSize );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceVulkan13Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan13Properties;
+    void * pNext = {};
+    uint32_t minSubgroupSize = {};
+    uint32_t maxSubgroupSize = {};
+    uint32_t maxComputeWorkgroupSubgroups = {};
+    VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages = {};
+    uint32_t maxInlineUniformBlockSize = {};
+    uint32_t maxPerStageDescriptorInlineUniformBlocks = {};
+    uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks = {};
+    uint32_t maxDescriptorSetInlineUniformBlocks = {};
+    uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks = {};
+    uint32_t maxInlineUniformTotalSize = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitUnsignedAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitSignedAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitMixedSignednessAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedUnsignedAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedSignedAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedMixedSignednessAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitUnsignedAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitSignedAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitMixedSignednessAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitUnsignedAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitSignedAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitMixedSignednessAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitUnsignedAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitSignedAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitMixedSignednessAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitUnsignedAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitSignedAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitUnsignedAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitSignedAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitUnsignedAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitSignedAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitUnsignedAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitSignedAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes = {};
+    VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes = {};
+    VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize maxBufferSize = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan13Properties>
+  {
+    using Type = PhysicalDeviceVulkan13Properties;
+  };
+
+  struct PhysicalDeviceVulkanMemoryModelFeatures
+  {
+    using NativeType = VkPhysicalDeviceVulkanMemoryModelFeatures;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkanMemoryModelFeatures;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkanMemoryModelFeatures(VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), vulkanMemoryModel( vulkanMemoryModel_ ), vulkanMemoryModelDeviceScope( vulkanMemoryModelDeviceScope_ ), vulkanMemoryModelAvailabilityVisibilityChains( vulkanMemoryModelAvailabilityVisibilityChains_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkanMemoryModelFeatures( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceVulkanMemoryModelFeatures( VkPhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceVulkanMemoryModelFeatures( *reinterpret_cast<PhysicalDeviceVulkanMemoryModelFeatures const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceVulkanMemoryModelFeatures & operator=( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceVulkanMemoryModelFeatures & operator=( VkPhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeatures const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkanMemoryModelFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkanMemoryModelFeatures & setVulkanMemoryModel( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vulkanMemoryModel = vulkanMemoryModel_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkanMemoryModelFeatures & setVulkanMemoryModelDeviceScope( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vulkanMemoryModelDeviceScope = vulkanMemoryModelDeviceScope_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkanMemoryModelFeatures & setVulkanMemoryModelAvailabilityVisibilityChains( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vulkanMemoryModelAvailabilityVisibilityChains = vulkanMemoryModelAvailabilityVisibilityChains_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceVulkanMemoryModelFeatures const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceVulkanMemoryModelFeatures*>( this );
+    }
+
+    operator VkPhysicalDeviceVulkanMemoryModelFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceVulkanMemoryModelFeatures*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, vulkanMemoryModel, vulkanMemoryModelDeviceScope, vulkanMemoryModelAvailabilityVisibilityChains );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceVulkanMemoryModelFeatures const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( vulkanMemoryModel == rhs.vulkanMemoryModel )
+          && ( vulkanMemoryModelDeviceScope == rhs.vulkanMemoryModelDeviceScope )
+          && ( vulkanMemoryModelAvailabilityVisibilityChains == rhs.vulkanMemoryModelAvailabilityVisibilityChains );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkanMemoryModelFeatures;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel = {};
+    VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope = {};
+    VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceVulkanMemoryModelFeatures>
+  {
+    using Type = PhysicalDeviceVulkanMemoryModelFeatures;
+  };
+  using PhysicalDeviceVulkanMemoryModelFeaturesKHR = PhysicalDeviceVulkanMemoryModelFeatures;
+
+  struct PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR
+  {
+    using NativeType = VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout_ = {}, VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayoutScalarBlockLayout_ = {}, VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout8BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout16BitAccess_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), workgroupMemoryExplicitLayout( workgroupMemoryExplicitLayout_ ), workgroupMemoryExplicitLayoutScalarBlockLayout( workgroupMemoryExplicitLayoutScalarBlockLayout_ ), workgroupMemoryExplicitLayout8BitAccess( workgroupMemoryExplicitLayout8BitAccess_ ), workgroupMemoryExplicitLayout16BitAccess( workgroupMemoryExplicitLayout16BitAccess_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR( VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR( *reinterpret_cast<PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & operator=( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & operator=( VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & setWorkgroupMemoryExplicitLayout( VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      workgroupMemoryExplicitLayout = workgroupMemoryExplicitLayout_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & setWorkgroupMemoryExplicitLayoutScalarBlockLayout( VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayoutScalarBlockLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      workgroupMemoryExplicitLayoutScalarBlockLayout = workgroupMemoryExplicitLayoutScalarBlockLayout_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & setWorkgroupMemoryExplicitLayout8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout8BitAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      workgroupMemoryExplicitLayout8BitAccess = workgroupMemoryExplicitLayout8BitAccess_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & setWorkgroupMemoryExplicitLayout16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout16BitAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      workgroupMemoryExplicitLayout16BitAccess = workgroupMemoryExplicitLayout16BitAccess_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR*>( this );
+    }
+
+    operator VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, workgroupMemoryExplicitLayout, workgroupMemoryExplicitLayoutScalarBlockLayout, workgroupMemoryExplicitLayout8BitAccess, workgroupMemoryExplicitLayout16BitAccess );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( workgroupMemoryExplicitLayout == rhs.workgroupMemoryExplicitLayout )
+          && ( workgroupMemoryExplicitLayoutScalarBlockLayout == rhs.workgroupMemoryExplicitLayoutScalarBlockLayout )
+          && ( workgroupMemoryExplicitLayout8BitAccess == rhs.workgroupMemoryExplicitLayout8BitAccess )
+          && ( workgroupMemoryExplicitLayout16BitAccess == rhs.workgroupMemoryExplicitLayout16BitAccess );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout = {};
+    VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayoutScalarBlockLayout = {};
+    VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout8BitAccess = {};
+    VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout16BitAccess = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR>
+  {
+    using Type = PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR;
+  };
+
+  struct PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 ycbcr2plane444Formats_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), ycbcr2plane444Formats( ycbcr2plane444Formats_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT( VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT( *reinterpret_cast<PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT & operator=( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT & operator=( VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT & setYcbcr2plane444Formats( VULKAN_HPP_NAMESPACE::Bool32 ycbcr2plane444Formats_ ) VULKAN_HPP_NOEXCEPT
+    {
+      ycbcr2plane444Formats = ycbcr2plane444Formats_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, ycbcr2plane444Formats );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( ycbcr2plane444Formats == rhs.ycbcr2plane444Formats );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 ycbcr2plane444Formats = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT>
+  {
+    using Type = PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT;
+  };
+
+  struct PhysicalDeviceYcbcrImageArraysFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceYcbcrImageArraysFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrImageArraysFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), ycbcrImageArrays( ycbcrImageArrays_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrImageArraysFeaturesEXT( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceYcbcrImageArraysFeaturesEXT( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceYcbcrImageArraysFeaturesEXT( *reinterpret_cast<PhysicalDeviceYcbcrImageArraysFeaturesEXT const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceYcbcrImageArraysFeaturesEXT & operator=( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceYcbcrImageArraysFeaturesEXT & operator=( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceYcbcrImageArraysFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceYcbcrImageArraysFeaturesEXT & setYcbcrImageArrays( VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays_ ) VULKAN_HPP_NOEXCEPT
+    {
+      ycbcrImageArrays = ycbcrImageArrays_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceYcbcrImageArraysFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceYcbcrImageArraysFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceYcbcrImageArraysFeaturesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, ycbcrImageArrays );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( ycbcrImageArrays == rhs.ycbcrImageArrays );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays = {};
+
+  };
+
+  // Compile-time mapping from the StructureType enum value back to the
+  // corresponding C++ wrapper struct.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT>
+  {
+    using Type = PhysicalDeviceYcbcrImageArraysFeaturesEXT;
+  };
+
+  // Vulkan-Hpp wrapper for VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures.
+  // Generated code: the data members mirror the C struct field-for-field, which
+  // is what makes the reinterpret_cast-based conversions below usable.
+  struct PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures
+  {
+    using NativeType = VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures;
+
+    // Generated struct-chain metadata: the StructureType tag written into sType.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceZeroInitializeWorkgroupMemoryFeatures;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures(VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), shaderZeroInitializeWorkgroupMemory( shaderZeroInitializeWorkgroupMemory_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures( PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the wrapper type;
+    // relies on the identical member layout noted above.
+    PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures( VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures( *reinterpret_cast<PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const *>( &rhs ) )
+    {}
+
+
+    PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures & operator=( PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (same reinterpret_cast layout assumption).
+    PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures & operator=( VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters: each returns *this so calls can be fluently composed.
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures & setShaderZeroInitializeWorkgroupMemory( VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderZeroInitializeWorkgroupMemory = shaderZeroInitializeWorkgroupMemory_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views onto the underlying C struct (no copy is made).
+    operator VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures*>( this );
+    }
+
+    operator VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      // All members as a tuple of references; drives operator== below when
+      // reflection is enabled.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, shaderZeroInitializeWorkgroupMemory );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & ) const = default;
+#else
+    // Member-wise equality fallback when defaulted <=> is unavailable.
+    bool operator==( PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shaderZeroInitializeWorkgroupMemory == rhs.shaderZeroInitializeWorkgroupMemory );
+#endif
+    }
+
+    bool operator!=( PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Data members; order and types match the C struct.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceZeroInitializeWorkgroupMemoryFeatures;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory = {};
+
+  };
+
+  // Compile-time mapping from the StructureType enum value to the wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceZeroInitializeWorkgroupMemoryFeatures>
+  {
+    using Type = PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures;
+  };
+  // Alias for the original KHR extension name of this (since promoted) struct.
+  using PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR = PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures;
+
+  // Vulkan-Hpp wrapper for VkPipelineCacheCreateInfo. Generated code: member
+  // layout mirrors the C struct, enabling the reinterpret_cast conversions below.
+  struct PipelineCacheCreateInfo
+  {
+    using NativeType = VkPipelineCacheCreateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCacheCreateInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PipelineCacheCreateInfo(VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_ = {}, size_t initialDataSize_ = {}, const void * pInitialData_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), initialDataSize( initialDataSize_ ), pInitialData( pInitialData_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineCacheCreateInfo( PipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct via reinterpret_cast (identical layout).
+    PipelineCacheCreateInfo( VkPipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineCacheCreateInfo( *reinterpret_cast<PipelineCacheCreateInfo const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Convenience ctor: derives initialDataSize (bytes) and pInitialData from an
+    // array proxy of T. Only the pointer is stored; the caller keeps ownership
+    // of the backing data.
+    template <typename T>
+    PipelineCacheCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & initialData_, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), flags( flags_ ), initialDataSize( initialData_.size() * sizeof(T) ), pInitialData( initialData_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    PipelineCacheCreateInfo & operator=( PipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (same reinterpret_cast layout assumption).
+    PipelineCacheCreateInfo & operator=( VkPipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters: each returns *this for fluent composition.
+    VULKAN_HPP_CONSTEXPR_14 PipelineCacheCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineCacheCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineCacheCreateInfo & setInitialDataSize( size_t initialDataSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      initialDataSize = initialDataSize_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineCacheCreateInfo & setPInitialData( const void * pInitialData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pInitialData = pInitialData_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Proxy setter: updates size (in bytes) and pointer together, keeping the
+    // two fields consistent.
+    template <typename T>
+    PipelineCacheCreateInfo & setInitialData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & initialData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      initialDataSize = initialData_.size() * sizeof(T);
+      pInitialData = initialData_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views onto the underlying C struct (no copy is made).
+    operator VkPipelineCacheCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineCacheCreateInfo*>( this );
+    }
+
+    operator VkPipelineCacheCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineCacheCreateInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags const &, size_t const &, const void * const &>
+#endif
+      // All members as a tuple of references; drives operator== below.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, initialDataSize, pInitialData );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PipelineCacheCreateInfo const & ) const = default;
+#else
+    // Member-wise equality: compares the pInitialData pointer itself, not the
+    // bytes it points to.
+    bool operator==( PipelineCacheCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( initialDataSize == rhs.initialDataSize )
+          && ( pInitialData == rhs.pInitialData );
+#endif
+    }
+
+    bool operator!=( PipelineCacheCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Data members; order and types match the C struct.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCacheCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags = {};
+    size_t initialDataSize = {};
+    const void * pInitialData = {};
+
+  };
+
+  // Compile-time mapping from the StructureType enum value to the wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineCacheCreateInfo>
+  {
+    using Type = PipelineCacheCreateInfo;
+  };
+
+  // Vulkan-Hpp wrapper for VkPipelineCacheHeaderVersionOne. Unlike most Vulkan
+  // structs this one has no sType/pNext members (and thus no structureType /
+  // allowDuplicate metadata or CppType specialization).
+  struct PipelineCacheHeaderVersionOne
+  {
+    using NativeType = VkPipelineCacheHeaderVersionOne;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne(uint32_t headerSize_ = {}, VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion headerVersion_ = VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion::eOne, uint32_t vendorID_ = {}, uint32_t deviceID_ = {}, std::array<uint8_t,VK_UUID_SIZE> const & pipelineCacheUUID_ = {}) VULKAN_HPP_NOEXCEPT
+    : headerSize( headerSize_ ), headerVersion( headerVersion_ ), vendorID( vendorID_ ), deviceID( deviceID_ ), pipelineCacheUUID( pipelineCacheUUID_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne( PipelineCacheHeaderVersionOne const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct via reinterpret_cast (identical layout).
+    PipelineCacheHeaderVersionOne( VkPipelineCacheHeaderVersionOne const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineCacheHeaderVersionOne( *reinterpret_cast<PipelineCacheHeaderVersionOne const *>( &rhs ) )
+    {}
+
+
+    PipelineCacheHeaderVersionOne & operator=( PipelineCacheHeaderVersionOne const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (same reinterpret_cast layout assumption).
+    PipelineCacheHeaderVersionOne & operator=( VkPipelineCacheHeaderVersionOne const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersionOne const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters: each returns *this for fluent composition.
+    VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne & setHeaderSize( uint32_t headerSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      headerSize = headerSize_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne & setHeaderVersion( VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion headerVersion_ ) VULKAN_HPP_NOEXCEPT
+    {
+      headerVersion = headerVersion_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne & setVendorID( uint32_t vendorID_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vendorID = vendorID_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne & setDeviceID( uint32_t deviceID_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceID = deviceID_;
+      return *this;
+    }
+
+    // Copies the UUID by value into the ArrayWrapper1D member.
+    VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne & setPipelineCacheUUID( std::array<uint8_t,VK_UUID_SIZE> pipelineCacheUUID_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineCacheUUID = pipelineCacheUUID_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views onto the underlying C struct (no copy is made).
+    operator VkPipelineCacheHeaderVersionOne const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineCacheHeaderVersionOne*>( this );
+    }
+
+    operator VkPipelineCacheHeaderVersionOne &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineCacheHeaderVersionOne*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<uint32_t const &, VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &>
+#endif
+      // All members as a tuple of references; drives operator== below.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( headerSize, headerVersion, vendorID, deviceID, pipelineCacheUUID );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PipelineCacheHeaderVersionOne const & ) const = default;
+#else
+    // Member-wise equality (the UUID array is compared element-wise via
+    // ArrayWrapper1D's operator==).
+    bool operator==( PipelineCacheHeaderVersionOne const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( headerSize == rhs.headerSize )
+          && ( headerVersion == rhs.headerVersion )
+          && ( vendorID == rhs.vendorID )
+          && ( deviceID == rhs.deviceID )
+          && ( pipelineCacheUUID == rhs.pipelineCacheUUID );
+#endif
+    }
+
+    bool operator!=( PipelineCacheHeaderVersionOne const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Data members; order and types match the C struct.
+    public:
+    uint32_t headerSize = {};
+    VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion headerVersion = VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion::eOne;
+    uint32_t vendorID = {};
+    uint32_t deviceID = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> pipelineCacheUUID = {};
+
+  };
+
+  // Vulkan-Hpp wrapper for VkPipelineColorBlendAdvancedStateCreateInfoEXT.
+  // Generated code: member layout mirrors the C struct, enabling the
+  // reinterpret_cast conversions below.
+  struct PipelineColorBlendAdvancedStateCreateInfoEXT
+  {
+    using NativeType = VkPipelineColorBlendAdvancedStateCreateInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PipelineColorBlendAdvancedStateCreateInfoEXT(VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied_ = {}, VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied_ = {}, VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap_ = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), srcPremultiplied( srcPremultiplied_ ), dstPremultiplied( dstPremultiplied_ ), blendOverlap( blendOverlap_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineColorBlendAdvancedStateCreateInfoEXT( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct via reinterpret_cast (identical layout).
+    PipelineColorBlendAdvancedStateCreateInfoEXT( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineColorBlendAdvancedStateCreateInfoEXT( *reinterpret_cast<PipelineColorBlendAdvancedStateCreateInfoEXT const *>( &rhs ) )
+    {}
+
+
+    PipelineColorBlendAdvancedStateCreateInfoEXT & operator=( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (same reinterpret_cast layout assumption).
+    PipelineColorBlendAdvancedStateCreateInfoEXT & operator=( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineColorBlendAdvancedStateCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters: each returns *this for fluent composition.
+    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAdvancedStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAdvancedStateCreateInfoEXT & setSrcPremultiplied( VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcPremultiplied = srcPremultiplied_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAdvancedStateCreateInfoEXT & setDstPremultiplied( VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstPremultiplied = dstPremultiplied_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAdvancedStateCreateInfoEXT & setBlendOverlap( VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap_ ) VULKAN_HPP_NOEXCEPT
+    {
+      blendOverlap = blendOverlap_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views onto the underlying C struct (no copy is made).
+    operator VkPipelineColorBlendAdvancedStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineColorBlendAdvancedStateCreateInfoEXT*>( this );
+    }
+
+    operator VkPipelineColorBlendAdvancedStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineColorBlendAdvancedStateCreateInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::BlendOverlapEXT const &>
+#endif
+      // All members as a tuple of references; drives operator== below.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, srcPremultiplied, dstPremultiplied, blendOverlap );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PipelineColorBlendAdvancedStateCreateInfoEXT const & ) const = default;
+#else
+    // Member-wise equality fallback when defaulted <=> is unavailable.
+    bool operator==( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( srcPremultiplied == rhs.srcPremultiplied )
+          && ( dstPremultiplied == rhs.dstPremultiplied )
+          && ( blendOverlap == rhs.blendOverlap );
+#endif
+    }
+
+    bool operator!=( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Data members; order and types match the C struct.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied = {};
+    VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied = {};
+    VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated;
+
+  };
+
+  // Compile-time mapping from the StructureType enum value to the wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT>
+  {
+    using Type = PipelineColorBlendAdvancedStateCreateInfoEXT;
+  };
+
+  // Vulkan-Hpp wrapper for VkPipelineColorWriteCreateInfoEXT. Generated code:
+  // member layout mirrors the C struct, enabling the reinterpret_cast
+  // conversions below.
+  struct PipelineColorWriteCreateInfoEXT
+  {
+    using NativeType = VkPipelineColorWriteCreateInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineColorWriteCreateInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PipelineColorWriteCreateInfoEXT(uint32_t attachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), attachmentCount( attachmentCount_ ), pColorWriteEnables( pColorWriteEnables_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineColorWriteCreateInfoEXT( PipelineColorWriteCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct via reinterpret_cast (identical layout).
+    PipelineColorWriteCreateInfoEXT( VkPipelineColorWriteCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineColorWriteCreateInfoEXT( *reinterpret_cast<PipelineColorWriteCreateInfoEXT const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Convenience ctor: derives attachmentCount and pColorWriteEnables from an
+    // array proxy. Only the pointer is stored; the caller keeps ownership.
+    PipelineColorWriteCreateInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Bool32> const & colorWriteEnables_, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), attachmentCount( static_cast<uint32_t>( colorWriteEnables_.size() ) ), pColorWriteEnables( colorWriteEnables_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    PipelineColorWriteCreateInfoEXT & operator=( PipelineColorWriteCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (same reinterpret_cast layout assumption).
+    PipelineColorWriteCreateInfoEXT & operator=( VkPipelineColorWriteCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineColorWriteCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters: each returns *this for fluent composition.
+    VULKAN_HPP_CONSTEXPR_14 PipelineColorWriteCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineColorWriteCreateInfoEXT & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachmentCount = attachmentCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineColorWriteCreateInfoEXT & setPColorWriteEnables( const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pColorWriteEnables = pColorWriteEnables_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Proxy setter: updates count and pointer together, keeping them consistent.
+    PipelineColorWriteCreateInfoEXT & setColorWriteEnables( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Bool32> const & colorWriteEnables_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachmentCount = static_cast<uint32_t>( colorWriteEnables_.size() );
+      pColorWriteEnables = colorWriteEnables_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views onto the underlying C struct (no copy is made).
+    operator VkPipelineColorWriteCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineColorWriteCreateInfoEXT*>( this );
+    }
+
+    operator VkPipelineColorWriteCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineColorWriteCreateInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Bool32 * const &>
+#endif
+      // All members as a tuple of references; drives operator== below.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, attachmentCount, pColorWriteEnables );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PipelineColorWriteCreateInfoEXT const & ) const = default;
+#else
+    // Member-wise equality: compares the pColorWriteEnables pointer itself, not
+    // the pointed-to array contents.
+    bool operator==( PipelineColorWriteCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( attachmentCount == rhs.attachmentCount )
+          && ( pColorWriteEnables == rhs.pColorWriteEnables );
+#endif
+    }
+
+    bool operator!=( PipelineColorWriteCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Data members; order and types match the C struct.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineColorWriteCreateInfoEXT;
+    const void * pNext = {};
+    uint32_t attachmentCount = {};
+    const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables = {};
+
+  };
+
+  // Compile-time mapping from the StructureType enum value to the wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineColorWriteCreateInfoEXT>
+  {
+    using Type = PipelineColorWriteCreateInfoEXT;
+  };
+
+  // Vulkan-Hpp wrapper for VkPipelineCompilerControlCreateInfoAMD. Generated
+  // code: member layout mirrors the C struct, enabling the reinterpret_cast
+  // conversions below.
+  struct PipelineCompilerControlCreateInfoAMD
+  {
+    using NativeType = VkPipelineCompilerControlCreateInfoAMD;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCompilerControlCreateInfoAMD;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PipelineCompilerControlCreateInfoAMD(VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), compilerControlFlags( compilerControlFlags_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineCompilerControlCreateInfoAMD( PipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct via reinterpret_cast (identical layout).
+    PipelineCompilerControlCreateInfoAMD( VkPipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineCompilerControlCreateInfoAMD( *reinterpret_cast<PipelineCompilerControlCreateInfoAMD const *>( &rhs ) )
+    {}
+
+
+    PipelineCompilerControlCreateInfoAMD & operator=( PipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (same reinterpret_cast layout assumption).
+    PipelineCompilerControlCreateInfoAMD & operator=( VkPipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCompilerControlCreateInfoAMD const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters: each returns *this for fluent composition.
+    VULKAN_HPP_CONSTEXPR_14 PipelineCompilerControlCreateInfoAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineCompilerControlCreateInfoAMD & setCompilerControlFlags( VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      compilerControlFlags = compilerControlFlags_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views onto the underlying C struct (no copy is made).
+    operator VkPipelineCompilerControlCreateInfoAMD const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineCompilerControlCreateInfoAMD*>( this );
+    }
+
+    operator VkPipelineCompilerControlCreateInfoAMD &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineCompilerControlCreateInfoAMD*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD const &>
+#endif
+      // All members as a tuple of references; drives operator== below.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, compilerControlFlags );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PipelineCompilerControlCreateInfoAMD const & ) const = default;
+#else
+    // Member-wise equality fallback when defaulted <=> is unavailable.
+    bool operator==( PipelineCompilerControlCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( compilerControlFlags == rhs.compilerControlFlags );
+#endif
+    }
+
+    bool operator!=( PipelineCompilerControlCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Data members; order and types match the C struct.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCompilerControlCreateInfoAMD;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags = {};
+
+  };
+
+  // Compile-time mapping from the StructureType enum value to the wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineCompilerControlCreateInfoAMD>
+  {
+    using Type = PipelineCompilerControlCreateInfoAMD;
+  };
+
+  struct PipelineCoverageModulationStateCreateInfoNV
+  {
+    using NativeType = VkPipelineCoverageModulationStateCreateInfoNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCoverageModulationStateCreateInfoNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PipelineCoverageModulationStateCreateInfoNV(VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_ = {}, VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode_ = VULKAN_HPP_NAMESPACE::CoverageModulationModeNV::eNone, VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_ = {}, uint32_t coverageModulationTableCount_ = {}, const float * pCoverageModulationTable_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), coverageModulationMode( coverageModulationMode_ ), coverageModulationTableEnable( coverageModulationTableEnable_ ), coverageModulationTableCount( coverageModulationTableCount_ ), pCoverageModulationTable( pCoverageModulationTable_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineCoverageModulationStateCreateInfoNV( PipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineCoverageModulationStateCreateInfoNV( VkPipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineCoverageModulationStateCreateInfoNV( *reinterpret_cast<PipelineCoverageModulationStateCreateInfoNV const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    PipelineCoverageModulationStateCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_, VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode_, VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const float> const & coverageModulationTable_, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), flags( flags_ ), coverageModulationMode( coverageModulationMode_ ), coverageModulationTableEnable( coverageModulationTableEnable_ ), coverageModulationTableCount( static_cast<uint32_t>( coverageModulationTable_.size() ) ), pCoverageModulationTable( coverageModulationTable_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    PipelineCoverageModulationStateCreateInfoNV & operator=( PipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PipelineCoverageModulationStateCreateInfoNV & operator=( VkPipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateInfoNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV & setCoverageModulationMode( VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      coverageModulationMode = coverageModulationMode_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV & setCoverageModulationTableEnable( VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      coverageModulationTableEnable = coverageModulationTableEnable_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV & setCoverageModulationTableCount( uint32_t coverageModulationTableCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      coverageModulationTableCount = coverageModulationTableCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV & setPCoverageModulationTable( const float * pCoverageModulationTable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pCoverageModulationTable = pCoverageModulationTable_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    PipelineCoverageModulationStateCreateInfoNV & setCoverageModulationTable( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const float> const & coverageModulationTable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      coverageModulationTableCount = static_cast<uint32_t>( coverageModulationTable_.size() );
+      pCoverageModulationTable = coverageModulationTable_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPipelineCoverageModulationStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineCoverageModulationStateCreateInfoNV*>( this );
+    }
+
+    operator VkPipelineCoverageModulationStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineCoverageModulationStateCreateInfoNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV const &, VULKAN_HPP_NAMESPACE::CoverageModulationModeNV const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint32_t const &, const float * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, coverageModulationMode, coverageModulationTableEnable, coverageModulationTableCount, pCoverageModulationTable );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PipelineCoverageModulationStateCreateInfoNV const & ) const = default;
+#else
+    bool operator==( PipelineCoverageModulationStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( coverageModulationMode == rhs.coverageModulationMode )
+          && ( coverageModulationTableEnable == rhs.coverageModulationTableEnable )
+          && ( coverageModulationTableCount == rhs.coverageModulationTableCount )
+          && ( pCoverageModulationTable == rhs.pCoverageModulationTable );
+#endif
+    }
+
+    bool operator!=( PipelineCoverageModulationStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageModulationStateCreateInfoNV;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags = {};
+    VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode = VULKAN_HPP_NAMESPACE::CoverageModulationModeNV::eNone;
+    VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable = {};
+    uint32_t coverageModulationTableCount = {};
+    const float * pCoverageModulationTable = {};
+
+  };
+
+  // sType-to-type trait: lets generic (sType-driven) code recover the C++ struct
+  // type from the StructureType enum value.
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineCoverageModulationStateCreateInfoNV>
+  {
+    using Type = PipelineCoverageModulationStateCreateInfoNV;
+  };
+
+  // C++ wrapper for VkPipelineCoverageReductionStateCreateInfoNV: holds the same data
+  // as the C struct and adds constructors, chained setters, memberwise comparison and
+  // implicit conversions to/from the C type.
+  struct PipelineCoverageReductionStateCreateInfoNV
+  {
+    using NativeType = VkPipelineCoverageReductionStateCreateInfoNV;
+
+    // NOTE(review): allowDuplicate presumably marks whether this sType may appear more
+    // than once in a pNext chain -- confirm against vulkan.hpp StructureChain docs.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCoverageReductionStateCreateInfoNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; every member is defaulted, so this doubles as the default constructor.
+VULKAN_HPP_CONSTEXPR PipelineCoverageReductionStateCreateInfoNV(VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags_ = {}, VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), coverageReductionMode( coverageReductionMode_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineCoverageReductionStateCreateInfoNV( PipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct via reinterpret_cast (relies on the two types
+    // sharing an identical layout).
+    PipelineCoverageReductionStateCreateInfoNV( VkPipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineCoverageReductionStateCreateInfoNV( *reinterpret_cast<PipelineCoverageReductionStateCreateInfoNV const *>( &rhs ) )
+    {}
+
+
+    PipelineCoverageReductionStateCreateInfoNV & operator=( PipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (same layout assumption as the converting constructor).
+    PipelineCoverageReductionStateCreateInfoNV & operator=( VkPipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateInfoNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Builder-style setters: each assigns one member and returns *this for chaining.
+    VULKAN_HPP_CONSTEXPR_14 PipelineCoverageReductionStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineCoverageReductionStateCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineCoverageReductionStateCreateInfoNV & setCoverageReductionMode( VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      coverageReductionMode = coverageReductionMode_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit views of this wrapper as the C struct (no copy; reinterprets storage).
+    operator VkPipelineCoverageReductionStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineCoverageReductionStateCreateInfoNV*>( this );
+    }
+
+    operator VkPipelineCoverageReductionStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineCoverageReductionStateCreateInfoNV*>( this );
+    }
+
+    // Tuple of references over all members; the comparison operators below use it
+    // when reflection is enabled.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV const &, VULKAN_HPP_NAMESPACE::CoverageReductionModeNV const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, coverageReductionMode );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PipelineCoverageReductionStateCreateInfoNV const & ) const = default;
+#else
+    // Memberwise equality (sType and pNext pointer included).
+    bool operator==( PipelineCoverageReductionStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( coverageReductionMode == rhs.coverageReductionMode );
+#endif
+    }
+
+    bool operator!=( PipelineCoverageReductionStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageReductionStateCreateInfoNV;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags = {};
+    VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge;
+
+  };
+
+  // sType-to-type trait: recovers the C++ struct type from its StructureType enum value.
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineCoverageReductionStateCreateInfoNV>
+  {
+    using Type = PipelineCoverageReductionStateCreateInfoNV;
+  };
+
+  // C++ wrapper for VkPipelineCoverageToColorStateCreateInfoNV: same data as the C
+  // struct plus constructors, chained setters, memberwise comparison and implicit
+  // conversions to/from the C type.
+  struct PipelineCoverageToColorStateCreateInfoNV
+  {
+    using NativeType = VkPipelineCoverageToColorStateCreateInfoNV;
+
+    // NOTE(review): allowDuplicate presumably marks whether this sType may appear more
+    // than once in a pNext chain -- confirm against vulkan.hpp StructureChain docs.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCoverageToColorStateCreateInfoNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; every member is defaulted, so this doubles as the default constructor.
+VULKAN_HPP_CONSTEXPR PipelineCoverageToColorStateCreateInfoNV(VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags_ = {}, VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable_ = {}, uint32_t coverageToColorLocation_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), coverageToColorEnable( coverageToColorEnable_ ), coverageToColorLocation( coverageToColorLocation_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineCoverageToColorStateCreateInfoNV( PipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct via reinterpret_cast (relies on identical layout).
+    PipelineCoverageToColorStateCreateInfoNV( VkPipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineCoverageToColorStateCreateInfoNV( *reinterpret_cast<PipelineCoverageToColorStateCreateInfoNV const *>( &rhs ) )
+    {}
+
+
+    PipelineCoverageToColorStateCreateInfoNV & operator=( PipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (same layout assumption as the converting constructor).
+    PipelineCoverageToColorStateCreateInfoNV & operator=( VkPipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateInfoNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Builder-style setters: each assigns one member and returns *this for chaining.
+    VULKAN_HPP_CONSTEXPR_14 PipelineCoverageToColorStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineCoverageToColorStateCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineCoverageToColorStateCreateInfoNV & setCoverageToColorEnable( VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      coverageToColorEnable = coverageToColorEnable_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineCoverageToColorStateCreateInfoNV & setCoverageToColorLocation( uint32_t coverageToColorLocation_ ) VULKAN_HPP_NOEXCEPT
+    {
+      coverageToColorLocation = coverageToColorLocation_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit views of this wrapper as the C struct (no copy; reinterprets storage).
+    operator VkPipelineCoverageToColorStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineCoverageToColorStateCreateInfoNV*>( this );
+    }
+
+    operator VkPipelineCoverageToColorStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineCoverageToColorStateCreateInfoNV*>( this );
+    }
+
+    // Tuple of references over all members; the comparison operators below use it
+    // when reflection is enabled.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, coverageToColorEnable, coverageToColorLocation );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PipelineCoverageToColorStateCreateInfoNV const & ) const = default;
+#else
+    // Memberwise equality (sType and pNext pointer included).
+    bool operator==( PipelineCoverageToColorStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( coverageToColorEnable == rhs.coverageToColorEnable )
+          && ( coverageToColorLocation == rhs.coverageToColorLocation );
+#endif
+    }
+
+    bool operator!=( PipelineCoverageToColorStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageToColorStateCreateInfoNV;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags = {};
+    VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable = {};
+    uint32_t coverageToColorLocation = {};
+
+  };
+
+  // sType-to-type trait: recovers the C++ struct type from its StructureType enum value.
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineCoverageToColorStateCreateInfoNV>
+  {
+    using Type = PipelineCoverageToColorStateCreateInfoNV;
+  };
+
+  // C++ wrapper for VkPipelineCreationFeedback. Plain data struct: it has no
+  // sType/pNext members and no setters (only constructors, conversions and comparison).
+  struct PipelineCreationFeedback
+  {
+    using NativeType = VkPipelineCreationFeedback;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; both members defaulted, so this doubles as the default constructor.
+VULKAN_HPP_CONSTEXPR PipelineCreationFeedback(VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlags flags_ = {}, uint64_t duration_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), duration( duration_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineCreationFeedback( PipelineCreationFeedback const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct via reinterpret_cast (relies on identical layout).
+    PipelineCreationFeedback( VkPipelineCreationFeedback const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineCreationFeedback( *reinterpret_cast<PipelineCreationFeedback const *>( &rhs ) )
+    {}
+
+
+    PipelineCreationFeedback & operator=( PipelineCreationFeedback const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (same layout assumption as the converting constructor).
+    PipelineCreationFeedback & operator=( VkPipelineCreationFeedback const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCreationFeedback const *>( &rhs );
+      return *this;
+    }
+
+
+    // Implicit views of this wrapper as the C struct (no copy; reinterprets storage).
+    operator VkPipelineCreationFeedback const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineCreationFeedback*>( this );
+    }
+
+    operator VkPipelineCreationFeedback &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineCreationFeedback*>( this );
+    }
+
+    // Tuple of references over all members; the comparison operators below use it
+    // when reflection is enabled.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlags const &, uint64_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( flags, duration );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PipelineCreationFeedback const & ) const = default;
+#else
+    // Memberwise equality.
+    bool operator==( PipelineCreationFeedback const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( flags == rhs.flags )
+          && ( duration == rhs.duration );
+#endif
+    }
+
+    bool operator!=( PipelineCreationFeedback const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlags flags = {};
+    uint64_t duration = {};
+
+  };
+  // EXT-suffixed alias so code written against the extension-era name still compiles.
+  using PipelineCreationFeedbackEXT = PipelineCreationFeedback;
+
+  // C++ wrapper for VkPipelineCreationFeedbackCreateInfo: same data as the C struct
+  // plus constructors (including an ArrayProxy convenience overload), chained setters,
+  // memberwise comparison and implicit conversions to/from the C type.
+  struct PipelineCreationFeedbackCreateInfo
+  {
+    using NativeType = VkPipelineCreationFeedbackCreateInfo;
+
+    // NOTE(review): allowDuplicate presumably marks whether this sType may appear more
+    // than once in a pNext chain -- confirm against vulkan.hpp StructureChain docs.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCreationFeedbackCreateInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; every member is defaulted, so this doubles as the default constructor.
+VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackCreateInfo(VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineCreationFeedback_ = {}, uint32_t pipelineStageCreationFeedbackCount_ = {}, VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineStageCreationFeedbacks_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), pPipelineCreationFeedback( pPipelineCreationFeedback_ ), pipelineStageCreationFeedbackCount( pipelineStageCreationFeedbackCount_ ), pPipelineStageCreationFeedbacks( pPipelineStageCreationFeedbacks_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackCreateInfo( PipelineCreationFeedbackCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct via reinterpret_cast (relies on identical layout).
+    PipelineCreationFeedbackCreateInfo( VkPipelineCreationFeedbackCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineCreationFeedbackCreateInfo( *reinterpret_cast<PipelineCreationFeedbackCreateInfo const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Enhanced-mode constructor: takes the stage feedbacks as an ArrayProxy and derives
+    // the count from it, so pointer and count cannot get out of sync.
+    PipelineCreationFeedbackCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineCreationFeedback_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::PipelineCreationFeedback> const & pipelineStageCreationFeedbacks_, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), pPipelineCreationFeedback( pPipelineCreationFeedback_ ), pipelineStageCreationFeedbackCount( static_cast<uint32_t>( pipelineStageCreationFeedbacks_.size() ) ), pPipelineStageCreationFeedbacks( pipelineStageCreationFeedbacks_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    PipelineCreationFeedbackCreateInfo & operator=( PipelineCreationFeedbackCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (same layout assumption as the converting constructor).
+    PipelineCreationFeedbackCreateInfo & operator=( VkPipelineCreationFeedbackCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Builder-style setters: each assigns one member and returns *this for chaining.
+    VULKAN_HPP_CONSTEXPR_14 PipelineCreationFeedbackCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineCreationFeedbackCreateInfo & setPPipelineCreationFeedback( VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineCreationFeedback_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pPipelineCreationFeedback = pPipelineCreationFeedback_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineCreationFeedbackCreateInfo & setPipelineStageCreationFeedbackCount( uint32_t pipelineStageCreationFeedbackCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineStageCreationFeedbackCount = pipelineStageCreationFeedbackCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineCreationFeedbackCreateInfo & setPPipelineStageCreationFeedbacks( VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineStageCreationFeedbacks_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pPipelineStageCreationFeedbacks = pPipelineStageCreationFeedbacks_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Enhanced-mode setter: sets pointer and count together from an ArrayProxy.
+    PipelineCreationFeedbackCreateInfo & setPipelineStageCreationFeedbacks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::PipelineCreationFeedback> const & pipelineStageCreationFeedbacks_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineStageCreationFeedbackCount = static_cast<uint32_t>( pipelineStageCreationFeedbacks_.size() );
+      pPipelineStageCreationFeedbacks = pipelineStageCreationFeedbacks_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit views of this wrapper as the C struct (no copy; reinterprets storage).
+    operator VkPipelineCreationFeedbackCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineCreationFeedbackCreateInfo*>( this );
+    }
+
+    operator VkPipelineCreationFeedbackCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineCreationFeedbackCreateInfo*>( this );
+    }
+
+    // Tuple of references over all members; the comparison operators below use it
+    // when reflection is enabled.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, pPipelineCreationFeedback, pipelineStageCreationFeedbackCount, pPipelineStageCreationFeedbacks );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PipelineCreationFeedbackCreateInfo const & ) const = default;
+#else
+    // Memberwise equality; pointer members compare by address, not by pointee contents.
+    bool operator==( PipelineCreationFeedbackCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pPipelineCreationFeedback == rhs.pPipelineCreationFeedback )
+          && ( pipelineStageCreationFeedbackCount == rhs.pipelineStageCreationFeedbackCount )
+          && ( pPipelineStageCreationFeedbacks == rhs.pPipelineStageCreationFeedbacks );
+#endif
+    }
+
+    bool operator!=( PipelineCreationFeedbackCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCreationFeedbackCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineCreationFeedback = {};
+    uint32_t pipelineStageCreationFeedbackCount = {};
+    VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineStageCreationFeedbacks = {};
+
+  };
+
+  // sType-to-type trait: recovers the C++ struct type from its StructureType enum value.
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineCreationFeedbackCreateInfo>
+  {
+    using Type = PipelineCreationFeedbackCreateInfo;
+  };
+  // EXT-suffixed alias so code written against the extension-era name still compiles.
+  using PipelineCreationFeedbackCreateInfoEXT = PipelineCreationFeedbackCreateInfo;
+
+  // C++ wrapper for VkPipelineDiscardRectangleStateCreateInfoEXT: same data as the C
+  // struct plus constructors (including an ArrayProxy convenience overload), chained
+  // setters, memberwise comparison and implicit conversions to/from the C type.
+  struct PipelineDiscardRectangleStateCreateInfoEXT
+  {
+    using NativeType = VkPipelineDiscardRectangleStateCreateInfoEXT;
+
+    // NOTE(review): allowDuplicate presumably marks whether this sType may appear more
+    // than once in a pNext chain -- confirm against vulkan.hpp StructureChain docs.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineDiscardRectangleStateCreateInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; every member is defaulted, so this doubles as the default constructor.
+VULKAN_HPP_CONSTEXPR PipelineDiscardRectangleStateCreateInfoEXT(VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_ = {}, VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_ = VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT::eInclusive, uint32_t discardRectangleCount_ = {}, const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), discardRectangleMode( discardRectangleMode_ ), discardRectangleCount( discardRectangleCount_ ), pDiscardRectangles( pDiscardRectangles_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineDiscardRectangleStateCreateInfoEXT( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct via reinterpret_cast (relies on identical layout).
+    PipelineDiscardRectangleStateCreateInfoEXT( VkPipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineDiscardRectangleStateCreateInfoEXT( *reinterpret_cast<PipelineDiscardRectangleStateCreateInfoEXT const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Enhanced-mode constructor: takes the rectangles as an ArrayProxy and derives the
+    // count from it, so pointer and count cannot get out of sync.
+    PipelineDiscardRectangleStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_, VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles_, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), flags( flags_ ), discardRectangleMode( discardRectangleMode_ ), discardRectangleCount( static_cast<uint32_t>( discardRectangles_.size() ) ), pDiscardRectangles( discardRectangles_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    PipelineDiscardRectangleStateCreateInfoEXT & operator=( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (same layout assumption as the converting constructor).
+    PipelineDiscardRectangleStateCreateInfoEXT & operator=( VkPipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Builder-style setters: each assigns one member and returns *this for chaining.
+    VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT & setDiscardRectangleMode( VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      discardRectangleMode = discardRectangleMode_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT & setDiscardRectangleCount( uint32_t discardRectangleCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      discardRectangleCount = discardRectangleCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT & setPDiscardRectangles( const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDiscardRectangles = pDiscardRectangles_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Enhanced-mode setter: sets pointer and count together from an ArrayProxy.
+    PipelineDiscardRectangleStateCreateInfoEXT & setDiscardRectangles( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles_ ) VULKAN_HPP_NOEXCEPT
+    {
+      discardRectangleCount = static_cast<uint32_t>( discardRectangles_.size() );
+      pDiscardRectangles = discardRectangles_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit views of this wrapper as the C struct (no copy; reinterprets storage).
+    operator VkPipelineDiscardRectangleStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineDiscardRectangleStateCreateInfoEXT*>( this );
+    }
+
+    operator VkPipelineDiscardRectangleStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineDiscardRectangleStateCreateInfoEXT*>( this );
+    }
+
+    // Tuple of references over all members; the comparison operators below use it
+    // when reflection is enabled.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT const &, VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Rect2D * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, discardRectangleMode, discardRectangleCount, pDiscardRectangles );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PipelineDiscardRectangleStateCreateInfoEXT const & ) const = default;
+#else
+    // Memberwise equality; pDiscardRectangles compares by address, not by pointee contents.
+    bool operator==( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( discardRectangleMode == rhs.discardRectangleMode )
+          && ( discardRectangleCount == rhs.discardRectangleCount )
+          && ( pDiscardRectangles == rhs.pDiscardRectangles );
+#endif
+    }
+
+    bool operator!=( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDiscardRectangleStateCreateInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags = {};
+    VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode = VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT::eInclusive;
+    uint32_t discardRectangleCount = {};
+    const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles = {};
+
+  };
+
+  // sType-to-type trait: recovers the C++ struct type from its StructureType enum value.
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineDiscardRectangleStateCreateInfoEXT>
+  {
+    using Type = PipelineDiscardRectangleStateCreateInfoEXT;
+  };
+
+  // C++ wrapper for VkPipelineExecutableInfoKHR: identifies a pipeline and an
+  // executable index; adds constructors, chained setters, memberwise comparison and
+  // implicit conversions to/from the C type.
+  struct PipelineExecutableInfoKHR
+  {
+    using NativeType = VkPipelineExecutableInfoKHR;
+
+    // NOTE(review): allowDuplicate presumably marks whether this sType may appear more
+    // than once in a pNext chain -- confirm against vulkan.hpp StructureChain docs.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutableInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; every member is defaulted, so this doubles as the default constructor.
+VULKAN_HPP_CONSTEXPR PipelineExecutableInfoKHR(VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, uint32_t executableIndex_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), pipeline( pipeline_ ), executableIndex( executableIndex_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineExecutableInfoKHR( PipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct via reinterpret_cast (relies on identical layout).
+    PipelineExecutableInfoKHR( VkPipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineExecutableInfoKHR( *reinterpret_cast<PipelineExecutableInfoKHR const *>( &rhs ) )
+    {}
+
+
+    PipelineExecutableInfoKHR & operator=( PipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (same layout assumption as the converting constructor).
+    PipelineExecutableInfoKHR & operator=( VkPipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Builder-style setters: each assigns one member and returns *this for chaining.
+    VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInfoKHR & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipeline = pipeline_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInfoKHR & setExecutableIndex( uint32_t executableIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      executableIndex = executableIndex_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit views of this wrapper as the C struct (no copy; reinterprets storage).
+    operator VkPipelineExecutableInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineExecutableInfoKHR*>( this );
+    }
+
+    operator VkPipelineExecutableInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineExecutableInfoKHR*>( this );
+    }
+
+    // Tuple of references over all members; the comparison operators below use it
+    // when reflection is enabled.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Pipeline const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, pipeline, executableIndex );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PipelineExecutableInfoKHR const & ) const = default;
+#else
+    // Memberwise equality (sType and pNext pointer included).
+    bool operator==( PipelineExecutableInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pipeline == rhs.pipeline )
+          && ( executableIndex == rhs.executableIndex );
+#endif
+    }
+
+    bool operator!=( PipelineExecutableInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Pipeline pipeline = {};
+    uint32_t executableIndex = {};
+
+  };
+
+  // sType-to-type trait: recovers the C++ struct type from its StructureType enum value.
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineExecutableInfoKHR>
+  {
+    using Type = PipelineExecutableInfoKHR;
+  };
+
+  struct PipelineExecutableInternalRepresentationKHR
+  {
+    using NativeType = VkPipelineExecutableInternalRepresentationKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutableInternalRepresentationKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInternalRepresentationKHR(std::array<char,VK_MAX_DESCRIPTION_SIZE> const & name_ = {}, std::array<char,VK_MAX_DESCRIPTION_SIZE> const & description_ = {}, VULKAN_HPP_NAMESPACE::Bool32 isText_ = {}, size_t dataSize_ = {}, void * pData_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), name( name_ ), description( description_ ), isText( isText_ ), dataSize( dataSize_ ), pData( pData_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInternalRepresentationKHR( PipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineExecutableInternalRepresentationKHR( VkPipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineExecutableInternalRepresentationKHR( *reinterpret_cast<PipelineExecutableInternalRepresentationKHR const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    template <typename T>
+    PipelineExecutableInternalRepresentationKHR( std::array<char,VK_MAX_DESCRIPTION_SIZE> const & name_, std::array<char,VK_MAX_DESCRIPTION_SIZE> const & description_, VULKAN_HPP_NAMESPACE::Bool32 isText_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<T> const & data_, void * pNext_ = nullptr )
+    : pNext( pNext_ ), name( name_ ), description( description_ ), isText( isText_ ), dataSize( data_.size() * sizeof(T) ), pData( data_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    PipelineExecutableInternalRepresentationKHR & operator=( PipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PipelineExecutableInternalRepresentationKHR & operator=( VkPipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkPipelineExecutableInternalRepresentationKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineExecutableInternalRepresentationKHR*>( this );
+    }
+
+    operator VkPipelineExecutableInternalRepresentationKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &, VULKAN_HPP_NAMESPACE::Bool32 const &, size_t const &, void * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, name, description, isText, dataSize, pData );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PipelineExecutableInternalRepresentationKHR const & ) const = default;
+#else
+    bool operator==( PipelineExecutableInternalRepresentationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( name == rhs.name )
+          && ( description == rhs.description )
+          && ( isText == rhs.isText )
+          && ( dataSize == rhs.dataSize )
+          && ( pData == rhs.pData );
+#endif
+    }
+
+    bool operator!=( PipelineExecutableInternalRepresentationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableInternalRepresentationKHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> name = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
+    VULKAN_HPP_NAMESPACE::Bool32 isText = {};
+    size_t dataSize = {};
+    void * pData = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineExecutableInternalRepresentationKHR>
+  {
+    using Type = PipelineExecutableInternalRepresentationKHR;
+  };
+
+  struct PipelineExecutablePropertiesKHR
+  {
+    using NativeType = VkPipelineExecutablePropertiesKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutablePropertiesKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR_14 PipelineExecutablePropertiesKHR(VULKAN_HPP_NAMESPACE::ShaderStageFlags stages_ = {}, std::array<char,VK_MAX_DESCRIPTION_SIZE> const & name_ = {}, std::array<char,VK_MAX_DESCRIPTION_SIZE> const & description_ = {}, uint32_t subgroupSize_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), stages( stages_ ), name( name_ ), description( description_ ), subgroupSize( subgroupSize_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineExecutablePropertiesKHR( PipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineExecutablePropertiesKHR( VkPipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineExecutablePropertiesKHR( *reinterpret_cast<PipelineExecutablePropertiesKHR const *>( &rhs ) )
+    {}
+
+
+    PipelineExecutablePropertiesKHR & operator=( PipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PipelineExecutablePropertiesKHR & operator=( VkPipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkPipelineExecutablePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineExecutablePropertiesKHR*>( this );
+    }
+
+    operator VkPipelineExecutablePropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineExecutablePropertiesKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ShaderStageFlags const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, stages, name, description, subgroupSize );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PipelineExecutablePropertiesKHR const & ) const = default;
+#else
+    bool operator==( PipelineExecutablePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( stages == rhs.stages )
+          && ( name == rhs.name )
+          && ( description == rhs.description )
+          && ( subgroupSize == rhs.subgroupSize );
+#endif
+    }
+
+    bool operator!=( PipelineExecutablePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutablePropertiesKHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ShaderStageFlags stages = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> name = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
+    uint32_t subgroupSize = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineExecutablePropertiesKHR>
+  {
+    using Type = PipelineExecutablePropertiesKHR;
+  };
+
+  union PipelineExecutableStatisticValueKHR
+  {
+    using NativeType = VkPipelineExecutableStatisticValueKHR;
+#if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR( VULKAN_HPP_NAMESPACE::Bool32 b32_ = {} )
+      : b32( b32_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR( int64_t i64_ )
+      : i64( i64_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR( uint64_t u64_ )
+      : u64( u64_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR( double f64_ )
+      : f64( f64_ )
+    {}
+#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/
+
+#if !defined( VULKAN_HPP_NO_UNION_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR & setB32( VULKAN_HPP_NAMESPACE::Bool32 b32_ ) VULKAN_HPP_NOEXCEPT
+    {
+      b32 = b32_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR & setI64( int64_t i64_ ) VULKAN_HPP_NOEXCEPT
+    {
+      i64 = i64_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR & setU64( uint64_t u64_ ) VULKAN_HPP_NOEXCEPT
+    {
+      u64 = u64_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR & setF64( double f64_ ) VULKAN_HPP_NOEXCEPT
+    {
+      f64 = f64_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_UNION_SETTERS*/
+
+    operator VkPipelineExecutableStatisticValueKHR const &() const
+    {
+      return *reinterpret_cast<const VkPipelineExecutableStatisticValueKHR*>( this );
+    }
+
+    operator VkPipelineExecutableStatisticValueKHR &()
+    {
+      return *reinterpret_cast<VkPipelineExecutableStatisticValueKHR*>( this );
+    }
+
+#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
+    VULKAN_HPP_NAMESPACE::Bool32 b32;
+    int64_t i64;
+    uint64_t u64;
+    double f64;
+#else
+    VkBool32 b32;
+    int64_t i64;
+    uint64_t u64;
+    double f64;
+#endif  /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
+
+  };
+
+  struct PipelineExecutableStatisticKHR
+  {
+    using NativeType = VkPipelineExecutableStatisticKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutableStatisticKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticKHR(std::array<char,VK_MAX_DESCRIPTION_SIZE> const & name_ = {}, std::array<char,VK_MAX_DESCRIPTION_SIZE> const & description_ = {}, VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR format_ = VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR::eBool32, VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR value_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), name( name_ ), description( description_ ), format( format_ ), value( value_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticKHR( PipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineExecutableStatisticKHR( VkPipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineExecutableStatisticKHR( *reinterpret_cast<PipelineExecutableStatisticKHR const *>( &rhs ) )
+    {}
+
+
+    PipelineExecutableStatisticKHR & operator=( PipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PipelineExecutableStatisticKHR & operator=( VkPipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkPipelineExecutableStatisticKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineExecutableStatisticKHR*>( this );
+    }
+
+    operator VkPipelineExecutableStatisticKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineExecutableStatisticKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &, VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR const &, VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, name, description, format, value );
+    }
+#endif
+
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableStatisticKHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> name = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
+    VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR format = VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR::eBool32;
+    VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR value = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineExecutableStatisticKHR>
+  {
+    using Type = PipelineExecutableStatisticKHR;
+  };
+
+  struct PipelineFragmentShadingRateEnumStateCreateInfoNV
+  {
+    using NativeType = VkPipelineFragmentShadingRateEnumStateCreateInfoNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineFragmentShadingRateEnumStateCreateInfoNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV(VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV shadingRateType_ = VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV::eFragmentSize, VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate_ = VULKAN_HPP_NAMESPACE::FragmentShadingRateNV::e1InvocationPerPixel, std::array<VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR,2> const & combinerOps_ = { { VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep, VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep } }, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), shadingRateType( shadingRateType_ ), shadingRate( shadingRate_ ), combinerOps( combinerOps_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV( PipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineFragmentShadingRateEnumStateCreateInfoNV( VkPipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineFragmentShadingRateEnumStateCreateInfoNV( *reinterpret_cast<PipelineFragmentShadingRateEnumStateCreateInfoNV const *>( &rhs ) )
+    {}
+
+
+    PipelineFragmentShadingRateEnumStateCreateInfoNV & operator=( PipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PipelineFragmentShadingRateEnumStateCreateInfoNV & operator=( VkPipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateEnumStateCreateInfoNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV & setShadingRateType( VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV shadingRateType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shadingRateType = shadingRateType_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV & setShadingRate( VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shadingRate = shadingRate_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV & setCombinerOps( std::array<VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR,2> combinerOps_ ) VULKAN_HPP_NOEXCEPT
+    {
+      combinerOps = combinerOps_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPipelineFragmentShadingRateEnumStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineFragmentShadingRateEnumStateCreateInfoNV*>( this );
+    }
+
+    operator VkPipelineFragmentShadingRateEnumStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineFragmentShadingRateEnumStateCreateInfoNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV const &, VULKAN_HPP_NAMESPACE::FragmentShadingRateNV const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR, 2> const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, shadingRateType, shadingRate, combinerOps );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PipelineFragmentShadingRateEnumStateCreateInfoNV const & ) const = default;
+#else
+    bool operator==( PipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shadingRateType == rhs.shadingRateType )
+          && ( shadingRate == rhs.shadingRate )
+          && ( combinerOps == rhs.combinerOps );
+#endif
+    }
+
+    bool operator!=( PipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineFragmentShadingRateEnumStateCreateInfoNV;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV shadingRateType = VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV::eFragmentSize;
+    VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate = VULKAN_HPP_NAMESPACE::FragmentShadingRateNV::e1InvocationPerPixel;
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR, 2> combinerOps = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineFragmentShadingRateEnumStateCreateInfoNV>
+  {
+    using Type = PipelineFragmentShadingRateEnumStateCreateInfoNV;
+  };
+
+  struct PipelineFragmentShadingRateStateCreateInfoKHR
+  {
+    using NativeType = VkPipelineFragmentShadingRateStateCreateInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineFragmentShadingRateStateCreateInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR(VULKAN_HPP_NAMESPACE::Extent2D fragmentSize_ = {}, std::array<VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR,2> const & combinerOps_ = { { VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep, VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep } }, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), fragmentSize( fragmentSize_ ), combinerOps( combinerOps_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR( PipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineFragmentShadingRateStateCreateInfoKHR( VkPipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineFragmentShadingRateStateCreateInfoKHR( *reinterpret_cast<PipelineFragmentShadingRateStateCreateInfoKHR const *>( &rhs ) )
+    {}
+
+
+    PipelineFragmentShadingRateStateCreateInfoKHR & operator=( PipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PipelineFragmentShadingRateStateCreateInfoKHR & operator=( VkPipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateStateCreateInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR & setFragmentSize( VULKAN_HPP_NAMESPACE::Extent2D const & fragmentSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fragmentSize = fragmentSize_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR & setCombinerOps( std::array<VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR,2> combinerOps_ ) VULKAN_HPP_NOEXCEPT
+    {
+      combinerOps = combinerOps_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPipelineFragmentShadingRateStateCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineFragmentShadingRateStateCreateInfoKHR*>( this );
+    }
+
+    operator VkPipelineFragmentShadingRateStateCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineFragmentShadingRateStateCreateInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR, 2> const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, fragmentSize, combinerOps );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PipelineFragmentShadingRateStateCreateInfoKHR const & ) const = default;
+#else
+    bool operator==( PipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( fragmentSize == rhs.fragmentSize )
+          && ( combinerOps == rhs.combinerOps );
+#endif
+    }
+
+    bool operator!=( PipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineFragmentShadingRateStateCreateInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Extent2D fragmentSize = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR, 2> combinerOps = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineFragmentShadingRateStateCreateInfoKHR>
+  {
+    using Type = PipelineFragmentShadingRateStateCreateInfoKHR;
+  };
+
+  struct PipelineInfoKHR
+  {
+    using NativeType = VkPipelineInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PipelineInfoKHR(VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), pipeline( pipeline_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineInfoKHR( PipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineInfoKHR( VkPipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineInfoKHR( *reinterpret_cast<PipelineInfoKHR const *>( &rhs ) )
+    {}
+
+
+    PipelineInfoKHR & operator=( PipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PipelineInfoKHR & operator=( VkPipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PipelineInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineInfoKHR & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipeline = pipeline_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPipelineInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineInfoKHR*>( this );
+    }
+
+    operator VkPipelineInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Pipeline const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, pipeline );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PipelineInfoKHR const & ) const = default;
+#else
+    bool operator==( PipelineInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pipeline == rhs.pipeline );
+#endif
+    }
+
+    bool operator!=( PipelineInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Pipeline pipeline = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineInfoKHR>
+  {
+    using Type = PipelineInfoKHR;
+  };
+  using PipelineInfoEXT = PipelineInfoKHR;
+
+  struct PushConstantRange
+  {
+    using NativeType = VkPushConstantRange;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PushConstantRange(VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, uint32_t offset_ = {}, uint32_t size_ = {}) VULKAN_HPP_NOEXCEPT
+    : stageFlags( stageFlags_ ), offset( offset_ ), size( size_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PushConstantRange( PushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PushConstantRange( VkPushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PushConstantRange( *reinterpret_cast<PushConstantRange const *>( &rhs ) )
+    {}
+
+
+    PushConstantRange & operator=( PushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PushConstantRange & operator=( VkPushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PushConstantRange const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PushConstantRange & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stageFlags = stageFlags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PushConstantRange & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PushConstantRange & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT
+    {
+      size = size_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPushConstantRange const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPushConstantRange*>( this );
+    }
+
+    operator VkPushConstantRange &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPushConstantRange*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::ShaderStageFlags const &, uint32_t const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( stageFlags, offset, size );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PushConstantRange const & ) const = default;
+#else
+    bool operator==( PushConstantRange const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( stageFlags == rhs.stageFlags )
+          && ( offset == rhs.offset )
+          && ( size == rhs.size );
+#endif
+    }
+
+    bool operator!=( PushConstantRange const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {};
+    uint32_t offset = {};
+    uint32_t size = {};
+
+  };
+
+  struct PipelineLayoutCreateInfo
+  {
+    using NativeType = VkPipelineLayoutCreateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineLayoutCreateInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PipelineLayoutCreateInfo(VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_ = {}, uint32_t setLayoutCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ = {}, uint32_t pushConstantRangeCount_ = {}, const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), setLayoutCount( setLayoutCount_ ), pSetLayouts( pSetLayouts_ ), pushConstantRangeCount( pushConstantRangeCount_ ), pPushConstantRanges( pPushConstantRanges_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineLayoutCreateInfo( PipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineLayoutCreateInfo( VkPipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineLayoutCreateInfo( *reinterpret_cast<PipelineLayoutCreateInfo const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    PipelineLayoutCreateInfo( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayout> const & setLayouts_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PushConstantRange> const & pushConstantRanges_ = {}, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), flags( flags_ ), setLayoutCount( static_cast<uint32_t>( setLayouts_.size() ) ), pSetLayouts( setLayouts_.data() ), pushConstantRangeCount( static_cast<uint32_t>( pushConstantRanges_.size() ) ), pPushConstantRanges( pushConstantRanges_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    PipelineLayoutCreateInfo & operator=( PipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PipelineLayoutCreateInfo & operator=( VkPipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo & setSetLayoutCount( uint32_t setLayoutCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      setLayoutCount = setLayoutCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo & setPSetLayouts( const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSetLayouts = pSetLayouts_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    PipelineLayoutCreateInfo & setSetLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayout> const & setLayouts_ ) VULKAN_HPP_NOEXCEPT
+    {
+      setLayoutCount = static_cast<uint32_t>( setLayouts_.size() );
+      pSetLayouts = setLayouts_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo & setPushConstantRangeCount( uint32_t pushConstantRangeCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pushConstantRangeCount = pushConstantRangeCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo & setPPushConstantRanges( const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pPushConstantRanges = pPushConstantRanges_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    PipelineLayoutCreateInfo & setPushConstantRanges( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PushConstantRange> const & pushConstantRanges_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pushConstantRangeCount = static_cast<uint32_t>( pushConstantRanges_.size() );
+      pPushConstantRanges = pushConstantRanges_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPipelineLayoutCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineLayoutCreateInfo*>( this );
+    }
+
+    operator VkPipelineLayoutCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineLayoutCreateInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::PushConstantRange * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, setLayoutCount, pSetLayouts, pushConstantRangeCount, pPushConstantRanges );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PipelineLayoutCreateInfo const & ) const = default;
+#else
+    bool operator==( PipelineLayoutCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( setLayoutCount == rhs.setLayoutCount )
+          && ( pSetLayouts == rhs.pSetLayouts )
+          && ( pushConstantRangeCount == rhs.pushConstantRangeCount )
+          && ( pPushConstantRanges == rhs.pPushConstantRanges );
+#endif
+    }
+
+    bool operator!=( PipelineLayoutCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineLayoutCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags = {};
+    uint32_t setLayoutCount = {};
+    const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts = {};
+    uint32_t pushConstantRangeCount = {};
+    const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineLayoutCreateInfo>
+  {
+    using Type = PipelineLayoutCreateInfo;
+  };
+
+  struct PipelineLibraryCreateInfoKHR
+  {
+    using NativeType = VkPipelineLibraryCreateInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineLibraryCreateInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PipelineLibraryCreateInfoKHR(uint32_t libraryCount_ = {}, const VULKAN_HPP_NAMESPACE::Pipeline * pLibraries_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), libraryCount( libraryCount_ ), pLibraries( pLibraries_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineLibraryCreateInfoKHR( PipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineLibraryCreateInfoKHR( VkPipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineLibraryCreateInfoKHR( *reinterpret_cast<PipelineLibraryCreateInfoKHR const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    PipelineLibraryCreateInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Pipeline> const & libraries_, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), libraryCount( static_cast<uint32_t>( libraries_.size() ) ), pLibraries( libraries_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    PipelineLibraryCreateInfoKHR & operator=( PipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PipelineLibraryCreateInfoKHR & operator=( VkPipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PipelineLibraryCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineLibraryCreateInfoKHR & setLibraryCount( uint32_t libraryCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      libraryCount = libraryCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineLibraryCreateInfoKHR & setPLibraries( const VULKAN_HPP_NAMESPACE::Pipeline * pLibraries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pLibraries = pLibraries_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    PipelineLibraryCreateInfoKHR & setLibraries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Pipeline> const & libraries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      libraryCount = static_cast<uint32_t>( libraries_.size() );
+      pLibraries = libraries_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPipelineLibraryCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineLibraryCreateInfoKHR*>( this );
+    }
+
+    operator VkPipelineLibraryCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineLibraryCreateInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Pipeline * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, libraryCount, pLibraries );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PipelineLibraryCreateInfoKHR const & ) const = default;
+#else
+    bool operator==( PipelineLibraryCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( libraryCount == rhs.libraryCount )
+          && ( pLibraries == rhs.pLibraries );
+#endif
+    }
+
+    bool operator!=( PipelineLibraryCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineLibraryCreateInfoKHR;
+    const void * pNext = {};
+    uint32_t libraryCount = {};
+    const VULKAN_HPP_NAMESPACE::Pipeline * pLibraries = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineLibraryCreateInfoKHR>
+  {
+    using Type = PipelineLibraryCreateInfoKHR;
+  };
+
+  struct PipelinePropertiesIdentifierEXT
+  {
+    using NativeType = VkPipelinePropertiesIdentifierEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelinePropertiesIdentifierEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR_14 PipelinePropertiesIdentifierEXT(std::array<uint8_t,VK_UUID_SIZE> const & pipelineIdentifier_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), pipelineIdentifier( pipelineIdentifier_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PipelinePropertiesIdentifierEXT( PipelinePropertiesIdentifierEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelinePropertiesIdentifierEXT( VkPipelinePropertiesIdentifierEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelinePropertiesIdentifierEXT( *reinterpret_cast<PipelinePropertiesIdentifierEXT const *>( &rhs ) )
+    {}
+
+
+    PipelinePropertiesIdentifierEXT & operator=( PipelinePropertiesIdentifierEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PipelinePropertiesIdentifierEXT & operator=( VkPipelinePropertiesIdentifierEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelinePropertiesIdentifierEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PipelinePropertiesIdentifierEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelinePropertiesIdentifierEXT & setPipelineIdentifier( std::array<uint8_t,VK_UUID_SIZE> pipelineIdentifier_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineIdentifier = pipelineIdentifier_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPipelinePropertiesIdentifierEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelinePropertiesIdentifierEXT*>( this );
+    }
+
+    operator VkPipelinePropertiesIdentifierEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelinePropertiesIdentifierEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, pipelineIdentifier );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PipelinePropertiesIdentifierEXT const & ) const = default;
+#else
+    bool operator==( PipelinePropertiesIdentifierEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pipelineIdentifier == rhs.pipelineIdentifier );
+#endif
+    }
+
+    bool operator!=( PipelinePropertiesIdentifierEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelinePropertiesIdentifierEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> pipelineIdentifier = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelinePropertiesIdentifierEXT>
+  {
+    using Type = PipelinePropertiesIdentifierEXT;
+  };
+
+  struct PipelineRasterizationConservativeStateCreateInfoEXT
+  {
+    using NativeType = VkPipelineRasterizationConservativeStateCreateInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PipelineRasterizationConservativeStateCreateInfoEXT(VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags_ = {}, VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode_ = VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT::eDisabled, float extraPrimitiveOverestimationSize_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), conservativeRasterizationMode( conservativeRasterizationMode_ ), extraPrimitiveOverestimationSize( extraPrimitiveOverestimationSize_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineRasterizationConservativeStateCreateInfoEXT( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineRasterizationConservativeStateCreateInfoEXT( VkPipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineRasterizationConservativeStateCreateInfoEXT( *reinterpret_cast<PipelineRasterizationConservativeStateCreateInfoEXT const *>( &rhs ) )
+    {}
+
+
+    PipelineRasterizationConservativeStateCreateInfoEXT & operator=( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PipelineRasterizationConservativeStateCreateInfoEXT & operator=( VkPipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationConservativeStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationConservativeStateCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationConservativeStateCreateInfoEXT & setConservativeRasterizationMode( VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      conservativeRasterizationMode = conservativeRasterizationMode_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationConservativeStateCreateInfoEXT & setExtraPrimitiveOverestimationSize( float extraPrimitiveOverestimationSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extraPrimitiveOverestimationSize = extraPrimitiveOverestimationSize_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPipelineRasterizationConservativeStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineRasterizationConservativeStateCreateInfoEXT*>( this );
+    }
+
+    operator VkPipelineRasterizationConservativeStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineRasterizationConservativeStateCreateInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT const &, VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT const &, float const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, conservativeRasterizationMode, extraPrimitiveOverestimationSize );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PipelineRasterizationConservativeStateCreateInfoEXT const & ) const = default;
+#else
+    bool operator==( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( conservativeRasterizationMode == rhs.conservativeRasterizationMode )
+          && ( extraPrimitiveOverestimationSize == rhs.extraPrimitiveOverestimationSize );
+#endif
+    }
+
+    bool operator!=( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags = {};
+    VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode = VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT::eDisabled;
+    float extraPrimitiveOverestimationSize = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT>
+  {
+    using Type = PipelineRasterizationConservativeStateCreateInfoEXT;
+  };
+
+  struct PipelineRasterizationDepthClipStateCreateInfoEXT
+  {
+    using NativeType = VkPipelineRasterizationDepthClipStateCreateInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PipelineRasterizationDepthClipStateCreateInfoEXT(VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), depthClipEnable( depthClipEnable_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineRasterizationDepthClipStateCreateInfoEXT( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineRasterizationDepthClipStateCreateInfoEXT( VkPipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineRasterizationDepthClipStateCreateInfoEXT( *reinterpret_cast<PipelineRasterizationDepthClipStateCreateInfoEXT const *>( &rhs ) )
+    {}
+
+
+    PipelineRasterizationDepthClipStateCreateInfoEXT & operator=( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PipelineRasterizationDepthClipStateCreateInfoEXT & operator=( VkPipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationDepthClipStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationDepthClipStateCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationDepthClipStateCreateInfoEXT & setDepthClipEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthClipEnable = depthClipEnable_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPipelineRasterizationDepthClipStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineRasterizationDepthClipStateCreateInfoEXT*>( this );
+    }
+
+    operator VkPipelineRasterizationDepthClipStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineRasterizationDepthClipStateCreateInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, depthClipEnable );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PipelineRasterizationDepthClipStateCreateInfoEXT const & ) const = default;
+#else
+    bool operator==( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( depthClipEnable == rhs.depthClipEnable );
+#endif
+    }
+
+    bool operator!=( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags = {};
+    VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT>
+  {
+    using Type = PipelineRasterizationDepthClipStateCreateInfoEXT;
+  };
+
+  struct PipelineRasterizationLineStateCreateInfoEXT
+  {
+    using NativeType = VkPipelineRasterizationLineStateCreateInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationLineStateCreateInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PipelineRasterizationLineStateCreateInfoEXT(VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode_ = VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT::eDefault, VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable_ = {}, uint32_t lineStippleFactor_ = {}, uint16_t lineStipplePattern_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), lineRasterizationMode( lineRasterizationMode_ ), stippledLineEnable( stippledLineEnable_ ), lineStippleFactor( lineStippleFactor_ ), lineStipplePattern( lineStipplePattern_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineRasterizationLineStateCreateInfoEXT( PipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineRasterizationLineStateCreateInfoEXT( VkPipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineRasterizationLineStateCreateInfoEXT( *reinterpret_cast<PipelineRasterizationLineStateCreateInfoEXT const *>( &rhs ) )
+    {}
+
+
+    PipelineRasterizationLineStateCreateInfoEXT & operator=( PipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PipelineRasterizationLineStateCreateInfoEXT & operator=( VkPipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfoEXT & setLineRasterizationMode( VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      lineRasterizationMode = lineRasterizationMode_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfoEXT & setStippledLineEnable( VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stippledLineEnable = stippledLineEnable_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfoEXT & setLineStippleFactor( uint32_t lineStippleFactor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      lineStippleFactor = lineStippleFactor_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfoEXT & setLineStipplePattern( uint16_t lineStipplePattern_ ) VULKAN_HPP_NOEXCEPT
+    {
+      lineStipplePattern = lineStipplePattern_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPipelineRasterizationLineStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineRasterizationLineStateCreateInfoEXT*>( this );
+    }
+
+    operator VkPipelineRasterizationLineStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineRasterizationLineStateCreateInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint32_t const &, uint16_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, lineRasterizationMode, stippledLineEnable, lineStippleFactor, lineStipplePattern );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PipelineRasterizationLineStateCreateInfoEXT const & ) const = default;
+#else
+    bool operator==( PipelineRasterizationLineStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( lineRasterizationMode == rhs.lineRasterizationMode )
+          && ( stippledLineEnable == rhs.stippledLineEnable )
+          && ( lineStippleFactor == rhs.lineStippleFactor )
+          && ( lineStipplePattern == rhs.lineStipplePattern );
+#endif
+    }
+
+    bool operator!=( PipelineRasterizationLineStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationLineStateCreateInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode = VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT::eDefault;
+    VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable = {};
+    uint32_t lineStippleFactor = {};
+    uint16_t lineStipplePattern = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineRasterizationLineStateCreateInfoEXT>
+  {
+    using Type = PipelineRasterizationLineStateCreateInfoEXT;
+  };
+
+  struct PipelineRasterizationProvokingVertexStateCreateInfoEXT
+  {
+    using NativeType = VkPipelineRasterizationProvokingVertexStateCreateInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationProvokingVertexStateCreateInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PipelineRasterizationProvokingVertexStateCreateInfoEXT(VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode_ = VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT::eFirstVertex, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), provokingVertexMode( provokingVertexMode_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineRasterizationProvokingVertexStateCreateInfoEXT( PipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineRasterizationProvokingVertexStateCreateInfoEXT( VkPipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineRasterizationProvokingVertexStateCreateInfoEXT( *reinterpret_cast<PipelineRasterizationProvokingVertexStateCreateInfoEXT const *>( &rhs ) )
+    {}
+
+
+    PipelineRasterizationProvokingVertexStateCreateInfoEXT & operator=( PipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PipelineRasterizationProvokingVertexStateCreateInfoEXT & operator=( VkPipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationProvokingVertexStateCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationProvokingVertexStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationProvokingVertexStateCreateInfoEXT & setProvokingVertexMode( VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      provokingVertexMode = provokingVertexMode_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPipelineRasterizationProvokingVertexStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineRasterizationProvokingVertexStateCreateInfoEXT*>( this );
+    }
+
+    operator VkPipelineRasterizationProvokingVertexStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineRasterizationProvokingVertexStateCreateInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, provokingVertexMode );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PipelineRasterizationProvokingVertexStateCreateInfoEXT const & ) const = default;
+#else
+    bool operator==( PipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( provokingVertexMode == rhs.provokingVertexMode );
+#endif
+    }
+
+    bool operator!=( PipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationProvokingVertexStateCreateInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode = VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT::eFirstVertex;
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineRasterizationProvokingVertexStateCreateInfoEXT>
+  {
+    using Type = PipelineRasterizationProvokingVertexStateCreateInfoEXT;
+  };
+
+  struct PipelineRasterizationStateRasterizationOrderAMD
+  {
+    using NativeType = VkPipelineRasterizationStateRasterizationOrderAMD;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationStateRasterizationOrderAMD;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PipelineRasterizationStateRasterizationOrderAMD(VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder_ = VULKAN_HPP_NAMESPACE::RasterizationOrderAMD::eStrict, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), rasterizationOrder( rasterizationOrder_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineRasterizationStateRasterizationOrderAMD( PipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineRasterizationStateRasterizationOrderAMD( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineRasterizationStateRasterizationOrderAMD( *reinterpret_cast<PipelineRasterizationStateRasterizationOrderAMD const *>( &rhs ) )
+    {}
+
+
+    PipelineRasterizationStateRasterizationOrderAMD & operator=( PipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PipelineRasterizationStateRasterizationOrderAMD & operator=( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateRasterizationOrderAMD const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateRasterizationOrderAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateRasterizationOrderAMD & setRasterizationOrder( VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rasterizationOrder = rasterizationOrder_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPipelineRasterizationStateRasterizationOrderAMD const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineRasterizationStateRasterizationOrderAMD*>( this );
+    }
+
+    operator VkPipelineRasterizationStateRasterizationOrderAMD &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineRasterizationStateRasterizationOrderAMD*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::RasterizationOrderAMD const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, rasterizationOrder );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PipelineRasterizationStateRasterizationOrderAMD const & ) const = default;
+#else
+    bool operator==( PipelineRasterizationStateRasterizationOrderAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( rasterizationOrder == rhs.rasterizationOrder );
+#endif
+    }
+
+    bool operator!=( PipelineRasterizationStateRasterizationOrderAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateRasterizationOrderAMD;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder = VULKAN_HPP_NAMESPACE::RasterizationOrderAMD::eStrict;
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineRasterizationStateRasterizationOrderAMD>
+  {
+    using Type = PipelineRasterizationStateRasterizationOrderAMD;
+  };
+
+  struct PipelineRasterizationStateStreamCreateInfoEXT
+  {
+    using NativeType = VkPipelineRasterizationStateStreamCreateInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationStateStreamCreateInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PipelineRasterizationStateStreamCreateInfoEXT(VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags_ = {}, uint32_t rasterizationStream_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), rasterizationStream( rasterizationStream_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineRasterizationStateStreamCreateInfoEXT( PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineRasterizationStateStreamCreateInfoEXT( VkPipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineRasterizationStateStreamCreateInfoEXT( *reinterpret_cast<PipelineRasterizationStateStreamCreateInfoEXT const *>( &rhs ) )
+    {}
+
+
+    PipelineRasterizationStateStreamCreateInfoEXT & operator=( PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PipelineRasterizationStateStreamCreateInfoEXT & operator=( VkPipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateStreamCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateStreamCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateStreamCreateInfoEXT & setRasterizationStream( uint32_t rasterizationStream_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rasterizationStream = rasterizationStream_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPipelineRasterizationStateStreamCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineRasterizationStateStreamCreateInfoEXT*>( this );
+    }
+
+    operator VkPipelineRasterizationStateStreamCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineRasterizationStateStreamCreateInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, rasterizationStream );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PipelineRasterizationStateStreamCreateInfoEXT const & ) const = default;
+#else
+    bool operator==( PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( rasterizationStream == rhs.rasterizationStream );
+#endif
+    }
+
+    bool operator!=( PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateStreamCreateInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags = {};
+    uint32_t rasterizationStream = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineRasterizationStateStreamCreateInfoEXT>
+  {
+    using Type = PipelineRasterizationStateStreamCreateInfoEXT;
+  };
+
+  struct PipelineRenderingCreateInfo
+  {
+    using NativeType = VkPipelineRenderingCreateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRenderingCreateInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PipelineRenderingCreateInfo(uint32_t viewMask_ = {}, uint32_t colorAttachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats_ = {}, VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), viewMask( viewMask_ ), colorAttachmentCount( colorAttachmentCount_ ), pColorAttachmentFormats( pColorAttachmentFormats_ ), depthAttachmentFormat( depthAttachmentFormat_ ), stencilAttachmentFormat( stencilAttachmentFormat_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineRenderingCreateInfo( PipelineRenderingCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineRenderingCreateInfo( VkPipelineRenderingCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineRenderingCreateInfo( *reinterpret_cast<PipelineRenderingCreateInfo const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    PipelineRenderingCreateInfo( uint32_t viewMask_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & colorAttachmentFormats_, VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), viewMask( viewMask_ ), colorAttachmentCount( static_cast<uint32_t>( colorAttachmentFormats_.size() ) ), pColorAttachmentFormats( colorAttachmentFormats_.data() ), depthAttachmentFormat( depthAttachmentFormat_ ), stencilAttachmentFormat( stencilAttachmentFormat_ )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    PipelineRenderingCreateInfo & operator=( PipelineRenderingCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PipelineRenderingCreateInfo & operator=( VkPipelineRenderingCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRenderingCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo & setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      viewMask = viewMask_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      colorAttachmentCount = colorAttachmentCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo & setPColorAttachmentFormats( const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pColorAttachmentFormats = pColorAttachmentFormats_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    PipelineRenderingCreateInfo & setColorAttachmentFormats( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & colorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT
+    {
+      colorAttachmentCount = static_cast<uint32_t>( colorAttachmentFormats_.size() );
+      pColorAttachmentFormats = colorAttachmentFormats_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo & setDepthAttachmentFormat( VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthAttachmentFormat = depthAttachmentFormat_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo & setStencilAttachmentFormat( VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stencilAttachmentFormat = stencilAttachmentFormat_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPipelineRenderingCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineRenderingCreateInfo*>( this );
+    }
+
+    operator VkPipelineRenderingCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineRenderingCreateInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Format * const &, VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::Format const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, viewMask, colorAttachmentCount, pColorAttachmentFormats, depthAttachmentFormat, stencilAttachmentFormat );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PipelineRenderingCreateInfo const & ) const = default;
+#else
+    bool operator==( PipelineRenderingCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( viewMask == rhs.viewMask )
+          && ( colorAttachmentCount == rhs.colorAttachmentCount )
+          && ( pColorAttachmentFormats == rhs.pColorAttachmentFormats )
+          && ( depthAttachmentFormat == rhs.depthAttachmentFormat )
+          && ( stencilAttachmentFormat == rhs.stencilAttachmentFormat );
+#endif
+    }
+
+    bool operator!=( PipelineRenderingCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRenderingCreateInfo;
+    const void * pNext = {};
+    uint32_t viewMask = {};
+    uint32_t colorAttachmentCount = {};
+    const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats = {};
+    VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineRenderingCreateInfo>
+  {
+    using Type = PipelineRenderingCreateInfo;
+  };
+  using PipelineRenderingCreateInfoKHR = PipelineRenderingCreateInfo;
+
+  struct PipelineRepresentativeFragmentTestStateCreateInfoNV
+  {
+    using NativeType = VkPipelineRepresentativeFragmentTestStateCreateInfoNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PipelineRepresentativeFragmentTestStateCreateInfoNV(VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), representativeFragmentTestEnable( representativeFragmentTestEnable_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineRepresentativeFragmentTestStateCreateInfoNV( PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineRepresentativeFragmentTestStateCreateInfoNV( VkPipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineRepresentativeFragmentTestStateCreateInfoNV( *reinterpret_cast<PipelineRepresentativeFragmentTestStateCreateInfoNV const *>( &rhs ) )
+    {}
+
+
+    PipelineRepresentativeFragmentTestStateCreateInfoNV & operator=( PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PipelineRepresentativeFragmentTestStateCreateInfoNV & operator=( VkPipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRepresentativeFragmentTestStateCreateInfoNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PipelineRepresentativeFragmentTestStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineRepresentativeFragmentTestStateCreateInfoNV & setRepresentativeFragmentTestEnable( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      representativeFragmentTestEnable = representativeFragmentTestEnable_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPipelineRepresentativeFragmentTestStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineRepresentativeFragmentTestStateCreateInfoNV*>( this );
+    }
+
+    operator VkPipelineRepresentativeFragmentTestStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineRepresentativeFragmentTestStateCreateInfoNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, representativeFragmentTestEnable );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PipelineRepresentativeFragmentTestStateCreateInfoNV const & ) const = default;
+#else
+    bool operator==( PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( representativeFragmentTestEnable == rhs.representativeFragmentTestEnable );
+#endif
+    }
+
+    bool operator!=( PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV>
+  {
+    using Type = PipelineRepresentativeFragmentTestStateCreateInfoNV;
+  };
+
+  struct PipelineRobustnessCreateInfoEXT
+  {
+    using NativeType = VkPipelineRobustnessCreateInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRobustnessCreateInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PipelineRobustnessCreateInfoEXT(VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT storageBuffers_ = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT::eDeviceDefault, VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT uniformBuffers_ = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT::eDeviceDefault, VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT vertexInputs_ = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT::eDeviceDefault, VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehaviorEXT images_ = VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehaviorEXT::eDeviceDefault, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), storageBuffers( storageBuffers_ ), uniformBuffers( uniformBuffers_ ), vertexInputs( vertexInputs_ ), images( images_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineRobustnessCreateInfoEXT( PipelineRobustnessCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineRobustnessCreateInfoEXT( VkPipelineRobustnessCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineRobustnessCreateInfoEXT( *reinterpret_cast<PipelineRobustnessCreateInfoEXT const *>( &rhs ) )
+    {}
+
+
+    PipelineRobustnessCreateInfoEXT & operator=( PipelineRobustnessCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PipelineRobustnessCreateInfoEXT & operator=( VkPipelineRobustnessCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRobustnessCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PipelineRobustnessCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineRobustnessCreateInfoEXT & setStorageBuffers( VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT storageBuffers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      storageBuffers = storageBuffers_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineRobustnessCreateInfoEXT & setUniformBuffers( VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT uniformBuffers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      uniformBuffers = uniformBuffers_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineRobustnessCreateInfoEXT & setVertexInputs( VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT vertexInputs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexInputs = vertexInputs_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineRobustnessCreateInfoEXT & setImages( VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehaviorEXT images_ ) VULKAN_HPP_NOEXCEPT
+    {
+      images = images_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPipelineRobustnessCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineRobustnessCreateInfoEXT*>( this );
+    }
+
+    operator VkPipelineRobustnessCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineRobustnessCreateInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT const &, VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT const &, VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT const &, VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehaviorEXT const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, storageBuffers, uniformBuffers, vertexInputs, images );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PipelineRobustnessCreateInfoEXT const & ) const = default;
+#else
+    bool operator==( PipelineRobustnessCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( storageBuffers == rhs.storageBuffers )
+          && ( uniformBuffers == rhs.uniformBuffers )
+          && ( vertexInputs == rhs.vertexInputs )
+          && ( images == rhs.images );
+#endif
+    }
+
+    bool operator!=( PipelineRobustnessCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRobustnessCreateInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT storageBuffers = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT::eDeviceDefault;
+    VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT uniformBuffers = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT::eDeviceDefault;
+    VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT vertexInputs = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT::eDeviceDefault;
+    VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehaviorEXT images = VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehaviorEXT::eDeviceDefault;
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineRobustnessCreateInfoEXT>
+  {
+    using Type = PipelineRobustnessCreateInfoEXT;
+  };
+
+  struct PipelineSampleLocationsStateCreateInfoEXT
+  {
+    using NativeType = VkPipelineSampleLocationsStateCreateInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineSampleLocationsStateCreateInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PipelineSampleLocationsStateCreateInfoEXT(VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable_ = {}, VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), sampleLocationsEnable( sampleLocationsEnable_ ), sampleLocationsInfo( sampleLocationsInfo_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineSampleLocationsStateCreateInfoEXT( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineSampleLocationsStateCreateInfoEXT( VkPipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineSampleLocationsStateCreateInfoEXT( *reinterpret_cast<PipelineSampleLocationsStateCreateInfoEXT const *>( &rhs ) )
+    {}
+
+
+    PipelineSampleLocationsStateCreateInfoEXT & operator=( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PipelineSampleLocationsStateCreateInfoEXT & operator=( VkPipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineSampleLocationsStateCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PipelineSampleLocationsStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineSampleLocationsStateCreateInfoEXT & setSampleLocationsEnable( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sampleLocationsEnable = sampleLocationsEnable_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineSampleLocationsStateCreateInfoEXT & setSampleLocationsInfo( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sampleLocationsInfo = sampleLocationsInfo_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPipelineSampleLocationsStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineSampleLocationsStateCreateInfoEXT*>( this );
+    }
+
+    operator VkPipelineSampleLocationsStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineSampleLocationsStateCreateInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, sampleLocationsEnable, sampleLocationsInfo );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PipelineSampleLocationsStateCreateInfoEXT const & ) const = default;
+#else
+    bool operator==( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( sampleLocationsEnable == rhs.sampleLocationsEnable )
+          && ( sampleLocationsInfo == rhs.sampleLocationsInfo );
+#endif
+    }
+
+    bool operator!=( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineSampleLocationsStateCreateInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable = {};
+    VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineSampleLocationsStateCreateInfoEXT>
+  {
+    using Type = PipelineSampleLocationsStateCreateInfoEXT;
+  };
+
+  // C++ wrapper for VkPipelineShaderStageModuleIdentifierCreateInfoEXT
+  // (VK_EXT_shader_module_identifier): carries an opaque module identifier
+  // (identifierSize bytes at pIdentifier) for a pipeline shader stage.
+  // Generated code; layout-compatible with the C struct (see reinterpret_casts below).
+  struct PipelineShaderStageModuleIdentifierCreateInfoEXT
+  {
+    using NativeType = VkPipelineShaderStageModuleIdentifierCreateInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineShaderStageModuleIdentifierCreateInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PipelineShaderStageModuleIdentifierCreateInfoEXT(uint32_t identifierSize_ = {}, const uint8_t * pIdentifier_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), identifierSize( identifierSize_ ), pIdentifier( pIdentifier_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineShaderStageModuleIdentifierCreateInfoEXT( PipelineShaderStageModuleIdentifierCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; safe because the two types share one layout.
+    PipelineShaderStageModuleIdentifierCreateInfoEXT( VkPipelineShaderStageModuleIdentifierCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineShaderStageModuleIdentifierCreateInfoEXT( *reinterpret_cast<PipelineShaderStageModuleIdentifierCreateInfoEXT const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Convenience: derive identifierSize / pIdentifier from an array proxy.
+    PipelineShaderStageModuleIdentifierCreateInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint8_t> const & identifier_, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), identifierSize( static_cast<uint32_t>( identifier_.size() ) ), pIdentifier( identifier_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    PipelineShaderStageModuleIdentifierCreateInfoEXT & operator=( PipelineShaderStageModuleIdentifierCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PipelineShaderStageModuleIdentifierCreateInfoEXT & operator=( VkPipelineShaderStageModuleIdentifierCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineShaderStageModuleIdentifierCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable member setters (builder style).
+    VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageModuleIdentifierCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageModuleIdentifierCreateInfoEXT & setIdentifierSize( uint32_t identifierSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      identifierSize = identifierSize_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageModuleIdentifierCreateInfoEXT & setPIdentifier( const uint8_t * pIdentifier_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pIdentifier = pIdentifier_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets both identifierSize and pIdentifier from one array proxy.
+    PipelineShaderStageModuleIdentifierCreateInfoEXT & setIdentifier( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint8_t> const & identifier_ ) VULKAN_HPP_NOEXCEPT
+    {
+      identifierSize = static_cast<uint32_t>( identifier_.size() );
+      pIdentifier = identifier_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the C struct (layout-identical by construction).
+    operator VkPipelineShaderStageModuleIdentifierCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineShaderStageModuleIdentifierCreateInfoEXT*>( this );
+    }
+
+    operator VkPipelineShaderStageModuleIdentifierCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineShaderStageModuleIdentifierCreateInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const uint8_t * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      // Tuple of references to all members, in declaration order.
+      return std::tie( sType, pNext, identifierSize, pIdentifier );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PipelineShaderStageModuleIdentifierCreateInfoEXT const & ) const = default;
+#else
+    // NOTE: compares pIdentifier by pointer value, not by the bytes it points to.
+    bool operator==( PipelineShaderStageModuleIdentifierCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( identifierSize == rhs.identifierSize )
+          && ( pIdentifier == rhs.pIdentifier );
+#endif
+    }
+
+    bool operator!=( PipelineShaderStageModuleIdentifierCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineShaderStageModuleIdentifierCreateInfoEXT;
+    const void * pNext = {};
+    uint32_t identifierSize = {};
+    const uint8_t * pIdentifier = {};
+
+  };
+
+  // StructureType -> C++ type mapping for structure-chain lookup.
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineShaderStageModuleIdentifierCreateInfoEXT>
+  {
+    using Type = PipelineShaderStageModuleIdentifierCreateInfoEXT;
+  };
+
+  // C++ wrapper for VkPipelineShaderStageRequiredSubgroupSizeCreateInfo:
+  // requests a specific subgroup size for one shader stage. Promoted to core;
+  // the EXT alias is declared right after the struct. Generated code;
+  // layout-compatible with the C struct. Note: no setters are generated for
+  // this struct (the required size is normally fixed at construction).
+  struct PipelineShaderStageRequiredSubgroupSizeCreateInfo
+  {
+    using NativeType = VkPipelineShaderStageRequiredSubgroupSizeCreateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PipelineShaderStageRequiredSubgroupSizeCreateInfo(uint32_t requiredSubgroupSize_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), requiredSubgroupSize( requiredSubgroupSize_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineShaderStageRequiredSubgroupSizeCreateInfo( PipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; safe because the two types share one layout.
+    PipelineShaderStageRequiredSubgroupSizeCreateInfo( VkPipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineShaderStageRequiredSubgroupSizeCreateInfo( *reinterpret_cast<PipelineShaderStageRequiredSubgroupSizeCreateInfo const *>( &rhs ) )
+    {}
+
+
+    PipelineShaderStageRequiredSubgroupSizeCreateInfo & operator=( PipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PipelineShaderStageRequiredSubgroupSizeCreateInfo & operator=( VkPipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+
+    // Implicit conversions to the C struct (layout-identical by construction).
+    operator VkPipelineShaderStageRequiredSubgroupSizeCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineShaderStageRequiredSubgroupSizeCreateInfo*>( this );
+    }
+
+    operator VkPipelineShaderStageRequiredSubgroupSizeCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineShaderStageRequiredSubgroupSizeCreateInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      // Tuple of references to all members, in declaration order.
+      return std::tie( sType, pNext, requiredSubgroupSize );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PipelineShaderStageRequiredSubgroupSizeCreateInfo const & ) const = default;
+#else
+    bool operator==( PipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( requiredSubgroupSize == rhs.requiredSubgroupSize );
+#endif
+    }
+
+    bool operator!=( PipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfo;
+    void * pNext = {};
+    uint32_t requiredSubgroupSize = {};
+
+  };
+
+  // StructureType -> C++ type mapping for structure-chain lookup.
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfo>
+  {
+    using Type = PipelineShaderStageRequiredSubgroupSizeCreateInfo;
+  };
+  // Backward-compatible alias for the original EXT extension name.
+  using PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT = PipelineShaderStageRequiredSubgroupSizeCreateInfo;
+
+  // C++ wrapper for VkPipelineTessellationDomainOriginStateCreateInfo:
+  // selects the tessellation domain origin (defaults to eUpperLeft).
+  // Promoted to core; the KHR alias is declared right after the struct.
+  // Generated code; layout-compatible with the C struct.
+  struct PipelineTessellationDomainOriginStateCreateInfo
+  {
+    using NativeType = VkPipelineTessellationDomainOriginStateCreateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineTessellationDomainOriginStateCreateInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PipelineTessellationDomainOriginStateCreateInfo(VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin_ = VULKAN_HPP_NAMESPACE::TessellationDomainOrigin::eUpperLeft, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), domainOrigin( domainOrigin_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineTessellationDomainOriginStateCreateInfo( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; safe because the two types share one layout.
+    PipelineTessellationDomainOriginStateCreateInfo( VkPipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineTessellationDomainOriginStateCreateInfo( *reinterpret_cast<PipelineTessellationDomainOriginStateCreateInfo const *>( &rhs ) )
+    {}
+
+
+    PipelineTessellationDomainOriginStateCreateInfo & operator=( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PipelineTessellationDomainOriginStateCreateInfo & operator=( VkPipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable member setters (builder style).
+    VULKAN_HPP_CONSTEXPR_14 PipelineTessellationDomainOriginStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineTessellationDomainOriginStateCreateInfo & setDomainOrigin( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin_ ) VULKAN_HPP_NOEXCEPT
+    {
+      domainOrigin = domainOrigin_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the C struct (layout-identical by construction).
+    operator VkPipelineTessellationDomainOriginStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineTessellationDomainOriginStateCreateInfo*>( this );
+    }
+
+    operator VkPipelineTessellationDomainOriginStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineTessellationDomainOriginStateCreateInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::TessellationDomainOrigin const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      // Tuple of references to all members, in declaration order.
+      return std::tie( sType, pNext, domainOrigin );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PipelineTessellationDomainOriginStateCreateInfo const & ) const = default;
+#else
+    bool operator==( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( domainOrigin == rhs.domainOrigin );
+#endif
+    }
+
+    bool operator!=( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineTessellationDomainOriginStateCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin = VULKAN_HPP_NAMESPACE::TessellationDomainOrigin::eUpperLeft;
+
+  };
+
+  // StructureType -> C++ type mapping for structure-chain lookup.
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineTessellationDomainOriginStateCreateInfo>
+  {
+    using Type = PipelineTessellationDomainOriginStateCreateInfo;
+  };
+  // Backward-compatible alias for the original KHR extension name.
+  using PipelineTessellationDomainOriginStateCreateInfoKHR = PipelineTessellationDomainOriginStateCreateInfo;
+
+  // C++ wrapper for VkVertexInputBindingDivisorDescriptionEXT
+  // (VK_EXT_vertex_attribute_divisor): pairs a vertex binding index with its
+  // instance-rate divisor. Plain POD — unlike the CreateInfo structs above it
+  // has no sType/pNext. Generated code; layout-compatible with the C struct.
+  struct VertexInputBindingDivisorDescriptionEXT
+  {
+    using NativeType = VkVertexInputBindingDivisorDescriptionEXT;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR VertexInputBindingDivisorDescriptionEXT(uint32_t binding_ = {}, uint32_t divisor_ = {}) VULKAN_HPP_NOEXCEPT
+    : binding( binding_ ), divisor( divisor_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VertexInputBindingDivisorDescriptionEXT( VertexInputBindingDivisorDescriptionEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; safe because the two types share one layout.
+    VertexInputBindingDivisorDescriptionEXT( VkVertexInputBindingDivisorDescriptionEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VertexInputBindingDivisorDescriptionEXT( *reinterpret_cast<VertexInputBindingDivisorDescriptionEXT const *>( &rhs ) )
+    {}
+
+
+    VertexInputBindingDivisorDescriptionEXT & operator=( VertexInputBindingDivisorDescriptionEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VertexInputBindingDivisorDescriptionEXT & operator=( VkVertexInputBindingDivisorDescriptionEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable member setters (builder style).
+    VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDivisorDescriptionEXT & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
+    {
+      binding = binding_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDivisorDescriptionEXT & setDivisor( uint32_t divisor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      divisor = divisor_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the C struct (layout-identical by construction).
+    operator VkVertexInputBindingDivisorDescriptionEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVertexInputBindingDivisorDescriptionEXT*>( this );
+    }
+
+    operator VkVertexInputBindingDivisorDescriptionEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVertexInputBindingDivisorDescriptionEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<uint32_t const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      // Tuple of references to all members, in declaration order.
+      return std::tie( binding, divisor );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( VertexInputBindingDivisorDescriptionEXT const & ) const = default;
+#else
+    bool operator==( VertexInputBindingDivisorDescriptionEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( binding == rhs.binding )
+          && ( divisor == rhs.divisor );
+#endif
+    }
+
+    bool operator!=( VertexInputBindingDivisorDescriptionEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    uint32_t binding = {};
+    uint32_t divisor = {};
+
+  };
+
+  // C++ wrapper for VkPipelineVertexInputDivisorStateCreateInfoEXT
+  // (VK_EXT_vertex_attribute_divisor): supplies an array of per-binding
+  // instance divisors for pipeline vertex-input state. Generated code;
+  // layout-compatible with the C struct.
+  struct PipelineVertexInputDivisorStateCreateInfoEXT
+  {
+    using NativeType = VkPipelineVertexInputDivisorStateCreateInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PipelineVertexInputDivisorStateCreateInfoEXT(uint32_t vertexBindingDivisorCount_ = {}, const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT * pVertexBindingDivisors_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), vertexBindingDivisorCount( vertexBindingDivisorCount_ ), pVertexBindingDivisors( pVertexBindingDivisors_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineVertexInputDivisorStateCreateInfoEXT( PipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; safe because the two types share one layout.
+    PipelineVertexInputDivisorStateCreateInfoEXT( VkPipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineVertexInputDivisorStateCreateInfoEXT( *reinterpret_cast<PipelineVertexInputDivisorStateCreateInfoEXT const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Convenience: derive count/pointer from an array proxy of divisor descriptions.
+    PipelineVertexInputDivisorStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT> const & vertexBindingDivisors_, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), vertexBindingDivisorCount( static_cast<uint32_t>( vertexBindingDivisors_.size() ) ), pVertexBindingDivisors( vertexBindingDivisors_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    PipelineVertexInputDivisorStateCreateInfoEXT & operator=( PipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PipelineVertexInputDivisorStateCreateInfoEXT & operator=( VkPipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable member setters (builder style).
+    VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputDivisorStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputDivisorStateCreateInfoEXT & setVertexBindingDivisorCount( uint32_t vertexBindingDivisorCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexBindingDivisorCount = vertexBindingDivisorCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputDivisorStateCreateInfoEXT & setPVertexBindingDivisors( const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT * pVertexBindingDivisors_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pVertexBindingDivisors = pVertexBindingDivisors_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets both count and pointer from one array proxy.
+    PipelineVertexInputDivisorStateCreateInfoEXT & setVertexBindingDivisors( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT> const & vertexBindingDivisors_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexBindingDivisorCount = static_cast<uint32_t>( vertexBindingDivisors_.size() );
+      pVertexBindingDivisors = vertexBindingDivisors_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the C struct (layout-identical by construction).
+    operator VkPipelineVertexInputDivisorStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineVertexInputDivisorStateCreateInfoEXT*>( this );
+    }
+
+    operator VkPipelineVertexInputDivisorStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineVertexInputDivisorStateCreateInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      // Tuple of references to all members, in declaration order.
+      return std::tie( sType, pNext, vertexBindingDivisorCount, pVertexBindingDivisors );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PipelineVertexInputDivisorStateCreateInfoEXT const & ) const = default;
+#else
+    // NOTE: compares pVertexBindingDivisors by pointer value, not element-wise.
+    bool operator==( PipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( vertexBindingDivisorCount == rhs.vertexBindingDivisorCount )
+          && ( pVertexBindingDivisors == rhs.pVertexBindingDivisors );
+#endif
+    }
+
+    bool operator!=( PipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT;
+    const void * pNext = {};
+    uint32_t vertexBindingDivisorCount = {};
+    const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT * pVertexBindingDivisors = {};
+
+  };
+
+  // StructureType -> C++ type mapping for structure-chain lookup.
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT>
+  {
+    using Type = PipelineVertexInputDivisorStateCreateInfoEXT;
+  };
+
+  // C++ wrapper for VkPipelineViewportCoarseSampleOrderStateCreateInfoNV
+  // (NVIDIA shading-rate extension): selects the coarse sample ordering type
+  // and, optionally, an array of custom sample orders. Generated code;
+  // layout-compatible with the C struct.
+  struct PipelineViewportCoarseSampleOrderStateCreateInfoNV
+  {
+    using NativeType = VkPipelineViewportCoarseSampleOrderStateCreateInfoNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PipelineViewportCoarseSampleOrderStateCreateInfoNV(VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_ = VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV::eDefault, uint32_t customSampleOrderCount_ = {}, const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), sampleOrderType( sampleOrderType_ ), customSampleOrderCount( customSampleOrderCount_ ), pCustomSampleOrders( pCustomSampleOrders_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineViewportCoarseSampleOrderStateCreateInfoNV( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; safe because the two types share one layout.
+    PipelineViewportCoarseSampleOrderStateCreateInfoNV( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineViewportCoarseSampleOrderStateCreateInfoNV( *reinterpret_cast<PipelineViewportCoarseSampleOrderStateCreateInfoNV const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Convenience: derive count/pointer from an array proxy of custom sample orders.
+    PipelineViewportCoarseSampleOrderStateCreateInfoNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> const & customSampleOrders_, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), sampleOrderType( sampleOrderType_ ), customSampleOrderCount( static_cast<uint32_t>( customSampleOrders_.size() ) ), pCustomSampleOrders( customSampleOrders_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    PipelineViewportCoarseSampleOrderStateCreateInfoNV & operator=( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PipelineViewportCoarseSampleOrderStateCreateInfoNV & operator=( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportCoarseSampleOrderStateCreateInfoNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable member setters (builder style).
+    VULKAN_HPP_CONSTEXPR_14 PipelineViewportCoarseSampleOrderStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineViewportCoarseSampleOrderStateCreateInfoNV & setSampleOrderType( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sampleOrderType = sampleOrderType_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineViewportCoarseSampleOrderStateCreateInfoNV & setCustomSampleOrderCount( uint32_t customSampleOrderCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      customSampleOrderCount = customSampleOrderCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineViewportCoarseSampleOrderStateCreateInfoNV & setPCustomSampleOrders( const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pCustomSampleOrders = pCustomSampleOrders_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets both count and pointer from one array proxy.
+    PipelineViewportCoarseSampleOrderStateCreateInfoNV & setCustomSampleOrders( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> const & customSampleOrders_ ) VULKAN_HPP_NOEXCEPT
+    {
+      customSampleOrderCount = static_cast<uint32_t>( customSampleOrders_.size() );
+      pCustomSampleOrders = customSampleOrders_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the C struct (layout-identical by construction).
+    operator VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineViewportCoarseSampleOrderStateCreateInfoNV*>( this );
+    }
+
+    operator VkPipelineViewportCoarseSampleOrderStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineViewportCoarseSampleOrderStateCreateInfoNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      // Tuple of references to all members, in declaration order.
+      return std::tie( sType, pNext, sampleOrderType, customSampleOrderCount, pCustomSampleOrders );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & ) const = default;
+#else
+    // NOTE: compares pCustomSampleOrders by pointer value, not element-wise.
+    bool operator==( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( sampleOrderType == rhs.sampleOrderType )
+          && ( customSampleOrderCount == rhs.customSampleOrderCount )
+          && ( pCustomSampleOrders == rhs.pCustomSampleOrders );
+#endif
+    }
+
+    bool operator!=( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType = VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV::eDefault;
+    uint32_t customSampleOrderCount = {};
+    const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders = {};
+
+  };
+
+  // StructureType -> C++ type mapping for structure-chain lookup.
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV>
+  {
+    using Type = PipelineViewportCoarseSampleOrderStateCreateInfoNV;
+  };
+
+  // C++ wrapper for VkPipelineViewportDepthClipControlCreateInfoEXT
+  // (VK_EXT_depth_clip_control): negativeOneToOne selects the [-1, 1]
+  // clip-space depth range instead of the Vulkan default [0, 1].
+  // Generated code; layout-compatible with the C struct.
+  struct PipelineViewportDepthClipControlCreateInfoEXT
+  {
+    using NativeType = VkPipelineViewportDepthClipControlCreateInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportDepthClipControlCreateInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PipelineViewportDepthClipControlCreateInfoEXT(VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), negativeOneToOne( negativeOneToOne_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineViewportDepthClipControlCreateInfoEXT( PipelineViewportDepthClipControlCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; safe because the two types share one layout.
+    PipelineViewportDepthClipControlCreateInfoEXT( VkPipelineViewportDepthClipControlCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineViewportDepthClipControlCreateInfoEXT( *reinterpret_cast<PipelineViewportDepthClipControlCreateInfoEXT const *>( &rhs ) )
+    {}
+
+
+    PipelineViewportDepthClipControlCreateInfoEXT & operator=( PipelineViewportDepthClipControlCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PipelineViewportDepthClipControlCreateInfoEXT & operator=( VkPipelineViewportDepthClipControlCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportDepthClipControlCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable member setters (builder style).
+    VULKAN_HPP_CONSTEXPR_14 PipelineViewportDepthClipControlCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineViewportDepthClipControlCreateInfoEXT & setNegativeOneToOne( VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne_ ) VULKAN_HPP_NOEXCEPT
+    {
+      negativeOneToOne = negativeOneToOne_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the C struct (layout-identical by construction).
+    operator VkPipelineViewportDepthClipControlCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineViewportDepthClipControlCreateInfoEXT*>( this );
+    }
+
+    operator VkPipelineViewportDepthClipControlCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineViewportDepthClipControlCreateInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      // Tuple of references to all members, in declaration order.
+      return std::tie( sType, pNext, negativeOneToOne );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PipelineViewportDepthClipControlCreateInfoEXT const & ) const = default;
+#else
+    bool operator==( PipelineViewportDepthClipControlCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( negativeOneToOne == rhs.negativeOneToOne );
+#endif
+    }
+
+    bool operator!=( PipelineViewportDepthClipControlCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportDepthClipControlCreateInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne = {};
+
+  };
+
+  // StructureType -> C++ type mapping for structure-chain lookup.
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineViewportDepthClipControlCreateInfoEXT>
+  {
+    using Type = PipelineViewportDepthClipControlCreateInfoEXT;
+  };
+
+  struct PipelineViewportExclusiveScissorStateCreateInfoNV
+  {
+    using NativeType = VkPipelineViewportExclusiveScissorStateCreateInfoNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PipelineViewportExclusiveScissorStateCreateInfoNV(uint32_t exclusiveScissorCount_ = {}, const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), exclusiveScissorCount( exclusiveScissorCount_ ), pExclusiveScissors( pExclusiveScissors_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineViewportExclusiveScissorStateCreateInfoNV( PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineViewportExclusiveScissorStateCreateInfoNV( VkPipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineViewportExclusiveScissorStateCreateInfoNV( *reinterpret_cast<PipelineViewportExclusiveScissorStateCreateInfoNV const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    PipelineViewportExclusiveScissorStateCreateInfoNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors_, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), exclusiveScissorCount( static_cast<uint32_t>( exclusiveScissors_.size() ) ), pExclusiveScissors( exclusiveScissors_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    PipelineViewportExclusiveScissorStateCreateInfoNV & operator=( PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PipelineViewportExclusiveScissorStateCreateInfoNV & operator=( VkPipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportExclusiveScissorStateCreateInfoNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PipelineViewportExclusiveScissorStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineViewportExclusiveScissorStateCreateInfoNV & setExclusiveScissorCount( uint32_t exclusiveScissorCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      exclusiveScissorCount = exclusiveScissorCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineViewportExclusiveScissorStateCreateInfoNV & setPExclusiveScissors( const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pExclusiveScissors = pExclusiveScissors_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    PipelineViewportExclusiveScissorStateCreateInfoNV & setExclusiveScissors( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors_ ) VULKAN_HPP_NOEXCEPT
+    {
+      exclusiveScissorCount = static_cast<uint32_t>( exclusiveScissors_.size() );
+      pExclusiveScissors = exclusiveScissors_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPipelineViewportExclusiveScissorStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineViewportExclusiveScissorStateCreateInfoNV*>( this );
+    }
+
+    operator VkPipelineViewportExclusiveScissorStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineViewportExclusiveScissorStateCreateInfoNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Rect2D * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, exclusiveScissorCount, pExclusiveScissors );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PipelineViewportExclusiveScissorStateCreateInfoNV const & ) const = default;
+#else
+    bool operator==( PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( exclusiveScissorCount == rhs.exclusiveScissorCount )
+          && ( pExclusiveScissors == rhs.pExclusiveScissors );
+#endif
+    }
+
+    bool operator!=( PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV;
+    const void * pNext = {};
+    uint32_t exclusiveScissorCount = {};
+    const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV>
+  {
+    using Type = PipelineViewportExclusiveScissorStateCreateInfoNV;
+  };
+
+  struct ShadingRatePaletteNV
+  {
+    using NativeType = VkShadingRatePaletteNV;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ShadingRatePaletteNV(uint32_t shadingRatePaletteEntryCount_ = {}, const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV * pShadingRatePaletteEntries_ = {}) VULKAN_HPP_NOEXCEPT
+    : shadingRatePaletteEntryCount( shadingRatePaletteEntryCount_ ), pShadingRatePaletteEntries( pShadingRatePaletteEntries_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ShadingRatePaletteNV( ShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ShadingRatePaletteNV( VkShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ShadingRatePaletteNV( *reinterpret_cast<ShadingRatePaletteNV const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    ShadingRatePaletteNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV> const & shadingRatePaletteEntries_ )
+    : shadingRatePaletteEntryCount( static_cast<uint32_t>( shadingRatePaletteEntries_.size() ) ), pShadingRatePaletteEntries( shadingRatePaletteEntries_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    ShadingRatePaletteNV & operator=( ShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ShadingRatePaletteNV & operator=( VkShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ShadingRatePaletteNV & setShadingRatePaletteEntryCount( uint32_t shadingRatePaletteEntryCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shadingRatePaletteEntryCount = shadingRatePaletteEntryCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ShadingRatePaletteNV & setPShadingRatePaletteEntries( const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV * pShadingRatePaletteEntries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pShadingRatePaletteEntries = pShadingRatePaletteEntries_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    ShadingRatePaletteNV & setShadingRatePaletteEntries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV> const & shadingRatePaletteEntries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shadingRatePaletteEntryCount = static_cast<uint32_t>( shadingRatePaletteEntries_.size() );
+      pShadingRatePaletteEntries = shadingRatePaletteEntries_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkShadingRatePaletteNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkShadingRatePaletteNV*>( this );
+    }
+
+    operator VkShadingRatePaletteNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkShadingRatePaletteNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<uint32_t const &, const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( shadingRatePaletteEntryCount, pShadingRatePaletteEntries );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ShadingRatePaletteNV const & ) const = default;
+#else
+    bool operator==( ShadingRatePaletteNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( shadingRatePaletteEntryCount == rhs.shadingRatePaletteEntryCount )
+          && ( pShadingRatePaletteEntries == rhs.pShadingRatePaletteEntries );
+#endif
+    }
+
+    bool operator!=( ShadingRatePaletteNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    uint32_t shadingRatePaletteEntryCount = {};
+    const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV * pShadingRatePaletteEntries = {};
+
+  };
+
+  struct PipelineViewportShadingRateImageStateCreateInfoNV
+  {
+    using NativeType = VkPipelineViewportShadingRateImageStateCreateInfoNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PipelineViewportShadingRateImageStateCreateInfoNV(VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_ = {}, uint32_t viewportCount_ = {}, const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), shadingRateImageEnable( shadingRateImageEnable_ ), viewportCount( viewportCount_ ), pShadingRatePalettes( pShadingRatePalettes_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineViewportShadingRateImageStateCreateInfoNV( PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineViewportShadingRateImageStateCreateInfoNV( VkPipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineViewportShadingRateImageStateCreateInfoNV( *reinterpret_cast<PipelineViewportShadingRateImageStateCreateInfoNV const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    PipelineViewportShadingRateImageStateCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const & shadingRatePalettes_, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), shadingRateImageEnable( shadingRateImageEnable_ ), viewportCount( static_cast<uint32_t>( shadingRatePalettes_.size() ) ), pShadingRatePalettes( shadingRatePalettes_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    PipelineViewportShadingRateImageStateCreateInfoNV & operator=( PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PipelineViewportShadingRateImageStateCreateInfoNV & operator=( VkPipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportShadingRateImageStateCreateInfoNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PipelineViewportShadingRateImageStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineViewportShadingRateImageStateCreateInfoNV & setShadingRateImageEnable( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shadingRateImageEnable = shadingRateImageEnable_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineViewportShadingRateImageStateCreateInfoNV & setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      viewportCount = viewportCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineViewportShadingRateImageStateCreateInfoNV & setPShadingRatePalettes( const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pShadingRatePalettes = pShadingRatePalettes_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    PipelineViewportShadingRateImageStateCreateInfoNV & setShadingRatePalettes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const & shadingRatePalettes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      viewportCount = static_cast<uint32_t>( shadingRatePalettes_.size() );
+      pShadingRatePalettes = shadingRatePalettes_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPipelineViewportShadingRateImageStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineViewportShadingRateImageStateCreateInfoNV*>( this );
+    }
+
+    operator VkPipelineViewportShadingRateImageStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineViewportShadingRateImageStateCreateInfoNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, shadingRateImageEnable, viewportCount, pShadingRatePalettes );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PipelineViewportShadingRateImageStateCreateInfoNV const & ) const = default;
+#else
+    bool operator==( PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shadingRateImageEnable == rhs.shadingRateImageEnable )
+          && ( viewportCount == rhs.viewportCount )
+          && ( pShadingRatePalettes == rhs.pShadingRatePalettes );
+#endif
+    }
+
+    bool operator!=( PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable = {};
+    uint32_t viewportCount = {};
+    const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV>
+  {
+    using Type = PipelineViewportShadingRateImageStateCreateInfoNV;
+  };
+
+  struct ViewportSwizzleNV
+  {
+    using NativeType = VkViewportSwizzleNV;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ViewportSwizzleNV(VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV x_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX, VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV y_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX, VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX, VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX) VULKAN_HPP_NOEXCEPT
+    : x( x_ ), y( y_ ), z( z_ ), w( w_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ViewportSwizzleNV( ViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ViewportSwizzleNV( VkViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ViewportSwizzleNV( *reinterpret_cast<ViewportSwizzleNV const *>( &rhs ) )
+    {}
+
+
+    ViewportSwizzleNV & operator=( ViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ViewportSwizzleNV & operator=( VkViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ViewportSwizzleNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ViewportSwizzleNV & setX( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV x_ ) VULKAN_HPP_NOEXCEPT
+    {
+      x = x_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ViewportSwizzleNV & setY( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV y_ ) VULKAN_HPP_NOEXCEPT
+    {
+      y = y_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ViewportSwizzleNV & setZ( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z_ ) VULKAN_HPP_NOEXCEPT
+    {
+      z = z_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ViewportSwizzleNV & setW( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w_ ) VULKAN_HPP_NOEXCEPT
+    {
+      w = w_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkViewportSwizzleNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkViewportSwizzleNV*>( this );
+    }
+
+    operator VkViewportSwizzleNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkViewportSwizzleNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV const &, VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV const &, VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV const &, VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( x, y, z, w );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ViewportSwizzleNV const & ) const = default;
+#else
+    bool operator==( ViewportSwizzleNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( x == rhs.x )
+          && ( y == rhs.y )
+          && ( z == rhs.z )
+          && ( w == rhs.w );
+#endif
+    }
+
+    bool operator!=( ViewportSwizzleNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV x = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX;
+    VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV y = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX;
+    VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX;
+    VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX;
+
+  };
+
+  struct PipelineViewportSwizzleStateCreateInfoNV
+  {
+    using NativeType = VkPipelineViewportSwizzleStateCreateInfoNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportSwizzleStateCreateInfoNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PipelineViewportSwizzleStateCreateInfoNV(VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_ = {}, uint32_t viewportCount_ = {}, const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * pViewportSwizzles_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), viewportCount( viewportCount_ ), pViewportSwizzles( pViewportSwizzles_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineViewportSwizzleStateCreateInfoNV( PipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineViewportSwizzleStateCreateInfoNV( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineViewportSwizzleStateCreateInfoNV( *reinterpret_cast<PipelineViewportSwizzleStateCreateInfoNV const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    PipelineViewportSwizzleStateCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV> const & viewportSwizzles_, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), flags( flags_ ), viewportCount( static_cast<uint32_t>( viewportSwizzles_.size() ) ), pViewportSwizzles( viewportSwizzles_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    PipelineViewportSwizzleStateCreateInfoNV & operator=( PipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PipelineViewportSwizzleStateCreateInfoNV & operator=( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateInfoNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PipelineViewportSwizzleStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineViewportSwizzleStateCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineViewportSwizzleStateCreateInfoNV & setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      viewportCount = viewportCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineViewportSwizzleStateCreateInfoNV & setPViewportSwizzles( const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * pViewportSwizzles_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pViewportSwizzles = pViewportSwizzles_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    PipelineViewportSwizzleStateCreateInfoNV & setViewportSwizzles( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV> const & viewportSwizzles_ ) VULKAN_HPP_NOEXCEPT
+    {
+      viewportCount = static_cast<uint32_t>( viewportSwizzles_.size() );
+      pViewportSwizzles = viewportSwizzles_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPipelineViewportSwizzleStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineViewportSwizzleStateCreateInfoNV*>( this );
+    }
+
+    operator VkPipelineViewportSwizzleStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineViewportSwizzleStateCreateInfoNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, viewportCount, pViewportSwizzles );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PipelineViewportSwizzleStateCreateInfoNV const & ) const = default;
+#else
+    bool operator==( PipelineViewportSwizzleStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( viewportCount == rhs.viewportCount )
+          && ( pViewportSwizzles == rhs.pViewportSwizzles );
+#endif
+    }
+
+    bool operator!=( PipelineViewportSwizzleStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportSwizzleStateCreateInfoNV;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags = {};
+    uint32_t viewportCount = {};
+    const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * pViewportSwizzles = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineViewportSwizzleStateCreateInfoNV>
+  {
+    using Type = PipelineViewportSwizzleStateCreateInfoNV;
+  };
+
+  struct ViewportWScalingNV
+  {
+    using NativeType = VkViewportWScalingNV;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ViewportWScalingNV(float xcoeff_ = {}, float ycoeff_ = {}) VULKAN_HPP_NOEXCEPT
+    : xcoeff( xcoeff_ ), ycoeff( ycoeff_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ViewportWScalingNV( ViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ViewportWScalingNV( VkViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ViewportWScalingNV( *reinterpret_cast<ViewportWScalingNV const *>( &rhs ) )
+    {}
+
+
+    ViewportWScalingNV & operator=( ViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ViewportWScalingNV & operator=( VkViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ViewportWScalingNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ViewportWScalingNV & setXcoeff( float xcoeff_ ) VULKAN_HPP_NOEXCEPT
+    {
+      xcoeff = xcoeff_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ViewportWScalingNV & setYcoeff( float ycoeff_ ) VULKAN_HPP_NOEXCEPT
+    {
+      ycoeff = ycoeff_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkViewportWScalingNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkViewportWScalingNV*>( this );
+    }
+
+    operator VkViewportWScalingNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkViewportWScalingNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<float const &, float const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( xcoeff, ycoeff );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ViewportWScalingNV const & ) const = default;
+#else
+    bool operator==( ViewportWScalingNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( xcoeff == rhs.xcoeff )
+          && ( ycoeff == rhs.ycoeff );
+#endif
+    }
+
+    bool operator!=( ViewportWScalingNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    float xcoeff = {};
+    float ycoeff = {};
+
+  };
+
+  struct PipelineViewportWScalingStateCreateInfoNV
+  {
+    using NativeType = VkPipelineViewportWScalingStateCreateInfoNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportWScalingStateCreateInfoNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PipelineViewportWScalingStateCreateInfoNV(VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable_ = {}, uint32_t viewportCount_ = {}, const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), viewportWScalingEnable( viewportWScalingEnable_ ), viewportCount( viewportCount_ ), pViewportWScalings( pViewportWScalings_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineViewportWScalingStateCreateInfoNV( PipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineViewportWScalingStateCreateInfoNV( VkPipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineViewportWScalingStateCreateInfoNV( *reinterpret_cast<PipelineViewportWScalingStateCreateInfoNV const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    PipelineViewportWScalingStateCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const & viewportWScalings_, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), viewportWScalingEnable( viewportWScalingEnable_ ), viewportCount( static_cast<uint32_t>( viewportWScalings_.size() ) ), pViewportWScalings( viewportWScalings_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    PipelineViewportWScalingStateCreateInfoNV & operator=( PipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PipelineViewportWScalingStateCreateInfoNV & operator=( VkPipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportWScalingStateCreateInfoNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PipelineViewportWScalingStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineViewportWScalingStateCreateInfoNV & setViewportWScalingEnable( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      viewportWScalingEnable = viewportWScalingEnable_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineViewportWScalingStateCreateInfoNV & setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      viewportCount = viewportCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineViewportWScalingStateCreateInfoNV & setPViewportWScalings( const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pViewportWScalings = pViewportWScalings_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    PipelineViewportWScalingStateCreateInfoNV & setViewportWScalings( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const & viewportWScalings_ ) VULKAN_HPP_NOEXCEPT
+    {
+      viewportCount = static_cast<uint32_t>( viewportWScalings_.size() );
+      pViewportWScalings = viewportWScalings_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPipelineViewportWScalingStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineViewportWScalingStateCreateInfoNV*>( this );
+    }
+
+    operator VkPipelineViewportWScalingStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineViewportWScalingStateCreateInfoNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, viewportWScalingEnable, viewportCount, pViewportWScalings );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PipelineViewportWScalingStateCreateInfoNV const & ) const = default;
+#else
+    bool operator==( PipelineViewportWScalingStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( viewportWScalingEnable == rhs.viewportWScalingEnable )
+          && ( viewportCount == rhs.viewportCount )
+          && ( pViewportWScalings == rhs.pViewportWScalings );
+#endif
+    }
+
+    bool operator!=( PipelineViewportWScalingStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportWScalingStateCreateInfoNV;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable = {};
+    uint32_t viewportCount = {};
+    const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings = {};
+
+  };
+
+  // Maps StructureType::ePipelineViewportWScalingStateCreateInfoNV back to its
+  // corresponding C++ struct; used by the structure-chain machinery.
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineViewportWScalingStateCreateInfoNV>
+  {
+    using Type = PipelineViewportWScalingStateCreateInfoNV;
+  };
+
+#if defined( VK_USE_PLATFORM_GGP )
+  // C++ wrapper for VkPresentFrameTokenGGP (only compiled when VK_USE_PLATFORM_GGP
+  // is defined). Layout-compatible with the C struct, so the conversion operators
+  // below are plain reinterpret_casts.
+  struct PresentFrameTokenGGP
+  {
+    using NativeType = VkPresentFrameTokenGGP;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentFrameTokenGGP;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PresentFrameTokenGGP(GgpFrameToken frameToken_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), frameToken( frameToken_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PresentFrameTokenGGP( PresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; relies on identical memory layout.
+    PresentFrameTokenGGP( VkPresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PresentFrameTokenGGP( *reinterpret_cast<PresentFrameTokenGGP const *>( &rhs ) )
+    {}
+
+
+    PresentFrameTokenGGP & operator=( PresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct; same reinterpret_cast layout assumption as above.
+    PresentFrameTokenGGP & operator=( VkPresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PresentFrameTokenGGP & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PresentFrameTokenGGP & setFrameToken( GgpFrameToken frameToken_ ) VULKAN_HPP_NOEXCEPT
+    {
+      frameToken = frameToken_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPresentFrameTokenGGP const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPresentFrameTokenGGP*>( this );
+    }
+
+    operator VkPresentFrameTokenGGP &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPresentFrameTokenGGP*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, GgpFrameToken const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, frameToken );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    // frameToken is ordered by its raw bytes via memcmp (GgpFrameToken is not
+    // compared with operator== anywhere in this struct — presumably it has no
+    // comparison operators; TODO(review): confirm against the GGP headers).
+    std::strong_ordering operator<=>( PresentFrameTokenGGP const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
+      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
+      if ( auto cmp = memcmp( &frameToken, &rhs.frameToken, sizeof( GgpFrameToken ) ); cmp != 0 )
+        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
+
+      return std::strong_ordering::equivalent;
+    }
+#endif
+
+    // Equality also compares frameToken bytewise; defined in both the spaceship
+    // and non-spaceship configurations (the <=> above is not defaulted, so it
+    // does not generate operator==).
+    bool operator==( PresentFrameTokenGGP const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memcmp( &frameToken, &rhs.frameToken, sizeof( GgpFrameToken ) ) == 0 );
+    }
+
+    bool operator!=( PresentFrameTokenGGP const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentFrameTokenGGP;
+    const void * pNext = {};
+    GgpFrameToken frameToken = {};
+
+  };
+
+  // StructureType -> C++ type mapping used by the structure-chain machinery.
+  template <>
+  struct CppType<StructureType, StructureType::ePresentFrameTokenGGP>
+  {
+    using Type = PresentFrameTokenGGP;
+  };
+#endif /*VK_USE_PLATFORM_GGP*/
+
+  // C++ wrapper for VkPresentIdKHR. Layout-compatible with the C struct, so the
+  // conversion operators below are plain reinterpret_casts. swapchainCount and
+  // pPresentIds describe one external array (one 64-bit id per swapchain).
+  struct PresentIdKHR
+  {
+    using NativeType = VkPresentIdKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentIdKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PresentIdKHR(uint32_t swapchainCount_ = {}, const uint64_t * pPresentIds_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), swapchainCount( swapchainCount_ ), pPresentIds( pPresentIds_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PresentIdKHR( PresentIdKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; relies on identical memory layout.
+    PresentIdKHR( VkPresentIdKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PresentIdKHR( *reinterpret_cast<PresentIdKHR const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Enhanced-mode convenience: derives swapchainCount from the proxy's size.
+    // The proxy does not own the data — the caller must keep it alive.
+    PresentIdKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & presentIds_, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), swapchainCount( static_cast<uint32_t>( presentIds_.size() ) ), pPresentIds( presentIds_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    PresentIdKHR & operator=( PresentIdKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PresentIdKHR & operator=( VkPresentIdKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentIdKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PresentIdKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PresentIdKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      swapchainCount = swapchainCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PresentIdKHR & setPPresentIds( const uint64_t * pPresentIds_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pPresentIds = pPresentIds_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets both the pointer and swapchainCount from one array proxy.
+    PresentIdKHR & setPresentIds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & presentIds_ ) VULKAN_HPP_NOEXCEPT
+    {
+      swapchainCount = static_cast<uint32_t>( presentIds_.size() );
+      pPresentIds = presentIds_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPresentIdKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPresentIdKHR*>( this );
+    }
+
+    operator VkPresentIdKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPresentIdKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const uint64_t * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, swapchainCount, pPresentIds );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PresentIdKHR const & ) const = default;
+#else
+    // Shallow comparison: pPresentIds is compared as a pointer, not element-wise.
+    bool operator==( PresentIdKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( swapchainCount == rhs.swapchainCount )
+          && ( pPresentIds == rhs.pPresentIds );
+#endif
+    }
+
+    bool operator!=( PresentIdKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentIdKHR;
+    const void * pNext = {};
+    uint32_t swapchainCount = {};
+    const uint64_t * pPresentIds = {};
+
+  };
+
+  // StructureType -> C++ type mapping used by the structure-chain machinery.
+  template <>
+  struct CppType<StructureType, StructureType::ePresentIdKHR>
+  {
+    using Type = PresentIdKHR;
+  };
+
+  // C++ wrapper for VkPresentInfoKHR (the argument to vkQueuePresentKHR).
+  // Layout-compatible with the C struct. swapchainCount is shared by the
+  // pSwapchains, pImageIndices and pResults arrays.
+  struct PresentInfoKHR
+  {
+    using NativeType = VkPresentInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PresentInfoKHR(uint32_t waitSemaphoreCount_ = {}, const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ = {}, uint32_t swapchainCount_ = {}, const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains_ = {}, const uint32_t * pImageIndices_ = {}, VULKAN_HPP_NAMESPACE::Result * pResults_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), waitSemaphoreCount( waitSemaphoreCount_ ), pWaitSemaphores( pWaitSemaphores_ ), swapchainCount( swapchainCount_ ), pSwapchains( pSwapchains_ ), pImageIndices( pImageIndices_ ), pResults( pResults_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PresentInfoKHR( PresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; relies on identical memory layout.
+    PresentInfoKHR( VkPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PresentInfoKHR( *reinterpret_cast<PresentInfoKHR const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Enhanced-mode convenience: counts are derived from the proxies' sizes;
+    // swapchainCount is taken from swapchains_. The body validates that
+    // imageIndices_ (and results_, when non-empty) match swapchains_ in size —
+    // asserting under VULKAN_HPP_NO_EXCEPTIONS, throwing LogicError otherwise.
+    PresentInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & waitSemaphores_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const & swapchains_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & imageIndices_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::Result> const & results_ = {}, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), waitSemaphoreCount( static_cast<uint32_t>( waitSemaphores_.size() ) ), pWaitSemaphores( waitSemaphores_.data() ), swapchainCount( static_cast<uint32_t>( swapchains_.size() ) ), pSwapchains( swapchains_.data() ), pImageIndices( imageIndices_.data() ), pResults( results_.data() )
+    {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+      VULKAN_HPP_ASSERT( swapchains_.size() == imageIndices_.size() );
+      VULKAN_HPP_ASSERT( results_.empty() || ( swapchains_.size() == results_.size() ) );
+      VULKAN_HPP_ASSERT( results_.empty() || ( imageIndices_.size() == results_.size() ) );
+#else
+      if ( swapchains_.size() != imageIndices_.size() )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING"::PresentInfoKHR::PresentInfoKHR: swapchains_.size() != imageIndices_.size()" );
+      }
+      if ( !results_.empty() && ( swapchains_.size() != results_.size() ) )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING"::PresentInfoKHR::PresentInfoKHR: !results_.empty() && ( swapchains_.size() != results_.size() )" );
+      }
+      if ( !results_.empty() && ( imageIndices_.size() != results_.size() ) )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING"::PresentInfoKHR::PresentInfoKHR: !results_.empty() && ( imageIndices_.size() != results_.size() )" );
+      }
+#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    PresentInfoKHR & operator=( PresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PresentInfoKHR & operator=( VkPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      waitSemaphoreCount = waitSemaphoreCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pWaitSemaphores = pWaitSemaphores_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    PresentInfoKHR & setWaitSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & waitSemaphores_ ) VULKAN_HPP_NOEXCEPT
+    {
+      waitSemaphoreCount = static_cast<uint32_t>( waitSemaphores_.size() );
+      pWaitSemaphores = waitSemaphores_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      swapchainCount = swapchainCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setPSwapchains( const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSwapchains = pSwapchains_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    PresentInfoKHR & setSwapchains( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const & swapchains_ ) VULKAN_HPP_NOEXCEPT
+    {
+      swapchainCount = static_cast<uint32_t>( swapchains_.size() );
+      pSwapchains = swapchains_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setPImageIndices( const uint32_t * pImageIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pImageIndices = pImageIndices_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // NOTE: also overwrites swapchainCount — the three per-swapchain arrays
+    // share that one count field.
+    PresentInfoKHR & setImageIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & imageIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      swapchainCount = static_cast<uint32_t>( imageIndices_.size() );
+      pImageIndices = imageIndices_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setPResults( VULKAN_HPP_NAMESPACE::Result * pResults_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pResults = pResults_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // NOTE: also overwrites swapchainCount, same as setImageIndices above.
+    PresentInfoKHR & setResults( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::Result> const & results_ ) VULKAN_HPP_NOEXCEPT
+    {
+      swapchainCount = static_cast<uint32_t>( results_.size() );
+      pResults = results_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPresentInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPresentInfoKHR*>( this );
+    }
+
+    operator VkPresentInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPresentInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Semaphore * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::SwapchainKHR * const &, const uint32_t * const &, VULKAN_HPP_NAMESPACE::Result * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, waitSemaphoreCount, pWaitSemaphores, swapchainCount, pSwapchains, pImageIndices, pResults );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PresentInfoKHR const & ) const = default;
+#else
+    // Shallow comparison: array pointers are compared by address, not contents.
+    bool operator==( PresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( waitSemaphoreCount == rhs.waitSemaphoreCount )
+          && ( pWaitSemaphores == rhs.pWaitSemaphores )
+          && ( swapchainCount == rhs.swapchainCount )
+          && ( pSwapchains == rhs.pSwapchains )
+          && ( pImageIndices == rhs.pImageIndices )
+          && ( pResults == rhs.pResults );
+#endif
+    }
+
+    bool operator!=( PresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentInfoKHR;
+    const void * pNext = {};
+    uint32_t waitSemaphoreCount = {};
+    const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores = {};
+    uint32_t swapchainCount = {};
+    const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains = {};
+    const uint32_t * pImageIndices = {};
+    VULKAN_HPP_NAMESPACE::Result * pResults = {};
+
+  };
+
+  // StructureType -> C++ type mapping used by the structure-chain machinery.
+  template <>
+  struct CppType<StructureType, StructureType::ePresentInfoKHR>
+  {
+    using Type = PresentInfoKHR;
+  };
+
+  // C++ wrapper for VkRectLayerKHR: a 2D rectangle (offset + extent) plus a
+  // swapchain image array layer. Plain struct — no sType/pNext member, so no
+  // structureType constant or CppType specialization.
+  struct RectLayerKHR
+  {
+    using NativeType = VkRectLayerKHR;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR RectLayerKHR(VULKAN_HPP_NAMESPACE::Offset2D offset_ = {}, VULKAN_HPP_NAMESPACE::Extent2D extent_ = {}, uint32_t layer_ = {}) VULKAN_HPP_NOEXCEPT
+    : offset( offset_ ), extent( extent_ ), layer( layer_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR RectLayerKHR( RectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; relies on identical memory layout.
+    RectLayerKHR( VkRectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : RectLayerKHR( *reinterpret_cast<RectLayerKHR const *>( &rhs ) )
+    {}
+
+
+    // Convenience: build from a Rect2D (offset + extent) and a layer index.
+    explicit RectLayerKHR( Rect2D const & rect2D, uint32_t layer_ = {} )
+      : offset( rect2D.offset )
+      , extent( rect2D.extent )
+    , layer( layer_ )
+    {}
+
+    RectLayerKHR & operator=( RectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    RectLayerKHR & operator=( VkRectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RectLayerKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 RectLayerKHR & setOffset( VULKAN_HPP_NAMESPACE::Offset2D const & offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RectLayerKHR & setExtent( VULKAN_HPP_NAMESPACE::Extent2D const & extent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extent = extent_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RectLayerKHR & setLayer( uint32_t layer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      layer = layer_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkRectLayerKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRectLayerKHR*>( this );
+    }
+
+    operator VkRectLayerKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRectLayerKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::Offset2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( offset, extent, layer );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( RectLayerKHR const & ) const = default;
+#else
+    bool operator==( RectLayerKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( offset == rhs.offset )
+          && ( extent == rhs.extent )
+          && ( layer == rhs.layer );
+#endif
+    }
+
+    bool operator!=( RectLayerKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::Offset2D offset = {};
+    VULKAN_HPP_NAMESPACE::Extent2D extent = {};
+    uint32_t layer = {};
+
+  };
+
+  // C++ wrapper for VkPresentRegionKHR: a counted external array of
+  // RectLayerKHR rectangles. Plain struct — no sType/pNext member.
+  struct PresentRegionKHR
+  {
+    using NativeType = VkPresentRegionKHR;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PresentRegionKHR(uint32_t rectangleCount_ = {}, const VULKAN_HPP_NAMESPACE::RectLayerKHR * pRectangles_ = {}) VULKAN_HPP_NOEXCEPT
+    : rectangleCount( rectangleCount_ ), pRectangles( pRectangles_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PresentRegionKHR( PresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; relies on identical memory layout.
+    PresentRegionKHR( VkPresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PresentRegionKHR( *reinterpret_cast<PresentRegionKHR const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Enhanced-mode convenience: derives rectangleCount from the proxy's size.
+    // The proxy does not own the data — the caller must keep it alive.
+    PresentRegionKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RectLayerKHR> const & rectangles_ )
+    : rectangleCount( static_cast<uint32_t>( rectangles_.size() ) ), pRectangles( rectangles_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    PresentRegionKHR & operator=( PresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PresentRegionKHR & operator=( VkPresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentRegionKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PresentRegionKHR & setRectangleCount( uint32_t rectangleCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rectangleCount = rectangleCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PresentRegionKHR & setPRectangles( const VULKAN_HPP_NAMESPACE::RectLayerKHR * pRectangles_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pRectangles = pRectangles_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets both the pointer and rectangleCount from one array proxy.
+    PresentRegionKHR & setRectangles( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RectLayerKHR> const & rectangles_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rectangleCount = static_cast<uint32_t>( rectangles_.size() );
+      pRectangles = rectangles_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPresentRegionKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPresentRegionKHR*>( this );
+    }
+
+    operator VkPresentRegionKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPresentRegionKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<uint32_t const &, const VULKAN_HPP_NAMESPACE::RectLayerKHR * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( rectangleCount, pRectangles );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PresentRegionKHR const & ) const = default;
+#else
+    // Shallow comparison: pRectangles is compared by address, not contents.
+    bool operator==( PresentRegionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( rectangleCount == rhs.rectangleCount )
+          && ( pRectangles == rhs.pRectangles );
+#endif
+    }
+
+    bool operator!=( PresentRegionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    uint32_t rectangleCount = {};
+    const VULKAN_HPP_NAMESPACE::RectLayerKHR * pRectangles = {};
+
+  };
+
+  // C++ wrapper for VkPresentRegionsKHR: per-swapchain arrays of present
+  // regions (swapchainCount entries in pRegions). Layout-compatible with the
+  // C struct.
+  struct PresentRegionsKHR
+  {
+    using NativeType = VkPresentRegionsKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentRegionsKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PresentRegionsKHR(uint32_t swapchainCount_ = {}, const VULKAN_HPP_NAMESPACE::PresentRegionKHR * pRegions_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), swapchainCount( swapchainCount_ ), pRegions( pRegions_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PresentRegionsKHR( PresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; relies on identical memory layout.
+    PresentRegionsKHR( VkPresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PresentRegionsKHR( *reinterpret_cast<PresentRegionsKHR const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Enhanced-mode convenience: derives swapchainCount from the proxy's size.
+    // The proxy does not own the data — the caller must keep it alive.
+    PresentRegionsKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PresentRegionKHR> const & regions_, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), swapchainCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    PresentRegionsKHR & operator=( PresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PresentRegionsKHR & operator=( VkPresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentRegionsKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PresentRegionsKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PresentRegionsKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      swapchainCount = swapchainCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PresentRegionsKHR & setPRegions( const VULKAN_HPP_NAMESPACE::PresentRegionKHR * pRegions_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pRegions = pRegions_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets both the pointer and swapchainCount from one array proxy.
+    PresentRegionsKHR & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PresentRegionKHR> const & regions_ ) VULKAN_HPP_NOEXCEPT
+    {
+      swapchainCount = static_cast<uint32_t>( regions_.size() );
+      pRegions = regions_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPresentRegionsKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPresentRegionsKHR*>( this );
+    }
+
+    operator VkPresentRegionsKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPresentRegionsKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::PresentRegionKHR * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, swapchainCount, pRegions );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PresentRegionsKHR const & ) const = default;
+#else
+    // Shallow comparison: pRegions is compared by address, not contents.
+    bool operator==( PresentRegionsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( swapchainCount == rhs.swapchainCount )
+          && ( pRegions == rhs.pRegions );
+#endif
+    }
+
+    bool operator!=( PresentRegionsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentRegionsKHR;
+    const void * pNext = {};
+    uint32_t swapchainCount = {};
+    const VULKAN_HPP_NAMESPACE::PresentRegionKHR * pRegions = {};
+
+  };
+
+  // StructureType -> C++ type mapping used by the structure-chain machinery.
+  template <>
+  struct CppType<StructureType, StructureType::ePresentRegionsKHR>
+  {
+    using Type = PresentRegionsKHR;
+  };
+
+  // C++ wrapper for VkPresentTimeGOOGLE: pairs an application-chosen presentID
+  // with a desiredPresentTime (uint64_t). Plain struct — no sType/pNext member.
+  struct PresentTimeGOOGLE
+  {
+    using NativeType = VkPresentTimeGOOGLE;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PresentTimeGOOGLE(uint32_t presentID_ = {}, uint64_t desiredPresentTime_ = {}) VULKAN_HPP_NOEXCEPT
+    : presentID( presentID_ ), desiredPresentTime( desiredPresentTime_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PresentTimeGOOGLE( PresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; relies on identical memory layout.
+    PresentTimeGOOGLE( VkPresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PresentTimeGOOGLE( *reinterpret_cast<PresentTimeGOOGLE const *>( &rhs ) )
+    {}
+
+
+    PresentTimeGOOGLE & operator=( PresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PresentTimeGOOGLE & operator=( VkPresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PresentTimeGOOGLE & setPresentID( uint32_t presentID_ ) VULKAN_HPP_NOEXCEPT
+    {
+      presentID = presentID_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PresentTimeGOOGLE & setDesiredPresentTime( uint64_t desiredPresentTime_ ) VULKAN_HPP_NOEXCEPT
+    {
+      desiredPresentTime = desiredPresentTime_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkPresentTimeGOOGLE const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPresentTimeGOOGLE*>( this );
+    }
+
+    operator VkPresentTimeGOOGLE &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPresentTimeGOOGLE*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<uint32_t const &, uint64_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( presentID, desiredPresentTime );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PresentTimeGOOGLE const & ) const = default;
+#else
+    bool operator==( PresentTimeGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( presentID == rhs.presentID )
+          && ( desiredPresentTime == rhs.desiredPresentTime );
+#endif
+    }
+
+    bool operator!=( PresentTimeGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    uint32_t presentID = {};
+    uint64_t desiredPresentTime = {};
+
+  };
+
+  // C++ wrapper for VkPresentTimesInfoGOOGLE (VK_GOOGLE_display_timing).
+  // Generated-style code: layout-compatible with the C struct, so every
+  // conversion below is a plain reinterpret_cast of `this`/`&rhs`.
+  struct PresentTimesInfoGOOGLE
+  {
+    using NativeType = VkPresentTimesInfoGOOGLE;
+
+    // This sType may appear at most once in a pNext chain.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentTimesInfoGOOGLE;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PresentTimesInfoGOOGLE(uint32_t swapchainCount_ = {}, const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE * pTimes_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), swapchainCount( swapchainCount_ ), pTimes( pTimes_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PresentTimesInfoGOOGLE( PresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the wrapper type.
+    PresentTimesInfoGOOGLE( VkPresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PresentTimesInfoGOOGLE( *reinterpret_cast<PresentTimesInfoGOOGLE const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Convenience constructor: swapchainCount is derived from the proxy size,
+    // so count and pointer cannot get out of sync.
+    PresentTimesInfoGOOGLE( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE> const & times_, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), swapchainCount( static_cast<uint32_t>( times_.size() ) ), pTimes( times_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    PresentTimesInfoGOOGLE & operator=( PresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (bitwise-identical layout).
+    PresentTimesInfoGOOGLE & operator=( VkPresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentTimesInfoGOOGLE const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable member setters (builder style).
+    VULKAN_HPP_CONSTEXPR_14 PresentTimesInfoGOOGLE & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PresentTimesInfoGOOGLE & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      swapchainCount = swapchainCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PresentTimesInfoGOOGLE & setPTimes( const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE * pTimes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pTimes = pTimes_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets both swapchainCount and pTimes from a single array proxy.
+    PresentTimesInfoGOOGLE & setTimes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE> const & times_ ) VULKAN_HPP_NOEXCEPT
+    {
+      swapchainCount = static_cast<uint32_t>( times_.size() );
+      pTimes = times_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C API type.
+    operator VkPresentTimesInfoGOOGLE const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPresentTimesInfoGOOGLE*>( this );
+    }
+
+    operator VkPresentTimesInfoGOOGLE &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPresentTimesInfoGOOGLE*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Exposes all members as a tuple of references for generic code.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, swapchainCount, pTimes );
+    }
+#endif
+
+
+    // Comparison is member-wise; pNext/pTimes compare as raw pointers,
+    // not by pointed-to contents.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PresentTimesInfoGOOGLE const & ) const = default;
+#else
+    bool operator==( PresentTimesInfoGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( swapchainCount == rhs.swapchainCount )
+          && ( pTimes == rhs.pTimes );
+#endif
+    }
+
+    bool operator!=( PresentTimesInfoGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentTimesInfoGOOGLE;
+    const void * pNext = {};
+    uint32_t swapchainCount = {};
+    const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE * pTimes = {};
+
+  };
+
+  // Maps the sType enumerant back to this wrapper type for generic lookups.
+  template <>
+  struct CppType<StructureType, StructureType::ePresentTimesInfoGOOGLE>
+  {
+    using Type = PresentTimesInfoGOOGLE;
+  };
+
+  // C++ wrapper for VkPrivateDataSlotCreateInfo (core Vulkan 1.3 private
+  // data; EXT alias below).  Layout-compatible with the C struct, so the
+  // conversions are plain reinterpret_casts.
+  struct PrivateDataSlotCreateInfo
+  {
+    using NativeType = VkPrivateDataSlotCreateInfo;
+
+    // This sType may appear at most once in a pNext chain.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePrivateDataSlotCreateInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR PrivateDataSlotCreateInfo(VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlags flags_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PrivateDataSlotCreateInfo( PrivateDataSlotCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the wrapper type.
+    PrivateDataSlotCreateInfo( VkPrivateDataSlotCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PrivateDataSlotCreateInfo( *reinterpret_cast<PrivateDataSlotCreateInfo const *>( &rhs ) )
+    {}
+
+
+    PrivateDataSlotCreateInfo & operator=( PrivateDataSlotCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (bitwise-identical layout).
+    PrivateDataSlotCreateInfo & operator=( VkPrivateDataSlotCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable member setters (builder style).
+    VULKAN_HPP_CONSTEXPR_14 PrivateDataSlotCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PrivateDataSlotCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C API type.
+    operator VkPrivateDataSlotCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPrivateDataSlotCreateInfo*>( this );
+    }
+
+    operator VkPrivateDataSlotCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPrivateDataSlotCreateInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Exposes all members as a tuple of references for generic code.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlags const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags );
+    }
+#endif
+
+
+    // Comparison is member-wise; pNext compares as a raw pointer.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( PrivateDataSlotCreateInfo const & ) const = default;
+#else
+    bool operator==( PrivateDataSlotCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags );
+#endif
+    }
+
+    bool operator!=( PrivateDataSlotCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePrivateDataSlotCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlags flags = {};
+
+  };
+
+  // Maps the sType enumerant back to this wrapper type for generic lookups.
+  template <>
+  struct CppType<StructureType, StructureType::ePrivateDataSlotCreateInfo>
+  {
+    using Type = PrivateDataSlotCreateInfo;
+  };
+  // Backward-compatible alias for the pre-promotion EXT extension name.
+  using PrivateDataSlotCreateInfoEXT = PrivateDataSlotCreateInfo;
+
+  // C++ wrapper for VkProtectedSubmitInfo.  Layout-compatible with the C
+  // struct, so the conversions are plain reinterpret_casts.
+  struct ProtectedSubmitInfo
+  {
+    using NativeType = VkProtectedSubmitInfo;
+
+    // This sType may appear at most once in a pNext chain.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eProtectedSubmitInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ProtectedSubmitInfo(VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), protectedSubmit( protectedSubmit_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ProtectedSubmitInfo( ProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the wrapper type.
+    ProtectedSubmitInfo( VkProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ProtectedSubmitInfo( *reinterpret_cast<ProtectedSubmitInfo const *>( &rhs ) )
+    {}
+
+
+    ProtectedSubmitInfo & operator=( ProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (bitwise-identical layout).
+    ProtectedSubmitInfo & operator=( VkProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable member setters (builder style).
+    VULKAN_HPP_CONSTEXPR_14 ProtectedSubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ProtectedSubmitInfo & setProtectedSubmit( VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit_ ) VULKAN_HPP_NOEXCEPT
+    {
+      protectedSubmit = protectedSubmit_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C API type.
+    operator VkProtectedSubmitInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkProtectedSubmitInfo*>( this );
+    }
+
+    operator VkProtectedSubmitInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkProtectedSubmitInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Exposes all members as a tuple of references for generic code.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, protectedSubmit );
+    }
+#endif
+
+
+    // Comparison is member-wise; pNext compares as a raw pointer.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ProtectedSubmitInfo const & ) const = default;
+#else
+    bool operator==( ProtectedSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( protectedSubmit == rhs.protectedSubmit );
+#endif
+    }
+
+    bool operator!=( ProtectedSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eProtectedSubmitInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit = {};
+
+  };
+
+  // Maps the sType enumerant back to this wrapper type for generic lookups.
+  template <>
+  struct CppType<StructureType, StructureType::eProtectedSubmitInfo>
+  {
+    using Type = ProtectedSubmitInfo;
+  };
+
+  // C++ wrapper for VkQueryPoolCreateInfo.  Layout-compatible with the C
+  // struct, so the conversions are plain reinterpret_casts.
+  struct QueryPoolCreateInfo
+  {
+    using NativeType = VkQueryPoolCreateInfo;
+
+    // This sType may appear at most once in a pNext chain.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueryPoolCreateInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR QueryPoolCreateInfo(VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::QueryType queryType_ = VULKAN_HPP_NAMESPACE::QueryType::eOcclusion, uint32_t queryCount_ = {}, VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), queryType( queryType_ ), queryCount( queryCount_ ), pipelineStatistics( pipelineStatistics_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR QueryPoolCreateInfo( QueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the wrapper type.
+    QueryPoolCreateInfo( VkQueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : QueryPoolCreateInfo( *reinterpret_cast<QueryPoolCreateInfo const *>( &rhs ) )
+    {}
+
+
+    QueryPoolCreateInfo & operator=( QueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (bitwise-identical layout).
+    QueryPoolCreateInfo & operator=( VkQueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable member setters (builder style).
+    VULKAN_HPP_CONSTEXPR_14 QueryPoolCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 QueryPoolCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 QueryPoolCreateInfo & setQueryType( VULKAN_HPP_NAMESPACE::QueryType queryType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queryType = queryType_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 QueryPoolCreateInfo & setQueryCount( uint32_t queryCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queryCount = queryCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 QueryPoolCreateInfo & setPipelineStatistics( VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineStatistics = pipelineStatistics_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C API type.
+    operator VkQueryPoolCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkQueryPoolCreateInfo*>( this );
+    }
+
+    operator VkQueryPoolCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkQueryPoolCreateInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Exposes all members as a tuple of references for generic code.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags const &, VULKAN_HPP_NAMESPACE::QueryType const &, uint32_t const &, VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, queryType, queryCount, pipelineStatistics );
+    }
+#endif
+
+
+    // Comparison is member-wise; pNext compares as a raw pointer.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( QueryPoolCreateInfo const & ) const = default;
+#else
+    bool operator==( QueryPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( queryType == rhs.queryType )
+          && ( queryCount == rhs.queryCount )
+          && ( pipelineStatistics == rhs.pipelineStatistics );
+#endif
+    }
+
+    bool operator!=( QueryPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags = {};
+    VULKAN_HPP_NAMESPACE::QueryType queryType = VULKAN_HPP_NAMESPACE::QueryType::eOcclusion;
+    uint32_t queryCount = {};
+    VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics = {};
+
+  };
+
+  // Maps the sType enumerant back to this wrapper type for generic lookups.
+  template <>
+  struct CppType<StructureType, StructureType::eQueryPoolCreateInfo>
+  {
+    using Type = QueryPoolCreateInfo;
+  };
+
+  // C++ wrapper for VkQueryPoolPerformanceCreateInfoKHR
+  // (VK_KHR_performance_query).  Layout-compatible with the C struct, so
+  // the conversions are plain reinterpret_casts.
+  struct QueryPoolPerformanceCreateInfoKHR
+  {
+    using NativeType = VkQueryPoolPerformanceCreateInfoKHR;
+
+    // This sType may appear at most once in a pNext chain.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueryPoolPerformanceCreateInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR QueryPoolPerformanceCreateInfoKHR(uint32_t queueFamilyIndex_ = {}, uint32_t counterIndexCount_ = {}, const uint32_t * pCounterIndices_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), queueFamilyIndex( queueFamilyIndex_ ), counterIndexCount( counterIndexCount_ ), pCounterIndices( pCounterIndices_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR QueryPoolPerformanceCreateInfoKHR( QueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the wrapper type.
+    QueryPoolPerformanceCreateInfoKHR( VkQueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : QueryPoolPerformanceCreateInfoKHR( *reinterpret_cast<QueryPoolPerformanceCreateInfoKHR const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Convenience constructor: counterIndexCount is derived from the proxy
+    // size, so count and pointer cannot get out of sync.
+    QueryPoolPerformanceCreateInfoKHR( uint32_t queueFamilyIndex_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & counterIndices_, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), queueFamilyIndex( queueFamilyIndex_ ), counterIndexCount( static_cast<uint32_t>( counterIndices_.size() ) ), pCounterIndices( counterIndices_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    QueryPoolPerformanceCreateInfoKHR & operator=( QueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (bitwise-identical layout).
+    QueryPoolPerformanceCreateInfoKHR & operator=( VkQueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable member setters (builder style).
+    VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceCreateInfoKHR & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueFamilyIndex = queueFamilyIndex_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceCreateInfoKHR & setCounterIndexCount( uint32_t counterIndexCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      counterIndexCount = counterIndexCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceCreateInfoKHR & setPCounterIndices( const uint32_t * pCounterIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pCounterIndices = pCounterIndices_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets both counterIndexCount and pCounterIndices from one array proxy.
+    QueryPoolPerformanceCreateInfoKHR & setCounterIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & counterIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      counterIndexCount = static_cast<uint32_t>( counterIndices_.size() );
+      pCounterIndices = counterIndices_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C API type.
+    operator VkQueryPoolPerformanceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR*>( this );
+    }
+
+    operator VkQueryPoolPerformanceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkQueryPoolPerformanceCreateInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Exposes all members as a tuple of references for generic code.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, uint32_t const &, const uint32_t * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, queueFamilyIndex, counterIndexCount, pCounterIndices );
+    }
+#endif
+
+
+    // Comparison is member-wise; pNext/pCounterIndices compare as raw
+    // pointers, not by pointed-to contents.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( QueryPoolPerformanceCreateInfoKHR const & ) const = default;
+#else
+    bool operator==( QueryPoolPerformanceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( queueFamilyIndex == rhs.queueFamilyIndex )
+          && ( counterIndexCount == rhs.counterIndexCount )
+          && ( pCounterIndices == rhs.pCounterIndices );
+#endif
+    }
+
+    bool operator!=( QueryPoolPerformanceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolPerformanceCreateInfoKHR;
+    const void * pNext = {};
+    uint32_t queueFamilyIndex = {};
+    uint32_t counterIndexCount = {};
+    const uint32_t * pCounterIndices = {};
+
+  };
+
+  // Maps the sType enumerant back to this wrapper type for generic lookups.
+  template <>
+  struct CppType<StructureType, StructureType::eQueryPoolPerformanceCreateInfoKHR>
+  {
+    using Type = QueryPoolPerformanceCreateInfoKHR;
+  };
+
+  // C++ wrapper for VkQueryPoolPerformanceQueryCreateInfoINTEL
+  // (VK_INTEL_performance_query; legacy alias below).  Layout-compatible
+  // with the C struct, so the conversions are plain reinterpret_casts.
+  struct QueryPoolPerformanceQueryCreateInfoINTEL
+  {
+    using NativeType = VkQueryPoolPerformanceQueryCreateInfoINTEL;
+
+    // This sType may appear at most once in a pNext chain.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR QueryPoolPerformanceQueryCreateInfoINTEL(VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling_ = VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL::eManual, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), performanceCountersSampling( performanceCountersSampling_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR QueryPoolPerformanceQueryCreateInfoINTEL( QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the wrapper type.
+    QueryPoolPerformanceQueryCreateInfoINTEL( VkQueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+      : QueryPoolPerformanceQueryCreateInfoINTEL( *reinterpret_cast<QueryPoolPerformanceQueryCreateInfoINTEL const *>( &rhs ) )
+    {}
+
+
+    QueryPoolPerformanceQueryCreateInfoINTEL & operator=( QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (bitwise-identical layout).
+    QueryPoolPerformanceQueryCreateInfoINTEL & operator=( VkQueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueryPoolPerformanceQueryCreateInfoINTEL const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable member setters (builder style).
+    VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceQueryCreateInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceQueryCreateInfoINTEL & setPerformanceCountersSampling( VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling_ ) VULKAN_HPP_NOEXCEPT
+    {
+      performanceCountersSampling = performanceCountersSampling_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C API type.
+    operator VkQueryPoolPerformanceQueryCreateInfoINTEL const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkQueryPoolPerformanceQueryCreateInfoINTEL*>( this );
+    }
+
+    operator VkQueryPoolPerformanceQueryCreateInfoINTEL &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkQueryPoolPerformanceQueryCreateInfoINTEL*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Exposes all members as a tuple of references for generic code.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, performanceCountersSampling );
+    }
+#endif
+
+
+    // Comparison is member-wise; pNext compares as a raw pointer.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( QueryPoolPerformanceQueryCreateInfoINTEL const & ) const = default;
+#else
+    bool operator==( QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( performanceCountersSampling == rhs.performanceCountersSampling );
+#endif
+    }
+
+    bool operator!=( QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling = VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL::eManual;
+
+  };
+
+  // Maps the sType enumerant back to this wrapper type for generic lookups.
+  template <>
+  struct CppType<StructureType, StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL>
+  {
+    using Type = QueryPoolPerformanceQueryCreateInfoINTEL;
+  };
+  // Backward-compatible alias for the original extension struct name.
+  using QueryPoolCreateInfoINTEL = QueryPoolPerformanceQueryCreateInfoINTEL;
+
+  // C++ wrapper for VkQueueFamilyCheckpointProperties2NV.  A read-back
+  // (output) structure: pNext is non-const and no setters are generated.
+  // Layout-compatible with the C struct, so conversions are reinterpret_casts.
+  struct QueueFamilyCheckpointProperties2NV
+  {
+    using NativeType = VkQueueFamilyCheckpointProperties2NV;
+
+    // This sType may appear at most once in a pNext chain.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyCheckpointProperties2NV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointProperties2NV(VULKAN_HPP_NAMESPACE::PipelineStageFlags2 checkpointExecutionStageMask_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), checkpointExecutionStageMask( checkpointExecutionStageMask_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointProperties2NV( QueueFamilyCheckpointProperties2NV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the wrapper type.
+    QueueFamilyCheckpointProperties2NV( VkQueueFamilyCheckpointProperties2NV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : QueueFamilyCheckpointProperties2NV( *reinterpret_cast<QueueFamilyCheckpointProperties2NV const *>( &rhs ) )
+    {}
+
+
+    QueueFamilyCheckpointProperties2NV & operator=( QueueFamilyCheckpointProperties2NV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (bitwise-identical layout).
+    QueueFamilyCheckpointProperties2NV & operator=( VkQueueFamilyCheckpointProperties2NV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointProperties2NV const *>( &rhs );
+      return *this;
+    }
+
+
+    // Zero-cost conversions to the C API type.
+    operator VkQueueFamilyCheckpointProperties2NV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkQueueFamilyCheckpointProperties2NV*>( this );
+    }
+
+    operator VkQueueFamilyCheckpointProperties2NV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkQueueFamilyCheckpointProperties2NV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Exposes all members as a tuple of references for generic code.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, checkpointExecutionStageMask );
+    }
+#endif
+
+
+    // Comparison is member-wise; pNext compares as a raw pointer.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( QueueFamilyCheckpointProperties2NV const & ) const = default;
+#else
+    bool operator==( QueueFamilyCheckpointProperties2NV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( checkpointExecutionStageMask == rhs.checkpointExecutionStageMask );
+#endif
+    }
+
+    bool operator!=( QueueFamilyCheckpointProperties2NV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyCheckpointProperties2NV;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineStageFlags2 checkpointExecutionStageMask = {};
+
+  };
+
+  // Maps the sType enumerant back to this wrapper type for generic lookups.
+  template <>
+  struct CppType<StructureType, StructureType::eQueueFamilyCheckpointProperties2NV>
+  {
+    using Type = QueueFamilyCheckpointProperties2NV;
+  };
+
+  // C++ wrapper for VkQueueFamilyCheckpointPropertiesNV.  A read-back
+  // (output) structure: pNext is non-const and no setters are generated.
+  // Layout-compatible with the C struct, so conversions are reinterpret_casts.
+  struct QueueFamilyCheckpointPropertiesNV
+  {
+    using NativeType = VkQueueFamilyCheckpointPropertiesNV;
+
+    // This sType may appear at most once in a pNext chain.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyCheckpointPropertiesNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointPropertiesNV(VULKAN_HPP_NAMESPACE::PipelineStageFlags checkpointExecutionStageMask_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), checkpointExecutionStageMask( checkpointExecutionStageMask_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointPropertiesNV( QueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the wrapper type.
+    QueueFamilyCheckpointPropertiesNV( VkQueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : QueueFamilyCheckpointPropertiesNV( *reinterpret_cast<QueueFamilyCheckpointPropertiesNV const *>( &rhs ) )
+    {}
+
+
+    QueueFamilyCheckpointPropertiesNV & operator=( QueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (bitwise-identical layout).
+    QueueFamilyCheckpointPropertiesNV & operator=( VkQueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV const *>( &rhs );
+      return *this;
+    }
+
+
+    // Zero-cost conversions to the C API type.
+    operator VkQueueFamilyCheckpointPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkQueueFamilyCheckpointPropertiesNV*>( this );
+    }
+
+    operator VkQueueFamilyCheckpointPropertiesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkQueueFamilyCheckpointPropertiesNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Exposes all members as a tuple of references for generic code.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PipelineStageFlags const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, checkpointExecutionStageMask );
+    }
+#endif
+
+
+    // Comparison is member-wise; pNext compares as a raw pointer.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( QueueFamilyCheckpointPropertiesNV const & ) const = default;
+#else
+    bool operator==( QueueFamilyCheckpointPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( checkpointExecutionStageMask == rhs.checkpointExecutionStageMask );
+#endif
+    }
+
+    bool operator!=( QueueFamilyCheckpointPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyCheckpointPropertiesNV;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineStageFlags checkpointExecutionStageMask = {};
+
+  };
+
+  // Maps the sType enumerant back to this wrapper type for generic lookups.
+  template <>
+  struct CppType<StructureType, StructureType::eQueueFamilyCheckpointPropertiesNV>
+  {
+    using Type = QueueFamilyCheckpointPropertiesNV;
+  };
+
+  struct QueueFamilyGlobalPriorityPropertiesKHR
+  {
+    using NativeType = VkQueueFamilyGlobalPriorityPropertiesKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyGlobalPriorityPropertiesKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR_14 QueueFamilyGlobalPriorityPropertiesKHR(uint32_t priorityCount_ = {}, std::array<VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR,VK_MAX_GLOBAL_PRIORITY_SIZE_KHR> const & priorities_ = { { VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow } }, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), priorityCount( priorityCount_ ), priorities( priorities_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 QueueFamilyGlobalPriorityPropertiesKHR( QueueFamilyGlobalPriorityPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    QueueFamilyGlobalPriorityPropertiesKHR( VkQueueFamilyGlobalPriorityPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : QueueFamilyGlobalPriorityPropertiesKHR( *reinterpret_cast<QueueFamilyGlobalPriorityPropertiesKHR const *>( &rhs ) )
+    {}
+
+
+    QueueFamilyGlobalPriorityPropertiesKHR & operator=( QueueFamilyGlobalPriorityPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    QueueFamilyGlobalPriorityPropertiesKHR & operator=( VkQueueFamilyGlobalPriorityPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyGlobalPriorityPropertiesKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 QueueFamilyGlobalPriorityPropertiesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 QueueFamilyGlobalPriorityPropertiesKHR & setPriorityCount( uint32_t priorityCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      priorityCount = priorityCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 QueueFamilyGlobalPriorityPropertiesKHR & setPriorities( std::array<VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR,VK_MAX_GLOBAL_PRIORITY_SIZE_KHR> priorities_ ) VULKAN_HPP_NOEXCEPT
+    {
+      priorities = priorities_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkQueueFamilyGlobalPriorityPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkQueueFamilyGlobalPriorityPropertiesKHR*>( this );
+    }
+
+    operator VkQueueFamilyGlobalPriorityPropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkQueueFamilyGlobalPriorityPropertiesKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR, VK_MAX_GLOBAL_PRIORITY_SIZE_KHR> const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, priorityCount, priorities );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( QueueFamilyGlobalPriorityPropertiesKHR const & ) const = default;
+#else
+    bool operator==( QueueFamilyGlobalPriorityPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( priorityCount == rhs.priorityCount )
+          && ( priorities == rhs.priorities );
+#endif
+    }
+
+    bool operator!=( QueueFamilyGlobalPriorityPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyGlobalPriorityPropertiesKHR;
+    void * pNext = {};
+    uint32_t priorityCount = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR, VK_MAX_GLOBAL_PRIORITY_SIZE_KHR> priorities = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eQueueFamilyGlobalPriorityPropertiesKHR>
+  {
+    using Type = QueueFamilyGlobalPriorityPropertiesKHR;
+  };
+  using QueueFamilyGlobalPriorityPropertiesEXT = QueueFamilyGlobalPriorityPropertiesKHR;
+
+  struct QueueFamilyProperties
+  {
+    using NativeType = VkQueueFamilyProperties;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR QueueFamilyProperties(VULKAN_HPP_NAMESPACE::QueueFlags queueFlags_ = {}, uint32_t queueCount_ = {}, uint32_t timestampValidBits_ = {}, VULKAN_HPP_NAMESPACE::Extent3D minImageTransferGranularity_ = {}) VULKAN_HPP_NOEXCEPT
+    : queueFlags( queueFlags_ ), queueCount( queueCount_ ), timestampValidBits( timestampValidBits_ ), minImageTransferGranularity( minImageTransferGranularity_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR QueueFamilyProperties( QueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    QueueFamilyProperties( VkQueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : QueueFamilyProperties( *reinterpret_cast<QueueFamilyProperties const *>( &rhs ) )
+    {}
+
+
+    QueueFamilyProperties & operator=( QueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    QueueFamilyProperties & operator=( VkQueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyProperties const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkQueueFamilyProperties const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkQueueFamilyProperties*>( this );
+    }
+
+    operator VkQueueFamilyProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkQueueFamilyProperties*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::QueueFlags const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Extent3D const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( queueFlags, queueCount, timestampValidBits, minImageTransferGranularity );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( QueueFamilyProperties const & ) const = default;
+#else
+    bool operator==( QueueFamilyProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( queueFlags == rhs.queueFlags )
+          && ( queueCount == rhs.queueCount )
+          && ( timestampValidBits == rhs.timestampValidBits )
+          && ( minImageTransferGranularity == rhs.minImageTransferGranularity );
+#endif
+    }
+
+    bool operator!=( QueueFamilyProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::QueueFlags queueFlags = {};
+    uint32_t queueCount = {};
+    uint32_t timestampValidBits = {};
+    VULKAN_HPP_NAMESPACE::Extent3D minImageTransferGranularity = {};
+
+  };
+
+  struct QueueFamilyProperties2
+  {
+    using NativeType = VkQueueFamilyProperties2;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyProperties2;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR QueueFamilyProperties2(VULKAN_HPP_NAMESPACE::QueueFamilyProperties queueFamilyProperties_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), queueFamilyProperties( queueFamilyProperties_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR QueueFamilyProperties2( QueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    QueueFamilyProperties2( VkQueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+      : QueueFamilyProperties2( *reinterpret_cast<QueueFamilyProperties2 const *>( &rhs ) )
+    {}
+
+
+    QueueFamilyProperties2 & operator=( QueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    QueueFamilyProperties2 & operator=( VkQueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkQueueFamilyProperties2 const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkQueueFamilyProperties2*>( this );
+    }
+
+    operator VkQueueFamilyProperties2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkQueueFamilyProperties2*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::QueueFamilyProperties const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, queueFamilyProperties );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( QueueFamilyProperties2 const & ) const = default;
+#else
+    bool operator==( QueueFamilyProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( queueFamilyProperties == rhs.queueFamilyProperties );
+#endif
+    }
+
+    bool operator!=( QueueFamilyProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyProperties2;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::QueueFamilyProperties queueFamilyProperties = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eQueueFamilyProperties2>
+  {
+    using Type = QueueFamilyProperties2;
+  };
+  using QueueFamilyProperties2KHR = QueueFamilyProperties2;
+
+  struct QueueFamilyQueryResultStatusPropertiesKHR
+  {
+    using NativeType = VkQueueFamilyQueryResultStatusPropertiesKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyQueryResultStatusPropertiesKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR QueueFamilyQueryResultStatusPropertiesKHR(VULKAN_HPP_NAMESPACE::Bool32 queryResultStatusSupport_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), queryResultStatusSupport( queryResultStatusSupport_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR QueueFamilyQueryResultStatusPropertiesKHR( QueueFamilyQueryResultStatusPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    QueueFamilyQueryResultStatusPropertiesKHR( VkQueueFamilyQueryResultStatusPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : QueueFamilyQueryResultStatusPropertiesKHR( *reinterpret_cast<QueueFamilyQueryResultStatusPropertiesKHR const *>( &rhs ) )
+    {}
+
+
+    QueueFamilyQueryResultStatusPropertiesKHR & operator=( QueueFamilyQueryResultStatusPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    QueueFamilyQueryResultStatusPropertiesKHR & operator=( VkQueueFamilyQueryResultStatusPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyQueryResultStatusPropertiesKHR const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkQueueFamilyQueryResultStatusPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkQueueFamilyQueryResultStatusPropertiesKHR*>( this );
+    }
+
+    operator VkQueueFamilyQueryResultStatusPropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkQueueFamilyQueryResultStatusPropertiesKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, queryResultStatusSupport );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( QueueFamilyQueryResultStatusPropertiesKHR const & ) const = default;
+#else
+    bool operator==( QueueFamilyQueryResultStatusPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( queryResultStatusSupport == rhs.queryResultStatusSupport );
+#endif
+    }
+
+    bool operator!=( QueueFamilyQueryResultStatusPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyQueryResultStatusPropertiesKHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 queryResultStatusSupport = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eQueueFamilyQueryResultStatusPropertiesKHR>
+  {
+    using Type = QueueFamilyQueryResultStatusPropertiesKHR;
+  };
+
+  struct QueueFamilyVideoPropertiesKHR
+  {
+    using NativeType = VkQueueFamilyVideoPropertiesKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyVideoPropertiesKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR QueueFamilyVideoPropertiesKHR(VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagsKHR videoCodecOperations_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), videoCodecOperations( videoCodecOperations_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR QueueFamilyVideoPropertiesKHR( QueueFamilyVideoPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    QueueFamilyVideoPropertiesKHR( VkQueueFamilyVideoPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : QueueFamilyVideoPropertiesKHR( *reinterpret_cast<QueueFamilyVideoPropertiesKHR const *>( &rhs ) )
+    {}
+
+
+    QueueFamilyVideoPropertiesKHR & operator=( QueueFamilyVideoPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    QueueFamilyVideoPropertiesKHR & operator=( VkQueueFamilyVideoPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyVideoPropertiesKHR const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkQueueFamilyVideoPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkQueueFamilyVideoPropertiesKHR*>( this );
+    }
+
+    operator VkQueueFamilyVideoPropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkQueueFamilyVideoPropertiesKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagsKHR const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, videoCodecOperations );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( QueueFamilyVideoPropertiesKHR const & ) const = default;
+#else
+    bool operator==( QueueFamilyVideoPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( videoCodecOperations == rhs.videoCodecOperations );
+#endif
+    }
+
+    bool operator!=( QueueFamilyVideoPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyVideoPropertiesKHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagsKHR videoCodecOperations = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eQueueFamilyVideoPropertiesKHR>
+  {
+    using Type = QueueFamilyVideoPropertiesKHR;
+  };
+
+  struct RayTracingShaderGroupCreateInfoKHR
+  {
+    using NativeType = VkRayTracingShaderGroupCreateInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingShaderGroupCreateInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoKHR(VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral, uint32_t generalShader_ = {}, uint32_t closestHitShader_ = {}, uint32_t anyHitShader_ = {}, uint32_t intersectionShader_ = {}, const void * pShaderGroupCaptureReplayHandle_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), type( type_ ), generalShader( generalShader_ ), closestHitShader( closestHitShader_ ), anyHitShader( anyHitShader_ ), intersectionShader( intersectionShader_ ), pShaderGroupCaptureReplayHandle( pShaderGroupCaptureReplayHandle_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoKHR( RayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    RayTracingShaderGroupCreateInfoKHR( VkRayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : RayTracingShaderGroupCreateInfoKHR( *reinterpret_cast<RayTracingShaderGroupCreateInfoKHR const *>( &rhs ) )
+    {}
+
+
+    RayTracingShaderGroupCreateInfoKHR & operator=( RayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    RayTracingShaderGroupCreateInfoKHR & operator=( VkRayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & setType( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ ) VULKAN_HPP_NOEXCEPT
+    {
+      type = type_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & setGeneralShader( uint32_t generalShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      generalShader = generalShader_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & setClosestHitShader( uint32_t closestHitShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      closestHitShader = closestHitShader_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & setAnyHitShader( uint32_t anyHitShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      anyHitShader = anyHitShader_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & setIntersectionShader( uint32_t intersectionShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      intersectionShader = intersectionShader_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & setPShaderGroupCaptureReplayHandle( const void * pShaderGroupCaptureReplayHandle_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pShaderGroupCaptureReplayHandle = pShaderGroupCaptureReplayHandle_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkRayTracingShaderGroupCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRayTracingShaderGroupCreateInfoKHR*>( this );
+    }
+
+    operator VkRayTracingShaderGroupCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRayTracingShaderGroupCreateInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, const void * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, type, generalShader, closestHitShader, anyHitShader, intersectionShader, pShaderGroupCaptureReplayHandle );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( RayTracingShaderGroupCreateInfoKHR const & ) const = default;
+#else
+    bool operator==( RayTracingShaderGroupCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( type == rhs.type )
+          && ( generalShader == rhs.generalShader )
+          && ( closestHitShader == rhs.closestHitShader )
+          && ( anyHitShader == rhs.anyHitShader )
+          && ( intersectionShader == rhs.intersectionShader )
+          && ( pShaderGroupCaptureReplayHandle == rhs.pShaderGroupCaptureReplayHandle );
+#endif
+    }
+
+    bool operator!=( RayTracingShaderGroupCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingShaderGroupCreateInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral;
+    uint32_t generalShader = {};
+    uint32_t closestHitShader = {};
+    uint32_t anyHitShader = {};
+    uint32_t intersectionShader = {};
+    const void * pShaderGroupCaptureReplayHandle = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eRayTracingShaderGroupCreateInfoKHR>
+  {
+    using Type = RayTracingShaderGroupCreateInfoKHR;
+  };
+
+  struct RayTracingPipelineInterfaceCreateInfoKHR
+  {
+    using NativeType = VkRayTracingPipelineInterfaceCreateInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingPipelineInterfaceCreateInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR RayTracingPipelineInterfaceCreateInfoKHR(uint32_t maxPipelineRayPayloadSize_ = {}, uint32_t maxPipelineRayHitAttributeSize_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), maxPipelineRayPayloadSize( maxPipelineRayPayloadSize_ ), maxPipelineRayHitAttributeSize( maxPipelineRayHitAttributeSize_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR RayTracingPipelineInterfaceCreateInfoKHR( RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    RayTracingPipelineInterfaceCreateInfoKHR( VkRayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : RayTracingPipelineInterfaceCreateInfoKHR( *reinterpret_cast<RayTracingPipelineInterfaceCreateInfoKHR const *>( &rhs ) )
+    {}
+
+
+    RayTracingPipelineInterfaceCreateInfoKHR & operator=( RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    RayTracingPipelineInterfaceCreateInfoKHR & operator=( VkRayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineInterfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineInterfaceCreateInfoKHR & setMaxPipelineRayPayloadSize( uint32_t maxPipelineRayPayloadSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxPipelineRayPayloadSize = maxPipelineRayPayloadSize_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineInterfaceCreateInfoKHR & setMaxPipelineRayHitAttributeSize( uint32_t maxPipelineRayHitAttributeSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxPipelineRayHitAttributeSize = maxPipelineRayHitAttributeSize_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkRayTracingPipelineInterfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRayTracingPipelineInterfaceCreateInfoKHR*>( this );
+    }
+
+    operator VkRayTracingPipelineInterfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRayTracingPipelineInterfaceCreateInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, maxPipelineRayPayloadSize, maxPipelineRayHitAttributeSize );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( RayTracingPipelineInterfaceCreateInfoKHR const & ) const = default;
+#else
+    bool operator==( RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxPipelineRayPayloadSize == rhs.maxPipelineRayPayloadSize )
+          && ( maxPipelineRayHitAttributeSize == rhs.maxPipelineRayHitAttributeSize );
+#endif
+    }
+
+    bool operator!=( RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineInterfaceCreateInfoKHR;
+    const void * pNext = {};
+    uint32_t maxPipelineRayPayloadSize = {};
+    uint32_t maxPipelineRayHitAttributeSize = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eRayTracingPipelineInterfaceCreateInfoKHR>
+  {
+    using Type = RayTracingPipelineInterfaceCreateInfoKHR;
+  };
+
+  struct RayTracingPipelineCreateInfoKHR
+  {
+    using NativeType = VkRayTracingPipelineCreateInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingPipelineCreateInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoKHR(VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, uint32_t stageCount_ = {}, const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ = {}, uint32_t groupCount_ = {}, const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR * pGroups_ = {}, uint32_t maxPipelineRayRecursionDepth_ = {}, const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo_ = {}, const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR * pLibraryInterface_ = {}, const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), stageCount( stageCount_ ), pStages( pStages_ ), groupCount( groupCount_ ), pGroups( pGroups_ ), maxPipelineRayRecursionDepth( maxPipelineRayRecursionDepth_ ), pLibraryInfo( pLibraryInfo_ ), pLibraryInterface( pLibraryInterface_ ), pDynamicState( pDynamicState_ ), layout( layout_ ), basePipelineHandle( basePipelineHandle_ ), basePipelineIndex( basePipelineIndex_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoKHR( RayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    RayTracingPipelineCreateInfoKHR( VkRayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : RayTracingPipelineCreateInfoKHR( *reinterpret_cast<RayTracingPipelineCreateInfoKHR const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    RayTracingPipelineCreateInfoKHR( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR> const & groups_ = {}, uint32_t maxPipelineRayRecursionDepth_ = {}, const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo_ = {}, const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR * pLibraryInterface_ = {}, const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {}, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), flags( flags_ ), stageCount( static_cast<uint32_t>( stages_.size() ) ), pStages( stages_.data() ), groupCount( static_cast<uint32_t>( groups_.size() ) ), pGroups( groups_.data() ), maxPipelineRayRecursionDepth( maxPipelineRayRecursionDepth_ ), pLibraryInfo( pLibraryInfo_ ), pLibraryInterface( pLibraryInterface_ ), pDynamicState( pDynamicState_ ), layout( layout_ ), basePipelineHandle( basePipelineHandle_ ), basePipelineIndex( basePipelineIndex_ )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    RayTracingPipelineCreateInfoKHR & operator=( RayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    RayTracingPipelineCreateInfoKHR & operator=( VkRayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stageCount = stageCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pStages = pStages_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    RayTracingPipelineCreateInfoKHR & setStages( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stageCount = static_cast<uint32_t>( stages_.size() );
+      pStages = stages_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setGroupCount( uint32_t groupCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      groupCount = groupCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setPGroups( const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR * pGroups_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pGroups = pGroups_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    RayTracingPipelineCreateInfoKHR & setGroups( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR> const & groups_ ) VULKAN_HPP_NOEXCEPT
+    {
+      groupCount = static_cast<uint32_t>( groups_.size() );
+      pGroups = groups_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setMaxPipelineRayRecursionDepth( uint32_t maxPipelineRayRecursionDepth_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxPipelineRayRecursionDepth = maxPipelineRayRecursionDepth_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setPLibraryInfo( const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pLibraryInfo = pLibraryInfo_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setPLibraryInterface( const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR * pLibraryInterface_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pLibraryInterface = pLibraryInterface_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setPDynamicState( const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDynamicState = pDynamicState_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      layout = layout_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT
+    {
+      basePipelineHandle = basePipelineHandle_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      basePipelineIndex = basePipelineIndex_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkRayTracingPipelineCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR*>( this );
+    }
+
+    operator VkRayTracingPipelineCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRayTracingPipelineCreateInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineCreateFlags const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * const &, const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR * const &, const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * const &, VULKAN_HPP_NAMESPACE::PipelineLayout const &, VULKAN_HPP_NAMESPACE::Pipeline const &, int32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, stageCount, pStages, groupCount, pGroups, maxPipelineRayRecursionDepth, pLibraryInfo, pLibraryInterface, pDynamicState, layout, basePipelineHandle, basePipelineIndex );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( RayTracingPipelineCreateInfoKHR const & ) const = default;
+#else
+    bool operator==( RayTracingPipelineCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( stageCount == rhs.stageCount )
+          && ( pStages == rhs.pStages )
+          && ( groupCount == rhs.groupCount )
+          && ( pGroups == rhs.pGroups )
+          && ( maxPipelineRayRecursionDepth == rhs.maxPipelineRayRecursionDepth )
+          && ( pLibraryInfo == rhs.pLibraryInfo )
+          && ( pLibraryInterface == rhs.pLibraryInterface )
+          && ( pDynamicState == rhs.pDynamicState )
+          && ( layout == rhs.layout )
+          && ( basePipelineHandle == rhs.basePipelineHandle )
+          && ( basePipelineIndex == rhs.basePipelineIndex );
+#endif
+    }
+
+    bool operator!=( RayTracingPipelineCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineCreateInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {};
+    uint32_t stageCount = {};
+    const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages = {};
+    uint32_t groupCount = {};
+    const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR * pGroups = {};
+    uint32_t maxPipelineRayRecursionDepth = {};
+    const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo = {};
+    const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR * pLibraryInterface = {};
+    const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState = {};
+    VULKAN_HPP_NAMESPACE::PipelineLayout layout = {};
+    VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {};
+    int32_t basePipelineIndex = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eRayTracingPipelineCreateInfoKHR>  // maps the sType enum value back to its C++ struct type
+  {
+    using Type = RayTracingPipelineCreateInfoKHR;
+  };
+
+  struct RayTracingShaderGroupCreateInfoNV  // C++ wrapper around VkRayTracingShaderGroupCreateInfoNV (generated code)
+  {
+    using NativeType = VkRayTracingShaderGroupCreateInfoNV;  // layout-compatible C struct; conversions below are reinterpret_casts
+
+    static const bool allowDuplicate = false;  // NOTE(review): generated flag, presumably "at most once in a pNext chain" -- confirm
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingShaderGroupCreateInfoNV;  // value stored in sType below
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoNV(VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral, uint32_t generalShader_ = {}, uint32_t closestHitShader_ = {}, uint32_t anyHitShader_ = {}, uint32_t intersectionShader_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), type( type_ ), generalShader( generalShader_ ), closestHitShader( closestHitShader_ ), anyHitShader( anyHitShader_ ), intersectionShader( intersectionShader_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoNV( RayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    RayTracingShaderGroupCreateInfoNV( VkRayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : RayTracingShaderGroupCreateInfoNV( *reinterpret_cast<RayTracingShaderGroupCreateInfoNV const *>( &rhs ) )  // identical layout, so the cast is the whole conversion
+    {}
+
+
+    RayTracingShaderGroupCreateInfoNV & operator=( RayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    RayTracingShaderGroupCreateInfoNV & operator=( VkRayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT  // chainable setters: each returns *this
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV & setType( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ ) VULKAN_HPP_NOEXCEPT
+    {
+      type = type_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV & setGeneralShader( uint32_t generalShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      generalShader = generalShader_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV & setClosestHitShader( uint32_t closestHitShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      closestHitShader = closestHitShader_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV & setAnyHitShader( uint32_t anyHitShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      anyHitShader = anyHitShader_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV & setIntersectionShader( uint32_t intersectionShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      intersectionShader = intersectionShader_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkRayTracingShaderGroupCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT  // implicit view as the C struct for API calls
+    {
+      return *reinterpret_cast<const VkRayTracingShaderGroupCreateInfoNV*>( this );
+    }
+
+    operator VkRayTracingShaderGroupCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRayTracingShaderGroupCreateInfoNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT  // member-wise tuple; backs operator== when reflection is enabled
+    {
+      return std::tie( sType, pNext, type, generalShader, closestHitShader, anyHitShader, intersectionShader );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( RayTracingShaderGroupCreateInfoNV const & ) const = default;
+#else
+    bool operator==( RayTracingShaderGroupCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT  // member-wise equality (pointers compared by address)
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( type == rhs.type )
+          && ( generalShader == rhs.generalShader )
+          && ( closestHitShader == rhs.closestHitShader )
+          && ( anyHitShader == rhs.anyHitShader )
+          && ( intersectionShader == rhs.intersectionShader );
+#endif
+    }
+
+    bool operator!=( RayTracingShaderGroupCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT  // negation of operator==
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:  // members mirror the C struct field-for-field, in declaration order
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingShaderGroupCreateInfoNV;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral;
+    uint32_t generalShader = {};
+    uint32_t closestHitShader = {};
+    uint32_t anyHitShader = {};
+    uint32_t intersectionShader = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eRayTracingShaderGroupCreateInfoNV>  // maps the sType enum value back to its C++ struct type
+  {
+    using Type = RayTracingShaderGroupCreateInfoNV;
+  };
+
+  struct RayTracingPipelineCreateInfoNV  // C++ wrapper around VkRayTracingPipelineCreateInfoNV (generated code)
+  {
+    using NativeType = VkRayTracingPipelineCreateInfoNV;  // layout-compatible C struct; conversions below are reinterpret_casts
+
+    static const bool allowDuplicate = false;  // NOTE(review): generated flag, presumably "at most once in a pNext chain" -- confirm
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingPipelineCreateInfoNV;  // value stored in sType below
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoNV(VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, uint32_t stageCount_ = {}, const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ = {}, uint32_t groupCount_ = {}, const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV * pGroups_ = {}, uint32_t maxRecursionDepth_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), stageCount( stageCount_ ), pStages( pStages_ ), groupCount( groupCount_ ), pGroups( pGroups_ ), maxRecursionDepth( maxRecursionDepth_ ), layout( layout_ ), basePipelineHandle( basePipelineHandle_ ), basePipelineIndex( basePipelineIndex_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoNV( RayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    RayTracingPipelineCreateInfoNV( VkRayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : RayTracingPipelineCreateInfoNV( *reinterpret_cast<RayTracingPipelineCreateInfoNV const *>( &rhs ) )  // identical layout, so the cast is the whole conversion
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    RayTracingPipelineCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV> const & groups_ = {}, uint32_t maxRecursionDepth_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {}, const void * pNext_ = nullptr )  // array-proxy overload derives count/pointer pairs from the proxies
+    : pNext( pNext_ ), flags( flags_ ), stageCount( static_cast<uint32_t>( stages_.size() ) ), pStages( stages_.data() ), groupCount( static_cast<uint32_t>( groups_.size() ) ), pGroups( groups_.data() ), maxRecursionDepth( maxRecursionDepth_ ), layout( layout_ ), basePipelineHandle( basePipelineHandle_ ), basePipelineIndex( basePipelineIndex_ )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    RayTracingPipelineCreateInfoNV & operator=( RayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    RayTracingPipelineCreateInfoNV & operator=( VkRayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT  // chainable setters: each returns *this
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stageCount = stageCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pStages = pStages_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    RayTracingPipelineCreateInfoNV & setStages( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_ ) VULKAN_HPP_NOEXCEPT  // sets stageCount and pStages together from one proxy
+    {
+      stageCount = static_cast<uint32_t>( stages_.size() );
+      pStages = stages_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setGroupCount( uint32_t groupCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      groupCount = groupCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setPGroups( const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV * pGroups_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pGroups = pGroups_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    RayTracingPipelineCreateInfoNV & setGroups( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV> const & groups_ ) VULKAN_HPP_NOEXCEPT  // sets groupCount and pGroups together from one proxy
+    {
+      groupCount = static_cast<uint32_t>( groups_.size() );
+      pGroups = groups_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setMaxRecursionDepth( uint32_t maxRecursionDepth_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxRecursionDepth = maxRecursionDepth_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      layout = layout_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT
+    {
+      basePipelineHandle = basePipelineHandle_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      basePipelineIndex = basePipelineIndex_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkRayTracingPipelineCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT  // implicit view as the C struct for API calls
+    {
+      return *reinterpret_cast<const VkRayTracingPipelineCreateInfoNV*>( this );
+    }
+
+    operator VkRayTracingPipelineCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRayTracingPipelineCreateInfoNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineCreateFlags const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::PipelineLayout const &, VULKAN_HPP_NAMESPACE::Pipeline const &, int32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT  // member-wise tuple; backs operator== when reflection is enabled
+    {
+      return std::tie( sType, pNext, flags, stageCount, pStages, groupCount, pGroups, maxRecursionDepth, layout, basePipelineHandle, basePipelineIndex );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( RayTracingPipelineCreateInfoNV const & ) const = default;
+#else
+    bool operator==( RayTracingPipelineCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT  // member-wise equality (pointers compared by address)
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( stageCount == rhs.stageCount )
+          && ( pStages == rhs.pStages )
+          && ( groupCount == rhs.groupCount )
+          && ( pGroups == rhs.pGroups )
+          && ( maxRecursionDepth == rhs.maxRecursionDepth )
+          && ( layout == rhs.layout )
+          && ( basePipelineHandle == rhs.basePipelineHandle )
+          && ( basePipelineIndex == rhs.basePipelineIndex );
+#endif
+    }
+
+    bool operator!=( RayTracingPipelineCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT  // negation of operator==
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:  // members mirror the C struct field-for-field, in declaration order
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineCreateInfoNV;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {};
+    uint32_t stageCount = {};
+    const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages = {};
+    uint32_t groupCount = {};
+    const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV * pGroups = {};
+    uint32_t maxRecursionDepth = {};
+    VULKAN_HPP_NAMESPACE::PipelineLayout layout = {};
+    VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {};
+    int32_t basePipelineIndex = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eRayTracingPipelineCreateInfoNV>  // maps the sType enum value back to its C++ struct type
+  {
+    using Type = RayTracingPipelineCreateInfoNV;
+  };
+
+  struct RefreshCycleDurationGOOGLE  // C++ wrapper around VkRefreshCycleDurationGOOGLE; note: no sType/pNext members (plain value struct)
+  {
+    using NativeType = VkRefreshCycleDurationGOOGLE;  // layout-compatible C struct; conversions below are reinterpret_casts
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR RefreshCycleDurationGOOGLE(uint64_t refreshDuration_ = {}) VULKAN_HPP_NOEXCEPT
+    : refreshDuration( refreshDuration_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR RefreshCycleDurationGOOGLE( RefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    RefreshCycleDurationGOOGLE( VkRefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+      : RefreshCycleDurationGOOGLE( *reinterpret_cast<RefreshCycleDurationGOOGLE const *>( &rhs ) )  // identical layout, so the cast is the whole conversion
+    {}
+
+
+    RefreshCycleDurationGOOGLE & operator=( RefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    RefreshCycleDurationGOOGLE & operator=( VkRefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkRefreshCycleDurationGOOGLE const &() const VULKAN_HPP_NOEXCEPT  // implicit view as the C struct for API calls
+    {
+      return *reinterpret_cast<const VkRefreshCycleDurationGOOGLE*>( this );
+    }
+
+    operator VkRefreshCycleDurationGOOGLE &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRefreshCycleDurationGOOGLE*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<uint64_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT  // single-member tuple; backs operator== when reflection is enabled
+    {
+      return std::tie( refreshDuration );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( RefreshCycleDurationGOOGLE const & ) const = default;
+#else
+    bool operator==( RefreshCycleDurationGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( refreshDuration == rhs.refreshDuration );
+#endif
+    }
+
+    bool operator!=( RefreshCycleDurationGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT  // negation of operator==
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    uint64_t refreshDuration = {};  // NOTE(review): presumably the display refresh period in nanoseconds -- confirm against VK_GOOGLE_display_timing
+
+  };
+
+  struct ReleaseSwapchainImagesInfoEXT  // C++ wrapper around VkReleaseSwapchainImagesInfoEXT (generated code)
+  {
+    using NativeType = VkReleaseSwapchainImagesInfoEXT;  // layout-compatible C struct; conversions below are reinterpret_casts
+
+    static const bool allowDuplicate = false;  // NOTE(review): generated flag, presumably "at most once in a pNext chain" -- confirm
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eReleaseSwapchainImagesInfoEXT;  // value stored in sType below
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ReleaseSwapchainImagesInfoEXT(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {}, uint32_t imageIndexCount_ = {}, const uint32_t * pImageIndices_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), swapchain( swapchain_ ), imageIndexCount( imageIndexCount_ ), pImageIndices( pImageIndices_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ReleaseSwapchainImagesInfoEXT( ReleaseSwapchainImagesInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ReleaseSwapchainImagesInfoEXT( VkReleaseSwapchainImagesInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ReleaseSwapchainImagesInfoEXT( *reinterpret_cast<ReleaseSwapchainImagesInfoEXT const *>( &rhs ) )  // identical layout, so the cast is the whole conversion
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    ReleaseSwapchainImagesInfoEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & imageIndices_, const void * pNext_ = nullptr )  // array-proxy overload derives count/pointer pair from the proxy
+    : pNext( pNext_ ), swapchain( swapchain_ ), imageIndexCount( static_cast<uint32_t>( imageIndices_.size() ) ), pImageIndices( imageIndices_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    ReleaseSwapchainImagesInfoEXT & operator=( ReleaseSwapchainImagesInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ReleaseSwapchainImagesInfoEXT & operator=( VkReleaseSwapchainImagesInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ReleaseSwapchainImagesInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT  // chainable setters: each returns *this
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ReleaseSwapchainImagesInfoEXT & setSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT
+    {
+      swapchain = swapchain_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ReleaseSwapchainImagesInfoEXT & setImageIndexCount( uint32_t imageIndexCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageIndexCount = imageIndexCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ReleaseSwapchainImagesInfoEXT & setPImageIndices( const uint32_t * pImageIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pImageIndices = pImageIndices_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    ReleaseSwapchainImagesInfoEXT & setImageIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & imageIndices_ ) VULKAN_HPP_NOEXCEPT  // sets imageIndexCount and pImageIndices together from one proxy
+    {
+      imageIndexCount = static_cast<uint32_t>( imageIndices_.size() );
+      pImageIndices = imageIndices_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkReleaseSwapchainImagesInfoEXT const &() const VULKAN_HPP_NOEXCEPT  // implicit view as the C struct for API calls
+    {
+      return *reinterpret_cast<const VkReleaseSwapchainImagesInfoEXT*>( this );
+    }
+
+    operator VkReleaseSwapchainImagesInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkReleaseSwapchainImagesInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SwapchainKHR const &, uint32_t const &, const uint32_t * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT  // member-wise tuple; backs operator== when reflection is enabled
+    {
+      return std::tie( sType, pNext, swapchain, imageIndexCount, pImageIndices );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ReleaseSwapchainImagesInfoEXT const & ) const = default;
+#else
+    bool operator==( ReleaseSwapchainImagesInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT  // member-wise equality (pointers compared by address)
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( swapchain == rhs.swapchain )
+          && ( imageIndexCount == rhs.imageIndexCount )
+          && ( pImageIndices == rhs.pImageIndices );
+#endif
+    }
+
+    bool operator!=( ReleaseSwapchainImagesInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT  // negation of operator==
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:  // members mirror the C struct field-for-field, in declaration order
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eReleaseSwapchainImagesInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {};
+    uint32_t imageIndexCount = {};
+    const uint32_t * pImageIndices = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eReleaseSwapchainImagesInfoEXT>  // maps the sType enum value back to its C++ struct type
+  {
+    using Type = ReleaseSwapchainImagesInfoEXT;
+  };
+
+  struct RenderPassAttachmentBeginInfo  // C++ wrapper around VkRenderPassAttachmentBeginInfo (generated code)
+  {
+    using NativeType = VkRenderPassAttachmentBeginInfo;  // layout-compatible C struct; conversions below are reinterpret_casts
+
+    static const bool allowDuplicate = false;  // NOTE(review): generated flag, presumably "at most once in a pNext chain" -- confirm
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassAttachmentBeginInfo;  // value stored in sType below
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR RenderPassAttachmentBeginInfo(uint32_t attachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::ImageView * pAttachments_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), attachmentCount( attachmentCount_ ), pAttachments( pAttachments_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR RenderPassAttachmentBeginInfo( RenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    RenderPassAttachmentBeginInfo( VkRenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : RenderPassAttachmentBeginInfo( *reinterpret_cast<RenderPassAttachmentBeginInfo const *>( &rhs ) )  // identical layout, so the cast is the whole conversion
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    RenderPassAttachmentBeginInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageView> const & attachments_, const void * pNext_ = nullptr )  // array-proxy overload derives count/pointer pair from the proxy
+    : pNext( pNext_ ), attachmentCount( static_cast<uint32_t>( attachments_.size() ) ), pAttachments( attachments_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    RenderPassAttachmentBeginInfo & operator=( RenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    RenderPassAttachmentBeginInfo & operator=( VkRenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassAttachmentBeginInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 RenderPassAttachmentBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT  // chainable setters: each returns *this
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderPassAttachmentBeginInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachmentCount = attachmentCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderPassAttachmentBeginInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::ImageView * pAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAttachments = pAttachments_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    RenderPassAttachmentBeginInfo & setAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageView> const & attachments_ ) VULKAN_HPP_NOEXCEPT  // sets attachmentCount and pAttachments together from one proxy
+    {
+      attachmentCount = static_cast<uint32_t>( attachments_.size() );
+      pAttachments = attachments_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkRenderPassAttachmentBeginInfo const &() const VULKAN_HPP_NOEXCEPT  // implicit view as the C struct for API calls
+    {
+      return *reinterpret_cast<const VkRenderPassAttachmentBeginInfo*>( this );
+    }
+
+    operator VkRenderPassAttachmentBeginInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRenderPassAttachmentBeginInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::ImageView * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT  // member-wise tuple; backs operator== when reflection is enabled
+    {
+      return std::tie( sType, pNext, attachmentCount, pAttachments );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( RenderPassAttachmentBeginInfo const & ) const = default;
+#else
+    bool operator==( RenderPassAttachmentBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT  // member-wise equality (pointers compared by address)
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( attachmentCount == rhs.attachmentCount )
+          && ( pAttachments == rhs.pAttachments );
+#endif
+    }
+
+    bool operator!=( RenderPassAttachmentBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT  // negation of operator==
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:  // members mirror the C struct field-for-field, in declaration order
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassAttachmentBeginInfo;
+    const void * pNext = {};
+    uint32_t attachmentCount = {};
+    const VULKAN_HPP_NAMESPACE::ImageView * pAttachments = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eRenderPassAttachmentBeginInfo>  // maps the sType enum value back to its C++ struct type
+  {
+    using Type = RenderPassAttachmentBeginInfo;
+  };
+  using RenderPassAttachmentBeginInfoKHR = RenderPassAttachmentBeginInfo;  // KHR alias kept for the extension that was promoted to core
+
+  // vulkan.hpp wrapper for VkRenderPassBeginInfo. Member order and types are
+  // kept layout-compatible with the C struct: the conversion operators below
+  // simply reinterpret_cast this object, so nothing here may be reordered.
+  struct RenderPassBeginInfo
+  {
+    using NativeType = VkRenderPassBeginInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassBeginInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo(VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ = {}, VULKAN_HPP_NAMESPACE::Rect2D renderArea_ = {}, uint32_t clearValueCount_ = {}, const VULKAN_HPP_NAMESPACE::ClearValue * pClearValues_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), renderPass( renderPass_ ), framebuffer( framebuffer_ ), renderArea( renderArea_ ), clearValueCount( clearValueCount_ ), pClearValues( pClearValues_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo( RenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the wrapper type.
+    RenderPassBeginInfo( VkRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : RenderPassBeginInfo( *reinterpret_cast<RenderPassBeginInfo const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Enhanced-mode convenience: derives clearValueCount / pClearValues from the ArrayProxy.
+    RenderPassBeginInfo( VULKAN_HPP_NAMESPACE::RenderPass renderPass_, VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_, VULKAN_HPP_NAMESPACE::Rect2D renderArea_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ClearValue> const & clearValues_, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), renderPass( renderPass_ ), framebuffer( framebuffer_ ), renderArea( renderArea_ ), clearValueCount( static_cast<uint32_t>( clearValues_.size() ) ), pClearValues( clearValues_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    RenderPassBeginInfo & operator=( RenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    RenderPassBeginInfo & operator=( VkRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassBeginInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent per-member setters (each returns *this for chaining).
+    VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
+    {
+      renderPass = renderPass_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & setFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      framebuffer = framebuffer_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & setRenderArea( VULKAN_HPP_NAMESPACE::Rect2D const & renderArea_ ) VULKAN_HPP_NOEXCEPT
+    {
+      renderArea = renderArea_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & setClearValueCount( uint32_t clearValueCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      clearValueCount = clearValueCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & setPClearValues( const VULKAN_HPP_NAMESPACE::ClearValue * pClearValues_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pClearValues = pClearValues_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets both clearValueCount and pClearValues from a single ArrayProxy.
+    RenderPassBeginInfo & setClearValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ClearValue> const & clearValues_ ) VULKAN_HPP_NOEXCEPT
+    {
+      clearValueCount = static_cast<uint32_t>( clearValues_.size() );
+      pClearValues = clearValues_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Bit-cast views of this object as the C struct (layout must match exactly).
+    operator VkRenderPassBeginInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRenderPassBeginInfo*>( this );
+    }
+
+    operator VkRenderPassBeginInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRenderPassBeginInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // reflect(): tuple of references to all members; backs the reflection-based operator== below.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::RenderPass const &, VULKAN_HPP_NAMESPACE::Framebuffer const &, VULKAN_HPP_NAMESPACE::Rect2D const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::ClearValue * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, renderPass, framebuffer, renderArea, clearValueCount, pClearValues );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( RenderPassBeginInfo const & ) const = default;
+#else
+    // Member-wise equality; note pointer members compare by address, not pointee.
+    bool operator==( RenderPassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( renderPass == rhs.renderPass )
+          && ( framebuffer == rhs.framebuffer )
+          && ( renderArea == rhs.renderArea )
+          && ( clearValueCount == rhs.clearValueCount )
+          && ( pClearValues == rhs.pClearValues );
+#endif
+    }
+
+    bool operator!=( RenderPassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // Members mirror VkRenderPassBeginInfo field-for-field; do not reorder.
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassBeginInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::RenderPass renderPass = {};
+    VULKAN_HPP_NAMESPACE::Framebuffer framebuffer = {};
+    VULKAN_HPP_NAMESPACE::Rect2D renderArea = {};
+    uint32_t clearValueCount = {};
+    const VULKAN_HPP_NAMESPACE::ClearValue * pClearValues = {};
+
+  };
+
+  // Maps the eRenderPassBeginInfo structure-type enumerator back to this wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eRenderPassBeginInfo>
+  {
+    using Type = RenderPassBeginInfo;
+  };
+
+  // vulkan.hpp wrapper for VkSubpassDescription. This is a plain (non-extensible)
+  // sub-structure: no sType/pNext. Layout-compatible with the C struct via the
+  // reinterpret_cast conversion operators below, so members must not be reordered.
+  struct SubpassDescription
+  {
+    using NativeType = VkSubpassDescription;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR SubpassDescription(VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ = {}, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, uint32_t inputAttachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference * pInputAttachments_ = {}, uint32_t colorAttachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference * pColorAttachments_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference * pResolveAttachments_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference * pDepthStencilAttachment_ = {}, uint32_t preserveAttachmentCount_ = {}, const uint32_t * pPreserveAttachments_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), pipelineBindPoint( pipelineBindPoint_ ), inputAttachmentCount( inputAttachmentCount_ ), pInputAttachments( pInputAttachments_ ), colorAttachmentCount( colorAttachmentCount_ ), pColorAttachments( pColorAttachments_ ), pResolveAttachments( pResolveAttachments_ ), pDepthStencilAttachment( pDepthStencilAttachment_ ), preserveAttachmentCount( preserveAttachmentCount_ ), pPreserveAttachments( pPreserveAttachments_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SubpassDescription( SubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SubpassDescription( VkSubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SubpassDescription( *reinterpret_cast<SubpassDescription const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Enhanced-mode constructor: counts/pointers are derived from ArrayProxies.
+    // There is no separate resolve-attachment count in the C struct — resolve
+    // attachments are sized by colorAttachmentCount — hence the size check below.
+    SubpassDescription( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference> const & inputAttachments_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference> const & colorAttachments_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference> const & resolveAttachments_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference * pDepthStencilAttachment_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & preserveAttachments_ = {} )
+    : flags( flags_ ), pipelineBindPoint( pipelineBindPoint_ ), inputAttachmentCount( static_cast<uint32_t>( inputAttachments_.size() ) ), pInputAttachments( inputAttachments_.data() ), colorAttachmentCount( static_cast<uint32_t>( colorAttachments_.size() ) ), pColorAttachments( colorAttachments_.data() ), pResolveAttachments( resolveAttachments_.data() ), pDepthStencilAttachment( pDepthStencilAttachment_ ), preserveAttachmentCount( static_cast<uint32_t>( preserveAttachments_.size() ) ), pPreserveAttachments( preserveAttachments_.data() )
+    {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+      VULKAN_HPP_ASSERT( resolveAttachments_.empty() || ( colorAttachments_.size() == resolveAttachments_.size() ) );
+#else
+      if ( !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() ) )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING"::SubpassDescription::SubpassDescription: !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() )" );
+      }
+#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    SubpassDescription & operator=( SubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SubpassDescription & operator=( VkSubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDescription const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent per-member setters (each returns *this for chaining).
+    VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setFlags( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineBindPoint = pipelineBindPoint_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setInputAttachmentCount( uint32_t inputAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      inputAttachmentCount = inputAttachmentCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setPInputAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference * pInputAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pInputAttachments = pInputAttachments_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    SubpassDescription & setInputAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference> const & inputAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      inputAttachmentCount = static_cast<uint32_t>( inputAttachments_.size() );
+      pInputAttachments = inputAttachments_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      colorAttachmentCount = colorAttachmentCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setPColorAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference * pColorAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pColorAttachments = pColorAttachments_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    SubpassDescription & setColorAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference> const & colorAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      colorAttachmentCount = static_cast<uint32_t>( colorAttachments_.size() );
+      pColorAttachments = colorAttachments_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setPResolveAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference * pResolveAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pResolveAttachments = pResolveAttachments_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // NOTE: intentionally writes colorAttachmentCount — resolve attachments are
+    // sized by the color attachment count (see the size check in the ArrayProxy
+    // constructor above); this is not a copy-paste error.
+    SubpassDescription & setResolveAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference> const & resolveAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      colorAttachmentCount = static_cast<uint32_t>( resolveAttachments_.size() );
+      pResolveAttachments = resolveAttachments_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setPDepthStencilAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference * pDepthStencilAttachment_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDepthStencilAttachment = pDepthStencilAttachment_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setPreserveAttachmentCount( uint32_t preserveAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      preserveAttachmentCount = preserveAttachmentCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setPPreserveAttachments( const uint32_t * pPreserveAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pPreserveAttachments = pPreserveAttachments_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    SubpassDescription & setPreserveAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & preserveAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      preserveAttachmentCount = static_cast<uint32_t>( preserveAttachments_.size() );
+      pPreserveAttachments = preserveAttachments_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Bit-cast views of this object as the C struct (layout must match exactly).
+    operator VkSubpassDescription const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSubpassDescription*>( this );
+    }
+
+    operator VkSubpassDescription &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSubpassDescription*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // reflect(): tuple of references to all members; backs the reflection-based operator== below.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags const &, VULKAN_HPP_NAMESPACE::PipelineBindPoint const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::AttachmentReference * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::AttachmentReference * const &, const VULKAN_HPP_NAMESPACE::AttachmentReference * const &, const VULKAN_HPP_NAMESPACE::AttachmentReference * const &, uint32_t const &, const uint32_t * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( flags, pipelineBindPoint, inputAttachmentCount, pInputAttachments, colorAttachmentCount, pColorAttachments, pResolveAttachments, pDepthStencilAttachment, preserveAttachmentCount, pPreserveAttachments );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SubpassDescription const & ) const = default;
+#else
+    // Member-wise equality; note pointer members compare by address, not pointee.
+    bool operator==( SubpassDescription const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( flags == rhs.flags )
+          && ( pipelineBindPoint == rhs.pipelineBindPoint )
+          && ( inputAttachmentCount == rhs.inputAttachmentCount )
+          && ( pInputAttachments == rhs.pInputAttachments )
+          && ( colorAttachmentCount == rhs.colorAttachmentCount )
+          && ( pColorAttachments == rhs.pColorAttachments )
+          && ( pResolveAttachments == rhs.pResolveAttachments )
+          && ( pDepthStencilAttachment == rhs.pDepthStencilAttachment )
+          && ( preserveAttachmentCount == rhs.preserveAttachmentCount )
+          && ( pPreserveAttachments == rhs.pPreserveAttachments );
+#endif
+    }
+
+    bool operator!=( SubpassDescription const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // Members mirror VkSubpassDescription field-for-field; do not reorder.
+    VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags = {};
+    VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
+    uint32_t inputAttachmentCount = {};
+    const VULKAN_HPP_NAMESPACE::AttachmentReference * pInputAttachments = {};
+    uint32_t colorAttachmentCount = {};
+    const VULKAN_HPP_NAMESPACE::AttachmentReference * pColorAttachments = {};
+    const VULKAN_HPP_NAMESPACE::AttachmentReference * pResolveAttachments = {};
+    const VULKAN_HPP_NAMESPACE::AttachmentReference * pDepthStencilAttachment = {};
+    uint32_t preserveAttachmentCount = {};
+    const uint32_t * pPreserveAttachments = {};
+
+  };
+
+  // vulkan.hpp wrapper for VkSubpassDependency. Plain (non-extensible) struct
+  // with no sType/pNext; layout-compatible with the C struct via the
+  // reinterpret_cast conversion operators below, so members must not be reordered.
+  struct SubpassDependency
+  {
+    using NativeType = VkSubpassDependency;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR SubpassDependency(uint32_t srcSubpass_ = {}, uint32_t dstSubpass_ = {}, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ = {}, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = {}) VULKAN_HPP_NOEXCEPT
+    : srcSubpass( srcSubpass_ ), dstSubpass( dstSubpass_ ), srcStageMask( srcStageMask_ ), dstStageMask( dstStageMask_ ), srcAccessMask( srcAccessMask_ ), dstAccessMask( dstAccessMask_ ), dependencyFlags( dependencyFlags_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SubpassDependency( SubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SubpassDependency( VkSubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SubpassDependency( *reinterpret_cast<SubpassDependency const *>( &rhs ) )
+    {}
+
+
+    SubpassDependency & operator=( SubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SubpassDependency & operator=( VkSubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDependency const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent per-member setters (each returns *this for chaining).
+    VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setSrcSubpass( uint32_t srcSubpass_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcSubpass = srcSubpass_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setDstSubpass( uint32_t dstSubpass_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstSubpass = dstSubpass_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcStageMask = srcStageMask_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstStageMask = dstStageMask_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcAccessMask = srcAccessMask_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstAccessMask = dstAccessMask_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dependencyFlags = dependencyFlags_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Bit-cast views of this object as the C struct (layout must match exactly).
+    operator VkSubpassDependency const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSubpassDependency*>( this );
+    }
+
+    operator VkSubpassDependency &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSubpassDependency*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // reflect(): tuple of references to all members; backs the reflection-based operator== below.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::PipelineStageFlags const &, VULKAN_HPP_NAMESPACE::PipelineStageFlags const &, VULKAN_HPP_NAMESPACE::AccessFlags const &, VULKAN_HPP_NAMESPACE::AccessFlags const &, VULKAN_HPP_NAMESPACE::DependencyFlags const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( srcSubpass, dstSubpass, srcStageMask, dstStageMask, srcAccessMask, dstAccessMask, dependencyFlags );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SubpassDependency const & ) const = default;
+#else
+    bool operator==( SubpassDependency const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( srcSubpass == rhs.srcSubpass )
+          && ( dstSubpass == rhs.dstSubpass )
+          && ( srcStageMask == rhs.srcStageMask )
+          && ( dstStageMask == rhs.dstStageMask )
+          && ( srcAccessMask == rhs.srcAccessMask )
+          && ( dstAccessMask == rhs.dstAccessMask )
+          && ( dependencyFlags == rhs.dependencyFlags );
+#endif
+    }
+
+    bool operator!=( SubpassDependency const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // Members mirror VkSubpassDependency field-for-field; do not reorder.
+    uint32_t srcSubpass = {};
+    uint32_t dstSubpass = {};
+    VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask = {};
+    VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask = {};
+    VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {};
+    VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {};
+    VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags = {};
+
+  };
+
+  // vulkan.hpp wrapper for VkRenderPassCreateInfo. The ArrayProxy constructor
+  // and the set*() helpers keep each count field in sync with its pointer.
+  // Layout-compatible with the C struct via the reinterpret_cast conversion
+  // operators below, so members must not be reordered.
+  struct RenderPassCreateInfo
+  {
+    using NativeType = VkRenderPassCreateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassCreateInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR RenderPassCreateInfo(VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ = {}, uint32_t attachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentDescription * pAttachments_ = {}, uint32_t subpassCount_ = {}, const VULKAN_HPP_NAMESPACE::SubpassDescription * pSubpasses_ = {}, uint32_t dependencyCount_ = {}, const VULKAN_HPP_NAMESPACE::SubpassDependency * pDependencies_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), attachmentCount( attachmentCount_ ), pAttachments( pAttachments_ ), subpassCount( subpassCount_ ), pSubpasses( pSubpasses_ ), dependencyCount( dependencyCount_ ), pDependencies( pDependencies_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR RenderPassCreateInfo( RenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    RenderPassCreateInfo( VkRenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : RenderPassCreateInfo( *reinterpret_cast<RenderPassCreateInfo const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Enhanced-mode constructor: all three count/pointer pairs are derived from ArrayProxies.
+    RenderPassCreateInfo( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentDescription> const & attachments_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDescription> const & subpasses_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDependency> const & dependencies_ = {}, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), flags( flags_ ), attachmentCount( static_cast<uint32_t>( attachments_.size() ) ), pAttachments( attachments_.data() ), subpassCount( static_cast<uint32_t>( subpasses_.size() ) ), pSubpasses( subpasses_.data() ), dependencyCount( static_cast<uint32_t>( dependencies_.size() ) ), pDependencies( dependencies_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    RenderPassCreateInfo & operator=( RenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    RenderPassCreateInfo & operator=( VkRenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent per-member setters (each returns *this for chaining).
+    VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachmentCount = attachmentCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::AttachmentDescription * pAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAttachments = pAttachments_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    RenderPassCreateInfo & setAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentDescription> const & attachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachmentCount = static_cast<uint32_t>( attachments_.size() );
+      pAttachments = attachments_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setSubpassCount( uint32_t subpassCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subpassCount = subpassCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setPSubpasses( const VULKAN_HPP_NAMESPACE::SubpassDescription * pSubpasses_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSubpasses = pSubpasses_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    RenderPassCreateInfo & setSubpasses( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDescription> const & subpasses_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subpassCount = static_cast<uint32_t>( subpasses_.size() );
+      pSubpasses = subpasses_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dependencyCount = dependencyCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setPDependencies( const VULKAN_HPP_NAMESPACE::SubpassDependency * pDependencies_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDependencies = pDependencies_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    RenderPassCreateInfo & setDependencies( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDependency> const & dependencies_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dependencyCount = static_cast<uint32_t>( dependencies_.size() );
+      pDependencies = dependencies_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Bit-cast views of this object as the C struct (layout must match exactly).
+    operator VkRenderPassCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRenderPassCreateInfo*>( this );
+    }
+
+    operator VkRenderPassCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRenderPassCreateInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // reflect(): tuple of references to all members; backs the reflection-based operator== below.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::RenderPassCreateFlags const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::AttachmentDescription * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::SubpassDescription * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::SubpassDependency * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, attachmentCount, pAttachments, subpassCount, pSubpasses, dependencyCount, pDependencies );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( RenderPassCreateInfo const & ) const = default;
+#else
+    // Member-wise equality; note pointer members compare by address, not pointee.
+    bool operator==( RenderPassCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( attachmentCount == rhs.attachmentCount )
+          && ( pAttachments == rhs.pAttachments )
+          && ( subpassCount == rhs.subpassCount )
+          && ( pSubpasses == rhs.pSubpasses )
+          && ( dependencyCount == rhs.dependencyCount )
+          && ( pDependencies == rhs.pDependencies );
+#endif
+    }
+
+    bool operator!=( RenderPassCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // Members mirror VkRenderPassCreateInfo field-for-field; do not reorder.
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags = {};
+    uint32_t attachmentCount = {};
+    const VULKAN_HPP_NAMESPACE::AttachmentDescription * pAttachments = {};
+    uint32_t subpassCount = {};
+    const VULKAN_HPP_NAMESPACE::SubpassDescription * pSubpasses = {};
+    uint32_t dependencyCount = {};
+    const VULKAN_HPP_NAMESPACE::SubpassDependency * pDependencies = {};
+
+  };
+
+  // Maps the eRenderPassCreateInfo structure-type enumerator back to this wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eRenderPassCreateInfo>
+  {
+    using Type = RenderPassCreateInfo;
+  };
+
+  struct SubpassDescription2
+  {
+    using NativeType = VkSubpassDescription2;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassDescription2;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // NOTE(review): generated Vulkan-Hpp code (wrapper over VkSubpassDescription2);
+    // keep changes in the generator, not here.
+    // Field-wise constructor; all members value-initialize except
+    // pipelineBindPoint, which defaults to eGraphics.
+VULKAN_HPP_CONSTEXPR SubpassDescription2(VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ = {}, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, uint32_t viewMask_ = {}, uint32_t inputAttachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pInputAttachments_ = {}, uint32_t colorAttachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pColorAttachments_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pResolveAttachments_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilAttachment_ = {}, uint32_t preserveAttachmentCount_ = {}, const uint32_t * pPreserveAttachments_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), pipelineBindPoint( pipelineBindPoint_ ), viewMask( viewMask_ ), inputAttachmentCount( inputAttachmentCount_ ), pInputAttachments( pInputAttachments_ ), colorAttachmentCount( colorAttachmentCount_ ), pColorAttachments( pColorAttachments_ ), pResolveAttachments( pResolveAttachments_ ), pDepthStencilAttachment( pDepthStencilAttachment_ ), preserveAttachmentCount( preserveAttachmentCount_ ), pPreserveAttachments( pPreserveAttachments_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SubpassDescription2( SubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; relies on this wrapper being layout-identical
+    // to VkSubpassDescription2 (same members, same order).
+    SubpassDescription2( VkSubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SubpassDescription2( *reinterpret_cast<SubpassDescription2 const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // ArrayProxy convenience constructor: the count members are derived from the
+    // proxy sizes.  resolveAttachments_, when non-empty, must match
+    // colorAttachments_ in size (checked below) because the C struct carries no
+    // separate resolve count — pResolveAttachments shares colorAttachmentCount.
+    SubpassDescription2( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_, uint32_t viewMask_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const & inputAttachments_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const & colorAttachments_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const & resolveAttachments_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilAttachment_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & preserveAttachments_ = {}, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), flags( flags_ ), pipelineBindPoint( pipelineBindPoint_ ), viewMask( viewMask_ ), inputAttachmentCount( static_cast<uint32_t>( inputAttachments_.size() ) ), pInputAttachments( inputAttachments_.data() ), colorAttachmentCount( static_cast<uint32_t>( colorAttachments_.size() ) ), pColorAttachments( colorAttachments_.data() ), pResolveAttachments( resolveAttachments_.data() ), pDepthStencilAttachment( pDepthStencilAttachment_ ), preserveAttachmentCount( static_cast<uint32_t>( preserveAttachments_.size() ) ), pPreserveAttachments( preserveAttachments_.data() )
+    {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+      // Exception-free builds downgrade the size check to an assert.
+      VULKAN_HPP_ASSERT( resolveAttachments_.empty() || ( colorAttachments_.size() == resolveAttachments_.size() ) );
+#else
+      if ( !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() ) )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING"::SubpassDescription2::SubpassDescription2: !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() )" );
+      }
+#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    SubpassDescription2 & operator=( SubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (same layout-identity assumption as above).
+    SubpassDescription2 & operator=( VkSubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDescription2 const *>( &rhs );
+      return *this;
+    }
+
+    // Fluent per-member setters (chainable; compiled out under
+    // VULKAN_HPP_NO_STRUCT_SETTERS).  The Array-proxy variants also update the
+    // corresponding count member.
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setFlags( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineBindPoint = pipelineBindPoint_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      viewMask = viewMask_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setInputAttachmentCount( uint32_t inputAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      inputAttachmentCount = inputAttachmentCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setPInputAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pInputAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pInputAttachments = pInputAttachments_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    SubpassDescription2 & setInputAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const & inputAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      inputAttachmentCount = static_cast<uint32_t>( inputAttachments_.size() );
+      pInputAttachments = inputAttachments_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      colorAttachmentCount = colorAttachmentCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setPColorAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pColorAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pColorAttachments = pColorAttachments_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    SubpassDescription2 & setColorAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const & colorAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      colorAttachmentCount = static_cast<uint32_t>( colorAttachments_.size() );
+      pColorAttachments = colorAttachments_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setPResolveAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pResolveAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pResolveAttachments = pResolveAttachments_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // NOTE: intentionally writes colorAttachmentCount — the struct has no
+    // separate resolve-attachment count; pResolveAttachments is sized by
+    // colorAttachmentCount.  Calling this after setColorAttachments() with a
+    // mismatched size would silently change the color count.
+    SubpassDescription2 & setResolveAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const & resolveAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      colorAttachmentCount = static_cast<uint32_t>( resolveAttachments_.size() );
+      pResolveAttachments = resolveAttachments_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setPDepthStencilAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilAttachment_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDepthStencilAttachment = pDepthStencilAttachment_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setPreserveAttachmentCount( uint32_t preserveAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      preserveAttachmentCount = preserveAttachmentCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setPPreserveAttachments( const uint32_t * pPreserveAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pPreserveAttachments = pPreserveAttachments_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    SubpassDescription2 & setPreserveAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & preserveAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      preserveAttachmentCount = static_cast<uint32_t>( preserveAttachments_.size() );
+      pPreserveAttachments = preserveAttachments_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C struct; valid only because the wrapper is
+    // layout-identical to VkSubpassDescription2.
+    operator VkSubpassDescription2 const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSubpassDescription2*>( this );
+    }
+
+    operator VkSubpassDescription2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSubpassDescription2*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Returns all members as a tuple of references; used for generic
+    // member-wise comparison below.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags const &, VULKAN_HPP_NAMESPACE::PipelineBindPoint const &, uint32_t const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::AttachmentReference2 * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::AttachmentReference2 * const &, const VULKAN_HPP_NAMESPACE::AttachmentReference2 * const &, const VULKAN_HPP_NAMESPACE::AttachmentReference2 * const &, uint32_t const &, const uint32_t * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, pipelineBindPoint, viewMask, inputAttachmentCount, pInputAttachments, colorAttachmentCount, pColorAttachments, pResolveAttachments, pDepthStencilAttachment, preserveAttachmentCount, pPreserveAttachments );
+    }
+#endif
+
+
+    // Shallow equality: pointer members compare by address, not pointee.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SubpassDescription2 const & ) const = default;
+#else
+    bool operator==( SubpassDescription2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( pipelineBindPoint == rhs.pipelineBindPoint )
+          && ( viewMask == rhs.viewMask )
+          && ( inputAttachmentCount == rhs.inputAttachmentCount )
+          && ( pInputAttachments == rhs.pInputAttachments )
+          && ( colorAttachmentCount == rhs.colorAttachmentCount )
+          && ( pColorAttachments == rhs.pColorAttachments )
+          && ( pResolveAttachments == rhs.pResolveAttachments )
+          && ( pDepthStencilAttachment == rhs.pDepthStencilAttachment )
+          && ( preserveAttachmentCount == rhs.preserveAttachmentCount )
+          && ( pPreserveAttachments == rhs.pPreserveAttachments );
+#endif
+    }
+
+    bool operator!=( SubpassDescription2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members mirror VkSubpassDescription2 exactly (order and types).
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDescription2;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags = {};
+    VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
+    uint32_t viewMask = {};
+    uint32_t inputAttachmentCount = {};
+    const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pInputAttachments = {};
+    uint32_t colorAttachmentCount = {};
+    const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pColorAttachments = {};
+    const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pResolveAttachments = {};
+    const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilAttachment = {};
+    uint32_t preserveAttachmentCount = {};
+    const uint32_t * pPreserveAttachments = {};
+
+  };
+
+  // Maps the sType enumerant back to this wrapper type for generic code.
+  template <>
+  struct CppType<StructureType, StructureType::eSubpassDescription2>
+  {
+    using Type = SubpassDescription2;
+  };
+  // KHR alias kept for code written against the pre-promotion extension name.
+  using SubpassDescription2KHR = SubpassDescription2;
+
+  // NOTE(review): generated Vulkan-Hpp wrapper over VkSubpassDependency2
+  // (layout-identical to the C struct; conversions below reinterpret_cast).
+  struct SubpassDependency2
+  {
+    using NativeType = VkSubpassDependency2;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassDependency2;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Field-wise constructor; everything value-initializes by default.
+VULKAN_HPP_CONSTEXPR SubpassDependency2(uint32_t srcSubpass_ = {}, uint32_t dstSubpass_ = {}, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ = {}, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = {}, int32_t viewOffset_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), srcSubpass( srcSubpass_ ), dstSubpass( dstSubpass_ ), srcStageMask( srcStageMask_ ), dstStageMask( dstStageMask_ ), srcAccessMask( srcAccessMask_ ), dstAccessMask( dstAccessMask_ ), dependencyFlags( dependencyFlags_ ), viewOffset( viewOffset_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SubpassDependency2( SubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct via reinterpret_cast (layout identity).
+    SubpassDependency2( VkSubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SubpassDependency2( *reinterpret_cast<SubpassDependency2 const *>( &rhs ) )
+    {}
+
+
+    SubpassDependency2 & operator=( SubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SubpassDependency2 & operator=( VkSubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDependency2 const *>( &rhs );
+      return *this;
+    }
+
+    // Fluent per-member setters (chainable).
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setSrcSubpass( uint32_t srcSubpass_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcSubpass = srcSubpass_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setDstSubpass( uint32_t dstSubpass_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstSubpass = dstSubpass_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcStageMask = srcStageMask_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstStageMask = dstStageMask_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcAccessMask = srcAccessMask_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstAccessMask = dstAccessMask_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dependencyFlags = dependencyFlags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setViewOffset( int32_t viewOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      viewOffset = viewOffset_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C struct.
+    operator VkSubpassDependency2 const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSubpassDependency2*>( this );
+    }
+
+    operator VkSubpassDependency2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSubpassDependency2*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // All members as a tuple of references, for generic member-wise use.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::PipelineStageFlags const &, VULKAN_HPP_NAMESPACE::PipelineStageFlags const &, VULKAN_HPP_NAMESPACE::AccessFlags const &, VULKAN_HPP_NAMESPACE::AccessFlags const &, VULKAN_HPP_NAMESPACE::DependencyFlags const &, int32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, srcSubpass, dstSubpass, srcStageMask, dstStageMask, srcAccessMask, dstAccessMask, dependencyFlags, viewOffset );
+    }
+#endif
+
+
+    // Shallow member-wise equality (pNext compares by address).
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SubpassDependency2 const & ) const = default;
+#else
+    bool operator==( SubpassDependency2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( srcSubpass == rhs.srcSubpass )
+          && ( dstSubpass == rhs.dstSubpass )
+          && ( srcStageMask == rhs.srcStageMask )
+          && ( dstStageMask == rhs.dstStageMask )
+          && ( srcAccessMask == rhs.srcAccessMask )
+          && ( dstAccessMask == rhs.dstAccessMask )
+          && ( dependencyFlags == rhs.dependencyFlags )
+          && ( viewOffset == rhs.viewOffset );
+#endif
+    }
+
+    bool operator!=( SubpassDependency2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members mirror VkSubpassDependency2 exactly (order and types).
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDependency2;
+    const void * pNext = {};
+    uint32_t srcSubpass = {};
+    uint32_t dstSubpass = {};
+    VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask = {};
+    VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask = {};
+    VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {};
+    VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {};
+    VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags = {};
+    int32_t viewOffset = {};
+
+  };
+
+  // sType-to-wrapper mapping for generic code.
+  template <>
+  struct CppType<StructureType, StructureType::eSubpassDependency2>
+  {
+    using Type = SubpassDependency2;
+  };
+  // KHR alias kept for code written against the pre-promotion extension name.
+  using SubpassDependency2KHR = SubpassDependency2;
+
+  // NOTE(review): generated Vulkan-Hpp wrapper over VkRenderPassCreateInfo2
+  // (layout-identical to the C struct; conversions below reinterpret_cast).
+  struct RenderPassCreateInfo2
+  {
+    using NativeType = VkRenderPassCreateInfo2;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassCreateInfo2;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Field-wise constructor; everything value-initializes by default.
+VULKAN_HPP_CONSTEXPR RenderPassCreateInfo2(VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ = {}, uint32_t attachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentDescription2 * pAttachments_ = {}, uint32_t subpassCount_ = {}, const VULKAN_HPP_NAMESPACE::SubpassDescription2 * pSubpasses_ = {}, uint32_t dependencyCount_ = {}, const VULKAN_HPP_NAMESPACE::SubpassDependency2 * pDependencies_ = {}, uint32_t correlatedViewMaskCount_ = {}, const uint32_t * pCorrelatedViewMasks_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), attachmentCount( attachmentCount_ ), pAttachments( pAttachments_ ), subpassCount( subpassCount_ ), pSubpasses( pSubpasses_ ), dependencyCount( dependencyCount_ ), pDependencies( pDependencies_ ), correlatedViewMaskCount( correlatedViewMaskCount_ ), pCorrelatedViewMasks( pCorrelatedViewMasks_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR RenderPassCreateInfo2( RenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct via reinterpret_cast (layout identity).
+    RenderPassCreateInfo2( VkRenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+      : RenderPassCreateInfo2( *reinterpret_cast<RenderPassCreateInfo2 const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // ArrayProxy convenience constructor: every count member is derived from
+    // the size of its proxy, so the pointer/count pairs cannot disagree.
+    RenderPassCreateInfo2( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentDescription2> const & attachments_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDescription2> const & subpasses_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDependency2> const & dependencies_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & correlatedViewMasks_ = {}, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), flags( flags_ ), attachmentCount( static_cast<uint32_t>( attachments_.size() ) ), pAttachments( attachments_.data() ), subpassCount( static_cast<uint32_t>( subpasses_.size() ) ), pSubpasses( subpasses_.data() ), dependencyCount( static_cast<uint32_t>( dependencies_.size() ) ), pDependencies( dependencies_.data() ), correlatedViewMaskCount( static_cast<uint32_t>( correlatedViewMasks_.size() ) ), pCorrelatedViewMasks( correlatedViewMasks_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    RenderPassCreateInfo2 & operator=( RenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    RenderPassCreateInfo2 & operator=( VkRenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const *>( &rhs );
+      return *this;
+    }
+
+    // Fluent per-member setters; the ArrayProxy variants also keep the
+    // corresponding count member in sync.
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setFlags( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachmentCount = attachmentCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setPAttachments( const VULKAN_HPP_NAMESPACE::AttachmentDescription2 * pAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAttachments = pAttachments_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    RenderPassCreateInfo2 & setAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentDescription2> const & attachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachmentCount = static_cast<uint32_t>( attachments_.size() );
+      pAttachments = attachments_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setSubpassCount( uint32_t subpassCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subpassCount = subpassCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setPSubpasses( const VULKAN_HPP_NAMESPACE::SubpassDescription2 * pSubpasses_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSubpasses = pSubpasses_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    RenderPassCreateInfo2 & setSubpasses( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDescription2> const & subpasses_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subpassCount = static_cast<uint32_t>( subpasses_.size() );
+      pSubpasses = subpasses_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dependencyCount = dependencyCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setPDependencies( const VULKAN_HPP_NAMESPACE::SubpassDependency2 * pDependencies_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDependencies = pDependencies_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    RenderPassCreateInfo2 & setDependencies( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDependency2> const & dependencies_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dependencyCount = static_cast<uint32_t>( dependencies_.size() );
+      pDependencies = dependencies_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setCorrelatedViewMaskCount( uint32_t correlatedViewMaskCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      correlatedViewMaskCount = correlatedViewMaskCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setPCorrelatedViewMasks( const uint32_t * pCorrelatedViewMasks_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pCorrelatedViewMasks = pCorrelatedViewMasks_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    RenderPassCreateInfo2 & setCorrelatedViewMasks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & correlatedViewMasks_ ) VULKAN_HPP_NOEXCEPT
+    {
+      correlatedViewMaskCount = static_cast<uint32_t>( correlatedViewMasks_.size() );
+      pCorrelatedViewMasks = correlatedViewMasks_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C struct.
+    operator VkRenderPassCreateInfo2 const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRenderPassCreateInfo2*>( this );
+    }
+
+    operator VkRenderPassCreateInfo2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRenderPassCreateInfo2*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // All members as a tuple of references, for generic member-wise use.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::RenderPassCreateFlags const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::AttachmentDescription2 * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::SubpassDescription2 * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::SubpassDependency2 * const &, uint32_t const &, const uint32_t * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, attachmentCount, pAttachments, subpassCount, pSubpasses, dependencyCount, pDependencies, correlatedViewMaskCount, pCorrelatedViewMasks );
+    }
+#endif
+
+
+    // Shallow equality: pointer members compare by address, not pointee.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( RenderPassCreateInfo2 const & ) const = default;
+#else
+    bool operator==( RenderPassCreateInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( attachmentCount == rhs.attachmentCount )
+          && ( pAttachments == rhs.pAttachments )
+          && ( subpassCount == rhs.subpassCount )
+          && ( pSubpasses == rhs.pSubpasses )
+          && ( dependencyCount == rhs.dependencyCount )
+          && ( pDependencies == rhs.pDependencies )
+          && ( correlatedViewMaskCount == rhs.correlatedViewMaskCount )
+          && ( pCorrelatedViewMasks == rhs.pCorrelatedViewMasks );
+#endif
+    }
+
+    bool operator!=( RenderPassCreateInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members mirror VkRenderPassCreateInfo2 exactly (order and types).
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassCreateInfo2;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags = {};
+    uint32_t attachmentCount = {};
+    const VULKAN_HPP_NAMESPACE::AttachmentDescription2 * pAttachments = {};
+    uint32_t subpassCount = {};
+    const VULKAN_HPP_NAMESPACE::SubpassDescription2 * pSubpasses = {};
+    uint32_t dependencyCount = {};
+    const VULKAN_HPP_NAMESPACE::SubpassDependency2 * pDependencies = {};
+    uint32_t correlatedViewMaskCount = {};
+    const uint32_t * pCorrelatedViewMasks = {};
+
+  };
+
+  // sType-to-wrapper mapping for generic code.
+  template <>
+  struct CppType<StructureType, StructureType::eRenderPassCreateInfo2>
+  {
+    using Type = RenderPassCreateInfo2;
+  };
+  // KHR alias kept for code written against the pre-promotion extension name.
+  using RenderPassCreateInfo2KHR = RenderPassCreateInfo2;
+
+  // NOTE(review): generated Vulkan-Hpp wrapper over
+  // VkRenderPassCreationControlEXT; single payload member `disallowMerging`.
+  struct RenderPassCreationControlEXT
+  {
+    using NativeType = VkRenderPassCreationControlEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassCreationControlEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR RenderPassCreationControlEXT(VULKAN_HPP_NAMESPACE::Bool32 disallowMerging_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), disallowMerging( disallowMerging_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR RenderPassCreationControlEXT( RenderPassCreationControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct via reinterpret_cast (layout identity).
+    RenderPassCreationControlEXT( VkRenderPassCreationControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : RenderPassCreationControlEXT( *reinterpret_cast<RenderPassCreationControlEXT const *>( &rhs ) )
+    {}
+
+
+    RenderPassCreationControlEXT & operator=( RenderPassCreationControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    RenderPassCreationControlEXT & operator=( VkRenderPassCreationControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassCreationControlEXT const *>( &rhs );
+      return *this;
+    }
+
+    // Fluent setters (chainable).
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 RenderPassCreationControlEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderPassCreationControlEXT & setDisallowMerging( VULKAN_HPP_NAMESPACE::Bool32 disallowMerging_ ) VULKAN_HPP_NOEXCEPT
+    {
+      disallowMerging = disallowMerging_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C struct.
+    operator VkRenderPassCreationControlEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRenderPassCreationControlEXT*>( this );
+    }
+
+    operator VkRenderPassCreationControlEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRenderPassCreationControlEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, disallowMerging );
+    }
+#endif
+
+
+    // Shallow member-wise equality (pNext compares by address).
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( RenderPassCreationControlEXT const & ) const = default;
+#else
+    bool operator==( RenderPassCreationControlEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( disallowMerging == rhs.disallowMerging );
+#endif
+    }
+
+    bool operator!=( RenderPassCreationControlEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassCreationControlEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 disallowMerging = {};
+
+  };
+
+  // sType-to-wrapper mapping for generic code.
+  template <>
+  struct CppType<StructureType, StructureType::eRenderPassCreationControlEXT>
+  {
+    using Type = RenderPassCreationControlEXT;
+  };
+
+  // NOTE(review): generated Vulkan-Hpp wrapper over
+  // VkRenderPassCreationFeedbackInfoEXT.  Unlike most wrappers here it has no
+  // sType/pNext members — it is a plain, non-extensible output struct
+  // (pointed to by RenderPassCreationFeedbackCreateInfoEXT).
+  struct RenderPassCreationFeedbackInfoEXT
+  {
+    using NativeType = VkRenderPassCreationFeedbackInfoEXT;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR RenderPassCreationFeedbackInfoEXT(uint32_t postMergeSubpassCount_ = {}) VULKAN_HPP_NOEXCEPT
+    : postMergeSubpassCount( postMergeSubpassCount_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR RenderPassCreationFeedbackInfoEXT( RenderPassCreationFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct via reinterpret_cast (layout identity).
+    RenderPassCreationFeedbackInfoEXT( VkRenderPassCreationFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : RenderPassCreationFeedbackInfoEXT( *reinterpret_cast<RenderPassCreationFeedbackInfoEXT const *>( &rhs ) )
+    {}
+
+
+    RenderPassCreationFeedbackInfoEXT & operator=( RenderPassCreationFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    RenderPassCreationFeedbackInfoEXT & operator=( VkRenderPassCreationFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+
+    // Zero-cost conversions to the C struct.
+    operator VkRenderPassCreationFeedbackInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRenderPassCreationFeedbackInfoEXT*>( this );
+    }
+
+    operator VkRenderPassCreationFeedbackInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRenderPassCreationFeedbackInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( postMergeSubpassCount );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( RenderPassCreationFeedbackInfoEXT const & ) const = default;
+#else
+    bool operator==( RenderPassCreationFeedbackInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( postMergeSubpassCount == rhs.postMergeSubpassCount );
+#endif
+    }
+
+    bool operator!=( RenderPassCreationFeedbackInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    uint32_t postMergeSubpassCount = {};
+
+  };
+
+  // C++ wrapper around VkRenderPassCreationFeedbackCreateInfoEXT.
+  // Carries a pointer to a RenderPassCreationFeedbackInfoEXT that the
+  // implementation fills in (an output parameter, hence the non-const pointer).
+  // NOTE(review): assumed layout-identical to the C struct — every conversion
+  // below is a reinterpret_cast, which is only valid under that assumption.
+  struct RenderPassCreationFeedbackCreateInfoEXT
+  {
+    using NativeType = VkRenderPassCreationFeedbackCreateInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassCreationFeedbackCreateInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR RenderPassCreationFeedbackCreateInfoEXT(VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackInfoEXT * pRenderPassFeedback_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), pRenderPassFeedback( pRenderPassFeedback_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR RenderPassCreationFeedbackCreateInfoEXT( RenderPassCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as this wrapper type.
+    RenderPassCreationFeedbackCreateInfoEXT( VkRenderPassCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : RenderPassCreationFeedbackCreateInfoEXT( *reinterpret_cast<RenderPassCreationFeedbackCreateInfoEXT const *>( &rhs ) )
+    {}
+
+
+    RenderPassCreationFeedbackCreateInfoEXT & operator=( RenderPassCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct; relies on the layout compatibility noted above.
+    RenderPassCreationFeedbackCreateInfoEXT & operator=( VkRenderPassCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 RenderPassCreationFeedbackCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderPassCreationFeedbackCreateInfoEXT & setPRenderPassFeedback( VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackInfoEXT * pRenderPassFeedback_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pRenderPassFeedback = pRenderPassFeedback_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views of this wrapper as the underlying C struct.
+    operator VkRenderPassCreationFeedbackCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRenderPassCreationFeedbackCreateInfoEXT*>( this );
+    }
+
+    operator VkRenderPassCreationFeedbackCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRenderPassCreationFeedbackCreateInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackInfoEXT * const &>
+#endif
+      // Tuple of references to all members, in declaration order; used by the
+      // reflection-based operator== below.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, pRenderPassFeedback );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( RenderPassCreationFeedbackCreateInfoEXT const & ) const = default;
+#else
+    // Memberwise equality (shallow: pointers are compared, not pointees).
+    bool operator==( RenderPassCreationFeedbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pRenderPassFeedback == rhs.pRenderPassFeedback );
+#endif
+    }
+
+    bool operator!=( RenderPassCreationFeedbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassCreationFeedbackCreateInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackInfoEXT * pRenderPassFeedback = {};
+
+  };
+
+  // Maps the sType enumerator back to this wrapper for structure-chain lookups.
+  template <>
+  struct CppType<StructureType, StructureType::eRenderPassCreationFeedbackCreateInfoEXT>
+  {
+    using Type = RenderPassCreationFeedbackCreateInfoEXT;
+  };
+
+  // C++ wrapper around VkRenderPassFragmentDensityMapCreateInfoEXT.
+  // Holds a single AttachmentReference (fragmentDensityMapAttachment) by value.
+  // NOTE(review): assumed layout-identical to the C struct — all conversions
+  // below are reinterpret_casts, which is only valid under that assumption.
+  struct RenderPassFragmentDensityMapCreateInfoEXT
+  {
+    using NativeType = VkRenderPassFragmentDensityMapCreateInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassFragmentDensityMapCreateInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR RenderPassFragmentDensityMapCreateInfoEXT(VULKAN_HPP_NAMESPACE::AttachmentReference fragmentDensityMapAttachment_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), fragmentDensityMapAttachment( fragmentDensityMapAttachment_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR RenderPassFragmentDensityMapCreateInfoEXT( RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as this wrapper type.
+    RenderPassFragmentDensityMapCreateInfoEXT( VkRenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : RenderPassFragmentDensityMapCreateInfoEXT( *reinterpret_cast<RenderPassFragmentDensityMapCreateInfoEXT const *>( &rhs ) )
+    {}
+
+
+    RenderPassFragmentDensityMapCreateInfoEXT & operator=( RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct; relies on the layout compatibility noted above.
+    RenderPassFragmentDensityMapCreateInfoEXT & operator=( VkRenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 RenderPassFragmentDensityMapCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderPassFragmentDensityMapCreateInfoEXT & setFragmentDensityMapAttachment( VULKAN_HPP_NAMESPACE::AttachmentReference const & fragmentDensityMapAttachment_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fragmentDensityMapAttachment = fragmentDensityMapAttachment_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views of this wrapper as the underlying C struct.
+    operator VkRenderPassFragmentDensityMapCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRenderPassFragmentDensityMapCreateInfoEXT*>( this );
+    }
+
+    operator VkRenderPassFragmentDensityMapCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRenderPassFragmentDensityMapCreateInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::AttachmentReference const &>
+#endif
+      // Tuple of references to all members, in declaration order; used by the
+      // reflection-based operator== below.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, fragmentDensityMapAttachment );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( RenderPassFragmentDensityMapCreateInfoEXT const & ) const = default;
+#else
+    // Memberwise equality (pNext compared as a raw pointer).
+    bool operator==( RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( fragmentDensityMapAttachment == rhs.fragmentDensityMapAttachment );
+#endif
+    }
+
+    bool operator!=( RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassFragmentDensityMapCreateInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::AttachmentReference fragmentDensityMapAttachment = {};
+
+  };
+
+  // Maps the sType enumerator back to this wrapper for structure-chain lookups.
+  template <>
+  struct CppType<StructureType, StructureType::eRenderPassFragmentDensityMapCreateInfoEXT>
+  {
+    using Type = RenderPassFragmentDensityMapCreateInfoEXT;
+  };
+
+  // C++ wrapper around VkRenderPassInputAttachmentAspectCreateInfo.
+  // Owns a (count, pointer) pair describing an external array of
+  // InputAttachmentAspectReference; the pointed-to array is NOT owned or copied.
+  // NOTE(review): assumed layout-identical to the C struct — all conversions
+  // below are reinterpret_casts, which is only valid under that assumption.
+  struct RenderPassInputAttachmentAspectCreateInfo
+  {
+    using NativeType = VkRenderPassInputAttachmentAspectCreateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassInputAttachmentAspectCreateInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR RenderPassInputAttachmentAspectCreateInfo(uint32_t aspectReferenceCount_ = {}, const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference * pAspectReferences_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), aspectReferenceCount( aspectReferenceCount_ ), pAspectReferences( pAspectReferences_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR RenderPassInputAttachmentAspectCreateInfo( RenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as this wrapper type.
+    RenderPassInputAttachmentAspectCreateInfo( VkRenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : RenderPassInputAttachmentAspectCreateInfo( *reinterpret_cast<RenderPassInputAttachmentAspectCreateInfo const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Convenience ctor: derives count and pointer from a single array proxy,
+    // keeping the two fields consistent. The proxy must outlive this struct.
+    RenderPassInputAttachmentAspectCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference> const & aspectReferences_, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), aspectReferenceCount( static_cast<uint32_t>( aspectReferences_.size() ) ), pAspectReferences( aspectReferences_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    RenderPassInputAttachmentAspectCreateInfo & operator=( RenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct; relies on the layout compatibility noted above.
+    RenderPassInputAttachmentAspectCreateInfo & operator=( VkRenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassInputAttachmentAspectCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 RenderPassInputAttachmentAspectCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderPassInputAttachmentAspectCreateInfo & setAspectReferenceCount( uint32_t aspectReferenceCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      aspectReferenceCount = aspectReferenceCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderPassInputAttachmentAspectCreateInfo & setPAspectReferences( const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference * pAspectReferences_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAspectReferences = pAspectReferences_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Proxy-based setter: updates count and pointer together.
+    RenderPassInputAttachmentAspectCreateInfo & setAspectReferences( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference> const & aspectReferences_ ) VULKAN_HPP_NOEXCEPT
+    {
+      aspectReferenceCount = static_cast<uint32_t>( aspectReferences_.size() );
+      pAspectReferences = aspectReferences_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views of this wrapper as the underlying C struct.
+    operator VkRenderPassInputAttachmentAspectCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRenderPassInputAttachmentAspectCreateInfo*>( this );
+    }
+
+    operator VkRenderPassInputAttachmentAspectCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRenderPassInputAttachmentAspectCreateInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference * const &>
+#endif
+      // Tuple of references to all members, in declaration order; used by the
+      // reflection-based operator== below.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, aspectReferenceCount, pAspectReferences );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( RenderPassInputAttachmentAspectCreateInfo const & ) const = default;
+#else
+    // Memberwise equality (shallow: array pointer compared, not its contents).
+    bool operator==( RenderPassInputAttachmentAspectCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( aspectReferenceCount == rhs.aspectReferenceCount )
+          && ( pAspectReferences == rhs.pAspectReferences );
+#endif
+    }
+
+    bool operator!=( RenderPassInputAttachmentAspectCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassInputAttachmentAspectCreateInfo;
+    const void * pNext = {};
+    uint32_t aspectReferenceCount = {};
+    const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference * pAspectReferences = {};
+
+  };
+
+  // Maps the sType enumerator back to this wrapper for structure-chain lookups.
+  template <>
+  struct CppType<StructureType, StructureType::eRenderPassInputAttachmentAspectCreateInfo>
+  {
+    using Type = RenderPassInputAttachmentAspectCreateInfo;
+  };
+  // KHR alias for the promoted (core) structure name.
+  using RenderPassInputAttachmentAspectCreateInfoKHR = RenderPassInputAttachmentAspectCreateInfo;
+
+  // C++ wrapper around VkRenderPassMultiviewCreateInfo.
+  // Holds three (count, pointer) pairs referencing external arrays
+  // (view masks, view offsets, correlation masks); none are owned or copied.
+  // The ArrayProxy ctor/setters exist to keep each count/pointer pair in sync.
+  // NOTE(review): assumed layout-identical to the C struct — all conversions
+  // below are reinterpret_casts, which is only valid under that assumption.
+  struct RenderPassMultiviewCreateInfo
+  {
+    using NativeType = VkRenderPassMultiviewCreateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassMultiviewCreateInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR RenderPassMultiviewCreateInfo(uint32_t subpassCount_ = {}, const uint32_t * pViewMasks_ = {}, uint32_t dependencyCount_ = {}, const int32_t * pViewOffsets_ = {}, uint32_t correlationMaskCount_ = {}, const uint32_t * pCorrelationMasks_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), subpassCount( subpassCount_ ), pViewMasks( pViewMasks_ ), dependencyCount( dependencyCount_ ), pViewOffsets( pViewOffsets_ ), correlationMaskCount( correlationMaskCount_ ), pCorrelationMasks( pCorrelationMasks_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR RenderPassMultiviewCreateInfo( RenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as this wrapper type.
+    RenderPassMultiviewCreateInfo( VkRenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : RenderPassMultiviewCreateInfo( *reinterpret_cast<RenderPassMultiviewCreateInfo const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Convenience ctor: all three count/pointer pairs derived from array
+    // proxies. The referenced arrays must outlive this struct.
+    RenderPassMultiviewCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & viewMasks_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const int32_t> const & viewOffsets_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & correlationMasks_ = {}, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), subpassCount( static_cast<uint32_t>( viewMasks_.size() ) ), pViewMasks( viewMasks_.data() ), dependencyCount( static_cast<uint32_t>( viewOffsets_.size() ) ), pViewOffsets( viewOffsets_.data() ), correlationMaskCount( static_cast<uint32_t>( correlationMasks_.size() ) ), pCorrelationMasks( correlationMasks_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    RenderPassMultiviewCreateInfo & operator=( RenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct; relies on the layout compatibility noted above.
+    RenderPassMultiviewCreateInfo & operator=( VkRenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassMultiviewCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setSubpassCount( uint32_t subpassCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subpassCount = subpassCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setPViewMasks( const uint32_t * pViewMasks_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pViewMasks = pViewMasks_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Proxy-based setter: updates subpassCount and pViewMasks together.
+    RenderPassMultiviewCreateInfo & setViewMasks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & viewMasks_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subpassCount = static_cast<uint32_t>( viewMasks_.size() );
+      pViewMasks = viewMasks_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dependencyCount = dependencyCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setPViewOffsets( const int32_t * pViewOffsets_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pViewOffsets = pViewOffsets_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Proxy-based setter: updates dependencyCount and pViewOffsets together.
+    RenderPassMultiviewCreateInfo & setViewOffsets( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const int32_t> const & viewOffsets_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dependencyCount = static_cast<uint32_t>( viewOffsets_.size() );
+      pViewOffsets = viewOffsets_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setCorrelationMaskCount( uint32_t correlationMaskCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      correlationMaskCount = correlationMaskCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setPCorrelationMasks( const uint32_t * pCorrelationMasks_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pCorrelationMasks = pCorrelationMasks_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Proxy-based setter: updates correlationMaskCount and pCorrelationMasks together.
+    RenderPassMultiviewCreateInfo & setCorrelationMasks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & correlationMasks_ ) VULKAN_HPP_NOEXCEPT
+    {
+      correlationMaskCount = static_cast<uint32_t>( correlationMasks_.size() );
+      pCorrelationMasks = correlationMasks_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views of this wrapper as the underlying C struct.
+    operator VkRenderPassMultiviewCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRenderPassMultiviewCreateInfo*>( this );
+    }
+
+    operator VkRenderPassMultiviewCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRenderPassMultiviewCreateInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const uint32_t * const &, uint32_t const &, const int32_t * const &, uint32_t const &, const uint32_t * const &>
+#endif
+      // Tuple of references to all members, in declaration order; used by the
+      // reflection-based operator== below.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, subpassCount, pViewMasks, dependencyCount, pViewOffsets, correlationMaskCount, pCorrelationMasks );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( RenderPassMultiviewCreateInfo const & ) const = default;
+#else
+    // Memberwise equality (shallow: array pointers compared, not contents).
+    bool operator==( RenderPassMultiviewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( subpassCount == rhs.subpassCount )
+          && ( pViewMasks == rhs.pViewMasks )
+          && ( dependencyCount == rhs.dependencyCount )
+          && ( pViewOffsets == rhs.pViewOffsets )
+          && ( correlationMaskCount == rhs.correlationMaskCount )
+          && ( pCorrelationMasks == rhs.pCorrelationMasks );
+#endif
+    }
+
+    bool operator!=( RenderPassMultiviewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassMultiviewCreateInfo;
+    const void * pNext = {};
+    uint32_t subpassCount = {};
+    const uint32_t * pViewMasks = {};
+    uint32_t dependencyCount = {};
+    const int32_t * pViewOffsets = {};
+    uint32_t correlationMaskCount = {};
+    const uint32_t * pCorrelationMasks = {};
+
+  };
+
+  // Maps the sType enumerator back to this wrapper for structure-chain lookups.
+  template <>
+  struct CppType<StructureType, StructureType::eRenderPassMultiviewCreateInfo>
+  {
+    using Type = RenderPassMultiviewCreateInfo;
+  };
+  // KHR alias for the promoted (core) structure name.
+  using RenderPassMultiviewCreateInfoKHR = RenderPassMultiviewCreateInfo;
+
+  // C++ wrapper around VkSubpassSampleLocationsEXT. Plain data struct (no
+  // sType/pNext): pairs a subpass index with a SampleLocationsInfoEXT value.
+  // NOTE(review): assumed layout-identical to the C struct — all conversions
+  // below are reinterpret_casts, which is only valid under that assumption.
+  struct SubpassSampleLocationsEXT
+  {
+    using NativeType = VkSubpassSampleLocationsEXT;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR SubpassSampleLocationsEXT(uint32_t subpassIndex_ = {}, VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {}) VULKAN_HPP_NOEXCEPT
+    : subpassIndex( subpassIndex_ ), sampleLocationsInfo( sampleLocationsInfo_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SubpassSampleLocationsEXT( SubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as this wrapper type.
+    SubpassSampleLocationsEXT( VkSubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SubpassSampleLocationsEXT( *reinterpret_cast<SubpassSampleLocationsEXT const *>( &rhs ) )
+    {}
+
+
+    SubpassSampleLocationsEXT & operator=( SubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct; relies on the layout compatibility noted above.
+    SubpassSampleLocationsEXT & operator=( VkSubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SubpassSampleLocationsEXT & setSubpassIndex( uint32_t subpassIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subpassIndex = subpassIndex_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SubpassSampleLocationsEXT & setSampleLocationsInfo( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sampleLocationsInfo = sampleLocationsInfo_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views of this wrapper as the underlying C struct.
+    operator VkSubpassSampleLocationsEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSubpassSampleLocationsEXT*>( this );
+    }
+
+    operator VkSubpassSampleLocationsEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSubpassSampleLocationsEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<uint32_t const &, VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const &>
+#endif
+      // Tuple of references to both members, in declaration order; used by the
+      // reflection-based operator== below.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( subpassIndex, sampleLocationsInfo );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SubpassSampleLocationsEXT const & ) const = default;
+#else
+    // Memberwise equality.
+    bool operator==( SubpassSampleLocationsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( subpassIndex == rhs.subpassIndex )
+          && ( sampleLocationsInfo == rhs.sampleLocationsInfo );
+#endif
+    }
+
+    bool operator!=( SubpassSampleLocationsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    uint32_t subpassIndex = {};
+    VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo = {};
+
+  };
+
+  // C++ wrapper around VkRenderPassSampleLocationsBeginInfoEXT.
+  // Holds two (count, pointer) pairs referencing external arrays of
+  // AttachmentSampleLocationsEXT and SubpassSampleLocationsEXT; neither array
+  // is owned or copied. The ArrayProxy ctor/setters keep each pair in sync.
+  // NOTE(review): assumed layout-identical to the C struct — all conversions
+  // below are reinterpret_casts, which is only valid under that assumption.
+  struct RenderPassSampleLocationsBeginInfoEXT
+  {
+    using NativeType = VkRenderPassSampleLocationsBeginInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassSampleLocationsBeginInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR RenderPassSampleLocationsBeginInfoEXT(uint32_t attachmentInitialSampleLocationsCount_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT * pAttachmentInitialSampleLocations_ = {}, uint32_t postSubpassSampleLocationsCount_ = {}, const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT * pPostSubpassSampleLocations_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), attachmentInitialSampleLocationsCount( attachmentInitialSampleLocationsCount_ ), pAttachmentInitialSampleLocations( pAttachmentInitialSampleLocations_ ), postSubpassSampleLocationsCount( postSubpassSampleLocationsCount_ ), pPostSubpassSampleLocations( pPostSubpassSampleLocations_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR RenderPassSampleLocationsBeginInfoEXT( RenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as this wrapper type.
+    RenderPassSampleLocationsBeginInfoEXT( VkRenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : RenderPassSampleLocationsBeginInfoEXT( *reinterpret_cast<RenderPassSampleLocationsBeginInfoEXT const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Convenience ctor: both count/pointer pairs derived from array proxies.
+    // The referenced arrays must outlive this struct.
+    RenderPassSampleLocationsBeginInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT> const & attachmentInitialSampleLocations_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT> const & postSubpassSampleLocations_ = {}, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), attachmentInitialSampleLocationsCount( static_cast<uint32_t>( attachmentInitialSampleLocations_.size() ) ), pAttachmentInitialSampleLocations( attachmentInitialSampleLocations_.data() ), postSubpassSampleLocationsCount( static_cast<uint32_t>( postSubpassSampleLocations_.size() ) ), pPostSubpassSampleLocations( postSubpassSampleLocations_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    RenderPassSampleLocationsBeginInfoEXT & operator=( RenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct; relies on the layout compatibility noted above.
+    RenderPassSampleLocationsBeginInfoEXT & operator=( VkRenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassSampleLocationsBeginInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT & setAttachmentInitialSampleLocationsCount( uint32_t attachmentInitialSampleLocationsCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachmentInitialSampleLocationsCount = attachmentInitialSampleLocationsCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT & setPAttachmentInitialSampleLocations( const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT * pAttachmentInitialSampleLocations_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAttachmentInitialSampleLocations = pAttachmentInitialSampleLocations_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Proxy-based setter: updates count and pointer together.
+    RenderPassSampleLocationsBeginInfoEXT & setAttachmentInitialSampleLocations( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT> const & attachmentInitialSampleLocations_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachmentInitialSampleLocationsCount = static_cast<uint32_t>( attachmentInitialSampleLocations_.size() );
+      pAttachmentInitialSampleLocations = attachmentInitialSampleLocations_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT & setPostSubpassSampleLocationsCount( uint32_t postSubpassSampleLocationsCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      postSubpassSampleLocationsCount = postSubpassSampleLocationsCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT & setPPostSubpassSampleLocations( const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT * pPostSubpassSampleLocations_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pPostSubpassSampleLocations = pPostSubpassSampleLocations_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Proxy-based setter: updates count and pointer together.
+    RenderPassSampleLocationsBeginInfoEXT & setPostSubpassSampleLocations( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT> const & postSubpassSampleLocations_ ) VULKAN_HPP_NOEXCEPT
+    {
+      postSubpassSampleLocationsCount = static_cast<uint32_t>( postSubpassSampleLocations_.size() );
+      pPostSubpassSampleLocations = postSubpassSampleLocations_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views of this wrapper as the underlying C struct.
+    operator VkRenderPassSampleLocationsBeginInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRenderPassSampleLocationsBeginInfoEXT*>( this );
+    }
+
+    operator VkRenderPassSampleLocationsBeginInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRenderPassSampleLocationsBeginInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT * const &>
+#endif
+      // Tuple of references to all members, in declaration order; used by the
+      // reflection-based operator== below.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, attachmentInitialSampleLocationsCount, pAttachmentInitialSampleLocations, postSubpassSampleLocationsCount, pPostSubpassSampleLocations );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( RenderPassSampleLocationsBeginInfoEXT const & ) const = default;
+#else
+    // Memberwise equality (shallow: array pointers compared, not contents).
+    bool operator==( RenderPassSampleLocationsBeginInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( attachmentInitialSampleLocationsCount == rhs.attachmentInitialSampleLocationsCount )
+          && ( pAttachmentInitialSampleLocations == rhs.pAttachmentInitialSampleLocations )
+          && ( postSubpassSampleLocationsCount == rhs.postSubpassSampleLocationsCount )
+          && ( pPostSubpassSampleLocations == rhs.pPostSubpassSampleLocations );
+#endif
+    }
+
+    bool operator!=( RenderPassSampleLocationsBeginInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassSampleLocationsBeginInfoEXT;
+    const void * pNext = {};
+    uint32_t attachmentInitialSampleLocationsCount = {};
+    const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT * pAttachmentInitialSampleLocations = {};
+    uint32_t postSubpassSampleLocationsCount = {};
+    const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT * pPostSubpassSampleLocations = {};
+
+  };
+
+  // Maps the sType enumerator back to this wrapper for structure-chain lookups.
+  template <>
+  struct CppType<StructureType, StructureType::eRenderPassSampleLocationsBeginInfoEXT>
+  {
+    using Type = RenderPassSampleLocationsBeginInfoEXT;
+  };
+
+  struct RenderPassSubpassFeedbackInfoEXT
+  {
+    using NativeType = VkRenderPassSubpassFeedbackInfoEXT;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR_14 RenderPassSubpassFeedbackInfoEXT(VULKAN_HPP_NAMESPACE::SubpassMergeStatusEXT subpassMergeStatus_ = VULKAN_HPP_NAMESPACE::SubpassMergeStatusEXT::eMerged, std::array<char,VK_MAX_DESCRIPTION_SIZE> const & description_ = {}, uint32_t postMergeIndex_ = {}) VULKAN_HPP_NOEXCEPT
+    : subpassMergeStatus( subpassMergeStatus_ ), description( description_ ), postMergeIndex( postMergeIndex_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 RenderPassSubpassFeedbackInfoEXT( RenderPassSubpassFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    RenderPassSubpassFeedbackInfoEXT( VkRenderPassSubpassFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : RenderPassSubpassFeedbackInfoEXT( *reinterpret_cast<RenderPassSubpassFeedbackInfoEXT const *>( &rhs ) )
+    {}
+
+
+    RenderPassSubpassFeedbackInfoEXT & operator=( RenderPassSubpassFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    RenderPassSubpassFeedbackInfoEXT & operator=( VkRenderPassSubpassFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkRenderPassSubpassFeedbackInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRenderPassSubpassFeedbackInfoEXT*>( this );
+    }
+
+    operator VkRenderPassSubpassFeedbackInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRenderPassSubpassFeedbackInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::SubpassMergeStatusEXT const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( subpassMergeStatus, description, postMergeIndex );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( RenderPassSubpassFeedbackInfoEXT const & ) const = default;
+#else
+    bool operator==( RenderPassSubpassFeedbackInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( subpassMergeStatus == rhs.subpassMergeStatus )
+          && ( description == rhs.description )
+          && ( postMergeIndex == rhs.postMergeIndex );
+#endif
+    }
+
+    bool operator!=( RenderPassSubpassFeedbackInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::SubpassMergeStatusEXT subpassMergeStatus = VULKAN_HPP_NAMESPACE::SubpassMergeStatusEXT::eMerged;
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
+    uint32_t postMergeIndex = {};
+
+  };
+
+  struct RenderPassSubpassFeedbackCreateInfoEXT
+  {
+    using NativeType = VkRenderPassSubpassFeedbackCreateInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassSubpassFeedbackCreateInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR_14 RenderPassSubpassFeedbackCreateInfoEXT(VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackInfoEXT * pSubpassFeedback_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), pSubpassFeedback( pSubpassFeedback_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 RenderPassSubpassFeedbackCreateInfoEXT( RenderPassSubpassFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    RenderPassSubpassFeedbackCreateInfoEXT( VkRenderPassSubpassFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : RenderPassSubpassFeedbackCreateInfoEXT( *reinterpret_cast<RenderPassSubpassFeedbackCreateInfoEXT const *>( &rhs ) )
+    {}
+
+
+    RenderPassSubpassFeedbackCreateInfoEXT & operator=( RenderPassSubpassFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    RenderPassSubpassFeedbackCreateInfoEXT & operator=( VkRenderPassSubpassFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 RenderPassSubpassFeedbackCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderPassSubpassFeedbackCreateInfoEXT & setPSubpassFeedback( VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackInfoEXT * pSubpassFeedback_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSubpassFeedback = pSubpassFeedback_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkRenderPassSubpassFeedbackCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRenderPassSubpassFeedbackCreateInfoEXT*>( this );
+    }
+
+    operator VkRenderPassSubpassFeedbackCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRenderPassSubpassFeedbackCreateInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackInfoEXT * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, pSubpassFeedback );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( RenderPassSubpassFeedbackCreateInfoEXT const & ) const = default;
+#else
+    bool operator==( RenderPassSubpassFeedbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pSubpassFeedback == rhs.pSubpassFeedback );
+#endif
+    }
+
+    bool operator!=( RenderPassSubpassFeedbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassSubpassFeedbackCreateInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackInfoEXT * pSubpassFeedback = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eRenderPassSubpassFeedbackCreateInfoEXT>
+  {
+    using Type = RenderPassSubpassFeedbackCreateInfoEXT;
+  };
+
+  struct RenderPassTransformBeginInfoQCOM
+  {
+    using NativeType = VkRenderPassTransformBeginInfoQCOM;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassTransformBeginInfoQCOM;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR RenderPassTransformBeginInfoQCOM(VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), transform( transform_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR RenderPassTransformBeginInfoQCOM( RenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    RenderPassTransformBeginInfoQCOM( VkRenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+      : RenderPassTransformBeginInfoQCOM( *reinterpret_cast<RenderPassTransformBeginInfoQCOM const *>( &rhs ) )
+    {}
+
+
+    RenderPassTransformBeginInfoQCOM & operator=( RenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    RenderPassTransformBeginInfoQCOM & operator=( VkRenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassTransformBeginInfoQCOM const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 RenderPassTransformBeginInfoQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderPassTransformBeginInfoQCOM & setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT
+    {
+      transform = transform_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkRenderPassTransformBeginInfoQCOM const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRenderPassTransformBeginInfoQCOM*>( this );
+    }
+
+    operator VkRenderPassTransformBeginInfoQCOM &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRenderPassTransformBeginInfoQCOM*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, transform );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( RenderPassTransformBeginInfoQCOM const & ) const = default;
+#else
+    bool operator==( RenderPassTransformBeginInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( transform == rhs.transform );
+#endif
+    }
+
+    bool operator!=( RenderPassTransformBeginInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassTransformBeginInfoQCOM;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eRenderPassTransformBeginInfoQCOM>
+  {
+    using Type = RenderPassTransformBeginInfoQCOM;
+  };
+
+  struct RenderingAttachmentInfo
+  {
+    using NativeType = VkRenderingAttachmentInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderingAttachmentInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo(VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::ResolveModeFlagBits resolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone, VULKAN_HPP_NAMESPACE::ImageView resolveImageView_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout resolveImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, VULKAN_HPP_NAMESPACE::ClearValue clearValue_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), imageView( imageView_ ), imageLayout( imageLayout_ ), resolveMode( resolveMode_ ), resolveImageView( resolveImageView_ ), resolveImageLayout( resolveImageLayout_ ), loadOp( loadOp_ ), storeOp( storeOp_ ), clearValue( clearValue_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo( RenderingAttachmentInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    RenderingAttachmentInfo( VkRenderingAttachmentInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : RenderingAttachmentInfo( *reinterpret_cast<RenderingAttachmentInfo const *>( &rhs ) )
+    {}
+
+
+    RenderingAttachmentInfo & operator=( RenderingAttachmentInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    RenderingAttachmentInfo & operator=( VkRenderingAttachmentInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageView = imageView_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageLayout = imageLayout_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setResolveMode( VULKAN_HPP_NAMESPACE::ResolveModeFlagBits resolveMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      resolveMode = resolveMode_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setResolveImageView( VULKAN_HPP_NAMESPACE::ImageView resolveImageView_ ) VULKAN_HPP_NOEXCEPT
+    {
+      resolveImageView = resolveImageView_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setResolveImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout resolveImageLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      resolveImageLayout = resolveImageLayout_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      loadOp = loadOp_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      storeOp = storeOp_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setClearValue( VULKAN_HPP_NAMESPACE::ClearValue const & clearValue_ ) VULKAN_HPP_NOEXCEPT
+    {
+      clearValue = clearValue_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkRenderingAttachmentInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRenderingAttachmentInfo*>( this );
+    }
+
+    operator VkRenderingAttachmentInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRenderingAttachmentInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageView const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, VULKAN_HPP_NAMESPACE::ResolveModeFlagBits const &, VULKAN_HPP_NAMESPACE::ImageView const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, VULKAN_HPP_NAMESPACE::AttachmentLoadOp const &, VULKAN_HPP_NAMESPACE::AttachmentStoreOp const &, VULKAN_HPP_NAMESPACE::ClearValue const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, imageView, imageLayout, resolveMode, resolveImageView, resolveImageLayout, loadOp, storeOp, clearValue );
+    }
+#endif
+
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderingAttachmentInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ImageView imageView = {};
+    VULKAN_HPP_NAMESPACE::ImageLayout imageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+    VULKAN_HPP_NAMESPACE::ResolveModeFlagBits resolveMode = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone;
+    VULKAN_HPP_NAMESPACE::ImageView resolveImageView = {};
+    VULKAN_HPP_NAMESPACE::ImageLayout resolveImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+    VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad;
+    VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore;
+    VULKAN_HPP_NAMESPACE::ClearValue clearValue = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eRenderingAttachmentInfo>
+  {
+    using Type = RenderingAttachmentInfo;
+  };
+  using RenderingAttachmentInfoKHR = RenderingAttachmentInfo;
+
+  struct RenderingFragmentDensityMapAttachmentInfoEXT
+  {
+    using NativeType = VkRenderingFragmentDensityMapAttachmentInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderingFragmentDensityMapAttachmentInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR RenderingFragmentDensityMapAttachmentInfoEXT(VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), imageView( imageView_ ), imageLayout( imageLayout_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR RenderingFragmentDensityMapAttachmentInfoEXT( RenderingFragmentDensityMapAttachmentInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    RenderingFragmentDensityMapAttachmentInfoEXT( VkRenderingFragmentDensityMapAttachmentInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : RenderingFragmentDensityMapAttachmentInfoEXT( *reinterpret_cast<RenderingFragmentDensityMapAttachmentInfoEXT const *>( &rhs ) )
+    {}
+
+
+    RenderingFragmentDensityMapAttachmentInfoEXT & operator=( RenderingFragmentDensityMapAttachmentInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    RenderingFragmentDensityMapAttachmentInfoEXT & operator=( VkRenderingFragmentDensityMapAttachmentInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderingFragmentDensityMapAttachmentInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 RenderingFragmentDensityMapAttachmentInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderingFragmentDensityMapAttachmentInfoEXT & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageView = imageView_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderingFragmentDensityMapAttachmentInfoEXT & setImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageLayout = imageLayout_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkRenderingFragmentDensityMapAttachmentInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRenderingFragmentDensityMapAttachmentInfoEXT*>( this );
+    }
+
+    operator VkRenderingFragmentDensityMapAttachmentInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRenderingFragmentDensityMapAttachmentInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageView const &, VULKAN_HPP_NAMESPACE::ImageLayout const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, imageView, imageLayout );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( RenderingFragmentDensityMapAttachmentInfoEXT const & ) const = default;
+#else
+    bool operator==( RenderingFragmentDensityMapAttachmentInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( imageView == rhs.imageView )
+          && ( imageLayout == rhs.imageLayout );
+#endif
+    }
+
+    bool operator!=( RenderingFragmentDensityMapAttachmentInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderingFragmentDensityMapAttachmentInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ImageView imageView = {};
+    VULKAN_HPP_NAMESPACE::ImageLayout imageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eRenderingFragmentDensityMapAttachmentInfoEXT>
+  {
+    using Type = RenderingFragmentDensityMapAttachmentInfoEXT;
+  };
+
+  struct RenderingFragmentShadingRateAttachmentInfoKHR
+  {
+    using NativeType = VkRenderingFragmentShadingRateAttachmentInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderingFragmentShadingRateAttachmentInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR RenderingFragmentShadingRateAttachmentInfoKHR(VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::Extent2D shadingRateAttachmentTexelSize_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), imageView( imageView_ ), imageLayout( imageLayout_ ), shadingRateAttachmentTexelSize( shadingRateAttachmentTexelSize_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR RenderingFragmentShadingRateAttachmentInfoKHR( RenderingFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    RenderingFragmentShadingRateAttachmentInfoKHR( VkRenderingFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : RenderingFragmentShadingRateAttachmentInfoKHR( *reinterpret_cast<RenderingFragmentShadingRateAttachmentInfoKHR const *>( &rhs ) )
+    {}
+
+
+    RenderingFragmentShadingRateAttachmentInfoKHR & operator=( RenderingFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    RenderingFragmentShadingRateAttachmentInfoKHR & operator=( VkRenderingFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderingFragmentShadingRateAttachmentInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 RenderingFragmentShadingRateAttachmentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderingFragmentShadingRateAttachmentInfoKHR & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageView = imageView_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderingFragmentShadingRateAttachmentInfoKHR & setImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageLayout = imageLayout_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderingFragmentShadingRateAttachmentInfoKHR & setShadingRateAttachmentTexelSize( VULKAN_HPP_NAMESPACE::Extent2D const & shadingRateAttachmentTexelSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shadingRateAttachmentTexelSize = shadingRateAttachmentTexelSize_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkRenderingFragmentShadingRateAttachmentInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRenderingFragmentShadingRateAttachmentInfoKHR*>( this );
+    }
+
+    operator VkRenderingFragmentShadingRateAttachmentInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRenderingFragmentShadingRateAttachmentInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageView const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, VULKAN_HPP_NAMESPACE::Extent2D const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, imageView, imageLayout, shadingRateAttachmentTexelSize );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( RenderingFragmentShadingRateAttachmentInfoKHR const & ) const = default;
+#else
+    bool operator==( RenderingFragmentShadingRateAttachmentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( imageView == rhs.imageView )
+          && ( imageLayout == rhs.imageLayout )
+          && ( shadingRateAttachmentTexelSize == rhs.shadingRateAttachmentTexelSize );
+#endif
+    }
+
+    bool operator!=( RenderingFragmentShadingRateAttachmentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderingFragmentShadingRateAttachmentInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ImageView imageView = {};
+    VULKAN_HPP_NAMESPACE::ImageLayout imageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+    VULKAN_HPP_NAMESPACE::Extent2D shadingRateAttachmentTexelSize = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eRenderingFragmentShadingRateAttachmentInfoKHR>
+  {
+    using Type = RenderingFragmentShadingRateAttachmentInfoKHR;
+  };
+
+  struct RenderingInfo
+  {
+    using NativeType = VkRenderingInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderingInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR_14 RenderingInfo(VULKAN_HPP_NAMESPACE::RenderingFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Rect2D renderArea_ = {}, uint32_t layerCount_ = {}, uint32_t viewMask_ = {}, uint32_t colorAttachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pColorAttachments_ = {}, const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pDepthAttachment_ = {}, const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pStencilAttachment_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), renderArea( renderArea_ ), layerCount( layerCount_ ), viewMask( viewMask_ ), colorAttachmentCount( colorAttachmentCount_ ), pColorAttachments( pColorAttachments_ ), pDepthAttachment( pDepthAttachment_ ), pStencilAttachment( pStencilAttachment_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 RenderingInfo( RenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    RenderingInfo( VkRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : RenderingInfo( *reinterpret_cast<RenderingInfo const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    RenderingInfo( VULKAN_HPP_NAMESPACE::RenderingFlags flags_, VULKAN_HPP_NAMESPACE::Rect2D renderArea_, uint32_t layerCount_, uint32_t viewMask_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo> const & colorAttachments_, const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pDepthAttachment_ = {}, const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pStencilAttachment_ = {}, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), flags( flags_ ), renderArea( renderArea_ ), layerCount( layerCount_ ), viewMask( viewMask_ ), colorAttachmentCount( static_cast<uint32_t>( colorAttachments_.size() ) ), pColorAttachments( colorAttachments_.data() ), pDepthAttachment( pDepthAttachment_ ), pStencilAttachment( pStencilAttachment_ )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    RenderingInfo & operator=( RenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    RenderingInfo & operator=( VkRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderingInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setFlags( VULKAN_HPP_NAMESPACE::RenderingFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setRenderArea( VULKAN_HPP_NAMESPACE::Rect2D const & renderArea_ ) VULKAN_HPP_NOEXCEPT
+    {
+      renderArea = renderArea_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      layerCount = layerCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      viewMask = viewMask_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      colorAttachmentCount = colorAttachmentCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setPColorAttachments( const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pColorAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pColorAttachments = pColorAttachments_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    RenderingInfo & setColorAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo> const & colorAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      colorAttachmentCount = static_cast<uint32_t>( colorAttachments_.size() );
+      pColorAttachments = colorAttachments_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setPDepthAttachment( const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pDepthAttachment_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDepthAttachment = pDepthAttachment_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setPStencilAttachment( const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pStencilAttachment_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pStencilAttachment = pStencilAttachment_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkRenderingInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRenderingInfo*>( this );
+    }
+
+    operator VkRenderingInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRenderingInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::RenderingFlags const &, VULKAN_HPP_NAMESPACE::Rect2D const &, uint32_t const &, uint32_t const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * const &, const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * const &, const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, renderArea, layerCount, viewMask, colorAttachmentCount, pColorAttachments, pDepthAttachment, pStencilAttachment );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( RenderingInfo const & ) const = default;
+#else
+    bool operator==( RenderingInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( renderArea == rhs.renderArea )
+          && ( layerCount == rhs.layerCount )
+          && ( viewMask == rhs.viewMask )
+          && ( colorAttachmentCount == rhs.colorAttachmentCount )
+          && ( pColorAttachments == rhs.pColorAttachments )
+          && ( pDepthAttachment == rhs.pDepthAttachment )
+          && ( pStencilAttachment == rhs.pStencilAttachment );
+#endif
+    }
+
+    bool operator!=( RenderingInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderingInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::RenderingFlags flags = {};
+    VULKAN_HPP_NAMESPACE::Rect2D renderArea = {};
+    uint32_t layerCount = {};
+    uint32_t viewMask = {};
+    uint32_t colorAttachmentCount = {};
+    const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pColorAttachments = {};
+    const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pDepthAttachment = {};
+    const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pStencilAttachment = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eRenderingInfo>
+  {
+    using Type = RenderingInfo;
+  };
+  using RenderingInfoKHR = RenderingInfo;
+
+  // C++ wrapper for VkResolveImageInfo2. The member order/types mirror the C
+  // struct exactly; the conversion operators below rely on that layout match
+  // to reinterpret_cast between the two representations.
+  struct ResolveImageInfo2
+  {
+    using NativeType = VkResolveImageInfo2;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eResolveImageInfo2;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ResolveImageInfo2(VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::ImageResolve2 * pRegions_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstImage( dstImage_ ), dstImageLayout( dstImageLayout_ ), regionCount( regionCount_ ), pRegions( pRegions_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ResolveImageInfo2( ResolveImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; a pure bitwise copy via reinterpret_cast.
+    ResolveImageInfo2( VkResolveImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ResolveImageInfo2( *reinterpret_cast<ResolveImageInfo2 const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Convenience constructor: derives regionCount/pRegions from an ArrayProxy.
+    // The proxy does not own its data; the caller must keep the regions alive.
+    ResolveImageInfo2( VULKAN_HPP_NAMESPACE::Image srcImage_, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, VULKAN_HPP_NAMESPACE::Image dstImage_, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageResolve2> const & regions_, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstImage( dstImage_ ), dstImageLayout( dstImageLayout_ ), regionCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    ResolveImageInfo2 & operator=( ResolveImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ResolveImageInfo2 & operator=( VkResolveImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ResolveImageInfo2 const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each assigns one member and returns *this for chaining.
+    VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcImage = srcImage_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcImageLayout = srcImageLayout_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstImage = dstImage_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstImageLayout = dstImageLayout_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      regionCount = regionCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setPRegions( const VULKAN_HPP_NAMESPACE::ImageResolve2 * pRegions_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pRegions = pRegions_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets regionCount and pRegions together from a single ArrayProxy.
+    ResolveImageInfo2 & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageResolve2> const & regions_ ) VULKAN_HPP_NOEXCEPT
+    {
+      regionCount = static_cast<uint32_t>( regions_.size() );
+      pRegions = regions_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost view as the C struct, for passing to the C API.
+    operator VkResolveImageInfo2 const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkResolveImageInfo2*>( this );
+    }
+
+    operator VkResolveImageInfo2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkResolveImageInfo2*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Image const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, VULKAN_HPP_NAMESPACE::Image const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::ImageResolve2 * const &>
+#endif
+      // Returns a tuple of references over all members, in declaration order.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ResolveImageInfo2 const & ) const = default;
+#else
+    // Member-wise equality. Note: pNext and pRegions are compared by pointer
+    // value, not by the contents they point to (shallow comparison).
+    bool operator==( ResolveImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( srcImage == rhs.srcImage )
+          && ( srcImageLayout == rhs.srcImageLayout )
+          && ( dstImage == rhs.dstImage )
+          && ( dstImageLayout == rhs.dstImageLayout )
+          && ( regionCount == rhs.regionCount )
+          && ( pRegions == rhs.pRegions );
+#endif
+    }
+
+    bool operator!=( ResolveImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eResolveImageInfo2;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Image srcImage = {};
+    VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+    VULKAN_HPP_NAMESPACE::Image dstImage = {};
+    VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+    uint32_t regionCount = {};
+    const VULKAN_HPP_NAMESPACE::ImageResolve2 * pRegions = {};
+
+  };
+
+  // Maps the StructureType enumerant back to this wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eResolveImageInfo2>
+  {
+    using Type = ResolveImageInfo2;
+  };
+  // Promoted from VK_KHR_copy_commands2; the KHR name aliases the core type.
+  using ResolveImageInfo2KHR = ResolveImageInfo2;
+
+  // C++ wrapper for VkSamplerBorderColorComponentMappingCreateInfoEXT.
+  // Layout-identical to the C struct; conversions are reinterpret_casts.
+  struct SamplerBorderColorComponentMappingCreateInfoEXT
+  {
+    using NativeType = VkSamplerBorderColorComponentMappingCreateInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerBorderColorComponentMappingCreateInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR SamplerBorderColorComponentMappingCreateInfoEXT(VULKAN_HPP_NAMESPACE::ComponentMapping components_ = {}, VULKAN_HPP_NAMESPACE::Bool32 srgb_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), components( components_ ), srgb( srgb_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SamplerBorderColorComponentMappingCreateInfoEXT( SamplerBorderColorComponentMappingCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; a pure bitwise copy via reinterpret_cast.
+    SamplerBorderColorComponentMappingCreateInfoEXT( VkSamplerBorderColorComponentMappingCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SamplerBorderColorComponentMappingCreateInfoEXT( *reinterpret_cast<SamplerBorderColorComponentMappingCreateInfoEXT const *>( &rhs ) )
+    {}
+
+
+    SamplerBorderColorComponentMappingCreateInfoEXT & operator=( SamplerBorderColorComponentMappingCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SamplerBorderColorComponentMappingCreateInfoEXT & operator=( VkSamplerBorderColorComponentMappingCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerBorderColorComponentMappingCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each assigns one member and returns *this for chaining.
+    VULKAN_HPP_CONSTEXPR_14 SamplerBorderColorComponentMappingCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerBorderColorComponentMappingCreateInfoEXT & setComponents( VULKAN_HPP_NAMESPACE::ComponentMapping const & components_ ) VULKAN_HPP_NOEXCEPT
+    {
+      components = components_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerBorderColorComponentMappingCreateInfoEXT & setSrgb( VULKAN_HPP_NAMESPACE::Bool32 srgb_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srgb = srgb_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost view as the C struct, for passing to the C API.
+    operator VkSamplerBorderColorComponentMappingCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSamplerBorderColorComponentMappingCreateInfoEXT*>( this );
+    }
+
+    operator VkSamplerBorderColorComponentMappingCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSamplerBorderColorComponentMappingCreateInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ComponentMapping const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      // Returns a tuple of references over all members, in declaration order.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, components, srgb );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SamplerBorderColorComponentMappingCreateInfoEXT const & ) const = default;
+#else
+    // Member-wise equality; pNext is compared by pointer value only.
+    bool operator==( SamplerBorderColorComponentMappingCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( components == rhs.components )
+          && ( srgb == rhs.srgb );
+#endif
+    }
+
+    bool operator!=( SamplerBorderColorComponentMappingCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerBorderColorComponentMappingCreateInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ComponentMapping components = {};
+    VULKAN_HPP_NAMESPACE::Bool32 srgb = {};
+
+  };
+
+  // Maps the StructureType enumerant back to this wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eSamplerBorderColorComponentMappingCreateInfoEXT>
+  {
+    using Type = SamplerBorderColorComponentMappingCreateInfoEXT;
+  };
+
+  // C++ wrapper for VkSamplerCaptureDescriptorDataInfoEXT.
+  // Layout-identical to the C struct; conversions are reinterpret_casts.
+  struct SamplerCaptureDescriptorDataInfoEXT
+  {
+    using NativeType = VkSamplerCaptureDescriptorDataInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerCaptureDescriptorDataInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR SamplerCaptureDescriptorDataInfoEXT(VULKAN_HPP_NAMESPACE::Sampler sampler_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), sampler( sampler_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SamplerCaptureDescriptorDataInfoEXT( SamplerCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; a pure bitwise copy via reinterpret_cast.
+    SamplerCaptureDescriptorDataInfoEXT( VkSamplerCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SamplerCaptureDescriptorDataInfoEXT( *reinterpret_cast<SamplerCaptureDescriptorDataInfoEXT const *>( &rhs ) )
+    {}
+
+
+    SamplerCaptureDescriptorDataInfoEXT & operator=( SamplerCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SamplerCaptureDescriptorDataInfoEXT & operator=( VkSamplerCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each assigns one member and returns *this for chaining.
+    VULKAN_HPP_CONSTEXPR_14 SamplerCaptureDescriptorDataInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerCaptureDescriptorDataInfoEXT & setSampler( VULKAN_HPP_NAMESPACE::Sampler sampler_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sampler = sampler_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost view as the C struct, for passing to the C API.
+    operator VkSamplerCaptureDescriptorDataInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSamplerCaptureDescriptorDataInfoEXT*>( this );
+    }
+
+    operator VkSamplerCaptureDescriptorDataInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSamplerCaptureDescriptorDataInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Sampler const &>
+#endif
+      // Returns a tuple of references over all members, in declaration order.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, sampler );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SamplerCaptureDescriptorDataInfoEXT const & ) const = default;
+#else
+    // Member-wise equality; pNext is compared by pointer value only.
+    bool operator==( SamplerCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( sampler == rhs.sampler );
+#endif
+    }
+
+    bool operator!=( SamplerCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerCaptureDescriptorDataInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Sampler sampler = {};
+
+  };
+
+  // Maps the StructureType enumerant back to this wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eSamplerCaptureDescriptorDataInfoEXT>
+  {
+    using Type = SamplerCaptureDescriptorDataInfoEXT;
+  };
+
+  // C++ wrapper for VkSamplerCreateInfo (the vkCreateSampler parameter struct).
+  // Member order/types mirror the C struct exactly; the conversion operators
+  // below rely on that layout match to reinterpret_cast between the two.
+  struct SamplerCreateInfo
+  {
+    using NativeType = VkSamplerCreateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerCreateInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR SamplerCreateInfo(VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Filter magFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest, VULKAN_HPP_NAMESPACE::Filter minFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest, VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode_ = VULKAN_HPP_NAMESPACE::SamplerMipmapMode::eNearest, VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat, VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat, VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat, float mipLodBias_ = {}, VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable_ = {}, float maxAnisotropy_ = {}, VULKAN_HPP_NAMESPACE::Bool32 compareEnable_ = {}, VULKAN_HPP_NAMESPACE::CompareOp compareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever, float minLod_ = {}, float maxLod_ = {}, VULKAN_HPP_NAMESPACE::BorderColor borderColor_ = VULKAN_HPP_NAMESPACE::BorderColor::eFloatTransparentBlack, VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), magFilter( magFilter_ ), minFilter( minFilter_ ), mipmapMode( mipmapMode_ ), addressModeU( addressModeU_ ), addressModeV( addressModeV_ ), addressModeW( addressModeW_ ), mipLodBias( mipLodBias_ ), anisotropyEnable( anisotropyEnable_ ), maxAnisotropy( maxAnisotropy_ ), compareEnable( compareEnable_ ), compareOp( compareOp_ ), minLod( minLod_ ), maxLod( maxLod_ ), borderColor( borderColor_ ), unnormalizedCoordinates( unnormalizedCoordinates_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SamplerCreateInfo( SamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; a pure bitwise copy via reinterpret_cast.
+    SamplerCreateInfo( VkSamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SamplerCreateInfo( *reinterpret_cast<SamplerCreateInfo const *>( &rhs ) )
+    {}
+
+
+    SamplerCreateInfo & operator=( SamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SamplerCreateInfo & operator=( VkSamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each assigns one member and returns *this for chaining.
+    VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMagFilter( VULKAN_HPP_NAMESPACE::Filter magFilter_ ) VULKAN_HPP_NOEXCEPT
+    {
+      magFilter = magFilter_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMinFilter( VULKAN_HPP_NAMESPACE::Filter minFilter_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minFilter = minFilter_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMipmapMode( VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mipmapMode = mipmapMode_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setAddressModeU( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU_ ) VULKAN_HPP_NOEXCEPT
+    {
+      addressModeU = addressModeU_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setAddressModeV( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV_ ) VULKAN_HPP_NOEXCEPT
+    {
+      addressModeV = addressModeV_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setAddressModeW( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW_ ) VULKAN_HPP_NOEXCEPT
+    {
+      addressModeW = addressModeW_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMipLodBias( float mipLodBias_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mipLodBias = mipLodBias_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setAnisotropyEnable( VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      anisotropyEnable = anisotropyEnable_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMaxAnisotropy( float maxAnisotropy_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxAnisotropy = maxAnisotropy_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setCompareEnable( VULKAN_HPP_NAMESPACE::Bool32 compareEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      compareEnable = compareEnable_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setCompareOp( VULKAN_HPP_NAMESPACE::CompareOp compareOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      compareOp = compareOp_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMinLod( float minLod_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minLod = minLod_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMaxLod( float maxLod_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxLod = maxLod_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setBorderColor( VULKAN_HPP_NAMESPACE::BorderColor borderColor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      borderColor = borderColor_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setUnnormalizedCoordinates( VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates_ ) VULKAN_HPP_NOEXCEPT
+    {
+      unnormalizedCoordinates = unnormalizedCoordinates_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost view as the C struct, for passing to the C API.
+    operator VkSamplerCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSamplerCreateInfo*>( this );
+    }
+
+    operator VkSamplerCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSamplerCreateInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SamplerCreateFlags const &, VULKAN_HPP_NAMESPACE::Filter const &, VULKAN_HPP_NAMESPACE::Filter const &, VULKAN_HPP_NAMESPACE::SamplerMipmapMode const &, VULKAN_HPP_NAMESPACE::SamplerAddressMode const &, VULKAN_HPP_NAMESPACE::SamplerAddressMode const &, VULKAN_HPP_NAMESPACE::SamplerAddressMode const &, float const &, VULKAN_HPP_NAMESPACE::Bool32 const &, float const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::CompareOp const &, float const &, float const &, VULKAN_HPP_NAMESPACE::BorderColor const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      // Returns a tuple of references over all members, in declaration order.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, magFilter, minFilter, mipmapMode, addressModeU, addressModeV, addressModeW, mipLodBias, anisotropyEnable, maxAnisotropy, compareEnable, compareOp, minLod, maxLod, borderColor, unnormalizedCoordinates );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SamplerCreateInfo const & ) const = default;
+#else
+    // Member-wise equality. Note: the float members (mipLodBias, maxAnisotropy,
+    // minLod, maxLod) use exact float comparison; pNext is compared by pointer.
+    bool operator==( SamplerCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( magFilter == rhs.magFilter )
+          && ( minFilter == rhs.minFilter )
+          && ( mipmapMode == rhs.mipmapMode )
+          && ( addressModeU == rhs.addressModeU )
+          && ( addressModeV == rhs.addressModeV )
+          && ( addressModeW == rhs.addressModeW )
+          && ( mipLodBias == rhs.mipLodBias )
+          && ( anisotropyEnable == rhs.anisotropyEnable )
+          && ( maxAnisotropy == rhs.maxAnisotropy )
+          && ( compareEnable == rhs.compareEnable )
+          && ( compareOp == rhs.compareOp )
+          && ( minLod == rhs.minLod )
+          && ( maxLod == rhs.maxLod )
+          && ( borderColor == rhs.borderColor )
+          && ( unnormalizedCoordinates == rhs.unnormalizedCoordinates );
+#endif
+    }
+
+    bool operator!=( SamplerCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags = {};
+    VULKAN_HPP_NAMESPACE::Filter magFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest;
+    VULKAN_HPP_NAMESPACE::Filter minFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest;
+    VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode = VULKAN_HPP_NAMESPACE::SamplerMipmapMode::eNearest;
+    VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat;
+    VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat;
+    VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat;
+    float mipLodBias = {};
+    VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable = {};
+    float maxAnisotropy = {};
+    VULKAN_HPP_NAMESPACE::Bool32 compareEnable = {};
+    VULKAN_HPP_NAMESPACE::CompareOp compareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever;
+    float minLod = {};
+    float maxLod = {};
+    VULKAN_HPP_NAMESPACE::BorderColor borderColor = VULKAN_HPP_NAMESPACE::BorderColor::eFloatTransparentBlack;
+    VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates = {};
+
+  };
+
+  // Maps the StructureType enumerant back to this wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eSamplerCreateInfo>
+  {
+    using Type = SamplerCreateInfo;
+  };
+
+  // C++ wrapper for VkSamplerCustomBorderColorCreateInfoEXT.
+  // Layout-identical to the C struct; conversions are reinterpret_casts.
+  // NOTE(review): unlike the neighbouring structs, no operator==/!=/<=> is
+  // generated here — presumably because customBorderColor is a ClearColorValue
+  // union which cannot be meaningfully compared; confirm in its definition.
+  struct SamplerCustomBorderColorCreateInfoEXT
+  {
+    using NativeType = VkSamplerCustomBorderColorCreateInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerCustomBorderColorCreateInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorCreateInfoEXT(VULKAN_HPP_NAMESPACE::ClearColorValue customBorderColor_ = {}, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), customBorderColor( customBorderColor_ ), format( format_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorCreateInfoEXT( SamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; a pure bitwise copy via reinterpret_cast.
+    SamplerCustomBorderColorCreateInfoEXT( VkSamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SamplerCustomBorderColorCreateInfoEXT( *reinterpret_cast<SamplerCustomBorderColorCreateInfoEXT const *>( &rhs ) )
+    {}
+
+
+    SamplerCustomBorderColorCreateInfoEXT & operator=( SamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SamplerCustomBorderColorCreateInfoEXT & operator=( VkSamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerCustomBorderColorCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each assigns one member and returns *this for chaining.
+    VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorCreateInfoEXT & setCustomBorderColor( VULKAN_HPP_NAMESPACE::ClearColorValue const & customBorderColor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      customBorderColor = customBorderColor_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorCreateInfoEXT & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
+    {
+      format = format_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost view as the C struct, for passing to the C API.
+    operator VkSamplerCustomBorderColorCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSamplerCustomBorderColorCreateInfoEXT*>( this );
+    }
+
+    operator VkSamplerCustomBorderColorCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSamplerCustomBorderColorCreateInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ClearColorValue const &, VULKAN_HPP_NAMESPACE::Format const &>
+#endif
+      // Returns a tuple of references over all members, in declaration order.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, customBorderColor, format );
+    }
+#endif
+
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerCustomBorderColorCreateInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ClearColorValue customBorderColor = {};
+    VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+
+  };
+
+  // Maps the StructureType enumerant back to this wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eSamplerCustomBorderColorCreateInfoEXT>
+  {
+    using Type = SamplerCustomBorderColorCreateInfoEXT;
+  };
+
+  struct SamplerReductionModeCreateInfo
+  {
+    using NativeType = VkSamplerReductionModeCreateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerReductionModeCreateInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR SamplerReductionModeCreateInfo(VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode_ = VULKAN_HPP_NAMESPACE::SamplerReductionMode::eWeightedAverage, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), reductionMode( reductionMode_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SamplerReductionModeCreateInfo( SamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SamplerReductionModeCreateInfo( VkSamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SamplerReductionModeCreateInfo( *reinterpret_cast<SamplerReductionModeCreateInfo const *>( &rhs ) )
+    {}
+
+
+    SamplerReductionModeCreateInfo & operator=( SamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SamplerReductionModeCreateInfo & operator=( VkSamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerReductionModeCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SamplerReductionModeCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerReductionModeCreateInfo & setReductionMode( VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      reductionMode = reductionMode_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkSamplerReductionModeCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSamplerReductionModeCreateInfo*>( this );
+    }
+
+    operator VkSamplerReductionModeCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSamplerReductionModeCreateInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SamplerReductionMode const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, reductionMode );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SamplerReductionModeCreateInfo const & ) const = default;
+#else
+    bool operator==( SamplerReductionModeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( reductionMode == rhs.reductionMode );
+#endif
+    }
+
+    bool operator!=( SamplerReductionModeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerReductionModeCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode = VULKAN_HPP_NAMESPACE::SamplerReductionMode::eWeightedAverage;
+
+  };
+
+  // Maps the eSamplerReductionModeCreateInfo sType enumerant back to its C++
+  // struct type, for sType-driven lookups (e.g. structure-chain utilities).
+  // NOTE(review): this is generated Vulkan-Hpp code carried inside a patch hunk;
+  // prefer regenerating over hand-editing.
+  template <>
+  struct CppType<StructureType, StructureType::eSamplerReductionModeCreateInfo>
+  {
+    using Type = SamplerReductionModeCreateInfo;
+  };
+  // EXT alias retained for source compatibility with the pre-promotion extension name.
+  using SamplerReductionModeCreateInfoEXT = SamplerReductionModeCreateInfo;
+
+  // C++ wrapper for VkSamplerYcbcrConversionCreateInfo. It is treated as
+  // layout-compatible with the C struct: the conversion operators and the
+  // converting constructor below reinterpret_cast between the two types.
+  struct SamplerYcbcrConversionCreateInfo
+  {
+    using NativeType = VkSamplerYcbcrConversionCreateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerYcbcrConversionCreateInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; pNext_ is last so every field can be defaulted.
+VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionCreateInfo(VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity, VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull, VULKAN_HPP_NAMESPACE::ComponentMapping components_ = {}, VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, VULKAN_HPP_NAMESPACE::Filter chromaFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest, VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), format( format_ ), ycbcrModel( ycbcrModel_ ), ycbcrRange( ycbcrRange_ ), components( components_ ), xChromaOffset( xChromaOffset_ ), yChromaOffset( yChromaOffset_ ), chromaFilter( chromaFilter_ ), forceExplicitReconstruction( forceExplicitReconstruction_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionCreateInfo( SamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Converting constructor from the C struct: bitwise copy via reinterpret_cast.
+    SamplerYcbcrConversionCreateInfo( VkSamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SamplerYcbcrConversionCreateInfo( *reinterpret_cast<SamplerYcbcrConversionCreateInfo const *>( &rhs ) )
+    {}
+
+
+    SamplerYcbcrConversionCreateInfo & operator=( SamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assignment from the C struct: bitwise copy via reinterpret_cast.
+    SamplerYcbcrConversionCreateInfo & operator=( VkSamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    // Fluent setters: each assigns one member and returns *this for chaining.
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
+    {
+      format = format_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setYcbcrModel( VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel_ ) VULKAN_HPP_NOEXCEPT
+    {
+      ycbcrModel = ycbcrModel_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setYcbcrRange( VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange_ ) VULKAN_HPP_NOEXCEPT
+    {
+      ycbcrRange = ycbcrRange_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setComponents( VULKAN_HPP_NAMESPACE::ComponentMapping const & components_ ) VULKAN_HPP_NOEXCEPT
+    {
+      components = components_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setXChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      xChromaOffset = xChromaOffset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setYChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      yChromaOffset = yChromaOffset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setChromaFilter( VULKAN_HPP_NAMESPACE::Filter chromaFilter_ ) VULKAN_HPP_NOEXCEPT
+    {
+      chromaFilter = chromaFilter_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setForceExplicitReconstruction( VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction_ ) VULKAN_HPP_NOEXCEPT
+    {
+      forceExplicitReconstruction = forceExplicitReconstruction_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views of this object as the C struct.
+    operator VkSamplerYcbcrConversionCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo*>( this );
+    }
+
+    operator VkSamplerYcbcrConversionCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSamplerYcbcrConversionCreateInfo*>( this );
+    }
+
+    // reflect(): ties all members into a tuple, used by the reflection-based operator==.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion const &, VULKAN_HPP_NAMESPACE::SamplerYcbcrRange const &, VULKAN_HPP_NAMESPACE::ComponentMapping const &, VULKAN_HPP_NAMESPACE::ChromaLocation const &, VULKAN_HPP_NAMESPACE::ChromaLocation const &, VULKAN_HPP_NAMESPACE::Filter const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, format, ycbcrModel, ycbcrRange, components, xChromaOffset, yChromaOffset, chromaFilter, forceExplicitReconstruction );
+    }
+#endif
+
+
+    // Member-wise equality; note pNext is compared as a raw pointer (shallow).
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SamplerYcbcrConversionCreateInfo const & ) const = default;
+#else
+    bool operator==( SamplerYcbcrConversionCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( format == rhs.format )
+          && ( ycbcrModel == rhs.ycbcrModel )
+          && ( ycbcrRange == rhs.ycbcrRange )
+          && ( components == rhs.components )
+          && ( xChromaOffset == rhs.xChromaOffset )
+          && ( yChromaOffset == rhs.yChromaOffset )
+          && ( chromaFilter == rhs.chromaFilter )
+          && ( forceExplicitReconstruction == rhs.forceExplicitReconstruction );
+#endif
+    }
+
+    bool operator!=( SamplerYcbcrConversionCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members mirror the C struct's fields in order (the reinterpret_casts above
+    // depend on this layout).
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity;
+    VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull;
+    VULKAN_HPP_NAMESPACE::ComponentMapping components = {};
+    VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
+    VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
+    VULKAN_HPP_NAMESPACE::Filter chromaFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest;
+    VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction = {};
+
+  };
+
+  // sType-to-type mapping for structure-chain utilities.
+  template <>
+  struct CppType<StructureType, StructureType::eSamplerYcbcrConversionCreateInfo>
+  {
+    using Type = SamplerYcbcrConversionCreateInfo;
+  };
+  // KHR alias retained for source compatibility with the pre-promotion extension name.
+  using SamplerYcbcrConversionCreateInfoKHR = SamplerYcbcrConversionCreateInfo;
+
+  // C++ wrapper for VkSamplerYcbcrConversionImageFormatProperties. Appears to be
+  // an output/query struct: pNext is non-const and no setters are generated.
+  // Layout-compatible with the C struct (see the reinterpret_casts below).
+  struct SamplerYcbcrConversionImageFormatProperties
+  {
+    using NativeType = VkSamplerYcbcrConversionImageFormatProperties;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerYcbcrConversionImageFormatProperties;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; pNext_ is last so every field can be defaulted.
+VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionImageFormatProperties(uint32_t combinedImageSamplerDescriptorCount_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), combinedImageSamplerDescriptorCount( combinedImageSamplerDescriptorCount_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionImageFormatProperties( SamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Converting constructor from the C struct: bitwise copy via reinterpret_cast.
+    SamplerYcbcrConversionImageFormatProperties( VkSamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SamplerYcbcrConversionImageFormatProperties( *reinterpret_cast<SamplerYcbcrConversionImageFormatProperties const *>( &rhs ) )
+    {}
+
+
+    SamplerYcbcrConversionImageFormatProperties & operator=( SamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assignment from the C struct: bitwise copy via reinterpret_cast.
+    SamplerYcbcrConversionImageFormatProperties & operator=( VkSamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionImageFormatProperties const *>( &rhs );
+      return *this;
+    }
+
+
+    // Zero-cost views of this object as the C struct.
+    operator VkSamplerYcbcrConversionImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSamplerYcbcrConversionImageFormatProperties*>( this );
+    }
+
+    operator VkSamplerYcbcrConversionImageFormatProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSamplerYcbcrConversionImageFormatProperties*>( this );
+    }
+
+    // reflect(): ties all members into a tuple, used by the reflection-based operator==.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, combinedImageSamplerDescriptorCount );
+    }
+#endif
+
+
+    // Member-wise equality; note pNext is compared as a raw pointer (shallow).
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SamplerYcbcrConversionImageFormatProperties const & ) const = default;
+#else
+    bool operator==( SamplerYcbcrConversionImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( combinedImageSamplerDescriptorCount == rhs.combinedImageSamplerDescriptorCount );
+#endif
+    }
+
+    bool operator!=( SamplerYcbcrConversionImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members mirror the C struct's fields in order (the reinterpret_casts above
+    // depend on this layout).
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionImageFormatProperties;
+    void * pNext = {};
+    uint32_t combinedImageSamplerDescriptorCount = {};
+
+  };
+
+  // sType-to-type mapping for structure-chain utilities.
+  template <>
+  struct CppType<StructureType, StructureType::eSamplerYcbcrConversionImageFormatProperties>
+  {
+    using Type = SamplerYcbcrConversionImageFormatProperties;
+  };
+  // KHR alias retained for source compatibility with the pre-promotion extension name.
+  using SamplerYcbcrConversionImageFormatPropertiesKHR = SamplerYcbcrConversionImageFormatProperties;
+
+  // C++ wrapper for VkSamplerYcbcrConversionInfo: wraps a SamplerYcbcrConversion
+  // handle for chaining into a pNext chain. Layout-compatible with the C struct
+  // (see the reinterpret_casts below).
+  struct SamplerYcbcrConversionInfo
+  {
+    using NativeType = VkSamplerYcbcrConversionInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerYcbcrConversionInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; pNext_ is last so every field can be defaulted.
+VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionInfo(VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), conversion( conversion_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionInfo( SamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Converting constructor from the C struct: bitwise copy via reinterpret_cast.
+    SamplerYcbcrConversionInfo( VkSamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SamplerYcbcrConversionInfo( *reinterpret_cast<SamplerYcbcrConversionInfo const *>( &rhs ) )
+    {}
+
+
+    SamplerYcbcrConversionInfo & operator=( SamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assignment from the C struct: bitwise copy via reinterpret_cast.
+    SamplerYcbcrConversionInfo & operator=( VkSamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfo const *>( &rhs );
+      return *this;
+    }
+
+    // Fluent setters: each assigns one member and returns *this for chaining.
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionInfo & setConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion_ ) VULKAN_HPP_NOEXCEPT
+    {
+      conversion = conversion_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views of this object as the C struct.
+    operator VkSamplerYcbcrConversionInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSamplerYcbcrConversionInfo*>( this );
+    }
+
+    operator VkSamplerYcbcrConversionInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSamplerYcbcrConversionInfo*>( this );
+    }
+
+    // reflect(): ties all members into a tuple, used by the reflection-based operator==.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, conversion );
+    }
+#endif
+
+
+    // Member-wise equality; note pNext is compared as a raw pointer (shallow).
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SamplerYcbcrConversionInfo const & ) const = default;
+#else
+    bool operator==( SamplerYcbcrConversionInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( conversion == rhs.conversion );
+#endif
+    }
+
+    bool operator!=( SamplerYcbcrConversionInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members mirror the C struct's fields in order (the reinterpret_casts above
+    // depend on this layout).
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion = {};
+
+  };
+
+  // sType-to-type mapping for structure-chain utilities.
+  template <>
+  struct CppType<StructureType, StructureType::eSamplerYcbcrConversionInfo>
+  {
+    using Type = SamplerYcbcrConversionInfo;
+  };
+  // KHR alias retained for source compatibility with the pre-promotion extension name.
+  using SamplerYcbcrConversionInfoKHR = SamplerYcbcrConversionInfo;
+
+// Only compiled when the QNX Screen platform is enabled.
+#if defined( VK_USE_PLATFORM_SCREEN_QNX )
+  // C++ wrapper for VkScreenSurfaceCreateInfoQNX (VK_QNX_screen_surface).
+  // Layout-compatible with the C struct (see the reinterpret_casts below).
+  struct ScreenSurfaceCreateInfoQNX
+  {
+    using NativeType = VkScreenSurfaceCreateInfoQNX;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eScreenSurfaceCreateInfoQNX;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; pNext_ is last so every field can be defaulted.
+VULKAN_HPP_CONSTEXPR ScreenSurfaceCreateInfoQNX(VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateFlagsQNX flags_ = {}, struct _screen_context * context_ = {}, struct _screen_window * window_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), context( context_ ), window( window_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ScreenSurfaceCreateInfoQNX( ScreenSurfaceCreateInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Converting constructor from the C struct: bitwise copy via reinterpret_cast.
+    ScreenSurfaceCreateInfoQNX( VkScreenSurfaceCreateInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ScreenSurfaceCreateInfoQNX( *reinterpret_cast<ScreenSurfaceCreateInfoQNX const *>( &rhs ) )
+    {}
+
+
+    ScreenSurfaceCreateInfoQNX & operator=( ScreenSurfaceCreateInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assignment from the C struct: bitwise copy via reinterpret_cast.
+    ScreenSurfaceCreateInfoQNX & operator=( VkScreenSurfaceCreateInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX const *>( &rhs );
+      return *this;
+    }
+
+    // Fluent setters: each assigns one member and returns *this for chaining.
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ScreenSurfaceCreateInfoQNX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ScreenSurfaceCreateInfoQNX & setFlags( VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateFlagsQNX flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ScreenSurfaceCreateInfoQNX & setContext( struct _screen_context * context_ ) VULKAN_HPP_NOEXCEPT
+    {
+      context = context_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ScreenSurfaceCreateInfoQNX & setWindow( struct _screen_window * window_ ) VULKAN_HPP_NOEXCEPT
+    {
+      window = window_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views of this object as the C struct.
+    operator VkScreenSurfaceCreateInfoQNX const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkScreenSurfaceCreateInfoQNX*>( this );
+    }
+
+    operator VkScreenSurfaceCreateInfoQNX &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkScreenSurfaceCreateInfoQNX*>( this );
+    }
+
+    // reflect(): ties all members into a tuple, used by the reflection-based operator==.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateFlagsQNX const &, struct _screen_context * const &, struct _screen_window * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, context, window );
+    }
+#endif
+
+
+    // Member-wise equality; pointers (pNext, context, window) compare shallowly.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ScreenSurfaceCreateInfoQNX const & ) const = default;
+#else
+    bool operator==( ScreenSurfaceCreateInfoQNX const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( context == rhs.context )
+          && ( window == rhs.window );
+#endif
+    }
+
+    bool operator!=( ScreenSurfaceCreateInfoQNX const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members mirror the C struct's fields in order (the reinterpret_casts above
+    // depend on this layout).
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eScreenSurfaceCreateInfoQNX;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateFlagsQNX flags = {};
+    struct _screen_context * context = {};
+    struct _screen_window * window = {};
+
+  };
+
+  // sType-to-type mapping for structure-chain utilities.
+  template <>
+  struct CppType<StructureType, StructureType::eScreenSurfaceCreateInfoQNX>
+  {
+    using Type = ScreenSurfaceCreateInfoQNX;
+  };
+#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+
+  // C++ wrapper for VkSemaphoreCreateInfo. Layout-compatible with the C struct
+  // (see the reinterpret_casts below).
+  struct SemaphoreCreateInfo
+  {
+    using NativeType = VkSemaphoreCreateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreCreateInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; pNext_ is last so every field can be defaulted.
+VULKAN_HPP_CONSTEXPR SemaphoreCreateInfo(VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SemaphoreCreateInfo( SemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Converting constructor from the C struct: bitwise copy via reinterpret_cast.
+    SemaphoreCreateInfo( VkSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SemaphoreCreateInfo( *reinterpret_cast<SemaphoreCreateInfo const *>( &rhs ) )
+    {}
+
+
+    SemaphoreCreateInfo & operator=( SemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assignment from the C struct: bitwise copy via reinterpret_cast.
+    SemaphoreCreateInfo & operator=( VkSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    // Fluent setters: each assigns one member and returns *this for chaining.
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SemaphoreCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SemaphoreCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views of this object as the C struct.
+    operator VkSemaphoreCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSemaphoreCreateInfo*>( this );
+    }
+
+    operator VkSemaphoreCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSemaphoreCreateInfo*>( this );
+    }
+
+    // reflect(): ties all members into a tuple, used by the reflection-based operator==.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags );
+    }
+#endif
+
+
+    // Member-wise equality; note pNext is compared as a raw pointer (shallow).
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SemaphoreCreateInfo const & ) const = default;
+#else
+    bool operator==( SemaphoreCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags );
+#endif
+    }
+
+    bool operator!=( SemaphoreCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members mirror the C struct's fields in order (the reinterpret_casts above
+    // depend on this layout).
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags = {};
+
+  };
+
+  // sType-to-type mapping for structure-chain utilities.
+  template <>
+  struct CppType<StructureType, StructureType::eSemaphoreCreateInfo>
+  {
+    using Type = SemaphoreCreateInfo;
+  };
+
+  // C++ wrapper for VkSemaphoreGetFdInfoKHR — presumably the parameter struct for
+  // vkGetSemaphoreFdKHR (VK_KHR_external_semaphore_fd); confirm against the API.
+  // Layout-compatible with the C struct (see the reinterpret_casts below).
+  struct SemaphoreGetFdInfoKHR
+  {
+    using NativeType = VkSemaphoreGetFdInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreGetFdInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; pNext_ is last so every field can be defaulted.
+VULKAN_HPP_CONSTEXPR SemaphoreGetFdInfoKHR(VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), semaphore( semaphore_ ), handleType( handleType_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SemaphoreGetFdInfoKHR( SemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Converting constructor from the C struct: bitwise copy via reinterpret_cast.
+    SemaphoreGetFdInfoKHR( VkSemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SemaphoreGetFdInfoKHR( *reinterpret_cast<SemaphoreGetFdInfoKHR const *>( &rhs ) )
+    {}
+
+
+    SemaphoreGetFdInfoKHR & operator=( SemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assignment from the C struct: bitwise copy via reinterpret_cast.
+    SemaphoreGetFdInfoKHR & operator=( VkSemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    // Fluent setters: each assigns one member and returns *this for chaining.
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SemaphoreGetFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SemaphoreGetFdInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
+    {
+      semaphore = semaphore_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SemaphoreGetFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views of this object as the C struct.
+    operator VkSemaphoreGetFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSemaphoreGetFdInfoKHR*>( this );
+    }
+
+    operator VkSemaphoreGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSemaphoreGetFdInfoKHR*>( this );
+    }
+
+    // reflect(): ties all members into a tuple, used by the reflection-based operator==.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Semaphore const &, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, semaphore, handleType );
+    }
+#endif
+
+
+    // Member-wise equality; note pNext is compared as a raw pointer (shallow).
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SemaphoreGetFdInfoKHR const & ) const = default;
+#else
+    bool operator==( SemaphoreGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( semaphore == rhs.semaphore )
+          && ( handleType == rhs.handleType );
+#endif
+    }
+
+    bool operator!=( SemaphoreGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members mirror the C struct's fields in order (the reinterpret_casts above
+    // depend on this layout).
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreGetFdInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
+    VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
+
+  };
+
+  // sType-to-type mapping for structure-chain utilities.
+  template <>
+  struct CppType<StructureType, StructureType::eSemaphoreGetFdInfoKHR>
+  {
+    using Type = SemaphoreGetFdInfoKHR;
+  };
+
+// Only compiled when the Win32 platform is enabled.
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  // C++ wrapper for VkSemaphoreGetWin32HandleInfoKHR. Layout-compatible with the
+  // C struct (see the reinterpret_casts below). Note the generated default for
+  // handleType is eOpaqueFd even on Win32 — matches upstream generator output.
+  struct SemaphoreGetWin32HandleInfoKHR
+  {
+    using NativeType = VkSemaphoreGetWin32HandleInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreGetWin32HandleInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; pNext_ is last so every field can be defaulted.
+VULKAN_HPP_CONSTEXPR SemaphoreGetWin32HandleInfoKHR(VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), semaphore( semaphore_ ), handleType( handleType_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SemaphoreGetWin32HandleInfoKHR( SemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Converting constructor from the C struct: bitwise copy via reinterpret_cast.
+    SemaphoreGetWin32HandleInfoKHR( VkSemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SemaphoreGetWin32HandleInfoKHR( *reinterpret_cast<SemaphoreGetWin32HandleInfoKHR const *>( &rhs ) )
+    {}
+
+
+    SemaphoreGetWin32HandleInfoKHR & operator=( SemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assignment from the C struct: bitwise copy via reinterpret_cast.
+    SemaphoreGetWin32HandleInfoKHR & operator=( VkSemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    // Fluent setters: each assigns one member and returns *this for chaining.
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SemaphoreGetWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SemaphoreGetWin32HandleInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
+    {
+      semaphore = semaphore_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SemaphoreGetWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views of this object as the C struct.
+    operator VkSemaphoreGetWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR*>( this );
+    }
+
+    operator VkSemaphoreGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSemaphoreGetWin32HandleInfoKHR*>( this );
+    }
+
+    // reflect(): ties all members into a tuple, used by the reflection-based operator==.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Semaphore const &, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, semaphore, handleType );
+    }
+#endif
+
+
+    // Member-wise equality; note pNext is compared as a raw pointer (shallow).
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SemaphoreGetWin32HandleInfoKHR const & ) const = default;
+#else
+    bool operator==( SemaphoreGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( semaphore == rhs.semaphore )
+          && ( handleType == rhs.handleType );
+#endif
+    }
+
+    bool operator!=( SemaphoreGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members mirror the C struct's fields in order (the reinterpret_casts above
+    // depend on this layout).
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreGetWin32HandleInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
+    VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
+
+  };
+
+  // sType-to-type mapping for structure-chain utilities.
+  template <>
+  struct CppType<StructureType, StructureType::eSemaphoreGetWin32HandleInfoKHR>
+  {
+    using Type = SemaphoreGetWin32HandleInfoKHR;
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  // C++ wrapper for VkSemaphoreGetZirconHandleInfoFUCHSIA. Relies on being
+  // layout-identical to the C struct: the Vk* conversion operators and the
+  // operator= from the C struct below are plain reinterpret_casts of *this.
+  struct SemaphoreGetZirconHandleInfoFUCHSIA
+  {
+    using NativeType = VkSemaphoreGetZirconHandleInfoFUCHSIA;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreGetZirconHandleInfoFUCHSIA;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+// NOTE(review): default handleType is eOpaqueFd even for this Zircon-handle struct —
+// looks like the generator's "first enum value" default; confirm against the spec.
+VULKAN_HPP_CONSTEXPR SemaphoreGetZirconHandleInfoFUCHSIA(VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), semaphore( semaphore_ ), handleType( handleType_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SemaphoreGetZirconHandleInfoFUCHSIA( SemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid because of the layout compatibility noted above.
+    SemaphoreGetZirconHandleInfoFUCHSIA( VkSemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SemaphoreGetZirconHandleInfoFUCHSIA( *reinterpret_cast<SemaphoreGetZirconHandleInfoFUCHSIA const *>( &rhs ) )
+    {}
+
+
+    SemaphoreGetZirconHandleInfoFUCHSIA & operator=( SemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SemaphoreGetZirconHandleInfoFUCHSIA & operator=( VkSemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters (builder style); each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 SemaphoreGetZirconHandleInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SemaphoreGetZirconHandleInfoFUCHSIA & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
+    {
+      semaphore = semaphore_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SemaphoreGetZirconHandleInfoFUCHSIA & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views of this object as the underlying C struct.
+    operator VkSemaphoreGetZirconHandleInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSemaphoreGetZirconHandleInfoFUCHSIA*>( this );
+    }
+
+    operator VkSemaphoreGetZirconHandleInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSemaphoreGetZirconHandleInfoFUCHSIA*>( this );
+    }
+
+    // Tuple view of all members; backs the comparison operators when reflection is enabled.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Semaphore const &, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, semaphore, handleType );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SemaphoreGetZirconHandleInfoFUCHSIA const & ) const = default;
+#else
+    // Member-wise equality (pNext compared as a raw pointer, not deeply).
+    bool operator==( SemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( semaphore == rhs.semaphore )
+          && ( handleType == rhs.handleType );
+#endif
+    }
+
+    bool operator!=( SemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Data members mirror the C struct member-for-member; do not reorder.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreGetZirconHandleInfoFUCHSIA;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
+    VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
+
+  };
+
+  // Maps StructureType::eSemaphoreGetZirconHandleInfoFUCHSIA back to this wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eSemaphoreGetZirconHandleInfoFUCHSIA>
+  {
+    using Type = SemaphoreGetZirconHandleInfoFUCHSIA;
+  };
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  // C++ wrapper for VkSemaphoreSignalInfo; the SemaphoreSignalInfoKHR alias below keeps
+  // the extension spelling available. Layout-identical to the C struct: the Vk*
+  // conversion operators and operator= below are plain reinterpret_casts of *this.
+  struct SemaphoreSignalInfo
+  {
+    using NativeType = VkSemaphoreSignalInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreSignalInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR SemaphoreSignalInfo(VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, uint64_t value_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), semaphore( semaphore_ ), value( value_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SemaphoreSignalInfo( SemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid because of the layout compatibility noted above.
+    SemaphoreSignalInfo( VkSemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SemaphoreSignalInfo( *reinterpret_cast<SemaphoreSignalInfo const *>( &rhs ) )
+    {}
+
+
+    SemaphoreSignalInfo & operator=( SemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SemaphoreSignalInfo & operator=( VkSemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters (builder style); each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 SemaphoreSignalInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SemaphoreSignalInfo & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
+    {
+      semaphore = semaphore_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SemaphoreSignalInfo & setValue( uint64_t value_ ) VULKAN_HPP_NOEXCEPT
+    {
+      value = value_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views of this object as the underlying C struct.
+    operator VkSemaphoreSignalInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSemaphoreSignalInfo*>( this );
+    }
+
+    operator VkSemaphoreSignalInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSemaphoreSignalInfo*>( this );
+    }
+
+    // Tuple view of all members; backs the comparison operators when reflection is enabled.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Semaphore const &, uint64_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, semaphore, value );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SemaphoreSignalInfo const & ) const = default;
+#else
+    // Member-wise equality (pNext compared as a raw pointer, not deeply).
+    bool operator==( SemaphoreSignalInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( semaphore == rhs.semaphore )
+          && ( value == rhs.value );
+#endif
+    }
+
+    bool operator!=( SemaphoreSignalInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Data members mirror the C struct member-for-member; do not reorder.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreSignalInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
+    uint64_t value = {};
+
+  };
+
+  // Maps StructureType::eSemaphoreSignalInfo back to this wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eSemaphoreSignalInfo>
+  {
+    using Type = SemaphoreSignalInfo;
+  };
+  using SemaphoreSignalInfoKHR = SemaphoreSignalInfo;
+
+  // C++ wrapper for VkSemaphoreSubmitInfo; the SemaphoreSubmitInfoKHR alias below keeps
+  // the extension spelling available. Layout-identical to the C struct: the Vk*
+  // conversion operators and operator= below are plain reinterpret_casts of *this.
+  struct SemaphoreSubmitInfo
+  {
+    using NativeType = VkSemaphoreSubmitInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreSubmitInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR SemaphoreSubmitInfo(VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, uint64_t value_ = {}, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask_ = {}, uint32_t deviceIndex_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), semaphore( semaphore_ ), value( value_ ), stageMask( stageMask_ ), deviceIndex( deviceIndex_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SemaphoreSubmitInfo( SemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid because of the layout compatibility noted above.
+    SemaphoreSubmitInfo( VkSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SemaphoreSubmitInfo( *reinterpret_cast<SemaphoreSubmitInfo const *>( &rhs ) )
+    {}
+
+
+    SemaphoreSubmitInfo & operator=( SemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SemaphoreSubmitInfo & operator=( VkSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters (builder style); each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 SemaphoreSubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SemaphoreSubmitInfo & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
+    {
+      semaphore = semaphore_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SemaphoreSubmitInfo & setValue( uint64_t value_ ) VULKAN_HPP_NOEXCEPT
+    {
+      value = value_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SemaphoreSubmitInfo & setStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stageMask = stageMask_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SemaphoreSubmitInfo & setDeviceIndex( uint32_t deviceIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceIndex = deviceIndex_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views of this object as the underlying C struct.
+    operator VkSemaphoreSubmitInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSemaphoreSubmitInfo*>( this );
+    }
+
+    operator VkSemaphoreSubmitInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSemaphoreSubmitInfo*>( this );
+    }
+
+    // Tuple view of all members; backs the comparison operators when reflection is enabled.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Semaphore const &, uint64_t const &, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, semaphore, value, stageMask, deviceIndex );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SemaphoreSubmitInfo const & ) const = default;
+#else
+    // Member-wise equality (pNext compared as a raw pointer, not deeply).
+    bool operator==( SemaphoreSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( semaphore == rhs.semaphore )
+          && ( value == rhs.value )
+          && ( stageMask == rhs.stageMask )
+          && ( deviceIndex == rhs.deviceIndex );
+#endif
+    }
+
+    bool operator!=( SemaphoreSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Data members mirror the C struct member-for-member; do not reorder.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreSubmitInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
+    uint64_t value = {};
+    VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask = {};
+    uint32_t deviceIndex = {};
+
+  };
+
+  // Maps StructureType::eSemaphoreSubmitInfo back to this wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eSemaphoreSubmitInfo>
+  {
+    using Type = SemaphoreSubmitInfo;
+  };
+  using SemaphoreSubmitInfoKHR = SemaphoreSubmitInfo;
+
+  // C++ wrapper for VkSemaphoreTypeCreateInfo; the SemaphoreTypeCreateInfoKHR alias below
+  // keeps the extension spelling available. Layout-identical to the C struct: the Vk*
+  // conversion operators and operator= below are plain reinterpret_casts of *this.
+  struct SemaphoreTypeCreateInfo
+  {
+    using NativeType = VkSemaphoreTypeCreateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreTypeCreateInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR SemaphoreTypeCreateInfo(VULKAN_HPP_NAMESPACE::SemaphoreType semaphoreType_ = VULKAN_HPP_NAMESPACE::SemaphoreType::eBinary, uint64_t initialValue_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), semaphoreType( semaphoreType_ ), initialValue( initialValue_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SemaphoreTypeCreateInfo( SemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid because of the layout compatibility noted above.
+    SemaphoreTypeCreateInfo( VkSemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SemaphoreTypeCreateInfo( *reinterpret_cast<SemaphoreTypeCreateInfo const *>( &rhs ) )
+    {}
+
+
+    SemaphoreTypeCreateInfo & operator=( SemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SemaphoreTypeCreateInfo & operator=( VkSemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreTypeCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters (builder style); each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 SemaphoreTypeCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SemaphoreTypeCreateInfo & setSemaphoreType( VULKAN_HPP_NAMESPACE::SemaphoreType semaphoreType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      semaphoreType = semaphoreType_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SemaphoreTypeCreateInfo & setInitialValue( uint64_t initialValue_ ) VULKAN_HPP_NOEXCEPT
+    {
+      initialValue = initialValue_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views of this object as the underlying C struct.
+    operator VkSemaphoreTypeCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSemaphoreTypeCreateInfo*>( this );
+    }
+
+    operator VkSemaphoreTypeCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSemaphoreTypeCreateInfo*>( this );
+    }
+
+    // Tuple view of all members; backs the comparison operators when reflection is enabled.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SemaphoreType const &, uint64_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, semaphoreType, initialValue );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SemaphoreTypeCreateInfo const & ) const = default;
+#else
+    // Member-wise equality (pNext compared as a raw pointer, not deeply).
+    bool operator==( SemaphoreTypeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( semaphoreType == rhs.semaphoreType )
+          && ( initialValue == rhs.initialValue );
+#endif
+    }
+
+    bool operator!=( SemaphoreTypeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Data members mirror the C struct member-for-member; do not reorder.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreTypeCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::SemaphoreType semaphoreType = VULKAN_HPP_NAMESPACE::SemaphoreType::eBinary;
+    uint64_t initialValue = {};
+
+  };
+
+  // Maps StructureType::eSemaphoreTypeCreateInfo back to this wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eSemaphoreTypeCreateInfo>
+  {
+    using Type = SemaphoreTypeCreateInfo;
+  };
+  using SemaphoreTypeCreateInfoKHR = SemaphoreTypeCreateInfo;
+
+  // C++ wrapper for VkSemaphoreWaitInfo; the SemaphoreWaitInfoKHR alias below keeps
+  // the extension spelling available. Layout-identical to the C struct: the Vk*
+  // conversion operators and operator= below are plain reinterpret_casts of *this.
+  // Note: pSemaphores/pValues are borrowed pointers — the caller keeps the arrays alive.
+  struct SemaphoreWaitInfo
+  {
+    using NativeType = VkSemaphoreWaitInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreWaitInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR SemaphoreWaitInfo(VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_ = {}, uint32_t semaphoreCount_ = {}, const VULKAN_HPP_NAMESPACE::Semaphore * pSemaphores_ = {}, const uint64_t * pValues_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), semaphoreCount( semaphoreCount_ ), pSemaphores( pSemaphores_ ), pValues( pValues_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SemaphoreWaitInfo( SemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid because of the layout compatibility noted above.
+    SemaphoreWaitInfo( VkSemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SemaphoreWaitInfo( *reinterpret_cast<SemaphoreWaitInfo const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Enhanced-mode constructor: derives semaphoreCount from the proxies. semaphores_ and
+    // values_ must have equal size — asserted under VULKAN_HPP_NO_EXCEPTIONS, otherwise a
+    // LogicError is thrown.
+    SemaphoreWaitInfo( VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & semaphores_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & values_ = {}, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), flags( flags_ ), semaphoreCount( static_cast<uint32_t>( semaphores_.size() ) ), pSemaphores( semaphores_.data() ), pValues( values_.data() )
+    {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+      VULKAN_HPP_ASSERT( semaphores_.size() == values_.size() );
+#else
+      if ( semaphores_.size() != values_.size() )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING"::SemaphoreWaitInfo::SemaphoreWaitInfo: semaphores_.size() != values_.size()" );
+      }
+#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    SemaphoreWaitInfo & operator=( SemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SemaphoreWaitInfo & operator=( VkSemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters (builder style); each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 SemaphoreWaitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SemaphoreWaitInfo & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SemaphoreWaitInfo & setSemaphoreCount( uint32_t semaphoreCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      semaphoreCount = semaphoreCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SemaphoreWaitInfo & setPSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pSemaphores_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSemaphores = pSemaphores_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // NOTE: both setSemaphores and setValues overwrite semaphoreCount — supply
+    // equally sized arrays when using both.
+    SemaphoreWaitInfo & setSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & semaphores_ ) VULKAN_HPP_NOEXCEPT
+    {
+      semaphoreCount = static_cast<uint32_t>( semaphores_.size() );
+      pSemaphores = semaphores_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 SemaphoreWaitInfo & setPValues( const uint64_t * pValues_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pValues = pValues_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    SemaphoreWaitInfo & setValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & values_ ) VULKAN_HPP_NOEXCEPT
+    {
+      semaphoreCount = static_cast<uint32_t>( values_.size() );
+      pValues = values_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views of this object as the underlying C struct.
+    operator VkSemaphoreWaitInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSemaphoreWaitInfo*>( this );
+    }
+
+    operator VkSemaphoreWaitInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSemaphoreWaitInfo*>( this );
+    }
+
+    // Tuple view of all members; backs the comparison operators when reflection is enabled.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Semaphore * const &, const uint64_t * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, semaphoreCount, pSemaphores, pValues );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SemaphoreWaitInfo const & ) const = default;
+#else
+    // Member-wise equality; array pointers are compared by address, not by contents.
+    bool operator==( SemaphoreWaitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( semaphoreCount == rhs.semaphoreCount )
+          && ( pSemaphores == rhs.pSemaphores )
+          && ( pValues == rhs.pValues );
+#endif
+    }
+
+    bool operator!=( SemaphoreWaitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Data members mirror the C struct member-for-member; do not reorder.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreWaitInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags = {};
+    uint32_t semaphoreCount = {};
+    const VULKAN_HPP_NAMESPACE::Semaphore * pSemaphores = {};
+    const uint64_t * pValues = {};
+
+  };
+
+  // Maps StructureType::eSemaphoreWaitInfo back to this wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eSemaphoreWaitInfo>
+  {
+    using Type = SemaphoreWaitInfo;
+  };
+  using SemaphoreWaitInfoKHR = SemaphoreWaitInfo;
+
+  // C++ wrapper for VkSetStateFlagsIndirectCommandNV: a plain data token (no sType/pNext
+  // chain) holding a single uint32_t payload. Layout-identical to the C struct, so the
+  // Vk* conversion operators below are plain reinterpret_casts of *this.
+  struct SetStateFlagsIndirectCommandNV
+  {
+    using NativeType = VkSetStateFlagsIndirectCommandNV;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR SetStateFlagsIndirectCommandNV(uint32_t data_ = {}) VULKAN_HPP_NOEXCEPT
+    : data( data_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SetStateFlagsIndirectCommandNV( SetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid because of the layout compatibility noted above.
+    SetStateFlagsIndirectCommandNV( VkSetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SetStateFlagsIndirectCommandNV( *reinterpret_cast<SetStateFlagsIndirectCommandNV const *>( &rhs ) )
+    {}
+
+
+    SetStateFlagsIndirectCommandNV & operator=( SetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SetStateFlagsIndirectCommandNV & operator=( VkSetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SetStateFlagsIndirectCommandNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setter (builder style); returns *this.
+    VULKAN_HPP_CONSTEXPR_14 SetStateFlagsIndirectCommandNV & setData( uint32_t data_ ) VULKAN_HPP_NOEXCEPT
+    {
+      data = data_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views of this object as the underlying C struct.
+    operator VkSetStateFlagsIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSetStateFlagsIndirectCommandNV*>( this );
+    }
+
+    operator VkSetStateFlagsIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSetStateFlagsIndirectCommandNV*>( this );
+    }
+
+    // Tuple view of the single member; backs the comparison operators when reflection is enabled.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( data );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SetStateFlagsIndirectCommandNV const & ) const = default;
+#else
+    bool operator==( SetStateFlagsIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( data == rhs.data );
+#endif
+    }
+
+    bool operator!=( SetStateFlagsIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Single payload member, mirroring the C struct.
+    public:
+    uint32_t data = {};
+
+  };
+
+  // C++ wrapper for VkShaderModuleCreateInfo. Layout-identical to the C struct: the Vk*
+  // conversion operators and operator= below are plain reinterpret_casts of *this.
+  // Note: pCode is a borrowed pointer — the caller keeps the SPIR-V buffer alive.
+  struct ShaderModuleCreateInfo
+  {
+    using NativeType = VkShaderModuleCreateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eShaderModuleCreateInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ShaderModuleCreateInfo(VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_ = {}, size_t codeSize_ = {}, const uint32_t * pCode_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), codeSize( codeSize_ ), pCode( pCode_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ShaderModuleCreateInfo( ShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid because of the layout compatibility noted above.
+    ShaderModuleCreateInfo( VkShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ShaderModuleCreateInfo( *reinterpret_cast<ShaderModuleCreateInfo const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Enhanced-mode constructor: codeSize is in bytes while the proxy holds uint32_t
+    // SPIR-V words, hence the * 4.
+    ShaderModuleCreateInfo( VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & code_, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), flags( flags_ ), codeSize( code_.size() * 4 ), pCode( code_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    ShaderModuleCreateInfo & operator=( ShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ShaderModuleCreateInfo & operator=( VkShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters (builder style); each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 ShaderModuleCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ShaderModuleCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ShaderModuleCreateInfo & setCodeSize( size_t codeSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      codeSize = codeSize_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ShaderModuleCreateInfo & setPCode( const uint32_t * pCode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pCode = pCode_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets pCode and derives codeSize (bytes) from the word count, as in the
+    // enhanced-mode constructor.
+    ShaderModuleCreateInfo & setCode( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & code_ ) VULKAN_HPP_NOEXCEPT
+    {
+      codeSize = code_.size() * 4;
+      pCode = code_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views of this object as the underlying C struct.
+    operator VkShaderModuleCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkShaderModuleCreateInfo*>( this );
+    }
+
+    operator VkShaderModuleCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkShaderModuleCreateInfo*>( this );
+    }
+
+    // Tuple view of all members; backs the comparison operators when reflection is enabled.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags const &, size_t const &, const uint32_t * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, codeSize, pCode );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ShaderModuleCreateInfo const & ) const = default;
+#else
+    // Member-wise equality; pCode is compared by address, not by SPIR-V contents.
+    bool operator==( ShaderModuleCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( codeSize == rhs.codeSize )
+          && ( pCode == rhs.pCode );
+#endif
+    }
+
+    bool operator!=( ShaderModuleCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Data members mirror the C struct member-for-member; do not reorder.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eShaderModuleCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags = {};
+    size_t codeSize = {};
+    const uint32_t * pCode = {};
+
+  };
+
+  // Maps StructureType::eShaderModuleCreateInfo back to this wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eShaderModuleCreateInfo>
+  {
+    using Type = ShaderModuleCreateInfo;
+  };
+
+  // C++ wrapper for VkShaderModuleIdentifierEXT. This is an output structure (non-const
+  // void * pNext and no generated setters). Layout-identical to the C struct, so the Vk*
+  // conversion operators below are plain reinterpret_casts of *this.
+  struct ShaderModuleIdentifierEXT
+  {
+    using NativeType = VkShaderModuleIdentifierEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eShaderModuleIdentifierEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR_14 ShaderModuleIdentifierEXT(uint32_t identifierSize_ = {}, std::array<uint8_t,VK_MAX_SHADER_MODULE_IDENTIFIER_SIZE_EXT> const & identifier_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), identifierSize( identifierSize_ ), identifier( identifier_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 ShaderModuleIdentifierEXT( ShaderModuleIdentifierEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid because of the layout compatibility noted above.
+    ShaderModuleIdentifierEXT( VkShaderModuleIdentifierEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ShaderModuleIdentifierEXT( *reinterpret_cast<ShaderModuleIdentifierEXT const *>( &rhs ) )
+    {}
+
+
+    ShaderModuleIdentifierEXT & operator=( ShaderModuleIdentifierEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ShaderModuleIdentifierEXT & operator=( VkShaderModuleIdentifierEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT const *>( &rhs );
+      return *this;
+    }
+
+
+    // Zero-cost views of this object as the underlying C struct.
+    operator VkShaderModuleIdentifierEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkShaderModuleIdentifierEXT*>( this );
+    }
+
+    operator VkShaderModuleIdentifierEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkShaderModuleIdentifierEXT*>( this );
+    }
+
+    // Tuple view of all members; backs the comparison operators when reflection is enabled.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_MAX_SHADER_MODULE_IDENTIFIER_SIZE_EXT> const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, identifierSize, identifier );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ShaderModuleIdentifierEXT const & ) const = default;
+#else
+    // Member-wise equality; compares the full fixed-size identifier array, not just
+    // the first identifierSize bytes.
+    bool operator==( ShaderModuleIdentifierEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( identifierSize == rhs.identifierSize )
+          && ( identifier == rhs.identifier );
+#endif
+    }
+
+    bool operator!=( ShaderModuleIdentifierEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Data members mirror the C struct member-for-member; do not reorder.
+    // Presumably only the first identifierSize bytes of identifier are meaningful —
+    // confirm against the VK_EXT_shader_module_identifier spec.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eShaderModuleIdentifierEXT;
+    void * pNext = {};
+    uint32_t identifierSize = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_MAX_SHADER_MODULE_IDENTIFIER_SIZE_EXT> identifier = {};
+
+  };
+
+  // Maps StructureType::eShaderModuleIdentifierEXT back to this wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eShaderModuleIdentifierEXT>
+  {
+    using Type = ShaderModuleIdentifierEXT;
+  };
+
+  // C++ wrapper for VkShaderModuleValidationCacheCreateInfoEXT. Holds a
+  // ValidationCacheEXT handle plus the usual sType/pNext header; the struct is
+  // layout-compatible with the C type, which is what makes the
+  // reinterpret_cast conversion operators below well-defined.
+  // NOTE(review): presumably chained into shader-module creation via pNext
+  // (VK_EXT_validation_cache) — confirm against the extension spec.
+  struct ShaderModuleValidationCacheCreateInfoEXT
+  {
+    using NativeType = VkShaderModuleValidationCacheCreateInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eShaderModuleValidationCacheCreateInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; pNext is listed first in the init-list to match member
+    // declaration order (sType is fixed by its default member initializer).
+VULKAN_HPP_CONSTEXPR ShaderModuleValidationCacheCreateInfoEXT(VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), validationCache( validationCache_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ShaderModuleValidationCacheCreateInfoEXT( ShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the wrapper type.
+    ShaderModuleValidationCacheCreateInfoEXT( VkShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ShaderModuleValidationCacheCreateInfoEXT( *reinterpret_cast<ShaderModuleValidationCacheCreateInfoEXT const *>( &rhs ) )
+    {}
+
+
+    ShaderModuleValidationCacheCreateInfoEXT & operator=( ShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (same layout-compatibility reinterpret pattern).
+    ShaderModuleValidationCacheCreateInfoEXT & operator=( VkShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShaderModuleValidationCacheCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable builder-style setters (return *this).
+    VULKAN_HPP_CONSTEXPR_14 ShaderModuleValidationCacheCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ShaderModuleValidationCacheCreateInfoEXT & setValidationCache( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache_ ) VULKAN_HPP_NOEXCEPT
+    {
+      validationCache = validationCache_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the underlying C struct for passing to the C API.
+    operator VkShaderModuleValidationCacheCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkShaderModuleValidationCacheCreateInfoEXT*>( this );
+    }
+
+    operator VkShaderModuleValidationCacheCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkShaderModuleValidationCacheCreateInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Reflection helper: tuple of const references to all members, in
+    // declaration order. C++14 can deduce the return type; C++11 spells it out.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ValidationCacheEXT const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, validationCache );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ShaderModuleValidationCacheCreateInfoEXT const & ) const = default;
+#else
+    // Fallback comparison for pre-C++20: memberwise equality (pNext is compared
+    // as a raw pointer, not deep-compared).
+    bool operator==( ShaderModuleValidationCacheCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( validationCache == rhs.validationCache );
+#endif
+    }
+
+    bool operator!=( ShaderModuleValidationCacheCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eShaderModuleValidationCacheCreateInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache = {};
+
+  };
+
+  // Trait mapping the sType enumerant back to this wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eShaderModuleValidationCacheCreateInfoEXT>
+  {
+    using Type = ShaderModuleValidationCacheCreateInfoEXT;
+  };
+
+  // C++ wrapper for VkShaderResourceUsageAMD: per-shader resource usage counts
+  // (VGPRs, SGPRs, LDS per local workgroup, LDS bytes, scratch bytes). Unlike
+  // the sType/pNext structs, this one has no chain header and no setters,
+  // consistent with a result struct that is only read back from the driver.
+  // NOTE(review): presumably filled by the VK_AMD_shader_info query — confirm.
+  struct ShaderResourceUsageAMD
+  {
+    using NativeType = VkShaderResourceUsageAMD;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ShaderResourceUsageAMD(uint32_t numUsedVgprs_ = {}, uint32_t numUsedSgprs_ = {}, uint32_t ldsSizePerLocalWorkGroup_ = {}, size_t ldsUsageSizeInBytes_ = {}, size_t scratchMemUsageInBytes_ = {}) VULKAN_HPP_NOEXCEPT
+    : numUsedVgprs( numUsedVgprs_ ), numUsedSgprs( numUsedSgprs_ ), ldsSizePerLocalWorkGroup( ldsSizePerLocalWorkGroup_ ), ldsUsageSizeInBytes( ldsUsageSizeInBytes_ ), scratchMemUsageInBytes( scratchMemUsageInBytes_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ShaderResourceUsageAMD( ShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the layout-compatible C struct.
+    ShaderResourceUsageAMD( VkShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ShaderResourceUsageAMD( *reinterpret_cast<ShaderResourceUsageAMD const *>( &rhs ) )
+    {}
+
+
+    ShaderResourceUsageAMD & operator=( ShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ShaderResourceUsageAMD & operator=( VkShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD const *>( &rhs );
+      return *this;
+    }
+
+
+    // Implicit conversions to/from the C struct for interop with the C API.
+    operator VkShaderResourceUsageAMD const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkShaderResourceUsageAMD*>( this );
+    }
+
+    operator VkShaderResourceUsageAMD &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkShaderResourceUsageAMD*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Reflection helper: all members as a tuple of const references.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<uint32_t const &, uint32_t const &, uint32_t const &, size_t const &, size_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( numUsedVgprs, numUsedSgprs, ldsSizePerLocalWorkGroup, ldsUsageSizeInBytes, scratchMemUsageInBytes );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ShaderResourceUsageAMD const & ) const = default;
+#else
+    // Pre-C++20 fallback: memberwise equality.
+    bool operator==( ShaderResourceUsageAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( numUsedVgprs == rhs.numUsedVgprs )
+          && ( numUsedSgprs == rhs.numUsedSgprs )
+          && ( ldsSizePerLocalWorkGroup == rhs.ldsSizePerLocalWorkGroup )
+          && ( ldsUsageSizeInBytes == rhs.ldsUsageSizeInBytes )
+          && ( scratchMemUsageInBytes == rhs.scratchMemUsageInBytes );
+#endif
+    }
+
+    bool operator!=( ShaderResourceUsageAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    uint32_t numUsedVgprs = {};
+    uint32_t numUsedSgprs = {};
+    uint32_t ldsSizePerLocalWorkGroup = {};
+    size_t ldsUsageSizeInBytes = {};
+    size_t scratchMemUsageInBytes = {};
+
+  };
+
+  // C++ wrapper for VkShaderStatisticsInfoAMD: per-stage shader statistics
+  // (stage mask, a nested ShaderResourceUsageAMD, physical/available VGPR and
+  // SGPR counts, and the compute workgroup size). No sType/pNext header and no
+  // setters — a read-back struct. Constructors are VULKAN_HPP_CONSTEXPR_14
+  // (not plain CONSTEXPR) because initializing the ArrayWrapper1D member from
+  // a std::array is not a C++11 constant expression.
+  struct ShaderStatisticsInfoAMD
+  {
+    using NativeType = VkShaderStatisticsInfoAMD;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR_14 ShaderStatisticsInfoAMD(VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStageMask_ = {}, VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD resourceUsage_ = {}, uint32_t numPhysicalVgprs_ = {}, uint32_t numPhysicalSgprs_ = {}, uint32_t numAvailableVgprs_ = {}, uint32_t numAvailableSgprs_ = {}, std::array<uint32_t,3> const & computeWorkGroupSize_ = {}) VULKAN_HPP_NOEXCEPT
+    : shaderStageMask( shaderStageMask_ ), resourceUsage( resourceUsage_ ), numPhysicalVgprs( numPhysicalVgprs_ ), numPhysicalSgprs( numPhysicalSgprs_ ), numAvailableVgprs( numAvailableVgprs_ ), numAvailableSgprs( numAvailableSgprs_ ), computeWorkGroupSize( computeWorkGroupSize_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 ShaderStatisticsInfoAMD( ShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the layout-compatible C struct.
+    ShaderStatisticsInfoAMD( VkShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ShaderStatisticsInfoAMD( *reinterpret_cast<ShaderStatisticsInfoAMD const *>( &rhs ) )
+    {}
+
+
+    ShaderStatisticsInfoAMD & operator=( ShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ShaderStatisticsInfoAMD & operator=( VkShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShaderStatisticsInfoAMD const *>( &rhs );
+      return *this;
+    }
+
+
+    // Implicit conversions to the underlying C struct.
+    operator VkShaderStatisticsInfoAMD const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkShaderStatisticsInfoAMD*>( this );
+    }
+
+    operator VkShaderStatisticsInfoAMD &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkShaderStatisticsInfoAMD*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Reflection helper: all members as a tuple of const references.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::ShaderStageFlags const &, VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( shaderStageMask, resourceUsage, numPhysicalVgprs, numPhysicalSgprs, numAvailableVgprs, numAvailableSgprs, computeWorkGroupSize );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ShaderStatisticsInfoAMD const & ) const = default;
+#else
+    // Pre-C++20 fallback: memberwise equality (nested structs/arrays compare
+    // via their own operator==).
+    bool operator==( ShaderStatisticsInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( shaderStageMask == rhs.shaderStageMask )
+          && ( resourceUsage == rhs.resourceUsage )
+          && ( numPhysicalVgprs == rhs.numPhysicalVgprs )
+          && ( numPhysicalSgprs == rhs.numPhysicalSgprs )
+          && ( numAvailableVgprs == rhs.numAvailableVgprs )
+          && ( numAvailableSgprs == rhs.numAvailableSgprs )
+          && ( computeWorkGroupSize == rhs.computeWorkGroupSize );
+#endif
+    }
+
+    bool operator!=( ShaderStatisticsInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStageMask = {};
+    VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD resourceUsage = {};
+    uint32_t numPhysicalVgprs = {};
+    uint32_t numPhysicalSgprs = {};
+    uint32_t numAvailableVgprs = {};
+    uint32_t numAvailableSgprs = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> computeWorkGroupSize = {};
+
+  };
+
+  // C++ wrapper for VkSharedPresentSurfaceCapabilitiesKHR. Carries the image
+  // usage flags supported for shared-present swapchain images. pNext is
+  // non-const (void *) and there are no setters, consistent with a struct the
+  // implementation writes into during a capabilities query.
+  // NOTE(review): presumably returned via the pNext chain of
+  // VkSurfaceCapabilities2KHR (VK_KHR_shared_presentable_image) — confirm.
+  struct SharedPresentSurfaceCapabilitiesKHR
+  {
+    using NativeType = VkSharedPresentSurfaceCapabilitiesKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSharedPresentSurfaceCapabilitiesKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR SharedPresentSurfaceCapabilitiesKHR(VULKAN_HPP_NAMESPACE::ImageUsageFlags sharedPresentSupportedUsageFlags_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), sharedPresentSupportedUsageFlags( sharedPresentSupportedUsageFlags_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SharedPresentSurfaceCapabilitiesKHR( SharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the layout-compatible C struct.
+    SharedPresentSurfaceCapabilitiesKHR( VkSharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SharedPresentSurfaceCapabilitiesKHR( *reinterpret_cast<SharedPresentSurfaceCapabilitiesKHR const *>( &rhs ) )
+    {}
+
+
+    SharedPresentSurfaceCapabilitiesKHR & operator=( SharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SharedPresentSurfaceCapabilitiesKHR & operator=( VkSharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SharedPresentSurfaceCapabilitiesKHR const *>( &rhs );
+      return *this;
+    }
+
+
+    // Implicit conversions to the underlying C struct.
+    operator VkSharedPresentSurfaceCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSharedPresentSurfaceCapabilitiesKHR*>( this );
+    }
+
+    operator VkSharedPresentSurfaceCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSharedPresentSurfaceCapabilitiesKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Reflection helper: all members as a tuple of const references.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ImageUsageFlags const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, sharedPresentSupportedUsageFlags );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SharedPresentSurfaceCapabilitiesKHR const & ) const = default;
+#else
+    // Pre-C++20 fallback: memberwise equality (pNext compared as raw pointer).
+    bool operator==( SharedPresentSurfaceCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( sharedPresentSupportedUsageFlags == rhs.sharedPresentSupportedUsageFlags );
+#endif
+    }
+
+    bool operator!=( SharedPresentSurfaceCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSharedPresentSurfaceCapabilitiesKHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ImageUsageFlags sharedPresentSupportedUsageFlags = {};
+
+  };
+
+  // Trait mapping the sType enumerant back to this wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eSharedPresentSurfaceCapabilitiesKHR>
+  {
+    using Type = SharedPresentSurfaceCapabilitiesKHR;
+  };
+
+  // C++ wrapper for VkSparseImageFormatProperties: per-aspect sparse-image
+  // properties (aspect mask, tile granularity extent, sparse-format flags).
+  // Plain data struct without an sType/pNext header and without setters.
+  struct SparseImageFormatProperties
+  {
+    using NativeType = VkSparseImageFormatProperties;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR SparseImageFormatProperties(VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, VULKAN_HPP_NAMESPACE::Extent3D imageGranularity_ = {}, VULKAN_HPP_NAMESPACE::SparseImageFormatFlags flags_ = {}) VULKAN_HPP_NOEXCEPT
+    : aspectMask( aspectMask_ ), imageGranularity( imageGranularity_ ), flags( flags_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SparseImageFormatProperties( SparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the layout-compatible C struct.
+    SparseImageFormatProperties( VkSparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SparseImageFormatProperties( *reinterpret_cast<SparseImageFormatProperties const *>( &rhs ) )
+    {}
+
+
+    SparseImageFormatProperties & operator=( SparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SparseImageFormatProperties & operator=( VkSparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties const *>( &rhs );
+      return *this;
+    }
+
+
+    // Implicit conversions to the underlying C struct.
+    operator VkSparseImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSparseImageFormatProperties*>( this );
+    }
+
+    operator VkSparseImageFormatProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSparseImageFormatProperties*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Reflection helper: all members as a tuple of const references.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::ImageAspectFlags const &, VULKAN_HPP_NAMESPACE::Extent3D const &, VULKAN_HPP_NAMESPACE::SparseImageFormatFlags const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( aspectMask, imageGranularity, flags );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SparseImageFormatProperties const & ) const = default;
+#else
+    // Pre-C++20 fallback: memberwise equality.
+    bool operator==( SparseImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( aspectMask == rhs.aspectMask )
+          && ( imageGranularity == rhs.imageGranularity )
+          && ( flags == rhs.flags );
+#endif
+    }
+
+    bool operator!=( SparseImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
+    VULKAN_HPP_NAMESPACE::Extent3D imageGranularity = {};
+    VULKAN_HPP_NAMESPACE::SparseImageFormatFlags flags = {};
+
+  };
+
+  // C++ wrapper for VkSparseImageFormatProperties2: the extensible ("2")
+  // variant that wraps SparseImageFormatProperties in an sType/pNext header.
+  // pNext is non-const (void *) and there are no setters — an output struct.
+  struct SparseImageFormatProperties2
+  {
+    using NativeType = VkSparseImageFormatProperties2;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSparseImageFormatProperties2;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR SparseImageFormatProperties2(VULKAN_HPP_NAMESPACE::SparseImageFormatProperties properties_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), properties( properties_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SparseImageFormatProperties2( SparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the layout-compatible C struct.
+    SparseImageFormatProperties2( VkSparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SparseImageFormatProperties2( *reinterpret_cast<SparseImageFormatProperties2 const *>( &rhs ) )
+    {}
+
+
+    SparseImageFormatProperties2 & operator=( SparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SparseImageFormatProperties2 & operator=( VkSparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 const *>( &rhs );
+      return *this;
+    }
+
+
+    // Implicit conversions to the underlying C struct.
+    operator VkSparseImageFormatProperties2 const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSparseImageFormatProperties2*>( this );
+    }
+
+    operator VkSparseImageFormatProperties2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSparseImageFormatProperties2*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Reflection helper: all members as a tuple of const references.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, properties );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SparseImageFormatProperties2 const & ) const = default;
+#else
+    // Pre-C++20 fallback: memberwise equality (pNext compared as raw pointer).
+    bool operator==( SparseImageFormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( properties == rhs.properties );
+#endif
+    }
+
+    bool operator!=( SparseImageFormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSparseImageFormatProperties2;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::SparseImageFormatProperties properties = {};
+
+  };
+
+  // Trait mapping the sType enumerant back to this wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eSparseImageFormatProperties2>
+  {
+    using Type = SparseImageFormatProperties2;
+  };
+  // Alias kept for code written against the KHR extension name before promotion.
+  using SparseImageFormatProperties2KHR = SparseImageFormatProperties2;
+
+  // C++ wrapper for VkSparseImageMemoryRequirements: sparse binding
+  // requirements for an image (nested format properties plus mip-tail
+  // first-LOD, size, offset, and stride). Plain data struct, no sType/pNext.
+  struct SparseImageMemoryRequirements
+  {
+    using NativeType = VkSparseImageMemoryRequirements;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements(VULKAN_HPP_NAMESPACE::SparseImageFormatProperties formatProperties_ = {}, uint32_t imageMipTailFirstLod_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailSize_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailStride_ = {}) VULKAN_HPP_NOEXCEPT
+    : formatProperties( formatProperties_ ), imageMipTailFirstLod( imageMipTailFirstLod_ ), imageMipTailSize( imageMipTailSize_ ), imageMipTailOffset( imageMipTailOffset_ ), imageMipTailStride( imageMipTailStride_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements( SparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the layout-compatible C struct.
+    SparseImageMemoryRequirements( VkSparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SparseImageMemoryRequirements( *reinterpret_cast<SparseImageMemoryRequirements const *>( &rhs ) )
+    {}
+
+
+    SparseImageMemoryRequirements & operator=( SparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SparseImageMemoryRequirements & operator=( VkSparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements const *>( &rhs );
+      return *this;
+    }
+
+
+    // Implicit conversions to the underlying C struct.
+    operator VkSparseImageMemoryRequirements const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSparseImageMemoryRequirements*>( this );
+    }
+
+    operator VkSparseImageMemoryRequirements &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSparseImageMemoryRequirements*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Reflection helper: all members as a tuple of const references.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( formatProperties, imageMipTailFirstLod, imageMipTailSize, imageMipTailOffset, imageMipTailStride );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SparseImageMemoryRequirements const & ) const = default;
+#else
+    // Pre-C++20 fallback: memberwise equality.
+    bool operator==( SparseImageMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( formatProperties == rhs.formatProperties )
+          && ( imageMipTailFirstLod == rhs.imageMipTailFirstLod )
+          && ( imageMipTailSize == rhs.imageMipTailSize )
+          && ( imageMipTailOffset == rhs.imageMipTailOffset )
+          && ( imageMipTailStride == rhs.imageMipTailStride );
+#endif
+    }
+
+    bool operator!=( SparseImageMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::SparseImageFormatProperties formatProperties = {};
+    uint32_t imageMipTailFirstLod = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailSize = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailOffset = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailStride = {};
+
+  };
+
+  // C++ wrapper for VkSparseImageMemoryRequirements2: the extensible ("2")
+  // variant that wraps SparseImageMemoryRequirements in an sType/pNext header.
+  // pNext is non-const (void *) and there are no setters — an output struct.
+  struct SparseImageMemoryRequirements2
+  {
+    using NativeType = VkSparseImageMemoryRequirements2;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSparseImageMemoryRequirements2;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements2(VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements memoryRequirements_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), memoryRequirements( memoryRequirements_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements2( SparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the layout-compatible C struct.
+    SparseImageMemoryRequirements2( VkSparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SparseImageMemoryRequirements2( *reinterpret_cast<SparseImageMemoryRequirements2 const *>( &rhs ) )
+    {}
+
+
+    SparseImageMemoryRequirements2 & operator=( SparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SparseImageMemoryRequirements2 & operator=( VkSparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 const *>( &rhs );
+      return *this;
+    }
+
+
+    // Implicit conversions to the underlying C struct.
+    operator VkSparseImageMemoryRequirements2 const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSparseImageMemoryRequirements2*>( this );
+    }
+
+    operator VkSparseImageMemoryRequirements2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSparseImageMemoryRequirements2*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Reflection helper: all members as a tuple of const references.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, memoryRequirements );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SparseImageMemoryRequirements2 const & ) const = default;
+#else
+    // Pre-C++20 fallback: memberwise equality (pNext compared as raw pointer).
+    bool operator==( SparseImageMemoryRequirements2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memoryRequirements == rhs.memoryRequirements );
+#endif
+    }
+
+    bool operator!=( SparseImageMemoryRequirements2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSparseImageMemoryRequirements2;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements memoryRequirements = {};
+
+  };
+
+  // Trait mapping the sType enumerant back to this wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eSparseImageMemoryRequirements2>
+  {
+    using Type = SparseImageMemoryRequirements2;
+  };
+  // Alias kept for code written against the KHR extension name before promotion.
+  using SparseImageMemoryRequirements2KHR = SparseImageMemoryRequirements2;
+
+#if defined( VK_USE_PLATFORM_GGP )
+  // C++ wrapper for VkStreamDescriptorSurfaceCreateInfoGGP (Google Games
+  // Platform surface creation). Unlike the other structs in this file, its
+  // comparisons are hand-written: GgpStreamDescriptor is a platform type
+  // compared bytewise with memcmp — presumably because it provides no
+  // comparison operators of its own (NOTE(review): confirm against the GGP
+  // platform headers).
+  struct StreamDescriptorSurfaceCreateInfoGGP
+  {
+    using NativeType = VkStreamDescriptorSurfaceCreateInfoGGP;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eStreamDescriptorSurfaceCreateInfoGGP;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR StreamDescriptorSurfaceCreateInfoGGP(VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags_ = {}, GgpStreamDescriptor streamDescriptor_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), streamDescriptor( streamDescriptor_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR StreamDescriptorSurfaceCreateInfoGGP( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the layout-compatible C struct.
+    StreamDescriptorSurfaceCreateInfoGGP( VkStreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT
+      : StreamDescriptorSurfaceCreateInfoGGP( *reinterpret_cast<StreamDescriptorSurfaceCreateInfoGGP const *>( &rhs ) )
+    {}
+
+
+    StreamDescriptorSurfaceCreateInfoGGP & operator=( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    StreamDescriptorSurfaceCreateInfoGGP & operator=( VkStreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable builder-style setters (return *this).
+    VULKAN_HPP_CONSTEXPR_14 StreamDescriptorSurfaceCreateInfoGGP & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 StreamDescriptorSurfaceCreateInfoGGP & setFlags( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 StreamDescriptorSurfaceCreateInfoGGP & setStreamDescriptor( GgpStreamDescriptor streamDescriptor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      streamDescriptor = streamDescriptor_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the underlying C struct.
+    operator VkStreamDescriptorSurfaceCreateInfoGGP const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP*>( this );
+    }
+
+    operator VkStreamDescriptorSurfaceCreateInfoGGP &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkStreamDescriptorSurfaceCreateInfoGGP*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Reflection helper: all members as a tuple of const references.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP const &, GgpStreamDescriptor const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, streamDescriptor );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    // Hand-written three-way comparison: cannot be defaulted because the
+    // streamDescriptor member is ordered by raw bytes via memcmp; the memcmp
+    // sign is mapped onto std::strong_ordering.
+    std::strong_ordering operator<=>( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
+      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
+      if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) return cmp;
+      if ( auto cmp = memcmp( &streamDescriptor, &rhs.streamDescriptor, sizeof( GgpStreamDescriptor ) ); cmp != 0 )
+        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
+
+      return std::strong_ordering::equivalent;
+    }
+#endif
+
+    // Equality also uses bytewise comparison for streamDescriptor; defined
+    // unconditionally (not in an #else) since <=> above is not defaulted.
+    bool operator==( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( memcmp( &streamDescriptor, &rhs.streamDescriptor, sizeof( GgpStreamDescriptor ) ) == 0 );
+    }
+
+    bool operator!=( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eStreamDescriptorSurfaceCreateInfoGGP;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags = {};
+    GgpStreamDescriptor streamDescriptor = {};
+
+  };
+
+  // Trait mapping the sType enumerant back to this wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eStreamDescriptorSurfaceCreateInfoGGP>
+  {
+    using Type = StreamDescriptorSurfaceCreateInfoGGP;
+  };
+#endif /*VK_USE_PLATFORM_GGP*/
+
+  struct StridedDeviceAddressRegionKHR
+  {
+    using NativeType = VkStridedDeviceAddressRegionKHR;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR StridedDeviceAddressRegionKHR(VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize stride_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}) VULKAN_HPP_NOEXCEPT
+    : deviceAddress( deviceAddress_ ), stride( stride_ ), size( size_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR StridedDeviceAddressRegionKHR( StridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    StridedDeviceAddressRegionKHR( VkStridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : StridedDeviceAddressRegionKHR( *reinterpret_cast<StridedDeviceAddressRegionKHR const *>( &rhs ) )
+    {}
+
+
+    StridedDeviceAddressRegionKHR & operator=( StridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    StridedDeviceAddressRegionKHR & operator=( VkStridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressRegionKHR & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceAddress = deviceAddress_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressRegionKHR & setStride( VULKAN_HPP_NAMESPACE::DeviceSize stride_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stride = stride_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressRegionKHR & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
+    {
+      size = size_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkStridedDeviceAddressRegionKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkStridedDeviceAddressRegionKHR*>( this );
+    }
+
+    operator VkStridedDeviceAddressRegionKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkStridedDeviceAddressRegionKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::DeviceAddress const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( deviceAddress, stride, size );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( StridedDeviceAddressRegionKHR const & ) const = default;
+#else
+    bool operator==( StridedDeviceAddressRegionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( deviceAddress == rhs.deviceAddress )
+          && ( stride == rhs.stride )
+          && ( size == rhs.size );
+#endif
+    }
+
+    bool operator!=( StridedDeviceAddressRegionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize stride = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+
+  };
+
+  struct SubmitInfo
+  {
+    using NativeType = VkSubmitInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubmitInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR SubmitInfo(uint32_t waitSemaphoreCount_ = {}, const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ = {}, const VULKAN_HPP_NAMESPACE::PipelineStageFlags * pWaitDstStageMask_ = {}, uint32_t commandBufferCount_ = {}, const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers_ = {}, uint32_t signalSemaphoreCount_ = {}, const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), waitSemaphoreCount( waitSemaphoreCount_ ), pWaitSemaphores( pWaitSemaphores_ ), pWaitDstStageMask( pWaitDstStageMask_ ), commandBufferCount( commandBufferCount_ ), pCommandBuffers( pCommandBuffers_ ), signalSemaphoreCount( signalSemaphoreCount_ ), pSignalSemaphores( pSignalSemaphores_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SubmitInfo( SubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SubmitInfo( VkSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SubmitInfo( *reinterpret_cast<SubmitInfo const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    SubmitInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & waitSemaphores_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineStageFlags> const & waitDstStageMask_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & signalSemaphores_ = {}, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), waitSemaphoreCount( static_cast<uint32_t>( waitSemaphores_.size() ) ), pWaitSemaphores( waitSemaphores_.data() ), pWaitDstStageMask( waitDstStageMask_.data() ), commandBufferCount( static_cast<uint32_t>( commandBuffers_.size() ) ), pCommandBuffers( commandBuffers_.data() ), signalSemaphoreCount( static_cast<uint32_t>( signalSemaphores_.size() ) ), pSignalSemaphores( signalSemaphores_.data() )
+    {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+      VULKAN_HPP_ASSERT( waitSemaphores_.size() == waitDstStageMask_.size() );
+#else
+      if ( waitSemaphores_.size() != waitDstStageMask_.size() )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING"::SubmitInfo::SubmitInfo: waitSemaphores_.size() != waitDstStageMask_.size()" );
+      }
+#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    SubmitInfo & operator=( SubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SubmitInfo & operator=( VkSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubmitInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      waitSemaphoreCount = waitSemaphoreCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pWaitSemaphores = pWaitSemaphores_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    SubmitInfo & setWaitSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & waitSemaphores_ ) VULKAN_HPP_NOEXCEPT
+    {
+      waitSemaphoreCount = static_cast<uint32_t>( waitSemaphores_.size() );
+      pWaitSemaphores = waitSemaphores_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setPWaitDstStageMask( const VULKAN_HPP_NAMESPACE::PipelineStageFlags * pWaitDstStageMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pWaitDstStageMask = pWaitDstStageMask_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    SubmitInfo & setWaitDstStageMask( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineStageFlags> const & waitDstStageMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      waitSemaphoreCount = static_cast<uint32_t>( waitDstStageMask_.size() );
+      pWaitDstStageMask = waitDstStageMask_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      commandBufferCount = commandBufferCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setPCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pCommandBuffers = pCommandBuffers_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    SubmitInfo & setCommandBuffers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      commandBufferCount = static_cast<uint32_t>( commandBuffers_.size() );
+      pCommandBuffers = commandBuffers_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      signalSemaphoreCount = signalSemaphoreCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setPSignalSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSignalSemaphores = pSignalSemaphores_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    SubmitInfo & setSignalSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & signalSemaphores_ ) VULKAN_HPP_NOEXCEPT
+    {
+      signalSemaphoreCount = static_cast<uint32_t>( signalSemaphores_.size() );
+      pSignalSemaphores = signalSemaphores_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkSubmitInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSubmitInfo*>( this );
+    }
+
+    operator VkSubmitInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSubmitInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Semaphore * const &, const VULKAN_HPP_NAMESPACE::PipelineStageFlags * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::CommandBuffer * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Semaphore * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, waitSemaphoreCount, pWaitSemaphores, pWaitDstStageMask, commandBufferCount, pCommandBuffers, signalSemaphoreCount, pSignalSemaphores );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SubmitInfo const & ) const = default;
+#else
+    bool operator==( SubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( waitSemaphoreCount == rhs.waitSemaphoreCount )
+          && ( pWaitSemaphores == rhs.pWaitSemaphores )
+          && ( pWaitDstStageMask == rhs.pWaitDstStageMask )
+          && ( commandBufferCount == rhs.commandBufferCount )
+          && ( pCommandBuffers == rhs.pCommandBuffers )
+          && ( signalSemaphoreCount == rhs.signalSemaphoreCount )
+          && ( pSignalSemaphores == rhs.pSignalSemaphores );
+#endif
+    }
+
+    bool operator!=( SubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubmitInfo;
+    const void * pNext = {};
+    uint32_t waitSemaphoreCount = {};
+    const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores = {};
+    const VULKAN_HPP_NAMESPACE::PipelineStageFlags * pWaitDstStageMask = {};
+    uint32_t commandBufferCount = {};
+    const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers = {};
+    uint32_t signalSemaphoreCount = {};
+    const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eSubmitInfo>
+  {
+    using Type = SubmitInfo;
+  };
+
+  struct SubmitInfo2
+  {
+    using NativeType = VkSubmitInfo2;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubmitInfo2;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR SubmitInfo2(VULKAN_HPP_NAMESPACE::SubmitFlags flags_ = {}, uint32_t waitSemaphoreInfoCount_ = {}, const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pWaitSemaphoreInfos_ = {}, uint32_t commandBufferInfoCount_ = {}, const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo * pCommandBufferInfos_ = {}, uint32_t signalSemaphoreInfoCount_ = {}, const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pSignalSemaphoreInfos_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), waitSemaphoreInfoCount( waitSemaphoreInfoCount_ ), pWaitSemaphoreInfos( pWaitSemaphoreInfos_ ), commandBufferInfoCount( commandBufferInfoCount_ ), pCommandBufferInfos( pCommandBufferInfos_ ), signalSemaphoreInfoCount( signalSemaphoreInfoCount_ ), pSignalSemaphoreInfos( pSignalSemaphoreInfos_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SubmitInfo2( SubmitInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SubmitInfo2( VkSubmitInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SubmitInfo2( *reinterpret_cast<SubmitInfo2 const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    SubmitInfo2( VULKAN_HPP_NAMESPACE::SubmitFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo> const & waitSemaphoreInfos_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo> const & commandBufferInfos_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo> const & signalSemaphoreInfos_ = {}, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), flags( flags_ ), waitSemaphoreInfoCount( static_cast<uint32_t>( waitSemaphoreInfos_.size() ) ), pWaitSemaphoreInfos( waitSemaphoreInfos_.data() ), commandBufferInfoCount( static_cast<uint32_t>( commandBufferInfos_.size() ) ), pCommandBufferInfos( commandBufferInfos_.data() ), signalSemaphoreInfoCount( static_cast<uint32_t>( signalSemaphoreInfos_.size() ) ), pSignalSemaphoreInfos( signalSemaphoreInfos_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    SubmitInfo2 & operator=( SubmitInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SubmitInfo2 & operator=( VkSubmitInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubmitInfo2 const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setFlags( VULKAN_HPP_NAMESPACE::SubmitFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setWaitSemaphoreInfoCount( uint32_t waitSemaphoreInfoCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      waitSemaphoreInfoCount = waitSemaphoreInfoCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setPWaitSemaphoreInfos( const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pWaitSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pWaitSemaphoreInfos = pWaitSemaphoreInfos_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    SubmitInfo2 & setWaitSemaphoreInfos( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo> const & waitSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT
+    {
+      waitSemaphoreInfoCount = static_cast<uint32_t>( waitSemaphoreInfos_.size() );
+      pWaitSemaphoreInfos = waitSemaphoreInfos_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setCommandBufferInfoCount( uint32_t commandBufferInfoCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      commandBufferInfoCount = commandBufferInfoCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setPCommandBufferInfos( const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo * pCommandBufferInfos_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pCommandBufferInfos = pCommandBufferInfos_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    SubmitInfo2 & setCommandBufferInfos( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo> const & commandBufferInfos_ ) VULKAN_HPP_NOEXCEPT
+    {
+      commandBufferInfoCount = static_cast<uint32_t>( commandBufferInfos_.size() );
+      pCommandBufferInfos = commandBufferInfos_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setSignalSemaphoreInfoCount( uint32_t signalSemaphoreInfoCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      signalSemaphoreInfoCount = signalSemaphoreInfoCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setPSignalSemaphoreInfos( const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pSignalSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSignalSemaphoreInfos = pSignalSemaphoreInfos_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    SubmitInfo2 & setSignalSemaphoreInfos( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo> const & signalSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT
+    {
+      signalSemaphoreInfoCount = static_cast<uint32_t>( signalSemaphoreInfos_.size() );
+      pSignalSemaphoreInfos = signalSemaphoreInfos_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkSubmitInfo2 const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSubmitInfo2*>( this );
+    }
+
+    operator VkSubmitInfo2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSubmitInfo2*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SubmitFlags const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, waitSemaphoreInfoCount, pWaitSemaphoreInfos, commandBufferInfoCount, pCommandBufferInfos, signalSemaphoreInfoCount, pSignalSemaphoreInfos );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SubmitInfo2 const & ) const = default;
+#else
+    bool operator==( SubmitInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( waitSemaphoreInfoCount == rhs.waitSemaphoreInfoCount )
+          && ( pWaitSemaphoreInfos == rhs.pWaitSemaphoreInfos )
+          && ( commandBufferInfoCount == rhs.commandBufferInfoCount )
+          && ( pCommandBufferInfos == rhs.pCommandBufferInfos )
+          && ( signalSemaphoreInfoCount == rhs.signalSemaphoreInfoCount )
+          && ( pSignalSemaphoreInfos == rhs.pSignalSemaphoreInfos );
+#endif
+    }
+
+    bool operator!=( SubmitInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubmitInfo2;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::SubmitFlags flags = {};
+    uint32_t waitSemaphoreInfoCount = {};
+    const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pWaitSemaphoreInfos = {};
+    uint32_t commandBufferInfoCount = {};
+    const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo * pCommandBufferInfos = {};
+    uint32_t signalSemaphoreInfoCount = {};
+    const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pSignalSemaphoreInfos = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eSubmitInfo2>
+  {
+    using Type = SubmitInfo2;
+  };
+  using SubmitInfo2KHR = SubmitInfo2;
+
+  struct SubpassBeginInfo
+  {
+    using NativeType = VkSubpassBeginInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassBeginInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR SubpassBeginInfo(VULKAN_HPP_NAMESPACE::SubpassContents contents_ = VULKAN_HPP_NAMESPACE::SubpassContents::eInline, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), contents( contents_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SubpassBeginInfo( SubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SubpassBeginInfo( VkSubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SubpassBeginInfo( *reinterpret_cast<SubpassBeginInfo const *>( &rhs ) )
+    {}
+
+
+    SubpassBeginInfo & operator=( SubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SubpassBeginInfo & operator=( VkSubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassBeginInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SubpassBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SubpassBeginInfo & setContents( VULKAN_HPP_NAMESPACE::SubpassContents contents_ ) VULKAN_HPP_NOEXCEPT
+    {
+      contents = contents_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkSubpassBeginInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSubpassBeginInfo*>( this );
+    }
+
+    operator VkSubpassBeginInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSubpassBeginInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SubpassContents const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, contents );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SubpassBeginInfo const & ) const = default;
+#else
+    bool operator==( SubpassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( contents == rhs.contents );
+#endif
+    }
+
+    bool operator!=( SubpassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassBeginInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::SubpassContents contents = VULKAN_HPP_NAMESPACE::SubpassContents::eInline;
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eSubpassBeginInfo>
+  {
+    using Type = SubpassBeginInfo;
+  };
+  using SubpassBeginInfoKHR = SubpassBeginInfo;
+
+  struct SubpassDescriptionDepthStencilResolve
+  {
+    using NativeType = VkSubpassDescriptionDepthStencilResolve;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassDescriptionDepthStencilResolve;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR SubpassDescriptionDepthStencilResolve(VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone, VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone, const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilResolveAttachment_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), depthResolveMode( depthResolveMode_ ), stencilResolveMode( stencilResolveMode_ ), pDepthStencilResolveAttachment( pDepthStencilResolveAttachment_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SubpassDescriptionDepthStencilResolve( SubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SubpassDescriptionDepthStencilResolve( VkSubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SubpassDescriptionDepthStencilResolve( *reinterpret_cast<SubpassDescriptionDepthStencilResolve const *>( &rhs ) )
+    {}
+
+
+    SubpassDescriptionDepthStencilResolve & operator=( SubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SubpassDescriptionDepthStencilResolve & operator=( VkSubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDescriptionDepthStencilResolve const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SubpassDescriptionDepthStencilResolve & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SubpassDescriptionDepthStencilResolve & setDepthResolveMode( VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthResolveMode = depthResolveMode_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SubpassDescriptionDepthStencilResolve & setStencilResolveMode( VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stencilResolveMode = stencilResolveMode_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SubpassDescriptionDepthStencilResolve & setPDepthStencilResolveAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilResolveAttachment_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDepthStencilResolveAttachment = pDepthStencilResolveAttachment_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkSubpassDescriptionDepthStencilResolve const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSubpassDescriptionDepthStencilResolve*>( this );
+    }
+
+    operator VkSubpassDescriptionDepthStencilResolve &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSubpassDescriptionDepthStencilResolve*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ResolveModeFlagBits const &, VULKAN_HPP_NAMESPACE::ResolveModeFlagBits const &, const VULKAN_HPP_NAMESPACE::AttachmentReference2 * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, depthResolveMode, stencilResolveMode, pDepthStencilResolveAttachment );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SubpassDescriptionDepthStencilResolve const & ) const = default;
+#else
+    bool operator==( SubpassDescriptionDepthStencilResolve const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( depthResolveMode == rhs.depthResolveMode )
+          && ( stencilResolveMode == rhs.stencilResolveMode )
+          && ( pDepthStencilResolveAttachment == rhs.pDepthStencilResolveAttachment );
+#endif
+    }
+
+    bool operator!=( SubpassDescriptionDepthStencilResolve const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDescriptionDepthStencilResolve;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone;
+    VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone;
+    const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilResolveAttachment = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eSubpassDescriptionDepthStencilResolve>
+  {
+    using Type = SubpassDescriptionDepthStencilResolve;
+  };
+  using SubpassDescriptionDepthStencilResolveKHR = SubpassDescriptionDepthStencilResolve;
+
+  struct SubpassEndInfo
+  {
+    using NativeType = VkSubpassEndInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassEndInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR SubpassEndInfo(const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SubpassEndInfo( SubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SubpassEndInfo( VkSubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SubpassEndInfo( *reinterpret_cast<SubpassEndInfo const *>( &rhs ) )
+    {}
+
+
+    SubpassEndInfo & operator=( SubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SubpassEndInfo & operator=( VkSubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassEndInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SubpassEndInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkSubpassEndInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSubpassEndInfo*>( this );
+    }
+
+    operator VkSubpassEndInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSubpassEndInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SubpassEndInfo const & ) const = default;
+#else
+    bool operator==( SubpassEndInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext );
+#endif
+    }
+
+    bool operator!=( SubpassEndInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassEndInfo;
+    const void * pNext = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eSubpassEndInfo>
+  {
+    using Type = SubpassEndInfo;
+  };
+  using SubpassEndInfoKHR = SubpassEndInfo;
+
+  struct SubpassFragmentDensityMapOffsetEndInfoQCOM
+  {
+    using NativeType = VkSubpassFragmentDensityMapOffsetEndInfoQCOM;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassFragmentDensityMapOffsetEndInfoQCOM;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR SubpassFragmentDensityMapOffsetEndInfoQCOM(uint32_t fragmentDensityOffsetCount_ = {}, const VULKAN_HPP_NAMESPACE::Offset2D * pFragmentDensityOffsets_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), fragmentDensityOffsetCount( fragmentDensityOffsetCount_ ), pFragmentDensityOffsets( pFragmentDensityOffsets_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SubpassFragmentDensityMapOffsetEndInfoQCOM( SubpassFragmentDensityMapOffsetEndInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SubpassFragmentDensityMapOffsetEndInfoQCOM( VkSubpassFragmentDensityMapOffsetEndInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SubpassFragmentDensityMapOffsetEndInfoQCOM( *reinterpret_cast<SubpassFragmentDensityMapOffsetEndInfoQCOM const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    SubpassFragmentDensityMapOffsetEndInfoQCOM( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Offset2D> const & fragmentDensityOffsets_, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), fragmentDensityOffsetCount( static_cast<uint32_t>( fragmentDensityOffsets_.size() ) ), pFragmentDensityOffsets( fragmentDensityOffsets_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    SubpassFragmentDensityMapOffsetEndInfoQCOM & operator=( SubpassFragmentDensityMapOffsetEndInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SubpassFragmentDensityMapOffsetEndInfoQCOM & operator=( VkSubpassFragmentDensityMapOffsetEndInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassFragmentDensityMapOffsetEndInfoQCOM const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SubpassFragmentDensityMapOffsetEndInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SubpassFragmentDensityMapOffsetEndInfoQCOM & setFragmentDensityOffsetCount( uint32_t fragmentDensityOffsetCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fragmentDensityOffsetCount = fragmentDensityOffsetCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SubpassFragmentDensityMapOffsetEndInfoQCOM & setPFragmentDensityOffsets( const VULKAN_HPP_NAMESPACE::Offset2D * pFragmentDensityOffsets_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pFragmentDensityOffsets = pFragmentDensityOffsets_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    SubpassFragmentDensityMapOffsetEndInfoQCOM & setFragmentDensityOffsets( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Offset2D> const & fragmentDensityOffsets_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fragmentDensityOffsetCount = static_cast<uint32_t>( fragmentDensityOffsets_.size() );
+      pFragmentDensityOffsets = fragmentDensityOffsets_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkSubpassFragmentDensityMapOffsetEndInfoQCOM const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSubpassFragmentDensityMapOffsetEndInfoQCOM*>( this );
+    }
+
+    operator VkSubpassFragmentDensityMapOffsetEndInfoQCOM &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSubpassFragmentDensityMapOffsetEndInfoQCOM*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Offset2D * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, fragmentDensityOffsetCount, pFragmentDensityOffsets );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SubpassFragmentDensityMapOffsetEndInfoQCOM const & ) const = default;
+#else
+    bool operator==( SubpassFragmentDensityMapOffsetEndInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( fragmentDensityOffsetCount == rhs.fragmentDensityOffsetCount )
+          && ( pFragmentDensityOffsets == rhs.pFragmentDensityOffsets );
+#endif
+    }
+
+    bool operator!=( SubpassFragmentDensityMapOffsetEndInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassFragmentDensityMapOffsetEndInfoQCOM;
+    const void * pNext = {};
+    uint32_t fragmentDensityOffsetCount = {};
+    const VULKAN_HPP_NAMESPACE::Offset2D * pFragmentDensityOffsets = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eSubpassFragmentDensityMapOffsetEndInfoQCOM>
+  {
+    using Type = SubpassFragmentDensityMapOffsetEndInfoQCOM;
+  };
+
+  struct SubpassResolvePerformanceQueryEXT
+  {
+    using NativeType = VkSubpassResolvePerformanceQueryEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassResolvePerformanceQueryEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR SubpassResolvePerformanceQueryEXT(VULKAN_HPP_NAMESPACE::Bool32 optimal_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), optimal( optimal_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SubpassResolvePerformanceQueryEXT( SubpassResolvePerformanceQueryEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SubpassResolvePerformanceQueryEXT( VkSubpassResolvePerformanceQueryEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SubpassResolvePerformanceQueryEXT( *reinterpret_cast<SubpassResolvePerformanceQueryEXT const *>( &rhs ) )
+    {}
+
+
+    SubpassResolvePerformanceQueryEXT & operator=( SubpassResolvePerformanceQueryEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SubpassResolvePerformanceQueryEXT & operator=( VkSubpassResolvePerformanceQueryEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassResolvePerformanceQueryEXT const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkSubpassResolvePerformanceQueryEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSubpassResolvePerformanceQueryEXT*>( this );
+    }
+
+    operator VkSubpassResolvePerformanceQueryEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSubpassResolvePerformanceQueryEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, optimal );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SubpassResolvePerformanceQueryEXT const & ) const = default;
+#else
+    bool operator==( SubpassResolvePerformanceQueryEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( optimal == rhs.optimal );
+#endif
+    }
+
+    bool operator!=( SubpassResolvePerformanceQueryEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassResolvePerformanceQueryEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 optimal = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eSubpassResolvePerformanceQueryEXT>
+  {
+    using Type = SubpassResolvePerformanceQueryEXT;
+  };
+
+  struct SubpassShadingPipelineCreateInfoHUAWEI
+  {
+    using NativeType = VkSubpassShadingPipelineCreateInfoHUAWEI;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassShadingPipelineCreateInfoHUAWEI;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR SubpassShadingPipelineCreateInfoHUAWEI(VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, uint32_t subpass_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), renderPass( renderPass_ ), subpass( subpass_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SubpassShadingPipelineCreateInfoHUAWEI( SubpassShadingPipelineCreateInfoHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SubpassShadingPipelineCreateInfoHUAWEI( VkSubpassShadingPipelineCreateInfoHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SubpassShadingPipelineCreateInfoHUAWEI( *reinterpret_cast<SubpassShadingPipelineCreateInfoHUAWEI const *>( &rhs ) )
+    {}
+
+
+    SubpassShadingPipelineCreateInfoHUAWEI & operator=( SubpassShadingPipelineCreateInfoHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SubpassShadingPipelineCreateInfoHUAWEI & operator=( VkSubpassShadingPipelineCreateInfoHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassShadingPipelineCreateInfoHUAWEI const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkSubpassShadingPipelineCreateInfoHUAWEI const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSubpassShadingPipelineCreateInfoHUAWEI*>( this );
+    }
+
+    operator VkSubpassShadingPipelineCreateInfoHUAWEI &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSubpassShadingPipelineCreateInfoHUAWEI*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::RenderPass const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, renderPass, subpass );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SubpassShadingPipelineCreateInfoHUAWEI const & ) const = default;
+#else
+    bool operator==( SubpassShadingPipelineCreateInfoHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( renderPass == rhs.renderPass )
+          && ( subpass == rhs.subpass );
+#endif
+    }
+
+    bool operator!=( SubpassShadingPipelineCreateInfoHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassShadingPipelineCreateInfoHUAWEI;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::RenderPass renderPass = {};
+    uint32_t subpass = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eSubpassShadingPipelineCreateInfoHUAWEI>
+  {
+    using Type = SubpassShadingPipelineCreateInfoHUAWEI;
+  };
+
+  struct SubresourceLayout2EXT
+  {
+    using NativeType = VkSubresourceLayout2EXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubresourceLayout2EXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR SubresourceLayout2EXT(VULKAN_HPP_NAMESPACE::SubresourceLayout subresourceLayout_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), subresourceLayout( subresourceLayout_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SubresourceLayout2EXT( SubresourceLayout2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SubresourceLayout2EXT( VkSubresourceLayout2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SubresourceLayout2EXT( *reinterpret_cast<SubresourceLayout2EXT const *>( &rhs ) )
+    {}
+
+
+    SubresourceLayout2EXT & operator=( SubresourceLayout2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SubresourceLayout2EXT & operator=( VkSubresourceLayout2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkSubresourceLayout2EXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSubresourceLayout2EXT*>( this );
+    }
+
+    operator VkSubresourceLayout2EXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSubresourceLayout2EXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::SubresourceLayout const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, subresourceLayout );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SubresourceLayout2EXT const & ) const = default;
+#else
+    bool operator==( SubresourceLayout2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( subresourceLayout == rhs.subresourceLayout );
+#endif
+    }
+
+    bool operator!=( SubresourceLayout2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubresourceLayout2EXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::SubresourceLayout subresourceLayout = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eSubresourceLayout2EXT>
+  {
+    using Type = SubresourceLayout2EXT;
+  };
+
+  struct SurfaceCapabilities2EXT
+  {
+    using NativeType = VkSurfaceCapabilities2EXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceCapabilities2EXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR SurfaceCapabilities2EXT(uint32_t minImageCount_ = {}, uint32_t maxImageCount_ = {}, VULKAN_HPP_NAMESPACE::Extent2D currentExtent_ = {}, VULKAN_HPP_NAMESPACE::Extent2D minImageExtent_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent_ = {}, uint32_t maxImageArrayLayers_ = {}, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {}, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha_ = {}, VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags_ = {}, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT supportedSurfaceCounters_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), minImageCount( minImageCount_ ), maxImageCount( maxImageCount_ ), currentExtent( currentExtent_ ), minImageExtent( minImageExtent_ ), maxImageExtent( maxImageExtent_ ), maxImageArrayLayers( maxImageArrayLayers_ ), supportedTransforms( supportedTransforms_ ), currentTransform( currentTransform_ ), supportedCompositeAlpha( supportedCompositeAlpha_ ), supportedUsageFlags( supportedUsageFlags_ ), supportedSurfaceCounters( supportedSurfaceCounters_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SurfaceCapabilities2EXT( SurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SurfaceCapabilities2EXT( VkSurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SurfaceCapabilities2EXT( *reinterpret_cast<SurfaceCapabilities2EXT const *>( &rhs ) )
+    {}
+
+
+    SurfaceCapabilities2EXT & operator=( SurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SurfaceCapabilities2EXT & operator=( VkSurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkSurfaceCapabilities2EXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSurfaceCapabilities2EXT*>( this );
+    }
+
+    operator VkSurfaceCapabilities2EXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSurfaceCapabilities2EXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &, uint32_t const &, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR const &, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR const &, VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR const &, VULKAN_HPP_NAMESPACE::ImageUsageFlags const &, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, minImageCount, maxImageCount, currentExtent, minImageExtent, maxImageExtent, maxImageArrayLayers, supportedTransforms, currentTransform, supportedCompositeAlpha, supportedUsageFlags, supportedSurfaceCounters );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SurfaceCapabilities2EXT const & ) const = default;
+#else
+    bool operator==( SurfaceCapabilities2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( minImageCount == rhs.minImageCount )
+          && ( maxImageCount == rhs.maxImageCount )
+          && ( currentExtent == rhs.currentExtent )
+          && ( minImageExtent == rhs.minImageExtent )
+          && ( maxImageExtent == rhs.maxImageExtent )
+          && ( maxImageArrayLayers == rhs.maxImageArrayLayers )
+          && ( supportedTransforms == rhs.supportedTransforms )
+          && ( currentTransform == rhs.currentTransform )
+          && ( supportedCompositeAlpha == rhs.supportedCompositeAlpha )
+          && ( supportedUsageFlags == rhs.supportedUsageFlags )
+          && ( supportedSurfaceCounters == rhs.supportedSurfaceCounters );
+#endif
+    }
+
+    bool operator!=( SurfaceCapabilities2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilities2EXT;
+    void * pNext = {};
+    uint32_t minImageCount = {};
+    uint32_t maxImageCount = {};
+    VULKAN_HPP_NAMESPACE::Extent2D currentExtent = {};
+    VULKAN_HPP_NAMESPACE::Extent2D minImageExtent = {};
+    VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent = {};
+    uint32_t maxImageArrayLayers = {};
+    VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {};
+    VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
+    VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha = {};
+    VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags = {};
+    VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT supportedSurfaceCounters = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eSurfaceCapabilities2EXT>
+  {
+    using Type = SurfaceCapabilities2EXT;
+  };
+
+  struct SurfaceCapabilitiesKHR
+  {
+    using NativeType = VkSurfaceCapabilitiesKHR;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesKHR(uint32_t minImageCount_ = {}, uint32_t maxImageCount_ = {}, VULKAN_HPP_NAMESPACE::Extent2D currentExtent_ = {}, VULKAN_HPP_NAMESPACE::Extent2D minImageExtent_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent_ = {}, uint32_t maxImageArrayLayers_ = {}, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {}, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha_ = {}, VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags_ = {}) VULKAN_HPP_NOEXCEPT
+    : minImageCount( minImageCount_ ), maxImageCount( maxImageCount_ ), currentExtent( currentExtent_ ), minImageExtent( minImageExtent_ ), maxImageExtent( maxImageExtent_ ), maxImageArrayLayers( maxImageArrayLayers_ ), supportedTransforms( supportedTransforms_ ), currentTransform( currentTransform_ ), supportedCompositeAlpha( supportedCompositeAlpha_ ), supportedUsageFlags( supportedUsageFlags_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesKHR( SurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SurfaceCapabilitiesKHR( VkSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SurfaceCapabilitiesKHR( *reinterpret_cast<SurfaceCapabilitiesKHR const *>( &rhs ) )
+    {}
+
+
+    SurfaceCapabilitiesKHR & operator=( SurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SurfaceCapabilitiesKHR & operator=( VkSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkSurfaceCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSurfaceCapabilitiesKHR*>( this );
+    }
+
+    operator VkSurfaceCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSurfaceCapabilitiesKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &, uint32_t const &, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR const &, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR const &, VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR const &, VULKAN_HPP_NAMESPACE::ImageUsageFlags const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( minImageCount, maxImageCount, currentExtent, minImageExtent, maxImageExtent, maxImageArrayLayers, supportedTransforms, currentTransform, supportedCompositeAlpha, supportedUsageFlags );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SurfaceCapabilitiesKHR const & ) const = default;
+#else
+    bool operator==( SurfaceCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( minImageCount == rhs.minImageCount )
+          && ( maxImageCount == rhs.maxImageCount )
+          && ( currentExtent == rhs.currentExtent )
+          && ( minImageExtent == rhs.minImageExtent )
+          && ( maxImageExtent == rhs.maxImageExtent )
+          && ( maxImageArrayLayers == rhs.maxImageArrayLayers )
+          && ( supportedTransforms == rhs.supportedTransforms )
+          && ( currentTransform == rhs.currentTransform )
+          && ( supportedCompositeAlpha == rhs.supportedCompositeAlpha )
+          && ( supportedUsageFlags == rhs.supportedUsageFlags );
+#endif
+    }
+
+    bool operator!=( SurfaceCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    uint32_t minImageCount = {};
+    uint32_t maxImageCount = {};
+    VULKAN_HPP_NAMESPACE::Extent2D currentExtent = {};
+    VULKAN_HPP_NAMESPACE::Extent2D minImageExtent = {};
+    VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent = {};
+    uint32_t maxImageArrayLayers = {};
+    VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {};
+    VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
+    VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha = {};
+    VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags = {};
+
+  };
+
+  struct SurfaceCapabilities2KHR
+  {
+    using NativeType = VkSurfaceCapabilities2KHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceCapabilities2KHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR SurfaceCapabilities2KHR(VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), surfaceCapabilities( surfaceCapabilities_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SurfaceCapabilities2KHR( SurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SurfaceCapabilities2KHR( VkSurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SurfaceCapabilities2KHR( *reinterpret_cast<SurfaceCapabilities2KHR const *>( &rhs ) )
+    {}
+
+
+    SurfaceCapabilities2KHR & operator=( SurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SurfaceCapabilities2KHR & operator=( VkSurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkSurfaceCapabilities2KHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSurfaceCapabilities2KHR*>( this );
+    }
+
+    operator VkSurfaceCapabilities2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSurfaceCapabilities2KHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, surfaceCapabilities );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SurfaceCapabilities2KHR const & ) const = default;
+#else
+    bool operator==( SurfaceCapabilities2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( surfaceCapabilities == rhs.surfaceCapabilities );
+#endif
+    }
+
+    bool operator!=( SurfaceCapabilities2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilities2KHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eSurfaceCapabilities2KHR>
+  {
+    using Type = SurfaceCapabilities2KHR;
+  };
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  struct SurfaceCapabilitiesFullScreenExclusiveEXT
+  {
+    using NativeType = VkSurfaceCapabilitiesFullScreenExclusiveEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesFullScreenExclusiveEXT(VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), fullScreenExclusiveSupported( fullScreenExclusiveSupported_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesFullScreenExclusiveEXT( SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SurfaceCapabilitiesFullScreenExclusiveEXT( VkSurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SurfaceCapabilitiesFullScreenExclusiveEXT( *reinterpret_cast<SurfaceCapabilitiesFullScreenExclusiveEXT const *>( &rhs ) )
+    {}
+
+
+    SurfaceCapabilitiesFullScreenExclusiveEXT & operator=( SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SurfaceCapabilitiesFullScreenExclusiveEXT & operator=( VkSurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesFullScreenExclusiveEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SurfaceCapabilitiesFullScreenExclusiveEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SurfaceCapabilitiesFullScreenExclusiveEXT & setFullScreenExclusiveSupported( VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fullScreenExclusiveSupported = fullScreenExclusiveSupported_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkSurfaceCapabilitiesFullScreenExclusiveEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSurfaceCapabilitiesFullScreenExclusiveEXT*>( this );
+    }
+
+    operator VkSurfaceCapabilitiesFullScreenExclusiveEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSurfaceCapabilitiesFullScreenExclusiveEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, fullScreenExclusiveSupported );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SurfaceCapabilitiesFullScreenExclusiveEXT const & ) const = default;
+#else
+    bool operator==( SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( fullScreenExclusiveSupported == rhs.fullScreenExclusiveSupported );
+#endif
+    }
+
+    bool operator!=( SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT>
+  {
+    using Type = SurfaceCapabilitiesFullScreenExclusiveEXT;
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  // Vulkan-Hpp wrapper around VkSurfaceCapabilitiesPresentBarrierNV; the conversion
+  // operators below reinterpret_cast between the two, treating them as layout-compatible.
+  struct SurfaceCapabilitiesPresentBarrierNV
+  {
+    using NativeType = VkSurfaceCapabilitiesPresentBarrierNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceCapabilitiesPresentBarrierNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesPresentBarrierNV(VULKAN_HPP_NAMESPACE::Bool32 presentBarrierSupported_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), presentBarrierSupported( presentBarrierSupported_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesPresentBarrierNV( SurfaceCapabilitiesPresentBarrierNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the wrapper type.
+    SurfaceCapabilitiesPresentBarrierNV( VkSurfaceCapabilitiesPresentBarrierNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SurfaceCapabilitiesPresentBarrierNV( *reinterpret_cast<SurfaceCapabilitiesPresentBarrierNV const *>( &rhs ) )
+    {}
+
+
+    SurfaceCapabilitiesPresentBarrierNV & operator=( SurfaceCapabilitiesPresentBarrierNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SurfaceCapabilitiesPresentBarrierNV & operator=( VkSurfaceCapabilitiesPresentBarrierNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesPresentBarrierNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SurfaceCapabilitiesPresentBarrierNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SurfaceCapabilitiesPresentBarrierNV & setPresentBarrierSupported( VULKAN_HPP_NAMESPACE::Bool32 presentBarrierSupported_ ) VULKAN_HPP_NOEXCEPT
+    {
+      presentBarrierSupported = presentBarrierSupported_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkSurfaceCapabilitiesPresentBarrierNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSurfaceCapabilitiesPresentBarrierNV*>( this );
+    }
+
+    operator VkSurfaceCapabilitiesPresentBarrierNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSurfaceCapabilitiesPresentBarrierNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // reflect(): expose all members as a tie, used by the reflection-based operator== below.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, presentBarrierSupported );
+    }
+#endif
+
+
+    // Defaulted three-way comparison when available; otherwise memberwise == / !=.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SurfaceCapabilitiesPresentBarrierNV const & ) const = default;
+#else
+    bool operator==( SurfaceCapabilitiesPresentBarrierNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( presentBarrierSupported == rhs.presentBarrierSupported );
+#endif
+    }
+
+    bool operator!=( SurfaceCapabilitiesPresentBarrierNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilitiesPresentBarrierNV;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 presentBarrierSupported = {};
+
+  };
+
+  // Maps the eSurfaceCapabilitiesPresentBarrierNV StructureType tag back to this C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::eSurfaceCapabilitiesPresentBarrierNV>
+  {
+    using Type = SurfaceCapabilitiesPresentBarrierNV;
+  };
+
+  // Vulkan-Hpp wrapper around VkSurfaceFormatKHR, pairing a Format with a ColorSpaceKHR.
+  // No sType/pNext members: this struct is not part of a pNext chain.
+  struct SurfaceFormatKHR
+  {
+    using NativeType = VkSurfaceFormatKHR;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR SurfaceFormatKHR(VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::ColorSpaceKHR colorSpace_ = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear) VULKAN_HPP_NOEXCEPT
+    : format( format_ ), colorSpace( colorSpace_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SurfaceFormatKHR( SurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the wrapper type.
+    SurfaceFormatKHR( VkSurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SurfaceFormatKHR( *reinterpret_cast<SurfaceFormatKHR const *>( &rhs ) )
+    {}
+
+
+    SurfaceFormatKHR & operator=( SurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SurfaceFormatKHR & operator=( VkSurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkSurfaceFormatKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSurfaceFormatKHR*>( this );
+    }
+
+    operator VkSurfaceFormatKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSurfaceFormatKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // reflect(): expose all members as a tie, used by the reflection-based operator== below.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::ColorSpaceKHR const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( format, colorSpace );
+    }
+#endif
+
+
+    // Defaulted three-way comparison when available; otherwise memberwise == / !=.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SurfaceFormatKHR const & ) const = default;
+#else
+    bool operator==( SurfaceFormatKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( format == rhs.format )
+          && ( colorSpace == rhs.colorSpace );
+#endif
+    }
+
+    bool operator!=( SurfaceFormatKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    VULKAN_HPP_NAMESPACE::ColorSpaceKHR colorSpace = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear;
+
+  };
+
+  // Vulkan-Hpp wrapper around VkSurfaceFormat2KHR: a chainable (sType/pNext) carrier
+  // for a SurfaceFormatKHR. Conversions reinterpret_cast to/from the C struct.
+  struct SurfaceFormat2KHR
+  {
+    using NativeType = VkSurfaceFormat2KHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceFormat2KHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR SurfaceFormat2KHR(VULKAN_HPP_NAMESPACE::SurfaceFormatKHR surfaceFormat_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), surfaceFormat( surfaceFormat_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SurfaceFormat2KHR( SurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the wrapper type.
+    SurfaceFormat2KHR( VkSurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SurfaceFormat2KHR( *reinterpret_cast<SurfaceFormat2KHR const *>( &rhs ) )
+    {}
+
+
+    SurfaceFormat2KHR & operator=( SurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SurfaceFormat2KHR & operator=( VkSurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkSurfaceFormat2KHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSurfaceFormat2KHR*>( this );
+    }
+
+    operator VkSurfaceFormat2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSurfaceFormat2KHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // reflect(): expose all members as a tie, used by the reflection-based operator== below.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::SurfaceFormatKHR const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, surfaceFormat );
+    }
+#endif
+
+
+    // Defaulted three-way comparison when available; otherwise memberwise == / !=.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SurfaceFormat2KHR const & ) const = default;
+#else
+    bool operator==( SurfaceFormat2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( surfaceFormat == rhs.surfaceFormat );
+#endif
+    }
+
+    bool operator!=( SurfaceFormat2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceFormat2KHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::SurfaceFormatKHR surfaceFormat = {};
+
+  };
+
+  // Maps the eSurfaceFormat2KHR StructureType tag back to this C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::eSurfaceFormat2KHR>
+  {
+    using Type = SurfaceFormat2KHR;
+  };
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  // Vulkan-Hpp wrapper around VkSurfaceFullScreenExclusiveInfoEXT (Win32-only),
+  // carrying a FullScreenExclusiveEXT mode. Conversions reinterpret_cast to/from the C struct.
+  struct SurfaceFullScreenExclusiveInfoEXT
+  {
+    using NativeType = VkSurfaceFullScreenExclusiveInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceFullScreenExclusiveInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveInfoEXT(VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive_ = VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT::eDefault, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), fullScreenExclusive( fullScreenExclusive_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveInfoEXT( SurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the wrapper type.
+    SurfaceFullScreenExclusiveInfoEXT( VkSurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SurfaceFullScreenExclusiveInfoEXT( *reinterpret_cast<SurfaceFullScreenExclusiveInfoEXT const *>( &rhs ) )
+    {}
+
+
+    SurfaceFullScreenExclusiveInfoEXT & operator=( SurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SurfaceFullScreenExclusiveInfoEXT & operator=( VkSurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SurfaceFullScreenExclusiveInfoEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SurfaceFullScreenExclusiveInfoEXT & setFullScreenExclusive( VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fullScreenExclusive = fullScreenExclusive_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkSurfaceFullScreenExclusiveInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSurfaceFullScreenExclusiveInfoEXT*>( this );
+    }
+
+    operator VkSurfaceFullScreenExclusiveInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSurfaceFullScreenExclusiveInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // reflect(): expose all members as a tie, used by the reflection-based operator== below.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, fullScreenExclusive );
+    }
+#endif
+
+
+    // Defaulted three-way comparison when available; otherwise memberwise == / !=.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SurfaceFullScreenExclusiveInfoEXT const & ) const = default;
+#else
+    bool operator==( SurfaceFullScreenExclusiveInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( fullScreenExclusive == rhs.fullScreenExclusive );
+#endif
+    }
+
+    bool operator!=( SurfaceFullScreenExclusiveInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceFullScreenExclusiveInfoEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive = VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT::eDefault;
+
+  };
+
+  // Maps the eSurfaceFullScreenExclusiveInfoEXT StructureType tag back to this C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::eSurfaceFullScreenExclusiveInfoEXT>
+  {
+    using Type = SurfaceFullScreenExclusiveInfoEXT;
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  // Vulkan-Hpp wrapper around VkSurfaceFullScreenExclusiveWin32InfoEXT (Win32-only),
+  // carrying an HMONITOR handle. Conversions reinterpret_cast to/from the C struct.
+  struct SurfaceFullScreenExclusiveWin32InfoEXT
+  {
+    using NativeType = VkSurfaceFullScreenExclusiveWin32InfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveWin32InfoEXT(HMONITOR hmonitor_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), hmonitor( hmonitor_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveWin32InfoEXT( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the wrapper type.
+    SurfaceFullScreenExclusiveWin32InfoEXT( VkSurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SurfaceFullScreenExclusiveWin32InfoEXT( *reinterpret_cast<SurfaceFullScreenExclusiveWin32InfoEXT const *>( &rhs ) )
+    {}
+
+
+    SurfaceFullScreenExclusiveWin32InfoEXT & operator=( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SurfaceFullScreenExclusiveWin32InfoEXT & operator=( VkSurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveWin32InfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SurfaceFullScreenExclusiveWin32InfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SurfaceFullScreenExclusiveWin32InfoEXT & setHmonitor( HMONITOR hmonitor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      hmonitor = hmonitor_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkSurfaceFullScreenExclusiveWin32InfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSurfaceFullScreenExclusiveWin32InfoEXT*>( this );
+    }
+
+    operator VkSurfaceFullScreenExclusiveWin32InfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSurfaceFullScreenExclusiveWin32InfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // reflect(): expose all members as a tie, used by the reflection-based operator== below.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, HMONITOR const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, hmonitor );
+    }
+#endif
+
+
+    // Defaulted three-way comparison when available; otherwise memberwise == / !=.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SurfaceFullScreenExclusiveWin32InfoEXT const & ) const = default;
+#else
+    bool operator==( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( hmonitor == rhs.hmonitor );
+#endif
+    }
+
+    bool operator!=( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT;
+    const void * pNext = {};
+    HMONITOR hmonitor = {};
+
+  };
+
+  // Maps the eSurfaceFullScreenExclusiveWin32InfoEXT StructureType tag back to this C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT>
+  {
+    using Type = SurfaceFullScreenExclusiveWin32InfoEXT;
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  // Vulkan-Hpp wrapper around VkSurfacePresentModeCompatibilityEXT: a counted
+  // pointer-to-array of PresentModeKHR. The ArrayProxy constructor/setter below fill
+  // presentModeCount and pPresentModes from a single range argument.
+  struct SurfacePresentModeCompatibilityEXT
+  {
+    using NativeType = VkSurfacePresentModeCompatibilityEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfacePresentModeCompatibilityEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR SurfacePresentModeCompatibilityEXT(uint32_t presentModeCount_ = {}, VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), presentModeCount( presentModeCount_ ), pPresentModes( pPresentModes_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SurfacePresentModeCompatibilityEXT( SurfacePresentModeCompatibilityEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the wrapper type.
+    SurfacePresentModeCompatibilityEXT( VkSurfacePresentModeCompatibilityEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SurfacePresentModeCompatibilityEXT( *reinterpret_cast<SurfacePresentModeCompatibilityEXT const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Convenience constructor: derives count and pointer from a single range.
+    // NOTE: only the pointer is stored; the caller's array must outlive this struct.
+    SurfacePresentModeCompatibilityEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::PresentModeKHR> const & presentModes_, void * pNext_ = nullptr )
+    : pNext( pNext_ ), presentModeCount( static_cast<uint32_t>( presentModes_.size() ) ), pPresentModes( presentModes_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    SurfacePresentModeCompatibilityEXT & operator=( SurfacePresentModeCompatibilityEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SurfacePresentModeCompatibilityEXT & operator=( VkSurfacePresentModeCompatibilityEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfacePresentModeCompatibilityEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SurfacePresentModeCompatibilityEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SurfacePresentModeCompatibilityEXT & setPresentModeCount( uint32_t presentModeCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      presentModeCount = presentModeCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SurfacePresentModeCompatibilityEXT & setPPresentModes( VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pPresentModes = pPresentModes_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Range setter: updates both count and pointer in one call.
+    SurfacePresentModeCompatibilityEXT & setPresentModes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::PresentModeKHR> const & presentModes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      presentModeCount = static_cast<uint32_t>( presentModes_.size() );
+      pPresentModes = presentModes_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkSurfacePresentModeCompatibilityEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSurfacePresentModeCompatibilityEXT*>( this );
+    }
+
+    operator VkSurfacePresentModeCompatibilityEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSurfacePresentModeCompatibilityEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // reflect(): expose all members as a tie, used by the reflection-based operator== below.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::PresentModeKHR * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, presentModeCount, pPresentModes );
+    }
+#endif
+
+
+    // Defaulted three-way comparison when available; otherwise memberwise == / !=.
+    // Note: compares the pPresentModes pointer itself, not the pointed-to array.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SurfacePresentModeCompatibilityEXT const & ) const = default;
+#else
+    bool operator==( SurfacePresentModeCompatibilityEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( presentModeCount == rhs.presentModeCount )
+          && ( pPresentModes == rhs.pPresentModes );
+#endif
+    }
+
+    bool operator!=( SurfacePresentModeCompatibilityEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfacePresentModeCompatibilityEXT;
+    void * pNext = {};
+    uint32_t presentModeCount = {};
+    VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes = {};
+
+  };
+
+  // Maps the eSurfacePresentModeCompatibilityEXT StructureType tag back to this C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::eSurfacePresentModeCompatibilityEXT>
+  {
+    using Type = SurfacePresentModeCompatibilityEXT;
+  };
+
+  // Vulkan-Hpp wrapper around VkSurfacePresentModeEXT, carrying a single PresentModeKHR.
+  // Conversions reinterpret_cast to/from the C struct.
+  struct SurfacePresentModeEXT
+  {
+    using NativeType = VkSurfacePresentModeEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfacePresentModeEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR SurfacePresentModeEXT(VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), presentMode( presentMode_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SurfacePresentModeEXT( SurfacePresentModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the wrapper type.
+    SurfacePresentModeEXT( VkSurfacePresentModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SurfacePresentModeEXT( *reinterpret_cast<SurfacePresentModeEXT const *>( &rhs ) )
+    {}
+
+
+    SurfacePresentModeEXT & operator=( SurfacePresentModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SurfacePresentModeEXT & operator=( VkSurfacePresentModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfacePresentModeEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SurfacePresentModeEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SurfacePresentModeEXT & setPresentMode( VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      presentMode = presentMode_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkSurfacePresentModeEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSurfacePresentModeEXT*>( this );
+    }
+
+    operator VkSurfacePresentModeEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSurfacePresentModeEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // reflect(): expose all members as a tie, used by the reflection-based operator== below.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PresentModeKHR const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, presentMode );
+    }
+#endif
+
+
+    // Defaulted three-way comparison when available; otherwise memberwise == / !=.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SurfacePresentModeEXT const & ) const = default;
+#else
+    bool operator==( SurfacePresentModeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( presentMode == rhs.presentMode );
+#endif
+    }
+
+    bool operator!=( SurfacePresentModeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfacePresentModeEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate;
+
+  };
+
+  // Maps the eSurfacePresentModeEXT StructureType tag back to this C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::eSurfacePresentModeEXT>
+  {
+    using Type = SurfacePresentModeEXT;
+  };
+
+  // Vulkan-Hpp wrapper around VkSurfacePresentScalingCapabilitiesEXT: scaling/gravity
+  // flag sets plus min/max scaled-image extents. Conversions reinterpret_cast to/from
+  // the C struct.
+  struct SurfacePresentScalingCapabilitiesEXT
+  {
+    using NativeType = VkSurfacePresentScalingCapabilitiesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfacePresentScalingCapabilitiesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR SurfacePresentScalingCapabilitiesEXT(VULKAN_HPP_NAMESPACE::PresentScalingFlagsEXT supportedPresentScaling_ = {}, VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT supportedPresentGravityX_ = {}, VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT supportedPresentGravityY_ = {}, VULKAN_HPP_NAMESPACE::Extent2D minScaledImageExtent_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxScaledImageExtent_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), supportedPresentScaling( supportedPresentScaling_ ), supportedPresentGravityX( supportedPresentGravityX_ ), supportedPresentGravityY( supportedPresentGravityY_ ), minScaledImageExtent( minScaledImageExtent_ ), maxScaledImageExtent( maxScaledImageExtent_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SurfacePresentScalingCapabilitiesEXT( SurfacePresentScalingCapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the wrapper type.
+    SurfacePresentScalingCapabilitiesEXT( VkSurfacePresentScalingCapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SurfacePresentScalingCapabilitiesEXT( *reinterpret_cast<SurfacePresentScalingCapabilitiesEXT const *>( &rhs ) )
+    {}
+
+
+    SurfacePresentScalingCapabilitiesEXT & operator=( SurfacePresentScalingCapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SurfacePresentScalingCapabilitiesEXT & operator=( VkSurfacePresentScalingCapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfacePresentScalingCapabilitiesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SurfacePresentScalingCapabilitiesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SurfacePresentScalingCapabilitiesEXT & setSupportedPresentScaling( VULKAN_HPP_NAMESPACE::PresentScalingFlagsEXT supportedPresentScaling_ ) VULKAN_HPP_NOEXCEPT
+    {
+      supportedPresentScaling = supportedPresentScaling_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SurfacePresentScalingCapabilitiesEXT & setSupportedPresentGravityX( VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT supportedPresentGravityX_ ) VULKAN_HPP_NOEXCEPT
+    {
+      supportedPresentGravityX = supportedPresentGravityX_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SurfacePresentScalingCapabilitiesEXT & setSupportedPresentGravityY( VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT supportedPresentGravityY_ ) VULKAN_HPP_NOEXCEPT
+    {
+      supportedPresentGravityY = supportedPresentGravityY_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SurfacePresentScalingCapabilitiesEXT & setMinScaledImageExtent( VULKAN_HPP_NAMESPACE::Extent2D const & minScaledImageExtent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minScaledImageExtent = minScaledImageExtent_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SurfacePresentScalingCapabilitiesEXT & setMaxScaledImageExtent( VULKAN_HPP_NAMESPACE::Extent2D const & maxScaledImageExtent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxScaledImageExtent = maxScaledImageExtent_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkSurfacePresentScalingCapabilitiesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSurfacePresentScalingCapabilitiesEXT*>( this );
+    }
+
+    operator VkSurfacePresentScalingCapabilitiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSurfacePresentScalingCapabilitiesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // reflect(): expose all members as a tie, used by the reflection-based operator== below.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PresentScalingFlagsEXT const &, VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT const &, VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, supportedPresentScaling, supportedPresentGravityX, supportedPresentGravityY, minScaledImageExtent, maxScaledImageExtent );
+    }
+#endif
+
+
+    // Defaulted three-way comparison when available; otherwise memberwise == / !=.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SurfacePresentScalingCapabilitiesEXT const & ) const = default;
+#else
+    bool operator==( SurfacePresentScalingCapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( supportedPresentScaling == rhs.supportedPresentScaling )
+          && ( supportedPresentGravityX == rhs.supportedPresentGravityX )
+          && ( supportedPresentGravityY == rhs.supportedPresentGravityY )
+          && ( minScaledImageExtent == rhs.minScaledImageExtent )
+          && ( maxScaledImageExtent == rhs.maxScaledImageExtent );
+#endif
+    }
+
+    bool operator!=( SurfacePresentScalingCapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfacePresentScalingCapabilitiesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PresentScalingFlagsEXT supportedPresentScaling = {};
+    VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT supportedPresentGravityX = {};
+    VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT supportedPresentGravityY = {};
+    VULKAN_HPP_NAMESPACE::Extent2D minScaledImageExtent = {};
+    VULKAN_HPP_NAMESPACE::Extent2D maxScaledImageExtent = {};
+
+  };
+
+  // Maps the eSurfacePresentScalingCapabilitiesEXT StructureType tag back to this C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::eSurfacePresentScalingCapabilitiesEXT>
+  {
+    using Type = SurfacePresentScalingCapabilitiesEXT;
+  };
+
+  struct SurfaceProtectedCapabilitiesKHR
+  {
+    using NativeType = VkSurfaceProtectedCapabilitiesKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceProtectedCapabilitiesKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR SurfaceProtectedCapabilitiesKHR(VULKAN_HPP_NAMESPACE::Bool32 supportsProtected_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), supportsProtected( supportsProtected_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SurfaceProtectedCapabilitiesKHR( SurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SurfaceProtectedCapabilitiesKHR( VkSurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SurfaceProtectedCapabilitiesKHR( *reinterpret_cast<SurfaceProtectedCapabilitiesKHR const *>( &rhs ) )
+    {}
+
+
+    SurfaceProtectedCapabilitiesKHR & operator=( SurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SurfaceProtectedCapabilitiesKHR & operator=( VkSurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceProtectedCapabilitiesKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SurfaceProtectedCapabilitiesKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SurfaceProtectedCapabilitiesKHR & setSupportsProtected( VULKAN_HPP_NAMESPACE::Bool32 supportsProtected_ ) VULKAN_HPP_NOEXCEPT
+    {
+      supportsProtected = supportsProtected_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkSurfaceProtectedCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSurfaceProtectedCapabilitiesKHR*>( this );
+    }
+
+    operator VkSurfaceProtectedCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSurfaceProtectedCapabilitiesKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, supportsProtected );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SurfaceProtectedCapabilitiesKHR const & ) const = default;
+#else
+    bool operator==( SurfaceProtectedCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( supportsProtected == rhs.supportsProtected );
+#endif
+    }
+
+    bool operator!=( SurfaceProtectedCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceProtectedCapabilitiesKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 supportsProtected = {};
+
+  };
+
+  // Trait specialization: maps StructureType::eSurfaceProtectedCapabilitiesKHR back to its C++ struct type.
+  template <>
+  struct CppType<StructureType, StructureType::eSurfaceProtectedCapabilitiesKHR>
+  {
+    using Type = SurfaceProtectedCapabilitiesKHR;
+  };
+
+  // C++ mirror of VkSwapchainCounterCreateInfoEXT: carries a SurfaceCounterFlagsEXT
+  // bitmask for swapchain creation.  Layout-identical to the C struct, which is what
+  // makes the reinterpret_cast conversions below legal.
+  struct SwapchainCounterCreateInfoEXT
+  {
+    using NativeType = VkSwapchainCounterCreateInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainCounterCreateInfoEXT;
+
+
+  // Constructors / copy-assignment (compiled out under VULKAN_HPP_NO_STRUCT_CONSTRUCTORS).
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR SwapchainCounterCreateInfoEXT(VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), surfaceCounters( surfaceCounters_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SwapchainCounterCreateInfoEXT( SwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the layout-compatible C++ type.
+    SwapchainCounterCreateInfoEXT( VkSwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SwapchainCounterCreateInfoEXT( *reinterpret_cast<SwapchainCounterCreateInfoEXT const *>( &rhs ) )
+    {}
+
+
+    SwapchainCounterCreateInfoEXT & operator=( SwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assignment from the C struct, again via layout-compatible reinterpretation.
+    SwapchainCounterCreateInfoEXT & operator=( VkSwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SwapchainCounterCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    // Chainable setters; each returns *this (compiled out under VULKAN_HPP_NO_STRUCT_SETTERS).
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SwapchainCounterCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SwapchainCounterCreateInfoEXT & setSurfaceCounters( VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters_ ) VULKAN_HPP_NOEXCEPT
+    {
+      surfaceCounters = surfaceCounters_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C struct (no copy; reinterprets this object).
+    operator VkSwapchainCounterCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSwapchainCounterCreateInfoEXT*>( this );
+    }
+
+    operator VkSwapchainCounterCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSwapchainCounterCreateInfoEXT*>( this );
+    }
+
+    // reflect() exposes the fields as a std::tie'd tuple for generic comparison code.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, surfaceCounters );
+    }
+#endif
+
+
+    // Member-wise equality (defaulted <=> when the compiler supports it).
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SwapchainCounterCreateInfoEXT const & ) const = default;
+#else
+    bool operator==( SwapchainCounterCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( surfaceCounters == rhs.surfaceCounters );
+#endif
+    }
+
+    bool operator!=( SwapchainCounterCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Field order mirrors the C struct; sType is preinitialized to the matching enum value.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainCounterCreateInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters = {};
+
+  };
+
+  // Trait specialization: maps StructureType::eSwapchainCounterCreateInfoEXT back to its C++ struct type.
+  template <>
+  struct CppType<StructureType, StructureType::eSwapchainCounterCreateInfoEXT>
+  {
+    using Type = SwapchainCounterCreateInfoEXT;
+  };
+
+  // C++ mirror of VkSwapchainCreateInfoKHR — the full set of parameters for creating a
+  // swapchain (surface, image format/extent/usage, sharing mode, transform, present mode,
+  // optional old swapchain).  Layout-identical to the C struct; the conversion operators
+  // below are plain reinterpret_casts of `this`.
+  struct SwapchainCreateInfoKHR
+  {
+    using NativeType = VkSwapchainCreateInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainCreateInfoKHR;
+
+
+  // Constructors / copy-assignment (compiled out under VULKAN_HPP_NO_STRUCT_CONSTRUCTORS).
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR SwapchainCreateInfoKHR(VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_ = {}, VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ = {}, uint32_t minImageCount_ = {}, VULKAN_HPP_NAMESPACE::Format imageFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_ = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear, VULKAN_HPP_NAMESPACE::Extent2D imageExtent_ = {}, uint32_t imageArrayLayers_ = {}, VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ = {}, VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, uint32_t queueFamilyIndexCount_ = {}, const uint32_t * pQueueFamilyIndices_ = {}, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ = VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque, VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate, VULKAN_HPP_NAMESPACE::Bool32 clipped_ = {}, VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), surface( surface_ ), minImageCount( minImageCount_ ), imageFormat( imageFormat_ ), imageColorSpace( imageColorSpace_ ), imageExtent( imageExtent_ ), imageArrayLayers( imageArrayLayers_ ), imageUsage( imageUsage_ ), imageSharingMode( imageSharingMode_ ), queueFamilyIndexCount( queueFamilyIndexCount_ ), pQueueFamilyIndices( pQueueFamilyIndices_ ), preTransform( preTransform_ ), compositeAlpha( compositeAlpha_ ), presentMode( presentMode_ ), clipped( clipped_ ), oldSwapchain( oldSwapchain_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SwapchainCreateInfoKHR( SwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the layout-compatible C++ type.
+    SwapchainCreateInfoKHR( VkSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SwapchainCreateInfoKHR( *reinterpret_cast<SwapchainCreateInfoKHR const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Convenience ctor: queueFamilyIndexCount/pQueueFamilyIndices are derived from the proxy.
+    // NOTE: only the raw pointer is stored — the caller must keep the indices alive.
+    SwapchainCreateInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_, VULKAN_HPP_NAMESPACE::SurfaceKHR surface_, uint32_t minImageCount_, VULKAN_HPP_NAMESPACE::Format imageFormat_, VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_, VULKAN_HPP_NAMESPACE::Extent2D imageExtent_, uint32_t imageArrayLayers_, VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_, VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ = VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque, VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate, VULKAN_HPP_NAMESPACE::Bool32 clipped_ = {}, VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ = {}, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), flags( flags_ ), surface( surface_ ), minImageCount( minImageCount_ ), imageFormat( imageFormat_ ), imageColorSpace( imageColorSpace_ ), imageExtent( imageExtent_ ), imageArrayLayers( imageArrayLayers_ ), imageUsage( imageUsage_ ), imageSharingMode( imageSharingMode_ ), queueFamilyIndexCount( static_cast<uint32_t>( queueFamilyIndices_.size() ) ), pQueueFamilyIndices( queueFamilyIndices_.data() ), preTransform( preTransform_ ), compositeAlpha( compositeAlpha_ ), presentMode( presentMode_ ), clipped( clipped_ ), oldSwapchain( oldSwapchain_ )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    SwapchainCreateInfoKHR & operator=( SwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assignment from the C struct, via layout-compatible reinterpretation.
+    SwapchainCreateInfoKHR & operator=( VkSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    // Chainable setters; each returns *this (compiled out under VULKAN_HPP_NO_STRUCT_SETTERS).
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setSurface( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ ) VULKAN_HPP_NOEXCEPT
+    {
+      surface = surface_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setMinImageCount( uint32_t minImageCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minImageCount = minImageCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setImageFormat( VULKAN_HPP_NAMESPACE::Format imageFormat_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageFormat = imageFormat_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setImageColorSpace( VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageColorSpace = imageColorSpace_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setImageExtent( VULKAN_HPP_NAMESPACE::Extent2D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageExtent = imageExtent_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setImageArrayLayers( uint32_t imageArrayLayers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageArrayLayers = imageArrayLayers_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setImageUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageUsage = imageUsage_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setImageSharingMode( VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageSharingMode = imageSharingMode_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueFamilyIndexCount = queueFamilyIndexCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pQueueFamilyIndices = pQueueFamilyIndices_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Array-proxy setter: updates both count and pointer; caller keeps the array alive.
+    SwapchainCreateInfoKHR & setQueueFamilyIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueFamilyIndexCount = static_cast<uint32_t>( queueFamilyIndices_.size() );
+      pQueueFamilyIndices = queueFamilyIndices_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setPreTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ ) VULKAN_HPP_NOEXCEPT
+    {
+      preTransform = preTransform_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setCompositeAlpha( VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ ) VULKAN_HPP_NOEXCEPT
+    {
+      compositeAlpha = compositeAlpha_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setPresentMode( VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      presentMode = presentMode_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setClipped( VULKAN_HPP_NAMESPACE::Bool32 clipped_ ) VULKAN_HPP_NOEXCEPT
+    {
+      clipped = clipped_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setOldSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ ) VULKAN_HPP_NOEXCEPT
+    {
+      oldSwapchain = oldSwapchain_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C struct (no copy; reinterprets this object).
+    operator VkSwapchainCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSwapchainCreateInfoKHR*>( this );
+    }
+
+    operator VkSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSwapchainCreateInfoKHR*>( this );
+    }
+
+    // reflect() exposes the fields as a std::tie'd tuple for generic comparison code.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR const &, VULKAN_HPP_NAMESPACE::SurfaceKHR const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::ColorSpaceKHR const &, VULKAN_HPP_NAMESPACE::Extent2D const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ImageUsageFlags const &, VULKAN_HPP_NAMESPACE::SharingMode const &, uint32_t const &, const uint32_t * const &, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR const &, VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR const &, VULKAN_HPP_NAMESPACE::PresentModeKHR const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::SwapchainKHR const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, surface, minImageCount, imageFormat, imageColorSpace, imageExtent, imageArrayLayers, imageUsage, imageSharingMode, queueFamilyIndexCount, pQueueFamilyIndices, preTransform, compositeAlpha, presentMode, clipped, oldSwapchain );
+    }
+#endif
+
+
+    // Member-wise equality (defaulted <=> when available); pointers compare by identity.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SwapchainCreateInfoKHR const & ) const = default;
+#else
+    bool operator==( SwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( surface == rhs.surface )
+          && ( minImageCount == rhs.minImageCount )
+          && ( imageFormat == rhs.imageFormat )
+          && ( imageColorSpace == rhs.imageColorSpace )
+          && ( imageExtent == rhs.imageExtent )
+          && ( imageArrayLayers == rhs.imageArrayLayers )
+          && ( imageUsage == rhs.imageUsage )
+          && ( imageSharingMode == rhs.imageSharingMode )
+          && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount )
+          && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices )
+          && ( preTransform == rhs.preTransform )
+          && ( compositeAlpha == rhs.compositeAlpha )
+          && ( presentMode == rhs.presentMode )
+          && ( clipped == rhs.clipped )
+          && ( oldSwapchain == rhs.oldSwapchain );
+#endif
+    }
+
+    bool operator!=( SwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Field order mirrors the C struct; sType is preinitialized to the matching enum value.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainCreateInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags = {};
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface = {};
+    uint32_t minImageCount = {};
+    VULKAN_HPP_NAMESPACE::Format imageFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear;
+    VULKAN_HPP_NAMESPACE::Extent2D imageExtent = {};
+    uint32_t imageArrayLayers = {};
+    VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage = {};
+    VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive;
+    uint32_t queueFamilyIndexCount = {};
+    const uint32_t * pQueueFamilyIndices = {};
+    VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
+    VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha = VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque;
+    VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate;
+    VULKAN_HPP_NAMESPACE::Bool32 clipped = {};
+    VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain = {};
+
+  };
+
+  // Trait specialization: maps StructureType::eSwapchainCreateInfoKHR back to its C++ struct type.
+  template <>
+  struct CppType<StructureType, StructureType::eSwapchainCreateInfoKHR>
+  {
+    using Type = SwapchainCreateInfoKHR;
+  };
+
+  // C++ mirror of VkSwapchainDisplayNativeHdrCreateInfoAMD: carries a single
+  // localDimmingEnable flag.  Layout-identical to the C struct, which makes the
+  // reinterpret_cast conversions below legal.
+  struct SwapchainDisplayNativeHdrCreateInfoAMD
+  {
+    using NativeType = VkSwapchainDisplayNativeHdrCreateInfoAMD;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD;
+
+
+  // Constructors / copy-assignment (compiled out under VULKAN_HPP_NO_STRUCT_CONSTRUCTORS).
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR SwapchainDisplayNativeHdrCreateInfoAMD(VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), localDimmingEnable( localDimmingEnable_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SwapchainDisplayNativeHdrCreateInfoAMD( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the layout-compatible C++ type.
+    SwapchainDisplayNativeHdrCreateInfoAMD( VkSwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SwapchainDisplayNativeHdrCreateInfoAMD( *reinterpret_cast<SwapchainDisplayNativeHdrCreateInfoAMD const *>( &rhs ) )
+    {}
+
+
+    SwapchainDisplayNativeHdrCreateInfoAMD & operator=( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assignment from the C struct, via layout-compatible reinterpretation.
+    SwapchainDisplayNativeHdrCreateInfoAMD & operator=( VkSwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SwapchainDisplayNativeHdrCreateInfoAMD const *>( &rhs );
+      return *this;
+    }
+
+    // Chainable setters; each returns *this (compiled out under VULKAN_HPP_NO_STRUCT_SETTERS).
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SwapchainDisplayNativeHdrCreateInfoAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SwapchainDisplayNativeHdrCreateInfoAMD & setLocalDimmingEnable( VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      localDimmingEnable = localDimmingEnable_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C struct (no copy; reinterprets this object).
+    operator VkSwapchainDisplayNativeHdrCreateInfoAMD const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSwapchainDisplayNativeHdrCreateInfoAMD*>( this );
+    }
+
+    operator VkSwapchainDisplayNativeHdrCreateInfoAMD &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSwapchainDisplayNativeHdrCreateInfoAMD*>( this );
+    }
+
+    // reflect() exposes the fields as a std::tie'd tuple for generic comparison code.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, localDimmingEnable );
+    }
+#endif
+
+
+    // Member-wise equality (defaulted <=> when the compiler supports it).
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SwapchainDisplayNativeHdrCreateInfoAMD const & ) const = default;
+#else
+    bool operator==( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( localDimmingEnable == rhs.localDimmingEnable );
+#endif
+    }
+
+    bool operator!=( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Field order mirrors the C struct; sType is preinitialized to the matching enum value.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable = {};
+
+  };
+
+  // Trait specialization: maps StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD back to its C++ struct type.
+  template <>
+  struct CppType<StructureType, StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD>
+  {
+    using Type = SwapchainDisplayNativeHdrCreateInfoAMD;
+  };
+
+  // C++ mirror of VkSwapchainPresentBarrierCreateInfoNV: carries a single
+  // presentBarrierEnable flag.  Unlike most create-info structs here, pNext is a
+  // non-const void* (matching the C declaration).  Layout-identical to the C struct.
+  struct SwapchainPresentBarrierCreateInfoNV
+  {
+    using NativeType = VkSwapchainPresentBarrierCreateInfoNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainPresentBarrierCreateInfoNV;
+
+
+  // Constructors / copy-assignment (compiled out under VULKAN_HPP_NO_STRUCT_CONSTRUCTORS).
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR SwapchainPresentBarrierCreateInfoNV(VULKAN_HPP_NAMESPACE::Bool32 presentBarrierEnable_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), presentBarrierEnable( presentBarrierEnable_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SwapchainPresentBarrierCreateInfoNV( SwapchainPresentBarrierCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the layout-compatible C++ type.
+    SwapchainPresentBarrierCreateInfoNV( VkSwapchainPresentBarrierCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SwapchainPresentBarrierCreateInfoNV( *reinterpret_cast<SwapchainPresentBarrierCreateInfoNV const *>( &rhs ) )
+    {}
+
+
+    SwapchainPresentBarrierCreateInfoNV & operator=( SwapchainPresentBarrierCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assignment from the C struct, via layout-compatible reinterpretation.
+    SwapchainPresentBarrierCreateInfoNV & operator=( VkSwapchainPresentBarrierCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SwapchainPresentBarrierCreateInfoNV const *>( &rhs );
+      return *this;
+    }
+
+    // Chainable setters; each returns *this (compiled out under VULKAN_HPP_NO_STRUCT_SETTERS).
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SwapchainPresentBarrierCreateInfoNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SwapchainPresentBarrierCreateInfoNV & setPresentBarrierEnable( VULKAN_HPP_NAMESPACE::Bool32 presentBarrierEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      presentBarrierEnable = presentBarrierEnable_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C struct (no copy; reinterprets this object).
+    operator VkSwapchainPresentBarrierCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSwapchainPresentBarrierCreateInfoNV*>( this );
+    }
+
+    operator VkSwapchainPresentBarrierCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSwapchainPresentBarrierCreateInfoNV*>( this );
+    }
+
+    // reflect() exposes the fields as a std::tie'd tuple for generic comparison code.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, presentBarrierEnable );
+    }
+#endif
+
+
+    // Member-wise equality (defaulted <=> when the compiler supports it).
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SwapchainPresentBarrierCreateInfoNV const & ) const = default;
+#else
+    bool operator==( SwapchainPresentBarrierCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( presentBarrierEnable == rhs.presentBarrierEnable );
+#endif
+    }
+
+    bool operator!=( SwapchainPresentBarrierCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Field order mirrors the C struct; sType is preinitialized to the matching enum value.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainPresentBarrierCreateInfoNV;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 presentBarrierEnable = {};
+
+  };
+
+  // Trait specialization: maps StructureType::eSwapchainPresentBarrierCreateInfoNV back to its C++ struct type.
+  template <>
+  struct CppType<StructureType, StructureType::eSwapchainPresentBarrierCreateInfoNV>
+  {
+    using Type = SwapchainPresentBarrierCreateInfoNV;
+  };
+
+  // C++ mirror of VkSwapchainPresentFenceInfoEXT: a counted array of Fence handles
+  // (swapchainCount / pFences).  Layout-identical to the C struct; only the raw
+  // pointer is stored, so the caller must keep the fence array alive.
+  struct SwapchainPresentFenceInfoEXT
+  {
+    using NativeType = VkSwapchainPresentFenceInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainPresentFenceInfoEXT;
+
+
+  // Constructors / copy-assignment (compiled out under VULKAN_HPP_NO_STRUCT_CONSTRUCTORS).
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR SwapchainPresentFenceInfoEXT(uint32_t swapchainCount_ = {}, const VULKAN_HPP_NAMESPACE::Fence * pFences_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), swapchainCount( swapchainCount_ ), pFences( pFences_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SwapchainPresentFenceInfoEXT( SwapchainPresentFenceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the layout-compatible C++ type.
+    SwapchainPresentFenceInfoEXT( VkSwapchainPresentFenceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SwapchainPresentFenceInfoEXT( *reinterpret_cast<SwapchainPresentFenceInfoEXT const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Convenience ctor: swapchainCount/pFences are derived from the array proxy.
+    SwapchainPresentFenceInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Fence> const & fences_, void * pNext_ = nullptr )
+    : pNext( pNext_ ), swapchainCount( static_cast<uint32_t>( fences_.size() ) ), pFences( fences_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    SwapchainPresentFenceInfoEXT & operator=( SwapchainPresentFenceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assignment from the C struct, via layout-compatible reinterpretation.
+    SwapchainPresentFenceInfoEXT & operator=( VkSwapchainPresentFenceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SwapchainPresentFenceInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    // Chainable setters; each returns *this (compiled out under VULKAN_HPP_NO_STRUCT_SETTERS).
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SwapchainPresentFenceInfoEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SwapchainPresentFenceInfoEXT & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      swapchainCount = swapchainCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SwapchainPresentFenceInfoEXT & setPFences( const VULKAN_HPP_NAMESPACE::Fence * pFences_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pFences = pFences_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Array-proxy setter: updates both count and pointer; caller keeps the array alive.
+    SwapchainPresentFenceInfoEXT & setFences( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Fence> const & fences_ ) VULKAN_HPP_NOEXCEPT
+    {
+      swapchainCount = static_cast<uint32_t>( fences_.size() );
+      pFences = fences_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C struct (no copy; reinterprets this object).
+    operator VkSwapchainPresentFenceInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSwapchainPresentFenceInfoEXT*>( this );
+    }
+
+    operator VkSwapchainPresentFenceInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSwapchainPresentFenceInfoEXT*>( this );
+    }
+
+    // reflect() exposes the fields as a std::tie'd tuple for generic comparison code.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Fence * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, swapchainCount, pFences );
+    }
+#endif
+
+
+    // Member-wise equality; note pFences compares by pointer identity, not element-wise.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SwapchainPresentFenceInfoEXT const & ) const = default;
+#else
+    bool operator==( SwapchainPresentFenceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( swapchainCount == rhs.swapchainCount )
+          && ( pFences == rhs.pFences );
+#endif
+    }
+
+    bool operator!=( SwapchainPresentFenceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Field order mirrors the C struct; sType is preinitialized to the matching enum value.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainPresentFenceInfoEXT;
+    void * pNext = {};
+    uint32_t swapchainCount = {};
+    const VULKAN_HPP_NAMESPACE::Fence * pFences = {};
+
+  };
+
+  // Trait specialization: maps StructureType::eSwapchainPresentFenceInfoEXT back to its C++ struct type.
+  template <>
+  struct CppType<StructureType, StructureType::eSwapchainPresentFenceInfoEXT>
+  {
+    using Type = SwapchainPresentFenceInfoEXT;
+  };
+
+  struct SwapchainPresentModeInfoEXT
+  {
+    using NativeType = VkSwapchainPresentModeInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainPresentModeInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR SwapchainPresentModeInfoEXT(uint32_t swapchainCount_ = {}, const VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), swapchainCount( swapchainCount_ ), pPresentModes( pPresentModes_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SwapchainPresentModeInfoEXT( SwapchainPresentModeInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SwapchainPresentModeInfoEXT( VkSwapchainPresentModeInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SwapchainPresentModeInfoEXT( *reinterpret_cast<SwapchainPresentModeInfoEXT const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    SwapchainPresentModeInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PresentModeKHR> const & presentModes_, void * pNext_ = nullptr )
+    : pNext( pNext_ ), swapchainCount( static_cast<uint32_t>( presentModes_.size() ) ), pPresentModes( presentModes_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    SwapchainPresentModeInfoEXT & operator=( SwapchainPresentModeInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SwapchainPresentModeInfoEXT & operator=( VkSwapchainPresentModeInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SwapchainPresentModeInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SwapchainPresentModeInfoEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SwapchainPresentModeInfoEXT & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      swapchainCount = swapchainCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SwapchainPresentModeInfoEXT & setPPresentModes( const VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pPresentModes = pPresentModes_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    SwapchainPresentModeInfoEXT & setPresentModes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PresentModeKHR> const & presentModes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      swapchainCount = static_cast<uint32_t>( presentModes_.size() );
+      pPresentModes = presentModes_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkSwapchainPresentModeInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSwapchainPresentModeInfoEXT*>( this );
+    }
+
+    operator VkSwapchainPresentModeInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSwapchainPresentModeInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::PresentModeKHR * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, swapchainCount, pPresentModes );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SwapchainPresentModeInfoEXT const & ) const = default;
+#else
+    bool operator==( SwapchainPresentModeInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( swapchainCount == rhs.swapchainCount )
+          && ( pPresentModes == rhs.pPresentModes );
+#endif
+    }
+
+    bool operator!=( SwapchainPresentModeInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainPresentModeInfoEXT;
+    void * pNext = {};
+    uint32_t swapchainCount = {};
+    const VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eSwapchainPresentModeInfoEXT>
+  {
+    using Type = SwapchainPresentModeInfoEXT;
+  };
+
+  // C++ wrapper for VkSwapchainPresentModesCreateInfoEXT: a list of present modes a
+  // swapchain is created with. Layout-compatible with the C struct — all conversions
+  // below are reinterpret_casts, so the member list, order and types must match the
+  // C definition exactly. NOTE(review): presumably part of the swapchain-maintenance
+  // extension surface — confirm against the Vulkan spec.
+  struct SwapchainPresentModesCreateInfoEXT
+  {
+    using NativeType = VkSwapchainPresentModesCreateInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainPresentModesCreateInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR SwapchainPresentModesCreateInfoEXT(uint32_t presentModeCount_ = {}, const VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), presentModeCount( presentModeCount_ ), pPresentModes( pPresentModes_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SwapchainPresentModesCreateInfoEXT( SwapchainPresentModesCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid because the layouts are identical.
+    SwapchainPresentModesCreateInfoEXT( VkSwapchainPresentModesCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SwapchainPresentModesCreateInfoEXT( *reinterpret_cast<SwapchainPresentModesCreateInfoEXT const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Enhanced-mode convenience: derives presentModeCount from the proxy's size.
+    SwapchainPresentModesCreateInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PresentModeKHR> const & presentModes_, void * pNext_ = nullptr )
+    : pNext( pNext_ ), presentModeCount( static_cast<uint32_t>( presentModes_.size() ) ), pPresentModes( presentModes_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    SwapchainPresentModesCreateInfoEXT & operator=( SwapchainPresentModesCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SwapchainPresentModesCreateInfoEXT & operator=( VkSwapchainPresentModesCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SwapchainPresentModesCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each returns *this so calls can be chained.
+    VULKAN_HPP_CONSTEXPR_14 SwapchainPresentModesCreateInfoEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SwapchainPresentModesCreateInfoEXT & setPresentModeCount( uint32_t presentModeCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      presentModeCount = presentModeCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SwapchainPresentModesCreateInfoEXT & setPPresentModes( const VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pPresentModes = pPresentModes_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets both count and pointer from an ArrayProxy in one call.
+    SwapchainPresentModesCreateInfoEXT & setPresentModes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PresentModeKHR> const & presentModes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      presentModeCount = static_cast<uint32_t>( presentModes_.size() );
+      pPresentModes = presentModes_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C struct, again relying on identical layout.
+    operator VkSwapchainPresentModesCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSwapchainPresentModesCreateInfoEXT*>( this );
+    }
+
+    operator VkSwapchainPresentModesCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSwapchainPresentModesCreateInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::PresentModeKHR * const &>
+#endif
+      // Exposes the members as a tuple of references, used below for comparison.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, presentModeCount, pPresentModes );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SwapchainPresentModesCreateInfoEXT const & ) const = default;
+#else
+    // Memberwise equality; compares the pPresentModes pointer, not the pointed-to array.
+    bool operator==( SwapchainPresentModesCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( presentModeCount == rhs.presentModeCount )
+          && ( pPresentModes == rhs.pPresentModes );
+#endif
+    }
+
+    bool operator!=( SwapchainPresentModesCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainPresentModesCreateInfoEXT;
+    void * pNext = {};
+    uint32_t presentModeCount = {};
+    const VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes = {};
+
+  };
+
+  // Maps the eSwapchainPresentModesCreateInfoEXT enum value back to this C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::eSwapchainPresentModesCreateInfoEXT>
+  {
+    using Type = SwapchainPresentModesCreateInfoEXT;
+  };
+
+  // C++ wrapper for VkSwapchainPresentScalingCreateInfoEXT: scaling behavior and X/Y
+  // gravity flags applied when a swapchain image does not match the surface size.
+  // Layout-compatible with the C struct (conversions are reinterpret_casts), so the
+  // member list, order and types must match the C definition exactly.
+  struct SwapchainPresentScalingCreateInfoEXT
+  {
+    using NativeType = VkSwapchainPresentScalingCreateInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainPresentScalingCreateInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR SwapchainPresentScalingCreateInfoEXT(VULKAN_HPP_NAMESPACE::PresentScalingFlagsEXT scalingBehavior_ = {}, VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT presentGravityX_ = {}, VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT presentGravityY_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), scalingBehavior( scalingBehavior_ ), presentGravityX( presentGravityX_ ), presentGravityY( presentGravityY_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SwapchainPresentScalingCreateInfoEXT( SwapchainPresentScalingCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid because the layouts are identical.
+    SwapchainPresentScalingCreateInfoEXT( VkSwapchainPresentScalingCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SwapchainPresentScalingCreateInfoEXT( *reinterpret_cast<SwapchainPresentScalingCreateInfoEXT const *>( &rhs ) )
+    {}
+
+
+    SwapchainPresentScalingCreateInfoEXT & operator=( SwapchainPresentScalingCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SwapchainPresentScalingCreateInfoEXT & operator=( VkSwapchainPresentScalingCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SwapchainPresentScalingCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each returns *this so calls can be chained.
+    VULKAN_HPP_CONSTEXPR_14 SwapchainPresentScalingCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SwapchainPresentScalingCreateInfoEXT & setScalingBehavior( VULKAN_HPP_NAMESPACE::PresentScalingFlagsEXT scalingBehavior_ ) VULKAN_HPP_NOEXCEPT
+    {
+      scalingBehavior = scalingBehavior_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SwapchainPresentScalingCreateInfoEXT & setPresentGravityX( VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT presentGravityX_ ) VULKAN_HPP_NOEXCEPT
+    {
+      presentGravityX = presentGravityX_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SwapchainPresentScalingCreateInfoEXT & setPresentGravityY( VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT presentGravityY_ ) VULKAN_HPP_NOEXCEPT
+    {
+      presentGravityY = presentGravityY_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C struct, again relying on identical layout.
+    operator VkSwapchainPresentScalingCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSwapchainPresentScalingCreateInfoEXT*>( this );
+    }
+
+    operator VkSwapchainPresentScalingCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSwapchainPresentScalingCreateInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PresentScalingFlagsEXT const &, VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT const &, VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT const &>
+#endif
+      // Exposes the members as a tuple of references, used below for comparison.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, scalingBehavior, presentGravityX, presentGravityY );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( SwapchainPresentScalingCreateInfoEXT const & ) const = default;
+#else
+    bool operator==( SwapchainPresentScalingCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( scalingBehavior == rhs.scalingBehavior )
+          && ( presentGravityX == rhs.presentGravityX )
+          && ( presentGravityY == rhs.presentGravityY );
+#endif
+    }
+
+    bool operator!=( SwapchainPresentScalingCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainPresentScalingCreateInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PresentScalingFlagsEXT scalingBehavior = {};
+    VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT presentGravityX = {};
+    VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT presentGravityY = {};
+
+  };
+
+  // Maps the eSwapchainPresentScalingCreateInfoEXT enum value back to this C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::eSwapchainPresentScalingCreateInfoEXT>
+  {
+    using Type = SwapchainPresentScalingCreateInfoEXT;
+  };
+
+  // C++ wrapper for VkTextureLODGatherFormatPropertiesAMD. This is an output
+  // (properties) structure: note that it has no setter block, only constructors,
+  // conversions and comparison. Layout-compatible with the C struct (conversions
+  // are reinterpret_casts), so members must match the C definition exactly.
+  struct TextureLODGatherFormatPropertiesAMD
+  {
+    using NativeType = VkTextureLODGatherFormatPropertiesAMD;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eTextureLodGatherFormatPropertiesAMD;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR TextureLODGatherFormatPropertiesAMD(VULKAN_HPP_NAMESPACE::Bool32 supportsTextureGatherLODBiasAMD_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), supportsTextureGatherLODBiasAMD( supportsTextureGatherLODBiasAMD_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR TextureLODGatherFormatPropertiesAMD( TextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid because the layouts are identical.
+    TextureLODGatherFormatPropertiesAMD( VkTextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+      : TextureLODGatherFormatPropertiesAMD( *reinterpret_cast<TextureLODGatherFormatPropertiesAMD const *>( &rhs ) )
+    {}
+
+
+    TextureLODGatherFormatPropertiesAMD & operator=( TextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    TextureLODGatherFormatPropertiesAMD & operator=( VkTextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TextureLODGatherFormatPropertiesAMD const *>( &rhs );
+      return *this;
+    }
+
+
+    // Zero-cost conversions to the C struct, again relying on identical layout.
+    operator VkTextureLODGatherFormatPropertiesAMD const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkTextureLODGatherFormatPropertiesAMD*>( this );
+    }
+
+    operator VkTextureLODGatherFormatPropertiesAMD &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkTextureLODGatherFormatPropertiesAMD*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#endif
+      // Exposes the members as a tuple of references, used below for comparison.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, supportsTextureGatherLODBiasAMD );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( TextureLODGatherFormatPropertiesAMD const & ) const = default;
+#else
+    bool operator==( TextureLODGatherFormatPropertiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( supportsTextureGatherLODBiasAMD == rhs.supportsTextureGatherLODBiasAMD );
+#endif
+    }
+
+    bool operator!=( TextureLODGatherFormatPropertiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eTextureLodGatherFormatPropertiesAMD;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 supportsTextureGatherLODBiasAMD = {};
+
+  };
+
+  // Maps the eTextureLodGatherFormatPropertiesAMD enum value back to this C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::eTextureLodGatherFormatPropertiesAMD>
+  {
+    using Type = TextureLODGatherFormatPropertiesAMD;
+  };
+
+  // C++ wrapper for VkTilePropertiesQCOM: tile size, apron size and origin of a
+  // rendering tile. Layout-compatible with the C struct (conversions are
+  // reinterpret_casts), so the member list, order and types must match the C
+  // definition exactly.
+  struct TilePropertiesQCOM
+  {
+    using NativeType = VkTilePropertiesQCOM;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eTilePropertiesQCOM;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR TilePropertiesQCOM(VULKAN_HPP_NAMESPACE::Extent3D tileSize_ = {}, VULKAN_HPP_NAMESPACE::Extent2D apronSize_ = {}, VULKAN_HPP_NAMESPACE::Offset2D origin_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), tileSize( tileSize_ ), apronSize( apronSize_ ), origin( origin_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR TilePropertiesQCOM( TilePropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid because the layouts are identical.
+    TilePropertiesQCOM( VkTilePropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+      : TilePropertiesQCOM( *reinterpret_cast<TilePropertiesQCOM const *>( &rhs ) )
+    {}
+
+
+    TilePropertiesQCOM & operator=( TilePropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    TilePropertiesQCOM & operator=( VkTilePropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each returns *this so calls can be chained.
+    VULKAN_HPP_CONSTEXPR_14 TilePropertiesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 TilePropertiesQCOM & setTileSize( VULKAN_HPP_NAMESPACE::Extent3D const & tileSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tileSize = tileSize_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 TilePropertiesQCOM & setApronSize( VULKAN_HPP_NAMESPACE::Extent2D const & apronSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      apronSize = apronSize_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 TilePropertiesQCOM & setOrigin( VULKAN_HPP_NAMESPACE::Offset2D const & origin_ ) VULKAN_HPP_NOEXCEPT
+    {
+      origin = origin_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C struct, again relying on identical layout.
+    operator VkTilePropertiesQCOM const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkTilePropertiesQCOM*>( this );
+    }
+
+    operator VkTilePropertiesQCOM &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkTilePropertiesQCOM*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Extent3D const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::Offset2D const &>
+#endif
+      // Exposes the members as a tuple of references, used below for comparison.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, tileSize, apronSize, origin );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( TilePropertiesQCOM const & ) const = default;
+#else
+    bool operator==( TilePropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( tileSize == rhs.tileSize )
+          && ( apronSize == rhs.apronSize )
+          && ( origin == rhs.origin );
+#endif
+    }
+
+    bool operator!=( TilePropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eTilePropertiesQCOM;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Extent3D tileSize = {};
+    VULKAN_HPP_NAMESPACE::Extent2D apronSize = {};
+    VULKAN_HPP_NAMESPACE::Offset2D origin = {};
+
+  };
+
+  // Maps the eTilePropertiesQCOM enum value back to this C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::eTilePropertiesQCOM>
+  {
+    using Type = TilePropertiesQCOM;
+  };
+
+  // C++ wrapper for VkTimelineSemaphoreSubmitInfo: per-submit wait/signal values for
+  // timeline semaphores, chained into a submit info via pNext. Layout-compatible with
+  // the C struct (conversions are reinterpret_casts), so the member list, order and
+  // types must match the C definition exactly.
+  struct TimelineSemaphoreSubmitInfo
+  {
+    using NativeType = VkTimelineSemaphoreSubmitInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eTimelineSemaphoreSubmitInfo;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR TimelineSemaphoreSubmitInfo(uint32_t waitSemaphoreValueCount_ = {}, const uint64_t * pWaitSemaphoreValues_ = {}, uint32_t signalSemaphoreValueCount_ = {}, const uint64_t * pSignalSemaphoreValues_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), waitSemaphoreValueCount( waitSemaphoreValueCount_ ), pWaitSemaphoreValues( pWaitSemaphoreValues_ ), signalSemaphoreValueCount( signalSemaphoreValueCount_ ), pSignalSemaphoreValues( pSignalSemaphoreValues_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR TimelineSemaphoreSubmitInfo( TimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct; valid because the layouts are identical.
+    TimelineSemaphoreSubmitInfo( VkTimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : TimelineSemaphoreSubmitInfo( *reinterpret_cast<TimelineSemaphoreSubmitInfo const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Enhanced-mode convenience: derives both counts from the proxies' sizes.
+    TimelineSemaphoreSubmitInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & waitSemaphoreValues_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & signalSemaphoreValues_ = {}, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), waitSemaphoreValueCount( static_cast<uint32_t>( waitSemaphoreValues_.size() ) ), pWaitSemaphoreValues( waitSemaphoreValues_.data() ), signalSemaphoreValueCount( static_cast<uint32_t>( signalSemaphoreValues_.size() ) ), pSignalSemaphoreValues( signalSemaphoreValues_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    TimelineSemaphoreSubmitInfo & operator=( TimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    TimelineSemaphoreSubmitInfo & operator=( VkTimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each returns *this so calls can be chained.
+    VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo & setWaitSemaphoreValueCount( uint32_t waitSemaphoreValueCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      waitSemaphoreValueCount = waitSemaphoreValueCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo & setPWaitSemaphoreValues( const uint64_t * pWaitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pWaitSemaphoreValues = pWaitSemaphoreValues_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets both wait count and pointer from an ArrayProxy in one call.
+    TimelineSemaphoreSubmitInfo & setWaitSemaphoreValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & waitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
+    {
+      waitSemaphoreValueCount = static_cast<uint32_t>( waitSemaphoreValues_.size() );
+      pWaitSemaphoreValues = waitSemaphoreValues_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo & setSignalSemaphoreValueCount( uint32_t signalSemaphoreValueCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      signalSemaphoreValueCount = signalSemaphoreValueCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo & setPSignalSemaphoreValues( const uint64_t * pSignalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSignalSemaphoreValues = pSignalSemaphoreValues_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets both signal count and pointer from an ArrayProxy in one call.
+    TimelineSemaphoreSubmitInfo & setSignalSemaphoreValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & signalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
+    {
+      signalSemaphoreValueCount = static_cast<uint32_t>( signalSemaphoreValues_.size() );
+      pSignalSemaphoreValues = signalSemaphoreValues_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C struct, again relying on identical layout.
+    operator VkTimelineSemaphoreSubmitInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkTimelineSemaphoreSubmitInfo*>( this );
+    }
+
+    operator VkTimelineSemaphoreSubmitInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkTimelineSemaphoreSubmitInfo*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const uint64_t * const &, uint32_t const &, const uint64_t * const &>
+#endif
+      // Exposes the members as a tuple of references, used below for comparison.
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, waitSemaphoreValueCount, pWaitSemaphoreValues, signalSemaphoreValueCount, pSignalSemaphoreValues );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( TimelineSemaphoreSubmitInfo const & ) const = default;
+#else
+    // Memberwise equality; compares the value pointers, not the pointed-to arrays.
+    bool operator==( TimelineSemaphoreSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( waitSemaphoreValueCount == rhs.waitSemaphoreValueCount )
+          && ( pWaitSemaphoreValues == rhs.pWaitSemaphoreValues )
+          && ( signalSemaphoreValueCount == rhs.signalSemaphoreValueCount )
+          && ( pSignalSemaphoreValues == rhs.pSignalSemaphoreValues );
+#endif
+    }
+
+    bool operator!=( TimelineSemaphoreSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eTimelineSemaphoreSubmitInfo;
+    const void * pNext = {};
+    uint32_t waitSemaphoreValueCount = {};
+    const uint64_t * pWaitSemaphoreValues = {};
+    uint32_t signalSemaphoreValueCount = {};
+    const uint64_t * pSignalSemaphoreValues = {};
+
+  };
+
+  // Maps the eTimelineSemaphoreSubmitInfo enum value back to this C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::eTimelineSemaphoreSubmitInfo>
+  {
+    using Type = TimelineSemaphoreSubmitInfo;
+  };
+  // Backwards-compatible alias — presumably from the KHR extension form of this
+  // struct before promotion to core; verify against the Vulkan spec.
+  using TimelineSemaphoreSubmitInfoKHR = TimelineSemaphoreSubmitInfo;
+
+  struct TraceRaysIndirectCommand2KHR
+  {
+    using NativeType = VkTraceRaysIndirectCommand2KHR;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR TraceRaysIndirectCommand2KHR(VULKAN_HPP_NAMESPACE::DeviceAddress raygenShaderRecordAddress_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderRecordSize_ = {}, VULKAN_HPP_NAMESPACE::DeviceAddress missShaderBindingTableAddress_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingTableSize_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingTableStride_ = {}, VULKAN_HPP_NAMESPACE::DeviceAddress hitShaderBindingTableAddress_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingTableSize_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingTableStride_ = {}, VULKAN_HPP_NAMESPACE::DeviceAddress callableShaderBindingTableAddress_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingTableSize_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingTableStride_ = {}, uint32_t width_ = {}, uint32_t height_ = {}, uint32_t depth_ = {}) VULKAN_HPP_NOEXCEPT
+    : raygenShaderRecordAddress( raygenShaderRecordAddress_ ), raygenShaderRecordSize( raygenShaderRecordSize_ ), missShaderBindingTableAddress( missShaderBindingTableAddress_ ), missShaderBindingTableSize( missShaderBindingTableSize_ ), missShaderBindingTableStride( missShaderBindingTableStride_ ), hitShaderBindingTableAddress( hitShaderBindingTableAddress_ ), hitShaderBindingTableSize( hitShaderBindingTableSize_ ), hitShaderBindingTableStride( hitShaderBindingTableStride_ ), callableShaderBindingTableAddress( callableShaderBindingTableAddress_ ), callableShaderBindingTableSize( callableShaderBindingTableSize_ ), callableShaderBindingTableStride( callableShaderBindingTableStride_ ), width( width_ ), height( height_ ), depth( depth_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR TraceRaysIndirectCommand2KHR( TraceRaysIndirectCommand2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    TraceRaysIndirectCommand2KHR( VkTraceRaysIndirectCommand2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : TraceRaysIndirectCommand2KHR( *reinterpret_cast<TraceRaysIndirectCommand2KHR const *>( &rhs ) )
+    {}
+
+
+    TraceRaysIndirectCommand2KHR & operator=( TraceRaysIndirectCommand2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    TraceRaysIndirectCommand2KHR & operator=( VkTraceRaysIndirectCommand2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommand2KHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & setRaygenShaderRecordAddress( VULKAN_HPP_NAMESPACE::DeviceAddress raygenShaderRecordAddress_ ) VULKAN_HPP_NOEXCEPT
+    {
+      raygenShaderRecordAddress = raygenShaderRecordAddress_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & setRaygenShaderRecordSize( VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderRecordSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      raygenShaderRecordSize = raygenShaderRecordSize_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & setMissShaderBindingTableAddress( VULKAN_HPP_NAMESPACE::DeviceAddress missShaderBindingTableAddress_ ) VULKAN_HPP_NOEXCEPT
+    {
+      missShaderBindingTableAddress = missShaderBindingTableAddress_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & setMissShaderBindingTableSize( VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingTableSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      missShaderBindingTableSize = missShaderBindingTableSize_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & setMissShaderBindingTableStride( VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingTableStride_ ) VULKAN_HPP_NOEXCEPT
+    {
+      missShaderBindingTableStride = missShaderBindingTableStride_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & setHitShaderBindingTableAddress( VULKAN_HPP_NAMESPACE::DeviceAddress hitShaderBindingTableAddress_ ) VULKAN_HPP_NOEXCEPT
+    {
+      hitShaderBindingTableAddress = hitShaderBindingTableAddress_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & setHitShaderBindingTableSize( VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingTableSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      hitShaderBindingTableSize = hitShaderBindingTableSize_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & setHitShaderBindingTableStride( VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingTableStride_ ) VULKAN_HPP_NOEXCEPT
+    {
+      hitShaderBindingTableStride = hitShaderBindingTableStride_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & setCallableShaderBindingTableAddress( VULKAN_HPP_NAMESPACE::DeviceAddress callableShaderBindingTableAddress_ ) VULKAN_HPP_NOEXCEPT
+    {
+      callableShaderBindingTableAddress = callableShaderBindingTableAddress_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & setCallableShaderBindingTableSize( VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingTableSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      callableShaderBindingTableSize = callableShaderBindingTableSize_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & setCallableShaderBindingTableStride( VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingTableStride_ ) VULKAN_HPP_NOEXCEPT
+    {
+      callableShaderBindingTableStride = callableShaderBindingTableStride_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
+    {
+      width = width_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
+    {
+      height = height_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & setDepth( uint32_t depth_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depth = depth_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkTraceRaysIndirectCommand2KHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkTraceRaysIndirectCommand2KHR*>( this );
+    }
+
+    operator VkTraceRaysIndirectCommand2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkTraceRaysIndirectCommand2KHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::DeviceAddress const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, uint32_t const &, uint32_t const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( raygenShaderRecordAddress, raygenShaderRecordSize, missShaderBindingTableAddress, missShaderBindingTableSize, missShaderBindingTableStride, hitShaderBindingTableAddress, hitShaderBindingTableSize, hitShaderBindingTableStride, callableShaderBindingTableAddress, callableShaderBindingTableSize, callableShaderBindingTableStride, width, height, depth );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( TraceRaysIndirectCommand2KHR const & ) const = default;
+#else
+    bool operator==( TraceRaysIndirectCommand2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( raygenShaderRecordAddress == rhs.raygenShaderRecordAddress )
+          && ( raygenShaderRecordSize == rhs.raygenShaderRecordSize )
+          && ( missShaderBindingTableAddress == rhs.missShaderBindingTableAddress )
+          && ( missShaderBindingTableSize == rhs.missShaderBindingTableSize )
+          && ( missShaderBindingTableStride == rhs.missShaderBindingTableStride )
+          && ( hitShaderBindingTableAddress == rhs.hitShaderBindingTableAddress )
+          && ( hitShaderBindingTableSize == rhs.hitShaderBindingTableSize )
+          && ( hitShaderBindingTableStride == rhs.hitShaderBindingTableStride )
+          && ( callableShaderBindingTableAddress == rhs.callableShaderBindingTableAddress )
+          && ( callableShaderBindingTableSize == rhs.callableShaderBindingTableSize )
+          && ( callableShaderBindingTableStride == rhs.callableShaderBindingTableStride )
+          && ( width == rhs.width )
+          && ( height == rhs.height )
+          && ( depth == rhs.depth );
+#endif
+    }
+
+    bool operator!=( TraceRaysIndirectCommand2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::DeviceAddress raygenShaderRecordAddress = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderRecordSize = {};
+    VULKAN_HPP_NAMESPACE::DeviceAddress missShaderBindingTableAddress = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingTableSize = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingTableStride = {};
+    VULKAN_HPP_NAMESPACE::DeviceAddress hitShaderBindingTableAddress = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingTableSize = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingTableStride = {};
+    VULKAN_HPP_NAMESPACE::DeviceAddress callableShaderBindingTableAddress = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingTableSize = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingTableStride = {};
+    uint32_t width = {};
+    uint32_t height = {};
+    uint32_t depth = {};
+
+  };
+
+  // Layout-compatible C++ wrapper for VkTraceRaysIndirectCommandKHR
+  // (the width/height/depth grid consumed by an indirect trace-rays call).
+  // Conversion to/from the C struct is done by reinterpret_cast, so the
+  // member list below must mirror the C struct exactly.
+  struct TraceRaysIndirectCommandKHR
+  {
+    using NativeType = VkTraceRaysIndirectCommandKHR;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR TraceRaysIndirectCommandKHR(uint32_t width_ = {}, uint32_t height_ = {}, uint32_t depth_ = {}) VULKAN_HPP_NOEXCEPT
+    : width( width_ ), height( height_ ), depth( depth_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR TraceRaysIndirectCommandKHR( TraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    TraceRaysIndirectCommandKHR( VkTraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : TraceRaysIndirectCommandKHR( *reinterpret_cast<TraceRaysIndirectCommandKHR const *>( &rhs ) )
+    {}
+
+
+    // Convenience constructor: width/height come from an Extent2D, depth is
+    // supplied separately (defaults to 0).
+    explicit TraceRaysIndirectCommandKHR( Extent2D const & extent2D, uint32_t depth_ = {} )
+      : width( extent2D.width )
+      , height( extent2D.height )
+    , depth( depth_ )
+    {}
+
+    TraceRaysIndirectCommandKHR & operator=( TraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    TraceRaysIndirectCommandKHR & operator=( VkTraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommandKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters: each assigns one member and returns *this.
+    VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommandKHR & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
+    {
+      width = width_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommandKHR & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
+    {
+      height = height_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommandKHR & setDepth( uint32_t depth_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depth = depth_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the native C struct (rely on identical layout).
+    operator VkTraceRaysIndirectCommandKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkTraceRaysIndirectCommandKHR*>( this );
+    }
+
+    operator VkTraceRaysIndirectCommandKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkTraceRaysIndirectCommandKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<uint32_t const &, uint32_t const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( width, height, depth );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( TraceRaysIndirectCommandKHR const & ) const = default;
+#else
+    bool operator==( TraceRaysIndirectCommandKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( width == rhs.width )
+          && ( height == rhs.height )
+          && ( depth == rhs.depth );
+#endif
+    }
+
+    bool operator!=( TraceRaysIndirectCommandKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members — order and types must match VkTraceRaysIndirectCommandKHR.
+    public:
+    uint32_t width = {};
+    uint32_t height = {};
+    uint32_t depth = {};
+
+  };
+
+  // Layout-compatible C++ wrapper for VkValidationCacheCreateInfoEXT
+  // (VK_EXT_validation_cache): flags plus an optional blob of initial
+  // cache data (pInitialData / initialDataSize).
+  struct ValidationCacheCreateInfoEXT
+  {
+    using NativeType = VkValidationCacheCreateInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eValidationCacheCreateInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ValidationCacheCreateInfoEXT(VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_ = {}, size_t initialDataSize_ = {}, const void * pInitialData_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), initialDataSize( initialDataSize_ ), pInitialData( pInitialData_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ValidationCacheCreateInfoEXT( ValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ValidationCacheCreateInfoEXT( VkValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ValidationCacheCreateInfoEXT( *reinterpret_cast<ValidationCacheCreateInfoEXT const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Enhanced-mode constructor: derives initialDataSize (in bytes) and
+    // pInitialData from an ArrayProxy; the caller keeps ownership of the data.
+    template <typename T>
+    ValidationCacheCreateInfoEXT( VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & initialData_, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), flags( flags_ ), initialDataSize( initialData_.size() * sizeof(T) ), pInitialData( initialData_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    ValidationCacheCreateInfoEXT & operator=( ValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ValidationCacheCreateInfoEXT & operator=( VkValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters: each assigns one member and returns *this.
+    VULKAN_HPP_CONSTEXPR_14 ValidationCacheCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ValidationCacheCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ValidationCacheCreateInfoEXT & setInitialDataSize( size_t initialDataSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      initialDataSize = initialDataSize_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ValidationCacheCreateInfoEXT & setPInitialData( const void * pInitialData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pInitialData = pInitialData_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets both initialDataSize (bytes) and pInitialData from one ArrayProxy.
+    template <typename T>
+    ValidationCacheCreateInfoEXT & setInitialData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & initialData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      initialDataSize = initialData_.size() * sizeof(T);
+      pInitialData = initialData_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the native C struct (rely on identical layout).
+    operator VkValidationCacheCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkValidationCacheCreateInfoEXT*>( this );
+    }
+
+    operator VkValidationCacheCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkValidationCacheCreateInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT const &, size_t const &, const void * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, initialDataSize, pInitialData );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ValidationCacheCreateInfoEXT const & ) const = default;
+#else
+    bool operator==( ValidationCacheCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      // NOTE: compares pInitialData by pointer, not by pointed-to contents.
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( initialDataSize == rhs.initialDataSize )
+          && ( pInitialData == rhs.pInitialData );
+#endif
+    }
+
+    bool operator!=( ValidationCacheCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members — order and types must match VkValidationCacheCreateInfoEXT.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationCacheCreateInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags = {};
+    size_t initialDataSize = {};
+    const void * pInitialData = {};
+
+  };
+
+  // Maps StructureType::eValidationCacheCreateInfoEXT to its C++ struct type.
+  template <>
+  struct CppType<StructureType, StructureType::eValidationCacheCreateInfoEXT>
+  {
+    using Type = ValidationCacheCreateInfoEXT;
+  };
+
+  // Layout-compatible C++ wrapper for VkValidationFeaturesEXT
+  // (VK_EXT_validation_features): lists of validation features to enable and
+  // disable, each given as a count plus a pointer to an external array.
+  struct ValidationFeaturesEXT
+  {
+    using NativeType = VkValidationFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eValidationFeaturesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ValidationFeaturesEXT(uint32_t enabledValidationFeatureCount_ = {}, const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT * pEnabledValidationFeatures_ = {}, uint32_t disabledValidationFeatureCount_ = {}, const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT * pDisabledValidationFeatures_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), enabledValidationFeatureCount( enabledValidationFeatureCount_ ), pEnabledValidationFeatures( pEnabledValidationFeatures_ ), disabledValidationFeatureCount( disabledValidationFeatureCount_ ), pDisabledValidationFeatures( pDisabledValidationFeatures_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ValidationFeaturesEXT( ValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ValidationFeaturesEXT( VkValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ValidationFeaturesEXT( *reinterpret_cast<ValidationFeaturesEXT const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Enhanced-mode constructor: derives each count/pointer pair from an
+    // ArrayProxy; the caller keeps ownership of the arrays.
+    ValidationFeaturesEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT> const & enabledValidationFeatures_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT> const & disabledValidationFeatures_ = {}, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), enabledValidationFeatureCount( static_cast<uint32_t>( enabledValidationFeatures_.size() ) ), pEnabledValidationFeatures( enabledValidationFeatures_.data() ), disabledValidationFeatureCount( static_cast<uint32_t>( disabledValidationFeatures_.size() ) ), pDisabledValidationFeatures( disabledValidationFeatures_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    ValidationFeaturesEXT & operator=( ValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ValidationFeaturesEXT & operator=( VkValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ValidationFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters: each assigns one member and returns *this.
+    VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT & setEnabledValidationFeatureCount( uint32_t enabledValidationFeatureCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      enabledValidationFeatureCount = enabledValidationFeatureCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT & setPEnabledValidationFeatures( const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT * pEnabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pEnabledValidationFeatures = pEnabledValidationFeatures_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets both enabledValidationFeatureCount and pEnabledValidationFeatures.
+    ValidationFeaturesEXT & setEnabledValidationFeatures( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT> const & enabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT
+    {
+      enabledValidationFeatureCount = static_cast<uint32_t>( enabledValidationFeatures_.size() );
+      pEnabledValidationFeatures = enabledValidationFeatures_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT & setDisabledValidationFeatureCount( uint32_t disabledValidationFeatureCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      disabledValidationFeatureCount = disabledValidationFeatureCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT & setPDisabledValidationFeatures( const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT * pDisabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDisabledValidationFeatures = pDisabledValidationFeatures_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets both disabledValidationFeatureCount and pDisabledValidationFeatures.
+    ValidationFeaturesEXT & setDisabledValidationFeatures( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT> const & disabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT
+    {
+      disabledValidationFeatureCount = static_cast<uint32_t>( disabledValidationFeatures_.size() );
+      pDisabledValidationFeatures = disabledValidationFeatures_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the native C struct (rely on identical layout).
+    operator VkValidationFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkValidationFeaturesEXT*>( this );
+    }
+
+    operator VkValidationFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkValidationFeaturesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, enabledValidationFeatureCount, pEnabledValidationFeatures, disabledValidationFeatureCount, pDisabledValidationFeatures );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ValidationFeaturesEXT const & ) const = default;
+#else
+    bool operator==( ValidationFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      // NOTE: compares the feature arrays by pointer, not by contents.
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( enabledValidationFeatureCount == rhs.enabledValidationFeatureCount )
+          && ( pEnabledValidationFeatures == rhs.pEnabledValidationFeatures )
+          && ( disabledValidationFeatureCount == rhs.disabledValidationFeatureCount )
+          && ( pDisabledValidationFeatures == rhs.pDisabledValidationFeatures );
+#endif
+    }
+
+    bool operator!=( ValidationFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members — order and types must match VkValidationFeaturesEXT.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationFeaturesEXT;
+    const void * pNext = {};
+    uint32_t enabledValidationFeatureCount = {};
+    const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT * pEnabledValidationFeatures = {};
+    uint32_t disabledValidationFeatureCount = {};
+    const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT * pDisabledValidationFeatures = {};
+
+  };
+
+  // Maps StructureType::eValidationFeaturesEXT to its C++ struct type.
+  template <>
+  struct CppType<StructureType, StructureType::eValidationFeaturesEXT>
+  {
+    using Type = ValidationFeaturesEXT;
+  };
+
+  // Layout-compatible C++ wrapper for VkValidationFlagsEXT
+  // (VK_EXT_validation_flags): a count plus pointer to an external array of
+  // validation checks to disable.
+  struct ValidationFlagsEXT
+  {
+    using NativeType = VkValidationFlagsEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eValidationFlagsEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ValidationFlagsEXT(uint32_t disabledValidationCheckCount_ = {}, const VULKAN_HPP_NAMESPACE::ValidationCheckEXT * pDisabledValidationChecks_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), disabledValidationCheckCount( disabledValidationCheckCount_ ), pDisabledValidationChecks( pDisabledValidationChecks_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ValidationFlagsEXT( ValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ValidationFlagsEXT( VkValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ValidationFlagsEXT( *reinterpret_cast<ValidationFlagsEXT const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Enhanced-mode constructor: derives the count/pointer pair from an
+    // ArrayProxy; the caller keeps ownership of the array.
+    ValidationFlagsEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ValidationCheckEXT> const & disabledValidationChecks_, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), disabledValidationCheckCount( static_cast<uint32_t>( disabledValidationChecks_.size() ) ), pDisabledValidationChecks( disabledValidationChecks_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    ValidationFlagsEXT & operator=( ValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ValidationFlagsEXT & operator=( VkValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ValidationFlagsEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters: each assigns one member and returns *this.
+    VULKAN_HPP_CONSTEXPR_14 ValidationFlagsEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ValidationFlagsEXT & setDisabledValidationCheckCount( uint32_t disabledValidationCheckCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      disabledValidationCheckCount = disabledValidationCheckCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ValidationFlagsEXT & setPDisabledValidationChecks( const VULKAN_HPP_NAMESPACE::ValidationCheckEXT * pDisabledValidationChecks_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDisabledValidationChecks = pDisabledValidationChecks_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets both disabledValidationCheckCount and pDisabledValidationChecks.
+    ValidationFlagsEXT & setDisabledValidationChecks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ValidationCheckEXT> const & disabledValidationChecks_ ) VULKAN_HPP_NOEXCEPT
+    {
+      disabledValidationCheckCount = static_cast<uint32_t>( disabledValidationChecks_.size() );
+      pDisabledValidationChecks = disabledValidationChecks_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the native C struct (rely on identical layout).
+    operator VkValidationFlagsEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkValidationFlagsEXT*>( this );
+    }
+
+    operator VkValidationFlagsEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkValidationFlagsEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::ValidationCheckEXT * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, disabledValidationCheckCount, pDisabledValidationChecks );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ValidationFlagsEXT const & ) const = default;
+#else
+    bool operator==( ValidationFlagsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      // NOTE: compares pDisabledValidationChecks by pointer, not contents.
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( disabledValidationCheckCount == rhs.disabledValidationCheckCount )
+          && ( pDisabledValidationChecks == rhs.pDisabledValidationChecks );
+#endif
+    }
+
+    bool operator!=( ValidationFlagsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members — order and types must match VkValidationFlagsEXT.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationFlagsEXT;
+    const void * pNext = {};
+    uint32_t disabledValidationCheckCount = {};
+    const VULKAN_HPP_NAMESPACE::ValidationCheckEXT * pDisabledValidationChecks = {};
+
+  };
+
+  // Maps StructureType::eValidationFlagsEXT to its C++ struct type.
+  template <>
+  struct CppType<StructureType, StructureType::eValidationFlagsEXT>
+  {
+    using Type = ValidationFlagsEXT;
+  };
+
+  // Layout-compatible C++ wrapper for VkVertexInputAttributeDescription2EXT:
+  // per-attribute vertex-input state (location, binding, format, offset) for
+  // dynamic vertex-input setup.
+  struct VertexInputAttributeDescription2EXT
+  {
+    using NativeType = VkVertexInputAttributeDescription2EXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVertexInputAttributeDescription2EXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription2EXT(uint32_t location_ = {}, uint32_t binding_ = {}, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, uint32_t offset_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), location( location_ ), binding( binding_ ), format( format_ ), offset( offset_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription2EXT( VertexInputAttributeDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VertexInputAttributeDescription2EXT( VkVertexInputAttributeDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VertexInputAttributeDescription2EXT( *reinterpret_cast<VertexInputAttributeDescription2EXT const *>( &rhs ) )
+    {}
+
+
+    VertexInputAttributeDescription2EXT & operator=( VertexInputAttributeDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VertexInputAttributeDescription2EXT & operator=( VkVertexInputAttributeDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters: each assigns one member and returns *this.
+    VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription2EXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription2EXT & setLocation( uint32_t location_ ) VULKAN_HPP_NOEXCEPT
+    {
+      location = location_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription2EXT & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
+    {
+      binding = binding_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription2EXT & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
+    {
+      format = format_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription2EXT & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the native C struct (rely on identical layout).
+    operator VkVertexInputAttributeDescription2EXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVertexInputAttributeDescription2EXT*>( this );
+    }
+
+    operator VkVertexInputAttributeDescription2EXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVertexInputAttributeDescription2EXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Format const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, location, binding, format, offset );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( VertexInputAttributeDescription2EXT const & ) const = default;
+#else
+    bool operator==( VertexInputAttributeDescription2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( location == rhs.location )
+          && ( binding == rhs.binding )
+          && ( format == rhs.format )
+          && ( offset == rhs.offset );
+#endif
+    }
+
+    bool operator!=( VertexInputAttributeDescription2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members — order and types must match VkVertexInputAttributeDescription2EXT.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVertexInputAttributeDescription2EXT;
+    void * pNext = {};
+    uint32_t location = {};
+    uint32_t binding = {};
+    VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    uint32_t offset = {};
+
+  };
+
+  // Maps StructureType::eVertexInputAttributeDescription2EXT to its C++ struct type.
+  template <>
+  struct CppType<StructureType, StructureType::eVertexInputAttributeDescription2EXT>
+  {
+    using Type = VertexInputAttributeDescription2EXT;
+  };
+
+  // Layout-compatible C++ wrapper for VkVertexInputBindingDescription2EXT:
+  // per-binding vertex-input state (binding, stride, input rate, divisor)
+  // for dynamic vertex-input setup.
+  struct VertexInputBindingDescription2EXT
+  {
+    using NativeType = VkVertexInputBindingDescription2EXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVertexInputBindingDescription2EXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR VertexInputBindingDescription2EXT(uint32_t binding_ = {}, uint32_t stride_ = {}, VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex, uint32_t divisor_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), binding( binding_ ), stride( stride_ ), inputRate( inputRate_ ), divisor( divisor_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VertexInputBindingDescription2EXT( VertexInputBindingDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VertexInputBindingDescription2EXT( VkVertexInputBindingDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VertexInputBindingDescription2EXT( *reinterpret_cast<VertexInputBindingDescription2EXT const *>( &rhs ) )
+    {}
+
+
+    VertexInputBindingDescription2EXT & operator=( VertexInputBindingDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VertexInputBindingDescription2EXT & operator=( VkVertexInputBindingDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters: each assigns one member and returns *this.
+    VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription2EXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription2EXT & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
+    {
+      binding = binding_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription2EXT & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stride = stride_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription2EXT & setInputRate( VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ ) VULKAN_HPP_NOEXCEPT
+    {
+      inputRate = inputRate_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription2EXT & setDivisor( uint32_t divisor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      divisor = divisor_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the native C struct (rely on identical layout).
+    operator VkVertexInputBindingDescription2EXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVertexInputBindingDescription2EXT*>( this );
+    }
+
+    operator VkVertexInputBindingDescription2EXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVertexInputBindingDescription2EXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::VertexInputRate const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, binding, stride, inputRate, divisor );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( VertexInputBindingDescription2EXT const & ) const = default;
+#else
+    bool operator==( VertexInputBindingDescription2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( binding == rhs.binding )
+          && ( stride == rhs.stride )
+          && ( inputRate == rhs.inputRate )
+          && ( divisor == rhs.divisor );
+#endif
+    }
+
+    bool operator!=( VertexInputBindingDescription2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members — order and types must match VkVertexInputBindingDescription2EXT.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVertexInputBindingDescription2EXT;
+    void * pNext = {};
+    uint32_t binding = {};
+    uint32_t stride = {};
+    VULKAN_HPP_NAMESPACE::VertexInputRate inputRate = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex;
+    uint32_t divisor = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eVertexInputBindingDescription2EXT>  // StructureType enum value -> C++ struct type mapping
+  {
+    using Type = VertexInputBindingDescription2EXT;
+  };
+
+#if defined( VK_USE_PLATFORM_VI_NN )
+  struct ViSurfaceCreateInfoNN  // generated C++ value wrapper for VkViSurfaceCreateInfoNN: constructors, chainable setters, Vk conversions
+  {
+    using NativeType = VkViSurfaceCreateInfoNN;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eViSurfaceCreateInfoNN;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR ViSurfaceCreateInfoNN(VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags_ = {}, void * window_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), window( window_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ViSurfaceCreateInfoNN( ViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ViSurfaceCreateInfoNN( VkViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT  // reinterpret the C struct; relies on identical layout
+      : ViSurfaceCreateInfoNN( *reinterpret_cast<ViSurfaceCreateInfoNN const *>( &rhs ) )
+    {}
+
+
+    ViSurfaceCreateInfoNN & operator=( ViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ViSurfaceCreateInfoNN & operator=( VkViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ViSurfaceCreateInfoNN & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ViSurfaceCreateInfoNN & setFlags( VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ViSurfaceCreateInfoNN & setWindow( void * window_ ) VULKAN_HPP_NOEXCEPT
+    {
+      window = window_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkViSurfaceCreateInfoNN const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkViSurfaceCreateInfoNN*>( this );
+    }
+
+    operator VkViSurfaceCreateInfoNN &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkViSurfaceCreateInfoNN*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN const &, void * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT  // member-wise tuple view; backs the reflect-based operator== below
+    {
+      return std::tie( sType, pNext, flags, window );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( ViSurfaceCreateInfoNN const & ) const = default;
+#else
+    bool operator==( ViSurfaceCreateInfoNN const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( window == rhs.window );
+#endif
+    }
+
+    bool operator!=( ViSurfaceCreateInfoNN const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eViSurfaceCreateInfoNN;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags = {};
+    void * window = {};  // type-erased platform window pointer — exact handle type defined by the NN platform, confirm against VK_NN_vi_surface spec
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eViSurfaceCreateInfoNN>  // StructureType enum value -> C++ struct type mapping
+  {
+    using Type = ViSurfaceCreateInfoNN;
+  };
+#endif /*VK_USE_PLATFORM_VI_NN*/
+
+  struct VideoPictureResourceInfoKHR  // generated C++ value wrapper for VkVideoPictureResourceInfoKHR: constructors, chainable setters, Vk conversions
+  {
+    using NativeType = VkVideoPictureResourceInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoPictureResourceInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR VideoPictureResourceInfoKHR(VULKAN_HPP_NAMESPACE::Offset2D codedOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent2D codedExtent_ = {}, uint32_t baseArrayLayer_ = {}, VULKAN_HPP_NAMESPACE::ImageView imageViewBinding_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), codedOffset( codedOffset_ ), codedExtent( codedExtent_ ), baseArrayLayer( baseArrayLayer_ ), imageViewBinding( imageViewBinding_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoPictureResourceInfoKHR( VideoPictureResourceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoPictureResourceInfoKHR( VkVideoPictureResourceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT  // reinterpret the C struct; relies on identical layout
+      : VideoPictureResourceInfoKHR( *reinterpret_cast<VideoPictureResourceInfoKHR const *>( &rhs ) )
+    {}
+
+
+    VideoPictureResourceInfoKHR & operator=( VideoPictureResourceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoPictureResourceInfoKHR & operator=( VkVideoPictureResourceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 VideoPictureResourceInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoPictureResourceInfoKHR & setCodedOffset( VULKAN_HPP_NAMESPACE::Offset2D const & codedOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      codedOffset = codedOffset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoPictureResourceInfoKHR & setCodedExtent( VULKAN_HPP_NAMESPACE::Extent2D const & codedExtent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      codedExtent = codedExtent_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoPictureResourceInfoKHR & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      baseArrayLayer = baseArrayLayer_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoPictureResourceInfoKHR & setImageViewBinding( VULKAN_HPP_NAMESPACE::ImageView imageViewBinding_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageViewBinding = imageViewBinding_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkVideoPictureResourceInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoPictureResourceInfoKHR*>( this );
+    }
+
+    operator VkVideoPictureResourceInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoPictureResourceInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Offset2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ImageView const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT  // member-wise tuple view; backs the reflect-based operator== below
+    {
+      return std::tie( sType, pNext, codedOffset, codedExtent, baseArrayLayer, imageViewBinding );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( VideoPictureResourceInfoKHR const & ) const = default;
+#else
+    bool operator==( VideoPictureResourceInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( codedOffset == rhs.codedOffset )
+          && ( codedExtent == rhs.codedExtent )
+          && ( baseArrayLayer == rhs.baseArrayLayer )
+          && ( imageViewBinding == rhs.imageViewBinding );
+#endif
+    }
+
+    bool operator!=( VideoPictureResourceInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoPictureResourceInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Offset2D codedOffset = {};
+    VULKAN_HPP_NAMESPACE::Extent2D codedExtent = {};
+    uint32_t baseArrayLayer = {};
+    VULKAN_HPP_NAMESPACE::ImageView imageViewBinding = {};  // image view backing the picture resource — semantics per VK_KHR_video_queue spec
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoPictureResourceInfoKHR>  // StructureType enum value -> C++ struct type mapping
+  {
+    using Type = VideoPictureResourceInfoKHR;
+  };
+
+  struct VideoReferenceSlotInfoKHR  // generated C++ value wrapper for VkVideoReferenceSlotInfoKHR: constructors, chainable setters, Vk conversions
+  {
+    using NativeType = VkVideoReferenceSlotInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoReferenceSlotInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR VideoReferenceSlotInfoKHR(int32_t slotIndex_ = {}, const VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR * pPictureResource_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), slotIndex( slotIndex_ ), pPictureResource( pPictureResource_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoReferenceSlotInfoKHR( VideoReferenceSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoReferenceSlotInfoKHR( VkVideoReferenceSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT  // reinterpret the C struct; relies on identical layout
+      : VideoReferenceSlotInfoKHR( *reinterpret_cast<VideoReferenceSlotInfoKHR const *>( &rhs ) )
+    {}
+
+
+    VideoReferenceSlotInfoKHR & operator=( VideoReferenceSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoReferenceSlotInfoKHR & operator=( VkVideoReferenceSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 VideoReferenceSlotInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoReferenceSlotInfoKHR & setSlotIndex( int32_t slotIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      slotIndex = slotIndex_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoReferenceSlotInfoKHR & setPPictureResource( const VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR * pPictureResource_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pPictureResource = pPictureResource_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkVideoReferenceSlotInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoReferenceSlotInfoKHR*>( this );
+    }
+
+    operator VkVideoReferenceSlotInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoReferenceSlotInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, int32_t const &, const VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT  // member-wise tuple view; backs the reflect-based operator== below
+    {
+      return std::tie( sType, pNext, slotIndex, pPictureResource );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( VideoReferenceSlotInfoKHR const & ) const = default;
+#else
+    bool operator==( VideoReferenceSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( slotIndex == rhs.slotIndex )
+          && ( pPictureResource == rhs.pPictureResource );
+#endif
+    }
+
+    bool operator!=( VideoReferenceSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoReferenceSlotInfoKHR;
+    const void * pNext = {};
+    int32_t slotIndex = {};  // signed: negative values have special meaning per VK_KHR_video_queue spec — confirm before relying on it
+    const VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR * pPictureResource = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoReferenceSlotInfoKHR>  // StructureType enum value -> C++ struct type mapping
+  {
+    using Type = VideoReferenceSlotInfoKHR;
+  };
+
+  struct VideoBeginCodingInfoKHR  // generated C++ value wrapper for VkVideoBeginCodingInfoKHR: constructors (incl. ArrayProxy overload), setters, Vk conversions
+  {
+    using NativeType = VkVideoBeginCodingInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoBeginCodingInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR VideoBeginCodingInfoKHR(VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagsKHR flags_ = {}, VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_ = {}, VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters_ = {}, uint32_t referenceSlotCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), videoSession( videoSession_ ), videoSessionParameters( videoSessionParameters_ ), referenceSlotCount( referenceSlotCount_ ), pReferenceSlots( pReferenceSlots_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoBeginCodingInfoKHR( VideoBeginCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoBeginCodingInfoKHR( VkVideoBeginCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT  // reinterpret the C struct; relies on identical layout
+      : VideoBeginCodingInfoKHR( *reinterpret_cast<VideoBeginCodingInfoKHR const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    VideoBeginCodingInfoKHR( VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagsKHR flags_, VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_, VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR> const & referenceSlots_, const void * pNext_ = nullptr )  // enhanced-mode overload: count and pointer derived from the proxy, kept consistent automatically
+    : pNext( pNext_ ), flags( flags_ ), videoSession( videoSession_ ), videoSessionParameters( videoSessionParameters_ ), referenceSlotCount( static_cast<uint32_t>( referenceSlots_.size() ) ), pReferenceSlots( referenceSlots_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    VideoBeginCodingInfoKHR & operator=( VideoBeginCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoBeginCodingInfoKHR & operator=( VkVideoBeginCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & setVideoSession( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_ ) VULKAN_HPP_NOEXCEPT
+    {
+      videoSession = videoSession_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & setVideoSessionParameters( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters_ ) VULKAN_HPP_NOEXCEPT
+    {
+      videoSessionParameters = videoSessionParameters_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & setReferenceSlotCount( uint32_t referenceSlotCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      referenceSlotCount = referenceSlotCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & setPReferenceSlots( const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pReferenceSlots = pReferenceSlots_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    VideoBeginCodingInfoKHR & setReferenceSlots( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR> const & referenceSlots_ ) VULKAN_HPP_NOEXCEPT  // sets count and pointer together from one proxy
+    {
+      referenceSlotCount = static_cast<uint32_t>( referenceSlots_.size() );
+      pReferenceSlots = referenceSlots_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkVideoBeginCodingInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoBeginCodingInfoKHR*>( this );
+    }
+
+    operator VkVideoBeginCodingInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoBeginCodingInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagsKHR const &, VULKAN_HPP_NAMESPACE::VideoSessionKHR const &, VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT  // member-wise tuple view; backs the reflect-based operator== below
+    {
+      return std::tie( sType, pNext, flags, videoSession, videoSessionParameters, referenceSlotCount, pReferenceSlots );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( VideoBeginCodingInfoKHR const & ) const = default;
+#else
+    bool operator==( VideoBeginCodingInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( videoSession == rhs.videoSession )
+          && ( videoSessionParameters == rhs.videoSessionParameters )
+          && ( referenceSlotCount == rhs.referenceSlotCount )
+          && ( pReferenceSlots == rhs.pReferenceSlots );
+#endif
+    }
+
+    bool operator!=( VideoBeginCodingInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoBeginCodingInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagsKHR flags = {};
+    VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession = {};
+    VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters = {};
+    uint32_t referenceSlotCount = {};  // length of pReferenceSlots
+    const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoBeginCodingInfoKHR>  // StructureType enum value -> C++ struct type mapping
+  {
+    using Type = VideoBeginCodingInfoKHR;
+  };
+
+  struct VideoCapabilitiesKHR  // generated wrapper for VkVideoCapabilitiesKHR; no setters generated — a query-output structure
+  {
+    using NativeType = VkVideoCapabilitiesKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoCapabilitiesKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR_14 VideoCapabilitiesKHR(VULKAN_HPP_NAMESPACE::VideoCapabilityFlagsKHR flags_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize minBitstreamBufferOffsetAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize minBitstreamBufferSizeAlignment_ = {}, VULKAN_HPP_NAMESPACE::Extent2D pictureAccessGranularity_ = {}, VULKAN_HPP_NAMESPACE::Extent2D minCodedExtent_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxCodedExtent_ = {}, uint32_t maxDpbSlots_ = {}, uint32_t maxActiveReferencePictures_ = {}, VULKAN_HPP_NAMESPACE::ExtensionProperties stdHeaderVersion_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), minBitstreamBufferOffsetAlignment( minBitstreamBufferOffsetAlignment_ ), minBitstreamBufferSizeAlignment( minBitstreamBufferSizeAlignment_ ), pictureAccessGranularity( pictureAccessGranularity_ ), minCodedExtent( minCodedExtent_ ), maxCodedExtent( maxCodedExtent_ ), maxDpbSlots( maxDpbSlots_ ), maxActiveReferencePictures( maxActiveReferencePictures_ ), stdHeaderVersion( stdHeaderVersion_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 VideoCapabilitiesKHR( VideoCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoCapabilitiesKHR( VkVideoCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT  // reinterpret the C struct; relies on identical layout
+      : VideoCapabilitiesKHR( *reinterpret_cast<VideoCapabilitiesKHR const *>( &rhs ) )
+    {}
+
+
+    VideoCapabilitiesKHR & operator=( VideoCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoCapabilitiesKHR & operator=( VkVideoCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkVideoCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoCapabilitiesKHR*>( this );
+    }
+
+    operator VkVideoCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoCapabilitiesKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::VideoCapabilityFlagsKHR const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ExtensionProperties const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT  // member-wise tuple view; backs the reflect-based operator== below
+    {
+      return std::tie( sType, pNext, flags, minBitstreamBufferOffsetAlignment, minBitstreamBufferSizeAlignment, pictureAccessGranularity, minCodedExtent, maxCodedExtent, maxDpbSlots, maxActiveReferencePictures, stdHeaderVersion );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( VideoCapabilitiesKHR const & ) const = default;
+#else
+    bool operator==( VideoCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( minBitstreamBufferOffsetAlignment == rhs.minBitstreamBufferOffsetAlignment )
+          && ( minBitstreamBufferSizeAlignment == rhs.minBitstreamBufferSizeAlignment )
+          && ( pictureAccessGranularity == rhs.pictureAccessGranularity )
+          && ( minCodedExtent == rhs.minCodedExtent )
+          && ( maxCodedExtent == rhs.maxCodedExtent )
+          && ( maxDpbSlots == rhs.maxDpbSlots )
+          && ( maxActiveReferencePictures == rhs.maxActiveReferencePictures )
+          && ( stdHeaderVersion == rhs.stdHeaderVersion );
+#endif
+    }
+
+    bool operator!=( VideoCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoCapabilitiesKHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::VideoCapabilityFlagsKHR flags = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize minBitstreamBufferOffsetAlignment = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize minBitstreamBufferSizeAlignment = {};
+    VULKAN_HPP_NAMESPACE::Extent2D pictureAccessGranularity = {};
+    VULKAN_HPP_NAMESPACE::Extent2D minCodedExtent = {};
+    VULKAN_HPP_NAMESPACE::Extent2D maxCodedExtent = {};
+    uint32_t maxDpbSlots = {};
+    uint32_t maxActiveReferencePictures = {};
+    VULKAN_HPP_NAMESPACE::ExtensionProperties stdHeaderVersion = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoCapabilitiesKHR>  // StructureType enum value -> C++ struct type mapping
+  {
+    using Type = VideoCapabilitiesKHR;
+  };
+
+  struct VideoCodingControlInfoKHR  // generated C++ value wrapper for VkVideoCodingControlInfoKHR: constructors, chainable setters, Vk conversions
+  {
+    using NativeType = VkVideoCodingControlInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoCodingControlInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR VideoCodingControlInfoKHR(VULKAN_HPP_NAMESPACE::VideoCodingControlFlagsKHR flags_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoCodingControlInfoKHR( VideoCodingControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoCodingControlInfoKHR( VkVideoCodingControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT  // reinterpret the C struct; relies on identical layout
+      : VideoCodingControlInfoKHR( *reinterpret_cast<VideoCodingControlInfoKHR const *>( &rhs ) )
+    {}
+
+
+    VideoCodingControlInfoKHR & operator=( VideoCodingControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoCodingControlInfoKHR & operator=( VkVideoCodingControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 VideoCodingControlInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoCodingControlInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoCodingControlFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkVideoCodingControlInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoCodingControlInfoKHR*>( this );
+    }
+
+    operator VkVideoCodingControlInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoCodingControlInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::VideoCodingControlFlagsKHR const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT  // member-wise tuple view; backs the reflect-based operator== below
+    {
+      return std::tie( sType, pNext, flags );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( VideoCodingControlInfoKHR const & ) const = default;
+#else
+    bool operator==( VideoCodingControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags );
+#endif
+    }
+
+    bool operator!=( VideoCodingControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoCodingControlInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::VideoCodingControlFlagsKHR flags = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoCodingControlInfoKHR>  // StructureType enum value -> C++ struct type mapping
+  {
+    using Type = VideoCodingControlInfoKHR;
+  };
+
+  struct VideoDecodeCapabilitiesKHR  // generated wrapper for VkVideoDecodeCapabilitiesKHR; no setters generated — a query-output structure
+  {
+    using NativeType = VkVideoDecodeCapabilitiesKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeCapabilitiesKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR VideoDecodeCapabilitiesKHR(VULKAN_HPP_NAMESPACE::VideoDecodeCapabilityFlagsKHR flags_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoDecodeCapabilitiesKHR( VideoDecodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoDecodeCapabilitiesKHR( VkVideoDecodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT  // reinterpret the C struct; relies on identical layout
+      : VideoDecodeCapabilitiesKHR( *reinterpret_cast<VideoDecodeCapabilitiesKHR const *>( &rhs ) )
+    {}
+
+
+    VideoDecodeCapabilitiesKHR & operator=( VideoDecodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoDecodeCapabilitiesKHR & operator=( VkVideoDecodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeCapabilitiesKHR const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkVideoDecodeCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoDecodeCapabilitiesKHR*>( this );
+    }
+
+    operator VkVideoDecodeCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoDecodeCapabilitiesKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::VideoDecodeCapabilityFlagsKHR const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT  // member-wise tuple view; backs the reflect-based operator== below
+    {
+      return std::tie( sType, pNext, flags );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( VideoDecodeCapabilitiesKHR const & ) const = default;
+#else
+    bool operator==( VideoDecodeCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags );
+#endif
+    }
+
+    bool operator!=( VideoDecodeCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeCapabilitiesKHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::VideoDecodeCapabilityFlagsKHR flags = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoDecodeCapabilitiesKHR>  // StructureType enum value -> C++ struct type mapping
+  {
+    using Type = VideoDecodeCapabilitiesKHR;
+  };
+
+  // C++ wrapper for VkVideoDecodeH264CapabilitiesKHR (VK_KHR_video_decode_h264):
+  // per-profile H.264 decode capabilities.  Output-only struct (non-const
+  // 'void * pNext', no setters).  Generated code — do not hand-edit logic.
+  struct VideoDecodeH264CapabilitiesKHR
+  {
+    using NativeType = VkVideoDecodeH264CapabilitiesKHR;
+
+    // allowDuplicate: may appear at most once in a pNext chain.
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264CapabilitiesKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR VideoDecodeH264CapabilitiesKHR(StdVideoH264LevelIdc maxLevelIdc_ = {}, VULKAN_HPP_NAMESPACE::Offset2D fieldOffsetGranularity_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), maxLevelIdc( maxLevelIdc_ ), fieldOffsetGranularity( fieldOffsetGranularity_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoDecodeH264CapabilitiesKHR( VideoDecodeH264CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoDecodeH264CapabilitiesKHR( VkVideoDecodeH264CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoDecodeH264CapabilitiesKHR( *reinterpret_cast<VideoDecodeH264CapabilitiesKHR const *>( &rhs ) )
+    {}
+
+
+    VideoDecodeH264CapabilitiesKHR & operator=( VideoDecodeH264CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoDecodeH264CapabilitiesKHR & operator=( VkVideoDecodeH264CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH264CapabilitiesKHR const *>( &rhs );
+      return *this;
+    }
+
+
+    // Conversions to the C struct: the wrapper declares exactly the same
+    // members in the same order, so the generated code relies on identical layout.
+    operator VkVideoDecodeH264CapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoDecodeH264CapabilitiesKHR*>( this );
+    }
+
+    operator VkVideoDecodeH264CapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoDecodeH264CapabilitiesKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Reflection tuple of all members (C++14 and later deduces the type via 'auto').
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, StdVideoH264LevelIdc const &, VULKAN_HPP_NAMESPACE::Offset2D const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, maxLevelIdc, fieldOffsetGranularity );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    // StdVideoH264LevelIdc comes from the external video-std headers and has no
+    // comparison operators, so it is compared bytewise via memcmp.
+    std::strong_ordering operator<=>( VideoDecodeH264CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
+      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
+      if ( auto cmp = memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH264LevelIdc ) ); cmp != 0 )
+        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
+      if ( auto cmp = fieldOffsetGranularity <=> rhs.fieldOffsetGranularity; cmp != 0 ) return cmp;
+
+      return std::strong_ordering::equivalent;
+    }
+#endif
+
+    bool operator==( VideoDecodeH264CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH264LevelIdc ) ) == 0 )
+          && ( fieldOffsetGranularity == rhs.fieldOffsetGranularity );
+    }
+
+    bool operator!=( VideoDecodeH264CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264CapabilitiesKHR;
+    void * pNext = {};
+    StdVideoH264LevelIdc maxLevelIdc = {};
+    VULKAN_HPP_NAMESPACE::Offset2D fieldOffsetGranularity = {};
+
+  };
+
+  // sType -> C++ type lookup trait for this struct.
+  template <>
+  struct CppType<StructureType, StructureType::eVideoDecodeH264CapabilitiesKHR>
+  {
+    using Type = VideoDecodeH264CapabilitiesKHR;
+  };
+
+  // C++ wrapper for VkVideoDecodeH264DpbSlotInfoKHR: attaches the codec-specific
+  // reference picture info (StdVideoDecodeH264ReferenceInfo) to a DPB slot.
+  // Input struct, so it carries fluent set*() methods.  Generated code.
+  struct VideoDecodeH264DpbSlotInfoKHR
+  {
+    using NativeType = VkVideoDecodeH264DpbSlotInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264DpbSlotInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR VideoDecodeH264DpbSlotInfoKHR(const StdVideoDecodeH264ReferenceInfo * pStdReferenceInfo_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), pStdReferenceInfo( pStdReferenceInfo_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoDecodeH264DpbSlotInfoKHR( VideoDecodeH264DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoDecodeH264DpbSlotInfoKHR( VkVideoDecodeH264DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoDecodeH264DpbSlotInfoKHR( *reinterpret_cast<VideoDecodeH264DpbSlotInfoKHR const *>( &rhs ) )
+    {}
+
+
+    VideoDecodeH264DpbSlotInfoKHR & operator=( VideoDecodeH264DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoDecodeH264DpbSlotInfoKHR & operator=( VkVideoDecodeH264DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH264DpbSlotInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each returns *this so calls can be chained.
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264DpbSlotInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264DpbSlotInfoKHR & setPStdReferenceInfo( const StdVideoDecodeH264ReferenceInfo * pStdReferenceInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pStdReferenceInfo = pStdReferenceInfo_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Layout-compatible conversions to the native C struct (generated code
+    // relies on the wrapper having identical members/order).
+    operator VkVideoDecodeH264DpbSlotInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoDecodeH264DpbSlotInfoKHR*>( this );
+    }
+
+    operator VkVideoDecodeH264DpbSlotInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoDecodeH264DpbSlotInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const StdVideoDecodeH264ReferenceInfo * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, pStdReferenceInfo );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( VideoDecodeH264DpbSlotInfoKHR const & ) const = default;
+#else
+    // Member-wise equality; pointer members compare by address, not pointee.
+    bool operator==( VideoDecodeH264DpbSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pStdReferenceInfo == rhs.pStdReferenceInfo );
+#endif
+    }
+
+    bool operator!=( VideoDecodeH264DpbSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264DpbSlotInfoKHR;
+    const void * pNext = {};
+    const StdVideoDecodeH264ReferenceInfo * pStdReferenceInfo = {};
+
+  };
+
+  // sType -> C++ type lookup trait for this struct.
+  template <>
+  struct CppType<StructureType, StructureType::eVideoDecodeH264DpbSlotInfoKHR>
+  {
+    using Type = VideoDecodeH264DpbSlotInfoKHR;
+  };
+
+  // C++ wrapper for VkVideoDecodeH264PictureInfoKHR: per-picture H.264 decode
+  // parameters (std picture info plus slice offsets into the bitstream buffer).
+  // Generated code; the ArrayProxy overloads keep sliceCount / pSliceOffsets in sync.
+  struct VideoDecodeH264PictureInfoKHR
+  {
+    using NativeType = VkVideoDecodeH264PictureInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264PictureInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR VideoDecodeH264PictureInfoKHR(const StdVideoDecodeH264PictureInfo * pStdPictureInfo_ = {}, uint32_t sliceCount_ = {}, const uint32_t * pSliceOffsets_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), pStdPictureInfo( pStdPictureInfo_ ), sliceCount( sliceCount_ ), pSliceOffsets( pSliceOffsets_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoDecodeH264PictureInfoKHR( VideoDecodeH264PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoDecodeH264PictureInfoKHR( VkVideoDecodeH264PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoDecodeH264PictureInfoKHR( *reinterpret_cast<VideoDecodeH264PictureInfoKHR const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Enhanced-mode constructor: derives sliceCount from the proxy's size, so
+    // count and pointer cannot get out of sync.
+    VideoDecodeH264PictureInfoKHR( const StdVideoDecodeH264PictureInfo * pStdPictureInfo_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & sliceOffsets_, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), pStdPictureInfo( pStdPictureInfo_ ), sliceCount( static_cast<uint32_t>( sliceOffsets_.size() ) ), pSliceOffsets( sliceOffsets_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    VideoDecodeH264PictureInfoKHR & operator=( VideoDecodeH264PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoDecodeH264PictureInfoKHR & operator=( VkVideoDecodeH264PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters (chainable).
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264PictureInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264PictureInfoKHR & setPStdPictureInfo( const StdVideoDecodeH264PictureInfo * pStdPictureInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pStdPictureInfo = pStdPictureInfo_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264PictureInfoKHR & setSliceCount( uint32_t sliceCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sliceCount = sliceCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264PictureInfoKHR & setPSliceOffsets( const uint32_t * pSliceOffsets_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSliceOffsets = pSliceOffsets_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets count and pointer together from one range argument.
+    VideoDecodeH264PictureInfoKHR & setSliceOffsets( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & sliceOffsets_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sliceCount = static_cast<uint32_t>( sliceOffsets_.size() );
+      pSliceOffsets = sliceOffsets_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Layout-compatible conversions to the native C struct.
+    operator VkVideoDecodeH264PictureInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoDecodeH264PictureInfoKHR*>( this );
+    }
+
+    operator VkVideoDecodeH264PictureInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoDecodeH264PictureInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const StdVideoDecodeH264PictureInfo * const &, uint32_t const &, const uint32_t * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, pStdPictureInfo, sliceCount, pSliceOffsets );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( VideoDecodeH264PictureInfoKHR const & ) const = default;
+#else
+    // Member-wise equality; pointer members compare by address, not contents.
+    bool operator==( VideoDecodeH264PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pStdPictureInfo == rhs.pStdPictureInfo )
+          && ( sliceCount == rhs.sliceCount )
+          && ( pSliceOffsets == rhs.pSliceOffsets );
+#endif
+    }
+
+    bool operator!=( VideoDecodeH264PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264PictureInfoKHR;
+    const void * pNext = {};
+    const StdVideoDecodeH264PictureInfo * pStdPictureInfo = {};
+    uint32_t sliceCount = {};
+    const uint32_t * pSliceOffsets = {};
+
+  };
+
+  // sType -> C++ type lookup trait for this struct.
+  template <>
+  struct CppType<StructureType, StructureType::eVideoDecodeH264PictureInfoKHR>
+  {
+    using Type = VideoDecodeH264PictureInfoKHR;
+  };
+
+  // C++ wrapper for VkVideoDecodeH264ProfileInfoKHR: selects the H.264 profile
+  // (stdProfileIdc) and picture layout (progressive by default) for a video
+  // profile definition.  Generated code.
+  struct VideoDecodeH264ProfileInfoKHR
+  {
+    using NativeType = VkVideoDecodeH264ProfileInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264ProfileInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR VideoDecodeH264ProfileInfoKHR(StdVideoH264ProfileIdc stdProfileIdc_ = {}, VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureLayoutFlagBitsKHR pictureLayout_ = VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureLayoutFlagBitsKHR::eProgressive, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), stdProfileIdc( stdProfileIdc_ ), pictureLayout( pictureLayout_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoDecodeH264ProfileInfoKHR( VideoDecodeH264ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoDecodeH264ProfileInfoKHR( VkVideoDecodeH264ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoDecodeH264ProfileInfoKHR( *reinterpret_cast<VideoDecodeH264ProfileInfoKHR const *>( &rhs ) )
+    {}
+
+
+    VideoDecodeH264ProfileInfoKHR & operator=( VideoDecodeH264ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoDecodeH264ProfileInfoKHR & operator=( VkVideoDecodeH264ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH264ProfileInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters (chainable).
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264ProfileInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264ProfileInfoKHR & setStdProfileIdc( StdVideoH264ProfileIdc stdProfileIdc_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stdProfileIdc = stdProfileIdc_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264ProfileInfoKHR & setPictureLayout( VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureLayoutFlagBitsKHR pictureLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pictureLayout = pictureLayout_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Layout-compatible conversions to the native C struct.
+    operator VkVideoDecodeH264ProfileInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoDecodeH264ProfileInfoKHR*>( this );
+    }
+
+    operator VkVideoDecodeH264ProfileInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoDecodeH264ProfileInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, StdVideoH264ProfileIdc const &, VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureLayoutFlagBitsKHR const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, stdProfileIdc, pictureLayout );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    // StdVideoH264ProfileIdc (external video-std header type) has no comparison
+    // operators, so it is compared bytewise via memcmp.
+    std::strong_ordering operator<=>( VideoDecodeH264ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
+      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
+      if ( auto cmp = memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH264ProfileIdc ) ); cmp != 0 )
+        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
+      if ( auto cmp = pictureLayout <=> rhs.pictureLayout; cmp != 0 ) return cmp;
+
+      return std::strong_ordering::equivalent;
+    }
+#endif
+
+    bool operator==( VideoDecodeH264ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH264ProfileIdc ) ) == 0 )
+          && ( pictureLayout == rhs.pictureLayout );
+    }
+
+    bool operator!=( VideoDecodeH264ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264ProfileInfoKHR;
+    const void * pNext = {};
+    StdVideoH264ProfileIdc stdProfileIdc = {};
+    VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureLayoutFlagBitsKHR pictureLayout = VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureLayoutFlagBitsKHR::eProgressive;
+
+  };
+
+  // sType -> C++ type lookup trait for this struct.
+  template <>
+  struct CppType<StructureType, StructureType::eVideoDecodeH264ProfileInfoKHR>
+  {
+    using Type = VideoDecodeH264ProfileInfoKHR;
+  };
+
+  // C++ wrapper for VkVideoDecodeH264SessionParametersAddInfoKHR: batches of
+  // H.264 SPS and PPS entries to add to a session-parameters object.  The
+  // ArrayProxy overloads keep each count/pointer pair in sync.  Generated code.
+  struct VideoDecodeH264SessionParametersAddInfoKHR
+  {
+    using NativeType = VkVideoDecodeH264SessionParametersAddInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264SessionParametersAddInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR VideoDecodeH264SessionParametersAddInfoKHR(uint32_t stdSPSCount_ = {}, const StdVideoH264SequenceParameterSet * pStdSPSs_ = {}, uint32_t stdPPSCount_ = {}, const StdVideoH264PictureParameterSet * pStdPPSs_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), stdSPSCount( stdSPSCount_ ), pStdSPSs( pStdSPSs_ ), stdPPSCount( stdPPSCount_ ), pStdPPSs( pStdPPSs_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoDecodeH264SessionParametersAddInfoKHR( VideoDecodeH264SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoDecodeH264SessionParametersAddInfoKHR( VkVideoDecodeH264SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoDecodeH264SessionParametersAddInfoKHR( *reinterpret_cast<VideoDecodeH264SessionParametersAddInfoKHR const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Enhanced-mode constructor: counts are derived from the proxies' sizes.
+    VideoDecodeH264SessionParametersAddInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH264SequenceParameterSet> const & stdSPSs_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH264PictureParameterSet> const & stdPPSs_ = {}, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), stdSPSCount( static_cast<uint32_t>( stdSPSs_.size() ) ), pStdSPSs( stdSPSs_.data() ), stdPPSCount( static_cast<uint32_t>( stdPPSs_.size() ) ), pStdPPSs( stdPPSs_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    VideoDecodeH264SessionParametersAddInfoKHR & operator=( VideoDecodeH264SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoDecodeH264SessionParametersAddInfoKHR & operator=( VkVideoDecodeH264SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters (chainable).
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersAddInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersAddInfoKHR & setStdSPSCount( uint32_t stdSPSCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stdSPSCount = stdSPSCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersAddInfoKHR & setPStdSPSs( const StdVideoH264SequenceParameterSet * pStdSPSs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pStdSPSs = pStdSPSs_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets SPS count and pointer together from one range argument.
+    VideoDecodeH264SessionParametersAddInfoKHR & setStdSPSs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH264SequenceParameterSet> const & stdSPSs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stdSPSCount = static_cast<uint32_t>( stdSPSs_.size() );
+      pStdSPSs = stdSPSs_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersAddInfoKHR & setStdPPSCount( uint32_t stdPPSCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stdPPSCount = stdPPSCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersAddInfoKHR & setPStdPPSs( const StdVideoH264PictureParameterSet * pStdPPSs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pStdPPSs = pStdPPSs_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets PPS count and pointer together from one range argument.
+    VideoDecodeH264SessionParametersAddInfoKHR & setStdPPSs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH264PictureParameterSet> const & stdPPSs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stdPPSCount = static_cast<uint32_t>( stdPPSs_.size() );
+      pStdPPSs = stdPPSs_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Layout-compatible conversions to the native C struct.
+    operator VkVideoDecodeH264SessionParametersAddInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoDecodeH264SessionParametersAddInfoKHR*>( this );
+    }
+
+    operator VkVideoDecodeH264SessionParametersAddInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoDecodeH264SessionParametersAddInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const StdVideoH264SequenceParameterSet * const &, uint32_t const &, const StdVideoH264PictureParameterSet * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, stdSPSCount, pStdSPSs, stdPPSCount, pStdPPSs );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( VideoDecodeH264SessionParametersAddInfoKHR const & ) const = default;
+#else
+    // Member-wise equality; pointer members compare by address, not contents.
+    bool operator==( VideoDecodeH264SessionParametersAddInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( stdSPSCount == rhs.stdSPSCount )
+          && ( pStdSPSs == rhs.pStdSPSs )
+          && ( stdPPSCount == rhs.stdPPSCount )
+          && ( pStdPPSs == rhs.pStdPPSs );
+#endif
+    }
+
+    bool operator!=( VideoDecodeH264SessionParametersAddInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264SessionParametersAddInfoKHR;
+    const void * pNext = {};
+    uint32_t stdSPSCount = {};
+    const StdVideoH264SequenceParameterSet * pStdSPSs = {};
+    uint32_t stdPPSCount = {};
+    const StdVideoH264PictureParameterSet * pStdPPSs = {};
+
+  };
+
+  // sType -> C++ type lookup trait for this struct.
+  template <>
+  struct CppType<StructureType, StructureType::eVideoDecodeH264SessionParametersAddInfoKHR>
+  {
+    using Type = VideoDecodeH264SessionParametersAddInfoKHR;
+  };
+
+  // C++ wrapper for VkVideoDecodeH264SessionParametersCreateInfoKHR: capacities
+  // (max SPS/PPS counts) and optional initial add-info when creating an H.264
+  // decode session-parameters object.  Generated code.
+  struct VideoDecodeH264SessionParametersCreateInfoKHR
+  {
+    using NativeType = VkVideoDecodeH264SessionParametersCreateInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264SessionParametersCreateInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR VideoDecodeH264SessionParametersCreateInfoKHR(uint32_t maxStdSPSCount_ = {}, uint32_t maxStdPPSCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoKHR * pParametersAddInfo_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), maxStdSPSCount( maxStdSPSCount_ ), maxStdPPSCount( maxStdPPSCount_ ), pParametersAddInfo( pParametersAddInfo_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoDecodeH264SessionParametersCreateInfoKHR( VideoDecodeH264SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoDecodeH264SessionParametersCreateInfoKHR( VkVideoDecodeH264SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoDecodeH264SessionParametersCreateInfoKHR( *reinterpret_cast<VideoDecodeH264SessionParametersCreateInfoKHR const *>( &rhs ) )
+    {}
+
+
+    VideoDecodeH264SessionParametersCreateInfoKHR & operator=( VideoDecodeH264SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoDecodeH264SessionParametersCreateInfoKHR & operator=( VkVideoDecodeH264SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersCreateInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters (chainable).
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersCreateInfoKHR & setMaxStdSPSCount( uint32_t maxStdSPSCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxStdSPSCount = maxStdSPSCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersCreateInfoKHR & setMaxStdPPSCount( uint32_t maxStdPPSCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxStdPPSCount = maxStdPPSCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersCreateInfoKHR & setPParametersAddInfo( const VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoKHR * pParametersAddInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pParametersAddInfo = pParametersAddInfo_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Layout-compatible conversions to the native C struct.
+    operator VkVideoDecodeH264SessionParametersCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoDecodeH264SessionParametersCreateInfoKHR*>( this );
+    }
+
+    operator VkVideoDecodeH264SessionParametersCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoDecodeH264SessionParametersCreateInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoKHR * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, maxStdSPSCount, maxStdPPSCount, pParametersAddInfo );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( VideoDecodeH264SessionParametersCreateInfoKHR const & ) const = default;
+#else
+    // Member-wise equality; pParametersAddInfo compares by address, not pointee.
+    bool operator==( VideoDecodeH264SessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxStdSPSCount == rhs.maxStdSPSCount )
+          && ( maxStdPPSCount == rhs.maxStdPPSCount )
+          && ( pParametersAddInfo == rhs.pParametersAddInfo );
+#endif
+    }
+
+    bool operator!=( VideoDecodeH264SessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264SessionParametersCreateInfoKHR;
+    const void * pNext = {};
+    uint32_t maxStdSPSCount = {};
+    uint32_t maxStdPPSCount = {};
+    const VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoKHR * pParametersAddInfo = {};
+
+  };
+
+  // sType -> C++ type lookup trait for this struct.
+  template <>
+  struct CppType<StructureType, StructureType::eVideoDecodeH264SessionParametersCreateInfoKHR>
+  {
+    using Type = VideoDecodeH264SessionParametersCreateInfoKHR;
+  };
+
+  // C++ wrapper for VkVideoDecodeH265CapabilitiesKHR (VK_KHR_video_decode_h265):
+  // per-profile H.265 decode capabilities.  Output-only struct (non-const
+  // 'void * pNext', no setters).  Generated code.
+  struct VideoDecodeH265CapabilitiesKHR
+  {
+    using NativeType = VkVideoDecodeH265CapabilitiesKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265CapabilitiesKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR VideoDecodeH265CapabilitiesKHR(StdVideoH265LevelIdc maxLevelIdc_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), maxLevelIdc( maxLevelIdc_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoDecodeH265CapabilitiesKHR( VideoDecodeH265CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoDecodeH265CapabilitiesKHR( VkVideoDecodeH265CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoDecodeH265CapabilitiesKHR( *reinterpret_cast<VideoDecodeH265CapabilitiesKHR const *>( &rhs ) )
+    {}
+
+
+    VideoDecodeH265CapabilitiesKHR & operator=( VideoDecodeH265CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoDecodeH265CapabilitiesKHR & operator=( VkVideoDecodeH265CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH265CapabilitiesKHR const *>( &rhs );
+      return *this;
+    }
+
+
+    // Layout-compatible conversions to the native C struct.
+    operator VkVideoDecodeH265CapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoDecodeH265CapabilitiesKHR*>( this );
+    }
+
+    operator VkVideoDecodeH265CapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoDecodeH265CapabilitiesKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, StdVideoH265LevelIdc const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, maxLevelIdc );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    // StdVideoH265LevelIdc (external video-std header type) has no comparison
+    // operators, so it is compared bytewise via memcmp.
+    std::strong_ordering operator<=>( VideoDecodeH265CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
+      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
+      if ( auto cmp = memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH265LevelIdc ) ); cmp != 0 )
+        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
+
+      return std::strong_ordering::equivalent;
+    }
+#endif
+
+    bool operator==( VideoDecodeH265CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH265LevelIdc ) ) == 0 );
+    }
+
+    bool operator!=( VideoDecodeH265CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265CapabilitiesKHR;
+    void * pNext = {};
+    StdVideoH265LevelIdc maxLevelIdc = {};
+
+  };
+
+  // sType -> C++ type lookup trait for this struct.
+  template <>
+  struct CppType<StructureType, StructureType::eVideoDecodeH265CapabilitiesKHR>
+  {
+    using Type = VideoDecodeH265CapabilitiesKHR;
+  };
+
+  // C++ wrapper for VkVideoDecodeH265DpbSlotInfoKHR: attaches H.265 std reference
+  // info to a DPB slot. The reinterpret_cast conversion operators below assume the
+  // wrapper is layout-identical to the NativeType C struct.
+  struct VideoDecodeH265DpbSlotInfoKHR
+  {
+    using NativeType = VkVideoDecodeH265DpbSlotInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265DpbSlotInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; sType is fixed by the default member initializer below.
+VULKAN_HPP_CONSTEXPR VideoDecodeH265DpbSlotInfoKHR(const StdVideoDecodeH265ReferenceInfo * pStdReferenceInfo_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), pStdReferenceInfo( pStdReferenceInfo_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoDecodeH265DpbSlotInfoKHR( VideoDecodeH265DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the wrapper (layout-compatible).
+    VideoDecodeH265DpbSlotInfoKHR( VkVideoDecodeH265DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoDecodeH265DpbSlotInfoKHR( *reinterpret_cast<VideoDecodeH265DpbSlotInfoKHR const *>( &rhs ) )
+    {}
+
+
+    VideoDecodeH265DpbSlotInfoKHR & operator=( VideoDecodeH265DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (same layout-compatibility assumption as above).
+    VideoDecodeH265DpbSlotInfoKHR & operator=( VkVideoDecodeH265DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH265DpbSlotInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable fluent setters (builder-style): each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265DpbSlotInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265DpbSlotInfoKHR & setPStdReferenceInfo( const StdVideoDecodeH265ReferenceInfo * pStdReferenceInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pStdReferenceInfo = pStdReferenceInfo_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the C struct, for passing directly to the C API.
+    operator VkVideoDecodeH265DpbSlotInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoDecodeH265DpbSlotInfoKHR*>( this );
+    }
+
+    operator VkVideoDecodeH265DpbSlotInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoDecodeH265DpbSlotInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Member-wise tuple view; the explicit tuple type is only spelled out pre-C++14.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const StdVideoDecodeH265ReferenceInfo * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, pStdReferenceInfo );
+    }
+#endif
+
+
+    // Defaulted member-wise three-way comparison when available; otherwise manual ==/!=.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( VideoDecodeH265DpbSlotInfoKHR const & ) const = default;
+#else
+    bool operator==( VideoDecodeH265DpbSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pStdReferenceInfo == rhs.pStdReferenceInfo );
+#endif
+    }
+
+    bool operator!=( VideoDecodeH265DpbSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Public data members mirror the C struct field order exactly.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265DpbSlotInfoKHR;
+    const void * pNext = {};
+    const StdVideoDecodeH265ReferenceInfo * pStdReferenceInfo = {};
+
+  };
+
+  // Trait specialization: maps the sType enumerant back to its C++ wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eVideoDecodeH265DpbSlotInfoKHR>
+  {
+    using Type = VideoDecodeH265DpbSlotInfoKHR;
+  };
+
+  // C++ wrapper for VkVideoDecodeH265PictureInfoKHR: per-picture H.265 decode info
+  // (std picture info plus slice-segment offset array). Layout-identical to the
+  // NativeType C struct; the reinterpret_cast conversions depend on that.
+  struct VideoDecodeH265PictureInfoKHR
+  {
+    using NativeType = VkVideoDecodeH265PictureInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265PictureInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; sType is fixed by the default member initializer below.
+VULKAN_HPP_CONSTEXPR VideoDecodeH265PictureInfoKHR(StdVideoDecodeH265PictureInfo * pStdPictureInfo_ = {}, uint32_t sliceSegmentCount_ = {}, const uint32_t * pSliceSegmentOffsets_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), pStdPictureInfo( pStdPictureInfo_ ), sliceSegmentCount( sliceSegmentCount_ ), pSliceSegmentOffsets( pSliceSegmentOffsets_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoDecodeH265PictureInfoKHR( VideoDecodeH265PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the wrapper (layout-compatible).
+    VideoDecodeH265PictureInfoKHR( VkVideoDecodeH265PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoDecodeH265PictureInfoKHR( *reinterpret_cast<VideoDecodeH265PictureInfoKHR const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Enhanced-mode constructor: derives count + pointer from a single array proxy,
+    // keeping sliceSegmentCount and pSliceSegmentOffsets in sync automatically.
+    VideoDecodeH265PictureInfoKHR( StdVideoDecodeH265PictureInfo * pStdPictureInfo_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & sliceSegmentOffsets_, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), pStdPictureInfo( pStdPictureInfo_ ), sliceSegmentCount( static_cast<uint32_t>( sliceSegmentOffsets_.size() ) ), pSliceSegmentOffsets( sliceSegmentOffsets_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    VideoDecodeH265PictureInfoKHR & operator=( VideoDecodeH265PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (same layout-compatibility assumption as above).
+    VideoDecodeH265PictureInfoKHR & operator=( VkVideoDecodeH265PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH265PictureInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable fluent setters (builder-style): each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265PictureInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265PictureInfoKHR & setPStdPictureInfo( StdVideoDecodeH265PictureInfo * pStdPictureInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pStdPictureInfo = pStdPictureInfo_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265PictureInfoKHR & setSliceSegmentCount( uint32_t sliceSegmentCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sliceSegmentCount = sliceSegmentCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265PictureInfoKHR & setPSliceSegmentOffsets( const uint32_t * pSliceSegmentOffsets_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSliceSegmentOffsets = pSliceSegmentOffsets_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Array-proxy setter: updates count and pointer together.
+    VideoDecodeH265PictureInfoKHR & setSliceSegmentOffsets( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & sliceSegmentOffsets_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sliceSegmentCount = static_cast<uint32_t>( sliceSegmentOffsets_.size() );
+      pSliceSegmentOffsets = sliceSegmentOffsets_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the C struct, for passing directly to the C API.
+    operator VkVideoDecodeH265PictureInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoDecodeH265PictureInfoKHR*>( this );
+    }
+
+    operator VkVideoDecodeH265PictureInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoDecodeH265PictureInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Member-wise tuple view; the explicit tuple type is only spelled out pre-C++14.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, StdVideoDecodeH265PictureInfo * const &, uint32_t const &, const uint32_t * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, pStdPictureInfo, sliceSegmentCount, pSliceSegmentOffsets );
+    }
+#endif
+
+
+    // Defaulted member-wise three-way comparison when available; otherwise manual ==/!=.
+    // Note: pointer members compare by address, not by pointee contents.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( VideoDecodeH265PictureInfoKHR const & ) const = default;
+#else
+    bool operator==( VideoDecodeH265PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pStdPictureInfo == rhs.pStdPictureInfo )
+          && ( sliceSegmentCount == rhs.sliceSegmentCount )
+          && ( pSliceSegmentOffsets == rhs.pSliceSegmentOffsets );
+#endif
+    }
+
+    bool operator!=( VideoDecodeH265PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Public data members mirror the C struct field order exactly.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265PictureInfoKHR;
+    const void * pNext = {};
+    StdVideoDecodeH265PictureInfo * pStdPictureInfo = {};
+    uint32_t sliceSegmentCount = {};
+    const uint32_t * pSliceSegmentOffsets = {};
+
+  };
+
+  // Trait specialization: maps the sType enumerant back to its C++ wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eVideoDecodeH265PictureInfoKHR>
+  {
+    using Type = VideoDecodeH265PictureInfoKHR;
+  };
+
+  // C++ wrapper for VkVideoDecodeH265ProfileInfoKHR: selects the H.265 profile
+  // (StdVideoH265ProfileIdc) for a video session. Layout-identical to the
+  // NativeType C struct; the reinterpret_cast conversions depend on that.
+  struct VideoDecodeH265ProfileInfoKHR
+  {
+    using NativeType = VkVideoDecodeH265ProfileInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265ProfileInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; sType is fixed by the default member initializer below.
+VULKAN_HPP_CONSTEXPR VideoDecodeH265ProfileInfoKHR(StdVideoH265ProfileIdc stdProfileIdc_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), stdProfileIdc( stdProfileIdc_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoDecodeH265ProfileInfoKHR( VideoDecodeH265ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the wrapper (layout-compatible).
+    VideoDecodeH265ProfileInfoKHR( VkVideoDecodeH265ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoDecodeH265ProfileInfoKHR( *reinterpret_cast<VideoDecodeH265ProfileInfoKHR const *>( &rhs ) )
+    {}
+
+
+    VideoDecodeH265ProfileInfoKHR & operator=( VideoDecodeH265ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (same layout-compatibility assumption as above).
+    VideoDecodeH265ProfileInfoKHR & operator=( VkVideoDecodeH265ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH265ProfileInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable fluent setters (builder-style): each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265ProfileInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265ProfileInfoKHR & setStdProfileIdc( StdVideoH265ProfileIdc stdProfileIdc_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stdProfileIdc = stdProfileIdc_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the C struct, for passing directly to the C API.
+    operator VkVideoDecodeH265ProfileInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoDecodeH265ProfileInfoKHR*>( this );
+    }
+
+    operator VkVideoDecodeH265ProfileInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoDecodeH265ProfileInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Member-wise tuple view; the explicit tuple type is only spelled out pre-C++14.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, StdVideoH265ProfileIdc const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, stdProfileIdc );
+    }
+#endif
+
+
+    // Hand-rolled comparison: stdProfileIdc is compared bytewise via memcmp
+    // (presumably because the video-std type provides no comparison operators —
+    // NOTE(review): confirm against vk_video headers), so <=> cannot be defaulted.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    std::strong_ordering operator<=>( VideoDecodeH265ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
+      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
+      if ( auto cmp = memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH265ProfileIdc ) ); cmp != 0 )
+        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
+
+      return std::strong_ordering::equivalent;
+    }
+#endif
+
+    bool operator==( VideoDecodeH265ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH265ProfileIdc ) ) == 0 );
+    }
+
+    bool operator!=( VideoDecodeH265ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+    // Public data members mirror the C struct field order exactly.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265ProfileInfoKHR;
+    const void * pNext = {};
+    StdVideoH265ProfileIdc stdProfileIdc = {};
+
+  };
+
+  // Trait specialization: maps the sType enumerant back to its C++ wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eVideoDecodeH265ProfileInfoKHR>
+  {
+    using Type = VideoDecodeH265ProfileInfoKHR;
+  };
+
+  // C++ wrapper for VkVideoDecodeH265SessionParametersAddInfoKHR: batches of H.265
+  // VPS/SPS/PPS parameter sets (count + pointer per set kind) to add to a session
+  // parameters object. Layout-identical to the NativeType C struct.
+  struct VideoDecodeH265SessionParametersAddInfoKHR
+  {
+    using NativeType = VkVideoDecodeH265SessionParametersAddInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265SessionParametersAddInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; sType is fixed by the default member initializer below.
+VULKAN_HPP_CONSTEXPR VideoDecodeH265SessionParametersAddInfoKHR(uint32_t stdVPSCount_ = {}, const StdVideoH265VideoParameterSet * pStdVPSs_ = {}, uint32_t stdSPSCount_ = {}, const StdVideoH265SequenceParameterSet * pStdSPSs_ = {}, uint32_t stdPPSCount_ = {}, const StdVideoH265PictureParameterSet * pStdPPSs_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), stdVPSCount( stdVPSCount_ ), pStdVPSs( pStdVPSs_ ), stdSPSCount( stdSPSCount_ ), pStdSPSs( pStdSPSs_ ), stdPPSCount( stdPPSCount_ ), pStdPPSs( pStdPPSs_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoDecodeH265SessionParametersAddInfoKHR( VideoDecodeH265SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the wrapper (layout-compatible).
+    VideoDecodeH265SessionParametersAddInfoKHR( VkVideoDecodeH265SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoDecodeH265SessionParametersAddInfoKHR( *reinterpret_cast<VideoDecodeH265SessionParametersAddInfoKHR const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Enhanced-mode constructor: each count/pointer pair is derived from one array
+    // proxy, so the counts can never disagree with the arrays.
+    VideoDecodeH265SessionParametersAddInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH265VideoParameterSet> const & stdVPSs_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH265SequenceParameterSet> const & stdSPSs_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH265PictureParameterSet> const & stdPPSs_ = {}, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), stdVPSCount( static_cast<uint32_t>( stdVPSs_.size() ) ), pStdVPSs( stdVPSs_.data() ), stdSPSCount( static_cast<uint32_t>( stdSPSs_.size() ) ), pStdSPSs( stdSPSs_.data() ), stdPPSCount( static_cast<uint32_t>( stdPPSs_.size() ) ), pStdPPSs( stdPPSs_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    VideoDecodeH265SessionParametersAddInfoKHR & operator=( VideoDecodeH265SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (same layout-compatibility assumption as above).
+    VideoDecodeH265SessionParametersAddInfoKHR & operator=( VkVideoDecodeH265SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable fluent setters (builder-style): each returns *this. The array-proxy
+    // variants update a count and its pointer together.
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoKHR & setStdVPSCount( uint32_t stdVPSCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stdVPSCount = stdVPSCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoKHR & setPStdVPSs( const StdVideoH265VideoParameterSet * pStdVPSs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pStdVPSs = pStdVPSs_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    VideoDecodeH265SessionParametersAddInfoKHR & setStdVPSs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH265VideoParameterSet> const & stdVPSs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stdVPSCount = static_cast<uint32_t>( stdVPSs_.size() );
+      pStdVPSs = stdVPSs_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoKHR & setStdSPSCount( uint32_t stdSPSCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stdSPSCount = stdSPSCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoKHR & setPStdSPSs( const StdVideoH265SequenceParameterSet * pStdSPSs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pStdSPSs = pStdSPSs_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    VideoDecodeH265SessionParametersAddInfoKHR & setStdSPSs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH265SequenceParameterSet> const & stdSPSs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stdSPSCount = static_cast<uint32_t>( stdSPSs_.size() );
+      pStdSPSs = stdSPSs_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoKHR & setStdPPSCount( uint32_t stdPPSCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stdPPSCount = stdPPSCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoKHR & setPStdPPSs( const StdVideoH265PictureParameterSet * pStdPPSs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pStdPPSs = pStdPPSs_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    VideoDecodeH265SessionParametersAddInfoKHR & setStdPPSs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH265PictureParameterSet> const & stdPPSs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stdPPSCount = static_cast<uint32_t>( stdPPSs_.size() );
+      pStdPPSs = stdPPSs_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the C struct, for passing directly to the C API.
+    operator VkVideoDecodeH265SessionParametersAddInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoDecodeH265SessionParametersAddInfoKHR*>( this );
+    }
+
+    operator VkVideoDecodeH265SessionParametersAddInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoDecodeH265SessionParametersAddInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Member-wise tuple view; the explicit tuple type is only spelled out pre-C++14.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const StdVideoH265VideoParameterSet * const &, uint32_t const &, const StdVideoH265SequenceParameterSet * const &, uint32_t const &, const StdVideoH265PictureParameterSet * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, stdVPSCount, pStdVPSs, stdSPSCount, pStdSPSs, stdPPSCount, pStdPPSs );
+    }
+#endif
+
+
+    // Defaulted member-wise three-way comparison when available; otherwise manual ==/!=.
+    // Note: parameter-set pointers compare by address, not by pointee contents.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( VideoDecodeH265SessionParametersAddInfoKHR const & ) const = default;
+#else
+    bool operator==( VideoDecodeH265SessionParametersAddInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( stdVPSCount == rhs.stdVPSCount )
+          && ( pStdVPSs == rhs.pStdVPSs )
+          && ( stdSPSCount == rhs.stdSPSCount )
+          && ( pStdSPSs == rhs.pStdSPSs )
+          && ( stdPPSCount == rhs.stdPPSCount )
+          && ( pStdPPSs == rhs.pStdPPSs );
+#endif
+    }
+
+    bool operator!=( VideoDecodeH265SessionParametersAddInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Public data members mirror the C struct field order exactly.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265SessionParametersAddInfoKHR;
+    const void * pNext = {};
+    uint32_t stdVPSCount = {};
+    const StdVideoH265VideoParameterSet * pStdVPSs = {};
+    uint32_t stdSPSCount = {};
+    const StdVideoH265SequenceParameterSet * pStdSPSs = {};
+    uint32_t stdPPSCount = {};
+    const StdVideoH265PictureParameterSet * pStdPPSs = {};
+
+  };
+
+  // Trait specialization: maps the sType enumerant back to its C++ wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eVideoDecodeH265SessionParametersAddInfoKHR>
+  {
+    using Type = VideoDecodeH265SessionParametersAddInfoKHR;
+  };
+
+  // C++ wrapper for VkVideoDecodeH265SessionParametersCreateInfoKHR: capacities
+  // (max VPS/SPS/PPS counts) for a session parameters object, plus an optional
+  // initial batch of parameter sets via pParametersAddInfo. Layout-identical to
+  // the NativeType C struct.
+  struct VideoDecodeH265SessionParametersCreateInfoKHR
+  {
+    using NativeType = VkVideoDecodeH265SessionParametersCreateInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265SessionParametersCreateInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; sType is fixed by the default member initializer below.
+VULKAN_HPP_CONSTEXPR VideoDecodeH265SessionParametersCreateInfoKHR(uint32_t maxStdVPSCount_ = {}, uint32_t maxStdSPSCount_ = {}, uint32_t maxStdPPSCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoKHR * pParametersAddInfo_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), maxStdVPSCount( maxStdVPSCount_ ), maxStdSPSCount( maxStdSPSCount_ ), maxStdPPSCount( maxStdPPSCount_ ), pParametersAddInfo( pParametersAddInfo_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoDecodeH265SessionParametersCreateInfoKHR( VideoDecodeH265SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as the wrapper (layout-compatible).
+    VideoDecodeH265SessionParametersCreateInfoKHR( VkVideoDecodeH265SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoDecodeH265SessionParametersCreateInfoKHR( *reinterpret_cast<VideoDecodeH265SessionParametersCreateInfoKHR const *>( &rhs ) )
+    {}
+
+
+    VideoDecodeH265SessionParametersCreateInfoKHR & operator=( VideoDecodeH265SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (same layout-compatibility assumption as above).
+    VideoDecodeH265SessionParametersCreateInfoKHR & operator=( VkVideoDecodeH265SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersCreateInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable fluent setters (builder-style): each returns *this.
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersCreateInfoKHR & setMaxStdVPSCount( uint32_t maxStdVPSCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxStdVPSCount = maxStdVPSCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersCreateInfoKHR & setMaxStdSPSCount( uint32_t maxStdSPSCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxStdSPSCount = maxStdSPSCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersCreateInfoKHR & setMaxStdPPSCount( uint32_t maxStdPPSCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxStdPPSCount = maxStdPPSCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersCreateInfoKHR & setPParametersAddInfo( const VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoKHR * pParametersAddInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pParametersAddInfo = pParametersAddInfo_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the C struct, for passing directly to the C API.
+    operator VkVideoDecodeH265SessionParametersCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoDecodeH265SessionParametersCreateInfoKHR*>( this );
+    }
+
+    operator VkVideoDecodeH265SessionParametersCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoDecodeH265SessionParametersCreateInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Member-wise tuple view; the explicit tuple type is only spelled out pre-C++14.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, uint32_t const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoKHR * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, maxStdVPSCount, maxStdSPSCount, maxStdPPSCount, pParametersAddInfo );
+    }
+#endif
+
+
+    // Defaulted member-wise three-way comparison when available; otherwise manual ==/!=.
+    // Note: pParametersAddInfo compares by address, not by pointee contents.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( VideoDecodeH265SessionParametersCreateInfoKHR const & ) const = default;
+#else
+    bool operator==( VideoDecodeH265SessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxStdVPSCount == rhs.maxStdVPSCount )
+          && ( maxStdSPSCount == rhs.maxStdSPSCount )
+          && ( maxStdPPSCount == rhs.maxStdPPSCount )
+          && ( pParametersAddInfo == rhs.pParametersAddInfo );
+#endif
+    }
+
+    bool operator!=( VideoDecodeH265SessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Public data members mirror the C struct field order exactly.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265SessionParametersCreateInfoKHR;
+    const void * pNext = {};
+    uint32_t maxStdVPSCount = {};
+    uint32_t maxStdSPSCount = {};
+    uint32_t maxStdPPSCount = {};
+    const VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoKHR * pParametersAddInfo = {};
+
+  };
+
+  // Trait specialization: maps the sType enumerant back to its C++ wrapper type.
+  template <>
+  struct CppType<StructureType, StructureType::eVideoDecodeH265SessionParametersCreateInfoKHR>
+  {
+    using Type = VideoDecodeH265SessionParametersCreateInfoKHR;
+  };
+
+  struct VideoDecodeInfoKHR
+  {
+    using NativeType = VkVideoDecodeInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR VideoDecodeInfoKHR(VULKAN_HPP_NAMESPACE::VideoDecodeFlagsKHR flags_ = {}, VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize srcBufferOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize srcBufferRange_ = {}, VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR dstPictureResource_ = {}, const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pSetupReferenceSlot_ = {}, uint32_t referenceSlotCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), srcBuffer( srcBuffer_ ), srcBufferOffset( srcBufferOffset_ ), srcBufferRange( srcBufferRange_ ), dstPictureResource( dstPictureResource_ ), pSetupReferenceSlot( pSetupReferenceSlot_ ), referenceSlotCount( referenceSlotCount_ ), pReferenceSlots( pReferenceSlots_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoDecodeInfoKHR( VideoDecodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoDecodeInfoKHR( VkVideoDecodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoDecodeInfoKHR( *reinterpret_cast<VideoDecodeInfoKHR const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    VideoDecodeInfoKHR( VULKAN_HPP_NAMESPACE::VideoDecodeFlagsKHR flags_, VULKAN_HPP_NAMESPACE::Buffer srcBuffer_, VULKAN_HPP_NAMESPACE::DeviceSize srcBufferOffset_, VULKAN_HPP_NAMESPACE::DeviceSize srcBufferRange_, VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR dstPictureResource_, const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pSetupReferenceSlot_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR> const & referenceSlots_, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), flags( flags_ ), srcBuffer( srcBuffer_ ), srcBufferOffset( srcBufferOffset_ ), srcBufferRange( srcBufferRange_ ), dstPictureResource( dstPictureResource_ ), pSetupReferenceSlot( pSetupReferenceSlot_ ), referenceSlotCount( static_cast<uint32_t>( referenceSlots_.size() ) ), pReferenceSlots( referenceSlots_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    VideoDecodeInfoKHR & operator=( VideoDecodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoDecodeInfoKHR & operator=( VkVideoDecodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoDecodeFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setSrcBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcBuffer = srcBuffer_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setSrcBufferOffset( VULKAN_HPP_NAMESPACE::DeviceSize srcBufferOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcBufferOffset = srcBufferOffset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setSrcBufferRange( VULKAN_HPP_NAMESPACE::DeviceSize srcBufferRange_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcBufferRange = srcBufferRange_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setDstPictureResource( VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR const & dstPictureResource_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstPictureResource = dstPictureResource_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setPSetupReferenceSlot( const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pSetupReferenceSlot_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSetupReferenceSlot = pSetupReferenceSlot_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setReferenceSlotCount( uint32_t referenceSlotCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      referenceSlotCount = referenceSlotCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setPReferenceSlots( const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pReferenceSlots = pReferenceSlots_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    VideoDecodeInfoKHR & setReferenceSlots( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR> const & referenceSlots_ ) VULKAN_HPP_NOEXCEPT
+    {
+      referenceSlotCount = static_cast<uint32_t>( referenceSlots_.size() );
+      pReferenceSlots = referenceSlots_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost const view onto the native C struct; relies on this wrapper
+    // being layout-compatible with VkVideoDecodeInfoKHR.
+    operator VkVideoDecodeInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoDecodeInfoKHR*>( this );
+    }
+
+    // Mutable view onto the native C struct.
+    operator VkVideoDecodeInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoDecodeInfoKHR*>( this );
+    }
+
+    // reflect(): tuple of const references to every member, in declaration
+    // order; used by the reflection-based operator== below.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::VideoDecodeFlagsKHR const &, VULKAN_HPP_NAMESPACE::Buffer const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR const &, const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, srcBuffer, srcBufferOffset, srcBufferRange, dstPictureResource, pSetupReferenceSlot, referenceSlotCount, pReferenceSlots );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+// Defaulted three-way comparison: memberwise, in declaration order.
+auto operator<=>( VideoDecodeInfoKHR const & ) const = default;
+#else
+    // Equality: tuple comparison via reflect() when reflection is enabled,
+    // otherwise explicit memberwise comparison. Pointer members (pNext,
+    // pSetupReferenceSlot, pReferenceSlots) compare by address, not contents.
+    bool operator==( VideoDecodeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( srcBuffer == rhs.srcBuffer )
+          && ( srcBufferOffset == rhs.srcBufferOffset )
+          && ( srcBufferRange == rhs.srcBufferRange )
+          && ( dstPictureResource == rhs.dstPictureResource )
+          && ( pSetupReferenceSlot == rhs.pSetupReferenceSlot )
+          && ( referenceSlotCount == rhs.referenceSlotCount )
+          && ( pReferenceSlots == rhs.pReferenceSlots );
+#endif
+    }
+
+    bool operator!=( VideoDecodeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::VideoDecodeFlagsKHR flags = {};
+    VULKAN_HPP_NAMESPACE::Buffer srcBuffer = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize srcBufferOffset = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize srcBufferRange = {};
+    VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR dstPictureResource = {};
+    const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pSetupReferenceSlot = {};
+    uint32_t referenceSlotCount = {};
+    const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots = {};
+
+  };
+
+  // Maps StructureType::eVideoDecodeInfoKHR back to its C++ wrapper type,
+  // enabling StructureType -> type lookup in template machinery.
+  template <>
+  struct CppType<StructureType, StructureType::eVideoDecodeInfoKHR>
+  {
+    using Type = VideoDecodeInfoKHR;
+  };
+
+  // C++ wrapper around the native VkVideoDecodeUsageInfoKHR structure
+  // (carries videoUsageHints flags; chains via sType/pNext like other
+  // Vulkan structures). Auto-generated Vulkan-Hpp style wrapper.
+  struct VideoDecodeUsageInfoKHR
+  {
+    using NativeType = VkVideoDecodeUsageInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeUsageInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR VideoDecodeUsageInfoKHR(VULKAN_HPP_NAMESPACE::VideoDecodeUsageFlagsKHR videoUsageHints_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), videoUsageHints( videoUsageHints_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoDecodeUsageInfoKHR( VideoDecodeUsageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the native C struct; relies on layout compatibility
+    // (see the reinterpret_cast conversion operators below).
+    VideoDecodeUsageInfoKHR( VkVideoDecodeUsageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoDecodeUsageInfoKHR( *reinterpret_cast<VideoDecodeUsageInfoKHR const *>( &rhs ) )
+    {}
+
+
+    VideoDecodeUsageInfoKHR & operator=( VideoDecodeUsageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the native C struct via the layout-compatible view.
+    VideoDecodeUsageInfoKHR & operator=( VkVideoDecodeUsageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeUsageInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: store the argument and return *this for chaining.
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeUsageInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeUsageInfoKHR & setVideoUsageHints( VULKAN_HPP_NAMESPACE::VideoDecodeUsageFlagsKHR videoUsageHints_ ) VULKAN_HPP_NOEXCEPT
+    {
+      videoUsageHints = videoUsageHints_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views onto the native C struct (const and mutable).
+    operator VkVideoDecodeUsageInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoDecodeUsageInfoKHR*>( this );
+    }
+
+    operator VkVideoDecodeUsageInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoDecodeUsageInfoKHR*>( this );
+    }
+
+    // reflect(): tuple of const references to all members, declaration order.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::VideoDecodeUsageFlagsKHR const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, videoUsageHints );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( VideoDecodeUsageInfoKHR const & ) const = default;
+#else
+    // Memberwise equality; pNext compares by pointer value, not chain contents.
+    bool operator==( VideoDecodeUsageInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( videoUsageHints == rhs.videoUsageHints );
+#endif
+    }
+
+    bool operator!=( VideoDecodeUsageInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeUsageInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::VideoDecodeUsageFlagsKHR videoUsageHints = {};
+
+  };
+
+  // StructureType -> C++ wrapper type mapping used by template machinery.
+  template <>
+  struct CppType<StructureType, StructureType::eVideoDecodeUsageInfoKHR>
+  {
+    using Type = VideoDecodeUsageInfoKHR;
+  };
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  // C++ wrapper around the native VkVideoEncodeCapabilitiesKHR structure.
+  // No setters are generated for this struct, and pNext is non-const —
+  // presumably a query-result (output) structure; NOTE(review): confirm
+  // against the Vulkan video-encode specification.
+  struct VideoEncodeCapabilitiesKHR
+  {
+    using NativeType = VkVideoEncodeCapabilitiesKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeCapabilitiesKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR VideoEncodeCapabilitiesKHR(VULKAN_HPP_NAMESPACE::VideoEncodeCapabilityFlagsKHR flags_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagsKHR rateControlModes_ = {}, uint8_t rateControlLayerCount_ = {}, uint8_t qualityLevelCount_ = {}, VULKAN_HPP_NAMESPACE::Extent2D inputImageDataFillAlignment_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), rateControlModes( rateControlModes_ ), rateControlLayerCount( rateControlLayerCount_ ), qualityLevelCount( qualityLevelCount_ ), inputImageDataFillAlignment( inputImageDataFillAlignment_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoEncodeCapabilitiesKHR( VideoEncodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the native C struct via the layout-compatible view.
+    VideoEncodeCapabilitiesKHR( VkVideoEncodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoEncodeCapabilitiesKHR( *reinterpret_cast<VideoEncodeCapabilitiesKHR const *>( &rhs ) )
+    {}
+
+
+    VideoEncodeCapabilitiesKHR & operator=( VideoEncodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoEncodeCapabilitiesKHR & operator=( VkVideoEncodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeCapabilitiesKHR const *>( &rhs );
+      return *this;
+    }
+
+
+    // Zero-cost views onto the native C struct (const and mutable).
+    operator VkVideoEncodeCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoEncodeCapabilitiesKHR*>( this );
+    }
+
+    operator VkVideoEncodeCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoEncodeCapabilitiesKHR*>( this );
+    }
+
+    // reflect(): tuple of const references to all members, declaration order.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::VideoEncodeCapabilityFlagsKHR const &, VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagsKHR const &, uint8_t const &, uint8_t const &, VULKAN_HPP_NAMESPACE::Extent2D const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, rateControlModes, rateControlLayerCount, qualityLevelCount, inputImageDataFillAlignment );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( VideoEncodeCapabilitiesKHR const & ) const = default;
+#else
+    // Memberwise equality; pNext compares by pointer value.
+    bool operator==( VideoEncodeCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( rateControlModes == rhs.rateControlModes )
+          && ( rateControlLayerCount == rhs.rateControlLayerCount )
+          && ( qualityLevelCount == rhs.qualityLevelCount )
+          && ( inputImageDataFillAlignment == rhs.inputImageDataFillAlignment );
+#endif
+    }
+
+    bool operator!=( VideoEncodeCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeCapabilitiesKHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::VideoEncodeCapabilityFlagsKHR flags = {};
+    VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagsKHR rateControlModes = {};
+    uint8_t rateControlLayerCount = {};
+    uint8_t qualityLevelCount = {};
+    VULKAN_HPP_NAMESPACE::Extent2D inputImageDataFillAlignment = {};
+
+  };
+
+  // StructureType -> C++ wrapper type mapping used by template machinery.
+  template <>
+  struct CppType<StructureType, StructureType::eVideoEncodeCapabilitiesKHR>
+  {
+    using Type = VideoEncodeCapabilitiesKHR;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  // C++ wrapper around the native VkVideoEncodeH264CapabilitiesEXT structure.
+  // Like VideoEncodeCapabilitiesKHR, no setters are generated and pNext is
+  // non-const — presumably an implementation-filled query result
+  // (NOTE(review): confirm against the VK_EXT_video_encode_h264 spec).
+  struct VideoEncodeH264CapabilitiesEXT
+  {
+    using NativeType = VkVideoEncodeH264CapabilitiesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264CapabilitiesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR VideoEncodeH264CapabilitiesEXT(VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilityFlagsEXT flags_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeH264InputModeFlagsEXT inputModeFlags_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeH264OutputModeFlagsEXT outputModeFlags_ = {}, uint8_t maxPPictureL0ReferenceCount_ = {}, uint8_t maxBPictureL0ReferenceCount_ = {}, uint8_t maxL1ReferenceCount_ = {}, VULKAN_HPP_NAMESPACE::Bool32 motionVectorsOverPicBoundariesFlag_ = {}, uint32_t maxBytesPerPicDenom_ = {}, uint32_t maxBitsPerMbDenom_ = {}, uint32_t log2MaxMvLengthHorizontal_ = {}, uint32_t log2MaxMvLengthVertical_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), inputModeFlags( inputModeFlags_ ), outputModeFlags( outputModeFlags_ ), maxPPictureL0ReferenceCount( maxPPictureL0ReferenceCount_ ), maxBPictureL0ReferenceCount( maxBPictureL0ReferenceCount_ ), maxL1ReferenceCount( maxL1ReferenceCount_ ), motionVectorsOverPicBoundariesFlag( motionVectorsOverPicBoundariesFlag_ ), maxBytesPerPicDenom( maxBytesPerPicDenom_ ), maxBitsPerMbDenom( maxBitsPerMbDenom_ ), log2MaxMvLengthHorizontal( log2MaxMvLengthHorizontal_ ), log2MaxMvLengthVertical( log2MaxMvLengthVertical_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoEncodeH264CapabilitiesEXT( VideoEncodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the native C struct via the layout-compatible view.
+    VideoEncodeH264CapabilitiesEXT( VkVideoEncodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoEncodeH264CapabilitiesEXT( *reinterpret_cast<VideoEncodeH264CapabilitiesEXT const *>( &rhs ) )
+    {}
+
+
+    VideoEncodeH264CapabilitiesEXT & operator=( VideoEncodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoEncodeH264CapabilitiesEXT & operator=( VkVideoEncodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilitiesEXT const *>( &rhs );
+      return *this;
+    }
+
+
+    // Zero-cost views onto the native C struct (const and mutable).
+    operator VkVideoEncodeH264CapabilitiesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoEncodeH264CapabilitiesEXT*>( this );
+    }
+
+    operator VkVideoEncodeH264CapabilitiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoEncodeH264CapabilitiesEXT*>( this );
+    }
+
+    // reflect(): tuple of const references to all members, declaration order.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilityFlagsEXT const &, VULKAN_HPP_NAMESPACE::VideoEncodeH264InputModeFlagsEXT const &, VULKAN_HPP_NAMESPACE::VideoEncodeH264OutputModeFlagsEXT const &, uint8_t const &, uint8_t const &, uint8_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, inputModeFlags, outputModeFlags, maxPPictureL0ReferenceCount, maxBPictureL0ReferenceCount, maxL1ReferenceCount, motionVectorsOverPicBoundariesFlag, maxBytesPerPicDenom, maxBitsPerMbDenom, log2MaxMvLengthHorizontal, log2MaxMvLengthVertical );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( VideoEncodeH264CapabilitiesEXT const & ) const = default;
+#else
+    // Memberwise equality; pNext compares by pointer value.
+    bool operator==( VideoEncodeH264CapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( inputModeFlags == rhs.inputModeFlags )
+          && ( outputModeFlags == rhs.outputModeFlags )
+          && ( maxPPictureL0ReferenceCount == rhs.maxPPictureL0ReferenceCount )
+          && ( maxBPictureL0ReferenceCount == rhs.maxBPictureL0ReferenceCount )
+          && ( maxL1ReferenceCount == rhs.maxL1ReferenceCount )
+          && ( motionVectorsOverPicBoundariesFlag == rhs.motionVectorsOverPicBoundariesFlag )
+          && ( maxBytesPerPicDenom == rhs.maxBytesPerPicDenom )
+          && ( maxBitsPerMbDenom == rhs.maxBitsPerMbDenom )
+          && ( log2MaxMvLengthHorizontal == rhs.log2MaxMvLengthHorizontal )
+          && ( log2MaxMvLengthVertical == rhs.log2MaxMvLengthVertical );
+#endif
+    }
+
+    bool operator!=( VideoEncodeH264CapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264CapabilitiesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilityFlagsEXT flags = {};
+    VULKAN_HPP_NAMESPACE::VideoEncodeH264InputModeFlagsEXT inputModeFlags = {};
+    VULKAN_HPP_NAMESPACE::VideoEncodeH264OutputModeFlagsEXT outputModeFlags = {};
+    uint8_t maxPPictureL0ReferenceCount = {};
+    uint8_t maxBPictureL0ReferenceCount = {};
+    uint8_t maxL1ReferenceCount = {};
+    VULKAN_HPP_NAMESPACE::Bool32 motionVectorsOverPicBoundariesFlag = {};
+    uint32_t maxBytesPerPicDenom = {};
+    uint32_t maxBitsPerMbDenom = {};
+    uint32_t log2MaxMvLengthHorizontal = {};
+    uint32_t log2MaxMvLengthVertical = {};
+
+  };
+
+  // StructureType -> C++ wrapper type mapping used by template machinery.
+  template <>
+  struct CppType<StructureType, StructureType::eVideoEncodeH264CapabilitiesEXT>
+  {
+    using Type = VideoEncodeH264CapabilitiesEXT;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  // C++ wrapper around the native VkVideoEncodeH264DpbSlotInfoEXT structure
+  // (a DPB slot index plus a pointer to the std-video reference info).
+  struct VideoEncodeH264DpbSlotInfoEXT
+  {
+    using NativeType = VkVideoEncodeH264DpbSlotInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264DpbSlotInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR VideoEncodeH264DpbSlotInfoEXT(int8_t slotIndex_ = {}, const StdVideoEncodeH264ReferenceInfo * pStdReferenceInfo_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), slotIndex( slotIndex_ ), pStdReferenceInfo( pStdReferenceInfo_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoEncodeH264DpbSlotInfoEXT( VideoEncodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the native C struct via the layout-compatible view.
+    VideoEncodeH264DpbSlotInfoEXT( VkVideoEncodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoEncodeH264DpbSlotInfoEXT( *reinterpret_cast<VideoEncodeH264DpbSlotInfoEXT const *>( &rhs ) )
+    {}
+
+
+    VideoEncodeH264DpbSlotInfoEXT & operator=( VideoEncodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoEncodeH264DpbSlotInfoEXT & operator=( VkVideoEncodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: store the argument and return *this for chaining.
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264DpbSlotInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264DpbSlotInfoEXT & setSlotIndex( int8_t slotIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      slotIndex = slotIndex_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264DpbSlotInfoEXT & setPStdReferenceInfo( const StdVideoEncodeH264ReferenceInfo * pStdReferenceInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pStdReferenceInfo = pStdReferenceInfo_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views onto the native C struct (const and mutable).
+    operator VkVideoEncodeH264DpbSlotInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoEncodeH264DpbSlotInfoEXT*>( this );
+    }
+
+    operator VkVideoEncodeH264DpbSlotInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoEncodeH264DpbSlotInfoEXT*>( this );
+    }
+
+    // reflect(): tuple of const references to all members, declaration order.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, int8_t const &, const StdVideoEncodeH264ReferenceInfo * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, slotIndex, pStdReferenceInfo );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( VideoEncodeH264DpbSlotInfoEXT const & ) const = default;
+#else
+    // Memberwise equality; pointer members compare by address, not contents.
+    bool operator==( VideoEncodeH264DpbSlotInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( slotIndex == rhs.slotIndex )
+          && ( pStdReferenceInfo == rhs.pStdReferenceInfo );
+#endif
+    }
+
+    bool operator!=( VideoEncodeH264DpbSlotInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264DpbSlotInfoEXT;
+    const void * pNext = {};
+    int8_t slotIndex = {};
+    const StdVideoEncodeH264ReferenceInfo * pStdReferenceInfo = {};
+
+  };
+
+  // StructureType -> C++ wrapper type mapping used by template machinery.
+  template <>
+  struct CppType<StructureType, StructureType::eVideoEncodeH264DpbSlotInfoEXT>
+  {
+    using Type = VideoEncodeH264DpbSlotInfoEXT;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  // C++ wrapper around the native VkVideoEncodeH264EmitPictureParametersInfoEXT
+  // structure (an SPS id, an emit-SPS flag, and an array of PPS id entries).
+  struct VideoEncodeH264EmitPictureParametersInfoEXT
+  {
+    using NativeType = VkVideoEncodeH264EmitPictureParametersInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264EmitPictureParametersInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR VideoEncodeH264EmitPictureParametersInfoEXT(uint8_t spsId_ = {}, VULKAN_HPP_NAMESPACE::Bool32 emitSpsEnable_ = {}, uint32_t ppsIdEntryCount_ = {}, const uint8_t * ppsIdEntries_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), spsId( spsId_ ), emitSpsEnable( emitSpsEnable_ ), ppsIdEntryCount( ppsIdEntryCount_ ), ppsIdEntries( ppsIdEntries_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoEncodeH264EmitPictureParametersInfoEXT( VideoEncodeH264EmitPictureParametersInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the native C struct via the layout-compatible view.
+    VideoEncodeH264EmitPictureParametersInfoEXT( VkVideoEncodeH264EmitPictureParametersInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoEncodeH264EmitPictureParametersInfoEXT( *reinterpret_cast<VideoEncodeH264EmitPictureParametersInfoEXT const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Enhanced-mode constructor: derives ppsIdEntryCount/ppsIdEntries from one
+    // array proxy. NOTE(review): the parameter is spelled "psIdEntries_"
+    // (single 'p') although it fills the "ppsId*" members — a generator naming
+    // quirk, mirrored by setPsIdEntries below.
+    VideoEncodeH264EmitPictureParametersInfoEXT( uint8_t spsId_, VULKAN_HPP_NAMESPACE::Bool32 emitSpsEnable_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint8_t> const & psIdEntries_, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), spsId( spsId_ ), emitSpsEnable( emitSpsEnable_ ), ppsIdEntryCount( static_cast<uint32_t>( psIdEntries_.size() ) ), ppsIdEntries( psIdEntries_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    VideoEncodeH264EmitPictureParametersInfoEXT & operator=( VideoEncodeH264EmitPictureParametersInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoEncodeH264EmitPictureParametersInfoEXT & operator=( VkVideoEncodeH264EmitPictureParametersInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264EmitPictureParametersInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: store the argument and return *this for chaining.
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264EmitPictureParametersInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264EmitPictureParametersInfoEXT & setSpsId( uint8_t spsId_ ) VULKAN_HPP_NOEXCEPT
+    {
+      spsId = spsId_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264EmitPictureParametersInfoEXT & setEmitSpsEnable( VULKAN_HPP_NAMESPACE::Bool32 emitSpsEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      emitSpsEnable = emitSpsEnable_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264EmitPictureParametersInfoEXT & setPpsIdEntryCount( uint32_t ppsIdEntryCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      ppsIdEntryCount = ppsIdEntryCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264EmitPictureParametersInfoEXT & setPpsIdEntries( const uint8_t * ppsIdEntries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      ppsIdEntries = ppsIdEntries_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Enhanced-mode overload: sets count and pointer together from one range
+    // (name "setPsIdEntries" follows the generator quirk noted above).
+    VideoEncodeH264EmitPictureParametersInfoEXT & setPsIdEntries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint8_t> const & psIdEntries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      ppsIdEntryCount = static_cast<uint32_t>( psIdEntries_.size() );
+      ppsIdEntries = psIdEntries_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views onto the native C struct (const and mutable).
+    operator VkVideoEncodeH264EmitPictureParametersInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoEncodeH264EmitPictureParametersInfoEXT*>( this );
+    }
+
+    operator VkVideoEncodeH264EmitPictureParametersInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoEncodeH264EmitPictureParametersInfoEXT*>( this );
+    }
+
+    // reflect(): tuple of const references to all members, declaration order.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint8_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint32_t const &, const uint8_t * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, spsId, emitSpsEnable, ppsIdEntryCount, ppsIdEntries );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( VideoEncodeH264EmitPictureParametersInfoEXT const & ) const = default;
+#else
+    // Memberwise equality; ppsIdEntries compares by pointer, not array contents.
+    bool operator==( VideoEncodeH264EmitPictureParametersInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( spsId == rhs.spsId )
+          && ( emitSpsEnable == rhs.emitSpsEnable )
+          && ( ppsIdEntryCount == rhs.ppsIdEntryCount )
+          && ( ppsIdEntries == rhs.ppsIdEntries );
+#endif
+    }
+
+    bool operator!=( VideoEncodeH264EmitPictureParametersInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264EmitPictureParametersInfoEXT;
+    const void * pNext = {};
+    uint8_t spsId = {};
+    VULKAN_HPP_NAMESPACE::Bool32 emitSpsEnable = {};
+    uint32_t ppsIdEntryCount = {};
+    const uint8_t * ppsIdEntries = {};
+
+  };
+
+  // StructureType -> C++ wrapper type mapping used by template machinery.
+  template <>
+  struct CppType<StructureType, StructureType::eVideoEncodeH264EmitPictureParametersInfoEXT>
+  {
+    using Type = VideoEncodeH264EmitPictureParametersInfoEXT;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  // C++ wrapper around the native VkVideoEncodeH264FrameSizeEXT structure:
+  // per-frame-type sizes for I, P and B frames. Unlike most Vulkan structs,
+  // this one has no sType/pNext (it is a plain member struct), so there is
+  // also no CppType specialization for it.
+  struct VideoEncodeH264FrameSizeEXT
+  {
+    using NativeType = VkVideoEncodeH264FrameSizeEXT;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR VideoEncodeH264FrameSizeEXT(uint32_t frameISize_ = {}, uint32_t framePSize_ = {}, uint32_t frameBSize_ = {}) VULKAN_HPP_NOEXCEPT
+    : frameISize( frameISize_ ), framePSize( framePSize_ ), frameBSize( frameBSize_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoEncodeH264FrameSizeEXT( VideoEncodeH264FrameSizeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the native C struct via the layout-compatible view.
+    VideoEncodeH264FrameSizeEXT( VkVideoEncodeH264FrameSizeEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoEncodeH264FrameSizeEXT( *reinterpret_cast<VideoEncodeH264FrameSizeEXT const *>( &rhs ) )
+    {}
+
+
+    VideoEncodeH264FrameSizeEXT & operator=( VideoEncodeH264FrameSizeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoEncodeH264FrameSizeEXT & operator=( VkVideoEncodeH264FrameSizeEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: store the argument and return *this for chaining.
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264FrameSizeEXT & setFrameISize( uint32_t frameISize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      frameISize = frameISize_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264FrameSizeEXT & setFramePSize( uint32_t framePSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      framePSize = framePSize_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264FrameSizeEXT & setFrameBSize( uint32_t frameBSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      frameBSize = frameBSize_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost views onto the native C struct (const and mutable).
+    operator VkVideoEncodeH264FrameSizeEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoEncodeH264FrameSizeEXT*>( this );
+    }
+
+    operator VkVideoEncodeH264FrameSizeEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoEncodeH264FrameSizeEXT*>( this );
+    }
+
+    // reflect(): tuple of const references to all members, declaration order.
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<uint32_t const &, uint32_t const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( frameISize, framePSize, frameBSize );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( VideoEncodeH264FrameSizeEXT const & ) const = default;
+#else
+    // Memberwise equality over the three frame-size fields.
+    bool operator==( VideoEncodeH264FrameSizeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( frameISize == rhs.frameISize )
+          && ( framePSize == rhs.framePSize )
+          && ( frameBSize == rhs.frameBSize );
+#endif
+    }
+
+    bool operator!=( VideoEncodeH264FrameSizeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    uint32_t frameISize = {};
+    uint32_t framePSize = {};
+    uint32_t frameBSize = {};
+
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  struct VideoEncodeH264ReferenceListsInfoEXT
+  {
+    using NativeType = VkVideoEncodeH264ReferenceListsInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264ReferenceListsInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR VideoEncodeH264ReferenceListsInfoEXT(uint8_t referenceList0EntryCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pReferenceList0Entries_ = {}, uint8_t referenceList1EntryCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pReferenceList1Entries_ = {}, const StdVideoEncodeH264RefMemMgmtCtrlOperations * pMemMgmtCtrlOperations_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), referenceList0EntryCount( referenceList0EntryCount_ ), pReferenceList0Entries( pReferenceList0Entries_ ), referenceList1EntryCount( referenceList1EntryCount_ ), pReferenceList1Entries( pReferenceList1Entries_ ), pMemMgmtCtrlOperations( pMemMgmtCtrlOperations_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoEncodeH264ReferenceListsInfoEXT( VideoEncodeH264ReferenceListsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoEncodeH264ReferenceListsInfoEXT( VkVideoEncodeH264ReferenceListsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoEncodeH264ReferenceListsInfoEXT( *reinterpret_cast<VideoEncodeH264ReferenceListsInfoEXT const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    VideoEncodeH264ReferenceListsInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT> const & referenceList0Entries_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT> const & referenceList1Entries_ = {}, const StdVideoEncodeH264RefMemMgmtCtrlOperations * pMemMgmtCtrlOperations_ = {}, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), referenceList0EntryCount( static_cast<uint8_t>( referenceList0Entries_.size() ) ), pReferenceList0Entries( referenceList0Entries_.data() ), referenceList1EntryCount( static_cast<uint8_t>( referenceList1Entries_.size() ) ), pReferenceList1Entries( referenceList1Entries_.data() ), pMemMgmtCtrlOperations( pMemMgmtCtrlOperations_ )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    VideoEncodeH264ReferenceListsInfoEXT & operator=( VideoEncodeH264ReferenceListsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoEncodeH264ReferenceListsInfoEXT & operator=( VkVideoEncodeH264ReferenceListsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264ReferenceListsInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264ReferenceListsInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264ReferenceListsInfoEXT & setReferenceList0EntryCount( uint8_t referenceList0EntryCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      referenceList0EntryCount = referenceList0EntryCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264ReferenceListsInfoEXT & setPReferenceList0Entries( const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pReferenceList0Entries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pReferenceList0Entries = pReferenceList0Entries_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    VideoEncodeH264ReferenceListsInfoEXT & setReferenceList0Entries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT> const & referenceList0Entries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      referenceList0EntryCount = static_cast<uint8_t>( referenceList0Entries_.size() );
+      pReferenceList0Entries = referenceList0Entries_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264ReferenceListsInfoEXT & setReferenceList1EntryCount( uint8_t referenceList1EntryCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      referenceList1EntryCount = referenceList1EntryCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264ReferenceListsInfoEXT & setPReferenceList1Entries( const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pReferenceList1Entries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pReferenceList1Entries = pReferenceList1Entries_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    VideoEncodeH264ReferenceListsInfoEXT & setReferenceList1Entries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT> const & referenceList1Entries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      referenceList1EntryCount = static_cast<uint8_t>( referenceList1Entries_.size() );
+      pReferenceList1Entries = referenceList1Entries_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264ReferenceListsInfoEXT & setPMemMgmtCtrlOperations( const StdVideoEncodeH264RefMemMgmtCtrlOperations * pMemMgmtCtrlOperations_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pMemMgmtCtrlOperations = pMemMgmtCtrlOperations_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkVideoEncodeH264ReferenceListsInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoEncodeH264ReferenceListsInfoEXT*>( this );
+    }
+
+    operator VkVideoEncodeH264ReferenceListsInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoEncodeH264ReferenceListsInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint8_t const &, const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * const &, uint8_t const &, const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * const &, const StdVideoEncodeH264RefMemMgmtCtrlOperations * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, referenceList0EntryCount, pReferenceList0Entries, referenceList1EntryCount, pReferenceList1Entries, pMemMgmtCtrlOperations );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( VideoEncodeH264ReferenceListsInfoEXT const & ) const = default;
+#else
+    bool operator==( VideoEncodeH264ReferenceListsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( referenceList0EntryCount == rhs.referenceList0EntryCount )
+          && ( pReferenceList0Entries == rhs.pReferenceList0Entries )
+          && ( referenceList1EntryCount == rhs.referenceList1EntryCount )
+          && ( pReferenceList1Entries == rhs.pReferenceList1Entries )
+          && ( pMemMgmtCtrlOperations == rhs.pMemMgmtCtrlOperations );
+#endif
+    }
+
+    bool operator!=( VideoEncodeH264ReferenceListsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264ReferenceListsInfoEXT;
+    const void * pNext = {};
+    uint8_t referenceList0EntryCount = {};
+    const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pReferenceList0Entries = {};
+    uint8_t referenceList1EntryCount = {};
+    const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pReferenceList1Entries = {};
+    const StdVideoEncodeH264RefMemMgmtCtrlOperations * pMemMgmtCtrlOperations = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoEncodeH264ReferenceListsInfoEXT>
+  {
+    using Type = VideoEncodeH264ReferenceListsInfoEXT;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  // C++ wrapper for VkVideoEncodeH264NaluSliceInfoEXT (provisional VK_EXT_video_encode_h264).
+  // Must remain layout-compatible with the native C struct: the conversion operators below
+  // reinterpret_cast 'this' directly to VkVideoEncodeH264NaluSliceInfoEXT.
+  struct VideoEncodeH264NaluSliceInfoEXT
+  {
+    using NativeType = VkVideoEncodeH264NaluSliceInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264NaluSliceInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; every member defaults to zero-initialization, pNext to nullptr.
+VULKAN_HPP_CONSTEXPR VideoEncodeH264NaluSliceInfoEXT(uint32_t mbCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoEncodeH264ReferenceListsInfoEXT * pReferenceFinalLists_ = {}, const StdVideoEncodeH264SliceHeader * pSliceHeaderStd_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), mbCount( mbCount_ ), pReferenceFinalLists( pReferenceFinalLists_ ), pSliceHeaderStd( pSliceHeaderStd_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoEncodeH264NaluSliceInfoEXT( VideoEncodeH264NaluSliceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the native C struct (relies on identical layout).
+    VideoEncodeH264NaluSliceInfoEXT( VkVideoEncodeH264NaluSliceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoEncodeH264NaluSliceInfoEXT( *reinterpret_cast<VideoEncodeH264NaluSliceInfoEXT const *>( &rhs ) )
+    {}
+
+
+    VideoEncodeH264NaluSliceInfoEXT & operator=( VideoEncodeH264NaluSliceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the native C struct (layout-compatible reinterpret_cast).
+    VideoEncodeH264NaluSliceInfoEXT & operator=( VkVideoEncodeH264NaluSliceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable fluent setters, one per member.
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264NaluSliceInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264NaluSliceInfoEXT & setMbCount( uint32_t mbCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mbCount = mbCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264NaluSliceInfoEXT & setPReferenceFinalLists( const VULKAN_HPP_NAMESPACE::VideoEncodeH264ReferenceListsInfoEXT * pReferenceFinalLists_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pReferenceFinalLists = pReferenceFinalLists_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264NaluSliceInfoEXT & setPSliceHeaderStd( const StdVideoEncodeH264SliceHeader * pSliceHeaderStd_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSliceHeaderStd = pSliceHeaderStd_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the native C struct (valid because the layouts match).
+    operator VkVideoEncodeH264NaluSliceInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoEncodeH264NaluSliceInfoEXT*>( this );
+    }
+
+    operator VkVideoEncodeH264NaluSliceInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoEncodeH264NaluSliceInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Optional reflection helper: exposes all members as a tuple of references.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::VideoEncodeH264ReferenceListsInfoEXT * const &, const StdVideoEncodeH264SliceHeader * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, mbCount, pReferenceFinalLists, pSliceHeaderStd );
+    }
+#endif
+
+
+    // Defaulted three-way comparison when <=> is available; otherwise explicit member-wise == / !=.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( VideoEncodeH264NaluSliceInfoEXT const & ) const = default;
+#else
+    bool operator==( VideoEncodeH264NaluSliceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( mbCount == rhs.mbCount )
+          && ( pReferenceFinalLists == rhs.pReferenceFinalLists )
+          && ( pSliceHeaderStd == rhs.pSliceHeaderStd );
+#endif
+    }
+
+    bool operator!=( VideoEncodeH264NaluSliceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members mirror the native struct's declaration order.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264NaluSliceInfoEXT;
+    const void * pNext = {};
+    uint32_t mbCount = {};
+    const VULKAN_HPP_NAMESPACE::VideoEncodeH264ReferenceListsInfoEXT * pReferenceFinalLists = {};
+    const StdVideoEncodeH264SliceHeader * pSliceHeaderStd = {};
+
+  };
+
+  // StructureType -> C++ type mapping for this struct.
+  template <>
+  struct CppType<StructureType, StructureType::eVideoEncodeH264NaluSliceInfoEXT>
+  {
+    using Type = VideoEncodeH264NaluSliceInfoEXT;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  // C++ wrapper for VkVideoEncodeH264ProfileInfoEXT (provisional VK_EXT_video_encode_h264).
+  // Layout-compatible with the native C struct; conversions below use reinterpret_cast.
+  struct VideoEncodeH264ProfileInfoEXT
+  {
+    using NativeType = VkVideoEncodeH264ProfileInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264ProfileInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR VideoEncodeH264ProfileInfoEXT(StdVideoH264ProfileIdc stdProfileIdc_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), stdProfileIdc( stdProfileIdc_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoEncodeH264ProfileInfoEXT( VideoEncodeH264ProfileInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the native C struct (relies on identical layout).
+    VideoEncodeH264ProfileInfoEXT( VkVideoEncodeH264ProfileInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoEncodeH264ProfileInfoEXT( *reinterpret_cast<VideoEncodeH264ProfileInfoEXT const *>( &rhs ) )
+    {}
+
+
+    VideoEncodeH264ProfileInfoEXT & operator=( VideoEncodeH264ProfileInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoEncodeH264ProfileInfoEXT & operator=( VkVideoEncodeH264ProfileInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264ProfileInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable fluent setters.
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264ProfileInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264ProfileInfoEXT & setStdProfileIdc( StdVideoH264ProfileIdc stdProfileIdc_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stdProfileIdc = stdProfileIdc_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkVideoEncodeH264ProfileInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoEncodeH264ProfileInfoEXT*>( this );
+    }
+
+    operator VkVideoEncodeH264ProfileInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoEncodeH264ProfileInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, StdVideoH264ProfileIdc const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, stdProfileIdc );
+    }
+#endif
+
+
+    // Hand-written three-way comparison: stdProfileIdc is a video-std C type without comparison
+    // operators, so it is ordered bytewise via memcmp and <=> cannot simply be defaulted here.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    std::strong_ordering operator<=>( VideoEncodeH264ProfileInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
+      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
+      if ( auto cmp = memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH264ProfileIdc ) ); cmp != 0 )
+        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
+
+      return std::strong_ordering::equivalent;
+    }
+#endif
+
+    // Equality is provided unconditionally (not only as the <=> fallback) since it cannot be
+    // derived from a defaulted <=>; the opaque member is compared bytewise.
+    bool operator==( VideoEncodeH264ProfileInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH264ProfileIdc ) ) == 0 );
+    }
+
+    bool operator!=( VideoEncodeH264ProfileInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264ProfileInfoEXT;
+    const void * pNext = {};
+    StdVideoH264ProfileIdc stdProfileIdc = {};
+
+  };
+
+  // StructureType -> C++ type mapping for this struct.
+  template <>
+  struct CppType<StructureType, StructureType::eVideoEncodeH264ProfileInfoEXT>
+  {
+    using Type = VideoEncodeH264ProfileInfoEXT;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  // C++ wrapper for VkVideoEncodeH264QpEXT: per-picture-type QP values (I/P/B frames).
+  // Plain, non-extensible struct — no sType/pNext — hence no structureType constant and
+  // no CppType specialization; it is embedded by value in the rate-control layer struct below.
+  struct VideoEncodeH264QpEXT
+  {
+    using NativeType = VkVideoEncodeH264QpEXT;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR VideoEncodeH264QpEXT(int32_t qpI_ = {}, int32_t qpP_ = {}, int32_t qpB_ = {}) VULKAN_HPP_NOEXCEPT
+    : qpI( qpI_ ), qpP( qpP_ ), qpB( qpB_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoEncodeH264QpEXT( VideoEncodeH264QpEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the native C struct (relies on identical layout).
+    VideoEncodeH264QpEXT( VkVideoEncodeH264QpEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoEncodeH264QpEXT( *reinterpret_cast<VideoEncodeH264QpEXT const *>( &rhs ) )
+    {}
+
+
+    VideoEncodeH264QpEXT & operator=( VideoEncodeH264QpEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoEncodeH264QpEXT & operator=( VkVideoEncodeH264QpEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable fluent setters.
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264QpEXT & setQpI( int32_t qpI_ ) VULKAN_HPP_NOEXCEPT
+    {
+      qpI = qpI_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264QpEXT & setQpP( int32_t qpP_ ) VULKAN_HPP_NOEXCEPT
+    {
+      qpP = qpP_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264QpEXT & setQpB( int32_t qpB_ ) VULKAN_HPP_NOEXCEPT
+    {
+      qpB = qpB_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkVideoEncodeH264QpEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoEncodeH264QpEXT*>( this );
+    }
+
+    operator VkVideoEncodeH264QpEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoEncodeH264QpEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<int32_t const &, int32_t const &, int32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( qpI, qpP, qpB );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( VideoEncodeH264QpEXT const & ) const = default;
+#else
+    bool operator==( VideoEncodeH264QpEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( qpI == rhs.qpI )
+          && ( qpP == rhs.qpP )
+          && ( qpB == rhs.qpB );
+#endif
+    }
+
+    bool operator!=( VideoEncodeH264QpEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    int32_t qpI = {};
+    int32_t qpP = {};
+    int32_t qpB = {};
+
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  // C++ wrapper for VkVideoEncodeH264RateControlInfoEXT (provisional VK_EXT_video_encode_h264).
+  // Layout-compatible with the native C struct; conversions below use reinterpret_cast.
+  struct VideoEncodeH264RateControlInfoEXT
+  {
+    using NativeType = VkVideoEncodeH264RateControlInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264RateControlInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor; rateControlStructure defaults to eUnknown, other members zero-initialize.
+VULKAN_HPP_CONSTEXPR VideoEncodeH264RateControlInfoEXT(uint32_t gopFrameCount_ = {}, uint32_t idrPeriod_ = {}, uint32_t consecutiveBFrameCount_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlStructureEXT rateControlStructure_ = VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlStructureEXT::eUnknown, uint8_t temporalLayerCount_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), gopFrameCount( gopFrameCount_ ), idrPeriod( idrPeriod_ ), consecutiveBFrameCount( consecutiveBFrameCount_ ), rateControlStructure( rateControlStructure_ ), temporalLayerCount( temporalLayerCount_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoEncodeH264RateControlInfoEXT( VideoEncodeH264RateControlInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the native C struct (relies on identical layout).
+    VideoEncodeH264RateControlInfoEXT( VkVideoEncodeH264RateControlInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoEncodeH264RateControlInfoEXT( *reinterpret_cast<VideoEncodeH264RateControlInfoEXT const *>( &rhs ) )
+    {}
+
+
+    VideoEncodeH264RateControlInfoEXT & operator=( VideoEncodeH264RateControlInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoEncodeH264RateControlInfoEXT & operator=( VkVideoEncodeH264RateControlInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable fluent setters, one per member.
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoEXT & setGopFrameCount( uint32_t gopFrameCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      gopFrameCount = gopFrameCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoEXT & setIdrPeriod( uint32_t idrPeriod_ ) VULKAN_HPP_NOEXCEPT
+    {
+      idrPeriod = idrPeriod_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoEXT & setConsecutiveBFrameCount( uint32_t consecutiveBFrameCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      consecutiveBFrameCount = consecutiveBFrameCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoEXT & setRateControlStructure( VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlStructureEXT rateControlStructure_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rateControlStructure = rateControlStructure_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoEXT & setTemporalLayerCount( uint8_t temporalLayerCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      temporalLayerCount = temporalLayerCount_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkVideoEncodeH264RateControlInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoEncodeH264RateControlInfoEXT*>( this );
+    }
+
+    operator VkVideoEncodeH264RateControlInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoEncodeH264RateControlInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlStructureEXT const &, uint8_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, gopFrameCount, idrPeriod, consecutiveBFrameCount, rateControlStructure, temporalLayerCount );
+    }
+#endif
+
+
+    // Defaulted three-way comparison when <=> is available; otherwise explicit member-wise == / !=.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( VideoEncodeH264RateControlInfoEXT const & ) const = default;
+#else
+    bool operator==( VideoEncodeH264RateControlInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( gopFrameCount == rhs.gopFrameCount )
+          && ( idrPeriod == rhs.idrPeriod )
+          && ( consecutiveBFrameCount == rhs.consecutiveBFrameCount )
+          && ( rateControlStructure == rhs.rateControlStructure )
+          && ( temporalLayerCount == rhs.temporalLayerCount );
+#endif
+    }
+
+    bool operator!=( VideoEncodeH264RateControlInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264RateControlInfoEXT;
+    const void * pNext = {};
+    uint32_t gopFrameCount = {};
+    uint32_t idrPeriod = {};
+    uint32_t consecutiveBFrameCount = {};
+    VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlStructureEXT rateControlStructure = VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlStructureEXT::eUnknown;
+    uint8_t temporalLayerCount = {};
+
+  };
+
+  // StructureType -> C++ type mapping for this struct.
+  template <>
+  struct CppType<StructureType, StructureType::eVideoEncodeH264RateControlInfoEXT>
+  {
+    using Type = VideoEncodeH264RateControlInfoEXT;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  // C++ wrapper for VkVideoEncodeH264RateControlLayerInfoEXT (provisional VK_EXT_video_encode_h264).
+  // Pairs each optional QP / frame-size constraint with a Bool32 'use*' flag; layout-compatible
+  // with the native C struct (reinterpret_cast conversions below).
+  struct VideoEncodeH264RateControlLayerInfoEXT
+  {
+    using NativeType = VkVideoEncodeH264RateControlLayerInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264RateControlLayerInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR VideoEncodeH264RateControlLayerInfoEXT(uint8_t temporalLayerId_ = {}, VULKAN_HPP_NAMESPACE::Bool32 useInitialRcQp_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT initialRcQp_ = {}, VULKAN_HPP_NAMESPACE::Bool32 useMinQp_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT minQp_ = {}, VULKAN_HPP_NAMESPACE::Bool32 useMaxQp_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT maxQp_ = {}, VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeEXT maxFrameSize_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), temporalLayerId( temporalLayerId_ ), useInitialRcQp( useInitialRcQp_ ), initialRcQp( initialRcQp_ ), useMinQp( useMinQp_ ), minQp( minQp_ ), useMaxQp( useMaxQp_ ), maxQp( maxQp_ ), useMaxFrameSize( useMaxFrameSize_ ), maxFrameSize( maxFrameSize_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoEncodeH264RateControlLayerInfoEXT( VideoEncodeH264RateControlLayerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the native C struct (relies on identical layout).
+    VideoEncodeH264RateControlLayerInfoEXT( VkVideoEncodeH264RateControlLayerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoEncodeH264RateControlLayerInfoEXT( *reinterpret_cast<VideoEncodeH264RateControlLayerInfoEXT const *>( &rhs ) )
+    {}
+
+
+    VideoEncodeH264RateControlLayerInfoEXT & operator=( VideoEncodeH264RateControlLayerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoEncodeH264RateControlLayerInfoEXT & operator=( VkVideoEncodeH264RateControlLayerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlLayerInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable fluent setters, one per member; struct-valued members are taken by const &.
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT & setTemporalLayerId( uint8_t temporalLayerId_ ) VULKAN_HPP_NOEXCEPT
+    {
+      temporalLayerId = temporalLayerId_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT & setUseInitialRcQp( VULKAN_HPP_NAMESPACE::Bool32 useInitialRcQp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      useInitialRcQp = useInitialRcQp_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT & setInitialRcQp( VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT const & initialRcQp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      initialRcQp = initialRcQp_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT & setUseMinQp( VULKAN_HPP_NAMESPACE::Bool32 useMinQp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      useMinQp = useMinQp_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT & setMinQp( VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT const & minQp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minQp = minQp_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT & setUseMaxQp( VULKAN_HPP_NAMESPACE::Bool32 useMaxQp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      useMaxQp = useMaxQp_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT & setMaxQp( VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT const & maxQp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxQp = maxQp_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT & setUseMaxFrameSize( VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      useMaxFrameSize = useMaxFrameSize_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT & setMaxFrameSize( VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeEXT const & maxFrameSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxFrameSize = maxFrameSize_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkVideoEncodeH264RateControlLayerInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoEncodeH264RateControlLayerInfoEXT*>( this );
+    }
+
+    operator VkVideoEncodeH264RateControlLayerInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoEncodeH264RateControlLayerInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint8_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeEXT const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, temporalLayerId, useInitialRcQp, initialRcQp, useMinQp, minQp, useMaxQp, maxQp, useMaxFrameSize, maxFrameSize );
+    }
+#endif
+
+
+    // Defaulted three-way comparison when <=> is available; otherwise explicit member-wise == / !=.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( VideoEncodeH264RateControlLayerInfoEXT const & ) const = default;
+#else
+    bool operator==( VideoEncodeH264RateControlLayerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( temporalLayerId == rhs.temporalLayerId )
+          && ( useInitialRcQp == rhs.useInitialRcQp )
+          && ( initialRcQp == rhs.initialRcQp )
+          && ( useMinQp == rhs.useMinQp )
+          && ( minQp == rhs.minQp )
+          && ( useMaxQp == rhs.useMaxQp )
+          && ( maxQp == rhs.maxQp )
+          && ( useMaxFrameSize == rhs.useMaxFrameSize )
+          && ( maxFrameSize == rhs.maxFrameSize );
+#endif
+    }
+
+    bool operator!=( VideoEncodeH264RateControlLayerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264RateControlLayerInfoEXT;
+    const void * pNext = {};
+    uint8_t temporalLayerId = {};
+    VULKAN_HPP_NAMESPACE::Bool32 useInitialRcQp = {};
+    VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT initialRcQp = {};
+    VULKAN_HPP_NAMESPACE::Bool32 useMinQp = {};
+    VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT minQp = {};
+    VULKAN_HPP_NAMESPACE::Bool32 useMaxQp = {};
+    VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT maxQp = {};
+    VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize = {};
+    VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeEXT maxFrameSize = {};
+
+  };
+
+  // StructureType -> C++ type mapping for this struct.
+  template <>
+  struct CppType<StructureType, StructureType::eVideoEncodeH264RateControlLayerInfoEXT>
+  {
+    using Type = VideoEncodeH264RateControlLayerInfoEXT;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  struct VideoEncodeH264SessionParametersAddInfoEXT
+  {
+    using NativeType = VkVideoEncodeH264SessionParametersAddInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264SessionParametersAddInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionParametersAddInfoEXT(uint32_t stdSPSCount_ = {}, const StdVideoH264SequenceParameterSet * pStdSPSs_ = {}, uint32_t stdPPSCount_ = {}, const StdVideoH264PictureParameterSet * pStdPPSs_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), stdSPSCount( stdSPSCount_ ), pStdSPSs( pStdSPSs_ ), stdPPSCount( stdPPSCount_ ), pStdPPSs( pStdPPSs_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionParametersAddInfoEXT( VideoEncodeH264SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoEncodeH264SessionParametersAddInfoEXT( VkVideoEncodeH264SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoEncodeH264SessionParametersAddInfoEXT( *reinterpret_cast<VideoEncodeH264SessionParametersAddInfoEXT const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    VideoEncodeH264SessionParametersAddInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH264SequenceParameterSet> const & stdSPSs_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH264PictureParameterSet> const & stdPPSs_ = {}, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), stdSPSCount( static_cast<uint32_t>( stdSPSs_.size() ) ), pStdSPSs( stdSPSs_.data() ), stdPPSCount( static_cast<uint32_t>( stdPPSs_.size() ) ), pStdPPSs( stdPPSs_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    VideoEncodeH264SessionParametersAddInfoEXT & operator=( VideoEncodeH264SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoEncodeH264SessionParametersAddInfoEXT & operator=( VkVideoEncodeH264SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersAddInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersAddInfoEXT & setStdSPSCount( uint32_t stdSPSCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stdSPSCount = stdSPSCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersAddInfoEXT & setPStdSPSs( const StdVideoH264SequenceParameterSet * pStdSPSs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pStdSPSs = pStdSPSs_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    VideoEncodeH264SessionParametersAddInfoEXT & setStdSPSs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH264SequenceParameterSet> const & stdSPSs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stdSPSCount = static_cast<uint32_t>( stdSPSs_.size() );
+      pStdSPSs = stdSPSs_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersAddInfoEXT & setStdPPSCount( uint32_t stdPPSCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stdPPSCount = stdPPSCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersAddInfoEXT & setPStdPPSs( const StdVideoH264PictureParameterSet * pStdPPSs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pStdPPSs = pStdPPSs_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    VideoEncodeH264SessionParametersAddInfoEXT & setStdPPSs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH264PictureParameterSet> const & stdPPSs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stdPPSCount = static_cast<uint32_t>( stdPPSs_.size() );
+      pStdPPSs = stdPPSs_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkVideoEncodeH264SessionParametersAddInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoEncodeH264SessionParametersAddInfoEXT*>( this );
+    }
+
+    operator VkVideoEncodeH264SessionParametersAddInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoEncodeH264SessionParametersAddInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const StdVideoH264SequenceParameterSet * const &, uint32_t const &, const StdVideoH264PictureParameterSet * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, stdSPSCount, pStdSPSs, stdPPSCount, pStdPPSs );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( VideoEncodeH264SessionParametersAddInfoEXT const & ) const = default;
+#else
+    bool operator==( VideoEncodeH264SessionParametersAddInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( stdSPSCount == rhs.stdSPSCount )
+          && ( pStdSPSs == rhs.pStdSPSs )
+          && ( stdPPSCount == rhs.stdPPSCount )
+          && ( pStdPPSs == rhs.pStdPPSs );
+#endif
+    }
+
+    bool operator!=( VideoEncodeH264SessionParametersAddInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264SessionParametersAddInfoEXT;
+    const void * pNext = {};
+    uint32_t stdSPSCount = {};
+    const StdVideoH264SequenceParameterSet * pStdSPSs = {};
+    uint32_t stdPPSCount = {};
+    const StdVideoH264PictureParameterSet * pStdPPSs = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoEncodeH264SessionParametersAddInfoEXT>
+  {
+    using Type = VideoEncodeH264SessionParametersAddInfoEXT;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  struct VideoEncodeH264SessionParametersCreateInfoEXT
+  {
+    using NativeType = VkVideoEncodeH264SessionParametersCreateInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264SessionParametersCreateInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionParametersCreateInfoEXT(uint32_t maxStdSPSCount_ = {}, uint32_t maxStdPPSCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT * pParametersAddInfo_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), maxStdSPSCount( maxStdSPSCount_ ), maxStdPPSCount( maxStdPPSCount_ ), pParametersAddInfo( pParametersAddInfo_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionParametersCreateInfoEXT( VideoEncodeH264SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoEncodeH264SessionParametersCreateInfoEXT( VkVideoEncodeH264SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoEncodeH264SessionParametersCreateInfoEXT( *reinterpret_cast<VideoEncodeH264SessionParametersCreateInfoEXT const *>( &rhs ) )
+    {}
+
+
+    VideoEncodeH264SessionParametersCreateInfoEXT & operator=( VideoEncodeH264SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoEncodeH264SessionParametersCreateInfoEXT & operator=( VkVideoEncodeH264SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersCreateInfoEXT & setMaxStdSPSCount( uint32_t maxStdSPSCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxStdSPSCount = maxStdSPSCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersCreateInfoEXT & setMaxStdPPSCount( uint32_t maxStdPPSCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxStdPPSCount = maxStdPPSCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersCreateInfoEXT & setPParametersAddInfo( const VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT * pParametersAddInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pParametersAddInfo = pParametersAddInfo_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkVideoEncodeH264SessionParametersCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoEncodeH264SessionParametersCreateInfoEXT*>( this );
+    }
+
+    operator VkVideoEncodeH264SessionParametersCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoEncodeH264SessionParametersCreateInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, maxStdSPSCount, maxStdPPSCount, pParametersAddInfo );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( VideoEncodeH264SessionParametersCreateInfoEXT const & ) const = default;
+#else
+    bool operator==( VideoEncodeH264SessionParametersCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxStdSPSCount == rhs.maxStdSPSCount )
+          && ( maxStdPPSCount == rhs.maxStdPPSCount )
+          && ( pParametersAddInfo == rhs.pParametersAddInfo );
+#endif
+    }
+
+    bool operator!=( VideoEncodeH264SessionParametersCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264SessionParametersCreateInfoEXT;
+    const void * pNext = {};
+    uint32_t maxStdSPSCount = {};
+    uint32_t maxStdPPSCount = {};
+    const VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT * pParametersAddInfo = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoEncodeH264SessionParametersCreateInfoEXT>
+  {
+    using Type = VideoEncodeH264SessionParametersCreateInfoEXT;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  struct VideoEncodeH264VclFrameInfoEXT
+  {
+    using NativeType = VkVideoEncodeH264VclFrameInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264VclFrameInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR VideoEncodeH264VclFrameInfoEXT(const VULKAN_HPP_NAMESPACE::VideoEncodeH264ReferenceListsInfoEXT * pReferenceFinalLists_ = {}, uint32_t naluSliceEntryCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceInfoEXT * pNaluSliceEntries_ = {}, const StdVideoEncodeH264PictureInfo * pCurrentPictureInfo_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), pReferenceFinalLists( pReferenceFinalLists_ ), naluSliceEntryCount( naluSliceEntryCount_ ), pNaluSliceEntries( pNaluSliceEntries_ ), pCurrentPictureInfo( pCurrentPictureInfo_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoEncodeH264VclFrameInfoEXT( VideoEncodeH264VclFrameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoEncodeH264VclFrameInfoEXT( VkVideoEncodeH264VclFrameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoEncodeH264VclFrameInfoEXT( *reinterpret_cast<VideoEncodeH264VclFrameInfoEXT const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    VideoEncodeH264VclFrameInfoEXT( const VULKAN_HPP_NAMESPACE::VideoEncodeH264ReferenceListsInfoEXT * pReferenceFinalLists_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceInfoEXT> const & naluSliceEntries_, const StdVideoEncodeH264PictureInfo * pCurrentPictureInfo_ = {}, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), pReferenceFinalLists( pReferenceFinalLists_ ), naluSliceEntryCount( static_cast<uint32_t>( naluSliceEntries_.size() ) ), pNaluSliceEntries( naluSliceEntries_.data() ), pCurrentPictureInfo( pCurrentPictureInfo_ )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    VideoEncodeH264VclFrameInfoEXT & operator=( VideoEncodeH264VclFrameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoEncodeH264VclFrameInfoEXT & operator=( VkVideoEncodeH264VclFrameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264VclFrameInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264VclFrameInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264VclFrameInfoEXT & setPReferenceFinalLists( const VULKAN_HPP_NAMESPACE::VideoEncodeH264ReferenceListsInfoEXT * pReferenceFinalLists_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pReferenceFinalLists = pReferenceFinalLists_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264VclFrameInfoEXT & setNaluSliceEntryCount( uint32_t naluSliceEntryCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      naluSliceEntryCount = naluSliceEntryCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264VclFrameInfoEXT & setPNaluSliceEntries( const VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceInfoEXT * pNaluSliceEntries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNaluSliceEntries = pNaluSliceEntries_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    VideoEncodeH264VclFrameInfoEXT & setNaluSliceEntries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceInfoEXT> const & naluSliceEntries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      naluSliceEntryCount = static_cast<uint32_t>( naluSliceEntries_.size() );
+      pNaluSliceEntries = naluSliceEntries_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264VclFrameInfoEXT & setPCurrentPictureInfo( const StdVideoEncodeH264PictureInfo * pCurrentPictureInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pCurrentPictureInfo = pCurrentPictureInfo_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkVideoEncodeH264VclFrameInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoEncodeH264VclFrameInfoEXT*>( this );
+    }
+
+    operator VkVideoEncodeH264VclFrameInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoEncodeH264VclFrameInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const VULKAN_HPP_NAMESPACE::VideoEncodeH264ReferenceListsInfoEXT * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceInfoEXT * const &, const StdVideoEncodeH264PictureInfo * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, pReferenceFinalLists, naluSliceEntryCount, pNaluSliceEntries, pCurrentPictureInfo );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( VideoEncodeH264VclFrameInfoEXT const & ) const = default;
+#else
+    bool operator==( VideoEncodeH264VclFrameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pReferenceFinalLists == rhs.pReferenceFinalLists )
+          && ( naluSliceEntryCount == rhs.naluSliceEntryCount )
+          && ( pNaluSliceEntries == rhs.pNaluSliceEntries )
+          && ( pCurrentPictureInfo == rhs.pCurrentPictureInfo );
+#endif
+    }
+
+    bool operator!=( VideoEncodeH264VclFrameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264VclFrameInfoEXT;
+    const void * pNext = {};
+    const VULKAN_HPP_NAMESPACE::VideoEncodeH264ReferenceListsInfoEXT * pReferenceFinalLists = {};
+    uint32_t naluSliceEntryCount = {};
+    const VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceInfoEXT * pNaluSliceEntries = {};
+    const StdVideoEncodeH264PictureInfo * pCurrentPictureInfo = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoEncodeH264VclFrameInfoEXT>
+  {
+    using Type = VideoEncodeH264VclFrameInfoEXT;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  struct VideoEncodeH265CapabilitiesEXT
+  {
+    using NativeType = VkVideoEncodeH265CapabilitiesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265CapabilitiesEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR VideoEncodeH265CapabilitiesEXT(VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilityFlagsEXT flags_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeH265InputModeFlagsEXT inputModeFlags_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeH265OutputModeFlagsEXT outputModeFlags_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeH265CtbSizeFlagsEXT ctbSizes_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeH265TransformBlockSizeFlagsEXT transformBlockSizes_ = {}, uint8_t maxPPictureL0ReferenceCount_ = {}, uint8_t maxBPictureL0ReferenceCount_ = {}, uint8_t maxL1ReferenceCount_ = {}, uint8_t maxSubLayersCount_ = {}, uint8_t minLog2MinLumaCodingBlockSizeMinus3_ = {}, uint8_t maxLog2MinLumaCodingBlockSizeMinus3_ = {}, uint8_t minLog2MinLumaTransformBlockSizeMinus2_ = {}, uint8_t maxLog2MinLumaTransformBlockSizeMinus2_ = {}, uint8_t minMaxTransformHierarchyDepthInter_ = {}, uint8_t maxMaxTransformHierarchyDepthInter_ = {}, uint8_t minMaxTransformHierarchyDepthIntra_ = {}, uint8_t maxMaxTransformHierarchyDepthIntra_ = {}, uint8_t maxDiffCuQpDeltaDepth_ = {}, uint8_t minMaxNumMergeCand_ = {}, uint8_t maxMaxNumMergeCand_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), inputModeFlags( inputModeFlags_ ), outputModeFlags( outputModeFlags_ ), ctbSizes( ctbSizes_ ), transformBlockSizes( transformBlockSizes_ ), maxPPictureL0ReferenceCount( maxPPictureL0ReferenceCount_ ), maxBPictureL0ReferenceCount( maxBPictureL0ReferenceCount_ ), maxL1ReferenceCount( maxL1ReferenceCount_ ), maxSubLayersCount( maxSubLayersCount_ ), minLog2MinLumaCodingBlockSizeMinus3( minLog2MinLumaCodingBlockSizeMinus3_ ), maxLog2MinLumaCodingBlockSizeMinus3( maxLog2MinLumaCodingBlockSizeMinus3_ ), minLog2MinLumaTransformBlockSizeMinus2( minLog2MinLumaTransformBlockSizeMinus2_ ), maxLog2MinLumaTransformBlockSizeMinus2( maxLog2MinLumaTransformBlockSizeMinus2_ ), minMaxTransformHierarchyDepthInter( minMaxTransformHierarchyDepthInter_ ), maxMaxTransformHierarchyDepthInter( maxMaxTransformHierarchyDepthInter_ ), minMaxTransformHierarchyDepthIntra( minMaxTransformHierarchyDepthIntra_ ), maxMaxTransformHierarchyDepthIntra( maxMaxTransformHierarchyDepthIntra_ ), maxDiffCuQpDeltaDepth( maxDiffCuQpDeltaDepth_ ), minMaxNumMergeCand( minMaxNumMergeCand_ ), maxMaxNumMergeCand( maxMaxNumMergeCand_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoEncodeH265CapabilitiesEXT( VideoEncodeH265CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoEncodeH265CapabilitiesEXT( VkVideoEncodeH265CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoEncodeH265CapabilitiesEXT( *reinterpret_cast<VideoEncodeH265CapabilitiesEXT const *>( &rhs ) )
+    {}
+
+
+    VideoEncodeH265CapabilitiesEXT & operator=( VideoEncodeH265CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoEncodeH265CapabilitiesEXT & operator=( VkVideoEncodeH265CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilitiesEXT const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkVideoEncodeH265CapabilitiesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoEncodeH265CapabilitiesEXT*>( this );
+    }
+
+    operator VkVideoEncodeH265CapabilitiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoEncodeH265CapabilitiesEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilityFlagsEXT const &, VULKAN_HPP_NAMESPACE::VideoEncodeH265InputModeFlagsEXT const &, VULKAN_HPP_NAMESPACE::VideoEncodeH265OutputModeFlagsEXT const &, VULKAN_HPP_NAMESPACE::VideoEncodeH265CtbSizeFlagsEXT const &, VULKAN_HPP_NAMESPACE::VideoEncodeH265TransformBlockSizeFlagsEXT const &, uint8_t const &, uint8_t const &, uint8_t const &, uint8_t const &, uint8_t const &, uint8_t const &, uint8_t const &, uint8_t const &, uint8_t const &, uint8_t const &, uint8_t const &, uint8_t const &, uint8_t const &, uint8_t const &, uint8_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, inputModeFlags, outputModeFlags, ctbSizes, transformBlockSizes, maxPPictureL0ReferenceCount, maxBPictureL0ReferenceCount, maxL1ReferenceCount, maxSubLayersCount, minLog2MinLumaCodingBlockSizeMinus3, maxLog2MinLumaCodingBlockSizeMinus3, minLog2MinLumaTransformBlockSizeMinus2, maxLog2MinLumaTransformBlockSizeMinus2, minMaxTransformHierarchyDepthInter, maxMaxTransformHierarchyDepthInter, minMaxTransformHierarchyDepthIntra, maxMaxTransformHierarchyDepthIntra, maxDiffCuQpDeltaDepth, minMaxNumMergeCand, maxMaxNumMergeCand );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( VideoEncodeH265CapabilitiesEXT const & ) const = default;
+#else
+    bool operator==( VideoEncodeH265CapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( inputModeFlags == rhs.inputModeFlags )
+          && ( outputModeFlags == rhs.outputModeFlags )
+          && ( ctbSizes == rhs.ctbSizes )
+          && ( transformBlockSizes == rhs.transformBlockSizes )
+          && ( maxPPictureL0ReferenceCount == rhs.maxPPictureL0ReferenceCount )
+          && ( maxBPictureL0ReferenceCount == rhs.maxBPictureL0ReferenceCount )
+          && ( maxL1ReferenceCount == rhs.maxL1ReferenceCount )
+          && ( maxSubLayersCount == rhs.maxSubLayersCount )
+          && ( minLog2MinLumaCodingBlockSizeMinus3 == rhs.minLog2MinLumaCodingBlockSizeMinus3 )
+          && ( maxLog2MinLumaCodingBlockSizeMinus3 == rhs.maxLog2MinLumaCodingBlockSizeMinus3 )
+          && ( minLog2MinLumaTransformBlockSizeMinus2 == rhs.minLog2MinLumaTransformBlockSizeMinus2 )
+          && ( maxLog2MinLumaTransformBlockSizeMinus2 == rhs.maxLog2MinLumaTransformBlockSizeMinus2 )
+          && ( minMaxTransformHierarchyDepthInter == rhs.minMaxTransformHierarchyDepthInter )
+          && ( maxMaxTransformHierarchyDepthInter == rhs.maxMaxTransformHierarchyDepthInter )
+          && ( minMaxTransformHierarchyDepthIntra == rhs.minMaxTransformHierarchyDepthIntra )
+          && ( maxMaxTransformHierarchyDepthIntra == rhs.maxMaxTransformHierarchyDepthIntra )
+          && ( maxDiffCuQpDeltaDepth == rhs.maxDiffCuQpDeltaDepth )
+          && ( minMaxNumMergeCand == rhs.minMaxNumMergeCand )
+          && ( maxMaxNumMergeCand == rhs.maxMaxNumMergeCand );
+#endif
+    }
+
+    bool operator!=( VideoEncodeH265CapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265CapabilitiesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilityFlagsEXT flags = {};
+    VULKAN_HPP_NAMESPACE::VideoEncodeH265InputModeFlagsEXT inputModeFlags = {};
+    VULKAN_HPP_NAMESPACE::VideoEncodeH265OutputModeFlagsEXT outputModeFlags = {};
+    VULKAN_HPP_NAMESPACE::VideoEncodeH265CtbSizeFlagsEXT ctbSizes = {};
+    VULKAN_HPP_NAMESPACE::VideoEncodeH265TransformBlockSizeFlagsEXT transformBlockSizes = {};
+    uint8_t maxPPictureL0ReferenceCount = {};
+    uint8_t maxBPictureL0ReferenceCount = {};
+    uint8_t maxL1ReferenceCount = {};
+    uint8_t maxSubLayersCount = {};
+    uint8_t minLog2MinLumaCodingBlockSizeMinus3 = {};
+    uint8_t maxLog2MinLumaCodingBlockSizeMinus3 = {};
+    uint8_t minLog2MinLumaTransformBlockSizeMinus2 = {};
+    uint8_t maxLog2MinLumaTransformBlockSizeMinus2 = {};
+    uint8_t minMaxTransformHierarchyDepthInter = {};
+    uint8_t maxMaxTransformHierarchyDepthInter = {};
+    uint8_t minMaxTransformHierarchyDepthIntra = {};
+    uint8_t maxMaxTransformHierarchyDepthIntra = {};
+    uint8_t maxDiffCuQpDeltaDepth = {};
+    uint8_t minMaxNumMergeCand = {};
+    uint8_t maxMaxNumMergeCand = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoEncodeH265CapabilitiesEXT>
+  {
+    using Type = VideoEncodeH265CapabilitiesEXT;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  struct VideoEncodeH265DpbSlotInfoEXT
+  {
+    using NativeType = VkVideoEncodeH265DpbSlotInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265DpbSlotInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR VideoEncodeH265DpbSlotInfoEXT(int8_t slotIndex_ = {}, const StdVideoEncodeH265ReferenceInfo * pStdReferenceInfo_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), slotIndex( slotIndex_ ), pStdReferenceInfo( pStdReferenceInfo_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoEncodeH265DpbSlotInfoEXT( VideoEncodeH265DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoEncodeH265DpbSlotInfoEXT( VkVideoEncodeH265DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoEncodeH265DpbSlotInfoEXT( *reinterpret_cast<VideoEncodeH265DpbSlotInfoEXT const *>( &rhs ) )
+    {}
+
+
+    VideoEncodeH265DpbSlotInfoEXT & operator=( VideoEncodeH265DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoEncodeH265DpbSlotInfoEXT & operator=( VkVideoEncodeH265DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265DpbSlotInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265DpbSlotInfoEXT & setSlotIndex( int8_t slotIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      slotIndex = slotIndex_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265DpbSlotInfoEXT & setPStdReferenceInfo( const StdVideoEncodeH265ReferenceInfo * pStdReferenceInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pStdReferenceInfo = pStdReferenceInfo_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkVideoEncodeH265DpbSlotInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoEncodeH265DpbSlotInfoEXT*>( this );
+    }
+
+    operator VkVideoEncodeH265DpbSlotInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoEncodeH265DpbSlotInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, int8_t const &, const StdVideoEncodeH265ReferenceInfo * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, slotIndex, pStdReferenceInfo );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( VideoEncodeH265DpbSlotInfoEXT const & ) const = default;
+#else
+    bool operator==( VideoEncodeH265DpbSlotInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( slotIndex == rhs.slotIndex )
+          && ( pStdReferenceInfo == rhs.pStdReferenceInfo );
+#endif
+    }
+
+    bool operator!=( VideoEncodeH265DpbSlotInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265DpbSlotInfoEXT;
+    const void * pNext = {};
+    int8_t slotIndex = {};
+    const StdVideoEncodeH265ReferenceInfo * pStdReferenceInfo = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoEncodeH265DpbSlotInfoEXT>
+  {
+    using Type = VideoEncodeH265DpbSlotInfoEXT;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  struct VideoEncodeH265EmitPictureParametersInfoEXT
+  {
+    using NativeType = VkVideoEncodeH265EmitPictureParametersInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265EmitPictureParametersInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR VideoEncodeH265EmitPictureParametersInfoEXT(uint8_t vpsId_ = {}, uint8_t spsId_ = {}, VULKAN_HPP_NAMESPACE::Bool32 emitVpsEnable_ = {}, VULKAN_HPP_NAMESPACE::Bool32 emitSpsEnable_ = {}, uint32_t ppsIdEntryCount_ = {}, const uint8_t * ppsIdEntries_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), vpsId( vpsId_ ), spsId( spsId_ ), emitVpsEnable( emitVpsEnable_ ), emitSpsEnable( emitSpsEnable_ ), ppsIdEntryCount( ppsIdEntryCount_ ), ppsIdEntries( ppsIdEntries_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoEncodeH265EmitPictureParametersInfoEXT( VideoEncodeH265EmitPictureParametersInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoEncodeH265EmitPictureParametersInfoEXT( VkVideoEncodeH265EmitPictureParametersInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoEncodeH265EmitPictureParametersInfoEXT( *reinterpret_cast<VideoEncodeH265EmitPictureParametersInfoEXT const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    VideoEncodeH265EmitPictureParametersInfoEXT( uint8_t vpsId_, uint8_t spsId_, VULKAN_HPP_NAMESPACE::Bool32 emitVpsEnable_, VULKAN_HPP_NAMESPACE::Bool32 emitSpsEnable_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint8_t> const & psIdEntries_, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), vpsId( vpsId_ ), spsId( spsId_ ), emitVpsEnable( emitVpsEnable_ ), emitSpsEnable( emitSpsEnable_ ), ppsIdEntryCount( static_cast<uint32_t>( psIdEntries_.size() ) ), ppsIdEntries( psIdEntries_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    VideoEncodeH265EmitPictureParametersInfoEXT & operator=( VideoEncodeH265EmitPictureParametersInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoEncodeH265EmitPictureParametersInfoEXT & operator=( VkVideoEncodeH265EmitPictureParametersInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH265EmitPictureParametersInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265EmitPictureParametersInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265EmitPictureParametersInfoEXT & setVpsId( uint8_t vpsId_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vpsId = vpsId_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265EmitPictureParametersInfoEXT & setSpsId( uint8_t spsId_ ) VULKAN_HPP_NOEXCEPT
+    {
+      spsId = spsId_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265EmitPictureParametersInfoEXT & setEmitVpsEnable( VULKAN_HPP_NAMESPACE::Bool32 emitVpsEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      emitVpsEnable = emitVpsEnable_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265EmitPictureParametersInfoEXT & setEmitSpsEnable( VULKAN_HPP_NAMESPACE::Bool32 emitSpsEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      emitSpsEnable = emitSpsEnable_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265EmitPictureParametersInfoEXT & setPpsIdEntryCount( uint32_t ppsIdEntryCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      ppsIdEntryCount = ppsIdEntryCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265EmitPictureParametersInfoEXT & setPpsIdEntries( const uint8_t * ppsIdEntries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      ppsIdEntries = ppsIdEntries_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    VideoEncodeH265EmitPictureParametersInfoEXT & setPsIdEntries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint8_t> const & psIdEntries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      ppsIdEntryCount = static_cast<uint32_t>( psIdEntries_.size() );
+      ppsIdEntries = psIdEntries_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkVideoEncodeH265EmitPictureParametersInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoEncodeH265EmitPictureParametersInfoEXT*>( this );
+    }
+
+    operator VkVideoEncodeH265EmitPictureParametersInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoEncodeH265EmitPictureParametersInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint8_t const &, uint8_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint32_t const &, const uint8_t * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, vpsId, spsId, emitVpsEnable, emitSpsEnable, ppsIdEntryCount, ppsIdEntries );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( VideoEncodeH265EmitPictureParametersInfoEXT const & ) const = default;
+#else
+    bool operator==( VideoEncodeH265EmitPictureParametersInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( vpsId == rhs.vpsId )
+          && ( spsId == rhs.spsId )
+          && ( emitVpsEnable == rhs.emitVpsEnable )
+          && ( emitSpsEnable == rhs.emitSpsEnable )
+          && ( ppsIdEntryCount == rhs.ppsIdEntryCount )
+          && ( ppsIdEntries == rhs.ppsIdEntries );
+#endif
+    }
+
+    bool operator!=( VideoEncodeH265EmitPictureParametersInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265EmitPictureParametersInfoEXT;
+    const void * pNext = {};
+    uint8_t vpsId = {};
+    uint8_t spsId = {};
+    VULKAN_HPP_NAMESPACE::Bool32 emitVpsEnable = {};
+    VULKAN_HPP_NAMESPACE::Bool32 emitSpsEnable = {};
+    uint32_t ppsIdEntryCount = {};
+    const uint8_t * ppsIdEntries = {};
+
+  };
+
+  // Reverse mapping used by generic vulkan.hpp machinery: resolves the sType enum value
+  // eVideoEncodeH265EmitPictureParametersInfoEXT back to its C++ wrapper struct.
+  template <>
+  struct CppType<StructureType, StructureType::eVideoEncodeH265EmitPictureParametersInfoEXT>
+  {
+    using Type = VideoEncodeH265EmitPictureParametersInfoEXT;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  // Thin C++ wrapper over VkVideoEncodeH265FrameSizeEXT: three uint32_t values, one per
+  // picture type (I/P/B).  Unlike most Vulkan structs it carries no sType/pNext, so it is
+  // a plain value struct.  The reinterpret_cast conversions below assume the wrapper is
+  // layout-compatible with the C struct (same members, same order) — the standard
+  // vulkan.hpp pattern.
+  struct VideoEncodeH265FrameSizeEXT
+  {
+    using NativeType = VkVideoEncodeH265FrameSizeEXT;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR VideoEncodeH265FrameSizeEXT(uint32_t frameISize_ = {}, uint32_t framePSize_ = {}, uint32_t frameBSize_ = {}) VULKAN_HPP_NOEXCEPT
+    : frameISize( frameISize_ ), framePSize( framePSize_ ), frameBSize( frameBSize_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoEncodeH265FrameSizeEXT( VideoEncodeH265FrameSizeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by reinterpreting it as this wrapper type.
+    VideoEncodeH265FrameSizeEXT( VkVideoEncodeH265FrameSizeEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoEncodeH265FrameSizeEXT( *reinterpret_cast<VideoEncodeH265FrameSizeEXT const *>( &rhs ) )
+    {}
+
+
+    VideoEncodeH265FrameSizeEXT & operator=( VideoEncodeH265FrameSizeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (same layout-compatibility assumption as above).
+    VideoEncodeH265FrameSizeEXT & operator=( VkVideoEncodeH265FrameSizeEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each writes one member and returns *this for chaining.
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265FrameSizeEXT & setFrameISize( uint32_t frameISize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      frameISize = frameISize_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265FrameSizeEXT & setFramePSize( uint32_t framePSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      framePSize = framePSize_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265FrameSizeEXT & setFrameBSize( uint32_t frameBSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      frameBSize = frameBSize_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Implicit conversions to the C struct, so instances can be passed straight to the C API.
+    operator VkVideoEncodeH265FrameSizeEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoEncodeH265FrameSizeEXT*>( this );
+    }
+
+    operator VkVideoEncodeH265FrameSizeEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoEncodeH265FrameSizeEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Exposes all members as a tuple of references (used for reflection-based comparison).
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<uint32_t const &, uint32_t const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( frameISize, framePSize, frameBSize );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( VideoEncodeH265FrameSizeEXT const & ) const = default;
+#else
+    bool operator==( VideoEncodeH265FrameSizeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( frameISize == rhs.frameISize )
+          && ( framePSize == rhs.framePSize )
+          && ( frameBSize == rhs.frameBSize );
+#endif
+    }
+
+    bool operator!=( VideoEncodeH265FrameSizeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    uint32_t frameISize = {};
+    uint32_t framePSize = {};
+    uint32_t frameBSize = {};
+
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  // C++ wrapper over VkVideoEncodeH265ReferenceListsInfoEXT: holds the L0/L1 reference
+  // picture lists (count + pointer pairs of VideoEncodeH265DpbSlotInfoEXT) plus a pointer
+  // to codec-std reference-list modifications.  reinterpret_cast conversions assume
+  // layout compatibility with the C struct.
+  struct VideoEncodeH265ReferenceListsInfoEXT
+  {
+    using NativeType = VkVideoEncodeH265ReferenceListsInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265ReferenceListsInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR VideoEncodeH265ReferenceListsInfoEXT(uint8_t referenceList0EntryCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT * pReferenceList0Entries_ = {}, uint8_t referenceList1EntryCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT * pReferenceList1Entries_ = {}, const StdVideoEncodeH265ReferenceModifications * pReferenceModifications_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), referenceList0EntryCount( referenceList0EntryCount_ ), pReferenceList0Entries( pReferenceList0Entries_ ), referenceList1EntryCount( referenceList1EntryCount_ ), pReferenceList1Entries( pReferenceList1Entries_ ), pReferenceModifications( pReferenceModifications_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoEncodeH265ReferenceListsInfoEXT( VideoEncodeH265ReferenceListsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoEncodeH265ReferenceListsInfoEXT( VkVideoEncodeH265ReferenceListsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoEncodeH265ReferenceListsInfoEXT( *reinterpret_cast<VideoEncodeH265ReferenceListsInfoEXT const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Enhanced-mode constructor: derives each count from the corresponding ArrayProxy,
+    // keeping count and pointer consistent by construction.  Note the narrowing cast of
+    // size() to uint8_t — the caller must keep each list below 256 entries.
+    VideoEncodeH265ReferenceListsInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT> const & referenceList0Entries_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT> const & referenceList1Entries_ = {}, const StdVideoEncodeH265ReferenceModifications * pReferenceModifications_ = {}, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), referenceList0EntryCount( static_cast<uint8_t>( referenceList0Entries_.size() ) ), pReferenceList0Entries( referenceList0Entries_.data() ), referenceList1EntryCount( static_cast<uint8_t>( referenceList1Entries_.size() ) ), pReferenceList1Entries( referenceList1Entries_.data() ), pReferenceModifications( pReferenceModifications_ )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    VideoEncodeH265ReferenceListsInfoEXT & operator=( VideoEncodeH265ReferenceListsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoEncodeH265ReferenceListsInfoEXT & operator=( VkVideoEncodeH265ReferenceListsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters; each returns *this for chaining.
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ReferenceListsInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ReferenceListsInfoEXT & setReferenceList0EntryCount( uint8_t referenceList0EntryCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      referenceList0EntryCount = referenceList0EntryCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ReferenceListsInfoEXT & setPReferenceList0Entries( const VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT * pReferenceList0Entries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pReferenceList0Entries = pReferenceList0Entries_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Range setter: updates count and pointer together from one ArrayProxy.
+    VideoEncodeH265ReferenceListsInfoEXT & setReferenceList0Entries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT> const & referenceList0Entries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      referenceList0EntryCount = static_cast<uint8_t>( referenceList0Entries_.size() );
+      pReferenceList0Entries = referenceList0Entries_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ReferenceListsInfoEXT & setReferenceList1EntryCount( uint8_t referenceList1EntryCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      referenceList1EntryCount = referenceList1EntryCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ReferenceListsInfoEXT & setPReferenceList1Entries( const VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT * pReferenceList1Entries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pReferenceList1Entries = pReferenceList1Entries_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Range setter: updates count and pointer together from one ArrayProxy.
+    VideoEncodeH265ReferenceListsInfoEXT & setReferenceList1Entries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT> const & referenceList1Entries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      referenceList1EntryCount = static_cast<uint8_t>( referenceList1Entries_.size() );
+      pReferenceList1Entries = referenceList1Entries_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ReferenceListsInfoEXT & setPReferenceModifications( const StdVideoEncodeH265ReferenceModifications * pReferenceModifications_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pReferenceModifications = pReferenceModifications_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkVideoEncodeH265ReferenceListsInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoEncodeH265ReferenceListsInfoEXT*>( this );
+    }
+
+    operator VkVideoEncodeH265ReferenceListsInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoEncodeH265ReferenceListsInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint8_t const &, const VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT * const &, uint8_t const &, const VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT * const &, const StdVideoEncodeH265ReferenceModifications * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, referenceList0EntryCount, pReferenceList0Entries, referenceList1EntryCount, pReferenceList1Entries, pReferenceModifications );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( VideoEncodeH265ReferenceListsInfoEXT const & ) const = default;
+#else
+    // NOTE: pointer members are compared by address only — two structs describing
+    // identical list contents at different addresses compare unequal.
+    bool operator==( VideoEncodeH265ReferenceListsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( referenceList0EntryCount == rhs.referenceList0EntryCount )
+          && ( pReferenceList0Entries == rhs.pReferenceList0Entries )
+          && ( referenceList1EntryCount == rhs.referenceList1EntryCount )
+          && ( pReferenceList1Entries == rhs.pReferenceList1Entries )
+          && ( pReferenceModifications == rhs.pReferenceModifications );
+#endif
+    }
+
+    bool operator!=( VideoEncodeH265ReferenceListsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265ReferenceListsInfoEXT;
+    const void * pNext = {};
+    uint8_t referenceList0EntryCount = {};
+    const VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT * pReferenceList0Entries = {};
+    uint8_t referenceList1EntryCount = {};
+    const VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT * pReferenceList1Entries = {};
+    const StdVideoEncodeH265ReferenceModifications * pReferenceModifications = {};
+
+  };
+
+  // Maps the sType enum value back to this wrapper type for generic code.
+  template <>
+  struct CppType<StructureType, StructureType::eVideoEncodeH265ReferenceListsInfoEXT>
+  {
+    using Type = VideoEncodeH265ReferenceListsInfoEXT;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  // C++ wrapper over VkVideoEncodeH265NaluSliceSegmentInfoEXT: describes one slice
+  // segment NALU (CTB count, final reference lists, and a pointer to the codec-std
+  // slice segment header).  reinterpret_cast conversions assume layout compatibility
+  // with the C struct.
+  struct VideoEncodeH265NaluSliceSegmentInfoEXT
+  {
+    using NativeType = VkVideoEncodeH265NaluSliceSegmentInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265NaluSliceSegmentInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR VideoEncodeH265NaluSliceSegmentInfoEXT(uint32_t ctbCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsInfoEXT * pReferenceFinalLists_ = {}, const StdVideoEncodeH265SliceSegmentHeader * pSliceSegmentHeaderStd_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), ctbCount( ctbCount_ ), pReferenceFinalLists( pReferenceFinalLists_ ), pSliceSegmentHeaderStd( pSliceSegmentHeaderStd_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoEncodeH265NaluSliceSegmentInfoEXT( VideoEncodeH265NaluSliceSegmentInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoEncodeH265NaluSliceSegmentInfoEXT( VkVideoEncodeH265NaluSliceSegmentInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoEncodeH265NaluSliceSegmentInfoEXT( *reinterpret_cast<VideoEncodeH265NaluSliceSegmentInfoEXT const *>( &rhs ) )
+    {}
+
+
+    VideoEncodeH265NaluSliceSegmentInfoEXT & operator=( VideoEncodeH265NaluSliceSegmentInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoEncodeH265NaluSliceSegmentInfoEXT & operator=( VkVideoEncodeH265NaluSliceSegmentInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters; each returns *this for chaining.
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265NaluSliceSegmentInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265NaluSliceSegmentInfoEXT & setCtbCount( uint32_t ctbCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      ctbCount = ctbCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265NaluSliceSegmentInfoEXT & setPReferenceFinalLists( const VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsInfoEXT * pReferenceFinalLists_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pReferenceFinalLists = pReferenceFinalLists_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265NaluSliceSegmentInfoEXT & setPSliceSegmentHeaderStd( const StdVideoEncodeH265SliceSegmentHeader * pSliceSegmentHeaderStd_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSliceSegmentHeaderStd = pSliceSegmentHeaderStd_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkVideoEncodeH265NaluSliceSegmentInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoEncodeH265NaluSliceSegmentInfoEXT*>( this );
+    }
+
+    operator VkVideoEncodeH265NaluSliceSegmentInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoEncodeH265NaluSliceSegmentInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsInfoEXT * const &, const StdVideoEncodeH265SliceSegmentHeader * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, ctbCount, pReferenceFinalLists, pSliceSegmentHeaderStd );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( VideoEncodeH265NaluSliceSegmentInfoEXT const & ) const = default;
+#else
+    // NOTE: pointer members are compared by address, not by pointed-to contents.
+    bool operator==( VideoEncodeH265NaluSliceSegmentInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( ctbCount == rhs.ctbCount )
+          && ( pReferenceFinalLists == rhs.pReferenceFinalLists )
+          && ( pSliceSegmentHeaderStd == rhs.pSliceSegmentHeaderStd );
+#endif
+    }
+
+    bool operator!=( VideoEncodeH265NaluSliceSegmentInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265NaluSliceSegmentInfoEXT;
+    const void * pNext = {};
+    uint32_t ctbCount = {};
+    const VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsInfoEXT * pReferenceFinalLists = {};
+    const StdVideoEncodeH265SliceSegmentHeader * pSliceSegmentHeaderStd = {};
+
+  };
+
+  // Maps the sType enum value back to this wrapper type for generic code.
+  template <>
+  struct CppType<StructureType, StructureType::eVideoEncodeH265NaluSliceSegmentInfoEXT>
+  {
+    using Type = VideoEncodeH265NaluSliceSegmentInfoEXT;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  // C++ wrapper over VkVideoEncodeH265ProfileInfoEXT.  Its single payload member,
+  // stdProfileIdc, comes from the Vulkan video std headers and is opaque to this
+  // wrapper, so comparison below is done bytewise with memcmp instead of operator==.
+  struct VideoEncodeH265ProfileInfoEXT
+  {
+    using NativeType = VkVideoEncodeH265ProfileInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265ProfileInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR VideoEncodeH265ProfileInfoEXT(StdVideoH265ProfileIdc stdProfileIdc_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), stdProfileIdc( stdProfileIdc_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoEncodeH265ProfileInfoEXT( VideoEncodeH265ProfileInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoEncodeH265ProfileInfoEXT( VkVideoEncodeH265ProfileInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoEncodeH265ProfileInfoEXT( *reinterpret_cast<VideoEncodeH265ProfileInfoEXT const *>( &rhs ) )
+    {}
+
+
+    VideoEncodeH265ProfileInfoEXT & operator=( VideoEncodeH265ProfileInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoEncodeH265ProfileInfoEXT & operator=( VkVideoEncodeH265ProfileInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH265ProfileInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters; each returns *this for chaining.
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ProfileInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ProfileInfoEXT & setStdProfileIdc( StdVideoH265ProfileIdc stdProfileIdc_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stdProfileIdc = stdProfileIdc_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkVideoEncodeH265ProfileInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoEncodeH265ProfileInfoEXT*>( this );
+    }
+
+    operator VkVideoEncodeH265ProfileInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoEncodeH265ProfileInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, StdVideoH265ProfileIdc const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, stdProfileIdc );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    // Hand-rolled ordering: the defaulted <=> cannot be used because stdProfileIdc has
+    // no comparison operators; its bytes are ordered via memcmp instead.
+    // NOTE(review): memcmp also covers any padding bytes inside StdVideoH265ProfileIdc —
+    // assumed to be absent or consistently initialized; confirm against the std header.
+    std::strong_ordering operator<=>( VideoEncodeH265ProfileInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
+      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
+      if ( auto cmp = memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH265ProfileIdc ) ); cmp != 0 )
+        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
+
+      return std::strong_ordering::equivalent;
+    }
+#endif
+
+    // Equality likewise compares the opaque member bytewise.
+    bool operator==( VideoEncodeH265ProfileInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH265ProfileIdc ) ) == 0 );
+    }
+
+    bool operator!=( VideoEncodeH265ProfileInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265ProfileInfoEXT;
+    const void * pNext = {};
+    StdVideoH265ProfileIdc stdProfileIdc = {};
+
+  };
+
+  // Maps the sType enum value back to this wrapper type for generic code.
+  template <>
+  struct CppType<StructureType, StructureType::eVideoEncodeH265ProfileInfoEXT>
+  {
+    using Type = VideoEncodeH265ProfileInfoEXT;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  // Thin C++ wrapper over VkVideoEncodeH265QpEXT: one signed QP value per picture type
+  // (I/P/B).  No sType/pNext, so it is a plain value struct; the reinterpret_cast
+  // conversions below assume layout compatibility with the C struct.
+  struct VideoEncodeH265QpEXT
+  {
+    using NativeType = VkVideoEncodeH265QpEXT;
+
+
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR VideoEncodeH265QpEXT(int32_t qpI_ = {}, int32_t qpP_ = {}, int32_t qpB_ = {}) VULKAN_HPP_NOEXCEPT
+    : qpI( qpI_ ), qpP( qpP_ ), qpB( qpB_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoEncodeH265QpEXT( VideoEncodeH265QpEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoEncodeH265QpEXT( VkVideoEncodeH265QpEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoEncodeH265QpEXT( *reinterpret_cast<VideoEncodeH265QpEXT const *>( &rhs ) )
+    {}
+
+
+    VideoEncodeH265QpEXT & operator=( VideoEncodeH265QpEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoEncodeH265QpEXT & operator=( VkVideoEncodeH265QpEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters; each returns *this for chaining.
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265QpEXT & setQpI( int32_t qpI_ ) VULKAN_HPP_NOEXCEPT
+    {
+      qpI = qpI_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265QpEXT & setQpP( int32_t qpP_ ) VULKAN_HPP_NOEXCEPT
+    {
+      qpP = qpP_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265QpEXT & setQpB( int32_t qpB_ ) VULKAN_HPP_NOEXCEPT
+    {
+      qpB = qpB_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkVideoEncodeH265QpEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoEncodeH265QpEXT*>( this );
+    }
+
+    operator VkVideoEncodeH265QpEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoEncodeH265QpEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<int32_t const &, int32_t const &, int32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( qpI, qpP, qpB );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( VideoEncodeH265QpEXT const & ) const = default;
+#else
+    bool operator==( VideoEncodeH265QpEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( qpI == rhs.qpI )
+          && ( qpP == rhs.qpP )
+          && ( qpB == rhs.qpB );
+#endif
+    }
+
+    bool operator!=( VideoEncodeH265QpEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    int32_t qpI = {};
+    int32_t qpP = {};
+    int32_t qpB = {};
+
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  // C++ wrapper over VkVideoEncodeH265RateControlInfoEXT: GOP frame count, IDR period,
+  // consecutive-B count, rate-control structure enum, and sub-layer count.
+  // reinterpret_cast conversions assume layout compatibility with the C struct.
+  struct VideoEncodeH265RateControlInfoEXT
+  {
+    using NativeType = VkVideoEncodeH265RateControlInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265RateControlInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR VideoEncodeH265RateControlInfoEXT(uint32_t gopFrameCount_ = {}, uint32_t idrPeriod_ = {}, uint32_t consecutiveBFrameCount_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlStructureEXT rateControlStructure_ = VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlStructureEXT::eUnknown, uint8_t subLayerCount_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), gopFrameCount( gopFrameCount_ ), idrPeriod( idrPeriod_ ), consecutiveBFrameCount( consecutiveBFrameCount_ ), rateControlStructure( rateControlStructure_ ), subLayerCount( subLayerCount_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoEncodeH265RateControlInfoEXT( VideoEncodeH265RateControlInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoEncodeH265RateControlInfoEXT( VkVideoEncodeH265RateControlInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoEncodeH265RateControlInfoEXT( *reinterpret_cast<VideoEncodeH265RateControlInfoEXT const *>( &rhs ) )
+    {}
+
+
+    VideoEncodeH265RateControlInfoEXT & operator=( VideoEncodeH265RateControlInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoEncodeH265RateControlInfoEXT & operator=( VkVideoEncodeH265RateControlInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters; each returns *this for chaining.
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoEXT & setGopFrameCount( uint32_t gopFrameCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      gopFrameCount = gopFrameCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoEXT & setIdrPeriod( uint32_t idrPeriod_ ) VULKAN_HPP_NOEXCEPT
+    {
+      idrPeriod = idrPeriod_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoEXT & setConsecutiveBFrameCount( uint32_t consecutiveBFrameCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      consecutiveBFrameCount = consecutiveBFrameCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoEXT & setRateControlStructure( VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlStructureEXT rateControlStructure_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rateControlStructure = rateControlStructure_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoEXT & setSubLayerCount( uint8_t subLayerCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subLayerCount = subLayerCount_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkVideoEncodeH265RateControlInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoEncodeH265RateControlInfoEXT*>( this );
+    }
+
+    operator VkVideoEncodeH265RateControlInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoEncodeH265RateControlInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlStructureEXT const &, uint8_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, gopFrameCount, idrPeriod, consecutiveBFrameCount, rateControlStructure, subLayerCount );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( VideoEncodeH265RateControlInfoEXT const & ) const = default;
+#else
+    bool operator==( VideoEncodeH265RateControlInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( gopFrameCount == rhs.gopFrameCount )
+          && ( idrPeriod == rhs.idrPeriod )
+          && ( consecutiveBFrameCount == rhs.consecutiveBFrameCount )
+          && ( rateControlStructure == rhs.rateControlStructure )
+          && ( subLayerCount == rhs.subLayerCount );
+#endif
+    }
+
+    bool operator!=( VideoEncodeH265RateControlInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265RateControlInfoEXT;
+    const void * pNext = {};
+    uint32_t gopFrameCount = {};
+    uint32_t idrPeriod = {};
+    uint32_t consecutiveBFrameCount = {};
+    VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlStructureEXT rateControlStructure = VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlStructureEXT::eUnknown;
+    uint8_t subLayerCount = {};
+
+  };
+
+  // Maps the sType enum value back to this wrapper type for generic code.
+  template <>
+  struct CppType<StructureType, StructureType::eVideoEncodeH265RateControlInfoEXT>
+  {
+    using Type = VideoEncodeH265RateControlInfoEXT;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  // Wrapper for VkVideoEncodeH265RateControlLayerInfoEXT: per-temporal-layer
+  // rate-control settings (initial/min/max QP and max frame size) for H.265
+  // video encoding. NOTE(review): generated Vulkan-Hpp code — the
+  // reinterpret_cast conversions below assume this struct is layout-identical
+  // to the C struct (member order and types must not be changed by hand).
+  struct VideoEncodeH265RateControlLayerInfoEXT
+  {
+    using NativeType = VkVideoEncodeH265RateControlLayerInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265RateControlLayerInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor: every field defaulted; pNext comes last by convention.
+VULKAN_HPP_CONSTEXPR VideoEncodeH265RateControlLayerInfoEXT(uint8_t temporalId_ = {}, VULKAN_HPP_NAMESPACE::Bool32 useInitialRcQp_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT initialRcQp_ = {}, VULKAN_HPP_NAMESPACE::Bool32 useMinQp_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT minQp_ = {}, VULKAN_HPP_NAMESPACE::Bool32 useMaxQp_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT maxQp_ = {}, VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeEXT maxFrameSize_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), temporalId( temporalId_ ), useInitialRcQp( useInitialRcQp_ ), initialRcQp( initialRcQp_ ), useMinQp( useMinQp_ ), minQp( minQp_ ), useMaxQp( useMaxQp_ ), maxQp( maxQp_ ), useMaxFrameSize( useMaxFrameSize_ ), maxFrameSize( maxFrameSize_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoEncodeH265RateControlLayerInfoEXT( VideoEncodeH265RateControlLayerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct via a layout-compatible reinterpret.
+    VideoEncodeH265RateControlLayerInfoEXT( VkVideoEncodeH265RateControlLayerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoEncodeH265RateControlLayerInfoEXT( *reinterpret_cast<VideoEncodeH265RateControlLayerInfoEXT const *>( &rhs ) )
+    {}
+
+
+    VideoEncodeH265RateControlLayerInfoEXT & operator=( VideoEncodeH265RateControlLayerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (same layout-compatibility assumption).
+    VideoEncodeH265RateControlLayerInfoEXT & operator=( VkVideoEncodeH265RateControlLayerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlLayerInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters; each returns *this for builder-style use.
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT & setTemporalId( uint8_t temporalId_ ) VULKAN_HPP_NOEXCEPT
+    {
+      temporalId = temporalId_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT & setUseInitialRcQp( VULKAN_HPP_NAMESPACE::Bool32 useInitialRcQp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      useInitialRcQp = useInitialRcQp_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT & setInitialRcQp( VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT const & initialRcQp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      initialRcQp = initialRcQp_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT & setUseMinQp( VULKAN_HPP_NAMESPACE::Bool32 useMinQp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      useMinQp = useMinQp_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT & setMinQp( VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT const & minQp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minQp = minQp_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT & setUseMaxQp( VULKAN_HPP_NAMESPACE::Bool32 useMaxQp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      useMaxQp = useMaxQp_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT & setMaxQp( VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT const & maxQp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxQp = maxQp_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT & setUseMaxFrameSize( VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      useMaxFrameSize = useMaxFrameSize_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT & setMaxFrameSize( VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeEXT const & maxFrameSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxFrameSize = maxFrameSize_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the underlying C struct for passing to the API.
+    operator VkVideoEncodeH265RateControlLayerInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoEncodeH265RateControlLayerInfoEXT*>( this );
+    }
+
+    operator VkVideoEncodeH265RateControlLayerInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoEncodeH265RateControlLayerInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // reflect(): tuple-of-references view of all members, used by the
+    // reflection-based operator== below. Return type is 'auto' from C++14 up.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint8_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeEXT const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, temporalId, useInitialRcQp, initialRcQp, useMinQp, minQp, useMaxQp, maxQp, useMaxFrameSize, maxFrameSize );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( VideoEncodeH265RateControlLayerInfoEXT const & ) const = default;
+#else
+    // Memberwise equality; compares pNext as a raw pointer (no chain traversal).
+    bool operator==( VideoEncodeH265RateControlLayerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( temporalId == rhs.temporalId )
+          && ( useInitialRcQp == rhs.useInitialRcQp )
+          && ( initialRcQp == rhs.initialRcQp )
+          && ( useMinQp == rhs.useMinQp )
+          && ( minQp == rhs.minQp )
+          && ( useMaxQp == rhs.useMaxQp )
+          && ( maxQp == rhs.maxQp )
+          && ( useMaxFrameSize == rhs.useMaxFrameSize )
+          && ( maxFrameSize == rhs.maxFrameSize );
+#endif
+    }
+
+    bool operator!=( VideoEncodeH265RateControlLayerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members mirror the C struct; sType is first, as the casts above require.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265RateControlLayerInfoEXT;
+    const void * pNext = {};
+    uint8_t temporalId = {};
+    VULKAN_HPP_NAMESPACE::Bool32 useInitialRcQp = {};
+    VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT initialRcQp = {};
+    VULKAN_HPP_NAMESPACE::Bool32 useMinQp = {};
+    VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT minQp = {};
+    VULKAN_HPP_NAMESPACE::Bool32 useMaxQp = {};
+    VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT maxQp = {};
+    VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize = {};
+    VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeEXT maxFrameSize = {};
+
+  };
+
+  // Maps the StructureType enum value back to this C++ type (used by
+  // structure-chain machinery).
+  template <>
+  struct CppType<StructureType, StructureType::eVideoEncodeH265RateControlLayerInfoEXT>
+  {
+    using Type = VideoEncodeH265RateControlLayerInfoEXT;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  // Wrapper for VkVideoEncodeH265SessionParametersAddInfoEXT: batches of
+  // H.265 VPS/SPS/PPS parameter sets to add to an encode session, each given
+  // as a (count, pointer) pair. NOTE(review): generated Vulkan-Hpp code —
+  // member order mirrors the C struct and must not be changed by hand.
+  struct VideoEncodeH265SessionParametersAddInfoEXT
+  {
+    using NativeType = VkVideoEncodeH265SessionParametersAddInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265SessionParametersAddInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor taking raw (count, pointer) pairs; pNext last.
+VULKAN_HPP_CONSTEXPR VideoEncodeH265SessionParametersAddInfoEXT(uint32_t stdVPSCount_ = {}, const StdVideoH265VideoParameterSet * pStdVPSs_ = {}, uint32_t stdSPSCount_ = {}, const StdVideoH265SequenceParameterSet * pStdSPSs_ = {}, uint32_t stdPPSCount_ = {}, const StdVideoH265PictureParameterSet * pStdPPSs_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), stdVPSCount( stdVPSCount_ ), pStdVPSs( pStdVPSs_ ), stdSPSCount( stdSPSCount_ ), pStdSPSs( pStdSPSs_ ), stdPPSCount( stdPPSCount_ ), pStdPPSs( pStdPPSs_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoEncodeH265SessionParametersAddInfoEXT( VideoEncodeH265SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct via a layout-compatible reinterpret.
+    VideoEncodeH265SessionParametersAddInfoEXT( VkVideoEncodeH265SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoEncodeH265SessionParametersAddInfoEXT( *reinterpret_cast<VideoEncodeH265SessionParametersAddInfoEXT const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Enhanced-mode constructor: each ArrayProxy fills the matching
+    // count/pointer pair. Caller keeps ownership of the proxied storage.
+    VideoEncodeH265SessionParametersAddInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH265VideoParameterSet> const & stdVPSs_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH265SequenceParameterSet> const & stdSPSs_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH265PictureParameterSet> const & stdPPSs_ = {}, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), stdVPSCount( static_cast<uint32_t>( stdVPSs_.size() ) ), pStdVPSs( stdVPSs_.data() ), stdSPSCount( static_cast<uint32_t>( stdSPSs_.size() ) ), pStdSPSs( stdSPSs_.data() ), stdPPSCount( static_cast<uint32_t>( stdPPSs_.size() ) ), pStdPPSs( stdPPSs_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    VideoEncodeH265SessionParametersAddInfoEXT & operator=( VideoEncodeH265SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (same layout-compatibility assumption).
+    VideoEncodeH265SessionParametersAddInfoEXT & operator=( VkVideoEncodeH265SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters; the ArrayProxy overloads set count and pointer together.
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoEXT & setStdVPSCount( uint32_t stdVPSCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stdVPSCount = stdVPSCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoEXT & setPStdVPSs( const StdVideoH265VideoParameterSet * pStdVPSs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pStdVPSs = pStdVPSs_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    VideoEncodeH265SessionParametersAddInfoEXT & setStdVPSs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH265VideoParameterSet> const & stdVPSs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stdVPSCount = static_cast<uint32_t>( stdVPSs_.size() );
+      pStdVPSs = stdVPSs_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoEXT & setStdSPSCount( uint32_t stdSPSCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stdSPSCount = stdSPSCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoEXT & setPStdSPSs( const StdVideoH265SequenceParameterSet * pStdSPSs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pStdSPSs = pStdSPSs_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    VideoEncodeH265SessionParametersAddInfoEXT & setStdSPSs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH265SequenceParameterSet> const & stdSPSs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stdSPSCount = static_cast<uint32_t>( stdSPSs_.size() );
+      pStdSPSs = stdSPSs_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoEXT & setStdPPSCount( uint32_t stdPPSCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stdPPSCount = stdPPSCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoEXT & setPStdPPSs( const StdVideoH265PictureParameterSet * pStdPPSs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pStdPPSs = pStdPPSs_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    VideoEncodeH265SessionParametersAddInfoEXT & setStdPPSs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH265PictureParameterSet> const & stdPPSs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stdPPSCount = static_cast<uint32_t>( stdPPSs_.size() );
+      pStdPPSs = stdPPSs_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the underlying C struct for passing to the API.
+    operator VkVideoEncodeH265SessionParametersAddInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoEncodeH265SessionParametersAddInfoEXT*>( this );
+    }
+
+    operator VkVideoEncodeH265SessionParametersAddInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoEncodeH265SessionParametersAddInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // reflect(): tuple-of-references view of all members for reflection-based
+    // comparison. Return type is 'auto' from C++14 up.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const StdVideoH265VideoParameterSet * const &, uint32_t const &, const StdVideoH265SequenceParameterSet * const &, uint32_t const &, const StdVideoH265PictureParameterSet * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, stdVPSCount, pStdVPSs, stdSPSCount, pStdSPSs, stdPPSCount, pStdPPSs );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( VideoEncodeH265SessionParametersAddInfoEXT const & ) const = default;
+#else
+    // Memberwise equality; pointer members compare addresses, not pointees.
+    bool operator==( VideoEncodeH265SessionParametersAddInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( stdVPSCount == rhs.stdVPSCount )
+          && ( pStdVPSs == rhs.pStdVPSs )
+          && ( stdSPSCount == rhs.stdSPSCount )
+          && ( pStdSPSs == rhs.pStdSPSs )
+          && ( stdPPSCount == rhs.stdPPSCount )
+          && ( pStdPPSs == rhs.pStdPPSs );
+#endif
+    }
+
+    bool operator!=( VideoEncodeH265SessionParametersAddInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members mirror the C struct; sType is first, as the casts above require.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265SessionParametersAddInfoEXT;
+    const void * pNext = {};
+    uint32_t stdVPSCount = {};
+    const StdVideoH265VideoParameterSet * pStdVPSs = {};
+    uint32_t stdSPSCount = {};
+    const StdVideoH265SequenceParameterSet * pStdSPSs = {};
+    uint32_t stdPPSCount = {};
+    const StdVideoH265PictureParameterSet * pStdPPSs = {};
+
+  };
+
+  // Maps the StructureType enum value back to this C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::eVideoEncodeH265SessionParametersAddInfoEXT>
+  {
+    using Type = VideoEncodeH265SessionParametersAddInfoEXT;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  // Wrapper for VkVideoEncodeH265SessionParametersCreateInfoEXT: capacity
+  // limits (max VPS/SPS/PPS counts) plus an optional initial AddInfo payload
+  // when creating H.265 encode session parameters. NOTE(review): generated
+  // Vulkan-Hpp code — member order mirrors the C struct; do not edit by hand.
+  struct VideoEncodeH265SessionParametersCreateInfoEXT
+  {
+    using NativeType = VkVideoEncodeH265SessionParametersCreateInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265SessionParametersCreateInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor: every field defaulted; pNext comes last by convention.
+VULKAN_HPP_CONSTEXPR VideoEncodeH265SessionParametersCreateInfoEXT(uint32_t maxStdVPSCount_ = {}, uint32_t maxStdSPSCount_ = {}, uint32_t maxStdPPSCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoEXT * pParametersAddInfo_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), maxStdVPSCount( maxStdVPSCount_ ), maxStdSPSCount( maxStdSPSCount_ ), maxStdPPSCount( maxStdPPSCount_ ), pParametersAddInfo( pParametersAddInfo_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoEncodeH265SessionParametersCreateInfoEXT( VideoEncodeH265SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct via a layout-compatible reinterpret.
+    VideoEncodeH265SessionParametersCreateInfoEXT( VkVideoEncodeH265SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoEncodeH265SessionParametersCreateInfoEXT( *reinterpret_cast<VideoEncodeH265SessionParametersCreateInfoEXT const *>( &rhs ) )
+    {}
+
+
+    VideoEncodeH265SessionParametersCreateInfoEXT & operator=( VideoEncodeH265SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (same layout-compatibility assumption).
+    VideoEncodeH265SessionParametersCreateInfoEXT & operator=( VkVideoEncodeH265SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters; each returns *this for builder-style use.
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersCreateInfoEXT & setMaxStdVPSCount( uint32_t maxStdVPSCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxStdVPSCount = maxStdVPSCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersCreateInfoEXT & setMaxStdSPSCount( uint32_t maxStdSPSCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxStdSPSCount = maxStdSPSCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersCreateInfoEXT & setMaxStdPPSCount( uint32_t maxStdPPSCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxStdPPSCount = maxStdPPSCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersCreateInfoEXT & setPParametersAddInfo( const VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoEXT * pParametersAddInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pParametersAddInfo = pParametersAddInfo_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the underlying C struct for passing to the API.
+    operator VkVideoEncodeH265SessionParametersCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoEncodeH265SessionParametersCreateInfoEXT*>( this );
+    }
+
+    operator VkVideoEncodeH265SessionParametersCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoEncodeH265SessionParametersCreateInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // reflect(): tuple-of-references view of all members for reflection-based
+    // comparison. Return type is 'auto' from C++14 up.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, uint32_t const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoEXT * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, maxStdVPSCount, maxStdSPSCount, maxStdPPSCount, pParametersAddInfo );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( VideoEncodeH265SessionParametersCreateInfoEXT const & ) const = default;
+#else
+    // Memberwise equality; pParametersAddInfo compares by address, not contents.
+    bool operator==( VideoEncodeH265SessionParametersCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxStdVPSCount == rhs.maxStdVPSCount )
+          && ( maxStdSPSCount == rhs.maxStdSPSCount )
+          && ( maxStdPPSCount == rhs.maxStdPPSCount )
+          && ( pParametersAddInfo == rhs.pParametersAddInfo );
+#endif
+    }
+
+    bool operator!=( VideoEncodeH265SessionParametersCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members mirror the C struct; sType is first, as the casts above require.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265SessionParametersCreateInfoEXT;
+    const void * pNext = {};
+    uint32_t maxStdVPSCount = {};
+    uint32_t maxStdSPSCount = {};
+    uint32_t maxStdPPSCount = {};
+    const VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoEXT * pParametersAddInfo = {};
+
+  };
+
+  // Maps the StructureType enum value back to this C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::eVideoEncodeH265SessionParametersCreateInfoEXT>
+  {
+    using Type = VideoEncodeH265SessionParametersCreateInfoEXT;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  // Wrapper for VkVideoEncodeH265VclFrameInfoEXT: per-frame VCL data for
+  // H.265 encoding — final reference lists, the NALU slice-segment array
+  // (count + pointer), and the std picture info. NOTE(review): generated
+  // Vulkan-Hpp code — member order mirrors the C struct; do not edit by hand.
+  struct VideoEncodeH265VclFrameInfoEXT
+  {
+    using NativeType = VkVideoEncodeH265VclFrameInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265VclFrameInfoEXT;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Value constructor taking the raw (count, pointer) slice-segment pair.
+VULKAN_HPP_CONSTEXPR VideoEncodeH265VclFrameInfoEXT(const VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsInfoEXT * pReferenceFinalLists_ = {}, uint32_t naluSliceSegmentEntryCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentInfoEXT * pNaluSliceSegmentEntries_ = {}, const StdVideoEncodeH265PictureInfo * pCurrentPictureInfo_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), pReferenceFinalLists( pReferenceFinalLists_ ), naluSliceSegmentEntryCount( naluSliceSegmentEntryCount_ ), pNaluSliceSegmentEntries( pNaluSliceSegmentEntries_ ), pCurrentPictureInfo( pCurrentPictureInfo_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoEncodeH265VclFrameInfoEXT( VideoEncodeH265VclFrameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct via a layout-compatible reinterpret.
+    VideoEncodeH265VclFrameInfoEXT( VkVideoEncodeH265VclFrameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoEncodeH265VclFrameInfoEXT( *reinterpret_cast<VideoEncodeH265VclFrameInfoEXT const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Enhanced-mode constructor: the ArrayProxy fills count and pointer
+    // together. Caller keeps ownership of the proxied storage.
+    VideoEncodeH265VclFrameInfoEXT( const VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsInfoEXT * pReferenceFinalLists_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentInfoEXT> const & naluSliceSegmentEntries_, const StdVideoEncodeH265PictureInfo * pCurrentPictureInfo_ = {}, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), pReferenceFinalLists( pReferenceFinalLists_ ), naluSliceSegmentEntryCount( static_cast<uint32_t>( naluSliceSegmentEntries_.size() ) ), pNaluSliceSegmentEntries( naluSliceSegmentEntries_.data() ), pCurrentPictureInfo( pCurrentPictureInfo_ )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    VideoEncodeH265VclFrameInfoEXT & operator=( VideoEncodeH265VclFrameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assign from the C struct (same layout-compatibility assumption).
+    VideoEncodeH265VclFrameInfoEXT & operator=( VkVideoEncodeH265VclFrameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH265VclFrameInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Chainable setters; the ArrayProxy overload sets count and pointer together.
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265VclFrameInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265VclFrameInfoEXT & setPReferenceFinalLists( const VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsInfoEXT * pReferenceFinalLists_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pReferenceFinalLists = pReferenceFinalLists_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265VclFrameInfoEXT & setNaluSliceSegmentEntryCount( uint32_t naluSliceSegmentEntryCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      naluSliceSegmentEntryCount = naluSliceSegmentEntryCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265VclFrameInfoEXT & setPNaluSliceSegmentEntries( const VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentInfoEXT * pNaluSliceSegmentEntries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNaluSliceSegmentEntries = pNaluSliceSegmentEntries_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    VideoEncodeH265VclFrameInfoEXT & setNaluSliceSegmentEntries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentInfoEXT> const & naluSliceSegmentEntries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      naluSliceSegmentEntryCount = static_cast<uint32_t>( naluSliceSegmentEntries_.size() );
+      pNaluSliceSegmentEntries = naluSliceSegmentEntries_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265VclFrameInfoEXT & setPCurrentPictureInfo( const StdVideoEncodeH265PictureInfo * pCurrentPictureInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pCurrentPictureInfo = pCurrentPictureInfo_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the underlying C struct for passing to the API.
+    operator VkVideoEncodeH265VclFrameInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoEncodeH265VclFrameInfoEXT*>( this );
+    }
+
+    operator VkVideoEncodeH265VclFrameInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoEncodeH265VclFrameInfoEXT*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // reflect(): tuple-of-references view of all members for reflection-based
+    // comparison. Return type is 'auto' from C++14 up.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsInfoEXT * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentInfoEXT * const &, const StdVideoEncodeH265PictureInfo * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, pReferenceFinalLists, naluSliceSegmentEntryCount, pNaluSliceSegmentEntries, pCurrentPictureInfo );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( VideoEncodeH265VclFrameInfoEXT const & ) const = default;
+#else
+    // Memberwise equality; pointer members compare addresses, not pointees.
+    bool operator==( VideoEncodeH265VclFrameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pReferenceFinalLists == rhs.pReferenceFinalLists )
+          && ( naluSliceSegmentEntryCount == rhs.naluSliceSegmentEntryCount )
+          && ( pNaluSliceSegmentEntries == rhs.pNaluSliceSegmentEntries )
+          && ( pCurrentPictureInfo == rhs.pCurrentPictureInfo );
+#endif
+    }
+
+    bool operator!=( VideoEncodeH265VclFrameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members mirror the C struct; sType is first, as the casts above require.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265VclFrameInfoEXT;
+    const void * pNext = {};
+    const VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsInfoEXT * pReferenceFinalLists = {};
+    uint32_t naluSliceSegmentEntryCount = {};
+    const VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentInfoEXT * pNaluSliceSegmentEntries = {};
+    const StdVideoEncodeH265PictureInfo * pCurrentPictureInfo = {};
+
+  };
+
+  // Maps the StructureType enum value back to this C++ type.
+  template <>
+  struct CppType<StructureType, StructureType::eVideoEncodeH265VclFrameInfoEXT>
+  {
+    using Type = VideoEncodeH265VclFrameInfoEXT;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  struct VideoEncodeInfoKHR
+  {
+    using NativeType = VkVideoEncodeInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR VideoEncodeInfoKHR(VULKAN_HPP_NAMESPACE::VideoEncodeFlagsKHR flags_ = {}, uint32_t qualityLevel_ = {}, VULKAN_HPP_NAMESPACE::Buffer dstBitstreamBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferMaxRange_ = {}, VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR srcPictureResource_ = {}, const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pSetupReferenceSlot_ = {}, uint32_t referenceSlotCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots_ = {}, uint32_t precedingExternallyEncodedBytes_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), qualityLevel( qualityLevel_ ), dstBitstreamBuffer( dstBitstreamBuffer_ ), dstBitstreamBufferOffset( dstBitstreamBufferOffset_ ), dstBitstreamBufferMaxRange( dstBitstreamBufferMaxRange_ ), srcPictureResource( srcPictureResource_ ), pSetupReferenceSlot( pSetupReferenceSlot_ ), referenceSlotCount( referenceSlotCount_ ), pReferenceSlots( pReferenceSlots_ ), precedingExternallyEncodedBytes( precedingExternallyEncodedBytes_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoEncodeInfoKHR( VideoEncodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoEncodeInfoKHR( VkVideoEncodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoEncodeInfoKHR( *reinterpret_cast<VideoEncodeInfoKHR const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    VideoEncodeInfoKHR( VULKAN_HPP_NAMESPACE::VideoEncodeFlagsKHR flags_, uint32_t qualityLevel_, VULKAN_HPP_NAMESPACE::Buffer dstBitstreamBuffer_, VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferOffset_, VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferMaxRange_, VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR srcPictureResource_, const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pSetupReferenceSlot_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR> const & referenceSlots_, uint32_t precedingExternallyEncodedBytes_ = {}, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), flags( flags_ ), qualityLevel( qualityLevel_ ), dstBitstreamBuffer( dstBitstreamBuffer_ ), dstBitstreamBufferOffset( dstBitstreamBufferOffset_ ), dstBitstreamBufferMaxRange( dstBitstreamBufferMaxRange_ ), srcPictureResource( srcPictureResource_ ), pSetupReferenceSlot( pSetupReferenceSlot_ ), referenceSlotCount( static_cast<uint32_t>( referenceSlots_.size() ) ), pReferenceSlots( referenceSlots_.data() ), precedingExternallyEncodedBytes( precedingExternallyEncodedBytes_ )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    VideoEncodeInfoKHR & operator=( VideoEncodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoEncodeInfoKHR & operator=( VkVideoEncodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoEncodeFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setQualityLevel( uint32_t qualityLevel_ ) VULKAN_HPP_NOEXCEPT
+    {
+      qualityLevel = qualityLevel_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setDstBitstreamBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBitstreamBuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstBitstreamBuffer = dstBitstreamBuffer_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setDstBitstreamBufferOffset( VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstBitstreamBufferOffset = dstBitstreamBufferOffset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setDstBitstreamBufferMaxRange( VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferMaxRange_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstBitstreamBufferMaxRange = dstBitstreamBufferMaxRange_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setSrcPictureResource( VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR const & srcPictureResource_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcPictureResource = srcPictureResource_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setPSetupReferenceSlot( const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pSetupReferenceSlot_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSetupReferenceSlot = pSetupReferenceSlot_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setReferenceSlotCount( uint32_t referenceSlotCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      referenceSlotCount = referenceSlotCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setPReferenceSlots( const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pReferenceSlots = pReferenceSlots_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    VideoEncodeInfoKHR & setReferenceSlots( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR> const & referenceSlots_ ) VULKAN_HPP_NOEXCEPT
+    {
+      referenceSlotCount = static_cast<uint32_t>( referenceSlots_.size() );
+      pReferenceSlots = referenceSlots_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setPrecedingExternallyEncodedBytes( uint32_t precedingExternallyEncodedBytes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      precedingExternallyEncodedBytes = precedingExternallyEncodedBytes_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkVideoEncodeInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoEncodeInfoKHR*>( this );
+    }
+
+    operator VkVideoEncodeInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoEncodeInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::VideoEncodeFlagsKHR const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Buffer const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR const &, const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, qualityLevel, dstBitstreamBuffer, dstBitstreamBufferOffset, dstBitstreamBufferMaxRange, srcPictureResource, pSetupReferenceSlot, referenceSlotCount, pReferenceSlots, precedingExternallyEncodedBytes );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( VideoEncodeInfoKHR const & ) const = default;
+#else
+    bool operator==( VideoEncodeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( qualityLevel == rhs.qualityLevel )
+          && ( dstBitstreamBuffer == rhs.dstBitstreamBuffer )
+          && ( dstBitstreamBufferOffset == rhs.dstBitstreamBufferOffset )
+          && ( dstBitstreamBufferMaxRange == rhs.dstBitstreamBufferMaxRange )
+          && ( srcPictureResource == rhs.srcPictureResource )
+          && ( pSetupReferenceSlot == rhs.pSetupReferenceSlot )
+          && ( referenceSlotCount == rhs.referenceSlotCount )
+          && ( pReferenceSlots == rhs.pReferenceSlots )
+          && ( precedingExternallyEncodedBytes == rhs.precedingExternallyEncodedBytes );
+#endif
+    }
+
+    bool operator!=( VideoEncodeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::VideoEncodeFlagsKHR flags = {};
+    uint32_t qualityLevel = {};
+    VULKAN_HPP_NAMESPACE::Buffer dstBitstreamBuffer = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferOffset = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferMaxRange = {};
+    VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR srcPictureResource = {};
+    const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pSetupReferenceSlot = {};
+    uint32_t referenceSlotCount = {};
+    const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots = {};
+    uint32_t precedingExternallyEncodedBytes = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoEncodeInfoKHR>
+  {
+    using Type = VideoEncodeInfoKHR;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  struct VideoEncodeRateControlLayerInfoKHR
+  {
+    using NativeType = VkVideoEncodeRateControlLayerInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeRateControlLayerInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR VideoEncodeRateControlLayerInfoKHR(uint32_t averageBitrate_ = {}, uint32_t maxBitrate_ = {}, uint32_t frameRateNumerator_ = {}, uint32_t frameRateDenominator_ = {}, uint32_t virtualBufferSizeInMs_ = {}, uint32_t initialVirtualBufferSizeInMs_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), averageBitrate( averageBitrate_ ), maxBitrate( maxBitrate_ ), frameRateNumerator( frameRateNumerator_ ), frameRateDenominator( frameRateDenominator_ ), virtualBufferSizeInMs( virtualBufferSizeInMs_ ), initialVirtualBufferSizeInMs( initialVirtualBufferSizeInMs_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoEncodeRateControlLayerInfoKHR( VideoEncodeRateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoEncodeRateControlLayerInfoKHR( VkVideoEncodeRateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoEncodeRateControlLayerInfoKHR( *reinterpret_cast<VideoEncodeRateControlLayerInfoKHR const *>( &rhs ) )
+    {}
+
+
+    VideoEncodeRateControlLayerInfoKHR & operator=( VideoEncodeRateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoEncodeRateControlLayerInfoKHR & operator=( VkVideoEncodeRateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR & setAverageBitrate( uint32_t averageBitrate_ ) VULKAN_HPP_NOEXCEPT
+    {
+      averageBitrate = averageBitrate_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR & setMaxBitrate( uint32_t maxBitrate_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxBitrate = maxBitrate_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR & setFrameRateNumerator( uint32_t frameRateNumerator_ ) VULKAN_HPP_NOEXCEPT
+    {
+      frameRateNumerator = frameRateNumerator_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR & setFrameRateDenominator( uint32_t frameRateDenominator_ ) VULKAN_HPP_NOEXCEPT
+    {
+      frameRateDenominator = frameRateDenominator_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR & setVirtualBufferSizeInMs( uint32_t virtualBufferSizeInMs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      virtualBufferSizeInMs = virtualBufferSizeInMs_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR & setInitialVirtualBufferSizeInMs( uint32_t initialVirtualBufferSizeInMs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      initialVirtualBufferSizeInMs = initialVirtualBufferSizeInMs_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkVideoEncodeRateControlLayerInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoEncodeRateControlLayerInfoKHR*>( this );
+    }
+
+    operator VkVideoEncodeRateControlLayerInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoEncodeRateControlLayerInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, averageBitrate, maxBitrate, frameRateNumerator, frameRateDenominator, virtualBufferSizeInMs, initialVirtualBufferSizeInMs );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( VideoEncodeRateControlLayerInfoKHR const & ) const = default;
+#else
+    bool operator==( VideoEncodeRateControlLayerInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( averageBitrate == rhs.averageBitrate )
+          && ( maxBitrate == rhs.maxBitrate )
+          && ( frameRateNumerator == rhs.frameRateNumerator )
+          && ( frameRateDenominator == rhs.frameRateDenominator )
+          && ( virtualBufferSizeInMs == rhs.virtualBufferSizeInMs )
+          && ( initialVirtualBufferSizeInMs == rhs.initialVirtualBufferSizeInMs );
+#endif
+    }
+
+    bool operator!=( VideoEncodeRateControlLayerInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeRateControlLayerInfoKHR;
+    const void * pNext = {};
+    uint32_t averageBitrate = {};
+    uint32_t maxBitrate = {};
+    uint32_t frameRateNumerator = {};
+    uint32_t frameRateDenominator = {};
+    uint32_t virtualBufferSizeInMs = {};
+    uint32_t initialVirtualBufferSizeInMs = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoEncodeRateControlLayerInfoKHR>
+  {
+    using Type = VideoEncodeRateControlLayerInfoKHR;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  struct VideoEncodeRateControlInfoKHR
+  {
+    using NativeType = VkVideoEncodeRateControlInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeRateControlInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR VideoEncodeRateControlInfoKHR(VULKAN_HPP_NAMESPACE::VideoEncodeRateControlFlagsKHR flags_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR rateControlMode_ = VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR::eNone, uint8_t layerCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR * pLayerConfigs_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), rateControlMode( rateControlMode_ ), layerCount( layerCount_ ), pLayerConfigs( pLayerConfigs_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoEncodeRateControlInfoKHR( VideoEncodeRateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoEncodeRateControlInfoKHR( VkVideoEncodeRateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoEncodeRateControlInfoKHR( *reinterpret_cast<VideoEncodeRateControlInfoKHR const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    VideoEncodeRateControlInfoKHR( VULKAN_HPP_NAMESPACE::VideoEncodeRateControlFlagsKHR flags_, VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR rateControlMode_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR> const & layerConfigs_, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), flags( flags_ ), rateControlMode( rateControlMode_ ), layerCount( static_cast<uint8_t>( layerConfigs_.size() ) ), pLayerConfigs( layerConfigs_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    VideoEncodeRateControlInfoKHR & operator=( VideoEncodeRateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoEncodeRateControlInfoKHR & operator=( VkVideoEncodeRateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeRateControlInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoEncodeRateControlFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlInfoKHR & setRateControlMode( VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR rateControlMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rateControlMode = rateControlMode_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlInfoKHR & setLayerCount( uint8_t layerCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      layerCount = layerCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlInfoKHR & setPLayerConfigs( const VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR * pLayerConfigs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pLayerConfigs = pLayerConfigs_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    VideoEncodeRateControlInfoKHR & setLayerConfigs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR> const & layerConfigs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      layerCount = static_cast<uint8_t>( layerConfigs_.size() );
+      pLayerConfigs = layerConfigs_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkVideoEncodeRateControlInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoEncodeRateControlInfoKHR*>( this );
+    }
+
+    operator VkVideoEncodeRateControlInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoEncodeRateControlInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::VideoEncodeRateControlFlagsKHR const &, VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR const &, uint8_t const &, const VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, rateControlMode, layerCount, pLayerConfigs );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( VideoEncodeRateControlInfoKHR const & ) const = default;
+#else
+    bool operator==( VideoEncodeRateControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( rateControlMode == rhs.rateControlMode )
+          && ( layerCount == rhs.layerCount )
+          && ( pLayerConfigs == rhs.pLayerConfigs );
+#endif
+    }
+
+    bool operator!=( VideoEncodeRateControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeRateControlInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::VideoEncodeRateControlFlagsKHR flags = {};
+    VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR rateControlMode = VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR::eNone;
+    uint8_t layerCount = {};
+    const VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR * pLayerConfigs = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoEncodeRateControlInfoKHR>
+  {
+    using Type = VideoEncodeRateControlInfoKHR;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  struct VideoEncodeUsageInfoKHR
+  {
+    using NativeType = VkVideoEncodeUsageInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeUsageInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR VideoEncodeUsageInfoKHR(VULKAN_HPP_NAMESPACE::VideoEncodeUsageFlagsKHR videoUsageHints_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeContentFlagsKHR videoContentHints_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeTuningModeKHR tuningMode_ = VULKAN_HPP_NAMESPACE::VideoEncodeTuningModeKHR::eDefault, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), videoUsageHints( videoUsageHints_ ), videoContentHints( videoContentHints_ ), tuningMode( tuningMode_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoEncodeUsageInfoKHR( VideoEncodeUsageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoEncodeUsageInfoKHR( VkVideoEncodeUsageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoEncodeUsageInfoKHR( *reinterpret_cast<VideoEncodeUsageInfoKHR const *>( &rhs ) )
+    {}
+
+
+    VideoEncodeUsageInfoKHR & operator=( VideoEncodeUsageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoEncodeUsageInfoKHR & operator=( VkVideoEncodeUsageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeUsageInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeUsageInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeUsageInfoKHR & setVideoUsageHints( VULKAN_HPP_NAMESPACE::VideoEncodeUsageFlagsKHR videoUsageHints_ ) VULKAN_HPP_NOEXCEPT
+    {
+      videoUsageHints = videoUsageHints_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeUsageInfoKHR & setVideoContentHints( VULKAN_HPP_NAMESPACE::VideoEncodeContentFlagsKHR videoContentHints_ ) VULKAN_HPP_NOEXCEPT
+    {
+      videoContentHints = videoContentHints_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeUsageInfoKHR & setTuningMode( VULKAN_HPP_NAMESPACE::VideoEncodeTuningModeKHR tuningMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tuningMode = tuningMode_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkVideoEncodeUsageInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoEncodeUsageInfoKHR*>( this );
+    }
+
+    operator VkVideoEncodeUsageInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoEncodeUsageInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::VideoEncodeUsageFlagsKHR const &, VULKAN_HPP_NAMESPACE::VideoEncodeContentFlagsKHR const &, VULKAN_HPP_NAMESPACE::VideoEncodeTuningModeKHR const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, videoUsageHints, videoContentHints, tuningMode );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( VideoEncodeUsageInfoKHR const & ) const = default;
+#else
+    bool operator==( VideoEncodeUsageInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( videoUsageHints == rhs.videoUsageHints )
+          && ( videoContentHints == rhs.videoContentHints )
+          && ( tuningMode == rhs.tuningMode );
+#endif
+    }
+
+    bool operator!=( VideoEncodeUsageInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeUsageInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::VideoEncodeUsageFlagsKHR videoUsageHints = {};
+    VULKAN_HPP_NAMESPACE::VideoEncodeContentFlagsKHR videoContentHints = {};
+    VULKAN_HPP_NAMESPACE::VideoEncodeTuningModeKHR tuningMode = VULKAN_HPP_NAMESPACE::VideoEncodeTuningModeKHR::eDefault;
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoEncodeUsageInfoKHR>
+  {
+    using Type = VideoEncodeUsageInfoKHR;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  struct VideoEndCodingInfoKHR
+  {
+    using NativeType = VkVideoEndCodingInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEndCodingInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR VideoEndCodingInfoKHR(VULKAN_HPP_NAMESPACE::VideoEndCodingFlagsKHR flags_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoEndCodingInfoKHR( VideoEndCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoEndCodingInfoKHR( VkVideoEndCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoEndCodingInfoKHR( *reinterpret_cast<VideoEndCodingInfoKHR const *>( &rhs ) )
+    {}
+
+
+    VideoEndCodingInfoKHR & operator=( VideoEndCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoEndCodingInfoKHR & operator=( VkVideoEndCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 VideoEndCodingInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEndCodingInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoEndCodingFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkVideoEndCodingInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoEndCodingInfoKHR*>( this );
+    }
+
+    operator VkVideoEndCodingInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoEndCodingInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::VideoEndCodingFlagsKHR const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( VideoEndCodingInfoKHR const & ) const = default;
+#else
+    bool operator==( VideoEndCodingInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags );
+#endif
+    }
+
+    bool operator!=( VideoEndCodingInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEndCodingInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::VideoEndCodingFlagsKHR flags = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoEndCodingInfoKHR>
+  {
+    using Type = VideoEndCodingInfoKHR;
+  };
+
+  struct VideoFormatPropertiesKHR
+  {
+    using NativeType = VkVideoFormatPropertiesKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoFormatPropertiesKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR VideoFormatPropertiesKHR(VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::ComponentMapping componentMapping_ = {}, VULKAN_HPP_NAMESPACE::ImageCreateFlags imageCreateFlags_ = {}, VULKAN_HPP_NAMESPACE::ImageType imageType_ = VULKAN_HPP_NAMESPACE::ImageType::e1D, VULKAN_HPP_NAMESPACE::ImageTiling imageTiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal, VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsageFlags_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), format( format_ ), componentMapping( componentMapping_ ), imageCreateFlags( imageCreateFlags_ ), imageType( imageType_ ), imageTiling( imageTiling_ ), imageUsageFlags( imageUsageFlags_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoFormatPropertiesKHR( VideoFormatPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoFormatPropertiesKHR( VkVideoFormatPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoFormatPropertiesKHR( *reinterpret_cast<VideoFormatPropertiesKHR const *>( &rhs ) )
+    {}
+
+
+    VideoFormatPropertiesKHR & operator=( VideoFormatPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoFormatPropertiesKHR & operator=( VkVideoFormatPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR const *>( &rhs );
+      return *this;
+    }
+
+
+    operator VkVideoFormatPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoFormatPropertiesKHR*>( this );
+    }
+
+    operator VkVideoFormatPropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoFormatPropertiesKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::ComponentMapping const &, VULKAN_HPP_NAMESPACE::ImageCreateFlags const &, VULKAN_HPP_NAMESPACE::ImageType const &, VULKAN_HPP_NAMESPACE::ImageTiling const &, VULKAN_HPP_NAMESPACE::ImageUsageFlags const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, format, componentMapping, imageCreateFlags, imageType, imageTiling, imageUsageFlags );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( VideoFormatPropertiesKHR const & ) const = default;
+#else
+    bool operator==( VideoFormatPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( format == rhs.format )
+          && ( componentMapping == rhs.componentMapping )
+          && ( imageCreateFlags == rhs.imageCreateFlags )
+          && ( imageType == rhs.imageType )
+          && ( imageTiling == rhs.imageTiling )
+          && ( imageUsageFlags == rhs.imageUsageFlags );
+#endif
+    }
+
+    bool operator!=( VideoFormatPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoFormatPropertiesKHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    VULKAN_HPP_NAMESPACE::ComponentMapping componentMapping = {};
+    VULKAN_HPP_NAMESPACE::ImageCreateFlags imageCreateFlags = {};
+    VULKAN_HPP_NAMESPACE::ImageType imageType = VULKAN_HPP_NAMESPACE::ImageType::e1D;
+    VULKAN_HPP_NAMESPACE::ImageTiling imageTiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal;
+    VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsageFlags = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoFormatPropertiesKHR>
+  {
+    using Type = VideoFormatPropertiesKHR;
+  };
+
+  // C++ wrapper for VkVideoProfileInfoKHR: value constructor, chainable
+  // set* methods, member-wise comparison, and zero-copy conversion to/from
+  // the C struct (the reinterpret_casts below assume identical layout).
+  struct VideoProfileInfoKHR
+  {
+    using NativeType = VkVideoProfileInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoProfileInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR VideoProfileInfoKHR(VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR videoCodecOperation_ = VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR::eNone, VULKAN_HPP_NAMESPACE::VideoChromaSubsamplingFlagsKHR chromaSubsampling_ = {}, VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR lumaBitDepth_ = {}, VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR chromaBitDepth_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), videoCodecOperation( videoCodecOperation_ ), chromaSubsampling( chromaSubsampling_ ), lumaBitDepth( lumaBitDepth_ ), chromaBitDepth( chromaBitDepth_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoProfileInfoKHR( VideoProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Construct from the C struct by aliasing it as the C++ wrapper type.
+    VideoProfileInfoKHR( VkVideoProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoProfileInfoKHR( *reinterpret_cast<VideoProfileInfoKHR const *>( &rhs ) )
+    {}
+
+
+    VideoProfileInfoKHR & operator=( VideoProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoProfileInfoKHR & operator=( VkVideoProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each returns *this so calls can be chained.
+    VULKAN_HPP_CONSTEXPR_14 VideoProfileInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoProfileInfoKHR & setVideoCodecOperation( VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR videoCodecOperation_ ) VULKAN_HPP_NOEXCEPT
+    {
+      videoCodecOperation = videoCodecOperation_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoProfileInfoKHR & setChromaSubsampling( VULKAN_HPP_NAMESPACE::VideoChromaSubsamplingFlagsKHR chromaSubsampling_ ) VULKAN_HPP_NOEXCEPT
+    {
+      chromaSubsampling = chromaSubsampling_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoProfileInfoKHR & setLumaBitDepth( VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR lumaBitDepth_ ) VULKAN_HPP_NOEXCEPT
+    {
+      lumaBitDepth = lumaBitDepth_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoProfileInfoKHR & setChromaBitDepth( VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR chromaBitDepth_ ) VULKAN_HPP_NOEXCEPT
+    {
+      chromaBitDepth = chromaBitDepth_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy conversions to the underlying C struct.
+    operator VkVideoProfileInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoProfileInfoKHR*>( this );
+    }
+
+    operator VkVideoProfileInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoProfileInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR const &, VULKAN_HPP_NAMESPACE::VideoChromaSubsamplingFlagsKHR const &, VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR const &, VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, videoCodecOperation, chromaSubsampling, lumaBitDepth, chromaBitDepth );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( VideoProfileInfoKHR const & ) const = default;
+#else
+    bool operator==( VideoProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( videoCodecOperation == rhs.videoCodecOperation )
+          && ( chromaSubsampling == rhs.chromaSubsampling )
+          && ( lumaBitDepth == rhs.lumaBitDepth )
+          && ( chromaBitDepth == rhs.chromaBitDepth );
+#endif
+    }
+
+    bool operator!=( VideoProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // Layout must match VkVideoProfileInfoKHR field-for-field for the casts
+    // above to be valid.
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoProfileInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR videoCodecOperation = VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR::eNone;
+    VULKAN_HPP_NAMESPACE::VideoChromaSubsamplingFlagsKHR chromaSubsampling = {};
+    VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR lumaBitDepth = {};
+    VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR chromaBitDepth = {};
+
+  };
+
+  // CppType trait: maps StructureType::eVideoProfileInfoKHR to the wrapper.
+  template <>
+  struct CppType<StructureType, StructureType::eVideoProfileInfoKHR>
+  {
+    using Type = VideoProfileInfoKHR;
+  };
+
+  // C++ wrapper for VkVideoProfileListInfoKHR: a counted array of
+  // VideoProfileInfoKHR, with an ArrayProxy convenience constructor/setter
+  // that fills profileCount/pProfiles from a single range argument.
+  struct VideoProfileListInfoKHR
+  {
+    using NativeType = VkVideoProfileListInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoProfileListInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR VideoProfileListInfoKHR(uint32_t profileCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pProfiles_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), profileCount( profileCount_ ), pProfiles( pProfiles_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoProfileListInfoKHR( VideoProfileListInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoProfileListInfoKHR( VkVideoProfileListInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoProfileListInfoKHR( *reinterpret_cast<VideoProfileListInfoKHR const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Range constructor: caller keeps ownership of the backing storage
+    // (ArrayProxyNoTemporaries forbids binding to temporaries).
+    VideoProfileListInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR> const & profiles_, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), profileCount( static_cast<uint32_t>( profiles_.size() ) ), pProfiles( profiles_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    VideoProfileListInfoKHR & operator=( VideoProfileListInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoProfileListInfoKHR & operator=( VkVideoProfileListInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoProfileListInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each returns *this so calls can be chained.
+    VULKAN_HPP_CONSTEXPR_14 VideoProfileListInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoProfileListInfoKHR & setProfileCount( uint32_t profileCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      profileCount = profileCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoProfileListInfoKHR & setPProfiles( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pProfiles_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pProfiles = pProfiles_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // Sets both profileCount and pProfiles from one range.
+    VideoProfileListInfoKHR & setProfiles( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR> const & profiles_ ) VULKAN_HPP_NOEXCEPT
+    {
+      profileCount = static_cast<uint32_t>( profiles_.size() );
+      pProfiles = profiles_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy conversions to the underlying C struct.
+    operator VkVideoProfileListInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoProfileListInfoKHR*>( this );
+    }
+
+    operator VkVideoProfileListInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoProfileListInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, profileCount, pProfiles );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( VideoProfileListInfoKHR const & ) const = default;
+#else
+    // NOTE: compares the pProfiles pointer, not the pointed-to profiles.
+    bool operator==( VideoProfileListInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( profileCount == rhs.profileCount )
+          && ( pProfiles == rhs.pProfiles );
+#endif
+    }
+
+    bool operator!=( VideoProfileListInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // Layout must match VkVideoProfileListInfoKHR field-for-field for the
+    // casts above to be valid.
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoProfileListInfoKHR;
+    const void * pNext = {};
+    uint32_t profileCount = {};
+    const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pProfiles = {};
+
+  };
+
+  // CppType trait: maps StructureType::eVideoProfileListInfoKHR to the wrapper.
+  template <>
+  struct CppType<StructureType, StructureType::eVideoProfileListInfoKHR>
+  {
+    using Type = VideoProfileListInfoKHR;
+  };
+
+  // C++ wrapper for VkVideoSessionCreateInfoKHR: parameters for creating a
+  // video session (queue family, profile, picture formats, coded extent and
+  // DPB limits), with chainable setters and zero-copy C-struct conversion.
+  struct VideoSessionCreateInfoKHR
+  {
+    using NativeType = VkVideoSessionCreateInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoSessionCreateInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR(uint32_t queueFamilyIndex_ = {}, VULKAN_HPP_NAMESPACE::VideoSessionCreateFlagsKHR flags_ = {}, const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pVideoProfile_ = {}, VULKAN_HPP_NAMESPACE::Format pictureFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::Extent2D maxCodedExtent_ = {}, VULKAN_HPP_NAMESPACE::Format referencePictureFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, uint32_t maxDpbSlots_ = {}, uint32_t maxActiveReferencePictures_ = {}, const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdHeaderVersion_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), queueFamilyIndex( queueFamilyIndex_ ), flags( flags_ ), pVideoProfile( pVideoProfile_ ), pictureFormat( pictureFormat_ ), maxCodedExtent( maxCodedExtent_ ), referencePictureFormat( referencePictureFormat_ ), maxDpbSlots( maxDpbSlots_ ), maxActiveReferencePictures( maxActiveReferencePictures_ ), pStdHeaderVersion( pStdHeaderVersion_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR( VideoSessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoSessionCreateInfoKHR( VkVideoSessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoSessionCreateInfoKHR( *reinterpret_cast<VideoSessionCreateInfoKHR const *>( &rhs ) )
+    {}
+
+
+    VideoSessionCreateInfoKHR & operator=( VideoSessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoSessionCreateInfoKHR & operator=( VkVideoSessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each returns *this so calls can be chained.
+    VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueFamilyIndex = queueFamilyIndex_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoSessionCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setPVideoProfile( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pVideoProfile_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pVideoProfile = pVideoProfile_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setPictureFormat( VULKAN_HPP_NAMESPACE::Format pictureFormat_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pictureFormat = pictureFormat_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setMaxCodedExtent( VULKAN_HPP_NAMESPACE::Extent2D const & maxCodedExtent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxCodedExtent = maxCodedExtent_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setReferencePictureFormat( VULKAN_HPP_NAMESPACE::Format referencePictureFormat_ ) VULKAN_HPP_NOEXCEPT
+    {
+      referencePictureFormat = referencePictureFormat_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setMaxDpbSlots( uint32_t maxDpbSlots_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxDpbSlots = maxDpbSlots_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setMaxActiveReferencePictures( uint32_t maxActiveReferencePictures_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxActiveReferencePictures = maxActiveReferencePictures_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setPStdHeaderVersion( const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdHeaderVersion_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pStdHeaderVersion = pStdHeaderVersion_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy conversions to the underlying C struct.
+    operator VkVideoSessionCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoSessionCreateInfoKHR*>( this );
+    }
+
+    operator VkVideoSessionCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoSessionCreateInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::VideoSessionCreateFlagsKHR const &, const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * const &, VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::Format const &, uint32_t const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::ExtensionProperties * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, queueFamilyIndex, flags, pVideoProfile, pictureFormat, maxCodedExtent, referencePictureFormat, maxDpbSlots, maxActiveReferencePictures, pStdHeaderVersion );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( VideoSessionCreateInfoKHR const & ) const = default;
+#else
+    // NOTE: pointer members (pVideoProfile, pStdHeaderVersion) are compared
+    // by address, not by pointee content.
+    bool operator==( VideoSessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( queueFamilyIndex == rhs.queueFamilyIndex )
+          && ( flags == rhs.flags )
+          && ( pVideoProfile == rhs.pVideoProfile )
+          && ( pictureFormat == rhs.pictureFormat )
+          && ( maxCodedExtent == rhs.maxCodedExtent )
+          && ( referencePictureFormat == rhs.referencePictureFormat )
+          && ( maxDpbSlots == rhs.maxDpbSlots )
+          && ( maxActiveReferencePictures == rhs.maxActiveReferencePictures )
+          && ( pStdHeaderVersion == rhs.pStdHeaderVersion );
+#endif
+    }
+
+    bool operator!=( VideoSessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // Layout must match VkVideoSessionCreateInfoKHR field-for-field for the
+    // casts above to be valid.
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoSessionCreateInfoKHR;
+    const void * pNext = {};
+    uint32_t queueFamilyIndex = {};
+    VULKAN_HPP_NAMESPACE::VideoSessionCreateFlagsKHR flags = {};
+    const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pVideoProfile = {};
+    VULKAN_HPP_NAMESPACE::Format pictureFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    VULKAN_HPP_NAMESPACE::Extent2D maxCodedExtent = {};
+    VULKAN_HPP_NAMESPACE::Format referencePictureFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    uint32_t maxDpbSlots = {};
+    uint32_t maxActiveReferencePictures = {};
+    const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdHeaderVersion = {};
+
+  };
+
+  // CppType trait: maps StructureType::eVideoSessionCreateInfoKHR to the wrapper.
+  template <>
+  struct CppType<StructureType, StructureType::eVideoSessionCreateInfoKHR>
+  {
+    using Type = VideoSessionCreateInfoKHR;
+  };
+
+  // C++ wrapper for VkVideoSessionMemoryRequirementsKHR. Unlike the
+  // *CreateInfo structs above this carries a non-const pNext and no set*
+  // methods — it is filled in by the implementation rather than the caller.
+  struct VideoSessionMemoryRequirementsKHR
+  {
+    using NativeType = VkVideoSessionMemoryRequirementsKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoSessionMemoryRequirementsKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR VideoSessionMemoryRequirementsKHR(uint32_t memoryBindIndex_ = {}, VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), memoryBindIndex( memoryBindIndex_ ), memoryRequirements( memoryRequirements_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoSessionMemoryRequirementsKHR( VideoSessionMemoryRequirementsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoSessionMemoryRequirementsKHR( VkVideoSessionMemoryRequirementsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoSessionMemoryRequirementsKHR( *reinterpret_cast<VideoSessionMemoryRequirementsKHR const *>( &rhs ) )
+    {}
+
+
+    VideoSessionMemoryRequirementsKHR & operator=( VideoSessionMemoryRequirementsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoSessionMemoryRequirementsKHR & operator=( VkVideoSessionMemoryRequirementsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR const *>( &rhs );
+      return *this;
+    }
+
+
+    // Zero-copy conversions to the underlying C struct.
+    operator VkVideoSessionMemoryRequirementsKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoSessionMemoryRequirementsKHR*>( this );
+    }
+
+    operator VkVideoSessionMemoryRequirementsKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoSessionMemoryRequirementsKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::MemoryRequirements const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, memoryBindIndex, memoryRequirements );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( VideoSessionMemoryRequirementsKHR const & ) const = default;
+#else
+    bool operator==( VideoSessionMemoryRequirementsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memoryBindIndex == rhs.memoryBindIndex )
+          && ( memoryRequirements == rhs.memoryRequirements );
+#endif
+    }
+
+    bool operator!=( VideoSessionMemoryRequirementsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // Layout must match VkVideoSessionMemoryRequirementsKHR field-for-field
+    // for the casts above to be valid.
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoSessionMemoryRequirementsKHR;
+    void * pNext = {};
+    uint32_t memoryBindIndex = {};
+    VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements = {};
+
+  };
+
+  // CppType trait: maps StructureType::eVideoSessionMemoryRequirementsKHR to the wrapper.
+  template <>
+  struct CppType<StructureType, StructureType::eVideoSessionMemoryRequirementsKHR>
+  {
+    using Type = VideoSessionMemoryRequirementsKHR;
+  };
+
+  // C++ wrapper for VkVideoSessionParametersCreateInfoKHR: creation info for
+  // a video session parameters object (optional template object plus the
+  // owning video session), with chainable setters and C-struct conversion.
+  struct VideoSessionParametersCreateInfoKHR
+  {
+    using NativeType = VkVideoSessionParametersCreateInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoSessionParametersCreateInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR VideoSessionParametersCreateInfoKHR(VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateFlagsKHR flags_ = {}, VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParametersTemplate_ = {}, VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), videoSessionParametersTemplate( videoSessionParametersTemplate_ ), videoSession( videoSession_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoSessionParametersCreateInfoKHR( VideoSessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoSessionParametersCreateInfoKHR( VkVideoSessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoSessionParametersCreateInfoKHR( *reinterpret_cast<VideoSessionParametersCreateInfoKHR const *>( &rhs ) )
+    {}
+
+
+    VideoSessionParametersCreateInfoKHR & operator=( VideoSessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoSessionParametersCreateInfoKHR & operator=( VkVideoSessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each returns *this so calls can be chained.
+    VULKAN_HPP_CONSTEXPR_14 VideoSessionParametersCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoSessionParametersCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoSessionParametersCreateInfoKHR & setVideoSessionParametersTemplate( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParametersTemplate_ ) VULKAN_HPP_NOEXCEPT
+    {
+      videoSessionParametersTemplate = videoSessionParametersTemplate_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoSessionParametersCreateInfoKHR & setVideoSession( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_ ) VULKAN_HPP_NOEXCEPT
+    {
+      videoSession = videoSession_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy conversions to the underlying C struct.
+    operator VkVideoSessionParametersCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoSessionParametersCreateInfoKHR*>( this );
+    }
+
+    operator VkVideoSessionParametersCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoSessionParametersCreateInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateFlagsKHR const &, VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR const &, VULKAN_HPP_NAMESPACE::VideoSessionKHR const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, videoSessionParametersTemplate, videoSession );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( VideoSessionParametersCreateInfoKHR const & ) const = default;
+#else
+    bool operator==( VideoSessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( videoSessionParametersTemplate == rhs.videoSessionParametersTemplate )
+          && ( videoSession == rhs.videoSession );
+#endif
+    }
+
+    bool operator!=( VideoSessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // Layout must match VkVideoSessionParametersCreateInfoKHR field-for-field
+    // for the casts above to be valid.
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoSessionParametersCreateInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateFlagsKHR flags = {};
+    VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParametersTemplate = {};
+    VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession = {};
+
+  };
+
+  // CppType trait: maps StructureType::eVideoSessionParametersCreateInfoKHR to the wrapper.
+  template <>
+  struct CppType<StructureType, StructureType::eVideoSessionParametersCreateInfoKHR>
+  {
+    using Type = VideoSessionParametersCreateInfoKHR;
+  };
+
+  // C++ wrapper for VkVideoSessionParametersUpdateInfoKHR: carries only an
+  // update sequence counter, with chainable setters and C-struct conversion.
+  struct VideoSessionParametersUpdateInfoKHR
+  {
+    using NativeType = VkVideoSessionParametersUpdateInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoSessionParametersUpdateInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR VideoSessionParametersUpdateInfoKHR(uint32_t updateSequenceCount_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), updateSequenceCount( updateSequenceCount_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoSessionParametersUpdateInfoKHR( VideoSessionParametersUpdateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoSessionParametersUpdateInfoKHR( VkVideoSessionParametersUpdateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoSessionParametersUpdateInfoKHR( *reinterpret_cast<VideoSessionParametersUpdateInfoKHR const *>( &rhs ) )
+    {}
+
+
+    VideoSessionParametersUpdateInfoKHR & operator=( VideoSessionParametersUpdateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoSessionParametersUpdateInfoKHR & operator=( VkVideoSessionParametersUpdateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each returns *this so calls can be chained.
+    VULKAN_HPP_CONSTEXPR_14 VideoSessionParametersUpdateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoSessionParametersUpdateInfoKHR & setUpdateSequenceCount( uint32_t updateSequenceCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      updateSequenceCount = updateSequenceCount_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy conversions to the underlying C struct.
+    operator VkVideoSessionParametersUpdateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoSessionParametersUpdateInfoKHR*>( this );
+    }
+
+    operator VkVideoSessionParametersUpdateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoSessionParametersUpdateInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, updateSequenceCount );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( VideoSessionParametersUpdateInfoKHR const & ) const = default;
+#else
+    bool operator==( VideoSessionParametersUpdateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( updateSequenceCount == rhs.updateSequenceCount );
+#endif
+    }
+
+    bool operator!=( VideoSessionParametersUpdateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // Layout must match VkVideoSessionParametersUpdateInfoKHR field-for-field
+    // for the casts above to be valid.
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoSessionParametersUpdateInfoKHR;
+    const void * pNext = {};
+    uint32_t updateSequenceCount = {};
+
+  };
+
+  // CppType trait: maps StructureType::eVideoSessionParametersUpdateInfoKHR to the wrapper.
+  template <>
+  struct CppType<StructureType, StructureType::eVideoSessionParametersUpdateInfoKHR>
+  {
+    using Type = VideoSessionParametersUpdateInfoKHR;
+  };
+
+// Only compiled for Wayland WSI builds (VK_USE_PLATFORM_WAYLAND_KHR).
+#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
+  // C++ wrapper for VkWaylandSurfaceCreateInfoKHR: wraps the wl_display /
+  // wl_surface pair used to create a Wayland presentation surface, with
+  // chainable setters and zero-copy C-struct conversion.
+  struct WaylandSurfaceCreateInfoKHR
+  {
+    using NativeType = VkWaylandSurfaceCreateInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWaylandSurfaceCreateInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR WaylandSurfaceCreateInfoKHR(VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR flags_ = {}, struct wl_display * display_ = {}, struct wl_surface * surface_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), display( display_ ), surface( surface_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR WaylandSurfaceCreateInfoKHR( WaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    WaylandSurfaceCreateInfoKHR( VkWaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : WaylandSurfaceCreateInfoKHR( *reinterpret_cast<WaylandSurfaceCreateInfoKHR const *>( &rhs ) )
+    {}
+
+
+    WaylandSurfaceCreateInfoKHR & operator=( WaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    WaylandSurfaceCreateInfoKHR & operator=( VkWaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters: each returns *this so calls can be chained.
+    VULKAN_HPP_CONSTEXPR_14 WaylandSurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 WaylandSurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 WaylandSurfaceCreateInfoKHR & setDisplay( struct wl_display * display_ ) VULKAN_HPP_NOEXCEPT
+    {
+      display = display_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 WaylandSurfaceCreateInfoKHR & setSurface( struct wl_surface * surface_ ) VULKAN_HPP_NOEXCEPT
+    {
+      surface = surface_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-copy conversions to the underlying C struct.
+    operator VkWaylandSurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR*>( this );
+    }
+
+    operator VkWaylandSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkWaylandSurfaceCreateInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR const &, struct wl_display * const &, struct wl_surface * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, display, surface );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( WaylandSurfaceCreateInfoKHR const & ) const = default;
+#else
+    // NOTE: display/surface handles are compared by pointer value.
+    bool operator==( WaylandSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( display == rhs.display )
+          && ( surface == rhs.surface );
+#endif
+    }
+
+    bool operator!=( WaylandSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    // Layout must match VkWaylandSurfaceCreateInfoKHR field-for-field for the
+    // casts above to be valid.
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWaylandSurfaceCreateInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR flags = {};
+    struct wl_display * display = {};
+    struct wl_surface * surface = {};
+
+  };
+
+  // CppType trait: maps StructureType::eWaylandSurfaceCreateInfoKHR to the wrapper.
+  template <>
+  struct CppType<StructureType, StructureType::eWaylandSurfaceCreateInfoKHR>
+  {
+    using Type = WaylandSurfaceCreateInfoKHR;
+  };
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  // C++ wrapper for VkWin32KeyedMutexAcquireReleaseInfoKHR (VK_KHR_win32_keyed_mutex).
+  // Describes D3D keyed-mutex acquire/release operations chained into a queue submit.
+  // Layout-compatible with the C struct, so the reinterpret_cast conversions below are valid.
+  struct Win32KeyedMutexAcquireReleaseInfoKHR
+  {
+    using NativeType = VkWin32KeyedMutexAcquireReleaseInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Element-wise constructor; all-default arguments yield an empty (zero-count) info struct.
+VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoKHR(uint32_t acquireCount_ = {}, const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs_ = {}, const uint64_t * pAcquireKeys_ = {}, const uint32_t * pAcquireTimeouts_ = {}, uint32_t releaseCount_ = {}, const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs_ = {}, const uint64_t * pReleaseKeys_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), acquireCount( acquireCount_ ), pAcquireSyncs( pAcquireSyncs_ ), pAcquireKeys( pAcquireKeys_ ), pAcquireTimeouts( pAcquireTimeouts_ ), releaseCount( releaseCount_ ), pReleaseSyncs( pReleaseSyncs_ ), pReleaseKeys( pReleaseKeys_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoKHR( Win32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Conversion from the C struct; relies on identical memory layout.
+    Win32KeyedMutexAcquireReleaseInfoKHR( VkWin32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : Win32KeyedMutexAcquireReleaseInfoKHR( *reinterpret_cast<Win32KeyedMutexAcquireReleaseInfoKHR const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // ArrayProxy constructor: counts are derived from the sync arrays; the parallel
+    // key/timeout arrays must match in size (asserted or thrown below).
+    Win32KeyedMutexAcquireReleaseInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & acquireSyncs_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & acquireKeys_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & acquireTimeouts_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & releaseSyncs_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & releaseKeys_ = {}, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), acquireCount( static_cast<uint32_t>( acquireSyncs_.size() ) ), pAcquireSyncs( acquireSyncs_.data() ), pAcquireKeys( acquireKeys_.data() ), pAcquireTimeouts( acquireTimeouts_.data() ), releaseCount( static_cast<uint32_t>( releaseSyncs_.size() ) ), pReleaseSyncs( releaseSyncs_.data() ), pReleaseKeys( releaseKeys_.data() )
+    {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+      VULKAN_HPP_ASSERT( acquireSyncs_.size() == acquireKeys_.size() );
+      VULKAN_HPP_ASSERT( acquireSyncs_.size() == acquireTimeouts_.size() );
+      VULKAN_HPP_ASSERT( acquireKeys_.size() == acquireTimeouts_.size() );
+#else
+      if ( acquireSyncs_.size() != acquireKeys_.size() )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING"::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: acquireSyncs_.size() != acquireKeys_.size()" );
+      }
+      if ( acquireSyncs_.size() != acquireTimeouts_.size() )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING"::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: acquireSyncs_.size() != acquireTimeouts_.size()" );
+      }
+      if ( acquireKeys_.size() != acquireTimeouts_.size() )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING"::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: acquireKeys_.size() != acquireTimeouts_.size()" );
+      }
+#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+    
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+      VULKAN_HPP_ASSERT( releaseSyncs_.size() == releaseKeys_.size() );
+#else
+      if ( releaseSyncs_.size() != releaseKeys_.size() )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING"::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: releaseSyncs_.size() != releaseKeys_.size()" );
+      }
+#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    Win32KeyedMutexAcquireReleaseInfoKHR & operator=( Win32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assignment from the C struct; again relies on layout compatibility.
+    Win32KeyedMutexAcquireReleaseInfoKHR & operator=( VkWin32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters (return *this for chaining). The Array-proxy variants also
+    // update the corresponding count field to keep pointer/count pairs consistent.
+    VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & setAcquireCount( uint32_t acquireCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      acquireCount = acquireCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & setPAcquireSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAcquireSyncs = pAcquireSyncs_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    Win32KeyedMutexAcquireReleaseInfoKHR & setAcquireSyncs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & acquireSyncs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      acquireCount = static_cast<uint32_t>( acquireSyncs_.size() );
+      pAcquireSyncs = acquireSyncs_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & setPAcquireKeys( const uint64_t * pAcquireKeys_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAcquireKeys = pAcquireKeys_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    Win32KeyedMutexAcquireReleaseInfoKHR & setAcquireKeys( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & acquireKeys_ ) VULKAN_HPP_NOEXCEPT
+    {
+      acquireCount = static_cast<uint32_t>( acquireKeys_.size() );
+      pAcquireKeys = acquireKeys_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & setPAcquireTimeouts( const uint32_t * pAcquireTimeouts_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAcquireTimeouts = pAcquireTimeouts_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    Win32KeyedMutexAcquireReleaseInfoKHR & setAcquireTimeouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & acquireTimeouts_ ) VULKAN_HPP_NOEXCEPT
+    {
+      acquireCount = static_cast<uint32_t>( acquireTimeouts_.size() );
+      pAcquireTimeouts = acquireTimeouts_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & setReleaseCount( uint32_t releaseCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      releaseCount = releaseCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & setPReleaseSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pReleaseSyncs = pReleaseSyncs_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    Win32KeyedMutexAcquireReleaseInfoKHR & setReleaseSyncs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & releaseSyncs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      releaseCount = static_cast<uint32_t>( releaseSyncs_.size() );
+      pReleaseSyncs = releaseSyncs_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & setPReleaseKeys( const uint64_t * pReleaseKeys_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pReleaseKeys = pReleaseKeys_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    Win32KeyedMutexAcquireReleaseInfoKHR & setReleaseKeys( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & releaseKeys_ ) VULKAN_HPP_NOEXCEPT
+    {
+      releaseCount = static_cast<uint32_t>( releaseKeys_.size() );
+      pReleaseKeys = releaseKeys_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C struct (layout-compatible reinterpret_cast).
+    operator VkWin32KeyedMutexAcquireReleaseInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkWin32KeyedMutexAcquireReleaseInfoKHR*>( this );
+    }
+
+    operator VkWin32KeyedMutexAcquireReleaseInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkWin32KeyedMutexAcquireReleaseInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Member-wise reflection as a tuple of references; used by operator== below.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::DeviceMemory * const &, const uint64_t * const &, const uint32_t * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::DeviceMemory * const &, const uint64_t * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, acquireCount, pAcquireSyncs, pAcquireKeys, pAcquireTimeouts, releaseCount, pReleaseSyncs, pReleaseKeys );
+    }
+#endif
+
+
+    // Comparison is shallow: pointer members compare by address, not pointee contents.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( Win32KeyedMutexAcquireReleaseInfoKHR const & ) const = default;
+#else
+    bool operator==( Win32KeyedMutexAcquireReleaseInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( acquireCount == rhs.acquireCount )
+          && ( pAcquireSyncs == rhs.pAcquireSyncs )
+          && ( pAcquireKeys == rhs.pAcquireKeys )
+          && ( pAcquireTimeouts == rhs.pAcquireTimeouts )
+          && ( releaseCount == rhs.releaseCount )
+          && ( pReleaseSyncs == rhs.pReleaseSyncs )
+          && ( pReleaseKeys == rhs.pReleaseKeys );
+#endif
+    }
+
+    bool operator!=( Win32KeyedMutexAcquireReleaseInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members mirror the C struct field-for-field; order must not change.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR;
+    const void * pNext = {};
+    uint32_t acquireCount = {};
+    const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs = {};
+    const uint64_t * pAcquireKeys = {};
+    const uint32_t * pAcquireTimeouts = {};
+    uint32_t releaseCount = {};
+    const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs = {};
+    const uint64_t * pReleaseKeys = {};
+
+  };
+
+  // Maps the StructureType enumerant back to this C++ type (used by structure chains).
+  template <>
+  struct CppType<StructureType, StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR>
+  {
+    using Type = Win32KeyedMutexAcquireReleaseInfoKHR;
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  // C++ wrapper for VkWin32KeyedMutexAcquireReleaseInfoNV (VK_NV_win32_keyed_mutex).
+  // NV predecessor of the KHR struct above; differs only in the timeout member name
+  // (pAcquireTimeoutMilliseconds). Layout-compatible with the C struct.
+  struct Win32KeyedMutexAcquireReleaseInfoNV
+  {
+    using NativeType = VkWin32KeyedMutexAcquireReleaseInfoNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWin32KeyedMutexAcquireReleaseInfoNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Element-wise constructor; all-default arguments yield an empty (zero-count) info struct.
+VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoNV(uint32_t acquireCount_ = {}, const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs_ = {}, const uint64_t * pAcquireKeys_ = {}, const uint32_t * pAcquireTimeoutMilliseconds_ = {}, uint32_t releaseCount_ = {}, const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs_ = {}, const uint64_t * pReleaseKeys_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), acquireCount( acquireCount_ ), pAcquireSyncs( pAcquireSyncs_ ), pAcquireKeys( pAcquireKeys_ ), pAcquireTimeoutMilliseconds( pAcquireTimeoutMilliseconds_ ), releaseCount( releaseCount_ ), pReleaseSyncs( pReleaseSyncs_ ), pReleaseKeys( pReleaseKeys_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoNV( Win32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Conversion from the C struct; relies on identical memory layout.
+    Win32KeyedMutexAcquireReleaseInfoNV( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : Win32KeyedMutexAcquireReleaseInfoNV( *reinterpret_cast<Win32KeyedMutexAcquireReleaseInfoNV const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    // ArrayProxy constructor: counts are derived from the sync arrays; the parallel
+    // key/timeout arrays must match in size (asserted or thrown below).
+    Win32KeyedMutexAcquireReleaseInfoNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & acquireSyncs_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & acquireKeys_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & acquireTimeoutMilliseconds_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & releaseSyncs_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & releaseKeys_ = {}, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), acquireCount( static_cast<uint32_t>( acquireSyncs_.size() ) ), pAcquireSyncs( acquireSyncs_.data() ), pAcquireKeys( acquireKeys_.data() ), pAcquireTimeoutMilliseconds( acquireTimeoutMilliseconds_.data() ), releaseCount( static_cast<uint32_t>( releaseSyncs_.size() ) ), pReleaseSyncs( releaseSyncs_.data() ), pReleaseKeys( releaseKeys_.data() )
+    {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+      VULKAN_HPP_ASSERT( acquireSyncs_.size() == acquireKeys_.size() );
+      VULKAN_HPP_ASSERT( acquireSyncs_.size() == acquireTimeoutMilliseconds_.size() );
+      VULKAN_HPP_ASSERT( acquireKeys_.size() == acquireTimeoutMilliseconds_.size() );
+#else
+      if ( acquireSyncs_.size() != acquireKeys_.size() )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING"::Win32KeyedMutexAcquireReleaseInfoNV::Win32KeyedMutexAcquireReleaseInfoNV: acquireSyncs_.size() != acquireKeys_.size()" );
+      }
+      if ( acquireSyncs_.size() != acquireTimeoutMilliseconds_.size() )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING"::Win32KeyedMutexAcquireReleaseInfoNV::Win32KeyedMutexAcquireReleaseInfoNV: acquireSyncs_.size() != acquireTimeoutMilliseconds_.size()" );
+      }
+      if ( acquireKeys_.size() != acquireTimeoutMilliseconds_.size() )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING"::Win32KeyedMutexAcquireReleaseInfoNV::Win32KeyedMutexAcquireReleaseInfoNV: acquireKeys_.size() != acquireTimeoutMilliseconds_.size()" );
+      }
+#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+    
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+      VULKAN_HPP_ASSERT( releaseSyncs_.size() == releaseKeys_.size() );
+#else
+      if ( releaseSyncs_.size() != releaseKeys_.size() )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING"::Win32KeyedMutexAcquireReleaseInfoNV::Win32KeyedMutexAcquireReleaseInfoNV: releaseSyncs_.size() != releaseKeys_.size()" );
+      }
+#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    Win32KeyedMutexAcquireReleaseInfoNV & operator=( Win32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assignment from the C struct; again relies on layout compatibility.
+    Win32KeyedMutexAcquireReleaseInfoNV & operator=( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters (return *this for chaining). The Array-proxy variants also
+    // update the corresponding count field to keep pointer/count pairs consistent.
+    VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & setAcquireCount( uint32_t acquireCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      acquireCount = acquireCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & setPAcquireSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAcquireSyncs = pAcquireSyncs_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    Win32KeyedMutexAcquireReleaseInfoNV & setAcquireSyncs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & acquireSyncs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      acquireCount = static_cast<uint32_t>( acquireSyncs_.size() );
+      pAcquireSyncs = acquireSyncs_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & setPAcquireKeys( const uint64_t * pAcquireKeys_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAcquireKeys = pAcquireKeys_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    Win32KeyedMutexAcquireReleaseInfoNV & setAcquireKeys( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & acquireKeys_ ) VULKAN_HPP_NOEXCEPT
+    {
+      acquireCount = static_cast<uint32_t>( acquireKeys_.size() );
+      pAcquireKeys = acquireKeys_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & setPAcquireTimeoutMilliseconds( const uint32_t * pAcquireTimeoutMilliseconds_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAcquireTimeoutMilliseconds = pAcquireTimeoutMilliseconds_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    Win32KeyedMutexAcquireReleaseInfoNV & setAcquireTimeoutMilliseconds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & acquireTimeoutMilliseconds_ ) VULKAN_HPP_NOEXCEPT
+    {
+      acquireCount = static_cast<uint32_t>( acquireTimeoutMilliseconds_.size() );
+      pAcquireTimeoutMilliseconds = acquireTimeoutMilliseconds_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & setReleaseCount( uint32_t releaseCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      releaseCount = releaseCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & setPReleaseSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pReleaseSyncs = pReleaseSyncs_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    Win32KeyedMutexAcquireReleaseInfoNV & setReleaseSyncs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & releaseSyncs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      releaseCount = static_cast<uint32_t>( releaseSyncs_.size() );
+      pReleaseSyncs = releaseSyncs_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & setPReleaseKeys( const uint64_t * pReleaseKeys_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pReleaseKeys = pReleaseKeys_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    Win32KeyedMutexAcquireReleaseInfoNV & setReleaseKeys( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & releaseKeys_ ) VULKAN_HPP_NOEXCEPT
+    {
+      releaseCount = static_cast<uint32_t>( releaseKeys_.size() );
+      pReleaseKeys = releaseKeys_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C struct (layout-compatible reinterpret_cast).
+    operator VkWin32KeyedMutexAcquireReleaseInfoNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkWin32KeyedMutexAcquireReleaseInfoNV*>( this );
+    }
+
+    operator VkWin32KeyedMutexAcquireReleaseInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkWin32KeyedMutexAcquireReleaseInfoNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Member-wise reflection as a tuple of references; used by operator== below.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::DeviceMemory * const &, const uint64_t * const &, const uint32_t * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::DeviceMemory * const &, const uint64_t * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, acquireCount, pAcquireSyncs, pAcquireKeys, pAcquireTimeoutMilliseconds, releaseCount, pReleaseSyncs, pReleaseKeys );
+    }
+#endif
+
+
+    // Comparison is shallow: pointer members compare by address, not pointee contents.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( Win32KeyedMutexAcquireReleaseInfoNV const & ) const = default;
+#else
+    bool operator==( Win32KeyedMutexAcquireReleaseInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( acquireCount == rhs.acquireCount )
+          && ( pAcquireSyncs == rhs.pAcquireSyncs )
+          && ( pAcquireKeys == rhs.pAcquireKeys )
+          && ( pAcquireTimeoutMilliseconds == rhs.pAcquireTimeoutMilliseconds )
+          && ( releaseCount == rhs.releaseCount )
+          && ( pReleaseSyncs == rhs.pReleaseSyncs )
+          && ( pReleaseKeys == rhs.pReleaseKeys );
+#endif
+    }
+
+    bool operator!=( Win32KeyedMutexAcquireReleaseInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members mirror the C struct field-for-field; order must not change.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWin32KeyedMutexAcquireReleaseInfoNV;
+    const void * pNext = {};
+    uint32_t acquireCount = {};
+    const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs = {};
+    const uint64_t * pAcquireKeys = {};
+    const uint32_t * pAcquireTimeoutMilliseconds = {};
+    uint32_t releaseCount = {};
+    const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs = {};
+    const uint64_t * pReleaseKeys = {};
+
+  };
+
+  // Maps the StructureType enumerant back to this C++ type (used by structure chains).
+  template <>
+  struct CppType<StructureType, StructureType::eWin32KeyedMutexAcquireReleaseInfoNV>
+  {
+    using Type = Win32KeyedMutexAcquireReleaseInfoNV;
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  // C++ wrapper for VkWin32SurfaceCreateInfoKHR (VK_KHR_win32_surface):
+  // parameters for creating a presentation surface from a Win32 HINSTANCE/HWND pair.
+  // Layout-compatible with the C struct, so the reinterpret_cast conversions below are valid.
+  struct Win32SurfaceCreateInfoKHR
+  {
+    using NativeType = VkWin32SurfaceCreateInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWin32SurfaceCreateInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    // Element-wise constructor; flags are reserved (no values defined by the extension).
+VULKAN_HPP_CONSTEXPR Win32SurfaceCreateInfoKHR(VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR flags_ = {}, HINSTANCE hinstance_ = {}, HWND hwnd_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), hinstance( hinstance_ ), hwnd( hwnd_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR Win32SurfaceCreateInfoKHR( Win32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    // Conversion from the C struct; relies on identical memory layout.
+    Win32SurfaceCreateInfoKHR( VkWin32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : Win32SurfaceCreateInfoKHR( *reinterpret_cast<Win32SurfaceCreateInfoKHR const *>( &rhs ) )
+    {}
+
+
+    Win32SurfaceCreateInfoKHR & operator=( Win32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    // Assignment from the C struct; again relies on layout compatibility.
+    Win32SurfaceCreateInfoKHR & operator=( VkWin32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    // Fluent setters (return *this for chaining).
+    VULKAN_HPP_CONSTEXPR_14 Win32SurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 Win32SurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 Win32SurfaceCreateInfoKHR & setHinstance( HINSTANCE hinstance_ ) VULKAN_HPP_NOEXCEPT
+    {
+      hinstance = hinstance_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 Win32SurfaceCreateInfoKHR & setHwnd( HWND hwnd_ ) VULKAN_HPP_NOEXCEPT
+    {
+      hwnd = hwnd_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    // Zero-cost conversions to the C struct (layout-compatible reinterpret_cast).
+    operator VkWin32SurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkWin32SurfaceCreateInfoKHR*>( this );
+    }
+
+    operator VkWin32SurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkWin32SurfaceCreateInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    // Member-wise reflection as a tuple of references; used by operator== below.
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR const &, HINSTANCE const &, HWND const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, hinstance, hwnd );
+    }
+#endif
+
+
+    // Comparison is shallow: handles and pNext compare by value/address.
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( Win32SurfaceCreateInfoKHR const & ) const = default;
+#else
+    bool operator==( Win32SurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( hinstance == rhs.hinstance )
+          && ( hwnd == rhs.hwnd );
+#endif
+    }
+
+    bool operator!=( Win32SurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    // Members mirror the C struct field-for-field; order must not change.
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWin32SurfaceCreateInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR flags = {};
+    HINSTANCE hinstance = {};
+    HWND hwnd = {};
+
+  };
+
+  // Maps the StructureType enumerant back to this C++ type (used by structure chains).
+  template <>
+  struct CppType<StructureType, StructureType::eWin32SurfaceCreateInfoKHR>
+  {
+    using Type = Win32SurfaceCreateInfoKHR;
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  struct WriteDescriptorSet
+  {
+    using NativeType = VkWriteDescriptorSet;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteDescriptorSet;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR WriteDescriptorSet(VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ = {}, uint32_t dstBinding_ = {}, uint32_t dstArrayElement_ = {}, uint32_t descriptorCount_ = {}, VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pImageInfo_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo * pBufferInfo_ = {}, const VULKAN_HPP_NAMESPACE::BufferView * pTexelBufferView_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), dstSet( dstSet_ ), dstBinding( dstBinding_ ), dstArrayElement( dstArrayElement_ ), descriptorCount( descriptorCount_ ), descriptorType( descriptorType_ ), pImageInfo( pImageInfo_ ), pBufferInfo( pBufferInfo_ ), pTexelBufferView( pTexelBufferView_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR WriteDescriptorSet( WriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    WriteDescriptorSet( VkWriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
+      : WriteDescriptorSet( *reinterpret_cast<WriteDescriptorSet const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    WriteDescriptorSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_, uint32_t dstBinding_, uint32_t dstArrayElement_, VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorImageInfo> const & imageInfo_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo> const & bufferInfo_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferView> const & texelBufferView_ = {}, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), dstSet( dstSet_ ), dstBinding( dstBinding_ ), dstArrayElement( dstArrayElement_ ), descriptorCount( static_cast<uint32_t>( !imageInfo_.empty() ? imageInfo_.size() : !bufferInfo_.empty() ? bufferInfo_.size() : texelBufferView_.size() ) ), descriptorType( descriptorType_ ), pImageInfo( imageInfo_.data() ), pBufferInfo( bufferInfo_.data() ), pTexelBufferView( texelBufferView_.data() )
+    {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+      VULKAN_HPP_ASSERT( ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() ) <= 1);
+#else
+      if ( 1 < ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() ) )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING"::WriteDescriptorSet::WriteDescriptorSet: 1 < ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() )" );
+      }
+#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    WriteDescriptorSet & operator=( WriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    WriteDescriptorSet & operator=( VkWriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WriteDescriptorSet const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setDstSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstSet = dstSet_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setDstBinding( uint32_t dstBinding_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstBinding = dstBinding_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstArrayElement = dstArrayElement_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorCount = descriptorCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorType = descriptorType_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setPImageInfo( const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pImageInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pImageInfo = pImageInfo_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    WriteDescriptorSet & setImageInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorImageInfo> const & imageInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorCount = static_cast<uint32_t>( imageInfo_.size() );
+      pImageInfo = imageInfo_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setPBufferInfo( const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo * pBufferInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pBufferInfo = pBufferInfo_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    WriteDescriptorSet & setBufferInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo> const & bufferInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorCount = static_cast<uint32_t>( bufferInfo_.size() );
+      pBufferInfo = bufferInfo_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setPTexelBufferView( const VULKAN_HPP_NAMESPACE::BufferView * pTexelBufferView_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pTexelBufferView = pTexelBufferView_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    WriteDescriptorSet & setTexelBufferView( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferView> const & texelBufferView_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorCount = static_cast<uint32_t>( texelBufferView_.size() );
+      pTexelBufferView = texelBufferView_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkWriteDescriptorSet const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkWriteDescriptorSet*>( this );
+    }
+
+    operator VkWriteDescriptorSet &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkWriteDescriptorSet*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DescriptorSet const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DescriptorType const &, const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * const &, const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo * const &, const VULKAN_HPP_NAMESPACE::BufferView * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, dstSet, dstBinding, dstArrayElement, descriptorCount, descriptorType, pImageInfo, pBufferInfo, pTexelBufferView );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( WriteDescriptorSet const & ) const = default;
+#else
+    bool operator==( WriteDescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( dstSet == rhs.dstSet )
+          && ( dstBinding == rhs.dstBinding )
+          && ( dstArrayElement == rhs.dstArrayElement )
+          && ( descriptorCount == rhs.descriptorCount )
+          && ( descriptorType == rhs.descriptorType )
+          && ( pImageInfo == rhs.pImageInfo )
+          && ( pBufferInfo == rhs.pBufferInfo )
+          && ( pTexelBufferView == rhs.pTexelBufferView );
+#endif
+    }
+
+    bool operator!=( WriteDescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSet;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DescriptorSet dstSet = {};
+    uint32_t dstBinding = {};
+    uint32_t dstArrayElement = {};
+    uint32_t descriptorCount = {};
+    VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
+    const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pImageInfo = {};
+    const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo * pBufferInfo = {};
+    const VULKAN_HPP_NAMESPACE::BufferView * pTexelBufferView = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eWriteDescriptorSet>
+  {
+    using Type = WriteDescriptorSet;
+  };
+
+  struct WriteDescriptorSetAccelerationStructureKHR
+  {
+    using NativeType = VkWriteDescriptorSetAccelerationStructureKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteDescriptorSetAccelerationStructureKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureKHR(uint32_t accelerationStructureCount_ = {}, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), accelerationStructureCount( accelerationStructureCount_ ), pAccelerationStructures( pAccelerationStructures_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureKHR( WriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    WriteDescriptorSetAccelerationStructureKHR( VkWriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : WriteDescriptorSetAccelerationStructureKHR( *reinterpret_cast<WriteDescriptorSetAccelerationStructureKHR const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    WriteDescriptorSetAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures_, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), accelerationStructureCount( static_cast<uint32_t>( accelerationStructures_.size() ) ), pAccelerationStructures( accelerationStructures_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    WriteDescriptorSetAccelerationStructureKHR & operator=( WriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    WriteDescriptorSetAccelerationStructureKHR & operator=( VkWriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureKHR & setAccelerationStructureCount( uint32_t accelerationStructureCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      accelerationStructureCount = accelerationStructureCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureKHR & setPAccelerationStructures( const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAccelerationStructures = pAccelerationStructures_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    WriteDescriptorSetAccelerationStructureKHR & setAccelerationStructures( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures_ ) VULKAN_HPP_NOEXCEPT
+    {
+      accelerationStructureCount = static_cast<uint32_t>( accelerationStructures_.size() );
+      pAccelerationStructures = accelerationStructures_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkWriteDescriptorSetAccelerationStructureKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkWriteDescriptorSetAccelerationStructureKHR*>( this );
+    }
+
+    operator VkWriteDescriptorSetAccelerationStructureKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkWriteDescriptorSetAccelerationStructureKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, accelerationStructureCount, pAccelerationStructures );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( WriteDescriptorSetAccelerationStructureKHR const & ) const = default;
+#else
+    bool operator==( WriteDescriptorSetAccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( accelerationStructureCount == rhs.accelerationStructureCount )
+          && ( pAccelerationStructures == rhs.pAccelerationStructures );
+#endif
+    }
+
+    bool operator!=( WriteDescriptorSetAccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSetAccelerationStructureKHR;
+    const void * pNext = {};
+    uint32_t accelerationStructureCount = {};
+    const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eWriteDescriptorSetAccelerationStructureKHR>
+  {
+    using Type = WriteDescriptorSetAccelerationStructureKHR;
+  };
+
+  struct WriteDescriptorSetAccelerationStructureNV
+  {
+    using NativeType = VkWriteDescriptorSetAccelerationStructureNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteDescriptorSetAccelerationStructureNV;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureNV(uint32_t accelerationStructureCount_ = {}, const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), accelerationStructureCount( accelerationStructureCount_ ), pAccelerationStructures( pAccelerationStructures_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureNV( WriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    WriteDescriptorSetAccelerationStructureNV( VkWriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : WriteDescriptorSetAccelerationStructureNV( *reinterpret_cast<WriteDescriptorSetAccelerationStructureNV const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    WriteDescriptorSetAccelerationStructureNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> const & accelerationStructures_, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), accelerationStructureCount( static_cast<uint32_t>( accelerationStructures_.size() ) ), pAccelerationStructures( accelerationStructures_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    WriteDescriptorSetAccelerationStructureNV & operator=( WriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    WriteDescriptorSetAccelerationStructureNV & operator=( VkWriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureNV & setAccelerationStructureCount( uint32_t accelerationStructureCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      accelerationStructureCount = accelerationStructureCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureNV & setPAccelerationStructures( const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAccelerationStructures = pAccelerationStructures_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    WriteDescriptorSetAccelerationStructureNV & setAccelerationStructures( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> const & accelerationStructures_ ) VULKAN_HPP_NOEXCEPT
+    {
+      accelerationStructureCount = static_cast<uint32_t>( accelerationStructures_.size() );
+      pAccelerationStructures = accelerationStructures_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkWriteDescriptorSetAccelerationStructureNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkWriteDescriptorSetAccelerationStructureNV*>( this );
+    }
+
+    operator VkWriteDescriptorSetAccelerationStructureNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkWriteDescriptorSetAccelerationStructureNV*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, accelerationStructureCount, pAccelerationStructures );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( WriteDescriptorSetAccelerationStructureNV const & ) const = default;
+#else
+    bool operator==( WriteDescriptorSetAccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( accelerationStructureCount == rhs.accelerationStructureCount )
+          && ( pAccelerationStructures == rhs.pAccelerationStructures );
+#endif
+    }
+
+    bool operator!=( WriteDescriptorSetAccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSetAccelerationStructureNV;
+    const void * pNext = {};
+    uint32_t accelerationStructureCount = {};
+    const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eWriteDescriptorSetAccelerationStructureNV>
+  {
+    using Type = WriteDescriptorSetAccelerationStructureNV;
+  };
+
+  struct WriteDescriptorSetInlineUniformBlock
+  {
+    using NativeType = VkWriteDescriptorSetInlineUniformBlock;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteDescriptorSetInlineUniformBlock;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR WriteDescriptorSetInlineUniformBlock(uint32_t dataSize_ = {}, const void * pData_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), dataSize( dataSize_ ), pData( pData_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR WriteDescriptorSetInlineUniformBlock( WriteDescriptorSetInlineUniformBlock const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    WriteDescriptorSetInlineUniformBlock( VkWriteDescriptorSetInlineUniformBlock const & rhs ) VULKAN_HPP_NOEXCEPT
+      : WriteDescriptorSetInlineUniformBlock( *reinterpret_cast<WriteDescriptorSetInlineUniformBlock const *>( &rhs ) )
+    {}
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    template <typename T>
+    WriteDescriptorSetInlineUniformBlock( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & data_, const void * pNext_ = nullptr )
+    : pNext( pNext_ ), dataSize( static_cast<uint32_t>( data_.size() * sizeof(T) ) ), pData( data_.data() )
+    {}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    WriteDescriptorSetInlineUniformBlock & operator=( WriteDescriptorSetInlineUniformBlock const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    WriteDescriptorSetInlineUniformBlock & operator=( VkWriteDescriptorSetInlineUniformBlock const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlock const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetInlineUniformBlock & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetInlineUniformBlock & setDataSize( uint32_t dataSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dataSize = dataSize_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetInlineUniformBlock & setPData( const void * pData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pData = pData_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    template <typename T>
+    WriteDescriptorSetInlineUniformBlock & setData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & data_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dataSize = static_cast<uint32_t>( data_.size() * sizeof(T) );
+      pData = data_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkWriteDescriptorSetInlineUniformBlock const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkWriteDescriptorSetInlineUniformBlock*>( this );
+    }
+
+    operator VkWriteDescriptorSetInlineUniformBlock &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkWriteDescriptorSetInlineUniformBlock*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const void * const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, dataSize, pData );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+auto operator<=>( WriteDescriptorSetInlineUniformBlock const & ) const = default;
+#else
+    bool operator==( WriteDescriptorSetInlineUniformBlock const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( dataSize == rhs.dataSize )
+          && ( pData == rhs.pData );
+#endif
+    }
+
+    bool operator!=( WriteDescriptorSetInlineUniformBlock const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSetInlineUniformBlock;
+    const void * pNext = {};
+    uint32_t dataSize = {};
+    const void * pData = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eWriteDescriptorSetInlineUniformBlock>
+  {
+    using Type = WriteDescriptorSetInlineUniformBlock;
+  };
+  using WriteDescriptorSetInlineUniformBlockEXT = WriteDescriptorSetInlineUniformBlock;
+
+#if defined( VK_USE_PLATFORM_XCB_KHR )
+  struct XcbSurfaceCreateInfoKHR
+  {
+    using NativeType = VkXcbSurfaceCreateInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eXcbSurfaceCreateInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR XcbSurfaceCreateInfoKHR(VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR flags_ = {}, xcb_connection_t * connection_ = {}, xcb_window_t window_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), connection( connection_ ), window( window_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR XcbSurfaceCreateInfoKHR( XcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    XcbSurfaceCreateInfoKHR( VkXcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : XcbSurfaceCreateInfoKHR( *reinterpret_cast<XcbSurfaceCreateInfoKHR const *>( &rhs ) )
+    {}
+
+
+    XcbSurfaceCreateInfoKHR & operator=( XcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    XcbSurfaceCreateInfoKHR & operator=( VkXcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 XcbSurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 XcbSurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 XcbSurfaceCreateInfoKHR & setConnection( xcb_connection_t * connection_ ) VULKAN_HPP_NOEXCEPT
+    {
+      connection = connection_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 XcbSurfaceCreateInfoKHR & setWindow( xcb_window_t window_ ) VULKAN_HPP_NOEXCEPT
+    {
+      window = window_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkXcbSurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkXcbSurfaceCreateInfoKHR*>( this );
+    }
+
+    operator VkXcbSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkXcbSurfaceCreateInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR const &, xcb_connection_t * const &, xcb_window_t const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, connection, window );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    std::strong_ordering operator<=>( XcbSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
+      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
+      if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) return cmp;
+      if ( auto cmp = connection <=> rhs.connection; cmp != 0 ) return cmp;
+      if ( auto cmp = memcmp( &window, &rhs.window, sizeof( xcb_window_t ) ); cmp != 0 )
+        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
+
+      return std::strong_ordering::equivalent;
+    }
+#endif
+
+    bool operator==( XcbSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( connection == rhs.connection )
+          && ( memcmp( &window, &rhs.window, sizeof( xcb_window_t ) ) == 0 );
+    }
+
+    bool operator!=( XcbSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eXcbSurfaceCreateInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR flags = {};
+    xcb_connection_t * connection = {};
+    xcb_window_t window = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eXcbSurfaceCreateInfoKHR>
+  {
+    using Type = XcbSurfaceCreateInfoKHR;
+  };
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#if defined( VK_USE_PLATFORM_XLIB_KHR )
+  struct XlibSurfaceCreateInfoKHR
+  {
+    using NativeType = VkXlibSurfaceCreateInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eXlibSurfaceCreateInfoKHR;
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+VULKAN_HPP_CONSTEXPR XlibSurfaceCreateInfoKHR(VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR flags_ = {}, Display * dpy_ = {}, Window window_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
+    : pNext( pNext_ ), flags( flags_ ), dpy( dpy_ ), window( window_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR XlibSurfaceCreateInfoKHR( XlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    XlibSurfaceCreateInfoKHR( VkXlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : XlibSurfaceCreateInfoKHR( *reinterpret_cast<XlibSurfaceCreateInfoKHR const *>( &rhs ) )
+    {}
+
+
+    XlibSurfaceCreateInfoKHR & operator=( XlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    XlibSurfaceCreateInfoKHR & operator=( VkXlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 XlibSurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 XlibSurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 XlibSurfaceCreateInfoKHR & setDpy( Display * dpy_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dpy = dpy_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 XlibSurfaceCreateInfoKHR & setWindow( Window window_ ) VULKAN_HPP_NOEXCEPT
+    {
+      window = window_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+
+    operator VkXlibSurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkXlibSurfaceCreateInfoKHR*>( this );
+    }
+
+    operator VkXlibSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkXlibSurfaceCreateInfoKHR*>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR const &, Display * const &, Window const &>
+#endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, dpy, window );
+    }
+#endif
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    std::strong_ordering operator<=>( XlibSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
+      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
+      if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) return cmp;
+      if ( auto cmp = dpy <=> rhs.dpy; cmp != 0 ) return cmp;
+      if ( auto cmp = memcmp( &window, &rhs.window, sizeof( Window ) ); cmp != 0 )
+        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
+
+      return std::strong_ordering::equivalent;
+    }
+#endif
+
+    bool operator==( XlibSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( dpy == rhs.dpy )
+          && ( memcmp( &window, &rhs.window, sizeof( Window ) ) == 0 );
+    }
+
+    bool operator!=( XlibSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+    public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eXlibSurfaceCreateInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR flags = {};
+    Display * dpy = {};
+    Window window = {};
+
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eXlibSurfaceCreateInfoKHR>
+  {
+    using Type = XlibSurfaceCreateInfoKHR;
+  };
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+
+}   // namespace VULKAN_HPP_NAMESPACE
+#endif
diff --git a/host/libs/graphics_detector/include/vulkan/vulkan_to_string.hpp b/host/libs/graphics_detector/include/vulkan/vulkan_to_string.hpp
new file mode 100644
index 0000000..de4350e
--- /dev/null
+++ b/host/libs/graphics_detector/include/vulkan/vulkan_to_string.hpp
@@ -0,0 +1,8110 @@
+// Copyright 2015-2022 The Khronos Group Inc.
+// 
+// SPDX-License-Identifier: Apache-2.0 OR MIT
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#ifndef VULKAN_TO_STRING_HPP
+#  define VULKAN_TO_STRING_HPP
+
+#include <vulkan/vulkan_enums.hpp>
+
+#if __cpp_lib_format
+#  include <format>   // std::format
+#else
+#  include <sstream>  // std::stringstream
+#endif
+
+namespace VULKAN_HPP_NAMESPACE
+{
+
+  //==========================
+  //=== BITMASKs to_string ===
+  //==========================
+
+
+  //=== VK_VERSION_1_0 ===
+
+  // NOTE(review): these to_string() overloads are generated from the Khronos
+  // Vulkan XML registry -- fix issues in the generator, not by hand-editing here.
+  // Pattern: each set bit appends "Name | "; the final
+  // substr( 0, result.size() - 3 ) trims the trailing " | " separator (3 chars).
+  // If value is non-zero but only contains bits not listed below (e.g. beta
+  // extension bits when VK_ENABLE_BETA_EXTENSIONS is off), result is empty and
+  // the unsigned count wraps; substr clamps it, so the output is "{  }".
+  VULKAN_HPP_INLINE std::string to_string( FormatFeatureFlags value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & FormatFeatureFlagBits::eSampledImage ) result += "SampledImage | ";
+    if ( value & FormatFeatureFlagBits::eStorageImage ) result += "StorageImage | ";
+    if ( value & FormatFeatureFlagBits::eStorageImageAtomic ) result += "StorageImageAtomic | ";
+    if ( value & FormatFeatureFlagBits::eUniformTexelBuffer ) result += "UniformTexelBuffer | ";
+    if ( value & FormatFeatureFlagBits::eStorageTexelBuffer ) result += "StorageTexelBuffer | ";
+    if ( value & FormatFeatureFlagBits::eStorageTexelBufferAtomic ) result += "StorageTexelBufferAtomic | ";
+    if ( value & FormatFeatureFlagBits::eVertexBuffer ) result += "VertexBuffer | ";
+    if ( value & FormatFeatureFlagBits::eColorAttachment ) result += "ColorAttachment | ";
+    if ( value & FormatFeatureFlagBits::eColorAttachmentBlend ) result += "ColorAttachmentBlend | ";
+    if ( value & FormatFeatureFlagBits::eDepthStencilAttachment ) result += "DepthStencilAttachment | ";
+    if ( value & FormatFeatureFlagBits::eBlitSrc ) result += "BlitSrc | ";
+    if ( value & FormatFeatureFlagBits::eBlitDst ) result += "BlitDst | ";
+    if ( value & FormatFeatureFlagBits::eSampledImageFilterLinear ) result += "SampledImageFilterLinear | ";
+    if ( value & FormatFeatureFlagBits::eTransferSrc ) result += "TransferSrc | ";
+    if ( value & FormatFeatureFlagBits::eTransferDst ) result += "TransferDst | ";
+    if ( value & FormatFeatureFlagBits::eMidpointChromaSamples ) result += "MidpointChromaSamples | ";
+    if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter ) result += "SampledImageYcbcrConversionLinearFilter | ";
+    if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter ) result += "SampledImageYcbcrConversionSeparateReconstructionFilter | ";
+    if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit ) result += "SampledImageYcbcrConversionChromaReconstructionExplicit | ";
+    if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable ) result += "SampledImageYcbcrConversionChromaReconstructionExplicitForceable | ";
+    if ( value & FormatFeatureFlagBits::eDisjoint ) result += "Disjoint | ";
+    if ( value & FormatFeatureFlagBits::eCositedChromaSamples ) result += "CositedChromaSamples | ";
+    if ( value & FormatFeatureFlagBits::eSampledImageFilterMinmax ) result += "SampledImageFilterMinmax | ";
+    if ( value & FormatFeatureFlagBits::eVideoDecodeOutputKHR ) result += "VideoDecodeOutputKHR | ";
+    if ( value & FormatFeatureFlagBits::eVideoDecodeDpbKHR ) result += "VideoDecodeDpbKHR | ";
+    if ( value & FormatFeatureFlagBits::eAccelerationStructureVertexBufferKHR ) result += "AccelerationStructureVertexBufferKHR | ";
+    if ( value & FormatFeatureFlagBits::eSampledImageFilterCubicEXT ) result += "SampledImageFilterCubicEXT | ";
+    if ( value & FormatFeatureFlagBits::eFragmentDensityMapEXT ) result += "FragmentDensityMapEXT | ";
+    if ( value & FormatFeatureFlagBits::eFragmentShadingRateAttachmentKHR ) result += "FragmentShadingRateAttachmentKHR | ";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    if ( value & FormatFeatureFlagBits::eVideoEncodeInputKHR ) result += "VideoEncodeInputKHR | ";
+    if ( value & FormatFeatureFlagBits::eVideoEncodeDpbKHR ) result += "VideoEncodeDpbKHR | ";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( ImageCreateFlags value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & ImageCreateFlagBits::eSparseBinding ) result += "SparseBinding | ";
+    if ( value & ImageCreateFlagBits::eSparseResidency ) result += "SparseResidency | ";
+    if ( value & ImageCreateFlagBits::eSparseAliased ) result += "SparseAliased | ";
+    if ( value & ImageCreateFlagBits::eMutableFormat ) result += "MutableFormat | ";
+    if ( value & ImageCreateFlagBits::eCubeCompatible ) result += "CubeCompatible | ";
+    if ( value & ImageCreateFlagBits::eAlias ) result += "Alias | ";
+    if ( value & ImageCreateFlagBits::eSplitInstanceBindRegions ) result += "SplitInstanceBindRegions | ";
+    if ( value & ImageCreateFlagBits::e2DArrayCompatible ) result += "2DArrayCompatible | ";
+    if ( value & ImageCreateFlagBits::eBlockTexelViewCompatible ) result += "BlockTexelViewCompatible | ";
+    if ( value & ImageCreateFlagBits::eExtendedUsage ) result += "ExtendedUsage | ";
+    if ( value & ImageCreateFlagBits::eProtected ) result += "Protected | ";
+    if ( value & ImageCreateFlagBits::eDisjoint ) result += "Disjoint | ";
+    if ( value & ImageCreateFlagBits::eCornerSampledNV ) result += "CornerSampledNV | ";
+    if ( value & ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT ) result += "SampleLocationsCompatibleDepthEXT | ";
+    if ( value & ImageCreateFlagBits::eSubsampledEXT ) result += "SubsampledEXT | ";
+    if ( value & ImageCreateFlagBits::eDescriptorBufferCaptureReplayEXT ) result += "DescriptorBufferCaptureReplayEXT | ";
+    if ( value & ImageCreateFlagBits::eMultisampledRenderToSingleSampledEXT ) result += "MultisampledRenderToSingleSampledEXT | ";
+    if ( value & ImageCreateFlagBits::e2DViewCompatibleEXT ) result += "2DViewCompatibleEXT | ";
+    if ( value & ImageCreateFlagBits::eFragmentDensityMapOffsetQCOM ) result += "FragmentDensityMapOffsetQCOM | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( ImageUsageFlags value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & ImageUsageFlagBits::eTransferSrc ) result += "TransferSrc | ";
+    if ( value & ImageUsageFlagBits::eTransferDst ) result += "TransferDst | ";
+    if ( value & ImageUsageFlagBits::eSampled ) result += "Sampled | ";
+    if ( value & ImageUsageFlagBits::eStorage ) result += "Storage | ";
+    if ( value & ImageUsageFlagBits::eColorAttachment ) result += "ColorAttachment | ";
+    if ( value & ImageUsageFlagBits::eDepthStencilAttachment ) result += "DepthStencilAttachment | ";
+    if ( value & ImageUsageFlagBits::eTransientAttachment ) result += "TransientAttachment | ";
+    if ( value & ImageUsageFlagBits::eInputAttachment ) result += "InputAttachment | ";
+    if ( value & ImageUsageFlagBits::eVideoDecodeDstKHR ) result += "VideoDecodeDstKHR | ";
+    if ( value & ImageUsageFlagBits::eVideoDecodeSrcKHR ) result += "VideoDecodeSrcKHR | ";
+    if ( value & ImageUsageFlagBits::eVideoDecodeDpbKHR ) result += "VideoDecodeDpbKHR | ";
+    if ( value & ImageUsageFlagBits::eFragmentDensityMapEXT ) result += "FragmentDensityMapEXT | ";
+    if ( value & ImageUsageFlagBits::eFragmentShadingRateAttachmentKHR ) result += "FragmentShadingRateAttachmentKHR | ";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    if ( value & ImageUsageFlagBits::eVideoEncodeDstKHR ) result += "VideoEncodeDstKHR | ";
+    if ( value & ImageUsageFlagBits::eVideoEncodeSrcKHR ) result += "VideoEncodeSrcKHR | ";
+    if ( value & ImageUsageFlagBits::eVideoEncodeDpbKHR ) result += "VideoEncodeDpbKHR | ";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+    if ( value & ImageUsageFlagBits::eAttachmentFeedbackLoopEXT ) result += "AttachmentFeedbackLoopEXT | ";
+    if ( value & ImageUsageFlagBits::eInvocationMaskHUAWEI ) result += "InvocationMaskHUAWEI | ";
+    if ( value & ImageUsageFlagBits::eSampleWeightQCOM ) result += "SampleWeightQCOM | ";
+    if ( value & ImageUsageFlagBits::eSampleBlockMatchQCOM ) result += "SampleBlockMatchQCOM | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( InstanceCreateFlags value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & InstanceCreateFlagBits::eEnumeratePortabilityKHR ) result += "EnumeratePortabilityKHR | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( MemoryHeapFlags value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & MemoryHeapFlagBits::eDeviceLocal ) result += "DeviceLocal | ";
+    if ( value & MemoryHeapFlagBits::eMultiInstance ) result += "MultiInstance | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( MemoryPropertyFlags value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & MemoryPropertyFlagBits::eDeviceLocal ) result += "DeviceLocal | ";
+    if ( value & MemoryPropertyFlagBits::eHostVisible ) result += "HostVisible | ";
+    if ( value & MemoryPropertyFlagBits::eHostCoherent ) result += "HostCoherent | ";
+    if ( value & MemoryPropertyFlagBits::eHostCached ) result += "HostCached | ";
+    if ( value & MemoryPropertyFlagBits::eLazilyAllocated ) result += "LazilyAllocated | ";
+    if ( value & MemoryPropertyFlagBits::eProtected ) result += "Protected | ";
+    if ( value & MemoryPropertyFlagBits::eDeviceCoherentAMD ) result += "DeviceCoherentAMD | ";
+    if ( value & MemoryPropertyFlagBits::eDeviceUncachedAMD ) result += "DeviceUncachedAMD | ";
+    if ( value & MemoryPropertyFlagBits::eRdmaCapableNV ) result += "RdmaCapableNV | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( QueueFlags value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & QueueFlagBits::eGraphics ) result += "Graphics | ";
+    if ( value & QueueFlagBits::eCompute ) result += "Compute | ";
+    if ( value & QueueFlagBits::eTransfer ) result += "Transfer | ";
+    if ( value & QueueFlagBits::eSparseBinding ) result += "SparseBinding | ";
+    if ( value & QueueFlagBits::eProtected ) result += "Protected | ";
+    if ( value & QueueFlagBits::eVideoDecodeKHR ) result += "VideoDecodeKHR | ";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    if ( value & QueueFlagBits::eVideoEncodeKHR ) result += "VideoEncodeKHR | ";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+    if ( value & QueueFlagBits::eOpticalFlowNV ) result += "OpticalFlowNV | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( SampleCountFlags value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & SampleCountFlagBits::e1 ) result += "1 | ";
+    if ( value & SampleCountFlagBits::e2 ) result += "2 | ";
+    if ( value & SampleCountFlagBits::e4 ) result += "4 | ";
+    if ( value & SampleCountFlagBits::e8 ) result += "8 | ";
+    if ( value & SampleCountFlagBits::e16 ) result += "16 | ";
+    if ( value & SampleCountFlagBits::e32 ) result += "32 | ";
+    if ( value & SampleCountFlagBits::e64 ) result += "64 | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  // DeviceCreateFlags has no defined bits, hence the unconditional "{}".
+  VULKAN_HPP_INLINE std::string to_string( DeviceCreateFlags )
+  {
+    return "{}";
+  }
+
+  // NOTE(review): generated code (Vulkan XML registry); do not hand-edit.
+  // Same pattern as the overloads above: "Name | " per set bit, with the
+  // trailing 3-char " | " separator stripped by substr. Overloads taking an
+  // unnamed parameter correspond to flag types with no defined bits.
+  VULKAN_HPP_INLINE std::string to_string( DeviceQueueCreateFlags value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & DeviceQueueCreateFlagBits::eProtected ) result += "Protected | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineStageFlags value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & PipelineStageFlagBits::eTopOfPipe ) result += "TopOfPipe | ";
+    if ( value & PipelineStageFlagBits::eDrawIndirect ) result += "DrawIndirect | ";
+    if ( value & PipelineStageFlagBits::eVertexInput ) result += "VertexInput | ";
+    if ( value & PipelineStageFlagBits::eVertexShader ) result += "VertexShader | ";
+    if ( value & PipelineStageFlagBits::eTessellationControlShader ) result += "TessellationControlShader | ";
+    if ( value & PipelineStageFlagBits::eTessellationEvaluationShader ) result += "TessellationEvaluationShader | ";
+    if ( value & PipelineStageFlagBits::eGeometryShader ) result += "GeometryShader | ";
+    if ( value & PipelineStageFlagBits::eFragmentShader ) result += "FragmentShader | ";
+    if ( value & PipelineStageFlagBits::eEarlyFragmentTests ) result += "EarlyFragmentTests | ";
+    if ( value & PipelineStageFlagBits::eLateFragmentTests ) result += "LateFragmentTests | ";
+    if ( value & PipelineStageFlagBits::eColorAttachmentOutput ) result += "ColorAttachmentOutput | ";
+    if ( value & PipelineStageFlagBits::eComputeShader ) result += "ComputeShader | ";
+    if ( value & PipelineStageFlagBits::eTransfer ) result += "Transfer | ";
+    if ( value & PipelineStageFlagBits::eBottomOfPipe ) result += "BottomOfPipe | ";
+    if ( value & PipelineStageFlagBits::eHost ) result += "Host | ";
+    if ( value & PipelineStageFlagBits::eAllGraphics ) result += "AllGraphics | ";
+    if ( value & PipelineStageFlagBits::eAllCommands ) result += "AllCommands | ";
+    if ( value & PipelineStageFlagBits::eTransformFeedbackEXT ) result += "TransformFeedbackEXT | ";
+    if ( value & PipelineStageFlagBits::eConditionalRenderingEXT ) result += "ConditionalRenderingEXT | ";
+    if ( value & PipelineStageFlagBits::eAccelerationStructureBuildKHR ) result += "AccelerationStructureBuildKHR | ";
+    if ( value & PipelineStageFlagBits::eRayTracingShaderKHR ) result += "RayTracingShaderKHR | ";
+    if ( value & PipelineStageFlagBits::eFragmentDensityProcessEXT ) result += "FragmentDensityProcessEXT | ";
+    if ( value & PipelineStageFlagBits::eFragmentShadingRateAttachmentKHR ) result += "FragmentShadingRateAttachmentKHR | ";
+    if ( value & PipelineStageFlagBits::eCommandPreprocessNV ) result += "CommandPreprocessNV | ";
+    if ( value & PipelineStageFlagBits::eTaskShaderEXT ) result += "TaskShaderEXT | ";
+    if ( value & PipelineStageFlagBits::eMeshShaderEXT ) result += "MeshShaderEXT | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( MemoryMapFlags )
+  {
+    return "{}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( ImageAspectFlags value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & ImageAspectFlagBits::eColor ) result += "Color | ";
+    if ( value & ImageAspectFlagBits::eDepth ) result += "Depth | ";
+    if ( value & ImageAspectFlagBits::eStencil ) result += "Stencil | ";
+    if ( value & ImageAspectFlagBits::eMetadata ) result += "Metadata | ";
+    if ( value & ImageAspectFlagBits::ePlane0 ) result += "Plane0 | ";
+    if ( value & ImageAspectFlagBits::ePlane1 ) result += "Plane1 | ";
+    if ( value & ImageAspectFlagBits::ePlane2 ) result += "Plane2 | ";
+    if ( value & ImageAspectFlagBits::eMemoryPlane0EXT ) result += "MemoryPlane0EXT | ";
+    if ( value & ImageAspectFlagBits::eMemoryPlane1EXT ) result += "MemoryPlane1EXT | ";
+    if ( value & ImageAspectFlagBits::eMemoryPlane2EXT ) result += "MemoryPlane2EXT | ";
+    if ( value & ImageAspectFlagBits::eMemoryPlane3EXT ) result += "MemoryPlane3EXT | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( SparseImageFormatFlags value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & SparseImageFormatFlagBits::eSingleMiptail ) result += "SingleMiptail | ";
+    if ( value & SparseImageFormatFlagBits::eAlignedMipSize ) result += "AlignedMipSize | ";
+    if ( value & SparseImageFormatFlagBits::eNonstandardBlockSize ) result += "NonstandardBlockSize | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( SparseMemoryBindFlags value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & SparseMemoryBindFlagBits::eMetadata ) result += "Metadata | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( FenceCreateFlags value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & FenceCreateFlagBits::eSignaled ) result += "Signaled | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( SemaphoreCreateFlags )
+  {
+    return "{}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( EventCreateFlags value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & EventCreateFlagBits::eDeviceOnly ) result += "DeviceOnly | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( QueryPipelineStatisticFlags value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & QueryPipelineStatisticFlagBits::eInputAssemblyVertices ) result += "InputAssemblyVertices | ";
+    if ( value & QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives ) result += "InputAssemblyPrimitives | ";
+    if ( value & QueryPipelineStatisticFlagBits::eVertexShaderInvocations ) result += "VertexShaderInvocations | ";
+    if ( value & QueryPipelineStatisticFlagBits::eGeometryShaderInvocations ) result += "GeometryShaderInvocations | ";
+    if ( value & QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives ) result += "GeometryShaderPrimitives | ";
+    if ( value & QueryPipelineStatisticFlagBits::eClippingInvocations ) result += "ClippingInvocations | ";
+    if ( value & QueryPipelineStatisticFlagBits::eClippingPrimitives ) result += "ClippingPrimitives | ";
+    if ( value & QueryPipelineStatisticFlagBits::eFragmentShaderInvocations ) result += "FragmentShaderInvocations | ";
+    if ( value & QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches ) result += "TessellationControlShaderPatches | ";
+    if ( value & QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations ) result += "TessellationEvaluationShaderInvocations | ";
+    if ( value & QueryPipelineStatisticFlagBits::eComputeShaderInvocations ) result += "ComputeShaderInvocations | ";
+    if ( value & QueryPipelineStatisticFlagBits::eTaskShaderInvocationsEXT ) result += "TaskShaderInvocationsEXT | ";
+    if ( value & QueryPipelineStatisticFlagBits::eMeshShaderInvocationsEXT ) result += "MeshShaderInvocationsEXT | ";
+    if ( value & QueryPipelineStatisticFlagBits::eClusterCullingShaderInvocationsHUAWEI ) result += "ClusterCullingShaderInvocationsHUAWEI | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( QueryPoolCreateFlags )
+  {
+    return "{}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( QueryResultFlags value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & QueryResultFlagBits::e64 ) result += "64 | ";
+    if ( value & QueryResultFlagBits::eWait ) result += "Wait | ";
+    if ( value & QueryResultFlagBits::eWithAvailability ) result += "WithAvailability | ";
+    if ( value & QueryResultFlagBits::ePartial ) result += "Partial | ";
+    if ( value & QueryResultFlagBits::eWithStatusKHR ) result += "WithStatusKHR | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( BufferCreateFlags value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & BufferCreateFlagBits::eSparseBinding ) result += "SparseBinding | ";
+    if ( value & BufferCreateFlagBits::eSparseResidency ) result += "SparseResidency | ";
+    if ( value & BufferCreateFlagBits::eSparseAliased ) result += "SparseAliased | ";
+    if ( value & BufferCreateFlagBits::eProtected ) result += "Protected | ";
+    if ( value & BufferCreateFlagBits::eDeviceAddressCaptureReplay ) result += "DeviceAddressCaptureReplay | ";
+    if ( value & BufferCreateFlagBits::eDescriptorBufferCaptureReplayEXT ) result += "DescriptorBufferCaptureReplayEXT | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  // NOTE(review): generated code (Vulkan XML registry); do not hand-edit.
+  // Same "Name | " accumulation pattern with the trailing separator trimmed
+  // by substr( 0, size - 3 ); no-bit flag types return "{}" unconditionally.
+  VULKAN_HPP_INLINE std::string to_string( BufferUsageFlags value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & BufferUsageFlagBits::eTransferSrc ) result += "TransferSrc | ";
+    if ( value & BufferUsageFlagBits::eTransferDst ) result += "TransferDst | ";
+    if ( value & BufferUsageFlagBits::eUniformTexelBuffer ) result += "UniformTexelBuffer | ";
+    if ( value & BufferUsageFlagBits::eStorageTexelBuffer ) result += "StorageTexelBuffer | ";
+    if ( value & BufferUsageFlagBits::eUniformBuffer ) result += "UniformBuffer | ";
+    if ( value & BufferUsageFlagBits::eStorageBuffer ) result += "StorageBuffer | ";
+    if ( value & BufferUsageFlagBits::eIndexBuffer ) result += "IndexBuffer | ";
+    if ( value & BufferUsageFlagBits::eVertexBuffer ) result += "VertexBuffer | ";
+    if ( value & BufferUsageFlagBits::eIndirectBuffer ) result += "IndirectBuffer | ";
+    if ( value & BufferUsageFlagBits::eShaderDeviceAddress ) result += "ShaderDeviceAddress | ";
+    if ( value & BufferUsageFlagBits::eVideoDecodeSrcKHR ) result += "VideoDecodeSrcKHR | ";
+    if ( value & BufferUsageFlagBits::eVideoDecodeDstKHR ) result += "VideoDecodeDstKHR | ";
+    if ( value & BufferUsageFlagBits::eTransformFeedbackBufferEXT ) result += "TransformFeedbackBufferEXT | ";
+    if ( value & BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT ) result += "TransformFeedbackCounterBufferEXT | ";
+    if ( value & BufferUsageFlagBits::eConditionalRenderingEXT ) result += "ConditionalRenderingEXT | ";
+    if ( value & BufferUsageFlagBits::eAccelerationStructureBuildInputReadOnlyKHR ) result += "AccelerationStructureBuildInputReadOnlyKHR | ";
+    if ( value & BufferUsageFlagBits::eAccelerationStructureStorageKHR ) result += "AccelerationStructureStorageKHR | ";
+    if ( value & BufferUsageFlagBits::eShaderBindingTableKHR ) result += "ShaderBindingTableKHR | ";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    if ( value & BufferUsageFlagBits::eVideoEncodeDstKHR ) result += "VideoEncodeDstKHR | ";
+    if ( value & BufferUsageFlagBits::eVideoEncodeSrcKHR ) result += "VideoEncodeSrcKHR | ";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+    if ( value & BufferUsageFlagBits::eSamplerDescriptorBufferEXT ) result += "SamplerDescriptorBufferEXT | ";
+    if ( value & BufferUsageFlagBits::eResourceDescriptorBufferEXT ) result += "ResourceDescriptorBufferEXT | ";
+    if ( value & BufferUsageFlagBits::ePushDescriptorsDescriptorBufferEXT ) result += "PushDescriptorsDescriptorBufferEXT | ";
+    if ( value & BufferUsageFlagBits::eMicromapBuildInputReadOnlyEXT ) result += "MicromapBuildInputReadOnlyEXT | ";
+    if ( value & BufferUsageFlagBits::eMicromapStorageEXT ) result += "MicromapStorageEXT | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( BufferViewCreateFlags )
+  {
+    return "{}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( ImageViewCreateFlags value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT ) result += "FragmentDensityMapDynamicEXT | ";
+    if ( value & ImageViewCreateFlagBits::eDescriptorBufferCaptureReplayEXT ) result += "DescriptorBufferCaptureReplayEXT | ";
+    if ( value & ImageViewCreateFlagBits::eFragmentDensityMapDeferredEXT ) result += "FragmentDensityMapDeferredEXT | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( ShaderModuleCreateFlags )
+  {
+    return "{}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineCacheCreateFlags value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & PipelineCacheCreateFlagBits::eExternallySynchronized ) result += "ExternallySynchronized | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( ColorComponentFlags value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & ColorComponentFlagBits::eR ) result += "R | ";
+    if ( value & ColorComponentFlagBits::eG ) result += "G | ";
+    if ( value & ColorComponentFlagBits::eB ) result += "B | ";
+    if ( value & ColorComponentFlagBits::eA ) result += "A | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  // Only eFront / eBack are listed: eNone is 0 (handled by the "{}" branch)
+  // and eFrontAndBack is the combination of the two listed bits.
+  VULKAN_HPP_INLINE std::string to_string( CullModeFlags value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & CullModeFlagBits::eFront ) result += "Front | ";
+    if ( value & CullModeFlagBits::eBack ) result += "Back | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineColorBlendStateCreateFlags value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & PipelineColorBlendStateCreateFlagBits::eRasterizationOrderAttachmentAccessEXT ) result += "RasterizationOrderAttachmentAccessEXT | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineCreateFlags value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & PipelineCreateFlagBits::eDisableOptimization ) result += "DisableOptimization | ";
+    if ( value & PipelineCreateFlagBits::eAllowDerivatives ) result += "AllowDerivatives | ";
+    if ( value & PipelineCreateFlagBits::eDerivative ) result += "Derivative | ";
+    if ( value & PipelineCreateFlagBits::eViewIndexFromDeviceIndex ) result += "ViewIndexFromDeviceIndex | ";
+    if ( value & PipelineCreateFlagBits::eDispatchBase ) result += "DispatchBase | ";
+    if ( value & PipelineCreateFlagBits::eFailOnPipelineCompileRequired ) result += "FailOnPipelineCompileRequired | ";
+    if ( value & PipelineCreateFlagBits::eEarlyReturnOnFailure ) result += "EarlyReturnOnFailure | ";
+    if ( value & PipelineCreateFlagBits::eRenderingFragmentShadingRateAttachmentKHR ) result += "RenderingFragmentShadingRateAttachmentKHR | ";
+    if ( value & PipelineCreateFlagBits::eRenderingFragmentDensityMapAttachmentEXT ) result += "RenderingFragmentDensityMapAttachmentEXT | ";
+    if ( value & PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR ) result += "RayTracingNoNullAnyHitShadersKHR | ";
+    if ( value & PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR ) result += "RayTracingNoNullClosestHitShadersKHR | ";
+    if ( value & PipelineCreateFlagBits::eRayTracingNoNullMissShadersKHR ) result += "RayTracingNoNullMissShadersKHR | ";
+    if ( value & PipelineCreateFlagBits::eRayTracingNoNullIntersectionShadersKHR ) result += "RayTracingNoNullIntersectionShadersKHR | ";
+    if ( value & PipelineCreateFlagBits::eRayTracingSkipTrianglesKHR ) result += "RayTracingSkipTrianglesKHR | ";
+    if ( value & PipelineCreateFlagBits::eRayTracingSkipAabbsKHR ) result += "RayTracingSkipAabbsKHR | ";
+    if ( value & PipelineCreateFlagBits::eRayTracingShaderGroupHandleCaptureReplayKHR ) result += "RayTracingShaderGroupHandleCaptureReplayKHR | ";
+    if ( value & PipelineCreateFlagBits::eDeferCompileNV ) result += "DeferCompileNV | ";
+    if ( value & PipelineCreateFlagBits::eCaptureStatisticsKHR ) result += "CaptureStatisticsKHR | ";
+    if ( value & PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR ) result += "CaptureInternalRepresentationsKHR | ";
+    if ( value & PipelineCreateFlagBits::eIndirectBindableNV ) result += "IndirectBindableNV | ";
+    if ( value & PipelineCreateFlagBits::eLibraryKHR ) result += "LibraryKHR | ";
+    if ( value & PipelineCreateFlagBits::eDescriptorBufferEXT ) result += "DescriptorBufferEXT | ";
+    if ( value & PipelineCreateFlagBits::eRetainLinkTimeOptimizationInfoEXT ) result += "RetainLinkTimeOptimizationInfoEXT | ";
+    if ( value & PipelineCreateFlagBits::eLinkTimeOptimizationEXT ) result += "LinkTimeOptimizationEXT | ";
+    if ( value & PipelineCreateFlagBits::eRayTracingAllowMotionNV ) result += "RayTracingAllowMotionNV | ";
+    if ( value & PipelineCreateFlagBits::eColorAttachmentFeedbackLoopEXT ) result += "ColorAttachmentFeedbackLoopEXT | ";
+    if ( value & PipelineCreateFlagBits::eDepthStencilAttachmentFeedbackLoopEXT ) result += "DepthStencilAttachmentFeedbackLoopEXT | ";
+    if ( value & PipelineCreateFlagBits::eRayTracingOpacityMicromapEXT ) result += "RayTracingOpacityMicromapEXT | ";
+    if ( value & PipelineCreateFlagBits::eNoProtectedAccessEXT ) result += "NoProtectedAccessEXT | ";
+    if ( value & PipelineCreateFlagBits::eProtectedAccessOnlyEXT ) result += "ProtectedAccessOnlyEXT | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  // NOTE(review): generated code (Vulkan XML registry); do not hand-edit.
+  // Same "Name | " accumulation pattern; substr( 0, size - 3 ) removes the
+  // trailing " | " separator, and bit-less flag types return "{}" directly.
+  VULKAN_HPP_INLINE std::string to_string( PipelineDepthStencilStateCreateFlags value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & PipelineDepthStencilStateCreateFlagBits::eRasterizationOrderAttachmentDepthAccessEXT ) result += "RasterizationOrderAttachmentDepthAccessEXT | ";
+    if ( value & PipelineDepthStencilStateCreateFlagBits::eRasterizationOrderAttachmentStencilAccessEXT ) result += "RasterizationOrderAttachmentStencilAccessEXT | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineDynamicStateCreateFlags )
+  {
+    return "{}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineInputAssemblyStateCreateFlags )
+  {
+    return "{}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineLayoutCreateFlags value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & PipelineLayoutCreateFlagBits::eIndependentSetsEXT ) result += "IndependentSetsEXT | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineMultisampleStateCreateFlags )
+  {
+    return "{}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateCreateFlags )
+  {
+    return "{}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineShaderStageCreateFlags value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & PipelineShaderStageCreateFlagBits::eAllowVaryingSubgroupSize ) result += "AllowVaryingSubgroupSize | ";
+    if ( value & PipelineShaderStageCreateFlagBits::eRequireFullSubgroups ) result += "RequireFullSubgroups | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineTessellationStateCreateFlags )
+  {
+    return "{}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineVertexInputStateCreateFlags )
+  {
+    return "{}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineViewportStateCreateFlags )
+  {
+    return "{}";
+  }
+
+  // eAll / eAllGraphics are combination masks, so only the individual stage
+  // bits are listed here.
+  VULKAN_HPP_INLINE std::string to_string( ShaderStageFlags value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & ShaderStageFlagBits::eVertex ) result += "Vertex | ";
+    if ( value & ShaderStageFlagBits::eTessellationControl ) result += "TessellationControl | ";
+    if ( value & ShaderStageFlagBits::eTessellationEvaluation ) result += "TessellationEvaluation | ";
+    if ( value & ShaderStageFlagBits::eGeometry ) result += "Geometry | ";
+    if ( value & ShaderStageFlagBits::eFragment ) result += "Fragment | ";
+    if ( value & ShaderStageFlagBits::eCompute ) result += "Compute | ";
+    if ( value & ShaderStageFlagBits::eRaygenKHR ) result += "RaygenKHR | ";
+    if ( value & ShaderStageFlagBits::eAnyHitKHR ) result += "AnyHitKHR | ";
+    if ( value & ShaderStageFlagBits::eClosestHitKHR ) result += "ClosestHitKHR | ";
+    if ( value & ShaderStageFlagBits::eMissKHR ) result += "MissKHR | ";
+    if ( value & ShaderStageFlagBits::eIntersectionKHR ) result += "IntersectionKHR | ";
+    if ( value & ShaderStageFlagBits::eCallableKHR ) result += "CallableKHR | ";
+    if ( value & ShaderStageFlagBits::eTaskEXT ) result += "TaskEXT | ";
+    if ( value & ShaderStageFlagBits::eMeshEXT ) result += "MeshEXT | ";
+    if ( value & ShaderStageFlagBits::eSubpassShadingHUAWEI ) result += "SubpassShadingHUAWEI | ";
+    if ( value & ShaderStageFlagBits::eClusterCullingHUAWEI ) result += "ClusterCullingHUAWEI | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( SamplerCreateFlags value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & SamplerCreateFlagBits::eSubsampledEXT ) result += "SubsampledEXT | ";
+    if ( value & SamplerCreateFlagBits::eSubsampledCoarseReconstructionEXT ) result += "SubsampledCoarseReconstructionEXT | ";
+    if ( value & SamplerCreateFlagBits::eDescriptorBufferCaptureReplayEXT ) result += "DescriptorBufferCaptureReplayEXT | ";
+    if ( value & SamplerCreateFlagBits::eNonSeamlessCubeMapEXT ) result += "NonSeamlessCubeMapEXT | ";
+    if ( value & SamplerCreateFlagBits::eImageProcessingQCOM ) result += "ImageProcessingQCOM | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( DescriptorPoolCreateFlags value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & DescriptorPoolCreateFlagBits::eFreeDescriptorSet ) result += "FreeDescriptorSet | ";
+    if ( value & DescriptorPoolCreateFlagBits::eUpdateAfterBind ) result += "UpdateAfterBind | ";
+    if ( value & DescriptorPoolCreateFlagBits::eHostOnlyEXT ) result += "HostOnlyEXT | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( DescriptorPoolResetFlags )
+  {
+    return "{}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( DescriptorSetLayoutCreateFlags value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPool ) result += "UpdateAfterBindPool | ";
+    if ( value & DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR ) result += "PushDescriptorKHR | ";
+    if ( value & DescriptorSetLayoutCreateFlagBits::eDescriptorBufferEXT ) result += "DescriptorBufferEXT | ";
+    if ( value & DescriptorSetLayoutCreateFlagBits::eEmbeddedImmutableSamplersEXT ) result += "EmbeddedImmutableSamplersEXT | ";
+    if ( value & DescriptorSetLayoutCreateFlagBits::eHostOnlyPoolEXT ) result += "HostOnlyPoolEXT | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
  // Stringify an AccessFlags mask as "{ BitName | ... }"; "{}" when no bit is set.
  VULKAN_HPP_INLINE std::string to_string( AccessFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & AccessFlagBits::eIndirectCommandRead ) result += "IndirectCommandRead | ";
    if ( value & AccessFlagBits::eIndexRead ) result += "IndexRead | ";
    if ( value & AccessFlagBits::eVertexAttributeRead ) result += "VertexAttributeRead | ";
    if ( value & AccessFlagBits::eUniformRead ) result += "UniformRead | ";
    if ( value & AccessFlagBits::eInputAttachmentRead ) result += "InputAttachmentRead | ";
    if ( value & AccessFlagBits::eShaderRead ) result += "ShaderRead | ";
    if ( value & AccessFlagBits::eShaderWrite ) result += "ShaderWrite | ";
    if ( value & AccessFlagBits::eColorAttachmentRead ) result += "ColorAttachmentRead | ";
    if ( value & AccessFlagBits::eColorAttachmentWrite ) result += "ColorAttachmentWrite | ";
    if ( value & AccessFlagBits::eDepthStencilAttachmentRead ) result += "DepthStencilAttachmentRead | ";
    if ( value & AccessFlagBits::eDepthStencilAttachmentWrite ) result += "DepthStencilAttachmentWrite | ";
    if ( value & AccessFlagBits::eTransferRead ) result += "TransferRead | ";
    if ( value & AccessFlagBits::eTransferWrite ) result += "TransferWrite | ";
    if ( value & AccessFlagBits::eHostRead ) result += "HostRead | ";
    if ( value & AccessFlagBits::eHostWrite ) result += "HostWrite | ";
    if ( value & AccessFlagBits::eMemoryRead ) result += "MemoryRead | ";
    if ( value & AccessFlagBits::eMemoryWrite ) result += "MemoryWrite | ";
    if ( value & AccessFlagBits::eTransformFeedbackWriteEXT ) result += "TransformFeedbackWriteEXT | ";
    if ( value & AccessFlagBits::eTransformFeedbackCounterReadEXT ) result += "TransformFeedbackCounterReadEXT | ";
    if ( value & AccessFlagBits::eTransformFeedbackCounterWriteEXT ) result += "TransformFeedbackCounterWriteEXT | ";
    if ( value & AccessFlagBits::eConditionalRenderingReadEXT ) result += "ConditionalRenderingReadEXT | ";
    if ( value & AccessFlagBits::eColorAttachmentReadNoncoherentEXT ) result += "ColorAttachmentReadNoncoherentEXT | ";
    if ( value & AccessFlagBits::eAccelerationStructureReadKHR ) result += "AccelerationStructureReadKHR | ";
    if ( value & AccessFlagBits::eAccelerationStructureWriteKHR ) result += "AccelerationStructureWriteKHR | ";
    if ( value & AccessFlagBits::eFragmentDensityMapReadEXT ) result += "FragmentDensityMapReadEXT | ";
    if ( value & AccessFlagBits::eFragmentShadingRateAttachmentReadKHR ) result += "FragmentShadingRateAttachmentReadKHR | ";
    if ( value & AccessFlagBits::eCommandPreprocessReadNV ) result += "CommandPreprocessReadNV | ";
    if ( value & AccessFlagBits::eCommandPreprocessWriteNV ) result += "CommandPreprocessWriteNV | ";

    // substr drops the trailing " | " separator appended after the last name.
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }
+
+  VULKAN_HPP_INLINE std::string to_string( AttachmentDescriptionFlags value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & AttachmentDescriptionFlagBits::eMayAlias ) result += "MayAlias | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( DependencyFlags value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & DependencyFlagBits::eByRegion ) result += "ByRegion | ";
+    if ( value & DependencyFlagBits::eDeviceGroup ) result += "DeviceGroup | ";
+    if ( value & DependencyFlagBits::eViewLocal ) result += "ViewLocal | ";
+    if ( value & DependencyFlagBits::eFeedbackLoopEXT ) result += "FeedbackLoopEXT | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( FramebufferCreateFlags value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & FramebufferCreateFlagBits::eImageless ) result += "Imageless | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( RenderPassCreateFlags value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & RenderPassCreateFlagBits::eTransformQCOM ) result += "TransformQCOM | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
  // Stringify a SubpassDescriptionFlags mask as "{ BitName | ... }"; "{}" when no bit is set.
  VULKAN_HPP_INLINE std::string to_string( SubpassDescriptionFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & SubpassDescriptionFlagBits::ePerViewAttributesNVX ) result += "PerViewAttributesNVX | ";
    if ( value & SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX ) result += "PerViewPositionXOnlyNVX | ";
    if ( value & SubpassDescriptionFlagBits::eFragmentRegionQCOM ) result += "FragmentRegionQCOM | ";
    if ( value & SubpassDescriptionFlagBits::eShaderResolveQCOM ) result += "ShaderResolveQCOM | ";
    if ( value & SubpassDescriptionFlagBits::eRasterizationOrderAttachmentColorAccessEXT ) result += "RasterizationOrderAttachmentColorAccessEXT | ";
    if ( value & SubpassDescriptionFlagBits::eRasterizationOrderAttachmentDepthAccessEXT ) result += "RasterizationOrderAttachmentDepthAccessEXT | ";
    if ( value & SubpassDescriptionFlagBits::eRasterizationOrderAttachmentStencilAccessEXT ) result += "RasterizationOrderAttachmentStencilAccessEXT | ";
    if ( value & SubpassDescriptionFlagBits::eEnableLegacyDitheringEXT ) result += "EnableLegacyDitheringEXT | ";

    // substr drops the trailing " | " separator appended after the last name.
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }
+
+  VULKAN_HPP_INLINE std::string to_string( CommandPoolCreateFlags value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & CommandPoolCreateFlagBits::eTransient ) result += "Transient | ";
+    if ( value & CommandPoolCreateFlagBits::eResetCommandBuffer ) result += "ResetCommandBuffer | ";
+    if ( value & CommandPoolCreateFlagBits::eProtected ) result += "Protected | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( CommandPoolResetFlags value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & CommandPoolResetFlagBits::eReleaseResources ) result += "ReleaseResources | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( CommandBufferResetFlags value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & CommandBufferResetFlagBits::eReleaseResources ) result += "ReleaseResources | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( CommandBufferUsageFlags value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & CommandBufferUsageFlagBits::eOneTimeSubmit ) result += "OneTimeSubmit | ";
+    if ( value & CommandBufferUsageFlagBits::eRenderPassContinue ) result += "RenderPassContinue | ";
+    if ( value & CommandBufferUsageFlagBits::eSimultaneousUse ) result += "SimultaneousUse | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( QueryControlFlags value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & QueryControlFlagBits::ePrecise ) result += "Precise | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( StencilFaceFlags value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & StencilFaceFlagBits::eFront ) result += "Front | ";
+    if ( value & StencilFaceFlagBits::eBack ) result += "Back | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  //=== VK_VERSION_1_1 ===
+
  // Stringify a SubgroupFeatureFlags mask as "{ BitName | ... }"; "{}" when no bit is set.
  VULKAN_HPP_INLINE std::string to_string( SubgroupFeatureFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & SubgroupFeatureFlagBits::eBasic ) result += "Basic | ";
    if ( value & SubgroupFeatureFlagBits::eVote ) result += "Vote | ";
    if ( value & SubgroupFeatureFlagBits::eArithmetic ) result += "Arithmetic | ";
    if ( value & SubgroupFeatureFlagBits::eBallot ) result += "Ballot | ";
    if ( value & SubgroupFeatureFlagBits::eShuffle ) result += "Shuffle | ";
    if ( value & SubgroupFeatureFlagBits::eShuffleRelative ) result += "ShuffleRelative | ";
    if ( value & SubgroupFeatureFlagBits::eClustered ) result += "Clustered | ";
    if ( value & SubgroupFeatureFlagBits::eQuad ) result += "Quad | ";
    if ( value & SubgroupFeatureFlagBits::ePartitionedNV ) result += "PartitionedNV | ";

    // substr drops the trailing " | " separator appended after the last name.
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  // Stringify a PeerMemoryFeatureFlags mask as "{ BitName | ... }"; "{}" when no bit is set.
  VULKAN_HPP_INLINE std::string to_string( PeerMemoryFeatureFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & PeerMemoryFeatureFlagBits::eCopySrc ) result += "CopySrc | ";
    if ( value & PeerMemoryFeatureFlagBits::eCopyDst ) result += "CopyDst | ";
    if ( value & PeerMemoryFeatureFlagBits::eGenericSrc ) result += "GenericSrc | ";
    if ( value & PeerMemoryFeatureFlagBits::eGenericDst ) result += "GenericDst | ";

    // substr drops the trailing " | " separator appended after the last name.
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  // Stringify a MemoryAllocateFlags mask as "{ BitName | ... }"; "{}" when no bit is set.
  VULKAN_HPP_INLINE std::string to_string( MemoryAllocateFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & MemoryAllocateFlagBits::eDeviceMask ) result += "DeviceMask | ";
    if ( value & MemoryAllocateFlagBits::eDeviceAddress ) result += "DeviceAddress | ";
    if ( value & MemoryAllocateFlagBits::eDeviceAddressCaptureReplay ) result += "DeviceAddressCaptureReplay | ";

    // substr drops the trailing " | " separator appended after the last name.
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }
+
+  VULKAN_HPP_INLINE std::string to_string( CommandPoolTrimFlags )
+  {
+    return "{}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( DescriptorUpdateTemplateCreateFlags )
+  {
+    return "{}";
+  }
+
  // Stringify an ExternalMemoryHandleTypeFlags mask as "{ BitName | ... }"; "{}" when no bit is set.
  // Platform-specific bits are only recognized when the corresponding VK_USE_PLATFORM_* macro is defined.
  VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) result += "OpaqueFd | ";
    if ( value & ExternalMemoryHandleTypeFlagBits::eOpaqueWin32 ) result += "OpaqueWin32 | ";
    if ( value & ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt ) result += "OpaqueWin32Kmt | ";
    if ( value & ExternalMemoryHandleTypeFlagBits::eD3D11Texture ) result += "D3D11Texture | ";
    if ( value & ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt ) result += "D3D11TextureKmt | ";
    if ( value & ExternalMemoryHandleTypeFlagBits::eD3D12Heap ) result += "D3D12Heap | ";
    if ( value & ExternalMemoryHandleTypeFlagBits::eD3D12Resource ) result += "D3D12Resource | ";
    if ( value & ExternalMemoryHandleTypeFlagBits::eDmaBufEXT ) result += "DmaBufEXT | ";
#if defined( VK_USE_PLATFORM_ANDROID_KHR )
    if ( value & ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID ) result += "AndroidHardwareBufferANDROID | ";
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
    if ( value & ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT ) result += "HostAllocationEXT | ";
    if ( value & ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT ) result += "HostMappedForeignMemoryEXT | ";
#if defined( VK_USE_PLATFORM_FUCHSIA )
    if ( value & ExternalMemoryHandleTypeFlagBits::eZirconVmoFUCHSIA ) result += "ZirconVmoFUCHSIA | ";
#endif /*VK_USE_PLATFORM_FUCHSIA*/
    if ( value & ExternalMemoryHandleTypeFlagBits::eRdmaAddressNV ) result += "RdmaAddressNV | ";

    // substr drops the trailing " | " separator appended after the last name.
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }
+
  // Stringify an ExternalMemoryFeatureFlags mask as "{ BitName | ... }"; "{}" when no bit is set.
  VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & ExternalMemoryFeatureFlagBits::eDedicatedOnly ) result += "DedicatedOnly | ";
    if ( value & ExternalMemoryFeatureFlagBits::eExportable ) result += "Exportable | ";
    if ( value & ExternalMemoryFeatureFlagBits::eImportable ) result += "Importable | ";

    // substr drops the trailing " | " separator appended after the last name.
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  // Stringify an ExternalFenceHandleTypeFlags mask as "{ BitName | ... }"; "{}" when no bit is set.
  VULKAN_HPP_INLINE std::string to_string( ExternalFenceHandleTypeFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & ExternalFenceHandleTypeFlagBits::eOpaqueFd ) result += "OpaqueFd | ";
    if ( value & ExternalFenceHandleTypeFlagBits::eOpaqueWin32 ) result += "OpaqueWin32 | ";
    if ( value & ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt ) result += "OpaqueWin32Kmt | ";
    if ( value & ExternalFenceHandleTypeFlagBits::eSyncFd ) result += "SyncFd | ";

    // substr drops the trailing " | " separator appended after the last name.
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  // Stringify an ExternalFenceFeatureFlags mask as "{ BitName | ... }"; "{}" when no bit is set.
  VULKAN_HPP_INLINE std::string to_string( ExternalFenceFeatureFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & ExternalFenceFeatureFlagBits::eExportable ) result += "Exportable | ";
    if ( value & ExternalFenceFeatureFlagBits::eImportable ) result += "Importable | ";

    // substr drops the trailing " | " separator appended after the last name.
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }
+
+  VULKAN_HPP_INLINE std::string to_string( FenceImportFlags value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & FenceImportFlagBits::eTemporary ) result += "Temporary | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( SemaphoreImportFlags value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & SemaphoreImportFlagBits::eTemporary ) result += "Temporary | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
  // Stringify an ExternalSemaphoreHandleTypeFlags mask as "{ BitName | ... }"; "{}" when no bit is set.
  // The Fuchsia-specific bit is only recognized when VK_USE_PLATFORM_FUCHSIA is defined.
  VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreHandleTypeFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) result += "OpaqueFd | ";
    if ( value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32 ) result += "OpaqueWin32 | ";
    if ( value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt ) result += "OpaqueWin32Kmt | ";
    if ( value & ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence ) result += "D3D12Fence | ";
    if ( value & ExternalSemaphoreHandleTypeFlagBits::eSyncFd ) result += "SyncFd | ";
#if defined( VK_USE_PLATFORM_FUCHSIA )
    if ( value & ExternalSemaphoreHandleTypeFlagBits::eZirconEventFUCHSIA ) result += "ZirconEventFUCHSIA | ";
#endif /*VK_USE_PLATFORM_FUCHSIA*/

    // substr drops the trailing " | " separator appended after the last name.
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  // Stringify an ExternalSemaphoreFeatureFlags mask as "{ BitName | ... }"; "{}" when no bit is set.
  VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreFeatureFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & ExternalSemaphoreFeatureFlagBits::eExportable ) result += "Exportable | ";
    if ( value & ExternalSemaphoreFeatureFlagBits::eImportable ) result += "Importable | ";

    // substr drops the trailing " | " separator appended after the last name.
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }
+
+  //=== VK_VERSION_1_2 ===
+
+  VULKAN_HPP_INLINE std::string to_string( DescriptorBindingFlags value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & DescriptorBindingFlagBits::eUpdateAfterBind ) result += "UpdateAfterBind | ";
+    if ( value & DescriptorBindingFlagBits::eUpdateUnusedWhilePending ) result += "UpdateUnusedWhilePending | ";
+    if ( value & DescriptorBindingFlagBits::ePartiallyBound ) result += "PartiallyBound | ";
+    if ( value & DescriptorBindingFlagBits::eVariableDescriptorCount ) result += "VariableDescriptorCount | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( ResolveModeFlags value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & ResolveModeFlagBits::eSampleZero ) result += "SampleZero | ";
+    if ( value & ResolveModeFlagBits::eAverage ) result += "Average | ";
+    if ( value & ResolveModeFlagBits::eMin ) result += "Min | ";
+    if ( value & ResolveModeFlagBits::eMax ) result += "Max | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( SemaphoreWaitFlags value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & SemaphoreWaitFlagBits::eAny ) result += "Any | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  //=== VK_VERSION_1_3 ===
+
  // Stringify a PipelineCreationFeedbackFlags mask as "{ BitName | ... }"; "{}" when no bit is set.
  VULKAN_HPP_INLINE std::string to_string( PipelineCreationFeedbackFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & PipelineCreationFeedbackFlagBits::eValid ) result += "Valid | ";
    if ( value & PipelineCreationFeedbackFlagBits::eApplicationPipelineCacheHit ) result += "ApplicationPipelineCacheHit | ";
    if ( value & PipelineCreationFeedbackFlagBits::eBasePipelineAcceleration ) result += "BasePipelineAcceleration | ";

    // substr drops the trailing " | " separator appended after the last name.
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  // Stringify a ToolPurposeFlags mask as "{ BitName | ... }"; "{}" when no bit is set.
  VULKAN_HPP_INLINE std::string to_string( ToolPurposeFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & ToolPurposeFlagBits::eValidation ) result += "Validation | ";
    if ( value & ToolPurposeFlagBits::eProfiling ) result += "Profiling | ";
    if ( value & ToolPurposeFlagBits::eTracing ) result += "Tracing | ";
    if ( value & ToolPurposeFlagBits::eAdditionalFeatures ) result += "AdditionalFeatures | ";
    if ( value & ToolPurposeFlagBits::eModifyingFeatures ) result += "ModifyingFeatures | ";
    if ( value & ToolPurposeFlagBits::eDebugReportingEXT ) result += "DebugReportingEXT | ";
    if ( value & ToolPurposeFlagBits::eDebugMarkersEXT ) result += "DebugMarkersEXT | ";

    // substr drops the trailing " | " separator appended after the last name.
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  // PrivateDataSlotCreateFlags has no flag bits defined; always "{}".
  VULKAN_HPP_INLINE std::string to_string( PrivateDataSlotCreateFlags )
  {
    return "{}";
  }
+
  // Stringify a PipelineStageFlags2 mask as "{ BitName | ... }"; "{}" when no bit is set.
  // The video-encode bit is only recognized when VK_ENABLE_BETA_EXTENSIONS is defined.
  VULKAN_HPP_INLINE std::string to_string( PipelineStageFlags2 value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & PipelineStageFlagBits2::eTopOfPipe ) result += "TopOfPipe | ";
    if ( value & PipelineStageFlagBits2::eDrawIndirect ) result += "DrawIndirect | ";
    if ( value & PipelineStageFlagBits2::eVertexInput ) result += "VertexInput | ";
    if ( value & PipelineStageFlagBits2::eVertexShader ) result += "VertexShader | ";
    if ( value & PipelineStageFlagBits2::eTessellationControlShader ) result += "TessellationControlShader | ";
    if ( value & PipelineStageFlagBits2::eTessellationEvaluationShader ) result += "TessellationEvaluationShader | ";
    if ( value & PipelineStageFlagBits2::eGeometryShader ) result += "GeometryShader | ";
    if ( value & PipelineStageFlagBits2::eFragmentShader ) result += "FragmentShader | ";
    if ( value & PipelineStageFlagBits2::eEarlyFragmentTests ) result += "EarlyFragmentTests | ";
    if ( value & PipelineStageFlagBits2::eLateFragmentTests ) result += "LateFragmentTests | ";
    if ( value & PipelineStageFlagBits2::eColorAttachmentOutput ) result += "ColorAttachmentOutput | ";
    if ( value & PipelineStageFlagBits2::eComputeShader ) result += "ComputeShader | ";
    if ( value & PipelineStageFlagBits2::eAllTransfer ) result += "AllTransfer | ";
    if ( value & PipelineStageFlagBits2::eBottomOfPipe ) result += "BottomOfPipe | ";
    if ( value & PipelineStageFlagBits2::eHost ) result += "Host | ";
    if ( value & PipelineStageFlagBits2::eAllGraphics ) result += "AllGraphics | ";
    if ( value & PipelineStageFlagBits2::eAllCommands ) result += "AllCommands | ";
    if ( value & PipelineStageFlagBits2::eCopy ) result += "Copy | ";
    if ( value & PipelineStageFlagBits2::eResolve ) result += "Resolve | ";
    if ( value & PipelineStageFlagBits2::eBlit ) result += "Blit | ";
    if ( value & PipelineStageFlagBits2::eClear ) result += "Clear | ";
    if ( value & PipelineStageFlagBits2::eIndexInput ) result += "IndexInput | ";
    if ( value & PipelineStageFlagBits2::eVertexAttributeInput ) result += "VertexAttributeInput | ";
    if ( value & PipelineStageFlagBits2::ePreRasterizationShaders ) result += "PreRasterizationShaders | ";
    if ( value & PipelineStageFlagBits2::eVideoDecodeKHR ) result += "VideoDecodeKHR | ";
#if defined( VK_ENABLE_BETA_EXTENSIONS )
    if ( value & PipelineStageFlagBits2::eVideoEncodeKHR ) result += "VideoEncodeKHR | ";
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
    if ( value & PipelineStageFlagBits2::eTransformFeedbackEXT ) result += "TransformFeedbackEXT | ";
    if ( value & PipelineStageFlagBits2::eConditionalRenderingEXT ) result += "ConditionalRenderingEXT | ";
    if ( value & PipelineStageFlagBits2::eCommandPreprocessNV ) result += "CommandPreprocessNV | ";
    if ( value & PipelineStageFlagBits2::eFragmentShadingRateAttachmentKHR ) result += "FragmentShadingRateAttachmentKHR | ";
    if ( value & PipelineStageFlagBits2::eAccelerationStructureBuildKHR ) result += "AccelerationStructureBuildKHR | ";
    if ( value & PipelineStageFlagBits2::eRayTracingShaderKHR ) result += "RayTracingShaderKHR | ";
    if ( value & PipelineStageFlagBits2::eFragmentDensityProcessEXT ) result += "FragmentDensityProcessEXT | ";
    if ( value & PipelineStageFlagBits2::eTaskShaderEXT ) result += "TaskShaderEXT | ";
    if ( value & PipelineStageFlagBits2::eMeshShaderEXT ) result += "MeshShaderEXT | ";
    if ( value & PipelineStageFlagBits2::eSubpassShadingHUAWEI ) result += "SubpassShadingHUAWEI | ";
    if ( value & PipelineStageFlagBits2::eInvocationMaskHUAWEI ) result += "InvocationMaskHUAWEI | ";
    if ( value & PipelineStageFlagBits2::eAccelerationStructureCopyKHR ) result += "AccelerationStructureCopyKHR | ";
    if ( value & PipelineStageFlagBits2::eMicromapBuildEXT ) result += "MicromapBuildEXT | ";
    if ( value & PipelineStageFlagBits2::eClusterCullingShaderHUAWEI ) result += "ClusterCullingShaderHUAWEI | ";
    if ( value & PipelineStageFlagBits2::eOpticalFlowNV ) result += "OpticalFlowNV | ";

    // substr drops the trailing " | " separator appended after the last name.
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }
+
  // Stringify an AccessFlags2 mask as "{ BitName | ... }"; "{}" when no bit is set.
  // The video-encode bits are only recognized when VK_ENABLE_BETA_EXTENSIONS is defined.
  VULKAN_HPP_INLINE std::string to_string( AccessFlags2 value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & AccessFlagBits2::eIndirectCommandRead ) result += "IndirectCommandRead | ";
    if ( value & AccessFlagBits2::eIndexRead ) result += "IndexRead | ";
    if ( value & AccessFlagBits2::eVertexAttributeRead ) result += "VertexAttributeRead | ";
    if ( value & AccessFlagBits2::eUniformRead ) result += "UniformRead | ";
    if ( value & AccessFlagBits2::eInputAttachmentRead ) result += "InputAttachmentRead | ";
    if ( value & AccessFlagBits2::eShaderRead ) result += "ShaderRead | ";
    if ( value & AccessFlagBits2::eShaderWrite ) result += "ShaderWrite | ";
    if ( value & AccessFlagBits2::eColorAttachmentRead ) result += "ColorAttachmentRead | ";
    if ( value & AccessFlagBits2::eColorAttachmentWrite ) result += "ColorAttachmentWrite | ";
    if ( value & AccessFlagBits2::eDepthStencilAttachmentRead ) result += "DepthStencilAttachmentRead | ";
    if ( value & AccessFlagBits2::eDepthStencilAttachmentWrite ) result += "DepthStencilAttachmentWrite | ";
    if ( value & AccessFlagBits2::eTransferRead ) result += "TransferRead | ";
    if ( value & AccessFlagBits2::eTransferWrite ) result += "TransferWrite | ";
    if ( value & AccessFlagBits2::eHostRead ) result += "HostRead | ";
    if ( value & AccessFlagBits2::eHostWrite ) result += "HostWrite | ";
    if ( value & AccessFlagBits2::eMemoryRead ) result += "MemoryRead | ";
    if ( value & AccessFlagBits2::eMemoryWrite ) result += "MemoryWrite | ";
    if ( value & AccessFlagBits2::eShaderSampledRead ) result += "ShaderSampledRead | ";
    if ( value & AccessFlagBits2::eShaderStorageRead ) result += "ShaderStorageRead | ";
    if ( value & AccessFlagBits2::eShaderStorageWrite ) result += "ShaderStorageWrite | ";
    if ( value & AccessFlagBits2::eVideoDecodeReadKHR ) result += "VideoDecodeReadKHR | ";
    if ( value & AccessFlagBits2::eVideoDecodeWriteKHR ) result += "VideoDecodeWriteKHR | ";
#if defined( VK_ENABLE_BETA_EXTENSIONS )
    if ( value & AccessFlagBits2::eVideoEncodeReadKHR ) result += "VideoEncodeReadKHR | ";
    if ( value & AccessFlagBits2::eVideoEncodeWriteKHR ) result += "VideoEncodeWriteKHR | ";
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
    if ( value & AccessFlagBits2::eTransformFeedbackWriteEXT ) result += "TransformFeedbackWriteEXT | ";
    if ( value & AccessFlagBits2::eTransformFeedbackCounterReadEXT ) result += "TransformFeedbackCounterReadEXT | ";
    if ( value & AccessFlagBits2::eTransformFeedbackCounterWriteEXT ) result += "TransformFeedbackCounterWriteEXT | ";
    if ( value & AccessFlagBits2::eConditionalRenderingReadEXT ) result += "ConditionalRenderingReadEXT | ";
    if ( value & AccessFlagBits2::eCommandPreprocessReadNV ) result += "CommandPreprocessReadNV | ";
    if ( value & AccessFlagBits2::eCommandPreprocessWriteNV ) result += "CommandPreprocessWriteNV | ";
    if ( value & AccessFlagBits2::eFragmentShadingRateAttachmentReadKHR ) result += "FragmentShadingRateAttachmentReadKHR | ";
    if ( value & AccessFlagBits2::eAccelerationStructureReadKHR ) result += "AccelerationStructureReadKHR | ";
    if ( value & AccessFlagBits2::eAccelerationStructureWriteKHR ) result += "AccelerationStructureWriteKHR | ";
    if ( value & AccessFlagBits2::eFragmentDensityMapReadEXT ) result += "FragmentDensityMapReadEXT | ";
    if ( value & AccessFlagBits2::eColorAttachmentReadNoncoherentEXT ) result += "ColorAttachmentReadNoncoherentEXT | ";
    if ( value & AccessFlagBits2::eDescriptorBufferReadEXT ) result += "DescriptorBufferReadEXT | ";
    if ( value & AccessFlagBits2::eInvocationMaskReadHUAWEI ) result += "InvocationMaskReadHUAWEI | ";
    if ( value & AccessFlagBits2::eShaderBindingTableReadKHR ) result += "ShaderBindingTableReadKHR | ";
    if ( value & AccessFlagBits2::eMicromapReadEXT ) result += "MicromapReadEXT | ";
    if ( value & AccessFlagBits2::eMicromapWriteEXT ) result += "MicromapWriteEXT | ";
    if ( value & AccessFlagBits2::eOpticalFlowReadNV ) result += "OpticalFlowReadNV | ";
    if ( value & AccessFlagBits2::eOpticalFlowWriteNV ) result += "OpticalFlowWriteNV | ";

    // substr drops the trailing " | " separator appended after the last name.
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }
+
+  VULKAN_HPP_INLINE std::string to_string( SubmitFlags value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & SubmitFlagBits::eProtected ) result += "Protected | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( RenderingFlags value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & RenderingFlagBits::eContentsSecondaryCommandBuffers ) result += "ContentsSecondaryCommandBuffers | ";
+    if ( value & RenderingFlagBits::eSuspending ) result += "Suspending | ";
+    if ( value & RenderingFlagBits::eResuming ) result += "Resuming | ";
+    if ( value & RenderingFlagBits::eEnableLegacyDitheringEXT ) result += "EnableLegacyDitheringEXT | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( FormatFeatureFlags2 value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & FormatFeatureFlagBits2::eSampledImage ) result += "SampledImage | ";
+    if ( value & FormatFeatureFlagBits2::eStorageImage ) result += "StorageImage | ";
+    if ( value & FormatFeatureFlagBits2::eStorageImageAtomic ) result += "StorageImageAtomic | ";
+    if ( value & FormatFeatureFlagBits2::eUniformTexelBuffer ) result += "UniformTexelBuffer | ";
+    if ( value & FormatFeatureFlagBits2::eStorageTexelBuffer ) result += "StorageTexelBuffer | ";
+    if ( value & FormatFeatureFlagBits2::eStorageTexelBufferAtomic ) result += "StorageTexelBufferAtomic | ";
+    if ( value & FormatFeatureFlagBits2::eVertexBuffer ) result += "VertexBuffer | ";
+    if ( value & FormatFeatureFlagBits2::eColorAttachment ) result += "ColorAttachment | ";
+    if ( value & FormatFeatureFlagBits2::eColorAttachmentBlend ) result += "ColorAttachmentBlend | ";
+    if ( value & FormatFeatureFlagBits2::eDepthStencilAttachment ) result += "DepthStencilAttachment | ";
+    if ( value & FormatFeatureFlagBits2::eBlitSrc ) result += "BlitSrc | ";
+    if ( value & FormatFeatureFlagBits2::eBlitDst ) result += "BlitDst | ";
+    if ( value & FormatFeatureFlagBits2::eSampledImageFilterLinear ) result += "SampledImageFilterLinear | ";
+    if ( value & FormatFeatureFlagBits2::eSampledImageFilterCubic ) result += "SampledImageFilterCubic | ";
+    if ( value & FormatFeatureFlagBits2::eTransferSrc ) result += "TransferSrc | ";
+    if ( value & FormatFeatureFlagBits2::eTransferDst ) result += "TransferDst | ";
+    if ( value & FormatFeatureFlagBits2::eSampledImageFilterMinmax ) result += "SampledImageFilterMinmax | ";
+    if ( value & FormatFeatureFlagBits2::eMidpointChromaSamples ) result += "MidpointChromaSamples | ";
+    if ( value & FormatFeatureFlagBits2::eSampledImageYcbcrConversionLinearFilter ) result += "SampledImageYcbcrConversionLinearFilter | ";
+    if ( value & FormatFeatureFlagBits2::eSampledImageYcbcrConversionSeparateReconstructionFilter ) result += "SampledImageYcbcrConversionSeparateReconstructionFilter | ";
+    if ( value & FormatFeatureFlagBits2::eSampledImageYcbcrConversionChromaReconstructionExplicit ) result += "SampledImageYcbcrConversionChromaReconstructionExplicit | ";
+    if ( value & FormatFeatureFlagBits2::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable ) result += "SampledImageYcbcrConversionChromaReconstructionExplicitForceable | ";
+    if ( value & FormatFeatureFlagBits2::eDisjoint ) result += "Disjoint | ";
+    if ( value & FormatFeatureFlagBits2::eCositedChromaSamples ) result += "CositedChromaSamples | ";
+    if ( value & FormatFeatureFlagBits2::eStorageReadWithoutFormat ) result += "StorageReadWithoutFormat | ";
+    if ( value & FormatFeatureFlagBits2::eStorageWriteWithoutFormat ) result += "StorageWriteWithoutFormat | ";
+    if ( value & FormatFeatureFlagBits2::eSampledImageDepthComparison ) result += "SampledImageDepthComparison | ";
+    if ( value & FormatFeatureFlagBits2::eVideoDecodeOutputKHR ) result += "VideoDecodeOutputKHR | ";
+    if ( value & FormatFeatureFlagBits2::eVideoDecodeDpbKHR ) result += "VideoDecodeDpbKHR | ";
+    if ( value & FormatFeatureFlagBits2::eAccelerationStructureVertexBufferKHR ) result += "AccelerationStructureVertexBufferKHR | ";
+    if ( value & FormatFeatureFlagBits2::eFragmentDensityMapEXT ) result += "FragmentDensityMapEXT | ";
+    if ( value & FormatFeatureFlagBits2::eFragmentShadingRateAttachmentKHR ) result += "FragmentShadingRateAttachmentKHR | ";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    if ( value & FormatFeatureFlagBits2::eVideoEncodeInputKHR ) result += "VideoEncodeInputKHR | ";
+    if ( value & FormatFeatureFlagBits2::eVideoEncodeDpbKHR ) result += "VideoEncodeDpbKHR | ";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+    if ( value & FormatFeatureFlagBits2::eLinearColorAttachmentNV ) result += "LinearColorAttachmentNV | ";
+    if ( value & FormatFeatureFlagBits2::eWeightImageQCOM ) result += "WeightImageQCOM | ";
+    if ( value & FormatFeatureFlagBits2::eWeightSampledImageQCOM ) result += "WeightSampledImageQCOM | ";
+    if ( value & FormatFeatureFlagBits2::eBlockMatchingQCOM ) result += "BlockMatchingQCOM | ";
+    if ( value & FormatFeatureFlagBits2::eBoxFilterSampledQCOM ) result += "BoxFilterSampledQCOM | ";
+    if ( value & FormatFeatureFlagBits2::eOpticalFlowImageNV ) result += "OpticalFlowImageNV | ";
+    if ( value & FormatFeatureFlagBits2::eOpticalFlowVectorNV ) result += "OpticalFlowVectorNV | ";
+    if ( value & FormatFeatureFlagBits2::eOpticalFlowCostNV ) result += "OpticalFlowCostNV | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  //=== VK_KHR_surface ===
+
+  VULKAN_HPP_INLINE std::string to_string( CompositeAlphaFlagsKHR value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & CompositeAlphaFlagBitsKHR::eOpaque ) result += "Opaque | ";
+    if ( value & CompositeAlphaFlagBitsKHR::ePreMultiplied ) result += "PreMultiplied | ";
+    if ( value & CompositeAlphaFlagBitsKHR::ePostMultiplied ) result += "PostMultiplied | ";
+    if ( value & CompositeAlphaFlagBitsKHR::eInherit ) result += "Inherit | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  //=== VK_KHR_swapchain ===
+
+  VULKAN_HPP_INLINE std::string to_string( SwapchainCreateFlagsKHR value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions ) result += "SplitInstanceBindRegions | ";
+    if ( value & SwapchainCreateFlagBitsKHR::eProtected ) result += "Protected | ";
+    if ( value & SwapchainCreateFlagBitsKHR::eMutableFormat ) result += "MutableFormat | ";
+    if ( value & SwapchainCreateFlagBitsKHR::eDeferredMemoryAllocationEXT ) result += "DeferredMemoryAllocationEXT | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( DeviceGroupPresentModeFlagsKHR value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & DeviceGroupPresentModeFlagBitsKHR::eLocal ) result += "Local | ";
+    if ( value & DeviceGroupPresentModeFlagBitsKHR::eRemote ) result += "Remote | ";
+    if ( value & DeviceGroupPresentModeFlagBitsKHR::eSum ) result += "Sum | ";
+    if ( value & DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice ) result += "LocalMultiDevice | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  //=== VK_KHR_display ===
+
+  VULKAN_HPP_INLINE std::string to_string( DisplayModeCreateFlagsKHR )
+  {
+    return "{}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( DisplayPlaneAlphaFlagsKHR value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & DisplayPlaneAlphaFlagBitsKHR::eOpaque ) result += "Opaque | ";
+    if ( value & DisplayPlaneAlphaFlagBitsKHR::eGlobal ) result += "Global | ";
+    if ( value & DisplayPlaneAlphaFlagBitsKHR::ePerPixel ) result += "PerPixel | ";
+    if ( value & DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied ) result += "PerPixelPremultiplied | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( DisplaySurfaceCreateFlagsKHR )
+  {
+    return "{}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( SurfaceTransformFlagsKHR value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & SurfaceTransformFlagBitsKHR::eIdentity ) result += "Identity | ";
+    if ( value & SurfaceTransformFlagBitsKHR::eRotate90 ) result += "Rotate90 | ";
+    if ( value & SurfaceTransformFlagBitsKHR::eRotate180 ) result += "Rotate180 | ";
+    if ( value & SurfaceTransformFlagBitsKHR::eRotate270 ) result += "Rotate270 | ";
+    if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirror ) result += "HorizontalMirror | ";
+    if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90 ) result += "HorizontalMirrorRotate90 | ";
+    if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180 ) result += "HorizontalMirrorRotate180 | ";
+    if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270 ) result += "HorizontalMirrorRotate270 | ";
+    if ( value & SurfaceTransformFlagBitsKHR::eInherit ) result += "Inherit | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+#if defined( VK_USE_PLATFORM_XLIB_KHR )
+  //=== VK_KHR_xlib_surface ===
+
+  VULKAN_HPP_INLINE std::string to_string( XlibSurfaceCreateFlagsKHR )
+  {
+    return "{}";
+  }
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+#if defined( VK_USE_PLATFORM_XCB_KHR )
+  //=== VK_KHR_xcb_surface ===
+
+  VULKAN_HPP_INLINE std::string to_string( XcbSurfaceCreateFlagsKHR )
+  {
+    return "{}";
+  }
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
+  //=== VK_KHR_wayland_surface ===
+
+  VULKAN_HPP_INLINE std::string to_string( WaylandSurfaceCreateFlagsKHR )
+  {
+    return "{}";
+  }
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  //=== VK_KHR_android_surface ===
+
+  VULKAN_HPP_INLINE std::string to_string( AndroidSurfaceCreateFlagsKHR )
+  {
+    return "{}";
+  }
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_win32_surface ===
+
+  VULKAN_HPP_INLINE std::string to_string( Win32SurfaceCreateFlagsKHR )
+  {
+    return "{}";
+  }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_EXT_debug_report ===
+
+  VULKAN_HPP_INLINE std::string to_string( DebugReportFlagsEXT value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & DebugReportFlagBitsEXT::eInformation ) result += "Information | ";
+    if ( value & DebugReportFlagBitsEXT::eWarning ) result += "Warning | ";
+    if ( value & DebugReportFlagBitsEXT::ePerformanceWarning ) result += "PerformanceWarning | ";
+    if ( value & DebugReportFlagBitsEXT::eError ) result += "Error | ";
+    if ( value & DebugReportFlagBitsEXT::eDebug ) result += "Debug | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  //=== VK_KHR_video_queue ===
+
+  VULKAN_HPP_INLINE std::string to_string( VideoCodecOperationFlagsKHR value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    if ( value & VideoCodecOperationFlagBitsKHR::eEncodeH264EXT ) result += "EncodeH264EXT | ";
+    if ( value & VideoCodecOperationFlagBitsKHR::eEncodeH265EXT ) result += "EncodeH265EXT | ";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+    if ( value & VideoCodecOperationFlagBitsKHR::eDecodeH264 ) result += "DecodeH264 | ";
+    if ( value & VideoCodecOperationFlagBitsKHR::eDecodeH265 ) result += "DecodeH265 | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( VideoChromaSubsamplingFlagsKHR value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & VideoChromaSubsamplingFlagBitsKHR::eMonochrome ) result += "Monochrome | ";
+    if ( value & VideoChromaSubsamplingFlagBitsKHR::e420 ) result += "420 | ";
+    if ( value & VideoChromaSubsamplingFlagBitsKHR::e422 ) result += "422 | ";
+    if ( value & VideoChromaSubsamplingFlagBitsKHR::e444 ) result += "444 | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( VideoComponentBitDepthFlagsKHR value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & VideoComponentBitDepthFlagBitsKHR::e8 ) result += "8 | ";
+    if ( value & VideoComponentBitDepthFlagBitsKHR::e10 ) result += "10 | ";
+    if ( value & VideoComponentBitDepthFlagBitsKHR::e12 ) result += "12 | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( VideoCapabilityFlagsKHR value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & VideoCapabilityFlagBitsKHR::eProtectedContent ) result += "ProtectedContent | ";
+    if ( value & VideoCapabilityFlagBitsKHR::eSeparateReferenceImages ) result += "SeparateReferenceImages | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( VideoSessionCreateFlagsKHR value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & VideoSessionCreateFlagBitsKHR::eProtectedContent ) result += "ProtectedContent | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( VideoSessionParametersCreateFlagsKHR )
+  {
+    return "{}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( VideoBeginCodingFlagsKHR )
+  {
+    return "{}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( VideoEndCodingFlagsKHR )
+  {
+    return "{}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( VideoCodingControlFlagsKHR value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & VideoCodingControlFlagBitsKHR::eReset ) result += "Reset | ";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    if ( value & VideoCodingControlFlagBitsKHR::eEncodeRateControl ) result += "EncodeRateControl | ";
+    if ( value & VideoCodingControlFlagBitsKHR::eEncodeRateControlLayer ) result += "EncodeRateControlLayer | ";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  //=== VK_KHR_video_decode_queue ===
+
+  VULKAN_HPP_INLINE std::string to_string( VideoDecodeCapabilityFlagsKHR value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & VideoDecodeCapabilityFlagBitsKHR::eDpbAndOutputCoincide ) result += "DpbAndOutputCoincide | ";
+    if ( value & VideoDecodeCapabilityFlagBitsKHR::eDpbAndOutputDistinct ) result += "DpbAndOutputDistinct | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( VideoDecodeUsageFlagsKHR value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & VideoDecodeUsageFlagBitsKHR::eTranscoding ) result += "Transcoding | ";
+    if ( value & VideoDecodeUsageFlagBitsKHR::eOffline ) result += "Offline | ";
+    if ( value & VideoDecodeUsageFlagBitsKHR::eStreaming ) result += "Streaming | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( VideoDecodeFlagsKHR )
+  {
+    return "{}";
+  }
+
+  //=== VK_EXT_transform_feedback ===
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateStreamCreateFlagsEXT )
+  {
+    return "{}";
+  }
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  //=== VK_EXT_video_encode_h264 ===
+
+  VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264CapabilityFlagsEXT value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eDirect8X8InferenceEnabled ) result += "Direct8X8InferenceEnabled | ";
+    if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eDirect8X8InferenceDisabled ) result += "Direct8X8InferenceDisabled | ";
+    if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eSeparateColourPlane ) result += "SeparateColourPlane | ";
+    if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eQpprimeYZeroTransformBypass ) result += "QpprimeYZeroTransformBypass | ";
+    if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eScalingLists ) result += "ScalingLists | ";
+    if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eHrdCompliance ) result += "HrdCompliance | ";
+    if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eChromaQpOffset ) result += "ChromaQpOffset | ";
+    if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eSecondChromaQpOffset ) result += "SecondChromaQpOffset | ";
+    if ( value & VideoEncodeH264CapabilityFlagBitsEXT::ePicInitQpMinus26 ) result += "PicInitQpMinus26 | ";
+    if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eWeightedPred ) result += "WeightedPred | ";
+    if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eWeightedBipredExplicit ) result += "WeightedBipredExplicit | ";
+    if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eWeightedBipredImplicit ) result += "WeightedBipredImplicit | ";
+    if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eWeightedPredNoTable ) result += "WeightedPredNoTable | ";
+    if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eTransform8X8 ) result += "Transform8X8 | ";
+    if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eCabac ) result += "Cabac | ";
+    if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eCavlc ) result += "Cavlc | ";
+    if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eDeblockingFilterDisabled ) result += "DeblockingFilterDisabled | ";
+    if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eDeblockingFilterEnabled ) result += "DeblockingFilterEnabled | ";
+    if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eDeblockingFilterPartial ) result += "DeblockingFilterPartial | ";
+    if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eDisableDirectSpatialMvPred ) result += "DisableDirectSpatialMvPred | ";
+    if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eMultipleSlicePerFrame ) result += "MultipleSlicePerFrame | ";
+    if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eSliceMbCount ) result += "SliceMbCount | ";
+    if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eRowUnalignedSlice ) result += "RowUnalignedSlice | ";
+    if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eDifferentSliceType ) result += "DifferentSliceType | ";
+    if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eBFrameInL1List ) result += "BFrameInL1List | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264InputModeFlagsEXT value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & VideoEncodeH264InputModeFlagBitsEXT::eFrame ) result += "Frame | ";
+    if ( value & VideoEncodeH264InputModeFlagBitsEXT::eSlice ) result += "Slice | ";
+    if ( value & VideoEncodeH264InputModeFlagBitsEXT::eNonVcl ) result += "NonVcl | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264OutputModeFlagsEXT value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & VideoEncodeH264OutputModeFlagBitsEXT::eFrame ) result += "Frame | ";
+    if ( value & VideoEncodeH264OutputModeFlagBitsEXT::eSlice ) result += "Slice | ";
+    if ( value & VideoEncodeH264OutputModeFlagBitsEXT::eNonVcl ) result += "NonVcl | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  //=== VK_EXT_video_encode_h265 ===
+
+  VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265CapabilityFlagsEXT value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eSeparateColourPlane ) result += "SeparateColourPlane | ";
+    if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eScalingLists ) result += "ScalingLists | ";
+    if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eSampleAdaptiveOffsetEnabled ) result += "SampleAdaptiveOffsetEnabled | ";
+    if ( value & VideoEncodeH265CapabilityFlagBitsEXT::ePcmEnable ) result += "PcmEnable | ";
+    if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eSpsTemporalMvpEnabled ) result += "SpsTemporalMvpEnabled | ";
+    if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eHrdCompliance ) result += "HrdCompliance | ";
+    if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eInitQpMinus26 ) result += "InitQpMinus26 | ";
+    if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eLog2ParallelMergeLevelMinus2 ) result += "Log2ParallelMergeLevelMinus2 | ";
+    if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eSignDataHidingEnabled ) result += "SignDataHidingEnabled | ";
+    if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eTransformSkipEnabled ) result += "TransformSkipEnabled | ";
+    if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eTransformSkipDisabled ) result += "TransformSkipDisabled | ";
+    if ( value & VideoEncodeH265CapabilityFlagBitsEXT::ePpsSliceChromaQpOffsetsPresent ) result += "PpsSliceChromaQpOffsetsPresent | ";
+    if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eWeightedPred ) result += "WeightedPred | ";
+    if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eWeightedBipred ) result += "WeightedBipred | ";
+    if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eWeightedPredNoTable ) result += "WeightedPredNoTable | ";
+    if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eTransquantBypassEnabled ) result += "TransquantBypassEnabled | ";
+    if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eEntropyCodingSyncEnabled ) result += "EntropyCodingSyncEnabled | ";
+    if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eDeblockingFilterOverrideEnabled ) result += "DeblockingFilterOverrideEnabled | ";
+    if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eMultipleTilePerFrame ) result += "MultipleTilePerFrame | ";
+    if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eMultipleSlicePerTile ) result += "MultipleSlicePerTile | ";
+    if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eMultipleTilePerSlice ) result += "MultipleTilePerSlice | ";
+    if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eSliceSegmentCtbCount ) result += "SliceSegmentCtbCount | ";
+    if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eRowUnalignedSliceSegment ) result += "RowUnalignedSliceSegment | ";
+    if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eDependentSliceSegment ) result += "DependentSliceSegment | ";
+    if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eDifferentSliceType ) result += "DifferentSliceType | ";
+    if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eBFrameInL1List ) result += "BFrameInL1List | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265InputModeFlagsEXT value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & VideoEncodeH265InputModeFlagBitsEXT::eFrame ) result += "Frame | ";
+    if ( value & VideoEncodeH265InputModeFlagBitsEXT::eSliceSegment ) result += "SliceSegment | ";
+    if ( value & VideoEncodeH265InputModeFlagBitsEXT::eNonVcl ) result += "NonVcl | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265OutputModeFlagsEXT value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & VideoEncodeH265OutputModeFlagBitsEXT::eFrame ) result += "Frame | ";
+    if ( value & VideoEncodeH265OutputModeFlagBitsEXT::eSliceSegment ) result += "SliceSegment | ";
+    if ( value & VideoEncodeH265OutputModeFlagBitsEXT::eNonVcl ) result += "NonVcl | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265CtbSizeFlagsEXT value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & VideoEncodeH265CtbSizeFlagBitsEXT::e16 ) result += "16 | ";
+    if ( value & VideoEncodeH265CtbSizeFlagBitsEXT::e32 ) result += "32 | ";
+    if ( value & VideoEncodeH265CtbSizeFlagBitsEXT::e64 ) result += "64 | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265TransformBlockSizeFlagsEXT value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & VideoEncodeH265TransformBlockSizeFlagBitsEXT::e4 ) result += "4 | ";
+    if ( value & VideoEncodeH265TransformBlockSizeFlagBitsEXT::e8 ) result += "8 | ";
+    if ( value & VideoEncodeH265TransformBlockSizeFlagBitsEXT::e16 ) result += "16 | ";
+    if ( value & VideoEncodeH265TransformBlockSizeFlagBitsEXT::e32 ) result += "32 | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  //=== VK_KHR_video_decode_h264 ===
+
+  VULKAN_HPP_INLINE std::string to_string( VideoDecodeH264PictureLayoutFlagsKHR value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & VideoDecodeH264PictureLayoutFlagBitsKHR::eInterlacedInterleavedLines ) result += "InterlacedInterleavedLines | ";
+    if ( value & VideoDecodeH264PictureLayoutFlagBitsKHR::eInterlacedSeparatePlanes ) result += "InterlacedSeparatePlanes | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+#if defined( VK_USE_PLATFORM_GGP )
+  //=== VK_GGP_stream_descriptor_surface ===
+
+  VULKAN_HPP_INLINE std::string to_string( StreamDescriptorSurfaceCreateFlagsGGP )
+  {
+    return "{}";
+  }
+#endif /*VK_USE_PLATFORM_GGP*/
+
+  //=== VK_NV_external_memory_capabilities ===
+
+  VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlagsNV value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32 ) result += "OpaqueWin32 | ";
+    if ( value & ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt ) result += "OpaqueWin32Kmt | ";
+    if ( value & ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image ) result += "D3D11Image | ";
+    if ( value & ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt ) result += "D3D11ImageKmt | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlagsNV value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly ) result += "DedicatedOnly | ";
+    if ( value & ExternalMemoryFeatureFlagBitsNV::eExportable ) result += "Exportable | ";
+    if ( value & ExternalMemoryFeatureFlagBitsNV::eImportable ) result += "Importable | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+#if defined( VK_USE_PLATFORM_VI_NN )
+  //=== VK_NN_vi_surface ===
+
+  VULKAN_HPP_INLINE std::string to_string( ViSurfaceCreateFlagsNN )
+  {
+    return "{}";
+  }
+#endif /*VK_USE_PLATFORM_VI_NN*/
+
+  //=== VK_EXT_conditional_rendering ===
+
+  VULKAN_HPP_INLINE std::string to_string( ConditionalRenderingFlagsEXT value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & ConditionalRenderingFlagBitsEXT::eInverted ) result += "Inverted | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  //=== VK_EXT_display_surface_counter ===
+
+  VULKAN_HPP_INLINE std::string to_string( SurfaceCounterFlagsEXT value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & SurfaceCounterFlagBitsEXT::eVblank ) result += "Vblank | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  //=== VK_NV_viewport_swizzle ===
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineViewportSwizzleStateCreateFlagsNV )
+  {
+    return "{}";
+  }
+
+  //=== VK_EXT_discard_rectangles ===
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineDiscardRectangleStateCreateFlagsEXT )
+  {
+    return "{}";
+  }
+
+  //=== VK_EXT_conservative_rasterization ===
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationConservativeStateCreateFlagsEXT )
+  {
+    return "{}";
+  }
+
+  //=== VK_EXT_depth_clip_enable ===
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationDepthClipStateCreateFlagsEXT )
+  {
+    return "{}";
+  }
+
+  //=== VK_KHR_performance_query ===
+
+  VULKAN_HPP_INLINE std::string to_string( PerformanceCounterDescriptionFlagsKHR value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & PerformanceCounterDescriptionFlagBitsKHR::ePerformanceImpacting ) result += "PerformanceImpacting | ";
+    if ( value & PerformanceCounterDescriptionFlagBitsKHR::eConcurrentlyImpacted ) result += "ConcurrentlyImpacted | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( AcquireProfilingLockFlagsKHR )
+  {
+    return "{}";
+  }
+
+#if defined( VK_USE_PLATFORM_IOS_MVK )
+  //=== VK_MVK_ios_surface ===
+
+  VULKAN_HPP_INLINE std::string to_string( IOSSurfaceCreateFlagsMVK )
+  {
+    return "{}";
+  }
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+#if defined( VK_USE_PLATFORM_MACOS_MVK )
+  //=== VK_MVK_macos_surface ===
+
+  VULKAN_HPP_INLINE std::string to_string( MacOSSurfaceCreateFlagsMVK )
+  {
+    return "{}";
+  }
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+
+  //=== VK_EXT_debug_utils ===
+
+  VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageSeverityFlagsEXT value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eVerbose ) result += "Verbose | ";
+    if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eInfo ) result += "Info | ";
+    if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eWarning ) result += "Warning | ";
+    if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eError ) result += "Error | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageTypeFlagsEXT value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & DebugUtilsMessageTypeFlagBitsEXT::eGeneral ) result += "General | ";
+    if ( value & DebugUtilsMessageTypeFlagBitsEXT::eValidation ) result += "Validation | ";
+    if ( value & DebugUtilsMessageTypeFlagBitsEXT::ePerformance ) result += "Performance | ";
+    if ( value & DebugUtilsMessageTypeFlagBitsEXT::eDeviceAddressBinding ) result += "DeviceAddressBinding | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCallbackDataFlagsEXT )
+  {
+    return "{}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCreateFlagsEXT )
+  {
+    return "{}";
+  }
+
+  //=== VK_NV_fragment_coverage_to_color ===
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineCoverageToColorStateCreateFlagsNV )
+  {
+    return "{}";
+  }
+
+  //=== VK_KHR_acceleration_structure ===
+
+  VULKAN_HPP_INLINE std::string to_string( GeometryFlagsKHR value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & GeometryFlagBitsKHR::eOpaque ) result += "Opaque | ";
+    if ( value & GeometryFlagBitsKHR::eNoDuplicateAnyHitInvocation ) result += "NoDuplicateAnyHitInvocation | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  // Pretty-printers for ray-tracing bitmasks: append the name of every set,
+  // known bit followed by " | ", then trim the trailing 3-char separator with
+  // substr to yield "{ A | B }".
+  // NOTE(review): bits not listed below are silently dropped; a non-zero value
+  // composed only of unknown bits produces "{  }" (empty substr is clamped).
+  VULKAN_HPP_INLINE std::string to_string( GeometryInstanceFlagsKHR value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable ) result += "TriangleFacingCullDisable | ";
+    if ( value & GeometryInstanceFlagBitsKHR::eTriangleFlipFacing ) result += "TriangleFlipFacing | ";
+    if ( value & GeometryInstanceFlagBitsKHR::eForceOpaque ) result += "ForceOpaque | ";
+    if ( value & GeometryInstanceFlagBitsKHR::eForceNoOpaque ) result += "ForceNoOpaque | ";
+    if ( value & GeometryInstanceFlagBitsKHR::eForceOpacityMicromap2StateEXT ) result += "ForceOpacityMicromap2StateEXT | ";
+    if ( value & GeometryInstanceFlagBitsKHR::eDisableOpacityMicromapsEXT ) result += "DisableOpacityMicromapsEXT | ";
+
+    // Drop the trailing " | " appended after the last named bit.
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( BuildAccelerationStructureFlagsKHR value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & BuildAccelerationStructureFlagBitsKHR::eAllowUpdate ) result += "AllowUpdate | ";
+    if ( value & BuildAccelerationStructureFlagBitsKHR::eAllowCompaction ) result += "AllowCompaction | ";
+    if ( value & BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace ) result += "PreferFastTrace | ";
+    if ( value & BuildAccelerationStructureFlagBitsKHR::ePreferFastBuild ) result += "PreferFastBuild | ";
+    if ( value & BuildAccelerationStructureFlagBitsKHR::eLowMemory ) result += "LowMemory | ";
+    if ( value & BuildAccelerationStructureFlagBitsKHR::eMotionNV ) result += "MotionNV | ";
+    if ( value & BuildAccelerationStructureFlagBitsKHR::eAllowOpacityMicromapUpdateEXT ) result += "AllowOpacityMicromapUpdateEXT | ";
+    if ( value & BuildAccelerationStructureFlagBitsKHR::eAllowDisableOpacityMicromapsEXT ) result += "AllowDisableOpacityMicromapsEXT | ";
+    if ( value & BuildAccelerationStructureFlagBitsKHR::eAllowOpacityMicromapDataUpdateEXT ) result += "AllowOpacityMicromapDataUpdateEXT | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( AccelerationStructureCreateFlagsKHR value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & AccelerationStructureCreateFlagBitsKHR::eDeviceAddressCaptureReplay ) result += "DeviceAddressCaptureReplay | ";
+    if ( value & AccelerationStructureCreateFlagBitsKHR::eDescriptorBufferCaptureReplayEXT ) result += "DescriptorBufferCaptureReplayEXT | ";
+    if ( value & AccelerationStructureCreateFlagBitsKHR::eMotionNV ) result += "MotionNV | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  // The *Flags types below currently define no individual flag bits, so their
+  // to_string overloads unconditionally return "{}" (parameter intentionally
+  // unnamed to avoid unused-parameter warnings).
+  //=== VK_NV_framebuffer_mixed_samples ===
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineCoverageModulationStateCreateFlagsNV )
+  {
+    return "{}";
+  }
+
+  //=== VK_EXT_validation_cache ===
+
+  VULKAN_HPP_INLINE std::string to_string( ValidationCacheCreateFlagsEXT )
+  {
+    return "{}";
+  }
+
+  //=== VK_AMD_pipeline_compiler_control ===
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineCompilerControlFlagsAMD )
+  {
+    return "{}";
+  }
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_imagepipe_surface ===
+
+  VULKAN_HPP_INLINE std::string to_string( ImagePipeSurfaceCreateFlagsFUCHSIA )
+  {
+    return "{}";
+  }
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+  //=== VK_EXT_metal_surface ===
+
+  VULKAN_HPP_INLINE std::string to_string( MetalSurfaceCreateFlagsEXT )
+  {
+    return "{}";
+  }
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  //=== VK_AMD_shader_core_properties2 ===
+
+  VULKAN_HPP_INLINE std::string to_string( ShaderCorePropertiesFlagsAMD )
+  {
+    return "{}";
+  }
+
+  //=== VK_NV_coverage_reduction_mode ===
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineCoverageReductionStateCreateFlagsNV )
+  {
+    return "{}";
+  }
+
+  //=== VK_EXT_headless_surface ===
+
+  VULKAN_HPP_INLINE std::string to_string( HeadlessSurfaceCreateFlagsEXT )
+  {
+    return "{}";
+  }
+
+  // Bitmask pretty-printers: list each set, known bit and strip the trailing
+  // " | " separator (3 chars) before wrapping the result in "{ ... }".
+  //=== VK_EXT_surface_maintenance1 ===
+
+  VULKAN_HPP_INLINE std::string to_string( PresentScalingFlagsEXT value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & PresentScalingFlagBitsEXT::eOneToOne ) result += "OneToOne | ";
+    if ( value & PresentScalingFlagBitsEXT::eAspectRatioStretch ) result += "AspectRatioStretch | ";
+    if ( value & PresentScalingFlagBitsEXT::eStretch ) result += "Stretch | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( PresentGravityFlagsEXT value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & PresentGravityFlagBitsEXT::eMin ) result += "Min | ";
+    if ( value & PresentGravityFlagBitsEXT::eMax ) result += "Max | ";
+    if ( value & PresentGravityFlagBitsEXT::eCentered ) result += "Centered | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  //=== VK_NV_device_generated_commands ===
+
+  VULKAN_HPP_INLINE std::string to_string( IndirectStateFlagsNV value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & IndirectStateFlagBitsNV::eFlagFrontface ) result += "FlagFrontface | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( IndirectCommandsLayoutUsageFlagsNV value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & IndirectCommandsLayoutUsageFlagBitsNV::eExplicitPreprocess ) result += "ExplicitPreprocess | ";
+    if ( value & IndirectCommandsLayoutUsageFlagBitsNV::eIndexedSequences ) result += "IndexedSequences | ";
+    if ( value & IndirectCommandsLayoutUsageFlagBitsNV::eUnorderedSequences ) result += "UnorderedSequences | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  //=== VK_EXT_device_memory_report ===
+
+  // No flag bits defined for this type yet; always "{}".
+  VULKAN_HPP_INLINE std::string to_string( DeviceMemoryReportFlagsEXT )
+  {
+    return "{}";
+  }
+
+// Video-encode bitmask pretty-printers; only compiled when the provisional
+// beta extensions are enabled.
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  //=== VK_KHR_video_encode_queue ===
+
+  // No flag bits defined for this type yet; always "{}".
+  VULKAN_HPP_INLINE std::string to_string( VideoEncodeFlagsKHR )
+  {
+    return "{}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( VideoEncodeCapabilityFlagsKHR value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & VideoEncodeCapabilityFlagBitsKHR::ePrecedingExternallyEncodedBytes ) result += "PrecedingExternallyEncodedBytes | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( VideoEncodeUsageFlagsKHR value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & VideoEncodeUsageFlagBitsKHR::eTranscoding ) result += "Transcoding | ";
+    if ( value & VideoEncodeUsageFlagBitsKHR::eStreaming ) result += "Streaming | ";
+    if ( value & VideoEncodeUsageFlagBitsKHR::eRecording ) result += "Recording | ";
+    if ( value & VideoEncodeUsageFlagBitsKHR::eConferencing ) result += "Conferencing | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( VideoEncodeContentFlagsKHR value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & VideoEncodeContentFlagBitsKHR::eCamera ) result += "Camera | ";
+    if ( value & VideoEncodeContentFlagBitsKHR::eDesktop ) result += "Desktop | ";
+    if ( value & VideoEncodeContentFlagBitsKHR::eRendered ) result += "Rendered | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  // No flag bits defined for this type yet; always "{}".
+  VULKAN_HPP_INLINE std::string to_string( VideoEncodeRateControlFlagsKHR )
+  {
+    return "{}";
+  }
+
+  // Pretty-prints a VideoEncodeRateControlModeFlagsKHR bitmask.
+  //
+  // Bug fix: no individual flag bits are enumerated for this type, so for any
+  // non-zero value `result` stayed empty and the generic epilogue
+  // `"{ " + result.substr( 0, result.size() - 3 ) + " }"` produced the
+  // malformed string "{  }" (the substr count underflows and is clamped to an
+  // empty string). Return "{}" whenever no bit name was emitted instead.
+  VULKAN_HPP_INLINE std::string to_string( VideoEncodeRateControlModeFlagsKHR value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+
+    // No named bits matched (none are defined yet): report an empty set
+    // rather than the malformed "{  }".
+    if ( result.empty() )
+      return "{}";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  // Bitmask pretty-printers: list each set, known bit and strip the trailing
+  // " | " separator before wrapping in "{ ... }".
+  //=== VK_NV_device_diagnostics_config ===
+
+  VULKAN_HPP_INLINE std::string to_string( DeviceDiagnosticsConfigFlagsNV value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderDebugInfo ) result += "EnableShaderDebugInfo | ";
+    if ( value & DeviceDiagnosticsConfigFlagBitsNV::eEnableResourceTracking ) result += "EnableResourceTracking | ";
+    if ( value & DeviceDiagnosticsConfigFlagBitsNV::eEnableAutomaticCheckpoints ) result += "EnableAutomaticCheckpoints | ";
+    if ( value & DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderErrorReporting ) result += "EnableShaderErrorReporting | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+  //=== VK_EXT_metal_objects ===
+
+  VULKAN_HPP_INLINE std::string to_string( ExportMetalObjectTypeFlagsEXT value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & ExportMetalObjectTypeFlagBitsEXT::eMetalDevice ) result += "MetalDevice | ";
+    if ( value & ExportMetalObjectTypeFlagBitsEXT::eMetalCommandQueue ) result += "MetalCommandQueue | ";
+    if ( value & ExportMetalObjectTypeFlagBitsEXT::eMetalBuffer ) result += "MetalBuffer | ";
+    if ( value & ExportMetalObjectTypeFlagBitsEXT::eMetalTexture ) result += "MetalTexture | ";
+    if ( value & ExportMetalObjectTypeFlagBitsEXT::eMetalIosurface ) result += "MetalIosurface | ";
+    if ( value & ExportMetalObjectTypeFlagBitsEXT::eMetalSharedEvent ) result += "MetalSharedEvent | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  //=== VK_EXT_graphics_pipeline_library ===
+
+  VULKAN_HPP_INLINE std::string to_string( GraphicsPipelineLibraryFlagsEXT value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & GraphicsPipelineLibraryFlagBitsEXT::eVertexInputInterface ) result += "VertexInputInterface | ";
+    if ( value & GraphicsPipelineLibraryFlagBitsEXT::ePreRasterizationShaders ) result += "PreRasterizationShaders | ";
+    if ( value & GraphicsPipelineLibraryFlagBitsEXT::eFragmentShader ) result += "FragmentShader | ";
+    if ( value & GraphicsPipelineLibraryFlagBitsEXT::eFragmentOutputInterface ) result += "FragmentOutputInterface | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  //=== VK_NV_ray_tracing_motion_blur ===
+
+  // No flag bits defined for these two types yet; always "{}".
+  VULKAN_HPP_INLINE std::string to_string( AccelerationStructureMotionInfoFlagsNV )
+  {
+    return "{}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( AccelerationStructureMotionInstanceFlagsNV )
+  {
+    return "{}";
+  }
+
+  //=== VK_EXT_image_compression_control ===
+
+  VULKAN_HPP_INLINE std::string to_string( ImageCompressionFlagsEXT value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & ImageCompressionFlagBitsEXT::eFixedRateDefault ) result += "FixedRateDefault | ";
+    if ( value & ImageCompressionFlagBitsEXT::eFixedRateExplicit ) result += "FixedRateExplicit | ";
+    if ( value & ImageCompressionFlagBitsEXT::eDisabled ) result += "Disabled | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  // One named bit per fixed compression rate, from 1 to 24 bits per component.
+  VULKAN_HPP_INLINE std::string to_string( ImageCompressionFixedRateFlagsEXT value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & ImageCompressionFixedRateFlagBitsEXT::e1Bpc ) result += "1Bpc | ";
+    if ( value & ImageCompressionFixedRateFlagBitsEXT::e2Bpc ) result += "2Bpc | ";
+    if ( value & ImageCompressionFixedRateFlagBitsEXT::e3Bpc ) result += "3Bpc | ";
+    if ( value & ImageCompressionFixedRateFlagBitsEXT::e4Bpc ) result += "4Bpc | ";
+    if ( value & ImageCompressionFixedRateFlagBitsEXT::e5Bpc ) result += "5Bpc | ";
+    if ( value & ImageCompressionFixedRateFlagBitsEXT::e6Bpc ) result += "6Bpc | ";
+    if ( value & ImageCompressionFixedRateFlagBitsEXT::e7Bpc ) result += "7Bpc | ";
+    if ( value & ImageCompressionFixedRateFlagBitsEXT::e8Bpc ) result += "8Bpc | ";
+    if ( value & ImageCompressionFixedRateFlagBitsEXT::e9Bpc ) result += "9Bpc | ";
+    if ( value & ImageCompressionFixedRateFlagBitsEXT::e10Bpc ) result += "10Bpc | ";
+    if ( value & ImageCompressionFixedRateFlagBitsEXT::e11Bpc ) result += "11Bpc | ";
+    if ( value & ImageCompressionFixedRateFlagBitsEXT::e12Bpc ) result += "12Bpc | ";
+    if ( value & ImageCompressionFixedRateFlagBitsEXT::e13Bpc ) result += "13Bpc | ";
+    if ( value & ImageCompressionFixedRateFlagBitsEXT::e14Bpc ) result += "14Bpc | ";
+    if ( value & ImageCompressionFixedRateFlagBitsEXT::e15Bpc ) result += "15Bpc | ";
+    if ( value & ImageCompressionFixedRateFlagBitsEXT::e16Bpc ) result += "16Bpc | ";
+    if ( value & ImageCompressionFixedRateFlagBitsEXT::e17Bpc ) result += "17Bpc | ";
+    if ( value & ImageCompressionFixedRateFlagBitsEXT::e18Bpc ) result += "18Bpc | ";
+    if ( value & ImageCompressionFixedRateFlagBitsEXT::e19Bpc ) result += "19Bpc | ";
+    if ( value & ImageCompressionFixedRateFlagBitsEXT::e20Bpc ) result += "20Bpc | ";
+    if ( value & ImageCompressionFixedRateFlagBitsEXT::e21Bpc ) result += "21Bpc | ";
+    if ( value & ImageCompressionFixedRateFlagBitsEXT::e22Bpc ) result += "22Bpc | ";
+    if ( value & ImageCompressionFixedRateFlagBitsEXT::e23Bpc ) result += "23Bpc | ";
+    if ( value & ImageCompressionFixedRateFlagBitsEXT::e24Bpc ) result += "24Bpc | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+// Platform-guarded and cross-platform bitmask pretty-printers.
+#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
+  //=== VK_EXT_directfb_surface ===
+
+  // No flag bits defined for this type yet; always "{}".
+  VULKAN_HPP_INLINE std::string to_string( DirectFBSurfaceCreateFlagsEXT )
+  {
+    return "{}";
+  }
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+
+  //=== VK_EXT_device_address_binding_report ===
+
+  VULKAN_HPP_INLINE std::string to_string( DeviceAddressBindingFlagsEXT value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & DeviceAddressBindingFlagBitsEXT::eInternalObject ) result += "InternalObject | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_buffer_collection ===
+
+  // No flag bits defined for this type yet; always "{}".
+  VULKAN_HPP_INLINE std::string to_string( ImageFormatConstraintsFlagsFUCHSIA )
+  {
+    return "{}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( ImageConstraintsInfoFlagsFUCHSIA value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & ImageConstraintsInfoFlagBitsFUCHSIA::eCpuReadRarely ) result += "CpuReadRarely | ";
+    if ( value & ImageConstraintsInfoFlagBitsFUCHSIA::eCpuReadOften ) result += "CpuReadOften | ";
+    if ( value & ImageConstraintsInfoFlagBitsFUCHSIA::eCpuWriteRarely ) result += "CpuWriteRarely | ";
+    if ( value & ImageConstraintsInfoFlagBitsFUCHSIA::eCpuWriteOften ) result += "CpuWriteOften | ";
+    if ( value & ImageConstraintsInfoFlagBitsFUCHSIA::eProtectedOptional ) result += "ProtectedOptional | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_SCREEN_QNX )
+  //=== VK_QNX_screen_surface ===
+
+  // No flag bits defined for this type yet; always "{}".
+  VULKAN_HPP_INLINE std::string to_string( ScreenSurfaceCreateFlagsQNX )
+  {
+    return "{}";
+  }
+#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+
+  //=== VK_EXT_opacity_micromap ===
+
+  // Bitmask pretty-printers: list each set, known bit and strip the trailing
+  // " | " separator before wrapping in "{ ... }".
+  VULKAN_HPP_INLINE std::string to_string( BuildMicromapFlagsEXT value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & BuildMicromapFlagBitsEXT::ePreferFastTrace ) result += "PreferFastTrace | ";
+    if ( value & BuildMicromapFlagBitsEXT::ePreferFastBuild ) result += "PreferFastBuild | ";
+    if ( value & BuildMicromapFlagBitsEXT::eAllowCompaction ) result += "AllowCompaction | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( MicromapCreateFlagsEXT value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & MicromapCreateFlagBitsEXT::eDeviceAddressCaptureReplay ) result += "DeviceAddressCaptureReplay | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  //=== VK_NV_memory_decompression ===
+
+  VULKAN_HPP_INLINE std::string to_string( MemoryDecompressionMethodFlagsNV value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & MemoryDecompressionMethodFlagBitsNV::eGdeflate10 ) result += "Gdeflate10 | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  //=== VK_LUNARG_direct_driver_loading ===
+
+  // No flag bits defined for this type yet; always "{}".
+  VULKAN_HPP_INLINE std::string to_string( DirectDriverLoadingFlagsLUNARG )
+  {
+    return "{}";
+  }
+
+  //=== VK_NV_optical_flow ===
+
+  // Bitmask pretty-printers for the NV optical-flow extension.
+  VULKAN_HPP_INLINE std::string to_string( OpticalFlowUsageFlagsNV value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & OpticalFlowUsageFlagBitsNV::eInput ) result += "Input | ";
+    if ( value & OpticalFlowUsageFlagBitsNV::eOutput ) result += "Output | ";
+    if ( value & OpticalFlowUsageFlagBitsNV::eHint ) result += "Hint | ";
+    if ( value & OpticalFlowUsageFlagBitsNV::eCost ) result += "Cost | ";
+    if ( value & OpticalFlowUsageFlagBitsNV::eGlobalFlow ) result += "GlobalFlow | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( OpticalFlowGridSizeFlagsNV value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & OpticalFlowGridSizeFlagBitsNV::e1X1 ) result += "1X1 | ";
+    if ( value & OpticalFlowGridSizeFlagBitsNV::e2X2 ) result += "2X2 | ";
+    if ( value & OpticalFlowGridSizeFlagBitsNV::e4X4 ) result += "4X4 | ";
+    if ( value & OpticalFlowGridSizeFlagBitsNV::e8X8 ) result += "8X8 | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( OpticalFlowSessionCreateFlagsNV value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & OpticalFlowSessionCreateFlagBitsNV::eEnableHint ) result += "EnableHint | ";
+    if ( value & OpticalFlowSessionCreateFlagBitsNV::eEnableCost ) result += "EnableCost | ";
+    if ( value & OpticalFlowSessionCreateFlagBitsNV::eEnableGlobalFlow ) result += "EnableGlobalFlow | ";
+    if ( value & OpticalFlowSessionCreateFlagBitsNV::eAllowRegions ) result += "AllowRegions | ";
+    if ( value & OpticalFlowSessionCreateFlagBitsNV::eBothDirections ) result += "BothDirections | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( OpticalFlowExecuteFlagsNV value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & OpticalFlowExecuteFlagBitsNV::eDisableTemporalHints ) result += "DisableTemporalHints | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+
+
+  //=======================
+  //=== ENUMs to_string ===
+  //=======================
+
+  // Formats a 32-bit value as lowercase hex (no "0x" prefix, no zero padding).
+  // Uses std::format when the library feature-test macro reports support
+  // (__cpp_lib_format expands to 0 / false on older standard libraries),
+  // otherwise falls back to a std::stringstream with std::hex.
+  VULKAN_HPP_INLINE std::string toHexString( uint32_t value )
+  {
+#if __cpp_lib_format
+    return std::format( "{:x}", value );
+#else
+    std::stringstream stream;
+    stream << std::hex << value;
+    return stream.str();
+#endif
+  }
+
+
+  //=== VK_VERSION_1_0 ===
+
+
+  // Maps a VULKAN_HPP_NAMESPACE::Result enumerant to its un-prefixed name.
+  // Unrecognized values fall through to the default case and are rendered as
+  // "invalid ( <hex> )" via toHexString.
+  VULKAN_HPP_INLINE std::string to_string( Result value )
+  {
+    switch ( value )
+    {
+      case Result::eSuccess : return "Success";
+      case Result::eNotReady : return "NotReady";
+      case Result::eTimeout : return "Timeout";
+      case Result::eEventSet : return "EventSet";
+      case Result::eEventReset : return "EventReset";
+      case Result::eIncomplete : return "Incomplete";
+      case Result::eErrorOutOfHostMemory : return "ErrorOutOfHostMemory";
+      case Result::eErrorOutOfDeviceMemory : return "ErrorOutOfDeviceMemory";
+      case Result::eErrorInitializationFailed : return "ErrorInitializationFailed";
+      case Result::eErrorDeviceLost : return "ErrorDeviceLost";
+      case Result::eErrorMemoryMapFailed : return "ErrorMemoryMapFailed";
+      case Result::eErrorLayerNotPresent : return "ErrorLayerNotPresent";
+      case Result::eErrorExtensionNotPresent : return "ErrorExtensionNotPresent";
+      case Result::eErrorFeatureNotPresent : return "ErrorFeatureNotPresent";
+      case Result::eErrorIncompatibleDriver : return "ErrorIncompatibleDriver";
+      case Result::eErrorTooManyObjects : return "ErrorTooManyObjects";
+      case Result::eErrorFormatNotSupported : return "ErrorFormatNotSupported";
+      case Result::eErrorFragmentedPool : return "ErrorFragmentedPool";
+      case Result::eErrorUnknown : return "ErrorUnknown";
+      case Result::eErrorOutOfPoolMemory : return "ErrorOutOfPoolMemory";
+      case Result::eErrorInvalidExternalHandle : return "ErrorInvalidExternalHandle";
+      case Result::eErrorFragmentation : return "ErrorFragmentation";
+      case Result::eErrorInvalidOpaqueCaptureAddress : return "ErrorInvalidOpaqueCaptureAddress";
+      case Result::ePipelineCompileRequired : return "PipelineCompileRequired";
+      case Result::eErrorSurfaceLostKHR : return "ErrorSurfaceLostKHR";
+      case Result::eErrorNativeWindowInUseKHR : return "ErrorNativeWindowInUseKHR";
+      case Result::eSuboptimalKHR : return "SuboptimalKHR";
+      case Result::eErrorOutOfDateKHR : return "ErrorOutOfDateKHR";
+      case Result::eErrorIncompatibleDisplayKHR : return "ErrorIncompatibleDisplayKHR";
+      case Result::eErrorValidationFailedEXT : return "ErrorValidationFailedEXT";
+      case Result::eErrorInvalidShaderNV : return "ErrorInvalidShaderNV";
+      case Result::eErrorImageUsageNotSupportedKHR : return "ErrorImageUsageNotSupportedKHR";
+      case Result::eErrorVideoPictureLayoutNotSupportedKHR : return "ErrorVideoPictureLayoutNotSupportedKHR";
+      case Result::eErrorVideoProfileOperationNotSupportedKHR : return "ErrorVideoProfileOperationNotSupportedKHR";
+      case Result::eErrorVideoProfileFormatNotSupportedKHR : return "ErrorVideoProfileFormatNotSupportedKHR";
+      case Result::eErrorVideoProfileCodecNotSupportedKHR : return "ErrorVideoProfileCodecNotSupportedKHR";
+      case Result::eErrorVideoStdVersionNotSupportedKHR : return "ErrorVideoStdVersionNotSupportedKHR";
+      case Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT : return "ErrorInvalidDrmFormatModifierPlaneLayoutEXT";
+      case Result::eErrorNotPermittedKHR : return "ErrorNotPermittedKHR";
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+      case Result::eErrorFullScreenExclusiveModeLostEXT : return "ErrorFullScreenExclusiveModeLostEXT";
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+      case Result::eThreadIdleKHR : return "ThreadIdleKHR";
+      case Result::eThreadDoneKHR : return "ThreadDoneKHR";
+      case Result::eOperationDeferredKHR : return "OperationDeferredKHR";
+      case Result::eOperationNotDeferredKHR : return "OperationNotDeferredKHR";
+      case Result::eErrorCompressionExhaustedEXT : return "ErrorCompressionExhaustedEXT";
+      // Unknown enumerant (e.g. from a newer driver/header): render as hex.
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( StructureType value )
+  {
+    switch ( value )
+    {
+      case StructureType::eApplicationInfo : return "ApplicationInfo";
+      case StructureType::eInstanceCreateInfo : return "InstanceCreateInfo";
+      case StructureType::eDeviceQueueCreateInfo : return "DeviceQueueCreateInfo";
+      case StructureType::eDeviceCreateInfo : return "DeviceCreateInfo";
+      case StructureType::eSubmitInfo : return "SubmitInfo";
+      case StructureType::eMemoryAllocateInfo : return "MemoryAllocateInfo";
+      case StructureType::eMappedMemoryRange : return "MappedMemoryRange";
+      case StructureType::eBindSparseInfo : return "BindSparseInfo";
+      case StructureType::eFenceCreateInfo : return "FenceCreateInfo";
+      case StructureType::eSemaphoreCreateInfo : return "SemaphoreCreateInfo";
+      case StructureType::eEventCreateInfo : return "EventCreateInfo";
+      case StructureType::eQueryPoolCreateInfo : return "QueryPoolCreateInfo";
+      case StructureType::eBufferCreateInfo : return "BufferCreateInfo";
+      case StructureType::eBufferViewCreateInfo : return "BufferViewCreateInfo";
+      case StructureType::eImageCreateInfo : return "ImageCreateInfo";
+      case StructureType::eImageViewCreateInfo : return "ImageViewCreateInfo";
+      case StructureType::eShaderModuleCreateInfo : return "ShaderModuleCreateInfo";
+      case StructureType::ePipelineCacheCreateInfo : return "PipelineCacheCreateInfo";
+      case StructureType::ePipelineShaderStageCreateInfo : return "PipelineShaderStageCreateInfo";
+      case StructureType::ePipelineVertexInputStateCreateInfo : return "PipelineVertexInputStateCreateInfo";
+      case StructureType::ePipelineInputAssemblyStateCreateInfo : return "PipelineInputAssemblyStateCreateInfo";
+      case StructureType::ePipelineTessellationStateCreateInfo : return "PipelineTessellationStateCreateInfo";
+      case StructureType::ePipelineViewportStateCreateInfo : return "PipelineViewportStateCreateInfo";
+      case StructureType::ePipelineRasterizationStateCreateInfo : return "PipelineRasterizationStateCreateInfo";
+      case StructureType::ePipelineMultisampleStateCreateInfo : return "PipelineMultisampleStateCreateInfo";
+      case StructureType::ePipelineDepthStencilStateCreateInfo : return "PipelineDepthStencilStateCreateInfo";
+      case StructureType::ePipelineColorBlendStateCreateInfo : return "PipelineColorBlendStateCreateInfo";
+      case StructureType::ePipelineDynamicStateCreateInfo : return "PipelineDynamicStateCreateInfo";
+      case StructureType::eGraphicsPipelineCreateInfo : return "GraphicsPipelineCreateInfo";
+      case StructureType::eComputePipelineCreateInfo : return "ComputePipelineCreateInfo";
+      case StructureType::ePipelineLayoutCreateInfo : return "PipelineLayoutCreateInfo";
+      case StructureType::eSamplerCreateInfo : return "SamplerCreateInfo";
+      case StructureType::eDescriptorSetLayoutCreateInfo : return "DescriptorSetLayoutCreateInfo";
+      case StructureType::eDescriptorPoolCreateInfo : return "DescriptorPoolCreateInfo";
+      case StructureType::eDescriptorSetAllocateInfo : return "DescriptorSetAllocateInfo";
+      case StructureType::eWriteDescriptorSet : return "WriteDescriptorSet";
+      case StructureType::eCopyDescriptorSet : return "CopyDescriptorSet";
+      case StructureType::eFramebufferCreateInfo : return "FramebufferCreateInfo";
+      case StructureType::eRenderPassCreateInfo : return "RenderPassCreateInfo";
+      case StructureType::eCommandPoolCreateInfo : return "CommandPoolCreateInfo";
+      case StructureType::eCommandBufferAllocateInfo : return "CommandBufferAllocateInfo";
+      case StructureType::eCommandBufferInheritanceInfo : return "CommandBufferInheritanceInfo";
+      case StructureType::eCommandBufferBeginInfo : return "CommandBufferBeginInfo";
+      case StructureType::eRenderPassBeginInfo : return "RenderPassBeginInfo";
+      case StructureType::eBufferMemoryBarrier : return "BufferMemoryBarrier";
+      case StructureType::eImageMemoryBarrier : return "ImageMemoryBarrier";
+      case StructureType::eMemoryBarrier : return "MemoryBarrier";
+      case StructureType::eLoaderInstanceCreateInfo : return "LoaderInstanceCreateInfo";
+      case StructureType::eLoaderDeviceCreateInfo : return "LoaderDeviceCreateInfo";
+      case StructureType::ePhysicalDeviceSubgroupProperties : return "PhysicalDeviceSubgroupProperties";
+      case StructureType::eBindBufferMemoryInfo : return "BindBufferMemoryInfo";
+      case StructureType::eBindImageMemoryInfo : return "BindImageMemoryInfo";
+      case StructureType::ePhysicalDevice16BitStorageFeatures : return "PhysicalDevice16BitStorageFeatures";
+      case StructureType::eMemoryDedicatedRequirements : return "MemoryDedicatedRequirements";
+      case StructureType::eMemoryDedicatedAllocateInfo : return "MemoryDedicatedAllocateInfo";
+      case StructureType::eMemoryAllocateFlagsInfo : return "MemoryAllocateFlagsInfo";
+      case StructureType::eDeviceGroupRenderPassBeginInfo : return "DeviceGroupRenderPassBeginInfo";
+      case StructureType::eDeviceGroupCommandBufferBeginInfo : return "DeviceGroupCommandBufferBeginInfo";
+      case StructureType::eDeviceGroupSubmitInfo : return "DeviceGroupSubmitInfo";
+      case StructureType::eDeviceGroupBindSparseInfo : return "DeviceGroupBindSparseInfo";
+      case StructureType::eBindBufferMemoryDeviceGroupInfo : return "BindBufferMemoryDeviceGroupInfo";
+      case StructureType::eBindImageMemoryDeviceGroupInfo : return "BindImageMemoryDeviceGroupInfo";
+      case StructureType::ePhysicalDeviceGroupProperties : return "PhysicalDeviceGroupProperties";
+      case StructureType::eDeviceGroupDeviceCreateInfo : return "DeviceGroupDeviceCreateInfo";
+      case StructureType::eBufferMemoryRequirementsInfo2 : return "BufferMemoryRequirementsInfo2";
+      case StructureType::eImageMemoryRequirementsInfo2 : return "ImageMemoryRequirementsInfo2";
+      case StructureType::eImageSparseMemoryRequirementsInfo2 : return "ImageSparseMemoryRequirementsInfo2";
+      case StructureType::eMemoryRequirements2 : return "MemoryRequirements2";
+      case StructureType::eSparseImageMemoryRequirements2 : return "SparseImageMemoryRequirements2";
+      case StructureType::ePhysicalDeviceFeatures2 : return "PhysicalDeviceFeatures2";
+      case StructureType::ePhysicalDeviceProperties2 : return "PhysicalDeviceProperties2";
+      case StructureType::eFormatProperties2 : return "FormatProperties2";
+      case StructureType::eImageFormatProperties2 : return "ImageFormatProperties2";
+      case StructureType::ePhysicalDeviceImageFormatInfo2 : return "PhysicalDeviceImageFormatInfo2";
+      case StructureType::eQueueFamilyProperties2 : return "QueueFamilyProperties2";
+      case StructureType::ePhysicalDeviceMemoryProperties2 : return "PhysicalDeviceMemoryProperties2";
+      case StructureType::eSparseImageFormatProperties2 : return "SparseImageFormatProperties2";
+      case StructureType::ePhysicalDeviceSparseImageFormatInfo2 : return "PhysicalDeviceSparseImageFormatInfo2";
+      case StructureType::ePhysicalDevicePointClippingProperties : return "PhysicalDevicePointClippingProperties";
+      case StructureType::eRenderPassInputAttachmentAspectCreateInfo : return "RenderPassInputAttachmentAspectCreateInfo";
+      case StructureType::eImageViewUsageCreateInfo : return "ImageViewUsageCreateInfo";
+      case StructureType::ePipelineTessellationDomainOriginStateCreateInfo : return "PipelineTessellationDomainOriginStateCreateInfo";
+      case StructureType::eRenderPassMultiviewCreateInfo : return "RenderPassMultiviewCreateInfo";
+      case StructureType::ePhysicalDeviceMultiviewFeatures : return "PhysicalDeviceMultiviewFeatures";
+      case StructureType::ePhysicalDeviceMultiviewProperties : return "PhysicalDeviceMultiviewProperties";
+      case StructureType::ePhysicalDeviceVariablePointersFeatures : return "PhysicalDeviceVariablePointersFeatures";
+      case StructureType::eProtectedSubmitInfo : return "ProtectedSubmitInfo";
+      case StructureType::ePhysicalDeviceProtectedMemoryFeatures : return "PhysicalDeviceProtectedMemoryFeatures";
+      case StructureType::ePhysicalDeviceProtectedMemoryProperties : return "PhysicalDeviceProtectedMemoryProperties";
+      case StructureType::eDeviceQueueInfo2 : return "DeviceQueueInfo2";
+      case StructureType::eSamplerYcbcrConversionCreateInfo : return "SamplerYcbcrConversionCreateInfo";
+      case StructureType::eSamplerYcbcrConversionInfo : return "SamplerYcbcrConversionInfo";
+      case StructureType::eBindImagePlaneMemoryInfo : return "BindImagePlaneMemoryInfo";
+      case StructureType::eImagePlaneMemoryRequirementsInfo : return "ImagePlaneMemoryRequirementsInfo";
+      case StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures : return "PhysicalDeviceSamplerYcbcrConversionFeatures";
+      case StructureType::eSamplerYcbcrConversionImageFormatProperties : return "SamplerYcbcrConversionImageFormatProperties";
+      case StructureType::eDescriptorUpdateTemplateCreateInfo : return "DescriptorUpdateTemplateCreateInfo";
+      case StructureType::ePhysicalDeviceExternalImageFormatInfo : return "PhysicalDeviceExternalImageFormatInfo";
+      case StructureType::eExternalImageFormatProperties : return "ExternalImageFormatProperties";
+      case StructureType::ePhysicalDeviceExternalBufferInfo : return "PhysicalDeviceExternalBufferInfo";
+      case StructureType::eExternalBufferProperties : return "ExternalBufferProperties";
+      case StructureType::ePhysicalDeviceIdProperties : return "PhysicalDeviceIdProperties";
+      case StructureType::eExternalMemoryBufferCreateInfo : return "ExternalMemoryBufferCreateInfo";
+      case StructureType::eExternalMemoryImageCreateInfo : return "ExternalMemoryImageCreateInfo";
+      case StructureType::eExportMemoryAllocateInfo : return "ExportMemoryAllocateInfo";
+      case StructureType::ePhysicalDeviceExternalFenceInfo : return "PhysicalDeviceExternalFenceInfo";
+      case StructureType::eExternalFenceProperties : return "ExternalFenceProperties";
+      case StructureType::eExportFenceCreateInfo : return "ExportFenceCreateInfo";
+      case StructureType::eExportSemaphoreCreateInfo : return "ExportSemaphoreCreateInfo";
+      case StructureType::ePhysicalDeviceExternalSemaphoreInfo : return "PhysicalDeviceExternalSemaphoreInfo";
+      case StructureType::eExternalSemaphoreProperties : return "ExternalSemaphoreProperties";
+      case StructureType::ePhysicalDeviceMaintenance3Properties : return "PhysicalDeviceMaintenance3Properties";
+      case StructureType::eDescriptorSetLayoutSupport : return "DescriptorSetLayoutSupport";
+      case StructureType::ePhysicalDeviceShaderDrawParametersFeatures : return "PhysicalDeviceShaderDrawParametersFeatures";
+      case StructureType::ePhysicalDeviceVulkan11Features : return "PhysicalDeviceVulkan11Features";
+      case StructureType::ePhysicalDeviceVulkan11Properties : return "PhysicalDeviceVulkan11Properties";
+      case StructureType::ePhysicalDeviceVulkan12Features : return "PhysicalDeviceVulkan12Features";
+      case StructureType::ePhysicalDeviceVulkan12Properties : return "PhysicalDeviceVulkan12Properties";
+      case StructureType::eImageFormatListCreateInfo : return "ImageFormatListCreateInfo";
+      case StructureType::eAttachmentDescription2 : return "AttachmentDescription2";
+      case StructureType::eAttachmentReference2 : return "AttachmentReference2";
+      case StructureType::eSubpassDescription2 : return "SubpassDescription2";
+      case StructureType::eSubpassDependency2 : return "SubpassDependency2";
+      case StructureType::eRenderPassCreateInfo2 : return "RenderPassCreateInfo2";
+      case StructureType::eSubpassBeginInfo : return "SubpassBeginInfo";
+      case StructureType::eSubpassEndInfo : return "SubpassEndInfo";
+      case StructureType::ePhysicalDevice8BitStorageFeatures : return "PhysicalDevice8BitStorageFeatures";
+      case StructureType::ePhysicalDeviceDriverProperties : return "PhysicalDeviceDriverProperties";
+      case StructureType::ePhysicalDeviceShaderAtomicInt64Features : return "PhysicalDeviceShaderAtomicInt64Features";
+      case StructureType::ePhysicalDeviceShaderFloat16Int8Features : return "PhysicalDeviceShaderFloat16Int8Features";
+      case StructureType::ePhysicalDeviceFloatControlsProperties : return "PhysicalDeviceFloatControlsProperties";
+      case StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo : return "DescriptorSetLayoutBindingFlagsCreateInfo";
+      case StructureType::ePhysicalDeviceDescriptorIndexingFeatures : return "PhysicalDeviceDescriptorIndexingFeatures";
+      case StructureType::ePhysicalDeviceDescriptorIndexingProperties : return "PhysicalDeviceDescriptorIndexingProperties";
+      case StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo : return "DescriptorSetVariableDescriptorCountAllocateInfo";
+      case StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport : return "DescriptorSetVariableDescriptorCountLayoutSupport";
+      case StructureType::ePhysicalDeviceDepthStencilResolveProperties : return "PhysicalDeviceDepthStencilResolveProperties";
+      case StructureType::eSubpassDescriptionDepthStencilResolve : return "SubpassDescriptionDepthStencilResolve";
+      case StructureType::ePhysicalDeviceScalarBlockLayoutFeatures : return "PhysicalDeviceScalarBlockLayoutFeatures";
+      case StructureType::eImageStencilUsageCreateInfo : return "ImageStencilUsageCreateInfo";
+      case StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties : return "PhysicalDeviceSamplerFilterMinmaxProperties";
+      case StructureType::eSamplerReductionModeCreateInfo : return "SamplerReductionModeCreateInfo";
+      case StructureType::ePhysicalDeviceVulkanMemoryModelFeatures : return "PhysicalDeviceVulkanMemoryModelFeatures";
+      case StructureType::ePhysicalDeviceImagelessFramebufferFeatures : return "PhysicalDeviceImagelessFramebufferFeatures";
+      case StructureType::eFramebufferAttachmentsCreateInfo : return "FramebufferAttachmentsCreateInfo";
+      case StructureType::eFramebufferAttachmentImageInfo : return "FramebufferAttachmentImageInfo";
+      case StructureType::eRenderPassAttachmentBeginInfo : return "RenderPassAttachmentBeginInfo";
+      case StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures : return "PhysicalDeviceUniformBufferStandardLayoutFeatures";
+      case StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures : return "PhysicalDeviceShaderSubgroupExtendedTypesFeatures";
+      case StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures : return "PhysicalDeviceSeparateDepthStencilLayoutsFeatures";
+      case StructureType::eAttachmentReferenceStencilLayout : return "AttachmentReferenceStencilLayout";
+      case StructureType::eAttachmentDescriptionStencilLayout : return "AttachmentDescriptionStencilLayout";
+      case StructureType::ePhysicalDeviceHostQueryResetFeatures : return "PhysicalDeviceHostQueryResetFeatures";
+      case StructureType::ePhysicalDeviceTimelineSemaphoreFeatures : return "PhysicalDeviceTimelineSemaphoreFeatures";
+      case StructureType::ePhysicalDeviceTimelineSemaphoreProperties : return "PhysicalDeviceTimelineSemaphoreProperties";
+      case StructureType::eSemaphoreTypeCreateInfo : return "SemaphoreTypeCreateInfo";
+      case StructureType::eTimelineSemaphoreSubmitInfo : return "TimelineSemaphoreSubmitInfo";
+      case StructureType::eSemaphoreWaitInfo : return "SemaphoreWaitInfo";
+      case StructureType::eSemaphoreSignalInfo : return "SemaphoreSignalInfo";
+      case StructureType::ePhysicalDeviceBufferDeviceAddressFeatures : return "PhysicalDeviceBufferDeviceAddressFeatures";
+      case StructureType::eBufferDeviceAddressInfo : return "BufferDeviceAddressInfo";
+      case StructureType::eBufferOpaqueCaptureAddressCreateInfo : return "BufferOpaqueCaptureAddressCreateInfo";
+      case StructureType::eMemoryOpaqueCaptureAddressAllocateInfo : return "MemoryOpaqueCaptureAddressAllocateInfo";
+      case StructureType::eDeviceMemoryOpaqueCaptureAddressInfo : return "DeviceMemoryOpaqueCaptureAddressInfo";
+      case StructureType::ePhysicalDeviceVulkan13Features : return "PhysicalDeviceVulkan13Features";
+      case StructureType::ePhysicalDeviceVulkan13Properties : return "PhysicalDeviceVulkan13Properties";
+      case StructureType::ePipelineCreationFeedbackCreateInfo : return "PipelineCreationFeedbackCreateInfo";
+      case StructureType::ePhysicalDeviceShaderTerminateInvocationFeatures : return "PhysicalDeviceShaderTerminateInvocationFeatures";
+      case StructureType::ePhysicalDeviceToolProperties : return "PhysicalDeviceToolProperties";
+      case StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeatures : return "PhysicalDeviceShaderDemoteToHelperInvocationFeatures";
+      case StructureType::ePhysicalDevicePrivateDataFeatures : return "PhysicalDevicePrivateDataFeatures";
+      case StructureType::eDevicePrivateDataCreateInfo : return "DevicePrivateDataCreateInfo";
+      case StructureType::ePrivateDataSlotCreateInfo : return "PrivateDataSlotCreateInfo";
+      case StructureType::ePhysicalDevicePipelineCreationCacheControlFeatures : return "PhysicalDevicePipelineCreationCacheControlFeatures";
+      case StructureType::eMemoryBarrier2 : return "MemoryBarrier2";
+      case StructureType::eBufferMemoryBarrier2 : return "BufferMemoryBarrier2";
+      case StructureType::eImageMemoryBarrier2 : return "ImageMemoryBarrier2";
+      case StructureType::eDependencyInfo : return "DependencyInfo";
+      case StructureType::eSubmitInfo2 : return "SubmitInfo2";
+      case StructureType::eSemaphoreSubmitInfo : return "SemaphoreSubmitInfo";
+      case StructureType::eCommandBufferSubmitInfo : return "CommandBufferSubmitInfo";
+      case StructureType::ePhysicalDeviceSynchronization2Features : return "PhysicalDeviceSynchronization2Features";
+      case StructureType::ePhysicalDeviceZeroInitializeWorkgroupMemoryFeatures : return "PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures";
+      case StructureType::ePhysicalDeviceImageRobustnessFeatures : return "PhysicalDeviceImageRobustnessFeatures";
+      case StructureType::eCopyBufferInfo2 : return "CopyBufferInfo2";
+      case StructureType::eCopyImageInfo2 : return "CopyImageInfo2";
+      case StructureType::eCopyBufferToImageInfo2 : return "CopyBufferToImageInfo2";
+      case StructureType::eCopyImageToBufferInfo2 : return "CopyImageToBufferInfo2";
+      case StructureType::eBlitImageInfo2 : return "BlitImageInfo2";
+      case StructureType::eResolveImageInfo2 : return "ResolveImageInfo2";
+      case StructureType::eBufferCopy2 : return "BufferCopy2";
+      case StructureType::eImageCopy2 : return "ImageCopy2";
+      case StructureType::eImageBlit2 : return "ImageBlit2";
+      case StructureType::eBufferImageCopy2 : return "BufferImageCopy2";
+      case StructureType::eImageResolve2 : return "ImageResolve2";
+      case StructureType::ePhysicalDeviceSubgroupSizeControlProperties : return "PhysicalDeviceSubgroupSizeControlProperties";
+      case StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfo : return "PipelineShaderStageRequiredSubgroupSizeCreateInfo";
+      case StructureType::ePhysicalDeviceSubgroupSizeControlFeatures : return "PhysicalDeviceSubgroupSizeControlFeatures";
+      case StructureType::ePhysicalDeviceInlineUniformBlockFeatures : return "PhysicalDeviceInlineUniformBlockFeatures";
+      case StructureType::ePhysicalDeviceInlineUniformBlockProperties : return "PhysicalDeviceInlineUniformBlockProperties";
+      case StructureType::eWriteDescriptorSetInlineUniformBlock : return "WriteDescriptorSetInlineUniformBlock";
+      case StructureType::eDescriptorPoolInlineUniformBlockCreateInfo : return "DescriptorPoolInlineUniformBlockCreateInfo";
+      case StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeatures : return "PhysicalDeviceTextureCompressionAstcHdrFeatures";
+      case StructureType::eRenderingInfo : return "RenderingInfo";
+      case StructureType::eRenderingAttachmentInfo : return "RenderingAttachmentInfo";
+      case StructureType::ePipelineRenderingCreateInfo : return "PipelineRenderingCreateInfo";
+      case StructureType::ePhysicalDeviceDynamicRenderingFeatures : return "PhysicalDeviceDynamicRenderingFeatures";
+      case StructureType::eCommandBufferInheritanceRenderingInfo : return "CommandBufferInheritanceRenderingInfo";
+      case StructureType::ePhysicalDeviceShaderIntegerDotProductFeatures : return "PhysicalDeviceShaderIntegerDotProductFeatures";
+      case StructureType::ePhysicalDeviceShaderIntegerDotProductProperties : return "PhysicalDeviceShaderIntegerDotProductProperties";
+      case StructureType::ePhysicalDeviceTexelBufferAlignmentProperties : return "PhysicalDeviceTexelBufferAlignmentProperties";
+      case StructureType::eFormatProperties3 : return "FormatProperties3";
+      case StructureType::ePhysicalDeviceMaintenance4Features : return "PhysicalDeviceMaintenance4Features";
+      case StructureType::ePhysicalDeviceMaintenance4Properties : return "PhysicalDeviceMaintenance4Properties";
+      case StructureType::eDeviceBufferMemoryRequirements : return "DeviceBufferMemoryRequirements";
+      case StructureType::eDeviceImageMemoryRequirements : return "DeviceImageMemoryRequirements";
+      case StructureType::eSwapchainCreateInfoKHR : return "SwapchainCreateInfoKHR";
+      case StructureType::ePresentInfoKHR : return "PresentInfoKHR";
+      case StructureType::eDeviceGroupPresentCapabilitiesKHR : return "DeviceGroupPresentCapabilitiesKHR";
+      case StructureType::eImageSwapchainCreateInfoKHR : return "ImageSwapchainCreateInfoKHR";
+      case StructureType::eBindImageMemorySwapchainInfoKHR : return "BindImageMemorySwapchainInfoKHR";
+      case StructureType::eAcquireNextImageInfoKHR : return "AcquireNextImageInfoKHR";
+      case StructureType::eDeviceGroupPresentInfoKHR : return "DeviceGroupPresentInfoKHR";
+      case StructureType::eDeviceGroupSwapchainCreateInfoKHR : return "DeviceGroupSwapchainCreateInfoKHR";
+      case StructureType::eDisplayModeCreateInfoKHR : return "DisplayModeCreateInfoKHR";
+      case StructureType::eDisplaySurfaceCreateInfoKHR : return "DisplaySurfaceCreateInfoKHR";
+      case StructureType::eDisplayPresentInfoKHR : return "DisplayPresentInfoKHR";
+#if defined( VK_USE_PLATFORM_XLIB_KHR )
+      case StructureType::eXlibSurfaceCreateInfoKHR : return "XlibSurfaceCreateInfoKHR";
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+#if defined( VK_USE_PLATFORM_XCB_KHR )
+      case StructureType::eXcbSurfaceCreateInfoKHR : return "XcbSurfaceCreateInfoKHR";
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
+      case StructureType::eWaylandSurfaceCreateInfoKHR : return "WaylandSurfaceCreateInfoKHR";
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+      case StructureType::eAndroidSurfaceCreateInfoKHR : return "AndroidSurfaceCreateInfoKHR";
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+      case StructureType::eWin32SurfaceCreateInfoKHR : return "Win32SurfaceCreateInfoKHR";
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+      case StructureType::eDebugReportCallbackCreateInfoEXT : return "DebugReportCallbackCreateInfoEXT";
+      case StructureType::ePipelineRasterizationStateRasterizationOrderAMD : return "PipelineRasterizationStateRasterizationOrderAMD";
+      case StructureType::eDebugMarkerObjectNameInfoEXT : return "DebugMarkerObjectNameInfoEXT";
+      case StructureType::eDebugMarkerObjectTagInfoEXT : return "DebugMarkerObjectTagInfoEXT";
+      case StructureType::eDebugMarkerMarkerInfoEXT : return "DebugMarkerMarkerInfoEXT";
+      case StructureType::eVideoProfileInfoKHR : return "VideoProfileInfoKHR";
+      case StructureType::eVideoCapabilitiesKHR : return "VideoCapabilitiesKHR";
+      case StructureType::eVideoPictureResourceInfoKHR : return "VideoPictureResourceInfoKHR";
+      case StructureType::eVideoSessionMemoryRequirementsKHR : return "VideoSessionMemoryRequirementsKHR";
+      case StructureType::eBindVideoSessionMemoryInfoKHR : return "BindVideoSessionMemoryInfoKHR";
+      case StructureType::eVideoSessionCreateInfoKHR : return "VideoSessionCreateInfoKHR";
+      case StructureType::eVideoSessionParametersCreateInfoKHR : return "VideoSessionParametersCreateInfoKHR";
+      case StructureType::eVideoSessionParametersUpdateInfoKHR : return "VideoSessionParametersUpdateInfoKHR";
+      case StructureType::eVideoBeginCodingInfoKHR : return "VideoBeginCodingInfoKHR";
+      case StructureType::eVideoEndCodingInfoKHR : return "VideoEndCodingInfoKHR";
+      case StructureType::eVideoCodingControlInfoKHR : return "VideoCodingControlInfoKHR";
+      case StructureType::eVideoReferenceSlotInfoKHR : return "VideoReferenceSlotInfoKHR";
+      case StructureType::eQueueFamilyVideoPropertiesKHR : return "QueueFamilyVideoPropertiesKHR";
+      case StructureType::eVideoProfileListInfoKHR : return "VideoProfileListInfoKHR";
+      case StructureType::ePhysicalDeviceVideoFormatInfoKHR : return "PhysicalDeviceVideoFormatInfoKHR";
+      case StructureType::eVideoFormatPropertiesKHR : return "VideoFormatPropertiesKHR";
+      case StructureType::eQueueFamilyQueryResultStatusPropertiesKHR : return "QueueFamilyQueryResultStatusPropertiesKHR";
+      case StructureType::eVideoDecodeInfoKHR : return "VideoDecodeInfoKHR";
+      case StructureType::eVideoDecodeCapabilitiesKHR : return "VideoDecodeCapabilitiesKHR";
+      case StructureType::eVideoDecodeUsageInfoKHR : return "VideoDecodeUsageInfoKHR";
+      case StructureType::eDedicatedAllocationImageCreateInfoNV : return "DedicatedAllocationImageCreateInfoNV";
+      case StructureType::eDedicatedAllocationBufferCreateInfoNV : return "DedicatedAllocationBufferCreateInfoNV";
+      case StructureType::eDedicatedAllocationMemoryAllocateInfoNV : return "DedicatedAllocationMemoryAllocateInfoNV";
+      case StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT : return "PhysicalDeviceTransformFeedbackFeaturesEXT";
+      case StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT : return "PhysicalDeviceTransformFeedbackPropertiesEXT";
+      case StructureType::ePipelineRasterizationStateStreamCreateInfoEXT : return "PipelineRasterizationStateStreamCreateInfoEXT";
+      case StructureType::eCuModuleCreateInfoNVX : return "CuModuleCreateInfoNVX";
+      case StructureType::eCuFunctionCreateInfoNVX : return "CuFunctionCreateInfoNVX";
+      case StructureType::eCuLaunchInfoNVX : return "CuLaunchInfoNVX";
+      case StructureType::eImageViewHandleInfoNVX : return "ImageViewHandleInfoNVX";
+      case StructureType::eImageViewAddressPropertiesNVX : return "ImageViewAddressPropertiesNVX";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+      case StructureType::eVideoEncodeH264CapabilitiesEXT : return "VideoEncodeH264CapabilitiesEXT";
+      case StructureType::eVideoEncodeH264SessionParametersCreateInfoEXT : return "VideoEncodeH264SessionParametersCreateInfoEXT";
+      case StructureType::eVideoEncodeH264SessionParametersAddInfoEXT : return "VideoEncodeH264SessionParametersAddInfoEXT";
+      case StructureType::eVideoEncodeH264VclFrameInfoEXT : return "VideoEncodeH264VclFrameInfoEXT";
+      case StructureType::eVideoEncodeH264DpbSlotInfoEXT : return "VideoEncodeH264DpbSlotInfoEXT";
+      case StructureType::eVideoEncodeH264NaluSliceInfoEXT : return "VideoEncodeH264NaluSliceInfoEXT";
+      case StructureType::eVideoEncodeH264EmitPictureParametersInfoEXT : return "VideoEncodeH264EmitPictureParametersInfoEXT";
+      case StructureType::eVideoEncodeH264ProfileInfoEXT : return "VideoEncodeH264ProfileInfoEXT";
+      case StructureType::eVideoEncodeH264RateControlInfoEXT : return "VideoEncodeH264RateControlInfoEXT";
+      case StructureType::eVideoEncodeH264RateControlLayerInfoEXT : return "VideoEncodeH264RateControlLayerInfoEXT";
+      case StructureType::eVideoEncodeH264ReferenceListsInfoEXT : return "VideoEncodeH264ReferenceListsInfoEXT";
+      case StructureType::eVideoEncodeH265CapabilitiesEXT : return "VideoEncodeH265CapabilitiesEXT";
+      case StructureType::eVideoEncodeH265SessionParametersCreateInfoEXT : return "VideoEncodeH265SessionParametersCreateInfoEXT";
+      case StructureType::eVideoEncodeH265SessionParametersAddInfoEXT : return "VideoEncodeH265SessionParametersAddInfoEXT";
+      case StructureType::eVideoEncodeH265VclFrameInfoEXT : return "VideoEncodeH265VclFrameInfoEXT";
+      case StructureType::eVideoEncodeH265DpbSlotInfoEXT : return "VideoEncodeH265DpbSlotInfoEXT";
+      case StructureType::eVideoEncodeH265NaluSliceSegmentInfoEXT : return "VideoEncodeH265NaluSliceSegmentInfoEXT";
+      case StructureType::eVideoEncodeH265EmitPictureParametersInfoEXT : return "VideoEncodeH265EmitPictureParametersInfoEXT";
+      case StructureType::eVideoEncodeH265ProfileInfoEXT : return "VideoEncodeH265ProfileInfoEXT";
+      case StructureType::eVideoEncodeH265ReferenceListsInfoEXT : return "VideoEncodeH265ReferenceListsInfoEXT";
+      case StructureType::eVideoEncodeH265RateControlInfoEXT : return "VideoEncodeH265RateControlInfoEXT";
+      case StructureType::eVideoEncodeH265RateControlLayerInfoEXT : return "VideoEncodeH265RateControlLayerInfoEXT";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      case StructureType::eVideoDecodeH264CapabilitiesKHR : return "VideoDecodeH264CapabilitiesKHR";
+      case StructureType::eVideoDecodeH264PictureInfoKHR : return "VideoDecodeH264PictureInfoKHR";
+      case StructureType::eVideoDecodeH264ProfileInfoKHR : return "VideoDecodeH264ProfileInfoKHR";
+      case StructureType::eVideoDecodeH264SessionParametersCreateInfoKHR : return "VideoDecodeH264SessionParametersCreateInfoKHR";
+      case StructureType::eVideoDecodeH264SessionParametersAddInfoKHR : return "VideoDecodeH264SessionParametersAddInfoKHR";
+      case StructureType::eVideoDecodeH264DpbSlotInfoKHR : return "VideoDecodeH264DpbSlotInfoKHR";
+      case StructureType::eTextureLodGatherFormatPropertiesAMD : return "TextureLodGatherFormatPropertiesAMD";
+      case StructureType::eRenderingFragmentShadingRateAttachmentInfoKHR : return "RenderingFragmentShadingRateAttachmentInfoKHR";
+      case StructureType::eRenderingFragmentDensityMapAttachmentInfoEXT : return "RenderingFragmentDensityMapAttachmentInfoEXT";
+      case StructureType::eAttachmentSampleCountInfoAMD : return "AttachmentSampleCountInfoAMD";
+      case StructureType::eMultiviewPerViewAttributesInfoNVX : return "MultiviewPerViewAttributesInfoNVX";
+#if defined( VK_USE_PLATFORM_GGP )
+      case StructureType::eStreamDescriptorSurfaceCreateInfoGGP : return "StreamDescriptorSurfaceCreateInfoGGP";
+#endif /*VK_USE_PLATFORM_GGP*/
+      case StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV : return "PhysicalDeviceCornerSampledImageFeaturesNV";
+      case StructureType::eExternalMemoryImageCreateInfoNV : return "ExternalMemoryImageCreateInfoNV";
+      case StructureType::eExportMemoryAllocateInfoNV : return "ExportMemoryAllocateInfoNV";
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+      case StructureType::eImportMemoryWin32HandleInfoNV : return "ImportMemoryWin32HandleInfoNV";
+      case StructureType::eExportMemoryWin32HandleInfoNV : return "ExportMemoryWin32HandleInfoNV";
+      case StructureType::eWin32KeyedMutexAcquireReleaseInfoNV : return "Win32KeyedMutexAcquireReleaseInfoNV";
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+      case StructureType::eValidationFlagsEXT : return "ValidationFlagsEXT";
+#if defined( VK_USE_PLATFORM_VI_NN )
+      case StructureType::eViSurfaceCreateInfoNN : return "ViSurfaceCreateInfoNN";
+#endif /*VK_USE_PLATFORM_VI_NN*/
+      case StructureType::eImageViewAstcDecodeModeEXT : return "ImageViewAstcDecodeModeEXT";
+      case StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT : return "PhysicalDeviceAstcDecodeFeaturesEXT";
+      case StructureType::ePipelineRobustnessCreateInfoEXT : return "PipelineRobustnessCreateInfoEXT";
+      case StructureType::ePhysicalDevicePipelineRobustnessFeaturesEXT : return "PhysicalDevicePipelineRobustnessFeaturesEXT";
+      case StructureType::ePhysicalDevicePipelineRobustnessPropertiesEXT : return "PhysicalDevicePipelineRobustnessPropertiesEXT";
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+      case StructureType::eImportMemoryWin32HandleInfoKHR : return "ImportMemoryWin32HandleInfoKHR";
+      case StructureType::eExportMemoryWin32HandleInfoKHR : return "ExportMemoryWin32HandleInfoKHR";
+      case StructureType::eMemoryWin32HandlePropertiesKHR : return "MemoryWin32HandlePropertiesKHR";
+      case StructureType::eMemoryGetWin32HandleInfoKHR : return "MemoryGetWin32HandleInfoKHR";
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+      case StructureType::eImportMemoryFdInfoKHR : return "ImportMemoryFdInfoKHR";
+      case StructureType::eMemoryFdPropertiesKHR : return "MemoryFdPropertiesKHR";
+      case StructureType::eMemoryGetFdInfoKHR : return "MemoryGetFdInfoKHR";
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+      case StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR : return "Win32KeyedMutexAcquireReleaseInfoKHR";
+      case StructureType::eImportSemaphoreWin32HandleInfoKHR : return "ImportSemaphoreWin32HandleInfoKHR";
+      case StructureType::eExportSemaphoreWin32HandleInfoKHR : return "ExportSemaphoreWin32HandleInfoKHR";
+      case StructureType::eD3D12FenceSubmitInfoKHR : return "D3D12FenceSubmitInfoKHR";
+      case StructureType::eSemaphoreGetWin32HandleInfoKHR : return "SemaphoreGetWin32HandleInfoKHR";
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+      case StructureType::eImportSemaphoreFdInfoKHR : return "ImportSemaphoreFdInfoKHR";
+      case StructureType::eSemaphoreGetFdInfoKHR : return "SemaphoreGetFdInfoKHR";
+      case StructureType::ePhysicalDevicePushDescriptorPropertiesKHR : return "PhysicalDevicePushDescriptorPropertiesKHR";
+      case StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT : return "CommandBufferInheritanceConditionalRenderingInfoEXT";
+      case StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT : return "PhysicalDeviceConditionalRenderingFeaturesEXT";
+      case StructureType::eConditionalRenderingBeginInfoEXT : return "ConditionalRenderingBeginInfoEXT";
+      case StructureType::ePresentRegionsKHR : return "PresentRegionsKHR";
+      case StructureType::ePipelineViewportWScalingStateCreateInfoNV : return "PipelineViewportWScalingStateCreateInfoNV";
+      case StructureType::eSurfaceCapabilities2EXT : return "SurfaceCapabilities2EXT";
+      case StructureType::eDisplayPowerInfoEXT : return "DisplayPowerInfoEXT";
+      case StructureType::eDeviceEventInfoEXT : return "DeviceEventInfoEXT";
+      case StructureType::eDisplayEventInfoEXT : return "DisplayEventInfoEXT";
+      case StructureType::eSwapchainCounterCreateInfoEXT : return "SwapchainCounterCreateInfoEXT";
+      case StructureType::ePresentTimesInfoGOOGLE : return "PresentTimesInfoGOOGLE";
+      case StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX : return "PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX";
+      case StructureType::ePipelineViewportSwizzleStateCreateInfoNV : return "PipelineViewportSwizzleStateCreateInfoNV";
+      case StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT : return "PhysicalDeviceDiscardRectanglePropertiesEXT";
+      case StructureType::ePipelineDiscardRectangleStateCreateInfoEXT : return "PipelineDiscardRectangleStateCreateInfoEXT";
+      case StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT : return "PhysicalDeviceConservativeRasterizationPropertiesEXT";
+      case StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT : return "PipelineRasterizationConservativeStateCreateInfoEXT";
+      case StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT : return "PhysicalDeviceDepthClipEnableFeaturesEXT";
+      case StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT : return "PipelineRasterizationDepthClipStateCreateInfoEXT";
+      case StructureType::eHdrMetadataEXT : return "HdrMetadataEXT";
+      case StructureType::eSharedPresentSurfaceCapabilitiesKHR : return "SharedPresentSurfaceCapabilitiesKHR";
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+      case StructureType::eImportFenceWin32HandleInfoKHR : return "ImportFenceWin32HandleInfoKHR";
+      case StructureType::eExportFenceWin32HandleInfoKHR : return "ExportFenceWin32HandleInfoKHR";
+      case StructureType::eFenceGetWin32HandleInfoKHR : return "FenceGetWin32HandleInfoKHR";
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+      case StructureType::eImportFenceFdInfoKHR : return "ImportFenceFdInfoKHR";
+      case StructureType::eFenceGetFdInfoKHR : return "FenceGetFdInfoKHR";
+      case StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR : return "PhysicalDevicePerformanceQueryFeaturesKHR";
+      case StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR : return "PhysicalDevicePerformanceQueryPropertiesKHR";
+      case StructureType::eQueryPoolPerformanceCreateInfoKHR : return "QueryPoolPerformanceCreateInfoKHR";
+      case StructureType::ePerformanceQuerySubmitInfoKHR : return "PerformanceQuerySubmitInfoKHR";
+      case StructureType::eAcquireProfilingLockInfoKHR : return "AcquireProfilingLockInfoKHR";
+      case StructureType::ePerformanceCounterKHR : return "PerformanceCounterKHR";
+      case StructureType::ePerformanceCounterDescriptionKHR : return "PerformanceCounterDescriptionKHR";
+      case StructureType::ePhysicalDeviceSurfaceInfo2KHR : return "PhysicalDeviceSurfaceInfo2KHR";
+      case StructureType::eSurfaceCapabilities2KHR : return "SurfaceCapabilities2KHR";
+      case StructureType::eSurfaceFormat2KHR : return "SurfaceFormat2KHR";
+      case StructureType::eDisplayProperties2KHR : return "DisplayProperties2KHR";
+      case StructureType::eDisplayPlaneProperties2KHR : return "DisplayPlaneProperties2KHR";
+      case StructureType::eDisplayModeProperties2KHR : return "DisplayModeProperties2KHR";
+      case StructureType::eDisplayPlaneInfo2KHR : return "DisplayPlaneInfo2KHR";
+      case StructureType::eDisplayPlaneCapabilities2KHR : return "DisplayPlaneCapabilities2KHR";
+#if defined( VK_USE_PLATFORM_IOS_MVK )
+      case StructureType::eIosSurfaceCreateInfoMVK : return "IosSurfaceCreateInfoMVK";
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+#if defined( VK_USE_PLATFORM_MACOS_MVK )
+      case StructureType::eMacosSurfaceCreateInfoMVK : return "MacosSurfaceCreateInfoMVK";
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+      case StructureType::eDebugUtilsObjectNameInfoEXT : return "DebugUtilsObjectNameInfoEXT";
+      case StructureType::eDebugUtilsObjectTagInfoEXT : return "DebugUtilsObjectTagInfoEXT";
+      case StructureType::eDebugUtilsLabelEXT : return "DebugUtilsLabelEXT";
+      case StructureType::eDebugUtilsMessengerCallbackDataEXT : return "DebugUtilsMessengerCallbackDataEXT";
+      case StructureType::eDebugUtilsMessengerCreateInfoEXT : return "DebugUtilsMessengerCreateInfoEXT";
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+      case StructureType::eAndroidHardwareBufferUsageANDROID : return "AndroidHardwareBufferUsageANDROID";
+      case StructureType::eAndroidHardwareBufferPropertiesANDROID : return "AndroidHardwareBufferPropertiesANDROID";
+      case StructureType::eAndroidHardwareBufferFormatPropertiesANDROID : return "AndroidHardwareBufferFormatPropertiesANDROID";
+      case StructureType::eImportAndroidHardwareBufferInfoANDROID : return "ImportAndroidHardwareBufferInfoANDROID";
+      case StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID : return "MemoryGetAndroidHardwareBufferInfoANDROID";
+      case StructureType::eExternalFormatANDROID : return "ExternalFormatANDROID";
+      case StructureType::eAndroidHardwareBufferFormatProperties2ANDROID : return "AndroidHardwareBufferFormatProperties2ANDROID";
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+      case StructureType::eSampleLocationsInfoEXT : return "SampleLocationsInfoEXT";
+      case StructureType::eRenderPassSampleLocationsBeginInfoEXT : return "RenderPassSampleLocationsBeginInfoEXT";
+      case StructureType::ePipelineSampleLocationsStateCreateInfoEXT : return "PipelineSampleLocationsStateCreateInfoEXT";
+      case StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT : return "PhysicalDeviceSampleLocationsPropertiesEXT";
+      case StructureType::eMultisamplePropertiesEXT : return "MultisamplePropertiesEXT";
+      case StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT : return "PhysicalDeviceBlendOperationAdvancedFeaturesEXT";
+      case StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT : return "PhysicalDeviceBlendOperationAdvancedPropertiesEXT";
+      case StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT : return "PipelineColorBlendAdvancedStateCreateInfoEXT";
+      case StructureType::ePipelineCoverageToColorStateCreateInfoNV : return "PipelineCoverageToColorStateCreateInfoNV";
+      case StructureType::eWriteDescriptorSetAccelerationStructureKHR : return "WriteDescriptorSetAccelerationStructureKHR";
+      case StructureType::eAccelerationStructureBuildGeometryInfoKHR : return "AccelerationStructureBuildGeometryInfoKHR";
+      case StructureType::eAccelerationStructureDeviceAddressInfoKHR : return "AccelerationStructureDeviceAddressInfoKHR";
+      case StructureType::eAccelerationStructureGeometryAabbsDataKHR : return "AccelerationStructureGeometryAabbsDataKHR";
+      case StructureType::eAccelerationStructureGeometryInstancesDataKHR : return "AccelerationStructureGeometryInstancesDataKHR";
+      case StructureType::eAccelerationStructureGeometryTrianglesDataKHR : return "AccelerationStructureGeometryTrianglesDataKHR";
+      case StructureType::eAccelerationStructureGeometryKHR : return "AccelerationStructureGeometryKHR";
+      case StructureType::eAccelerationStructureVersionInfoKHR : return "AccelerationStructureVersionInfoKHR";
+      case StructureType::eCopyAccelerationStructureInfoKHR : return "CopyAccelerationStructureInfoKHR";
+      case StructureType::eCopyAccelerationStructureToMemoryInfoKHR : return "CopyAccelerationStructureToMemoryInfoKHR";
+      case StructureType::eCopyMemoryToAccelerationStructureInfoKHR : return "CopyMemoryToAccelerationStructureInfoKHR";
+      case StructureType::ePhysicalDeviceAccelerationStructureFeaturesKHR : return "PhysicalDeviceAccelerationStructureFeaturesKHR";
+      case StructureType::ePhysicalDeviceAccelerationStructurePropertiesKHR : return "PhysicalDeviceAccelerationStructurePropertiesKHR";
+      case StructureType::eAccelerationStructureCreateInfoKHR : return "AccelerationStructureCreateInfoKHR";
+      case StructureType::eAccelerationStructureBuildSizesInfoKHR : return "AccelerationStructureBuildSizesInfoKHR";
+      case StructureType::ePhysicalDeviceRayTracingPipelineFeaturesKHR : return "PhysicalDeviceRayTracingPipelineFeaturesKHR";
+      case StructureType::ePhysicalDeviceRayTracingPipelinePropertiesKHR : return "PhysicalDeviceRayTracingPipelinePropertiesKHR";
+      case StructureType::eRayTracingPipelineCreateInfoKHR : return "RayTracingPipelineCreateInfoKHR";
+      case StructureType::eRayTracingShaderGroupCreateInfoKHR : return "RayTracingShaderGroupCreateInfoKHR";
+      case StructureType::eRayTracingPipelineInterfaceCreateInfoKHR : return "RayTracingPipelineInterfaceCreateInfoKHR";
+      case StructureType::ePhysicalDeviceRayQueryFeaturesKHR : return "PhysicalDeviceRayQueryFeaturesKHR";
+      case StructureType::ePipelineCoverageModulationStateCreateInfoNV : return "PipelineCoverageModulationStateCreateInfoNV";
+      case StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV : return "PhysicalDeviceShaderSmBuiltinsFeaturesNV";
+      case StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV : return "PhysicalDeviceShaderSmBuiltinsPropertiesNV";
+      case StructureType::eDrmFormatModifierPropertiesListEXT : return "DrmFormatModifierPropertiesListEXT";
+      case StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT : return "PhysicalDeviceImageDrmFormatModifierInfoEXT";
+      case StructureType::eImageDrmFormatModifierListCreateInfoEXT : return "ImageDrmFormatModifierListCreateInfoEXT";
+      case StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT : return "ImageDrmFormatModifierExplicitCreateInfoEXT";
+      case StructureType::eImageDrmFormatModifierPropertiesEXT : return "ImageDrmFormatModifierPropertiesEXT";
+      case StructureType::eDrmFormatModifierPropertiesList2EXT : return "DrmFormatModifierPropertiesList2EXT";
+      case StructureType::eValidationCacheCreateInfoEXT : return "ValidationCacheCreateInfoEXT";
+      case StructureType::eShaderModuleValidationCacheCreateInfoEXT : return "ShaderModuleValidationCacheCreateInfoEXT";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+      case StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR : return "PhysicalDevicePortabilitySubsetFeaturesKHR";
+      case StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR : return "PhysicalDevicePortabilitySubsetPropertiesKHR";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      case StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV : return "PipelineViewportShadingRateImageStateCreateInfoNV";
+      case StructureType::ePhysicalDeviceShadingRateImageFeaturesNV : return "PhysicalDeviceShadingRateImageFeaturesNV";
+      case StructureType::ePhysicalDeviceShadingRateImagePropertiesNV : return "PhysicalDeviceShadingRateImagePropertiesNV";
+      case StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV : return "PipelineViewportCoarseSampleOrderStateCreateInfoNV";
+      case StructureType::eRayTracingPipelineCreateInfoNV : return "RayTracingPipelineCreateInfoNV";
+      case StructureType::eAccelerationStructureCreateInfoNV : return "AccelerationStructureCreateInfoNV";
+      case StructureType::eGeometryNV : return "GeometryNV";
+      case StructureType::eGeometryTrianglesNV : return "GeometryTrianglesNV";
+      case StructureType::eGeometryAabbNV : return "GeometryAabbNV";
+      case StructureType::eBindAccelerationStructureMemoryInfoNV : return "BindAccelerationStructureMemoryInfoNV";
+      case StructureType::eWriteDescriptorSetAccelerationStructureNV : return "WriteDescriptorSetAccelerationStructureNV";
+      case StructureType::eAccelerationStructureMemoryRequirementsInfoNV : return "AccelerationStructureMemoryRequirementsInfoNV";
+      case StructureType::ePhysicalDeviceRayTracingPropertiesNV : return "PhysicalDeviceRayTracingPropertiesNV";
+      case StructureType::eRayTracingShaderGroupCreateInfoNV : return "RayTracingShaderGroupCreateInfoNV";
+      case StructureType::eAccelerationStructureInfoNV : return "AccelerationStructureInfoNV";
+      case StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV : return "PhysicalDeviceRepresentativeFragmentTestFeaturesNV";
+      case StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV : return "PipelineRepresentativeFragmentTestStateCreateInfoNV";
+      case StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT : return "PhysicalDeviceImageViewImageFormatInfoEXT";
+      case StructureType::eFilterCubicImageViewImageFormatPropertiesEXT : return "FilterCubicImageViewImageFormatPropertiesEXT";
+      case StructureType::eImportMemoryHostPointerInfoEXT : return "ImportMemoryHostPointerInfoEXT";
+      case StructureType::eMemoryHostPointerPropertiesEXT : return "MemoryHostPointerPropertiesEXT";
+      case StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT : return "PhysicalDeviceExternalMemoryHostPropertiesEXT";
+      case StructureType::ePhysicalDeviceShaderClockFeaturesKHR : return "PhysicalDeviceShaderClockFeaturesKHR";
+      case StructureType::ePipelineCompilerControlCreateInfoAMD : return "PipelineCompilerControlCreateInfoAMD";
+      case StructureType::eCalibratedTimestampInfoEXT : return "CalibratedTimestampInfoEXT";
+      case StructureType::ePhysicalDeviceShaderCorePropertiesAMD : return "PhysicalDeviceShaderCorePropertiesAMD";
+      case StructureType::eVideoDecodeH265CapabilitiesKHR : return "VideoDecodeH265CapabilitiesKHR";
+      case StructureType::eVideoDecodeH265SessionParametersCreateInfoKHR : return "VideoDecodeH265SessionParametersCreateInfoKHR";
+      case StructureType::eVideoDecodeH265SessionParametersAddInfoKHR : return "VideoDecodeH265SessionParametersAddInfoKHR";
+      case StructureType::eVideoDecodeH265ProfileInfoKHR : return "VideoDecodeH265ProfileInfoKHR";
+      case StructureType::eVideoDecodeH265PictureInfoKHR : return "VideoDecodeH265PictureInfoKHR";
+      case StructureType::eVideoDecodeH265DpbSlotInfoKHR : return "VideoDecodeH265DpbSlotInfoKHR";
+      case StructureType::eDeviceQueueGlobalPriorityCreateInfoKHR : return "DeviceQueueGlobalPriorityCreateInfoKHR";
+      case StructureType::ePhysicalDeviceGlobalPriorityQueryFeaturesKHR : return "PhysicalDeviceGlobalPriorityQueryFeaturesKHR";
+      case StructureType::eQueueFamilyGlobalPriorityPropertiesKHR : return "QueueFamilyGlobalPriorityPropertiesKHR";
+      case StructureType::eDeviceMemoryOverallocationCreateInfoAMD : return "DeviceMemoryOverallocationCreateInfoAMD";
+      case StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT : return "PhysicalDeviceVertexAttributeDivisorPropertiesEXT";
+      case StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT : return "PipelineVertexInputDivisorStateCreateInfoEXT";
+      case StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT : return "PhysicalDeviceVertexAttributeDivisorFeaturesEXT";
+#if defined( VK_USE_PLATFORM_GGP )
+      case StructureType::ePresentFrameTokenGGP : return "PresentFrameTokenGGP";
+#endif /*VK_USE_PLATFORM_GGP*/
+      case StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV : return "PhysicalDeviceComputeShaderDerivativesFeaturesNV";
+      case StructureType::ePhysicalDeviceMeshShaderFeaturesNV : return "PhysicalDeviceMeshShaderFeaturesNV";
+      case StructureType::ePhysicalDeviceMeshShaderPropertiesNV : return "PhysicalDeviceMeshShaderPropertiesNV";
+      case StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV : return "PhysicalDeviceShaderImageFootprintFeaturesNV";
+      case StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV : return "PipelineViewportExclusiveScissorStateCreateInfoNV";
+      case StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV : return "PhysicalDeviceExclusiveScissorFeaturesNV";
+      case StructureType::eCheckpointDataNV : return "CheckpointDataNV";
+      case StructureType::eQueueFamilyCheckpointPropertiesNV : return "QueueFamilyCheckpointPropertiesNV";
+      case StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL : return "PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL";
+      case StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL : return "QueryPoolPerformanceQueryCreateInfoINTEL";
+      case StructureType::eInitializePerformanceApiInfoINTEL : return "InitializePerformanceApiInfoINTEL";
+      case StructureType::ePerformanceMarkerInfoINTEL : return "PerformanceMarkerInfoINTEL";
+      case StructureType::ePerformanceStreamMarkerInfoINTEL : return "PerformanceStreamMarkerInfoINTEL";
+      case StructureType::ePerformanceOverrideInfoINTEL : return "PerformanceOverrideInfoINTEL";
+      case StructureType::ePerformanceConfigurationAcquireInfoINTEL : return "PerformanceConfigurationAcquireInfoINTEL";
+      case StructureType::ePhysicalDevicePciBusInfoPropertiesEXT : return "PhysicalDevicePciBusInfoPropertiesEXT";
+      case StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD : return "DisplayNativeHdrSurfaceCapabilitiesAMD";
+      case StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD : return "SwapchainDisplayNativeHdrCreateInfoAMD";
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+      case StructureType::eImagepipeSurfaceCreateInfoFUCHSIA : return "ImagepipeSurfaceCreateInfoFUCHSIA";
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+      case StructureType::eMetalSurfaceCreateInfoEXT : return "MetalSurfaceCreateInfoEXT";
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+      case StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT : return "PhysicalDeviceFragmentDensityMapFeaturesEXT";
+      case StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT : return "PhysicalDeviceFragmentDensityMapPropertiesEXT";
+      case StructureType::eRenderPassFragmentDensityMapCreateInfoEXT : return "RenderPassFragmentDensityMapCreateInfoEXT";
+      case StructureType::eFragmentShadingRateAttachmentInfoKHR : return "FragmentShadingRateAttachmentInfoKHR";
+      case StructureType::ePipelineFragmentShadingRateStateCreateInfoKHR : return "PipelineFragmentShadingRateStateCreateInfoKHR";
+      case StructureType::ePhysicalDeviceFragmentShadingRatePropertiesKHR : return "PhysicalDeviceFragmentShadingRatePropertiesKHR";
+      case StructureType::ePhysicalDeviceFragmentShadingRateFeaturesKHR : return "PhysicalDeviceFragmentShadingRateFeaturesKHR";
+      case StructureType::ePhysicalDeviceFragmentShadingRateKHR : return "PhysicalDeviceFragmentShadingRateKHR";
+      case StructureType::ePhysicalDeviceShaderCoreProperties2AMD : return "PhysicalDeviceShaderCoreProperties2AMD";
+      case StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD : return "PhysicalDeviceCoherentMemoryFeaturesAMD";
+      case StructureType::ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT : return "PhysicalDeviceShaderImageAtomicInt64FeaturesEXT";
+      case StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT : return "PhysicalDeviceMemoryBudgetPropertiesEXT";
+      case StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT : return "PhysicalDeviceMemoryPriorityFeaturesEXT";
+      case StructureType::eMemoryPriorityAllocateInfoEXT : return "MemoryPriorityAllocateInfoEXT";
+      case StructureType::eSurfaceProtectedCapabilitiesKHR : return "SurfaceProtectedCapabilitiesKHR";
+      case StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV : return "PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV";
+      case StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT : return "PhysicalDeviceBufferDeviceAddressFeaturesEXT";
+      case StructureType::eBufferDeviceAddressCreateInfoEXT : return "BufferDeviceAddressCreateInfoEXT";
+      case StructureType::eValidationFeaturesEXT : return "ValidationFeaturesEXT";
+      case StructureType::ePhysicalDevicePresentWaitFeaturesKHR : return "PhysicalDevicePresentWaitFeaturesKHR";
+      case StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV : return "PhysicalDeviceCooperativeMatrixFeaturesNV";
+      case StructureType::eCooperativeMatrixPropertiesNV : return "CooperativeMatrixPropertiesNV";
+      case StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV : return "PhysicalDeviceCooperativeMatrixPropertiesNV";
+      case StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV : return "PhysicalDeviceCoverageReductionModeFeaturesNV";
+      case StructureType::ePipelineCoverageReductionStateCreateInfoNV : return "PipelineCoverageReductionStateCreateInfoNV";
+      case StructureType::eFramebufferMixedSamplesCombinationNV : return "FramebufferMixedSamplesCombinationNV";
+      case StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT : return "PhysicalDeviceFragmentShaderInterlockFeaturesEXT";
+      case StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT : return "PhysicalDeviceYcbcrImageArraysFeaturesEXT";
+      case StructureType::ePhysicalDeviceProvokingVertexFeaturesEXT : return "PhysicalDeviceProvokingVertexFeaturesEXT";
+      case StructureType::ePipelineRasterizationProvokingVertexStateCreateInfoEXT : return "PipelineRasterizationProvokingVertexStateCreateInfoEXT";
+      case StructureType::ePhysicalDeviceProvokingVertexPropertiesEXT : return "PhysicalDeviceProvokingVertexPropertiesEXT";
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+      case StructureType::eSurfaceFullScreenExclusiveInfoEXT : return "SurfaceFullScreenExclusiveInfoEXT";
+      case StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT : return "SurfaceCapabilitiesFullScreenExclusiveEXT";
+      case StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT : return "SurfaceFullScreenExclusiveWin32InfoEXT";
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+      case StructureType::eHeadlessSurfaceCreateInfoEXT : return "HeadlessSurfaceCreateInfoEXT";
+      case StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT : return "PhysicalDeviceLineRasterizationFeaturesEXT";
+      case StructureType::ePipelineRasterizationLineStateCreateInfoEXT : return "PipelineRasterizationLineStateCreateInfoEXT";
+      case StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT : return "PhysicalDeviceLineRasterizationPropertiesEXT";
+      case StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT : return "PhysicalDeviceShaderAtomicFloatFeaturesEXT";
+      case StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT : return "PhysicalDeviceIndexTypeUint8FeaturesEXT";
+      case StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT : return "PhysicalDeviceExtendedDynamicStateFeaturesEXT";
+      case StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR : return "PhysicalDevicePipelineExecutablePropertiesFeaturesKHR";
+      case StructureType::ePipelineInfoKHR : return "PipelineInfoKHR";
+      case StructureType::ePipelineExecutablePropertiesKHR : return "PipelineExecutablePropertiesKHR";
+      case StructureType::ePipelineExecutableInfoKHR : return "PipelineExecutableInfoKHR";
+      case StructureType::ePipelineExecutableStatisticKHR : return "PipelineExecutableStatisticKHR";
+      case StructureType::ePipelineExecutableInternalRepresentationKHR : return "PipelineExecutableInternalRepresentationKHR";
+      case StructureType::ePhysicalDeviceShaderAtomicFloat2FeaturesEXT : return "PhysicalDeviceShaderAtomicFloat2FeaturesEXT";
+      case StructureType::eSurfacePresentModeEXT : return "SurfacePresentModeEXT";
+      case StructureType::eSurfacePresentScalingCapabilitiesEXT : return "SurfacePresentScalingCapabilitiesEXT";
+      case StructureType::eSurfacePresentModeCompatibilityEXT : return "SurfacePresentModeCompatibilityEXT";
+      case StructureType::ePhysicalDeviceSwapchainMaintenance1FeaturesEXT : return "PhysicalDeviceSwapchainMaintenance1FeaturesEXT";
+      case StructureType::eSwapchainPresentFenceInfoEXT : return "SwapchainPresentFenceInfoEXT";
+      case StructureType::eSwapchainPresentModesCreateInfoEXT : return "SwapchainPresentModesCreateInfoEXT";
+      case StructureType::eSwapchainPresentModeInfoEXT : return "SwapchainPresentModeInfoEXT";
+      case StructureType::eSwapchainPresentScalingCreateInfoEXT : return "SwapchainPresentScalingCreateInfoEXT";
+      case StructureType::eReleaseSwapchainImagesInfoEXT : return "ReleaseSwapchainImagesInfoEXT";
+      case StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV : return "PhysicalDeviceDeviceGeneratedCommandsPropertiesNV";
+      case StructureType::eGraphicsShaderGroupCreateInfoNV : return "GraphicsShaderGroupCreateInfoNV";
+      case StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV : return "GraphicsPipelineShaderGroupsCreateInfoNV";
+      case StructureType::eIndirectCommandsLayoutTokenNV : return "IndirectCommandsLayoutTokenNV";
+      case StructureType::eIndirectCommandsLayoutCreateInfoNV : return "IndirectCommandsLayoutCreateInfoNV";
+      case StructureType::eGeneratedCommandsInfoNV : return "GeneratedCommandsInfoNV";
+      case StructureType::eGeneratedCommandsMemoryRequirementsInfoNV : return "GeneratedCommandsMemoryRequirementsInfoNV";
+      case StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV : return "PhysicalDeviceDeviceGeneratedCommandsFeaturesNV";
+      case StructureType::ePhysicalDeviceInheritedViewportScissorFeaturesNV : return "PhysicalDeviceInheritedViewportScissorFeaturesNV";
+      case StructureType::eCommandBufferInheritanceViewportScissorInfoNV : return "CommandBufferInheritanceViewportScissorInfoNV";
+      case StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT : return "PhysicalDeviceTexelBufferAlignmentFeaturesEXT";
+      case StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM : return "CommandBufferInheritanceRenderPassTransformInfoQCOM";
+      case StructureType::eRenderPassTransformBeginInfoQCOM : return "RenderPassTransformBeginInfoQCOM";
+      case StructureType::ePhysicalDeviceDeviceMemoryReportFeaturesEXT : return "PhysicalDeviceDeviceMemoryReportFeaturesEXT";
+      case StructureType::eDeviceDeviceMemoryReportCreateInfoEXT : return "DeviceDeviceMemoryReportCreateInfoEXT";
+      case StructureType::eDeviceMemoryReportCallbackDataEXT : return "DeviceMemoryReportCallbackDataEXT";
+      case StructureType::ePhysicalDeviceRobustness2FeaturesEXT : return "PhysicalDeviceRobustness2FeaturesEXT";
+      case StructureType::ePhysicalDeviceRobustness2PropertiesEXT : return "PhysicalDeviceRobustness2PropertiesEXT";
+      case StructureType::eSamplerCustomBorderColorCreateInfoEXT : return "SamplerCustomBorderColorCreateInfoEXT";
+      case StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT : return "PhysicalDeviceCustomBorderColorPropertiesEXT";
+      case StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT : return "PhysicalDeviceCustomBorderColorFeaturesEXT";
+      case StructureType::ePipelineLibraryCreateInfoKHR : return "PipelineLibraryCreateInfoKHR";
+      case StructureType::ePhysicalDevicePresentBarrierFeaturesNV : return "PhysicalDevicePresentBarrierFeaturesNV";
+      case StructureType::eSurfaceCapabilitiesPresentBarrierNV : return "SurfaceCapabilitiesPresentBarrierNV";
+      case StructureType::eSwapchainPresentBarrierCreateInfoNV : return "SwapchainPresentBarrierCreateInfoNV";
+      case StructureType::ePresentIdKHR : return "PresentIdKHR";
+      case StructureType::ePhysicalDevicePresentIdFeaturesKHR : return "PhysicalDevicePresentIdFeaturesKHR";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+      case StructureType::eVideoEncodeInfoKHR : return "VideoEncodeInfoKHR";
+      case StructureType::eVideoEncodeRateControlInfoKHR : return "VideoEncodeRateControlInfoKHR";
+      case StructureType::eVideoEncodeRateControlLayerInfoKHR : return "VideoEncodeRateControlLayerInfoKHR";
+      case StructureType::eVideoEncodeCapabilitiesKHR : return "VideoEncodeCapabilitiesKHR";
+      case StructureType::eVideoEncodeUsageInfoKHR : return "VideoEncodeUsageInfoKHR";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      case StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV : return "PhysicalDeviceDiagnosticsConfigFeaturesNV";
+      case StructureType::eDeviceDiagnosticsConfigCreateInfoNV : return "DeviceDiagnosticsConfigCreateInfoNV";
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+      case StructureType::eExportMetalObjectCreateInfoEXT : return "ExportMetalObjectCreateInfoEXT";
+      case StructureType::eExportMetalObjectsInfoEXT : return "ExportMetalObjectsInfoEXT";
+      case StructureType::eExportMetalDeviceInfoEXT : return "ExportMetalDeviceInfoEXT";
+      case StructureType::eExportMetalCommandQueueInfoEXT : return "ExportMetalCommandQueueInfoEXT";
+      case StructureType::eExportMetalBufferInfoEXT : return "ExportMetalBufferInfoEXT";
+      case StructureType::eImportMetalBufferInfoEXT : return "ImportMetalBufferInfoEXT";
+      case StructureType::eExportMetalTextureInfoEXT : return "ExportMetalTextureInfoEXT";
+      case StructureType::eImportMetalTextureInfoEXT : return "ImportMetalTextureInfoEXT";
+      case StructureType::eExportMetalIoSurfaceInfoEXT : return "ExportMetalIoSurfaceInfoEXT";
+      case StructureType::eImportMetalIoSurfaceInfoEXT : return "ImportMetalIoSurfaceInfoEXT";
+      case StructureType::eExportMetalSharedEventInfoEXT : return "ExportMetalSharedEventInfoEXT";
+      case StructureType::eImportMetalSharedEventInfoEXT : return "ImportMetalSharedEventInfoEXT";
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+      case StructureType::eQueueFamilyCheckpointProperties2NV : return "QueueFamilyCheckpointProperties2NV";
+      case StructureType::eCheckpointData2NV : return "CheckpointData2NV";
+      case StructureType::ePhysicalDeviceDescriptorBufferPropertiesEXT : return "PhysicalDeviceDescriptorBufferPropertiesEXT";
+      case StructureType::ePhysicalDeviceDescriptorBufferDensityMapPropertiesEXT : return "PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT";
+      case StructureType::ePhysicalDeviceDescriptorBufferFeaturesEXT : return "PhysicalDeviceDescriptorBufferFeaturesEXT";
+      case StructureType::eDescriptorAddressInfoEXT : return "DescriptorAddressInfoEXT";
+      case StructureType::eDescriptorGetInfoEXT : return "DescriptorGetInfoEXT";
+      case StructureType::eBufferCaptureDescriptorDataInfoEXT : return "BufferCaptureDescriptorDataInfoEXT";
+      case StructureType::eImageCaptureDescriptorDataInfoEXT : return "ImageCaptureDescriptorDataInfoEXT";
+      case StructureType::eImageViewCaptureDescriptorDataInfoEXT : return "ImageViewCaptureDescriptorDataInfoEXT";
+      case StructureType::eSamplerCaptureDescriptorDataInfoEXT : return "SamplerCaptureDescriptorDataInfoEXT";
+      case StructureType::eOpaqueCaptureDescriptorDataCreateInfoEXT : return "OpaqueCaptureDescriptorDataCreateInfoEXT";
+      case StructureType::eDescriptorBufferBindingInfoEXT : return "DescriptorBufferBindingInfoEXT";
+      case StructureType::eDescriptorBufferBindingPushDescriptorBufferHandleEXT : return "DescriptorBufferBindingPushDescriptorBufferHandleEXT";
+      case StructureType::eAccelerationStructureCaptureDescriptorDataInfoEXT : return "AccelerationStructureCaptureDescriptorDataInfoEXT";
+      case StructureType::ePhysicalDeviceGraphicsPipelineLibraryFeaturesEXT : return "PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT";
+      case StructureType::ePhysicalDeviceGraphicsPipelineLibraryPropertiesEXT : return "PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT";
+      case StructureType::eGraphicsPipelineLibraryCreateInfoEXT : return "GraphicsPipelineLibraryCreateInfoEXT";
+      case StructureType::ePhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD : return "PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD";
+      case StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesKHR : return "PhysicalDeviceFragmentShaderBarycentricFeaturesKHR";
+      case StructureType::ePhysicalDeviceFragmentShaderBarycentricPropertiesKHR : return "PhysicalDeviceFragmentShaderBarycentricPropertiesKHR";
+      case StructureType::ePhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR : return "PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR";
+      case StructureType::ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV : return "PhysicalDeviceFragmentShadingRateEnumsPropertiesNV";
+      case StructureType::ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV : return "PhysicalDeviceFragmentShadingRateEnumsFeaturesNV";
+      case StructureType::ePipelineFragmentShadingRateEnumStateCreateInfoNV : return "PipelineFragmentShadingRateEnumStateCreateInfoNV";
+      case StructureType::eAccelerationStructureGeometryMotionTrianglesDataNV : return "AccelerationStructureGeometryMotionTrianglesDataNV";
+      case StructureType::ePhysicalDeviceRayTracingMotionBlurFeaturesNV : return "PhysicalDeviceRayTracingMotionBlurFeaturesNV";
+      case StructureType::eAccelerationStructureMotionInfoNV : return "AccelerationStructureMotionInfoNV";
+      case StructureType::ePhysicalDeviceMeshShaderFeaturesEXT : return "PhysicalDeviceMeshShaderFeaturesEXT";
+      case StructureType::ePhysicalDeviceMeshShaderPropertiesEXT : return "PhysicalDeviceMeshShaderPropertiesEXT";
+      case StructureType::ePhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT : return "PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT";
+      case StructureType::ePhysicalDeviceFragmentDensityMap2FeaturesEXT : return "PhysicalDeviceFragmentDensityMap2FeaturesEXT";
+      case StructureType::ePhysicalDeviceFragmentDensityMap2PropertiesEXT : return "PhysicalDeviceFragmentDensityMap2PropertiesEXT";
+      case StructureType::eCopyCommandTransformInfoQCOM : return "CopyCommandTransformInfoQCOM";
+      case StructureType::ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR : return "PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR";
+      case StructureType::ePhysicalDeviceImageCompressionControlFeaturesEXT : return "PhysicalDeviceImageCompressionControlFeaturesEXT";
+      case StructureType::eImageCompressionControlEXT : return "ImageCompressionControlEXT";
+      case StructureType::eSubresourceLayout2EXT : return "SubresourceLayout2EXT";
+      case StructureType::eImageSubresource2EXT : return "ImageSubresource2EXT";
+      case StructureType::eImageCompressionPropertiesEXT : return "ImageCompressionPropertiesEXT";
+      case StructureType::ePhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT : return "PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT";
+      case StructureType::ePhysicalDevice4444FormatsFeaturesEXT : return "PhysicalDevice4444FormatsFeaturesEXT";
+      case StructureType::ePhysicalDeviceFaultFeaturesEXT : return "PhysicalDeviceFaultFeaturesEXT";
+      case StructureType::eDeviceFaultCountsEXT : return "DeviceFaultCountsEXT";
+      case StructureType::eDeviceFaultInfoEXT : return "DeviceFaultInfoEXT";
+      case StructureType::ePhysicalDeviceRgba10X6FormatsFeaturesEXT : return "PhysicalDeviceRgba10X6FormatsFeaturesEXT";
+#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
+      case StructureType::eDirectfbSurfaceCreateInfoEXT : return "DirectfbSurfaceCreateInfoEXT";
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+      case StructureType::ePhysicalDeviceVertexInputDynamicStateFeaturesEXT : return "PhysicalDeviceVertexInputDynamicStateFeaturesEXT";
+      case StructureType::eVertexInputBindingDescription2EXT : return "VertexInputBindingDescription2EXT";
+      case StructureType::eVertexInputAttributeDescription2EXT : return "VertexInputAttributeDescription2EXT";
+      case StructureType::ePhysicalDeviceDrmPropertiesEXT : return "PhysicalDeviceDrmPropertiesEXT";
+      case StructureType::ePhysicalDeviceAddressBindingReportFeaturesEXT : return "PhysicalDeviceAddressBindingReportFeaturesEXT";
+      case StructureType::eDeviceAddressBindingCallbackDataEXT : return "DeviceAddressBindingCallbackDataEXT";
+      case StructureType::ePhysicalDeviceDepthClipControlFeaturesEXT : return "PhysicalDeviceDepthClipControlFeaturesEXT";
+      case StructureType::ePipelineViewportDepthClipControlCreateInfoEXT : return "PipelineViewportDepthClipControlCreateInfoEXT";
+      case StructureType::ePhysicalDevicePrimitiveTopologyListRestartFeaturesEXT : return "PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT";
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+      case StructureType::eImportMemoryZirconHandleInfoFUCHSIA : return "ImportMemoryZirconHandleInfoFUCHSIA";
+      case StructureType::eMemoryZirconHandlePropertiesFUCHSIA : return "MemoryZirconHandlePropertiesFUCHSIA";
+      case StructureType::eMemoryGetZirconHandleInfoFUCHSIA : return "MemoryGetZirconHandleInfoFUCHSIA";
+      case StructureType::eImportSemaphoreZirconHandleInfoFUCHSIA : return "ImportSemaphoreZirconHandleInfoFUCHSIA";
+      case StructureType::eSemaphoreGetZirconHandleInfoFUCHSIA : return "SemaphoreGetZirconHandleInfoFUCHSIA";
+      case StructureType::eBufferCollectionCreateInfoFUCHSIA : return "BufferCollectionCreateInfoFUCHSIA";
+      case StructureType::eImportMemoryBufferCollectionFUCHSIA : return "ImportMemoryBufferCollectionFUCHSIA";
+      case StructureType::eBufferCollectionImageCreateInfoFUCHSIA : return "BufferCollectionImageCreateInfoFUCHSIA";
+      case StructureType::eBufferCollectionPropertiesFUCHSIA : return "BufferCollectionPropertiesFUCHSIA";
+      case StructureType::eBufferConstraintsInfoFUCHSIA : return "BufferConstraintsInfoFUCHSIA";
+      case StructureType::eBufferCollectionBufferCreateInfoFUCHSIA : return "BufferCollectionBufferCreateInfoFUCHSIA";
+      case StructureType::eImageConstraintsInfoFUCHSIA : return "ImageConstraintsInfoFUCHSIA";
+      case StructureType::eImageFormatConstraintsInfoFUCHSIA : return "ImageFormatConstraintsInfoFUCHSIA";
+      case StructureType::eSysmemColorSpaceFUCHSIA : return "SysmemColorSpaceFUCHSIA";
+      case StructureType::eBufferCollectionConstraintsInfoFUCHSIA : return "BufferCollectionConstraintsInfoFUCHSIA";
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+      case StructureType::eSubpassShadingPipelineCreateInfoHUAWEI : return "SubpassShadingPipelineCreateInfoHUAWEI";
+      case StructureType::ePhysicalDeviceSubpassShadingFeaturesHUAWEI : return "PhysicalDeviceSubpassShadingFeaturesHUAWEI";
+      case StructureType::ePhysicalDeviceSubpassShadingPropertiesHUAWEI : return "PhysicalDeviceSubpassShadingPropertiesHUAWEI";
+      case StructureType::ePhysicalDeviceInvocationMaskFeaturesHUAWEI : return "PhysicalDeviceInvocationMaskFeaturesHUAWEI";
+      case StructureType::eMemoryGetRemoteAddressInfoNV : return "MemoryGetRemoteAddressInfoNV";
+      case StructureType::ePhysicalDeviceExternalMemoryRdmaFeaturesNV : return "PhysicalDeviceExternalMemoryRdmaFeaturesNV";
+      case StructureType::ePipelinePropertiesIdentifierEXT : return "PipelinePropertiesIdentifierEXT";
+      case StructureType::ePhysicalDevicePipelinePropertiesFeaturesEXT : return "PhysicalDevicePipelinePropertiesFeaturesEXT";
+      case StructureType::ePhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT : return "PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT";
+      case StructureType::eSubpassResolvePerformanceQueryEXT : return "SubpassResolvePerformanceQueryEXT";
+      case StructureType::eMultisampledRenderToSingleSampledInfoEXT : return "MultisampledRenderToSingleSampledInfoEXT";
+      case StructureType::ePhysicalDeviceExtendedDynamicState2FeaturesEXT : return "PhysicalDeviceExtendedDynamicState2FeaturesEXT";
+#if defined( VK_USE_PLATFORM_SCREEN_QNX )
+      case StructureType::eScreenSurfaceCreateInfoQNX : return "ScreenSurfaceCreateInfoQNX";
+#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+      case StructureType::ePhysicalDeviceColorWriteEnableFeaturesEXT : return "PhysicalDeviceColorWriteEnableFeaturesEXT";
+      case StructureType::ePipelineColorWriteCreateInfoEXT : return "PipelineColorWriteCreateInfoEXT";
+      case StructureType::ePhysicalDevicePrimitivesGeneratedQueryFeaturesEXT : return "PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT";
+      case StructureType::ePhysicalDeviceRayTracingMaintenance1FeaturesKHR : return "PhysicalDeviceRayTracingMaintenance1FeaturesKHR";
+      case StructureType::ePhysicalDeviceImageViewMinLodFeaturesEXT : return "PhysicalDeviceImageViewMinLodFeaturesEXT";
+      case StructureType::eImageViewMinLodCreateInfoEXT : return "ImageViewMinLodCreateInfoEXT";
+      case StructureType::ePhysicalDeviceMultiDrawFeaturesEXT : return "PhysicalDeviceMultiDrawFeaturesEXT";
+      case StructureType::ePhysicalDeviceMultiDrawPropertiesEXT : return "PhysicalDeviceMultiDrawPropertiesEXT";
+      case StructureType::ePhysicalDeviceImage2DViewOf3DFeaturesEXT : return "PhysicalDeviceImage2DViewOf3DFeaturesEXT";
+      case StructureType::eMicromapBuildInfoEXT : return "MicromapBuildInfoEXT";
+      case StructureType::eMicromapVersionInfoEXT : return "MicromapVersionInfoEXT";
+      case StructureType::eCopyMicromapInfoEXT : return "CopyMicromapInfoEXT";
+      case StructureType::eCopyMicromapToMemoryInfoEXT : return "CopyMicromapToMemoryInfoEXT";
+      case StructureType::eCopyMemoryToMicromapInfoEXT : return "CopyMemoryToMicromapInfoEXT";
+      case StructureType::ePhysicalDeviceOpacityMicromapFeaturesEXT : return "PhysicalDeviceOpacityMicromapFeaturesEXT";
+      case StructureType::ePhysicalDeviceOpacityMicromapPropertiesEXT : return "PhysicalDeviceOpacityMicromapPropertiesEXT";
+      case StructureType::eMicromapCreateInfoEXT : return "MicromapCreateInfoEXT";
+      case StructureType::eMicromapBuildSizesInfoEXT : return "MicromapBuildSizesInfoEXT";
+      case StructureType::eAccelerationStructureTrianglesOpacityMicromapEXT : return "AccelerationStructureTrianglesOpacityMicromapEXT";
+      case StructureType::ePhysicalDeviceClusterCullingShaderFeaturesHUAWEI : return "PhysicalDeviceClusterCullingShaderFeaturesHUAWEI";
+      case StructureType::ePhysicalDeviceClusterCullingShaderPropertiesHUAWEI : return "PhysicalDeviceClusterCullingShaderPropertiesHUAWEI";
+      case StructureType::ePhysicalDeviceBorderColorSwizzleFeaturesEXT : return "PhysicalDeviceBorderColorSwizzleFeaturesEXT";
+      case StructureType::eSamplerBorderColorComponentMappingCreateInfoEXT : return "SamplerBorderColorComponentMappingCreateInfoEXT";
+      case StructureType::ePhysicalDevicePageableDeviceLocalMemoryFeaturesEXT : return "PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT";
+      case StructureType::ePhysicalDeviceDescriptorSetHostMappingFeaturesVALVE : return "PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE";
+      case StructureType::eDescriptorSetBindingReferenceVALVE : return "DescriptorSetBindingReferenceVALVE";
+      case StructureType::eDescriptorSetLayoutHostMappingInfoVALVE : return "DescriptorSetLayoutHostMappingInfoVALVE";
+      case StructureType::ePhysicalDeviceDepthClampZeroOneFeaturesEXT : return "PhysicalDeviceDepthClampZeroOneFeaturesEXT";
+      case StructureType::ePhysicalDeviceNonSeamlessCubeMapFeaturesEXT : return "PhysicalDeviceNonSeamlessCubeMapFeaturesEXT";
+      case StructureType::ePhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM : return "PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM";
+      case StructureType::ePhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM : return "PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM";
+      case StructureType::eSubpassFragmentDensityMapOffsetEndInfoQCOM : return "SubpassFragmentDensityMapOffsetEndInfoQCOM";
+      case StructureType::ePhysicalDeviceCopyMemoryIndirectFeaturesNV : return "PhysicalDeviceCopyMemoryIndirectFeaturesNV";
+      case StructureType::ePhysicalDeviceCopyMemoryIndirectPropertiesNV : return "PhysicalDeviceCopyMemoryIndirectPropertiesNV";
+      case StructureType::ePhysicalDeviceMemoryDecompressionFeaturesNV : return "PhysicalDeviceMemoryDecompressionFeaturesNV";
+      case StructureType::ePhysicalDeviceMemoryDecompressionPropertiesNV : return "PhysicalDeviceMemoryDecompressionPropertiesNV";
+      case StructureType::ePhysicalDeviceLinearColorAttachmentFeaturesNV : return "PhysicalDeviceLinearColorAttachmentFeaturesNV";
+      case StructureType::ePhysicalDeviceImageCompressionControlSwapchainFeaturesEXT : return "PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT";
+      case StructureType::ePhysicalDeviceImageProcessingFeaturesQCOM : return "PhysicalDeviceImageProcessingFeaturesQCOM";
+      case StructureType::ePhysicalDeviceImageProcessingPropertiesQCOM : return "PhysicalDeviceImageProcessingPropertiesQCOM";
+      case StructureType::eImageViewSampleWeightCreateInfoQCOM : return "ImageViewSampleWeightCreateInfoQCOM";
+      case StructureType::ePhysicalDeviceExtendedDynamicState3FeaturesEXT : return "PhysicalDeviceExtendedDynamicState3FeaturesEXT";
+      case StructureType::ePhysicalDeviceExtendedDynamicState3PropertiesEXT : return "PhysicalDeviceExtendedDynamicState3PropertiesEXT";
+      case StructureType::ePhysicalDeviceSubpassMergeFeedbackFeaturesEXT : return "PhysicalDeviceSubpassMergeFeedbackFeaturesEXT";
+      case StructureType::eRenderPassCreationControlEXT : return "RenderPassCreationControlEXT";
+      case StructureType::eRenderPassCreationFeedbackCreateInfoEXT : return "RenderPassCreationFeedbackCreateInfoEXT";
+      case StructureType::eRenderPassSubpassFeedbackCreateInfoEXT : return "RenderPassSubpassFeedbackCreateInfoEXT";
+      case StructureType::eDirectDriverLoadingInfoLUNARG : return "DirectDriverLoadingInfoLUNARG";
+      case StructureType::eDirectDriverLoadingListLUNARG : return "DirectDriverLoadingListLUNARG";
+      case StructureType::ePhysicalDeviceShaderModuleIdentifierFeaturesEXT : return "PhysicalDeviceShaderModuleIdentifierFeaturesEXT";
+      case StructureType::ePhysicalDeviceShaderModuleIdentifierPropertiesEXT : return "PhysicalDeviceShaderModuleIdentifierPropertiesEXT";
+      case StructureType::ePipelineShaderStageModuleIdentifierCreateInfoEXT : return "PipelineShaderStageModuleIdentifierCreateInfoEXT";
+      case StructureType::eShaderModuleIdentifierEXT : return "ShaderModuleIdentifierEXT";
+      case StructureType::ePhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT : return "PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT";
+      case StructureType::ePhysicalDeviceOpticalFlowFeaturesNV : return "PhysicalDeviceOpticalFlowFeaturesNV";
+      case StructureType::ePhysicalDeviceOpticalFlowPropertiesNV : return "PhysicalDeviceOpticalFlowPropertiesNV";
+      case StructureType::eOpticalFlowImageFormatInfoNV : return "OpticalFlowImageFormatInfoNV";
+      case StructureType::eOpticalFlowImageFormatPropertiesNV : return "OpticalFlowImageFormatPropertiesNV";
+      case StructureType::eOpticalFlowSessionCreateInfoNV : return "OpticalFlowSessionCreateInfoNV";
+      case StructureType::eOpticalFlowExecuteInfoNV : return "OpticalFlowExecuteInfoNV";
+      case StructureType::eOpticalFlowSessionCreatePrivateDataInfoNV : return "OpticalFlowSessionCreatePrivateDataInfoNV";
+      case StructureType::ePhysicalDeviceLegacyDitheringFeaturesEXT : return "PhysicalDeviceLegacyDitheringFeaturesEXT";
+      case StructureType::ePhysicalDevicePipelineProtectedAccessFeaturesEXT : return "PhysicalDevicePipelineProtectedAccessFeaturesEXT";
+      case StructureType::ePhysicalDeviceTilePropertiesFeaturesQCOM : return "PhysicalDeviceTilePropertiesFeaturesQCOM";
+      case StructureType::eTilePropertiesQCOM : return "TilePropertiesQCOM";
+      case StructureType::ePhysicalDeviceAmigoProfilingFeaturesSEC : return "PhysicalDeviceAmigoProfilingFeaturesSEC";
+      case StructureType::eAmigoProfilingSubmitInfoSEC : return "AmigoProfilingSubmitInfoSEC";
+      case StructureType::ePhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM : return "PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM";
+      case StructureType::ePhysicalDeviceRayTracingInvocationReorderFeaturesNV : return "PhysicalDeviceRayTracingInvocationReorderFeaturesNV";
+      case StructureType::ePhysicalDeviceRayTracingInvocationReorderPropertiesNV : return "PhysicalDeviceRayTracingInvocationReorderPropertiesNV";
+      case StructureType::ePhysicalDeviceMutableDescriptorTypeFeaturesEXT : return "PhysicalDeviceMutableDescriptorTypeFeaturesEXT";
+      case StructureType::eMutableDescriptorTypeCreateInfoEXT : return "MutableDescriptorTypeCreateInfoEXT";
+      case StructureType::ePhysicalDeviceShaderCoreBuiltinsFeaturesARM : return "PhysicalDeviceShaderCoreBuiltinsFeaturesARM";
+      case StructureType::ePhysicalDeviceShaderCoreBuiltinsPropertiesARM : return "PhysicalDeviceShaderCoreBuiltinsPropertiesARM";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineCacheHeaderVersion value )
+  {
+    switch ( value )
+    {
+      case PipelineCacheHeaderVersion::eOne : return "One";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( ObjectType value )
+  {
+    switch ( value )
+    {
+      case ObjectType::eUnknown : return "Unknown";
+      case ObjectType::eInstance : return "Instance";
+      case ObjectType::ePhysicalDevice : return "PhysicalDevice";
+      case ObjectType::eDevice : return "Device";
+      case ObjectType::eQueue : return "Queue";
+      case ObjectType::eSemaphore : return "Semaphore";
+      case ObjectType::eCommandBuffer : return "CommandBuffer";
+      case ObjectType::eFence : return "Fence";
+      case ObjectType::eDeviceMemory : return "DeviceMemory";
+      case ObjectType::eBuffer : return "Buffer";
+      case ObjectType::eImage : return "Image";
+      case ObjectType::eEvent : return "Event";
+      case ObjectType::eQueryPool : return "QueryPool";
+      case ObjectType::eBufferView : return "BufferView";
+      case ObjectType::eImageView : return "ImageView";
+      case ObjectType::eShaderModule : return "ShaderModule";
+      case ObjectType::ePipelineCache : return "PipelineCache";
+      case ObjectType::ePipelineLayout : return "PipelineLayout";
+      case ObjectType::eRenderPass : return "RenderPass";
+      case ObjectType::ePipeline : return "Pipeline";
+      case ObjectType::eDescriptorSetLayout : return "DescriptorSetLayout";
+      case ObjectType::eSampler : return "Sampler";
+      case ObjectType::eDescriptorPool : return "DescriptorPool";
+      case ObjectType::eDescriptorSet : return "DescriptorSet";
+      case ObjectType::eFramebuffer : return "Framebuffer";
+      case ObjectType::eCommandPool : return "CommandPool";
+      case ObjectType::eSamplerYcbcrConversion : return "SamplerYcbcrConversion";
+      case ObjectType::eDescriptorUpdateTemplate : return "DescriptorUpdateTemplate";
+      case ObjectType::ePrivateDataSlot : return "PrivateDataSlot";
+      case ObjectType::eSurfaceKHR : return "SurfaceKHR";
+      case ObjectType::eSwapchainKHR : return "SwapchainKHR";
+      case ObjectType::eDisplayKHR : return "DisplayKHR";
+      case ObjectType::eDisplayModeKHR : return "DisplayModeKHR";
+      case ObjectType::eDebugReportCallbackEXT : return "DebugReportCallbackEXT";
+      case ObjectType::eVideoSessionKHR : return "VideoSessionKHR";
+      case ObjectType::eVideoSessionParametersKHR : return "VideoSessionParametersKHR";
+      case ObjectType::eCuModuleNVX : return "CuModuleNVX";
+      case ObjectType::eCuFunctionNVX : return "CuFunctionNVX";
+      case ObjectType::eDebugUtilsMessengerEXT : return "DebugUtilsMessengerEXT";
+      case ObjectType::eAccelerationStructureKHR : return "AccelerationStructureKHR";
+      case ObjectType::eValidationCacheEXT : return "ValidationCacheEXT";
+      case ObjectType::eAccelerationStructureNV : return "AccelerationStructureNV";
+      case ObjectType::ePerformanceConfigurationINTEL : return "PerformanceConfigurationINTEL";
+      case ObjectType::eDeferredOperationKHR : return "DeferredOperationKHR";
+      case ObjectType::eIndirectCommandsLayoutNV : return "IndirectCommandsLayoutNV";
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+      case ObjectType::eBufferCollectionFUCHSIA : return "BufferCollectionFUCHSIA";
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+      case ObjectType::eMicromapEXT : return "MicromapEXT";
+      case ObjectType::eOpticalFlowSessionNV : return "OpticalFlowSessionNV";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( VendorId value )
+  {
+    switch ( value )
+    {
+      case VendorId::eVIV : return "VIV";
+      case VendorId::eVSI : return "VSI";
+      case VendorId::eKazan : return "Kazan";
+      case VendorId::eCodeplay : return "Codeplay";
+      case VendorId::eMESA : return "MESA";
+      case VendorId::ePocl : return "Pocl";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( Format value )
+  {
+    switch ( value )
+    {
+      case Format::eUndefined : return "Undefined";
+      case Format::eR4G4UnormPack8 : return "R4G4UnormPack8";
+      case Format::eR4G4B4A4UnormPack16 : return "R4G4B4A4UnormPack16";
+      case Format::eB4G4R4A4UnormPack16 : return "B4G4R4A4UnormPack16";
+      case Format::eR5G6B5UnormPack16 : return "R5G6B5UnormPack16";
+      case Format::eB5G6R5UnormPack16 : return "B5G6R5UnormPack16";
+      case Format::eR5G5B5A1UnormPack16 : return "R5G5B5A1UnormPack16";
+      case Format::eB5G5R5A1UnormPack16 : return "B5G5R5A1UnormPack16";
+      case Format::eA1R5G5B5UnormPack16 : return "A1R5G5B5UnormPack16";
+      case Format::eR8Unorm : return "R8Unorm";
+      case Format::eR8Snorm : return "R8Snorm";
+      case Format::eR8Uscaled : return "R8Uscaled";
+      case Format::eR8Sscaled : return "R8Sscaled";
+      case Format::eR8Uint : return "R8Uint";
+      case Format::eR8Sint : return "R8Sint";
+      case Format::eR8Srgb : return "R8Srgb";
+      case Format::eR8G8Unorm : return "R8G8Unorm";
+      case Format::eR8G8Snorm : return "R8G8Snorm";
+      case Format::eR8G8Uscaled : return "R8G8Uscaled";
+      case Format::eR8G8Sscaled : return "R8G8Sscaled";
+      case Format::eR8G8Uint : return "R8G8Uint";
+      case Format::eR8G8Sint : return "R8G8Sint";
+      case Format::eR8G8Srgb : return "R8G8Srgb";
+      case Format::eR8G8B8Unorm : return "R8G8B8Unorm";
+      case Format::eR8G8B8Snorm : return "R8G8B8Snorm";
+      case Format::eR8G8B8Uscaled : return "R8G8B8Uscaled";
+      case Format::eR8G8B8Sscaled : return "R8G8B8Sscaled";
+      case Format::eR8G8B8Uint : return "R8G8B8Uint";
+      case Format::eR8G8B8Sint : return "R8G8B8Sint";
+      case Format::eR8G8B8Srgb : return "R8G8B8Srgb";
+      case Format::eB8G8R8Unorm : return "B8G8R8Unorm";
+      case Format::eB8G8R8Snorm : return "B8G8R8Snorm";
+      case Format::eB8G8R8Uscaled : return "B8G8R8Uscaled";
+      case Format::eB8G8R8Sscaled : return "B8G8R8Sscaled";
+      case Format::eB8G8R8Uint : return "B8G8R8Uint";
+      case Format::eB8G8R8Sint : return "B8G8R8Sint";
+      case Format::eB8G8R8Srgb : return "B8G8R8Srgb";
+      case Format::eR8G8B8A8Unorm : return "R8G8B8A8Unorm";
+      case Format::eR8G8B8A8Snorm : return "R8G8B8A8Snorm";
+      case Format::eR8G8B8A8Uscaled : return "R8G8B8A8Uscaled";
+      case Format::eR8G8B8A8Sscaled : return "R8G8B8A8Sscaled";
+      case Format::eR8G8B8A8Uint : return "R8G8B8A8Uint";
+      case Format::eR8G8B8A8Sint : return "R8G8B8A8Sint";
+      case Format::eR8G8B8A8Srgb : return "R8G8B8A8Srgb";
+      case Format::eB8G8R8A8Unorm : return "B8G8R8A8Unorm";
+      case Format::eB8G8R8A8Snorm : return "B8G8R8A8Snorm";
+      case Format::eB8G8R8A8Uscaled : return "B8G8R8A8Uscaled";
+      case Format::eB8G8R8A8Sscaled : return "B8G8R8A8Sscaled";
+      case Format::eB8G8R8A8Uint : return "B8G8R8A8Uint";
+      case Format::eB8G8R8A8Sint : return "B8G8R8A8Sint";
+      case Format::eB8G8R8A8Srgb : return "B8G8R8A8Srgb";
+      case Format::eA8B8G8R8UnormPack32 : return "A8B8G8R8UnormPack32";
+      case Format::eA8B8G8R8SnormPack32 : return "A8B8G8R8SnormPack32";
+      case Format::eA8B8G8R8UscaledPack32 : return "A8B8G8R8UscaledPack32";
+      case Format::eA8B8G8R8SscaledPack32 : return "A8B8G8R8SscaledPack32";
+      case Format::eA8B8G8R8UintPack32 : return "A8B8G8R8UintPack32";
+      case Format::eA8B8G8R8SintPack32 : return "A8B8G8R8SintPack32";
+      case Format::eA8B8G8R8SrgbPack32 : return "A8B8G8R8SrgbPack32";
+      case Format::eA2R10G10B10UnormPack32 : return "A2R10G10B10UnormPack32";
+      case Format::eA2R10G10B10SnormPack32 : return "A2R10G10B10SnormPack32";
+      case Format::eA2R10G10B10UscaledPack32 : return "A2R10G10B10UscaledPack32";
+      case Format::eA2R10G10B10SscaledPack32 : return "A2R10G10B10SscaledPack32";
+      case Format::eA2R10G10B10UintPack32 : return "A2R10G10B10UintPack32";
+      case Format::eA2R10G10B10SintPack32 : return "A2R10G10B10SintPack32";
+      case Format::eA2B10G10R10UnormPack32 : return "A2B10G10R10UnormPack32";
+      case Format::eA2B10G10R10SnormPack32 : return "A2B10G10R10SnormPack32";
+      case Format::eA2B10G10R10UscaledPack32 : return "A2B10G10R10UscaledPack32";
+      case Format::eA2B10G10R10SscaledPack32 : return "A2B10G10R10SscaledPack32";
+      case Format::eA2B10G10R10UintPack32 : return "A2B10G10R10UintPack32";
+      case Format::eA2B10G10R10SintPack32 : return "A2B10G10R10SintPack32";
+      case Format::eR16Unorm : return "R16Unorm";
+      case Format::eR16Snorm : return "R16Snorm";
+      case Format::eR16Uscaled : return "R16Uscaled";
+      case Format::eR16Sscaled : return "R16Sscaled";
+      case Format::eR16Uint : return "R16Uint";
+      case Format::eR16Sint : return "R16Sint";
+      case Format::eR16Sfloat : return "R16Sfloat";
+      case Format::eR16G16Unorm : return "R16G16Unorm";
+      case Format::eR16G16Snorm : return "R16G16Snorm";
+      case Format::eR16G16Uscaled : return "R16G16Uscaled";
+      case Format::eR16G16Sscaled : return "R16G16Sscaled";
+      case Format::eR16G16Uint : return "R16G16Uint";
+      case Format::eR16G16Sint : return "R16G16Sint";
+      case Format::eR16G16Sfloat : return "R16G16Sfloat";
+      case Format::eR16G16B16Unorm : return "R16G16B16Unorm";
+      case Format::eR16G16B16Snorm : return "R16G16B16Snorm";
+      case Format::eR16G16B16Uscaled : return "R16G16B16Uscaled";
+      case Format::eR16G16B16Sscaled : return "R16G16B16Sscaled";
+      case Format::eR16G16B16Uint : return "R16G16B16Uint";
+      case Format::eR16G16B16Sint : return "R16G16B16Sint";
+      case Format::eR16G16B16Sfloat : return "R16G16B16Sfloat";
+      case Format::eR16G16B16A16Unorm : return "R16G16B16A16Unorm";
+      case Format::eR16G16B16A16Snorm : return "R16G16B16A16Snorm";
+      case Format::eR16G16B16A16Uscaled : return "R16G16B16A16Uscaled";
+      case Format::eR16G16B16A16Sscaled : return "R16G16B16A16Sscaled";
+      case Format::eR16G16B16A16Uint : return "R16G16B16A16Uint";
+      case Format::eR16G16B16A16Sint : return "R16G16B16A16Sint";
+      case Format::eR16G16B16A16Sfloat : return "R16G16B16A16Sfloat";
+      case Format::eR32Uint : return "R32Uint";
+      case Format::eR32Sint : return "R32Sint";
+      case Format::eR32Sfloat : return "R32Sfloat";
+      case Format::eR32G32Uint : return "R32G32Uint";
+      case Format::eR32G32Sint : return "R32G32Sint";
+      case Format::eR32G32Sfloat : return "R32G32Sfloat";
+      case Format::eR32G32B32Uint : return "R32G32B32Uint";
+      case Format::eR32G32B32Sint : return "R32G32B32Sint";
+      case Format::eR32G32B32Sfloat : return "R32G32B32Sfloat";
+      case Format::eR32G32B32A32Uint : return "R32G32B32A32Uint";
+      case Format::eR32G32B32A32Sint : return "R32G32B32A32Sint";
+      case Format::eR32G32B32A32Sfloat : return "R32G32B32A32Sfloat";
+      case Format::eR64Uint : return "R64Uint";
+      case Format::eR64Sint : return "R64Sint";
+      case Format::eR64Sfloat : return "R64Sfloat";
+      case Format::eR64G64Uint : return "R64G64Uint";
+      case Format::eR64G64Sint : return "R64G64Sint";
+      case Format::eR64G64Sfloat : return "R64G64Sfloat";
+      case Format::eR64G64B64Uint : return "R64G64B64Uint";
+      case Format::eR64G64B64Sint : return "R64G64B64Sint";
+      case Format::eR64G64B64Sfloat : return "R64G64B64Sfloat";
+      case Format::eR64G64B64A64Uint : return "R64G64B64A64Uint";
+      case Format::eR64G64B64A64Sint : return "R64G64B64A64Sint";
+      case Format::eR64G64B64A64Sfloat : return "R64G64B64A64Sfloat";
+      case Format::eB10G11R11UfloatPack32 : return "B10G11R11UfloatPack32";
+      case Format::eE5B9G9R9UfloatPack32 : return "E5B9G9R9UfloatPack32";
+      case Format::eD16Unorm : return "D16Unorm";
+      case Format::eX8D24UnormPack32 : return "X8D24UnormPack32";
+      case Format::eD32Sfloat : return "D32Sfloat";
+      case Format::eS8Uint : return "S8Uint";
+      case Format::eD16UnormS8Uint : return "D16UnormS8Uint";
+      case Format::eD24UnormS8Uint : return "D24UnormS8Uint";
+      case Format::eD32SfloatS8Uint : return "D32SfloatS8Uint";
+      case Format::eBc1RgbUnormBlock : return "Bc1RgbUnormBlock";
+      case Format::eBc1RgbSrgbBlock : return "Bc1RgbSrgbBlock";
+      case Format::eBc1RgbaUnormBlock : return "Bc1RgbaUnormBlock";
+      case Format::eBc1RgbaSrgbBlock : return "Bc1RgbaSrgbBlock";
+      case Format::eBc2UnormBlock : return "Bc2UnormBlock";
+      case Format::eBc2SrgbBlock : return "Bc2SrgbBlock";
+      case Format::eBc3UnormBlock : return "Bc3UnormBlock";
+      case Format::eBc3SrgbBlock : return "Bc3SrgbBlock";
+      case Format::eBc4UnormBlock : return "Bc4UnormBlock";
+      case Format::eBc4SnormBlock : return "Bc4SnormBlock";
+      case Format::eBc5UnormBlock : return "Bc5UnormBlock";
+      case Format::eBc5SnormBlock : return "Bc5SnormBlock";
+      case Format::eBc6HUfloatBlock : return "Bc6HUfloatBlock";
+      case Format::eBc6HSfloatBlock : return "Bc6HSfloatBlock";
+      case Format::eBc7UnormBlock : return "Bc7UnormBlock";
+      case Format::eBc7SrgbBlock : return "Bc7SrgbBlock";
+      case Format::eEtc2R8G8B8UnormBlock : return "Etc2R8G8B8UnormBlock";
+      case Format::eEtc2R8G8B8SrgbBlock : return "Etc2R8G8B8SrgbBlock";
+      case Format::eEtc2R8G8B8A1UnormBlock : return "Etc2R8G8B8A1UnormBlock";
+      case Format::eEtc2R8G8B8A1SrgbBlock : return "Etc2R8G8B8A1SrgbBlock";
+      case Format::eEtc2R8G8B8A8UnormBlock : return "Etc2R8G8B8A8UnormBlock";
+      case Format::eEtc2R8G8B8A8SrgbBlock : return "Etc2R8G8B8A8SrgbBlock";
+      case Format::eEacR11UnormBlock : return "EacR11UnormBlock";
+      case Format::eEacR11SnormBlock : return "EacR11SnormBlock";
+      case Format::eEacR11G11UnormBlock : return "EacR11G11UnormBlock";
+      case Format::eEacR11G11SnormBlock : return "EacR11G11SnormBlock";
+      case Format::eAstc4x4UnormBlock : return "Astc4x4UnormBlock";
+      case Format::eAstc4x4SrgbBlock : return "Astc4x4SrgbBlock";
+      case Format::eAstc5x4UnormBlock : return "Astc5x4UnormBlock";
+      case Format::eAstc5x4SrgbBlock : return "Astc5x4SrgbBlock";
+      case Format::eAstc5x5UnormBlock : return "Astc5x5UnormBlock";
+      case Format::eAstc5x5SrgbBlock : return "Astc5x5SrgbBlock";
+      case Format::eAstc6x5UnormBlock : return "Astc6x5UnormBlock";
+      case Format::eAstc6x5SrgbBlock : return "Astc6x5SrgbBlock";
+      case Format::eAstc6x6UnormBlock : return "Astc6x6UnormBlock";
+      case Format::eAstc6x6SrgbBlock : return "Astc6x6SrgbBlock";
+      case Format::eAstc8x5UnormBlock : return "Astc8x5UnormBlock";
+      case Format::eAstc8x5SrgbBlock : return "Astc8x5SrgbBlock";
+      case Format::eAstc8x6UnormBlock : return "Astc8x6UnormBlock";
+      case Format::eAstc8x6SrgbBlock : return "Astc8x6SrgbBlock";
+      case Format::eAstc8x8UnormBlock : return "Astc8x8UnormBlock";
+      case Format::eAstc8x8SrgbBlock : return "Astc8x8SrgbBlock";
+      case Format::eAstc10x5UnormBlock : return "Astc10x5UnormBlock";
+      case Format::eAstc10x5SrgbBlock : return "Astc10x5SrgbBlock";
+      case Format::eAstc10x6UnormBlock : return "Astc10x6UnormBlock";
+      case Format::eAstc10x6SrgbBlock : return "Astc10x6SrgbBlock";
+      case Format::eAstc10x8UnormBlock : return "Astc10x8UnormBlock";
+      case Format::eAstc10x8SrgbBlock : return "Astc10x8SrgbBlock";
+      case Format::eAstc10x10UnormBlock : return "Astc10x10UnormBlock";
+      case Format::eAstc10x10SrgbBlock : return "Astc10x10SrgbBlock";
+      case Format::eAstc12x10UnormBlock : return "Astc12x10UnormBlock";
+      case Format::eAstc12x10SrgbBlock : return "Astc12x10SrgbBlock";
+      case Format::eAstc12x12UnormBlock : return "Astc12x12UnormBlock";
+      case Format::eAstc12x12SrgbBlock : return "Astc12x12SrgbBlock";
+      case Format::eG8B8G8R8422Unorm : return "G8B8G8R8422Unorm";
+      case Format::eB8G8R8G8422Unorm : return "B8G8R8G8422Unorm";
+      case Format::eG8B8R83Plane420Unorm : return "G8B8R83Plane420Unorm";
+      case Format::eG8B8R82Plane420Unorm : return "G8B8R82Plane420Unorm";
+      case Format::eG8B8R83Plane422Unorm : return "G8B8R83Plane422Unorm";
+      case Format::eG8B8R82Plane422Unorm : return "G8B8R82Plane422Unorm";
+      case Format::eG8B8R83Plane444Unorm : return "G8B8R83Plane444Unorm";
+      case Format::eR10X6UnormPack16 : return "R10X6UnormPack16";
+      case Format::eR10X6G10X6Unorm2Pack16 : return "R10X6G10X6Unorm2Pack16";
+      case Format::eR10X6G10X6B10X6A10X6Unorm4Pack16 : return "R10X6G10X6B10X6A10X6Unorm4Pack16";
+      case Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16 : return "G10X6B10X6G10X6R10X6422Unorm4Pack16";
+      case Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16 : return "B10X6G10X6R10X6G10X6422Unorm4Pack16";
+      case Format::eG10X6B10X6R10X63Plane420Unorm3Pack16 : return "G10X6B10X6R10X63Plane420Unorm3Pack16";
+      case Format::eG10X6B10X6R10X62Plane420Unorm3Pack16 : return "G10X6B10X6R10X62Plane420Unorm3Pack16";
+      case Format::eG10X6B10X6R10X63Plane422Unorm3Pack16 : return "G10X6B10X6R10X63Plane422Unorm3Pack16";
+      case Format::eG10X6B10X6R10X62Plane422Unorm3Pack16 : return "G10X6B10X6R10X62Plane422Unorm3Pack16";
+      case Format::eG10X6B10X6R10X63Plane444Unorm3Pack16 : return "G10X6B10X6R10X63Plane444Unorm3Pack16";
+      case Format::eR12X4UnormPack16 : return "R12X4UnormPack16";
+      case Format::eR12X4G12X4Unorm2Pack16 : return "R12X4G12X4Unorm2Pack16";
+      case Format::eR12X4G12X4B12X4A12X4Unorm4Pack16 : return "R12X4G12X4B12X4A12X4Unorm4Pack16";
+      case Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16 : return "G12X4B12X4G12X4R12X4422Unorm4Pack16";
+      case Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16 : return "B12X4G12X4R12X4G12X4422Unorm4Pack16";
+      case Format::eG12X4B12X4R12X43Plane420Unorm3Pack16 : return "G12X4B12X4R12X43Plane420Unorm3Pack16";
+      case Format::eG12X4B12X4R12X42Plane420Unorm3Pack16 : return "G12X4B12X4R12X42Plane420Unorm3Pack16";
+      case Format::eG12X4B12X4R12X43Plane422Unorm3Pack16 : return "G12X4B12X4R12X43Plane422Unorm3Pack16";
+      case Format::eG12X4B12X4R12X42Plane422Unorm3Pack16 : return "G12X4B12X4R12X42Plane422Unorm3Pack16";
+      case Format::eG12X4B12X4R12X43Plane444Unorm3Pack16 : return "G12X4B12X4R12X43Plane444Unorm3Pack16";
+      case Format::eG16B16G16R16422Unorm : return "G16B16G16R16422Unorm";
+      case Format::eB16G16R16G16422Unorm : return "B16G16R16G16422Unorm";
+      case Format::eG16B16R163Plane420Unorm : return "G16B16R163Plane420Unorm";
+      case Format::eG16B16R162Plane420Unorm : return "G16B16R162Plane420Unorm";
+      case Format::eG16B16R163Plane422Unorm : return "G16B16R163Plane422Unorm";
+      case Format::eG16B16R162Plane422Unorm : return "G16B16R162Plane422Unorm";
+      case Format::eG16B16R163Plane444Unorm : return "G16B16R163Plane444Unorm";
+      case Format::eG8B8R82Plane444Unorm : return "G8B8R82Plane444Unorm";
+      case Format::eG10X6B10X6R10X62Plane444Unorm3Pack16 : return "G10X6B10X6R10X62Plane444Unorm3Pack16";
+      case Format::eG12X4B12X4R12X42Plane444Unorm3Pack16 : return "G12X4B12X4R12X42Plane444Unorm3Pack16";
+      case Format::eG16B16R162Plane444Unorm : return "G16B16R162Plane444Unorm";
+      case Format::eA4R4G4B4UnormPack16 : return "A4R4G4B4UnormPack16";
+      case Format::eA4B4G4R4UnormPack16 : return "A4B4G4R4UnormPack16";
+      case Format::eAstc4x4SfloatBlock : return "Astc4x4SfloatBlock";
+      case Format::eAstc5x4SfloatBlock : return "Astc5x4SfloatBlock";
+      case Format::eAstc5x5SfloatBlock : return "Astc5x5SfloatBlock";
+      case Format::eAstc6x5SfloatBlock : return "Astc6x5SfloatBlock";
+      case Format::eAstc6x6SfloatBlock : return "Astc6x6SfloatBlock";
+      case Format::eAstc8x5SfloatBlock : return "Astc8x5SfloatBlock";
+      case Format::eAstc8x6SfloatBlock : return "Astc8x6SfloatBlock";
+      case Format::eAstc8x8SfloatBlock : return "Astc8x8SfloatBlock";
+      case Format::eAstc10x5SfloatBlock : return "Astc10x5SfloatBlock";
+      case Format::eAstc10x6SfloatBlock : return "Astc10x6SfloatBlock";
+      case Format::eAstc10x8SfloatBlock : return "Astc10x8SfloatBlock";
+      case Format::eAstc10x10SfloatBlock : return "Astc10x10SfloatBlock";
+      case Format::eAstc12x10SfloatBlock : return "Astc12x10SfloatBlock";
+      case Format::eAstc12x12SfloatBlock : return "Astc12x12SfloatBlock";
+      case Format::ePvrtc12BppUnormBlockIMG : return "Pvrtc12BppUnormBlockIMG";
+      case Format::ePvrtc14BppUnormBlockIMG : return "Pvrtc14BppUnormBlockIMG";
+      case Format::ePvrtc22BppUnormBlockIMG : return "Pvrtc22BppUnormBlockIMG";
+      case Format::ePvrtc24BppUnormBlockIMG : return "Pvrtc24BppUnormBlockIMG";
+      case Format::ePvrtc12BppSrgbBlockIMG : return "Pvrtc12BppSrgbBlockIMG";
+      case Format::ePvrtc14BppSrgbBlockIMG : return "Pvrtc14BppSrgbBlockIMG";
+      case Format::ePvrtc22BppSrgbBlockIMG : return "Pvrtc22BppSrgbBlockIMG";
+      case Format::ePvrtc24BppSrgbBlockIMG : return "Pvrtc24BppSrgbBlockIMG";
+      case Format::eR16G16S105NV : return "R16G16S105NV";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( FormatFeatureFlagBits value )
+  {
+    switch ( value )
+    {
+      case FormatFeatureFlagBits::eSampledImage : return "SampledImage";
+      case FormatFeatureFlagBits::eStorageImage : return "StorageImage";
+      case FormatFeatureFlagBits::eStorageImageAtomic : return "StorageImageAtomic";
+      case FormatFeatureFlagBits::eUniformTexelBuffer : return "UniformTexelBuffer";
+      case FormatFeatureFlagBits::eStorageTexelBuffer : return "StorageTexelBuffer";
+      case FormatFeatureFlagBits::eStorageTexelBufferAtomic : return "StorageTexelBufferAtomic";
+      case FormatFeatureFlagBits::eVertexBuffer : return "VertexBuffer";
+      case FormatFeatureFlagBits::eColorAttachment : return "ColorAttachment";
+      case FormatFeatureFlagBits::eColorAttachmentBlend : return "ColorAttachmentBlend";
+      case FormatFeatureFlagBits::eDepthStencilAttachment : return "DepthStencilAttachment";
+      case FormatFeatureFlagBits::eBlitSrc : return "BlitSrc";
+      case FormatFeatureFlagBits::eBlitDst : return "BlitDst";
+      case FormatFeatureFlagBits::eSampledImageFilterLinear : return "SampledImageFilterLinear";
+      case FormatFeatureFlagBits::eTransferSrc : return "TransferSrc";
+      case FormatFeatureFlagBits::eTransferDst : return "TransferDst";
+      case FormatFeatureFlagBits::eMidpointChromaSamples : return "MidpointChromaSamples";
+      case FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter : return "SampledImageYcbcrConversionLinearFilter";
+      case FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter : return "SampledImageYcbcrConversionSeparateReconstructionFilter";
+      case FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit : return "SampledImageYcbcrConversionChromaReconstructionExplicit";
+      case FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable : return "SampledImageYcbcrConversionChromaReconstructionExplicitForceable";
+      case FormatFeatureFlagBits::eDisjoint : return "Disjoint";
+      case FormatFeatureFlagBits::eCositedChromaSamples : return "CositedChromaSamples";
+      case FormatFeatureFlagBits::eSampledImageFilterMinmax : return "SampledImageFilterMinmax";
+      case FormatFeatureFlagBits::eVideoDecodeOutputKHR : return "VideoDecodeOutputKHR";
+      case FormatFeatureFlagBits::eVideoDecodeDpbKHR : return "VideoDecodeDpbKHR";
+      case FormatFeatureFlagBits::eAccelerationStructureVertexBufferKHR : return "AccelerationStructureVertexBufferKHR";
+      case FormatFeatureFlagBits::eSampledImageFilterCubicEXT : return "SampledImageFilterCubicEXT";
+      case FormatFeatureFlagBits::eFragmentDensityMapEXT : return "FragmentDensityMapEXT";
+      case FormatFeatureFlagBits::eFragmentShadingRateAttachmentKHR : return "FragmentShadingRateAttachmentKHR";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+      case FormatFeatureFlagBits::eVideoEncodeInputKHR : return "VideoEncodeInputKHR";
+      case FormatFeatureFlagBits::eVideoEncodeDpbKHR : return "VideoEncodeDpbKHR";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( ImageCreateFlagBits value )
+  {
+    switch ( value )
+    {
+      case ImageCreateFlagBits::eSparseBinding : return "SparseBinding";
+      case ImageCreateFlagBits::eSparseResidency : return "SparseResidency";
+      case ImageCreateFlagBits::eSparseAliased : return "SparseAliased";
+      case ImageCreateFlagBits::eMutableFormat : return "MutableFormat";
+      case ImageCreateFlagBits::eCubeCompatible : return "CubeCompatible";
+      case ImageCreateFlagBits::eAlias : return "Alias";
+      case ImageCreateFlagBits::eSplitInstanceBindRegions : return "SplitInstanceBindRegions";
+      case ImageCreateFlagBits::e2DArrayCompatible : return "2DArrayCompatible";
+      case ImageCreateFlagBits::eBlockTexelViewCompatible : return "BlockTexelViewCompatible";
+      case ImageCreateFlagBits::eExtendedUsage : return "ExtendedUsage";
+      case ImageCreateFlagBits::eProtected : return "Protected";
+      case ImageCreateFlagBits::eDisjoint : return "Disjoint";
+      case ImageCreateFlagBits::eCornerSampledNV : return "CornerSampledNV";
+      case ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT : return "SampleLocationsCompatibleDepthEXT";
+      case ImageCreateFlagBits::eSubsampledEXT : return "SubsampledEXT";
+      case ImageCreateFlagBits::eDescriptorBufferCaptureReplayEXT : return "DescriptorBufferCaptureReplayEXT";
+      case ImageCreateFlagBits::eMultisampledRenderToSingleSampledEXT : return "MultisampledRenderToSingleSampledEXT";
+      case ImageCreateFlagBits::e2DViewCompatibleEXT : return "2DViewCompatibleEXT";
+      case ImageCreateFlagBits::eFragmentDensityMapOffsetQCOM : return "FragmentDensityMapOffsetQCOM";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( ImageTiling value )
+  {
+    switch ( value )
+    {
+      case ImageTiling::eOptimal : return "Optimal";
+      case ImageTiling::eLinear : return "Linear";
+      case ImageTiling::eDrmFormatModifierEXT : return "DrmFormatModifierEXT";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( ImageType value )
+  {
+    switch ( value )
+    {
+      case ImageType::e1D : return "1D";
+      case ImageType::e2D : return "2D";
+      case ImageType::e3D : return "3D";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( ImageUsageFlagBits value )
+  {
+    switch ( value )
+    {
+      case ImageUsageFlagBits::eTransferSrc : return "TransferSrc";
+      case ImageUsageFlagBits::eTransferDst : return "TransferDst";
+      case ImageUsageFlagBits::eSampled : return "Sampled";
+      case ImageUsageFlagBits::eStorage : return "Storage";
+      case ImageUsageFlagBits::eColorAttachment : return "ColorAttachment";
+      case ImageUsageFlagBits::eDepthStencilAttachment : return "DepthStencilAttachment";
+      case ImageUsageFlagBits::eTransientAttachment : return "TransientAttachment";
+      case ImageUsageFlagBits::eInputAttachment : return "InputAttachment";
+      case ImageUsageFlagBits::eVideoDecodeDstKHR : return "VideoDecodeDstKHR";
+      case ImageUsageFlagBits::eVideoDecodeSrcKHR : return "VideoDecodeSrcKHR";
+      case ImageUsageFlagBits::eVideoDecodeDpbKHR : return "VideoDecodeDpbKHR";
+      case ImageUsageFlagBits::eFragmentDensityMapEXT : return "FragmentDensityMapEXT";
+      case ImageUsageFlagBits::eFragmentShadingRateAttachmentKHR : return "FragmentShadingRateAttachmentKHR";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+      case ImageUsageFlagBits::eVideoEncodeDstKHR : return "VideoEncodeDstKHR";
+      case ImageUsageFlagBits::eVideoEncodeSrcKHR : return "VideoEncodeSrcKHR";
+      case ImageUsageFlagBits::eVideoEncodeDpbKHR : return "VideoEncodeDpbKHR";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      case ImageUsageFlagBits::eAttachmentFeedbackLoopEXT : return "AttachmentFeedbackLoopEXT";
+      case ImageUsageFlagBits::eInvocationMaskHUAWEI : return "InvocationMaskHUAWEI";
+      case ImageUsageFlagBits::eSampleWeightQCOM : return "SampleWeightQCOM";
+      case ImageUsageFlagBits::eSampleBlockMatchQCOM : return "SampleBlockMatchQCOM";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( InstanceCreateFlagBits value )
+  {
+    switch ( value )
+    {
+      case InstanceCreateFlagBits::eEnumeratePortabilityKHR : return "EnumeratePortabilityKHR";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( InternalAllocationType value )
+  {
+    switch ( value )
+    {
+      case InternalAllocationType::eExecutable : return "Executable";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( MemoryHeapFlagBits value )
+  {
+    switch ( value )
+    {
+      case MemoryHeapFlagBits::eDeviceLocal : return "DeviceLocal";
+      case MemoryHeapFlagBits::eMultiInstance : return "MultiInstance";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( MemoryPropertyFlagBits value )
+  {
+    switch ( value )
+    {
+      case MemoryPropertyFlagBits::eDeviceLocal : return "DeviceLocal";
+      case MemoryPropertyFlagBits::eHostVisible : return "HostVisible";
+      case MemoryPropertyFlagBits::eHostCoherent : return "HostCoherent";
+      case MemoryPropertyFlagBits::eHostCached : return "HostCached";
+      case MemoryPropertyFlagBits::eLazilyAllocated : return "LazilyAllocated";
+      case MemoryPropertyFlagBits::eProtected : return "Protected";
+      case MemoryPropertyFlagBits::eDeviceCoherentAMD : return "DeviceCoherentAMD";
+      case MemoryPropertyFlagBits::eDeviceUncachedAMD : return "DeviceUncachedAMD";
+      case MemoryPropertyFlagBits::eRdmaCapableNV : return "RdmaCapableNV";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( PhysicalDeviceType value )
+  {
+    switch ( value )
+    {
+      case PhysicalDeviceType::eOther : return "Other";
+      case PhysicalDeviceType::eIntegratedGpu : return "IntegratedGpu";
+      case PhysicalDeviceType::eDiscreteGpu : return "DiscreteGpu";
+      case PhysicalDeviceType::eVirtualGpu : return "VirtualGpu";
+      case PhysicalDeviceType::eCpu : return "Cpu";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( QueueFlagBits value )
+  {
+    switch ( value )
+    {
+      case QueueFlagBits::eGraphics : return "Graphics";
+      case QueueFlagBits::eCompute : return "Compute";
+      case QueueFlagBits::eTransfer : return "Transfer";
+      case QueueFlagBits::eSparseBinding : return "SparseBinding";
+      case QueueFlagBits::eProtected : return "Protected";
+      case QueueFlagBits::eVideoDecodeKHR : return "VideoDecodeKHR";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+      case QueueFlagBits::eVideoEncodeKHR : return "VideoEncodeKHR";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      case QueueFlagBits::eOpticalFlowNV : return "OpticalFlowNV";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( SampleCountFlagBits value )
+  {
+    switch ( value )
+    {
+      case SampleCountFlagBits::e1 : return "1";
+      case SampleCountFlagBits::e2 : return "2";
+      case SampleCountFlagBits::e4 : return "4";
+      case SampleCountFlagBits::e8 : return "8";
+      case SampleCountFlagBits::e16 : return "16";
+      case SampleCountFlagBits::e32 : return "32";
+      case SampleCountFlagBits::e64 : return "64";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( SystemAllocationScope value )
+  {
+    switch ( value )
+    {
+      case SystemAllocationScope::eCommand : return "Command";
+      case SystemAllocationScope::eObject : return "Object";
+      case SystemAllocationScope::eCache : return "Cache";
+      case SystemAllocationScope::eDevice : return "Device";
+      case SystemAllocationScope::eInstance : return "Instance";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( DeviceCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineStageFlagBits value )
+  {
+    switch ( value )
+    {
+      case PipelineStageFlagBits::eTopOfPipe : return "TopOfPipe";
+      case PipelineStageFlagBits::eDrawIndirect : return "DrawIndirect";
+      case PipelineStageFlagBits::eVertexInput : return "VertexInput";
+      case PipelineStageFlagBits::eVertexShader : return "VertexShader";
+      case PipelineStageFlagBits::eTessellationControlShader : return "TessellationControlShader";
+      case PipelineStageFlagBits::eTessellationEvaluationShader : return "TessellationEvaluationShader";
+      case PipelineStageFlagBits::eGeometryShader : return "GeometryShader";
+      case PipelineStageFlagBits::eFragmentShader : return "FragmentShader";
+      case PipelineStageFlagBits::eEarlyFragmentTests : return "EarlyFragmentTests";
+      case PipelineStageFlagBits::eLateFragmentTests : return "LateFragmentTests";
+      case PipelineStageFlagBits::eColorAttachmentOutput : return "ColorAttachmentOutput";
+      case PipelineStageFlagBits::eComputeShader : return "ComputeShader";
+      case PipelineStageFlagBits::eTransfer : return "Transfer";
+      case PipelineStageFlagBits::eBottomOfPipe : return "BottomOfPipe";
+      case PipelineStageFlagBits::eHost : return "Host";
+      case PipelineStageFlagBits::eAllGraphics : return "AllGraphics";
+      case PipelineStageFlagBits::eAllCommands : return "AllCommands";
+      case PipelineStageFlagBits::eNone : return "None";
+      case PipelineStageFlagBits::eTransformFeedbackEXT : return "TransformFeedbackEXT";
+      case PipelineStageFlagBits::eConditionalRenderingEXT : return "ConditionalRenderingEXT";
+      case PipelineStageFlagBits::eAccelerationStructureBuildKHR : return "AccelerationStructureBuildKHR";
+      case PipelineStageFlagBits::eRayTracingShaderKHR : return "RayTracingShaderKHR";
+      case PipelineStageFlagBits::eFragmentDensityProcessEXT : return "FragmentDensityProcessEXT";
+      case PipelineStageFlagBits::eFragmentShadingRateAttachmentKHR : return "FragmentShadingRateAttachmentKHR";
+      case PipelineStageFlagBits::eCommandPreprocessNV : return "CommandPreprocessNV";
+      case PipelineStageFlagBits::eTaskShaderEXT : return "TaskShaderEXT";
+      case PipelineStageFlagBits::eMeshShaderEXT : return "MeshShaderEXT";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( MemoryMapFlagBits )
+  {
+    return "(void)";
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( ImageAspectFlagBits value )
+  {
+    switch ( value )
+    {
+      case ImageAspectFlagBits::eColor : return "Color";
+      case ImageAspectFlagBits::eDepth : return "Depth";
+      case ImageAspectFlagBits::eStencil : return "Stencil";
+      case ImageAspectFlagBits::eMetadata : return "Metadata";
+      case ImageAspectFlagBits::ePlane0 : return "Plane0";
+      case ImageAspectFlagBits::ePlane1 : return "Plane1";
+      case ImageAspectFlagBits::ePlane2 : return "Plane2";
+      case ImageAspectFlagBits::eNone : return "None";
+      case ImageAspectFlagBits::eMemoryPlane0EXT : return "MemoryPlane0EXT";
+      case ImageAspectFlagBits::eMemoryPlane1EXT : return "MemoryPlane1EXT";
+      case ImageAspectFlagBits::eMemoryPlane2EXT : return "MemoryPlane2EXT";
+      case ImageAspectFlagBits::eMemoryPlane3EXT : return "MemoryPlane3EXT";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( SparseImageFormatFlagBits value )
+  {
+    switch ( value )
+    {
+      case SparseImageFormatFlagBits::eSingleMiptail : return "SingleMiptail";
+      case SparseImageFormatFlagBits::eAlignedMipSize : return "AlignedMipSize";
+      case SparseImageFormatFlagBits::eNonstandardBlockSize : return "NonstandardBlockSize";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( SparseMemoryBindFlagBits value )
+  {
+    switch ( value )
+    {
+      case SparseMemoryBindFlagBits::eMetadata : return "Metadata";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( FenceCreateFlagBits value )
+  {
+    switch ( value )
+    {
+      case FenceCreateFlagBits::eSignaled : return "Signaled";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( SemaphoreCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( EventCreateFlagBits value )
+  {
+    switch ( value )
+    {
+      case EventCreateFlagBits::eDeviceOnly : return "DeviceOnly";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( QueryPipelineStatisticFlagBits value )
+  {
+    switch ( value )
+    {
+      case QueryPipelineStatisticFlagBits::eInputAssemblyVertices : return "InputAssemblyVertices";
+      case QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives : return "InputAssemblyPrimitives";
+      case QueryPipelineStatisticFlagBits::eVertexShaderInvocations : return "VertexShaderInvocations";
+      case QueryPipelineStatisticFlagBits::eGeometryShaderInvocations : return "GeometryShaderInvocations";
+      case QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives : return "GeometryShaderPrimitives";
+      case QueryPipelineStatisticFlagBits::eClippingInvocations : return "ClippingInvocations";
+      case QueryPipelineStatisticFlagBits::eClippingPrimitives : return "ClippingPrimitives";
+      case QueryPipelineStatisticFlagBits::eFragmentShaderInvocations : return "FragmentShaderInvocations";
+      case QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches : return "TessellationControlShaderPatches";
+      case QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations : return "TessellationEvaluationShaderInvocations";
+      case QueryPipelineStatisticFlagBits::eComputeShaderInvocations : return "ComputeShaderInvocations";
+      case QueryPipelineStatisticFlagBits::eTaskShaderInvocationsEXT : return "TaskShaderInvocationsEXT";
+      case QueryPipelineStatisticFlagBits::eMeshShaderInvocationsEXT : return "MeshShaderInvocationsEXT";
+      case QueryPipelineStatisticFlagBits::eClusterCullingShaderInvocationsHUAWEI : return "ClusterCullingShaderInvocationsHUAWEI";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( QueryResultFlagBits value )
+  {
+    switch ( value )
+    {
+      case QueryResultFlagBits::e64 : return "64";
+      case QueryResultFlagBits::eWait : return "Wait";
+      case QueryResultFlagBits::eWithAvailability : return "WithAvailability";
+      case QueryResultFlagBits::ePartial : return "Partial";
+      case QueryResultFlagBits::eWithStatusKHR : return "WithStatusKHR";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( QueryType value )
+  {
+    switch ( value )
+    {
+      case QueryType::eOcclusion : return "Occlusion";
+      case QueryType::ePipelineStatistics : return "PipelineStatistics";
+      case QueryType::eTimestamp : return "Timestamp";
+      case QueryType::eResultStatusOnlyKHR : return "ResultStatusOnlyKHR";
+      case QueryType::eTransformFeedbackStreamEXT : return "TransformFeedbackStreamEXT";
+      case QueryType::ePerformanceQueryKHR : return "PerformanceQueryKHR";
+      case QueryType::eAccelerationStructureCompactedSizeKHR : return "AccelerationStructureCompactedSizeKHR";
+      case QueryType::eAccelerationStructureSerializationSizeKHR : return "AccelerationStructureSerializationSizeKHR";
+      case QueryType::eAccelerationStructureCompactedSizeNV : return "AccelerationStructureCompactedSizeNV";
+      case QueryType::ePerformanceQueryINTEL : return "PerformanceQueryINTEL";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+      case QueryType::eVideoEncodeBitstreamBufferRangeKHR : return "VideoEncodeBitstreamBufferRangeKHR";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      case QueryType::eMeshPrimitivesGeneratedEXT : return "MeshPrimitivesGeneratedEXT";
+      case QueryType::ePrimitivesGeneratedEXT : return "PrimitivesGeneratedEXT";
+      case QueryType::eAccelerationStructureSerializationBottomLevelPointersKHR : return "AccelerationStructureSerializationBottomLevelPointersKHR";
+      case QueryType::eAccelerationStructureSizeKHR : return "AccelerationStructureSizeKHR";
+      case QueryType::eMicromapSerializationSizeEXT : return "MicromapSerializationSizeEXT";
+      case QueryType::eMicromapCompactedSizeEXT : return "MicromapCompactedSizeEXT";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( QueryPoolCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( BufferCreateFlagBits value )
+  {
+    switch ( value )
+    {
+      case BufferCreateFlagBits::eSparseBinding : return "SparseBinding";
+      case BufferCreateFlagBits::eSparseResidency : return "SparseResidency";
+      case BufferCreateFlagBits::eSparseAliased : return "SparseAliased";
+      case BufferCreateFlagBits::eProtected : return "Protected";
+      case BufferCreateFlagBits::eDeviceAddressCaptureReplay : return "DeviceAddressCaptureReplay";
+      case BufferCreateFlagBits::eDescriptorBufferCaptureReplayEXT : return "DescriptorBufferCaptureReplayEXT";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( BufferUsageFlagBits value )
+  {
+    switch ( value )
+    {
+      case BufferUsageFlagBits::eTransferSrc : return "TransferSrc";
+      case BufferUsageFlagBits::eTransferDst : return "TransferDst";
+      case BufferUsageFlagBits::eUniformTexelBuffer : return "UniformTexelBuffer";
+      case BufferUsageFlagBits::eStorageTexelBuffer : return "StorageTexelBuffer";
+      case BufferUsageFlagBits::eUniformBuffer : return "UniformBuffer";
+      case BufferUsageFlagBits::eStorageBuffer : return "StorageBuffer";
+      case BufferUsageFlagBits::eIndexBuffer : return "IndexBuffer";
+      case BufferUsageFlagBits::eVertexBuffer : return "VertexBuffer";
+      case BufferUsageFlagBits::eIndirectBuffer : return "IndirectBuffer";
+      case BufferUsageFlagBits::eShaderDeviceAddress : return "ShaderDeviceAddress";
+      case BufferUsageFlagBits::eVideoDecodeSrcKHR : return "VideoDecodeSrcKHR";
+      case BufferUsageFlagBits::eVideoDecodeDstKHR : return "VideoDecodeDstKHR";
+      case BufferUsageFlagBits::eTransformFeedbackBufferEXT : return "TransformFeedbackBufferEXT";
+      case BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT : return "TransformFeedbackCounterBufferEXT";
+      case BufferUsageFlagBits::eConditionalRenderingEXT : return "ConditionalRenderingEXT";
+      case BufferUsageFlagBits::eAccelerationStructureBuildInputReadOnlyKHR : return "AccelerationStructureBuildInputReadOnlyKHR";
+      case BufferUsageFlagBits::eAccelerationStructureStorageKHR : return "AccelerationStructureStorageKHR";
+      case BufferUsageFlagBits::eShaderBindingTableKHR : return "ShaderBindingTableKHR";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+      case BufferUsageFlagBits::eVideoEncodeDstKHR : return "VideoEncodeDstKHR";
+      case BufferUsageFlagBits::eVideoEncodeSrcKHR : return "VideoEncodeSrcKHR";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      case BufferUsageFlagBits::eSamplerDescriptorBufferEXT : return "SamplerDescriptorBufferEXT";
+      case BufferUsageFlagBits::eResourceDescriptorBufferEXT : return "ResourceDescriptorBufferEXT";
+      case BufferUsageFlagBits::ePushDescriptorsDescriptorBufferEXT : return "PushDescriptorsDescriptorBufferEXT";
+      case BufferUsageFlagBits::eMicromapBuildInputReadOnlyEXT : return "MicromapBuildInputReadOnlyEXT";
+      case BufferUsageFlagBits::eMicromapStorageEXT : return "MicromapStorageEXT";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( SharingMode value )
+  {
+    switch ( value )
+    {
+      case SharingMode::eExclusive : return "Exclusive";
+      case SharingMode::eConcurrent : return "Concurrent";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( BufferViewCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( ImageLayout value )
+  {
+    switch ( value )
+    {
+      case ImageLayout::eUndefined : return "Undefined";
+      case ImageLayout::eGeneral : return "General";
+      case ImageLayout::eColorAttachmentOptimal : return "ColorAttachmentOptimal";
+      case ImageLayout::eDepthStencilAttachmentOptimal : return "DepthStencilAttachmentOptimal";
+      case ImageLayout::eDepthStencilReadOnlyOptimal : return "DepthStencilReadOnlyOptimal";
+      case ImageLayout::eShaderReadOnlyOptimal : return "ShaderReadOnlyOptimal";
+      case ImageLayout::eTransferSrcOptimal : return "TransferSrcOptimal";
+      case ImageLayout::eTransferDstOptimal : return "TransferDstOptimal";
+      case ImageLayout::ePreinitialized : return "Preinitialized";
+      case ImageLayout::eDepthReadOnlyStencilAttachmentOptimal : return "DepthReadOnlyStencilAttachmentOptimal";
+      case ImageLayout::eDepthAttachmentStencilReadOnlyOptimal : return "DepthAttachmentStencilReadOnlyOptimal";
+      case ImageLayout::eDepthAttachmentOptimal : return "DepthAttachmentOptimal";
+      case ImageLayout::eDepthReadOnlyOptimal : return "DepthReadOnlyOptimal";
+      case ImageLayout::eStencilAttachmentOptimal : return "StencilAttachmentOptimal";
+      case ImageLayout::eStencilReadOnlyOptimal : return "StencilReadOnlyOptimal";
+      case ImageLayout::eReadOnlyOptimal : return "ReadOnlyOptimal";
+      case ImageLayout::eAttachmentOptimal : return "AttachmentOptimal";
+      case ImageLayout::ePresentSrcKHR : return "PresentSrcKHR";
+      case ImageLayout::eVideoDecodeDstKHR : return "VideoDecodeDstKHR";
+      case ImageLayout::eVideoDecodeSrcKHR : return "VideoDecodeSrcKHR";
+      case ImageLayout::eVideoDecodeDpbKHR : return "VideoDecodeDpbKHR";
+      case ImageLayout::eSharedPresentKHR : return "SharedPresentKHR";
+      case ImageLayout::eFragmentDensityMapOptimalEXT : return "FragmentDensityMapOptimalEXT";
+      case ImageLayout::eFragmentShadingRateAttachmentOptimalKHR : return "FragmentShadingRateAttachmentOptimalKHR";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+      case ImageLayout::eVideoEncodeDstKHR : return "VideoEncodeDstKHR";
+      case ImageLayout::eVideoEncodeSrcKHR : return "VideoEncodeSrcKHR";
+      case ImageLayout::eVideoEncodeDpbKHR : return "VideoEncodeDpbKHR";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      case ImageLayout::eAttachmentFeedbackLoopOptimalEXT : return "AttachmentFeedbackLoopOptimalEXT";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( ComponentSwizzle value )
+  {
+    switch ( value )
+    {
+      case ComponentSwizzle::eIdentity : return "Identity";
+      case ComponentSwizzle::eZero : return "Zero";
+      case ComponentSwizzle::eOne : return "One";
+      case ComponentSwizzle::eR : return "R";
+      case ComponentSwizzle::eG : return "G";
+      case ComponentSwizzle::eB : return "B";
+      case ComponentSwizzle::eA : return "A";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( ImageViewCreateFlagBits value )
+  {
+    switch ( value )
+    {
+      case ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT : return "FragmentDensityMapDynamicEXT";
+      case ImageViewCreateFlagBits::eDescriptorBufferCaptureReplayEXT : return "DescriptorBufferCaptureReplayEXT";
+      case ImageViewCreateFlagBits::eFragmentDensityMapDeferredEXT : return "FragmentDensityMapDeferredEXT";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( ImageViewType value )
+  {
+    switch ( value )
+    {
+      case ImageViewType::e1D : return "1D";
+      case ImageViewType::e2D : return "2D";
+      case ImageViewType::e3D : return "3D";
+      case ImageViewType::eCube : return "Cube";
+      case ImageViewType::e1DArray : return "1DArray";
+      case ImageViewType::e2DArray : return "2DArray";
+      case ImageViewType::eCubeArray : return "CubeArray";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( ShaderModuleCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( BlendFactor value )
+  {
+    switch ( value )
+    {
+      case BlendFactor::eZero : return "Zero";
+      case BlendFactor::eOne : return "One";
+      case BlendFactor::eSrcColor : return "SrcColor";
+      case BlendFactor::eOneMinusSrcColor : return "OneMinusSrcColor";
+      case BlendFactor::eDstColor : return "DstColor";
+      case BlendFactor::eOneMinusDstColor : return "OneMinusDstColor";
+      case BlendFactor::eSrcAlpha : return "SrcAlpha";
+      case BlendFactor::eOneMinusSrcAlpha : return "OneMinusSrcAlpha";
+      case BlendFactor::eDstAlpha : return "DstAlpha";
+      case BlendFactor::eOneMinusDstAlpha : return "OneMinusDstAlpha";
+      case BlendFactor::eConstantColor : return "ConstantColor";
+      case BlendFactor::eOneMinusConstantColor : return "OneMinusConstantColor";
+      case BlendFactor::eConstantAlpha : return "ConstantAlpha";
+      case BlendFactor::eOneMinusConstantAlpha : return "OneMinusConstantAlpha";
+      case BlendFactor::eSrcAlphaSaturate : return "SrcAlphaSaturate";
+      case BlendFactor::eSrc1Color : return "Src1Color";
+      case BlendFactor::eOneMinusSrc1Color : return "OneMinusSrc1Color";
+      case BlendFactor::eSrc1Alpha : return "Src1Alpha";
+      case BlendFactor::eOneMinusSrc1Alpha : return "OneMinusSrc1Alpha";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( BlendOp value )
+  {
+    switch ( value )
+    {
+      case BlendOp::eAdd : return "Add";
+      case BlendOp::eSubtract : return "Subtract";
+      case BlendOp::eReverseSubtract : return "ReverseSubtract";
+      case BlendOp::eMin : return "Min";
+      case BlendOp::eMax : return "Max";
+      case BlendOp::eZeroEXT : return "ZeroEXT";
+      case BlendOp::eSrcEXT : return "SrcEXT";
+      case BlendOp::eDstEXT : return "DstEXT";
+      case BlendOp::eSrcOverEXT : return "SrcOverEXT";
+      case BlendOp::eDstOverEXT : return "DstOverEXT";
+      case BlendOp::eSrcInEXT : return "SrcInEXT";
+      case BlendOp::eDstInEXT : return "DstInEXT";
+      case BlendOp::eSrcOutEXT : return "SrcOutEXT";
+      case BlendOp::eDstOutEXT : return "DstOutEXT";
+      case BlendOp::eSrcAtopEXT : return "SrcAtopEXT";
+      case BlendOp::eDstAtopEXT : return "DstAtopEXT";
+      case BlendOp::eXorEXT : return "XorEXT";
+      case BlendOp::eMultiplyEXT : return "MultiplyEXT";
+      case BlendOp::eScreenEXT : return "ScreenEXT";
+      case BlendOp::eOverlayEXT : return "OverlayEXT";
+      case BlendOp::eDarkenEXT : return "DarkenEXT";
+      case BlendOp::eLightenEXT : return "LightenEXT";
+      case BlendOp::eColordodgeEXT : return "ColordodgeEXT";
+      case BlendOp::eColorburnEXT : return "ColorburnEXT";
+      case BlendOp::eHardlightEXT : return "HardlightEXT";
+      case BlendOp::eSoftlightEXT : return "SoftlightEXT";
+      case BlendOp::eDifferenceEXT : return "DifferenceEXT";
+      case BlendOp::eExclusionEXT : return "ExclusionEXT";
+      case BlendOp::eInvertEXT : return "InvertEXT";
+      case BlendOp::eInvertRgbEXT : return "InvertRgbEXT";
+      case BlendOp::eLineardodgeEXT : return "LineardodgeEXT";
+      case BlendOp::eLinearburnEXT : return "LinearburnEXT";
+      case BlendOp::eVividlightEXT : return "VividlightEXT";
+      case BlendOp::eLinearlightEXT : return "LinearlightEXT";
+      case BlendOp::ePinlightEXT : return "PinlightEXT";
+      case BlendOp::eHardmixEXT : return "HardmixEXT";
+      case BlendOp::eHslHueEXT : return "HslHueEXT";
+      case BlendOp::eHslSaturationEXT : return "HslSaturationEXT";
+      case BlendOp::eHslColorEXT : return "HslColorEXT";
+      case BlendOp::eHslLuminosityEXT : return "HslLuminosityEXT";
+      case BlendOp::ePlusEXT : return "PlusEXT";
+      case BlendOp::ePlusClampedEXT : return "PlusClampedEXT";
+      case BlendOp::ePlusClampedAlphaEXT : return "PlusClampedAlphaEXT";
+      case BlendOp::ePlusDarkerEXT : return "PlusDarkerEXT";
+      case BlendOp::eMinusEXT : return "MinusEXT";
+      case BlendOp::eMinusClampedEXT : return "MinusClampedEXT";
+      case BlendOp::eContrastEXT : return "ContrastEXT";
+      case BlendOp::eInvertOvgEXT : return "InvertOvgEXT";
+      case BlendOp::eRedEXT : return "RedEXT";
+      case BlendOp::eGreenEXT : return "GreenEXT";
+      case BlendOp::eBlueEXT : return "BlueEXT";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( ColorComponentFlagBits value )
+  {
+    switch ( value )
+    {
+      case ColorComponentFlagBits::eR : return "R";
+      case ColorComponentFlagBits::eG : return "G";
+      case ColorComponentFlagBits::eB : return "B";
+      case ColorComponentFlagBits::eA : return "A";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( CompareOp value )
+  {
+    switch ( value )
+    {
+      case CompareOp::eNever : return "Never";
+      case CompareOp::eLess : return "Less";
+      case CompareOp::eEqual : return "Equal";
+      case CompareOp::eLessOrEqual : return "LessOrEqual";
+      case CompareOp::eGreater : return "Greater";
+      case CompareOp::eNotEqual : return "NotEqual";
+      case CompareOp::eGreaterOrEqual : return "GreaterOrEqual";
+      case CompareOp::eAlways : return "Always";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( CullModeFlagBits value )
+  {
+    switch ( value )
+    {
+      case CullModeFlagBits::eNone : return "None";
+      case CullModeFlagBits::eFront : return "Front";
+      case CullModeFlagBits::eBack : return "Back";
+      case CullModeFlagBits::eFrontAndBack : return "FrontAndBack";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( DynamicState value )
+  {
+    switch ( value )
+    {
+      case DynamicState::eViewport : return "Viewport";
+      case DynamicState::eScissor : return "Scissor";
+      case DynamicState::eLineWidth : return "LineWidth";
+      case DynamicState::eDepthBias : return "DepthBias";
+      case DynamicState::eBlendConstants : return "BlendConstants";
+      case DynamicState::eDepthBounds : return "DepthBounds";
+      case DynamicState::eStencilCompareMask : return "StencilCompareMask";
+      case DynamicState::eStencilWriteMask : return "StencilWriteMask";
+      case DynamicState::eStencilReference : return "StencilReference";
+      case DynamicState::eCullMode : return "CullMode";
+      case DynamicState::eFrontFace : return "FrontFace";
+      case DynamicState::ePrimitiveTopology : return "PrimitiveTopology";
+      case DynamicState::eViewportWithCount : return "ViewportWithCount";
+      case DynamicState::eScissorWithCount : return "ScissorWithCount";
+      case DynamicState::eVertexInputBindingStride : return "VertexInputBindingStride";
+      case DynamicState::eDepthTestEnable : return "DepthTestEnable";
+      case DynamicState::eDepthWriteEnable : return "DepthWriteEnable";
+      case DynamicState::eDepthCompareOp : return "DepthCompareOp";
+      case DynamicState::eDepthBoundsTestEnable : return "DepthBoundsTestEnable";
+      case DynamicState::eStencilTestEnable : return "StencilTestEnable";
+      case DynamicState::eStencilOp : return "StencilOp";
+      case DynamicState::eRasterizerDiscardEnable : return "RasterizerDiscardEnable";
+      case DynamicState::eDepthBiasEnable : return "DepthBiasEnable";
+      case DynamicState::ePrimitiveRestartEnable : return "PrimitiveRestartEnable";
+      case DynamicState::eViewportWScalingNV : return "ViewportWScalingNV";
+      case DynamicState::eDiscardRectangleEXT : return "DiscardRectangleEXT";
+      case DynamicState::eSampleLocationsEXT : return "SampleLocationsEXT";
+      case DynamicState::eRayTracingPipelineStackSizeKHR : return "RayTracingPipelineStackSizeKHR";
+      case DynamicState::eViewportShadingRatePaletteNV : return "ViewportShadingRatePaletteNV";
+      case DynamicState::eViewportCoarseSampleOrderNV : return "ViewportCoarseSampleOrderNV";
+      case DynamicState::eExclusiveScissorNV : return "ExclusiveScissorNV";
+      case DynamicState::eFragmentShadingRateKHR : return "FragmentShadingRateKHR";
+      case DynamicState::eLineStippleEXT : return "LineStippleEXT";
+      case DynamicState::eVertexInputEXT : return "VertexInputEXT";
+      case DynamicState::ePatchControlPointsEXT : return "PatchControlPointsEXT";
+      case DynamicState::eLogicOpEXT : return "LogicOpEXT";
+      case DynamicState::eColorWriteEnableEXT : return "ColorWriteEnableEXT";
+      case DynamicState::eTessellationDomainOriginEXT : return "TessellationDomainOriginEXT";
+      case DynamicState::eDepthClampEnableEXT : return "DepthClampEnableEXT";
+      case DynamicState::ePolygonModeEXT : return "PolygonModeEXT";
+      case DynamicState::eRasterizationSamplesEXT : return "RasterizationSamplesEXT";
+      case DynamicState::eSampleMaskEXT : return "SampleMaskEXT";
+      case DynamicState::eAlphaToCoverageEnableEXT : return "AlphaToCoverageEnableEXT";
+      case DynamicState::eAlphaToOneEnableEXT : return "AlphaToOneEnableEXT";
+      case DynamicState::eLogicOpEnableEXT : return "LogicOpEnableEXT";
+      case DynamicState::eColorBlendEnableEXT : return "ColorBlendEnableEXT";
+      case DynamicState::eColorBlendEquationEXT : return "ColorBlendEquationEXT";
+      case DynamicState::eColorWriteMaskEXT : return "ColorWriteMaskEXT";
+      case DynamicState::eRasterizationStreamEXT : return "RasterizationStreamEXT";
+      case DynamicState::eConservativeRasterizationModeEXT : return "ConservativeRasterizationModeEXT";
+      case DynamicState::eExtraPrimitiveOverestimationSizeEXT : return "ExtraPrimitiveOverestimationSizeEXT";
+      case DynamicState::eDepthClipEnableEXT : return "DepthClipEnableEXT";
+      case DynamicState::eSampleLocationsEnableEXT : return "SampleLocationsEnableEXT";
+      case DynamicState::eColorBlendAdvancedEXT : return "ColorBlendAdvancedEXT";
+      case DynamicState::eProvokingVertexModeEXT : return "ProvokingVertexModeEXT";
+      case DynamicState::eLineRasterizationModeEXT : return "LineRasterizationModeEXT";
+      case DynamicState::eLineStippleEnableEXT : return "LineStippleEnableEXT";
+      case DynamicState::eDepthClipNegativeOneToOneEXT : return "DepthClipNegativeOneToOneEXT";
+      case DynamicState::eViewportWScalingEnableNV : return "ViewportWScalingEnableNV";
+      case DynamicState::eViewportSwizzleNV : return "ViewportSwizzleNV";
+      case DynamicState::eCoverageToColorEnableNV : return "CoverageToColorEnableNV";
+      case DynamicState::eCoverageToColorLocationNV : return "CoverageToColorLocationNV";
+      case DynamicState::eCoverageModulationModeNV : return "CoverageModulationModeNV";
+      case DynamicState::eCoverageModulationTableEnableNV : return "CoverageModulationTableEnableNV";
+      case DynamicState::eCoverageModulationTableNV : return "CoverageModulationTableNV";
+      case DynamicState::eShadingRateImageEnableNV : return "ShadingRateImageEnableNV";
+      case DynamicState::eRepresentativeFragmentTestEnableNV : return "RepresentativeFragmentTestEnableNV";
+      case DynamicState::eCoverageReductionModeNV : return "CoverageReductionModeNV";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( FrontFace value )
+  {
+    switch ( value )
+    {
+      case FrontFace::eCounterClockwise : return "CounterClockwise";
+      case FrontFace::eClockwise : return "Clockwise";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( LogicOp value )
+  {
+    switch ( value )
+    {
+      case LogicOp::eClear : return "Clear";
+      case LogicOp::eAnd : return "And";
+      case LogicOp::eAndReverse : return "AndReverse";
+      case LogicOp::eCopy : return "Copy";
+      case LogicOp::eAndInverted : return "AndInverted";
+      case LogicOp::eNoOp : return "NoOp";
+      case LogicOp::eXor : return "Xor";
+      case LogicOp::eOr : return "Or";
+      case LogicOp::eNor : return "Nor";
+      case LogicOp::eEquivalent : return "Equivalent";
+      case LogicOp::eInvert : return "Invert";
+      case LogicOp::eOrReverse : return "OrReverse";
+      case LogicOp::eCopyInverted : return "CopyInverted";
+      case LogicOp::eOrInverted : return "OrInverted";
+      case LogicOp::eNand : return "Nand";
+      case LogicOp::eSet : return "Set";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineCreateFlagBits value )
+  {
+    switch ( value )
+    {
+      case PipelineCreateFlagBits::eDisableOptimization : return "DisableOptimization";
+      case PipelineCreateFlagBits::eAllowDerivatives : return "AllowDerivatives";
+      case PipelineCreateFlagBits::eDerivative : return "Derivative";
+      case PipelineCreateFlagBits::eViewIndexFromDeviceIndex : return "ViewIndexFromDeviceIndex";
+      case PipelineCreateFlagBits::eDispatchBase : return "DispatchBase";
+      case PipelineCreateFlagBits::eFailOnPipelineCompileRequired : return "FailOnPipelineCompileRequired";
+      case PipelineCreateFlagBits::eEarlyReturnOnFailure : return "EarlyReturnOnFailure";
+      case PipelineCreateFlagBits::eRenderingFragmentShadingRateAttachmentKHR : return "RenderingFragmentShadingRateAttachmentKHR";
+      case PipelineCreateFlagBits::eRenderingFragmentDensityMapAttachmentEXT : return "RenderingFragmentDensityMapAttachmentEXT";
+      case PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR : return "RayTracingNoNullAnyHitShadersKHR";
+      case PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR : return "RayTracingNoNullClosestHitShadersKHR";
+      case PipelineCreateFlagBits::eRayTracingNoNullMissShadersKHR : return "RayTracingNoNullMissShadersKHR";
+      case PipelineCreateFlagBits::eRayTracingNoNullIntersectionShadersKHR : return "RayTracingNoNullIntersectionShadersKHR";
+      case PipelineCreateFlagBits::eRayTracingSkipTrianglesKHR : return "RayTracingSkipTrianglesKHR";
+      case PipelineCreateFlagBits::eRayTracingSkipAabbsKHR : return "RayTracingSkipAabbsKHR";
+      case PipelineCreateFlagBits::eRayTracingShaderGroupHandleCaptureReplayKHR : return "RayTracingShaderGroupHandleCaptureReplayKHR";
+      case PipelineCreateFlagBits::eDeferCompileNV : return "DeferCompileNV";
+      case PipelineCreateFlagBits::eCaptureStatisticsKHR : return "CaptureStatisticsKHR";
+      case PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR : return "CaptureInternalRepresentationsKHR";
+      case PipelineCreateFlagBits::eIndirectBindableNV : return "IndirectBindableNV";
+      case PipelineCreateFlagBits::eLibraryKHR : return "LibraryKHR";
+      case PipelineCreateFlagBits::eDescriptorBufferEXT : return "DescriptorBufferEXT";
+      case PipelineCreateFlagBits::eRetainLinkTimeOptimizationInfoEXT : return "RetainLinkTimeOptimizationInfoEXT";
+      case PipelineCreateFlagBits::eLinkTimeOptimizationEXT : return "LinkTimeOptimizationEXT";
+      case PipelineCreateFlagBits::eRayTracingAllowMotionNV : return "RayTracingAllowMotionNV";
+      case PipelineCreateFlagBits::eColorAttachmentFeedbackLoopEXT : return "ColorAttachmentFeedbackLoopEXT";
+      case PipelineCreateFlagBits::eDepthStencilAttachmentFeedbackLoopEXT : return "DepthStencilAttachmentFeedbackLoopEXT";
+      case PipelineCreateFlagBits::eRayTracingOpacityMicromapEXT : return "RayTracingOpacityMicromapEXT";
+      case PipelineCreateFlagBits::eNoProtectedAccessEXT : return "NoProtectedAccessEXT";
+      case PipelineCreateFlagBits::eProtectedAccessOnlyEXT : return "ProtectedAccessOnlyEXT";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineShaderStageCreateFlagBits value )
+  {
+    switch ( value )
+    {
+      case PipelineShaderStageCreateFlagBits::eAllowVaryingSubgroupSize : return "AllowVaryingSubgroupSize";
+      case PipelineShaderStageCreateFlagBits::eRequireFullSubgroups : return "RequireFullSubgroups";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( PolygonMode value )
+  {
+    switch ( value )
+    {
+      case PolygonMode::eFill : return "Fill";
+      case PolygonMode::eLine : return "Line";
+      case PolygonMode::ePoint : return "Point";
+      case PolygonMode::eFillRectangleNV : return "FillRectangleNV";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( PrimitiveTopology value )
+  {
+    switch ( value )
+    {
+      case PrimitiveTopology::ePointList : return "PointList";
+      case PrimitiveTopology::eLineList : return "LineList";
+      case PrimitiveTopology::eLineStrip : return "LineStrip";
+      case PrimitiveTopology::eTriangleList : return "TriangleList";
+      case PrimitiveTopology::eTriangleStrip : return "TriangleStrip";
+      case PrimitiveTopology::eTriangleFan : return "TriangleFan";
+      case PrimitiveTopology::eLineListWithAdjacency : return "LineListWithAdjacency";
+      case PrimitiveTopology::eLineStripWithAdjacency : return "LineStripWithAdjacency";
+      case PrimitiveTopology::eTriangleListWithAdjacency : return "TriangleListWithAdjacency";
+      case PrimitiveTopology::eTriangleStripWithAdjacency : return "TriangleStripWithAdjacency";
+      case PrimitiveTopology::ePatchList : return "PatchList";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( ShaderStageFlagBits value )
+  {
+    switch ( value )
+    {
+      case ShaderStageFlagBits::eVertex : return "Vertex";
+      case ShaderStageFlagBits::eTessellationControl : return "TessellationControl";
+      case ShaderStageFlagBits::eTessellationEvaluation : return "TessellationEvaluation";
+      case ShaderStageFlagBits::eGeometry : return "Geometry";
+      case ShaderStageFlagBits::eFragment : return "Fragment";
+      case ShaderStageFlagBits::eCompute : return "Compute";
+      case ShaderStageFlagBits::eAllGraphics : return "AllGraphics";
+      case ShaderStageFlagBits::eAll : return "All";
+      case ShaderStageFlagBits::eRaygenKHR : return "RaygenKHR";
+      case ShaderStageFlagBits::eAnyHitKHR : return "AnyHitKHR";
+      case ShaderStageFlagBits::eClosestHitKHR : return "ClosestHitKHR";
+      case ShaderStageFlagBits::eMissKHR : return "MissKHR";
+      case ShaderStageFlagBits::eIntersectionKHR : return "IntersectionKHR";
+      case ShaderStageFlagBits::eCallableKHR : return "CallableKHR";
+      case ShaderStageFlagBits::eTaskEXT : return "TaskEXT";
+      case ShaderStageFlagBits::eMeshEXT : return "MeshEXT";
+      case ShaderStageFlagBits::eSubpassShadingHUAWEI : return "SubpassShadingHUAWEI";
+      case ShaderStageFlagBits::eClusterCullingHUAWEI : return "ClusterCullingHUAWEI";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( StencilOp value )
+  {
+    switch ( value )
+    {
+      case StencilOp::eKeep : return "Keep";
+      case StencilOp::eZero : return "Zero";
+      case StencilOp::eReplace : return "Replace";
+      case StencilOp::eIncrementAndClamp : return "IncrementAndClamp";
+      case StencilOp::eDecrementAndClamp : return "DecrementAndClamp";
+      case StencilOp::eInvert : return "Invert";
+      case StencilOp::eIncrementAndWrap : return "IncrementAndWrap";
+      case StencilOp::eDecrementAndWrap : return "DecrementAndWrap";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( VertexInputRate value )
+  {
+    switch ( value )
+    {
+      case VertexInputRate::eVertex : return "Vertex";
+      case VertexInputRate::eInstance : return "Instance";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineDynamicStateCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineInputAssemblyStateCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineMultisampleStateCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineTessellationStateCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineVertexInputStateCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineViewportStateCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( BorderColor value )
+  {
+    switch ( value )
+    {
+      case BorderColor::eFloatTransparentBlack : return "FloatTransparentBlack";
+      case BorderColor::eIntTransparentBlack : return "IntTransparentBlack";
+      case BorderColor::eFloatOpaqueBlack : return "FloatOpaqueBlack";
+      case BorderColor::eIntOpaqueBlack : return "IntOpaqueBlack";
+      case BorderColor::eFloatOpaqueWhite : return "FloatOpaqueWhite";
+      case BorderColor::eIntOpaqueWhite : return "IntOpaqueWhite";
+      case BorderColor::eFloatCustomEXT : return "FloatCustomEXT";
+      case BorderColor::eIntCustomEXT : return "IntCustomEXT";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( Filter value )
+  {
+    switch ( value )
+    {
+      case Filter::eNearest : return "Nearest";
+      case Filter::eLinear : return "Linear";
+      case Filter::eCubicEXT : return "CubicEXT";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( SamplerAddressMode value )
+  {
+    switch ( value )
+    {
+      case SamplerAddressMode::eRepeat : return "Repeat";
+      case SamplerAddressMode::eMirroredRepeat : return "MirroredRepeat";
+      case SamplerAddressMode::eClampToEdge : return "ClampToEdge";
+      case SamplerAddressMode::eClampToBorder : return "ClampToBorder";
+      case SamplerAddressMode::eMirrorClampToEdge : return "MirrorClampToEdge";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( SamplerCreateFlagBits value )
+  {
+    switch ( value )
+    {
+      case SamplerCreateFlagBits::eSubsampledEXT : return "SubsampledEXT";
+      case SamplerCreateFlagBits::eSubsampledCoarseReconstructionEXT : return "SubsampledCoarseReconstructionEXT";
+      case SamplerCreateFlagBits::eDescriptorBufferCaptureReplayEXT : return "DescriptorBufferCaptureReplayEXT";
+      case SamplerCreateFlagBits::eNonSeamlessCubeMapEXT : return "NonSeamlessCubeMapEXT";
+      case SamplerCreateFlagBits::eImageProcessingQCOM : return "ImageProcessingQCOM";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( SamplerMipmapMode value )
+  {
+    switch ( value )
+    {
+      case SamplerMipmapMode::eNearest : return "Nearest";
+      case SamplerMipmapMode::eLinear : return "Linear";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( DescriptorPoolCreateFlagBits value )
+  {
+    switch ( value )
+    {
+      case DescriptorPoolCreateFlagBits::eFreeDescriptorSet : return "FreeDescriptorSet";
+      case DescriptorPoolCreateFlagBits::eUpdateAfterBind : return "UpdateAfterBind";
+      case DescriptorPoolCreateFlagBits::eHostOnlyEXT : return "HostOnlyEXT";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( DescriptorSetLayoutCreateFlagBits value )
+  {
+    switch ( value )
+    {
+      case DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPool : return "UpdateAfterBindPool";
+      case DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR : return "PushDescriptorKHR";
+      case DescriptorSetLayoutCreateFlagBits::eDescriptorBufferEXT : return "DescriptorBufferEXT";
+      case DescriptorSetLayoutCreateFlagBits::eEmbeddedImmutableSamplersEXT : return "EmbeddedImmutableSamplersEXT";
+      case DescriptorSetLayoutCreateFlagBits::eHostOnlyPoolEXT : return "HostOnlyPoolEXT";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( DescriptorType value )
+  {
+    switch ( value )
+    {
+      case DescriptorType::eSampler : return "Sampler";
+      case DescriptorType::eCombinedImageSampler : return "CombinedImageSampler";
+      case DescriptorType::eSampledImage : return "SampledImage";
+      case DescriptorType::eStorageImage : return "StorageImage";
+      case DescriptorType::eUniformTexelBuffer : return "UniformTexelBuffer";
+      case DescriptorType::eStorageTexelBuffer : return "StorageTexelBuffer";
+      case DescriptorType::eUniformBuffer : return "UniformBuffer";
+      case DescriptorType::eStorageBuffer : return "StorageBuffer";
+      case DescriptorType::eUniformBufferDynamic : return "UniformBufferDynamic";
+      case DescriptorType::eStorageBufferDynamic : return "StorageBufferDynamic";
+      case DescriptorType::eInputAttachment : return "InputAttachment";
+      case DescriptorType::eInlineUniformBlock : return "InlineUniformBlock";
+      case DescriptorType::eAccelerationStructureKHR : return "AccelerationStructureKHR";
+      case DescriptorType::eAccelerationStructureNV : return "AccelerationStructureNV";
+      case DescriptorType::eSampleWeightImageQCOM : return "SampleWeightImageQCOM";
+      case DescriptorType::eBlockMatchImageQCOM : return "BlockMatchImageQCOM";
+      case DescriptorType::eMutableEXT : return "MutableEXT";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( DescriptorPoolResetFlagBits )
+  {
+    return "(void)";
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( AccessFlagBits value )
+  {
+    switch ( value )
+    {
+      case AccessFlagBits::eIndirectCommandRead : return "IndirectCommandRead";
+      case AccessFlagBits::eIndexRead : return "IndexRead";
+      case AccessFlagBits::eVertexAttributeRead : return "VertexAttributeRead";
+      case AccessFlagBits::eUniformRead : return "UniformRead";
+      case AccessFlagBits::eInputAttachmentRead : return "InputAttachmentRead";
+      case AccessFlagBits::eShaderRead : return "ShaderRead";
+      case AccessFlagBits::eShaderWrite : return "ShaderWrite";
+      case AccessFlagBits::eColorAttachmentRead : return "ColorAttachmentRead";
+      case AccessFlagBits::eColorAttachmentWrite : return "ColorAttachmentWrite";
+      case AccessFlagBits::eDepthStencilAttachmentRead : return "DepthStencilAttachmentRead";
+      case AccessFlagBits::eDepthStencilAttachmentWrite : return "DepthStencilAttachmentWrite";
+      case AccessFlagBits::eTransferRead : return "TransferRead";
+      case AccessFlagBits::eTransferWrite : return "TransferWrite";
+      case AccessFlagBits::eHostRead : return "HostRead";
+      case AccessFlagBits::eHostWrite : return "HostWrite";
+      case AccessFlagBits::eMemoryRead : return "MemoryRead";
+      case AccessFlagBits::eMemoryWrite : return "MemoryWrite";
+      case AccessFlagBits::eNone : return "None";
+      case AccessFlagBits::eTransformFeedbackWriteEXT : return "TransformFeedbackWriteEXT";
+      case AccessFlagBits::eTransformFeedbackCounterReadEXT : return "TransformFeedbackCounterReadEXT";
+      case AccessFlagBits::eTransformFeedbackCounterWriteEXT : return "TransformFeedbackCounterWriteEXT";
+      case AccessFlagBits::eConditionalRenderingReadEXT : return "ConditionalRenderingReadEXT";
+      case AccessFlagBits::eColorAttachmentReadNoncoherentEXT : return "ColorAttachmentReadNoncoherentEXT";
+      case AccessFlagBits::eAccelerationStructureReadKHR : return "AccelerationStructureReadKHR";
+      case AccessFlagBits::eAccelerationStructureWriteKHR : return "AccelerationStructureWriteKHR";
+      case AccessFlagBits::eFragmentDensityMapReadEXT : return "FragmentDensityMapReadEXT";
+      case AccessFlagBits::eFragmentShadingRateAttachmentReadKHR : return "FragmentShadingRateAttachmentReadKHR";
+      case AccessFlagBits::eCommandPreprocessReadNV : return "CommandPreprocessReadNV";
+      case AccessFlagBits::eCommandPreprocessWriteNV : return "CommandPreprocessWriteNV";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( AttachmentDescriptionFlagBits value )
+  {
+    switch ( value )
+    {
+      case AttachmentDescriptionFlagBits::eMayAlias : return "MayAlias";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( AttachmentLoadOp value )
+  {
+    switch ( value )
+    {
+      case AttachmentLoadOp::eLoad : return "Load";
+      case AttachmentLoadOp::eClear : return "Clear";
+      case AttachmentLoadOp::eDontCare : return "DontCare";
+      case AttachmentLoadOp::eNoneEXT : return "NoneEXT";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( AttachmentStoreOp value )
+  {
+    switch ( value )
+    {
+      case AttachmentStoreOp::eStore : return "Store";
+      case AttachmentStoreOp::eDontCare : return "DontCare";
+      case AttachmentStoreOp::eNone : return "None";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( DependencyFlagBits value )
+  {
+    switch ( value )
+    {
+      case DependencyFlagBits::eByRegion : return "ByRegion";
+      case DependencyFlagBits::eDeviceGroup : return "DeviceGroup";
+      case DependencyFlagBits::eViewLocal : return "ViewLocal";
+      case DependencyFlagBits::eFeedbackLoopEXT : return "FeedbackLoopEXT";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( FramebufferCreateFlagBits value )
+  {
+    switch ( value )
+    {
+      case FramebufferCreateFlagBits::eImageless : return "Imageless";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineBindPoint value )
+  {
+    switch ( value )
+    {
+      case PipelineBindPoint::eGraphics : return "Graphics";
+      case PipelineBindPoint::eCompute : return "Compute";
+      case PipelineBindPoint::eRayTracingKHR : return "RayTracingKHR";
+      case PipelineBindPoint::eSubpassShadingHUAWEI : return "SubpassShadingHUAWEI";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( RenderPassCreateFlagBits value )
+  {
+    switch ( value )
+    {
+      case RenderPassCreateFlagBits::eTransformQCOM : return "TransformQCOM";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( SubpassDescriptionFlagBits value )
+  {
+    switch ( value )
+    {
+      case SubpassDescriptionFlagBits::ePerViewAttributesNVX : return "PerViewAttributesNVX";
+      case SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX : return "PerViewPositionXOnlyNVX";
+      case SubpassDescriptionFlagBits::eFragmentRegionQCOM : return "FragmentRegionQCOM";
+      case SubpassDescriptionFlagBits::eShaderResolveQCOM : return "ShaderResolveQCOM";
+      case SubpassDescriptionFlagBits::eRasterizationOrderAttachmentColorAccessEXT : return "RasterizationOrderAttachmentColorAccessEXT";
+      case SubpassDescriptionFlagBits::eRasterizationOrderAttachmentDepthAccessEXT : return "RasterizationOrderAttachmentDepthAccessEXT";
+      case SubpassDescriptionFlagBits::eRasterizationOrderAttachmentStencilAccessEXT : return "RasterizationOrderAttachmentStencilAccessEXT";
+      case SubpassDescriptionFlagBits::eEnableLegacyDitheringEXT : return "EnableLegacyDitheringEXT";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( CommandPoolCreateFlagBits value )
+  {
+    switch ( value )
+    {
+      case CommandPoolCreateFlagBits::eTransient : return "Transient";
+      case CommandPoolCreateFlagBits::eResetCommandBuffer : return "ResetCommandBuffer";
+      case CommandPoolCreateFlagBits::eProtected : return "Protected";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( CommandPoolResetFlagBits value )
+  {
+    switch ( value )
+    {
+      case CommandPoolResetFlagBits::eReleaseResources : return "ReleaseResources";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( CommandBufferLevel value )
+  {
+    switch ( value )
+    {
+      case CommandBufferLevel::ePrimary : return "Primary";
+      case CommandBufferLevel::eSecondary : return "Secondary";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( CommandBufferResetFlagBits value )
+  {
+    switch ( value )
+    {
+      case CommandBufferResetFlagBits::eReleaseResources : return "ReleaseResources";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( CommandBufferUsageFlagBits value )
+  {
+    switch ( value )
+    {
+      case CommandBufferUsageFlagBits::eOneTimeSubmit : return "OneTimeSubmit";
+      case CommandBufferUsageFlagBits::eRenderPassContinue : return "RenderPassContinue";
+      case CommandBufferUsageFlagBits::eSimultaneousUse : return "SimultaneousUse";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( QueryControlFlagBits value )
+  {
+    switch ( value )
+    {
+      case QueryControlFlagBits::ePrecise : return "Precise";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( IndexType value )
+  {
+    switch ( value )
+    {
+      case IndexType::eUint16 : return "Uint16";
+      case IndexType::eUint32 : return "Uint32";
+      case IndexType::eNoneKHR : return "NoneKHR";
+      case IndexType::eUint8EXT : return "Uint8EXT";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( StencilFaceFlagBits value )
+  {
+    switch ( value )
+    {
+      case StencilFaceFlagBits::eFront : return "Front";
+      case StencilFaceFlagBits::eBack : return "Back";
+      case StencilFaceFlagBits::eFrontAndBack : return "FrontAndBack";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( SubpassContents value )
+  {
+    switch ( value )
+    {
+      case SubpassContents::eInline : return "Inline";
+      case SubpassContents::eSecondaryCommandBuffers : return "SecondaryCommandBuffers";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+  //=== VK_VERSION_1_1 ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( SubgroupFeatureFlagBits value )
+  {
+    switch ( value )
+    {
+      case SubgroupFeatureFlagBits::eBasic : return "Basic";
+      case SubgroupFeatureFlagBits::eVote : return "Vote";
+      case SubgroupFeatureFlagBits::eArithmetic : return "Arithmetic";
+      case SubgroupFeatureFlagBits::eBallot : return "Ballot";
+      case SubgroupFeatureFlagBits::eShuffle : return "Shuffle";
+      case SubgroupFeatureFlagBits::eShuffleRelative : return "ShuffleRelative";
+      case SubgroupFeatureFlagBits::eClustered : return "Clustered";
+      case SubgroupFeatureFlagBits::eQuad : return "Quad";
+      case SubgroupFeatureFlagBits::ePartitionedNV : return "PartitionedNV";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( PeerMemoryFeatureFlagBits value )
+  {
+    switch ( value )
+    {
+      case PeerMemoryFeatureFlagBits::eCopySrc : return "CopySrc";
+      case PeerMemoryFeatureFlagBits::eCopyDst : return "CopyDst";
+      case PeerMemoryFeatureFlagBits::eGenericSrc : return "GenericSrc";
+      case PeerMemoryFeatureFlagBits::eGenericDst : return "GenericDst";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( MemoryAllocateFlagBits value )
+  {
+    switch ( value )
+    {
+      case MemoryAllocateFlagBits::eDeviceMask : return "DeviceMask";
+      case MemoryAllocateFlagBits::eDeviceAddress : return "DeviceAddress";
+      case MemoryAllocateFlagBits::eDeviceAddressCaptureReplay : return "DeviceAddressCaptureReplay";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( CommandPoolTrimFlagBits )
+  {
+    return "(void)";
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( PointClippingBehavior value )
+  {
+    switch ( value )
+    {
+      case PointClippingBehavior::eAllClipPlanes : return "AllClipPlanes";
+      case PointClippingBehavior::eUserClipPlanesOnly : return "UserClipPlanesOnly";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( TessellationDomainOrigin value )
+  {
+    switch ( value )
+    {
+      case TessellationDomainOrigin::eUpperLeft : return "UpperLeft";
+      case TessellationDomainOrigin::eLowerLeft : return "LowerLeft";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( DeviceQueueCreateFlagBits value )
+  {
+    switch ( value )
+    {
+      case DeviceQueueCreateFlagBits::eProtected : return "Protected";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( SamplerYcbcrModelConversion value )
+  {
+    switch ( value )
+    {
+      case SamplerYcbcrModelConversion::eRgbIdentity : return "RgbIdentity";
+      case SamplerYcbcrModelConversion::eYcbcrIdentity : return "YcbcrIdentity";
+      case SamplerYcbcrModelConversion::eYcbcr709 : return "Ycbcr709";
+      case SamplerYcbcrModelConversion::eYcbcr601 : return "Ycbcr601";
+      case SamplerYcbcrModelConversion::eYcbcr2020 : return "Ycbcr2020";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( SamplerYcbcrRange value )
+  {
+    switch ( value )
+    {
+      case SamplerYcbcrRange::eItuFull : return "ItuFull";
+      case SamplerYcbcrRange::eItuNarrow : return "ItuNarrow";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( ChromaLocation value )
+  {
+    switch ( value )
+    {
+      case ChromaLocation::eCositedEven : return "CositedEven";
+      case ChromaLocation::eMidpoint : return "Midpoint";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( DescriptorUpdateTemplateType value )
+  {
+    switch ( value )
+    {
+      case DescriptorUpdateTemplateType::eDescriptorSet : return "DescriptorSet";
+      case DescriptorUpdateTemplateType::ePushDescriptorsKHR : return "PushDescriptorsKHR";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( DescriptorUpdateTemplateCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlagBits value )
+  {
+    switch ( value )
+    {
+      case ExternalMemoryHandleTypeFlagBits::eOpaqueFd : return "OpaqueFd";
+      case ExternalMemoryHandleTypeFlagBits::eOpaqueWin32 : return "OpaqueWin32";
+      case ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt : return "OpaqueWin32Kmt";
+      case ExternalMemoryHandleTypeFlagBits::eD3D11Texture : return "D3D11Texture";
+      case ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt : return "D3D11TextureKmt";
+      case ExternalMemoryHandleTypeFlagBits::eD3D12Heap : return "D3D12Heap";
+      case ExternalMemoryHandleTypeFlagBits::eD3D12Resource : return "D3D12Resource";
+      case ExternalMemoryHandleTypeFlagBits::eDmaBufEXT : return "DmaBufEXT";
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+      case ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID : return "AndroidHardwareBufferANDROID";
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+      case ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT : return "HostAllocationEXT";
+      case ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT : return "HostMappedForeignMemoryEXT";
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+      case ExternalMemoryHandleTypeFlagBits::eZirconVmoFUCHSIA : return "ZirconVmoFUCHSIA";
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+      case ExternalMemoryHandleTypeFlagBits::eRdmaAddressNV : return "RdmaAddressNV";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlagBits value )
+  {
+    switch ( value )
+    {
+      case ExternalMemoryFeatureFlagBits::eDedicatedOnly : return "DedicatedOnly";
+      case ExternalMemoryFeatureFlagBits::eExportable : return "Exportable";
+      case ExternalMemoryFeatureFlagBits::eImportable : return "Importable";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( ExternalFenceHandleTypeFlagBits value )
+  {
+    switch ( value )
+    {
+      case ExternalFenceHandleTypeFlagBits::eOpaqueFd : return "OpaqueFd";
+      case ExternalFenceHandleTypeFlagBits::eOpaqueWin32 : return "OpaqueWin32";
+      case ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt : return "OpaqueWin32Kmt";
+      case ExternalFenceHandleTypeFlagBits::eSyncFd : return "SyncFd";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( ExternalFenceFeatureFlagBits value )
+  {
+    switch ( value )
+    {
+      case ExternalFenceFeatureFlagBits::eExportable : return "Exportable";
+      case ExternalFenceFeatureFlagBits::eImportable : return "Importable";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( FenceImportFlagBits value )
+  {
+    switch ( value )
+    {
+      case FenceImportFlagBits::eTemporary : return "Temporary";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( SemaphoreImportFlagBits value )
+  {
+    switch ( value )
+    {
+      case SemaphoreImportFlagBits::eTemporary : return "Temporary";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreHandleTypeFlagBits value )
+  {
+    switch ( value )
+    {
+      case ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd : return "OpaqueFd";
+      case ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32 : return "OpaqueWin32";
+      case ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt : return "OpaqueWin32Kmt";
+      case ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence : return "D3D12Fence";
+      case ExternalSemaphoreHandleTypeFlagBits::eSyncFd : return "SyncFd";
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+      case ExternalSemaphoreHandleTypeFlagBits::eZirconEventFUCHSIA : return "ZirconEventFUCHSIA";
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreFeatureFlagBits value )
+  {
+    switch ( value )
+    {
+      case ExternalSemaphoreFeatureFlagBits::eExportable : return "Exportable";
+      case ExternalSemaphoreFeatureFlagBits::eImportable : return "Importable";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+  //=== VK_VERSION_1_2 ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( DriverId value )
+  {
+    switch ( value )
+    {
+      case DriverId::eAmdProprietary : return "AmdProprietary";
+      case DriverId::eAmdOpenSource : return "AmdOpenSource";
+      case DriverId::eMesaRadv : return "MesaRadv";
+      case DriverId::eNvidiaProprietary : return "NvidiaProprietary";
+      case DriverId::eIntelProprietaryWindows : return "IntelProprietaryWindows";
+      case DriverId::eIntelOpenSourceMESA : return "IntelOpenSourceMESA";
+      case DriverId::eImaginationProprietary : return "ImaginationProprietary";
+      case DriverId::eQualcommProprietary : return "QualcommProprietary";
+      case DriverId::eArmProprietary : return "ArmProprietary";
+      case DriverId::eGoogleSwiftshader : return "GoogleSwiftshader";
+      case DriverId::eGgpProprietary : return "GgpProprietary";
+      case DriverId::eBroadcomProprietary : return "BroadcomProprietary";
+      case DriverId::eMesaLlvmpipe : return "MesaLlvmpipe";
+      case DriverId::eMoltenvk : return "Moltenvk";
+      case DriverId::eCoreaviProprietary : return "CoreaviProprietary";
+      case DriverId::eJuiceProprietary : return "JuiceProprietary";
+      case DriverId::eVerisiliconProprietary : return "VerisiliconProprietary";
+      case DriverId::eMesaTurnip : return "MesaTurnip";
+      case DriverId::eMesaV3Dv : return "MesaV3Dv";
+      case DriverId::eMesaPanvk : return "MesaPanvk";
+      case DriverId::eSamsungProprietary : return "SamsungProprietary";
+      case DriverId::eMesaVenus : return "MesaVenus";
+      case DriverId::eMesaDozen : return "MesaDozen";
+      case DriverId::eMesaNvk : return "MesaNvk";
+      case DriverId::eImaginationOpenSourceMESA : return "ImaginationOpenSourceMESA";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( ShaderFloatControlsIndependence value )
+  {
+    switch ( value )
+    {
+      case ShaderFloatControlsIndependence::e32BitOnly : return "32BitOnly";
+      case ShaderFloatControlsIndependence::eAll : return "All";
+      case ShaderFloatControlsIndependence::eNone : return "None";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( DescriptorBindingFlagBits value )
+  {
+    switch ( value )
+    {
+      case DescriptorBindingFlagBits::eUpdateAfterBind : return "UpdateAfterBind";
+      case DescriptorBindingFlagBits::eUpdateUnusedWhilePending : return "UpdateUnusedWhilePending";
+      case DescriptorBindingFlagBits::ePartiallyBound : return "PartiallyBound";
+      case DescriptorBindingFlagBits::eVariableDescriptorCount : return "VariableDescriptorCount";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( ResolveModeFlagBits value )
+  {
+    switch ( value )
+    {
+      case ResolveModeFlagBits::eNone : return "None";
+      case ResolveModeFlagBits::eSampleZero : return "SampleZero";
+      case ResolveModeFlagBits::eAverage : return "Average";
+      case ResolveModeFlagBits::eMin : return "Min";
+      case ResolveModeFlagBits::eMax : return "Max";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( SamplerReductionMode value )
+  {
+    switch ( value )
+    {
+      case SamplerReductionMode::eWeightedAverage : return "WeightedAverage";
+      case SamplerReductionMode::eMin : return "Min";
+      case SamplerReductionMode::eMax : return "Max";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( SemaphoreType value )
+  {
+    switch ( value )
+    {
+      case SemaphoreType::eBinary : return "Binary";
+      case SemaphoreType::eTimeline : return "Timeline";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( SemaphoreWaitFlagBits value )
+  {
+    switch ( value )
+    {
+      case SemaphoreWaitFlagBits::eAny : return "Any";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+  //=== VK_VERSION_1_3 ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineCreationFeedbackFlagBits value )
+  {
+    switch ( value )
+    {
+      case PipelineCreationFeedbackFlagBits::eValid : return "Valid";
+      case PipelineCreationFeedbackFlagBits::eApplicationPipelineCacheHit : return "ApplicationPipelineCacheHit";
+      case PipelineCreationFeedbackFlagBits::eBasePipelineAcceleration : return "BasePipelineAcceleration";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( ToolPurposeFlagBits value )
+  {
+    switch ( value )
+    {
+      case ToolPurposeFlagBits::eValidation : return "Validation";
+      case ToolPurposeFlagBits::eProfiling : return "Profiling";
+      case ToolPurposeFlagBits::eTracing : return "Tracing";
+      case ToolPurposeFlagBits::eAdditionalFeatures : return "AdditionalFeatures";
+      case ToolPurposeFlagBits::eModifyingFeatures : return "ModifyingFeatures";
+      case ToolPurposeFlagBits::eDebugReportingEXT : return "DebugReportingEXT";
+      case ToolPurposeFlagBits::eDebugMarkersEXT : return "DebugMarkersEXT";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( PrivateDataSlotCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineStageFlagBits2 value )
+  {
+    switch ( value )
+    {
+      case PipelineStageFlagBits2::eNone : return "None";
+      case PipelineStageFlagBits2::eTopOfPipe : return "TopOfPipe";
+      case PipelineStageFlagBits2::eDrawIndirect : return "DrawIndirect";
+      case PipelineStageFlagBits2::eVertexInput : return "VertexInput";
+      case PipelineStageFlagBits2::eVertexShader : return "VertexShader";
+      case PipelineStageFlagBits2::eTessellationControlShader : return "TessellationControlShader";
+      case PipelineStageFlagBits2::eTessellationEvaluationShader : return "TessellationEvaluationShader";
+      case PipelineStageFlagBits2::eGeometryShader : return "GeometryShader";
+      case PipelineStageFlagBits2::eFragmentShader : return "FragmentShader";
+      case PipelineStageFlagBits2::eEarlyFragmentTests : return "EarlyFragmentTests";
+      case PipelineStageFlagBits2::eLateFragmentTests : return "LateFragmentTests";
+      case PipelineStageFlagBits2::eColorAttachmentOutput : return "ColorAttachmentOutput";
+      case PipelineStageFlagBits2::eComputeShader : return "ComputeShader";
+      case PipelineStageFlagBits2::eAllTransfer : return "AllTransfer";
+      case PipelineStageFlagBits2::eBottomOfPipe : return "BottomOfPipe";
+      case PipelineStageFlagBits2::eHost : return "Host";
+      case PipelineStageFlagBits2::eAllGraphics : return "AllGraphics";
+      case PipelineStageFlagBits2::eAllCommands : return "AllCommands";
+      case PipelineStageFlagBits2::eCopy : return "Copy";
+      case PipelineStageFlagBits2::eResolve : return "Resolve";
+      case PipelineStageFlagBits2::eBlit : return "Blit";
+      case PipelineStageFlagBits2::eClear : return "Clear";
+      case PipelineStageFlagBits2::eIndexInput : return "IndexInput";
+      case PipelineStageFlagBits2::eVertexAttributeInput : return "VertexAttributeInput";
+      case PipelineStageFlagBits2::ePreRasterizationShaders : return "PreRasterizationShaders";
+      case PipelineStageFlagBits2::eVideoDecodeKHR : return "VideoDecodeKHR";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+      case PipelineStageFlagBits2::eVideoEncodeKHR : return "VideoEncodeKHR";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      case PipelineStageFlagBits2::eTransformFeedbackEXT : return "TransformFeedbackEXT";
+      case PipelineStageFlagBits2::eConditionalRenderingEXT : return "ConditionalRenderingEXT";
+      case PipelineStageFlagBits2::eCommandPreprocessNV : return "CommandPreprocessNV";
+      case PipelineStageFlagBits2::eFragmentShadingRateAttachmentKHR : return "FragmentShadingRateAttachmentKHR";
+      case PipelineStageFlagBits2::eAccelerationStructureBuildKHR : return "AccelerationStructureBuildKHR";
+      case PipelineStageFlagBits2::eRayTracingShaderKHR : return "RayTracingShaderKHR";
+      case PipelineStageFlagBits2::eFragmentDensityProcessEXT : return "FragmentDensityProcessEXT";
+      case PipelineStageFlagBits2::eTaskShaderEXT : return "TaskShaderEXT";
+      case PipelineStageFlagBits2::eMeshShaderEXT : return "MeshShaderEXT";
+      case PipelineStageFlagBits2::eSubpassShadingHUAWEI : return "SubpassShadingHUAWEI";
+      case PipelineStageFlagBits2::eInvocationMaskHUAWEI : return "InvocationMaskHUAWEI";
+      case PipelineStageFlagBits2::eAccelerationStructureCopyKHR : return "AccelerationStructureCopyKHR";
+      case PipelineStageFlagBits2::eMicromapBuildEXT : return "MicromapBuildEXT";
+      case PipelineStageFlagBits2::eClusterCullingShaderHUAWEI : return "ClusterCullingShaderHUAWEI";
+      case PipelineStageFlagBits2::eOpticalFlowNV : return "OpticalFlowNV";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( AccessFlagBits2 value )
+  {
+    switch ( value )
+    {
+      case AccessFlagBits2::eNone : return "None";
+      case AccessFlagBits2::eIndirectCommandRead : return "IndirectCommandRead";
+      case AccessFlagBits2::eIndexRead : return "IndexRead";
+      case AccessFlagBits2::eVertexAttributeRead : return "VertexAttributeRead";
+      case AccessFlagBits2::eUniformRead : return "UniformRead";
+      case AccessFlagBits2::eInputAttachmentRead : return "InputAttachmentRead";
+      case AccessFlagBits2::eShaderRead : return "ShaderRead";
+      case AccessFlagBits2::eShaderWrite : return "ShaderWrite";
+      case AccessFlagBits2::eColorAttachmentRead : return "ColorAttachmentRead";
+      case AccessFlagBits2::eColorAttachmentWrite : return "ColorAttachmentWrite";
+      case AccessFlagBits2::eDepthStencilAttachmentRead : return "DepthStencilAttachmentRead";
+      case AccessFlagBits2::eDepthStencilAttachmentWrite : return "DepthStencilAttachmentWrite";
+      case AccessFlagBits2::eTransferRead : return "TransferRead";
+      case AccessFlagBits2::eTransferWrite : return "TransferWrite";
+      case AccessFlagBits2::eHostRead : return "HostRead";
+      case AccessFlagBits2::eHostWrite : return "HostWrite";
+      case AccessFlagBits2::eMemoryRead : return "MemoryRead";
+      case AccessFlagBits2::eMemoryWrite : return "MemoryWrite";
+      case AccessFlagBits2::eShaderSampledRead : return "ShaderSampledRead";
+      case AccessFlagBits2::eShaderStorageRead : return "ShaderStorageRead";
+      case AccessFlagBits2::eShaderStorageWrite : return "ShaderStorageWrite";
+      case AccessFlagBits2::eVideoDecodeReadKHR : return "VideoDecodeReadKHR";
+      case AccessFlagBits2::eVideoDecodeWriteKHR : return "VideoDecodeWriteKHR";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+      case AccessFlagBits2::eVideoEncodeReadKHR : return "VideoEncodeReadKHR";
+      case AccessFlagBits2::eVideoEncodeWriteKHR : return "VideoEncodeWriteKHR";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      case AccessFlagBits2::eTransformFeedbackWriteEXT : return "TransformFeedbackWriteEXT";
+      case AccessFlagBits2::eTransformFeedbackCounterReadEXT : return "TransformFeedbackCounterReadEXT";
+      case AccessFlagBits2::eTransformFeedbackCounterWriteEXT : return "TransformFeedbackCounterWriteEXT";
+      case AccessFlagBits2::eConditionalRenderingReadEXT : return "ConditionalRenderingReadEXT";
+      case AccessFlagBits2::eCommandPreprocessReadNV : return "CommandPreprocessReadNV";
+      case AccessFlagBits2::eCommandPreprocessWriteNV : return "CommandPreprocessWriteNV";
+      case AccessFlagBits2::eFragmentShadingRateAttachmentReadKHR : return "FragmentShadingRateAttachmentReadKHR";
+      case AccessFlagBits2::eAccelerationStructureReadKHR : return "AccelerationStructureReadKHR";
+      case AccessFlagBits2::eAccelerationStructureWriteKHR : return "AccelerationStructureWriteKHR";
+      case AccessFlagBits2::eFragmentDensityMapReadEXT : return "FragmentDensityMapReadEXT";
+      case AccessFlagBits2::eColorAttachmentReadNoncoherentEXT : return "ColorAttachmentReadNoncoherentEXT";
+      case AccessFlagBits2::eDescriptorBufferReadEXT : return "DescriptorBufferReadEXT";
+      case AccessFlagBits2::eInvocationMaskReadHUAWEI : return "InvocationMaskReadHUAWEI";
+      case AccessFlagBits2::eShaderBindingTableReadKHR : return "ShaderBindingTableReadKHR";
+      case AccessFlagBits2::eMicromapReadEXT : return "MicromapReadEXT";
+      case AccessFlagBits2::eMicromapWriteEXT : return "MicromapWriteEXT";
+      case AccessFlagBits2::eOpticalFlowReadNV : return "OpticalFlowReadNV";
+      case AccessFlagBits2::eOpticalFlowWriteNV : return "OpticalFlowWriteNV";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( SubmitFlagBits value )
+  {
+    switch ( value )
+    {
+      case SubmitFlagBits::eProtected : return "Protected";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( RenderingFlagBits value )
+  {
+    switch ( value )
+    {
+      case RenderingFlagBits::eContentsSecondaryCommandBuffers : return "ContentsSecondaryCommandBuffers";
+      case RenderingFlagBits::eSuspending : return "Suspending";
+      case RenderingFlagBits::eResuming : return "Resuming";
+      case RenderingFlagBits::eEnableLegacyDitheringEXT : return "EnableLegacyDitheringEXT";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( FormatFeatureFlagBits2 value )
+  {
+    switch ( value )
+    {
+      case FormatFeatureFlagBits2::eSampledImage : return "SampledImage";
+      case FormatFeatureFlagBits2::eStorageImage : return "StorageImage";
+      case FormatFeatureFlagBits2::eStorageImageAtomic : return "StorageImageAtomic";
+      case FormatFeatureFlagBits2::eUniformTexelBuffer : return "UniformTexelBuffer";
+      case FormatFeatureFlagBits2::eStorageTexelBuffer : return "StorageTexelBuffer";
+      case FormatFeatureFlagBits2::eStorageTexelBufferAtomic : return "StorageTexelBufferAtomic";
+      case FormatFeatureFlagBits2::eVertexBuffer : return "VertexBuffer";
+      case FormatFeatureFlagBits2::eColorAttachment : return "ColorAttachment";
+      case FormatFeatureFlagBits2::eColorAttachmentBlend : return "ColorAttachmentBlend";
+      case FormatFeatureFlagBits2::eDepthStencilAttachment : return "DepthStencilAttachment";
+      case FormatFeatureFlagBits2::eBlitSrc : return "BlitSrc";
+      case FormatFeatureFlagBits2::eBlitDst : return "BlitDst";
+      case FormatFeatureFlagBits2::eSampledImageFilterLinear : return "SampledImageFilterLinear";
+      case FormatFeatureFlagBits2::eSampledImageFilterCubic : return "SampledImageFilterCubic";
+      case FormatFeatureFlagBits2::eTransferSrc : return "TransferSrc";
+      case FormatFeatureFlagBits2::eTransferDst : return "TransferDst";
+      case FormatFeatureFlagBits2::eSampledImageFilterMinmax : return "SampledImageFilterMinmax";
+      case FormatFeatureFlagBits2::eMidpointChromaSamples : return "MidpointChromaSamples";
+      case FormatFeatureFlagBits2::eSampledImageYcbcrConversionLinearFilter : return "SampledImageYcbcrConversionLinearFilter";
+      case FormatFeatureFlagBits2::eSampledImageYcbcrConversionSeparateReconstructionFilter : return "SampledImageYcbcrConversionSeparateReconstructionFilter";
+      case FormatFeatureFlagBits2::eSampledImageYcbcrConversionChromaReconstructionExplicit : return "SampledImageYcbcrConversionChromaReconstructionExplicit";
+      case FormatFeatureFlagBits2::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable : return "SampledImageYcbcrConversionChromaReconstructionExplicitForceable";
+      case FormatFeatureFlagBits2::eDisjoint : return "Disjoint";
+      case FormatFeatureFlagBits2::eCositedChromaSamples : return "CositedChromaSamples";
+      case FormatFeatureFlagBits2::eStorageReadWithoutFormat : return "StorageReadWithoutFormat";
+      case FormatFeatureFlagBits2::eStorageWriteWithoutFormat : return "StorageWriteWithoutFormat";
+      case FormatFeatureFlagBits2::eSampledImageDepthComparison : return "SampledImageDepthComparison";
+      case FormatFeatureFlagBits2::eVideoDecodeOutputKHR : return "VideoDecodeOutputKHR";
+      case FormatFeatureFlagBits2::eVideoDecodeDpbKHR : return "VideoDecodeDpbKHR";
+      case FormatFeatureFlagBits2::eAccelerationStructureVertexBufferKHR : return "AccelerationStructureVertexBufferKHR";
+      case FormatFeatureFlagBits2::eFragmentDensityMapEXT : return "FragmentDensityMapEXT";
+      case FormatFeatureFlagBits2::eFragmentShadingRateAttachmentKHR : return "FragmentShadingRateAttachmentKHR";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+      case FormatFeatureFlagBits2::eVideoEncodeInputKHR : return "VideoEncodeInputKHR";
+      case FormatFeatureFlagBits2::eVideoEncodeDpbKHR : return "VideoEncodeDpbKHR";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      case FormatFeatureFlagBits2::eLinearColorAttachmentNV : return "LinearColorAttachmentNV";
+      case FormatFeatureFlagBits2::eWeightImageQCOM : return "WeightImageQCOM";
+      case FormatFeatureFlagBits2::eWeightSampledImageQCOM : return "WeightSampledImageQCOM";
+      case FormatFeatureFlagBits2::eBlockMatchingQCOM : return "BlockMatchingQCOM";
+      case FormatFeatureFlagBits2::eBoxFilterSampledQCOM : return "BoxFilterSampledQCOM";
+      case FormatFeatureFlagBits2::eOpticalFlowImageNV : return "OpticalFlowImageNV";
+      case FormatFeatureFlagBits2::eOpticalFlowVectorNV : return "OpticalFlowVectorNV";
+      case FormatFeatureFlagBits2::eOpticalFlowCostNV : return "OpticalFlowCostNV";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+  //=== VK_KHR_surface ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( SurfaceTransformFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case SurfaceTransformFlagBitsKHR::eIdentity : return "Identity";
+      case SurfaceTransformFlagBitsKHR::eRotate90 : return "Rotate90";
+      case SurfaceTransformFlagBitsKHR::eRotate180 : return "Rotate180";
+      case SurfaceTransformFlagBitsKHR::eRotate270 : return "Rotate270";
+      case SurfaceTransformFlagBitsKHR::eHorizontalMirror : return "HorizontalMirror";
+      case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90 : return "HorizontalMirrorRotate90";
+      case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180 : return "HorizontalMirrorRotate180";
+      case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270 : return "HorizontalMirrorRotate270";
+      case SurfaceTransformFlagBitsKHR::eInherit : return "Inherit";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( PresentModeKHR value )
+  {
+    switch ( value )
+    {
+      case PresentModeKHR::eImmediate : return "Immediate";
+      case PresentModeKHR::eMailbox : return "Mailbox";
+      case PresentModeKHR::eFifo : return "Fifo";
+      case PresentModeKHR::eFifoRelaxed : return "FifoRelaxed";
+      case PresentModeKHR::eSharedDemandRefresh : return "SharedDemandRefresh";
+      case PresentModeKHR::eSharedContinuousRefresh : return "SharedContinuousRefresh";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( ColorSpaceKHR value )
+  {
+    switch ( value )
+    {
+      case ColorSpaceKHR::eSrgbNonlinear : return "SrgbNonlinear";
+      case ColorSpaceKHR::eDisplayP3NonlinearEXT : return "DisplayP3NonlinearEXT";
+      case ColorSpaceKHR::eExtendedSrgbLinearEXT : return "ExtendedSrgbLinearEXT";
+      case ColorSpaceKHR::eDisplayP3LinearEXT : return "DisplayP3LinearEXT";
+      case ColorSpaceKHR::eDciP3NonlinearEXT : return "DciP3NonlinearEXT";
+      case ColorSpaceKHR::eBt709LinearEXT : return "Bt709LinearEXT";
+      case ColorSpaceKHR::eBt709NonlinearEXT : return "Bt709NonlinearEXT";
+      case ColorSpaceKHR::eBt2020LinearEXT : return "Bt2020LinearEXT";
+      case ColorSpaceKHR::eHdr10St2084EXT : return "Hdr10St2084EXT";
+      case ColorSpaceKHR::eDolbyvisionEXT : return "DolbyvisionEXT";
+      case ColorSpaceKHR::eHdr10HlgEXT : return "Hdr10HlgEXT";
+      case ColorSpaceKHR::eAdobergbLinearEXT : return "AdobergbLinearEXT";
+      case ColorSpaceKHR::eAdobergbNonlinearEXT : return "AdobergbNonlinearEXT";
+      case ColorSpaceKHR::ePassThroughEXT : return "PassThroughEXT";
+      case ColorSpaceKHR::eExtendedSrgbNonlinearEXT : return "ExtendedSrgbNonlinearEXT";
+      case ColorSpaceKHR::eDisplayNativeAMD : return "DisplayNativeAMD";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( CompositeAlphaFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case CompositeAlphaFlagBitsKHR::eOpaque : return "Opaque";
+      case CompositeAlphaFlagBitsKHR::ePreMultiplied : return "PreMultiplied";
+      case CompositeAlphaFlagBitsKHR::ePostMultiplied : return "PostMultiplied";
+      case CompositeAlphaFlagBitsKHR::eInherit : return "Inherit";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+  //=== VK_KHR_swapchain ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( SwapchainCreateFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions : return "SplitInstanceBindRegions";
+      case SwapchainCreateFlagBitsKHR::eProtected : return "Protected";
+      case SwapchainCreateFlagBitsKHR::eMutableFormat : return "MutableFormat";
+      case SwapchainCreateFlagBitsKHR::eDeferredMemoryAllocationEXT : return "DeferredMemoryAllocationEXT";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( DeviceGroupPresentModeFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case DeviceGroupPresentModeFlagBitsKHR::eLocal : return "Local";
+      case DeviceGroupPresentModeFlagBitsKHR::eRemote : return "Remote";
+      case DeviceGroupPresentModeFlagBitsKHR::eSum : return "Sum";
+      case DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice : return "LocalMultiDevice";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+  //=== VK_KHR_display ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( DisplayPlaneAlphaFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case DisplayPlaneAlphaFlagBitsKHR::eOpaque : return "Opaque";
+      case DisplayPlaneAlphaFlagBitsKHR::eGlobal : return "Global";
+      case DisplayPlaneAlphaFlagBitsKHR::ePerPixel : return "PerPixel";
+      case DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied : return "PerPixelPremultiplied";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( DisplayModeCreateFlagBitsKHR )
+  {
+    return "(void)";
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( DisplaySurfaceCreateFlagBitsKHR )
+  {
+    return "(void)";
+  }
+
+#if defined( VK_USE_PLATFORM_XLIB_KHR )
+  //=== VK_KHR_xlib_surface ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( XlibSurfaceCreateFlagBitsKHR )
+  {
+    return "(void)";
+  }
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+#if defined( VK_USE_PLATFORM_XCB_KHR )
+  //=== VK_KHR_xcb_surface ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( XcbSurfaceCreateFlagBitsKHR )
+  {
+    return "(void)";
+  }
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
+  //=== VK_KHR_wayland_surface ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( WaylandSurfaceCreateFlagBitsKHR )
+  {
+    return "(void)";
+  }
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  //=== VK_KHR_android_surface ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( AndroidSurfaceCreateFlagBitsKHR )
+  {
+    return "(void)";
+  }
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_win32_surface ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( Win32SurfaceCreateFlagBitsKHR )
+  {
+    return "(void)";
+  }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_EXT_debug_report ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( DebugReportFlagBitsEXT value )
+  {
+    switch ( value )
+    {
+      case DebugReportFlagBitsEXT::eInformation : return "Information";
+      case DebugReportFlagBitsEXT::eWarning : return "Warning";
+      case DebugReportFlagBitsEXT::ePerformanceWarning : return "PerformanceWarning";
+      case DebugReportFlagBitsEXT::eError : return "Error";
+      case DebugReportFlagBitsEXT::eDebug : return "Debug";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( DebugReportObjectTypeEXT value )
+  {
+    switch ( value )
+    {
+      case DebugReportObjectTypeEXT::eUnknown : return "Unknown";
+      case DebugReportObjectTypeEXT::eInstance : return "Instance";
+      case DebugReportObjectTypeEXT::ePhysicalDevice : return "PhysicalDevice";
+      case DebugReportObjectTypeEXT::eDevice : return "Device";
+      case DebugReportObjectTypeEXT::eQueue : return "Queue";
+      case DebugReportObjectTypeEXT::eSemaphore : return "Semaphore";
+      case DebugReportObjectTypeEXT::eCommandBuffer : return "CommandBuffer";
+      case DebugReportObjectTypeEXT::eFence : return "Fence";
+      case DebugReportObjectTypeEXT::eDeviceMemory : return "DeviceMemory";
+      case DebugReportObjectTypeEXT::eBuffer : return "Buffer";
+      case DebugReportObjectTypeEXT::eImage : return "Image";
+      case DebugReportObjectTypeEXT::eEvent : return "Event";
+      case DebugReportObjectTypeEXT::eQueryPool : return "QueryPool";
+      case DebugReportObjectTypeEXT::eBufferView : return "BufferView";
+      case DebugReportObjectTypeEXT::eImageView : return "ImageView";
+      case DebugReportObjectTypeEXT::eShaderModule : return "ShaderModule";
+      case DebugReportObjectTypeEXT::ePipelineCache : return "PipelineCache";
+      case DebugReportObjectTypeEXT::ePipelineLayout : return "PipelineLayout";
+      case DebugReportObjectTypeEXT::eRenderPass : return "RenderPass";
+      case DebugReportObjectTypeEXT::ePipeline : return "Pipeline";
+      case DebugReportObjectTypeEXT::eDescriptorSetLayout : return "DescriptorSetLayout";
+      case DebugReportObjectTypeEXT::eSampler : return "Sampler";
+      case DebugReportObjectTypeEXT::eDescriptorPool : return "DescriptorPool";
+      case DebugReportObjectTypeEXT::eDescriptorSet : return "DescriptorSet";
+      case DebugReportObjectTypeEXT::eFramebuffer : return "Framebuffer";
+      case DebugReportObjectTypeEXT::eCommandPool : return "CommandPool";
+      case DebugReportObjectTypeEXT::eSurfaceKHR : return "SurfaceKHR";
+      case DebugReportObjectTypeEXT::eSwapchainKHR : return "SwapchainKHR";
+      case DebugReportObjectTypeEXT::eDebugReportCallbackEXT : return "DebugReportCallbackEXT";
+      case DebugReportObjectTypeEXT::eDisplayKHR : return "DisplayKHR";
+      case DebugReportObjectTypeEXT::eDisplayModeKHR : return "DisplayModeKHR";
+      case DebugReportObjectTypeEXT::eValidationCacheEXT : return "ValidationCacheEXT";
+      case DebugReportObjectTypeEXT::eSamplerYcbcrConversion : return "SamplerYcbcrConversion";
+      case DebugReportObjectTypeEXT::eDescriptorUpdateTemplate : return "DescriptorUpdateTemplate";
+      case DebugReportObjectTypeEXT::eCuModuleNVX : return "CuModuleNVX";
+      case DebugReportObjectTypeEXT::eCuFunctionNVX : return "CuFunctionNVX";
+      case DebugReportObjectTypeEXT::eAccelerationStructureKHR : return "AccelerationStructureKHR";
+      case DebugReportObjectTypeEXT::eAccelerationStructureNV : return "AccelerationStructureNV";
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+      case DebugReportObjectTypeEXT::eBufferCollectionFUCHSIA : return "BufferCollectionFUCHSIA";
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+  //=== VK_AMD_rasterization_order ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( RasterizationOrderAMD value )
+  {
+    switch ( value )
+    {
+      case RasterizationOrderAMD::eStrict : return "Strict";
+      case RasterizationOrderAMD::eRelaxed : return "Relaxed";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+  //=== VK_KHR_video_queue ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( VideoCodecOperationFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case VideoCodecOperationFlagBitsKHR::eNone : return "None";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+      case VideoCodecOperationFlagBitsKHR::eEncodeH264EXT : return "EncodeH264EXT";
+      case VideoCodecOperationFlagBitsKHR::eEncodeH265EXT : return "EncodeH265EXT";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      case VideoCodecOperationFlagBitsKHR::eDecodeH264 : return "DecodeH264";
+      case VideoCodecOperationFlagBitsKHR::eDecodeH265 : return "DecodeH265";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( VideoChromaSubsamplingFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case VideoChromaSubsamplingFlagBitsKHR::eInvalid : return "Invalid";
+      case VideoChromaSubsamplingFlagBitsKHR::eMonochrome : return "Monochrome";
+      case VideoChromaSubsamplingFlagBitsKHR::e420 : return "420";
+      case VideoChromaSubsamplingFlagBitsKHR::e422 : return "422";
+      case VideoChromaSubsamplingFlagBitsKHR::e444 : return "444";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( VideoComponentBitDepthFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case VideoComponentBitDepthFlagBitsKHR::eInvalid : return "Invalid";
+      case VideoComponentBitDepthFlagBitsKHR::e8 : return "8";
+      case VideoComponentBitDepthFlagBitsKHR::e10 : return "10";
+      case VideoComponentBitDepthFlagBitsKHR::e12 : return "12";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( VideoCapabilityFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case VideoCapabilityFlagBitsKHR::eProtectedContent : return "ProtectedContent";
+      case VideoCapabilityFlagBitsKHR::eSeparateReferenceImages : return "SeparateReferenceImages";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( VideoSessionCreateFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case VideoSessionCreateFlagBitsKHR::eProtectedContent : return "ProtectedContent";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( VideoCodingControlFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case VideoCodingControlFlagBitsKHR::eReset : return "Reset";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+      case VideoCodingControlFlagBitsKHR::eEncodeRateControl : return "EncodeRateControl";
+      case VideoCodingControlFlagBitsKHR::eEncodeRateControlLayer : return "EncodeRateControlLayer";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( QueryResultStatusKHR value )
+  {
+    switch ( value )
+    {
+      case QueryResultStatusKHR::eError : return "Error";
+      case QueryResultStatusKHR::eNotReady : return "NotReady";
+      case QueryResultStatusKHR::eComplete : return "Complete";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( VideoSessionParametersCreateFlagBitsKHR )
+  {
+    return "(void)";
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( VideoBeginCodingFlagBitsKHR )
+  {
+    return "(void)";
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( VideoEndCodingFlagBitsKHR )
+  {
+    return "(void)";
+  }
+
+  //=== VK_KHR_video_decode_queue ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( VideoDecodeCapabilityFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case VideoDecodeCapabilityFlagBitsKHR::eDpbAndOutputCoincide : return "DpbAndOutputCoincide";
+      case VideoDecodeCapabilityFlagBitsKHR::eDpbAndOutputDistinct : return "DpbAndOutputDistinct";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( VideoDecodeUsageFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case VideoDecodeUsageFlagBitsKHR::eDefault : return "Default";
+      case VideoDecodeUsageFlagBitsKHR::eTranscoding : return "Transcoding";
+      case VideoDecodeUsageFlagBitsKHR::eOffline : return "Offline";
+      case VideoDecodeUsageFlagBitsKHR::eStreaming : return "Streaming";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( VideoDecodeFlagBitsKHR )
+  {
+    return "(void)";
+  }
+
+  //=== VK_EXT_transform_feedback ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateStreamCreateFlagBitsEXT )
+  {
+    return "(void)";
+  }
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  //=== VK_EXT_video_encode_h264 ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264CapabilityFlagBitsEXT value )
+  {
+    switch ( value )
+    {
+      case VideoEncodeH264CapabilityFlagBitsEXT::eDirect8X8InferenceEnabled : return "Direct8X8InferenceEnabled";
+      case VideoEncodeH264CapabilityFlagBitsEXT::eDirect8X8InferenceDisabled : return "Direct8X8InferenceDisabled";
+      case VideoEncodeH264CapabilityFlagBitsEXT::eSeparateColourPlane : return "SeparateColourPlane";
+      case VideoEncodeH264CapabilityFlagBitsEXT::eQpprimeYZeroTransformBypass : return "QpprimeYZeroTransformBypass";
+      case VideoEncodeH264CapabilityFlagBitsEXT::eScalingLists : return "ScalingLists";
+      case VideoEncodeH264CapabilityFlagBitsEXT::eHrdCompliance : return "HrdCompliance";
+      case VideoEncodeH264CapabilityFlagBitsEXT::eChromaQpOffset : return "ChromaQpOffset";
+      case VideoEncodeH264CapabilityFlagBitsEXT::eSecondChromaQpOffset : return "SecondChromaQpOffset";
+      case VideoEncodeH264CapabilityFlagBitsEXT::ePicInitQpMinus26 : return "PicInitQpMinus26";
+      case VideoEncodeH264CapabilityFlagBitsEXT::eWeightedPred : return "WeightedPred";
+      case VideoEncodeH264CapabilityFlagBitsEXT::eWeightedBipredExplicit : return "WeightedBipredExplicit";
+      case VideoEncodeH264CapabilityFlagBitsEXT::eWeightedBipredImplicit : return "WeightedBipredImplicit";
+      case VideoEncodeH264CapabilityFlagBitsEXT::eWeightedPredNoTable : return "WeightedPredNoTable";
+      case VideoEncodeH264CapabilityFlagBitsEXT::eTransform8X8 : return "Transform8X8";
+      case VideoEncodeH264CapabilityFlagBitsEXT::eCabac : return "Cabac";
+      case VideoEncodeH264CapabilityFlagBitsEXT::eCavlc : return "Cavlc";
+      case VideoEncodeH264CapabilityFlagBitsEXT::eDeblockingFilterDisabled : return "DeblockingFilterDisabled";
+      case VideoEncodeH264CapabilityFlagBitsEXT::eDeblockingFilterEnabled : return "DeblockingFilterEnabled";
+      case VideoEncodeH264CapabilityFlagBitsEXT::eDeblockingFilterPartial : return "DeblockingFilterPartial";
+      case VideoEncodeH264CapabilityFlagBitsEXT::eDisableDirectSpatialMvPred : return "DisableDirectSpatialMvPred";
+      case VideoEncodeH264CapabilityFlagBitsEXT::eMultipleSlicePerFrame : return "MultipleSlicePerFrame";
+      case VideoEncodeH264CapabilityFlagBitsEXT::eSliceMbCount : return "SliceMbCount";
+      case VideoEncodeH264CapabilityFlagBitsEXT::eRowUnalignedSlice : return "RowUnalignedSlice";
+      case VideoEncodeH264CapabilityFlagBitsEXT::eDifferentSliceType : return "DifferentSliceType";
+      case VideoEncodeH264CapabilityFlagBitsEXT::eBFrameInL1List : return "BFrameInL1List";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264InputModeFlagBitsEXT value )
+  {
+    switch ( value )
+    {
+      case VideoEncodeH264InputModeFlagBitsEXT::eFrame : return "Frame";
+      case VideoEncodeH264InputModeFlagBitsEXT::eSlice : return "Slice";
+      case VideoEncodeH264InputModeFlagBitsEXT::eNonVcl : return "NonVcl";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264OutputModeFlagBitsEXT value )
+  {
+    switch ( value )
+    {
+      case VideoEncodeH264OutputModeFlagBitsEXT::eFrame : return "Frame";
+      case VideoEncodeH264OutputModeFlagBitsEXT::eSlice : return "Slice";
+      case VideoEncodeH264OutputModeFlagBitsEXT::eNonVcl : return "NonVcl";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264RateControlStructureEXT value )
+  {
+    switch ( value )
+    {
+      case VideoEncodeH264RateControlStructureEXT::eUnknown : return "Unknown";
+      case VideoEncodeH264RateControlStructureEXT::eFlat : return "Flat";
+      case VideoEncodeH264RateControlStructureEXT::eDyadic : return "Dyadic";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  //=== VK_EXT_video_encode_h265 ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265CapabilityFlagBitsEXT value )
+  {
+    switch ( value )
+    {
+      case VideoEncodeH265CapabilityFlagBitsEXT::eSeparateColourPlane : return "SeparateColourPlane";
+      case VideoEncodeH265CapabilityFlagBitsEXT::eScalingLists : return "ScalingLists";
+      case VideoEncodeH265CapabilityFlagBitsEXT::eSampleAdaptiveOffsetEnabled : return "SampleAdaptiveOffsetEnabled";
+      case VideoEncodeH265CapabilityFlagBitsEXT::ePcmEnable : return "PcmEnable";
+      case VideoEncodeH265CapabilityFlagBitsEXT::eSpsTemporalMvpEnabled : return "SpsTemporalMvpEnabled";
+      case VideoEncodeH265CapabilityFlagBitsEXT::eHrdCompliance : return "HrdCompliance";
+      case VideoEncodeH265CapabilityFlagBitsEXT::eInitQpMinus26 : return "InitQpMinus26";
+      case VideoEncodeH265CapabilityFlagBitsEXT::eLog2ParallelMergeLevelMinus2 : return "Log2ParallelMergeLevelMinus2";
+      case VideoEncodeH265CapabilityFlagBitsEXT::eSignDataHidingEnabled : return "SignDataHidingEnabled";
+      case VideoEncodeH265CapabilityFlagBitsEXT::eTransformSkipEnabled : return "TransformSkipEnabled";
+      case VideoEncodeH265CapabilityFlagBitsEXT::eTransformSkipDisabled : return "TransformSkipDisabled";
+      case VideoEncodeH265CapabilityFlagBitsEXT::ePpsSliceChromaQpOffsetsPresent : return "PpsSliceChromaQpOffsetsPresent";
+      case VideoEncodeH265CapabilityFlagBitsEXT::eWeightedPred : return "WeightedPred";
+      case VideoEncodeH265CapabilityFlagBitsEXT::eWeightedBipred : return "WeightedBipred";
+      case VideoEncodeH265CapabilityFlagBitsEXT::eWeightedPredNoTable : return "WeightedPredNoTable";
+      case VideoEncodeH265CapabilityFlagBitsEXT::eTransquantBypassEnabled : return "TransquantBypassEnabled";
+      case VideoEncodeH265CapabilityFlagBitsEXT::eEntropyCodingSyncEnabled : return "EntropyCodingSyncEnabled";
+      case VideoEncodeH265CapabilityFlagBitsEXT::eDeblockingFilterOverrideEnabled : return "DeblockingFilterOverrideEnabled";
+      case VideoEncodeH265CapabilityFlagBitsEXT::eMultipleTilePerFrame : return "MultipleTilePerFrame";
+      case VideoEncodeH265CapabilityFlagBitsEXT::eMultipleSlicePerTile : return "MultipleSlicePerTile";
+      case VideoEncodeH265CapabilityFlagBitsEXT::eMultipleTilePerSlice : return "MultipleTilePerSlice";
+      case VideoEncodeH265CapabilityFlagBitsEXT::eSliceSegmentCtbCount : return "SliceSegmentCtbCount";
+      case VideoEncodeH265CapabilityFlagBitsEXT::eRowUnalignedSliceSegment : return "RowUnalignedSliceSegment";
+      case VideoEncodeH265CapabilityFlagBitsEXT::eDependentSliceSegment : return "DependentSliceSegment";
+      case VideoEncodeH265CapabilityFlagBitsEXT::eDifferentSliceType : return "DifferentSliceType";
+      case VideoEncodeH265CapabilityFlagBitsEXT::eBFrameInL1List : return "BFrameInL1List";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265InputModeFlagBitsEXT value )
+  {
+    switch ( value )
+    {
+      case VideoEncodeH265InputModeFlagBitsEXT::eFrame : return "Frame";
+      case VideoEncodeH265InputModeFlagBitsEXT::eSliceSegment : return "SliceSegment";
+      case VideoEncodeH265InputModeFlagBitsEXT::eNonVcl : return "NonVcl";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265OutputModeFlagBitsEXT value )
+  {
+    switch ( value )
+    {
+      case VideoEncodeH265OutputModeFlagBitsEXT::eFrame : return "Frame";
+      case VideoEncodeH265OutputModeFlagBitsEXT::eSliceSegment : return "SliceSegment";
+      case VideoEncodeH265OutputModeFlagBitsEXT::eNonVcl : return "NonVcl";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265CtbSizeFlagBitsEXT value )
+  {
+    switch ( value )
+    {
+      case VideoEncodeH265CtbSizeFlagBitsEXT::e16 : return "16";
+      case VideoEncodeH265CtbSizeFlagBitsEXT::e32 : return "32";
+      case VideoEncodeH265CtbSizeFlagBitsEXT::e64 : return "64";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265TransformBlockSizeFlagBitsEXT value )
+  {
+    switch ( value )
+    {
+      case VideoEncodeH265TransformBlockSizeFlagBitsEXT::e4 : return "4";
+      case VideoEncodeH265TransformBlockSizeFlagBitsEXT::e8 : return "8";
+      case VideoEncodeH265TransformBlockSizeFlagBitsEXT::e16 : return "16";
+      case VideoEncodeH265TransformBlockSizeFlagBitsEXT::e32 : return "32";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265RateControlStructureEXT value )
+  {
+    switch ( value )
+    {
+      case VideoEncodeH265RateControlStructureEXT::eUnknown : return "Unknown";
+      case VideoEncodeH265RateControlStructureEXT::eFlat : return "Flat";
+      case VideoEncodeH265RateControlStructureEXT::eDyadic : return "Dyadic";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  //=== VK_KHR_video_decode_h264 ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( VideoDecodeH264PictureLayoutFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case VideoDecodeH264PictureLayoutFlagBitsKHR::eProgressive : return "Progressive";
+      case VideoDecodeH264PictureLayoutFlagBitsKHR::eInterlacedInterleavedLines : return "InterlacedInterleavedLines";
+      case VideoDecodeH264PictureLayoutFlagBitsKHR::eInterlacedSeparatePlanes : return "InterlacedSeparatePlanes";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+  //=== VK_AMD_shader_info ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( ShaderInfoTypeAMD value )
+  {
+    switch ( value )
+    {
+      case ShaderInfoTypeAMD::eStatistics : return "Statistics";
+      case ShaderInfoTypeAMD::eBinary : return "Binary";
+      case ShaderInfoTypeAMD::eDisassembly : return "Disassembly";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+#if defined( VK_USE_PLATFORM_GGP )
+  //=== VK_GGP_stream_descriptor_surface ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( StreamDescriptorSurfaceCreateFlagBitsGGP )
+  {
+    return "(void)";
+  }
+#endif /*VK_USE_PLATFORM_GGP*/
+
+  //=== VK_NV_external_memory_capabilities ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlagBitsNV value )
+  {
+    switch ( value )
+    {
+      case ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32 : return "OpaqueWin32";
+      case ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt : return "OpaqueWin32Kmt";
+      case ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image : return "D3D11Image";
+      case ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt : return "D3D11ImageKmt";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlagBitsNV value )
+  {
+    switch ( value )
+    {
+      case ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly : return "DedicatedOnly";
+      case ExternalMemoryFeatureFlagBitsNV::eExportable : return "Exportable";
+      case ExternalMemoryFeatureFlagBitsNV::eImportable : return "Importable";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+  //=== VK_EXT_validation_flags ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( ValidationCheckEXT value )
+  {
+    switch ( value )
+    {
+      case ValidationCheckEXT::eAll : return "All";
+      case ValidationCheckEXT::eShaders : return "Shaders";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+#if defined( VK_USE_PLATFORM_VI_NN )
+  //=== VK_NN_vi_surface ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( ViSurfaceCreateFlagBitsNN )
+  {
+    return "(void)";
+  }
+#endif /*VK_USE_PLATFORM_VI_NN*/
+
+  //=== VK_EXT_pipeline_robustness ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineRobustnessBufferBehaviorEXT value )
+  {
+    switch ( value )
+    {
+      case PipelineRobustnessBufferBehaviorEXT::eDeviceDefault : return "DeviceDefault";
+      case PipelineRobustnessBufferBehaviorEXT::eDisabled : return "Disabled";
+      case PipelineRobustnessBufferBehaviorEXT::eRobustBufferAccess : return "RobustBufferAccess";
+      case PipelineRobustnessBufferBehaviorEXT::eRobustBufferAccess2 : return "RobustBufferAccess2";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineRobustnessImageBehaviorEXT value )
+  {
+    switch ( value )
+    {
+      case PipelineRobustnessImageBehaviorEXT::eDeviceDefault : return "DeviceDefault";
+      case PipelineRobustnessImageBehaviorEXT::eDisabled : return "Disabled";
+      case PipelineRobustnessImageBehaviorEXT::eRobustImageAccess : return "RobustImageAccess";
+      case PipelineRobustnessImageBehaviorEXT::eRobustImageAccess2 : return "RobustImageAccess2";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+  //=== VK_EXT_conditional_rendering ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( ConditionalRenderingFlagBitsEXT value )
+  {
+    switch ( value )
+    {
+      case ConditionalRenderingFlagBitsEXT::eInverted : return "Inverted";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+  //=== VK_EXT_display_surface_counter ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( SurfaceCounterFlagBitsEXT value )
+  {
+    switch ( value )
+    {
+      case SurfaceCounterFlagBitsEXT::eVblank : return "Vblank";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+  //=== VK_EXT_display_control ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( DisplayPowerStateEXT value )
+  {
+    switch ( value )
+    {
+      case DisplayPowerStateEXT::eOff : return "Off";
+      case DisplayPowerStateEXT::eSuspend : return "Suspend";
+      case DisplayPowerStateEXT::eOn : return "On";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( DeviceEventTypeEXT value )
+  {
+    switch ( value )
+    {
+      case DeviceEventTypeEXT::eDisplayHotplug : return "DisplayHotplug";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( DisplayEventTypeEXT value )
+  {
+    switch ( value )
+    {
+      case DisplayEventTypeEXT::eFirstPixelOut : return "FirstPixelOut";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+  //=== VK_NV_viewport_swizzle ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( ViewportCoordinateSwizzleNV value )
+  {
+    switch ( value )
+    {
+      case ViewportCoordinateSwizzleNV::ePositiveX : return "PositiveX";
+      case ViewportCoordinateSwizzleNV::eNegativeX : return "NegativeX";
+      case ViewportCoordinateSwizzleNV::ePositiveY : return "PositiveY";
+      case ViewportCoordinateSwizzleNV::eNegativeY : return "NegativeY";
+      case ViewportCoordinateSwizzleNV::ePositiveZ : return "PositiveZ";
+      case ViewportCoordinateSwizzleNV::eNegativeZ : return "NegativeZ";
+      case ViewportCoordinateSwizzleNV::ePositiveW : return "PositiveW";
+      case ViewportCoordinateSwizzleNV::eNegativeW : return "NegativeW";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineViewportSwizzleStateCreateFlagBitsNV )
+  {
+    return "(void)";
+  }
+
+  //=== VK_EXT_discard_rectangles ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( DiscardRectangleModeEXT value )
+  {
+    switch ( value )
+    {
+      case DiscardRectangleModeEXT::eInclusive : return "Inclusive";
+      case DiscardRectangleModeEXT::eExclusive : return "Exclusive";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineDiscardRectangleStateCreateFlagBitsEXT )
+  {
+    return "(void)";
+  }
+
+  //=== VK_EXT_conservative_rasterization ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( ConservativeRasterizationModeEXT value )
+  {
+    switch ( value )
+    {
+      case ConservativeRasterizationModeEXT::eDisabled : return "Disabled";
+      case ConservativeRasterizationModeEXT::eOverestimate : return "Overestimate";
+      case ConservativeRasterizationModeEXT::eUnderestimate : return "Underestimate";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationConservativeStateCreateFlagBitsEXT )
+  {
+    return "(void)";
+  }
+
+  //=== VK_EXT_depth_clip_enable ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationDepthClipStateCreateFlagBitsEXT )
+  {
+    return "(void)";
+  }
+
+  //=== VK_KHR_performance_query ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( PerformanceCounterDescriptionFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case PerformanceCounterDescriptionFlagBitsKHR::ePerformanceImpacting : return "PerformanceImpacting";
+      case PerformanceCounterDescriptionFlagBitsKHR::eConcurrentlyImpacted : return "ConcurrentlyImpacted";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( PerformanceCounterScopeKHR value )
+  {
+    switch ( value )
+    {
+      case PerformanceCounterScopeKHR::eCommandBuffer : return "CommandBuffer";
+      case PerformanceCounterScopeKHR::eRenderPass : return "RenderPass";
+      case PerformanceCounterScopeKHR::eCommand : return "Command";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( PerformanceCounterStorageKHR value )
+  {
+    switch ( value )
+    {
+      case PerformanceCounterStorageKHR::eInt32 : return "Int32";
+      case PerformanceCounterStorageKHR::eInt64 : return "Int64";
+      case PerformanceCounterStorageKHR::eUint32 : return "Uint32";
+      case PerformanceCounterStorageKHR::eUint64 : return "Uint64";
+      case PerformanceCounterStorageKHR::eFloat32 : return "Float32";
+      case PerformanceCounterStorageKHR::eFloat64 : return "Float64";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( PerformanceCounterUnitKHR value )
+  {
+    switch ( value )
+    {
+      case PerformanceCounterUnitKHR::eGeneric : return "Generic";
+      case PerformanceCounterUnitKHR::ePercentage : return "Percentage";
+      case PerformanceCounterUnitKHR::eNanoseconds : return "Nanoseconds";
+      case PerformanceCounterUnitKHR::eBytes : return "Bytes";
+      case PerformanceCounterUnitKHR::eBytesPerSecond : return "BytesPerSecond";
+      case PerformanceCounterUnitKHR::eKelvin : return "Kelvin";
+      case PerformanceCounterUnitKHR::eWatts : return "Watts";
+      case PerformanceCounterUnitKHR::eVolts : return "Volts";
+      case PerformanceCounterUnitKHR::eAmps : return "Amps";
+      case PerformanceCounterUnitKHR::eHertz : return "Hertz";
+      case PerformanceCounterUnitKHR::eCycles : return "Cycles";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( AcquireProfilingLockFlagBitsKHR )
+  {
+    return "(void)";
+  }
+
+#if defined( VK_USE_PLATFORM_IOS_MVK )
+  //=== VK_MVK_ios_surface ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( IOSSurfaceCreateFlagBitsMVK )
+  {
+    return "(void)";
+  }
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+#if defined( VK_USE_PLATFORM_MACOS_MVK )
+  //=== VK_MVK_macos_surface ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( MacOSSurfaceCreateFlagBitsMVK )
+  {
+    return "(void)";
+  }
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+
+  //=== VK_EXT_debug_utils ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageSeverityFlagBitsEXT value )
+  {
+    switch ( value )
+    {
+      case DebugUtilsMessageSeverityFlagBitsEXT::eVerbose : return "Verbose";
+      case DebugUtilsMessageSeverityFlagBitsEXT::eInfo : return "Info";
+      case DebugUtilsMessageSeverityFlagBitsEXT::eWarning : return "Warning";
+      case DebugUtilsMessageSeverityFlagBitsEXT::eError : return "Error";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageTypeFlagBitsEXT value )
+  {
+    switch ( value )
+    {
+      case DebugUtilsMessageTypeFlagBitsEXT::eGeneral : return "General";
+      case DebugUtilsMessageTypeFlagBitsEXT::eValidation : return "Validation";
+      case DebugUtilsMessageTypeFlagBitsEXT::ePerformance : return "Performance";
+      case DebugUtilsMessageTypeFlagBitsEXT::eDeviceAddressBinding : return "DeviceAddressBinding";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCallbackDataFlagBitsEXT )
+  {
+    return "(void)";
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCreateFlagBitsEXT )
+  {
+    return "(void)";
+  }
+
+  //=== VK_EXT_blend_operation_advanced ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( BlendOverlapEXT value )
+  {
+    switch ( value )
+    {
+      case BlendOverlapEXT::eUncorrelated : return "Uncorrelated";
+      case BlendOverlapEXT::eDisjoint : return "Disjoint";
+      case BlendOverlapEXT::eConjoint : return "Conjoint";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+  //=== VK_NV_fragment_coverage_to_color ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineCoverageToColorStateCreateFlagBitsNV )
+  {
+    return "(void)";
+  }
+
+  //=== VK_KHR_acceleration_structure ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( AccelerationStructureTypeKHR value )
+  {
+    switch ( value )
+    {
+      case AccelerationStructureTypeKHR::eTopLevel : return "TopLevel";
+      case AccelerationStructureTypeKHR::eBottomLevel : return "BottomLevel";
+      case AccelerationStructureTypeKHR::eGeneric : return "Generic";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( AccelerationStructureBuildTypeKHR value )
+  {
+    switch ( value )
+    {
+      case AccelerationStructureBuildTypeKHR::eHost : return "Host";
+      case AccelerationStructureBuildTypeKHR::eDevice : return "Device";
+      case AccelerationStructureBuildTypeKHR::eHostOrDevice : return "HostOrDevice";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( GeometryFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case GeometryFlagBitsKHR::eOpaque : return "Opaque";
+      case GeometryFlagBitsKHR::eNoDuplicateAnyHitInvocation : return "NoDuplicateAnyHitInvocation";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( GeometryInstanceFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable : return "TriangleFacingCullDisable";
+      case GeometryInstanceFlagBitsKHR::eTriangleFlipFacing : return "TriangleFlipFacing";
+      case GeometryInstanceFlagBitsKHR::eForceOpaque : return "ForceOpaque";
+      case GeometryInstanceFlagBitsKHR::eForceNoOpaque : return "ForceNoOpaque";
+      case GeometryInstanceFlagBitsKHR::eForceOpacityMicromap2StateEXT : return "ForceOpacityMicromap2StateEXT";
+      case GeometryInstanceFlagBitsKHR::eDisableOpacityMicromapsEXT : return "DisableOpacityMicromapsEXT";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( BuildAccelerationStructureFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case BuildAccelerationStructureFlagBitsKHR::eAllowUpdate : return "AllowUpdate";
+      case BuildAccelerationStructureFlagBitsKHR::eAllowCompaction : return "AllowCompaction";
+      case BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace : return "PreferFastTrace";
+      case BuildAccelerationStructureFlagBitsKHR::ePreferFastBuild : return "PreferFastBuild";
+      case BuildAccelerationStructureFlagBitsKHR::eLowMemory : return "LowMemory";
+      case BuildAccelerationStructureFlagBitsKHR::eMotionNV : return "MotionNV";
+      case BuildAccelerationStructureFlagBitsKHR::eAllowOpacityMicromapUpdateEXT : return "AllowOpacityMicromapUpdateEXT";
+      case BuildAccelerationStructureFlagBitsKHR::eAllowDisableOpacityMicromapsEXT : return "AllowDisableOpacityMicromapsEXT";
+      case BuildAccelerationStructureFlagBitsKHR::eAllowOpacityMicromapDataUpdateEXT : return "AllowOpacityMicromapDataUpdateEXT";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( CopyAccelerationStructureModeKHR value )
+  {
+    switch ( value )
+    {
+      case CopyAccelerationStructureModeKHR::eClone : return "Clone";
+      case CopyAccelerationStructureModeKHR::eCompact : return "Compact";
+      case CopyAccelerationStructureModeKHR::eSerialize : return "Serialize";
+      case CopyAccelerationStructureModeKHR::eDeserialize : return "Deserialize";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( GeometryTypeKHR value )
+  {
+    switch ( value )
+    {
+      case GeometryTypeKHR::eTriangles : return "Triangles";
+      case GeometryTypeKHR::eAabbs : return "Aabbs";
+      case GeometryTypeKHR::eInstances : return "Instances";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( AccelerationStructureCompatibilityKHR value )
+  {
+    switch ( value )
+    {
+      case AccelerationStructureCompatibilityKHR::eCompatible : return "Compatible";
+      case AccelerationStructureCompatibilityKHR::eIncompatible : return "Incompatible";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( AccelerationStructureCreateFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case AccelerationStructureCreateFlagBitsKHR::eDeviceAddressCaptureReplay : return "DeviceAddressCaptureReplay";
+      case AccelerationStructureCreateFlagBitsKHR::eDescriptorBufferCaptureReplayEXT : return "DescriptorBufferCaptureReplayEXT";
+      case AccelerationStructureCreateFlagBitsKHR::eMotionNV : return "MotionNV";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( BuildAccelerationStructureModeKHR value )
+  {
+    switch ( value )
+    {
+      case BuildAccelerationStructureModeKHR::eBuild : return "Build";
+      case BuildAccelerationStructureModeKHR::eUpdate : return "Update";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+  //=== VK_NV_framebuffer_mixed_samples ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( CoverageModulationModeNV value )
+  {
+    switch ( value )
+    {
+      case CoverageModulationModeNV::eNone : return "None";
+      case CoverageModulationModeNV::eRgb : return "Rgb";
+      case CoverageModulationModeNV::eAlpha : return "Alpha";
+      case CoverageModulationModeNV::eRgba : return "Rgba";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineCoverageModulationStateCreateFlagBitsNV )
+  {
+    return "(void)";
+  }
+
+  //=== VK_EXT_validation_cache ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( ValidationCacheHeaderVersionEXT value )
+  {
+    switch ( value )
+    {
+      case ValidationCacheHeaderVersionEXT::eOne : return "One";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( ValidationCacheCreateFlagBitsEXT )
+  {
+    return "(void)";
+  }
+
+  //=== VK_NV_shading_rate_image ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( ShadingRatePaletteEntryNV value )
+  {
+    switch ( value )
+    {
+      case ShadingRatePaletteEntryNV::eNoInvocations : return "NoInvocations";
+      case ShadingRatePaletteEntryNV::e16InvocationsPerPixel : return "16InvocationsPerPixel";
+      case ShadingRatePaletteEntryNV::e8InvocationsPerPixel : return "8InvocationsPerPixel";
+      case ShadingRatePaletteEntryNV::e4InvocationsPerPixel : return "4InvocationsPerPixel";
+      case ShadingRatePaletteEntryNV::e2InvocationsPerPixel : return "2InvocationsPerPixel";
+      case ShadingRatePaletteEntryNV::e1InvocationPerPixel : return "1InvocationPerPixel";
+      case ShadingRatePaletteEntryNV::e1InvocationPer2X1Pixels : return "1InvocationPer2X1Pixels";
+      case ShadingRatePaletteEntryNV::e1InvocationPer1X2Pixels : return "1InvocationPer1X2Pixels";
+      case ShadingRatePaletteEntryNV::e1InvocationPer2X2Pixels : return "1InvocationPer2X2Pixels";
+      case ShadingRatePaletteEntryNV::e1InvocationPer4X2Pixels : return "1InvocationPer4X2Pixels";
+      case ShadingRatePaletteEntryNV::e1InvocationPer2X4Pixels : return "1InvocationPer2X4Pixels";
+      case ShadingRatePaletteEntryNV::e1InvocationPer4X4Pixels : return "1InvocationPer4X4Pixels";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( CoarseSampleOrderTypeNV value )
+  {
+    switch ( value )
+    {
+      case CoarseSampleOrderTypeNV::eDefault : return "Default";
+      case CoarseSampleOrderTypeNV::eCustom : return "Custom";
+      case CoarseSampleOrderTypeNV::ePixelMajor : return "PixelMajor";
+      case CoarseSampleOrderTypeNV::eSampleMajor : return "SampleMajor";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+  //=== VK_NV_ray_tracing ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( AccelerationStructureMemoryRequirementsTypeNV value )
+  {
+    switch ( value )
+    {
+      case AccelerationStructureMemoryRequirementsTypeNV::eObject : return "Object";
+      case AccelerationStructureMemoryRequirementsTypeNV::eBuildScratch : return "BuildScratch";
+      case AccelerationStructureMemoryRequirementsTypeNV::eUpdateScratch : return "UpdateScratch";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+  //=== VK_AMD_pipeline_compiler_control ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineCompilerControlFlagBitsAMD )
+  {
+    return "(void)";
+  }
+
+  //=== VK_EXT_calibrated_timestamps ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( TimeDomainEXT value )
+  {
+    switch ( value )
+    {
+      case TimeDomainEXT::eDevice : return "Device";
+      case TimeDomainEXT::eClockMonotonic : return "ClockMonotonic";
+      case TimeDomainEXT::eClockMonotonicRaw : return "ClockMonotonicRaw";
+      case TimeDomainEXT::eQueryPerformanceCounter : return "QueryPerformanceCounter";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+  //=== VK_KHR_global_priority ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( QueueGlobalPriorityKHR value )
+  {
+    switch ( value )
+    {
+      case QueueGlobalPriorityKHR::eLow : return "Low";
+      case QueueGlobalPriorityKHR::eMedium : return "Medium";
+      case QueueGlobalPriorityKHR::eHigh : return "High";
+      case QueueGlobalPriorityKHR::eRealtime : return "Realtime";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+  //=== VK_AMD_memory_overallocation_behavior ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( MemoryOverallocationBehaviorAMD value )
+  {
+    switch ( value )
+    {
+      case MemoryOverallocationBehaviorAMD::eDefault : return "Default";
+      case MemoryOverallocationBehaviorAMD::eAllowed : return "Allowed";
+      case MemoryOverallocationBehaviorAMD::eDisallowed : return "Disallowed";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+  //=== VK_INTEL_performance_query ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( PerformanceConfigurationTypeINTEL value )
+  {
+    switch ( value )
+    {
+      case PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated : return "CommandQueueMetricsDiscoveryActivated";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( QueryPoolSamplingModeINTEL value )
+  {
+    switch ( value )
+    {
+      case QueryPoolSamplingModeINTEL::eManual : return "Manual";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( PerformanceOverrideTypeINTEL value )
+  {
+    switch ( value )
+    {
+      case PerformanceOverrideTypeINTEL::eNullHardware : return "NullHardware";
+      case PerformanceOverrideTypeINTEL::eFlushGpuCaches : return "FlushGpuCaches";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( PerformanceParameterTypeINTEL value )
+  {
+    switch ( value )
+    {
+      case PerformanceParameterTypeINTEL::eHwCountersSupported : return "HwCountersSupported";
+      case PerformanceParameterTypeINTEL::eStreamMarkerValidBits : return "StreamMarkerValidBits";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( PerformanceValueTypeINTEL value )
+  {
+    switch ( value )
+    {
+      case PerformanceValueTypeINTEL::eUint32 : return "Uint32";
+      case PerformanceValueTypeINTEL::eUint64 : return "Uint64";
+      case PerformanceValueTypeINTEL::eFloat : return "Float";
+      case PerformanceValueTypeINTEL::eBool : return "Bool";
+      case PerformanceValueTypeINTEL::eString : return "String";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_imagepipe_surface ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( ImagePipeSurfaceCreateFlagBitsFUCHSIA )
+  {
+    return "(void)";
+  }
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+  //=== VK_EXT_metal_surface ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( MetalSurfaceCreateFlagBitsEXT )
+  {
+    return "(void)";
+  }
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  //=== VK_KHR_fragment_shading_rate ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( FragmentShadingRateCombinerOpKHR value )
+  {
+    switch ( value )
+    {
+      case FragmentShadingRateCombinerOpKHR::eKeep : return "Keep";
+      case FragmentShadingRateCombinerOpKHR::eReplace : return "Replace";
+      case FragmentShadingRateCombinerOpKHR::eMin : return "Min";
+      case FragmentShadingRateCombinerOpKHR::eMax : return "Max";
+      case FragmentShadingRateCombinerOpKHR::eMul : return "Mul";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+  //=== VK_AMD_shader_core_properties2 ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( ShaderCorePropertiesFlagBitsAMD )
+  {
+    return "(void)";
+  }
+
+  //=== VK_EXT_validation_features ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( ValidationFeatureEnableEXT value )
+  {
+    switch ( value )
+    {
+      case ValidationFeatureEnableEXT::eGpuAssisted : return "GpuAssisted";
+      case ValidationFeatureEnableEXT::eGpuAssistedReserveBindingSlot : return "GpuAssistedReserveBindingSlot";
+      case ValidationFeatureEnableEXT::eBestPractices : return "BestPractices";
+      case ValidationFeatureEnableEXT::eDebugPrintf : return "DebugPrintf";
+      case ValidationFeatureEnableEXT::eSynchronizationValidation : return "SynchronizationValidation";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( ValidationFeatureDisableEXT value )
+  {
+    switch ( value )
+    {
+      case ValidationFeatureDisableEXT::eAll : return "All";
+      case ValidationFeatureDisableEXT::eShaders : return "Shaders";
+      case ValidationFeatureDisableEXT::eThreadSafety : return "ThreadSafety";
+      case ValidationFeatureDisableEXT::eApiParameters : return "ApiParameters";
+      case ValidationFeatureDisableEXT::eObjectLifetimes : return "ObjectLifetimes";
+      case ValidationFeatureDisableEXT::eCoreChecks : return "CoreChecks";
+      case ValidationFeatureDisableEXT::eUniqueHandles : return "UniqueHandles";
+      case ValidationFeatureDisableEXT::eShaderValidationCache : return "ShaderValidationCache";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+  //=== VK_NV_cooperative_matrix ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( ScopeNV value )
+  {
+    switch ( value )
+    {
+      case ScopeNV::eDevice : return "Device";
+      case ScopeNV::eWorkgroup : return "Workgroup";
+      case ScopeNV::eSubgroup : return "Subgroup";
+      case ScopeNV::eQueueFamily : return "QueueFamily";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( ComponentTypeNV value )
+  {
+    switch ( value )
+    {
+      case ComponentTypeNV::eFloat16 : return "Float16";
+      case ComponentTypeNV::eFloat32 : return "Float32";
+      case ComponentTypeNV::eFloat64 : return "Float64";
+      case ComponentTypeNV::eSint8 : return "Sint8";
+      case ComponentTypeNV::eSint16 : return "Sint16";
+      case ComponentTypeNV::eSint32 : return "Sint32";
+      case ComponentTypeNV::eSint64 : return "Sint64";
+      case ComponentTypeNV::eUint8 : return "Uint8";
+      case ComponentTypeNV::eUint16 : return "Uint16";
+      case ComponentTypeNV::eUint32 : return "Uint32";
+      case ComponentTypeNV::eUint64 : return "Uint64";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+  //=== VK_NV_coverage_reduction_mode ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( CoverageReductionModeNV value )
+  {
+    switch ( value )
+    {
+      case CoverageReductionModeNV::eMerge : return "Merge";
+      case CoverageReductionModeNV::eTruncate : return "Truncate";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineCoverageReductionStateCreateFlagBitsNV )
+  {
+    return "(void)";
+  }
+
+  //=== VK_EXT_provoking_vertex ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( ProvokingVertexModeEXT value )
+  {
+    switch ( value )
+    {
+      case ProvokingVertexModeEXT::eFirstVertex : return "FirstVertex";
+      case ProvokingVertexModeEXT::eLastVertex : return "LastVertex";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_EXT_full_screen_exclusive ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( FullScreenExclusiveEXT value )
+  {
+    switch ( value )
+    {
+      case FullScreenExclusiveEXT::eDefault : return "Default";
+      case FullScreenExclusiveEXT::eAllowed : return "Allowed";
+      case FullScreenExclusiveEXT::eDisallowed : return "Disallowed";
+      case FullScreenExclusiveEXT::eApplicationControlled : return "ApplicationControlled";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_EXT_headless_surface ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( HeadlessSurfaceCreateFlagBitsEXT )
+  {
+    return "(void)";
+  }
+
+  //=== VK_EXT_line_rasterization ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( LineRasterizationModeEXT value )
+  {
+    switch ( value )
+    {
+      case LineRasterizationModeEXT::eDefault : return "Default";
+      case LineRasterizationModeEXT::eRectangular : return "Rectangular";
+      case LineRasterizationModeEXT::eBresenham : return "Bresenham";
+      case LineRasterizationModeEXT::eRectangularSmooth : return "RectangularSmooth";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+  //=== VK_KHR_pipeline_executable_properties ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineExecutableStatisticFormatKHR value )
+  {
+    switch ( value )
+    {
+      case PipelineExecutableStatisticFormatKHR::eBool32 : return "Bool32";
+      case PipelineExecutableStatisticFormatKHR::eInt64 : return "Int64";
+      case PipelineExecutableStatisticFormatKHR::eUint64 : return "Uint64";
+      case PipelineExecutableStatisticFormatKHR::eFloat64 : return "Float64";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+  //=== VK_EXT_surface_maintenance1 ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( PresentScalingFlagBitsEXT value )
+  {
+    switch ( value )
+    {
+      case PresentScalingFlagBitsEXT::eOneToOne : return "OneToOne";
+      case PresentScalingFlagBitsEXT::eAspectRatioStretch : return "AspectRatioStretch";
+      case PresentScalingFlagBitsEXT::eStretch : return "Stretch";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( PresentGravityFlagBitsEXT value )
+  {
+    switch ( value )
+    {
+      case PresentGravityFlagBitsEXT::eMin : return "Min";
+      case PresentGravityFlagBitsEXT::eMax : return "Max";
+      case PresentGravityFlagBitsEXT::eCentered : return "Centered";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+  //=== VK_NV_device_generated_commands ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( IndirectStateFlagBitsNV value )
+  {
+    switch ( value )
+    {
+      case IndirectStateFlagBitsNV::eFlagFrontface : return "FlagFrontface";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( IndirectCommandsTokenTypeNV value )
+  {
+    switch ( value )
+    {
+      case IndirectCommandsTokenTypeNV::eShaderGroup : return "ShaderGroup";
+      case IndirectCommandsTokenTypeNV::eStateFlags : return "StateFlags";
+      case IndirectCommandsTokenTypeNV::eIndexBuffer : return "IndexBuffer";
+      case IndirectCommandsTokenTypeNV::eVertexBuffer : return "VertexBuffer";
+      case IndirectCommandsTokenTypeNV::ePushConstant : return "PushConstant";
+      case IndirectCommandsTokenTypeNV::eDrawIndexed : return "DrawIndexed";
+      case IndirectCommandsTokenTypeNV::eDraw : return "Draw";
+      case IndirectCommandsTokenTypeNV::eDrawTasks : return "DrawTasks";
+      case IndirectCommandsTokenTypeNV::eDrawMeshTasks : return "DrawMeshTasks";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( IndirectCommandsLayoutUsageFlagBitsNV value )
+  {
+    switch ( value )
+    {
+      case IndirectCommandsLayoutUsageFlagBitsNV::eExplicitPreprocess : return "ExplicitPreprocess";
+      case IndirectCommandsLayoutUsageFlagBitsNV::eIndexedSequences : return "IndexedSequences";
+      case IndirectCommandsLayoutUsageFlagBitsNV::eUnorderedSequences : return "UnorderedSequences";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+  //=== VK_EXT_device_memory_report ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( DeviceMemoryReportEventTypeEXT value )
+  {
+    switch ( value )
+    {
+      case DeviceMemoryReportEventTypeEXT::eAllocate : return "Allocate";
+      case DeviceMemoryReportEventTypeEXT::eFree : return "Free";
+      case DeviceMemoryReportEventTypeEXT::eImport : return "Import";
+      case DeviceMemoryReportEventTypeEXT::eUnimport : return "Unimport";
+      case DeviceMemoryReportEventTypeEXT::eAllocationFailed : return "AllocationFailed";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( DeviceMemoryReportFlagBitsEXT )
+  {
+    return "(void)";
+  }
+
+  //=== VK_EXT_pipeline_creation_cache_control ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineCacheCreateFlagBits value )
+  {
+    switch ( value )
+    {
+      case PipelineCacheCreateFlagBits::eExternallySynchronized : return "ExternallySynchronized";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  //=== VK_KHR_video_encode_queue ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( VideoEncodeCapabilityFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case VideoEncodeCapabilityFlagBitsKHR::ePrecedingExternallyEncodedBytes : return "PrecedingExternallyEncodedBytes";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( VideoEncodeUsageFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case VideoEncodeUsageFlagBitsKHR::eDefault : return "Default";
+      case VideoEncodeUsageFlagBitsKHR::eTranscoding : return "Transcoding";
+      case VideoEncodeUsageFlagBitsKHR::eStreaming : return "Streaming";
+      case VideoEncodeUsageFlagBitsKHR::eRecording : return "Recording";
+      case VideoEncodeUsageFlagBitsKHR::eConferencing : return "Conferencing";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( VideoEncodeContentFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case VideoEncodeContentFlagBitsKHR::eDefault : return "Default";
+      case VideoEncodeContentFlagBitsKHR::eCamera : return "Camera";
+      case VideoEncodeContentFlagBitsKHR::eDesktop : return "Desktop";
+      case VideoEncodeContentFlagBitsKHR::eRendered : return "Rendered";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( VideoEncodeTuningModeKHR value )
+  {
+    switch ( value )
+    {
+      case VideoEncodeTuningModeKHR::eDefault : return "Default";
+      case VideoEncodeTuningModeKHR::eHighQuality : return "HighQuality";
+      case VideoEncodeTuningModeKHR::eLowLatency : return "LowLatency";
+      case VideoEncodeTuningModeKHR::eUltraLowLatency : return "UltraLowLatency";
+      case VideoEncodeTuningModeKHR::eLossless : return "Lossless";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( VideoEncodeRateControlModeFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case VideoEncodeRateControlModeFlagBitsKHR::eNone : return "None";
+      case VideoEncodeRateControlModeFlagBitsKHR::eCbr : return "Cbr";
+      case VideoEncodeRateControlModeFlagBitsKHR::eVbr : return "Vbr";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( VideoEncodeFlagBitsKHR )
+  {
+    return "(void)";
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( VideoEncodeRateControlFlagBitsKHR )
+  {
+    return "(void)";
+  }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  //=== VK_NV_device_diagnostics_config ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( DeviceDiagnosticsConfigFlagBitsNV value )
+  {
+    switch ( value )
+    {
+      case DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderDebugInfo : return "EnableShaderDebugInfo";
+      case DeviceDiagnosticsConfigFlagBitsNV::eEnableResourceTracking : return "EnableResourceTracking";
+      case DeviceDiagnosticsConfigFlagBitsNV::eEnableAutomaticCheckpoints : return "EnableAutomaticCheckpoints";
+      case DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderErrorReporting : return "EnableShaderErrorReporting";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+  //=== VK_EXT_metal_objects ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( ExportMetalObjectTypeFlagBitsEXT value )
+  {
+    switch ( value )
+    {
+      case ExportMetalObjectTypeFlagBitsEXT::eMetalDevice : return "MetalDevice";
+      case ExportMetalObjectTypeFlagBitsEXT::eMetalCommandQueue : return "MetalCommandQueue";
+      case ExportMetalObjectTypeFlagBitsEXT::eMetalBuffer : return "MetalBuffer";
+      case ExportMetalObjectTypeFlagBitsEXT::eMetalTexture : return "MetalTexture";
+      case ExportMetalObjectTypeFlagBitsEXT::eMetalIosurface : return "MetalIosurface";
+      case ExportMetalObjectTypeFlagBitsEXT::eMetalSharedEvent : return "MetalSharedEvent";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  //=== VK_EXT_graphics_pipeline_library ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( GraphicsPipelineLibraryFlagBitsEXT value )
+  {
+    switch ( value )
+    {
+      case GraphicsPipelineLibraryFlagBitsEXT::eVertexInputInterface : return "VertexInputInterface";
+      case GraphicsPipelineLibraryFlagBitsEXT::ePreRasterizationShaders : return "PreRasterizationShaders";
+      case GraphicsPipelineLibraryFlagBitsEXT::eFragmentShader : return "FragmentShader";
+      case GraphicsPipelineLibraryFlagBitsEXT::eFragmentOutputInterface : return "FragmentOutputInterface";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineLayoutCreateFlagBits value )
+  {
+    switch ( value )
+    {
+      case PipelineLayoutCreateFlagBits::eIndependentSetsEXT : return "IndependentSetsEXT";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+  //=== VK_NV_fragment_shading_rate_enums ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( FragmentShadingRateNV value )
+  {
+    switch ( value )
+    {
+      case FragmentShadingRateNV::e1InvocationPerPixel : return "1InvocationPerPixel";
+      case FragmentShadingRateNV::e1InvocationPer1X2Pixels : return "1InvocationPer1X2Pixels";
+      case FragmentShadingRateNV::e1InvocationPer2X1Pixels : return "1InvocationPer2X1Pixels";
+      case FragmentShadingRateNV::e1InvocationPer2X2Pixels : return "1InvocationPer2X2Pixels";
+      case FragmentShadingRateNV::e1InvocationPer2X4Pixels : return "1InvocationPer2X4Pixels";
+      case FragmentShadingRateNV::e1InvocationPer4X2Pixels : return "1InvocationPer4X2Pixels";
+      case FragmentShadingRateNV::e1InvocationPer4X4Pixels : return "1InvocationPer4X4Pixels";
+      case FragmentShadingRateNV::e2InvocationsPerPixel : return "2InvocationsPerPixel";
+      case FragmentShadingRateNV::e4InvocationsPerPixel : return "4InvocationsPerPixel";
+      case FragmentShadingRateNV::e8InvocationsPerPixel : return "8InvocationsPerPixel";
+      case FragmentShadingRateNV::e16InvocationsPerPixel : return "16InvocationsPerPixel";
+      case FragmentShadingRateNV::eNoInvocations : return "NoInvocations";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( FragmentShadingRateTypeNV value )
+  {
+    switch ( value )
+    {
+      case FragmentShadingRateTypeNV::eFragmentSize : return "FragmentSize";
+      case FragmentShadingRateTypeNV::eEnums : return "Enums";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+  //=== VK_NV_ray_tracing_motion_blur ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( AccelerationStructureMotionInstanceTypeNV value )
+  {
+    switch ( value )
+    {
+      case AccelerationStructureMotionInstanceTypeNV::eStatic : return "Static";
+      case AccelerationStructureMotionInstanceTypeNV::eMatrixMotion : return "MatrixMotion";
+      case AccelerationStructureMotionInstanceTypeNV::eSrtMotion : return "SrtMotion";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( AccelerationStructureMotionInfoFlagBitsNV )
+  {
+    return "(void)";
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( AccelerationStructureMotionInstanceFlagBitsNV )
+  {
+    return "(void)";
+  }
+
+  //=== VK_EXT_image_compression_control ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( ImageCompressionFlagBitsEXT value )
+  {
+    switch ( value )
+    {
+      case ImageCompressionFlagBitsEXT::eDefault : return "Default";
+      case ImageCompressionFlagBitsEXT::eFixedRateDefault : return "FixedRateDefault";
+      case ImageCompressionFlagBitsEXT::eFixedRateExplicit : return "FixedRateExplicit";
+      case ImageCompressionFlagBitsEXT::eDisabled : return "Disabled";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( ImageCompressionFixedRateFlagBitsEXT value )
+  {
+    switch ( value )
+    {
+      case ImageCompressionFixedRateFlagBitsEXT::eNone : return "None";
+      case ImageCompressionFixedRateFlagBitsEXT::e1Bpc : return "1Bpc";
+      case ImageCompressionFixedRateFlagBitsEXT::e2Bpc : return "2Bpc";
+      case ImageCompressionFixedRateFlagBitsEXT::e3Bpc : return "3Bpc";
+      case ImageCompressionFixedRateFlagBitsEXT::e4Bpc : return "4Bpc";
+      case ImageCompressionFixedRateFlagBitsEXT::e5Bpc : return "5Bpc";
+      case ImageCompressionFixedRateFlagBitsEXT::e6Bpc : return "6Bpc";
+      case ImageCompressionFixedRateFlagBitsEXT::e7Bpc : return "7Bpc";
+      case ImageCompressionFixedRateFlagBitsEXT::e8Bpc : return "8Bpc";
+      case ImageCompressionFixedRateFlagBitsEXT::e9Bpc : return "9Bpc";
+      case ImageCompressionFixedRateFlagBitsEXT::e10Bpc : return "10Bpc";
+      case ImageCompressionFixedRateFlagBitsEXT::e11Bpc : return "11Bpc";
+      case ImageCompressionFixedRateFlagBitsEXT::e12Bpc : return "12Bpc";
+      case ImageCompressionFixedRateFlagBitsEXT::e13Bpc : return "13Bpc";
+      case ImageCompressionFixedRateFlagBitsEXT::e14Bpc : return "14Bpc";
+      case ImageCompressionFixedRateFlagBitsEXT::e15Bpc : return "15Bpc";
+      case ImageCompressionFixedRateFlagBitsEXT::e16Bpc : return "16Bpc";
+      case ImageCompressionFixedRateFlagBitsEXT::e17Bpc : return "17Bpc";
+      case ImageCompressionFixedRateFlagBitsEXT::e18Bpc : return "18Bpc";
+      case ImageCompressionFixedRateFlagBitsEXT::e19Bpc : return "19Bpc";
+      case ImageCompressionFixedRateFlagBitsEXT::e20Bpc : return "20Bpc";
+      case ImageCompressionFixedRateFlagBitsEXT::e21Bpc : return "21Bpc";
+      case ImageCompressionFixedRateFlagBitsEXT::e22Bpc : return "22Bpc";
+      case ImageCompressionFixedRateFlagBitsEXT::e23Bpc : return "23Bpc";
+      case ImageCompressionFixedRateFlagBitsEXT::e24Bpc : return "24Bpc";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+  //=== VK_EXT_device_fault ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( DeviceFaultAddressTypeEXT value )
+  {
+    switch ( value )
+    {
+      case DeviceFaultAddressTypeEXT::eNone : return "None";
+      case DeviceFaultAddressTypeEXT::eReadInvalid : return "ReadInvalid";
+      case DeviceFaultAddressTypeEXT::eWriteInvalid : return "WriteInvalid";
+      case DeviceFaultAddressTypeEXT::eExecuteInvalid : return "ExecuteInvalid";
+      case DeviceFaultAddressTypeEXT::eInstructionPointerUnknown : return "InstructionPointerUnknown";
+      case DeviceFaultAddressTypeEXT::eInstructionPointerInvalid : return "InstructionPointerInvalid";
+      case DeviceFaultAddressTypeEXT::eInstructionPointerFault : return "InstructionPointerFault";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( DeviceFaultVendorBinaryHeaderVersionEXT value )
+  {
+    switch ( value )
+    {
+      case DeviceFaultVendorBinaryHeaderVersionEXT::eOne : return "One";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
+  //=== VK_EXT_directfb_surface ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( DirectFBSurfaceCreateFlagBitsEXT )
+  {
+    return "(void)";
+  }
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+
+  //=== VK_KHR_ray_tracing_pipeline ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( RayTracingShaderGroupTypeKHR value )
+  {
+    switch ( value )
+    {
+      case RayTracingShaderGroupTypeKHR::eGeneral : return "General";
+      case RayTracingShaderGroupTypeKHR::eTrianglesHitGroup : return "TrianglesHitGroup";
+      case RayTracingShaderGroupTypeKHR::eProceduralHitGroup : return "ProceduralHitGroup";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( ShaderGroupShaderKHR value )
+  {
+    switch ( value )
+    {
+      case ShaderGroupShaderKHR::eGeneral : return "General";
+      case ShaderGroupShaderKHR::eClosestHit : return "ClosestHit";
+      case ShaderGroupShaderKHR::eAnyHit : return "AnyHit";
+      case ShaderGroupShaderKHR::eIntersection : return "Intersection";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+  //=== VK_EXT_device_address_binding_report ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( DeviceAddressBindingFlagBitsEXT value )
+  {
+    switch ( value )
+    {
+      case DeviceAddressBindingFlagBitsEXT::eInternalObject : return "InternalObject";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( DeviceAddressBindingTypeEXT value )
+  {
+    switch ( value )
+    {
+      case DeviceAddressBindingTypeEXT::eBind : return "Bind";
+      case DeviceAddressBindingTypeEXT::eUnbind : return "Unbind";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_buffer_collection ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( ImageConstraintsInfoFlagBitsFUCHSIA value )
+  {
+    switch ( value )
+    {
+      case ImageConstraintsInfoFlagBitsFUCHSIA::eCpuReadRarely : return "CpuReadRarely";
+      case ImageConstraintsInfoFlagBitsFUCHSIA::eCpuReadOften : return "CpuReadOften";
+      case ImageConstraintsInfoFlagBitsFUCHSIA::eCpuWriteRarely : return "CpuWriteRarely";
+      case ImageConstraintsInfoFlagBitsFUCHSIA::eCpuWriteOften : return "CpuWriteOften";
+      case ImageConstraintsInfoFlagBitsFUCHSIA::eProtectedOptional : return "ProtectedOptional";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( ImageFormatConstraintsFlagBitsFUCHSIA )
+  {
+    return "(void)";
+  }
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_SCREEN_QNX )
+  //=== VK_QNX_screen_surface ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( ScreenSurfaceCreateFlagBitsQNX )
+  {
+    return "(void)";
+  }
+#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+
+  //=== VK_EXT_opacity_micromap ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( MicromapTypeEXT value )
+  {
+    switch ( value )
+    {
+      case MicromapTypeEXT::eOpacityMicromap : return "OpacityMicromap";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( BuildMicromapFlagBitsEXT value )
+  {
+    switch ( value )
+    {
+      case BuildMicromapFlagBitsEXT::ePreferFastTrace : return "PreferFastTrace";
+      case BuildMicromapFlagBitsEXT::ePreferFastBuild : return "PreferFastBuild";
+      case BuildMicromapFlagBitsEXT::eAllowCompaction : return "AllowCompaction";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( CopyMicromapModeEXT value )
+  {
+    switch ( value )
+    {
+      case CopyMicromapModeEXT::eClone : return "Clone";
+      case CopyMicromapModeEXT::eSerialize : return "Serialize";
+      case CopyMicromapModeEXT::eDeserialize : return "Deserialize";
+      case CopyMicromapModeEXT::eCompact : return "Compact";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( MicromapCreateFlagBitsEXT value )
+  {
+    switch ( value )
+    {
+      case MicromapCreateFlagBitsEXT::eDeviceAddressCaptureReplay : return "DeviceAddressCaptureReplay";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( BuildMicromapModeEXT value )
+  {
+    switch ( value )
+    {
+      case BuildMicromapModeEXT::eBuild : return "Build";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( OpacityMicromapFormatEXT value )
+  {
+    switch ( value )
+    {
+      case OpacityMicromapFormatEXT::e2State : return "2State";
+      case OpacityMicromapFormatEXT::e4State : return "4State";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( OpacityMicromapSpecialIndexEXT value )
+  {
+    switch ( value )
+    {
+      case OpacityMicromapSpecialIndexEXT::eFullyTransparent : return "FullyTransparent";
+      case OpacityMicromapSpecialIndexEXT::eFullyOpaque : return "FullyOpaque";
+      case OpacityMicromapSpecialIndexEXT::eFullyUnknownTransparent : return "FullyUnknownTransparent";
+      case OpacityMicromapSpecialIndexEXT::eFullyUnknownOpaque : return "FullyUnknownOpaque";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+  //=== VK_NV_memory_decompression ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( MemoryDecompressionMethodFlagBitsNV value )
+  {
+    switch ( value )
+    {
+      case MemoryDecompressionMethodFlagBitsNV::eGdeflate10 : return "Gdeflate10";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+  //=== VK_EXT_subpass_merge_feedback ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( SubpassMergeStatusEXT value )
+  {
+    switch ( value )
+    {
+      case SubpassMergeStatusEXT::eMerged : return "Merged";
+      case SubpassMergeStatusEXT::eDisallowed : return "Disallowed";
+      case SubpassMergeStatusEXT::eNotMergedSideEffects : return "NotMergedSideEffects";
+      case SubpassMergeStatusEXT::eNotMergedSamplesMismatch : return "NotMergedSamplesMismatch";
+      case SubpassMergeStatusEXT::eNotMergedViewsMismatch : return "NotMergedViewsMismatch";
+      case SubpassMergeStatusEXT::eNotMergedAliasing : return "NotMergedAliasing";
+      case SubpassMergeStatusEXT::eNotMergedDependencies : return "NotMergedDependencies";
+      case SubpassMergeStatusEXT::eNotMergedIncompatibleInputAttachment : return "NotMergedIncompatibleInputAttachment";
+      case SubpassMergeStatusEXT::eNotMergedTooManyAttachments : return "NotMergedTooManyAttachments";
+      case SubpassMergeStatusEXT::eNotMergedInsufficientStorage : return "NotMergedInsufficientStorage";
+      case SubpassMergeStatusEXT::eNotMergedDepthStencilCount : return "NotMergedDepthStencilCount";
+      case SubpassMergeStatusEXT::eNotMergedResolveAttachmentReuse : return "NotMergedResolveAttachmentReuse";
+      case SubpassMergeStatusEXT::eNotMergedSingleSubpass : return "NotMergedSingleSubpass";
+      case SubpassMergeStatusEXT::eNotMergedUnspecified : return "NotMergedUnspecified";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+  //=== VK_LUNARG_direct_driver_loading ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( DirectDriverLoadingModeLUNARG value )
+  {
+    switch ( value )
+    {
+      case DirectDriverLoadingModeLUNARG::eExclusive : return "Exclusive";
+      case DirectDriverLoadingModeLUNARG::eInclusive : return "Inclusive";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( DirectDriverLoadingFlagBitsLUNARG )
+  {
+    return "(void)";
+  }
+
+  //=== VK_EXT_rasterization_order_attachment_access ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineColorBlendStateCreateFlagBits value )
+  {
+    switch ( value )
+    {
+      case PipelineColorBlendStateCreateFlagBits::eRasterizationOrderAttachmentAccessEXT : return "RasterizationOrderAttachmentAccessEXT";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineDepthStencilStateCreateFlagBits value )
+  {
+    switch ( value )
+    {
+      case PipelineDepthStencilStateCreateFlagBits::eRasterizationOrderAttachmentDepthAccessEXT : return "RasterizationOrderAttachmentDepthAccessEXT";
+      case PipelineDepthStencilStateCreateFlagBits::eRasterizationOrderAttachmentStencilAccessEXT : return "RasterizationOrderAttachmentStencilAccessEXT";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+  //=== VK_NV_optical_flow ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( OpticalFlowUsageFlagBitsNV value )
+  {
+    switch ( value )
+    {
+      case OpticalFlowUsageFlagBitsNV::eUnknown : return "Unknown";
+      case OpticalFlowUsageFlagBitsNV::eInput : return "Input";
+      case OpticalFlowUsageFlagBitsNV::eOutput : return "Output";
+      case OpticalFlowUsageFlagBitsNV::eHint : return "Hint";
+      case OpticalFlowUsageFlagBitsNV::eCost : return "Cost";
+      case OpticalFlowUsageFlagBitsNV::eGlobalFlow : return "GlobalFlow";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( OpticalFlowGridSizeFlagBitsNV value )
+  {
+    switch ( value )
+    {
+      case OpticalFlowGridSizeFlagBitsNV::eUnknown : return "Unknown";
+      case OpticalFlowGridSizeFlagBitsNV::e1X1 : return "1X1";
+      case OpticalFlowGridSizeFlagBitsNV::e2X2 : return "2X2";
+      case OpticalFlowGridSizeFlagBitsNV::e4X4 : return "4X4";
+      case OpticalFlowGridSizeFlagBitsNV::e8X8 : return "8X8";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( OpticalFlowPerformanceLevelNV value )
+  {
+    switch ( value )
+    {
+      case OpticalFlowPerformanceLevelNV::eUnknown : return "Unknown";
+      case OpticalFlowPerformanceLevelNV::eSlow : return "Slow";
+      case OpticalFlowPerformanceLevelNV::eMedium : return "Medium";
+      case OpticalFlowPerformanceLevelNV::eFast : return "Fast";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( OpticalFlowSessionBindingPointNV value )
+  {
+    switch ( value )
+    {
+      case OpticalFlowSessionBindingPointNV::eUnknown : return "Unknown";
+      case OpticalFlowSessionBindingPointNV::eInput : return "Input";
+      case OpticalFlowSessionBindingPointNV::eReference : return "Reference";
+      case OpticalFlowSessionBindingPointNV::eHint : return "Hint";
+      case OpticalFlowSessionBindingPointNV::eFlowVector : return "FlowVector";
+      case OpticalFlowSessionBindingPointNV::eBackwardFlowVector : return "BackwardFlowVector";
+      case OpticalFlowSessionBindingPointNV::eCost : return "Cost";
+      case OpticalFlowSessionBindingPointNV::eBackwardCost : return "BackwardCost";
+      case OpticalFlowSessionBindingPointNV::eGlobalFlow : return "GlobalFlow";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( OpticalFlowSessionCreateFlagBitsNV value )
+  {
+    switch ( value )
+    {
+      case OpticalFlowSessionCreateFlagBitsNV::eEnableHint : return "EnableHint";
+      case OpticalFlowSessionCreateFlagBitsNV::eEnableCost : return "EnableCost";
+      case OpticalFlowSessionCreateFlagBitsNV::eEnableGlobalFlow : return "EnableGlobalFlow";
+      case OpticalFlowSessionCreateFlagBitsNV::eAllowRegions : return "AllowRegions";
+      case OpticalFlowSessionCreateFlagBitsNV::eBothDirections : return "BothDirections";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+  VULKAN_HPP_INLINE std::string to_string( OpticalFlowExecuteFlagBitsNV value )
+  {
+    switch ( value )
+    {
+      case OpticalFlowExecuteFlagBitsNV::eDisableTemporalHints : return "DisableTemporalHints";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+  //=== VK_NV_ray_tracing_invocation_reorder ===
+
+
+  VULKAN_HPP_INLINE std::string to_string( RayTracingInvocationReorderModeNV value )
+  {
+    switch ( value )
+    {
+      case RayTracingInvocationReorderModeNV::eNone : return "None";
+      case RayTracingInvocationReorderModeNV::eReorder : return "Reorder";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+
+  }
+
+
+} // namespace VULKAN_HPP_NAMESPACE
+#endif
diff --git a/host/libs/graphics_detector/include/vulkan/vulkan_vi.h b/host/libs/graphics_detector/include/vulkan/vulkan_vi.h
new file mode 100644
index 0000000..0355e7a
--- /dev/null
+++ b/host/libs/graphics_detector/include/vulkan/vulkan_vi.h
@@ -0,0 +1,47 @@
+#ifndef VULKAN_VI_H_
+#define VULKAN_VI_H_ 1
+
+/*
+** Copyright 2015-2022 The Khronos Group Inc.
+**
+** SPDX-License-Identifier: Apache-2.0
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+#define VK_NN_vi_surface 1
+#define VK_NN_VI_SURFACE_SPEC_VERSION     1
+#define VK_NN_VI_SURFACE_EXTENSION_NAME   "VK_NN_vi_surface"
+typedef VkFlags VkViSurfaceCreateFlagsNN;
+typedef struct VkViSurfaceCreateInfoNN {
+    VkStructureType             sType;
+    const void*                 pNext;
+    VkViSurfaceCreateFlagsNN    flags;
+    void*                       window;
+} VkViSurfaceCreateInfoNN;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateViSurfaceNN)(VkInstance instance, const VkViSurfaceCreateInfoNN* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateViSurfaceNN(
+    VkInstance                                  instance,
+    const VkViSurfaceCreateInfoNN*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+#endif
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/host/libs/graphics_detector/include/vulkan/vulkan_wayland.h b/host/libs/graphics_detector/include/vulkan/vulkan_wayland.h
new file mode 100644
index 0000000..9afd0b7
--- /dev/null
+++ b/host/libs/graphics_detector/include/vulkan/vulkan_wayland.h
@@ -0,0 +1,54 @@
+#ifndef VULKAN_WAYLAND_H_
+#define VULKAN_WAYLAND_H_ 1
+
+/*
+** Copyright 2015-2022 The Khronos Group Inc.
+**
+** SPDX-License-Identifier: Apache-2.0
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+#define VK_KHR_wayland_surface 1
+#define VK_KHR_WAYLAND_SURFACE_SPEC_VERSION 6
+#define VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME "VK_KHR_wayland_surface"
+typedef VkFlags VkWaylandSurfaceCreateFlagsKHR;
+typedef struct VkWaylandSurfaceCreateInfoKHR {
+    VkStructureType                   sType;
+    const void*                       pNext;
+    VkWaylandSurfaceCreateFlagsKHR    flags;
+    struct wl_display*                display;
+    struct wl_surface*                surface;
+} VkWaylandSurfaceCreateInfoKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateWaylandSurfaceKHR)(VkInstance instance, const VkWaylandSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface);
+typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, struct wl_display* display);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateWaylandSurfaceKHR(
+    VkInstance                                  instance,
+    const VkWaylandSurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+
+VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceWaylandPresentationSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    struct wl_display*                          display);
+#endif
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/host/libs/graphics_detector/include/vulkan/vulkan_win32.h b/host/libs/graphics_detector/include/vulkan/vulkan_win32.h
new file mode 100644
index 0000000..a8e46c8
--- /dev/null
+++ b/host/libs/graphics_detector/include/vulkan/vulkan_win32.h
@@ -0,0 +1,333 @@
+#ifndef VULKAN_WIN32_H_
+#define VULKAN_WIN32_H_ 1
+
+/*
+** Copyright 2015-2022 The Khronos Group Inc.
+**
+** SPDX-License-Identifier: Apache-2.0
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+#define VK_KHR_win32_surface 1
+#define VK_KHR_WIN32_SURFACE_SPEC_VERSION 6
+#define VK_KHR_WIN32_SURFACE_EXTENSION_NAME "VK_KHR_win32_surface"
+typedef VkFlags VkWin32SurfaceCreateFlagsKHR;
+typedef struct VkWin32SurfaceCreateInfoKHR {
+    VkStructureType                 sType;
+    const void*                     pNext;
+    VkWin32SurfaceCreateFlagsKHR    flags;
+    HINSTANCE                       hinstance;
+    HWND                            hwnd;
+} VkWin32SurfaceCreateInfoKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateWin32SurfaceKHR)(VkInstance instance, const VkWin32SurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface);
+typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateWin32SurfaceKHR(
+    VkInstance                                  instance,
+    const VkWin32SurfaceCreateInfoKHR*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+
+VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceWin32PresentationSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex);
+#endif
+
+
+#define VK_KHR_external_memory_win32 1
+#define VK_KHR_EXTERNAL_MEMORY_WIN32_SPEC_VERSION 1
+#define VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME "VK_KHR_external_memory_win32"
+typedef struct VkImportMemoryWin32HandleInfoKHR {
+    VkStructureType                       sType;
+    const void*                           pNext;
+    VkExternalMemoryHandleTypeFlagBits    handleType;
+    HANDLE                                handle;
+    LPCWSTR                               name;
+} VkImportMemoryWin32HandleInfoKHR;
+
+typedef struct VkExportMemoryWin32HandleInfoKHR {
+    VkStructureType               sType;
+    const void*                   pNext;
+    const SECURITY_ATTRIBUTES*    pAttributes;
+    DWORD                         dwAccess;
+    LPCWSTR                       name;
+} VkExportMemoryWin32HandleInfoKHR;
+
+typedef struct VkMemoryWin32HandlePropertiesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           memoryTypeBits;
+} VkMemoryWin32HandlePropertiesKHR;
+
+typedef struct VkMemoryGetWin32HandleInfoKHR {
+    VkStructureType                       sType;
+    const void*                           pNext;
+    VkDeviceMemory                        memory;
+    VkExternalMemoryHandleTypeFlagBits    handleType;
+} VkMemoryGetWin32HandleInfoKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryWin32HandleKHR)(VkDevice device, const VkMemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle);
+typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryWin32HandlePropertiesKHR)(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, VkMemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryWin32HandleKHR(
+    VkDevice                                    device,
+    const VkMemoryGetWin32HandleInfoKHR*        pGetWin32HandleInfo,
+    HANDLE*                                     pHandle);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryWin32HandlePropertiesKHR(
+    VkDevice                                    device,
+    VkExternalMemoryHandleTypeFlagBits          handleType,
+    HANDLE                                      handle,
+    VkMemoryWin32HandlePropertiesKHR*           pMemoryWin32HandleProperties);
+#endif
+
+
+#define VK_KHR_win32_keyed_mutex 1
+#define VK_KHR_WIN32_KEYED_MUTEX_SPEC_VERSION 1
+#define VK_KHR_WIN32_KEYED_MUTEX_EXTENSION_NAME "VK_KHR_win32_keyed_mutex"
+typedef struct VkWin32KeyedMutexAcquireReleaseInfoKHR {
+    VkStructureType          sType;
+    const void*              pNext;
+    uint32_t                 acquireCount;
+    const VkDeviceMemory*    pAcquireSyncs;
+    const uint64_t*          pAcquireKeys;
+    const uint32_t*          pAcquireTimeouts;
+    uint32_t                 releaseCount;
+    const VkDeviceMemory*    pReleaseSyncs;
+    const uint64_t*          pReleaseKeys;
+} VkWin32KeyedMutexAcquireReleaseInfoKHR;
+
+
+
+#define VK_KHR_external_semaphore_win32 1
+#define VK_KHR_EXTERNAL_SEMAPHORE_WIN32_SPEC_VERSION 1
+#define VK_KHR_EXTERNAL_SEMAPHORE_WIN32_EXTENSION_NAME "VK_KHR_external_semaphore_win32"
+typedef struct VkImportSemaphoreWin32HandleInfoKHR {
+    VkStructureType                          sType;
+    const void*                              pNext;
+    VkSemaphore                              semaphore;
+    VkSemaphoreImportFlags                   flags;
+    VkExternalSemaphoreHandleTypeFlagBits    handleType;
+    HANDLE                                   handle;
+    LPCWSTR                                  name;
+} VkImportSemaphoreWin32HandleInfoKHR;
+
+typedef struct VkExportSemaphoreWin32HandleInfoKHR {
+    VkStructureType               sType;
+    const void*                   pNext;
+    const SECURITY_ATTRIBUTES*    pAttributes;
+    DWORD                         dwAccess;
+    LPCWSTR                       name;
+} VkExportSemaphoreWin32HandleInfoKHR;
+
+typedef struct VkD3D12FenceSubmitInfoKHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           waitSemaphoreValuesCount;
+    const uint64_t*    pWaitSemaphoreValues;
+    uint32_t           signalSemaphoreValuesCount;
+    const uint64_t*    pSignalSemaphoreValues;
+} VkD3D12FenceSubmitInfoKHR;
+
+typedef struct VkSemaphoreGetWin32HandleInfoKHR {
+    VkStructureType                          sType;
+    const void*                              pNext;
+    VkSemaphore                              semaphore;
+    VkExternalSemaphoreHandleTypeFlagBits    handleType;
+} VkSemaphoreGetWin32HandleInfoKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkImportSemaphoreWin32HandleKHR)(VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkGetSemaphoreWin32HandleKHR)(VkDevice device, const VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkImportSemaphoreWin32HandleKHR(
+    VkDevice                                    device,
+    const VkImportSemaphoreWin32HandleInfoKHR*  pImportSemaphoreWin32HandleInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreWin32HandleKHR(
+    VkDevice                                    device,
+    const VkSemaphoreGetWin32HandleInfoKHR*     pGetWin32HandleInfo,
+    HANDLE*                                     pHandle);
+#endif
+
+
+#define VK_KHR_external_fence_win32 1
+#define VK_KHR_EXTERNAL_FENCE_WIN32_SPEC_VERSION 1
+#define VK_KHR_EXTERNAL_FENCE_WIN32_EXTENSION_NAME "VK_KHR_external_fence_win32"
+typedef struct VkImportFenceWin32HandleInfoKHR {
+    VkStructureType                      sType;
+    const void*                          pNext;
+    VkFence                              fence;
+    VkFenceImportFlags                   flags;
+    VkExternalFenceHandleTypeFlagBits    handleType;
+    HANDLE                               handle;
+    LPCWSTR                              name;
+} VkImportFenceWin32HandleInfoKHR;
+
+typedef struct VkExportFenceWin32HandleInfoKHR {
+    VkStructureType               sType;
+    const void*                   pNext;
+    const SECURITY_ATTRIBUTES*    pAttributes;
+    DWORD                         dwAccess;
+    LPCWSTR                       name;
+} VkExportFenceWin32HandleInfoKHR;
+
+typedef struct VkFenceGetWin32HandleInfoKHR {
+    VkStructureType                      sType;
+    const void*                          pNext;
+    VkFence                              fence;
+    VkExternalFenceHandleTypeFlagBits    handleType;
+} VkFenceGetWin32HandleInfoKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkImportFenceWin32HandleKHR)(VkDevice device, const VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkGetFenceWin32HandleKHR)(VkDevice device, const VkFenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkImportFenceWin32HandleKHR(
+    VkDevice                                    device,
+    const VkImportFenceWin32HandleInfoKHR*      pImportFenceWin32HandleInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetFenceWin32HandleKHR(
+    VkDevice                                    device,
+    const VkFenceGetWin32HandleInfoKHR*         pGetWin32HandleInfo,
+    HANDLE*                                     pHandle);
+#endif
+
+
+#define VK_NV_external_memory_win32 1
+#define VK_NV_EXTERNAL_MEMORY_WIN32_SPEC_VERSION 1
+#define VK_NV_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME "VK_NV_external_memory_win32"
+typedef struct VkImportMemoryWin32HandleInfoNV {
+    VkStructureType                      sType;
+    const void*                          pNext;
+    VkExternalMemoryHandleTypeFlagsNV    handleType;
+    HANDLE                               handle;
+} VkImportMemoryWin32HandleInfoNV;
+
+typedef struct VkExportMemoryWin32HandleInfoNV {
+    VkStructureType               sType;
+    const void*                   pNext;
+    const SECURITY_ATTRIBUTES*    pAttributes;
+    DWORD                         dwAccess;
+} VkExportMemoryWin32HandleInfoNV;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryWin32HandleNV)(VkDevice device, VkDeviceMemory memory, VkExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryWin32HandleNV(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    VkExternalMemoryHandleTypeFlagsNV           handleType,
+    HANDLE*                                     pHandle);
+#endif
+
+
+#define VK_NV_win32_keyed_mutex 1
+#define VK_NV_WIN32_KEYED_MUTEX_SPEC_VERSION 2
+#define VK_NV_WIN32_KEYED_MUTEX_EXTENSION_NAME "VK_NV_win32_keyed_mutex"
+typedef struct VkWin32KeyedMutexAcquireReleaseInfoNV {
+    VkStructureType          sType;
+    const void*              pNext;
+    uint32_t                 acquireCount;
+    const VkDeviceMemory*    pAcquireSyncs;
+    const uint64_t*          pAcquireKeys;
+    const uint32_t*          pAcquireTimeoutMilliseconds;
+    uint32_t                 releaseCount;
+    const VkDeviceMemory*    pReleaseSyncs;
+    const uint64_t*          pReleaseKeys;
+} VkWin32KeyedMutexAcquireReleaseInfoNV;
+
+
+
+#define VK_EXT_full_screen_exclusive 1
+#define VK_EXT_FULL_SCREEN_EXCLUSIVE_SPEC_VERSION 4
+#define VK_EXT_FULL_SCREEN_EXCLUSIVE_EXTENSION_NAME "VK_EXT_full_screen_exclusive"
+
+typedef enum VkFullScreenExclusiveEXT {
+    VK_FULL_SCREEN_EXCLUSIVE_DEFAULT_EXT = 0,
+    VK_FULL_SCREEN_EXCLUSIVE_ALLOWED_EXT = 1,
+    VK_FULL_SCREEN_EXCLUSIVE_DISALLOWED_EXT = 2,
+    VK_FULL_SCREEN_EXCLUSIVE_APPLICATION_CONTROLLED_EXT = 3,
+    VK_FULL_SCREEN_EXCLUSIVE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkFullScreenExclusiveEXT;
+typedef struct VkSurfaceFullScreenExclusiveInfoEXT {
+    VkStructureType             sType;
+    void*                       pNext;
+    VkFullScreenExclusiveEXT    fullScreenExclusive;
+} VkSurfaceFullScreenExclusiveInfoEXT;
+
+typedef struct VkSurfaceCapabilitiesFullScreenExclusiveEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           fullScreenExclusiveSupported;
+} VkSurfaceCapabilitiesFullScreenExclusiveEXT;
+
+typedef struct VkSurfaceFullScreenExclusiveWin32InfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    HMONITOR           hmonitor;
+} VkSurfaceFullScreenExclusiveWin32InfoEXT;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes);
+typedef VkResult (VKAPI_PTR *PFN_vkAcquireFullScreenExclusiveModeEXT)(VkDevice device, VkSwapchainKHR swapchain);
+typedef VkResult (VKAPI_PTR *PFN_vkReleaseFullScreenExclusiveModeEXT)(VkDevice device, VkSwapchainKHR swapchain);
+typedef VkResult (VKAPI_PTR *PFN_vkGetDeviceGroupSurfacePresentModes2EXT)(VkDevice device, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VkDeviceGroupPresentModeFlagsKHR* pModes);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfacePresentModes2EXT(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSurfaceInfo2KHR*      pSurfaceInfo,
+    uint32_t*                                   pPresentModeCount,
+    VkPresentModeKHR*                           pPresentModes);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkAcquireFullScreenExclusiveModeEXT(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkReleaseFullScreenExclusiveModeEXT(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupSurfacePresentModes2EXT(
+    VkDevice                                    device,
+    const VkPhysicalDeviceSurfaceInfo2KHR*      pSurfaceInfo,
+    VkDeviceGroupPresentModeFlagsKHR*           pModes);
+#endif
+
+
+#define VK_NV_acquire_winrt_display 1
+#define VK_NV_ACQUIRE_WINRT_DISPLAY_SPEC_VERSION 1
+#define VK_NV_ACQUIRE_WINRT_DISPLAY_EXTENSION_NAME "VK_NV_acquire_winrt_display"
+typedef VkResult (VKAPI_PTR *PFN_vkAcquireWinrtDisplayNV)(VkPhysicalDevice physicalDevice, VkDisplayKHR display);
+typedef VkResult (VKAPI_PTR *PFN_vkGetWinrtDisplayNV)(VkPhysicalDevice physicalDevice, uint32_t deviceRelativeId, VkDisplayKHR* pDisplay);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkAcquireWinrtDisplayNV(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetWinrtDisplayNV(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    deviceRelativeId,
+    VkDisplayKHR*                               pDisplay);
+#endif
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/host/libs/graphics_detector/include/vulkan/vulkan_xcb.h b/host/libs/graphics_detector/include/vulkan/vulkan_xcb.h
new file mode 100644
index 0000000..68e61b8
--- /dev/null
+++ b/host/libs/graphics_detector/include/vulkan/vulkan_xcb.h
@@ -0,0 +1,55 @@
+#ifndef VULKAN_XCB_H_
+#define VULKAN_XCB_H_ 1
+
+/*
+** Copyright 2015-2022 The Khronos Group Inc.
+**
+** SPDX-License-Identifier: Apache-2.0
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+#define VK_KHR_xcb_surface 1
+#define VK_KHR_XCB_SURFACE_SPEC_VERSION   6
+#define VK_KHR_XCB_SURFACE_EXTENSION_NAME "VK_KHR_xcb_surface"
+typedef VkFlags VkXcbSurfaceCreateFlagsKHR;
+typedef struct VkXcbSurfaceCreateInfoKHR {
+    VkStructureType               sType;
+    const void*                   pNext;
+    VkXcbSurfaceCreateFlagsKHR    flags;
+    xcb_connection_t*             connection;
+    xcb_window_t                  window;
+} VkXcbSurfaceCreateInfoKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateXcbSurfaceKHR)(VkInstance instance, const VkXcbSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface);
+typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateXcbSurfaceKHR(
+    VkInstance                                  instance,
+    const VkXcbSurfaceCreateInfoKHR*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+
+VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceXcbPresentationSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    xcb_connection_t*                           connection,
+    xcb_visualid_t                              visual_id);
+#endif
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/host/libs/graphics_detector/include/vulkan/vulkan_xlib.h b/host/libs/graphics_detector/include/vulkan/vulkan_xlib.h
new file mode 100644
index 0000000..ea5360a
--- /dev/null
+++ b/host/libs/graphics_detector/include/vulkan/vulkan_xlib.h
@@ -0,0 +1,55 @@
+#ifndef VULKAN_XLIB_H_
+#define VULKAN_XLIB_H_ 1
+
+/*
+** Copyright 2015-2022 The Khronos Group Inc.
+**
+** SPDX-License-Identifier: Apache-2.0
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+#define VK_KHR_xlib_surface 1
+#define VK_KHR_XLIB_SURFACE_SPEC_VERSION  6
+#define VK_KHR_XLIB_SURFACE_EXTENSION_NAME "VK_KHR_xlib_surface"
+typedef VkFlags VkXlibSurfaceCreateFlagsKHR;
+typedef struct VkXlibSurfaceCreateInfoKHR {
+    VkStructureType                sType;
+    const void*                    pNext;
+    VkXlibSurfaceCreateFlagsKHR    flags;
+    Display*                       dpy;
+    Window                         window;
+} VkXlibSurfaceCreateInfoKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateXlibSurfaceKHR)(VkInstance instance, const VkXlibSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface);
+typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, Display* dpy, VisualID visualID);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateXlibSurfaceKHR(
+    VkInstance                                  instance,
+    const VkXlibSurfaceCreateInfoKHR*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+
+VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceXlibPresentationSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    Display*                                    dpy,
+    VisualID                                    visualID);
+#endif
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/host/libs/graphics_detector/include/vulkan/vulkan_xlib_xrandr.h b/host/libs/graphics_detector/include/vulkan/vulkan_xlib_xrandr.h
new file mode 100644
index 0000000..8fc35cf
--- /dev/null
+++ b/host/libs/graphics_detector/include/vulkan/vulkan_xlib_xrandr.h
@@ -0,0 +1,45 @@
+#ifndef VULKAN_XLIB_XRANDR_H_
+#define VULKAN_XLIB_XRANDR_H_ 1
+
+/*
+** Copyright 2015-2022 The Khronos Group Inc.
+**
+** SPDX-License-Identifier: Apache-2.0
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+#define VK_EXT_acquire_xlib_display 1
+#define VK_EXT_ACQUIRE_XLIB_DISPLAY_SPEC_VERSION 1
+#define VK_EXT_ACQUIRE_XLIB_DISPLAY_EXTENSION_NAME "VK_EXT_acquire_xlib_display"
+typedef VkResult (VKAPI_PTR *PFN_vkAcquireXlibDisplayEXT)(VkPhysicalDevice physicalDevice, Display* dpy, VkDisplayKHR display);
+typedef VkResult (VKAPI_PTR *PFN_vkGetRandROutputDisplayEXT)(VkPhysicalDevice physicalDevice, Display* dpy, RROutput rrOutput, VkDisplayKHR* pDisplay);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkAcquireXlibDisplayEXT(
+    VkPhysicalDevice                            physicalDevice,
+    Display*                                    dpy,
+    VkDisplayKHR                                display);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetRandROutputDisplayEXT(
+    VkPhysicalDevice                            physicalDevice,
+    Display*                                    dpy,
+    RROutput                                    rrOutput,
+    VkDisplayKHR*                               pDisplay);
+#endif
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/host/libs/graphics_detector/lib.cpp b/host/libs/graphics_detector/lib.cpp
new file mode 100644
index 0000000..d86b8bc
--- /dev/null
+++ b/host/libs/graphics_detector/lib.cpp
@@ -0,0 +1,47 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/libs/graphics_detector/lib.h"
+
+#include <android-base/logging.h>
+#include <android-base/strings.h>
+#include <dlfcn.h>
+
+namespace cuttlefish {
+
+void Lib::LibraryCloser::operator()(void* library) {
+  if (library != nullptr) {
+    dlclose(library);
+  }
+}
+
+std::optional<Lib> Lib::Load(const char* name) {
+  Lib lib;
+  lib.lib_ = ManagedLibrary(dlopen(name, RTLD_NOW | RTLD_LOCAL));
+  if (!lib.lib_) {
+    LOG(ERROR) << "Failed to load library: " << name;
+    return std::nullopt;
+  }
+
+  LOG(VERBOSE) << "Loaded library: " << name;
+  return std::move(lib);
+}
+
+Lib::FunctionPtr Lib::GetSymbol(const char* name) {
+  return reinterpret_cast<FunctionPtr>(dlsym(lib_.get(), name));
+}
+
+}  // namespace cuttlefish
diff --git a/host/libs/graphics_detector/lib.h b/host/libs/graphics_detector/lib.h
new file mode 100644
index 0000000..c8b0efd
--- /dev/null
+++ b/host/libs/graphics_detector/lib.h
@@ -0,0 +1,52 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <functional>
+#include <memory>
+#include <optional>
+
+namespace cuttlefish {
+
+class Lib {
+ public:
+  static std::optional<Lib> Load(const char* name);
+
+  Lib() = default;
+
+  Lib(const Lib&) = delete;
+  Lib& operator=(const Lib&) = delete;
+
+  Lib(Lib&&) = default;
+  Lib& operator=(Lib&&) = default;
+
+  using FunctionPtr = void (*)(void);
+
+  FunctionPtr GetSymbol(const char* name);
+
+ private:
+  struct LibraryCloser {
+   public:
+    void operator()(void* library);
+  };
+
+  using ManagedLibrary = std::unique_ptr<void, LibraryCloser>;
+
+  ManagedLibrary lib_;
+};
+
+}  // namespace cuttlefish
diff --git a/host/libs/graphics_detector/shaders/Android.bp b/host/libs/graphics_detector/shaders/Android.bp
new file mode 100644
index 0000000..e486904
--- /dev/null
+++ b/host/libs/graphics_detector/shaders/Android.bp
@@ -0,0 +1,29 @@
+//
+// Copyright (C) 2023 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "device_google_cuttlefish_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    //   SPDX-license-identifier-MIT
+    default_applicable_licenses: ["device_google_cuttlefish_license"],
+}
+
+cc_binary_host {
+    name: "generate_shader_embed",
+    srcs: ["generate_shader_embed.cpp"],
+}
diff --git a/host/libs/graphics_detector/shaders/blit_texture.frag b/host/libs/graphics_detector/shaders/blit_texture.frag
new file mode 100644
index 0000000..4e1af09
--- /dev/null
+++ b/host/libs/graphics_detector/shaders/blit_texture.frag
@@ -0,0 +1,11 @@
+#version 460
+
+layout(set = 0, binding = 0) uniform sampler2D uTexture;
+
+layout(location = 0) noperspective in vec2 iUV;
+
+layout(location = 0) out vec4 oColor;
+
+void main() {
+    oColor = texture(uTexture, iUV);
+}
diff --git a/host/libs/graphics_detector/shaders/blit_texture.frag.inl b/host/libs/graphics_detector/shaders/blit_texture.frag.inl
new file mode 100644
index 0000000..a031322
--- /dev/null
+++ b/host/libs/graphics_detector/shaders/blit_texture.frag.inl
@@ -0,0 +1,56 @@
+// Generated from GLSL:
+//
+// #version 460
+//
+// layout(set = 0, binding = 0) uniform sampler2D uTexture;
+//
+// layout(location = 0) noperspective in vec2 iUV;
+//
+// layout(location = 0) out vec4 oColor;
+//
+// void main() {
+//     oColor = texture(uTexture, iUV);
+// }
+const std::vector<uint8_t> kBlitTextureFrag = {
+	0x03, 0x02, 0x23, 0x07, 0x00, 0x00, 0x01, 0x00, 0x0a, 0x00, 0x0d, 0x00, 0x14, 0x00, 0x00, 0x00,
+	0x00, 0x00, 0x00, 0x00, 0x11, 0x00, 0x02, 0x00, 0x01, 0x00, 0x00, 0x00, 0x0b, 0x00, 0x06, 0x00,
+	0x01, 0x00, 0x00, 0x00, 0x47, 0x4c, 0x53, 0x4c, 0x2e, 0x73, 0x74, 0x64, 0x2e, 0x34, 0x35, 0x30,
+	0x00, 0x00, 0x00, 0x00, 0x0e, 0x00, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
+	0x0f, 0x00, 0x07, 0x00, 0x04, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x6d, 0x61, 0x69, 0x6e,
+	0x00, 0x00, 0x00, 0x00, 0x09, 0x00, 0x00, 0x00, 0x11, 0x00, 0x00, 0x00, 0x10, 0x00, 0x03, 0x00,
+	0x04, 0x00, 0x00, 0x00, 0x07, 0x00, 0x00, 0x00, 0x03, 0x00, 0x03, 0x00, 0x02, 0x00, 0x00, 0x00,
+	0xcc, 0x01, 0x00, 0x00, 0x04, 0x00, 0x0a, 0x00, 0x47, 0x4c, 0x5f, 0x47, 0x4f, 0x4f, 0x47, 0x4c,
+	0x45, 0x5f, 0x63, 0x70, 0x70, 0x5f, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x5f, 0x6c, 0x69, 0x6e, 0x65,
+	0x5f, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x69, 0x76, 0x65, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00,
+	0x47, 0x4c, 0x5f, 0x47, 0x4f, 0x4f, 0x47, 0x4c, 0x45, 0x5f, 0x69, 0x6e, 0x63, 0x6c, 0x75, 0x64,
+	0x65, 0x5f, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x69, 0x76, 0x65, 0x00, 0x05, 0x00, 0x04, 0x00,
+	0x04, 0x00, 0x00, 0x00, 0x6d, 0x61, 0x69, 0x6e, 0x00, 0x00, 0x00, 0x00, 0x05, 0x00, 0x04, 0x00,
+	0x09, 0x00, 0x00, 0x00, 0x6f, 0x43, 0x6f, 0x6c, 0x6f, 0x72, 0x00, 0x00, 0x05, 0x00, 0x05, 0x00,
+	0x0d, 0x00, 0x00, 0x00, 0x75, 0x54, 0x65, 0x78, 0x74, 0x75, 0x72, 0x65, 0x00, 0x00, 0x00, 0x00,
+	0x05, 0x00, 0x03, 0x00, 0x11, 0x00, 0x00, 0x00, 0x69, 0x55, 0x56, 0x00, 0x47, 0x00, 0x04, 0x00,
+	0x09, 0x00, 0x00, 0x00, 0x1e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x47, 0x00, 0x04, 0x00,
+	0x0d, 0x00, 0x00, 0x00, 0x22, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x47, 0x00, 0x04, 0x00,
+	0x0d, 0x00, 0x00, 0x00, 0x21, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x47, 0x00, 0x03, 0x00,
+	0x11, 0x00, 0x00, 0x00, 0x0d, 0x00, 0x00, 0x00, 0x47, 0x00, 0x04, 0x00, 0x11, 0x00, 0x00, 0x00,
+	0x1e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x13, 0x00, 0x02, 0x00, 0x02, 0x00, 0x00, 0x00,
+	0x21, 0x00, 0x03, 0x00, 0x03, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x16, 0x00, 0x03, 0x00,
+	0x06, 0x00, 0x00, 0x00, 0x20, 0x00, 0x00, 0x00, 0x17, 0x00, 0x04, 0x00, 0x07, 0x00, 0x00, 0x00,
+	0x06, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x20, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00,
+	0x03, 0x00, 0x00, 0x00, 0x07, 0x00, 0x00, 0x00, 0x3b, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00,
+	0x09, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x19, 0x00, 0x09, 0x00, 0x0a, 0x00, 0x00, 0x00,
+	0x06, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+	0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x1b, 0x00, 0x03, 0x00,
+	0x0b, 0x00, 0x00, 0x00, 0x0a, 0x00, 0x00, 0x00, 0x20, 0x00, 0x04, 0x00, 0x0c, 0x00, 0x00, 0x00,
+	0x00, 0x00, 0x00, 0x00, 0x0b, 0x00, 0x00, 0x00, 0x3b, 0x00, 0x04, 0x00, 0x0c, 0x00, 0x00, 0x00,
+	0x0d, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x17, 0x00, 0x04, 0x00, 0x0f, 0x00, 0x00, 0x00,
+	0x06, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x20, 0x00, 0x04, 0x00, 0x10, 0x00, 0x00, 0x00,
+	0x01, 0x00, 0x00, 0x00, 0x0f, 0x00, 0x00, 0x00, 0x3b, 0x00, 0x04, 0x00, 0x10, 0x00, 0x00, 0x00,
+	0x11, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x36, 0x00, 0x05, 0x00, 0x02, 0x00, 0x00, 0x00,
+	0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0xf8, 0x00, 0x02, 0x00,
+	0x05, 0x00, 0x00, 0x00, 0x3d, 0x00, 0x04, 0x00, 0x0b, 0x00, 0x00, 0x00, 0x0e, 0x00, 0x00, 0x00,
+	0x0d, 0x00, 0x00, 0x00, 0x3d, 0x00, 0x04, 0x00, 0x0f, 0x00, 0x00, 0x00, 0x12, 0x00, 0x00, 0x00,
+	0x11, 0x00, 0x00, 0x00, 0x57, 0x00, 0x05, 0x00, 0x07, 0x00, 0x00, 0x00, 0x13, 0x00, 0x00, 0x00,
+	0x0e, 0x00, 0x00, 0x00, 0x12, 0x00, 0x00, 0x00, 0x3e, 0x00, 0x03, 0x00, 0x09, 0x00, 0x00, 0x00,
+	0x13, 0x00, 0x00, 0x00, 0xfd, 0x00, 0x01, 0x00, 0x38, 0x00, 0x01, 0x00,
+};
+
diff --git a/host/libs/graphics_detector/shaders/blit_texture.frag.spv b/host/libs/graphics_detector/shaders/blit_texture.frag.spv
new file mode 100644
index 0000000..ba75fa1
--- /dev/null
+++ b/host/libs/graphics_detector/shaders/blit_texture.frag.spv
Binary files differ
diff --git a/host/libs/graphics_detector/shaders/blit_texture.vert b/host/libs/graphics_detector/shaders/blit_texture.vert
new file mode 100644
index 0000000..a9d1113
--- /dev/null
+++ b/host/libs/graphics_detector/shaders/blit_texture.vert
@@ -0,0 +1,22 @@
+#version 460
+
+vec2 kPositions[4] = vec2[](
+    vec2(-1.0,  1.0),
+    vec2(-1.0, -1.0),
+    vec2( 1.0,  1.0),
+    vec2( 1.0, -1.0)
+);
+
+vec2 kUVs[4] = vec2[](
+    vec2(0.0, 1.0),
+    vec2(0.0, 0.0),
+    vec2(1.0, 1.0),
+    vec2(1.0, 0.0)
+);
+
+layout (location = 0) out vec2 oUV;
+
+void main() {
+    gl_Position = vec4(kPositions[gl_VertexIndex], 0.0, 1.0);
+    oUV = kUVs[gl_VertexIndex];
+}
diff --git a/host/libs/graphics_detector/shaders/blit_texture.vert.inl b/host/libs/graphics_detector/shaders/blit_texture.vert.inl
new file mode 100644
index 0000000..eb8fad6
--- /dev/null
+++ b/host/libs/graphics_detector/shaders/blit_texture.vert.inl
@@ -0,0 +1,117 @@
+// Generated from GLSL:
+//
+// #version 460
+//
+// vec2 kPositions[4] = vec2[](
+//     vec2(-1.0,  1.0),
+//     vec2(-1.0, -1.0),
+//     vec2( 1.0,  1.0),
+//     vec2( 1.0, -1.0)
+// );
+//
+// vec2 kUVs[4] = vec2[](
+//     vec2(0.0, 1.0),
+//     vec2(0.0, 0.0),
+//     vec2(1.0, 1.0),
+//     vec2(1.0, 0.0)
+// );
+//
+// layout (location = 0) out vec2 oUV;
+//
+// void main() {
+//     gl_Position = vec4(kPositions[gl_VertexIndex], 0.0, 1.0);
+//     oUV = kUVs[gl_VertexIndex];
+// }
+const std::vector<uint8_t> kBlitTextureVert = {
+	0x03, 0x02, 0x23, 0x07, 0x00, 0x00, 0x01, 0x00, 0x0a, 0x00, 0x0d, 0x00, 0x32, 0x00, 0x00, 0x00,
+	0x00, 0x00, 0x00, 0x00, 0x11, 0x00, 0x02, 0x00, 0x01, 0x00, 0x00, 0x00, 0x0b, 0x00, 0x06, 0x00,
+	0x01, 0x00, 0x00, 0x00, 0x47, 0x4c, 0x53, 0x4c, 0x2e, 0x73, 0x74, 0x64, 0x2e, 0x34, 0x35, 0x30,
+	0x00, 0x00, 0x00, 0x00, 0x0e, 0x00, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
+	0x0f, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x6d, 0x61, 0x69, 0x6e,
+	0x00, 0x00, 0x00, 0x00, 0x1f, 0x00, 0x00, 0x00, 0x23, 0x00, 0x00, 0x00, 0x2e, 0x00, 0x00, 0x00,
+	0x03, 0x00, 0x03, 0x00, 0x02, 0x00, 0x00, 0x00, 0xcc, 0x01, 0x00, 0x00, 0x04, 0x00, 0x0a, 0x00,
+	0x47, 0x4c, 0x5f, 0x47, 0x4f, 0x4f, 0x47, 0x4c, 0x45, 0x5f, 0x63, 0x70, 0x70, 0x5f, 0x73, 0x74,
+	0x79, 0x6c, 0x65, 0x5f, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x69,
+	0x76, 0x65, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x47, 0x4c, 0x5f, 0x47, 0x4f, 0x4f, 0x47, 0x4c,
+	0x45, 0x5f, 0x69, 0x6e, 0x63, 0x6c, 0x75, 0x64, 0x65, 0x5f, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74,
+	0x69, 0x76, 0x65, 0x00, 0x05, 0x00, 0x04, 0x00, 0x04, 0x00, 0x00, 0x00, 0x6d, 0x61, 0x69, 0x6e,
+	0x00, 0x00, 0x00, 0x00, 0x05, 0x00, 0x05, 0x00, 0x0c, 0x00, 0x00, 0x00, 0x6b, 0x50, 0x6f, 0x73,
+	0x69, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x00, 0x00, 0x05, 0x00, 0x04, 0x00, 0x14, 0x00, 0x00, 0x00,
+	0x6b, 0x55, 0x56, 0x73, 0x00, 0x00, 0x00, 0x00, 0x05, 0x00, 0x06, 0x00, 0x1d, 0x00, 0x00, 0x00,
+	0x67, 0x6c, 0x5f, 0x50, 0x65, 0x72, 0x56, 0x65, 0x72, 0x74, 0x65, 0x78, 0x00, 0x00, 0x00, 0x00,
+	0x06, 0x00, 0x06, 0x00, 0x1d, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x67, 0x6c, 0x5f, 0x50,
+	0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x00, 0x06, 0x00, 0x07, 0x00, 0x1d, 0x00, 0x00, 0x00,
+	0x01, 0x00, 0x00, 0x00, 0x67, 0x6c, 0x5f, 0x50, 0x6f, 0x69, 0x6e, 0x74, 0x53, 0x69, 0x7a, 0x65,
+	0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x07, 0x00, 0x1d, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00,
+	0x67, 0x6c, 0x5f, 0x43, 0x6c, 0x69, 0x70, 0x44, 0x69, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x00,
+	0x06, 0x00, 0x07, 0x00, 0x1d, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x67, 0x6c, 0x5f, 0x43,
+	0x75, 0x6c, 0x6c, 0x44, 0x69, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x00, 0x05, 0x00, 0x03, 0x00,
+	0x1f, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x05, 0x00, 0x06, 0x00, 0x23, 0x00, 0x00, 0x00,
+	0x67, 0x6c, 0x5f, 0x56, 0x65, 0x72, 0x74, 0x65, 0x78, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x00, 0x00,
+	0x05, 0x00, 0x03, 0x00, 0x2e, 0x00, 0x00, 0x00, 0x6f, 0x55, 0x56, 0x00, 0x48, 0x00, 0x05, 0x00,
+	0x1d, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0b, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+	0x48, 0x00, 0x05, 0x00, 0x1d, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x0b, 0x00, 0x00, 0x00,
+	0x01, 0x00, 0x00, 0x00, 0x48, 0x00, 0x05, 0x00, 0x1d, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00,
+	0x0b, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x48, 0x00, 0x05, 0x00, 0x1d, 0x00, 0x00, 0x00,
+	0x03, 0x00, 0x00, 0x00, 0x0b, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x47, 0x00, 0x03, 0x00,
+	0x1d, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x47, 0x00, 0x04, 0x00, 0x23, 0x00, 0x00, 0x00,
+	0x0b, 0x00, 0x00, 0x00, 0x2a, 0x00, 0x00, 0x00, 0x47, 0x00, 0x04, 0x00, 0x2e, 0x00, 0x00, 0x00,
+	0x1e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x13, 0x00, 0x02, 0x00, 0x02, 0x00, 0x00, 0x00,
+	0x21, 0x00, 0x03, 0x00, 0x03, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x16, 0x00, 0x03, 0x00,
+	0x06, 0x00, 0x00, 0x00, 0x20, 0x00, 0x00, 0x00, 0x17, 0x00, 0x04, 0x00, 0x07, 0x00, 0x00, 0x00,
+	0x06, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x15, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00,
+	0x20, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x2b, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00,
+	0x09, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x1c, 0x00, 0x04, 0x00, 0x0a, 0x00, 0x00, 0x00,
+	0x07, 0x00, 0x00, 0x00, 0x09, 0x00, 0x00, 0x00, 0x20, 0x00, 0x04, 0x00, 0x0b, 0x00, 0x00, 0x00,
+	0x06, 0x00, 0x00, 0x00, 0x0a, 0x00, 0x00, 0x00, 0x3b, 0x00, 0x04, 0x00, 0x0b, 0x00, 0x00, 0x00,
+	0x0c, 0x00, 0x00, 0x00, 0x06, 0x00, 0x00, 0x00, 0x2b, 0x00, 0x04, 0x00, 0x06, 0x00, 0x00, 0x00,
+	0x0d, 0x00, 0x00, 0x00, 0x00, 0x00, 0x80, 0xbf, 0x2b, 0x00, 0x04, 0x00, 0x06, 0x00, 0x00, 0x00,
+	0x0e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x80, 0x3f, 0x2c, 0x00, 0x05, 0x00, 0x07, 0x00, 0x00, 0x00,
+	0x0f, 0x00, 0x00, 0x00, 0x0d, 0x00, 0x00, 0x00, 0x0e, 0x00, 0x00, 0x00, 0x2c, 0x00, 0x05, 0x00,
+	0x07, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x0d, 0x00, 0x00, 0x00, 0x0d, 0x00, 0x00, 0x00,
+	0x2c, 0x00, 0x05, 0x00, 0x07, 0x00, 0x00, 0x00, 0x11, 0x00, 0x00, 0x00, 0x0e, 0x00, 0x00, 0x00,
+	0x0e, 0x00, 0x00, 0x00, 0x2c, 0x00, 0x05, 0x00, 0x07, 0x00, 0x00, 0x00, 0x12, 0x00, 0x00, 0x00,
+	0x0e, 0x00, 0x00, 0x00, 0x0d, 0x00, 0x00, 0x00, 0x2c, 0x00, 0x07, 0x00, 0x0a, 0x00, 0x00, 0x00,
+	0x13, 0x00, 0x00, 0x00, 0x0f, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x11, 0x00, 0x00, 0x00,
+	0x12, 0x00, 0x00, 0x00, 0x3b, 0x00, 0x04, 0x00, 0x0b, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00,
+	0x06, 0x00, 0x00, 0x00, 0x2b, 0x00, 0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x15, 0x00, 0x00, 0x00,
+	0x00, 0x00, 0x00, 0x00, 0x2c, 0x00, 0x05, 0x00, 0x07, 0x00, 0x00, 0x00, 0x16, 0x00, 0x00, 0x00,
+	0x15, 0x00, 0x00, 0x00, 0x0e, 0x00, 0x00, 0x00, 0x2c, 0x00, 0x05, 0x00, 0x07, 0x00, 0x00, 0x00,
+	0x17, 0x00, 0x00, 0x00, 0x15, 0x00, 0x00, 0x00, 0x15, 0x00, 0x00, 0x00, 0x2c, 0x00, 0x05, 0x00,
+	0x07, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x0e, 0x00, 0x00, 0x00, 0x15, 0x00, 0x00, 0x00,
+	0x2c, 0x00, 0x07, 0x00, 0x0a, 0x00, 0x00, 0x00, 0x19, 0x00, 0x00, 0x00, 0x16, 0x00, 0x00, 0x00,
+	0x17, 0x00, 0x00, 0x00, 0x11, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x17, 0x00, 0x04, 0x00,
+	0x1a, 0x00, 0x00, 0x00, 0x06, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x2b, 0x00, 0x04, 0x00,
+	0x08, 0x00, 0x00, 0x00, 0x1b, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x1c, 0x00, 0x04, 0x00,
+	0x1c, 0x00, 0x00, 0x00, 0x06, 0x00, 0x00, 0x00, 0x1b, 0x00, 0x00, 0x00, 0x1e, 0x00, 0x06, 0x00,
+	0x1d, 0x00, 0x00, 0x00, 0x1a, 0x00, 0x00, 0x00, 0x06, 0x00, 0x00, 0x00, 0x1c, 0x00, 0x00, 0x00,
+	0x1c, 0x00, 0x00, 0x00, 0x20, 0x00, 0x04, 0x00, 0x1e, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00,
+	0x1d, 0x00, 0x00, 0x00, 0x3b, 0x00, 0x04, 0x00, 0x1e, 0x00, 0x00, 0x00, 0x1f, 0x00, 0x00, 0x00,
+	0x03, 0x00, 0x00, 0x00, 0x15, 0x00, 0x04, 0x00, 0x20, 0x00, 0x00, 0x00, 0x20, 0x00, 0x00, 0x00,
+	0x01, 0x00, 0x00, 0x00, 0x2b, 0x00, 0x04, 0x00, 0x20, 0x00, 0x00, 0x00, 0x21, 0x00, 0x00, 0x00,
+	0x00, 0x00, 0x00, 0x00, 0x20, 0x00, 0x04, 0x00, 0x22, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
+	0x20, 0x00, 0x00, 0x00, 0x3b, 0x00, 0x04, 0x00, 0x22, 0x00, 0x00, 0x00, 0x23, 0x00, 0x00, 0x00,
+	0x01, 0x00, 0x00, 0x00, 0x20, 0x00, 0x04, 0x00, 0x25, 0x00, 0x00, 0x00, 0x06, 0x00, 0x00, 0x00,
+	0x07, 0x00, 0x00, 0x00, 0x20, 0x00, 0x04, 0x00, 0x2b, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00,
+	0x1a, 0x00, 0x00, 0x00, 0x20, 0x00, 0x04, 0x00, 0x2d, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00,
+	0x07, 0x00, 0x00, 0x00, 0x3b, 0x00, 0x04, 0x00, 0x2d, 0x00, 0x00, 0x00, 0x2e, 0x00, 0x00, 0x00,
+	0x03, 0x00, 0x00, 0x00, 0x36, 0x00, 0x05, 0x00, 0x02, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
+	0x00, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0xf8, 0x00, 0x02, 0x00, 0x05, 0x00, 0x00, 0x00,
+	0x3e, 0x00, 0x03, 0x00, 0x0c, 0x00, 0x00, 0x00, 0x13, 0x00, 0x00, 0x00, 0x3e, 0x00, 0x03, 0x00,
+	0x14, 0x00, 0x00, 0x00, 0x19, 0x00, 0x00, 0x00, 0x3d, 0x00, 0x04, 0x00, 0x20, 0x00, 0x00, 0x00,
+	0x24, 0x00, 0x00, 0x00, 0x23, 0x00, 0x00, 0x00, 0x41, 0x00, 0x05, 0x00, 0x25, 0x00, 0x00, 0x00,
+	0x26, 0x00, 0x00, 0x00, 0x0c, 0x00, 0x00, 0x00, 0x24, 0x00, 0x00, 0x00, 0x3d, 0x00, 0x04, 0x00,
+	0x07, 0x00, 0x00, 0x00, 0x27, 0x00, 0x00, 0x00, 0x26, 0x00, 0x00, 0x00, 0x51, 0x00, 0x05, 0x00,
+	0x06, 0x00, 0x00, 0x00, 0x28, 0x00, 0x00, 0x00, 0x27, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+	0x51, 0x00, 0x05, 0x00, 0x06, 0x00, 0x00, 0x00, 0x29, 0x00, 0x00, 0x00, 0x27, 0x00, 0x00, 0x00,
+	0x01, 0x00, 0x00, 0x00, 0x50, 0x00, 0x07, 0x00, 0x1a, 0x00, 0x00, 0x00, 0x2a, 0x00, 0x00, 0x00,
+	0x28, 0x00, 0x00, 0x00, 0x29, 0x00, 0x00, 0x00, 0x15, 0x00, 0x00, 0x00, 0x0e, 0x00, 0x00, 0x00,
+	0x41, 0x00, 0x05, 0x00, 0x2b, 0x00, 0x00, 0x00, 0x2c, 0x00, 0x00, 0x00, 0x1f, 0x00, 0x00, 0x00,
+	0x21, 0x00, 0x00, 0x00, 0x3e, 0x00, 0x03, 0x00, 0x2c, 0x00, 0x00, 0x00, 0x2a, 0x00, 0x00, 0x00,
+	0x3d, 0x00, 0x04, 0x00, 0x20, 0x00, 0x00, 0x00, 0x2f, 0x00, 0x00, 0x00, 0x23, 0x00, 0x00, 0x00,
+	0x41, 0x00, 0x05, 0x00, 0x25, 0x00, 0x00, 0x00, 0x30, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00,
+	0x2f, 0x00, 0x00, 0x00, 0x3d, 0x00, 0x04, 0x00, 0x07, 0x00, 0x00, 0x00, 0x31, 0x00, 0x00, 0x00,
+	0x30, 0x00, 0x00, 0x00, 0x3e, 0x00, 0x03, 0x00, 0x2e, 0x00, 0x00, 0x00, 0x31, 0x00, 0x00, 0x00,
+	0xfd, 0x00, 0x01, 0x00, 0x38, 0x00, 0x01, 0x00,
+};
+
diff --git a/host/libs/graphics_detector/shaders/blit_texture.vert.spv b/host/libs/graphics_detector/shaders/blit_texture.vert.spv
new file mode 100644
index 0000000..20d1967
--- /dev/null
+++ b/host/libs/graphics_detector/shaders/blit_texture.vert.spv
Binary files differ
diff --git a/host/libs/graphics_detector/shaders/blit_texture_highp.frag b/host/libs/graphics_detector/shaders/blit_texture_highp.frag
new file mode 100644
index 0000000..45fd52d
--- /dev/null
+++ b/host/libs/graphics_detector/shaders/blit_texture_highp.frag
@@ -0,0 +1,11 @@
+#version 460
+
+layout(set = 0, binding = 0) uniform highp sampler2D uTexture;
+
+layout(location = 0) noperspective in vec2 iUV;
+
+layout(location = 0) out vec4 oColor;
+
+void main() {
+    oColor = texture(uTexture, iUV);
+}
diff --git a/host/libs/graphics_detector/shaders/blit_texture_highp.frag.inl b/host/libs/graphics_detector/shaders/blit_texture_highp.frag.inl
new file mode 100644
index 0000000..9ec5201
--- /dev/null
+++ b/host/libs/graphics_detector/shaders/blit_texture_highp.frag.inl
@@ -0,0 +1,56 @@
+// Generated from GLSL:
+//
+// #version 460
+//
+// layout(set = 0, binding = 0) uniform highp sampler2D uTexture;
+//
+// layout(location = 0) noperspective in vec2 iUV;
+//
+// layout(location = 0) out vec4 oColor;
+//
+// void main() {
+//     oColor = texture(uTexture, iUV);
+// }
+const std::vector<uint8_t> kBlitTextureHighpFrag = {
+	0x03, 0x02, 0x23, 0x07, 0x00, 0x00, 0x01, 0x00, 0x0a, 0x00, 0x0d, 0x00, 0x14, 0x00, 0x00, 0x00,
+	0x00, 0x00, 0x00, 0x00, 0x11, 0x00, 0x02, 0x00, 0x01, 0x00, 0x00, 0x00, 0x0b, 0x00, 0x06, 0x00,
+	0x01, 0x00, 0x00, 0x00, 0x47, 0x4c, 0x53, 0x4c, 0x2e, 0x73, 0x74, 0x64, 0x2e, 0x34, 0x35, 0x30,
+	0x00, 0x00, 0x00, 0x00, 0x0e, 0x00, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
+	0x0f, 0x00, 0x07, 0x00, 0x04, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x6d, 0x61, 0x69, 0x6e,
+	0x00, 0x00, 0x00, 0x00, 0x09, 0x00, 0x00, 0x00, 0x11, 0x00, 0x00, 0x00, 0x10, 0x00, 0x03, 0x00,
+	0x04, 0x00, 0x00, 0x00, 0x07, 0x00, 0x00, 0x00, 0x03, 0x00, 0x03, 0x00, 0x02, 0x00, 0x00, 0x00,
+	0xcc, 0x01, 0x00, 0x00, 0x04, 0x00, 0x0a, 0x00, 0x47, 0x4c, 0x5f, 0x47, 0x4f, 0x4f, 0x47, 0x4c,
+	0x45, 0x5f, 0x63, 0x70, 0x70, 0x5f, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x5f, 0x6c, 0x69, 0x6e, 0x65,
+	0x5f, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x69, 0x76, 0x65, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00,
+	0x47, 0x4c, 0x5f, 0x47, 0x4f, 0x4f, 0x47, 0x4c, 0x45, 0x5f, 0x69, 0x6e, 0x63, 0x6c, 0x75, 0x64,
+	0x65, 0x5f, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x69, 0x76, 0x65, 0x00, 0x05, 0x00, 0x04, 0x00,
+	0x04, 0x00, 0x00, 0x00, 0x6d, 0x61, 0x69, 0x6e, 0x00, 0x00, 0x00, 0x00, 0x05, 0x00, 0x04, 0x00,
+	0x09, 0x00, 0x00, 0x00, 0x6f, 0x43, 0x6f, 0x6c, 0x6f, 0x72, 0x00, 0x00, 0x05, 0x00, 0x05, 0x00,
+	0x0d, 0x00, 0x00, 0x00, 0x75, 0x54, 0x65, 0x78, 0x74, 0x75, 0x72, 0x65, 0x00, 0x00, 0x00, 0x00,
+	0x05, 0x00, 0x03, 0x00, 0x11, 0x00, 0x00, 0x00, 0x69, 0x55, 0x56, 0x00, 0x47, 0x00, 0x04, 0x00,
+	0x09, 0x00, 0x00, 0x00, 0x1e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x47, 0x00, 0x04, 0x00,
+	0x0d, 0x00, 0x00, 0x00, 0x22, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x47, 0x00, 0x04, 0x00,
+	0x0d, 0x00, 0x00, 0x00, 0x21, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x47, 0x00, 0x03, 0x00,
+	0x11, 0x00, 0x00, 0x00, 0x0d, 0x00, 0x00, 0x00, 0x47, 0x00, 0x04, 0x00, 0x11, 0x00, 0x00, 0x00,
+	0x1e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x13, 0x00, 0x02, 0x00, 0x02, 0x00, 0x00, 0x00,
+	0x21, 0x00, 0x03, 0x00, 0x03, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x16, 0x00, 0x03, 0x00,
+	0x06, 0x00, 0x00, 0x00, 0x20, 0x00, 0x00, 0x00, 0x17, 0x00, 0x04, 0x00, 0x07, 0x00, 0x00, 0x00,
+	0x06, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x20, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00,
+	0x03, 0x00, 0x00, 0x00, 0x07, 0x00, 0x00, 0x00, 0x3b, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00,
+	0x09, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x19, 0x00, 0x09, 0x00, 0x0a, 0x00, 0x00, 0x00,
+	0x06, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+	0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x1b, 0x00, 0x03, 0x00,
+	0x0b, 0x00, 0x00, 0x00, 0x0a, 0x00, 0x00, 0x00, 0x20, 0x00, 0x04, 0x00, 0x0c, 0x00, 0x00, 0x00,
+	0x00, 0x00, 0x00, 0x00, 0x0b, 0x00, 0x00, 0x00, 0x3b, 0x00, 0x04, 0x00, 0x0c, 0x00, 0x00, 0x00,
+	0x0d, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x17, 0x00, 0x04, 0x00, 0x0f, 0x00, 0x00, 0x00,
+	0x06, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x20, 0x00, 0x04, 0x00, 0x10, 0x00, 0x00, 0x00,
+	0x01, 0x00, 0x00, 0x00, 0x0f, 0x00, 0x00, 0x00, 0x3b, 0x00, 0x04, 0x00, 0x10, 0x00, 0x00, 0x00,
+	0x11, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x36, 0x00, 0x05, 0x00, 0x02, 0x00, 0x00, 0x00,
+	0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0xf8, 0x00, 0x02, 0x00,
+	0x05, 0x00, 0x00, 0x00, 0x3d, 0x00, 0x04, 0x00, 0x0b, 0x00, 0x00, 0x00, 0x0e, 0x00, 0x00, 0x00,
+	0x0d, 0x00, 0x00, 0x00, 0x3d, 0x00, 0x04, 0x00, 0x0f, 0x00, 0x00, 0x00, 0x12, 0x00, 0x00, 0x00,
+	0x11, 0x00, 0x00, 0x00, 0x57, 0x00, 0x05, 0x00, 0x07, 0x00, 0x00, 0x00, 0x13, 0x00, 0x00, 0x00,
+	0x0e, 0x00, 0x00, 0x00, 0x12, 0x00, 0x00, 0x00, 0x3e, 0x00, 0x03, 0x00, 0x09, 0x00, 0x00, 0x00,
+	0x13, 0x00, 0x00, 0x00, 0xfd, 0x00, 0x01, 0x00, 0x38, 0x00, 0x01, 0x00,
+};
+
diff --git a/host/libs/graphics_detector/shaders/blit_texture_highp.frag.spv b/host/libs/graphics_detector/shaders/blit_texture_highp.frag.spv
new file mode 100644
index 0000000..ba75fa1
--- /dev/null
+++ b/host/libs/graphics_detector/shaders/blit_texture_highp.frag.spv
Binary files differ
diff --git a/host/libs/graphics_detector/shaders/blit_texture_lowp.frag b/host/libs/graphics_detector/shaders/blit_texture_lowp.frag
new file mode 100644
index 0000000..171f924
--- /dev/null
+++ b/host/libs/graphics_detector/shaders/blit_texture_lowp.frag
@@ -0,0 +1,11 @@
+#version 460
+
+layout(set = 0, binding = 0) uniform lowp sampler2D uTexture;
+
+layout(location = 0) noperspective in vec2 iUV;
+
+layout(location = 0) out vec4 oColor;
+
+void main() {
+    oColor = texture(uTexture, iUV);
+}
diff --git a/host/libs/graphics_detector/shaders/blit_texture_lowp.frag.inl b/host/libs/graphics_detector/shaders/blit_texture_lowp.frag.inl
new file mode 100644
index 0000000..f59cd39
--- /dev/null
+++ b/host/libs/graphics_detector/shaders/blit_texture_lowp.frag.inl
@@ -0,0 +1,58 @@
+// Generated from GLSL:
+//
+// #version 460
+//
+// layout(set = 0, binding = 0) uniform lowp sampler2D uTexture;
+//
+// layout(location = 0) noperspective in vec2 iUV;
+//
+// layout(location = 0) out vec4 oColor;
+//
+// void main() {
+//     oColor = texture(uTexture, iUV);
+// }
+const std::vector<uint8_t> kBlitTextureLowpFrag = {
+	0x03, 0x02, 0x23, 0x07, 0x00, 0x00, 0x01, 0x00, 0x0a, 0x00, 0x0d, 0x00, 0x14, 0x00, 0x00, 0x00,
+	0x00, 0x00, 0x00, 0x00, 0x11, 0x00, 0x02, 0x00, 0x01, 0x00, 0x00, 0x00, 0x0b, 0x00, 0x06, 0x00,
+	0x01, 0x00, 0x00, 0x00, 0x47, 0x4c, 0x53, 0x4c, 0x2e, 0x73, 0x74, 0x64, 0x2e, 0x34, 0x35, 0x30,
+	0x00, 0x00, 0x00, 0x00, 0x0e, 0x00, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
+	0x0f, 0x00, 0x07, 0x00, 0x04, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x6d, 0x61, 0x69, 0x6e,
+	0x00, 0x00, 0x00, 0x00, 0x09, 0x00, 0x00, 0x00, 0x11, 0x00, 0x00, 0x00, 0x10, 0x00, 0x03, 0x00,
+	0x04, 0x00, 0x00, 0x00, 0x07, 0x00, 0x00, 0x00, 0x03, 0x00, 0x03, 0x00, 0x02, 0x00, 0x00, 0x00,
+	0xcc, 0x01, 0x00, 0x00, 0x04, 0x00, 0x0a, 0x00, 0x47, 0x4c, 0x5f, 0x47, 0x4f, 0x4f, 0x47, 0x4c,
+	0x45, 0x5f, 0x63, 0x70, 0x70, 0x5f, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x5f, 0x6c, 0x69, 0x6e, 0x65,
+	0x5f, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x69, 0x76, 0x65, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00,
+	0x47, 0x4c, 0x5f, 0x47, 0x4f, 0x4f, 0x47, 0x4c, 0x45, 0x5f, 0x69, 0x6e, 0x63, 0x6c, 0x75, 0x64,
+	0x65, 0x5f, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x69, 0x76, 0x65, 0x00, 0x05, 0x00, 0x04, 0x00,
+	0x04, 0x00, 0x00, 0x00, 0x6d, 0x61, 0x69, 0x6e, 0x00, 0x00, 0x00, 0x00, 0x05, 0x00, 0x04, 0x00,
+	0x09, 0x00, 0x00, 0x00, 0x6f, 0x43, 0x6f, 0x6c, 0x6f, 0x72, 0x00, 0x00, 0x05, 0x00, 0x05, 0x00,
+	0x0d, 0x00, 0x00, 0x00, 0x75, 0x54, 0x65, 0x78, 0x74, 0x75, 0x72, 0x65, 0x00, 0x00, 0x00, 0x00,
+	0x05, 0x00, 0x03, 0x00, 0x11, 0x00, 0x00, 0x00, 0x69, 0x55, 0x56, 0x00, 0x47, 0x00, 0x04, 0x00,
+	0x09, 0x00, 0x00, 0x00, 0x1e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x47, 0x00, 0x03, 0x00,
+	0x0d, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x47, 0x00, 0x04, 0x00, 0x0d, 0x00, 0x00, 0x00,
+	0x22, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x47, 0x00, 0x04, 0x00, 0x0d, 0x00, 0x00, 0x00,
+	0x21, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x47, 0x00, 0x03, 0x00, 0x0e, 0x00, 0x00, 0x00,
+	0x00, 0x00, 0x00, 0x00, 0x47, 0x00, 0x03, 0x00, 0x11, 0x00, 0x00, 0x00, 0x0d, 0x00, 0x00, 0x00,
+	0x47, 0x00, 0x04, 0x00, 0x11, 0x00, 0x00, 0x00, 0x1e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+	0x47, 0x00, 0x03, 0x00, 0x13, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x13, 0x00, 0x02, 0x00,
+	0x02, 0x00, 0x00, 0x00, 0x21, 0x00, 0x03, 0x00, 0x03, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00,
+	0x16, 0x00, 0x03, 0x00, 0x06, 0x00, 0x00, 0x00, 0x20, 0x00, 0x00, 0x00, 0x17, 0x00, 0x04, 0x00,
+	0x07, 0x00, 0x00, 0x00, 0x06, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x20, 0x00, 0x04, 0x00,
+	0x08, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x07, 0x00, 0x00, 0x00, 0x3b, 0x00, 0x04, 0x00,
+	0x08, 0x00, 0x00, 0x00, 0x09, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x19, 0x00, 0x09, 0x00,
+	0x0a, 0x00, 0x00, 0x00, 0x06, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+	0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+	0x1b, 0x00, 0x03, 0x00, 0x0b, 0x00, 0x00, 0x00, 0x0a, 0x00, 0x00, 0x00, 0x20, 0x00, 0x04, 0x00,
+	0x0c, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0b, 0x00, 0x00, 0x00, 0x3b, 0x00, 0x04, 0x00,
+	0x0c, 0x00, 0x00, 0x00, 0x0d, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x17, 0x00, 0x04, 0x00,
+	0x0f, 0x00, 0x00, 0x00, 0x06, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x20, 0x00, 0x04, 0x00,
+	0x10, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x0f, 0x00, 0x00, 0x00, 0x3b, 0x00, 0x04, 0x00,
+	0x10, 0x00, 0x00, 0x00, 0x11, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x36, 0x00, 0x05, 0x00,
+	0x02, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00,
+	0xf8, 0x00, 0x02, 0x00, 0x05, 0x00, 0x00, 0x00, 0x3d, 0x00, 0x04, 0x00, 0x0b, 0x00, 0x00, 0x00,
+	0x0e, 0x00, 0x00, 0x00, 0x0d, 0x00, 0x00, 0x00, 0x3d, 0x00, 0x04, 0x00, 0x0f, 0x00, 0x00, 0x00,
+	0x12, 0x00, 0x00, 0x00, 0x11, 0x00, 0x00, 0x00, 0x57, 0x00, 0x05, 0x00, 0x07, 0x00, 0x00, 0x00,
+	0x13, 0x00, 0x00, 0x00, 0x0e, 0x00, 0x00, 0x00, 0x12, 0x00, 0x00, 0x00, 0x3e, 0x00, 0x03, 0x00,
+	0x09, 0x00, 0x00, 0x00, 0x13, 0x00, 0x00, 0x00, 0xfd, 0x00, 0x01, 0x00, 0x38, 0x00, 0x01, 0x00,
+};
+
diff --git a/host/libs/graphics_detector/shaders/blit_texture_lowp.frag.spv b/host/libs/graphics_detector/shaders/blit_texture_lowp.frag.spv
new file mode 100644
index 0000000..c4209d9
--- /dev/null
+++ b/host/libs/graphics_detector/shaders/blit_texture_lowp.frag.spv
Binary files differ
diff --git a/host/libs/graphics_detector/shaders/blit_texture_mediump.frag b/host/libs/graphics_detector/shaders/blit_texture_mediump.frag
new file mode 100644
index 0000000..b51c9e3
--- /dev/null
+++ b/host/libs/graphics_detector/shaders/blit_texture_mediump.frag
@@ -0,0 +1,11 @@
+#version 460
+
+layout(set = 0, binding = 0) uniform mediump sampler2D uTexture;
+
+layout(location = 0) noperspective in vec2 iUV;
+
+layout(location = 0) out vec4 oColor;
+
+void main() {
+    oColor = texture(uTexture, iUV);
+}
diff --git a/host/libs/graphics_detector/shaders/blit_texture_mediump.frag.inl b/host/libs/graphics_detector/shaders/blit_texture_mediump.frag.inl
new file mode 100644
index 0000000..5c079a0
--- /dev/null
+++ b/host/libs/graphics_detector/shaders/blit_texture_mediump.frag.inl
@@ -0,0 +1,58 @@
+// Generated from GLSL:
+//
+// #version 460
+//
+// layout(set = 0, binding = 0) uniform mediump sampler2D uTexture;
+//
+// layout(location = 0) noperspective in vec2 iUV;
+//
+// layout(location = 0) out vec4 oColor;
+//
+// void main() {
+//     oColor = texture(uTexture, iUV);
+// }
+const std::vector<uint8_t> kBlitTextureMediumpFrag = {
+	0x03, 0x02, 0x23, 0x07, 0x00, 0x00, 0x01, 0x00, 0x0a, 0x00, 0x0d, 0x00, 0x14, 0x00, 0x00, 0x00,
+	0x00, 0x00, 0x00, 0x00, 0x11, 0x00, 0x02, 0x00, 0x01, 0x00, 0x00, 0x00, 0x0b, 0x00, 0x06, 0x00,
+	0x01, 0x00, 0x00, 0x00, 0x47, 0x4c, 0x53, 0x4c, 0x2e, 0x73, 0x74, 0x64, 0x2e, 0x34, 0x35, 0x30,
+	0x00, 0x00, 0x00, 0x00, 0x0e, 0x00, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
+	0x0f, 0x00, 0x07, 0x00, 0x04, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x6d, 0x61, 0x69, 0x6e,
+	0x00, 0x00, 0x00, 0x00, 0x09, 0x00, 0x00, 0x00, 0x11, 0x00, 0x00, 0x00, 0x10, 0x00, 0x03, 0x00,
+	0x04, 0x00, 0x00, 0x00, 0x07, 0x00, 0x00, 0x00, 0x03, 0x00, 0x03, 0x00, 0x02, 0x00, 0x00, 0x00,
+	0xcc, 0x01, 0x00, 0x00, 0x04, 0x00, 0x0a, 0x00, 0x47, 0x4c, 0x5f, 0x47, 0x4f, 0x4f, 0x47, 0x4c,
+	0x45, 0x5f, 0x63, 0x70, 0x70, 0x5f, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x5f, 0x6c, 0x69, 0x6e, 0x65,
+	0x5f, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x69, 0x76, 0x65, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00,
+	0x47, 0x4c, 0x5f, 0x47, 0x4f, 0x4f, 0x47, 0x4c, 0x45, 0x5f, 0x69, 0x6e, 0x63, 0x6c, 0x75, 0x64,
+	0x65, 0x5f, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x69, 0x76, 0x65, 0x00, 0x05, 0x00, 0x04, 0x00,
+	0x04, 0x00, 0x00, 0x00, 0x6d, 0x61, 0x69, 0x6e, 0x00, 0x00, 0x00, 0x00, 0x05, 0x00, 0x04, 0x00,
+	0x09, 0x00, 0x00, 0x00, 0x6f, 0x43, 0x6f, 0x6c, 0x6f, 0x72, 0x00, 0x00, 0x05, 0x00, 0x05, 0x00,
+	0x0d, 0x00, 0x00, 0x00, 0x75, 0x54, 0x65, 0x78, 0x74, 0x75, 0x72, 0x65, 0x00, 0x00, 0x00, 0x00,
+	0x05, 0x00, 0x03, 0x00, 0x11, 0x00, 0x00, 0x00, 0x69, 0x55, 0x56, 0x00, 0x47, 0x00, 0x04, 0x00,
+	0x09, 0x00, 0x00, 0x00, 0x1e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x47, 0x00, 0x03, 0x00,
+	0x0d, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x47, 0x00, 0x04, 0x00, 0x0d, 0x00, 0x00, 0x00,
+	0x22, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x47, 0x00, 0x04, 0x00, 0x0d, 0x00, 0x00, 0x00,
+	0x21, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x47, 0x00, 0x03, 0x00, 0x0e, 0x00, 0x00, 0x00,
+	0x00, 0x00, 0x00, 0x00, 0x47, 0x00, 0x03, 0x00, 0x11, 0x00, 0x00, 0x00, 0x0d, 0x00, 0x00, 0x00,
+	0x47, 0x00, 0x04, 0x00, 0x11, 0x00, 0x00, 0x00, 0x1e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+	0x47, 0x00, 0x03, 0x00, 0x13, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x13, 0x00, 0x02, 0x00,
+	0x02, 0x00, 0x00, 0x00, 0x21, 0x00, 0x03, 0x00, 0x03, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00,
+	0x16, 0x00, 0x03, 0x00, 0x06, 0x00, 0x00, 0x00, 0x20, 0x00, 0x00, 0x00, 0x17, 0x00, 0x04, 0x00,
+	0x07, 0x00, 0x00, 0x00, 0x06, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x20, 0x00, 0x04, 0x00,
+	0x08, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x07, 0x00, 0x00, 0x00, 0x3b, 0x00, 0x04, 0x00,
+	0x08, 0x00, 0x00, 0x00, 0x09, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x19, 0x00, 0x09, 0x00,
+	0x0a, 0x00, 0x00, 0x00, 0x06, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+	0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+	0x1b, 0x00, 0x03, 0x00, 0x0b, 0x00, 0x00, 0x00, 0x0a, 0x00, 0x00, 0x00, 0x20, 0x00, 0x04, 0x00,
+	0x0c, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0b, 0x00, 0x00, 0x00, 0x3b, 0x00, 0x04, 0x00,
+	0x0c, 0x00, 0x00, 0x00, 0x0d, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x17, 0x00, 0x04, 0x00,
+	0x0f, 0x00, 0x00, 0x00, 0x06, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x20, 0x00, 0x04, 0x00,
+	0x10, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x0f, 0x00, 0x00, 0x00, 0x3b, 0x00, 0x04, 0x00,
+	0x10, 0x00, 0x00, 0x00, 0x11, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x36, 0x00, 0x05, 0x00,
+	0x02, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00,
+	0xf8, 0x00, 0x02, 0x00, 0x05, 0x00, 0x00, 0x00, 0x3d, 0x00, 0x04, 0x00, 0x0b, 0x00, 0x00, 0x00,
+	0x0e, 0x00, 0x00, 0x00, 0x0d, 0x00, 0x00, 0x00, 0x3d, 0x00, 0x04, 0x00, 0x0f, 0x00, 0x00, 0x00,
+	0x12, 0x00, 0x00, 0x00, 0x11, 0x00, 0x00, 0x00, 0x57, 0x00, 0x05, 0x00, 0x07, 0x00, 0x00, 0x00,
+	0x13, 0x00, 0x00, 0x00, 0x0e, 0x00, 0x00, 0x00, 0x12, 0x00, 0x00, 0x00, 0x3e, 0x00, 0x03, 0x00,
+	0x09, 0x00, 0x00, 0x00, 0x13, 0x00, 0x00, 0x00, 0xfd, 0x00, 0x01, 0x00, 0x38, 0x00, 0x01, 0x00,
+};
+
diff --git a/host/libs/graphics_detector/shaders/blit_texture_mediump.frag.spv b/host/libs/graphics_detector/shaders/blit_texture_mediump.frag.spv
new file mode 100644
index 0000000..c4209d9
--- /dev/null
+++ b/host/libs/graphics_detector/shaders/blit_texture_mediump.frag.spv
Binary files differ
diff --git a/host/libs/graphics_detector/shaders/generate_shader_embed.cpp b/host/libs/graphics_detector/shaders/generate_shader_embed.cpp
new file mode 100644
index 0000000..ff55d70
--- /dev/null
+++ b/host/libs/graphics_detector/shaders/generate_shader_embed.cpp
@@ -0,0 +1,106 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <cstring>
+#include <fstream>
+#include <iomanip>
+#include <iostream>
+#include <sstream>
+#include <string>
+#include <vector>
+
+namespace {
+
+std::vector<std::string> StrSplit(const std::string& s, const char delimiter) {
+  std::vector<std::string> result;
+  std::istringstream stream(s);
+  std::string item;
+  while (std::getline(stream, item, delimiter)) {
+    result.push_back(item);
+  }
+  return result;
+}
+
+}  // namespace
+
+int main(int argc, char** argv) {
+  if (argc != 5) {
+    std::cout << "Expected 5 arguments.";
+    std::exit(1);
+  }
+
+  const std::string input_glsl_filename = argv[1];
+  const std::string input_spirv_filename = argv[2];
+  const std::string input_spirv_varname = argv[3];
+  const std::string output_embed_filename = argv[4];
+
+  std::ifstream input_glsl_file(input_glsl_filename);
+  if (!input_glsl_file.is_open()) {
+    std::cout << "Failed to open input glsl file " << input_spirv_filename;
+    std::exit(1);
+  }
+
+  const std::string input_glsl(
+      (std::istreambuf_iterator<char>(input_glsl_file)),
+      std::istreambuf_iterator<char>());
+  const std::vector<std::string> input_glsl_lines = StrSplit(input_glsl, '\n');
+
+  std::ifstream input_spirv_file(input_spirv_filename,
+                                 std::ios::ate | std::ios::binary);
+  if (!input_spirv_file.is_open()) {
+    std::cout << "Failed to open input spirv file " << input_spirv_filename;
+    std::exit(1);
+  }
+
+  const std::size_t input_spirv_bytes_size =
+      static_cast<std::size_t>(input_spirv_file.tellg());
+  std::vector<unsigned char> input_spirv_bytes(input_spirv_bytes_size);
+  input_spirv_file.seekg(0);
+  input_spirv_file.read(reinterpret_cast<char*>(input_spirv_bytes.data()),
+                        input_spirv_bytes_size);
+  input_spirv_file.close();
+
+  std::ofstream output_embed_file(output_embed_filename);
+  if (!output_embed_file.is_open()) {
+    std::cout << "Failed to open output file " << output_embed_filename;
+    std::exit(1);
+  }
+
+  output_embed_file << "// Generated from GLSL:\n//\n";
+  for (const std::string& input_glsl_line : input_glsl_lines) {
+    output_embed_file << "// " << input_glsl_line << "\n";
+  }
+
+  output_embed_file << "const std::vector<uint8_t> " << input_spirv_varname
+                    << " = {";
+
+  const unsigned char* spirv_data = input_spirv_bytes.data();
+  for (std::size_t i = 0; i < input_spirv_bytes_size; i++) {
+    constexpr const std::size_t kNumBytesPerLine = 16;
+
+    if (i % kNumBytesPerLine == 0) {
+      output_embed_file << "\n\t";
+    }
+
+    output_embed_file << "0x" << std::hex << std::setfill('0') << std::setw(2)
+                      << static_cast<int>(*spirv_data) << ", ";
+    ++spirv_data;
+  }
+  output_embed_file << "\n};\n\n";
+
+  output_embed_file.close();
+  return 0;
+}
\ No newline at end of file
diff --git a/host/libs/graphics_detector/shaders/generate_shader_embeds.sh b/host/libs/graphics_detector/shaders/generate_shader_embeds.sh
new file mode 100755
index 0000000..108e062
--- /dev/null
+++ b/host/libs/graphics_detector/shaders/generate_shader_embeds.sh
@@ -0,0 +1,25 @@
+for file in *.{frag,vert}; do
+    [ -f "${file}" ] || break
+
+    SHADER_GLSL="${file}"
+    echo "Found ${SHADER_GLSL}"
+
+    SHADER_SPV="${file}.spv"
+    SHADER_EMBED="${file}.inl"
+    SHADER_BASENAME="${file}"
+    SHADER_EMBED_VARNAME=$(sed -r 's/\./_/g' <<< $SHADER_BASENAME)
+    SHADER_EMBED_VARNAME=$(sed -r 's/(^|_)([a-z])/\U\2/g' <<< $SHADER_EMBED_VARNAME)
+    SHADER_EMBED_VARNAME="k${SHADER_EMBED_VARNAME}"
+
+    glslc \
+        "${SHADER_GLSL}" \
+        -o "${SHADER_SPV}"
+
+    generate_shader_embed \
+        "${SHADER_GLSL}" \
+        "${SHADER_SPV}" \
+        "${SHADER_EMBED_VARNAME}" \
+        "${SHADER_EMBED}"
+
+    echo "Generated ${SHADER_EMBED}"
+done
diff --git a/host/libs/graphics_detector/subprocess.cpp b/host/libs/graphics_detector/subprocess.cpp
new file mode 100644
index 0000000..bb3d266
--- /dev/null
+++ b/host/libs/graphics_detector/subprocess.cpp
@@ -0,0 +1,178 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/libs/graphics_detector/subprocess.h"
+
+#include <dlfcn.h>
+#include <poll.h>
+#include <sys/prctl.h>
+#include <sys/wait.h>
+
+#include <condition_variable>
+#include <mutex>
+#include <thread>
+
+#include <android-base/logging.h>
+#include <android-base/scopeguard.h>
+#include <android-base/unique_fd.h>
+
+namespace cuttlefish {
+namespace {
+
+const char* const kFailedGraphicsSubprocessDisclaimer =
+    "Note: the Cuttlefish launcher runs some tests to check for the "
+    "availability of various graphics libraries and features on your "
+    "machine and failures during these tests can be expected.";
+
+int PidfdOpen(pid_t pid) {
+  // There is no glibc wrapper for pidfd_open.
+#ifndef SYS_pidfd_open
+  constexpr int SYS_pidfd_open = 434;
+#endif
+  return syscall(SYS_pidfd_open, pid, /*flags=*/0);
+}
+
+SubprocessResult WaitForChild(const std::string& message, pid_t pid) {
+  siginfo_t info;
+
+  int options = WEXITED | WNOWAIT;
+  if (TEMP_FAILURE_RETRY(waitid(P_PID, pid, &info, options)) != 0) {
+    PLOG(VERBOSE) << "Failed to wait for subprocess " << pid << " running "
+                  << message << " : waitid error. "
+                  << kFailedGraphicsSubprocessDisclaimer;
+    return SubprocessResult::kFailure;
+  }
+  if (info.si_pid != pid) {
+    LOG(VERBOSE) << "Failed to wait for subprocess " << pid << " running "
+                 << message << ": waitid returned different pid. "
+                 << kFailedGraphicsSubprocessDisclaimer;
+    return SubprocessResult::kFailure;
+  }
+  if (info.si_code != CLD_EXITED) {
+    LOG(VERBOSE) << "Failed to wait for subprocess " << pid << " running "
+                 << message << ": subprocess terminated by signal "
+                 << info.si_status << ". "
+                 << kFailedGraphicsSubprocessDisclaimer;
+    return SubprocessResult::kFailure;
+  }
+  return SubprocessResult::kSuccess;
+}
+
+SubprocessResult WaitForChildWithTimeoutFallback(
+    const std::string& message, pid_t pid, std::chrono::milliseconds timeout) {
+  bool child_exited = false;
+  bool child_timed_out = false;
+  std::condition_variable cv;
+  std::mutex m;
+
+  std::thread wait_thread([&]() {
+    std::unique_lock<std::mutex> lock(m);
+    if (!cv.wait_for(lock, timeout, [&] { return child_exited; })) {
+      child_timed_out = true;
+      if (kill(pid, SIGKILL) != 0) {
+        PLOG(VERBOSE) << "Failed to kill subprocess " << pid << " running "
+                      << message << " after " << timeout.count()
+                      << "ms timeout. " << kFailedGraphicsSubprocessDisclaimer;
+      }
+    }
+  });
+
+  SubprocessResult result = WaitForChild(message, pid);
+  {
+    std::unique_lock<std::mutex> lock(m);
+    child_exited = true;
+  }
+  cv.notify_all();
+  wait_thread.join();
+
+  if (child_timed_out) {
+    return SubprocessResult::kFailure;
+  }
+  return result;
+}
+
+// Waits for the child via poll() on its pidfd so a timeout can be
+// enforced; on timeout or poll failure the child is SIGKILLed.
+SubprocessResult WaitForChildWithTimeout(const std::string& message, pid_t pid,
+                                         android::base::unique_fd pidfd,
+                                         std::chrono::milliseconds timeout) {
+  auto cleanup = android::base::make_scope_guard([&]() {
+    kill(pid, SIGKILL);
+    WaitForChild(message, pid);
+  });
+
+  struct pollfd poll_info = {
+      .fd = pidfd.get(),
+      .events = POLLIN,
+  };
+  int ret = TEMP_FAILURE_RETRY(poll(&poll_info, 1, timeout.count()));
+  pidfd.reset();
+
+  if (ret < 0) {
+    LOG(ERROR) << "Failed to wait for subprocess " << pid << " running "
+               << message << ": poll failed with " << ret << ". "
+               << kFailedGraphicsSubprocessDisclaimer;
+    return SubprocessResult::kFailure;
+  }
+  if (ret == 0) {
+    LOG(ERROR) << "Subprocess " << pid << " running " << message
+               << " did not complete within " << timeout.count()
+               << "ms. Killing. " << kFailedGraphicsSubprocessDisclaimer;
+    return SubprocessResult::kFailure;
+  }
+
+  cleanup.Disable();
+  return WaitForChild(message, pid);
+}
+
+}  // namespace
+
+SubprocessResult DoWithSubprocessCheck(const std::string& message,
+                                       const std::function<void()>& function,
+                                       std::chrono::milliseconds timeout) {
+  LOG(VERBOSE) << "Running " << message << " in subprocess...";
+  pid_t pid = fork();
+  if (pid == 0) {
+    prctl(PR_SET_NAME, "gfxDtctCanSegv");
+    function();
+    std::exit(0);
+  }
+
+  LOG(VERBOSE) << "Waiting for subprocess " << pid << " running " << message
+               << "...";
+
+  SubprocessResult result = SubprocessResult::kFailure;
+
+  android::base::unique_fd pidfd(PidfdOpen(pid));
+  if (pidfd.get() >= 0) {
+    result = WaitForChildWithTimeout(message, pid, std::move(pidfd), timeout);
+  } else {
+    result = WaitForChildWithTimeoutFallback(message, pid, timeout);
+  }
+
+  if (result == SubprocessResult::kSuccess) {
+    LOG(VERBOSE) << "Subprocess running " << message << " succeeded. Running "
+                 << message << " in this process...";
+    function();
+    return SubprocessResult::kSuccess;
+  } else {
+    LOG(VERBOSE) << "Subprocess running " << message << " failed. Not running "
+                 << message << " in this process.";
+    return SubprocessResult::kFailure;
+  }
+}
+
+}  // namespace cuttlefish
\ No newline at end of file
diff --git a/host/libs/graphics_detector/subprocess.h b/host/libs/graphics_detector/subprocess.h
new file mode 100644
index 0000000..2bb0b4e
--- /dev/null
+++ b/host/libs/graphics_detector/subprocess.h
@@ -0,0 +1,36 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#pragma once
+
+#include <chrono>
+#include <functional>
+#include <string>
+
+namespace cuttlefish {
+
+enum class SubprocessResult {
+  kFailure,
+  kSuccess,
+};
+
+// Runs the given function in a forked subprocess first to check for
+// aborts/crashes/etc and then runs the given function in the current
+// process if the subprocess check succeeded.
+SubprocessResult DoWithSubprocessCheck(
+    const std::string& message, const std::function<void()>& function,
+    std::chrono::milliseconds timeout = std::chrono::milliseconds(5000));
+
+}  // namespace cuttlefish
\ No newline at end of file
diff --git a/host/libs/graphics_detector/vk.cpp b/host/libs/graphics_detector/vk.cpp
new file mode 100644
index 0000000..1493d0f
--- /dev/null
+++ b/host/libs/graphics_detector/vk.cpp
@@ -0,0 +1,1059 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/libs/graphics_detector/vk.h"
+
+#include <string>
+#include <unordered_set>
+#include <vector>
+
+#include <android-base/logging.h>
+#include <android-base/strings.h>
+
+VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE
+
+namespace cuttlefish {
+namespace {
+
+constexpr const bool kEnableValidationLayers = false;
+
+static VKAPI_ATTR VkBool32 VKAPI_CALL VulkanDebugCallback(
+    VkDebugUtilsMessageSeverityFlagBitsEXT severity,
+    VkDebugUtilsMessageTypeFlagsEXT,
+    const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData, void*) {
+  if (severity == VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT) {
+    LOG(VERBOSE) << pCallbackData->pMessage;
+  } else if (severity == VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT) {
+    LOG(INFO) << pCallbackData->pMessage;
+  } else if (severity == VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT) {
+    LOG(ERROR) << pCallbackData->pMessage;
+  } else if (severity == VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT) {
+    LOG(ERROR) << pCallbackData->pMessage;
+  }
+  return VK_FALSE;
+}
+
+uint32_t GetMemoryType(const vk::raii::PhysicalDevice& physical_device,
+                       uint32_t memory_type_mask,
+                       vk::MemoryPropertyFlags memory_properties) {
+  const auto props = physical_device.getMemoryProperties();
+  for (uint32_t i = 0; i < props.memoryTypeCount; i++) {
+    if (!(memory_type_mask & (1 << i))) {
+      continue;
+    }
+    if ((props.memoryTypes[i].propertyFlags & memory_properties) !=
+        memory_properties) {
+      continue;
+    }
+    return i;
+  }
+  return -1;
+}
+
+VkExpected<Vk::BufferWithMemory> DoCreateBuffer(
+    const vk::raii::PhysicalDevice& physical_device,
+    const vk::raii::Device& device, vk::DeviceSize buffer_size,
+    vk::BufferUsageFlags buffer_usages,
+    vk::MemoryPropertyFlags buffer_memory_properties) {
+  const vk::BufferCreateInfo buffer_create_info = {
+      .size = static_cast<VkDeviceSize>(buffer_size),
+      .usage = buffer_usages,
+      .sharingMode = vk::SharingMode::eExclusive,
+  };
+  auto buffer = VK_EXPECT(vk::raii::Buffer::create(device, buffer_create_info));
+
+  const auto buffer_memory_requirements = buffer.getMemoryRequirements();
+  const auto buffer_memory_type =
+      GetMemoryType(physical_device, buffer_memory_requirements.memoryTypeBits,
+                    buffer_memory_properties);
+
+  const vk::MemoryAllocateInfo buffer_memory_allocate_info = {
+      .allocationSize = buffer_memory_requirements.size,
+      .memoryTypeIndex = buffer_memory_type,
+  };
+  auto buffer_memory = VK_EXPECT(
+      vk::raii::DeviceMemory::create(device, buffer_memory_allocate_info));
+
+  buffer.bindMemory(*buffer_memory, 0);
+
+  return Vk::BufferWithMemory{
+      .buffer = std::move(buffer),
+      .buffer_memory = std::move(buffer_memory),
+  };
+}
+
+}  // namespace
+
+/*static*/
+std::optional<Vk> Vk::Load(
+    const std::vector<std::string>& requested_instance_extensions,
+    const std::vector<std::string>& requested_instance_layers,
+    const std::vector<std::string>& requested_device_extensions) {
+  VkExpected<Vk> vk =
+      LoadImpl(requested_instance_extensions, requested_instance_layers,
+               requested_device_extensions);
+  if (vk.ok()) {
+    return std::move(vk.value());
+  }
+  return std::nullopt;
+}
+
+/*static*/
+VkExpected<Vk> Vk::LoadImpl(
+    const std::vector<std::string>& requested_instance_extensions,
+    const std::vector<std::string>& requested_instance_layers,
+    const std::vector<std::string>& requested_device_extensions) {
+  vk::DynamicLoader loader;
+  VULKAN_HPP_DEFAULT_DISPATCHER.init(
+      loader.getProcAddress<PFN_vkGetInstanceProcAddr>(
+          "vkGetInstanceProcAddr"));
+
+  vk::raii::Context context;
+
+  const auto available_instance_layers =
+      context.enumerateInstanceLayerProperties();
+  LOG(VERBOSE) << "Available instance layers:";
+  for (const vk::LayerProperties& layer : available_instance_layers) {
+    LOG(VERBOSE) << layer.layerName;
+  }
+  LOG(VERBOSE) << "";
+
+  std::vector<const char*> requested_instance_extensions_chars;
+  requested_instance_extensions_chars.reserve(
+      requested_instance_extensions.size());
+  for (const auto& e : requested_instance_extensions) {
+    requested_instance_extensions_chars.push_back(e.c_str());
+  }
+  if (kEnableValidationLayers) {
+    requested_instance_extensions_chars.push_back(
+        VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
+  }
+
+  std::vector<const char*> requested_instance_layers_chars;
+  requested_instance_layers_chars.reserve(requested_instance_layers.size());
+  for (const auto& l : requested_instance_layers) {
+    requested_instance_layers_chars.push_back(l.c_str());
+  }
+
+  const vk::ApplicationInfo applicationInfo{
+      .pApplicationName = "Cuttlefish Graphics Detector",
+      .applicationVersion = 1,
+      .pEngineName = "Cuttlefish Graphics Detector",
+      .engineVersion = 1,
+      .apiVersion = VK_API_VERSION_1_2,
+  };
+  const vk::InstanceCreateInfo instance_create_info{
+      .pApplicationInfo = &applicationInfo,
+      .enabledLayerCount =
+          static_cast<uint32_t>(requested_instance_layers_chars.size()),
+      .ppEnabledLayerNames = requested_instance_layers_chars.data(),
+      .enabledExtensionCount =
+          static_cast<uint32_t>(requested_instance_extensions_chars.size()),
+      .ppEnabledExtensionNames = requested_instance_extensions_chars.data(),
+  };
+
+  auto instance =
+      VK_EXPECT(vk::raii::Instance::create(context, instance_create_info));
+
+  std::optional<vk::raii::DebugUtilsMessengerEXT> debug_messenger;
+  if (kEnableValidationLayers) {
+    const vk::DebugUtilsMessengerCreateInfoEXT debug_create_info = {
+        .messageSeverity = vk::DebugUtilsMessageSeverityFlagBitsEXT::eVerbose |
+                           vk::DebugUtilsMessageSeverityFlagBitsEXT::eWarning |
+                           vk::DebugUtilsMessageSeverityFlagBitsEXT::eError,
+        .messageType = vk::DebugUtilsMessageTypeFlagBitsEXT::eGeneral |
+                       vk::DebugUtilsMessageTypeFlagBitsEXT::eValidation |
+                       vk::DebugUtilsMessageTypeFlagBitsEXT::ePerformance,
+        .pfnUserCallback = VulkanDebugCallback,
+        .pUserData = nullptr,
+    };
+    debug_messenger = VK_EXPECT(
+        vk::raii::DebugUtilsMessengerEXT::create(instance, debug_create_info));
+  }
+
+  auto physical_devices =
+      VK_EXPECT(vk::raii::PhysicalDevices::create(instance));
+
+  LOG(VERBOSE) << "Available physical devices:";
+  for (const auto& physical_device : physical_devices) {
+    const auto physical_device_props = physical_device.getProperties();
+    LOG(VERBOSE) << physical_device_props.deviceName;
+  }
+  LOG(VERBOSE) << "";
+
+  vk::raii::PhysicalDevice physical_device = std::move(physical_devices[0]);
+  {
+    const auto props = physical_device.getProperties();
+    LOG(VERBOSE) << "Selected physical device: " << props.deviceName;
+    LOG(VERBOSE) << "";
+  }
+
+  std::unordered_set<std::string> available_device_extensions;
+  {
+    const auto exts = physical_device.enumerateDeviceExtensionProperties();
+    LOG(VERBOSE) << "Available physical device extensions:";
+    for (const auto& ext : exts) {
+      LOG(VERBOSE) << ext.extensionName;
+      available_device_extensions.emplace(ext.extensionName);
+    }
+    LOG(VERBOSE) << "";
+  }
+
+  const auto features2 =
+      physical_device
+          .getFeatures2<vk::PhysicalDeviceFeatures2,  //
+                        vk::PhysicalDeviceSamplerYcbcrConversionFeatures>();
+
+  bool ycbcr_conversion_needed = false;
+
+  std::vector<const char*> requested_device_extensions_chars;
+  requested_device_extensions_chars.reserve(requested_device_extensions.size());
+  for (const auto& e : requested_device_extensions) {
+    if (e == std::string(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME)) {
+      // The interface of VK_KHR_sampler_ycbcr_conversion was promoted to core
+      // in Vulkan 1.1 but the feature/functionality is still optional, so
+      // verify the samplerYcbcrConversion feature bit before enabling it:
+      const auto& sampler_features =
+          features2.get<vk::PhysicalDeviceSamplerYcbcrConversionFeatures>();
+
+      if (sampler_features.samplerYcbcrConversion == VK_FALSE) {
+        LOG(VERBOSE) << "Requested device extension " << e
+                     << " feature not available.";
+        return android::base::unexpected(vk::Result::eErrorExtensionNotPresent);
+      }
+      ycbcr_conversion_needed = true;
+    } else {
+      if (available_device_extensions.find(e) ==
+          available_device_extensions.end()) {
+        LOG(VERBOSE) << "Requested device extensions " << e
+                     << " not available.";
+        return android::base::unexpected(vk::Result::eErrorExtensionNotPresent);
+      }
+      requested_device_extensions_chars.push_back(e.c_str());
+    }
+  }
+
+  uint32_t queue_family_index = -1;
+  {
+    const auto props = physical_device.getQueueFamilyProperties();
+    for (uint32_t i = 0; i < props.size(); i++) {
+      const auto& prop = props[i];
+      if (prop.queueFlags & vk::QueueFlagBits::eGraphics) {
+        queue_family_index = i;
+        break;
+      }
+    }
+  }
+  LOG(VERBOSE) << "Graphics queue family index: " << queue_family_index;
+
+  const float queue_priority = 1.0f;
+  const vk::DeviceQueueCreateInfo device_queue_create_info = {
+      .queueFamilyIndex = queue_family_index,
+      .queueCount = 1,
+      .pQueuePriorities = &queue_priority,
+  };
+  const vk::PhysicalDeviceVulkan11Features device_enable_features = {
+      .samplerYcbcrConversion = ycbcr_conversion_needed,
+  };
+  const vk::DeviceCreateInfo device_create_info = {
+      .pNext = &device_enable_features,
+      .pQueueCreateInfos = &device_queue_create_info,
+      .queueCreateInfoCount = 1,
+      .enabledLayerCount =
+          static_cast<uint32_t>(requested_instance_layers_chars.size()),
+      .ppEnabledLayerNames = requested_instance_layers_chars.data(),
+      .enabledExtensionCount =
+          static_cast<uint32_t>(requested_device_extensions_chars.size()),
+      .ppEnabledExtensionNames = requested_device_extensions_chars.data(),
+  };
+  auto device =
+      VK_EXPECT(vk::raii::Device::create(physical_device, device_create_info));
+  auto queue = vk::raii::Queue(device, queue_family_index, 0);
+
+  const vk::CommandPoolCreateInfo command_pool_create_info = {
+      .queueFamilyIndex = queue_family_index,
+  };
+  auto command_pool = VK_EXPECT(
+      vk::raii::CommandPool::create(device, command_pool_create_info));
+
+  auto staging_buffer =
+      VK_EXPECT(DoCreateBuffer(physical_device, device, kStagingBufferSize,
+                               vk::BufferUsageFlagBits::eTransferDst |
+                                   vk::BufferUsageFlagBits::eTransferSrc,
+                               vk::MemoryPropertyFlagBits::eHostVisible |
+                                   vk::MemoryPropertyFlagBits::eHostCoherent));
+
+  return Vk(std::move(loader), std::move(context), std::move(instance),
+            std::move(debug_messenger), std::move(physical_device),
+            std::move(device), std::move(queue), queue_family_index,
+            std::move(command_pool), std::move(staging_buffer.buffer),
+            std::move(staging_buffer.buffer_memory));
+}
+
+VkExpected<Vk::BufferWithMemory> Vk::CreateBuffer(
+    vk::DeviceSize buffer_size, vk::BufferUsageFlags buffer_usages,
+    vk::MemoryPropertyFlags buffer_memory_properties) {
+  return DoCreateBuffer(vk_physical_device, vk_device, buffer_size,
+                        buffer_usages, buffer_memory_properties);
+}
+
+VkExpected<Vk::BufferWithMemory> Vk::CreateBufferWithData(
+    vk::DeviceSize buffer_size, vk::BufferUsageFlags buffer_usages,
+    vk::MemoryPropertyFlags buffer_memory_properties,
+    const uint8_t* buffer_data) {
+  auto buffer = VK_EXPECT(CreateBuffer(
+      buffer_size, buffer_usages | vk::BufferUsageFlagBits::eTransferDst,
+      buffer_memory_properties));
+
+  void* mapped = vk_staging_buffer_memory_.mapMemory(0, kStagingBufferSize);
+  if (mapped == nullptr) {
+    LOG(FATAL) << "Failed to map staging buffer.";
+  }
+
+  std::memcpy(mapped, buffer_data, buffer_size);
+  vk_staging_buffer_memory_.unmapMemory();
+
+  DoCommandsImmediate([&](vk::raii::CommandBuffer& cmd) {
+    const std::vector<vk::BufferCopy> regions = {
+        vk::BufferCopy{
+            .srcOffset = 0,
+            .dstOffset = 0,
+            .size = buffer_size,
+        },
+    };
+    cmd.copyBuffer(*vk_staging_buffer_, *buffer.buffer, regions);
+    return vk::Result::eSuccess;
+  });
+
+  return std::move(buffer);
+}
+
+VkExpected<Vk::ImageWithMemory> Vk::CreateImage(
+    uint32_t width, uint32_t height, vk::Format format,
+    vk::ImageUsageFlags usages, vk::MemoryPropertyFlags memory_properties,
+    vk::ImageLayout returned_layout) {
+  const vk::ImageCreateInfo image_create_info = {
+      .imageType = vk::ImageType::e2D,
+      .extent.width = width,
+      .extent.height = height,
+      .extent.depth = 1,
+      .mipLevels = 1,
+      .arrayLayers = 1,
+      .format = format,
+      .tiling = vk::ImageTiling::eOptimal,
+      .initialLayout = vk::ImageLayout::eUndefined,
+      .usage = usages,
+      .sharingMode = vk::SharingMode::eExclusive,
+      .samples = vk::SampleCountFlagBits::e1,
+  };
+  auto image = VK_EXPECT(vk::raii::Image::create(vk_device, image_create_info));
+
+  vk::MemoryRequirements memory_requirements = image.getMemoryRequirements();
+  const uint32_t memory_index =
+      GetMemoryType(vk_physical_device, memory_requirements.memoryTypeBits,
+                    memory_properties);
+
+  const vk::MemoryAllocateInfo image_memory_allocate_info = {
+      .allocationSize = memory_requirements.size,
+      .memoryTypeIndex = memory_index,
+  };
+  auto image_memory = VK_EXPECT(
+      vk::raii::DeviceMemory::create(vk_device, image_memory_allocate_info));
+
+  image.bindMemory(*image_memory, 0);
+
+  const vk::ImageViewCreateInfo image_view_create_info = {
+      .image = *image,
+      .viewType = vk::ImageViewType::e2D,
+      .format = format,
+      .components =
+          {
+              .r = vk::ComponentSwizzle::eIdentity,
+              .g = vk::ComponentSwizzle::eIdentity,
+              .b = vk::ComponentSwizzle::eIdentity,
+              .a = vk::ComponentSwizzle::eIdentity,
+          },
+      .subresourceRange =
+          {
+              .aspectMask = vk::ImageAspectFlagBits::eColor,
+              .baseMipLevel = 0,
+              .levelCount = 1,
+              .baseArrayLayer = 0,
+              .layerCount = 1,
+          },
+  };
+  auto image_view =
+      VK_EXPECT(vk::raii::ImageView::create(vk_device, image_view_create_info));
+
+  VK_ASSERT(DoCommandsImmediate([&](vk::raii::CommandBuffer& command_buffer) {
+    const std::vector<vk::ImageMemoryBarrier> image_memory_barriers = {
+        vk::ImageMemoryBarrier{
+            .oldLayout = vk::ImageLayout::eUndefined,
+            .newLayout = returned_layout,
+            .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
+            .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
+            .image = *image,
+            .subresourceRange =
+                {
+                    .aspectMask = vk::ImageAspectFlagBits::eColor,
+                    .baseMipLevel = 0,
+                    .levelCount = 1,
+                    .baseArrayLayer = 0,
+                    .layerCount = 1,
+                },
+            .srcAccessMask = {},
+            .dstAccessMask = vk::AccessFlagBits::eTransferWrite,
+        },
+    };
+    command_buffer.pipelineBarrier(
+        /*srcStageMask=*/vk::PipelineStageFlagBits::eAllCommands,
+        /*dstStageMask=*/vk::PipelineStageFlagBits::eAllCommands,
+        /*dependencyFlags=*/{},
+        /*memoryBarriers=*/{},
+        /*bufferMemoryBarriers=*/{},
+        /*imageMemoryBarriers=*/image_memory_barriers);
+
+    return vk::Result::eSuccess;
+  }));
+
+  return ImageWithMemory{
+      .image_memory = std::move(image_memory),
+      .image = std::move(image),
+      .image_view = std::move(image_view),
+  };
+}
+
+vk::Result Vk::DownloadImage(uint32_t width, uint32_t height,
+                             const vk::raii::Image& image,
+                             vk::ImageLayout current_layout,
+                             vk::ImageLayout returned_layout,
+                             std::vector<uint8_t>* out_pixels) {
+  VK_RETURN_IF_NOT_SUCCESS(
+      DoCommandsImmediate([&](vk::raii::CommandBuffer& command_buffer) {
+        if (current_layout != vk::ImageLayout::eTransferSrcOptimal) {
+          const std::vector<vk::ImageMemoryBarrier> image_memory_barriers = {
+              vk::ImageMemoryBarrier{
+                  .oldLayout = current_layout,
+                  .newLayout = vk::ImageLayout::eTransferSrcOptimal,
+                  .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
+                  .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
+                  .image = *image,
+                  .subresourceRange =
+                      {
+                          .aspectMask = vk::ImageAspectFlagBits::eColor,
+                          .baseMipLevel = 0,
+                          .levelCount = 1,
+                          .baseArrayLayer = 0,
+                          .layerCount = 1,
+                      },
+                  .srcAccessMask = vk::AccessFlagBits::eMemoryRead |
+                                   vk::AccessFlagBits::eMemoryWrite,
+                  .dstAccessMask = vk::AccessFlagBits::eTransferRead,
+              },
+          };
+          command_buffer.pipelineBarrier(
+              /*srcStageMask=*/vk::PipelineStageFlagBits::eAllCommands,
+              /*dstStageMask=*/vk::PipelineStageFlagBits::eAllCommands,
+              /*dependencyFlags=*/{},
+              /*memoryBarriers=*/{},
+              /*bufferMemoryBarriers=*/{},
+              /*imageMemoryBarriers=*/image_memory_barriers);
+        }
+
+        const std::vector<vk::BufferImageCopy> regions = {
+            vk::BufferImageCopy{
+                .bufferOffset = 0,
+                .bufferRowLength = 0,
+                .bufferImageHeight = 0,
+                .imageSubresource =
+                    {
+                        .aspectMask = vk::ImageAspectFlagBits::eColor,
+                        .mipLevel = 0,
+                        .baseArrayLayer = 0,
+                        .layerCount = 1,
+                    },
+                .imageOffset =
+                    {
+                        .x = 0,
+                        .y = 0,
+                        .z = 0,
+                    },
+                .imageExtent =
+                    {
+                        .width = width,
+                        .height = height,
+                        .depth = 1,
+                    },
+            },
+        };
+        command_buffer.copyImageToBuffer(*image,
+                                         vk::ImageLayout::eTransferSrcOptimal,
+                                         *vk_staging_buffer_, regions);
+
+        if (returned_layout != vk::ImageLayout::eTransferSrcOptimal) {
+          const std::vector<vk::ImageMemoryBarrier> image_memory_barriers = {
+              vk::ImageMemoryBarrier{
+                  .oldLayout = vk::ImageLayout::eTransferSrcOptimal,
+                  .newLayout = returned_layout,
+                  .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
+                  .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
+                  .image = *image,
+                  .subresourceRange =
+                      {
+                          .aspectMask = vk::ImageAspectFlagBits::eColor,
+                          .baseMipLevel = 0,
+                          .levelCount = 1,
+                          .baseArrayLayer = 0,
+                          .layerCount = 1,
+                      },
+                  .srcAccessMask = vk::AccessFlagBits::eTransferRead,
+                  .dstAccessMask = vk::AccessFlagBits::eMemoryRead |
+                                   vk::AccessFlagBits::eMemoryWrite,
+              },
+          };
+          command_buffer.pipelineBarrier(
+              /*srcStageMask=*/vk::PipelineStageFlagBits::eAllCommands,
+              /*dstStageMask=*/vk::PipelineStageFlagBits::eAllCommands,
+              /*dependencyFlags=*/{},
+              /*memoryBarriers=*/{},
+              /*bufferMemoryBarriers=*/{},
+              /*imageMemoryBarriers=*/image_memory_barriers);
+        }
+
+        return vk::Result::eSuccess;
+      }));
+
+  auto* mapped = reinterpret_cast<uint8_t*>(
+      vk_staging_buffer_memory_.mapMemory(0, kStagingBufferSize));
+  if (mapped == nullptr) {
+    LOG(ERROR) << "Failed to map staging buffer.";
+    return vk::Result::eErrorMemoryMapFailed;
+  }
+
+  out_pixels->clear();
+  out_pixels->resize(width * height * 4);
+  std::memcpy(out_pixels->data(), mapped, out_pixels->size());
+  vk_staging_buffer_memory_.unmapMemory();
+
+  return vk::Result::eSuccess;
+}
+
+VkExpected<Vk::YuvImageWithMemory> Vk::CreateYuvImage(
+    uint32_t width, uint32_t height, vk::ImageUsageFlags usages,
+    vk::MemoryPropertyFlags memory_properties, vk::ImageLayout layout) {
+  const vk::SamplerYcbcrConversionCreateInfo conversion_create_info = {
+      .format = vk::Format::eG8B8R83Plane420Unorm,
+      .ycbcrModel = vk::SamplerYcbcrModelConversion::eYcbcr601,
+      .ycbcrRange = vk::SamplerYcbcrRange::eItuNarrow,
+      .components =
+          {
+              .r = vk::ComponentSwizzle::eIdentity,
+              .g = vk::ComponentSwizzle::eIdentity,
+              .b = vk::ComponentSwizzle::eIdentity,
+              .a = vk::ComponentSwizzle::eIdentity,
+          },
+      .xChromaOffset = vk::ChromaLocation::eMidpoint,
+      .yChromaOffset = vk::ChromaLocation::eMidpoint,
+      .chromaFilter = vk::Filter::eLinear,
+      .forceExplicitReconstruction = VK_FALSE,
+  };
+  auto image_sampler_conversion =
+      VK_EXPECT(vk::raii::SamplerYcbcrConversion::create(
+          vk_device, conversion_create_info));
+
+  const vk::SamplerYcbcrConversionInfo sampler_conversion_info = {
+      .conversion = *image_sampler_conversion,
+  };
+  const vk::SamplerCreateInfo sampler_create_info = {
+      .pNext = &sampler_conversion_info,
+      .magFilter = vk::Filter::eLinear,
+      .minFilter = vk::Filter::eLinear,
+      .mipmapMode = vk::SamplerMipmapMode::eNearest,
+      .addressModeU = vk::SamplerAddressMode::eClampToEdge,
+      .addressModeV = vk::SamplerAddressMode::eClampToEdge,
+      .addressModeW = vk::SamplerAddressMode::eClampToEdge,
+      .mipLodBias = 0.0f,
+      .anisotropyEnable = VK_FALSE,
+      .maxAnisotropy = 1.0f,
+      .compareEnable = VK_FALSE,
+      .compareOp = vk::CompareOp::eLessOrEqual,
+      .minLod = 0.0f,
+      .maxLod = 0.25f,
+      .borderColor = vk::BorderColor::eIntTransparentBlack,
+      .unnormalizedCoordinates = VK_FALSE,
+  };
+  auto image_sampler =
+      VK_EXPECT(vk::raii::Sampler::create(vk_device, sampler_create_info));
+
+  const vk::ImageCreateInfo image_create_info = {
+      .imageType = vk::ImageType::e2D,
+      .extent.width = width,
+      .extent.height = height,
+      .extent.depth = 1,
+      .mipLevels = 1,
+      .arrayLayers = 1,
+      .format = vk::Format::eG8B8R83Plane420Unorm,
+      .tiling = vk::ImageTiling::eOptimal,
+      .initialLayout = vk::ImageLayout::eUndefined,
+      .usage = usages,
+      .sharingMode = vk::SharingMode::eExclusive,
+      .samples = vk::SampleCountFlagBits::e1,
+  };
+  auto image = VK_EXPECT(vk::raii::Image::create(vk_device, image_create_info));
+
+  vk::MemoryRequirements memory_requirements = image.getMemoryRequirements();
+
+  const uint32_t memory_index =
+      GetMemoryType(vk_physical_device, memory_requirements.memoryTypeBits,
+                    memory_properties);
+
+  const vk::MemoryAllocateInfo image_memory_allocate_info = {
+      .allocationSize = memory_requirements.size,
+      .memoryTypeIndex = memory_index,
+  };
+  auto image_memory = VK_EXPECT(
+      vk::raii::DeviceMemory::create(vk_device, image_memory_allocate_info));
+
+  image.bindMemory(*image_memory, 0);
+
+  const vk::ImageViewCreateInfo image_view_create_info = {
+      .pNext = &sampler_conversion_info,
+      .image = *image,
+      .viewType = vk::ImageViewType::e2D,
+      .format = vk::Format::eG8B8R83Plane420Unorm,
+      .components =
+          {
+              .r = vk::ComponentSwizzle::eIdentity,
+              .g = vk::ComponentSwizzle::eIdentity,
+              .b = vk::ComponentSwizzle::eIdentity,
+              .a = vk::ComponentSwizzle::eIdentity,
+          },
+      .subresourceRange =
+          {
+              .aspectMask = vk::ImageAspectFlagBits::eColor,
+              .baseMipLevel = 0,
+              .levelCount = 1,
+              .baseArrayLayer = 0,
+              .layerCount = 1,
+          },
+  };
+  auto image_view =
+      VK_EXPECT(vk::raii::ImageView::create(vk_device, image_view_create_info));
+
+  VK_ASSERT(DoCommandsImmediate([&](vk::raii::CommandBuffer& command_buffer) {
+    const std::vector<vk::ImageMemoryBarrier> image_memory_barriers = {
+        vk::ImageMemoryBarrier{
+            .oldLayout = vk::ImageLayout::eUndefined,
+            .newLayout = layout,
+            .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
+            .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
+            .image = *image,
+            .subresourceRange =
+                {
+                    .aspectMask = vk::ImageAspectFlagBits::eColor,
+                    .baseMipLevel = 0,
+                    .levelCount = 1,
+                    .baseArrayLayer = 0,
+                    .layerCount = 1,
+                },
+            .srcAccessMask = {},
+            .dstAccessMask = vk::AccessFlagBits::eTransferWrite,
+        },
+    };
+    command_buffer.pipelineBarrier(
+        /*srcStageMask=*/vk::PipelineStageFlagBits::eAllCommands,
+        /*dstStageMask=*/vk::PipelineStageFlagBits::eAllCommands,
+        /*dependencyFlags=*/{},
+        /*memoryBarriers=*/{},
+        /*bufferMemoryBarriers=*/{},
+        /*imageMemoryBarriers=*/image_memory_barriers);
+    return vk::Result::eSuccess;
+  }));
+
+  return YuvImageWithMemory{
+      .image_sampler_conversion = std::move(image_sampler_conversion),
+      .image_sampler = std::move(image_sampler),
+      .image_memory = std::move(image_memory),
+      .image = std::move(image),
+      .image_view = std::move(image_view),
+  };
+}
+
+vk::Result Vk::LoadYuvImage(const vk::raii::Image& image, uint32_t width,
+                            uint32_t height,
+                            const std::vector<uint8_t>& image_data_y,
+                            const std::vector<uint8_t>& image_data_u,
+                            const std::vector<uint8_t>& image_data_v,
+                            vk::ImageLayout current_layout,
+                            vk::ImageLayout returned_layout) {
+  auto* mapped = reinterpret_cast<uint8_t*>(
+      vk_staging_buffer_memory_.mapMemory(0, kStagingBufferSize));
+  if (mapped == nullptr) {
+    LOG(ERROR) << "Failed to map staging buffer.";
+    return vk::Result::eErrorMemoryMapFailed;
+  }
+
+  const VkDeviceSize y_offset = 0;
+  const VkDeviceSize u_offset = image_data_y.size();
+  const VkDeviceSize v_offset = image_data_y.size() + image_data_u.size();
+  std::memcpy(mapped + y_offset, image_data_y.data(), image_data_y.size());
+  std::memcpy(mapped + u_offset, image_data_u.data(), image_data_u.size());
+  std::memcpy(mapped + v_offset, image_data_v.data(), image_data_v.size());
+  vk_staging_buffer_memory_.unmapMemory();
+
+  return DoCommandsImmediate([&](vk::raii::CommandBuffer& command_buffer) {
+    if (current_layout != vk::ImageLayout::eTransferDstOptimal) {
+      const std::vector<vk::ImageMemoryBarrier> image_memory_barriers = {
+          vk::ImageMemoryBarrier{
+              .oldLayout = current_layout,
+              .newLayout = vk::ImageLayout::eTransferDstOptimal,
+              .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
+              .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
+              .image = *image,
+              .subresourceRange =
+                  {
+                      .aspectMask = vk::ImageAspectFlagBits::eColor,
+                      .baseMipLevel = 0,
+                      .levelCount = 1,
+                      .baseArrayLayer = 0,
+                      .layerCount = 1,
+                  },
+              .srcAccessMask = vk::AccessFlagBits::eMemoryRead |
+                               vk::AccessFlagBits::eMemoryWrite,
+              .dstAccessMask = vk::AccessFlagBits::eTransferWrite,
+          },
+      };
+      command_buffer.pipelineBarrier(
+          /*srcStageMask=*/vk::PipelineStageFlagBits::eAllCommands,
+          /*dstStageMask=*/vk::PipelineStageFlagBits::eAllCommands,
+          /*dependencyFlags=*/{},
+          /*memoryBarriers=*/{},
+          /*bufferMemoryBarriers=*/{},
+          /*imageMemoryBarriers=*/image_memory_barriers);
+    }
+
+    const std::vector<vk::BufferImageCopy> image_copy_regions = {
+        vk::BufferImageCopy{
+            .bufferOffset = y_offset,
+            .bufferRowLength = 0,
+            .bufferImageHeight = 0,
+            .imageSubresource =
+                {
+                    .aspectMask = vk::ImageAspectFlagBits::ePlane0,
+                    .mipLevel = 0,
+                    .baseArrayLayer = 0,
+                    .layerCount = 1,
+                },
+            .imageOffset =
+                {
+                    .x = 0,
+                    .y = 0,
+                    .z = 0,
+                },
+            .imageExtent =
+                {
+                    .width = width,
+                    .height = height,
+                    .depth = 1,
+                },
+        },
+        vk::BufferImageCopy{
+            .bufferOffset = u_offset,
+            .bufferRowLength = 0,
+            .bufferImageHeight = 0,
+            .imageSubresource =
+                {
+                    .aspectMask = vk::ImageAspectFlagBits::ePlane1,
+                    .mipLevel = 0,
+                    .baseArrayLayer = 0,
+                    .layerCount = 1,
+                },
+            .imageOffset =
+                {
+                    .x = 0,
+                    .y = 0,
+                    .z = 0,
+                },
+            .imageExtent =
+                {
+                    .width = width / 2,
+                    .height = height / 2,
+                    .depth = 1,
+                },
+        },
+        vk::BufferImageCopy{
+            .bufferOffset = v_offset,
+            .bufferRowLength = 0,
+            .bufferImageHeight = 0,
+            .imageSubresource =
+                {
+                    .aspectMask = vk::ImageAspectFlagBits::ePlane2,
+                    .mipLevel = 0,
+                    .baseArrayLayer = 0,
+                    .layerCount = 1,
+                },
+            .imageOffset =
+                {
+                    .x = 0,
+                    .y = 0,
+                    .z = 0,
+                },
+            .imageExtent =
+                {
+                    .width = width / 2,
+                    .height = height / 2,
+                    .depth = 1,
+                },
+        },
+    };
+    command_buffer.copyBufferToImage(*vk_staging_buffer_, *image,
+                                     vk::ImageLayout::eTransferDstOptimal,
+                                     image_copy_regions);
+
+    if (returned_layout != vk::ImageLayout::eTransferDstOptimal) {
+      const std::vector<vk::ImageMemoryBarrier> image_memory_barriers = {
+          vk::ImageMemoryBarrier{
+              .oldLayout = vk::ImageLayout::eTransferDstOptimal,
+              .newLayout = returned_layout,
+              .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
+              .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
+              .image = *image,
+              .subresourceRange =
+                  {
+                      .aspectMask = vk::ImageAspectFlagBits::eColor,
+                      .baseMipLevel = 0,
+                      .levelCount = 1,
+                      .baseArrayLayer = 0,
+                      .layerCount = 1,
+                  },
+              .srcAccessMask = vk::AccessFlagBits::eTransferWrite,
+              .dstAccessMask = vk::AccessFlagBits::eMemoryRead |
+                               vk::AccessFlagBits::eMemoryWrite,
+          },
+      };
+      command_buffer.pipelineBarrier(
+          /*srcStageMask=*/vk::PipelineStageFlagBits::eAllCommands,
+          /*dstStageMask=*/vk::PipelineStageFlagBits::eAllCommands,
+          /*dependencyFlags=*/{},
+          /*memoryBarriers=*/{},
+          /*bufferMemoryBarriers=*/{},
+          /*imageMemoryBarriers=*/image_memory_barriers);
+    }
+    return vk::Result::eSuccess;
+  });
+}
+
+VkExpected<Vk::FramebufferWithAttachments> Vk::CreateFramebuffer(
+    uint32_t width, uint32_t height, vk::Format color_format,
+    vk::Format depth_format) {
+  std::optional<Vk::ImageWithMemory> color_attachment;
+  if (color_format != vk::Format::eUndefined) {
+    color_attachment =
+        VK_EXPECT(CreateImage(width, height, color_format,
+                              vk::ImageUsageFlagBits::eColorAttachment |
+                                  vk::ImageUsageFlagBits::eTransferSrc,
+                              vk::MemoryPropertyFlagBits::eDeviceLocal,
+                              vk::ImageLayout::eColorAttachmentOptimal));
+  }
+
+  std::optional<Vk::ImageWithMemory> depth_attachment;
+  if (depth_format != vk::Format::eUndefined) {
+    depth_attachment =
+        VK_EXPECT(CreateImage(width, height, depth_format,
+                              vk::ImageUsageFlagBits::eDepthStencilAttachment |
+                                  vk::ImageUsageFlagBits::eTransferSrc,
+                              vk::MemoryPropertyFlagBits::eDeviceLocal,
+                              vk::ImageLayout::eDepthStencilAttachmentOptimal));
+  }
+
+  std::vector<vk::AttachmentDescription> attachments;
+
+  std::optional<vk::AttachmentReference> color_attachment_reference;
+  if (color_format != vk::Format::eUndefined) {
+    attachments.push_back(vk::AttachmentDescription{
+        .format = color_format,
+        .samples = vk::SampleCountFlagBits::e1,
+        .loadOp = vk::AttachmentLoadOp::eClear,
+        .storeOp = vk::AttachmentStoreOp::eStore,
+        .stencilLoadOp = vk::AttachmentLoadOp::eClear,
+        .stencilStoreOp = vk::AttachmentStoreOp::eStore,
+        .initialLayout = vk::ImageLayout::eColorAttachmentOptimal,
+        .finalLayout = vk::ImageLayout::eColorAttachmentOptimal,
+    });
+
+    color_attachment_reference = vk::AttachmentReference{
+        .attachment = static_cast<uint32_t>(attachments.size() - 1),
+        .layout = vk::ImageLayout::eColorAttachmentOptimal,
+    };
+  }
+
+  std::optional<vk::AttachmentReference> depth_attachment_reference;
+  if (depth_format != vk::Format::eUndefined) {
+    attachments.push_back(vk::AttachmentDescription{
+        .format = depth_format,
+        .samples = vk::SampleCountFlagBits::e1,
+        .loadOp = vk::AttachmentLoadOp::eClear,
+        .storeOp = vk::AttachmentStoreOp::eStore,
+        .stencilLoadOp = vk::AttachmentLoadOp::eClear,
+        .stencilStoreOp = vk::AttachmentStoreOp::eStore,
+        .initialLayout = vk::ImageLayout::eDepthStencilAttachmentOptimal,
+        .finalLayout = vk::ImageLayout::eDepthStencilAttachmentOptimal,
+    });
+
+    depth_attachment_reference = vk::AttachmentReference{
+        .attachment = static_cast<uint32_t>(attachments.size() - 1),
+        .layout = vk::ImageLayout::eDepthStencilAttachmentOptimal,
+    };
+  }
+
+  vk::SubpassDependency dependency = {
+      .srcSubpass = 0,
+      .dstSubpass = 0,
+      .srcStageMask = {},
+      .dstStageMask = vk::PipelineStageFlagBits::eFragmentShader,
+      .srcAccessMask = {},
+      .dstAccessMask = vk::AccessFlagBits::eInputAttachmentRead,
+      .dependencyFlags = vk::DependencyFlagBits::eByRegion,
+  };
+  if (color_format != vk::Format::eUndefined) {
+    dependency.srcStageMask |=
+        vk::PipelineStageFlagBits::eColorAttachmentOutput;
+    dependency.dstStageMask |=
+        vk::PipelineStageFlagBits::eColorAttachmentOutput;
+    dependency.srcAccessMask |= vk::AccessFlagBits::eColorAttachmentWrite;
+  }
+  if (depth_format != vk::Format::eUndefined) {
+    dependency.srcStageMask |=
+        vk::PipelineStageFlagBits::eLateFragmentTests;
+    dependency.dstStageMask |=
+        vk::PipelineStageFlagBits::eEarlyFragmentTests;
+    dependency.srcAccessMask |= vk::AccessFlagBits::eDepthStencilAttachmentWrite;
+  }
+
+  vk::SubpassDescription subpass = {
+      .pipelineBindPoint = vk::PipelineBindPoint::eGraphics,
+      .inputAttachmentCount = 0,
+      .pInputAttachments = nullptr,
+      .colorAttachmentCount = 0,
+      .pColorAttachments = nullptr,
+      .pResolveAttachments = nullptr,
+      .pDepthStencilAttachment = nullptr,
+      .pPreserveAttachments = nullptr,
+  };
+  if (color_format != vk::Format::eUndefined) {
+    subpass.colorAttachmentCount = 1;
+    subpass.pColorAttachments = &*color_attachment_reference;
+  }
+  if (depth_format != vk::Format::eUndefined) {
+    subpass.pDepthStencilAttachment = &*depth_attachment_reference;
+  }
+
+  const vk::RenderPassCreateInfo renderpass_create_info = {
+      .attachmentCount = static_cast<uint32_t>(attachments.size()),
+      .pAttachments = attachments.data(),
+      .subpassCount = 1,
+      .pSubpasses = &subpass,
+      .dependencyCount = 1,
+      .pDependencies = &dependency,
+  };
+  auto renderpass = VK_EXPECT(
+      vk::raii::RenderPass::create(vk_device, renderpass_create_info));
+
+  std::vector<vk::ImageView> framebuffer_attachments;
+  if (color_attachment) {
+    framebuffer_attachments.push_back(*color_attachment->image_view);
+  }
+  if (depth_attachment) {
+    framebuffer_attachments.push_back(*depth_attachment->image_view);
+  }
+  const vk::FramebufferCreateInfo framebuffer_create_info = {
+      .renderPass = *renderpass,
+      .attachmentCount = static_cast<uint32_t>(framebuffer_attachments.size()),
+      .pAttachments = framebuffer_attachments.data(),
+      .width = width,
+      .height = height,
+      .layers = 1,
+  };
+  auto framebuffer = VK_EXPECT(
+      vk::raii::Framebuffer::create(vk_device, framebuffer_create_info));
+
+  return Vk::FramebufferWithAttachments{
+      .color_attachment = std::move(color_attachment),
+      .depth_attachment = std::move(depth_attachment),
+      .renderpass = std::move(renderpass),
+      .framebuffer = std::move(framebuffer),
+  };
+}
+
+vk::Result Vk::DoCommandsImmediate(
+    const std::function<vk::Result(vk::raii::CommandBuffer&)>& func,
+    const std::vector<vk::raii::Semaphore>& semaphores_wait,
+    const std::vector<vk::raii::Semaphore>& semaphores_signal) {
+  const vk::CommandBufferAllocateInfo command_buffer_allocate_info = {
+      .level = vk::CommandBufferLevel::ePrimary,
+      .commandPool = *vk_command_pool_,
+      .commandBufferCount = 1,
+  };
+  auto command_buffers = VK_EXPECT_RESULT(vk::raii::CommandBuffers::create(
+      vk_device, command_buffer_allocate_info));
+  auto command_buffer = std::move(command_buffers[0]);
+
+  const vk::CommandBufferBeginInfo command_buffer_begin_info = {
+      .flags = vk::CommandBufferUsageFlagBits::eOneTimeSubmit,
+  };
+  command_buffer.begin(command_buffer_begin_info);
+
+  VK_RETURN_IF_NOT_SUCCESS(func(command_buffer));
+
+  command_buffer.end();
+
+  std::vector<vk::CommandBuffer> command_buffer_handles;
+  command_buffer_handles.push_back(*command_buffer);
+
+  std::vector<vk::Semaphore> semaphores_handles_wait;
+  semaphores_handles_wait.reserve(semaphores_wait.size());
+  for (const auto& s : semaphores_wait) {
+    semaphores_handles_wait.emplace_back(*s);
+  }
+
+  std::vector<vk::Semaphore> semaphores_handles_signal;
+  semaphores_handles_signal.reserve(semaphores_signal.size());
+  for (const auto& s : semaphores_signal) {
+    semaphores_handles_signal.emplace_back(*s);
+  }
+
+  vk::SubmitInfo submit_info = {
+      .commandBufferCount =
+          static_cast<uint32_t>(command_buffer_handles.size()),
+      .pCommandBuffers = command_buffer_handles.data(),
+  };
+  if (!semaphores_handles_wait.empty()) {
+    submit_info.waitSemaphoreCount =
+        static_cast<uint32_t>(semaphores_handles_wait.size());
+    submit_info.pWaitSemaphores = semaphores_handles_wait.data();
+  }
+  if (!semaphores_handles_signal.empty()) {
+    submit_info.signalSemaphoreCount =
+        static_cast<uint32_t>(semaphores_handles_signal.size());
+    submit_info.pSignalSemaphores = semaphores_handles_signal.data();
+  }
+  vk_queue.submit(submit_info);
+  vk_queue.waitIdle();
+
+  return vk::Result::eSuccess;
+}
+
+}  // namespace cuttlefish
diff --git a/host/libs/graphics_detector/vk.h b/host/libs/graphics_detector/vk.h
new file mode 100644
index 0000000..8007c7b
--- /dev/null
+++ b/host/libs/graphics_detector/vk.h
@@ -0,0 +1,221 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <functional>
+#include <optional>
+#include <string>
+#include <vector>
+
+#define VULKAN_HPP_DISPATCH_LOADER_DYNAMIC 1
+#define VULKAN_HPP_NO_CONSTRUCTORS
+#define VULKAN_HPP_NO_EXCEPTIONS
+#include <vulkan/vulkan_raii.hpp>
+#include <vulkan/vulkan_to_string.hpp>
+
+namespace cuttlefish {
+
+// For a function:
+//
+//   android::base::expected<vk::Type, vk::Result> Foo();
+//
+// simplifies
+//
+//   auto obj_expect = Foo();
+//   if (!obj_expect.ok()) {
+//     return obj_expect.error();
+//   }
+//   auto obj = std::move(obj_expect.value());
+//
+// to
+//
+//   auto obj = VK_EXPECT(Foo());
+#define VK_EXPECT(x)                                    \
+  ({                                                    \
+    auto vk_expect_android_base_expected = (x);         \
+    if (!vk_expect_android_base_expected.ok()) {        \
+      return android::base::unexpected(                 \
+          vk_expect_android_base_expected.error());     \
+    };                                                  \
+    std::move(vk_expect_android_base_expected.value()); \
+  })
+
+#define VK_EXPECT_RESULT(x)                             \
+  ({                                                    \
+    auto vk_expect_android_base_expected = (x);         \
+    if (!vk_expect_android_base_expected.ok()) {        \
+      return vk_expect_android_base_expected.error();   \
+    };                                                  \
+    std::move(vk_expect_android_base_expected.value()); \
+  })
+
+#define VK_RETURN_IF_NOT_SUCCESS(x)                    \
+  do {                                                 \
+    vk::Result result = (x);                           \
+    if (result != vk::Result::eSuccess) return result; \
+  } while (0);
+
+#define VK_RETURN_UNEXPECTED_IF_NOT_SUCCESS(x)  \
+  do {                                          \
+    vk::Result result = (x);                    \
+    if (result != vk::Result::eSuccess) {       \
+      return android::base::unexpected(result); \
+    }                                           \
+  } while (0);
+
+#define VK_ASSERT(x)                                                          \
+  do {                                                                        \
+    if (vk::Result result = (x); result != vk::Result::eSuccess) {            \
+      LOG(FATAL) << __FILE__ << ":" << __LINE__ << ":" << __PRETTY_FUNCTION__ \
+                 << ": " << #x << " returned " << to_string(result);          \
+    }                                                                         \
+  } while (0);
+
+template <typename VkType>
+using VkExpected = android::base::expected<VkType, vk::Result>;
+
+class Vk {
+ public:
+  static std::optional<Vk> Load(
+      const std::vector<std::string>& instance_extensions = {},
+      const std::vector<std::string>& instance_layers = {},
+      const std::vector<std::string>& device_extensions = {});
+
+  Vk(const Vk&) = delete;
+  Vk& operator=(const Vk&) = delete;
+
+  Vk(Vk&&) = default;
+  Vk& operator=(Vk&&) = default;
+
+  // Note: order is important for destruction.
+ private:
+  static VkExpected<Vk> LoadImpl(
+      const std::vector<std::string>& instance_extensions = {},
+      const std::vector<std::string>& instance_layers = {},
+      const std::vector<std::string>& device_extensions = {});
+
+  vk::DynamicLoader vk_loader_;
+  vk::raii::Context vk_context_;
+
+ public:
+  vk::raii::Instance vk_instance;
+
+ private:
+  std::optional<vk::raii::DebugUtilsMessengerEXT> vk_debug_messenger_;
+
+ public:
+  vk::raii::PhysicalDevice vk_physical_device;
+  vk::raii::Device vk_device;
+  vk::raii::Queue vk_queue;
+  uint32_t vk_queue_family_index;
+
+ private:
+  vk::raii::CommandPool vk_command_pool_;
+  static constexpr const VkDeviceSize kStagingBufferSize = 32 * 1024 * 1024;
+  vk::raii::Buffer vk_staging_buffer_;
+  vk::raii::DeviceMemory vk_staging_buffer_memory_;
+
+ public:
+  struct BufferWithMemory {
+    vk::raii::Buffer buffer;
+    vk::raii::DeviceMemory buffer_memory;
+  };
+
+  VkExpected<BufferWithMemory> CreateBuffer(
+      vk::DeviceSize buffer_size, vk::BufferUsageFlags buffer_usages,
+      vk::MemoryPropertyFlags buffer_memory_properties);
+
+  VkExpected<BufferWithMemory> CreateBufferWithData(
+      vk::DeviceSize buffer_size, vk::BufferUsageFlags buffer_usages,
+      vk::MemoryPropertyFlags buffer_memory_properties,
+      const uint8_t* buffer_data);
+
+  vk::Result DoCommandsImmediate(
+      const std::function<vk::Result(vk::raii::CommandBuffer&)>& func,
+      const std::vector<vk::raii::Semaphore>& semaphores_wait = {},
+      const std::vector<vk::raii::Semaphore>& semaphores_signal = {});
+
+  struct ImageWithMemory {
+    vk::raii::DeviceMemory image_memory;
+    vk::raii::Image image;
+    vk::raii::ImageView image_view;
+  };
+  VkExpected<ImageWithMemory> CreateImage(
+      uint32_t width, uint32_t height, vk::Format format,
+      vk::ImageUsageFlags usages, vk::MemoryPropertyFlags memory_properties,
+      vk::ImageLayout returned_layout);
+
+  vk::Result DownloadImage(uint32_t width, uint32_t height,
+                           const vk::raii::Image& image,
+                           vk::ImageLayout current_layout,
+                           vk::ImageLayout returned_layout,
+                           std::vector<uint8_t>* out_pixels);
+
+  struct YuvImageWithMemory {
+    vk::raii::SamplerYcbcrConversion image_sampler_conversion;
+    vk::raii::Sampler image_sampler;
+    vk::raii::DeviceMemory image_memory;
+    vk::raii::Image image;
+    vk::raii::ImageView image_view;
+  };
+
+  VkExpected<YuvImageWithMemory> CreateYuvImage(
+      uint32_t width, uint32_t height, vk::ImageUsageFlags usages,
+      vk::MemoryPropertyFlags memory_properties,
+      vk::ImageLayout returned_layout);
+
+  vk::Result LoadYuvImage(const vk::raii::Image& image, uint32_t width,
+                          uint32_t height,
+                          const std::vector<uint8_t>& image_data_y,
+                          const std::vector<uint8_t>& image_data_u,
+                          const std::vector<uint8_t>& image_data_v,
+                          vk::ImageLayout current_layout,
+                          vk::ImageLayout returned_layout);
+
+  struct FramebufferWithAttachments {
+    std::optional<ImageWithMemory> color_attachment;
+    std::optional<ImageWithMemory> depth_attachment;
+    vk::raii::RenderPass renderpass;
+    vk::raii::Framebuffer framebuffer;
+  };
+  VkExpected<FramebufferWithAttachments> CreateFramebuffer(
+      uint32_t width, uint32_t height,
+      vk::Format color_attachment_format = vk::Format::eUndefined,
+      vk::Format depth_attachment_format = vk::Format::eUndefined);
+
+ private:
+  Vk(vk::DynamicLoader loader, vk::raii::Context context,
+     vk::raii::Instance instance,
+     std::optional<vk::raii::DebugUtilsMessengerEXT> debug,
+     vk::raii::PhysicalDevice physical_device, vk::raii::Device device,
+     vk::raii::Queue queue, uint32_t queue_family_index,
+     vk::raii::CommandPool command_pool, vk::raii::Buffer staging_buffer,
+     vk::raii::DeviceMemory staging_buffer_memory)
+      : vk_loader_(std::move(loader)),
+        vk_context_(std::move(context)),
+        vk_instance(std::move(instance)),
+        vk_debug_messenger_(std::move(debug)),
+        vk_physical_device(std::move(physical_device)),
+        vk_device(std::move(device)),
+        vk_queue(std::move(queue)),
+        vk_queue_family_index(queue_family_index),
+        vk_command_pool_(std::move(command_pool)),
+        vk_staging_buffer_(std::move(staging_buffer)),
+        vk_staging_buffer_memory_(std::move(staging_buffer_memory)) {}
+};
+
+}  // namespace cuttlefish
diff --git a/host/libs/image_aggregator/Android.bp b/host/libs/image_aggregator/Android.bp
index 23d4874..2441889 100644
--- a/host/libs/image_aggregator/Android.bp
+++ b/host/libs/image_aggregator/Android.bp
@@ -36,17 +36,20 @@
     name: "libimage_aggregator",
     srcs: [
         "image_aggregator.cc",
+        "sparse_image_utils.cc",
     ],
     export_include_dirs: ["."],
     shared_libs: [
         "libcuttlefish_fs",
         "libcuttlefish_utils",
         "libbase",
+        "libjsoncpp",
         "libprotobuf-cpp-lite",
         "libz",
     ],
     static_libs: [
         "libcdisk_spec",
+        "libcuttlefish_host_config",
         "libext2_uuid",
         "libsparse",
     ],
diff --git a/host/libs/image_aggregator/cdisk_spec.proto b/host/libs/image_aggregator/cdisk_spec.proto
index 771c7ce..9b31a0d 100644
--- a/host/libs/image_aggregator/cdisk_spec.proto
+++ b/host/libs/image_aggregator/cdisk_spec.proto
@@ -1,3 +1,18 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 syntax = "proto3";
 
 enum ReadWriteCapability {
diff --git a/host/libs/image_aggregator/image_aggregator.cc b/host/libs/image_aggregator/image_aggregator.cc
index 49ec27e..e2e7e66 100644
--- a/host/libs/image_aggregator/image_aggregator.cc
+++ b/host/libs/image_aggregator/image_aggregator.cc
@@ -45,6 +45,7 @@
 #include "common/libs/utils/size_utils.h"
 #include "common/libs/utils/subprocess.h"
 #include "host/libs/config/mbr.h"
+#include "host/libs/image_aggregator/sparse_image_utils.h"
 
 namespace cuttlefish {
 namespace {
@@ -452,37 +453,9 @@
  */
 void DeAndroidSparse(const std::vector<ImagePartition>& partitions) {
   for (const auto& partition : partitions) {
-    auto fd = open(partition.image_file_path.c_str(), O_RDONLY);
-    if (fd < 0) {
-      PLOG(FATAL) << "Could not open \"" << partition.image_file_path;
-      break;
+    if (!ConvertToRawImage(partition.image_file_path)) {
+      LOG(DEBUG) << "Failed to desparse " << partition.image_file_path;
     }
-    auto sparse = sparse_file_import(fd, /* verbose */ false, /* crc */ false);
-    if (!sparse) {
-      close(fd);
-      continue;
-    }
-    LOG(INFO) << "Desparsing " << partition.image_file_path;
-    std::string out_file_name = partition.image_file_path + ".desparse";
-    auto write_fd = open(out_file_name.c_str(), O_RDWR | O_CREAT | O_TRUNC,
-                         S_IRUSR | S_IWUSR | S_IRGRP);
-    if (write_fd < 0) {
-      PLOG(FATAL) << "Could not open " << out_file_name;
-    }
-    int write_status = sparse_file_write(sparse, write_fd, /* gz */ false,
-                                         /* sparse */ false, /* crc */ false);
-    if (write_status < 0) {
-      LOG(FATAL) << "Failed to desparse \"" << partition.image_file_path
-                 << "\": " << write_status;
-    }
-    close(write_fd);
-    if (rename(out_file_name.c_str(), partition.image_file_path.c_str()) < 0) {
-      int error_num = errno;
-      LOG(FATAL) << "Could not move \"" << out_file_name << "\" to \""
-                 << partition.image_file_path << "\": " << strerror(error_num);
-    }
-    sparse_file_destroy(sparse);
-    close(fd);
   }
 }
 
@@ -531,6 +504,7 @@
                          const std::string& header_file,
                          const std::string& footer_file,
                          const std::string& output_composite_path) {
+  DeAndroidSparse(partitions);
   std::vector<MultipleImagePartition> multiple_image_partitions;
   for (const auto& partition : partitions) {
     multiple_image_partitions.push_back(ToMultipleImagePartition(partition));
@@ -571,7 +545,8 @@
                        const std::string& output_overlay_path) {
   Command cmd(crosvm_path);
   cmd.AddParameter("create_qcow2");
-  cmd.AddParameter("--backing_file=", backing_file);
+  cmd.AddParameter("--backing-file");
+  cmd.AddParameter(backing_file);
   cmd.AddParameter(output_overlay_path);
 
   std::string stdout_str;
@@ -581,7 +556,7 @@
 
   if (success != 0) {
     LOG(ERROR) << "Failed to run `" << crosvm_path
-               << " create_qcow2 --backing_file=" << backing_file << " "
+               << " create_qcow2 --backing-file " << backing_file << " "
                << output_overlay_path << "`";
     LOG(ERROR) << "stdout:\n###\n" << stdout_str << "\n###";
     LOG(ERROR) << "stderr:\n###\n" << stderr_str << "\n###";
diff --git a/host/libs/image_aggregator/sparse_image_utils.cc b/host/libs/image_aggregator/sparse_image_utils.cc
new file mode 100644
index 0000000..7cd1187
--- /dev/null
+++ b/host/libs/image_aggregator/sparse_image_utils.cc
@@ -0,0 +1,77 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "host/libs/image_aggregator/sparse_image_utils.h"
+
+#include <string.h>
+
+#include <fstream>
+
+#include <android-base/logging.h>
+
+#include "common/libs/utils/subprocess.h"
+#include "host/libs/config/cuttlefish_config.h"
+
+const char ANDROID_SPARSE_IMAGE_MAGIC[] = "\x3A\xFF\x26\xED";
+namespace cuttlefish {
+
+bool IsSparseImage(const std::string& image_path) {
+  std::ifstream file(image_path, std::ios::binary);
+  if (!file) {
+    LOG(FATAL) << "Could not open " << image_path;
+    return false;
+  }
+  char buffer[5] = {0};
+  file.read(buffer, 4);
+  file.close();
+  return strcmp(ANDROID_SPARSE_IMAGE_MAGIC, buffer) == 0;
+}
+
+bool ConvertToRawImage(const std::string& image_path) {
+  if (!IsSparseImage(image_path)) {
+    LOG(DEBUG) << "Skip non-sparse image " << image_path;
+    return false;
+  }
+
+  auto simg2img_path = HostBinaryPath("simg2img");
+  Command simg2img_cmd(simg2img_path);
+  std::string tmp_raw_image_path = image_path + ".raw";
+  simg2img_cmd.AddParameter(image_path);
+  simg2img_cmd.AddParameter(tmp_raw_image_path);
+
+  // Use simg2img to convert sparse image to raw image.
+  int success = simg2img_cmd.Start().Wait();
+  if (success != 0) {
+    LOG(FATAL) << "Unable to convert Android sparse image " << image_path
+               << " to raw image. " << success;
+    return false;
+  }
+
+  // Replace the original sparse image with the raw image.
+  Command mv_cmd("/bin/mv");
+  mv_cmd.AddParameter("-f");
+  mv_cmd.AddParameter(tmp_raw_image_path);
+  mv_cmd.AddParameter(image_path);
+  success = mv_cmd.Start().Wait();
+  if (success != 0) {
+    LOG(FATAL) << "Unable to replace original sparse image " << success;
+    return false;
+  }
+
+  return true;
+}
+
+}  // namespace cuttlefish
diff --git a/host/libs/image_aggregator/sparse_image_utils.h b/host/libs/image_aggregator/sparse_image_utils.h
new file mode 100644
index 0000000..bf9b335
--- /dev/null
+++ b/host/libs/image_aggregator/sparse_image_utils.h
@@ -0,0 +1,25 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <string>
+
+namespace cuttlefish {
+
+bool IsSparseImage(const std::string& image_path);
+
+bool ConvertToRawImage(const std::string& image_path);
+
+}  // namespace cuttlefish
diff --git a/host/libs/location/Android.bp b/host/libs/location/Android.bp
new file mode 100644
index 0000000..0f0bb7c
--- /dev/null
+++ b/host/libs/location/Android.bp
@@ -0,0 +1,54 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+cc_library_static {
+    name: "liblocation",
+    srcs: [
+        "StringParse.cpp",
+        "GpxParser.cpp",
+        "KmlParser.cpp",
+        "GnssClient.cpp",
+    ],
+    export_include_dirs: ["."],
+    shared_libs: [
+        "libbase",
+        "libcuttlefish_fs",
+        "libcuttlefish_utils",
+        "libjsoncpp",
+        "libprotobuf-cpp-full",
+        "libgrpc++_unsecure",
+        "libxml2",
+    ],
+    static_libs: [
+        "libbase",
+        "libcuttlefish_fs",
+        "libcuttlefish_utils",
+        "libjsoncpp",
+        "libcvd_gnss_grpc_proxy",
+    ],
+    cflags: [
+        "-Wno-unused-parameter",
+        "-D_XOPEN_SOURCE",
+    ],
+    defaults: ["cuttlefish_host"],
+    include_dirs: [
+        "external/grpc-grpc/include",
+        "external/protobuf/src",
+    ],
+}
\ No newline at end of file
diff --git a/host/libs/location/GnssClient.cpp b/host/libs/location/GnssClient.cpp
new file mode 100644
index 0000000..9f04f1c
--- /dev/null
+++ b/host/libs/location/GnssClient.cpp
@@ -0,0 +1,63 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "GnssClient.h"
+#include <android-base/logging.h>
+#include <host/libs/config/logging.h>
+#include <cassert>
+#include <string>
+
+using gnss_grpc_proxy::GnssGrpcProxy;
+using gnss_grpc_proxy::GpsCoordinates;
+using gnss_grpc_proxy::SendGpsCoordinatesReply;
+using gnss_grpc_proxy::SendGpsCoordinatesRequest;
+using grpc::ClientContext;
+
+namespace cuttlefish {
+
+GnssClient::GnssClient(const std::shared_ptr<grpc::Channel>& channel)
+    : stub_(GnssGrpcProxy::NewStub(channel)) {}
+
+Result<grpc::Status> GnssClient::SendGpsLocations(
+    int delay, const GpsFixArray& coordinates) {
+  // Data we are sending to the server.
+  SendGpsCoordinatesRequest request;
+  request.set_delay(delay);
+  for (const auto& loc : coordinates) {
+    GpsCoordinates* curr = request.add_coordinates();
+    curr->set_longitude(loc.longitude);
+    curr->set_latitude(loc.latitude);
+    curr->set_elevation(loc.elevation);
+  }
+
+  // Container for the data we expect from the server.
+  SendGpsCoordinatesReply reply;
+  // Context for the client. It could be used to convey extra information to
+  // the server and/or tweak certain RPC behaviors.
+  ClientContext context;
+  // The actual RPC.
+  grpc::Status status = stub_->SendGpsVector(&context, request, &reply);
+  // Act upon its status.
+  CF_EXPECT(status.ok(), "GPS data sending failed" << status.error_code()
+                                                   << ": "
+                                                   << status.error_message());
+
+  LOG(DEBUG) << reply.status();
+
+  return status;
+}
+
+}  // namespace cuttlefish
diff --git a/host/libs/location/GnssClient.h b/host/libs/location/GnssClient.h
new file mode 100644
index 0000000..0cda44b
--- /dev/null
+++ b/host/libs/location/GnssClient.h
@@ -0,0 +1,37 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+#include <grpc/grpc.h>
+#include <grpcpp/channel.h>
+#include <grpcpp/client_context.h>
+#include <grpcpp/create_channel.h>
+#include "common/libs/utils/result.h"
+#include "gnss_grpc_proxy.grpc.pb.h"
+#include "host/libs/location/GpsFix.h"
+
+namespace cuttlefish {
+class GnssClient {
+ public:
+  GnssClient(const std::shared_ptr<grpc::Channel>& channel);
+
+  Result<grpc::Status> SendGpsLocations(
+      int delay, const GpsFixArray& coordinates);
+
+ private:
+  std::unique_ptr<gnss_grpc_proxy::GnssGrpcProxy::Stub> stub_;
+};
+}  // namespace cuttlefish
diff --git a/host/libs/location/GpsFix.h b/host/libs/location/GpsFix.h
new file mode 100644
index 0000000..b7e2f72
--- /dev/null
+++ b/host/libs/location/GpsFix.h
@@ -0,0 +1,36 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <time.h>
+
+#include <string>
+#include <vector>
+
+// A struct representing a location on a map
+struct GpsFix {
+  std::string name;
+  std::string description;
+  float latitude = 0.0;
+  float longitude = 0.0;
+  float elevation = 0.0;
+  time_t time = 0;
+
+  bool operator<(const GpsFix &other) const { return time < other.time; }
+};
+
+typedef std::vector<GpsFix> GpsFixArray;
diff --git a/host/libs/location/GpxParser.cpp b/host/libs/location/GpxParser.cpp
new file mode 100644
index 0000000..2057a0c
--- /dev/null
+++ b/host/libs/location/GpxParser.cpp
@@ -0,0 +1,223 @@
+/*
+ * Copyright (C) 2015-2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "GpxParser.h"
+#include <libxml/parser.h>
+#include <string.h>
+#include <time.h>
+#include <algorithm>
+#include "StringParse.h"
+
+using std::string;
+
+// format an error message
+template <class... Args>
+static string formatError(const char *format, Args &&...args) {
+  char buf[100] = {};
+  snprintf(buf, sizeof(buf) - 1, format, std::forward<Args>(args)...);
+  return buf;
+}
+
+static void cleanupXmlDoc(xmlDoc *doc) {
+  xmlFreeDoc(doc);
+  xmlCleanupParser();
+}
+
+static bool parseLocation(xmlNode *ptNode, xmlDoc *doc, GpsFix *result,
+                          string *error) {
+  float latitude;
+  float longitude;
+
+  xmlAttrPtr attr;
+  xmlChar *tmpStr;
+
+  // Check for and get the latitude attribute
+  attr = xmlHasProp(ptNode, (const xmlChar *)"lat");
+  if (!attr || !(tmpStr = xmlGetProp(ptNode, (const xmlChar *)"lat"))) {
+    *error = formatError("Point missing a latitude on line %d.", ptNode->line);
+    return false;  // Return error since a point *must* have a latitude
+  } else {
+    int read = SscanfWithCLocale(reinterpret_cast<const char *>(tmpStr), "%f",
+                                 &latitude);
+    xmlFree(tmpStr);  // Caller-freed
+    if (read != 1) {
+      return false;
+    }
+  }
+
+  // Check for and get the longitude attribute
+  attr = xmlHasProp(ptNode, (const xmlChar *)"lon");
+  if (!attr || !(tmpStr = xmlGetProp(ptNode, (const xmlChar *)"lon"))) {
+    *error = formatError("Point missing a longitude on line %d.", ptNode->line);
+    return false;  // Return error since a point *must* have a longitude
+  } else {
+    int read = SscanfWithCLocale(reinterpret_cast<const char *>(tmpStr), "%f",
+                                 &longitude);
+    xmlFree(tmpStr);  // Caller-freed
+    if (read != 1) {
+      return false;
+    }
+  }
+
+  // The result will be valid if this point is reached
+  result->latitude = latitude;
+  result->longitude = longitude;
+
+  // Check for potential child nodes (including time, elevation, name, and
+  // description). Note that none are actually required according to the GPX
+  // format.
+  int childCount = 0;
+  for (xmlNode *field = ptNode->children; field; field = field->next) {
+    tmpStr = nullptr;
+
+    if (!strcmp((const char *)field->name, "time")) {
+      if ((tmpStr = xmlNodeListGetString(doc, field->children, 1))) {
+        // Convert to a number
+        struct tm time = {};
+        time.tm_isdst = -1;
+        int results = sscanf((const char *)tmpStr, "%u-%u-%uT%u:%u:%u",
+                             &time.tm_year, &time.tm_mon, &time.tm_mday,
+                             &time.tm_hour, &time.tm_min, &time.tm_sec);
+        if (results != 6) {
+          *error = formatError(
+              "Improperly formatted time on line %d.<br/>"
+              "Times must be in ISO format.",
+              ptNode->line);
+          return false;
+        }
+
+        // Correct according to the struct tm specification
+        time.tm_year -= 1900;  // Years since 1900
+        time.tm_mon -= 1;      // Months since January, 0-11
+
+        result->time = mktime(&time);
+        xmlFree(tmpStr);  // Caller-freed
+        childCount++;
+      }
+    } else if (!strcmp((const char *)field->name, "ele")) {
+      if ((tmpStr = xmlNodeListGetString(doc, field->children, 1))) {
+        int read = SscanfWithCLocale(reinterpret_cast<const char *>(tmpStr),
+                                     "%f", &result->elevation);
+        xmlFree(tmpStr);  // Caller-freed
+        if (read != 1) {
+          return false;
+        }
+        childCount++;
+      }
+    } else if (!strcmp((const char *)field->name, "name")) {
+      if ((tmpStr = xmlNodeListGetString(doc, field->children, 1))) {
+        result->name = reinterpret_cast<const char *>(tmpStr);
+        xmlFree(tmpStr);  // Caller-freed
+        childCount++;
+      }
+    } else if (!strcmp((const char *)field->name, "desc")) {
+      if ((tmpStr = xmlNodeListGetString(doc, field->children, 1))) {
+        result->description = reinterpret_cast<const char *>(tmpStr);
+        xmlFree(tmpStr);  // Caller-freed
+        childCount++;
+      }
+    }
+
+    // We only care about 4 potential child fields, so quit after finding those
+    if (childCount == 4) {
+      break;
+    }
+  }
+
+  return true;
+}
+
+static bool parse(xmlDoc *doc, GpsFixArray *fixes, string *error) {
+  xmlNode *root = xmlDocGetRootElement(doc);
+  GpsFix location;
+  bool isOk;
+
+  for (xmlNode *child = root->children; child; child = child->next) {
+    // Individual <wpt> elements are parsed on their own
+    if (!strcmp((const char *)child->name, "wpt")) {
+      isOk = parseLocation(child, doc, &location, error);
+      if (!isOk) {
+        cleanupXmlDoc(doc);
+        return false;
+      }
+      fixes->push_back(location);
+    }
+
+    // <rte> elements require an additional depth of parsing
+    else if (!strcmp((const char *)child->name, "rte")) {
+      for (xmlNode *rtept = child->children; rtept; rtept = rtept->next) {
+        // <rtept> elements are parsed just like <wpt> elements
+        if (!strcmp((const char *)rtept->name, "rtept")) {
+          isOk = parseLocation(rtept, doc, &location, error);
+          if (!isOk) {
+            cleanupXmlDoc(doc);
+            return false;
+          }
+          fixes->push_back(location);
+        }
+      }
+    }
+
+    // <trk> elements require two additional depths of parsing
+    else if (!strcmp((const char *)child->name, "trk")) {
+      for (xmlNode *trkseg = child->children; trkseg; trkseg = trkseg->next) {
+        // Skip non <trkseg> elements
+        if (!strcmp((const char *)trkseg->name, "trkseg")) {
+          // <trkseg> elements require an additional depth of parsing
+          for (xmlNode *trkpt = trkseg->children; trkpt; trkpt = trkpt->next) {
+            // <trkpt> elements are parsed just like <wpt> elements
+            if (!strcmp((const char *)trkpt->name, "trkpt")) {
+              isOk = parseLocation(trkpt, doc, &location, error);
+              if (!isOk) {
+                cleanupXmlDoc(doc);
+                return false;
+              }
+              fixes->push_back(location);
+            }
+          }
+        }
+      }
+    }
+  }
+
+  // Sort the values by timestamp
+  std::sort(fixes->begin(), fixes->end());
+
+  cleanupXmlDoc(doc);
+  return true;
+}
+
+bool GpxParser::parseFile(const char *filePath, GpsFixArray *fixes,
+                          string *error) {
+  xmlDocPtr doc = xmlReadFile(filePath, nullptr, 0);
+  if (doc == nullptr) {
+    cleanupXmlDoc(doc);
+    *error = "GPX document not parsed successfully.";
+    return false;
+  }
+  return parse(doc, fixes, error);
+}
+
+bool GpxParser::parseString(const char *str, int len, GpsFixArray *fixes,
+                            string *error) {
+  xmlDocPtr doc = xmlReadMemory(str, len, NULL, NULL, 0);
+  if (doc == nullptr) {
+    cleanupXmlDoc(doc);
+    *error = "GPX document not parsed successfully.";
+    return false;
+  }
+  return parse(doc, fixes, error);
+}
\ No newline at end of file
diff --git a/host/libs/location/GpxParser.h b/host/libs/location/GpxParser.h
new file mode 100644
index 0000000..a2714a7
--- /dev/null
+++ b/host/libs/location/GpxParser.h
@@ -0,0 +1,34 @@
+/*
+ * Copyright (C) 2015-2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include "GpsFix.h"
+
+class GpxParser {
+ public:
+  /* Parses a given .gpx file at |filePath| and extracts all contained GPS
+   * fixes into |*fixes|.
+   *
+   * Returns true on success, false otherwise. If false is returned, |*error|
+   * is set to a string describing the error.
+   */
+  static bool parseFile(const char *filePath, GpsFixArray *fixes,
+                        std::string *error);
+
+  static bool parseString(const char *str, int len, GpsFixArray *fixes,
+                          std::string *error);
+};
\ No newline at end of file
diff --git a/host/libs/location/KmlParser.cpp b/host/libs/location/KmlParser.cpp
new file mode 100644
index 0000000..9f21e2c
--- /dev/null
+++ b/host/libs/location/KmlParser.cpp
@@ -0,0 +1,213 @@
+/*
+ * Copyright (C) 2015-2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "KmlParser.h"
+#include <libxml/parser.h>
+#include <string.h>
+#include <unistd.h>
+#include <string>
+#include <utility>
+#include "StringParse.h"
+using std::string;
+
+// Coordinates can be nested arbitrarily deep within a Placemark, depending
+// on the type of object (Point, LineString, Polygon) the Placemark contains
+static xmlNode* findCoordinates(xmlNode* current) {
+  for (; current != nullptr; current = current->next) {
+    if (!strcmp((const char*)current->name, "coordinates")) {
+      return current;
+    }
+    xmlNode* children = findCoordinates(current->xmlChildrenNode);
+    if (children != nullptr) {
+      return children;
+    }
+  }
+  return nullptr;
+}
+
+// Coordinates have the following format:
+//        <coordinates> -112.265654928602,36.09447672602546,2357
+//                ...
+//                -112.2657374587321,36.08646312301303,2357
+//        </coordinates>
+// often entirely contained in a single string, necessitating repeated scanning
+static bool parseCoordinates(xmlNode* current, GpsFixArray* fixes) {
+  xmlNode* coordinates_node = findCoordinates(current);
+  bool result = true;
+  if (coordinates_node == nullptr ||
+      coordinates_node->xmlChildrenNode == nullptr ||
+      coordinates_node->xmlChildrenNode->content == nullptr) {
+    return false;
+  }
+
+  const char* coordinates =
+      (const char*)(coordinates_node->xmlChildrenNode->content);
+  int coordinates_len = strlen(coordinates);
+  int offset = 0, n = 0;
+  GpsFix new_fix;
+  while (3 == SscanfWithCLocale(coordinates + offset, "%f , %f , %f%n",
+                                &new_fix.longitude, &new_fix.latitude,
+                                &new_fix.elevation, &n)) {
+    fixes->push_back(new_fix);
+    offset += n;
+  }
+
+  // Only allow whitespace at the end of the string to remain unconsumed.
+  for (int i = offset; i < coordinates_len && result; ++i) {
+    result = isspace(coordinates[i]);
+  }
+
+  return result;
+}
+
+static bool parseGxTrack(xmlNode* children, GpsFixArray* fixes) {
+  bool result = true;
+  for (xmlNode* current = children; result && current != nullptr;
+       current = current->next) {
+    if (current->ns && current->ns->prefix &&
+        !strcmp((const char*)current->ns->prefix, "gx") &&
+        !strcmp((const char*)current->name, "coord")) {
+      std::string coordinates{(const char*)current->xmlChildrenNode->content};
+      GpsFix new_fix;
+      result = (3 == SscanfWithCLocale(coordinates.c_str(), "%f %f %f",
+                                       &new_fix.longitude, &new_fix.latitude,
+                                       &new_fix.elevation));
+      fixes->push_back(new_fix);
+    }
+  }
+  return result;
+}
+
+static bool parsePlacemark(xmlNode* current, GpsFixArray* fixes) {
+  string description;
+  string name;
+  size_t ind = string::npos;
+  // not worried about case-sensitivity since .kml files
+  // are expected to be machine-generated
+  for (; current != nullptr; current = current->next) {
+    const bool hasContent =
+        current->xmlChildrenNode && current->xmlChildrenNode->content;
+
+    if (hasContent && !strcmp((const char*)current->name, "description")) {
+      description = (const char*)current->xmlChildrenNode->content;
+    } else if (hasContent && !strcmp((const char*)current->name, "name")) {
+      name = (const char*)current->xmlChildrenNode->content;
+    } else if (!strcmp((const char*)current->name, "Point") ||
+               !strcmp((const char*)current->name, "LineString") ||
+               !strcmp((const char*)current->name, "Polygon")) {
+      ind = (ind != string::npos ? ind : fixes->size());
+      if (!parseCoordinates(current->xmlChildrenNode, fixes)) {
+        return false;
+      }
+    } else if (current->ns && current->ns->prefix &&
+               !strcmp((const char*)current->ns->prefix, "gx") &&
+               !strcmp((const char*)current->name, "Track")) {
+      ind = (ind != string::npos ? ind : fixes->size());
+      if (!parseGxTrack(current->xmlChildrenNode, fixes)) {
+        return false;
+      }
+    }
+  }
+
+  if (ind == string::npos || ind >= fixes->size()) {
+    return false;
+  }
+
+  // only assign name and description to the first of the
+  // points to avoid needless repetition
+  (*fixes)[ind].description = std::move(description);
+  (*fixes)[ind].name = std::move(name);
+
+  return true;
+}
+
+// Placemarks (aka locations) can be nested arbitrarily deep
+static bool traverseSubtree(xmlNode* current, GpsFixArray* fixes,
+                            string* error) {
+  for (; current; current = current->next) {
+    if (current->name != nullptr &&
+        !strcmp((const char*)current->name, "Placemark")) {
+      if (!parsePlacemark(current->xmlChildrenNode, fixes)) {
+        *error = "Location found with missing or malformed coordinates";
+        return false;
+      }
+    } else if (current->name != nullptr &&
+               strcmp((const char*)current->name, "text") != 0) {
+      // if it's not a Placemark we must go deeper
+      if (!traverseSubtree(current->xmlChildrenNode, fixes, error)) {
+        return false;
+      }
+    }
+  }
+  error->clear();
+  return true;
+}
+
+bool KmlParser::parseFile(const char* filePath, GpsFixArray* fixes,
+                          string* error) {
+  // This initializes the library and checks potential ABI mismatches between
+  // the version it was compiled for and the actual shared library used.
+  LIBXML_TEST_VERSION
+
+  xmlDocPtr doc = xmlReadFile(filePath, nullptr, 0);
+  if (doc == nullptr) {
+    *error = "KML document not parsed successfully.";
+    xmlFreeDoc(doc);
+    return false;
+  }
+
+  xmlNodePtr cur = xmlDocGetRootElement(doc);
+  if (cur == nullptr) {
+    *error = "Could not get root element of parsed KML file.";
+    xmlFreeDoc(doc);
+    xmlCleanupParser();
+    return false;
+  }
+  bool isWellFormed = traverseSubtree(cur, fixes, error);
+
+  xmlFreeDoc(doc);
+  xmlCleanupParser();
+
+  return isWellFormed;
+}
+
+bool KmlParser::parseString(const char* str, int len, GpsFixArray* fixes,
+                            string* error) {
+  // This initializes the library and checks potential ABI mismatches between
+  // the version it was compiled for and the actual shared library used.
+  LIBXML_TEST_VERSION
+
+  xmlDocPtr doc = xmlReadMemory(str, len, NULL, NULL, 0);
+  if (doc == nullptr) {
+    *error = "KML document not parsed successfully.";
+    xmlFreeDoc(doc);
+    return false;
+  }
+
+  xmlNodePtr cur = xmlDocGetRootElement(doc);
+  if (cur == nullptr) {
+    *error = "Could not get root element of parsed KML file.";
+    xmlFreeDoc(doc);
+    xmlCleanupParser();
+    return false;
+  }
+  bool isWellFormed = traverseSubtree(cur, fixes, error);
+
+  xmlFreeDoc(doc);
+  xmlCleanupParser();
+
+  return isWellFormed;
+}
diff --git a/host/libs/location/KmlParser.h b/host/libs/location/KmlParser.h
new file mode 100644
index 0000000..cb627de
--- /dev/null
+++ b/host/libs/location/KmlParser.h
@@ -0,0 +1,33 @@
+/*
+ * Copyright (C) 2015-2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include "GpsFix.h"
+
+#include <string>
+
+class KmlParser {
+ public:
+  // Parses a given .kml file at |filePath| and extracts all contained GPS
+  // fixes into |*fixes|.
+  // Returns true on success, false otherwise. if false is returned, |*error|
+  // is set to a message describing the error.
+  static bool parseFile(const char* filePath, GpsFixArray* fixes,
+                        std::string* error);
+  static bool parseString(const char* str, int len, GpsFixArray* fixes,
+                          std::string* error);
+};
\ No newline at end of file
diff --git a/host/libs/location/StringParse.cpp b/host/libs/location/StringParse.cpp
new file mode 100644
index 0000000..9dd79b2
--- /dev/null
+++ b/host/libs/location/StringParse.cpp
@@ -0,0 +1,37 @@
+/*
+ * Copyright (C) 2015-2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "StringParse.h"
+
+#include <assert.h>
+#include <locale.h>
+#include <stdarg.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+namespace cuttlefish {
+namespace {
+
+extern "C" int SscanfWithCLocale(const char* string, const char* format, ...) {
+  va_list args;
+  va_start(args, format);
+  const int res = ::vsscanf(string, format, args);
+  va_end(args);
+  return res;
+}
+}  // namespace
+}  // namespace cuttlefish
\ No newline at end of file
diff --git a/host/libs/location/StringParse.h b/host/libs/location/StringParse.h
new file mode 100644
index 0000000..a6e821a
--- /dev/null
+++ b/host/libs/location/StringParse.h
@@ -0,0 +1,51 @@
+/*
+ * Copyright (C) 2015-2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+#include <stdarg.h>
+
+//
+// This file defines C and C++ replacements for scanf to parse a string in a
+// locale-independent way. This is useful when parsing input data that comes
+// not from user, but from some kind of a fixed protocol with predefined locale
+// settings.
+// Just use these functions as drop-in replacements for sscanf();
+//
+// Note1: if the input string contains any dot characters other than decimal
+// separators, the results of parsing will be incorrect: in Windows the
+// implementation replaces all dots with the current decimal separator to parse
+// using current locale.
+// Note2: current implementation only supports parsing floating point numbers -
+// no code for monetary values, dates, digit grouping etc.
+// The limitation is because of MinGW's lack of per-thread locales support.
+//
+
+#ifdef __cplusplus
+#include <utility>
+
+extern "C" {
+int SscanfWithCLocale(const char* string, const char* format, ...);
+}
+namespace cuttlefish {
+
+template <class... Args>
+int SscanfWithCLocale(const char* string, const char* format, Args... args) {
+  return ::SscanfWithCLocale(string, format, std::forward<Args>(args)...);
+}
+
+}  // namespace cuttlefish
+
+#endif  // __cplusplus
diff --git a/host/libs/metrics/Android.bp b/host/libs/metrics/Android.bp
new file mode 100644
index 0000000..6c386ce
--- /dev/null
+++ b/host/libs/metrics/Android.bp
@@ -0,0 +1,38 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+cc_library_static {
+    name: "libcuttlefish_metrics",
+    srcs: [
+        "metrics_receiver.cc",
+    ],
+    shared_libs: [
+        "libcuttlefish_fs",
+        "libcuttlefish_utils",
+        "libjsoncpp",
+        "libbase",
+        "libfruit",
+    ],
+    static_libs: [
+        "libcuttlefish_msg_queue",
+        "libcuttlefish_host_config",
+        "libgflags",
+    ],
+    defaults: ["cuttlefish_host"],
+}
diff --git a/host/libs/metrics/metrics_receiver.cc b/host/libs/metrics/metrics_receiver.cc
new file mode 100644
index 0000000..0004843
--- /dev/null
+++ b/host/libs/metrics/metrics_receiver.cc
@@ -0,0 +1,66 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "metrics_receiver.h"
+
+#include <android-base/logging.h>
+#include <android-base/strings.h>
+#include <gflags/gflags.h>
+#include <stdbool.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <sys/ipc.h>
+#include <sys/msg.h>
+#include <fstream>
+#include <iostream>
+#include <memory>
+
+#include "common/libs/utils/tee_logging.h"
+#include "host/commands/metrics/metrics_defs.h"
+#include "host/libs/config/cuttlefish_config.h"
+#include "host/libs/msg_queue/msg_queue.h"
+
+using cuttlefish::MetricsExitCodes;
+
+namespace cuttlefish {
+
+MetricsReceiver::MetricsReceiver() {}
+
+MetricsReceiver::~MetricsReceiver() {}
+
+void MetricsReceiver::SendHelper(const std::string &message) {
+  auto msg_queue = SysVMessageQueue::Create("cuttlefish_ipc", 'a', false);
+  if (msg_queue == NULL) {
+    LOG(FATAL) << "Create: failed to create cuttlefish_ipc";
+  }
+
+  struct msg_buffer msg;
+  msg.mesg_type = 1;
+  strcpy(msg.mesg_text, message.c_str());
+  int rc = msg_queue->Send(&msg, message.length() + 1, true);
+  if (rc == -1) {
+    LOG(FATAL) << "Send: failed to send message to msg_queue";
+  }
+}
+
+void MetricsReceiver::LogMetricsVMStart() { SendHelper("VMStart"); }
+
+void MetricsReceiver::LogMetricsVMStop() { SendHelper("VMStop"); }
+
+void MetricsReceiver::LogMetricsDeviceBoot() { SendHelper("DeviceBoot"); }
+
+void MetricsReceiver::LogMetricsLockScreen() { SendHelper("LockScreen"); }
+}  // namespace cuttlefish
diff --git a/host/libs/metrics/metrics_receiver.h b/host/libs/metrics/metrics_receiver.h
new file mode 100644
index 0000000..5fa0bbe
--- /dev/null
+++ b/host/libs/metrics/metrics_receiver.h
@@ -0,0 +1,42 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#pragma once
+
+#include <string>
+
+namespace cuttlefish {
+
+const uint32_t MAX_MSG_SIZE = 200;
+
+typedef struct msg_buffer {
+  long mesg_type;
+  char mesg_text[MAX_MSG_SIZE];
+} msg_buffer;
+
+class MetricsReceiver {
+ private:
+  static void SendHelper(const std::string &message);
+
+ public:
+  MetricsReceiver();
+  ~MetricsReceiver();
+  static void LogMetricsVMStart();
+  static void LogMetricsVMStop();
+  static void LogMetricsDeviceBoot();
+  static void LogMetricsLockScreen();
+};
+
+}  // namespace cuttlefish
diff --git a/host/libs/msg_queue/Android.bp b/host/libs/msg_queue/Android.bp
index 39d9c96..bfd6bd8 100644
--- a/host/libs/msg_queue/Android.bp
+++ b/host/libs/msg_queue/Android.bp
@@ -17,7 +17,7 @@
     default_applicable_licenses: ["Android-Apache-2.0"],
 }
 
-cc_library_host_static {
+cc_library_static {
     name: "libcuttlefish_msg_queue",
     srcs: [
         "msg_queue.cc",
diff --git a/host/libs/msg_queue/msg_queue.cc b/host/libs/msg_queue/msg_queue.cc
index 8016a23..6c84a8d 100644
--- a/host/libs/msg_queue/msg_queue.cc
+++ b/host/libs/msg_queue/msg_queue.cc
@@ -33,10 +33,11 @@
 // class holds `msgid` returned from msg_queue_create, and match the lifetime of
 // the message queue to the lifetime of the object.
 
-SysVMessageQueue::SysVMessageQueue(int id) { msgid = id; }
+SysVMessageQueue::SysVMessageQueue(int id, bool auto_close)
+    : msgid_(id), auto_close_(auto_close) {}
 
 SysVMessageQueue::~SysVMessageQueue(void) {
-  if (msgctl(msgid, IPC_RMID, NULL) < 0) {
+  if (auto_close_ && msgctl(msgid_, IPC_RMID, NULL) < 0) {
     int error_num = errno;
     LOG(ERROR) << "Could not remove message queue: " << strerror(error_num);
   }
@@ -45,7 +46,7 @@
 // SysVMessageQueue::Create would return an empty/null std::unique_ptr if
 // initialization failed.
 std::unique_ptr<SysVMessageQueue> SysVMessageQueue::Create(
-    const std::string& path, char proj_id) {
+    const std::string& path, char proj_id, bool auto_close) {
   // key file must exist before calling ftok
   std::fstream fs;
   fs.open(path, std::ios::out);
@@ -62,13 +63,14 @@
   if (queue_id < 0) {
     queue_id = msgget(key, IPC_CREAT | IPC_EXCL | 0600);
   }
-  auto msg = std::unique_ptr<SysVMessageQueue>(new SysVMessageQueue(queue_id));
+  auto msg = std::unique_ptr<SysVMessageQueue>(
+      new SysVMessageQueue(queue_id, auto_close));
   return msg;
 }
 
 int SysVMessageQueue::Send(void* data, size_t size, bool block) {
   int msgflg = block ? 0 : IPC_NOWAIT;
-  if (msgsnd(msgid, data, size, msgflg) < 0) {
+  if (msgsnd(msgid_, data, size, msgflg) < 0) {
     int error_num = errno;
     if (error_num == EAGAIN) {
       // returns EAGAIN if queue is full and non-blocking
@@ -90,7 +92,7 @@
   // System call fails with errno set to ENOMSG if queue is empty and
   // non-blocking.
   int msgflg = block ? 0 : IPC_NOWAIT;
-  return msgrcv(msgid, data, size, msgtyp, msgflg);
+  return msgrcv(msgid_, data, size, msgtyp, msgflg);
 }
 
 }  // namespace cuttlefish
diff --git a/host/libs/msg_queue/msg_queue.h b/host/libs/msg_queue/msg_queue.h
index 0d43cd1..187527b 100644
--- a/host/libs/msg_queue/msg_queue.h
+++ b/host/libs/msg_queue/msg_queue.h
@@ -21,14 +21,16 @@
 class SysVMessageQueue {
  public:
   static std::unique_ptr<SysVMessageQueue> Create(const std::string& path,
-                                                  char proj_id);
+                                                  char proj_id,
+                                                  bool auto_close = true);
   ~SysVMessageQueue();
 
   int Send(void* data, size_t size, bool block);
   ssize_t Receive(void* data, size_t size, long msgtyp, bool block);
 
  private:
-  SysVMessageQueue(int msgid);
-  int msgid;
+  SysVMessageQueue(int msgid, bool auto_close);
+  int msgid_;
+  bool auto_close_;
 };
 }  // namespace cuttlefish
diff --git a/host/libs/screen_connector/Android.bp b/host/libs/screen_connector/Android.bp
index ae1ef66..d0bf438 100644
--- a/host/libs/screen_connector/Android.bp
+++ b/host/libs/screen_connector/Android.bp
@@ -25,6 +25,7 @@
     shared_libs: [
         "libcuttlefish_fs",
         "libbase",
+        "libfruit",
         "libjsoncpp",
         "liblog",
     ],
diff --git a/host/libs/screen_connector/screen_connector.h b/host/libs/screen_connector/screen_connector.h
index 49e5253..846116c 100644
--- a/host/libs/screen_connector/screen_connector.h
+++ b/host/libs/screen_connector/screen_connector.h
@@ -22,13 +22,17 @@
 #include <mutex>
 #include <optional>
 #include <string>
+#include <string_view>
 #include <thread>
 #include <type_traits>
+#include <unordered_set>
 
 #include <android-base/logging.h>
+#include <fruit/fruit.h>
 
 #include "common/libs/confui/confui.h"
 #include "common/libs/fs/shared_fd.h"
+#include "common/libs/utils/contains.h"
 #include "common/libs/utils/size_utils.h"
 #include "host/libs/config/cuttlefish_config.h"
 #include "host/libs/confui/host_mode_ctrl.h"
@@ -51,6 +55,27 @@
 
   using FrameMultiplexer = ScreenConnectorInputMultiplexer<ProcessedFrameType>;
 
+  INJECT(ScreenConnector(WaylandScreenConnector& sc_android_src,
+                         HostModeCtrl& host_mode_ctrl))
+      : sc_android_src_(sc_android_src),
+        host_mode_ctrl_{host_mode_ctrl},
+        on_next_frame_cnt_{0},
+        render_confui_cnt_{0},
+        sc_frame_multiplexer_{host_mode_ctrl_} {
+    auto config = cuttlefish::CuttlefishConfig::Get();
+    if (!config) {
+      LOG(FATAL) << "CuttlefishConfig is not available.";
+    }
+    auto instance = config->ForDefaultInstance();
+    std::unordered_set<std::string_view> valid_gpu_modes{
+        cuttlefish::kGpuModeDrmVirgl, cuttlefish::kGpuModeGfxstream,
+        cuttlefish::kGpuModeGfxstreamGuestAngle,
+        cuttlefish::kGpuModeGuestSwiftshader};
+    if (!Contains(valid_gpu_modes, instance.gpu_mode())) {
+      LOG(FATAL) << "Invalid gpu mode: " << instance.gpu_mode();
+    }
+  }
+
   /**
    * This is the type of the callback function WebRTC is supposed to provide
    * ScreenConnector with.
@@ -66,21 +91,6 @@
       /* ScImpl enqueues this type into the Q */
       ProcessedFrameType& msg)>;
 
-  static std::unique_ptr<ScreenConnector<ProcessedFrameType>> Get(
-      const int frames_fd, HostModeCtrl& host_mode_ctrl) {
-    auto config = cuttlefish::CuttlefishConfig::Get();
-    ScreenConnector<ProcessedFrameType>* raw_ptr = nullptr;
-    if (config->gpu_mode() == cuttlefish::kGpuModeDrmVirgl ||
-        config->gpu_mode() == cuttlefish::kGpuModeGfxStream ||
-        config->gpu_mode() == cuttlefish::kGpuModeGuestSwiftshader) {
-      raw_ptr = new ScreenConnector<ProcessedFrameType>(
-          std::make_unique<WaylandScreenConnector>(frames_fd), host_mode_ctrl);
-    } else {
-      LOG(FATAL) << "Invalid gpu mode: " << config->gpu_mode();
-    }
-    return std::unique_ptr<ScreenConnector<ProcessedFrameType>>(raw_ptr);
-  }
-
   virtual ~ScreenConnector() = default;
 
   /**
@@ -93,7 +103,7 @@
     callback_from_streamer_ = std::move(frame_callback);
     streamer_callback_set_cv_.notify_all();
 
-    sc_android_src_->SetFrameCallback(
+    sc_android_src_.SetFrameCallback(
         [this](std::uint32_t display_number, std::uint32_t frame_w,
                std::uint32_t frame_h, std::uint32_t frame_stride_bytes,
                std::uint8_t* frame_bytes) {
@@ -122,6 +132,10 @@
     return false;
   }
 
+  void SetDisplayEventCallback(DisplayEventCallback event_callback) {
+    sc_android_src_.SetDisplayEventCallback(std::move(event_callback));
+  }
+
   /* returns the processed frame that also includes meta-info such as success/fail
    * and display number from the guest
    *
@@ -161,17 +175,10 @@
   }
 
  protected:
-  ScreenConnector(std::unique_ptr<WaylandScreenConnector>&& impl,
-                  HostModeCtrl& host_mode_ctrl)
-      : sc_android_src_{std::move(impl)},
-        host_mode_ctrl_{host_mode_ctrl},
-        on_next_frame_cnt_{0},
-        render_confui_cnt_{0},
-        sc_frame_multiplexer_{host_mode_ctrl_} {}
   ScreenConnector() = delete;
 
  private:
-  std::unique_ptr<WaylandScreenConnector> sc_android_src_;
+  WaylandScreenConnector& sc_android_src_;
   HostModeCtrl& host_mode_ctrl_;
   unsigned long long int on_next_frame_cnt_;
   unsigned long long int render_confui_cnt_;
diff --git a/host/libs/screen_connector/screen_connector_common.h b/host/libs/screen_connector/screen_connector_common.h
index 32bac76..123d6ac 100644
--- a/host/libs/screen_connector/screen_connector_common.h
+++ b/host/libs/screen_connector/screen_connector_common.h
@@ -46,18 +46,21 @@
   static constexpr std::uint32_t BytesPerPixel() { return 4; }
   static std::uint32_t ScreenCount() {
     auto config = ChkAndGetConfig();
-    auto display_configs = config->display_configs();
+    auto instance = config->ForDefaultInstance();
+    auto display_configs = instance.display_configs();
     return static_cast<std::uint32_t>(display_configs.size());
   }
   static std::uint32_t ScreenHeight(std::uint32_t display_number) {
     auto config = ChkAndGetConfig();
-    auto display_configs = config->display_configs();
+    auto instance = config->ForDefaultInstance();
+    auto display_configs = instance.display_configs();
     CHECK_GT(display_configs.size(), display_number);
     return display_configs[display_number].height;
   }
   static std::uint32_t ScreenWidth(std::uint32_t display_number) {
     auto config = ChkAndGetConfig();
-    auto display_configs = config->display_configs();
+    auto instance = config->ForDefaultInstance();
+    auto display_configs = instance.display_configs();
     CHECK_GE(display_configs.size(), display_number);
     return display_configs[display_number].width;
   }
diff --git a/host/libs/screen_connector/wayland_screen_connector.cpp b/host/libs/screen_connector/wayland_screen_connector.cpp
index dbd4052..025207a 100644
--- a/host/libs/screen_connector/wayland_screen_connector.cpp
+++ b/host/libs/screen_connector/wayland_screen_connector.cpp
@@ -25,7 +25,8 @@
 
 namespace cuttlefish {
 
-WaylandScreenConnector::WaylandScreenConnector(int frames_fd) {
+WaylandScreenConnector::WaylandScreenConnector(ANNOTATED(FramesFd, int)
+                                                   frames_fd) {
   int wayland_fd = fcntl(frames_fd, F_DUPFD_CLOEXEC, 3);
   CHECK(wayland_fd != -1) << "Unable to dup server, errno " << errno;
   close(frames_fd);
@@ -38,4 +39,9 @@
   server_->SetFrameCallback(std::move(frame_callback));
 }
 
+void WaylandScreenConnector::SetDisplayEventCallback(
+    DisplayEventCallback event_callback) {
+  server_->SetDisplayEventCallback(std::move(event_callback));
+}
+
 }  // namespace cuttlefish
diff --git a/host/libs/screen_connector/wayland_screen_connector.h b/host/libs/screen_connector/wayland_screen_connector.h
index ab3120b..5c8bbea 100644
--- a/host/libs/screen_connector/wayland_screen_connector.h
+++ b/host/libs/screen_connector/wayland_screen_connector.h
@@ -18,6 +18,8 @@
 
 #include <memory>
 
+#include <fruit/fruit.h>
+
 #include "host/libs/screen_connector/screen_connector_common.h"
 #include "host/libs/wayland/wayland_server.h"
 
@@ -25,10 +27,15 @@
 
 class WaylandScreenConnector {
  public:
-  WaylandScreenConnector(int frames_fd);
+  struct FramesFd {};
+  INJECT(WaylandScreenConnector(ANNOTATED(FramesFd, int) frames_fd));
+
   void SetFrameCallback(GenerateProcessedFrameCallbackImpl frame_callback);
 
+  void SetDisplayEventCallback(DisplayEventCallback event_callback);
+
  private:
   std::unique_ptr<wayland::WaylandServer> server_;
 };
-}
+
+}  // namespace cuttlefish
diff --git a/host/libs/vm_manager/Android.bp b/host/libs/vm_manager/Android.bp
index 18bcb52..eb39c33 100644
--- a/host/libs/vm_manager/Android.bp
+++ b/host/libs/vm_manager/Android.bp
@@ -17,28 +17,6 @@
     default_applicable_licenses: ["Android-Apache-2.0"],
 }
 
-soong_config_module_type {
-    name: "cf_cc_defaults",
-    module_type: "cc_defaults",
-    config_namespace: "cvdhost",
-    bool_variables: ["enforce_mac80211_hwsim"],
-    properties: ["cflags"],
-}
-
-// This is the customization layer driven by soong config variables.
-cf_cc_defaults {
-    name: "cvd_cc_defaults",
-    soong_config_variables: {
-        // PRODUCT_ENFORCE_MAC80211_HWSIM sets this
-        enforce_mac80211_hwsim: {
-            cflags: ["-DENFORCE_MAC80211_HWSIM=true"],
-            conditions_default: {
-                cflags: [],
-            }
-        },
-    }
-}
-
 cc_library_static {
     name: "libcuttlefish_vm_manager",
     srcs: [
@@ -64,7 +42,6 @@
     ],
     defaults: [
         "cuttlefish_host",
-        "cuttlefish_libicuuc",
-        "cvd_cc_defaults",
+        "cuttlefish_libicuuc"
     ],
 }
diff --git a/host/libs/vm_manager/crosvm_builder.cpp b/host/libs/vm_manager/crosvm_builder.cpp
index 0d7658e..094cf91 100644
--- a/host/libs/vm_manager/crosvm_builder.cpp
+++ b/host/libs/vm_manager/crosvm_builder.cpp
@@ -21,26 +21,36 @@
 
 #include "common/libs/utils/network.h"
 #include "common/libs/utils/subprocess.h"
+#include "host/libs/config/cuttlefish_config.h"
+#include "host/libs/config/known_paths.h"
 
 namespace cuttlefish {
 
-CrosvmBuilder::CrosvmBuilder() : command_("crosvm") {
-  command_.AddParameter("run");
+CrosvmBuilder::CrosvmBuilder() : command_("crosvm") {}
+
+void CrosvmBuilder::ApplyProcessRestarter(const std::string& crosvm_binary,
+                                          int exit_code) {
+  command_.SetExecutableAndName(ProcessRestarterBinary());
+  command_.AddParameter("-when_exited_with_code=", exit_code);
+  command_.AddParameter("--");
+  command_.AddParameter(crosvm_binary);
+  // Flag allows exit codes other than 0 or 1, must be before command argument
+  command_.AddParameter("--extended-status");
 }
 
-void CrosvmBuilder::SetBinary(const std::string& binary) {
-  command_.SetExecutable(binary);
-}
-
-void CrosvmBuilder::AddControlSocket(const std::string& control_socket) {
-  // Store this value so it persists after std::move(this->Cmd())
-  auto crosvm = command_.Executable();
-  command_.SetStopper([crosvm, control_socket](Subprocess* proc) {
+void CrosvmBuilder::AddControlSocket(const std::string& control_socket,
+                                     const std::string& executable_path) {
+  command_.SetStopper([executable_path, control_socket](Subprocess* proc) {
+    Command stop_cmd(executable_path);
+    stop_cmd.AddParameter("stop");
+    stop_cmd.AddParameter(control_socket);
+    if (stop_cmd.Start().Wait() == 0) {
+      return StopperResult::kStopSuccess;
+    }
     LOG(WARNING) << "Failed to stop VMM nicely, attempting to KILL";
-    auto result = KillSubprocess(proc);
-    unlink(control_socket.c_str());
-    return result == StopperResult::kStopSuccess ? StopperResult::kStopCrash
-                                                 : StopperResult::kStopFailure;
+    return KillSubprocess(proc) == StopperResult::kStopSuccess
+               ? StopperResult::kStopCrash
+               : StopperResult::kStopFailure;
   });
   command_.AddParameter("--socket=", control_socket);
 }
@@ -49,13 +59,10 @@
   command_.AddParameter("--serial=hardware=virtio-console,num=", ++hvc_num_,
                         ",type=sink");
 }
-void CrosvmBuilder::AddHvcConsoleReadOnly(const std::string& output) {
+void CrosvmBuilder::AddHvcReadOnly(const std::string& output, bool console) {
   command_.AddParameter("--serial=hardware=virtio-console,num=", ++hvc_num_,
-                        ",type=file,path=", output, ",console=true");
-}
-void CrosvmBuilder::AddHvcReadOnly(const std::string& output) {
-  command_.AddParameter("--serial=hardware=virtio-console,num=", ++hvc_num_,
-                        ",type=file,path=", output);
+                        ",type=file,path=", output,
+                        console ? ",console=true" : "");
 }
 void CrosvmBuilder::AddHvcReadWrite(const std::string& output,
                                     const std::string& input) {
@@ -63,6 +70,14 @@
                         ",type=file,path=", output, ",input=", input);
 }
 
+void CrosvmBuilder::AddReadOnlyDisk(const std::string& path) {
+  command_.AddParameter("--disk=", path);
+}
+
+void CrosvmBuilder::AddReadWriteDisk(const std::string& path) {
+  command_.AddParameter("--rwdisk=", path);
+}
+
 void CrosvmBuilder::AddSerialSink() {
   command_.AddParameter("--serial=hardware=serial,num=", ++serial_num_,
                         ",type=sink");
@@ -72,10 +87,11 @@
                         ",type=file,path=", output, ",earlycon=true");
 }
 void CrosvmBuilder::AddSerialConsoleReadWrite(const std::string& output,
-                                              const std::string& input) {
+                                              const std::string& input,
+                                              bool earlycon) {
   command_.AddParameter("--serial=hardware=serial,num=", ++serial_num_,
                         ",type=file,path=", output, ",input=", input,
-                        ",earlycon=true");
+                        earlycon ? ",earlycon=true" : "");
 }
 void CrosvmBuilder::AddSerial(const std::string& output,
                               const std::string& input) {
@@ -86,7 +102,18 @@
 SharedFD CrosvmBuilder::AddTap(const std::string& tap_name) {
   auto tap_fd = OpenTapInterface(tap_name);
   if (tap_fd->IsOpen()) {
-    command_.AddParameter("--tap-fd=", tap_fd);
+    command_.AddParameter("--net=tap-fd=", tap_fd);
+  } else {
+    LOG(ERROR) << "Unable to connect to \"" << tap_name
+               << "\": " << tap_fd->StrError();
+  }
+  return tap_fd;
+}
+
+SharedFD CrosvmBuilder::AddTap(const std::string& tap_name, const std::string& mac) {
+  auto tap_fd = OpenTapInterface(tap_name);
+  if (tap_fd->IsOpen()) {
+    command_.AddParameter("--net=tap-fd=", tap_fd, ",mac=\"", mac, "\"");
   } else {
     LOG(ERROR) << "Unable to connect to \"" << tap_name
                << "\": " << tap_fd->StrError();
diff --git a/host/libs/vm_manager/crosvm_builder.h b/host/libs/vm_manager/crosvm_builder.h
index 90457b2..340a784 100644
--- a/host/libs/vm_manager/crosvm_builder.h
+++ b/host/libs/vm_manager/crosvm_builder.h
@@ -27,22 +27,25 @@
  public:
   CrosvmBuilder();
 
-  void SetBinary(const std::string&);
-  void AddControlSocket(const std::string&);
+  void ApplyProcessRestarter(const std::string& crosvm_binary, int exit_code);
+  void AddControlSocket(const std::string&, const std::string&);
 
   void AddHvcSink();
-  void AddHvcConsoleReadOnly(const std::string& output);
-  void AddHvcReadOnly(const std::string& output);
+  void AddHvcReadOnly(const std::string& output, bool console = false);
   void AddHvcReadWrite(const std::string& output, const std::string& input);
 
+  void AddReadOnlyDisk(const std::string& path);
+  void AddReadWriteDisk(const std::string& path);
+
   void AddSerialSink();
   void AddSerialConsoleReadOnly(const std::string& output);
   void AddSerialConsoleReadWrite(const std::string& output,
-                                 const std::string& input);
+                                 const std::string& input, bool earlycon);
   // [[deprecated("do not add any more users")]]
   void AddSerial(const std::string& output, const std::string& input);
 
   SharedFD AddTap(const std::string& tap_name);
+  SharedFD AddTap(const std::string& tap_name, const std::string& mac);
 
   int HvcNum();
 
diff --git a/host/libs/vm_manager/crosvm_manager.cpp b/host/libs/vm_manager/crosvm_manager.cpp
index 68609a3..bfa1c98 100644
--- a/host/libs/vm_manager/crosvm_manager.cpp
+++ b/host/libs/vm_manager/crosvm_manager.cpp
@@ -16,20 +16,24 @@
 
 #include "host/libs/vm_manager/crosvm_manager.h"
 
-#include <android-base/file.h>
-#include <android-base/logging.h>
-#include <android-base/strings.h>
 #include <sys/stat.h>
 #include <sys/types.h>
-#include <vulkan/vulkan.h>
 
 #include <cassert>
 #include <string>
+#include <unordered_map>
+#include <utility>
 #include <vector>
 
+#include <android-base/file.h>
+#include <android-base/logging.h>
+#include <android-base/strings.h>
+#include <vulkan/vulkan.h>
+
 #include "common/libs/utils/environment.h"
 #include "common/libs/utils/files.h"
 #include "common/libs/utils/network.h"
+#include "common/libs/utils/result.h"
 #include "common/libs/utils/subprocess.h"
 #include "host/libs/config/cuttlefish_config.h"
 #include "host/libs/config/known_paths.h"
@@ -44,7 +48,7 @@
 std::string GetControlSocketPath(
     const CuttlefishConfig::InstanceSpecific& instance,
     const std::string& socket_name) {
-  return instance.PerInstanceInternalPath(socket_name.c_str());
+  return instance.PerInstanceInternalUdsPath(socket_name.c_str());
 }
 
 }  // namespace
@@ -57,132 +61,202 @@
 #endif
 }
 
-std::vector<std::string> CrosvmManager::ConfigureGraphics(
-    const CuttlefishConfig& config) {
+Result<std::unordered_map<std::string, std::string>>
+CrosvmManager::ConfigureGraphics(
+    const CuttlefishConfig::InstanceSpecific& instance) {
   // Override the default HAL search paths in all cases. We do this because
   // the HAL search path allows for fallbacks, and fallbacks in conjunction
   // with properities lead to non-deterministic behavior while loading the
   // HALs.
-  if (config.gpu_mode() == kGpuModeGuestSwiftshader) {
-    return {
-        "androidboot.cpuvulkan.version=" + std::to_string(VK_API_VERSION_1_2),
-        "androidboot.hardware.gralloc=minigbm",
-        "androidboot.hardware.hwcomposer="+ config.hwcomposer(),
-        "androidboot.hardware.egl=angle",
-        "androidboot.hardware.vulkan=pastel",
-        "androidboot.opengles.version=196609"};  // OpenGL ES 3.1
+
+  std::unordered_map<std::string, std::string> bootconfig_args;
+
+  if (instance.gpu_mode() == kGpuModeGuestSwiftshader) {
+    bootconfig_args = {
+        {"androidboot.cpuvulkan.version", std::to_string(VK_API_VERSION_1_2)},
+        {"androidboot.hardware.gralloc", "minigbm"},
+        {"androidboot.hardware.hwcomposer", instance.hwcomposer()},
+        {"androidboot.hardware.hwcomposer.display_finder_mode", "drm"},
+        {"androidboot.hardware.egl", "angle"},
+        {"androidboot.hardware.vulkan", "pastel"},
+        {"androidboot.opengles.version", "196609"},  // OpenGL ES 3.1
+    };
+  } else if (instance.gpu_mode() == kGpuModeDrmVirgl) {
+    bootconfig_args = {
+        {"androidboot.cpuvulkan.version", "0"},
+        {"androidboot.hardware.gralloc", "minigbm"},
+        {"androidboot.hardware.hwcomposer", "ranchu"},
+        {"androidboot.hardware.hwcomposer.mode", "client"},
+        {"androidboot.hardware.hwcomposer.display_finder_mode", "drm"},
+        {"androidboot.hardware.egl", "mesa"},
+        // No "hardware" Vulkan support, yet
+        {"androidboot.opengles.version", "196608"},  // OpenGL ES 3.0
+    };
+  } else if (instance.gpu_mode() == kGpuModeGfxstream ||
+             instance.gpu_mode() == kGpuModeGfxstreamGuestAngle) {
+    const bool uses_angle = instance.gpu_mode() == kGpuModeGfxstreamGuestAngle;
+    const std::string gles_impl = uses_angle ? "angle" : "emulation";
+    const std::string gltransport =
+        (instance.guest_android_version() == "11.0.0") ? "virtio-gpu-pipe"
+                                                       : "virtio-gpu-asg";
+    bootconfig_args = {
+        {"androidboot.cpuvulkan.version", "0"},
+        {"androidboot.hardware.gralloc", "minigbm"},
+        {"androidboot.hardware.hwcomposer", instance.hwcomposer()},
+        {"androidboot.hardware.hwcomposer.display_finder_mode", "drm"},
+        {"androidboot.hardware.egl", gles_impl},
+        {"androidboot.hardware.vulkan", "ranchu"},
+        {"androidboot.hardware.gltransport", gltransport},
+        {"androidboot.opengles.version", "196609"},  // OpenGL ES 3.1
+    };
+  } else if (instance.gpu_mode() == kGpuModeNone) {
+    return {};
+  } else {
+    return CF_ERR("Unknown GPU mode " << instance.gpu_mode());
   }
 
-  if (config.gpu_mode() == kGpuModeDrmVirgl) {
-    return {
-      "androidboot.cpuvulkan.version=0",
-      "androidboot.hardware.gralloc=minigbm",
-      "androidboot.hardware.hwcomposer=drm",
-      "androidboot.hardware.egl=mesa",
-    };
+  if (!instance.gpu_angle_feature_overrides_enabled().empty()) {
+    bootconfig_args["androidboot.hardware.angle_feature_overrides_enabled"] =
+        instance.gpu_angle_feature_overrides_enabled();
   }
-  if (config.gpu_mode() == kGpuModeGfxStream) {
-    std::string gles_impl = config.enable_gpu_angle() ? "angle" : "emulation";
-    return {"androidboot.cpuvulkan.version=0",
-            "androidboot.hardware.gralloc=minigbm",
-            "androidboot.hardware.hwcomposer=" + config.hwcomposer(),
-            "androidboot.hardware.egl=" + gles_impl,
-            "androidboot.hardware.vulkan=ranchu",
-            "androidboot.hardware.gltransport=virtio-gpu-asg",
-            "androidboot.opengles.version=196608"};  // OpenGL ES 3.0
+  if (!instance.gpu_angle_feature_overrides_disabled().empty()) {
+    bootconfig_args["androidboot.hardware.angle_feature_overrides_disabled"] =
+        instance.gpu_angle_feature_overrides_disabled();
   }
-  return {};
+
+  return bootconfig_args;
 }
 
-std::string CrosvmManager::ConfigureBootDevices(int num_disks) {
+Result<std::unordered_map<std::string, std::string>>
+CrosvmManager::ConfigureBootDevices(int num_disks, bool have_gpu) {
   // TODO There is no way to control this assignment with crosvm (yet)
   if (HostArch() == Arch::X86_64) {
     // crosvm has an additional PCI device for an ISA bridge
     // virtio_gpu and virtio_wl precedes the first console or disk
-    return ConfigureMultipleBootDevices("pci0000:00/0000:00:", 3, num_disks);
+    return ConfigureMultipleBootDevices("pci0000:00/0000:00:",
+                                        1 + (have_gpu ? 2 : 0), num_disks);
   } else {
     // On ARM64 crosvm, block devices are on their own bridge, so we don't
     // need to calculate it, and the path is always the same
-    return "androidboot.boot_devices=10000.pci";
+    return {{{"androidboot.boot_devices", "10000.pci"}}};
   }
 }
 
 constexpr auto crosvm_socket = "crosvm_control.sock";
 
-std::vector<Command> CrosvmManager::StartCommands(
+Result<std::vector<MonitorCommand>> CrosvmManager::StartCommands(
     const CuttlefishConfig& config) {
   auto instance = config.ForDefaultInstance();
-  CrosvmBuilder crosvm_cmd;
-  crosvm_cmd.SetBinary(config.crosvm_binary());
-  crosvm_cmd.AddControlSocket(GetControlSocketPath(instance, crosvm_socket));
 
-  if (!config.smt()) {
+  CrosvmBuilder crosvm_cmd;
+
+  crosvm_cmd.ApplyProcessRestarter(instance.crosvm_binary(),
+                                   kCrosvmVmResetExitCode);
+  crosvm_cmd.Cmd().AddParameter("run");
+  crosvm_cmd.AddControlSocket(GetControlSocketPath(instance, crosvm_socket),
+                              instance.crosvm_binary());
+  if (!instance.smt()) {
     crosvm_cmd.Cmd().AddParameter("--no-smt");
   }
 
   crosvm_cmd.Cmd().AddParameter("--core-scheduling=false");
 
-  if (config.vhost_net()) {
+  if (instance.vhost_net()) {
     crosvm_cmd.Cmd().AddParameter("--vhost-net");
   }
 
-#ifdef ENFORCE_MAC80211_HWSIM
-  if (!config.vhost_user_mac80211_hwsim().empty()) {
+  if (config.virtio_mac80211_hwsim() &&
+      !config.vhost_user_mac80211_hwsim().empty()) {
     crosvm_cmd.Cmd().AddParameter("--vhost-user-mac80211-hwsim=",
                                   config.vhost_user_mac80211_hwsim());
   }
-#endif
 
-  if (config.protected_vm()) {
+  if (instance.protected_vm()) {
     crosvm_cmd.Cmd().AddParameter("--protected-vm");
   }
 
-  if (config.gdb_port() > 0) {
-    CHECK(config.cpus() == 1) << "CPUs must be 1 for crosvm gdb mode";
-    crosvm_cmd.Cmd().AddParameter("--gdb=", config.gdb_port());
+  if (instance.gdb_port() > 0) {
+    CF_EXPECT(instance.cpus() == 1, "CPUs must be 1 for crosvm gdb mode");
+    crosvm_cmd.Cmd().AddParameter("--gdb=", instance.gdb_port());
   }
 
-  auto gpu_capture_enabled = !config.gpu_capture_binary().empty();
-  auto gpu_mode = config.gpu_mode();
-  auto udmabuf_string = config.enable_gpu_udmabuf() ? "true" : "false";
-  auto angle_string = config.enable_gpu_angle() ? ",angle=true" : "";
+  const auto gpu_capture_enabled = !instance.gpu_capture_binary().empty();
+  const auto gpu_mode = instance.gpu_mode();
+
+  const std::string gpu_angle_string =
+      gpu_mode == kGpuModeGfxstreamGuestAngle ? ",angle=true" : "";
+  // 256MB so it is small enough for a 32-bit kernel.
+  const std::string gpu_pci_bar_size = ",pci-bar-size=268435456";
+  const std::string gpu_udmabuf_string =
+      instance.enable_gpu_udmabuf() ? ",udmabuf=true" : "";
+
+  const std::string gpu_common_string = gpu_udmabuf_string + gpu_pci_bar_size;
+  const std::string gpu_common_3d_string =
+      gpu_common_string + ",egl=true,surfaceless=true,glx=false,gles=true";
+
   if (gpu_mode == kGpuModeGuestSwiftshader) {
-    crosvm_cmd.Cmd().AddParameter("--gpu=2D,udmabuf=", udmabuf_string);
-  } else if (gpu_mode == kGpuModeDrmVirgl || gpu_mode == kGpuModeGfxStream) {
-    crosvm_cmd.Cmd().AddParameter(
-        gpu_mode == kGpuModeGfxStream ? "--gpu=gfxstream," : "--gpu=",
-        "egl=true,surfaceless=true,glx=false,gles=true,udmabuf=", udmabuf_string,
-        angle_string);
+    crosvm_cmd.Cmd().AddParameter("--gpu=backend=2D", gpu_common_string);
+  } else if (gpu_mode == kGpuModeDrmVirgl) {
+    crosvm_cmd.Cmd().AddParameter("--gpu=backend=virglrenderer",
+                                  gpu_common_3d_string);
+  } else if (gpu_mode == kGpuModeGfxstream ||
+             gpu_mode == kGpuModeGfxstreamGuestAngle) {
+    const std::string capset_names = ",context-types=gfxstream";
+    crosvm_cmd.Cmd().AddParameter("--gpu=backend=gfxstream,gles31=true",
+                                  gpu_common_3d_string, gpu_angle_string,
+                                  capset_names);
   }
 
-  for (const auto& display_config : config.display_configs()) {
-    crosvm_cmd.Cmd().AddParameter(
-        "--gpu-display=", "width=", display_config.width, ",",
-        "height=", display_config.height);
-  }
+  if (instance.hwcomposer() != kHwComposerNone) {
+    if (!instance.mte() && FileExists(instance.hwcomposer_pmem_path())) {
+      crosvm_cmd.Cmd().AddParameter("--rw-pmem-device=",
+                                    instance.hwcomposer_pmem_path());
+    }
 
-  crosvm_cmd.Cmd().AddParameter("--wayland-sock=",
-                                instance.frames_socket_path());
+    for (const auto& display_config : instance.display_configs()) {
+      const auto display_w = std::to_string(display_config.width);
+      const auto display_h = std::to_string(display_config.height);
+      const auto display_dpi = std::to_string(display_config.dpi);
+      const auto display_rr = std::to_string(display_config.refresh_rate_hz);
+      const auto display_params = android::base::Join(
+          std::vector<std::string>{
+              "mode=windowed[" + display_w + "," + display_h + "]",
+              "dpi=[" + display_dpi + "," + display_dpi + "]",
+              "refresh-rate=" + display_rr,
+          },
+          ",");
+      crosvm_cmd.Cmd().AddParameter("--gpu-display=", display_params);
+    }
+
+    crosvm_cmd.Cmd().AddParameter("--wayland-sock=",
+                                  instance.frames_socket_path());
+  }
 
   // crosvm_cmd.Cmd().AddParameter("--null-audio");
-  crosvm_cmd.Cmd().AddParameter("--mem=", config.memory_mb());
-  crosvm_cmd.Cmd().AddParameter("--cpus=", config.cpus());
-
-  auto disk_num = instance.virtual_disk_paths().size();
-  CHECK_GE(VmManager::kMaxDisks, disk_num)
-      << "Provided too many disks (" << disk_num << "), maximum "
-      << VmManager::kMaxDisks << "supported";
-  for (const auto& disk : instance.virtual_disk_paths()) {
-    crosvm_cmd.Cmd().AddParameter(
-        config.protected_vm() ? "--disk=" : "--rwdisk=", disk);
+  crosvm_cmd.Cmd().AddParameter("--mem=", instance.memory_mb());
+  crosvm_cmd.Cmd().AddParameter("--cpus=", instance.cpus());
+  if (instance.mte()) {
+    crosvm_cmd.Cmd().AddParameter("--mte");
   }
 
-  if (config.enable_webrtc()) {
-    auto touch_type_parameter =
-        config.enable_webrtc() ? "--multi-touch=" : "--single-touch=";
+  auto disk_num = instance.virtual_disk_paths().size();
+  CF_EXPECT(VmManager::kMaxDisks >= disk_num,
+            "Provided too many disks (" << disk_num << "), maximum "
+                                        << VmManager::kMaxDisks << "supported");
+  for (const auto& disk : instance.virtual_disk_paths()) {
+    if (instance.protected_vm()) {
+      crosvm_cmd.AddReadOnlyDisk(disk);
+    } else {
+      crosvm_cmd.AddReadWriteDisk(disk);
+    }
+  }
 
-    auto display_configs = config.display_configs();
-    CHECK_GE(display_configs.size(), 1);
+  if (instance.enable_webrtc()) {
+    auto touch_type_parameter =
+        instance.enable_webrtc() ? "--multi-touch=" : "--single-touch=";
+
+    auto display_configs = instance.display_configs();
+    CF_EXPECT(display_configs.size() >= 1);
 
     for (int i = 0; i < display_configs.size(); ++i) {
       auto display_config = display_configs[i];
@@ -194,7 +268,7 @@
     crosvm_cmd.Cmd().AddParameter("--keyboard=",
                                   instance.keyboard_socket_path());
   }
-  if (config.enable_webrtc()) {
+  if (instance.enable_webrtc()) {
     crosvm_cmd.Cmd().AddParameter("--switches=",
                                   instance.switches_socket_path());
   }
@@ -203,43 +277,37 @@
   // GPU capture can only support named files and not file descriptors due to
   // having to pass arguments to crosvm via a wrapper script.
   if (!gpu_capture_enabled) {
-    crosvm_cmd.AddTap(instance.mobile_tap_name());
-    crosvm_cmd.AddTap(instance.ethernet_tap_name());
+    // The ordering of tap devices is important. Make sure any change here
+    // is reflected in ethprime u-boot variable
+    crosvm_cmd.AddTap(instance.mobile_tap_name(), instance.mobile_mac());
+    crosvm_cmd.AddTap(instance.ethernet_tap_name(), instance.ethernet_mac());
 
-    // TODO(b/199103204): remove this as well when
-    // PRODUCT_ENFORCE_MAC80211_HWSIM is removed
-#ifndef ENFORCE_MAC80211_HWSIM
-    wifi_tap = crosvm_cmd.AddTap(instance.wifi_tap_name());
-#endif
+    if (!config.virtio_mac80211_hwsim()) {
+      wifi_tap = crosvm_cmd.AddTap(instance.wifi_tap_name());
+    }
   }
 
-  if (FileExists(instance.access_kregistry_path())) {
+  if (!instance.mte() && FileExists(instance.access_kregistry_path())) {
     crosvm_cmd.Cmd().AddParameter("--rw-pmem-device=",
                                   instance.access_kregistry_path());
   }
 
-  if (FileExists(instance.hwcomposer_pmem_path())) {
-    crosvm_cmd.Cmd().AddParameter("--rw-pmem-device=",
-                                  instance.hwcomposer_pmem_path());
-  }
-
-  if (FileExists(instance.pstore_path())) {
+  if (!instance.mte() && FileExists(instance.pstore_path())) {
     crosvm_cmd.Cmd().AddParameter("--pstore=path=", instance.pstore_path(),
                                   ",size=", FileSize(instance.pstore_path()));
   }
 
-  if (config.enable_sandbox()) {
-    const bool seccomp_exists = DirectoryExists(config.seccomp_policy_dir());
+  if (instance.enable_sandbox()) {
+    const bool seccomp_exists = DirectoryExists(instance.seccomp_policy_dir());
     const std::string& var_empty_dir = kCrosvmVarEmptyDir;
     const bool var_empty_available = DirectoryExists(var_empty_dir);
-    if (!var_empty_available || !seccomp_exists) {
-      LOG(FATAL) << var_empty_dir << " is not an existing, empty directory."
-                 << "seccomp-policy-dir, " << config.seccomp_policy_dir()
-                 << " does not exist " << std::endl;
-      return {};
-    }
+    CF_EXPECT(var_empty_available && seccomp_exists,
+              var_empty_dir << " is not an existing, empty directory."
+                            << "seccomp-policy-dir, "
+                            << instance.seccomp_policy_dir()
+                            << " does not exist");
     crosvm_cmd.Cmd().AddParameter("--seccomp-policy-dir=",
-                                  config.seccomp_policy_dir());
+                                  instance.seccomp_policy_dir());
   } else {
     crosvm_cmd.Cmd().AddParameter("--disable-sandbox");
   }
@@ -248,19 +316,25 @@
     crosvm_cmd.Cmd().AddParameter("--cid=", instance.vsock_guest_cid());
   }
 
-  // Use a virtio-console instance for the main kernel console. All
-  // messages will switch from earlycon to virtio-console after the driver
-  // is loaded, and crosvm will append to the kernel log automatically
-  crosvm_cmd.AddHvcConsoleReadOnly(instance.kernel_log_pipe_name());
+  // If kernel log is enabled, the virtio-console port will be specified as
+  // a true console for Linux, and kernel messages will be printed there.
+  // Otherwise, the port will still be set up for bootloader and userspace
+  // messages, but the kernel will not print anything here. This keeps our
+  // kernel log event features working. If an alternative "earlycon" boot
+  // console is configured below on a legacy serial port, it will control
+  // the main log until the virtio-console takes over.
+  crosvm_cmd.AddHvcReadOnly(instance.kernel_log_pipe_name(),
+                            instance.enable_kernel_log());
 
-  if (config.console()) {
-    // stdin is the only currently supported way to write data to a serial port in
-    // crosvm. A file (named pipe) is used here instead of stdout to ensure only
-    // the serial port output is received by the console forwarder as crosvm may
-    // print other messages to stdout.
-    if (config.kgdb() || config.use_bootloader()) {
+  if (instance.console()) {
+    // stdin is the only currently supported way to write data to a serial port
+    // in crosvm. A file (named pipe) is used here instead of stdout to ensure
+    // only the serial port output is received by the console forwarder as
+    // crosvm may print other messages to stdout.
+    if (instance.kgdb() || instance.use_bootloader()) {
       crosvm_cmd.AddSerialConsoleReadWrite(instance.console_out_pipe_name(),
-                                           instance.console_in_pipe_name());
+                                           instance.console_in_pipe_name(),
+                                           instance.enable_kernel_log());
       // In kgdb mode, we have the interactive console on ttyS0 (both Android's
       // console and kdb), so we can disable the virtio-console port usually
       // allocated to Android's serial console, and redirect it to a sink. This
@@ -277,7 +351,8 @@
     // virtio-console driver may not be available for early messages
     // In kgdb mode, earlycon is an interactive console, and so early
     // dmesg will go there instead of the kernel.log
-    if (config.kgdb() || config.use_bootloader()) {
+    if (instance.enable_kernel_log() &&
+        (instance.kgdb() || instance.use_bootloader())) {
       crosvm_cmd.AddSerialConsoleReadOnly(instance.kernel_log_pipe_name());
     }
 
@@ -289,15 +364,24 @@
 
   auto crosvm_logs_path = instance.PerInstanceInternalPath("crosvm.fifo");
   auto crosvm_logs = SharedFD::Fifo(crosvm_logs_path, 0666);
-  if (!crosvm_logs->IsOpen()) {
-    LOG(FATAL) << "Failed to create log fifo for crosvm's stdout/stderr: "
-               << crosvm_logs->StrError();
-    return {};
-  }
+  CF_EXPECT(crosvm_logs->IsOpen(),
+            "Failed to create log fifo for crosvm's stdout/stderr: "
+                << crosvm_logs->StrError());
 
   Command crosvm_log_tee_cmd(HostBinaryPath("log_tee"));
   crosvm_log_tee_cmd.AddParameter("--process_name=crosvm");
   crosvm_log_tee_cmd.AddParameter("--log_fd_in=", crosvm_logs);
+  crosvm_log_tee_cmd.SetStopper([](Subprocess* proc) {
+    // Ask nicely so that log_tee gets a chance to process all the logs.
+    int rval = kill(proc->pid(), SIGINT);
+    if (rval != 0) {
+      LOG(ERROR) << "Failed to stop log_tee nicely, attempting to KILL";
+      return KillSubprocess(proc) == StopperResult::kStopSuccess
+                 ? StopperResult::kStopCrash
+                 : StopperResult::kStopFailure;
+    }
+    return StopperResult::kStopSuccess;
+  });
 
   // Serial port for logcat, redirected to a pipe
   crosvm_cmd.AddHvcReadOnly(instance.logcat_pipe_name());
@@ -316,7 +400,7 @@
   } else {
     crosvm_cmd.AddHvcSink();
   }
-  if (config.enable_gnss_grpc_proxy()) {
+  if (instance.enable_gnss_grpc_proxy()) {
     crosvm_cmd.AddHvcReadWrite(
         instance.PerInstanceInternalPath("gnsshvc_fifo_vm.out"),
         instance.PerInstanceInternalPath("gnsshvc_fifo_vm.in"));
@@ -329,22 +413,35 @@
     }
   }
 
+  crosvm_cmd.AddHvcReadWrite(
+      instance.PerInstanceInternalPath("confui_fifo_vm.out"),
+      instance.PerInstanceInternalPath("confui_fifo_vm.in"));
+
+  if (config.enable_host_uwb()) {
+    crosvm_cmd.AddHvcReadWrite(
+        instance.PerInstanceInternalPath("uwb_fifo_vm.out"),
+        instance.PerInstanceInternalPath("uwb_fifo_vm.in"));
+  } else {
+    crosvm_cmd.AddHvcSink();
+  }
+
   for (auto i = 0; i < VmManager::kMaxDisks - disk_num; i++) {
     crosvm_cmd.AddHvcSink();
   }
-  CHECK(crosvm_cmd.HvcNum() + disk_num ==
-        VmManager::kMaxDisks + VmManager::kDefaultNumHvcs)
-      << "HVC count (" << crosvm_cmd.HvcNum() << ") + disk count (" << disk_num
-      << ") is not the expected total of "
-      << VmManager::kMaxDisks + VmManager::kDefaultNumHvcs << " devices";
+  CF_EXPECT(crosvm_cmd.HvcNum() + disk_num ==
+                VmManager::kMaxDisks + VmManager::kDefaultNumHvcs,
+            "HVC count (" << crosvm_cmd.HvcNum() << ") + disk count ("
+                          << disk_num << ") is not the expected total of "
+                          << VmManager::kMaxDisks + VmManager::kDefaultNumHvcs
+                          << " devices");
 
-  if (config.enable_audio()) {
-    crosvm_cmd.Cmd().AddParameter(
-        "--sound=", config.ForDefaultInstance().audio_server_path());
+  if (instance.enable_audio()) {
+    crosvm_cmd.Cmd().AddParameter("--sound=", instance.audio_server_path());
   }
 
-  // TODO(b/162071003): virtiofs crashes without sandboxing, this should be fixed
-  if (0 && config.enable_sandbox()) {
+  // TODO(b/162071003): virtiofs crashes without sandboxing, this should be
+  // fixed
+  if (instance.enable_sandbox()) {
     // Set up directory shared with virtiofs
     crosvm_cmd.Cmd().AddParameter(
         "--shared-dir=", instance.PerInstancePath(kSharedDirName),
@@ -352,50 +449,30 @@
   }
 
   // This needs to be the last parameter
-  crosvm_cmd.Cmd().AddParameter("--bios=", config.bootloader());
+  crosvm_cmd.Cmd().AddParameter("--bios=", instance.bootloader());
 
-  // TODO(b/199103204): remove this as well when PRODUCT_ENFORCE_MAC80211_HWSIM
-  // is removed
-  // Only run the leases workaround if we are not using the new network
-  // bridge architecture - in that case, we have a wider DHCP address
-  // space and stale leases should be much less of an issue
-  if (!FileExists("/var/run/cuttlefish-dnsmasq-cvd-wbr.leases") &&
-      wifi_tap->IsOpen()) {
-    // TODO(schuffelen): QEMU also needs this and this is not the best place for
-    // this code. Find a better place to put it.
-    auto lease_file =
-        ForCurrentInstance("/var/run/cuttlefish-dnsmasq-cvd-wbr-") + ".leases";
-
-    std::uint8_t dhcp_server_ip[] = {
-        192, 168, 96, (std::uint8_t)(ForCurrentInstance(1) * 4 - 3)};
-    if (!ReleaseDhcpLeases(lease_file, wifi_tap, dhcp_server_ip)) {
-      LOG(ERROR) << "Failed to release wifi DHCP leases. Connecting to the wifi "
-                 << "network may not work.";
-    }
-  }
-
-  std::vector<Command> ret;
+  // log_tee must be added before crosvm_cmd to ensure all of crosvm's logs are
+  // captured during shutdown. Processes are stopped in reverse order.
+  std::vector<MonitorCommand> commands;
+  commands.emplace_back(std::move(crosvm_log_tee_cmd));
 
   if (gpu_capture_enabled) {
     const std::string gpu_capture_basename =
-        cpp_basename(config.gpu_capture_binary());
+        cpp_basename(instance.gpu_capture_binary());
 
     auto gpu_capture_logs_path =
         instance.PerInstanceInternalPath("gpu_capture.fifo");
     auto gpu_capture_logs = SharedFD::Fifo(gpu_capture_logs_path, 0666);
-    if (!gpu_capture_logs->IsOpen()) {
-      LOG(FATAL)
-          << "Failed to create log fifo for gpu capture's stdout/stderr: "
-          << gpu_capture_logs->StrError();
-      return {};
-    }
+    CF_EXPECT(gpu_capture_logs->IsOpen(),
+              "Failed to create log fifo for gpu capture's stdout/stderr: "
+                  << gpu_capture_logs->StrError());
 
     Command gpu_capture_log_tee_cmd(HostBinaryPath("log_tee"));
     gpu_capture_log_tee_cmd.AddParameter("--process_name=",
                                          gpu_capture_basename);
     gpu_capture_log_tee_cmd.AddParameter("--log_fd_in=", gpu_capture_logs);
 
-    Command gpu_capture_command(config.gpu_capture_binary());
+    Command gpu_capture_command(instance.gpu_capture_binary());
     if (gpu_capture_basename == "ngfx") {
       // Crosvm depends on command line arguments being passed as multiple
       // arguments but ngfx only allows a single `--args`. To work around this,
@@ -406,9 +483,9 @@
       const std::string crosvm_wrapper_content =
           crosvm_cmd.Cmd().AsBashScript(crosvm_logs_path);
 
-      CHECK(android::base::WriteStringToFile(crosvm_wrapper_content,
-                                             crosvm_wrapper_path));
-      CHECK(MakeFileExecutable(crosvm_wrapper_path));
+      CF_EXPECT(android::base::WriteStringToFile(crosvm_wrapper_content,
+                                                 crosvm_wrapper_path));
+      CF_EXPECT(MakeFileExecutable(crosvm_wrapper_path));
 
       gpu_capture_command.AddParameter("--exe=", crosvm_wrapper_path);
       gpu_capture_command.AddParameter("--launch-detached");
@@ -416,8 +493,8 @@
       gpu_capture_command.AddParameter("--activity=Frame Debugger");
     } else {
       // TODO(natsu): renderdoc
-      LOG(FATAL) << "Unhandled GPU capture binary: "
-                 << config.gpu_capture_binary();
+      return CF_ERR(
+          "Unhandled GPU capture binary: " << instance.gpu_capture_binary());
     }
 
     gpu_capture_command.RedirectStdIO(Subprocess::StdIOChannel::kStdOut,
@@ -425,21 +502,18 @@
     gpu_capture_command.RedirectStdIO(Subprocess::StdIOChannel::kStdErr,
                                       gpu_capture_logs);
 
-    ret.push_back(std::move(gpu_capture_log_tee_cmd));
-    ret.push_back(std::move(gpu_capture_command));
+    commands.emplace_back(std::move(gpu_capture_log_tee_cmd));
+    commands.emplace_back(std::move(gpu_capture_command));
   } else {
     crosvm_cmd.Cmd().RedirectStdIO(Subprocess::StdIOChannel::kStdOut,
                                    crosvm_logs);
     crosvm_cmd.Cmd().RedirectStdIO(Subprocess::StdIOChannel::kStdErr,
                                    crosvm_logs);
-
-    ret.push_back(std::move(crosvm_cmd.Cmd()));
+    commands.emplace_back(std::move(crosvm_cmd.Cmd()), true);
   }
 
-  ret.push_back(std::move(crosvm_log_tee_cmd));
-  return ret;
+  return commands;
 }
 
-} // namespace vm_manager
-} // namespace cuttlefish
-
+}  // namespace vm_manager
+}  // namespace cuttlefish
diff --git a/host/libs/vm_manager/crosvm_manager.h b/host/libs/vm_manager/crosvm_manager.h
index bfd5b79..ea4a42e 100644
--- a/host/libs/vm_manager/crosvm_manager.h
+++ b/host/libs/vm_manager/crosvm_manager.h
@@ -16,12 +16,13 @@
 #pragma once
 
 #include <string>
+#include <unordered_map>
 #include <vector>
 
-#include "host/libs/vm_manager/vm_manager.h"
-
 #include "common/libs/fs/shared_fd.h"
-#include "common/libs/utils/subprocess.h"
+#include "common/libs/utils/result.h"
+#include "host/libs/config/command_source.h"
+#include "host/libs/vm_manager/vm_manager.h"
 
 namespace cuttlefish {
 namespace vm_manager {
@@ -34,14 +35,19 @@
   virtual ~CrosvmManager() = default;
 
   bool IsSupported() override;
-  std::vector<std::string> ConfigureGraphics(
-      const CuttlefishConfig& config) override;
-  std::string ConfigureBootDevices(int num_disks) override;
 
-  std::vector<cuttlefish::Command> StartCommands(
+  Result<std::unordered_map<std::string, std::string>> ConfigureGraphics(
+      const CuttlefishConfig::InstanceSpecific& instance) override;
+
+  Result<std::unordered_map<std::string, std::string>> ConfigureBootDevices(
+      int num_disks, bool have_gpu) override;
+
+  Result<std::vector<MonitorCommand>> StartCommands(
       const CuttlefishConfig& config) override;
+
+ private:
+  static constexpr int kCrosvmVmResetExitCode = 32;
 };
 
 } // namespace vm_manager
 } // namespace cuttlefish
-
diff --git a/host/libs/vm_manager/gem5_manager.cpp b/host/libs/vm_manager/gem5_manager.cpp
index ef9d2cd..04c8782 100644
--- a/host/libs/vm_manager/gem5_manager.cpp
+++ b/host/libs/vm_manager/gem5_manager.cpp
@@ -29,6 +29,8 @@
 #include <sstream>
 #include <string>
 #include <thread>
+#include <unordered_map>
+#include <utility>
 #include <vector>
 
 #include <android-base/strings.h>
@@ -37,11 +39,15 @@
 
 #include "common/libs/fs/shared_select.h"
 #include "common/libs/utils/files.h"
+#include "common/libs/utils/result.h"
 #include "common/libs/utils/subprocess.h"
 #include "common/libs/utils/users.h"
+#include "host/libs/config/command_source.h"
 #include "host/libs/config/cuttlefish_config.h"
 #include "host/libs/config/known_paths.h"
 
+using cuttlefish::StringFromEnv;
+
 namespace cuttlefish {
 namespace vm_manager {
 namespace {
@@ -51,7 +57,8 @@
   LOG(INFO) << key << "=" << value;
 }
 
-void GenerateGem5File(const CuttlefishConfig& config) {
+void GenerateGem5File(const CuttlefishConfig& config,
+                      const CuttlefishConfig::InstanceSpecific& instance) {
   // Gem5 specific config, currently users have to change these config locally (without throug launch_cvd input flag) to meet their design
   // TODO: Add these config into launch_cvd input flag or parse from one json file
   std::string cpu_class = "AtomicSimpleCPU";
@@ -66,7 +73,8 @@
   std::string mem_ranks = "None";
 
   // start generating starter_fs.py
-  std::string fs_path = config.gem5_binary_dir() + "/configs/example/arm/starter_fs.py";
+  std::string fs_path = instance.gem5_binary_dir() +
+                        "/configs/example/arm/starter_fs.py";
   std::ofstream starter_fs_ofstream(fs_path.c_str());
   starter_fs_ofstream << fs_header << "\n";
 
@@ -77,13 +85,13 @@
   starter_fs_ofstream << "def main():\n";
 
   // args
-  auto instance = config.ForDefaultInstance();
   starter_fs_ofstream << "  parser = argparse.ArgumentParser(epilog=__doc__)\n";
   starter_fs_ofstream << "  parser.add_argument(\"--disk-image\", action=\"append\", type=str, default=[])\n";
   starter_fs_ofstream << "  parser.add_argument(\"--mem-type\", default=\"" << mem_type << "\", choices=ObjectList.mem_list.get_names())\n";
   starter_fs_ofstream << "  parser.add_argument(\"--mem-channels\", type=int, default=" << mem_channels << ")\n";
   starter_fs_ofstream << "  parser.add_argument(\"--mem-ranks\", type=int, default=" << mem_ranks << ")\n";
-  starter_fs_ofstream << "  parser.add_argument(\"--mem-size\", action=\"store\", type=str, default=\"" << config.memory_mb() << "MB\")\n";
+  starter_fs_ofstream << "  parser.add_argument(\"--mem-size\", action=\"store\", type=str, default=\"" << instance.memory_mb() << "MB\")\n";
+  starter_fs_ofstream << "  parser.add_argument(\"--restore\", type=str, default=None)\n";
   starter_fs_ofstream << "  args = parser.parse_args()\n";
 
   // instantiate system
@@ -102,6 +110,7 @@
   starter_fs_ofstream << "  root.system.workload.dtb_filename = os.path.join(m5.options.outdir, 'system.dtb')\n";
   starter_fs_ofstream << "  root.system.generateDtb(root.system.workload.dtb_filename)\n";
   starter_fs_ofstream << "  root.system.workload.initrd_filename = \"" << instance.PerInstancePath("initrd.img") << "\"\n";
+  starter_fs_ofstream << "  root_dir = \"" << StringFromEnv("HOME", ".") << "\"\n";
 
   //kernel cmd
   starter_fs_ofstream << fs_kernel_cmd << "\n";
@@ -118,36 +127,73 @@
   return HostSupportsQemuCli();
 }
 
-std::vector<std::string> Gem5Manager::ConfigureGraphics(
-    const CuttlefishConfig& config) {
+Result<std::unordered_map<std::string, std::string>>
+Gem5Manager::ConfigureGraphics(
+    const CuttlefishConfig::InstanceSpecific& instance) {
   // TODO: Add support for the gem5 gpu models
 
   // Override the default HAL search paths in all cases. We do this because
   // the HAL search path allows for fallbacks, and fallbacks in conjunction
   // with properities lead to non-deterministic behavior while loading the
   // HALs.
-  return {
-      "androidboot.cpuvulkan.version=" + std::to_string(VK_API_VERSION_1_1),
-      "androidboot.hardware.gralloc=minigbm",
-      "androidboot.hardware.hwcomposer=" + config.hwcomposer(),
-      "androidboot.hardware.hwcomposer.mode=noop",
-      "androidboot.hardware.egl=angle",
-      "androidboot.hardware.vulkan=pastel",
-  };
+
+  std::unordered_map<std::string, std::string> bootconfig_args;
+
+  if (instance.gpu_mode() == kGpuModeGuestSwiftshader) {
+    LOG(INFO) << "We are in SwiftShader mode";
+    bootconfig_args = {
+        {"androidboot.cpuvulkan.version", std::to_string(VK_API_VERSION_1_1)},
+        {"androidboot.hardware.gralloc", "minigbm"},
+        {"androidboot.hardware.hwcomposer", "ranchu"},
+        {"androidboot.hardware.hwcomposer.mode", "noop"},
+        {"androidboot.hardware.hwcomposer.display_finder_mode", "gem5"},
+        {"androidboot.hardware.egl", "angle"},
+        {"androidboot.hardware.vulkan", "pastel"},
+        {"androidboot.opengles.version", "196609"},  // OpenGL ES 3.1
+    };
+  } else if (instance.gpu_mode() == kGpuModeGfxstream) {
+    LOG(INFO) << "We are in Gfxstream mode";
+    bootconfig_args = {
+        {"androidboot.cpuvulkan.version", "0"},
+        {"androidboot.hardware.gralloc", "minigbm"},
+        {"androidboot.hardware.hwcomposer", "ranchu"},
+        {"androidboot.hardware.hwcomposer.display_finder_mode", "gem5"},
+        {"androidboot.hardware.egl", "emulation"},
+        {"androidboot.hardware.vulkan", "ranchu"},
+        {"androidboot.hardware.gltransport", "virtio-gpu-pipe"},
+        {"androidboot.opengles.version", "196609"},  // OpenGL ES 3.1
+    };
+  } else if (instance.gpu_mode() == kGpuModeNone) {
+    return {};
+  } else {
+    return CF_ERR("Unknown GPU mode " << instance.gpu_mode());
+  }
+
+  if (!instance.gpu_angle_feature_overrides_enabled().empty()) {
+    bootconfig_args["androidboot.hardware.angle_feature_overrides_enabled"] =
+        instance.gpu_angle_feature_overrides_enabled();
+  }
+  if (!instance.gpu_angle_feature_overrides_disabled().empty()) {
+    bootconfig_args["androidboot.hardware.angle_feature_overrides_disabled"] =
+        instance.gpu_angle_feature_overrides_disabled();
+  }
+
+  return bootconfig_args;
 }
 
-std::string Gem5Manager::ConfigureBootDevices(int /*num_disks*/) {
+Result<std::unordered_map<std::string, std::string>>
+Gem5Manager::ConfigureBootDevices(int /*num_disks*/, bool /*have_gpu*/) {
   switch (arch_) {
     case Arch::Arm:
     case Arch::Arm64:
-      return "androidboot.boot_devices=30000000.pci";
+      return {{{"androidboot.boot_devices", "30000000.pci"}}};
     // TODO: Add x86 support
     default:
-      return "";
+      return CF_ERR("Unhandled arch");
   }
 }
 
-std::vector<Command> Gem5Manager::StartCommands(
+Result<std::vector<MonitorCommand>> Gem5Manager::StartCommands(
     const CuttlefishConfig& config) {
   auto instance = config.ForDefaultInstance();
 
@@ -156,32 +202,57 @@
                ? StopperResult::kStopCrash
                : StopperResult::kStopFailure;
   };
-  std::string gem5_binary = config.gem5_binary_dir();
+  std::string gem5_binary = instance.gem5_binary_dir();
   switch (arch_) {
     case Arch::Arm:
     case Arch::Arm64:
       gem5_binary += "/build/ARM/gem5.opt";
       break;
+    case Arch::RiscV64:
+      gem5_binary += "/build/RISCV/gem5.opt";
+      break;
     case Arch::X86:
     case Arch::X86_64:
       gem5_binary += "/build/X86/gem5.opt";
       break;
   }
   // generate Gem5 starter_fs.py before we execute it
-  GenerateGem5File(config);
+  GenerateGem5File(config, instance);
 
   Command gem5_cmd(gem5_binary, stop);
-  gem5_cmd.AddParameter(config.gem5_binary_dir(), "/configs/example/arm/starter_fs.py");
-  gem5_cmd.AddParameter("--mem-size=", config.memory_mb() * 1024ULL * 1024ULL);
+
+  // Always enable listeners, because auto mode will disable once it detects
+  // gem5 is not run interactively
+  gem5_cmd.AddParameter("--listener-mode=on");
+
+  // Add debug-flags and debug-file before the script (i.e. starter_fs.py).
+  // We check the flags are not empty first since they are optional
+  if(!config.gem5_debug_flags().empty()) {
+    gem5_cmd.AddParameter("--debug-flags=", config.gem5_debug_flags());
+    if(!instance.gem5_debug_file().empty()) {
+      gem5_cmd.AddParameter("--debug-file=", instance.gem5_debug_file());
+    }
+  }
+
+  gem5_cmd.AddParameter(instance.gem5_binary_dir(),
+                        "/configs/example/arm/starter_fs.py");
+
+  // restore checkpoint case
+  if (instance.gem5_checkpoint_dir() != "") {
+    gem5_cmd.AddParameter("--restore=",
+                          instance.gem5_checkpoint_dir());
+  }
+
+  gem5_cmd.AddParameter("--mem-size=", instance.memory_mb() * 1024ULL * 1024ULL);
   for (const auto& disk : instance.virtual_disk_paths()) {
     gem5_cmd.AddParameter("--disk-image=", disk);
   }
 
   LogAndSetEnv("M5_PATH", config.assembly_dir());
 
-  std::vector<Command> ret;
-  ret.push_back(std::move(gem5_cmd));
-  return ret;
+  std::vector<MonitorCommand> commands;
+  commands.emplace_back(std::move(gem5_cmd), true);
+  return commands;
 }
 
 } // namespace vm_manager
diff --git a/host/libs/vm_manager/gem5_manager.h b/host/libs/vm_manager/gem5_manager.h
index 1c19683..c491034 100644
--- a/host/libs/vm_manager/gem5_manager.h
+++ b/host/libs/vm_manager/gem5_manager.h
@@ -16,11 +16,13 @@
 #pragma once
 
 #include <string>
+#include <unordered_map>
 #include <vector>
 
-#include "host/libs/vm_manager/vm_manager.h"
-
 #include "common/libs/fs/shared_fd.h"
+#include "common/libs/utils/result.h"
+#include "host/libs/config/command_source.h"
+#include "host/libs/vm_manager/vm_manager.h"
 
 namespace cuttlefish {
 namespace vm_manager {
@@ -35,11 +37,14 @@
   virtual ~Gem5Manager() = default;
 
   bool IsSupported() override;
-  std::vector<std::string> ConfigureGraphics(
-      const CuttlefishConfig& config) override;
-  std::string ConfigureBootDevices(int num_disks) override;
 
-  std::vector<cuttlefish::Command> StartCommands(
+  Result<std::unordered_map<std::string, std::string>> ConfigureGraphics(
+      const CuttlefishConfig::InstanceSpecific& instance) override;
+
+  Result<std::unordered_map<std::string, std::string>> ConfigureBootDevices(
+      int num_disks, bool have_gpu) override;
+
+  Result<std::vector<MonitorCommand>> StartCommands(
       const CuttlefishConfig& config) override;
 
  private:
@@ -49,10 +54,12 @@
 const std::string fs_header = R"CPP_STR_END(import argparse
 import devices
 import os
+import shutil
 import m5
 from m5.util import addToPath
 from m5.objects import *
 from m5.options import *
+from m5.objects.Ethernet import NSGigE, IGbE_igb, IGbE_e1000, EtherTap
 from common import SysPaths
 from common import ObjectList
 from common import MemConfig
@@ -71,16 +78,23 @@
   pci_devices.append(PciVirtIO(vio=VirtIOConsole(device=Terminal(number=4, outfile="none"))))
   pci_devices.append(PciVirtIO(vio=VirtIOConsole(device=Terminal(number=5, outfile="none"))))
   pci_devices.append(PciVirtIO(vio=VirtIOConsole(device=Terminal(number=6, outfile="none"))))
+  pci_devices.append(PciVirtIO(vio=VirtIOConsole(device=Terminal(number=7, outfile="none"))))
+  pci_devices.append(PciVirtIO(vio=VirtIOConsole(device=Terminal(number=8, outfile="none"))))
+  pci_devices.append(PciVirtIO(vio=VirtIOConsole(device=Terminal(number=9, outfile="none"))))
 
   for each_item in args.disk_image:
     disk_image = CowDiskImage()
     disk_image.child.image_file = SysPaths.disk(each_item)
     pci_devices.append(PciVirtIO(vio=VirtIOBlock(image=disk_image)))
 
+  nic = IGbE_e1000(pci_bus=0, pci_dev=0, pci_func=0, InterruptLine=1, InterruptPin=1)
+  pci_devices.append(nic)
   root.system.pci_devices = pci_devices
   for pci_device in root.system.pci_devices:
     root.system.attach_pci(pci_device)
 
+  root.tap = EtherTap(tun_clone_device='/dev/net/tun', tap_device_name='cvd-mtap-01')
+  root.tap.tap = nic.interface
   root.system.connect()
 )CPP_STR_END";
 
@@ -101,8 +115,30 @@
     "androidboot.force_normal_boot=1",
   ]
   root.system.workload.command_line = " ".join(kernel_cmd)
-  m5.instantiate()
-  sys.exit(m5.simulate().getCode())
+  if args.restore is not None:
+    m5.instantiate(args.restore)
+  else:
+    m5.instantiate()
+
+  while True:
+    event = m5.simulate()
+    msg = event.getCause()
+    cur_tick = m5.curTick()
+    if msg == "checkpoint":
+      backup_path = os.path.join(root_dir, "gem5_checkpoint")
+      if not os.path.isdir(backup_path):
+        os.mkdir(backup_path)
+
+      print("Checkpoint @", cur_tick)
+      src_dir = os.path.join(m5.options.outdir, "cpt.%d" % cur_tick)
+      backup_path = os.path.join(backup_path, "cpt.%d" % cur_tick)
+      m5.checkpoint(src_dir)
+      shutil.copytree(src_dir, backup_path)
+      print("Checkpoint done.")
+    else:
+      print("Exit msg: " + msg + " @", cur_tick)
+      break
+  sys.exit(event.getCode())
 )CPP_STR_END";
 
 const std::string fs_exe_main = R"CPP_STR_END(
diff --git a/host/libs/vm_manager/qemu_manager.cpp b/host/libs/vm_manager/qemu_manager.cpp
index adedf04..27a3ddf 100644
--- a/host/libs/vm_manager/qemu_manager.cpp
+++ b/host/libs/vm_manager/qemu_manager.cpp
@@ -28,6 +28,8 @@
 #include <sstream>
 #include <string>
 #include <thread>
+#include <unordered_map>
+#include <utility>
 #include <vector>
 
 #include <android-base/strings.h>
@@ -36,8 +38,10 @@
 
 #include "common/libs/fs/shared_select.h"
 #include "common/libs/utils/files.h"
+#include "common/libs/utils/result.h"
 #include "common/libs/utils/subprocess.h"
 #include "common/libs/utils/users.h"
+#include "host/libs/config/command_source.h"
 #include "host/libs/config/cuttlefish_config.h"
 #include "host/libs/config/known_paths.h"
 
@@ -46,8 +50,8 @@
 namespace {
 
 std::string GetMonitorPath(const CuttlefishConfig& config) {
-  return config.ForDefaultInstance()
-      .PerInstanceInternalPath("qemu_monitor.sock");
+  return config.ForDefaultInstance().PerInstanceInternalUdsPath(
+      "qemu_monitor.sock");
 }
 
 void LogAndSetEnv(const char* key, const std::string& value) {
@@ -85,8 +89,7 @@
   return true;
 }
 
-std::pair<int,int> GetQemuVersion(const std::string& qemu_binary)
-{
+Result<std::pair<int, int>> GetQemuVersion(const std::string& qemu_binary) {
   Command qemu_version_cmd(qemu_binary);
   qemu_version_cmd.AddParameter("-version");
 
@@ -98,11 +101,10 @@
                                       &qemu_version_input,
                                       &qemu_version_output,
                                       &qemu_version_error, options);
-  if (qemu_version_ret != 0) {
-    LOG(FATAL) << qemu_binary << " -version returned unexpected response "
-               << qemu_version_output << ". Stderr was " << qemu_version_error;
-    return { 0, 0 };
-  }
+  CF_EXPECT(qemu_version_ret == 0,
+            qemu_binary << " -version returned unexpected response "
+                        << qemu_version_output << ". Stderr was "
+                        << qemu_version_error);
 
   // Snip around the extra text we don't care about
   qemu_version_output.erase(0, std::string("QEMU emulator version ").length());
@@ -112,7 +114,7 @@
   }
 
   auto qemu_version_bits = android::base::Split(qemu_version_output, ".");
-  return { std::stoi(qemu_version_bits[0]), std::stoi(qemu_version_bits[1]) };
+  return {{std::stoi(qemu_version_bits[0]), std::stoi(qemu_version_bits[1])}};
 }
 
 }  // namespace
@@ -123,50 +125,92 @@
   return HostSupportsQemuCli();
 }
 
-std::vector<std::string> QemuManager::ConfigureGraphics(
-    const CuttlefishConfig& config) {
-  if (config.gpu_mode() == kGpuModeGuestSwiftshader) {
-    // Override the default HAL search paths in all cases. We do this because
-    // the HAL search path allows for fallbacks, and fallbacks in conjunction
-    // with properities lead to non-deterministic behavior while loading the
-    // HALs.
-    return {
-        "androidboot.cpuvulkan.version=" + std::to_string(VK_API_VERSION_1_1),
-        "androidboot.hardware.gralloc=minigbm",
-        "androidboot.hardware.hwcomposer=" + config.hwcomposer(),
-        "androidboot.hardware.egl=angle",
-        "androidboot.hardware.vulkan=pastel",
+Result<std::unordered_map<std::string, std::string>>
+QemuManager::ConfigureGraphics(
+    const CuttlefishConfig::InstanceSpecific& instance) {
+  // Override the default HAL search paths in all cases. We do this because
+  // the HAL search path allows for fallbacks, and fallbacks in conjunction
+  // with properties lead to non-deterministic behavior while loading the
+  // HALs.
+
+  std::unordered_map<std::string, std::string> bootconfig_args;
+
+  if (instance.gpu_mode() == kGpuModeGuestSwiftshader) {
+    bootconfig_args = {
+        {"androidboot.cpuvulkan.version", std::to_string(VK_API_VERSION_1_2)},
+        {"androidboot.hardware.gralloc", "minigbm"},
+        {"androidboot.hardware.hwcomposer", instance.hwcomposer()},
+        {"androidboot.hardware.egl", "angle"},
+        {"androidboot.hardware.vulkan", "pastel"},
+        // OpenGL ES 3.1
+        {"androidboot.opengles.version", "196609"},
     };
+  } else if (instance.gpu_mode() == kGpuModeDrmVirgl) {
+    bootconfig_args = {
+        {"androidboot.cpuvulkan.version", "0"},
+        {"androidboot.hardware.gralloc", "minigbm"},
+        {"androidboot.hardware.hwcomposer", "ranchu"},
+        {"androidboot.hardware.hwcomposer.mode", "client"},
+        {"androidboot.hardware.egl", "mesa"},
+        // No "hardware" Vulkan support, yet
+        // OpenGL ES 3.0
+        {"androidboot.opengles.version", "196608"},
+    };
+  } else if (instance.gpu_mode() == kGpuModeGfxstream ||
+             instance.gpu_mode() == kGpuModeGfxstreamGuestAngle) {
+    const bool uses_angle = instance.gpu_mode() == kGpuModeGfxstreamGuestAngle;
+    const std::string gles_impl = uses_angle ? "angle" : "emulation";
+    const std::string gltransport =
+        (instance.guest_android_version() == "11.0.0") ? "virtio-gpu-pipe"
+                                                       : "virtio-gpu-asg";
+    bootconfig_args = {
+        {"androidboot.cpuvulkan.version", "0"},
+        {"androidboot.hardware.gralloc", "minigbm"},
+        {"androidboot.hardware.hwcomposer", instance.hwcomposer()},
+        {"androidboot.hardware.hwcomposer.display_finder_mode", "drm"},
+        {"androidboot.hardware.egl", gles_impl},
+        {"androidboot.hardware.vulkan", "ranchu"},
+        {"androidboot.hardware.gltransport", gltransport},
+        {"androidboot.opengles.version", "196609"},  // OpenGL ES 3.1
+    };
+  } else if (instance.gpu_mode() == kGpuModeNone) {
+    return {};
+  } else {
+    return CF_ERR("Unhandled GPU mode: " << instance.gpu_mode());
   }
 
-  if (config.gpu_mode() == kGpuModeDrmVirgl) {
-    return {
-      "androidboot.cpuvulkan.version=0",
-      "androidboot.hardware.gralloc=minigbm",
-      "androidboot.hardware.hwcomposer=drm",
-      "androidboot.hardware.egl=mesa",
-    };
+  if (!instance.gpu_angle_feature_overrides_enabled().empty()) {
+    bootconfig_args["androidboot.hardware.angle_feature_overrides_enabled"] =
+        instance.gpu_angle_feature_overrides_enabled();
+  }
+  if (!instance.gpu_angle_feature_overrides_disabled().empty()) {
+    bootconfig_args["androidboot.hardware.angle_feature_overrides_disabled"] =
+        instance.gpu_angle_feature_overrides_disabled();
   }
 
-  return {};
+  return bootconfig_args;
 }
 
-std::string QemuManager::ConfigureBootDevices(int num_disks) {
+Result<std::unordered_map<std::string, std::string>>
+QemuManager::ConfigureBootDevices(int num_disks, bool have_gpu) {
   switch (arch_) {
+    case Arch::Arm:
+      return {{{"androidboot.boot_devices", "3f000000.pcie"}}};
+    case Arch::Arm64:
+      return {{{"androidboot.boot_devices", "4010000000.pcie"}}};
+    case Arch::RiscV64:
+      return {{{"androidboot.boot_devices", "soc/30000000.pci"}}};
     case Arch::X86:
     case Arch::X86_64: {
       // QEMU has additional PCI devices for an ISA bridge and PIIX4
       // virtio_gpu precedes the first console or disk
-      return ConfigureMultipleBootDevices("pci0000:00/0000:00:", 3, num_disks);
+      return ConfigureMultipleBootDevices("pci0000:00/0000:00:",
+                                          2 + (have_gpu ? 1 : 0), num_disks);
     }
-    case Arch::Arm:
-      return "androidboot.boot_devices=3f000000.pcie";
-    case Arch::Arm64:
-      return "androidboot.boot_devices=4010000000.pcie";
   }
 }
 
-std::vector<Command> QemuManager::StartCommands(
+Result<std::vector<MonitorCommand>> QemuManager::StartCommands(
     const CuttlefishConfig& config) {
   auto instance = config.ForDefaultInstance();
 
@@ -181,7 +225,7 @@
                ? StopperResult::kStopCrash
                : StopperResult::kStopFailure;
   };
-  std::string qemu_binary = config.qemu_binary_dir();
+  std::string qemu_binary = instance.qemu_binary_dir();
   switch (arch_) {
     case Arch::Arm:
       qemu_binary += "/qemu-system-arm";
@@ -189,6 +233,9 @@
     case Arch::Arm64:
       qemu_binary += "/qemu-system-aarch64";
       break;
+    case Arch::RiscV64:
+      qemu_binary += "/qemu-system-riscv64";
+      break;
     case Arch::X86:
       qemu_binary += "/qemu-system-i386";
       break;
@@ -197,7 +244,7 @@
       break;
   }
 
-  auto qemu_version = GetQemuVersion(qemu_binary);
+  auto qemu_version = CF_EXPECT(GetQemuVersion(qemu_binary));
   Command qemu_cmd(qemu_binary, stop);
 
   int hvc_num = 0;
@@ -265,37 +312,42 @@
   };
 
   bool is_arm = arch_ == Arch::Arm || arch_ == Arch::Arm64;
-  bool is_arm64 = arch_ == Arch::Arm64;
+  bool is_x86 = arch_ == Arch::X86 || arch_ == Arch::X86_64;
+  bool is_riscv64 = arch_ == Arch::RiscV64;
 
   auto access_kregistry_size_bytes = 0;
   if (FileExists(instance.access_kregistry_path())) {
     access_kregistry_size_bytes = FileSize(instance.access_kregistry_path());
-    CHECK((access_kregistry_size_bytes & (1024 * 1024 - 1)) == 0)
-        << instance.access_kregistry_path() <<  " file size ("
-        << access_kregistry_size_bytes << ") not a multiple of 1MB";
+    CF_EXPECT((access_kregistry_size_bytes & (1024 * 1024 - 1)) == 0,
+              instance.access_kregistry_path()
+                  << " file size (" << access_kregistry_size_bytes
+                  << ") not a multiple of 1MB");
   }
 
   auto hwcomposer_pmem_size_bytes = 0;
-  if (FileExists(instance.hwcomposer_pmem_path())) {
-    hwcomposer_pmem_size_bytes = FileSize(instance.hwcomposer_pmem_path());
-    CHECK((hwcomposer_pmem_size_bytes & (1024 * 1024 - 1)) == 0)
-        << instance.hwcomposer_pmem_path() << " file size ("
-        << hwcomposer_pmem_size_bytes << ") not a multiple of 1MB";
+  if (instance.hwcomposer() != kHwComposerNone) {
+    if (FileExists(instance.hwcomposer_pmem_path())) {
+      hwcomposer_pmem_size_bytes = FileSize(instance.hwcomposer_pmem_path());
+      CF_EXPECT((hwcomposer_pmem_size_bytes & (1024 * 1024 - 1)) == 0,
+                instance.hwcomposer_pmem_path()
+                    << " file size (" << hwcomposer_pmem_size_bytes
+                    << ") not a multiple of 1MB");
+    }
   }
 
   auto pstore_size_bytes = 0;
   if (FileExists(instance.pstore_path())) {
     pstore_size_bytes = FileSize(instance.pstore_path());
-    CHECK((pstore_size_bytes & (1024 * 1024 - 1)) == 0)
-        << instance.pstore_path() <<  " file size ("
-        << pstore_size_bytes << ") not a multiple of 1MB";
+    CF_EXPECT((pstore_size_bytes & (1024 * 1024 - 1)) == 0,
+              instance.pstore_path() << " file size (" << pstore_size_bytes
+                                     << ") not a multiple of 1MB");
   }
 
   qemu_cmd.AddParameter("-name");
   qemu_cmd.AddParameter("guest=", instance.instance_name(), ",debug-threads=on");
 
   qemu_cmd.AddParameter("-machine");
-  std::string machine = is_arm ? "virt" : "pc-i440fx-2.8,nvdimm=on";
+  std::string machine = is_x86 ? "pc,nvdimm=on" : "virt";
   if (IsHostCompatible(arch_)) {
     machine += ",accel=kvm";
     if (is_arm) {
@@ -304,22 +356,20 @@
   } else if (is_arm) {
     // QEMU doesn't support GICv3 with TCG yet
     machine += ",gic-version=2";
-    if (is_arm64) {
-      // Only enable MTE in TCG mode. We haven't started to run on ARMv8/ARMv9
-      // devices with KVM and MTE, so MTE will always require TCG
-      machine += ",mte=on";
-    }
-    CHECK(config.cpus() <= 8) << "CPUs must be no more than 8 with GICv2";
+    CF_EXPECT(instance.cpus() <= 8, "CPUs must be no more than 8 with GICv2");
+  }
+  if (instance.mte()) {
+    machine += ",mte=on";
   }
   qemu_cmd.AddParameter(machine, ",usb=off,dump-guest-core=off");
 
   qemu_cmd.AddParameter("-m");
-  auto maxmem = config.memory_mb() +
+  auto maxmem = instance.memory_mb() +
                 (access_kregistry_size_bytes / 1024 / 1024) +
                 (hwcomposer_pmem_size_bytes / 1024 / 1024) +
-                (is_arm ? 0 : pstore_size_bytes / 1024 / 1024);
-  auto slots = is_arm ? "" : ",slots=2";
-  qemu_cmd.AddParameter("size=", config.memory_mb(), "M",
+                (is_x86 ? pstore_size_bytes / 1024 / 1024 : 0);
+  auto slots = is_x86 ? ",slots=2" : "";
+  qemu_cmd.AddParameter("size=", instance.memory_mb(), "M",
                         ",maxmem=", maxmem, "M", slots);
 
   qemu_cmd.AddParameter("-overcommit");
@@ -328,14 +378,14 @@
   // Assume SMT is always 2 threads per core, which is how most hardware
   // today is configured, and the way crosvm does it
   qemu_cmd.AddParameter("-smp");
-  if (config.smt()) {
-    CHECK(config.cpus() % 2 == 0)
-        << "CPUs must be a multiple of 2 in SMT mode";
-    qemu_cmd.AddParameter(config.cpus(), ",cores=",
-                          config.cpus() / 2, ",threads=2");
+  if (instance.smt()) {
+    CF_EXPECT(instance.cpus() % 2 == 0,
+              "CPUs must be a multiple of 2 in SMT mode");
+    qemu_cmd.AddParameter(instance.cpus(), ",cores=",
+                          instance.cpus() / 2, ",threads=2");
   } else {
-    qemu_cmd.AddParameter(config.cpus(), ",cores=",
-                          config.cpus(), ",threads=1");
+    qemu_cmd.AddParameter(instance.cpus(), ",cores=",
+                          instance.cpus(), ",threads=1");
   }
 
   qemu_cmd.AddParameter("-uuid");
@@ -358,45 +408,69 @@
   qemu_cmd.AddParameter("-mon");
   qemu_cmd.AddParameter("chardev=charmonitor,id=monitor,mode=control");
 
-  if (config.gpu_mode() == kGpuModeDrmVirgl) {
+  if (instance.gpu_mode() == kGpuModeDrmVirgl) {
     qemu_cmd.AddParameter("-display");
     qemu_cmd.AddParameter("egl-headless");
 
     qemu_cmd.AddParameter("-vnc");
-    qemu_cmd.AddParameter(":", instance.qemu_vnc_server_port());
+    qemu_cmd.AddParameter("127.0.0.1:", instance.qemu_vnc_server_port());
+  } else if (instance.gpu_mode() == kGpuModeGfxstream ||
+             instance.gpu_mode() == kGpuModeGfxstreamGuestAngle) {
+    qemu_cmd.AddParameter("-vnc");
+    qemu_cmd.AddParameter("127.0.0.1:", instance.qemu_vnc_server_port());
   } else {
     qemu_cmd.AddParameter("-display");
     qemu_cmd.AddParameter("none");
   }
 
-  auto display_configs = config.display_configs();
-  CHECK_GE(display_configs.size(), 1);
-  auto display_config = display_configs[0];
+  if (instance.hwcomposer() != kHwComposerNone) {
+    auto display_configs = instance.display_configs();
+    CF_EXPECT(display_configs.size() >= 1);
+    auto display_config = display_configs[0];
 
-  qemu_cmd.AddParameter("-device");
+    qemu_cmd.AddParameter("-device");
 
-  bool use_gpu_gl = qemu_version.first >= 6 &&
-                    config.gpu_mode() != kGpuModeGuestSwiftshader;
-  qemu_cmd.AddParameter(use_gpu_gl ?
-                            "virtio-gpu-gl-pci" : "virtio-gpu-pci", ",id=gpu0",
-                        ",xres=", display_config.width,
-                        ",yres=", display_config.height);
+    std::string gpu_device;
+    if (instance.gpu_mode() == kGpuModeGuestSwiftshader ||
+        qemu_version.first < 6) {
+        gpu_device = "virtio-gpu-pci";
+    } else if (instance.gpu_mode() == kGpuModeDrmVirgl) {
+        gpu_device = "virtio-gpu-gl-pci";
+    } else if (instance.gpu_mode() == kGpuModeGfxstream ||
+               instance.gpu_mode() == kGpuModeGfxstreamGuestAngle) {
+        gpu_device = "virtio-gpu-gl-pci,capset_names=gfxstream,hostmem=256M";
+    }
 
-  // In kgdb mode, earlycon is an interactive console, and so early
-  // dmesg will go there instead of the kernel.log. On QEMU, we do this
-  // bit of logic up before the hvc console is set up, so the command line
-  // flags appear in the right order and "append=on" does the right thing
-  if (!config.console() && (config.kgdb() || config.use_bootloader())) {
-    add_serial_console_ro(instance.kernel_log_pipe_name());
+    qemu_cmd.AddParameter(gpu_device, ",id=gpu0",
+                          ",xres=", display_config.width,
+                          ",yres=", display_config.height);
   }
 
-  // Use a virtio-console instance for the main kernel console. All
-  // messages will switch from earlycon to virtio-console after the driver
-  // is loaded, and QEMU will append to the kernel log automatically
+  if (!instance.console()) {
+    // In kgdb mode, earlycon is an interactive console, and so early
+    // dmesg will go there instead of the kernel.log. On QEMU, we do this
+    // bit of logic up before the hvc console is set up, so the command line
+    // flags appear in the right order and "append=on" does the right thing
+    if (instance.enable_kernel_log() &&
+        (instance.kgdb() || instance.use_bootloader())) {
+      add_serial_console_ro(instance.kernel_log_pipe_name());
+    }
+  }
+
+  // If kernel log is enabled, the virtio-console port will be specified as
+  // a true console for Linux, and kernel messages will be printed there.
+  // Otherwise, the port will still be set up for bootloader and userspace
+  // messages, but the kernel will not print anything here. This keeps our
+  // kernel log event features working. If an alternative "earlycon" boot
+  // console is configured above on a legacy serial port, it will control
+  // the main log until the virtio-console takes over.
+  // (Note that QEMU does not automatically generate console= parameters for
+  //  the bootloader/kernel cmdline, so the control of whether this pipe is
+  //  actually managed by the kernel as a console is handled elsewhere.)
   add_hvc_ro(instance.kernel_log_pipe_name());
 
-  if (config.console()) {
-    if (config.kgdb() || config.use_bootloader()) {
+  if (instance.console()) {
+    if (instance.kgdb() || instance.use_bootloader()) {
       add_serial_console(instance.console_pipe_prefix());
 
       // In kgdb mode, we have the interactive console on ttyS0 (both Android's
@@ -410,7 +484,7 @@
       add_hvc(instance.console_pipe_prefix());
     }
   } else {
-    if (config.kgdb() || config.use_bootloader()) {
+    if (instance.kgdb() || instance.use_bootloader()) {
       // The add_serial_console_ro() call above was applied by the time we reach
       // this code, so we don't need another add_serial_*() call
     }
@@ -432,7 +506,7 @@
     add_hvc_sink();
   }
 
-  if (config.enable_gnss_grpc_proxy()) {
+  if (instance.enable_gnss_grpc_proxy()) {
     add_hvc(instance.PerInstanceInternalPath("gnsshvc_fifo_vm"));
     add_hvc(instance.PerInstanceInternalPath("locationhvc_fifo_vm"));
   } else {
@@ -441,21 +515,42 @@
     }
   }
 
+  /* Added one for confirmation UI.
+   *
+   * b/237452165
+   *
+   * Confirmation UI is not supported with QEMU for now. In order
+   * to not conflict with confirmation UI-related configurations used
+   * w/ Crosvm, we should add one generic hvc sink.
+   *
+   * confui_fifo_vm.{in/out} are created along with the streamer process,
+   * which is not created w/ QEMU.
+   */
+  add_hvc_sink();
+
+  if (config.enable_host_uwb()) {
+    add_hvc("uwb_fifo_vm");
+  } else {
+    add_hvc_sink();
+  }
+
   auto disk_num = instance.virtual_disk_paths().size();
 
   for (auto i = 0; i < VmManager::kMaxDisks - disk_num; i++) {
     add_hvc_sink();
   }
 
-  CHECK(hvc_num + disk_num == VmManager::kMaxDisks + VmManager::kDefaultNumHvcs)
-      << "HVC count (" << hvc_num << ") + disk count (" << disk_num << ") "
-      << "is not the expected total of "
-      << VmManager::kMaxDisks + VmManager::kDefaultNumHvcs << " devices";
+  CF_EXPECT(
+      hvc_num + disk_num == VmManager::kMaxDisks + VmManager::kDefaultNumHvcs,
+      "HVC count (" << hvc_num << ") + disk count (" << disk_num << ") "
+                    << "is not the expected total of "
+                    << VmManager::kMaxDisks + VmManager::kDefaultNumHvcs
+                    << " devices");
 
-  CHECK_GE(VmManager::kMaxDisks, disk_num)
-      << "Provided too many disks (" << disk_num << "), maximum "
-      << VmManager::kMaxDisks << "supported";
-  auto readonly = config.protected_vm() ? ",readonly" : "";
+  CF_EXPECT(VmManager::kMaxDisks >= disk_num,
+            "Provided too many disks (" << disk_num << "), maximum "
+                                        << VmManager::kMaxDisks << "supported");
+  auto readonly = instance.protected_vm() ? ",readonly" : "";
   for (size_t i = 0; i < disk_num; i++) {
     auto bootindex = i == 0 ? ",bootindex=1" : "";
     auto format = i == 0 ? "" : ",format=raw";
@@ -468,8 +563,8 @@
                           ",id=virtio-disk", i, bootindex);
   }
 
-  if (!is_arm && FileExists(instance.pstore_path())) {
-    // QEMU will assign the NVDIMM (ramoops pstore region) 100000000-1001fffff
+  if (is_x86 && FileExists(instance.pstore_path())) {
+    // QEMU will assign the NVDIMM (ramoops pstore region) 150000000-1501fffff
     // As we will pass this to ramoops, define this region first so it is always
     // located at this address. This is currently x86 only.
     qemu_cmd.AddParameter("-object");
@@ -480,9 +575,9 @@
     qemu_cmd.AddParameter("nvdimm,memdev=objpmem0,id=ramoops");
   }
 
-  // QEMU does not implement virtio-pmem-pci for ARM64 yet; restore this
-  // when the device has been added
-  if (!is_arm) {
+  // QEMU does not implement virtio-pmem-pci for ARM64 or RISC-V yet; restore
+  // this when the device has been added
+  if (is_x86) {
     if (access_kregistry_size_bytes > 0) {
       qemu_cmd.AddParameter("-object");
       qemu_cmd.AddParameter(
@@ -524,34 +619,41 @@
   qemu_cmd.AddParameter("-device");
   qemu_cmd.AddParameter("virtio-keyboard-pci,disable-legacy=on");
 
-  auto vhost_net = config.vhost_net() ? ",vhost=on" : "";
+  auto vhost_net = instance.vhost_net() ? ",vhost=on" : "";
 
   qemu_cmd.AddParameter("-device");
   qemu_cmd.AddParameter("virtio-balloon-pci-non-transitional,id=balloon0");
 
+  // The ordering of tap devices is important. Make sure any change here
+  // is reflected in ethprime u-boot variable
   qemu_cmd.AddParameter("-netdev");
   qemu_cmd.AddParameter("tap,id=hostnet0,ifname=", instance.mobile_tap_name(),
                         ",script=no,downscript=no", vhost_net);
 
   qemu_cmd.AddParameter("-device");
-  qemu_cmd.AddParameter("virtio-net-pci-non-transitional,netdev=hostnet0,id=net0");
+  qemu_cmd.AddParameter("virtio-net-pci-non-transitional,netdev=hostnet0,id=net0,mac=",
+                        instance.mobile_mac());
 
   qemu_cmd.AddParameter("-netdev");
   qemu_cmd.AddParameter("tap,id=hostnet1,ifname=", instance.ethernet_tap_name(),
                         ",script=no,downscript=no", vhost_net);
 
   qemu_cmd.AddParameter("-device");
-  qemu_cmd.AddParameter("virtio-net-pci-non-transitional,netdev=hostnet1,id=net1");
-#ifndef ENFORCE_MAC80211_HWSIM
-  qemu_cmd.AddParameter("-netdev");
-  qemu_cmd.AddParameter("tap,id=hostnet2,ifname=", instance.wifi_tap_name(),
-                        ",script=no,downscript=no", vhost_net);
-  qemu_cmd.AddParameter("-device");
-  qemu_cmd.AddParameter("virtio-net-pci-non-transitional,netdev=hostnet2,id=net2");
-#endif
+  qemu_cmd.AddParameter("virtio-net-pci-non-transitional,netdev=hostnet1,id=net1,mac=",
+                        instance.ethernet_mac());
+  if (!config.virtio_mac80211_hwsim()) {
+    qemu_cmd.AddParameter("-netdev");
+    qemu_cmd.AddParameter("tap,id=hostnet2,ifname=", instance.wifi_tap_name(),
+                          ",script=no,downscript=no", vhost_net);
+    qemu_cmd.AddParameter("-device");
+    qemu_cmd.AddParameter("virtio-net-pci-non-transitional,netdev=hostnet2,id=net2,mac=",
+                          instance.wifi_mac());
+  }
 
-  qemu_cmd.AddParameter("-cpu");
-  qemu_cmd.AddParameter(IsHostCompatible(arch_) ? "host" : "max");
+  if (is_x86 || is_arm) {
+    qemu_cmd.AddParameter("-cpu");
+    qemu_cmd.AddParameter(IsHostCompatible(arch_) ? "host" : "max");
+  }
 
   qemu_cmd.AddParameter("-msg");
   qemu_cmd.AddParameter("timestamp=on");
@@ -566,22 +668,25 @@
   qemu_cmd.AddParameter("-device");
   qemu_cmd.AddParameter("qemu-xhci,id=xhci");
 
-  qemu_cmd.AddParameter("-bios");
-  qemu_cmd.AddParameter(config.bootloader());
+  if (is_riscv64) {
+    qemu_cmd.AddParameter("-kernel");
+  } else {
+    qemu_cmd.AddParameter("-bios");
+  }
+  qemu_cmd.AddParameter(instance.bootloader());
 
-  if (config.gdb_port() > 0) {
+  if (instance.gdb_port() > 0) {
     qemu_cmd.AddParameter("-S");
     qemu_cmd.AddParameter("-gdb");
-    qemu_cmd.AddParameter("tcp::", config.gdb_port());
+    qemu_cmd.AddParameter("tcp::", instance.gdb_port());
   }
 
   LogAndSetEnv("QEMU_AUDIO_DRV", "none");
 
-  std::vector<Command> ret;
-  ret.push_back(std::move(qemu_cmd));
-  return ret;
+  std::vector<MonitorCommand> commands;
+  commands.emplace_back(std::move(qemu_cmd), true);
+  return commands;
 }
 
 } // namespace vm_manager
-} // namespace cuttlefish
-
+}  // namespace cuttlefish
diff --git a/host/libs/vm_manager/qemu_manager.h b/host/libs/vm_manager/qemu_manager.h
index f213ca2..262bb0d 100644
--- a/host/libs/vm_manager/qemu_manager.h
+++ b/host/libs/vm_manager/qemu_manager.h
@@ -16,11 +16,13 @@
 #pragma once
 
 #include <string>
+#include <unordered_map>
 #include <vector>
 
-#include "host/libs/vm_manager/vm_manager.h"
-
 #include "common/libs/fs/shared_fd.h"
+#include "common/libs/utils/result.h"
+#include "host/libs/config/command_source.h"
+#include "host/libs/vm_manager/vm_manager.h"
 
 namespace cuttlefish {
 namespace vm_manager {
@@ -35,11 +37,14 @@
   virtual ~QemuManager() = default;
 
   bool IsSupported() override;
-  std::vector<std::string> ConfigureGraphics(
-      const CuttlefishConfig& config) override;
-  std::string ConfigureBootDevices(int num_disks) override;
 
-  std::vector<cuttlefish::Command> StartCommands(
+  Result<std::unordered_map<std::string, std::string>> ConfigureGraphics(
+      const CuttlefishConfig::InstanceSpecific& instance) override;
+
+  Result<std::unordered_map<std::string, std::string>> ConfigureBootDevices(
+      int num_disks, bool have_gpu) override;
+
+  Result<std::vector<MonitorCommand>> StartCommands(
       const CuttlefishConfig& config) override;
 
  private:
@@ -48,4 +53,3 @@
 
 } // namespace vm_manager
 } // namespace cuttlefish
-
diff --git a/host/libs/vm_manager/vm_manager.cpp b/host/libs/vm_manager/vm_manager.cpp
index 2726f0c..a74a051 100644
--- a/host/libs/vm_manager/vm_manager.cpp
+++ b/host/libs/vm_manager/vm_manager.cpp
@@ -16,12 +16,18 @@
 
 #include "host/libs/vm_manager/vm_manager.h"
 
+#include <iomanip>
+#include <memory>
+#include <string>
+#include <unordered_map>
+#include <unordered_set>
+#include <vector>
+
 #include <android-base/logging.h>
 #include <fruit/fruit.h>
 
-#include <iomanip>
-#include <memory>
-
+#include "common/libs/utils/result.h"
+#include "host/libs/config/command_source.h"
 #include "host/libs/config/cuttlefish_config.h"
 #include "host/libs/vm_manager/crosvm_manager.h"
 #include "host/libs/vm_manager/gem5_manager.h"
@@ -50,30 +56,58 @@
   return vmm;
 }
 
-std::string ConfigureMultipleBootDevices(const std::string& pci_path,
-                                         int pci_offset, int num_disks) {
+Result<std::unordered_map<std::string, std::string>>
+ConfigureMultipleBootDevices(const std::string& pci_path, int pci_offset,
+                             int num_disks) {
   int num_boot_devices =
       (num_disks < VmManager::kDefaultNumBootDevices) ? num_disks : VmManager::kDefaultNumBootDevices;
-  std::string boot_devices_prop = "androidboot.boot_devices=";
+  std::string boot_devices_prop_val = "";
   for (int i = 0; i < num_boot_devices; i++) {
     std::stringstream stream;
     stream << std::setfill('0') << std::setw(2) << std::hex
            << pci_offset + i + VmManager::kDefaultNumHvcs + VmManager::kMaxDisks - num_disks;
-    boot_devices_prop += pci_path + stream.str() + ".0,";
+    boot_devices_prop_val += pci_path + stream.str() + ".0,";
   }
-  boot_devices_prop.pop_back();
-  return {boot_devices_prop};
+  boot_devices_prop_val.pop_back();
+  return {{{"androidboot.boot_devices", boot_devices_prop_val}}};
 }
 
-fruit::Component<fruit::Required<const CuttlefishConfig>, VmManager>
+class VmmCommands : public CommandSource {
+ public:
+  INJECT(VmmCommands(const CuttlefishConfig& config, VmManager& vmm))
+      : config_(config), vmm_(vmm) {}
+
+  // CommandSource
+  Result<std::vector<MonitorCommand>> Commands() override {
+    return vmm_.StartCommands(config_);
+  }
+
+  // SetupFeature
+  std::string Name() const override { return "VirtualMachineManager"; }
+  bool Enabled() const override { return true; }
+
+ private:
+  std::unordered_set<SetupFeature*> Dependencies() const override { return {}; }
+  bool Setup() override { return true; }
+
+  const CuttlefishConfig& config_;
+  VmManager& vmm_;
+};
+
+fruit::Component<fruit::Required<const CuttlefishConfig,
+                                 const CuttlefishConfig::InstanceSpecific>,
+                 VmManager>
 VmManagerComponent() {
-  return fruit::createComponent().registerProvider(
-      [](const CuttlefishConfig& config) {
-        auto vmm = GetVmManager(config.vm_manager(), config.target_arch());
+  return fruit::createComponent()
+      .registerProvider([](const CuttlefishConfig& config,
+                           const CuttlefishConfig::InstanceSpecific& instance) {
+        auto vmm = GetVmManager(config.vm_manager(), instance.target_arch());
         CHECK(vmm) << "Invalid VMM/Arch: \"" << config.vm_manager() << "\""
-                   << (int)config.target_arch() << "\"";
+                   << (int)instance.target_arch() << "\"";
         return vmm.release();  // fruit takes ownership of raw pointers
-      });
+      })
+      .addMultibinding<CommandSource, VmmCommands>()
+      .addMultibinding<SetupFeature, VmmCommands>();
 }
 
 } // namespace vm_manager
diff --git a/host/libs/vm_manager/vm_manager.h b/host/libs/vm_manager/vm_manager.h
index 7a81a97..739e0db 100644
--- a/host/libs/vm_manager/vm_manager.h
+++ b/host/libs/vm_manager/vm_manager.h
@@ -14,13 +14,18 @@
  * limitations under the License.
  */
 #pragma once
-#include <common/libs/utils/subprocess.h>
-#include <fruit/fruit.h>
-#include <host/libs/config/cuttlefish_config.h>
 
+#include <memory>
 #include <string>
+#include <unordered_map>
 #include <vector>
 
+#include <fruit/fruit.h>
+
+#include "common/libs/utils/result.h"
+#include "host/libs/config/command_source.h"
+#include "host/libs/config/cuttlefish_config.h"
+
 namespace cuttlefish {
 namespace vm_manager {
 
@@ -37,7 +42,7 @@
   // need to consume host resources, except for the PCI ID. Use this trick to
   // keep the number of PCI IDs assigned constant for all flags/vm manager
   // combinations
-  static const int kDefaultNumHvcs = 8;
+  static const int kDefaultNumHvcs = 10;
 
   // This is the number of virtual disks (block devices) that should be
   // configured by the VmManager. Related to the description above regarding
@@ -46,7 +51,7 @@
   // HVC virtual console ports, block devices cannot be configured to be sinks,
   // so we once again leverage HVC virtual console ports to "bump up" the last
   // assigned virtual disk PCI ID (i.e. 2 disks = 7 hvcs, 1 disks = 8 hvcs)
-  static const int kMaxDisks = 3;
+  static constexpr int kMaxDisks = 3;
 
   // This is the number of virtual disks that contribute to the named partition
   // list (/dev/block/by-name/*) under Android. The partitions names from
@@ -58,26 +63,31 @@
   virtual ~VmManager() = default;
 
   virtual bool IsSupported() = 0;
-  virtual std::vector<std::string> ConfigureGraphics(
-      const CuttlefishConfig& config) = 0;
-  virtual std::string ConfigureBootDevices(int num_disks) = 0;
+
+  virtual Result<std::unordered_map<std::string, std::string>>
+  ConfigureGraphics(const CuttlefishConfig::InstanceSpecific& instance) = 0;
+
+  virtual Result<std::unordered_map<std::string, std::string>>
+  ConfigureBootDevices(int num_disks, bool have_gpu) = 0;
 
   // Starts the VMM. It will usually build a command and pass it to the
   // command_starter function, although it may start more than one. The
   // command_starter function allows to customize the way vmm commands are
   // started/tracked/etc.
-  virtual std::vector<cuttlefish::Command> StartCommands(
+  virtual Result<std::vector<MonitorCommand>> StartCommands(
       const CuttlefishConfig& config) = 0;
 };
 
-fruit::Component<fruit::Required<const CuttlefishConfig>, VmManager>
+fruit::Component<fruit::Required<const CuttlefishConfig,
+                                 const CuttlefishConfig::InstanceSpecific>,
+                 VmManager>
 VmManagerComponent();
 
 std::unique_ptr<VmManager> GetVmManager(const std::string&, Arch arch);
 
-std::string ConfigureMultipleBootDevices(const std::string& pci_path, int pci_offset,
-                                         int num_disks);
+Result<std::unordered_map<std::string, std::string>>
+ConfigureMultipleBootDevices(const std::string& pci_path, int pci_offset,
+                             int num_disks);
 
 } // namespace vm_manager
 } // namespace cuttlefish
-
diff --git a/host/libs/wayland/wayland_compositor.cpp b/host/libs/wayland/wayland_compositor.cpp
index 4e2f8f5..1842a26 100644
--- a/host/libs/wayland/wayland_compositor.cpp
+++ b/host/libs/wayland/wayland_compositor.cpp
@@ -77,6 +77,8 @@
 void surface_destroy(wl_client*, wl_resource* surface) {
   LOG(VERBOSE) << __FUNCTION__
                << " surface=" << surface;
+
+  delete GetUserData<Surface>(surface);
 }
 
 void surface_attach(wl_client*,
@@ -182,10 +184,7 @@
   .damage_buffer = surface_damage_buffer,
 };
 
-void surface_destroy_resource_callback(struct wl_resource* surface_resource) {
-  Surface* surface = GetUserData<Surface>(surface_resource);
-  delete surface;
-}
+void surface_destroy_resource_callback(struct wl_resource*) {}
 
 void compositor_create_surface(wl_client* client,
                                wl_resource* compositor,
diff --git a/host/libs/wayland/wayland_server.cpp b/host/libs/wayland/wayland_server.cpp
index 7dbc7f7..8ace437 100644
--- a/host/libs/wayland/wayland_server.cpp
+++ b/host/libs/wayland/wayland_server.cpp
@@ -101,4 +101,8 @@
   server_state_->surfaces_.SetFrameCallback(std::move(callback));
 }
 
+void WaylandServer::SetDisplayEventCallback(DisplayEventCallback callback) {
+  server_state_->surfaces_.SetDisplayEventCallback(std::move(callback));
+}
+
 }  // namespace wayland
diff --git a/host/libs/wayland/wayland_server.h b/host/libs/wayland/wayland_server.h
index 24ee68c..7892f11 100644
--- a/host/libs/wayland/wayland_server.h
+++ b/host/libs/wayland/wayland_server.h
@@ -24,6 +24,7 @@
 #include <string>
 #include <thread>
 
+#include "host/libs/wayland/wayland_server_callbacks.h"
 #include "host/libs/wayland/wayland_surfaces.h"
 
 namespace wayland {
@@ -52,6 +53,8 @@
     // available.
     void SetFrameCallback(Surfaces::FrameCallback callback);
 
+    void SetDisplayEventCallback(DisplayEventCallback callback);
+
    private:
     void ServerLoop(int wayland_socket_fd);
 
diff --git a/host/libs/wayland/wayland_server_callbacks.h b/host/libs/wayland/wayland_server_callbacks.h
new file mode 100644
index 0000000..4cd9a10
--- /dev/null
+++ b/host/libs/wayland/wayland_server_callbacks.h
@@ -0,0 +1,34 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <cstdint>
+#include <functional>
+#include <variant>
+
+struct DisplayCreatedEvent {
+  std::uint32_t display_number;
+  std::uint32_t display_width;
+  std::uint32_t display_height;
+};
+
+struct DisplayDestroyedEvent {
+  std::uint32_t display_number;
+};
+
+using DisplayEvent = std::variant<DisplayCreatedEvent, DisplayDestroyedEvent>;
+using DisplayEventCallback = std::function<void(const DisplayEvent&)>;
diff --git a/host/libs/wayland/wayland_surface.cpp b/host/libs/wayland/wayland_surface.cpp
index 51a3b42..1052547 100644
--- a/host/libs/wayland/wayland_surface.cpp
+++ b/host/libs/wayland/wayland_surface.cpp
@@ -25,6 +25,13 @@
 
 Surface::Surface(Surfaces& surfaces) : surfaces_(surfaces) {}
 
+Surface::~Surface() {
+  if (state_.virtio_gpu_metadata_.scanout_id.has_value()) {
+    const uint32_t display_number = *state_.virtio_gpu_metadata_.scanout_id;
+    surfaces_.HandleSurfaceDestroyed(display_number);
+  }
+}
+
 void Surface::SetRegion(const Region& region) {
   std::unique_lock<std::mutex> lock(state_mutex_);
   state_.region = region;
@@ -58,6 +65,11 @@
     CHECK(buffer_h == state_.region.h);
     const int32_t buffer_stride_bytes = wl_shm_buffer_get_stride(shm_buffer);
 
+    if (!state_.has_notified_surface_create) {
+      surfaces_.HandleSurfaceCreated(display_number, buffer_w, buffer_h);
+      state_.has_notified_surface_create = true;
+    }
+
     uint8_t* buffer_pixels =
         reinterpret_cast<uint8_t*>(wl_shm_buffer_get_data(shm_buffer));
 
diff --git a/host/libs/wayland/wayland_surface.h b/host/libs/wayland/wayland_surface.h
index f3b17ae..2cc1c75 100644
--- a/host/libs/wayland/wayland_surface.h
+++ b/host/libs/wayland/wayland_surface.h
@@ -31,7 +31,7 @@
 class Surface {
  public:
   Surface(Surfaces& surfaces);
-  virtual ~Surface() = default;
+  virtual ~Surface();
 
   Surface(const Surface& rhs) = delete;
   Surface& operator=(const Surface& rhs) = delete;
@@ -76,6 +76,8 @@
     Region region;
 
     VirtioGpuMetadata virtio_gpu_metadata_;
+
+    bool has_notified_surface_create = false;
   };
 
   std::mutex state_mutex_;
diff --git a/host/libs/wayland/wayland_surfaces.cpp b/host/libs/wayland/wayland_surfaces.cpp
index 2fa1ed9..1d45843 100644
--- a/host/libs/wayland/wayland_surfaces.cpp
+++ b/host/libs/wayland/wayland_surfaces.cpp
@@ -28,6 +28,11 @@
   callback_.emplace(std::move(callback));
 }
 
+void Surfaces::SetDisplayEventCallback(DisplayEventCallback callback) {
+  std::unique_lock<std::mutex> lock(callback_mutex_);
+  event_callback_.emplace(std::move(callback));
+}
+
 void Surfaces::HandleSurfaceFrame(std::uint32_t display_number,
                                   std::uint32_t frame_width,
                                   std::uint32_t frame_height,
@@ -40,4 +45,30 @@
   }
 }
 
+void Surfaces::HandleSurfaceCreated(std::uint32_t display_number,
+                                    std::uint32_t display_width,
+                                    std::uint32_t display_height) {
+  const DisplayEvent event{DisplayCreatedEvent{
+      .display_number = display_number,
+      .display_width = display_width,
+      .display_height = display_height,
+  }};
+
+  std::unique_lock<std::mutex> lock(callback_mutex_);
+  if (event_callback_) {
+    (event_callback_.value())(event);
+  }
+}
+
+void Surfaces::HandleSurfaceDestroyed(std::uint32_t display_number) {
+  const DisplayEvent event{DisplayDestroyedEvent{
+      .display_number = display_number,
+  }};
+
+  std::unique_lock<std::mutex> lock(callback_mutex_);
+  if (event_callback_) {
+    (event_callback_.value())(event);
+  }
+}
+
 }  // namespace wayland
\ No newline at end of file
diff --git a/host/libs/wayland/wayland_surfaces.h b/host/libs/wayland/wayland_surfaces.h
index 58ed0e8..9fb259e 100644
--- a/host/libs/wayland/wayland_surfaces.h
+++ b/host/libs/wayland/wayland_surfaces.h
@@ -24,6 +24,8 @@
 #include <thread>
 #include <unordered_map>
 
+#include "host/libs/wayland/wayland_server_callbacks.h"
+
 namespace wayland {
 
 class Surface;
@@ -48,6 +50,8 @@
 
   void SetFrameCallback(FrameCallback callback);
 
+  void SetDisplayEventCallback(DisplayEventCallback callback);
+
  private:
   friend class Surface;
   void HandleSurfaceFrame(std::uint32_t display_number,      //
@@ -56,8 +60,15 @@
                           std::uint32_t frame_stride_bytes,  //
                           std::uint8_t* frame_bytes);
 
+  void HandleSurfaceCreated(std::uint32_t display_number,
+                            std::uint32_t display_width,
+                            std::uint32_t display_height);
+
+  void HandleSurfaceDestroyed(std::uint32_t display_number);
+
   std::mutex callback_mutex_;
   std::optional<FrameCallback> callback_;
+  std::optional<DisplayEventCallback> event_callback_;
 };
 
 }  // namespace wayland
diff --git a/host/libs/web/Android.bp b/host/libs/web/Android.bp
index a0bc146..62d8ebe 100644
--- a/host/libs/web/Android.bp
+++ b/host/libs/web/Android.bp
@@ -22,8 +22,8 @@
     srcs: [
         "build_api.cc",
         "credential_source.cc",
-        "curl_wrapper.cc",
-        "install_zip.cc",
+        "http_client/http_client.cc",
+        "http_client/sso_client.cc",
     ],
     static_libs: [
         "libcuttlefish_host_config",
@@ -59,3 +59,24 @@
     },
     defaults: ["cuttlefish_host"],
 }
+
+cc_test_host {
+    name: "libcuttlefish_web_test",
+    srcs: [
+        "http_client/unittest/main_test.cc",
+        "http_client/unittest/sso_client_test.cc",
+    ],
+    static_libs: [
+       "libbase",
+       "libcuttlefish_fs",
+       "libcuttlefish_utils",
+       "libcurl",
+       "libcrypto",
+       "liblog",
+       "libssl",
+       "libz",
+       "libjsoncpp",
+       "libcuttlefish_web",
+    ],
+    defaults: ["cuttlefish_host"],
+}
diff --git a/host/libs/web/build_api.cc b/host/libs/web/build_api.cc
index 85d32bd..396d985 100644
--- a/host/libs/web/build_api.cc
+++ b/host/libs/web/build_api.cc
@@ -19,15 +19,21 @@
 #include <unistd.h>
 
 #include <chrono>
+#include <memory>
 #include <set>
 #include <string>
 #include <thread>
+#include <tuple>
+#include <utility>
+#include <vector>
 
-#include <android-base/strings.h>
 #include <android-base/logging.h>
+#include <android-base/strings.h>
 
 #include "common/libs/utils/environment.h"
 #include "common/libs/utils/files.h"
+#include "common/libs/utils/result.h"
+#include "host/libs/web/credential_source.h"
 
 namespace cuttlefish {
 namespace {
@@ -37,36 +43,58 @@
 
 bool StatusIsTerminal(const std::string& status) {
   const static std::set<std::string> terminal_statuses = {
-    "abandoned",
-    "complete",
-    "error",
-    "ABANDONED",
-    "COMPLETE",
-    "ERROR",
+      "abandoned", "complete", "error", "ABANDONED", "COMPLETE", "ERROR",
   };
   return terminal_statuses.count(status) > 0;
 }
 
-} // namespace
+bool ArtifactsContain(const std::vector<Artifact>& artifacts,
+                      const std::string& name) {
+  for (const auto& artifact : artifacts) {
+    if (artifact.Name() == name) {
+      return true;
+    }
+  }
+  return false;
+}
+
+std::string BuildNameRegexp(
+    const std::vector<std::string>& artifact_filenames) {
+  // surrounding with \Q and \E treats the text literally to avoid
+  // characters being treated as regex
+  auto it = artifact_filenames.begin();
+  std::string name_regex = "^\\Q" + *it + "\\E$";
+  std::string result = name_regex;
+  ++it;
+  for (const auto end = artifact_filenames.end(); it != end; ++it) {
+    name_regex = "^\\Q" + *it + "\\E$";
+    result += "|" + name_regex;
+  }
+  return result;
+}
+
+}  // namespace
 
 Artifact::Artifact(const Json::Value& json_artifact) {
-  name = json_artifact["name"].asString();
-  size = std::stol(json_artifact["size"].asString());
-  last_modified_time = std::stol(json_artifact["lastModifiedTime"].asString());
-  md5 = json_artifact["md5"].asString();
-  content_type = json_artifact["contentType"].asString();
-  revision = json_artifact["revision"].asString();
-  creation_time = std::stol(json_artifact["creationTime"].asString());
-  crc32 = json_artifact["crc32"].asUInt();
+  name_ = json_artifact["name"].asString();
+  size_ = std::stol(json_artifact["size"].asString());
+  last_modified_time_ = std::stol(json_artifact["lastModifiedTime"].asString());
+  md5_ = json_artifact["md5"].asString();
+  content_type_ = json_artifact["contentType"].asString();
+  revision_ = json_artifact["revision"].asString();
+  creation_time_ = std::stol(json_artifact["creationTime"].asString());
+  crc32_ = json_artifact["crc32"].asUInt();
 }
 
 std::ostream& operator<<(std::ostream& out, const DeviceBuild& build) {
-  return out << "(id=\"" << build.id << "\", target=\"" << build.target << "\")";
+  return out << "(id=\"" << build.id << "\", target=\"" << build.target
+             << "\")";
 }
 
 std::ostream& operator<<(std::ostream& out, const DirectoryBuild& build) {
   auto paths = android::base::Join(build.paths, ":");
-  return out << "(paths=\"" << paths << "\", target=\"" << build.target << "\")";
+  return out << "(paths=\"" << paths << "\", target=\"" << build.target
+             << "\")";
 }
 
 std::ostream& operator<<(std::ostream& out, const Build& build) {
@@ -74,124 +102,141 @@
   return out;
 }
 
-DirectoryBuild::DirectoryBuild(const std::vector<std::string>& paths,
-                               const std::string& target)
-    : paths(paths), target(target), id("eng") {
+DirectoryBuild::DirectoryBuild(std::vector<std::string> paths,
+                               std::string target)
+    : paths(std::move(paths)), target(std::move(target)), id("eng") {
   product = StringFromEnv("TARGET_PRODUCT", "");
 }
 
-BuildApi::BuildApi(CurlWrapper& curl, CredentialSource* credential_source)
-    : BuildApi(curl, credential_source, "") {}
+BuildApi::BuildApi() : BuildApi(std::move(HttpClient::CurlClient()), nullptr) {}
 
-BuildApi::BuildApi(CurlWrapper& curl, CredentialSource* credential_source,
-                   std::string api_key)
-    : curl(curl),
-      credential_source(credential_source),
-      api_key_(std::move(api_key)) {}
+BuildApi::BuildApi(std::unique_ptr<HttpClient> http_client,
+                   std::unique_ptr<CredentialSource> credential_source)
+    : BuildApi(std::move(http_client), nullptr, std::move(credential_source),
+               "", std::chrono::seconds(0)) {}
 
-std::vector<std::string> BuildApi::Headers() {
+BuildApi::BuildApi(std::unique_ptr<HttpClient> http_client,
+                   std::unique_ptr<HttpClient> inner_http_client,
+                   std::unique_ptr<CredentialSource> credential_source,
+                   std::string api_key, const std::chrono::seconds retry_period)
+    : http_client(std::move(http_client)),
+      inner_http_client(std::move(inner_http_client)),
+      credential_source(std::move(credential_source)),
+      api_key_(std::move(api_key)),
+      retry_period_(retry_period) {}
+
+Result<std::vector<std::string>> BuildApi::Headers() {
   std::vector<std::string> headers;
   if (credential_source) {
     headers.push_back("Authorization: Bearer " +
-                      credential_source->Credential());
+                      CF_EXPECT(credential_source->Credential()));
   }
   return headers;
 }
 
-std::string BuildApi::LatestBuildId(const std::string& branch,
-                                    const std::string& target) {
+Result<std::string> BuildApi::LatestBuildId(const std::string& branch,
+                                            const std::string& target) {
   std::string url =
-      BUILD_API + "/builds?branch=" + curl.UrlEscape(branch) +
+      BUILD_API + "/builds?branch=" + http_client->UrlEscape(branch) +
       "&buildAttemptStatus=complete" +
       "&buildType=submitted&maxResults=1&successful=true&target=" +
-      curl.UrlEscape(target);
+      http_client->UrlEscape(target);
   if (!api_key_.empty()) {
-    url += "&key=" + curl.UrlEscape(api_key_);
+    url += "&key=" + http_client->UrlEscape(api_key_);
   }
-  auto curl_response = curl.DownloadToJson(url, Headers());
-  const auto& json = curl_response.data;
-  if (!curl_response.HttpSuccess()) {
-    LOG(FATAL) << "Error fetching the latest build of \"" << target
-               << "\" on \"" << branch << "\". The server response was \""
-               << json << "\", and code was " << curl_response.http_code;
-  }
-  CHECK(!json.isMember("error"))
-      << "Response had \"error\" but had http success status. Received \""
-      << json << "\"";
+  auto response =
+      CF_EXPECT(http_client->DownloadToJson(url, CF_EXPECT(Headers())));
+  const auto& json = response.data;
+  CF_EXPECT(response.HttpSuccess(), "Error fetching the latest build of \""
+                                        << target << "\" on \"" << branch
+                                        << "\". The server response was \""
+                                        << json << "\", and code was "
+                                        << response.http_code);
+  CF_EXPECT(!json.isMember("error"),
+            "Response had \"error\" but had http success status. Received \""
+                << json << "\"");
 
   if (!json.isMember("builds") || json["builds"].size() != 1) {
     LOG(WARNING) << "expected to receive 1 build for \"" << target << "\" on \""
                  << branch << "\", but received " << json["builds"].size()
                  << ". Full response was " << json;
+    // TODO(schuffelen): Return a failed Result here, and update ArgumentToBuild
     return "";
   }
   return json["builds"][0]["buildId"].asString();
 }
 
-std::string BuildApi::BuildStatus(const DeviceBuild& build) {
-  std::string url = BUILD_API + "/builds/" + curl.UrlEscape(build.id) + "/" +
-                    curl.UrlEscape(build.target);
+Result<std::string> BuildApi::BuildStatus(const DeviceBuild& build) {
+  std::string url = BUILD_API + "/builds/" + http_client->UrlEscape(build.id) +
+                    "/" + http_client->UrlEscape(build.target);
   if (!api_key_.empty()) {
-    url += "?key=" + curl.UrlEscape(api_key_);
+    url += "?key=" + http_client->UrlEscape(api_key_);
   }
-  auto curl_response = curl.DownloadToJson(url, Headers());
-  const auto& json = curl_response.data;
-  if (!curl_response.HttpSuccess()) {
-    LOG(FATAL) << "Error fetching the status of \"" << build
-               << "\". The server response was \"" << json
-               << "\", and code was " << curl_response.http_code;
-  }
-  CHECK(!json.isMember("error"))
-      << "Response had \"error\" but had http success status. Received \""
-      << json << "\"";
+  auto response =
+      CF_EXPECT(http_client->DownloadToJson(url, CF_EXPECT(Headers())));
+  const auto& json = response.data;
+  CF_EXPECT(response.HttpSuccess(),
+            "Error fetching the status of \""
+                << build << "\". The server response was \"" << json
+                << "\", and code was " << response.http_code);
+  CF_EXPECT(!json.isMember("error"),
+            "Response had \"error\" but had http success status. Received \""
+                << json << "\"");
 
   return json["buildAttemptStatus"].asString();
 }
 
-std::string BuildApi::ProductName(const DeviceBuild& build) {
-  std::string url = BUILD_API + "/builds/" + curl.UrlEscape(build.id) + "/" +
-                    curl.UrlEscape(build.target);
+Result<std::string> BuildApi::ProductName(const DeviceBuild& build) {
+  std::string url = BUILD_API + "/builds/" + http_client->UrlEscape(build.id) +
+                    "/" + http_client->UrlEscape(build.target);
   if (!api_key_.empty()) {
-    url += "?key=" + curl.UrlEscape(api_key_);
+    url += "?key=" + http_client->UrlEscape(api_key_);
   }
-  auto curl_response = curl.DownloadToJson(url, Headers());
-  const auto& json = curl_response.data;
-  if (!curl_response.HttpSuccess()) {
-    LOG(FATAL) << "Error fetching the product name of \"" << build
-               << "\". The server response was \"" << json
-               << "\", and code was " << curl_response.http_code;
-  }
-  CHECK(!json.isMember("error"))
-      << "Response had \"error\" but had http success status. Received \""
-      << json << "\"";
+  auto response =
+      CF_EXPECT(http_client->DownloadToJson(url, CF_EXPECT(Headers())));
+  const auto& json = response.data;
+  CF_EXPECT(response.HttpSuccess(),
+            "Error fetching the product name of \""
+                << build << "\". The server response was \"" << json
+                << "\", and code was " << response.http_code);
+  CF_EXPECT(!json.isMember("error"),
+            "Response had \"error\" but had http success status. Received \""
+                << json << "\"");
 
-  CHECK(json.isMember("target")) << "Build was missing target field.";
+  CF_EXPECT(json.isMember("target"), "Build was missing target field.");
   return json["target"]["product"].asString();
 }
 
-std::vector<Artifact> BuildApi::Artifacts(const DeviceBuild& build) {
+Result<std::vector<Artifact>> BuildApi::Artifacts(
+    const DeviceBuild& build,
+    const std::vector<std::string>& artifact_filenames) {
   std::string page_token = "";
   std::vector<Artifact> artifacts;
   do {
-    std::string url = BUILD_API + "/builds/" + curl.UrlEscape(build.id) + "/" +
-                      curl.UrlEscape(build.target) +
+    std::string url = BUILD_API + "/builds/" +
+                      http_client->UrlEscape(build.id) + "/" +
+                      http_client->UrlEscape(build.target) +
                       "/attempts/latest/artifacts?maxResults=100";
+    if (!artifact_filenames.empty()) {
+      url += "&nameRegexp=" +
+             http_client->UrlEscape(BuildNameRegexp(artifact_filenames));
+    }
     if (page_token != "") {
-      url += "&pageToken=" + curl.UrlEscape(page_token);
+      url += "&pageToken=" + http_client->UrlEscape(page_token);
     }
     if (!api_key_.empty()) {
-      url += "&key=" + curl.UrlEscape(api_key_);
+      url += "&key=" + http_client->UrlEscape(api_key_);
     }
-    auto curl_response = curl.DownloadToJson(url, Headers());
-    const auto& json = curl_response.data;
-    if (!curl_response.HttpSuccess()) {
-      LOG(FATAL) << "Error fetching the artifacts of \"" << build
-                 << "\". The server response was \"" << json
-                 << "\", and code was " << curl_response.http_code;
-    }
-    CHECK(!json.isMember("error"))
-        << "Response had \"error\" but had http success status. Received \""
-        << json << "\"";
+    auto response =
+        CF_EXPECT(http_client->DownloadToJson(url, CF_EXPECT(Headers())));
+    const auto& json = response.data;
+    CF_EXPECT(response.HttpSuccess(),
+              "Error fetching the artifacts of \""
+                  << build << "\". The server response was \"" << json
+                  << "\", and code was " << response.http_code);
+    CF_EXPECT(!json.isMember("error"),
+              "Response had \"error\" but had http success status. Received \""
+                  << json << "\"");
     if (json.isMember("nextPageToken")) {
       page_token = json["nextPageToken"].asString();
     } else {
@@ -205,109 +250,102 @@
 }
 
 struct CloseDir {
-  void operator()(DIR* dir) {
-    closedir(dir);
-  }
+  void operator()(DIR* dir) { closedir(dir); }
 };
 
-using UniqueDir = std::unique_ptr<DIR, CloseDir>;
-
-std::vector<Artifact> BuildApi::Artifacts(const DirectoryBuild& build) {
+Result<std::vector<Artifact>> BuildApi::Artifacts(
+    const DirectoryBuild& build, const std::vector<std::string>&) {
   std::vector<Artifact> artifacts;
   for (const auto& path : build.paths) {
-    auto dir = UniqueDir(opendir(path.c_str()));
-    CHECK(dir != nullptr) << "Could not read files from \"" << path << "\"";
-    for (auto entity = readdir(dir.get()); entity != nullptr; entity = readdir(dir.get())) {
+    auto dir = std::unique_ptr<DIR, CloseDir>(opendir(path.c_str()));
+    CF_EXPECT(dir != nullptr, "Could not read files from \"" << path << "\"");
+    for (auto entity = readdir(dir.get()); entity != nullptr;
+         entity = readdir(dir.get())) {
       artifacts.emplace_back(std::string(entity->d_name));
     }
   }
   return artifacts;
 }
 
-bool BuildApi::ArtifactToCallback(const DeviceBuild& build,
-                                  const std::string& artifact,
-                                  CurlWrapper::DataCallback callback) {
+Result<void> BuildApi::ArtifactToCallback(const DeviceBuild& build,
+                                          const std::string& artifact,
+                                          HttpClient::DataCallback callback) {
   std::string download_url_endpoint =
-      BUILD_API + "/builds/" + curl.UrlEscape(build.id) + "/" +
-      curl.UrlEscape(build.target) + "/attempts/latest/artifacts/" +
-      curl.UrlEscape(artifact) + "/url";
+      BUILD_API + "/builds/" + http_client->UrlEscape(build.id) + "/" +
+      http_client->UrlEscape(build.target) + "/attempts/latest/artifacts/" +
+      http_client->UrlEscape(artifact) + "/url";
   if (!api_key_.empty()) {
-    download_url_endpoint += "?key=" + curl.UrlEscape(api_key_);
+    download_url_endpoint += "?key=" + http_client->UrlEscape(api_key_);
   }
-  auto curl_response = curl.DownloadToJson(download_url_endpoint, Headers());
-  const auto& json = curl_response.data;
-  if (!(curl_response.HttpSuccess() || curl_response.HttpRedirect())) {
-    LOG(ERROR) << "Error fetching the url of \"" << artifact << "\" for \""
-               << build << "\". The server response was \"" << json
-               << "\", and code was " << curl_response.http_code;
-    return false;
-  }
-  if (json.isMember("error")) {
-    LOG(ERROR) << "Response had \"error\" but had http success status. "
-               << "Received \"" << json << "\"";
-    return false;
-  }
-  if (!json.isMember("signedUrl")) {
-    LOG(ERROR) << "URL endpoint did not have json path: " << json;
-    return false;
-  }
+  auto response = CF_EXPECT(
+      http_client->DownloadToJson(download_url_endpoint, CF_EXPECT(Headers())));
+  const auto& json = response.data;
+  CF_EXPECT(response.HttpSuccess() || response.HttpRedirect(),
+            "Error fetching the url of \"" << artifact << "\" for \"" << build
+                                           << "\". The server response was \""
+                                           << json << "\", and code was "
+                                           << response.http_code);
+  CF_EXPECT(!json.isMember("error"),
+            "Response had \"error\" but had http success status. "
+                << "Received \"" << json << "\"");
+  CF_EXPECT(json.isMember("signedUrl"),
+            "URL endpoint did not have json path: " << json);
   std::string url = json["signedUrl"].asString();
-  return curl.DownloadToCallback(callback, url).HttpSuccess();
+  auto callback_response =
+      CF_EXPECT(http_client->DownloadToCallback(callback, url));
+  CF_EXPECT(IsHttpSuccess(callback_response.http_code));
+  return {};
 }
 
-bool BuildApi::ArtifactToFile(const DeviceBuild& build,
-                              const std::string& artifact,
-                              const std::string& path) {
+Result<void> BuildApi::ArtifactToFile(const DeviceBuild& build,
+                                      const std::string& artifact,
+                                      const std::string& path) {
   std::string download_url_endpoint =
-      BUILD_API + "/builds/" + curl.UrlEscape(build.id) + "/" +
-      curl.UrlEscape(build.target) + "/attempts/latest/artifacts/" +
-      curl.UrlEscape(artifact) + "/url";
+      BUILD_API + "/builds/" + http_client->UrlEscape(build.id) + "/" +
+      http_client->UrlEscape(build.target) + "/attempts/latest/artifacts/" +
+      http_client->UrlEscape(artifact) + "/url";
   if (!api_key_.empty()) {
-    download_url_endpoint += "?key=" + curl.UrlEscape(api_key_);
+    download_url_endpoint += "?key=" + http_client->UrlEscape(api_key_);
   }
-  auto curl_response = curl.DownloadToJson(download_url_endpoint, Headers());
-  const auto& json = curl_response.data;
-  if (!(curl_response.HttpSuccess() || curl_response.HttpRedirect())) {
-    LOG(ERROR) << "Error fetching the url of \"" << artifact << "\" for \""
-               << build << "\". The server response was \"" << json
-               << "\", and code was " << curl_response.http_code;
-    return false;
-  }
-  if (json.isMember("error")) {
-    LOG(ERROR) << "Response had \"error\" but had http success status. "
-               << "Received \"" << json << "\"";
-  }
-  if (!json.isMember("signedUrl")) {
-    LOG(ERROR) << "URL endpoint did not have json path: " << json;
-    return false;
-  }
+  auto response = CF_EXPECT(
+      http_client->DownloadToJson(download_url_endpoint, CF_EXPECT(Headers())));
+  const auto& json = response.data;
+  CF_EXPECT(response.HttpSuccess() || response.HttpRedirect(),
+            "Error fetching the url of \"" << artifact << "\" for \"" << build
+                                           << "\". The server response was \""
+                                           << json << "\", and code was "
+                                           << response.http_code);
+  CF_EXPECT(!json.isMember("error"),
+            "Response had \"error\" but had http success status. "
+                << "Received \"" << json << "\"");
+  CF_EXPECT(json.isMember("signedUrl"),
+            "URL endpoint did not have json path: " << json);
   std::string url = json["signedUrl"].asString();
-  return curl.DownloadToFile(url, path).HttpSuccess();
+  CF_EXPECT(CF_EXPECT(http_client->DownloadToFile(url, path)).HttpSuccess());
+  return {};
 }
 
-bool BuildApi::ArtifactToFile(const DirectoryBuild& build,
-                              const std::string& artifact,
-                              const std::string& destination) {
+Result<void> BuildApi::ArtifactToFile(const DirectoryBuild& build,
+                                      const std::string& artifact,
+                                      const std::string& destination) {
   for (const auto& path : build.paths) {
     auto source = path + "/" + artifact;
     if (!FileExists(source)) {
       continue;
     }
     unlink(destination.c_str());
-    if (symlink(source.c_str(), destination.c_str())) {
-      int error_num = errno;
-      LOG(ERROR) << "Could not create symlink from " << source << " to "
-                  << destination << ": " << strerror(error_num);
-      return false;
-    }
-    return true;
+    CF_EXPECT(symlink(source.c_str(), destination.c_str()) == 0,
+              "Could not create symlink from " << source << " to "
+                                               << destination << ": "
+                                               << strerror(errno));
+    return {};
   }
-  return false;
+  return CF_ERR("Could not find artifact \"" << artifact << "\" in build \""
+                                             << build << "\"");
 }
 
-Build ArgumentToBuild(BuildApi* build_api, const std::string& arg,
-                      const std::string& default_build_target,
-                      const std::chrono::seconds& retry_period) {
+Result<Build> BuildApi::ArgumentToBuild(
+    const std::string& arg, const std::string& default_build_target) {
   if (arg.find(':') != std::string::npos) {
     std::vector<std::string> dirs = android::base::Split(arg, ":");
     std::string id = dirs.back();
@@ -315,39 +353,84 @@
     return DirectoryBuild(dirs, id);
   }
   size_t slash_pos = arg.find('/');
-  if (slash_pos != std::string::npos
-        && arg.find('/', slash_pos + 1) != std::string::npos) {
-    LOG(FATAL) << "Build argument cannot have more than one '/' slash. Was at "
-        << slash_pos << " and " << arg.find('/', slash_pos + 1);
+  if (slash_pos != std::string::npos &&
+      arg.find('/', slash_pos + 1) != std::string::npos) {
+    return CF_ERR("Build argument cannot have more than one '/' slash. Was at "
+                  << slash_pos << " and " << arg.find('/', slash_pos + 1));
   }
   std::string build_target = slash_pos == std::string::npos
-      ? default_build_target : arg.substr(slash_pos + 1);
-  std::string branch_or_id = slash_pos == std::string::npos
-      ? arg: arg.substr(0, slash_pos);
+                                 ? default_build_target
+                                 : arg.substr(slash_pos + 1);
+  std::string branch_or_id =
+      slash_pos == std::string::npos ? arg : arg.substr(0, slash_pos);
   std::string branch_latest_build_id =
-      build_api->LatestBuildId(branch_or_id, build_target);
+      CF_EXPECT(LatestBuildId(branch_or_id, build_target));
   std::string build_id = branch_or_id;
   if (branch_latest_build_id != "") {
     LOG(INFO) << "The latest good build on branch \"" << branch_or_id
-        << "\"with build target \"" << build_target
-        << "\" is \"" << branch_latest_build_id << "\"";
+              << "\"with build target \"" << build_target << "\" is \""
+              << branch_latest_build_id << "\"";
     build_id = branch_latest_build_id;
   }
   DeviceBuild proposed_build = DeviceBuild(build_id, build_target);
-  std::string status = build_api->BuildStatus(proposed_build);
-  if (status == "") {
-    LOG(FATAL) << proposed_build << " is not a valid branch or build id.";
+  std::string status = CF_EXPECT(BuildStatus(proposed_build));
+  CF_EXPECT(status != "",
+            proposed_build << " is not a valid branch or build id.");
+  LOG(INFO) << "Status for build " << proposed_build << " is " << status;
+  while (retry_period_ != std::chrono::seconds::zero() &&
+         !StatusIsTerminal(status)) {
+    LOG(INFO) << "Status is \"" << status << "\". Waiting for "
+              << retry_period_.count() << " seconds.";
+    std::this_thread::sleep_for(retry_period_);
+    status = CF_EXPECT(BuildStatus(proposed_build));
   }
   LOG(INFO) << "Status for build " << proposed_build << " is " << status;
-  while (retry_period != std::chrono::seconds::zero() && !StatusIsTerminal(status)) {
-    LOG(INFO) << "Status is \"" << status << "\". Waiting for " << retry_period.count()
-        << " seconds.";
-    std::this_thread::sleep_for(retry_period);
-    status = build_api->BuildStatus(proposed_build);
-  }
-  LOG(INFO) << "Status for build " << proposed_build << " is " << status;
-  proposed_build.product = build_api->ProductName(proposed_build);
+  proposed_build.product = CF_EXPECT(ProductName(proposed_build));
   return proposed_build;
 }
 
-} // namespace cuttlefish
+Result<std::string> BuildApi::DownloadFile(const Build& build,
+                                           const std::string& target_directory,
+                                           const std::string& artifact_name) {
+  std::vector<Artifact> artifacts =
+      CF_EXPECT(Artifacts(build, {artifact_name}));
+  CF_EXPECT(ArtifactsContain(artifacts, artifact_name),
+            "Target " << build << " did not contain " << artifact_name);
+  return DownloadTargetFile(build, target_directory, artifact_name);
+}
+
+Result<std::string> BuildApi::DownloadFileWithBackup(
+    const Build& build, const std::string& target_directory,
+    const std::string& artifact_name, const std::string& backup_artifact_name) {
+  std::vector<Artifact> artifacts =
+      CF_EXPECT(Artifacts(build, {artifact_name, backup_artifact_name}));
+  std::string selected_artifact = artifact_name;
+  if (!ArtifactsContain(artifacts, artifact_name)) {
+    selected_artifact = backup_artifact_name;
+  }
+  return DownloadTargetFile(build, target_directory, selected_artifact);
+}
+
+Result<std::string> BuildApi::DownloadTargetFile(
+    const Build& build, const std::string& target_directory,
+    const std::string& artifact_name) {
+  std::string target_filepath = target_directory + "/" + artifact_name;
+  CF_EXPECT(ArtifactToFile(build, artifact_name, target_filepath),
+            "Unable to download " << build << ":" << artifact_name << " to "
+                                  << target_filepath);
+  return {target_filepath};
+}
+
+/** Returns the name of one of the artifact target zip files.
+ *
+ * For example, for a target "aosp_cf_x86_phone-userdebug" at a build "5824130",
+ * the image zip file would be "aosp_cf_x86_phone-img-5824130.zip"
+ */
+std::string GetBuildZipName(const Build& build, const std::string& name) {
+  std::string product =
+      std::visit([](auto&& arg) { return arg.product; }, build);
+  auto id = std::visit([](auto&& arg) { return arg.id; }, build);
+  return product + "-" + name + "-" + id + ".zip";
+}
+
+}  // namespace cuttlefish
diff --git a/host/libs/web/build_api.h b/host/libs/web/build_api.h
index 7a78389..89a5daa 100644
--- a/host/libs/web/build_api.h
+++ b/host/libs/web/build_api.h
@@ -20,41 +20,45 @@
 #include <memory>
 #include <ostream>
 #include <string>
+#include <tuple>
+#include <utility>
 #include <variant>
+#include <vector>
 
-#include "credential_source.h"
-#include "curl_wrapper.h"
+#include "common/libs/utils/result.h"
+#include "host/libs/web/credential_source.h"
+#include "host/libs/web/http_client/http_client.h"
 
 namespace cuttlefish {
 
 class Artifact {
-  std::string name;
-  size_t size;
-  unsigned long last_modified_time;
-  std::string md5;
-  std::string content_type;
-  std::string revision;
-  unsigned long creation_time;
-  unsigned int crc32;
-public:
+ public:
   Artifact(const Json::Value&);
-  Artifact(const std::string& name) : name(name) {}
+  Artifact(std::string name) : name_(std::move(name)) {}
 
-  const std::string& Name() const { return name; }
-  size_t Size() const { return size; }
-  unsigned long LastModifiedTime() const { return last_modified_time; }
-  const std::string& Md5() const { return md5; }
-  const std::string& ContentType() const { return content_type; }
-  const std::string& Revision() const { return revision; }
-  unsigned long CreationTime() const { return creation_time; }
-  unsigned int Crc32() const { return crc32; }
+  const std::string& Name() const { return name_; }
+  size_t Size() const { return size_; }
+  unsigned long LastModifiedTime() const { return last_modified_time_; }
+  const std::string& Md5() const { return md5_; }
+  const std::string& ContentType() const { return content_type_; }
+  const std::string& Revision() const { return revision_; }
+  unsigned long CreationTime() const { return creation_time_; }
+  unsigned int Crc32() const { return crc32_; }
+
+ private:
+  std::string name_;
+  size_t size_;
+  unsigned long last_modified_time_;
+  std::string md5_;
+  std::string content_type_;
+  std::string revision_;
+  unsigned long creation_time_;
+  unsigned int crc32_;
 };
 
 struct DeviceBuild {
-  DeviceBuild(const std::string& id, const std::string& target) {
-    this->id = id;
-    this->target = target;
-  }
+  DeviceBuild(std::string id, std::string target)
+      : id(std::move(id)), target(std::move(target)) {}
 
   std::string id;
   std::string target;
@@ -65,8 +69,7 @@
 
 struct DirectoryBuild {
   // TODO(schuffelen): Support local builds other than "eng"
-  DirectoryBuild(const std::vector<std::string>& paths,
-                 const std::string& target);
+  DirectoryBuild(std::vector<std::string> paths, std::string target);
 
   std::vector<std::string> paths;
   std::string target;
@@ -82,51 +85,90 @@
 
 class BuildApi {
  public:
-  BuildApi(CurlWrapper&, CredentialSource*);
-  BuildApi(CurlWrapper&, CredentialSource*, std::string api_key);
+  BuildApi();
+  BuildApi(std::unique_ptr<HttpClient>, std::unique_ptr<CredentialSource>);
+  BuildApi(std::unique_ptr<HttpClient>, std::unique_ptr<HttpClient>,
+           std::unique_ptr<CredentialSource>, std::string api_key,
+           const std::chrono::seconds retry_period);
   ~BuildApi() = default;
 
-  std::string LatestBuildId(const std::string& branch,
-                            const std::string& target);
+  Result<std::string> LatestBuildId(const std::string& branch,
+                                    const std::string& target);
 
-  std::string BuildStatus(const DeviceBuild&);
+  // download the artifact from the build and apply the callback
+  Result<void> ArtifactToCallback(const DeviceBuild& build,
+                                  const std::string& artifact,
+                                  HttpClient::DataCallback callback);
 
-  std::string ProductName(const DeviceBuild&);
+  // determine the format of the build source argument and parse for the
+  // relevant build identifiers
+  Result<Build> ArgumentToBuild(const std::string& arg,
+                                const std::string& default_build_target);
 
-  std::vector<Artifact> Artifacts(const DeviceBuild&);
+  Result<std::string> DownloadFile(const Build& build,
+                                   const std::string& target_directory,
+                                   const std::string& artifact_name);
 
-  bool ArtifactToCallback(const DeviceBuild& build, const std::string& artifact,
-                          CurlWrapper::DataCallback callback);
-
-  bool ArtifactToFile(const DeviceBuild& build, const std::string& artifact,
-                      const std::string& path);
-
-  std::vector<Artifact> Artifacts(const DirectoryBuild&);
-
-  bool ArtifactToFile(const DirectoryBuild& build, const std::string& artifact,
-                      const std::string& path);
-
-  std::vector<Artifact> Artifacts(const Build& build) {
-    return std::visit([this](auto&& arg) { return Artifacts(arg); }, build);
-  }
-
-  bool ArtifactToFile(const Build& build, const std::string& artifact,
-                      const std::string& path) {
-    return std::visit([this, &artifact, &path](auto&& arg) {
-      return ArtifactToFile(arg, artifact, path);
-    }, build);
-  }
+  Result<std::string> DownloadFileWithBackup(
+      const Build& build, const std::string& target_directory,
+      const std::string& artifact_name,
+      const std::string& backup_artifact_name);
 
  private:
-  std::vector<std::string> Headers();
+  Result<std::vector<std::string>> Headers();
 
-  CurlWrapper& curl;
-  CredentialSource* credential_source;
+  Result<std::string> BuildStatus(const DeviceBuild&);
+
+  Result<std::string> ProductName(const DeviceBuild&);
+
+  Result<std::vector<Artifact>> Artifacts(
+      const DeviceBuild& build,
+      const std::vector<std::string>& artifact_filenames);
+
+  Result<std::vector<Artifact>> Artifacts(
+      const DirectoryBuild& build,
+      const std::vector<std::string>& artifact_filenames);
+
+  Result<std::vector<Artifact>> Artifacts(
+      const Build& build, const std::vector<std::string>& artifact_filenames) {
+    auto res = std::visit(
+        [this, &artifact_filenames](auto&& arg) {
+          return Artifacts(arg, artifact_filenames);
+        },
+        build);
+    return CF_EXPECT(std::move(res));
+  }
+
+  Result<void> ArtifactToFile(const DeviceBuild& build,
+                              const std::string& artifact,
+                              const std::string& path);
+
+  Result<void> ArtifactToFile(const DirectoryBuild& build,
+                              const std::string& artifact,
+                              const std::string& path);
+
+  Result<void> ArtifactToFile(const Build& build, const std::string& artifact,
+                              const std::string& path) {
+    auto res = std::visit(
+        [this, &artifact, &path](auto&& arg) {
+          return ArtifactToFile(arg, artifact, path);
+        },
+        build);
+    CF_EXPECT(std::move(res));
+    return {};
+  }
+
+  Result<std::string> DownloadTargetFile(const Build& build,
+                                         const std::string& target_directory,
+                                         const std::string& artifact_name);
+
+  std::unique_ptr<HttpClient> http_client;
+  std::unique_ptr<HttpClient> inner_http_client;
+  std::unique_ptr<CredentialSource> credential_source;
   std::string api_key_;
+  std::chrono::seconds retry_period_;
 };
 
-Build ArgumentToBuild(BuildApi* api, const std::string& arg,
-                      const std::string& default_build_target,
-                      const std::chrono::seconds& retry_period);
+std::string GetBuildZipName(const Build& build, const std::string& name);
 
-} // namespace cuttlefish
+}  // namespace cuttlefish
diff --git a/host/libs/web/credential_source.cc b/host/libs/web/credential_source.cc
index c12c494..33b878d 100644
--- a/host/libs/web/credential_source.cc
+++ b/host/libs/web/credential_source.cc
@@ -34,56 +34,52 @@
 
 } // namespace
 
-GceMetadataCredentialSource::GceMetadataCredentialSource(CurlWrapper& curl)
-    : curl(curl) {
+GceMetadataCredentialSource::GceMetadataCredentialSource(
+    HttpClient& http_client)
+    : http_client(http_client) {
   latest_credential = "";
   expiration = std::chrono::steady_clock::now();
 }
 
-std::string GceMetadataCredentialSource::Credential() {
+Result<std::string> GceMetadataCredentialSource::Credential() {
   if (expiration - std::chrono::steady_clock::now() < REFRESH_WINDOW) {
-    RefreshCredential();
+    CF_EXPECT(RefreshCredential());
   }
   return latest_credential;
 }
 
-void GceMetadataCredentialSource::RefreshCredential() {
-  auto curl_response =
-      curl.DownloadToJson(REFRESH_URL, {"Metadata-Flavor: Google"});
-  const auto& json = curl_response.data;
-  if (!curl_response.HttpSuccess()) {
-    LOG(FATAL) << "Error fetching credentials. The server response was \""
-               << json << "\", and code was " << curl_response.http_code;
-  }
-  CHECK(!json.isMember("error"))
-      << "Response had \"error\" but had http success status. Received \""
-      << json << "\"";
+Result<void> GceMetadataCredentialSource::RefreshCredential() {
+  auto response = CF_EXPECT(
+      http_client.DownloadToJson(REFRESH_URL, {"Metadata-Flavor: Google"}));
+  const auto& json = response.data;
+  CF_EXPECT(response.HttpSuccess(),
+            "Error fetching credentials. The server response was \""
+                << json << "\", and code was " << response.http_code);
+  CF_EXPECT(!json.isMember("error"),
+            "Response had \"error\" but had http success status. Received \""
+                << json << "\"");
 
-  bool has_access_token = json.isMember("access_token");
-  bool has_expires_in = json.isMember("expires_in");
-  if (!has_access_token || !has_expires_in) {
-    LOG(FATAL) << "GCE credential was missing access_token or expires_in. "
-               << "Full response was " << json << "";
-  }
+  CF_EXPECT(json.isMember("access_token") && json.isMember("expires_in"),
+            "GCE credential was missing access_token or expires_in. "
+                << "Full response was " << json << "");
 
   expiration = std::chrono::steady_clock::now() +
                std::chrono::seconds(json["expires_in"].asInt());
   latest_credential = json["access_token"].asString();
+  return {};
 }
 
 std::unique_ptr<CredentialSource> GceMetadataCredentialSource::make(
-    CurlWrapper& curl) {
+    HttpClient& http_client) {
   return std::unique_ptr<CredentialSource>(
-      new GceMetadataCredentialSource(curl));
+      new GceMetadataCredentialSource(http_client));
 }
 
 FixedCredentialSource::FixedCredentialSource(const std::string& credential) {
   this->credential = credential;
 }
 
-std::string FixedCredentialSource::Credential() {
-  return credential;
-}
+Result<std::string> FixedCredentialSource::Credential() { return credential; }
 
 std::unique_ptr<CredentialSource> FixedCredentialSource::make(
     const std::string& credential) {
@@ -91,7 +87,7 @@
 }
 
 Result<RefreshCredentialSource> RefreshCredentialSource::FromOauth2ClientFile(
-    CurlWrapper& curl, std::istream& stream) {
+    HttpClient& http_client, std::istream& stream) {
   Json::CharReaderBuilder builder;
   std::unique_ptr<Json::CharReader> reader(builder.newCharReader());
   Json::Value json;
@@ -118,61 +114,56 @@
   auto& client_secret = credential["client_secret"];
   CF_EXPECT(client_secret.type() == Json::ValueType::stringValue);
 
-  CF_EXPECT(credential.isMember("token_response"));
-  auto& token_response = credential["token_response"];
-  CF_EXPECT(token_response.type() == Json::ValueType::objectValue);
-
-  CF_EXPECT(token_response.isMember("refresh_token"));
+  CF_EXPECT(credential.isMember("refresh_token"));
   auto& refresh_token = credential["refresh_token"];
   CF_EXPECT(refresh_token.type() == Json::ValueType::stringValue);
 
-  return RefreshCredentialSource(curl, client_id.asString(),
+  return RefreshCredentialSource(http_client, client_id.asString(),
                                  client_secret.asString(),
                                  refresh_token.asString());
 }
 
 RefreshCredentialSource::RefreshCredentialSource(
-    CurlWrapper& curl, const std::string& client_id,
+    HttpClient& http_client, const std::string& client_id,
     const std::string& client_secret, const std::string& refresh_token)
-    : curl_(curl),
+    : http_client_(http_client),
       client_id_(client_id),
       client_secret_(client_secret),
       refresh_token_(refresh_token) {}
 
-std::string RefreshCredentialSource::Credential() {
+Result<std::string> RefreshCredentialSource::Credential() {
   if (expiration_ - std::chrono::steady_clock::now() < REFRESH_WINDOW) {
-    UpdateLatestCredential();
+    CF_EXPECT(UpdateLatestCredential());
   }
   return latest_credential_;
 }
 
-void RefreshCredentialSource::UpdateLatestCredential() {
+Result<void> RefreshCredentialSource::UpdateLatestCredential() {
   std::vector<std::string> headers = {
       "Content-Type: application/x-www-form-urlencoded"};
   std::stringstream data;
-  data << "client_id=" << curl_.UrlEscape(client_id_) << "&";
-  data << "client_secret=" << curl_.UrlEscape(client_secret_) << "&";
-  data << "refresh_token=" << curl_.UrlEscape(refresh_token_) << "&";
+  data << "client_id=" << http_client_.UrlEscape(client_id_) << "&";
+  data << "client_secret=" << http_client_.UrlEscape(client_secret_) << "&";
+  data << "refresh_token=" << http_client_.UrlEscape(refresh_token_) << "&";
   data << "grant_type=refresh_token";
 
   static constexpr char kUrl[] = "https://oauth2.googleapis.com/token";
-  auto response = curl_.PostToJson(kUrl, data.str(), headers);
-  CHECK(response.HttpSuccess()) << response.data;
+  auto response = CF_EXPECT(http_client_.PostToJson(kUrl, data.str(), headers));
+  CF_EXPECT(response.HttpSuccess(), response.data);
   auto& json = response.data;
 
-  CHECK(!json.isMember("error"))
-      << "Response had \"error\" but had http success status. Received \""
-      << json << "\"";
+  CF_EXPECT(!json.isMember("error"),
+            "Response had \"error\" but had http success status. Received \""
+                << json << "\"");
 
-  bool has_access_token = json.isMember("access_token");
-  bool has_expires_in = json.isMember("expires_in");
-  CHECK(has_access_token && has_expires_in)
-      << "GCE credential was missing access_token or expires_in. "
-      << "Full response was " << json << "";
+  CF_EXPECT(json.isMember("access_token") && json.isMember("expires_in"),
+            "Refresh credential was missing access_token or expires_in."
+                << " Full response was " << json << "");
 
   expiration_ = std::chrono::steady_clock::now() +
                 std::chrono::seconds(json["expires_in"].asInt());
   latest_credential_ = json["access_token"].asString();
+  return {};
 }
 
 static std::string CollectSslErrors() {
@@ -186,10 +177,10 @@
 }
 
 Result<ServiceAccountOauthCredentialSource>
-ServiceAccountOauthCredentialSource::FromJson(CurlWrapper& curl,
+ServiceAccountOauthCredentialSource::FromJson(HttpClient& http_client,
                                               const Json::Value& json,
                                               const std::string& scope) {
-  ServiceAccountOauthCredentialSource source(curl);
+  ServiceAccountOauthCredentialSource source(http_client);
   source.scope_ = scope;
 
   CF_EXPECT(json.isMember("client_email"));
@@ -213,25 +204,26 @@
 }
 
 ServiceAccountOauthCredentialSource::ServiceAccountOauthCredentialSource(
-    CurlWrapper& curl)
-    : curl_(curl), private_key_(nullptr, EVP_PKEY_free) {}
+    HttpClient& http_client)
+    : http_client_(http_client), private_key_(nullptr, EVP_PKEY_free) {}
 
-static std::string Base64Url(const char* data, std::size_t size) {
+static Result<std::string> Base64Url(const char* data, std::size_t size) {
   std::string base64;
-  CHECK(EncodeBase64(data, size, &base64));
+  CF_EXPECT(EncodeBase64(data, size, &base64));
   base64 = android::base::StringReplace(base64, "+", "-", /* all */ true);
   base64 = android::base::StringReplace(base64, "/", "_", /* all */ true);
   return base64;
 }
 
-static std::string JsonToBase64Url(const Json::Value& json) {
+static Result<std::string> JsonToBase64Url(const Json::Value& json) {
   Json::StreamWriterBuilder factory;
   auto serialized = Json::writeString(factory, json);
-  return Base64Url(serialized.c_str(), serialized.size());
+  return CF_EXPECT(Base64Url(serialized.c_str(), serialized.size()));
 }
 
-static std::string CreateJwt(const std::string& email, const std::string& scope,
-                             EVP_PKEY* private_key) {
+static Result<std::string> CreateJwt(const std::string& email,
+                                     const std::string& scope,
+                                     EVP_PKEY* private_key) {
   using std::chrono::duration_cast;
   using std::chrono::minutes;
   using std::chrono::seconds;
@@ -240,7 +232,7 @@
   Json::Value header_json;
   header_json["alg"] = "RS256";
   header_json["typ"] = "JWT";
-  std::string header_str = JsonToBase64Url(header_json);
+  std::string header_str = CF_EXPECT(JsonToBase64Url(header_json));
 
   Json::Value claim_set_json;
   claim_set_json["iss"] = email;
@@ -252,60 +244,58 @@
   auto exp = time + minutes(30);
   claim_set_json["exp"] =
       (uint64_t)duration_cast<seconds>(exp.time_since_epoch()).count();
-  std::string claim_set_str = JsonToBase64Url(claim_set_json);
+  std::string claim_set_str = CF_EXPECT(JsonToBase64Url(claim_set_json));
 
   std::string jwt_to_sign = header_str + "." + claim_set_str;
 
   std::unique_ptr<EVP_MD_CTX, void (*)(EVP_MD_CTX*)> sign_ctx(
       EVP_MD_CTX_create(), EVP_MD_CTX_free);
-  CHECK(EVP_DigestSignInit(sign_ctx.get(), nullptr, EVP_sha256(), nullptr,
-                           private_key));
-  CHECK(EVP_DigestSignUpdate(sign_ctx.get(), jwt_to_sign.c_str(),
-                             jwt_to_sign.size()));
+  CF_EXPECT(EVP_DigestSignInit(sign_ctx.get(), nullptr, EVP_sha256(), nullptr,
+                               private_key));
+  CF_EXPECT(EVP_DigestSignUpdate(sign_ctx.get(), jwt_to_sign.c_str(),
+                                 jwt_to_sign.size()));
   size_t length;
-  CHECK(EVP_DigestSignFinal(sign_ctx.get(), nullptr, &length));
+  CF_EXPECT(EVP_DigestSignFinal(sign_ctx.get(), nullptr, &length));
   std::vector<uint8_t> sig_raw(length);
-  CHECK(EVP_DigestSignFinal(sign_ctx.get(), sig_raw.data(), &length));
+  CF_EXPECT(EVP_DigestSignFinal(sign_ctx.get(), sig_raw.data(), &length));
 
-  return jwt_to_sign + "." + Base64Url((const char*)sig_raw.data(), length);
+  auto signature = CF_EXPECT(Base64Url((const char*)sig_raw.data(), length));
+  return jwt_to_sign + "." + signature;
 }
 
-void ServiceAccountOauthCredentialSource::RefreshCredential() {
+Result<void> ServiceAccountOauthCredentialSource::RefreshCredential() {
   static constexpr char URL[] = "https://oauth2.googleapis.com/token";
   static constexpr char GRANT[] = "urn:ietf:params:oauth:grant-type:jwt-bearer";
   std::stringstream content;
-  content << "grant_type=" << curl_.UrlEscape(GRANT) << "&";
-  auto jwt = CreateJwt(email_, scope_, private_key_.get());
-  content << "assertion=" << curl_.UrlEscape(jwt);
+  content << "grant_type=" << http_client_.UrlEscape(GRANT) << "&";
+  auto jwt = CF_EXPECT(CreateJwt(email_, scope_, private_key_.get()));
+  content << "assertion=" << http_client_.UrlEscape(jwt);
   std::vector<std::string> headers = {
       "Content-Type: application/x-www-form-urlencoded"};
-  auto curl_response = curl_.PostToJson(URL, content.str(), headers);
-  if (!curl_response.HttpSuccess()) {
-    LOG(FATAL) << "Error fetching credentials. The server response was \""
-               << curl_response.data << "\", and code was "
-               << curl_response.http_code;
-  }
-  Json::Value json = curl_response.data;
+  auto response =
+      CF_EXPECT(http_client_.PostToJson(URL, content.str(), headers));
+  CF_EXPECT(response.HttpSuccess(),
+            "Error fetching credentials. The server response was \""
+                << response.data << "\", and code was " << response.http_code);
+  Json::Value json = response.data;
 
-  CHECK(!json.isMember("error"))
-      << "Response had \"error\" but had http success status. Received \""
-      << json << "\"";
+  CF_EXPECT(!json.isMember("error"),
+            "Response had \"error\" but had http success status. Received \""
+                << json << "\"");
 
-  bool has_access_token = json.isMember("access_token");
-  bool has_expires_in = json.isMember("expires_in");
-  if (!has_access_token || !has_expires_in) {
-    LOG(FATAL) << "GCE credential was missing access_token or expires_in. "
-               << "Full response was " << json << "";
-  }
+  CF_EXPECT(json.isMember("access_token") && json.isMember("expires_in"),
+            "Service account credential was missing access_token or expires_in."
+                << " Full response was " << json << "");
 
   expiration_ = std::chrono::steady_clock::now() +
                 std::chrono::seconds(json["expires_in"].asInt());
   latest_credential_ = json["access_token"].asString();
+  return {};
 }
 
-std::string ServiceAccountOauthCredentialSource::Credential() {
+Result<std::string> ServiceAccountOauthCredentialSource::Credential() {
   if (expiration_ - std::chrono::steady_clock::now() < REFRESH_WINDOW) {
-    RefreshCredential();
+    CF_EXPECT(RefreshCredential());
   }
   return latest_credential_;
 }
diff --git a/host/libs/web/credential_source.h b/host/libs/web/credential_source.h
index 7ab64e1..03b74b8 100644
--- a/host/libs/web/credential_source.h
+++ b/host/libs/web/credential_source.h
@@ -22,29 +22,30 @@
 #include <openssl/evp.h>
 
 #include "common/libs/utils/result.h"
-#include "host/libs/web/curl_wrapper.h"
+#include "host/libs/web/http_client/http_client.h"
 
 namespace cuttlefish {
 
 class CredentialSource {
 public:
   virtual ~CredentialSource() = default;
-  virtual std::string Credential() = 0;
+  virtual Result<std::string> Credential() = 0;
 };
 
 class GceMetadataCredentialSource : public CredentialSource {
-  CurlWrapper& curl;
+  HttpClient& http_client;
   std::string latest_credential;
   std::chrono::steady_clock::time_point expiration;
 
-  void RefreshCredential();
-public:
- GceMetadataCredentialSource(CurlWrapper&);
- GceMetadataCredentialSource(GceMetadataCredentialSource&&) = default;
+  Result<void> RefreshCredential();
 
- virtual std::string Credential();
+ public:
+  GceMetadataCredentialSource(HttpClient&);
+  GceMetadataCredentialSource(GceMetadataCredentialSource&&) = default;
 
- static std::unique_ptr<CredentialSource> make(CurlWrapper&);
+  Result<std::string> Credential() override;
+
+  static std::unique_ptr<CredentialSource> make(HttpClient&);
 };
 
 class FixedCredentialSource : public CredentialSource {
@@ -52,7 +53,7 @@
 public:
   FixedCredentialSource(const std::string& credential);
 
-  virtual std::string Credential();
+  Result<std::string> Credential() override;
 
   static std::unique_ptr<CredentialSource> make(const std::string& credential);
 };
@@ -60,18 +61,18 @@
 class RefreshCredentialSource : public CredentialSource {
  public:
   static Result<RefreshCredentialSource> FromOauth2ClientFile(
-      CurlWrapper& curl, std::istream& stream);
+      HttpClient& http_client, std::istream& stream);
 
-  RefreshCredentialSource(CurlWrapper& curl, const std::string& client_id,
+  RefreshCredentialSource(HttpClient& http_client, const std::string& client_id,
                           const std::string& client_secret,
                           const std::string& refresh_token);
 
-  std::string Credential() override;
+  Result<std::string> Credential() override;
 
  private:
-  void UpdateLatestCredential();
+  Result<void> UpdateLatestCredential();
 
-  CurlWrapper& curl_;
+  HttpClient& http_client_;
   std::string client_id_;
   std::string client_secret_;
   std::string refresh_token_;
@@ -83,18 +84,18 @@
 class ServiceAccountOauthCredentialSource : public CredentialSource {
  public:
   static Result<ServiceAccountOauthCredentialSource> FromJson(
-      CurlWrapper& curl, const Json::Value& service_account_json,
+      HttpClient& http_client, const Json::Value& service_account_json,
       const std::string& scope);
   ServiceAccountOauthCredentialSource(ServiceAccountOauthCredentialSource&&) =
       default;
 
-  std::string Credential() override;
+  Result<std::string> Credential() override;
 
  private:
-  ServiceAccountOauthCredentialSource(CurlWrapper& curl);
-  void RefreshCredential();
+  ServiceAccountOauthCredentialSource(HttpClient& http_client);
+  Result<void> RefreshCredential();
 
-  CurlWrapper& curl_;
+  HttpClient& http_client_;
   std::string email_;
   std::string scope_;
   std::unique_ptr<EVP_PKEY, void (*)(EVP_PKEY*)> private_key_;
diff --git a/host/libs/web/curl_wrapper.cc b/host/libs/web/curl_wrapper.cc
deleted file mode 100644
index b81d498..0000000
--- a/host/libs/web/curl_wrapper.cc
+++ /dev/null
@@ -1,403 +0,0 @@
-//
-// Copyright (C) 2019 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-#include "host/libs/web/curl_wrapper.h"
-
-#include <stdio.h>
-
-#include <fstream>
-#include <mutex>
-#include <sstream>
-#include <string>
-#include <thread>
-
-#include <android-base/logging.h>
-#include <curl/curl.h>
-#include <json/json.h>
-
-namespace cuttlefish {
-namespace {
-
-size_t curl_to_function_cb(char* ptr, size_t, size_t nmemb, void* userdata) {
-  CurlWrapper::DataCallback* callback = (CurlWrapper::DataCallback*)userdata;
-  if (!(*callback)(ptr, nmemb)) {
-    return 0;  // Signals error to curl
-  }
-  return nmemb;
-}
-
-size_t file_write_callback(char *ptr, size_t, size_t nmemb, void *userdata) {
-  std::stringstream* stream = (std::stringstream*) userdata;
-  stream->write(ptr, nmemb);
-  return nmemb;
-}
-
-curl_slist* build_slist(const std::vector<std::string>& strings) {
-  curl_slist* curl_headers = nullptr;
-  for (const auto& str : strings) {
-    curl_slist* temp = curl_slist_append(curl_headers, str.c_str());
-    if (temp == nullptr) {
-      LOG(ERROR) << "curl_slist_append failed to add " << str;
-      if (curl_headers) {
-        curl_slist_free_all(curl_headers);
-        return nullptr;
-      }
-    }
-    curl_headers = temp;
-  }
-  return curl_headers;
-}
-
-class CurlWrapperImpl : public CurlWrapper {
- public:
-  CurlWrapperImpl() {
-    curl_ = curl_easy_init();
-    if (!curl_) {
-      LOG(ERROR) << "failed to initialize curl";
-      return;
-    }
-  }
-  ~CurlWrapperImpl() { curl_easy_cleanup(curl_); }
-
-  CurlResponse<std::string> PostToString(
-      const std::string& url, const std::string& data_to_write,
-      const std::vector<std::string>& headers) override {
-    std::lock_guard<std::mutex> lock(mutex_);
-    LOG(INFO) << "Attempting to download \"" << url << "\"";
-    if (!curl_) {
-      LOG(ERROR) << "curl was not initialized\n";
-      return {"", -1};
-    }
-    curl_slist* curl_headers = build_slist(headers);
-    curl_easy_reset(curl_);
-    curl_easy_setopt(curl_, CURLOPT_CAINFO,
-                     "/etc/ssl/certs/ca-certificates.crt");
-    curl_easy_setopt(curl_, CURLOPT_HTTPHEADER, curl_headers);
-    curl_easy_setopt(curl_, CURLOPT_URL, url.c_str());
-    curl_easy_setopt(curl_, CURLOPT_POSTFIELDSIZE, data_to_write.size());
-    curl_easy_setopt(curl_, CURLOPT_POSTFIELDS, data_to_write.c_str());
-    std::stringstream data_to_read;
-    curl_easy_setopt(curl_, CURLOPT_WRITEFUNCTION, file_write_callback);
-    curl_easy_setopt(curl_, CURLOPT_WRITEDATA, &data_to_read);
-    char error_buf[CURL_ERROR_SIZE];
-    curl_easy_setopt(curl_, CURLOPT_ERRORBUFFER, error_buf);
-    curl_easy_setopt(curl_, CURLOPT_VERBOSE, 1L);
-    CURLcode res = curl_easy_perform(curl_);
-    if (curl_headers) {
-      curl_slist_free_all(curl_headers);
-    }
-    if (res != CURLE_OK) {
-      LOG(ERROR) << "curl_easy_perform() failed. "
-                 << "Code was \"" << res << "\". "
-                 << "Strerror was \"" << curl_easy_strerror(res) << "\". "
-                 << "Error buffer was \"" << error_buf << "\".";
-      return {"", -1};
-    }
-    long http_code = 0;
-    curl_easy_getinfo(curl_, CURLINFO_RESPONSE_CODE, &http_code);
-    return {data_to_read.str(), http_code};
-  }
-
-  CurlResponse<Json::Value> PostToJson(
-      const std::string& url, const std::string& data_to_write,
-      const std::vector<std::string>& headers) override {
-    CurlResponse<std::string> response =
-        PostToString(url, data_to_write, headers);
-    const std::string& contents = response.data;
-    Json::CharReaderBuilder builder;
-    std::unique_ptr<Json::CharReader> reader(builder.newCharReader());
-    Json::Value json;
-    std::string errorMessage;
-    if (!reader->parse(&*contents.begin(), &*contents.end(), &json,
-                       &errorMessage)) {
-      LOG(ERROR) << "Could not parse json: " << errorMessage;
-      json["error"] = "Failed to parse json.";
-      json["response"] = contents;
-    }
-    return {json, response.http_code};
-  }
-
-  CurlResponse<Json::Value> PostToJson(
-      const std::string& url, const Json::Value& data_to_write,
-      const std::vector<std::string>& headers) override {
-    std::stringstream json_str;
-    json_str << data_to_write;
-    return PostToJson(url, json_str.str(), headers);
-  }
-
-  CurlResponse<bool> DownloadToCallback(
-      DataCallback callback, const std::string& url,
-      const std::vector<std::string>& headers) {
-    std::lock_guard<std::mutex> lock(mutex_);
-    LOG(INFO) << "Attempting to download \"" << url << "\"";
-    if (!curl_) {
-      LOG(ERROR) << "curl was not initialized\n";
-      return {false, -1};
-    }
-    if (!callback(nullptr, 0)) {  // Signal start of data
-      LOG(ERROR) << "Callback failure\n";
-      return {false, -1};
-    }
-    curl_slist* curl_headers = build_slist(headers);
-    curl_easy_reset(curl_);
-    curl_easy_setopt(curl_, CURLOPT_CAINFO,
-                     "/etc/ssl/certs/ca-certificates.crt");
-    curl_easy_setopt(curl_, CURLOPT_HTTPHEADER, curl_headers);
-    curl_easy_setopt(curl_, CURLOPT_URL, url.c_str());
-    curl_easy_setopt(curl_, CURLOPT_WRITEFUNCTION, curl_to_function_cb);
-    curl_easy_setopt(curl_, CURLOPT_WRITEDATA, &callback);
-    char error_buf[CURL_ERROR_SIZE];
-    curl_easy_setopt(curl_, CURLOPT_ERRORBUFFER, error_buf);
-    curl_easy_setopt(curl_, CURLOPT_VERBOSE, 1L);
-    CURLcode res = curl_easy_perform(curl_);
-    if (curl_headers) {
-      curl_slist_free_all(curl_headers);
-    }
-    if (res != CURLE_OK) {
-      LOG(ERROR) << "curl_easy_perform() failed. "
-                 << "Code was \"" << res << "\". "
-                 << "Strerror was \"" << curl_easy_strerror(res) << "\". "
-                 << "Error buffer was \"" << error_buf << "\".";
-      return {false, -1};
-    }
-    long http_code = 0;
-    curl_easy_getinfo(curl_, CURLINFO_RESPONSE_CODE, &http_code);
-    return {true, http_code};
-  }
-
-  CurlResponse<std::string> DownloadToFile(
-      const std::string& url, const std::string& path,
-      const std::vector<std::string>& headers) {
-    LOG(INFO) << "Attempting to save \"" << url << "\" to \"" << path << "\"";
-    std::fstream stream;
-    auto callback = [&stream, path](char* data, size_t size) -> bool {
-      if (data == nullptr) {
-        stream.open(path, std::ios::out | std::ios::binary | std::ios::trunc);
-        return !stream.fail();
-      }
-      stream.write(data, size);
-      return !stream.fail();
-    };
-    auto callback_res = DownloadToCallback(callback, url, headers);
-    if (!callback_res.data) {
-      return {"", callback_res.http_code};
-    }
-    return {path, callback_res.http_code};
-    std::lock_guard<std::mutex> lock(mutex_);
-    if (!curl_) {
-      LOG(ERROR) << "curl was not initialized\n";
-      return {"", -1};
-    }
-  }
-
-  CurlResponse<std::string> DownloadToString(
-      const std::string& url, const std::vector<std::string>& headers) {
-    std::stringstream stream;
-    auto callback = [&stream](char* data, size_t size) -> bool {
-      if (data == nullptr) {
-        stream = std::stringstream();
-        return true;
-      }
-      stream.write(data, size);
-      return true;
-    };
-    auto callback_res = DownloadToCallback(callback, url, headers);
-    if (!callback_res.data) {
-      return {"", callback_res.http_code};
-    }
-    return {stream.str(), callback_res.http_code};
-  }
-
-  CurlResponse<Json::Value> DownloadToJson(
-      const std::string& url, const std::vector<std::string>& headers) {
-    CurlResponse<std::string> response = DownloadToString(url, headers);
-    const std::string& contents = response.data;
-    Json::CharReaderBuilder builder;
-    std::unique_ptr<Json::CharReader> reader(builder.newCharReader());
-    Json::Value json;
-    std::string errorMessage;
-    if (!reader->parse(&*contents.begin(), &*contents.end(), &json,
-                       &errorMessage)) {
-      LOG(ERROR) << "Could not parse json: " << errorMessage;
-      json["error"] = "Failed to parse json.";
-      json["response"] = contents;
-    }
-    return {json, response.http_code};
-  }
-
-  CurlResponse<Json::Value> DeleteToJson(
-      const std::string& url,
-      const std::vector<std::string>& headers) override {
-    std::lock_guard<std::mutex> lock(mutex_);
-    LOG(INFO) << "Attempting to download \"" << url << "\"";
-    if (!curl_) {
-      LOG(ERROR) << "curl was not initialized\n";
-      return {"", -1};
-    }
-    curl_slist* curl_headers = build_slist(headers);
-    curl_easy_reset(curl_);
-    curl_easy_setopt(curl_, CURLOPT_CUSTOMREQUEST, "DELETE");
-    curl_easy_setopt(curl_, CURLOPT_CAINFO,
-                     "/etc/ssl/certs/ca-certificates.crt");
-    curl_easy_setopt(curl_, CURLOPT_HTTPHEADER, curl_headers);
-    curl_easy_setopt(curl_, CURLOPT_URL, url.c_str());
-    std::stringstream data_to_read;
-    curl_easy_setopt(curl_, CURLOPT_WRITEFUNCTION, file_write_callback);
-    curl_easy_setopt(curl_, CURLOPT_WRITEDATA, &data_to_read);
-    char error_buf[CURL_ERROR_SIZE];
-    curl_easy_setopt(curl_, CURLOPT_ERRORBUFFER, error_buf);
-    curl_easy_setopt(curl_, CURLOPT_VERBOSE, 1L);
-    CURLcode res = curl_easy_perform(curl_);
-    if (curl_headers) {
-      curl_slist_free_all(curl_headers);
-    }
-    if (res != CURLE_OK) {
-      LOG(ERROR) << "curl_easy_perform() failed. "
-                 << "Code was \"" << res << "\". "
-                 << "Strerror was \"" << curl_easy_strerror(res) << "\". "
-                 << "Error buffer was \"" << error_buf << "\".";
-      return {"", -1};
-    }
-    long http_code = 0;
-    curl_easy_getinfo(curl_, CURLINFO_RESPONSE_CODE, &http_code);
-
-    auto contents = data_to_read.str();
-    Json::CharReaderBuilder builder;
-    std::unique_ptr<Json::CharReader> reader(builder.newCharReader());
-    Json::Value json;
-    std::string errorMessage;
-    if (!reader->parse(&*contents.begin(), &*contents.end(), &json,
-                       &errorMessage)) {
-      LOG(ERROR) << "Could not parse json: " << errorMessage;
-      json["error"] = "Failed to parse json.";
-      json["response"] = contents;
-    }
-    return {json, http_code};
-  }
-
-  std::string UrlEscape(const std::string& text) override {
-    char* escaped_str = curl_easy_escape(curl_, text.c_str(), text.size());
-    std::string ret{escaped_str};
-    curl_free(escaped_str);
-    return ret;
-  }
-
- private:
-  CURL* curl_;
-  std::mutex mutex_;
-};
-
-class CurlServerErrorRetryingWrapper : public CurlWrapper {
- public:
-  CurlServerErrorRetryingWrapper(CurlWrapper& inner, int retry_attempts,
-                                 std::chrono::milliseconds retry_delay)
-      : inner_curl_(inner),
-        retry_attempts_(retry_attempts),
-        retry_delay_(retry_delay) {}
-
-  CurlResponse<std::string> PostToString(
-      const std::string& url, const std::string& data,
-      const std::vector<std::string>& headers) override {
-    return RetryImpl<std::string>(
-        [&, this]() { return inner_curl_.PostToString(url, data, headers); });
-  }
-
-  CurlResponse<Json::Value> PostToJson(
-      const std::string& url, const Json::Value& data,
-      const std::vector<std::string>& headers) override {
-    return RetryImpl<Json::Value>(
-        [&, this]() { return inner_curl_.PostToJson(url, data, headers); });
-  }
-
-  CurlResponse<Json::Value> PostToJson(
-      const std::string& url, const std::string& data,
-      const std::vector<std::string>& headers) override {
-    return RetryImpl<Json::Value>(
-        [&, this]() { return inner_curl_.PostToJson(url, data, headers); });
-  }
-
-  CurlResponse<std::string> DownloadToFile(
-      const std::string& url, const std::string& path,
-      const std::vector<std::string>& headers) {
-    return RetryImpl<std::string>(
-        [&, this]() { return inner_curl_.DownloadToFile(url, path, headers); });
-  }
-
-  CurlResponse<std::string> DownloadToString(
-      const std::string& url, const std::vector<std::string>& headers) {
-    return RetryImpl<std::string>(
-        [&, this]() { return inner_curl_.DownloadToString(url, headers); });
-  }
-
-  CurlResponse<Json::Value> DownloadToJson(
-      const std::string& url, const std::vector<std::string>& headers) {
-    return RetryImpl<Json::Value>(
-        [&, this]() { return inner_curl_.DownloadToJson(url, headers); });
-  }
-
-  CurlResponse<bool> DownloadToCallback(
-      DataCallback cb, const std::string& url,
-      const std::vector<std::string>& hdrs) override {
-    return RetryImpl<bool>(
-        [&, this]() { return inner_curl_.DownloadToCallback(cb, url, hdrs); });
-  }
-  CurlResponse<Json::Value> DeleteToJson(
-      const std::string& url,
-      const std::vector<std::string>& headers) override {
-    return RetryImpl<Json::Value>(
-        [&, this]() { return inner_curl_.DeleteToJson(url, headers); });
-  }
-
-  std::string UrlEscape(const std::string& text) override {
-    return inner_curl_.UrlEscape(text);
-  }
-
- private:
-  template <typename T>
-  CurlResponse<T> RetryImpl(std::function<CurlResponse<T>()> attempt_fn) {
-    CurlResponse<T> response;
-    for (int attempt = 0; attempt != retry_attempts_; ++attempt) {
-      if (attempt != 0) {
-        std::this_thread::sleep_for(retry_delay_);
-      }
-      response = attempt_fn();
-      if (!response.HttpServerError()) {
-        return response;
-      }
-    }
-    return response;
-  }
-
- private:
-  CurlWrapper& inner_curl_;
-  int retry_attempts_;
-  std::chrono::milliseconds retry_delay_;
-};
-
-}  // namespace
-
-/* static */ std::unique_ptr<CurlWrapper> CurlWrapper::Create() {
-  return std::unique_ptr<CurlWrapper>(new CurlWrapperImpl());
-}
-
-/* static */ std::unique_ptr<CurlWrapper> CurlWrapper::WithServerErrorRetry(
-    CurlWrapper& inner, int retry_attempts,
-    std::chrono::milliseconds retry_delay) {
-  return std::unique_ptr<CurlWrapper>(
-      new CurlServerErrorRetryingWrapper(inner, retry_attempts, retry_delay));
-}
-
-CurlWrapper::~CurlWrapper() = default;
-}
diff --git a/host/libs/web/curl_wrapper.h b/host/libs/web/curl_wrapper.h
deleted file mode 100644
index 3e897a4..0000000
--- a/host/libs/web/curl_wrapper.h
+++ /dev/null
@@ -1,74 +0,0 @@
-//
-// Copyright (C) 2019 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-#pragma once
-
-#include <chrono>
-#include <mutex>
-#include <string>
-
-#include <json/json.h>
-
-namespace cuttlefish {
-
-template <typename T>
-struct CurlResponse {
-  bool HttpInfo() { return http_code >= 100 && http_code <= 199; }
-  bool HttpSuccess() { return http_code >= 200 && http_code <= 299; }
-  bool HttpRedirect() { return http_code >= 300 && http_code <= 399; }
-  bool HttpClientError() { return http_code >= 400 && http_code <= 499; }
-  bool HttpServerError() { return http_code >= 500 && http_code <= 599; }
-
-  T data;
-  long http_code;
-};
-
-class CurlWrapper {
- public:
-  typedef std::function<bool(char*, size_t)> DataCallback;
-
-  static std::unique_ptr<CurlWrapper> Create();
-  static std::unique_ptr<CurlWrapper> WithServerErrorRetry(
-      CurlWrapper&, int retry_attempts, std::chrono::milliseconds retry_delay);
-  virtual ~CurlWrapper();
-
-  virtual CurlResponse<std::string> PostToString(
-      const std::string& url, const std::string& data,
-      const std::vector<std::string>& headers = {}) = 0;
-  virtual CurlResponse<Json::Value> PostToJson(
-      const std::string& url, const std::string& data,
-      const std::vector<std::string>& headers = {}) = 0;
-  virtual CurlResponse<Json::Value> PostToJson(
-      const std::string& url, const Json::Value& data,
-      const std::vector<std::string>& headers = {}) = 0;
-
-  virtual CurlResponse<std::string> DownloadToFile(
-      const std::string& url, const std::string& path,
-      const std::vector<std::string>& headers = {}) = 0;
-  virtual CurlResponse<std::string> DownloadToString(
-      const std::string& url, const std::vector<std::string>& headers = {}) = 0;
-  virtual CurlResponse<Json::Value> DownloadToJson(
-      const std::string& url, const std::vector<std::string>& headers = {}) = 0;
-  virtual CurlResponse<bool> DownloadToCallback(
-      DataCallback callback, const std::string& url,
-      const std::vector<std::string>& headers = {}) = 0;
-
-  virtual CurlResponse<Json::Value> DeleteToJson(
-      const std::string& url, const std::vector<std::string>& headers = {}) = 0;
-
-  virtual std::string UrlEscape(const std::string&) = 0;
-};
-
-}
diff --git a/host/libs/web/http_client/http_client.cc b/host/libs/web/http_client/http_client.cc
new file mode 100644
index 0000000..cbc6132
--- /dev/null
+++ b/host/libs/web/http_client/http_client.cc
@@ -0,0 +1,407 @@
+//
+// Copyright (C) 2019 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "host/libs/web/http_client/http_client.h"
+
+#include <stdio.h>
+
+#include <fstream>
+#include <mutex>
+#include <sstream>
+#include <string>
+#include <thread>
+
+#include <android-base/logging.h>
+#include <android-base/strings.h>
+#include <curl/curl.h>
+#include <json/json.h>
+
+#include "common/libs/utils/json.h"
+#include "common/libs/utils/subprocess.h"
+
+namespace cuttlefish {
+namespace {
+
+enum class HttpMethod {
+  kGet,
+  kPost,
+  kDelete,
+};
+
+size_t curl_to_function_cb(char* ptr, size_t, size_t nmemb, void* userdata) {
+  HttpClient::DataCallback* callback = (HttpClient::DataCallback*)userdata;
+  if (!(*callback)(ptr, nmemb)) {
+    return 0;  // Signals error to curl
+  }
+  return nmemb;
+}
+
+Result<std::string> CurlUrlGet(CURLU* url, CURLUPart what, unsigned int flags) {
+  char* str_ptr = nullptr;
+  CF_EXPECT(curl_url_get(url, what, &str_ptr, flags) == CURLUE_OK);
+  std::string str(str_ptr);
+  curl_free(str_ptr);
+  return str;
+}
+
+using ManagedCurlSlist =
+    std::unique_ptr<curl_slist, decltype(&curl_slist_free_all)>;
+
+Result<ManagedCurlSlist> SlistFromStrings(
+    const std::vector<std::string>& strings) {
+  ManagedCurlSlist curl_headers(nullptr, curl_slist_free_all);
+  for (const auto& str : strings) {
+    curl_slist* temp = curl_slist_append(curl_headers.get(), str.c_str());
+    CF_EXPECT(temp != nullptr,
+              "curl_slist_append failed to add \"" << str << "\"");
+    (void)curl_headers.release();  // Memory is now owned by `temp`
+    curl_headers.reset(temp);
+  }
+  return curl_headers;
+}
+
+class CurlClient : public HttpClient {
+ public:
+  CurlClient(NameResolver resolver) : resolver_(std::move(resolver)) {
+    curl_ = curl_easy_init();
+    if (!curl_) {
+      LOG(ERROR) << "failed to initialize curl";
+      return;
+    }
+  }
+  ~CurlClient() { curl_easy_cleanup(curl_); }
+
+  Result<HttpResponse<std::string>> GetToString(
+      const std::string& url,
+      const std::vector<std::string>& headers) override {
+    return DownloadToString(HttpMethod::kGet, url, headers);
+  }
+
+  Result<HttpResponse<std::string>> PostToString(
+      const std::string& url, const std::string& data_to_write,
+      const std::vector<std::string>& headers) override {
+    return DownloadToString(HttpMethod::kPost, url, headers, data_to_write);
+  }
+
+  Result<HttpResponse<std::string>> DeleteToString(
+      const std::string& url,
+      const std::vector<std::string>& headers) override {
+    return DownloadToString(HttpMethod::kDelete, url, headers);
+  }
+
+  Result<HttpResponse<Json::Value>> PostToJson(
+      const std::string& url, const std::string& data_to_write,
+      const std::vector<std::string>& headers) override {
+    return DownloadToJson(HttpMethod::kPost, url, headers, data_to_write);
+  }
+
+  Result<HttpResponse<Json::Value>> PostToJson(
+      const std::string& url, const Json::Value& data_to_write,
+      const std::vector<std::string>& headers) override {
+    std::stringstream json_str;
+    json_str << data_to_write;
+    return DownloadToJson(HttpMethod::kPost, url, headers, json_str.str());
+  }
+
+  Result<HttpResponse<void>> DownloadToCallback(
+      DataCallback callback, const std::string& url,
+      const std::vector<std::string>& headers) {
+    return DownloadToCallback(HttpMethod::kGet, callback, url, headers);
+  }
+
+  Result<HttpResponse<std::string>> DownloadToFile(
+      const std::string& url, const std::string& path,
+      const std::vector<std::string>& headers) {
+    LOG(INFO) << "Attempting to save \"" << url << "\" to \"" << path << "\"";
+    std::fstream stream;
+    auto callback = [&stream, path](char* data, size_t size) -> bool {
+      if (data == nullptr) {
+        stream.open(path, std::ios::out | std::ios::binary | std::ios::trunc);
+        return !stream.fail();
+      }
+      stream.write(data, size);
+      return !stream.fail();
+    };
+    auto http_response = CF_EXPECT(DownloadToCallback(callback, url, headers));
+    return HttpResponse<std::string>{path, http_response.http_code};
+  }
+
+  Result<HttpResponse<Json::Value>> DownloadToJson(
+      const std::string& url, const std::vector<std::string>& headers) {
+    return DownloadToJson(HttpMethod::kGet, url, headers);
+  }
+
+  Result<HttpResponse<Json::Value>> DeleteToJson(
+      const std::string& url,
+      const std::vector<std::string>& headers) override {
+    return DownloadToJson(HttpMethod::kDelete, url, headers);
+  }
+
+  std::string UrlEscape(const std::string& text) override {
+    char* escaped_str = curl_easy_escape(curl_, text.c_str(), text.size());
+    std::string ret{escaped_str};
+    curl_free(escaped_str);
+    return ret;
+  }
+
+ private:
+  Result<ManagedCurlSlist> ManuallyResolveUrl(const std::string& url_str) {
+    if (!resolver_) {
+      return ManagedCurlSlist(nullptr, curl_slist_free_all);
+    }
+    LOG(INFO) << "Manually resolving \"" << url_str << "\"";
+    std::stringstream resolve_line;
+    std::unique_ptr<CURLU, decltype(&curl_url_cleanup)> url(curl_url(),
+                                                            curl_url_cleanup);
+    CF_EXPECT(curl_url_set(url.get(), CURLUPART_URL, url_str.c_str(), 0) ==
+              CURLUE_OK);
+    auto hostname = CF_EXPECT(CurlUrlGet(url.get(), CURLUPART_HOST, 0));
+    resolve_line << "+" << hostname;
+    auto port =
+        CF_EXPECT(CurlUrlGet(url.get(), CURLUPART_PORT, CURLU_DEFAULT_PORT));
+    resolve_line << ":" << port << ":";
+    resolve_line << android::base::Join(CF_EXPECT(resolver_(hostname)), ",");
+    auto slist = CF_EXPECT(SlistFromStrings({resolve_line.str()}));
+    return slist;
+  }
+
+  Result<HttpResponse<Json::Value>> DownloadToJson(
+      HttpMethod method, const std::string& url,
+      const std::vector<std::string>& headers,
+      const std::string& data_to_write = "") {
+    auto response =
+        CF_EXPECT(DownloadToString(method, url, headers, data_to_write));
+    auto result = ParseJson(response.data);
+    if (!result.ok()) {
+      Json::Value error_json;
+      LOG(ERROR) << "Could not parse json: " << result.error().Message();
+      error_json["error"] = "Failed to parse json: " + result.error().Message();
+      error_json["response"] = response.data;
+      return HttpResponse<Json::Value>{error_json, response.http_code};
+    }
+    return HttpResponse<Json::Value>{*result, response.http_code};
+  }
+
+  Result<HttpResponse<std::string>> DownloadToString(
+      HttpMethod method, const std::string& url,
+      const std::vector<std::string>& headers,
+      const std::string& data_to_write = "") {
+    std::stringstream stream;
+    auto callback = [&stream](char* data, size_t size) -> bool {
+      if (data == nullptr) {
+        stream = std::stringstream();
+        return true;
+      }
+      stream.write(data, size);
+      return true;
+    };
+    auto http_response = CF_EXPECT(
+        DownloadToCallback(method, callback, url, headers, data_to_write));
+    return HttpResponse<std::string>{stream.str(), http_response.http_code};
+  }
+
+  Result<HttpResponse<void>> DownloadToCallback(
+      HttpMethod method, DataCallback callback, const std::string& url,
+      const std::vector<std::string>& headers,
+      const std::string& data_to_write = "") {
+    std::lock_guard<std::mutex> lock(mutex_);
+    auto extra_cache_entries = CF_EXPECT(ManuallyResolveUrl(url));
+    curl_easy_setopt(curl_, CURLOPT_RESOLVE, extra_cache_entries.get());
+    LOG(INFO) << "Attempting to download \"" << url << "\"";
+    CF_EXPECT(data_to_write.empty() || method == HttpMethod::kPost,
+              "data must be empty for non POST requests");
+    CF_EXPECT(curl_ != nullptr, "curl was not initialized");
+    CF_EXPECT(callback(nullptr, 0) /* Signal start of data */,
+              "callback failure");
+    auto curl_headers = CF_EXPECT(SlistFromStrings(headers));
+    curl_easy_reset(curl_);
+    if (method == HttpMethod::kDelete) {
+      curl_easy_setopt(curl_, CURLOPT_CUSTOMREQUEST, "DELETE");
+    }
+    curl_easy_setopt(curl_, CURLOPT_CAINFO,
+                     "/etc/ssl/certs/ca-certificates.crt");
+    curl_easy_setopt(curl_, CURLOPT_HTTPHEADER, curl_headers.get());
+    curl_easy_setopt(curl_, CURLOPT_URL, url.c_str());
+    if (method == HttpMethod::kPost) {
+      curl_easy_setopt(curl_, CURLOPT_POSTFIELDSIZE, data_to_write.size());
+      curl_easy_setopt(curl_, CURLOPT_POSTFIELDS, data_to_write.c_str());
+    }
+    curl_easy_setopt(curl_, CURLOPT_WRITEFUNCTION, curl_to_function_cb);
+    curl_easy_setopt(curl_, CURLOPT_WRITEDATA, &callback);
+    char error_buf[CURL_ERROR_SIZE];
+    curl_easy_setopt(curl_, CURLOPT_ERRORBUFFER, error_buf);
+    curl_easy_setopt(curl_, CURLOPT_VERBOSE, 1L);
+    CURLcode res = curl_easy_perform(curl_);
+    CF_EXPECT(res == CURLE_OK,
+              "curl_easy_perform() failed. "
+                  << "Code was \"" << res << "\". "
+                  << "Strerror was \"" << curl_easy_strerror(res) << "\". "
+                  << "Error buffer was \"" << error_buf << "\".");
+    long http_code = 0;
+    curl_easy_getinfo(curl_, CURLINFO_RESPONSE_CODE, &http_code);
+    return HttpResponse<void>{{}, http_code};
+  }
+
+  CURL* curl_;
+  NameResolver resolver_;
+  std::mutex mutex_;
+};
+
+class ServerErrorRetryClient : public HttpClient {
+ public:
+  ServerErrorRetryClient(HttpClient& inner, int retry_attempts,
+                         std::chrono::milliseconds retry_delay)
+      : inner_client_(inner),
+        retry_attempts_(retry_attempts),
+        retry_delay_(retry_delay) {}
+
+  Result<HttpResponse<std::string>> GetToString(
+      const std::string& url, const std::vector<std::string>& headers) {
+    auto fn = [&, this]() { return inner_client_.GetToString(url, headers); };
+    return CF_EXPECT(RetryImpl<std::string>(fn));
+  }
+
+  Result<HttpResponse<std::string>> PostToString(
+      const std::string& url, const std::string& data,
+      const std::vector<std::string>& headers) override {
+    auto fn = [&, this]() {
+      return inner_client_.PostToString(url, data, headers);
+    };
+    return CF_EXPECT(RetryImpl<std::string>(fn));
+  }
+
+  Result<HttpResponse<std::string>> DeleteToString(
+      const std::string& url, const std::vector<std::string>& headers) {
+    auto fn = [&, this]() {
+      return inner_client_.DeleteToString(url, headers);
+    };
+    return CF_EXPECT(RetryImpl<std::string>(fn));
+  }
+
+  Result<HttpResponse<Json::Value>> PostToJson(
+      const std::string& url, const Json::Value& data,
+      const std::vector<std::string>& headers) override {
+    auto fn = [&, this]() {
+      return inner_client_.PostToJson(url, data, headers);
+    };
+    return CF_EXPECT(RetryImpl<Json::Value>(fn));
+  }
+
+  Result<HttpResponse<Json::Value>> PostToJson(
+      const std::string& url, const std::string& data,
+      const std::vector<std::string>& headers) override {
+    auto fn = [&, this]() {
+      return inner_client_.PostToJson(url, data, headers);
+    };
+    return CF_EXPECT(RetryImpl<Json::Value>(fn));
+  }
+
+  Result<HttpResponse<std::string>> DownloadToFile(
+      const std::string& url, const std::string& path,
+      const std::vector<std::string>& headers) {
+    auto fn = [&, this]() {
+      return inner_client_.DownloadToFile(url, path, headers);
+    };
+    return CF_EXPECT(RetryImpl<std::string>(fn));
+  }
+
+  Result<HttpResponse<Json::Value>> DownloadToJson(
+      const std::string& url, const std::vector<std::string>& headers) {
+    auto fn = [&, this]() {
+      return inner_client_.DownloadToJson(url, headers);
+    };
+    return CF_EXPECT(RetryImpl<Json::Value>(fn));
+  }
+
+  Result<HttpResponse<void>> DownloadToCallback(
+      DataCallback cb, const std::string& url,
+      const std::vector<std::string>& hdrs) override {
+    auto fn = [&, this]() {
+      return inner_client_.DownloadToCallback(cb, url, hdrs);
+    };
+    return CF_EXPECT(RetryImpl<void>(fn));
+  }
+
+  Result<HttpResponse<Json::Value>> DeleteToJson(
+      const std::string& url,
+      const std::vector<std::string>& headers) override {
+    auto fn = [&, this]() { return inner_client_.DeleteToJson(url, headers); };
+    return CF_EXPECT(RetryImpl<Json::Value>(fn));
+  }
+
+  std::string UrlEscape(const std::string& text) override {
+    return inner_client_.UrlEscape(text);
+  }
+
+ private:
+  template <typename T>
+  Result<HttpResponse<T>> RetryImpl(
+      std::function<Result<HttpResponse<T>>()> attempt_fn) {
+    HttpResponse<T> response;
+    for (int attempt = 0; attempt != retry_attempts_; ++attempt) {
+      if (attempt != 0) {
+        std::this_thread::sleep_for(retry_delay_);
+      }
+      response = CF_EXPECT(attempt_fn());
+      if (!response.HttpServerError()) {
+        return response;
+      }
+    }
+    return response;
+  }
+
+ private:
+  HttpClient& inner_client_;
+  int retry_attempts_;
+  std::chrono::milliseconds retry_delay_;
+};
+
+}  // namespace
+
+Result<std::vector<std::string>> GetEntDnsResolve(const std::string& host) {
+  Command command("/bin/getent");
+  command.AddParameter("hosts");
+  command.AddParameter(host);
+
+  std::string out;
+  std::string err;
+  CF_EXPECT(RunWithManagedStdio(std::move(command), nullptr, &out, &err) == 0,
+            "`getent hosts " << host << "` failed: out = \"" << out
+                             << "\", err = \"" << err << "\"");
+  auto lines = android::base::Tokenize(out, "\n");
+  for (auto& line : lines) {
+    auto line_split = android::base::Tokenize(line, " \t");
+    CF_EXPECT(line_split.size() == 2,
+              "unexpected line format: \"" << line << "\"");
+    line = line_split[0];
+  }
+  return lines;
+}
+
+/* static */ std::unique_ptr<HttpClient> HttpClient::CurlClient(
+    NameResolver resolver) {
+  return std::unique_ptr<HttpClient>(new class CurlClient(std::move(resolver)));
+}
+
+/* static */ std::unique_ptr<HttpClient> HttpClient::ServerErrorRetryClient(
+    HttpClient& inner, int retry_attempts,
+    std::chrono::milliseconds retry_delay) {
+  return std::unique_ptr<HttpClient>(
+      new class ServerErrorRetryClient(inner, retry_attempts, retry_delay));
+}
+
+HttpClient::~HttpClient() = default;
+
+}  // namespace cuttlefish
diff --git a/host/libs/web/http_client/http_client.h b/host/libs/web/http_client/http_client.h
new file mode 100644
index 0000000..1129cc1
--- /dev/null
+++ b/host/libs/web/http_client/http_client.h
@@ -0,0 +1,102 @@
+//
+// Copyright (C) 2019 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#pragma once
+
+#include <chrono>
+#include <mutex>
+#include <string>
+#include <type_traits>
+
+#include <json/json.h>
+
+#include "common/libs/utils/result.h"
+
+namespace cuttlefish {
+
+static inline bool IsHttpSuccess(int http_code) {
+  return http_code >= 200 && http_code <= 299;
+};
+
+struct HttpVoidResponse {};
+
+template <typename T>
+struct HttpResponse {
+  bool HttpInfo() { return http_code >= 100 && http_code <= 199; }
+  bool HttpSuccess() { return IsHttpSuccess(http_code); }
+  bool HttpRedirect() { return http_code >= 300 && http_code <= 399; }
+  bool HttpClientError() { return http_code >= 400 && http_code <= 499; }
+  bool HttpServerError() { return http_code >= 500 && http_code <= 599; }
+
+  typename std::conditional<std::is_void_v<T>, HttpVoidResponse, T>::type data;
+  long http_code;
+};
+
+using NameResolver =
+    std::function<Result<std::vector<std::string>>(const std::string&)>;
+
+Result<std::vector<std::string>> GetEntDnsResolve(const std::string& host);
+
+class HttpClient {
+ public:
+  typedef std::function<bool(char*, size_t)> DataCallback;
+
+  static std::unique_ptr<HttpClient> CurlClient(
+      NameResolver resolver = NameResolver());
+  static std::unique_ptr<HttpClient> ServerErrorRetryClient(
+      HttpClient&, int retry_attempts, std::chrono::milliseconds retry_delay);
+
+  virtual ~HttpClient();
+
+  virtual Result<HttpResponse<std::string>> GetToString(
+      const std::string& url, const std::vector<std::string>& headers = {}) = 0;
+  virtual Result<HttpResponse<std::string>> PostToString(
+      const std::string& url, const std::string& data,
+      const std::vector<std::string>& headers = {}) = 0;
+  virtual Result<HttpResponse<std::string>> DeleteToString(
+      const std::string& url, const std::vector<std::string>& headers = {}) = 0;
+
+  // Returns the json object contained in the response's body.
+  //
+  // NOTE: In case of a parsing error a successful `result` will be returned
+  // with the relevant http status code and a json object with the next format:
+  // {
+  //   "error": "Failed to parse json",
+  //   "response: "<THE RESPONSE BODY>"
+  // }
+  virtual Result<HttpResponse<Json::Value>> PostToJson(
+      const std::string& url, const std::string& data,
+      const std::vector<std::string>& headers = {}) = 0;
+  virtual Result<HttpResponse<Json::Value>> PostToJson(
+      const std::string& url, const Json::Value& data,
+      const std::vector<std::string>& headers = {}) = 0;
+  virtual Result<HttpResponse<Json::Value>> DownloadToJson(
+      const std::string& url, const std::vector<std::string>& headers = {}) = 0;
+  virtual Result<HttpResponse<Json::Value>> DeleteToJson(
+      const std::string& url, const std::vector<std::string>& headers = {}) = 0;
+
+  virtual Result<HttpResponse<std::string>> DownloadToFile(
+      const std::string& url, const std::string& path,
+      const std::vector<std::string>& headers = {}) = 0;
+
+  // Returns response's status code.
+  virtual Result<HttpResponse<void>> DownloadToCallback(
+      DataCallback callback, const std::string& url,
+      const std::vector<std::string>& headers = {}) = 0;
+
+  virtual std::string UrlEscape(const std::string&) = 0;
+};
+
+}  // namespace cuttlefish
diff --git a/host/libs/web/http_client/sso_client.cc b/host/libs/web/http_client/sso_client.cc
new file mode 100644
index 0000000..04bc616
--- /dev/null
+++ b/host/libs/web/http_client/sso_client.cc
@@ -0,0 +1,142 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "host/libs/web/http_client/sso_client.h"
+
+#include <iostream>
+#include <regex>
+
+#include "common/libs/utils/subprocess.h"
+
+namespace cuttlefish {
+namespace http_client {
+namespace {
+
+constexpr char kSsoClientBin[] = "/usr/bin/sso_client";
+
+// Matches the sso_client's standard output when it succeeds expecting a valid
+// http response.
+const std::regex kStdoutRegex(
+    "HTTP/\\d+\\.\\d+\\s(\\d+)\\s.+\r\n" /* status */
+    "(?:.+\r\n)+\r\n"                    /* headers */
+    "(.+)?"                              /* body */
+    "\n?" /* new line added by the sso_client if a body exists */);
+
+enum class HttpMethod {
+  kGet,
+  kPost,
+  kDelete,
+};
+
+const char* kHttpMethodStrings[] = {"GET", "POST", "DELETE"};
+
+Result<HttpResponse<std::string>> MakeRequest(
+    ExecCmdFunc exec_cmd_func_, const std::string& url,
+    HttpMethod method = HttpMethod::kGet, const std::string& data = "") {
+  Command sso_client_cmd(kSsoClientBin);
+  sso_client_cmd.AddParameter("--use_master_cookie");
+  sso_client_cmd.AddParameter("--request_timeout=300");  // 5 minutes
+  sso_client_cmd.AddParameter("--dump_header");
+  sso_client_cmd.AddParameter("--url=" + url);
+  sso_client_cmd.AddParameter("--method=" +
+                              std::string(kHttpMethodStrings[(int)method]));
+  if (method == HttpMethod::kPost) {
+    if (!data.empty()) {
+      sso_client_cmd.AddParameter("--data=" + data);
+    }
+  }
+  std::string stdout_, stderr_;
+  int ret = exec_cmd_func_(std::move(sso_client_cmd), nullptr, &stdout_,
+                           &stderr_, SubprocessOptions());
+  CF_EXPECT(ret == 0,
+            "`sso_client` execution failed with combined stdout and stderr: "
+                << stdout_ << stderr_);
+  CF_EXPECT(std::regex_match(stdout_, kStdoutRegex),
+            "Failed parsing `sso_client` output. Output:\n"
+                << stdout_);
+  std::smatch match;
+  std::regex_search(stdout_, match, kStdoutRegex);
+  long status_code = std::atol(match[1].str().data());
+  std::string body = "";
+  if (match.size() == 3) {
+    body = match[2];
+  }
+  return HttpResponse<std::string>{body, status_code};
+}
+}  // namespace
+
+SsoClient::SsoClient() : exec_cmd_func_(&RunWithManagedStdio) {}
+
+SsoClient::SsoClient(ExecCmdFunc exec_cmd_func)
+    : exec_cmd_func_(exec_cmd_func) {}
+
+SsoClient::~SsoClient() {}
+
+Result<HttpResponse<std::string>> SsoClient::GetToString(
+    const std::string& url, const std::vector<std::string>& headers) {
+  // TODO(b/250670329): Handle request headers.
+  CF_EXPECT(headers.empty(), "headers are not handled yet");
+  return MakeRequest(exec_cmd_func_, url);
+}
+
+Result<HttpResponse<std::string>> SsoClient::PostToString(
+    const std::string& url, const std::string& data,
+    const std::vector<std::string>& headers) {
+  // TODO(b/250670329): Handle request headers.
+  CF_EXPECT(headers.empty(), "headers are not handled yet");
+  return MakeRequest(exec_cmd_func_, url, HttpMethod::kPost, data);
+};
+
+Result<HttpResponse<std::string>> SsoClient::DeleteToString(
+    const std::string& url, const std::vector<std::string>& headers) {
+  // TODO(b/250670329): Handle request headers.
+  CF_EXPECT(headers.empty(), "headers are not handled yet");
+  return MakeRequest(exec_cmd_func_, url, HttpMethod::kDelete);
+}
+
+Result<HttpResponse<Json::Value>> SsoClient::PostToJson(
+    const std::string&, const std::string&, const std::vector<std::string>&) {
+  return CF_ERR("Not implemented");
+}
+
+Result<HttpResponse<Json::Value>> SsoClient::PostToJson(
+    const std::string&, const Json::Value&, const std::vector<std::string>&) {
+  return CF_ERR("Not implemented");
+}
+
+Result<HttpResponse<std::string>> SsoClient::DownloadToFile(
+    const std::string&, const std::string&, const std::vector<std::string>&) {
+  return CF_ERR("Not implemented");
+}
+
+Result<HttpResponse<Json::Value>> SsoClient::DownloadToJson(
+    const std::string&, const std::vector<std::string>&) {
+  return CF_ERR("Not implemented");
+}
+
+Result<HttpResponse<void>> SsoClient::DownloadToCallback(
+    DataCallback, const std::string&, const std::vector<std::string>&) {
+  return CF_ERR("Not implemented");
+}
+
+Result<HttpResponse<Json::Value>> SsoClient::DeleteToJson(
+    const std::string&, const std::vector<std::string>&) {
+  return CF_ERR("Not implemented");
+}
+
+std::string SsoClient::UrlEscape(const std::string&) { return ""; }
+
+}  // namespace http_client
+}  // namespace cuttlefish
diff --git a/host/libs/web/http_client/sso_client.h b/host/libs/web/http_client/sso_client.h
new file mode 100644
index 0000000..846823b
--- /dev/null
+++ b/host/libs/web/http_client/sso_client.h
@@ -0,0 +1,79 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#pragma once
+
+#include "common/libs/utils/subprocess.h"
+#include "host/libs/web/http_client/http_client.h"
+
+namespace cuttlefish {
+namespace http_client {
+
+typedef std::function<int(Command&&, const std::string*, std::string*,
+                          std::string*, SubprocessOptions)>
+    ExecCmdFunc;
+
+class SsoClient : public HttpClient {
+ public:
+  SsoClient();
+
+  SsoClient(ExecCmdFunc);
+
+  ~SsoClient();
+
+  Result<HttpResponse<std::string>> GetToString(
+      const std::string& url,
+      const std::vector<std::string>& headers = {}) override;
+
+  Result<HttpResponse<std::string>> PostToString(
+      const std::string&, const std::string&,
+      const std::vector<std::string>& headers = {}) override;
+
+  Result<HttpResponse<std::string>> DeleteToString(
+      const std::string& url,
+      const std::vector<std::string>& headers = {}) override;
+
+  Result<HttpResponse<Json::Value>> PostToJson(
+      const std::string&, const std::string&,
+      const std::vector<std::string>& headers = {}) override;
+
+  Result<HttpResponse<Json::Value>> PostToJson(
+      const std::string&, const Json::Value&,
+      const std::vector<std::string>& headers = {}) override;
+
+  Result<HttpResponse<std::string>> DownloadToFile(
+      const std::string&, const std::string&,
+      const std::vector<std::string>& headers = {}) override;
+
+  Result<HttpResponse<Json::Value>> DownloadToJson(
+      const std::string&,
+      const std::vector<std::string>& headers = {}) override;
+
+  Result<HttpResponse<void>> DownloadToCallback(
+      DataCallback, const std::string&,
+      const std::vector<std::string>& headers = {}) override;
+
+  Result<HttpResponse<Json::Value>> DeleteToJson(
+      const std::string&,
+      const std::vector<std::string>& headers = {}) override;
+
+  std::string UrlEscape(const std::string&) override;
+
+ private:
+  ExecCmdFunc exec_cmd_func_;
+};
+
+}  // namespace http_client
+}  // namespace cuttlefish
diff --git a/host/libs/web/http_client/unittest/main_test.cc b/host/libs/web/http_client/unittest/main_test.cc
new file mode 100644
index 0000000..d2ceeb7
--- /dev/null
+++ b/host/libs/web/http_client/unittest/main_test.cc
@@ -0,0 +1,21 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include <gtest/gtest.h>
+
+int main(int argc, char** argv) {
+  ::testing::InitGoogleTest(&argc, argv);
+  return RUN_ALL_TESTS();
+}
diff --git a/host/libs/web/http_client/unittest/sso_client_test.cc b/host/libs/web/http_client/unittest/sso_client_test.cc
new file mode 100644
index 0000000..84e98c2
--- /dev/null
+++ b/host/libs/web/http_client/unittest/sso_client_test.cc
@@ -0,0 +1,209 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "host/libs/web/http_client/sso_client.h"
+
+#include <iostream>
+
+#include <gtest/gtest.h>
+
+namespace cuttlefish {
+namespace http_client {
+
+TEST(SsoClientTest, GetToStringSucceeds) {
+  std::string stdout_ =
+      "HTTP/1.1 222 Bad Request\r\n"
+      "Content-Type: application/json\r\n"
+      "Vary: Accept-Encoding\r\n"
+      "Date: Tue, 19 Jul 2022 00:00:54 GMT\r\n"
+      "Pragma: no-cache\r\n"
+      "Expires: Fri, 01 Jan 1990 00:00:00 GMT\r\n"
+      "Cache-Control: no-cache, must-revalidate\r\n"
+      "\r\n"
+      "foo"
+      "\n";
+  auto exec = [&](Command&&, const std::string*, std::string* out, std::string*,
+                  SubprocessOptions) {
+    *out = stdout_;
+    return 0;
+  };
+  SsoClient client(exec);
+
+  auto result = client.GetToString("https://some.url");
+
+  EXPECT_TRUE(result.ok()) << result.error().Trace();
+  EXPECT_EQ(result->data, "foo");
+  EXPECT_EQ(result->http_code, 222);
+}
+
+TEST(SsoClientTest, GetToStringSucceedsEmptyBody) {
+  std::string stdout_ =
+      "HTTP/1.1 222 OK\r\n"
+      "Content-Type: application/json\r\n"
+      "\r\n"
+      "\n";
+  auto exec = [&](Command&&, const std::string*, std::string* out, std::string*,
+                  SubprocessOptions) {
+    *out = stdout_;
+    return 0;
+  };
+  SsoClient client(exec);
+
+  auto result = client.GetToString("https://some.url");
+
+  EXPECT_TRUE(result.ok()) << result.error().Trace();
+  EXPECT_EQ(result->data, "");
+  EXPECT_EQ(result->http_code, 222);
+}
+
+TEST(SsoClientTest, GetToStringNoBody) {
+  std::string stdout_ =
+      "HTTP/1.1 502 Bad Gateway\r\n"
+      "Content-Type: application/json\r\n"
+      "\r\n";
+  auto exec = [&](Command&&, const std::string*, std::string* out, std::string*,
+                  SubprocessOptions) {
+    *out = stdout_;
+    return 0;
+  };
+  SsoClient client(exec);
+
+  auto result = client.GetToString("https://some.url");
+
+  EXPECT_TRUE(result.ok()) << result.error().Trace();
+  EXPECT_EQ(result->data, "");
+  EXPECT_EQ(result->http_code, 502);
+}
+
+constexpr char kBashScriptPrefix[] = R"(#!/bin/bash
+
+/usr/bin/sso_client \
+--use_master_cookie \
+--request_timeout=300 \
+--dump_header \)";
+
+TEST(SsoClientTest, GetToStringVerifyCommandArgs) {
+  std::string cmd_as_bash_script;
+  auto exec = [&](Command&& cmd, const std::string*, std::string*, std::string*,
+                  SubprocessOptions) {
+    cmd_as_bash_script = cmd.AsBashScript();
+    return 0;
+  };
+  SsoClient client(exec);
+
+  client.GetToString("https://some.url");
+
+  std::string expected = std::string(kBashScriptPrefix) + R"(
+--url=https://some.url \
+--method=GET)";
+  EXPECT_EQ(cmd_as_bash_script, expected);
+}
+
+TEST(SsoClientTest, PostToStringVerifyCommandArgs) {
+  std::string cmd_as_bash_script;
+  auto exec = [&](Command&& cmd, const std::string*, std::string*, std::string*,
+                  SubprocessOptions) {
+    cmd_as_bash_script = cmd.AsBashScript();
+    return 0;
+  };
+  SsoClient client(exec);
+
+  client.PostToString("https://some.url", "foo");
+
+  std::string expected = std::string(kBashScriptPrefix) + R"(
+--url=https://some.url \
+--method=POST \
+--data=foo)";
+  EXPECT_EQ(cmd_as_bash_script, expected);
+}
+
+TEST(SsoClientTest, PostToStringEmptyDataVerifyCommandArgs) {
+  std::string cmd_as_bash_script;
+  auto exec = [&](Command&& cmd, const std::string*, std::string*, std::string*,
+                  SubprocessOptions) {
+    cmd_as_bash_script = cmd.AsBashScript();
+    return 0;
+  };
+  SsoClient client(exec);
+
+  client.PostToString("https://some.url", "");
+
+  std::string expected = std::string(kBashScriptPrefix) + R"(
+--url=https://some.url \
+--method=POST)";
+  EXPECT_EQ(cmd_as_bash_script, expected);
+}
+
+TEST(SsoClientTest, DeleteToStringVerifyCommandArgs) {
+  std::string cmd_as_bash_script;
+  auto exec = [&](Command&& cmd, const std::string*, std::string*, std::string*,
+                  SubprocessOptions) {
+    cmd_as_bash_script = cmd.AsBashScript();
+    return 0;
+  };
+  SsoClient client(exec);
+
+  client.DeleteToString("https://some.url");
+
+  std::string expected = std::string(kBashScriptPrefix) + R"(
+--url=https://some.url \
+--method=DELETE)";
+  EXPECT_EQ(cmd_as_bash_script, expected);
+}
+
+TEST(SsoClientTest, GetToStringFailsInvalidResponseFormat) {
+  std::string stdout_ = "E0719 13:45:32.891177 2702210 foo failed";
+  auto exec = [&](Command&&, const std::string*, std::string* out, std::string*,
+                  SubprocessOptions) {
+    *out = stdout_;
+    return 0;
+  };
+  SsoClient client(exec);
+
+  auto result = client.GetToString("https://some.url");
+
+  EXPECT_FALSE(result.ok());
+}
+
+TEST(SsoClientTest, GetToStringFailsEmptyStdout) {
+  auto exec = [&](Command&&, const std::string*, std::string*, std::string*,
+                  SubprocessOptions) { return 0; };
+  SsoClient client(exec);
+
+  auto result = client.GetToString("https://some.url");
+
+  EXPECT_FALSE(result.ok());
+}
+
+TEST(SsoClientTest, GetToStringFailsExecutionFails) {
+  std::string stdout_ = "foo";
+  std::string stderr_ = "bar";
+  auto exec = [&](Command&&, const std::string*, std::string* out,
+                  std::string* err, SubprocessOptions) {
+    *out = stdout_;
+    *err = stderr_;
+    return -1;
+  };
+  SsoClient client(exec);
+
+  auto result = client.GetToString("https://some.url");
+
+  EXPECT_FALSE(result.ok());
+  EXPECT_TRUE(result.error().Message().find(stdout_) != std::string::npos);
+  EXPECT_TRUE(result.error().Message().find(stderr_) != std::string::npos);
+}
+
+}  // namespace http_client
+}  // namespace cuttlefish
diff --git a/host/libs/web/install_zip.cc b/host/libs/web/install_zip.cc
deleted file mode 100644
index 624c419..0000000
--- a/host/libs/web/install_zip.cc
+++ /dev/null
@@ -1,54 +0,0 @@
-//
-// Copyright (C) 2019 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-#include "install_zip.h"
-
-#include <stdlib.h>
-
-#include <string>
-#include <vector>
-
-#include <android-base/strings.h>
-#include <android-base/logging.h>
-
-#include "common/libs/utils/archive.h"
-#include "common/libs/utils/subprocess.h"
-
-std::vector<std::string> ExtractImages(const std::string& archive_file,
-                                       const std::string& target_directory,
-                                       const std::vector<std::string>& images) {
-  cuttlefish::Archive archive(archive_file);
-  bool extracted =
-      images.size() > 0
-          ? archive.ExtractFiles(images, target_directory)
-          : archive.ExtractAll(target_directory);
-  if (!extracted) {
-    LOG(ERROR) << "Unable to extract images.";
-    return {};
-  }
-
-  std::vector<std::string> files =
-      images.size() > 0 ? std::move(images) : archive.Contents();
-  auto it = files.begin();
-  while (it != files.end()) {
-    if (*it == "" || android::base::EndsWith(*it, "/")) {
-      it = files.erase(it);
-    } else {
-      *it = target_directory + "/" + *it;
-      it++;
-    }
-  }
-  return files;
-}
diff --git a/host/libs/web/install_zip.h b/host/libs/web/install_zip.h
deleted file mode 100644
index c59ff33..0000000
--- a/host/libs/web/install_zip.h
+++ /dev/null
@@ -1,23 +0,0 @@
-//
-// Copyright (C) 2019 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-#pragma once
-
-#include <string>
-#include <vector>
-
-std::vector<std::string> ExtractImages(const std::string& archive,
-                                       const std::string& target_directory,
-                                       const std::vector<std::string>& images);
diff --git a/host/libs/wmediumd_controller/wmediumd_api_protocol.cpp b/host/libs/wmediumd_controller/wmediumd_api_protocol.cpp
index 61ce771..a54fdc5 100644
--- a/host/libs/wmediumd_controller/wmediumd_api_protocol.cpp
+++ b/host/libs/wmediumd_controller/wmediumd_api_protocol.cpp
@@ -20,12 +20,16 @@
 #include <android-base/strings.h>
 
 #include <cstdlib>
+#include <iomanip>
 #include <iostream>
 #include <string>
 #include <vector>
 
 #include "common/libs/fs/shared_buf.h"
 
+#define MAC_ADDR_LEN 6
+#define STR_MAC_ADDR_LEN 17
+
 template <class T>
 static void AppendBinaryRepresentation(std::string& buf, const T& data) {
   std::copy(reinterpret_cast<const char*>(&data),
@@ -33,8 +37,62 @@
             std::back_inserter(buf));
 }
 
+static std::array<uint8_t, 6> ParseMacAddress(const std::string& macAddr) {
+  if (!cuttlefish::ValidMacAddr(macAddr)) {
+    LOG(FATAL) << "invalid mac address " << macAddr;
+  }
+
+  auto split_mac = android::base::Split(macAddr, ":");
+  std::array<uint8_t, 6> mac;
+  for (int i = 0; i < 6; i++) {
+    char* end_ptr;
+    mac[i] = (uint8_t)strtol(split_mac[i].c_str(), &end_ptr, 16);
+  }
+
+  return mac;
+}
+
 namespace cuttlefish {
 
+bool ValidMacAddr(const std::string& macAddr) {
+  if (macAddr.size() != STR_MAC_ADDR_LEN) {
+    return false;
+  }
+
+  if (macAddr[2] != ':' || macAddr[5] != ':' || macAddr[8] != ':' ||
+      macAddr[11] != ':' || macAddr[14] != ':') {
+    return false;
+  }
+
+  for (int i = 0; i < STR_MAC_ADDR_LEN; ++i) {
+    if ((i - 2) % 3 == 0) continue;
+    char c = macAddr[i];
+
+    if (isupper(c)) {
+      c = tolower(c);
+    }
+
+    if ((c < '0' || c > '9') && (c < 'a' || c > 'f')) return false;
+  }
+
+  return true;
+}
+
+std::string MacToString(const char* macAddr) {
+  std::stringstream result;
+
+  for (int i = 0; i < MAC_ADDR_LEN; i++) {
+    result << std::setfill('0') << std::setw(2) << std::right << std::hex
+           << static_cast<int>(static_cast<uint8_t>(macAddr[i]));
+
+    if (i != 5) {
+      result << ":";
+    }
+  }
+
+  return result.str();
+}
+
 std::string WmediumdMessage::Serialize(void) const {
   std::string result;
 
@@ -57,30 +115,8 @@
 WmediumdMessageSetSnr::WmediumdMessageSetSnr(const std::string& node1,
                                              const std::string& node2,
                                              uint8_t snr) {
-  auto splitted_mac1 = android::base::Split(node1, ":");
-  auto splitted_mac2 = android::base::Split(node2, ":");
-
-  if (splitted_mac1.size() != 6) {
-    LOG(FATAL) << "invalid mac address length " << node1;
-  }
-
-  if (splitted_mac2.size() != 6) {
-    LOG(FATAL) << "invalid mac address length " << node2;
-  }
-
-  for (int i = 0; i < 6; i++) {
-    char* end_ptr;
-    node1_mac_[i] = (uint8_t)strtol(splitted_mac1[i].c_str(), &end_ptr, 16);
-    if (end_ptr != splitted_mac1[i].c_str() + splitted_mac1[i].size()) {
-      LOG(FATAL) << "cannot parse " << splitted_mac1[i] << " of " << node1;
-    }
-
-    node2_mac_[i] = (uint8_t)strtol(splitted_mac2[i].c_str(), &end_ptr, 16);
-    if (end_ptr != splitted_mac2[i].c_str() + splitted_mac2[i].size()) {
-      LOG(FATAL) << "cannot parse " << splitted_mac2[i] << " of " << node1;
-    }
-  }
-
+  node1_mac_ = ParseMacAddress(node1);
+  node2_mac_ = ParseMacAddress(node2);
   snr_ = snr;
 }
 
@@ -104,6 +140,44 @@
   buf.push_back('\0');
 }
 
+WmediumdMessageSetPosition::WmediumdMessageSetPosition(const std::string& node,
+                                                       double x, double y) {
+  mac_ = ParseMacAddress(node);
+  x_ = x;
+  y_ = y;
+}
+
+void WmediumdMessageSetPosition::SerializeBody(std::string& buf) const {
+  std::copy(std::begin(mac_), std::end(mac_), std::back_inserter(buf));
+  AppendBinaryRepresentation(buf, x_);
+  AppendBinaryRepresentation(buf, y_);
+}
+
+WmediumdMessageSetLci::WmediumdMessageSetLci(const std::string& node,
+                                             const std::string& lci) {
+  mac_ = ParseMacAddress(node);
+  lci_ = lci;
+}
+
+void WmediumdMessageSetLci::SerializeBody(std::string& buf) const {
+  std::copy(std::begin(mac_), std::end(mac_), std::back_inserter(buf));
+  std::copy(std::begin(lci_), std::end(lci_), std::back_inserter(buf));
+  buf.push_back('\0');
+}
+
+WmediumdMessageSetCivicloc::WmediumdMessageSetCivicloc(
+    const std::string& node, const std::string& civicloc) {
+  mac_ = ParseMacAddress(node);
+  civicloc_ = civicloc;
+}
+
+void WmediumdMessageSetCivicloc::SerializeBody(std::string& buf) const {
+  std::copy(std::begin(mac_), std::end(mac_), std::back_inserter(buf));
+  std::copy(std::begin(civicloc_), std::end(civicloc_),
+            std::back_inserter(buf));
+  buf.push_back('\0');
+}
+
 std::optional<WmediumdMessageStationsList> WmediumdMessageStationsList::Parse(
     const WmediumdMessageReply& reply) {
   size_t pos = 0;
@@ -131,8 +205,14 @@
       LOG(ERROR) << "invalid response size";
       return std::nullopt;
     }
-    result.station_list_.push_back(
-        *reinterpret_cast<const wmediumd_station_info*>(data + pos));
+
+    const wmediumd_station_info* station =
+        reinterpret_cast<const wmediumd_station_info*>(data + pos);
+    std::string lci((char*)station + station->lci_offset);
+    std::string civicloc((char*)station + station->civicloc_offset);
+    result.station_list_.emplace_back(station->addr, station->hwaddr,
+                                      station->x, station->y, lci, civicloc,
+                                      station->tx_power);
     pos += sizeof(wmediumd_station_info);
   }
 
diff --git a/host/libs/wmediumd_controller/wmediumd_api_protocol.h b/host/libs/wmediumd_controller/wmediumd_api_protocol.h
index b5cedd6..a26e104 100644
--- a/host/libs/wmediumd_controller/wmediumd_api_protocol.h
+++ b/host/libs/wmediumd_controller/wmediumd_api_protocol.h
@@ -16,6 +16,7 @@
 
 #pragma once
 
+#include <array>
 #include <cstdint>
 #include <memory>
 #include <optional>
@@ -28,22 +29,49 @@
 namespace cuttlefish {
 
 enum class WmediumdMessageType : uint32_t {
-  kInvalid = 0,
-  kAck = 1,
-  kRegister = 2,
-  kUnregister = 3,
-  kNetlink = 4,
-  kSetControl = 5,
-  kTxStart = 6,
-  kGetStations = 7,
-  kSetSnr = 8,
-  kReloadConfig = 9,
-  kReloadCurrentConfig = 10,
-  kStartPcap = 11,
-  kStopPcap = 12,
-  kStationsList = 13,
+  kInvalid = WMEDIUMD_MSG_INVALID,
+  kAck = WMEDIUMD_MSG_ACK,
+  kRegister = WMEDIUMD_MSG_REGISTER,
+  kUnregister = WMEDIUMD_MSG_UNREGISTER,
+  kNetlink = WMEDIUMD_MSG_NETLINK,
+  kSetControl = WMEDIUMD_MSG_SET_CONTROL,
+  kTxStart = WMEDIUMD_MSG_TX_START,
+  kGetStations = WMEDIUMD_MSG_GET_STATIONS,
+  kSetSnr = WMEDIUMD_MSG_SET_SNR,
+  kReloadConfig = WMEDIUMD_MSG_RELOAD_CONFIG,
+  kReloadCurrentConfig = WMEDIUMD_MSG_RELOAD_CURRENT_CONFIG,
+  kStartPcap = WMEDIUMD_MSG_START_PCAP,
+  kStopPcap = WMEDIUMD_MSG_STOP_PCAP,
+  kStationsList = WMEDIUMD_MSG_STATIONS_LIST,
+  kSetPosition = WMEDIUMD_MSG_SET_POSITION,
+  kSetLci = WMEDIUMD_MSG_SET_LCI,
+  kSetCivicloc = WMEDIUMD_MSG_SET_CIVICLOC,
 };
 
+struct WmediumdStationInfo {
+  char addr[ETH_ALEN];
+  char hwaddr[ETH_ALEN];
+
+  double x;
+  double y;
+
+  std::string lci;
+  std::string civicloc;
+
+  int tx_power;
+
+  WmediumdStationInfo(const char addr[ETH_ALEN], const char hwaddr[ETH_ALEN],
+                      double x, double y, const std::string& lci,
+                      const std::string& civicloc, int tx_power)
+      : x(x), y(y), lci(lci), civicloc(civicloc), tx_power(tx_power) {
+    memcpy(this->addr, addr, sizeof(this->addr));
+    memcpy(this->hwaddr, hwaddr, sizeof(this->hwaddr));
+  }
+};
+
+bool ValidMacAddr(const std::string& macAddr);
+std::string MacToString(const char* macAddr);
+
 class WmediumdMessage {
  public:
   virtual ~WmediumdMessage() {}
@@ -81,8 +109,8 @@
  private:
   void SerializeBody(std::string& out) const override;
 
-  uint8_t node1_mac_[6];
-  uint8_t node2_mac_[6];
+  std::array<uint8_t, 6> node1_mac_;
+  std::array<uint8_t, 6> node2_mac_;
   uint8_t snr_;
 };
 
@@ -169,12 +197,59 @@
     return WmediumdMessageType::kStationsList;
   }
 
-  const std::vector<wmediumd_station_info>& GetStations() const {
+  const std::vector<WmediumdStationInfo>& GetStations() const {
     return station_list_;
   }
 
  private:
-  std::vector<wmediumd_station_info> station_list_;
+  std::vector<WmediumdStationInfo> station_list_;
+};
+
+class WmediumdMessageSetPosition : public WmediumdMessage {
+ public:
+  WmediumdMessageSetPosition(const std::string& node, double x, double y);
+
+  WmediumdMessageType Type() const override {
+    return WmediumdMessageType::kSetPosition;
+  }
+
+ private:
+  void SerializeBody(std::string& out) const override;
+
+  std::array<uint8_t, 6> mac_;
+  double x_;
+  double y_;
+};
+
+class WmediumdMessageSetLci : public WmediumdMessage {
+ public:
+  WmediumdMessageSetLci(const std::string& node, const std::string& lci);
+
+  WmediumdMessageType Type() const override {
+    return WmediumdMessageType::kSetLci;
+  }
+
+ private:
+  void SerializeBody(std::string& out) const override;
+
+  std::array<uint8_t, 6> mac_;
+  std::string lci_;
+};
+
+class WmediumdMessageSetCivicloc : public WmediumdMessage {
+ public:
+  WmediumdMessageSetCivicloc(const std::string& node,
+                             const std::string& civicloc);
+
+  WmediumdMessageType Type() const override {
+    return WmediumdMessageType::kSetCivicloc;
+  }
+
+ private:
+  void SerializeBody(std::string& out) const override;
+
+  std::array<uint8_t, 6> mac_;
+  std::string civicloc_;
 };
 
 }  // namespace cuttlefish
diff --git a/host/libs/wmediumd_controller/wmediumd_controller.cpp b/host/libs/wmediumd_controller/wmediumd_controller.cpp
index a1e2d10..fa38cdc 100644
--- a/host/libs/wmediumd_controller/wmediumd_controller.cpp
+++ b/host/libs/wmediumd_controller/wmediumd_controller.cpp
@@ -89,6 +89,21 @@
   return WmediumdMessageStationsList::Parse(*reply);
 }
 
+bool WmediumdController::SetPosition(const std::string& node, double x,
+                                     double y) {
+  return SendMessage(WmediumdMessageSetPosition(node, x, y));
+}
+
+bool WmediumdController::SetLci(const std::string& node,
+                                const std::string& lci) {
+  return SendMessage(WmediumdMessageSetLci(node, lci));
+}
+
+bool WmediumdController::SetCivicloc(const std::string& node,
+                                     const std::string& civicloc) {
+  return SendMessage(WmediumdMessageSetCivicloc(node, civicloc));
+}
+
 bool WmediumdController::SendMessage(const WmediumdMessage& message) {
   auto reply = SendMessageWithReply(message);
 
diff --git a/host/libs/wmediumd_controller/wmediumd_controller.h b/host/libs/wmediumd_controller/wmediumd_controller.h
index 10fd577..2613c57 100644
--- a/host/libs/wmediumd_controller/wmediumd_controller.h
+++ b/host/libs/wmediumd_controller/wmediumd_controller.h
@@ -44,6 +44,9 @@
   bool StartPcap(const std::string& pcapPath);
   bool StopPcap(void);
   std::optional<WmediumdMessageStationsList> GetStations(void);
+  bool SetPosition(const std::string& node, double x, double y);
+  bool SetLci(const std::string& node, const std::string& lci);
+  bool SetCivicloc(const std::string& node, const std::string& civicloc);
 
  private:
   WmediumdController() {}
diff --git a/host_package.mk b/host_package.mk
index e1afedf..aeb3fed 100644
--- a/host_package.mk
+++ b/host_package.mk
@@ -1,17 +1,23 @@
-cvd_host_packages := $(HOST_OUT)/cvd-host_package.tar.gz
+cvd_host_packages := $(HOST_OUT)/cvd-host_package
 ifeq ($(HOST_CROSS_OS)_$(HOST_CROSS_ARCH),linux_bionic_arm64)
-  cvd_host_packages += $(OUT_DIR)/host/$(HOST_CROSS_OS)-$(HOST_CROSS_ARCH)/cvd-host_package.tar.gz
+  cvd_host_packages += $(OUT_DIR)/host/$(HOST_CROSS_OS)-$(HOST_CROSS_ARCH)/cvd-host_package
 endif
 
-.PHONY: hosttar
-hosttar: $(cvd_host_packages)
+cvd_host_dir_stamps := $(addsuffix .stamp,$(cvd_host_packages))
+cvd_host_tarballs := $(addsuffix .tar.gz,$(cvd_host_packages))
 
-# Build this by default when a developer types make
-droidcore: $(cvd_host_packages)
+.PHONY: hosttar
+hosttar: $(cvd_host_tarballs)
+
+# Build this by default when a developer types make.
+# Skip the tarballs by default as it is time consuming.
+droidcore: $(cvd_host_dir_stamps)
 
 # Dist
 # Note that only the last package is dist'ed. It would be from x86 in case of cf_x86_phone,
 # and from arm64 in case of cf_arm64_phone.
-$(call dist-for-goals, dist_files,$(word $(words $(cvd_host_packages)), $(cvd_host_packages)))
+$(call dist-for-goals, dist_files,$(word $(words $(cvd_host_tarballs)), $(cvd_host_tarballs)))
 
+cvd_host_dir_stamps :=
 cvd_host_packages :=
+cvd_host_tarballs :=
diff --git a/iwyu.imp b/iwyu.imp
index 0c618ca..189b9f2 100644
--- a/iwyu.imp
+++ b/iwyu.imp
@@ -6,6 +6,12 @@
   { include: ["\"json/reader.h\"", "private", "<json/json.h>", "public"] },
   { include: ["\"json/value.h\"", "private", "<json/json.h>", "public"] },
   { include: ["\"json/writer.h\"", "private", "<json/json.h>", "public"] },
+  { include: ["\"gmock/gmock-actions.h\"", "private", "<gmock/gmock.h>", "public"] },
+  { include: ["\"gmock/gmock-matchers.h\"", "private", "<gmock/gmock.h>", "public"] },
+  { include: ["\"gtest/gtest-matchers.h\"", "private", "<gtest/gtest.h>", "public"] },
+  { include: ["\"gtest/gtest-message.h\"", "private", "<gtest/gtest.h>", "public"] },
+  { include: ["\"gtest/gtest-test-part.h\"", "private", "<gtest/gtest.h>", "public"] },
+  { include: ["\"gtest/gtest_pred_impl.h\"", "private", "<gtest/gtest.h>", "public"] },
   { symbol: ["std::forward", "private", "<utility>", "public" ] },
   { symbol: ["std::ifstream", "private", "<fstream>", "public" ] },
   { symbol: ["std::less", "private", "<functional>", "public" ] },
diff --git a/multiarch-howto.md b/multiarch-howto.md
deleted file mode 100644
index fe24a66..0000000
--- a/multiarch-howto.md
+++ /dev/null
@@ -1,57 +0,0 @@
-# Adjusting APT Sources for Multiarch
-
-The Cuttlefish host Debian packages can also be built and used on an `arm64`
-based system. However, because certain parts of it are still `amd64`, the
-APT sources of the system need to be adjusted for multiarch so that package
-dependencies can be correctly looked up and installed.
-
-For detailed context, see [Multiarch HOWTO](https://wiki.debian.org/Multiarch/HOWTO), and this document will use Ubuntu 21.04 (Hirsute) as an example for
-making such adjustments.
-
-The basic idea is to first limit the existing APT sources to `arm64` only,
-so that when a new architecture like `amd64` is added, APT won't try to
-fetch packages for the new architecture from the existing repository, as
-`arm64` packages are in "ports", while `amd64` ones are in the main
-repository. So a line in `/etc/apt/sources.list` such as:
-
-```
-deb http://ports.ubuntu.com/ubuntu-ports hirsute main restricted
-```
-
-would be changed to:
-
-```
-deb [arch=arm64] http://ports.ubuntu.com/ubuntu-ports hirsute main restricted
-```
-
-Next, each line of config like the above will be duplicated and modified into
-an entry that corresponds to what's in the main repository, with its
-architecture limited to `amd64`. For example, for the same line as shown above,
-a new entry will be added like this:
-
-```
-deb [arch=amd64] http://archive.ubuntu.com/ubuntu hirsute main restricted
-```
-
-The script below might be handy for this task:
-```bash
-#!/bin/bash
-cp /etc/apt/sources.list ~/sources.list.bak
-(
-  (grep ^deb /etc/apt/sources.list | sed 's/deb /deb [arch=arm64] /') && \
-  (grep ^deb /etc/apt/sources.list | sed 's/deb /deb [arch=amd64] /g; s/ports\.ubuntu/archive.ubuntu/g; s/ubuntu-ports/ubuntu/g') \
-) | tee /tmp/sources.list
-mv /tmp/sources.list /etc/apt/sources.list
-```
-**Note:** please run the above script as `root`, and adjust for differences in
-Ubuntu releases or location prefixed repositories for faster download (e.g.
-`us.archive.ubuntu.com` instead of `archive.ubuntu.com`).
-
-Finally, add the new architecture and do an APT update with:
-```bash
-sudo dpkg --add-architecture amd64
-sudo apt update
-```
-Make sure there's no errors or warnings in the output of `apt update`. To
-restore the previous APT sources list, use the backup file `sources.list.bak`
-saved by the script in your home directory.
diff --git a/required_images b/required_images
index e6f0616..7ae2fef 100644
--- a/required_images
+++ b/required_images
@@ -5,4 +5,5 @@
 userdata.img
 vbmeta.img
 vbmeta_system.img
+vbmeta_vendor_dlkm.img
 vendor_boot.img
diff --git a/shared/BoardConfig.mk b/shared/BoardConfig.mk
index b59b39c..133454c 100644
--- a/shared/BoardConfig.mk
+++ b/shared/BoardConfig.mk
@@ -18,12 +18,55 @@
 # Common BoardConfig for all supported architectures.
 #
 
+TARGET_KERNEL_USE ?= 6.1
+TARGET_KERNEL_ARCH ?= $(TARGET_ARCH)
+SYSTEM_DLKM_SRC ?= kernel/prebuilts/$(TARGET_KERNEL_USE)/$(TARGET_KERNEL_ARCH)
+TARGET_KERNEL_PATH ?= $(SYSTEM_DLKM_SRC)/kernel-$(TARGET_KERNEL_USE)
+KERNEL_MODULES_PATH ?= \
+    kernel/prebuilts/common-modules/virtual-device/$(TARGET_KERNEL_USE)/$(subst _,-,$(TARGET_KERNEL_ARCH))
+PRODUCT_COPY_FILES += $(TARGET_KERNEL_PATH):kernel
+
+# The list of modules strictly/only required either to reach second stage
+# init, OR for recovery. Do not use this list to workaround second stage
+# issues.
+RAMDISK_KERNEL_MODULES := \
+    failover.ko \
+    nd_virtio.ko \
+    net_failover.ko \
+    virtio_blk.ko \
+    virtio_console.ko \
+    virtio_dma_buf.ko \
+    virtio-gpu.ko \
+    virtio_input.ko \
+    virtio_net.ko \
+    virtio_pci.ko \
+    virtio-rng.ko \
+    vmw_vsock_virtio_transport.ko \
+
+BOARD_VENDOR_RAMDISK_KERNEL_MODULES := \
+    $(patsubst %,$(KERNEL_MODULES_PATH)/%,$(RAMDISK_KERNEL_MODULES))
+
+# GKI >5.15 will have and require virtio_pci_legacy_dev.ko
+BOARD_VENDOR_RAMDISK_KERNEL_MODULES += $(wildcard $(KERNEL_MODULES_PATH)/virtio_pci_legacy_dev.ko)
+# GKI >5.10 will have and require virtio_pci_modern_dev.ko
+BOARD_VENDOR_RAMDISK_KERNEL_MODULES += $(wildcard $(KERNEL_MODULES_PATH)/virtio_pci_modern_dev.ko)
+
+ALL_KERNEL_MODULES := $(wildcard $(KERNEL_MODULES_PATH)/*.ko)
+BOARD_VENDOR_KERNEL_MODULES := \
+    $(filter-out $(BOARD_VENDOR_RAMDISK_KERNEL_MODULES),\
+                 $(wildcard $(KERNEL_MODULES_PATH)/*.ko))
+
 # TODO(b/170639028): Back up TARGET_NO_BOOTLOADER
 __TARGET_NO_BOOTLOADER := $(TARGET_NO_BOOTLOADER)
 include build/make/target/board/BoardConfigMainlineCommon.mk
 TARGET_NO_BOOTLOADER := $(__TARGET_NO_BOOTLOADER)
 
+BOARD_VENDOR_KERNEL_MODULES_BLOCKLIST_FILE := \
+    device/google/cuttlefish/shared/modules.blocklist
+
+ifndef TARGET_BOOTLOADER_BOARD_NAME
 TARGET_BOOTLOADER_BOARD_NAME := cutf
+endif
 
 BOARD_SYSTEMIMAGE_FILE_SYSTEM_TYPE := $(TARGET_RO_FILE_SYSTEM_TYPE)
 
@@ -75,6 +118,7 @@
 BOARD_USES_SYSTEM_DLKMIMAGE := true
 BOARD_SYSTEM_DLKMIMAGE_FILE_SYSTEM_TYPE := $(TARGET_RO_FILE_SYSTEM_TYPE)
 TARGET_COPY_OUT_SYSTEM_DLKM := system_dlkm
+BOARD_SYSTEM_KERNEL_MODULES := $(wildcard $(SYSTEM_DLKM_SRC)/*.ko)
 
 # Enable AVB
 BOARD_AVB_ENABLE := true
@@ -100,6 +144,15 @@
 BOARD_AVB_INIT_BOOT_ROLLBACK_INDEX := $(PLATFORM_SECURITY_PATCH_TIMESTAMP)
 BOARD_AVB_INIT_BOOT_ROLLBACK_INDEX_LOCATION := 3
 
+# Enabled chained vbmeta for vendor_dlkm
+BOARD_AVB_VBMETA_CUSTOM_PARTITIONS := vendor_dlkm
+BOARD_AVB_VBMETA_VENDOR_DLKM := vendor_dlkm
+BOARD_AVB_VBMETA_VENDOR_DLKM_KEY_PATH := external/avb/test/data/testkey_rsa4096.pem
+BOARD_AVB_VBMETA_VENDOR_DLKM_ALGORITHM := SHA256_RSA4096
+BOARD_AVB_VBMETA_VENDOR_DLKM_ROLLBACK_INDEX := $(PLATFORM_SECURITY_PATCH_TIMESTAMP)
+BOARD_AVB_VBMETA_VENDOR_DLKM_ROLLBACK_INDEX_LOCATION := 4
+
+
 # Using sha256 for dm-verity partitions. b/178983355
 # system, system_other, product.
 TARGET_AVB_SYSTEM_HASHTREE_ALGORITHM ?= sha256
@@ -132,18 +185,16 @@
 # The compiler will occasionally generate movaps, etc.
 BOARD_MALLOC_ALIGNMENT := 16
 
-# Disable sparse on all filesystem images
-TARGET_USERIMAGES_SPARSE_EROFS_DISABLED ?= true
-TARGET_USERIMAGES_SPARSE_EXT_DISABLED ?= true
-TARGET_USERIMAGES_SPARSE_F2FS_DISABLED ?= true
+# Enable sparse on all filesystem images
+TARGET_USERIMAGES_SPARSE_EROFS_DISABLED ?= false
+TARGET_USERIMAGES_SPARSE_EXT_DISABLED ?= false
+TARGET_USERIMAGES_SPARSE_F2FS_DISABLED ?= false
 
 # Make the userdata partition 6G to accommodate ASAN and CTS
 BOARD_USERDATAIMAGE_PARTITION_SIZE := $(TARGET_USERDATAIMAGE_PARTITION_SIZE)
 BOARD_USERDATAIMAGE_FILE_SYSTEM_TYPE := $(TARGET_USERDATAIMAGE_FILE_SYSTEM_TYPE)
 TARGET_USERIMAGES_USE_F2FS := true
 
-BOARD_GPU_DRIVERS := virgl
-
 # Enable goldfish's encoder.
 # TODO(b/113617962) Remove this if we decide to use
 # device/generic/opengl-transport to generate the encoder
@@ -163,14 +214,10 @@
 USE_OPENGL_RENDERER := true
 
 # Wifi.
-ifeq ($(PRODUCT_ENFORCE_MAC80211_HWSIM),true)
 BOARD_WLAN_DEVICE           := emulator
 BOARD_HOSTAPD_PRIVATE_LIB   := lib_driver_cmd_simulated_cf
 WIFI_HIDL_FEATURE_DUAL_INTERFACE := true
 WIFI_HAL_INTERFACE_COMBINATIONS := {{{STA}, 1}, {{AP}, 1}, {{P2P}, 1}}
-else
-BOARD_WLAN_DEVICE           := wlan0
-endif
 BOARD_HOSTAPD_DRIVER        := NL80211
 BOARD_WPA_SUPPLICANT_DRIVER := NL80211
 BOARD_WPA_SUPPLICANT_PRIVATE_LIB := lib_driver_cmd_simulated_cf
@@ -204,7 +251,7 @@
 
 TARGET_RECOVERY_PIXEL_FORMAT := ABGR_8888
 TARGET_RECOVERY_UI_LIB := librecovery_ui_cuttlefish
-TARGET_RECOVERY_FSTAB ?= device/google/cuttlefish/shared/config/fstab.f2fs
+TARGET_RECOVERY_FSTAB_GENRULE := gen_fstab_cf_f2fs_cts
 
 BOARD_SUPER_PARTITION_SIZE := 7516192768  # 7GiB
 BOARD_SUPER_PARTITION_GROUPS := google_system_dynamic_partitions google_vendor_dynamic_partitions
@@ -247,6 +294,11 @@
 BOARD_KERNEL_CMDLINE += loop.max_part=7
 BOARD_KERNEL_CMDLINE += init=/init
 
+# Enable KUnit for userdebug and eng builds
+ifneq (,$(filter userdebug eng, $(TARGET_BUILD_VARIANT)))
+  BOARD_KERNEL_CMDLINE += kunit.enable=1
+endif
+
 BOARD_BOOTCONFIG += androidboot.hardware=cutf_cvm
 
 # TODO(b/182417593): Move all of these module options to modules.options
@@ -257,7 +309,6 @@
     kernel.vmw_vsock_virtio_transport_common.virtio_transport_max_vsock_pkt_buf_size=16384
 
 BOARD_BOOTCONFIG += \
-    androidboot.vendor.apex.com.android.wifi.hal=com.google.cf.wifi_hwsim \
     androidboot.vendor.apex.com.google.emulated.camera.provider.hal=com.google.emulated.camera.provider.hal \
 
 BOARD_INCLUDE_DTB_IN_BOOTIMG := true
@@ -271,8 +322,6 @@
     device/google/cuttlefish/dtb.img:dtb.img \
     device/google/cuttlefish/required_images:required_images \
 
-BOARD_BUILD_SYSTEM_ROOT_IMAGE := false
-
 # Cuttlefish doesn't support ramdump feature yet, exclude the ramdump debug tool.
 EXCLUDE_BUILD_RAMDUMP_UPLOADER_DEBUG_TOOL := true
 
@@ -280,7 +329,7 @@
 BOARD_USES_GENERIC_KERNEL_IMAGE := true
 ifdef TARGET_DEDICATED_RECOVERY
   BOARD_EXCLUDE_KERNEL_FROM_RECOVERY_IMAGE := true
-else
+else ifneq ($(PRODUCT_BUILD_VENDOR_BOOT_IMAGE), false)
   BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT := true
 endif
 BOARD_MOVE_GSI_AVB_KEYS_TO_VENDOR_BOOT := true
@@ -288,3 +337,54 @@
 BOARD_GENERIC_RAMDISK_KERNEL_MODULES_LOAD := dm-user.ko
 
 BOARD_HAVE_BLUETOOTH := true
+
+# Enable the new fingerprint format on cuttlefish
+BOARD_USE_VBMETA_DIGTEST_IN_FINGERPRINT := true
+
+# Set AB OTA partitions based on the build configuration
+AB_OTA_UPDATER := true
+
+ifneq ($(PRODUCT_BUILD_VENDOR_IMAGE), false)
+AB_OTA_PARTITIONS += vendor
+AB_OTA_PARTITIONS += vendor_dlkm
+ifneq ($(BOARD_AVB_VBMETA_VENDOR_DLKM),)
+AB_OTA_PARTITIONS += vbmeta_vendor_dlkm
+endif
+endif
+
+ifneq ($(PRODUCT_BUILD_BOOT_IMAGE), false)
+AB_OTA_PARTITIONS += boot
+endif
+
+ifneq ($(PRODUCT_BUILD_INIT_BOOT_IMAGE), false)
+AB_OTA_PARTITIONS += init_boot
+endif
+
+ifneq ($(PRODUCT_BUILD_VENDOR_BOOT_IMAGE), false)
+AB_OTA_PARTITIONS += vendor_boot
+endif
+
+ifneq ($(PRODUCT_BUILD_ODM_IMAGE), false)
+AB_OTA_PARTITIONS += odm
+AB_OTA_PARTITIONS += odm_dlkm
+endif
+
+ifneq ($(PRODUCT_BUILD_PRODUCT_IMAGE), false)
+AB_OTA_PARTITIONS += product
+endif
+
+ifneq ($(PRODUCT_BUILD_SYSTEM_IMAGE), false)
+AB_OTA_PARTITIONS += system
+AB_OTA_PARTITIONS += system_dlkm
+ifneq ($(PRODUCT_BUILD_VBMETA_IMAGE), false)
+AB_OTA_PARTITIONS += vbmeta_system
+endif
+endif
+
+ifneq ($(PRODUCT_BUILD_SYSTEM_EXT_IMAGE), false)
+AB_OTA_PARTITIONS += system_ext
+endif
+
+ifneq ($(PRODUCT_BUILD_VBMETA_IMAGE), false)
+AB_OTA_PARTITIONS += vbmeta
+endif
diff --git a/shared/angle/BoardConfig.mk b/shared/angle/BoardConfig.mk
new file mode 100644
index 0000000..8fa99c6
--- /dev/null
+++ b/shared/angle/BoardConfig.mk
@@ -0,0 +1,17 @@
+#
+# Copyright 2022 The Android Open-Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+BOARD_VENDOR_SEPOLICY_DIRS += device/google/cuttlefish/shared/ANGLE/sepolicy
diff --git a/shared/angle/device_vendor.mk b/shared/angle/device_vendor.mk
new file mode 100644
index 0000000..d0602b1
--- /dev/null
+++ b/shared/angle/device_vendor.mk
@@ -0,0 +1,25 @@
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+ifeq ($(TARGET_VULKAN_SUPPORT),true)
+
+# ANGLE provides an OpenGL implementation built on top of Vulkan.
+PRODUCT_PACKAGES += \
+    libEGL_angle \
+    libGLESv1_CM_angle \
+    libGLESv2_angle \
+
+endif
diff --git a/shared/angle/sepolicy/OWNERS b/shared/angle/sepolicy/OWNERS
new file mode 100644
index 0000000..9b37b0e
--- /dev/null
+++ b/shared/angle/sepolicy/OWNERS
@@ -0,0 +1,4 @@
+include platform/system/sepolicy:/OWNERS
+
+adelva@google.com
+rurumihong@google.com
diff --git a/shared/angle/sepolicy/file_contexts b/shared/angle/sepolicy/file_contexts
new file mode 100644
index 0000000..3fdf816
--- /dev/null
+++ b/shared/angle/sepolicy/file_contexts
@@ -0,0 +1,4 @@
+/vendor/lib(64)?/libEGL_angle\.so              u:object_r:same_process_hal_file:s0
+/vendor/lib(64)?/libGLESv1_CM_angle\.so        u:object_r:same_process_hal_file:s0
+/vendor/lib(64)?/libGLESv2_angle\.so           u:object_r:same_process_hal_file:s0
+/vendor/lib(64)?/libfeature_support_angle\.so  u:object_r:same_process_hal_file:s0
diff --git a/shared/api_level.h b/shared/api_level.h
new file mode 100644
index 0000000..36b3a87
--- /dev/null
+++ b/shared/api_level.h
@@ -0,0 +1,18 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#pragma once
+
+#define PRODUCT_SHIPPING_API_LEVEL 34
diff --git a/shared/auto/OWNERS b/shared/auto/OWNERS
index 1b990ef..f97912a 100644
--- a/shared/auto/OWNERS
+++ b/shared/auto/OWNERS
@@ -1,5 +1,4 @@
 # Android Auto leads
+include platform/packages/services/Car:/OWNERS
 ankitarora@google.com
 egranata@google.com
-gurunagarajan@google.com
-keunyoung@google.com
diff --git a/shared/auto/audio_policy_configuration.xml b/shared/auto/audio_policy_configuration.xml
index f1ac5ad..dedc831 100644
--- a/shared/auto/audio_policy_configuration.xml
+++ b/shared/auto/audio_policy_configuration.xml
@@ -29,6 +29,7 @@
         <attachedDevices>
             <item>Speaker</item>
             <item>Built-In Mic</item>
+            <item>FM Tuner</item>
         </attachedDevices>
         <defaultOutputDevice>Speaker</defaultOutputDevice>
         <mixPorts>
@@ -40,6 +41,10 @@
                 <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
                     samplingRates="8000,16000" channelMasks="AUDIO_CHANNEL_IN_MONO"/>
             </mixPort>
+            <mixPort name="mixport_tuner0" role="sink">
+                <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                    samplingRates="48000" channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
+            </mixPort>
         </mixPorts>
         <devicePorts>
             <devicePort tagName="Speaker" role="sink" type="AUDIO_DEVICE_OUT_BUS"
@@ -54,12 +59,24 @@
 
             <devicePort tagName="Built-In Mic" type="AUDIO_DEVICE_IN_BUILTIN_MIC" role="source">
             </devicePort>
+
+            <devicePort tagName="FM Tuner" type="AUDIO_DEVICE_IN_FM_TUNER" role="source"
+                address="tuner0">
+                <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                    samplingRates="48000" channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
+                <gains>
+                    <gain name="" mode="AUDIO_GAIN_MODE_JOINT"
+                        minValueMB="-3200" maxValueMB="600" defaultValueMB="0" stepValueMB="100"/>
+                </gains>
+            </devicePort>
         </devicePorts>
         <routes>
             <route type="mix" sink="Speaker"
                 sources="primary output"/>
             <route type="mix" sink="primary input"
                 sources="Built-In Mic"/>
+            <route type="mix" sink="mixport_tuner0"
+                sources="FM Tuner"/>
         </routes>
       </module>
 
diff --git a/shared/auto/car_audio_configuration.xml b/shared/auto/car_audio_configuration.xml
index 53ca217..482726e 100644
--- a/shared/auto/car_audio_configuration.xml
+++ b/shared/auto/car_audio_configuration.xml
@@ -21,27 +21,31 @@
     - Volume groups
   in the car environment.
 -->
-<carAudioConfiguration version="2">
+<carAudioConfiguration version="3">
     <zones>
-        <zone name="primary zone" isPrimary="true">
-            <volumeGroups>
-                <group>
-                    <device address="Speaker">
-                        <context context="music"/>
-                        <context context="navigation"/>
-                        <context context="voice_command"/>
-                        <context context="call_ring"/>
-                        <context context="call"/>
-                        <context context="alarm"/>
-                        <context context="notification"/>
-                        <context context="system_sound"/>
-                        <context context="emergency"/>
-                        <context context="safety"/>
-                        <context context="vehicle_status"/>
-                        <context context="announcement"/>
-                    </device>
-                </group>
-            </volumeGroups>
+        <zone name="Primary zone" isPrimary="true" occupantZoneId="0">
+            <zoneConfigs>
+                <zoneConfig name="Config 0" isDefault="true">
+                    <volumeGroups>
+                        <group>
+                            <device address="Speaker">
+                                <context context="music"/>
+                                <context context="navigation"/>
+                                <context context="voice_command"/>
+                                <context context="call_ring"/>
+                                <context context="call"/>
+                                <context context="alarm"/>
+                                <context context="notification"/>
+                                <context context="system_sound"/>
+                                <context context="emergency"/>
+                                <context context="safety"/>
+                                <context context="vehicle_status"/>
+                                <context context="announcement"/>
+                            </device>
+                        </group>
+                    </volumeGroups>
+                </zoneConfig>
+            </zoneConfigs>
         </zone>
     </zones>
 </carAudioConfiguration>
diff --git a/shared/auto/device_vendor.mk b/shared/auto/device_vendor.mk
index 277eaa3..d32b5eb 100644
--- a/shared/auto/device_vendor.mk
+++ b/shared/auto/device_vendor.mk
@@ -14,33 +14,19 @@
 # limitations under the License.
 #
 
-DEVICE_MANIFEST_FILE += device/google/cuttlefish/shared/auto/manifest.xml
 PRODUCT_MANIFEST_FILES += device/google/cuttlefish/shared/config/product_manifest.xml
 SYSTEM_EXT_MANIFEST_FILES += device/google/cuttlefish/shared/config/system_ext_manifest.xml
 
 $(call inherit-product, $(SRC_TARGET_DIR)/product/handheld_vendor.mk)
-$(call inherit-product, frameworks/native/build/phone-xhdpi-2048-dalvik-heap.mk)
 $(call inherit-product, packages/services/Car/car_product/build/car.mk)
+
+$(call inherit-product, frameworks/native/build/phone-xhdpi-2048-dalvik-heap.mk)
+$(call inherit-product, device/google/cuttlefish/shared/graphics/device_vendor.mk)
+$(call inherit-product, device/google/cuttlefish/shared/swiftshader/device_vendor.mk)
+$(call inherit-product, device/google/cuttlefish/shared/telephony/device_vendor.mk)
+$(call inherit-product, device/google/cuttlefish/shared/virgl/device_vendor.mk)
 $(call inherit-product, device/google/cuttlefish/shared/device.mk)
 
-PRODUCT_VENDOR_PROPERTIES += \
-    keyguard.no_require_sim=true \
-    ro.cdma.home.operator.alpha=Android \
-    ro.cdma.home.operator.numeric=302780 \
-    ro.com.android.dataroaming=true \
-    ro.telephony.default_network=9 \
-
-# Cuttlefish RIL support
-TARGET_USES_CF_RILD ?= true
-ifeq ($(TARGET_USES_CF_RILD),true)
-$(call inherit-product, $(SRC_TARGET_DIR)/product/telephony_system_ext.mk)
-PRODUCT_PACKAGES += \
-    libcuttlefish-ril-2 \
-    libcuttlefish-rild
-else
-TARGET_NO_TELEPHONY := true
-endif
-
 # Extend cuttlefish common sepolicy with auto-specific functionality
 BOARD_SEPOLICY_DIRS += device/google/cuttlefish/shared/auto/sepolicy/vendor
 
@@ -88,8 +74,14 @@
 endif
 PRODUCT_PACKAGES += $(LOCAL_VHAL_PRODUCT_PACKAGE)
 
+# Remote access HAL
+PRODUCT_PACKAGES += android.hardware.automotive.remoteaccess@V1-default-service
+
 # Broadcast Radio
-PRODUCT_PACKAGES += android.hardware.broadcastradio@2.0-service
+PRODUCT_PACKAGES += android.hardware.broadcastradio-service.default
+
+# IVN HAL
+PRODUCT_PACKAGES += android.hardware.automotive.ivn@V1-default-service
 
 # AudioControl HAL
 ifeq ($(LOCAL_AUDIOCONTROL_HAL_PRODUCT_PACKAGE),)
@@ -99,11 +91,16 @@
 PRODUCT_PACKAGES += $(LOCAL_AUDIOCONTROL_HAL_PRODUCT_PACKAGE)
 
 # CAN bus HAL
-PRODUCT_PACKAGES += android.hardware.automotive.can@1.0-service
+PRODUCT_PACKAGES += android.hardware.automotive.can-service
 PRODUCT_PACKAGES_DEBUG += canhalctrl \
     canhaldump \
     canhalsend
 
+# Occupant Awareness HAL
+PRODUCT_PACKAGES += android.hardware.automotive.occupant_awareness@1.0-service
+include packages/services/Car/car_product/occupant_awareness/OccupantAwareness.mk
+BOARD_SEPOLICY_DIRS += packages/services/Car/car_product/occupant_awareness/sepolicy
+
 # EVS
 # By default, we enable EvsManager, a sample EVS app, and a mock EVS HAL implementation.
 # If you want to use your own EVS HAL implementation, please set ENABLE_MOCK_EVSHAL as false
@@ -115,11 +112,14 @@
 ENABLE_MOCK_EVSHAL ?= true
 ENABLE_CAREVSSERVICE_SAMPLE ?= true
 ENABLE_SAMPLE_EVS_APP ?= true
+ENABLE_CARTELEMETRY_SERVICE ?= true
 
 ifeq ($(ENABLE_MOCK_EVSHAL), true)
 CUSTOMIZE_EVS_SERVICE_PARAMETER := true
-PRODUCT_PACKAGES += android.hardware.automotive.evs@1.1-service \
-    android.frameworks.automotive.display@1.0-service
+PRODUCT_PACKAGES += \
+    android.hardware.automotive.evs-aidl-default-service \
+    cardisplayproxyd
+
 PRODUCT_COPY_FILES += \
     device/google/cuttlefish/shared/auto/evs/init.evs.rc:$(TARGET_COPY_OUT_VENDOR)/etc/init/init.evs.rc
 BOARD_SEPOLICY_DIRS += device/google/cuttlefish/shared/auto/sepolicy/evs
@@ -128,8 +128,8 @@
 ifeq ($(ENABLE_SAMPLE_EVS_APP), true)
 PRODUCT_PACKAGES += evs_app
 PRODUCT_COPY_FILES += \
-    device/google/cuttlefish/shared/auto/evs/evs_app_config.json:$(TARGET_COPY_OUT_SYSTEM)/etc/automotive/evs/config_override.json
-BOARD_SEPOLICY_DIRS += packages/services/Car/cpp/evs/apps/sepolicy/private
+    device/google/cuttlefish/shared/auto/evs/evs_app_config.json:$(TARGET_COPY_OUT_VENDOR)/etc/automotive/evs/config_override.json
+include packages/services/Car/cpp/evs/apps/sepolicy/evsapp.mk
 endif
 
 BOARD_IS_AUTOMOTIVE := true
diff --git a/shared/auto/evs/init.evs.rc b/shared/auto/evs/init.evs.rc
index f7dace5..d0b997a 100644
--- a/shared/auto/evs/init.evs.rc
+++ b/shared/auto/evs/init.evs.rc
@@ -1,6 +1,6 @@
 on late-init
-    start automotive_display
-    start vendor.evs-hal-mock
+    start cardisplayproxyd
+    start vendor.evs-hal-cf
     start evs_manager_cf
 
 service evs_manager_cf /system/bin/evsmanagerd --target hw/0
@@ -9,3 +9,12 @@
     user automotive_evs
     group automotive_evs system
     disabled # will not automatically start with its class; must be explicitly started.
+
+service vendor.evs-hal-cf /vendor/bin/hw/android.hardware.automotive.evs-aidl-default-service
+    class hal
+    priority -20
+    user graphics
+    group automotive_evs camera
+    onrestart restart cardisplayproxyd
+    onrestart restart evs_manager_cf
+    disabled # will not automatically start with its class; must be explicitly started.
diff --git a/shared/auto/manifest.xml b/shared/auto/manifest.xml
deleted file mode 100644
index e1691a1..0000000
--- a/shared/auto/manifest.xml
+++ /dev/null
@@ -1,18 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<!--
-/*
-** Copyright 2018, The Android Open Source Project.
-**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** limitations under the License.
-*/
--->
-<!-- Android Auto Embedded specific HALs-->
-<manifest version="1.0" type="device">
-    <hal format="hidl">
-        <name>android.hardware.automotive.can</name>
-        <transport>hwbinder</transport>
-        <fqname>@1.0::ICanBus/test</fqname>
-    </hal>
-</manifest>
diff --git a/shared/auto/overlay/frameworks/base/core/res/res/values/config.xml b/shared/auto/overlay/frameworks/base/core/res/res/values/config.xml
index bf07d05..93e1734 100644
--- a/shared/auto/overlay/frameworks/base/core/res/res/values/config.xml
+++ b/shared/auto/overlay/frameworks/base/core/res/res/values/config.xml
@@ -34,4 +34,14 @@
       Handle volume keys directly in CarAudioService without passing them to the foreground app
     -->
     <bool name="config_handleVolumeKeysInWindowManager">true</bool>
+
+    <!-- Show the "Adaptive Brightness" toggle. -->
+    <bool name="config_automatic_brightness_available">true</bool>
+
+    <!-- Controls if local secondary displays should be private or not. Value specified in the array
+         represents physical port address of each display and display in this list will be marked
+         as private. {@see android.view.Display#FLAG_PRIVATE} -->
+    <integer-array translatable="false" name="config_localPrivateDisplayPorts">
+        <item>1</item> <!-- ClusterDisplay -->
+    </integer-array>
 </resources>
diff --git a/shared/auto/preinstalled-packages-product-car-cuttlefish.xml b/shared/auto/preinstalled-packages-product-car-cuttlefish.xml
index 9f22200..90ba3b6 100644
--- a/shared/auto/preinstalled-packages-product-car-cuttlefish.xml
+++ b/shared/auto/preinstalled-packages-product-car-cuttlefish.xml
@@ -30,10 +30,6 @@
         <install-in user-type="FULL" />
         <install-in user-type="SYSTEM" />
     </install-in-user-type>
-    <install-in-user-type package="com.android.car.hvac">
-        <install-in user-type="FULL" />
-        <install-in user-type="SYSTEM" />
-    </install-in-user-type>
     <install-in-user-type package="com.android.phone">
         <install-in user-type="FULL" />
         <install-in user-type="SYSTEM" />
@@ -46,11 +42,6 @@
         <install-in user-type="FULL" />
         <install-in user-type="SYSTEM" />
     </install-in-user-type>
-    <!-- Android remote display which need to work for all users-->
-    <install-in-user-type package="com.android.car.acast.source">
-        <install-in user-type="FULL" />
-        <install-in user-type="SYSTEM" />
-    </install-in-user-type>
     <!-- This application is needed in ModuleInfoProvider -->
     <install-in-user-type package="com.android.modulemetadata">
         <install-in user-type="FULL" />
@@ -94,10 +85,6 @@
     <install-in-user-type package="com.android.cameraextensions">
         <install-in user-type="SYSTEM" />
         <install-in user-type="FULL" />
-   </install-in-user-type>
-   <install-in-user-type package="com.android.car.messenger">
-	<install-in user-type="FULL" />
-        <install-in user-type="SYSTEM" />
     </install-in-user-type>
     <install-in-user-type package="com.android.apps.tag">
         <install-in user-type="FULL" />
@@ -124,9 +111,6 @@
     <install-in-user-type package="com.android.car.datacenter">
         <install-in user-type="FULL" />
     </install-in-user-type>
-    <install-in-user-type package="com.android.car.dialer">
-        <install-in user-type="FULL" />
-    </install-in-user-type>
     <install-in-user-type package="com.android.car.goldilocks">
         <install-in user-type="FULL" />
     </install-in-user-type>
diff --git a/shared/auto/rro_overlay/CarServiceOverlay/res/values/config.xml b/shared/auto/rro_overlay/CarServiceOverlay/res/values/config.xml
index 6cfd2cc..e889df3 100644
--- a/shared/auto/rro_overlay/CarServiceOverlay/res/values/config.xml
+++ b/shared/auto/rro_overlay/CarServiceOverlay/res/values/config.xml
@@ -61,22 +61,27 @@
         occupant.
 
         Some examples are:
-        <item>displayPort=0,displayType=MAIN,occupantZoneId=0</item>
-        <item>displayPort=1,displayType=INSTRUMENT_CLUSTER,occupantZoneId=0</item>
-        <item>displayPort=2,displayType=MAIN,occupantZoneId=1</item>
-        <item>displayPort=3,displayType=MAIN,occupantZoneId=2</item>
-        <item>displayPort=4,displayType=MAIN,occupantZoneId=3</item>
+        <item>displayPort=0,displayType=MAIN,occupantZoneId=0,inputTypes=DPAD_KEYS|
+            NAVIGATE_KEYS|ROTARY_NAVIGATION</item>
+        <item>displayPort=1,displayType=INSTRUMENT_CLUSTER,occupantZoneId=0,
+            inputTypes=DPAD_KEYS</item>
+        <item>displayPort=2,displayType=MAIN,occupantZoneId=1,
+            inputTypes=NAVIGATE_KEYS</item>
+        <item>displayPort=3,displayType=MAIN,occupantZoneId=2,
+            inputTypes=NAVIGATE_KEYS</item>
+        <item>displayUniqueId=virtual:com.example:MainD,displayType=MAIN,occupantZoneId=3,
+            inputTypes=NAVIGATE_KEYS</item>
 
         displayPort: Unique port id for the display.
         displayType: Display type for the display. Use * part from
                        CarOccupantZoneManager.DISPLAY_TYPE_* like MAIN, INSTRUMENT_CLUSTER and
                        etc.
         occupantZoneId: occupantZoneId specified from config_occupant_zones.
-
+        inputTypes: supported input types for the corresponding display.
     -->
     <string-array translatable="false" name="config_occupant_display_mapping">
-        <item>displayPort=0,displayType=MAIN,occupantZoneId=0</item>
-        <item>displayPort=1,displayType=INSTRUMENT_CLUSTER,occupantZoneId=0</item>
+        <item>displayPort=0,displayType=MAIN,occupantZoneId=0,inputTypes=TOUCH_SCREEN|DPAD_KEYS|NAVIGATE_KEYS|ROTARY_NAVIGATION</item>
+        <item>displayPort=1,displayType=INSTRUMENT_CLUSTER,occupantZoneId=0,inputTypes=DPAD_KEYS</item>
     </string-array>
 
     <!-- A name of a camera device that provides the rearview through EVS service -->
diff --git a/shared/auto/sepolicy/vendor/hal_remoteaccess_default.te b/shared/auto/sepolicy/vendor/hal_remoteaccess_default.te
new file mode 100644
index 0000000..b734334
--- /dev/null
+++ b/shared/auto/sepolicy/vendor/hal_remoteaccess_default.te
@@ -0,0 +1,13 @@
+# Allow remoteaccess HAL to communicate with remote wakeup client via local
+# socket.
+typeattribute hal_remoteaccess_default hal_automotive_socket_exemption;
+net_domain(hal_remoteaccess_default)
+allow hal_remoteaccess_default self:vsock_socket write;
+
+# Allow accessing VHAL.
+binder_use(hal_remoteaccess_default)
+hal_client_domain(hal_remoteaccess_default, hal_vehicle)
+
+# Allow debug dump
+allow hal_remoteaccess_default shell:fd use;
+allow hal_remoteaccess_default shell:fifo_file write;
diff --git a/shared/auto_md/OWNERS b/shared/auto_md/OWNERS
new file mode 100644
index 0000000..d3a1f82
--- /dev/null
+++ b/shared/auto_md/OWNERS
@@ -0,0 +1,2 @@
+include device/google/cuttlefish:/shared/auto/OWNERS
+ycheo@google.com
diff --git a/shared/auto_md/android-info.txt b/shared/auto_md/android-info.txt
new file mode 100644
index 0000000..ac7c0e7
--- /dev/null
+++ b/shared/auto_md/android-info.txt
@@ -0,0 +1 @@
+config=auto_md
diff --git a/shared/auto_md/display_settings.xml b/shared/auto_md/display_settings.xml
new file mode 100644
index 0000000..1922d42
--- /dev/null
+++ b/shared/auto_md/display_settings.xml
@@ -0,0 +1,42 @@
+<?xml version='1.0' encoding='utf-8' standalone='yes' ?>
+<!--
+/*
+** Copyright 2022, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+-->
+
+<display-settings>
+
+  <!-- Use physical port number instead of local id -->
+  <config identifier="1" />
+
+  <!-- Display settings for cluster -->
+  <display name="port:1"
+      forcedDensity="120"
+      dontMoveToTop="true"/>
+
+  <!-- Display settings for 1st passenger display / 2nd Home -->
+  <display name="port:2"
+      shouldShowSystemDecors="true"
+      shouldShowIme="true"
+      forcedDensity="120" />
+
+  <!-- Display settings for 2nd passenger display / 3rd Home -->
+  <display name="port:3"
+      shouldShowSystemDecors="true"
+      shouldShowIme="true"
+      forcedDensity="120" />
+
+</display-settings>
diff --git a/shared/auto_md/overlay/frameworks/base/core/res/res/values/config.xml b/shared/auto_md/overlay/frameworks/base/core/res/res/values/config.xml
new file mode 100644
index 0000000..2ffb57a
--- /dev/null
+++ b/shared/auto_md/overlay/frameworks/base/core/res/res/values/config.xml
@@ -0,0 +1,66 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+/*
+** Copyright 2022, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+-->
+
+<resources xmlns:xliff="urn:oasis:names:tc:xliff:document:1.2">
+    <!-- Whether the system enables per-display focus. If the system has the input method for each
+         display, this value should be true. -->
+    <bool name="config_perDisplayFocusEnabled">true</bool>
+
+    <!--  Maximum number of supported users -->
+    <integer name="config_multiuserMaximumUsers">10</integer>
+
+    <!-- Maximum number of users we allow to be running at a time -->
+    <integer name="config_multiuserMaxRunningUsers">5</integer>
+
+    <!-- True if the device supports system decorations on secondary displays. -->
+    <bool name="config_supportsSystemDecorsOnSecondaryDisplays">true</bool>
+    <!-- This is the default launcher package with an activity to use on secondary displays that
+         support system decorations.
+         This launcher package must have an activity that supports multiple instances and has
+         corresponding launch mode set in AndroidManifest.
+         {@see android.view.Display#FLAG_SHOULD_SHOW_SYSTEM_DECORATIONS} -->
+    <string name="config_secondaryHomePackage" translatable="false">com.android.car.multidisplay</string>
+    <!-- Whether to only install system packages on a user if they're whitelisted for that user
+         type. These are flags and can be freely combined.
+         0  - disable whitelist (install all system packages; no logging)
+         1  - enforce (only install system packages if they are whitelisted)
+         2  - log (log non-whitelisted packages)
+         4  - any package not mentioned in the whitelist file is implicitly whitelisted on all users
+         8  - same as 4, but just for the SYSTEM user
+         16 - ignore OTAs (don't install system packages during OTAs)
+         Common scenarios:
+          - to enable feature (fully enforced) for a complete whitelist: 1
+          - to enable feature for an incomplete whitelist (so use implicit whitelist mode): 5
+          - to enable feature but implicitly whitelist for SYSTEM user to ease local development: 9
+          - to disable feature completely if it had never been enabled: 16
+          - to henceforth disable feature and try to undo its previous effects: 0
+        Note: This list must be kept current with PACKAGE_WHITELIST_MODE_PROP in
+        frameworks/base/services/core/java/com/android/server/pm/UserSystemPackageInstaller.java
+        Package whitelist disabled for testing profile user as default whitelist does not
+        support PROFILE user. -->
+    <integer name="config_userTypePackageWhitelistMode">2</integer>
+
+    <!-- Whether the device allows users to start in background visible on displays.
+         Should be false for most devices, except automotive vehicle with passenger displays. -->
+    <bool name="config_multiuserVisibleBackgroundUsers">true</bool>
+
+    <!-- Enable multi-user IME sessions -->
+    <string translatable="false" name="config_deviceSpecificInputMethodManagerService">com.android.server.inputmethod.InputMethodManagerServiceProxy$Lifecycle</string>
+
+</resources>
diff --git a/shared/auto_mdnd/overlay/frameworks/base/core/res/res/values/config.xml b/shared/auto_mdnd/overlay/frameworks/base/core/res/res/values/config.xml
new file mode 100644
index 0000000..b23af7a
--- /dev/null
+++ b/shared/auto_mdnd/overlay/frameworks/base/core/res/res/values/config.xml
@@ -0,0 +1,25 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+/*
+** Copyright 2023, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+-->
+
+<resources xmlns:xliff="urn:oasis:names:tc:xliff:document:1.2">
+    <!-- Whether the device allows users to start in background visible on the default display.
+         Should be false for most devices, except passenger-only automotive build (i.e., when
+         Android runs in a separate system in the back seat to manage the passenger displays) -->
+    <bool name="config_multiuserVisibleBackgroundUsersOnDefaultDisplay">true</bool>
+</resources>
diff --git a/shared/auto_portrait/OWNERS b/shared/auto_portrait/OWNERS
new file mode 100644
index 0000000..5bc897b
--- /dev/null
+++ b/shared/auto_portrait/OWNERS
@@ -0,0 +1,4 @@
+include device/google/cuttlefish:/shared/auto/OWNERS
+babakbo@google.com
+calhuang@google.com
+priyanksingh@google.com
diff --git a/shared/auto_portrait/android-info.txt b/shared/auto_portrait/android-info.txt
new file mode 100644
index 0000000..60b759e
--- /dev/null
+++ b/shared/auto_portrait/android-info.txt
@@ -0,0 +1 @@
+config=auto_portrait
diff --git a/shared/auto_portrait/display_settings.xml b/shared/auto_portrait/display_settings.xml
new file mode 100644
index 0000000..54a1508
--- /dev/null
+++ b/shared/auto_portrait/display_settings.xml
@@ -0,0 +1,25 @@
+<?xml version='1.0' encoding='utf-8' standalone='yes' ?>
+<!--
+/*
+** Copyright 2023, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+-->
+
+<display-settings>
+
+  <!-- Use physical port number instead of local id -->
+  <config identifier="1" />
+
+</display-settings>
diff --git a/shared/camera/BoardConfig.mk b/shared/camera/BoardConfig.mk
new file mode 100644
index 0000000..5a80f77
--- /dev/null
+++ b/shared/camera/BoardConfig.mk
@@ -0,0 +1,17 @@
+#
+# Copyright 2022 The Android Open-Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+BOARD_VENDOR_SEPOLICY_DIRS += device/google/cuttlefish/shared/camera/sepolicy
diff --git a/shared/camera/device_vendor.mk b/shared/camera/device_vendor.mk
new file mode 100644
index 0000000..93dda4a
--- /dev/null
+++ b/shared/camera/device_vendor.mk
@@ -0,0 +1,58 @@
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+PRODUCT_VENDOR_PROPERTIES += \
+    ro.camerax.extensions.enabled=true
+
+# Enable Camera Extension sample
+ifeq ($(TARGET_USE_CAMERA_ADVANCED_EXTENSION_SAMPLE),true)
+PRODUCT_PACKAGES += \
+    androidx.camera.extensions.impl.advanced advancedSample_camera_extensions.xml \
+    libencoderjpeg_jni
+else
+PRODUCT_PACKAGES += androidx.camera.extensions.impl sample_camera_extensions.xml
+endif
+
+PRODUCT_SOONG_NAMESPACES += hardware/google/camera
+PRODUCT_SOONG_NAMESPACES += hardware/google/camera/devices/EmulatedCamera
+
+# TODO(b/257379485): 3A is incrementally enabling cuttlefish build for native
+# code coverage support, temporary require separate namespace for folders that
+# can be built successfully.
+PRODUCT_SOONG_NAMESPACES += vendor/google/camera/google_3a/libs_v4/g3ABase
+PRODUCT_SOONG_NAMESPACES += vendor/google/camera/google_3a/libs_v4/gAF
+PRODUCT_SOONG_NAMESPACES += vendor/google/camera/google_3a/libs_v4/gHAWB/native_coverage
+
+PRODUCT_COPY_FILES += \
+    frameworks/native/data/etc/android.hardware.camera.concurrent.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.hardware.camera.concurrent.xml \
+    frameworks/native/data/etc/android.hardware.camera.flash-autofocus.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.hardware.camera.flash-autofocus.xml \
+    frameworks/native/data/etc/android.hardware.camera.front.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.hardware.camera.front.xml \
+    frameworks/native/data/etc/android.hardware.camera.full.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.hardware.camera.full.xml \
+    frameworks/native/data/etc/android.hardware.camera.raw.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.hardware.camera.raw.xml \
+    hardware/google/camera/devices/EmulatedCamera/hwl/configs/emu_camera_back.json:$(TARGET_COPY_OUT_VENDOR)/etc/config/emu_camera_back.json \
+    hardware/google/camera/devices/EmulatedCamera/hwl/configs/emu_camera_front.json:$(TARGET_COPY_OUT_VENDOR)/etc/config/emu_camera_front.json \
+    hardware/google/camera/devices/EmulatedCamera/hwl/configs/emu_camera_depth.json:$(TARGET_COPY_OUT_VENDOR)/etc/config/emu_camera_depth.json
+
+ifeq ($(TARGET_USE_VSOCK_CAMERA_HAL_IMPL),true)
+PRODUCT_PACKAGES += \
+    android.hardware.camera.provider@2.7-external-vsock-service \
+    android.hardware.camera.provider@2.7-impl-cuttlefish
+DEVICE_MANIFEST_FILE += \
+    device/google/cuttlefish/guest/hals/camera/manifest.xml
+else
+PRODUCT_PACKAGES += com.google.emulated.camera.provider.hal
+PRODUCT_PACKAGES += com.google.emulated.camera.provider.hal.fastscenecycle
+endif
diff --git a/shared/camera/sepolicy/OWNERS b/shared/camera/sepolicy/OWNERS
new file mode 100644
index 0000000..9b37b0e
--- /dev/null
+++ b/shared/camera/sepolicy/OWNERS
@@ -0,0 +1,4 @@
+include platform/system/sepolicy:/OWNERS
+
+adelva@google.com
+rurumihong@google.com
diff --git a/shared/camera/sepolicy/bug_map b/shared/camera/sepolicy/bug_map
new file mode 100644
index 0000000..8e29068
--- /dev/null
+++ b/shared/camera/sepolicy/bug_map
@@ -0,0 +1 @@
+gmscore_app hal_camera_prop file b/156287758
diff --git a/shared/camera/sepolicy/file_contexts b/shared/camera/sepolicy/file_contexts
new file mode 100644
index 0000000..6e7490a
--- /dev/null
+++ b/shared/camera/sepolicy/file_contexts
@@ -0,0 +1,4 @@
+/vendor/bin/hw/android\.hardware\.camera\.provider@2\.7-external-vsock-service u:object_r:hal_camera_default_exec:s0
+/vendor/bin/hw/android\.hardware\.camera\.provider@2\.7-service-google u:object_r:hal_camera_default_exec:s0
+/vendor/bin/hw/android\.hardware\.camera\.provider@2\.7-service-google-lazy u:object_r:hal_camera_default_exec:s0
+
diff --git a/shared/camera/sepolicy/hal_camera_default.te b/shared/camera/sepolicy/hal_camera_default.te
new file mode 100644
index 0000000..d123017
--- /dev/null
+++ b/shared/camera/sepolicy/hal_camera_default.te
@@ -0,0 +1,18 @@
+vndbinder_use(hal_camera_default)
+
+hal_client_domain(hal_camera_default, hal_graphics_allocator)
+
+# For camera hal to talk with sensor service
+binder_call(hal_camera_default, sensor_service_server)
+binder_call(sensor_service_server, hal_camera_default)
+
+# Allow the Camera HAL to communicate with the thermal HAL.
+hal_client_domain(hal_camera_default, hal_thermal)
+
+# Vsocket camera
+allow hal_camera_default self:vsock_socket { accept bind create getopt listen read write };
+
+set_prop(hal_camera_default, vendor_camera_prop)
+
+# For observing apex file changes
+allow hal_camera_default apex_info_file:file r_file_perms;
diff --git a/shared/camera/sepolicy/property.te b/shared/camera/sepolicy/property.te
new file mode 100644
index 0000000..bb7a5b1
--- /dev/null
+++ b/shared/camera/sepolicy/property.te
@@ -0,0 +1 @@
+vendor_internal_prop(vendor_camera_prop)
diff --git a/shared/camera/sepolicy/property_contexts b/shared/camera/sepolicy/property_contexts
new file mode 100644
index 0000000..3d6ebfb
--- /dev/null
+++ b/shared/camera/sepolicy/property_contexts
@@ -0,0 +1,2 @@
+persist.vendor.camera.  u:object_r:vendor_camera_prop:s0
+vendor.camera.          u:object_r:vendor_camera_prop:s0
diff --git a/shared/config/Android.bp b/shared/config/Android.bp
index f2d94e4..3f8ba18 100644
--- a/shared/config/Android.bp
+++ b/shared/config/Android.bp
@@ -1,3 +1,18 @@
+//
+// Copyright (C) 2019 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
 package {
     default_applicable_licenses: ["Android-Apache-2.0"],
 }
@@ -22,6 +37,18 @@
 }
 
 prebuilt_etc_host {
+    name: "cvd_config_auto_md.json",
+    src: "config_auto_md.json",
+    sub_dir: "cvd_config",
+}
+
+prebuilt_etc_host {
+    name: "cvd_config_auto_portrait.json",
+    src: "config_auto_portrait.json",
+    sub_dir: "cvd_config",
+}
+
+prebuilt_etc_host {
     name: "cvd_config_foldable.json",
     src: "config_foldable.json",
     sub_dir: "cvd_config",
@@ -34,6 +61,12 @@
 }
 
 prebuilt_etc_host {
+    name: "cvd_config_minidroid.json",
+    src: "config_minidroid.json",
+    sub_dir: "cvd_config",
+}
+
+prebuilt_etc_host {
     name: "cvd_config_phone.json",
     src: "config_phone.json",
     sub_dir: "cvd_config",
@@ -63,12 +96,6 @@
     sub_dir: "cvd_config",
 }
 
-prebuilt_etc_host {
-    name: "grub.cfg",
-    src: "grub.cfg",
-    sub_dir: "grub",
-}
-
 prebuilt_etc {
     name: "wpa_supplicant_overlay.conf.cf",
     src: "wpa_supplicant_overlay.conf",
@@ -89,3 +116,63 @@
     name: "manifest_android.hardware.bluetooth@1.1-service.xml",
     srcs: ["manifest_android.hardware.bluetooth@1.1-service.xml"]
 }
+
+genrule {
+    name: "gen_fstab_cf_f2fs_hctr2",
+    srcs: ["fstab.in"],
+    out: ["fstab.cf.f2fs.hctr2"],
+    tool_files: [ "sed.f2fs", "sed.hctr2" ],
+    cmd: "sed -f $(location sed.f2fs) -f $(location sed.hctr2) $(in) > $(out)",
+}
+
+genrule {
+    name: "gen_fstab_cf_f2fs_cts",
+    srcs: ["fstab.in"],
+    out: ["fstab.cf.f2fs.cts"],
+    tool_files: [ "sed.f2fs", "sed.cts" ],
+    cmd: "sed -f $(location sed.f2fs) -f $(location sed.cts) $(in) > $(out)",
+}
+
+genrule {
+    name: "gen_fstab_cf_ext4_hctr2",
+    srcs: ["fstab.in"],
+    out: ["fstab.cf.ext4.hctr2"],
+    tool_files: [ "sed.ext4", "sed.hctr2" ],
+    cmd: "sed -f $(location sed.ext4) -f $(location sed.hctr2) $(in) > $(out)",
+}
+
+genrule {
+    name: "gen_fstab_cf_ext4_cts",
+    srcs: ["fstab.in"],
+    out: ["fstab.cf.ext4.cts"],
+    tool_files: [ "sed.ext4", "sed.cts" ],
+    cmd: "sed -f $(location sed.ext4) -f $(location sed.cts) $(in) > $(out)",
+}
+
+prebuilt_etc {
+    name: "fstab.cf.f2fs.hctr2",
+    src: ":gen_fstab_cf_f2fs_hctr2",
+    vendor: true,
+    vendor_ramdisk_available: true,
+}
+
+prebuilt_etc {
+    name: "fstab.cf.f2fs.cts",
+    src: ":gen_fstab_cf_f2fs_cts",
+    vendor: true,
+    vendor_ramdisk_available: true,
+}
+
+prebuilt_etc {
+    name: "fstab.cf.ext4.hctr2",
+    src: ":gen_fstab_cf_ext4_hctr2",
+    vendor: true,
+    vendor_ramdisk_available: true,
+}
+
+prebuilt_etc {
+    name: "fstab.cf.ext4.cts",
+    src: ":gen_fstab_cf_ext4_cts",
+    vendor: true,
+    vendor_ramdisk_available: true,
+}
\ No newline at end of file
diff --git a/shared/config/config_auto_md.json b/shared/config/config_auto_md.json
new file mode 100644
index 0000000..7eb22c3
--- /dev/null
+++ b/shared/config/config_auto_md.json
@@ -0,0 +1,7 @@
+{
+	"display0": "width=1080,height=600,dpi=120",
+	"display1": "width=400,height=600,dpi=120",
+	"display2": "width=800,height=600,dpi=120",
+	"display3": "width=800,height=600,dpi=120",
+	"memory_mb" : 4096
+}
diff --git a/shared/config/config_auto_portrait.json b/shared/config/config_auto_portrait.json
new file mode 100644
index 0000000..565f3dc
--- /dev/null
+++ b/shared/config/config_auto_portrait.json
@@ -0,0 +1,4 @@
+{
+	"display0": "width=1224,height=2175,dpi=140",
+	"memory_mb" : 4096
+}
diff --git a/shared/config/config_minidroid.json b/shared/config/config_minidroid.json
new file mode 100644
index 0000000..3d90b94
--- /dev/null
+++ b/shared/config/config_minidroid.json
@@ -0,0 +1,10 @@
+{
+	"enable_audio": false,
+	"enable_host_bluetooth": false,
+	"enable_modem_simulator": false,
+	"gpu_mode" : "none",
+	"memory_mb" : 256,
+	"start_gnss_proxy": false,
+	"start_webrtc": false,
+	"use_sdcard" : false
+}
diff --git a/shared/config/config_phone.json b/shared/config/config_phone.json
index 69ad977..b46e962 100644
--- a/shared/config/config_phone.json
+++ b/shared/config/config_phone.json
@@ -2,5 +2,6 @@
 	"x_res" : 720,
 	"y_res" : 1280,
 	"dpi" : 320,
-	"memory_mb" : 2048
+	"memory_mb" : 4096,
+	"ddr_mem_mb" : 4915
 }
diff --git a/shared/config/fstab.ext4 b/shared/config/fstab.ext4
deleted file mode 100644
index 4d3fe9b..0000000
--- a/shared/config/fstab.ext4
+++ /dev/null
@@ -1,30 +0,0 @@
-# Non-dynamic, boot critical partitions
-/dev/block/by-name/boot /boot emmc defaults recoveryonly,slotselect,first_stage_mount,avb=boot
-/dev/block/by-name/init_boot /init_boot emmc defaults recoveryonly,slotselect,first_stage_mount,avb=init_boot
-/dev/block/by-name/vendor_boot /vendor_boot emmc defaults recoveryonly,slotselect
-system /system erofs ro wait,logical,first_stage_mount,slotselect,avb=vbmeta_system,avb_keys=/avb
-system /system ext4 noatime,ro,errors=panic wait,logical,first_stage_mount,slotselect,avb=vbmeta_system,avb_keys=/avb
-# Add all non-dynamic partitions except system, after this comment
-/dev/block/by-name/userdata /data ext4 nodev,noatime,nosuid,errors=panic latemount,wait,check,quota,formattable,fileencryption=aes-256-xts:aes-256-cts,keydirectory=/metadata/vold/metadata_encryption,checkpoint=block
-/dev/block/by-name/metadata /metadata ext4 nodev,noatime,nosuid,errors=panic wait,formattable,first_stage_mount,check
-/dev/block/by-name/misc /misc emmc defaults defaults
-# Add all dynamic partitions except system, after this comment
-odm /odm erofs ro wait,logical,first_stage_mount,slotselect,avb
-odm /odm ext4 noatime,ro,errors=panic wait,logical,first_stage_mount,slotselect,avb
-product /product erofs ro wait,logical,first_stage_mount,slotselect,avb
-product /product ext4 noatime,ro,errors=panic wait,logical,first_stage_mount,slotselect,avb
-system_ext /system_ext erofs ro wait,logical,first_stage_mount,slotselect,avb=vbmeta_system
-system_ext /system_ext ext4 noatime,ro,errors=panic wait,logical,first_stage_mount,slotselect,avb=vbmeta_system
-vendor /vendor erofs ro wait,logical,first_stage_mount,slotselect,avb=vbmeta
-vendor /vendor ext4 noatime,ro,errors=panic wait,logical,first_stage_mount,slotselect,avb=vbmeta
-vendor_dlkm /vendor_dlkm erofs ro wait,logical,first_stage_mount,slotselect,avb
-vendor_dlkm /vendor_dlkm ext4 noatime,ro,errors=panic wait,logical,first_stage_mount,slotselect,avb
-odm_dlkm /odm_dlkm erofs ro wait,logical,first_stage_mount,slotselect,avb
-odm_dlkm /odm_dlkm ext4 noatime,ro,errors=panic wait,logical,first_stage_mount,slotselect,avb
-system_dlkm /system_dlkm erofs ro wait,logical,first_stage_mount,slotselect,avb=vbmeta
-system_dlkm /system_dlkm ext4 noatime,ro,errors=panic wait,logical,first_stage_mount,slotselect,avb=vbmeta
-# ZRAM, SD-Card and virtiofs shares
-/dev/block/zram0 none swap defaults zramsize=75%
-/dev/block/vdc1 /sdcard vfat defaults recoveryonly
-/devices/*/block/vdc auto auto defaults voldmanaged=sdcard1:auto,encryptable=userdata
-shared /mnt/vendor/shared virtiofs nosuid,nodev,noatime nofail
diff --git a/shared/config/fstab.f2fs b/shared/config/fstab.f2fs
deleted file mode 100644
index 41162ed..0000000
--- a/shared/config/fstab.f2fs
+++ /dev/null
@@ -1,30 +0,0 @@
-# Non-dynamic, boot critical partitions
-/dev/block/by-name/boot /boot emmc defaults recoveryonly,slotselect,first_stage_mount,avb=boot
-/dev/block/by-name/init_boot /init_boot emmc defaults recoveryonly,slotselect,first_stage_mount,avb=init_boot
-/dev/block/by-name/vendor_boot /vendor_boot emmc defaults recoveryonly,slotselect
-system /system erofs ro wait,logical,first_stage_mount,slotselect,avb=vbmeta_system,avb_keys=/avb
-system /system ext4 noatime,ro,errors=panic wait,logical,first_stage_mount,slotselect,avb=vbmeta_system,avb_keys=/avb
-# Add all non-dynamic partitions except system, after this comment
-/dev/block/by-name/userdata /data f2fs nodev,noatime,nosuid,inlinecrypt,reserve_root=32768 latemount,wait,check,quota,formattable,fileencryption=aes-256-xts:aes-256-cts:v2+inlinecrypt_optimized,fscompress,keydirectory=/metadata/vold/metadata_encryption,checkpoint=fs
-/dev/block/by-name/metadata /metadata ext4 nodev,noatime,nosuid,errors=panic wait,formattable,first_stage_mount,check
-/dev/block/by-name/misc /misc emmc defaults defaults
-# Add all dynamic partitions except system, after this comment
-odm /odm erofs ro wait,logical,first_stage_mount,slotselect,avb
-odm /odm ext4 noatime,ro,errors=panic wait,logical,first_stage_mount,slotselect,avb
-product /product erofs ro wait,logical,first_stage_mount,slotselect,avb
-product /product ext4 noatime,ro,errors=panic wait,logical,first_stage_mount,slotselect,avb
-system_ext /system_ext erofs ro wait,logical,first_stage_mount,slotselect,avb=vbmeta_system
-system_ext /system_ext ext4 noatime,ro,errors=panic wait,logical,first_stage_mount,slotselect,avb=vbmeta_system
-vendor /vendor erofs ro wait,logical,first_stage_mount,slotselect,avb=vbmeta
-vendor /vendor ext4 noatime,ro,errors=panic wait,logical,first_stage_mount,slotselect,avb=vbmeta
-vendor_dlkm /vendor_dlkm erofs ro wait,logical,first_stage_mount,slotselect,avb
-vendor_dlkm /vendor_dlkm ext4 noatime,ro,errors=panic wait,logical,first_stage_mount,slotselect,avb
-odm_dlkm /odm_dlkm erofs ro wait,logical,first_stage_mount,slotselect,avb
-odm_dlkm /odm_dlkm ext4 noatime,ro,errors=panic wait,logical,first_stage_mount,slotselect,avb
-system_dlkm /system_dlkm erofs ro wait,logical,first_stage_mount,slotselect,avb=vbmeta
-system_dlkm /system_dlkm ext4 noatime,ro,errors=panic wait,logical,first_stage_mount,slotselect,avb=vbmeta
-# ZRAM, SD-Card and virtiofs shares
-/dev/block/zram0 none swap defaults zramsize=75%
-/dev/block/vdc1 /sdcard vfat defaults recoveryonly
-/devices/*/block/vdc auto auto defaults voldmanaged=sdcard1:auto,encryptable=userdata
-shared /mnt/vendor/shared virtiofs nosuid,nodev,noatime nofail
diff --git a/shared/config/fstab.in b/shared/config/fstab.in
new file mode 100644
index 0000000..baea40d
--- /dev/null
+++ b/shared/config/fstab.in
@@ -0,0 +1,30 @@
+# Non-dynamic, boot critical partitions
+/dev/block/by-name/boot /boot emmc defaults recoveryonly,slotselect,first_stage_mount,avb=boot
+/dev/block/by-name/init_boot /init_boot emmc defaults recoveryonly,slotselect,first_stage_mount,avb=init_boot
+/dev/block/by-name/vendor_boot /vendor_boot emmc defaults recoveryonly,slotselect
+system /system erofs ro wait,logical,first_stage_mount,slotselect,avb=vbmeta_system,avb_keys=/avb
+system /system ext4 noatime,ro,errors=panic wait,logical,first_stage_mount,slotselect,avb=vbmeta_system,avb_keys=/avb
+# Add all non-dynamic partitions except system, after this comment
+/dev/block/by-name/userdata /data @userdata_fs_type@ nodev,noatime,nosuid,@userdata_mount_flags@ latemount,wait,check,quota,formattable,keydirectory=/metadata/vold/metadata_encryption,@userdata_fsmgr_flags@
+/dev/block/by-name/metadata /metadata f2fs nodev,noatime,nosuid wait,check,formattable,first_stage_mount
+/dev/block/by-name/misc /misc emmc defaults defaults
+# Add all dynamic partitions except system, after this comment
+odm /odm erofs ro wait,logical,first_stage_mount,slotselect,avb
+odm /odm ext4 noatime,ro,errors=panic wait,logical,first_stage_mount,slotselect,avb
+product /product erofs ro wait,logical,first_stage_mount,slotselect,avb
+product /product ext4 noatime,ro,errors=panic wait,logical,first_stage_mount,slotselect,avb
+system_ext /system_ext erofs ro wait,logical,first_stage_mount,slotselect,avb=vbmeta_system
+system_ext /system_ext ext4 noatime,ro,errors=panic wait,logical,first_stage_mount,slotselect,avb=vbmeta_system
+vendor /vendor erofs ro wait,logical,first_stage_mount,slotselect,avb=vbmeta
+vendor /vendor ext4 noatime,ro,errors=panic wait,logical,first_stage_mount,slotselect,avb=vbmeta
+vendor_dlkm /vendor_dlkm erofs ro wait,logical,first_stage_mount,slotselect,avb=vbmeta_vendor_dlkm
+vendor_dlkm /vendor_dlkm ext4 noatime,ro,errors=panic wait,logical,first_stage_mount,slotselect,avb=vbmeta_vendor_dlkm
+odm_dlkm /odm_dlkm erofs ro wait,logical,first_stage_mount,slotselect,avb
+odm_dlkm /odm_dlkm ext4 noatime,ro,errors=panic wait,logical,first_stage_mount,slotselect,avb
+system_dlkm /system_dlkm erofs ro wait,logical,first_stage_mount,slotselect,avb=vbmeta
+system_dlkm /system_dlkm ext4 noatime,ro,errors=panic wait,logical,first_stage_mount,slotselect,avb=vbmeta
+# ZRAM, SD-Card and virtiofs shares
+/dev/block/zram0 none swap defaults zramsize=75%
+/dev/block/vdc1 /sdcard vfat defaults recoveryonly
+/devices/*/block/vdc auto auto defaults voldmanaged=sdcard1:auto,encryptable=userdata
+shared /mnt/vendor/shared virtiofs nosuid,nodev,noatime nofail
diff --git a/shared/config/graphics/init_graphics.vendor.rc b/shared/config/graphics/init_graphics.vendor.rc
new file mode 100644
index 0000000..1605c3d
--- /dev/null
+++ b/shared/config/graphics/init_graphics.vendor.rc
@@ -0,0 +1,15 @@
+on early-init
+#    loglevel 8
+
+    setprop ro.sf.lcd_density ${ro.boot.lcd_density}
+    setprop ro.hardware.egl ${ro.boot.hardware.egl}
+    setprop debug.sf.vsync_reactor_ignore_present_fences true
+    setprop ro.hardware.gralloc ${ro.boot.hardware.gralloc}
+    setprop ro.hardware.hwcomposer ${ro.boot.hardware.hwcomposer}
+    setprop ro.vendor.hwcomposer.display_finder_mode ${ro.boot.hardware.hwcomposer.display_finder_mode}
+    setprop ro.vendor.hwcomposer.mode ${ro.boot.hardware.hwcomposer.mode}
+    setprop ro.hardware.vulkan ${ro.boot.hardware.vulkan}
+    setprop ro.cpuvulkan.version ${ro.boot.cpuvulkan.version}
+    setprop ro.opengles.version ${ro.boot.opengles.version}
+    setprop debug.angle.feature_overrides_enabled ${ro.boot.hardware.angle_feature_overrides_enabled}
+    setprop debug.angle.feature_overrides_disabled ${ro.boot.hardware.angle_feature_overrides_disabled}
\ No newline at end of file
diff --git a/shared/config/grub.cfg b/shared/config/grub.cfg
deleted file mode 100644
index d15eb8e..0000000
--- a/shared/config/grub.cfg
+++ /dev/null
@@ -1,43 +0,0 @@
-# Root grub.cfg used either to boot raw kernel and/or initramfs.img, or to
-# chain to an installed distro's GRUB configuration file
-
-# These options are accessible to chain-loaded configurations as well:
-#
-# pnpacpi=off      Disable on QEMU; allows serdev to claim platform serial
-# acpi=noirq       Do not configure IRQ routing using ACPI tables
-# reboot=k         Reboot using keyboard method, rather than ACPI
-# noexec=off       Some kernels panic when setting up NX
-# noefi            Some kernels panic when trying to use U-Boot EFI
-# panic=-1         Don't reboot on panic
-# console=hvc0     Switch kernel logging to virtio-console once available
-# console=ttyAMA0  QEMU on ARM64 uses alternative serial implementation
-#
-if [ "$grub_cpu" = "i386" ]; then
-  set cmdline="pnpacpi=off acpi=noirq reboot=k noexec=off console=ttyS0 noefi panic=-1 console=hvc0"
-elif [ "$grub_cpu" = "arm64" ]; then
-  set cmdline="console=ttyS0 console=ttyAMA0 noefi panic=-1 console=hvc0"
-else
-  echo "Warning: No architecture found for ${grub_cpu}"
-fi
-
-# Root filesystem is on a GUID partition with label "otheros_root"
-set rootfs="/dev/vda14"
-
-# Root filesystem with grub installed
-search --file --set root /boot/grub/grub.cfg --hint (hd0)
-if [ $? = 0 ]; then
-  set prefix=($root)/boot/grub
-  export cmdline
-  export rootfs
-  configfile $prefix/grub.cfg
-  normal_exit
-fi
-
-# Fall back if we couldn't chain to another GRUB install
-set timeout=0
-menuentry "Linux" {
-  linux /vmlinuz $cmdline root=$rootfs
-  if [ -e /initrd.img ]; then
-    initrd /initrd.img
-  fi
-}
diff --git a/shared/config/init.product.rc b/shared/config/init.product.rc
index 22371a3..d17444d 100644
--- a/shared/config/init.product.rc
+++ b/shared/config/init.product.rc
@@ -1,6 +1,9 @@
 on early-init
     setprop ro.setupwizard.mode ${ro.boot.setupwizard_mode}
 
+on early-init && property:ro.boot.enable_bootanimation=0
+    setprop debug.sf.nobootanimation 1
+
 service tombstone_transmit /product/bin/tombstone_transmit
     # Start tombstone_transmit after /data is mounted.
     class late_start
diff --git a/shared/config/init.vendor.rc b/shared/config/init.vendor.rc
index 8861ca3..7e886c5 100644
--- a/shared/config/init.vendor.rc
+++ b/shared/config/init.vendor.rc
@@ -1,19 +1,14 @@
 on early-init
 #    loglevel 8
 
-    setprop ro.sf.lcd_density ${ro.boot.lcd_density}
-    setprop ro.hardware.egl ${ro.boot.hardware.egl}
-    setprop debug.sf.vsync_reactor_ignore_present_fences true
-    setprop ro.hardware.gralloc ${ro.boot.hardware.gralloc}
-    setprop ro.hardware.hwcomposer ${ro.boot.hardware.hwcomposer}
-    setprop ro.vendor.hwcomposer.mode ${ro.boot.hardware.hwcomposer.mode}
-    setprop ro.hardware.vulkan ${ro.boot.hardware.vulkan}
-    setprop ro.cpuvulkan.version ${ro.boot.cpuvulkan.version}
     setprop ro.hw_timeout_multiplier ${ro.boot.hw_timeout_multiplier}
-    setprop ro.opengles.version ${ro.boot.opengles.version}
+    setprop hypervisor.memory_reclaim.supported ${ro.boot.hypervisor.vm.supported}
 
+    start metrics_helper
     # start module load in the background
     start vendor.dlkm_loader
+    # specially load zram as it is a "leaf" GKI module
+    exec u:r:modprobe:s0 -- /system/bin/modprobe -a -d /system/lib/modules zram.ko
 
 on init
     # ZRAM setup
@@ -50,11 +45,7 @@
     # So GceBootReporter can print to kmsg
     chmod 622 /dev/kmsg
 
-on post-fs
-    # set RLIMIT_MEMLOCK to 64MB
-    setrlimit 8 67108864 67108864
-
-on post-fs-data
+on post-fs-data && property:ro.vendor.disable_rename_eth0=
     # works around framework netiface enumeration issue
     # TODO(b/202731768): Add this `start rename_eth0` command to the init.rc for rename_netiface
     start rename_eth0
@@ -106,7 +97,12 @@
     disabled
     oneshot
 
-service socket_vsock_proxy /vendor/bin/socket_vsock_proxy -server=vsock -tcp_port=5555 -vsock_port=6520
+service metrics_helper /vendor/bin/metrics_helper
+    group system
+    user root
+
+service socket_vsock_proxy /vendor/bin/socket_vsock_proxy -server_type=vsock -server_vsock_port=6520 -client_type=tcp -client_tcp_host=0.0.0.0 -client_tcp_port=5555
+    user root
 
 service seriallogging /system/bin/logcat -b all -v threadtime -f /dev/hvc2 *:V
     class main
@@ -115,6 +111,7 @@
 
 service vsoc_input_service /vendor/bin/vsoc_input_service -touch_port=${ro.boot.vsock_touch_port} -keyboard_port=${ro.boot.vsock_keyboard_port}
     group root uhid
+    user root
     disabled
 
 on early-boot && property:ro.boot.vsock_touch_port=*
@@ -127,6 +124,7 @@
 
 service bugreport /system/bin/dumpstate -d -p -z
     class main
+    user root
     disabled
     oneshot
     keycodes 30 48
@@ -136,3 +134,7 @@
     class early_hal # Start together with system_suspend HAL
     group system
     user root
+
+on boot && property:ro.boot.enable_confirmationui=*
+   enable vendor.confirmationui_default
+   start vendor.confirmationui_default
diff --git a/shared/config/input/Crosvm_Virtio_Multitouch_Touchscreen_0.idc b/shared/config/input/Crosvm_Virtio_Multitouch_Touchscreen_0.idc
index a4e2b19..cf2cc5d 100644
--- a/shared/config/input/Crosvm_Virtio_Multitouch_Touchscreen_0.idc
+++ b/shared/config/input/Crosvm_Virtio_Multitouch_Touchscreen_0.idc
@@ -3,4 +3,4 @@
 touch.deviceType = touchScreen
 touch.orientationAware = 1
 
-touch.displayId = local:4619827259835644672
+touch.displayId = local:4619827353912518656
diff --git a/shared/config/input/Crosvm_Virtio_Multitouch_Touchscreen_1.idc b/shared/config/input/Crosvm_Virtio_Multitouch_Touchscreen_1.idc
index a375e58..4ff845c 100644
--- a/shared/config/input/Crosvm_Virtio_Multitouch_Touchscreen_1.idc
+++ b/shared/config/input/Crosvm_Virtio_Multitouch_Touchscreen_1.idc
@@ -3,4 +3,4 @@
 touch.deviceType = touchScreen
 touch.orientationAware = 1
 
-touch.displayId = local:4619827551948147201
+touch.displayId = local:4619827353912518657
diff --git a/shared/config/input/Crosvm_Virtio_Multitouch_Touchscreen_2.idc b/shared/config/input/Crosvm_Virtio_Multitouch_Touchscreen_2.idc
index db17d3c..c5e95ba 100644
--- a/shared/config/input/Crosvm_Virtio_Multitouch_Touchscreen_2.idc
+++ b/shared/config/input/Crosvm_Virtio_Multitouch_Touchscreen_2.idc
@@ -3,4 +3,4 @@
 touch.deviceType = touchScreen
 touch.orientationAware = 1
 
-touch.displayId = local:4619827124781842690
+touch.displayId = local:4619827353912518658
diff --git a/shared/config/input/Crosvm_Virtio_Multitouch_Touchscreen_3.idc b/shared/config/input/Crosvm_Virtio_Multitouch_Touchscreen_3.idc
index b910ad3..3402cb0 100644
--- a/shared/config/input/Crosvm_Virtio_Multitouch_Touchscreen_3.idc
+++ b/shared/config/input/Crosvm_Virtio_Multitouch_Touchscreen_3.idc
@@ -3,4 +3,4 @@
 touch.deviceType = touchScreen
 touch.orientationAware = 1
 
-touch.displayId = local:4619827540095559171
+touch.displayId = local:4619827353912518659
diff --git a/shared/config/manifest.xml b/shared/config/manifest.xml
index 126557f..ceece96 100644
--- a/shared/config/manifest.xml
+++ b/shared/config/manifest.xml
@@ -16,221 +16,8 @@
 ** limitations under the License.
 */
 -->
-<manifest version="1.0" type="device" target-level="7">
-    <hal format="hidl">
-        <name>android.hardware.audio.effect</name>
-        <transport>hwbinder</transport>
-        <version>7.0</version>
-        <interface>
-            <name>IEffectsFactory</name>
-            <instance>default</instance>
-        </interface>
-    </hal>
-    <hal format="hidl">
-        <name>android.hardware.authsecret</name>
-        <transport>hwbinder</transport>
-        <version>1.0</version>
-        <interface>
-            <name>IAuthSecret</name>
-            <instance>default</instance>
-        </interface>
-    </hal>
-    <!-- TODO (b/130076570):
-    <hal format="hidl">
-        <name>android.hardware.biometrics.face</name>
-        <transport>hwbinder</transport>
-        <version>1.0</version>
-        <interface>
-            <name>IBiometricsFace</name>
-            <instance>default</instance>
-        </interface>
-    </hal>
-    -->
-    <!-- TODO (b/130079316):
-    <hal format="hidl">
-        <name>android.hardware.biometrics.fingerprint</name>
-        <transport>hwbinder</transport>
-        <version>2.1</version>
-        <interface>
-            <name>IBiometricsFingerprint</name>
-            <instance>default</instance>
-        </interface>
-    </hal>
-    -->
-    <!-- TODO (b/130078386):
-    <hal format="hidl">
-        <name>android.hardware.confirmationui</name>
-        <transport>hwbinder</transport>
-        <version>1.0</version>
-        <interface>
-            <name>IConfirmationUI</name>
-            <instance>default</instance>
-        </interface>
-    </hal>
-    -->
-    <!-- TODO (b/130077250):
-    <hal format="hidl">
-        <name>android.hardware.contexthub</name>
-        <transport>hwbinder</transport>
-        <version>1.0</version>
-        <interface>
-            <name>IContexthub</name>
-            <instance>default</instance>
-        </interface>
-    </hal>
-    -->
-    <!-- TODO (b/130075874):
-    <hal format="hidl">
-        <name>android.hardware.ir</name>
-        <transport>hwbinder</transport>
-        <version>1.0</version>
-        <interface>
-            <name>IConsumerIr</name>
-            <instance>default</instance>
-        </interface>
-    </hal>
-    -->
-    <!-- TODO (b/130079342):
-    <hal format="hidl">
-        <name>android.hardware.memtrack</name>
-        <transport>hwbinder</transport>
-        <version>1.0</version>
-        <interface>
-            <name>IMemtrack</name>
-            <instance>default</instance>
-        </interface>
-    </hal>
-    -->
-    <!-- TODO (b/130080415):
-    <hal format="hidl">
-        <name>android.hardware.nfc</name>
-        <transport>hwbinder</transport>
-        <version>1.1</version>
-        <interface>
-            <name>INfc</name>
-            <instance>default</instance>
-        </interface>
-    </hal>
-    -->
-    <!-- TODO (b/130079343):
-    <hal format="hidl">
-        <name>android.hardware.oemlock</name>
-        <transport>hwbinder</transport>
-        <version>1.0</version>
-        <interface>
-            <name>IOemLock</name>
-            <instance>default</instance>
-        </interface>
-    </hal>
-    -->
-    <!-- TODO (b/130079239):
-    <hal format="hidl">
-        <name>android.hardware.secure_element</name>
-        <transport>hwbinder</transport>
-        <version>1.0</version>
-        <interface>
-            <name>ISecureElement</name>
-            <instance>eSE1</instance>
-        </interface>
-    </hal>
-    -->
-    <!--
-    <hal format="hidl">
-        <name>android.hardware.soundtrigger</name>
-        <transport>hwbinder</transport>
-        <version>2.3</version>
-        <interface>
-            <name>ISoundTriggerHw</name>
-            <instance>default</instance>
-        </interface>
-    </hal>
-    -->
-    <!-- TODO (b/130079321):
-    <hal format="hidl">
-        <name>android.hardware.tetheroffload.config</name>
-        <transport>hwbinder</transport>
-        <version>1.0</version>
-        <interface>
-            <name>IOffloadConfig</name>
-            <instance>default</instance>
-        </interface>
-    </hal>
-    -->
-    <!-- TODO (b/130080416):
-    <hal format="hidl">
-        <name>android.hardware.tetheroffload.control</name>
-        <transport>hwbinder</transport>
-        <version>1.0</version>
-        <interface>
-            <name>IOffloadControl</name>
-            <instance>default</instance>
-        </interface>
-    </hal>
-    -->
-    <!-- TODO (b/130076572):
-    <hal format="hidl">
-        <name>android.hardware.usb.gadget</name>
-        <transport>hwbinder</transport>
-        <version>1.0</version>
-        <interface>
-            <name>IUsbGadget</name>
-            <instance>default</instance>
-        </interface>
-    </hal>
-    -->
-    <!-- TODO (b/130079219):
-    <hal format="hidl">
-        <name>android.hardware.vr</name>
-        <transport>hwbinder</transport>
-        <version>1.0</version>
-        <interface>
-            <name>IVr</name>
-            <instance>default</instance>
-        </interface>
-    </hal>
-    -->
-    <!-- TODO (b/130079240):
-    <hal format="hidl">
-        <name>android.hardware.weaver</name>
-        <transport>hwbinder</transport>
-        <version>1.0</version>
-        <interface>
-            <name>IWeaver</name>
-            <instance>default</instance>
-        </interface>
-    </hal>
-    -->
-    <!-- TODO (b/130079638):
-    <hal format="hidl">
-        <name>android.hardware.wifi</name>
-        <transport>hwbinder</transport>
-        <version>1.3</version>
-        <interface>
-            <name>IWifi</name>
-            <instance>default</instance>
-        </interface>
-    </hal>
-    -->
-    <!-- TODO (b/130079936):
-    <hal format="hidl">
-        <name>android.hardware.wifi.hostapd</name>
-        <transport>hwbinder</transport>
-        <version>1.1</version>
-        <interface>
-            <name>IHostapd</name>
-            <instance>default</instance>
-        </interface>
-    </hal>
-    -->
-    <!-- TODO (b/130080335):
-    <hal format="hidl">
-        <name>android.hardware.wifi.offload</name>
-        <transport>hwbinder</transport>
-        <version>1.0</version>
-        <interface>
-            <name>IOffload</name>
-            <instance>default</instance>
-        </interface>
-    </hal>
-    -->
+<manifest version="1.0" type="device" target-level="8">
+
+    <!-- DO NOT ADD MORE - use vintf_fragments -->
+
 </manifest>
diff --git a/shared/config/media_codecs.xml b/shared/config/media_codecs.xml
index b6088e8..08c50c8 100644
--- a/shared/config/media_codecs.xml
+++ b/shared/config/media_codecs.xml
@@ -85,6 +85,7 @@
         <Setting name="max-video-encoder-input-buffers" value="12" />
     </Settings>
 
+    <Include href="media_codecs_google_tv.xml" />
     <Include href="media_codecs_google_audio.xml" />
     <Include href="media_codecs_google_telephony.xml" />
     <Include href="media_codecs_google_video.xml" />
diff --git a/shared/config/media_codecs_google_tv.xml b/shared/config/media_codecs_google_tv.xml
new file mode 100644
index 0000000..f684bda
--- /dev/null
+++ b/shared/config/media_codecs_google_tv.xml
@@ -0,0 +1,87 @@
+<?xml version="1.0" encoding="utf-8" ?>
+<!--
+/*
+** Copyright 2023, The Android Open Source Project.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+-->
+
+<!--
+<!DOCTYPE MediaCodecs [
+<!ELEMENT Include EMPTY>
+<!ATTLIST Include href CDATA #REQUIRED>
+<!ELEMENT MediaCodecs (Decoders|Encoders|Include)*>
+<!ELEMENT Decoders (MediaCodec|Include)*>
+<!ELEMENT Encoders (MediaCodec|Include)*>
+<!ELEMENT MediaCodec (Type|Quirk|Include)*>
+<!ATTLIST MediaCodec name CDATA #REQUIRED>
+<!ATTLIST MediaCodec type CDATA>
+<!ELEMENT Type EMPTY>
+<!ATTLIST Type name CDATA #REQUIRED>
+<!ELEMENT Quirk EMPTY>
+<!ATTLIST Quirk name CDATA #REQUIRED>
+]>
+
+There's a simple and a complex syntax to declare the availability of a
+media codec:
+
+A codec that properly follows the OpenMax spec and therefore doesn't have any
+quirks and that only supports a single content type can be declared like so:
+
+    <MediaCodec name="OMX.foo.bar" type="something/interesting" />
+
+If a codec has quirks OR supports multiple content types, the following syntax
+can be used:
+
+    <MediaCodec name="OMX.foo.bar" >
+        <Type name="something/interesting" />
+        <Type name="something/else" />
+        ...
+        <Quirk name="requires-allocate-on-input-ports" />
+        <Quirk name="requires-allocate-on-output-ports" />
+        <Quirk name="output-buffers-are-unreadable" />
+    </MediaCodec>
+
+Only the three quirks included above are recognized at this point:
+
+"requires-allocate-on-input-ports"
+    must be advertised if the component does not properly support specification
+    of input buffers using the OMX_UseBuffer(...) API but instead requires
+    OMX_AllocateBuffer to be used.
+
+"requires-allocate-on-output-ports"
+    must be advertised if the component does not properly support specification
+    of output buffers using the OMX_UseBuffer(...) API but instead requires
+    OMX_AllocateBuffer to be used.
+
+"output-buffers-are-unreadable"
+    must be advertised if the emitted output buffers of a decoder component
+    are not readable, i.e. use a custom format even though abusing one of
+    the official OMX colorspace constants.
+    Clients of such decoders will not be able to access the decoded data,
+    naturally making the component much less useful. The only use for
+    a component with this quirk is to render the output to the screen.
+    Audio decoders MUST NOT advertise this quirk.
+    Video decoders that advertise this quirk must be accompanied by a
+    corresponding color space converter for thumbnail extraction,
+    matching surfaceflinger support that can render the custom format to
+    a texture and possibly other code, so just DON'T USE THIS QUIRK.
+
+-->
+
+<MediaCodecs>
+    <Settings>
+      <Domain name="tv" enabled="true" />
+    </Settings>
+</MediaCodecs>
diff --git a/shared/config/media_codecs_performance.xml b/shared/config/media_codecs_performance.xml
index a85067f..f3c5c48 100644
--- a/shared/config/media_codecs_performance.xml
+++ b/shared/config/media_codecs_performance.xml
@@ -113,6 +113,14 @@
             <!-- MANUALLY ADJUSTED -->
             <Limit name="measured-frame-rate-1280x720" range="23-23" />
         </MediaCodec>
+        <MediaCodec name="c2.android.av1.encoder" type="video/av01" update="true">
+            <!-- MANUALLY ADJUSTED -->
+            <Limit name="measured-frame-rate-320x240" range="84-130" />
+            <!-- MANUALLY ADJUSTED -->
+            <Limit name="measured-frame-rate-720x480" range="20-43" />
+            <!-- MANUALLY ADJUSTED -->
+            <Limit name="measured-frame-rate-1280x720" range="8-21" />
+        </MediaCodec>
     </Encoders>
     <Decoders>
         <MediaCodec name="c2.android.avc.decoder" type="video/avc" update="true">
@@ -167,6 +175,16 @@
             <!-- 4 runs, min 65 max 88 gmean 77 -->
             <Limit name="measured-frame-rate-1920x1080" range="65-88" />
         </MediaCodec>
+        <MediaCodec name="c2.android.av1.decoder" type="video/av01" update="true">
+            <!-- MANUALLY ADJUSTED -->
+            <Limit name="measured-frame-rate-320x180" range="156-362" />
+            <!-- MANUALLY ADJUSTED -->
+            <Limit name="measured-frame-rate-640x360" range="63-162" />
+            <!-- MANUALLY ADJUSTED -->
+            <Limit name="measured-frame-rate-1280x720" range="40-110" />
+            <!-- MANUALLY ADJUSTED -->
+            <Limit name="measured-frame-rate-1920x1080" range="17-54" />
+        </MediaCodec>
         <MediaCodec name="OMX.google.h263.decoder" type="video/3gpp" update="true">
             <!-- 3 runs, min 1246 max 1390 gmean 1342 -->
             <Limit name="measured-frame-rate-176x144" range="1246-1390" />
diff --git a/shared/config/sed.cts b/shared/config/sed.cts
new file mode 100644
index 0000000..19c37ee
--- /dev/null
+++ b/shared/config/sed.cts
@@ -0,0 +1 @@
+s/@filename_encryption_mode@/aes-256-cts/
diff --git a/shared/config/sed.ext4 b/shared/config/sed.ext4
new file mode 100644
index 0000000..0205090
--- /dev/null
+++ b/shared/config/sed.ext4
@@ -0,0 +1,3 @@
+s/@userdata_fs_type@/ext4/
+s/@userdata_mount_flags@/errors=panic/
+s/@userdata_fsmgr_flags@/fileencryption=aes-256-xts:@filename_encryption_mode@,checkpoint=block/
diff --git a/shared/config/sed.f2fs b/shared/config/sed.f2fs
new file mode 100644
index 0000000..580a8c3
--- /dev/null
+++ b/shared/config/sed.f2fs
@@ -0,0 +1,3 @@
+s/@userdata_fs_type@/f2fs/
+s/@userdata_mount_flags@/inlinecrypt,reserve_root=32768/
+s/@userdata_fsmgr_flags@/fileencryption=aes-256-xts:@filename_encryption_mode@:inlinecrypt_optimized,fscompress,checkpoint=fs/
diff --git a/shared/config/sed.hctr2 b/shared/config/sed.hctr2
new file mode 100644
index 0000000..23e320b
--- /dev/null
+++ b/shared/config/sed.hctr2
@@ -0,0 +1 @@
+s/@filename_encryption_mode@/aes-256-hctr2/
diff --git a/shared/config/ueventd.rc b/shared/config/ueventd.rc
index 9af7b8d..4e80bda 100644
--- a/shared/config/ueventd.rc
+++ b/shared/config/ueventd.rc
@@ -32,5 +32,11 @@
 /dev/gnss0 0666 system system
 /dev/gnss1 0666 system system
 
+# confirmation UI
+/dev/hvc8 0666 system system
+
+# uwb
+/dev/hvc9 0660 uwb uwb
+
 # Factory Reset Protection
 /dev/block/by-name/frp 0660 system system
diff --git a/shared/config/wpa_supplicant.rc b/shared/config/wpa_supplicant.rc
index 50c5faa..c8c19d6 100644
--- a/shared/config/wpa_supplicant.rc
+++ b/shared/config/wpa_supplicant.rc
@@ -5,5 +5,6 @@
     interface aidl android.hardware.wifi.supplicant.ISupplicant/default
     socket wpa_wlan0 dgram 660 wifi wifi
     group system wifi inet
+    user root
     disabled
     oneshot
diff --git a/shared/device.mk b/shared/device.mk
index 04dedbd..d860dda 100644
--- a/shared/device.mk
+++ b/shared/device.mk
@@ -35,10 +35,9 @@
 PRODUCT_VENDOR_PROPERTIES += \
     ro.vendor.boot_security_patch=$(BOOT_SECURITY_PATCH)
 
-PRODUCT_SOONG_NAMESPACES += device/generic/goldfish-opengl # for vulkan
 PRODUCT_SOONG_NAMESPACES += device/generic/goldfish # for audio and wifi
 
-PRODUCT_SHIPPING_API_LEVEL := 33
+PRODUCT_SHIPPING_API_LEVEL := 34
 PRODUCT_USE_DYNAMIC_PARTITIONS := true
 DISABLE_RILD_OEM_HOOK := true
 
@@ -49,40 +48,20 @@
 
 PRODUCT_SET_DEBUGFS_RESTRICTIONS := true
 
-PRODUCT_SOONG_NAMESPACES += device/generic/goldfish-opengl # for vulkan
-
 PRODUCT_FS_COMPRESSION := 1
-TARGET_RO_FILE_SYSTEM_TYPE ?= ext4
+TARGET_RO_FILE_SYSTEM_TYPE ?= erofs
 TARGET_USERDATAIMAGE_FILE_SYSTEM_TYPE ?= f2fs
 TARGET_USERDATAIMAGE_PARTITION_SIZE ?= 6442450944
 
 TARGET_VULKAN_SUPPORT ?= true
-TARGET_ENABLE_HOST_BLUETOOTH_EMULATION ?= true
-TARGET_USE_BTLINUX_HAL_IMPL ?= true
-
-# TODO(b/65201432): Swiftshader needs to create executable memory.
-PRODUCT_REQUIRES_INSECURE_EXECMEM_FOR_SWIFTSHADER := true
-
-AB_OTA_UPDATER := true
-AB_OTA_PARTITIONS += \
-    boot \
-    init_boot \
-    odm \
-    odm_dlkm \
-    product \
-    system \
-    system_dlkm \
-    system_ext \
-    vbmeta \
-    vbmeta_system \
-    vendor \
-    vendor_boot \
-    vendor_dlkm \
 
 # Enable Virtual A/B
 $(call inherit-product, $(SRC_TARGET_DIR)/product/virtual_ab_ota/android_t_baseline.mk)
 PRODUCT_VIRTUAL_AB_COMPRESSION_METHOD := gz
 
+PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.compression.threads=true
+PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.batch_writes=true
+
 # Enable Scoped Storage related
 $(call inherit-product, $(SRC_TARGET_DIR)/product/emulated_storage.mk)
 
@@ -90,10 +69,12 @@
 # partition, instead of the vendor partition, and do not need vendor
 # sepolicy
 PRODUCT_PRODUCT_PROPERTIES += \
+    remote_provisioning.enable_rkpd=true \
+    remote_provisioning.hostname=staging-remoteprovisioning.sandbox.googleapis.com \
     persist.adb.tcp.port=5555 \
     ro.com.google.locationfeatures=1 \
     persist.sys.fuse.passthrough.enable=true \
-    persist.sys.fuse.bpf.enable=false \
+    remote_provisioning.tee.rkp_only=1
 
 # Until we support adb keys on user builds, and fix logcat over serial,
 # spawn adbd by default without authorization for "adb logcat"
@@ -117,10 +98,8 @@
     persist.sys.zram_enabled=1 \
     ro.hardware.keystore_desede=true \
     ro.rebootescrow.device=/dev/block/pmem0 \
-    ro.vendor.hwcomposer.pmem=/dev/block/pmem1 \
     ro.incremental.enable=1 \
-    debug.c2.use_dmabufheaps=1 \
-    ro.camerax.extensions.enabled=true \
+    debug.c2.use_dmabufheaps=1
 
 LOCAL_BT_PROPERTIES ?= \
  vendor.ser.bt-uart?=/dev/hvc5 \
@@ -132,52 +111,37 @@
 PRODUCT_VENDOR_PROPERTIES += \
     wlan.driver.status=ok
 
-ifneq ($(LOCAL_DISABLE_OMX),true)
-# Codec 1.0 requires the OMX services
-DEVICE_MANIFEST_FILE += \
-    device/google/cuttlefish/shared/config/android.hardware.media.omx@1.0.xml
-endif
-
 PRODUCT_VENDOR_PROPERTIES += \
     debug.stagefright.c2inputsurface=-1
 
 # Enforce privapp permissions control.
 PRODUCT_VENDOR_PROPERTIES += ro.control_privapp_permissions?=enforce
 
-# aes-256-heh default is not supported in standard kernels.
-PRODUCT_VENDOR_PROPERTIES += ro.crypto.volume.filenames_mode=aes-256-cts
-
 # Copy preopted files from system_b on first boot
 PRODUCT_VENDOR_PROPERTIES += ro.cp_system_other_odex=1
 
 AB_OTA_POSTINSTALL_CONFIG += \
     RUN_POSTINSTALL_system=true \
     POSTINSTALL_PATH_system=system/bin/otapreopt_script \
-    FILESYSTEM_TYPE_system=ext4 \
+    FILESYSTEM_TYPE_system=erofs \
     POSTINSTALL_OPTIONAL_system=true
 
 AB_OTA_POSTINSTALL_CONFIG += \
     RUN_POSTINSTALL_vendor=true \
     POSTINSTALL_PATH_vendor=bin/checkpoint_gc \
-    FILESYSTEM_TYPE_vendor=ext4 \
+    FILESYSTEM_TYPE_vendor=erofs \
     POSTINSTALL_OPTIONAL_vendor=true
 
 # Userdata Checkpointing OTA GC
 PRODUCT_PACKAGES += \
     checkpoint_gc
 
-# Enable CameraX extension sample
-PRODUCT_PACKAGES += androidx.camera.extensions.impl sample_camera_extensions.xml
-
 # DRM service opt-in
 PRODUCT_VENDOR_PROPERTIES += drm.service.enabled=true
 
 # Call deleteAllKeys if vold detects a factory reset
 PRODUCT_VENDOR_PROPERTIES += ro.crypto.metadata_init_delete_all_keys.enabled=true
 
-PRODUCT_SOONG_NAMESPACES += hardware/google/camera
-PRODUCT_SOONG_NAMESPACES += hardware/google/camera/devices/EmulatedCamera
-
 #
 # Packages for various GCE-specific utilities
 #
@@ -189,8 +153,9 @@
     tombstone_producer \
     suspend_blocker \
     vsoc_input_service \
+    metrics_helper \
 
-$(call soong_config_append,cvd,launch_configs,cvd_config_auto.json cvd_config_foldable.json cvd_config_go.json cvd_config_phone.json cvd_config_slim.json cvd_config_tablet.json cvd_config_tv.json cvd_config_wear.json)
+$(call soong_config_append,cvd,launch_configs,cvd_config_auto.json cvd_config_auto_portrait.json cvd_config_auto_md.json cvd_config_foldable.json cvd_config_go.json cvd_config_phone.json cvd_config_slim.json cvd_config_tablet.json cvd_config_tv.json cvd_config_wear.json)
 $(call soong_config_append,cvd,grub_config,grub.cfg)
 
 #
@@ -204,41 +169,16 @@
     wificond \
 
 #
-# Packages for the OpenGL implementation
+# Package for AOSP QNS
 #
-
-# ANGLE provides an OpenGL implementation built on top of Vulkan.
 PRODUCT_PACKAGES += \
-    libEGL_angle \
-    libGLESv1_CM_angle \
-    libGLESv2_angle
-
-# GL implementation for virgl
-PRODUCT_PACKAGES += \
-    libGLES_mesa \
+    QualifiedNetworksService
 
 #
-# Packages for the Vulkan implementation
+# Package for AOSP GBA
 #
-ifeq ($(TARGET_VULKAN_SUPPORT),true)
 PRODUCT_PACKAGES += \
-    vulkan.ranchu \
-    libvulkan_enc \
-    vulkan.pastel
-endif
-
-# GL/Vk implementation for gfxstream
-PRODUCT_PACKAGES += \
-    libandroidemu \
-    libOpenglCodecCommon \
-    libOpenglSystemCommon \
-    libGLESv1_CM_emulation \
-    lib_renderControl_enc \
-    libEGL_emulation \
-    libGLESv2_enc \
-    libGLESv2_emulation \
-    libGLESv1_enc \
-    libGoldfishProfiler \
+    GbaService
 
 #
 # Packages for testing
@@ -258,13 +198,19 @@
 
 endif
 
+#
+# Satellite vendor service for CF
+#
+PRODUCT_PACKAGES += CFSatelliteService
+
 # PRODUCT_AAPT_CONFIG and PRODUCT_AAPT_PREF_CONFIG are intentionally not set to
 # pick up every density resources.
 
 #
 # Common manifest for all targets
 #
-DEVICE_MANIFEST_FILE += device/google/cuttlefish/shared/config/manifest.xml
+LOCAL_DEVICE_FCM_MANIFEST_FILE ?= device/google/cuttlefish/shared/config/manifest.xml
+DEVICE_MANIFEST_FILE += $(LOCAL_DEVICE_FCM_MANIFEST_FILE)
 
 #
 # General files
@@ -288,11 +234,6 @@
 PRODUCT_COPY_FILES += \
     device/google/cuttlefish/shared/permissions/cuttlefish_excluded_hardware.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/cuttlefish_excluded_hardware.xml \
     frameworks/native/data/etc/android.hardware.audio.low_latency.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.hardware.audio.low_latency.xml \
-    frameworks/native/data/etc/android.hardware.camera.concurrent.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.hardware.camera.concurrent.xml \
-    frameworks/native/data/etc/android.hardware.camera.flash-autofocus.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.hardware.camera.flash-autofocus.xml \
-    frameworks/native/data/etc/android.hardware.camera.front.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.hardware.camera.front.xml \
-    frameworks/native/data/etc/android.hardware.camera.full.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.hardware.camera.full.xml \
-    frameworks/native/data/etc/android.hardware.camera.raw.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.hardware.camera.raw.xml \
     frameworks/native/data/etc/android.hardware.ethernet.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.hardware.ethernet.xml \
     frameworks/native/data/etc/android.hardware.location.gps.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.hardware.location.gps.xml \
     frameworks/native/data/etc/android.hardware.reboot_escrow.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.hardware.reboot_escrow.xml \
@@ -303,10 +244,8 @@
     frameworks/native/data/etc/android.hardware.wifi.passpoint.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.hardware.wifi.passpoint.xml \
     frameworks/native/data/etc/android.software.ipsec_tunnels.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.software.ipsec_tunnels.xml \
     frameworks/native/data/etc/android.software.sip.voip.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.software.sip.voip.xml \
-    frameworks/native/data/etc/android.software.verified_boot.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.software.verified_boot.xml \
-    hardware/google/camera/devices/EmulatedCamera/hwl/configs/emu_camera_back.json:$(TARGET_COPY_OUT_VENDOR)/etc/config/emu_camera_back.json \
-    hardware/google/camera/devices/EmulatedCamera/hwl/configs/emu_camera_front.json:$(TARGET_COPY_OUT_VENDOR)/etc/config/emu_camera_front.json \
-    hardware/google/camera/devices/EmulatedCamera/hwl/configs/emu_camera_depth.json:$(TARGET_COPY_OUT_VENDOR)/etc/config/emu_camera_depth.json
+    frameworks/native/data/etc/android.software.verified_boot.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.software.verified_boot.xml
+
 endif
 PRODUCT_COPY_FILES += \
     frameworks/native/data/etc/android.hardware.consumerir.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.hardware.consumerir.xml \
@@ -319,15 +258,17 @@
     device/google/cuttlefish/shared/config/media_profiles.xml:$(TARGET_COPY_OUT_VENDOR)/etc/media_profiles_V1_0.xml \
     device/google/cuttlefish/shared/permissions/privapp-permissions-cuttlefish.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/privapp-permissions-cuttlefish.xml \
     frameworks/av/media/libeffects/data/audio_effects.xml:$(TARGET_COPY_OUT_VENDOR)/etc/audio_effects.xml \
+    hardware/interfaces/audio/aidl/default/audio_effects_config.xml:$(TARGET_COPY_OUT_VENDOR)/etc/audio_effects_config.xml \
     frameworks/av/media/libstagefright/data/media_codecs_google_audio.xml:$(TARGET_COPY_OUT_VENDOR)/etc/media_codecs_google_audio.xml \
     frameworks/av/media/libstagefright/data/media_codecs_google_telephony.xml:$(TARGET_COPY_OUT_VENDOR)/etc/media_codecs_google_telephony.xml \
-    frameworks/av/services/audiopolicy/config/a2dp_in_audio_policy_configuration_7_0.xml:$(TARGET_COPY_OUT_VENDOR)/etc/a2dp_in_audio_policy_configuration_7_0.xml \
     frameworks/av/services/audiopolicy/config/bluetooth_audio_policy_configuration_7_0.xml:$(TARGET_COPY_OUT_VENDOR)/etc/bluetooth_audio_policy_configuration_7_0.xml \
+    frameworks/av/services/audiopolicy/config/usb_audio_policy_configuration.xml:$(TARGET_COPY_OUT_VENDOR)/etc/usb_audio_policy_configuration.xml \
     frameworks/av/services/audiopolicy/config/r_submix_audio_policy_configuration.xml:$(TARGET_COPY_OUT_VENDOR)/etc/r_submix_audio_policy_configuration.xml \
     frameworks/av/services/audiopolicy/config/audio_policy_volumes.xml:$(TARGET_COPY_OUT_VENDOR)/etc/audio_policy_volumes.xml \
     frameworks/av/services/audiopolicy/config/default_volume_tables.xml:$(TARGET_COPY_OUT_VENDOR)/etc/default_volume_tables.xml \
     frameworks/av/services/audiopolicy/config/surround_sound_configuration_5_0.xml:$(TARGET_COPY_OUT_VENDOR)/etc/surround_sound_configuration_5_0.xml \
     device/google/cuttlefish/shared/config/task_profiles.json:$(TARGET_COPY_OUT_VENDOR)/etc/task_profiles.json \
+    frameworks/native/data/etc/android.software.credentials.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.software.credentials.xml \
 
 ifeq ($(LOCAL_PREFER_VENDOR_APEX),true)
 PRODUCT_PACKAGES += com.google.cf.input.config
@@ -339,43 +280,30 @@
     device/google/cuttlefish/shared/config/input/Crosvm_Virtio_Multitouch_Touchscreen_3.idc:$(TARGET_COPY_OUT_VENDOR)/usr/idc/Crosvm_Virtio_Multitouch_Touchscreen_3.idc
 endif
 
-PRODUCT_COPY_FILES += \
-    device/google/cuttlefish/shared/config/fstab.f2fs:$(TARGET_COPY_OUT_VENDOR_RAMDISK)/first_stage_ramdisk/fstab.f2fs \
-    device/google/cuttlefish/shared/config/fstab.f2fs:$(TARGET_COPY_OUT_VENDOR)/etc/fstab.f2fs \
-    device/google/cuttlefish/shared/config/fstab.f2fs:$(TARGET_COPY_OUT_RECOVERY)/root/first_stage_ramdisk/fstab.f2fs \
-    device/google/cuttlefish/shared/config/fstab.ext4:$(TARGET_COPY_OUT_VENDOR_RAMDISK)/first_stage_ramdisk/fstab.ext4 \
-    device/google/cuttlefish/shared/config/fstab.ext4:$(TARGET_COPY_OUT_VENDOR)/etc/fstab.ext4 \
-    device/google/cuttlefish/shared/config/fstab.ext4:$(TARGET_COPY_OUT_RECOVERY)/root/first_stage_ramdisk/fstab.ext4
-
-ifeq ($(TARGET_VULKAN_SUPPORT),true)
-ifneq ($(LOCAL_PREFER_VENDOR_APEX),true)
-PRODUCT_COPY_FILES += \
-    frameworks/native/data/etc/android.hardware.vulkan.level-0.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.hardware.vulkan.level.xml \
-    frameworks/native/data/etc/android.hardware.vulkan.version-1_0_3.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.hardware.vulkan.version.xml \
-    frameworks/native/data/etc/android.software.vulkan.deqp.level-2022-03-01.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.software.vulkan.deqp.level.xml \
-    frameworks/native/data/etc/android.software.opengles.deqp.level-2022-03-01.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.software.opengles.deqp.level.xml
-endif
-endif
+PRODUCT_PACKAGES += \
+    fstab.cf.f2fs.hctr2 \
+    fstab.cf.f2fs.hctr2.vendor_ramdisk \
+    fstab.cf.f2fs.cts \
+    fstab.cf.f2fs.cts.vendor_ramdisk \
+    fstab.cf.ext4.hctr2 \
+    fstab.cf.ext4.hctr2.vendor_ramdisk \
+    fstab.cf.ext4.cts \
+    fstab.cf.ext4.cts.vendor_ramdisk \
 
 # Packages for HAL implementations
 
 #
-# Atrace HAL
-#
-PRODUCT_PACKAGES += \
-    android.hardware.atrace@1.0-service
-
-#
 # Weaver aidl HAL
 #
-PRODUCT_PACKAGES += \
-    android.hardware.weaver-service.example
+# TODO(b/262418065) Add a real weaver implementation
 
 #
 # IR aidl HAL
 #
 PRODUCT_PACKAGES += \
-	android.hardware.ir-service.example
+	android.hardware.ir-service.example \
+	consumerir.default
+
 
 #
 # OemLock aidl HAL
@@ -384,65 +312,22 @@
     android.hardware.oemlock-service.example
 
 #
-# Authsecret HAL
-#
-PRODUCT_PACKAGES += \
-    android.hardware.authsecret@1.0-service
-
-#
 # Authsecret AIDL HAL
 #
 PRODUCT_PACKAGES += \
     android.hardware.authsecret-service.example
-#
-# Hardware Composer HAL
-#
-# The device needs to avoid having both hwcomposer2.4 and hwcomposer3
-# services running at the same time so make the user manually enables
-# in order to run with --gpu_mode=drm.
-ifeq ($(TARGET_ENABLE_DRMHWCOMPOSER),true)
-DEVICE_MANIFEST_FILE += \
-    device/google/cuttlefish/shared/config/manifest_android.hardware.graphics.composer@2.4-service.xml
-
-PRODUCT_PACKAGES += \
-    android.hardware.graphics.composer@2.4-service \
-    hwcomposer.drm
-else
-PRODUCT_PACKAGES += \
-    android.hardware.graphics.composer3-service.ranchu
-endif
-
-#
-# Gralloc HAL
-#
-PRODUCT_PACKAGES += \
-    android.hardware.graphics.allocator-V1-service.minigbm \
-    android.hardware.graphics.mapper@4.0-impl.minigbm
 
 #
 # Bluetooth HAL and Compatibility Bluetooth library (for older revs).
 #
 ifneq ($(LOCAL_PREFER_VENDOR_APEX),true)
-ifeq ($(LOCAL_BLUETOOTH_PRODUCT_PACKAGE),)
-ifeq ($(TARGET_ENABLE_HOST_BLUETOOTH_EMULATION),true)
-ifeq ($(TARGET_USE_BTLINUX_HAL_IMPL),true)
-    LOCAL_BLUETOOTH_PRODUCT_PACKAGE := android.hardware.bluetooth@1.1-service.btlinux
-else
-    LOCAL_BLUETOOTH_PRODUCT_PACKAGE := android.hardware.bluetooth@1.1-service.remote
-endif
-else
-    LOCAL_BLUETOOTH_PRODUCT_PACKAGE := android.hardware.bluetooth@1.1-service.sim
-endif
-    DEVICE_MANIFEST_FILE += device/google/cuttlefish/shared/config/manifest_android.hardware.bluetooth@1.1-service.xml
-endif
-
 PRODUCT_COPY_FILES +=\
     frameworks/native/data/etc/android.hardware.bluetooth.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.hardware.bluetooth.xml \
     frameworks/native/data/etc/android.hardware.bluetooth_le.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.hardware.bluetooth_le.xml
 
-PRODUCT_PACKAGES += $(LOCAL_BLUETOOTH_PRODUCT_PACKAGE)
-
-PRODUCT_PACKAGES += android.hardware.bluetooth.audio@2.1-impl  bt_vhci_forwarder
+PRODUCT_PACKAGES += \
+    android.hardware.bluetooth-service.default \
+    bt_vhci_forwarder
 
 # Bluetooth initialization configuration is copied to the init folder here instead of being added
 # as an init_rc attribute of the bt_vhci_forward binary.  The bt_vhci_forward binary is used by
@@ -451,7 +336,7 @@
     device/google/cuttlefish/guest/commands/bt_vhci_forwarder/bt_vhci_forwarder.rc:$(TARGET_COPY_OUT_VENDOR)/etc/init/bt_vhci_forwarder.rc
 
 else
-PRODUCT_PACKAGES += com.google.cf.bt android.hardware.bluetooth.audio@2.1-impl
+PRODUCT_PACKAGES += com.google.cf.bt
 endif
 
 #
@@ -462,12 +347,38 @@
 
 #
 # Audio HAL
+# Note: aidl services are loaded, however they are not fully functional yet,
+#       and are not used by the framework, only by VTS tests.
 #
 ifndef LOCAL_AUDIO_PRODUCT_PACKAGE
 LOCAL_AUDIO_PRODUCT_PACKAGE := \
     android.hardware.audio.service \
     android.hardware.audio@7.1-impl.ranchu \
-    android.hardware.audio.effect@7.0-impl
+    android.hardware.audio.effect@7.0-impl \
+    android.hardware.audio.service-aidl.example \
+    android.hardware.audio.effect.service-aidl.example \
+    libaecsw \
+    libagc1sw \
+    libagc2sw \
+    libbassboostsw \
+    libbundleaidl \
+    libdownmixaidl \
+    libdynamicsprocessingaidl \
+    libenvreverbsw \
+    libequalizersw \
+    libextensioneffect \
+    libhapticgeneratoraidl \
+    libloudnessenhanceraidl \
+    libnssw \
+    libpreprocessingaidl \
+    libpresetreverbsw \
+    libreverbaidl \
+    libtinyxml2 \
+    libvirtualizersw \
+    libvisualizeraidl \
+    libvolumesw
+DEVICE_MANIFEST_FILE += \
+    device/google/cuttlefish/guest/hals/audio/effects/manifest.xml
 endif
 
 ifndef LOCAL_AUDIO_PRODUCT_COPY_FILES
@@ -477,7 +388,8 @@
     frameworks/av/services/audiopolicy/config/r_submix_audio_policy_configuration.xml:$(TARGET_COPY_OUT_VENDOR)/etc/r_submix_audio_policy_configuration.xml \
     frameworks/av/services/audiopolicy/config/audio_policy_volumes.xml:$(TARGET_COPY_OUT_VENDOR)/etc/audio_policy_volumes.xml \
     frameworks/av/services/audiopolicy/config/default_volume_tables.xml:$(TARGET_COPY_OUT_VENDOR)/etc/default_volume_tables.xml \
-    frameworks/av/media/libeffects/data/audio_effects.xml:$(TARGET_COPY_OUT_VENDOR)/etc/audio_effects.xml
+    frameworks/av/media/libeffects/data/audio_effects.xml:$(TARGET_COPY_OUT_VENDOR)/etc/audio_effects.xml \
+    hardware/interfaces/audio/aidl/default/audio_effects_config.xml:$(TARGET_COPY_OUT_VENDOR)/etc/audio_effects_config.xml
 endif
 
 PRODUCT_PACKAGES += $(LOCAL_AUDIO_PRODUCT_PACKAGE)
@@ -485,18 +397,6 @@
 DEVICE_PACKAGE_OVERLAYS += $(LOCAL_AUDIO_DEVICE_PACKAGE_OVERLAYS)
 
 #
-# BiometricsFace HAL (HIDL)
-#
-PRODUCT_PACKAGES += \
-    android.hardware.biometrics.face@1.0-service.example
-
-#
-# BiometricsFingerprint HAL (HIDL)
-#
-PRODUCT_PACKAGES += \
-    android.hardware.biometrics.fingerprint@2.2-service.example
-
-#
 # BiometricsFace HAL (AIDL)
 #
 PRODUCT_PACKAGES += \
@@ -518,14 +418,15 @@
 # Drm HAL
 #
 PRODUCT_PACKAGES += \
-    android.hardware.drm@latest-service.clearkey \
-    android.hardware.drm@latest-service.widevine
+    android.hardware.drm@latest-service.clearkey
+
+-include vendor/widevine/libwvdrmengine/apex/device/device.mk
 
 #
 # Confirmation UI HAL
 #
 ifeq ($(LOCAL_CONFIRMATIONUI_PRODUCT_PACKAGE),)
-    LOCAL_CONFIRMATIONUI_PRODUCT_PACKAGE := android.hardware.confirmationui@1.0-service.cuttlefish
+    LOCAL_CONFIRMATIONUI_PRODUCT_PACKAGE := android.hardware.confirmationui-service.cuttlefish
 endif
 PRODUCT_PACKAGES += $(LOCAL_CONFIRMATIONUI_PRODUCT_PACKAGE)
 
@@ -538,30 +439,10 @@
 PRODUCT_PACKAGES += $(LOCAL_DUMPSTATE_PRODUCT_PACKAGE)
 
 #
-# Camera
-#
-ifeq ($(TARGET_USE_VSOCK_CAMERA_HAL_IMPL),true)
-PRODUCT_PACKAGES += \
-    android.hardware.camera.provider@2.7-external-vsock-service \
-    android.hardware.camera.provider@2.7-impl-cuttlefish
-DEVICE_MANIFEST_FILE += \
-    device/google/cuttlefish/guest/hals/camera/manifest.xml
-else
-ifeq ($(LOCAL_PREFER_VENDOR_APEX),true)
-PRODUCT_PACKAGES += com.google.emulated.camera.provider.hal
-PRODUCT_PACKAGES += com.google.emulated.camera.provider.hal.fastscenecycle
-endif
-PRODUCT_PACKAGES += \
-    android.hardware.camera.provider@2.7-service-google \
-    libgooglecamerahwl_impl \
-    android.hardware.camera.provider@2.7-impl-google \
-
-endif
-#
 # Gatekeeper
 #
 ifeq ($(LOCAL_GATEKEEPER_PRODUCT_PACKAGE),)
-       LOCAL_GATEKEEPER_PRODUCT_PACKAGE := android.hardware.gatekeeper@1.0-service.remote
+       LOCAL_GATEKEEPER_PRODUCT_PACKAGE := android.hardware.gatekeeper-service.remote
 endif
 PRODUCT_PACKAGES += \
     $(LOCAL_GATEKEEPER_PRODUCT_PACKAGE)
@@ -602,23 +483,14 @@
 # Sensors
 #
 ifeq ($(LOCAL_SENSOR_PRODUCT_PACKAGE),)
-# TODO(b/210883464): Convert the sensors APEX to use the new AIDL impl.
-#ifeq ($(LOCAL_PREFER_VENDOR_APEX),true)
-#       LOCAL_SENSOR_PRODUCT_PACKAGE := com.android.hardware.sensors
-#else
+ifeq ($(LOCAL_PREFER_VENDOR_APEX),true)
+       LOCAL_SENSOR_PRODUCT_PACKAGE := com.android.hardware.sensors
+else
        LOCAL_SENSOR_PRODUCT_PACKAGE := android.hardware.sensors-service.example
-#endif
+endif
 endif
 PRODUCT_PACKAGES += \
     $(LOCAL_SENSOR_PRODUCT_PACKAGE)
-#
-# Thermal (mock)
-#
-ifeq ($(LOCAL_PREFER_VENDOR_APEX),true)
-PRODUCT_PACKAGES += com.android.hardware.thermal.mock
-else
-PRODUCT_PACKAGES += android.hardware.thermal@2.0-service.mock
-endif
 
 #
 # Lights
@@ -630,25 +502,21 @@
 # KeyMint HAL
 #
 ifeq ($(LOCAL_KEYMINT_PRODUCT_PACKAGE),)
-       LOCAL_KEYMINT_PRODUCT_PACKAGE := android.hardware.security.keymint-service.remote
-# Indicate that this KeyMint includes support for the ATTEST_KEY key purpose.
+    LOCAL_KEYMINT_PRODUCT_PACKAGE := android.hardware.security.keymint-service.rust
+endif
+
+ifeq ($(LOCAL_KEYMINT_PRODUCT_PACKAGE),android.hardware.security.keymint-service.rust)
+    # KeyMint HAL has been overridden to force use of the Rust reference implementation.
+    # Set the build config for secure_env to match.
+    $(call soong_config_set,secure_env,keymint_impl,rust)
+endif
+
+PRODUCT_PACKAGES += \
+    $(LOCAL_KEYMINT_PRODUCT_PACKAGE) \
+
+# Indicate that KeyMint includes support for the ATTEST_KEY key purpose.
 PRODUCT_COPY_FILES += \
     frameworks/native/data/etc/android.hardware.keystore.app_attest_key.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.hardware.keystore.app_attest_key.xml
-endif
- PRODUCT_PACKAGES += \
-    $(LOCAL_KEYMINT_PRODUCT_PACKAGE)
-
-# Keymint configuration
-ifneq ($(LOCAL_PREFER_VENDOR_APEX),true)
-PRODUCT_COPY_FILES += \
-    frameworks/native/data/etc/android.software.device_id_attestation.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.software.device_id_attestation.xml
-endif
-
-#
-# Dice HAL
-#
-PRODUCT_PACKAGES += \
-    android.hardware.security.dice-service.non-secure-software
 
 #
 # Power and PowerStats HALs
@@ -663,25 +531,32 @@
 endif
 
 #
+# Tetheroffload HAL
+#
+PRODUCT_PACKAGES += \
+    android.hardware.tetheroffload-service.example
+
+#
+# Thermal HAL
+#
+LOCAL_THERMAL_HAL_PRODUCT_PACKAGE ?= android.hardware.thermal-service.example
+PRODUCT_PACKAGES += $(LOCAL_THERMAL_HAL_PRODUCT_PACKAGE)
+
+#
 # NeuralNetworks HAL
 #
 PRODUCT_PACKAGES += \
-    android.hardware.neuralnetworks@1.3-service-sample-all \
-    android.hardware.neuralnetworks@1.3-service-sample-limited \
     android.hardware.neuralnetworks-service-sample-all \
     android.hardware.neuralnetworks-service-sample-limited \
     android.hardware.neuralnetworks-shim-service-sample
 
-#
 # USB
-# TODO(b/227791019): Convert USB AIDL HAL to APEX
-# ifeq ($(LOCAL_PREFER_VENDOR_APEX),true)
-# PRODUCT_PACKAGES += \
-#    com.android.hardware.usb
-#else
 PRODUCT_PACKAGES += \
-    android.hardware.usb-service.example
-#endif
+    com.android.hardware.usb
+
+# USB Gadget
+PRODUCT_PACKAGES += \
+    android.hardware.usb.gadget-service.example
 
 # Vibrator HAL
 ifeq ($(LOCAL_PREFER_VENDOR_APEX),true)
@@ -693,9 +568,9 @@
 
 # BootControl HAL
 PRODUCT_PACKAGES += \
-    android.hardware.boot@1.2-impl \
-    android.hardware.boot@1.2-impl.recovery \
-    android.hardware.boot@1.2-service
+    android.hardware.boot-service.default \
+    android.hardware.boot-service.default_recovery
+
 
 # RebootEscrow HAL
 PRODUCT_PACKAGES += \
@@ -732,89 +607,69 @@
 
 # wifi
 ifeq ($(LOCAL_PREFER_VENDOR_APEX),true)
-ifneq ($(PRODUCT_ENFORCE_MAC80211_HWSIM),true)
+# Add com.android.hardware.wifi for android.hardware.wifi-service
+PRODUCT_PACKAGES += com.android.hardware.wifi
+# Add com.google.cf.wifi for hostapd, wpa_supplicant, etc.
 PRODUCT_PACKAGES += com.google.cf.wifi
-# Demonstrate multi-installed vendor APEXes by installing another wifi HAL vendor APEX
-# which does not include the passpoint feature XML.
-#
-# The default is set in BoardConfig.mk using bootconfig.
-# This can be changed at CVD launch-time using
-#     --extra_bootconfig_args "androidboot.vendor.apex.com.android.wifi.hal:=X"
-# or post-launch, at runtime using
-#     setprop persist.vendor.apex.com.android.wifi.hal X && reboot
-# where X is the name of the APEX file to use.
-PRODUCT_PACKAGES += com.google.cf.wifi.no-passpoint
-
 $(call add_soong_config_namespace, wpa_supplicant)
 $(call add_soong_config_var_value, wpa_supplicant, platform_version, $(PLATFORM_VERSION))
 $(call add_soong_config_var_value, wpa_supplicant, nl80211_driver, CONFIG_DRIVER_NL80211_QCA)
-PRODUCT_VENDOR_PROPERTIES += ro.vendor.wifi_impl=virt_wifi
-else
-PRODUCT_SOONG_NAMESPACES += device/google/cuttlefish/apex/com.google.cf.wifi_hwsim
-PRODUCT_PACKAGES += com.google.cf.wifi_hwsim
-$(call add_soong_config_namespace, wpa_supplicant)
-$(call add_soong_config_var_value, wpa_supplicant, platform_version, $(PLATFORM_VERSION))
-$(call add_soong_config_var_value, wpa_supplicant, nl80211_driver, CONFIG_DRIVER_NL80211_QCA)
-PRODUCT_VENDOR_PROPERTIES += ro.vendor.wifi_impl=mac8011_hwsim_virtio
 
-$(call soong_config_append,cvdhost,enforce_mac80211_hwsim,true)
-endif
 else
-
 PRODUCT_PACKAGES += \
     rename_netiface \
-    wpa_supplicant
+    wpa_supplicant \
+    setup_wifi \
+    mac80211_create_radios \
+    hostapd \
+    android.hardware.wifi-service \
+    init.wifi
 PRODUCT_COPY_FILES += \
     device/google/cuttlefish/shared/config/wpa_supplicant.rc:$(TARGET_COPY_OUT_VENDOR)/etc/init/wpa_supplicant.rc
 
+# VirtWifi interface configuration
+ifeq ($(DEVICE_VIRTWIFI_PORT),)
+    DEVICE_VIRTWIFI_PORT := eth2
+endif
+PRODUCT_VENDOR_PROPERTIES += ro.vendor.virtwifi.port=${DEVICE_VIRTWIFI_PORT}
+
 # WLAN driver configuration files
 ifndef LOCAL_WPA_SUPPLICANT_OVERLAY
 LOCAL_WPA_SUPPLICANT_OVERLAY := $(LOCAL_PATH)/config/wpa_supplicant_overlay.conf
 endif
+
 ifndef LOCAL_P2P_SUPPLICANT
 LOCAL_P2P_SUPPLICANT := $(LOCAL_PATH)/config/p2p_supplicant.conf
 endif
+
 PRODUCT_COPY_FILES += \
     external/wpa_supplicant_8/wpa_supplicant/wpa_supplicant_template.conf:$(TARGET_COPY_OUT_VENDOR)/etc/wifi/wpa_supplicant.conf \
     $(LOCAL_WPA_SUPPLICANT_OVERLAY):$(TARGET_COPY_OUT_VENDOR)/etc/wifi/wpa_supplicant_overlay.conf \
     $(LOCAL_P2P_SUPPLICANT):$(TARGET_COPY_OUT_VENDOR)/etc/wifi/p2p_supplicant.conf
-
-ifeq ($(PRODUCT_ENFORCE_MAC80211_HWSIM),true)
-PRODUCT_PACKAGES += \
-    mac80211_create_radios \
-    hostapd \
-    android.hardware.wifi@1.0-service \
-    init.wifi.sh
-
-PRODUCT_VENDOR_PROPERTIES += ro.vendor.wifi_impl=mac8011_hwsim_virtio
-
-$(call soong_config_append,cvdhost,enforce_mac80211_hwsim,true)
-
-else
-PRODUCT_PACKAGES += setup_wifi
-PRODUCT_VENDOR_PROPERTIES += ro.vendor.wifi_impl=virt_wifi
 endif
 
+# Wifi Runtime Resource Overlay
+PRODUCT_PACKAGES += \
+    CuttlefishTetheringOverlay \
+    CuttlefishWifiOverlay
+
+ifeq ($(PRODUCT_ENFORCE_MAC80211_HWSIM),true)
+PRODUCT_VENDOR_PROPERTIES += ro.vendor.wifi_impl=mac8011_hwsim_virtio
+$(call soong_config_append,cvdhost,enforce_mac80211_hwsim,true)
+else
+PRODUCT_VENDOR_PROPERTIES += ro.vendor.wifi_impl=virt_wifi
 endif
 
 # UWB HAL
 PRODUCT_PACKAGES += \
     android.hardware.uwb-service
+PRODUCT_COPY_FILES += \
+    device/google/cuttlefish/guest/hals/uwb/uwb-service.rc:$(TARGET_COPY_OUT_VENDOR)/etc/init/uwb-service.rc
 
-ifeq ($(PRODUCT_ENFORCE_MAC80211_HWSIM),true)
-# Wifi Runtime Resource Overlay
-PRODUCT_PACKAGES += \
-    CuttlefishTetheringOverlay \
-    CuttlefishWifiOverlay
-endif
 
 # Host packages to install
 PRODUCT_HOST_PACKAGES += socket_vsock_proxy
 
-PRODUCT_EXTRA_VNDK_VERSIONS := 28 29 30 31
-
-PRODUCT_SOONG_NAMESPACES += external/mesa3d
-
 #for Confirmation UI
 PRODUCT_SOONG_NAMESPACES += vendor/google_devices/common/proprietary/confirmatioui_hal
 
@@ -825,19 +680,14 @@
 
 # Enable GPU-intensive background blur support on Cuttlefish when requested by apps
 PRODUCT_VENDOR_PROPERTIES += \
-    ro.surface_flinger.supports_background_blur 1
+    ro.surface_flinger.supports_background_blur=1
 
-# Set support one-handed mode
-PRODUCT_PRODUCT_PROPERTIES += \
-    ro.support_one_handed_mode=true
+# Disable GPU-intensive background blur for widget picker
+PRODUCT_SYSTEM_PROPERTIES += \
+    ro.launcher.depth.widget=0
 
-# Set one_handed_mode screen translate offset percentage
-PRODUCT_PRODUCT_PROPERTIES += \
-    persist.debug.one_handed_offset_percentage=50
-
-# Set one_handed_mode translate animation duration milliseconds
-PRODUCT_PRODUCT_PROPERTIES += \
-    persist.debug.one_handed_translate_animation_duration=300
+# Start fingerprint virtual HAL process
+PRODUCT_VENDOR_PROPERTIES += ro.vendor.fingerprint_virtual_hal_start=true
 
 # Vendor Dlkm Locader
 PRODUCT_PACKAGES += \
@@ -847,5 +697,9 @@
 PRODUCT_PACKAGES += \
     android.hardware.nfc-service.cuttlefish
 
+# CAS AIDL HAL
+PRODUCT_PACKAGES += \
+    android.hardware.cas-service.example
+
 PRODUCT_COPY_FILES += \
     device/google/cuttlefish/shared/config/pci.ids:$(TARGET_COPY_OUT_VENDOR)/pci.ids
diff --git a/shared/foldable/device_state_configuration.xml b/shared/foldable/device_state_configuration.xml
index 9618b11..877a583 100644
--- a/shared/foldable/device_state_configuration.xml
+++ b/shared/foldable/device_state_configuration.xml
@@ -34,4 +34,11 @@
       </lid-switch>
     </conditions>
   </device-state>
+  <device-state>
+    <identifier>3</identifier>
+    <name>REAR_DISPLAY_MODE</name>
+    <flags>
+      <flag>FLAG_EMULATED_ONLY</flag>
+    </flags>
+  </device-state>
 </device-state-config>
diff --git a/shared/foldable/display_layout_configuration.xml b/shared/foldable/display_layout_configuration.xml
index 54b76b1..c77e8f4 100644
--- a/shared/foldable/display_layout_configuration.xml
+++ b/shared/foldable/display_layout_configuration.xml
@@ -5,11 +5,11 @@
     <state>0</state>
 
     <display enabled="false">
-      <address>4619827259835644672</address>
+      <address>4619827353912518656</address>
     </display>
 
     <display enabled="true" defaultDisplay="true">
-      <address>4619827551948147201</address>
+      <address>4619827353912518657</address>
     </display>
   </layout>
 
@@ -18,11 +18,11 @@
     <state>1</state>
 
     <display enabled="true" defaultDisplay="true">
-      <address>4619827259835644672</address>
+      <address>4619827353912518656</address>
     </display>
 
     <display enabled="false">
-      <address>4619827551948147201</address>
+      <address>4619827353912518657</address>
     </display>
   </layout>
 
@@ -31,10 +31,23 @@
     <state>2</state>
 
     <display enabled="true" defaultDisplay="true">
-      <address>4619827259835644672</address>
+      <address>4619827353912518656</address>
     </display>
 
     <display enabled="false">
+      <address>4619827353912518657</address>
+    </display>
+  </layout>
+
+  <layout>
+  <!-- REAR_DISPLAY_MODE: display0 disabled, display1 enabled -->
+    <state>3</state>
+
+    <display enabled="false">
+      <address>4619827259835644672</address>
+    </display>
+
+    <display enabled="true" defaultDisplay="true">
       <address>4619827551948147201</address>
     </display>
   </layout>
diff --git a/shared/foldable/display_settings.xml b/shared/foldable/display_settings.xml
index 6ad8287..95a3274 100644
--- a/shared/foldable/display_settings.xml
+++ b/shared/foldable/display_settings.xml
@@ -3,6 +3,6 @@
     <config identifier="0" />
     <!-- Allow rotation of fixed-orientation activities on the inner screen. -->
     <display
-        name="local:4619827259835644672"
+        name="local:4619827353912518656"
         ignoreOrientationRequest="true"/>
 </display-settings>
diff --git a/shared/foldable/overlay/frameworks/base/core/res/res/values/config.xml b/shared/foldable/overlay/frameworks/base/core/res/res/values/config.xml
index b0b42ab..b101b36 100644
--- a/shared/foldable/overlay/frameworks/base/core/res/res/values/config.xml
+++ b/shared/foldable/overlay/frameworks/base/core/res/res/values/config.xml
@@ -24,12 +24,46 @@
       <item>0:1</item> <!-- CLOSED : STATE_FLAT -->
       <item>1:2</item> <!-- HALF_OPENED : STATE_HALF_OPENED -->
       <item>2:3</item> <!-- OPENED : STATE_FLIPPED -->
+      <item>3:1</item> <!-- REAR_DISPLAY: STATE_FLAT -->
   </string-array>
-  <!-- The device states (supplied by DeviceStateManager) that should be treated as folded by the
+
+  <!-- Map of DeviceState to rotation lock setting. Each entry must be in the format "key:value",
+     or "key:value:fallback_key" for example: "0:1" or "2:0:1". The keys are device states, and
+     the values are one of Settings.Secure.DeviceStateRotationLockSetting.
+     The fallback is a key to a device state that can be specified when the value is
+     Settings.Secure.DEVICE_STATE_ROTATION_LOCK_IGNORED.
+ -->
+  <string-array name="config_perDeviceStateRotationLockDefaults" translatable="false">
+    <item>0:1</item> <!-- CLOSED -> LOCKED -->
+    <item>1:0:2</item> <!-- HALF_OPENED -> IGNORED and fallback to device state OPENED -->
+    <item>2:2</item> <!-- OPENED -> UNLOCKED -->
+    <item>3:0:0</item> <!-- REAR_DISPLAY -> IGNORED and fallback to device state CLOSED -->
+  </string-array>
+
+  <!-- The device states (supplied by DeviceStateManager) that should be treated as open by the
        display fold controller. -->
+  <integer-array name="config_openDeviceStates" translatable="false">
+    <item>2</item> <!-- OPEN -->
+  </integer-array>
+
+  <!-- The device states (supplied by DeviceStateManager) that should be treated as folded by the
+       display fold controller. This also controls the folded bit in CameraServiceProxy. -->
   <integer-array name="config_foldedDeviceStates" translatable="false">
     <item>0</item> <!-- CLOSED -->
   </integer-array>
+
+  <!-- The device states (supplied by DeviceStateManager) that should be treated as half folded by
+       the display fold controller. This also controls the folded bit in CameraServiceProxy. -->
+  <integer-array name="config_halfFoldedDeviceStates" translatable="false">
+    <item>1</item> <!-- HALF_OPENED -->
+  </integer-array>
+
+  <!-- The device states (supplied by DeviceStateManager) that should be treated as a rear display
+       state. Default is empty. -->
+  <integer-array name="config_rearDisplayDeviceStates" translatable="false">
+    <item>3</item> <!-- REAR_DISPLAY_STATE -->
+  </integer-array>
+
   <!-- Indicates whether to enable an animation when unfolding a device or not -->
   <bool name="config_unfoldTransitionEnabled">true</bool>
   <!-- Indicates whether to enable hinge angle sensor when using unfold animation -->
@@ -41,4 +75,13 @@
   <bool name="config_supportsSplitScreenMultiWindow">true</bool>
   <!-- Radius of the software rounded corners. -->
   <dimen name="rounded_corner_radius">34px</dimen>
+
+  <!-- List of the labels of requestable device state config values -->
+  <string-array name="config_deviceStatesAvailableForAppRequests">
+    <item>config_deviceStateRearDisplay</item>
+  </string-array>
+
+  <!-- Device state that corresponds to rear display mode, feature provided
+         through Jetpack WindowManager -->
+  <integer name="config_deviceStateRearDisplay">3</integer>
 </resources>
diff --git a/shared/go/device_vendor.mk b/shared/go/device_vendor.mk
index 85d64dd..f3e371c 100644
--- a/shared/go/device_vendor.mk
+++ b/shared/go/device_vendor.mk
@@ -18,28 +18,18 @@
 SYSTEM_EXT_MANIFEST_FILES += device/google/cuttlefish/shared/config/system_ext_manifest.xml
 
 $(call inherit-product, $(SRC_TARGET_DIR)/product/handheld_vendor.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/product/telephony_vendor.mk)
 
 PRODUCT_COPY_FILES += \
     frameworks/native/data/etc/go_handheld_core_hardware.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/go_handheld_core_hardware.xml
 
 $(call inherit-product, frameworks/native/build/phone-xhdpi-2048-dalvik-heap.mk)
+$(call inherit-product, device/google/cuttlefish/shared/camera/device_vendor.mk)
+$(call inherit-product, device/google/cuttlefish/shared/graphics/device_vendor.mk)
+$(call inherit-product, device/google/cuttlefish/shared/swiftshader/device_vendor.mk)
+$(call inherit-product, device/google/cuttlefish/shared/telephony/device_vendor.mk)
+$(call inherit-product, device/google/cuttlefish/shared/virgl/device_vendor.mk)
 $(call inherit-product, device/google/cuttlefish/shared/device.mk)
 
-PRODUCT_VENDOR_PROPERTIES += \
-    keyguard.no_require_sim=true \
-    ro.cdma.home.operator.alpha=Android \
-    ro.cdma.home.operator.numeric=302780 \
-    ro.com.android.dataroaming=true \
-    ro.telephony.default_network=9 \
-
-TARGET_USES_CF_RILD ?= true
-ifeq ($(TARGET_USES_CF_RILD),true)
-PRODUCT_PACKAGES += \
-    libcuttlefish-ril-2 \
-    libcuttlefish-rild
-endif
-
 PRODUCT_PACKAGES += \
     cuttlefish_phone_overlay_frameworks_base_core \
     cuttlefish_go_phone_overlay_frameworks_base_core \
diff --git a/shared/go/overlays/core/Android.bp b/shared/go/overlays/core/Android.bp
index b46bcce..69bc6c8 100644
--- a/shared/go/overlays/core/Android.bp
+++ b/shared/go/overlays/core/Android.bp
@@ -1,3 +1,18 @@
+//
+// Copyright (C) 2021 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
 package {
     default_applicable_licenses: ["Android-Apache-2.0"],
 }
diff --git a/shared/graphics/BoardConfig.mk b/shared/graphics/BoardConfig.mk
new file mode 100644
index 0000000..4a385bd9
--- /dev/null
+++ b/shared/graphics/BoardConfig.mk
@@ -0,0 +1,17 @@
+#
+# Copyright 2022 The Android Open-Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+BOARD_VENDOR_SEPOLICY_DIRS += device/google/cuttlefish/shared/graphics/sepolicy
diff --git a/shared/graphics/device_vendor.mk b/shared/graphics/device_vendor.mk
new file mode 100644
index 0000000..b5f4102
--- /dev/null
+++ b/shared/graphics/device_vendor.mk
@@ -0,0 +1,86 @@
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# If a downstream target does not want any graphics support, do not
+# include this file!
+
+PRODUCT_COPY_FILES += \
+    device/google/cuttlefish/shared/config/graphics/init_graphics.vendor.rc:$(TARGET_COPY_OUT_VENDOR)/etc/init/init_graphics.vendor.rc \
+
+# Gfxstream common libraries:
+PRODUCT_SOONG_NAMESPACES += device/generic/goldfish-opengl
+PRODUCT_PACKAGES += \
+    libandroidemu \
+    libOpenglCodecCommon \
+    libOpenglSystemCommon \
+    libGLESv1_CM_emulation \
+    lib_renderControl_enc \
+    libEGL_emulation \
+    libGLESv2_enc \
+    libGLESv2_emulation \
+    libGLESv1_enc \
+    libGoldfishProfiler \
+
+# Gfxstream OpenGL implementation (OpenGL streamed to the host).
+PRODUCT_PACKAGES += \
+    libEGL_emulation \
+    libGLESv1_CM_emulation \
+    libGLESv1_enc \
+    libGLESv2_emulation \
+    libGLESv2_enc \
+
+# Gfxstream Vulkan implementation (Vulkan streamed to the host).
+ifeq ($(TARGET_VULKAN_SUPPORT),true)
+PRODUCT_PACKAGES += \
+    vulkan.ranchu \
+    libvulkan_enc
+endif
+
+ifeq ($(TARGET_VULKAN_SUPPORT),true)
+ifneq ($(LOCAL_PREFER_VENDOR_APEX),true)
+PRODUCT_COPY_FILES += \
+    frameworks/native/data/etc/android.hardware.vulkan.level-0.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.hardware.vulkan.level.xml \
+    frameworks/native/data/etc/android.hardware.vulkan.version-1_0_3.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.hardware.vulkan.version.xml \
+    frameworks/native/data/etc/android.software.vulkan.deqp.level-2023-03-01.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.software.vulkan.deqp.level.xml \
+    frameworks/native/data/etc/android.software.opengles.deqp.level-2023-03-01.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.software.opengles.deqp.level.xml
+endif
+endif
+
+#
+# Hardware Composer HAL
+#
+# The device needs to avoid having both hwcomposer2.4 and hwcomposer3
+# services running at the same time so make the user manually enables
+# in order to run with --gpu_mode=drm.
+ifeq ($(TARGET_ENABLE_DRMHWCOMPOSER),true)
+DEVICE_MANIFEST_FILE += \
+    device/google/cuttlefish/shared/config/manifest_android.hardware.graphics.composer@2.4-service.xml
+PRODUCT_PACKAGES += \
+    android.hardware.graphics.composer@2.4-service \
+    hwcomposer.drm
+else
+PRODUCT_PACKAGES += \
+    android.hardware.graphics.composer3-service.ranchu
+endif
+
+PRODUCT_VENDOR_PROPERTIES += \
+    ro.vendor.hwcomposer.pmem=/dev/block/pmem1
+
+# Gralloc implementation
+PRODUCT_PACKAGES += \
+    android.hardware.graphics.allocator-service.minigbm \
+    android.hardware.graphics.mapper@4.0-impl.minigbm \
+    mapper.minigbm
diff --git a/shared/graphics/sepolicy/file_contexts b/shared/graphics/sepolicy/file_contexts
new file mode 100644
index 0000000..5027637
--- /dev/null
+++ b/shared/graphics/sepolicy/file_contexts
@@ -0,0 +1,29 @@
+/dev/block/pmem1  u:object_r:hal_graphics_composer_pmem_device:s0
+
+/dev/dri u:object_r:gpu_device:s0
+/dev/dri/card0  u:object_r:graphics_device:s0
+/dev/dri/renderD128  u:object_r:gpu_device:s0
+
+/vendor/bin/hw/android\.hardware\.graphics\.allocator-service\.minigbm   u:object_r:hal_graphics_allocator_default_exec:s0
+/vendor/bin/hw/android\.hardware\.graphics\.allocator@4\.0-service\.minigbm   u:object_r:hal_graphics_allocator_default_exec:s0
+/vendor/bin/hw/android\.hardware\.graphics\.composer3-service\.ranchu  u:object_r:hal_graphics_composer_default_exec:s0
+
+/vendor/lib(64)?/libdrm.so  u:object_r:same_process_hal_file:s0
+/vendor/lib(64)?/hw/android\.hardware\.graphics\.mapper@4\.0-impl\.minigbm\.so u:object_r:same_process_hal_file:s0
+/vendor/lib(64)?/hw/mapper\.minigbm\.so u:object_r:same_process_hal_file:s0
+/vendor/lib(64)?/libminigbm_gralloc.so  u:object_r:same_process_hal_file:s0
+/vendor/lib(64)?/libminigbm_gralloc4_utils.so  u:object_r:same_process_hal_file:s0
+
+# gfxstream (to be better factored (fewer libraries?))
+/vendor/lib(64)?/hw/vulkan\.ranchu\.so   u:object_r:same_process_hal_file:s0
+/vendor/lib(64)?/libEGL_emulation\.so          u:object_r:same_process_hal_file:s0
+/vendor/lib(64)?/libGLESv1_CM_emulation\.so    u:object_r:same_process_hal_file:s0
+/vendor/lib(64)?/libGLESv2_emulation\.so       u:object_r:same_process_hal_file:s0
+/vendor/lib(64)?/libOpenglCodecCommon\.so       u:object_r:same_process_hal_file:s0
+/vendor/lib(64)?/libOpenglSystemCommon\.so       u:object_r:same_process_hal_file:s0
+/vendor/lib(64)?/lib_renderControl_enc\.so       u:object_r:same_process_hal_file:s0
+/vendor/lib(64)?/libGLESv1_enc\.so       u:object_r:same_process_hal_file:s0
+/vendor/lib(64)?/libGLESv2_enc\.so       u:object_r:same_process_hal_file:s0
+/vendor/lib(64)?/libvulkan_enc\.so       u:object_r:same_process_hal_file:s0
+/vendor/lib(64)?/libandroidemu\.so       u:object_r:same_process_hal_file:s0
+/vendor/lib(64)?/libGoldfishProfiler\.so       u:object_r:same_process_hal_file:s0
diff --git a/shared/graphics/sepolicy/hal_graphics_composer.te b/shared/graphics/sepolicy/hal_graphics_composer.te
new file mode 100644
index 0000000..aa851ec
--- /dev/null
+++ b/shared/graphics/sepolicy/hal_graphics_composer.te
@@ -0,0 +1,10 @@
+allow hal_graphics_composer_server hal_graphics_allocator_default_tmpfs:file read;
+allow hal_graphics_composer_server self:{ socket vsock_socket } create_socket_perms_no_ioctl;
+
+get_prop(hal_graphics_composer_server, vendor_cuttlefish_config_server_port_prop)
+get_prop(hal_graphics_composer_server, vendor_hwcomposer_prop)
+
+# Persistent memory for some hwcomposer configuration.
+type hal_graphics_composer_pmem_device, dev_type;
+allow hal_graphics_composer_server hal_graphics_composer_pmem_device:blk_file rw_file_perms;
+allow hal_graphics_composer_server block_device:dir search;
diff --git a/shared/sepolicy/vendor/hal_graphics_composer_default.te b/shared/graphics/sepolicy/hal_graphics_composer_default.te
similarity index 100%
rename from shared/sepolicy/vendor/hal_graphics_composer_default.te
rename to shared/graphics/sepolicy/hal_graphics_composer_default.te
diff --git a/shared/minidroid/BoardConfig.mk b/shared/minidroid/BoardConfig.mk
new file mode 100644
index 0000000..9d43df0
--- /dev/null
+++ b/shared/minidroid/BoardConfig.mk
@@ -0,0 +1,145 @@
+#
+# Copyright 2022 The Android Open-Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# FIXME: Split up and merge back in with shared/BoardConfig.mk
+
+TARGET_KERNEL_USE ?= 6.1
+TARGET_KERNEL_ARCH ?= $(TARGET_ARCH)
+TARGET_KERNEL_PATH ?= kernel/prebuilts/$(TARGET_KERNEL_USE)/$(TARGET_KERNEL_ARCH)/kernel-$(TARGET_KERNEL_USE)
+KERNEL_MODULES_PATH ?= \
+    kernel/prebuilts/common-modules/virtual-device/$(TARGET_KERNEL_USE)/$(subst _,-,$(TARGET_KERNEL_ARCH))
+PRODUCT_COPY_FILES += $(TARGET_KERNEL_PATH):kernel
+
+# The list of modules strictly/only required either to reach second stage
+# init, OR for recovery. Do not use this list to workaround second stage
+# issues.
+RAMDISK_KERNEL_MODULES := \
+    failover.ko \
+    net_failover.ko \
+    virtio_blk.ko \
+    virtio_console.ko \
+    virtio_net.ko \
+    virtio_pci.ko \
+    virtio_pci_modern_dev.ko \
+    virtio-rng.ko \
+    vmw_vsock_virtio_transport.ko \
+
+BOARD_VENDOR_RAMDISK_KERNEL_MODULES := \
+    $(patsubst %,$(KERNEL_MODULES_PATH)/%,$(RAMDISK_KERNEL_MODULES))
+
+# GKI >5.15 will have and require virtio_pci_legacy_dev.ko
+BOARD_VENDOR_RAMDISK_KERNEL_MODULES += $(wildcard $(KERNEL_MODULES_PATH)/virtio_pci_legacy_dev.ko)
+
+TARGET_NO_RECOVERY := true
+
+BOARD_VENDOR_RAMDISK_KERNEL_MODULES_BLOCKLIST_FILE := \
+    device/google/cuttlefish/shared/modules.blocklist
+
+TARGET_BOOTLOADER_BOARD_NAME := cutf
+
+BOARD_SYSTEMIMAGE_FILE_SYSTEM_TYPE := ext4
+BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE := ext4
+
+# Disable sparse on all filesystem images
+# This will prevent sparsing of super.img
+TARGET_USERIMAGES_SPARSE_EROFS_DISABLED ?= true
+TARGET_USERIMAGES_SPARSE_EXT_DISABLED ?= true
+TARGET_USERIMAGES_SPARSE_F2FS_DISABLED ?= true
+
+# FIXME: Not needed for minidroid, but needs fixes to CF assembler
+BOARD_USERDATAIMAGE_PARTITION_SIZE := 67108864
+BOARD_USERDATAIMAGE_FILE_SYSTEM_TYPE := ext4
+TARGET_USERIMAGES_USE_EXT4 := true
+
+BOARD_BOOTIMAGE_PARTITION_SIZE := 67108864
+BOARD_INIT_BOOT_IMAGE_PARTITION_SIZE := 8388608
+BOARD_VENDOR_BOOTIMAGE_PARTITION_SIZE := 67108864
+
+BOARD_AVB_ENABLE := true
+BOARD_AVB_ALGORITHM := SHA256_RSA4096
+BOARD_AVB_KEY_PATH := external/avb/test/data/testkey_rsa4096.pem
+
+BOARD_AVB_VBMETA_SYSTEM := system
+BOARD_AVB_VBMETA_SYSTEM_KEY_PATH := external/avb/test/data/testkey_rsa4096.pem
+BOARD_AVB_VBMETA_SYSTEM_ALGORITHM := SHA256_RSA4096
+BOARD_AVB_VBMETA_SYSTEM_ROLLBACK_INDEX := $(PLATFORM_SECURITY_PATCH_TIMESTAMP)
+BOARD_AVB_VBMETA_SYSTEM_ROLLBACK_INDEX_LOCATION := 1
+
+BOARD_AVB_BOOT_KEY_PATH := external/avb/test/data/testkey_rsa4096.pem
+BOARD_AVB_BOOT_ALGORITHM := SHA256_RSA4096
+BOARD_AVB_BOOT_ROLLBACK_INDEX := $(PLATFORM_SECURITY_PATCH_TIMESTAMP)
+BOARD_AVB_BOOT_ROLLBACK_INDEX_LOCATION := 2
+
+BOARD_AVB_INIT_BOOT_KEY_PATH := external/avb/test/data/testkey_rsa4096.pem
+BOARD_AVB_INIT_BOOT_ALGORITHM := SHA256_RSA4096
+BOARD_AVB_INIT_BOOT_ROLLBACK_INDEX := $(PLATFORM_SECURITY_PATCH_TIMESTAMP)
+BOARD_AVB_INIT_BOOT_ROLLBACK_INDEX_LOCATION := 3
+
+TARGET_AVB_SYSTEM_HASHTREE_ALGORITHM ?= sha256
+BOARD_AVB_SYSTEM_ADD_HASHTREE_FOOTER_ARGS += --hash_algorithm $(TARGET_AVB_SYSTEM_HASHTREE_ALGORITHM)
+
+BOARD_MALLOC_ALIGNMENT := 16
+
+BOARD_USES_GENERIC_KERNEL_IMAGE := true
+
+PRODUCT_COPY_FILES += \
+    device/google/cuttlefish/dtb.img:dtb.img \
+    device/google/cuttlefish/required_images:required_images \
+
+BOARD_BOOTLOADER_IN_UPDATE_PACKAGE := true
+BOARD_RAMDISK_USE_LZ4 := true
+
+# Default to minidroid, if not set.
+LOCAL_ANDROIDBOOT_HARDWARE ?= minidroid
+# Default to minidroid's primary init .rc, if not set.
+LOCAL_ANDROIDBOOT_INIT_RC ?= /system/etc/init/hw/init.minidroid.rc
+
+BOARD_KERNEL_CMDLINE += printk.devkmsg=on
+BOARD_KERNEL_CMDLINE += audit=1
+BOARD_KERNEL_CMDLINE += panic=-1
+BOARD_KERNEL_CMDLINE += 8250.nr_uarts=1
+BOARD_KERNEL_CMDLINE += cma=0
+BOARD_KERNEL_CMDLINE += firmware_class.path=/vendor/etc/
+BOARD_KERNEL_CMDLINE += loop.max_part=7
+BOARD_KERNEL_CMDLINE += init=/init
+BOARD_BOOTCONFIG += androidboot.hardware=$(LOCAL_ANDROIDBOOT_HARDWARE)
+BOARD_BOOTCONFIG += \
+    androidboot.init_rc=$(LOCAL_ANDROIDBOOT_INIT_RC)
+BOARD_BOOTCONFIG += kernel.mac80211_hwsim.radios=0
+BOARD_BOOTCONFIG += \
+    kernel.vmw_vsock_virtio_transport_common.virtio_transport_max_vsock_pkt_buf_size=16384
+BOARD_BOOTCONFIG += \
+    androidboot.microdroid.debuggable=1 \
+    androidboot.adb.enabled=1
+
+BOARD_INCLUDE_DTB_IN_BOOTIMG := true
+BOARD_BOOT_HEADER_VERSION := 4
+BOARD_MKBOOTIMG_ARGS += --header_version $(BOARD_BOOT_HEADER_VERSION)
+BOARD_INIT_BOOT_HEADER_VERSION := 4
+BOARD_MKBOOTIMG_INIT_ARGS += --header_version $(BOARD_INIT_BOOT_HEADER_VERSION)
+
+BOARD_GOOGLE_SYSTEM_DYNAMIC_PARTITIONS_PARTITION_LIST := system vendor
+# reserve 256MiB for dynamic partition metadata
+BOARD_GOOGLE_SYSTEM_DYNAMIC_PARTITIONS_SIZE := 268435456
+
+# 1MiB bigger than the dynamic partition to make build happy...
+BOARD_SUPER_PARTITION_SIZE := 269484032
+BOARD_SUPER_PARTITION_GROUPS := google_system_dynamic_partitions
+BOARD_BUILD_SUPER_IMAGE_BY_DEFAULT := true
+BOARD_SUPER_IMAGE_IN_UPDATE_PACKAGE := true
+
+TARGET_SKIP_OTA_PACKAGE := true
+TARGET_SKIP_OTATOOLS_PACKAGE := true
diff --git a/shared/minidroid/android-info.txt b/shared/minidroid/android-info.txt
new file mode 100644
index 0000000..fc79813
--- /dev/null
+++ b/shared/minidroid/android-info.txt
@@ -0,0 +1 @@
+config=minidroid
diff --git a/shared/minidroid/device.mk b/shared/minidroid/device.mk
new file mode 100644
index 0000000..1b9cc5f
--- /dev/null
+++ b/shared/minidroid/device.mk
@@ -0,0 +1,121 @@
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+$(call inherit-product, $(SRC_TARGET_DIR)/product/generic_ramdisk.mk)
+
+PRODUCT_COMPRESSED_APEX := false
+$(call inherit-product, $(SRC_TARGET_DIR)/product/updatable_apex.mk)
+
+$(call soong_config_append,cvd,launch_configs,cvd_config_minidroid.json)
+
+PRODUCT_SYSTEM_PROPERTIES += \
+    service.adb.listen_addrs=vsock:5555 \
+
+VENDOR_SECURITY_PATCH := $(PLATFORM_SECURITY_PATCH)
+BOOT_SECURITY_PATCH := $(PLATFORM_SECURITY_PATCH)
+PRODUCT_VENDOR_PROPERTIES += \
+    ro.vendor.boot_security_patch=$(BOOT_SECURITY_PATCH)
+
+# Disable Treble and the VNDK
+PRODUCT_FULL_TREBLE_OVERRIDE := false
+PRODUCT_USE_VNDK_OVERRIDE := false
+PRODUCT_USE_PRODUCT_VNDK_OVERRIDE := false
+
+PRODUCT_SHIPPING_API_LEVEL := 33
+
+PRODUCT_USE_DYNAMIC_PARTITIONS := true
+
+PRODUCT_BUILD_VENDOR_IMAGE := true
+TARGET_COPY_OUT_VENDOR := vendor
+
+PRODUCT_BRAND := generic
+
+# Stolen from microdroid/Android.bp
+PRODUCT_PACKAGES += \
+    init_second_stage \
+    libbinder \
+    libbinder_ndk \
+    libstdc++ \
+    secilc \
+    libadbd_auth \
+    libadbd_fs \
+    heapprofd_client_api \
+    libartpalette-system \
+    apexd \
+    atrace \
+    debuggerd \
+    linker \
+    servicemanager \
+    service \
+    tombstoned \
+    tombstone_transmit.microdroid \
+    cgroups.json \
+    task_profiles.json \
+    public.libraries.android.txt \
+    logcat \
+    logd \
+
+# Packages included only for eng or userdebug builds
+# su needed for logpersist.* commands
+PRODUCT_PACKAGES_DEBUG += \
+    logpersist.start \
+    su \
+
+# Start logcatd by default and keep up to 30 rotated files around in userdebug/eng builds
+ifneq (,$(filter userdebug eng, $(TARGET_BUILD_VARIANT)))
+PRODUCT_PROPERTY_OVERRIDES += \
+  logd.logpersistd=logcatd \
+  logd.logpersistd.size=30
+endif
+
+# Shell and utilities
+PRODUCT_PACKAGES += \
+    reboot \
+    sh \
+    strace \
+    toolbox \
+    toybox \
+
+# Test Binder RPC services
+PRODUCT_PACKAGES += \
+    minidroid_sd \
+    server_minidroid \
+    client_minidroid \
+    client_minidroid_rust \
+
+# Additional packages
+PRODUCT_PACKAGES += \
+    com.android.runtime \
+    com.android.adbd \
+    mdnsd \
+
+PRODUCT_COPY_FILES += \
+    device/google/cuttlefish/shared/minidroid/fstab.minidroid:$(TARGET_COPY_OUT_VENDOR_RAMDISK)/first_stage_ramdisk/fstab.minidroid \
+    device/google/cuttlefish/shared/minidroid/fstab.minidroid:$(TARGET_COPY_OUT_VENDOR)/etc/fstab.minidroid \
+
+# FIXME: Hack to get some rootdirs created
+PRODUCT_PACKAGES += \
+    init.environ.rc
+
+PRODUCT_COPY_FILES += \
+    device/google/cuttlefish/shared/minidroid/init.rc:system/etc/init/hw/init.minidroid.rc \
+    packages/modules/Virtualization/microdroid/ueventd.rc:vendor/etc/ueventd.rc \
+
+DEVICE_MANIFEST_FILE := \
+    packages/modules/Virtualization/microdroid/microdroid_vendor_manifest.xml
+PRODUCT_PACKAGES += vendor_compatibility_matrix.xml
+
+TARGET_BOARD_INFO_FILE ?= device/google/cuttlefish/shared/minidroid/android-info.txt
diff --git a/shared/minidroid/fstab.minidroid b/shared/minidroid/fstab.minidroid
new file mode 100644
index 0000000..774bc11
--- /dev/null
+++ b/shared/minidroid/fstab.minidroid
@@ -0,0 +1,7 @@
+# Cuttlefish always produces A/B for non-super partitions
+/dev/block/by-name/boot /boot emmc defaults recoveryonly,slotselect,first_stage_mount,avb=boot
+/dev/block/by-name/init_boot /init_boot emmc defaults recoveryonly,slotselect,first_stage_mount,avb=init_boot
+/dev/block/by-name/vendor_boot /vendor_boot emmc defaults recoveryonly,slotselect
+# These partitions are not A/B for minidroid
+system /system ext4 noatime,ro,errors=panic wait,logical,first_stage_mount,avb=vbmeta_system,avb_keys=/avb
+vendor /vendor ext4 noatime,ro,errors=panic wait,logical,first_stage_mount,avb=vbmeta
diff --git a/shared/minidroid/init.rc b/shared/minidroid/init.rc
new file mode 100644
index 0000000..697b43f
--- /dev/null
+++ b/shared/minidroid/init.rc
@@ -0,0 +1,199 @@
+# Copyright (C) 2021 The Android Open Source Project
+#
+# init.rc for minidroid.
+#
+# IMPORTANT: Do not create world writable files or directories.
+# This is a common source of Android security bugs.
+#
+
+import /init.environ.rc
+import /system/etc/init/mdnsd.rc
+import /system/etc/init/servicemanager.rc
+import /system/etc/init/logd.rc
+# TODO(b/267320398): this file does not exist in -user builds, but init can
+# continue despite that. Remove the import once we can rely on .rc files from
+# /system/etc/init getting loaded automatically.
+import /system/etc/init/logcatd.rc
+
+# Cgroups are mounted right before early-init using list from /etc/cgroups.json
+on early-init
+    # set RLIMIT_NICE to allow priorities from 19 to -20
+    setrlimit nice 40 40
+
+    start ueventd
+
+on init
+    mkdir /mnt/apk 0755 system system
+    mkdir /mnt/extra-apk 0755 root root
+
+    # Mount binderfs
+    mkdir /dev/binderfs
+    mount binder binder /dev/binderfs stats=global
+    chmod 0755 /dev/binderfs
+
+    symlink /dev/binderfs/binder /dev/binder
+    symlink /dev/binderfs/hwbinder /dev/hwbinder
+    symlink /dev/binderfs/vndbinder /dev/vndbinder
+
+    chmod 0666 /dev/binderfs/hwbinder
+    chmod 0666 /dev/binderfs/binder
+    chmod 0666 /dev/binderfs/vndbinder
+
+    # create an empty ld.config.txt before running any non-static executables
+    # (apexd and linkerconfig) to suppress "ld.config.txt not found" warnings.
+    write /linkerconfig/ld.config.txt \#
+    chmod 644 /linkerconfig/ld.config.txt
+
+    # Start logd before any other services run to ensure we capture all of their logs.
+    start logd
+
+    # TODO: minidroid does not use microdroid_manager. If/when this is fixed
+    # align this change and others related to microdroid_manager back to the
+    # microdroid init.rc
+    start apexd-vm
+
+    # restorecon so microdroid_manager can create subdirectories
+    restorecon /mnt/extra-apk
+
+    # Wait for apexd to finish activating APEXes before starting more processes.
+    # Note that minidroid starts apexd in VM mode in which apexd doesn't wait for init after setting
+    # apexd.status to activated, but immediately transitions to ready. Therefore, it's not safe to
+    # wait for the activated status; by the time this line is reached it may already be ready.
+    wait_for_prop apexd.status ready
+    perform_apex_config
+
+    # Notify to microdroid_manager that perform_apex_config is done.
+    # Microdroid_manager shouldn't execute payload before this, because app
+    # payloads are not designed to run with bootstrap bionic
+    setprop apex_config.done true
+
+    setprop ro.debuggable ${ro.boot.microdroid.debuggable:-0}
+    start servicemanager
+
+on property:dev.bootcomplete=1
+    # Stop ueventd to save memory
+    stop ueventd
+
+on init && property:ro.boot.microdroid.debuggable=1
+    # Mount tracefs (with GID=AID_READTRACEFS)
+    mount tracefs tracefs /sys/kernel/tracing gid=3012
+
+on init && property:ro.boot.adb.enabled=1
+    start adbd
+
+# Mount filesystems and start core system services.
+on late-init
+    trigger early-fs
+
+    # Mount fstab in init.{$device}.rc by mount_all command. Optional parameter
+    # '--early' can be specified to skip entries with 'latemount'.
+    # /system and /vendor must be mounted by the end of the fs stage,
+    # while /data is optional.
+    trigger fs
+    trigger post-fs
+
+    # Mount fstab in init.{$device}.rc by mount_all with '--late' parameter
+    # to only mount entries with 'latemount'. This is needed if '--early' is
+    # specified in the previous mount_all command on the fs stage.
+    # With /system mounted and properties from /system + /factory available,
+    # some services can be started.
+    trigger late-fs
+
+    trigger post-fs-data
+
+    # Load persist properties and override properties (if enabled) from /data.
+    trigger load_persist_props_action
+
+    trigger early-boot
+    trigger boot
+
+on post-fs
+    # Once everything is setup, no need to modify /.
+    # The bind+remount combination allows this to work in containers.
+    mount rootfs rootfs / remount bind ro nodev
+
+    # TODO(b/185767624): change the hard-coded size?
+    mount tmpfs tmpfs /data noatime nosuid nodev rw size=128M
+
+    # We chown/chmod /data again because mount is run as root + defaults
+    chown system system /data
+    chmod 0771 /data
+
+    # We restorecon /data in case the userdata partition has been reset.
+    restorecon /data
+
+    # set up misc directory structure first so that we can end early boot
+    # and start apexd
+    mkdir /data/misc 01771 system misc
+    # work around b/183668221
+    restorecon /data/misc
+
+    mkdir /data/misc/authfs 0700 root root
+
+on late-fs && property:ro.debuggable=1
+    # Ensure that tracefs has the correct permissions.
+    # This does not work correctly if it is called in post-fs.
+    chmod 0755 /sys/kernel/tracing
+    chmod 0755 /sys/kernel/debug/tracing
+
+on post-fs-data
+    mark_post_data
+
+    mkdir /data/vendor 0771 root root
+    mkdir /data/vendor_ce 0771 root root
+    mkdir /data/vendor_de 0771 root root
+    mkdir /data/vendor/hardware 0771 root root
+
+    # Start tombstoned early to be able to store tombstones.
+    # microdroid doesn't have anr, but tombstoned requires it
+    mkdir /data/anr 0775 system system
+    mkdir /data/tombstones 0771 system system
+    mkdir /data/vendor/tombstones 0771 root root
+
+    start tombstoned
+
+    # For security reasons, /data/local/tmp should always be empty.
+    # Do not place files or directories in /data/local/tmp
+    mkdir /data/local 0751 root root
+    mkdir /data/local/tmp 0771 shell shell
+
+on boot
+    ifup lo
+    hostname localhost
+    domainname localdomain
+    ifup eth1
+
+    setprop sys.boot_completed 1
+    setprop dev.bootcomplete 1
+    write /dev/kmsg "VIRTUAL_DEVICE_BOOT_COMPLETED"
+
+service tombstone_transmit /system/bin/tombstone_transmit.microdroid -cid 2 -port 2000 -remove_tombstones_after_transmitting
+    user system
+    group system
+    shutdown critical
+
+service apexd-vm /system/bin/apexd --vm
+    user root
+    group system
+    oneshot
+    disabled
+    capabilities CHOWN DAC_OVERRIDE DAC_READ_SEARCH FOWNER SYS_ADMIN
+
+service ueventd /system/bin/ueventd
+    class core
+    critical
+    seclabel u:r:ueventd:s0
+    shutdown critical
+    user root
+    group root
+    capabilities CHOWN DAC_OVERRIDE DAC_READ_SEARCH FOWNER FSETID MKNOD NET_ADMIN SETGID SETUID SYS_MODULE SYS_RAWIO SYS_ADMIN
+
+service console /system/bin/sh
+    class core
+    console
+    disabled
+    user shell
+    group shell log readproc
+    seclabel u:r:shell:s0
+    setenv HOSTNAME console
+
diff --git a/shared/minidroid/sample/Android.bp b/shared/minidroid/sample/Android.bp
new file mode 100644
index 0000000..e19e5da
--- /dev/null
+++ b/shared/minidroid/sample/Android.bp
@@ -0,0 +1,56 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "device_google_cuttlefish_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["device_google_cuttlefish_license"],
+}
+
+cc_binary {
+    name: "server_minidroid",
+    srcs: ["server.cpp"],
+    shared_libs: [
+        "libbinder_ndk",
+        "minidroid_sd",
+        "liblog",
+    ],
+    static_libs: [
+        "com.android.minidroid.testservice-ndk",
+    ],
+
+    apex_available: [
+        "//apex_available:platform",
+    ],
+}
+
+cc_binary {
+    name: "client_minidroid",
+    srcs: ["client.cpp"],
+    shared_libs: [
+        "libbinder_ndk",
+        "minidroid_sd",
+        "liblog",
+    ],
+    static_libs: [
+        "com.android.minidroid.testservice-ndk",
+    ],
+    apex_available: [
+        "//apex_available:platform",
+    ],
+}
+
+// https://google.github.io/comprehensive-rust/android/aidl/client.html
+rust_binary {
+    name: "client_minidroid_rust",
+    srcs: ["client.rs"],
+    rustlibs: [
+        "com.android.minidroid.testservice-rust",
+        "liblog_rust",
+        "liblogger",
+        "librpcbinder_rs",
+    ],
+    apex_available: [
+        "//apex_available:platform",
+    ],
+}
diff --git a/shared/minidroid/sample/aidl/Android.bp b/shared/minidroid/sample/aidl/Android.bp
new file mode 100644
index 0000000..ed78c9e
--- /dev/null
+++ b/shared/minidroid/sample/aidl/Android.bp
@@ -0,0 +1,20 @@
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+aidl_interface {
+    name: "com.android.minidroid.testservice",
+    srcs: ["com/android/minidroid/testservice/ITestService.aidl"],
+    unstable: true,
+    backend: {
+        ndk: {
+            enabled: true,
+        },
+        java: {
+            enabled: false,
+        },
+        rust: {
+            enabled: true,
+        }
+    },
+}
diff --git a/shared/minidroid/sample/aidl/com/android/minidroid/testservice/ITestService.aidl b/shared/minidroid/sample/aidl/com/android/minidroid/testservice/ITestService.aidl
new file mode 100644
index 0000000..59b8620
--- /dev/null
+++ b/shared/minidroid/sample/aidl/com/android/minidroid/testservice/ITestService.aidl
@@ -0,0 +1,15 @@
+package com.android.minidroid.testservice;
+
+interface ITestService {
+    const int SERVICE_PORT = 5678;
+
+    /* make server process print 'Hello World' to stdout. */
+    void sayHello();
+
+    /* make server process print @{text} to stdout. */
+    void printText(String text);
+
+    /* add two integers and return the result. */
+    int addInteger(int a, int b);
+
+}
diff --git a/shared/minidroid/sample/client.cpp b/shared/minidroid/sample/client.cpp
new file mode 100644
index 0000000..68f1500
--- /dev/null
+++ b/shared/minidroid/sample/client.cpp
@@ -0,0 +1,59 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <aidl/com/android/minidroid/testservice/ITestService.h>
+#include <minidroid_sd.h>
+
+#include <stdio.h>
+#include <unistd.h>
+
+#define LOG_TAG "client_minidroid"
+#include <log/log.h>
+
+int main(int argc, char** argv) {
+  if (argc != 3) {
+    LOG_FATAL(
+        "Wrong usage of ITestService client. Please enter the CID and port of "
+        "the proxy process!");
+    return -1;
+  }
+
+  int service_host_cid = atoi(argv[1]);
+  int service_port = atoi(argv[2]);
+
+  ALOGI("Hello Minidroid client! Connecting to CID %d and port %d",
+        service_host_cid, service_port);
+
+  ndk::SpAIBinder binder = bi::sd::getService(service_host_cid, service_port);
+
+  if (nullptr == binder.get()) {
+    LOG_FATAL("Unable to find service!");
+    return -1;
+  }
+
+  auto test_service =
+      aidl::com::android::minidroid::testservice::ITestService::fromBinder(
+          binder);
+
+  test_service->sayHello();
+  test_service->printText("Hello from client!");
+  int32_t result = 0;
+  test_service->addInteger(4, 6, &result);
+
+  ALOGI("Finished client. 4 + 6 is %d", result);
+
+  return 0;
+}
diff --git a/shared/minidroid/sample/client.rs b/shared/minidroid/sample/client.rs
new file mode 100644
index 0000000..9f1526c
--- /dev/null
+++ b/shared/minidroid/sample/client.rs
@@ -0,0 +1,45 @@
+//! Test comms client, but in Rust
+
+use binder::{StatusCode, Strong};
+use com_android_minidroid_testservice::aidl::com::android::minidroid::testservice::ITestService::ITestService;
+use com_android_minidroid_testservice::binder;
+use log::{error, info};
+use rpcbinder::RpcSession;
+
+fn get_service(cid: u32, port: u32) -> Result<Strong<dyn ITestService>, StatusCode> {
+    RpcSession::new().setup_vsock_client(cid, port)
+}
+
+fn main() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
+    let _ = logger::init(
+        logger::Config::default()
+            .with_tag_on_device("client_minidroid_rust")
+            .with_min_level(log::Level::Debug),
+    );
+    // Redirect panic messages to logcat.
+    std::panic::set_hook(Box::new(|panic_info| {
+        error!("{}", panic_info);
+    }));
+
+    if std::env::args().len() != 3 {
+        return Err(format!("usage: {} CID port", std::env::args().next().unwrap()).into());
+    }
+
+    let service_host_cid =
+        std::env::args().nth(1).and_then(|arg| arg.parse::<u32>().ok()).expect("invalid CID");
+    let service_port =
+        std::env::args().nth(2).and_then(|arg| arg.parse::<u32>().ok()).expect("invalid port");
+
+    info!(
+        "Hello Rust Minidroid client! Connecting to CID {} and port {}",
+        service_host_cid, service_port
+    );
+
+    let service = get_service(service_host_cid, service_port)?;
+    service.sayHello()?;
+    service.printText("Hello from Rust client! 🦀")?;
+    let result = service.addInteger(4, 6)?;
+    info!("Finished client. 4 + 6 = {}", result);
+
+    Ok(())
+}
diff --git a/shared/minidroid/sample/server.cpp b/shared/minidroid/sample/server.cpp
new file mode 100644
index 0000000..2656d9a
--- /dev/null
+++ b/shared/minidroid/sample/server.cpp
@@ -0,0 +1,58 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <aidl/com/android/minidroid/testservice/BnTestService.h>
+#include <minidroid_sd.h>
+
+#include <stdio.h>
+#include <string>
+
+#define LOG_TAG "server_minidroid"
+#include <log/log.h>
+
+namespace {
+
+void start_test_service() {
+  class TestService
+      : public aidl::com::android::minidroid::testservice::BnTestService {
+    ndk::ScopedAStatus sayHello() override {
+      ALOGI("Hello World!\n");
+      return ndk::ScopedAStatus::ok();
+    }
+
+    ndk::ScopedAStatus printText(const std::string& text) override {
+      ALOGI("%s\n", text.c_str());
+      return ndk::ScopedAStatus::ok();
+    }
+
+    ndk::ScopedAStatus addInteger(int32_t a, int32_t b, int32_t* out) override {
+      *out = a + b;
+      return ndk::ScopedAStatus::ok();
+    }
+  };
+  auto testService = ndk::SharedRefBase::make<TestService>();
+
+  bi::sd::setupRpcServer(testService->asBinder(), testService->SERVICE_PORT);
+}
+}  // namespace
+
+int main() {
+  ALOGI("Hello Minidroid server!\n");
+
+  start_test_service();
+
+  return 0;
+}
diff --git a/shared/minidroid/sample/servicediscovery/Android.bp b/shared/minidroid/sample/servicediscovery/Android.bp
new file mode 100644
index 0000000..9d02479
--- /dev/null
+++ b/shared/minidroid/sample/servicediscovery/Android.bp
@@ -0,0 +1,29 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "device_google_cuttlefish_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["device_google_cuttlefish_license"],
+}
+
+cc_library {
+    name: "minidroid_sd",
+    srcs: ["minidroid_sd.cpp"],
+    shared_libs: [
+        "libbinder_ndk",
+        "libbinder_rpc_unstable",
+        "libbase",
+    ],
+    static_libs: [
+        "libprotobuf-cpp-lite-ndk",
+    ],
+
+    // Exported so clients/servers can #include <minidroid_sd.h>.
+    // (Fixed: entry previously used a stray TAB in its indentation,
+    // which bpfmt rejects; Soong .bp files use 4-space indents.)
+    export_include_dirs: [
+        "include",
+    ],
+
+    apex_available: [
+        "//apex_available:platform",
+    ],
+}
diff --git a/shared/minidroid/sample/servicediscovery/include/minidroid_sd.h b/shared/minidroid/sample/servicediscovery/include/minidroid_sd.h
new file mode 100644
index 0000000..bb12257
--- /dev/null
+++ b/shared/minidroid/sample/servicediscovery/include/minidroid_sd.h
@@ -0,0 +1,28 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <android/binder_auto_utils.h>
+#include <string>
+
+namespace bi {
+namespace sd {
+
+void setupRpcServer(ndk::SpAIBinder service, int port);
+
+ndk::SpAIBinder getService(int cid, int port);
+
+}  // namespace sd
+}  // namespace bi
diff --git a/shared/minidroid/sample/servicediscovery/minidroid_sd.cpp b/shared/minidroid/sample/servicediscovery/minidroid_sd.cpp
new file mode 100644
index 0000000..5a9f7ec
--- /dev/null
+++ b/shared/minidroid/sample/servicediscovery/minidroid_sd.cpp
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "include/minidroid_sd.h"
+
+#include <sys/socket.h>
+
+#include <android/binder_manager.h>
+#include <android/binder_process.h>
+#include <linux/vm_sockets.h>
+#include <stdio.h>
+#include <binder_rpc_unstable.hpp>
+
+void bi::sd::setupRpcServer(ndk::SpAIBinder service, int port) {
+  ABinderProcess_startThreadPool();
+  ARpcServer* server = ARpcServer_newVsock(service.get(), VMADDR_CID_ANY, port);
+
+  AServiceManager_addService(service.get(), "TestService");
+  printf("Calling join on server!\n");
+  ARpcServer_join(server);
+}
+
+ndk::SpAIBinder bi::sd::getService(int cid, int port) {
+  return ndk::SpAIBinder(
+      ARpcSession_setupVsockClient(ARpcSession_new(), cid, port));
+}
diff --git a/shared/modules.blocklist b/shared/modules.blocklist
new file mode 100644
index 0000000..8dc5e71
--- /dev/null
+++ b/shared/modules.blocklist
@@ -0,0 +1 @@
+blocklist vkms.ko
diff --git a/shared/overlays/SettingsProvider/Android.bp b/shared/overlays/SettingsProvider/Android.bp
index 6072a34..6ee5aac 100644
--- a/shared/overlays/SettingsProvider/Android.bp
+++ b/shared/overlays/SettingsProvider/Android.bp
@@ -1,3 +1,18 @@
+//
+// Copyright (C) 2021 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
 package {
     default_applicable_licenses: ["Android-Apache-2.0"],
 }
@@ -5,4 +20,5 @@
 runtime_resource_overlay {
     name: "cuttlefish_overlay_settings_provider",
     soc_specific: true,
+    sdk_version: "current",
 }
diff --git a/shared/overlays/connectivity/Android.bp b/shared/overlays/connectivity/Android.bp
index 58b2a02..57d117d 100644
--- a/shared/overlays/connectivity/Android.bp
+++ b/shared/overlays/connectivity/Android.bp
@@ -1,3 +1,18 @@
+//
+// Copyright (C) 2021 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
 package {
     default_applicable_licenses: ["Android-Apache-2.0"],
 }
diff --git a/shared/overlays/core/Android.bp b/shared/overlays/core/Android.bp
index e297776..d194959 100644
--- a/shared/overlays/core/Android.bp
+++ b/shared/overlays/core/Android.bp
@@ -1,3 +1,18 @@
+//
+// Copyright (C) 2021 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
 package {
     default_applicable_licenses: ["Android-Apache-2.0"],
 }
@@ -5,4 +20,5 @@
 runtime_resource_overlay {
     name: "cuttlefish_overlay_frameworks_base_core",
     soc_specific: true,
+    sdk_version: "current",
 }
diff --git a/shared/overlays/core/res/values/config.xml b/shared/overlays/core/res/values/config.xml
new file mode 100644
index 0000000..bfcec6c
--- /dev/null
+++ b/shared/overlays/core/res/values/config.xml
@@ -0,0 +1,23 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+/*
+** Copyright 2023, The Android Open Source Project.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+-->
+
+<resources xmlns:xliff="urn:oasis:names:tc:xliff:document:1.2">
+    <!-- Show the "Adaptive Brightness" toggle. -->
+    <bool name="config_automatic_brightness_available">true</bool>
+</resources>
\ No newline at end of file
diff --git a/shared/pc/OWNERS b/shared/pc/OWNERS
index 47eb80e..62a7330 100644
--- a/shared/pc/OWNERS
+++ b/shared/pc/OWNERS
@@ -1,3 +1,5 @@
-# pc cuttlefish leads
-armenk@google.com
+# ARC++ next owners
+hirono@google.com
+liyingtan@google.com
+mdehaini@google.com
 xutan@google.com
\ No newline at end of file
diff --git a/shared/pc/device_vendor.mk b/shared/pc/device_vendor.mk
index ad597c5..c7fab44 100644
--- a/shared/pc/device_vendor.mk
+++ b/shared/pc/device_vendor.mk
@@ -23,6 +23,10 @@
     frameworks/native/data/etc/pc_core_hardware.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/pc_core_hardware.xml
 
 $(call inherit-product, frameworks/native/build/tablet-7in-xhdpi-2048-dalvik-heap.mk)
+$(call inherit-product, device/google/cuttlefish/shared/camera/device_vendor.mk)
+$(call inherit-product, device/google/cuttlefish/shared/graphics/device_vendor.mk)
+$(call inherit-product, device/google/cuttlefish/shared/swiftshader/device_vendor.mk)
+$(call inherit-product, device/google/cuttlefish/shared/virgl/device_vendor.mk)
 $(call inherit-product, device/google/cuttlefish/shared/device.mk)
 
-DEVICE_PACKAGE_OVERLAYS += device/google/cuttlefish/shared/pc/overlay
\ No newline at end of file
+DEVICE_PACKAGE_OVERLAYS += device/google/cuttlefish/shared/pc/overlay
diff --git a/shared/permissions/Android.bp b/shared/permissions/Android.bp
index 1b1e7bb..5cfafe0 100644
--- a/shared/permissions/Android.bp
+++ b/shared/permissions/Android.bp
@@ -1,3 +1,18 @@
+//
+// Copyright (C) 2021 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
 package {
     default_applicable_licenses: ["Android-Apache-2.0"],
 }
diff --git a/shared/permissions/cuttlefish_excluded_hardware.xml b/shared/permissions/cuttlefish_excluded_hardware.xml
index c3d03d5..3fba9f6 100644
--- a/shared/permissions/cuttlefish_excluded_hardware.xml
+++ b/shared/permissions/cuttlefish_excluded_hardware.xml
@@ -15,5 +15,4 @@
 -->
 <permissions>
     <unavailable-feature name="android.software.print" />
-    <unavailable-feature name="android.software.voice_recognizers" />
 </permissions>
diff --git a/shared/phone/device_vendor.mk b/shared/phone/device_vendor.mk
index f373136..f992362 100644
--- a/shared/phone/device_vendor.mk
+++ b/shared/phone/device_vendor.mk
@@ -18,7 +18,6 @@
 SYSTEM_EXT_MANIFEST_FILES += device/google/cuttlefish/shared/config/system_ext_manifest.xml
 
 $(call inherit-product, $(SRC_TARGET_DIR)/product/handheld_vendor.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/product/telephony_vendor.mk)
 
 ifneq ($(LOCAL_PREFER_VENDOR_APEX),true)
 PRODUCT_COPY_FILES += \
@@ -26,37 +25,31 @@
 endif
 
 $(call inherit-product, frameworks/native/build/phone-xhdpi-2048-dalvik-heap.mk)
+$(call inherit-product, device/google/cuttlefish/shared/camera/device_vendor.mk)
+$(call inherit-product, device/google/cuttlefish/shared/graphics/device_vendor.mk)
+$(call inherit-product, device/google/cuttlefish/shared/swiftshader/device_vendor.mk)
+$(call inherit-product, device/google/cuttlefish/shared/telephony/device_vendor.mk)
+$(call inherit-product, device/google/cuttlefish/shared/virgl/device_vendor.mk)
 $(call inherit-product, device/google/cuttlefish/shared/device.mk)
 
+PRODUCT_EXTRA_VNDK_VERSIONS := 29 30 31
+
 TARGET_PRODUCT_PROP := $(LOCAL_PATH)/product.prop
 
-PRODUCT_VENDOR_PROPERTIES += \
-    keyguard.no_require_sim=true \
-    ro.cdma.home.operator.alpha=Android \
-    ro.cdma.home.operator.numeric=302780 \
-    ro.com.android.dataroaming=true \
-    ro.telephony.default_network=9 \
-
-TARGET_USES_CF_RILD ?= true
-ifeq ($(TARGET_USES_CF_RILD),true)
-ifeq ($(LOCAL_PREFER_VENDOR_APEX),true)
-PRODUCT_PACKAGES += com.google.cf.rild
-else
-PRODUCT_PACKAGES += \
-    libcuttlefish-ril-2 \
-    libcuttlefish-rild
-endif
-endif
-
 ifneq ($(LOCAL_PREFER_VENDOR_APEX),true)
 PRODUCT_COPY_FILES += \
-    frameworks/native/data/etc/android.hardware.biometrics.face.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.hardware.biometrics.face.xml \
     frameworks/native/data/etc/android.hardware.faketouch.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.hardware.faketouch.xml \
     frameworks/native/data/etc/android.hardware.fingerprint.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.hardware.fingerprint.xml \
-    frameworks/native/data/etc/android.hardware.telephony.gsm.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.hardware.telephony.gsm.xml \
-    frameworks/native/data/etc/android.hardware.telephony.ims.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.hardware.telephony.ims.xml
+
+    ifneq ($(TARGET_DISABLE_BIOMETRICS_FACE),true)
+        PRODUCT_COPY_FILES += \
+        frameworks/native/data/etc/android.hardware.biometrics.face.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.hardware.biometrics.face.xml \
+
+    endif
 endif
 
+
+
 # Runtime Resource Overlays
 ifeq ($(LOCAL_PREFER_VENDOR_APEX),true)
 PRODUCT_PACKAGES += com.google.aosp_cf_phone.rros
diff --git a/shared/phone/overlays/CuttlefishTetheringOverlay/Android.bp b/shared/phone/overlays/CuttlefishTetheringOverlay/Android.bp
index a35bfd2..3dcd123 100644
--- a/shared/phone/overlays/CuttlefishTetheringOverlay/Android.bp
+++ b/shared/phone/overlays/CuttlefishTetheringOverlay/Android.bp
@@ -1,3 +1,18 @@
+//
+// Copyright (C) 2021 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
 package {
     default_applicable_licenses: ["Android-Apache-2.0"],
 }
@@ -8,3 +23,10 @@
     vendor: true,
     sdk_version: "current",
 }
+
+override_runtime_resource_overlay {
+    name: "CuttlefishTetheringOverlayGoogle",
+    base: "CuttlefishTetheringOverlay",
+    package_name: "com.google.android.networkstack.tethering.cuttlefishoverlay",
+    target_package_name: "com.google.android.networkstack.tethering",
+}
diff --git a/shared/phone/overlays/CuttlefishWifiOverlay/Android.bp b/shared/phone/overlays/CuttlefishWifiOverlay/Android.bp
index fd2f7eb..5330899 100644
--- a/shared/phone/overlays/CuttlefishWifiOverlay/Android.bp
+++ b/shared/phone/overlays/CuttlefishWifiOverlay/Android.bp
@@ -1,3 +1,18 @@
+//
+// Copyright (C) 2021 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
 package {
     default_applicable_licenses: ["Android-Apache-2.0"],
 }
@@ -8,3 +23,10 @@
     vendor: true,
     sdk_version: "current",
 }
+
+override_runtime_resource_overlay {
+    name: "CuttlefishWifiOverlayGoogle",
+    base: "CuttlefishWifiOverlay",
+    package_name: "com.google.android.wifi.resources.cf",
+    target_package_name: "com.google.android.wifi.resources",
+}
diff --git a/shared/phone/overlays/CuttlefishWifiOverlay/res/values/config.xml b/shared/phone/overlays/CuttlefishWifiOverlay/res/values/config.xml
index 5586326..c9a694e 100644
--- a/shared/phone/overlays/CuttlefishWifiOverlay/res/values/config.xml
+++ b/shared/phone/overlays/CuttlefishWifiOverlay/res/values/config.xml
@@ -18,13 +18,17 @@
 -->
 <resources xmlns:xliff="urn:oasis:names:tc:xliff:document:1.2">
     <!-- True if the firmware supports connected MAC randomization -->
-    <!-- TODO(b/223101490) Disable temporarily for Wi-Fi connection issue -->
-    <bool name="config_wifi_connected_mac_randomization_supported">false</bool>
+    <bool name="config_wifi_connected_mac_randomization_supported">true</bool>
+    <bool name="config_wifiAllowNonPersistentMacRandomizationOnOpenSsids">true</bool>
 
     <!-- True if the firmware supports p2p MAC randomization -->
-    <bool name="config_wifi_p2p_mac_randomization_supported">true</bool>
+    <!-- TODO(b/237945863) Re-enable this flag when mac randomization for
+                           P2P is supported at Cloud Android -->
+    <bool name="config_wifi_p2p_mac_randomization_supported">false</bool>
 
     <!-- True if the firmware supports ap MAC randomization -->
     <bool name="config_wifi_ap_mac_randomization_supported">true</bool>
 
+    <bool name="config_wifi5ghzSupport">true</bool>
+
 </resources>
diff --git a/shared/phone/overlays/core/Android.bp b/shared/phone/overlays/core/Android.bp
index 23fddab..bed3839 100644
--- a/shared/phone/overlays/core/Android.bp
+++ b/shared/phone/overlays/core/Android.bp
@@ -1,3 +1,18 @@
+//
+// Copyright (C) 2021 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
 package {
     default_applicable_licenses: ["Android-Apache-2.0"],
 }
@@ -5,4 +20,5 @@
 runtime_resource_overlay {
     name: "cuttlefish_phone_overlay_frameworks_base_core",
     soc_specific: true,
+    sdk_version: "current",
 }
diff --git a/shared/phone/overlays/core/res/values/config.xml b/shared/phone/overlays/core/res/values/config.xml
index 1d7b019..b7edba2 100644
--- a/shared/phone/overlays/core/res/values/config.xml
+++ b/shared/phone/overlays/core/res/values/config.xml
@@ -55,18 +55,17 @@
   <string name="config_mms_user_agent_profile_url" translatable="false">http://gsm.lge.com/html/gsm/Nexus5-M3.xml</string>
   <string name="config_wlan_data_service_package" translatable="false">com.android.ims</string>
   <string name="config_wlan_network_service_package" translatable="false">com.android.ims</string>
-
-  <!-- List of biometric sensors on the device, in decreasing strength. Consumed by AuthService
-  when registering authenticators with BiometricService. Format must be ID:Modality:Strength,
-  where: IDs are unique per device, Modality as defined in BiometricAuthenticator.java,
-  and Strength as defined in Authenticators.java -->
-  <string-array name="config_biometric_sensors" translatable="false" >
-    <item>2:2:255</item> <!-- ID2:Fingerprint(HIDL):Weak -->
-    <item>3:8:255</item> <!-- ID3:Face(HIDL):Weak -->
-  </string-array>
+  <string name="config_qualified_networks_service_package">com.android.qns</string>
+  <string name="config_gba_package">com.android.gbaservice</string>
 
   <!-- Enable Night display, which requires HWC 2.0. -->
   <bool name="config_nightDisplayAvailable">true</bool>
   <!-- Let ColorFade use a color layer to avoid deadlocking in WM CTS. See b/233386717. -->
   <bool name="config_animateScreenLights">true</bool>
+
+  <!-- Is the device capable of hot swapping an UICC Card -->
+  <bool name="config_hotswapCapable">true</bool>
+
+  <!-- String indicating the package name of the vendor satellite service implementation -->
+  <string name="config_satellite_service_package" translatable="false">com.google.android.telephony.satellite</string>
 </resources>
diff --git a/shared/phone/product.prop b/shared/phone/product.prop
index 2bce15e..616def3 100644
--- a/shared/phone/product.prop
+++ b/shared/phone/product.prop
@@ -12,12 +12,7 @@
 bluetooth.profile.asha.central.enabled=true
 bluetooth.profile.a2dp.source.enabled=true
 bluetooth.profile.avrcp.target.enabled=true
-bluetooth.profile.bap.broadcast.assist.enabled=true
-bluetooth.profile.bap.unicast.client.enabled=true
-bluetooth.profile.bas.client.enabled=true
-bluetooth.profile.csip.set_coordinator.enabled=true
 bluetooth.profile.gatt.enabled=true
-bluetooth.profile.hap.client.enabled=true
 bluetooth.profile.hfp.ag.enabled=true
 bluetooth.profile.hid.device.enabled=true
 bluetooth.profile.hid.host.enabled=true
@@ -27,5 +22,12 @@
 bluetooth.profile.pan.nap.enabled=true
 bluetooth.profile.pan.panu.enabled=true
 bluetooth.profile.pbap.server.enabled=true
-bluetooth.profile.ccp.server.enabled=true
-bluetooth.profile.vcp.controller.enabled=true
+
+# Disable LeAudio related profiles as there is no support for them
+bluetooth.profile.bap.broadcast.assist.enabled=false
+bluetooth.profile.bap.unicast.client.enabled=false
+bluetooth.profile.bas.client.enabled=false
+bluetooth.profile.ccp.server.enabled=false
+bluetooth.profile.csip.set_coordinator.enabled=false
+bluetooth.profile.hap.client.enabled=false
+bluetooth.profile.vcp.controller.enabled=false
diff --git a/shared/sepolicy/vendor/adbd.te b/shared/sepolicy/vendor/adbd.te
index c933fe7..d932066 100644
--- a/shared/sepolicy/vendor/adbd.te
+++ b/shared/sepolicy/vendor/adbd.te
@@ -1,4 +1,2 @@
 allow adbd self:{ socket vsock_socket } {create listen accept rw_socket_perms_no_ioctl};
 allow adbd kernel:system module_request;
-
-gpu_access(adbd)
diff --git a/shared/sepolicy/vendor/atrace.te b/shared/sepolicy/vendor/atrace.te
new file mode 100644
index 0000000..d4626f3
--- /dev/null
+++ b/shared/sepolicy/vendor/atrace.te
@@ -0,0 +1 @@
+dontaudit atrace debugfs_tracing_debug:file rw_file_perms;
diff --git a/shared/sepolicy/vendor/bootanim.te b/shared/sepolicy/vendor/bootanim.te
deleted file mode 100644
index b183efa..0000000
--- a/shared/sepolicy/vendor/bootanim.te
+++ /dev/null
@@ -1,2 +0,0 @@
-allow bootanim self:process execmem;
-gpu_access(bootanim)
diff --git a/shared/sepolicy/vendor/bt_device.te b/shared/sepolicy/vendor/bt_device.te
deleted file mode 100644
index 5c88bfb..0000000
--- a/shared/sepolicy/vendor/bt_device.te
+++ /dev/null
@@ -1 +0,0 @@
-type bt_device, dev_type;
\ No newline at end of file
diff --git a/shared/sepolicy/vendor/bug_map b/shared/sepolicy/vendor/bug_map
index fe3d21d..9f93bb7 100644
--- a/shared/sepolicy/vendor/bug_map
+++ b/shared/sepolicy/vendor/bug_map
@@ -1,8 +1,10 @@
-init init capability b/199386018
+atrace debugfs_tracing_debug file b/230656878
 init logcat_exec file b/216584034
+init sysfs_devices_block file b/225071527
 init system_lib_file dir b/133444385
 init system_lib_file file b/133444385
 kernel kernel capability b/179966921
 migrate_legacy_obb_data dalvikcache_data_file file b/152338071
-gmscore_app hal_camera_prop file b/156287758
 priv_app radio_vendor_data_file dir b/188833462
+dlkm_loader dlkm_loader capability b/149866755
+dlkm_loader kernel process b/149866755
diff --git a/shared/sepolicy/vendor/cameraserver.te b/shared/sepolicy/vendor/cameraserver.te
deleted file mode 100644
index 7bc3fbf..0000000
--- a/shared/sepolicy/vendor/cameraserver.te
+++ /dev/null
@@ -1 +0,0 @@
-gpu_access(cameraserver)
diff --git a/shared/sepolicy/vendor/dlkm_loader.te b/shared/sepolicy/vendor/dlkm_loader.te
index c47e229..afbc48f 100644
--- a/shared/sepolicy/vendor/dlkm_loader.te
+++ b/shared/sepolicy/vendor/dlkm_loader.te
@@ -5,13 +5,26 @@
 
 # Allow insmod on vendor, system and system_dlkm partitions
 allow dlkm_loader self:capability sys_module;
-allow dlkm_loader system_file:system module_load;
+allow dlkm_loader system_dlkm_file:dir r_dir_perms;
+allow dlkm_loader system_dlkm_file:file r_file_perms;
 allow dlkm_loader system_dlkm_file:system module_load;
+allow dlkm_loader system_file:system module_load;
 allow dlkm_loader vendor_file:system module_load;
 
 # needed for libmodprobe to read kernel commandline
 allow dlkm_loader proc_cmdline:file r_file_perms;
 
+# Needed because CONFIG_USB_DUMMY_HCD adds some additional logic to
+# finit_module() syscall, causing that syscall to create/update keyrings.
+# Once we remove CONFIG_USB_DUMMY_HCD config, self:key write permission can be
+# removed.
+allow dlkm_loader self:key write;
+
+# Allow writing to kernel log
+allow dlkm_loader kmsg_device:chr_file rw_file_perms;
+
 # dlkm_loader searches tracefs while looking for modules
 dontaudit dlkm_loader debugfs_bootreceiver_tracing:dir search;
 dontaudit dlkm_loader debugfs_mm_events_tracing:dir search;
+
+set_prop(dlkm_loader, vendor_device_prop)
\ No newline at end of file
diff --git a/shared/sepolicy/vendor/e2fs.te b/shared/sepolicy/vendor/e2fs.te
new file mode 100644
index 0000000..54af273
--- /dev/null
+++ b/shared/sepolicy/vendor/e2fs.te
@@ -0,0 +1,3 @@
+# e2fs tools (mke2fs/e2fsck) want to read /sys/devices/*/block/*
+allow e2fs sysfs_devices_block:dir search;
+allow e2fs sysfs_devices_block:file r_file_perms;
diff --git a/shared/sepolicy/vendor/ephemeral_app.te b/shared/sepolicy/vendor/ephemeral_app.te
deleted file mode 100644
index 92fcef6..0000000
--- a/shared/sepolicy/vendor/ephemeral_app.te
+++ /dev/null
@@ -1 +0,0 @@
-gpu_access(ephemeral_app)
diff --git a/shared/sepolicy/vendor/file_contexts b/shared/sepolicy/vendor/file_contexts
index 01dcc97..0d94687 100644
--- a/shared/sepolicy/vendor/file_contexts
+++ b/shared/sepolicy/vendor/file_contexts
@@ -8,6 +8,7 @@
 /dev/block/by-name/vendor_boot_[ab] u:object_r:boot_block_device:s0
 /dev/block/by-name/vbmeta_[ab] u:object_r:ab_block_device:s0
 /dev/block/by-name/vbmeta_system_[ab] u:object_r:ab_block_device:s0
+/dev/block/by-name/vbmeta_vendor_dlkm_[ab] u:object_r:ab_block_device:s0
 /dev/block/by-name/super u:object_r:super_block_device:s0
 /dev/block/by-name/userdata u:object_r:userdata_block_device:s0
 /dev/block/by-name/metadata u:object_r:metadata_block_device:s0
@@ -15,11 +16,7 @@
 /dev/block/by-name/frp  u:object_r:frp_block_device:s0
 
 /dev/block/pmem0  u:object_r:rebootescrow_device:s0
-/dev/block/pmem1  u:object_r:hal_graphics_composer_pmem_device:s0
 /dev/block/zram0  u:object_r:swap_block_device:s0
-/dev/dri u:object_r:gpu_device:s0
-/dev/dri/card0  u:object_r:graphics_device:s0
-/dev/dri/renderD128  u:object_r:gpu_device:s0
 /dev/hvc0  u:object_r:serial_device:s0
 /dev/hvc1  u:object_r:serial_device:s0
 /dev/hvc2  u:object_r:serial_device:s0
@@ -35,6 +32,12 @@
 /dev/hvc6  u:object_r:gnss_device:s0
 /dev/hvc7  u:object_r:gnss_device:s0
 
+# hvc8 for confirmation UI
+/dev/hvc8  u:object_r:confirmationui_device:s0
+
+# hvc9 for uwb
+/dev/hvc9  u:object_r:uwb_device:s0
+
 # ARM serial console device
 /dev/ttyAMA[0-9]*  u:object_r:serial_device:s0
 
@@ -62,12 +65,9 @@
 /vendor/bin/vsoc_input_service  u:object_r:vsoc_input_service_exec:s0
 /vendor/bin/rename_netiface  u:object_r:rename_netiface_exec:s0
 /vendor/bin/suspend_blocker  u:object_r:suspend_blocker_exec:s0
-/vendor/bin/hw/libcuttlefish-rild  u:object_r:libcuttlefish_rild_exec:s0
-/vendor/bin/hw/android\.hardware\.camera\.provider@2\.7-external-vsock-service u:object_r:hal_camera_default_exec:s0
-/vendor/bin/hw/android\.hardware\.camera\.provider@2\.7-service-google u:object_r:hal_camera_default_exec:s0
-/vendor/bin/hw/android\.hardware\.camera\.provider@2\.7-service-google-lazy u:object_r:hal_camera_default_exec:s0
+/vendor/bin/metrics_helper   u:object_r:metrics_helper_exec:s0
 /vendor/bin/hw/android\.hardware\.power\.stats@1\.0-service\.mock  u:object_r:hal_power_stats_default_exec:s0
-/vendor/bin/hw/android\.hardware.audio.service u:object_r:hal_audio_cuttlefish_exec:s0
+/vendor/bin/hw/android\.hardware\.audio\.service u:object_r:hal_audio_cuttlefish_exec:s0
 /vendor/bin/hw/android\.hardware\.bluetooth@1\.1-service\.remote  u:object_r:hal_bluetooth_remote_exec:s0
 /vendor/bin/hw/android\.hardware\.bluetooth@1\.1-service\.sim  u:object_r:hal_bluetooth_sim_exec:s0
 /vendor/bin/hw/android\.hardware\.contexthub@1\.2-service\.mock  u:object_r:hal_contexthub_default_exec:s0
@@ -76,14 +76,10 @@
 /vendor/bin/hw/android\.hardware\.drm@[0-9]+\.[0-9]+-service\.widevine  u:object_r:hal_drm_widevine_exec:s0
 /vendor/bin/hw/android\.hardware\.drm-service\.widevine  u:object_r:hal_drm_widevine_exec:s0
 /vendor/bin/hw/android\.hardware\.drm@[0-9]+\.[0-9]+-service-lazy\.widevine  u:object_r:hal_drm_widevine_exec:s0
-/vendor/bin/hw/android\.hardware\.graphics\.allocator-V1-service\.minigbm   u:object_r:hal_graphics_allocator_default_exec:s0
-/vendor/bin/hw/android\.hardware\.graphics\.allocator@4\.0-service\.minigbm   u:object_r:hal_graphics_allocator_default_exec:s0
-/vendor/bin/hw/android\.hardware\.graphics\.composer3-service\.ranchu  u:object_r:hal_graphics_composer_default_exec:s0
 /vendor/bin/hw/android\.hardware\.gatekeeper@1\.0-service\.software  u:object_r:hal_gatekeeper_default_exec:s0
 /vendor/bin/hw/android\.hardware\.health-service\.cuttlefish u:object_r:hal_health_default_exec:s0
 /vendor/bin/hw/android\.hardware\.health\.storage-service\.cuttlefish u:object_r:hal_health_storage_default_exec:s0
 /vendor/bin/hw/android\.hardware\.lights-service\.example u:object_r:hal_light_default_exec:s0
-/vendor/bin/hw/android\.hardware\.neuralnetworks@1\.3-service-sample-.*   u:object_r:hal_neuralnetworks_sample_exec:s0
 /vendor/bin/hw/android\.hardware\.neuralnetworks-shim-service-sample   u:object_r:hal_neuralnetworks_sample_exec:s0
 /vendor/bin/hw/android\.hardware\.neuralnetworks-service-sample-.*   u:object_r:hal_neuralnetworks_sample_exec:s0
 /vendor/bin/hw/android\.hardware\.nfc-service\.cuttlefish  u:object_r:hal_nfc_default_exec:s0
@@ -98,42 +94,18 @@
 /vendor/bin/hw/android\.hardware\.thermal@2\.0-service\.mock  u:object_r:hal_thermal_default_exec:s0
 /vendor/bin/hw/android\.hardware\.identity-service\.remote  u:object_r:hal_identity_remote_exec:s0
 /vendor/bin/hw/android\.hardware\.security\.keymint-service\.remote  u:object_r:hal_keymint_remote_exec:s0
+/vendor/bin/hw/android\.hardware\.security\.keymint-service\.rust  u:object_r:hal_keymint_remote_exec:s0
 /vendor/bin/hw/android\.hardware\.keymaster@4\.1-service.remote  u:object_r:hal_keymaster_remote_exec:s0
-/vendor/bin/hw/android\.hardware\.gatekeeper@1\.0-service.remote  u:object_r:hal_gatekeeper_remote_exec:s0
-/vendor/bin/hw/android\.hardware\.confirmationui@1\.0-service.cuttlefish  u:object_r:hal_confirmationui_cuttlefish_exec:s0
+/vendor/bin/hw/android\.hardware\.gatekeeper-service.remote  u:object_r:hal_gatekeeper_remote_exec:s0
+/vendor/bin/hw/android\.hardware\.confirmationui-service.cuttlefish  u:object_r:hal_confirmationui_cuttlefish_exec:s0
 /vendor/bin/hw/android\.hardware\.oemlock-service.example u:object_r:hal_oemlock_default_exec:s0
 /vendor/bin/hw/android\.hardware\.weaver-service.example u:object_r:hal_weaver_default_exec:s0
 /vendor/bin/hw/android\.hardware\.authsecret@1\.0-service  u:object_r:hal_authsecret_default_exec:s0
 /vendor/bin/hw/android\.hardware\.authsecret-service.example u:object_r:hal_authsecret_default_exec:s0
 /vendor/bin/hw/android\.hardware\.rebootescrow-service\.default  u:object_r:hal_rebootescrow_default_exec:s0
 /vendor/bin/dlkm_loader  u:object_r:dlkm_loader_exec:s0
-/vendor/bin/init\.wifi\.sh    u:object_r:init_wifi_sh_exec:s0
+/vendor/bin/init\.wifi    u:object_r:init_wifi_sh_exec:s0
 
-/vendor/lib(64)?/libdrm.so  u:object_r:same_process_hal_file:s0
-/vendor/lib(64)?/libglapi.so  u:object_r:same_process_hal_file:s0
-/vendor/lib(64)?/dri/.* u:object_r:same_process_hal_file:s0
-/vendor/lib(64)?/hw/android\.hardware\.graphics\.mapper@4\.0-impl\.minigbm\.so u:object_r:same_process_hal_file:s0
-/vendor/lib(64)?/libminigbm_gralloc.so  u:object_r:same_process_hal_file:s0
-/vendor/lib(64)?/libminigbm_gralloc4_utils.so  u:object_r:same_process_hal_file:s0
 /vendor/lib(64)?/hw/android\.hardware\.health@2\.0-impl-2\.1-cuttlefish\.so  u:object_r:same_process_hal_file:s0
-/vendor/lib(64)?/hw/vulkan.pastel.so  u:object_r:same_process_hal_file:s0
 /vendor/lib(64)?/libcuttlefish_fs.so  u:object_r:same_process_hal_file:s0
 /vendor/lib(64)?/vsoc_lib.so  u:object_r:same_process_hal_file:s0
-
-# gfxstream (to be better factored (fewer libraries?))
-/vendor/lib(64)?/hw/vulkan\.ranchu\.so   u:object_r:same_process_hal_file:s0
-/vendor/lib(64)?/libEGL_emulation\.so          u:object_r:same_process_hal_file:s0
-/vendor/lib(64)?/libGLESv1_CM_emulation\.so    u:object_r:same_process_hal_file:s0
-/vendor/lib(64)?/libGLESv2_emulation\.so       u:object_r:same_process_hal_file:s0
-/vendor/lib(64)?/libOpenglCodecCommon\.so       u:object_r:same_process_hal_file:s0
-/vendor/lib(64)?/libOpenglSystemCommon\.so       u:object_r:same_process_hal_file:s0
-/vendor/lib(64)?/lib_renderControl_enc\.so       u:object_r:same_process_hal_file:s0
-/vendor/lib(64)?/libGLESv1_enc\.so       u:object_r:same_process_hal_file:s0
-/vendor/lib(64)?/libGLESv2_enc\.so       u:object_r:same_process_hal_file:s0
-/vendor/lib(64)?/libvulkan_enc\.so       u:object_r:same_process_hal_file:s0
-/vendor/lib(64)?/libandroidemu\.so       u:object_r:same_process_hal_file:s0
-/vendor/lib(64)?/libEGL_angle\.so          u:object_r:same_process_hal_file:s0
-/vendor/lib(64)?/libGLESv1_CM_angle\.so    u:object_r:same_process_hal_file:s0
-/vendor/lib(64)?/libGLESv2_angle\.so       u:object_r:same_process_hal_file:s0
-/vendor/lib(64)?/libfeature_support_angle\.so       u:object_r:same_process_hal_file:s0
-/vendor/lib(64)?/libGoldfishProfiler\.so       u:object_r:same_process_hal_file:s0
diff --git a/shared/sepolicy/vendor/fsck.te b/shared/sepolicy/vendor/fsck.te
new file mode 100644
index 0000000..e9c3b0c
--- /dev/null
+++ b/shared/sepolicy/vendor/fsck.te
@@ -0,0 +1,2 @@
+allow fsck sysfs_devices_block:dir r_dir_perms;
+allow fsck sysfs_devices_block:file r_file_perms;
diff --git a/shared/sepolicy/vendor/genfs_contexts b/shared/sepolicy/vendor/genfs_contexts
index 542db04..758fb54 100644
--- a/shared/sepolicy/vendor/genfs_contexts
+++ b/shared/sepolicy/vendor/genfs_contexts
@@ -7,7 +7,7 @@
 genfscon sysfs $1/0000:00:eval($2 + 1, 16, 2).0/virtio`'eval($3 + 1)`'/block u:object_r:sysfs_devices_block:s0 # vdb
 genfscon sysfs $1/0000:00:eval($2 + 2, 16, 2).0/virtio`'eval($3 + 2)`'/block u:object_r:sysfs_devices_block:s0 # vdc
 genfscon sysfs $1/0000:00:eval($2 + 3, 16, 2).0/virtio`'eval($3 + 3)`'/ndbus0 u:object_r:sysfs_devices_block:s0 # pmem0
-genfscon sysfs $1/0000:00:eval($2 + 4, 16, 2).0/virtio`'eval($3 + 3)`'/ndbus0 u:object_r:sysfs_devices_block:s0 # pmem1
+genfscon sysfs $1/0000:00:eval($2 + 4, 16, 2).0/virtio`'eval($3 + 4)`'/ndbus0 u:object_r:sysfs_devices_block:s0 # pmem1
 dnl')dnl
 dnl
 dnl # $1 = pci prefix
@@ -30,14 +30,48 @@
 dnl')dnl
 dnl
 # crosvm (x86)
-cf_pci_block_device(/devices/pci0000:00, 0xb, 10)
+cf_pci_block_device(/devices/pci0000:00, 0xc, 11)
 cf_pci_gpu_device(/devices/pci0000:00, 0x2)
 ## find /sys/devices/platform/* -type d -name 'rtc[0-9]' | sed 's,/rtc[0-9],,'
 genfscon sysfs /devices/platform/rtc_cmos/rtc u:object_r:sysfs_rtc:s0
-## find /sys/devices/platform/* -type d -name 'wakeup[0-9]'
-genfscon sysfs /devices/LNXSYSTM:00/LNXSYBUS:00/PNP0A08:00/wakeup u:object_r:sysfs_wakeup:s0
+## find /sys/devices/LNXSYSTM:00/* -type d -name 'wakeup[0-9]*'
 genfscon sysfs /devices/LNXSYSTM:00/LNXPWRBN:00/wakeup u:object_r:sysfs_wakeup:s0
+genfscon sysfs /devices/LNXSYSTM:00/LNXSLPBN:00/wakeup u:object_r:sysfs_wakeup:s0
+genfscon sysfs /devices/LNXSYSTM:00/LNXSYBUS:00/PNP0A08:00/wakeup u:object_r:sysfs_wakeup:s0
+genfscon sysfs /devices/LNXSYSTM:00/LNXSYBUS:00/PNP0A08:00/device:00/wakeup u:object_r:sysfs_wakeup:s0
+genfscon sysfs /devices/LNXSYSTM:00/LNXSYBUS:00/PNP0A08:00/device:01/wakeup u:object_r:sysfs_wakeup:s0
+genfscon sysfs /devices/LNXSYSTM:00/LNXSYBUS:00/PNP0A08:00/device:02/wakeup u:object_r:sysfs_wakeup:s0
+genfscon sysfs /devices/LNXSYSTM:00/LNXSYBUS:00/PNP0A08:00/device:03/wakeup u:object_r:sysfs_wakeup:s0
+genfscon sysfs /devices/LNXSYSTM:00/LNXSYBUS:00/PNP0A08:00/device:04/wakeup u:object_r:sysfs_wakeup:s0
+genfscon sysfs /devices/LNXSYSTM:00/LNXSYBUS:00/PNP0A08:00/device:05/wakeup u:object_r:sysfs_wakeup:s0
+genfscon sysfs /devices/LNXSYSTM:00/LNXSYBUS:00/PNP0A08:00/device:06/wakeup u:object_r:sysfs_wakeup:s0
+genfscon sysfs /devices/LNXSYSTM:00/LNXSYBUS:00/PNP0A08:00/device:07/wakeup u:object_r:sysfs_wakeup:s0
+genfscon sysfs /devices/LNXSYSTM:00/LNXSYBUS:00/PNP0A08:00/device:08/wakeup u:object_r:sysfs_wakeup:s0
+genfscon sysfs /devices/LNXSYSTM:00/LNXSYBUS:00/PNP0A08:00/device:09/wakeup u:object_r:sysfs_wakeup:s0
+genfscon sysfs /devices/LNXSYSTM:00/LNXSYBUS:00/PNP0A08:00/device:0a/wakeup u:object_r:sysfs_wakeup:s0
+genfscon sysfs /devices/LNXSYSTM:00/LNXSYBUS:00/PNP0A08:00/device:0b/wakeup u:object_r:sysfs_wakeup:s0
+genfscon sysfs /devices/LNXSYSTM:00/LNXSYBUS:00/PNP0A08:00/device:0c/wakeup u:object_r:sysfs_wakeup:s0
+genfscon sysfs /devices/LNXSYSTM:00/LNXSYBUS:00/PNP0A08:00/device:0d/wakeup u:object_r:sysfs_wakeup:s0
+genfscon sysfs /devices/LNXSYSTM:00/LNXSYBUS:00/PNP0A08:00/device:0e/wakeup u:object_r:sysfs_wakeup:s0
+genfscon sysfs /devices/LNXSYSTM:00/LNXSYBUS:00/PNP0A08:00/device:0f/wakeup u:object_r:sysfs_wakeup:s0
+genfscon sysfs /devices/LNXSYSTM:00/LNXSYBUS:00/PNP0A08:00/device:10/wakeup u:object_r:sysfs_wakeup:s0
+genfscon sysfs /devices/LNXSYSTM:00/LNXSYBUS:00/PNP0A08:00/device:11/wakeup u:object_r:sysfs_wakeup:s0
+genfscon sysfs /devices/LNXSYSTM:00/LNXSYBUS:00/PNP0A08:00/device:12/wakeup u:object_r:sysfs_wakeup:s0
+genfscon sysfs /devices/LNXSYSTM:00/LNXSYBUS:00/PNP0A08:00/device:13/wakeup u:object_r:sysfs_wakeup:s0
+genfscon sysfs /devices/LNXSYSTM:00/LNXSYBUS:00/PNP0A08:00/device:14/wakeup u:object_r:sysfs_wakeup:s0
+genfscon sysfs /devices/LNXSYSTM:00/LNXSYBUS:00/PNP0A08:00/device:15/wakeup u:object_r:sysfs_wakeup:s0
+genfscon sysfs /devices/LNXSYSTM:00/LNXSYBUS:00/PNP0A08:00/device:16/wakeup u:object_r:sysfs_wakeup:s0
+genfscon sysfs /devices/LNXSYSTM:00/LNXSYBUS:00/PNP0A08:00/device:17/wakeup u:object_r:sysfs_wakeup:s0
+genfscon sysfs /devices/LNXSYSTM:00/LNXSYBUS:00/PNP0A08:00/device:18/wakeup u:object_r:sysfs_wakeup:s0
+genfscon sysfs /devices/LNXSYSTM:00/LNXSYBUS:00/PNP0A08:00/device:19/wakeup u:object_r:sysfs_wakeup:s0
+genfscon sysfs /devices/LNXSYSTM:00/LNXSYBUS:00/PNP0A08:00/device:1a/wakeup u:object_r:sysfs_wakeup:s0
+genfscon sysfs /devices/LNXSYSTM:00/LNXSYBUS:00/PNP0A08:00/device:1b/wakeup u:object_r:sysfs_wakeup:s0
+genfscon sysfs /devices/LNXSYSTM:00/LNXSYBUS:00/PNP0A08:00/device:1c/wakeup u:object_r:sysfs_wakeup:s0
+genfscon sysfs /devices/LNXSYSTM:00/LNXSYBUS:00/PNP0A08:00/device:1d/wakeup u:object_r:sysfs_wakeup:s0
+genfscon sysfs /devices/LNXSYSTM:00/LNXSYBUS:00/PNP0A08:00/device:1e/wakeup u:object_r:sysfs_wakeup:s0
+## find /sys/devices/platform/* -type d -name 'wakeup[0-9]*'
 genfscon sysfs /devices/platform/rtc_cmos/rtc/rtc0/wakeup3 u:object_r:sysfs_wakeup:s0
+genfscon sysfs /devices/platform/rtc_cmos/rtc/rtc0/wakeup4 u:object_r:sysfs_wakeup:s0
 cf_rtc_wakeup_alarmtimer(/devices/platform/rtc_cmos, 0, 1)
 ## currently disabled
 #genfscon sysfs /devices/LNXSYSTM:00/GFSH0001:00/wakeup u:object_r:sysfs_wakeup:s0
@@ -48,7 +82,7 @@
 genfscon sysfs /devices/virtual/mac80211_hwsim/hwsim1/net u:object_r:sysfs_net:s0
 
 # crosvm (arm64)
-cf_pci_block_device(/devices/platform/10000.pci/pci0000:00, 0xb, 10)
+cf_pci_block_device(/devices/platform/10000.pci/pci0000:00, 0xd, 12)
 cf_pci_gpu_device(/devices/platform/10000.pci/pci0000:00, 0x2)
 ## find /sys/devices/platform/* -type d -name 'rtc[0-9]' | sed 's,/rtc[0-9],,'
 genfscon sysfs /devices/platform/2000.rtc/rtc u:object_r:sysfs_rtc:s0
@@ -56,7 +90,7 @@
 ## arm64 2000.rtc on crosvm does not currently expose a wakeup node
 
 # qemu (x86)
-cf_pci_block_device(/devices/pci0000:00, 0xb, 9)
+cf_pci_block_device(/devices/pci0000:00, 0xd, 11)
 #cf_pci_gpu_device(/devices/pci0000:00, 0x2) - duplicated with crosvm(x86)
 ## find /sys/devices/platform/* -type d -name 'rtc[0-9]' | sed 's,/rtc[0-9],,'
 genfscon sysfs /devices/pnp0/00:04/rtc u:object_r:sysfs_rtc:s0
@@ -64,19 +98,23 @@
 cf_rtc_wakeup_alarmtimer(/devices/pnp0/00:04, 0, 19)
 
 # qemu (arm64)
-cf_pci_block_device(/devices/platform/4010000000.pcie/pci0000:00, 0xa, 9)
-cf_pci_gpu_device(/devices/platform/4010000000.pcie/pci0000:00, 0x2)
+cf_pci_block_device(/devices/platform/4010000000.pcie/pci0000:00, 0xc, 11)
+cf_pci_gpu_device(/devices/platform/4010000000.pcie/pci0000:00, 0x1)
 ## find /sys/devices/platform/* -type d -name 'rtc[0-9]' | sed 's,/rtc[0-9],,'
 genfscon sysfs /devices/platform/9010000.pl031/rtc u:object_r:sysfs_rtc:s0
 ## find /sys/devices/platform/* -type d -name 'wakeup[0-9]'
 cf_rtc_wakeup_alarmtimer(/devices/platform/9010000.pl031, 0, 0)
 
 # qemu (arm)
-cf_pci_block_device(/devices/platform/3f000000.pcie/pci0000:00, 0xa, 9)
-cf_pci_gpu_device(/devices/platform/3f000000.pcie/pci0000:00, 0x2)
+cf_pci_block_device(/devices/platform/3f000000.pcie/pci0000:00, 0xc, 11)
+cf_pci_gpu_device(/devices/platform/3f000000.pcie/pci0000:00, 0x1)
 genfscon sysfs /devices/platform/rtc-test.1/wakeup/wakeup2 u:object_r:sysfs_wakeup:s0
 genfscon sysfs /devices/platform/rtc-test.2/wakeup/wakeup3 u:object_r:sysfs_wakeup:s0
 
+# qemu (riscv64)
+cf_pci_block_device(/devices/platform/soc/30000000.pci/pci0000:00, 0xd, 12)
+cf_pci_gpu_device(/devices/platform/soc/30000000.pci/pci0000:00, 0x1)
+
 # common on all platforms / vm managers
 genfscon sysfs /devices/platform/rtc-test.0/rtc u:object_r:sysfs_rtc:s0
 genfscon sysfs /devices/platform/rtc-test.1/rtc u:object_r:sysfs_rtc:s0
diff --git a/shared/sepolicy/vendor/google/mediaprovider.te b/shared/sepolicy/vendor/google/mediaprovider.te
deleted file mode 100644
index 515ef26..0000000
--- a/shared/sepolicy/vendor/google/mediaprovider.te
+++ /dev/null
@@ -1 +0,0 @@
-gpu_access(mediaprovider)
diff --git a/shared/sepolicy/vendor/hal_audio_cuttlefish.te b/shared/sepolicy/vendor/hal_audio_cuttlefish.te
index 0bdd256..9db6b63 100644
--- a/shared/sepolicy/vendor/hal_audio_cuttlefish.te
+++ b/shared/sepolicy/vendor/hal_audio_cuttlefish.te
@@ -5,5 +5,7 @@
 
 init_daemon_domain(hal_audio_cuttlefish)
 
+hal_client_domain(hal_audio_cuttlefish, hal_allocator)
+
 binder_use(hal_audio_cuttlefish)
 allow hal_audio_cuttlefish audioserver:fifo_file write;
diff --git a/shared/sepolicy/vendor/hal_camera_default.te b/shared/sepolicy/vendor/hal_camera_default.te
deleted file mode 100644
index e4a1156..0000000
--- a/shared/sepolicy/vendor/hal_camera_default.te
+++ /dev/null
@@ -1,20 +0,0 @@
-vndbinder_use(hal_camera_default)
-
-hal_client_domain(hal_camera_default, hal_graphics_allocator)
-
-# For camera hal to talk with sensor service
-binder_call(hal_camera_default, sensor_service_server)
-binder_call(sensor_service_server, hal_camera_default)
-
-# Allow the Camera HAL to communicate with the thermal HAL.
-hal_client_domain(hal_camera_default, hal_thermal)
-
-gpu_access(hal_camera_default)
-
-# Vsocket camera
-allow hal_camera_default self:vsock_socket { accept bind create getopt listen read write };
-
-# The camera HAL can respond to APEX updates (see ApexUpdateListener), but this
-# is not used by the emulated camera HAL APEX. Ignore these denials.
-dontaudit hal_camera_default property_socket:sock_file { write };
-dontaudit hal_camera_default apex_info_file:file { read };
diff --git a/shared/sepolicy/vendor/hal_confirmationui_cuttlefish.te b/shared/sepolicy/vendor/hal_confirmationui_cuttlefish.te
index 13cd1a9..9b843c2 100644
--- a/shared/sepolicy/vendor/hal_confirmationui_cuttlefish.te
+++ b/shared/sepolicy/vendor/hal_confirmationui_cuttlefish.te
@@ -4,11 +4,17 @@
 type hal_confirmationui_cuttlefish_exec, exec_type, vendor_file_type, file_type;
 init_daemon_domain(hal_confirmationui_cuttlefish)
 
-vendor_internal_prop(vendor_vsock_confirmationui_port_prop)
-get_prop(hal_confirmationui_cuttlefish, vendor_vsock_confirmationui_port_prop)
+binder_call(hal_confirmationui_cuttlefish, keystore)
 
-allow hal_confirmationui_cuttlefish self:{ vsock_socket } { create getopt read write getattr connect shutdown };
+type confirmationui_device, file_type;
+
+allow hal_confirmationui_cuttlefish device:dir r_dir_perms;
+allow hal_confirmationui_cuttlefish confirmationui_device:chr_file rw_file_perms;
+
+vendor_internal_prop(vendor_enable_confirmationui_prop)
+get_prop(hal_confirmationui_cuttlefish, vendor_enable_confirmationui_prop)
 
 # Write to kernel log (/dev/kmsg)
 allow hal_confirmationui_cuttlefish kmsg_device:chr_file w_file_perms;
 allow hal_confirmationui_cuttlefish kmsg_device:chr_file getattr;
+
diff --git a/shared/sepolicy/vendor/hal_graphics_allocator_default.te b/shared/sepolicy/vendor/hal_graphics_allocator_default.te
deleted file mode 100644
index 00f38cc..0000000
--- a/shared/sepolicy/vendor/hal_graphics_allocator_default.te
+++ /dev/null
@@ -1 +0,0 @@
-gpu_access(hal_graphics_allocator_default)
diff --git a/shared/sepolicy/vendor/hal_graphics_composer.te b/shared/sepolicy/vendor/hal_graphics_composer.te
deleted file mode 100644
index d08af30..0000000
--- a/shared/sepolicy/vendor/hal_graphics_composer.te
+++ /dev/null
@@ -1,11 +0,0 @@
-allow hal_graphics_composer_server hal_graphics_allocator_default_tmpfs:file read;
-allow hal_graphics_composer_server self:{ socket vsock_socket } create_socket_perms_no_ioctl;
-gpu_access(hal_graphics_composer_server)
-
-get_prop(hal_graphics_composer_server, vendor_cuttlefish_config_server_port_prop)
-get_prop(hal_graphics_composer_server, vendor_hwcomposer_prop)
-
-# Persistent memory for some hwcomposer configuration.
-type hal_graphics_composer_pmem_device, dev_type;
-allow hal_graphics_composer_server hal_graphics_composer_pmem_device:blk_file rw_file_perms;
-allow hal_graphics_composer_server block_device:dir search;
diff --git a/shared/sepolicy/vendor/hal_keymint_remote.te b/shared/sepolicy/vendor/hal_keymint_remote.te
index 7d5f6d5..c1baebb 100644
--- a/shared/sepolicy/vendor/hal_keymint_remote.te
+++ b/shared/sepolicy/vendor/hal_keymint_remote.te
@@ -13,3 +13,4 @@
 
 get_prop(hal_keymint_remote, vendor_security_patch_level_prop)
 get_prop(hal_keymint_remote, vendor_boot_security_patch_level_prop)
+get_prop(hal_keymint_remote, serialno_prop)
diff --git a/shared/sepolicy/vendor/hal_neuralnetworks_sample.te b/shared/sepolicy/vendor/hal_neuralnetworks_sample.te
index 43198aa..44b5c84 100644
--- a/shared/sepolicy/vendor/hal_neuralnetworks_sample.te
+++ b/shared/sepolicy/vendor/hal_neuralnetworks_sample.te
@@ -3,5 +3,3 @@
 
 type hal_neuralnetworks_sample_exec, exec_type, vendor_file_type, file_type;
 init_daemon_domain(hal_neuralnetworks_sample)
-
-gpu_access(hal_neuralnetworks_sample)
diff --git a/shared/sepolicy/vendor/hal_uwb.te b/shared/sepolicy/vendor/hal_uwb.te
new file mode 100644
index 0000000..0165cc3
--- /dev/null
+++ b/shared/sepolicy/vendor/hal_uwb.te
@@ -0,0 +1 @@
+allow hal_uwb_default uwb_device:chr_file { open read write ioctl};
diff --git a/shared/sepolicy/vendor/init.te b/shared/sepolicy/vendor/init.te
index a19eb13..22a6758 100644
--- a/shared/sepolicy/vendor/init.te
+++ b/shared/sepolicy/vendor/init.te
@@ -28,6 +28,8 @@
 
 allow init keymaster_device:chr_file rw_file_perms;
 allow init gatekeeper_device:chr_file rw_file_perms;
+allow init confirmationui_device:chr_file rw_file_perms;
 allow init bt_device:chr_file rw_file_perms;
+allow init uwb_device:chr_file rw_file_perms;
 
 allow init frp_block_device:blk_file setattr;
diff --git a/shared/sepolicy/vendor/libcuttlefish_rild.te b/shared/sepolicy/vendor/libcuttlefish_rild.te
deleted file mode 100644
index 28412c7..0000000
--- a/shared/sepolicy/vendor/libcuttlefish_rild.te
+++ /dev/null
@@ -1,14 +0,0 @@
-type libcuttlefish_rild, domain;
-type libcuttlefish_rild_exec, exec_type, vendor_file_type, file_type;
-
-init_daemon_domain(libcuttlefish_rild)
-
-hal_server_domain(libcuttlefish_rild, hal_telephony)
-
-# Failing to create these sockets appears to be non-fatal
-net_domain(libcuttlefish_rild)
-
-get_prop(libcuttlefish_rild, vendor_cuttlefish_config_server_port_prop)
-get_prop(libcuttlefish_rild, vendor_modem_simulator_ports_prop)
-
-allow libcuttlefish_rild self:{ socket vsock_socket } { create_socket_perms_no_ioctl getattr };
diff --git a/shared/sepolicy/vendor/mediacodec.te b/shared/sepolicy/vendor/mediacodec.te
index dcb0a03..e26f128 100644
--- a/shared/sepolicy/vendor/mediacodec.te
+++ b/shared/sepolicy/vendor/mediacodec.te
@@ -1,3 +1 @@
 allow mediacodec system_file:dir r_dir_perms;
-
-gpu_access(mediacodec)
diff --git a/shared/sepolicy/vendor/mediaserver.te b/shared/sepolicy/vendor/mediaserver.te
deleted file mode 100644
index 922af2c..0000000
--- a/shared/sepolicy/vendor/mediaserver.te
+++ /dev/null
@@ -1 +0,0 @@
-gpu_access(mediaserver)
diff --git a/shared/sepolicy/vendor/mediaswcodec.te b/shared/sepolicy/vendor/mediaswcodec.te
deleted file mode 100644
index ff9c5b5..0000000
--- a/shared/sepolicy/vendor/mediaswcodec.te
+++ /dev/null
@@ -1 +0,0 @@
-gpu_access(mediaswcodec)
diff --git a/shared/sepolicy/vendor/mediatranscoding.te b/shared/sepolicy/vendor/mediatranscoding.te
deleted file mode 100644
index 47f6d8e..0000000
--- a/shared/sepolicy/vendor/mediatranscoding.te
+++ /dev/null
@@ -1,2 +0,0 @@
-# Allow mediatranscoding service to access the GPU
-gpu_access(mediatranscoding)
diff --git a/shared/sepolicy/vendor/metrics_helper.te b/shared/sepolicy/vendor/metrics_helper.te
new file mode 100644
index 0000000..eb608ed
--- /dev/null
+++ b/shared/sepolicy/vendor/metrics_helper.te
@@ -0,0 +1,15 @@
+type metrics_helper, domain;
+type metrics_helper_exec, exec_type, vendor_file_type, file_type;
+
+init_daemon_domain(metrics_helper)
+
+# set sys.powerctl
+# TODO(b/241398817) powerctl_prop is probably not the best domain for this
+# purpose. Keep it as a workaround and check later if it's worth creating a new
+# one.
+set_prop(metrics_helper, powerctl_prop)
+
+type metrics_helper_device, file_type;
+
+allow metrics_helper device:dir r_dir_perms;
+allow metrics_helper metrics_helper_device:chr_file rw_file_perms;
diff --git a/shared/sepolicy/vendor/platform_app.te b/shared/sepolicy/vendor/platform_app.te
index bb4160d..703e39b 100644
--- a/shared/sepolicy/vendor/platform_app.te
+++ b/shared/sepolicy/vendor/platform_app.te
@@ -1,4 +1,9 @@
 gpu_access(platform_app)
 
 allow platform_app broadcastradio_service:service_manager find;
-allow platform_app hal_wlc_hwservice:hwservice_manager find;
\ No newline at end of file
+allow platform_app hal_wlc_hwservice:hwservice_manager find;
+
+# b/263830018
+# Ignore lookup for vendor.google.wireless_charger.IWirelessCharger/default
+# and vendor.google.google_battery.IGoogleBattery/default.
+dontaudit platform_app default_android_service:service_manager find;
diff --git a/shared/sepolicy/vendor/property.te b/shared/sepolicy/vendor/property.te
index 91b30fc..371ce92 100644
--- a/shared/sepolicy/vendor/property.te
+++ b/shared/sepolicy/vendor/property.te
@@ -3,3 +3,4 @@
 vendor_internal_prop(vendor_boot_security_patch_level_prop)
 vendor_internal_prop(vendor_hwcomposer_prop)
 vendor_restricted_prop(vendor_wlan_versions_prop)
+vendor_internal_prop(vendor_device_prop)
diff --git a/shared/sepolicy/vendor/property_contexts b/shared/sepolicy/vendor/property_contexts
index 9b98ed1..e68d1ff 100644
--- a/shared/sepolicy/vendor/property_contexts
+++ b/shared/sepolicy/vendor/property_contexts
@@ -6,7 +6,7 @@
 ro.boot.hardware.vulkan u:object_r:vendor_graphics_config_prop:s0 exact string
 ro.boot.lcd_density u:object_r:vendor_graphics_config_prop:s0 exact int
 ro.boot.vsock_keyboard_port  u:object_r:vendor_vsock_keyboard_port:s0
-ro.boot.vsock_confirmationui_port  u:object_r:vendor_vsock_confirmationui_port_prop:s0
+ro.boot.enable_confirmationui  u:object_r:vendor_enable_confirmationui_prop:s0
 ro.boot.modem_simulator_ports  u:object_r:vendor_modem_simulator_ports_prop:s0
 ro.boot.vsock_touch_port  u:object_r:vendor_vsock_touch_port:s0
 ro.boot.wifi_mac_prefix  u:object_r:vendor_wifi_mac_prefix:s0 exact string
@@ -14,7 +14,9 @@
 ro.vendor.boot_security_patch u:object_r:vendor_boot_security_patch_level_prop:s0
 vendor.bt.rootcanal_mac_address  u:object_r:vendor_bt_rootcanal_prop:s0
 vendor.bt.rootcanal_test_console  u:object_r:vendor_bt_rootcanal_prop:s0
+ro.vendor.hwcomposer.display_finder_mode  u:object_r:vendor_hwcomposer_prop:s0 exact string
 ro.vendor.hwcomposer.mode  u:object_r:vendor_hwcomposer_prop:s0 exact string
 ro.vendor.hwcomposer.pmem  u:object_r:vendor_hwcomposer_prop:s0 exact string
 vendor.wlan.firmware.version   u:object_r:vendor_wlan_versions_prop:s0 exact string
 vendor.wlan.driver.version     u:object_r:vendor_wlan_versions_prop:s0 exact string
+vendor.dlkm.modules.ready      u:object_r:vendor_device_prop:s0 exact bool
diff --git a/shared/sepolicy/vendor/radio.te b/shared/sepolicy/vendor/radio.te
deleted file mode 100644
index 62b7582..0000000
--- a/shared/sepolicy/vendor/radio.te
+++ /dev/null
@@ -1 +0,0 @@
-gpu_access(radio)
diff --git a/shared/sepolicy/vendor/service_contexts b/shared/sepolicy/vendor/service_contexts
index c41503e..6aaa86b 100644
--- a/shared/sepolicy/vendor/service_contexts
+++ b/shared/sepolicy/vendor/service_contexts
@@ -1,8 +1,5 @@
 android.hardware.drm.IDrmFactory/widevine    u:object_r:hal_drm_service:s0
 android.hardware.neuralnetworks.IDevice/nnapi-sample_all u:object_r:hal_neuralnetworks_service:s0
-android.hardware.neuralnetworks.IDevice/nnapi-sample_float_fast u:object_r:hal_neuralnetworks_service:s0
-android.hardware.neuralnetworks.IDevice/nnapi-sample_float_slow u:object_r:hal_neuralnetworks_service:s0
-android.hardware.neuralnetworks.IDevice/nnapi-sample_minimal    u:object_r:hal_neuralnetworks_service:s0
 android.hardware.neuralnetworks.IDevice/nnapi-sample_quant    u:object_r:hal_neuralnetworks_service:s0
 android.hardware.neuralnetworks.IDevice/nnapi-sample_sl_shim  u:object_r:hal_neuralnetworks_service:s0
 
diff --git a/shared/sepolicy/vendor/surfaceflinger.te b/shared/sepolicy/vendor/surfaceflinger.te
index 9b3e2c7..ffd04b7 100644
--- a/shared/sepolicy/vendor/surfaceflinger.te
+++ b/shared/sepolicy/vendor/surfaceflinger.te
@@ -1,6 +1,2 @@
-allow surfaceflinger self:process execmem;
-
 allow surfaceflinger hal_graphics_composer_default:dir search;
 allow surfaceflinger hal_graphics_composer_default:file r_file_perms;
-
-gpu_access(surfaceflinger)
diff --git a/shared/sepolicy/vendor/system_app.te b/shared/sepolicy/vendor/system_app.te
deleted file mode 100644
index 4a85066..0000000
--- a/shared/sepolicy/vendor/system_app.te
+++ /dev/null
@@ -1 +0,0 @@
-gpu_access(system_app)
diff --git a/shared/sepolicy/vendor/system_server.te b/shared/sepolicy/vendor/system_server.te
index 171ea52..7d0ceaa 100644
--- a/shared/sepolicy/vendor/system_server.te
+++ b/shared/sepolicy/vendor/system_server.te
@@ -1,10 +1,2 @@
-gpu_access(system_server)
-
 # Cuttlefish is still using the legacy wifi HAL (pre-HIDL)
 get_prop(system_server, wifi_hal_prop)
-
-# TODO(b/65201432): Swiftshader needs to create executable memory.
-allow system_server self:process execmem;
-
-# For com.android.tethering.inprocess
-dontaudit system_server { fs_bpf fs_bpf_tethering }:dir search;
diff --git a/shared/sepolicy/vendor/telephony/file_contexts b/shared/sepolicy/vendor/telephony/file_contexts
new file mode 100644
index 0000000..cb8cd60
--- /dev/null
+++ b/shared/sepolicy/vendor/telephony/file_contexts
@@ -0,0 +1 @@
+/vendor/bin/hw/libcuttlefish-rild  u:object_r:libcuttlefish_rild_exec:s0
diff --git a/shared/sepolicy/vendor/telephony/libcuttlefish_rild.te b/shared/sepolicy/vendor/telephony/libcuttlefish_rild.te
new file mode 100644
index 0000000..2b57859
--- /dev/null
+++ b/shared/sepolicy/vendor/telephony/libcuttlefish_rild.te
@@ -0,0 +1,15 @@
+type libcuttlefish_rild, domain;
+type libcuttlefish_rild_exec, exec_type, vendor_file_type, file_type;
+
+init_daemon_domain(libcuttlefish_rild)
+
+hal_server_domain(libcuttlefish_rild, hal_telephony)
+
+# Failing to create these sockets appears to be non-fatal
+net_domain(libcuttlefish_rild)
+
+get_prop(libcuttlefish_rild, vendor_cuttlefish_config_server_port_prop)
+get_prop(libcuttlefish_rild, vendor_modem_simulator_ports_prop)
+
+allow libcuttlefish_rild self:{ socket vsock_socket } { create_socket_perms_no_ioctl getattr };
+allow libcuttlefish_rild su:{ socket udp_socket } { create_socket_perms_no_ioctl getattr };
\ No newline at end of file
diff --git a/shared/sepolicy/vendor/ueventd.te b/shared/sepolicy/vendor/ueventd.te
index 9cb896f..6bea104 100644
--- a/shared/sepolicy/vendor/ueventd.te
+++ b/shared/sepolicy/vendor/ueventd.te
@@ -2,3 +2,5 @@
 
 allow ueventd keymaster_device:chr_file { rw_file_perms create setattr };
 allow ueventd gatekeeper_device:chr_file { rw_file_perms create setattr };
+allow ueventd confirmationui_device:chr_file { rw_file_perms create setattr };
+allow ueventd metrics_helper_device:chr_file { rw_file_perms create setattr };
diff --git a/shared/sepolicy/vendor/uwb_device.te b/shared/sepolicy/vendor/uwb_device.te
new file mode 100644
index 0000000..c26eae1
--- /dev/null
+++ b/shared/sepolicy/vendor/uwb_device.te
@@ -0,0 +1 @@
+type uwb_device, dev_type;
diff --git a/shared/sepolicy/vendor/zygote.te b/shared/sepolicy/vendor/zygote.te
deleted file mode 100644
index 38b67e0..0000000
--- a/shared/sepolicy/vendor/zygote.te
+++ /dev/null
@@ -1,3 +0,0 @@
-# TODO(b/65201432): Remove once execmem issue due to OpenGL is resolved.
-# external/swiftshader commit 10e1f74a65e44daafb6c8f3b10ee69d39a6ca024 triggers execmem in zygote
-allow zygote self:process execmem;
diff --git a/shared/slim/Android.bp b/shared/slim/Android.bp
index 2d5cac7..5271575 100644
--- a/shared/slim/Android.bp
+++ b/shared/slim/Android.bp
@@ -1,3 +1,18 @@
+//
+// Copyright (C) 2021 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
 package {
     default_applicable_licenses: ["Android-Apache-2.0"],
 }
diff --git a/shared/slim/device_vendor.mk b/shared/slim/device_vendor.mk
index cc46e5c..7a21b78 100644
--- a/shared/slim/device_vendor.mk
+++ b/shared/slim/device_vendor.mk
@@ -18,7 +18,6 @@
 SYSTEM_EXT_MANIFEST_FILES += device/google/cuttlefish/shared/config/system_ext_manifest.xml
 
 $(call inherit-product, $(SRC_TARGET_DIR)/product/handheld_vendor.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/product/telephony_vendor.mk)
 
 ifneq ($(LOCAL_PREFER_VENDOR_APEX),true)
 PRODUCT_COPY_FILES += \
@@ -27,27 +26,14 @@
 endif
 
 $(call inherit-product, frameworks/native/build/phone-xhdpi-2048-dalvik-heap.mk)
+$(call inherit-product, device/google/cuttlefish/shared/camera/device_vendor.mk)
+$(call inherit-product, device/google/cuttlefish/shared/graphics/device_vendor.mk)
+$(call inherit-product, device/google/cuttlefish/shared/swiftshader/device_vendor.mk)
+$(call inherit-product, device/google/cuttlefish/shared/telephony/device_vendor.mk)
+$(call inherit-product, device/google/cuttlefish/shared/virgl/device_vendor.mk)
 $(call inherit-product, device/google/cuttlefish/shared/device.mk)
 
 PRODUCT_VENDOR_PROPERTIES += \
-    keyguard.no_require_sim=true \
-    ro.cdma.home.operator.alpha=Android \
-    ro.cdma.home.operator.numeric=302780 \
-    ro.com.android.dataroaming=true \
-    ro.telephony.default_network=9 \
-
-TARGET_USES_CF_RILD ?= true
-ifeq ($(TARGET_USES_CF_RILD),true)
-ifeq ($(LOCAL_PREFER_VENDOR_APEX),true)
-PRODUCT_PACKAGES += com.google.cf.rild
-else
-PRODUCT_PACKAGES += \
-    libcuttlefish-ril-2 \
-    libcuttlefish-rild
-endif
-endif
-
-PRODUCT_VENDOR_PROPERTIES += \
     debug.hwui.drawing_enabled=0 \
 
 ifneq ($(LOCAL_PREFER_VENDOR_APEX),true)
@@ -55,8 +41,7 @@
     frameworks/native/data/etc/android.hardware.biometrics.face.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.hardware.biometrics.face.xml \
     frameworks/native/data/etc/android.hardware.faketouch.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.hardware.faketouch.xml \
     frameworks/native/data/etc/android.hardware.fingerprint.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.hardware.fingerprint.xml \
-    frameworks/native/data/etc/android.hardware.telephony.gsm.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.hardware.telephony.gsm.xml \
-    frameworks/native/data/etc/android.hardware.telephony.ims.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.hardware.telephony.ims.xml
+
 endif
 
 # Runtime Resource Overlays
diff --git a/shared/swiftshader/BoardConfig.mk b/shared/swiftshader/BoardConfig.mk
new file mode 100644
index 0000000..384e709
--- /dev/null
+++ b/shared/swiftshader/BoardConfig.mk
@@ -0,0 +1,19 @@
+#
+# Copyright 2022 The Android Open-Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+-include device/google/cuttlefish/shared/angle/BoardConfig.mk
+
+BOARD_VENDOR_SEPOLICY_DIRS += device/google/cuttlefish/shared/swiftshader/sepolicy
diff --git a/shared/swiftshader/device_vendor.mk b/shared/swiftshader/device_vendor.mk
new file mode 100644
index 0000000..c4bd4b1
--- /dev/null
+++ b/shared/swiftshader/device_vendor.mk
@@ -0,0 +1,25 @@
+#
+# Copyright (C) 2017 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+$(call inherit-product, device/google/cuttlefish/shared/angle/device_vendor.mk)
+
+ifeq ($(TARGET_VULKAN_SUPPORT),true)
+
+# TODO(b/65201432): Swiftshader needs to create executable memory.
+PRODUCT_REQUIRES_INSECURE_EXECMEM_FOR_SWIFTSHADER := true
+PRODUCT_PACKAGES += vulkan.pastel
+
+endif
diff --git a/shared/swiftshader/sepolicy/OWNERS b/shared/swiftshader/sepolicy/OWNERS
new file mode 100644
index 0000000..9b37b0e
--- /dev/null
+++ b/shared/swiftshader/sepolicy/OWNERS
@@ -0,0 +1,4 @@
+include platform/system/sepolicy:/OWNERS
+
+adelva@google.com
+rurumihong@google.com
diff --git a/shared/swiftshader/sepolicy/bootanim.te b/shared/swiftshader/sepolicy/bootanim.te
new file mode 100644
index 0000000..19e205a
--- /dev/null
+++ b/shared/swiftshader/sepolicy/bootanim.te
@@ -0,0 +1,2 @@
+# TODO(b/65201432): Swiftshader needs to create executable memory.
+allow bootanim self:process execmem;
diff --git a/shared/swiftshader/sepolicy/file_contexts b/shared/swiftshader/sepolicy/file_contexts
new file mode 100644
index 0000000..a6c9648
--- /dev/null
+++ b/shared/swiftshader/sepolicy/file_contexts
@@ -0,0 +1 @@
+/vendor/lib(64)?/hw/vulkan.pastel.so  u:object_r:same_process_hal_file:s0
diff --git a/shared/swiftshader/sepolicy/surfaceflinger.te b/shared/swiftshader/sepolicy/surfaceflinger.te
new file mode 100644
index 0000000..716d879
--- /dev/null
+++ b/shared/swiftshader/sepolicy/surfaceflinger.te
@@ -0,0 +1,2 @@
+# TODO(b/65201432): Swiftshader needs to create executable memory.
+allow surfaceflinger self:process execmem;
diff --git a/shared/swiftshader/sepolicy/system_server.te b/shared/swiftshader/sepolicy/system_server.te
new file mode 100644
index 0000000..7b249c1
--- /dev/null
+++ b/shared/swiftshader/sepolicy/system_server.te
@@ -0,0 +1,2 @@
+# TODO(b/65201432): Swiftshader needs to create executable memory.
+allow system_server self:process execmem;
diff --git a/shared/swiftshader/sepolicy/zygote.te b/shared/swiftshader/sepolicy/zygote.te
new file mode 100644
index 0000000..78dc7ae
--- /dev/null
+++ b/shared/swiftshader/sepolicy/zygote.te
@@ -0,0 +1,2 @@
+# TODO(b/65201432): Swiftshader needs to create executable memory.
+allow zygote self:process execmem;
diff --git a/shared/telephony/BoardConfig.mk b/shared/telephony/BoardConfig.mk
new file mode 100644
index 0000000..1083635
--- /dev/null
+++ b/shared/telephony/BoardConfig.mk
@@ -0,0 +1,17 @@
+#
+# Copyright 2022 The Android Open-Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+BOARD_VENDOR_SEPOLICY_DIRS += device/google/cuttlefish/shared/sepolicy/vendor/telephony
diff --git a/shared/telephony/device_vendor.mk b/shared/telephony/device_vendor.mk
new file mode 100644
index 0000000..bd78cc6
--- /dev/null
+++ b/shared/telephony/device_vendor.mk
@@ -0,0 +1,47 @@
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# If downstream target does not want telephony, do not include this file
+
+ifneq ($(TARGET_NO_TELEPHONY), true)
+
+$(call inherit-product, $(SRC_TARGET_DIR)/product/telephony_vendor.mk)
+
+PRODUCT_VENDOR_PROPERTIES += \
+    keyguard.no_require_sim=true \
+    ro.cdma.home.operator.alpha=Android \
+    ro.cdma.home.operator.numeric=302780 \
+    ro.com.android.dataroaming=true \
+    ro.telephony.default_network=9 \
+
+ifeq ($(LOCAL_PREFER_VENDOR_APEX),true)
+PRODUCT_PACKAGES += com.google.cf.rild
+else
+# If downstream target provides its own RILD, set TARGET_USES_CF_RILD := false
+# If the target prefers vendor APEX, this feature is not supported
+TARGET_USES_CF_RILD ?= true
+ifeq ($(TARGET_USES_CF_RILD),true)
+PRODUCT_PACKAGES += \
+    libcuttlefish-ril-2 \
+    libcuttlefish-rild
+endif
+PRODUCT_COPY_FILES += \
+    frameworks/native/data/etc/android.hardware.telephony.gsm.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.hardware.telephony.gsm.xml \
+    frameworks/native/data/etc/android.hardware.telephony.ims.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.hardware.telephony.ims.xml \
+    frameworks/native/data/etc/android.hardware.telephony.satellite.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.hardware.telephony.satellite.xml
+endif # if not LOCAL_PREFER_VENDOR_APEX
+
+endif # if not TARGET_NO_TELEPHONY
diff --git a/shared/tv/device_vendor.mk b/shared/tv/device_vendor.mk
index 5b91b64..4769a5a 100644
--- a/shared/tv/device_vendor.mk
+++ b/shared/tv/device_vendor.mk
@@ -18,13 +18,19 @@
 SYSTEM_EXT_MANIFEST_FILES += device/google/cuttlefish/shared/config/system_ext_manifest.xml
 
 $(call inherit-product, device/google/atv/products/atv_vendor.mk)
-$(call inherit-product, device/google/cuttlefish/shared/device.mk)
+
 $(call inherit-product, frameworks/native/build/phone-xhdpi-2048-dalvik-heap.mk)
+$(call inherit-product, device/google/cuttlefish/shared/graphics/device_vendor.mk)
+$(call inherit-product, device/google/cuttlefish/shared/swiftshader/device_vendor.mk)
+$(call inherit-product, device/google/cuttlefish/shared/camera/device_vendor.mk)
+$(call inherit-product, device/google/cuttlefish/shared/virgl/device_vendor.mk)
+$(call inherit-product, device/google/cuttlefish/shared/device.mk)
 
 # Extend cuttlefish common sepolicy with tv-specific functionality
 BOARD_SEPOLICY_DIRS += device/google/cuttlefish/shared/tv/sepolicy/vendor
 
 PRODUCT_COPY_FILES += \
+    device/google/cuttlefish/shared/config/media_codecs_google_tv.xml:$(TARGET_COPY_OUT_VENDOR)/etc/media_codecs_google_tv.xml \
     frameworks/native/data/etc/android.hardware.bluetooth.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.hardware.bluetooth.xml \
     frameworks/native/data/etc/android.hardware.hdmi.cec.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.hardware.hdmi.cec.xml \
     frameworks/native/data/etc/android.hardware.sensor.accelerometer.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.hardware.sensor.accelerometer.xml \
@@ -34,20 +40,33 @@
     hardware/interfaces/tv/tuner/config/sample_tuner_vts_config_1_1.xml:$(TARGET_COPY_OUT_VENDOR)/etc/tuner_vts_config_1_1.xml \
     hardware/interfaces/tv/tuner/config/sample_tuner_vts_config_aidl_V1.xml:$(TARGET_COPY_OUT_VENDOR)/etc/tuner_vts_config_aidl_V1.xml
 
-# HDMI CEC HAL
-PRODUCT_PACKAGES += android.hardware.tv.cec@1.1-service
+# HDMI AIDL HAL
+PRODUCT_PACKAGES += \
+     android.hardware.tv.hdmi.connection-service
+
+# CEC AIDL HAL
+PRODUCT_PACKAGES += \
+     android.hardware.tv.hdmi.cec-service
+
+# EARC AIDL HAL
+PRODUCT_PACKAGES += \
+     android.hardware.tv.hdmi.earc-service
 
 # Setup HDMI CEC as Playback Device
 PRODUCT_PROPERTY_OVERRIDES += ro.hdmi.device_type=4
 
-# Tuner HAL
-PRODUCT_PACKAGES += android.hardware.tv.tuner-service.example
+# Tuner lazy HAL
+PRODUCT_PACKAGES += android.hardware.tv.tuner-service.example-lazy
+PRODUCT_VENDOR_PROPERTIES += ro.tuner.lazyhal=true
+
+# TV Input HAL
+PRODUCT_PACKAGES += android.hardware.tv.input-service.example
 
 # Sample Tuner Input for testing
 #PRODUCT_PACKAGES += LiveTv sampletunertvinput
 
 # Fallback IME and Home apps
-PRODUCT_PACKAGES += LeanbackIME TvSampleLeanbackLauncher TvProvision
+PRODUCT_PACKAGES += LeanbackIME TvSampleLeanbackLauncher
 
 # Enabling managed profiles
 DEVICE_PACKAGE_OVERLAYS += device/google/cuttlefish/shared/tv/overlay
diff --git a/shared/virgl/BoardConfig.mk b/shared/virgl/BoardConfig.mk
new file mode 100644
index 0000000..62d277c
--- /dev/null
+++ b/shared/virgl/BoardConfig.mk
@@ -0,0 +1,19 @@
+#
+# Copyright 2022 The Android Open-Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+BOARD_GPU_DRIVERS := virgl
+
+BOARD_VENDOR_SEPOLICY_DIRS += device/google/cuttlefish/shared/virgl/sepolicy
diff --git a/shared/virgl/device_vendor.mk b/shared/virgl/device_vendor.mk
new file mode 100644
index 0000000..b4c8b34
--- /dev/null
+++ b/shared/virgl/device_vendor.mk
@@ -0,0 +1,19 @@
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+PRODUCT_SOONG_NAMESPACES += external/mesa3d
+
+PRODUCT_PACKAGES += libGLES_mesa
diff --git a/shared/virgl/sepolicy/OWNERS b/shared/virgl/sepolicy/OWNERS
new file mode 100644
index 0000000..9b37b0e
--- /dev/null
+++ b/shared/virgl/sepolicy/OWNERS
@@ -0,0 +1,4 @@
+include platform/system/sepolicy:/OWNERS
+
+adelva@google.com
+rurumihong@google.com
diff --git a/shared/virgl/sepolicy/file_contexts b/shared/virgl/sepolicy/file_contexts
new file mode 100644
index 0000000..f3f6d69
--- /dev/null
+++ b/shared/virgl/sepolicy/file_contexts
@@ -0,0 +1,2 @@
+/vendor/lib(64)?/libglapi.so  u:object_r:same_process_hal_file:s0
+/vendor/lib(64)?/dri/.*       u:object_r:same_process_hal_file:s0
diff --git a/shared/wear/aosp_system.mk b/shared/wear/aosp_system.mk
index 7e8f75e..19531de 100644
--- a/shared/wear/aosp_system.mk
+++ b/shared/wear/aosp_system.mk
@@ -14,7 +14,10 @@
 # limitations under the License.
 #
 
-OVERRIDE_TARGET_FLATTEN_APEX := true
+# TODO(b/275113769): The 'wear' targets currently enforce that APEX files are flattened.
+# As riscv64 targets currently do not support this, this is a lazy-default-init that can
+# be overridden in target files. Once support is enabled, require the override.
+OVERRIDE_TARGET_FLATTEN_APEX ?= true
 
 $(call inherit-product, $(SRC_TARGET_DIR)/product/base_system.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/product/languages_default.mk)
@@ -30,10 +33,7 @@
     Bluetooth \
     CalendarProvider \
     CertInstaller \
-    clatd \
-    clatd.conf \
     DownloadProvider \
-    ethernet-service \
     fsck.f2fs \
     FusedLocation \
     InputDevices \
@@ -59,7 +59,6 @@
 
 PRODUCT_SYSTEM_SERVER_JARS += \
     services \
-    ethernet-service \
 
 PRODUCT_COPY_FILES += \
     system/core/rootdir/etc/public.libraries.wear.txt:system/etc/public.libraries.txt \
diff --git a/shared/wear/device_vendor.mk b/shared/wear/device_vendor.mk
index b37c775..0255534 100644
--- a/shared/wear/device_vendor.mk
+++ b/shared/wear/device_vendor.mk
@@ -25,29 +25,16 @@
     frameworks/native/data/etc/android.software.device_admin.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.software.device_admin.xml \
     frameworks/native/data/etc/wearable_core_hardware.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/wearable_core_hardware.xml \
 
+$(call inherit-product, device/google/cuttlefish/shared/graphics/device_vendor.mk)
+$(call inherit-product, device/google/cuttlefish/shared/swiftshader/device_vendor.mk)
+$(call inherit-product, device/google/cuttlefish/shared/telephony/device_vendor.mk)
+$(call inherit-product, device/google/cuttlefish/shared/virgl/device_vendor.mk)
 $(call inherit-product, device/google/cuttlefish/shared/device.mk)
 
-PRODUCT_VENDOR_PROPERTIES += \
-    keyguard.no_require_sim=true \
-    ro.cdma.home.operator.alpha=Android \
-    ro.cdma.home.operator.numeric=302780 \
-    ro.com.android.dataroaming=true \
-    ro.telephony.default_network=9 \
-
-TARGET_USES_CF_RILD ?= true
-ifeq ($(TARGET_USES_CF_RILD),true)
-$(call inherit-product, $(SRC_TARGET_DIR)/product/telephony_vendor.mk)
-PRODUCT_PACKAGES += \
-    libcuttlefish-ril-2 \
-    libcuttlefish-rild
-endif
-
 PRODUCT_COPY_FILES += \
     frameworks/native/data/etc/android.hardware.audio.output.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.hardware.audio.output.xml \
     frameworks/native/data/etc/android.hardware.faketouch.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.hardware.faketouch.xml \
     frameworks/native/data/etc/android.hardware.location.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.hardware.location.xml \
-    frameworks/native/data/etc/android.hardware.telephony.gsm.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.hardware.telephony.gsm.xml \
-    frameworks/native/data/etc/android.hardware.telephony.ims.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.hardware.telephony.ims.xml \
 
 # Runtime Resource Overlays
 PRODUCT_PACKAGES += \
@@ -55,6 +42,9 @@
     cuttlefish_wear_overlay_frameworks_base_core \
     cuttlefish_wear_overlay_settings_provider \
 
+PRODUCT_PRODUCT_PROPERTIES += \
+    config.disable_cameraservice=true
+
 PRODUCT_CHARACTERISTICS := nosdcard,watch
 
 TARGET_BOARD_INFO_FILE ?= device/google/cuttlefish/shared/wear/android-info.txt
diff --git a/shared/wear/overlays/SettingsProvider/Android.bp b/shared/wear/overlays/SettingsProvider/Android.bp
index e6bde39..d4b764f 100644
--- a/shared/wear/overlays/SettingsProvider/Android.bp
+++ b/shared/wear/overlays/SettingsProvider/Android.bp
@@ -1,3 +1,18 @@
+//
+// Copyright (C) 2021 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
 package {
     default_applicable_licenses: ["Android-Apache-2.0"],
 }
diff --git a/shared/wear/overlays/SettingsProvider/res/values/defaults.xml b/shared/wear/overlays/SettingsProvider/res/values/defaults.xml
index b02a9af..f0e448f 100644
--- a/shared/wear/overlays/SettingsProvider/res/values/defaults.xml
+++ b/shared/wear/overlays/SettingsProvider/res/values/defaults.xml
@@ -35,4 +35,10 @@
     <bool name="def_wifi_on">true</bool>
     <bool name="def_wifi_wakeup_enabled">false</bool>
     <bool name="def_vibrate_when_ringing">true</bool>
+
+    <!--
+        Allows an on-screen keyboard to appear on the emulator by default.
+        There is no UI for this option within Settings for Wear.
+    -->
+    <bool name="def_show_ime_with_hard_keyboard">true</bool>
 </resources>
diff --git a/shared/wear/overlays/core/Android.bp b/shared/wear/overlays/core/Android.bp
index 2f522a9..daaa48a 100644
--- a/shared/wear/overlays/core/Android.bp
+++ b/shared/wear/overlays/core/Android.bp
@@ -1,3 +1,18 @@
+//
+// Copyright (C) 2021 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
 package {
     default_applicable_licenses: ["Android-Apache-2.0"],
 }
diff --git a/tests/graphics/Android.bp b/tests/graphics/Android.bp
index 6767817..2f79be9 100644
--- a/tests/graphics/Android.bp
+++ b/tests/graphics/Android.bp
@@ -17,9 +17,41 @@
 }
 
 java_test_host {
+    name: "CuttlefishDisplayHotplugTest",
+    srcs: [
+        "src/com/android/cuttlefish/tests/CuttlefishDisplayHotplugTest.java",
+    ],
+    data_native_bins: [
+        "cvd",
+        "cvd_internal_display",
+    ],
+    test_options: {
+        unit_test: false,
+    },
+    test_suites: [
+        "device-tests",
+    ],
+    libs: [
+        "cts-tradefed",
+        "tradefed",
+        "compatibility-host-util",
+    ],
+    static_libs: [
+        "cuttlefish_host_test_utils",
+    ],
+    plugins: [
+        "auto_annotation_plugin",
+        "auto_value_plugin",
+    ],
+    data: [
+        ":CuttlefishDisplayHotplugHelperApp",
+    ],
+}
+
+java_test_host {
     name: "GfxstreamEnabledTest",
     srcs: [
-        "src/**/*.java",
+        "src/com/android/cuttlefish/tests/GfxstreamEnabledTest.java",
     ],
     test_options: {
         unit_test: false,
diff --git a/tests/graphics/hotplug/Android.bp b/tests/graphics/hotplug/Android.bp
new file mode 100644
index 0000000..0d6c9e7
--- /dev/null
+++ b/tests/graphics/hotplug/Android.bp
@@ -0,0 +1,39 @@
+// Copyright (C) 2023 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "device_google_cuttlefish_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["device_google_cuttlefish_license"],
+}
+
+android_test_helper_app {
+    name: "CuttlefishDisplayHotplugHelperApp",
+    defaults: ["cts_defaults"],
+    sdk_version: "test_current",
+    static_libs: [
+        "androidx.appcompat_appcompat",
+        "androidx.test.rules",
+        "compatibility-device-util-axt",
+    ],
+    srcs: [
+        "src/com/android/cuttlefish/DisplayHotplugHelperApp.java",
+    ],
+    test_suites: [
+        "device-tests",
+    ],
+}
diff --git a/tests/graphics/hotplug/AndroidManifest.xml b/tests/graphics/hotplug/AndroidManifest.xml
new file mode 100644
index 0000000..51132b6
--- /dev/null
+++ b/tests/graphics/hotplug/AndroidManifest.xml
@@ -0,0 +1,34 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ -->
+
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+     package="com.android.cuttlefish.displayhotplughelper">
+
+    <application>
+      <activity android:name="DisplayHotplugHelperApp"
+           android:label="Cuttlefish Display Hotplug Helper"
+           android:theme="@android:style/Theme.DeviceDefault"
+           android:exported="true">
+        <intent-filter>
+            <action android:name="android.intent.action.MAIN" />
+            <category android:name="android.intent.category.DEFAULT"/>
+            <category android:name="android.intent.category.LAUNCHER" />
+        </intent-filter>
+      </activity>
+
+    </application>
+</manifest>
\ No newline at end of file
diff --git a/tests/graphics/hotplug/src/com/android/cuttlefish/DisplayHotplugHelperApp.java b/tests/graphics/hotplug/src/com/android/cuttlefish/DisplayHotplugHelperApp.java
new file mode 100644
index 0000000..df90545
--- /dev/null
+++ b/tests/graphics/hotplug/src/com/android/cuttlefish/DisplayHotplugHelperApp.java
@@ -0,0 +1,77 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.cuttlefish.displayhotplughelper;
+
+import android.app.Activity;
+import android.hardware.display.DisplayManager;
+import android.os.Bundle;
+import android.util.Log;
+import android.view.Display;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+
+import org.json.JSONArray;
+import org.json.JSONException;
+import org.json.JSONObject;
+
+/**
+ * Helper application to print display information to logcat in predictable
+ * format for CuttlefishDisplayHotplugTests.
+ */
+public class DisplayHotplugHelperApp extends Activity {
+
+    private static final String TAG = "DisplayHotplugHelper";
+
+    private static final String HELPER_APP_UUID_FLAG = "display_hotplug_uuid";
+
+    private JSONObject getDisplayInfo(Display display) throws JSONException {
+        // Cuttlefish displays only have a single mode using the max resolution.
+        final Display.Mode displayMode = display.getMode();
+
+        JSONObject displayInfo = new JSONObject();
+        displayInfo.put("id", display.getDisplayId());
+        displayInfo.put("width", displayMode.getPhysicalWidth());
+        displayInfo.put("height", displayMode.getPhysicalHeight());
+        return displayInfo;
+    }
+
+    @Override
+    public void onCreate(Bundle savedInstanceState) {
+        super.onCreate(savedInstanceState);
+
+        String loggingIdentifier = getIntent().getExtras().getString(HELPER_APP_UUID_FLAG);
+
+        Display[] displays = getSystemService(DisplayManager.class).getDisplays();
+        try {
+            JSONArray displayInfos = new JSONArray();
+            for (Display display : displays) {
+                displayInfos.put(getDisplayInfo(display));
+            }
+            JSONObject displayInfo = new JSONObject();
+            displayInfo.put("displays", displayInfos);
+
+            Log.e(TAG, loggingIdentifier + " displays: " + displayInfo);
+        } catch (JSONException e) {
+            Log.e(TAG, "Failed to create display info JSON: " + e);
+        }
+
+        finishAndRemoveTask();
+    }
+}
diff --git a/tests/graphics/src/com/android/cuttlefish/tests/CuttlefishDisplayHotplugTest.java b/tests/graphics/src/com/android/cuttlefish/tests/CuttlefishDisplayHotplugTest.java
new file mode 100644
index 0000000..627b287
--- /dev/null
+++ b/tests/graphics/src/com/android/cuttlefish/tests/CuttlefishDisplayHotplugTest.java
@@ -0,0 +1,550 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.cuttlefish.tests;
+
+import static com.google.common.truth.Truth.assertThat;
+
+import android.platform.test.annotations.LargeTest;
+
+import com.android.cuttlefish.tests.utils.CuttlefishHostTest;
+import com.android.tradefed.device.DeviceNotAvailableException;
+import com.android.tradefed.device.ITestDevice;
+import com.android.tradefed.log.LogUtil.CLog;
+import com.android.tradefed.testtype.DeviceJUnit4ClassRunner;
+import com.android.tradefed.testtype.junit4.BaseHostJUnit4Test;
+import com.android.tradefed.util.AbiUtils;
+import com.android.tradefed.util.CommandResult;
+import com.android.tradefed.util.CommandStatus;
+import com.google.auto.value.AutoValue;
+import com.google.common.base.Splitter;
+import com.google.common.base.Strings;
+import com.google.common.collect.Lists;
+import com.google.common.collect.MapDifference;
+import com.google.common.collect.Maps;
+import com.google.common.collect.Range;
+import com.google.common.truth.Correspondence;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.InputStreamReader;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.UUID;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.json.JSONArray;
+import org.json.JSONException;
+import org.json.JSONObject;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+/**
+ * Tests that a Cuttlefish device can interactively connect and disconnect displays.
+ */
+@RunWith(DeviceJUnit4ClassRunner.class)
+public class CuttlefishDisplayHotplugTest extends CuttlefishHostTest {
+
+    private static final long DEFAULT_TIMEOUT_MS = 5000;
+
+    private static final String CVD_BINARY_BASENAME = "cvd";
+
+    private static final String CVD_DISPLAY_BINARY_BASENAME = "cvd_internal_display";
+
+    private CommandResult runCvdCommand(Collection<String> commandArgs) throws FileNotFoundException {
+        // TODO: Switch back to using `cvd` after either:
+        //  * Commands under `cvd` can be used with instances launched through `launch_cvd`.
+        //  * ATP launches instances using `cvd start` instead of `launch_cvd`.
+        String cvdBinary = runner.getHostBinaryPath(CVD_DISPLAY_BINARY_BASENAME);
+
+        List<String> fullCommand = new ArrayList<String>(commandArgs);
+        fullCommand.add(0, cvdBinary);
+
+        // Remove the "display" part of the command until switching back to `cvd`.
+        fullCommand.remove(1);
+
+        return runner.run(DEFAULT_TIMEOUT_MS, fullCommand.toArray(new String[0]));
+    }
+
+    private static final String HELPER_APP_APK = "CuttlefishDisplayHotplugHelperApp.apk";
+
+    private static final String HELPER_APP_PKG = "com.android.cuttlefish.displayhotplughelper";
+
+    private static final String HELPER_APP_ACTIVITY = "com.android.cuttlefish.displayhotplughelper/.DisplayHotplugHelperApp";
+
+    private static final String HELPER_APP_UUID_FLAG = "display_hotplug_uuid";
+
+    private static final int HELPER_APP_LOG_CHECK_ATTEMPTS = 5;
+
+    private static final int HELPER_APP_LOG_CHECK_TIMEOUT_MILLISECONDS = 200;
+
+    private static final int CHECK_FOR_UPDATED_GUEST_DISPLAYS_ATTEMPTS = 5;
+
+    private static final int CHECK_FOR_UPDATED_GUEST_DISPLAYS_SLEEP_MILLISECONDS = 500;
+
+    private static final Splitter LOGCAT_NEWLINE_SPLITTER = Splitter.on('\n').trimResults();
+
+    @Before
+    public void setUp() throws Exception {
+        getDevice().uninstallPackage(HELPER_APP_PKG);
+        installPackage(HELPER_APP_APK);
+    }
+
+    @After
+    public void tearDown() throws Exception {
+        getDevice().uninstallPackage(HELPER_APP_PKG);
+    }
+
+    /**
+     * Display information as seen from the host (i.e. from Crosvm via a `cvd display` command).
+     */
+    @AutoValue
+    public static abstract class HostDisplayInfo {
+        static HostDisplayInfo create(int id, int width, int height) {
+            return new AutoValue_CuttlefishDisplayHotplugTest_HostDisplayInfo(id, width, height);
+        }
+
+        abstract int id();
+        abstract int width();
+        abstract int height();
+    }
+
+    /**
+     * Display information as seen from the guest (i.e. from SurfaceFlinger/DisplayManager).
+     */
+    @AutoValue
+    public static abstract class GuestDisplayInfo {
+        static GuestDisplayInfo create(int id, int width, int height) {
+            return new AutoValue_CuttlefishDisplayHotplugTest_GuestDisplayInfo(id, width, height);
+        }
+
+        abstract int id();
+        abstract int width();
+        abstract int height();
+    }
+
+    /**
+     * Expected input JSON format:
+     *
+     *   {
+     *     "displays" : {
+     *       "<display id>": {
+     *         "mode": {
+     *           "windowed": [
+     *             <width>,
+     *             <height>,
+     *           ],
+     *         },
+     *         ...
+     *       },
+     *       ...
+     *     }
+     *   }
+     *
+     */
+    private Map<Integer, HostDisplayInfo> parseHostDisplayInfos(String inputJson) {
+        if (Strings.isNullOrEmpty(inputJson)) {
+            throw new IllegalArgumentException("Null display info json.");
+        }
+
+        Map<Integer, HostDisplayInfo> displayInfos = new HashMap<Integer, HostDisplayInfo>();
+
+        try {
+            JSONObject json = new JSONObject(inputJson);
+            JSONObject jsonDisplays = json.getJSONObject("displays");
+            for (Iterator<String> keyIt = jsonDisplays.keys(); keyIt.hasNext(); ) {
+                String displayNumberString = keyIt.next();
+
+                JSONObject jsonDisplay = jsonDisplays.getJSONObject(displayNumberString);
+                JSONObject jsonDisplayMode = jsonDisplay.getJSONObject("mode");
+                JSONArray jsonDisplayModeWindowed = jsonDisplayMode.getJSONArray("windowed");
+
+                int id = Integer.parseInt(displayNumberString);
+                int w = jsonDisplayModeWindowed.getInt(0);
+                int h = jsonDisplayModeWindowed.getInt(1);
+
+                displayInfos.put(id, HostDisplayInfo.create(id, w, h));
+            }
+        } catch (JSONException e) {
+            throw new IllegalArgumentException("Invalid display info json: " + inputJson, e);
+        }
+
+        return displayInfos;
+    }
+
+
+    /**
+     * Expected input JSON format:
+     *
+     *   {
+     *     "displays" : [
+     *       {
+     *           "id": <id>,
+     *           "name": <name>,
+     *           "width": <width>,
+     *           "height": <height>,
+     *       },
+     *       ...
+     *     ]
+     *   }
+     */
+    private Map<Integer, GuestDisplayInfo> parseGuestDisplayInfos(String inputJson) {
+        if (Strings.isNullOrEmpty(inputJson)) {
+            throw new NullPointerException("Null display info json.");
+        }
+
+        Map<Integer, GuestDisplayInfo> displayInfos = new HashMap<Integer, GuestDisplayInfo>();
+
+        try {
+            JSONObject json = new JSONObject(inputJson);
+            JSONArray jsonDisplays = json.getJSONArray("displays");
+            for (int i = 0; i < jsonDisplays.length(); i++) {
+                JSONObject jsonDisplay = jsonDisplays.getJSONObject(i);
+                int id = jsonDisplay.getInt("id");
+                int w = jsonDisplay.getInt("width");
+                int h = jsonDisplay.getInt("height");
+                displayInfos.put(id, GuestDisplayInfo.create(id, w, h));
+            }
+        } catch (JSONException e) {
+            throw new IllegalArgumentException("Invalid display info json: " + inputJson, e);
+        }
+
+        return displayInfos;
+    }
+
+    private String getDisplayHotplugHelperAppOutput() throws Exception {
+        final String uuid = UUID.randomUUID().toString();
+
+        final Pattern guestDisplayInfoPattern =
+            Pattern.compile(
+                String.format("^.*DisplayHotplugHelper.*%s.* displays: (\\{.*\\})", uuid));
+
+        getDevice().executeShellCommand(
+            String.format("am start -n %s --es %s %s", HELPER_APP_ACTIVITY, HELPER_APP_UUID_FLAG, uuid));
+
+        for (int attempt = 0; attempt < HELPER_APP_LOG_CHECK_ATTEMPTS; attempt++) {
+            String logcat = getDevice().executeAdbCommand("logcat", "-d", "DisplayHotplugHelper:E", "*:S");
+
+            List<String> logcatLines = Lists.newArrayList(LOGCAT_NEWLINE_SPLITTER.split(logcat));
+
+            // Inspect latest first:
+            Collections.reverse(logcatLines);
+
+            for (String logcatLine : logcatLines) {
+                Matcher matcher = guestDisplayInfoPattern.matcher(logcatLine);
+                if (matcher.find()) {
+                    return matcher.group(1);
+                }
+            }
+
+            Thread.sleep(HELPER_APP_LOG_CHECK_TIMEOUT_MILLISECONDS);
+        }
+
+        throw new IllegalStateException("Failed to find display info from helper app using uuid:" + uuid);
+    }
+
+    private Map<Integer, GuestDisplayInfo> getGuestDisplays() throws Exception {
+        return parseGuestDisplayInfos(getDisplayHotplugHelperAppOutput());
+    }
+
+    public Map<Integer, HostDisplayInfo> getHostDisplays() throws FileNotFoundException {
+        CommandResult listDisplaysResult = runCvdCommand(Lists.newArrayList("display", "list"));
+        if (!CommandStatus.SUCCESS.equals(listDisplaysResult.getStatus())) {
+            throw new IllegalStateException(
+                    String.format("Failed to run list displays command:%s\n%s",
+                                  listDisplaysResult.getStdout(),
+                                  listDisplaysResult.getStderr()));
+        }
+        return parseHostDisplayInfos(listDisplaysResult.getStdout());
+    }
+
+    @AutoValue
+    public static abstract class AddDisplayParams {
+        static AddDisplayParams create(int width, int height) {
+            return new AutoValue_CuttlefishDisplayHotplugTest_AddDisplayParams(width, height);
+        }
+
+        abstract int width();
+        abstract int height();
+    }
+
+    /* As supported by `cvd display add` */
+    private static final int MAX_ADD_DISPLAYS = 4;
+
+    public void addDisplays(List<AddDisplayParams> params) throws FileNotFoundException {
+        if (params.size() > MAX_ADD_DISPLAYS) {
+            throw new IllegalArgumentException(
+                "`cvd display add` only supports adding up to " + MAX_ADD_DISPLAYS +
+                " at once but was requested to add " + params.size() + " displays.");
+        }
+
+        List<String> addDisplaysCommand = Lists.newArrayList("display", "add");
+        for (int i = 0; i < params.size(); i++) {
+            AddDisplayParams display = params.get(i);
+
+            addDisplaysCommand.add(String.format(
+                "--display%d=width=%d,height=%d", i, display.width(), display.height()));
+        }
+
+        CommandResult addDisplayResult = runCvdCommand(addDisplaysCommand);
+        if (!CommandStatus.SUCCESS.equals(addDisplayResult.getStatus())) {
+            throw new IllegalStateException(
+                    String.format("Failed to run add display command:%s\n%s",
+                                  addDisplayResult.getStdout(),
+                                  addDisplayResult.getStderr()));
+        }
+    }
+
+    public void addDisplay(int width, int height) throws FileNotFoundException {
+        addDisplays(List.of(AddDisplayParams.create(width, height)));
+    }
+
+    public void removeDisplays(List<Integer> displayIds) throws FileNotFoundException {
+        List<String> removeDisplaysCommand = Lists.newArrayList("display", "remove");
+        for (Integer displayId : displayIds) {
+            removeDisplaysCommand.add(displayId.toString());
+        }
+
+        CommandResult removeDisplayResult = runCvdCommand(removeDisplaysCommand);
+        if (!CommandStatus.SUCCESS.equals(removeDisplayResult.getStatus())) {
+            throw new IllegalStateException(
+                    String.format("Failed to run remove display command:%s\n%s",
+                                  removeDisplayResult.getStdout(),
+                                  removeDisplayResult.getStderr()));
+        }
+    }
+
+    public void removeDisplay(int displayId) throws FileNotFoundException {
+        removeDisplays(List.of(displayId));
+    }
+
+    Correspondence<GuestDisplayInfo, AddDisplayParams> GUEST_DISPLAY_MATCHES =
+        Correspondence.from((GuestDisplayInfo lhs, AddDisplayParams rhs) -> {
+            return lhs.width() == rhs.width() &&
+                   lhs.height() == rhs.height();
+        }, "matches the display info of");
+
+    Correspondence<HostDisplayInfo, AddDisplayParams> HOST_DISPLAY_MATCHES =
+        Correspondence.from((HostDisplayInfo lhs, AddDisplayParams rhs) -> {
+            return lhs.width() == rhs.width() &&
+                   lhs.height() == rhs.height();
+        }, "matches the display info of");
+
+    private void doOneConnectAndDisconnectCycle(List<AddDisplayParams> params) throws Exception {
+        // Check which displays Crosvm is aware of originally.
+        Map<Integer, HostDisplayInfo> originalHostDisplays = getHostDisplays();
+        assertThat(originalHostDisplays).isNotNull();
+        assertThat(originalHostDisplays).isNotEmpty();
+
+        // Check which displays SurfaceFlinger and DisplayManager are aware of originally.
+        Map<Integer, GuestDisplayInfo> originalGuestDisplays = getGuestDisplays();
+        assertThat(originalGuestDisplays).isNotNull();
+        assertThat(originalGuestDisplays).isNotEmpty();
+
+        // Perform the hotplug connect.
+        addDisplays(params);
+
+        // Check that Crosvm is aware of the new display (the added displays should
+        // be visible immediately after the host command completes and this should
+        // not need retries).
+        Map<Integer, HostDisplayInfo> afterAddHostDisplays = getHostDisplays();
+        assertThat(afterAddHostDisplays).isNotNull();
+
+        MapDifference<Integer, HostDisplayInfo> addedHostDisplaysDiff =
+            Maps.difference(afterAddHostDisplays, originalHostDisplays);
+        assertThat(addedHostDisplaysDiff.entriesOnlyOnLeft()).hasSize(params.size());
+        assertThat(addedHostDisplaysDiff.entriesOnlyOnRight()).isEmpty();
+
+        Map<Integer, HostDisplayInfo> addedHostDisplays =
+            addedHostDisplaysDiff.entriesOnlyOnLeft();
+        assertThat(addedHostDisplays.values())
+            .comparingElementsUsing(HOST_DISPLAY_MATCHES)
+            .containsExactlyElementsIn(params);
+
+        // Check that SurfaceFlinger and DisplayManager are aware of the new display.
+        Map<Integer, GuestDisplayInfo> afterAddGuestDisplays = null;
+        for (int attempt = 0; attempt < CHECK_FOR_UPDATED_GUEST_DISPLAYS_ATTEMPTS; attempt++) {
+            // Guest components (HWComposer/SurfaceFlinger/etc) may take some time to process.
+            Thread.sleep(CHECK_FOR_UPDATED_GUEST_DISPLAYS_SLEEP_MILLISECONDS);
+
+            afterAddGuestDisplays = getGuestDisplays();
+            assertThat(afterAddGuestDisplays).isNotNull();
+
+            int expectedNumberOfGuestDisplaysAfterAdd =
+                originalGuestDisplays.size() + params.size();
+
+            int numberOfGuestDisplaysAfterAdd = afterAddGuestDisplays.size();
+            if (numberOfGuestDisplaysAfterAdd == expectedNumberOfGuestDisplaysAfterAdd) {
+                break;
+            }
+
+            CLog.i("Number of guest displays after add command did not yet match expected on " +
+                    "attempt %d (actual:%d vs expected:%d)",
+                    attempt, numberOfGuestDisplaysAfterAdd, expectedNumberOfGuestDisplaysAfterAdd);
+        }
+        MapDifference<Integer, GuestDisplayInfo> addedGuestDisplaysDiff =
+            Maps.difference(afterAddGuestDisplays, originalGuestDisplays);;
+        assertThat(addedGuestDisplaysDiff.entriesOnlyOnLeft()).hasSize(params.size());
+        assertThat(addedGuestDisplaysDiff.entriesOnlyOnRight()).isEmpty();
+
+        Map<Integer, GuestDisplayInfo> addedGuestDisplays =
+            addedGuestDisplaysDiff.entriesOnlyOnLeft();
+        assertThat(addedGuestDisplays.values())
+            .comparingElementsUsing(GUEST_DISPLAY_MATCHES)
+            .containsExactlyElementsIn(params);
+
+        // Perform the hotplug disconnect.
+        List<Integer> addedHostDisplayIds = new ArrayList<Integer>();
+        for (HostDisplayInfo addedHostDisplay : addedHostDisplays.values()) {
+            addedHostDisplayIds.add(addedHostDisplay.id());
+        }
+        removeDisplays(addedHostDisplayIds);
+
+        // Check that Crosvm does not show the removed display (the removed displays
+        // should be visible immediately after the host command completes and this
+        // should not need retries).
+        Map<Integer, HostDisplayInfo> afterRemoveHostDisplays = getHostDisplays();
+        assertThat(afterRemoveHostDisplays).isNotNull();
+
+        MapDifference<Integer, HostDisplayInfo> removedHostDisplaysDiff =
+            Maps.difference(afterRemoveHostDisplays, originalHostDisplays);
+        assertThat(removedHostDisplaysDiff.entriesDiffering()).isEmpty();
+
+        // Check that SurfaceFlinger and DisplayManager do not show the removed display.
+        Map<Integer, GuestDisplayInfo> afterRemoveGuestDisplays = null;
+        for (int attempt = 0; attempt < CHECK_FOR_UPDATED_GUEST_DISPLAYS_ATTEMPTS; attempt++) {
+            // Guest components (HWComposer/SurfaceFlinger/etc) may take some time to process.
+            Thread.sleep(CHECK_FOR_UPDATED_GUEST_DISPLAYS_SLEEP_MILLISECONDS);
+
+            afterRemoveGuestDisplays = getGuestDisplays();
+            assertThat(afterRemoveGuestDisplays).isNotNull();
+
+            int expectedNumberOfGuestDisplaysAfterRemove = originalGuestDisplays.size();
+
+            int numberOfGuestDisplaysAfterRemove = afterRemoveGuestDisplays.size();
+            if (numberOfGuestDisplaysAfterRemove == expectedNumberOfGuestDisplaysAfterRemove) {
+                break;
+            }
+
+            CLog.i("Number of guest displays after remove command did not yet match expected on " +
+                   "attempt %d (actual:%d vs expected:%d)",
+                   attempt, numberOfGuestDisplaysAfterRemove,
+                   expectedNumberOfGuestDisplaysAfterRemove);
+        }
+        MapDifference<Integer, GuestDisplayInfo> removedGuestDisplaysDiff
+            = Maps.difference(afterRemoveGuestDisplays, originalGuestDisplays);
+        assertThat(removedGuestDisplaysDiff.entriesDiffering()).isEmpty();
+    }
+
+    @Test
+    public void testDisplayHotplug() throws Exception {
+        doOneConnectAndDisconnectCycle(
+            List.of(AddDisplayParams.create(600, 500)));
+    }
+
+    @Test
+    public void testDisplayHotplugMultipleDisplays() throws Exception {
+        doOneConnectAndDisconnectCycle(
+            List.of(
+                AddDisplayParams.create(1920, 1080),
+                AddDisplayParams.create(1280, 720)));
+    }
+
+    @AutoValue
+    public static abstract class MemoryInfo {
+        static MemoryInfo create(int usedRam) {
+            return new AutoValue_CuttlefishDisplayHotplugTest_MemoryInfo(usedRam);
+        }
+
+        abstract int usedRamBytes();
+    }
+
+    private static final String GET_USED_RAM_COMMAND = "dumpsys meminfo";
+
+    private static final Pattern USED_RAM_PATTERN = Pattern.compile("Used RAM: (.*?)K \\(");
+
+    private MemoryInfo getMemoryInfo() throws Exception {
+        ITestDevice device = getDevice();
+
+        CommandResult getUsedRamResult = device.executeShellV2Command(GET_USED_RAM_COMMAND);
+        if (!CommandStatus.SUCCESS.equals(getUsedRamResult.getStatus())) {
+            throw new IllegalStateException(
+                    String.format("Failed to run |%s|: stdout: %s\n stderr: %s",
+                                  GET_USED_RAM_COMMAND,
+                                  getUsedRamResult.getStdout(),
+                                  getUsedRamResult.getStderr()));
+        }
+        // Ex:
+        //    ...
+        //    GPU:              0K (        0K dmabuf +         0K private)
+        //    Used RAM: 1,155,524K (  870,488K used pss +   285,036K kernel)
+        //    Lost RAM:    59,469K
+        //    ...
+        String usedRamString = getUsedRamResult.getStdout();
+        Matcher m = USED_RAM_PATTERN.matcher(usedRamString);
+        if (!m.find()) {
+            throw new IllegalStateException(
+                     String.format("Failed to parse 'Used RAM' from stdout:\n%s",
+                                   getUsedRamResult.getStdout()));
+        }
+        // Ex: "1,228,768"
+        usedRamString = m.group(1);
+        usedRamString = usedRamString.replaceAll(",", "");
+        int usedRam = Integer.parseInt(usedRamString) * 1000;
+
+        return MemoryInfo.create(usedRam);
+    }
+
+    private static final int MAX_ALLOWED_RAM_BYTES_DIFF = 32 * 1024 * 1024;
+
+    private void doCheckForLeaks(MemoryInfo base) throws Exception {
+        MemoryInfo current = getMemoryInfo();
+
+        assertThat(current.usedRamBytes()).isIn(
+                Range.closed(base.usedRamBytes() - MAX_ALLOWED_RAM_BYTES_DIFF,
+                             base.usedRamBytes() + MAX_ALLOWED_RAM_BYTES_DIFF));
+    }
+
+    @Test
+    @LargeTest
+    public void testDisplayHotplugDoesNotLeakMemory() throws Exception {
+        List<AddDisplayParams> toAdd = List.of(AddDisplayParams.create(600, 500));
+
+        // Warm up to potentially reach any steady state memory usage.
+        for (int i = 0; i < 50; i++) {
+            doOneConnectAndDisconnectCycle(toAdd);
+        }
+
+        MemoryInfo original = getMemoryInfo();
+        for (int i = 0; i <= 500; i++) {
+            doOneConnectAndDisconnectCycle(toAdd);
+
+            if (i % 100 == 0) {
+                doCheckForLeaks(original);
+            }
+        }
+    }
+}
diff --git a/tests/hal/Android.bp b/tests/hal/Android.bp
index 5aa14ce..1a63d6e 100644
--- a/tests/hal/Android.bp
+++ b/tests/hal/Android.bp
@@ -1,3 +1,18 @@
+//
+// Copyright (C) 2019 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
 package {
     default_applicable_licenses: ["Android-Apache-2.0"],
 }
@@ -12,7 +27,17 @@
     ],
     shared_libs: [
         "libbase",
+        "libbinder",
         "libvintf",
+        "libutils",
+        "packagemanager_aidl-cpp",
     ],
-    test_suites: ["device-tests"],
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
+    test_suites: [
+        "device-tests",
+        "automotive-tests",
+    ],
 }
diff --git a/tests/hal/OWNERS b/tests/hal/OWNERS
index 40164aa..9d308e5 100644
--- a/tests/hal/OWNERS
+++ b/tests/hal/OWNERS
@@ -1 +1,2 @@
 smoreland@google.com
+devinmoore@google.com
diff --git a/tests/hal/hal_implementation_test.cpp b/tests/hal/hal_implementation_test.cpp
index eaefd87..55926c6 100644
--- a/tests/hal/hal_implementation_test.cpp
+++ b/tests/hal/hal_implementation_test.cpp
@@ -15,7 +15,10 @@
  */
 #include <aidl/metadata.h>
 #include <android-base/logging.h>
+#include <android-base/properties.h>
 #include <android-base/strings.h>
+#include <android/content/pm/IPackageManagerNative.h>
+#include <binder/IServiceManager.h>
 #include <gtest/gtest.h>
 #include <hidl-util/FQName.h>
 #include <hidl/metadata.h>
@@ -24,14 +27,20 @@
 using namespace android;
 
 // clang-format off
+static const std::set<std::string> kAutomotiveOnlyHidl = {
+    "android.hardware.automotive.evs@1.1",
+};
+
 static const std::set<std::string> kKnownMissingHidl = {
+    "android.frameworks.automotive.display@1.0", // converted to AIDL, see b/170401743
     "android.frameworks.cameraservice.device@2.1",
+    "android.frameworks.cameraservice.service@2.2", // converted to AIDL, see b/205764761
     "android.frameworks.displayservice@1.0", // deprecated, see b/141930622
     "android.frameworks.schedulerservice@1.0", // deprecated, see b/37226359
     "android.frameworks.vr.composer@1.0",
     "android.frameworks.vr.composer@2.0",
-    "android.frameworks.automotive.display@1.0",
     "android.frameworks.stats@1.0",  // converted to AIDL, see b/177667419
+    "android.hardware.atrace@1.0", // deprecated, see b/204935495
     "android.hardware.audio@2.0",
     "android.hardware.audio@4.0",
     "android.hardware.audio@5.0",
@@ -40,24 +49,30 @@
     "android.hardware.audio.effect@4.0",
     "android.hardware.audio.effect@5.0",
     "android.hardware.audio.effect@6.0",
+    "android.hardware.authsecret@1.0", // converted to AIDL, see b/182976659
     "android.hardware.automotive.audiocontrol@1.0",
     "android.hardware.automotive.audiocontrol@2.0",
-    "android.hardware.automotive.can@1.0",
-    "android.hardware.automotive.evs@1.1",
+    "android.hardware.automotive.can@1.0",  // converted to AIDL, see b/170405615
     "android.hardware.automotive.sv@1.0",
     "android.hardware.automotive.vehicle@2.0",
-    "android.hardware.biometrics.fingerprint@2.3",
+    "android.hardware.biometrics.fingerprint@2.3", // converted to AIDL, see b/152416783
+    "android.hardware.biometrics.face@1.0", // converted to AIDL, see b/168730443
     "android.hardware.bluetooth.a2dp@1.0",
     "android.hardware.bluetooth.audio@2.1", // converted to AIDL, see b/203490261
+    "android.hardware.bluetooth@1.1", // converted to AIDL, see b/205758693
+    "android.hardware.boot@1.2", // converted to AIDL, see b/227536004
     "android.hardware.broadcastradio@1.1",
     "android.hardware.broadcastradio@2.0",
     "android.hardware.camera.provider@2.7", // Camera converted to AIDL, b/196432585
+    "android.hardware.cas@1.2", // converted to AIDL, see b/227673974
     "android.hardware.cas.native@1.0",
     "android.hardware.configstore@1.1", // deprecated, see b/149050985, b/149050733
+    "android.hardware.confirmationui@1.0", // converted to AIDL, see b/205760172
     "android.hardware.contexthub@1.2",
     "android.hardware.drm@1.4", // converted to AIDL, b/200055138
     "android.hardware.fastboot@1.1",
     "android.hardware.dumpstate@1.1", // deprecated, see b/205760700
+    "android.hardware.gatekeeper@1.0", // converted to AIDL, b/205760843
     "android.hardware.gnss@1.1", // GNSS converted to AIDL, b/206670536
     "android.hardware.gnss@2.1", // GNSS converted to AIDL, b/206670536
     "android.hardware.gnss.measurement_corrections@1.1", // is sub-interface of gnss
@@ -75,11 +90,13 @@
     "android.hardware.input.classifier@1.0", // converted to AIDL, see b/205761620
     "android.hardware.ir@1.0", // converted to AIDL, see b/205000342
     "android.hardware.keymaster@3.0",
-    "android.hardware.keymaster@4.1", // Replaced by KeyMint
+    "android.hardware.keymaster@4.1", // Replaced by AIDL KeyMint, see b/111446262
     "android.hardware.light@2.0",
     "android.hardware.media.bufferpool@1.0",
     "android.hardware.media.bufferpool@2.0",
+    "android.hardware.media.omx@1.0", // deprecated b/205761766
     "android.hardware.memtrack@1.0",
+    "android.hardware.neuralnetworks@1.3", // converted to AIDL, see b/161428342
     "android.hardware.nfc@1.2",
     "android.hardware.oemlock@1.0",
     "android.hardware.power@1.3",
@@ -95,6 +112,7 @@
     "android.hardware.tetheroffload.config@1.0",
     "android.hardware.tetheroffload.control@1.1", // see b/170699770
     "android.hardware.thermal@1.1",
+    "android.hardware.thermal@2.0", // Converted to AIDL (see b/205762943)
     "android.hardware.tv.cec@1.1",
     "android.hardware.tv.input@1.0",
     "android.hardware.tv.tuner@1.1",
@@ -103,92 +121,126 @@
     "android.hardware.vibrator@1.3",
     "android.hardware.vr@1.0",
     "android.hardware.weaver@1.0",
-    "android.hardware.wifi.hostapd@1.3",
-    "android.hardware.wifi.supplicant@1.4",
-    "android.hardware.wifi.offload@1.0",
+    "android.hardware.wifi@1.6", // Converted to AIDL (see b/205044134)
+    "android.hardware.wifi.hostapd@1.3", // Converted to AIDL (see b/194806512)
+    "android.hardware.wifi.supplicant@1.4", // Converted to AIDL (see b/196235436)
     "android.hidl.base@1.0",
     "android.hidl.memory.token@1.0",
+    "android.system.net.netd@1.1", // Converted to AIDL (see b/205764585)
     "android.system.suspend@1.0", // Converted to AIDL (see b/170260236)
+    "android.system.wifi.keystore@1.0", // Converted to AIDL (see b/205764502)
 };
 // clang-format on
 
 struct VersionedAidlPackage {
   std::string name;
   size_t version;
+  int bugNum;
   bool operator<(const VersionedAidlPackage& rhs) const {
     return (name < rhs.name || (name == rhs.name && version < rhs.version));
   }
 };
 
-static const std::set<VersionedAidlPackage> kKnownMissingAidl = {
+static const std::set<std::string> kPhoneOnlyAidl = {
+    "android.hardware.camera.provider",
+};
+
+static const std::set<std::string> kAutomotiveOnlyAidl = {
+    /**
+     * These types are only used in Android Automotive, so don't expect them
+     * on phones.
+     */
+    "android.automotive.watchdog",
+    "android.frameworks.automotive.display",
+    "android.frameworks.automotive.powerpolicy.internal",
+    "android.frameworks.automotive.telemetry",
+    "android.hardware.automotive.audiocontrol",
+    "android.hardware.automotive.can",
+    "android.hardware.broadcastradio",
+    "android.hardware.automotive.occupant_awareness",
+    "android.hardware.automotive.remoteaccess",
+    "android.hardware.automotive.vehicle",
+};
+
+static const std::set<std::string> kTvOnlyAidl = {
+    /**
+     * These types are only used in Android TV, so don't expect them on other
+     * devices.
+     * TODO(b/266868403) This test should run on TV devices to enforce the same
+     * requirements
+     */
+    "android.hardware.tv.hdmi.cec",        "android.hardware.tv.hdmi.earc",
+    "android.hardware.tv.hdmi.connection", "android.hardware.tv.tuner",
+    "android.hardware.tv.input",
+};
+
+static const std::set<std::string> kRadioOnlyAidl = {
+    // Not all devices have radio capabilities
+    "android.hardware.radio.config",    "android.hardware.radio.data",
+    "android.hardware.radio.messaging", "android.hardware.radio.modem",
+    "android.hardware.radio.network",   "android.hardware.radio.sap",
+    "android.hardware.radio.sim",       "android.hardware.radio.voice",
+};
+
+/*
+ * Always missing AIDL packages that are not served on Cuttlefish.
+ * These are typically types-only packages.
+ */
+static const std::set<std::string> kAlwaysMissingAidl = {
+    // types-only packages, which never expect a default implementation
+    "android.frameworks.cameraservice.common",
+    "android.frameworks.cameraservice.device",
+    "android.hardware.audio.common",
+    "android.hardware.audio.core.sounddose",
+    "android.hardware.biometrics.common",
+    "android.hardware.camera.common",
+    "android.hardware.camera.device",
+    "android.hardware.camera.metadata",
+    "android.hardware.common",
+    "android.hardware.common.fmq",
+    "android.hardware.graphics.common",
+    "android.hardware.input.common",
+    "android.media.audio.common.types",
+    "android.hardware.radio",
+    "android.hardware.uwb.fira_android",
+    "android.hardware.keymaster",
+    "android.hardware.automotive.vehicle.property",
+    // not on Cuttlefish since it's needed only on systems using HIDL audio HAL
+    "android.hardware.audio.sounddose",
+
+    // android.hardware.media.bufferpool2 is a HAL-less interface.
+    // It could be used for buffer recycling and caching by using the interface.
+    "android.hardware.media.bufferpool2",
+
+    /**
+     * No implementation on cuttlefish for fastboot AIDL hal because it doesn't
+     * run during normal boot, only in recovery/fastboot mode.
+     */
+    "android.hardware.fastboot",
+};
+
+/*
+ * These packages should have implementations but currently do not.
+ * These must be accompanied by a bug and expected to be here temporarily.
+ */
+static const std::vector<VersionedAidlPackage> kKnownMissingAidl = {
     // Cuttlefish Identity Credential HAL implementation is currently
     // stuck at version 3 while RKP support is being added. Will be
     // updated soon.
-    {"android.hardware.identity.", 4},
+    {"android.hardware.identity.", 4, 266869317},
+    {"android.hardware.identity.", 5, 266869317},
 
-    // types-only packages, which never expect a default implementation
-    {"android.hardware.audio.common.", 1},
-    {"android.hardware.biometrics.common.", 1},
-    {"android.hardware.biometrics.common.", 2},
-    {"android.hardware.common.", 1},
-    {"android.hardware.common.", 2},
-    {"android.hardware.common.fmq.", 1},
+    {"android.se.omapi.", 1, 266870904},
+    {"android.hardware.secure_element.", 1, 123254068},
+    {"android.hardware.soundtrigger3.", 1, 266941225},
+    {"android.media.soundtrigger.", 1, 266941225},
+    {"android.hardware.weaver.", 2, 262418065},
 
-    {"android.hardware.graphics.common.", 1},
-    {"android.hardware.graphics.common.", 2},
-    {"android.hardware.graphics.common.", 3},
-    {"android.hardware.input.common.", 1},
-
-    // android.hardware.camera.device is an interface returned by
-    // android.hardware.camera.provider.
-    // android.hardware.camera.common and android.hardware.camera.metadata are
-    // types used by android.hardware.camera.provider and
-    // android.hardware.camera.device.
-    {"android.hardware.camera.common.", 1},
-    {"android.hardware.camera.device.", 1},
-    {"android.hardware.camera.metadata.", 1},
-
-    // No implementations on cuttlefish for omapi aidl hal
-    {"android.se.omapi.", 1},
-
-    // These KeyMaster types are in an AIDL types-only HAL because they're used
-    // by the Identity Credential AIDL HAL. Remove this when fully porting
-    // KeyMaster to AIDL.
-    {"android.hardware.keymaster.", 1},
-    {"android.hardware.keymaster.", 2},
-    {"android.hardware.keymaster.", 3},
-
-    // Sound trigger doesn't have a default implementation.
-    {"android.hardware.soundtrigger3.", 1},
-    {"android.media.soundtrigger.", 1},
-    {"android.media.audio.common.", 1},
-
-    // These types are only used in Automotive.
-    {"android.automotive.computepipe.registry.", 1},
-    {"android.automotive.computepipe.runner.", 1},
-    {"android.automotive.watchdog.", 2},
-    {"android.automotive.watchdog.", 3},
-    {"android.frameworks.automotive.display.", 1},
-    {"android.frameworks.automotive.powerpolicy.", 1},
-    {"android.frameworks.automotive.powerpolicy.internal.", 1},
-    {"android.frameworks.automotive.telemetry.", 1},
-    {"android.hardware.automotive.audiocontrol.", 1},
-    {"android.hardware.automotive.audiocontrol.", 2},
-    {"android.hardware.automotive.evs.", 1},
-    {"android.hardware.automotive.occupant_awareness.", 1},
-    {"android.hardware.automotive.vehicle.", 1},
-
-    // These types are only used in TV.
-    {"android.hardware.tv.tuner.", 1},
-
-    // types-only packages, which never expect a default implementation
-    {"android.hardware.radio.", 1},
-
-    // types-only packages, which never expect a default implementation
-    {"android.hardware.uwb.fira_android.", 1},
-};
-
-static const std::set<VersionedAidlPackage> kComingSoonAidl = {
+    {"android.automotive.computepipe.registry.", 2, 273549907},
+    {"android.automotive.computepipe.runner.", 2, 273549907},
+    {"android.frameworks.automotive.powerpolicy.", 2, 274160980},
+    {"android.hardware.automotive.evs.", 2, 274162534},
+    {"android.hardware.automotive.ivn.", 1, 274139217},
 };
 
 // AOSP packages which are never considered
@@ -207,6 +259,13 @@
   return true;
 }
 
+// android.hardware.foo.IFoo -> android.hardware.foo.
+std::string getAidlPackage(const std::string& aidlType) {
+  size_t lastDot = aidlType.rfind('.');
+  CHECK(lastDot != std::string::npos);
+  return aidlType.substr(0, lastDot + 1);
+}
+
 static bool isAospHidlInterface(const FQName& name) {
   static const std::vector<std::string> kAospPackages = {
       "android.hidl",
@@ -238,7 +297,9 @@
     if (i.format() != vintf::HalFormat::HIDL) {
       return true;  // continue
     }
-    ret.insert(i.getFqInstance().getFqName());
+    FQName fqName;
+    CHECK(fqName.setTo(i.getFqInstance().getFqNameString()));
+    ret.insert(fqName);
     return true;  // continue
   };
   vintf::VintfObject::GetDeviceHalManifest()->forEachInstance(setInserter);
@@ -252,13 +313,126 @@
          !base::StartsWith(name, "android.aidl.tests");
 }
 
-static std::set<VersionedAidlPackage> allAidlManifestInterfaces() {
-  std::set<VersionedAidlPackage> ret;
+enum class DeviceType {
+  UNKNOWN,
+  AUTOMOTIVE,
+  TV,
+  WATCH,
+  PHONE,
+};
+
+static DeviceType getDeviceType() {
+  static DeviceType type = DeviceType::UNKNOWN;
+  if (type != DeviceType::UNKNOWN) return type;
+
+  sp<IBinder> binder =
+      defaultServiceManager()->waitForService(String16("package_native"));
+  sp<content::pm::IPackageManagerNative> packageManager =
+      interface_cast<content::pm::IPackageManagerNative>(binder);
+  CHECK(packageManager != nullptr);
+
+  bool hasFeature = false;
+  // PackageManager.FEATURE_AUTOMOTIVE
+  CHECK(packageManager
+            ->hasSystemFeature(String16("android.hardware.type.automotive"), 0,
+                               &hasFeature)
+            .isOk());
+  if (hasFeature) return DeviceType::AUTOMOTIVE;
+
+  // PackageManager.FEATURE_LEANBACK
+  CHECK(packageManager
+            ->hasSystemFeature(String16("android.software.leanback"), 0,
+                               &hasFeature)
+            .isOk());
+  if (hasFeature) return DeviceType::TV;
+
+  // PackageManager.FEATURE_WATCH
+  CHECK(packageManager
+            ->hasSystemFeature(String16("android.hardware.type.watch"), 0,
+                               &hasFeature)
+            .isOk());
+  if (hasFeature) return DeviceType::WATCH;
+
+  return DeviceType::PHONE;
+}
+
+static std::set<std::string> getMissingHidl() {
+  static std::once_flag unionFlag;
+  static std::set<std::string> missingHidl = kKnownMissingHidl;
+
+  std::call_once(unionFlag, [&]() {
+    const DeviceType type = getDeviceType();
+    switch (type) {
+      case DeviceType::AUTOMOTIVE:
+        LOG(INFO) << "Determined this is an Automotive device";
+        break;
+      case DeviceType::TV:
+        missingHidl.insert(kAutomotiveOnlyHidl.begin(),
+                           kAutomotiveOnlyHidl.end());
+        LOG(INFO) << "Determined this is a TV device";
+        break;
+      case DeviceType::WATCH:
+        missingHidl.insert(kAutomotiveOnlyHidl.begin(),
+                           kAutomotiveOnlyHidl.end());
+        LOG(INFO) << "Determined this is a Wear device";
+        break;
+      case DeviceType::PHONE:
+        missingHidl.insert(kAutomotiveOnlyHidl.begin(),
+                           kAutomotiveOnlyHidl.end());
+        LOG(INFO) << "Determined this is a Phone device";
+        break;
+      case DeviceType::UNKNOWN:
+        CHECK(false) << "getDeviceType return UNKNOWN type.";
+        break;
+    }
+  });
+
+  return missingHidl;
+}
+
+static bool isMissingAidl(const std::string& packageName) {
+  static std::once_flag unionFlag;
+  static std::set<std::string> missingAidl = kAlwaysMissingAidl;
+
+  std::call_once(unionFlag, [&]() {
+    const DeviceType type = getDeviceType();
+    switch (type) {
+      case DeviceType::AUTOMOTIVE:
+        missingAidl.insert(kPhoneOnlyAidl.begin(), kPhoneOnlyAidl.end());
+        missingAidl.insert(kTvOnlyAidl.begin(), kTvOnlyAidl.end());
+        break;
+      case DeviceType::TV:
+        missingAidl.insert(kAutomotiveOnlyAidl.begin(),
+                           kAutomotiveOnlyAidl.end());
+        missingAidl.insert(kRadioOnlyAidl.begin(), kRadioOnlyAidl.end());
+        break;
+      case DeviceType::WATCH:
+        missingAidl.insert(kAutomotiveOnlyAidl.begin(),
+                           kAutomotiveOnlyAidl.end());
+        missingAidl.insert(kPhoneOnlyAidl.begin(), kPhoneOnlyAidl.end());
+        missingAidl.insert(kTvOnlyAidl.begin(), kTvOnlyAidl.end());
+        break;
+      case DeviceType::PHONE:
+        missingAidl.insert(kAutomotiveOnlyAidl.begin(),
+                           kAutomotiveOnlyAidl.end());
+        missingAidl.insert(kTvOnlyAidl.begin(), kTvOnlyAidl.end());
+        break;
+      case DeviceType::UNKNOWN:
+        CHECK(false) << "getDeviceType return UNKNOWN type.";
+        break;
+    }
+  });
+
+  return missingAidl.find(packageName) != missingAidl.end();
+}
+
+static std::vector<VersionedAidlPackage> allAidlManifestInterfaces() {
+  std::vector<VersionedAidlPackage> ret;
   auto setInserter = [&](const vintf::ManifestInstance& i) -> bool {
     if (i.format() != vintf::HalFormat::AIDL) {
       return true;  // continue
     }
-    ret.insert({i.package() + "." + i.interface(), i.version().minorVer});
+    ret.push_back({i.package() + "." + i.interface(), i.version().minorVer, 0});
     return true;  // continue
   };
   vintf::VintfObject::GetDeviceHalManifest()->forEachInstance(setInserter);
@@ -289,7 +463,7 @@
   // we'll be removing items from this which we know are missing
   // in order to be left with those elements which we thought we
   // knew were missing but are actually present
-  std::set<std::string> thoughtMissing = kKnownMissingHidl;
+  std::set<std::string> thoughtMissing = getMissingHidl();
 
   for (const FQName& f : allHidlManifestInterfaces()) {
     if (thoughtMissing.erase(f.getPackageAndVersion().string()) > 0) {
@@ -337,27 +511,20 @@
   }
 }
 
-// android.hardware.foo.IFoo -> android.hardware.foo.
-std::string getAidlPackage(const std::string& aidlType) {
-  size_t lastDot = aidlType.rfind('.');
-  CHECK(lastDot != std::string::npos);
-  return aidlType.substr(0, lastDot + 1);
-}
-
 struct AidlPackageCheck {
   bool hasRegistration;
   bool knownMissing;
 };
 
 TEST(Hal, AidlInterfacesImplemented) {
-  std::set<VersionedAidlPackage> manifest = allAidlManifestInterfaces();
-  std::set<VersionedAidlPackage> thoughtMissing = kKnownMissingAidl;
-  std::set<VersionedAidlPackage> comingSoon = kComingSoonAidl;
+  std::vector<VersionedAidlPackage> manifest = allAidlManifestInterfaces();
+  std::vector<VersionedAidlPackage> thoughtMissing = kKnownMissingAidl;
 
   for (const auto& treePackage : AidlInterfaceMetadata::all()) {
     ASSERT_FALSE(treePackage.types.empty()) << treePackage.name;
     if (std::none_of(treePackage.types.begin(), treePackage.types.end(),
-                     isAospAidlInterface))
+                     isAospAidlInterface) ||
+        isMissingAidl(treePackage.name))
       continue;
     if (treePackage.stability != "vintf") continue;
 
@@ -382,21 +549,35 @@
     bool latestRegistered = false;
     for (const std::string& type : treePackage.types) {
       for (auto& [version, check] : expectedVersions) {
-        if (manifest.erase({type, version}) > 0) {
+        auto it = std::remove_if(
+            manifest.begin(), manifest.end(),
+            [&type, &ver = version](const VersionedAidlPackage& package) {
+              return package.name == type && package.version == ver;
+            });
+        if (it != manifest.end()) {
+          manifest.erase(it, manifest.end());
           if (version == expectedVersions.rbegin()->first) {
             latestRegistered = true;
           }
           check.hasRegistration = true;
         }
-        if (thoughtMissing.erase({getAidlPackage(type), version}) > 0)
+        it = std::remove_if(
+            thoughtMissing.begin(), thoughtMissing.end(),
+            [&type, &ver = version](const VersionedAidlPackage& package) {
+              return package.name == getAidlPackage(type) &&
+                     package.version == ver;
+            });
+        if (it != thoughtMissing.end()) {
+          thoughtMissing.erase(it, thoughtMissing.end());
           check.knownMissing = true;
+        }
       }
     }
 
     if (!latestRegistered && !expectedVersions.rbegin()->second.knownMissing) {
       ADD_FAILURE() << "The latest version ("
                     << expectedVersions.rbegin()->first
-                    << ") of the package is not implemented: "
+                    << ") of the module is not implemented: "
                     << treePackage.name
                     << " which declares the following types:\n    "
                     << base::Join(treePackage.types, "\n    ");
@@ -417,11 +598,8 @@
   }
 
   for (const auto& package : thoughtMissing) {
-    // TODO: b/194806512 : Remove after Wifi hostapd AIDL interface lands on aosp
-    if (comingSoon.erase(package) == 0) {
-      ADD_FAILURE() << "Interface in missing list and cannot find it anywhere: "
-                    << package.name << " V" << package.version;
-    }
+    ADD_FAILURE() << "Interface in missing list and cannot find it anywhere: "
+                  << package.name << " V" << package.version;
   }
 
   for (const auto& package : manifest) {
diff --git a/tests/powerwash/src/com/android/cuttlefish/tests/PowerwashTest.java b/tests/powerwash/src/com/android/cuttlefish/tests/PowerwashTest.java
index 42d9dda..50993da 100644
--- a/tests/powerwash/src/com/android/cuttlefish/tests/PowerwashTest.java
+++ b/tests/powerwash/src/com/android/cuttlefish/tests/PowerwashTest.java
@@ -17,12 +17,18 @@
 
 import static org.junit.Assert.assertTrue;
 
+import com.android.tradefed.config.Option;
+import com.android.tradefed.device.DeviceNotAvailableException;
+import com.android.tradefed.device.RemoteAndroidDevice;
 import com.android.tradefed.device.cloud.RemoteAndroidVirtualDevice;
 import com.android.tradefed.device.internal.DeviceResetHandler;
+import com.android.tradefed.log.LogUtil.CLog;
 import com.android.tradefed.testtype.DeviceJUnit4ClassRunner;
 import com.android.tradefed.testtype.junit4.BaseHostJUnit4Test;
+import com.android.tradefed.util.CommandStatus;
 
 import org.junit.Assert;
+import org.junit.Assume;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 
@@ -39,29 +45,73 @@
 @RunWith(DeviceJUnit4ClassRunner.class)
 public class PowerwashTest extends BaseHostJUnit4Test {
 
+    @Option(
+            name = "test-count",
+            description = "Number of times to powerwash the device and reboot.")
+    private int mTestCount = 1;
+
+    @Option(
+            name = "force-remote-avd-type",
+            description = "Force to check device type of RemoteAndroidVirtualDevice.")
+    private boolean mForceRemoteAvdType = false;
+
+    private String mCuttlefishHostUser = null;
+    private Integer mCuttlefishDeviceNumOffset = null;
+
     @Test
     public void testPowerwash() throws Exception {
+        if (mForceRemoteAvdType) {
+            Assume.assumeTrue(
+                    "`force-remote-avd-type` is set to true, so device type must be"
+                        + " RemoteAndroidVirtualDevice to run the test. Set the option to false to"
+                        + " reset the device through Reset feature server.",
+                    getDevice() instanceof RemoteAndroidVirtualDevice);
+        }
         // Create a file in tmp directory
         final String tmpFile = "/data/local/tmp/powerwash_tmp";
         getDevice().executeShellCommand("touch " + tmpFile);
 
         // Reboot the device to make sure the file persits.
         getDevice().reboot();
-        getDevice().waitForDeviceAvailable();
         File file = getDevice().pullFile(tmpFile);
         if (file == null) {
             Assert.fail("Setup failed: tmp file failed to persist after device reboot.");
         }
-        boolean success = false;
-        if (getDevice() instanceof RemoteAndroidVirtualDevice) {
-            success = ((RemoteAndroidVirtualDevice) getDevice()).powerwashGce();
-        } else {
-            // We don't usually expect tests to use our feature server, but in this case we are
-            // validating the feature itself so it's fine
-            DeviceResetHandler handler = new DeviceResetHandler(getInvocationContext());
-            success = handler.resetDevice(getDevice());
+
+        long startAllRuns = System.currentTimeMillis();
+        for (int i = 0; i < mTestCount; i++) {
+            CLog.d("Powerwash attempt #%d", i);
+            long start = System.currentTimeMillis();
+            boolean success = false;
+            if (getDevice() instanceof RemoteAndroidVirtualDevice) {
+                mCuttlefishHostUser = ((RemoteAndroidVirtualDevice) getDevice()).getInitialUser();
+                mCuttlefishDeviceNumOffset = ((RemoteAndroidVirtualDevice) getDevice())
+                        .getInitialDeviceNumOffset();
+                if (mCuttlefishDeviceNumOffset != null && mCuttlefishHostUser != null) {
+                    success = ((RemoteAndroidVirtualDevice) getDevice())
+                            .powerwashGce(mCuttlefishHostUser, mCuttlefishDeviceNumOffset)
+                            .getStatus().equals(CommandStatus.SUCCESS);
+                } else {
+                    success = ((RemoteAndroidVirtualDevice) getDevice())
+                            .powerwash().getStatus().equals(CommandStatus.SUCCESS);
+                }
+            } else {
+                // We don't usually expect tests to use our feature server, but in this case we are
+                // validating the feature itself so it's fine
+                DeviceResetHandler handler = new DeviceResetHandler(getInvocationContext());
+                try {
+                    success = handler.resetDevice(getDevice());
+                } catch (DeviceNotAvailableException e) {
+                    CLog.e(e);
+                }
+            }
+            assertTrue(String.format("Powerwash reset failed during attemt #%d", i), success);
+            long duration = System.currentTimeMillis() - start;
+            CLog.d("Powerwash took %dms to finish", duration);
         }
-        assertTrue("Powerwash reset failed", success);
+        CLog.d(
+                "%d powerwash runs finished successfully, with average time of %dms",
+                mTestCount, (System.currentTimeMillis() - startAllRuns) / mTestCount);
 
         // Verify that the device is back online and pre-existing file is gone.
         file = getDevice().pullFile(tmpFile);
diff --git a/tests/ril/AndroidManifest.xml b/tests/ril/AndroidManifest.xml
index 4872109..4d0b661 100644
--- a/tests/ril/AndroidManifest.xml
+++ b/tests/ril/AndroidManifest.xml
@@ -20,14 +20,6 @@
     android:versionName="1.0"
     android:sharedUserId="android.uid.system">
 
-    <uses-permission android:name="android.permission.ACCESS_WIFI_STATE" />
-    <uses-permission android:name="android.permission.ACCESS_COARSE_LOCATION" />
-    <uses-permission android:name="android.permission.CHANGE_WIFI_STATE" />
-
-   <uses-sdk
-        android:minSdkVersion="19"
-        android:targetSdkVersion="25" />
-
     <instrumentation
         android:name="androidx.test.runner.AndroidJUnitRunner"
         android:targetPackage="com.android.cuttlefish.ril.tests" />
diff --git a/tests/ril/runtests.sh b/tests/ril/runtests.sh
index 2bb4bac..6347be7 100755
--- a/tests/ril/runtests.sh
+++ b/tests/ril/runtests.sh
@@ -1,5 +1,19 @@
 #!/usr/bin/env bash
 
+# Copyright 2018 Google Inc. All rights reserved.
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 if [[ "$1" == "--help" ]]; then
   cat <<END
 Usage for $0
diff --git a/tests/ril/src/com/android/cuttlefish/ril/tests/RilE2eTests.java b/tests/ril/src/com/android/cuttlefish/ril/tests/RilE2eTests.java
index d633991..10df99a 100644
--- a/tests/ril/src/com/android/cuttlefish/ril/tests/RilE2eTests.java
+++ b/tests/ril/src/com/android/cuttlefish/ril/tests/RilE2eTests.java
@@ -20,18 +20,21 @@
 import android.content.Context;
 import android.net.ConnectivityManager;
 import android.net.Network;
-import android.net.NetworkInfo;
+import android.net.NetworkCapabilities;
 import android.net.wifi.WifiManager;
 import android.os.Build;
+import android.telephony.CellInfo;
 import android.telephony.CellInfoLte;
 import android.telephony.CellSignalStrengthLte;
 import android.telephony.TelephonyManager;
 import android.util.Log;
 
-import androidx.test.InstrumentationRegistry;
+import androidx.annotation.NonNull;
+import androidx.test.core.app.ApplicationProvider;
 
 import com.android.compatibility.common.util.PropertyUtil;
 
+import org.hamcrest.MatcherAssert;
 import org.junit.Assert;
 import org.junit.Assume;
 import org.junit.Before;
@@ -39,7 +42,12 @@
 import org.junit.runner.RunWith;
 import org.junit.runners.JUnit4;
 
+import java.net.InetSocketAddress;
 import java.net.Socket;
+import java.net.SocketTimeoutException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.CountDownLatch;
 
 /**
  * Tests used to validate E2E RIL functionality.
@@ -62,7 +70,7 @@
                 "Skip testing deprecated radio HAL from Q or earlier vendor",
                 PropertyUtil.getFirstApiLevel() <= Build.VERSION_CODES.Q);
 
-        mContext = InstrumentationRegistry.getInstrumentation().getContext();
+        mContext = ApplicationProvider.getApplicationContext();
         mWifiManager = (WifiManager)mContext.getSystemService(Context.WIFI_SERVICE);
         mConnManager = (ConnectivityManager)mContext.getSystemService(Context.CONNECTIVITY_SERVICE);
         mTeleManager = (TelephonyManager)mContext.getSystemService(Context.TELEPHONY_SERVICE);
@@ -72,7 +80,7 @@
         disableWifi();
     }
 
-
+    @SuppressWarnings("deprecation") // setWifiEnabled not deprecated for system uid
     private void disableWifi() throws Exception {
         Log.i(TAG, "Disabling WIFI...");
 
@@ -80,9 +88,7 @@
         int count = MAX_POLL_DISABLED_WIFI_COUNT;
         while (mWifiManager.isWifiEnabled() && count-- > 0) {
             Log.i(TAG, "Waiting for WIFI to be disabled...");
-            try {
-                Thread.sleep(1000);
-            } catch (InterruptedException e) {}
+            Thread.sleep(1000);
         }
         if (count < 0) {
             Log.e(TAG, "Reached max number of polls while waiting to disable wifi");
@@ -93,19 +99,23 @@
 
     /**
      * Verify that RIL stack is able to get up and connect to network in
-     * 60 seconds.
+     * 20 seconds.
      */
-    @Test(timeout = 10 * 1000)
+    @Test(timeout = 20 * 1000)
     public void testRilConnects() throws Exception {
         while (true) {
-            NetworkInfo net = mConnManager.getActiveNetworkInfo();
-            if (net != null && net.getType() == ConnectivityManager.TYPE_MOBILE) break;
+            Network nw = mConnManager.getActiveNetwork();
+            if (nw == null) {
+                continue;
+            }
+            NetworkCapabilities cap = mConnManager.getNetworkCapabilities(nw);
+            if (cap != null && cap.hasTransport(NetworkCapabilities.TRANSPORT_CELLULAR)) {
+                break;
+            }
 
             Log.i(TAG, "Waiting for MOBILE to become primary network for DATA.");
 
-            try {
-                Thread.sleep(1000);
-            } catch (InterruptedException e) {}
+            Thread.sleep(1000);
         }
 
         // Bind process to MOBILE network. This should allow us to verify network is functional.
@@ -113,9 +123,18 @@
         Assert.assertNotNull(net);
         Assert.assertTrue(mConnManager.bindProcessToNetwork(net));
 
-        // Open connection to google.com servers.
-        try (Socket s = new Socket("google.com", 80)) {
-            Assert.assertTrue(s.isConnected());
+        // Open connection to Google public DNS server
+        InetSocketAddress addr = new InetSocketAddress("8.8.8.8", 53);
+        while (true) {
+            try (Socket s = new Socket()) {
+                Log.d(TAG, "Testing socket connection to 8.8.8.8:53...");
+                s.connect(addr, 5000); // use a socket connection timeout of 5s
+                Assert.assertTrue(
+                    "Failed to make socket connection to 8.8.8.8:53", s.isConnected());
+                return;
+            } catch (SocketTimeoutException e) {
+                Log.d(TAG, "Socket connection to 8.8.8.8:53 timed out, retry...");
+            }
         }
     }
 
@@ -132,7 +151,7 @@
         Assert.assertSame(TelephonyManager.NETWORK_TYPE_LTE, mTeleManager.getVoiceNetworkType());
         Assert.assertSame(TelephonyManager.SIM_STATE_READY, mTeleManager.getSimState());
         Assert.assertSame(TelephonyManager.PHONE_TYPE_GSM, mTeleManager.getPhoneType());
-        Assert.assertSame(mTeleManager.getPhoneCount(), 1);
+        Assert.assertSame(mTeleManager.getActiveModemCount(), 1);
         // See SIM FS response for 178 28480 (Cuttlefish RIL).
         Assert.assertEquals("+15551234567", mTeleManager.getLine1Number());
         // See SIM FS response for 178 28615 (Cuttlefish RIL).
@@ -142,9 +161,23 @@
 
     @Test
     public void testSignalLevels() throws Exception {
-        CellInfoLte cellInfo = (CellInfoLte) mTeleManager.getAllCellInfo().get(0);
+        List<CellInfo> cellInfos = new ArrayList<>();
+        CountDownLatch cdl = new CountDownLatch(1);
+        mTeleManager.requestCellInfoUpdate(mContext.getMainExecutor(),
+                new TelephonyManager.CellInfoCallback() {
+                    @Override
+                    public void onCellInfo(@NonNull List<CellInfo> cellInfo) {
+                        if (cellInfo != null) {
+                            cellInfos.addAll(cellInfo);
+                        }
+                        cdl.countDown();
+                    }
+                });
+        cdl.await();
+        MatcherAssert.assertThat("Size of list of cell info", cellInfos.size(), greaterThan(0));
+        CellInfoLte cellInfo = (CellInfoLte) cellInfos.get(0);
         CellSignalStrengthLte signalStrength = cellInfo.getCellSignalStrength();
         int bars = signalStrength.getLevel();
-        Assert.assertThat("Signal Bars", bars, greaterThan(1));
+        MatcherAssert.assertThat("Signal Bars", bars, greaterThan(1));
     }
 }
diff --git a/tests/utils/Android.bp b/tests/utils/Android.bp
new file mode 100644
index 0000000..5bdcb7d
--- /dev/null
+++ b/tests/utils/Android.bp
@@ -0,0 +1,28 @@
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+java_library_host {
+    name: "cuttlefish_host_test_utils",
+    srcs: [
+        "src/**/*.java",
+    ],
+    libs: [
+        "compatibility-host-util",
+        "tradefed",
+    ],
+}
\ No newline at end of file
diff --git a/tests/utils/src/com/android/cuttlefish/tests/utils/CuttlefishControlLocalRunner.java b/tests/utils/src/com/android/cuttlefish/tests/utils/CuttlefishControlLocalRunner.java
new file mode 100644
index 0000000..a334c66
--- /dev/null
+++ b/tests/utils/src/com/android/cuttlefish/tests/utils/CuttlefishControlLocalRunner.java
@@ -0,0 +1,79 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.cuttlefish.tests.utils;
+
+import com.android.tradefed.invoker.TestInformation;
+import com.android.tradefed.util.CommandResult;
+import com.android.tradefed.util.IRunUtil;
+import com.android.tradefed.util.RunUtil;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Optional;
+
+import org.junit.Assert;
+
+public class CuttlefishControlLocalRunner implements CuttlefishControlRunner {
+
+    private static final String CVD_CUTTLEFISH_CONFIG =
+        System.getProperty("user.home") + "/cuttlefish_runtime/cuttlefish_config.json";
+
+    private static final String ACLOUD_CUTTLEFISH_CONFIG =
+        "/tmp/acloud_cvd_temp/local-instance-1/cuttlefish_runtime/instances/cvd-1/cuttlefish_config.json";
+
+    private static final List<String> CUTTLEFISH_CONFIG_CANDIDATES =
+        Arrays.asList(CVD_CUTTLEFISH_CONFIG, ACLOUD_CUTTLEFISH_CONFIG);
+
+    private final IRunUtil runUtil = new RunUtil();
+
+    private final TestInformation testInformation;
+
+    private final String runtimeDirectoryPath;
+
+    public CuttlefishControlLocalRunner(TestInformation testInformation) throws FileNotFoundException {
+        this.testInformation = testInformation;
+
+        Optional<String> configPath =
+            CUTTLEFISH_CONFIG_CANDIDATES.stream().filter(x -> new File(x).exists()).findFirst();
+        if (!configPath.isPresent()) {
+            throw new FileNotFoundException("Failed to find Cuttlefish config file.");
+        }
+
+        runUtil.setEnvVariable("CUTTLEFISH_CONFIG_FILE", configPath.get());
+
+        this.runtimeDirectoryPath = Path.of(configPath.get()).getParent().toString();
+    }
+
+    @Override
+    public CommandResult run(long timeout, String... command) {
+        return runUtil.runTimedCmd(timeout, command);
+    }
+
+    @Override
+    public String getHostBinaryPath(String basename) throws FileNotFoundException {
+        return testInformation.getDependencyFile(basename, false).getAbsolutePath();
+    }
+
+    @Override
+    public String getHostRuntimePath(String basename) throws FileNotFoundException {
+        return Paths.get(this.runtimeDirectoryPath, basename).toAbsolutePath().toString();
+    }
+}
\ No newline at end of file
diff --git a/tests/utils/src/com/android/cuttlefish/tests/utils/CuttlefishControlRemoteRunner.java b/tests/utils/src/com/android/cuttlefish/tests/utils/CuttlefishControlRemoteRunner.java
new file mode 100644
index 0000000..a359a9a
--- /dev/null
+++ b/tests/utils/src/com/android/cuttlefish/tests/utils/CuttlefishControlRemoteRunner.java
@@ -0,0 +1,96 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.cuttlefish.tests.utils;
+
+import com.android.tradefed.device.TestDeviceOptions;
+import com.android.tradefed.device.cloud.GceAvdInfo;
+import com.android.tradefed.device.cloud.RemoteAndroidVirtualDevice;
+import com.android.tradefed.device.cloud.RemoteFileUtil;
+import com.android.tradefed.device.cloud.RemoteSshUtil;
+import com.android.tradefed.log.LogUtil.CLog;
+import com.android.tradefed.util.CommandResult;
+import com.android.tradefed.util.IRunUtil;
+import com.android.tradefed.util.RunUtil;
+import com.google.common.collect.Iterables;
+
+import java.io.FileNotFoundException;
+import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Optional;
+
+import org.junit.Assert;
+
+public class CuttlefishControlRemoteRunner implements CuttlefishControlRunner {
+
+    private static final String OXYGEN_CUTTLEFISH_RUNTIME_DIRECTORY = "/tmp/cfbase/3";
+
+    private static final long DEFAULT_TIMEOUT_MILLIS = 5 * 1000;
+
+    private final IRunUtil runUtil = new RunUtil();
+
+    private final TestDeviceOptions testDeviceOptions;
+
+    private final GceAvdInfo testDeviceAvdInfo;
+
+    private final String basePath;
+
+    public CuttlefishControlRemoteRunner(RemoteAndroidVirtualDevice testDevice) throws FileNotFoundException {
+        this.testDeviceOptions = testDevice.getOptions();
+        this.testDeviceAvdInfo = testDevice.getAvdInfo();
+
+        List<String> basePathCandidates =
+                Arrays.asList("/home/" + this.testDeviceOptions.getInstanceUser(),
+                              OXYGEN_CUTTLEFISH_RUNTIME_DIRECTORY);
+
+        Optional<String> basePath =
+                basePathCandidates.stream().filter(x -> remoteFileExists(x)).findFirst();
+        if (!basePath.isPresent()) {
+            throw new FileNotFoundException("Failed to find Cuttlefish runtime directory.");
+        }
+
+        this.basePath = basePath.get();
+    }
+
+    private boolean remoteFileExists(String path) {
+        return RemoteFileUtil.doesRemoteFileExist(
+                testDeviceAvdInfo, testDeviceOptions, runUtil, DEFAULT_TIMEOUT_MILLIS, path);
+    }
+
+    @Override
+    public CommandResult run(long timeout, String... originalCommand) {
+        // Note: IRunUtil has setEnvVariable() but that ends up setting the environment
+        // variable for the ssh command and not the environment variable on the ssh target.
+        List<String> command = new ArrayList<>(Arrays.asList(originalCommand));
+        command.add(0, String.format("HOME=%s", this.basePath));
+        String[] commandArray = Iterables.toArray(command, String.class);
+
+        return RemoteSshUtil.remoteSshCommandExec(
+                testDeviceAvdInfo, testDeviceOptions, runUtil, timeout, commandArray);
+    }
+
+    @Override
+    public String getHostBinaryPath(String basename) throws FileNotFoundException {
+        return Paths.get(this.basePath, "bin", basename).toAbsolutePath().toString();
+    }
+
+    @Override
+    public String getHostRuntimePath(String basename) throws FileNotFoundException {
+        return Paths.get(this.basePath, "cuttlefish_runtime", basename).toAbsolutePath().toString();
+    }
+}
diff --git a/tests/utils/src/com/android/cuttlefish/tests/utils/CuttlefishControlRunner.java b/tests/utils/src/com/android/cuttlefish/tests/utils/CuttlefishControlRunner.java
new file mode 100644
index 0000000..33e3cdc
--- /dev/null
+++ b/tests/utils/src/com/android/cuttlefish/tests/utils/CuttlefishControlRunner.java
@@ -0,0 +1,31 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.cuttlefish.tests.utils;
+
+import com.android.tradefed.util.CommandResult;
+
+import java.io.FileNotFoundException;
+
+public interface CuttlefishControlRunner {
+
+    public CommandResult run(long timeout, String... command);
+
+    public String getHostBinaryPath(String basename) throws FileNotFoundException;
+
+    public String getHostRuntimePath(String basename) throws FileNotFoundException;
+
+}
\ No newline at end of file
diff --git a/tests/utils/src/com/android/cuttlefish/tests/utils/CuttlefishHostTest.java b/tests/utils/src/com/android/cuttlefish/tests/utils/CuttlefishHostTest.java
new file mode 100644
index 0000000..d773f0b
--- /dev/null
+++ b/tests/utils/src/com/android/cuttlefish/tests/utils/CuttlefishHostTest.java
@@ -0,0 +1,47 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.cuttlefish.tests.utils;
+
+import com.android.tradefed.device.ITestDevice;
+import com.android.tradefed.device.cloud.RemoteAndroidVirtualDevice;
+import com.android.tradefed.log.LogUtil.CLog;
+import com.android.tradefed.testtype.DeviceJUnit4ClassRunner;
+import com.android.tradefed.testtype.junit4.BaseHostJUnit4Test;
+import com.android.tradefed.util.CommandResult;
+
+import org.junit.Before;
+import org.junit.runner.RunWith;
+
+/**
+ * Base test class for interacting with a Cuttlefish device with host binaries.
+ */
+public abstract class CuttlefishHostTest extends BaseHostJUnit4Test {
+
+    protected CuttlefishControlRunner runner;
+
+    @Before
+    public void cuttlefishHostTestSetUp() throws Exception {
+        ITestDevice device = getDevice();
+        CLog.i("Test Device Class Name: " + device.getClass().getSimpleName());
+        if (device instanceof RemoteAndroidVirtualDevice) {
+            runner = new CuttlefishControlRemoteRunner((RemoteAndroidVirtualDevice)device);
+        } else {
+            runner = new CuttlefishControlLocalRunner(getTestInformation());
+        }
+    }
+
+}
diff --git a/tests/wifi/Android.bp b/tests/wifi/Android.bp
index 838e861..d7e36a8 100644
--- a/tests/wifi/Android.bp
+++ b/tests/wifi/Android.bp
@@ -21,6 +21,7 @@
     srcs: ["src/**/*.java"],
     libs: ["android.test.runner.stubs"],
     static_libs: [
+        "androidx.test.ext.junit",
         "androidx.test.rules",
         "platform-test-annotations",
     ],
diff --git a/tests/wifi/AndroidManifest.xml b/tests/wifi/AndroidManifest.xml
index 9394ad0..10fe252 100644
--- a/tests/wifi/AndroidManifest.xml
+++ b/tests/wifi/AndroidManifest.xml
@@ -20,13 +20,6 @@
     android:versionName="1.0"
     android:sharedUserId="android.uid.system">
 
-    <uses-permission android:name="android.permission.ACCESS_WIFI_STATE" />
-    <uses-permission android:name="android.permission.CHANGE_WIFI_STATE" />
-
-   <uses-sdk
-        android:minSdkVersion="19"
-        android:targetSdkVersion="25" />
-
     <instrumentation
         android:name="androidx.test.runner.AndroidJUnitRunner"
         android:targetPackage="com.android.cuttlefish.wifi.tests" />
diff --git a/tests/wifi/runtests.sh b/tests/wifi/runtests.sh
index c6642d3..b5aaa3e 100755
--- a/tests/wifi/runtests.sh
+++ b/tests/wifi/runtests.sh
@@ -1,5 +1,19 @@
 #!/usr/bin/env bash
 
+# Copyright 2018 Google Inc. All rights reserved.
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 if [[ "$1" == "--help" ]]; then
   cat <<END
 Usage for $0
diff --git a/tests/wifi/src/com/android/cuttlefish/wifi/tests/WifiE2eTests.java b/tests/wifi/src/com/android/cuttlefish/wifi/tests/WifiE2eTests.java
index 1ea4a1d..fc9a083 100644
--- a/tests/wifi/src/com/android/cuttlefish/wifi/tests/WifiE2eTests.java
+++ b/tests/wifi/src/com/android/cuttlefish/wifi/tests/WifiE2eTests.java
@@ -26,6 +26,7 @@
 import android.util.Log;
 
 import androidx.test.InstrumentationRegistry;
+import androidx.test.core.app.ApplicationProvider;
 
 import org.junit.Assert;
 import org.junit.Before;
@@ -33,7 +34,9 @@
 import org.junit.runner.RunWith;
 import org.junit.runners.JUnit4;
 
+import java.net.InetSocketAddress;
 import java.net.Socket;
+import java.net.SocketTimeoutException;
 import java.util.List;
 
 /**
@@ -48,39 +51,36 @@
 
     @Before
     public void setUp() throws Exception {
-        mContext = InstrumentationRegistry.getInstrumentation().getContext();
+        mContext = ApplicationProvider.getApplicationContext();
         mWifiManager = (WifiManager)mContext.getSystemService(Context.WIFI_SERVICE);
         mConnManager = (ConnectivityManager)mContext.getSystemService(Context.CONNECTIVITY_SERVICE);
     }
 
 
-    private void enableWifi() {
+    @SuppressWarnings("unused")
+    private void enableWifi() throws InterruptedException {
         Log.i(TAG, "Enabling WIFI...");
         mWifiManager.setWifiEnabled(true);
-        while (!(mWifiManager.isWifiEnabled() && mWifiManager.pingSupplicant())) {
-            Log.i(TAG, "Waiting for WIFI (Enabled: " + mWifiManager.isWifiEnabled() +
-                    ", Ready: " + mWifiManager.pingSupplicant() + ")");
-            try {
-                Thread.sleep(1000);
-            } catch (InterruptedException e) {}
+        while (!mWifiManager.isWifiEnabled()) {
+            Log.i(TAG, "Waiting for WIFI to be enabled...");
+            Thread.sleep(1000);
         }
     }
 
 
-    private void disableWifi() {
+    @SuppressWarnings("unused")
+    private void disableWifi() throws InterruptedException {
         Log.i(TAG, "Disabling WIFI...");
 
         mWifiManager.setWifiEnabled(false);
         while (mWifiManager.isWifiEnabled()) {
             Log.i(TAG, "Waiting for WIFI to be disabled...");
-            try {
-                Thread.sleep(1000);
-            } catch (InterruptedException e) {}
         }
     }
 
 
-    private void waitForSupplicantState(SupplicantState... expectedStates) {
+    private void waitForSupplicantState(SupplicantState... expectedStates)
+            throws InterruptedException {
         while (true) {
             WifiInfo info = mWifiManager.getConnectionInfo();
             SupplicantState currentState = info.getSupplicantState();
@@ -93,9 +93,7 @@
                 }
             }
 
-            try {
-                Thread.sleep(1000);
-            } catch (InterruptedException e) {}
+            Thread.sleep(1000);
         }
     }
 
@@ -114,7 +112,7 @@
      * Initialize wifi, erase all settings.
      */
     @Test(timeout = 10 * 1000)
-    public void testWifiInitialization() {
+    public void testWifiInitialization() throws Exception {
         enableWifi();
 
         List<WifiConfiguration> configs = mWifiManager.getConfiguredNetworks();
@@ -170,9 +168,7 @@
 
             Log.i(TAG, "Waiting for WIFI to become primary network for DATA.");
 
-            try {
-                Thread.sleep(1000);
-            } catch (InterruptedException e) {}
+            Thread.sleep(1000);
         }
 
         // 6. Bind process to WIFI network. This should allow us to verify network is functional.
@@ -180,9 +176,18 @@
         Assert.assertNotNull(net);
         Assert.assertTrue(mConnManager.bindProcessToNetwork(net));
 
-        // 7. Open connection to google.com servers.
-        try (Socket s = new Socket("google.com", 80)) {
-            Assert.assertTrue(s.isConnected());
+        // 7. Open connection to Google public DNS server
+        InetSocketAddress addr = new InetSocketAddress("8.8.8.8", 53);
+        while (true) {
+            try (Socket s = new Socket()) {
+                Log.d(TAG, "Testing socket connection to 8.8.8.8:53...");
+                s.connect(addr, 5000); // use a socket connection timeout of 5s
+                Assert.assertTrue(
+                        "Failed to make socket connection to 8.8.8.8:53", s.isConnected());
+                return;
+            } catch (SocketTimeoutException e) {
+                Log.d(TAG, "Socket connection to 8.8.8.8:53 timed out (5s), retry...");
+            }
         }
     }
 }
diff --git a/tests/wmediumd_control/Android.bp b/tests/wmediumd_control/Android.bp
new file mode 100644
index 0000000..dfae4d1
--- /dev/null
+++ b/tests/wmediumd_control/Android.bp
@@ -0,0 +1,55 @@
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+java_test_host {
+    name: "CuttlefishWmediumdControlTest",
+    srcs: [
+        "src/**/*.java",
+    ],
+    data_native_bins: [
+        "wmediumd_control",
+    ],
+    test_suites: [
+        "device-tests",
+    ],
+    libs: [
+        "auto_value_annotations",
+        "junit",
+        "tradefed",
+    ],
+    static_libs: [
+        "cuttlefish_host_test_utils",
+        "platform-test-annotations",
+    ],
+    jni_libs: [
+        "libext2_blkid",
+        "libfruit",
+        "libbase",
+        "libcuttlefish_fs",
+        "libcuttlefish_utils",
+        "libcurl",
+        "libcrypto",
+        "libext2_uuid",
+        "liblog",
+        "libssl",
+        "libz",
+        "libjsoncpp",
+        "libprotobuf-cpp-full",
+    ],
+    plugins: ["auto_value_plugin", "auto_annotation_plugin"],
+}
diff --git a/tests/wmediumd_control/src/com/android/cuttlefish/tests/StationInfo.java b/tests/wmediumd_control/src/com/android/cuttlefish/tests/StationInfo.java
new file mode 100644
index 0000000..df7d718
--- /dev/null
+++ b/tests/wmediumd_control/src/com/android/cuttlefish/tests/StationInfo.java
@@ -0,0 +1,56 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.cuttlefish.tests;
+
+import java.util.List;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.junit.Assert;
+
+
+public class StationInfo {
+    public String macAddress;
+    public double xPosition;
+    public double yPosition;
+    public String lci;
+    public String civicloc;
+    public int txPower;
+
+    public StationInfo(String macAddress, double xPosition, double yPosition, String lci,
+            String civicloc, int txPower) throws Exception {
+        Assert.assertTrue(isValidMacAddr(macAddress));
+        this.macAddress = macAddress;
+        this.xPosition = xPosition;
+        this.yPosition = yPosition;
+        this.lci = lci;
+        this.civicloc = civicloc;
+        this.txPower = txPower;
+    }
+
+    private static boolean isValidMacAddr(String str) {
+        Pattern pattern = Pattern.compile("^([0-9A-Fa-f]{2}[:-]){5}([0-9A-Fa-f]{2})$");
+        Matcher matcher = pattern.matcher(str);
+        return matcher.find();
+    }
+
+    public static StationInfo getStationInfo(List<String> stationInfoLine) throws Exception {
+        return new StationInfo(stationInfoLine.get(0), Double.parseDouble(stationInfoLine.get(1)),
+            Double.parseDouble(stationInfoLine.get(2)), stationInfoLine.get(3),
+            stationInfoLine.get(4), Integer.parseInt(stationInfoLine.get(5)));
+    }
+}
\ No newline at end of file
diff --git a/tests/wmediumd_control/src/com/android/cuttlefish/tests/WmediumdControlE2eTest.java b/tests/wmediumd_control/src/com/android/cuttlefish/tests/WmediumdControlE2eTest.java
new file mode 100644
index 0000000..df02adc
--- /dev/null
+++ b/tests/wmediumd_control/src/com/android/cuttlefish/tests/WmediumdControlE2eTest.java
@@ -0,0 +1,249 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.cuttlefish.tests;
+
+import com.android.cuttlefish.tests.utils.CuttlefishHostTest;
+import com.android.tradefed.device.ITestDevice;
+import com.android.tradefed.device.cloud.RemoteAndroidVirtualDevice;
+import com.android.tradefed.testtype.DeviceJUnit4ClassRunner;
+import com.android.tradefed.testtype.junit4.BaseHostJUnit4Test;
+import com.android.tradefed.log.LogUtil.CLog;
+import com.android.tradefed.util.CommandResult;
+import com.android.tradefed.util.CommandStatus;
+import com.google.common.base.Splitter;
+
+import java.io.FileNotFoundException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.stream.Collectors;
+import java.util.List;
+
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+@RunWith(DeviceJUnit4ClassRunner.class)
+public class WmediumdControlE2eTest extends CuttlefishHostTest {
+
+    private static final String WMEDIUMD_BINARY_BASENAME = "wmediumd_control";
+
+    private static final String WMEDIUMD_SERVER_BASENAME = "internal/wmediumd_api_server";
+
+    private static final Splitter NEWLINE_SPLITTER = Splitter.on('\n');
+
+    private static final Splitter TAB_SPLITTER = Splitter.on('\t');
+
+    private static final Splitter SPACE_SPLITTER = Splitter.on(' ');
+
+    private ITestDevice testDevice;
+
+    private int getRSSI() throws Exception {
+        CommandResult wifiScanCommandResult = testDevice.executeShellV2Command("cmd wifi status");
+        Assert.assertEquals(CommandStatus.SUCCESS, wifiScanCommandResult.getStatus());
+
+        String[] parsedResult = wifiScanCommandResult.getStdout().split(",");
+        for (String chunk : parsedResult) {
+            if (chunk.contains("RSSI:")) {
+                String[] parsedChunk = chunk.trim().split(" ");
+                Assert.assertEquals(parsedChunk.length, 2);
+                return Integer.parseInt(parsedChunk[1]);
+            }
+        }
+        return 0;
+    }
+
+    private String getStationMacAddress(List<StationInfo> stationInfoList) {
+        List<String> stationMacAddressList = stationInfoList.stream().map(x -> x.macAddress).filter(addr -> addr.substring(0, 6).equals("02:15:")).collect(Collectors.toList());
+        Assert.assertTrue(stationMacAddressList.size() > 0);
+        return stationMacAddressList.get(0);
+    }
+
+    private String getApMacAddress(List<StationInfo> stationInfoList) {
+        List<String> apMacAddressList = stationInfoList.stream().map(x -> x.macAddress).filter(addr -> addr.substring(0, 6).equals("42:00:")).collect(Collectors.toList());
+        Assert.assertTrue(apMacAddressList.size() > 0);
+        return apMacAddressList.get(0);
+    }
+
+    private CommandResult runWmediumdCommand(long timeout, String... command) throws FileNotFoundException {
+        String wmediumdBinary;
+        String wmediumdServer;
+
+        Assert.assertNotNull(runner);
+
+        wmediumdBinary = runner.getHostBinaryPath(WMEDIUMD_BINARY_BASENAME);
+        wmediumdServer = runner.getHostRuntimePath(WMEDIUMD_SERVER_BASENAME);
+
+        ArrayList<String> fullCommand = new ArrayList<String>(Arrays.asList(command));
+        fullCommand.add(0, wmediumdBinary);
+        fullCommand.add(1, String.format("--wmediumd_api_server=%s", wmediumdServer));
+
+        return runner.run(timeout, fullCommand.toArray(new String[0]));
+    }
+
+    /** One line for "Total Stations" and one line for the "tsv header". */
+    private static final int NUMBER_OF_NONEMPTY_INFO_LINES = 2;
+
+    public List<StationInfo> listStations() throws Exception {
+        CommandResult result = runWmediumdCommand(10000, "list_stations");
+        CLog.i("stdout:%s", result.getStdout());
+        CLog.i("stderr:%s", result.getStderr());
+        Assert.assertEquals(CommandStatus.SUCCESS, result.getStatus());
+
+        List<String> lines = NEWLINE_SPLITTER.omitEmptyStrings().splitToList(result.getStdout());
+        List<String> parsedTotalStationsLine = SPACE_SPLITTER.splitToList(lines.get(0));
+        String lastLine = parsedTotalStationsLine.get(parsedTotalStationsLine.size() - 1);
+        Assert.assertEquals(lines.size() - NUMBER_OF_NONEMPTY_INFO_LINES, Integer.parseInt(lastLine));
+
+        List<StationInfo> stationInfoList = new ArrayList<>();
+        for (int idx = NUMBER_OF_NONEMPTY_INFO_LINES; idx < lines.size(); ++idx) {
+            stationInfoList.add(StationInfo.getStationInfo(TAB_SPLITTER.splitToList(lines.get(idx))));
+        }
+        return stationInfoList;
+    }
+
+    public StationInfo getStation(String macAddress) throws Exception {
+        List<StationInfo> stationInfoList = listStations();
+        for (StationInfo station : stationInfoList) {
+            if (station.macAddress.equals(macAddress)) {
+                return station;
+            }
+        }
+        return null;
+    }
+
+    private void setSnr(String macAddress1, String macAddress2, int snr) throws Exception {
+        CommandResult result = runWmediumdCommand(10000, "set_snr", macAddress1, macAddress2, Integer.toString(snr));
+        Assert.assertEquals(CommandStatus.SUCCESS, result.getStatus());
+    }
+
+    private void setPosition(String macAddress, double xPosition, double yPosition) throws Exception {
+        CommandResult result = runWmediumdCommand(10000, "--", "set_position", macAddress, Double.toString(xPosition), Double.toString(yPosition));
+        Assert.assertEquals(CommandStatus.SUCCESS, result.getStatus());
+    }
+
+    private void setLci(String macAddress, String lci) throws Exception {
+        CommandResult result = runWmediumdCommand(10000, "set_lci", macAddress, lci);
+        Assert.assertEquals(CommandStatus.SUCCESS, result.getStatus());
+    }
+
+    private void setCivicloc(String macAddress, String civicloc) throws Exception {
+        CommandResult result = runWmediumdCommand(10000, "set_civicloc", macAddress, civicloc);
+        Assert.assertEquals(CommandStatus.SUCCESS, result.getStatus());
+    }
+
+    @Before
+    public void setUp() throws Exception {
+        this.testDevice = getDevice();
+    }
+
+    @Test(timeout = 60 * 1000)
+    public void testWmediumdControlListStations() throws Exception {
+        if (!testDevice.connectToWifiNetwork("VirtWifi", "")) return;
+
+        listStations();
+    }
+
+    @Test(timeout = 60 * 1000)
+    public void testWmediumdControlSetSnr() throws Exception {
+        if (!testDevice.connectToWifiNetwork("VirtWifi", "")) return;
+
+        List<StationInfo> stationInfoList = listStations();
+        String stationMacAddress = getStationMacAddress(stationInfoList);
+        String apMacAddress = getApMacAddress(stationInfoList);
+        int rssiDefault = getRSSI();
+        int rssiSnr11, rssiSnr88;
+
+        setSnr(apMacAddress, stationMacAddress, 11);
+        while ((rssiSnr11 = getRSSI()) == rssiDefault) {
+            Thread.sleep(1000);
+        }
+
+        setSnr(apMacAddress, stationMacAddress, 88);
+        while ((rssiSnr88 = getRSSI()) == rssiSnr11) {
+            Thread.sleep(1000);
+        }
+
+        Assert.assertTrue(rssiSnr11 < rssiSnr88);
+    }
+
+    @Test(timeout = 60 * 1000)
+    public void testWmediumdControlSetPosition() throws Exception {
+        if (!testDevice.connectToWifiNetwork("VirtWifi", "")) return;
+
+        List<StationInfo> stationInfoList = listStations();
+        String stationMacAddress = getStationMacAddress(stationInfoList);
+        String apMacAddress = getApMacAddress(stationInfoList);
+        int rssiDefault = getRSSI();
+        int rssiDistance1000, rssiDistance100, rssiDistance10;
+
+        setPosition(apMacAddress, 0.0, 0.0);
+        setPosition(stationMacAddress, 0.0, -1000.0);
+        while ((rssiDistance1000 = getRSSI()) == rssiDefault) {
+            Thread.sleep(1000);
+        }
+
+        setPosition(stationMacAddress, 0.0, 100.0);
+        while ((rssiDistance100 = getRSSI()) == rssiDistance1000) {
+            Thread.sleep(1000);
+        }
+
+        setPosition(stationMacAddress, -10.0, 0.0);
+        while ((rssiDistance10 = getRSSI()) == rssiDistance100) {
+            Thread.sleep(1000);
+        }
+
+        Assert.assertTrue(rssiDistance1000 < rssiDistance100);
+        Assert.assertTrue(rssiDistance100 < rssiDistance10);
+    }
+
+    @Test(timeout = 60 * 1000)
+    public void testWmediumdControlSetLci() throws Exception {
+        if (!testDevice.connectToWifiNetwork("VirtWifi", ""))
+            return;
+
+        List<StationInfo> stationInfoList = listStations();
+        String apMacAddress = getApMacAddress(stationInfoList);
+
+        String testLci = "abcdef";
+
+        setLci(apMacAddress, testLci);
+
+        StationInfo apStation = getStation(apMacAddress);
+
+        String trimmedLci = apStation.lci.substring(1, apStation.lci.length() - 1);
+        Assert.assertEquals(testLci, trimmedLci);
+    }
+
+    @Test(timeout = 60 * 1000)
+    public void testWmediumdControlSetCivicloc() throws Exception {
+        if (!testDevice.connectToWifiNetwork("VirtWifi", ""))
+            return;
+
+        List<StationInfo> stationInfoList = listStations();
+        String apMacAddress = getApMacAddress(stationInfoList);
+
+        String testCivicloc = "zxcvb";
+
+        setCivicloc(apMacAddress, testCivicloc);
+
+        StationInfo apStation = getStation(apMacAddress);
+
+        String trimmedCivicloc = apStation.civicloc.substring(1, apStation.civicloc.length() - 1);
+        Assert.assertEquals(testCivicloc, trimmedCivicloc);
+    }
+}
diff --git a/tools/Android.bp b/tools/Android.bp
index fe771af..5e255a1 100644
--- a/tools/Android.bp
+++ b/tools/Android.bp
@@ -1,3 +1,18 @@
+//
+// Copyright (C) 2021 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
 package {
     default_applicable_licenses: ["Android-Apache-2.0"],
 }
diff --git a/tools/copyright_bot/copyright_bot.sh b/tools/copyright_bot/copyright_bot.sh
new file mode 100755
index 0000000..67ab50f
--- /dev/null
+++ b/tools/copyright_bot/copyright_bot.sh
@@ -0,0 +1,40 @@
+#!/bin/bash
+# Copyright 2023 Google Inc. All rights reserved.
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+#apply template 1 to these file extensions
+scanning_dir=$1
+bot_dir=$(dirname "$0")
+sh $bot_dir/copyright_fixer.sh $scanning_dir "cpp" $bot_dir/copyright_template_1.txt
+sh $bot_dir/copyright_fixer.sh $scanning_dir "cc" $bot_dir/copyright_template_1.txt
+sh $bot_dir/copyright_fixer.sh $scanning_dir "h" $bot_dir/copyright_template_1.txt
+sh $bot_dir/copyright_fixer.sh $scanning_dir "java" $bot_dir/copyright_template_1.txt
+sh $bot_dir/copyright_fixer.sh $scanning_dir "proto" $bot_dir/copyright_template_1.txt
+sh $bot_dir/copyright_fixer.sh $scanning_dir "js" $bot_dir/copyright_template_1.txt
+sh $bot_dir/copyright_fixer.sh $scanning_dir "css" $bot_dir/copyright_template_1.txt
+
+#apply template 2 to these file extensions
+sh $bot_dir/copyright_fixer.sh $scanning_dir "rs" $bot_dir/copyright_template_2.txt
+sh $bot_dir/copyright_fixer.sh $scanning_dir "go" $bot_dir/copyright_template_2.txt
+
+#apply template 3 to these file extensions
+sh $bot_dir/copyright_fixer.sh $scanning_dir "html" $bot_dir/copyright_template_3.txt
+
+#apply template 4 to these file extensions
+sh $bot_dir/copyright_fixer.sh $scanning_dir "xml" $bot_dir/copyright_template_4.txt
+
+#apply template 5 to these file extensions
+sh $bot_dir/copyright_fixer.sh $scanning_dir "sh" $bot_dir/copyright_template_5.txt
+sh $bot_dir/copyright_fixer.sh $scanning_dir "bp" $bot_dir/copyright_template_5.txt
+sh $bot_dir/copyright_fixer.sh $scanning_dir "mk" $bot_dir/copyright_template_5.txt
\ No newline at end of file
diff --git a/tools/copyright_bot/copyright_fixer.sh b/tools/copyright_bot/copyright_fixer.sh
new file mode 100755
index 0000000..ae420b9
--- /dev/null
+++ b/tools/copyright_bot/copyright_fixer.sh
@@ -0,0 +1,46 @@
+# Copyright 2023 Google Inc. All rights reserved.
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This script loops over all files in the parent_dir folder and all subfolders,
+# then it fixes the copyright headers based on the provided file_extension and copyright_template_file.
+parent_dir=$1
+file_extension=$2
+copyright_template_file=$3
+
+default_year_pattern="YYYY"
+
+#find all files with the specified extension that don't contain the word "copyright"
+all_file_names=`grep -riL "copyright" $parent_dir --include \*.$file_extension `
+
+#loop over list of file names
+for file_name in $all_file_names
+do
+   #extract file creation date
+   creation_date=`git log --follow --format=%as --date default $file_name | tail -1`
+   # extract file creation year from the date
+   year=`echo $creation_date | awk -F\- '{print $1}'`
+   echo $file_name   $year
+
+   #read input template file
+   cat $copyright_template_file >> copyright_temp_file;
+   #replace the "YYYY" from template with proper extracted year
+   sed -i -e "s/$default_year_pattern/$year/g" copyright_temp_file
+
+
+   #echo $copyright_temp_file
+   #append modified copyright header to file with no copyright
+   cat $file_name >> copyright_temp_file;
+   cp copyright_temp_file $file_name;
+   rm copyright_temp_file;
+done
\ No newline at end of file
diff --git a/tools/copyright_bot/copyright_template_1.txt b/tools/copyright_bot/copyright_template_1.txt
new file mode 100644
index 0000000..174c37c
--- /dev/null
+++ b/tools/copyright_bot/copyright_template_1.txt
@@ -0,0 +1,15 @@
+/*
+ * Copyright (C) YYYY The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
diff --git a/tools/copyright_bot/copyright_template_2.txt b/tools/copyright_bot/copyright_template_2.txt
new file mode 100644
index 0000000..ddb338d
--- /dev/null
+++ b/tools/copyright_bot/copyright_template_2.txt
@@ -0,0 +1,14 @@
+//
+// Copyright (C) YYYY The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
diff --git a/tools/copyright_bot/copyright_template_3.txt b/tools/copyright_bot/copyright_template_3.txt
new file mode 100644
index 0000000..fb29947
--- /dev/null
+++ b/tools/copyright_bot/copyright_template_3.txt
@@ -0,0 +1,15 @@
+<!--
+ Copyright (C) YYYY The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ -->
diff --git a/tools/copyright_bot/copyright_template_4.txt b/tools/copyright_bot/copyright_template_4.txt
new file mode 100644
index 0000000..7cbf0c3
--- /dev/null
+++ b/tools/copyright_bot/copyright_template_4.txt
@@ -0,0 +1,18 @@
+<!--
+/*
+**
+** Copyright YYYY, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+-->
diff --git a/tools/copyright_bot/copyright_template_5.txt b/tools/copyright_bot/copyright_template_5.txt
new file mode 100644
index 0000000..186fcc8
--- /dev/null
+++ b/tools/copyright_bot/copyright_template_5.txt
@@ -0,0 +1,13 @@
+# Copyright YYYY, The Android Open Source Project
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/tools/create_base_image.go b/tools/create_base_image.go
index 40d533b..91d1b5a 100644
--- a/tools/create_base_image.go
+++ b/tools/create_base_image.go
@@ -1,25 +1,60 @@
 package main
 
 import (
-  "os"
-  "os/exec"
-  "os/user"
-  "flag"
-  "fmt"
-  "strings"
-  "io/ioutil"
-  "log"
-  "time"
+	"flag"
+	"fmt"
+	"io/ioutil"
+	"log"
+	"os"
+	"os/exec"
+	"os/user"
+	"strings"
+	"time"
+	"strconv"
 )
 
 type OnFail int
 
 const (
-    IgnoreOnFail OnFail = iota
-    WarnOnFail
-    ExitOnFail
+	IgnoreOnFail OnFail = iota
+	WarnOnFail
+	ExitOnFail
 )
 
+type arrayFlags []string
+
+// Implemented for flag#Value interface
+func (s *arrayFlags) String() string {
+	if s == nil {
+		return ""
+	}
+	return fmt.Sprintf("%v", *s)
+}
+
+// Implemented for flag#Value interface
+func (s *arrayFlags) Set(value string) error {
+	*s = append(*s, value)
+	return nil
+}
+
+// Returns `"foo" "bar"`
+func (s *arrayFlags) AsArgs() string {
+	var result []string
+	for _, value := range *s {
+		result = append(result, fmt.Sprintf("%q", value))
+	}
+	return strings.Join(result, " ")
+}
+
+// Returns `--flag="foo" --flag="bar"`
+func (s *arrayFlags) AsRepeatedFlag(name string) string {
+	var result []string
+	for _, value := range *s {
+		result = append(result, fmt.Sprintf(`--%s="%s"`, name, value))
+	}
+	return strings.Join(result, " ")
+}
+
 var build_instance string
 var build_project string
 var build_zone string
@@ -27,236 +62,341 @@
 var dest_family string
 var dest_project string
 var launch_instance string
+var arch string
 var source_image_family string
 var source_image_project string
 var repository_url string
 var repository_branch string
 var version string
-var SSH_FLAGS string
+var internal_ip_flag string
 var INTERNAL_extra_source string
 var verbose bool
 var username string
+var image_disk_size_gb int
+
+// NOTE: For `gcloud compute ssh` command, `ssh_flags` will be used as SSH_ARGS rather than
+// as `--ssh_flag` repeated flag. Why? because --ssh_flag is not parsed as expected when
+// containing quotes and spaces.
+var ssh_flags arrayFlags
+var host_orchestration_flag bool
 
 func init() {
-  user, err := user.Current()
-  if err != nil {
-    panic(err)
-  }
-  username = user.Username
+	user, err := user.Current()
+	if err != nil {
+		panic(err)
+	}
+	username = user.Username
 
-  flag.StringVar(&build_instance, "build_instance",
-    username+"-build", "Instance name to create for the build")
-  flag.StringVar(&build_project, "build_project",
-    mustShell("gcloud config get-value project"), "Project to use for scratch")
-  flag.StringVar(&build_zone, "build_zone",
-    mustShell("gcloud config get-value compute/zone"),
-    "Zone to use for scratch resources")
-  flag.StringVar(&dest_image, "dest_image",
-    "vsoc-host-scratch-"+username, "Image to create")
-  flag.StringVar(&dest_family, "dest_family", "",
-    "Image family to add the image to")
-  flag.StringVar(&dest_project, "dest_project",
-    mustShell("gcloud config get-value project"), "Project to use for the new image")
-  flag.StringVar(&launch_instance, "launch_instance", "",
-    "Name of the instance to launch with the new image")
-  flag.StringVar(&source_image_family, "source_image_family", "debian-11",
-    "Image familty to use as the base")
-  flag.StringVar(&source_image_project, "source_image_project", "debian-cloud",
-    "Project holding the base image")
-  flag.StringVar(&repository_url, "repository_url",
-    "https://github.com/google/android-cuttlefish.git",
-    "URL to the repository with host changes")
-  flag.StringVar(&repository_branch, "repository_branch",
-    "main", "Branch to check out")
-  flag.StringVar(&version, "version", "", "cuttlefish-common version")
-  flag.StringVar(&SSH_FLAGS, "INTERNAL_IP", "",
-    "INTERNAL_IP can be set to --internal-ip run on a GCE instance."+
-    "The instance will need --scope compute-rw.")
-  flag.StringVar(&INTERNAL_extra_source, "INTERNAL_extra_source", "",
-    "INTERNAL_extra_source may be set to a directory containing the source for extra packages to build.")
-  flag.BoolVar(&verbose, "verbose", true, "print commands and output (default: true)")
-  flag.Parse()
+	flag.StringVar(&build_instance, "build_instance",
+		username+"-build", "Instance name to create for the build")
+	flag.StringVar(&build_project, "build_project",
+		mustShell("gcloud config get-value project"), "Project to use for scratch")
+	// The new get-value output format is different. The result is in 2nd line.
+	str_list := strings.Split(build_project, "\n")
+	if len(str_list) == 2 {
+		build_project = str_list[1]
+	}
+
+	flag.StringVar(&build_zone, "build_zone",
+		mustShell("gcloud config get-value compute/zone"),
+		"Zone to use for scratch resources")
+	// The new get-value output format is different. The result is in 2nd line.
+	str_list = strings.Split(build_zone, "\n")
+	if len(str_list) == 2 {
+		build_zone = str_list[1]
+	}
+
+	flag.StringVar(&dest_image, "dest_image",
+		"vsoc-host-scratch-"+username, "Image to create")
+	flag.StringVar(&dest_family, "dest_family", "",
+		"Image family to add the image to")
+	flag.StringVar(&dest_project, "dest_project",
+		mustShell("gcloud config get-value project"), "Project to use for the new image")
+	// The new get-value output format is different. The result is in 2nd line.
+	str_list = strings.Split(dest_project, "\n")
+	if len(str_list) == 2 {
+		dest_project = str_list[1]
+	}
+
+	flag.StringVar(&launch_instance, "launch_instance", "",
+		"Name of the instance to launch with the new image")
+	flag.StringVar(&arch, "arch", "gce_x86_64",
+		"Which CPU arch, arm/x86_64/gce_x86_64")
+	flag.StringVar(&source_image_family, "source_image_family", "debian-11",
+		"Image familty to use as the base")
+	flag.StringVar(&source_image_project, "source_image_project", "debian-cloud",
+		"Project holding the base image")
+	flag.StringVar(&repository_url, "repository_url",
+		"https://github.com/google/android-cuttlefish.git",
+		"URL to the repository with host changes")
+	flag.StringVar(&repository_branch, "repository_branch",
+		"main", "Branch to check out")
+	flag.StringVar(&version, "version", "", "cuttlefish-common version")
+	flag.StringVar(&internal_ip_flag, "INTERNAL_IP", "",
+		"INTERNAL_IP can be set to --internal-ip run on a GCE instance."+
+			"The instance will need --scope compute-rw.")
+	flag.StringVar(&INTERNAL_extra_source, "INTERNAL_extra_source", "",
+		"INTERNAL_extra_source may be set to a directory containing the source for extra packages to build.")
+	flag.BoolVar(&verbose, "verbose", true, "print commands and output (default: true)")
+	flag.IntVar(&image_disk_size_gb, "image_disk_size_gb", 10, "Image disk size in GB")
+	flag.Var(&ssh_flags, "ssh_flag",
+		"Values for --ssh-flag and --scp_flag for gcloud compute ssh/scp respectively. This flag may be repeated")
+	flag.BoolVar(&host_orchestration_flag, "host_orchestration", false,
+		"assembles image with host orchestration capabilities")
+	flag.Parse()
 }
 
 func shell(cmd string) (string, error) {
-  if verbose {
-    fmt.Println(cmd)
-  }
-  b, err := exec.Command("/bin/sh", "-c", cmd).CombinedOutput()
-  if verbose {
-    fmt.Println(string(b))
-  }
-  if err != nil {
-    return "", err
-  }
-  return strings.TrimSpace(string(b)), nil
+	if verbose {
+		fmt.Println(cmd)
+	}
+	b, err := exec.Command("/bin/sh", "-c", cmd).CombinedOutput()
+	if verbose {
+		fmt.Println(string(b))
+	}
+	if err != nil {
+		return "", err
+	}
+	return strings.TrimSpace(string(b)), nil
 }
 
 func mustShell(cmd string) string {
-  if verbose {
-    fmt.Println(cmd)
-  }
-  out, err := shell(cmd)
-  if err != nil {
-    panic(err)
-  }
-  if verbose {
-    fmt.Println(out)
-  }
-  return strings.TrimSpace(out)
+	if verbose {
+		fmt.Println(cmd)
+	}
+	out, err := shell(cmd)
+	if err != nil {
+		panic(err)
+	}
+	if verbose {
+		fmt.Println(out)
+	}
+	return strings.TrimSpace(out)
 }
 
 func gce(action OnFail, gceArg string, errorStr ...string) (string, error) {
-  cmd := "gcloud " + gceArg
-  out, err := shell(cmd)
-  if out != "" {
-    fmt.Println(out)
-  }
-  if err != nil && action != IgnoreOnFail {
-    var buf string
-    fmt.Sprintf(buf, "gcloud error occurred: %s", err)
-    if (len(errorStr) > 0) {
-      buf += " [" + errorStr[0] + "]"
-    }
-    if action == ExitOnFail {
-      panic(buf)
-    }
-    if action == WarnOnFail {
-      fmt.Println(buf)
-    }
-  }
-  return out, err
+	cmd := "gcloud " + gceArg
+	out, err := shell(cmd)
+	if out != "" {
+		fmt.Println(out)
+	}
+	if err != nil && action != IgnoreOnFail {
+		var buf string
+		fmt.Sprintf(buf, "gcloud error occurred: %s", err)
+		if len(errorStr) > 0 {
+			buf += " [" + errorStr[0] + "]"
+		}
+		if action == ExitOnFail {
+			panic(buf)
+		}
+		if action == WarnOnFail {
+			fmt.Println(buf)
+		}
+	}
+	return out, err
 }
 
 func waitForInstance(PZ string) {
-  for {
-    time.Sleep(5 * time.Second)
-    _, err := gce(WarnOnFail, `compute ssh `+SSH_FLAGS+` `+PZ+` `+
-                  build_instance+` -- uptime`)
-    if err == nil {
-      break
-    }
-  }
+	for {
+		time.Sleep(5 * time.Second)
+		_, err := gce(WarnOnFail, `compute ssh `+internal_ip_flag+` `+PZ+` `+
+			build_instance+` -- `+ssh_flags.AsArgs()+` uptime `)
+		if err == nil {
+			break
+		}
+	}
 }
 
-func packageSource(url string, branch string, version string, subdir string) {
-  repository_dir := url[strings.LastIndex(url, "/")+1:]
-  debian_dir := mustShell(`basename "`+repository_dir+`" .git`)
-  if subdir != "" {
-    debian_dir = repository_dir + "/" + subdir
-  }
-  mustShell("git clone " + url + " -b "+branch)
-  mustShell("dpkg-source -b " + debian_dir)
-  mustShell("rm -rf " + debian_dir)
-  mustShell("ls -l")
-  mustShell("pwd")
+func packageSource(url string, branch string, subdir string) {
+	repository_dir := url[strings.LastIndex(url, "/")+1:]
+	repository_dir = mustShell(`basename "` + repository_dir + `" .git`)
+	debian_dir := repository_dir
+	if subdir != "" {
+		debian_dir = repository_dir + "/" + subdir
+	}
+	mustShell("git clone " + url + " -b " + branch)
+	mustShell("dpkg-source -b " + debian_dir)
+	mustShell("rm -rf " + repository_dir)
+	mustShell("ls -l")
+	mustShell("pwd")
 }
 
 func createInstance(instance string, arg string) {
-  _, err := gce(WarnOnFail, `compute instances describe "`+instance+`"`)
-  if err != nil {
-    gce(ExitOnFail, `compute instances create `+arg+` "`+instance+`"`)
-  }
+	_, err := gce(WarnOnFail, `compute instances describe "`+instance+`"`)
+	if err != nil {
+		gce(ExitOnFail, `compute instances create `+arg+` "`+instance+`"`)
+	}
 }
 
 func main() {
-  gpu_type := "nvidia-tesla-p100-vws"
-  PZ := "--project=" + build_project + " --zone=" + build_zone
+	gpu_type := "nvidia-tesla-p100-vws"
+	PZ := "--project=" + build_project + " --zone=" + build_zone
 
-  dest_family_flag := ""
-  if dest_family != "" {
-    dest_family_flag = "--family=" + dest_family
-  }
+	if arch != "gce_x86_64" {
+		// new path that generate image locally without creating GCE instance
 
-  scratch_dir, err := ioutil.TempDir("", "")
-  if err != nil {
-    log.Fatal(err)
-  }
+		abt := os.Getenv("ANDROID_BUILD_TOP")
+		cmd := `"` + abt + `/device/google/cuttlefish/tools/create_base_image_combined.sh"`
+		cmd += " " + arch
+		out, err := shell(cmd)
+		if out != "" {
+			fmt.Println(out)
+		}
+		if err != nil {
+			fmt.Println("create_base_image arch %s error occurred: %s", arch, err)
+		}
 
-  oldDir, err := os.Getwd()
-  if err != nil {
-    log.Fatal(err)
-  }
-  os.Chdir(scratch_dir)
-  packageSource(repository_url, repository_branch, "cuttlefish-common_" + version, "")
-  os.Chdir(oldDir)
+		// gce operations
+		delete_instances := build_instance + " " + dest_image
+		if launch_instance != "" {
+			delete_instances += " " + launch_instance
+		}
+		zip_file := "disk_" + username + ".raw.tar.gz"
+		gs_file := "gs://cloud-android-testing-esp/" + zip_file
+		cloud_storage_file := "https://storage.googleapis.com/cloud-android-testing-esp/" + zip_file
+		location := "us"
 
-  abt := os.Getenv("ANDROID_BUILD_TOP")
-  source_files := `"` + abt + `/device/google/cuttlefish/tools/create_base_image_gce.sh"`
-  source_files += " " + `"` + abt + `/device/google/cuttlefish/tools/update_gce_kernel.sh"`
-  source_files += " " + `"` + abt + `/device/google/cuttlefish/tools/remove_old_gce_kernel.sh"`
-  source_files += " " + scratch_dir + "/*"
-  if INTERNAL_extra_source != "" {
-    source_files += " " + INTERNAL_extra_source + "/*"
-  }
+		// delete all previous instances, images and disks
+		gce(WarnOnFail, `compute instances delete -q `+PZ+` `+delete_instances, `Not running`)
+		gce(WarnOnFail, `compute disks delete -q `+PZ+` "`+dest_image+`"`, `No scratch disk`)
+		gce(WarnOnFail, `compute images delete -q --project="`+build_project+`" "`+dest_image+`"`,
+			`Not respinning`)
+		gce(WarnOnFail, `alpha storage rm `+gs_file)
 
-  delete_instances := build_instance + " " + dest_image
-  if launch_instance != "" {
-    delete_instances += " " + launch_instance
-  }
+		// upload new local host image into GCE storage
+		gce(WarnOnFail, `alpha storage cp `+abt+`/`+zip_file+` gs://cloud-android-testing-esp`)
 
-  gce(WarnOnFail, `compute instances delete -q `+PZ+` `+delete_instances,
-    `Not running`)
-  gce(WarnOnFail, `compute disks delete -q `+PZ+` "`+dest_image+
-    `"`, `No scratch disk`)
-  gce(WarnOnFail, `compute images delete -q --project="`+build_project+
-    `" "`+dest_image+`"`, `Not respinning`)
-  gce(WarnOnFail, `compute disks create `+PZ+` --image-family="`+source_image_family+
-    `" --image-project="`+source_image_project+`" "`+dest_image+`"`)
-  gce(ExitOnFail, `compute accelerator-types describe "`+gpu_type+`" `+PZ,
-    `Please use a zone with `+gpu_type+` GPUs available.`)
-  createInstance(build_instance, PZ+
-    ` --machine-type=n1-standard-16 --image-family="`+source_image_family+
-    `" --image-project="`+source_image_project+
-    `" --boot-disk-size=200GiB --accelerator="type=`+gpu_type+
-    `,count=1" --maintenance-policy=TERMINATE --boot-disk-size=200GiB`)
+		// create GCE image based on new uploaded host image
+		gce(WarnOnFail, `compute images create "`+dest_image+`" --project="`+build_project+
+			`" --family="`+source_image_family+`" --source-uri="`+cloud_storage_file+
+			`" --storage-location="`+location+`" --guest-os-features=UEFI_COMPATIBLE`)
 
-  waitForInstance(PZ)
+		// find Nvidia GPU and then create GCE instance
+		gce(ExitOnFail, `compute accelerator-types describe "`+gpu_type+`" `+PZ,
+			`Please use a zone with `+gpu_type+` GPUs available.`)
+		createInstance(build_instance, PZ+
+			` --machine-type=n1-standard-16 --network-interface=network-tier=PREMIUM,subnet=default`+
+			` --accelerator="type=`+gpu_type+
+			`,count=1" --maintenance-policy=TERMINATE --provisioning-model=STANDARD`+
+			` --service-account=204446994883-compute@developer.gserviceaccount.com`+
+			` --scopes=https://www.googleapis.com/auth/devstorage.read_only,`+
+			`https://www.googleapis.com/auth/logging.write,`+
+			`https://www.googleapis.com/auth/monitoring.write,`+
+			`https://www.googleapis.com/auth/servicecontrol,`+
+			`https://www.googleapis.com/auth/service.management.readonly,`+
+			`https://www.googleapis.com/auth/trace.append`+
+			` --tags=http-server --create-disk=auto-delete=yes,boot=yes,device-name=`+build_instance+
+			`,image=projects/cloud-android-testing/global/images/`+dest_image+
+			`,mode=rw,size=200,type=projects/cloud-android-testing/zones/`+build_zone+
+			`/diskTypes/pd-balanced --no-shielded-secure-boot --shielded-vtpm`+
+			` --shielded-integrity-monitoring --reservation-affinity=any`)
 
-  // Ubuntu tends to mount the wrong disk as root, so help it by waiting until
-  // it has booted before giving it access to the clean image disk
-  gce(WarnOnFail, `compute instances attach-disk `+PZ+` "`+build_instance+
-    `" --disk="`+dest_image+`"`)
+		// enable serial-port (console)
+		gce(WarnOnFail, `compute instances add-metadata `+build_instance+
+			` --metadata serial-port-enable=TRUE`)
+		return
+	}
 
-  // beta for the --internal-ip flag that may be passed via SSH_FLAGS
-  gce(ExitOnFail, `beta compute scp `+SSH_FLAGS+` `+PZ+` `+source_files+
-    ` "`+build_instance+`:"`)
+	dest_family_flag := ""
+	if dest_family != "" {
+		dest_family_flag = "--family=" + dest_family
+	}
 
-  // Update the host kernel before installing any kernel modules
-  // Needed to guarantee that the modules in the chroot aren't built for the
-  // wrong kernel
-  gce(WarnOnFail, `compute ssh `+SSH_FLAGS+` `+PZ+` "`+build_instance+
-    `" -- ./update_gce_kernel.sh`)
-  // TODO rammuthiah if the instance is clobbered with ssh commands within
-  // 5 seconds of reboot, it becomes inaccessible. Workaround that by sleeping
-  // 50 seconds.
-  time.Sleep(50 * time.Second)
-  gce(ExitOnFail, `compute ssh `+SSH_FLAGS+` `+PZ+` "`+build_instance+
-    `" -- ./remove_old_gce_kernel.sh`)
+	scratch_dir, err := ioutil.TempDir("", "")
+	if err != nil {
+		log.Fatal(err)
+	}
 
-  gce(ExitOnFail, `compute ssh `+SSH_FLAGS+` `+PZ+` "`+build_instance+
-    `" -- ./create_base_image_gce.sh`)
-  gce(ExitOnFail, `compute instances delete -q `+PZ+` "`+build_instance+`"`)
-  gce(ExitOnFail, `compute images create --project="`+build_project+
-    `" --source-disk="`+dest_image+`" --source-disk-zone="`+build_zone+
-    `" --licenses=https://www.googleapis.com/compute/v1/projects/vm-options/global/licenses/enable-vmx `+
-    dest_family_flag+` "`+dest_image+`"`)
-  gce(ExitOnFail, `compute disks delete -q `+PZ+` "`+dest_image+`"`)
+	oldDir, err := os.Getwd()
+	if err != nil {
+		log.Fatal(err)
+	}
+	os.Chdir(scratch_dir)
+	packageSource(repository_url, repository_branch, "base")
+	packageSource(repository_url, repository_branch, "frontend")
+	os.Chdir(oldDir)
 
-  if launch_instance != "" {
-    createInstance(launch_instance, PZ+
-      ` --image-project="`+build_project+`" --image="`+dest_image+
-      `" --machine-type=n1-standard-4 --scopes storage-ro --accelerator="type=`+
-      gpu_type+`,count=1" --maintenance-policy=TERMINATE`)
-  }
+	abt := os.Getenv("ANDROID_BUILD_TOP")
+	source_files := `"` + abt + `/device/google/cuttlefish/tools/create_base_image_gce.sh"`
+	source_files += " " + `"` + abt + `/device/google/cuttlefish/tools/install_nvidia.sh"`
+	source_files += " " + `"` + abt + `/device/google/cuttlefish/tools/update_gce_kernel.sh"`
+	source_files += " " + `"` + abt + `/device/google/cuttlefish/tools/remove_old_gce_kernel.sh"`
+	source_files += " " + scratch_dir + "/*"
+	if INTERNAL_extra_source != "" {
+		source_files += " " + INTERNAL_extra_source + "/*"
+	}
 
-  fmt.Printf("Test and if this looks good, consider releasing it via:\n"+
-            "\n"+
-            "gcloud compute images create \\\n"+
-            "  --project=\"%s\" \\\n"+
-            "  --source-image=\"%s\" \\\n"+
-            "  --source-image-project=\"%s\" \\\n"+
-            "  \"%s\" \\\n"+
-            "  \"%s\"\n",
-            dest_project, dest_image, build_project, dest_family_flag, dest_image)
+	delete_instances := build_instance + " " + dest_image
+	if launch_instance != "" {
+		delete_instances += " " + launch_instance
+	}
+
+	gce(WarnOnFail, `compute instances delete -q `+PZ+` `+delete_instances,
+		`Not running`)
+	gce(WarnOnFail, `compute disks delete -q `+PZ+` "`+dest_image+
+		`"`, `No scratch disk`)
+	gce(WarnOnFail, `compute images delete -q --project="`+build_project+
+		`" "`+dest_image+`"`, `Not respinning`)
+	gce(WarnOnFail, `compute disks create `+PZ+`  --size=`+strconv.Itoa(image_disk_size_gb)+`G `+
+		`--image-family="`+source_image_family+`" --image-project="`+source_image_project+`" "`+dest_image+`"`)
+	gce(ExitOnFail, `compute accelerator-types describe "`+gpu_type+`" `+PZ,
+		`Please use a zone with `+gpu_type+` GPUs available.`)
+	createInstance(build_instance, PZ+
+		` --machine-type=n1-standard-16 --image-family="`+source_image_family+
+		`" --image-project="`+source_image_project+
+		`" --boot-disk-size=200GiB --accelerator="type=`+gpu_type+
+		`,count=1" --maintenance-policy=TERMINATE --boot-disk-size=200GiB`)
+
+	waitForInstance(PZ)
+
+	// Ubuntu tends to mount the wrong disk as root, so help it by waiting until
+	// it has booted before giving it access to the clean image disk
+	gce(WarnOnFail, `compute instances attach-disk `+PZ+` "`+build_instance+
+		`" --disk="`+dest_image+`"`)
+
+	// beta for the --internal-ip flag that may be passed via internal_ip_flag
+	gce(ExitOnFail, `beta compute scp `+internal_ip_flag+` `+PZ+` `+source_files+
+		` "`+build_instance+`:" `+ssh_flags.AsRepeatedFlag("scp-flag"))
+
+	// Update the host kernel before installing any kernel modules
+	// Needed to guarantee that the modules in the chroot aren't built for the
+	// wrong kernel
+	gce(WarnOnFail, `compute ssh `+internal_ip_flag+` `+PZ+` "`+build_instance+
+		`"`+` -- `+ssh_flags.AsArgs()+` ./update_gce_kernel.sh`)
+	// TODO rammuthiah if the instance is clobbered with ssh commands within
+	// 5 seconds of reboot, it becomes inaccessible. Workaround that by sleeping
+	// 50 seconds.
+	time.Sleep(50 * time.Second)
+	gce(ExitOnFail, `compute ssh `+internal_ip_flag+` `+PZ+` "`+build_instance+
+		`"`+` -- `+ssh_flags.AsArgs()+` ./remove_old_gce_kernel.sh`)
+
+	ho_arg := ""
+	if host_orchestration_flag {
+		ho_arg = "-o"
+	}
+	gce(ExitOnFail, `compute ssh `+internal_ip_flag+` `+PZ+` "`+build_instance+
+		`"`+` -- `+ssh_flags.AsArgs()+` ./create_base_image_gce.sh `+ho_arg)
+	gce(ExitOnFail, `compute instances delete -q `+PZ+` "`+build_instance+`"`)
+	gce(ExitOnFail, `compute images create --project="`+build_project+
+		`" --source-disk="`+dest_image+`" --source-disk-zone="`+build_zone+
+		`" --licenses=https://www.googleapis.com/compute/v1/projects/vm-options/global/licenses/enable-vmx `+
+		dest_family_flag+` "`+dest_image+`"`)
+	gce(ExitOnFail, `compute disks delete -q `+PZ+` "`+dest_image+`"`)
+
+	if launch_instance != "" {
+		createInstance(launch_instance, PZ+
+			` --image-project="`+build_project+`" --image="`+dest_image+
+			`" --machine-type=n1-standard-4 --scopes storage-ro --accelerator="type=`+
+			gpu_type+`,count=1" --maintenance-policy=TERMINATE`)
+	}
+
+	fmt.Printf("Test and if this looks good, consider releasing it via:\n"+
+		"\n"+
+		"gcloud compute images create \\\n"+
+		"  --project=\"%s\" \\\n"+
+		"  --source-image=\"%s\" \\\n"+
+		"  --source-image-project=\"%s\" \\\n"+
+		"  \"%s\" \\\n"+
+		"  \"%s\"\n",
+		dest_project, dest_image, build_project, dest_family_flag, dest_image)
 }
diff --git a/tools/create_base_image_arm.sh b/tools/create_base_image_arm.sh
index 5258636..a5fe3ad 100755
--- a/tools/create_base_image_arm.sh
+++ b/tools/create_base_image_arm.sh
@@ -20,77 +20,52 @@
 script_dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
 
 if [ -z $ANDROID_BUILD_TOP ]; then
-	echo "error: run script after 'lunch'"
-	exit 1
+	echo "error: please run script after 'lunch'"
+	export ANDROID_BUILD_TOP=$(realpath "${script_dir}"/../../../..)
+	echo "error: set ANDROID_BUILD_TOP as ${ANDROID_BUILD_TOP}"
 fi
 
 source "${ANDROID_BUILD_TOP}/external/shflags/shflags"
 
-DEFINE_boolean p1 \
-	false "Only generate/write the 1st partition (loader1)" "1"
-DEFINE_boolean p2 \
-	false "Only generate/write the 2nd partition (env)" "2"
-DEFINE_boolean p3 \
-	false "Only generate/write the 3rd partition (loader2)" "3"
-DEFINE_boolean p4 \
-	false "Only generate/write the 4th partition (trust)" "4"
-DEFINE_boolean p5 \
-	false "Only generate/write the 5th partition (rootfs)" "5"
+UBOOT_DIST=
+KERNEL_DIST=
+OUTPUT_IMAGE=
 
-UBOOT_REPO=
-KERNEL_REPO=
-IMAGE=
-
-FLAGS_HELP="USAGE: $0 <UBOOT_REPO> <KERNEL_REPO> [IMAGE] [flags]"
+FLAGS_HELP="USAGE: $0 [UBOOT_DIST] [KERNEL_DIST] [OUTPUT_IMAGE] [flags]"
 
 FLAGS "$@" || exit $?
 eval set -- "${FLAGS_ARGV}"
 
-if [ ${FLAGS_p1} -eq ${FLAGS_FALSE} ] &&
-   [ ${FLAGS_p2} -eq ${FLAGS_FALSE} ] &&
-   [ ${FLAGS_p3} -eq ${FLAGS_FALSE} ] &&
-   [ ${FLAGS_p4} -eq ${FLAGS_FALSE} ] &&
-   [ ${FLAGS_p5} -eq ${FLAGS_FALSE} ]; then
-	FLAGS_p1=${FLAGS_TRUE}
-	FLAGS_p2=${FLAGS_TRUE}
-	FLAGS_p3=${FLAGS_TRUE}
-	FLAGS_p4=${FLAGS_TRUE}
-	FLAGS_p5=${FLAGS_TRUE}
-fi
-
 for arg in "$@" ; do
-	if [ -z $UBOOT_REPO ]; then
-		UBOOT_REPO=$arg
-	elif [ -z $KERNEL_REPO ]; then
-		KERNEL_REPO=$arg
-	elif [ -z $IMAGE ]; then
-		IMAGE=$arg
+	if [ -z $UBOOT_DIST ]; then
+		UBOOT_DIST=$arg
+	elif [ -z $KERNEL_DIST ]; then
+		KERNEL_DIST=$arg
+	elif [ -z $OUTPUT_IMAGE ]; then
+		OUTPUT_IMAGE=$arg
 	else
 		flags_help
 		exit 1
 	fi
 done
 
-USE_IMAGE=`[ -z "${IMAGE}" ] && echo "0" || echo "1"`
-OVERWRITE=`[ -e "${IMAGE}" ] && echo "1" || echo "0"`
-if [ -z $KERNEL_REPO -o -z $UBOOT_REPO ]; then
-	flags_help
-	exit 1
-fi
-if [ ! -e "${UBOOT_REPO}" ]; then
-	echo "error: can't find '${UBOOT_REPO}'. aborting..."
-	exit 1
-fi
-if [ ! -e "${KERNEL_REPO}" ]; then
-	echo "error: can't find '${KERNEL_REPO}'. aborting..."
-	exit 1
-fi
-if [ $OVERWRITE -eq 1 ]; then
-	OVERWRITE_IMAGE=${IMAGE}
-	IMAGE=`mktemp`
+if [ -z "${KERNEL_DIST}" ]; then
+	OUTPUT_IMAGE="${UBOOT_DIST}"
+	UBOOT_DIST=
 fi
 
-if [ $USE_IMAGE -eq 0 ]; then
+if [ ! -e "${UBOOT_DIST}" ]; then
+	echo "No UBOOT_DIST, use prebuilts"
+	UBOOT_DIST="${ANDROID_BUILD_TOP}"/device/google/cuttlefish_prebuilts/bootloader/rockpi_aarch64
+fi
+if [ ! -e "${KERNEL_DIST}" ]; then
+	echo "No KERNEL_DIST, use prebuilts"
+	KERNEL_DIST=$(find "${ANDROID_BUILD_TOP}"/device/google/cuttlefish_prebuilts/kernel -name '*-arm64-rockpi' | sort | tail -n 1)
+fi
+
+WRITE_TO_IMAGE=`[ -z "${OUTPUT_IMAGE}" ] && echo "0" || echo "1"`
+
+if [ $WRITE_TO_IMAGE -eq 0 ]; then
 	init_devs=`lsblk --nodeps -oNAME -n`
 	echo "Reinsert device (to write to) into PC"
 	while true; do
@@ -125,10 +100,9 @@
 	echo "Detected device at /dev/${mmc_dev}"
 fi
 
-if [ ${FLAGS_p2} -eq ${FLAGS_TRUE} ]; then
-	tmpfile=`mktemp`
-	bootenv=`mktemp`
-	cat > ${tmpfile} << "EOF"
+bootenv_src=`mktemp`
+bootenv=`mktemp`
+cat > ${bootenv_src} << "EOF"
 bootdelay=2
 baudrate=1500000
 scriptaddr=0x00500000
@@ -141,131 +115,121 @@
 scan_for_boot_part=part list mmc ${devnum} -bootable devplist; env exists devplist || setenv devplist 1; for distro_bootpart in ${devplist}; do if fstype mmc ${devnum}:${distro_bootpart} bootfstype; then run find_script; fi; done; setenv devplist;
 find_script=if test -e mmc ${devnum}:${distro_bootpart} /boot/boot.scr; then echo Found U-Boot script /boot/boot.scr; run run_scr; fi
 run_scr=load mmc ${devnum}:${distro_bootpart} ${scriptaddr} /boot/boot.scr; source ${scriptaddr}
+fastboot_raw_partition_raw1=0x0 0x2000000
 EOF
-	echo "Sha=`${script_dir}/gen_sha.sh --uboot ${UBOOT_REPO} --kernel ${KERNEL_REPO}`" >> ${tmpfile}
-	${ANDROID_BUILD_TOP}/device/google/cuttlefish_prebuilts/uboot_tools/mkenvimage -s 32768 -o ${bootenv} - < ${tmpfile}
+echo "Sha=`${script_dir}/gen_sha.sh --uboot ${UBOOT_DIST} --kernel ${KERNEL_DIST}`" >> ${bootenv_src}
+${ANDROID_BUILD_TOP}/device/google/cuttlefish_prebuilts/uboot_tools/mkenvimage -s 32768 -o ${bootenv} - < ${bootenv_src}
+rm -f ${bootenv_src}
+
+IMAGE=`mktemp`
+kernel_dist_dir=$(echo ${KERNEL_DIST})
+kernel_dist_dir=$(realpath ${kernel_dist_dir})
+if [ ! -d "${kernel_dist_dir}" ]; then
+    echo "error: running realpath on KERNEL_DIST fail"
+    echo KERNEL_DIST="${KERNEL_DIST}"
+    echo kernel_dist_dir="${kernel_dist_dir}"
+    exit 1
 fi
-
-if [ ${FLAGS_p1} -eq ${FLAGS_TRUE} ] || [ ${FLAGS_p3} -eq ${FLAGS_TRUE} ]; then
-	cd ${UBOOT_REPO}
-	BUILD_CONFIG=u-boot/build.config.rockpi4 build/build.sh -j1
-	cd -
+${ANDROID_BUILD_TOP}/kernel/tests/net/test/build_rootfs.sh \
+	-a arm64 -s bullseye-rockpi -n ${IMAGE} -r ${IMAGE}.initrd -e -g \
+	-k ${kernel_dist_dir}/Image -i ${kernel_dist_dir}/initramfs.img \
+	-d ${kernel_dist_dir}/rk3399-rock-pi-4b.dtb:rockchip
+if [ $? -ne 0 ]; then
+	echo "error: failed to build rootfs. exiting..."
+	rm -f ${IMAGE}
+	exit 1
 fi
+rm -f ${IMAGE}.initrd
 
-if [ ${FLAGS_p5} -eq ${FLAGS_TRUE} ]; then
-	cd ${KERNEL_REPO}
-	rm -rf out
-	BUILD_CONFIG=common/build.config.rockpi4 build/build.sh -j`nproc`
-	cd -
-
-	dist_dir=$(echo ${KERNEL_REPO}/out/android*/dist)
-	${ANDROID_BUILD_TOP}/kernel/tests/net/test/build_rootfs.sh \
-		-a arm64 -s bullseye-rockpi -n ${IMAGE} -r ${IMAGE}.initrd -e \
-		-k ${dist_dir}/Image -i ${dist_dir}/initramfs.img \
-		-d ${dist_dir}/rk3399-rock-pi-4b.dtb:rockchip
-	if [ $? -ne 0 ]; then
-		echo "error: failed to build rootfs. exiting..."
-		exit 1
-	fi
-	rm -f ${IMAGE}.initrd
-	truncate -s +3G ${IMAGE}
-	e2fsck -f ${IMAGE}
-	resize2fs ${IMAGE}
-
-	# Turn on journaling
-	tune2fs -O ^has_journal ${IMAGE}
-	e2fsck -fy ${IMAGE} >/dev/null 2>&1
-fi
-
-if [ ${USE_IMAGE} -eq 0 ]; then
+if [ ${WRITE_TO_IMAGE} -eq 0 ]; then
 	device=/dev/${mmc_dev}
 	devicep=${device}
 
-	# 32GB eMMC size
+	# Burn the whole disk image with partition table
+	sudo dd if=${IMAGE} of=${device} bs=1M conv=fsync
+
+	# Update partition table for 32GB eMMC
 	end_sector=61071326
+	sudo sgdisk --delete=7 ${device}
+	sudo sgdisk --new=7:145M:${end_sector} --typecode=7:8305 --change-name=7:rootfs --attributes=7:set:2 ${device}
 
-	sudo sgdisk --zap-all --set-alignment=1 ${device}
-	sudo sgdisk --set-alignment=1 --new=1:64:8127 --typecode=1:8301 --change-name=1:loader1 ${device}
-	sudo sgdisk --set-alignment=1 --new=2:8128:8191 --typecode=2:8301 --change-name=2:env ${device}
-	sudo sgdisk --set-alignment=1 --new=3:16384:24575 --typecode=3:8301 --change-name=3:loader2 ${device}
-	sudo sgdisk --set-alignment=1 --new=4:24576:32767 --typecode=4:8301 --change-name=4:trust ${device}
-	sudo sgdisk --set-alignment=1 --new=5:32768:${end_sector} --typecode=5:8305 --change-name=5:rootfs --attributes=5:set:2 ${device}
-	if [ ${FLAGS_p5} -eq ${FLAGS_TRUE} ]; then
-		sudo dd if=${IMAGE} of=${devicep}5 bs=1M conv=fsync
-		sudo resize2fs ${devicep}5 >/dev/null 2>&1
-	fi
-else
-	device=$(sudo losetup -f)
-	devicep=${device}p
-
-	if [ ${FLAGS_p5} -eq ${FLAGS_FALSE} ]; then
-		fs_end=3G
-		end_sector=-
-	fi
-	if [ ${FLAGS_p5} -eq ${FLAGS_TRUE} ]; then
-		# Minimize rootfs filesystem
-		while true; do
-			out=`sudo resize2fs -M ${IMAGE} 2>&1`
-			if [[ $out =~ "Nothing to do" ]]; then
-				break
-			fi
-		done
-		# Minimize rootfs file size
-		block_count=`sudo tune2fs -l ${IMAGE} | grep "Block count:" | sed 's/.*: *//'`
-		block_size=`sudo tune2fs -l ${IMAGE} | grep "Block size:" | sed 's/.*: *//'`
-		sector_size=512
-		start_sector=32768
-		fs_size=$(( block_count*block_size ))
-		fs_sectors=$(( fs_size/sector_size ))
-		part_sectors=$(( ((fs_sectors-1)/2048+1)*2048 ))  # 1MB-aligned
-		end_sector=$(( start_sector+part_sectors-1 ))
-		secondary_gpt_sectors=33
-		fs_end=$(( (end_sector+secondary_gpt_sectors+1)*sector_size ))
-		image_size=$(( part_sectors*sector_size ))
-		truncate -s ${image_size} ${IMAGE}
-		e2fsck -fy ${IMAGE} >/dev/null 2>&1
-	fi
-
-	# Create final image
-	if [ $OVERWRITE -eq 1 ]; then
-		tmpimg=${OVERWRITE_IMAGE}
-	else
-		tmpimg=`mktemp`
-	fi
-	truncate -s ${fs_end} ${tmpimg}
-
-	# Create GPT
-	sgdisk --zap-all --set-alignment=1 ${tmpimg}
-	sgdisk --set-alignment=1 --new=1:64:8127 --typecode=1:8301 --change-name=1:loader1 ${tmpimg}
-	sgdisk --set-alignment=1 --new=2:8128:8191 --typecode=2:8301 --change-name=2:env ${tmpimg}
-	sgdisk --set-alignment=1 --new=3:16384:24575 --typecode=3:8301 --change-name=3:loader2 ${tmpimg}
-	sgdisk --set-alignment=1 --new=4:24576:32767 --typecode=4:8301 --change-name=4:trust ${tmpimg}
-	sgdisk --set-alignment=1 --new=5:32768:${end_sector} --typecode=5:8305 --change-name=5:rootfs --attributes=5:set:2 ${tmpimg}
-
-	sudo losetup ${device} ${tmpimg}
+	# Rescan the partition table and resize the rootfs
 	sudo partx -v --add ${device}
+	sudo resize2fs ${devicep}7 >/dev/null 2>&1
+else
+	# Minimize rootfs filesystem
+	rootfs_partition_start=$(partx -g -o START -s -n 7 "${IMAGE}" | xargs)
+	rootfs_partition_end=$(partx -g -o END -s -n 7 "${IMAGE}" | xargs)
+	rootfs_partition_num_sectors=$((${rootfs_partition_end} - ${rootfs_partition_start} + 1))
+	rootfs_partition_offset=$((${rootfs_partition_start} * 512))
+	rootfs_partition_size=$((${rootfs_partition_num_sectors} * 512))
+	e2fsck -fy ${IMAGE}?offset=${rootfs_partition_offset} >/dev/null 2>&1
+	imagesize=`stat -c %s "${IMAGE}"`
+	rootfs_partition_tempfile=$(mktemp)
+	dd if="${IMAGE}" of="${rootfs_partition_tempfile}" bs=512 skip=${rootfs_partition_start} count=${rootfs_partition_num_sectors}
+	while true; do
+		out=`resize2fs -M ${rootfs_partition_tempfile} 2>&1`
+		if [[ $out =~ "Nothing to do" ]]; then
+			break
+		fi
+	done
+	dd if="${rootfs_partition_tempfile}" of="${IMAGE}" bs=512 seek=${rootfs_partition_start} count=${rootfs_partition_num_sectors} conv=fsync,notrunc
+	rm -f "${rootfs_partition_tempfile}"
+	truncate -s "${imagesize}" "${IMAGE}"
+	sgdisk -e "${IMAGE}"
+	e2fsck -fy ${IMAGE}?offset=${rootfs_partition_offset} || true
 
-	if [ ${FLAGS_p5} -eq ${FLAGS_TRUE} ]; then
-		sudo dd if=${IMAGE} of=${devicep}5 bs=1M conv=fsync
-	fi
+	# Minimize rootfs file size
+	block_count=`tune2fs -l ${IMAGE}?offset=${rootfs_partition_offset} | grep "Block count:" | sed 's/.*: *//'`
+	block_size=`tune2fs -l ${IMAGE}?offset=${rootfs_partition_offset} | grep "Block size:" | sed 's/.*: *//'`
+	sector_size=512
+	start_sector=`partx -g -o START -s -n 7 "${IMAGE}" | xargs`
+	fs_size=$(( block_count*block_size ))
+	fs_sectors=$(( fs_size/sector_size ))
+	part_sectors=$(( ((fs_sectors-1)/2048+1)*2048 ))  # 1MB-aligned
+	end_sector=$(( start_sector+part_sectors-1 ))
+	secondary_gpt_sectors=33
+	fs_end=$(( (end_sector+secondary_gpt_sectors+1)*sector_size ))
+	image_size=$(( part_sectors*sector_size ))
+
+        # Disable ext3/4 journal for flashing to SD-Card
+	tune2fs -O ^has_journal ${IMAGE}?offset=${rootfs_partition_offset}
+	e2fsck -fy ${IMAGE}?offset=${rootfs_partition_offset} >/dev/null 2>&1
+
+	# Update partition table
+	sgdisk --delete=7 ${IMAGE}
+	sgdisk --new=7:145M:${end_sector} --typecode=7:8305 --change-name=7:rootfs --attributes=7:set:2 ${IMAGE}
 fi
-if [ ${FLAGS_p1} -eq ${FLAGS_TRUE} ]; then
-	# sudo dd if=${UBOOT_REPO}/out/u-boot-mainline/dist/idbloader.img of=${devicep}1 conv=fsync
-	# loader1
-	sudo dd if=${ANDROID_BUILD_TOP}/device/google/cuttlefish_prebuilts/uboot_bin/idbloader.img of=${devicep}1 conv=fsync
+
+# idbloader
+if [ ${WRITE_TO_IMAGE} -eq 0 ]; then
+	sudo dd if=${UBOOT_DIST}/idbloader.img of=${devicep}1 conv=fsync
+else
+	idbloader_partition_start=$(partx -g -o START -s -n 1 "${IMAGE}" | xargs)
+	dd if=${UBOOT_DIST}/idbloader.img of="${IMAGE}" bs=512 seek=${idbloader_partition_start} conv=fsync,notrunc
 fi
-if [ ${FLAGS_p2} -eq ${FLAGS_TRUE} ]; then
+# prebuilt
+# sudo dd if=${ANDROID_BUILD_TOP}/device/google/cuttlefish_prebuilts/uboot_bin/idbloader.img of=${devicep}1 conv=fsync
+
+# uboot_env
+if [ ${WRITE_TO_IMAGE} -eq 0 ]; then
 	sudo dd if=${bootenv} of=${devicep}2 conv=fsync
+else
+	ubootenv_partition_start=$(partx -g -o START -s -n 2 "${IMAGE}" | xargs)
+	dd if=${bootenv} of="${IMAGE}" bs=512 seek=${ubootenv_partition_start} conv=fsync,notrunc
 fi
-if [ ${FLAGS_p3} -eq ${FLAGS_TRUE} ]; then
-	# sudo dd if=${UBOOT_REPO}/out/u-boot-mainline/dist/u-boot.itb of=${devicep}3 conv=fsync
-	# loader2
-	sudo dd if=${ANDROID_BUILD_TOP}/device/google/cuttlefish_prebuilts/uboot_bin/u-boot.itb of=${devicep}3 conv=fsync
+# uboot
+if [ ${WRITE_TO_IMAGE} -eq 0 ]; then
+	sudo dd if=${UBOOT_DIST}/u-boot.itb of=${devicep}3 conv=fsync
+else
+	uboot_partition_start=$(partx -g -o START -s -n 3 "${IMAGE}" | xargs)
+	dd if=${UBOOT_DIST}/u-boot.itb of="${IMAGE}" bs=512 seek=${uboot_partition_start} conv=fsync,notrunc
 fi
-if [ ${USE_IMAGE} -eq 1 ]; then
-	sudo partx -v --delete ${device}
-	sudo losetup -d ${device}
-	if [ $OVERWRITE -eq 0 ]; then
-		mv ${tmpimg} ${IMAGE}
-	fi
+# prebuilt
+# sudo dd if=${ANDROID_BUILD_TOP}/device/google/cuttlefish_prebuilts/uboot_bin/u-boot.itb of=${devicep}3 conv=fsync
+
+if [ ${WRITE_TO_IMAGE} -eq 1 ]; then
+	truncate -s ${fs_end} ${IMAGE}
+	sgdisk --move-second-header ${IMAGE}
+	mv -f ${IMAGE} ${OUTPUT_IMAGE}
 fi
diff --git a/tools/create_base_image_combined.sh b/tools/create_base_image_combined.sh
new file mode 100755
index 0000000..7925f47
--- /dev/null
+++ b/tools/create_base_image_combined.sh
@@ -0,0 +1,139 @@
+#!/bin/bash
+
+# Copyright 2022 Google Inc. All rights reserved.
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+set -x
+set -u
+# comment out -e because partx has error message
+# set -o errexit
+# set -e
+
+# get script directory
+script_dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
+
+if [ -z $ANDROID_BUILD_TOP ]; then
+  echo "error: run script after 'lunch'"
+  exit 1
+fi
+
+source "${ANDROID_BUILD_TOP}/external/shflags/shflags"
+
+# prepare pre-requested files, including kernel and uboot
+# get temp directory
+tmpdir=`echo $RANDOM | md5sum | head -c 20`
+mkdir "/tmp/$tmpdir"
+UBOOT_REPO="/tmp/$tmpdir/uboot"
+KERNEL_REPO="/tmp/$tmpdir/kernel"
+IMAGE="/tmp/$tmpdir/test_image"
+ARCH=
+FLAGS_HELP="USAGE: $0 [ARCH]"
+mkdir $UBOOT_REPO
+mkdir $KERNEL_REPO
+cd $KERNEL_REPO
+repo init -u persistent-https://android.git.corp.google.com/kernel/manifest -b common-android13-5.15 && repo sync
+
+# parse input parameters
+FLAGS "$@" || exit $?
+eval set -- "${FLAGS_ARGV}"
+
+for arg in "$@" ; do
+  if [ -z $ARCH ]; then
+    ARCH=$arg
+  else
+    flags_help
+    exit 1
+  fi
+done
+
+if [[ "$ARCH" == "arm" ]]; then
+  cd $UBOOT_REPO
+  repo init -u persistent-https://android.git.corp.google.com/kernel/manifest -b u-boot-mainline && repo sync
+fi
+
+if [ -z $KERNEL_REPO -o -z $UBOOT_REPO ]; then
+  flags_help
+  exit 1
+fi
+if [ ! -e "${UBOOT_REPO}" ]; then
+  echo "error: can't find '${UBOOT_REPO}'. aborting..."
+  exit 1
+fi
+if [ ! -e "${KERNEL_REPO}" ]; then
+  echo "error: can't find '${KERNEL_REPO}'. aborting..."
+  exit 1
+fi
+
+tmpfile=`mktemp`
+bootenv=`mktemp`
+cat > ${tmpfile} << "EOF"
+bootdelay=2
+baudrate=1500000
+scriptaddr=0x00500000
+boot_targets=mmc1 mmc0
+bootcmd=run distro_bootcmd
+distro_bootcmd=for target in ${boot_targets}; do run bootcmd_${target}; done
+bootcmd_mmc0=devnum=0; run mmc_boot
+bootcmd_mmc1=devnum=1; run mmc_boot
+mmc_boot=if mmc dev ${devnum}; then ; run scan_for_boot_part; fi
+scan_for_boot_part=part list mmc ${devnum} -bootable devplist; env exists devplist || setenv devplist 1; for distro_bootpart in ${devplist}; do if fstype mmc ${devnum}:${distro_bootpart} bootfstype; then run find_script; fi; done; setenv devplist;
+find_script=if test -e mmc ${devnum}:${distro_bootpart} /boot/boot.scr; then echo Found U-Boot script /boot/boot.scr; run run_scr; fi
+run_scr=load mmc ${devnum}:${distro_bootpart} ${scriptaddr} /boot/boot.scr; source ${scriptaddr}
+EOF
+echo "Sha=`${script_dir}/gen_sha.sh --uboot ${UBOOT_REPO} --kernel ${KERNEL_REPO}`" >> ${tmpfile}
+${ANDROID_BUILD_TOP}/device/google/cuttlefish_prebuilts/uboot_tools/mkenvimage -s 32768 -o ${bootenv} - < ${tmpfile}
+
+# build uboot based on architecture
+cd ${UBOOT_REPO}
+if [[ "$ARCH" == "arm" ]]; then
+  BUILD_CONFIG=u-boot/build.config.rockpi4 build/build.sh -j1
+fi
+cd -
+
+# build kernel based on architecture
+cd ${KERNEL_REPO}
+rm -rf out
+if [[ "$ARCH" == "arm" ]]; then
+  BUILD_CONFIG=common/build.config.rockpi4 build/build.sh -j`nproc`
+else
+  BUILD_CONFIG=common/build.config.gce.x86_64 build/build.sh -j`nproc`
+fi
+cd -
+
+dist_dir=$(echo ${KERNEL_REPO}/out*/dist)
+
+# build rootfs/host images
+if [[ "$ARCH" == "arm" ]]; then
+  ${ANDROID_BUILD_TOP}/kernel/tests/net/test/build_rootfs.sh \
+    -a arm64 -s bullseye-rockpi -n ${IMAGE} -r ${IMAGE}.initrd -e \
+    -k ${dist_dir}/Image -i ${dist_dir}/initramfs.img \
+    -d ${dist_dir}/rk3399-rock-pi-4b.dtb:rockchip
+else
+  ${ANDROID_BUILD_TOP}/kernel/tests/net/test/build_rootfs.sh \
+    -a amd64 -s bullseye-server -n ${IMAGE} -r ${IMAGE}.initrd -e \
+    -k ${dist_dir}/bzImage -i ${dist_dir}/initramfs.img -g
+fi
+
+if [ $? -ne 0 ]; then
+  echo "error: failed to build rootfs. exiting..."
+  exit 1
+fi
+
+rm -f ${IMAGE}.initrd
+rm -rf ${ANDROID_BUILD_TOP}/disk.raw
+cp ${IMAGE} ${ANDROID_BUILD_TOP}/disk.raw
+cd ${ANDROID_BUILD_TOP}
+rm -rf disk_${USER}.raw.tar.gz
+tar Szcvf disk_${USER}.raw.tar.gz disk.raw
diff --git a/tools/create_base_image_gce.sh b/tools/create_base_image_gce.sh
index 8a3014d..6361c7c 100755
--- a/tools/create_base_image_gce.sh
+++ b/tools/create_base_image_gce.sh
@@ -1,7 +1,31 @@
 #!/bin/bash
 
+# Copyright 2018 Google Inc. All rights reserved.
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 set -x
 set -o errexit
+shopt -s extglob
+
+# If "true" install host orchestration capabilities.
+host_orchestration_flag="false"
+
+while getopts ":o" flag; do
+    case "${flag}" in
+        o) host_orchestration_flag="true";;
+    esac
+done
 
 sudo apt-get update
 
@@ -9,6 +33,14 @@
 
 sudo apt install -y debhelper ubuntu-dev-tools equivs "${extra_packages[@]}"
 
+# Resize
+sudo apt install -y cloud-utils
+sudo apt install -y cloud-guest-utils
+sudo apt install -y fdisk
+sudo growpart /dev/sdb 1
+sudo e2fsck -f -y /dev/sdb1
+sudo resize2fs /dev/sdb1
+
 # Install the cuttlefish build deps
 
 for dsc in *.dsc; do
@@ -30,8 +62,13 @@
   popd
 done
 
-# Now gather all of the *.deb files to copy them into the image
-debs=(*.deb)
+# Now gather all of the relevant .deb files to copy them into the image
+debs=()
+if [[ "${host_orchestration_flag}" == "true" ]]; then
+  debs=(!(cuttlefish-@(common|user)*).deb)
+else
+  debs=(!(cuttlefish-orchestration*).deb)
+fi
 
 tmp_debs=()
 for i in "${debs[@]}"; do
@@ -53,36 +90,22 @@
 sudo chroot /mnt/image /usr/bin/apt update
 sudo chroot /mnt/image /usr/bin/apt install -y "${tmp_debs[@]}"
 # install tools dependencies
-sudo chroot /mnt/image /usr/bin/apt install -y openjdk-11-jre
+sudo chroot /mnt/image /usr/bin/apt install -y openjdk-17-jre
 sudo chroot /mnt/image /usr/bin/apt install -y unzip bzip2 lzop
 sudo chroot /mnt/image /usr/bin/apt install -y aapt
 sudo chroot /mnt/image /usr/bin/apt install -y screen # needed by tradefed
 
 sudo chroot /mnt/image /usr/bin/find /home -ls
 sudo chroot /mnt/image /usr/bin/apt install -t bullseye-backports -y linux-image-cloud-amd64
-sudo chroot /mnt/image /usr/bin/apt --purge -y remove linux-image-5.10.0-10-cloud-amd64
 
 # update QEMU version to most recent backport
 sudo chroot /mnt/image /usr/bin/apt install -y --only-upgrade qemu-system-x86 -t bullseye-backports
 sudo chroot /mnt/image /usr/bin/apt install -y --only-upgrade qemu-system-arm -t bullseye-backports
 
 # Install GPU driver dependencies
-sudo chroot /mnt/image /usr/bin/apt install -y gcc
-sudo chroot /mnt/image /usr/bin/apt install -y linux-source
-sudo chroot /mnt/image /usr/bin/apt install -y linux-headers-`uname -r`
-sudo chroot /mnt/image /usr/bin/apt install -y make
-sudo chroot /mnt/image /usr/bin/apt install -y software-properties-common
-sudo chroot /mnt/image /usr/bin/add-apt-repository non-free
-sudo chroot /mnt/image /usr/bin/add-apt-repository contrib
-# TODO rammuthiah rootcause why this line is needed
-# For reasons unknown the above two lines don't add non-free and
-# contrib to the bullseye backports.
-sudo chroot /mnt/image /usr/bin/add-apt-repository 'deb http://deb.debian.org/debian bullseye-backports main non-free contrib'
-sudo chroot /mnt/image /usr/bin/apt update
-
-sudo chroot /mnt/image /bin/bash -c 'DEBIAN_FRONTEND=noninteractive /usr/bin/apt install -y nvidia-driver -t bullseye-backports'
-sudo chroot /mnt/image /usr/bin/apt install -y firmware-misc-nonfree -t bullseye-backports
-sudo chroot /mnt/image /usr/bin/apt install -y libglvnd-dev -t bullseye-backports
+sudo cp install_nvidia.sh /mnt/image/
+sudo chroot /mnt/image /usr/bin/bash install_nvidia.sh
+sudo rm /mnt/image/install_nvidia.sh
 
 # Verify
 query_nvidia() {
diff --git a/tools/create_base_image_hostlib.sh b/tools/create_base_image_hostlib.sh
index cc7227f..8045f77 100755
--- a/tools/create_base_image_hostlib.sh
+++ b/tools/create_base_image_hostlib.sh
@@ -1,5 +1,19 @@
 #!/bin/bash
 
+# Copyright 2018 Google Inc. All rights reserved.
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 # Common code to build a host image on GCE
 
 # INTERNAL_IP can be set to --internal-ip run on a GCE instance
@@ -49,13 +63,14 @@
   local url="$1"
   local branch="$2"
   local repository_dir="${url/*\//}"
-  local debian_dir="$(basename "${repository_dir}" .git)"
+  repository_dir="$(basename "${repository_dir}" .git)"
+  local debian_dir="${repository_dir}"
   if [[ $# -eq 4 ]]; then
     debian_dir="${repository_dir}/$4"
   fi
   git clone "${url}" -b "${branch}"
   dpkg-source -b "${debian_dir}"
-  rm -rf "${debian_dir}"
+  rm -rf "${repository_dir}"
 }
 
 main() {
@@ -70,7 +85,9 @@
   scratch_dir="$(mktemp -d)"
   pushd "${scratch_dir}"
   package_source "${FLAGS_repository_url}" "${FLAGS_repository_branch}" \
-    "cuttlefish-common_${FLAGS_version}"
+    "cuttlefish-common_${FLAGS_version}" "base"
+  package_source "${FLAGS_repository_url}" "${FLAGS_repository_branch}" \
+    "cuttlefish-frontend_${FLAGS_version}" "frontend"
   popd
   source_files=(
     "${ANDROID_BUILD_TOP}/device/google/cuttlefish/tools/create_base_image_gce.sh"
diff --git a/tools/gen_sha.sh b/tools/gen_sha.sh
index d3da1c4..0c3eb52 100755
--- a/tools/gen_sha.sh
+++ b/tools/gen_sha.sh
@@ -41,10 +41,18 @@
 cd "${ANDROID_BUILD_TOP}/device/google/cuttlefish_prebuilts"
 Sha="$Sha,`git rev-parse HEAD`"
 cd - >/dev/null
-cd "${FLAGS_uboot}/external/arm-trusted-firmware"
-Sha="$Sha,`git rev-parse HEAD`"
-cd - >/dev/null
-cd "${FLAGS_kernel}/common"
-Sha="$Sha,`git rev-parse HEAD`"
-cd - >/dev/null
+if [ -d "${FLAGS_uboot}/external/arm-trusted-firmware/.git" ]; then
+    cd "${FLAGS_uboot}/external/arm-trusted-firmware"
+    Sha="$Sha,`git rev-parse HEAD`"
+    cd - >/dev/null
+else
+    Sha="$Sha,"'!TFA'
+fi
+if [ -d "${FLAGS_kernel}/common/.git" ]; then
+    cd "${FLAGS_kernel}/common"
+    Sha="$Sha,`git rev-parse HEAD`"
+    cd - >/dev/null
+else
+    Sha="$Sha,"'!kernelcommon'
+fi
 echo $Sha
diff --git a/tools/graphviz_generator.sh b/tools/graphviz_generator.sh
new file mode 100755
index 0000000..dd40793
--- /dev/null
+++ b/tools/graphviz_generator.sh
@@ -0,0 +1,35 @@
+#!/bin/bash
+# Copyright 2023 Google Inc. All rights reserved.
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This script loops over all dot file in scanning_dir folder and
+# generates all mapped png and svg files with same file names
+# To use this script run ==>  ./graphviz_generator.sh input_fodler_path
+
+scanning_dir=$1
+
+# Find all visgraph dot files recuresively in scanning_dir
+all_dot_file_paths=`find "$scanning_dir" -type f -name "*.dot"`
+
+for file_path in $all_dot_file_paths
+do
+   echo $file_path
+   # Extract file name from file path and remove extensions
+   file_name="$(basename "$file_path" | cut -d. -f1)"
+   # Extract file directory from file path
+   file_dir="$(dirname -- $file_path)"
+   # Generate png and svg output files from input dot file
+   dot -Tpng $file_path > $file_dir/$file_name.png;
+   dot -Tsvg $file_path > $file_dir/$file_name.svg;
+done
diff --git a/tools/install_nvidia.sh b/tools/install_nvidia.sh
new file mode 100755
index 0000000..24d1db9
--- /dev/null
+++ b/tools/install_nvidia.sh
@@ -0,0 +1,47 @@
+# Copyright 2022 Google Inc. All rights reserved.
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -x
+set -o errexit
+
+arch=$(uname -m)
+nvidia_arch=${arch}
+[ "${arch}" = "x86_64" ] && arch=amd64
+[ "${arch}" = "aarch64" ] && arch=arm64
+
+# NVIDIA driver needs dkms which requires /dev/fd
+if [ ! -d /dev/fd ]; then
+  ln -s /proc/self/fd /dev/fd
+fi
+
+# Using "Depends:" is more reliable than "Version:", because it works for
+# backported ("bpo") kernels as well. NOTE: "Package" can be used instead
+# if we don't install the metapackage ("linux-image-cloud-${arch}") but a
+# specific version in the future
+kmodver=$(dpkg -s linux-image-cloud-${arch} | grep ^Depends: | \
+          cut -d: -f2 | cut -d" " -f2 | sed 's/linux-image-//')
+
+apt-get install -y wget
+# Install headers from backports, to match the linux-image
+apt-get install -y -t bullseye-backports $(echo linux-headers-${kmodver})
+# Dependencies for nvidia-installer
+apt-get install -y dkms libglvnd-dev libc6-dev pkg-config
+
+nvidia_version=515.65.01
+
+wget -q https://us.download.nvidia.com/tesla/${nvidia_version}/NVIDIA-Linux-${nvidia_arch}-${nvidia_version}.run
+chmod a+x NVIDIA-Linux-${nvidia_arch}-${nvidia_version}.run
+./NVIDIA-Linux-${nvidia_arch}-${nvidia_version}.run -x
+NVIDIA-Linux-${nvidia_arch}-${nvidia_version}/nvidia-installer --silent --no-install-compat32-libs --no-backup --no-wine-files --install-libglvnd --dkms -k "${kmodver}"
+
diff --git a/tools/latest_fetch_cvd.sh b/tools/latest_fetch_cvd.sh
index d853786..66174cc 100755
--- a/tools/latest_fetch_cvd.sh
+++ b/tools/latest_fetch_cvd.sh
@@ -1,5 +1,19 @@
 #!/bin/bash
 
+# Copyright 2021 Google Inc. All rights reserved.
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 LATEST_BUILD_ID=`curl "https://www.googleapis.com/android/internal/build/v3/builds?branch=aosp-master&buildAttemptStatus=complete&buildType=submitted&maxResults=1&successful=true&target=aosp_cf_x86_64_phone-userdebug" 2>/dev/null | \
   python3 -c "import sys, json; print(json.load(sys.stdin)['builds'][0]['buildId'])"`
 LATEST_BUILD_URL=`curl "https://www.googleapis.com/android/internal/build/v3/builds/$LATEST_BUILD_ID/aosp_cf_x86_64_phone-userdebug/attempts/latest/artifacts/fetch_cvd/url" 2>/dev/null | \
diff --git a/tools/launch_cvd_arm64_server.sh b/tools/launch_cvd_arm64_server.sh
new file mode 100755
index 0000000..8c55eb6
--- /dev/null
+++ b/tools/launch_cvd_arm64_server.sh
@@ -0,0 +1,66 @@
+#!/bin/bash
+# Copyright 2023 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# validate number of arguments to equal 2
+if [ "$#" -ne 2 ]; then
+  echo "This script requires 2 parameters, server address and assigned base instance number"
+  exit 1
+fi
+
+if [ -z ${ANDROID_PRODUCT_OUT+x} ]; then echo "ANDROID_PRODUCT_OUT is not defined"; exit 1; fi
+if [ -z ${ANDROID_HOST_OUT+x} ]; then echo "ANDROID_HOST_OUT is not defined"; exit 1; fi
+
+set -ex
+
+# map arguments to variables
+server=$1
+base_instance_num=$2
+
+# create a temp directory to store the artifacts
+temp_dir=/tmp/cvd_dist
+rm -rf $temp_dir
+mkdir -p $temp_dir
+
+# copy and compress the artifacts to the temp directory
+ssh $server -t "mkdir -p ~/.cvd_artifact; mkdir -p ~/cvd_home"
+rsync -aSvch --recursive $ANDROID_PRODUCT_OUT --files-from=$ANDROID_PRODUCT_OUT/required_images $server:~/cvd_home --info=progress2
+
+if [ -d $ANDROID_HOST_OUT/../linux_bionic-arm64/cvd-host_package ]; then
+  echo "Use contents in cvd-host_package dir"
+  rsync -avch $ANDROID_HOST_OUT/../linux_bionic-arm64/cvd-host_package/* $server:~/cvd_home --info=progress2
+elif [ -f $ANDROID_HOST_OUT/../linux_bionic-arm64/cvd-host_package.tar.gz ]; then
+  echo "Use contents in cvd-host_package.tar.gz"
+  # re-compress with rsyncable option
+  # TODO(b/275312073): remove this if toxbox supports rsyncable
+  cd $ANDROID_HOST_OUT/../linux_bionic-arm64; pigz -d -c cvd-host_package.tar.gz | pigz -R > $temp_dir/cvd-host_package.tar.gz
+  rsync -avh $temp_dir/* $server:.cvd_artifact --info=progress2
+  ssh $server -t "cd .cvd_artifact; tar -zxvf cvd-host_package.tar.gz -C ~/cvd_home/"
+else
+  echo "There is neither cvd-host_package dir nor cvd-host_package.tar.gz"
+  exit 1
+fi
+
+web_ui_port=$((8443+$base_instance_num-1))
+adb_port=$((6520+$base_instance_num-1))
+fastboot_port=$((7520+$base_instance_num-1))
+instance_id=$(uuidgen)
+# sets up SSH port forwarding to the remote server for various ports and launch cvd instance
+# port forward rule as base_instance_num=1 in local
+ssh $server -L 8443:127.0.0.1:$web_ui_port \
+  -L 15550:127.0.0.1:15550 -L 15551:127.0.0.1:15551 -L 15552:127.0.0.1:15552 \
+  -L 15553:127.0.0.1:15553 -L 15554:127.0.0.1:15554 -L 15555:127.0.0.1:15555 \
+  -L 15556:127.0.0.1:15556 -L 15557:127.0.0.1:15557 -L 15558:127.0.0.1:15558 \
+  -L 6520:127.0.0.1:$adb_port -L 7520:127.0.0.1:$fastboot_port \
+  -t "cd cvd_home && HOME=~/cvd_home bin/launch_cvd --base_instance_num=$base_instance_num"
diff --git a/tools/raw2iso.sh b/tools/raw2iso.sh
new file mode 100755
index 0000000..399328c
--- /dev/null
+++ b/tools/raw2iso.sh
@@ -0,0 +1,214 @@
+#!/bin/bash
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+set -e
+set -u
+
+SCRIPT_DIR=$(CDPATH= cd -- "$(dirname -- "$0")" && pwd -P)
+
+usage() {
+  echo "usage: $0 [-h] -i input.raw -o output.iso"
+  exit 1
+}
+
+input=
+output=
+
+while getopts ":hi:o:" opt; do
+  case "${opt}" in
+    h)
+      usage
+      ;;
+    i)
+      input="${OPTARG}"
+      ;;
+    o)
+      output="${OPTARG}"
+      ;;
+    \?)
+      echo "Invalid option: ${OPTARG}" >&2
+      usage
+      ;;
+    :)
+      echo "Invalid option: ${OPTARG} requires an argument" >&2
+      usage
+      ;;
+  esac
+done
+
+if [[ -z "${input}" ]]; then
+  echo "Must specify input file!"
+  usage
+fi
+
+if [[ -z "${output}" ]]; then
+  echo "Must specify output file!"
+  usage
+fi
+
+grub_cmdline="ro net.ifnames=0 console=ttyAMA0 loglevel=4"
+grub_rootfs="LABEL=install"
+
+# Validate format of the input disk
+/sbin/sgdisk -p "${input}" | grep -q "Disk identifier (GUID)" || \
+  ( echo "${input} is not a GUID partitioned disk!" && exit 2 )
+partitions="$(/sbin/sgdisk -p "${input}" | \
+                grep -m1 -A2 "Number  Start (sector)" | tail -n2)"
+( IFS=$'\n'
+for line in $partitions; do
+  IFS=' ' read -r -a partition <<< "$line"
+  if [[ "${partition[0]}" = "1" && "${partition[5]}" != "EF00" ]]; then
+    echo "${input} partition 1 is not an ESP!" && exit 3
+  fi
+  if [[ "${partition[0]}" = "2" && "${partition[6]}" != "rootfs" ]]; then
+    echo "${input} partition 2 is not rootfs!" && exit 4
+  fi
+done )
+
+failure() {
+  echo "ISO generation process failed." >&2
+  rm -f "${output}"
+}
+trap failure ERR
+
+mount=$(mktemp -d)
+mount_remove() {
+  rmdir "${mount}"
+}
+trap mount_remove EXIT
+
+workdir=$(mktemp -d)
+workdir_remove() {
+  rm -rf "${workdir}"
+  mount_remove
+}
+trap workdir_remove EXIT
+
+# Build a grub.cfg for CD booting
+cat >"${workdir}"/grub.cfg <<EOF
+set timeout=0
+menuentry "Linux" {
+  linux /vmlinuz ${grub_cmdline} root=${grub_rootfs} init=/bin/sh
+  initrd /initrd.img
+}
+EOF
+
+# Build harddisk install script
+cat >"${workdir}"/install.sh << EOF
+#!/bin/sh
+set -e
+set -u
+SCRIPT_DIR=\$(CDPATH= cd -- "\$(dirname -- "\${0}")" && pwd -P)
+if [ "\${1#*nvme}" != "\${1}" ]; then
+  partition=p
+else
+  partition=
+fi
+sgdisk --load-backup="\${SCRIPT_DIR}"/gpt.img \${1}
+sgdisk --delete=2 \${1}
+sgdisk --new=2:129M:0 --typecode=2:8305 --change-name=2:rootfs --attributes=2:set:2 \${1}
+partx -v --update \${1}
+dd if="\${SCRIPT_DIR}"/esp.img of=\${1}\${partition}1 bs=16M
+mkfs.ext4 -L ROOT -U \$(cat \${SCRIPT_DIR}/rootfs_uuid) \${1}\${partition}2
+mount \${1}\${partition}2 /media
+tar -C /media -Spxf \${SCRIPT_DIR}/rootfs.tar.lz4
+umount /media
+EOF
+chmod a+x "${workdir}"/install.sh
+
+# Back up the GPT so we can restore it when installing
+/sbin/sgdisk --backup="${workdir}"/gpt.img "${input}" >/dev/null
+
+loopfile="$(/sbin/losetup -f)"
+sudo losetup -P "${loopfile}" "${input}"
+loopdev_remove() {
+  sudo losetup -d "${loopfile}"
+  workdir_remove
+}
+trap loopdev_remove EXIT
+
+# Back up the ESP so we can restore it when installing
+touch "${workdir}"/esp.img
+sudo dd if="${loopfile}p1" of="${workdir}"/esp.img status=none >/dev/null
+
+# Determine the architecture of the disk from the portable GRUB image path
+sudo mount "${loopfile}p1" "${mount}"
+unmount() {
+  sudo umount "${mount}"
+  loopdev_remove
+}
+trap unmount EXIT
+grub_blob=$(cd "${mount}" && echo EFI/Boot/*)
+case "${grub_blob}" in
+  EFI/Boot/BOOTAA64.EFI)
+    grub_arch=arm64-efi
+    grub_cd=gcdaa64.efi
+    ;;
+  EFI/Boot/BOOTIA64.EFI)
+    grub_arch=x86_64-efi
+    grub_cd=gcdx64.efi
+    ;;
+  *)
+    echo "Unknown GRUB architecture for ${grub_blob}!"
+    exit 5
+    ;;
+esac
+sudo umount "${mount}"
+trap loopdev_remove EXIT
+
+# Mount original rootfs and remove previous patching, then tar
+rootfs_uuid=$(sudo blkid -s UUID -o value "${loopfile}p2")
+sudo mount "${loopfile}p2" "${mount}"
+trap unmount EXIT
+sudo rm -f "${mount}"/root/esp.img "${mount}"/root/gpt.img
+sudo rm -f "${mount}"/root/rootfs.tar.lz4
+sudo rm -f "${mount}"/root/rootfs_uuid
+sudo rm -f "${mount}"/boot/grub/eltorito.img
+sudo rm -f "${mount}"/boot/grub/${grub_arch}/grub.cfg
+sudo rm -rf "${mount}"/tmp/*
+sudo rm -rf "${mount}"/var/tmp/*
+( cd "${mount}" && sudo tar -Szcpf "${workdir}"/rootfs.tar.lz4 * )
+
+# Prepare a new ESP for the ISO's El Torito image
+mkdir -p "${workdir}/EFI/Boot"
+cp "${mount}/usr/lib/grub/${grub_arch}/monolithic/${grub_cd}" \
+  "${workdir}/${grub_blob}"
+truncate -s 4M "${workdir}"/eltorito.img
+/sbin/mkfs.msdos -n SYSTEM -F 12 -M 0xf8 -h 0 -s 4 -g 64/32 -S 512 \
+  "${workdir}"/eltorito.img >/dev/null
+mmd -i "${workdir}"/eltorito.img EFI EFI/Boot
+mcopy -o -i "${workdir}"/eltorito.img -s "${workdir}/EFI" ::
+
+# Build ISO from rootfs
+sudo cp "${workdir}"/esp.img "${workdir}"/gpt.img "${mount}"/root
+sudo cp "${workdir}"/rootfs.tar.lz4 "${workdir}"/install.sh "${mount}"/root
+echo -n "${rootfs_uuid}" | sudo tee "${mount}"/root/rootfs_uuid >/dev/null
+sudo cp "${workdir}"/eltorito.img "${mount}"/boot/grub
+sudo cp "${workdir}"/grub.cfg "${mount}"/boot/grub/${grub_arch}/grub.cfg
+sudo chown root:root \
+  "${mount}"/root/esp.img "${mount}"/root/gpt.img \
+  "${mount}"/boot/grub/eltorito.img \
+  "${mount}"/boot/grub/${grub_arch}/grub.cfg
+rm -f "${output}"
+touch "${output}"
+sudo xorriso \
+  -as mkisofs -r -checksum_algorithm_iso sha256,sha512 -V install "${mount}" \
+  -o "${output}" -e boot/grub/eltorito.img -no-emul-boot \
+  -append_partition 2 0xef "${workdir}"/eltorito.img \
+  -partition_cyl_align all
+
+echo "Output ISO generated at '${output}'."
diff --git a/tools/remove_old_gce_kernel.sh b/tools/remove_old_gce_kernel.sh
index c7d52a1..191b6d0 100755
--- a/tools/remove_old_gce_kernel.sh
+++ b/tools/remove_old_gce_kernel.sh
@@ -1,7 +1,20 @@
 #!/bin/bash
 
+# Copyright 2022 Google Inc. All rights reserved.
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 set -x
 set -o errexit
 
-sudo apt --purge -y remove linux-image-5.10.0-10-cloud-amd64
 sudo update-grub2
diff --git a/tools/update_gce_kernel.sh b/tools/update_gce_kernel.sh
index d77cc3c..2075a68 100755
--- a/tools/update_gce_kernel.sh
+++ b/tools/update_gce_kernel.sh
@@ -1,5 +1,19 @@
 #!/bin/bash
 
+# Copyright 2022 Google Inc. All rights reserved.
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 set -x
 set -o errexit
 
diff --git a/tools/upload_to_gce_and_run.py b/tools/upload_to_gce_and_run.py
index db78340..221ce53 100755
--- a/tools/upload_to_gce_and_run.py
+++ b/tools/upload_to_gce_and_run.py
@@ -1,4 +1,19 @@
 #!/usr/bin/python
+#
+# Copyright 2018 The Android Open-Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
 
 """Upload a local build to Google Compute Engine and run it."""
 
diff --git a/tools/upload_via_ssh.py b/tools/upload_via_ssh.py
index 2b5cfd1..05609f9 100755
--- a/tools/upload_via_ssh.py
+++ b/tools/upload_via_ssh.py
@@ -1,4 +1,19 @@
 #!/usr/bin/python
+#
+# Copyright 2018 The Android Open-Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
 
 """Upload a local build to Google Compute Engine and run it."""
 
diff --git a/tools/vlan_prototype_down.sh b/tools/vlan_prototype_down.sh
deleted file mode 100755
index 1db2bf9..0000000
--- a/tools/vlan_prototype_down.sh
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/bin/bash
-
-# Delete the host networks for the VLAN prototype.
-# Runs as root.
-# Use at your own risk.
-
-delete_interface() {
-	bridge="$(printf cvd-v${1}br-%02d $2)"
-	tap="$(printf cvd-${1}vlan-%02d $2)"
-	network="${3}.$((4*$2 - 4))/30"
-
-	/sbin/ifconfig "${tap}" down
-	ip link delete "${tap}"
-
-	if [ -f /var/run/cuttlefish-dnsmasq-"${bridge}".pid ]; then
-		kill $(cat /var/run/cuttlefish-dnsmasq-"${bridge}".pid)
-	fi
-
-	iptables -t nat -D POSTROUTING -s "${network}" -j MASQUERADE
-
-	/sbin/ifconfig "${bridge}" down
-	/sbin/brctl delbr "${bridge}"
-}
-
-delete_interface w 1 192.168.93
-delete_interface m 1 192.168.94
-delete_interface i 1 192.168.95
-
-ip link delete cvd-net-01
diff --git a/tools/vlan_prototype_up.sh b/tools/vlan_prototype_up.sh
deleted file mode 100755
index fbeef40..0000000
--- a/tools/vlan_prototype_up.sh
+++ /dev/null
@@ -1,44 +0,0 @@
-#!/bin/bash
-
-# Create the host networks for the VLAN prototype.
-# Runs as root.
-# Use at your own risk.
-
-create_interface() {
-	bridge="$(printf cvd-v${1}br-%02d $2)"
-	tap="$(printf cvd-${1}vlan-%02d $2)"
-	gateway="${3}.$((4*$2 - 3))"
-	network="${3}.$((4*$2 - 4))/30"
-	netmask="255.255.255.252"
-	dhcp_range="${3}.$((4*$2 - 2)),${3}.$((4*$2 - 2))"
-
-	/sbin/brctl addbr "${bridge}"
-	/sbin/brctl stp "${bridge}" off
-	/sbin/brctl setfd "${bridge}" 0
-	/sbin/ifconfig "${bridge}" "${gateway}" netmask "${netmask}" up
-
-	iptables -t nat -A POSTROUTING -s "${network}" -j MASQUERADE
-
-	dnsmasq \
-	--strict-order \
-	--except-interface=lo \
-	--interface="${bridge}" \
-	--listen-address="${gateway}" \
-	--bind-interfaces \
-	--dhcp-range="${dhcp_range}" \
-	--conf-file="" \
-	--pid-file=/var/run/cuttlefish-dnsmasq-"${bridge}".pid \
-	--dhcp-leasefile=/var/run/cuttlefish-dnsmasq-"${bridge}".leases \
-	--dhcp-no-override
-
-	ip link add link cvd-net-01 name "${tap}" type vlan id ${4}
-	/sbin/ifconfig "${tap}" 0.0.0.0 up
-	/sbin/brctl addif "${bridge}" "${tap}"
-}
-
-ip tuntap add dev cvd-net-01 mode tap group cvdnetwork
-ifconfig cvd-net-01 0.0.0.0 up
-
-create_interface w 1 192.168.93 11
-create_interface m 1 192.168.94 12
-create_interface i 1 192.168.95 13
diff --git a/vsoc_arm64/BoardConfig.mk b/vsoc_arm64/BoardConfig.mk
index bc016a8..97a8287 100644
--- a/vsoc_arm64/BoardConfig.mk
+++ b/vsoc_arm64/BoardConfig.mk
@@ -18,8 +18,6 @@
 # arm64 target for Cuttlefish
 #
 
--include device/google/cuttlefish/shared/BoardConfig.mk
-
 TARGET_BOARD_PLATFORM := vsoc_arm64
 TARGET_ARCH := arm64
 TARGET_ARCH_VARIANT := armv8-a
@@ -32,10 +30,13 @@
 TARGET_2ND_CPU_VARIANT := cortex-a53
 TARGET_TRANSLATE_2ND_ARCH := false
 
-ifeq ($(BOARD_VENDOR_RAMDISK_KERNEL_MODULES),)
-    BOARD_VENDOR_RAMDISK_KERNEL_MODULES += $(wildcard kernel/prebuilts/common-modules/virtual-device/$(TARGET_KERNEL_USE)/arm64/*.ko)
-endif
-
 HOST_CROSS_OS := linux_bionic
 HOST_CROSS_ARCH := arm64
 HOST_CROSS_2ND_ARCH :=
+
+-include device/google/cuttlefish/shared/BoardConfig.mk
+-include device/google/cuttlefish/shared/camera/BoardConfig.mk
+-include device/google/cuttlefish/shared/graphics/BoardConfig.mk
+-include device/google/cuttlefish/shared/swiftshader/BoardConfig.mk
+-include device/google/cuttlefish/shared/telephony/BoardConfig.mk
+-include device/google/cuttlefish/shared/virgl/BoardConfig.mk
diff --git a/vsoc_arm64/kernel.mk b/vsoc_arm64/kernel.mk
index dca198b..710db12 100644
--- a/vsoc_arm64/kernel.mk
+++ b/vsoc_arm64/kernel.mk
@@ -13,7 +13,4 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-TARGET_KERNEL_USE ?= 5.15
-TARGET_KERNEL_PATH ?= kernel/prebuilts/$(TARGET_KERNEL_USE)/arm64/kernel-$(TARGET_KERNEL_USE)
-
-PRODUCT_COPY_FILES += $(TARGET_KERNEL_PATH):kernel
+# This file is deprecated.
diff --git a/vsoc_arm64/phone/aosp_cf.mk b/vsoc_arm64/phone/aosp_cf.mk
index 86fe29e..9072dbc 100644
--- a/vsoc_arm64/phone/aosp_cf.mk
+++ b/vsoc_arm64/phone/aosp_cf.mk
@@ -37,8 +37,6 @@
 # All components inherited here go to vendor image
 #
 $(call inherit-product, device/google/cuttlefish/shared/phone/device_vendor.mk)
-# TODO(b/205788876) remove this when openwrt has an image for arm.
-PRODUCT_ENFORCE_MAC80211_HWSIM := false
 
 # Nested virtualization support
 $(call inherit-product, packages/modules/Virtualization/apex/product_packages.mk)
@@ -46,7 +44,6 @@
 #
 # Special settings for the target
 #
-$(call inherit-product, device/google/cuttlefish/vsoc_arm64/kernel.mk)
 $(call inherit-product, device/google/cuttlefish/vsoc_arm64/bootloader.mk)
 
 PRODUCT_NAME := aosp_cf_arm64_phone
diff --git a/vsoc_arm64_minidroid/BoardConfig.mk b/vsoc_arm64_minidroid/BoardConfig.mk
new file mode 100644
index 0000000..30b167e
--- /dev/null
+++ b/vsoc_arm64_minidroid/BoardConfig.mk
@@ -0,0 +1,31 @@
+#
+# Copyright 2017 The Android Open-Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#
+# arm64 (64-bit only) target for Cuttlefish
+#
+
+TARGET_BOARD_PLATFORM := vsoc_arm64
+TARGET_ARCH := arm64
+TARGET_ARCH_VARIANT := armv8-a
+TARGET_CPU_ABI := arm64-v8a
+TARGET_CPU_VARIANT := cortex-a53
+
+HOST_CROSS_OS := linux_bionic
+HOST_CROSS_ARCH := arm64
+HOST_CROSS_2ND_ARCH :=
+
+-include device/google/cuttlefish/shared/minidroid/BoardConfig.mk
diff --git a/vsoc_arm64_minidroid/aosp_cf.mk b/vsoc_arm64_minidroid/aosp_cf.mk
new file mode 100644
index 0000000..0b447a6
--- /dev/null
+++ b/vsoc_arm64_minidroid/aosp_cf.mk
@@ -0,0 +1,29 @@
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit_only.mk)
+$(call inherit-product, device/google/cuttlefish/shared/minidroid/device.mk)
+
+$(call inherit-product, device/google/cuttlefish/vsoc_arm64/bootloader.mk)
+
+PRODUCT_NAME := aosp_cf_arm64_minidroid
+PRODUCT_DEVICE := vsoc_arm64_minidroid
+PRODUCT_MANUFACTURER := Google
+PRODUCT_MODEL := Cuttlefish arm64 minidroid
+
+PRODUCT_VENDOR_PROPERTIES += \
+    ro.soc.manufacturer=$(PRODUCT_MANUFACTURER) \
+    ro.soc.model=$(PRODUCT_DEVICE)
diff --git a/vsoc_arm64_only/BoardConfig.mk b/vsoc_arm64_only/BoardConfig.mk
index eede683..74e81091 100644
--- a/vsoc_arm64_only/BoardConfig.mk
+++ b/vsoc_arm64_only/BoardConfig.mk
@@ -18,8 +18,6 @@
 # arm64 (64-bit only) target for Cuttlefish
 #
 
--include device/google/cuttlefish/shared/BoardConfig.mk
-
 TARGET_BOARD_PLATFORM := vsoc_arm64
 TARGET_ARCH := arm64
 TARGET_ARCH_VARIANT := armv8-a
@@ -27,8 +25,14 @@
 TARGET_CPU_VARIANT := cortex-a53
 
 AUDIOSERVER_MULTILIB := first
-BOARD_VENDOR_RAMDISK_KERNEL_MODULES += $(wildcard kernel/prebuilts/common-modules/virtual-device/$(TARGET_KERNEL_USE)/arm64/*.ko)
 
 HOST_CROSS_OS := linux_bionic
 HOST_CROSS_ARCH := arm64
 HOST_CROSS_2ND_ARCH :=
+
+-include device/google/cuttlefish/shared/BoardConfig.mk
+-include device/google/cuttlefish/shared/camera/BoardConfig.mk
+-include device/google/cuttlefish/shared/graphics/BoardConfig.mk
+-include device/google/cuttlefish/shared/swiftshader/BoardConfig.mk
+-include device/google/cuttlefish/shared/telephony/BoardConfig.mk
+-include device/google/cuttlefish/shared/virgl/BoardConfig.mk
diff --git a/vsoc_arm64_only/auto/OWNERS b/vsoc_arm64_only/auto/OWNERS
index 1b990ef..f97912a 100644
--- a/vsoc_arm64_only/auto/OWNERS
+++ b/vsoc_arm64_only/auto/OWNERS
@@ -1,5 +1,4 @@
 # Android Auto leads
+include platform/packages/services/Car:/OWNERS
 ankitarora@google.com
 egranata@google.com
-gurunagarajan@google.com
-keunyoung@google.com
diff --git a/vsoc_arm64_only/auto/aosp_cf.mk b/vsoc_arm64_only/auto/aosp_cf.mk
index 33a8624..6c4b863 100644
--- a/vsoc_arm64_only/auto/aosp_cf.mk
+++ b/vsoc_arm64_only/auto/aosp_cf.mk
@@ -31,6 +31,7 @@
 # All components inherited here go to system_ext image
 #
 $(call inherit-product, $(SRC_TARGET_DIR)/product/base_system_ext.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/telephony_system_ext.mk)
 
 #
 # All components inherited here go to product image
@@ -40,16 +41,11 @@
 #
 # All components inherited here go to vendor image
 #
-LOCAL_DISABLE_OMX := true
 $(call inherit-product, device/google/cuttlefish/shared/auto/device_vendor.mk)
 
-# TODO(b/205788876) remove this when openwrt has an image for arm.
-PRODUCT_ENFORCE_MAC80211_HWSIM := false
-
 #
 # Special settings for the target
 #
-$(call inherit-product, device/google/cuttlefish/vsoc_arm64/kernel.mk)
 $(call inherit-product, device/google/cuttlefish/vsoc_arm64/bootloader.mk)
 
 # Exclude features that are not available on AOSP devices.
diff --git a/vsoc_arm64_only/phone/aosp_cf.mk b/vsoc_arm64_only/phone/aosp_cf.mk
index 0da151f..ae07b68 100644
--- a/vsoc_arm64_only/phone/aosp_cf.mk
+++ b/vsoc_arm64_only/phone/aosp_cf.mk
@@ -36,19 +36,14 @@
 #
 # All components inherited here go to vendor image
 #
-LOCAL_DISABLE_OMX := true
 $(call inherit-product, device/google/cuttlefish/shared/phone/device_vendor.mk)
 
-# TODO(b/205788876) remove this when openwrt has an image for arm.
-PRODUCT_ENFORCE_MAC80211_HWSIM := false
-
 # Nested virtualization support
 $(call inherit-product, packages/modules/Virtualization/apex/product_packages.mk)
 
 #
 # Special settings for the target
 #
-$(call inherit-product, device/google/cuttlefish/vsoc_arm64/kernel.mk)
 $(call inherit-product, device/google/cuttlefish/vsoc_arm64/bootloader.mk)
 
 # Exclude features that are not available on AOSP devices.
diff --git a/vsoc_arm64_only/phone/aosp_cf_fullmte.mk b/vsoc_arm64_only/phone/aosp_cf_fullmte.mk
new file mode 100644
index 0000000..5d72902
--- /dev/null
+++ b/vsoc_arm64_only/phone/aosp_cf_fullmte.mk
@@ -0,0 +1,20 @@
+#
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+$(call inherit-product, device/google/cuttlefish/vsoc_arm64_only/phone/aosp_cf.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/fullmte.mk)
+
+PRODUCT_NAME := aosp_cf_arm64_phone_fullmte
diff --git a/vsoc_arm64_only/slim/aosp_cf.mk b/vsoc_arm64_only/slim/aosp_cf.mk
index 8699275..d118163 100644
--- a/vsoc_arm64_only/slim/aosp_cf.mk
+++ b/vsoc_arm64_only/slim/aosp_cf.mk
@@ -37,17 +37,12 @@
 #
 # All components inherited here go to vendor image
 #
-LOCAL_DISABLE_OMX := true
 LOCAL_PREFER_VENDOR_APEX := true
 $(call inherit-product, device/google/cuttlefish/shared/slim/device_vendor.mk)
 
-# TODO(b/205788876) remove this when openwrt has an image for arm.
-PRODUCT_ENFORCE_MAC80211_HWSIM := false
-
 #
 # Special settings for the target
 #
-$(call inherit-product, device/google/cuttlefish/vsoc_arm64/kernel.mk)
 $(call inherit-product, device/google/cuttlefish/vsoc_arm64/bootloader.mk)
 
 # Exclude features that are not available on AOSP devices.
diff --git a/vsoc_arm_minidroid/BoardConfig.mk b/vsoc_arm_minidroid/BoardConfig.mk
new file mode 100644
index 0000000..b1130f8
--- /dev/null
+++ b/vsoc_arm_minidroid/BoardConfig.mk
@@ -0,0 +1,39 @@
+#
+# Copyright 2017 The Android Open-Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#
+# arm (32-bit only) target for Cuttlefish
+#
+
+TARGET_BOARD_PLATFORM := vsoc_arm_only
+TARGET_ARCH := arm
+TARGET_ARCH_VARIANT := armv7-a-neon
+TARGET_CPU_ABI := armeabi-v7a
+TARGET_CPU_ABI2 := armeabi
+TARGET_CPU_VARIANT := cortex-a32
+
+HOST_CROSS_OS := linux_bionic
+HOST_CROSS_ARCH := arm64
+HOST_CROSS_2ND_ARCH :=
+
+TARGET_USES_64_BIT_BINDER := true
+
+TARGET_KERNEL_ARCH ?= $(TARGET_ARCH)
+TARGET_KERNEL_USE ?= mainline
+KERNEL_MODULES_PATH := device/google/cuttlefish_prebuilts/kernel/$(TARGET_KERNEL_USE)-$(TARGET_KERNEL_ARCH)
+TARGET_KERNEL_PATH := $(KERNEL_MODULES_PATH)/kernel-$(TARGET_KERNEL_USE)
+
+-include device/google/cuttlefish/shared/minidroid/BoardConfig.mk
diff --git a/vsoc_arm_minidroid/aosp_cf.mk b/vsoc_arm_minidroid/aosp_cf.mk
new file mode 100644
index 0000000..1555160
--- /dev/null
+++ b/vsoc_arm_minidroid/aosp_cf.mk
@@ -0,0 +1,28 @@
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+$(call inherit-product, device/google/cuttlefish/shared/minidroid/device.mk)
+
+$(call inherit-product, device/google/cuttlefish/vsoc_arm_minidroid/bootloader.mk)
+
+PRODUCT_NAME := aosp_cf_arm_minidroid
+PRODUCT_DEVICE := vsoc_arm_minidroid
+PRODUCT_MANUFACTURER := Google
+PRODUCT_MODEL := Cuttlefish arm minidroid
+
+PRODUCT_VENDOR_PROPERTIES += \
+    ro.soc.manufacturer=$(PRODUCT_MANUFACTURER) \
+    ro.soc.model=$(PRODUCT_DEVICE)
diff --git a/vsoc_arm_only/bootloader.mk b/vsoc_arm_minidroid/bootloader.mk
similarity index 100%
rename from vsoc_arm_only/bootloader.mk
rename to vsoc_arm_minidroid/bootloader.mk
diff --git a/vsoc_arm_only/BoardConfig.mk b/vsoc_arm_only/BoardConfig.mk
deleted file mode 100644
index 54c656c..0000000
--- a/vsoc_arm_only/BoardConfig.mk
+++ /dev/null
@@ -1,34 +0,0 @@
-#
-# Copyright 2020 The Android Open-Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-#
-# arm (32-bit only) target for Cuttlefish
-#
-
--include device/google/cuttlefish/shared/BoardConfig.mk
-
-TARGET_BOARD_PLATFORM := vsoc_arm
-TARGET_ARCH := arm
-TARGET_ARCH_VARIANT := armv7-a-neon
-TARGET_CPU_ABI := armeabi-v7a
-TARGET_CPU_ABI2 := armeabi
-TARGET_CPU_VARIANT := cortex-a15
-
-BOARD_VENDOR_RAMDISK_KERNEL_MODULES += $(wildcard device/google/cuttlefish_prebuilts/kernel/$(TARGET_KERNEL_USE)-arm/*.ko)
-
-HOST_CROSS_OS := linux_bionic
-HOST_CROSS_ARCH := arm64
-HOST_CROSS_2ND_ARCH :=
diff --git a/vsoc_arm_only/kernel.mk b/vsoc_arm_only/kernel.mk
deleted file mode 100644
index a216444..0000000
--- a/vsoc_arm_only/kernel.mk
+++ /dev/null
@@ -1,19 +0,0 @@
-#
-# Copyright (C) 2020 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-TARGET_KERNEL_USE ?= mainline
-TARGET_KERNEL_PATH ?= device/google/cuttlefish_prebuilts/kernel/$(TARGET_KERNEL_USE)-arm/kernel-$(TARGET_KERNEL_USE)
-
-PRODUCT_COPY_FILES += $(TARGET_KERNEL_PATH):kernel
diff --git a/vsoc_arm_only/phone/aosp_cf.mk b/vsoc_arm_only/phone/aosp_cf.mk
deleted file mode 100644
index 751c503..0000000
--- a/vsoc_arm_only/phone/aosp_cf.mk
+++ /dev/null
@@ -1,78 +0,0 @@
-#
-# Copyright (C) 2020 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-#
-# All components inherited here go to system image (same as GSI system)
-#
-$(call inherit-product, $(SRC_TARGET_DIR)/product/generic_system.mk)
-
-PRODUCT_ENFORCE_ARTIFACT_PATH_REQUIREMENTS := relaxed
-
-#
-# All components inherited here go to system_ext image (same as GSI system_ext)
-#
-$(call inherit-product, $(SRC_TARGET_DIR)/product/handheld_system_ext.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/product/telephony_system_ext.mk)
-
-#
-# All components inherited here go to product image (same as GSI product)
-#
-$(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_product.mk)
-PRODUCT_OTA_ENFORCE_VINTF_KERNEL_REQUIREMENTS := false
-
-#
-# FIXME: Set up Go defaults because we are currently limited (by a U-Boot bug)
-#        to 512MB of RAM
-#
-$(call inherit-product, $(SRC_TARGET_DIR)/product/go_defaults_512.mk)
-PRODUCT_ARTIFACT_PATH_REQUIREMENT_ALLOWED_LIST += \
-    system/apex/com.android.tethering.inprocess.apex \
-    system/apex/com.android.tethering.inprocess.capex \
-    system/app/PlatformCaptivePortalLogin/PlatformCaptivePortalLogin.apk \
-    system/priv-app/CellBroadcastServiceModulePlatform/CellBroadcastServiceModulePlatform.apk \
-    system/priv-app/InProcessNetworkStack/InProcessNetworkStack.apk \
-
-#
-# All components inherited here go to vendor image
-#
-$(call inherit-product, device/google/cuttlefish/shared/phone/device_vendor.mk)
-
-# TODO(b/205788876) remove this when openwrt has an image for arm.
-PRODUCT_ENFORCE_MAC80211_HWSIM := false
-
-#
-# Special settings for the target
-#
-$(call inherit-product, device/google/cuttlefish/vsoc_arm_only/kernel.mk)
-$(call inherit-product, device/google/cuttlefish/vsoc_arm_only/bootloader.mk)
-
-# Exclude features that are not available on AOSP devices.
-PRODUCT_COPY_FILES += \
-    frameworks/native/data/etc/aosp_excluded_hardware.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/aosp_excluded_hardware.xml
-
-PRODUCT_NAME := aosp_cf_arm_only_phone
-PRODUCT_DEVICE := vsoc_arm_only
-PRODUCT_MANUFACTURER := Google
-PRODUCT_MODEL := Cuttlefish arm phone 32-bit only
-
-PRODUCT_VENDOR_PROPERTIES += \
-    ro.config.low_ram=true \
-    ro.soc.manufacturer=$(PRODUCT_MANUFACTURER) \
-    ro.soc.model=$(PRODUCT_DEVICE)
-
-TARGET_SYSTEM_PROP += \
-    build/make/target/board/go_defaults_512.prop \
-    build/make/target/board/go_defaults_common.prop
diff --git a/vsoc_riscv64/BoardConfig.mk b/vsoc_riscv64/BoardConfig.mk
new file mode 100644
index 0000000..3dcc6a9
--- /dev/null
+++ b/vsoc_riscv64/BoardConfig.mk
@@ -0,0 +1,45 @@
+#
+# Copyright 2022 The Android Open-Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#
+# risv64 (64-bit only) target for Cuttlefish
+#
+
+TARGET_BOARD_PLATFORM := vsoc_riscv64
+TARGET_ARCH := riscv64
+TARGET_ARCH_VARIANT :=
+TARGET_CPU_VARIANT := generic
+TARGET_CPU_ABI := riscv64
+
+AUDIOSERVER_MULTILIB := first
+
+# Include 64-bit mediaserver to support 64-bit only devices
+TARGET_DYNAMIC_64_32_MEDIASERVER := true
+
+# Temporary hack while prebuilt modules are missing riscv64.
+ALLOW_MISSING_DEPENDENCIES := true
+
+TARGET_KERNEL_ARCH ?= $(TARGET_ARCH)
+TARGET_KERNEL_USE ?= mainline
+KERNEL_MODULES_PATH := device/google/cuttlefish_prebuilts/kernel/$(TARGET_KERNEL_USE)-$(TARGET_KERNEL_ARCH)
+TARGET_KERNEL_PATH := $(KERNEL_MODULES_PATH)/kernel-$(TARGET_KERNEL_USE)
+# FIXME: system_dlkm should be specified as well
+
+-include device/google/cuttlefish/shared/BoardConfig.mk
+-include device/google/cuttlefish/shared/camera/BoardConfig.mk
+-include device/google/cuttlefish/shared/graphics/BoardConfig.mk
+-include device/google/cuttlefish/shared/telephony/BoardConfig.mk
+-include device/google/cuttlefish/shared/virgl/BoardConfig.mk
diff --git a/vsoc_riscv64/OWNERS b/vsoc_riscv64/OWNERS
new file mode 100644
index 0000000..16a0fc8
--- /dev/null
+++ b/vsoc_riscv64/OWNERS
@@ -0,0 +1,3 @@
+prashanthsw@google.com
+plabatut@google.com
+nellyv@google.com
diff --git a/vsoc_riscv64/bootloader.mk b/vsoc_riscv64/bootloader.mk
new file mode 100644
index 0000000..427531b
--- /dev/null
+++ b/vsoc_riscv64/bootloader.mk
@@ -0,0 +1,20 @@
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+TARGET_NO_BOOTLOADER := false
+# Only QEMU is supported for now
+BOARD_PREBUILT_BOOTLOADER := \
+    device/google/cuttlefish_prebuilts/bootloader/qemu_riscv64/u-boot.bin
diff --git a/vsoc_riscv64/phone/aosp_cf.mk b/vsoc_riscv64/phone/aosp_cf.mk
new file mode 100644
index 0000000..0eff063
--- /dev/null
+++ b/vsoc_riscv64/phone/aosp_cf.mk
@@ -0,0 +1,134 @@
+#
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#
+# All components inherited here go to system image (same as GSI system)
+#
+$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit_only.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/generic_system.mk)
+
+PRODUCT_ENFORCE_ARTIFACT_PATH_REQUIREMENTS := relaxed
+
+# TODO: FIXME: Start workaround for generic_system.mk ########################
+
+# TODO(b/271573990): It is currently required that dexpreopt be enabled for
+# userdebug builds, but dexpreopt is not yet supported for this architecture.
+# In the interim, this flag allows us to indicate that we cannot run dex2oat
+# to build the ART boot image. Once the requirement is relaxed or support
+# is enabled for this architecture, this flag can be removed.
+PRODUCT_USES_DEFAULT_ART_CONFIG := false
+
+# TODO: FIXME: Stop workaround for generic_system.mk #########################
+
+#
+# All components inherited here go to system_ext image (same as GSI system_ext)
+#
+$(call inherit-product, $(SRC_TARGET_DIR)/product/handheld_system_ext.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/telephony_system_ext.mk)
+
+#
+# All components inherited here go to product image (same as GSI product)
+#
+$(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_product.mk)
+
+#
+# All components inherited here go to vendor image
+#
+LOCAL_PREFER_VENDOR_APEX := true
+#$(call inherit-product, device/google/cuttlefish/shared/phone/device_vendor.mk)
+
+# TODO: FIXME: Start workaround for phone/device_vendor.mk ####################
+PRODUCT_MANIFEST_FILES += device/google/cuttlefish/shared/config/product_manifest.xml
+SYSTEM_EXT_MANIFEST_FILES += device/google/cuttlefish/shared/config/system_ext_manifest.xml
+
+$(call inherit-product, $(SRC_TARGET_DIR)/product/handheld_vendor.mk)
+
+ifneq ($(LOCAL_PREFER_VENDOR_APEX),true)
+PRODUCT_COPY_FILES += \
+    frameworks/native/data/etc/handheld_core_hardware.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/handheld_core_hardware.xml
+endif
+
+$(call inherit-product, frameworks/native/build/phone-xhdpi-2048-dalvik-heap.mk)
+$(call inherit-product, device/google/cuttlefish/shared/camera/device_vendor.mk)
+$(call inherit-product, device/google/cuttlefish/shared/graphics/device_vendor.mk)
+# TODO: FIXME: Enable swiftshader for graphics.
+#$(call inherit-product, device/google/cuttlefish/shared/swiftshader/device_vendor.mk)
+$(call inherit-product, device/google/cuttlefish/shared/telephony/device_vendor.mk)
+$(call inherit-product, device/google/cuttlefish/shared/virgl/device_vendor.mk)
+$(call inherit-product, device/google/cuttlefish/shared/device.mk)
+
+TARGET_PRODUCT_PROP := $(LOCAL_PATH)/../../shared/phone/product.prop
+
+ifneq ($(LOCAL_PREFER_VENDOR_APEX),true)
+PRODUCT_COPY_FILES += \
+    frameworks/native/data/etc/android.hardware.biometrics.face.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.hardware.biometrics.face.xml \
+    frameworks/native/data/etc/android.hardware.faketouch.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.hardware.faketouch.xml \
+    frameworks/native/data/etc/android.hardware.fingerprint.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.hardware.fingerprint.xml \
+
+endif
+
+# Runtime Resource Overlays
+ifeq ($(LOCAL_PREFER_VENDOR_APEX),true)
+PRODUCT_PACKAGES += com.google.aosp_cf_phone.rros
+else
+PRODUCT_PACKAGES += cuttlefish_phone_overlay_frameworks_base_core
+endif
+
+TARGET_BOARD_INFO_FILE ?= device/google/cuttlefish/shared/phone/android-info.txt
+# TODO: FIXME: Stop workaround for phone/device_vendor.mk #####################
+
+# TODO: Nested virtualization support
+# $(call inherit-product, packages/modules/Virtualization/apex/product_packages.mk)
+
+#
+# Special settings for the target
+#
+$(call inherit-product, device/google/cuttlefish/vsoc_riscv64/bootloader.mk)
+
+# Exclude features that are not available on AOSP devices.
+ifeq ($(LOCAL_PREFER_VENDOR_APEX),true)
+PRODUCT_PACKAGES += com.google.aosp_cf_phone.hardware.core_permissions
+else
+PRODUCT_COPY_FILES += \
+    frameworks/native/data/etc/aosp_excluded_hardware.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/aosp_excluded_hardware.xml
+endif
+
+# TODO(b/206676167): This property can be removed when renderscript is removed.
+# Prevents framework from attempting to load renderscript libraries, which are
+# not supported on this architecture.
+PRODUCT_SYSTEM_PROPERTIES += \
+    config.disable_renderscript=1 \
+
+# TODO(b/271573990): This property can be removed when ART support for JIT on
+# this architecture is available. This is an override as the original property
+# is defined in runtime_libart.mk.
+PRODUCT_PROPERTY_OVERRIDES += \
+    dalvik.vm.usejit=false
+
+PRODUCT_NAME := aosp_cf_riscv64_phone
+PRODUCT_DEVICE := vsoc_riscv64
+PRODUCT_MANUFACTURER := Google
+PRODUCT_MODEL := Cuttlefish riscv64 phone
+
+# Window sidecar and extensions to enhance activity embedding, multi-display,
+# tablet, and foldable support.
+PRODUCT_PACKAGES += \
+    androidx.window.extensions \
+    androidx.window.sidecar \
+
+PRODUCT_VENDOR_PROPERTIES += \
+    ro.soc.manufacturer=$(PRODUCT_MANUFACTURER) \
+    ro.soc.model=$(PRODUCT_DEVICE)
diff --git a/vsoc_riscv64/slim/aosp_cf.mk b/vsoc_riscv64/slim/aosp_cf.mk
new file mode 100644
index 0000000..98d6c47
--- /dev/null
+++ b/vsoc_riscv64/slim/aosp_cf.mk
@@ -0,0 +1,110 @@
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#
+# All components inherited here go to system image (same as GSI system)
+#
+$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit_only.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/generic_system.mk)
+
+PRODUCT_ENFORCE_ARTIFACT_PATH_REQUIREMENTS := relaxed
+
+# TODO: FIXME: Start workaround for generic_system.mk ########################
+
+# TODO(b/271573990): It is currently required that dexpreopt be enabled for
+# userdebug builds, but dexpreopt is not yet supported for this architecture.
+# In the interim, this flag allows us to indicate that we cannot run dex2oat
+# to build the ART boot image. Once the requirement is relaxed or support
+# is enabled for this architecture, this flag can be removed.
+PRODUCT_USES_DEFAULT_ART_CONFIG := false
+
+# TODO: FIXME: Stop workaround for generic_system.mk #########################
+
+#
+# All components inherited here go to system_ext image
+#
+$(call inherit-product, $(SRC_TARGET_DIR)/product/media_system_ext.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/telephony_system_ext.mk)
+
+#
+# All components inherited here go to product image
+#
+$(call inherit-product, $(SRC_TARGET_DIR)/product/media_product.mk)
+PRODUCT_PACKAGES += FakeSystemApp
+
+#
+# All components inherited here go to vendor image
+#
+LOCAL_PREFER_VENDOR_APEX := true
+#$(call inherit-product, device/google/cuttlefish/shared/slim/device_vendor.mk)
+
+# TODO: FIXME: Start workaround for slim/device_vendor.mk ####################
+PRODUCT_MANIFEST_FILES += device/google/cuttlefish/shared/config/product_manifest.xml
+SYSTEM_EXT_MANIFEST_FILES += device/google/cuttlefish/shared/config/system_ext_manifest.xml
+
+$(call inherit-product, $(SRC_TARGET_DIR)/product/handheld_vendor.mk)
+
+$(call inherit-product, frameworks/native/build/phone-xhdpi-2048-dalvik-heap.mk)
+$(call inherit-product, device/google/cuttlefish/shared/camera/device_vendor.mk)
+$(call inherit-product, device/google/cuttlefish/shared/graphics/device_vendor.mk)
+# TODO: FIXME: Enable swiftshader for graphics.
+#$(call inherit-product, device/google/cuttlefish/shared/swiftshader/device_vendor.mk)
+$(call inherit-product, device/google/cuttlefish/shared/telephony/device_vendor.mk)
+$(call inherit-product, device/google/cuttlefish/shared/virgl/device_vendor.mk)
+$(call inherit-product, device/google/cuttlefish/shared/device.mk)
+
+PRODUCT_VENDOR_PROPERTIES += \
+    debug.hwui.drawing_enabled=0 \
+
+PRODUCT_PACKAGES += \
+    com.google.aosp_cf_phone.rros \
+    com.google.aosp_cf_slim.rros
+
+TARGET_BOARD_INFO_FILE ?= device/google/cuttlefish/shared/slim/android-info.txt
+# TODO: FIXME: Stop workaround for slim/device_vendor.mk #####################
+
+# TODO(b/205788876) remove this when openwrt has an image for riscv64
+#PRODUCT_ENFORCE_MAC80211_HWSIM := false
+
+#
+# Special settings for the target
+#
+$(call inherit-product, device/google/cuttlefish/vsoc_riscv64/bootloader.mk)
+
+# Exclude features that are not available on AOSP devices.
+PRODUCT_COPY_FILES += \
+    frameworks/native/data/etc/aosp_excluded_hardware.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/aosp_excluded_hardware.xml
+
+# TODO(b/206676167): This property can be removed when renderscript is removed.
+# Prevents framework from attempting to load renderscript libraries, which are
+# not supported on this architecture.
+PRODUCT_SYSTEM_PROPERTIES += \
+    config.disable_renderscript=1 \
+
+# TODO(b/271573990): This property can be removed when ART support for JIT on
+# this architecture is available. This is an override as the original property
+# is defined in runtime_libart.mk.
+PRODUCT_PROPERTY_OVERRIDES += \
+    dalvik.vm.usejit=false
+
+PRODUCT_NAME := aosp_cf_riscv64_slim
+PRODUCT_DEVICE := vsoc_riscv64
+PRODUCT_MANUFACTURER := Google
+PRODUCT_MODEL := Cuttlefish riscv64 slim
+
+PRODUCT_VENDOR_PROPERTIES += \
+    ro.soc.manufacturer=$(PRODUCT_MANUFACTURER) \
+    ro.soc.model=$(PRODUCT_DEVICE)
diff --git a/vsoc_riscv64/wear/aosp_cf.mk b/vsoc_riscv64/wear/aosp_cf.mk
new file mode 100644
index 0000000..53c632d
--- /dev/null
+++ b/vsoc_riscv64/wear/aosp_cf.mk
@@ -0,0 +1,137 @@
+#
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#
+# All components inherited here go to system image
+#
+$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit_only.mk)
+$(call inherit-product, device/google/cuttlefish/shared/wear/aosp_system.mk)
+
+# TODO: FIXME: Start workaround for aosp_system.mk ########################
+
+# TODO(b/271573990): It is currently required that dexpreopt be enabled for
+# userdebug builds, but dexpreopt is not yet supported for this architecture.
+# In the interim, this flag allows us to indicate that we cannot run dex2oat
+# to build the ART boot image. Once the requirement is relaxed or support
+# is enabled for this architecture, this flag can be removed.
+PRODUCT_USES_DEFAULT_ART_CONFIG := false
+
+# TODO(b/275113769): The riscv64 architecture doesn't support APEX flattening yet.
+# This condition can be removed after support is enabled.
+OVERRIDE_TARGET_FLATTEN_APEX := false
+
+# TODO: FIXME: Stop workaround for aosp_system.mk #########################
+
+# Cuttlefish uses A/B with system_b preopt, so we must install these
+PRODUCT_PACKAGES += \
+    cppreopts.sh \
+    otapreopt_script \
+
+# Hacks to boot with basic AOSP system apps
+PRODUCT_PACKAGES += \
+    Contacts \
+    Launcher3QuickStep \
+    Provision \
+    Settings \
+    StorageManager \
+    SystemUI \
+
+PRODUCT_COPY_FILES += \
+    frameworks/native/data/etc/android.software.app_widgets.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.software.app_widgets.xml \
+
+#
+# All components inherited here go to system_ext image
+#
+$(call inherit-product, device/google/cuttlefish/shared/wear/aosp_system_ext.mk)
+
+#
+# All components inherited here go to product image
+#
+$(call inherit-product, device/google/cuttlefish/shared/wear/aosp_product.mk)
+
+#
+# All components inherited here go to vendor image
+#
+$(call inherit-product, device/google/cuttlefish/shared/wear/aosp_vendor.mk)
+#$(call inherit-product, device/google/cuttlefish/shared/wear/device_vendor.mk)
+
+# TODO: FIXME: Start workaround for wear/device_vendor.mk ####################
+PRODUCT_MANIFEST_FILES += device/google/cuttlefish/shared/config/product_manifest.xml
+SYSTEM_EXT_MANIFEST_FILES += device/google/cuttlefish/shared/config/system_ext_manifest.xml
+
+$(call inherit-product, $(SRC_TARGET_DIR)/product/handheld_vendor.mk)
+
+PRODUCT_COPY_FILES += \
+    frameworks/native/data/etc/android.software.backup.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.software.backup.xml \
+    frameworks/native/data/etc/android.software.connectionservice.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.software.connectionservice.xml \
+    frameworks/native/data/etc/android.software.device_admin.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.software.device_admin.xml \
+    frameworks/native/data/etc/wearable_core_hardware.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/wearable_core_hardware.xml \
+
+$(call inherit-product, device/google/cuttlefish/shared/graphics/device_vendor.mk)
+# TODO: FIXME: Enable swiftshader for graphics.
+#$(call inherit-product, device/google/cuttlefish/shared/swiftshader/device_vendor.mk)
+$(call inherit-product, device/google/cuttlefish/shared/telephony/device_vendor.mk)
+$(call inherit-product, device/google/cuttlefish/shared/virgl/device_vendor.mk)
+$(call inherit-product, device/google/cuttlefish/shared/device.mk)
+
+PRODUCT_COPY_FILES += \
+    frameworks/native/data/etc/android.hardware.audio.output.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.hardware.audio.output.xml \
+    frameworks/native/data/etc/android.hardware.faketouch.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.hardware.faketouch.xml \
+    frameworks/native/data/etc/android.hardware.location.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.hardware.location.xml \
+
+# Runtime Resource Overlays
+PRODUCT_PACKAGES += \
+    cuttlefish_phone_overlay_frameworks_base_core \
+    cuttlefish_wear_overlay_frameworks_base_core \
+    cuttlefish_wear_overlay_settings_provider \
+
+PRODUCT_PRODUCT_PROPERTIES += \
+    config.disable_cameraservice=true
+
+PRODUCT_CHARACTERISTICS := nosdcard,watch
+
+TARGET_BOARD_INFO_FILE ?= device/google/cuttlefish/shared/wear/android-info.txt
+# TODO: FIXME: Stop workaround for wear/device_vendor.mk #####################
+
+#
+# Special settings for the target
+#
+$(call inherit-product, device/google/cuttlefish/vsoc_riscv64/bootloader.mk)
+
+# Exclude features that are not available on AOSP devices.
+PRODUCT_COPY_FILES += \
+    frameworks/native/data/etc/aosp_excluded_hardware.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/aosp_excluded_hardware.xml \
+
+# TODO(b/206676167): This property can be removed when renderscript is removed.
+# Prevents framework from attempting to load renderscript libraries, which are
+# not supported on this architecture.
+PRODUCT_SYSTEM_PROPERTIES += \
+    config.disable_renderscript=1 \
+
+# TODO(b/271573990): This property can be removed when ART support for JIT on
+# this architecture is available. This is an override as the original property
+# is defined in runtime_libart.mk.
+PRODUCT_PROPERTY_OVERRIDES += \
+    dalvik.vm.usejit=false
+
+PRODUCT_NAME := aosp_cf_riscv64_wear
+PRODUCT_DEVICE := vsoc_riscv64
+PRODUCT_MANUFACTURER := Google
+PRODUCT_MODEL := Cuttlefish riscv64 wearable
+
+PRODUCT_VENDOR_PROPERTIES += \
+    ro.soc.manufacturer=$(PRODUCT_MANUFACTURER) \
+    ro.soc.model=$(PRODUCT_DEVICE)
diff --git a/vsoc_riscv64_minidroid/BoardConfig.mk b/vsoc_riscv64_minidroid/BoardConfig.mk
new file mode 100644
index 0000000..1260f47
--- /dev/null
+++ b/vsoc_riscv64_minidroid/BoardConfig.mk
@@ -0,0 +1,36 @@
+#
+# Copyright 2022 The Android Open-Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#
+# riscv64 (64-bit only) target for Cuttlefish
+#
+
+TARGET_BOARD_PLATFORM := vsoc_riscv64
+TARGET_ARCH := riscv64
+TARGET_ARCH_VARIANT :=
+TARGET_CPU_VARIANT := generic
+TARGET_CPU_ABI := riscv64
+
+# Temporary hack while prebuilt modules are missing riscv64.
+ALLOW_MISSING_DEPENDENCIES := true
+
+TARGET_KERNEL_ARCH ?= $(TARGET_ARCH)
+TARGET_KERNEL_USE ?= mainline
+KERNEL_MODULES_PATH := device/google/cuttlefish_prebuilts/kernel/$(TARGET_KERNEL_USE)-$(TARGET_KERNEL_ARCH)
+TARGET_KERNEL_PATH := $(KERNEL_MODULES_PATH)/kernel-$(TARGET_KERNEL_USE)
+# FIXME: system_dlkm should be specified as well
+
+-include device/google/cuttlefish/shared/minidroid/BoardConfig.mk
diff --git a/vsoc_riscv64_minidroid/OWNERS b/vsoc_riscv64_minidroid/OWNERS
new file mode 100644
index 0000000..16a0fc8
--- /dev/null
+++ b/vsoc_riscv64_minidroid/OWNERS
@@ -0,0 +1,3 @@
+prashanthsw@google.com
+plabatut@google.com
+nellyv@google.com
diff --git a/vsoc_riscv64_minidroid/aosp_cf.mk b/vsoc_riscv64_minidroid/aosp_cf.mk
new file mode 100644
index 0000000..a49ec77
--- /dev/null
+++ b/vsoc_riscv64_minidroid/aosp_cf.mk
@@ -0,0 +1,29 @@
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit_only.mk)
+$(call inherit-product, device/google/cuttlefish/shared/minidroid/device.mk)
+
+$(call inherit-product, device/google/cuttlefish/vsoc_riscv64/bootloader.mk)
+
+PRODUCT_NAME := aosp_cf_riscv64_minidroid
+PRODUCT_DEVICE := vsoc_riscv64_minidroid
+PRODUCT_MANUFACTURER := Google
+PRODUCT_MODEL := Cuttlefish riscv64 minidroid
+
+PRODUCT_VENDOR_PROPERTIES += \
+    ro.soc.manufacturer=$(PRODUCT_MANUFACTURER) \
+    ro.soc.model=$(PRODUCT_DEVICE)
diff --git a/vsoc_x86/BoardConfig.mk b/vsoc_x86/BoardConfig.mk
index cd2c9d4..86844de 100644
--- a/vsoc_x86/BoardConfig.mk
+++ b/vsoc_x86/BoardConfig.mk
@@ -18,8 +18,6 @@
 # x86 target for Cuttlefish
 #
 
--include device/google/cuttlefish/shared/BoardConfig.mk
-
 TARGET_BOARD_PLATFORM := vsoc_x86
 TARGET_ARCH := x86
 TARGET_ARCH_VARIANT := x86
@@ -30,11 +28,16 @@
 TARGET_NATIVE_BRIDGE_CPU_VARIANT := generic
 TARGET_NATIVE_BRIDGE_ABI := armeabi-v7a armeabi
 
-ifeq ($(BOARD_VENDOR_RAMDISK_KERNEL_MODULES),)
-    BOARD_VENDOR_RAMDISK_KERNEL_MODULES += $(wildcard kernel/prebuilts/common-modules/virtual-device/$(TARGET_KERNEL_USE)/x86-64/*.ko)
-endif
-
 # TODO(b/156534160): Temporarily allow for the old style PRODUCT_COPY_FILES for ndk_translation_prebuilt
 ifeq ($(USE_NDK_TRANSLATION_BINARY),true)
 BUILD_BROKEN_ELF_PREBUILT_PRODUCT_COPY_FILES := true
 endif
+
+TARGET_KERNEL_ARCH := x86_64
+
+-include device/google/cuttlefish/shared/BoardConfig.mk
+-include device/google/cuttlefish/shared/camera/BoardConfig.mk
+-include device/google/cuttlefish/shared/graphics/BoardConfig.mk
+-include device/google/cuttlefish/shared/swiftshader/BoardConfig.mk
+-include device/google/cuttlefish/shared/telephony/BoardConfig.mk
+-include device/google/cuttlefish/shared/virgl/BoardConfig.mk
diff --git a/vsoc_x86/auto/OWNERS b/vsoc_x86/auto/OWNERS
deleted file mode 100644
index 1b990ef..0000000
--- a/vsoc_x86/auto/OWNERS
+++ /dev/null
@@ -1,5 +0,0 @@
-# Android Auto leads
-ankitarora@google.com
-egranata@google.com
-gurunagarajan@google.com
-keunyoung@google.com
diff --git a/vsoc_x86/auto/aosp_cf.mk b/vsoc_x86/auto/aosp_cf.mk
deleted file mode 100644
index ecd9063..0000000
--- a/vsoc_x86/auto/aosp_cf.mk
+++ /dev/null
@@ -1,61 +0,0 @@
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-#
-# All components inherited here go to system image
-#
-$(call inherit-product, $(SRC_TARGET_DIR)/product/generic_system.mk)
-
-# FIXME: generic_system.mk sets 'PRODUCT_ENFORCE_RRO_TARGETS := *'
-#        but this breaks phone_car. So undo it here.
-PRODUCT_ENFORCE_RRO_TARGETS := frameworks-res
-
-# FIXME: Disable mainline path checks
-PRODUCT_ENFORCE_ARTIFACT_PATH_REQUIREMENTS := false
-
-#
-# All components inherited here go to system_ext image
-#
-$(call inherit-product, $(SRC_TARGET_DIR)/product/base_system_ext.mk)
-
-#
-# All components inherited here go to product image
-#
-$(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_product.mk)
-
-#
-# All components inherited here go to vendor image
-#
-$(call inherit-product, device/google/cuttlefish/shared/auto/device_vendor.mk)
-
-#
-# Special settings for the target
-#
-$(call inherit-product, device/google/cuttlefish/vsoc_x86_64/kernel.mk)
-$(call inherit-product, device/google/cuttlefish/vsoc_x86_64/bootloader.mk)
-
-# Exclude features that are not available on AOSP devices.
-PRODUCT_COPY_FILES += \
-    frameworks/native/data/etc/aosp_excluded_hardware.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/aosp_excluded_hardware.xml
-
-PRODUCT_NAME := aosp_cf_x86_auto
-PRODUCT_DEVICE := vsoc_x86
-PRODUCT_MANUFACTURER := Google
-PRODUCT_MODEL := Cuttlefish x86 auto
-
-PRODUCT_VENDOR_PROPERTIES += \
-    ro.soc.manufacturer=$(PRODUCT_MANUFACTURER) \
-    ro.soc.model=$(PRODUCT_DEVICE)
diff --git a/vsoc_x86/go/aosp_cf.mk b/vsoc_x86/go/aosp_cf.mk
index d91c575..a31c566 100644
--- a/vsoc_x86/go/aosp_cf.mk
+++ b/vsoc_x86/go/aosp_cf.mk
@@ -26,6 +26,7 @@
 PRODUCT_ARTIFACT_PATH_REQUIREMENT_ALLOWED_LIST += \
     system/apex/com.android.tethering.inprocess.capex \
     system/app/PlatformCaptivePortalLogin/PlatformCaptivePortalLogin.apk \
+    system/etc/permissions/platform_privapp_allowlist_com.android.cellbroadcastservice.xml \
     system/priv-app/CellBroadcastServiceModulePlatform/CellBroadcastServiceModulePlatform.apk \
     system/priv-app/InProcessNetworkStack/InProcessNetworkStack.apk \
 
@@ -48,7 +49,6 @@
 #
 # Special settings for the target
 #
-$(call inherit-product, device/google/cuttlefish/vsoc_x86_64/kernel.mk)
 $(call inherit-product, device/google/cuttlefish/vsoc_x86_64/bootloader.mk)
 
 # Exclude features that are not available on AOSP devices.
diff --git a/vsoc_x86/pasan/aosp_cf.mk b/vsoc_x86/pasan/aosp_cf.mk
index d4c8d57..f0b4967 100644
--- a/vsoc_x86/pasan/aosp_cf.mk
+++ b/vsoc_x86/pasan/aosp_cf.mk
@@ -40,7 +40,6 @@
 #
 # Special settings for the target
 #
-$(call inherit-product, device/google/cuttlefish/vsoc_x86_64/kernel.mk)
 $(call inherit-product, device/google/cuttlefish/vsoc_x86_64/bootloader.mk)
 
 # Exclude features that are not available on AOSP devices.
diff --git a/vsoc_x86/phone/aosp_cf.mk b/vsoc_x86/phone/aosp_cf.mk
index 3e6ec6b..b0df448 100644
--- a/vsoc_x86/phone/aosp_cf.mk
+++ b/vsoc_x86/phone/aosp_cf.mk
@@ -40,7 +40,6 @@
 #
 # Special settings for the target
 #
-$(call inherit-product, device/google/cuttlefish/vsoc_x86_64/kernel.mk)
 $(call inherit-product, device/google/cuttlefish/vsoc_x86_64/bootloader.mk)
 
 # Exclude features that are not available on AOSP devices.
diff --git a/vsoc_x86/tv/aosp_cf.mk b/vsoc_x86/tv/aosp_cf.mk
index 757c0a7..61839d6 100644
--- a/vsoc_x86/tv/aosp_cf.mk
+++ b/vsoc_x86/tv/aosp_cf.mk
@@ -39,9 +39,11 @@
 #
 # Special settings for the target
 #
-$(call inherit-product, device/google/cuttlefish/vsoc_x86_64/kernel.mk)
 $(call inherit-product, device/google/cuttlefish/vsoc_x86_64/bootloader.mk)
 
+# Stub SetupWizard for AOSP TV
+PRODUCT_PACKAGES += TvProvision
+
 # Exclude features that are not available on AOSP devices.
 PRODUCT_COPY_FILES += \
     frameworks/native/data/etc/aosp_excluded_hardware.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/aosp_excluded_hardware.xml
diff --git a/vsoc_x86/wear/aosp_cf.mk b/vsoc_x86/wear/aosp_cf.mk
index 35402f0..ac64c42 100644
--- a/vsoc_x86/wear/aosp_cf.mk
+++ b/vsoc_x86/wear/aosp_cf.mk
@@ -19,10 +19,6 @@
 #
 $(call inherit-product, device/google/cuttlefish/shared/wear/aosp_system.mk)
 
-# Allowed for wearables, but not installed to /system by default
-PRODUCT_PACKAGES += \
-    cameraserver \
-
 # Cuttlefish uses A/B with system_b preopt, so we must install these
 PRODUCT_PACKAGES += \
     cppreopts.sh \
@@ -55,12 +51,10 @@
 #
 $(call inherit-product, device/google/cuttlefish/shared/wear/aosp_vendor.mk)
 $(call inherit-product, device/google/cuttlefish/shared/wear/device_vendor.mk)
-PRODUCT_ENFORCE_MAC80211_HWSIM := false
 
 #
 # Special settings for the target
 #
-$(call inherit-product, device/google/cuttlefish/vsoc_x86_64/kernel.mk)
 $(call inherit-product, device/google/cuttlefish/vsoc_x86_64/bootloader.mk)
 
 # Exclude features that are not available on AOSP devices.
diff --git a/vsoc_x86_64/BoardConfig.mk b/vsoc_x86_64/BoardConfig.mk
index a740a76..45cd841 100644
--- a/vsoc_x86_64/BoardConfig.mk
+++ b/vsoc_x86_64/BoardConfig.mk
@@ -15,11 +15,9 @@
 #
 
 #
-# x86_64  target for Cuttlefish
+# x86_64 target for Cuttlefish
 #
 
--include device/google/cuttlefish/shared/BoardConfig.mk
-
 TARGET_BOARD_PLATFORM := vsoc_x86_64
 TARGET_ARCH := x86_64
 TARGET_ARCH_VARIANT := silvermont
@@ -39,6 +37,9 @@
 TARGET_NATIVE_BRIDGE_2ND_CPU_VARIANT := generic
 TARGET_NATIVE_BRIDGE_2ND_ABI := armeabi-v7a armeabi
 
-ifeq ($(BOARD_VENDOR_RAMDISK_KERNEL_MODULES),)
-    BOARD_VENDOR_RAMDISK_KERNEL_MODULES += $(wildcard kernel/prebuilts/common-modules/virtual-device/$(TARGET_KERNEL_USE)/x86-64/*.ko)
-endif
+-include device/google/cuttlefish/shared/BoardConfig.mk
+-include device/google/cuttlefish/shared/camera/BoardConfig.mk
+-include device/google/cuttlefish/shared/graphics/BoardConfig.mk
+-include device/google/cuttlefish/shared/swiftshader/BoardConfig.mk
+-include device/google/cuttlefish/shared/telephony/BoardConfig.mk
+-include device/google/cuttlefish/shared/virgl/BoardConfig.mk
diff --git a/vsoc_x86_64/auto/OWNERS b/vsoc_x86_64/auto/OWNERS
deleted file mode 100644
index 1b990ef..0000000
--- a/vsoc_x86_64/auto/OWNERS
+++ /dev/null
@@ -1,5 +0,0 @@
-# Android Auto leads
-ankitarora@google.com
-egranata@google.com
-gurunagarajan@google.com
-keunyoung@google.com
diff --git a/vsoc_x86_64/auto/aosp_cf.mk b/vsoc_x86_64/auto/aosp_cf.mk
deleted file mode 100644
index 610c8c1..0000000
--- a/vsoc_x86_64/auto/aosp_cf.mk
+++ /dev/null
@@ -1,62 +0,0 @@
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-#
-# All components inherited here go to system image
-#
-$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/product/generic_system.mk)
-
-# FIXME: generic_system.mk sets 'PRODUCT_ENFORCE_RRO_TARGETS := *'
-#        but this breaks phone_car. So undo it here.
-PRODUCT_ENFORCE_RRO_TARGETS := frameworks-res
-
-# FIXME: Disable mainline path checks
-PRODUCT_ENFORCE_ARTIFACT_PATH_REQUIREMENTS := false
-
-#
-# All components inherited here go to system_ext image
-#
-$(call inherit-product, $(SRC_TARGET_DIR)/product/base_system_ext.mk)
-
-#
-# All components inherited here go to product image
-#
-$(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_product.mk)
-
-#
-# All components inherited here go to vendor image
-#
-$(call inherit-product, device/google/cuttlefish/shared/auto/device_vendor.mk)
-
-#
-# Special settings for the target
-#
-$(call inherit-product, device/google/cuttlefish/vsoc_x86_64/kernel.mk)
-$(call inherit-product, device/google/cuttlefish/vsoc_x86_64/bootloader.mk)
-
-# Exclude features that are not available on AOSP devices.
-PRODUCT_COPY_FILES += \
-    frameworks/native/data/etc/aosp_excluded_hardware.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/aosp_excluded_hardware.xml
-
-PRODUCT_NAME := aosp_cf_x86_64_auto
-PRODUCT_DEVICE := vsoc_x86_64
-PRODUCT_MANUFACTURER := Google
-PRODUCT_MODEL := Cuttlefish x86_64 auto
-
-PRODUCT_VENDOR_PROPERTIES += \
-    ro.soc.manufacturer=$(PRODUCT_MANUFACTURER) \
-    ro.soc.model=$(PRODUCT_DEVICE)
diff --git a/vsoc_x86_64/kernel.mk b/vsoc_x86_64/kernel.mk
index e087f22..710db12 100644
--- a/vsoc_x86_64/kernel.mk
+++ b/vsoc_x86_64/kernel.mk
@@ -13,7 +13,4 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-TARGET_KERNEL_USE ?= 5.15
-TARGET_KERNEL_PATH ?= kernel/prebuilts/$(TARGET_KERNEL_USE)/x86_64/kernel-$(TARGET_KERNEL_USE)
-
-PRODUCT_COPY_FILES += $(TARGET_KERNEL_PATH):kernel
+# This file is deprecated.
diff --git a/vsoc_x86_64/pc/OWNERS b/vsoc_x86_64/pc/OWNERS
deleted file mode 100644
index 47eb80e..0000000
--- a/vsoc_x86_64/pc/OWNERS
+++ /dev/null
@@ -1,3 +0,0 @@
-# pc cuttlefish leads
-armenk@google.com
-xutan@google.com
\ No newline at end of file
diff --git a/vsoc_x86_64/pc/aosp_cf.mk b/vsoc_x86_64/pc/aosp_cf.mk
deleted file mode 100644
index 1853785..0000000
--- a/vsoc_x86_64/pc/aosp_cf.mk
+++ /dev/null
@@ -1,52 +0,0 @@
-#
-# Copyright (C) 2019 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-#
-# All components inherited here go to system image (same as GSI system)
-#
-$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/product/generic_system.mk)
-
-#
-# All components inherited here go to system_ext image (same as GSI system_ext)a
-#
-$(call inherit-product, $(SRC_TARGET_DIR)/product/handheld_system_ext.mk)
-# $(call inherit-product, $(SRC_TARGET_DIR)/product/telephony_system_ext.mk)
-
-#
-# All components inherited here go to product image (same as GSI product)
-#
-$(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_product.mk)
-
-#
-# All components inherited here go to vendor image
-#
-$(call inherit-product, device/google/cuttlefish/shared/pc/device_vendor.mk)
-
-#
-# Special settings for the target
-#
-$(call inherit-product, device/google/cuttlefish/vsoc_x86_64/kernel.mk)
-$(call inherit-product, device/google/cuttlefish/vsoc_x86_64/bootloader.mk)
-
-PRODUCT_NAME := aosp_cf_x86_64_pc
-PRODUCT_DEVICE := vsoc_x86_64
-PRODUCT_MANUFACTURER := Google
-PRODUCT_MODEL := Cuttlefish x86_64 pc
-
-PRODUCT_VENDOR_PROPERTIES += \
-    ro.soc.manufacturer=$(PRODUCT_MANUFACTURER) \
-    ro.soc.model=$(PRODUCT_DEVICE)
diff --git a/vsoc_x86_64/phone/OWNERS b/vsoc_x86_64/phone/OWNERS
new file mode 100644
index 0000000..e0d597b
--- /dev/null
+++ b/vsoc_x86_64/phone/OWNERS
@@ -0,0 +1 @@
+per-file *hsum*.mk = file:platform/frameworks/base:/MULTIUSER_OWNERS
diff --git a/vsoc_x86_64/phone/aosp_cf.mk b/vsoc_x86_64/phone/aosp_cf.mk
index 2be93fd..f9e8d62 100644
--- a/vsoc_x86_64/phone/aosp_cf.mk
+++ b/vsoc_x86_64/phone/aosp_cf.mk
@@ -45,7 +45,6 @@
 #
 # Special settings for the target
 #
-$(call inherit-product, device/google/cuttlefish/vsoc_x86_64/kernel.mk)
 $(call inherit-product, device/google/cuttlefish/vsoc_x86_64/bootloader.mk)
 
 # Exclude features that are not available on AOSP devices.
@@ -61,11 +60,8 @@
 PRODUCT_MANUFACTURER := Google
 PRODUCT_MODEL := Cuttlefish x86_64 phone
 
-# Window sidecar and extensions to enhance activity embedding, multi-display,
-# tablet, and foldable support.
-PRODUCT_PACKAGES += \
-    androidx.window.extensions \
-    androidx.window.sidecar \
+# Window Extensions
+$(call inherit-product, $(SRC_TARGET_DIR)/product/window_extensions.mk)
 
 PRODUCT_VENDOR_PROPERTIES += \
     ro.soc.manufacturer=$(PRODUCT_MANUFACTURER) \
diff --git a/vsoc_x86_64/phone/aosp_cf_ssi.mk b/vsoc_x86_64/phone/aosp_cf_ssi.mk
new file mode 100644
index 0000000..d24b943
--- /dev/null
+++ b/vsoc_x86_64/phone/aosp_cf_ssi.mk
@@ -0,0 +1,25 @@
+# Inherit mostly from aosp_cf_x86_64_phone
+$(call inherit-product, device/google/cuttlefish/vsoc_x86_64/phone/aosp_cf.mk)
+PRODUCT_NAME := aosp_cf_x86_64_ssi
+
+PRODUCT_BUILD_SYSTEM_IMAGE := true
+PRODUCT_BUILD_SYSTEM_EXT_IMAGE := true
+# Product image is required for now to pass vbmeta_system build. This can be removed once vbmeta_system can be skipped with PRODUCT_BUILD_VBMETA_IMAGE
+PRODUCT_BUILD_PRODUCT_IMAGE := true
+
+PRODUCT_BUILD_SYSTEM_OTHER_IMAGE := false
+PRODUCT_BUILD_INIT_BOOT_IMAGE := false
+PRODUCT_BUILD_RAMDISK_IMAGE := false
+PRODUCT_BUILD_VENDOR_IMAGE := false
+PRODUCT_BUILD_ODM_IMAGE := false
+PRODUCT_BUILD_PRODUCT_SERVICES_IMAGE := false
+PRODUCT_BUILD_CACHE_IMAGE := false
+PRODUCT_BUILD_USERDATA_IMAGE := false
+PRODUCT_BUILD_BOOT_IMAGE := false
+PRODUCT_BUILD_VENDOR_BOOT_IMAGE := false
+PRODUCT_BUILD_RECOVERY_IMAGE := false
+PRODUCT_BUILD_SUPER_PARTITION := false
+PRODUCT_BUILD_SUPER_EMPTY_IMAGE := false
+PRODUCT_BUILD_VBMETA_IMAGE := false
+
+TARGET_SKIP_OTA_PACKAGE := true
diff --git a/vsoc_x86_64/phone/aosp_cf_vendor.mk b/vsoc_x86_64/phone/aosp_cf_vendor.mk
new file mode 100644
index 0000000..4b613b4
--- /dev/null
+++ b/vsoc_x86_64/phone/aosp_cf_vendor.mk
@@ -0,0 +1,23 @@
+# Inherit mostly from aosp_cf_x86_64_phone
+$(call inherit-product, device/google/cuttlefish/vsoc_x86_64/phone/aosp_cf.mk)
+PRODUCT_NAME := aosp_cf_x86_64_phone_vendor
+
+PRODUCT_BUILD_SYSTEM_IMAGE := false
+PRODUCT_BUILD_SYSTEM_OTHER_IMAGE := false
+PRODUCT_BUILD_PRODUCT_IMAGE := false
+PRODUCT_BUILD_SYSTEM_EXT_IMAGE := false
+
+PRODUCT_BUILD_VENDOR_IMAGE := true
+PRODUCT_BUILD_ODM_IMAGE := true
+PRODUCT_BUILD_PRODUCT_SERVICES_IMAGE := false
+PRODUCT_BUILD_CACHE_IMAGE := false
+PRODUCT_BUILD_RAMDISK_IMAGE := false
+PRODUCT_BUILD_USERDATA_IMAGE := false
+PRODUCT_BUILD_BOOT_IMAGE := false
+PRODUCT_BUILD_VENDOR_BOOT_IMAGE := false
+PRODUCT_BUILD_RECOVERY_IMAGE := false
+PRODUCT_BUILD_INIT_BOOT_IMAGE := false
+PRODUCT_BUILD_SUPER_PARTITION := false
+PRODUCT_BUILD_SUPER_EMPTY_IMAGE := false
+
+TARGET_SKIP_OTA_PACKAGE := true
diff --git a/vsoc_x86_64/tv/aosp_cf.mk b/vsoc_x86_64/tv/aosp_cf.mk
index c59f1b0..dc2a7ed 100644
--- a/vsoc_x86_64/tv/aosp_cf.mk
+++ b/vsoc_x86_64/tv/aosp_cf.mk
@@ -40,9 +40,11 @@
 #
 # Special settings for the target
 #
-$(call inherit-product, device/google/cuttlefish/vsoc_x86_64/kernel.mk)
 $(call inherit-product, device/google/cuttlefish/vsoc_x86_64/bootloader.mk)
 
+# Stub SetupWizard for AOSP TV
+PRODUCT_PACKAGES += TvProvision
+
 # Exclude features that are not available on AOSP devices.
 PRODUCT_COPY_FILES += \
     frameworks/native/data/etc/aosp_excluded_hardware.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/aosp_excluded_hardware.xml
diff --git a/vsoc_x86_64_minidroid/BoardConfig.mk b/vsoc_x86_64_minidroid/BoardConfig.mk
new file mode 100644
index 0000000..fa73b01
--- /dev/null
+++ b/vsoc_x86_64_minidroid/BoardConfig.mk
@@ -0,0 +1,26 @@
+#
+# Copyright 2022 The Android Open-Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#
+# x86_64 (64-bit only) target for Cuttlefish
+#
+
+TARGET_BOARD_PLATFORM := vsoc_x86_64
+TARGET_ARCH := x86_64
+TARGET_ARCH_VARIANT := silvermont
+TARGET_CPU_ABI := x86_64
+
+-include device/google/cuttlefish/shared/minidroid/BoardConfig.mk
diff --git a/vsoc_x86_64_minidroid/aosp_cf.mk b/vsoc_x86_64_minidroid/aosp_cf.mk
new file mode 100644
index 0000000..70f2c45
--- /dev/null
+++ b/vsoc_x86_64_minidroid/aosp_cf.mk
@@ -0,0 +1,29 @@
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit_only.mk)
+$(call inherit-product, device/google/cuttlefish/shared/minidroid/device.mk)
+
+$(call inherit-product, device/google/cuttlefish/vsoc_x86_64/bootloader.mk)
+
+PRODUCT_NAME := aosp_cf_x86_64_minidroid
+PRODUCT_DEVICE := vsoc_x86_64_minidroid
+PRODUCT_MANUFACTURER := Google
+PRODUCT_MODEL := Cuttlefish x86_64 minidroid
+
+PRODUCT_VENDOR_PROPERTIES += \
+    ro.soc.manufacturer=$(PRODUCT_MANUFACTURER) \
+    ro.soc.model=$(PRODUCT_DEVICE)
diff --git a/vsoc_x86_64_only/BoardConfig.mk b/vsoc_x86_64_only/BoardConfig.mk
index 7185b2d..3222f2a 100644
--- a/vsoc_x86_64_only/BoardConfig.mk
+++ b/vsoc_x86_64_only/BoardConfig.mk
@@ -18,8 +18,6 @@
 # x86_64 (64-bit only) target for Cuttlefish
 #
 
--include device/google/cuttlefish/shared/BoardConfig.mk
-
 TARGET_BOARD_PLATFORM := vsoc_x86_64
 TARGET_ARCH := x86_64
 TARGET_ARCH_VARIANT := silvermont
@@ -31,4 +29,10 @@
 TARGET_NATIVE_BRIDGE_ABI := arm64-v8a
 
 AUDIOSERVER_MULTILIB := first
-BOARD_VENDOR_RAMDISK_KERNEL_MODULES += $(wildcard kernel/prebuilts/common-modules/virtual-device/$(TARGET_KERNEL_USE)/x86-64/*.ko)
+
+-include device/google/cuttlefish/shared/BoardConfig.mk
+-include device/google/cuttlefish/shared/camera/BoardConfig.mk
+-include device/google/cuttlefish/shared/graphics/BoardConfig.mk
+-include device/google/cuttlefish/shared/swiftshader/BoardConfig.mk
+-include device/google/cuttlefish/shared/telephony/BoardConfig.mk
+-include device/google/cuttlefish/shared/virgl/BoardConfig.mk
diff --git a/vsoc_x86_64_only/auto/OWNERS b/vsoc_x86_64_only/auto/OWNERS
new file mode 100644
index 0000000..f97912a
--- /dev/null
+++ b/vsoc_x86_64_only/auto/OWNERS
@@ -0,0 +1,4 @@
+# Android Auto leads
+include platform/packages/services/Car:/OWNERS
+ankitarora@google.com
+egranata@google.com
diff --git a/vsoc_x86_64_only/auto/aosp_cf.mk b/vsoc_x86_64_only/auto/aosp_cf.mk
new file mode 100644
index 0000000..b817e21
--- /dev/null
+++ b/vsoc_x86_64_only/auto/aosp_cf.mk
@@ -0,0 +1,67 @@
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#
+# All components inherited here go to system image
+#
+$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit_only.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/generic_system.mk)
+
+# FIXME: generic_system.mk sets 'PRODUCT_ENFORCE_RRO_TARGETS := *'
+#        but this breaks phone_car. So undo it here.
+PRODUCT_ENFORCE_RRO_TARGETS := frameworks-res
+
+# FIXME: Disable mainline path checks
+PRODUCT_ENFORCE_ARTIFACT_PATH_REQUIREMENTS := false
+
+# HSUM is currently incompatible with telephony.
+# TODO(b/283853205): Properly disable telephony using per-partition makefile.
+TARGET_NO_TELEPHONY := true
+
+#
+# All components inherited here go to system_ext image
+#
+$(call inherit-product, $(SRC_TARGET_DIR)/product/base_system_ext.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/telephony_system_ext.mk)
+
+#
+# All components inherited here go to product image
+#
+$(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_product.mk)
+
+#
+# All components inherited here go to vendor image
+#
+$(call inherit-product, device/google/cuttlefish/shared/auto/device_vendor.mk)
+
+#
+# Special settings for the target
+#
+$(call inherit-product, device/google/cuttlefish/vsoc_x86_64/kernel.mk)
+$(call inherit-product, device/google/cuttlefish/vsoc_x86_64/bootloader.mk)
+
+# Exclude features that are not available on AOSP devices.
+PRODUCT_COPY_FILES += \
+    frameworks/native/data/etc/aosp_excluded_hardware.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/aosp_excluded_hardware.xml
+
+PRODUCT_NAME := aosp_cf_x86_64_only_auto
+PRODUCT_DEVICE := vsoc_x86_64_only
+PRODUCT_MANUFACTURER := Google
+PRODUCT_MODEL := Cuttlefish x86_64 auto 64-bit only
+
+PRODUCT_VENDOR_PROPERTIES += \
+    ro.soc.manufacturer=$(PRODUCT_MANUFACTURER) \
+    ro.soc.model=$(PRODUCT_DEVICE)
diff --git a/vsoc_x86_64_only/auto_md/OWNERS b/vsoc_x86_64_only/auto_md/OWNERS
new file mode 100644
index 0000000..5482d9b
--- /dev/null
+++ b/vsoc_x86_64_only/auto_md/OWNERS
@@ -0,0 +1 @@
+include device/google/cuttlefish:/shared/auto_md/OWNERS
diff --git a/vsoc_x86_64_only/auto_md/aosp_cf.mk b/vsoc_x86_64_only/auto_md/aosp_cf.mk
new file mode 100644
index 0000000..c058498
--- /dev/null
+++ b/vsoc_x86_64_only/auto_md/aosp_cf.mk
@@ -0,0 +1,55 @@
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Set board, as displays are set in the config_BOARD.json file (in
+# that file, display0 is main, display1 is cluster, and any other displays
+# are passenger displays - notice that the maximum allowed is 4 total).
+TARGET_BOARD_INFO_FILE := device/google/cuttlefish/shared/auto_md/android-info.txt
+
+PRODUCT_COPY_FILES += \
+    device/google/cuttlefish/shared/auto_md/display_settings.xml:$(TARGET_COPY_OUT_VENDOR)/etc/display_settings.xml
+
+PRODUCT_PACKAGE_OVERLAYS += \
+    device/google/cuttlefish/shared/auto_md/overlay
+
+# HSUM is currently incompatible with telephony.
+# TODO(b/283853205): Properly disable telephony using per-partition makefile.
+TARGET_NO_TELEPHONY := true
+
+ENABLE_CLUSTER_OS_DOUBLE:=true
+
+PRODUCT_PACKAGES += \
+    ClusterHomeSample \
+    ClusterOsDouble \
+    CarServiceOverlayEmulatorOsDouble \
+    CarServiceOverlayMdEmulatorOsDouble \
+    MultiDisplaySecondaryHomeTestLauncher \
+    MultiDisplayTest
+
+PRODUCT_SYSTEM_DEFAULT_PROPERTIES += \
+    com.android.car.internal.debug.num_auto_populated_users=1 # 1 passenger only (so 2nd display shows user picker)
+# TODO(b/233370174): add audio multi-zone
+#   ro.vendor.simulateMultiZoneAudio=true \
+
+
+# This will disable dynamic displays and enable hardcoded displays on hwservicemanager.
+$(call inherit-product, device/generic/car/emulator/cluster/cluster-hwservicemanager.mk)
+
+# Add the regular stuff.
+$(call inherit-product, device/google/cuttlefish/vsoc_x86_64_only/auto/aosp_cf.mk)
+
+PRODUCT_NAME := aosp_cf_x86_64_auto_md
+PRODUCT_MODEL := Cuttlefish x86_64 auto 64-bit only multi-displays
diff --git a/vsoc_x86_64_only/auto_mdnd/OWNERS b/vsoc_x86_64_only/auto_mdnd/OWNERS
new file mode 100644
index 0000000..5482d9b
--- /dev/null
+++ b/vsoc_x86_64_only/auto_mdnd/OWNERS
@@ -0,0 +1 @@
+include device/google/cuttlefish:/shared/auto_md/OWNERS
diff --git a/vsoc_x86_64_only/auto_mdnd/aosp_cf.mk b/vsoc_x86_64_only/auto_mdnd/aosp_cf.mk
new file mode 100644
index 0000000..a80d837
--- /dev/null
+++ b/vsoc_x86_64_only/auto_mdnd/aosp_cf.mk
@@ -0,0 +1,32 @@
+#
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# TODO(b/264958209): for now it's just inheriting aosp_cf_md and setting
+# config_multiuserVisibleBackgroundUsersOnDefaultDisplay , but in the
+# long-run it should be customized further (for example, setting
+# occupancy zone and removing cluster and other unnecessary stuff)
+
+$(call inherit-product, device/google/cuttlefish/vsoc_x86_64_only/auto_md/aosp_cf.mk)
+
+# HSUM is currently incompatible with telephony.
+# TODO(b/283853205): Properly disable telephony using per-partition makefile.
+TARGET_NO_TELEPHONY := true
+
+PRODUCT_NAME := aosp_cf_x86_64_auto_mdnd
+PRODUCT_MODEL := Cuttlefish x86_64 auto 64-bit only multi-displays, no-driver
+
+PRODUCT_PACKAGE_OVERLAYS += \
+    device/google/cuttlefish/shared/auto_mdnd/overlay
diff --git a/vsoc_x86_64_only/auto_portrait/OWNERS b/vsoc_x86_64_only/auto_portrait/OWNERS
new file mode 100644
index 0000000..5bc897b
--- /dev/null
+++ b/vsoc_x86_64_only/auto_portrait/OWNERS
@@ -0,0 +1,4 @@
+include device/google/cuttlefish:/shared/auto/OWNERS
+babakbo@google.com
+calhuang@google.com
+priyanksingh@google.com
diff --git a/vsoc_x86_64_only/auto_portrait/aosp_cf.mk b/vsoc_x86_64_only/auto_portrait/aosp_cf.mk
new file mode 100644
index 0000000..dc8fb91
--- /dev/null
+++ b/vsoc_x86_64_only/auto_portrait/aosp_cf.mk
@@ -0,0 +1,51 @@
+#
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# AOSP Car UI Portrait Cuttlefish Target
+
+TARGET_BOARD_INFO_FILE := device/google/cuttlefish/shared/auto_portrait/android-info.txt
+
+PRODUCT_COPY_FILES += \
+    device/google/cuttlefish/shared/auto_portrait/display_settings.xml:$(TARGET_COPY_OUT_VENDOR)/etc/display_settings.xml
+
+# Exclude AAE Car System UI
+DO_NOT_INCLUDE_AAE_CAR_SYSTEM_UI := true
+
+# Exclude Car UI Reference Design
+DO_NOT_INCLUDE_CAR_UI_REFERENCE_DESIGN := true
+
+# Exclude Car Visual Overlay
+DISABLE_CAR_PRODUCT_VISUAL_OVERLAY := true
+
+# Copy additional files
+PRODUCT_COPY_FILES += \
+    packages/services/Car/car_product/car_ui_portrait/bootanimation/bootanimation.zip:system/media/bootanimation.zip
+
+$(call inherit-product, device/google/cuttlefish/vsoc_x86_64_only/auto/aosp_cf.mk)
+
+PRODUCT_NAME := aosp_cf_x86_64_only_auto_portrait
+PRODUCT_DEVICE := vsoc_x86_64_only
+PRODUCT_MANUFACTURER := Google
+PRODUCT_MODEL := AOSP Cuttlefish x86_64 auto 64-bit only with portrait UI
+
+$(call inherit-product, packages/services/Car/car_product/car_ui_portrait/apps/car_ui_portrait_apps.mk)
+$(call inherit-product, packages/services/Car/car_product/car_ui_portrait/rro/car_ui_portrait_rro.mk)
+
+PRODUCT_COPY_FILES += \
+    packages/services/Car/car_product/car_ui_portrait/android.software.car.splitscreen_multitasking.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.software.car.splitscreen_multitasking.xml
+
+# Include the `launch_cvd --config auto_portrait` option.
+$(call soong_config_append,cvd,launch_configs,cvd_config_auto_portrait.json)
\ No newline at end of file
diff --git a/vsoc_x86_64_only/pc/OWNERS b/vsoc_x86_64_only/pc/OWNERS
new file mode 100644
index 0000000..c337561
--- /dev/null
+++ b/vsoc_x86_64_only/pc/OWNERS
@@ -0,0 +1 @@
+include device/google/cuttlefish:/shared/pc/OWNERS
diff --git a/vsoc_x86_64_only/pc/aosp_cf.mk b/vsoc_x86_64_only/pc/aosp_cf.mk
new file mode 100644
index 0000000..e6b9fe6
--- /dev/null
+++ b/vsoc_x86_64_only/pc/aosp_cf.mk
@@ -0,0 +1,51 @@
+#
+# Copyright (C) 2019 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#
+# All components inherited here go to system image (same as GSI system)
+#
+$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit_only.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/generic_system.mk)
+
+#
+# All components inherited here go to system_ext image (same as GSI system_ext)
+#
+$(call inherit-product, $(SRC_TARGET_DIR)/product/handheld_system_ext.mk)
+# $(call inherit-product, $(SRC_TARGET_DIR)/product/telephony_system_ext.mk)
+
+#
+# All components inherited here go to product image (same as GSI product)
+#
+$(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_product.mk)
+
+#
+# All components inherited here go to vendor image
+#
+$(call inherit-product, device/google/cuttlefish/shared/pc/device_vendor.mk)
+
+#
+# Special settings for the target
+#
+$(call inherit-product, device/google/cuttlefish/vsoc_x86_64/bootloader.mk)
+
+PRODUCT_NAME := aosp_cf_x86_64_pc
+PRODUCT_DEVICE := vsoc_x86_64_only
+PRODUCT_MANUFACTURER := Google
+PRODUCT_MODEL := Cuttlefish x86_64 pc
+
+PRODUCT_VENDOR_PROPERTIES += \
+    ro.soc.manufacturer=$(PRODUCT_MANUFACTURER) \
+    ro.soc.model=$(PRODUCT_DEVICE)
diff --git a/vsoc_x86_64_only/phone/aosp_cf.mk b/vsoc_x86_64_only/phone/aosp_cf.mk
index 0e8757d..45fed56 100644
--- a/vsoc_x86_64_only/phone/aosp_cf.mk
+++ b/vsoc_x86_64_only/phone/aosp_cf.mk
@@ -36,7 +36,6 @@
 #
 # All components inherited here go to vendor image
 #
-LOCAL_DISABLE_OMX := true
 $(call inherit-product, device/google/cuttlefish/shared/phone/device_vendor.mk)
 
 # Nested virtualization support
@@ -45,7 +44,6 @@
 #
 # Special settings for the target
 #
-$(call inherit-product, device/google/cuttlefish/vsoc_x86_64/kernel.mk)
 $(call inherit-product, device/google/cuttlefish/vsoc_x86_64/bootloader.mk)
 
 # Exclude features that are not available on AOSP devices.
diff --git a/vsoc_x86_64_only/phone/aosp_cf_hsum.mk b/vsoc_x86_64_only/phone/aosp_cf_hsum.mk
new file mode 100644
index 0000000..3abca8c
--- /dev/null
+++ b/vsoc_x86_64_only/phone/aosp_cf_hsum.mk
@@ -0,0 +1,28 @@
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Inherit mostly from aosp_cf_x86_64_phone
+$(call inherit-product, device/google/cuttlefish/vsoc_x86_64_only/phone/aosp_cf.mk)
+PRODUCT_NAME := aosp_cf_x86_64_only_phone_hsum
+PRODUCT_MODEL := Cuttlefish x86_64 phone 64-bit only Headless System User Mode
+
+# Set Headless System User Mode
+PRODUCT_SYSTEM_DEFAULT_PROPERTIES = \
+    ro.fw.mu.headless_system_user=true
+
+# TODO(b/204071542): add package allow-list; something like
+# PRODUCT_COPY_FILES += \
+#    device/google/cuttlefish/SOME_PATH/preinstalled-packages.xml:$(TARGET_COPY_OUT_PRODUCT)/etc/sysconfig/preinstalled-packages-cf_phone.xml
diff --git a/vsoc_x86_64_only/slim/aosp_cf.mk b/vsoc_x86_64_only/slim/aosp_cf.mk
index c7ca328..536a4b7 100644
--- a/vsoc_x86_64_only/slim/aosp_cf.mk
+++ b/vsoc_x86_64_only/slim/aosp_cf.mk
@@ -37,14 +37,12 @@
 #
 # All components inherited here go to vendor image
 #
-LOCAL_DISABLE_OMX := true
 LOCAL_PREFER_VENDOR_APEX := true
 $(call inherit-product, device/google/cuttlefish/shared/slim/device_vendor.mk)
 
 #
 # Special settings for the target
 #
-$(call inherit-product, device/google/cuttlefish/vsoc_x86_64/kernel.mk)
 $(call inherit-product, device/google/cuttlefish/vsoc_x86_64/bootloader.mk)
 
 # Exclude features that are not available on AOSP devices.
diff --git a/vsoc_x86_64_only/wear/aosp_cf.mk b/vsoc_x86_64_only/wear/aosp_cf.mk
new file mode 100644
index 0000000..b59129c
--- /dev/null
+++ b/vsoc_x86_64_only/wear/aosp_cf.mk
@@ -0,0 +1,72 @@
+#
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#
+# All components inherited here go to system image
+#
+$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit_only.mk)
+$(call inherit-product, device/google/cuttlefish/shared/wear/aosp_system.mk)
+
+# Cuttlefish uses A/B with system_b preopt, so we must install these
+PRODUCT_PACKAGES += \
+    cppreopts.sh \
+    otapreopt_script \
+
+# Hacks to boot with basic AOSP system apps
+PRODUCT_PACKAGES += \
+    Contacts \
+    Launcher3QuickStep \
+    Provision \
+    Settings \
+    StorageManager \
+    SystemUI \
+
+PRODUCT_COPY_FILES += \
+    frameworks/native/data/etc/android.software.app_widgets.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.software.app_widgets.xml \
+
+#
+# All components inherited here go to system_ext image
+#
+$(call inherit-product, device/google/cuttlefish/shared/wear/aosp_system_ext.mk)
+
+#
+# All components inherited here go to product image
+#
+$(call inherit-product, device/google/cuttlefish/shared/wear/aosp_product.mk)
+
+#
+# All components inherited here go to vendor image
+#
+$(call inherit-product, device/google/cuttlefish/shared/wear/aosp_vendor.mk)
+$(call inherit-product, device/google/cuttlefish/shared/wear/device_vendor.mk)
+
+#
+# Special settings for the target
+#
+$(call inherit-product, device/google/cuttlefish/vsoc_x86_64/bootloader.mk)
+
+# Exclude features that are not available on AOSP devices.
+PRODUCT_COPY_FILES += \
+    frameworks/native/data/etc/aosp_excluded_hardware.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/aosp_excluded_hardware.xml \
+
+PRODUCT_NAME := aosp_cf_x86_64_wear
+PRODUCT_DEVICE := vsoc_x86_64_only
+PRODUCT_MANUFACTURER := Google
+PRODUCT_MODEL := Cuttlefish x86 wearable 64-bit only
+
+PRODUCT_VENDOR_PROPERTIES += \
+    ro.soc.manufacturer=$(PRODUCT_MANUFACTURER) \
+    ro.soc.model=$(PRODUCT_DEVICE)
diff --git a/vsoc_x86_only/BoardConfig.mk b/vsoc_x86_only/BoardConfig.mk
index 8703a10..d914222 100644
--- a/vsoc_x86_only/BoardConfig.mk
+++ b/vsoc_x86_only/BoardConfig.mk
@@ -18,11 +18,20 @@
 # x86 (32-bit kernel) target for Cuttlefish
 #
 
--include device/google/cuttlefish/shared/BoardConfig.mk
-
 TARGET_BOARD_PLATFORM := vsoc_x86
 TARGET_ARCH := x86
 TARGET_ARCH_VARIANT := x86
 TARGET_CPU_ABI := x86
 
-BOARD_VENDOR_RAMDISK_KERNEL_MODULES += $(wildcard device/google/cuttlefish_prebuilts/kernel/$(TARGET_KERNEL_USE)-i686/*.ko)
+TARGET_KERNEL_ARCH ?= i686
+TARGET_KERNEL_USE ?= 5.15
+KERNEL_MODULES_PATH := device/google/cuttlefish_prebuilts/kernel/$(TARGET_KERNEL_USE)-$(TARGET_KERNEL_ARCH)
+TARGET_KERNEL_PATH := $(KERNEL_MODULES_PATH)/kernel-$(TARGET_KERNEL_USE)
+# FIXME: system_dlkm should be specified as well
+
+-include device/google/cuttlefish/shared/BoardConfig.mk
+-include device/google/cuttlefish/shared/camera/BoardConfig.mk
+-include device/google/cuttlefish/shared/graphics/BoardConfig.mk
+-include device/google/cuttlefish/shared/swiftshader/BoardConfig.mk
+-include device/google/cuttlefish/shared/telephony/BoardConfig.mk
+-include device/google/cuttlefish/shared/virgl/BoardConfig.mk
diff --git a/vsoc_x86_only/kernel.mk b/vsoc_x86_only/kernel.mk
index 0eec8d9..60bf204 100644
--- a/vsoc_x86_only/kernel.mk
+++ b/vsoc_x86_only/kernel.mk
@@ -13,7 +13,4 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-TARGET_KERNEL_USE ?= 5.15
-TARGET_KERNEL_PATH ?= device/google/cuttlefish_prebuilts/kernel/$(TARGET_KERNEL_USE)-i686/kernel-$(TARGET_KERNEL_USE)
-
-PRODUCT_COPY_FILES +=$(TARGET_KERNEL_PATH):kernel
+# This file is deprecated.
diff --git a/vsoc_x86_only/phone/aosp_cf.mk b/vsoc_x86_only/phone/aosp_cf.mk
index 1aacd16..72e0521 100644
--- a/vsoc_x86_only/phone/aosp_cf.mk
+++ b/vsoc_x86_only/phone/aosp_cf.mk
@@ -41,7 +41,6 @@
 #
 # Special settings for the target
 #
-$(call inherit-product, device/google/cuttlefish/vsoc_x86_only/kernel.mk)
 # FIXME: For now, this uses the "64-bit" bootloader (for why, take a look at
 #        http://u-boot.10912.n7.nabble.com/64-bit-x86-U-Boot-td244620.html)
 $(call inherit-product, device/google/cuttlefish/vsoc_x86_64/bootloader.mk)